diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 00000000..26d33521 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,3 @@ +# Default ignored files +/shelf/ +/workspace.xml diff --git a/.idea/AutoTimer.iml b/.idea/AutoTimer.iml new file mode 100644 index 00000000..74d515a0 --- /dev/null +++ b/.idea/AutoTimer.iml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 00000000..105ce2da --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 00000000..80323bc6 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,7 @@ + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 00000000..19a65c40 --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index d3325723..00000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "python.pythonPath": "env/bin/python2.7" -} \ No newline at end of file diff --git a/README.md b/README.md index 01b1b98d..38948395 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,44 @@ -Tracking the desktop applications in real time and time spent on each application. +# AutoTimer -Check out this for more https://youtu.be/ZBLYcvPl1MA +--- +### Here's the original writing by the original author: [Kalle Hallden](https://github.com/KalleHallden) +AutoTimer tracks desktop applications in real time and records the time spent on each application. -Dependencies: +Check out this video for more: https://youtu.be/ZBLYcvPl1MA + +#### Dependencies: - selenium +#### Windows Dependencies + +- pywin32 +- python-dateutil +- uiautomation + +--- +### Here's what I added +#### Environment -Windows Depencies + +- Windows 10 +- IDE: PyCharm +- Python 3.6 + +#### Windows Dependencies - pywin32 - python-dateutil -- uiautomation +- uiautomation + +#### Dependencies I deleted + +- AppKit +- Foundation + +--- +### Explanation +1. I deleted the macOS part because installing the AppKit package is troublesome on Windows. + +2. I updated the Python version from 2.7 to 3.6, deleted the old env directory, and added a new one named venv. +3. There are still some bugs. For example, when I open a page in Chrome, the program fails to track it, and when I open a login page with a form saved in Chrome, the console prints the form's contents.
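For reference, here is a minimal sketch of how the Windows tracking described above works, based on the calls made in com/main/autotimer.py. It assumes pywin32 and uiautomation are installed; the function names are illustrative, not the repo's exact API.

```python
import win32gui
import uiautomation as auto


def get_active_window_title():
    # Title of the window that currently has focus (Windows only).
    window = win32gui.GetForegroundWindow()
    return win32gui.GetWindowText(window)


def get_chrome_url():
    # Walk Chrome's UI Automation tree and read the value of the first
    # edit control, which is normally the address bar.
    window = win32gui.GetForegroundWindow()
    chrome_control = auto.ControlFromHandle(window)
    edit = chrome_control.EditControl()
    return 'https://' + edit.GetValuePattern().Value


if __name__ == '__main__':
    title = get_active_window_title()
    print(title)
    if 'Google Chrome' in title:
        print(get_chrome_url())
```

Note that the first edit control found is not guaranteed to be the address bar, which may explain the login-form issue described above.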
\ No newline at end of file diff --git a/activities.json b/activities.json index e69de29b..272a9689 100644 --- a/activities.json +++ b/activities.json @@ -0,0 +1,272 @@ +{ + "activities": [ + { + "name": "AutoTimer \u2013 autotimer.py", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:12:07", + "hours": 0, + "minutes": 0, + "seconds": 10, + "start_time": "2021-06-20 15:11:57" + } + ] + }, + { + "name": "AutoTimer \u2013 activities.json", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:12:12", + "hours": 0, + "minutes": 0, + "seconds": 5, + "start_time": "2021-06-20 15:12:07" + }, + { + "days": 0, + "end_time": "2021-06-20 15:12:17", + "hours": 0, + "minutes": 0, + "seconds": 4, + "start_time": "2021-06-20 15:12:13" + }, + { + "days": 0, + "end_time": "2021-06-20 15:12:30", + "hours": 0, + "minutes": 0, + "seconds": 8, + "start_time": "2021-06-20 15:12:21" + } + ] + }, + { + "name": "", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:12:13", + "hours": 0, + "minutes": 0, + "seconds": 1, + "start_time": "2021-06-20 15:12:12" + }, + { + "days": 0, + "end_time": "2021-06-20 15:12:34", + "hours": 0, + "minutes": 0, + "seconds": 3, + "start_time": "2021-06-20 15:12:31" + }, + { + "days": 0, + "end_time": "2021-06-20 15:56:15", + "hours": 0, + "minutes": 0, + "seconds": 1, + "start_time": "2021-06-20 15:56:14" + }, + { + "days": 0, + "end_time": "2021-06-20 15:56:32", + "hours": 0, + "minutes": 0, + "seconds": 6, + "start_time": "2021-06-20 15:56:25" + }, + { + "days": 0, + "end_time": "2021-06-20 15:57:06", + "hours": 0, + "minutes": 0, + "seconds": 3, + "start_time": "2021-06-20 15:57:03" + }, + { + "days": 0, + "end_time": "2021-06-20 15:57:29", + "hours": 0, + "minutes": 0, + "seconds": 3, + "start_time": "2021-06-20 15:57:26" + }, + { + "days": 0, + "end_time": "2021-06-20 15:57:47", + "hours": 0, + "minutes": 0, + "seconds": 1, + "start_time": "2021-06-20 15:57:46" + } + ] + }, + { + "name": "site-packages", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:12:21", + "hours": 0, + "minutes": 0, + "seconds": 4, + "start_time": "2021-06-20 15:12:17" + } + ] + }, + { + "name": "stackoverflow.com", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:12:31", + "hours": 0, + "minutes": 0, + "seconds": 1, + "start_time": "2021-06-20 15:12:30" + } + ] + }, + { + "name": "AutoTimer \u2013 main.py", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:56:14", + "hours": 0, + "minutes": 0, + "seconds": 8, + "start_time": "2021-06-20 15:56:06" + }, + { + "days": 0, + "end_time": "2021-06-20 15:56:25", + "hours": 0, + "minutes": 0, + "seconds": 1, + "start_time": "2021-06-20 15:56:23" + }, + { + "days": 0, + "end_time": "2021-06-20 15:56:38", + "hours": 0, + "minutes": 0, + "seconds": 6, + "start_time": "2021-06-20 15:56:32" + } + ] + }, + { + "name": "AutoTimer", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:56:23", + "hours": 0, + "minutes": 0, + "seconds": 8, + "start_time": "2021-06-20 15:56:15" + }, + { + "days": 0, + "end_time": "2021-06-20 15:56:56", + "hours": 0, + "minutes": 0, + "seconds": 7, + "start_time": "2021-06-20 15:56:49" + } + ] + }, + { + "name": "AutoTimer \u2013 activity.py", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:56:49", + "hours": 0, + "minutes": 0, + "seconds": 11, + "start_time": "2021-06-20 15:56:38" + }, + { + "days": 0, + "end_time": "2021-06-20 15:57:00", + "hours": 0, + "minutes": 0, + "seconds": 4, + "start_time": "2021-06-20 
15:56:56" + }, + { + "days": 0, + "end_time": "2021-06-20 15:57:46", + "hours": 0, + "minutes": 0, + "seconds": 5, + "start_time": "2021-06-20 15:57:41" + } + ] + }, + { + "name": "\u641c\u7d22", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:57:03", + "hours": 0, + "minutes": 0, + "seconds": 3, + "start_time": "2021-06-20 15:57:00" + } + ] + }, + { + "name": "\u767b\u5f55", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:57:24", + "hours": 0, + "minutes": 0, + "seconds": 18, + "start_time": "2021-06-20 15:57:06" + } + ] + }, + { + "name": "\u5fae\u4fe1", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:57:26", + "hours": 0, + "minutes": 0, + "seconds": 2, + "start_time": "2021-06-20 15:57:24" + }, + { + "days": 0, + "end_time": "2021-06-20 15:57:41", + "hours": 0, + "minutes": 0, + "seconds": 12, + "start_time": "2021-06-20 15:57:29" + } + ] + }, + { + "name": "\u7f51\u6613\u6709\u9053\u8bcd\u5178", + "time_entries": [ + { + "days": 0, + "end_time": "2021-06-20 15:57:55", + "hours": 0, + "minutes": 0, + "seconds": 8, + "start_time": "2021-06-20 15:57:47" + } + ] + } + ] +} \ No newline at end of file diff --git a/activity.py b/activity.py deleted file mode 100644 index cbd828a2..00000000 --- a/activity.py +++ /dev/null @@ -1,103 +0,0 @@ - -import datetime -import json -from dateutil import parser - - -class AcitivyList: - def __init__(self, activities): - self.activities = activities - - def initialize_me(self): - activity_list = AcitivyList([]) - with open('activities.json', 'r') as f: - data = json.load(f) - activity_list = AcitivyList( - activities = self.get_activities_from_json(data) - ) - return activity_list - - def get_activities_from_json(self, data): - return_list = [] - for activity in data['activities']: - return_list.append( - Activity( - name = activity['name'], - time_entries = self.get_time_entires_from_json(activity), - ) - ) - self.activities = return_list - return return_list - - def get_time_entires_from_json(self, data): - return_list = [] - for entry in data['time_entries']: - return_list.append( - TimeEntry( - start_time = parser.parse(entry['start_time']), - end_time = parser.parse(entry['end_time']), - days = entry['days'], - hours = entry['hours'], - minutes = entry['minutes'], - seconds = entry['seconds'], - ) - ) - self.time_entries = return_list - return return_list - - def serialize(self): - return { - 'activities' : self.activities_to_json() - } - - def activities_to_json(self): - activities_ = [] - for activity in self.activities: - activities_.append(activity.serialize()) - - return activities_ - - -class Activity: - def __init__(self, name, time_entries): - self.name = name - self.time_entries = time_entries - - def serialize(self): - return { - 'name' : self.name, - 'time_entries' : self.make_time_entires_to_json() - } - - def make_time_entires_to_json(self): - time_list = [] - for time in self.time_entries: - time_list.append(time.serialize()) - return time_list - - -class TimeEntry: - def __init__(self, start_time, end_time, days, hours, minutes, seconds): - self.start_time = start_time - self.end_time = end_time - self.total_time = end_time - start_time - self.days = days - self.hours = hours - self.minutes = minutes - self.seconds = seconds - - def _get_specific_times(self): - self.days, self.seconds = self.total_time.days, self.total_time.seconds - self.hours = self.days * 24 + self.seconds // 3600 - self.minutes = (self.seconds % 3600) // 60 - self.seconds = self.seconds % 60 - - def 
serialize(self): - return { - 'start_time' : self.start_time.strftime("%Y-%m-%d %H:%M:%S"), - 'end_time' : self.end_time.strftime("%Y-%m-%d %H:%M:%S"), - 'days' : self.days, - 'hours' : self.hours, - 'minutes' : self.minutes, - 'seconds' : self.seconds - } \ No newline at end of file diff --git a/activity.pyc b/activity.pyc deleted file mode 100644 index 01f04137..00000000 Binary files a/activity.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/operations/__init__.py b/com/main/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_internal/operations/__init__.py rename to com/main/__init__.py diff --git a/com/main/__pycache__/__init__.cpython-36.pyc b/com/main/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..4f9674e3 Binary files /dev/null and b/com/main/__pycache__/__init__.cpython-36.pyc differ diff --git a/com/main/__pycache__/activity.cpython-36.pyc b/com/main/__pycache__/activity.cpython-36.pyc new file mode 100644 index 00000000..f6a1ca70 Binary files /dev/null and b/com/main/__pycache__/activity.cpython-36.pyc differ diff --git a/com/main/__pycache__/autotimer.cpython-36.pyc b/com/main/__pycache__/autotimer.cpython-36.pyc new file mode 100644 index 00000000..f4ee12e6 Binary files /dev/null and b/com/main/__pycache__/autotimer.cpython-36.pyc differ diff --git a/env/lib/python2.7/no-global-site-packages.txt b/com/main/activities.json similarity index 100% rename from env/lib/python2.7/no-global-site-packages.txt rename to com/main/activities.json diff --git a/com/main/activity.py b/com/main/activity.py new file mode 100644 index 00000000..0f1b34dc --- /dev/null +++ b/com/main/activity.py @@ -0,0 +1,112 @@ +import datetime +import json +from dateutil import parser + + +class AcitivyList: + def __init__(self, activities): + self.activities = activities + + def initialize_me(self): + """ + Load activity data from the activities.json file on disk into memory. + Put the json file under the project root dir. + + :return: An AcitivyList built from the parsed Activity items. + """ + activity_list = None + with open('activities.json', 'r') as f: + data = json.load(f) + activity_list = AcitivyList( + activities=self.get_activities_from_json(data) + ) + return activity_list + + def get_activities_from_json(self, data): + return_list = [] + for activity in data['activities']: + return_list.append( + Activity( + name=activity['name'], + time_entries=self.get_time_entires_from_json(activity), + ) + ) + self.activities = return_list + return return_list + + def get_time_entires_from_json(self, data): + return_list = [] + for entry in data['time_entries']: + return_list.append( + TimeEntry( + start_time=parser.parse(entry['start_time']), + end_time=parser.parse(entry['end_time']), + days=entry['days'], + hours=entry['hours'], + minutes=entry['minutes'], + seconds=entry['seconds'], + ) + ) + self.time_entries = return_list + return return_list + + def serialize(self): + return { + 'activities': self.activities_to_json() + } + + def activities_to_json(self): + activities_ = [] + for activity in self.activities: + activities_.append(activity.serialize()) + + return activities_ + + +class Activity: + """ + Describes an activity, which is a window or a web page.
+ """ + + def __init__(self, name, time_entries): + self.name = name + self.time_entries = time_entries + + def serialize(self): + return { + 'name': self.name, + 'time_entries': self.make_time_entires_to_json() + } + + def make_time_entires_to_json(self): + time_list = [] + for time in self.time_entries: + time_list.append(time.serialize()) + return time_list + + +class TimeEntry: + def __init__(self, start_time, end_time, days, hours, minutes, seconds): + self.start_time = start_time + self.end_time = end_time + self.total_time = end_time - start_time + self.days = days + self.hours = hours + self.minutes = minutes + self.seconds = seconds + + def _get_specific_times(self): + self.days, self.seconds = self.total_time.days, self.total_time.seconds + self.hours = self.days * 24 + self.seconds // 3600 + self.minutes = (self.seconds % 3600) // 60 + self.seconds = self.seconds % 60 + + def serialize(self): + return { + 'start_time': self.start_time.strftime("%Y-%m-%d %H:%M:%S"), + 'end_time': self.end_time.strftime("%Y-%m-%d %H:%M:%S"), + 'days': self.days, + 'hours': self.hours, + 'minutes': self.minutes, + 'seconds': self.seconds + } diff --git a/autotimer.py b/com/main/autotimer.py similarity index 79% rename from autotimer.py rename to com/main/autotimer.py index e299f60b..e95a7e1f 100644 --- a/autotimer.py +++ b/com/main/autotimer.py @@ -1,18 +1,15 @@ -from __future__ import print_function +# from __future__ import print_function import time -from os import system -from activity import * +from com.main.activity import * import json import datetime import sys + if sys.platform in ['Windows', 'win32', 'cygwin']: import win32gui import uiautomation as auto -elif sys.platform in ['Mac', 'darwin', 'os2', 'os2emx']: - from AppKit import NSWorkspace - from Foundation import * elif sys.platform in ['linux', 'linux2']: - import linux as l + import linux as l active_window_name = "" activity_name = "" @@ -31,9 +28,6 @@ def get_active_window(): if sys.platform in ['Windows', 'win32', 'cygwin']: window = win32gui.GetForegroundWindow() _active_window_name = win32gui.GetWindowText(window) - elif sys.platform in ['Mac', 'darwin', 'os2', 'os2emx']: - _active_window_name = (NSWorkspace.sharedWorkspace() - .activeApplication()['NSApplicationName']) else: print("sys.platform={platform} is not supported." .format(platform=sys.platform)) @@ -42,29 +36,25 @@ def get_active_window(): def get_chrome_url(): + _active_window_name = None if sys.platform in ['Windows', 'win32', 'cygwin']: window = win32gui.GetForegroundWindow() + _active_window_name = win32gui.GetWindowText(window) chromeControl = auto.ControlFromHandle(window) edit = chromeControl.EditControl() return 'https://' + edit.GetValuePattern().Value - elif sys.platform in ['Mac', 'darwin', 'os2', 'os2emx']: - textOfMyScript = """tell app "google chrome" to get the url of the active tab of window 1""" - s = NSAppleScript.initWithSource_( - NSAppleScript.alloc(), textOfMyScript) - results, err = s.executeAndReturnError_(None) - return results.stringValue() else: print("sys.platform={platform} is not supported." 
.format(platform=sys.platform)) print(sys.version) return _active_window_name + try: activeList.initialize_me() except Exception: print('No json') - try: while True: previous_site = "" @@ -77,7 +67,6 @@ def get_chrome_url(): if 'Google Chrome' in new_window_name: new_window_name = l.get_chrome_url_x() - if active_window_name != new_window_name: print(active_window_name) activity_name = active_window_name @@ -104,7 +93,7 @@ def get_chrome_url(): active_window_name = new_window_name time.sleep(1) - + except KeyboardInterrupt: with open('activities.json', 'w') as json_file: json.dump(activeList.serialize(), json_file, indent=4, sort_keys=True) diff --git a/linux.py b/com/main/linux.py similarity index 100% rename from linux.py rename to com/main/linux.py diff --git a/env/.Python b/env/.Python deleted file mode 120000 index 0db37215..00000000 --- a/env/.Python +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/Python \ No newline at end of file diff --git a/env/bin/activate b/env/bin/activate deleted file mode 100644 index da5e5cca..00000000 --- a/env/bin/activate +++ /dev/null @@ -1,78 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# you cannot run it directly - -deactivate () { - unset -f pydoc >/dev/null 2>&1 - - # reset old environment variables - # ! [ -z ${VAR+_} ] returns true if VAR is declared at all - if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then - PATH="$_OLD_VIRTUAL_PATH" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then - PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # This should detect bash and zsh, which have a hash command that must - # be called to get it to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then - hash -r 2>/dev/null - fi - - if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then - PS1="$_OLD_VIRTUAL_PS1" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - if [ ! "${1-}" = "nondestructive" ] ; then - # Self destruct! - unset -f deactivate - fi -} - -# unset irrelevant variables -deactivate nondestructive - -VIRTUAL_ENV="/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env" -export VIRTUAL_ENV - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/bin:$PATH" -export PATH - -# unset PYTHONHOME if set -if ! [ -z "${PYTHONHOME+_}" ] ; then - _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME" - unset PYTHONHOME -fi - -if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then - _OLD_VIRTUAL_PS1="${PS1-}" - if [ "x" != x ] ; then - PS1="${PS1-}" - else - PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}" - fi - export PS1 -fi - -# Make sure to unalias pydoc if it's already there -alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true - -pydoc () { - python -m pydoc "$@" -} - -# This should detect bash and zsh, which have a hash command that must -# be called to get it to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then - hash -r 2>/dev/null -fi diff --git a/env/bin/activate.csh b/env/bin/activate.csh deleted file mode 100644 index 9cdbf1c4..00000000 --- a/env/bin/activate.csh +++ /dev/null @@ -1,42 +0,0 @@ -# This file must be used with "source bin/activate.csh" *from csh*. -# You cannot run it directly. -# Created by Davide Di Blasi . 
- -set newline='\ -' - -alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc' - -# Unset irrelevant variables. -deactivate nondestructive - -setenv VIRTUAL_ENV "/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env" - -set _OLD_VIRTUAL_PATH="$PATH:q" -setenv PATH "$VIRTUAL_ENV:q/bin:$PATH:q" - - - -if ("" != "") then - set env_name = "" -else - set env_name = "$VIRTUAL_ENV:t:q" -endif - -# Could be in a non-interactive environment, -# in which case, $prompt is undefined and we wouldn't -# care about the prompt anyway. -if ( $?prompt ) then - set _OLD_VIRTUAL_PROMPT="$prompt:q" -if ( "$prompt:q" =~ *"$newline:q"* ) then - : -else - set prompt = "[$env_name:q] $prompt:q" -endif -endif - -unset env_name - -alias pydoc python -m pydoc - -rehash diff --git a/env/bin/activate.fish b/env/bin/activate.fish deleted file mode 100644 index 78dd5d0c..00000000 --- a/env/bin/activate.fish +++ /dev/null @@ -1,101 +0,0 @@ -# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*. -# Do not run it directly. - -function _bashify_path -d "Converts a fish path to something bash can recognize" - set fishy_path $argv - set bashy_path $fishy_path[1] - for path_part in $fishy_path[2..-1] - set bashy_path "$bashy_path:$path_part" - end - echo $bashy_path -end - -function _fishify_path -d "Converts a bash path to something fish can recognize" - echo $argv | tr ':' '\n' -end - -function deactivate -d 'Exit virtualenv mode and return to the normal environment.' - # reset old environment variables - if test -n "$_OLD_VIRTUAL_PATH" - # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling - if test (echo $FISH_VERSION | tr "." "\n")[1] -lt 3 - set -gx PATH (_fishify_path $_OLD_VIRTUAL_PATH) - else - set -gx PATH $_OLD_VIRTUAL_PATH - end - set -e _OLD_VIRTUAL_PATH - end - - if test -n "$_OLD_VIRTUAL_PYTHONHOME" - set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME - set -e _OLD_VIRTUAL_PYTHONHOME - end - - if test -n "$_OLD_FISH_PROMPT_OVERRIDE" - # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`. - set -l fish_function_path - - # Erase virtualenv's `fish_prompt` and restore the original. - functions -e fish_prompt - functions -c _old_fish_prompt fish_prompt - functions -e _old_fish_prompt - set -e _OLD_FISH_PROMPT_OVERRIDE - end - - set -e VIRTUAL_ENV - - if test "$argv[1]" != 'nondestructive' - # Self-destruct! - functions -e pydoc - functions -e deactivate - functions -e _bashify_path - functions -e _fishify_path - end -end - -# Unset irrelevant variables. -deactivate nondestructive - -set -gx VIRTUAL_ENV "/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env" - -# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling -if test (echo $FISH_VERSION | tr "." "\n")[1] -lt 3 - set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH) -else - set -gx _OLD_VIRTUAL_PATH $PATH -end -set -gx PATH "$VIRTUAL_ENV/bin" $PATH - -# Unset `$PYTHONHOME` if set. -if set -q PYTHONHOME - set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME - set -e PYTHONHOME -end - -function pydoc - python -m pydoc $argv -end - -if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" - # Copy the current `fish_prompt` function as `_old_fish_prompt`. 
- functions -c fish_prompt _old_fish_prompt - - function fish_prompt - # Save the current $status, for fish_prompts that display it. - set -l old_status $status - - # Prompt override provided? - # If not, just prepend the environment name. - if test -n "" - printf '%s%s' "" (set_color normal) - else - printf '%s(%s) ' (set_color normal) (basename "$VIRTUAL_ENV") - end - - # Restore the original $status - echo "exit $old_status" | source - _old_fish_prompt - end - - set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" -end diff --git a/env/bin/activate.ps1 b/env/bin/activate.ps1 deleted file mode 100644 index 6d8ae2aa..00000000 --- a/env/bin/activate.ps1 +++ /dev/null @@ -1,60 +0,0 @@ -# This file must be dot sourced from PoSh; you cannot run it directly. Do this: . ./activate.ps1 - -$script:THIS_PATH = $myinvocation.mycommand.path -$script:BASE_DIR = split-path (resolve-path "$THIS_PATH/..") -Parent - -function global:deactivate([switch] $NonDestructive) -{ - if (test-path variable:_OLD_VIRTUAL_PATH) - { - $env:PATH = $variable:_OLD_VIRTUAL_PATH - remove-variable "_OLD_VIRTUAL_PATH" -scope global - } - - if (test-path function:_old_virtual_prompt) - { - $function:prompt = $function:_old_virtual_prompt - remove-item function:\_old_virtual_prompt - } - - if ($env:VIRTUAL_ENV) - { - $old_env = split-path $env:VIRTUAL_ENV -leaf - remove-item env:VIRTUAL_ENV -erroraction silentlycontinue - } - - if (!$NonDestructive) - { - # Self destruct! - remove-item function:deactivate - remove-item function:pydoc - } -} - -function global:pydoc -{ - python -m pydoc $args -} - -# unset irrelevant variables -deactivate -nondestructive - -$VIRTUAL_ENV = $BASE_DIR -$env:VIRTUAL_ENV = $VIRTUAL_ENV - -$global:_OLD_VIRTUAL_PATH = $env:PATH -$env:PATH = "$env:VIRTUAL_ENV/bin:" + $env:PATH -if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) -{ - function global:_old_virtual_prompt - { - "" - } - $function:_old_virtual_prompt = $function:prompt - function global:prompt - { - # Add a prefix to the current prompt, but don't discard it. - write-host "($( split-path $env:VIRTUAL_ENV -leaf )) " -nonewline - & $function:_old_virtual_prompt - } -} diff --git a/env/bin/activate_this.py b/env/bin/activate_this.py deleted file mode 100644 index 59b5d724..00000000 --- a/env/bin/activate_this.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Activate virtualenv for current interpreter: - -Use exec(open(this_file).read(), {'__file__': this_file}). - -This can be used when you must use an existing Python interpreter, not the virtualenv bin/python. 
-""" -import os -import site -import sys - -try: - __file__ -except NameError: - raise AssertionError("You must use exec(open(this_file).read(), {'__file__': this_file}))") - -# prepend bin to PATH (this file is inside the bin directory) -bin_dir = os.path.dirname(os.path.abspath(__file__)) -os.environ["PATH"] = os.pathsep.join([bin_dir] + os.environ.get("PATH", "").split(os.pathsep)) - -base = os.path.dirname(bin_dir) - -# virtual env is right above bin directory -os.environ["VIRTUAL_ENV"] = base - -# add the virtual environments site-package to the host python import mechanism -IS_PYPY = hasattr(sys, "pypy_version_info") -IS_JYTHON = sys.platform.startswith("java") -if IS_JYTHON: - site_packages = os.path.join(base, "Lib", "site-packages") -elif IS_PYPY: - site_packages = os.path.join(base, "site-packages") -else: - IS_WIN = sys.platform == "win32" - if IS_WIN: - site_packages = os.path.join(base, "Lib", "site-packages") - else: - site_packages = os.path.join(base, "lib", "python{}".format(sys.version[:3]), "site-packages") - -prev = set(sys.path) -site.addsitedir(site_packages) -sys.real_prefix = sys.prefix -sys.prefix = base - -# Move the added items to the front of the path, in place -new = list(sys.path) -sys.path[:] = [i for i in new if i not in prev] + [i for i in new if i in prev] diff --git a/env/bin/easy_install b/env/bin/easy_install deleted file mode 100755 index 8ed99c3f..00000000 --- a/env/bin/easy_install +++ /dev/null @@ -1,10 +0,0 @@ -#!/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env/bin/python2.7 -# -*- coding: utf-8 -*- -import re -import sys - -from setuptools.command.easy_install import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/easy_install-2.7 b/env/bin/easy_install-2.7 deleted file mode 100755 index 8ed99c3f..00000000 --- a/env/bin/easy_install-2.7 +++ /dev/null @@ -1,10 +0,0 @@ -#!/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env/bin/python2.7 -# -*- coding: utf-8 -*- -import re -import sys - -from setuptools.command.easy_install import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/pip b/env/bin/pip deleted file mode 100755 index 2e81c34e..00000000 --- a/env/bin/pip +++ /dev/null @@ -1,10 +0,0 @@ -#!/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env/bin/python2.7 -# -*- coding: utf-8 -*- -import re -import sys - -from pip._internal import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/pip2 b/env/bin/pip2 deleted file mode 100755 index 2e81c34e..00000000 --- a/env/bin/pip2 +++ /dev/null @@ -1,10 +0,0 @@ -#!/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env/bin/python2.7 -# -*- coding: utf-8 -*- -import re -import sys - -from pip._internal import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/pip2.7 b/env/bin/pip2.7 deleted file mode 100755 index 2e81c34e..00000000 --- a/env/bin/pip2.7 +++ /dev/null @@ -1,10 +0,0 @@ -#!/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env/bin/python2.7 -# -*- coding: utf-8 -*- -import re -import sys - -from pip._internal import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/python b/env/bin/python deleted file mode 120000 
index 8f7a3c1a..00000000 --- a/env/bin/python +++ /dev/null @@ -1 +0,0 @@ -python2.7 \ No newline at end of file diff --git a/env/bin/python-config b/env/bin/python-config deleted file mode 100755 index c12bfde3..00000000 --- a/env/bin/python-config +++ /dev/null @@ -1,78 +0,0 @@ -#!/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env/bin/python - -import sys -import getopt -import sysconfig - -valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags', - 'ldflags', 'help'] - -if sys.version_info >= (3, 2): - valid_opts.insert(-1, 'extension-suffix') - valid_opts.append('abiflags') -if sys.version_info >= (3, 3): - valid_opts.append('configdir') - - -def exit_with_usage(code=1): - sys.stderr.write("Usage: {0} [{1}]\n".format( - sys.argv[0], '|'.join('--'+opt for opt in valid_opts))) - sys.exit(code) - -try: - opts, args = getopt.getopt(sys.argv[1:], '', valid_opts) -except getopt.error: - exit_with_usage() - -if not opts: - exit_with_usage() - -pyver = sysconfig.get_config_var('VERSION') -getvar = sysconfig.get_config_var - -opt_flags = [flag for (flag, val) in opts] - -if '--help' in opt_flags: - exit_with_usage(code=0) - -for opt in opt_flags: - if opt == '--prefix': - print(sysconfig.get_config_var('prefix')) - - elif opt == '--exec-prefix': - print(sysconfig.get_config_var('exec_prefix')) - - elif opt in ('--includes', '--cflags'): - flags = ['-I' + sysconfig.get_path('include'), - '-I' + sysconfig.get_path('platinclude')] - if opt == '--cflags': - flags.extend(getvar('CFLAGS').split()) - print(' '.join(flags)) - - elif opt in ('--libs', '--ldflags'): - abiflags = getattr(sys, 'abiflags', '') - libs = ['-lpython' + pyver + abiflags] - libs += getvar('LIBS').split() - libs += getvar('SYSLIBS').split() - # add the prefix/lib/pythonX.Y/config dir, but only if there is no - # shared library in prefix/lib/. 
- if opt == '--ldflags': - if not getvar('Py_ENABLE_SHARED'): - libs.insert(0, '-L' + getvar('LIBPL')) - if not getvar('PYTHONFRAMEWORK'): - libs.extend(getvar('LINKFORSHARED').split()) - print(' '.join(libs)) - - elif opt == '--extension-suffix': - ext_suffix = sysconfig.get_config_var('EXT_SUFFIX') - if ext_suffix is None: - ext_suffix = sysconfig.get_config_var('SO') - print(ext_suffix) - - elif opt == '--abiflags': - if not getattr(sys, 'abiflags', None): - exit_with_usage() - print(sys.abiflags) - - elif opt == '--configdir': - print(sysconfig.get_config_var('LIBPL')) diff --git a/env/bin/python2 b/env/bin/python2 deleted file mode 120000 index 8f7a3c1a..00000000 --- a/env/bin/python2 +++ /dev/null @@ -1 +0,0 @@ -python2.7 \ No newline at end of file diff --git a/env/bin/python2.7 b/env/bin/python2.7 deleted file mode 100755 index 4464ff5e..00000000 Binary files a/env/bin/python2.7 and /dev/null differ diff --git a/env/bin/wheel b/env/bin/wheel deleted file mode 100755 index da2f8948..00000000 --- a/env/bin/wheel +++ /dev/null @@ -1,10 +0,0 @@ -#!/Users/kalle/Documents/Projects/MyProjects/AutoTimer/env/bin/python2.7 -# -*- coding: utf-8 -*- -import re -import sys - -from wheel.cli import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/include/python2.7 b/env/include/python2.7 deleted file mode 120000 index 385f6708..00000000 --- a/env/include/python2.7 +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/include/python2.7 \ No newline at end of file diff --git a/env/lib/python2.7/LICENSE.txt b/env/lib/python2.7/LICENSE.txt deleted file mode 120000 index 745034a2..00000000 --- a/env/lib/python2.7/LICENSE.txt +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/LICENSE.txt \ No newline at end of file diff --git a/env/lib/python2.7/UserDict.py b/env/lib/python2.7/UserDict.py deleted file mode 120000 index ae603b2b..00000000 --- a/env/lib/python2.7/UserDict.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/UserDict.py \ No newline at end of file diff --git a/env/lib/python2.7/UserDict.pyc b/env/lib/python2.7/UserDict.pyc deleted file mode 100644 index c1df1e97..00000000 Binary files a/env/lib/python2.7/UserDict.pyc and /dev/null differ diff --git a/env/lib/python2.7/_abcoll.py b/env/lib/python2.7/_abcoll.py deleted file mode 120000 index b07e149d..00000000 --- a/env/lib/python2.7/_abcoll.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/_abcoll.py \ No newline at end of file diff --git a/env/lib/python2.7/_abcoll.pyc b/env/lib/python2.7/_abcoll.pyc deleted file mode 100644 index 96d462aa..00000000 Binary files a/env/lib/python2.7/_abcoll.pyc and /dev/null differ diff --git a/env/lib/python2.7/_weakrefset.py b/env/lib/python2.7/_weakrefset.py deleted file mode 120000 index 3ba9a279..00000000 --- a/env/lib/python2.7/_weakrefset.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/_weakrefset.py \ No newline at end of file diff --git a/env/lib/python2.7/_weakrefset.pyc b/env/lib/python2.7/_weakrefset.pyc deleted file mode 100644 index d15aaa6f..00000000 Binary files a/env/lib/python2.7/_weakrefset.pyc and /dev/null differ diff --git a/env/lib/python2.7/abc.py 
b/env/lib/python2.7/abc.py deleted file mode 120000 index 9a1dec7d..00000000 --- a/env/lib/python2.7/abc.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/abc.py \ No newline at end of file diff --git a/env/lib/python2.7/abc.pyc b/env/lib/python2.7/abc.pyc deleted file mode 100644 index 40c4092a..00000000 Binary files a/env/lib/python2.7/abc.pyc and /dev/null differ diff --git a/env/lib/python2.7/codecs.py b/env/lib/python2.7/codecs.py deleted file mode 120000 index 909f20ba..00000000 --- a/env/lib/python2.7/codecs.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/codecs.py \ No newline at end of file diff --git a/env/lib/python2.7/codecs.pyc b/env/lib/python2.7/codecs.pyc deleted file mode 100644 index 87f6e7b1..00000000 Binary files a/env/lib/python2.7/codecs.pyc and /dev/null differ diff --git a/env/lib/python2.7/config b/env/lib/python2.7/config deleted file mode 120000 index e32e9546..00000000 --- a/env/lib/python2.7/config +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/config \ No newline at end of file diff --git a/env/lib/python2.7/copy_reg.py b/env/lib/python2.7/copy_reg.py deleted file mode 120000 index 994942d2..00000000 --- a/env/lib/python2.7/copy_reg.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/copy_reg.py \ No newline at end of file diff --git a/env/lib/python2.7/copy_reg.pyc b/env/lib/python2.7/copy_reg.pyc deleted file mode 100644 index 93ea131e..00000000 Binary files a/env/lib/python2.7/copy_reg.pyc and /dev/null differ diff --git a/env/lib/python2.7/distutils/__init__.py b/env/lib/python2.7/distutils/__init__.py deleted file mode 100644 index b9b0f24f..00000000 --- a/env/lib/python2.7/distutils/__init__.py +++ /dev/null @@ -1,134 +0,0 @@ -import os -import sys -import warnings - -# opcode is not a virtualenv module, so we can use it to find the stdlib -# Important! 
To work on pypy, this must be a module that resides in the -# lib-python/modified-x.y.z directory -import opcode - -dirname = os.path.dirname - -distutils_path = os.path.join(os.path.dirname(opcode.__file__), "distutils") -if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)): - warnings.warn("The virtualenv distutils package at %s appears to be in the same location as the system distutils?") -else: - __path__.insert(0, distutils_path) # noqa: F821 - if sys.version_info < (3, 4): - import imp - - real_distutils = imp.load_module("_virtualenv_distutils", None, distutils_path, ("", "", imp.PKG_DIRECTORY)) - else: - import importlib.machinery - - distutils_path = os.path.join(distutils_path, "__init__.py") - loader = importlib.machinery.SourceFileLoader("_virtualenv_distutils", distutils_path) - if sys.version_info < (3, 5): - import types - - real_distutils = types.ModuleType(loader.name) - else: - import importlib.util - - spec = importlib.util.spec_from_loader(loader.name, loader) - real_distutils = importlib.util.module_from_spec(spec) - loader.exec_module(real_distutils) - - # Copy the relevant attributes - try: - __revision__ = real_distutils.__revision__ - except AttributeError: - pass - __version__ = real_distutils.__version__ - -from distutils import dist, sysconfig # isort:skip - -try: - basestring -except NameError: - basestring = str - -# patch build_ext (distutils doesn't know how to get the libs directory -# path on windows - it hardcodes the paths around the patched sys.prefix) - -if sys.platform == "win32": - from distutils.command.build_ext import build_ext as old_build_ext - - class build_ext(old_build_ext): - def finalize_options(self): - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, basestring): - self.library_dirs = self.library_dirs.split(os.pathsep) - - self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs")) - old_build_ext.finalize_options(self) - - from distutils.command import build_ext as build_ext_module - - build_ext_module.build_ext = build_ext - -# distutils.dist patches: - -old_find_config_files = dist.Distribution.find_config_files - - -def find_config_files(self): - found = old_find_config_files(self) - if os.name == "posix": - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" - user_filename = os.path.join(sys.prefix, user_filename) - if os.path.isfile(user_filename): - for item in list(found): - if item.endswith("pydistutils.cfg"): - found.remove(item) - found.append(user_filename) - return found - - -dist.Distribution.find_config_files = find_config_files - -# distutils.sysconfig patches: - -old_get_python_inc = sysconfig.get_python_inc - - -def sysconfig_get_python_inc(plat_specific=0, prefix=None): - if prefix is None: - prefix = sys.real_prefix - return old_get_python_inc(plat_specific, prefix) - - -sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__ -sysconfig.get_python_inc = sysconfig_get_python_inc - -old_get_python_lib = sysconfig.get_python_lib - - -def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None): - if standard_lib and prefix is None: - prefix = sys.real_prefix - return old_get_python_lib(plat_specific, standard_lib, prefix) - - -sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__ -sysconfig.get_python_lib = sysconfig_get_python_lib - -old_get_config_vars = sysconfig.get_config_vars - - -def sysconfig_get_config_vars(*args): - real_vars = old_get_config_vars(*args) - if sys.platform == 
"win32": - lib_dir = os.path.join(sys.real_prefix, "libs") - if isinstance(real_vars, dict) and "LIBDIR" not in real_vars: - real_vars["LIBDIR"] = lib_dir # asked for all - elif isinstance(real_vars, list) and "LIBDIR" in args: - real_vars = real_vars + [lib_dir] # asked for list - return real_vars - - -sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__ -sysconfig.get_config_vars = sysconfig_get_config_vars diff --git a/env/lib/python2.7/distutils/__init__.pyc b/env/lib/python2.7/distutils/__init__.pyc deleted file mode 100644 index 67afea5a..00000000 Binary files a/env/lib/python2.7/distutils/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/distutils/distutils.cfg b/env/lib/python2.7/distutils/distutils.cfg deleted file mode 100644 index 1af230ec..00000000 --- a/env/lib/python2.7/distutils/distutils.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# This is a config file local to this virtualenv installation -# You may include options that will be used by all distutils commands, -# and by easy_install. For instance: -# -# [easy_install] -# find_links = http://mylocalsite diff --git a/env/lib/python2.7/encodings b/env/lib/python2.7/encodings deleted file mode 120000 index cd8d4a36..00000000 --- a/env/lib/python2.7/encodings +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/encodings \ No newline at end of file diff --git a/env/lib/python2.7/fnmatch.py b/env/lib/python2.7/fnmatch.py deleted file mode 120000 index 1777e411..00000000 --- a/env/lib/python2.7/fnmatch.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/fnmatch.py \ No newline at end of file diff --git a/env/lib/python2.7/fnmatch.pyc b/env/lib/python2.7/fnmatch.pyc deleted file mode 100644 index 59aaa905..00000000 Binary files a/env/lib/python2.7/fnmatch.pyc and /dev/null differ diff --git a/env/lib/python2.7/genericpath.py b/env/lib/python2.7/genericpath.py deleted file mode 120000 index 136dccce..00000000 --- a/env/lib/python2.7/genericpath.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/genericpath.py \ No newline at end of file diff --git a/env/lib/python2.7/genericpath.pyc b/env/lib/python2.7/genericpath.pyc deleted file mode 100644 index c7ec1ee3..00000000 Binary files a/env/lib/python2.7/genericpath.pyc and /dev/null differ diff --git a/env/lib/python2.7/lib-dynload b/env/lib/python2.7/lib-dynload deleted file mode 120000 index 10bd418b..00000000 --- a/env/lib/python2.7/lib-dynload +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/lib-dynload \ No newline at end of file diff --git a/env/lib/python2.7/linecache.py b/env/lib/python2.7/linecache.py deleted file mode 120000 index 7de7361f..00000000 --- a/env/lib/python2.7/linecache.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/linecache.py \ No newline at end of file diff --git a/env/lib/python2.7/linecache.pyc b/env/lib/python2.7/linecache.pyc deleted file mode 100644 index d7916116..00000000 Binary files a/env/lib/python2.7/linecache.pyc and /dev/null differ diff --git a/env/lib/python2.7/locale.py b/env/lib/python2.7/locale.py deleted file mode 120000 index d8d7c71a..00000000 --- a/env/lib/python2.7/locale.py +++ /dev/null @@ -1 +0,0 @@ 
-/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/locale.py \ No newline at end of file diff --git a/env/lib/python2.7/locale.pyc b/env/lib/python2.7/locale.pyc deleted file mode 100644 index b09a14eb..00000000 Binary files a/env/lib/python2.7/locale.pyc and /dev/null differ diff --git a/env/lib/python2.7/ntpath.py b/env/lib/python2.7/ntpath.py deleted file mode 120000 index 4e23c93a..00000000 --- a/env/lib/python2.7/ntpath.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/ntpath.py \ No newline at end of file diff --git a/env/lib/python2.7/ntpath.pyc b/env/lib/python2.7/ntpath.pyc deleted file mode 100644 index 7b4baad0..00000000 Binary files a/env/lib/python2.7/ntpath.pyc and /dev/null differ diff --git a/env/lib/python2.7/orig-prefix.txt b/env/lib/python2.7/orig-prefix.txt deleted file mode 100644 index 92a64959..00000000 --- a/env/lib/python2.7/orig-prefix.txt +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7 \ No newline at end of file diff --git a/env/lib/python2.7/os.py b/env/lib/python2.7/os.py deleted file mode 120000 index fd6efbd9..00000000 --- a/env/lib/python2.7/os.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/os.py \ No newline at end of file diff --git a/env/lib/python2.7/os.pyc b/env/lib/python2.7/os.pyc deleted file mode 100644 index ef2fe62a..00000000 Binary files a/env/lib/python2.7/os.pyc and /dev/null differ diff --git a/env/lib/python2.7/posixpath.py b/env/lib/python2.7/posixpath.py deleted file mode 120000 index 05a0fd73..00000000 --- a/env/lib/python2.7/posixpath.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/posixpath.py \ No newline at end of file diff --git a/env/lib/python2.7/posixpath.pyc b/env/lib/python2.7/posixpath.pyc deleted file mode 100644 index 28527228..00000000 Binary files a/env/lib/python2.7/posixpath.pyc and /dev/null differ diff --git a/env/lib/python2.7/re.py b/env/lib/python2.7/re.py deleted file mode 120000 index 5a78d41b..00000000 --- a/env/lib/python2.7/re.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py \ No newline at end of file diff --git a/env/lib/python2.7/re.pyc b/env/lib/python2.7/re.pyc deleted file mode 100644 index e0743c5b..00000000 Binary files a/env/lib/python2.7/re.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AVFoundation/_AVFoundation.so b/env/lib/python2.7/site-packages/AVFoundation/_AVFoundation.so deleted file mode 100755 index 1164a0ac..00000000 Binary files a/env/lib/python2.7/site-packages/AVFoundation/_AVFoundation.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AVFoundation/__init__.py b/env/lib/python2.7/site-packages/AVFoundation/__init__.py deleted file mode 100644 index 380c1cfb..00000000 --- a/env/lib/python2.7/site-packages/AVFoundation/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -''' -Python mapping for the AVFoundation framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from AVFoundation import _metadata -from AVFoundation._AVFoundation import * -from AVFoundation._inlines import _inline_list_ - - -sys.modules['AVFoundation'] = mod = objc.ObjCLazyModule( - "AVFoundation", - "com.apple.avfoundation", - objc.pathForFramework("/System/Library/Frameworks/AVFoundation.framework"), - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['AVFoundation._metadata'] diff --git a/env/lib/python2.7/site-packages/AVFoundation/__init__.pyc b/env/lib/python2.7/site-packages/AVFoundation/__init__.pyc deleted file mode 100644 index 0af6044b..00000000 Binary files a/env/lib/python2.7/site-packages/AVFoundation/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AVFoundation/_inlines.so b/env/lib/python2.7/site-packages/AVFoundation/_inlines.so deleted file mode 100755 index 8020e7a4..00000000 Binary files a/env/lib/python2.7/site-packages/AVFoundation/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AVFoundation/_metadata.py b/env/lib/python2.7/site-packages/AVFoundation/_metadata.py deleted file mode 100644 index a7888945..00000000 --- a/env/lib/python2.7/site-packages/AVFoundation/_metadata.py +++ /dev/null @@ -1,784 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Feb 18 19:04:03 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'AVAudio3DPoint': objc.createStructType('AVAudio3DPoint', b'{AVAudio3DPoint=fff}', ['x', 'y', 'z']), 'AVAudioConverterPrimeInfo': objc.createStructType('AVAudioConverterPrimeInfo', b'{AVAudioConverterPrimeInfo=II}', ['leadingFrames', 'trailingFrames']), 'AVSampleCursorSyncInfo': objc.createStructType('AVSampleCursorSyncInfo', b'{_AVSampleCursorSyncInfo=ZZZ}', ['sampleIsFullSync', 'sampleIsPartialSync', 'sampleIsDroppable']), 'AVCaptureWhiteBalanceChromaticityValues': objc.createStructType('AVCaptureWhiteBalanceChromaticityValues', b'{_AVCaptureWhiteBalanceChromaticityValues=ff}', ['x', 'y']), 'AVAudio3DVectorOrientation': objc.createStructType('AVAudio3DVectorOrientation', b'{AVAudio3DVectorOrientation={AVAudio3DPoint=fff}{AVAudio3DPoint=fff}}', ['forward', 'up']), 'AVPixelAspectRatio': objc.createStructType('AVPixelAspectRatio', sel32or64(b'{_AVPixelAspectRatio=ii}', b'{_AVPixelAspectRatio=qq}'), ['horizontalSpacing', 'verticalSpacing']), 'AVCaptureWhiteBalanceTemperatureAndTintValues': objc.createStructType('AVCaptureWhiteBalanceTemperatureAndTintValues', b'{_AVCaptureWhiteBalanceTemperatureAndTintValues=ff}', ['temperature', 'tint']), 'AVAudio3DAngularOrientation': objc.createStructType('AVAudio3DAngularOrientation', b'{AVAudio3DAngularOrientation=fff}', ['yaw', 'pitch', 'roll']), 'AVSampleCursorStorageRange': objc.createStructType('AVSampleCursorStorageRange', b'{_AVSampleCursorStorageRange=qq}', ['offset', 'length']), 'AVSampleCursorDependencyInfo': objc.createStructType('AVSampleCursorDependencyInfo', b'{_AVSampleCursorDependencyInfo=ZZZZZZ}', ['sampleIndicatesWhetherItHasDependentSamples', 'sampleHasDependentSamples', 'sampleIndicatesWhetherItDependsOnOthers', 'sampleDependsOnOthers', 'sampleIndicatesWhetherItHasRedundantCoding', 
'sampleHasRedundantCoding']), 'AVBeatRange': objc.createStructType('AVBeatRange', b'{_AVBeatRange=dd}', ['start', 'length']), 'AVSampleCursorChunkInfo': objc.createStructType('AVSampleCursorChunkInfo', b'{_AVSampleCursorChunkInfo=qZZZ}', ['chunkSampleCount', 'chunkHasUniformSampleSizes', 'chunkHasUniformSampleDurations', 'chunkHasUniformFormatDescriptions']), 'AVEdgeWidths': objc.createStructType('AVEdgeWidths', sel32or64(b'{_AVEdgeWidths=ffff}', b'{_AVEdgeWidths=dddd}'), ['left', 'top', 'right', 'bottom']), 'AVCaptureWhiteBalanceGains': objc.createStructType('AVCaptureWhiteBalanceGains', b'{_AVCaptureWhiteBalanceGains=fff}', ['redGain', 'greenGain', 'blueGain'])}) -constants = '''$AVAssetChapterMetadataGroupsDidChangeNotification$AVAssetContainsFragmentsDidChangeNotification$AVAssetDurationDidChangeNotification$AVAssetExportPreset1280x720$AVAssetExportPreset1920x1080$AVAssetExportPreset3840x2160$AVAssetExportPreset640x480$AVAssetExportPreset960x540$AVAssetExportPresetAppleM4A$AVAssetExportPresetAppleM4V1080pHD$AVAssetExportPresetAppleM4V480pSD$AVAssetExportPresetAppleM4V720pHD$AVAssetExportPresetAppleM4VAppleTV$AVAssetExportPresetAppleM4VCellular$AVAssetExportPresetAppleM4VWiFi$AVAssetExportPresetAppleM4ViPod$AVAssetExportPresetAppleProRes422LPCM$AVAssetExportPresetHEVC1920x1080$AVAssetExportPresetHEVC3840x2160$AVAssetExportPresetHEVCHighestQuality$AVAssetExportPresetHighestQuality$AVAssetExportPresetLowQuality$AVAssetExportPresetMediumQuality$AVAssetExportPresetPassthrough$AVAssetImageGeneratorApertureModeCleanAperture$AVAssetImageGeneratorApertureModeEncodedPixels$AVAssetImageGeneratorApertureModeProductionAperture$AVAssetMediaSelectionGroupsDidChangeNotification$AVAssetResourceLoadingRequestStreamingContentKeyRequestRequiresPersistentKey$AVAssetTrackSegmentsDidChangeNotification$AVAssetTrackTimeRangeDidChangeNotification$AVAssetTrackTrackAssociationsDidChangeNotification$AVAssetWasDefragmentedNotification$AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved$AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData$AVAudioBitRateStrategy_Constant$AVAudioBitRateStrategy_LongTermAverage$AVAudioBitRateStrategy_Variable$AVAudioBitRateStrategy_VariableConstrained$AVAudioEngineConfigurationChangeNotification$AVAudioFileTypeKey$AVAudioTimePitchAlgorithmLowQualityZeroLatency$AVAudioTimePitchAlgorithmSpectral$AVAudioTimePitchAlgorithmTimeDomain$AVAudioTimePitchAlgorithmVarispeed$AVAudioUnitComponentTagsDidChangeNotification$AVAudioUnitManufacturerNameApple$AVAudioUnitTypeEffect$AVAudioUnitTypeFormatConverter$AVAudioUnitTypeGenerator$AVAudioUnitTypeMIDIProcessor$AVAudioUnitTypeMixer$AVAudioUnitTypeMusicDevice$AVAudioUnitTypeMusicEffect$AVAudioUnitTypeOfflineEffect$AVAudioUnitTypeOutput$AVAudioUnitTypePanner$AVCaptureDeviceSubjectAreaDidChangeNotification$AVCaptureDeviceTypeBuiltInDualCamera$AVCaptureDeviceTypeBuiltInDuoCamera$AVCaptureDeviceTypeBuiltInMicrophone$AVCaptureDeviceTypeBuiltInTelephotoCamera$AVCaptureDeviceTypeBuiltInTrueDepthCamera$AVCaptureDeviceTypeBuiltInWideAngleCamera$AVCaptureDeviceWasConnectedNotification$AVCaptureDeviceWasDisconnectedNotification$AVCaptureExposureDurationCurrent@{_CMTime=qiIq}$AVCaptureExposureTargetBiasCurrent@f$AVCaptureISOCurrent@f$AVCaptureInputPortFormatDescriptionDidChangeNotification$AVCaptureLensPositionCurrent@f$AVCaptureMaxAvailableTorchLevel@f$AVCaptureSessionDidStartRunningNotification$AVCaptureSessionDidStopRunningNotification$AVCaptureSessionErrorKey$AVCaptureSessionInterruptionEndedNotification$AVCaptureSessionInterruptionR
easonKey$AVCaptureSessionInterruptionSystemPressureStateKey$AVCaptureSessionPreset1280x720$AVCaptureSessionPreset1920x1080$AVCaptureSessionPreset320x240$AVCaptureSessionPreset352x288$AVCaptureSessionPreset3840x2160$AVCaptureSessionPreset640x480$AVCaptureSessionPreset960x540$AVCaptureSessionPresetHigh$AVCaptureSessionPresetInputPriority$AVCaptureSessionPresetLow$AVCaptureSessionPresetMedium$AVCaptureSessionPresetPhoto$AVCaptureSessionPresetiFrame1280x720$AVCaptureSessionPresetiFrame960x540$AVCaptureSessionRuntimeErrorNotification$AVCaptureSessionWasInterruptedNotification$AVCaptureSystemPressureLevelCritical$AVCaptureSystemPressureLevelFair$AVCaptureSystemPressureLevelNominal$AVCaptureSystemPressureLevelSerious$AVCaptureSystemPressureLevelShutdown$AVCaptureWhiteBalanceGainsCurrent@{_AVCaptureWhiteBalanceGains=fff}$AVChannelLayoutKey$AVContentKeyRequestProtocolVersionsKey$AVContentKeyRequestRetryReasonReceivedObsoleteContentKey$AVContentKeyRequestRetryReasonReceivedResponseWithExpiredLease$AVContentKeyRequestRetryReasonTimedOut$AVContentKeySystemClearKey$AVContentKeySystemFairPlayStreaming$AVCoreAnimationBeginTimeAtZero@d$AVEncoderAudioQualityForVBRKey$AVEncoderAudioQualityKey$AVEncoderBitDepthHintKey$AVEncoderBitRateKey$AVEncoderBitRatePerChannelKey$AVEncoderBitRateStrategyKey$AVErrorDeviceKey$AVErrorDiscontinuityFlagsKey$AVErrorFileSizeKey$AVErrorFileTypeKey$AVErrorMediaSubTypeKey$AVErrorMediaTypeKey$AVErrorPIDKey$AVErrorPersistentTrackIDKey$AVErrorPresentationTimeStampKey$AVErrorRecordingSuccessfullyFinishedKey$AVErrorTimeKey$AVFileType3GPP$AVFileType3GPP2$AVFileTypeAC3$AVFileTypeAIFC$AVFileTypeAIFF$AVFileTypeAMR$AVFileTypeAVCI$AVFileTypeAppleM4A$AVFileTypeAppleM4V$AVFileTypeCoreAudioFormat$AVFileTypeDNG$AVFileTypeEnhancedAC3$AVFileTypeHEIC$AVFileTypeHEIF$AVFileTypeJPEG$AVFileTypeMPEG4$AVFileTypeMPEGLayer3$AVFileTypeQuickTimeMovie$AVFileTypeSunAU$AVFileTypeTIFF$AVFileTypeWAVE$AVFormatIDKey$AVFoundationErrorDomain$AVFragmentedMovieContainsMovieFragmentsDidChangeNotification$AVFragmentedMovieDurationDidChangeNotification$AVFragmentedMovieTrackSegmentsDidChangeNotification$AVFragmentedMovieTrackTimeRangeDidChangeNotification$AVFragmentedMovieTrackTotalSampleDataLengthDidChangeNotification$AVFragmentedMovieWasDefragmentedNotification$AVLayerVideoGravityResize$AVLayerVideoGravityResizeAspect$AVLayerVideoGravityResizeAspectFill$AVLinearPCMBitDepthKey$AVLinearPCMIsBigEndianKey$AVLinearPCMIsFloatKey$AVLinearPCMIsNonInterleaved$AVMediaCharacteristicAudible$AVMediaCharacteristicContainsOnlyForcedSubtitles$AVMediaCharacteristicDescribesMusicAndSoundForAccessibility$AVMediaCharacteristicDescribesVideoForAccessibility$AVMediaCharacteristicDubbedTranslation$AVMediaCharacteristicEasyToRead$AVMediaCharacteristicFrameBased$AVMediaCharacteristicIsAuxiliaryContent$AVMediaCharacteristicIsMainProgramContent$AVMediaCharacteristicLanguageTranslation$AVMediaCharacteristicLegible$AVMediaCharacteristicTranscribesSpokenDialogForAccessibility$AVMediaCharacteristicUsesWideGamutColorSpace$AVMediaCharacteristicVisual$AVMediaCharacteristicVoiceOverTranslation$AVMediaTypeAudio$AVMediaTypeClosedCaption$AVMediaTypeDepthData$AVMediaTypeMetadata$AVMediaTypeMetadataObject$AVMediaTypeMuxed$AVMediaTypeSubtitle$AVMediaTypeText$AVMediaTypeTimecode$AVMediaTypeVideo$AVMetadata3GPUserDataKeyAlbumAndTrack$AVMetadata3GPUserDataKeyAuthor$AVMetadata3GPUserDataKeyCollection$AVMetadata3GPUserDataKeyCopyright$AVMetadata3GPUserDataKeyDescription$AVMetadata3GPUserDataKeyGenre$AVMetadata3GPUserDataKeyKeywordList$AVMetadata3GPUserDataKeyLocatio
n$AVMetadata3GPUserDataKeyMediaClassification$AVMetadata3GPUserDataKeyMediaRating$AVMetadata3GPUserDataKeyPerformer$AVMetadata3GPUserDataKeyRecordingYear$AVMetadata3GPUserDataKeyThumbnail$AVMetadata3GPUserDataKeyTitle$AVMetadata3GPUserDataKeyUserRating$AVMetadataCommonIdentifierAlbumName$AVMetadataCommonIdentifierArtist$AVMetadataCommonIdentifierArtwork$AVMetadataCommonIdentifierAssetIdentifier$AVMetadataCommonIdentifierAuthor$AVMetadataCommonIdentifierContributor$AVMetadataCommonIdentifierCopyrights$AVMetadataCommonIdentifierCreationDate$AVMetadataCommonIdentifierCreator$AVMetadataCommonIdentifierDescription$AVMetadataCommonIdentifierFormat$AVMetadataCommonIdentifierLanguage$AVMetadataCommonIdentifierLastModifiedDate$AVMetadataCommonIdentifierLocation$AVMetadataCommonIdentifierMake$AVMetadataCommonIdentifierModel$AVMetadataCommonIdentifierPublisher$AVMetadataCommonIdentifierRelation$AVMetadataCommonIdentifierSoftware$AVMetadataCommonIdentifierSource$AVMetadataCommonIdentifierSubject$AVMetadataCommonIdentifierTitle$AVMetadataCommonIdentifierType$AVMetadataCommonKeyAlbumName$AVMetadataCommonKeyArtist$AVMetadataCommonKeyArtwork$AVMetadataCommonKeyAuthor$AVMetadataCommonKeyContributor$AVMetadataCommonKeyCopyrights$AVMetadataCommonKeyCreationDate$AVMetadataCommonKeyCreator$AVMetadataCommonKeyDescription$AVMetadataCommonKeyFormat$AVMetadataCommonKeyIdentifier$AVMetadataCommonKeyLanguage$AVMetadataCommonKeyLastModifiedDate$AVMetadataCommonKeyLocation$AVMetadataCommonKeyMake$AVMetadataCommonKeyModel$AVMetadataCommonKeyPublisher$AVMetadataCommonKeyRelation$AVMetadataCommonKeySoftware$AVMetadataCommonKeySource$AVMetadataCommonKeySubject$AVMetadataCommonKeyTitle$AVMetadataCommonKeyType$AVMetadataExtraAttributeBaseURIKey$AVMetadataExtraAttributeInfoKey$AVMetadataExtraAttributeValueURIKey$AVMetadataFormatHLSMetadata$AVMetadataFormatID3Metadata$AVMetadataFormatISOUserData$AVMetadataFormatQuickTimeMetadata$AVMetadataFormatQuickTimeUserData$AVMetadataFormatUnknown$AVMetadataFormatiTunesMetadata$AVMetadataID3MetadataKeyAlbumSortOrder$AVMetadataID3MetadataKeyAlbumTitle$AVMetadataID3MetadataKeyAttachedPicture$AVMetadataID3MetadataKeyAudioEncryption$AVMetadataID3MetadataKeyAudioSeekPointIndex$AVMetadataID3MetadataKeyBand$AVMetadataID3MetadataKeyBeatsPerMinute$AVMetadataID3MetadataKeyComments$AVMetadataID3MetadataKeyCommercial$AVMetadataID3MetadataKeyCommercialInformation$AVMetadataID3MetadataKeyCommerical$AVMetadataID3MetadataKeyComposer$AVMetadataID3MetadataKeyConductor$AVMetadataID3MetadataKeyContentGroupDescription$AVMetadataID3MetadataKeyContentType$AVMetadataID3MetadataKeyCopyright$AVMetadataID3MetadataKeyCopyrightInformation$AVMetadataID3MetadataKeyDate$AVMetadataID3MetadataKeyEncodedBy$AVMetadataID3MetadataKeyEncodedWith$AVMetadataID3MetadataKeyEncodingTime$AVMetadataID3MetadataKeyEncryption$AVMetadataID3MetadataKeyEqualization$AVMetadataID3MetadataKeyEqualization2$AVMetadataID3MetadataKeyEventTimingCodes$AVMetadataID3MetadataKeyFileOwner$AVMetadataID3MetadataKeyFileType$AVMetadataID3MetadataKeyGeneralEncapsulatedObject$AVMetadataID3MetadataKeyGroupIdentifier$AVMetadataID3MetadataKeyInitialKey$AVMetadataID3MetadataKeyInternationalStandardRecordingCode$AVMetadataID3MetadataKeyInternetRadioStationName$AVMetadataID3MetadataKeyInternetRadioStationOwner$AVMetadataID3MetadataKeyInvolvedPeopleList_v23$AVMetadataID3MetadataKeyInvolvedPeopleList_v24$AVMetadataID3MetadataKeyLanguage$AVMetadataID3MetadataKeyLeadPerformer$AVMetadataID3MetadataKeyLength$AVMetadataID3MetadataKeyLink$AVMetadataID3MetadataKeyLyricist$
AVMetadataID3MetadataKeyMPEGLocationLookupTable$AVMetadataID3MetadataKeyMediaType$AVMetadataID3MetadataKeyModifiedBy$AVMetadataID3MetadataKeyMood$AVMetadataID3MetadataKeyMusicCDIdentifier$AVMetadataID3MetadataKeyMusicianCreditsList$AVMetadataID3MetadataKeyOfficialArtistWebpage$AVMetadataID3MetadataKeyOfficialAudioFileWebpage$AVMetadataID3MetadataKeyOfficialAudioSourceWebpage$AVMetadataID3MetadataKeyOfficialInternetRadioStationHomepage$AVMetadataID3MetadataKeyOfficialPublisherWebpage$AVMetadataID3MetadataKeyOriginalAlbumTitle$AVMetadataID3MetadataKeyOriginalArtist$AVMetadataID3MetadataKeyOriginalFilename$AVMetadataID3MetadataKeyOriginalLyricist$AVMetadataID3MetadataKeyOriginalReleaseTime$AVMetadataID3MetadataKeyOriginalReleaseYear$AVMetadataID3MetadataKeyOwnership$AVMetadataID3MetadataKeyPartOfASet$AVMetadataID3MetadataKeyPayment$AVMetadataID3MetadataKeyPerformerSortOrder$AVMetadataID3MetadataKeyPlayCounter$AVMetadataID3MetadataKeyPlaylistDelay$AVMetadataID3MetadataKeyPopularimeter$AVMetadataID3MetadataKeyPositionSynchronization$AVMetadataID3MetadataKeyPrivate$AVMetadataID3MetadataKeyProducedNotice$AVMetadataID3MetadataKeyPublisher$AVMetadataID3MetadataKeyRecommendedBufferSize$AVMetadataID3MetadataKeyRecordingDates$AVMetadataID3MetadataKeyRecordingTime$AVMetadataID3MetadataKeyRelativeVolumeAdjustment$AVMetadataID3MetadataKeyRelativeVolumeAdjustment2$AVMetadataID3MetadataKeyReleaseTime$AVMetadataID3MetadataKeyReverb$AVMetadataID3MetadataKeySeek$AVMetadataID3MetadataKeySetSubtitle$AVMetadataID3MetadataKeySignature$AVMetadataID3MetadataKeySize$AVMetadataID3MetadataKeySubTitle$AVMetadataID3MetadataKeySynchronizedLyric$AVMetadataID3MetadataKeySynchronizedTempoCodes$AVMetadataID3MetadataKeyTaggingTime$AVMetadataID3MetadataKeyTermsOfUse$AVMetadataID3MetadataKeyTime$AVMetadataID3MetadataKeyTitleDescription$AVMetadataID3MetadataKeyTitleSortOrder$AVMetadataID3MetadataKeyTrackNumber$AVMetadataID3MetadataKeyUniqueFileIdentifier$AVMetadataID3MetadataKeyUnsynchronizedLyric$AVMetadataID3MetadataKeyUserText$AVMetadataID3MetadataKeyUserURL$AVMetadataID3MetadataKeyYear$AVMetadataISOUserDataKeyCopyright$AVMetadataISOUserDataKeyDate$AVMetadataISOUserDataKeyTaggedCharacteristic$AVMetadataIcyMetadataKeyStreamTitle$AVMetadataIcyMetadataKeyStreamURL$AVMetadataIdentifier3GPUserDataAlbumAndTrack$AVMetadataIdentifier3GPUserDataAuthor$AVMetadataIdentifier3GPUserDataCollection$AVMetadataIdentifier3GPUserDataCopyright$AVMetadataIdentifier3GPUserDataDescription$AVMetadataIdentifier3GPUserDataGenre$AVMetadataIdentifier3GPUserDataKeywordList$AVMetadataIdentifier3GPUserDataLocation$AVMetadataIdentifier3GPUserDataMediaClassification$AVMetadataIdentifier3GPUserDataMediaRating$AVMetadataIdentifier3GPUserDataPerformer$AVMetadataIdentifier3GPUserDataRecordingYear$AVMetadataIdentifier3GPUserDataThumbnail$AVMetadataIdentifier3GPUserDataTitle$AVMetadataIdentifier3GPUserDataUserRating$AVMetadataIdentifierID3MetadataAlbumSortOrder$AVMetadataIdentifierID3MetadataAlbumTitle$AVMetadataIdentifierID3MetadataAttachedPicture$AVMetadataIdentifierID3MetadataAudioEncryption$AVMetadataIdentifierID3MetadataAudioSeekPointIndex$AVMetadataIdentifierID3MetadataBand$AVMetadataIdentifierID3MetadataBeatsPerMinute$AVMetadataIdentifierID3MetadataComments$AVMetadataIdentifierID3MetadataCommercial$AVMetadataIdentifierID3MetadataCommercialInformation$AVMetadataIdentifierID3MetadataCommerical$AVMetadataIdentifierID3MetadataComposer$AVMetadataIdentifierID3MetadataConductor$AVMetadataIdentifierID3MetadataContentGroupDescription$AVMetadataIdentifierID3MetadataCo
ntentType$AVMetadataIdentifierID3MetadataCopyright$AVMetadataIdentifierID3MetadataCopyrightInformation$AVMetadataIdentifierID3MetadataDate$AVMetadataIdentifierID3MetadataEncodedBy$AVMetadataIdentifierID3MetadataEncodedWith$AVMetadataIdentifierID3MetadataEncodingTime$AVMetadataIdentifierID3MetadataEncryption$AVMetadataIdentifierID3MetadataEqualization$AVMetadataIdentifierID3MetadataEqualization2$AVMetadataIdentifierID3MetadataEventTimingCodes$AVMetadataIdentifierID3MetadataFileOwner$AVMetadataIdentifierID3MetadataFileType$AVMetadataIdentifierID3MetadataGeneralEncapsulatedObject$AVMetadataIdentifierID3MetadataGroupIdentifier$AVMetadataIdentifierID3MetadataInitialKey$AVMetadataIdentifierID3MetadataInternationalStandardRecordingCode$AVMetadataIdentifierID3MetadataInternetRadioStationName$AVMetadataIdentifierID3MetadataInternetRadioStationOwner$AVMetadataIdentifierID3MetadataInvolvedPeopleList_v23$AVMetadataIdentifierID3MetadataInvolvedPeopleList_v24$AVMetadataIdentifierID3MetadataLanguage$AVMetadataIdentifierID3MetadataLeadPerformer$AVMetadataIdentifierID3MetadataLength$AVMetadataIdentifierID3MetadataLink$AVMetadataIdentifierID3MetadataLyricist$AVMetadataIdentifierID3MetadataMPEGLocationLookupTable$AVMetadataIdentifierID3MetadataMediaType$AVMetadataIdentifierID3MetadataModifiedBy$AVMetadataIdentifierID3MetadataMood$AVMetadataIdentifierID3MetadataMusicCDIdentifier$AVMetadataIdentifierID3MetadataMusicianCreditsList$AVMetadataIdentifierID3MetadataOfficialArtistWebpage$AVMetadataIdentifierID3MetadataOfficialAudioFileWebpage$AVMetadataIdentifierID3MetadataOfficialAudioSourceWebpage$AVMetadataIdentifierID3MetadataOfficialInternetRadioStationHomepage$AVMetadataIdentifierID3MetadataOfficialPublisherWebpage$AVMetadataIdentifierID3MetadataOriginalAlbumTitle$AVMetadataIdentifierID3MetadataOriginalArtist$AVMetadataIdentifierID3MetadataOriginalFilename$AVMetadataIdentifierID3MetadataOriginalLyricist$AVMetadataIdentifierID3MetadataOriginalReleaseTime$AVMetadataIdentifierID3MetadataOriginalReleaseYear$AVMetadataIdentifierID3MetadataOwnership$AVMetadataIdentifierID3MetadataPartOfASet$AVMetadataIdentifierID3MetadataPayment$AVMetadataIdentifierID3MetadataPerformerSortOrder$AVMetadataIdentifierID3MetadataPlayCounter$AVMetadataIdentifierID3MetadataPlaylistDelay$AVMetadataIdentifierID3MetadataPopularimeter$AVMetadataIdentifierID3MetadataPositionSynchronization$AVMetadataIdentifierID3MetadataPrivate$AVMetadataIdentifierID3MetadataProducedNotice$AVMetadataIdentifierID3MetadataPublisher$AVMetadataIdentifierID3MetadataRecommendedBufferSize$AVMetadataIdentifierID3MetadataRecordingDates$AVMetadataIdentifierID3MetadataRecordingTime$AVMetadataIdentifierID3MetadataRelativeVolumeAdjustment$AVMetadataIdentifierID3MetadataRelativeVolumeAdjustment2$AVMetadataIdentifierID3MetadataReleaseTime$AVMetadataIdentifierID3MetadataReverb$AVMetadataIdentifierID3MetadataSeek$AVMetadataIdentifierID3MetadataSetSubtitle$AVMetadataIdentifierID3MetadataSignature$AVMetadataIdentifierID3MetadataSize$AVMetadataIdentifierID3MetadataSubTitle$AVMetadataIdentifierID3MetadataSynchronizedLyric$AVMetadataIdentifierID3MetadataSynchronizedTempoCodes$AVMetadataIdentifierID3MetadataTaggingTime$AVMetadataIdentifierID3MetadataTermsOfUse$AVMetadataIdentifierID3MetadataTime$AVMetadataIdentifierID3MetadataTitleDescription$AVMetadataIdentifierID3MetadataTitleSortOrder$AVMetadataIdentifierID3MetadataTrackNumber$AVMetadataIdentifierID3MetadataUniqueFileIdentifier$AVMetadataIdentifierID3MetadataUnsynchronizedLyric$AVMetadataIdentifierID3MetadataUserText$AVMetadataIde
ntifierID3MetadataUserURL$AVMetadataIdentifierID3MetadataYear$AVMetadataIdentifierISOUserDataCopyright$AVMetadataIdentifierISOUserDataDate$AVMetadataIdentifierISOUserDataTaggedCharacteristic$AVMetadataIdentifierIcyMetadataStreamTitle$AVMetadataIdentifierIcyMetadataStreamURL$AVMetadataIdentifierQuickTimeMetadataAlbum$AVMetadataIdentifierQuickTimeMetadataArranger$AVMetadataIdentifierQuickTimeMetadataArtist$AVMetadataIdentifierQuickTimeMetadataArtwork$AVMetadataIdentifierQuickTimeMetadataAuthor$AVMetadataIdentifierQuickTimeMetadataCameraFrameReadoutTime$AVMetadataIdentifierQuickTimeMetadataCameraIdentifier$AVMetadataIdentifierQuickTimeMetadataCollectionUser$AVMetadataIdentifierQuickTimeMetadataComment$AVMetadataIdentifierQuickTimeMetadataComposer$AVMetadataIdentifierQuickTimeMetadataContentIdentifier$AVMetadataIdentifierQuickTimeMetadataCopyright$AVMetadataIdentifierQuickTimeMetadataCreationDate$AVMetadataIdentifierQuickTimeMetadataCredits$AVMetadataIdentifierQuickTimeMetadataDescription$AVMetadataIdentifierQuickTimeMetadataDetectedFace$AVMetadataIdentifierQuickTimeMetadataDirectionFacing$AVMetadataIdentifierQuickTimeMetadataDirectionMotion$AVMetadataIdentifierQuickTimeMetadataDirector$AVMetadataIdentifierQuickTimeMetadataDisplayName$AVMetadataIdentifierQuickTimeMetadataEncodedBy$AVMetadataIdentifierQuickTimeMetadataGenre$AVMetadataIdentifierQuickTimeMetadataInformation$AVMetadataIdentifierQuickTimeMetadataKeywords$AVMetadataIdentifierQuickTimeMetadataLocationBody$AVMetadataIdentifierQuickTimeMetadataLocationDate$AVMetadataIdentifierQuickTimeMetadataLocationISO6709$AVMetadataIdentifierQuickTimeMetadataLocationName$AVMetadataIdentifierQuickTimeMetadataLocationNote$AVMetadataIdentifierQuickTimeMetadataLocationRole$AVMetadataIdentifierQuickTimeMetadataMake$AVMetadataIdentifierQuickTimeMetadataModel$AVMetadataIdentifierQuickTimeMetadataOriginalArtist$AVMetadataIdentifierQuickTimeMetadataPerformer$AVMetadataIdentifierQuickTimeMetadataPhonogramRights$AVMetadataIdentifierQuickTimeMetadataPreferredAffineTransform$AVMetadataIdentifierQuickTimeMetadataProducer$AVMetadataIdentifierQuickTimeMetadataPublisher$AVMetadataIdentifierQuickTimeMetadataRatingUser$AVMetadataIdentifierQuickTimeMetadataSoftware$AVMetadataIdentifierQuickTimeMetadataTitle$AVMetadataIdentifierQuickTimeMetadataVideoOrientation$AVMetadataIdentifierQuickTimeMetadataYear$AVMetadataIdentifierQuickTimeMetadataiXML$AVMetadataIdentifierQuickTimeUserDataAlbum$AVMetadataIdentifierQuickTimeUserDataArranger$AVMetadataIdentifierQuickTimeUserDataArtist$AVMetadataIdentifierQuickTimeUserDataAuthor$AVMetadataIdentifierQuickTimeUserDataChapter$AVMetadataIdentifierQuickTimeUserDataComment$AVMetadataIdentifierQuickTimeUserDataComposer$AVMetadataIdentifierQuickTimeUserDataCopyright$AVMetadataIdentifierQuickTimeUserDataCreationDate$AVMetadataIdentifierQuickTimeUserDataCredits$AVMetadataIdentifierQuickTimeUserDataDescription$AVMetadataIdentifierQuickTimeUserDataDirector$AVMetadataIdentifierQuickTimeUserDataDisclaimer$AVMetadataIdentifierQuickTimeUserDataEncodedBy$AVMetadataIdentifierQuickTimeUserDataFullName$AVMetadataIdentifierQuickTimeUserDataGenre$AVMetadataIdentifierQuickTimeUserDataHostComputer$AVMetadataIdentifierQuickTimeUserDataInformation$AVMetadataIdentifierQuickTimeUserDataKeywords$AVMetadataIdentifierQuickTimeUserDataLocationISO6709$AVMetadataIdentifierQuickTimeUserDataMake$AVMetadataIdentifierQuickTimeUserDataModel$AVMetadataIdentifierQuickTimeUserDataOriginalArtist$AVMetadataIdentifierQuickTimeUserDataOriginalFormat$AVMetadataIdentifierQuickTim
eUserDataOriginalSource$AVMetadataIdentifierQuickTimeUserDataPerformers$AVMetadataIdentifierQuickTimeUserDataPhonogramRights$AVMetadataIdentifierQuickTimeUserDataProducer$AVMetadataIdentifierQuickTimeUserDataProduct$AVMetadataIdentifierQuickTimeUserDataPublisher$AVMetadataIdentifierQuickTimeUserDataSoftware$AVMetadataIdentifierQuickTimeUserDataSpecialPlaybackRequirements$AVMetadataIdentifierQuickTimeUserDataTaggedCharacteristic$AVMetadataIdentifierQuickTimeUserDataTrack$AVMetadataIdentifierQuickTimeUserDataTrackName$AVMetadataIdentifierQuickTimeUserDataURLLink$AVMetadataIdentifierQuickTimeUserDataWarning$AVMetadataIdentifierQuickTimeUserDataWriter$AVMetadataIdentifieriTunesMetadataAccountKind$AVMetadataIdentifieriTunesMetadataAcknowledgement$AVMetadataIdentifieriTunesMetadataAlbum$AVMetadataIdentifieriTunesMetadataAlbumArtist$AVMetadataIdentifieriTunesMetadataAppleID$AVMetadataIdentifieriTunesMetadataArranger$AVMetadataIdentifieriTunesMetadataArtDirector$AVMetadataIdentifieriTunesMetadataArtist$AVMetadataIdentifieriTunesMetadataArtistID$AVMetadataIdentifieriTunesMetadataAuthor$AVMetadataIdentifieriTunesMetadataBeatsPerMin$AVMetadataIdentifieriTunesMetadataComposer$AVMetadataIdentifieriTunesMetadataConductor$AVMetadataIdentifieriTunesMetadataContentRating$AVMetadataIdentifieriTunesMetadataCopyright$AVMetadataIdentifieriTunesMetadataCoverArt$AVMetadataIdentifieriTunesMetadataCredits$AVMetadataIdentifieriTunesMetadataDescription$AVMetadataIdentifieriTunesMetadataDirector$AVMetadataIdentifieriTunesMetadataDiscCompilation$AVMetadataIdentifieriTunesMetadataDiscNumber$AVMetadataIdentifieriTunesMetadataEQ$AVMetadataIdentifieriTunesMetadataEncodedBy$AVMetadataIdentifieriTunesMetadataEncodingTool$AVMetadataIdentifieriTunesMetadataExecProducer$AVMetadataIdentifieriTunesMetadataGenreID$AVMetadataIdentifieriTunesMetadataGrouping$AVMetadataIdentifieriTunesMetadataLinerNotes$AVMetadataIdentifieriTunesMetadataLyrics$AVMetadataIdentifieriTunesMetadataOnlineExtras$AVMetadataIdentifieriTunesMetadataOriginalArtist$AVMetadataIdentifieriTunesMetadataPerformer$AVMetadataIdentifieriTunesMetadataPhonogramRights$AVMetadataIdentifieriTunesMetadataPlaylistID$AVMetadataIdentifieriTunesMetadataPredefinedGenre$AVMetadataIdentifieriTunesMetadataProducer$AVMetadataIdentifieriTunesMetadataPublisher$AVMetadataIdentifieriTunesMetadataRecordCompany$AVMetadataIdentifieriTunesMetadataReleaseDate$AVMetadataIdentifieriTunesMetadataSoloist$AVMetadataIdentifieriTunesMetadataSongID$AVMetadataIdentifieriTunesMetadataSongName$AVMetadataIdentifieriTunesMetadataSoundEngineer$AVMetadataIdentifieriTunesMetadataThanks$AVMetadataIdentifieriTunesMetadataTrackNumber$AVMetadataIdentifieriTunesMetadataTrackSubTitle$AVMetadataIdentifieriTunesMetadataUserComment$AVMetadataIdentifieriTunesMetadataUserGenre$AVMetadataKeySpaceAudioFile$AVMetadataKeySpaceCommon$AVMetadataKeySpaceHLSDateRange$AVMetadataKeySpaceID3$AVMetadataKeySpaceISOUserData$AVMetadataKeySpaceIcy$AVMetadataKeySpaceQuickTimeMetadata$AVMetadataKeySpaceQuickTimeUserData$AVMetadataKeySpaceiTunes$AVMetadataObjectTypeAztecCode$AVMetadataObjectTypeCode128Code$AVMetadataObjectTypeCode39Code$AVMetadataObjectTypeCode39Mod43Code$AVMetadataObjectTypeCode93Code$AVMetadataObjectTypeDataMatrixCode$AVMetadataObjectTypeEAN13Code$AVMetadataObjectTypeEAN8Code$AVMetadataObjectTypeFace$AVMetadataObjectTypeITF14Code$AVMetadataObjectTypeInterleaved2of5Code$AVMetadataObjectTypePDF417Code$AVMetadataObjectTypeQRCode$AVMetadataObjectTypeUPCECode$AVMetadataQuickTimeMetadataKeyAlbum$AVMetadataQuickTimeMetadataKe
yArranger$AVMetadataQuickTimeMetadataKeyArtist$AVMetadataQuickTimeMetadataKeyArtwork$AVMetadataQuickTimeMetadataKeyAuthor$AVMetadataQuickTimeMetadataKeyCameraFrameReadoutTime$AVMetadataQuickTimeMetadataKeyCameraIdentifier$AVMetadataQuickTimeMetadataKeyCollectionUser$AVMetadataQuickTimeMetadataKeyComment$AVMetadataQuickTimeMetadataKeyComposer$AVMetadataQuickTimeMetadataKeyContentIdentifier$AVMetadataQuickTimeMetadataKeyCopyright$AVMetadataQuickTimeMetadataKeyCreationDate$AVMetadataQuickTimeMetadataKeyCredits$AVMetadataQuickTimeMetadataKeyDescription$AVMetadataQuickTimeMetadataKeyDirectionFacing$AVMetadataQuickTimeMetadataKeyDirectionMotion$AVMetadataQuickTimeMetadataKeyDirector$AVMetadataQuickTimeMetadataKeyDisplayName$AVMetadataQuickTimeMetadataKeyEncodedBy$AVMetadataQuickTimeMetadataKeyGenre$AVMetadataQuickTimeMetadataKeyInformation$AVMetadataQuickTimeMetadataKeyKeywords$AVMetadataQuickTimeMetadataKeyLocationBody$AVMetadataQuickTimeMetadataKeyLocationDate$AVMetadataQuickTimeMetadataKeyLocationISO6709$AVMetadataQuickTimeMetadataKeyLocationName$AVMetadataQuickTimeMetadataKeyLocationNote$AVMetadataQuickTimeMetadataKeyLocationRole$AVMetadataQuickTimeMetadataKeyMake$AVMetadataQuickTimeMetadataKeyModel$AVMetadataQuickTimeMetadataKeyOriginalArtist$AVMetadataQuickTimeMetadataKeyPerformer$AVMetadataQuickTimeMetadataKeyPhonogramRights$AVMetadataQuickTimeMetadataKeyProducer$AVMetadataQuickTimeMetadataKeyPublisher$AVMetadataQuickTimeMetadataKeyRatingUser$AVMetadataQuickTimeMetadataKeySoftware$AVMetadataQuickTimeMetadataKeyTitle$AVMetadataQuickTimeMetadataKeyYear$AVMetadataQuickTimeMetadataKeyiXML$AVMetadataQuickTimeUserDataKeyAlbum$AVMetadataQuickTimeUserDataKeyArranger$AVMetadataQuickTimeUserDataKeyArtist$AVMetadataQuickTimeUserDataKeyAuthor$AVMetadataQuickTimeUserDataKeyChapter$AVMetadataQuickTimeUserDataKeyComment$AVMetadataQuickTimeUserDataKeyComposer$AVMetadataQuickTimeUserDataKeyCopyright$AVMetadataQuickTimeUserDataKeyCreationDate$AVMetadataQuickTimeUserDataKeyCredits$AVMetadataQuickTimeUserDataKeyDescription$AVMetadataQuickTimeUserDataKeyDirector$AVMetadataQuickTimeUserDataKeyDisclaimer$AVMetadataQuickTimeUserDataKeyEncodedBy$AVMetadataQuickTimeUserDataKeyFullName$AVMetadataQuickTimeUserDataKeyGenre$AVMetadataQuickTimeUserDataKeyHostComputer$AVMetadataQuickTimeUserDataKeyInformation$AVMetadataQuickTimeUserDataKeyKeywords$AVMetadataQuickTimeUserDataKeyLocationISO6709$AVMetadataQuickTimeUserDataKeyMake$AVMetadataQuickTimeUserDataKeyModel$AVMetadataQuickTimeUserDataKeyOriginalArtist$AVMetadataQuickTimeUserDataKeyOriginalFormat$AVMetadataQuickTimeUserDataKeyOriginalSource$AVMetadataQuickTimeUserDataKeyPerformers$AVMetadataQuickTimeUserDataKeyPhonogramRights$AVMetadataQuickTimeUserDataKeyProducer$AVMetadataQuickTimeUserDataKeyProduct$AVMetadataQuickTimeUserDataKeyPublisher$AVMetadataQuickTimeUserDataKeySoftware$AVMetadataQuickTimeUserDataKeySpecialPlaybackRequirements$AVMetadataQuickTimeUserDataKeyTaggedCharacteristic$AVMetadataQuickTimeUserDataKeyTrack$AVMetadataQuickTimeUserDataKeyTrackName$AVMetadataQuickTimeUserDataKeyURLLink$AVMetadataQuickTimeUserDataKeyWarning$AVMetadataQuickTimeUserDataKeyWriter$AVMetadataiTunesMetadataKeyAccountKind$AVMetadataiTunesMetadataKeyAcknowledgement$AVMetadataiTunesMetadataKeyAlbum$AVMetadataiTunesMetadataKeyAlbumArtist$AVMetadataiTunesMetadataKeyAppleID$AVMetadataiTunesMetadataKeyArranger$AVMetadataiTunesMetadataKeyArtDirector$AVMetadataiTunesMetadataKeyArtist$AVMetadataiTunesMetadataKeyArtistID$AVMetadataiTunesMetadataKeyAuthor$AVMetadataiTunesMetadataKeyBeatsPer
Min$AVMetadataiTunesMetadataKeyComposer$AVMetadataiTunesMetadataKeyConductor$AVMetadataiTunesMetadataKeyContentRating$AVMetadataiTunesMetadataKeyCopyright$AVMetadataiTunesMetadataKeyCoverArt$AVMetadataiTunesMetadataKeyCredits$AVMetadataiTunesMetadataKeyDescription$AVMetadataiTunesMetadataKeyDirector$AVMetadataiTunesMetadataKeyDiscCompilation$AVMetadataiTunesMetadataKeyDiscNumber$AVMetadataiTunesMetadataKeyEQ$AVMetadataiTunesMetadataKeyEncodedBy$AVMetadataiTunesMetadataKeyEncodingTool$AVMetadataiTunesMetadataKeyExecProducer$AVMetadataiTunesMetadataKeyGenreID$AVMetadataiTunesMetadataKeyGrouping$AVMetadataiTunesMetadataKeyLinerNotes$AVMetadataiTunesMetadataKeyLyrics$AVMetadataiTunesMetadataKeyOnlineExtras$AVMetadataiTunesMetadataKeyOriginalArtist$AVMetadataiTunesMetadataKeyPerformer$AVMetadataiTunesMetadataKeyPhonogramRights$AVMetadataiTunesMetadataKeyPlaylistID$AVMetadataiTunesMetadataKeyPredefinedGenre$AVMetadataiTunesMetadataKeyProducer$AVMetadataiTunesMetadataKeyPublisher$AVMetadataiTunesMetadataKeyRecordCompany$AVMetadataiTunesMetadataKeyReleaseDate$AVMetadataiTunesMetadataKeySoloist$AVMetadataiTunesMetadataKeySongID$AVMetadataiTunesMetadataKeySongName$AVMetadataiTunesMetadataKeySoundEngineer$AVMetadataiTunesMetadataKeyThanks$AVMetadataiTunesMetadataKeyTrackNumber$AVMetadataiTunesMetadataKeyTrackSubTitle$AVMetadataiTunesMetadataKeyUserComment$AVMetadataiTunesMetadataKeyUserGenre$AVMovieReferenceRestrictionsKey$AVNumberOfChannelsKey$AVOutputSettingsPreset1280x720$AVOutputSettingsPreset1920x1080$AVOutputSettingsPreset3840x2160$AVOutputSettingsPreset640x480$AVOutputSettingsPreset960x540$AVOutputSettingsPresetHEVC1920x1080$AVOutputSettingsPresetHEVC3840x2160$AVPlayerAvailableHDRModesDidChangeNotification$AVPlayerItemDidPlayToEndTimeNotification$AVPlayerItemFailedToPlayToEndTimeErrorKey$AVPlayerItemFailedToPlayToEndTimeNotification$AVPlayerItemLegibleOutputTextStylingResolutionDefault$AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly$AVPlayerItemNewAccessLogEntryNotification$AVPlayerItemNewErrorLogEntryNotification$AVPlayerItemPlaybackStalledNotification$AVPlayerItemTimeJumpedNotification$AVPlayerItemTrackVideoFieldModeDeinterlaceFields$AVPlayerWaitingToMinimizeStallsReason$AVPlayerWaitingWhileEvaluatingBufferingRateReason$AVPlayerWaitingWithNoItemToPlayReason$AVRouteDetectorMultipleRoutesDetectedDidChangeNotification$AVSampleBufferAudioRendererFlushTimeKey$AVSampleBufferAudioRendererWasFlushedAutomaticallyNotification$AVSampleBufferDisplayLayerFailedToDecodeNotification$AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey$AVSampleBufferRenderSynchronizerRateDidChangeNotification$AVSampleRateConverterAlgorithmKey$AVSampleRateConverterAlgorithm_Mastering$AVSampleRateConverterAlgorithm_MinimumPhase$AVSampleRateConverterAlgorithm_Normal$AVSampleRateConverterAudioQualityKey$AVSampleRateKey$AVSpeechSynthesisIPANotationAttribute$AVSpeechSynthesisVoiceIdentifierAlex$AVSpeechUtteranceDefaultSpeechRate@f$AVSpeechUtteranceMaximumSpeechRate@f$AVSpeechUtteranceMinimumSpeechRate@f$AVStreamingKeyDeliveryContentKeyType$AVStreamingKeyDeliveryPersistentContentKeyType$AVTrackAssociationTypeAudioFallback$AVTrackAssociationTypeChapterList$AVTrackAssociationTypeForcedSubtitlesOnly$AVTrackAssociationTypeMetadataReferent$AVTrackAssociationTypeSelectionFollower$AVTrackAssociationTypeTimecode$AVURLAssetAllowsCellularAccessKey$AVURLAssetHTTPCookiesKey$AVURLAssetPreferPreciseDurationAndTimingKey$AVURLAssetReferenceRestrictionsKey$AVVideoAllowFrameReorderingKey$AVVideoAllowWideColorKey$AVVideoApert
ureModeCleanAperture$AVVideoApertureModeEncodedPixels$AVVideoApertureModeProductionAperture$AVVideoAverageBitRateKey$AVVideoAverageNonDroppableFrameRateKey$AVVideoCleanApertureHeightKey$AVVideoCleanApertureHorizontalOffsetKey$AVVideoCleanApertureKey$AVVideoCleanApertureVerticalOffsetKey$AVVideoCleanApertureWidthKey$AVVideoCodecAppleProRes422$AVVideoCodecAppleProRes4444$AVVideoCodecH264$AVVideoCodecHEVC$AVVideoCodecJPEG$AVVideoCodecKey$AVVideoCodecTypeAppleProRes422$AVVideoCodecTypeAppleProRes4444$AVVideoCodecTypeH264$AVVideoCodecTypeHEVC$AVVideoCodecTypeJPEG$AVVideoColorPrimariesKey$AVVideoColorPrimaries_EBU_3213$AVVideoColorPrimaries_ITU_R_2020$AVVideoColorPrimaries_ITU_R_709_2$AVVideoColorPrimaries_P3_D65$AVVideoColorPrimaries_SMPTE_C$AVVideoColorPropertiesKey$AVVideoCompressionPropertiesKey$AVVideoDecompressionPropertiesKey$AVVideoEncoderSpecificationKey$AVVideoExpectedSourceFrameRateKey$AVVideoH264EntropyModeCABAC$AVVideoH264EntropyModeCAVLC$AVVideoH264EntropyModeKey$AVVideoHeightKey$AVVideoMaxKeyFrameIntervalDurationKey$AVVideoMaxKeyFrameIntervalKey$AVVideoPixelAspectRatioHorizontalSpacingKey$AVVideoPixelAspectRatioKey$AVVideoPixelAspectRatioVerticalSpacingKey$AVVideoProfileLevelH264Baseline30$AVVideoProfileLevelH264Baseline31$AVVideoProfileLevelH264Baseline41$AVVideoProfileLevelH264BaselineAutoLevel$AVVideoProfileLevelH264High40$AVVideoProfileLevelH264High41$AVVideoProfileLevelH264HighAutoLevel$AVVideoProfileLevelH264Main30$AVVideoProfileLevelH264Main31$AVVideoProfileLevelH264Main32$AVVideoProfileLevelH264Main41$AVVideoProfileLevelH264MainAutoLevel$AVVideoProfileLevelKey$AVVideoQualityKey$AVVideoScalingModeFit$AVVideoScalingModeKey$AVVideoScalingModeResize$AVVideoScalingModeResizeAspect$AVVideoScalingModeResizeAspectFill$AVVideoTransferFunctionKey$AVVideoTransferFunction_ITU_R_2100_HLG$AVVideoTransferFunction_ITU_R_709_2$AVVideoTransferFunction_SMPTE_240M_1995$AVVideoTransferFunction_SMPTE_ST_2084_PQ$AVVideoWidthKey$AVVideoYCbCrMatrixKey$AVVideoYCbCrMatrix_ITU_R_2020$AVVideoYCbCrMatrix_ITU_R_601_4$AVVideoYCbCrMatrix_ITU_R_709_2$AVVideoYCbCrMatrix_SMPTE_240M_1995$''' -enums = 
'''$AVAUDIOENGINE_HAVE_MUSICPLAYER@1$AVAUDIOFORMAT_HAVE_CMFORMATDESCRIPTION@1$AVAUDIOIONODE_HAVE_AUDIOUNIT@1$AVAUDIONODE_HAVE_AUAUDIOUNIT@1$AVAUDIOUNITCOMPONENT_HAVE_AUDIOCOMPONENT@1$AVAUDIOUNIT_HAVE_AUDIOUNIT@1$AVAssetExportSessionStatusCancelled@5$AVAssetExportSessionStatusCompleted@3$AVAssetExportSessionStatusExporting@2$AVAssetExportSessionStatusFailed@4$AVAssetExportSessionStatusUnknown@0$AVAssetExportSessionStatusWaiting@1$AVAssetImageGeneratorCancelled@2$AVAssetImageGeneratorFailed@1$AVAssetImageGeneratorSucceeded@0$AVAssetReaderStatusCancelled@4$AVAssetReaderStatusCompleted@2$AVAssetReaderStatusFailed@3$AVAssetReaderStatusReading@1$AVAssetReaderStatusUnknown@0$AVAssetReferenceRestrictionForbidAll@65535$AVAssetReferenceRestrictionForbidCrossSiteReference@4$AVAssetReferenceRestrictionForbidLocalReferenceToLocal@8$AVAssetReferenceRestrictionForbidLocalReferenceToRemote@2$AVAssetReferenceRestrictionForbidNone@0$AVAssetReferenceRestrictionForbidRemoteReferenceToLocal@1$AVAssetWriterStatusCancelled@4$AVAssetWriterStatusCompleted@2$AVAssetWriterStatusFailed@3$AVAssetWriterStatusUnknown@0$AVAssetWriterStatusWriting@1$AVAudio3DMixingRenderingAlgorithmEqualPowerPanning@0$AVAudio3DMixingRenderingAlgorithmHRTF@2$AVAudio3DMixingRenderingAlgorithmHRTFHQ@6$AVAudio3DMixingRenderingAlgorithmSoundField@3$AVAudio3DMixingRenderingAlgorithmSphericalHead@1$AVAudio3DMixingRenderingAlgorithmStereoPassThrough@5$AVAudioConverterInputStatus_EndOfStream@2$AVAudioConverterInputStatus_HaveData@0$AVAudioConverterInputStatus_NoDataNow@1$AVAudioConverterOutputStatus_EndOfStream@2$AVAudioConverterOutputStatus_Error@3$AVAudioConverterOutputStatus_HaveData@0$AVAudioConverterOutputStatus_InputRanDry@1$AVAudioConverterPrimeMethod_None@2$AVAudioConverterPrimeMethod_Normal@1$AVAudioConverterPrimeMethod_Pre@0$AVAudioEngineManualRenderingErrorInitialized@-80801$AVAudioEngineManualRenderingErrorInvalidMode@-80800$AVAudioEngineManualRenderingErrorNotRunning@-80802$AVAudioEngineManualRenderingModeOffline@0$AVAudioEngineManualRenderingModeRealtime@1$AVAudioEngineManualRenderingStatusCannotDoInCurrentContext@2$AVAudioEngineManualRenderingStatusError@-1$AVAudioEngineManualRenderingStatusInsufficientDataFromInputNode@1$AVAudioEngineManualRenderingStatusSuccess@0$AVAudioEnvironmentDistanceAttenuationModelExponential@1$AVAudioEnvironmentDistanceAttenuationModelInverse@2$AVAudioEnvironmentDistanceAttenuationModelLinear@3$AVAudioOtherFormat@0$AVAudioPCMFormatFloat32@1$AVAudioPCMFormatFloat64@2$AVAudioPCMFormatInt16@3$AVAudioPCMFormatInt32@4$AVAudioPlayerNodeBufferInterrupts@2$AVAudioPlayerNodeBufferInterruptsAtLoop@4$AVAudioPlayerNodeBufferLoops@1$AVAudioPlayerNodeCompletionDataConsumed@0$AVAudioPlayerNodeCompletionDataPlayedBack@2$AVAudioPlayerNodeCompletionDataRendered@1$AVAudioQualityHigh@96$AVAudioQualityLow@32$AVAudioQualityMax@127$AVAudioQualityMedium@64$AVAudioQualityMin@0$AVAudioUnitDistortionPresetDrumsBitBrush@0$AVAudioUnitDistortionPresetDrumsBufferBeats@1$AVAudioUnitDistortionPresetDrumsLoFi@2$AVAudioUnitDistortionPresetMultiBrokenSpeaker@3$AVAudioUnitDistortionPresetMultiCellphoneConcert@4$AVAudioUnitDistortionPresetMultiDecimated1@5$AVAudioUnitDistortionPresetMultiDecimated2@6$AVAudioUnitDistortionPresetMultiDecimated3@7$AVAudioUnitDistortionPresetMultiDecimated4@8$AVAudioUnitDistortionPresetMultiDistortedCubed@10$AVAudioUnitDistortionPresetMultiDistortedFunk@9$AVAudioUnitDistortionPresetMultiDistortedSquared@11$AVAudioUnitDistortionPresetMultiEcho1@12$AVAudioUnitDistortionPresetMultiEcho2@13$AVAudioUnitDistortionPresetM
ultiEchoTight1@14$AVAudioUnitDistortionPresetMultiEchoTight2@15$AVAudioUnitDistortionPresetMultiEverythingIsBroken@16$AVAudioUnitDistortionPresetSpeechAlienChatter@17$AVAudioUnitDistortionPresetSpeechCosmicInterference@18$AVAudioUnitDistortionPresetSpeechGoldenPi@19$AVAudioUnitDistortionPresetSpeechRadioTower@20$AVAudioUnitDistortionPresetSpeechWaves@21$AVAudioUnitEQFilterTypeBandPass@5$AVAudioUnitEQFilterTypeBandStop@6$AVAudioUnitEQFilterTypeHighPass@2$AVAudioUnitEQFilterTypeHighShelf@8$AVAudioUnitEQFilterTypeLowPass@1$AVAudioUnitEQFilterTypeLowShelf@7$AVAudioUnitEQFilterTypeParametric@0$AVAudioUnitEQFilterTypeResonantHighPass@4$AVAudioUnitEQFilterTypeResonantHighShelf@10$AVAudioUnitEQFilterTypeResonantLowPass@3$AVAudioUnitEQFilterTypeResonantLowShelf@9$AVAudioUnitReverbPresetCathedral@8$AVAudioUnitReverbPresetLargeChamber@7$AVAudioUnitReverbPresetLargeHall@4$AVAudioUnitReverbPresetLargeHall2@12$AVAudioUnitReverbPresetLargeRoom@2$AVAudioUnitReverbPresetLargeRoom2@9$AVAudioUnitReverbPresetMediumChamber@6$AVAudioUnitReverbPresetMediumHall@3$AVAudioUnitReverbPresetMediumHall2@10$AVAudioUnitReverbPresetMediumHall3@11$AVAudioUnitReverbPresetMediumRoom@1$AVAudioUnitReverbPresetPlate@5$AVAudioUnitReverbPresetSmallRoom@0$AVAuthorizationStatusAuthorized@3$AVAuthorizationStatusDenied@2$AVAuthorizationStatusNotDetermined@0$AVAuthorizationStatusRestricted@1$AVCaptureAutoFocusRangeRestrictionFar@2$AVCaptureAutoFocusRangeRestrictionNear@1$AVCaptureAutoFocusRangeRestrictionNone@0$AVCaptureAutoFocusSystemContrastDetection@1$AVCaptureAutoFocusSystemNone@0$AVCaptureAutoFocusSystemPhaseDetection@2$AVCaptureColorSpace_P3_D65@1$AVCaptureColorSpace_sRGB@0$AVCaptureDevicePositionBack@1$AVCaptureDevicePositionFront@2$AVCaptureDevicePositionUnspecified@0$AVCaptureDeviceTransportControlsNotPlayingMode@0$AVCaptureDeviceTransportControlsPlayingMode@1$AVCaptureExposureModeAutoExpose@1$AVCaptureExposureModeContinuousAutoExposure@2$AVCaptureExposureModeCustom@3$AVCaptureExposureModeLocked@0$AVCaptureFlashModeAuto@2$AVCaptureFlashModeOff@0$AVCaptureFlashModeOn@1$AVCaptureFocusModeAutoFocus@1$AVCaptureFocusModeContinuousAutoFocus@2$AVCaptureFocusModeLocked@0$AVCaptureLensStabilizationStatusActive@2$AVCaptureLensStabilizationStatusOff@1$AVCaptureLensStabilizationStatusOutOfRange@3$AVCaptureLensStabilizationStatusUnavailable@4$AVCaptureLensStabilizationStatusUnsupported@0$AVCaptureOutputDataDroppedReasonDiscontinuity@3$AVCaptureOutputDataDroppedReasonLateData@1$AVCaptureOutputDataDroppedReasonNone@0$AVCaptureOutputDataDroppedReasonOutOfBuffers@2$AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient@2$AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient@3$AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableDueToSystemPressure@5$AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground@1$AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps@4$AVCaptureSystemPressureFactorDepthModuleTemperature@4$AVCaptureSystemPressureFactorNone@0$AVCaptureSystemPressureFactorPeakPower@2$AVCaptureSystemPressureFactorSystemTemperature@1$AVCaptureTorchModeAuto@2$AVCaptureTorchModeOff@0$AVCaptureTorchModeOn@1$AVCaptureVideoOrientationLandscapeLeft@4$AVCaptureVideoOrientationLandscapeRight@3$AVCaptureVideoOrientationPortrait@1$AVCaptureVideoOrientationPortraitUpsideDown@2$AVCaptureVideoStabilizationModeAuto@-1$AVCaptureVideoStabilizationModeCinematic@2$AVCaptureVideoStabilizationModeOff@0$AVCaptureVideoStabilizationModeStandard@1$AVCaptureWhiteBalanceModeAutoWhiteBalance@1$AV
CaptureWhiteBalanceModeContinuousAutoWhiteBalance@2$AVCaptureWhiteBalanceModeLocked@0$AVContentAuthorizationBusy@4$AVContentAuthorizationCancelled@2$AVContentAuthorizationCompleted@1$AVContentAuthorizationNotAvailable@5$AVContentAuthorizationNotPossible@6$AVContentAuthorizationTimedOut@3$AVContentAuthorizationUnknown@0$AVContentKeyRequestStatusCancelled@4$AVContentKeyRequestStatusFailed@5$AVContentKeyRequestStatusReceivedResponse@1$AVContentKeyRequestStatusRenewed@2$AVContentKeyRequestStatusRequestingResponse@0$AVContentKeyRequestStatusRetried@3$AVDepthDataAccuracyAbsolute@1$AVDepthDataAccuracyRelative@0$AVDepthDataQualityHigh@1$AVDepthDataQualityLow@0$AVErrorAirPlayControllerRequiresInternet@-11856$AVErrorAirPlayReceiverRequiresInternet@-11857$AVErrorApplicationIsNotAuthorized@-11836$AVErrorApplicationIsNotAuthorizedToUseDevice@-11852$AVErrorCompositionTrackSegmentsNotContiguous@-11824$AVErrorContentIsNotAuthorized@-11835$AVErrorContentIsProtected@-11831$AVErrorContentIsUnavailable@-11863$AVErrorContentNotUpdated@-11866$AVErrorCreateContentKeyRequestFailed@-11860$AVErrorDecodeFailed@-11821$AVErrorDecoderNotFound@-11833$AVErrorDecoderTemporarilyUnavailable@-11839$AVErrorDeviceAlreadyUsedByAnotherSession@-11804$AVErrorDeviceInUseByAnotherApplication@-11815$AVErrorDeviceLockedForConfigurationByAnotherProcess@-11817$AVErrorDeviceNotConnected@-11814$AVErrorDeviceWasDisconnected@-11808$AVErrorDiskFull@-11807$AVErrorDisplayWasDisabled@-11845$AVErrorEncoderNotFound@-11834$AVErrorEncoderTemporarilyUnavailable@-11840$AVErrorExportFailed@-11820$AVErrorExternalPlaybackNotSupportedForAsset@-11870$AVErrorFailedToLoadMediaData@-11849$AVErrorFailedToParse@-11853$AVErrorFileAlreadyExists@-11823$AVErrorFileFailedToParse@-11829$AVErrorFileFormatNotRecognized@-11828$AVErrorFileTypeDoesNotSupportSampleReferences@-11854$AVErrorFormatUnsupported@-11864$AVErrorIncompatibleAsset@-11848$AVErrorInvalidCompositionTrackSegmentDuration@-11825$AVErrorInvalidCompositionTrackSegmentSourceDuration@-11827$AVErrorInvalidCompositionTrackSegmentSourceStartTime@-11826$AVErrorInvalidOutputURLPathExtension@-11843$AVErrorInvalidSourceMedia@-11822$AVErrorInvalidVideoComposition@-11841$AVErrorMalformedDepth@-11865$AVErrorMaximumDurationReached@-11810$AVErrorMaximumFileSizeReached@-11811$AVErrorMaximumNumberOfSamplesForFileFormatReached@-11813$AVErrorMaximumStillImageCaptureRequestsExceeded@-11830$AVErrorMediaChanged@-11809$AVErrorMediaDiscontinuity@-11812$AVErrorNoCompatibleAlternatesForExternalDisplay@-11868$AVErrorNoDataCaptured@-11805$AVErrorNoImageAtTime@-11832$AVErrorNoLongerPlayable@-11867$AVErrorNoSourceTrack@-11869$AVErrorOperationNotAllowed@-11862$AVErrorOperationNotSupportedForAsset@-11838$AVErrorOutOfMemory@-11801$AVErrorReferenceForbiddenByReferencePolicy@-11842$AVErrorScreenCaptureFailed@-11844$AVErrorServerIncorrectlyConfigured@-11850$AVErrorSessionConfigurationChanged@-11806$AVErrorSessionNotRunning@-11803$AVErrorTorchLevelUnavailable@-11846$AVErrorUndecodableMediaData@-11855$AVErrorUnknown@-11800$AVErrorUnsupportedOutputSettings@-11861$AVErrorVideoCompositorFailed@-11858$AVKeyValueStatusCancelled@4$AVKeyValueStatusFailed@3$AVKeyValueStatusLoaded@2$AVKeyValueStatusLoading@1$AVKeyValueStatusUnknown@0$AVMovieWritingAddMovieHeaderToDestination@0$AVMovieWritingTruncateDestinationToMovieHeaderOnly@1$AVMusicSequenceLoadSMF_ChannelsToTracks@1$AVMusicSequenceLoadSMF_PreserveTracks@0$AVMusicTrackLoopCountForever@-1$AVPlayerActionAtItemEndAdvance@0$AVPlayerActionAtItemEndNone@2$AVPlayerActionAtItemEndPause@1$AVPlayerHDRModeDolb
yVision@4$AVPlayerHDRModeHDR10@2$AVPlayerHDRModeHLG@1$AVPlayerItemStatusFailed@2$AVPlayerItemStatusReadyToPlay@1$AVPlayerItemStatusUnknown@0$AVPlayerLooperStatusCancelled@3$AVPlayerLooperStatusFailed@2$AVPlayerLooperStatusReady@1$AVPlayerLooperStatusUnknown@0$AVPlayerStatusFailed@2$AVPlayerStatusReadyToPlay@1$AVPlayerStatusUnknown@0$AVPlayerTimeControlStatusPaused@0$AVPlayerTimeControlStatusPlaying@2$AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate@1$AVQueuedSampleBufferRenderingStatusFailed@2$AVQueuedSampleBufferRenderingStatusRendering@1$AVQueuedSampleBufferRenderingStatusUnknown@0$AVSampleBufferRequestDirectionForward@1$AVSampleBufferRequestDirectionNone@0$AVSampleBufferRequestDirectionReverse@-1$AVSampleBufferRequestModeImmediate@0$AVSampleBufferRequestModeScheduled@1$AVSpeechBoundaryImmediate@0$AVSpeechBoundaryWord@1$AVSpeechSynthesisVoiceQualityDefault@1$AVSpeechSynthesisVoiceQualityEnhanced@2$AVVideoFieldModeBoth@0$AVVideoFieldModeBottomOnly@2$AVVideoFieldModeDeinterlace@3$AVVideoFieldModeTopOnly@1$''' -misc.update({}) -functions={'AVMakeBeatRange': (b'{_AVBeatRange=dd}dd', '', {'inline': True}), 'AVAudioMake3DPoint': (b'{AVAudio3DPoint=fff}fff',), 'AVMakeRectWithAspectRatioInsideRect': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}{CGSize=ff}{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGSize=dd}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'AVAudioMake3DVector': (b'{AVAudio3DPoint=fff}fff',), 'AVAudioMake3DVectorOrientation': (b'{AVAudio3DVectorOrientation={AVAudio3DPoint=fff}{AVAudio3DPoint=fff}}{AVAudio3DPoint=fff}{AVAudio3DPoint=fff}',), 'AVAudioMake3DAngularOrientation': (b'{AVAudio3DAngularOrientation=fff}fff',)} -aliases = {'AVLinearPCMIsNonInterleavedKey': 'AVLinearPCMIsNonInterleaved', 'AVAudio3DVector': 'AVAudio3DPoint'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'AVAsset', b'canContainFragments', {'retval': {'type': 'Z'}}) - r(b'AVAsset', b'containsFragments', {'retval': {'type': 'Z'}}) - r(b'AVAsset', b'copyCGImageAtTime:actualTime:error:', {'retval': {'type': 'Z'}}) - r(b'AVAsset', b'duration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAsset', b'hasProtectedContent', {'retval': {'type': b'Z'}}) - r(b'AVAsset', b'isCompatibleWithAirPlayVideo', {'retval': {'type': 'Z'}}) - r(b'AVAsset', b'isComposable', {'retval': {'type': b'Z'}}) - r(b'AVAsset', b'isExportable', {'retval': {'type': b'Z'}}) - r(b'AVAsset', b'isPlayable', {'retval': {'type': b'Z'}}) - r(b'AVAsset', b'isReadable', {'retval': {'type': b'Z'}}) - r(b'AVAsset', b'overallDurationHint', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAsset', b'providesPreciseDurationAndTiming', {'retval': {'type': b'Z'}}) - r(b'AVAssetCache', b'isPlayableOffline', {'retval': {'type': b'Z'}}) - r(b'AVAssetExportSession', b'canPerformMultiplePassesOverSourceMediaData', {'retval': {'type': b'Z'}}) - r(b'AVAssetExportSession', b'determineCompatibilityOfExportPreset:withAsset:outputFileType:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'AVAssetExportSession', b'determineCompatibleFileTypesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'AVAssetExportSession', b'exportAsynchronouslyWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'AVAssetExportSession', 
b'maxDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAssetExportSession', b'setCanPerformMultiplePassesOverSourceMediaData:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetExportSession', b'setShouldOptimizeForNetworkUse:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetExportSession', b'setTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVAssetExportSession', b'shouldOptimizeForNetworkUse', {'retval': {'type': b'Z'}}) - r(b'AVAssetExportSession', b'timeRange', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'AVAssetImageGenerator', b'appliesPreferredTrackTransform', {'retval': {'type': b'Z'}}) - r(b'AVAssetImageGenerator', b'copyCGImageAtTime:actualTime:error:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type': b'^{_CMTime=qiIq}'}, 4: {'type_modifier': b'o'}}}) - r(b'AVAssetImageGenerator', b'generateCGImagesAsynchronouslyForTimes:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'{_CMTime=qiIq}'}, 2: {'type': b'^{__CGImage}'}, 3: {'type': b'{_CMTime=qiIq}'}, 4: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'AVAssetImageGenerator', b'requestedTimeToleranceAfter', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAssetImageGenerator', b'requestedTimeToleranceBefore', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAssetImageGenerator', b'setAppliesPreferredTrackTransform:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetImageGenerator', b'setRequestedTimeToleranceAfter:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetImageGenerator', b'setRequestedTimeToleranceBefore:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetReader', b'assetReaderWithAsset:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAssetReader', b'canAddOutput:', {'retval': {'type': b'Z'}}) - r(b'AVAssetReader', b'initWithAsset:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAssetReader', b'setTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVAssetReader', b'startReading', {'retval': {'type': b'Z'}}) - r(b'AVAssetReader', b'timeRange', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'AVAssetReaderOutput', b'alwaysCopiesSampleData', {'retval': {'type': b'Z'}}) - r(b'AVAssetReaderOutput', b'setAlwaysCopiesSampleData:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetReaderOutput', b'setSupportsRandomAccess:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetReaderOutput', b'supportsRandomAccess', {'retval': {'type': b'Z'}}) - r(b'AVAssetResourceLoader', b'preloadsEligibleContentKeys', {'retval': {'type': b'Z'}}) - r(b'AVAssetResourceLoader', b'setPreloadsEligibleContentKeys:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetResourceLoadingContentInformationRequest', b'isByteRangeAccessSupported', {'retval': {'type': b'Z'}}) - r(b'AVAssetResourceLoadingContentInformationRequest', b'setByteRangeAccessSupported:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetResourceLoadingDataRequest', b'requestsAllDataToEndOfResource', {'retval': {'type': b'Z'}}) - r(b'AVAssetResourceLoadingRequest', b'isCancelled', {'retval': {'type': b'Z'}}) - r(b'AVAssetResourceLoadingRequest', b'isFinished', {'retval': {'type': b'Z'}}) - r(b'AVAssetResourceLoadingRequest', b'streamingContentKeyRequestDataForApp:contentIdentifier:options:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'AVAssetResourceLoadingRequestor', 
b'providesExpiredSessionReports', {'retval': {'type': b'Z'}}) - r(b'AVAssetTrack', b'canProvideSampleCursors', {'retval': {'type': b'Z'}}) - r(b'AVAssetTrack', b'hasMediaCharacteristic:', {'retval': {'type': b'Z'}}) - r(b'AVAssetTrack', b'isDecodable', {'retval': {'type': b'Z'}}) - r(b'AVAssetTrack', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'AVAssetTrack', b'isPlayable', {'retval': {'type': b'Z'}}) - r(b'AVAssetTrack', b'isSelfContained', {'retval': {'type': b'Z'}}) - r(b'AVAssetTrack', b'makeSampleCursorWithPresentationTimeStamp:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetTrack', b'minFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAssetTrack', b'requiresFrameReordering', {'retval': {'type': b'Z'}}) - r(b'AVAssetTrack', b'samplePresentationTimeForTrackTime:', {'retval': {'type': b'{_CMTime=qiIq}'}, 'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetTrack', b'segmentForTrackTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetTrack', b'timeRange', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'AVAssetTrackSegment', b'isEmpty', {'retval': {'type': b'Z'}}) - r(b'AVAssetTrackSegment', b'timeMapping', {'retval': {'type': b'{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}'}}) - r(b'AVAssetWriter', b'assetWriterWithURL:fileType:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAssetWriter', b'canAddInput:', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriter', b'canAddInputGroup:', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriter', b'canApplyOutputSettings:forMediaType:', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriter', b'endSessionAtSourceTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetWriter', b'finishWriting', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriter', b'finishWritingWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVAssetWriter', b'initWithURL:fileType:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAssetWriter', b'movieFragmentInterval', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAssetWriter', b'overallDurationHint', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAssetWriter', b'setMovieFragmentInterval:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetWriter', b'setOverallDurationHint:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetWriter', b'setShouldOptimizeForNetworkUse:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetWriter', b'shouldOptimizeForNetworkUse', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriter', b'startSessionAtSourceTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetWriter', b'startWriting', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriterInput', b'appendSampleBuffer:', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriterInput', b'canAddTrackAssociationWithTrackOfInput:type:', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriterInput', b'canPerformMultiplePasses', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriterInput', b'expectsMediaDataInRealTime', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriterInput', b'isReadyForMoreMediaData', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriterInput', b'marksOutputTrackAsEnabled', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriterInput', b'performsMultiPassEncodingIfSupported', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriterInput', b'preferredMediaChunkDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - 
r(b'AVAssetWriterInput', b'requestMediaDataWhenReadyOnQueue:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVAssetWriterInput', b'respondToEachPassDescriptionOnQueue:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVAssetWriterInput', b'setExpectsMediaDataInRealTime:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetWriterInput', b'setMarksOutputTrackAsEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetWriterInput', b'setPerformsMultiPassEncodingIfSupported:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAssetWriterInput', b'setPreferredMediaChunkDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAssetWriterInputMetadataAdaptor', b'appendTimedMetadataGroup:', {'retval': {'type': b'Z'}}) - r(b'AVAssetWriterInputPixelBufferAdaptor', b'appendPixelBuffer:withPresentationTime:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVAsynchronousCIImageFilteringRequest', b'compositionTime', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAsynchronousVideoCompositionRequest', b'compositionTime', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVAudioBuffer', b'data', {'retval': {'c_array_of_variable_length': True}}) - r(b'AVAudioBuffer', b'packetDescriptions', {'retval': {'c_array_of_variable_length': True}}) - r(b'AVAudioChannelLayout', b'isEqual:', {'retval': {'type': b'Z'}}) - r(b'AVAudioConverter', b'convertToBuffer:error:withInputFromBlock:', {'arguments': {3: {'type_modifier': b'o'}, 4: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'I'}, 2: {'type': sel32or64(b'o^i', b'o^q')}}}}}}) - r(b'AVAudioConverter', b'convertToBuffer:fromBuffer:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioConverter', b'dither', {'retval': {'type': b'Z'}}) - r(b'AVAudioConverter', b'downmix', {'retval': {'type': b'Z'}}) - r(b'AVAudioConverter', b'setDither:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAudioConverter', b'setDownmix:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAudioEngine', b'connectMIDI:to:format:block:', {'arguments': {5: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'q'}, 2: {'type': b'C'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': b'n^v', 'c_array_length_in_arg': 3}}}}}}) - r(b'AVAudioEngine', b'connectMIDI:toNodes:format:block:', {'arguments': {5: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'q'}, 2: {'type': b'C'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': b'n^v', 'c_array_length_in_arg': 3}}}}}}) - r(b'AVAudioEngine', b'enableManualRenderingMode:format:maximumFrameCount:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'AVAudioEngine', b'isAutoShutdownEnabled', {'retval': {'type': b'Z'}}) - r(b'AVAudioEngine', b'isInManualRenderingMode', {'retval': {'type': b'Z'}}) - r(b'AVAudioEngine', b'isRunning', {'retval': {'type': b'Z'}}) - r(b'AVAudioEngine', b'manualRenderingBlock', {'retval': {'callable': {'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'I'}, 2: {'type': b'o^{AudioBufferList=L[1{AudioBuffer=LL^v}]}'}, 3: {'type': b'o^i'}}}}}) - r(b'AVAudioEngine', b'renderOffline:toBuffer:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioEngine', b'setAutoShutdownEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAudioEngine', 
b'startAndReturnError:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'AVAudioEnvironmentReverbParameters', b'enable', {'retval': {'type': b'Z'}}) - r(b'AVAudioEnvironmentReverbParameters', b'setEnable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAudioFile', b'initForReading:commonFormat:interleaved:error:', {'arguments': {4: {'type': b'Z'}, 5: {'type_modifier': b'o'}}}) - r(b'AVAudioFile', b'initForReading:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioFile', b'initForWriting:settings:commonFormat:interleaved:error:', {'arguments': {5: {'type': b'Z'}, 6: {'type_modifier': b'o'}}}) - r(b'AVAudioFile', b'initForWriting:settings:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioFile', b'readIntoBuffer:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioFile', b'readIntoBuffer:frameCount:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioFile', b'writeFromBuffer:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioFormat', b'initWithCommonFormat:sampleRate:channels:interleaved:', {'arguments': {5: {'type': b'Z'}}}) - r(b'AVAudioFormat', b'initWithCommonFormat:sampleRate:interleaved:channelLayout:', {'arguments': {4: {'type': b'Z'}}}) - r(b'AVAudioFormat', b'initWithStreamDescription:', {'arguments': {2: {'type_modifier': b'n'}}}) - r(b'AVAudioFormat', b'initWithStreamDescription:channelLayout:', {'arguments': {2: {'type_modifier': b'n'}}}) - r(b'AVAudioFormat', b'isEqual:', {'retval': {'type': b'Z'}}) - r(b'AVAudioFormat', b'isInterleaved', {'retval': {'type': b'Z'}}) - r(b'AVAudioFormat', b'isStandard', {'retval': {'type': b'Z'}}) - r(b'AVAudioFormat', b'streamDescription', {'retval': {'c_array_of_fixed_length': 1}}) - r(b'AVAudioInputNode', b'setManualRenderingInputPCMFormat:inputBlock:', {'retval': {'type': b'Z'}, 'arguments': {3: {'callable': {'retval': {'type': b'^{AudioBufferList=L[1{AudioBuffer=LL^v}]}'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'I'}}}}}}) - r(b'AVAudioMixInputParameters', b'getVolumeRampForTime:startVolume:endVolume:timeRange:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}, 5: {'type': b'^{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}', 'type_modifier': b'o'}}}) - r(b'AVAudioNode', b'installTapOnBus:bufferSize:format:block:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'AVAudioPlayer', b'enableRate', {'retval': {'type': b'Z'}}) - r(b'AVAudioPlayer', b'initWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioPlayer', b'initWithContentsOfURL:fileTypeHint:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioPlayer', b'initWithData:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioPlayer', b'initWithData:fileTypeHint:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioPlayer', b'isMeteringEnabled', {'retval': {'type': b'Z'}}) - r(b'AVAudioPlayer', b'isPlaying', {'retval': {'type': b'Z'}}) - r(b'AVAudioPlayer', b'play', {'retval': {'type': b'Z'}}) - r(b'AVAudioPlayer', b'playAtTime:', {'retval': {'type': b'Z'}}) - r(b'AVAudioPlayer', b'prepareToPlay', {'retval': {'type': b'Z'}}) - r(b'AVAudioPlayer', b'setEnableRate:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAudioPlayer', b'setMeteringEnabled:', {'arguments': {2: {'type': 
b'Z'}}}) - r(b'AVAudioPlayerNode', b'isPlaying', {'retval': {'type': b'Z'}}) - r(b'AVAudioPlayerNode', b'loadFromURL:options:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioPlayerNode', b'scheduleBuffer:atTime:options:completionCallbackType:completionHandler:', {'arguments': {6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'AVAudioPlayerNode', b'scheduleBuffer:atTime:options:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVAudioPlayerNode', b'scheduleBuffer:completionCallbackType:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'AVAudioPlayerNode', b'scheduleBuffer:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVAudioPlayerNode', b'scheduleFile:atTime:completionCallbackType:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'AVAudioPlayerNode', b'scheduleFile:atTime:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVAudioPlayerNode', b'scheduleSegment:startingFrame:frameCount:atTime:completionCallbackType:completionHandler:', {'arguments': {7: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'AVAudioPlayerNode', b'scheduleSegment:startingFrame:frameCount:atTime:completionHandler:', {'arguments': {6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVAudioRecorder', b'deleteRecording', {'retval': {'type': b'Z'}}) - r(b'AVAudioRecorder', b'initWithURL:format:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioRecorder', b'initWithURL:settings:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioRecorder', b'isMeteringEnabled', {'retval': {'type': b'Z'}}) - r(b'AVAudioRecorder', b'isRecording', {'retval': {'type': b'Z'}}) - r(b'AVAudioRecorder', b'prepareToRecord', {'retval': {'type': b'Z'}}) - r(b'AVAudioRecorder', b'record', {'retval': {'type': b'Z'}}) - r(b'AVAudioRecorder', b'recordAtTime:', {'retval': {'type': b'Z'}}) - r(b'AVAudioRecorder', b'recordAtTime:forDuration:', {'retval': {'type': b'Z'}}) - r(b'AVAudioRecorder', b'recordForDuration:', {'retval': {'type': b'Z'}}) - r(b'AVAudioRecorder', b'setMeteringEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAudioSequencer', b'beatsForHostTime:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioSequencer', b'dataWithSMPTEResolution:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioSequencer', b'hostTimeForBeats:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioSequencer', b'isPlaying', {'retval': {'type': 'Z'}}) - r(b'AVAudioSequencer', b'loadFromData:options:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioSequencer', b'loadFromURL:options:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVAudioSequencer', b'startAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'AVAudioSequencer', b'writeToURL:SMPTEResolution:replaceExisting:error:', {'retval': {'type': 'Z'}, 'arguments': 
{4: {'type': 'Z'}, 5: {'type_modifier': b'o'}}}) - r(b'AVAudioTime', b'initWithAudioTimeStamp:sampleRate:', {'arguments': {2: {'type_modifier': b'n'}}}) - r(b'AVAudioTime', b'isHostTimeValid', {'retval': {'type': b'Z'}}) - r(b'AVAudioTime', b'isSampleTimeValid', {'retval': {'type': b'Z'}}) - r(b'AVAudioTime', b'timeWithAudioTimeStamp:sampleRate:', {'arguments': {2: {'type_modifier': b'n'}}}) - r(b'AVAudioUnit', b'instantiateWithComponentDescription:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'AVAudioUnit', b'loadAudioUnitPresetAtURL:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioUnitComponent', b'hasCustomView', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitComponent', b'hasMIDIInput', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitComponent', b'hasMIDIOutput', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitComponent', b'isSandboxSafe', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitComponent', b'passesAUVal', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitComponent', b'supportsNumberInputChannels:outputChannels:', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitComponentManager', b'componentsPassingTest:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^Z'}}}, 'type': '@?'}}}) - r(b'AVAudioUnitEQFilterParameters', b'bypass', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitEQFilterParameters', b'setBypass:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAudioUnitEffect', b'bypass', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitEffect', b'setBypass:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAudioUnitGenerator', b'bypass', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitGenerator', b'setBypass:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVAudioUnitSampler', b'loadAudioFilesAtURLs:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioUnitSampler', b'loadInstrumentAtURL:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVAudioUnitSampler', b'loadSoundBankInstrumentAtURL:program:bankMSB:bankLSB:error:', {'retval': {'type': b'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'AVAudioUnitTimeEffect', b'bypass', {'retval': {'type': b'Z'}}) - r(b'AVAudioUnitTimeEffect', b'setBypass:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCameraCalibrationData', b'extrinsicMatrix', {'retval': {'type': b'{_matrix_float4x3=?}'}}) - r(b'AVCameraCalibrationData', b'intrinsicMatrix', {'retval': {'type': b'{_matrix_float3x3=?}'}}) - r(b'AVCaptureAudioChannel', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureAudioChannel', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureConnection', b'automaticallyAdjustsVideoMirroring', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'enablesVideoStabilizationWhenAvailable', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isActive', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isCameraIntrinsicMatrixDeliveryEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isCameraIntrinsicMatrixDeliverySupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isVideoFieldModeSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isVideoMaxFrameDurationSupported', {'retval': {'type': b'Z'}}) - 
r(b'AVCaptureConnection', b'isVideoMinFrameDurationSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isVideoMirrored', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isVideoMirroringSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isVideoOrientationSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isVideoStabilizationEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'isVideoStabilizationSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureConnection', b'setAutomaticallyAdjustsVideoMirroring:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureConnection', b'setCameraIntrinsicMatrixDeliveryEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureConnection', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureConnection', b'setEnablesVideoStabilizationWhenAvailable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureConnection', b'setVideoMaxFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureConnection', b'setVideoMinFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureConnection', b'setVideoMirrored:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureConnection', b'videoMaxFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureConnection', b'videoMinFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureDepthDataOutput', b'alwaysDiscardsLateDepthData', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDepthDataOutput', b'isFilteringEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDepthDataOutput', b'setAlwaysDiscardsLateDepthData:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureDepthDataOutput', b'setFilteringEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureDevice', b'activeDepthDataMinFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureDevice', b'activeMaxExposureDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureDevice', b'activeVideoMaxFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureDevice', b'activeVideoMinFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureDevice', b'automaticallyAdjustsVideoHDREnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'automaticallyEnablesLowLightBoostWhenAvailable', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'chromaticityValuesForDeviceWhiteBalanceGains:', {'retval': {'type': b'{_AVCaptureWhiteBalanceChromaticityValues=ff}'}, 'arguments': {2: {'type': b'{_AVCaptureWhiteBalanceGains=fff}'}}}) - r(b'AVCaptureDevice', b'deviceWhiteBalanceGains', {'retval': {'type': b'{_AVCaptureWhiteBalanceGains=fff}'}}) - r(b'AVCaptureDevice', b'deviceWhiteBalanceGainsForChromaticityValues:', {'retval': {'type': b'{_AVCaptureWhiteBalanceGains=fff}'}, 'arguments': {2: {'type': b'{_AVCaptureWhiteBalanceChromaticityValues=ff}'}}}) - r(b'AVCaptureDevice', b'deviceWhiteBalanceGainsForTemperatureAndTintValues:', {'retval': {'type': b'{_AVCaptureWhiteBalanceGains=fff}'}, 'arguments': {2: {'type': b'{_AVCaptureWhiteBalanceTemperatureAndTintValues=ff}'}}}) - r(b'AVCaptureDevice', b'exposureDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureDevice', b'grayWorldDeviceWhiteBalanceGains', {'retval': {'type': b'{_AVCaptureWhiteBalanceGains=fff}'}}) - r(b'AVCaptureDevice', b'hasFlash', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'hasMediaType:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'hasTorch', {'retval': {'type': b'Z'}}) - 
r(b'AVCaptureDevice', b'isAdjustingExposure', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isAdjustingFocus', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isAdjustingWhiteBalance', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isAutoFocusRangeRestrictionSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isConnected', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isExposureModeSupported:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isExposurePointOfInterestSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isFlashActive', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isFlashAvailable', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isFlashModeSupported:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isFocusModeSupported:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isFocusPointOfInterestSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isInUseByAnotherApplication', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isLockingFocusWithCustomLensPositionSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isLockingWhiteBalanceWithCustomDeviceGainsSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isLowLightBoostEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isLowLightBoostSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isRampingVideoZoom', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isSmoothAutoFocusEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isSmoothAutoFocusSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isSubjectAreaChangeMonitoringEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isSuspended', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isTorchActive', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isTorchAvailable', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isTorchModeSupported:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isVideoHDREnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'isWhiteBalanceModeSupported:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'lockForConfiguration:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'AVCaptureDevice', b'requestAccessForMediaType:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}}}}) - r(b'AVCaptureDevice', b'setActiveDepthDataMinFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureDevice', b'setActiveMaxExposureDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureDevice', b'setActiveVideoMaxFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureDevice', b'setActiveVideoMinFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureDevice', b'setAutomaticallyAdjustsVideoHDREnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureDevice', b'setAutomaticallyEnablesLowLightBoostWhenAvailable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureDevice', b'setExposureModeCustomWithDuration:ISO:completionHandler:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureDevice', b'setSmoothAutoFocusEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureDevice', b'setSubjectAreaChangeMonitoringEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureDevice', b'setTorchModeOnWithLevel:error:', {'retval': {'type': 
b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVCaptureDevice', b'setVideoHDREnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureDevice', b'setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:', {'arguments': {2: {'type': b'{_AVCaptureWhiteBalanceGains=fff}'}}}) - r(b'AVCaptureDevice', b'supportsAVCaptureSessionPreset:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDevice', b'temperatureAndTintValuesForDeviceWhiteBalanceGains:', {'retval': {'type': b'{_AVCaptureWhiteBalanceTemperatureAndTintValues=ff}'}, 'arguments': {2: {'type': b'{_AVCaptureWhiteBalanceGains=fff}'}}}) - r(b'AVCaptureDevice', b'transportControlsSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDeviceFormat', b'highResolutionStillImageDimensions', {'retval': {'type': b'{_CMVideoDimensions=ii}'}}) - r(b'AVCaptureDeviceFormat', b'isPortraitEffectsMatteStillImageDeliverySupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDeviceFormat', b'isVideoBinned', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDeviceFormat', b'isVideoHDRSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDeviceFormat', b'isVideoStabilizationModeSupported:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDeviceFormat', b'isVideoStabilizationSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureDeviceFormat', b'maxExposureDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureDeviceFormat', b'minExposureDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureDeviceInput', b'deviceInputWithDevice:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVCaptureDeviceInput', b'initWithDevice:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVCaptureDeviceInput', b'setUnifiedAutoExposureDefaultsEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureDeviceInput', b'unifiedAutoExposureDefaultsEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureFileOutput', b'isRecording', {'retval': {'type': b'Z'}}) - r(b'AVCaptureFileOutput', b'isRecordingPaused', {'retval': {'type': b'Z'}}) - r(b'AVCaptureFileOutput', b'maxRecordedDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureFileOutput', b'recordedDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureFileOutput', b'setMaxRecordedDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureInputPort', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureInputPort', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureManualExposureBracketedStillImageSettings', b'exposureDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureManualExposureBracketedStillImageSettings', b'manualExposureSettingsWithExposureDuration:ISO:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureMetadataInput', b'appendTimedMetadataGroup:error:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureMovieFileOutput', b'movieFragmentInterval', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureMovieFileOutput', b'recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureMovieFileOutput', b'setMovieFragmentInterval:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureMovieFileOutput', b'setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhoto', b'isRawPhoto', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhoto', b'timestamp', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCapturePhotoBracketSettings', b'isLensStabilizationEnabled', 
{'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoBracketSettings', b'setLensStabilizationEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoOutput', b'isCameraCalibrationDataDeliverySupported', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isDepthDataDeliveryEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isDepthDataDeliverySupported', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isDualCameraDualPhotoDeliveryEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isDualCameraDualPhotoDeliverySupported', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isDualCameraFusionSupported', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isFlashScene', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isHighResolutionCaptureEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isLensStabilizationDuringBracketedCaptureSupported', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isLivePhotoAutoTrimmingEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isLivePhotoCaptureEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isLivePhotoCaptureSupported', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isLivePhotoCaptureSuspended', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isPortraitEffectsMatteDeliveryEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isPortraitEffectsMatteDeliverySupported', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isStillImageStabilizationScene', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'isStillImageStabilizationSupported', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoOutput', b'setDepthDataDeliveryEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoOutput', b'setDualCameraDualPhotoDeliveryEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoOutput', b'setHighResolutionCaptureEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoOutput', b'setLivePhotoAutoTrimmingEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoOutput', b'setLivePhotoCaptureEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoOutput', b'setLivePhotoCaptureSuspended:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoOutput', b'setPortraitEffectsMatteDeliveryEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', b'embedsDepthDataInPhoto', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'embedsPortraitEffectsMatteInPhoto', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'isAutoDualCameraFusionEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'isAutoStillImageStabilizationEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'isCameraCalibrationDataDeliveryEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'isDepthDataDeliveryEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'isDepthDataFiltered', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'isDualCameraDualPhotoDeliveryEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'isHighResolutionPhotoEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'isPortraitEffectsMatteDeliveryEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCapturePhotoSettings', b'setAutoDualCameraFusionEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', 
b'setAutoStillImageStabilizationEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', b'setCameraCalibrationDataDeliveryEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', b'setDepthDataDeliveryEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', b'setDepthDataFiltered:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', b'setDualCameraDualPhotoDeliveryEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', b'setEmbedsDepthDataInPhoto:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', b'setEmbedsPortraitEffectsMatteInPhoto:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', b'setHighResolutionPhotoEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCapturePhotoSettings', b'setPortraitEffectsMatteDeliveryEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureResolvedPhotoSettings', b'embeddedThumbnailDimensions', {'retval': {'type': b'{_CMVideoDimensions=ii}'}}) - r(b'AVCaptureResolvedPhotoSettings', b'isDualCameraFusionEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureResolvedPhotoSettings', b'isFlashEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureResolvedPhotoSettings', b'isStillImageStabilizationEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureResolvedPhotoSettings', b'livePhotoMovieDimensions', {'retval': {'type': b'{_CMVideoDimensions=ii}'}}) - r(b'AVCaptureResolvedPhotoSettings', b'photoDimensions', {'retval': {'type': b'{_CMVideoDimensions=ii}'}}) - r(b'AVCaptureResolvedPhotoSettings', b'portraitEffectsMatteDimensions', {'retval': {'type': b'{_CMVideoDimensions=ii}'}}) - r(b'AVCaptureResolvedPhotoSettings', b'previewDimensions', {'retval': {'type': b'{_CMVideoDimensions=ii}'}}) - r(b'AVCaptureResolvedPhotoSettings', b'rawPhotoDimensions', {'retval': {'type': b'{_CMVideoDimensions=ii}'}}) - r(b'AVCaptureScreenInput', b'capturesCursor', {'retval': {'type': b'Z'}}) - r(b'AVCaptureScreenInput', b'capturesMouseClicks', {'retval': {'type': b'Z'}}) - r(b'AVCaptureScreenInput', b'minFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureScreenInput', b'removesDuplicateFrames', {'retval': {'type': b'Z'}}) - r(b'AVCaptureScreenInput', b'setCapturesCursor:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureScreenInput', b'setCapturesMouseClicks:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureScreenInput', b'setMinFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureScreenInput', b'setRemovesDuplicateFrames:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureSession', b'automaticallyConfiguresApplicationAudioSession', {'retval': {'type': b'Z'}}) - r(b'AVCaptureSession', b'automaticallyConfiguresCaptureDeviceForWideColor', {'retval': {'type': b'Z'}}) - r(b'AVCaptureSession', b'canAddConnection:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureSession', b'canAddInput:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureSession', b'canAddOutput:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureSession', b'canSetSessionPreset:', {'retval': {'type': b'Z'}}) - r(b'AVCaptureSession', b'isInterrupted', {'retval': {'type': b'Z'}}) - r(b'AVCaptureSession', b'isRunning', {'retval': {'type': b'Z'}}) - r(b'AVCaptureSession', b'setAutomaticallyConfiguresApplicationAudioSession:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureSession', b'setAutomaticallyConfiguresCaptureDeviceForWideColor:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureSession', b'setUsesApplicationAudioSession:', 
{'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureSession', b'usesApplicationAudioSession', {'retval': {'type': b'Z'}}) - r(b'AVCaptureStillImageOutput', b'automaticallyEnablesStillImageStabilizationWhenAvailable', {'retval': {'type': b'Z'}}) - r(b'AVCaptureStillImageOutput', b'captureStillImageAsynchronouslyFromConnection:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^{opaqueCMSampleBuffer=}'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'AVCaptureStillImageOutput', b'isCapturingStillImage', {'retval': {'type': b'Z'}}) - r(b'AVCaptureStillImageOutput', b'isHighResolutionStillImageOutputEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureStillImageOutput', b'isLensStabilizationDuringBracketedCaptureEnabled', {'retval': {'type': b'Z'}}) - r(b'AVCaptureStillImageOutput', b'isLensStabilizationDuringBracketedCaptureSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureStillImageOutput', b'isStillImageStabilizationActive', {'retval': {'type': b'Z'}}) - r(b'AVCaptureStillImageOutput', b'isStillImageStabilizationSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureStillImageOutput', b'setAutomaticallyEnablesStillImageStabilizationWhenAvailable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureStillImageOutput', b'setHighResolutionStillImageOutputEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureStillImageOutput', b'setLensStabilizationDuringBracketedCaptureEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureSynchronizedData', b'timestamp', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureSynchronizedDepthData', b'depthDataWasDropped', {'retval': {'type': b'Z'}}) - r(b'AVCaptureSynchronizedSampleBufferData', b'sampleBufferWasDropped', {'retval': {'type': b'Z'}}) - r(b'AVCaptureVideoDataOutput', b'alwaysDiscardsLateVideoFrames', {'retval': {'type': b'Z'}}) - r(b'AVCaptureVideoDataOutput', b'minFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVCaptureVideoDataOutput', b'setAlwaysDiscardsLateVideoFrames:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureVideoDataOutput', b'setMinFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCaptureVideoPreviewLayer', b'automaticallyAdjustsMirroring', {'retval': {'type': b'Z'}}) - r(b'AVCaptureVideoPreviewLayer', b'isMirrored', {'retval': {'type': b'Z'}}) - r(b'AVCaptureVideoPreviewLayer', b'isMirroringSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureVideoPreviewLayer', b'isOrientationSupported', {'retval': {'type': b'Z'}}) - r(b'AVCaptureVideoPreviewLayer', b'setAutomaticallyAdjustsMirroring:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCaptureVideoPreviewLayer', b'setMirrored:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVCompositionTrack', b'segmentForTrackTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVCompositionTrackSegment', b'compositionTrackSegmentWithTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVCompositionTrackSegment', b'compositionTrackSegmentWithURL:trackID:sourceTimeRange:targetTimeRange:', {'arguments': {4: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 5: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVCompositionTrackSegment', b'initWithTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVCompositionTrackSegment', b'initWithURL:trackID:sourceTimeRange:targetTimeRange:', {'arguments': {4: {'type': 
b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 5: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVCompositionTrackSegment', b'isEmpty', {'retval': {'type': b'Z'}}) - r(b'AVContentKeyRequest', b'canProvidePersistableContentKey', {'retval': {'type': b'Z'}}) - r(b'AVContentKeyRequest', b'makeStreamingContentKeyRequestDataForApp:contentIdentifier:options:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'AVContentKeyRequest', b'persistableContentKeyFromKeyVendorResponse:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVContentKeyRequest', b'renewsExpiringResponseData', {'retval': {'type': b'Z'}}) - r(b'AVContentKeyRequest', b'respondByRequestingPersistableContentKeyRequestAndReturnError:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'AVDepthData', b'depthDataByReplacingDepthDataMapWithPixelBuffer:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVDepthData', b'depthDataFromDictionaryRepresentation:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVDepthData', b'dictionaryRepresentationForAuxiliaryDataType:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'AVDepthData', b'isDepthDataFiltered', {'retval': {'type': b'Z'}}) - r(b'AVFrameRateRange', b'maxFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVFrameRateRange', b'minFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMIDIPlayer', b'initWithContentsOfURL:soundBankURL:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVMIDIPlayer', b'initWithData:soundBankURL:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVMIDIPlayer', b'isPlaying', {'retval': {'type': b'Z'}}) - r(b'AVMIDIPlayer', b'play:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'AVMediaSelection', b'mediaSelectionCriteriaCanBeAppliedAutomaticallyToMediaSelectionGroup:', {'retval': {'type': 'Z'}}) - r(b'AVMediaSelectionGroup', b'allowsEmptySelection', {'retval': {'type': b'Z'}}) - r(b'AVMediaSelectionOption', b'hasMediaCharacteristic:', {'retval': {'type': b'Z'}}) - r(b'AVMediaSelectionOption', b'isPlayable', {'retval': {'type': b'Z'}}) - r(b'AVMetadataFaceObject', b'hasRollAngle', {'retval': {'type': b'Z'}}) - r(b'AVMetadataFaceObject', b'hasYawAngle', {'retval': {'type': b'Z'}}) - r(b'AVMetadataItem', b'duration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMetadataItem', b'metadataItemWithPropertiesOfMetadataItem:valueLoadingHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'AVMetadataItem', b'statusOfValueForKey:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVMetadataItem', b'time', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMetadataObject', b'duration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMetadataObject', b'time', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMovie', b'canContainMovieFragments', {'retval': {'type': b'Z'}}) - r(b'AVMovie', b'containsMovieFragments', {'retval': {'type': 'Z'}}) - r(b'AVMovie', b'isCompatibleWithFileType:', {'retval': {'type': b'Z'}}) - r(b'AVMovie', b'movieHeaderWithFileType:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVMovie', b'writeMovieHeaderToURL:fileType:options:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'AVMovieTrack', 
b'mediaDecodeTimeRange', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'AVMovieTrack', b'mediaPresentationTimeRange', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'AVMusicTrack', b'isLoopingEnabled', {'retval': {'type': 'Z'}}) - r(b'AVMusicTrack', b'isMuted', {'retval': {'type': 'Z'}}) - r(b'AVMusicTrack', b'isSoloed', {'retval': {'type': 'Z'}}) - r(b'AVMusicTrack', b'setLoopingEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVMusicTrack', b'setMuted:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVMusicTrack', b'setSoloed:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVMutableAudioMixInputParameters', b'setVolume:atTime:', {'arguments': {3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableAudioMixInputParameters', b'setVolumeRampFromStartVolume:toEndVolume:timeRange:', {'arguments': {4: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableComposition', b'insertEmptyTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableComposition', b'insertTimeRange:ofAsset:atTime:error:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 4: {'type': b'{_CMTime=qiIq}'}, 5: {'type_modifier': b'o'}}}) - r(b'AVMutableComposition', b'removeTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableComposition', b'scaleTimeRange:toDuration:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableCompositionTrack', b'insertEmptyTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableCompositionTrack', b'insertTimeRange:ofTrack:atTime:error:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 4: {'type': b'{_CMTime=qiIq}'}, 5: {'type_modifier': b'o'}}}) - r(b'AVMutableCompositionTrack', b'insertTimeRanges:ofTracks:atTime:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type': b'{_CMTime=qiIq}'}, 5: {'type_modifier': b'o'}}}) - r(b'AVMutableCompositionTrack', b'removeTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableCompositionTrack', b'scaleTimeRange:toDuration:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableCompositionTrack', b'validateTrackSegments:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVMutableMetadataItem', b'duration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMutableMetadataItem', b'setDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableMetadataItem', b'setTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableMetadataItem', b'time', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMutableMovie', b'initWithData:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVMutableMovie', b'initWithSettingsFromMovie:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVMutableMovie', b'initWithURL:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVMutableMovie', b'insertEmptyTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableMovie', b'insertTimeRange:ofAsset:atTime:copySampleData:error:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': 
b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 4: {'type': b'{_CMTime=qiIq}'}, 5: {'type': 'Z'}, 6: {'type_modifier': b'o'}}}) - r(b'AVMutableMovie', b'interleavingPeriod', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMutableMovie', b'isModified', {'retval': {'type': 'Z'}}) - r(b'AVMutableMovie', b'movieWithData:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVMutableMovie', b'movieWithSettingsFromMovie:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVMutableMovie', b'movieWithURL:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVMutableMovie', b'removeTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableMovie', b'scaleTimeRange:toDuration:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableMovie', b'setInterleavingPeriod:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableMovie', b'setModified:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVMutableMovieTrack', b'appendSampleBuffer:decodeTime:presentationTime:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type': b'^{_CMTime=qiIq}', 'type_modifier': b'o'}, 4: {'type': b'^{_CMTime=qiIq}', 'type_modifier': b'o'}, 5: {'type_modifier': b'o'}}}) - r(b'AVMutableMovieTrack', b'hasProtectedContent', {'retval': {'type': 'Z'}}) - r(b'AVMutableMovieTrack', b'insertEmptyTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableMovieTrack', b'insertMediaTimeRange:intoTimeRange:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 3: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableMovieTrack', b'insertTimeRange:ofTrack:atTime:copySampleData:error:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 4: {'type': b'{_CMTime=qiIq}'}, 5: {'type': 'Z'}, 6: {'type_modifier': b'o'}}}) - r(b'AVMutableMovieTrack', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'AVMutableMovieTrack', b'isModified', {'retval': {'type': 'Z'}}) - r(b'AVMutableMovieTrack', b'movieWithURL:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AVMutableMovieTrack', b'preferredMediaChunkDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMutableMovieTrack', b'removeTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableMovieTrack', b'scaleTimeRange:toDuration:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}, 3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableMovieTrack', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVMutableMovieTrack', b'setModified:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVMutableMovieTrack', b'setPreferredMediaChunkDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableTimedMetadataGroup', b'setTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableTimedMetadataGroup', b'timeRange', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'AVMutableVideoComposition', b'frameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVMutableVideoComposition', b'setFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableVideoCompositionInstruction', b'enablePostProcessing', {'retval': {'type': b'Z'}}) - r(b'AVMutableVideoCompositionInstruction', 
b'setEnablePostProcessing:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVMutableVideoCompositionInstruction', b'setTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableVideoCompositionInstruction', b'timeRange', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'AVMutableVideoCompositionLayerInstruction', b'setCropRectangle:atTime:', {'arguments': {3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableVideoCompositionLayerInstruction', b'setCropRectangleRampFromStartCropRectangle:toEndCropRectangle:timeRange:', {'arguments': {4: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableVideoCompositionLayerInstruction', b'setOpacity:atTime:', {'arguments': {3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableVideoCompositionLayerInstruction', b'setOpacityRampFromStartOpacity:toEndOpacity:timeRange:', {'arguments': {4: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVMutableVideoCompositionLayerInstruction', b'setTransform:atTime:', {'arguments': {3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVMutableVideoCompositionLayerInstruction', b'setTransformRampFromStartTransform:toEndTransform:timeRange:', {'arguments': {4: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVOutputSettingsAssistant', b'setSourceVideoAverageFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVOutputSettingsAssistant', b'setSourceVideoMinFrameDuration:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVOutputSettingsAssistant', b'sourceVideoAverageFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVOutputSettingsAssistant', b'sourceVideoMinFrameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVPlayer', b'addBoundaryTimeObserverForTimes:queue:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVPlayer', b'addPeriodicTimeObserverForInterval:queue:usingBlock:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'{_CMTime=qiIq}'}}}}}}) - r(b'AVPlayer', b'allowsExternalPlayback', {'retval': {'type': 'Z'}}) - r(b'AVPlayer', b'appliesMediaSelectionCriteriaAutomatically', {'retval': {'type': b'Z'}}) - r(b'AVPlayer', b'automaticallyWaitsToMinimizeStalling', {'retval': {'type': b'Z'}}) - r(b'AVPlayer', b'currentTime', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVPlayer', b'isClosedCaptionDisplayEnabled', {'retval': {'type': b'Z'}}) - r(b'AVPlayer', b'isExternalPlaybackActive', {'retval': {'type': 'Z'}}) - r(b'AVPlayer', b'isMuted', {'retval': {'type': b'Z'}}) - r(b'AVPlayer', b'outputObscuredDueToInsufficientExternalProtection', {'retval': {'type': b'Z'}}) - r(b'AVPlayer', b'prerollAtRate:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'AVPlayer', b'preventsDisplaySleepDuringVideoPlayback', {'retval': {'type': b'Z'}}) - r(b'AVPlayer', b'seekToDate:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'AVPlayer', b'seekToTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVPlayer', b'seekToTime:completionHandler:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'AVPlayer', 
b'seekToTime:toleranceBefore:toleranceAfter:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type': b'{_CMTime=qiIq}'}, 4: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVPlayer', b'seekToTime:toleranceBefore:toleranceAfter:completionHandler:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type': b'{_CMTime=qiIq}'}, 4: {'type': b'{_CMTime=qiIq}'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'AVPlayer', b'setAllowsExternalPlayback:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVPlayer', b'setAppliesMediaSelectionCriteriaAutomatically:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayer', b'setAutomaticallyWaitsToMinimizeStalling:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayer', b'setClosedCaptionDisplayEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayer', b'setMuted:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayer', b'setPreventsDisplaySleepDuringVideoPlayback:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayer', b'setRate:time:atHostTime:', {'arguments': {3: {'type': b'{_CMTime=qiIq}'}, 4: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVPlayer', b'setUsesExternalPlaybackWhileExternalScreenIsActive:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayer', b'usesExternalPlaybackWhileExternalScreenIsActive', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'canPlayFastForward', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'canPlayFastReverse', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'canPlayReverse', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'canPlaySlowForward', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'canPlaySlowReverse', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'canStepBackward', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'canStepForward', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'canUseNetworkResourcesForLiveStreamingWhilePaused', {'retval': {'type': 'Z'}}) - r(b'AVPlayerItem', b'currentTime', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVPlayerItem', b'duration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVPlayerItem', b'forwardPlaybackEndTime', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVPlayerItem', b'isApplicationAuthorizedForPlayback', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'isAuthorizationRequiredForPlayback', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'isContentAuthorizedForPlayback', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'isPlaybackBufferEmpty', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'isPlaybackBufferFull', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'isPlaybackLikelyToKeepUp', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'requestContentAuthorizationAsynchronouslyWithTimeoutInterval:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVPlayerItem', b'reversePlaybackEndTime', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVPlayerItem', b'seekToDate:', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'seekToDate:completionHandler:', {'retval': {'type': b'Z'}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'AVPlayerItem', b'seekToTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVPlayerItem', b'seekToTime:completionHandler:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - 
r(b'AVPlayerItem', b'seekToTime:toleranceBefore:toleranceAfter:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type': b'{_CMTime=qiIq}'}, 4: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVPlayerItem', b'seekToTime:toleranceBefore:toleranceAfter:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}}}}) - r(b'AVPlayerItem', b'seekingWaitsForVideoCompositionRendering', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItem', b'setCanUseNetworkResourcesForLiveStreamingWhilePaused:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVPlayerItem', b'setForwardPlaybackEndTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVPlayerItem', b'setReversePlaybackEndTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVPlayerItem', b'setSeekingWaitsForVideoCompositionRendering:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayerItemOutput', b'itemTimeForCVTimeStamp:', {'retval': {'type': b'{_CMTime=qiIq}'}, 'arguments': {2: {'type': sel32or64(b'{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}')}}}) - r(b'AVPlayerItemOutput', b'itemTimeForHostTime:', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVPlayerItemOutput', b'itemTimeForMachAbsoluteTime:', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVPlayerItemOutput', b'setSuppressesPlayerRendering:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayerItemOutput', b'suppressesPlayerRendering', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItemTrack', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'AVPlayerItemTrack', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayerItemVideoOutput', b'copyPixelBufferForItemTime:itemTimeForDisplay:', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type': b'^{_CMTime=qiIq}', 'type_modifier': b'o'}}}) - r(b'AVPlayerItemVideoOutput', b'hasNewPixelBufferForItemTime:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVPlayerLayer', b'isReadyForDisplay', {'retval': {'type': b'Z'}}) - r(b'AVPlayerLooper', b'initWithPlayer:templateItem:timeRange:', {'arguments': {4: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVPlayerLooper', b'playerLooperWithPlayer:templateItem:timeRange:', {'arguments': {4: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVPortraitEffectsMatte', b'dictionaryRepresentationForAuxiliaryDataType:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'AVPortraitEffectsMatte', b'portraitEffectsMatteByReplacingPortraitEffectsMatteWithPixelBuffer:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVPortraitEffectsMatte', b'portraitEffectsMatteFromDictionaryRepresentation:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AVQueuePlayer', b'canInsertItem:afterItem:', {'retval': {'type': b'Z'}}) - r(b'AVRouteDetector', b'isRouteDetectionEnabled', {'retval': {'type': 'Z'}}) - r(b'AVRouteDetector', b'multipleRoutesDetected', {'retval': {'type': 'Z'}}) - r(b'AVRouteDetector', b'setRouteDetectionEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVSampleBufferAudioRenderer', b'flushFromSourceTime:completionHandler:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}}}}) - r(b'AVSampleBufferAudioRenderer', b'isMuted', {'retval': {'type': b'Z'}}) - r(b'AVSampleBufferAudioRenderer', b'setMuted:', {'arguments': {2: {'type': b'Z'}}}) - 
r(b'AVSampleBufferDisplayLayer', b'isReadyForMoreMediaData', {'retval': {'type': b'Z'}}) - r(b'AVSampleBufferDisplayLayer', b'preventsCapture', {'retval': {'type': 'Z'}}) - r(b'AVSampleBufferDisplayLayer', b'requestMediaDataWhenReadyOnQueue:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVSampleBufferDisplayLayer', b'setPreventsCapture:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVSampleBufferGenerator', b'notifyOfDataReadyForSampleBuffer:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'AVSampleBufferRenderSynchronizer', b'addBoundaryTimeObserverForTimes:queue:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'AVSampleBufferRenderSynchronizer', b'addPeriodicTimeObserverForInterval:queue:usingBlock:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'{_CMTime=qiIq}'}}}}}}) - r(b'AVSampleBufferRenderSynchronizer', b'removeRenderer:atTime:completionHandler:', {'arguments': {3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVSampleBufferRenderSynchronizer', b'setRate:time:', {'arguments': {3: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVSampleBufferRequest', b'overrideTime', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVSampleBufferRequest', b'setOverrideTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'AVSampleCursor', b'currentChunkInfo', {'retval': {'type': b'{_AVSampleCursorChunkInfo=qZZZ}'}}) - r(b'AVSampleCursor', b'currentChunkStorageRange', {'retval': {'type': b'{_AVSampleCursorStorageRange=qq}'}}) - r(b'AVSampleCursor', b'currentSampleDependencyInfo', {'retval': {'type': b'{_AVSampleCursorDependencyInfo=ZZZZZZ}'}}) - r(b'AVSampleCursor', b'currentSampleDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVSampleCursor', b'currentSampleStorageRange', {'retval': {'type': b'{_AVSampleCursorStorageRange=qq}'}}) - r(b'AVSampleCursor', b'currentSampleSyncInfo', {'retval': {'type': b'{_AVSampleCursorSyncInfo=ZZZ}'}}) - r(b'AVSampleCursor', b'decodeTimeStamp', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVSampleCursor', b'presentationTimeStamp', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVSampleCursor', b'samplesWithEarlierDecodeTimeStampsMayHaveLaterPresentationTimeStampsThanCursor:', {'retval': {'type': b'Z'}}) - r(b'AVSampleCursor', b'samplesWithLaterDecodeTimeStampsMayHaveEarlierPresentationTimeStampsThanCursor:', {'retval': {'type': b'Z'}}) - r(b'AVSampleCursor', b'stepByDecodeTime:wasPinned:', {'retval': {'type': b'{_CMTime=qiIq}'}, 'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type': b'^Z', 'type_modifier': b'o'}}}) - r(b'AVSampleCursor', b'stepByPresentationTime:wasPinned:', {'retval': {'type': b'{_CMTime=qiIq}'}, 'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type': b'^Z', 'type_modifier': b'o'}}}) - r(b'AVSpeechSynthesizer', b'continueSpeaking', {'retval': {'type': 'Z'}}) - r(b'AVSpeechSynthesizer', b'isPaused', {'retval': {'type': 'Z'}}) - r(b'AVSpeechSynthesizer', b'isSpeaking', {'retval': {'type': 'Z'}}) - r(b'AVSpeechSynthesizer', b'pauseSpeakingAtBoundary:', {'retval': {'type': 'Z'}}) - r(b'AVSpeechSynthesizer', b'setPaused:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVSpeechSynthesizer', b'setSpeaking:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVSpeechSynthesizer', b'stopSpeakingAtBoundary:', {'retval': {'type': 'Z'}}) - 
r(b'AVTimedMetadataGroup', b'initWithItems:timeRange:', {'arguments': {3: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVTimedMetadataGroup', b'timeRange', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'AVURLAsset', b'isPlayableExtendedMIMEType:', {'retval': {'type': b'Z'}}) - r(b'AVURLAsset', b'mayRequireContentKeysForMediaDataProcessing', {'retval': {'type': b'Z'}}) - r(b'AVVideoComposition', b'frameDuration', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'AVVideoComposition', b'isValidForAsset:timeRange:validationDelegate:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'AVVideoComposition', b'videoCompositionWithAsset:applyingCIFiltersWithHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'AVVideoCompositionInstruction', b'enablePostProcessing', {'retval': {'type': b'Z'}}) - r(b'AVVideoCompositionInstruction', b'timeRange', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'AVVideoCompositionLayerInstruction', b'getCropRectangleRampForTime:startCropRectangle:endCropRectangle:timeRange:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}, 5: {'type': b'^{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}', 'type_modifier': b'o'}}}) - r(b'AVVideoCompositionLayerInstruction', b'getOpacityRampForTime:startOpacity:endOpacity:timeRange:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}, 5: {'type': b'^{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}', 'type_modifier': b'o'}}}) - r(b'AVVideoCompositionLayerInstruction', b'getTransformRampForTime:startTransform:endTransform:timeRange:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CMTime=qiIq}'}, 3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}, 5: {'type': b'^{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}', 'type_modifier': b'o'}}}) - r(b'AVVideoCompositionRenderContext', b'edgeWidths', {'retval': {'type': sel32or64(b'{_AVEdgeWidths=ffff}', b'{_AVEdgeWidths=dddd}')}}) - r(b'AVVideoCompositionRenderContext', b'highQualityRendering', {'retval': {'type': b'Z'}}) - r(b'AVVideoCompositionRenderContext', b'pixelAspectRatio', {'retval': {'type': sel32or64(b'{_AVPixelAspectRatio=ii}', b'{_AVPixelAspectRatio=qq}')}}) - r(b'NSCoder', b'decodeCMTimeForKey:', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'NSCoder', b'decodeCMTimeMappingForKey:', {'retval': {'type': b'{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}'}}) - r(b'NSCoder', b'decodeCMTimeRangeForKey:', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'NSCoder', b'encodeCMTime:forKey:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'NSCoder', b'encodeCMTimeMapping:forKey:', {'arguments': {2: {'type': b'{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}'}}}) - r(b'NSCoder', b'encodeCMTimeRange:forKey:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'NSObject', b'audioPlayerDecodeErrorDidOccur:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'audioPlayerDidFinishPlaying:successfully:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: 
{'type': b'Z'}}}) - r(b'NSObject', b'audioRecorderDidFinishRecording:successfully:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'Z'}}}) - r(b'NSObject', b'audioRecorderEncodeErrorDidOccur:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cancelAllPendingVideoCompositionRequests', {'required': False, 'retval': {'type': b'v'}}) - r(b'NSObject', b'captureOutput:didCapturePhotoForResolvedSettings:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didDropSampleBuffer:fromConnection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'^{opaqueCMSampleBuffer=}'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didFinishCaptureForResolvedSettings:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'{_CMTime=qiIq}'}, 5: {'type': b'{_CMTime=qiIq}'}, 6: {'type': b'@'}, 7: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didFinishProcessingPhoto:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'^{opaqueCMSampleBuffer=}'}, 4: {'type': b'^{opaqueCMSampleBuffer=}'}, 5: {'type': b'@'}, 6: {'type': b'@'}, 7: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'^{opaqueCMSampleBuffer=}'}, 4: {'type': b'^{opaqueCMSampleBuffer=}'}, 5: {'type': b'@'}, 6: {'type': b'@'}, 7: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didFinishRecordingLivePhotoMovieForEventualFileAtURL:resolvedSettings:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didOutputMetadataObjects:fromConnection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didOutputSampleBuffer:fromConnection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'^{opaqueCMSampleBuffer=}'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) 
- r(b'NSObject', b'captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:willBeginCaptureForResolvedSettings:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:willCapturePhotoForResolvedSettings:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'captureOutputShouldProvideSampleAccurateRecordingStart:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'containsTweening', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'contentKeySession:contentKeyRequest:didFailWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'contentKeySession:contentKeyRequestDidSucceed:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'contentKeySession:didProvideContentKeyRequest:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'contentKeySession:didProvidePersistableContentKeyRequest:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'contentKeySession:didProvideRenewingContentKeyRequest:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'contentKeySession:didUpdatePersistableContentKey:forContentKeyIdentifier:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'contentKeySession:shouldRetryContentKeyRequest:reason:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'contentKeySessionContentProtectionSessionIdentifierDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'contentKeySessionDidGenerateExpiredSessionReport:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'dataOutputSynchronizer:didOutputSynchronizedDataCollection:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'depthDataOutput:didDropDepthData:timestamp:connection:reason:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'{_CMTime=qiIq}'}, 5: {'type': b'@'}, 6: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'depthDataOutput:didOutputDepthData:timestamp:connection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'{_CMTime=qiIq}'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'destinationForMixer:bus:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'enablePostProcessing', {'required': True, 'retval': {'type': 
b'Z'}}) - r(b'NSObject', b'enqueueSampleBuffer:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'^{opaqueCMSampleBuffer=}'}}}) - r(b'NSObject', b'flush', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'isAssociatedWithFragmentMinder', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'isReadyForMoreMediaData', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'legibleOutput:didOutputAttributedStrings:nativeSampleBuffers:forItemTime:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'{_CMTime=qiIq}'}}}) - r(b'NSObject', b'loadValuesAsynchronouslyForKeys:completionHandler:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': b'@?'}}}) - r(b'NSObject', b'mayRequireContentKeysForMediaDataProcessing', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'metadataCollector:didCollectDateRangeMetadataGroups:indexesOfNewGroups:indexesOfModifiedGroups:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'metadataOutput:didOutputTimedMetadataGroups:fromPlayerItemTrack:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'obstruction', {'required': True, 'retval': {'type': b'f'}}) - r(b'NSObject', b'occlusion', {'required': True, 'retval': {'type': b'f'}}) - r(b'NSObject', b'outputMediaDataWillChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'outputSequenceWasFlushed:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pan', {'required': True, 'retval': {'type': b'f'}}) - r(b'NSObject', b'passthroughTrackID', {'required': True, 'retval': {'type': b'i'}}) - r(b'NSObject', b'position', {'required': True, 'retval': {'type': b'{AVAudio3DPoint=fff}'}}) - r(b'NSObject', b'rate', {'required': True, 'retval': {'type': b'f'}}) - r(b'NSObject', b'renderContextChanged:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'renderingAlgorithm', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'replacementDepthDataForPhoto:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'replacementEmbeddedThumbnailPixelBufferWithPhotoFormat:forPhoto:', {'required': False, 'retval': {'type': b'^{__CVBuffer=}'}, 'arguments': {2: {'type': b'^@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'replacementMetadataForPhoto:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'replacementPortraitEffectsMatteForPhoto:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'requestMediaDataWhenReadyOnQueue:usingBlock:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': b'@?'}}}) - r(b'NSObject', b'requiredPixelBufferAttributesForRenderContext', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'requiredSourceTrackIDs', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', 
b'resourceLoader:didCancelAuthenticationChallenge:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'resourceLoader:didCancelLoadingRequest:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'resourceLoader:shouldWaitForLoadingOfRequestedResource:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'resourceLoader:shouldWaitForRenewalOfRequestedResource:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'resourceLoader:shouldWaitForResponseToAuthenticationChallenge:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'reverbBlend', {'required': True, 'retval': {'type': b'f'}}) - r(b'NSObject', b'setObstruction:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}}) - r(b'NSObject', b'setOcclusion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}}) - r(b'NSObject', b'setPan:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}}) - r(b'NSObject', b'setPosition:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'{AVAudio3DPoint=fff}'}}}) - r(b'NSObject', b'setRate:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}}) - r(b'NSObject', b'setRenderingAlgorithm:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setReverbBlend:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}}) - r(b'NSObject', b'setVolume:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}}) - r(b'NSObject', b'sourcePixelBufferAttributes', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'speechSynthesizer:willSpeakRangeOfSpeechString:utterance:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'startVideoCompositionRequest:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'statusOfValueForKey:error:', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'stopRequestingMediaData', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'supportsWideColorSourceFrames', {'required': False, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'timeRange', {'required': True, 'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'NSObject', b'timebase', {'required': True, 'retval': {'type': b'^{OpaqueCMTimebase=}'}}) - r(b'NSObject', b'videoComposition:shouldContinueValidatingAfterFindingEmptyTimeRange:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) - r(b'NSObject', b'videoComposition:shouldContinueValidatingAfterFindingInvalidTimeRangeInInstruction:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'videoComposition:shouldContinueValidatingAfterFindingInvalidTrackIDInInstruction:layerInstruction:asset:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': 
b'@'}}}) - r(b'NSObject', b'videoComposition:shouldContinueValidatingAfterFindingInvalidValueForKey:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'volume', {'required': True, 'retval': {'type': b'f'}}) - r(b'NSValue', b'CMTimeMappingValue', {'retval': {'type': b'{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}'}}) - r(b'NSValue', b'CMTimeRangeValue', {'retval': {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}) - r(b'NSValue', b'CMTimeValue', {'retval': {'type': b'{_CMTime=qiIq}'}}) - r(b'NSValue', b'valueWithCMTime:', {'arguments': {2: {'type': b'{_CMTime=qiIq}'}}}) - r(b'NSValue', b'valueWithCMTimeMapping:', {'arguments': {2: {'type': b'{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}'}}}) - r(b'NSValue', b'valueWithCMTimeRange:', {'arguments': {2: {'type': b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/AVFoundation/_metadata.pyc b/env/lib/python2.7/site-packages/AVFoundation/_metadata.pyc deleted file mode 100644 index 6cb25a0a..00000000 Binary files a/env/lib/python2.7/site-packages/AVFoundation/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AVKit/_AVKit.so b/env/lib/python2.7/site-packages/AVKit/_AVKit.so deleted file mode 100755 index bd34f319..00000000 Binary files a/env/lib/python2.7/site-packages/AVKit/_AVKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AVKit/__init__.py b/env/lib/python2.7/site-packages/AVKit/__init__.py deleted file mode 100644 index 43d35a08..00000000 --- a/env/lib/python2.7/site-packages/AVKit/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the AddressBook framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Cocoa -import Quartz - -from AVKit import _metadata, _AVKit - -sys.modules['AVKit'] = mod = objc.ObjCLazyModule( - "AVKit", - "com.apple.AVKit", - objc.pathForFramework("/System/Library/Frameworks/AVKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa, Quartz)) - -import sys -del sys.modules['AVKit._metadata'] -del sys.modules['AVKit._AVKit'] diff --git a/env/lib/python2.7/site-packages/AVKit/__init__.pyc b/env/lib/python2.7/site-packages/AVKit/__init__.pyc deleted file mode 100644 index c993a28e..00000000 Binary files a/env/lib/python2.7/site-packages/AVKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AVKit/_metadata.py b/env/lib/python2.7/site-packages/AVKit/_metadata.py deleted file mode 100644 index 32682e54..00000000 --- a/env/lib/python2.7/site-packages/AVKit/_metadata.py +++ /dev/null @@ -1,44 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sat Jul 22 11:10:57 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$AVCaptureViewControlsStyleDefault@0$AVCaptureViewControlsStyleFloating@1$AVCaptureViewControlsStyleInline@0$AVCaptureViewControlsStyleInlineDeviceSelection@2$AVPlayerViewControlsStyleDefault@1$AVPlayerViewControlsStyleFloating@2$AVPlayerViewControlsStyleInline@1$AVPlayerViewControlsStyleMinimal@3$AVPlayerViewControlsStyleNone@0$AVPlayerViewTrimCancelButton@1$AVPlayerViewTrimOKButton@0$AVRoutePickerViewButtonStateActive@2$AVRoutePickerViewButtonStateActiveHighlighted@3$AVRoutePickerViewButtonStateNormal@0$AVRoutePickerViewButtonStateNormalHighlighted@1$''' -misc.update({}) -aliases = {'AVPlayerViewControlsStyleDefault': 'AVPlayerViewControlsStyleInline', 'AVCaptureViewControlsStyleDefault': 'AVCaptureViewControlsStyleInline'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'AVCaptureView', b'setSession:showVideoPreview:showAudioPreview:', {'arguments': {3: {'type': b'Z'}, 4: {'type': b'Z'}}}) - r(b'AVRoutePickerView', b'isRoutePickerButtonBordered', {'retval': {'type': 'Z'}}) - r(b'AVRoutePickerView', b'setRoutePickerButtonBordered:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVPlayerView', b'beginTrimmingWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'q'}}}}}}) - r(b'AVPlayerView', b'canBeginTrimming', {'retval': {'type': b'Z'}}) - r(b'AVPlayerView', b'isReadyForDisplay', {'retval': {'type': b'Z'}}) - r(b'AVPlayerView', b'setShowsFrameSteppingButtons:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayerView', b'setShowsFullScreenToggleButton:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayerView', b'setShowsSharingServiceButton:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AVPlayerView', b'setUpdatesNowPlayingInfoCenter:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AVPlayerView', b'showsFrameSteppingButtons', {'retval': {'type': b'Z'}}) - r(b'AVPlayerView', b'showsFullScreenToggleButton', {'retval': {'type': b'Z'}}) - r(b'AVPlayerView', b'showsSharingServiceButton', {'retval': {'type': b'Z'}}) - r(b'AVPlayerView', b'updatesNowPlayingInfoCenter', {'retval': {'type': 'Z'}}) - r(b'NSObject', 
b'captureView:startRecordingToFileOutput:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/AVKit/_metadata.pyc b/env/lib/python2.7/site-packages/AVKit/_metadata.pyc deleted file mode 100644 index 66d38768..00000000 Binary files a/env/lib/python2.7/site-packages/AVKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Accounts/__init__.py b/env/lib/python2.7/site-packages/Accounts/__init__.py deleted file mode 100644 index 8d8dfbe7..00000000 --- a/env/lib/python2.7/site-packages/Accounts/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the Accounts framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from Accounts import _metadata - -sys.modules['Accounts'] = mod = objc.ObjCLazyModule( - "Accounts", - "com.apple.Accounts", - objc.pathForFramework("/System/Library/Frameworks/Accounts.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['Accounts._metadata'] diff --git a/env/lib/python2.7/site-packages/Accounts/__init__.pyc b/env/lib/python2.7/site-packages/Accounts/__init__.pyc deleted file mode 100644 index 69bbac08..00000000 Binary files a/env/lib/python2.7/site-packages/Accounts/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Accounts/_metadata.py b/env/lib/python2.7/site-packages/Accounts/_metadata.py deleted file mode 100644 index 65c2e543..00000000 --- a/env/lib/python2.7/site-packages/Accounts/_metadata.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Dec 28 08:55:02 2015 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$ACAccountStoreDidChangeNotification$ACAccountTypeIdentifierFacebook$ACAccountTypeIdentifierLinkedIn$ACAccountTypeIdentifierSinaWeibo$ACAccountTypeIdentifierTencentWeibo$ACAccountTypeIdentifierTwitter$ACErrorDomain$ACFacebookAppIdKey$ACFacebookAppVersionKey$ACFacebookAudienceEveryone$ACFacebookAudienceFriends$ACFacebookAudienceKey$ACFacebookAudienceOnlyMe$ACFacebookPermissionGroupKey$ACFacebookPermissionGroupRead$ACFacebookPermissionGroupReadWrite$ACFacebookPermissionGroupWrite$ACFacebookPermissionsKey$ACLinkedInAppIdKey$ACLinkedInPermissionsKey$ACTencentWeiboAppIdKey$''' -enums = 
'''$ACAccountCredentialRenewResultFailed@2$ACAccountCredentialRenewResultRejected@1$ACAccountCredentialRenewResultRenewed@0$ACErrorAccessDeniedByProtectionPolicy@10$ACErrorAccessInfoInvalid@8$ACErrorAccountAlreadyExists@5$ACErrorAccountAuthenticationFailed@3$ACErrorAccountMissingRequiredProperty@2$ACErrorAccountNotFound@6$ACErrorAccountTypeInvalid@4$ACErrorClientPermissionDenied@9$ACErrorCoreDataSaveFailed@18$ACErrorCredentialItemNotExpired@23$ACErrorCredentialItemNotFound@22$ACErrorCredentialNotFound@11$ACErrorDeniedByPlugin@17$ACErrorFailedSerializingAccountInfo@19$ACErrorFetchCredentialFailed@12$ACErrorInvalidClientBundleID@16$ACErrorInvalidCommand@20$ACErrorMissingTransportMessageID@21$ACErrorPermissionDenied@7$ACErrorRemoveCredentialFailed@14$ACErrorStoreCredentialFailed@13$ACErrorUnknown@1$ACErrorUpdatingNonexistentAccount@15$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'ACAccountStore', b'removeAccount:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'ACAccountStore', b'renewCredentialsForAccount:completion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}, 2: {'type': b'@'}}}}}}) - r(b'ACAccountStore', b'requestAccessToAccountsWithType:options:completion:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'ACAccountStore', b'requestAccessToAccountsWithType:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'ACAccountStore', b'saveAccount:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'ACAccountType', b'accessGranted', {'retval': {'type': b'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Accounts/_metadata.pyc b/env/lib/python2.7/site-packages/Accounts/_metadata.pyc deleted file mode 100644 index 1952f29d..00000000 Binary files a/env/lib/python2.7/site-packages/Accounts/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AddressBook/_AddressBook.so b/env/lib/python2.7/site-packages/AddressBook/_AddressBook.so deleted file mode 100755 index b158b23e..00000000 Binary files a/env/lib/python2.7/site-packages/AddressBook/_AddressBook.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AddressBook/__init__.py b/env/lib/python2.7/site-packages/AddressBook/__init__.py deleted file mode 100644 index ad31d346..00000000 --- a/env/lib/python2.7/site-packages/AddressBook/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -''' -Python mapping for the AddressBook framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from AddressBook import _metadata -from AddressBook._AddressBook import * - -try: - long -except NameError: - long = int - -sys.modules['AddressBook'] = mod = objc.ObjCLazyModule( - "AddressBook", - "com.apple.AddressBook.framework", - objc.pathForFramework("/System/Library/Frameworks/AddressBook.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['AddressBook._metadata'] diff --git a/env/lib/python2.7/site-packages/AddressBook/__init__.pyc b/env/lib/python2.7/site-packages/AddressBook/__init__.pyc deleted file mode 100644 index 43e486bb..00000000 Binary files a/env/lib/python2.7/site-packages/AddressBook/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AddressBook/_metadata.py b/env/lib/python2.7/site-packages/AddressBook/_metadata.py deleted file mode 100644 index 32dd2b37..00000000 --- a/env/lib/python2.7/site-packages/AddressBook/_metadata.py +++ /dev/null @@ -1,72 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Jul 18 11:30:07 2016 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'ABRecordRef': objc.createStructType('ABRecordRef', b'{__ABBookflags=b1b1b1b1b1b1b1b1b1b1b1b21}', ['hasUnsavedChanges', 'readOnly', 'importMe', 'needConversion', 'cleanedUp', 'importTips', 'restoreFromMetaData', 'prefsNeedSync', 'waitingForReset', 'enforcesConstraints', 'tracksAllSources', '_reserved']), 'ABAddressBookRef': objc.createStructType('ABAddressBookRef', b'{__ABAddressBookRef=}', []), 'ABMutableMultiValueRef': objc.createStructType('ABMutableMultiValueRef', b'{__ABMultiValue=}', []), 'ABActionEnabledCallback': objc.createStructType('ABActionEnabledCallback', b'{__ABMultiValue=}', []), 'ABPickerAttributes': objc.createStructType('ABPickerAttributes', b'{OpaqueABPicker=}', []), 'ABGroupRef': objc.createStructType('ABGroupRef', b'{__ABGroup=}', []), 'ABSearchElementRef': objc.createStructType('ABSearchElementRef', b'{__ABSearchElementRef=}', []), 'ABPersonRef': objc.createStructType('ABPersonRef', b'{__ABPerson=}', []), 'ABMultiValueRef': objc.createStructType('ABMultiValueRef', b'{__ABMultiValue=}', []), 'ABPeoplePickerSelectionBehavior': objc.createStructType('ABPeoplePickerSelectionBehavior', b'{__ABBookflags=b1b1b1b1b1b1b1b1b1b1b1b21}', ['hasUnsavedChanges', 'readOnly', 'importMe', 'needConversion', 'cleanedUp', 'importTips', 'restoreFromMetaData', 'prefsNeedSync', 'waitingForReset', 'enforcesConstraints', 'tracksAllSources', '_reserved']), 'ABPickerRef': objc.createStructType('ABPickerRef', b'{OpaqueABPicker=}', [])}) -constants = 
'''$ABAddressBookErrorDomain$ABMultiValueIdentifiersErrorKey$ABPeoplePickerDisplayedPropertyDidChangeNotification$ABPeoplePickerGroupSelectionDidChangeNotification$ABPeoplePickerNameSelectionDidChangeNotification$ABPeoplePickerValueSelectionDidChangeNotification$kABAIMHomeLabel$kABAIMInstantProperty$kABAIMMobileMeLabel$kABAIMWorkLabel$kABAddressCityKey$kABAddressCountryCodeKey$kABAddressCountryKey$kABAddressHomeLabel$kABAddressProperty$kABAddressStateKey$kABAddressStreetKey$kABAddressWorkLabel$kABAddressZIPKey$kABAlternateBirthdayComponentsProperty$kABAnniversaryLabel$kABAssistantLabel$kABBirthdayComponentsProperty$kABBirthdayProperty$kABBrotherLabel$kABCalendarURIsProperty$kABChildLabel$kABCreationDateProperty$kABDatabaseChangedExternallyNotification$kABDatabaseChangedNotification$kABDeletedRecords$kABDepartmentProperty$kABEmailHomeLabel$kABEmailMobileMeLabel$kABEmailProperty$kABEmailWorkLabel$kABFatherLabel$kABFirstNamePhoneticProperty$kABFirstNameProperty$kABFriendLabel$kABGroupNameProperty$kABHomeLabel$kABHomePageLabel$kABHomePageProperty$kABICQHomeLabel$kABICQInstantProperty$kABICQWorkLabel$kABInsertedRecords$kABInstantMessageProperty$kABInstantMessageServiceAIM$kABInstantMessageServiceFacebook$kABInstantMessageServiceGaduGadu$kABInstantMessageServiceGoogleTalk$kABInstantMessageServiceICQ$kABInstantMessageServiceJabber$kABInstantMessageServiceKey$kABInstantMessageServiceMSN$kABInstantMessageServiceQQ$kABInstantMessageServiceSkype$kABInstantMessageServiceYahoo$kABInstantMessageUsernameKey$kABJabberHomeLabel$kABJabberInstantProperty$kABJabberWorkLabel$kABJobTitleProperty$kABLastNamePhoneticProperty$kABLastNameProperty$kABMSNHomeLabel$kABMSNInstantProperty$kABMSNWorkLabel$kABMaidenNameProperty$kABManagerLabel$kABMiddleNamePhoneticProperty$kABMiddleNameProperty$kABMobileMeLabel$kABModificationDateProperty$kABMotherLabel$kABNicknameProperty$kABNoteProperty$kABOrganizationPhoneticProperty$kABOrganizationProperty$kABOtherDateComponentsProperty$kABOtherDatesProperty$kABOtherLabel$kABParentLabel$kABPartnerLabel$kABPersonFlags$kABPhoneHomeFAXLabel$kABPhoneHomeLabel$kABPhoneMainLabel$kABPhoneMobileLabel$kABPhonePagerLabel$kABPhoneProperty$kABPhoneWorkFAXLabel$kABPhoneWorkLabel$kABPhoneiPhoneLabel$kABRelatedNamesProperty$kABSisterLabel$kABSocialProfileProperty$kABSocialProfileServiceFacebook$kABSocialProfileServiceFlickr$kABSocialProfileServiceKey$kABSocialProfileServiceLinkedIn$kABSocialProfileServiceMySpace$kABSocialProfileServiceSinaWeibo$kABSocialProfileServiceTencentWeibo$kABSocialProfileServiceTwitter$kABSocialProfileServiceYelp$kABSocialProfileURLKey$kABSocialProfileUserIdentifierKey$kABSocialProfileUsernameKey$kABSpouseLabel$kABSuffixProperty$kABTitleProperty$kABUIDProperty$kABURLsProperty$kABUpdatedRecords$kABWorkLabel$kABYahooHomeLabel$kABYahooInstantProperty$kABYahooWorkLabel$''' -enums = 
'''$ABAddRecordsError@1001$ABMultipleValueSelection@2$ABNoValueSelection@0$ABPropertyReadOnlyError@1014$ABPropertyUnsupportedBySourceError@1013$ABPropertyValueValidationError@1012$ABRemoveRecordsError@1002$ABSingleValueSelection@1$kABArrayProperty@5$kABBitsInBitFieldMatch@11$kABContainsSubString@7$kABContainsSubStringCaseInsensitive@8$kABDataProperty@7$kABDateComponentsProperty@8$kABDateProperty@4$kABDefaultNameOrdering@0$kABDictionaryProperty@6$kABDoesNotContainSubString@12$kABDoesNotContainSubStringCaseInsensitive@13$kABEqual@0$kABEqualCaseInsensitive@6$kABErrorInProperty@0$kABFirstNameFirst@32$kABGreaterThan@4$kABGreaterThanOrEqual@5$kABIntegerProperty@2$kABLastNameFirst@16$kABLessThan@2$kABLessThanOrEqual@3$kABMultiArrayProperty@261$kABMultiDataProperty@263$kABMultiDateComponentsProperty@264$kABMultiDateProperty@260$kABMultiDictionaryProperty@262$kABMultiIntegerProperty@258$kABMultiRealProperty@259$kABMultiStringProperty@257$kABMultiValueMask@256$kABNameOrderingMask@56$kABNotEqual@1$kABNotEqualCaseInsensitive@14$kABNotWithinIntervalAroundToday@19$kABNotWithinIntervalAroundTodayYearless@20$kABNotWithinIntervalFromToday@23$kABNotWithinIntervalFromTodayYearless@24$kABPickerAllowGroupSelection@4$kABPickerAllowMultipleSelection@8$kABPickerMultipleValueSelection@2$kABPickerSingleValueSelection@1$kABPrefixMatch@9$kABPrefixMatchCaseInsensitive@10$kABRealProperty@3$kABSearchAnd@0$kABSearchOr@1$kABShowAsCompany@1$kABShowAsMask@7$kABShowAsPerson@0$kABShowAsResource@2$kABShowAsRoom@3$kABStringProperty@1$kABSuffixMatch@15$kABSuffixMatchCaseInsensitive@16$kABWithinIntervalAroundToday@17$kABWithinIntervalAroundTodayYearless@18$kABWithinIntervalFromToday@21$kABWithinIntervalFromTodayYearless@22$kEventABPeoplePickerDisplayedPropertyChanged@4$kEventABPeoplePickerGroupDoubleClicked@5$kEventABPeoplePickerGroupSelectionChanged@1$kEventABPeoplePickerNameDoubleClicked@6$kEventABPeoplePickerNameSelectionChanged@2$kEventABPeoplePickerValueSelectionChanged@3$kEventClassABPeoplePicker@1633841264$kEventParamABPickerRef@1633841264$''' -misc.update({}) -functions={'ABMultiValueReplaceValue': (sel32or64(b'B^{__ABMultiValue=}@l', b'B^{__ABMultiValue=}@q'),), 'ABPickerSetDelegate': (b'v^{OpaqueABPicker=}^{OpaqueHIObjectRef=}',), 'ABAddRecord': (b'B^{__ABAddressBookRef=}@',), 'ABPickerClearSearchField': (b'v^{OpaqueABPicker=}',), 'ABPickerSelectGroup': (b'v^{OpaqueABPicker=}^{__ABGroup=}B',), 'ABCopyDefaultCountryCode': (b'^{__CFString=}^{__ABAddressBookRef=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerGetAttributes': (sel32or64(b'L^{OpaqueABPicker=}', b'I^{OpaqueABPicker=}'),), 'ABSave': (b'B^{__ABAddressBookRef=}',), 'ABPersonCreateWithVCardRepresentation': (b'^{__ABPerson=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'ABGroupAddMember': (b'B^{__ABGroup=}^{__ABPerson=}',), 'ABPersonCreateSearchElement': (sel32or64(b'^{__ABSearchElementRef=}^{__CFString=}^{__CFString=}^{__CFString=}@l', b'^{__ABSearchElementRef=}^{__CFString=}^{__CFString=}^{__CFString=}@q'), '', {'retval': {'already_cfretained': True}}), 'ABMultiValueRemove': (sel32or64(b'B^{__ABMultiValue=}l', b'B^{__ABMultiValue=}q'),), 'ABSearchElementMatchesRecord': (b'B^{__ABSearchElementRef=}@',), 'ABRecordCopyRecordType': (b'^{__CFString=}@', '', {'retval': {'already_cfretained': True}}), 'ABPickerSelectIdentifier': (b'v^{OpaqueABPicker=}^{__ABPerson=}^{__CFString=}B',), 'ABMultiValueCopyPrimaryIdentifier': (b'^{__CFString=}^{__ABMultiValue=}', '', {'retval': {'already_cfretained': True}}), 'ABMultiValueIndexForIdentifier': 
(sel32or64(b'l^{__ABMultiValue=}^{__CFString=}', b'q^{__ABMultiValue=}^{__CFString=}'),), 'ABPickerSelectRecord': (b'v^{OpaqueABPicker=}@B',), 'ABMultiValueCreateMutableCopy': (b'^{__ABMultiValue=}^{__ABMultiValue=}', '', {'retval': {'already_cfretained': True}}), 'ABRecordRemoveValue': (b'B@^{__CFString=}',), 'ABPickerCopySelectedGroups': (b'^{__CFArray=}^{OpaqueABPicker=}', '', {'retval': {'already_cfretained': True}}), 'ABMultiValueCopyIdentifierAtIndex': (sel32or64(b'^{__CFString=}^{__ABMultiValue=}l', b'^{__CFString=}^{__ABMultiValue=}q'), '', {'retval': {'already_cfretained': True}}), 'ABCancelLoadingImageDataForTag': (sel32or64(b'vl', b'vq'),), 'ABCopyArrayOfMatchingRecords': (b'^{__CFArray=}^{__ABAddressBookRef=}^{__ABSearchElementRef=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerRemoveProperty': (b'v^{OpaqueABPicker=}^{__CFString=}',), 'ABMultiValueCount': (sel32or64(b'l^{__ABMultiValue=}', b'q^{__ABMultiValue=}'),), 'ABPickerCopySelectedIdentifiers': (b'^{__CFArray=}^{OpaqueABPicker=}^{__ABPerson=}', '', {'retval': {'already_cfretained': True}}), 'ABGroupCreate': (b'^{__ABGroup=}', '', {'retval': {'already_cfretained': True}}), 'ABMultiValueCreateCopy': (b'^{__ABMultiValue=}^{__ABMultiValue=}', '', {'retval': {'already_cfretained': True}}), 'ABMultiValueAdd': (b'B^{__ABMultiValue=}@^{__CFString=}^^{__CFString}', '', {'arguments': {3: {'type_modifier': 'o'}}}), 'ABHasUnsavedChanges': (b'B^{__ABAddressBookRef=}',), 'ABMultiValueReplaceLabel': (sel32or64(b'B^{__ABMultiValue=}^{__CFString=}l', b'B^{__ABMultiValue=}^{__CFString=}q'),), 'ABGroupCopyParentGroups': (b'^{__CFArray=}^{__ABGroup=}', '', {'retval': {'already_cfretained': True}}), 'ABGroupSetDistributionIdentifier': (b'B^{__ABGroup=}^{__ABPerson=}^{__CFString=}^{__CFString=}',), 'ABPickerDeselectRecord': (b'v^{OpaqueABPicker=}@',), 'ABGroupCreateSearchElement': (sel32or64(b'^{__ABSearchElementRef=}^{__CFString=}^{__CFString=}^{__CFString=}@l', b'^{__ABSearchElementRef=}^{__CFString=}^{__CFString=}^{__CFString=}@q'), '', {'retval': {'already_cfretained': True}}), 'ABCopyRecordTypeFromUniqueId': (b'^{__CFString=}^{__ABAddressBookRef=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ABRemoveRecord': (b'B^{__ABAddressBookRef=}@',), 'ABGroupCopyDistributionIdentifier': (b'^{__CFString=}^{__ABGroup=}^{__ABPerson=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ABPersonCopyImageData': (b'^{__CFData=}^{__ABPerson=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerDeselectGroup': (b'v^{OpaqueABPicker=}^{__ABGroup=}',), 'ABGroupRemoveGroup': (b'B^{__ABGroup=}^{__ABGroup=}',), 'ABRemoveProperties': (sel32or64(b'l^{__ABAddressBookRef=}^{__CFString=}^{__CFArray=}', b'q^{__ABAddressBookRef=}^{__CFString=}^{__CFArray=}'),), 'ABGroupCopyArrayOfAllSubgroups': (b'^{__CFArray=}^{__ABGroup=}', '', {'retval': {'already_cfretained': True}}), 'ABMultiValueCreate': (b'^{__ABMultiValue=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerSelectInAddressBook': (b'v^{OpaqueABPicker=}',), 'ABGroupCopyArrayOfAllMembers': (b'^{__CFArray=}^{__ABGroup=}', '', {'retval': {'already_cfretained': True}}), 'ABSearchElementCreateWithConjunction': (sel32or64(b'^{__ABSearchElementRef=}l^{__CFArray=}', b'^{__ABSearchElementRef=}q^{__CFArray=}'), '', {'retval': {'already_cfretained': True}}), 'ABMultiValueCopyValueAtIndex': (sel32or64(b'@^{__ABMultiValue=}l', b'@^{__ABMultiValue=}q'), '', {'retval': {'already_cfretained': True}}), 'ABPersonSetImageData': (b'B^{__ABPerson=}^{__CFData=}',), 
'ABCreateFormattedAddressFromDictionary': (b'^{__CFString=}^{__ABAddressBookRef=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'ABRecordSetValue': (b'B@^{__CFString=}@',), 'ABPickerGetDelegate': (b'^{OpaqueHIObjectRef=}^{OpaqueABPicker=}',), 'ABPersonCreate': (b'^{__ABPerson=}', '', {'retval': {'already_cfretained': True}}), 'ABCopyArrayOfAllPeople': (b'^{__CFArray=}^{__ABAddressBookRef=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerCopyProperties': (b'^{__CFArray=}^{OpaqueABPicker=}', '', {'retval': {'already_cfretained': True}}), 'ABMultiValueSetPrimaryIdentifier': (b'B^{__ABMultiValue=}^{__CFString=}',), 'ABPickerCopyDisplayedProperty': (b'^{__CFString=}^{OpaqueABPicker=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerCopySelectedRecords': (b'^{__CFArray=}^{OpaqueABPicker=}', '', {'retval': {'already_cfretained': True}}), 'ABGroupAddGroup': (b'B^{__ABGroup=}^{__ABGroup=}',), 'ABLocalizedPropertyOrLabel': (b'@@',), 'ABMultiValueCreateMutable': (b'^{__ABMultiValue=}', '', {'retval': {'already_cfretained': True}}), 'ABRecordCreateCopy': (b'@@', '', {'retval': {'already_cfretained': True}}), 'ABGetMe': (b'^{__ABPerson=}^{__ABAddressBookRef=}',), 'ABPickerSetFrame': (sel32or64(b'v^{OpaqueABPicker=}^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v^{OpaqueABPicker=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'ABAddPropertiesAndTypes': (sel32or64(b'l^{__ABAddressBookRef=}^{__CFString=}^{__CFDictionary=}', b'q^{__ABAddressBookRef=}^{__CFString=}^{__CFDictionary=}'),), 'ABCopyRecordForUniqueId': (b'@^{__ABAddressBookRef=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ABSetMe': (b'v^{__ABAddressBookRef=}^{__ABPerson=}',), 'ABRecordCopyValue': (b'@@^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ABTypeOfProperty': (sel32or64(b'l^{__ABAddressBookRef=}^{__CFString=}^{__CFString=}', b'q^{__ABAddressBookRef=}^{__CFString=}^{__CFString=}'),), 'ABMultiValueInsert': (sel32or64(b'B^{__ABMultiValue=}@^{__CFString=}l^^{__CFString}', b'B^{__ABMultiValue=}@^{__CFString=}q^^{__CFString}'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'ABPickerAddProperty': (b'v^{OpaqueABPicker=}^{__CFString=}',), 'ABMultiValueCopyLabelAtIndex': (sel32or64(b'^{__CFString=}^{__ABMultiValue=}l', b'^{__CFString=}^{__ABMultiValue=}q'), '', {'retval': {'already_cfretained': True}}), 'ABPickerChangeAttributes': (sel32or64(b'v^{OpaqueABPicker=}LL', b'v^{OpaqueABPicker=}II'),), 'ABPickerDeselectAll': (b'v^{OpaqueABPicker=}',), 'ABBeginLoadingImageDataForClient': (sel32or64(b'l^{__ABPerson=}^?^v', b'q^{__ABPerson=}^?^v'), '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFData=}'}, 1: {'type': b'l'}, 2: {'type': b'^v'}}}}}}), 'ABGetSharedAddressBook': (b'^{__ABAddressBookRef=}',), 'ABRecordIsReadOnly': (b'B@',), 'ABPickerIsVisible': (b'B^{OpaqueABPicker=}',), 'ABRecordCopyUniqueId': (b'^{__CFString=}@', '', {'retval': {'already_cfretained': True}}), 'ABCopyArrayOfAllGroups': (b'^{__CFArray=}^{__ABAddressBookRef=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerCreate': (b'^{OpaqueABPicker=}', '', {'retval': {'already_cfretained': True}}), 'ABGroupRemoveMember': (b'B^{__ABGroup=}^{__ABPerson=}',), 'ABPickerDeselectIdentifier': (b'v^{OpaqueABPicker=}^{__ABPerson=}^{__CFString=}',), 'ABPickerSetColumnTitle': (b'v^{OpaqueABPicker=}^{__CFString=}^{__CFString=}',), 'ABPickerCopySelectedValues': (b'^{__CFArray=}^{OpaqueABPicker=}', '', {'retval': {'already_cfretained': 
True}}), 'ABPickerGetFrame': (sel32or64(b'v^{OpaqueABPicker=}^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v^{OpaqueABPicker=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'ABMultiValuePropertyType': (sel32or64(b'l^{__ABMultiValue=}', b'q^{__ABMultiValue=}'),), 'ABPersonCopyVCardRepresentation': (b'^{__CFData=}^{__ABPerson=}', '', {'retval': {'already_cfretained': True}}), 'ABCopyArrayOfPropertiesForRecordType': (b'^{__CFArray=}^{__ABAddressBookRef=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerCopyColumnTitle': (b'^{__CFString=}^{OpaqueABPicker=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerSetVisibility': (b'v^{OpaqueABPicker=}B',), 'ABPersonCopyParentGroups': (b'^{__CFArray=}^{__ABPerson=}', '', {'retval': {'already_cfretained': True}}), 'ABCopyLocalizedPropertyOrLabel': (b'^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ABPickerEditInAddressBook': (b'v^{OpaqueABPicker=}',), 'ABPickerSetDisplayedProperty': (b'v^{OpaqueABPicker=}^{__CFString=}',)} -cftypes=[('ABAddressBookRef', b'^{__ABAddressBookRef=}', None, 'ABAddressBook'), ('ABGroupRef', b'^{__ABGroup=}', None, 'ABGroup'), ('ABMultiValueRef', b'^{__ABMultiValue=}', None, 'ABMultiValue'), ('ABMutableMultiValueRef', b'^{__ABMultiValue=}', None, 'ABMutableMultiValue'), ('ABPersonRef', b'^{__ABPerson=}', None, 'ABPerson'), ('ABSearchElementRef', b'^{__ABSearchElementRef=}', None, 'ABSearchElement'), ('ABPickerRef', b'^{OpaqueABPicker}', None, 'ABPeoplePickerView')] -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'ABAddressBook', b'addRecord:', {'retval': {'type': 'Z'}}) - r(b'ABAddressBook', b'addRecord:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ABAddressBook', b'hasUnsavedChanges', {'retval': {'type': 'Z'}}) - r(b'ABAddressBook', b'removeRecord:', {'retval': {'type': 'Z'}}) - r(b'ABAddressBook', b'removeRecord:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ABAddressBook', b'save', {'retval': {'type': 'Z'}}) - r(b'ABAddressBook', b'saveAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ABGroup', b'addMember:', {'retval': {'type': 'Z'}}) - r(b'ABGroup', b'addSubgroup:', {'retval': {'type': 'Z'}}) - r(b'ABGroup', b'removeMember:', {'retval': {'type': 'Z'}}) - r(b'ABGroup', b'removeSubgroup:', {'retval': {'type': 'Z'}}) - r(b'ABGroup', b'setDistributionIdentifier:forProperty:person:', {'retval': {'type': 'Z'}}) - r(b'ABMutableMultiValue', b'removeValueAndLabelAtIndex:', {'retval': {'type': 'Z'}}) - r(b'ABMutableMultiValue', b'replaceLabelAtIndex:withLabel:', {'retval': {'type': 'Z'}}) - r(b'ABMutableMultiValue', b'replaceValueAtIndex:withValue:', {'retval': {'type': 'Z'}}) - r(b'ABMutableMultiValue', b'setPrimaryIdentifier:', {'retval': {'type': 'Z'}}) - r(b'ABPeoplePickerView', b'allowsGroupSelection', {'retval': {'type': 'Z'}}) - r(b'ABPeoplePickerView', b'allowsMultipleSelection', {'retval': {'type': 'Z'}}) - r(b'ABPeoplePickerView', b'selectGroup:byExtendingSelection:', {'arguments': {3: {'type': 'Z'}}}) - r(b'ABPeoplePickerView', b'selectIdentifier:forPerson:byExtendingSelection:', {'arguments': {4: {'type': 'Z'}}}) - r(b'ABPeoplePickerView', b'selectRecord:byExtendingSelection:', {'arguments': {3: {'type': 'Z'}}}) - r(b'ABPeoplePickerView', b'setAllowsGroupSelection:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ABPeoplePickerView', b'setAllowsMultipleSelection:', 
{'arguments': {2: {'type': 'Z'}}}) - r(b'ABPeoplePickerView', b'setGroupDoubleAction:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'ABPeoplePickerView', b'setNameDoubleAction:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'ABPerson', b'setImageData:', {'retval': {'type': 'Z'}}) - r(b'ABPersonView', b'editing', {'retval': {'type': 'Z'}}) - r(b'ABPersonView', b'setEditing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ABPersonView', b'setShouldShowLinkedPeople:', {'arguments': {2: {'type': b'Z'}}}) - r(b'ABPersonView', b'shouldShowLinkedPeople', {'retval': {'type': b'Z'}}) - r(b'ABRecord', b'isReadOnly', {'retval': {'type': 'Z'}}) - r(b'ABRecord', b'removeValueForProperty:', {'retval': {'type': 'Z'}}) - r(b'ABRecord', b'setValue:forProperty:', {'retval': {'type': 'Z'}}) - r(b'ABRecord', b'setValue:forProperty:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ABSearchElement', b'matchesRecord:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'actionProperty', {'retval': {'type': b'@'}}) - r(b'NSObject', b'consumeImageData:forTag:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'performActionForPerson:identifier:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'shouldEnableActionForPerson:identifier:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'titleForPerson:identifier:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -protocols={'ABActionDelegate': objc.informal_protocol('ABActionDelegate', [objc.selector(None, b'shouldEnableActionForPerson:identifier:', b'Z@:@@', isRequired=False), objc.selector(None, b'actionProperty', b'@@:', isRequired=False), objc.selector(None, b'performActionForPerson:identifier:', b'v@:@@', isRequired=False), objc.selector(None, b'titleForPerson:identifier:', b'@@:@@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/AddressBook/_metadata.pyc b/env/lib/python2.7/site-packages/AddressBook/_metadata.pyc deleted file mode 100644 index f8dae5f2..00000000 Binary files a/env/lib/python2.7/site-packages/AddressBook/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AppKit/_AppKit.so b/env/lib/python2.7/site-packages/AppKit/_AppKit.so deleted file mode 100755 index 19205b22..00000000 Binary files a/env/lib/python2.7/site-packages/AppKit/_AppKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AppKit/__init__.py b/env/lib/python2.7/site-packages/AppKit/__init__.py deleted file mode 100644 index 833a91c1..00000000 --- a/env/lib/python2.7/site-packages/AppKit/__init__.py +++ /dev/null @@ -1,170 +0,0 @@ -''' -Python mapping for the AppKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import Foundation - -from AppKit import _metadata -from AppKit._inlines import _inline_list_ - -def _setup_conveniences(): - def fontdescriptor_get(self, key, default=None): - value = self.objectForKey_(key) - if value is None: - return default - return value - - def fontdescriptor_getitem(self, key, default=None): - value = self.objectForKey_(key) - if value is None: - raise KeyError(key) - return value - - objc.addConvenienceForClass('NSFontDescriptor', ( - ('__getitem__', fontdescriptor_getitem), - ('get', fontdescriptor_get), - )) - -_setup_conveniences() - -def NSDictionaryOfVariableBindings(*names): - """ - Return a dictionary with the given names and there values. - """ - import sys - variables = sys._getframe(1).f_locals - - return { - nm: variables[nm] - for nm in names - } - - -sys.modules['AppKit'] = mod = objc.ObjCLazyModule('AppKit', - "com.apple.AppKit", objc.pathForFramework("/System/Library/Frameworks/AppKit.framework"), - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - 'objc': objc, - 'NSDictionaryOfVariableBindings': NSDictionaryOfVariableBindings, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -# NSApp is a global variable that can be changed in ObjC, -# somewhat emulate that (it is *not* possible to assign to -# NSApp in Python) -from AppKit._nsapp import NSApp -mod.NSApp = NSApp - -# Manually written wrappers: -import AppKit._AppKit -for nm in dir(AppKit._AppKit): - setattr(mod, nm, getattr(AppKit._AppKit, nm)) - -# Fix types for a number of character constants -try: - unichr -except NameError: - unichr = chr -mod.NSEnterCharacter = unichr(mod.NSEnterCharacter) -mod.NSBackspaceCharacter = unichr(mod.NSBackspaceCharacter) -mod.NSTabCharacter = unichr(mod.NSTabCharacter) -mod.NSNewlineCharacter = unichr(mod.NSNewlineCharacter) -mod.NSFormFeedCharacter = unichr(mod.NSFormFeedCharacter) -mod.NSCarriageReturnCharacter = unichr(mod.NSCarriageReturnCharacter) -mod.NSBackTabCharacter = unichr(mod.NSBackTabCharacter) -mod.NSDeleteCharacter = unichr(mod.NSDeleteCharacter) -mod.NSLineSeparatorCharacter = unichr(mod.NSLineSeparatorCharacter) -mod.NSParagraphSeparatorCharacter = unichr(mod.NSParagraphSeparatorCharacter) - - -for nm in [ - "NSUpArrowFunctionKey", - "NSDownArrowFunctionKey", - "NSLeftArrowFunctionKey", - "NSRightArrowFunctionKey", - "NSF1FunctionKey", - "NSF2FunctionKey", - "NSF3FunctionKey", - "NSF4FunctionKey", - "NSF5FunctionKey", - "NSF6FunctionKey", - "NSF7FunctionKey", - "NSF8FunctionKey", - "NSF9FunctionKey", - "NSF10FunctionKey", - "NSF11FunctionKey", - "NSF12FunctionKey", - "NSF13FunctionKey", - "NSF14FunctionKey", - "NSF15FunctionKey", - "NSF16FunctionKey", - "NSF17FunctionKey", - "NSF18FunctionKey", - "NSF19FunctionKey", - "NSF20FunctionKey", - "NSF21FunctionKey", - "NSF22FunctionKey", - "NSF23FunctionKey", - "NSF24FunctionKey", - "NSF25FunctionKey", - "NSF26FunctionKey", - "NSF27FunctionKey", - "NSF28FunctionKey", - "NSF29FunctionKey", - "NSF30FunctionKey", - "NSF31FunctionKey", - "NSF32FunctionKey", - "NSF33FunctionKey", - "NSF34FunctionKey", - "NSF35FunctionKey", - "NSInsertFunctionKey", - "NSDeleteFunctionKey", - "NSHomeFunctionKey", - "NSBeginFunctionKey", - "NSEndFunctionKey", - "NSPageUpFunctionKey", - "NSPageDownFunctionKey", - "NSPrintScreenFunctionKey", - "NSScrollLockFunctionKey", - "NSPauseFunctionKey", - "NSSysReqFunctionKey", - "NSBreakFunctionKey", - "NSResetFunctionKey", - "NSStopFunctionKey", - "NSMenuFunctionKey", - "NSUserFunctionKey", - 
"NSSystemFunctionKey", - "NSPrintFunctionKey", - "NSClearLineFunctionKey", - "NSClearDisplayFunctionKey", - "NSInsertLineFunctionKey", - "NSDeleteLineFunctionKey", - "NSInsertCharFunctionKey", - "NSDeleteCharFunctionKey", - "NSPrevFunctionKey", - "NSNextFunctionKey", - "NSSelectFunctionKey", - "NSExecuteFunctionKey", - "NSUndoFunctionKey", - "NSRedoFunctionKey", - "NSFindFunctionKey", - "NSHelpFunctionKey", - "NSModeSwitchFunctionKey", - ]: - try: - setattr(mod, nm, unichr(getattr(mod, nm))) - except AttributeError: - pass - -try: - mod.NSImageNameApplicationIcon -except AttributeError: - mod.NSImageNameApplicationIcon = "NSApplicationIcon" - -import sys -del sys.modules['AppKit._metadata'] diff --git a/env/lib/python2.7/site-packages/AppKit/__init__.pyc b/env/lib/python2.7/site-packages/AppKit/__init__.pyc deleted file mode 100644 index 4d1772fc..00000000 Binary files a/env/lib/python2.7/site-packages/AppKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AppKit/_inlines.so b/env/lib/python2.7/site-packages/AppKit/_inlines.so deleted file mode 100755 index 229d1804..00000000 Binary files a/env/lib/python2.7/site-packages/AppKit/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AppKit/_metadata.py b/env/lib/python2.7/site-packages/AppKit/_metadata.py deleted file mode 100644 index 04284b46..00000000 --- a/env/lib/python2.7/site-packages/AppKit/_metadata.py +++ /dev/null @@ -1,3456 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sat Aug 18 18:39:27 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'NSOpenGLPixelFormatAuxiliary': objc.createStructType('NSOpenGLPixelFormatAuxiliary', b'{_CGLPixelFormatObject=}', []), 'NSScreenAuxiliaryOpaque': objc.createStructType('NSScreenAuxiliaryOpaque', b'{NSScreenAuxiliary=}', []), 'NSOpenGLContextAuxiliary': objc.createStructType('NSOpenGLContextAuxiliary', b'{_CGLContextObject=}', [])}) -constants = 
'''$NSAFMAscender$NSAFMCapHeight$NSAFMCharacterSet$NSAFMDescender$NSAFMEncodingScheme$NSAFMFamilyName$NSAFMFontName$NSAFMFormatVersion$NSAFMFullName$NSAFMItalicAngle$NSAFMMappingScheme$NSAFMNotice$NSAFMUnderlinePosition$NSAFMUnderlineThickness$NSAFMVersion$NSAFMWeight$NSAFMXHeight$NSAbortModalException$NSAbortPrintingException$NSAboutPanelOptionApplicationIcon$NSAboutPanelOptionApplicationName$NSAboutPanelOptionApplicationVersion$NSAboutPanelOptionCredits$NSAboutPanelOptionVersion$NSAccessibilityActivationPointAttribute$NSAccessibilityAllowedValuesAttribute$NSAccessibilityAlternateUIVisibleAttribute$NSAccessibilityAnnotationElement$NSAccessibilityAnnotationLabel$NSAccessibilityAnnotationLocation$NSAccessibilityAnnotationTextAttribute$NSAccessibilityAnnouncementKey$NSAccessibilityAnnouncementRequestedNotification$NSAccessibilityApplicationActivatedNotification$NSAccessibilityApplicationDeactivatedNotification$NSAccessibilityApplicationHiddenNotification$NSAccessibilityApplicationRole$NSAccessibilityApplicationShownNotification$NSAccessibilityAscendingSortDirectionValue$NSAccessibilityAttachmentTextAttribute$NSAccessibilityAttributedStringForRangeParameterizedAttribute$NSAccessibilityAutocorrectedTextAttribute$NSAccessibilityBackgroundColorTextAttribute$NSAccessibilityBoundsForRangeParameterizedAttribute$NSAccessibilityBrowserRole$NSAccessibilityBusyIndicatorRole$NSAccessibilityButtonRole$NSAccessibilityCancelAction$NSAccessibilityCancelButtonAttribute$NSAccessibilityCellForColumnAndRowParameterizedAttribute$NSAccessibilityCellRole$NSAccessibilityCenterTabStopMarkerTypeValue$NSAccessibilityCentimetersUnitValue$NSAccessibilityCheckBoxRole$NSAccessibilityChildrenAttribute$NSAccessibilityClearButtonAttribute$NSAccessibilityCloseButtonAttribute$NSAccessibilityCloseButtonSubrole$NSAccessibilityCollectionListSubrole$NSAccessibilityColorWellRole$NSAccessibilityColumnCountAttribute$NSAccessibilityColumnHeaderUIElementsAttribute$NSAccessibilityColumnIndexRangeAttribute$NSAccessibilityColumnRole$NSAccessibilityColumnTitlesAttribute$NSAccessibilityColumnsAttribute$NSAccessibilityComboBoxRole$NSAccessibilityConfirmAction$NSAccessibilityContainsProtectedContentAttribute$NSAccessibilityContentListSubrole$NSAccessibilityContentsAttribute$NSAccessibilityCreatedNotification$NSAccessibilityCriticalValueAttribute$NSAccessibilityCustomTextAttribute$NSAccessibilityDecimalTabStopMarkerTypeValue$NSAccessibilityDecrementAction$NSAccessibilityDecrementArrowSubrole$NSAccessibilityDecrementButtonAttribute$NSAccessibilityDecrementPageSubrole$NSAccessibilityDefaultButtonAttribute$NSAccessibilityDefinitionListSubrole$NSAccessibilityDeleteAction$NSAccessibilityDescendingSortDirectionValue$NSAccessibilityDescriptionAttribute$NSAccessibilityDescriptionListSubrole$NSAccessibilityDialogSubrole$NSAccessibilityDisclosedByRowAttribute$NSAccessibilityDisclosedRowsAttribute$NSAccessibilityDisclosingAttribute$NSAccessibilityDisclosureLevelAttribute$NSAccessibilityDisclosureTriangleRole$NSAccessibilityDocumentAttribute$NSAccessibilityDrawerCreatedNotification$NSAccessibilityDrawerRole$NSAccessibilityEditedAttribute$NSAccessibilityEnabledAttribute$NSAccessibilityErrorCodeExceptionInfo$NSAccessibilityException$NSAccessibilityExpandedAttribute$NSAccessibilityExtrasMenuBarAttribute$NSAccessibilityFilenameAttribute$NSAccessibilityFirstLineIndentMarkerTypeValue$NSAccessibilityFloatingWindowSubrole$NSAccessibilityFocusedAttribute$NSAccessibilityFocusedUIElementAttribute$NSAccessibilityFocusedUIElementChangedNotification$NSAccessibilityFocus
edWindowAttribute$NSAccessibilityFocusedWindowChangedNotification$NSAccessibilityFontFamilyKey$NSAccessibilityFontNameKey$NSAccessibilityFontSizeKey$NSAccessibilityFontTextAttribute$NSAccessibilityForegroundColorTextAttribute$NSAccessibilityFrontmostAttribute$NSAccessibilityFullScreenButtonAttribute$NSAccessibilityFullScreenButtonSubrole$NSAccessibilityGridRole$NSAccessibilityGroupRole$NSAccessibilityGrowAreaAttribute$NSAccessibilityGrowAreaRole$NSAccessibilityHandleRole$NSAccessibilityHandlesAttribute$NSAccessibilityHeadIndentMarkerTypeValue$NSAccessibilityHeaderAttribute$NSAccessibilityHelpAttribute$NSAccessibilityHelpTagCreatedNotification$NSAccessibilityHelpTagRole$NSAccessibilityHiddenAttribute$NSAccessibilityHorizontalOrientationValue$NSAccessibilityHorizontalScrollBarAttribute$NSAccessibilityHorizontalUnitDescriptionAttribute$NSAccessibilityHorizontalUnitsAttribute$NSAccessibilityHorizontialUnitDescriptionAttribute$NSAccessibilityHorizontialUnitsAttribute$NSAccessibilityIdentifierAttribute$NSAccessibilityImageRole$NSAccessibilityInchesUnitValue$NSAccessibilityIncrementAction$NSAccessibilityIncrementArrowSubrole$NSAccessibilityIncrementButtonAttribute$NSAccessibilityIncrementPageSubrole$NSAccessibilityIncrementorRole$NSAccessibilityIndexAttribute$NSAccessibilityInsertionPointLineNumberAttribute$NSAccessibilityLabelUIElementsAttribute$NSAccessibilityLabelValueAttribute$NSAccessibilityLanguageTextAttribute$NSAccessibilityLayoutAreaRole$NSAccessibilityLayoutChangedNotification$NSAccessibilityLayoutItemRole$NSAccessibilityLayoutPointForScreenPointParameterizedAttribute$NSAccessibilityLayoutSizeForScreenSizeParameterizedAttribute$NSAccessibilityLeftTabStopMarkerTypeValue$NSAccessibilityLevelIndicatorRole$NSAccessibilityLineForIndexParameterizedAttribute$NSAccessibilityLinkRole$NSAccessibilityLinkTextAttribute$NSAccessibilityLinkedUIElementsAttribute$NSAccessibilityListItemIndexTextAttribute$NSAccessibilityListItemLevelTextAttribute$NSAccessibilityListItemPrefixTextAttribute$NSAccessibilityListRole$NSAccessibilityMainAttribute$NSAccessibilityMainWindowAttribute$NSAccessibilityMainWindowChangedNotification$NSAccessibilityMarkedMisspelledTextAttribute$NSAccessibilityMarkerGroupUIElementAttribute$NSAccessibilityMarkerTypeAttribute$NSAccessibilityMarkerTypeDescriptionAttribute$NSAccessibilityMarkerUIElementsAttribute$NSAccessibilityMarkerValuesAttribute$NSAccessibilityMatteContentUIElementAttribute$NSAccessibilityMatteHoleAttribute$NSAccessibilityMatteRole$NSAccessibilityMaxValueAttribute$NSAccessibilityMenuBarAttribute$NSAccessibilityMenuBarItemRole$NSAccessibilityMenuBarRole$NSAccessibilityMenuButtonRole$NSAccessibilityMenuItemRole$NSAccessibilityMenuRole$NSAccessibilityMinValueAttribute$NSAccessibilityMinimizeButtonAttribute$NSAccessibilityMinimizeButtonSubrole$NSAccessibilityMinimizedAttribute$NSAccessibilityMisspelledTextAttribute$NSAccessibilityModalAttribute$NSAccessibilityMovedNotification$NSAccessibilityNextContentsAttribute$NSAccessibilityNumberOfCharactersAttribute$NSAccessibilityOrderedByRowAttribute$NSAccessibilityOrientationAttribute$NSAccessibilityOutlineRole$NSAccessibilityOutlineRowSubrole$NSAccessibilityOverflowButtonAttribute$NSAccessibilityPageRole$NSAccessibilityParentAttribute$NSAccessibilityPicasUnitValue$NSAccessibilityPickAction$NSAccessibilityPlaceholderValueAttribute$NSAccessibilityPointsUnitValue$NSAccessibilityPopUpButtonRole$NSAccessibilityPopoverRole$NSAccessibilityPositionAttribute$NSAccessibilityPressAction$NSAccessibilityPreviousContentsAttribute$NSAccessibilit
yPriorityKey$NSAccessibilityProgressIndicatorRole$NSAccessibilityProxyAttribute$NSAccessibilityRTFForRangeParameterizedAttribute$NSAccessibilityRadioButtonRole$NSAccessibilityRadioGroupRole$NSAccessibilityRaiseAction$NSAccessibilityRangeForIndexParameterizedAttribute$NSAccessibilityRangeForLineParameterizedAttribute$NSAccessibilityRangeForPositionParameterizedAttribute$NSAccessibilityRatingIndicatorSubrole$NSAccessibilityRelevanceIndicatorRole$NSAccessibilityRequiredAttribute$NSAccessibilityResizedNotification$NSAccessibilityRightTabStopMarkerTypeValue$NSAccessibilityRoleAttribute$NSAccessibilityRoleDescriptionAttribute$NSAccessibilityRowCollapsedNotification$NSAccessibilityRowCountAttribute$NSAccessibilityRowCountChangedNotification$NSAccessibilityRowExpandedNotification$NSAccessibilityRowHeaderUIElementsAttribute$NSAccessibilityRowIndexRangeAttribute$NSAccessibilityRowRole$NSAccessibilityRowsAttribute$NSAccessibilityRulerMarkerRole$NSAccessibilityRulerRole$NSAccessibilityScreenPointForLayoutPointParameterizedAttribute$NSAccessibilityScreenSizeForLayoutSizeParameterizedAttribute$NSAccessibilityScrollAreaRole$NSAccessibilityScrollBarRole$NSAccessibilitySearchButtonAttribute$NSAccessibilitySearchFieldSubrole$NSAccessibilitySearchMenuAttribute$NSAccessibilitySectionListSubrole$NSAccessibilitySecureTextFieldSubrole$NSAccessibilitySelectedAttribute$NSAccessibilitySelectedCellsAttribute$NSAccessibilitySelectedCellsChangedNotification$NSAccessibilitySelectedChildrenAttribute$NSAccessibilitySelectedChildrenChangedNotification$NSAccessibilitySelectedChildrenMovedNotification$NSAccessibilitySelectedColumnsAttribute$NSAccessibilitySelectedColumnsChangedNotification$NSAccessibilitySelectedRowsAttribute$NSAccessibilitySelectedRowsChangedNotification$NSAccessibilitySelectedTextAttribute$NSAccessibilitySelectedTextChangedNotification$NSAccessibilitySelectedTextRangeAttribute$NSAccessibilitySelectedTextRangesAttribute$NSAccessibilityServesAsTitleForUIElementsAttribute$NSAccessibilityShadowTextAttribute$NSAccessibilitySharedCharacterRangeAttribute$NSAccessibilitySharedFocusElementsAttribute$NSAccessibilitySharedTextUIElementsAttribute$NSAccessibilitySheetCreatedNotification$NSAccessibilitySheetRole$NSAccessibilityShowAlternateUIAction$NSAccessibilityShowDefaultUIAction$NSAccessibilityShowMenuAction$NSAccessibilityShownMenuAttribute$NSAccessibilitySizeAttribute$NSAccessibilitySliderRole$NSAccessibilitySortButtonRole$NSAccessibilitySortButtonSubrole$NSAccessibilitySortDirectionAttribute$NSAccessibilitySplitGroupRole$NSAccessibilitySplitterRole$NSAccessibilitySplittersAttribute$NSAccessibilityStandardWindowSubrole$NSAccessibilityStaticTextRole$NSAccessibilityStrikethroughColorTextAttribute$NSAccessibilityStrikethroughTextAttribute$NSAccessibilityStringForRangeParameterizedAttribute$NSAccessibilityStyleRangeForIndexParameterizedAttribute$NSAccessibilitySubroleAttribute$NSAccessibilitySuperscriptTextAttribute$NSAccessibilitySwitchSubrole$NSAccessibilitySystemDialogSubrole$NSAccessibilitySystemFloatingWindowSubrole$NSAccessibilitySystemWideRole$NSAccessibilityTabButtonSubrole$NSAccessibilityTabGroupRole$NSAccessibilityTableRole$NSAccessibilityTableRowSubrole$NSAccessibilityTabsAttribute$NSAccessibilityTailIndentMarkerTypeValue$NSAccessibilityTextAlignmentAttribute$NSAccessibilityTextAreaRole$NSAccessibilityTextAttachmentSubrole$NSAccessibilityTextFieldRole$NSAccessibilityTextLinkSubrole$NSAccessibilityTimelineSubrole$NSAccessibilityTitleAttribute$NSAccessibilityTitleChangedNotification$NSAccessibilityTitleUIElem
entAttribute$NSAccessibilityToggleSubrole$NSAccessibilityToolbarButtonAttribute$NSAccessibilityToolbarButtonSubrole$NSAccessibilityToolbarRole$NSAccessibilityTopLevelUIElementAttribute$NSAccessibilityUIElementDestroyedNotification$NSAccessibilityUIElementsKey$NSAccessibilityURLAttribute$NSAccessibilityUnderlineColorTextAttribute$NSAccessibilityUnderlineTextAttribute$NSAccessibilityUnitDescriptionAttribute$NSAccessibilityUnitsAttribute$NSAccessibilityUnitsChangedNotification$NSAccessibilityUnknownMarkerTypeValue$NSAccessibilityUnknownOrientationValue$NSAccessibilityUnknownRole$NSAccessibilityUnknownSortDirectionValue$NSAccessibilityUnknownSubrole$NSAccessibilityUnknownUnitValue$NSAccessibilityValueAttribute$NSAccessibilityValueChangedNotification$NSAccessibilityValueDescriptionAttribute$NSAccessibilityValueIndicatorRole$NSAccessibilityVerticalOrientationValue$NSAccessibilityVerticalScrollBarAttribute$NSAccessibilityVerticalUnitDescriptionAttribute$NSAccessibilityVerticalUnitsAttribute$NSAccessibilityVisibleCellsAttribute$NSAccessibilityVisibleCharacterRangeAttribute$NSAccessibilityVisibleChildrenAttribute$NSAccessibilityVisibleColumnsAttribute$NSAccessibilityVisibleNameKey$NSAccessibilityVisibleRowsAttribute$NSAccessibilityWarningValueAttribute$NSAccessibilityWindowAttribute$NSAccessibilityWindowCreatedNotification$NSAccessibilityWindowDeminiaturizedNotification$NSAccessibilityWindowMiniaturizedNotification$NSAccessibilityWindowMovedNotification$NSAccessibilityWindowResizedNotification$NSAccessibilityWindowRole$NSAccessibilityWindowsAttribute$NSAccessibilityZoomButtonAttribute$NSAccessibilityZoomButtonSubrole$NSAlignmentBinding$NSAllRomanInputSourcesLocaleIdentifier$NSAllowsEditingMultipleValuesSelectionBindingOption$NSAllowsNullArgumentBindingOption$NSAlternateImageBinding$NSAlternateTitleBinding$NSAlwaysPresentsApplicationModalAlertsBindingOption$NSAnimateBinding$NSAnimationDelayBinding$NSAnimationProgressMark$NSAnimationProgressMarkNotification$NSAnimationTriggerOrderIn$NSAnimationTriggerOrderOut$NSAntialiasThresholdChangedNotification$NSApp$NSAppKitIgnoredException$NSAppKitVersionNumber@d$NSAppKitVirtualMemoryException$NSAppearanceDocumentAttribute$NSAppearanceNameAccessibilityHighContrastAqua$NSAppearanceNameAccessibilityHighContrastDarkAqua$NSAppearanceNameAccessibilityHighContrastVibrantDark$NSAppearanceNameAccessibilityHighContrastVibrantLight$NSAppearanceNameAqua$NSAppearanceNameDarkAqua$NSAppearanceNameLightContent$NSAppearanceNameVibrantDark$NSAppearanceNameVibrantLight$NSApplicationDidBecomeActiveNotification$NSApplicationDidChangeOcclusionStateNotification$NSApplicationDidChangeScreenParametersNotification$NSApplicationDidFinishLaunchingNotification$NSApplicationDidFinishRestoringWindowsNotification$NSApplicationDidHideNotification$NSApplicationDidResignActiveNotification$NSApplicationDidUnhideNotification$NSApplicationDidUpdateNotification$NSApplicationFileType$NSApplicationLaunchIsDefaultLaunchKey$NSApplicationLaunchRemoteNotificationKey$NSApplicationLaunchUserNotificationKey$NSApplicationWillBecomeActiveNotification$NSApplicationWillFinishLaunchingNotification$NSApplicationWillHideNotification$NSApplicationWillResignActiveNotification$NSApplicationWillTerminateNotification$NSApplicationWillUnhideNotification$NSApplicationWillUpdateNotification$NSArgumentBinding$NSAttachmentAttributeName$NSAttributedStringBinding$NSAuthorDocumentAttribute$NSBackgroundColorAttributeName$NSBackgroundColorDocumentAttribute$NSBackingPropertyOldColorSpaceKey$NSBackingPropertyOldScaleFactorKey$NSBad
BitmapParametersException$NSBadComparisonException$NSBadRTFColorTableException$NSBadRTFDirectiveException$NSBadRTFFontTableException$NSBadRTFStyleSheetException$NSBaseURLDocumentOption$NSBaselineOffsetAttributeName$NSBottomMarginDocumentAttribute$NSBrowserColumnConfigurationDidChangeNotification$NSBrowserIllegalDelegateException$NSCalibratedBlackColorSpace$NSCalibratedRGBColorSpace$NSCalibratedWhiteColorSpace$NSCategoryDocumentAttribute$NSCharacterEncodingDocumentAttribute$NSCharacterEncodingDocumentOption$NSCharacterShapeAttributeName$NSCocoaVersionDocumentAttribute$NSCollectionElementKindInterItemGapIndicator$NSCollectionElementKindSectionFooter$NSCollectionElementKindSectionHeader$NSColorListDidChangeNotification$NSColorListIOException$NSColorListNotEditableException$NSColorPanelColorDidChangeNotification$NSColorPboardType$NSComboBoxSelectionDidChangeNotification$NSComboBoxSelectionIsChangingNotification$NSComboBoxWillDismissNotification$NSComboBoxWillPopUpNotification$NSCommentDocumentAttribute$NSCompanyDocumentAttribute$NSConditionallySetsEditableBindingOption$NSConditionallySetsEnabledBindingOption$NSConditionallySetsHiddenBindingOption$NSContentArrayBinding$NSContentArrayForMultipleSelectionBinding$NSContentBinding$NSContentDictionaryBinding$NSContentHeightBinding$NSContentObjectBinding$NSContentObjectsBinding$NSContentPlacementTagBindingOption$NSContentSetBinding$NSContentValuesBinding$NSContentWidthBinding$NSContextHelpModeDidActivateNotification$NSContextHelpModeDidDeactivateNotification$NSContinuouslyUpdatesValueBindingOption$NSControlTextDidBeginEditingNotification$NSControlTextDidChangeNotification$NSControlTextDidEndEditingNotification$NSControlTintDidChangeNotification$NSConvertedDocumentAttribute$NSCopyrightDocumentAttribute$NSCreatesSortDescriptorBindingOption$NSCreationTimeDocumentAttribute$NSCriticalValueBinding$NSCursorAttributeName$NSCustomColorSpace$NSDataBinding$NSDefaultAttributesDocumentAttribute$NSDefaultAttributesDocumentOption$NSDefaultTabIntervalDocumentAttribute$NSDefinitionPresentationTypeDictionaryApplication$NSDefinitionPresentationTypeKey$NSDefinitionPresentationTypeOverlay$NSDeletesObjectsOnRemoveBindingsOption$NSDeviceBitsPerSample$NSDeviceBlackColorSpace$NSDeviceCMYKColorSpace$NSDeviceColorSpaceName$NSDeviceIsPrinter$NSDeviceIsScreen$NSDeviceRGBColorSpace$NSDeviceResolution$NSDeviceSize$NSDeviceWhiteColorSpace$NSDirectoryFileType$NSDisplayNameBindingOption$NSDisplayPatternBindingOption$NSDisplayPatternTitleBinding$NSDisplayPatternValueBinding$NSDocFormatTextDocumentType$NSDocumentEditedBinding$NSDocumentTypeDocumentAttribute$NSDocumentTypeDocumentOption$NSDoubleClickArgumentBinding$NSDoubleClickTargetBinding$NSDragPboard$NSDraggingException$NSDraggingImageComponentIconKey$NSDraggingImageComponentLabelKey$NSDrawerDidCloseNotification$NSDrawerDidOpenNotification$NSDrawerWillCloseNotification$NSDrawerWillOpenNotification$NSEditableBinding$NSEditorDocumentAttribute$NSEnabledBinding$NSEventTrackingRunLoopMode$NSExcludedElementsDocumentAttribute$NSExcludedKeysBinding$NSExpansionAttributeName$NSFileContentsPboardType$NSFileTypeDocumentAttribute$NSFileTypeDocumentOption$NSFilenamesPboardType$NSFilesPromisePboardType$NSFilesystemFileType$NSFilterPredicateBinding$NSFindPanelCaseInsensitiveSearch$NSFindPanelSearchOptionsPboardType$NSFindPanelSubstringMatch$NSFindPboard$NSFontAttributeName$NSFontBinding$NSFontBoldBinding$NSFontCascadeListAttribute$NSFontCharacterSetAttribute$NSFontCollectionActionKey$NSFontCollectionAllFonts$NSFontCollectionDidChangeNotification$NSF
ontCollectionDisallowAutoActivationOption$NSFontCollectionFavorites$NSFontCollectionIncludeDisabledFontsOption$NSFontCollectionNameKey$NSFontCollectionOldNameKey$NSFontCollectionRecentlyUsed$NSFontCollectionRemoveDuplicatesOption$NSFontCollectionUser$NSFontCollectionVisibilityKey$NSFontCollectionWasHidden$NSFontCollectionWasRenamed$NSFontCollectionWasShown$NSFontColorAttribute$NSFontFaceAttribute$NSFontFamilyAttribute$NSFontFamilyNameBinding$NSFontFeatureSelectorIdentifierKey$NSFontFeatureSettingsAttribute$NSFontFeatureTypeIdentifierKey$NSFontFixedAdvanceAttribute$NSFontItalicBinding$NSFontMatrixAttribute$NSFontNameAttribute$NSFontNameBinding$NSFontPboard$NSFontPboardType$NSFontSetChangedNotification$NSFontSizeAttribute$NSFontSizeBinding$NSFontSlantTrait$NSFontSymbolicTrait$NSFontTraitsAttribute$NSFontUnavailableException$NSFontVariationAttribute$NSFontVariationAxisDefaultValueKey$NSFontVariationAxisIdentifierKey$NSFontVariationAxisMaximumValueKey$NSFontVariationAxisMinimumValueKey$NSFontVariationAxisNameKey$NSFontVisibleNameAttribute$NSFontWeightBlack@d$NSFontWeightBold@d$NSFontWeightHeavy@d$NSFontWeightLight@d$NSFontWeightMedium@d$NSFontWeightRegular@d$NSFontWeightSemibold@d$NSFontWeightThin@d$NSFontWeightTrait$NSFontWeightUltraLight@d$NSFontWidthTrait$NSForegroundColorAttributeName$NSFullScreenModeAllScreens$NSFullScreenModeApplicationPresentationOptions$NSFullScreenModeSetting$NSFullScreenModeWindowLevel$NSGeneralPboard$NSGlyphInfoAttributeName$NSGraphicsContextDestinationAttributeName$NSGraphicsContextPDFFormat$NSGraphicsContextPSFormat$NSGraphicsContextRepresentationFormatAttributeName$NSGridViewSizeForContent@d$NSHTMLPboardType$NSHTMLTextDocumentType$NSHandlesContentAsCompoundValueBindingOption$NSHeaderTitleBinding$NSHiddenBinding$NSHyphenationFactorDocumentAttribute$NSIllegalSelectorException$NSImageBinding$NSImageCacheException$NSImageColorSyncProfileData$NSImageCompressionFactor$NSImageCompressionMethod$NSImageCurrentFrame$NSImageCurrentFrameDuration$NSImageDitherTransparency$NSImageEXIFData$NSImageFallbackBackgroundColor$NSImageFrameCount$NSImageGamma$NSImageHintCTM$NSImageHintInterpolation$NSImageHintUserInterfaceLayoutDirection$NSImageInterlaced$NSImageLoopCount$NSImageNameActionTemplate$NSImageNameAddTemplate$NSImageNameAdvanced$NSImageNameApplicationIcon$NSImageNameBluetoothTemplate$NSImageNameBonjour$NSImageNameBookmarksTemplate$NSImageNameCaution$NSImageNameColorPanel$NSImageNameColumnViewTemplate$NSImageNameComputer$NSImageNameDotMac$NSImageNameEnterFullScreenTemplate$NSImageNameEveryone$NSImageNameExitFullScreenTemplate$NSImageNameFlowViewTemplate$NSImageNameFolder$NSImageNameFolderBurnable$NSImageNameFolderSmart$NSImageNameFollowLinkFreestandingTemplate$NSImageNameFontPanel$NSImageNameGoBackTemplate$NSImageNameGoForwardTemplate$NSImageNameGoLeftTemplate$NSImageNameGoRightTemplate$NSImageNameHomeTemplate$NSImageNameIChatTheaterTemplate$NSImageNameIconViewTemplate$NSImageNameInfo$NSImageNameInvalidDataFreestandingTemplate$NSImageNameLeftFacingTriangleTemplate$NSImageNameListViewTemplate$NSImageNameLockLockedTemplate$NSImageNameLockUnlockedTemplate$NSImageNameMenuMixedStateTemplate$NSImageNameMenuOnStateTemplate$NSImageNameMobileMe$NSImageNameMultipleDocuments$NSImageNameNetwork$NSImageNamePathTemplate$NSImageNamePreferencesGeneral$NSImageNameQuickLookTemplate$NSImageNameRefreshFreestandingTemplate$NSImageNameRefreshTemplate$NSImageNameRemoveTemplate$NSImageNameRevealFreestandingTemplate$NSImageNameRightFacingTriangleTemplate$NSImageNameShareTemplate$NSImageNameSlideshowTem
plate$NSImageNameSmartBadgeTemplate$NSImageNameStatusAvailable$NSImageNameStatusNone$NSImageNameStatusPartiallyAvailable$NSImageNameStatusUnavailable$NSImageNameStopProgressFreestandingTemplate$NSImageNameStopProgressTemplate$NSImageNameTouchBarAddDetailTemplate$NSImageNameTouchBarAddTemplate$NSImageNameTouchBarAlarmTemplate$NSImageNameTouchBarAudioInputMuteTemplate$NSImageNameTouchBarAudioInputTemplate$NSImageNameTouchBarAudioOutputMuteTemplate$NSImageNameTouchBarAudioOutputVolumeHighTemplate$NSImageNameTouchBarAudioOutputVolumeLowTemplate$NSImageNameTouchBarAudioOutputVolumeMediumTemplate$NSImageNameTouchBarAudioOutputVolumeOffTemplate$NSImageNameTouchBarBookmarksTemplate$NSImageNameTouchBarColorPickerFill$NSImageNameTouchBarColorPickerFont$NSImageNameTouchBarColorPickerStroke$NSImageNameTouchBarCommunicationAudioTemplate$NSImageNameTouchBarCommunicationVideoTemplate$NSImageNameTouchBarComposeTemplate$NSImageNameTouchBarDeleteTemplate$NSImageNameTouchBarDownloadTemplate$NSImageNameTouchBarEnterFullScreenTemplate$NSImageNameTouchBarExitFullScreenTemplate$NSImageNameTouchBarFastForwardTemplate$NSImageNameTouchBarFolderCopyToTemplate$NSImageNameTouchBarFolderMoveToTemplate$NSImageNameTouchBarFolderTemplate$NSImageNameTouchBarGetInfoTemplate$NSImageNameTouchBarGoBackTemplate$NSImageNameTouchBarGoDownTemplate$NSImageNameTouchBarGoForwardTemplate$NSImageNameTouchBarGoUpTemplate$NSImageNameTouchBarHistoryTemplate$NSImageNameTouchBarIconViewTemplate$NSImageNameTouchBarListViewTemplate$NSImageNameTouchBarMailTemplate$NSImageNameTouchBarNewFolderTemplate$NSImageNameTouchBarNewMessageTemplate$NSImageNameTouchBarOpenInBrowserTemplate$NSImageNameTouchBarPauseTemplate$NSImageNameTouchBarPlayPauseTemplate$NSImageNameTouchBarPlayTemplate$NSImageNameTouchBarPlayheadTemplate$NSImageNameTouchBarQuickLookTemplate$NSImageNameTouchBarRecordStartTemplate$NSImageNameTouchBarRecordStopTemplate$NSImageNameTouchBarRefreshTemplate$NSImageNameTouchBarRemoveTemplate$NSImageNameTouchBarRewindTemplate$NSImageNameTouchBarRotateLeftTemplate$NSImageNameTouchBarRotateRightTemplate$NSImageNameTouchBarSearchTemplate$NSImageNameTouchBarShareTemplate$NSImageNameTouchBarSidebarTemplate$NSImageNameTouchBarSkipAhead15SecondsTemplate$NSImageNameTouchBarSkipAhead30SecondsTemplate$NSImageNameTouchBarSkipAheadTemplate$NSImageNameTouchBarSkipBack15SecondsTemplate$NSImageNameTouchBarSkipBack30SecondsTemplate$NSImageNameTouchBarSkipBackTemplate$NSImageNameTouchBarSkipToEndTemplate$NSImageNameTouchBarSkipToStartTemplate$NSImageNameTouchBarSlideshowTemplate$NSImageNameTouchBarTagIconTemplate$NSImageNameTouchBarTextBoldTemplate$NSImageNameTouchBarTextBoxTemplate$NSImageNameTouchBarTextCenterAlignTemplate$NSImageNameTouchBarTextItalicTemplate$NSImageNameTouchBarTextJustifiedAlignTemplate$NSImageNameTouchBarTextLeftAlignTemplate$NSImageNameTouchBarTextListTemplate$NSImageNameTouchBarTextRightAlignTemplate$NSImageNameTouchBarTextStrikethroughTemplate$NSImageNameTouchBarTextUnderlineTemplate$NSImageNameTouchBarUserAddTemplate$NSImageNameTouchBarUserGroupTemplate$NSImageNameTouchBarUserTemplate$NSImageNameTouchBarVolumeDownTemplate$NSImageNameTouchBarVolumeUpTemplate$NSImageNameTrashEmpty$NSImageNameTrashFull$NSImageNameUser$NSImageNameUserAccounts$NSImageNameUserGroup$NSImageNameUserGuest$NSImageProgressive$NSImageRGBColorTable$NSImageRepRegistryDidChangeNotification$NSIncludedKeysBinding$NSInitialKeyBinding$NSInitialValueBinding$NSInkTextPboardType$NSInsertsNullPlaceholderBindingOption$NSInterfaceStyleDefault$NSInvokesSeparatelyWithArrayObject
sBindingOption$NSIsIndeterminateBinding$NSKernAttributeName$NSKeywordsDocumentAttribute$NSLabelBinding$NSLeftMarginDocumentAttribute$NSLigatureAttributeName$NSLinkAttributeName$NSLocalizedKeyDictionaryBinding$NSMacSimpleTextDocumentType$NSManagedObjectContextBinding$NSManagerDocumentAttribute$NSMarkedClauseSegmentAttributeName$NSMaxValueBinding$NSMaxWidthBinding$NSMaximumRecentsBinding$NSMenuDidAddItemNotification$NSMenuDidBeginTrackingNotification$NSMenuDidChangeItemNotification$NSMenuDidEndTrackingNotification$NSMenuDidRemoveItemNotification$NSMenuDidSendActionNotification$NSMenuWillSendActionNotification$NSMinValueBinding$NSMinWidthBinding$NSMixedStateImageBinding$NSModalPanelRunLoopMode$NSModificationTimeDocumentAttribute$NSMultipleTextSelectionPboardType$NSMultipleValuesMarker$NSMultipleValuesPlaceholderBindingOption$NSNamedColorSpace$NSNibLoadingException$NSNibOwner$NSNibTopLevelObjects$NSNoSelectionMarker$NSNoSelectionPlaceholderBindingOption$NSNotApplicableMarker$NSNotApplicablePlaceholderBindingOption$NSNullPlaceholderBindingOption$NSObliquenessAttributeName$NSObservedKeyPathKey$NSObservedObjectKey$NSOffStateImageBinding$NSOfficeOpenXMLTextDocumentType$NSOnStateImageBinding$NSOpenDocumentTextDocumentType$NSOptionsKey$NSOutlineViewColumnDidMoveNotification$NSOutlineViewColumnDidResizeNotification$NSOutlineViewDisclosureButtonKey$NSOutlineViewItemDidCollapseNotification$NSOutlineViewItemDidExpandNotification$NSOutlineViewItemWillCollapseNotification$NSOutlineViewItemWillExpandNotification$NSOutlineViewSelectionDidChangeNotification$NSOutlineViewSelectionIsChangingNotification$NSOutlineViewShowHideButtonKey$NSPDFPboardType$NSPICTPboardType$NSPPDIncludeNotFoundException$NSPPDIncludeStackOverflowException$NSPPDIncludeStackUnderflowException$NSPPDParseException$NSPaperSizeDocumentAttribute$NSParagraphStyleAttributeName$NSPasteboardCommunicationException$NSPasteboardNameDrag$NSPasteboardNameFind$NSPasteboardNameFont$NSPasteboardNameGeneral$NSPasteboardNameRuler$NSPasteboardTypeColor$NSPasteboardTypeFileURL$NSPasteboardTypeFindPanelSearchOptions$NSPasteboardTypeFont$NSPasteboardTypeHTML$NSPasteboardTypeMultipleTextSelection$NSPasteboardTypePDF$NSPasteboardTypePNG$NSPasteboardTypeRTF$NSPasteboardTypeRTFD$NSPasteboardTypeRuler$NSPasteboardTypeSound$NSPasteboardTypeString$NSPasteboardTypeTIFF$NSPasteboardTypeTabularText$NSPasteboardTypeTextFinderOptions$NSPasteboardTypeURL$NSPasteboardURLReadingContentsConformToTypesKey$NSPasteboardURLReadingFileURLsOnlyKey$NSPatternColorSpace$NSPlainFileType$NSPlainTextDocumentType$NSPopUpButtonCellWillPopUpNotification$NSPopUpButtonWillPopUpNotification$NSPopoverCloseReasonDetachToWindow$NSPopoverCloseReasonKey$NSPopoverCloseReasonStandard$NSPopoverDidCloseNotification$NSPopoverDidShowNotification$NSPopoverWillCloseNotification$NSPopoverWillShowNotification$NSPositioningRectBinding$NSPostScriptPboardType$NSPredicateBinding$NSPredicateFormatBindingOption$NSPreferredScrollerStyleDidChangeNotification$NSPrefixSpacesDocumentAttribute$NSPrintAllPages$NSPrintAllPresetsJobStyleHint$NSPrintBottomMargin$NSPrintCancelJob$NSPrintCopies$NSPrintDetailedErrorReporting$NSPrintFaxCoverSheetName$NSPrintFaxHighResolution$NSPrintFaxJob$NSPrintFaxModem$NSPrintFaxNumber$NSPrintFaxReceiverNames$NSPrintFaxReceiverNumbers$NSPrintFaxReturnReceipt$NSPrintFaxSendTime$NSPrintFaxTrimPageEnds$NSPrintFaxUseCoverSheet$NSPrintFirstPage$NSPrintFormName$NSPrintHeaderAndFooter$NSPrintHorizontalPagination$NSPrintHorizontallyCentered$NSPrintJobDisposition$NSPrintJobFeatures$NSPrintJobSavingFile
NameExtensionHidden$NSPrintJobSavingURL$NSPrintLastPage$NSPrintLeftMargin$NSPrintManualFeed$NSPrintMustCollate$NSPrintNoPresetsJobStyleHint$NSPrintOperationExistsException$NSPrintOrientation$NSPrintPackageException$NSPrintPagesAcross$NSPrintPagesDown$NSPrintPagesPerSheet$NSPrintPanelAccessorySummaryItemDescriptionKey$NSPrintPanelAccessorySummaryItemNameKey$NSPrintPaperFeed$NSPrintPaperName$NSPrintPaperSize$NSPrintPhotoJobStyleHint$NSPrintPreviewJob$NSPrintPrinter$NSPrintPrinterName$NSPrintReversePageOrder$NSPrintRightMargin$NSPrintSaveJob$NSPrintSavePath$NSPrintScalingFactor$NSPrintSelectionOnly$NSPrintSpoolJob$NSPrintTime$NSPrintTopMargin$NSPrintVerticalPagination$NSPrintVerticallyCentered$NSPrintingCommunicationException$NSRTFDPboardType$NSRTFDTextDocumentType$NSRTFPboardType$NSRTFPropertyStackOverflowException$NSRTFTextDocumentType$NSRaisesForNotApplicableKeysBindingOption$NSReadOnlyDocumentAttribute$NSRecentSearchesBinding$NSRepresentedFilenameBinding$NSRightMarginDocumentAttribute$NSRowHeightBinding$NSRuleEditorPredicateComparisonModifier$NSRuleEditorPredicateCompoundType$NSRuleEditorPredicateCustomSelector$NSRuleEditorPredicateLeftExpression$NSRuleEditorPredicateOperatorType$NSRuleEditorPredicateOptions$NSRuleEditorPredicateRightExpression$NSRuleEditorRowsDidChangeNotification$NSRulerPboard$NSRulerPboardType$NSRulerViewUnitCentimeters$NSRulerViewUnitInches$NSRulerViewUnitPicas$NSRulerViewUnitPoints$NSScreenColorSpaceDidChangeNotification$NSScrollViewDidEndLiveMagnifyNotification$NSScrollViewDidEndLiveScrollNotification$NSScrollViewDidLiveScrollNotification$NSScrollViewWillStartLiveMagnifyNotification$NSScrollViewWillStartLiveScrollNotification$NSSelectedIdentifierBinding$NSSelectedIndexBinding$NSSelectedLabelBinding$NSSelectedObjectBinding$NSSelectedObjectsBinding$NSSelectedTagBinding$NSSelectedValueBinding$NSSelectedValuesBinding$NSSelectionIndexPathsBinding$NSSelectionIndexesBinding$NSSelectorNameBindingOption$NSSelectsAllWhenSettingContentBindingOption$NSShadowAttributeName$NSSharingServiceNameAddToAperture$NSSharingServiceNameAddToIPhoto$NSSharingServiceNameAddToSafariReadingList$NSSharingServiceNameCloudSharing$NSSharingServiceNameComposeEmail$NSSharingServiceNameComposeMessage$NSSharingServiceNamePostImageOnFlickr$NSSharingServiceNamePostOnFacebook$NSSharingServiceNamePostOnLinkedIn$NSSharingServiceNamePostOnSinaWeibo$NSSharingServiceNamePostOnTencentWeibo$NSSharingServiceNamePostOnTwitter$NSSharingServiceNamePostVideoOnTudou$NSSharingServiceNamePostVideoOnVimeo$NSSharingServiceNamePostVideoOnYouku$NSSharingServiceNameSendViaAirDrop$NSSharingServiceNameUseAsDesktopPicture$NSSharingServiceNameUseAsFacebookProfileImage$NSSharingServiceNameUseAsLinkedInProfileImage$NSSharingServiceNameUseAsTwitterProfileImage$NSShellCommandFileType$NSSliderAccessoryWidthDefault@d$NSSliderAccessoryWidthWide@d$NSSortDescriptorsBinding$NSSoundPboardType$NSSpeechCharacterModeProperty$NSSpeechCommandDelimiterProperty$NSSpeechCommandPrefix$NSSpeechCommandSuffix$NSSpeechCurrentVoiceProperty$NSSpeechDictionaryAbbreviations$NSSpeechDictionaryEntryPhonemes$NSSpeechDictionaryEntrySpelling$NSSpeechDictionaryLocaleIdentifier$NSSpeechDictionaryModificationDate$NSSpeechDictionaryPronunciations$NSSpeechErrorCount$NSSpeechErrorNewestCharacterOffset$NSSpeechErrorNewestCode$NSSpeechErrorOldestCharacterOffset$NSSpeechErrorOldestCode$NSSpeechErrorsProperty$NSSpeechInputModeProperty$NSSpeechModeLiteral$NSSpeechModeNormal$NSSpeechModePhoneme$NSSpeechModeText$NSSpeechNumberModeProperty$NSSpeechOutputToFileURLProperty$NSSp
eechPhonemeInfoExample$NSSpeechPhonemeInfoHiliteEnd$NSSpeechPhonemeInfoHiliteStart$NSSpeechPhonemeInfoOpcode$NSSpeechPhonemeInfoSymbol$NSSpeechPhonemeSymbolsProperty$NSSpeechPitchBaseProperty$NSSpeechPitchModProperty$NSSpeechRateProperty$NSSpeechRecentSyncProperty$NSSpeechResetProperty$NSSpeechStatusNumberOfCharactersLeft$NSSpeechStatusOutputBusy$NSSpeechStatusOutputPaused$NSSpeechStatusPhonemeCode$NSSpeechStatusProperty$NSSpeechSynthesizerInfoIdentifier$NSSpeechSynthesizerInfoProperty$NSSpeechSynthesizerInfoVersion$NSSpeechVolumeProperty$NSSpellCheckerDidChangeAutomaticCapitalizationNotification$NSSpellCheckerDidChangeAutomaticDashSubstitutionNotification$NSSpellCheckerDidChangeAutomaticPeriodSubstitutionNotification$NSSpellCheckerDidChangeAutomaticQuoteSubstitutionNotification$NSSpellCheckerDidChangeAutomaticSpellingCorrectionNotification$NSSpellCheckerDidChangeAutomaticTextCompletionNotification$NSSpellCheckerDidChangeAutomaticTextReplacementNotification$NSSpellingStateAttributeName$NSSplitViewControllerAutomaticDimension@d$NSSplitViewDidResizeSubviewsNotification$NSSplitViewItemUnspecifiedDimension@d$NSSplitViewWillResizeSubviewsNotification$NSStrikethroughColorAttributeName$NSStrikethroughStyleAttributeName$NSStringPboardType$NSStrokeColorAttributeName$NSStrokeWidthAttributeName$NSSubjectDocumentAttribute$NSSuperscriptAttributeName$NSSystemColorsDidChangeNotification$NSTIFFException$NSTIFFPboardType$NSTabColumnTerminatorsAttributeName$NSTableViewColumnDidMoveNotification$NSTableViewColumnDidResizeNotification$NSTableViewRowViewKey$NSTableViewSelectionDidChangeNotification$NSTableViewSelectionIsChangingNotification$NSTabularTextPboardType$NSTargetBinding$NSTextAlternativesAttributeName$NSTextAlternativesSelectedAlternativeStringNotification$NSTextCheckingDocumentAuthorKey$NSTextCheckingDocumentTitleKey$NSTextCheckingDocumentURLKey$NSTextCheckingOrthographyKey$NSTextCheckingQuotesKey$NSTextCheckingReferenceDateKey$NSTextCheckingReferenceTimeZoneKey$NSTextCheckingRegularExpressionsKey$NSTextCheckingReplacementsKey$NSTextCheckingSelectedRangeKey$NSTextColorBinding$NSTextDidBeginEditingNotification$NSTextDidChangeNotification$NSTextDidEndEditingNotification$NSTextEffectAttributeName$NSTextEffectLetterpressStyle$NSTextEncodingNameDocumentAttribute$NSTextEncodingNameDocumentOption$NSTextFinderCaseInsensitiveKey$NSTextFinderMatchingTypeKey$NSTextInputContextKeyboardSelectionDidChangeNotification$NSTextLayoutSectionOrientation$NSTextLayoutSectionRange$NSTextLayoutSectionsAttribute$NSTextLineTooLongException$NSTextListMarkerBox$NSTextListMarkerCheck$NSTextListMarkerCircle$NSTextListMarkerDecimal$NSTextListMarkerDiamond$NSTextListMarkerDisc$NSTextListMarkerHyphen$NSTextListMarkerLowercaseAlpha$NSTextListMarkerLowercaseHexadecimal$NSTextListMarkerLowercaseLatin$NSTextListMarkerLowercaseRoman$NSTextListMarkerOctal$NSTextListMarkerSquare$NSTextListMarkerUppercaseAlpha$NSTextListMarkerUppercaseHexadecimal$NSTextListMarkerUppercaseLatin$NSTextListMarkerUppercaseRoman$NSTextMovementUserInfoKey$NSTextNoSelectionException$NSTextReadException$NSTextSizeMultiplierDocumentOption$NSTextStorageDidProcessEditingNotification$NSTextStorageWillProcessEditingNotification$NSTextViewDidChangeSelectionNotification$NSTextViewDidChangeTypingAttributesNotification$NSTextViewWillChangeNotifyingTextViewNotification$NSTextWriteException$NSTimeoutDocumentOption$NSTitleBinding$NSTitleDocumentAttribute$NSToolTipAttributeName$NSToolTipBinding$NSToolbarCloudSharingItemIdentifier$NSToolbarCustomizeToolbarItemIdentifier$NSToolbar
DidRemoveItemNotification$NSToolbarFlexibleSpaceItemIdentifier$NSToolbarPrintItemIdentifier$NSToolbarSeparatorItemIdentifier$NSToolbarShowColorsItemIdentifier$NSToolbarShowFontsItemIdentifier$NSToolbarSpaceItemIdentifier$NSToolbarToggleSidebarItemIdentifier$NSToolbarWillAddItemNotification$NSTopMarginDocumentAttribute$NSTouchBarItemIdentifierCandidateList$NSTouchBarItemIdentifierCharacterPicker$NSTouchBarItemIdentifierFixedSpaceLarge$NSTouchBarItemIdentifierFixedSpaceSmall$NSTouchBarItemIdentifierFlexibleSpace$NSTouchBarItemIdentifierOtherItemsProxy$NSTouchBarItemIdentifierTextAlignment$NSTouchBarItemIdentifierTextColorPicker$NSTouchBarItemIdentifierTextFormat$NSTouchBarItemIdentifierTextList$NSTouchBarItemIdentifierTextStyle$NSTransparentBinding$NSTypeIdentifierAddressText$NSTypeIdentifierDateText$NSTypeIdentifierPhoneNumberText$NSTypeIdentifierTransitInformationText$NSTypedStreamVersionException$NSURLPboardType$NSUnderlineColorAttributeName$NSUnderlineStyleAttributeName$NSUserActivityDocumentURLKey$NSUsesScreenFontsDocumentAttribute$NSVCardPboardType$NSValidatesImmediatelyBindingOption$NSValueBinding$NSValuePathBinding$NSValueTransformerBindingOption$NSValueTransformerNameBindingOption$NSValueURLBinding$NSVerticalGlyphFormAttributeName$NSViewAnimationEffectKey$NSViewAnimationEndFrameKey$NSViewAnimationFadeInEffect$NSViewAnimationFadeOutEffect$NSViewAnimationStartFrameKey$NSViewAnimationTargetKey$NSViewBoundsDidChangeNotification$NSViewDidUpdateTrackingAreasNotification$NSViewFocusDidChangeNotification$NSViewFrameDidChangeNotification$NSViewGlobalFrameDidChangeNotification$NSViewModeDocumentAttribute$NSViewNoIntrinsicMetric@d$NSViewSizeDocumentAttribute$NSViewZoomDocumentAttribute$NSVisibleBinding$NSVoiceAge$NSVoiceDemoText$NSVoiceGender$NSVoiceGenderFemale$NSVoiceGenderMale$NSVoiceGenderNeuter$NSVoiceIdentifier$NSVoiceIndividuallySpokenCharacters$NSVoiceLanguage$NSVoiceLocaleIdentifier$NSVoiceName$NSVoiceSupportedCharacters$NSWarningValueBinding$NSWebArchiveTextDocumentType$NSWebPreferencesDocumentOption$NSWebResourceLoadDelegateDocumentOption$NSWidthBinding$NSWindowDidBecomeKeyNotification$NSWindowDidBecomeMainNotification$NSWindowDidChangeBackingPropertiesNotification$NSWindowDidChangeOcclusionStateNotification$NSWindowDidChangeScreenNotification$NSWindowDidChangeScreenProfileNotification$NSWindowDidDeminiaturizeNotification$NSWindowDidEndLiveResizeNotification$NSWindowDidEndSheetNotification$NSWindowDidEnterFullScreenNotification$NSWindowDidEnterVersionBrowserNotification$NSWindowDidExitFullScreenNotification$NSWindowDidExitVersionBrowserNotification$NSWindowDidExposeNotification$NSWindowDidMiniaturizeNotification$NSWindowDidMoveNotification$NSWindowDidResignKeyNotification$NSWindowDidResignMainNotification$NSWindowDidResizeNotification$NSWindowDidUpdateNotification$NSWindowServerCommunicationException$NSWindowWillBeginSheetNotification$NSWindowWillCloseNotification$NSWindowWillEnterFullScreenNotification$NSWindowWillEnterVersionBrowserNotification$NSWindowWillExitFullScreenNotification$NSWindowWillExitVersionBrowserNotification$NSWindowWillMiniaturizeNotification$NSWindowWillMoveNotification$NSWindowWillStartLiveResizeNotification$NSWordMLTextDocumentType$NSWordTablesReadException$NSWordTablesWriteException$NSWorkspaceAccessibilityDisplayOptionsDidChangeNotification$NSWorkspaceActiveSpaceDidChangeNotification$NSWorkspaceApplicationKey$NSWorkspaceCompressOperation$NSWorkspaceCopyOperation$NSWorkspaceDecompressOperation$NSWorkspaceDecryptOperation$NSWorkspaceDesktopImageAllowClippingKey
$NSWorkspaceDesktopImageFillColorKey$NSWorkspaceDesktopImageScalingKey$NSWorkspaceDestroyOperation$NSWorkspaceDidActivateApplicationNotification$NSWorkspaceDidChangeFileLabelsNotification$NSWorkspaceDidDeactivateApplicationNotification$NSWorkspaceDidHideApplicationNotification$NSWorkspaceDidLaunchApplicationNotification$NSWorkspaceDidMountNotification$NSWorkspaceDidPerformFileOperationNotification$NSWorkspaceDidRenameVolumeNotification$NSWorkspaceDidTerminateApplicationNotification$NSWorkspaceDidUnhideApplicationNotification$NSWorkspaceDidUnmountNotification$NSWorkspaceDidWakeNotification$NSWorkspaceDuplicateOperation$NSWorkspaceEncryptOperation$NSWorkspaceLaunchConfigurationAppleEvent$NSWorkspaceLaunchConfigurationArchitecture$NSWorkspaceLaunchConfigurationArguments$NSWorkspaceLaunchConfigurationEnvironment$NSWorkspaceLinkOperation$NSWorkspaceMoveOperation$NSWorkspaceRecycleOperation$NSWorkspaceScreensDidSleepNotification$NSWorkspaceScreensDidWakeNotification$NSWorkspaceSessionDidBecomeActiveNotification$NSWorkspaceSessionDidResignActiveNotification$NSWorkspaceVolumeLocalizedNameKey$NSWorkspaceVolumeOldLocalizedNameKey$NSWorkspaceVolumeOldURLKey$NSWorkspaceVolumeURLKey$NSWorkspaceWillLaunchApplicationNotification$NSWorkspaceWillPowerOffNotification$NSWorkspaceWillSleepNotification$NSWorkspaceWillUnmountNotification$NSWritingDirectionAttributeName$'''
-constants = constants + '$NSBlack@%s$'%(sel32or64('f', 'd'),)
-constants = constants + '$NSViewNoInstrinsicMetric@%s$'%(sel32or64('f', 'd'),)
-constants = constants + '$NSDarkGray@%s$'%(sel32or64('f', 'd'),)
-constants = constants + '$NSWhite@%s$'%(sel32or64('f', 'd'),)
-constants = constants + '$NSFontIdentityMatrix@%s$'%(sel32or64('^f', '^d'),)
-constants = constants + '$NSLightGray@%s$'%(sel32or64('f', 'd'),)
-constants = constants + '$NSUnderlineStrikethroughMask@%s$'%(sel32or64('I', 'Q'),)
-constants = constants + '$NSUnderlineByWordMask@%s$'%(sel32or64('I', 'Q'),)
-enums = 
'''$NS16BitBigEndianBitmapFormat@1024$NS16BitLittleEndianBitmapFormat@256$NS32BitBigEndianBitmapFormat@2048$NS32BitLittleEndianBitmapFormat@512$NSAWTEventType@16$NSAboveBottom@4$NSAboveTop@1$NSAcceleratorButton@8$NSAccessibilityAnnotationPositionEnd@2$NSAccessibilityAnnotationPositionFullRange@0$NSAccessibilityAnnotationPositionStart@1$NSAccessibilityCustomRotorSearchDirectionNext@1$NSAccessibilityCustomRotorSearchDirectionPrevious@0$NSAccessibilityCustomRotorTypeAnnotation@2$NSAccessibilityCustomRotorTypeAny@1$NSAccessibilityCustomRotorTypeBoldText@3$NSAccessibilityCustomRotorTypeCustom@0$NSAccessibilityCustomRotorTypeHeading@4$NSAccessibilityCustomRotorTypeHeadingLevel1@5$NSAccessibilityCustomRotorTypeHeadingLevel2@6$NSAccessibilityCustomRotorTypeHeadingLevel3@7$NSAccessibilityCustomRotorTypeHeadingLevel4@8$NSAccessibilityCustomRotorTypeHeadingLevel5@9$NSAccessibilityCustomRotorTypeHeadingLevel6@10$NSAccessibilityCustomRotorTypeImage@11$NSAccessibilityCustomRotorTypeItalicText@12$NSAccessibilityCustomRotorTypeLandmark@13$NSAccessibilityCustomRotorTypeLink@14$NSAccessibilityCustomRotorTypeList@15$NSAccessibilityCustomRotorTypeMisspelledWord@16$NSAccessibilityCustomRotorTypeTable@17$NSAccessibilityCustomRotorTypeTextField@18$NSAccessibilityCustomRotorTypeUnderlinedText@19$NSAccessibilityCustomRotorTypeVisitedLink@20$NSAccessibilityOrientationHorizontal@2$NSAccessibilityOrientationUnknown@0$NSAccessibilityOrientationVertical@1$NSAccessibilityPriorityHigh@90$NSAccessibilityPriorityLow@10$NSAccessibilityPriorityMedium@50$NSAccessibilityRulerMarkerTypeIndentFirstLine@7$NSAccessibilityRulerMarkerTypeIndentHead@5$NSAccessibilityRulerMarkerTypeIndentTail@6$NSAccessibilityRulerMarkerTypeTabStopCenter@3$NSAccessibilityRulerMarkerTypeTabStopDecimal@4$NSAccessibilityRulerMarkerTypeTabStopLeft@1$NSAccessibilityRulerMarkerTypeTabStopRight@2$NSAccessibilityRulerMarkerTypeUnknown@0$NSAccessibilitySortDirectionAscending@1$NSAccessibilitySortDirectionDescending@2$NSAccessibilitySortDirectionUnknown@0$NSAccessibilityUnitsCentimeters@2$NSAccessibilityUnitsInches@1$NSAccessibilityUnitsPicas@4$NSAccessibilityUnitsPoints@3$NSAccessibilityUnitsUnknown@0$NSAddTraitFontAction@2$NSAdobeCNS1CharacterCollection@1$NSAdobeGB1CharacterCollection@2$NSAdobeJapan1CharacterCollection@3$NSAdobeJapan2CharacterCollection@4$NSAdobeKorea1CharacterCollection@5$NSAlertAlternateReturn@0$NSAlertDefaultReturn@1$NSAlertErrorReturn@-2$NSAlertFirstButtonReturn@1000$NSAlertOtherReturn@-1$NSAlertSecondButtonReturn@1001$NSAlertStyleCritical@2$NSAlertStyleInformational@1$NSAlertStyleWarning@0$NSAlertThirdButtonReturn@1002$NSAllScrollerParts@2$NSAlphaFirstBitmapFormat@1$NSAlphaNonpremultipliedBitmapFormat@2$NSAlphaShiftKeyMask@65536$NSAlternateKeyMask@524288$NSAnimationBlocking@0$NSAnimationEaseIn@1$NSAnimationEaseInOut@0$NSAnimationEaseOut@2$NSAnimationEffectDisappearingItemDefault@0$NSAnimationEffectPoof@10$NSAnimationLinear@3$NSAnimationNonblocking@1$NSAnimationNonblockingThreaded@2$NSAnyType@0$NSAppKitDefined@13$NSAppKitDefinedMask@8192$NSAppKitVersionNumber10_0@577$NSAppKitVersionNumber10_1@620$NSAppKitVersionNumber10_10@1343$NSAppKitVersionNumber10_10_2@1344$NSAppKitVersionNumber10_10_3@1347$NSAppKitVersionNumber10_10_4@1348$NSAppKitVersionNumber10_10_5@1348$NSAppKitVersionNumber10_10_Max@1349$NSAppKitVersionNumber10_11@1404.0$NSAppKitVersionNumber10_11_1@1404.13$NSAppKitVersionNumber10_11_2@1404.34$NSAppKitVersionNumber10_11_3@1404.34$NSAppKitVersionNumber10_12@1504$NSAppKitVersionNumber10_12_1@1504.6$NSAppKitVersionNumber10_12_2@1504.
76$NSAppKitVersionNumber10_13@1561$NSAppKitVersionNumber10_13_1@1561.1$NSAppKitVersionNumber10_13_2@1561.2$NSAppKitVersionNumber10_13_4@1561.4$NSAppKitVersionNumber10_2@663$NSAppKitVersionNumber10_3@743$NSAppKitVersionNumber10_4@824$NSAppKitVersionNumber10_5@949$NSAppKitVersionNumber10_6@1038$NSAppKitVersionNumber10_7@1138$NSAppKitVersionNumber10_8@1187$NSAppKitVersionNumber10_9@1265$NSApplicationActivateAllWindows@1$NSApplicationActivateIgnoringOtherApps@2$NSApplicationActivatedEventType@1$NSApplicationActivationPolicyAccessory@1$NSApplicationActivationPolicyProhibited@2$NSApplicationActivationPolicyRegular@0$NSApplicationDeactivatedEventType@2$NSApplicationDefined@15$NSApplicationDefinedMask@32768$NSApplicationDelegateReplyCancel@1$NSApplicationDelegateReplyFailure@2$NSApplicationDelegateReplySuccess@0$NSApplicationOcclusionStateVisible@2$NSApplicationPresentationAutoHideDock@1$NSApplicationPresentationAutoHideMenuBar@4$NSApplicationPresentationAutoHideToolbar@2048$NSApplicationPresentationDefault@0$NSApplicationPresentationDisableAppleMenu@16$NSApplicationPresentationDisableCursorLocationAssistance@4096$NSApplicationPresentationDisableForceQuit@64$NSApplicationPresentationDisableHideApplication@256$NSApplicationPresentationDisableMenuBarTransparency@512$NSApplicationPresentationDisableProcessSwitching@32$NSApplicationPresentationDisableSessionTermination@128$NSApplicationPresentationFullScreen@1024$NSApplicationPresentationHideDock@2$NSApplicationPresentationHideMenuBar@8$NSAscendingPageOrder@1$NSAsciiWithDoubleByteEUCGlyphPacking@2$NSAtBottom@5$NSAtTop@2$NSAutoPagination@0$NSAutosaveAsOperation@5$NSAutosaveElsewhereOperation@3$NSAutosaveInPlaceOperation@4$NSAutosaveOperation@3$NSBMPFileType@1$NSBackTabCharacter@25$NSBackgroundStyleDark@1$NSBackgroundStyleEmphasized@1$NSBackgroundStyleLight@0$NSBackgroundStyleLowered@3$NSBackgroundStyleNormal@0$NSBackgroundStyleRaised@2$NSBackgroundTab@1$NSBackingStoreBuffered@2$NSBackingStoreNonretained@1$NSBackingStoreRetained@0$NSBackspaceCharacter@8$NSBacktabTextMovement@18$NSBeginFunctionKey@63274$NSBelowBottom@6$NSBelowTop@3$NSBevelLineJoinStyle@2$NSBezelBorder@2$NSBezelStyleCircular@7$NSBezelStyleDisclosure@5$NSBezelStyleHelpButton@9$NSBezelStyleInline@15$NSBezelStyleRecessed@13$NSBezelStyleRegularSquare@2$NSBezelStyleRoundRect@12$NSBezelStyleRounded@1$NSBezelStyleRoundedDisclosure@14$NSBezelStyleShadowlessSquare@6$NSBezelStyleSmallSquare@10$NSBezelStyleTexturedRounded@11$NSBezelStyleTexturedSquare@8$NSBezierPathElementClosePath@3$NSBezierPathElementCurveTo@2$NSBezierPathElementLineTo@1$NSBezierPathElementMoveTo@0$NSBitmapFormatAlphaFirst@1$NSBitmapFormatAlphaNonpremultiplied@2$NSBitmapFormatFloatingPointSamples@4$NSBitmapFormatSixteenBitBigEndian@1024$NSBitmapFormatSixteenBitLittleEndian@256$NSBitmapFormatThirtyTwoBitBigEndian@2048$NSBitmapFormatThirtyTwoBitLittleEndian@512$NSBitmapImageFileTypeBMP@1$NSBitmapImageFileTypeGIF@2$NSBitmapImageFileTypeJPEG@3$NSBitmapImageFileTypeJPEG2000@5$NSBitmapImageFileTypePNG@4$NSBitmapImageFileTypeTIFF@0$NSBlueControlTint@1$NSBoldFontMask@2$NSBorderlessWindowMask@0$NSBottomTabsBezelBorder@2$NSBoxCustom@4$NSBoxOldStyle@3$NSBoxPrimary@0$NSBoxSecondary@1$NSBoxSeparator@2$NSBreakFunctionKey@63282$NSBrowserAutoColumnResizing@1$NSBrowserDropAbove@1$NSBrowserDropOn@0$NSBrowserNoColumnResizing@0$NSBrowserUserColumnResizing@2$NSButtLineCapStyle@0$NSButtonTypeAccelerator@8$NSButtonTypeMomentaryChange@5$NSButtonTypeMomentaryLight@0$NSButtonTypeMomentaryPushIn@7$NSButtonTypeMultiLevelAccelerator@9$NSButtonTypeOnOff@6$NSBut
tonTypePushOnPushOff@1$NSButtonTypeRadio@4$NSButtonTypeSwitch@3$NSButtonTypeToggle@2$NSCMYKColorSpaceModel@2$NSCMYKModeColorPanel@2$NSCancelButton@0$NSCancelTextMovement@23$NSCarriageReturnCharacter@13$NSCellAllowsMixedState@16$NSCellChangesContents@14$NSCellDisabled@0$NSCellEditable@3$NSCellHasImageHorizontal@12$NSCellHasImageOnLeftOrBottom@13$NSCellHasOverlappingImage@11$NSCellHighlighted@5$NSCellHitContentArea@1$NSCellHitEditableTextArea@2$NSCellHitNone@0$NSCellHitTrackableArea@4$NSCellIsBordered@10$NSCellIsInsetButton@15$NSCellLightsByBackground@9$NSCellLightsByContents@6$NSCellLightsByGray@7$NSCellState@1$NSCenterTabStopType@2$NSCenterTextAlignment@2$NSChangeAutosaved@4$NSChangeBackgroundCell@8$NSChangeBackgroundCellMask@8$NSChangeCleared@2$NSChangeDiscardable@256$NSChangeDone@0$NSChangeGrayCell@4$NSChangeGrayCellMask@4$NSChangeReadOtherContents@3$NSChangeRedone@5$NSChangeUndone@1$NSCircularBezelStyle@7$NSCircularSlider@1$NSClearControlTint@7$NSClearDisplayFunctionKey@63290$NSClearLineFunctionKey@63289$NSClipPagination@2$NSClockAndCalendarDatePickerStyle@1$NSClosableWindowMask@2$NSClosePathBezierPathElement@3$NSCloudKitSharingServiceAllowPrivate@2$NSCloudKitSharingServiceAllowPublic@1$NSCloudKitSharingServiceAllowReadOnly@16$NSCloudKitSharingServiceAllowReadWrite@32$NSCloudKitSharingServiceStandard@0$NSCollectionElementCategoryDecorationView@2$NSCollectionElementCategoryInterItemGap@3$NSCollectionElementCategoryItem@0$NSCollectionElementCategorySupplementaryView@1$NSCollectionUpdateActionDelete@1$NSCollectionUpdateActionInsert@0$NSCollectionUpdateActionMove@3$NSCollectionUpdateActionNone@4$NSCollectionUpdateActionReload@2$NSCollectionViewDropBefore@1$NSCollectionViewDropOn@0$NSCollectionViewItemHighlightAsDropTarget@3$NSCollectionViewItemHighlightForDeselection@2$NSCollectionViewItemHighlightForSelection@1$NSCollectionViewItemHighlightNone@0$NSCollectionViewScrollDirectionHorizontal@1$NSCollectionViewScrollDirectionVertical@0$NSCollectionViewScrollPositionBottom@4$NSCollectionViewScrollPositionCenteredHorizontally@16$NSCollectionViewScrollPositionCenteredVertically@2$NSCollectionViewScrollPositionLeadingEdge@64$NSCollectionViewScrollPositionLeft@8$NSCollectionViewScrollPositionNearestHorizontalEdge@512$NSCollectionViewScrollPositionNearestVerticalEdge@256$NSCollectionViewScrollPositionNone@0$NSCollectionViewScrollPositionRight@32$NSCollectionViewScrollPositionTop@1$NSCollectionViewScrollPositionTrailingEdge@128$NSColorListModeColorPanel@5$NSColorPanelAllModesMask@65535$NSColorPanelCMYKModeMask@4$NSColorPanelColorListModeMask@32$NSColorPanelCrayonModeMask@128$NSColorPanelCustomPaletteModeMask@16$NSColorPanelGrayModeMask@1$NSColorPanelHSBModeMask@8$NSColorPanelModeCMYK@2$NSColorPanelModeColorList@5$NSColorPanelModeCrayon@7$NSColorPanelModeCustomPalette@4$NSColorPanelModeGray@0$NSColorPanelModeHSB@3$NSColorPanelModeNone@-1$NSColorPanelModeRGB@1$NSColorPanelModeWheel@6$NSColorPanelRGBModeMask@2$NSColorPanelWheelModeMask@64$NSColorRenderingIntentAbsoluteColorimetric@1$NSColorRenderingIntentDefault@0$NSColorRenderingIntentPerceptual@3$NSColorRenderingIntentRelativeColorimetric@2$NSColorRenderingIntentSaturation@4$NSColorSpaceModelCMYK@2$NSColorSpaceModelDeviceN@4$NSColorSpaceModelGray@0$NSColorSpaceModelIndexed@5$NSColorSpaceModelLAB@3$NSColorSpaceModelPatterned@6$NSColorSpaceModelRGB@1$NSColorSpaceModelUnknown@-1$NSColorSystemEffectDeepPressed@2$NSColorSystemEffectDisabled@3$NSColorSystemEffectNone@0$NSColorSystemEffectPressed@1$NSColorSystemEffectRollover@4$NSColorTypeCatalog@2$NSColorType
ComponentBased@0$NSColorTypePattern@1$NSCommandKeyMask@1048576$NSCompositeClear@0$NSCompositeColor@27$NSCompositeColorBurn@20$NSCompositeColorDodge@19$NSCompositeCopy@1$NSCompositeDarken@17$NSCompositeDestinationAtop@9$NSCompositeDestinationIn@7$NSCompositeDestinationOut@8$NSCompositeDestinationOver@6$NSCompositeDifference@23$NSCompositeExclusion@24$NSCompositeHardLight@22$NSCompositeHighlight@12$NSCompositeHue@25$NSCompositeLighten@18$NSCompositeLuminosity@28$NSCompositeMultiply@14$NSCompositeOverlay@16$NSCompositePlusDarker@11$NSCompositePlusLighter@13$NSCompositeSaturation@26$NSCompositeScreen@15$NSCompositeSoftLight@21$NSCompositeSourceAtop@5$NSCompositeSourceIn@3$NSCompositeSourceOut@4$NSCompositeSourceOver@2$NSCompositeXOR@10$NSCompositingOperationClear@0$NSCompositingOperationColor@27$NSCompositingOperationColorBurn@20$NSCompositingOperationColorDodge@19$NSCompositingOperationCopy@1$NSCompositingOperationDarken@17$NSCompositingOperationDestinationAtop@9$NSCompositingOperationDestinationIn@7$NSCompositingOperationDestinationOut@8$NSCompositingOperationDestinationOver@6$NSCompositingOperationDifference@23$NSCompositingOperationExclusion@24$NSCompositingOperationHardLight@22$NSCompositingOperationHighlight@12$NSCompositingOperationHue@25$NSCompositingOperationLighten@18$NSCompositingOperationLuminosity@28$NSCompositingOperationMultiply@14$NSCompositingOperationOverlay@16$NSCompositingOperationPlusDarker@11$NSCompositingOperationPlusLighter@13$NSCompositingOperationSaturation@26$NSCompositingOperationScreen@15$NSCompositingOperationSoftLight@21$NSCompositingOperationSourceAtop@5$NSCompositingOperationSourceIn@3$NSCompositingOperationSourceOut@4$NSCompositingOperationSourceOver@2$NSCompositingOperationXOR@10$NSCompressedFontMask@512$NSCondensedFontMask@64$NSContentsCellMask@1$NSContinuousCapacityLevelIndicatorStyle@1$NSControlCharacterActionContainerBreak@32$NSControlCharacterActionHorizontalTab@4$NSControlCharacterActionLineBreak@8$NSControlCharacterActionParagraphBreak@16$NSControlCharacterActionWhitespace@2$NSControlCharacterActionZeroAdvancement@1$NSControlGlyph@16777215$NSControlKeyMask@262144$NSControlSizeMini@2$NSControlSizeRegular@0$NSControlSizeSmall@1$NSControlStateMixed@-1$NSControlStateOff@0$NSControlStateOn@1$NSControlStateValueMixed@-1$NSControlStateValueOff@0$NSControlStateValueOn@1$NSCorrectionIndicatorTypeDefault@0$NSCorrectionIndicatorTypeGuesses@2$NSCorrectionIndicatorTypeReversion@1$NSCorrectionResponseAccepted@1$NSCorrectionResponseEdited@4$NSCorrectionResponseIgnored@3$NSCorrectionResponseNone@0$NSCorrectionResponseRejected@2$NSCorrectionResponseReverted@5$NSCrayonModeColorPanel@7$NSCriticalAlertStyle@2$NSCriticalRequest@0$NSCursorPointingDevice@2$NSCursorUpdate@17$NSCursorUpdateMask@131072$NSCurveToBezierPathElement@2$NSCustomPaletteModeColorPanel@4$NSDatePickerElementFlagEra@256$NSDatePickerElementFlagHourMinute@12$NSDatePickerElementFlagHourMinuteSecond@14$NSDatePickerElementFlagTimeZone@16$NSDatePickerElementFlagYearMonth@192$NSDatePickerElementFlagYearMonthDay@224$NSDatePickerModeRange@1$NSDatePickerModeSingle@0$NSDatePickerStyleClockAndCalendar@1$NSDatePickerStyleTextField@2$NSDatePickerStyleTextFieldAndStepper@0$NSDecimalTabStopType@3$NSDefaultControlTint@0$NSDefaultTokenStyle@0$NSDeleteCharFunctionKey@63294$NSDeleteCharacter@127$NSDeleteFunctionKey@63272$NSDeleteLineFunctionKey@63292$NSDescendingPageOrder@-1$NSDeviceIndependentModifierFlagsMask@4294901760$NSDeviceNColorSpaceModel@4$NSDirectSelection@0$NSDisclosureBezelStyle@5$NSDiscreteCapacityLevelIndicatorS
tyle@2$NSDisplayGamutP3@2$NSDisplayGamutSRGB@1$NSDisplayWindowRunLoopOrdering@600000$NSDocModalWindowMask@64$NSDockWindowLevel@20$NSDoubleType@6$NSDownArrowFunctionKey@63233$NSDownTextMovement@22$NSDragOperationAll@15$NSDragOperationAll_Obsolete@15$NSDragOperationCopy@1$NSDragOperationDelete@32$NSDragOperationGeneric@4$NSDragOperationLink@2$NSDragOperationMove@16$NSDragOperationNone@0$NSDragOperationPrivate@8$NSDraggingContextOutsideApplication@0$NSDraggingContextWithinApplication@1$NSDraggingFormationDefault@0$NSDraggingFormationList@3$NSDraggingFormationNone@1$NSDraggingFormationPile@2$NSDraggingFormationStack@4$NSDraggingItemEnumerationClearNonenumeratedImages@65536$NSDraggingItemEnumerationConcurrent@1$NSDrawerClosedState@0$NSDrawerClosingState@3$NSDrawerOpenState@2$NSDrawerOpeningState@1$NSEndFunctionKey@63275$NSEnterCharacter@3$NSEraDatePickerElementFlag@256$NSEraserPointingDevice@3$NSEvenOddWindingRule@1$NSEventButtonMaskPenLowerSide@2$NSEventButtonMaskPenTip@1$NSEventButtonMaskPenUpperSide@4$NSEventGestureAxisHorizontal@1$NSEventGestureAxisNone@0$NSEventGestureAxisVertical@2$NSEventMaskAny@18446744073709551615$NSEventMaskAppKitDefined@8192$NSEventMaskApplicationDefined@32768$NSEventMaskBeginGesture@524288$NSEventMaskCursorUpdate@131072$NSEventMaskDirectTouch@137438953472$NSEventMaskEndGesture@1048576$NSEventMaskFlagsChanged@4096$NSEventMaskGesture@536870912$NSEventMaskKeyDown@1024$NSEventMaskKeyUp@2048$NSEventMaskLeftMouseDown@2$NSEventMaskLeftMouseDragged@64$NSEventMaskLeftMouseUp@4$NSEventMaskMagnify@1073741824$NSEventMaskMouseEntered@256$NSEventMaskMouseExited@512$NSEventMaskMouseMoved@32$NSEventMaskOtherMouseDown@33554432$NSEventMaskOtherMouseDragged@134217728$NSEventMaskOtherMouseUp@67108864$NSEventMaskPeriodic@65536$NSEventMaskPressure@17179869184$NSEventMaskRightMouseDown@8$NSEventMaskRightMouseDragged@128$NSEventMaskRightMouseUp@16$NSEventMaskRotate@262144$NSEventMaskScrollWheel@4194304$NSEventMaskSmartMagnify@4294967296$NSEventMaskSwipe@2147483648$NSEventMaskSystemDefined@16384$NSEventMaskTabletPoint@8388608$NSEventMaskTabletProximity@16777216$NSEventModifierFlagCapsLock@65536$NSEventModifierFlagCommand@1048576$NSEventModifierFlagControl@262144$NSEventModifierFlagDeviceIndependentFlagsMask@4294901760$NSEventModifierFlagFunction@8388608$NSEventModifierFlagHelp@4194304$NSEventModifierFlagNumericPad@2097152$NSEventModifierFlagOption@524288$NSEventModifierFlagShift@131072$NSEventPhaseBegan@1$NSEventPhaseCancelled@16$NSEventPhaseChanged@4$NSEventPhaseEnded@8$NSEventPhaseMayBegin@32$NSEventPhaseNone@0$NSEventPhaseStationary@2$NSEventSubtypeApplicationActivated@1$NSEventSubtypeApplicationDeactivated@2$NSEventSubtypeMouseEvent@0$NSEventSubtypePowerOff@1$NSEventSubtypeScreenChanged@8$NSEventSubtypeTabletPoint@1$NSEventSubtypeTabletProximity@2$NSEventSubtypeTouch@3$NSEventSubtypeWindowExposed@0$NSEventSubtypeWindowMoved@4$NSEventSwipeTrackingClampGestureAmount@2$NSEventSwipeTrackingLockDirection@1$NSEventTypeAppKitDefined@13$NSEventTypeApplicationDefined@15$NSEventTypeBeginGesture@19$NSEventTypeCursorUpdate@17$NSEventTypeDirectTouch@37$NSEventTypeEndGesture@20$NSEventTypeFlagsChanged@12$NSEventTypeGesture@29$NSEventTypeKeyDown@10$NSEventTypeKeyUp@11$NSEventTypeLeftMouseDown@1$NSEventTypeLeftMouseDragged@6$NSEventTypeLeftMouseUp@2$NSEventTypeMagnify@30$NSEventTypeMouseEntered@8$NSEventTypeMouseExited@9$NSEventTypeMouseMoved@5$NSEventTypeOtherMouseDown@25$NSEventTypeOtherMouseDragged@27$NSEventTypeOtherMouseUp@26$NSEventTypePeriodic@16$NSEventTypePressure@34$NSEventTypeQuickLook@33$NSE
ventTypeRightMouseDown@3$NSEventTypeRightMouseDragged@7$NSEventTypeRightMouseUp@4$NSEventTypeRotate@18$NSEventTypeScrollWheel@22$NSEventTypeSmartMagnify@32$NSEventTypeSwipe@31$NSEventTypeSystemDefined@14$NSEventTypeTabletPoint@23$NSEventTypeTabletProximity@24$NSExclude10_4ElementsIconCreationOption@4$NSExcludeQuickDrawElementsIconCreationOption@2$NSExecuteFunctionKey@63298$NSExpandedFontMask@32$NSF10FunctionKey@63245$NSF11FunctionKey@63246$NSF12FunctionKey@63247$NSF13FunctionKey@63248$NSF14FunctionKey@63249$NSF15FunctionKey@63250$NSF16FunctionKey@63251$NSF17FunctionKey@63252$NSF18FunctionKey@63253$NSF19FunctionKey@63254$NSF1FunctionKey@63236$NSF20FunctionKey@63255$NSF21FunctionKey@63256$NSF22FunctionKey@63257$NSF23FunctionKey@63258$NSF24FunctionKey@63259$NSF25FunctionKey@63260$NSF26FunctionKey@63261$NSF27FunctionKey@63262$NSF28FunctionKey@63263$NSF29FunctionKey@63264$NSF2FunctionKey@63237$NSF30FunctionKey@63265$NSF31FunctionKey@63266$NSF32FunctionKey@63267$NSF33FunctionKey@63268$NSF34FunctionKey@63269$NSF35FunctionKey@63270$NSF3FunctionKey@63238$NSF4FunctionKey@63239$NSF5FunctionKey@63240$NSF6FunctionKey@63241$NSF7FunctionKey@63242$NSF8FunctionKey@63243$NSF9FunctionKey@63244$NSFPCurrentField@134$NSFPPreviewButton@131$NSFPPreviewField@128$NSFPRevertButton@130$NSFPSetButton@132$NSFPSizeField@129$NSFPSizeTitle@133$NSFileHandlingPanelCancelButton@0$NSFileHandlingPanelOKButton@1$NSFileWrapperReadingImmediate@1$NSFileWrapperReadingWithoutMapping@2$NSFileWrapperWritingAtomic@1$NSFileWrapperWritingWithNameUpdating@2$NSFindFunctionKey@63301$NSFindPanelActionNext@2$NSFindPanelActionPrevious@3$NSFindPanelActionReplace@5$NSFindPanelActionReplaceAll@4$NSFindPanelActionReplaceAllInSelection@8$NSFindPanelActionReplaceAndFind@6$NSFindPanelActionSelectAll@9$NSFindPanelActionSelectAllInSelection@10$NSFindPanelActionSetFindString@7$NSFindPanelActionShowFindPanel@1$NSFindPanelSubstringMatchTypeContains@0$NSFindPanelSubstringMatchTypeEndsWith@3$NSFindPanelSubstringMatchTypeFullWord@2$NSFindPanelSubstringMatchTypeStartsWith@1$NSFitPagination@1$NSFixedPitchFontMask@1024$NSFlagsChanged@12$NSFlagsChangedMask@4096$NSFloatType@3$NSFloatingPointSamplesBitmapFormat@4$NSFloatingWindowLevel@3$NSFocusRingAbove@2$NSFocusRingBelow@1$NSFocusRingOnly@0$NSFocusRingTypeDefault@0$NSFocusRingTypeExterior@2$NSFocusRingTypeNone@1$NSFontAntialiasedIntegerAdvancementsRenderingMode@3$NSFontAntialiasedRenderingMode@1$NSFontAssetDownloadError@66304$NSFontAssetRequestOptionUsesStandardUI@1$NSFontBoldTrait@2$NSFontClarendonSerifsClass@1073741824$NSFontCollectionApplicationOnlyMask@1$NSFontCollectionVisibilityComputer@4$NSFontCollectionVisibilityProcess@1$NSFontCollectionVisibilityUser@2$NSFontCondensedTrait@64$NSFontDefaultRenderingMode@0$NSFontDescriptorClassClarendonSerifs@1073741824$NSFontDescriptorClassFreeformSerifs@1879048192$NSFontDescriptorClassMask@4026531840$NSFontDescriptorClassModernSerifs@805306368$NSFontDescriptorClassOldStyleSerifs@268435456$NSFontDescriptorClassOrnamentals@2415919104$NSFontDescriptorClassSansSerif@2147483648$NSFontDescriptorClassScripts@2684354560$NSFontDescriptorClassSlabSerifs@1342177280$NSFontDescriptorClassSymbolic@3221225472$NSFontDescriptorClassTransitionalSerifs@536870912$NSFontDescriptorClassUnknown@0$NSFontDescriptorTraitBold@2$NSFontDescriptorTraitCondensed@64$NSFontDescriptorTraitExpanded@32$NSFontDescriptorTraitItalic@1$NSFontDescriptorTraitLooseLeading@65536$NSFontDescriptorTraitMonoSpace@1024$NSFontDescriptorTraitTightLeading@32768$NSFontDescriptorTraitUIOptimized@4096$NSFontDescriptorTraitVe
rtical@2048$NSFontErrorMaximum@66335$NSFontErrorMinimum@66304$NSFontExpandedTrait@32$NSFontFamilyClassMask@4026531840$NSFontFreeformSerifsClass@1879048192$NSFontIntegerAdvancementsRenderingMode@2$NSFontItalicTrait@1$NSFontModernSerifsClass@805306368$NSFontMonoSpaceTrait@1024$NSFontOldStyleSerifsClass@268435456$NSFontOrnamentalsClass@2415919104$NSFontPanelAllEffectsModeMask@1048320$NSFontPanelAllModesMask@4294967295$NSFontPanelCollectionModeMask@4$NSFontPanelDocumentColorEffectModeMask@2048$NSFontPanelFaceModeMask@1$NSFontPanelModeMaskAllEffects@1048320$NSFontPanelModeMaskCollection@4$NSFontPanelModeMaskDocumentColorEffect@2048$NSFontPanelModeMaskFace@1$NSFontPanelModeMaskShadowEffect@4096$NSFontPanelModeMaskSize@2$NSFontPanelModeMaskStrikethroughEffect@512$NSFontPanelModeMaskTextColorEffect@1024$NSFontPanelModeMaskUnderlineEffect@256$NSFontPanelModesMaskAllModes@4294967295$NSFontPanelModesMaskStandardModes@65535$NSFontPanelShadowEffectModeMask@4096$NSFontPanelSizeModeMask@2$NSFontPanelStandardModesMask@65535$NSFontPanelStrikethroughEffectModeMask@512$NSFontPanelTextColorEffectModeMask@1024$NSFontPanelUnderlineEffectModeMask@256$NSFontSansSerifClass@2147483648$NSFontScriptsClass@2684354560$NSFontSlabSerifsClass@1342177280$NSFontSymbolicClass@3221225472$NSFontTransitionalSerifsClass@536870912$NSFontUIOptimizedTrait@4096$NSFontUnknownClass@0$NSFontVerticalTrait@2048$NSFormFeedCharacter@12$NSFourByteGlyphPacking@4$NSFullScreenWindowMask@16384$NSFullSizeContentViewWindowMask@32768$NSFunctionKeyMask@8388608$NSGIFFileType@2$NSGestureRecognizerStateBegan@1$NSGestureRecognizerStateCancelled@4$NSGestureRecognizerStateChanged@2$NSGestureRecognizerStateEnded@3$NSGestureRecognizerStateFailed@5$NSGestureRecognizerStatePossible@0$NSGestureRecognizerStateRecognized@3$NSGlyphAbove@2$NSGlyphAttributeBidiLevel@2$NSGlyphAttributeElastic@1$NSGlyphAttributeInscribe@5$NSGlyphAttributeSoft@0$NSGlyphBelow@1$NSGlyphInscribeAbove@2$NSGlyphInscribeBase@0$NSGlyphInscribeBelow@1$NSGlyphInscribeOverBelow@4$NSGlyphInscribeOverstrike@3$NSGlyphLayoutAgainstAPoint@1$NSGlyphLayoutAtAPoint@0$NSGlyphLayoutWithPrevious@2$NSGlyphPropertyControlCharacter@2$NSGlyphPropertyElastic@4$NSGlyphPropertyNonBaseCharacter@8$NSGlyphPropertyNull@1$NSGradientConcaveStrong@2$NSGradientConcaveWeak@1$NSGradientConvexStrong@4$NSGradientConvexWeak@3$NSGradientDrawsAfterEndingLocation@2$NSGradientDrawsBeforeStartingLocation@1$NSGradientNone@0$NSGraphiteControlTint@6$NSGrayColorSpaceModel@0$NSGrayModeColorPanel@0$NSGridCellPlacementBottom@3$NSGridCellPlacementCenter@4$NSGridCellPlacementFill@5$NSGridCellPlacementInherited@0$NSGridCellPlacementLeading@2$NSGridCellPlacementNone@1$NSGridCellPlacementTop@2$NSGridCellPlacementTrailing@3$NSGridRowAlignmentFirstBaseline@2$NSGridRowAlignmentInherited@0$NSGridRowAlignmentLastBaseline@3$NSGridRowAlignmentNone@1$NSGrooveBorder@3$NSHSBModeColorPanel@3$NSHUDWindowMask@8192$NSHapticFeedbackPatternAlignment@1$NSHapticFeedbackPatternGeneric@0$NSHapticFeedbackPatternLevelChange@2$NSHapticFeedbackPerformanceTimeDefault@0$NSHapticFeedbackPerformanceTimeDrawCompleted@2$NSHapticFeedbackPerformanceTimeNow@1$NSHeavierFontAction@5$NSHelpButtonBezelStyle@9$NSHelpFunctionKey@63302$NSHelpKeyMask@4194304$NSHighlightModeMatrix@1$NSHomeFunctionKey@63273$NSHorizontalRuler@0$NSHourMinuteDatePickerElementFlag@12$NSHourMinuteSecondDatePickerElementFlag@14$NSIdentityMappingCharacterCollection@0$NSIllegalTextMovement@0$NSImageAbove@5$NSImageAlignBottom@5$NSImageAlignBottomLeft@6$NSImageAlignBottomRight@7$NSImageAlignCenter@0$NSImageAli
gnLeft@4$NSImageAlignRight@8$NSImageAlignTop@1$NSImageAlignTopLeft@2$NSImageAlignTopRight@3$NSImageBelow@4$NSImageCacheAlways@1$NSImageCacheBySize@2$NSImageCacheDefault@0$NSImageCacheNever@3$NSImageCellType@2$NSImageFrameButton@4$NSImageFrameGrayBezel@2$NSImageFrameGroove@3$NSImageFrameNone@0$NSImageFramePhoto@1$NSImageInterpolationDefault@0$NSImageInterpolationHigh@3$NSImageInterpolationLow@2$NSImageInterpolationMedium@4$NSImageInterpolationNone@1$NSImageLayoutDirectionLeftToRight@2$NSImageLayoutDirectionRightToLeft@3$NSImageLayoutDirectionUnspecified@-1$NSImageLeading@7$NSImageLeft@2$NSImageLoadStatusCancelled@1$NSImageLoadStatusCompleted@0$NSImageLoadStatusInvalidData@2$NSImageLoadStatusReadError@4$NSImageLoadStatusUnexpectedEOF@3$NSImageOnly@1$NSImageOverlaps@6$NSImageRepLoadStatusCompleted@-6$NSImageRepLoadStatusInvalidData@-4$NSImageRepLoadStatusReadingHeader@-2$NSImageRepLoadStatusUnexpectedEOF@-5$NSImageRepLoadStatusUnknownType@-1$NSImageRepLoadStatusWillNeedAllData@-3$NSImageRepMatchesDevice@0$NSImageResizingModeStretch@0$NSImageResizingModeTile@1$NSImageRight@3$NSImageScaleAxesIndependently@1$NSImageScaleNone@2$NSImageScaleProportionallyDown@0$NSImageScaleProportionallyUpOrDown@3$NSImageTrailing@8$NSIndexedColorSpaceModel@5$NSInformationalAlertStyle@1$NSInformationalRequest@10$NSInlineBezelStyle@15$NSInsertCharFunctionKey@63293$NSInsertFunctionKey@63271$NSInsertLineFunctionKey@63291$NSIntType@1$NSItalicFontMask@1$NSJPEG2000FileType@5$NSJPEGFileType@3$NSJapaneseEUCGlyphPacking@1$NSJustifiedTextAlignment@3$NSKeyDown@10$NSKeyDownMask@1024$NSKeyUp@11$NSKeyUpMask@2048$NSLABColorSpaceModel@3$NSLandscapeOrientation@1$NSLayoutAttributeBaseline@11$NSLayoutAttributeBottom@4$NSLayoutAttributeCenterX@9$NSLayoutAttributeCenterY@10$NSLayoutAttributeFirstBaseline@12$NSLayoutAttributeHeight@8$NSLayoutAttributeLastBaseline@11$NSLayoutAttributeLeading@5$NSLayoutAttributeLeft@1$NSLayoutAttributeNotAnAttribute@0$NSLayoutAttributeRight@2$NSLayoutAttributeTop@3$NSLayoutAttributeTrailing@6$NSLayoutAttributeWidth@7$NSLayoutCantFit@2$NSLayoutConstraintOrientationHorizontal@0$NSLayoutConstraintOrientationVertical@1$NSLayoutDone@1$NSLayoutFormatAlignAllBaseline@2048$NSLayoutFormatAlignAllBottom@16$NSLayoutFormatAlignAllCenterX@512$NSLayoutFormatAlignAllCenterY@1024$NSLayoutFormatAlignAllFirstBaseline@4096$NSLayoutFormatAlignAllLastBaseline@2048$NSLayoutFormatAlignAllLeading@32$NSLayoutFormatAlignAllLeft@2$NSLayoutFormatAlignAllRight@4$NSLayoutFormatAlignAllTop@8$NSLayoutFormatAlignAllTrailing@64$NSLayoutFormatAlignmentMask@65535$NSLayoutFormatDirectionLeadingToTrailing@0$NSLayoutFormatDirectionLeftToRight@65536$NSLayoutFormatDirectionMask@196608$NSLayoutFormatDirectionRightToLeft@131072$NSLayoutLeftToRight@0$NSLayoutNotDone@0$NSLayoutOutOfGlyphs@3$NSLayoutPriorityDefaultHigh@750$NSLayoutPriorityDefaultLow@250$NSLayoutPriorityDragThatCanResizeWindow@510$NSLayoutPriorityDragThatCannotResizeWindow@490$NSLayoutPriorityFittingSizeCompression@50$NSLayoutPriorityRequired@1000$NSLayoutPriorityWindowSizeStayPut@500$NSLayoutRelationEqual@0$NSLayoutRelationGreaterThanOrEqual@1$NSLayoutRelationLessThanOrEqual@-1$NSLayoutRightToLeft@1$NSLeftArrowFunctionKey@63234$NSLeftMouseDown@1$NSLeftMouseDownMask@2$NSLeftMouseDragged@6$NSLeftMouseDraggedMask@64$NSLeftMouseUp@2$NSLeftMouseUpMask@4$NSLeftTabStopType@0$NSLeftTabsBezelBorder@1$NSLeftTextAlignment@0$NSLeftTextMovement@19$NSLevelIndicatorPlaceholderVisibilityAlways@1$NSLevelIndicatorPlaceholderVisibilityAutomatic@0$NSLevelIndicatorPlaceholderVisibilityWhileEditing@2$NSLev
elIndicatorStyleContinuousCapacity@1$NSLevelIndicatorStyleDiscreteCapacity@2$NSLevelIndicatorStyleRating@3$NSLevelIndicatorStyleRelevancy@0$NSLighterFontAction@6$NSLineBorder@1$NSLineBreakByCharWrapping@1$NSLineBreakByClipping@2$NSLineBreakByTruncatingHead@3$NSLineBreakByTruncatingMiddle@5$NSLineBreakByTruncatingTail@4$NSLineBreakByWordWrapping@0$NSLineCapStyleButt@0$NSLineCapStyleRound@1$NSLineCapStyleSquare@2$NSLineDoesntMove@0$NSLineJoinStyleBevel@2$NSLineJoinStyleMiter@0$NSLineJoinStyleRound@1$NSLineMovesDown@3$NSLineMovesLeft@1$NSLineMovesRight@2$NSLineMovesUp@4$NSLineSeparatorCharacter@8232$NSLineSweepDown@2$NSLineSweepLeft@0$NSLineSweepRight@1$NSLineSweepUp@3$NSLineToBezierPathElement@1$NSLinearSlider@0$NSListModeMatrix@2$NSMacintoshInterfaceStyle@3$NSMainMenuWindowLevel@24$NSMediaLibraryAudio@1$NSMediaLibraryImage@2$NSMediaLibraryMovie@4$NSMenuFunctionKey@63285$NSMenuPropertyItemAccessibilityDescription@32$NSMenuPropertyItemAttributedTitle@2$NSMenuPropertyItemEnabled@16$NSMenuPropertyItemImage@8$NSMenuPropertyItemKeyEquivalent@4$NSMenuPropertyItemTitle@1$NSMiniControlSize@2$NSMiniaturizableWindowMask@4$NSMiterLineJoinStyle@0$NSMixedState@-1$NSModalPanelWindowLevel@8$NSModalResponseAbort@-1001$NSModalResponseCancel@0$NSModalResponseContinue@-1002$NSModalResponseOK@1$NSModalResponseStop@-1000$NSModeSwitchFunctionKey@63303$NSMomentaryChangeButton@5$NSMomentaryLight@7$NSMomentaryLightButton@0$NSMomentaryPushButton@0$NSMomentaryPushInButton@7$NSMouseEntered@8$NSMouseEnteredMask@256$NSMouseEventSubtype@0$NSMouseExited@9$NSMouseExitedMask@512$NSMouseMoved@5$NSMouseMovedMask@32$NSMoveToBezierPathElement@0$NSMultiLevelAcceleratorButton@9$NSNarrowFontMask@16$NSNativeShortGlyphPacking@5$NSNaturalTextAlignment@4$NSNewlineCharacter@10$NSNextFunctionKey@63296$NSNextStepInterfaceStyle@1$NSNoBorder@0$NSNoCellMask@0$NSNoFontChangeAction@0$NSNoImage@0$NSNoInterfaceStyle@0$NSNoModeColorPanel@-1$NSNoScrollerParts@0$NSNoTabsBezelBorder@4$NSNoTabsLineBorder@5$NSNoTabsNoBorder@6$NSNoTitle@0$NSNoUnderlineStyle@0$NSNonStandardCharacterSetFontMask@8$NSNonZeroWindingRule@0$NSNonactivatingPanelMask@128$NSNormalWindowLevel@0$NSNullCellType@0$NSNullGlyph@0$NSNumericPadKeyMask@2097152$NSOKButton@1$NSOPENGL_CURRENT_VERSION@1$NSOffState@0$NSOnOffButton@6$NSOnState@1$NSOneByteGlyphPacking@0$NSOnlyScrollerArrows@1$NSOpenGLCPCurrentRendererID@309$NSOpenGLCPGPUFragmentProcessing@311$NSOpenGLCPGPUVertexProcessing@310$NSOpenGLCPHasDrawable@314$NSOpenGLCPMPSwapsInFlight@315$NSOpenGLCPRasterizationEnable@221$NSOpenGLCPReclaimResources@308$NSOpenGLCPStateValidation@301$NSOpenGLCPSurfaceBackingSize@304$NSOpenGLCPSurfaceOpacity@236$NSOpenGLCPSurfaceOrder@235$NSOpenGLCPSurfaceSurfaceVolatile@306$NSOpenGLCPSwapInterval@222$NSOpenGLCPSwapRectangle@200$NSOpenGLCPSwapRectangleEnable@201$NSOpenGLContextParameterCurrentRendererID@309$NSOpenGLContextParameterGPUFragmentProcessing@311$NSOpenGLContextParameterGPUVertexProcessing@310$NSOpenGLContextParameterHasDrawable@314$NSOpenGLContextParameterMPSwapsInFlight@315$NSOpenGLContextParameterRasterizationEnable@221$NSOpenGLContextParameterReclaimResources@308$NSOpenGLContextParameterStateValidation@301$NSOpenGLContextParameterSurfaceBackingSize@304$NSOpenGLContextParameterSurfaceOpacity@236$NSOpenGLContextParameterSurfaceOrder@235$NSOpenGLContextParameterSurfaceSurfaceVolatile@306$NSOpenGLContextParameterSwapInterval@222$NSOpenGLContextParameterSwapRectangle@200$NSOpenGLContextParameterSwapRectangleEnable@201$NSOpenGLGOClearFormatCache@502$NSOpenGLGOFormatCacheSize@501$NSOpenGLGOResetLibr
ary@504$NSOpenGLGORetainRenderers@503$NSOpenGLGOUseBuildCache@506$NSOpenGLPFAAccelerated@73$NSOpenGLPFAAcceleratedCompute@97$NSOpenGLPFAAccumSize@14$NSOpenGLPFAAllRenderers@1$NSOpenGLPFAAllowOfflineRenderers@96$NSOpenGLPFAAlphaSize@11$NSOpenGLPFAAuxBuffers@7$NSOpenGLPFAAuxDepthStencil@57$NSOpenGLPFABackingStore@76$NSOpenGLPFAClosestPolicy@74$NSOpenGLPFAColorFloat@58$NSOpenGLPFAColorSize@8$NSOpenGLPFACompliant@83$NSOpenGLPFADepthSize@12$NSOpenGLPFADoubleBuffer@5$NSOpenGLPFAFullScreen@54$NSOpenGLPFAMPSafe@78$NSOpenGLPFAMaximumPolicy@52$NSOpenGLPFAMinimumPolicy@51$NSOpenGLPFAMultiScreen@81$NSOpenGLPFAMultisample@59$NSOpenGLPFANoRecovery@72$NSOpenGLPFAOffScreen@53$NSOpenGLPFAOpenGLProfile@99$NSOpenGLPFAPixelBuffer@90$NSOpenGLPFARemotePixelBuffer@91$NSOpenGLPFARendererID@70$NSOpenGLPFARobust@75$NSOpenGLPFASampleAlpha@61$NSOpenGLPFASampleBuffers@55$NSOpenGLPFASamples@56$NSOpenGLPFAScreenMask@84$NSOpenGLPFASingleRenderer@71$NSOpenGLPFAStencilSize@13$NSOpenGLPFAStereo@6$NSOpenGLPFASupersample@60$NSOpenGLPFATripleBuffer@3$NSOpenGLPFAVirtualScreenCount@128$NSOpenGLPFAWindow@80$NSOpenGLProfileVersion3_2Core@12800$NSOpenGLProfileVersion4_1Core@16640$NSOpenGLProfileVersionLegacy@4096$NSOtherMouseDown@25$NSOtherMouseDownMask@33554432$NSOtherMouseDragged@27$NSOtherMouseDraggedMask@134217728$NSOtherMouseUp@26$NSOtherMouseUpMask@67108864$NSOtherTextMovement@0$NSOutlineViewDropOnItemIndex@-1$NSPDFPanelRequestsParentDirectory@16777216$NSPDFPanelShowsOrientation@8$NSPDFPanelShowsPaperSize@4$NSPNGFileType@4$NSPageControllerTransitionStyleHorizontalStrip@2$NSPageControllerTransitionStyleStackBook@1$NSPageControllerTransitionStyleStackHistory@0$NSPageDownFunctionKey@63277$NSPageUpFunctionKey@63276$NSPaperOrientationLandscape@1$NSPaperOrientationPortrait@0$NSParagraphSeparatorCharacter@8233$NSPasteboardContentsCurrentHostOnly@1$NSPasteboardReadingAsData@0$NSPasteboardReadingAsKeyedArchive@4$NSPasteboardReadingAsPropertyList@2$NSPasteboardReadingAsString@1$NSPasteboardWritingPromised@512$NSPathStyleNavigationBar@1$NSPathStylePopUp@2$NSPathStyleStandard@0$NSPatternColorSpaceModel@6$NSPauseFunctionKey@63280$NSPenLowerSideMask@2$NSPenPointingDevice@1$NSPenTipMask@1$NSPenUpperSideMask@4$NSPeriodic@16$NSPeriodicMask@65536$NSPlainTextTokenStyle@1$NSPointingDeviceTypeCursor@2$NSPointingDeviceTypeEraser@3$NSPointingDeviceTypePen@1$NSPointingDeviceTypeUnknown@0$NSPopUpArrowAtBottom@2$NSPopUpArrowAtCenter@1$NSPopUpMenuWindowLevel@101$NSPopUpNoArrow@0$NSPopoverAppearanceHUD@1$NSPopoverAppearanceMinimal@0$NSPopoverBehaviorApplicationDefined@0$NSPopoverBehaviorSemitransient@2$NSPopoverBehaviorTransient@1$NSPortraitOrientation@0$NSPositiveDoubleType@7$NSPositiveFloatType@4$NSPositiveIntType@2$NSPosterFontMask@256$NSPowerOffEventType@1$NSPressedTab@2$NSPressureBehaviorPrimaryAccelerator@3$NSPressureBehaviorPrimaryClick@1$NSPressureBehaviorPrimaryDeepClick@5$NSPressureBehaviorPrimaryDeepDrag@6$NSPressureBehaviorPrimaryDefault@0$NSPressureBehaviorPrimaryGeneric@2$NSPressureBehaviorUnknown@-1$NSPrevFunctionKey@63295$NSPrintFunctionKey@63288$NSPrintPanelShowsCopies@1$NSPrintPanelShowsOrientation@8$NSPrintPanelShowsPageRange@2$NSPrintPanelShowsPageSetupAccessory@256$NSPrintPanelShowsPaperSize@4$NSPrintPanelShowsPreview@131072$NSPrintPanelShowsPrintSelection@32$NSPrintPanelShowsScaling@16$NSPrintRenderingQualityBest@0$NSPrintRenderingQualityResponsive@1$NSPrintScreenFunctionKey@63278$NSPrinterTableError@2$NSPrinterTableNotFound@1$NSPrinterTableOK@0$NSPrintingCancelled@0$NSPrintingFailure@3$NSPrintingPaginationModeAutomatic@0$NSPrinting
PaginationModeClip@2$NSPrintingPaginationModeFit@1$NSPrintingReplyLater@2$NSPrintingSuccess@1$NSProgressIndicatorBarStyle@0$NSProgressIndicatorPreferredAquaThickness@12$NSProgressIndicatorPreferredLargeThickness@18$NSProgressIndicatorPreferredSmallThickness@10$NSProgressIndicatorPreferredThickness@14$NSProgressIndicatorSpinningStyle@1$NSProgressIndicatorStyleBar@0$NSProgressIndicatorStyleSpinning@1$NSPushInCell@2$NSPushInCellMask@2$NSPushOnPushOffButton@1$NSQTMovieLoopingBackAndForthPlayback@2$NSQTMovieLoopingPlayback@1$NSQTMovieNormalPlayback@0$NSRGBColorSpaceModel@1$NSRGBModeColorPanel@1$NSRadioButton@4$NSRadioModeMatrix@0$NSRangeDateMode@1$NSRatingLevelIndicatorStyle@3$NSRecessedBezelStyle@13$NSRedoFunctionKey@63300$NSRegularControlSize@0$NSRegularSquareBezelStyle@2$NSRelevancyLevelIndicatorStyle@0$NSRemoteNotificationTypeAlert@4$NSRemoteNotificationTypeBadge@1$NSRemoteNotificationTypeNone@0$NSRemoteNotificationTypeSound@2$NSRemoveTraitFontAction@7$NSResetCursorRectsRunLoopOrdering@700000$NSResetFunctionKey@63283$NSResizableWindowMask@8$NSReturnTextMovement@16$NSRightArrowFunctionKey@63235$NSRightMouseDown@3$NSRightMouseDownMask@8$NSRightMouseDragged@7$NSRightMouseDraggedMask@128$NSRightMouseUp@4$NSRightMouseUpMask@16$NSRightTabStopType@1$NSRightTabsBezelBorder@3$NSRightTextAlignment@1$NSRightTextMovement@20$NSRoundLineCapStyle@1$NSRoundLineJoinStyle@1$NSRoundRectBezelStyle@12$NSRoundedBezelStyle@1$NSRoundedDisclosureBezelStyle@14$NSRoundedTokenStyle@2$NSRuleEditorNestingModeCompound@2$NSRuleEditorNestingModeList@1$NSRuleEditorNestingModeSimple@3$NSRuleEditorNestingModeSingle@0$NSRuleEditorRowTypeCompound@1$NSRuleEditorRowTypeSimple@0$NSRunAbortedResponse@-1001$NSRunContinuesResponse@-1002$NSRunStoppedResponse@-1000$NSSaveAsOperation@1$NSSaveOperation@0$NSSaveToOperation@2$NSScaleNone@2$NSScaleProportionally@0$NSScaleToFit@1$NSScreenChangedEventType@8$NSScreenSaverWindowLevel@1000$NSScrollElasticityAllowed@2$NSScrollElasticityAutomatic@0$NSScrollElasticityNone@1$NSScrollLockFunctionKey@63279$NSScrollViewFindBarPositionAboveContent@1$NSScrollViewFindBarPositionAboveHorizontalRuler@0$NSScrollViewFindBarPositionBelowContent@2$NSScrollWheel@22$NSScrollWheelMask@4194304$NSScrollerArrowsDefaultSetting@0$NSScrollerArrowsMaxEnd@0$NSScrollerArrowsMinEnd@1$NSScrollerArrowsNone@2$NSScrollerDecrementArrow@1$NSScrollerDecrementLine@4$NSScrollerDecrementPage@1$NSScrollerIncrementArrow@0$NSScrollerIncrementLine@5$NSScrollerIncrementPage@3$NSScrollerKnob@2$NSScrollerKnobSlot@6$NSScrollerKnobStyleDark@1$NSScrollerKnobStyleDefault@0$NSScrollerKnobStyleLight@2$NSScrollerNoPart@0$NSScrollerStyleLegacy@0$NSScrollerStyleOverlay@1$NSScrubberAlignmentCenter@3$NSScrubberAlignmentLeading@1$NSScrubberAlignmentNone@0$NSScrubberAlignmentTrailing@2$NSScrubberModeFixed@0$NSScrubberModeFree@1$NSSearchFieldClearRecentsMenuItemTag@1002$NSSearchFieldNoRecentsMenuItemTag@1003$NSSearchFieldRecentsMenuItemTag@1001$NSSearchFieldRecentsTitleMenuItemTag@1000$NSSegmentDistributionFill@1$NSSegmentDistributionFillEqually@2$NSSegmentDistributionFillProportionally@3$NSSegmentDistributionFit@0$NSSegmentStyleAutomatic@0$NSSegmentStyleCapsule@5$NSSegmentStyleRoundRect@3$NSSegmentStyleRounded@1$NSSegmentStyleSeparated@8$NSSegmentStyleSmallSquare@6$NSSegmentStyleTexturedRounded@2$NSSegmentStyleTexturedSquare@4$NSSegmentSwitchTrackingMomentary@2$NSSegmentSwitchTrackingMomentaryAccelerator@3$NSSegmentSwitchTrackingSelectAny@1$NSSegmentSwitchTrackingSelectOne@0$NSSelectByCharacter@0$NSSelectByParagraph@2$NSSelectByWord@1$NSSelectFunctionKey
@63297$NSSelectedTab@0$NSSelectingNext@1$NSSelectingPrevious@2$NSSelectionAffinityDownstream@1$NSSelectionAffinityUpstream@0$NSServiceApplicationLaunchFailedError@66561$NSServiceApplicationNotFoundError@66560$NSServiceErrorMaximum@66817$NSServiceErrorMinimum@66560$NSServiceInvalidPasteboardDataError@66563$NSServiceMalformedServiceDictionaryError@66564$NSServiceMiscellaneousError@66800$NSServiceRequestTimedOutError@66562$NSShadowlessSquareBezelStyle@6$NSSharingContentScopeFull@2$NSSharingContentScopeItem@0$NSSharingContentScopePartial@1$NSSharingServiceErrorMaximum@67327$NSSharingServiceErrorMinimum@67072$NSSharingServiceNotConfiguredError@67072$NSShiftKeyMask@131072$NSShowControlGlyphs@1$NSShowInvisibleGlyphs@2$NSSingleDateMode@0$NSSingleUnderlineStyle@1$NSSizeDownFontAction@4$NSSizeUpFontAction@3$NSSliderTypeCircular@1$NSSliderTypeLinear@0$NSSmallCapsFontMask@128$NSSmallControlSize@1$NSSmallIconButtonBezelStyle@2$NSSmallSquareBezelStyle@10$NSSpecialPageOrder@0$NSSpeechImmediateBoundary@0$NSSpeechSentenceBoundary@2$NSSpeechWordBoundary@1$NSSpellingStateGrammarFlag@2$NSSpellingStateSpellingFlag@1$NSSplitViewDividerStylePaneSplitter@3$NSSplitViewDividerStyleThick@1$NSSplitViewDividerStyleThin@2$NSSplitViewItemBehaviorContentList@2$NSSplitViewItemBehaviorDefault@0$NSSplitViewItemBehaviorSidebar@1$NSSplitViewItemCollapseBehaviorDefault@0$NSSplitViewItemCollapseBehaviorPreferResizingSiblingsWithFixedSplitView@2$NSSplitViewItemCollapseBehaviorPreferResizingSplitViewWithFixedSiblings@1$NSSplitViewItemCollapseBehaviorUseConstraints@3$NSSpringLoadingContinuousActivation@2$NSSpringLoadingDisabled@0$NSSpringLoadingEnabled@1$NSSpringLoadingHighlightEmphasized@2$NSSpringLoadingHighlightNone@0$NSSpringLoadingHighlightStandard@1$NSSpringLoadingNoHover@4$NSSquareLineCapStyle@2$NSSquareStatusItemLength@-2$NSStackViewDistributionEqualCentering@4$NSStackViewDistributionEqualSpacing@3$NSStackViewDistributionFill@0$NSStackViewDistributionFillEqually@1$NSStackViewDistributionFillProportionally@2$NSStackViewDistributionGravityAreas@-1$NSStackViewGravityBottom@3$NSStackViewGravityCenter@2$NSStackViewGravityLeading@1$NSStackViewGravityTop@1$NSStackViewGravityTrailing@3$NSStackViewVisibilityPriorityDetachOnlyIfNecessary@900$NSStackViewVisibilityPriorityMustHold@1000$NSStackViewVisibilityPriorityNotVisible@0$NSStatusItemBehaviorRemovalAllowed@2$NSStatusItemBehaviorTerminationOnRemoval@4$NSStatusWindowLevel@25$NSStopFunctionKey@63284$NSStringDrawingDisableScreenFontSubstitution@4$NSStringDrawingOneShot@16$NSStringDrawingTruncatesLastVisibleLine@32$NSStringDrawingUsesDeviceMetrics@8$NSStringDrawingUsesFontLeading@2$NSStringDrawingUsesLineFragmentOrigin@1$NSSubmenuWindowLevel@3$NSSwitchButton@3$NSSysReqFunctionKey@63281$NSSystemDefined@14$NSSystemDefinedMask@16384$NSSystemFunctionKey@63287$NSTIFFCompressionCCITTFAX3@3$NSTIFFCompressionCCITTFAX4@4$NSTIFFCompressionJPEG@6$NSTIFFCompressionLZW@5$NSTIFFCompressionNEXT@32766$NSTIFFCompressionNone@1$NSTIFFCompressionOldJPEG@32865$NSTIFFCompressionPackBits@32773$NSTIFFFileType@0$NSTabCharacter@9$NSTabPositionBottom@3$NSTabPositionLeft@2$NSTabPositionNone@0$NSTabPositionRight@4$NSTabPositionTop@1$NSTabTextMovement@17$NSTabViewBorderTypeBezel@2$NSTabViewBorderTypeLine@1$NSTabViewBorderTypeNone@0$NSTabViewControllerTabStyleSegmentedControlOnBottom@1$NSTabViewControllerTabStyleSegmentedControlOnTop@0$NSTabViewControllerTabStyleToolbar@2$NSTabViewControllerTabStyleUnspecified@-1$NSTableColumnAutoresizingMask@1$NSTableColumnNoResizing@0$NSTableColumnUserResizingMask@2$NSTableRowActi
onEdgeLeading@0$NSTableRowActionEdgeTrailing@1$NSTableViewAnimationEffectFade@1$NSTableViewAnimationEffectGap@2$NSTableViewAnimationEffectNone@0$NSTableViewAnimationSlideDown@32$NSTableViewAnimationSlideLeft@48$NSTableViewAnimationSlideRight@64$NSTableViewAnimationSlideUp@16$NSTableViewDashedHorizontalGridLineMask@8$NSTableViewDraggingDestinationFeedbackStyleGap@2$NSTableViewDraggingDestinationFeedbackStyleNone@-1$NSTableViewDraggingDestinationFeedbackStyleRegular@0$NSTableViewDraggingDestinationFeedbackStyleSourceList@1$NSTableViewDropAbove@1$NSTableViewDropOn@0$NSTableViewFirstColumnOnlyAutoresizingStyle@5$NSTableViewGridNone@0$NSTableViewLastColumnOnlyAutoresizingStyle@4$NSTableViewNoColumnAutoresizing@0$NSTableViewReverseSequentialColumnAutoresizingStyle@3$NSTableViewRowActionStyleDestructive@1$NSTableViewRowActionStyleRegular@0$NSTableViewRowSizeStyleCustom@0$NSTableViewRowSizeStyleDefault@-1$NSTableViewRowSizeStyleLarge@3$NSTableViewRowSizeStyleMedium@2$NSTableViewRowSizeStyleSmall@1$NSTableViewSelectionHighlightStyleNone@-1$NSTableViewSelectionHighlightStyleRegular@0$NSTableViewSelectionHighlightStyleSourceList@1$NSTableViewSequentialColumnAutoresizingStyle@2$NSTableViewSolidHorizontalGridLineMask@2$NSTableViewSolidVerticalGridLineMask@1$NSTableViewUniformColumnAutoresizingStyle@1$NSTabletPoint@23$NSTabletPointEventSubtype@1$NSTabletPointMask@8388608$NSTabletProximity@24$NSTabletProximityEventSubtype@2$NSTabletProximityMask@16777216$NSTerminateCancel@0$NSTerminateLater@2$NSTerminateNow@1$NSTextAlignmentCenter@2$NSTextAlignmentJustified@3$NSTextAlignmentLeft@0$NSTextAlignmentNatural@4$NSTextAlignmentRight@1$NSTextBlockAbsoluteValueType@0$NSTextBlockBaselineAlignment@3$NSTextBlockBorder@0$NSTextBlockBottomAlignment@2$NSTextBlockHeight@4$NSTextBlockMargin@1$NSTextBlockMaximumHeight@6$NSTextBlockMaximumWidth@2$NSTextBlockMiddleAlignment@1$NSTextBlockMinimumHeight@5$NSTextBlockMinimumWidth@1$NSTextBlockPadding@-1$NSTextBlockPercentageValueType@1$NSTextBlockTopAlignment@0$NSTextBlockWidth@0$NSTextCellType@1$NSTextFieldAndStepperDatePickerStyle@0$NSTextFieldDatePickerStyle@2$NSTextFieldRoundedBezel@1$NSTextFieldSquareBezel@0$NSTextFinderActionHideFindInterface@11$NSTextFinderActionHideReplaceInterface@13$NSTextFinderActionNextMatch@2$NSTextFinderActionPreviousMatch@3$NSTextFinderActionReplace@5$NSTextFinderActionReplaceAll@4$NSTextFinderActionReplaceAllInSelection@8$NSTextFinderActionReplaceAndFind@6$NSTextFinderActionSelectAll@9$NSTextFinderActionSelectAllInSelection@10$NSTextFinderActionSetSearchString@7$NSTextFinderActionShowFindInterface@1$NSTextFinderActionShowReplaceInterface@12$NSTextFinderMatchingTypeContains@0$NSTextFinderMatchingTypeEndsWith@3$NSTextFinderMatchingTypeFullWord@2$NSTextFinderMatchingTypeStartsWith@1$NSTextLayoutOrientationHorizontal@0$NSTextLayoutOrientationVertical@1$NSTextListPrependEnclosingMarker@1$NSTextMovementBacktab@18$NSTextMovementCancel@23$NSTextMovementDown@22$NSTextMovementLeft@19$NSTextMovementOther@0$NSTextMovementReturn@16$NSTextMovementRight@20$NSTextMovementTab@17$NSTextMovementUp@21$NSTextReadInapplicableDocumentTypeError@65806$NSTextReadWriteErrorMaximum@66303$NSTextReadWriteErrorMinimum@65792$NSTextStorageEditedAttributes@1$NSTextStorageEditedCharacters@2$NSTextTableAutomaticLayoutAlgorithm@0$NSTextTableFixedLayoutAlgorithm@1$NSTextWriteInapplicableDocumentTypeError@66062$NSTextWritingDirectionEmbedding@0$NSTextWritingDirectionOverride@2$NSTexturedBackgroundWindowMask@256$NSTexturedRoundedBezelStyle@11$NSTexturedSquareBezelStyle@8$NSThickSquareB
ezelStyle@3$NSThickerSquareBezelStyle@4$NSTickMarkAbove@1$NSTickMarkBelow@0$NSTickMarkLeft@1$NSTickMarkPositionAbove@1$NSTickMarkPositionBelow@0$NSTickMarkPositionLeading@1$NSTickMarkPositionTrailing@0$NSTickMarkRight@0$NSTimeZoneDatePickerElementFlag@16$NSTitledWindowMask@1$NSToggleButton@2$NSTokenStyleDefault@0$NSTokenStyleNone@1$NSTokenStylePlainSquared@4$NSTokenStyleRounded@2$NSTokenStyleSquared@3$NSToolbarDisplayModeDefault@0$NSToolbarDisplayModeIconAndLabel@1$NSToolbarDisplayModeIconOnly@2$NSToolbarDisplayModeLabelOnly@3$NSToolbarItemVisibilityPriorityHigh@1000$NSToolbarItemVisibilityPriorityLow@-1000$NSToolbarItemVisibilityPriorityStandard@0$NSToolbarItemVisibilityPriorityUser@2000$NSToolbarSizeModeDefault@0$NSToolbarSizeModeRegular@1$NSToolbarSizeModeSmall@2$NSTopTabsBezelBorder@0$NSTornOffMenuWindowLevel@3$NSTouchBarItemPriorityHigh@1000.0$NSTouchBarItemPriorityLow@-1000.0$NSTouchBarItemPriorityNormal@0.0$NSTouchEventSubtype@3$NSTouchPhaseBegan@1$NSTouchPhaseCancelled@16$NSTouchPhaseEnded@8$NSTouchPhaseMoved@2$NSTouchPhaseStationary@4$NSTouchPhaseTouching@7$NSTouchTypeDirect@0$NSTouchTypeIndirect@1$NSTouchTypeMaskDirect@1$NSTouchTypeMaskIndirect@2$NSTrackModeMatrix@3$NSTrackingActiveAlways@128$NSTrackingActiveInActiveApp@64$NSTrackingActiveInKeyWindow@32$NSTrackingActiveWhenFirstResponder@16$NSTrackingAssumeInside@256$NSTrackingCursorUpdate@4$NSTrackingEnabledDuringMouseDrag@1024$NSTrackingInVisibleRect@512$NSTrackingMouseEnteredAndExited@1$NSTrackingMouseMoved@2$NSTwoByteGlyphPacking@3$NSTypesetterBehavior_10_2@2$NSTypesetterBehavior_10_2_WithCompatibility@1$NSTypesetterBehavior_10_3@3$NSTypesetterBehavior_10_4@4$NSTypesetterContainerBreakAction@32$NSTypesetterHorizontalTabAction@4$NSTypesetterLatestBehavior@-1$NSTypesetterLineBreakAction@8$NSTypesetterOriginalBehavior@0$NSTypesetterParagraphBreakAction@16$NSTypesetterWhitespaceAction@2$NSTypesetterZeroAdvancementAction@1$NSUnboldFontMask@4$NSUnderlinePatternDash@512$NSUnderlinePatternDashDot@768$NSUnderlinePatternDashDotDot@1024$NSUnderlinePatternDot@256$NSUnderlinePatternSolid@0$NSUnderlineStyleByWord@32768$NSUnderlineStyleDouble@9$NSUnderlineStyleNone@0$NSUnderlineStylePatternDash@512$NSUnderlineStylePatternDashDot@768$NSUnderlineStylePatternDashDotDot@1024$NSUnderlineStylePatternDot@256$NSUnderlineStylePatternSolid@0$NSUnderlineStyleSingle@1$NSUnderlineStyleThick@2$NSUndoFunctionKey@63299$NSUnifiedTitleAndToolbarWindowMask@4096$NSUnitalicFontMask@16777216$NSUnknownColorSpaceModel@-1$NSUnknownPageOrder@2$NSUnknownPointingDevice@0$NSUnscaledWindowMask@2048$NSUpArrowFunctionKey@63232$NSUpTextMovement@21$NSUpdateWindowsRunLoopOrdering@500000$NSUserFunctionKey@63286$NSUserInterfaceLayoutDirectionLeftToRight@0$NSUserInterfaceLayoutDirectionRightToLeft@1$NSUserInterfaceLayoutOrientationHorizontal@0$NSUserInterfaceLayoutOrientationVertical@1$NSUtilityWindowMask@16$NSVariableStatusItemLength@-1$NSVerticalRuler@1$NSViaPanelFontAction@1$NSViewControllerTransitionAllowUserInteraction@4096$NSViewControllerTransitionCrossfade@1$NSViewControllerTransitionNone@0$NSViewControllerTransitionSlideBackward@384$NSViewControllerTransitionSlideDown@32$NSViewControllerTransitionSlideForward@320$NSViewControllerTransitionSlideLeft@64$NSViewControllerTransitionSlideRight@128$NSViewControllerTransitionSlideUp@16$NSViewHeightSizable@16$NSViewLayerContentsPlacementBottom@8$NSViewLayerContentsPlacementBottomLeft@9$NSViewLayerContentsPlacementBottomRight@7$NSViewLayerContentsPlacementCenter@3$NSViewLayerContentsPlacementLeft@10$NSViewLayerContentsPlacementRi
ght@6$NSViewLayerContentsPlacementScaleAxesIndependently@0$NSViewLayerContentsPlacementScaleProportionallyToFill@2$NSViewLayerContentsPlacementScaleProportionallyToFit@1$NSViewLayerContentsPlacementTop@4$NSViewLayerContentsPlacementTopLeft@11$NSViewLayerContentsPlacementTopRight@5$NSViewLayerContentsRedrawBeforeViewResize@3$NSViewLayerContentsRedrawCrossfade@4$NSViewLayerContentsRedrawDuringViewResize@2$NSViewLayerContentsRedrawNever@0$NSViewLayerContentsRedrawOnSetNeedsDisplay@1$NSViewMaxXMargin@4$NSViewMaxYMargin@32$NSViewMinXMargin@1$NSViewMinYMargin@8$NSViewNotSizable@0$NSViewWidthSizable@2$NSVisualEffectBlendingModeBehindWindow@0$NSVisualEffectBlendingModeWithinWindow@1$NSVisualEffectMaterialAppearanceBased@0$NSVisualEffectMaterialContentBackground@18$NSVisualEffectMaterialDark@2$NSVisualEffectMaterialFullScreenUI@15$NSVisualEffectMaterialHUDWindow@13$NSVisualEffectMaterialHeaderView@10$NSVisualEffectMaterialLight@1$NSVisualEffectMaterialMediumLight@8$NSVisualEffectMaterialMenu@5$NSVisualEffectMaterialPopover@6$NSVisualEffectMaterialSelection@4$NSVisualEffectMaterialSheet@11$NSVisualEffectMaterialSidebar@7$NSVisualEffectMaterialTitlebar@3$NSVisualEffectMaterialToolTip@17$NSVisualEffectMaterialUltraDark@9$NSVisualEffectMaterialUnderPageBackground@22$NSVisualEffectMaterialUnderWindowBackground@21$NSVisualEffectMaterialWindowBackground@12$NSVisualEffectStateActive@1$NSVisualEffectStateFollowsWindowActiveState@0$NSVisualEffectStateInactive@2$NSWantsBidiLevels@4$NSWarningAlertStyle@0$NSWheelModeColorPanel@6$NSWindingRuleEvenOdd@1$NSWindingRuleNonZero@0$NSWindowAbove@1$NSWindowAnimationBehaviorAlertPanel@5$NSWindowAnimationBehaviorDefault@0$NSWindowAnimationBehaviorDocumentWindow@3$NSWindowAnimationBehaviorNone@2$NSWindowAnimationBehaviorUtilityWindow@4$NSWindowBackingLocationDefault@0$NSWindowBackingLocationMainMemory@2$NSWindowBackingLocationVideoMemory@1$NSWindowBelow@-1$NSWindowCloseButton@0$NSWindowCollectionBehaviorCanJoinAllSpaces@1$NSWindowCollectionBehaviorDefault@0$NSWindowCollectionBehaviorFullScreenAllowsTiling@2048$NSWindowCollectionBehaviorFullScreenAuxiliary@256$NSWindowCollectionBehaviorFullScreenDisallowsTiling@4096$NSWindowCollectionBehaviorFullScreenNone@512$NSWindowCollectionBehaviorFullScreenPrimary@128$NSWindowCollectionBehaviorIgnoresCycle@64$NSWindowCollectionBehaviorManaged@4$NSWindowCollectionBehaviorMoveToActiveSpace@2$NSWindowCollectionBehaviorParticipatesInCycle@32$NSWindowCollectionBehaviorStationary@16$NSWindowCollectionBehaviorTransient@8$NSWindowDepthOnehundredtwentyeightBitRGB@544$NSWindowDepthSixtyfourBitRGB@528$NSWindowDepthTwentyfourBitRGB@520$NSWindowDocumentIconButton@4$NSWindowDocumentVersionsButton@6$NSWindowExposedEventType@0$NSWindowFullScreenButton@7$NSWindowListOrderedFrontToBack@1$NSWindowMiniaturizeButton@1$NSWindowMovedEventType@4$NSWindowNumberListAllApplications@1$NSWindowNumberListAllSpaces@16$NSWindowOcclusionStateVisible@2$NSWindowOut@0$NSWindowSharingNone@0$NSWindowSharingReadOnly@1$NSWindowSharingReadWrite@2$NSWindowStyleMaskBorderless@0$NSWindowStyleMaskClosable@2$NSWindowStyleMaskDocModalWindow@64$NSWindowStyleMaskFullScreen@16384$NSWindowStyleMaskFullSizeContentView@32768$NSWindowStyleMaskHUDWindow@8192$NSWindowStyleMaskMiniaturizable@4$NSWindowStyleMaskNonactivatingPanel@128$NSWindowStyleMaskResizable@8$NSWindowStyleMaskTexturedBackground@256$NSWindowStyleMaskTitled@1$NSWindowStyleMaskUnifiedTitleAndToolbar@4096$NSWindowStyleMaskUtilityWindow@16$NSWindowTabbingModeAutomatic@0$NSWindowTabbingModeDisallowed@2$NSWindowTabbingModePreferr
ed@1$NSWindowTitleHidden@1$NSWindowTitleVisible@0$NSWindowToolbarButton@3$NSWindowUserTabbingPreferenceAlways@1$NSWindowUserTabbingPreferenceInFullScreen@2$NSWindowUserTabbingPreferenceManual@0$NSWindowZoomButton@2$NSWindows95InterfaceStyle@2$NSWorkspaceAuthorizationInvalidError@67328$NSWorkspaceAuthorizationTypeCreateSymbolicLink@0$NSWorkspaceAuthorizationTypeReplaceFile@2$NSWorkspaceAuthorizationTypeSetAttributes@1$NSWorkspaceErrorMaximum@67455$NSWorkspaceErrorMinimum@67328$NSWorkspaceLaunchAllowingClassicStartup@131072$NSWorkspaceLaunchAndHide@1048576$NSWorkspaceLaunchAndHideOthers@2097152$NSWorkspaceLaunchAndPrint@2$NSWorkspaceLaunchAsync@65536$NSWorkspaceLaunchDefault@65536$NSWorkspaceLaunchInhibitingBackgroundOnly@128$NSWorkspaceLaunchNewInstance@524288$NSWorkspaceLaunchPreferringClassic@262144$NSWorkspaceLaunchWithErrorPresentation@64$NSWorkspaceLaunchWithoutActivation@512$NSWorkspaceLaunchWithoutAddingToRecents@256$NSWritingDirectionEmbedding@0$NSWritingDirectionLeftToRight@0$NSWritingDirectionNatural@-1$NSWritingDirectionOverride@2$NSWritingDirectionRightToLeft@1$NSYearMonthDatePickerElementFlag@192$NSYearMonthDayDatePickerElementFlag@224$NumGlyphsToGetEachTime@20$''' -misc.update({'NSAnyEventMask': sel32or64(4294967295, 18446744073709551615), 'NSTouchPhaseAny': sel32or64(4294967295, 18446744073709551615), 'NS_USER_ACTIVITY_SUPPORTED': sel32or64(0, 1), 'NSAttachmentCharacter': b'\xef\xbf\xbc'.decode("utf-8"), 'NSDragOperationEvery': sel32or64(4294967295, 18446744073709551615)}) -misc.update({'NSAppKitVersionNumber10_4_1': 824.1, 'NSAppKitVersionNumber10_4_3': 824.23, 'NSAppKitVersionNumber10_3_9': 743.36, 'NSAppKitVersionNumber10_4_4': 824.33, 'NSAppKitVersionNumber10_4_7': 824.41, 'NSAppKitVersionNumber10_3_2': 743.14, 'NSAppKitVersionNumber10_3_3': 743.2, 'NSAppKitVersionNumber10_3_7': 743.33, 'NSAppKitVersionNumber10_3_5': 743.24, 'NSAppKitVersionNumberWithDockTilePlugInSupport': 1001.0, 'NSAppKitVersionNumberWithCursorSizeSupport': 682.0, 'NSAppKitVersionNumber10_5_2': 949.27, 'NSAppKitVersionNumber10_5_3': 949.33, 'NSAppKitVersionNumberWithCustomSheetPosition': 686.0, 'NSAppKitVersionNumber10_2_3': 663.6, 'NSAppKitVersionNumberWithDirectionalTabs': 631.0, 'NSAppKitVersionNumberWithColumnResizingBrowser': 685.0, 'NSAppKitVersionNumberWithDeferredWindowDisplaySupport': 1019.0, 'NSAppKitVersionNumberWithContinuousScrollingBrowser': 680.0, 'NSAppKitVersionNumberWithPatternColorLeakFix': 641.0, 'NSAppKitVersionNumber10_7_4': 1138.47, 'NSAppKitVersionNumber10_7_2': 1138.23, 'NSAppKitVersionNumber10_7_3': 1138.32, 'NSBaselineNotSet': -1.0}) -functions={'NSRectClipList': (sel32or64(b'v^{_NSRect={_NSPoint=ff}{_NSSize=ff}}i', b'v^{CGRect={CGPoint=dd}{CGSize=dd}}q'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'NSApplicationLoad': (b'Z',), 'NSCountWindows': (sel32or64(b'v^i', b'v^q'), '', {'arguments': {0: {'type_modifier': 'o'}}}), 'NSGetAlertPanel': (b'@@@@@@', '', {'arguments': {1: {'printf_format': True}}, 'variadic': True}), 'NSApplicationMain': (b'ii^^c',), 'NSOpenGLGetVersion': (b'v^i^i', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}}}), 'NSAccessibilityActionDescription': (b'@@',), 'NSRunAlertPanelRelativeToWindow': (sel32or64(b'i@@@@@@', b'q@@@@@@'), '', {'arguments': {1: {'printf_format': 1}}, 'variadic': True}), 'NSTouchTypeMaskFromType': (b'Qq', '', {'inline': True}), 'NSDrawLightBezel': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', 
b'v{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSDrawNinePartImage': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}@@@@@@@@@IfZ', b'v{CGRect={CGPoint=dd}{CGSize=dd}}@@@@@@@@@QdZ'),), 'NSOpenGLSetOption': (b'vIi',), 'NSRectClip': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSDottedFrameRect': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSDrawBitmap': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}iiiiiiZZ@[5^C]', b'v{CGRect={CGPoint=dd}{CGSize=dd}}qqqqqqZZ@[5^C]'),), 'NSConvertGlyphsToPackedGlyphs': (sel32or64(b'i^IiI^c', b'q^IqQ^c'),), 'NSGetFileType': (b'@@',), 'NSWindowList': (sel32or64(b'vi^i', b'vq^q'), '', {'arguments': {1: {'c_array_length_in_arg': 0, 'type_modifier': 'o'}}}), 'NSAccessibilityRaiseBadArgumentException': (b'v@@@',), 'NSAccessibilityUnignoredDescendant': (b'@@',), 'NSRectFill': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSGetCriticalAlertPanel': (b'@@@@@@', '', {'arguments': {1: {'printf_format': True}}, 'variadic': True}), 'NSAccessibilityFrameInView': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}@{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}@{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSDrawThreePartImage': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}@@@ZIfZ', b'v{CGRect={CGPoint=dd}{CGSize=dd}}@@@ZQdZ'),), 'NSAccessibilityRoleDescription': (b'@@@',), 'NSRunCriticalAlertPanel': (sel32or64(b'i@@@@@', b'q@@@@@'), '', {'arguments': {1: {'printf_format': 1}}, 'variadic': True}), 'NSFrameRect': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSDrawColorTiledRects': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}^I^@i', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}^Q^@q'), '', {'arguments': {2: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}}}), 'NSBeginCriticalAlertSheet': (b'v@@@@@@::^v@', '', {'arguments': {9: {'printf_format': True}, 6: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 7: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}, 'variadic': True}), 'NSBeginAlertSheet': (b'v@@@@@@::^v@', '', {'arguments': {9: {'printf_format': True}, 6: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 7: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}, 'variadic': True}), 'NSCountWindowsForContext': (sel32or64(b'vi^i', b'vq^q'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'NSGetWindowServerMemory': (sel32or64(b'ii^i^i^@', b'qq^q^q^@'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'NSShowAnimationEffect': (sel32or64(b'vI{_NSPoint=ff}{_NSSize=ff}@:^v', b'vQ{CGPoint=dd}{CGSize=dd}@:^v'), '', {'arguments': {4: {'sel_of_type': b'v@:^v'}}}), 'NSRunCriticalAlertPanelRelativeToWindow': (sel32or64(b'i@@@@@@', b'q@@@@@@'), '', {'arguments': {1: {'printf_format': 1}}, 'variadic': True}), 'NSAccessibilityUnignoredChildren': (b'@@',), 'NSRectFillListUsingOperation': (sel32or64(b'v^{_NSRect={_NSPoint=ff}{_NSSize=ff}}iI', b'v^{CGRect={CGPoint=dd}{CGSize=dd}}qQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'NSCreateFilenamePboardType': (b'@@', '', {'retval': {'already_cfretained': True}}), 'NSInterfaceStyleForKey': (sel32or64(b'I@@', b'Q@@'),), 
'NSAvailableWindowDepths': (b'^i', '', {'retval': {'c_array_delimited_by_null': True}}), 'NSBeginInformationalAlertSheet': (b'v@@@@@@::^v@', '', {'arguments': {9: {'printf_format': True}, 6: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 7: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}, 'variadic': True}), 'NSUnregisterServicesProvider': (b'v@',), 'NSEventMaskFromType': (sel32or64(b'II', b'QQ'),), 'NSRectFillUsingOperation': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}I', b'v{CGRect={CGPoint=dd}{CGSize=dd}}Q'),), 'NSBitsPerSampleFromDepth': (sel32or64(b'ii', b'qi'),), 'NSEnableScreenUpdates': (b'v',), 'NSDrawDarkBezel': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSRunInformationalAlertPanelRelativeToWindow': (sel32or64(b'i@@@@@@', b'q@@@@@@'), '', {'arguments': {1: {'printf_format': 1}}, 'variadic': True}), 'NSPerformService': (b'Z@@',), 'NSGetFileTypes': (b'@@',), 'NSDrawWhiteBezel': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSReleaseAlertPanel': (b'v@',), 'NSAccessibilityUnignoredAncestor': (b'@@',), 'NSAccessibilityPostNotificationWithUserInfo': (b'v@@@',), 'NSSetFocusRingStyle': (sel32or64(b'vI', b'vQ'),), 'NSAccessibilityPostNotification': (b'v@@',), 'NSDrawTiledRects': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}^I^fi', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}^Q^dq'), '', {'arguments': {2: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}}}), 'NSAccessibilityPointInView': (sel32or64(b'{_NSPoint=ff}@{_NSPoint=ff}', b'{CGPoint=dd}@{CGPoint=dd}'),), 'NSUpdateDynamicServices': (b'v',), 'NSIsControllerMarker': (b'Z@',), 'NSDrawButton': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSSetShowsServicesMenuItem': (sel32or64(b'i@Z', b'q@Z'),), 'NSOpenGLGetOption': (b'vI^i', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'NSCreateFileContentsPboardType': (b'@@', '', {'retval': {'already_cfretained': True}}), 'NSCopyBits': (sel32or64(b'vi{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSPoint=ff}', b'vq{CGRect={CGPoint=dd}{CGSize=dd}}{CGPoint=dd}'), '', {'retval': {'already_cfretained': True}}), 'NSDisableScreenUpdates': (b'v',), 'NSEdgeInsetsMake': (sel32or64(b'{NSEdgeInsets=ffff}ffff', b'{NSEdgeInsets=dddd}dddd'),), 'NSReadPixel': (sel32or64(b'@{_NSPoint=ff}', b'@{CGPoint=dd}'),), 'NSWindowListForContext': (sel32or64(b'vii^i', b'vqq^q'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'NSAccessibilityRoleDescriptionForUIElement': (b'@@',), 'NSDrawWindowBackground': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSShowsServicesMenuItem': (b'Z@',), 'NSPlanarFromDepth': (b'Zi',), 'NSHighlightRect': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSFrameRectWithWidthUsingOperation': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}fI', b'v{CGRect={CGPoint=dd}{CGSize=dd}}dQ'),), 'NSRectFillListWithColorsUsingOperation': (sel32or64(b'v^{_NSRect={_NSPoint=ff}{_NSSize=ff}}^@iI', b'v^{CGRect={CGPoint=dd}{CGSize=dd}}^@qQ'), '', {'arguments': {0: 
{'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'NSDrawGroove': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSNumberOfColorComponents': (sel32or64(b'i@', b'q@'),), 'NSFrameRectWithWidth': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}f', b'v{CGRect={CGPoint=dd}{CGSize=dd}}d'),), 'NSEraseRect': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSRectFillListWithColors': (sel32or64(b'v^{_NSRect={_NSPoint=ff}{_NSSize=ff}}^@i', b'v^{CGRect={CGPoint=dd}{CGSize=dd}}^@q'), '', {'arguments': {0: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'NSBestDepth': (sel32or64(b'i@iiZ^Z', b'i@qqZ^Z'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'NSColorSpaceFromDepth': (b'@i',), 'NSBeep': (b'v',), 'NSAccessibilitySetMayContainProtectedContent': (b'ZZ',), 'NSBitsPerPixelFromDepth': (sel32or64(b'ii', b'qi'),), 'NSAccessibilityUnignoredChildrenForOnlyChild': (b'@@',), 'NSDrawGrayBezel': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSRectFillList': (sel32or64(b'v^{_NSRect={_NSPoint=ff}{_NSSize=ff}}i', b'v^{CGRect={CGPoint=dd}{CGSize=dd}}q'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'NSRunAlertPanel': (sel32or64(b'i@@@@@', b'q@@@@@'), '', {'arguments': {1: {'printf_format': 1}}, 'variadic': True}), 'NSGetInformationalAlertPanel': (b'@@@@@@', '', {'arguments': {1: {'printf_format': True}}, 'variadic': True}), 'NSRectFillListWithGrays': (sel32or64(b'v^{_NSRect={_NSPoint=ff}{_NSSize=ff}}^fi', b'v^{CGRect={CGPoint=dd}{CGSize=dd}}^dq'), '', {'arguments': {0: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'NSRunInformationalAlertPanel': (sel32or64(b'i@@@@@', b'q@@@@@'), '', {'arguments': {1: {'printf_format': 1}}, 'variadic': True}), 'NSRegisterServicesProvider': (b'v@@',)} -aliases = {'NSLayoutFormatAlignAllLastBaseline': 'NSLayoutFormatAlignAllBaseline', 'NSImageRepRegistryChangedNotification': 'NSImageRepRegistryDidChangeNotification', 'NSDragOperationAll': 'NSDragOperationAll_Obsolete', 'NSTickMarkRight': 'NSTickMarkBelow', 'NSModalPanelWindowLevel': 'kCGModalPanelWindowLevel', 'NSSubmenuWindowLevel': 'kCGTornOffMenuWindowLevel', 'NSTickMarkPositionTrailing': 'NSTickMarkPositionBelow', 'NSGestureRecognizerStateRecognized': 'NSGestureRecognizerStateEnded', 'NSGridCellPlacementBottom': 'NSGridCellPlacementTrailing', 'NSTornOffMenuWindowLevel': 'kCGTornOffMenuWindowLevel', 'NSPopUpMenuWindowLevel': 'kCGPopUpMenuWindowLevel', 'NSLayoutAttributeBaseline': 'NSLayoutAttributeLastBaseline', 'NSGridCellPlacementTop': 'NSGridCellPlacementLeading', 'NSMainMenuWindowLevel': 'kCGMainMenuWindowLevel', 'NSDraggingItemEnumerationConcurrent': 'NSEnumerationConcurrent', 'NSScreenSaverWindowLevel': 'kCGScreenSaverWindowLevel', 'NSEventDurationForever': 'DBL_MAX', 'APPKIT_PRIVATE_EXTERN': '__private_extern__', 'NSLayoutAttributeLastBaseline': 'NSLayoutAttributeBaseline', 'IBAction': 'void', 'NSFileHandlingPanelCancelButton': 'NSModalResponseCancel', 'NSWorkspaceLaunchDefault': 'NSWorkspaceLaunchAsync', 'NSNormalWindowLevel': 'kCGNormalWindowLevel', 'NSFileHandlingPanelOKButton': 'NSModalResponseOK', 'NSFloatingWindowLevel': 
'kCGFloatingWindowLevel', 'NSTickMarkPositionLeading': 'NSTickMarkPositionAbove', 'NSDockWindowLevel': 'kCGDockWindowLevel', 'NSTickMarkLeft': 'NSTickMarkAbove', 'NSStackViewSpacingUseDefault': 'FLT_MAX', 'NSStatusWindowLevel': 'kCGStatusWindowLevel'} -misc.update({'NSModalSession': objc.createOpaquePointerType('NSModalSession', b'^{_NSModalSession}')}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CIImage', b'drawAtPoint:fromRect:operation:fraction:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'CIImage', b'drawInRect:fromRect:operation:fraction:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSATSTypesetter', b'bidiProcessingEnabled', {'retval': {'type': 'Z'}}) - r(b'NSATSTypesetter', b'boundingBoxForControlGlyphAtIndex:forTextContainer:proposedLineFragment:glyphPosition:characterIndex:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 6: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSATSTypesetter', b'characterRangeForGlyphRange:actualGlyphRange:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o'}}}) - r(b'NSATSTypesetter', b'deleteGlyphsInRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSATSTypesetter', b'getGlyphsInRange:glyphs:characterIndexes:glyphInscriptions:elasticBits:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 4: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 5: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 6: {'type': '^Z', 'type_modifier': b'o', 'c_array_length_in_arg': 2}}}) - r(b'NSATSTypesetter', b'getLineFragmentRect:usedRect:forParagraphSeparatorGlyphRange:atProposedOrigin:', {'retval': {'type': 'v'}, 'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSATSTypesetter', b'glyphRangeForCharacterRange:actualCharacterRange:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o'}}}) - r(b'NSATSTypesetter', b'layoutParagraphAtPoint:', {'arguments': {2: {'type_modifier': b'N'}}}) - r(b'NSATSTypesetter', b'lineFragmentRectForProposedRect:remainingRect:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type_modifier': b'o'}}}) - r(b'NSATSTypesetter', b'lineSpacingAfterGlyphAtIndex:withProposedLineFragmentRect:', {'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSATSTypesetter', b'paragraphGlyphRange', {'retval': {'type': 
sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSATSTypesetter', b'paragraphSeparatorGlyphRange', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSATSTypesetter', b'paragraphSpacingAfterGlyphAtIndex:withProposedLineFragmentRect:', {'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSATSTypesetter', b'paragraphSpacingBeforeGlyphAtIndex:withProposedLineFragmentRect:', {'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSATSTypesetter', b'setAttachmentSize:forGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSATSTypesetter', b'setBidiLevels:forGlyphRange:', {'arguments': {2: {'type': '^z', 'type_modifier': b'n', 'c_array_length_in_arg': 3}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSATSTypesetter', b'setBidiProcessingEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSATSTypesetter', b'setDrawsOutsideLineFragment:forGlyphRange:', {'arguments': {2: {'type': 'Z'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSATSTypesetter', b'setHardInvalidation:forGlyphRange:', {'arguments': {2: {'type': 'Z'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSATSTypesetter', b'setLineFragmentRect:forGlyphRange:usedRect:baselineOffset:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSATSTypesetter', b'setLocation:withAdvancements:forStartOfGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 3: {'type_modifier': b'n', 'c_array_length_in_arg': 4}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSATSTypesetter', b'setNotShownAttribute:forGlyphRange:', {'arguments': {2: {'type': 'Z'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSATSTypesetter', b'setParagraphGlyphRange:separatorGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSATSTypesetter', b'setUsesFontLeading:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSATSTypesetter', b'shouldBreakLineByHyphenatingBeforeCharacterAtIndex:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSATSTypesetter', b'shouldBreakLineByWordBeforeCharacterAtIndex:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSATSTypesetter', b'substituteGlyphsInRange:withGlyphs:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'n', 'c_array_length_in_arg': 2}}}) - r(b'NSATSTypesetter', b'usesFontLeading', {'retval': {'type': 'Z'}}) - r(b'NSATSTypesetter', b'willSetLineFragmentRect:forGlyphRange:usedRect:baselineOffset:', {'arguments': {2: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}, 5: {'type': 
sel32or64(b'^f', b'^d'), 'type_modifier': b'N'}}}) - r(b'NSAccessibilityCustomAction', b'handler', {'retval': {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'NSAccessibilityCustomAction', b'initWithName:handler:', {'arguments': {3: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSAccessibilityCustomAction', b'initWithName:target:selector:', {'arguments': {4: {'sel_of_type': b'Z@:'}}}) - r(b'NSAccessibilityCustomAction', b'setHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSActionCell', b'setAction:', {'retval': {'type': 'v'}, 'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSActionCell', b'setBezeled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSActionCell', b'setBordered:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSActionCell', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSActionCell', b'setFloatingPointFormat:left:right:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSAlert', b'alertWithMessageText:defaultButton:alternateButton:otherButton:informativeTextWithFormat:', {'arguments': {6: {'printf_format': True, 'type': '@'}}, 'variadic': True}) - r(b'NSAlert', b'beginSheetModalForWindow:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'NSAlert', b'beginSheetModalForWindow:modalDelegate:didEndSelector:contextInfo:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 5: {'type': '^v'}}}) - r(b'NSAlert', b'setShowsHelp:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSAlert', b'setShowsSuppressionButton:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSAlert', b'showsHelp', {'retval': {'type': 'Z'}}) - r(b'NSAlert', b'showsSuppressionButton', {'retval': {'type': 'Z'}}) - r(b'NSAnimation', b'isAnimating', {'retval': {'type': 'Z'}}) - r(b'NSAnimationContext', b'allowsImplicitAnimation', {'retval': {'type': b'Z'}}) - r(b'NSAnimationContext', b'completionHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'NSAnimationContext', b'runAnimationGroup:completionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSAnimationContext', b'setAllowsImplicitAnimation:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSAnimationContext', b'setCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSAppearance', b'allowsVibrancy', {'retval': {'type': b'Z'}}) - r(b'NSApplication', b'activateIgnoringOtherApps:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSApplication', b'addWindowsItem:title:filename:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSApplication', b'beginSheet:modalForWindow:modalDelegate:didEndSelector:contextInfo:', {'arguments': {5: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 6: {'type': '^v'}}}) - r(b'NSApplication', b'changeWindowsItem:title:filename:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSApplication', b'detachDrawingThread:toTarget:withObject:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSApplication', b'enumerateWindowsWithOptions:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^Z'}}}}}}) - r(b'NSApplication', b'isActive', {'retval': 
{'type': 'Z'}}) - r(b'NSApplication', b'isAutomaticCustomizeTouchBarMenuItemEnabled', {'retval': {'type': 'Z'}}) - r(b'NSApplication', b'isFullKeyboardAccessEnabled', {'retval': {'type': 'Z'}}) - r(b'NSApplication', b'isHidden', {'retval': {'type': 'Z'}}) - r(b'NSApplication', b'isRegisteredForRemoteNotifications', {'retval': {'type': b'Z'}}) - r(b'NSApplication', b'isRunning', {'retval': {'type': 'Z'}}) - r(b'NSApplication', b'makeWindowsPerform:inOrder:', {'arguments': {2: {'sel_of_type': b'v@:'}, 3: {'type': 'Z'}}}) - r(b'NSApplication', b'nextEventMatchingMask:untilDate:inMode:dequeue:', {'arguments': {5: {'type': 'Z'}}}) - r(b'NSApplication', b'postEvent:atStart:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSApplication', b'replyToApplicationShouldTerminate:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSApplication', b'restoreWindowWithIdentifier:state:completionHandler:', {'retval': {'type': b'Z'}, 'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSApplication', b'searchString:inUserInterfaceItemString:searchRange:foundRange:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}'), 'type_modifier': b'o'}}}) - r(b'NSApplication', b'sendAction:to:from:', {'retval': {'type': 'Z'}, 'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSApplication', b'setActivationPolicy:', {'retval': {'type': 'Z'}}) - r(b'NSApplication', b'setAutomaticCustomizeTouchBarMenuItemEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSApplication', b'setWindowsNeedUpdate:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSApplication', b'targetForAction:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSApplication', b'targetForAction:to:from:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSApplication', b'tryToPerform:with:', {'retval': {'type': 'Z'}, 'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSArrayController', b'addSelectedObjects:', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'addSelectionIndexes:', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'alwaysUsesMultipleValuesMarker', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'automaticallyRearrangesObjects', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'avoidsEmptySelection', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'canInsert', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'canSelectNext', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'canSelectPrevious', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'clearsFilterPredicateOnInsertion', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'preservesSelection', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'removeSelectedObjects:', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'removeSelectionIndexes:', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'selectsInsertedObjects', {'retval': {'type': 'Z'}}) - r(b'NSArrayController', b'setAlwaysUsesMultipleValuesMarker:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSArrayController', b'setAutomaticallyRearrangesObjects:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSArrayController', b'setAvoidsEmptySelection:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSArrayController', b'setClearsFilterPredicateOnInsertion:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSArrayController', b'setPreservesSelection:', {'arguments': {2: {'type': 'Z'}}}) - 
[Elided deletion hunk: vendored, auto-generated PyObjC bridge-metadata registrations for AppKit classes, removed along with the rest of the old Python 2.7 virtualenv. The original diff line breaks were lost here, fusing hundreds of removed lines together; each removed line is a call of the form `r(b'<Class>', b'<selector:>', {...})` that registers Objective-C type information for one selector: `'Z'` marks BOOL return values and arguments, `sel32or64(...)` picks between the 32-bit (`_NSPoint`/`_NSSize`/`_NSRect`, float fields) and 64-bit (`CGPoint`/`CGSize`/`CGRect`, double fields) struct encodings, and `type_modifier` flags in/out pointer parameters. The registrations run alphabetically from NSArrayController and NSAttributedString through NSBezierPath, NSBitmapImageRep, NSBrowser, NSButton, NSCell, NSClipView, NSCollectionView, NSColor, NSComboBox, NSControl, NSDocument, NSDocumentController, NSDraggingSession, NSEvent, NSFileWrapper, NSFont, NSGestureRecognizer, NSGradient, NSGraphicsContext, NSHelpManager, and NSImage.]
b'compositeToPoint:operation:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSImage', b'compositeToPoint:operation:fraction:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSImage', b'dissolveToPoint:fraction:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSImage', b'dissolveToPoint:fromRect:fraction:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSImage', b'drawAtPoint:fromRect:operation:fraction:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSImage', b'drawInRect:fromRect:operation:fraction:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSImage', b'drawInRect:fromRect:operation:fraction:respectFlipped:hints:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 6: {'type': 'Z'}}}) - r(b'NSImage', b'drawRepresentation:inRect:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSImage', b'hitTestRect:withImageDestinationRect:context:hints:flipped:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 6: {'type': 'Z'}}}) - r(b'NSImage', b'hitTestRect:withImageDestinationRect:context_hints:flipped:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSImage', b'imageWithSize:flipped:drawingHandler:', {'arguments': {3: {'type': b'Z'}, 4: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}}}}) - r(b'NSImage', b'initWithCGImage:size:', {'arguments': {3: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSImage', b'initWithIconRef:', {'arguments': {2: {'type': '^{OpaqueIconRef=}'}}}) - r(b'NSImage', b'initWithSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSImage', b'isCachedSeparately', {'retval': {'type': 'Z'}}) - r(b'NSImage', b'isDataRetained', {'retval': {'type': 'Z'}}) - r(b'NSImage', b'isFlipped', {'retval': {'type': 'Z'}}) - r(b'NSImage', b'isTemplate', {'retval': {'type': 'Z'}}) - r(b'NSImage', b'isValid', {'retval': {'type': 'Z'}}) - r(b'NSImage', b'lockFocusFlipped:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'matchesOnMultipleResolution', {'retval': {'type': 'Z'}}) - r(b'NSImage', b'matchesOnlyOnBestFittingAxis', {'retval': {'type': b'Z'}}) - r(b'NSImage', b'prefersColorMatch', {'retval': {'type': 'Z'}}) - r(b'NSImage', b'scalesWhenResized', {'retval': {'type': 'Z'}}) - 
r(b'NSImage', b'setAlignmentRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSImage', b'setCacheDepthMatchesImageDepth:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'setCachedSeparately:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'setDataRetained:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'setFlipped:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'setMatchesOnMultipleResolution:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'setMatchesOnlyOnBestFittingAxis:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSImage', b'setName:', {'retval': {'type': 'Z'}}) - r(b'NSImage', b'setPrefersColorMatch:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'setScalesWhenResized:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'setSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSImage', b'setTemplate:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'setUsesEPSOnResolutionMismatch:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImage', b'size', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSImage', b'usesEPSOnResolutionMismatch', {'retval': {'type': 'Z'}}) - r(b'NSImageRep', b'CGImageForProposedRect:context:hints:', {'arguments': {2: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}}}) - r(b'NSImageRep', b'canInitWithData:', {'retval': {'type': 'Z'}}) - r(b'NSImageRep', b'canInitWithPasteboard:', {'retval': {'type': 'Z'}}) - r(b'NSImageRep', b'draw', {'retval': {'type': 'Z'}}) - r(b'NSImageRep', b'drawAtPoint:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSImageRep', b'drawInRect:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSImageRep', b'drawInRect:fromRect:operation:fraction:respectFlipped:hints:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 6: {'type': 'Z'}}}) - r(b'NSImageRep', b'hasAlpha', {'retval': {'type': 'Z'}}) - r(b'NSImageRep', b'isOpaque', {'retval': {'type': 'Z'}}) - r(b'NSImageRep', b'setAlpha:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImageRep', b'setOpaque:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImageRep', b'setSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSImageRep', b'size', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSImageView', b'allowsCutCopyPaste', {'retval': {'type': 'Z'}}) - r(b'NSImageView', b'animates', {'retval': {'type': 'Z'}}) - r(b'NSImageView', b'isEditable', {'retval': {'type': 'Z'}}) - r(b'NSImageView', b'setAllowsCutCopyPaste:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImageView', b'setAnimates:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSImageView', b'setEditable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSInputManager', b'handleMouseEvent:', {'retval': {'type': 'Z'}}) - r(b'NSInputManager', b'markedTextSelectionChanged:client:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSInputManager', b'wantsToDelayTextChangeNotifications', {'retval': {'type': 'Z'}}) - r(b'NSInputManager', b'wantsToHandleMouseEvents', 
{'retval': {'type': 'Z'}}) - r(b'NSInputManager', b'wantsToInterpretAllKeystrokes', {'retval': {'type': 'Z'}}) - r(b'NSItemProvider', b'loadItemForTypeIdentifier:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSItemProvider', b'loadObjectOfClass:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSItemProvider', b'registerItemForTypeIdentifier:loadHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'callable': {'args': {0: {'type': '^@'}, 1: {'type': '@'}, 2: {'type': '@'}}, 'retval': {'type': 'v'}}, 'type': b'@?'}, 2: {'type': b'#'}, 3: {'type': b'@'}}}}}}) - r(b'NSLayoutAnchor', b'hasAmbiguousLayout', {'retval': {'type': b'Z'}}) - r(b'NSLayoutConstraint', b'isActive', {'retval': {'type': b'Z'}}) - r(b'NSLayoutConstraint', b'setActive:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSLayoutConstraint', b'setShouldBeArchived:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSLayoutConstraint', b'shouldBeArchived', {'retval': {'type': b'Z'}}) - r(b'NSLayoutGuide', b'hasAmbiguousLayout', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'CGGlyphAtIndex:isValidIndex:', {'arguments': {3: {'type': '^Z', 'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'addTemporaryAttribute:value:forCharacterRange:', {'arguments': {4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'addTemporaryAttributes:forCharacterRange:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'allowsNonContiguousLayout', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'attachmentSizeForGlyphAtIndex:', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSLayoutManager', b'backgroundLayoutEnabled', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'boundingRectForGlyphRange:inTextContainer:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'boundsRectForTextBlock:atIndex:effectiveRange:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'boundsRectForTextBlock:glyphRange:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'characterIndexForPoint:inTextContainer:fractionOfDistanceBetweenInsertionPoints:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'characterRangeForGlyphRange:actualGlyphRange:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'deleteGlyphsInRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'drawBackgroundForGlyphRange:atPoint:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSLayoutManager', 
b'drawGlyphsForGlyphRange:atPoint:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSLayoutManager', b'drawStrikethroughForGlyphRange:strikethroughType:baselineOffset:lineFragmentRect:lineFragmentGlyphRange:containerOrigin:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 6: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 7: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSLayoutManager', b'drawUnderlineForGlyphRange:underlineType:baselineOffset:lineFragmentRect:lineFragmentGlyphRange:containerOrigin:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 6: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 7: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSLayoutManager', b'drawsOutsideLineFragmentForGlyphAtIndex:', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'ensureGlyphsForCharacterRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'ensureGlyphsForGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'ensureLayoutForBoundingRect:inTextContainer:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSLayoutManager', b'ensureLayoutForCharacterRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'ensureLayoutForGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'enumerateEnclosingRectsForGlyphRange:withinSelectedGlyphRange:inTextContainer:usingBlock:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 2: {'type': b'o^Z'}}}}}}) - r(b'NSLayoutManager', b'enumerateLineFragmentsForGlyphRange:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': b'o^Z'}}}}}}) - r(b'NSLayoutManager', b'extraLineFragmentRect', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSLayoutManager', b'extraLineFragmentUsedRect', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSLayoutManager', b'fillBackgroundRectArray:count:forCharacterRange:color:', {'arguments': {2: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N', 'c_array_length_in_arg': 3}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'fractionOfDistanceThroughGlyphForPoint:inTextContainer:', {'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - 
r(b'NSLayoutManager', b'getFirstUnlaidCharacterIndex:glyphIndex:', {'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'getGlyphs:range:', {'arguments': {2: {'type_modifier': b'o', 'c_array_length_in_arg': 3}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'getGlyphsInRange:glyphs:characterIndexes:glyphInscriptions:elasticBits:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 4: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 5: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 6: {'type': '^Z', 'type_modifier': b'o', 'c_array_length_in_arg': 2}}}) - r(b'NSLayoutManager', b'getGlyphsInRange:glyphs:characterIndexes:glyphInscriptions:elasticBits:bidiLevels:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 4: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 5: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 6: {'type': '^Z', 'type_modifier': b'o', 'c_array_length_in_arg': 2}, 7: {'type': '^C', 'type_modifier': b'o', 'c_array_length_in_arg': 2}}}) - r(b'NSLayoutManager', b'getGlyphsInRange:glyphs:properties:characterIndexes:bidiLevels:', {'arguments': {3: {'type': '^I', 'type_modifier': b'o', 'c_array_length_in_arg': 2}, 4: {'type': '^I', 'type_modifier': b'o', 'c_array_length_in_arg': 2}, 5: {'type': '^I', 'type_modifier': b'o', 'c_array_length_in_arg': 2}, 6: {'type': '^I', 'type_modifier': b'o', 'c_array_length_in_arg': 2}}}) - r(b'NSLayoutManager', b'getLineFragmentInsertionPointsForCharacterAtIndex:alternatePositions:inDisplayOrder:positions:characterIndexes:', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}, 5: {'type': sel32or64(b'r^f', b'r^d'), 'type_modifier': b'o'}, 6: {'type': sel32or64(b'^I', b'^Q'), 'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'glyphAtIndex:isValidIndex:', {'arguments': {3: {'type': '^Z', 'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'glyphIndexForPoint:inTextContainer:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSLayoutManager', b'glyphIndexForPoint:inTextContainer:fractionOfDistanceThroughGlyph:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'glyphRangeForBoundingRect:inTextContainer:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSLayoutManager', b'glyphRangeForBoundingRectWithoutAdditionalLayout:inTextContainer:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSLayoutManager', b'glyphRangeForCharacterRange:actualCharacterRange:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'glyphRangeForTextContainer:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSLayoutManager', b'hasNonContiguousLayout', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'insertGlyphs:length:forStartingGlyphAtIndex:characterIndex:', {'arguments': {2: {'type': '^I', 'type_modifier': b'n', 
'c_array_length_in_arg': 3}}}) - r(b'NSLayoutManager', b'invalidateDisplayForCharacterRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'invalidateDisplayForGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'invalidateGlyphsForCharacterRange:changeInLength:actualCharacterRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}'), 'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'invalidateGlyphsOnLayoutInvalidationForGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'invalidateLayoutForCharacterRange:actualCharacterRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}'), 'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'invalidateLayoutForCharacterRange:isSoft:actualCharacterRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': 'Z'}, 4: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}'), 'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'isValidGlyphIndex:', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'layoutManagerOwnsFirstResponderInWindow:', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'layoutRectForTextBlock:atIndex:effectiveRange:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {4: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}'), 'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'layoutRectForTextBlock:glyphRange:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'lineFragmentRectForGlyphAtIndex:effectiveRange:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {3: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}'), 'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'lineFragmentRectForGlyphAtIndex:effectiveRange:withoutAdditionalLayout:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {3: {'type_modifier': b'o'}, 4: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'lineFragmentUsedRectForGlyphAtIndex:effectiveRange:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'lineFragmentUsedRectForGlyphAtIndex:effectiveRange:withoutAdditionalLayout:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {3: {'type_modifier': b'o'}, 4: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'locationForGlyphAtIndex:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}) - r(b'NSLayoutManager', b'notShownAttributeForGlyphAtIndex:', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'rangeOfNominallySpacedGlyphsContainingIndex:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSLayoutManager', b'rectArrayForCharacterRange:withinSelectedCharacterRange:inTextContainer:rectCount:', {'retval': {'c_array_length_in_arg': 5}, 'arguments': {2: {'type': 
sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'rectArrayForGlyphRange:withinSelectedGlyphRange:inTextContainer:rectCount:', {'retval': {'c_array_length_in_arg': 5}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'removeTemporaryAttribute:forCharacterRange:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'rulerAccessoryViewForTextView:paragraphStyle:ruler:enabled:', {'arguments': {5: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'setAllowsNonContiguousLayout:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'setAttachmentSize:forGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'setBackgroundLayoutEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'setBoundsRect:forTextBlock:glyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'setDrawsOutsideLineFragment:forGlyphAtIndex:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'setExtraLineFragmentRect:usedRect:textContainer:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSLayoutManager', b'setGlyphs:properties:characterIndexes:font:forGlyphRange:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 6}, 3: {'type_modifier': b'n', 'c_array_length_in_arg': 6}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 6}}}) - r(b'NSLayoutManager', b'setLayoutRect:forTextBlock:glyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'setLineFragmentRect:forGlyphRange:usedRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSLayoutManager', b'setLocation:forStartOfGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'setLocations:startingGlyphIndexes:count:forGlyphRange:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 4}, 3: {'type_modifier': b'n', 'c_array_length_in_arg': 4}, 5: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'setNotShownAttribute:forGlyphAtIndex:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'setShowsControlCharacters:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'setShowsInvisibleCharacters:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'setTemporaryAttributes:forCharacterRange:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', 
b'setTextContainer:forGlyphRange:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'setUsesFontLeading:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'setUsesScreenFonts:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'showAttachmentCell:inRect:characterIndex:', {'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSLayoutManager', b'showCGGlyphs:positions:count:font:matrix:attributes:inContext:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 4}, 3: {'type_modifier': b'n', 'c_array_length_in_arg': 4}}}) - r(b'NSLayoutManager', b'showPackedGlyphs:length:glyphRange:atPoint:font:color:printingAdjustment:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 8: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSLayoutManager', b'showsControlCharacters', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'showsInvisibleCharacters', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'strikethroughGlyphRange:strikethroughType:lineFragmentRect:lineFragmentGlyphRange:containerOrigin:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 6: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSLayoutManager', b'temporaryAttribute:atCharacterIndex:effectiveRange:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'temporaryAttribute:atCharacterIndex:longestEffectiveRange:inRange:', {'arguments': {4: {'type_modifier': b'o'}, 5: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'temporaryAttributesAtCharacterIndex:effectiveRange:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'temporaryAttributesAtCharacterIndex:longestEffectiveRange:inRange:', {'arguments': {3: {'type_modifier': b'o'}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'textContainerForGlyphAtIndex:effectiveRange:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSLayoutManager', b'textContainerForGlyphAtIndex:effectiveRange:withoutAdditionalLayout:', {'arguments': {3: {'type_modifier': b'o'}, 4: {'type': 'Z'}}}) - r(b'NSLayoutManager', b'textStorage:edited:range:changeInLength:invalidatedRange:', {'arguments': {3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 6: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSLayoutManager', b'underlineGlyphRange:underlineType:lineFragmentRect:lineFragmentGlyphRange:containerOrigin:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 6: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSLayoutManager', b'usedRectForTextContainer:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSLayoutManager', b'usesFontLeading', {'retval': {'type': 'Z'}}) - r(b'NSLayoutManager', b'usesScreenFonts', 
{'retval': {'type': 'Z'}}) - r(b'NSLevelIndicator', b'drawsTieredCapacityLevels', {'retval': {'type': 'Z'}}) - r(b'NSLevelIndicator', b'isEditable', {'retval': {'type': 'Z'}}) - r(b'NSLevelIndicator', b'rectOfTickMarkAtIndex:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSLevelIndicator', b'setDrawsTieredCapacityLevels:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLevelIndicator', b'setEditable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSLevelIndicatorCell', b'rectOfTickMarkAtIndex:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSMatrix', b'acceptsFirstMouse:', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'allowsEmptySelection', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'autorecalculatesCellSize', {'retval': {'type': b'Z'}}) - r(b'NSMatrix', b'autosizesCells', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'cellFrameAtRow:column:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSMatrix', b'cellSize', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSMatrix', b'context:', {'arguments': {2: {'type': '^v'}}}) - r(b'NSMatrix', b'drawsBackground', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'drawsCellBackground', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'getNumberOfRows:columns:', {'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}}}) - r(b'NSMatrix', b'getRow:column:forPoint:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}, 4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSMatrix', b'getRow:column:ofCell:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}}}) - r(b'NSMatrix', b'highlightCell:atRow:column:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'initWithFrame:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMatrix', b'initWithFrame:mode:cellClass:numberOfRows:numberOfColumns:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMatrix', b'initWithFrame:mode:prototype:numberOfRows:numberOfColumns:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMatrix', b'intercellSpacing', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSMatrix', b'isAutoscroll', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'isSelectionByRect', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'performKeyEquivalent:', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'selectCellWithTag:', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'sendAction', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'sendAction:to:forAllCells:', {'arguments': {2: {'sel_of_type': b'Z@:@'}, 4: {'type': 'Z'}}}) - r(b'NSMatrix', b'setAllowsEmptySelection:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'setAutorecalculatesCellSize:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSMatrix', b'setAutoscroll:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'setAutosizesCells:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'setCellSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSMatrix', b'setDoubleAction:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSMatrix', 
b'setDrawsBackground:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'setDrawsCellBackground:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'setIntercellSpacing:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSMatrix', b'setScrollable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'setSelectionByRect:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'setSelectionFrom:to:anchor:highlight:', {'arguments': {5: {'type': 'Z'}}}) - r(b'NSMatrix', b'setTabKeyTraversesCells:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'setValidateSize:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMatrix', b'sortUsingFunction:context:', {'arguments': {2: {'callable': {'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'callable_retained': False}, 3: {'type': '@'}}}) - r(b'NSMatrix', b'sortUsingSelector:', {'arguments': {2: {'sel_of_type': sel32or64(b'i@:@', b'q@:@')}}}) - r(b'NSMatrix', b'tabKeyTraversesCells', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'textShouldBeginEditing:', {'retval': {'type': 'Z'}}) - r(b'NSMatrix', b'textShouldEndEditing:', {'retval': {'type': 'Z'}}) - r(b'NSMediaLibraryBrowserController', b'isVisible', {'retval': {'type': b'Z'}}) - r(b'NSMediaLibraryBrowserController', b'setVisible:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSMenu', b'addItemWithTitle:action:keyEquivalent:', {'arguments': {3: {'sel_of_type': b'v@:@'}}}) - r(b'NSMenu', b'allowsContextMenuPlugIns', {'retval': {'type': 'Z'}}) - r(b'NSMenu', b'autoenablesItems', {'retval': {'type': 'Z'}}) - r(b'NSMenu', b'indexOfItemWithTarget:andAction:', {'arguments': {3: {'sel_of_type': b'v@:@'}}}) - r(b'NSMenu', b'insertItemWithTitle:action:keyEquivalent:atIndex:', {'arguments': {3: {'sel_of_type': b'v@:@'}}}) - r(b'NSMenu', b'isAttached', {'retval': {'type': 'Z'}}) - r(b'NSMenu', b'isTornOff', {'retval': {'type': 'Z'}}) - r(b'NSMenu', b'locationForSubmenu:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}) - r(b'NSMenu', b'menuBarVisible', {'retval': {'type': 'Z'}}) - r(b'NSMenu', b'menuChangedMessagesEnabled', {'retval': {'type': 'Z'}}) - r(b'NSMenu', b'menuZone', {'retval': {'type': '^{_NSZone=}'}}) - r(b'NSMenu', b'performKeyEquivalent:', {'retval': {'type': 'Z'}}) - r(b'NSMenu', b'popUpMenuPositioningItem:atLocation:inView:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSMenu', b'setAllowsContextMenuPlugIns:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenu', b'setAutoenablesItems:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenu', b'setMenuBarVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenu', b'setMenuChangedMessagesEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenu', b'setMenuZone:', {'arguments': {2: {'type': '^{_NSZone=}'}}}) - r(b'NSMenu', b'setShowsStateColumn:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenu', b'showsStateColumn', {'retval': {'type': 'Z'}}) - r(b'NSMenuItem', b'allowsKeyEquivalentWhenHidden', {'retval': {'type': 'Z'}}) - r(b'NSMenuItem', b'hasSubmenu', {'retval': {'type': 'Z'}}) - r(b'NSMenuItem', b'initWithTitle:action:keyEquivalent:', {'arguments': {3: {'sel_of_type': b'v@:@'}}}) - r(b'NSMenuItem', b'isAlternate', {'retval': {'type': 'Z'}}) - r(b'NSMenuItem', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'NSMenuItem', b'isHidden', {'retval': {'type': 'Z'}}) - r(b'NSMenuItem', b'isHiddenOrHasHiddenAncestor', {'retval': {'type': 'Z'}}) - r(b'NSMenuItem', 
b'isHighlighted', {'retval': {'type': 'Z'}}) - r(b'NSMenuItem', b'isSeparatorItem', {'retval': {'type': 'Z'}}) - r(b'NSMenuItem', b'setAction:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSMenuItem', b'setAllowsKeyEquivalentWhenHidden:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenuItem', b'setAlternate:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenuItem', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenuItem', b'setHidden:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenuItem', b'setUsesUserKeyEquivalents:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSMenuItem', b'usesUserKeyEquivalents', {'retval': {'type': b'Z'}}) - r(b'NSMenuItemCell', b'drawBorderAndBackgroundWithFrame:inView:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuItemCell', b'drawImageWithFrame:inView:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuItemCell', b'drawKeyEquivalentWithFrame:inView:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuItemCell', b'drawSeparatorItemWithFrame:inView:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuItemCell', b'drawStateImageWithFrame:inView:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuItemCell', b'drawTitleWithFrame:inView:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuItemCell', b'keyEquivalentRectForBounds:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuItemCell', b'keyEquivalentWidth', {'retval': {'type': sel32or64(b'f', b'd')}}) - r(b'NSMenuItemCell', b'needsDisplay', {'retval': {'type': 'Z'}}) - r(b'NSMenuItemCell', b'needsSizing', {'retval': {'type': 'Z'}}) - r(b'NSMenuItemCell', b'setNeedsDisplay:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenuItemCell', b'setNeedsSizing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenuItemCell', b'stateImageRectForBounds:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuItemCell', b'titleRectForBounds:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuView', b'indexOfItemAtPoint:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSMenuView', b'initWithFrame:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuView', b'innerRect', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSMenuView', b'isAttached', {'retval': {'type': 'Z'}}) - r(b'NSMenuView', b'isHorizontal', {'retval': {'type': 'Z'}}) - r(b'NSMenuView', b'isTornOff', {'retval': {'type': 'Z'}}) - r(b'NSMenuView', 
b'locationForSubmenu:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}) - r(b'NSMenuView', b'needsSizing', {'retval': {'type': 'Z'}}) - r(b'NSMenuView', b'rectOfItemAtIndex:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSMenuView', b'setHorizontal:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenuView', b'setNeedsSizing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMenuView', b'setWindowFrameForAttachingToRect:onScreen:preferredEdge:popUpSelectedItem:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSMenuView', b'trackWithEvent:', {'retval': {'type': 'Z'}}) - r(b'NSMovie', b'QTMovie', {'retval': {'type': '^^{MovieType}'}}) - r(b'NSMovie', b'canInitWithPasteboard:', {'retval': {'type': 'Z'}}) - r(b'NSMovie', b'initWithMovie:', {'arguments': {2: {'type': '^^{MovieType}'}}}) - r(b'NSMovie', b'initWithURL:byReference:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSMovieView', b'isControllerVisible', {'retval': {'type': 'Z'}}) - r(b'NSMovieView', b'isEditable', {'retval': {'type': 'Z'}}) - r(b'NSMovieView', b'isMuted', {'retval': {'type': 'Z'}}) - r(b'NSMovieView', b'isPlaying', {'retval': {'type': 'Z'}}) - r(b'NSMovieView', b'movieController', {'retval': {'type': '^{ComponentInstanceRecord=[1l]}'}}) - r(b'NSMovieView', b'movieRect', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSMovieView', b'playsEveryFrame', {'retval': {'type': 'Z'}}) - r(b'NSMovieView', b'playsSelectionOnly', {'retval': {'type': 'Z'}}) - r(b'NSMovieView', b'setEditable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMovieView', b'setMuted:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMovieView', b'setPlaysEveryFrame:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMovieView', b'setPlaysSelectionOnly:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSMovieView', b'showController:adjustingSize:', {'arguments': {2: {'type': 'Z'}, 3: {'type': 'Z'}}}) - r(b'NSMovieView', b'sizeForMagnification:', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSMutableAttributedString', b'applyFontTraits:range:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableAttributedString', b'fixAttachmentAttributeInRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableAttributedString', b'fixAttributesInRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableAttributedString', b'fixFontAttributeInRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableAttributedString', b'fixParagraphStyleAttributeInRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableAttributedString', b'readFromData:options:documentAttributes:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSMutableAttributedString', b'readFromData:options:documentAttributes:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}, 5: {'type_modifier': b'o'}}}) - r(b'NSMutableAttributedString', b'readFromURL:options:documentAttributes:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSMutableAttributedString', b'readFromURL:options:documentAttributes:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}, 5: {'type_modifier': 
b'o'}}}) - r(b'NSMutableAttributedString', b'setAlignment:range:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableAttributedString', b'setBaseWritingDirection:range:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableAttributedString', b'subscriptRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableAttributedString', b'superscriptRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableAttributedString', b'unscriptRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSMutableParagraphStyle', b'allowsDefaultTighteningForTruncation', {'retval': {'type': b'Z'}}) - r(b'NSMutableParagraphStyle', b'setAllowsDefaultTighteningForTruncation:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSNib', b'instantiateNibWithExternalNameTable:', {'retval': {'type': 'Z'}}) - r(b'NSNib', b'instantiateNibWithOwner:topLevelObjects:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSNib', b'instantiateWithOwner:topLevelObjects:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSObject', b'accessibilityActionDescription:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'accessibilityActionNames', {'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityActivationPoint', {'required': True, 'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}) - r(b'NSObject', b'accessibilityAllowedValues', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityApplicationFocusedUIElement', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityArrayAttributeCount:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'accessibilityArrayAttributeValues:index:maxCount:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'accessibilityAttributeNames', {'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityAttributeValue:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'accessibilityAttributeValue:forParameter:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'accessibilityAttributedStringForRange:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'accessibilityCancelButton', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityCellForColumn:row:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'accessibilityChildren', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityChildrenInNavigationOrder', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityClearButton', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityCloseButton', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityColumnCount', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'accessibilityColumnHeaderUIElements', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'accessibilityColumnIndexRange', {'required': 
[… continuation of removed machine-generated PyObjC metadata: several hundred more deleted
 registrations of the form r(b'NSObject', b'<selector>', {'required': ..., 'retval': {'type': ...},
 'arguments': {...}}), recording 32/64-bit return- and argument-type encodings via sel32or64() for
 AppKit accessibility, application-delegate, browser, collectionView, comboBox, control, dragging,
 layoutManager and related selectors; part of the AppKit/Foundation packages removed together with
 the old env directory …]
True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': b'@'}}}) - r(b'NSObject', b'menu:updateItem:atIndex:shouldCancel:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}, 5: {'type': 'Z'}}}) - r(b'NSObject', b'menu:willHighlightItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'menuDidClose:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'menuHasKeyEquivalent:forEvent:target:action:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': '^@', 'type_modifier': b'o'}, 5: {'type': '^:', 'type_modifier': b'o'}}}) - r(b'NSObject', b'menuNeedsUpdate:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'menuWillOpen:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'minContentSize', {'required': True, 'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSObject', b'minimumSizeWithPrioritizedCompressionOptions:', {'required': True, 'retval': {'type': b'{CGSize=dd}'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mouseDownOnCharacterIndex:atCoordinate:withModifier:client:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'mouseDraggedOnCharacterIndex:atCoordinate:withModifier:client:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'mouseUpOnCharacterIndex:atCoordinate:withModifier:client:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'moveBackward:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveBackwardAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveDown:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveDownAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveForward:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveForwardAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveLeft:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveLeftAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveParagraphBackwardAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveParagraphForwardAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveRight:', {'required': False, 
'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveRightAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToBeginningOfDocument:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToBeginningOfDocumentAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToBeginningOfLine:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToBeginningOfLineAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToBeginningOfParagraph:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToBeginningOfParagraphAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToEndOfDocument:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToEndOfDocumentAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToEndOfLine:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToEndOfLineAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToEndOfParagraph:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToEndOfParagraphAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToLeftEndOfLine:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToLeftEndOfLineAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToRightEndOfLine:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveToRightEndOfLineAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveUp:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveUpAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveWordBackward:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveWordBackwardAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveWordForward:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveWordForwardAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveWordLeft:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveWordLeftAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveWordRight:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'moveWordRightAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 
'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'namesOfPromisedFilesDroppedAtDestination:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfItemsForScrubber:', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfItemsInComboBox:', {'required': False, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfItemsInComboBoxCell:', {'required': False, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfItemsInMenu:', {'required': False, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfRowsInTableView:', {'required': False, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfSectionsInCollectionView:', {'required': False, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfValidItemsForDrop', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'objectDidBeginEditing:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'objectDidEndEditing:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'operationQueueForFilePromiseProvider:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'optionDescriptionsForBinding:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'optionsForSharingService:shareProvider:', {'required': False, 'retval': {'type': b'Q'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:acceptDrop:item:childIndex:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'outlineView:child:ofItem:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:dataCellForTableColumn:item:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:didAddRowView:forRow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'outlineView:didClickTableColumn:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:didDragTableColumn:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:didRemoveRowView:forRow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'outlineView:draggingSession:endedAtPoint:', {'arguments': {4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSObject', b'outlineView:draggingSession:endedAtPoint:operation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', 
b'outlineView:draggingSession:willBeginAtPoint:', {'arguments': {4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSObject', b'outlineView:draggingSession:willBeginAtPoint:forItems:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:heightOfRowByItem:', {'required': False, 'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:isGroupItem:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:isItemExpandable:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:itemForPersistentObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:mouseDownInHeaderOfTableColumn:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:namesOfPromisedFilesDroppedAtDestination:forDraggedItems:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:nextTypeSelectMatchFromItem:toItem:forString:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:numberOfChildrenOfItem:', {'required': False, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:objectValueForTableColumn:byItem:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:pasteboardWriterForItem:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:persistentObjectForItem:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:rowViewForItem:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:selectionIndexesForProposedSelection:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:setObjectValue:forTableColumn:byItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:shouldCollapseItem:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:shouldEditTableColumn:item:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:shouldExpandItem:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:shouldReorderColumn:toColumn:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'outlineView:shouldSelectItem:', 
{'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:shouldSelectTableColumn:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:shouldShowCellExpansionForTableColumn:item:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:shouldShowOutlineCellForItem:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:shouldTrackCell:forTableColumn:item:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:shouldTypeSelectForEvent:withCurrentSearchString:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:sizeToFitWidthOfColumn:', {'required': False, 'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'outlineView:sortDescriptorsDidChange:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:toolTipForCell:rect:tableColumn:item:mouseLocation:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}, 5: {'type': b'@'}, 6: {'type': b'@'}, 7: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSObject', b'outlineView:typeSelectStringForTableColumn:item:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:updateDraggingItemsForDrag:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:validateDrop:proposedItem:proposedChildIndex:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'outlineView:viewForTableColumn:item:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:willDisplayCell:forTableColumn:item:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:willDisplayOutlineCell:forTableColumn:item:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'outlineView:writeItems:toPasteboard:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'outlineViewColumnDidMove:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'outlineViewColumnDidResize:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'outlineViewItemDidCollapse:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - 
r(b'NSObject', b'outlineViewItemDidExpand:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'outlineViewItemWillCollapse:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'outlineViewItemWillExpand:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'outlineViewSelectionDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'outlineViewSelectionIsChanging:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pageController:didTransitionToObject:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pageController:frameForObject:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pageController:identifierForObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pageController:prepareViewController:withObject:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'pageController:viewControllerForIdentifier:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pageControllerDidEndLiveTransition:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pageControllerWillStartLiveTransition:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pageDown:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pageDownAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pageUp:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pageUpAndModifySelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'panel:compareFilename:with:caseSensitive:', {'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': 'Z'}}}) - r(b'NSObject', b'panel:didChangeToDirectoryURL:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'panel:directoryDidChange:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'panel:isValidFilename:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'panel:shouldEnableURL:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'panel:shouldShowFilename:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'panel:userEnteredFilename:confirmed:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'panel:validateURL:error:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - 
r(b'NSObject', b'panel:willExpand:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 'Z'}}}) - r(b'NSObject', b'panelSelectionDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pasteboard:item:provideDataForType:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'pasteboard:provideDataForType:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pasteboardChangedOwner:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pasteboardFinishedWithDataProvider:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pasteboardPropertyListForType:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'pathCell:willDisplayOpenPanel:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pathCell:willPopUpMenu:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pathControl:acceptDrop:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pathControl:shouldDragItem:withPasteboard:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'pathControl:shouldDragPathComponentCell:withPasteboard:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'pathControl:validateDrop:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pathControl:willDisplayOpenPanel:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pathControl:willPopUpMenu:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'performActionForItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'performDragOperation:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'performFeedbackPattern:performanceTime:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'q'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'performSegueWithIdentifier:sender:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'popoverDidClose:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'popoverDidDetach:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'popoverDidShow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'popoverShouldClose:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'popoverShouldDetach:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'popoverWillClose:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: 
{'type': b'@'}}}) - r(b'NSObject', b'popoverWillShow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'preferredLayoutAttributesFittingAttributes:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'prepareForDragOperation:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'prepareForInterfaceBuilder', {'retval': {'type': b'v'}}) - r(b'NSObject', b'prepareForReuse', {'required': False, 'retval': {'type': b'v'}}) - r(b'NSObject', b'prepareForSegue:sender:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'provideNewButtonImage', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'provideNewView:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': 'Z'}}}) - r(b'NSObject', b'quickLookPreviewItems:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'readSelectionFromPasteboard:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'readableTypesForPasteboard:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'readingOptionsForType:pasteboard:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'rectsForCharacterInRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'rectsForCharacterRange:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'remoteObjectProxyWithErrorHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'replaceCharactersInRange:withString:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': b'@'}}}) - r(b'NSObject', b'resetSpringLoading', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'restoreWindowWithIdentifier:state:completionHandler:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'rootItemForBrowser:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'rotor:resultForSearchParameters:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'ruleEditor:child:forCriterion:withRowType:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'ruleEditor:displayValueForCriterion:inRow:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'ruleEditor:numberOfChildrenForCriterion:withRowType:', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', 
b'ruleEditor:predicatePartsForCriterion:withDisplayValue:inRow:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'ruleEditorRowsDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'rulerView:didAddMarker:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'rulerView:didMoveMarker:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'rulerView:didRemoveMarker:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'rulerView:handleMouseDown:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'rulerView:locationForPoint:', {'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSObject', b'rulerView:pointForLocation:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'rulerView:pointForlocation:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {3: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'rulerView:shouldAddMarker:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'rulerView:shouldMoveMarker:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'rulerView:shouldRemoveMarker:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'rulerView:willAddMarker:atLocation:', {'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'rulerView:willMoveMarker:toLocation:', {'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'rulerView:willSetClientView:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'scrollLineDown:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scrollLineUp:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scrollPageDown:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scrollPageUp:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scrollRangeToVisible:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'scrollToBeginningOfDocument:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scrollToEndOfDocument:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scrubber:didChangeVisibleRange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'scrubber:didHighlightItemAtIndex:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: 
{'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'scrubber:didSelectItemAtIndex:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'scrubber:layout:sizeForItemAtIndex:', {'required': False, 'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'scrubber:viewForItemAtIndex:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'searchFieldDidEndSearching:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'searchFieldDidStartSearching:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'searchForItemsWithSearchString:resultLimit:matchedItemHandler:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'sectionCollapseButton', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'selectAll:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'selectLine:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'selectParagraph:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'selectToMark:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'selectWord:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'selectable', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'selectedRange', {'required': True, 'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSObject', b'selectedRanges', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'selectionShouldChangeInOutlineView:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'selectionShouldChangeInTableView:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityActivationPoint:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSObject', b'setAccessibilityAllowedValues:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityAlternateUIVisible:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityApplicationFocusedUIElement:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityCancelButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityChildren:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityChildrenInNavigationOrder:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityClearButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', 
b'setAccessibilityCloseButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityColumnCount:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilityColumnHeaderUIElements:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityColumnIndexRange:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'setAccessibilityColumnTitles:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityColumns:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityContents:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityCriticalValue:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityCustomActions:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityCustomRotors:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityDecrementButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityDefaultButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityDisclosed:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityDisclosedByRow:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityDisclosedRows:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityDisclosureLevel:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilityDocument:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityEdited:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityElement:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityEnabled:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityExpanded:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityExtrasMenuBar:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityFilename:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityFocused:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityFocusedWindow:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', 
b'setAccessibilityFrontmost:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityFullScreenButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityGrowArea:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityHandles:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityHeader:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityHelp:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityHidden:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityHorizontalScrollBar:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityHorizontalUnitDescription:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityHorizontalUnits:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilityIdentifier:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityIncrementButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityIndex:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilityInsertionPointLineNumber:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilityLabel:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityLabelUIElements:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityLabelValue:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}}) - r(b'NSObject', b'setAccessibilityLinkedUIElements:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMain:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityMainWindow:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMarkerGroupUIElement:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMarkerTypeDescription:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMarkerUIElements:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMarkerValues:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMaxValue:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMenuBar:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMinValue:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: 
{'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMinimizeButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityMinimized:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityModal:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityNextContents:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityNumberOfCharacters:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilityOrderedByRow:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityOrientation:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilityOverflowButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityParent:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityPlaceholderValue:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityPreviousContents:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityProtectedContent:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityProxy:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityRequired:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilityRole:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityRoleDescription:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityRowCount:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilityRowHeaderUIElements:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityRowIndexRange:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'setAccessibilityRows:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityRulerMarkerType:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilitySearchButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySearchMenu:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySelected:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAccessibilitySelectedCells:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySelectedChildren:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', 
b'setAccessibilitySelectedColumns:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySelectedRows:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySelectedText:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySelectedTextRange:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'setAccessibilitySelectedTextRanges:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityServesAsTitleForUIElements:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySharedCharacterRange:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'setAccessibilitySharedFocusElements:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySharedTextUIElements:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityShownMenu:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySortDirection:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilitySplitters:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilitySubrole:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityTabs:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityTitle:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityTitleUIElement:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityToolbarButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityTopLevelUIElement:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityURL:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityUnitDescription:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityUnits:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setAccessibilityValue:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityValueDescription:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityVerticalScrollBar:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityVerticalUnitDescription:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityVerticalUnits:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', 
b'q')}}}) - r(b'NSObject', b'setAccessibilityVisibleCells:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityVisibleCharacterRange:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'setAccessibilityVisibleChildren:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityVisibleColumns:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityVisibleRows:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityWarningValue:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityWindow:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityWindows:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAccessibilityZoomButton:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAllowsMultipleSelection:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAnimatesToDestination:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setAnimations:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAppearance:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setAttachment:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setColor:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setDefaultPlaceholder:forMarker:withBinding:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'setDockTile:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setDraggingFormation:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setEditable:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setFindBarView:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setFindBarVisible:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': 'Z'}}}) - r(b'NSObject', b'setFirstSelectedRange:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'setIdentifier:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setIntAttribute:value:forGlyphAtIndex:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'setKey:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setLocalizedKey:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setMark:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setMarkedText:selectedRange:', {'required': True, 'retval': 
{'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'setMarkedText:selectedRange:replacementRange:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'setMode:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': 'i'}}}) - r(b'NSObject', b'setNumberOfValidItemsForDrop:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setSectionCollapseButton:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setSelectable:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setSelectedRanges:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setString:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setValue:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setVisibleCharacterRanges:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'sharingService:didCompleteForItems:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'sharingService:didFailToShareItems:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'sharingService:didSaveShare:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'sharingService:didShareItems:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'sharingService:didStopSharing:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'sharingService:sourceFrameOnScreenForShareItem:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'sharingService:sourceWindowForShareItems:sharingContentScope:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'^i', b'^q')}}}) - r(b'NSObject', b'sharingService:transitionImageForShareItem:contentRect:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'sharingService:willShareItems:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'sharingServicePicker:delegateForSharingService:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'sharingServicePicker:didChooseSharingService:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'sharingServicePicker:sharingServicesForItems:proposedSharingServices:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - 
r(b'NSObject', b'shouldPerformSegueWithIdentifier:sender:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'shouldReplaceCharactersInRanges:withStrings:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'showAllHelpTopicsForSearchString:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'showRelativeToRect:ofView:preferredEdge:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'slideDraggedImageTo:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSObject', b'sound:didFinishPlaying:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 'Z'}}}) - r(b'NSObject', b'speechRecognizer:didRecognizeCommand:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'speechSynthesizer:didEncounterErrorAtIndex:ofString:message:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'speechSynthesizer:didEncounterSyncMessage:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'speechSynthesizer:didFinishSpeaking:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 'Z'}}}) - r(b'NSObject', b'speechSynthesizer:willSpeakPhoneme:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 's'}}}) - r(b'NSObject', b'speechSynthesizer:willSpeakWord:ofString:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': b'@'}}}) - r(b'NSObject', b'splitView:additionalEffectiveRectOfDividerAtIndex:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitView:canCollapseSubview:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'splitView:constrainMaxCoordinate:ofSubviewAt:', {'required': False, 'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'f', b'd')}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitView:constrainMinCoordinate:ofSubviewAt:', {'required': False, 'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'f', b'd')}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitView:constrainSplitPosition:ofSubviewAt:', {'required': False, 'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'f', b'd')}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitView:effectiveRect:forDrawnRect:ofDividerAtIndex:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: 
{'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitView:onstrainMaxCoordinate:ofSubviewAt:', {'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {3: {'type': sel32or64(b'f', b'd')}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitView:onstrainMinCoordinate:ofSubviewAt:', {'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {3: {'type': sel32or64(b'f', b'd')}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitView:onstrainSplitPosition:ofSubviewAt:', {'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {3: {'type': sel32or64(b'f', b'd')}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitView:resizeSubviewsWithOldSize:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSObject', b'splitView:shouldAdjustSizeOfSubview:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'splitView:shouldCollapseSubview:forDoubleClickOnDividerAtIndex:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitView:shouldHideDividerAtIndex:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'splitViewDidResizeSubviews:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'splitViewWillResizeSubviews:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'springLoadingActivated:draggingInfo:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': 'Z'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'springLoadingEntered:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'springLoadingExited:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'springLoadingHighlight', {'required': True, 'retval': {'type': b'q'}}) - r(b'NSObject', b'springLoadingHighlightChanged:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'springLoadingUpdated:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'stackView:didReattachViews:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'stackView:willDetachViews:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'string', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'stringAtIndex:effectiveRange:endsWithSearchBoundary:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}')}, 4: {'type': b'^Z'}}}) - r(b'NSObject', b'stringAtIndex:effectiveRange:endswithSearchBoundary:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'o^{_NSRange=II}', b'o^{_NSRange=QQ}')}, 4: {'type': 'o^Z'}}}) - r(b'NSObject', b'stringLength', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'supportsMode:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: 
{'type': 'i'}}}) - r(b'NSObject', b'swapWithMark:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'tabView:didSelectTabViewItem:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tabView:shouldSelectTabViewItem:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tabView:willSelectTabViewItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tabViewDidChangeNumberOfTabViewItems:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'tableView:acceptDrop:row:dropOperation:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'tableView:dataCellForTableColumn:row:', {'required': False, 'retval': {'type': '@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:didAddRowView:forRow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:didClickTableColumn:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tableView:didDragTableColumn:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tableView:didRemoveRowView:forRow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:draggingSession:endedAtPoint:operation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'tableView:draggingSession:willBeginAtPoint:forRowIndexes:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'tableView:heightOfRow:', {'required': False, 'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:isGroupRow:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:mouseDownInHeaderOfTableColumn:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tableView:namesOfPromisedFilesDroppedAtDestination:forDraggedRowsWithIndexes:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'tableView:nextTypeSelectMatchFromRow:toRow:forString:', {'required': False, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': sel32or64(b'i', b'q')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'tableView:objectValueForTableColumn:row:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - 
r(b'NSObject', b'tableView:pasteboardWriterForRow:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:rowActionsForRow:edge:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:rowViewForRow:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:selectionIndexesForProposedSelection:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tableView:setObjectValue:forTableColumn:row:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:shouldEditTableColumn:row:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:shouldReorderColumn:toColumn:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:shouldSelectRow:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:shouldSelectTableColumn:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tableView:shouldShowCellExpansionForTableColumn:row:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:shouldTrackCell:forTableColumn:row:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:shouldTypeSelectForEvent:withCurrentSearchString:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'tableView:sizeToFitWidthOfColumn:', {'required': False, 'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:sortDescriptorsDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tableView:toolTipForCell:rect:tableColumn:row:mouseLocation:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}, 5: {'type': b'@'}, 6: {'type': sel32or64(b'i', b'q')}, 7: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSObject', b'tableView:typeSelectStringForTableColumn:row:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:updateDraggingItemsForDrag:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tableView:validateDrop:proposedRow:proposedDropOperation:', {'required': False, 'retval': {'type': sel32or64(b'I', 
b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'tableView:viewForTableColumn:row:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:willDisplayCell:forTableColumn:row:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'tableView:writeRows:toPasteboard:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'tableView:writeRowsWithIndexes:toPasteboard:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'tableViewColumnDidMove:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'tableViewColumnDidResize:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'tableViewSelectionDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'tableViewSelectionIsChanging:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'tag', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'terminate:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'textDidBeginEditing:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'textDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'textDidEndEditing:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'textField:textView:candidates:forSelectedRange:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'textField:textView:candidatesForSelectedRange:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'textField:textView:shouldSelectCandidateAtIndex:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textShouldBeginEditing:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'textShouldEndEditing:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'textStorage:didProcessEditing:range:changeInLength:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'textStorage:willProcessEditing:range:changeInLength:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'textStorageDidProcessEditing:', {'required': False, 'retval': {'type': 
b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'textStorageWillProcessEditing:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'textView:URLForContentsOfTextAttachment:atIndex:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textView:candidates:forSelectedRange:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'textView:candidatesForSelectedRange:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'textView:clickedOnCell:inRect:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'textView:clickedOnCell:inRect:atIndex:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textView:clickedOnLink:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'textView:clickedOnLink:atIndex:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textView:completions:forPartialWordRange:indexOfSelectedItem:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': sel32or64(b'^i', b'^q'), 'type_modifier': b'N'}}}) - r(b'NSObject', b'textView:didCheckTextInRange:types:options:results:orthography:wordCount:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'i', b'q')}, 5: {'type': b'@'}, 6: {'type': b'@'}, 7: {'type': b'@'}, 8: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'textView:doCommandBySelector:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': ':', 'sel_of_type': b'v@:@'}}}) - r(b'NSObject', b'textView:doubleClickedOnCell:inRect:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'textView:doubleClickedOnCell:inRect:atIndex:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textView:draggedCell:inRect:event:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'textView:draggedCell:inRect:event:atIndex:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 
sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': b'@'}, 6: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textView:menu:forEvent:atIndex:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textView:shouldChangeTextInRange:replacementString:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': b'@'}}}) - r(b'NSObject', b'textView:shouldChangeTextInRanges:replacementStrings:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'textView:shouldChangeTypingAttributes:toAttributes:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'textView:shouldSelectCandidateAtIndex:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textView:shouldSetSpellingState:range:', {'required': False, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'textView:shouldUpdateTouchBarItemIdentifiers:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'textView:willChangeSelectionFromCharacterRange:toCharacterRange:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSObject', b'textView:willChangeSelectionFromCharacterRanges:toCharacterRanges:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'textView:willCheckTextInRange:options:types:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'^i', b'^q'), 'type_modifier': b'N'}}}) - r(b'NSObject', b'textView:willDisplayToolTip:forCharacterAtIndex:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textView:willShowSharingServicePicker:forItems:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'textView:writablePasteboardTypesForCell:atIndex:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'textView:writeCell:atIndex:toPasteboard:type:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'textViewDidChangeSelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'textViewDidChangeTypingAttributes:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', 
b'tokenField:completionsForSubstring:indexOfToken:indexOfSelectedItem:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}, 5: {'type': sel32or64(b'^i', b'^q'), 'type_modifier': b'o'}}}) - r(b'NSObject', b'tokenField:displayStringForRepresentedObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenField:editingStringForRepresentedObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenField:hasMenuForRepresentedObject:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenField:menuForRepresentedObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenField:readFromPasteboard:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenField:representedObjectForEditingString:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenField:shouldAddObjects:atIndex:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'tokenField:styleForRepresentedObject:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenField:writeRepresentedObjects:toPasteboard:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'tokenFieldCell:completionsForSubstring:indexOfToken:indexOfSelectedItem:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}, 5: {'type': sel32or64(b'^i', b'^q'), 'type_modifier': b'o'}}}) - r(b'NSObject', b'tokenFieldCell:displayStringForRepresentedObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenFieldCell:editingStringForRepresentedObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenFieldCell:hasMenuForRepresentedObject:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenFieldCell:menuForRepresentedObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenFieldCell:readFromPasteboard:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenFieldCell:representedObjectForEditingString:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenFieldCell:shouldAddObjects:atIndex:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'tokenFieldCell:styleForRepresentedObject:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'tokenFieldCell:writeRepresentedObjects:toPasteboard:', {'required': 
False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'toolbar:itemForItemIdentifier:willBeInsertedIntoToolbar:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'toolbarAllowedItemIdentifiers:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'toolbarDefaultItemIdentifiers:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'toolbarDidRemoveItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'toolbarSelectableItemIdentifiers:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'toolbarWillAddItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'touchBar', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'touchBar:makeItemForIdentifier:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'trackMouse:inRect:ofView:atCharacterIndex:untilMouseUp:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'I', b'Q')}, 6: {'type': 'Z'}}}) - r(b'NSObject', b'trackMouse:inRect:ofView:untilMouseUp:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: {'type': b'@'}, 5: {'type': 'Z'}}}) - r(b'NSObject', b'transpose:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'transposeWords:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'unbind:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'undoManagerForTextView:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'unmarkText', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'updateDraggingItemsForDrag:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'uppercaseWord:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'validAttributesForMarkedText', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'validModesForFontPanel:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'validateMenuItem:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'validateToolbarItem:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'validateUserInterfaceItem:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'value', {'retval': {'type': b'@'}}) - r(b'NSObject', b'valueClassForBinding:', {'retval': {'type': '#'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'view:stringForToolTip:point:userData:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 'i'}, 4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 5: {'type': 
'^v'}}}) - r(b'NSObject', b'viewSizeChanged:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'visibleCharacterRanges', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'wantsPeriodicDraggingUpdates', {'required': False, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'wantsToDelayTextChangeNotifications', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'wantsToHandleMouseEvents', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'wantsToInterpretAllKeystrokes', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'wantsToTrackMouse', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'wantsToTrackMouseForEvent:inRect:ofView:atCharacterIndex:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'willTransitionFromLayout:toLayout:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'window:didDecodeRestorableState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'window:shouldDragDocumentWithEvent:from:withPasteboard:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'window:shouldPopUpDocumentPathMenu:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'window:startCustomAnimationToEnterFullScreenOnScreen:withDuration:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'd'}}}) - r(b'NSObject', b'window:startCustomAnimationToEnterFullScreenWithDuration:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'd'}}}) - r(b'NSObject', b'window:startCustomAnimationToExitFullScreenWithDuration:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'd'}}}) - r(b'NSObject', b'window:willEncodeRestorableState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'window:willPositionSheet:usingRect:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'window:willResizeForVersionBrowserWithMaxPreferredSize:maxAllowedSize:', {'required': False, 'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 4: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSObject', b'window:willUseFullScreenContentSize:', {'required': False, 'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSObject', b'window:willUseFullScreenPresentationOptions:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - 
r(b'NSObject', b'windowDidBecomeKey:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidBecomeMain:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidChangeBackingProperties:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidChangeOcclusionState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidChangeScreen:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidChangeScreenProfile:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidDeminiaturize:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidEndLiveResize:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidEndSheet:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidEnterFullScreen:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidEnterVersionBrowser:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidExitFullScreen:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidExitVersionBrowser:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidExpose:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidFailToEnterFullScreen:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidFailToExitFullScreen:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidMiniaturize:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidMove:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidResignKey:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidResignMain:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidResize:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowDidUpdate:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowLevel', {'required': False, 'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'windowShouldClose:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowShouldZoom:toFrame:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'windowWillBeginSheet:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillClose:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillEnterFullScreen:', {'required': False, 'retval': {'type': b'v'}, 
'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillEnterVersionBrowser:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillExitFullScreen:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillExitVersionBrowser:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillMiniaturize:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillMove:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillResize:toSize:', {'required': False, 'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSObject', b'windowWillReturnFieldEditor:toObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'windowWillReturnUndoManager:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillStartLiveResize:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'windowWillUseStandardFrame:defaultFrame:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'writableTypesForPasteboard:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'writeSelectionToPasteboard:types:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'writingOptionsForType:pasteboard:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'yank:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObjectController', b'automaticallyPreparesContent', {'retval': {'type': 'Z'}}) - r(b'NSObjectController', b'canAdd', {'retval': {'type': 'Z'}}) - r(b'NSObjectController', b'canRemove', {'retval': {'type': 'Z'}}) - r(b'NSObjectController', b'fetchWithRequest:merge:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'NSObjectController', b'isEditable', {'retval': {'type': 'Z'}}) - r(b'NSObjectController', b'setAutomaticallyPreparesContent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSObjectController', b'setEditable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSObjectController', b'setUsesLazyFetching:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': 'Z'}}}) - r(b'NSObjectController', b'usesLazyFetching', {'retval': {'type': 'Z'}}) - r(b'NSObjectController', b'validateUserInterfaceItem:', {'retval': {'type': 'Z'}}) - r(b'NSOpenGLContext', b'CGLContextObj', {'retval': {'type': '^{_CGLContextObj}'}}) - r(b'NSOpenGLContext', b'getValues:forParameter:', {'arguments': {2: {'type': '^i'}}}) - r(b'NSOpenGLContext', b'setOffScreen:width:height:rowbytes:', {'arguments': {2: {'type_modifier': b'n', 'c_array_of_variable_length': True}}}) - r(b'NSOpenGLContext', b'setValues:forParameter:', {'arguments': {2: {'type_modifier': b'n', 'c_array_of_variable_length': True}}}) - 
r(b'NSOpenGLLayer', b'canDrawInOpenGLContext:pixelFormat:forLayerTime:displayTime:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}}) - r(b'NSOpenGLLayer', b'drawInOpenGLContext:pixelFormat:forLayerTime:displayTime:', {'arguments': {5: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}}) - r(b'NSOpenGLPixelFormat', b'CGLPixelFormatObj', {'retval': {'type': '^{_CGLPixelFormatObject}'}}) - r(b'NSOpenGLPixelFormat', b'getValues:forAttribute:forVirtualScreen:', {'arguments': {2: {'type': '^i', 'type_modifier': b'o'}}}) - r(b'NSOpenGLPixelFormat', b'initWithAttributes:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': 'r^I', 'type_modifier': b'n'}}}) - r(b'NSOpenGLView', b'initWithFrame:pixelFormat:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSOpenPanel', b'allowsMultipleSelection', {'retval': {'type': 'Z'}}) - r(b'NSOpenPanel', b'beginForDirectory:file:types:modelessDelegate:didEndSelector:contextInfo:', {'arguments': {6: {'type': ':', 'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 7: {'type': '^v'}}}) - r(b'NSOpenPanel', b'beginSheetForDirectory:file:types:modalForWindow:modalDelegate:didEndSelector:contextInfo:', {'arguments': {7: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 8: {'type': '^v'}}}) - r(b'NSOpenPanel', b'canChooseDirectories', {'retval': {'type': 'Z'}}) - r(b'NSOpenPanel', b'canChooseFiles', {'retval': {'type': 'Z'}}) - r(b'NSOpenPanel', b'canDownloadUbiquitousContents', {'retval': {'type': b'Z'}}) - r(b'NSOpenPanel', b'canResolveUbiquitousConflicts', {'retval': {'type': b'Z'}}) - r(b'NSOpenPanel', b'isAccessoryViewDisclosed', {'retval': {'type': 'Z'}}) - r(b'NSOpenPanel', b'resolvesAliases', {'retval': {'type': 'Z'}}) - r(b'NSOpenPanel', b'setAccessoryViewDisclosed:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSOpenPanel', b'setAllowsMultipleSelection:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSOpenPanel', b'setCanChooseDirectories:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSOpenPanel', b'setCanChooseFiles:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSOpenPanel', b'setCanDownloadUbiquitousContents:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSOpenPanel', b'setCanResolveUbiquitousConflicts:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSOpenPanel', b'setResolvesAliases:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSOutlineView', b'autoresizesOutlineColumn', {'retval': {'type': 'Z'}}) - r(b'NSOutlineView', b'autosaveExpandedItems', {'retval': {'type': 'Z'}}) - r(b'NSOutlineView', b'collapseItem:collapseChildren:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOutlineView', b'expandItem:expandChildren:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOutlineView', b'frameOfOutlineCellAtRow:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSOutlineView', b'indentationMarkerFollowsCell', {'retval': {'type': 'Z'}}) - r(b'NSOutlineView', b'isExpandable:', {'retval': {'type': 'Z'}}) - r(b'NSOutlineView', b'isItemExpanded:', {'retval': {'type': 'Z'}}) - r(b'NSOutlineView', b'reloadItem:reloadChildren:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOutlineView', b'setAutoresizesOutlineColumn:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSOutlineView', b'setAutosaveExpandedItems:', 
{'arguments': {2: {'type': 'Z'}}}) - r(b'NSOutlineView', b'setIndentationMarkerFollowsCell:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSOutlineView', b'setStronglyReferencesItems:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSOutlineView', b'shouldCollapseAutoExpandedItemsForDeposited:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': 'Z'}}}) - r(b'NSOutlineView', b'stronglyReferencesItems', {'retval': {'type': 'Z'}}) - r(b'NSPDFImageRep', b'bounds', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSPDFInfo', b'isFileExtensionHidden', {'retval': {'type': b'Z'}}) - r(b'NSPDFInfo', b'setFileExtensionHidden:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSPDFPanel', b'beginSheetWithPDFInfo:modalForWindow:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'NSPICTImageRep', b'boundingBox', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSPageLayout', b'beginSheetWithPrintInfo:modalForWindow:delegate:didEndSelector:contextInfo:', {'arguments': {5: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 6: {'type': '^v'}}}) - r(b'NSPanel', b'becomesKeyOnlyIfNeeded', {'retval': {'type': 'Z'}}) - r(b'NSPanel', b'isFloatingPanel', {'retval': {'type': 'Z'}}) - r(b'NSPanel', b'setBecomesKeyOnlyIfNeeded:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPanel', b'setFloatingPanel:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPanel', b'setWorksWhenModal:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPanel', b'worksWhenModal', {'retval': {'type': 'Z'}}) - r(b'NSParagraphStyle', b'allowsDefaultTighteningForTruncation', {'retval': {'type': 'Z'}}) - r(b'NSPasteboard', b'canReadItemWithDataConformingToTypes:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboard', b'canReadObjectForClasses:options:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboard', b'setData:forType:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboard', b'setPropertyList:forType:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboard', b'setString:forType:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboard', b'writeFileContents:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboard', b'writeFileWrapper:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboard', b'writeObjects:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboardItem', b'setData:forType:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboardItem', b'setDataProvider:forTypes:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboardItem', b'setPropertyList:forType:', {'retval': {'type': 'Z'}}) - r(b'NSPasteboardItem', b'setString:forType:', {'retval': {'type': 'Z'}}) - r(b'NSPathCell', b'mouseEntered:withFrame:inView:', {'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSPathCell', b'mouseExited:withFrame:inView:', {'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSPathCell', b'pathComponentCellAtPoint:withFrame:inView:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSPathCell', b'rectOfPathComponentCell:withFrame:inView:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', 
b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSPathCell', b'setDoubleAction:', {'retval': {'type': 'v'}, 'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSPathControl', b'isEditable', {'retval': {'type': b'Z'}}) - r(b'NSPathControl', b'setDoubleAction:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSPathControl', b'setDraggingSourceOperationMask:forLocal:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSPathControl', b'setEditable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSPersistentDocument', b'configurePersistentStoreCoordinatorForURL:ofType:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSPersistentDocument', b'configurePersistentStoreCoordinatorForURL:ofType:modelConfiguration:storeOptions:error:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'NSPersistentDocument', b'readFromURL:ofType:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSPersistentDocument', b'revertToContentsOfURL:ofType:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSPersistentDocument', b'writeToURL:ofType:forSaveOperation:originalContentsURL:error:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'NSPopUpButton', b'autoenablesItems', {'retval': {'type': 'Z'}}) - r(b'NSPopUpButton', b'indexOfItemWithTarget:andAction:', {'arguments': {3: {'sel_of_type': b'v@:@'}}}) - r(b'NSPopUpButton', b'initWithFrame:pullsDown:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': 'Z'}}}) - r(b'NSPopUpButton', b'pullsDown', {'retval': {'type': 'Z'}}) - r(b'NSPopUpButton', b'selectItemWithTag:', {'retval': {'type': 'Z'}}) - r(b'NSPopUpButton', b'setAutoenablesItems:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPopUpButton', b'setPullsDown:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPopUpButtonCell', b'altersStateOfSelectedItem', {'retval': {'type': 'Z'}}) - r(b'NSPopUpButtonCell', b'attachPopUpWithFrame:inView:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSPopUpButtonCell', b'autoenablesItems', {'retval': {'type': 'Z'}}) - r(b'NSPopUpButtonCell', b'indexOfItemWithTarget:andAction:', {'arguments': {3: {'sel_of_type': b'v@:@'}}}) - r(b'NSPopUpButtonCell', b'initTextCell:pullsDown:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSPopUpButtonCell', b'performClickWithFrame:inView:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSPopUpButtonCell', b'pullsDown', {'retval': {'type': 'Z'}}) - r(b'NSPopUpButtonCell', b'selectItemWithTag:', {'retval': {'type': 'Z'}}) - r(b'NSPopUpButtonCell', b'setAltersStateOfSelectedItem:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPopUpButtonCell', b'setAutoenablesItems:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPopUpButtonCell', b'setPullsDown:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPopUpButtonCell', b'setUsesItemFromMenu:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPopUpButtonCell', b'usesItemFromMenu', {'retval': {'type': 'Z'}}) - r(b'NSPopover', b'animates', {'retval': {'type': b'Z'}}) - r(b'NSPopover', b'isDetached', {'retval': {'type': 'Z'}}) - r(b'NSPopover', b'isShown', {'retval': {'type': b'Z'}}) - r(b'NSPopover', b'setAnimates:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSPopover', b'setDetached:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPopover', b'setShown:', 
[... remainder of hunk omitted: continuation of the deleted, auto-generated PyObjC/AppKit metadata — per-selector `r(b'NSClass', b'selector:', {...})` type-signature registrations (covering classes from NSPrintInfo and NSPrintOperation through NSScrollView, NSTableView, and NSTextView) that had been checked in under the old `env/` site-packages and is removed here along with that directory, as described in the README ...]
'Z'}}) - r(b'NSTextView', b'usesInspectorBar', {'retval': {'type': b'Z'}}) - r(b'NSTextView', b'usesRolloverButtonForSelection', {'retval': {'type': b'Z'}}) - r(b'NSTextView', b'usesRuler', {'retval': {'type': 'Z'}}) - r(b'NSTextView', b'writeSelectionToPasteboard:type:', {'retval': {'type': 'Z'}}) - r(b'NSTextView', b'writeSelectionToPasteboard:types:', {'retval': {'type': 'Z'}}) - r(b'NSTitlebarAccessoryViewController', b'isHidden', {'retval': {'type': 'Z'}}) - r(b'NSTitlebarAccessoryViewController', b'setHidden:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSToolbar', b'allowsExtensionItems', {'retval': {'type': b'Z'}}) - r(b'NSToolbar', b'allowsUserCustomization', {'retval': {'type': 'Z'}}) - r(b'NSToolbar', b'autosavesConfiguration', {'retval': {'type': 'Z'}}) - r(b'NSToolbar', b'customizationPaletteIsRunning', {'retval': {'type': 'Z'}}) - r(b'NSToolbar', b'isVisible', {'retval': {'type': 'Z'}}) - r(b'NSToolbar', b'setAllowsExtensionItems:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSToolbar', b'setAllowsUserCustomization:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSToolbar', b'setAutosavesConfiguration:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSToolbar', b'setShowsBaselineSeparator:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSToolbar', b'setVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSToolbar', b'showsBaselineSeparator', {'retval': {'type': 'Z'}}) - r(b'NSToolbarItem', b'allowsDuplicatesInToolbar', {'retval': {'type': 'Z'}}) - r(b'NSToolbarItem', b'autovalidates', {'retval': {'type': 'Z'}}) - r(b'NSToolbarItem', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'NSToolbarItem', b'maxSize', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSToolbarItem', b'minSize', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSToolbarItem', b'setAction:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSToolbarItem', b'setAutovalidates:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSToolbarItem', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSToolbarItem', b'setMaxSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSToolbarItem', b'setMinSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSTouch', b'deviceSize', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSTouch', b'isResting', {'retval': {'type': 'Z'}}) - r(b'NSTouchBar', b'isVisible', {'retval': {'type': 'Z'}}) - r(b'NSTouchBar', b'setVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSTouchBarItem', b'isVisible', {'retval': {'type': 'Z'}}) - r(b'NSTouchBarItem', b'setVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSTrackingArea', b'initWithRect:options:owner:userInfo:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSTrackingArea', b'rect', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSTreeController', b'addSelectionIndexPaths:', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'alwaysUsesMultipleValuesMarker', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'avoidsEmptySelection', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'canAddChild', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'canInsert', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'canInsertChild', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'preservesSelection', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', 
b'removeSelectionIndexPaths:', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'selectsInsertedObjects', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'setAlwaysUsesMultipleValuesMarker:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSTreeController', b'setAvoidsEmptySelection:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSTreeController', b'setPreservesSelection:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSTreeController', b'setSelectionIndexPath:', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'setSelectionIndexPaths:', {'retval': {'type': 'Z'}}) - r(b'NSTreeController', b'setSelectsInsertedObjects:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSTreeNode', b'isLeaf', {'retval': {'type': 'Z'}}) - r(b'NSTreeNode', b'sortWithSortDescriptors:recursively:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSTypesetter', b'bidiProcessingEnabled', {'retval': {'type': 'Z'}}) - r(b'NSTypesetter', b'boundingBoxForControlGlyphAtIndex:forTextContainer:proposedLineFragment:glyphPosition:characterIndex:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 6: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSTypesetter', b'characterRangeForGlyphRange:actualGlyphRange:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o'}}}) - r(b'NSTypesetter', b'deleteGlyphsInRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'endLineWithGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'getGlyphsInRange:glyphs:characterIndexes:glyphInscriptions:elasticBits:bidiLevels:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 4: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 5: {'type_modifier': b'o', 'c_array_length_in_arg': 2}, 6: {'type': '^Z', 'type_modifier': b'o', 'c_array_length_in_arg': 2}, 7: {'type': '^C', 'type_modifier': b'o', 'c_array_length_in_arg': 2}}}) - r(b'NSTypesetter', b'getLineFragmentRect:usedRect:forParagraphSeparatorGlyphRange:atProposedOrigin:', {'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSTypesetter', b'getLineFragmentRect:usedRect:remainingRect:forStartingGlyphAtIndex:proposedRect:lineSpacing:paragraphSpacingBefore:paragraphSpacingAfter:', {'arguments': {2: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}, 3: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}, 4: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}, 6: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSTypesetter', b'glyphRangeForCharacterRange:actualCharacterRange:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', 
b'{_NSRange=QQ}')}, 3: {'type_modifier': b'o'}}}) - r(b'NSTypesetter', b'layoutCharactersInRange:forLayoutManager:maximumNumberOfLineFragments:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'layoutGlyphsInLayoutManager:startingAtGlyphIndex:maxNumberOfLineFragments:nextGlyphIndex:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSTypesetter', b'layoutParagraphAtPoint:', {'arguments': {2: {'type_modifier': b'N'}}}) - r(b'NSTypesetter', b'lineSpacingAfterGlyphAtIndex:withProposedLineFragmentRect:', {'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSTypesetter', b'paragraphCharacterRange', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSTypesetter', b'paragraphGlyphRange', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSTypesetter', b'paragraphSeparatorCharacterRange', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSTypesetter', b'paragraphSeparatorGlyphRange', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSTypesetter', b'paragraphSpacingAfterGlyphAtIndex:withProposedLineFragmentRect:', {'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSTypesetter', b'paragraphSpacingBeforeGlyphAtIndex:withProposedLineFragmentRect:', {'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSTypesetter', b'printingAdjustmentInLayoutManager:forNominallySpacedGlyphRange:packedGlyphs:count:', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'c_array_delimited_by_null': True, 'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'NSTypesetter', b'setAttachmentSize:forGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'setBidiLevels:forGlyphRange:', {'arguments': {2: {'type': '^z', 'type_modifier': b'n', 'c_array_length_in_arg': 3}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'setBidiProcessingEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSTypesetter', b'setDrawsOutsideLineFragment:forGlyphRange:', {'arguments': {2: {'type': 'Z'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'setHardInvalidation:forGlyphRange:', {'arguments': {2: {'type': 'Z'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'setLineFragmentRect:forGlyphRange:usedRect:baselineOffset:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSTypesetter', b'setLocation:withAdvancements:forStartOfGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 3: {'type_modifier': b'n', 'c_array_length_in_arg': 4}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'setNotShownAttribute:forGlyphRange:', {'arguments': {2: {'type': 'Z'}, 3: {'type': 
sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'setParagraphGlyphRange:separatorGlyphRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTypesetter', b'setUsesFontLeading:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSTypesetter', b'shouldBreakLineByHyphenatingBeforeCharacterAtIndex:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSTypesetter', b'shouldBreakLineByWordBeforeCharacterAtIndex:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSTypesetter', b'substituteGlyphsInRange:withGlyphs:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type_modifier': b'n', 'c_array_length_in_arg': 2}}}) - r(b'NSTypesetter', b'usesFontLeading', {'retval': {'type': 'Z'}}) - r(b'NSTypesetter', b'willSetLineFragmentRect:forGlyphRange:usedRect:baselineOffset:', {'arguments': {2: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'N'}, 5: {'type': sel32or64(b'^f', b'^d'), 'type_modifier': b'N'}}}) - r(b'NSUndoManager', b'groupsByEvent', {'retval': {'type': 'Z'}}) - r(b'NSUndoManager', b'setGroupsByEvent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSUserDefaultsController', b'appliesImmediately', {'retval': {'type': 'Z'}}) - r(b'NSUserDefaultsController', b'hasUnappliedChanges', {'retval': {'type': 'Z'}}) - r(b'NSUserDefaultsController', b'setAppliesImmediately:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSUserInterfaceCompressionOptions', b'containsOptions:', {'retval': {'type': 'Z'}}) - r(b'NSUserInterfaceCompressionOptions', b'intersectsOptions:', {'retval': {'type': 'Z'}}) - r(b'NSUserInterfaceCompressionOptions', b'isEmpty', {'retval': {'type': 'Z'}}) - r(b'NSUserInterfaceCompressionOptions', b'setEmpty:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'acceptsFirstMouse:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'acceptsTouchEvents', {'retval': {'type': 'Z'}}) - r(b'NSView', b'addCursorRect:cursor:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'addToolTipRect:owner:userData:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: {'type': '^v'}}}) - r(b'NSView', b'addTrackingRect:owner:userData:assumeInside:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: {'type': '^v'}, 5: {'type': 'Z'}}}) - r(b'NSView', b'adjustPageHeightNew:top:bottom:limit:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSView', b'adjustPageWidthNew:left:right:limit:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSView', b'adjustScroll:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'alignmentRectInsets', {'retval': {'type': sel32or64(b'{NSEdgeInsets=ffff}', b'{NSEdgeInsets=dddd}')}}) - r(b'NSView', b'allowsVibrancy', {'retval': {'type': b'Z'}}) - r(b'NSView', b'autoresizesSubviews', {'retval': {'type': 'Z'}}) - 
r(b'NSView', b'autoscroll:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'beginPageInRect:atPlacement:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'bitmapImageRepForCachingDisplayInRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'bounds', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSView', b'cacheDisplayInRect:toBitmapImageRep:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'canBecomeKeyView', {'retval': {'type': 'Z'}}) - r(b'NSView', b'canDraw', {'retval': {'type': 'Z'}}) - r(b'NSView', b'canDrawConcurrently', {'retval': {'type': 'Z'}}) - r(b'NSView', b'canDrawSubviewsIntoLayer', {'retval': {'type': b'Z'}}) - r(b'NSView', b'centerScanRect:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'context:', {'arguments': {2: {'type': '^v'}}}) - r(b'NSView', b'convertPoint:fromView:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'convertPoint:toView:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'convertPointFromBase:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'convertPointToBase:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'convertRect:fromView:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'convertRect:toView:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'convertRectFromBase:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'convertRectToBase:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'convertSize:fromView:', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'convertSize:toView:', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'convertSizeFromBase:', {'retval': {'type': 
sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'convertSizeToBase:', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'dataWithEPSInsideRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'dataWithPDFInsideRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'displayIfNeededInRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'displayIfNeededInRectIgnoringOpacity:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'displayRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'displayRectIgnoringOpacity:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'displayRectIgnoringOpacity:inContext:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'dragFile:fromRect:slideBack:event:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 4: {'type': 'Z'}}}) - r(b'NSView', b'dragImage:at:offset:event:pasteboard:source:slideBack:', {'arguments': {3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 8: {'type': 'Z'}}}) - r(b'NSView', b'dragPromisedFilesOfTypes:fromRect:source:slideBack:event:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': 'Z'}}}) - r(b'NSView', b'drawPageBorderWithSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'drawRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'drawSheetBorderWithSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'enterFullScreenMode:withOptions:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'frame', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSView', b'getRectsBeingDrawn:count:', {'arguments': {2: {'type': sel32or64(b'^^{_NSRect}', b'^^{CGRect}')}, 3: {'type': sel32or64(b'^i', b'^q'), 'type_modifier': b'o'}}}) - r(b'NSView', b'getRectsExposedDuringLiveResize:count:', {'arguments': {2: {'c_array_of_fixed_length': 4, 'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': b'o'}, 3: {'type': sel32or64(b'^i', b'^q'), 'type_modifier': b'o'}}}) - r(b'NSView', b'hasAmbiguousLayout', {'retval': {'type': b'Z'}}) - r(b'NSView', b'hitTest:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'inLiveResize', {'retval': {'type': 'Z'}}) - r(b'NSView', b'isCompatibleWithResponsiveScrolling', {'retval': {'type': b'Z'}}) - r(b'NSView', b'isDescendantOf:', {'retval': {'type': 
'Z'}}) - r(b'NSView', b'isDrawingFindIndicator', {'retval': {'type': b'Z'}}) - r(b'NSView', b'isFlipped', {'retval': {'type': 'Z'}}) - r(b'NSView', b'isHidden', {'retval': {'type': 'Z'}}) - r(b'NSView', b'isHiddenOrHasHiddenAncestor', {'retval': {'type': 'Z'}}) - r(b'NSView', b'isInFullScreenMode', {'retval': {'type': 'Z'}}) - r(b'NSView', b'isOpaque', {'retval': {'type': 'Z'}}) - r(b'NSView', b'isRotatedFromBase', {'retval': {'type': 'Z'}}) - r(b'NSView', b'isRotatedOrScaledFromBase', {'retval': {'type': 'Z'}}) - r(b'NSView', b'knowsPageRange:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSView', b'layerUsesCoreImageFilters', {'retval': {'type': b'Z'}}) - r(b'NSView', b'locationOfPrintRect:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'lockFocusIfCanDraw', {'retval': {'type': 'Z'}}) - r(b'NSView', b'lockFocusIfCanDrawInContext:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'mouse:inRect:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'mouseDownCanMoveWindow', {'retval': {'type': 'Z'}}) - r(b'NSView', b'needsDisplay', {'retval': {'type': 'Z'}}) - r(b'NSView', b'needsLayout', {'retval': {'type': b'Z'}}) - r(b'NSView', b'needsPanelToBecomeKey', {'retval': {'type': 'Z'}}) - r(b'NSView', b'needsToDrawRect:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'needsUpdateConstraints', {'retval': {'type': b'Z'}}) - r(b'NSView', b'performKeyEquivalent:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'performMnemonic:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'postsBoundsChangedNotifications', {'retval': {'type': 'Z'}}) - r(b'NSView', b'postsFrameChangedNotifications', {'retval': {'type': 'Z'}}) - r(b'NSView', b'preservesContentDuringLiveResize', {'retval': {'type': 'Z'}}) - r(b'NSView', b'rectForPage:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSView', b'rectPreservedDuringLiveResize', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSView', b'removeCursorRect:cursor:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'requiresConstraintBasedLayout', {'retval': {'type': b'Z'}}) - r(b'NSView', b'resizeSubviewsWithOldSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'resizeWithOldSuperviewSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'rulerView:shouldAddMarker:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'rulerView:shouldMoveMarker:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'rulerView:shouldRemoveMarker:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'scaleUnitSquareToSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'scrollClipView:toPoint:', {'arguments': {3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'scrollPoint:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'scrollRect:by:', 
{'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'scrollRectToVisible:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'setAcceptsTouchEvents:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'setAutoresizesSubviews:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'setBounds:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'setBoundsOrigin:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'setBoundsSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'setCanDrawConcurrently:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'setCanDrawSubviewsIntoLayer:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSView', b'setFrame:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'setFrameOrigin:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'setFrameSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'setHidden:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'setKeyboardFocusRingNeedsDisplayInRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'setLayerUsesCoreImageFilters:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSView', b'setNeedsDisplay:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'setNeedsDisplayInRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'setNeedsLayout:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSView', b'setNeedsUpdateConstraints:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSView', b'setPostsBoundsChangedNotifications:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'setPostsFrameChangedNotifications:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'setTranslatesAutoresizingMaskIntoConstraints:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSView', b'setWantsBestResolutionOpenGLSurface:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSView', b'setWantsExtendedDynamicRangeOpenGLSurface:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'setWantsLayer:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'setWantsRestingTouches:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSView', b'shouldDelayWindowOrderingForEvent:', {'retval': {'type': 'Z'}}) - r(b'NSView', b'shouldDrawColor', {'retval': {'type': 'Z'}}) - r(b'NSView', b'showDefinitionForAttributedString:atPoint:', {'arguments': {3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'showDefinitionForAttributedString:range:options:baselineOriginProvider:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 5: {'callable': {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}}}}) - r(b'NSView', b'sortSubviewsUsingFunction:context:', {'arguments': {2: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 
'type': '^?', 'callable_retained': False}, 3: {'type': '@'}}}) - r(b'NSView', b'translateOriginToPoint:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSView', b'translateRectsNeedingDisplayInRect:by:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSView', b'translatesAutoresizingMaskIntoConstraints', {'retval': {'type': b'Z'}}) - r(b'NSView', b'visibleRect', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSView', b'wantsBestResolutionOpenGLSurface', {'retval': {'type': b'Z'}}) - r(b'NSView', b'wantsDefaultClipping', {'retval': {'type': 'Z'}}) - r(b'NSView', b'wantsExtendedDynamicRangeOpenGLSurface', {'retval': {'type': 'Z'}}) - r(b'NSView', b'wantsLayer', {'retval': {'type': 'Z'}}) - r(b'NSView', b'wantsRestingTouches', {'retval': {'type': 'Z'}}) - r(b'NSView', b'wantsUpdateLayer', {'retval': {'type': b'Z'}}) - r(b'NSView', b'writeEPSInsideRect:toPasteboard:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSView', b'writePDFInsideRect:toPasteboard:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSViewController', b'commitEditing', {'retval': {'type': 'Z'}}) - r(b'NSViewController', b'commitEditingWithDelegate:didCommitSelector:contextInfo:', {'arguments': {3: {'type': ':', 'sel_of_type': b'v@:@Z^v'}, 4: {'type': '^v'}}}) - r(b'NSViewController', b'isViewLoaded', {'retval': {'type': b'Z'}}) - r(b'NSViewController', b'transitionFromViewController:toViewController:options:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSVisualEffectView', b'isEmphasized', {'retval': {'type': 'Z'}}) - r(b'NSVisualEffectView', b'setEmphasized:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'acceptsMouseMovedEvents', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'allowsAutomaticWindowTabbing', {'retval': {'type': b'Z'}}) - r(b'NSWindow', b'allowsConcurrentViewDrawing', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'allowsToolTipsWhenApplicationIsInactive', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'animationResizeTime:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSWindow', b'areCursorRectsEnabled', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'aspectRatio', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSWindow', b'autorecalculatesContentBorderThicknessForEdge:', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'autorecalculatesKeyViewLoop', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'beginCriticalSheet:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'NSWindow', b'beginSheet:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'NSWindow', b'cacheImageInRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSWindow', b'canBeVisibleOnAllSpaces', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'canBecomeKeyWindow', {'retval': {'type': 'Z'}}) - 
r(b'NSWindow', b'canBecomeMainWindow', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'canBecomeVisibleWithoutLogin', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'canHide', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'canRepresentDisplayGamut:', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'canStoreColor', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'cascadeTopLeftFromPoint:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSWindow', b'contentAspectRatio', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSWindow', b'contentMaxSize', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSWindow', b'contentMinSize', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSWindow', b'contentRectForFrameRect:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSWindow', b'contentRectForFrameRect:styleMask:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSWindow', b'contentResizeIncrements', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSWindow', b'convertBaseToScreen:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSWindow', b'convertScreenToBase:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSWindow', b'dataWithEPSInsideRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSWindow', b'dataWithPDFInsideRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSWindow', b'displaysWhenScreenProfileChanges', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'dragImage:at:offset:event:pasteboard:source:slideBack:', {'arguments': {3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 8: {'type': 'Z'}}}) - r(b'NSWindow', b'fieldEditor:forObject:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'frame', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSWindow', b'frameRectForContentRect:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSWindow', b'frameRectForContentRect:styleMask:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSWindow', b'hasCloseBox', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'hasDynamicDepthLimit', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'hasShadow', {'retval': {'type': 'Z'}}) - r(b'NSWindow', 
b'hasTitleBar', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'hidesOnDeactivate', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'ignoresMouseEvents', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'inLiveResize', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'initWithContentRect:styleMask:backing:defer:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': 'Z'}}}) - r(b'NSWindow', b'initWithContentRect:styleMask:backing:defer:screen:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': 'Z'}}}) - r(b'NSWindow', b'initWithWindowRef:', {'arguments': {2: {'type': '^{OpaqueWindowPtr=}'}}}) - r(b'NSWindow', b'isAutodisplay', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isDocumentEdited', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isExcludedFromWindowsMenu', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isFloatingPanel', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isFlushWindowDisabled', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isKeyWindow', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isMainWindow', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isMiniaturizable', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isMiniaturized', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isModalPanel', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isMovable', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isMovableByWindowBackground', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isOnActiveSpace', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isOneShot', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isOpaque', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isReleasedWhenClosed', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isResizable', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isRestorable', {'retval': {'type': b'Z'}}) - r(b'NSWindow', b'isSheet', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isVisible', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isZoomable', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'isZoomed', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'makeFirstResponder:', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'maxSize', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSWindow', b'minSize', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSWindow', b'mouseLocationOutsideOfEventStream', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}) - r(b'NSWindow', b'nextEventMatchingMask:untilDate:inMode:dequeue:', {'arguments': {5: {'type': 'Z'}}}) - r(b'NSWindow', b'onstrainFrameRect:toScreen:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSWindow', b'postEvent:atStart:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSWindow', b'preservesContentDuringLiveResize', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'preventsApplicationTerminationWhenModal', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'resizeIncrements', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSWindow', b'setAcceptsMouseMovedEvents:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setAllowsAutomaticWindowTabbing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setAllowsConcurrentViewDrawing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setAllowsToolTipsWhenApplicationIsInactive:', 
{'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setAspectRatio:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSWindow', b'setAutodisplay:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setAutorecalculatesContentBorderThickness:forEdge:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setAutorecalculatesKeyViewLoop:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setCanBeVisibleOnAllSpaces:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setCanBecomeVisibleWithoutLogin:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setCanHide:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setContentAspectRatio:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSWindow', b'setContentMaxSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSWindow', b'setContentMinSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSWindow', b'setContentResizeIncrements:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSWindow', b'setContentSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSWindow', b'setDisplaysWhenScreenProfileChanges:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setDocumentEdited:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setDynamicDepthLimit:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setExcludedFromWindowsMenu:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setFrame:display:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': 'Z'}}}) - r(b'NSWindow', b'setFrame:display:animate:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 3: {'type': 'Z'}, 4: {'type': 'Z'}}}) - r(b'NSWindow', b'setFrameAutosaveName:', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'setFrameOrigin:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSWindow', b'setFrameTopLeftPoint:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSWindow', b'setFrameUsingName:', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'setFrameUsingName:force:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}}}) - r(b'NSWindow', b'setHasShadow:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setHidesOnDeactivate:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setIgnoresMouseEvents:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setIsMiniaturized:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setIsVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setIsZoomed:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setMaxSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSWindow', b'setMinSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSWindow', b'setMovable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setMovableByWindowBackground:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setOneShot:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setOpaque:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setPreservesContentDuringLiveResize:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setPreventsApplicationTerminationWhenModal:', {'arguments': {2: {'type': 'Z'}}}) - 
r(b'NSWindow', b'setReleasedWhenClosed:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setResizeIncrements:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSWindow', b'setRestorable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSWindow', b'setShowsResizeIndicator:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setShowsToolbarButton:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'setTitlebarAppearsTransparent:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSWindow', b'setViewsNeedDisplay:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'showsResizeIndicator', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'showsToolbarButton', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'titlebarAppearsTransparent', {'retval': {'type': b'Z'}}) - r(b'NSWindow', b'trackEventsMatchingMask:timeout:mode:handler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^Z'}}}}}}) - r(b'NSWindow', b'tryToPerform:with:', {'retval': {'type': 'Z'}, 'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSWindow', b'useOptimizedDrawing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindow', b'viewsNeedDisplay', {'retval': {'type': 'Z'}}) - r(b'NSWindow', b'windowNumberAtPoint:belowWindowWithNumber:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSWindow', b'windowRef', {'retval': {'type': '^{OpaqueWindowPtr=}'}}) - r(b'NSWindow', b'worksWhenModal', {'retval': {'type': 'Z'}}) - r(b'NSWindowController', b'isWindowLoaded', {'retval': {'type': 'Z'}}) - r(b'NSWindowController', b'setDocumentEdited:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindowController', b'setShouldCascadeWindows:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindowController', b'setShouldCloseDocument:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindowController', b'shouldCascadeWindows', {'retval': {'type': 'Z'}}) - r(b'NSWindowController', b'shouldCloseDocument', {'retval': {'type': 'Z'}}) - r(b'NSWindowTabGroup', b'isOverviewVisible', {'retval': {'type': 'Z'}}) - r(b'NSWindowTabGroup', b'isTabBarVisible', {'retval': {'type': 'Z'}}) - r(b'NSWindowTabGroup', b'setOverviewVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWindowTabGroup', b'setTabBarVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSWorkspace', b'accessibilityDisplayShouldDifferentiateWithoutColor', {'retval': {'type': b'Z'}}) - r(b'NSWorkspace', b'accessibilityDisplayShouldIncreaseContrast', {'retval': {'type': b'Z'}}) - r(b'NSWorkspace', b'accessibilityDisplayShouldInvertColors', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'accessibilityDisplayShouldReduceMotion', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'accessibilityDisplayShouldReduceTransparency', {'retval': {'type': b'Z'}}) - r(b'NSWorkspace', b'duplicateURLs:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSWorkspace', b'fileSystemChanged', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'filenameExtension:isValidForType:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'getFileSystemInfoForPath:isRemovable:isWritable:isUnmountable:description:type:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^Z', 'type_modifier': b'o'}, 5: {'type': '^Z', 'type_modifier': b'o'}, 6: {'type_modifier': b'o'}, 7: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'getInfoForFile:application:type:', 
{'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'isFilePackageAtPath:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'isSwitchControlEnabled', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'isVoiceOverEnabled', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'launchAppWithBundleIdentifier:options:additionalEventParamDescriptor:launchIdentifier:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'launchApplication:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'launchApplication:showIcon:autolaunch:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}}}) - r(b'NSWorkspace', b'launchApplicationAtURL:options:configuration:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'openFile:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'openFile:fromImage:at:inView:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSWorkspace', b'openFile:withApplication:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'openFile:withApplication:andDeactivate:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type': 'Z'}}}) - r(b'NSWorkspace', b'openTempFile:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'openURL:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'openURL:options:configuration:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'openURLs:withAppBundleIdentifier:options:additionalEventParamDescriptor:launchIdentifiers:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'openURLs:withApplicationAtURL:options:configuration:error:', {'arguments': {6: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'performFileOperation:source:destination:files:tag:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'recycleURLs:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSWorkspace', b'requestAuthorizationOfType:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSWorkspace', b'selectFile:inFileViewerRootedAtPath:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'setDesktopImageURL:forScreen:options:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'setIcon:forFile:options:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'showSearchResultsForQueryString:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'slideImage:from:to:', {'retval': {'type': 'v'}, 'arguments': {3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSWorkspace', b'type:conformsToType:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'typeOfFile:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'unmountAndEjectDeviceAtPath:', {'retval': {'type': 'Z'}}) - r(b'NSWorkspace', b'unmountAndEjectDeviceAtURL:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSWorkspace', b'userDefaultsChanged', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -protocols={'NSSavePanelDelegateDeprecated': objc.informal_protocol('NSSavePanelDelegateDeprecated', [objc.selector(None, 
b'panel:compareFilename:with:caseSensitive:', sel32or64(b'i@:@@@Z', b'q@:@@@Z'), isRequired=False), objc.selector(None, b'panel:directoryDidChange:', b'v@:@@', isRequired=False), objc.selector(None, b'panel:shouldShowFilename:', b'Z@:@@', isRequired=False), objc.selector(None, b'panel:isValidFilename:', b'Z@:@@', isRequired=False)]), 'NSAccessibilityAdditions': objc.informal_protocol('NSAccessibilityAdditions', [objc.selector(None, b'accessibilitySetOverrideValue:forAttribute:', b'Z@:@@', isRequired=False)]), 'NSApplicationScriptingDelegation': objc.informal_protocol('NSApplicationScriptingDelegation', [objc.selector(None, b'application:delegateHandlesKey:', b'Z@:@@', isRequired=False)]), 'NSNibAwaking': objc.informal_protocol('NSNibAwaking', [objc.selector(None, b'awakeFromNib', b'v@:', isRequired=False), objc.selector(None, b'prepareForInterfaceBuilder', b'v@:', isRequired=False)]), 'NSToolTipOwner': objc.informal_protocol('NSToolTipOwner', [objc.selector(None, b'view:stringForToolTip:point:userData:', sel32or64(b'@@:@i{_NSPoint=ff}^v', b'@@:@q{CGPoint=dd}^v'), isRequired=False)]), 'NSDraggingDestination': objc.informal_protocol('NSDraggingDestination', [objc.selector(None, b'wantsPeriodicDraggingUpdates', b'Z@:', isRequired=False), objc.selector(None, b'draggingExited:', b'v@:@', isRequired=False), objc.selector(None, b'draggingEnded:', b'v@:@', isRequired=False), objc.selector(None, b'concludeDragOperation:', b'v@:@', isRequired=False), objc.selector(None, b'performDragOperation:', b'Z@:@', isRequired=False), objc.selector(None, b'draggingEntered:', sel32or64(b'I@:@', b'Q@:@'), isRequired=False), objc.selector(None, b'prepareForDragOperation:', b'Z@:@', isRequired=False), objc.selector(None, b'draggingUpdated:', sel32or64(b'I@:@', b'Q@:@'), isRequired=False)]), 'NSOutlineViewNotifications': objc.informal_protocol('NSOutlineViewNotifications', [objc.selector(None, b'outlineViewItemWillExpand:', b'v@:@', isRequired=False), objc.selector(None, b'outlineViewSelectionDidChange:', b'v@:@', isRequired=False), objc.selector(None, b'outlineViewSelectionIsChanging:', b'v@:@', isRequired=False), objc.selector(None, b'outlineViewColumnDidResize:', b'v@:@', isRequired=False), objc.selector(None, b'outlineViewItemWillCollapse:', b'v@:@', isRequired=False), objc.selector(None, b'outlineViewItemDidExpand:', b'v@:@', isRequired=False), objc.selector(None, b'outlineViewColumnDidMove:', b'v@:@', isRequired=False), objc.selector(None, b'outlineViewItemDidCollapse:', b'v@:@', isRequired=False)]), 'NSDraggingSource': objc.informal_protocol('NSDraggingSource', [objc.selector(None, b'namesOfPromisedFilesDroppedAtDestination:', b'@@:@', isRequired=False), objc.selector(None, b'draggedImage:endedAt:operation:', sel32or64(b'v@:@{_NSPoint=ff}I', b'v@:@{CGPoint=dd}Q'), isRequired=False), objc.selector(None, b'draggedImage:beganAt:', sel32or64(b'v@:@{_NSPoint=ff}', b'v@:@{CGPoint=dd}'), isRequired=False), objc.selector(None, b'draggingSourceOperationMaskForLocal:', sel32or64(b'I@:Z', b'Q@:Z'), isRequired=False), objc.selector(None, b'draggedImage:movedTo:', sel32or64(b'v@:@{_NSPoint=ff}', b'v@:@{CGPoint=dd}'), isRequired=False), objc.selector(None, b'ignoreModifierKeysWhileDragging', b'Z@:', isRequired=False)]), 'NSPasteboardOwner': objc.informal_protocol('NSPasteboardOwner', [objc.selector(None, b'pasteboard:provideDataForType:', b'v@:@@', isRequired=False), objc.selector(None, b'pasteboardChangedOwner:', b'v@:@', isRequired=False)]), 'NSAccessibility': objc.informal_protocol('NSAccessibility', 
[objc.selector(None, b'accessibilityAttributeValue:', b'@@:@', isRequired=False), objc.selector(None, b'accessibilityParameterizedAttributeNames', b'@@:', isRequired=False), objc.selector(None, b'accessibilityArrayAttributeCount:', sel32or64(b'I@:@', b'Q@:@'), isRequired=False), objc.selector(None, b'accessibilityIsAttributeSettable:', b'Z@:@', isRequired=False), objc.selector(None, b'accessibilityAttributeValue:forParameter:', b'@@:@@', isRequired=False), objc.selector(None, b'accessibilityArrayAttributeValues:index:maxCount:', sel32or64(b'@@:@II', b'@@:@QQ'), isRequired=False), objc.selector(None, b'accessibilityActionNames', b'@@:', isRequired=False), objc.selector(None, b'accessibilityAttributeNames', b'@@:', isRequired=False), objc.selector(None, b'accessibilityNotifiesWhenDestroyed', b'Z@:', isRequired=False), objc.selector(None, b'accessibilityIndexOfChild:', sel32or64(b'I@:@', b'Q@:@'), isRequired=False), objc.selector(None, b'accessibilityPerformAction:', b'v@:@', isRequired=False), objc.selector(None, b'accessibilityIsIgnored', b'Z@:', isRequired=False), objc.selector(None, b'accessibilityActionDescription:', b'@@:@', isRequired=False), objc.selector(None, b'accessibilityHitTest:', sel32or64(b'@@:{_NSPoint=ff}', b'@@:{CGPoint=dd}'), isRequired=False), objc.selector(None, b'accessibilitySetValue:forAttribute:', b'v@:@@', isRequired=False), objc.selector(None, b'accessibilityFocusedUIElement', b'@@:', isRequired=False)]), 'NSRulerMarkerClientViewDelegation': objc.informal_protocol('NSRulerMarkerClientViewDelegation', [objc.selector(None, b'rulerView:didRemoveMarker:', b'v@:@@', isRequired=False), objc.selector(None, b'rulerView:shouldRemoveMarker:', b'Z@:@@', isRequired=False), objc.selector(None, b'rulerView:shouldMoveMarker:', b'Z@:@@', isRequired=False), objc.selector(None, b'rulerView:locationForPoint:', sel32or64(b'f@:@{_NSPoint=ff}', b'd@:@{CGPoint=dd}'), isRequired=False), objc.selector(None, b'rulerView:willAddMarker:atLocation:', sel32or64(b'f@:@@f', b'd@:@@d'), isRequired=False), objc.selector(None, b'rulerView:didMoveMarker:', b'v@:@@', isRequired=False), objc.selector(None, b'rulerView:pointForLocation:', sel32or64(b'{_NSPoint=ff}@:@f', b'{CGPoint=dd}@:@d'), isRequired=False), objc.selector(None, b'rulerView:handleMouseDown:', b'v@:@@', isRequired=False), objc.selector(None, b'rulerView:willMoveMarker:toLocation:', sel32or64(b'f@:@@f', b'd@:@@d'), isRequired=False), objc.selector(None, b'rulerView:didAddMarker:', b'v@:@@', isRequired=False), objc.selector(None, b'rulerView:shouldAddMarker:', b'Z@:@@', isRequired=False), objc.selector(None, b'rulerView:willSetClientView:', b'v@:@@', isRequired=False)]), 'NSFontPanelValidationAdditions': objc.informal_protocol('NSFontPanelValidationAdditions', [objc.selector(None, b'validModesForFontPanel:', sel32or64(b'I@:@', b'Q@:@'), isRequired=False)]), 'NSToolbarItemValidation': objc.informal_protocol('NSToolbarItemValidation', [objc.selector(None, b'validateToolbarItem:', b'Z@:@', isRequired=False)]), 'NSKeyValueBindingCreation': objc.informal_protocol('NSKeyValueBindingCreation', [objc.selector(None, b'bind:toObject:withKeyPath:options:', b'v@:@@@@', isRequired=False), objc.selector(None, b'exposeBinding:', b'v@:@', isRequired=False), objc.selector(None, b'valueClassForBinding:', b'#@:@', isRequired=False), objc.selector(None, b'unbind:', b'v@:@', isRequired=False), objc.selector(None, b'infoForBinding:', b'@@:@', isRequired=False), objc.selector(None, b'exposedBindings', b'@@:', isRequired=False), objc.selector(None, 
b'optionDescriptionsForBinding:', b'@@:@', isRequired=False)]), 'NSDictionaryControllerKeyValuePair': objc.informal_protocol('NSDictionaryControllerKeyValuePair', [objc.selector(None, b'setKey:', b'v@:@', isRequired=False), objc.selector(None, b'setLocalizedKey:', b'v@:@', isRequired=False), objc.selector(None, b'value', b'@@:', isRequired=False), objc.selector(None, b'isExplicitlyIncluded', b'Z@:', isRequired=False), objc.selector(None, b'setValue:', b'v@:@', isRequired=False), objc.selector(None, b'key', b'@@:', isRequired=False), objc.selector(None, b'localizedKey', b'@@:', isRequired=False)]), 'NSEditor': objc.informal_protocol('NSEditor', [objc.selector(None, b'discardEditing', b'v@:', isRequired=False), objc.selector(None, b'commitEditing', b'Z@:', isRequired=False), objc.selector(None, b'commitEditingWithDelegate:didCommitSelector:contextInfo:', b'v@:@:^v', isRequired=False), objc.selector(None, b'commitEditingAndReturnError:', b'Z@:^@', isRequired=False)]), 'NSFontManagerDelegate': objc.informal_protocol('NSFontManagerDelegate', [objc.selector(None, b'fontManager:willIncludeFont:', b'Z@:@@', isRequired=False)]), 'NSControlSubclassNotifications': objc.informal_protocol('NSControlSubclassNotifications', [objc.selector(None, b'controlTextDidChange:', b'v@:@', isRequired=False), objc.selector(None, b'controlTextDidBeginEditing:', b'v@:@', isRequired=False), objc.selector(None, b'controlTextDidEndEditing:', b'v@:@', isRequired=False)]), 'NSServicesRequests': objc.informal_protocol('NSServicesRequests', [objc.selector(None, b'readSelectionFromPasteboard:', b'Z@:@', isRequired=False), objc.selector(None, b'writeSelectionToPasteboard:types:', b'Z@:@@', isRequired=False)]), 'NSTableViewDataSourceDeprecated': objc.informal_protocol('NSTableViewDataSourceDeprecated', [objc.selector(None, b'tableView:writeRows:toPasteboard:', b'Z@:@@@', isRequired=False)]), 'NSPlaceholders': objc.informal_protocol('NSPlaceholders', [objc.selector(None, b'setDefaultPlaceholder:forMarker:withBinding:', b'v@:@@@', isRequired=False), objc.selector(None, b'defaultPlaceholderForMarker:withBinding:', b'@@:@@', isRequired=False)]), 'NSDeprecatedTextStorageDelegateInterface': objc.informal_protocol('NSDeprecatedTextStorageDelegateInterface', [objc.selector(None, b'textStorageWillProcessEditing:', b'v@:@', isRequired=False), objc.selector(None, b'textStorageDidProcessEditing:', b'v@:@', isRequired=False)]), 'NSDraggingSourceDeprecated': objc.informal_protocol('NSDraggingSourceDeprecated', [objc.selector(None, b'namesOfPromisedFilesDroppedAtDestination:', b'@@:@', isRequired=False), objc.selector(None, b'draggedImage:endedAt:operation:', sel32or64(b'v@:@{_NSPoint=ff}I', b'v@:@{CGPoint=dd}Q'), isRequired=False), objc.selector(None, b'draggedImage:beganAt:', sel32or64(b'v@:@{_NSPoint=ff}', b'v@:@{CGPoint=dd}'), isRequired=False), objc.selector(None, b'draggedImage:endedAt:deposited:', sel32or64(b'v@:@{_NSPoint=ff}Z', b'v@:@{CGPoint=dd}Z'), isRequired=False), objc.selector(None, b'draggingSourceOperationMaskForLocal:', sel32or64(b'I@:Z', b'Q@:Z'), isRequired=False), objc.selector(None, b'draggedImage:movedTo:', sel32or64(b'v@:@{_NSPoint=ff}', b'v@:@{CGPoint=dd}'), isRequired=False), objc.selector(None, b'ignoreModifierKeysWhileDragging', b'Z@:', isRequired=False)]), 'NSColorPanelResponderMethod': objc.informal_protocol('NSColorPanelResponderMethod', [objc.selector(None, b'changeColor:', b'v@:@', isRequired=False)]), 'NSMenuValidation': objc.informal_protocol('NSMenuValidation', [objc.selector(None, b'validateMenuItem:', 
b'Z@:@', isRequired=False)]), 'NSEditorRegistration': objc.informal_protocol('NSEditorRegistration', [objc.selector(None, b'objectDidEndEditing:', b'v@:@', isRequired=False), objc.selector(None, b'objectDidBeginEditing:', b'v@:@', isRequired=False)]), 'NSFontManagerResponderMethod': objc.informal_protocol('NSFontManagerResponderMethod', [objc.selector(None, b'changeFont:', b'v@:@', isRequired=False)]), 'NSLayerDelegateContentsScaleUpdating': objc.informal_protocol('NSLayerDelegateContentsScaleUpdating', [objc.selector(None, b'layer:shouldInheritContentsScale:fromWindow:', sel32or64(b'Z@:@f@', b'Z@:@d@'), isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/AppKit/_metadata.pyc b/env/lib/python2.7/site-packages/AppKit/_metadata.pyc deleted file mode 100644 index d8d3d398..00000000 Binary files a/env/lib/python2.7/site-packages/AppKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AppKit/_nsapp.py b/env/lib/python2.7/site-packages/AppKit/_nsapp.py deleted file mode 100644 index 94007edd..00000000 --- a/env/lib/python2.7/site-packages/AppKit/_nsapp.py +++ /dev/null @@ -1,26 +0,0 @@ -import objc -from AppKit import __bundle__ - -class _NSApp (object): - """ - Helper class to emulate NSApp in Python. - """ - def __getrealapp(self): - d = {} - objc.loadBundleVariables(__bundle__, d, [ ('NSApp', b'@' ) ]) - return d.get('NSApp') - - __class__ = property(lambda self: self.__getrealapp().__class__) - - def __getattr__(self, name): - return getattr(self.__getrealapp(), name) - - def __setattr__(self, name, value): - return setattr(self.__getrealapp(), name, value) - - def __call__(self): - # Compatibility with previous versions. - return self.__getrealapp() - -NSApp = _NSApp() -del _NSApp diff --git a/env/lib/python2.7/site-packages/AppKit/_nsapp.pyc b/env/lib/python2.7/site-packages/AppKit/_nsapp.pyc deleted file mode 100644 index 65c8d157..00000000 Binary files a/env/lib/python2.7/site-packages/AppKit/_nsapp.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AppleScriptKit/__init__.py b/env/lib/python2.7/site-packages/AppleScriptKit/__init__.py deleted file mode 100644 index d0eb08c7..00000000 --- a/env/lib/python2.7/site-packages/AppleScriptKit/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the AppleScriptKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import sys -import objc -import AppKit - -from AppleScriptKit import _metadata - -sys.modules['AppleScriptKit'] = mod = objc.ObjCLazyModule("AppleScriptKit", - "com.apple.AppleScriptKit", - objc.pathForFramework("/System/Library/Frameworks/AppleScriptKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (AppKit,)) - -import sys -del sys.modules['AppleScriptKit._metadata'] diff --git a/env/lib/python2.7/site-packages/AppleScriptKit/__init__.pyc b/env/lib/python2.7/site-packages/AppleScriptKit/__init__.pyc deleted file mode 100644 index b58e6d3d..00000000 Binary files a/env/lib/python2.7/site-packages/AppleScriptKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AppleScriptKit/_metadata.py b/env/lib/python2.7/site-packages/AppleScriptKit/_metadata.py deleted file mode 100644 index abcc1124..00000000 --- a/env/lib/python2.7/site-packages/AppleScriptKit/_metadata.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Sep 18 21:38:55 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$$''' -misc.update({}) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/AppleScriptKit/_metadata.pyc b/env/lib/python2.7/site-packages/AppleScriptKit/_metadata.pyc deleted file mode 100644 index 5c2d6baf..00000000 Binary files a/env/lib/python2.7/site-packages/AppleScriptKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AppleScriptObjC/__init__.py b/env/lib/python2.7/site-packages/AppleScriptObjC/__init__.py deleted file mode 100644 index 2d2b857e..00000000 --- a/env/lib/python2.7/site-packages/AppleScriptObjC/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the AppleScriptObjC framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import sys -import objc -import Foundation - -from AppleScriptObjC import _metadata - -sys.modules['AppleScriptObjC'] = mod = objc.ObjCLazyModule( - "AppleScriptObjC", "com.apple.AppleScriptObjC", - objc.pathForFramework("/System/Library/Frameworks/AppleScriptObjC.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['AppleScriptObjC._metadata'] diff --git a/env/lib/python2.7/site-packages/AppleScriptObjC/__init__.pyc b/env/lib/python2.7/site-packages/AppleScriptObjC/__init__.pyc deleted file mode 100644 index 76894c7a..00000000 Binary files a/env/lib/python2.7/site-packages/AppleScriptObjC/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/AppleScriptObjC/_metadata.py b/env/lib/python2.7/site-packages/AppleScriptObjC/_metadata.py deleted file mode 100644 index c07ddd39..00000000 --- a/env/lib/python2.7/site-packages/AppleScriptObjC/_metadata.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Sep 18 21:41:22 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$$''' -misc.update({}) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/AppleScriptObjC/_metadata.pyc b/env/lib/python2.7/site-packages/AppleScriptObjC/_metadata.pyc deleted file mode 100644 index 65452d36..00000000 Binary files a/env/lib/python2.7/site-packages/AppleScriptObjC/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ApplicationServices/__init__.py b/env/lib/python2.7/site-packages/ApplicationServices/__init__.py deleted file mode 100644 index 806b6fc3..00000000 --- a/env/lib/python2.7/site-packages/ApplicationServices/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the ApplicationServices framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Quartz.ImageIO -import Quartz.CoreGraphics -import CoreText -import HIServices -#import ATS -#import ColorSync -#import LangAnalysis -#import PrintCore -#import QD -#import SpeechSynthesis - -sys.modules['ApplicationServices'] = mod = objc.ObjCLazyModule('ApplicationServices', - "com.apple.ApplicationServices", - objc.pathForFramework("/System/Library/Frameworks/ApplicationServices.framework"), - {}, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Quartz.ImageIO, Quartz.CoreGraphics, HIServices, CoreText )) diff --git a/env/lib/python2.7/site-packages/ApplicationServices/__init__.pyc b/env/lib/python2.7/site-packages/ApplicationServices/__init__.pyc deleted file mode 100644 index 77c25f05..00000000 Binary files a/env/lib/python2.7/site-packages/ApplicationServices/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Automator/__init__.py b/env/lib/python2.7/site-packages/Automator/__init__.py deleted file mode 100644 index 9e665c8f..00000000 --- a/env/lib/python2.7/site-packages/Automator/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -''' -Python mapping for the Automator framework. 
- -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import sys -import objc -import AppKit -from Automator import _metadata - -sys.modules['Automator'] = objc.ObjCLazyModule('Automator', - "com.apple.AutomatorFramework", objc.pathForFramework("/System/Library/Frameworks/Automator.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - 'objc': objc, - '__loader__': globals().get('__loader__', None), - }, (AppKit,)) - -import sys -del sys.modules['Automator._metadata'] diff --git a/env/lib/python2.7/site-packages/Automator/__init__.pyc b/env/lib/python2.7/site-packages/Automator/__init__.pyc deleted file mode 100644 index 2608ce5e..00000000 Binary files a/env/lib/python2.7/site-packages/Automator/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Automator/_metadata.py b/env/lib/python2.7/site-packages/Automator/_metadata.py deleted file mode 100644 index e7cd0111..00000000 --- a/env/lib/python2.7/site-packages/Automator/_metadata.py +++ /dev/null @@ -1,56 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 5 12:56:26 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$AMActionApplicationResourceError@-206$AMActionApplicationVersionResourceError@-207$AMActionArchitectureMismatchError@-202$AMActionExceptionError@-213$AMActionExecutionError@-212$AMActionFailedGatekeeperError@-217$AMActionFileResourceError@-208$AMActionInitializationError@-211$AMActionInsufficientDataError@-215$AMActionIsDeprecatedError@-216$AMActionLicenseResourceError@-209$AMActionLinkError@-205$AMActionLoadError@-204$AMActionMalwareError@-221$AMActionNotLoadableError@-201$AMActionPropertyListInvalidError@-214$AMActionQuarantineError@-219$AMActionRequiredActionResourceError@-210$AMActionRuntimeMismatchError@-203$AMActionSignatureCorruptError@-218$AMActionThirdPartyActionsNotAllowedError@-222$AMActionXProtectError@-220$AMConversionFailedError@-302$AMConversionNoDataError@-301$AMConversionNotPossibleError@-300$AMLogLevelDebug@0$AMLogLevelError@3$AMLogLevelInfo@1$AMLogLevelWarn@2$AMNoSuchActionError@-200$AMUserCanceledError@-128$AMWorkflowActionsNotLoadedError@-113$AMWorkflowNewerActionVersionError@-111$AMWorkflowNewerVersionError@-100$AMWorkflowNoEnabledActionsError@-114$AMWorkflowOlderActionVersionError@-112$AMWorkflowPropertyListInvalidError@-101$''' -misc.update({'AMAutomatorErrorDomain': b'com.apple.Automator'.decode("utf-8"), 'AMActionErrorKey': b'AMActionErrorKey'.decode("utf-8")}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'AMAction', b'ignoresInput', {'retval': {'type': 'Z'}}) - r(b'AMAction', b'initWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AMAction', b'initWithDefinition:fromArchive:', {'arguments': {3: {'type': 'Z'}}}) - r(b'AMAction', b'isStopped', {'retval': {'type': 'Z'}}) - r(b'AMAction', b'logMessageWithLevel:format:', {'arguments': {3: {'printf_format': True}}, 'variadic': True}) - r(b'AMAction', b'runWithInput:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AMAction', b'runWithInput:fromAction:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AMBundleAction', b'hasView', {'retval': {'type': 'Z'}}) - 
r(b'AMBundleAction', b'initWithDefinition:fromArchive:', {'arguments': {3: {'type': 'Z'}}}) - r(b'AMShellScriptAction', b'remapLineEndings', {'retval': {'type': 'Z'}}) - r(b'AMWorkflow', b'initWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AMWorkflow', b'runWorkflowAtURL:withInput:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'AMWorkflow', b'setValue:forVariableWithName:', {'retval': {'type': 'Z'}}) - r(b'AMWorkflow', b'writeToURL:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'AMWorkflowController', b'canRun', {'retval': {'type': 'Z'}}) - r(b'AMWorkflowController', b'isPaused', {'retval': {'type': 'Z'}}) - r(b'AMWorkflowController', b'isRunning', {'retval': {'type': 'Z'}}) - r(b'AMWorkflowView', b'isEditable', {'retval': {'type': 'Z'}}) - r(b'AMWorkflowView', b'setEditable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'AMWorkspace', b'runWorkflowAtPath:withInput:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSObject', b'workflowController:didError:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'workflowController:didRunAction:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'workflowController:willRunAction:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'workflowControllerDidRun:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'workflowControllerDidStop:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'workflowControllerWillRun:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'workflowControllerWillStop:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -protocols={'AMWorkflowControllerDelegate': objc.informal_protocol('AMWorkflowControllerDelegate', [objc.selector(None, b'workflowControllerDidStop:', b'v@:@', isRequired=False), objc.selector(None, b'workflowControllerWillRun:', b'v@:@', isRequired=False), objc.selector(None, b'workflowController:willRunAction:', b'v@:@@', isRequired=False), objc.selector(None, b'workflowControllerDidRun:', b'v@:@', isRequired=False), objc.selector(None, b'workflowController:didRunAction:', b'v@:@@', isRequired=False), objc.selector(None, b'workflowControllerWillStop:', b'v@:@', isRequired=False), objc.selector(None, b'workflowController:didError:', b'v@:@@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Automator/_metadata.pyc b/env/lib/python2.7/site-packages/Automator/_metadata.pyc deleted file mode 100644 index d7305cf6..00000000 Binary files a/env/lib/python2.7/site-packages/Automator/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CFNetwork/__init__.py b/env/lib/python2.7/site-packages/CFNetwork/__init__.py deleted file mode 100644 index ac4d352e..00000000 --- a/env/lib/python2.7/site-packages/CFNetwork/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ -''' -Python mapping for the CFNetwork framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import sys -import objc -import os -import CoreFoundation - -from CFNetwork import _metadata - -def CFSocketStreamSOCKSGetError(err): - return err.error & 0xFFFF - -def CFSocketStreamSOCKSGetErrorSubdomain(err): - return (err.error >> 16) & 0xFFFF - -frameworkPath = "/System/Library/Frameworks/CFNetwork.framework" -if not os.path.exists(frameworkPath): - frameworkPath = "/System/Library/Frameworks/CoreServices.framework/Frameworks/CFNetwork.framework" - - -sys.modules['CFNetwork'] = mod = objc.ObjCLazyModule( - "CFNetwork", "com.apple.CFNetwork", - objc.pathForFramework(frameworkPath), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'CFSocketStreamSOCKSGetError': CFSocketStreamSOCKSGetError, - 'CFSocketStreamSOCKSGetErrorSubdomain': CFSocketStreamSOCKSGetErrorSubdomain, - }, (CoreFoundation,)) - - -import CFNetwork._manual -for nm in dir(CFNetwork._manual): - setattr(mod, nm, getattr(CFNetwork._manual, nm)) - -import sys -del sys.modules['CFNetwork._metadata'] diff --git a/env/lib/python2.7/site-packages/CFNetwork/__init__.pyc b/env/lib/python2.7/site-packages/CFNetwork/__init__.pyc deleted file mode 100644 index bbb46cae..00000000 Binary files a/env/lib/python2.7/site-packages/CFNetwork/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CFNetwork/_manual.so b/env/lib/python2.7/site-packages/CFNetwork/_manual.so deleted file mode 100755 index 4bc9d9b3..00000000 Binary files a/env/lib/python2.7/site-packages/CFNetwork/_manual.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CFNetwork/_metadata.py b/env/lib/python2.7/site-packages/CFNetwork/_metadata.py deleted file mode 100644 index 00cd2a2a..00000000 --- a/env/lib/python2.7/site-packages/CFNetwork/_metadata.py +++ /dev/null @@ -1,33 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 5 13:33:27 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'CFHostClientContext': objc.createStructType('CFHostClientContext', sel32or64(b'{CFHostClientContext=l^v^?^?^?}', b'{CFHostClientContext=q^v^?^?^?}'), []), 'CFNetServiceClientContext': objc.createStructType('CFNetServiceClientContext', sel32or64(b'{CFNetServiceClientContext=l^v^?^?^?}', b'{CFNetServiceClientContext=q^v^?^?^?}'), [])}) -constants = 
'''$kCFDNSServiceFailureKey@^{__CFString=}$kCFErrorDomainCFNetwork@^{__CFString=}$kCFErrorDomainWinSock@^{__CFString=}$kCFFTPResourceGroup@^{__CFString=}$kCFFTPResourceLink@^{__CFString=}$kCFFTPResourceModDate@^{__CFString=}$kCFFTPResourceMode@^{__CFString=}$kCFFTPResourceName@^{__CFString=}$kCFFTPResourceOwner@^{__CFString=}$kCFFTPResourceSize@^{__CFString=}$kCFFTPResourceType@^{__CFString=}$kCFFTPStatusCodeKey@^{__CFString=}$kCFGetAddrInfoFailureKey@^{__CFString=}$kCFHTTPAuthenticationAccountDomain@^{__CFString=}$kCFHTTPAuthenticationPassword@^{__CFString=}$kCFHTTPAuthenticationSchemeBasic@^{__CFString=}$kCFHTTPAuthenticationSchemeDigest@^{__CFString=}$kCFHTTPAuthenticationSchemeKerberos@^{__CFString=}$kCFHTTPAuthenticationSchemeNTLM@^{__CFString=}$kCFHTTPAuthenticationSchemeNegotiate@^{__CFString=}$kCFHTTPAuthenticationSchemeNegotiate2@^{__CFString=}$kCFHTTPAuthenticationSchemeOAuth1@^{__CFString=}$kCFHTTPAuthenticationSchemeXMobileMeAuthToken@^{__CFString=}$kCFHTTPAuthenticationUsername@^{__CFString=}$kCFHTTPVersion1_0@^{__CFString=}$kCFHTTPVersion1_1@^{__CFString=}$kCFHTTPVersion2_0@^{__CFString=}$kCFNetworkProxiesExceptionsList@^{__CFString=}$kCFNetworkProxiesExcludeSimpleHostnames@^{__CFString=}$kCFNetworkProxiesFTPEnable@^{__CFString=}$kCFNetworkProxiesFTPPassive@^{__CFString=}$kCFNetworkProxiesFTPPort@^{__CFString=}$kCFNetworkProxiesFTPProxy@^{__CFString=}$kCFNetworkProxiesGopherEnable@^{__CFString=}$kCFNetworkProxiesGopherPort@^{__CFString=}$kCFNetworkProxiesGopherProxy@^{__CFString=}$kCFNetworkProxiesHTTPEnable@^{__CFString=}$kCFNetworkProxiesHTTPPort@^{__CFString=}$kCFNetworkProxiesHTTPProxy@^{__CFString=}$kCFNetworkProxiesHTTPSEnable@^{__CFString=}$kCFNetworkProxiesHTTPSPort@^{__CFString=}$kCFNetworkProxiesHTTPSProxy@^{__CFString=}$kCFNetworkProxiesProxyAutoConfigEnable@^{__CFString=}$kCFNetworkProxiesProxyAutoConfigJavaScript@^{__CFString=}$kCFNetworkProxiesProxyAutoConfigURLString@^{__CFString=}$kCFNetworkProxiesProxyAutoDiscoveryEnable@^{__CFString=}$kCFNetworkProxiesRTSPEnable@^{__CFString=}$kCFNetworkProxiesRTSPPort@^{__CFString=}$kCFNetworkProxiesRTSPProxy@^{__CFString=}$kCFNetworkProxiesSOCKSEnable@^{__CFString=}$kCFNetworkProxiesSOCKSPort@^{__CFString=}$kCFNetworkProxiesSOCKSProxy@^{__CFString=}$kCFProxyAutoConfigurationHTTPResponseKey@^{__CFString=}$kCFProxyAutoConfigurationJavaScriptKey@^{__CFString=}$kCFProxyAutoConfigurationURLKey@^{__CFString=}$kCFProxyHostNameKey@^{__CFString=}$kCFProxyPasswordKey@^{__CFString=}$kCFProxyPortNumberKey@^{__CFString=}$kCFProxyTypeAutoConfigurationJavaScript@^{__CFString=}$kCFProxyTypeAutoConfigurationURL@^{__CFString=}$kCFProxyTypeFTP@^{__CFString=}$kCFProxyTypeHTTP@^{__CFString=}$kCFProxyTypeHTTPS@^{__CFString=}$kCFProxyTypeKey@^{__CFString=}$kCFProxyTypeNone@^{__CFString=}$kCFProxyTypeSOCKS@^{__CFString=}$kCFProxyUsernameKey@^{__CFString=}$kCFSOCKSNegotiationMethodKey@^{__CFString=}$kCFSOCKSStatusCodeKey@^{__CFString=}$kCFSOCKSVersionKey@^{__CFString=}$kCFStreamErrorDomainSOCKS@i$kCFStreamErrorDomainSSL@i$kCFStreamNetworkServiceType@^{__CFString=}$kCFStreamNetworkServiceTypeBackground@^{__CFString=}$kCFStreamNetworkServiceTypeCallSignaling@^{__CFString=}$kCFStreamNetworkServiceTypeResponsiveData@^{__CFString=}$kCFStreamNetworkServiceTypeVideo@^{__CFString=}$kCFStreamNetworkServiceTypeVoIP@^{__CFString=}$kCFStreamNetworkServiceTypeVoice@^{__CFString=}$kCFStreamPropertyConnectionIsCellular@^{__CFString=}$kCFStreamPropertyFTPAttemptPersistentConnection@^{__CFString=}$kCFStreamPropertyFTPFetchResourceInfo@^{__CFString=}$kCFStreamProper
tyFTPFileTransferOffset@^{__CFString=}$kCFStreamPropertyFTPPassword@^{__CFString=}$kCFStreamPropertyFTPProxy@^{__CFString=}$kCFStreamPropertyFTPProxyHost@^{__CFString=}$kCFStreamPropertyFTPProxyPassword@^{__CFString=}$kCFStreamPropertyFTPProxyPort@^{__CFString=}$kCFStreamPropertyFTPProxyUser@^{__CFString=}$kCFStreamPropertyFTPResourceSize@^{__CFString=}$kCFStreamPropertyFTPUsePassiveMode@^{__CFString=}$kCFStreamPropertyFTPUserName@^{__CFString=}$kCFStreamPropertyHTTPAttemptPersistentConnection@^{__CFString=}$kCFStreamPropertyHTTPFinalRequest@^{__CFString=}$kCFStreamPropertyHTTPFinalURL@^{__CFString=}$kCFStreamPropertyHTTPProxy@^{__CFString=}$kCFStreamPropertyHTTPProxyHost@^{__CFString=}$kCFStreamPropertyHTTPProxyPort@^{__CFString=}$kCFStreamPropertyHTTPRequestBytesWrittenCount@^{__CFString=}$kCFStreamPropertyHTTPResponseHeader@^{__CFString=}$kCFStreamPropertyHTTPSProxyHost@^{__CFString=}$kCFStreamPropertyHTTPSProxyPort@^{__CFString=}$kCFStreamPropertyHTTPShouldAutoredirect@^{__CFString=}$kCFStreamPropertyNoCellular@^{__CFString=}$kCFStreamPropertyProxyLocalBypass@^{__CFString=}$kCFStreamPropertySOCKSPassword@^{__CFString=}$kCFStreamPropertySOCKSProxy@^{__CFString=}$kCFStreamPropertySOCKSProxyHost@^{__CFString=}$kCFStreamPropertySOCKSProxyPort@^{__CFString=}$kCFStreamPropertySOCKSUser@^{__CFString=}$kCFStreamPropertySOCKSVersion@^{__CFString=}$kCFStreamPropertySSLContext@^{__CFString=}$kCFStreamPropertySSLPeerCertificates@^{__CFString=}$kCFStreamPropertySSLPeerTrust@^{__CFString=}$kCFStreamPropertySSLSettings@^{__CFString=}$kCFStreamPropertyShouldCloseNativeSocket@^{__CFString=}$kCFStreamPropertySocketExtendedBackgroundIdleMode@^{__CFString=}$kCFStreamPropertySocketRemoteHost@^{__CFString=}$kCFStreamPropertySocketRemoteNetService@^{__CFString=}$kCFStreamPropertySocketSecurityLevel@^{__CFString=}$kCFStreamSSLAllowsAnyRoot@^{__CFString=}$kCFStreamSSLAllowsExpiredCertificates@^{__CFString=}$kCFStreamSSLAllowsExpiredRoots@^{__CFString=}$kCFStreamSSLCertificates@^{__CFString=}$kCFStreamSSLIsServer@^{__CFString=}$kCFStreamSSLLevel@^{__CFString=}$kCFStreamSSLPeerName@^{__CFString=}$kCFStreamSSLValidatesCertificateChain@^{__CFString=}$kCFStreamSocketSOCKSVersion4@^{__CFString=}$kCFStreamSocketSOCKSVersion5@^{__CFString=}$kCFStreamSocketSecurityLevelNegotiatedSSL@^{__CFString=}$kCFStreamSocketSecurityLevelNone@^{__CFString=}$kCFStreamSocketSecurityLevelSSLv2@^{__CFString=}$kCFStreamSocketSecurityLevelSSLv3@^{__CFString=}$kCFStreamSocketSecurityLevelTLSv1@^{__CFString=}$kCFURLErrorFailingURLErrorKey@^{__CFString=}$kCFURLErrorFailingURLStringErrorKey@^{__CFString=}$''' -constants = constants + '$kCFStreamErrorDomainNetServices@%s$'%(sel32or64('l', 'i'),) -constants = constants + '$kCFStreamErrorDomainFTP@%s$'%(sel32or64('l', 'i'),) -constants = constants + '$kCFStreamErrorDomainSystemConfiguration@%s$'%(sel32or64('l', 'i'),) -constants = constants + '$kCFStreamErrorDomainWinSock@%s$'%(sel32or64('l', 'q'),) -constants = constants + '$kCFStreamErrorDomainNetDB@%s$'%(sel32or64('l', 'i'),) -constants = constants + '$kCFStreamErrorDomainMach@%s$'%(sel32or64('l', 'i'),) -constants = constants + '$kCFStreamErrorDomainHTTP@%s$'%(sel32or64('l', 'i'),) -enums = 
'''$kCFErrorHTTPAuthenticationTypeUnsupported@300$kCFErrorHTTPBadCredentials@301$kCFErrorHTTPBadProxyCredentials@307$kCFErrorHTTPBadURL@305$kCFErrorHTTPConnectionLost@302$kCFErrorHTTPParseFailure@303$kCFErrorHTTPProxyConnectionFailure@306$kCFErrorHTTPRedirectionLoopDetected@304$kCFErrorHTTPSProxyConnectionFailure@310$kCFErrorPACFileAuth@309$kCFErrorPACFileError@308$kCFFTPErrorUnexpectedStatusCode@200$kCFHTTPCookieCannotParseCookieFile@-4000$kCFHostAddresses@0$kCFHostErrorHostNotFound@1$kCFHostErrorUnknown@2$kCFHostNames@1$kCFHostReachability@2$kCFNetDiagnosticConnectionDown@-66557$kCFNetDiagnosticConnectionIndeterminate@-66558$kCFNetDiagnosticConnectionUp@-66559$kCFNetDiagnosticErr@-66560$kCFNetDiagnosticNoErr@0$kCFNetServiceErrorBadArgument@-72004$kCFNetServiceErrorCancel@-72005$kCFNetServiceErrorCollision@-72001$kCFNetServiceErrorDNSServiceFailure@-73000$kCFNetServiceErrorInProgress@-72003$kCFNetServiceErrorInvalid@-72006$kCFNetServiceErrorNotFound@-72002$kCFNetServiceErrorTimeout@-72007$kCFNetServiceErrorUnknown@-72000$kCFNetServiceFlagIsDefault@4$kCFNetServiceFlagIsDomain@2$kCFNetServiceFlagIsRegistrationDomain@4$kCFNetServiceFlagMoreComing@1$kCFNetServiceFlagNoAutoRename@1$kCFNetServiceFlagRemove@8$kCFNetServiceMonitorTXT@1$kCFNetServicesErrorBadArgument@-72004$kCFNetServicesErrorCancel@-72005$kCFNetServicesErrorCollision@-72001$kCFNetServicesErrorInProgress@-72003$kCFNetServicesErrorInvalid@-72006$kCFNetServicesErrorNotFound@-72002$kCFNetServicesErrorTimeout@-72007$kCFNetServicesErrorUnknown@-72000$kCFSOCKS4ErrorIdConflict@112$kCFSOCKS4ErrorIdentdFailed@111$kCFSOCKS4ErrorRequestFailed@110$kCFSOCKS4ErrorUnknownStatusCode@113$kCFSOCKS5ErrorBadCredentials@122$kCFSOCKS5ErrorBadResponseAddr@121$kCFSOCKS5ErrorBadState@120$kCFSOCKS5ErrorNoAcceptableMethod@124$kCFSOCKS5ErrorUnsupportedNegotiationMethod@123$kCFSOCKSErrorUnknownClientVersion@100$kCFSOCKSErrorUnsupportedServerVersion@101$kCFStreamErrorHTTPAuthenticationBadPassword@-1002$kCFStreamErrorHTTPAuthenticationBadUserName@-1001$kCFStreamErrorHTTPAuthenticationTypeUnsupported@-1000$kCFStreamErrorHTTPBadURL@-3$kCFStreamErrorHTTPParseFailure@-1$kCFStreamErrorHTTPRedirectionLoop@-2$kCFStreamErrorHTTPSProxyFailureUnexpectedResponseToCONNECTMethod@311$kCFStreamErrorSOCKS4IdConflict@93$kCFStreamErrorSOCKS4IdentdFailed@92$kCFStreamErrorSOCKS4RequestFailed@91$kCFStreamErrorSOCKS4SubDomainResponse@2$kCFStreamErrorSOCKS5BadResponseAddr@1$kCFStreamErrorSOCKS5BadState@2$kCFStreamErrorSOCKS5SubDomainMethod@4$kCFStreamErrorSOCKS5SubDomainResponse@5$kCFStreamErrorSOCKS5SubDomainUserPass@3$kCFStreamErrorSOCKSSubDomainNone@0$kCFStreamErrorSOCKSSubDomainVersionCode@1$kCFStreamErrorSOCKSUnknownClientVersion@3$kCFStreamSocketSecurityNone@0$kCFStreamSocketSecuritySSLv2@1$kCFStreamSocketSecuritySSLv23@3$kCFStreamSocketSecuritySSLv3@2$kCFStreamSocketSecurityTLSv1@4$kCFURLErrorAppTransportSecurityRequiresSecureConnection@-1022$kCFURLErrorBackgroundSessionInUseByAnotherProcess@-996$kCFURLErrorBackgroundSessionWasDisconnected@-997$kCFURLErrorBadServerResponse@-1011$kCFURLErrorBadURL@-1000$kCFURLErrorCallIsActive@-1019$kCFURLErrorCancelled@-999$kCFURLErrorCannotCloseFile@-3002$kCFURLErrorCannotConnectToHost@-1004$kCFURLErrorCannotCreateFile@-3000$kCFURLErrorCannotDecodeContentData@-1016$kCFURLErrorCannotDecodeRawData@-1015$kCFURLErrorCannotFindHost@-1003$kCFURLErrorCannotLoadFromNetwork@-2000$kCFURLErrorCannotMoveFile@-3005$kCFURLErrorCannotOpenFile@-3001$kCFURLErrorCannotParseResponse@-1017$kCFURLErrorCannotRemoveFile@-3004$kCFURLErrorCannotWriteToFile@-3003$kCFUR
LErrorClientCertificateRejected@-1205$kCFURLErrorClientCertificateRequired@-1206$kCFURLErrorDNSLookupFailed@-1006$kCFURLErrorDataLengthExceedsMaximum@-1103$kCFURLErrorDataNotAllowed@-1020$kCFURLErrorDownloadDecodingFailedMidStream@-3006$kCFURLErrorDownloadDecodingFailedToComplete@-3007$kCFURLErrorFileDoesNotExist@-1100$kCFURLErrorFileIsDirectory@-1101$kCFURLErrorFileOutsideSafeArea@-1104$kCFURLErrorHTTPTooManyRedirects@-1007$kCFURLErrorInternationalRoamingOff@-1018$kCFURLErrorNetworkConnectionLost@-1005$kCFURLErrorNoPermissionsToReadFile@-1102$kCFURLErrorNotConnectedToInternet@-1009$kCFURLErrorRedirectToNonExistentLocation@-1010$kCFURLErrorRequestBodyStreamExhausted@-1021$kCFURLErrorResourceUnavailable@-1008$kCFURLErrorSecureConnectionFailed@-1200$kCFURLErrorServerCertificateHasBadDate@-1201$kCFURLErrorServerCertificateHasUnknownRoot@-1203$kCFURLErrorServerCertificateNotYetValid@-1204$kCFURLErrorServerCertificateUntrusted@-1202$kCFURLErrorTimedOut@-1001$kCFURLErrorUnknown@-998$kCFURLErrorUnsupportedURL@-1002$kCFURLErrorUserAuthenticationRequired@-1013$kCFURLErrorUserCancelledAuthentication@-1012$kCFURLErrorZeroByteResource@-1014$kSOCKS5NoAcceptableMethod@255$''' -misc.update({}) -functions={'CFNetworkExecuteProxyAutoConfigurationURL': (sel32or64(b'^{__CFRunLoopSource=}^{__CFURL=}^{__CFURL=}^?^{_CFStreamClientContext=l^v^?^?^?}', b'^{__CFRunLoopSource=}^{__CFURL=}^{__CFURL=}^?^{_CFStreamClientContext=q^v^?^?^?}'), '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^{__CFArray=}'}, 2: {'type': b'^{__CFError=}'}}}}}}), 'CFHTTPAuthenticationRequiresOrderedRequests': (b'Z^{_CFHTTPAuthentication=}',), 'CFHTTPAuthenticationCreateFromResponse': (b'^{_CFHTTPAuthentication=}^{__CFAllocator=}^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceBrowserGetTypeID': (sel32or64(b'L', b'Q'),), 'CFHostCreateWithName': (b'^{__CFHost=}^{__CFAllocator=}@', '', {'retval': {'already_cfretained': True}}), 'CFSocketStreamPairSetSecurityProtocol': (b'Z^{__CFReadStream=}^{__CFWriteStream=}i',), 'CFHostGetAddressing': (b'^{__CFArray=}^{__CFHost=}o^Z',), 'CFHTTPMessageSetBody': (b'v^{__CFHTTPMessage=}^{__CFData=}',), 'CFHTTPAuthenticationGetTypeID': (sel32or64(b'L', b'Q'),), 'CFNetDiagnosticCreateWithStreams': (b'^{__CFNetDiagnostic=}^{__CFAllocator=}^{__CFReadStream=}^{__CFWriteStream=}', '', {'retval': {'already_cfretained': True}}), 'CFSocketStreamSOCKSGetError': (sel32or64(b'l^{_CFStreamError=ll}', b'i^{_CFStreamError=qi}'), '', {'arguments': {0: {'type_modifier': 'o'}}}), 'CFHostCreateWithAddress': (b'^{__CFHost=}^{__CFAllocator=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceBrowserSearchForDomains': (sel32or64(b'Z^{__CFNetServiceBrowser=}Z^{_CFStreamError=ll}', b'Z^{__CFNetServiceBrowser=}Z^{_CFStreamError=qi}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFNetServiceUnscheduleFromRunLoop': (b'v^{__CFNetService=}^{__CFRunLoop=}@',), 'CFNetServiceMonitorStop': (sel32or64(b'v^{__CFNetServiceMonitor=}^{_CFStreamError=ll}', b'v^{__CFNetServiceMonitor=}^{_CFStreamError=qi}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFNetworkCopySystemProxySettings': (b'^{__CFDictionary}', '', {'retval': {'already_cfretained': True}}), 'CFHostGetReachability': (b'^{__CFData=}^{__CFHost=}o^Z',), 'CFHTTPMessageIsHeaderComplete': (b'Z^{__CFHTTPMessage=}',), 'CFHTTPMessageGetTypeID': (sel32or64(b'L', b'Q'),), 'CFNetServiceMonitorGetTypeID': (sel32or64(b'L', b'Q'),), 'CFNetServiceGetPortNumber': 
(sel32or64(b'l^{__CFNetService=}', b'i^{__CFNetService=}'),), 'CFHTTPMessageCreateRequest': (b'^{__CFHTTPMessage=}^{__CFAllocator=}@^{__CFURL=}@', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceCreateCopy': (b'^{__CFNetService=}^{__CFAllocator=}^{__CFNetService=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceGetName': (b'@^{__CFNetService=}',), 'CFHTTPMessageCopyRequestMethod': (b'@^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 'CFHTTPAuthenticationCopyRealm': (b'@^{_CFHTTPAuthentication=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceMonitorInvalidate': (b'v^{__CFNetServiceMonitor=}',), 'CFNetServiceGetProtocolSpecificInformation': (b'@^{__CFNetService=}',), 'CFNetServiceCancel': (b'v^{__CFNetService=}',), 'CFNetServiceMonitorCreate': (b'^{__CFNetServiceMonitor=}^{__CFAllocator=}^{__CFNetService=}^?^{CFNetServiceClientContext=l^v^?^?^?}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFNetServiceMonitor=}'}, 1: {'type': b'^{__CFNetService=}'}, 2: {'type': b'i'}, 3: {'type': b'^{__CFData=}'}, 4: {'type': b'^{CFStreamError=li}'}, 5: {'type': b'^v'}}}}}}), 'CFWriteStreamCreateWithFTPURL': (b'^{__CFWriteStream=}^{__CFAllocator=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceBrowserUnscheduleFromRunLoop': (b'v^{__CFNetServiceBrowser=}^{__CFRunLoop=}@',), 'CFHTTPMessageCopyRequestURL': (b'^{__CFURL=}^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 'CFHTTPMessageCopyVersion': (b'@^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceCreate': (sel32or64(b'^{__CFNetService=}^{__CFAllocator=}@@@l', b'^{__CFNetService=}^{__CFAllocator=}@@@i'), '', {'retval': {'already_cfretained': True}}), 'CFNetServiceMonitorUnscheduleFromRunLoop': (b'v^{__CFNetServiceMonitor=}^{__CFRunLoop=}@',), 'CFHostScheduleWithRunLoop': (b'v^{__CFHost=}^{__CFRunLoop=}@',), 'CFNetServiceMonitorStart': (sel32or64(b'Z^{__CFNetServiceMonitor=}i^{_CFStreamError=ll}', b'Z^{__CFNetServiceMonitor=}i^{_CFStreamError=qi}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFHostGetNames': (b'^{__CFArray=}^{__CFHost=}o^Z',), 'CFNetDiagnosticCopyNetworkStatusPassively': (sel32or64(b'l^{__CFNetDiagnostic=}^^{__CFString}', b'q^{__CFNetDiagnostic=}^^{__CFString}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFHTTPMessageCopyResponseStatusLine': (b'@^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 'CFNetDiagnosticDiagnoseProblemInteractively': (sel32or64(b'l^{__CFNetDiagnostic=}', b'q^{__CFNetDiagnostic=}'),), 'CFHTTPMessageAddAuthentication': (b'Z^{__CFHTTPMessage=}^{__CFHTTPMessage=}@@@Z',), 'CFNetDiagnosticCreateWithURL': (b'^{__CFNetDiagnostic=}^{__CFAllocator=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceGetDomain': (b'@^{__CFNetService=}',), 'CFFTPCreateParsedResourceListing': (sel32or64(b'l^{__CFAllocator=}^vl^^{__CFDictionary}', b'q^{__CFAllocator=}^vq^^{__CFDictionary}'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 3: {'type_modifier': 'o'}}}), 'CFNetServiceGetTypeID': (sel32or64(b'L', b'Q'),), 'CFHTTPMessageCreateEmpty': (b'^{__CFHTTPMessage=}^{__CFAllocator=}Z', '', {'retval': {'already_cfretained': True}}), 'CFHTTPMessageAppendBytes': (sel32or64(b'Z^{__CFHTTPMessage=}^vl', b'Z^{__CFHTTPMessage=}^vq'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFHostCancelInfoResolution': 
(b'v^{__CFHost=}i',), 'CFNetServiceBrowserSearchForServices': (sel32or64(b'Z^{__CFNetServiceBrowser=}@@^{_CFStreamError=ll}', b'Z^{__CFNetServiceBrowser=}@@^{_CFStreamError=qi}'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'CFHTTPAuthenticationIsValid': (sel32or64(b'Z^{_CFHTTPAuthentication=}^{_CFStreamError=ll}', b'Z^{_CFHTTPAuthentication=}^{_CFStreamError=qi}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFHTTPReadStreamSetProxy': (sel32or64(b'v^{__CFReadStream=}@l', b'v^{__CFReadStream=}@q'),), 'CFHTTPAuthenticationAppliesToRequest': (b'Z^{_CFHTTPAuthentication=}^{__CFHTTPMessage=}',), 'CFNetServiceBrowserInvalidate': (b'v^{__CFNetServiceBrowser=}',), 'CFNetServiceGetAddressing': (b'^{__CFArray=}^{__CFNetService=}',), 'CFHTTPMessageSetHeaderFieldValue': (b'v^{__CFHTTPMessage=}@@',), 'CFNetServiceBrowserStopSearch': (sel32or64(b'v^{__CFNetServiceBrowser=}^{_CFStreamError=ll}', b'v^{__CFNetServiceBrowser=}^{_CFStreamError=qi}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFHTTPMessageApplyCredentials': (sel32or64(b'Z^{__CFHTTPMessage=}^{_CFHTTPAuthentication=}@@^{_CFStreamError=ll}', b'Z^{__CFHTTPMessage=}^{_CFHTTPAuthentication=}@@^{_CFStreamError=qi}'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'CFHTTPReadStreamSetRedirectsAutomatically': (b'v^{__CFReadStream=}Z',), 'CFNetServiceRegisterWithOptions': (sel32or64(b'Z^{__CFNetService=}L^{_CFStreamError=ll}', b'Z^{__CFNetService=}Q^{_CFStreamError=qi}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFHTTPAuthenticationCopyMethod': (b'@^{_CFHTTPAuthentication=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceBrowserCreate': (b'^{__CFNetServiceBrowser=}^{__CFAllocator=}^?^{CFNetServiceClientContext=l^v^?^?^?}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFNetServiceBrowser=}'}, 1: {'type': b'L'}, 2: {'type': b'@'}, 3: {'type': b'^{CFStreamError=li}'}, 4: {'type': b'^v'}}}}}}), 'CFNetServiceGetTargetHost': (b'@^{__CFNetService=}',), 'CFNetworkCopyProxiesForAutoConfigurationScript': (b'^{__CFArray=}@^{__CFURL=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFNetServiceCreateDictionaryWithTXTData': (b'^{__CFDictionary=}^{__CFAllocator=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceRegister': (sel32or64(b'Z^{__CFNetService=}^{_CFStreamError=ll}', b'Z^{__CFNetService=}^{_CFStreamError=qi}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFHTTPMessageCopySerializedMessage': (b'^{__CFData=}^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 'CFHostCreateCopy': (b'^{__CFHost=}^{__CFAllocator=}^{__CFHost=}', '', {'retval': {'already_cfretained': True}}), 'CFHTTPAuthenticationRequiresAccountDomain': (b'Z^{_CFHTTPAuthentication=}',), 'CFNetServiceSetTXTData': (b'Z^{__CFNetService=}^{__CFData=}',), 'CFNetworkCopyProxiesForURL': (b'^{__CFArray=}^{__CFURL=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CFReadStreamCreateForStreamedHTTPRequest': (b'^{__CFReadStream=}^{__CFAllocator=}^{__CFHTTPMessage=}^{__CFReadStream=}', '', {'retval': {'already_cfretained': True}}), 'CFReadStreamCreateForHTTPRequest': (b'^{__CFReadStream=}^{__CFAllocator=}^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 'CFHTTPMessageCreateCopy': (b'^{__CFHTTPMessage=}^{__CFAllocator=}^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 
'CFNetServiceGetType': (b'@^{__CFNetService=}',), 'CFNetServiceScheduleWithRunLoop': (b'v^{__CFNetService=}^{__CFRunLoop=}@',), 'CFHTTPMessageCopyHeaderFieldValue': (b'@^{__CFHTTPMessage=}@', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceGetTXTData': (b'^{__CFData=}^{__CFNetService=}',), 'CFHostStartInfoResolution': (sel32or64(b'Z^{__CFHost=}i^{_CFStreamError=ll}', b'Z^{__CFHost=}i^{_CFStreamError=qi}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFHTTPAuthenticationRequiresUserNameAndPassword': (b'Z^{_CFHTTPAuthentication=}',), 'CFNetDiagnosticSetName': (b'v^{__CFNetDiagnostic=}@',), 'CFNetServiceCreateTXTDataWithDictionary': (b'^{__CFData=}^{__CFAllocator=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CFReadStreamCreateWithFTPURL': (b'^{__CFReadStream=}^{__CFAllocator=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceSetClient': (b'Z^{__CFNetService=}^?^{CFNetServiceClientContext=l^v^?^?^?}', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFNetService=}'}, 1: {'type': b'^{CFStreamError=li}'}, 2: {'type': b'^v'}}}}}}), 'CFNetServiceMonitorScheduleWithRunLoop': (b'v^{__CFNetServiceMonitor=}^{__CFRunLoop=}@',), 'CFHostUnscheduleFromRunLoop': (b'v^{__CFHost=}^{__CFRunLoop=}@',), 'CFHTTPMessageApplyCredentialDictionary': (sel32or64(b'Z^{__CFHTTPMessage=}^{_CFHTTPAuthentication=}^{__CFDictionary=}^{_CFStreamError=ll}', b'Z^{__CFHTTPMessage=}^{_CFHTTPAuthentication=}^{__CFDictionary=}^{_CFStreamError=qi}'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'CFHTTPMessageIsRequest': (b'Z^{__CFHTTPMessage=}',), 'CFNetServiceResolve': (sel32or64(b'Z^{__CFNetService=}^{_CFStreamError=ll}', b'Z^{__CFNetService=}^{_CFStreamError=qi}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFHTTPMessageCopyBody': (b'^{__CFData=}^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceBrowserScheduleWithRunLoop': (b'v^{__CFNetServiceBrowser=}^{__CFRunLoop=}@',), 'CFHTTPMessageCreateResponse': (sel32or64(b'^{__CFHTTPMessage=}^{__CFAllocator=}l@@', b'^{__CFHTTPMessage=}^{__CFAllocator=}q@@'), '', {'retval': {'already_cfretained': True}}), 'CFHostGetTypeID': (sel32or64(b'L', b'Q'),), 'CFHTTPMessageCopyAllHeaderFields': (b'^{__CFDictionary=}^{__CFHTTPMessage=}', '', {'retval': {'already_cfretained': True}}), 'CFHTTPMessageGetResponseStatusCode': (sel32or64(b'l^{__CFHTTPMessage=}', b'q^{__CFHTTPMessage=}'),), 'CFHTTPAuthenticationCopyDomains': (b'^{__CFArray=}^{_CFHTTPAuthentication=}', '', {'retval': {'already_cfretained': True}}), 'CFNetServiceSetProtocolSpecificInformation': (b'v^{__CFNetService=}@',), 'CFSocketStreamSOCKSGetErrorSubdomain': (sel32or64(b'l^{_CFStreamError=ll}', b'i^{_CFStreamError=qi}'), '', {'arguments': {0: {'type_modifier': 'o'}}}), 'CFStreamCreatePairWithSocketToCFHost': (sel32or64(b'v^{__CFAllocator=}^{__CFHost=}l^^{__CFReadStream}^^{__CFWriteStream}', b'v^{__CFAllocator=}^{__CFHost=}i^^{__CFReadStream}^^{__CFWriteStream}'), '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}, 4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CFStreamCreatePairWithSocketToNetService': (b'v^{__CFAllocator=}^{__CFNetService=}^^{__CFReadStream}^^{__CFWriteStream}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}, 3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CFNetServiceResolveWithTimeout': (sel32or64(b'Z^{__CFNetService=}d^{_CFStreamError=ll}', b'Z^{__CFNetService=}d^{_CFStreamError=qi}'), '', 
{'arguments': {2: {'type_modifier': 'o'}}}), 'CFHostSetClient': (b'Z^{__CFHost=}^?^{CFHostClientContext=l^v^?^?^?}', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFHost=}'}, 1: {'type': b'i'}, 2: {'type': b'^{CFStreamError=li}'}, 3: {'type': b'^v'}}}}}})} -cftypes=[('CFHTTPMessageRef', b'^{__CFHTTPMessage=}', 'CFHTTPMessageGetTypeID', None), ('CFHostRef', b'^{__CFHost=}', 'CFHostGetTypeID', None), ('CFNetDiagnosticRef', b'^{__CFNetDiagnostic=}', 'CFNetDiagnosticGetTypeID', None), ('CFNetServiceBrowserRef', b'^{__CFNetServiceBrowser=}', 'CFNetServiceBrowserGetTypeID', None), ('CFNetServiceMonitorRef', b'^{__CFNetServiceMonitor=}', 'CFNetServiceMonitorGetTypeID', None), ('CFNetServiceRef', b'^{__CFNetService=}', 'CFNetServiceGetTypeID', None), ('CFHTTPAuthenticationRef', b'^{_CFHTTPAuthentication}', 'CFHTTPAuthenticationGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CFNetwork/_metadata.pyc b/env/lib/python2.7/site-packages/CFNetwork/_metadata.pyc deleted file mode 100644 index 0ca1c768..00000000 Binary files a/env/lib/python2.7/site-packages/CFNetwork/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CFOpenDirectory/__init__.py b/env/lib/python2.7/site-packages/CFOpenDirectory/__init__.py deleted file mode 100644 index b23daec1..00000000 --- a/env/lib/python2.7/site-packages/CFOpenDirectory/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the CFOpenDirectory framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import CoreFoundation -import Foundation - -from CFOpenDirectory import _metadata - -# This actually loads the OpenDirectory framework instead of the embedded CFOpenDirectory framework -sys.modules['CFOpenDirectory'] = mod = objc.ObjCLazyModule('CFOpenDirectory', - "com.apple.OpenDirectory", - objc.pathForFramework("/System/Library/Frameworks/OpenDirectory.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( CoreFoundation, Foundation,)) - -import sys -del sys.modules['CFOpenDirectory._metadata'] diff --git a/env/lib/python2.7/site-packages/CFOpenDirectory/__init__.pyc b/env/lib/python2.7/site-packages/CFOpenDirectory/__init__.pyc deleted file mode 100644 index 52f6a533..00000000 Binary files a/env/lib/python2.7/site-packages/CFOpenDirectory/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CFOpenDirectory/_metadata.py b/env/lib/python2.7/site-packages/CFOpenDirectory/_metadata.py deleted file mode 100644 index cc4dc6be..00000000 --- a/env/lib/python2.7/site-packages/CFOpenDirectory/_metadata.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jun 11 10:49:58 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$kODAuthenticationTypeClearTextReadOnly$kODAttributeTypeAccessControlEntry$kODAttributeTypeAddressLine1$kODAttributeTypeAddressLine2$kODAttributeTypeAddressLine3$kODAttributeTypeAdminLimits$kODAttributeTypeAdvertisedServices$kODAttributeTypeAlias$kODAttributeTypeAllAttributes$kODAttributeTypeAllTypes$kODAttributeTypeAltSecurityIdentities$kODAttributeTypeAreaCode$kODAttributeTypeAttrListRefCount$kODAttributeTypeAttrListRefs$kODAttributeTypeAttrListValueRefCount$kODAttributeTypeAttrListValueRefs$kODAttributeTypeAuthCredential$kODAttributeTypeAuthMethod$kODAttributeTypeAuthenticationAuthority$kODAttributeTypeAuthenticationHint$kODAttributeTypeAuthorityRevocationList$kODAttributeTypeAutomaticSearchPath$kODAttributeTypeAutomountInformation$kODAttributeTypeBirthday$kODAttributeTypeBootParams$kODAttributeTypeBuildVersion$kODAttributeTypeBuilding$kODAttributeTypeCACertificate$kODAttributeTypeCapacity$kODAttributeTypeCertificateRevocationList$kODAttributeTypeCity$kODAttributeTypeComment$kODAttributeTypeCompany$kODAttributeTypeComputers$kODAttributeTypeConfigAvailable$kODAttributeTypeConfigFile$kODAttributeTypeContactGUID$kODAttributeTypeContactPerson$kODAttributeTypeCopyTimestamp$kODAttributeTypeCoreFWVersion$kODAttributeTypeCountry$kODAttributeTypeCreationTimestamp$kODAttributeTypeCrossCertificatePair$kODAttributeTypeCustomSearchPath$kODAttributeTypeDNSDomain$kODAttributeTypeDNSName$kODAttributeTypeDNSNameServer$kODAttributeTypeDataStamp$kODAttributeTypeDateRecordCreated$kODAttributeTypeDepartment$kODAttributeTypeDirRefCount$kODAttributeTypeDirRefs$kODAttributeTypeEMailAddress$kODAttributeTypeEMailContacts$kODAttributeTypeENetAddress$kODAttributeTypeExpire$kODAttributeTypeFWVersion$kODAttributeTypeFaxNumber$kODAttributeTypeFirstName$kODAttributeTypeFullName$kODAttributeTypeFunctionalState$kODAttributeTypeGUID$kODAttributeTypeGroup$kODAttributeTypeGroupMembers$kODAttributeTypeGroupMembership$kODAttributeTypeGroupServices$kODAttributeTypeHTML$kODAttributeTypeHardwareUUID$kODAttributeTypeHomeDirectory$kODAttributeTypeHomeDirectoryQuota$kODAttributeTypeHomeDirectorySoftQuota$kODAttributeTypeHomeLocOwner$kODAttributeTypeHomePhoneNumber$kODAttributeTypeIMHandle$kODAttributeTypeIPAddress$kODAttributeTypeIPAddressAndENetAddress$kODAttributeTypeIPv6Address$kODAttributeTypeInternetAlias$kODAttributeTypeJPEGPhoto$kODAttributeTypeJobTitle$kODAttributeTypeKDCAuthKey$kODAttributeTypeKDCConfigData$kODAttributeTypeKerberosRealm$kODAttributeTypeKerberosServices$kODAttributeTypeKeywords$kODAttributeTypeLDAPReadReplicas$kODAttributeTypeLDAPSearchBaseSuffix$kODAttributeTypeLDAPWriteReplicas$kODAttributeTypeLastName$kODAttributeTypeLocalOnlySearchPath$kODAttributeTypeLocaleRelay$kODAttributeTypeLocaleSubnets$kODAttributeTypeLocation$kODAttributeTypeMCXFlags$kODAttributeTypeMCXSettings$kODAttributeTypeMIME$kODAttributeTypeMailAttribute$kODAttributeTypeMapCoordinates$kODAttributeTypeMapGUID$kODAttributeTypeMapURI$kODAttributeTypeMetaAmbiguousName$kODAttributeTypeMetaAugmentedAttributes$kODAttributeTypeMetaAutomountMap$kODAttributeTypeMetaNodeLocation$kODAttributeTypeMetaRecordName$kODAttributeTypeMiddleName$kODAttributeTypeMobileNumber$kODAttributeTypeModificationTimestamp$kODAttributeTypeNFSHomeDirectory$kODAttributeTypeNTDomainComputerAccount$kODAttributeTypeNamePrefix$kODAttributeTypeNameSuffix$kODAttributeTypeNativeOnly$kODAttributeTypeNestedGroups$kODAttributeTypeNetGroupTriplet$kODAttributeTypeNetGroups$kODAttributeTypeNetworkInterfaces$kODAttributeTypeNetworkNumber$kODAttributeTypeNickName$kODAttributeTypeNodeO
ptions$kODAttributeTypeNodePath$kODAttributeTypeNodeRefCount$kODAttributeTypeNodeRefs$kODAttributeTypeNodeSASLRealm$kODAttributeTypeNote$kODAttributeTypeNumTableList$kODAttributeTypeOperatingSystem$kODAttributeTypeOperatingSystemVersion$kODAttributeTypeOrganizationInfo$kODAttributeTypeOrganizationName$kODAttributeTypeOriginalHomeDirectory$kODAttributeTypeOriginalNFSHomeDirectory$kODAttributeTypeOriginalNodeName$kODAttributeTypeOwner$kODAttributeTypeOwnerGUID$kODAttributeTypePGPPublicKey$kODAttributeTypePIDValue$kODAttributeTypePagerNumber$kODAttributeTypeParentLocales$kODAttributeTypePassword$kODAttributeTypePasswordPlus$kODAttributeTypePasswordPolicyOptions$kODAttributeTypePasswordServerList$kODAttributeTypePasswordServerLocation$kODAttributeTypePhoneContacts$kODAttributeTypePhoneNumber$kODAttributeTypePicture$kODAttributeTypePlugInInfo$kODAttributeTypePluginIndex$kODAttributeTypePort$kODAttributeTypePostalAddress$kODAttributeTypePostalAddressContacts$kODAttributeTypePostalCode$kODAttributeTypePresetUserIsAdmin$kODAttributeTypePrimaryComputerGUID$kODAttributeTypePrimaryComputerList$kODAttributeTypePrimaryGroupID$kODAttributeTypePrimaryLocale$kODAttributeTypePrimaryNTDomain$kODAttributeTypePrintServiceInfoText$kODAttributeTypePrintServiceInfoXML$kODAttributeTypePrintServiceUserData$kODAttributeTypePrinter1284DeviceID$kODAttributeTypePrinterLPRHost$kODAttributeTypePrinterLPRQueue$kODAttributeTypePrinterMakeAndModel$kODAttributeTypePrinterType$kODAttributeTypePrinterURI$kODAttributeTypePrinterXRISupported$kODAttributeTypeProcessName$kODAttributeTypeProfiles$kODAttributeTypeProfilesTimestamp$kODAttributeTypeProtocolNumber$kODAttributeTypeProtocols$kODAttributeTypePwdAgingPolicy$kODAttributeTypeRPCNumber$kODAttributeTypeReadOnlyNode$kODAttributeTypeRealUserID$kODAttributeTypeRecRefCount$kODAttributeTypeRecRefs$kODAttributeTypeRecordName$kODAttributeTypeRecordType$kODAttributeTypeRelationships$kODAttributeTypeRelativeDNPrefix$kODAttributeTypeResourceInfo$kODAttributeTypeResourceType$kODAttributeTypeSMBAcctFlags$kODAttributeTypeSMBGroupRID$kODAttributeTypeSMBHome$kODAttributeTypeSMBHomeDrive$kODAttributeTypeSMBKickoffTime$kODAttributeTypeSMBLogoffTime$kODAttributeTypeSMBLogonTime$kODAttributeTypeSMBPWDLastSet$kODAttributeTypeSMBPrimaryGroupSID$kODAttributeTypeSMBProfilePath$kODAttributeTypeSMBRID$kODAttributeTypeSMBSID$kODAttributeTypeSMBScriptPath$kODAttributeTypeSMBUserWorkstations$kODAttributeTypeSchema$kODAttributeTypeSearchPath$kODAttributeTypeSearchPolicy$kODAttributeTypeServiceType$kODAttributeTypeServicesLocator$kODAttributeTypeSetupAdvertising$kODAttributeTypeSetupAutoRegister$kODAttributeTypeSetupLocation$kODAttributeTypeSetupOccupation$kODAttributeTypeStandardOnly$kODAttributeTypeState$kODAttributeTypeStreet$kODAttributeTypeSubNodes$kODAttributeTypeTimePackage$kODAttributeTypeTimeToLive$kODAttributeTypeTotalRefCount$kODAttributeTypeTotalSize$kODAttributeTypeTrustInformation$kODAttributeTypeURL$kODAttributeTypeUniqueID$kODAttributeTypeUserCertificate$kODAttributeTypeUserPKCS12Data$kODAttributeTypeUserSMIMECertificate$kODAttributeTypeUserShell$kODAttributeTypeVFSDumpFreq$kODAttributeTypeVFSLinkDir$kODAttributeTypeVFSOpts$kODAttributeTypeVFSPassNo$kODAttributeTypeVFSType$kODAttributeTypeVersion$kODAttributeTypeWeblogURI$kODAttributeTypeXMLPlist$kODAuthenticationType2WayRandom$kODAuthenticationType2WayRandomChangePasswd$kODAuthenticationTypeAPOP$kODAuthenticationTypeCRAM_MD5$kODAuthenticationTypeChangePasswd$kODAuthenticationTypeClearText$kODAuthenticationTypeCrypt$kODAuthenticationTypeDIG
EST_MD5$kODAuthenticationTypeDeleteUser$kODAuthenticationTypeGetEffectivePolicy$kODAuthenticationTypeGetGlobalPolicy$kODAuthenticationTypeGetKerberosPrincipal$kODAuthenticationTypeGetPolicy$kODAuthenticationTypeGetUserData$kODAuthenticationTypeGetUserName$kODAuthenticationTypeKerberosTickets$kODAuthenticationTypeMPPEMasterKeys$kODAuthenticationTypeMSCHAP2$kODAuthenticationTypeNTLMv2$kODAuthenticationTypeNTLMv2WithSessionKey$kODAuthenticationTypeNewUser$kODAuthenticationTypeNewUserWithPolicy$kODAuthenticationTypeNodeNativeClearTextOK$kODAuthenticationTypeNodeNativeNoClearText$kODAuthenticationTypeReadSecureHash$kODAuthenticationTypeSMBNTv2UserSessionKey$kODAuthenticationTypeSMBWorkstationCredentialSessionKey$kODAuthenticationTypeSMB_LM_Key$kODAuthenticationTypeSMB_NT_Key$kODAuthenticationTypeSMB_NT_UserSessionKey$kODAuthenticationTypeSMB_NT_WithUserSessionKey$kODAuthenticationTypeSecureHash$kODAuthenticationTypeSetCertificateHashAsCurrent$kODAuthenticationTypeSetGlobalPolicy$kODAuthenticationTypeSetLMHash$kODAuthenticationTypeSetNTHash$kODAuthenticationTypeSetPassword$kODAuthenticationTypeSetPasswordAsCurrent$kODAuthenticationTypeSetPolicy$kODAuthenticationTypeSetPolicyAsCurrent$kODAuthenticationTypeSetUserData$kODAuthenticationTypeSetUserName$kODAuthenticationTypeSetWorkstationPassword$kODAuthenticationTypeWithAuthorizationRef$kODAuthenticationTypeWriteSecureHash$kODErrorDomainFramework@^{__CFString=}$kODModuleConfigOptionConnectionIdleDisconnect@^{__CFString=}$kODModuleConfigOptionConnectionSetupTimeout@^{__CFString=}$kODModuleConfigOptionManInTheMiddle@^{__CFString=}$kODModuleConfigOptionPacketEncryption@^{__CFString=}$kODModuleConfigOptionPacketSigning@^{__CFString=}$kODModuleConfigOptionQueryTimeout@^{__CFString=}$kODNodeOptionsQuerySkippedSubnode@^{__CFString=}$kODPolicyAttributeCreationTime$kODPolicyAttributeCurrentDate$kODPolicyAttributeCurrentDayOfWeek$kODPolicyAttributeCurrentTime$kODPolicyAttributeCurrentTimeOfDay$kODPolicyAttributeDaysUntilExpiration$kODPolicyAttributeEnableAtTimeOfDay$kODPolicyAttributeEnableOnDate$kODPolicyAttributeEnableOnDayOfWeek$kODPolicyAttributeExpiresAtTimeOfDay$kODPolicyAttributeExpiresEveryNDays$kODPolicyAttributeExpiresOnDate$kODPolicyAttributeExpiresOnDayOfWeek$kODPolicyAttributeFailedAuthentications$kODPolicyAttributeLastAuthenticationTime$kODPolicyAttributeLastFailedAuthenticationTime$kODPolicyAttributeLastPasswordChangeTime$kODPolicyAttributeMaximumFailedAuthentications$kODPolicyAttributeNewPasswordRequiredTime$kODPolicyAttributePassword$kODPolicyAttributePasswordHashes$kODPolicyAttributePasswordHistory$kODPolicyAttributePasswordHistoryDepth$kODPolicyAttributeRecordName$kODPolicyAttributeRecordType$kODPolicyCategoryAuthentication$kODPolicyCategoryPasswordChange$kODPolicyCategoryPasswordContent$kODPolicyKeyContent$kODPolicyKeyContentDescription$kODPolicyKeyEvaluationDetails$kODPolicyKeyIdentifier$kODPolicyKeyParameters$kODPolicyKeyPolicySatisfied$kODPolicyTypeAccountExpiresOnDate@^{__CFString=}$kODPolicyTypeAccountMaximumFailedLogins@^{__CFString=}$kODPolicyTypeAccountMaximumMinutesOfNonUse@^{__CFString=}$kODPolicyTypeAccountMaximumMinutesUntilDisabled@^{__CFString=}$kODPolicyTypeAccountMinutesUntilFailedLoginReset@^{__CFString=}$kODPolicyTypePasswordCannotBeAccountName@^{__CFString=}$kODPolicyTypePasswordChangeRequired@^{__CFString=}$kODPolicyTypePasswordHistory@^{__CFString=}$kODPolicyTypePasswordMaximumAgeInMinutes@^{__CFString=}$kODPolicyTypePasswordMaximumNumberOfCharacters@^{__CFString=}$kODPolicyTypePasswordMinimumNumberOfCharacters@^{__CFSt
ring=}$kODPolicyTypePasswordRequiresAlpha@^{__CFString=}$kODPolicyTypePasswordRequiresMixedCase@^{__CFString=}$kODPolicyTypePasswordRequiresNumeric@^{__CFString=}$kODPolicyTypePasswordRequiresSymbol@^{__CFString=}$kODPolicyTypePasswordSelfModification@^{__CFString=}$kODRecordTypeAFPServer$kODRecordTypeAliases$kODRecordTypeAttributeTypes$kODRecordTypeAugments$kODRecordTypeAutoServerSetup$kODRecordTypeAutomount$kODRecordTypeAutomountMap$kODRecordTypeBootp$kODRecordTypeCertificateAuthorities$kODRecordTypeComputerGroups$kODRecordTypeComputerLists$kODRecordTypeComputers$kODRecordTypeConfiguration$kODRecordTypeEthernets$kODRecordTypeFTPServer$kODRecordTypeFileMakerServers$kODRecordTypeGroups$kODRecordTypeHostServices$kODRecordTypeHosts$kODRecordTypeLDAPServer$kODRecordTypeLocations$kODRecordTypeMounts$kODRecordTypeNFS$kODRecordTypeNetDomains$kODRecordTypeNetGroups$kODRecordTypeNetworks$kODRecordTypePeople$kODRecordTypePresetComputerGroups$kODRecordTypePresetComputerLists$kODRecordTypePresetComputers$kODRecordTypePresetGroups$kODRecordTypePresetUsers$kODRecordTypePrintService$kODRecordTypePrintServiceUser$kODRecordTypePrinters$kODRecordTypeProtocols$kODRecordTypeQTSServer$kODRecordTypeQueryInformation$kODRecordTypeRPC$kODRecordTypeRecordTypes$kODRecordTypeResources$kODRecordTypeSMBServer$kODRecordTypeServer$kODRecordTypeServices$kODRecordTypeSharePoints$kODRecordTypeUsers$kODRecordTypeWebServer$kODSessionDefault@^{__ODSession=}$kODSessionProxyAddress@^{__CFString=}$kODSessionProxyPassword@^{__CFString=}$kODSessionProxyPort@^{__CFString=}$kODSessionProxyUsername@^{__CFString=}$''' -enums = '''$kODErrorCredentialsAccountDisabled@5301$kODErrorCredentialsAccountExpired@5302$kODErrorCredentialsAccountInactive@5303$kODErrorCredentialsAccountNotFound@5300$kODErrorCredentialsContactMaster@5204$kODErrorCredentialsInvalid@5000$kODErrorCredentialsInvalidComputer@5501$kODErrorCredentialsInvalidLogonHours@5500$kODErrorCredentialsMethodNotSupported@5100$kODErrorCredentialsNotAuthorized@5101$kODErrorCredentialsOperationFailed@5103$kODErrorCredentialsParameterError@5102$kODErrorCredentialsPasswordChangeRequired@5401$kODErrorCredentialsPasswordChangeTooSoon@5407$kODErrorCredentialsPasswordExpired@5400$kODErrorCredentialsPasswordNeedsDigit@5406$kODErrorCredentialsPasswordNeedsLetter@5405$kODErrorCredentialsPasswordQualityFailed@5402$kODErrorCredentialsPasswordTooLong@5404$kODErrorCredentialsPasswordTooShort@5403$kODErrorCredentialsPasswordUnrecoverable@5408$kODErrorCredentialsServerCommunicationError@5205$kODErrorCredentialsServerError@5202$kODErrorCredentialsServerNotFound@5201$kODErrorCredentialsServerTimeout@5203$kODErrorCredentialsServerUnreachable@5200$kODErrorDaemonError@10002$kODErrorNodeConnectionFailed@2100$kODErrorNodeDisabled@2002$kODErrorNodeUnknownHost@2200$kODErrorNodeUnknownName@2000$kODErrorNodeUnknownType@2001$kODErrorPluginError@10001$kODErrorPluginOperationNotSupported@10000$kODErrorPluginOperationTimeout@10003$kODErrorPolicyOutOfRange@6001$kODErrorPolicyUnsupported@6000$kODErrorQueryInvalidMatchType@3100$kODErrorQuerySynchronize@3000$kODErrorQueryTimeout@3102$kODErrorQueryUnsupportedMatchType@3101$kODErrorRecordAlreadyExists@4102$kODErrorRecordAttributeNotFound@4201$kODErrorRecordAttributeUnknownType@4200$kODErrorRecordAttributeValueNotFound@4203$kODErrorRecordAttributeValueSchemaError@4202$kODErrorRecordInvalidType@4101$kODErrorRecordNoLongerExists@4104$kODErrorRecordParameterError@4100$kODErrorRecordPermissionError@4001$kODErrorRecordReadOnlyNode@4000$kODErrorRecordTypeDisabled@4103$kODErrorSe
ssionDaemonNotRunning@1002$kODErrorSessionDaemonRefused@1003$kODErrorSessionLocalOnlyDaemonInUse@1000$kODErrorSessionNormalDaemonInUse@1001$kODErrorSessionProxyCommunicationError@1100$kODErrorSessionProxyIPUnreachable@1102$kODErrorSessionProxyUnknownHost@1103$kODErrorSessionProxyVersionMismatch@1101$kODErrorSuccess@0$kODExpirationTimeExpired@0$kODExpirationTimeNeverExpires@-1$kODMatchAny@1$kODMatchBeginsWith@8194$kODMatchContains@8196$kODMatchEndsWith@8195$kODMatchEqualTo@8193$kODMatchGreaterThan@8198$kODMatchInsensitiveBeginsWith@8450$kODMatchInsensitiveContains@8452$kODMatchInsensitiveEndsWith@8451$kODMatchInsensitiveEqualTo@8449$kODMatchLessThan@8199$kODNodeTypeAuthentication@8705$kODNodeTypeConfigure@8706$kODNodeTypeContacts@8708$kODNodeTypeLocalNodes@8704$kODNodeTypeNetwork@8709$''' -misc.update({}) -functions={'ODNodeCopySubnodeNames': (b'^{__CFArray=}^{_ODNode=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordVerifyPasswordExtended': (b'B^{_ODRecord=}@^{__CFArray=}^^{__CFArray}^^{_ODContext}^^{__CFError}', '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}, 5: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeCustomFunction': (b'@^{__ODNode=}^{__CFString=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordSetPolicy': (b'B^{__ODRecord=}^{__CFString=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeCreateCopy': (b'^{_ODNode=}^{__CFAllocator=}^{_ODNode=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeGetTypeID': (sel32or64(b'L', b'Q'),), 'ODNodeCustomCall': (sel32or64(b'^{__CFData=}^{_ODNode=}l^{__CFData=}^^{__CFError}', b'^{__CFData=}^{_ODNode=}q^{__CFData=}^^{__CFError}'), '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordDelete': (b'B^{_ODRecord=}^^{__CFError}', '', {'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordSetNodeCredentials': (b'B^{_ODRecord=}^{__CFString=}^{__CFString=}^^{__CFError}', '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeCopySupportedPolicies': (b'^{__CFDictionary=}^{__ODNode=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordCopySupportedPolicies': (b'^{__CFDictionary=}^{__ODRecord=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordGetRecordName': (b'^{__CFString=}^{_ODRecord=}',), 'ODNodeRemovePolicy': (b'B^{__ODNode=}^{__CFString=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeCopyUnreachableSubnodeNames': (b'^{__CFArray=}^{_ODNode=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordSecondsUntilAuthenticationsExpire': (b'q^{__ODRecord=}',), 'ODQuerySetCallback': (b'v^{_ODQuery=}^?^v', '', {'arguments': {1: {'callable': {'retval': {'type': 
b'v'}, 'arguments': {0: {'type': b'^{__ODQuery=}'}, 1: {'type': b'@'}, 2: {'type': b'^{__CFError=}'}, 3: {'type': b'^v'}}}}}}), 'ODNodeSetCredentialsExtended': (b'B^{_ODNode=}@@^{__CFArray=}^^{__CFArray}^^{_ODContext}^^{__CFError}', '', {'arguments': {6: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordPasswordChangeAllowed': (b'B^{__ODRecord=}^{__CFString=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordSetPolicies': (b'B^{__ODRecord=}^{__CFDictionary=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodePasswordContentCheck': (b'B^{__ODNode=}^{__CFString=}^{__CFString=}^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODQuerySynchronize': (b'v^{_ODQuery=}',), 'ODRecordSecondsUntilPasswordExpires': (b'q^{__ODRecord=}',), 'ODSessionCopyNodeNames': (b'^{__CFArray=}^{__CFAllocator=}^{_ODSessionRef=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeCopySupportedAttributes': (b'^{__CFArray=}^{_ODNode=}@^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordGetRecordType': (b'^{__CFString=}^{_ODRecord=}',), 'ODRecordAddMember': (b'B^{_ODRecord=}^{_ODRecord=}^^{__CFError}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordCopyAccountPolicies': (b'^{__CFDictionary=}^{__ODRecord=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeCopyRecord': (b'^{_ODRecord=}^{_ODNode=}@^{__CFString=}@^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODQueryScheduleWithRunLoop': (b'v^{_ODQuery=}^{__CFRunLoop=}^{__CFString=}',), 'ODNodeGetName': (b'^{__CFString=}^{_ODNode=}',), 'ODSessionCreate': (b'^{_ODSessionRef=}^{__CFAllocator=}^{__CFDictionary=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordContainsMember': (b'B^{_ODRecord=}^{_ODRecord=}^^{__CFError}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordAddAccountPolicy': (b'B^{__ODRecord=}^{__CFDictionary=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordRemovePolicy': (b'B^{__ODRecord=}^{__CFString=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODQueryCreateWithNodeType': (sel32or64(b'^{_ODQuery=}^{__CFAllocator=}I@@I@@l^^{__CFError}', b'^{_ODQuery=}^{__CFAllocator=}I@@I@@q^^{__CFError}'), '', {'retval': {'already_cfretained': True}, 'arguments': {8: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeCopySupportedRecordTypes': (b'^{__CFArray=}^{_ODNode=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordWillPasswordExpire': (b'B^{__ODRecord=}Q',), 
'ODQuerySetDispatchQueue': (b'v^{_ODQuery=}^{dispatch_queue_s=}',), 'ODRecordVerifyPassword': (b'B^{_ODRecord=}^{__CFString=}^^{__CFError}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeCopyDetails': (b'^{__CFDictionary=}^{_ODNode=}^{__CFArray=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODQueryCreateWithNode': (sel32or64(b'^{_ODQuery=}^{__CFAllocator=}^{_ODNode=}@@I@@l^^{__CFError}', b'^{_ODQuery=}^{__CFAllocator=}^{_ODNode=}@@I@@q^^{__CFError}'), '', {'retval': {'already_cfretained': True}, 'arguments': {8: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODContextGetTypeID': (sel32or64(b'L', b'Q'),), 'ODNodeRemoveAccountPolicy': (b'B^{__ODNode=}^{__CFDictionary=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeCopyAccountPolicies': (b'^{__CFDictionary=}^{__ODNode=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordCopyPasswordPolicy': (b'^{__CFDictionary=}^{__CFAllocator=}^{_ODRecord=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeSetCredentials': (b'B^{_ODNode=}@^{__CFString=}^{__CFString=}^^{__CFError}', '', {'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeCreateRecord': (b'^{_ODRecord=}^{_ODNode=}@^{__CFString=}^{__CFDictionary=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordGetTypeID': (sel32or64(b'L', b'Q'),), 'ODNodeSetAccountPolicies': (b'B^{__ODNode=}^{__CFDictionary=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordCopyValues': (b'^{__CFArray=}^{_ODRecord=}@^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordSetValue': (b'B^{_ODRecord=}@@^^{__CFError}', '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeCopyPolicies': (b'^{__CFDictionary=}^{__ODNode=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeSetPolicy': (b'B^{__ODNode=}^{__CFString=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordSetNodeCredentialsExtended': (b'B^{_ODRecord=}@@^{__CFArray=}^^{__CFArray}^^{_ODContext}^^{__CFError}', '', {'arguments': {4: {'type_modifier': 'o'}, 5: {'type_modifier': 'o'}, 6: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODQueryGetTypeID': (sel32or64(b'L', b'Q'),), 'ODRecordSetNodeCredentialsUsingKerberosCache': (b'B^{_ODRecord=}^{__CFString=}^^{__CFError}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeCreateWithName': (b'^{_ODNode=}^{__CFAllocator=}^{_ODSessionRef=}^{__CFString=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 
'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeSetPolicies': (b'B^{__ODNode=}^{__CFDictionary=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordRemoveAccountPolicy': (b'B^{__ODRecord=}^{__CFDictionary=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordRemoveValue': (b'B^{_ODRecord=}@@^^{__CFError}', '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODSessionGetTypeID': (sel32or64(b'L', b'Q'),), 'ODRecordCopyDetails': (b'^{__CFDictionary=}^{_ODRecord=}^{__CFArray=}^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeSetCredentialsUsingKerberosCache': (b'B^{_ODNode=}^{__CFString=}^^{__CFError}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordChangePassword': (b'B^{_ODRecord=}^{__CFString=}^{__CFString=}^^{__CFError}', '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODQueryCopyResults': (b'^{__CFArray=}^{_ODQuery=}B^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordSynchronize': (b'B^{_ODRecord=}^^{__CFError}', '', {'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordRemoveMember': (b'B^{_ODRecord=}^{_ODRecord=}^^{__CFError}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODNodeCreateWithNodeType': (b'^{_ODNode=}^{__CFAllocator=}^{_ODSessionRef=}I^^{__CFError}', '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordWillAuthenticationsExpire': (b'B^{__ODRecord=}Q',), 'ODRecordCopyEffectivePolicies': (b'^{__CFDictionary=}^{__ODRecord=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordCopyPolicies': (b'^{__CFDictionary=}^{__ODRecord=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODQueryUnscheduleFromRunLoop': (b'v^{_ODQuery=}^{__CFRunLoop=}^{__CFString=}',), 'ODRecordSetAccountPolicies': (b'B^{__ODRecord=}^{__CFDictionary=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordAddValue': (b'B^{_ODRecord=}@@^^{__CFError}', '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'ODRecordAuthenticationAllowed': (b'B^{__ODRecord=}^^{__CFError=}', '', {'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeAddAccountPolicy': (b'B^{__ODNode=}^{__CFDictionary=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}})} -cftypes=[('ODContextRef', b'^{__ODContext=}', 'ODContextGetTypeID', None), ('ODNodeRef', b'^{__ODNode=}', 'ODNodeGetTypeID', None), ('ODQueryRef', b'^{__ODQuery=}', 'ODQueryGetTypeID', None), ('ODRecordRef', b'^{__ODRecord=}', 'ODRecordGetTypeID', None), ('ODSessionRef', b'^{__ODSession=}', 
'ODSessionGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CFOpenDirectory/_metadata.pyc b/env/lib/python2.7/site-packages/CFOpenDirectory/_metadata.pyc deleted file mode 100644 index e932a939..00000000 Binary files a/env/lib/python2.7/site-packages/CFOpenDirectory/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CalendarStore/__init__.py b/env/lib/python2.7/site-packages/CalendarStore/__init__.py deleted file mode 100644 index 7216d4e5..00000000 --- a/env/lib/python2.7/site-packages/CalendarStore/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the CalendarStore framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import sys -import objc -import Foundation - -from CalendarStore import _metadata - -sys.modules['CalendarStore'] = objc.ObjCLazyModule( - "CalendarStore", "com.apple.CalendarStore", - objc.pathForFramework("/System/Library/Frameworks/CalendarStore.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - 'objc': objc, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['CalendarStore._metadata'] diff --git a/env/lib/python2.7/site-packages/CalendarStore/__init__.pyc b/env/lib/python2.7/site-packages/CalendarStore/__init__.pyc deleted file mode 100644 index e0c184e0..00000000 Binary files a/env/lib/python2.7/site-packages/CalendarStore/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CalendarStore/_metadata.py b/env/lib/python2.7/site-packages/CalendarStore/_metadata.py deleted file mode 100644 index 19ab9fec..00000000 --- a/env/lib/python2.7/site-packages/CalendarStore/_metadata.py +++ /dev/null @@ -1,43 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sat Aug 16 17:58:55 2014 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$CalAlarmActionDisplay$CalAlarmActionEmail$CalAlarmActionProcedure$CalAlarmActionSound$CalAttendeeStatusAccepted$CalAttendeeStatusDeclined$CalAttendeeStatusNeedsAction$CalAttendeeStatusTentative$CalCalendarStoreErrorDomain$CalCalendarTypeBirthday$CalCalendarTypeCalDAV$CalCalendarTypeExchange$CalCalendarTypeIMAP$CalCalendarTypeLocal$CalCalendarTypeSubscription$CalCalendarsChangedExternallyNotification$CalCalendarsChangedNotification$CalDeletedRecordsKey$CalEventsChangedExternallyNotification$CalEventsChangedNotification$CalInsertedRecordsKey$CalSenderProcessIDKey$CalTasksChangedExternallyNotification$CalTasksChangedNotification$CalUpdatedRecordsKey$CalUserUIDKey$''' -constants = constants + '$CalDefaultRecurrenceInterval@%s$'%(sel32or64('I', 'Q'),) -enums = '''$CalCalendarNotEditableError@1025$CalCalendarNotInRepository@1027$CalCalendarTitleNotUniqueError@1028$CalDateInvalidError@1026$CalPriorityHigh@1$CalPriorityLow@9$CalPriorityMedium@5$CalPriorityNone@0$CalRecurrenceDaily@0$CalRecurrenceMonthly@2$CalRecurrenceWeekly@1$CalRecurrenceYearly@3$CalSpanAllEvents@2$CalSpanFutureEvents@1$CalSpanThisEvent@0$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CalCalendar', b'isEditable', {'retval': {'type': 'Z'}}) - r(b'CalCalendarItem', b'hasAlarm', {'retval': 
{'type': 'Z'}}) - r(b'CalCalendarStore', b'removeCalendar:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CalCalendarStore', b'removeEvent:span:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CalCalendarStore', b'removeTask:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CalCalendarStore', b'saveCalendar:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CalCalendarStore', b'saveEvent:span:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CalCalendarStore', b'saveTask:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CalEvent', b'isAllDay', {'retval': {'type': 'Z'}}) - r(b'CalEvent', b'isDetached', {'retval': {'type': 'Z'}}) - r(b'CalEvent', b'setIsAllDay:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CalRecurrenceEnd', b'usesEndDate', {'retval': {'type': 'Z'}}) - r(b'CalTask', b'isCompleted', {'retval': {'type': 'Z'}}) - r(b'CalTask', b'setIsCompleted:', {'arguments': {2: {'type': 'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CalendarStore/_metadata.pyc b/env/lib/python2.7/site-packages/CalendarStore/_metadata.pyc deleted file mode 100644 index c0575be9..00000000 Binary files a/env/lib/python2.7/site-packages/CalendarStore/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CloudKit/__init__.py b/env/lib/python2.7/site-packages/CloudKit/__init__.py deleted file mode 100644 index f19673bb..00000000 --- a/env/lib/python2.7/site-packages/CloudKit/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -''' -Python mapping for the CloudKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Accounts -import Foundation -import CoreData -import CoreLocation - -from CloudKit import _metadata - -sys.modules['CloudKit'] = mod = objc.ObjCLazyModule( - "CloudKit", - "com.apple.CloudKit", - objc.pathForFramework("/System/Library/Frameworks/CloudKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (CoreData, CoreLocation, Accounts, Foundation)) - -import sys -del sys.modules['CloudKit._metadata'] diff --git a/env/lib/python2.7/site-packages/CloudKit/__init__.pyc b/env/lib/python2.7/site-packages/CloudKit/__init__.pyc deleted file mode 100644 index de600ced..00000000 Binary files a/env/lib/python2.7/site-packages/CloudKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CloudKit/_metadata.py b/env/lib/python2.7/site-packages/CloudKit/_metadata.py deleted file mode 100644 index a8d3ba89..00000000 --- a/env/lib/python2.7/site-packages/CloudKit/_metadata.py +++ /dev/null @@ -1,176 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 5 21:07:07 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$CKAccountChangedNotification$CKCurrentUserDefaultName$CKErrorDomain$CKErrorRetryAfterKey$CKOwnerDefaultName$CKPartialErrorsByItemIDKey$CKQueryOperationMaximumResults@Q$CKRecordChangedErrorAncestorRecordKey$CKRecordChangedErrorClientRecordKey$CKRecordChangedErrorServerRecordKey$CKRecordParentKey$CKRecordShareKey$CKRecordTypeShare$CKRecordTypeUserRecord$CKRecordZoneDefaultName$CKShareThumbnailImageDataKey$CKShareTitleKey$CKShareTypeKey$''' -enums = 
'''$CKAccountStatusAvailable@1$CKAccountStatusCouldNotDetermine@0$CKAccountStatusNoAccount@3$CKAccountStatusRestricted@2$CKApplicationPermissionStatusCouldNotComplete@1$CKApplicationPermissionStatusDenied@2$CKApplicationPermissionStatusGranted@3$CKApplicationPermissionStatusInitialState@0$CKApplicationPermissionUserDiscoverability@1$CKDatabaseScopePrivate@2$CKDatabaseScopePublic@1$CKDatabaseScopeShared@3$CKErrorAlreadyShared@30$CKErrorAssetFileModified@17$CKErrorAssetFileNotFound@16$CKErrorAssetNotAvailable@35$CKErrorBadContainer@5$CKErrorBadDatabase@24$CKErrorBatchRequestFailed@22$CKErrorChangeTokenExpired@21$CKErrorConstraintViolation@19$CKErrorIncompatibleVersion@18$CKErrorInternalError@1$CKErrorInvalidArguments@12$CKErrorLimitExceeded@27$CKErrorManagedAccountRestricted@32$CKErrorMissingEntitlement@8$CKErrorNetworkFailure@4$CKErrorNetworkUnavailable@3$CKErrorNotAuthenticated@9$CKErrorOperationCancelled@20$CKErrorPartialFailure@2$CKErrorParticipantMayNeedVerification@33$CKErrorPermissionFailure@10$CKErrorQuotaExceeded@25$CKErrorReferenceViolation@31$CKErrorRequestRateLimited@7$CKErrorResultsTruncated@13$CKErrorServerRecordChanged@14$CKErrorServerRejectedRequest@15$CKErrorServerResponseLost@34$CKErrorServiceUnavailable@6$CKErrorTooManyParticipants@29$CKErrorUnknownItem@11$CKErrorUserDeletedZone@28$CKErrorZoneBusy@23$CKErrorZoneNotFound@26$CKNotificationTypeDatabase@4$CKNotificationTypeQuery@1$CKNotificationTypeReadNotification@3$CKNotificationTypeRecordZone@2$CKOperationGroupTransferSizeGigabytes@5$CKOperationGroupTransferSizeHundredsOfGigabytes@7$CKOperationGroupTransferSizeHundredsOfMegabytes@4$CKOperationGroupTransferSizeKilobytes@1$CKOperationGroupTransferSizeMegabytes@2$CKOperationGroupTransferSizeTensOfGigabytes@6$CKOperationGroupTransferSizeTensOfMegabytes@3$CKOperationGroupTransferSizeUnknown@0$CKQueryNotificationReasonRecordCreated@1$CKQueryNotificationReasonRecordDeleted@3$CKQueryNotificationReasonRecordUpdated@2$CKQuerySubscriptionOptionsFiresOnRecordCreation@1$CKQuerySubscriptionOptionsFiresOnRecordDeletion@4$CKQuerySubscriptionOptionsFiresOnRecordUpdate@2$CKQuerySubscriptionOptionsFiresOnce@8$CKRecordSaveAllKeys@2$CKRecordSaveChangedKeys@1$CKRecordSaveIfServerRecordUnchanged@0$CKRecordZoneCapabilityAtomic@2$CKRecordZoneCapabilityFetchChanges@1$CKRecordZoneCapabilitySharing@4$CKReferenceActionDeleteSelf@1$CKReferenceActionNone@0$CKShareParticipantAcceptanceStatusAccepted@2$CKShareParticipantAcceptanceStatusPending@1$CKShareParticipantAcceptanceStatusRemoved@3$CKShareParticipantAcceptanceStatusUnknown@0$CKShareParticipantPermissionNone@1$CKShareParticipantPermissionReadOnly@2$CKShareParticipantPermissionReadWrite@3$CKShareParticipantPermissionUnknown@0$CKShareParticipantRoleOwner@1$CKShareParticipantRolePrivateUser@3$CKShareParticipantRolePublicUser@4$CKShareParticipantRoleUnknown@0$CKShareParticipantTypeOwner@1$CKShareParticipantTypePrivateUser@3$CKShareParticipantTypePublicUser@4$CKShareParticipantTypeUnknown@0$CKSubscriptionOptionsFiresOnRecordCreation@1$CKSubscriptionOptionsFiresOnRecordDeletion@4$CKSubscriptionOptionsFiresOnRecordUpdate@2$CKSubscriptionOptionsFiresOnce@8$CKSubscriptionTypeDatabase@3$CKSubscriptionTypeQuery@1$CKSubscriptionTypeRecordZone@2$''' -misc.update({}) -aliases = {'CK_UNIT_TESTS_EXTERN': 'CK_EXTERN'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CKAcceptSharesOperation', b'acceptSharesCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - 
r(b'CKAcceptSharesOperation', b'perShareCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}) - r(b'CKAcceptSharesOperation', b'setAcceptSharesCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKAcceptSharesOperation', b'setPerShareCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKContainer', b'acceptShareMetadata:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'accountStatusWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'q'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'discoverAllContactUserInfosWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'discoverUserInfoWithEmailAddress:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'discoverUserInfoWithUserRecordID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'fetchAllLongLivedOperationIDsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKContainer', b'fetchLongLivedOperationWithID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'fetchShareMetadataWithURL:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'fetchShareParticipantWithEmailAddress:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'fetchShareParticipantWithPhoneNumber:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'fetchShareParticipantWithUserRecordID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'fetchUserRecordIDWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'requestApplicationPermission:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'q'}, 2: {'type': b'@'}}}}}}) - r(b'CKContainer', b'statusForApplicationPermission:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'q'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', 
b'deleteRecordWithID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'deleteRecordZoneWithID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'deleteSubscriptionWithID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'fetchAllRecordZonesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'fetchAllSubscriptionsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'fetchRecordWithID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'fetchRecordZoneWithID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'fetchSubscriptionWithID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'performQuery:inZoneWithID:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'saveRecord:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'saveRecordZone:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDatabase', b'saveSubscription:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDiscoverAllContactsOperation', b'discoverAllContactsCompletionBlock', {'retval': {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKDiscoverAllContactsOperation', b'setDiscoverAllContactsCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKDiscoverAllUserIdentitiesOperation', b'discoverAllUserIdentitiesCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKDiscoverAllUserIdentitiesOperation', b'setDiscoverAllUserIdentitiesCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKDiscoverAllUserIdentitiesOperation', b'setUserIdentityDiscoveredBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKDiscoverAllUserIdentitiesOperation', b'userIdentityDiscoveredBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - 
r(b'CKDiscoverUserIdentitiesOperation', b'discoverUserIdentitiesCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKDiscoverUserIdentitiesOperation', b'setDiscoverUserIdentitiesCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKDiscoverUserIdentitiesOperation', b'setUserIdentityDiscoveredBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKDiscoverUserIdentitiesOperation', b'userIdentityDiscoveredBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'CKDiscoverUserInfosOperation', b'discoverUserInfosCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}) - r(b'CKDiscoverUserInfosOperation', b'setDiscoverUserInfosCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'changeTokenUpdatedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'fetchAllChanges', {'retval': {'type': 'Z'}}) - r(b'CKFetchDatabaseChangesOperation', b'fetchDatabaseChangesCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}, 3: {'type': b'@'}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'recordZoneWithIDChangedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'recordZoneWithIDWasDeletedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'recordZoneWithIDWasPurgedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'setChangeTokenUpdatedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'setFetchAllChanges:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CKFetchDatabaseChangesOperation', b'setFetchDatabaseChangesCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}, 3: {'type': b'@'}}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'setRecordZoneWithIDChangedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'setRecordZoneWithIDWasDeletedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchDatabaseChangesOperation', b'setRecordZoneWithIDWasPurgedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchNotificationChangesOperation', b'fetchNotificationChangesCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 
'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'CKFetchNotificationChangesOperation', b'moreComing', {'retval': {'type': b'Z'}}) - r(b'CKFetchNotificationChangesOperation', b'notificationChangedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchNotificationChangesOperation', b'setFetchNotificationChangesCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKFetchNotificationChangesOperation', b'setNotificationChangedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchRecordChangesOperation', b'fetchAllChanges', {'retval': {'type': 'Z'}}) - r(b'CKFetchRecordChangesOperation', b'fetchRecordChangesCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}) - r(b'CKFetchRecordChangesOperation', b'moreComing', {'retval': {'type': b'Z'}}) - r(b'CKFetchRecordChangesOperation', b'perRecordProgressBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'd'}}}}}) - r(b'CKFetchRecordChangesOperation', b'recordChangedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchRecordChangesOperation', b'recordWithIDWasDeletedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchRecordChangesOperation', b'serverChangeTokenFetchedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchRecordChangesOperation', b'setFetchAllChanges:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CKFetchRecordChangesOperation', b'setFetchRecordChangesCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKFetchRecordChangesOperation', b'setPerRecordProgressBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'd'}}}}}}) - r(b'CKFetchRecordChangesOperation', b'setRecordChangedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchRecordChangesOperation', b'setRecordWithIDWasDeletedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchRecordChangesOperation', b'setServerChangeTokenFetchedBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'fetchAllChanges', {'retval': {'type': 'Z'}}) - r(b'CKFetchRecordZoneChangesOperation', b'fetchRecordZoneChangesCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'recordChangedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'recordWithIDWasDeletedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 
'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'recordZoneChangeTokensUpdatedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'recordZoneFetchCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'Z'}, 5: {'type': b'@'}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'setFetchAllChanges:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CKFetchRecordZoneChangesOperation', b'setFetchRecordZoneChangesCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'setRecordChangedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'setRecordWithIDWasDeletedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'setRecordZoneChangeTokensUpdatedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKFetchRecordZoneChangesOperation', b'setRecordZoneFetchCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'Z'}, 5: {'type': b'@'}}}}}}) - r(b'CKFetchRecordZonesOperation', b'fetchRecordZonesCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'CKFetchRecordZonesOperation', b'setFetchRecordZonesCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKFetchRecordsOperation', b'fetchRecordsCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'CKFetchRecordsOperation', b'perRecordCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}) - r(b'CKFetchRecordsOperation', b'perRecordProgressBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'd'}}}}}) - r(b'CKFetchRecordsOperation', b'setFetchRecordsCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKFetchRecordsOperation', b'setPerRecordCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKFetchRecordsOperation', b'setPerRecordProgressBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'd'}}}}}}) - r(b'CKFetchShareMetadataOperation', b'fetchShareMetadataCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 
1: {'type': b'@'}}}}}) - r(b'CKFetchShareMetadataOperation', b'perShareMetadataBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}) - r(b'CKFetchShareMetadataOperation', b'setFetchShareMetadataCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchShareMetadataOperation', b'setPerShareMetadataBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKFetchShareMetadataOperation', b'setShouldFetchRootRecord:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CKFetchShareMetadataOperation', b'shouldFetchRootRecord', {'retval': {'type': 'Z'}}) - r(b'CKFetchShareParticipantsOperation', b'fetchShareParticipantsCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchShareParticipantsOperation', b'setFetchShareParticipantsCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchShareParticipantsOperation', b'setShareParticipantFetchedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKFetchShareParticipantsOperation', b'shareParticipantFetchedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKFetchSubscriptionsOperation', b'fetchSubscriptionCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'CKFetchSubscriptionsOperation', b'setFetchSubscriptionCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKFetchWebAuthTokenOperation', b'fetchWebAuthTokenCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'CKFetchWebAuthTokenOperation', b'setFetchWebAuthTokenCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKMarkNotificationsReadOperation', b'markNotificationsReadCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'CKMarkNotificationsReadOperation', b'setMarkNotificationsReadCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKModifyBadgeOperation', b'modifyBadgeCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKModifyBadgeOperation', b'setModifyBadgeCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKModifyRecordZonesOperation', b'modifyRecordZonesCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}) - r(b'CKModifyRecordZonesOperation', b'setModifyRecordZonesCompletionBlock:', {'arguments': {2: 
{'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKModifyRecordsOperation', b'atomic', {'retval': {'type': b'Z'}}) - r(b'CKModifyRecordsOperation', b'modifyRecordsCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}) - r(b'CKModifyRecordsOperation', b'perRecordCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKModifyRecordsOperation', b'perRecordProgressBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'd'}}}}}) - r(b'CKModifyRecordsOperation', b'setAtomic:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CKModifyRecordsOperation', b'setModifyRecordsCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKModifyRecordsOperation', b'setPerRecordCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKModifyRecordsOperation', b'setPerRecordProgressBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'd'}}}}}}) - r(b'CKModifySubscriptionsOperation', b'modifySubscriptionsCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}) - r(b'CKModifySubscriptionsOperation', b'setModifySubscriptionsCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'CKNotification', b'isPruned', {'retval': {'type': b'Z'}}) - r(b'CKNotificationInfo', b'setShouldBadge:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CKNotificationInfo', b'setShouldSendContentAvailable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CKNotificationInfo', b'setShouldSendMutableContent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CKNotificationInfo', b'shouldBadge', {'retval': {'type': b'Z'}}) - r(b'CKNotificationInfo', b'shouldSendContentAvailable', {'retval': {'type': b'Z'}}) - r(b'CKNotificationInfo', b'shouldSendMutableContent', {'retval': {'type': 'Z'}}) - r(b'CKOperation', b'allowsCellularAccess', {'retval': {'type': b'Z'}}) - r(b'CKOperation', b'isLongLived', {'retval': {'type': 'Z'}}) - r(b'CKOperation', b'longLivedOperationWasPersistedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'CKOperation', b'setAllowsCellularAccess:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CKOperation', b'setLongLived:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CKOperation', b'setLongLivedOperationWasPersistedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'CKOperation', b'setUsesBackgroundSession:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CKOperation', b'usesBackgroundSession', {'retval': {'type': b'Z'}}) - r(b'CKOperationConfiguration', b'allowsCellularAccess', {'retval': {'type': 'Z'}}) - r(b'CKOperationConfiguration', b'isLongLived', {'retval': {'type': 'Z'}}) - r(b'CKOperationConfiguration', b'setAllowsCellularAccess:', {'arguments': {2: {'type': 'Z'}}}) - 
r(b'CKOperationConfiguration', b'setLongLived:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CKQueryNotification', b'isPublicDatabase', {'retval': {'type': b'Z'}}) - r(b'CKQueryOperation', b'queryCompletionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'CKQueryOperation', b'recordFetchedBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CKQueryOperation', b'setQueryCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CKQueryOperation', b'setRecordFetchedBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CKUserIdentity', b'hasiCloudAccount', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CloudKit/_metadata.pyc b/env/lib/python2.7/site-packages/CloudKit/_metadata.pyc deleted file mode 100644 index 6fb3c067..00000000 Binary files a/env/lib/python2.7/site-packages/CloudKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Cocoa/__init__.py b/env/lib/python2.7/site-packages/Cocoa/__init__.py deleted file mode 100644 index 2679ba2c..00000000 --- a/env/lib/python2.7/site-packages/Cocoa/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -''' -Python mapping for the Cocoa framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Foundation -import AppKit - -mod = objc.ObjCLazyModule('Cocoa', None, None, {}, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (AppKit, Foundation)) -sys.modules['Cocoa'] = mod diff --git a/env/lib/python2.7/site-packages/Cocoa/__init__.pyc b/env/lib/python2.7/site-packages/Cocoa/__init__.pyc deleted file mode 100644 index 2cc482a8..00000000 Binary files a/env/lib/python2.7/site-packages/Cocoa/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Collaboration/__init__.py b/env/lib/python2.7/site-packages/Collaboration/__init__.py deleted file mode 100644 index 53b9fac9..00000000 --- a/env/lib/python2.7/site-packages/Collaboration/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the Collaboration framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import sys -import objc -import Foundation - -from Collaboration import _metadata - -sys.modules['Collaboration'] = objc.ObjCLazyModule( - "Collaboration", "com.apple.Collaboration", - objc.pathForFramework("/System/Library/Frameworks/Collaboration.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, (Foundation,)) - -import sys -del sys.modules['Collaboration._metadata'] diff --git a/env/lib/python2.7/site-packages/Collaboration/__init__.pyc b/env/lib/python2.7/site-packages/Collaboration/__init__.pyc deleted file mode 100644 index aeefb553..00000000 Binary files a/env/lib/python2.7/site-packages/Collaboration/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Collaboration/_metadata.py b/env/lib/python2.7/site-packages/Collaboration/_metadata.py deleted file mode 100644 index cc523f65..00000000 --- a/env/lib/python2.7/site-packages/Collaboration/_metadata.py +++ /dev/null @@ -1,44 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Sep 30 22:20:12 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CBIdentity', b'CSIdentity', {'retval': {'type': '^{__CSIdentity=}'}}) - r(b'CBIdentity', b'UUIDString', {'deprecated': 1011}) - r(b'CBIdentity', b'identityWithCSIdentity:', {'arguments': {2: {'type': '^{__CSIdentity=}'}}}) - r(b'CBIdentity', b'identityWithUUIDString:authority:', {'deprecated': 1011}) - r(b'CBIdentity', b'isHidden', {'retval': {'type': 'Z'}}) - r(b'CBIdentity', b'isMemberOfGroup:', {'retval': {'type': 'Z'}}) - r(b'CBIdentity', b'members', {'deprecated': 1011}) - r(b'CBIdentityAuthority', b'CSIdentityAuthority', {'retval': {'type': '^{__CSIdentityAuthority=}'}}) - r(b'CBIdentityAuthority', b'identityAuthorityWithCSIdentityAuthority:', {'arguments': {2: {'type': '^{__CSIdentityAuthority=}'}}}) - r(b'CBIdentityPicker', b'allowsMultipleSelection', {'retval': {'type': 'Z'}}) - r(b'CBIdentityPicker', b'runModalForWindow:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'I'}}}}}}) - r(b'CBIdentityPicker', b'runModalForWindow:modalDelegate:didEndSelector:contextInfo:', {'deprecated': 1011, 'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 5: {'type': '^v'}}}) - r(b'CBIdentityPicker', b'setAllowsMultipleSelection:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CBUserIdentity', b'authenticateWithPassword:', {'retval': {'type': 'Z'}}) - r(b'CBUserIdentity', b'certificate', {'retval': {'type': '^{OpaqueSecCertificateRef=}'}}) - r(b'CBUserIdentity', b'isEnabled', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Collaboration/_metadata.pyc b/env/lib/python2.7/site-packages/Collaboration/_metadata.pyc deleted file mode 100644 index 3acb496c..00000000 Binary files a/env/lib/python2.7/site-packages/Collaboration/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ColorSync/__init__.py b/env/lib/python2.7/site-packages/ColorSync/__init__.py deleted file mode 100644 index 
fe61f9d4..00000000 --- a/env/lib/python2.7/site-packages/ColorSync/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the ColorSync framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import CoreFoundation - -from ColorSync import _metadata - -sys.modules['ColorSync'] = mod = objc.ObjCLazyModule('ColorSync', - "com.apple.ColorSync", - objc.pathForFramework("/System/Library/Frameworks/ColorSync.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( CoreFoundation,)) - -import sys -del sys.modules['ColorSync._metadata'] diff --git a/env/lib/python2.7/site-packages/ColorSync/__init__.pyc b/env/lib/python2.7/site-packages/ColorSync/__init__.pyc deleted file mode 100644 index 2d597e34..00000000 Binary files a/env/lib/python2.7/site-packages/ColorSync/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ColorSync/_metadata.py b/env/lib/python2.7/site-packages/ColorSync/_metadata.py deleted file mode 100644 index b0aa33b2..00000000 --- a/env/lib/python2.7/site-packages/ColorSync/_metadata.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jul 31 21:41:45 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'ColorSyncMD5': objc.createStructType('ColorSyncMD5', b'{_ColorSyncMD5=[8C]}', ['digest'])}) -constants = '''$kCMMApplyTransformProcName$kCMMCreateTransformPropertyProcName$kCMMInitializeLinkProfileProcName$kCMMInitializeTransformProcName$kColorSyncACESCGLinearProfile$kColorSyncAdobeRGB1998Profile$kColorSyncBestQuality$kColorSyncBlackPointCompensation$kColorSyncCameraDeviceClass$kColorSyncConversion1DLut$kColorSyncConversion3DLut$kColorSyncConversionBPC$kColorSyncConversionChannelID$kColorSyncConversionGridPoints$kColorSyncConversionInpChan$kColorSyncConversionMatrix$kColorSyncConversionNDLut$kColorSyncConversionOutChan$kColorSyncConversionParamCurve0$kColorSyncConversionParamCurve1$kColorSyncConversionParamCurve2$kColorSyncConversionParamCurve3$kColorSyncConversionParamCurve4$kColorSyncConvertQuality$kColorSyncConvertThreadCount$kColorSyncConvertUseExtendedRange$kColorSyncConvertUseVectorUnit$kColorSyncCustomProfiles$kColorSyncDCIP3Profile$kColorSyncDeviceClass$kColorSyncDeviceDefaultProfileID$kColorSyncDeviceDescription$kColorSyncDeviceDescriptions$kColorSyncDeviceHostScope$kColorSyncDeviceID$kColorSyncDeviceModeDescription$kColorSyncDeviceModeDescriptions$kColorSyncDeviceProfileID$kColorSyncDeviceProfileIsCurrent$kColorSyncDeviceProfileIsDefault$kColorSyncDeviceProfileIsFactory$kColorSyncDeviceProfileURL$kColorSyncDeviceProfilesNotification$kColorSyncDeviceRegisteredNotification$kColorSyncDeviceUnregisteredNotification$kColorSyncDeviceUserScope$kColorSyncDisplayDeviceClass$kColorSyncDisplayDeviceProfilesNotification$kColorSyncDisplayP3Profile$kColorSyncDraftQuality$kColorSyncFactoryProfiles$kColorSyncFixedPointRange$kColorSyncGenericCMYKProfile$kColorSyncGenericGrayGamma22Profile$kColorSyncGenericGrayProfile$kColorSyncGenericLabProfile$kColorSyncGenericRGBProfile$kColorSyncGenericXYZProfile$kColorSyncITUR2020Profile$kColorSyncITUR709Profile$kColorSyncN
ormalQuality$kColorSyncPreferredCMM$kColorSyncPrinterDeviceClass$kColorSyncProfile$kColorSyncProfileClass$kColorSyncProfileColorSpace$kColorSyncProfileComputerDomain$kColorSyncProfileDescription$kColorSyncProfileHeader$kColorSyncProfileHostScope$kColorSyncProfileMD5Digest$kColorSyncProfilePCS$kColorSyncProfileRepositoryChangeNotification$kColorSyncProfileURL$kColorSyncProfileUserDomain$kColorSyncProfileUserScope$kColorSyncROMMRGBProfile$kColorSyncRegistrationUpdateWindowServer$kColorSyncRenderingIntent$kColorSyncRenderingIntentAbsolute$kColorSyncRenderingIntentPerceptual$kColorSyncRenderingIntentRelative$kColorSyncRenderingIntentSaturation$kColorSyncRenderingIntentUseProfileHeader$kColorSyncSRGBProfile$kColorSyncScannerDeviceClass$kColorSyncSigAToB0Tag$kColorSyncSigAToB1Tag$kColorSyncSigAToB2Tag$kColorSyncSigAbstractClass$kColorSyncSigBToA0Tag$kColorSyncSigBToA1Tag$kColorSyncSigBToA2Tag$kColorSyncSigBlueColorantTag$kColorSyncSigBlueTRCTag$kColorSyncSigCmykData$kColorSyncSigColorSpaceClass$kColorSyncSigCopyrightTag$kColorSyncSigDeviceMfgDescTag$kColorSyncSigDeviceModelDescTag$kColorSyncSigDisplayClass$kColorSyncSigGamutTag$kColorSyncSigGrayData$kColorSyncSigGrayTRCTag$kColorSyncSigGreenColorantTag$kColorSyncSigGreenTRCTag$kColorSyncSigInputClass$kColorSyncSigLabData$kColorSyncSigLinkClass$kColorSyncSigMediaBlackPointTag$kColorSyncSigMediaWhitePointTag$kColorSyncSigNamedColor2Tag$kColorSyncSigNamedColorClass$kColorSyncSigOutputClass$kColorSyncSigPreview0Tag$kColorSyncSigPreview1Tag$kColorSyncSigPreview2Tag$kColorSyncSigProfileDescriptionTag$kColorSyncSigProfileSequenceDescTag$kColorSyncSigRedColorantTag$kColorSyncSigRedTRCTag$kColorSyncSigRgbData$kColorSyncSigTechnologyTag$kColorSyncSigViewingCondDescTag$kColorSyncSigViewingConditionsTag$kColorSyncSigXYZData$kColorSyncTranformInfo$kColorSyncTransformCodeFragmentMD5$kColorSyncTransformCodeFragmentType$kColorSyncTransformCreator$kColorSyncTransformDeviceToDevice$kColorSyncTransformDeviceToPCS$kColorSyncTransformDstSpace$kColorSyncTransformFullConversionData$kColorSyncTransformGamutCheck$kColorSyncTransformInfo$kColorSyncTransformPCSToDevice$kColorSyncTransformPCSToPCS$kColorSyncTransformParametricConversionData$kColorSyncTransformSimplifiedConversionData$kColorSyncTransformSrcSpace$kColorSyncTransformTag$''' -enums = '''$COLORSYNC_MD5_LENGTH@16$kColorSync10BitInteger@8$kColorSync16BitFloat@4$kColorSync16BitInteger@3$kColorSync1BitGamut@1$kColorSync32BitFloat@7$kColorSync32BitInteger@5$kColorSync32BitNamedColorIndex@6$kColorSync8BitInteger@2$kColorSyncAlphaFirst@4$kColorSyncAlphaInfoMask@31$kColorSyncAlphaLast@3$kColorSyncAlphaNone@0$kColorSyncAlphaNoneSkipFirst@6$kColorSyncAlphaNoneSkipLast@5$kColorSyncAlphaPremultipliedFirst@2$kColorSyncAlphaPremultipliedLast@1$kColorSyncByteOrder16Big@12288$kColorSyncByteOrder16Little@4096$kColorSyncByteOrder32Big@16384$kColorSyncByteOrder32Little@8192$kColorSyncByteOrderDefault@0$kColorSyncByteOrderMask@28672$''' -misc.update({'COLORSYNC_PROFILE_INSTALL_ENTITLEMENT': b'com.apple.developer.ColorSync.profile.install'}) -functions={'ColorSyncProfileCopyDescriptionString': (b'^{__CFString=}^{ColorSyncProfile=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncCMMCopyLocalizedName': (b'^{__CFString=}^{ColorSyncCMM=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileCreateLink': (b'^{ColorSyncProfile=}^{__CFArray=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileCreateDisplayTransferTablesFromVCGT': (b'B^{ColorSyncProfile=}n^L', '', {'arguments': {1: 
{'comment': 'Unclear if this is correct'}}}), 'CGDisplayGetDisplayIDFromUUID': (b'I^{__CFUUID=}',), 'ColorSyncProfileCreateDeviceProfile': (b'^{ColorSyncProfile=}^{__CFString=}^{__CFUUID=}@', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'comment': 'CFTypeRef'}}}), 'ColorSyncProfileCopyHeader': (b'^{__CFData=}^{ColorSyncProfile=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileCopyTagSignatures': (b'^{__CFArray=}^{ColorSyncProfile=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncCMMGetTypeID': (sel32or64(b'I', b'Q'),), 'ColorSyncProfileCreateWithURL': (b'^{ColorSyncProfile=}^{__CFURL=}o^{__CFError=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileVerify': (b'B^{ColorSyncProfile=}o^{__CFError=}o^{__CFError=}',), 'ColorSyncTransformConvert': (b'B^{ColorSyncTransform=}LL^viiL^viiL^{__CFDictionary=}', '', {'arguments': {3: {'type_modifier': 'o', 'c_array_of_variable_length': True}, 7: {'type_modifier': 'n', 'c_array_of_variable_length': True}}}), 'ColorSyncCMMCreate': (b'^{ColorSyncCMM=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileCreateWithDisplayID': (b'^{ColorSyncProfile=}I', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileCreateMutableCopy': (b'^{ColorSyncProfile=}^{ColorSyncProfile=}', '', {'retval': {'already_cfretained': True}}), 'CGDisplayCreateUUIDFromDisplayID': (b'^{__CFUUID=}I', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileInstall': (b'B^{ColorSyncProfile=}^{__CFString=}^{__CFString=}o^^{__CFError=}',), 'ColorSyncProfileSetTag': (b'v^{ColorSyncProfile=}^{__CFString=}^{__CFData=}',), 'ColorSyncTransformCreate': (b'^{ColorSyncTransform=}^{__CFArray=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileCopyTag': (b'^{__CFData=}^{ColorSyncProfile=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncIterateDeviceProfiles': (b'v^?^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'B'}, 'arguments': {0: {'type': b'^{__CFDictionary=}'}, 1: {'type': b'^v'}}}, 'callable_retained': False}}}), 'ColorSyncProfileGetURL': (b'^{__CFData=}^{ColorSyncProfile=}o^{__CFError=}',), 'ColorSyncTransformSetProperty': (b'v^{ColorSyncTransform=}@@',), 'ColorSyncUnregisterDevice': (b'B^{__CFString=}^{__CFUUID=}',), 'ColorSyncDeviceCopyDeviceInfo': (b'^{__CFDictionary=}^{__CFString=}^{__CFUUID=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileCreateMutable': (b'^{ColorSyncProfile=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncIterateInstalledProfiles': (b'v^?N^I^v^^{__CFError=}', '', {'arguments': {0: {'callable': {'retval': {'type': b'B'}, 'arguments': {0: {'type': b'^{__CFDictionary=}'}, 1: {'type': b'^v'}}}, 'callable_retained': False}, 3: {'type_modifier': 'o'}}}), 'ColorSyncProfileUninstall': (b'B^{ColorSyncProfile=}o^^{__CFError=}',), 'ColorSyncProfileCopyData': (b'^{__CFData=}^{ColorSyncProfile=}o^{__CFError=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncIterateInstalledCMMs': (b'v^?^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'B'}, 'arguments': {0: {'type': b'^{ColorSyncCMM=}'}, 1: {'type': b'^v'}}}, 'callable_retained': False}}}), 'ColorSyncCMMGetBundle': (b'^{__CFBundle=}^{ColorSyncCMM=}',), 'ColorSyncProfileGetTypeID': (sel32or64(b'I', b'Q'),), 'ColorSyncProfileCreateWithName': (b'^{ColorSyncProfile=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncTransformGetTypeID': (sel32or64(b'I', b'Q'),), 
'ColorSyncProfileGetDisplayTransferFormulaFromVCGT': (b'B^{ColorSyncProfile=}o^fo^fo^fo^fo^fo^fo^fo^fo^f',), 'ColorSyncRegisterDevice': (b'B^{__CFString=}^{__CFUUID=}^{__CFDictionary=}',), 'ColorSyncProfileEstimateGammaWithDisplayID': (b'fIo^{__CFError=}',), 'ColorSyncProfileGetMD5': (b'{_ColorSyncMD5=[8C]}^{ColorSyncProfile=}',), 'ColorSyncProfileRemoveTag': (b'v^{ColorSyncProfile=}^{__CFString=}',), 'ColorSyncProfileContainsTag': (b'^{__CFData=}^{ColorSyncProfile=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileEstimateGamma': (b'f^{ColorSyncProfile=}o^{__CFError=}',), 'ColorSyncProfileCreate': (b'^{ColorSyncProfile=}^{__CFData=}o^{__CFError=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncProfileSetHeader': (b'v^{ColorSyncProfile=}^{__CFData=}',), 'ColorSyncTransformCopyProperty': (b'@^{ColorSyncTransform=}@^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'ColorSyncCMMCopyCMMIdentifier': (b'^{__CFString=}^{ColorSyncCMM=}', '', {'retval': {'already_cfretained': True}})} -aliases = {'ColorSyncMutableProfileRef': 'ColorSyncProfileRef'} -cftypes=[('ColorSyncCMMRef', b'^{ColorSyncCMM=}', 'ColorSyncCMMGetTypeID', None), ('ColorSyncProfileRef', b'^{ColorSyncProfile=}', 'ColorSyncProfileGetTypeID', None), ('ColorSyncTransformRef', b'^{ColorSyncTransform=}', 'ColorSyncTransformGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/ColorSync/_metadata.pyc b/env/lib/python2.7/site-packages/ColorSync/_metadata.pyc deleted file mode 100644 index 2a23965a..00000000 Binary files a/env/lib/python2.7/site-packages/ColorSync/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Contacts/_Contacts.so b/env/lib/python2.7/site-packages/Contacts/_Contacts.so deleted file mode 100755 index f1c6dabe..00000000 Binary files a/env/lib/python2.7/site-packages/Contacts/_Contacts.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Contacts/__init__.py b/env/lib/python2.7/site-packages/Contacts/__init__.py deleted file mode 100644 index 70684bf9..00000000 --- a/env/lib/python2.7/site-packages/Contacts/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -''' -Python mapping for the Contacts framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from Contacts import _metadata -from Contacts._Contacts import * - -try: - long -except NameError: - long = int - -sys.modules['Contacts'] = mod = objc.ObjCLazyModule( - "Contacts", - "com.apple.Contacts.framework", - objc.pathForFramework("/System/Library/Frameworks/Contacts.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['Contacts._metadata'] diff --git a/env/lib/python2.7/site-packages/Contacts/__init__.pyc b/env/lib/python2.7/site-packages/Contacts/__init__.pyc deleted file mode 100644 index 59d2ce1a..00000000 Binary files a/env/lib/python2.7/site-packages/Contacts/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Contacts/_metadata.py b/env/lib/python2.7/site-packages/Contacts/_metadata.py deleted file mode 100644 index 81da2b32..00000000 --- a/env/lib/python2.7/site-packages/Contacts/_metadata.py +++ /dev/null @@ -1,47 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Sep 30 19:44:11 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$CNContactBirthdayKey$CNContactDatesKey$CNContactDepartmentNameKey$CNContactEmailAddressesKey$CNContactFamilyNameKey$CNContactGivenNameKey$CNContactIdentifierKey$CNContactImageDataAvailableKey$CNContactImageDataKey$CNContactInstantMessageAddressesKey$CNContactJobTitleKey$CNContactMiddleNameKey$CNContactNamePrefixKey$CNContactNameSuffixKey$CNContactNicknameKey$CNContactNonGregorianBirthdayKey$CNContactNoteKey$CNContactOrganizationNameKey$CNContactPhoneNumbersKey$CNContactPhoneticFamilyNameKey$CNContactPhoneticGivenNameKey$CNContactPhoneticMiddleNameKey$CNContactPhoneticOrganizationNameKey$CNContactPostalAddressesKey$CNContactPreviousFamilyNameKey$CNContactPropertyAttribute$CNContactPropertyNotFetchedExceptionName$CNContactRelationsKey$CNContactSocialProfilesKey$CNContactStoreDidChangeNotification$CNContactThumbnailImageDataKey$CNContactTypeKey$CNContactUrlAddressesKey$CNContainerIdentifierKey$CNContainerNameKey$CNContainerTypeKey$CNErrorDomain$CNErrorUserInfoAffectedRecordIdentifiersKey$CNErrorUserInfoAffectedRecordsKey$CNErrorUserInfoKeyPathsKey$CNErrorUserInfoValidationErrorsKey$CNGroupIdentifierKey$CNGroupNameKey$CNInstantMessageAddressServiceKey$CNInstantMessageAddressUsernameKey$CNInstantMessageServiceAIM$CNInstantMessageServiceFacebook$CNInstantMessageServiceGaduGadu$CNInstantMessageServiceGoogleTalk$CNInstantMessageServiceICQ$CNInstantMessageServiceJabber$CNInstantMessageServiceMSN$CNInstantMessageServiceQQ$CNInstantMessageServiceSkype$CNInstantMessageServiceYahoo$CNLabelContactRelationAssistant$CNLabelContactRelationBrother$CNLabelContactRelationChild$CNLabelContactRelationDaughter$CNLabelContactRelationFather$CNLabelContactRelationFriend$CNLabelContactRelationManager$CNLabelContactRelationMother$CNLabelContactRelationParent$CNLabelContactRelationPartner$CNLabelContactRelationSister$CNLabelContactRelationSon$CNLabelContactRelationSpouse$CNLabelDateAnniversary$CNLabelEmailiCloud$CNLabelHome$CNLabelOther$CNLabelPhoneNumberHomeFax$CNLabelPhoneNumberMain$CNLabelPhoneNumberMobile$CNLabelPhoneNumberOtherFax$CNLabelPhoneNumberPager$CNLabelPhoneNumberWorkFax$CNLabelPhoneNumberiPhone$
CNLabelURLAddressHomePage$CNLabelWork$CNPostalAddressCityKey$CNPostalAddressCountryKey$CNPostalAddressISOCountryCodeKey$CNPostalAddressLocalizedPropertyNameAttribute$CNPostalAddressPostalCodeKey$CNPostalAddressPropertyAttribute$CNPostalAddressStateKey$CNPostalAddressStreetKey$CNPostalAddressSubAdministrativeAreaKey$CNPostalAddressSubLocalityKey$CNSocialProfileServiceFacebook$CNSocialProfileServiceFlickr$CNSocialProfileServiceGameCenter$CNSocialProfileServiceKey$CNSocialProfileServiceLinkedIn$CNSocialProfileServiceMySpace$CNSocialProfileServiceSinaWeibo$CNSocialProfileServiceTencentWeibo$CNSocialProfileServiceTwitter$CNSocialProfileServiceYelp$CNSocialProfileURLStringKey$CNSocialProfileUserIdentifierKey$CNSocialProfileUsernameKey$''' -enums = '''$CNAuthorizationStatusAuthorized@3$CNAuthorizationStatusDenied@2$CNAuthorizationStatusNotDetermined@0$CNAuthorizationStatusRestricted@1$CNContactDisplayNameOrderFamilyNameFirst@2$CNContactDisplayNameOrderGivenNameFirst@1$CNContactDisplayNameOrderUserDefault@0$CNContactFormatterStyleFullName@0$CNContactFormatterStylePhoneticFullName@1$CNContactSortOrderFamilyName@3$CNContactSortOrderGivenName@2$CNContactSortOrderNone@0$CNContactSortOrderUserDefault@1$CNContactTypeOrganization@1$CNContactTypePerson@0$CNContainerTypeCardDAV@3$CNContainerTypeExchange@2$CNContainerTypeLocal@1$CNContainerTypeUnassigned@0$CNEntityTypeContacts@0$CNErrorCodeAuthorizationDenied@100$CNErrorCodeClientIdentifierDoesNotExist@601$CNErrorCodeClientIdentifierInvalid@600$CNErrorCodeCommunicationError@1$CNErrorCodeContainmentCycle@202$CNErrorCodeContainmentScope@203$CNErrorCodeDataAccessError@2$CNErrorCodeInsertedRecordAlreadyExists@201$CNErrorCodeNoAccessableWritableContainers@101$CNErrorCodeParentRecordDoesNotExist@204$CNErrorCodePolicyViolation@500$CNErrorCodePredicateInvalid@400$CNErrorCodeRecordDoesNotExist@200$CNErrorCodeRecordIdentifierInvalid@205$CNErrorCodeVCardMalformed@700$CNErrorCodeVCardSummarizationError@701$CNErrorCodeValidationConfigurationError@302$CNErrorCodeValidationMultipleErrors@300$CNErrorCodeValidationTypeMismatch@301$CNPostalAddressFormatterStyleMailingAddress@0$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CNContact', b'areKeysAvailable:', {'retval': {'type': 'Z'}}) - r(b'CNContact', b'isKeyAvailable:', {'retval': {'type': 'Z'}}) - r(b'CNContact', b'isUnifiedWithContactWithIdentifier:', {'retval': {'type': 'Z'}}) - r(b'CNContactFetchRequest', b'mutableObjects', {'retval': {'type': 'Z'}}) - r(b'CNContactFetchRequest', b'setMutableObjects:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CNContactFetchRequest', b'setUnifyResults:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CNContactFetchRequest', b'unifyResults', {'retval': {'type': 'Z'}}) - r(b'CNContactStore', b'containersMatchingPredicate:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CNContactStore', b'enumerateContactsWithFetchRequest:error:usingBlock:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^Z'}}}}}}) - r(b'CNContactStore', b'executeSaveRequest:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CNContactStore', b'groupsMatchingPredicate:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CNContactStore', b'requestAccessForEntityType:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: 
{'type': b'@'}}}}}}) - r(b'CNContactStore', b'unifiedContactWithIdentifier:keysToFetch:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CNContactStore', b'unifiedContactsMatchingPredicate:keysToFetch:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CNContactStore', b'unifiedMeContactWithKeysToFetch:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CNContactVCardSerialization', b'contactsWithData:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CNContactVCardSerialization', b'dataWithContacts:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CNPhoneNumber', b'init', {'deprecated': 1013}) - r(b'CNPhoneNumber', b'new', {'deprecated': 1013}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Contacts/_metadata.pyc b/env/lib/python2.7/site-packages/Contacts/_metadata.pyc deleted file mode 100644 index 183e7fa5..00000000 Binary files a/env/lib/python2.7/site-packages/Contacts/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ContactsUI/_ContactsUI.so b/env/lib/python2.7/site-packages/ContactsUI/_ContactsUI.so deleted file mode 100755 index e01c7a5d..00000000 Binary files a/env/lib/python2.7/site-packages/ContactsUI/_ContactsUI.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ContactsUI/__init__.py b/env/lib/python2.7/site-packages/ContactsUI/__init__.py deleted file mode 100644 index 4b683f6f..00000000 --- a/env/lib/python2.7/site-packages/ContactsUI/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -''' -Python mapping for the ContactsUI framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import AppKit -import Contacts - -from ContactsUI import _metadata -from ContactsUI._ContactsUI import * - -try: - long -except NameError: - long = int - -sys.modules['ContactsUI'] = mod = objc.ObjCLazyModule( - "ContactsUI", - "com.apple.ContactsUI.framework", - objc.pathForFramework("/System/Library/Frameworks/ContactsUI.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (AppKit, Contacts)) - -import sys -del sys.modules['ContactsUI._metadata'] diff --git a/env/lib/python2.7/site-packages/ContactsUI/__init__.pyc b/env/lib/python2.7/site-packages/ContactsUI/__init__.pyc deleted file mode 100644 index 3958ad3b..00000000 Binary files a/env/lib/python2.7/site-packages/ContactsUI/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ContactsUI/_metadata.py b/env/lib/python2.7/site-packages/ContactsUI/_metadata.py deleted file mode 100644 index a8e13c41..00000000 --- a/env/lib/python2.7/site-packages/ContactsUI/_metadata.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Nov 23 15:57:07 2015 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$$''' -misc.update({}) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/ContactsUI/_metadata.pyc b/env/lib/python2.7/site-packages/ContactsUI/_metadata.pyc deleted file mode 100644 index 311af93f..00000000 Binary files a/env/lib/python2.7/site-packages/ContactsUI/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreAudio/_CoreAudio.so b/env/lib/python2.7/site-packages/CoreAudio/_CoreAudio.so deleted file mode 100755 index ab664ef4..00000000 Binary files a/env/lib/python2.7/site-packages/CoreAudio/_CoreAudio.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreAudio/__init__.py b/env/lib/python2.7/site-packages/CoreAudio/__init__.py deleted file mode 100644 index e09da3c9..00000000 --- a/env/lib/python2.7/site-packages/CoreAudio/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -''' -Python mapping for the CoreAudio framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from CoreAudio import _metadata -from CoreAudio._inlines import _inline_list_ - -sys.modules['CoreAudio'] = mod = objc.ObjCLazyModule( - "CoreAudio", - "com.apple.CoreAudio", - objc.pathForFramework("/System/Library/Frameworks/CoreAudio.framework"), - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['CoreAudio._metadata'] - -import CoreAudio._CoreAudio -for nm in dir(CoreAudio._CoreAudio): - setattr(mod, nm, getattr(CoreAudio._CoreAudio, nm)) diff --git a/env/lib/python2.7/site-packages/CoreAudio/__init__.pyc b/env/lib/python2.7/site-packages/CoreAudio/__init__.pyc deleted file mode 100644 index 569a6047..00000000 Binary files a/env/lib/python2.7/site-packages/CoreAudio/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreAudio/_inlines.so b/env/lib/python2.7/site-packages/CoreAudio/_inlines.so deleted file mode 100755 index 0f2e6a70..00000000 Binary files a/env/lib/python2.7/site-packages/CoreAudio/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreAudio/_metadata.py b/env/lib/python2.7/site-packages/CoreAudio/_metadata.py deleted file mode 100644 index c9eb2a5e..00000000 --- a/env/lib/python2.7/site-packages/CoreAudio/_metadata.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Aug 12 15:31:00 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'AudioClassDescription': objc.createStructType('AudioClassDescription', sel32or64(b'{AudioClassDescription=LLL}', b'{AudioClassDescription=III}'), ['mType', 'mSubType', 'mManufacturer']), 'AudioStreamPacketDescription': objc.createStructType('AudioStreamPacketDescription', sel32or64(b'{AudioStreamPacketDescription=qLL}', b'{AudioStreamPacketDescription=qII}'), ['mStartOffset', 'mVariableFramesInPacket', 'mDataByteSize']), 'AudioStreamRangedDescription': objc.createStructType('AudioStreamRangedDescription', sel32or64(b'{AudioStreamRangedDescription={AudioStreamBasicDescription=dLLLLLLLL}{AudioValueRange=dd}}', b'{AudioStreamRangedDescription={AudioStreamBasicDescription=dIIIIIIII}{AudioValueRange=dd}}'), ['mFormat', 'mSampleRateRange']), 'AudioChannelDescription': objc.createStructType('AudioChannelDescription', sel32or64(b'{AudioChannelDescription=LL[3f]}', b'{AudioChannelDescription=II[3f]}'), ['mChannelLabel', 'mChannelFlags', 'mCoordinates']), 'AudioChannelLayout': objc.createStructType('AudioChannelLayout', sel32or64(b'{AudioChannelLayout=LLL[1{AudioChannelDescription=LL[3f]}]}', b'{AudioChannelLayout=III[1{AudioChannelDescription=II[3f]}]}'), ['mChannelLayoutTag', 'mChannelBitmap', 'mNumberChannelDescriptions', 'mChannelDescriptions']), 'SMPTETime': objc.createStructType('SMPTETime', sel32or64(b'{SMPTETime=ssLLLssss}', b'{SMPTETime=ssIIIssss}'), ['mSubframes', 'mSubframeDivisor', 'mCounter', 'mType', 'mFlags', 'mHours', 'mMinutes', 'mSeconds', 'mFrames']), 'AudioValueRange': objc.createStructType('AudioValueRange', b'{AudioValueRange=dd}', ['mMinimum', 'mMaximum']), 'AudioTimeStamp': objc.createStructType('AudioTimeStamp', sel32or64(b'{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', 
b'{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}'), ['mSampleTime', 'mHostTime', 'mRateScalar', 'mWordClockTime', 'mSMPTETime', 'mFlags', 'mReserved']), 'AudioHardwareIOProcStreamUsage': objc.createStructType('AudioHardwareIOProcStreamUsage', sel32or64(b'{AudioHardwareIOProcStreamUsage=^vL[1L]}', b'{AudioHardwareIOProcStreamUsage=^vI[1I]}'), ['mIOProc', 'mNumberStreams', 'mStreamIsOn']), 'AudioStreamBasicDescription': objc.createStructType('AudioStreamBasicDescription', sel32or64(b'{AudioStreamBasicDescription=dLLLLLLLL}', b'{AudioStreamBasicDescription=dIIIIIIII}'), ['mSampleRate', 'mFormatID', 'mFormatFlags', 'mBytesPerPacket', 'mFramesPerPacket', 'mBytesPerFrame', 'mChannelsPerFrame', 'mBitsPerChannel', 'mReserved']), 'AudioObjectPropertyAddress': objc.createStructType('AudioObjectPropertyAddress', sel32or64(b'{AudioObjectPropertyAddress=LLL}', b'{AudioObjectPropertyAddress=III}'), ['mSelector', 'mScope', 'mElement'])}) -constants = '''$$''' -enums = '''$CA_PREFER_FIXED_POINT@0$COREAUDIOTYPES_VERSION@20150414$kAppleLosslessFormatFlag_16BitSourceData@1$kAppleLosslessFormatFlag_20BitSourceData@2$kAppleLosslessFormatFlag_24BitSourceData@3$kAppleLosslessFormatFlag_32BitSourceData@4$kAudioAggregateDeviceClassID@1633773415$kAudioAggregateDevicePropertyActiveSubDeviceList@1634169456$kAudioAggregateDevicePropertyClockDevice@1634755428$kAudioAggregateDevicePropertyComposition@1633906541$kAudioAggregateDevicePropertyFullSubDeviceList@1735554416$kAudioAggregateDevicePropertyMasterSubDevice@1634562932$kAudioBooleanControlClassID@1953458028$kAudioBooleanControlPropertyValue@1650685548$kAudioBootChimeVolumeControlClassID@1886544237$kAudioBoxClassID@1633841016$kAudioBoxPropertyAcquired@1652060014$kAudioBoxPropertyAcquisitionFailed@1652060006$kAudioBoxPropertyBoxUID@1651861860$kAudioBoxPropertyClockDeviceList@1650682915$kAudioBoxPropertyDeviceList@1650751011$kAudioBoxPropertyHasAudio@1651007861$kAudioBoxPropertyHasMIDI@1651010921$kAudioBoxPropertyHasVideo@1651013225$kAudioBoxPropertyIsProtected@1651536495$kAudioBoxPropertyTransportType@1953653102$kAudioChannelBit_Center@4$kAudioChannelBit_CenterSurround@256$kAudioChannelBit_LFEScreen@8$kAudioChannelBit_Left@1$kAudioChannelBit_LeftCenter@64$kAudioChannelBit_LeftSurround@16$kAudioChannelBit_LeftSurroundDirect@512$kAudioChannelBit_Right@2$kAudioChannelBit_RightCenter@128$kAudioChannelBit_RightSurround@32$kAudioChannelBit_RightSurroundDirect@1024$kAudioChannelBit_TopBackCenter@65536$kAudioChannelBit_TopBackLeft@32768$kAudioChannelBit_TopBackRight@131072$kAudioChannelBit_TopCenterSurround@2048$kAudioChannelBit_VerticalHeightCenter@8192$kAudioChannelBit_VerticalHeightLeft@4096$kAudioChannelBit_VerticalHeightRight@16384$kAudioChannelCoordinates_Azimuth@0$kAudioChannelCoordinates_BackFront@1$kAudioChannelCoordinates_Distance@2$kAudioChannelCoordinates_DownUp@2$kAudioChannelCoordinates_Elevation@1$kAudioChannelCoordinates_LeftRight@0$kAudioChannelFlags_AllOff@0$kAudioChannelFlags_Meters@4$kAudioChannelFlags_RectangularCoordinates@1$kAudioChannelFlags_SphericalCoordinates@2$kAudioChannelLabel_Ambisonic_W@200$kAudioChannelLabel_Ambisonic_X@201$kAudioChannelLabel_Ambisonic_Y@202$kAudioChannelLabel_Ambisonic_Z@203$kAudioChannelLabel_BeginReserved@4026531840$kAudioChannelLabel_BinauralLeft@208$kAudioChannelLabel_BinauralRight@209$kAudioChannelLabel_Center@3$kAudioChannelLabel_CenterSurround@9$kAudioChannelLabel_CenterSurroundDirect@44$kAudioChannelLabel_ClickTrack@304$kAudioChannelLabel_DialogCentricMix@43$kAudioChannelLabel_Discrete@400$kAudioChannelLabel_Discrete_0@6
5536$kAudioChannelLabel_Discrete_1@65537$kAudioChannelLabel_Discrete_10@65546$kAudioChannelLabel_Discrete_11@65547$kAudioChannelLabel_Discrete_12@65548$kAudioChannelLabel_Discrete_13@65549$kAudioChannelLabel_Discrete_14@65550$kAudioChannelLabel_Discrete_15@65551$kAudioChannelLabel_Discrete_2@65538$kAudioChannelLabel_Discrete_3@65539$kAudioChannelLabel_Discrete_4@65540$kAudioChannelLabel_Discrete_5@65541$kAudioChannelLabel_Discrete_6@65542$kAudioChannelLabel_Discrete_65535@131071$kAudioChannelLabel_Discrete_7@65543$kAudioChannelLabel_Discrete_8@65544$kAudioChannelLabel_Discrete_9@65545$kAudioChannelLabel_EndReserved@4294967294$kAudioChannelLabel_ForeignLanguage@305$kAudioChannelLabel_HOA_ACN@500$kAudioChannelLabel_HOA_ACN_0@131072$kAudioChannelLabel_HOA_ACN_1@131073$kAudioChannelLabel_HOA_ACN_10@131082$kAudioChannelLabel_HOA_ACN_11@131083$kAudioChannelLabel_HOA_ACN_12@131084$kAudioChannelLabel_HOA_ACN_13@131085$kAudioChannelLabel_HOA_ACN_14@131086$kAudioChannelLabel_HOA_ACN_15@131087$kAudioChannelLabel_HOA_ACN_2@131074$kAudioChannelLabel_HOA_ACN_3@131075$kAudioChannelLabel_HOA_ACN_4@131076$kAudioChannelLabel_HOA_ACN_5@131077$kAudioChannelLabel_HOA_ACN_6@131078$kAudioChannelLabel_HOA_ACN_65024@196096$kAudioChannelLabel_HOA_ACN_7@131079$kAudioChannelLabel_HOA_ACN_8@131080$kAudioChannelLabel_HOA_ACN_9@131081$kAudioChannelLabel_Haptic@45$kAudioChannelLabel_HeadphonesLeft@301$kAudioChannelLabel_HeadphonesRight@302$kAudioChannelLabel_HearingImpaired@40$kAudioChannelLabel_LFE2@37$kAudioChannelLabel_LFEScreen@4$kAudioChannelLabel_Left@1$kAudioChannelLabel_LeftCenter@7$kAudioChannelLabel_LeftSurround@5$kAudioChannelLabel_LeftSurroundDirect@10$kAudioChannelLabel_LeftTotal@38$kAudioChannelLabel_LeftWide@35$kAudioChannelLabel_MS_Mid@204$kAudioChannelLabel_MS_Side@205$kAudioChannelLabel_Mono@42$kAudioChannelLabel_Narration@41$kAudioChannelLabel_RearSurroundLeft@33$kAudioChannelLabel_RearSurroundRight@34$kAudioChannelLabel_Right@2$kAudioChannelLabel_RightCenter@8$kAudioChannelLabel_RightSurround@6$kAudioChannelLabel_RightSurroundDirect@11$kAudioChannelLabel_RightTotal@39$kAudioChannelLabel_RightWide@36$kAudioChannelLabel_TopBackCenter@17$kAudioChannelLabel_TopBackLeft@16$kAudioChannelLabel_TopBackRight@18$kAudioChannelLabel_TopCenterSurround@12$kAudioChannelLabel_Unknown@4294967295$kAudioChannelLabel_Unused@0$kAudioChannelLabel_UseCoordinates@100$kAudioChannelLabel_VerticalHeightCenter@14$kAudioChannelLabel_VerticalHeightLeft@13$kAudioChannelLabel_VerticalHeightRight@15$kAudioChannelLabel_XY_X@206$kAudioChannelLabel_XY_Y@207$kAudioChannelLayoutTag_AAC_3_0@7471107$kAudioChannelLayoutTag_AAC_4_0@7602180$kAudioChannelLayoutTag_AAC_5_0@7864325$kAudioChannelLayoutTag_AAC_5_1@8126470$kAudioChannelLayoutTag_AAC_6_0@9240582$kAudioChannelLayoutTag_AAC_6_1@9306119$kAudioChannelLayoutTag_AAC_7_0@9371655$kAudioChannelLayoutTag_AAC_7_1@8323080$kAudioChannelLayoutTag_AAC_7_1_B@11993096$kAudioChannelLayoutTag_AAC_7_1_C@12058632$kAudioChannelLayoutTag_AAC_Octagonal@9437192$kAudioChannelLayoutTag_AAC_Quadraphonic@7077892$kAudioChannelLayoutTag_AC3_1_0_1@9764866$kAudioChannelLayoutTag_AC3_2_1_1@10027012$kAudioChannelLayoutTag_AC3_3_0@9830403$kAudioChannelLayoutTag_AC3_3_0_1@9961476$kAudioChannelLayoutTag_AC3_3_1@9895940$kAudioChannelLayoutTag_AC3_3_1_1@10092549$kAudioChannelLayoutTag_Ambisonic_B_Format@7012356$kAudioChannelLayoutTag_AudioUnit_4@7077892$kAudioChannelLayoutTag_AudioUnit_5@7143429$kAudioChannelLayoutTag_AudioUnit_5_0@7733253$kAudioChannelLayoutTag_AudioUnit_5_1@7929862$kAudioChannelLayoutTag_AudioUnit_6@72089
66$kAudioChannelLayoutTag_AudioUnit_6_0@9109510$kAudioChannelLayoutTag_AudioUnit_6_1@8192007$kAudioChannelLayoutTag_AudioUnit_7_0@9175047$kAudioChannelLayoutTag_AudioUnit_7_0_Front@9699335$kAudioChannelLayoutTag_AudioUnit_7_1@8388616$kAudioChannelLayoutTag_AudioUnit_7_1_Front@8257544$kAudioChannelLayoutTag_AudioUnit_8@7274504$kAudioChannelLayoutTag_BeginReserved@4026531840$kAudioChannelLayoutTag_Binaural@6946818$kAudioChannelLayoutTag_Cube@7340040$kAudioChannelLayoutTag_DTS_3_1@11010052$kAudioChannelLayoutTag_DTS_4_1@11075589$kAudioChannelLayoutTag_DTS_6_0_A@11141126$kAudioChannelLayoutTag_DTS_6_0_B@11206662$kAudioChannelLayoutTag_DTS_6_0_C@11272198$kAudioChannelLayoutTag_DTS_6_1_A@11337735$kAudioChannelLayoutTag_DTS_6_1_B@11403271$kAudioChannelLayoutTag_DTS_6_1_C@11468807$kAudioChannelLayoutTag_DTS_6_1_D@11927559$kAudioChannelLayoutTag_DTS_7_0@11534343$kAudioChannelLayoutTag_DTS_7_1@11599880$kAudioChannelLayoutTag_DTS_8_0_A@11665416$kAudioChannelLayoutTag_DTS_8_0_B@11730952$kAudioChannelLayoutTag_DTS_8_1_A@11796489$kAudioChannelLayoutTag_DTS_8_1_B@11862025$kAudioChannelLayoutTag_DVD_0@6553601$kAudioChannelLayoutTag_DVD_1@6619138$kAudioChannelLayoutTag_DVD_10@8912900$kAudioChannelLayoutTag_DVD_11@8978437$kAudioChannelLayoutTag_DVD_12@7929862$kAudioChannelLayoutTag_DVD_13@7536644$kAudioChannelLayoutTag_DVD_14@7667717$kAudioChannelLayoutTag_DVD_15@8912900$kAudioChannelLayoutTag_DVD_16@8978437$kAudioChannelLayoutTag_DVD_17@7929862$kAudioChannelLayoutTag_DVD_18@9043973$kAudioChannelLayoutTag_DVD_19@7733253$kAudioChannelLayoutTag_DVD_2@8585219$kAudioChannelLayoutTag_DVD_20@7995398$kAudioChannelLayoutTag_DVD_3@8650756$kAudioChannelLayoutTag_DVD_4@8716291$kAudioChannelLayoutTag_DVD_5@8781828$kAudioChannelLayoutTag_DVD_6@8847365$kAudioChannelLayoutTag_DVD_7@7405571$kAudioChannelLayoutTag_DVD_8@7536644$kAudioChannelLayoutTag_DVD_9@7667717$kAudioChannelLayoutTag_DiscreteInOrder@9633792$kAudioChannelLayoutTag_EAC3_6_1_A@10289159$kAudioChannelLayoutTag_EAC3_6_1_B@10354695$kAudioChannelLayoutTag_EAC3_6_1_C@10420231$kAudioChannelLayoutTag_EAC3_7_1_A@10485768$kAudioChannelLayoutTag_EAC3_7_1_B@10551304$kAudioChannelLayoutTag_EAC3_7_1_C@10616840$kAudioChannelLayoutTag_EAC3_7_1_D@10682376$kAudioChannelLayoutTag_EAC3_7_1_E@10747912$kAudioChannelLayoutTag_EAC3_7_1_F@10813448$kAudioChannelLayoutTag_EAC3_7_1_G@10878984$kAudioChannelLayoutTag_EAC3_7_1_H@10944520$kAudioChannelLayoutTag_EAC_6_0_A@10158086$kAudioChannelLayoutTag_EAC_7_0_A@10223623$kAudioChannelLayoutTag_Emagic_Default_7_1@8454152$kAudioChannelLayoutTag_EndReserved@4294901759$kAudioChannelLayoutTag_HOA_ACN_N3D@12517376$kAudioChannelLayoutTag_HOA_ACN_SN3D@12451840$kAudioChannelLayoutTag_Hexagonal@7208966$kAudioChannelLayoutTag_ITU_1_0@6553601$kAudioChannelLayoutTag_ITU_2_0@6619138$kAudioChannelLayoutTag_ITU_2_1@8585219$kAudioChannelLayoutTag_ITU_2_2@8650756$kAudioChannelLayoutTag_ITU_3_0@7405571$kAudioChannelLayoutTag_ITU_3_1@7536644$kAudioChannelLayoutTag_ITU_3_2@7667717$kAudioChannelLayoutTag_ITU_3_2_1@7929862$kAudioChannelLayoutTag_ITU_3_4_1@8388616$kAudioChannelLayoutTag_MPEG_1_0@6553601$kAudioChannelLayoutTag_MPEG_2_0@6619138$kAudioChannelLayoutTag_MPEG_3_0_A@7405571$kAudioChannelLayoutTag_MPEG_3_0_B@7471107$kAudioChannelLayoutTag_MPEG_4_0_A@7536644$kAudioChannelLayoutTag_MPEG_4_0_B@7602180$kAudioChannelLayoutTag_MPEG_5_0_A@7667717$kAudioChannelLayoutTag_MPEG_5_0_B@7733253$kAudioChannelLayoutTag_MPEG_5_0_C@7798789$kAudioChannelLayoutTag_MPEG_5_0_D@7864325$kAudioChannelLayoutTag_MPEG_5_1_A@7929862$kAudioChannelLayoutTag_MPEG_5_1_B@7995398$kAudioCh
annelLayoutTag_MPEG_5_1_C@8060934$kAudioChannelLayoutTag_MPEG_5_1_D@8126470$kAudioChannelLayoutTag_MPEG_6_1_A@8192007$kAudioChannelLayoutTag_MPEG_7_1_A@8257544$kAudioChannelLayoutTag_MPEG_7_1_B@8323080$kAudioChannelLayoutTag_MPEG_7_1_C@8388616$kAudioChannelLayoutTag_MatrixStereo@6750210$kAudioChannelLayoutTag_MidSide@6815746$kAudioChannelLayoutTag_Mono@6553601$kAudioChannelLayoutTag_Octagonal@7274504$kAudioChannelLayoutTag_Pentagonal@7143429$kAudioChannelLayoutTag_Quadraphonic@7077892$kAudioChannelLayoutTag_SMPTE_DTV@8519688$kAudioChannelLayoutTag_Stereo@6619138$kAudioChannelLayoutTag_StereoHeadphones@6684674$kAudioChannelLayoutTag_TMH_10_2_full@9568277$kAudioChannelLayoutTag_TMH_10_2_std@9502736$kAudioChannelLayoutTag_Unknown@4294901760$kAudioChannelLayoutTag_UseChannelBitmap@65536$kAudioChannelLayoutTag_UseChannelDescriptions@0$kAudioChannelLayoutTag_XY@6881282$kAudioClipLightControlClassID@1668049264$kAudioClockDeviceClassID@1633905771$kAudioClockDevicePropertyAvailableNominalSampleRates@1853059619$kAudioClockDevicePropertyClockDomain@1668049764$kAudioClockDevicePropertyControlList@1668575852$kAudioClockDevicePropertyDeviceIsAlive@1818850926$kAudioClockDevicePropertyDeviceIsRunning@1735354734$kAudioClockDevicePropertyDeviceUID@1668639076$kAudioClockDevicePropertyLatency@1819569763$kAudioClockDevicePropertyNominalSampleRate@1853059700$kAudioClockDevicePropertyTransportType@1953653102$kAudioClockSourceControlClassID@1668047723$kAudioClockSourceControlPropertyItemKind@1668049771$kAudioClockSourceItemKindInternal@1768846368$kAudioControlClassID@1633907820$kAudioControlPropertyElement@1667591277$kAudioControlPropertyScope@1668506480$kAudioControlPropertyVariant@1668702578$kAudioDataDestinationControlClassID@1684370292$kAudioDataSourceControlClassID@1685287523$kAudioDeviceClassID@1633969526$kAudioDevicePermissionsError@560492391$kAudioDeviceProcessorOverload@1870030194$kAudioDevicePropertyActualSampleRate@1634955892$kAudioDevicePropertyAvailableNominalSampleRates@1853059619$kAudioDevicePropertyBufferFrameSize@1718839674$kAudioDevicePropertyBufferFrameSizeRange@1718843939$kAudioDevicePropertyBufferSize@1651730810$kAudioDevicePropertyBufferSizeRange@1651735075$kAudioDevicePropertyChannelCategoryName@1667460717$kAudioDevicePropertyChannelCategoryNameCFString@1818452846$kAudioDevicePropertyChannelName@1667788397$kAudioDevicePropertyChannelNameCFString@1818454126$kAudioDevicePropertyChannelNominalLineLevel@1852601964$kAudioDevicePropertyChannelNominalLineLevelNameForID@1668181110$kAudioDevicePropertyChannelNominalLineLevelNameForIDCFString@1818455660$kAudioDevicePropertyChannelNominalLineLevels@1852601891$kAudioDevicePropertyChannelNumberName@1668181613$kAudioDevicePropertyChannelNumberNameCFString@1818455662$kAudioDevicePropertyClipLight@1668049264$kAudioDevicePropertyClockDevice@1634755428$kAudioDevicePropertyClockDomain@1668049764$kAudioDevicePropertyClockSource@1668510307$kAudioDevicePropertyClockSourceKindForID@1668506475$kAudioDevicePropertyClockSourceNameForID@1668506478$kAudioDevicePropertyClockSourceNameForIDCFString@1818456942$kAudioDevicePropertyClockSources@1668506403$kAudioDevicePropertyConfigurationApplication@1667330160$kAudioDevicePropertyDataSource@1936945763$kAudioDevicePropertyDataSourceKindForID@1936941931$kAudioDevicePropertyDataSourceNameForID@1936941934$kAudioDevicePropertyDataSourceNameForIDCFString@1819501422$kAudioDevicePropertyDataSources@1936941859$kAudioDevicePropertyDeviceCanBeDefaultDevice@1684434036$kAudioDevicePropertyDeviceCanBeDefaultSystemDevice@1936092276$kAudio
DevicePropertyDeviceHasChanged@1684629094$kAudioDevicePropertyDeviceIsAlive@1818850926$kAudioDevicePropertyDeviceIsRunning@1735354734$kAudioDevicePropertyDeviceIsRunningSomewhere@1735356005$kAudioDevicePropertyDeviceManufacturer@1835101042$kAudioDevicePropertyDeviceManufacturerCFString@1819107691$kAudioDevicePropertyDeviceName@1851878757$kAudioDevicePropertyDeviceNameCFString@1819173229$kAudioDevicePropertyDeviceUID@1969841184$kAudioDevicePropertyDriverShouldOwniSub@1769174370$kAudioDevicePropertyHighPassFilterSetting@1751740518$kAudioDevicePropertyHighPassFilterSettingNameForID@1667787120$kAudioDevicePropertyHighPassFilterSettingNameForIDCFString@1751740524$kAudioDevicePropertyHighPassFilterSettings@1751740451$kAudioDevicePropertyHogMode@1869180523$kAudioDevicePropertyIOCycleUsage@1852012899$kAudioDevicePropertyIOProcStreamUsage@1937077093$kAudioDevicePropertyIOStoppedAbnormally@1937010788$kAudioDevicePropertyIcon@1768124270$kAudioDevicePropertyIsHidden@1751737454$kAudioDevicePropertyJackIsConnected@1784767339$kAudioDevicePropertyLatency@1819569763$kAudioDevicePropertyListenback@1819504226$kAudioDevicePropertyModelUID@1836411236$kAudioDevicePropertyMute@1836414053$kAudioDevicePropertyNominalSampleRate@1853059700$kAudioDevicePropertyPhantomPower@1885888878$kAudioDevicePropertyPhaseInvert@1885893481$kAudioDevicePropertyPlayThru@1953002101$kAudioDevicePropertyPlayThruDestination@1835295859$kAudioDevicePropertyPlayThruDestinationNameForID@1835295854$kAudioDevicePropertyPlayThruDestinationNameForIDCFString@1835295843$kAudioDevicePropertyPlayThruDestinations@1835295779$kAudioDevicePropertyPlayThruSolo@1953002099$kAudioDevicePropertyPlayThruStereoPan@1836281966$kAudioDevicePropertyPlayThruStereoPanChannels@1836281891$kAudioDevicePropertyPlayThruVolumeDecibels@1836475490$kAudioDevicePropertyPlayThruVolumeDecibelsToScalar@1836462707$kAudioDevicePropertyPlayThruVolumeDecibelsToScalarTransferFunction@1836479590$kAudioDevicePropertyPlayThruVolumeRangeDecibels@1836475427$kAudioDevicePropertyPlayThruVolumeScalar@1836479331$kAudioDevicePropertyPlayThruVolumeScalarToDecibels@1836462692$kAudioDevicePropertyPlugIn@1886156135$kAudioDevicePropertyPreferredChannelLayout@1936879204$kAudioDevicePropertyPreferredChannelsForStereo@1684236338$kAudioDevicePropertyRegisterBufferList@1919055206$kAudioDevicePropertyRelatedDevices@1634429294$kAudioDevicePropertySafetyOffset@1935763060$kAudioDevicePropertyScopeInput@1768845428$kAudioDevicePropertyScopeOutput@1869968496$kAudioDevicePropertyScopePlayThrough@1886679669$kAudioDevicePropertySolo@1936682095$kAudioDevicePropertyStereoPan@1936744814$kAudioDevicePropertyStereoPanChannels@1936748067$kAudioDevicePropertyStreamConfiguration@1936482681$kAudioDevicePropertyStreamFormat@1936092532$kAudioDevicePropertyStreamFormatMatch@1936092525$kAudioDevicePropertyStreamFormatSupported@1936092479$kAudioDevicePropertyStreamFormats@1936092451$kAudioDevicePropertyStreams@1937009955$kAudioDevicePropertySubMute@1936553332$kAudioDevicePropertySubVolumeDecibels@1937140836$kAudioDevicePropertySubVolumeDecibelsToScalar@1935946358$kAudioDevicePropertySubVolumeDecibelsToScalarTransferFunction@1937142886$kAudioDevicePropertySubVolumeRangeDecibels@1937138723$kAudioDevicePropertySubVolumeScalar@1937140845$kAudioDevicePropertySubVolumeScalarToDecibels@1937125988$kAudioDevicePropertySupportsMixing@1835628607$kAudioDevicePropertyTalkback@1952541794$kAudioDevicePropertyTransportType@1953653102$kAudioDevicePropertyUsesVariableBufferFrameSizes@1986425722$kAudioDevicePropertyVolumeDecibels@1987013732$kAud
ioDevicePropertyVolumeDecibelsToScalar@1684157046$kAudioDevicePropertyVolumeDecibelsToScalarTransferFunction@1986229350$kAudioDevicePropertyVolumeRangeDecibels@1986290211$kAudioDevicePropertyVolumeScalar@1987013741$kAudioDevicePropertyVolumeScalarToDecibels@1983013986$kAudioDeviceStartTimeDontConsultDeviceFlag@2$kAudioDeviceStartTimeDontConsultHALFlag@4$kAudioDeviceStartTimeIsInputFlag@1$kAudioDeviceTransportTypeAVB@1700886114$kAudioDeviceTransportTypeAggregate@1735554416$kAudioDeviceTransportTypeAirPlay@1634300528$kAudioDeviceTransportTypeAutoAggregate@1718055536$kAudioDeviceTransportTypeBluetooth@1651275109$kAudioDeviceTransportTypeBluetoothLE@1651271009$kAudioDeviceTransportTypeBuiltIn@1651274862$kAudioDeviceTransportTypeDisplayPort@1685090932$kAudioDeviceTransportTypeFireWire@825440564$kAudioDeviceTransportTypeHDMI@1751412073$kAudioDeviceTransportTypePCI@1885563168$kAudioDeviceTransportTypeThunderbolt@1953002862$kAudioDeviceTransportTypeUSB@1970496032$kAudioDeviceTransportTypeUnknown@0$kAudioDeviceTransportTypeVirtual@1986622068$kAudioDeviceUnknown@0$kAudioDeviceUnsupportedFormatError@560226676$kAudioEndPointClassID@1701733488$kAudioEndPointDeviceClassID@1701078390$kAudioEndPointDevicePropertyComposition@1633906541$kAudioEndPointDevicePropertyEndPointList@1634169456$kAudioEndPointDevicePropertyIsPrivate@1886546294$kAudioFormat60958AC3@1667326771$kAudioFormatAC3@1633889587$kAudioFormatAES3@1634038579$kAudioFormatALaw@1634492791$kAudioFormatAMR@1935764850$kAudioFormatAMR_WB@1935767394$kAudioFormatAppleIMA4@1768775988$kAudioFormatAppleLossless@1634492771$kAudioFormatAudible@1096107074$kAudioFormatDVIIntelIMA@1836253201$kAudioFormatEnhancedAC3@1700998451$kAudioFormatFLAC@1718378851$kAudioFormatFlagIsAlignedHigh@16$kAudioFormatFlagIsBigEndian@2$kAudioFormatFlagIsFloat@1$kAudioFormatFlagIsNonInterleaved@32$kAudioFormatFlagIsNonMixable@64$kAudioFormatFlagIsPacked@8$kAudioFormatFlagIsSignedInteger@4$kAudioFormatFlagsAreAllClear@2147483648$kAudioFormatFlagsAudioUnitCanonical@41$kAudioFormatFlagsCanonical@9$kAudioFormatFlagsNativeEndian@0$kAudioFormatFlagsNativeFloatPacked@9$kAudioFormatLinearPCM@1819304813$kAudioFormatMACE3@1296122675$kAudioFormatMACE6@1296122678$kAudioFormatMIDIStream@1835623529$kAudioFormatMPEG4AAC@1633772320$kAudioFormatMPEG4AAC_ELD@1633772389$kAudioFormatMPEG4AAC_ELD_SBR@1633772390$kAudioFormatMPEG4AAC_ELD_V2@1633772391$kAudioFormatMPEG4AAC_HE@1633772392$kAudioFormatMPEG4AAC_HE_V2@1633772400$kAudioFormatMPEG4AAC_LD@1633772396$kAudioFormatMPEG4AAC_Spatial@1633772403$kAudioFormatMPEG4CELP@1667591280$kAudioFormatMPEG4HVXC@1752594531$kAudioFormatMPEG4TwinVQ@1953986161$kAudioFormatMPEGLayer1@778924081$kAudioFormatMPEGLayer2@778924082$kAudioFormatMPEGLayer3@778924083$kAudioFormatMicrosoftGSM@1836253233$kAudioFormatOpus@1869641075$kAudioFormatParameterValueStream@1634760307$kAudioFormatQDesign@1363430723$kAudioFormatQDesign2@1363430706$kAudioFormatQUALCOMM@1365470320$kAudioFormatTimeCode@1953066341$kAudioFormatULaw@1970037111$kAudioFormatiLBC@1768710755$kAudioHardwareBadDeviceError@560227702$kAudioHardwareBadObjectError@560947818$kAudioHardwareBadPropertySizeError@561211770$kAudioHardwareBadStreamError@561214578$kAudioHardwareIllegalOperationError@1852797029$kAudioHardwareNoError@0$kAudioHardwareNotRunningError@1937010544$kAudioHardwarePowerHintFavorSavingPower@1$kAudioHardwarePowerHintNone@0$kAudioHardwarePropertyBootChimeVolumeDecibels@1650620004$kAudioHardwarePropertyBootChimeVolumeDecibelsToScalar@1650733686$kAudioHardwarePropertyBootChimeVolumeDecibelsToScalarTransferFunction@
1651930214$kAudioHardwarePropertyBootChimeVolumeRangeDecibels@1650615331$kAudioHardwarePropertyBootChimeVolumeScalar@1650620019$kAudioHardwarePropertyBootChimeVolumeScalarToDecibels@1651913316$kAudioHardwarePropertyBoxList@1651472419$kAudioHardwarePropertyClockDeviceList@1668049699$kAudioHardwarePropertyDefaultInputDevice@1682533920$kAudioHardwarePropertyDefaultOutputDevice@1682929012$kAudioHardwarePropertyDefaultSystemOutputDevice@1934587252$kAudioHardwarePropertyDeviceForUID@1685416292$kAudioHardwarePropertyDevices@1684370979$kAudioHardwarePropertyHogModeIsAllowed@1752131442$kAudioHardwarePropertyIsInitingOrExiting@1768845172$kAudioHardwarePropertyMixStereoToMono@1937010031$kAudioHardwarePropertyPlugInForBundleID@1885954665$kAudioHardwarePropertyPlugInList@1886152483$kAudioHardwarePropertyPowerHint@1886353256$kAudioHardwarePropertyProcessIsAudible@1886221684$kAudioHardwarePropertyProcessIsMaster@1835103092$kAudioHardwarePropertyRunLoop@1919839344$kAudioHardwarePropertyServiceRestarted@1936880500$kAudioHardwarePropertySleepingIsAllowed@1936483696$kAudioHardwarePropertyTranslateBundleIDToPlugIn@1651074160$kAudioHardwarePropertyTranslateBundleIDToTransportManager@1953325673$kAudioHardwarePropertyTranslateUIDToBox@1969841250$kAudioHardwarePropertyTranslateUIDToClockDevice@1969841251$kAudioHardwarePropertyTranslateUIDToDevice@1969841252$kAudioHardwarePropertyTransportManagerList@1953326883$kAudioHardwarePropertyUnloadingIsAllowed@1970170980$kAudioHardwarePropertyUserIDChanged@1702193508$kAudioHardwarePropertyUserSessionIsActiveOrHeadless@1970496882$kAudioHardwareUnknownPropertyError@2003332927$kAudioHardwareUnspecifiedError@2003329396$kAudioHardwareUnsupportedOperationError@1970171760$kAudioHighPassFilterControlClassID@1751740518$kAudioISubOwnerControlClassID@1635017576$kAudioJackControlClassID@1784767339$kAudioLFEMuteControlClassID@1937072749$kAudioLFEVolumeControlClassID@1937072758$kAudioLevelControlClassID@1818588780$kAudioLevelControlPropertyConvertDecibelsToScalar@1818453107$kAudioLevelControlPropertyConvertScalarToDecibels@1818456932$kAudioLevelControlPropertyDecibelRange@1818453106$kAudioLevelControlPropertyDecibelValue@1818453110$kAudioLevelControlPropertyDecibelsToScalarTransferFunction@1818457190$kAudioLevelControlPropertyScalarValue@1818456950$kAudioLevelControlTranferFunction10Over1@13$kAudioLevelControlTranferFunction11Over1@14$kAudioLevelControlTranferFunction12Over1@15$kAudioLevelControlTranferFunction1Over2@2$kAudioLevelControlTranferFunction1Over3@1$kAudioLevelControlTranferFunction2Over1@5$kAudioLevelControlTranferFunction3Over1@6$kAudioLevelControlTranferFunction3Over2@4$kAudioLevelControlTranferFunction3Over4@3$kAudioLevelControlTranferFunction4Over1@7$kAudioLevelControlTranferFunction5Over1@8$kAudioLevelControlTranferFunction6Over1@9$kAudioLevelControlTranferFunction7Over1@10$kAudioLevelControlTranferFunction8Over1@11$kAudioLevelControlTranferFunction9Over1@12$kAudioLevelControlTranferFunctionLinear@0$kAudioLineLevelControlClassID@1852601964$kAudioListenbackControlClassID@1819504226$kAudioMuteControlClassID@1836414053$kAudioObjectClassID@1634689642$kAudioObjectClassIDWildcard@707406378$kAudioObjectPropertyBaseClass@1650682995$kAudioObjectPropertyClass@1668047219$kAudioObjectPropertyControlList@1668575852$kAudioObjectPropertyCreator@1869638759$kAudioObjectPropertyElementCategoryName@1818452846$kAudioObjectPropertyElementMaster@0$kAudioObjectPropertyElementName@1818454126$kAudioObjectPropertyElementNumberName@1818455662$kAudioObjectPropertyElementWildcard@4294967295$kAudioOb
jectPropertyFirmwareVersion@1719105134$kAudioObjectPropertyIdentify@1768187246$kAudioObjectPropertyListenerAdded@1818850145$kAudioObjectPropertyListenerRemoved@1818850162$kAudioObjectPropertyManufacturer@1819107691$kAudioObjectPropertyModelName@1819111268$kAudioObjectPropertyName@1819173229$kAudioObjectPropertyOwnedObjects@1870098020$kAudioObjectPropertyOwner@1937007734$kAudioObjectPropertyScopeGlobal@1735159650$kAudioObjectPropertyScopeInput@1768845428$kAudioObjectPropertyScopeOutput@1869968496$kAudioObjectPropertyScopePlayThrough@1886679669$kAudioObjectPropertyScopeWildcard@707406378$kAudioObjectPropertySelectorWildcard@707406378$kAudioObjectPropertySerialNumber@1936618861$kAudioObjectSystemObject@1$kAudioObjectUnknown@0$kAudioPhantomPowerControlClassID@1885888878$kAudioPhaseInvertControlClassID@1885893481$kAudioPlugInClassID@1634757735$kAudioPlugInCreateAggregateDevice@1667327847$kAudioPlugInDestroyAggregateDevice@1684105063$kAudioPlugInPropertyBoxList@1651472419$kAudioPlugInPropertyBundleID@1885956452$kAudioPlugInPropertyClockDeviceList@1668049699$kAudioPlugInPropertyDeviceList@1684370979$kAudioPlugInPropertyTranslateUIDToBox@1969841250$kAudioPlugInPropertyTranslateUIDToClockDevice@1969841251$kAudioPlugInPropertyTranslateUIDToDevice@1969841252$kAudioPropertyWildcardChannel@4294967295$kAudioPropertyWildcardPropertyID@707406378$kAudioPropertyWildcardSection@255$kAudioSelectorControlClassID@1936483188$kAudioSelectorControlItemKindSpacer@1936745330$kAudioSelectorControlPropertyAvailableItems@1935892841$kAudioSelectorControlPropertyCurrentItem@1935893353$kAudioSelectorControlPropertyItemKind@1668049771$kAudioSelectorControlPropertyItemName@1935894894$kAudioSliderControlClassID@1936483442$kAudioSliderControlPropertyRange@1935962738$kAudioSliderControlPropertyValue@1935962742$kAudioSoloControlClassID@1936682095$kAudioStereoPanControlClassID@1936744814$kAudioStereoPanControlPropertyPanningChannels@1936745315$kAudioStereoPanControlPropertyValue@1936745334$kAudioStreamClassID@1634956402$kAudioStreamPropertyAvailablePhysicalFormats@1885762657$kAudioStreamPropertyAvailableVirtualFormats@1936092513$kAudioStreamPropertyDirection@1935960434$kAudioStreamPropertyIsActive@1935762292$kAudioStreamPropertyLatency@1819569763$kAudioStreamPropertyOwningDevice@1937007734$kAudioStreamPropertyPhysicalFormat@1885762592$kAudioStreamPropertyPhysicalFormatMatch@1885762669$kAudioStreamPropertyPhysicalFormatSupported@1885762623$kAudioStreamPropertyPhysicalFormats@1885762595$kAudioStreamPropertyStartingChannel@1935894638$kAudioStreamPropertyTerminalType@1952805485$kAudioStreamPropertyVirtualFormat@1936092532$kAudioStreamTerminalTypeDigitalAudioInterface@1936745574$kAudioStreamTerminalTypeDisplayPort@1685090932$kAudioStreamTerminalTypeHDMI@1751412073$kAudioStreamTerminalTypeHeadphones@1751412840$kAudioStreamTerminalTypeHeadsetMicrophone@1752000867$kAudioStreamTerminalTypeLFESpeaker@1818649971$kAudioStreamTerminalTypeLine@1818848869$kAudioStreamTerminalTypeMicrophone@1835623282$kAudioStreamTerminalTypeReceiverMicrophone@1919773027$kAudioStreamTerminalTypeReceiverSpeaker@1920168043$kAudioStreamTerminalTypeSpeaker@1936747378$kAudioStreamTerminalTypeTTY@1953790303$kAudioStreamTerminalTypeUnknown@0$kAudioStreamUnknown@0$kAudioSubDeviceClassID@1634956642$kAudioSubDeviceDriftCompensationHighQuality@96$kAudioSubDeviceDriftCompensationLowQuality@32$kAudioSubDeviceDriftCompensationMaxQuality@127$kAudioSubDeviceDriftCompensationMediumQuality@64$kAudioSubDeviceDriftCompensationMinQuality@0$kAudioSubDevicePropertyDriftCompensation@16
85218932$kAudioSubDevicePropertyDriftCompensationQuality@1685218929$kAudioSubDevicePropertyExtraLatency@2020373603$kAudioSystemObjectClassID@1634957683$kAudioTalkbackControlClassID@1952541794$kAudioTimeStampHostTimeValid@2$kAudioTimeStampNothingValid@0$kAudioTimeStampRateScalarValid@4$kAudioTimeStampSMPTETimeValid@16$kAudioTimeStampSampleHostTimeValid@3$kAudioTimeStampSampleTimeValid@1$kAudioTimeStampWordClockTimeValid@8$kAudioTransportManagerClassID@1953656941$kAudioTransportManagerCreateEndPointDevice@1667523958$kAudioTransportManagerDestroyEndPointDevice@1684301174$kAudioTransportManagerPropertyEndPointList@1701733411$kAudioTransportManagerPropertyTranslateUIDToEndPoint@1969841253$kAudioTransportManagerPropertyTransportType@1953653102$kAudioVolumeControlClassID@1986817381$kAudio_BadFilePathError@561017960$kAudio_FileNotFoundError@-43$kAudio_FilePermissionError@-54$kAudio_MemFullError@-108$kAudio_ParamError@-50$kAudio_TooManyFilesOpenError@-42$kAudio_UnimplementedError@-4$kLinearPCMFormatFlagIsAlignedHigh@16$kLinearPCMFormatFlagIsBigEndian@2$kLinearPCMFormatFlagIsFloat@1$kLinearPCMFormatFlagIsNonInterleaved@32$kLinearPCMFormatFlagIsNonMixable@64$kLinearPCMFormatFlagIsPacked@8$kLinearPCMFormatFlagIsSignedInteger@4$kLinearPCMFormatFlagsAreAllClear@2147483648$kLinearPCMFormatFlagsSampleFractionMask@8064$kLinearPCMFormatFlagsSampleFractionShift@7$kMPEG4Object_AAC_LC@2$kMPEG4Object_AAC_LTP@4$kMPEG4Object_AAC_Main@1$kMPEG4Object_AAC_SBR@5$kMPEG4Object_AAC_SSR@3$kMPEG4Object_AAC_Scalable@6$kMPEG4Object_CELP@8$kMPEG4Object_HVXC@9$kMPEG4Object_TwinVQ@7$kSMPTETimeRunning@2$kSMPTETimeType2398@11$kSMPTETimeType24@0$kSMPTETimeType25@1$kSMPTETimeType2997@4$kSMPTETimeType2997Drop@5$kSMPTETimeType30@3$kSMPTETimeType30Drop@2$kSMPTETimeType50@10$kSMPTETimeType5994@7$kSMPTETimeType5994Drop@9$kSMPTETimeType60@6$kSMPTETimeType60Drop@8$kSMPTETimeUnknown@0$kSMPTETimeValid@1$''' -misc.update({'kAudioEndPointInputChannelsKey': b'channels-in', 'kAudioSubDeviceUIDKey': b'uid', 'kAudioAggregateDeviceNameKey': b'name', 'kAudioSubDeviceExtraOutputLatencyKey': b'latency-out', 'kAudioAggregateDeviceMasterSubDeviceKey': b'master', 'kAudioEndPointOutputChannelsKey': b'channels-out', 'kAudioSubDeviceExtraInputLatencyKey': b'latency-in', 'kAudioEndPointDeviceEndPointListKey': b'endpoints', 'kAudioEndPointDeviceUIDKey': b'uid', 'kAudioAggregateDeviceClockDeviceKey': b'clock', 'kAudioEndPointDeviceNameKey': b'name', 'kAudioEndPointDeviceIsPrivateKey': b'private', 'kAudioSubDeviceOutputChannelsKey': b'channels-out', 'kAudioEndPointNameKey': b'name', 'kAudioSubDeviceDriftCompensationQualityKey': b'drift quality', 'kAudioSubDeviceDriftCompensationKey': b'drift', 'kAudioEndPointDeviceMasterEndPointKey': b'master', 'kAudioSubDeviceNameKey': b'name', 'kAudioHardwareRunLoopMode': b'com.apple.audio.CoreAudio', 'kAudioAggregateDeviceUIDKey': b'uid', 'kAudioSubDeviceInputChannelsKey': b'channels-in', 'kAudioAggregateDeviceSubDeviceListKey': b'subdevices', 'kAudioAggregateDeviceIsStackedKey': b'stacked', 'kAudioAggregateDeviceIsPrivateKey': b'private', 'kAudioEndPointUIDKey': b'uid'}) -functions={'AudioDeviceSetProperty': (sel32or64(b'lL^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}LZLL^v', b'iI^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}IZII^v'), '', {'arguments': {1: {'type_modifier': 'n'}, 6: {'c_array_length_in_arg': 5, 'type_modifier': 'n'}}}), 'AudioDeviceRemovePropertyListener': (sel32or64(b'lLLZL^?', b'iIIZI^?'), '', {'arguments': {4: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: 
{'type': b'Z'}, 3: {'type': b'I'}, 4: {'type': b'^v'}}}}}}), 'FillOutAudioTimeStampWithSampleAndHostTime': (sel32or64(b'vo^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}dQ', b'vo^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}dQ'),), 'AudioObjectRemovePropertyListenerBlock': (sel32or64(b'lL^{AudioObjectPropertyAddress=LLL}@@?', b'iI^{AudioObjectPropertyAddress=III}@@?'), '', {'arguments': {1: {'type_modifier': 'n'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'I'}, 2: {'type': sel32or64(b'n^{AudioObjectPropertyAddress=LLL}', b'n^{AudioObjectPropertyAddress=III}'), 'c_array_length_in_arg': 1}}}}}}), 'AudioGetHostClockMinimumTimeDelta': (sel32or64(b'L', b'I'), '', {'variadic': True}), 'AudioHardwareGetProperty': (sel32or64(b'lL^L^v', b'iI^I^v'), '', {'arguments': {1: {'type_modifier': 'N'}, 2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'AudioDeviceStartAtTime': (sel32or64(b'lL^?^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}L', b'iI^?^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}I'), '', {'arguments': {2: {'type_modifier': 'N'}}}), 'AudioConvertHostTimeToNanos': (b'QQ',), 'AudioDeviceGetProperty': (sel32or64(b'lLLZL^L^v', b'iIIZI^I^v'), '', {'arguments': {4: {'type_modifier': 'N'}, 5: {'c_array_length_in_arg': 4, 'type_modifier': 'o'}}}), 'AudioHardwareDevicesDied': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}I^I', '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'AudioObjectAddPropertyListenerBlock': (sel32or64(b'lL^{AudioObjectPropertyAddress=LLL}@@?', b'iI^{AudioObjectPropertyAddress=III}@@?'), '', {'arguments': {1: {'type_modifier': 'n'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'I'}, 2: {'type': sel32or64('n^{AudioObjectPropertyAddress=LLL}', 'n^{AudioObjectPropertyAddress=III}'), 'c_array_length_in_arg': 1}}}}}}), 'AudioHardwareClaimAudioDeviceID': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'AudioObjectSetPropertyData': (sel32or64(b'lL^{AudioObjectPropertyAddress=LLL}L^vL^v', b'iI^{AudioObjectPropertyAddress=III}I^vI^v'), '', {'arguments': {1: {'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 5: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}}}), 'AudioObjectAddPropertyListener': (sel32or64(b'lL^{AudioObjectPropertyAddress=LLL}^?^v', b'iI^{AudioObjectPropertyAddress=III}^?^v'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: {'type': sel32or64('n^{AudioObjectPropertyAddress=LLL}', b'n^{AudioObjectPropertyAddress=III}')}, 3: {'type': b'^v'}}}}}}), 'AudioHardwareAddRunLoopSource': (sel32or64(b'l^{__CFRunLoopSource=}', b'i^{__CFRunLoopSource=}'),), 'AudioStreamSetProperty': (sel32or64(b'lL^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}LLL^v', b'iI^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}III^v'), '', {'arguments': {1: {'type_modifier': 'n'}, 5: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}}}), 'AudioStreamAddPropertyListener': (sel32or64(b'lLLL^?^v', b'iIII^?^v'), '', {'arguments': {3: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: {'type': b'I'}, 3: {'type': b'^v'}}}}}}), 'AudioHardwareDevicePropertyChanged': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}IIZI',), 
'AudioStreamGetPropertyInfo': (sel32or64(b'lLLL^L^Z', b'iIII^I^Z'), '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'AudioHardwareDevicesCreated': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}I^I', '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'AudioGetCurrentHostTime': (b'Q', '', {'variadic': True}), 'AudioObjectCreate': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}II^I', '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'o'}}}), 'AudioConvertNanosToHostTime': (b'QQ',), 'AudioObjectRemovePropertyListener': (sel32or64(b'lL^{AudioObjectPropertyAddress=LLL}^?^v', b'iI^{AudioObjectPropertyAddress=III}^?^v'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: {'type': sel32or64(b'n^{AudioObjectPropertyAddress=LLL}', b'n^{AudioObjectPropertyAddress=III}')}, 3: {'type': b'^v'}}}}}}), 'AudioDeviceGetNearestStartTime': (sel32or64(b'lL^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}L', b'iI^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}I'), '', {'arguments': {1: {'type_modifier': 'N'}}}), 'AudioStreamRemovePropertyListener': (sel32or64(b'lLLL^?', b'iIII^?'), '', {'arguments': {3: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: {'type': b'I'}, 3: {'type': b'^v'}}}}}}), 'AudioDeviceStop': (sel32or64(b'lL^?', b'iI^?'),), 'AudioHardwareGetPropertyInfo': (sel32or64(b'lL^L^Z', b'iI^I^Z'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'AudioHardwareStreamPropertyChanged': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}IIII',), 'AudioHardwareUnload': (sel32or64(b'l', b'i'), '', {'variadic': True}), 'AudioStreamGetProperty': (sel32or64(b'lLLL^L^v', b'iIII^I^v'), '', {'arguments': {3: {'type_modifier': 'N'}, 4: {'c_array_length_in_arg': 3, 'type_modifier': 'o'}}}), 'AudioHardwareClaimAudioStreamID': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}I^I', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'FillOutAudioTimeStampWithSampleTime': (sel32or64(b'vo^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}d', b'vo^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}d'),), 'CalculateLPCMFlags': (b'IIIBBB',), 'AudioHardwareCreateAggregateDevice': (sel32or64(b'l^{__CFDictionary=}^L', b'i^{__CFDictionary=}^I'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}}}), 'AudioHardwareDestroyAggregateDevice': (sel32or64(b'lL', b'iI'),), 'AudioDeviceAddIOProc': (sel32or64(b'lL^?^v', b'iI^?^v'), '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 2: {'type': b'^{AudioBufferList=I[1{AudioBuffer=II^v}]}'}, 3: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 4: {'type': b'^{AudioBufferList=I[1{AudioBuffer=II^v}]}'}, 5: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 6: {'type': b'^v'}}}}}}), 'FillOutAudioTimeStampWithHostTime': (sel32or64(b'vo^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}Q', 
b'vo^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}Q'),), 'AudioObjectGetPropertyData': (sel32or64(b'lL^{AudioObjectPropertyAddress=LLL}L^v^L^v', b'iI^{AudioObjectPropertyAddress=III}I^v^I^v'), '', {'arguments': {1: {'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'type_modifier': 'N'}, 5: {'c_array_length_in_arg': 4, 'type_modifier': 'o'}}}), 'FillOutASBDForLPCM': (sel32or64(b'vo^{AudioStreamBasicDescription=dLLLLLLLL}dIIIBBB', b'vo^{AudioStreamBasicDescription=dIIIIIIII}dIIIBBB'),), 'AudioObjectShow': (sel32or64(b'vL', b'vI'),), 'AudioObjectPropertiesChanged': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}II^{AudioObjectPropertyAddress=III}', '', {'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'AudioHardwareSetProperty': (sel32or64(b'lLL^v', b'iII^v'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'IsAudioFormatNativeEndian': (sel32or64(b'Bn^{AudioStreamBasicDescription=dLLLLLLLL}', b'Bn^{AudioStreamBasicDescription=dIIIIIIII}'),), 'AudioHardwareRemoveRunLoopSource': (sel32or64(b'l^{__CFRunLoopSource=}', b'i^{__CFRunLoopSource=}'),), 'AudioDeviceGetCurrentTime': (sel32or64(b'lL^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'iI^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'AudioChannelLayoutTag_GetNumberOfChannels': (sel32or64(b'LL', b'II'),), 'AudioHardwareStreamsCreated': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}II^I', '', {'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'AudioGetHostClockFrequency': (b'd', '', {'variadic': True}), 'AudioDeviceStart': (sel32or64(b'lL^?', b'iI^?'),), 'AudioDeviceCreateIOProcIDWithBlock': (sel32or64(b'l^^?L@@?', b'i^^?I@@?'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 2: {'type': b'^{AudioBufferList=I[1{AudioBuffer=II^v}]}'}, 3: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 4: {'type': '^{AudioBufferList=I[1{AudioBuffer=II^v}]}'}, 5: {'type': sel32or64('N^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', 'N^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}}}}}}), 'AudioDeviceRemoveIOProc': (sel32or64(b'lL^?', b'iI^?'), '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 2: {'type': b'^{AudioBufferList=I[1{AudioBuffer=II^v}]}'}, 3: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 4: {'type': b'^{AudioBufferList=I[1{AudioBuffer=II^v}]}'}, 5: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 6: {'type': b'^v'}}}}}}), 'AudioDeviceAddPropertyListener': (sel32or64(b'lLLZL^?^v', b'iIIZI^?^v'), '', {'arguments': {4: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: {'type': b'Z'}, 3: {'type': b'I'}, 4: {'type': b'^v'}}}}}}), 'AudioObjectHasProperty': (sel32or64(b'ZL^{AudioObjectPropertyAddress=LLL}', 
b'ZI^{AudioObjectPropertyAddress=III}'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'AudioHardwareRemovePropertyListener': (sel32or64(b'lL^?', b'iI^?'), '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'^v'}}}}}}), 'AudioDeviceGetPropertyInfo': (sel32or64(b'lLLZL^L^Z', b'iIIZI^I^Z'), '', {'arguments': {4: {'type_modifier': 'o'}, 5: {'type_modifier': 'o'}}}), 'AudioObjectsPublishedAndDied': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}II^II^I', '', {'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 5: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}}}), 'AudioDeviceRead': (sel32or64(b'lL^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}^{AudioBufferList=L[1{AudioBuffer=LL^v}]}', b'iI^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}^{AudioBufferList=I[1{AudioBuffer=II^v}]}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'AudioHardwareStreamsDied': (b'i^^{AudioHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}II^I', '', {'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'AudioObjectIsPropertySettable': (sel32or64(b'lL^{AudioObjectPropertyAddress=LLL}^Z', b'iI^{AudioObjectPropertyAddress=III}^Z'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'AudioHardwareAddPropertyListener': (sel32or64(b'lL^?^v', b'iI^?^v'), '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'^v'}}}}}}), 'AudioDeviceTranslateTime': (sel32or64(b'lL^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'iI^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'AudioDeviceCreateIOProcID': (sel32or64(b'lL^?^v^^?', b'iI^?^v^^?'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 2: {'type': b'^{AudioBufferList=I[1{AudioBuffer=II^v}]}'}, 3: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 4: {'type': b'^{AudioBufferList=I[1{AudioBuffer=II^v}]}'}, 5: {'type': sel32or64(b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssLLLssss}LL}', b'n^{AudioTimeStamp=dQdQ{SMPTETime=ssIIIssss}II}')}, 6: {'type': b'^v'}}}}, 3: {'type_modifier': 'o'}}}), 'AudioObjectGetPropertyDataSize': (sel32or64(b'lL^{AudioObjectPropertyAddress=LLL}L^v^L', b'iI^{AudioObjectPropertyAddress=III}I^v^I'), '', {'arguments': {1: {'type_modifier': 'n'}, 3: {'c_array_length_in_arg': (2, 4), 'type_modifier': 'n'}, 4: {'type_modifier': 'o'}}}), 'AudioDeviceDestroyIOProcID': (sel32or64(b'lL^?', b'iI^?'),)} -aliases = {'kAudioChannelLayoutTag_ITU_2_0': 'kAudioChannelLayoutTag_Stereo', 'kAudioDevicePropertyDeviceNameCFString': 'kAudioObjectPropertyName', 'kAudioChannelLayoutTag_ITU_3_2': 'kAudioChannelLayoutTag_MPEG_5_0_A', 'kLinearPCMFormatFlagIsNonInterleaved': 'kAudioFormatFlagIsNonInterleaved', 'kAudioChannelLayoutTag_ITU_3_0': 'kAudioChannelLayoutTag_MPEG_3_0_A', 'kAudioChannelLayoutTag_ITU_3_1': 'kAudioChannelLayoutTag_MPEG_4_0_A', 'kAudioChannelLayoutTag_AudioUnit_8': 'kAudioChannelLayoutTag_Octagonal', 'kAudioChannelLayoutTag_DVD_19': 
'kAudioChannelLayoutTag_MPEG_5_0_B', 'kAudioChannelLayoutTag_AudioUnit_4': 'kAudioChannelLayoutTag_Quadraphonic', 'kAudioChannelLayoutTag_AudioUnit_5': 'kAudioChannelLayoutTag_Pentagonal', 'kAudioChannelLayoutTag_AudioUnit_6': 'kAudioChannelLayoutTag_Hexagonal', 'kAudioChannelLayoutTag_DVD_12': 'kAudioChannelLayoutTag_MPEG_5_1_A', 'kAudioChannelLayoutTag_DVD_13': 'kAudioChannelLayoutTag_DVD_8', 'kAudioChannelLayoutTag_DVD_16': 'kAudioChannelLayoutTag_DVD_11', 'kAudioChannelLayoutTag_DVD_17': 'kAudioChannelLayoutTag_DVD_12', 'kAudioChannelLayoutTag_DVD_14': 'kAudioChannelLayoutTag_DVD_9', 'kAudioChannelLayoutTag_DVD_15': 'kAudioChannelLayoutTag_DVD_10', 'kLinearPCMFormatFlagsAreAllClear': 'kAudioFormatFlagsAreAllClear', 'kLinearPCMFormatFlagIsAlignedHigh': 'kAudioFormatFlagIsAlignedHigh', 'kAudioChannelLayoutTag_MPEG_1_0': 'kAudioChannelLayoutTag_Mono', 'kAudioDevicePropertyScopePlayThrough': 'kAudioObjectPropertyScopePlayThrough', 'kAudioChannelLayoutTag_AudioUnit_6_1': 'kAudioChannelLayoutTag_MPEG_6_1_A', 'kAudioPropertyWildcardPropertyID': 'kAudioObjectPropertySelectorWildcard', 'kAudioStreamPropertyOwningDevice': 'kAudioObjectPropertyOwner', 'kAudioChannelLayoutTag_AudioUnit_5_0': 'kAudioChannelLayoutTag_MPEG_5_0_B', 'kAudioChannelLayoutTag_AudioUnit_5_1': 'kAudioChannelLayoutTag_MPEG_5_1_A', 'kAudioChannelLayoutTag_MPEG_2_0': 'kAudioChannelLayoutTag_Stereo', 'kAudioDevicePropertyChannelNumberNameCFString': 'kAudioObjectPropertyElementNumberName', 'kAudioChannelLayoutTag_AAC_4_0': 'kAudioChannelLayoutTag_MPEG_4_0_B', 'kAudioChannelLayoutTag_ITU_1_0': 'kAudioChannelLayoutTag_Mono', 'kAudioDeviceUnknown': 'kAudioObjectUnknown', 'kAudioChannelLayoutTag_AAC_3_0': 'kAudioChannelLayoutTag_MPEG_3_0_B', 'kAudioStreamPropertyLatency': 'kAudioDevicePropertyLatency', 'kLinearPCMFormatFlagIsSignedInteger': 'kAudioFormatFlagIsSignedInteger', 'kAudioDevicePropertyScopeOutput': 'kAudioObjectPropertyScopeOutput', 'kLinearPCMFormatFlagIsPacked': 'kAudioFormatFlagIsPacked', 'kAudioStreamUnknown': 'kAudioObjectUnknown', 'kAudioDevicePropertyChannelCategoryNameCFString': 'kAudioObjectPropertyElementCategoryName', 'kAudioChannelLayoutTag_DVD_3': 'kAudioChannelLayoutTag_ITU_2_2', 'kLinearPCMFormatFlagIsBigEndian': 'kAudioFormatFlagIsBigEndian', 'kAudioChannelLayoutTag_DVD_20': 'kAudioChannelLayoutTag_MPEG_5_1_B', 'kAudioChannelLayoutTag_AAC_5_1': 'kAudioChannelLayoutTag_MPEG_5_1_D', 'kAudioChannelLayoutTag_AAC_5_0': 'kAudioChannelLayoutTag_MPEG_5_0_D', 'kLinearPCMFormatFlagIsNonMixable': 'kAudioFormatFlagIsNonMixable', 'kAudioChannelLayoutTag_AudioUnit_7_1_Front': 'kAudioChannelLayoutTag_MPEG_7_1_A', 'kLinearPCMFormatFlagIsFloat': 'kAudioFormatFlagIsFloat', 'kAudioChannelLayoutTag_DVD_0': 'kAudioChannelLayoutTag_Mono', 'kAudioChannelLayoutTag_DVD_1': 'kAudioChannelLayoutTag_Stereo', 'kAudioChannelLayoutTag_DVD_2': 'kAudioChannelLayoutTag_ITU_2_1', 'kAudioChannelLayoutTag_AAC_Quadraphonic': 'kAudioChannelLayoutTag_Quadraphonic', 'kAudioChannelLayoutTag_AAC_7_1': 'kAudioChannelLayoutTag_MPEG_7_1_B', 'kAudioChannelLayoutTag_DVD_7': 'kAudioChannelLayoutTag_MPEG_3_0_A', 'kAudioChannelLayoutTag_DVD_8': 'kAudioChannelLayoutTag_MPEG_4_0_A', 'kAudioChannelLayoutTag_DVD_9': 'kAudioChannelLayoutTag_MPEG_5_0_A', 'kAudioChannelLayoutTag_ITU_3_2_1': 'kAudioChannelLayoutTag_MPEG_5_1_A', 'kAudioClockSourceControlPropertyItemKind': 'kAudioSelectorControlPropertyItemKind', 'kAudioDevicePropertyScopeInput': 'kAudioObjectPropertyScopeInput', 'kAudioDevicePropertyChannelNameCFString': 'kAudioObjectPropertyElementName', 
'kAudioChannelLayoutTag_ITU_3_4_1': 'kAudioChannelLayoutTag_MPEG_7_1_C', 'kAudioDevicePropertyDeviceManufacturerCFString': 'kAudioObjectPropertyManufacturer', 'kAudioPropertyWildcardChannel': 'kAudioObjectPropertyElementWildcard', 'kAudioChannelLayoutTag_AudioUnit_7_1': 'kAudioChannelLayoutTag_MPEG_7_1_C'} -misc.update({'AudioHardwarePlugInRef': objc.createOpaquePointerType('AudioHardwarePlugInRef', b'^{AudioHardwarePlugInInterface=}')}) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreAudio/_metadata.pyc b/env/lib/python2.7/site-packages/CoreAudio/_metadata.pyc deleted file mode 100644 index 7a7265bf..00000000 Binary files a/env/lib/python2.7/site-packages/CoreAudio/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreAudioKit/_CoreAudioKit.so b/env/lib/python2.7/site-packages/CoreAudioKit/_CoreAudioKit.so deleted file mode 100755 index e4ef6af2..00000000 Binary files a/env/lib/python2.7/site-packages/CoreAudioKit/_CoreAudioKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreAudioKit/__init__.py b/env/lib/python2.7/site-packages/CoreAudioKit/__init__.py deleted file mode 100644 index 8c0d4286..00000000 --- a/env/lib/python2.7/site-packages/CoreAudioKit/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the CoreAudioKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from CoreAudioKit import _metadata -import CoreAudio - -sys.modules['CoreAudioKit'] = mod = objc.ObjCLazyModule( - "CoreAudio", - "com.apple.CoreAudioKit", - objc.pathForFramework("/System/Library/Frameworks/CoreAudioKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (CoreAudio, Foundation,)) - -import sys -del sys.modules['CoreAudioKit._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreAudioKit/__init__.pyc b/env/lib/python2.7/site-packages/CoreAudioKit/__init__.pyc deleted file mode 100644 index e7a8375f..00000000 Binary files a/env/lib/python2.7/site-packages/CoreAudioKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreAudioKit/_metadata.py b/env/lib/python2.7/site-packages/CoreAudioKit/_metadata.py deleted file mode 100644 index 32c8fb08..00000000 --- a/env/lib/python2.7/site-packages/CoreAudioKit/_metadata.py +++ /dev/null @@ -1,36 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 26 22:49:15 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$AUViewParametersDisplayFlag@4$AUViewPropertiesDisplayFlag@2$AUViewTitleDisplayFlag@1$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'AUAudioUnit', b'requestViewControllerWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'AUAudioUnitViewConfiguration', b'hostHasController', {'retval': {'type': b'Z'}}) - r(b'AUAudioUnitViewConfiguration', b'initWithWidth:height:hostHasController:', {'arguments': {4: {'type': b'Z'}}}) - r(b'AUGenericView', 
b'setShowsExpertParameters:', {'arguments': {2: {'type': b'Z'}}}) - r(b'AUGenericView', b'showsExpertParameters', {'retval': {'type': b'Z'}}) - r(b'CANetworkBrowserWindowController', b'isAVBSupported', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'customViewPersistentData', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'setCustomViewPersistentData:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreAudioKit/_metadata.pyc b/env/lib/python2.7/site-packages/CoreAudioKit/_metadata.pyc deleted file mode 100644 index 05a3b1bf..00000000 Binary files a/env/lib/python2.7/site-packages/CoreAudioKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreBluetooth/_CoreBluetooth.so b/env/lib/python2.7/site-packages/CoreBluetooth/_CoreBluetooth.so deleted file mode 100755 index 126f529a..00000000 Binary files a/env/lib/python2.7/site-packages/CoreBluetooth/_CoreBluetooth.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreBluetooth/__init__.py b/env/lib/python2.7/site-packages/CoreBluetooth/__init__.py deleted file mode 100644 index 701536a9..00000000 --- a/env/lib/python2.7/site-packages/CoreBluetooth/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the CloudKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from CoreBluetooth import _metadata, _CoreBluetooth - -sys.modules['CoreBluetooth'] = mod = objc.ObjCLazyModule( - "CoreBluetooth", - "com.apple.CoreBluetooth", - objc.pathForFramework("/System/Library/Frameworks/CoreBluetooth.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['CoreBluetooth._metadata'] -del sys.modules['CoreBluetooth._CoreBluetooth'] diff --git a/env/lib/python2.7/site-packages/CoreBluetooth/__init__.pyc b/env/lib/python2.7/site-packages/CoreBluetooth/__init__.pyc deleted file mode 100644 index b09cb17b..00000000 Binary files a/env/lib/python2.7/site-packages/CoreBluetooth/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreBluetooth/_metadata.py b/env/lib/python2.7/site-packages/CoreBluetooth/_metadata.py deleted file mode 100644 index 21216adb..00000000 --- a/env/lib/python2.7/site-packages/CoreBluetooth/_metadata.py +++ /dev/null @@ -1,74 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 12 22:36:27 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$CBATTErrorDomain$CBAdvertisementDataIsConnectable$CBAdvertisementDataLocalNameKey$CBAdvertisementDataManufacturerDataKey$CBAdvertisementDataOverflowServiceUUIDsKey$CBAdvertisementDataServiceDataKey$CBAdvertisementDataServiceUUIDsKey$CBAdvertisementDataSolicitedServiceUUIDsKey$CBAdvertisementDataTxPowerLevelKey$CBCentralManagerOptionRestoreIdentifierKey$CBCentralManagerOptionShowPowerAlertKey$CBCentralManagerRestoredStatePeripheralsKey$CBCentralManagerRestoredStateScanOptionsKey$CBCentralManagerRestoredStateScanServicesKey$CBCentralManagerScanOptionAllowDuplicatesKey$CBCentralManagerScanOptionSolicitedServiceUUIDsKey$CBConnectPeripheralOptionNotifyOnConnectionKey$CBConnectPeripheralOptionNotifyOnDisconnectionKey$CBConnectPeripheralOptionNotifyOnNotificationKey$CBConnectPeripheralOptionStartDelayKey$CBErrorDomain$CBPeripheralManagerOptionRestoreIdentifierKey$CBPeripheralManagerOptionShowPowerAlertKey$CBPeripheralManagerRestoredStateAdvertisementDataKey$CBPeripheralManagerRestoredStateServicesKey$CBUUIDAppearanceString$CBUUIDCharacteristicAggregateFormatString$CBUUIDCharacteristicExtendedPropertiesString$CBUUIDCharacteristicFormatString$CBUUIDCharacteristicUserDescriptionString$CBUUIDCharacteristicValidRangeString$CBUUIDClientCharacteristicConfigurationString$CBUUIDDeviceNameString$CBUUIDGenericAccessProfileString$CBUUIDGenericAttributeProfileString$CBUUIDL2CAPPSMCharacteristicString$CBUUIDPeripheralPreferredConnectionParametersString$CBUUIDPeripheralPrivacyFlagString$CBUUIDReconnectionAddressString$CBUUIDServerCharacteristicConfigurationString$CBUUIDServiceChangedString$''' -enums = '''$CBATTErrorAttributeNotFound@10$CBATTErrorAttributeNotLong@11$CBATTErrorInsufficientAuthentication@5$CBATTErrorInsufficientAuthorization@8$CBATTErrorInsufficientEncryption@15$CBATTErrorInsufficientEncryptionKeySize@12$CBATTErrorInsufficientResources@17$CBATTErrorInvalidAttributeValueLength@13$CBATTErrorInvalidHandle@1$CBATTErrorInvalidOffset@7$CBATTErrorInvalidPdu@4$CBATTErrorPrepareQueueFull@9$CBATTErrorReadNotPermitted@2$CBATTErrorRequestNotSupported@6$CBATTErrorSuccess@0$CBATTErrorUnlikelyError@14$CBATTErrorUnsupportedGroupType@16$CBATTErrorWriteNotPermitted@3$CBAttributePermissionsReadEncryptionRequired@4$CBAttributePermissionsReadable@1$CBAttributePermissionsWriteEncryptionRequired@8$CBAttributePermissionsWriteable@2$CBCentralManagerStatePoweredOff@4$CBCentralManagerStatePoweredOn@5$CBCentralManagerStateResetting@1$CBCentralManagerStateUnauthorized@3$CBCentralManagerStateUnknown@0$CBCentralManagerStateUnsupported@2$CBCharacteristicPropertyAuthenticatedSignedWrites@64$CBCharacteristicPropertyBroadcast@1$CBCharacteristicPropertyExtendedProperties@128$CBCharacteristicPropertyIndicate@32$CBCharacteristicPropertyIndicateEncryptionRequired@512$CBCharacteristicPropertyNotify@16$CBCharacteristicPropertyNotifyEncryptionRequired@256$CBCharacteristicPropertyRead@2$CBCharacteristicPropertyWrite@8$CBCharacteristicPropertyWriteWithoutResponse@4$CBCharacteristicWriteWithResponse@0$CBCharacteristicWriteWithoutResponse@1$CBErrorAlreadyAdvertising@9$CBErrorConnectionFailed@10$CBErrorConnectionLimitReached@11$CBErrorConnectionTimeout@6$CBErrorInvalidHandle@2$CBErrorInvalidParameters@1$CBErrorNotConnected@3$CBErrorOperationCancelled@5$CBErrorOperationNotSupported@13$CBErrorOutOfSpace@4$CBErrorPeripheralDisconnected@7$CBErrorUUIDNotAllowed@8$CBErrorUnknown@0$CBErrorUnknownDevice@12$CBErrorUnkownDevice@12$CBManagerStatePoweredOff@4$CBManagerStatePoweredOn@5$CBManagerStateResetting@1$CBManagerStateUnauthorized@3$CBManagerSta
teUnknown@0$CBManagerStateUnsupported@2$CBPeripheralAuthorizationStatusAuthorized@3$CBPeripheralAuthorizationStatusDenied@2$CBPeripheralAuthorizationStatusNotDetermined@0$CBPeripheralAuthorizationStatusRestricted@1$CBPeripheralManagerAuthorizationStatusAuthorized@3$CBPeripheralManagerAuthorizationStatusDenied@2$CBPeripheralManagerAuthorizationStatusNotDetermined@0$CBPeripheralManagerAuthorizationStatusRestricted@1$CBPeripheralManagerConnectionLatencyHigh@2$CBPeripheralManagerConnectionLatencyLow@0$CBPeripheralManagerConnectionLatencyMedium@1$CBPeripheralManagerStatePoweredOff@4$CBPeripheralManagerStatePoweredOn@5$CBPeripheralManagerStateResetting@1$CBPeripheralManagerStateUnauthorized@3$CBPeripheralManagerStateUnknown@0$CBPeripheralManagerStateUnsupported@2$CBPeripheralStateConnected@2$CBPeripheralStateConnecting@1$CBPeripheralStateDisconnected@0$CBPeripheralStateDisconnecting@3$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CBCentralManager', b'isScanning', {'retval': {'type': 'Z'}}) - r(b'CBCharacteristic', b'isBroadcasted', {'retval': {'type': b'Z'}}) - r(b'CBCharacteristic', b'isNotifying', {'retval': {'type': b'Z'}}) - r(b'CBMutableService', b'initWithType:primary:', {'arguments': {3: {'type': b'Z'}}}) - r(b'CBMutableService', b'isPrimary', {'retval': {'type': b'Z'}}) - r(b'CBMutableService', b'setIsPrimary:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CBPeripheral', b'canSendWriteWithoutResponse', {'retval': {'type': 'Z'}}) - r(b'CBPeripheral', b'isConnected', {'retval': {'type': b'Z'}}) - r(b'CBPeripheral', b'setNotifyValue:forCharacteristic:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CBPeripheralManager', b'isAdvertising', {'retval': {'type': b'Z'}}) - r(b'CBPeripheralManager', b'publishL2CAPChannelWithEncryption:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CBPeripheralManager', b'updateValue:forCharacteristic:onSubscribedCentrals:', {'retval': {'type': b'Z'}}) - r(b'CBService', b'isPrimary', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'centralManager:didConnectPeripheral:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'centralManager:didDisconnectPeripheral:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'centralManager:didDiscoverPeripheral:advertisementData:RSSI:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'centralManager:didFailToConnectPeripheral:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'centralManager:didRetrieveConnectedPeripherals:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'centralManager:didRetrievePeripherals:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'centralManager:willRestoreState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'centralManagerDidUpdateState:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didDiscoverCharacteristicsForService:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 
b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didDiscoverDescriptorsForCharacteristic:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didDiscoverIncludedServicesForService:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didDiscoverServices:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didModifyServices:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didUpdateNotificationStateForCharacteristic:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didUpdateValueForCharacteristic:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didUpdateValueForDescriptor:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didWriteValueForCharacteristic:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheral:didWriteValueForDescriptor:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheralDidInvalidateServices:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'peripheralDidUpdateName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'peripheralDidUpdateRSSI:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'peripheralManager:central:didSubscribeToCharacteristic:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheralManager:central:didUnsubscribeFromCharacteristic:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheralManager:didAddService:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'peripheralManager:didOpenL2CAPChannel:error:', {'arguments': {3: {'type': 'S'}}}) - r(b'NSObject', b'peripheralManager:didPublishL2CAPChannel:error:', {'arguments': {3: {'type': 'S'}}}) - r(b'NSObject', b'peripheralManager:didReceiveReadRequest:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'peripheralManager:didReceiveWriteRequests:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'peripheralManager:didUnpublishL2CAPChannel:error:', {'arguments': {3: {'type': 'S'}}}) - r(b'NSObject', b'peripheralManager:willRestoreState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'peripheralManagerDidStartAdvertising:error:', {'required': False, 
'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'peripheralManagerDidUpdateState:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'peripheralManagerIsReadyToUpdateSubscribers:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreBluetooth/_metadata.pyc b/env/lib/python2.7/site-packages/CoreBluetooth/_metadata.pyc deleted file mode 100644 index 591d3ad0..00000000 Binary files a/env/lib/python2.7/site-packages/CoreBluetooth/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreData/_CoreData.so b/env/lib/python2.7/site-packages/CoreData/_CoreData.so deleted file mode 100755 index db498fb0..00000000 Binary files a/env/lib/python2.7/site-packages/CoreData/_CoreData.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreData/__init__.py b/env/lib/python2.7/site-packages/CoreData/__init__.py deleted file mode 100644 index c82c8747..00000000 --- a/env/lib/python2.7/site-packages/CoreData/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the CoreData framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import sys -import objc -import Foundation - -from CoreData import _metadata -from CoreData._CoreData import * -import CoreData._convenience - -sys.modules['CoreData'] = objc.ObjCLazyModule( - "CoreData", "com.apple.CoreData", - objc.pathForFramework("/System/Library/Frameworks/CoreData.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc - }, (Foundation,)) - -import sys -del sys.modules['CoreData._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreData/__init__.pyc b/env/lib/python2.7/site-packages/CoreData/__init__.pyc deleted file mode 100644 index 859690f9..00000000 Binary files a/env/lib/python2.7/site-packages/CoreData/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreData/_convenience.py b/env/lib/python2.7/site-packages/CoreData/_convenience.py deleted file mode 100644 index 5dfb5b86..00000000 --- a/env/lib/python2.7/site-packages/CoreData/_convenience.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -This adds some usefull conveniences to NSManagedObject and subclasses thereof - -These conveniences try to enable KVO by default on NSManagedObject instances, -this no longer works on Leopard due to the way NSManagedObject is implemented -there (it generates accessor methods at runtime, which interferes with the -implementation in this file). -""" -__all__ = () -from objc import addConvenienceForClass, super -from Foundation import NSObject -import os - -# XXX: This is fairly crude, need further research. -# This code basicly tries to outsmart tricks that -# CoreData plays, and that's asking for problems. 
-if os.uname()[2] < '13.': - def _first_python(cls): - if '__objc_python_subclass__' in cls.__dict__: - return cls - return None -else: - def _first_python(cls): - for cls in cls.mro(): - if '__objc_python_subclass__' in cls.__dict__: - return cls - return None - -def NSMOsetValue_ForKey_(self, name, value): - try: - first = _first_python(self.__class__) - if first is not None: - super(first, self).setValue_forKey_(value, name) - else: - self.setValue_forKey_(value, name) - - except KeyError as msg: - NSObject.__setattr__(self, name, value) - - -def NSMOgetValueForKey_(self, name): - try: - first = _first_python(self.__class__) - if first is not None: - return super(first, self).valueForKey_(name) - else: - return self.valueForKey_(name) - - except KeyError as msg: - raise AttributeError(name) - -if os.uname()[2] < '13.' or True: - addConvenienceForClass('NSManagedObject', ( - ('__setattr__', NSMOsetValue_ForKey_), - ('__getattr__', NSMOgetValueForKey_), - )) diff --git a/env/lib/python2.7/site-packages/CoreData/_convenience.pyc b/env/lib/python2.7/site-packages/CoreData/_convenience.pyc deleted file mode 100644 index 1039f4bb..00000000 Binary files a/env/lib/python2.7/site-packages/CoreData/_convenience.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreData/_metadata.py b/env/lib/python2.7/site-packages/CoreData/_metadata.py deleted file mode 100644 index 00a4900d..00000000 --- a/env/lib/python2.7/site-packages/CoreData/_metadata.py +++ /dev/null @@ -1,165 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Aug 8 21:44:05 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$NSAddedPersistentStoresKey$NSAffectedObjectsErrorKey$NSAffectedStoresErrorKey$NSBinaryExternalRecordType$NSBinaryStoreInsecureDecodingCompatibilityOption$NSBinaryStoreSecureDecodingClasses$NSBinaryStoreType$NSCoreDataVersionNumber@d$NSDeletedObjectsKey$NSDetailedErrorsKey$NSEntityNameInPathKey$NSErrorMergePolicy$NSExternalRecordExtensionOption$NSExternalRecordsDirectoryOption$NSExternalRecordsFileFormatOption$NSIgnorePersistentStoreVersioningOption$NSInMemoryStoreType$NSInferMappingModelAutomaticallyOption$NSInsertedObjectsKey$NSInvalidatedAllObjectsKey$NSInvalidatedObjectsKey$NSManagedObjectContextDidSaveNotification$NSManagedObjectContextObjectsDidChangeNotification$NSManagedObjectContextQueryGenerationKey$NSManagedObjectContextWillSaveNotification$NSMergeByPropertyObjectTrumpMergePolicy$NSMergeByPropertyStoreTrumpMergePolicy$NSMigratePersistentStoresAutomaticallyOption$NSMigrationDestinationObjectKey$NSMigrationEntityMappingKey$NSMigrationEntityPolicyKey$NSMigrationManagerKey$NSMigrationPropertyMappingKey$NSMigrationSourceObjectKey$NSModelPathKey$NSObjectURIKey$NSOverwriteMergePolicy$NSPersistentHistoryTrackingKey$NSPersistentStoreConnectionPoolMaxSizeKey$NSPersistentStoreCoordinatorStoresDidChangeNotification$NSPersistentStoreCoordinatorStoresWillChangeNotification$NSPersistentStoreCoordinatorWillRemoveStoreNotification$NSPersistentStoreDidImportUbiquitousContentChangesNotification$NSPersistentStoreForceDestroyOption$NSPersistentStoreOSCompatibility$NSPersistentStoreRebuildFromUbiquitousContentOption$NSPersistentStoreRemoveUbiquitousMetadataOption$NSPersistentStoreSaveConflictsErrorKey$NSPersistentStoreTimeoutOption$NSPersistentStoreUbiquitousContainerIdentifierKey$NSPersistentStoreUbiquitousContentNameKey$NSPersistentStoreUbiquitousContentURLKey$NSPersistentStoreUbiquitousPeerTokenOption$NSPersistentStoreUbiquitousTransitionTypeKey$NSReadOnlyPersistentStoreOption$NSRefreshedObjectsKey$NSRemovedPersistentStoresKey$NSRollbackMergePolicy$NSSQLiteAnalyzeOption$NSSQLiteErrorDomain$NSSQLiteManualVacuumOption$NSSQLitePragmasOption$NSSQLiteStoreType$NSStoreModelVersionHashesKey$NSStoreModelVersionIdentifiersKey$NSStorePathKey$NSStoreTypeKey$NSStoreUUIDInPathKey$NSStoreUUIDKey$NSUUIDChangedPersistentStoresKey$NSUpdatedObjectsKey$NSValidateXMLStoreOption$NSValidationKeyErrorKey$NSValidationObjectErrorKey$NSValidationPredicateErrorKey$NSValidationValueErrorKey$NSXMLExternalRecordType$NSXMLStoreType$''' -enums = 
'''$NSAddEntityMappingType@2$NSBatchDeleteRequestType@7$NSBatchDeleteResultTypeCount@2$NSBatchDeleteResultTypeObjectIDs@1$NSBatchDeleteResultTypeStatusOnly@0$NSBatchUpdateRequestType@6$NSBinaryDataAttributeType@1000$NSBooleanAttributeType@800$NSCascadeDeleteRule@2$NSConfinementConcurrencyType@0$NSCopyEntityMappingType@4$NSCoreDataError@134060$NSCoreDataVersionNumber10_10@526.0$NSCoreDataVersionNumber10_10_2@526.1$NSCoreDataVersionNumber10_10_3@526.2$NSCoreDataVersionNumber10_11@640.0$NSCoreDataVersionNumber10_11_3@641.3$NSCountResultType@4$NSCustomEntityMappingType@1$NSDateAttributeType@900$NSDecimalAttributeType@400$NSDenyDeleteRule@3$NSDictionaryResultType@2$NSDoubleAttributeType@500$NSEntityMigrationPolicyError@134170$NSErrorMergePolicyType@0$NSExternalRecordImportError@134200$NSFetchIndexElementTypeBinary@0$NSFetchIndexElementTypeRTree@1$NSFetchRequestExpressionType@50$NSFetchRequestType@1$NSFetchedResultsChangeDelete@2$NSFetchedResultsChangeInsert@1$NSFetchedResultsChangeMove@3$NSFetchedResultsChangeUpdate@4$NSFloatAttributeType@600$NSInferredMappingModelError@134190$NSInteger16AttributeType@100$NSInteger32AttributeType@200$NSInteger64AttributeType@300$NSMainQueueConcurrencyType@2$NSManagedObjectConstraintMergeError@133021$NSManagedObjectConstraintValidationError@1551$NSManagedObjectContextLockingError@132000$NSManagedObjectExternalRelationshipError@133010$NSManagedObjectIDResultType@1$NSManagedObjectMergeError@133020$NSManagedObjectReferentialIntegrityError@133000$NSManagedObjectResultType@0$NSManagedObjectValidationError@1550$NSMergeByPropertyObjectTrumpMergePolicyType@2$NSMergeByPropertyStoreTrumpMergePolicyType@1$NSMigrationCancelledError@134120$NSMigrationConstraintViolationError@134111$NSMigrationError@134110$NSMigrationManagerDestinationStoreError@134160$NSMigrationManagerSourceStoreError@134150$NSMigrationMissingMappingModelError@134140$NSMigrationMissingSourceModelError@134130$NSNoActionDeleteRule@0$NSNullifyDeleteRule@1$NSObjectIDAttributeType@2000$NSOverwriteMergePolicyType@3$NSPersistentHistoryChangeTypeDelete@2$NSPersistentHistoryChangeTypeInsert@0$NSPersistentHistoryChangeTypeUpdate@1$NSPersistentHistoryResultTypeChangesOnly@4$NSPersistentHistoryResultTypeCount@2$NSPersistentHistoryResultTypeObjectIDs@1$NSPersistentHistoryResultTypeStatusOnly@0$NSPersistentHistoryResultTypeTransactionsAndChanges@5$NSPersistentHistoryResultTypeTransactionsOnly@3$NSPersistentHistoryTokenExpiredError@134301$NSPersistentStoreCoordinatorLockingError@132010$NSPersistentStoreIncompatibleSchemaError@134020$NSPersistentStoreIncompatibleVersionHashError@134100$NSPersistentStoreIncompleteSaveError@134040$NSPersistentStoreInvalidTypeError@134000$NSPersistentStoreOpenError@134080$NSPersistentStoreOperationError@134070$NSPersistentStoreSaveConflictsError@134050$NSPersistentStoreSaveError@134030$NSPersistentStoreTimeoutError@134090$NSPersistentStoreTypeMismatchError@134010$NSPersistentStoreUbiquitousTransitionTypeAccountAdded@1$NSPersistentStoreUbiquitousTransitionTypeAccountRemoved@2$NSPersistentStoreUbiquitousTransitionTypeContentRemoved@3$NSPersistentStoreUbiquitousTransitionTypeInitialImportCompleted@4$NSPersistentStoreUnsupportedRequestTypeError@134091$NSPrivateQueueConcurrencyType@1$NSRemoveEntityMappingType@3$NSRollbackMergePolicyType@4$NSSQLiteError@134180$NSSaveRequestType@2$NSSnapshotEventMergePolicy@64$NSSnapshotEventRefresh@32$NSSnapshotEventRollback@16$NSSnapshotEventUndoDeletion@4$NSSnapshotEventUndoInsertion@2$NSSnapshotEventUndoUpdate@8$NSStatusOnlyResultType@0$NSStringAttributeType@700$
NSTransformEntityMappingType@5$NSTransformableAttributeType@1800$NSURIAttributeType@1200$NSUUIDAttributeType@1100$NSUndefinedAttributeType@0$NSUndefinedEntityMappingType@0$NSUpdatedObjectIDsResultType@1$NSUpdatedObjectsCountResultType@2$NSValidationDateTooLateError@1630$NSValidationDateTooSoonError@1640$NSValidationInvalidDateError@1650$NSValidationInvalidURIError@1690$NSValidationMissingMandatoryPropertyError@1570$NSValidationMultipleErrorsError@1560$NSValidationNumberTooLargeError@1610$NSValidationNumberTooSmallError@1620$NSValidationRelationshipDeniedDeleteError@1600$NSValidationRelationshipExceedsMaximumCountError@1590$NSValidationRelationshipLacksMinimumCountError@1580$NSValidationStringPatternMatchingError@1680$NSValidationStringTooLongError@1660$NSValidationStringTooShortError@1670$''' -misc.update({'NSCoreDataVersionNumber_iPhoneOS_3_0': 241.0, 'NSCoreDataVersionNumber_iPhoneOS_3_1': 248.0, 'NSCoreDataVersionNumber_iPhoneOS_3_2': 310.2, 'NSCoreDataVersionNumber10_7_2': 358.12, 'NSCoreDataVersionNumber10_7_3': 358.13, 'NSCoreDataVersionNumber10_7_4': 358.14, 'NSCoreDataVersionNumber10_8': 407.5, 'NSCoreDataVersionNumber10_9': 481.0, 'NSCoreDataVersionNumber10_5_3': 186.0, 'NSCoreDataVersionNumber10_6': 246.0, 'NSCoreDataVersionNumber10_7': 358.4, 'NSCoreDataVersionNumber10_4': 46.0, 'NSCoreDataVersionNumber10_5': 185.0, 'NSCoreDataVersionNumber10_8_2': 407.7, 'NSCoreDataVersionNumber10_6_3': 251.0, 'NSCoreDataVersionNumber10_6_2': 250.0, 'NSCoreDataVersionNumber10_4_3': 77.0, 'NSCoreDataVersionNumber10_9_2': 481.1, 'NSCoreDataVersionNumber10_9_3': 481.3, 'NSCoreDataVersionNumber_iPhoneOS_5_0': 386.1, 'NSCoreDataVersionNumber_iPhoneOS_5_1': 386.5, 'NSCoreDataVersionNumber_iPhoneOS_7_0': 479.1, 'NSCoreDataVersionNumber_iPhoneOS_7_1': 479.3, 'NSCoreDataVersionNumber_iPhoneOS_6_1': 419.1, 'NSCoreDataVersionNumber_iPhoneOS_6_0': 419.0, 'NSCoreDataVersionNumber_iPhoneOS_4_3': 320.17, 'NSCoreDataVersionNumber_iPhoneOS_4_2': 320.15, 'NSCoreDataVersionNumber_iPhoneOS_4_1': 320.11, 'NSCoreDataVersionNumber_iPhoneOS_4_0': 320.5}) -aliases = {'COREDATA_PRIVATE_EXTERN': '__private_extern__'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSAsynchronousFetchRequest', b'completionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'NSAsynchronousFetchRequest', b'initWithFetchRequest:completionBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSAtomicStore', b'load:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSAtomicStore', b'save:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSAttributeDescription', b'allowsExternalBinaryDataStorage', {'retval': {'type': b'Z'}}) - r(b'NSAttributeDescription', b'setAllowsExternalBinaryDataStorage:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSBatchUpdateRequest', b'includesSubentities', {'retval': {'type': b'Z'}}) - r(b'NSBatchUpdateRequest', b'setIncludesSubentities:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSCoreDataCoreSpotlightDelegate', b'searchableIndex:reindexSearchableItemsWithIdentifiers:acknowledgementHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSEntityDescription', b'isAbstract', {'retval': {'type': 'Z'}}) - r(b'NSEntityDescription', b'isKindOfEntity:', {'retval': {'type': 'Z'}}) - r(b'NSEntityDescription', b'setAbstract:', 
{'arguments': {2: {'type': 'Z'}}}) - r(b'NSEntityMigrationPolicy', b'beginEntityMapping:manager:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSEntityMigrationPolicy', b'createDestinationInstancesForSourceInstance:entityMapping:manager:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSEntityMigrationPolicy', b'createRelationshipsForDestinationInstance:entityMapping:manager:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSEntityMigrationPolicy', b'endEntityMapping:manager:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSEntityMigrationPolicy', b'endInstanceCreationForEntityMapping:manager:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSEntityMigrationPolicy', b'endRelationshipCreationForEntityMapping:manager:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSEntityMigrationPolicy', b'performCustomValidationForEntityMapping:manager:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSFetchIndexElementDescription', b'isAscending', {'retval': {'type': 'Z'}}) - r(b'NSFetchIndexElementDescription', b'setAscending:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSFetchRequest', b'execute:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSFetchRequest', b'includesPendingChanges', {'retval': {'type': 'Z'}}) - r(b'NSFetchRequest', b'includesPropertyValues', {'retval': {'type': 'Z'}}) - r(b'NSFetchRequest', b'includesSubentities', {'retval': {'type': 'Z'}}) - r(b'NSFetchRequest', b'returnsDistinctResults', {'retval': {'type': 'Z'}}) - r(b'NSFetchRequest', b'returnsObjectsAsFaults', {'retval': {'type': 'Z'}}) - r(b'NSFetchRequest', b'setIncludesPendingChanges:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSFetchRequest', b'setIncludesPropertyValues:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSFetchRequest', b'setIncludesSubentities:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSFetchRequest', b'setReturnsDistinctResults:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSFetchRequest', b'setReturnsObjectsAsFaults:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSFetchRequest', b'setShouldRefreshRefetchedObjects:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSFetchRequest', b'shouldRefreshRefetchedObjects', {'retval': {'type': b'Z'}}) - r(b'NSFetchRequestExpression', b'expressionForFetch:context:countOnly:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSFetchRequestExpression', b'isCountOnlyRequest', {'retval': {'type': 'Z'}}) - r(b'NSFetchedResultsController', b'performFetch:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSIncrementalStore', b'executeRequest:withContext:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSIncrementalStore', b'loadMetadata:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSIncrementalStore', b'newValueForRelationship:forObjectWithID:withContext:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSIncrementalStore', b'newValuesForObjectWithID:withContext:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSIncrementalStore', b'obtainPermanentIDsForObjects:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSManagedObject', b'contextShouldIgnoreUnmodeledPropertyChanges', {'retval': {'type': 'Z'}}) - r(b'NSManagedObject', b'hasChanges', {'retval': {'type': b'Z'}}) - r(b'NSManagedObject', b'hasFaultForRelationshipNamed:', {'retval': 
{'type': 'Z'}}) - r(b'NSManagedObject', b'hasPersistentChangedValues', {'retval': {'type': 'Z'}}) - r(b'NSManagedObject', b'isDeleted', {'retval': {'type': 'Z'}}) - r(b'NSManagedObject', b'isFault', {'retval': {'type': 'Z'}}) - r(b'NSManagedObject', b'isInserted', {'retval': {'type': 'Z'}}) - r(b'NSManagedObject', b'isUpdated', {'retval': {'type': 'Z'}}) - r(b'NSManagedObject', b'observationInfo', {'retval': {'type': '^v'}}) - r(b'NSManagedObject', b'setObservationInfo:', {'arguments': {2: {'type': '^v'}}}) - r(b'NSManagedObject', b'validateForDelete:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSManagedObject', b'validateForInsert:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSManagedObject', b'validateForUpdate:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSManagedObject', b'validateValue:forKey:error:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'N'}, 4: {'type_modifier': b'o'}}}) - r(b'NSManagedObjectContext', b'automaticallyMergesChangesFromParent', {'retval': {'type': 'Z'}}) - r(b'NSManagedObjectContext', b'countForFetchRequest:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSManagedObjectContext', b'executeFetchRequest:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSManagedObjectContext', b'executeRequest:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSManagedObjectContext', b'existingObjectWithID:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSManagedObjectContext', b'hasChanges', {'retval': {'type': 'Z'}}) - r(b'NSManagedObjectContext', b'observeValueForKeyPath:ofObject:change:context:', {'arguments': {5: {'type': '^v'}}}) - r(b'NSManagedObjectContext', b'obtainPermanentIDsForObjects:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSManagedObjectContext', b'performBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSManagedObjectContext', b'performBlockAndWait:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSManagedObjectContext', b'propagatesDeletesAtEndOfEvent', {'retval': {'type': 'Z'}}) - r(b'NSManagedObjectContext', b'refreshObject:mergeChanges:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSManagedObjectContext', b'retainsRegisteredObjects', {'retval': {'type': 'Z'}}) - r(b'NSManagedObjectContext', b'save:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSManagedObjectContext', b'setAutomaticallyMergesChangesFromParent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSManagedObjectContext', b'setPropagatesDeletesAtEndOfEvent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSManagedObjectContext', b'setQueryGenerationFromToken:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSManagedObjectContext', b'setRetainsRegisteredObjects:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSManagedObjectContext', b'setShouldDeleteInaccessibleFaults:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSManagedObjectContext', b'shouldDeleteInaccessibleFaults', {'retval': {'type': 'Z'}}) - r(b'NSManagedObjectContext', b'shouldHandleInaccessibleFault:forObjectID:triggeredByProperty:', {'retval': {'type': 'Z'}}) - r(b'NSManagedObjectContext', b'tryLock', {'retval': {'type': 'Z'}}) - r(b'NSManagedObjectID', b'isTemporaryID', {'retval': {'type': 'Z'}}) - r(b'NSManagedObjectModel', 
b'isConfiguration:compatibleWithStoreMetadata:', {'retval': {'type': 'Z'}}) - r(b'NSMappingModel', b'inferredMappingModelForSourceModel:destinationModel:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSMergePolicy', b'resolveConflicts:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSMergePolicy', b'resolveConstraintConflicts:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSMergePolicy', b'resolveOptimisticLockingVersionConflicts:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSMigrationManager', b'migrateStoreFromURL:type:options:withMappingModel:toDestinationURL:destinationType:destinationOptions:error:', {'retval': {'type': 'Z'}, 'arguments': {9: {'type_modifier': b'o'}}}) - r(b'NSMigrationManager', b'setUsesStoreSpecificMigrationManager:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSMigrationManager', b'usesStoreSpecificMigrationManager', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'controller:didChangeObject:atIndexPath:forChangeType:newIndexPath:', {'arguments': {5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'controller:didChangeSection:atIndex:forChangeType:', {'arguments': {4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSPersistentContainer', b'loadPersistentStoresWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSPersistentContainer', b'performBackgroundTask:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSPersistentStore', b'isReadOnly', {'retval': {'type': 'Z'}}) - r(b'NSPersistentStore', b'loadMetadata', {'retval': {'type': 'Z'}}) - r(b'NSPersistentStore', b'loadMetadata:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSPersistentStore', b'metadataForPersistentStoreWithURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSPersistentStore', b'migrationManagerClass', {'retval': {'type_modifier': b'o'}}) - r(b'NSPersistentStore', b'setMetadata:forPersistentStoreWithURL:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSPersistentStore', b'setReadOnly:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPersistentStoreCoordinator', b'addPersistentStoreWithDescription:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSPersistentStoreCoordinator', b'addPersistentStoreWithType:configuration:URL:options:error:', {'arguments': {6: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'destroyPersistentStoreAtURL:withType:options:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'executeRequest:withContext:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'importStoreWithIdentifier:fromExternalRecordsDirectory:toURL:options:withType:error:', {'arguments': {7: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'metadataForPersistentStoreOfType:URL:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'metadataForPersistentStoreWithURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'migratePersistentStore:toURL:options:withType:error:', {'arguments': 
{6: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'performBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSPersistentStoreCoordinator', b'performBlockAndWait:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSPersistentStoreCoordinator', b'removePersistentStore:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'removeUbiquitousContentAndPersistentStoreAtURL:options:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'replacePersistentStoreAtURL:destinationOptions:withPersistentStoreFromURL:sourceOptions:storeType:error:', {'retval': {'type': 'Z'}, 'arguments': {7: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'setMetadata:forPersistentStoreOfType:URL:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'setMetadata:forPersistentStoreOfType:URL:options:error:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'NSPersistentStoreCoordinator', b'setURL:forPersistentStore:', {'retval': {'type': 'Z'}}) - r(b'NSPersistentStoreCoordinator', b'tryLock', {'retval': {'type': 'Z'}}) - r(b'NSPersistentStoreDescription', b'isReadOnly', {'retval': {'type': 'Z'}}) - r(b'NSPersistentStoreDescription', b'setReadOnly:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPersistentStoreDescription', b'setShouldAddStoreAsynchronously:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPersistentStoreDescription', b'setShouldInferMappingModelAutomatically:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPersistentStoreDescription', b'setShouldMigrateStoreAutomatically:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPersistentStoreDescription', b'shouldAddStoreAsynchronously', {'retval': {'type': 'Z'}}) - r(b'NSPersistentStoreDescription', b'shouldInferMappingModelAutomatically', {'retval': {'type': 'Z'}}) - r(b'NSPersistentStoreDescription', b'shouldMigrateStoreAutomatically', {'retval': {'type': 'Z'}}) - r(b'NSPropertyDescription', b'isIndexed', {'retval': {'type': 'Z'}}) - r(b'NSPropertyDescription', b'isIndexedBySpotlight', {'retval': {'type': 'Z'}}) - r(b'NSPropertyDescription', b'isOptional', {'retval': {'type': 'Z'}}) - r(b'NSPropertyDescription', b'isStoredInExternalRecord', {'retval': {'type': 'Z'}}) - r(b'NSPropertyDescription', b'isTransient', {'retval': {'type': 'Z'}}) - r(b'NSPropertyDescription', b'setIndexed:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPropertyDescription', b'setIndexedBySpotlight:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPropertyDescription', b'setOptional:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPropertyDescription', b'setStoredInExternalRecord:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPropertyDescription', b'setTransient:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSRelationshipDescription', b'isOrdered', {'retval': {'type': b'Z'}}) - r(b'NSRelationshipDescription', b'isToMany', {'retval': {'type': 'Z'}}) - r(b'NSRelationshipDescription', b'setOrdered:', {'arguments': {2: {'type': b'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreData/_metadata.pyc b/env/lib/python2.7/site-packages/CoreData/_metadata.pyc deleted file mode 100644 index 991a740c..00000000 Binary files a/env/lib/python2.7/site-packages/CoreData/_metadata.pyc 
and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreFoundation/_CoreFoundation.so b/env/lib/python2.7/site-packages/CoreFoundation/_CoreFoundation.so deleted file mode 100755 index 0394bd78..00000000 Binary files a/env/lib/python2.7/site-packages/CoreFoundation/_CoreFoundation.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreFoundation/__init__.py b/env/lib/python2.7/site-packages/CoreFoundation/__init__.py deleted file mode 100644 index 1dcf20c9..00000000 --- a/env/lib/python2.7/site-packages/CoreFoundation/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -''' -Python mapping for the CoreFoundation framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc - -from CoreFoundation import _metadata -from CoreFoundation._inlines import _inline_list_ - -sys.modules['CoreFoundation'] = mod = objc.ObjCLazyModule('CoreFoundation', - "com.apple.CoreFoundation", objc.pathForFramework("/System/Library/Frameworks/CoreFoundation.framework"), - _metadata.__dict__, _inline_list_, - { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, ()) - -import CoreFoundation._CoreFoundation -import CoreFoundation._static -for nm in dir(CoreFoundation._CoreFoundation): - setattr(mod, nm, getattr(CoreFoundation._CoreFoundation, nm)) -for nm in dir(CoreFoundation._static): - setattr(mod, nm, getattr(CoreFoundation._static, nm)) - -import sys -del sys.modules['CoreFoundation._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreFoundation/__init__.pyc b/env/lib/python2.7/site-packages/CoreFoundation/__init__.pyc deleted file mode 100644 index 6128904c..00000000 Binary files a/env/lib/python2.7/site-packages/CoreFoundation/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreFoundation/_inlines.so b/env/lib/python2.7/site-packages/CoreFoundation/_inlines.so deleted file mode 100755 index 125622d1..00000000 Binary files a/env/lib/python2.7/site-packages/CoreFoundation/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreFoundation/_metadata.py b/env/lib/python2.7/site-packages/CoreFoundation/_metadata.py deleted file mode 100644 index 9dffe7ad..00000000 --- a/env/lib/python2.7/site-packages/CoreFoundation/_metadata.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 5 12:42:11 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'CFXMLEntityInfo': objc.createStructType('CFXMLEntityInfo', sel32or64(b'{_CFXMLEntityInfo=l^{__CFString=}{_CFXMLExternalID=^{__CFURL=}^{__CFString=}}^{__CFString=}}', b'{_CFXMLEntityInfo=q^{__CFString=}{_CFXMLExternalID=^{__CFURL=}^{__CFString=}}^{__CFString=}}'), ['entityType', 'replacementText', 'entityID', 'notationName']), 'CFXMLElementInfo': objc.createStructType('CFXMLElementInfo', b'{_CFXMLElementInfo=^{__CFDictionary=}^{__CFArray=}Z[3c]}', ['attributes', 'attributeOrder', 'isEmpty', '_reserved']), 'CFXMLAttributeListDeclarationInfo': objc.createStructType('CFXMLAttributeListDeclarationInfo', sel32or64(b'{_CFXMLAttributeListDeclarationInfo=l^{_CFXMLAttributeDeclarationInfo=^{__CFString=}^{__CFString=}^{__CFString=}}}', 
b'{_CFXMLAttributeListDeclarationInfo=q^{_CFXMLAttributeDeclarationInfo=^{__CFString=}^{__CFString=}^{__CFString=}}}'), ['numberOfAttributes', 'attributes']), 'CFXMLElementTypeDeclarationInfo': objc.createStructType('CFXMLElementTypeDeclarationInfo', b'{_CFXMLElementTypeDeclarationInfo=^{__CFString=}}', ['contentDescription']), 'CFGregorianDate': objc.createStructType('CFGregorianDate', sel32or64(b'{_CFGregorianDate=lccccd}', b'{_CFGregorianDate=iccccd}'), ['year', 'month', 'day', 'hour', 'minute', 'second']), 'CFXMLExternalID': objc.createStructType('CFXMLExternalID', b'{_CFXMLExternalID=^{__CFURL=}^{__CFString=}}', ['systemID', 'publicID']), 'CFUUIDBytes': objc.createStructType('CFUUIDBytes', b'{_CFUUIDBytes=CCCCCCCCCCCCCCCC}', ['byte0', 'byte1', 'byte2', 'byte3', 'byte4', 'byte5', 'byte6', 'byte7', 'byte8', 'byte9', 'byte10', 'byte11', 'byte12', 'byte13', 'byte14', 'byte15']), 'CFXMLAttributeDeclarationInfo': objc.createStructType('CFXMLAttributeDeclarationInfo', b'{_CFXMLAttributeDeclarationInfo=^{__CFString=}^{__CFString=}^{__CFString=}}', ['attributeName', 'typeString', 'defaultString']), 'CFSwappedFloat32': objc.createStructType('CFSwappedFloat32', b'{_CFSwappedFloat32=I}', ['v']), 'CFSwappedFloat64': objc.createStructType('CFSwappedFloat64', b'{_CFSwappedFloat64=Q}', ['v']), 'CFXMLDocumentTypeInfo': objc.createStructType('CFXMLDocumentTypeInfo', b'{_CFXMLDocumentTypeInfo={_CFXMLExternalID=^{__CFURL=}^{__CFString=}}}', ['externalID']), 'CFStreamError': objc.createStructType('CFStreamError', sel32or64(b'{_CFStreamError=ll}', b'{_CFStreamError=qi}'), ['domain', 'error']), 'CFXMLEntityReferenceInfo': objc.createStructType('CFXMLEntityReferenceInfo', sel32or64(b'{_CFXMLEntityReferenceInfo=l}', b'{_CFXMLEntityReferenceInfo=q}'), ['entityType']), 'CFXMLProcessingInstructionInfo': objc.createStructType('CFXMLProcessingInstructionInfo', b'{_CFXMLProcessingInstructionInfo=^{__CFString=}}', ['dataString']), 'CFRange': objc.createStructType('CFRange', sel32or64(b'{_CFRange=ll}', b'{_CFRange=qq}'), ['location', 'length']), 'CFSocketSignature': objc.createStructType('CFSocketSignature', sel32or64(b'{_CFSocketSignature=lll^{__CFData=}}', b'{_CFSocketSignature=iii^{__CFData=}}'), ['protocolFamily', 'socketType', 'protocol', 'address']), 'CFXMLDocumentInfo': objc.createStructType('CFXMLDocumentInfo', sel32or64(b'{_CFXMLDocumentInfo=^{__CFURL=}L}', b'{_CFXMLDocumentInfo=^{__CFURL=}I}'), ['sourceURL', 'encoding']), 'CFGregorianUnits': objc.createStructType('CFGregorianUnits', sel32or64(b'{_CFGregorianUnits=llllld}', b'{_CFGregorianUnits=iiiiid}'), ['years', 'months', 'days', 'hours', 'minutes', 'seconds']), 'CFXMLNotationInfo': objc.createStructType('CFXMLNotationInfo', b'{_CFXMLNotationInfo={_CFXMLExternalID=^{__CFURL=}^{__CFString=}}}', ['externalID'])}) -constants = 
'''$kCFAbsoluteTimeIntervalSince1904@d$kCFAbsoluteTimeIntervalSince1970@d$kCFAllocatorDefault@^{__CFAllocator=}$kCFAllocatorMalloc@^{__CFAllocator=}$kCFAllocatorMallocZone@^{__CFAllocator=}$kCFAllocatorNull@^{__CFAllocator=}$kCFAllocatorSystemDefault@^{__CFAllocator=}$kCFAllocatorUseContext@^{__CFAllocator=}$kCFBooleanFalse@^{__CFBoolean=}$kCFBooleanTrue@^{__CFBoolean=}$kCFBuddhistCalendar@^{__CFString=}$kCFBundleDevelopmentRegionKey@^{__CFString=}$kCFBundleExecutableKey@^{__CFString=}$kCFBundleIdentifierKey@^{__CFString=}$kCFBundleInfoDictionaryVersionKey@^{__CFString=}$kCFBundleLocalizationsKey@^{__CFString=}$kCFBundleNameKey@^{__CFString=}$kCFBundleVersionKey@^{__CFString=}$kCFChineseCalendar@^{__CFString=}$kCFCoreFoundationVersionNumber@d$kCFDateFormatterAMSymbol@^{__CFString=}$kCFDateFormatterCalendar@^{__CFString=}$kCFDateFormatterCalendarName@^{__CFString=}$kCFDateFormatterDefaultDate@^{__CFString=}$kCFDateFormatterDefaultFormat@^{__CFString=}$kCFDateFormatterDoesRelativeDateFormattingKey@^{__CFString=}$kCFDateFormatterEraSymbols@^{__CFString=}$kCFDateFormatterGregorianStartDate@^{__CFString=}$kCFDateFormatterIsLenient@^{__CFString=}$kCFDateFormatterLongEraSymbols@^{__CFString=}$kCFDateFormatterMonthSymbols@^{__CFString=}$kCFDateFormatterPMSymbol@^{__CFString=}$kCFDateFormatterQuarterSymbols@^{__CFString=}$kCFDateFormatterShortMonthSymbols@^{__CFString=}$kCFDateFormatterShortQuarterSymbols@^{__CFString=}$kCFDateFormatterShortStandaloneMonthSymbols@^{__CFString=}$kCFDateFormatterShortStandaloneQuarterSymbols@^{__CFString=}$kCFDateFormatterShortStandaloneWeekdaySymbols@^{__CFString=}$kCFDateFormatterShortWeekdaySymbols@^{__CFString=}$kCFDateFormatterStandaloneMonthSymbols@^{__CFString=}$kCFDateFormatterStandaloneQuarterSymbols@^{__CFString=}$kCFDateFormatterStandaloneWeekdaySymbols@^{__CFString=}$kCFDateFormatterTimeZone@^{__CFString=}$kCFDateFormatterTwoDigitStartDate@^{__CFString=}$kCFDateFormatterVeryShortMonthSymbols@^{__CFString=}$kCFDateFormatterVeryShortStandaloneMonthSymbols@^{__CFString=}$kCFDateFormatterVeryShortStandaloneWeekdaySymbols@^{__CFString=}$kCFDateFormatterVeryShortWeekdaySymbols@^{__CFString=}$kCFDateFormatterWeekdaySymbols@^{__CFString=}$kCFErrorDescriptionKey@^{__CFString=}$kCFErrorDomainCocoa@^{__CFString=}$kCFErrorDomainMach@^{__CFString=}$kCFErrorDomainOSStatus@^{__CFString=}$kCFErrorDomainPOSIX@^{__CFString=}$kCFErrorFilePathKey@^{__CFString=}$kCFErrorLocalizedDescriptionKey@^{__CFString=}$kCFErrorLocalizedFailureKey$kCFErrorLocalizedFailureReasonKey@^{__CFString=}$kCFErrorLocalizedRecoverySuggestionKey@^{__CFString=}$kCFErrorURLKey@^{__CFString=}$kCFErrorUnderlyingErrorKey@^{__CFString=}$kCFGregorianCalendar@^{__CFString=}$kCFHebrewCalendar@^{__CFString=}$kCFISO8601Calendar@^{__CFString=}$kCFIndianCalendar@^{__CFString=}$kCFIslamicCalendar@^{__CFString=}$kCFIslamicCivilCalendar@^{__CFString=}$kCFIslamicTabularCalendar@^{__CFString=}$kCFIslamicUmmAlQuraCalendar@^{__CFString=}$kCFJapaneseCalendar@^{__CFString=}$kCFLocaleAlternateQuotationBeginDelimiterKey@^{__CFString=}$kCFLocaleAlternateQuotationEndDelimiterKey@^{__CFString=}$kCFLocaleCalendar@^{__CFString=}$kCFLocaleCalendarIdentifier@^{__CFString=}$kCFLocaleCollationIdentifier@^{__CFString=}$kCFLocaleCollatorIdentifier@^{__CFString=}$kCFLocaleCountryCode@^{__CFString=}$kCFLocaleCountryCodeKey$kCFLocaleCurrencyCode@^{__CFString=}$kCFLocaleCurrencySymbol@^{__CFString=}$kCFLocaleCurrentLocaleDidChangeNotification@^{__CFString=}$kCFLocaleDecimalSeparator@^{__CFString=}$kCFLocaleExemplarCharacterSet@^{__CFStrin
g=}$kCFLocaleGroupingSeparator@^{__CFString=}$kCFLocaleIdentifier@^{__CFString=}$kCFLocaleLanguageCode@^{__CFString=}$kCFLocaleLanguageCodeKey$kCFLocaleMeasurementSystem@^{__CFString=}$kCFLocaleQuotationBeginDelimiterKey@^{__CFString=}$kCFLocaleQuotationEndDelimiterKey@^{__CFString=}$kCFLocaleScriptCode@^{__CFString=}$kCFLocaleUsesMetricSystem@^{__CFString=}$kCFLocaleVariantCode@^{__CFString=}$kCFNull@^{__CFNull=}$kCFNumberFormatterAlwaysShowDecimalSeparator@^{__CFString=}$kCFNumberFormatterCurrencyCode@^{__CFString=}$kCFNumberFormatterCurrencyDecimalSeparator@^{__CFString=}$kCFNumberFormatterCurrencyGroupingSeparator@^{__CFString=}$kCFNumberFormatterCurrencySymbol@^{__CFString=}$kCFNumberFormatterDecimalSeparator@^{__CFString=}$kCFNumberFormatterDefaultFormat@^{__CFString=}$kCFNumberFormatterExponentSymbol@^{__CFString=}$kCFNumberFormatterFormatWidth@^{__CFString=}$kCFNumberFormatterGroupingSeparator@^{__CFString=}$kCFNumberFormatterGroupingSize@^{__CFString=}$kCFNumberFormatterInfinitySymbol@^{__CFString=}$kCFNumberFormatterInternationalCurrencySymbol@^{__CFString=}$kCFNumberFormatterIsLenient@^{__CFString=}$kCFNumberFormatterMaxFractionDigits@^{__CFString=}$kCFNumberFormatterMaxIntegerDigits@^{__CFString=}$kCFNumberFormatterMaxSignificantDigits@^{__CFString=}$kCFNumberFormatterMinFractionDigits@^{__CFString=}$kCFNumberFormatterMinIntegerDigits@^{__CFString=}$kCFNumberFormatterMinSignificantDigits@^{__CFString=}$kCFNumberFormatterMinusSign@^{__CFString=}$kCFNumberFormatterMultiplier@^{__CFString=}$kCFNumberFormatterNaNSymbol@^{__CFString=}$kCFNumberFormatterNegativePrefix@^{__CFString=}$kCFNumberFormatterNegativeSuffix@^{__CFString=}$kCFNumberFormatterPaddingCharacter@^{__CFString=}$kCFNumberFormatterPaddingPosition@^{__CFString=}$kCFNumberFormatterPerMillSymbol@^{__CFString=}$kCFNumberFormatterPercentSymbol@^{__CFString=}$kCFNumberFormatterPlusSign@^{__CFString=}$kCFNumberFormatterPositivePrefix@^{__CFString=}$kCFNumberFormatterPositiveSuffix@^{__CFString=}$kCFNumberFormatterRoundingIncrement@^{__CFString=}$kCFNumberFormatterRoundingMode@^{__CFString=}$kCFNumberFormatterSecondaryGroupingSize@^{__CFString=}$kCFNumberFormatterUseGroupingSeparator@^{__CFString=}$kCFNumberFormatterUseSignificantDigits@^{__CFString=}$kCFNumberFormatterZeroSymbol@^{__CFString=}$kCFNumberNaN@^{__CFNumber=}$kCFNumberNegativeInfinity@^{__CFNumber=}$kCFNumberPositiveInfinity@^{__CFNumber=}$kCFPersianCalendar@^{__CFString=}$kCFPreferencesAnyApplication@^{__CFString=}$kCFPreferencesAnyHost@^{__CFString=}$kCFPreferencesAnyUser@^{__CFString=}$kCFPreferencesCurrentApplication@^{__CFString=}$kCFPreferencesCurrentHost@^{__CFString=}$kCFPreferencesCurrentUser@^{__CFString=}$kCFRepublicOfChinaCalendar@^{__CFString=}$kCFRunLoopCommonModes@^{__CFString=}$kCFRunLoopDefaultMode@^{__CFString=}$kCFSocketCommandKey@^{__CFString=}$kCFSocketErrorKey@^{__CFString=}$kCFSocketNameKey@^{__CFString=}$kCFSocketRegisterCommand@^{__CFString=}$kCFSocketResultKey@^{__CFString=}$kCFSocketRetrieveCommand@^{__CFString=}$kCFSocketValueKey@^{__CFString=}$kCFStreamErrorDomainSOCKS@i$kCFStreamErrorDomainSSL@i$kCFStreamPropertyAppendToFile@^{__CFString=}$kCFStreamPropertyDataWritten@^{__CFString=}$kCFStreamPropertyFileCurrentOffset@^{__CFString=}$kCFStreamPropertySOCKSPassword$kCFStreamPropertySOCKSProxy$kCFStreamPropertySOCKSProxyHost$kCFStreamPropertySOCKSProxyPort$kCFStreamPropertySOCKSUser$kCFStreamPropertySOCKSVersion$kCFStreamPropertyShouldCloseNativeSocket$kCFStreamPropertySocketNativeHandle@^{__CFString=}$kCFStreamPropertySocketRemoteHostNam
e@^{__CFString=}$kCFStreamPropertySocketRemotePortNumber@^{__CFString=}$kCFStreamPropertySocketSecurityLevel$kCFStreamSocketSOCKSVersion4$kCFStreamSocketSOCKSVersion5$kCFStreamSocketSecurityLevelNegotiatedSSL$kCFStreamSocketSecurityLevelNone$kCFStreamSocketSecurityLevelSSLv2$kCFStreamSocketSecurityLevelSSLv3$kCFStreamSocketSecurityLevelTLSv1$kCFStringTransformFullwidthHalfwidth@^{__CFString=}$kCFStringTransformHiraganaKatakana@^{__CFString=}$kCFStringTransformLatinArabic@^{__CFString=}$kCFStringTransformLatinCyrillic@^{__CFString=}$kCFStringTransformLatinGreek@^{__CFString=}$kCFStringTransformLatinHangul@^{__CFString=}$kCFStringTransformLatinHebrew@^{__CFString=}$kCFStringTransformLatinHiragana@^{__CFString=}$kCFStringTransformLatinKatakana@^{__CFString=}$kCFStringTransformLatinThai@^{__CFString=}$kCFStringTransformMandarinLatin@^{__CFString=}$kCFStringTransformStripCombiningMarks@^{__CFString=}$kCFStringTransformStripDiacritics@^{__CFString=}$kCFStringTransformToLatin@^{__CFString=}$kCFStringTransformToUnicodeName@^{__CFString=}$kCFStringTransformToXMLHex@^{__CFString=}$kCFTimeZoneSystemTimeZoneDidChangeNotification@^{__CFString=}$kCFURLAddedToDirectoryDateKey@^{__CFString=}$kCFURLApplicationIsScriptableKey$kCFURLAttributeModificationDateKey@^{__CFString=}$kCFURLContentAccessDateKey@^{__CFString=}$kCFURLContentModificationDateKey@^{__CFString=}$kCFURLCreationDateKey@^{__CFString=}$kCFURLCustomIconKey@^{__CFString=}$kCFURLDocumentIdentifierKey@^{__CFString=}$kCFURLEffectiveIconKey@^{__CFString=}$kCFURLFileAllocatedSizeKey@^{__CFString=}$kCFURLFileDirectoryContents@^{__CFString=}$kCFURLFileExists@^{__CFString=}$kCFURLFileLastModificationTime@^{__CFString=}$kCFURLFileLength@^{__CFString=}$kCFURLFileOwnerID@^{__CFString=}$kCFURLFilePOSIXMode@^{__CFString=}$kCFURLFileResourceIdentifierKey@^{__CFString=}$kCFURLFileResourceTypeBlockSpecial@^{__CFString=}$kCFURLFileResourceTypeCharacterSpecial@^{__CFString=}$kCFURLFileResourceTypeDirectory@^{__CFString=}$kCFURLFileResourceTypeKey@^{__CFString=}$kCFURLFileResourceTypeNamedPipe@^{__CFString=}$kCFURLFileResourceTypeRegular@^{__CFString=}$kCFURLFileResourceTypeSocket@^{__CFString=}$kCFURLFileResourceTypeSymbolicLink@^{__CFString=}$kCFURLFileResourceTypeUnknown@^{__CFString=}$kCFURLFileSecurityKey@^{__CFString=}$kCFURLFileSizeKey@^{__CFString=}$kCFURLGenerationIdentifierKey@^{__CFString=}$kCFURLHTTPStatusCode@^{__CFString=}$kCFURLHTTPStatusLine@^{__CFString=}$kCFURLHasHiddenExtensionKey@^{__CFString=}$kCFURLIsAliasFileKey@^{__CFString=}$kCFURLIsApplicationKey$kCFURLIsDirectoryKey@^{__CFString=}$kCFURLIsExcludedFromBackupKey@^{__CFString=}$kCFURLIsExecutableKey@^{__CFString=}$kCFURLIsHiddenKey@^{__CFString=}$kCFURLIsMountTriggerKey@^{__CFString=}$kCFURLIsPackageKey@^{__CFString=}$kCFURLIsReadableKey@^{__CFString=}$kCFURLIsRegularFileKey@^{__CFString=}$kCFURLIsSymbolicLinkKey@^{__CFString=}$kCFURLIsSystemImmutableKey@^{__CFString=}$kCFURLIsUbiquitousItemKey@^{__CFString=}$kCFURLIsUserImmutableKey@^{__CFString=}$kCFURLIsVolumeKey@^{__CFString=}$kCFURLIsWritableKey@^{__CFString=}$kCFURLKeysOfUnsetValuesKey@^{__CFString=}$kCFURLLabelColorKey@^{__CFString=}$kCFURLLabelNumberKey@^{__CFString=}$kCFURLLinkCountKey@^{__CFString=}$kCFURLLocalizedLabelKey@^{__CFString=}$kCFURLLocalizedNameKey@^{__CFString=}$kCFURLLocalizedTypeDescriptionKey@^{__CFString=}$kCFURLNameKey@^{__CFString=}$kCFURLParentDirectoryURLKey@^{__CFString=}$kCFURLPathKey@^{__CFString=}$kCFURLPreferredIOBlockSizeKey@^{__CFString=}$kCFURLQuarantinePropertiesKey@^{__CFString=}$kCFURLTagNamesKey@^{_
_CFString=}$kCFURLTotalFileAllocatedSizeKey@^{__CFString=}$kCFURLTotalFileSizeKey@^{__CFString=}$kCFURLTypeIdentifierKey@^{__CFString=}$kCFURLUbiquitousItemDownloadingErrorKey@^{__CFString=}$kCFURLUbiquitousItemDownloadingStatusCurrent@^{__CFString=}$kCFURLUbiquitousItemDownloadingStatusDownloaded@^{__CFString=}$kCFURLUbiquitousItemDownloadingStatusKey@^{__CFString=}$kCFURLUbiquitousItemDownloadingStatusNotDownloaded@^{__CFString=}$kCFURLUbiquitousItemHasUnresolvedConflictsKey@^{__CFString=}$kCFURLUbiquitousItemIsDownloadedKey@^{__CFString=}$kCFURLUbiquitousItemIsDownloadingKey@^{__CFString=}$kCFURLUbiquitousItemIsUploadedKey@^{__CFString=}$kCFURLUbiquitousItemIsUploadingKey@^{__CFString=}$kCFURLUbiquitousItemPercentDownloadedKey@^{__CFString=}$kCFURLUbiquitousItemPercentUploadedKey@^{__CFString=}$kCFURLUbiquitousItemUploadingErrorKey@^{__CFString=}$kCFURLVolumeAvailableCapacityForImportantUsageKey$kCFURLVolumeAvailableCapacityForOpportunisticUsageKey$kCFURLVolumeAvailableCapacityKey@^{__CFString=}$kCFURLVolumeCreationDateKey@^{__CFString=}$kCFURLVolumeIdentifierKey@^{__CFString=}$kCFURLVolumeIsAutomountedKey@^{__CFString=}$kCFURLVolumeIsBrowsableKey@^{__CFString=}$kCFURLVolumeIsEjectableKey@^{__CFString=}$kCFURLVolumeIsEncryptedKey$kCFURLVolumeIsInternalKey@^{__CFString=}$kCFURLVolumeIsJournalingKey@^{__CFString=}$kCFURLVolumeIsLocalKey@^{__CFString=}$kCFURLVolumeIsReadOnlyKey@^{__CFString=}$kCFURLVolumeIsRemovableKey@^{__CFString=}$kCFURLVolumeIsRootFileSystemKey$kCFURLVolumeLocalizedFormatDescriptionKey@^{__CFString=}$kCFURLVolumeLocalizedNameKey@^{__CFString=}$kCFURLVolumeMaximumFileSizeKey@^{__CFString=}$kCFURLVolumeNameKey@^{__CFString=}$kCFURLVolumeResourceCountKey@^{__CFString=}$kCFURLVolumeSupportsAccessPermissionsKey$kCFURLVolumeSupportsAdvisoryFileLockingKey@^{__CFString=}$kCFURLVolumeSupportsCasePreservedNamesKey@^{__CFString=}$kCFURLVolumeSupportsCaseSensitiveNamesKey@^{__CFString=}$kCFURLVolumeSupportsCompressionKey$kCFURLVolumeSupportsExclusiveRenamingKey$kCFURLVolumeSupportsExtendedSecurityKey@^{__CFString=}$kCFURLVolumeSupportsFileCloningKey$kCFURLVolumeSupportsHardLinksKey@^{__CFString=}$kCFURLVolumeSupportsImmutableFilesKey$kCFURLVolumeSupportsJournalingKey@^{__CFString=}$kCFURLVolumeSupportsPersistentIDsKey@^{__CFString=}$kCFURLVolumeSupportsRenamingKey@^{__CFString=}$kCFURLVolumeSupportsRootDirectoryDatesKey@^{__CFString=}$kCFURLVolumeSupportsSparseFilesKey@^{__CFString=}$kCFURLVolumeSupportsSwapRenamingKey$kCFURLVolumeSupportsSymbolicLinksKey@^{__CFString=}$kCFURLVolumeSupportsVolumeSizesKey@^{__CFString=}$kCFURLVolumeSupportsZeroRunsKey@^{__CFString=}$kCFURLVolumeTotalCapacityKey@^{__CFString=}$kCFURLVolumeURLForRemountingKey@^{__CFString=}$kCFURLVolumeURLKey@^{__CFString=}$kCFURLVolumeUUIDStringKey@^{__CFString=}$kCFUserNotificationAlertHeaderKey@^{__CFString=}$kCFUserNotificationAlertMessageKey@^{__CFString=}$kCFUserNotificationAlternateButtonTitleKey@^{__CFString=}$kCFUserNotificationCheckBoxTitlesKey@^{__CFString=}$kCFUserNotificationDefaultButtonTitleKey@^{__CFString=}$kCFUserNotificationIconURLKey@^{__CFString=}$kCFUserNotificationLocalizationURLKey@^{__CFString=}$kCFUserNotificationOtherButtonTitleKey@^{__CFString=}$kCFUserNotificationPopUpSelectionKey@^{__CFString=}$kCFUserNotificationPopUpTitlesKey@^{__CFString=}$kCFUserNotificationProgressIndicatorValueKey@^{__CFString=}$kCFUserNotificationSoundURLKey@^{__CFString=}$kCFUserNotificationTextFieldTitlesKey@^{__CFString=}$kCFUserNotificationTextFieldValuesKey@^{__CFString=}$kCFXMLTreeErrorDescription@^{__CFStrin
g=}$kCFXMLTreeErrorLineNumber@^{__CFString=}$kCFXMLTreeErrorLocation@^{__CFString=}$kCFXMLTreeErrorStatusCode@^{__CFString=}$''' -enums = '''$CFByteOrderBigEndian@2$CFByteOrderLittleEndian@1$CFByteOrderUnknown@0$CFNotificationSuspensionBehaviorCoalesce@2$CFNotificationSuspensionBehaviorDeliverImmediately@4$CFNotificationSuspensionBehaviorDrop@1$CFNotificationSuspensionBehaviorHold@3$CF_USE_OSBYTEORDER_H@1$COREFOUNDATION_CFPLUGINCOM_SEPARATE@1$FALSE@0$TRUE@1$kCFBookmarkResolutionWithoutMountingMask@512$kCFBookmarkResolutionWithoutUIMask@256$kCFBundleExecutableArchitectureI386@7$kCFBundleExecutableArchitecturePPC@18$kCFBundleExecutableArchitecturePPC64@16777234$kCFBundleExecutableArchitectureX86_64@16777223$kCFCalendarComponentsWrap@1$kCFCalendarUnitDay@16$kCFCalendarUnitEra@2$kCFCalendarUnitHour@32$kCFCalendarUnitMinute@64$kCFCalendarUnitMonth@8$kCFCalendarUnitQuarter@2048$kCFCalendarUnitSecond@128$kCFCalendarUnitWeek@256$kCFCalendarUnitWeekOfMonth@4096$kCFCalendarUnitWeekOfYear@8192$kCFCalendarUnitWeekday@512$kCFCalendarUnitWeekdayOrdinal@1024$kCFCalendarUnitYear@4$kCFCalendarUnitYearForWeekOfYear@16384$kCFCharacterSetAlphaNumeric@10$kCFCharacterSetCapitalizedLetter@13$kCFCharacterSetControl@1$kCFCharacterSetDecimalDigit@4$kCFCharacterSetDecomposable@9$kCFCharacterSetIllegal@12$kCFCharacterSetLetter@5$kCFCharacterSetLowercaseLetter@6$kCFCharacterSetNewline@15$kCFCharacterSetNonBase@8$kCFCharacterSetPunctuation@11$kCFCharacterSetSymbol@14$kCFCharacterSetUppercaseLetter@7$kCFCharacterSetWhitespace@2$kCFCharacterSetWhitespaceAndNewline@3$kCFCompareAnchored@8$kCFCompareBackwards@4$kCFCompareCaseInsensitive@1$kCFCompareDiacriticInsensitive@128$kCFCompareEqualTo@0$kCFCompareForcedOrdering@512$kCFCompareGreaterThan@1$kCFCompareLessThan@-1$kCFCompareLocalized@32$kCFCompareNonliteral@16$kCFCompareNumerically@64$kCFCompareWidthInsensitive@256$kCFCoreFoundationVersionNumber10_10@1151.16$kCFCoreFoundationVersionNumber10_10_1@1151.16$kCFCoreFoundationVersionNumber10_10_2@1152$kCFCoreFoundationVersionNumber10_10_3@1153.18$kCFCoreFoundationVersionNumber10_10_4@1153.18$kCFCoreFoundationVersionNumber10_10_5@1153.18$kCFCoreFoundationVersionNumber10_10_Max@1199$kCFCoreFoundationVersionNumber10_11@1253$kCFCoreFoundationVersionNumber10_11_1@1255.1$kCFCoreFoundationVersionNumber10_11_2@1256.14$kCFCoreFoundationVersionNumber10_11_3@1256.14$kCFCoreFoundationVersionNumber10_11_4@1258.1$kCFCoreFoundationVersionNumber10_11_Max@1299$kCFDataSearchAnchored@2$kCFDataSearchBackwards@1$kCFDateFormatterFullStyle@4$kCFDateFormatterLongStyle@3$kCFDateFormatterMediumStyle@2$kCFDateFormatterNoStyle@0$kCFDateFormatterShortStyle@1$kCFFileDescriptorReadCallBack@1$kCFFileDescriptorWriteCallBack@2$kCFFileSecurityClearAccessControlList@32$kCFFileSecurityClearGroup@2$kCFFileSecurityClearGroupUUID@16$kCFFileSecurityClearMode@4$kCFFileSecurityClearOwner@1$kCFFileSecurityClearOwnerUUID@8$kCFGregorianAllUnits@16777215$kCFGregorianUnitsDays@4$kCFGregorianUnitsHours@8$kCFGregorianUnitsMinutes@16$kCFGregorianUnitsMonths@2$kCFGregorianUnitsSeconds@32$kCFGregorianUnitsYears@1$kCFISO8601DateFormatWithColonSeparatorInTime@512$kCFISO8601DateFormatWithColonSeparatorInTimeZone@1024$kCFISO8601DateFormatWithDashSeparatorInDate@256$kCFISO8601DateFormatWithDay@16$kCFISO8601DateFormatWithFractionalSeconds@2048$kCFISO8601DateFormatWithMonth@2$kCFISO8601DateFormatWithSpaceBetweenDateAndTime@128$kCFISO8601DateFormatWithTime@32$kCFISO8601DateFormatWithTimeZone@64$kCFISO8601DateFormatWithWeekOfYear@4$kCFISO8601DateFormatWithYear@1$kCFLocaleLanguageDirectionB
ottomToTop@4$kCFLocaleLanguageDirectionLeftToRight@1$kCFLocaleLanguageDirectionRightToLeft@2$kCFLocaleLanguageDirectionTopToBottom@3$kCFLocaleLanguageDirectionUnknown@0$kCFMessagePortBecameInvalidError@-5$kCFMessagePortIsInvalid@-3$kCFMessagePortReceiveTimeout@-2$kCFMessagePortSendTimeout@-1$kCFMessagePortSuccess@0$kCFMessagePortTransportError@-4$kCFNotFound@-1$kCFNotificationDeliverImmediately@1$kCFNotificationPostToAllSessions@2$kCFNumberCFIndexType@14$kCFNumberCGFloatType@16$kCFNumberCharType@7$kCFNumberDoubleType@13$kCFNumberFloat32Type@5$kCFNumberFloat64Type@6$kCFNumberFloatType@12$kCFNumberFormatterCurrencyAccountingStyle@10$kCFNumberFormatterCurrencyISOCodeStyle@8$kCFNumberFormatterCurrencyPluralStyle@9$kCFNumberFormatterCurrencyStyle@2$kCFNumberFormatterDecimalStyle@1$kCFNumberFormatterNoStyle@0$kCFNumberFormatterOrdinalStyle@6$kCFNumberFormatterPadAfterPrefix@1$kCFNumberFormatterPadAfterSuffix@3$kCFNumberFormatterPadBeforePrefix@0$kCFNumberFormatterPadBeforeSuffix@2$kCFNumberFormatterParseIntegersOnly@1$kCFNumberFormatterPercentStyle@3$kCFNumberFormatterRoundCeiling@0$kCFNumberFormatterRoundDown@2$kCFNumberFormatterRoundFloor@1$kCFNumberFormatterRoundHalfDown@5$kCFNumberFormatterRoundHalfEven@4$kCFNumberFormatterRoundHalfUp@6$kCFNumberFormatterRoundUp@3$kCFNumberFormatterScientificStyle@4$kCFNumberFormatterSpellOutStyle@5$kCFNumberIntType@9$kCFNumberLongLongType@11$kCFNumberLongType@10$kCFNumberMaxType@16$kCFNumberNSIntegerType@15$kCFNumberSInt16Type@2$kCFNumberSInt32Type@3$kCFNumberSInt64Type@4$kCFNumberSInt8Type@1$kCFNumberShortType@8$kCFPropertyListBinaryFormat_v1_0@200$kCFPropertyListImmutable@0$kCFPropertyListMutableContainers@1$kCFPropertyListMutableContainersAndLeaves@2$kCFPropertyListOpenStepFormat@1$kCFPropertyListReadCorruptError@3840$kCFPropertyListReadStreamError@3842$kCFPropertyListReadUnknownVersionError@3841$kCFPropertyListWriteStreamError@3851$kCFPropertyListXMLFormat_v1_0@100$kCFRunLoopAfterWaiting@64$kCFRunLoopAllActivities@268435455$kCFRunLoopBeforeSources@4$kCFRunLoopBeforeTimers@2$kCFRunLoopBeforeWaiting@32$kCFRunLoopEntry@1$kCFRunLoopExit@128$kCFRunLoopRunFinished@1$kCFRunLoopRunHandledSource@4$kCFRunLoopRunStopped@2$kCFRunLoopRunTimedOut@3$kCFSocketAcceptCallBack@2$kCFSocketAutomaticallyReenableAcceptCallBack@2$kCFSocketAutomaticallyReenableDataCallBack@3$kCFSocketAutomaticallyReenableReadCallBack@1$kCFSocketAutomaticallyReenableWriteCallBack@8$kCFSocketCloseOnInvalidate@128$kCFSocketConnectCallBack@4$kCFSocketDataCallBack@3$kCFSocketError@-1$kCFSocketLeaveErrors@64$kCFSocketNoCallBack@0$kCFSocketReadCallBack@1$kCFSocketSuccess@0$kCFSocketTimeout@-2$kCFSocketWriteCallBack@8$kCFStreamErrorDomainCustom@-1$kCFStreamErrorDomainMacOSStatus@2$kCFStreamErrorDomainPOSIX@1$kCFStreamEventCanAcceptBytes@4$kCFStreamEventEndEncountered@16$kCFStreamEventErrorOccurred@8$kCFStreamEventHasBytesAvailable@2$kCFStreamEventNone@0$kCFStreamEventOpenCompleted@1$kCFStreamStatusAtEnd@5$kCFStreamStatusClosed@6$kCFStreamStatusError@7$kCFStreamStatusNotOpen@0$kCFStreamStatusOpen@2$kCFStreamStatusOpening@1$kCFStreamStatusReading@3$kCFStreamStatusWriting@4$kCFStringEncodingANSEL@1537$kCFStringEncodingASCII@1536$kCFStringEncodingBig5@2563$kCFStringEncodingBig5_E@2569$kCFStringEncodingBig5_HKSCS_1999@2566$kCFStringEncodingCNS_11643_92_P1@1617$kCFStringEncodingCNS_11643_92_P2@1618$kCFStringEncodingCNS_11643_92_P3@1619$kCFStringEncodingDOSArabic@1049$kCFStringEncodingDOSBalticRim@1030$kCFStringEncodingDOSCanadianFrench@1048$kCFStringEncodingDOSChineseSimplif@1057$kCFStringEncodingDOSChineseTr
ad@1059$kCFStringEncodingDOSCyrillic@1043$kCFStringEncodingDOSGreek@1029$kCFStringEncodingDOSGreek1@1041$kCFStringEncodingDOSGreek2@1052$kCFStringEncodingDOSHebrew@1047$kCFStringEncodingDOSIcelandic@1046$kCFStringEncodingDOSJapanese@1056$kCFStringEncodingDOSKorean@1058$kCFStringEncodingDOSLatin1@1040$kCFStringEncodingDOSLatin2@1042$kCFStringEncodingDOSLatinUS@1024$kCFStringEncodingDOSNordic@1050$kCFStringEncodingDOSPortuguese@1045$kCFStringEncodingDOSRussian@1051$kCFStringEncodingDOSThai@1053$kCFStringEncodingDOSTurkish@1044$kCFStringEncodingEBCDIC_CP037@3074$kCFStringEncodingEBCDIC_US@3073$kCFStringEncodingEUC_CN@2352$kCFStringEncodingEUC_JP@2336$kCFStringEncodingEUC_KR@2368$kCFStringEncodingEUC_TW@2353$kCFStringEncodingGBK_95@1585$kCFStringEncodingGB_18030_2000@1586$kCFStringEncodingGB_2312_80@1584$kCFStringEncodingHZ_GB_2312@2565$kCFStringEncodingISOLatin1@513$kCFStringEncodingISOLatin10@528$kCFStringEncodingISOLatin2@514$kCFStringEncodingISOLatin3@515$kCFStringEncodingISOLatin4@516$kCFStringEncodingISOLatin5@521$kCFStringEncodingISOLatin6@522$kCFStringEncodingISOLatin7@525$kCFStringEncodingISOLatin8@526$kCFStringEncodingISOLatin9@527$kCFStringEncodingISOLatinArabic@518$kCFStringEncodingISOLatinCyrillic@517$kCFStringEncodingISOLatinGreek@519$kCFStringEncodingISOLatinHebrew@520$kCFStringEncodingISOLatinThai@523$kCFStringEncodingISO_2022_CN@2096$kCFStringEncodingISO_2022_CN_EXT@2097$kCFStringEncodingISO_2022_JP@2080$kCFStringEncodingISO_2022_JP_1@2082$kCFStringEncodingISO_2022_JP_2@2081$kCFStringEncodingISO_2022_JP_3@2083$kCFStringEncodingISO_2022_KR@2112$kCFStringEncodingInvalidId@4294967295$kCFStringEncodingJIS_C6226_78@1572$kCFStringEncodingJIS_X0201_76@1568$kCFStringEncodingJIS_X0208_83@1569$kCFStringEncodingJIS_X0208_90@1570$kCFStringEncodingJIS_X0212_90@1571$kCFStringEncodingKOI8_R@2562$kCFStringEncodingKOI8_U@2568$kCFStringEncodingKSC_5601_87@1600$kCFStringEncodingKSC_5601_92_Johab@1601$kCFStringEncodingMacArabic@4$kCFStringEncodingMacArmenian@24$kCFStringEncodingMacBengali@13$kCFStringEncodingMacBurmese@19$kCFStringEncodingMacCeltic@39$kCFStringEncodingMacCentralEurRoman@29$kCFStringEncodingMacChineseSimp@25$kCFStringEncodingMacChineseTrad@2$kCFStringEncodingMacCroatian@36$kCFStringEncodingMacCyrillic@7$kCFStringEncodingMacDevanagari@9$kCFStringEncodingMacDingbats@34$kCFStringEncodingMacEthiopic@28$kCFStringEncodingMacExtArabic@31$kCFStringEncodingMacFarsi@140$kCFStringEncodingMacGaelic@40$kCFStringEncodingMacGeorgian@23$kCFStringEncodingMacGreek@6$kCFStringEncodingMacGujarati@11$kCFStringEncodingMacGurmukhi@10$kCFStringEncodingMacHFS@255$kCFStringEncodingMacHebrew@5$kCFStringEncodingMacIcelandic@37$kCFStringEncodingMacInuit@236$kCFStringEncodingMacJapanese@1$kCFStringEncodingMacKannada@16$kCFStringEncodingMacKhmer@20$kCFStringEncodingMacKorean@3$kCFStringEncodingMacLaotian@22$kCFStringEncodingMacMalayalam@17$kCFStringEncodingMacMongolian@27$kCFStringEncodingMacOriya@12$kCFStringEncodingMacRoman@0$kCFStringEncodingMacRomanLatin1@2564$kCFStringEncodingMacRomanian@38$kCFStringEncodingMacSinhalese@18$kCFStringEncodingMacSymbol@33$kCFStringEncodingMacTamil@14$kCFStringEncodingMacTelugu@15$kCFStringEncodingMacThai@21$kCFStringEncodingMacTibetan@26$kCFStringEncodingMacTurkish@35$kCFStringEncodingMacUkrainian@152$kCFStringEncodingMacVT100@252$kCFStringEncodingMacVietnamese@30$kCFStringEncodingNextStepJapanese@2818$kCFStringEncodingNextStepLatin@2817$kCFStringEncodingNonLossyASCII@3071$kCFStringEncodingShiftJIS@2561$kCFStringEncodingShiftJIS_X0213@1576$kCFStringEncodingShiftJIS_X0213_00@15
76$kCFStringEncodingShiftJIS_X0213_MenKuTen@1577$kCFStringEncodingUTF16@256$kCFStringEncodingUTF16BE@268435712$kCFStringEncodingUTF16LE@335544576$kCFStringEncodingUTF32@201326848$kCFStringEncodingUTF32BE@402653440$kCFStringEncodingUTF32LE@469762304$kCFStringEncodingUTF7@67109120$kCFStringEncodingUTF7_IMAP@2576$kCFStringEncodingUTF8@134217984$kCFStringEncodingUnicode@256$kCFStringEncodingVISCII@2567$kCFStringEncodingWindowsArabic@1286$kCFStringEncodingWindowsBalticRim@1287$kCFStringEncodingWindowsCyrillic@1282$kCFStringEncodingWindowsGreek@1283$kCFStringEncodingWindowsHebrew@1285$kCFStringEncodingWindowsKoreanJohab@1296$kCFStringEncodingWindowsLatin1@1280$kCFStringEncodingWindowsLatin2@1281$kCFStringEncodingWindowsLatin5@1284$kCFStringEncodingWindowsVietnamese@1288$kCFStringNormalizationFormC@2$kCFStringNormalizationFormD@0$kCFStringNormalizationFormKC@3$kCFStringNormalizationFormKD@1$kCFStringTokenizerAttributeLanguage@131072$kCFStringTokenizerAttributeLatinTranscription@65536$kCFStringTokenizerTokenHasDerivedSubTokensMask@4$kCFStringTokenizerTokenHasHasNumbersMask@8$kCFStringTokenizerTokenHasNonLettersMask@16$kCFStringTokenizerTokenHasSubTokensMask@2$kCFStringTokenizerTokenIsCJWordMask@32$kCFStringTokenizerTokenNone@0$kCFStringTokenizerTokenNormal@1$kCFStringTokenizerUnitLineBreak@3$kCFStringTokenizerUnitParagraph@2$kCFStringTokenizerUnitSentence@1$kCFStringTokenizerUnitWord@0$kCFStringTokenizerUnitWordBoundary@4$kCFTimeZoneNameStyleDaylightSaving@2$kCFTimeZoneNameStyleGeneric@4$kCFTimeZoneNameStyleShortDaylightSaving@3$kCFTimeZoneNameStyleShortGeneric@5$kCFTimeZoneNameStyleShortStandard@1$kCFTimeZoneNameStyleStandard@0$kCFURLBookmarkCreationMinimalBookmarkMask@512$kCFURLBookmarkCreationPreferFileIDResolutionMask@256$kCFURLBookmarkCreationSecurityScopeAllowOnlyReadAccess@4096$kCFURLBookmarkCreationSuitableForBookmarkFile@1024$kCFURLBookmarkCreationWithSecurityScope@2048$kCFURLBookmarkResolutionWithSecurityScope@1024$kCFURLBookmarkResolutionWithoutMountingMask@512$kCFURLBookmarkResolutionWithoutUIMask@256$kCFURLComponentFragment@12$kCFURLComponentHost@8$kCFURLComponentNetLocation@2$kCFURLComponentParameterString@10$kCFURLComponentPassword@6$kCFURLComponentPath@3$kCFURLComponentPort@9$kCFURLComponentQuery@11$kCFURLComponentResourceSpecifier@4$kCFURLComponentScheme@1$kCFURLComponentUser@5$kCFURLComponentUserInfo@7$kCFURLEnumeratorDefaultBehavior@0$kCFURLEnumeratorDescendRecursively@1$kCFURLEnumeratorDirectoryPostOrderSuccess@4$kCFURLEnumeratorEnd@2$kCFURLEnumeratorError@3$kCFURLEnumeratorGenerateFileReferenceURLs@4$kCFURLEnumeratorIncludeDirectoriesPostOrder@32$kCFURLEnumeratorIncludeDirectoriesPreOrder@16$kCFURLEnumeratorSkipInvisibles@2$kCFURLEnumeratorSkipPackageContents@8$kCFURLEnumeratorSuccess@1$kCFURLHFSPathStyle@1$kCFURLImproperArgumentsError@-15$kCFURLPOSIXPathStyle@0$kCFURLPropertyKeyUnavailableError@-17$kCFURLRemoteHostUnavailableError@-14$kCFURLResourceAccessViolationError@-13$kCFURLResourceNotFoundError@-12$kCFURLTimeoutError@-18$kCFURLUnknownError@-10$kCFURLUnknownPropertyKeyError@-16$kCFURLUnknownSchemeError@-11$kCFURLWindowsPathStyle@2$kCFUserNotificationAlternateResponse@1$kCFUserNotificationCancelResponse@3$kCFUserNotificationCautionAlertLevel@2$kCFUserNotificationDefaultResponse@0$kCFUserNotificationNoDefaultButtonFlag@32$kCFUserNotificationNoteAlertLevel@1$kCFUserNotificationOtherResponse@2$kCFUserNotificationPlainAlertLevel@3$kCFUserNotificationStopAlertLevel@0$kCFUserNotificationUseRadioButtonsFlag@64$kCFXMLEntityTypeCharacter@4$kCFXMLEntityTypeParameter@0$kCFXMLEntityTy
peParsedExternal@2$kCFXMLEntityTypeParsedInternal@1$kCFXMLEntityTypeUnparsed@3$kCFXMLErrorElementlessDocument@11$kCFXMLErrorEncodingConversionFailure@3$kCFXMLErrorMalformedCDSect@7$kCFXMLErrorMalformedCharacterReference@13$kCFXMLErrorMalformedCloseTag@8$kCFXMLErrorMalformedComment@12$kCFXMLErrorMalformedDTD@5$kCFXMLErrorMalformedDocument@10$kCFXMLErrorMalformedName@6$kCFXMLErrorMalformedParsedCharacterData@14$kCFXMLErrorMalformedProcessingInstruction@4$kCFXMLErrorMalformedStartTag@9$kCFXMLErrorNoData@15$kCFXMLErrorUnexpectedEOF@1$kCFXMLErrorUnknownEncoding@2$kCFXMLNodeCurrentVersion@1$kCFXMLNodeTypeAttribute@3$kCFXMLNodeTypeAttributeListDeclaration@15$kCFXMLNodeTypeCDATASection@7$kCFXMLNodeTypeComment@5$kCFXMLNodeTypeDocument@1$kCFXMLNodeTypeDocumentFragment@8$kCFXMLNodeTypeDocumentType@11$kCFXMLNodeTypeElement@2$kCFXMLNodeTypeElementTypeDeclaration@14$kCFXMLNodeTypeEntity@9$kCFXMLNodeTypeEntityReference@10$kCFXMLNodeTypeNotation@13$kCFXMLNodeTypeProcessingInstruction@4$kCFXMLNodeTypeText@6$kCFXMLNodeTypeWhitespace@12$kCFXMLParserAddImpliedAttributes@32$kCFXMLParserAllOptions@16777215$kCFXMLParserNoOptions@0$kCFXMLParserReplacePhysicalEntities@4$kCFXMLParserResolveExternalEntities@16$kCFXMLParserSkipMetaData@2$kCFXMLParserSkipWhitespace@8$kCFXMLParserValidateDocument@1$kCFXMLStatusParseInProgress@-1$kCFXMLStatusParseNotBegun@-2$kCFXMLStatusParseSuccessful@0$''' -misc.update({'kCFCoreFoundationVersionNumber10_7_1': 635.0, 'kCFCoreFoundationVersionNumber10_4_4_Intel': 368.26, 'kCFCoreFoundationVersionNumber10_5_4': 476.14, 'kCFCoreFoundationVersionNumber10_4_4_PowerPC': 368.25, 'kCFCoreFoundationVersionNumber10_8_2': 744.12, 'kCFCoreFoundationVersionNumber10_8_3': 744.18, 'kCFCoreFoundationVersionNumber10_8_1': 744.0, 'kCFCoreFoundationVersionNumber10_5_1': 476.0, 'kCFCoreFoundationVersionNumber10_8_4': 744.19, 'kCFCoreFoundationVersionNumber10_9_2': 855.14, 'kCFCoreFoundationVersionNumber10_9_1': 855.11, 'kCFCoreFoundationVersionNumber10_4_7': 368.27, 'kCFCoreFoundationVersionNumber10_4_2': 368.11, 'kCFCoreFoundationVersionNumber10_4_3': 368.18, 'kCFCoreFoundationVersionNumber10_4_1': 368.1, 'kCFCoreFoundationVersionNumber10_5_7': 476.18, 'kCFCoreFoundationVersionNumber10_5_6': 476.17, 'kCFCoreFoundationVersionNumber10_5_5': 476.15, 'kCFCoreFoundationVersionNumber10_4_6_Intel': 368.26, 'kCFCoreFoundationVersionNumber10_5_3': 476.13, 'kCFCoreFoundationVersionNumber10_5_2': 476.1, 'kCFCoreFoundationVersionNumber10_4_8': 368.27, 'kCFCoreFoundationVersionNumber10_4_9': 368.28, 'kCFCoreFoundationVersionNumber10_2_4': 263.3, 'kCFCoreFoundationVersionNumber10_2_5': 263.5, 'kCFCoreFoundationVersionNumber10_2_6': 263.5, 'kCFCoreFoundationVersionNumber10_2_7': 263.5, 'kCFCoreFoundationVersionNumber10_3_9': 299.35, 'kCFCoreFoundationVersionNumber10_2_1': 263.1, 'kCFCoreFoundationVersionNumber10_2_2': 263.1, 'kCFCoreFoundationVersionNumber10_2_3': 263.3, 'kCFCoreFoundationVersionNumber10_3_5': 299.31, 'kCFCoreFoundationVersionNumber10_3_4': 299.31, 'kCFCoreFoundationVersionNumber10_3_7': 299.33, 'kCFCoreFoundationVersionNumber10_3_6': 299.32, 'kCFCoreFoundationVersionNumber10_3_1': 299.0, 'kCFCoreFoundationVersionNumber10_3_3': 299.3, 'kCFCoreFoundationVersionNumber10_3_2': 299.0, 'kCFCoreFoundationVersionNumber10_1_3': 227.2, 'kCFCoreFoundationVersionNumber10_1_2': 227.2, 'kCFCoreFoundationVersionNumber10_1_1': 226.0, 'kCFCoreFoundationVersionNumber10_1_4': 227.3, 'kCFCoreFoundationVersionNumber10_4_6_PowerPC': 368.25, 'kCFCoreFoundationVersionNumber10_6_2': 550.13, 
'kCFCoreFoundationVersionNumber10_6_3': 550.19, 'kCFCoreFoundationVersionNumber10_6_4': 550.29, 'kCFCoreFoundationVersionNumber10_6_5': 550.42, 'kCFCoreFoundationVersionNumber10_6_6': 550.42, 'kCFCoreFoundationVersionNumber10_6_7': 550.42, 'kCFCoreFoundationVersionNumber10_6_8': 550.43, 'kCFCoreFoundationVersionNumber10_0_3': 196.5, 'kCFCoreFoundationVersionNumber10_7_3': 635.19, 'kCFCoreFoundationVersionNumber10_7_2': 635.15, 'kCFCoreFoundationVersionNumber10_4_10': 368.28, 'kCFCoreFoundationVersionNumber10_7_4': 635.21, 'kCFCoreFoundationVersionNumber10_4_5_PowerPC': 368.25, 'kCFCoreFoundationVersionNumber10_3_8': 299.33, 'kCFCoreFoundationVersionNumber10_6_1': 550.0, 'kCFCoreFoundationVersionNumber10_4_5_Intel': 368.26, 'kCFCoreFoundationVersionNumber10_2_8': 263.5, 'kCFCoreFoundationVersionNumber10_5_8': 476.19, 'kCFCoreFoundationVersionNumber10_4_11': 368.31, 'kCFCoreFoundationVersionNumber10_5': 476.0, 'kCFCoreFoundationVersionNumber10_4': 368.0, 'kCFCoreFoundationVersionNumber10_7': 635.0, 'kCFCoreFoundationVersionNumber10_6': 550.0, 'kCFCoreFoundationVersionNumber10_1': 226.0, 'kCFCoreFoundationVersionNumber10_0': 196.4, 'kCFCoreFoundationVersionNumber10_3': 299.0, 'kCFCoreFoundationVersionNumber10_2': 263.0, 'kCFCoreFoundationVersionNumber10_7_5': 635.21, 'kCFCoreFoundationVersionNumber10_9': 855.11, 'kCFCoreFoundationVersionNumber10_8': 744.0}) -functions={'CFURLCreateByResolvingBookmarkData': (sel32or64(b'^{__CFURL=}^{__CFAllocator=}^{__CFData=}L^{__CFURL=}^{__CFArray=}^Z^^{__CFError=}', b'^{__CFURL=}^{__CFAllocator=}^{__CFData=}Q^{__CFURL=}^{__CFArray=}^Z^^{__CFError=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'type_modifier': 'o'}, 6: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFConvertDoubleSwappedToHost': (b'd{_CFSwappedFloat64=Q}',), 'CFURLCreateCopyAppendingPathComponent': (b'^{__CFURL=}^{__CFAllocator=}^{__CFURL=}^{__CFString=}Z', '', {'retval': {'already_cfretained': True}}), 'CFRangeMake': (sel32or64(b'{_CFRange=ll}ll', b'{_CFRange=qq}qq'),), 'CFBitVectorGetCount': (sel32or64(b'l^{__CFBitVector=}', b'q^{__CFBitVector=}'),), 'CFDictionaryContainsKey': (b'Z^{__CFDictionary=}@',), 'CFPreferencesCopyValue': (b'@^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFGetAllocator': (b'^{__CFAllocator=}@',), 'CFSetCreateMutable': (sel32or64(b'^{__CFSet=}^{__CFAllocator=}l^{_CFSetCallBacks=l^?^?^?^?^?}', b'^{__CFSet=}^{__CFAllocator=}q^{_CFSetCallBacks=q^?^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFErrorGetCode': (sel32or64(b'l^{__CFError=}', b'q^{__CFError=}'),), 'CFStringGetFileSystemRepresentation': (sel32or64(b'Z^{__CFString=}^tl', b'Z^{__CFString=}^tq'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'o'}}}), 'CFLocaleGetTypeID': (sel32or64(b'L', b'Q'),), 'CFUUIDGetUUIDBytes': (b'{_CFUUIDBytes=CCCCCCCCCCCCCCCC}^{__CFUUID=}',), 'CFDateFormatterCreateDateFormatFromTemplate': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^{__CFString=}L^{__CFLocale=}', b'^{__CFString=}^{__CFAllocator=}^{__CFString=}Q^{__CFLocale=}'), '', {'retval': {'already_cfretained': True}}), 'CFTreeInsertSibling': (b'v^{__CFTree=}^{__CFTree=}',), 'CFSocketConnectToAddress': (sel32or64(b'l^{__CFSocket=}^{__CFData=}d', b'q^{__CFSocket=}^{__CFData=}d'),), 'CFWriteStreamScheduleWithRunLoop': (b'v^{__CFWriteStream=}^{__CFRunLoop=}^{__CFString=}',), 'CFDateFormatterCreateStringWithAbsoluteTime': (b'^{__CFString=}^{__CFAllocator=}^{__CFDateFormatter=}d', '', 
{'retval': {'already_cfretained': True}}), 'CFReadStreamScheduleWithRunLoop': (b'v^{__CFReadStream=}^{__CFRunLoop=}^{__CFString=}',), 'CFArrayAppendValue': (b'v^{__CFArray=}@',), 'CFSetRemoveValue': (b'v^{__CFSet=}@',), 'CFBundleCopyPrivateFrameworksURL': (b'^{__CFURL=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFBitVectorCreateMutable': (sel32or64(b'^{__CFBitVector=}^{__CFAllocator=}l', b'^{__CFBitVector=}^{__CFAllocator=}q'), '', {'retval': {'already_cfretained': True}}), 'CFLocaleCreateCanonicalLocaleIdentifierFromString': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFStringTokenizerCopyBestStringLanguage': (sel32or64(b'^{__CFString=}^{__CFString=}{_CFRange=ll}', b'^{__CFString=}^{__CFString=}{_CFRange=qq}'), '', {'retval': {'already_cfretained': True}}), 'CFUUIDCreate': (b'^{__CFUUID=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'CFCalendarGetRangeOfUnit': (sel32or64(b'{_CFRange=ll}^{__CFCalendar=}LLd', b'{_CFRange=qq}^{__CFCalendar=}QQd'),), 'CFStringFindWithOptionsAndLocale': (sel32or64(b'Z^{__CFString=}^{__CFString=}{_CFRange=ll}L^{__CFLocale=}^{_CFRange=ll}', b'Z^{__CFString=}^{__CFString=}{_CFRange=qq}Q^{__CFLocale=}^{_CFRange=qq}'), '', {'arguments': {5: {'type_modifier': 'o'}}}), 'CFURLSetResourcePropertyForKey': (b'Z^{__CFURL=}^{__CFString=}@^^{__CFError=}', '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFFileSecurityCopyOwnerUUID': (b'Z^{__CFFileSecurity=}^^{__CFUUID=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}}}), 'CFCalendarAddComponents': (sel32or64(b'Z^{__CFCalendar=}^dL^c', b'Z^{__CFCalendar=}^dQ^c'), '', {'variadic': True}), 'CFLocaleCopyCommonISOCurrencyCodes': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFCalendarGetOrdinalityOfUnit': (sel32or64(b'l^{__CFCalendar=}LLd', b'q^{__CFCalendar=}QQd'),), 'CFPreferencesRemoveSuitePreferencesFromApp': (b'v^{__CFString=}^{__CFString=}',), 'CFCalendarGetMinimumDaysInFirstWeek': (sel32or64(b'l^{__CFCalendar=}', b'q^{__CFCalendar=}'),), 'CFURLCreateWithFileSystemPathRelativeToBase': (sel32or64(b'^{__CFURL=}^{__CFAllocator=}^{__CFString=}lZ^{__CFURL=}', b'^{__CFURL=}^{__CFAllocator=}^{__CFString=}qZ^{__CFURL=}'), '', {'retval': {'already_cfretained': True}}), 'CFBinaryHeapGetCountOfValue': (sel32or64(b'l^{__CFBinaryHeap=}@', b'q^{__CFBinaryHeap=}@'),), 'CFStringIsSurrogateLowCharacter': (b'ZT',), 'CFStringTrim': (b'v^{__CFString=}^{__CFString=}',), 'CFXMLNodeGetTypeID': (sel32or64(b'L', b'Q'),), 'CFStringSetExternalCharactersNoCopy': (sel32or64(b'v^{__CFString=}^Tll', b'v^{__CFString=}^Tqq'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CFLocaleGetSystem': (b'^{__CFLocale=}',), 'CFDataGetLength': (sel32or64(b'l^{__CFData=}', b'q^{__CFData=}'),), 'CFWriteStreamWrite': (sel32or64(b'l^{__CFWriteStream=}^vl', b'q^{__CFWriteStream=}^vq'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFBundleGetVersionNumber': (sel32or64(b'L^{__CFBundle=}', b'I^{__CFBundle=}'),), 'CFGetRetainCount': (sel32or64(b'l@', b'q@'),), 'CFRunLoopObserverGetContext': (sel32or64(b'v^{__CFRunLoopObserver=}^{_CFRunLoopObserverContext=l^v^?^?^?}', b'v^{__CFRunLoopObserver=}^{_CFRunLoopObserverContext=q^v^?^?^?}'),), 'CFDataCreateWithBytesNoCopy': (sel32or64(b'^{__CFData=}^{__CFAllocator=}^vl^{__CFAllocator=}', 
b'^{__CFData=}^{__CFAllocator=}^vq^{__CFAllocator=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFURLEnumeratorGetNextURL': (sel32or64(b'l^{__CFURLEnumerator=}^^{__CFURL=}^^{__CFError=}', b'q^{__CFURLEnumerator=}^^{__CFURL=}^^{__CFError=}'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFRunLoopTimerGetTypeID': (sel32or64(b'L', b'Q'),), 'CFStringConvertNSStringEncodingToEncoding': (sel32or64(b'LL', b'IQ'),), 'CFURLCreateBookmarkDataFromFile': (b'^{__CFData=}^{__CFAllocator=}^{__CFURL=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFBundleCopyResourceURLForLocalization': (b'^{__CFURL=}^{__CFBundle=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFStringGetMaximumSizeForEncoding': (sel32or64(b'llL', b'qqI'),), 'CFStringTransform': (sel32or64(b'Z^{__CFString=}^{_CFRange=ll}^{__CFString=}Z', b'Z^{__CFString=}^{_CFRange=qq}^{__CFString=}Z'), '', {'arguments': {1: {'type_modifier': 'N'}}}), 'CFURLStopAccessingSecurityScopedResource': (b'v^{__CFURL=}',), 'CFDataCreateMutableCopy': (sel32or64(b'^{__CFData=}^{__CFAllocator=}l^{__CFData=}', b'^{__CFData=}^{__CFAllocator=}q^{__CFData=}'), '', {'retval': {'already_cfretained': True}}), 'CFMachPortGetContext': (sel32or64(b'v^{__CFMachPort=}^{_CFMachPortContext=l^v^?^?^?}', b'v^{__CFMachPort=}^{_CFMachPortContext=q^v^?^?^?}'),), 'CFDateFormatterGetDateStyle': (sel32or64(b'l^{__CFDateFormatter=}', b'q^{__CFDateFormatter=}'),), 'CFStringGetHyphenationLocationBeforeIndex': (sel32or64(b'l^{__CFString=}l{_CFRange=ll}L^{__CFLocale=}^L', b'q^{__CFString=}q{_CFRange=qq}Q^{__CFLocale=}^I'), '', {'arguments': {5: {'type_modifier': 'o'}}}), 'CFRunLoopIsWaiting': (b'Z^{__CFRunLoop=}',), 'CFAttributedStringReplaceString': (sel32or64(b'v^{__CFAttributedString=}{_CFRange=ll}^{__CFString=}', b'v^{__CFAttributedString=}{_CFRange=qq}^{__CFString=}'),), 'CFSocketCreateWithNative': (sel32or64(b'^{__CFSocket=}^{__CFAllocator=}iL^?^{_CFSocketContext=l^v^?^?^?}', b'^{__CFSocket=}^{__CFAllocator=}iQ^?^{_CFSocketContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFSocket=}'}, 1: {'type': b'Q'}, 2: {'type': b'^{__CFData=}'}, 3: {'type': b'^v'}, 4: {'type': b'^v'}}}}}}), 'CFMessagePortCreateLocal': (sel32or64(b'^{__CFMessagePort=}^{__CFAllocator=}^{__CFString=}^?^{_CFMessagePortContext=l^v^?^?^?}^Z', b'^{__CFMessagePort=}^{__CFAllocator=}^{__CFString=}^?^{_CFMessagePortContext=q^v^?^?^?}^Z'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'callable': {'retval': {'type': b'^{__CFData=}'}, 'arguments': {0: {'type': b'^{__CFMessagePort=}'}, 1: {'type': b'i'}, 2: {'type': b'^{__CFData=}'}, 3: {'type': b'^v'}}}}}}), 'CFTimeZoneCopyDefault': (b'^{__CFTimeZone=}', '', {'retval': {'already_cfretained': True}}), 'CFArrayGetValueAtIndex': (sel32or64(b'@^{__CFArray=}l', b'@^{__CFArray=}q'),), 'CFErrorCopyFailureReason': (b'^{__CFString=}^{__CFError=}', '', {'retval': {'already_cfretained': True}}), 'CFBinaryHeapContainsValue': (b'Z^{__CFBinaryHeap=}@',), 'CFNumberFormatterGetStyle': (sel32or64(b'l^{__CFNumberFormatter=}', b'q^{__CFNumberFormatter=}'),), 'CFXMLParserCreate': 
(sel32or64(b'^{__CFXMLParser=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Ll^{_CFXMLParserCallBacks=l^?^?^?^?^?}^{_CFXMLParserContext=l^v^?^?^?}', b'^{__CFXMLParser=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Qq^{_CFXMLParserCallBacks=q^?^?^?^?^?}^{_CFXMLParserContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFLocaleCopyPreferredLanguages': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFBagCreateCopy': (b'^{__CFBag=}^{__CFAllocator=}^{__CFBag=}', '', {'retval': {'already_cfretained': True}}), 'CFNotificationCenterGetDistributedCenter': (b'^{__CFNotificationCenter=}',), 'CFXMLTreeGetNode': (b'^{__CFXMLNode=}^{__CFTree=}',), 'CFDateCreate': (b'^{__CFDate=}^{__CFAllocator=}d', '', {'retval': {'already_cfretained': True}}), 'CFErrorCopyDescription': (b'^{__CFString=}^{__CFError=}', '', {'retval': {'already_cfretained': True}}), 'CFCharacterSetGetTypeID': (sel32or64(b'L', b'Q'),), 'CFWriteStreamCopyProperty': (b'@^{__CFWriteStream=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFAttributedStringGetLength': (sel32or64(b'l^{__CFAttributedString=}', b'q^{__CFAttributedString=}'),), 'CFStringGetCStringPtr': (sel32or64(b'^t@L', b'^t@I'), '', {'retval': {'c_array_delimited_by_null': True}}), 'CFFileDescriptorEnableCallBacks': (sel32or64(b'v^{__CFFileDescriptor=}L', b'v^{__CFFileDescriptor=}Q'),), 'CFURLGetString': (b'^{__CFString=}^{__CFURL=}',), 'CFReadStreamSetProperty': (b'Z^{__CFReadStream=}^{__CFString=}@',), 'CFFileDescriptorInvalidate': (b'v^{__CFFileDescriptor=}',), 'CFBagGetCountOfValue': (sel32or64(b'l^{__CFBag=}@', b'q^{__CFBag=}@'),), 'CFAbsoluteTimeGetCurrent': (b'd',), 'CFLocaleCopyISOCurrencyCodes': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFCalendarGetTypeID': (sel32or64(b'L', b'Q'),), 'CFBundleCopySharedFrameworksURL': (b'^{__CFURL=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFAttributedStringCreateWithSubstring': (sel32or64(b'^{__CFAttributedString=}^{__CFAllocator=}^{__CFAttributedString=}{_CFRange=ll}', b'^{__CFAttributedString=}^{__CFAllocator=}^{__CFAttributedString=}{_CFRange=qq}'), '', {'retval': {'already_cfretained': True}}), 'CFStringAppend': (b'v^{__CFString=}^{__CFString=}',), 'CFRelease': (b'v@',), 'CFAllocatorGetDefault': (b'^{__CFAllocator=}',), 'CFStringTokenizerAdvanceToNextToken': (sel32or64(b'L^{__CFStringTokenizer=}', b'Q^{__CFStringTokenizer=}'),), 'CFAttributedStringGetAttributeAndLongestEffectiveRange': (sel32or64(b'@^{__CFAttributedString=}l^{__CFString=}{_CFRange=ll}^{_CFRange=ll}', b'@^{__CFAttributedString=}q^{__CFString=}{_CFRange=qq}^{_CFRange=qq}'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'CFFileDescriptorGetContext': (sel32or64(b'v^{__CFFileDescriptor=}^{_CFFileDescriptorContext=l^v^?^?^?}', b'v^{__CFFileDescriptor=}^{_CFFileDescriptorContext=q^v^?^?^?}'),), 'CFUserNotificationPopUpSelection': (sel32or64(b'Ll', b'Qq'),), 'CFStringConvertIANACharSetNameToEncoding': (sel32or64(b'L^{__CFString=}', b'I^{__CFString=}'),), 'CFDateFormatterGetTimeStyle': (sel32or64(b'l^{__CFDateFormatter=}', b'q^{__CFDateFormatter=}'),), 'CFSocketSetSocketFlags': (sel32or64(b'v^{__CFSocket=}L', b'v^{__CFSocket=}Q'),), 'CFXMLCreateStringByUnescapingEntities': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CFStringCreateWithSubstring': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^{__CFString=}{_CFRange=ll}', b'^{__CFString=}^{__CFAllocator=}^{__CFString=}{_CFRange=qq}'), '', {'retval': 
{'already_cfretained': True}}), 'CFWriteStreamSetDispatchQueue': (b'v^{__CFWriteStream=}^{dispatch_queue_s=}',), 'CFStringCreateMutableCopy': (sel32or64(b'^{__CFString=}^{__CFAllocator=}l^{__CFString=}', b'^{__CFString=}^{__CFAllocator=}q^{__CFString=}'), '', {'retval': {'already_cfretained': True}}), 'CFBundleCopyExecutableArchitectures': (b'^{__CFArray=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFDictionaryCreateCopy': (b'^{__CFDictionary=}^{__CFAllocator=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopPerformBlock': (b'v^{__CFRunLoop=}@@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'block': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'v'}}}}}}), 'CFStringPad': (sel32or64(b'v^{__CFString=}^{__CFString=}ll', b'v^{__CFString=}^{__CFString=}qq'),), 'CFLocaleGetValue': (b'@^{__CFLocale=}^{__CFString=}',), 'CFLocaleCopyISOLanguageCodes': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFSocketSendData': (sel32or64(b'l^{__CFSocket=}^{__CFData=}^{__CFData=}d', b'q^{__CFSocket=}^{__CFData=}^{__CFData=}d'),), 'CFDataIncreaseLength': (sel32or64(b'v^{__CFData=}l', b'v^{__CFData=}q'),), 'CFBagGetValueIfPresent': (b'Z^{__CFBag=}@^@', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFBooleanGetTypeID': (sel32or64(b'L', b'Q'),), 'CFBundleCopyAuxiliaryExecutableURL': (b'^{__CFURL=}^{__CFBundle=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFURLEnumeratorCreateForMountedVolumes': (sel32or64(b'^{__CFURLEnumerator=}^{__CFAllocator=}L^{__CFArray=}', b'^{__CFURLEnumerator=}^{__CFAllocator=}Q^{__CFArray=}'), '', {'retval': {'already_cfretained': True}}), 'CFFileSecurityGetMode': (b'Z^{__CFFileSecurity=}^S', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFReadStreamSetClient': (sel32or64(b'Z^{__CFReadStream=}L^?^{_CFStreamClientContext=l^v^?^?^?}', b'Z^{__CFReadStream=}Q^?^{_CFStreamClientContext=q^v^?^?^?}'), '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFReadStream=}'}, 1: {'type': b'Q'}, 2: {'type': b'^v'}}}}}}), 'CFStringConvertEncodingToWindowsCodepage': (sel32or64(b'LL', b'II'),), 'CFMachPortGetInvalidationCallBack': (b'^?^{__CFMachPort=}',), 'CFURLCopyFileSystemPath': (sel32or64(b'^{__CFString=}^{__CFURL=}l', b'^{__CFString=}^{__CFURL=}q'), '', {'retval': {'already_cfretained': True}}), 'CFRunLoopSourceSignal': (b'v^{__CFRunLoopSource=}',), 'CFBundleCopyInfoDictionaryInDirectory': (b'^{__CFDictionary=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFUserNotificationDisplayAlert': (sel32or64(b'ldL^{__CFURL=}^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}^L', b'idQ^{__CFURL=}^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}^Q'), '', {'arguments': {10: {'type_modifier': 'o'}}}), 'CFURLClearResourcePropertyCacheForKey': (b'v^{__CFURL=}^{__CFString=}',), 'CFBinaryHeapGetMinimum': (b'@^{__CFBinaryHeap=}',), 'CFNotificationCenterAddObserver': (sel32or64(b'v^{__CFNotificationCenter=}@^?@@l', b'v^{__CFNotificationCenter=}@^?@@q'), '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}}}}), 'CFCalendarCopyTimeZone': (b'^{__CFTimeZone=}^{__CFCalendar=}', '', {'retval': {'already_cfretained': True}}), 'CFAttributedStringGetTypeID': (sel32or64(b'L', b'Q'),), 'CFPropertyListWriteToStream': 
(sel32or64(b'l@^{__CFWriteStream=}l^^{__CFString=}', b'q@^{__CFWriteStream=}q^^{__CFString=}'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'CFBagAddValue': (b'v^{__CFBag=}@',), 'CFCharacterSetCreateMutable': (b'^{__CFCharacterSet=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'CFTreeGetParent': (b'^{__CFTree=}^{__CFTree=}',), 'CFCalendarGetComponentDifference': (sel32or64(b'Z^{__CFCalendar=}ddL^c', b'Z^{__CFCalendar=}ddQ^c'), '', {'variadic': True}), 'CFURLGetByteRangeForComponent': (sel32or64(b'{_CFRange=ll}^{__CFURL=}l^{_CFRange=ll}', b'{_CFRange=qq}^{__CFURL=}q^{_CFRange=qq}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFRunLoopRunInMode': (sel32or64(b'l^{__CFString=}dZ', b'i^{__CFString=}dZ'),), 'CFBundleCopyExecutableURL': (b'^{__CFURL=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFStringReplace': (sel32or64(b'v^{__CFString=}{_CFRange=ll}^{__CFString=}', b'v^{__CFString=}{_CFRange=qq}^{__CFString=}'),), 'CFSocketGetNative': (b'i^{__CFSocket=}',), 'CFConvertFloatSwappedToHost': (b'f{_CFSwappedFloat32=I}',), 'CFBundleOpenBundleResourceMap': (sel32or64(b's^{__CFBundle=}', b'i^{__CFBundle=}'),), 'CFDataFind': (sel32or64(b'{_CFRange=ll}^{__CFData=}^{__CFData=}{_CFRange=ll}L', b'{_CFRange=qq}^{__CFData=}^{__CFData=}{_CFRange=qq}Q'),), 'CFMachPortCreate': (sel32or64(b'^{__CFMachPort=}^{__CFAllocator=}^?^{_CFMachPortContext=l^v^?^?^?}^Z', b'^{__CFMachPort=}^{__CFAllocator=}^?^{_CFMachPortContext=q^v^?^?^?}^Z'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFMachPort=}'}, 1: {'type': b'^v'}, 2: {'type': b'q'}, 3: {'type': b'^v'}}}}}}), 'CFAttributedStringReplaceAttributedString': (sel32or64(b'v^{__CFAttributedString=}{_CFRange=ll}^{__CFAttributedString=}', b'v^{__CFAttributedString=}{_CFRange=qq}^{__CFAttributedString=}'),), 'CFTimeZoneCreateWithName': (b'^{__CFTimeZone=}^{__CFAllocator=}^{__CFString=}Z', '', {'retval': {'already_cfretained': True}}), 'CFBundleGetPackageInfoInDirectory': (sel32or64(b'Z^{__CFURL=}^L^L', b'Z^{__CFURL=}^I^I'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CFURLCreateData': (sel32or64(b'^{__CFData=}^{__CFAllocator=}^{__CFURL=}LZ', b'^{__CFData=}^{__CFAllocator=}^{__CFURL=}IZ'), '', {'retval': {'already_cfretained': True}}), 'CFNumberFormatterCreateStringWithNumber': (b'^{__CFString=}^{__CFAllocator=}^{__CFNumberFormatter=}^{__CFNumber=}', '', {'retval': {'already_cfretained': True}}), 'CFCalendarGetMaximumRangeOfUnit': (sel32or64(b'{_CFRange=ll}^{__CFCalendar=}L', b'{_CFRange=qq}^{__CFCalendar=}Q'),), 'CFRunLoopRemoveSource': (b'v^{__CFRunLoop=}^{__CFRunLoopSource=}^{__CFString=}',), 'CFSwapInt32': (b'II',), 'CFXMLParserCreateWithDataFromURL': (sel32or64(b'^{__CFXMLParser=}^{__CFAllocator=}^{__CFURL=}Ll^{_CFXMLParserCallBacks=l^?^?^?^?^?}^{_CFXMLParserContext=l^v^?^?^?}', b'^{__CFXMLParser=}^{__CFAllocator=}^{__CFURL=}Qq^{_CFXMLParserCallBacks=q^?^?^?^?^?}^{_CFXMLParserContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFRunLoopTimerGetNextFireDate': (b'd^{__CFRunLoopTimer=}',), 'CFBitVectorGetCountOfBit': (sel32or64(b'l^{__CFBitVector=}{_CFRange=ll}L', b'q^{__CFBitVector=}{_CFRange=qq}I'),), 'CFNotificationCenterGetDarwinNotifyCenter': (b'^{__CFNotificationCenter=}',), 'CFPropertyListWrite': (sel32or64(b'l@^{__CFWriteStream=}lL^^{__CFError=}', b'q@^{__CFWriteStream=}qQ^^{__CFError=}'), '', {'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o', 
'null_accepted': True}}}), 'CFDataAppendBytes': (sel32or64(b'v^{__CFData=}^vl', b'v^{__CFData=}^vq'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFDictionaryGetCount': (sel32or64(b'l^{__CFDictionary=}', b'q^{__CFDictionary=}'),), 'CFLocaleCreateLocaleIdentifierFromWindowsLocaleCode': (b'^{__CFString=}^{__CFAllocator=}I', '', {'retval': {'already_cfretained': True}}), 'CFUserNotificationUpdate': (sel32or64(b'l^{__CFUserNotification=}dL^{__CFDictionary=}', b'i^{__CFUserNotification=}dQ^{__CFDictionary=}'),), 'CFMessagePortInvalidate': (b'v^{__CFMessagePort=}',), 'CFSwapInt64': (b'QQ',), 'CFURLCreateWithFileSystemPath': (sel32or64(b'^{__CFURL=}^{__CFAllocator=}^{__CFString=}lZ', b'^{__CFURL=}^{__CFAllocator=}^{__CFString=}qZ'), '', {'retval': {'already_cfretained': True}}), 'CFAttributedStringGetAttributes': (sel32or64(b'^{__CFDictionary=}^{__CFAttributedString=}l^{_CFRange=ll}', b'^{__CFDictionary=}^{__CFAttributedString=}q^{_CFRange=qq}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFSocketSetDefaultNameRegistryPortNumber': (b'vS',), 'CFFileSecurityGetTypeID': (sel32or64(b'L', b'Q'),), 'CFBundleCopyResourceURLsOfType': (b'^{__CFArray=}^{__CFBundle=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFConvertFloat32SwappedToHost': (b'f{_CFSwappedFloat32=I}',), 'CFDictionaryReplaceValue': (b'v^{__CFDictionary=}@@',), 'CFXMLTreeCreateFromDataWithError': (sel32or64(b'^{__CFTree=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Ll^^{__CFDictionary=}', b'^{__CFTree=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Qq^^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'type_modifier': 'o'}}}), 'CFTimeZoneSetDefault': (b'v^{__CFTimeZone=}',), 'CFArrayApplyFunction': (sel32or64(b'v^{__CFArray=}{_CFRange=ll}^?@', b'v^{__CFArray=}{_CFRange=qq}^?@'), '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}}}, 'callable_retained': False}}}), 'CFMessagePortGetInvalidationCallBack': (b'^?^{__CFMessagePort=}', '', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'^v'}}}}}), 'CFUserNotificationGetResponseDictionary': (b'^{__CFDictionary=}^{__CFUserNotification=}',), 'CFSwapInt32HostToLittle': (b'II',), 'CFSocketInvalidate': (b'v^{__CFSocket=}',), 'CFStringGetMostCompatibleMacStringEncoding': (sel32or64(b'LL', b'II'),), 'CFRunLoopObserverIsValid': (b'Z^{__CFRunLoopObserver=}',), 'CFStringInsert': (sel32or64(b'v^{__CFString=}l^{__CFString=}', b'v^{__CFString=}q^{__CFString=}'),), 'CFXMLParserGetTypeID': (sel32or64(b'L', b'Q'),), 'CFMessagePortGetContext': (sel32or64(b'v^{__CFMessagePort=}^{_CFMessagePortContext=l^v^?^?^?}', b'v^{__CFMessagePort=}^{_CFMessagePortContext=q^v^?^?^?}'),), 'CFStringIsEncodingAvailable': (sel32or64(b'ZL', b'ZI'),), 'CFStringGetLength': (sel32or64(b'l^{__CFString=}', b'q^{__CFString=}'),), 'CFURLCanBeDecomposed': (b'Z^{__CFURL=}',), 'CFStringCreateWithCStringNoCopy': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^tL^{__CFAllocator=}', b'^{__CFString=}^{__CFAllocator=}^tI^{__CFAllocator=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CFReadStreamClose': (b'v^{__CFReadStream=}',), 'CFBagCreate': (sel32or64(b'^{__CFBag=}^{__CFAllocator=}^^vl^{_CFBagCallBacks=l^?^?^?^?^?}', b'^{__CFBag=}^{__CFAllocator=}^^vq^{_CFBagCallBacks=q^?^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFRunLoopAddTimer': 
(b'v^{__CFRunLoop=}^{__CFRunLoopTimer=}^{__CFString=}',), 'CFDictionaryGetValueIfPresent': (b'Z^{__CFDictionary=}@^@', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFArrayCreateCopy': (b'^{__CFArray=}^{__CFAllocator=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFBitVectorGetLastIndexOfBit': (sel32or64(b'l^{__CFBitVector=}{_CFRange=ll}L', b'q^{__CFBitVector=}{_CFRange=qq}I'),), 'CFDataCreateMutable': (sel32or64(b'^{__CFData=}^{__CFAllocator=}l', b'^{__CFData=}^{__CFAllocator=}q'), '', {'retval': {'already_cfretained': True}}), 'CFStringAppendCString': (sel32or64(b'v^{__CFString=}^tL', b'v^{__CFString=}^tI'), '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CFLocaleGetIdentifier': (b'^{__CFString=}^{__CFLocale=}',), 'CFStringConvertWindowsCodepageToEncoding': (sel32or64(b'LL', b'II'),), 'CFTreeRemove': (b'v^{__CFTree=}',), 'CFBundleCloseBundleResourceMap': (sel32or64(b'v^{__CFBundle=}s', b'v^{__CFBundle=}i'),), 'CFStreamCreateBoundPair': (sel32or64(b'v^{__CFAllocator=}^^{__CFReadStream=}^^{__CFWriteStream=}l', b'v^{__CFAllocator=}^^{__CFReadStream=}^^{__CFWriteStream=}q'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CFRunLoopTimerSetNextFireDate': (b'v^{__CFRunLoopTimer=}d',), 'CFAllocatorSetDefault': (b'v^{__CFAllocator=}',), 'CFPreferencesSetAppValue': (b'v^{__CFString=}@^{__CFString=}',), 'CFTimeZoneGetNextDaylightSavingTimeTransition': (b'd^{__CFTimeZone=}d',), 'CFDateFormatterGetFormat': (b'^{__CFString=}^{__CFDateFormatter=}',), 'CFLocaleCreateLocaleIdentifierFromComponents': (b'^{__CFString=}^{__CFAllocator=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CFPreferencesSynchronize': (b'Z^{__CFString=}^{__CFString=}^{__CFString=}',), 'CFReadStreamCopyDispatchQueue': (b'^{dispatch_queue_s=}^{__CFReadStream=}', '', {'retval': {'already_cfretained': True}}), 'CFStringGetMaximumSizeOfFileSystemRepresentation': (sel32or64(b'l^{__CFString=}', b'q^{__CFString=}'),), 'CFBundleCreate': (b'^{__CFBundle=}^{__CFAllocator=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFURLGetPortNumber': (sel32or64(b'l^{__CFURL=}', b'i^{__CFURL=}'),), 'CFStringAppendCharacters': (sel32or64(b'v^{__CFString=}^Tl', b'v^{__CFString=}^Tq'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFArrayGetLastIndexOfValue': (sel32or64(b'l^{__CFArray=}{_CFRange=ll}@', b'q^{__CFArray=}{_CFRange=qq}@'),), 'CFRunLoopTimerCreate': (sel32or64(b'^{__CFRunLoopTimer=}^{__CFAllocator=}ddLl^?^{_CFRunLoopTimerContext=l^v^?^?^?}', b'^{__CFRunLoopTimer=}^{__CFAllocator=}ddQq^?^{_CFRunLoopTimerContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFRunLoopTimer=}'}, 1: {'type': b'^v'}}}}}}), 'CFLocaleCreate': (b'^{__CFLocale=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFStringHasSuffix': (b'Z^{__CFString=}^{__CFString=}',), 'CFEqual': (b'Z@@',), 'CFRunLoopGetNextTimerFireDate': (b'd^{__CFRunLoop=}^{__CFString=}',), 'CFErrorCreate': (sel32or64(b'^{__CFError=}^{__CFAllocator=}^{__CFString=}l^{__CFDictionary=}', b'^{__CFError=}^{__CFAllocator=}^{__CFString=}q^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CFStringCreateByCombiningStrings': (b'^{__CFString=}^{__CFAllocator=}^{__CFArray=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 
'CFStringCreateWithFileSystemRepresentation': (b'^{__CFString=}^{__CFAllocator=}^t', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CFBundleGetPlugIn': (b'^{__CFBundle=}^{__CFBundle=}',), 'CFAllocatorGetPreferredSizeForSize': (sel32or64(b'l^{__CFAllocator=}lL', b'q^{__CFAllocator=}qQ'),), 'CFDateFormatterSetFormat': (b'v^{__CFDateFormatter=}^{__CFString=}',), 'CFBinaryHeapApplyFunction': (b'v^{__CFBinaryHeap=}^?@', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}}}, 'callable_retained': False}}}), 'CFRunLoopTimerSetTolerance': (b'v^{__CFRunLoopTimer=}d',), 'CFStringGetBytes': (sel32or64(b'l^{__CFString=}{_CFRange=ll}LCZ^tl^l', b'q^{__CFString=}{_CFRange=qq}ICZ^tq^q'), '', {'arguments': {5: {'c_array_length_in_arg': (6, 7), 'type_modifier': 'o'}, 7: {'type_modifier': 'o'}}}), 'CFLocaleCopyAvailableLocaleIdentifiers': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFStringCreateArrayBySeparatingStrings': (b'^{__CFArray=}^{__CFAllocator=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFStringGetDoubleValue': (b'd^{__CFString=}',), 'CFStringCreateMutable': (sel32or64(b'^{__CFString=}^{__CFAllocator=}l', b'^{__CFString=}^{__CFAllocator=}q'), '', {'retval': {'already_cfretained': True}}), 'CFSetGetCount': (sel32or64(b'l^{__CFSet=}', b'q^{__CFSet=}'),), 'CFURLCreateWithBytes': (sel32or64(b'^{__CFURL=}^{__CFAllocator=}^vlL^{__CFURL=}', b'^{__CFURL=}^{__CFAllocator=}^vqI^{__CFURL=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFStringCompareWithOptionsAndLocale': (sel32or64(b'l^{__CFString=}^{__CFString=}{_CFRange=ll}L^{__CFLocale=}', b'q^{__CFString=}^{__CFString=}{_CFRange=qq}Q^{__CFLocale=}'),), 'CFPropertyListCreateFromStream': (sel32or64(b'@^{__CFAllocator=}^{__CFReadStream=}lL^l^^{__CFString=}', b'@^{__CFAllocator=}^{__CFReadStream=}qQ^q^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'type_modifier': 'o'}, 5: {'type_modifier': 'o'}}}), 'CFAbsoluteTimeAddGregorianUnits': (sel32or64(b'dd^{__CFTimeZone=}{_CFGregorianUnits=llllld}', b'dd^{__CFTimeZone=}{_CFGregorianUnits=iiiiid}'),), 'CFLocaleCopyCurrent': (b'^{__CFLocale=}', '', {'retval': {'already_cfretained': True}}), 'CFDateFormatterGetLocale': (b'^{__CFLocale=}^{__CFDateFormatter=}',), 'CFURLEnumeratorGetSourceDidChange': (b'Z^{__CFURLEnumerator=}',), 'CFNullGetTypeID': (sel32or64(b'L', b'Q'),), 'CFStringUppercase': (b'v^{__CFString=}^{__CFLocale=}',), 'CFTreeGetFirstChild': (b'^{__CFTree=}^{__CFTree=}',), 'CFAbsoluteTimeGetDayOfYear': (sel32or64(b'ld^{__CFTimeZone=}', b'id^{__CFTimeZone=}'),), 'CFURLCreateFromFileSystemRepresentation': (sel32or64(b'^{__CFURL=}^{__CFAllocator=}^tlZ', b'^{__CFURL=}^{__CFAllocator=}^tqZ'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CFBundleGetInfoDictionary': (b'^{__CFDictionary=}^{__CFBundle=}',), 'CFByteOrderGetCurrent': (sel32or64(b'l', b'q'),), 'CFAttributedStringEndEditing': (b'v^{__CFAttributedString=}',), 'CFUserNotificationCancel': (sel32or64(b'l^{__CFUserNotification=}', b'i^{__CFUserNotification=}'),), 'CFUserNotificationSecureTextField': (sel32or64(b'Ll', b'Qq'),), 'CFBitVectorCreate': (sel32or64(b'^{__CFBitVector=}^{__CFAllocator=}^Cl', b'^{__CFBitVector=}^{__CFAllocator=}^Cq'), '', {'retval': {'already_cfretained': 
True}, 'arguments': {1: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CFCharacterSetRemoveCharactersInRange': (sel32or64(b'v^{__CFCharacterSet=}{_CFRange=ll}', b'v^{__CFCharacterSet=}{_CFRange=qq}'),), 'CFMachPortSetInvalidationCallBack': (b'v^{__CFMachPort=}^?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFMachPort=}'}, 1: {'type': b'^v'}}}}}}), 'CFCharacterSetAddCharactersInString': (b'v^{__CFCharacterSet=}^{__CFString=}',), 'CFBitVectorGetBitAtIndex': (sel32or64(b'L^{__CFBitVector=}l', b'I^{__CFBitVector=}q'),), 'CFURLIsFileReferenceURL': (b'Z^{__CFURL=}',), 'CFURLCopyPath': (b'^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFArrayGetFirstIndexOfValue': (sel32or64(b'l^{__CFArray=}{_CFRange=ll}@', b'q^{__CFArray=}{_CFRange=qq}@'),), 'CFCharacterSetCreateWithCharactersInRange': (sel32or64(b'^{__CFCharacterSet=}^{__CFAllocator=}{_CFRange=ll}', b'^{__CFCharacterSet=}^{__CFAllocator=}{_CFRange=qq}'), '', {'retval': {'already_cfretained': True}}), 'CFCharacterSetIsLongCharacterMember': (sel32or64(b'Z^{__CFCharacterSet=}L', b'Z^{__CFCharacterSet=}I'),), 'CFUUIDCreateFromUUIDBytes': (b'^{__CFUUID=}^{__CFAllocator=}{_CFUUIDBytes=CCCCCCCCCCCCCCCC}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopAddSource': (b'v^{__CFRunLoop=}^{__CFRunLoopSource=}^{__CFString=}',), 'CFDictionaryContainsValue': (b'Z^{__CFDictionary=}@',), 'CFTimeZoneCopyKnownNames': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFBundleCopyPreferredLocalizationsFromArray': (b'^{__CFArray=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFBitVectorFlipBitAtIndex': (sel32or64(b'v^{__CFBitVector=}l', b'v^{__CFBitVector=}q'),), 'CFPropertyListCreateXMLData': (b'^{__CFData=}^{__CFAllocator=}@', '', {'retval': {'already_cfretained': True}}), 'CFURLCreateResourcePropertyForKeyFromBookmarkData': (b'@^{__CFAllocator=}^{__CFString=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CFDateGetAbsoluteTime': (b'd^{__CFDate=}',), 'CFNumberIsFloatType': (b'Z^{__CFNumber=}',), 'CFTreePrependChild': (b'v^{__CFTree=}^{__CFTree=}',), 'CFRunLoopWakeUp': (b'v^{__CFRunLoop=}',), 'CFDateFormatterCreateStringWithDate': (b'^{__CFString=}^{__CFAllocator=}^{__CFDateFormatter=}^{__CFDate=}', '', {'retval': {'already_cfretained': True}}), 'CFCharacterSetHasMemberInPlane': (sel32or64(b'Z^{__CFCharacterSet=}l', b'Z^{__CFCharacterSet=}q'),), 'CFURLCopyResourceSpecifier': (b'^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFStringFold': (sel32or64(b'v^{__CFString=}L^{__CFLocale=}', b'v^{__CFString=}Q^{__CFLocale=}'),), 'CFStringTokenizerCopyCurrentTokenAttribute': (sel32or64(b'@^{__CFStringTokenizer=}L', b'@^{__CFStringTokenizer=}Q'), '', {'retval': {'already_cfretained': True}}), 'CFNotificationCenterRemoveEveryObserver': (b'v^{__CFNotificationCenter=}@',), 'CFMessagePortGetName': (b'^{__CFString=}^{__CFMessagePort=}',), 'CFURLCopyPassword': (b'^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFWriteStreamClose': (b'v^{__CFWriteStream=}',), 'CFMessagePortCreateRunLoopSource': (sel32or64(b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFMessagePort=}l', b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFMessagePort=}q'), '', {'retval': {'already_cfretained': True}}), 'CFStringGetCString': (sel32or64(b'Z^{__CFString=}^tlL', b'Z^{__CFString=}^tqI'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'o'}}}), '_CFAutoreleasePoolPrintPools': (b'v',), 
'CFPropertyListCreateData': (sel32or64(b'^{__CFData=}^{__CFAllocator=}@lL^^{__CFError=}', b'^{__CFData=}^{__CFAllocator=}@qQ^^{__CFError=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFURLCopyQueryString': (b'^{__CFString=}^{__CFURL=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopTimerCreateWithHandler': (sel32or64(b'^{__CFRunLoopTimer=}^{__CFAllocator=}ddLl@?', b'^{__CFRunLoopTimer=}^{__CFAllocator=}ddQq@?'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^{__CFRunLoopTimer=}'}}}, 'block': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFRunLoopTimer=}'}}}}}}), 'CFTimeZoneCopyAbbreviationDictionary': (b'^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CFStringConvertEncodingToIANACharSetName': (sel32or64(b'^{__CFString=}L', b'^{__CFString=}I'),), 'CFSwapInt16LittleToHost': (b'SS',), 'CFNumberFormatterSetProperty': (b'v^{__CFNumberFormatter=}^{__CFString=}@',), 'CFSocketCopyPeerAddress': (b'^{__CFData=}^{__CFSocket=}', '', {'retval': {'already_cfretained': True}}), 'CFURLCreateFromFileSystemRepresentationRelativeToBase': (sel32or64(b'^{__CFURL=}^{__CFAllocator=}^tlZ^{__CFURL=}', b'^{__CFURL=}^{__CFAllocator=}^tqZ^{__CFURL=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CFAttributedStringSetAttributes': (sel32or64(b'v^{__CFAttributedString=}{_CFRange=ll}^{__CFDictionary=}Z', b'v^{__CFAttributedString=}{_CFRange=qq}^{__CFDictionary=}Z'),), 'CFNumberFormatterCopyProperty': (b'@^{__CFNumberFormatter=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFSocketGetTypeID': (sel32or64(b'L', b'Q'),), 'CFURLGetBaseURL': (b'^{__CFURL=}^{__CFURL=}',), 'CFSetGetCountOfValue': (sel32or64(b'l^{__CFSet=}@', b'q^{__CFSet=}@'),), 'CFWriteStreamSetProperty': (b'Z^{__CFWriteStream=}^{__CFString=}@',), 'CFDictionarySetValue': (b'v^{__CFDictionary=}@@',), 'CFRunLoopSourceGetTypeID': (sel32or64(b'L', b'Q'),), 'CFWriteStreamCopyError': (b'^{__CFError=}^{__CFWriteStream=}', '', {'retval': {'already_cfretained': True}}), 'CFBitVectorSetBits': (sel32or64(b'v^{__CFBitVector=}{_CFRange=ll}L', b'v^{__CFBitVector=}{_CFRange=qq}I'),), 'CFURLCreateBookmarkDataFromAliasRecord': (b'^{__CFData=}^{__CFAllocator=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CFAttributedStringRemoveAttribute': (sel32or64(b'v^{__CFAttributedString=}{_CFRange=ll}^{__CFString=}', b'v^{__CFAttributedString=}{_CFRange=qq}^{__CFString=}'),), 'CFURLCreateFromFSRef': (b'^{__CFURL=}^{__CFAllocator=}^{FSRef=[80C]}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}}}), 'CFBitVectorFlipBits': (sel32or64(b'v^{__CFBitVector=}{_CFRange=ll}', b'v^{__CFBitVector=}{_CFRange=qq}'),), 'CFCalendarCopyCurrent': (b'^{__CFCalendar=}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopAddObserver': (b'v^{__CFRunLoop=}^{__CFRunLoopObserver=}^{__CFString=}',), 'CFURLCreateCopyDeletingPathExtension': (b'^{__CFURL=}^{__CFAllocator=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFMessagePortIsValid': (b'Z^{__CFMessagePort=}',), 'CFBundleCopySupportFilesDirectoryURL': (b'^{__CFURL=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFNumberGetType': (sel32or64(b'l^{__CFNumber=}', b'q^{__CFNumber=}'),), 'CFDataCreateCopy': 
(b'^{__CFData=}^{__CFAllocator=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CFDictionaryCreateMutableCopy': (sel32or64(b'^{__CFDictionary=}^{__CFAllocator=}l^{__CFDictionary=}', b'^{__CFDictionary=}^{__CFAllocator=}q^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CFStringConvertEncodingToNSStringEncoding': (sel32or64(b'LL', b'QI'),), 'CFXMLParserGetSourceURL': (b'^{__CFURL=}^{__CFXMLParser=}',), 'CFSetContainsValue': (b'Z^{__CFSet=}@',), 'CFBundleCopyInfoDictionaryForURL': (b'^{__CFDictionary=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFMessagePortSetInvalidationCallBack': (b'v^{__CFMessagePort=}^?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFMessagePort=}'}, 1: {'type': b'^v'}}}}}}), 'CFTreeRemoveAllChildren': (b'v^{__CFTree=}',), 'CFFileDescriptorIsValid': (b'Z^{__CFFileDescriptor=}',), 'CFSetGetValueIfPresent': (b'Z^{__CFSet=}@^@', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFBinaryHeapGetCount': (sel32or64(b'l^{__CFBinaryHeap=}', b'q^{__CFBinaryHeap=}'),), 'CFRunLoopContainsObserver': (b'Z^{__CFRunLoop=}^{__CFRunLoopObserver=}^{__CFString=}',), 'CFRunLoopObserverGetOrder': (sel32or64(b'l^{__CFRunLoopObserver=}', b'q^{__CFRunLoopObserver=}'),), 'CFBagReplaceValue': (b'v^{__CFBag=}@',), 'CFTreeSetContext': (sel32or64(b'v^{__CFTree=}^{_CFTreeContext=l^v^?^?^?}', b'v^{__CFTree=}^{_CFTreeContext=q^v^?^?^?}'),), 'CFReadStreamCopyProperty': (b'@^{__CFReadStream=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFStringIsHyphenationAvailableForLocale': (b'Z^{__CFLocale=}',), 'CFWriteStreamCopyDispatchQueue': (b'^{dispatch_queue_s=}^{__CFWriteStream=}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopSourceCreate': (sel32or64(b'^{__CFRunLoopSource=}^{__CFAllocator=}l^{_CFRunLoopSourceContext=l^v^?^?^?^?^?^?^?^?}', b'^{__CFRunLoopSource=}^{__CFAllocator=}q^{_CFRunLoopSourceContext=q^v^?^?^?^?^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFCharacterSetInvert': (b'v^{__CFCharacterSet=}',), 'CFMachPortInvalidate': (b'v^{__CFMachPort=}',), 'CFSwapInt32BigToHost': (b'II',), 'CFDataReplaceBytes': (sel32or64(b'v^{__CFData=}{_CFRange=ll}^vl', b'v^{__CFData=}{_CFRange=qq}^vq'), '', {'arguments': {2: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CFDataGetBytePtr': (b'^v^{__CFData=}', '', {'retval': {'c_array_of_variable_length': True}}), 'CFSocketDisableCallBacks': (sel32or64(b'v^{__CFSocket=}L', b'v^{__CFSocket=}Q'),), 'CFBundleCopyLocalizationsForURL': (b'^{__CFArray=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFReadStreamGetBuffer': (sel32or64(b'^v^{__CFReadStream=}l^l', b'^v^{__CFReadStream=}q^q'), '', {'retval': {'c_array_length_in_arg': 2}, 'arguments': {2: {'type_modifier': 'o'}}}), 'CFErrorGetDomain': (b'^{__CFString=}^{__CFError=}',), 'CFStringHasPrefix': (b'Z^{__CFString=}^{__CFString=}',), 'CFTimeZoneIsDaylightSavingTime': (b'Z^{__CFTimeZone=}d',), 'CFWriteStreamCanAcceptBytes': (b'Z^{__CFWriteStream=}',), 'CFWriteStreamOpen': (b'Z^{__CFWriteStream=}',), 'CFBitVectorSetCount': (sel32or64(b'v^{__CFBitVector=}l', b'v^{__CFBitVector=}q'),), 'CFErrorCreateWithUserInfoKeysAndValues': (sel32or64(b'^{__CFError=}^{__CFAllocator=}^{__CFString=}l^@^@l', b'^{__CFError=}^{__CFAllocator=}^{__CFString=}q^@^@q'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'c_array_length_in_arg': 5, 'type_modifier': 'n'}, 4: {'c_array_length_in_arg': 5, 'type_modifier': 'n'}}}), 'CFUserNotificationCreate': 
(sel32or64(b'^{__CFUserNotification=}^{__CFAllocator=}dL^l^{__CFDictionary=}', b'^{__CFUserNotification=}^{__CFAllocator=}dQ^i^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'o'}}}), 'CFURLResourceIsReachable': (b'Z^{__CFURL=}^^{__CFError=}', '', {'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFStringIsSurrogateHighCharacter': (b'ZT',), 'CFPropertyListCreateWithData': (sel32or64(b'@^{__CFAllocator=}^{__CFData=}L^l^^{__CFError=}', b'@^{__CFAllocator=}^{__CFData=}Q^q^^{__CFError=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'o'}, 4: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFSocketGetDefaultNameRegistryPortNumber': (b'S',), 'CFBundleCopyLocalizationsForPreferences': (b'^{__CFArray=}^{__CFArray=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopObserverInvalidate': (b'v^{__CFRunLoopObserver=}',), 'CFURLGetFSRef': (b'Z^{__CFURL=}^{FSRef=[80C]}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFURLCreateCopyDeletingLastPathComponent': (b'^{__CFURL=}^{__CFAllocator=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFBundleCreateBundlesFromDirectory': (b'^{__CFArray=}^{__CFAllocator=}^{__CFURL=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFStringFindCharacterFromSet': (sel32or64(b'Z^{__CFString=}^{__CFCharacterSet=}{_CFRange=ll}L^{_CFRange=ll}', b'Z^{__CFString=}^{__CFCharacterSet=}{_CFRange=qq}Q^{_CFRange=qq}'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'CFAttributedStringCreateMutable': (sel32or64(b'^{__CFAttributedString=}^{__CFAllocator=}l', b'^{__CFAttributedString=}^{__CFAllocator=}q'), '', {'retval': {'already_cfretained': True}}), 'CFTreeGetContext': (sel32or64(b'v^{__CFTree=}^{_CFTreeContext=l^v^?^?^?}', b'v^{__CFTree=}^{_CFTreeContext=q^v^?^?^?}'),), 'CFArrayCreateMutable': (sel32or64(b'^{__CFArray=}^{__CFAllocator=}l^{_CFArrayCallBacks=l^?^?^?^?}', b'^{__CFArray=}^{__CFAllocator=}q^{_CFArrayCallBacks=q^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFShow': (b'v@',), 'CFFileSecuritySetOwner': (b'Z^{__CFFileSecurity=}I',), 'CFSocketCopyAddress': (b'^{__CFData=}^{__CFSocket=}', '', {'retval': {'already_cfretained': True}}), 'CFBagGetValues': (b'v^{__CFBag=}^@', '', {'arguments': {1: {'c_array_of_variable_length': True, 'type_modifier': 'o'}}}), 'CFFileSecuritySetOwnerUUID': (b'Z^{__CFFileSecurity=}^{__CFUUID=}',), 'CFLocaleCreateCanonicalLocaleIdentifierFromScriptManagerCodes': (b'^{__CFString=}^{__CFAllocator=}ss', '', {'retval': {'already_cfretained': True}}), 'CFDictionaryRemoveValue': (b'v^{__CFDictionary=}@',), 'CFWriteStreamSetClient': (sel32or64(b'Z^{__CFWriteStream=}L^?^{_CFStreamClientContext=l^v^?^?^?}', b'Z^{__CFWriteStream=}Q^?^{_CFStreamClientContext=q^v^?^?^?}'), '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFWriteStream=}'}, 1: {'type': b'Q'}, 2: {'type': b'^v'}}}}}}), 'CFRunLoopSourceIsValid': (b'Z^{__CFRunLoopSource=}',), 'CFCharacterSetIsCharacterMember': (b'Z^{__CFCharacterSet=}T',), 'CFTreeGetChildCount': (sel32or64(b'l^{__CFTree=}', b'q^{__CFTree=}'),), 'CFURLSetTemporaryResourcePropertyForKey': (b'v^{__CFURL=}^{__CFString=}@',), 'CFConvertDoubleHostToSwapped': (b'{_CFSwappedFloat64=Q}d',), 'CFSetGetValue': (b'@^{__CFSet=}@',), 'CFMessagePortSendRequest': (sel32or64(b'l^{__CFMessagePort=}l^{__CFData=}dd^{__CFString=}^^{__CFData=}', 
b'i^{__CFMessagePort=}i^{__CFData=}dd^{__CFString=}^^{__CFData=}'), '', {'arguments': {6: {'type_modifier': 'o'}}}), 'CFUUIDGetConstantUUIDWithBytes': (b'^{__CFUUID=}^{__CFAllocator=}CCCCCCCCCCCCCCCC',), 'CFSocketCreateRunLoopSource': (sel32or64(b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFSocket=}l', b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFSocket=}q'), '', {'retval': {'already_cfretained': True}}), 'CFWriteStreamCreateWithBuffer': (sel32or64(b'^{__CFWriteStream=}^{__CFAllocator=}^vl', b'^{__CFWriteStream=}^{__CFAllocator=}^vq'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFXMLTreeCreateXMLData': (b'^{__CFData=}^{__CFAllocator=}^{__CFTree=}', '', {'retval': {'already_cfretained': True}}), 'CFURLCopyUserName': (b'^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFURLCopyAbsoluteURL': (b'^{__CFURL=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFNumberGetTypeID': (sel32or64(b'L', b'Q'),), 'CFStringCompareWithOptions': (sel32or64(b'l^{__CFString=}^{__CFString=}{_CFRange=ll}L', b'q^{__CFString=}^{__CFString=}{_CFRange=qq}Q'),), 'CFWriteStreamGetTypeID': (sel32or64(b'L', b'Q'),), 'CFSwapInt16': (b'SS',), 'CFDateGetTimeIntervalSinceDate': (b'd^{__CFDate=}^{__CFDate=}',), 'CFAttributedStringGetString': (b'^{__CFString=}^{__CFAttributedString=}',), 'CFURLCopyNetLocation': (b'^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFStringFind': (sel32or64(b'{_CFRange=ll}^{__CFString=}^{__CFString=}L', b'{_CFRange=qq}^{__CFString=}^{__CFString=}Q'),), 'CFConvertFloat64HostToSwapped': (b'{_CFSwappedFloat64=Q}d',), 'CFSetReplaceValue': (b'v^{__CFSet=}@',), 'CFURLCreateCopyAppendingPathExtension': (b'^{__CFURL=}^{__CFAllocator=}^{__CFURL=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFTimeZoneCreate': (b'^{__CFTimeZone=}^{__CFAllocator=}^{__CFString=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CFURLCopyScheme': (b'^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFXMLParserParse': (b'Z^{__CFXMLParser=}',), 'CFRunLoopRemoveTimer': (b'v^{__CFRunLoop=}^{__CFRunLoopTimer=}^{__CFString=}',), 'CFPreferencesAppValueIsForced': (b'Z^{__CFString=}^{__CFString=}',), 'CFSocketCreate': (sel32or64(b'^{__CFSocket=}^{__CFAllocator=}lllL^?^{_CFSocketContext=l^v^?^?^?}', b'^{__CFSocket=}^{__CFAllocator=}iiiQ^?^{_CFSocketContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFSocket=}'}, 1: {'type': b'Q'}, 2: {'type': b'^{__CFData=}'}, 3: {'type': b'^v'}, 4: {'type': b'^v'}}}}}}), 'CFNotificationCenterGetTypeID': (sel32or64(b'L', b'Q'),), 'CFURLCreateStringByReplacingPercentEscapes': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFBitVectorContainsBit': (sel32or64(b'Z^{__CFBitVector=}{_CFRange=ll}L', b'Z^{__CFBitVector=}{_CFRange=qq}I'),), 'CFMachPortCreateWithPort': (sel32or64(b'^{__CFMachPort=}^{__CFAllocator=}I^?^{_CFMachPortContext=l^v^?^?^?}^Z', b'^{__CFMachPort=}^{__CFAllocator=}I^?^{_CFMachPortContext=q^v^?^?^?}^Z'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFMachPort=}'}, 1: {'type': b'^v'}, 2: {'type': b'q'}, 3: {'type': b'^v'}}}}}}), 'CFPreferencesAppSynchronize': (b'Z^{__CFString=}',), 'CFFileDescriptorGetTypeID': (sel32or64(b'L', b'Q'),), 
'CFBundleCopyBundleLocalizations': (b'^{__CFArray=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFFileSecurityCreate': (b'^{__CFFileSecurity=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'CFHash': (sel32or64(b'L@', b'Q@'),), 'CFCharacterSetIntersect': (b'v^{__CFCharacterSet=}^{__CFCharacterSet=}',), 'CFXMLNodeCreateCopy': (b'^{__CFXMLNode=}^{__CFAllocator=}^{__CFXMLNode=}', '', {'retval': {'already_cfretained': True}}), 'CFArrayCreate': (sel32or64(b'^{__CFArray=}^{__CFAllocator=}^^vl^{_CFArrayCallBacks=l^?^?^?^?}', b'^{__CFArray=}^{__CFAllocator=}^^vq^{_CFArrayCallBacks=q^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFBooleanGetValue': (b'Z^{__CFBoolean=}',), 'CFArrayContainsValue': (sel32or64(b'Z^{__CFArray=}{_CFRange=ll}@', b'Z^{__CFArray=}{_CFRange=qq}@'),), 'CFSwapInt32HostToBig': (b'II',), 'CFURLWriteDataAndPropertiesToResource': (sel32or64(b'Z^{__CFURL=}^{__CFData=}^{__CFDictionary=}^l', b'Z^{__CFURL=}^{__CFData=}^{__CFDictionary=}^i'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'CFArrayInsertValueAtIndex': (sel32or64(b'v^{__CFArray=}l@', b'v^{__CFArray=}q@'),), 'CFDictionaryCreateMutable': (sel32or64(b'^{__CFDictionary=}^{__CFAllocator=}l^{_CFDictionaryKeyCallBacks=l^?^?^?^?^?}^{_CFDictionaryValueCallBacks=l^?^?^?^?}', b'^{__CFDictionary=}^{__CFAllocator=}q^{_CFDictionaryKeyCallBacks=q^?^?^?^?^?}^{_CFDictionaryValueCallBacks=q^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFAllocatorGetTypeID': (sel32or64(b'L', b'Q'),), 'CFReadStreamRead': (sel32or64(b'l^{__CFReadStream=}^vl', b'q^{__CFReadStream=}^vq'), '', {'arguments': {1: {'c_array_length_in_result': True, 'type_modifier': 'o', 'c_array_length_in_arg': 2}}}), 'CFDataGetBytes': (sel32or64(b'v^{__CFData=}{_CFRange=ll}^v', b'v^{__CFData=}{_CFRange=qq}^v'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'CFStringCreateWithCharactersNoCopy': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^Tl^{__CFAllocator=}', b'^{__CFString=}^{__CFAllocator=}^Tq^{__CFAllocator=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CFStringGetLongCharacterForSurrogatePair': (sel32or64(b'LTT', b'ITT'),), 'CFSetAddValue': (b'v^{__CFSet=}@',), 'CFFileSecuritySetMode': (b'Z^{__CFFileSecurity=}S',), 'CFURLCreateStringByAddingPercentEscapes': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}^{__CFString=}L', b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}^{__CFString=}I'), '', {'retval': {'already_cfretained': True}}), 'CFStringGetIntValue': (sel32or64(b'l^{__CFString=}', b'i^{__CFString=}'),), 'CFDictionaryGetCountOfValue': (sel32or64(b'l^{__CFDictionary=}@', b'q^{__CFDictionary=}@'),), 'CFDataGetMutableBytePtr': (b'^v^{__CFData=}', '', {'retval': {'c_array_of_variable_length': True}}), 'CFURLCreateAbsoluteURLWithBytes': (sel32or64(b'^{__CFURL=}^{__CFAllocator=}^vlL^{__CFURL=}Z', b'^{__CFURL=}^{__CFAllocator=}^vqI^{__CFURL=}Z'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFBundleOpenBundleResourceFiles': (sel32or64(b'l^{__CFBundle=}^s^s', b'i^{__CFBundle=}^i^i'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CFRunLoopTimerGetInterval': (b'd^{__CFRunLoopTimer=}',), 'CFFileSecurityGetOwner': (b'Z^{__CFFileSecurity=}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFCalendarGetIdentifier': (b'^{__CFString=}^{__CFCalendar=}',), 
'CFStringCompare': (sel32or64(b'l^{__CFString=}^{__CFString=}L', b'q^{__CFString=}^{__CFString=}Q'),), 'CFURLClearResourcePropertyCache': (b'v^{__CFURL=}',), 'CFRunLoopSourceInvalidate': (b'v^{__CFRunLoopSource=}',), 'CFCalendarCreateWithIdentifier': (b'^{__CFCalendar=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFMachPortGetPort': (b'I^{__CFMachPort=}',), 'CFBinaryHeapRemoveMinimumValue': (b'v^{__CFBinaryHeap=}',), 'CFBagRemoveValue': (b'v^{__CFBag=}@',), 'CFStringReplaceAll': (b'v^{__CFString=}^{__CFString=}',), 'CFArraySetValueAtIndex': (sel32or64(b'v^{__CFArray=}l@', b'v^{__CFArray=}q@'),), 'CFBundleCopyExecutableArchitecturesForURL': (b'^{__CFArray=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopObserverDoesRepeat': (b'Z^{__CFRunLoopObserver=}',), 'CFDateCompare': (sel32or64(b'l^{__CFDate=}^{__CFDate=}^v', b'q^{__CFDate=}^{__CFDate=}^v'),), 'CFGregorianDateIsValid': (sel32or64(b'Z{_CFGregorianDate=lccccd}L', b'Z{_CFGregorianDate=iccccd}Q'),), 'CFAutorelease': (b'@@',), 'CFRunLoopTimerInvalidate': (b'v^{__CFRunLoopTimer=}',), 'CFDictionaryGetCountOfKey': (sel32or64(b'l^{__CFDictionary=}@', b'q^{__CFDictionary=}@'),), 'CFStringGetCharacterAtIndex': (sel32or64(b'T^{__CFString=}l', b'T^{__CFString=}q'),), 'CFStringCreateWithCString': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^tL', b'^{__CFString=}^{__CFAllocator=}^tI'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CFReadStreamCopyError': (b'^{__CFError=}^{__CFReadStream=}', '', {'retval': {'already_cfretained': True}}), 'CFUserNotificationCheckBoxChecked': (sel32or64(b'Ll', b'Qq'),), 'CFAttributedStringCreate': (b'^{__CFAttributedString=}^{__CFAllocator=}^{__CFString=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CFStringFindWithOptions': (sel32or64(b'Z^{__CFString=}^{__CFString=}{_CFRange=ll}L^{_CFRange=ll}', b'Z^{__CFString=}^{__CFString=}{_CFRange=qq}Q^{_CFRange=qq}'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'CFSetRemoveAllValues': (b'v^{__CFSet=}',), 'CFArraySortValues': (sel32or64(b'v^{__CFArray=}{_CFRange=ll}^?@', b'v^{__CFArray=}{_CFRange=qq}^?@'), '', {'arguments': {2: {'callable': {'retval': {'type': b'l'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'callable_retained': False}}}), 'CFCalendarGetFirstWeekday': (sel32or64(b'l^{__CFCalendar=}', b'q^{__CFCalendar=}'),), 'CFStreamCreatePairWithPeerSocketSignature': (sel32or64(b'v^{__CFAllocator=}^{_CFSocketSignature=lll^{__CFData=}}^^{__CFReadStream=}^^{__CFWriteStream=}', b'v^{__CFAllocator=}^{_CFSocketSignature=iii^{__CFData=}}^^{__CFReadStream=}^^{__CFWriteStream=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CFURLSetResourcePropertiesForKeys': (b'Z^{__CFURL=}^{__CFDictionary=}^^{__CFError=}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFXMLParserGetLocation': (sel32or64(b'l^{__CFXMLParser=}', b'q^{__CFXMLParser=}'),), 'CFStringTokenizerGetCurrentTokenRange': (sel32or64(b'{_CFRange=ll}^{__CFStringTokenizer=}', b'{_CFRange=qq}^{__CFStringTokenizer=}'),), 'CFBagContainsValue': (b'Z^{__CFBag=}@',), 'CFUUIDCreateWithBytes': (b'^{__CFUUID=}^{__CFAllocator=}CCCCCCCCCCCCCCCC', '', {'retval': {'already_cfretained': True}}), 'CFArrayGetCount': (sel32or64(b'l^{__CFArray=}', b'q^{__CFArray=}'),), 'CFArrayCreateMutableCopy': 
(sel32or64(b'^{__CFArray=}^{__CFAllocator=}l^{__CFArray=}', b'^{__CFArray=}^{__CFAllocator=}q^{__CFArray=}'), '', {'retval': {'already_cfretained': True}}), 'CFSetSetValue': (b'v^{__CFSet=}@',), 'CFSwapInt64HostToBig': (b'QQ',), 'CFCharacterSetUnion': (b'v^{__CFCharacterSet=}^{__CFCharacterSet=}',), 'CFFileSecurityCopyGroupUUID': (b'Z^{__CFFileSecurity=}^^{__CFUUID=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}}}), 'CFAttributedStringSetAttribute': (sel32or64(b'v^{__CFAttributedString=}{_CFRange=ll}^{__CFString=}@', b'v^{__CFAttributedString=}{_CFRange=qq}^{__CFString=}@'),), 'CFReadStreamOpen': (b'Z^{__CFReadStream=}',), 'CFXMLNodeGetVersion': (sel32or64(b'l^{__CFXMLNode=}', b'q^{__CFXMLNode=}'),), 'CFStringCreateWithBytesNoCopy': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^tlLZ^{__CFAllocator=}', b'^{__CFString=}^{__CFAllocator=}^tqIZ^{__CFAllocator=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFBundleGetTypeID': (sel32or64(b'L', b'Q'),), 'CFURLDestroyResource': (sel32or64(b'Z^{__CFURL=}^l', b'Z^{__CFURL=}^i'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFBagSetValue': (b'v^{__CFBag=}@',), 'CFURLWriteBookmarkDataToFile': (sel32or64(b'Z^{__CFData=}^{__CFURL=}L^^{__CFError=}', b'Z^{__CFData=}^{__CFURL=}Q^^{__CFError=}'), '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFFileDescriptorCreateRunLoopSource': (sel32or64(b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFFileDescriptor=}l', b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFFileDescriptor=}q'), '', {'retval': {'already_cfretained': True}}), 'CFGetTypeID': (sel32or64(b'L@', b'Q@'),), 'CFURLCopyFragment': (b'^{__CFString=}^{__CFURL=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFWriteStreamCreateWithFile': (b'^{__CFWriteStream=}^{__CFAllocator=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFAbsoluteTimeGetDifferenceAsGregorianUnits': (sel32or64(b'{_CFGregorianUnits=llllld}dd^{__CFTimeZone=}L', b'{_CFGregorianUnits=iiiiid}dd^{__CFTimeZone=}Q'),), 'CFReadStreamCreateWithFile': (b'^{__CFReadStream=}^{__CFAllocator=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFBundleCopyLocalizedString': (b'^{__CFString=}^{__CFBundle=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFErrorCopyRecoverySuggestion': (b'^{__CFString=}^{__CFError=}', '', {'retval': {'already_cfretained': True}}), 'CFURLCreateBookmarkData': (sel32or64(b'^{__CFData=}^{__CFAllocator=}^{__CFURL=}L^{__CFArray=}^{__CFURL=}^^{__CFError=}', b'^{__CFData=}^{__CFAllocator=}^{__CFURL=}Q^{__CFArray=}^{__CFURL=}^^{__CFError=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFStringCreateWithBytes': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^tlLZ', b'^{__CFString=}^{__CFAllocator=}^tqIZ'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFRunLoopSourceGetOrder': (sel32or64(b'l^{__CFRunLoopSource=}', b'q^{__CFRunLoopSource=}'),), 'CFBundleLoadExecutable': (b'Z^{__CFBundle=}',), 'CFStringCreateCopy': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFURLCreateFilePathURL': (b'^{__CFURL=}^{__CFAllocator=}^{__CFURL=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: 
{'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFUserNotificationDisplayNotice': (sel32or64(b'ldL^{__CFURL=}^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}', b'idQ^{__CFURL=}^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}'),), 'CFUserNotificationGetResponseValue': (sel32or64(b'^{__CFString=}^{__CFUserNotification=}^{__CFString=}l', b'^{__CFString=}^{__CFUserNotification=}^{__CFString=}q'),), 'CFRunLoopContainsTimer': (b'Z^{__CFRunLoop=}^{__CFRunLoopTimer=}^{__CFString=}',), 'CFPreferencesSetValue': (b'v^{__CFString=}@^{__CFString=}^{__CFString=}^{__CFString=}',), 'CFReadStreamGetStatus': (sel32or64(b'l^{__CFReadStream=}', b'q^{__CFReadStream=}'),), 'CFCopyTypeIDDescription': (sel32or64(b'^{__CFString=}L', b'^{__CFString=}Q'), '', {'retval': {'already_cfretained': True}}), 'CFConvertFloatHostToSwapped': (b'{_CFSwappedFloat32=I}f',), 'CFCalendarDecomposeAbsoluteTime': (b'Z^{__CFCalendar=}d^c', '', {'variadic': True}), 'CFBinaryHeapCreate': (sel32or64(b'^{__CFBinaryHeap=}^{__CFAllocator=}l^{_CFBinaryHeapCallBacks=l^?^?^?^?}^{_CFBinaryHeapCompareContext=l^v^?^?^?}', b'^{__CFBinaryHeap=}^{__CFAllocator=}q^{_CFBinaryHeapCallBacks=q^?^?^?^?}^{_CFBinaryHeapCompareContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFStringCreateExternalRepresentation': (sel32or64(b'^{__CFData=}^{__CFAllocator=}^{__CFString=}LC', b'^{__CFData=}^{__CFAllocator=}^{__CFString=}IC'), '', {'retval': {'already_cfretained': True}}), 'CFDateFormatterCreateISO8601Formatter': (b'@@L', '', {'retval': {'already_cfretained': True}}), 'CFStringCreateWithFormat': (b'^{__CFString=}^{__CFAllocator=}^{__CFDictionary=}^{__CFString=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'printf_format': True}}, 'variadic': True}), 'CFBundleCopyResourceURLsOfTypeForLocalization': (b'^{__CFArray=}^{__CFBundle=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFConvertFloat32HostToSwapped': (b'{_CFSwappedFloat32=I}f',), 'CFRunLoopObserverCreateWithHandler': (sel32or64(b'^{__CFRunLoopObserver=}^{__CFAllocator=}LZl@?', b'^{__CFRunLoopObserver=}^{__CFAllocator=}QZq@?'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^{__CFRunLoopObserver=}'}, 2: {'type': sel32or64(b'I', b'Q')}}}, 'block': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFRunLoopObserver=}'}, 1: {'type': b'L'}}}}}}), 'CFDataCreate': (sel32or64(b'^{__CFData=}^{__CFAllocator=}^vl', b'^{__CFData=}^{__CFAllocator=}^vq'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFSwapInt16HostToLittle': (b'SS',), 'CFSetCreate': (sel32or64(b'^{__CFSet=}^{__CFAllocator=}^^vl^{_CFSetCallBacks=l^?^?^?^?^?}', b'^{__CFSet=}^{__CFAllocator=}^^vq^{_CFSetCallBacks=q^?^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFGregorianDateGetAbsoluteTime': (sel32or64(b'd{_CFGregorianDate=lccccd}^{__CFTimeZone=}', b'd{_CFGregorianDate=iccccd}^{__CFTimeZone=}'),), 'CFStringGetListOfAvailableEncodings': (sel32or64(b'^L', b'^I'), '', {'retval': {'c_array_of_variable_length': True}}), 'CFRunLoopTimerGetContext': (sel32or64(b'v^{__CFRunLoopTimer=}^{_CFRunLoopTimerContext=l^v^?^?^?}', b'v^{__CFRunLoopTimer=}^{_CFRunLoopTimerContext=q^v^?^?^?}'),), 'CFXMLParserAbort': (sel32or64(b'v^{__CFXMLParser=}l^{__CFString=}', b'v^{__CFXMLParser=}q^{__CFString=}'),), 
'CFPropertyListCreateFromXMLData': (sel32or64(b'@^{__CFAllocator=}^{__CFData=}L^^{__CFString=}', b'@^{__CFAllocator=}^{__CFData=}Q^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'o'}}}), 'CFStringFindAndReplace': (sel32or64(b'l^{__CFString=}^{__CFString=}^{__CFString=}{_CFRange=ll}L', b'q^{__CFString=}^{__CFString=}^{__CFString=}{_CFRange=qq}Q'),), 'CFDictionaryGetTypeID': (sel32or64(b'L', b'Q'),), 'CFBundleGetDevelopmentRegion': (b'^{__CFString=}^{__CFBundle=}',), 'CFBundleGetMainBundle': (b'^{__CFBundle=}',), 'CFXMLNodeCreate': (sel32or64(b'^{__CFXMLNode=}^{__CFAllocator=}l^{__CFString=}^vl', b'^{__CFXMLNode=}^{__CFAllocator=}q^{__CFString=}^vq'), '', {'retval': {'already_cfretained': True}}), 'CFBundleUnloadExecutable': (b'v^{__CFBundle=}',), 'CFArrayGetCountOfValue': (sel32or64(b'l^{__CFArray=}{_CFRange=ll}@', b'q^{__CFArray=}{_CFRange=qq}@'),), 'CFRunLoopTimerIsValid': (b'Z^{__CFRunLoopTimer=}',), 'CFConvertFloat64SwappedToHost': (b'd{_CFSwappedFloat64=Q}',), 'CFReadStreamHasBytesAvailable': (b'Z^{__CFReadStream=}',), 'CFDataSetLength': (sel32or64(b'v^{__CFData=}l', b'v^{__CFData=}q'),), 'CFStringTokenizerCreate': (sel32or64(b'^{__CFStringTokenizer=}^{__CFAllocator=}^{__CFString=}{_CFRange=ll}L^{__CFLocale=}', b'^{__CFStringTokenizer=}^{__CFAllocator=}^{__CFString=}{_CFRange=qq}Q^{__CFLocale=}'), '', {'retval': {'already_cfretained': True}}), 'CFMachPortGetTypeID': (sel32or64(b'L', b'Q'),), 'CFTreeGetChildAtIndex': (sel32or64(b'^{__CFTree=}^{__CFTree=}l', b'^{__CFTree=}^{__CFTree=}q'),), 'CFSwapInt16BigToHost': (b'SS',), 'CFStringCreateWithCharacters': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^Tl', b'^{__CFString=}^{__CFAllocator=}^Tq'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CFUserNotificationCreateRunLoopSource': (sel32or64(b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFUserNotification=}^?l', b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFUserNotification=}^?q'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFUserNotification=}'}, 1: {'type': sel32or64(b'i', b'q')}}}, 'function_pointer_retained': True}}}), 'CFStringTrimWhitespace': (b'v^{__CFString=}',), 'CFMessagePortCreateRemote': (b'^{__CFMessagePort=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFStringDelete': (sel32or64(b'v^{__CFString=}{_CFRange=ll}', b'v^{__CFString=}{_CFRange=qq}'),), 'CFBundleCopyResourceURLInDirectory': (b'^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFTreeFindRoot': (b'^{__CFTree=}^{__CFTree=}',), 'CFLocaleCopyDisplayNameForPropertyValue': (b'^{__CFString=}^{__CFLocale=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFStringTokenizerGetTypeID': (sel32or64(b'L', b'Q'),), 'CFSocketGetSocketFlags': (sel32or64(b'L^{__CFSocket=}', b'Q^{__CFSocket=}'),), 'CFLocaleGetLanguageLineDirection': (sel32or64(b'l^{__CFString=}', b'q^{__CFString=}'),), 'CFCopyHomeDirectoryURL': (b'^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFUUIDCreateFromString': (b'^{__CFUUID=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFPreferencesCopyApplicationList': (b'^{__CFArray=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFErrorCopyUserInfo': (b'^{__CFDictionary=}^{__CFError=}', '', 
{'retval': {'already_cfretained': True}}), 'CFMachPortIsValid': (b'Z^{__CFMachPort=}',), 'CFCalendarComposeAbsoluteTime': (b'Z^{__CFCalendar=}^d^c', '', {'variadic': True}), 'CFReadStreamUnscheduleFromRunLoop': (b'v^{__CFReadStream=}^{__CFRunLoop=}^{__CFString=}',), 'CFDictionaryGetValue': (b'@^{__CFDictionary=}@',), 'CFReadStreamCreateWithBytesNoCopy': (sel32or64(b'^{__CFReadStream=}^{__CFAllocator=}^vl^{__CFAllocator=}', b'^{__CFReadStream=}^{__CFAllocator=}^vq^{__CFAllocator=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFSwapInt32LittleToHost': (b'II',), 'CFBinaryHeapRemoveAllValues': (b'v^{__CFBinaryHeap=}',), 'CFWriteStreamGetStatus': (sel32or64(b'l^{__CFWriteStream=}', b'q^{__CFWriteStream=}'),), 'CFURLCreateFileReferenceURL': (b'^{__CFURL=}^{__CFAllocator=}^{__CFURL=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFRunLoopObserverGetActivities': (sel32or64(b'L^{__CFRunLoopObserver=}', b'Q^{__CFRunLoopObserver=}'),), 'CFTimeZoneCreateWithTimeIntervalFromGMT': (b'^{__CFTimeZone=}^{__CFAllocator=}d', '', {'retval': {'already_cfretained': True}}), 'CFPropertyListCreateWithStream': (sel32or64(b'@^{__CFAllocator=}^{__CFReadStream=}lL^l^^{__CFError=}', b'@^{__CFAllocator=}^{__CFReadStream=}qQ^q^^{__CFError=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'type_modifier': 'o'}, 5: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFFileSecurityClearProperties': (sel32or64(b'Z^{__CFFileSecurity=}L', b'Z^{__CFFileSecurity=}Q'),), 'CFURLCopyResourcePropertyForKey': (b'Z^{__CFURL=}^{__CFString=}^@^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}, 3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFDateFormatterCopyProperty': (b'@^{__CFDateFormatter=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFSwapInt64LittleToHost': (b'QQ',), 'CFUserNotificationReceiveResponse': (sel32or64(b'l^{__CFUserNotification=}d^L', b'i^{__CFUserNotification=}d^Q'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFNumberFormatterGetLocale': (b'^{__CFLocale=}^{__CFNumberFormatter=}',), 'CFURLStartAccessingSecurityScopedResource': (b'Z^{__CFURL=}',), 'CFMachPortCreateRunLoopSource': (sel32or64(b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFMachPort=}l', b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFMachPort=}q'), '', {'retval': {'already_cfretained': True}}), 'CFURLGetBytes': (sel32or64(b'l^{__CFURL=}^Cl', b'q^{__CFURL=}^Cq'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'c_array_length_in_result': True, 'type_modifier': 'o'}}}), 'CFFileDescriptorGetNativeDescriptor': (b'i^{__CFFileDescriptor=}',), 'CFTimeZoneSetAbbreviationDictionary': (b'v^{__CFDictionary=}',), 'CFUserNotificationGetTypeID': (sel32or64(b'L', b'Q'),), 'CFTimeZoneCopySystem': (b'^{__CFTimeZone=}', '', {'retval': {'already_cfretained': True}}), 'CFShowStr': (b'v^{__CFString=}',), 'CFURLEnumeratorGetTypeID': (sel32or64(b'L', b'Q'),), 'CFBundleCopyResourceURL': (b'^{__CFURL=}^{__CFBundle=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFBitVectorGetBits': (sel32or64(b'v^{__CFBitVector=}{_CFRange=ll}^C', b'v^{__CFBitVector=}{_CFRange=qq}^C'), '', {'arguments': {2: {'c_array_of_variable_length': True, 'type_modifier': 'o'}}}), 
'CFFileSecuritySetGroup': (b'Z^{__CFFileSecurity=}I',), 'CFNotificationCenterGetLocalCenter': (b'^{__CFNotificationCenter=}',), 'CFTimeZoneGetData': (b'^{__CFData=}^{__CFTimeZone=}',), 'CFArrayReplaceValues': (sel32or64(b'v^{__CFArray=}{_CFRange=ll}^@l', b'v^{__CFArray=}{_CFRange=qq}^@q'), '', {'arguments': {2: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CFStringGetCharactersPtr': (b'^T^{__CFString=}', '', {'retval': {'c_array_delimited_by_null': True}}), 'CFStringTokenizerGoToTokenAtIndex': (sel32or64(b'L^{__CFStringTokenizer=}l', b'Q^{__CFStringTokenizer=}q'),), 'CFBundleGetIdentifier': (b'^{__CFString=}^{__CFBundle=}',), 'CFTreeApplyFunctionToChildren': (b'v^{__CFTree=}^?@', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}}}, 'callable_retained': False}}}), 'CFDataGetTypeID': (sel32or64(b'L', b'Q'),), 'CFRunLoopAddCommonMode': (b'v^{__CFRunLoop=}^{__CFString=}',), 'CFCalendarSetLocale': (b'v^{__CFCalendar=}^{__CFLocale=}',), 'CFStreamCreatePairWithSocketToHost': (sel32or64(b'v^{__CFAllocator=}^{__CFString=}L^^{__CFReadStream=}^^{__CFWriteStream=}', b'v^{__CFAllocator=}^{__CFString=}I^^{__CFReadStream=}^^{__CFWriteStream=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'CFNumberCreate': (sel32or64(b'^{__CFNumber=}^{__CFAllocator=}l^v', b'^{__CFNumber=}^{__CFAllocator=}q^v'), '', {'retval': {'already_cfretained': True}}), 'CFCharacterSetAddCharactersInRange': (sel32or64(b'v^{__CFCharacterSet=}{_CFRange=ll}', b'v^{__CFCharacterSet=}{_CFRange=qq}'),), 'CFMessagePortIsRemote': (b'Z^{__CFMessagePort=}',), 'CFURLGetTypeID': (sel32or64(b'L', b'Q'),), 'CFReadStreamGetError': (sel32or64(b'{_CFStreamError=ll}^{__CFReadStream=}', b'{_CFStreamError=qi}^{__CFReadStream=}'),), 'CFBagCreateMutableCopy': (sel32or64(b'^{__CFBag=}^{__CFAllocator=}l^{__CFBag=}', b'^{__CFBag=}^{__CFAllocator=}q^{__CFBag=}'), '', {'retval': {'already_cfretained': True}}), 'CFWriteStreamCreateWithAllocatedBuffers': (b'^{__CFWriteStream=}^{__CFAllocator=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'CFBagGetTypeID': (sel32or64(b'L', b'Q'),), 'CFCalendarGetTimeRangeOfUnit': (sel32or64(b'Z^{__CFCalendar=}Ld^d^d', b'Z^{__CFCalendar=}Qd^d^d'), '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'CFBundlePreflightExecutable': (b'Z^{__CFBundle=}^^{__CFError=}', '', {'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFArrayRemoveAllValues': (b'v^{__CFArray=}',), 'CFStringGetLineBounds': (sel32or64(b'v^{__CFString=}{_CFRange=ll}^l^l^l', b'v^{__CFString=}{_CFRange=qq}^q^q^q'), '', {'arguments': {2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'CFTimeZoneCopyAbbreviation': (b'^{__CFString=}^{__CFTimeZone=}d', '', {'retval': {'already_cfretained': True}}), 'CFFileSecurityGetGroup': (b'Z^{__CFFileSecurity=}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFCharacterSetGetPredefined': (sel32or64(b'^{__CFCharacterSet=}l', b'^{__CFCharacterSet=}q'),), 'CFArrayRemoveValueAtIndex': (sel32or64(b'v^{__CFArray=}l', b'v^{__CFArray=}q'),), 'CFStringGetSmallestEncoding': (sel32or64(b'L^{__CFString=}', b'I^{__CFString=}'),), 'CFTreeAppendChild': (b'v^{__CFTree=}^{__CFTree=}',), 'CFURLCreatePropertyFromResource': (sel32or64(b'@^{__CFAllocator=}^{__CFURL=}^{__CFString=}^l', b'@^{__CFAllocator=}^{__CFURL=}^{__CFString=}^i'), '', {'retval': {'already_cfretained': True}, 
'arguments': {3: {'type_modifier': 'o'}}}), 'CFURLCopyHostName': (b'^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFAbsoluteTimeGetDayOfWeek': (sel32or64(b'ld^{__CFTimeZone=}', b'id^{__CFTimeZone=}'),), 'CFSwapInt64HostToLittle': (b'QQ',), 'CFArrayExchangeValuesAtIndices': (sel32or64(b'v^{__CFArray=}ll', b'v^{__CFArray=}qq'),), 'CFTimeZoneGetTypeID': (sel32or64(b'L', b'Q'),), 'CFRunLoopObserverCreate': (sel32or64(b'^{__CFRunLoopObserver=}^{__CFAllocator=}LZl^?^{_CFRunLoopObserverContext=l^v^?^?^?}', b'^{__CFRunLoopObserver=}^{__CFAllocator=}QZq^?^{_CFRunLoopObserverContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFRunLoopObserver=}'}, 1: {'type': b'Q'}, 2: {'type': b'^v'}}}}}}), 'CFRunLoopTimerGetTolerance': (b'd^{__CFRunLoopTimer=}',), 'CFBinaryHeapCreateCopy': (sel32or64(b'^{__CFBinaryHeap=}^{__CFAllocator=}l^{__CFBinaryHeap=}', b'^{__CFBinaryHeap=}^{__CFAllocator=}q^{__CFBinaryHeap=}'), '', {'retval': {'already_cfretained': True}}), 'CFStringCreateFromExternalRepresentation': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^{__CFData=}L', b'^{__CFString=}^{__CFAllocator=}^{__CFData=}I'), '', {'retval': {'already_cfretained': True}}), 'CFTimeZoneResetSystem': (b'v',), 'CFStringNormalize': (sel32or64(b'v^{__CFString=}l', b'v^{__CFString=}q'),), 'CFRunLoopContainsSource': (b'Z^{__CFRunLoop=}^{__CFRunLoopSource=}^{__CFString=}',), 'CFLocaleCreateCanonicalLanguageIdentifierFromString': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopGetTypeID': (sel32or64(b'L', b'Q'),), 'CFDictionaryApplyFunction': (b'v^{__CFDictionary=}^?@', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'callable_retained': False}}}), 'CFLocaleGetLanguageCharacterDirection': (sel32or64(b'l^{__CFString=}', b'q^{__CFString=}'),), 'CFStringCreateArrayWithFindResults': (sel32or64(b'^{__CFArray=}^{__CFAllocator=}^{__CFString=}^{__CFString=}{_CFRange=ll}L', b'^{__CFArray=}^{__CFAllocator=}^{__CFString=}^{__CFString=}{_CFRange=qq}Q'), '', {'retval': {'already_cfretained': True}}), 'CFArrayGetTypeID': (sel32or64(b'L', b'Q'),), 'CFNumberFormatterSetFormat': (b'v^{__CFNumberFormatter=}^{__CFString=}',), 'CFStreamCreatePairWithSocket': (b'v^{__CFAllocator=}i^^{__CFReadStream=}^^{__CFWriteStream=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CFBitVectorCreateMutableCopy': (sel32or64(b'^{__CFBitVector=}^{__CFAllocator=}l^{__CFBitVector=}', b'^{__CFBitVector=}^{__CFAllocator=}q^{__CFBitVector=}'), '', {'retval': {'already_cfretained': True}}), 'CFPreferencesGetAppBooleanValue': (b'Z^{__CFString=}^{__CFString=}^Z', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CFSocketSetAddress': (sel32or64(b'l^{__CFSocket=}^{__CFData=}', b'q^{__CFSocket=}^{__CFData=}'),), 'CFRunLoopCopyAllModes': (b'^{__CFArray=}^{__CFRunLoop=}', '', {'retval': {'already_cfretained': True}}), 'CFURLCreateStringByReplacingPercentEscapesUsingEncoding': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}L', b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}I'), '', {'retval': {'already_cfretained': True}}), 'CFRunLoopRun': (b'v',), 'CFPreferencesGetAppIntegerValue': (sel32or64(b'l^{__CFString=}^{__CFString=}^Z', b'q^{__CFString=}^{__CFString=}^Z'), '', {'arguments': {2: {'type_modifier': 
'o'}}}), 'CFMessagePortSetName': (b'Z^{__CFMessagePort=}^{__CFString=}',), 'CFDateFormatterCreate': (sel32or64(b'^{__CFDateFormatter=}^{__CFAllocator=}^{__CFLocale=}ll', b'^{__CFDateFormatter=}^{__CFAllocator=}^{__CFLocale=}qq'), '', {'retval': {'already_cfretained': True}}), 'CFUUIDCreateString': (b'^{__CFString=}^{__CFAllocator=}^{__CFUUID=}', '', {'retval': {'already_cfretained': True}}), 'CFNumberFormatterCreate': (sel32or64(b'^{__CFNumberFormatter=}^{__CFAllocator=}^{__CFLocale=}l', b'^{__CFNumberFormatter=}^{__CFAllocator=}^{__CFLocale=}q'), '', {'retval': {'already_cfretained': True}}), 'CFXMLParserGetStatusCode': (sel32or64(b'l^{__CFXMLParser=}', b'q^{__CFXMLParser=}'),), 'CFCalendarGetMinimumRangeOfUnit': (sel32or64(b'{_CFRange=ll}^{__CFCalendar=}L', b'{_CFRange=qq}^{__CFCalendar=}Q'),), 'CFBitVectorCreateCopy': (b'^{__CFBitVector=}^{__CFAllocator=}^{__CFBitVector=}', '', {'retval': {'already_cfretained': True}}), 'CFTimeZoneGetSecondsFromGMT': (b'd^{__CFTimeZone=}d',), 'CFRunLoopTimerDoesRepeat': (b'Z^{__CFRunLoopTimer=}',), 'CFCharacterSetCreateInvertedSet': (b'^{__CFCharacterSet=}^{__CFAllocator=}^{__CFCharacterSet=}', '', {'retval': {'already_cfretained': True}}), 'CFStringGetParagraphBounds': (sel32or64(b'v^{__CFString=}{_CFRange=ll}^l^l^l', b'v^{__CFString=}{_CFRange=qq}^q^q^q'), '', {'arguments': {2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'CFStringGetSystemEncoding': (sel32or64(b'L', b'I'),), 'CFBundleCopyResourceURLsOfTypeInDirectory': (b'^{__CFArray=}^{__CFURL=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFAttributedStringCreateMutableCopy': (sel32or64(b'^{__CFAttributedString=}^{__CFAllocator=}l^{__CFAttributedString=}', b'^{__CFAttributedString=}^{__CFAllocator=}q^{__CFAttributedString=}'), '', {'retval': {'already_cfretained': True}}), 'CFStringTokenizerGetCurrentSubTokens': (sel32or64(b'l^{__CFStringTokenizer=}^{_CFRange=ll}l^{__CFArray=}', b'q^{__CFStringTokenizer=}^{_CFRange=qq}q^{__CFArray=}'), '', {'arguments': {1: {'c_array_length_in_result': True, 'c_array_length_in_arg': 2, 'type_modifier': 'o'}}}), 'CFBundleCopyBundleURL': (b'^{__CFURL=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFXMLNodeGetInfoPtr': (b'^v^{__CFXMLNode=}',), 'CFSocketCreateConnectedToSocketSignature': (sel32or64(b'^{__CFSocket=}^{__CFAllocator=}^{_CFSocketSignature=lll^{__CFData=}}L^?^{_CFSocketContext=l^v^?^?^?}d', b'^{__CFSocket=}^{__CFAllocator=}^{_CFSocketSignature=iii^{__CFData=}}Q^?^{_CFSocketContext=q^v^?^?^?}d'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFSocket=}'}, 1: {'type': b'Q'}, 2: {'type': b'^{__CFData=}'}, 3: {'type': b'^v'}, 4: {'type': b'^v'}}}}}}), 'CFURLCreateDataAndPropertiesFromResource': (sel32or64(b'Z^{__CFAllocator=}^{__CFURL=}^^{__CFData=}^^{__CFDictionary=}^{__CFArray=}^l', b'Z^{__CFAllocator=}^{__CFURL=}^^{__CFData=}^^{__CFDictionary=}^{__CFArray=}^i'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 5: {'type_modifier': 'o'}}}), 'CFAbsoluteTimeGetWeekOfYear': (sel32or64(b'ld^{__CFTimeZone=}', b'id^{__CFTimeZone=}'),), 'CFDateFormatterSetProperty': (b'v^{__CFDateFormatter=}^{__CFString=}@',), 'CFTreeGetTypeID': (sel32or64(b'L', b'Q'),), 'CFRunLoopStop': (b'v^{__CFRunLoop=}',), 'CFNotificationCenterPostNotification': (b'v^{__CFNotificationCenter=}^{__CFString=}@^{__CFDictionary=}Z',), 'CFXMLTreeCreateFromData': 
(sel32or64(b'^{__CFTree=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Ll', b'^{__CFTree=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Qq'), '', {'retval': {'already_cfretained': True}}), 'CFBundleCopyBuiltInPlugInsURL': (b'^{__CFURL=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFXMLTreeCreateWithDataFromURL': (sel32or64(b'^{__CFTree=}^{__CFAllocator=}^{__CFURL=}Ll', b'^{__CFTree=}^{__CFAllocator=}^{__CFURL=}Qq'), '', {'retval': {'already_cfretained': True}}), 'CFLocaleCreateComponentsFromLocaleIdentifier': (b'^{__CFDictionary=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFPropertyListIsValid': (sel32or64(b'Z@l', b'Z@q'),), 'CFNumberFormatterGetDecimalInfoForCurrencyCode': (b'Z^{__CFString=}^i^d', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CFSocketEnableCallBacks': (sel32or64(b'v^{__CFSocket=}L', b'v^{__CFSocket=}Q'),), 'CFSetCreateCopy': (b'^{__CFSet=}^{__CFAllocator=}^{__CFSet=}', '', {'retval': {'already_cfretained': True}}), 'CFSwapInt64BigToHost': (b'QQ',), 'CFReadStreamGetTypeID': (sel32or64(b'L', b'Q'),), 'CFFileDescriptorCreate': (sel32or64(b'^{__CFFileDescriptor=}^{__CFAllocator=}iZ^?^{_CFFileDescriptorContext=l^v^?^?^?}', b'^{__CFFileDescriptor=}^{__CFAllocator=}iZ^?^{_CFFileDescriptorContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFFileDescriptor=}'}, 1: {'type': b'Q'}, 2: {'type': b'^v'}}}}}}), 'CFBagCreateMutable': (sel32or64(b'^{__CFBag=}^{__CFAllocator=}l^{_CFBagCallBacks=l^?^?^?^?^?}', b'^{__CFBag=}^{__CFAllocator=}q^{_CFBagCallBacks=q^?^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFURLCreateWithString': (b'^{__CFURL=}^{__CFAllocator=}^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFDictionaryAddValue': (b'v^{__CFDictionary=}@@',), 'CFFileSecurityCreateCopy': (b'^{__CFFileSecurity=}^{__CFAllocator=}^{__CFFileSecurity=}', '', {'retval': {'already_cfretained': True}}), 'CFCharacterSetRemoveCharactersInString': (b'v^{__CFCharacterSet=}^{__CFString=}',), 'CFRunLoopRemoveObserver': (b'v^{__CFRunLoop=}^{__CFRunLoopObserver=}^{__CFString=}',), 'CFAttributedStringGetMutableString': (b'^{__CFString=}^{__CFAttributedString=}',), 'CFDictionaryCreate': (sel32or64(b'^{__CFDictionary=}^{__CFAllocator=}^^v^^vl^{_CFDictionaryKeyCallBacks=l^?^?^?^?^?}^{_CFDictionaryValueCallBacks=l^?^?^?^?}', b'^{__CFDictionary=}^{__CFAllocator=}^^v^^vq^{_CFDictionaryKeyCallBacks=q^?^?^?^?^?}^{_CFDictionaryValueCallBacks=q^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFNumberGetByteSize': (sel32or64(b'l^{__CFNumber=}', b'q^{__CFNumber=}'),), 'CFXMLParserCopyErrorDescription': (b'^{__CFString=}^{__CFXMLParser=}', '', {'retval': {'already_cfretained': True}}), 'CFCharacterSetCreateWithBitmapRepresentation': (b'^{__CFCharacterSet=}^{__CFAllocator=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CFBundleGetValueForInfoDictionaryKey': (b'@^{__CFBundle=}^{__CFString=}',), 'CFXMLParserGetCallBacks': (sel32or64(b'v^{__CFXMLParser=}^{_CFXMLParserCallBacks=l^?^?^?^?^?}', b'v^{__CFXMLParser=}^{_CFXMLParserCallBacks=q^?^?^?^?^?}'),), 'CFWriteStreamUnscheduleFromRunLoop': (b'v^{__CFWriteStream=}^{__CFRunLoop=}^{__CFString=}',), 'CFAttributedStringCreateCopy': (b'^{__CFAttributedString=}^{__CFAllocator=}^{__CFAttributedString=}', '', {'retval': {'already_cfretained': True}}), 'CFBitVectorSetBitAtIndex': (sel32or64(b'v^{__CFBitVector=}lL', 
b'v^{__CFBitVector=}qI'),), 'CFReadStreamSetDispatchQueue': (b'v^{__CFReadStream=}^{dispatch_queue_s=}',), 'CFMessagePortSetDispatchQueue': (b'v^{__CFMessagePort=}^{dispatch_queue_s=}',), 'CFStringGetNameOfEncoding': (sel32or64(b'^{__CFString=}L', b'^{__CFString=}I'),), 'CFBitVectorSetAllBits': (sel32or64(b'v^{__CFBitVector=}L', b'v^{__CFBitVector=}I'),), 'CFSocketGetContext': (sel32or64(b'v^{__CFSocket=}^{_CFSocketContext=l^v^?^?^?}', b'v^{__CFSocket=}^{_CFSocketContext=q^v^?^?^?}'),), 'CFLocaleGetWindowsLocaleCodeFromLocaleIdentifier': (b'I^{__CFString=}',), 'CFXMLParserGetLineNumber': (sel32or64(b'l^{__CFXMLParser=}', b'q^{__CFXMLParser=}'),), 'CFTimeZoneGetDaylightSavingTimeOffset': (b'd^{__CFTimeZone=}d',), 'CFPreferencesAddSuitePreferencesToApp': (b'v^{__CFString=}^{__CFString=}',), 'CFURLGetFileSystemRepresentation': (sel32or64(b'Z^{__CFURL=}Z^tl', b'Z^{__CFURL=}Z^tq'), '', {'arguments': {2: {'c_array_delimited_by_null': True, 'type_modifier': 'o', 'c_array_length_in_arg': 3}}}), 'CFSetApplyFunction': (b'v^{__CFSet=}^?@', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}}}, 'callable_retained': False}}}), 'CFStringCapitalize': (b'v^{__CFString=}^{__CFLocale=}',), 'CFBinaryHeapGetMinimumIfPresent': (b'Z^{__CFBinaryHeap=}^@', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CFURLCopyPathExtension': (b'^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFLocaleCopyISOCountryCodes': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CFLocaleCreateCopy': (b'^{__CFLocale=}^{__CFAllocator=}^{__CFLocale=}', '', {'retval': {'already_cfretained': True}}), 'CFURLEnumeratorSkipDescendents': (b'v^{__CFURLEnumerator=}',), 'CFBinaryHeapAddValue': (b'v^{__CFBinaryHeap=}@',), 'CFBinaryHeapGetValues': (b'v^{__CFBinaryHeap=}^^v',), 'CFDateFormatterGetAbsoluteTimeFromString': (sel32or64(b'Z^{__CFDateFormatter=}^{__CFString=}^{_CFRange=ll}^d', b'Z^{__CFDateFormatter=}^{__CFString=}^{_CFRange=qq}^d'), '', {'arguments': {2: {'type_modifier': 'N'}, 3: {'type_modifier': 'o'}}}), 'CFTreeSortChildren': (b'v^{__CFTree=}^?@', '', {'arguments': {1: {'callable': {'retval': {'type': b'l'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'callable_retained': False}}}), 'CFURLCopyResourcePropertiesForKeys': (b'^{__CFDictionary=}^{__CFURL=}^{__CFArray=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFNumberCompare': (sel32or64(b'l^{__CFNumber=}^{__CFNumber=}^v', b'q^{__CFNumber=}^{__CFNumber=}^v'),), 'CFURLHasDirectoryPath': (b'Z^{__CFURL=}',), 'CFSwapInt16HostToBig': (b'SS',), 'CFXMLCreateStringByEscapingEntities': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CFPreferencesSetMultiple': (b'v^{__CFDictionary=}^{__CFArray=}^{__CFString=}^{__CFString=}^{__CFString=}',), 'CFBagGetValue': (b'@^{__CFBag=}@',), 'CFBundleGetBundleWithIdentifier': (b'^{__CFBundle=}^{__CFString=}',), 'CFMakeCollectable': (b'@@',), 'CFSetGetTypeID': (sel32or64(b'L', b'Q'),), 'CFStringAppendFormat': (b'v^{__CFString=}^{__CFDictionary=}^{__CFString=}', '', {'arguments': {2: {'printf_format': True}}, 'variadic': True}), 'CFNumberGetValue': (sel32or64(b'Z^{__CFNumber=}l^v', b'Z^{__CFNumber=}q^v'),), 'CFStringTokenizerSetString': (sel32or64(b'v^{__CFStringTokenizer=}^{__CFString=}{_CFRange=ll}', 
b'v^{__CFStringTokenizer=}^{__CFString=}{_CFRange=qq}'),), 'CFRunLoopGetMain': (b'^{__CFRunLoop=}',), 'CFDictionaryRemoveAllValues': (b'v^{__CFDictionary=}',), 'CFPropertyListCreateDeepCopy': (sel32or64(b'@^{__CFAllocator=}@L', b'@^{__CFAllocator=}@Q'), '', {'retval': {'already_cfretained': True}}), 'CFUUIDGetTypeID': (sel32or64(b'L', b'Q'),), 'CFNotificationCenterPostNotificationWithOptions': (sel32or64(b'v^{__CFNotificationCenter=}^{__CFString=}@^{__CFDictionary=}L', b'v^{__CFNotificationCenter=}^{__CFString=}@^{__CFDictionary=}Q'),), 'CFStringLowercase': (b'v^{__CFString=}^{__CFLocale=}',), 'CFCalendarSetMinimumDaysInFirstWeek': (sel32or64(b'v^{__CFCalendar=}l', b'v^{__CFCalendar=}q'),), 'CFRetain': (b'@@',), 'CFStringGetCharacters': (sel32or64(b'v^{__CFString=}{_CFRange=ll}^T', b'v^{__CFString=}{_CFRange=qq}^T'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'CFTimeZoneGetName': (b'^{__CFString=}^{__CFTimeZone=}',), 'CFURLCopyStrictPath': (b'^{__CFString=}^{__CFURL=}^Z', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}}}), 'CFBundleIsExecutableLoaded': (b'Z^{__CFBundle=}',), 'CFArrayAppendArray': (sel32or64(b'v^{__CFArray=}^{__CFArray=}{_CFRange=ll}', b'v^{__CFArray=}^{__CFArray=}{_CFRange=qq}'),), 'CFNumberFormatterGetTypeID': (sel32or64(b'L', b'Q'),), 'CFDateGetTypeID': (sel32or64(b'L', b'Q'),), 'CFPreferencesCopyMultiple': (b'^{__CFDictionary=}^{__CFArray=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFStringGetTypeID': (sel32or64(b'L', b'Q'),), 'CFBinaryHeapGetTypeID': (sel32or64(b'L', b'Q'),), 'CFTimeZoneCopyLocalizedName': (sel32or64(b'^{__CFString=}^{__CFTimeZone=}l^{__CFLocale=}', b'^{__CFString=}^{__CFTimeZone=}q^{__CFLocale=}'), '', {'retval': {'already_cfretained': True}}), 'CFCalendarCopyLocale': (b'^{__CFLocale=}^{__CFCalendar=}', '', {'retval': {'already_cfretained': True}}), 'CFFileDescriptorDisableCallBacks': (sel32or64(b'v^{__CFFileDescriptor=}L', b'v^{__CFFileDescriptor=}Q'),), 'CFBundleLoadExecutableAndReturnError': (b'Z^{__CFBundle=}^^{__CFError=}', '', {'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CFNumberFormatterCreateNumberFromString': (sel32or64(b'^{__CFNumber=}^{__CFAllocator=}^{__CFNumberFormatter=}^{__CFString=}^{_CFRange=ll}L', b'^{__CFNumber=}^{__CFAllocator=}^{__CFNumberFormatter=}^{__CFString=}^{_CFRange=qq}Q'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'N'}}}), 'CFAttributedStringGetAttribute': (sel32or64(b'@^{__CFAttributedString=}l^{__CFString=}^{_CFRange=ll}', b'@^{__CFAttributedString=}q^{__CFString=}^{_CFRange=qq}'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'CFURLCopyLastPathComponent': (b'^{__CFString=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CFBundleCopyResourcesDirectoryURL': (b'^{__CFURL=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopGetCurrent': (b'^{__CFRunLoop=}',), 'CFDateFormatterCreateDateFromString': (sel32or64(b'^{__CFDate=}^{__CFAllocator=}^{__CFDateFormatter=}^{__CFString=}^{_CFRange=ll}', b'^{__CFDate=}^{__CFAllocator=}^{__CFDateFormatter=}^{__CFString=}^{_CFRange=qq}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'N'}}}), 'CFURLEnumeratorGetDescendentLevel': (sel32or64(b'l^{__CFURLEnumerator=}', b'q^{__CFURLEnumerator=}'),), 'CFStringGetSurrogatePairForLongCharacter': (sel32or64(b'ZL^T', b'ZI^T'), '', {'arguments': {1: 
{'c_array_of_fixed_length': 2, 'type_modifier': 'o'}}}), 'CFBagApplyFunction': (b'v^{__CFBag=}^?@', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}}}, 'callable_retained': False}}}), 'CFBundleCopySharedSupportURL': (b'^{__CFURL=}^{__CFBundle=}', '', {'retval': {'already_cfretained': True}}), 'CFCharacterSetCreateWithCharactersInString': (b'^{__CFCharacterSet=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFBitVectorGetTypeID': (sel32or64(b'L', b'Q'),), 'CFPreferencesCopyKeyList': (b'^{__CFArray=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFDateFormatterGetTypeID': (sel32or64(b'L', b'Q'),), 'CFRunLoopSourceGetContext': (sel32or64(b'v^{__CFRunLoopSource=}^{_CFRunLoopSourceContext=l^v^?^?^?^?^?^?^?^?}', b'v^{__CFRunLoopSource=}^{_CFRunLoopSourceContext=q^v^?^?^?^?^?^?^?^?}'),), 'CFBundleGetAllBundles': (b'^{__CFArray=}',), 'CFFileSecuritySetGroupUUID': (b'Z^{__CFFileSecurity=}^{__CFUUID=}',), 'CFCharacterSetCreateMutableCopy': (b'^{__CFCharacterSet=}^{__CFAllocator=}^{__CFCharacterSet=}', '', {'retval': {'already_cfretained': True}}), 'CFStringGetRangeOfComposedCharactersAtIndex': (sel32or64(b'{_CFRange=ll}^{__CFString=}l', b'{_CFRange=qq}^{__CFString=}q'),), 'CFAttributedStringBeginEditing': (b'v^{__CFAttributedString=}',), 'CFNumberFormatterGetFormat': (b'^{__CFString=}^{__CFNumberFormatter=}',), 'CFErrorGetTypeID': (sel32or64(b'L', b'Q'),), 'CFURLCopyParameterString': (b'^{__CFString=}^{__CFURL=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFAttributedStringGetAttributesAndLongestEffectiveRange': (sel32or64(b'^{__CFDictionary=}^{__CFAttributedString=}l{_CFRange=ll}^{_CFRange=ll}', b'^{__CFDictionary=}^{__CFAttributedString=}q{_CFRange=qq}^{_CFRange=qq}'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'CFXMLParserGetContext': (sel32or64(b'v^{__CFXMLParser=}^{_CFXMLParserContext=l^v^?^?^?}', b'v^{__CFXMLParser=}^{_CFXMLParserContext=q^v^?^?^?}'),), 'CFCopyDescription': (b'^{__CFString=}@', '', {'retval': {'already_cfretained': True}}), 'CFDataDeleteBytes': (sel32or64(b'v^{__CFData=}{_CFRange=ll}', b'v^{__CFData=}{_CFRange=qq}'),), 'CFWriteStreamGetError': (sel32or64(b'{_CFStreamError=ll}^{__CFWriteStream=}', b'{_CFStreamError=qi}^{__CFWriteStream=}'),), 'CFURLCreateResourcePropertiesForKeysFromBookmarkData': (b'^{__CFDictionary=}^{__CFAllocator=}^{__CFArray=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CFBitVectorGetFirstIndexOfBit': (sel32or64(b'l^{__CFBitVector=}{_CFRange=ll}L', b'q^{__CFBitVector=}{_CFRange=qq}I'),), 'CFCharacterSetCreateCopy': (b'^{__CFCharacterSet=}^{__CFAllocator=}^{__CFCharacterSet=}', '', {'retval': {'already_cfretained': True}}), 'CFStringCreateMutableWithExternalCharactersNoCopy': (sel32or64(b'^{__CFString=}^{__CFAllocator=}^Tll^{__CFAllocator=}', b'^{__CFString=}^{__CFAllocator=}^Tqq^{__CFAllocator=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CFRunLoopCopyCurrentMode': (b'^{__CFString=}^{__CFRunLoop=}', '', {'retval': {'already_cfretained': True}}), 'CFBundleGetPackageInfo': (sel32or64(b'v^{__CFBundle=}^L^L', b'v^{__CFBundle=}^I^I'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CFCalendarSetFirstWeekday': (sel32or64(b'v^{__CFCalendar=}l', b'v^{__CFCalendar=}q'),), 'CFStringGetFastestEncoding': (sel32or64(b'L^{__CFString=}', b'I^{__CFString=}'),), 'CFSocketIsValid': 
(b'Z^{__CFSocket=}',), 'CFTreeGetChildren': (b'v^{__CFTree=}^^{__CFTree=}', '', {'arguments': {1: {'c_array_of_variable_length': True, 'type_modifier': 'o'}}}), 'CFBundleGetLocalInfoDictionary': (b'^{__CFDictionary=}^{__CFBundle=}',), 'CFArrayBSearchValues': (sel32or64(b'l^{__CFArray=}{_CFRange=ll}@^?@', b'q^{__CFArray=}{_CFRange=qq}@^?@'), '', {'arguments': {3: {'callable': {'retval': {'type': b'l'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'callable_retained': False}}}), 'CFTreeGetNextSibling': (b'^{__CFTree=}^{__CFTree=}',), 'CFMessagePortGetTypeID': (sel32or64(b'L', b'Q'),), 'CFBagGetCount': (sel32or64(b'l^{__CFBag=}', b'q^{__CFBag=}'),), 'CFBagRemoveAllValues': (b'v^{__CFBag=}',), 'CFCharacterSetCreateBitmapRepresentation': (b'^{__CFData=}^{__CFAllocator=}^{__CFCharacterSet=}', '', {'retval': {'already_cfretained': True}}), 'CFXMLParserGetDocument': (b'^v^{__CFXMLParser=}',), 'CFXMLNodeGetTypeCode': (sel32or64(b'l^{__CFXMLNode=}', b'q^{__CFXMLNode=}'),), 'CFArrayGetValues': (sel32or64(b'v^{__CFArray=}{_CFRange=ll}^@', b'v^{__CFArray=}{_CFRange=qq}^@'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'CFCharacterSetIsSupersetOfSet': (b'Z^{__CFCharacterSet=}^{__CFCharacterSet=}',), 'CFRunLoopObserverGetTypeID': (sel32or64(b'L', b'Q'),), 'CFAbsoluteTimeGetGregorianDate': (sel32or64(b'{_CFGregorianDate=lccccd}d^{__CFTimeZone=}', b'{_CFGregorianDate=iccccd}d^{__CFTimeZone=}'),), 'CFNotificationCenterRemoveObserver': (b'v^{__CFNotificationCenter=}@^{__CFString=}@',), 'CFCalendarSetTimeZone': (b'v^{__CFCalendar=}^{__CFTimeZone=}',), 'CFSetCreateMutableCopy': (sel32or64(b'^{__CFSet=}^{__CFAllocator=}l^{__CFSet=}', b'^{__CFSet=}^{__CFAllocator=}q^{__CFSet=}'), '', {'retval': {'already_cfretained': True}}), 'CFXMLTreeCreateWithNode': (b'^{__CFTree=}^{__CFAllocator=}^{__CFXMLNode=}', '', {'retval': {'already_cfretained': True}}), 'CFRunLoopTimerGetOrder': (sel32or64(b'l^{__CFRunLoopTimer=}', b'q^{__CFRunLoopTimer=}'),), 'CFTreeCreate': (sel32or64(b'^{__CFTree=}^{__CFAllocator=}^{_CFTreeContext=l^v^?^?^?}', b'^{__CFTree=}^{__CFAllocator=}^{_CFTreeContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CFPreferencesCopyAppValue': (b'@^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CFXMLNodeGetString': (b'^{__CFString=}^{__CFXMLNode=}',), 'CFSocketCreateWithSocketSignature': (sel32or64(b'^{__CFSocket=}^{__CFAllocator=}^{_CFSocketSignature=lll^{__CFData=}}L^?^{_CFSocketContext=l^v^?^?^?}', b'^{__CFSocket=}^{__CFAllocator=}^{_CFSocketSignature=iii^{__CFData=}}Q^?^{_CFSocketContext=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__CFSocket=}'}, 1: {'type': b'Q'}, 2: {'type': b'^{__CFData=}'}, 3: {'type': b'^v'}, 4: {'type': b'^v'}}}}}}), 'CFURLEnumeratorCreateForDirectoryURL': (sel32or64(b'^{__CFURLEnumerator=}^{__CFAllocator=}^{__CFURL=}L^{__CFArray=}', b'^{__CFURLEnumerator=}^{__CFAllocator=}^{__CFURL=}Q^{__CFArray=}'), '', {'retval': {'already_cfretained': True}})} -aliases = {'kCFBookmarkResolutionWithoutMountingMask': 'kCFURLBookmarkResolutionWithoutMountingMask', 'kCFFileSecurityRemoveACL': '_FILESEC_REMOVE_ACL', 'CFXMLTreeRef': 'CFTreeRef', 'kCFBookmarkResolutionWithoutUIMask': 'kCFURLBookmarkResolutionWithoutUIMask'} -cftypes=[('CFAllocatorRef', b'^{__CFAllocator=}', 'CFAllocatorGetTypeID', None), ('CFArrayRef', b'^{__CFArray=}', 'CFArrayGetTypeID', 'NSArray'), ('CFAttributedStringRef', 
b'^{__CFAttributedString=}', 'CFAttributedStringGetTypeID', '__NSCFAttributedString,NSCFAttributedString'), ('CFBagRef', b'^{__CFBag=}', 'CFBagGetTypeID', None), ('CFBinaryHeapRef', b'^{__CFBinaryHeap=}', 'CFBinaryHeapGetTypeID', None), ('CFBitVectorRef', b'^{__CFBitVector=}', 'CFBitVectorGetTypeID', None), ('CFBooleanRef', b'^{__CFBoolean=}', 'CFBooleanGetTypeID', '__NSCFBoolean,NSCFBoolean'), ('CFBundleRef', b'^{__CFBundle=}', 'CFBundleGetTypeID', None), ('CFCalendarRef', b'^{__CFCalendar=}', 'CFCalendarGetTypeID', '__NSCFCalendar,NSCFCalendar'), ('CFCharacterSetRef', b'^{__CFCharacterSet=}', 'CFCharacterSetGetTypeID', '__NSCFCharacterSet,NSCFCharacterSet'), ('CFDataRef', b'^{__CFData=}', 'CFDataGetTypeID', '__NSCFData,NSCFData'), ('CFDateFormatterRef', b'^{__CFDateFormatter=}', 'CFDateFormatterGetTypeID', None), ('CFDateRef', b'^{__CFDate=}', 'CFDateGetTypeID', '__NSCFDate,NSCFDate,__NSDate'), ('CFDictionaryRef', b'^{__CFDictionary=}', 'CFDictionaryGetTypeID', 'NSDictionary'), ('CFErrorRef', b'^{__CFError=}', 'CFErrorGetTypeID', '__NSCFError,NSCFError'), ('CFFileDescriptorRef', b'^{__CFFileDescriptor=}', 'CFFileDescriptorGetTypeID', None), ('CFFileSecurityRef', b'^{__CFFileSecurity=}', 'CFFileSecurityGetTypeID', '__NSFileSecurity'), ('CFLocaleRef', b'^{__CFLocale=}', 'CFLocaleGetTypeID', '__NSCFLocale,NSCFLocale'), ('CFMachPortRef', b'^{__CFMachPort=}', 'CFMachPortGetTypeID', 'NSMachPort'), ('CFMessagePortRef', b'^{__CFMessagePort=}', 'CFMessagePortGetTypeID', None), ('CFMutableArrayRef', b'^{__CFArray=}', 'CFArrayGetTypeID', 'NSMutableArray'), ('CFMutableAttributedStringRef', b'^{__CFAttributedString=}', 'CFAttributedStringGetTypeID', '__NSCFAttributedString,NSCFAttributedString'), ('CFMutableBagRef', b'^{__CFBag=}', 'CFBagGetTypeID', None), ('CFMutableBitVectorRef', b'^{__CFBitVector=}', 'CFBitVectorGetTypeID', None), ('CFMutableCharacterSetRef', b'^{__CFCharacterSet=}', 'CFCharacterSetGetTypeID', None), ('CFMutableDataRef', b'^{__CFData=}', 'CFDataGetTypeID', 'NSMutableData'), ('CFMutableDictionaryRef', b'^{__CFDictionary=}', 'CFDictionaryGetTypeID', 'NSMutableDictionary'), ('CFMutableSetRef', b'^{__CFSet=}', 'CFSetGetTypeID', 'NSMutableSet'), ('CFMutableStringRef', b'^{__CFString=}', 'CFStringGetTypeID', None), ('CFNotificationCenterRef', b'^{__CFNotificationCenter=}', 'CFNotificationCenterGetTypeID', None), ('CFNullRef', b'^{__CFNull=}', 'CFNullGetTypeID', 'NSNull'), ('CFNumberFormatterRef', b'^{__CFNumberFormatter=}', 'CFNumberFormatterGetTypeID', None), ('CFNumberRef', b'^{__CFNumber=}', 'CFNumberGetTypeID', '__NSCFNumber,NSCFNumber'), ('CFPlugInInstanceRef', b'^{__CFPlugInInstance=}', 'CFPlugInInstanceGetTypeID', None), ('CFReadStreamRef', b'^{__CFReadStream=}', 'CFReadStreamGetTypeID', '__NSCFInputStream,NSCFInputStream'), ('CFRunLoopObserverRef', b'^{__CFRunLoopObserver=}', 'CFRunLoopObserverGetTypeID', None), ('CFRunLoopRef', b'^{__CFRunLoop=}', 'CFRunLoopGetTypeID', None), ('CFRunLoopSourceRef', b'^{__CFRunLoopSource=}', 'CFRunLoopSourceGetTypeID', None), ('CFRunLoopTimerRef', b'^{__CFRunLoopTimer=}', 'CFRunLoopTimerGetTypeID', '__NSCFTimer,NSCFTimer'), ('CFSetRef', b'^{__CFSet=}', 'CFSetGetTypeID', 'NSSet'), ('CFSocketRef', b'^{__CFSocket=}', 'CFSocketGetTypeID', None), ('CFStringRef', b'^{__CFString=}', 'CFStringGetTypeID', 'NSString'), ('CFStringTokenizerRef', b'^{__CFStringTokenizer=}', 'CFStringTokenizerGetTypeID', None), ('CFTimeZoneRef', b'^{__CFTimeZone=}', 'CFTimeZoneGetTypeID', 'NSTimeZone'), ('CFTreeRef', b'^{__CFTree=}', 'CFTreeGetTypeID', None), 
('CFURLEnumeratorRef', b'^{__CFURLEnumerator=}', 'CFURLEnumeratorGetTypeID', None), ('CFURLRef', b'^{__CFURL=}', 'CFURLGetTypeID', 'NSURL'), ('CFUUIDRef', b'^{__CFUUID=}', 'CFUUIDGetTypeID', None), ('CFUserNotificationRef', b'^{__CFUserNotification=}', 'CFUserNotificationGetTypeID', None), ('CFWriteStreamRef', b'^{__CFWriteStream=}', 'CFWriteStreamGetTypeID', '__NSCFOutputStream,NSCFOutputStream'), ('CFXMLNodeRef', b'^{__CFXMLNode=}', 'CFXMLNodeGetTypeID', None), ('CFXMLParserRef', b'^{__CFXMLParser=}', 'CFXMLParserGetTypeID', None)] -expressions = {'kCFISO8601DateFormatWithFullTime': 'kCFISO8601DateFormatWithTime | kCFISO8601DateFormatWithColonSeparatorInTime | kCFISO8601DateFormatWithTimeZone | kCFISO8601DateFormatWithColonSeparatorInTimeZone', 'kCFISO8601DateFormatWithFullDate': 'kCFISO8601DateFormatWithYear | kCFISO8601DateFormatWithMonth | kCFISO8601DateFormatWithDay | kCFISO8601DateFormatWithDashSeparatorInDate', 'kCFISO8601DateFormatWithInternetDateTime': 'kCFISO8601DateFormatWithFullDate | kCFISO8601DateFormatWithFullTime'} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreFoundation/_metadata.pyc b/env/lib/python2.7/site-packages/CoreFoundation/_metadata.pyc deleted file mode 100644 index fcc63a36..00000000 Binary files a/env/lib/python2.7/site-packages/CoreFoundation/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreFoundation/_static.py b/env/lib/python2.7/site-packages/CoreFoundation/_static.py deleted file mode 100644 index e5e1a8c6..00000000 --- a/env/lib/python2.7/site-packages/CoreFoundation/_static.py +++ /dev/null @@ -1,91 +0,0 @@ -import objc as _objc -import CoreFoundation as _CF - -# -# 'Emulation' for CFArray contructors -# -def _setup(): - NSArray = _objc.lookUpClass('NSArray') - NSMutableArray = _objc.lookUpClass('NSMutableArray') - - def CFArrayCreate(allocator, values, numvalues, callbacks): - assert callbacks is None - return NSArray.alloc().initWithArray_(values[:numvalues]) - - def CFArrayCreateMutable(allocator, capacity, callbacks): - assert callbacks is None - return NSMutableArray.alloc().init() - - return CFArrayCreate, CFArrayCreateMutable - -CFArrayCreate, CFArrayCreateMutable = _setup() - -# CFDictionary emulation functions - -def _setup(): - NSDictionary = _objc.lookUpClass('NSDictionary') - NSMutableDictionary = _objc.lookUpClass('NSMutableDictionary') - def CFDictionaryCreate(allocator, keys, values, numValues, - keyCallbacks, valueCallbacks): - assert keyCallbacks is None - assert valueCallbacks is None - - keys = list(keys)[:numValues] - values = list(values)[:numValues] - - return NSDictionary.dictionaryWithDictionary_(dict(zip(keys, values))) - - def CFDictionaryCreateMutable(allocator, capacity, keyCallbacks, valueCallbacks): - assert keyCallbacks is None - assert valueCallbacks is None - - return NSMutableDictionary.dictionary() - - return CFDictionaryCreate, CFDictionaryCreateMutable - -CFDictionaryCreate, CFDictionaryCreateMutable = _setup() - - - -# CFSet emulation functions - -def _setup(): - NSSet = _objc.lookUpClass('NSSet') - NSMutableSet = _objc.lookUpClass('NSMutableSet') - - def CFSetCreate(allocator, values, numvalues, callbacks): - assert callbacks is None - return NSSet.alloc().initWithArray_(values[:numvalues]) - - def CFSetCreateMutable(allocator, capacity, callbacks): - assert callbacks is None - return NSMutableSet.alloc().init() - - return CFSetCreate, CFSetCreateMutable - -CFSetCreate, CFSetCreateMutable = _setup() - -kCFTypeArrayCallBacks = None -kCFTypeDictionaryKeyCallBacks 
= None -kCFTypeDictionaryValueCallBacks = None -kCFTypeSetCallBacks = None - - -# -# Implementation of a number of macro's in the CFBundle API -# - -def CFCopyLocalizedString(key, comment): - return _CF.CFBundleCopyLocalizedString(_CF.CFBundleGetMainBundle(), (key), (key), None) - -def CFCopyLocalizedStringFromTable(key, tbl, comment): - return _CF.CFBundleCopyLocalizedString(_CF.CFBundleGetMainBundle(), (key), (key), (tbl)) - -def CFCopyLocalizedStringFromTableInBundle(key, tbl, bundle, comment): - return _CF.CFBundleCopyLocalizedString((bundle), (key), (key), (tbl)) - -def CFCopyLocalizedStringWithDefaultValue(key, tbl, bundle, value, comment): - return _CF.CFBundleCopyLocalizedString((bundle), (key), (value), (tbl)) - -def CFSTR(strval): - return _objc.lookUpClass('NSString').stringWithString_(strval) diff --git a/env/lib/python2.7/site-packages/CoreFoundation/_static.pyc b/env/lib/python2.7/site-packages/CoreFoundation/_static.pyc deleted file mode 100644 index 43f77474..00000000 Binary files a/env/lib/python2.7/site-packages/CoreFoundation/_static.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreLocation/_CoreLocation.so b/env/lib/python2.7/site-packages/CoreLocation/_CoreLocation.so deleted file mode 100755 index 8ace154b..00000000 Binary files a/env/lib/python2.7/site-packages/CoreLocation/_CoreLocation.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreLocation/__init__.py b/env/lib/python2.7/site-packages/CoreLocation/__init__.py deleted file mode 100644 index 3dd00768..00000000 --- a/env/lib/python2.7/site-packages/CoreLocation/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -''' -Python mapping for the CoreLocation framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import sys -import objc -import Foundation - -from CoreLocation import _metadata -from CoreLocation._CoreLocation import * -import os - -if int(os.uname()[2].split('.')[0]) < 12: - # OSX <= 10.7 - kCLErrorGeocodeFoundNoResult = 7 - kCLErrorGeocodeCanceled = 8 -else: - # OSX 10.8 or later - kCLErrorGeocodeFoundNoResult = 8 - kCLErrorGeocodeCanceled = 10 - -sys.modules['CoreLocation'] = mod = objc.ObjCLazyModule( - "CoreLocation", "com.apple.corelocation", - objc.pathForFramework("/System/Library/Frameworks/CoreLocation.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - 'kCLErrorGeocodeFoundNoResult': kCLErrorGeocodeFoundNoResult, - 'kCLErrorGeocodeCanceled': kCLErrorGeocodeCanceled, - }, (Foundation,)) - -import sys -del sys.modules['CoreLocation._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreLocation/__init__.pyc b/env/lib/python2.7/site-packages/CoreLocation/__init__.pyc deleted file mode 100644 index f12a4264..00000000 Binary files a/env/lib/python2.7/site-packages/CoreLocation/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreLocation/_metadata.py b/env/lib/python2.7/site-packages/CoreLocation/_metadata.py deleted file mode 100644 index bcdc03e1..00000000 --- a/env/lib/python2.7/site-packages/CoreLocation/_metadata.py +++ /dev/null @@ -1,78 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 5 22:24:48 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'CLLocationCoordinate2D': objc.createStructType('CLLocationCoordinate2D', b'{_CLLocationCoordinate2D=dd}', ['latitude', 'longitude'])}) -constants = '''$CLLocationDistanceMax@d$CLTimeIntervalMax@d$kCLDistanceFilterNone@d$kCLErrorDomain$kCLErrorUserInfoAlternateRegionKey$kCLHeadingFilterNone@d$kCLLocationAccuracyBest@d$kCLLocationAccuracyBestForNavigation@d$kCLLocationAccuracyHundredMeters@d$kCLLocationAccuracyKilometer@d$kCLLocationAccuracyNearestTenMeters@d$kCLLocationAccuracyThreeKilometers@d$kCLLocationCoordinate2DInvalid@{_CLLocationCoordinate2D=dd}$''' -enums = '''$CLActivityTypeAirborne@5$CLActivityTypeAutomotiveNavigation@2$CLActivityTypeFitness@3$CLActivityTypeOther@1$CLActivityTypeOtherNavigation@4$CLDeviceOrientationFaceDown@6$CLDeviceOrientationFaceUp@5$CLDeviceOrientationLandscapeLeft@3$CLDeviceOrientationLandscapeRight@4$CLDeviceOrientationPortrait@1$CLDeviceOrientationPortraitUpsideDown@2$CLDeviceOrientationUnknown@0$CLProximityFar@3$CLProximityImmediate@1$CLProximityNear@2$CLProximityUnknown@0$CLRegionStateInside@1$CLRegionStateOutside@2$CLRegionStateUnknown@0$kCLAuthorizationStatusAuthorized@3$kCLAuthorizationStatusAuthorizedAlways@3$kCLAuthorizationStatusDenied@2$kCLAuthorizationStatusNotDetermined@0$kCLAuthorizationStatusRestricted@1$kCLErrorDeferredAccuracyTooLow@13$kCLErrorDeferredCanceled@15$kCLErrorDeferredDistanceFiltered@14$kCLErrorDeferredFailed@11$kCLErrorDeferredNotUpdatingLocation@12$kCLErrorDenied@1$kCLErrorGeocodeFoundPartialResult@9$kCLErrorHeadingFailure@3$kCLErrorLocationUnknown@0$kCLErrorNetwork@2$kCLErrorRangingFailure@17$kCLErrorRangingUnavailable@16$kCLErrorRegionMonitoringDenied@4$kCLErrorRegionMonitoringFailure@5$kCLErrorRegionMonitoringResponseDelayed@7$kCLErrorRegionMonitoringSetupDelayed@6$''' 
-misc.update({}) -functions={'CLLocationCoordinate2DIsValid': (b'Z{_CLLocationCoordinate2D=dd}',), 'CLLocationCoordinate2DMake': (b'{_CLLocationCoordinate2D=dd}dd',)} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CLBeaconRegion', b'notifyEntryStateOnDisplay', {'retval': {'type': b'Z'}}) - r(b'CLBeaconRegion', b'setNotifyEntryStateOnDisplay:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CLCircularRegion', b'center', {'retval': {'type': b'{_CLLocationCoordinate2D=dd}'}}) - r(b'CLCircularRegion', b'containsCoordinate:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'CLCircularRegion', b'initWithCenter:radius:identifier:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'CLGeocoder', b'geocodeAddressDictionary:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CLGeocoder', b'geocodeAddressString:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CLGeocoder', b'geocodeAddressString:inRegion:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CLGeocoder', b'geocodeAddressString:inRegion:preferredLocale:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CLGeocoder', b'geocodePostalAddress:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CLGeocoder', b'geocodePostalAddress:preferredLocale:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CLGeocoder', b'isGeocoding', {'retval': {'type': b'Z'}}) - r(b'CLGeocoder', b'reverseGeocodeLocation:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CLGeocoder', b'reverseGeocodeLocation:preferredLocale:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CLLocation', b'coordinate', {'retval': {'type': '{_CLLocationCoordinate2D=dd}'}}) - r(b'CLLocation', b'initWithCoordinate:altitude:horizontalAccuracy:verticalAccuracy:course:speed:timestamp:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'CLLocation', b'initWithCoordinate:altitude:horizontalAccuracy:verticalAccuracy:timestamp:', {'arguments': {2: {'type': '{_CLLocationCoordinate2D=dd}'}}}) - r(b'CLLocationManager', b'deferredLocationUpdatesAvailable', {'retval': {'type': b'Z'}}) - r(b'CLLocationManager', b'headingAvailable', {'retval': {'type': b'Z'}}) - r(b'CLLocationManager', b'isMonitoringAvailableForClass:', {'retval': {'type': b'Z'}}) - r(b'CLLocationManager', b'isRangingAvailable', {'retval': {'type': b'Z'}}) - r(b'CLLocationManager', b'locationServicesEnabled', {'retval': {'type': 'Z'}}) - r(b'CLLocationManager', b'pausesLocationUpdatesAutomatically', {'retval': {'type': b'Z'}}) - r(b'CLLocationManager', b'regionMonitoringAvailable', {'retval': {'type': b'Z'}}) - r(b'CLLocationManager', 
b'regionMonitoringEnabled', {'retval': {'type': b'Z'}}) - r(b'CLLocationManager', b'setPausesLocationUpdatesAutomatically:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CLLocationManager', b'significantLocationChangeMonitoringAvailable', {'retval': {'type': b'Z'}}) - r(b'CLRegion', b'center', {'retval': {'type': b'{_CLLocationCoordinate2D=dd}'}}) - r(b'CLRegion', b'containsCoordinate:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'CLRegion', b'initCircularRegionWithCenter:radius:identifier:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'CLRegion', b'notifyOnEntry', {'retval': {'type': b'Z'}}) - r(b'CLRegion', b'notifyOnExit', {'retval': {'type': b'Z'}}) - r(b'CLRegion', b'setNotifyOnEntry:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CLRegion', b'setNotifyOnExit:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'locationManager:didChangeAuthorizationStatus:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'i'}}}) - r(b'NSObject', b'locationManager:didDetermineState:forRegion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': b'@'}}}) - r(b'NSObject', b'locationManager:didEnterRegion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'locationManager:didExitRegion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'locationManager:didFailWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'locationManager:didFinishDeferredUpdatesWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'locationManager:didStartMonitoringForRegion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'locationManager:didUpdateHeading:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'locationManager:didUpdateLocations:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'locationManager:didUpdateToLocation:fromLocation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'locationManager:monitoringDidFailForRegion:withError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'locationManagerDidPauseLocationUpdates:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'locationManagerDidResumeLocationUpdates:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'locationManagerShouldDisplayHeadingCalibration:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreLocation/_metadata.pyc b/env/lib/python2.7/site-packages/CoreLocation/_metadata.pyc deleted file mode 100644 index 552c3d09..00000000 Binary files a/env/lib/python2.7/site-packages/CoreLocation/_metadata.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/CoreML/_CoreML.so b/env/lib/python2.7/site-packages/CoreML/_CoreML.so deleted file mode 100755 index d2b12f62..00000000 Binary files a/env/lib/python2.7/site-packages/CoreML/_CoreML.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreML/__init__.py b/env/lib/python2.7/site-packages/CoreML/__init__.py deleted file mode 100644 index 5078415c..00000000 --- a/env/lib/python2.7/site-packages/CoreML/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the CoreML framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from CoreML import _metadata -from CoreML._CoreML import * - - -sys.modules['CoreML'] = mod = objc.ObjCLazyModule( - "CoreML", - "com.apple.CoreML", - objc.pathForFramework("/System/Library/Frameworks/CoreML.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['CoreML._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreML/__init__.pyc b/env/lib/python2.7/site-packages/CoreML/__init__.pyc deleted file mode 100644 index 891a913c..00000000 Binary files a/env/lib/python2.7/site-packages/CoreML/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreML/_metadata.py b/env/lib/python2.7/site-packages/CoreML/_metadata.py deleted file mode 100644 index a75691e6..00000000 --- a/env/lib/python2.7/site-packages/CoreML/_metadata.py +++ /dev/null @@ -1,60 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Sep 18 22:28:08 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$MLModelAuthorKey$MLModelCreatorDefinedKey$MLModelDescriptionKey$MLModelErrorDomain$MLModelLicenseKey$MLModelVersionStringKey$''' -enums = '''$MLComputeUnitsAll@2$MLComputeUnitsCPUAndGPU@1$MLComputeUnitsCPUOnly@0$MLFeatureTypeDictionary@6$MLFeatureTypeDouble@2$MLFeatureTypeImage@4$MLFeatureTypeInt64@1$MLFeatureTypeInvalid@0$MLFeatureTypeMultiArray@5$MLFeatureTypeSequence@7$MLFeatureTypeString@3$MLImageSizeConstraintTypeEnumerated@2$MLImageSizeConstraintTypeRange@3$MLImageSizeConstraintTypeUnspecified@0$MLModelErrorCustomLayer@4$MLModelErrorCustomModel@5$MLModelErrorFeatureType@1$MLModelErrorGeneric@0$MLModelErrorIO@3$MLMultiArrayDataTypeDouble@65600$MLMultiArrayDataTypeFloat32@65568$MLMultiArrayDataTypeInt32@131104$MLMultiArrayShapeConstraintTypeEnumerated@2$MLMultiArrayShapeConstraintTypeRange@3$MLMultiArrayShapeConstraintTypeUnspecified@1$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'MLDictionaryFeatureProvider', b'initWithDictionary:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'MLFeatureDescription', b'isAllowedValue:', {'retval': {'type': 'Z'}}) - r(b'MLFeatureDescription', b'isOptional', {'retval': {'type': 'Z'}}) - r(b'MLFeatureDescription', b'setOptional:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MLFeatureValue', b'featureValueWithDictionary:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'MLFeatureValue', b'isEqualToFeatureValue:', {'retval': {'type': 'Z'}}) - r(b'MLFeatureValue', b'isUndefined', 
{'retval': {'type': 'Z'}}) - r(b'MLModel', b'compileModelAtURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'MLModel', b'modelWithContentsOfURL:configuration:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'MLModel', b'modelWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'MLModel', b'predictionFromFeatures:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'MLModel', b'predictionFromFeatures:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'MLModel', b'predictionsFromBatch:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'MLMultiArray', b'dataPointer', {'retval': {'c_array_of_variable_length': True}}) - r(b'MLMultiArray', b'initWithDataPointer:shape:dataType:strides:deallocator:error:', {'arguments': {2: {'type_modifier': b'n', 'c_array_of_variable_length': True}, 6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v'}}}}, 7: {'type_modifier': b'o'}}}) - r(b'MLMultiArray', b'initWithShape:dataType:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'MLPredictionOptions', b'setUsesCPUOnly:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MLPredictionOptions', b'usesCPUOnly', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'count', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'encodeToCommandBuffer:inputs:outputs:error:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'evaluateOnCPUWithInputs:outputs:error:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'featureNames', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'featureProviderAtIndex:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'q'}}}) - r(b'NSObject', b'featureProviderCount', {'required': False, 'retval': {'type': b'q'}}) - r(b'NSObject', b'featureValueForName:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'featuresAtIndex:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'initWithModelDescription:parameterDictionary:error:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'initWithParameterDictionary:error:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'outputShapesForInputShapes:error:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'predictionFromFeatures:options:error:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'predictionsFromBatch:options:error:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'setWeightData:error:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git 
a/env/lib/python2.7/site-packages/CoreML/_metadata.pyc b/env/lib/python2.7/site-packages/CoreML/_metadata.pyc deleted file mode 100644 index 2c0df2af..00000000 Binary files a/env/lib/python2.7/site-packages/CoreML/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreMedia/_CoreMedia.so b/env/lib/python2.7/site-packages/CoreMedia/_CoreMedia.so deleted file mode 100755 index 6e7fa357..00000000 Binary files a/env/lib/python2.7/site-packages/CoreMedia/_CoreMedia.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreMedia/__init__.py b/env/lib/python2.7/site-packages/CoreMedia/__init__.py deleted file mode 100644 index f3772d2c..00000000 --- a/env/lib/python2.7/site-packages/CoreMedia/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the CoreMedia framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from CoreMedia import _metadata -from CoreMedia import _macros -from CoreMedia import _CoreMedia - -sys.modules['CoreMedia'] = mod = objc.ObjCLazyModule( - "CoreMedia", - "com.apple.CoreMedia", - objc.pathForFramework("/System/Library/Frameworks/CoreMedia.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (_macros, _CoreMedia, Foundation,)) - -import sys -del sys.modules['CoreMedia._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreMedia/__init__.pyc b/env/lib/python2.7/site-packages/CoreMedia/__init__.pyc deleted file mode 100644 index 62495830..00000000 Binary files a/env/lib/python2.7/site-packages/CoreMedia/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreMedia/_macros.py b/env/lib/python2.7/site-packages/CoreMedia/_macros.py deleted file mode 100644 index 5664fe38..00000000 --- a/env/lib/python2.7/site-packages/CoreMedia/_macros.py +++ /dev/null @@ -1,49 +0,0 @@ -import CoreMedia - -def CMTIMERANGE_IS_VALID(range): - return CMTIME_IS_VALID(range.start) and CMTIME_IS_VALID(range.duration) and range.duration.epoch == 0 and range.duration.value >= 0 - -def CMTIMERANGE_IS_INVALID(range): - return not CMTIMERANGE_IS_VALID(range) - -def CMTIMERANGE_IS_INDEFINITE(range): - return CMTIMERANGE_IS_VALID(range) and (CMTIME_IS_INDEFINITE(range.start) or CMTIME_IS_INDEFINITE(range.duration)) - -def CMTIMERANGE_IS_EMPTY(range): - return CMTIMERANGE_IS_VALID(range) and range.duration == kCMTimeZero - -def CMTIMEMAPPING_IS_VALID(mapping): - return CMTIMERANGE_IS_VALID(mapping.target) - -def CMTIMEMAPPING_IS_INVALID(mapping): - return not CMTIMEMAPPING_IS_VALID(mapping) - -def CMTIMEMAPPING_IS_EMPTY(mapping): - return not CMTIME_IS_NUMERIC(mapping.source.start) and CMTIMERANGE_IS_VALID(mapping.target) - -def CMSimpleQueueGetFullness(queue): - if CMSimpleQueueGetCapacity(queue): - return CMSimpleQueueGetCount(queue) / CMSimpleQueueGetCapacity(queue) - else: - return 0.0 - -def CMTIME_IS_VALID(time): - return (time.flags & CoreMedia.kCMTimeFlags_Valid) != 0 - -def CMTIME_IS_INVALID(time): - return not CMTIME_IS_VALID(time) - -def CMTIME_IS_POSITIVE_INFINITY(time): - return CMTIME_IS_VALID(time) and (time.flags & CoreMedia.kCMTimeFlags_PositiveInfinity) != 0 - -def CMTIME_IS_NEGATIVE_INFINITY(time): - return CMTIME_IS_VALID(time) and (time.flags & CoreMedia.kCMTimeFlags_NegativeInfinity) != 0 - -def CMTIME_IS_INDEFINITE(time): - return 
CMTIME_IS_VALID(time) and (time.flags & CoreMedia.kCMTimeFlags_Indefinite) != 0 - -def CMTIME_IS_NUMERIC(time): - return (time.flags & (CoreMedia.kCMTimeFlags_Valid | CoreMedia.kCMTimeFlags_ImpliedValueFlagsMask)) == CoreMedia.kCMTimeFlags_Valid - -def CMTIME_HAS_BEEN_ROUNDED(time): - return CMTIME_IS_NUMERIC(time) and (time.flags & CoreMedia.kCMTimeFlags_HasBeenRounded) != 0 diff --git a/env/lib/python2.7/site-packages/CoreMedia/_macros.pyc b/env/lib/python2.7/site-packages/CoreMedia/_macros.pyc deleted file mode 100644 index 429f475f..00000000 Binary files a/env/lib/python2.7/site-packages/CoreMedia/_macros.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreMedia/_metadata.py b/env/lib/python2.7/site-packages/CoreMedia/_metadata.py deleted file mode 100644 index 30dfcfe0..00000000 --- a/env/lib/python2.7/site-packages/CoreMedia/_metadata.py +++ /dev/null @@ -1,28 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Mar 5 21:30:47 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'CMTime': objc.createStructType('CMTime', b'{_CMTime=qiIq}', ['value', 'timescale', 'flags', 'epoch']), 'CMTimeMapping': objc.createStructType('CMTimeMapping', b'{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}', ['source', 'target']), 'CMVideoDimensions': objc.createStructType('CMVideoDimensions', b'{_CMVideoDimensions=ii}', ['width', 'height']), 'CMTimeRange': objc.createStructType('CMTimeRange', b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}', ['start', 'duration']), 'CMSampleTimingInfo': objc.createStructType('CMSampleTimingInfo', b'{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}', ['duration', 'presentationTimeStamp', 'decodeTimeStamp'])}) -constants = 
'''$kCMFormatDescriptionChromaLocation_Bottom@^{__CFString=}$kCMFormatDescriptionChromaLocation_BottomLeft@^{__CFString=}$kCMFormatDescriptionChromaLocation_Center@^{__CFString=}$kCMFormatDescriptionChromaLocation_DV420@^{__CFString=}$kCMFormatDescriptionChromaLocation_Left@^{__CFString=}$kCMFormatDescriptionChromaLocation_Top@^{__CFString=}$kCMFormatDescriptionChromaLocation_TopLeft@^{__CFString=}$kCMFormatDescriptionColorPrimaries_DCI_P3@^{__CFString=}$kCMFormatDescriptionColorPrimaries_EBU_3213@^{__CFString=}$kCMFormatDescriptionColorPrimaries_ITU_R_2020@^{__CFString=}$kCMFormatDescriptionColorPrimaries_ITU_R_709_2@^{__CFString=}$kCMFormatDescriptionColorPrimaries_P22@^{__CFString=}$kCMFormatDescriptionColorPrimaries_P3_D65@^{__CFString=}$kCMFormatDescriptionColorPrimaries_SMPTE_C@^{__CFString=}$kCMFormatDescriptionConformsToMPEG2VideoProfile@^{__CFString=}$kCMFormatDescriptionExtensionKey_MetadataKeyTable@^{__CFString=}$kCMFormatDescriptionExtension_AlternativeTransferCharacteristics@^{__CFString=}$kCMFormatDescriptionExtension_BytesPerRow@^{__CFString=}$kCMFormatDescriptionExtension_ChromaLocationBottomField@^{__CFString=}$kCMFormatDescriptionExtension_ChromaLocationTopField@^{__CFString=}$kCMFormatDescriptionExtension_CleanAperture@^{__CFString=}$kCMFormatDescriptionExtension_ColorPrimaries@^{__CFString=}$kCMFormatDescriptionExtension_ContentLightLevelInfo@^{__CFString=}$kCMFormatDescriptionExtension_Depth@^{__CFString=}$kCMFormatDescriptionExtension_FieldCount@^{__CFString=}$kCMFormatDescriptionExtension_FieldDetail@^{__CFString=}$kCMFormatDescriptionExtension_FormatName@^{__CFString=}$kCMFormatDescriptionExtension_FullRangeVideo@^{__CFString=}$kCMFormatDescriptionExtension_GammaLevel@^{__CFString=}$kCMFormatDescriptionExtension_ICCProfile@^{__CFString=}$kCMFormatDescriptionExtension_MasteringDisplayColorVolume@^{__CFString=}$kCMFormatDescriptionExtension_OriginalCompressionSettings@^{__CFString=}$kCMFormatDescriptionExtension_PixelAspectRatio@^{__CFString=}$kCMFormatDescriptionExtension_RevisionLevel@^{__CFString=}$kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms@^{__CFString=}$kCMFormatDescriptionExtension_SpatialQuality@^{__CFString=}$kCMFormatDescriptionExtension_TemporalQuality@^{__CFString=}$kCMFormatDescriptionExtension_TransferFunction@^{__CFString=}$kCMFormatDescriptionExtension_Vendor@^{__CFString=}$kCMFormatDescriptionExtension_VerbatimISOSampleEntry@^{__CFString=}$kCMFormatDescriptionExtension_VerbatimImageDescription@^{__CFString=}$kCMFormatDescriptionExtension_VerbatimSampleDescription@^{__CFString=}$kCMFormatDescriptionExtension_Version@^{__CFString=}$kCMFormatDescriptionExtension_YCbCrMatrix@^{__CFString=}$kCMFormatDescriptionFieldDetail_SpatialFirstLineEarly@^{__CFString=}$kCMFormatDescriptionFieldDetail_SpatialFirstLineLate@^{__CFString=}$kCMFormatDescriptionFieldDetail_TemporalBottomFirst@^{__CFString=}$kCMFormatDescriptionFieldDetail_TemporalTopFirst@^{__CFString=}$kCMFormatDescriptionKey_CleanApertureHeight@^{__CFString=}$kCMFormatDescriptionKey_CleanApertureHeightRational@^{__CFString=}$kCMFormatDescriptionKey_CleanApertureHorizontalOffset@^{__CFString=}$kCMFormatDescriptionKey_CleanApertureHorizontalOffsetRational@^{__CFString=}$kCMFormatDescriptionKey_CleanApertureVerticalOffset@^{__CFString=}$kCMFormatDescriptionKey_CleanApertureVerticalOffsetRational@^{__CFString=}$kCMFormatDescriptionKey_CleanApertureWidth@^{__CFString=}$kCMFormatDescriptionKey_CleanApertureWidthRational@^{__CFString=}$kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing@^
{__CFString=}$kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing@^{__CFString=}$kCMFormatDescriptionTransferFunction_ITU_R_2020@^{__CFString=}$kCMFormatDescriptionTransferFunction_ITU_R_2100_HLG@^{__CFString=}$kCMFormatDescriptionTransferFunction_ITU_R_709_2@^{__CFString=}$kCMFormatDescriptionTransferFunction_Linear@^{__CFString=}$kCMFormatDescriptionTransferFunction_SMPTE_240M_1995@^{__CFString=}$kCMFormatDescriptionTransferFunction_SMPTE_ST_2084_PQ@^{__CFString=}$kCMFormatDescriptionTransferFunction_SMPTE_ST_428_1@^{__CFString=}$kCMFormatDescriptionTransferFunction_UseGamma@^{__CFString=}$kCMFormatDescriptionVendor_Apple@^{__CFString=}$kCMFormatDescriptionYCbCrMatrix_ITU_R_2020@^{__CFString=}$kCMFormatDescriptionYCbCrMatrix_ITU_R_601_4@^{__CFString=}$kCMFormatDescriptionYCbCrMatrix_ITU_R_709_2@^{__CFString=}$kCMFormatDescriptionYCbCrMatrix_SMPTE_240M_1995@^{__CFString=}$kCMHEVCTemporalLevelInfoKey_ConstraintIndicatorFlags@^{__CFString=}$kCMHEVCTemporalLevelInfoKey_LevelIndex@^{__CFString=}$kCMHEVCTemporalLevelInfoKey_ProfileCompatibilityFlags@^{__CFString=}$kCMHEVCTemporalLevelInfoKey_ProfileIndex@^{__CFString=}$kCMHEVCTemporalLevelInfoKey_ProfileSpace@^{__CFString=}$kCMHEVCTemporalLevelInfoKey_TemporalLevel@^{__CFString=}$kCMHEVCTemporalLevelInfoKey_TierFlag@^{__CFString=}$kCMImageDescriptionFlavor_3GPFamily@^{__CFString=}$kCMImageDescriptionFlavor_ISOFamily@^{__CFString=}$kCMImageDescriptionFlavor_QuickTimeMovie@^{__CFString=}$kCMMemoryPoolOption_AgeOutPeriod@^{__CFString=}$kCMMetadataBaseDataType_AffineTransformF64@^{__CFString=}$kCMMetadataBaseDataType_BMP@^{__CFString=}$kCMMetadataBaseDataType_DimensionsF32@^{__CFString=}$kCMMetadataBaseDataType_Float32@^{__CFString=}$kCMMetadataBaseDataType_Float64@^{__CFString=}$kCMMetadataBaseDataType_GIF@^{__CFString=}$kCMMetadataBaseDataType_JPEG@^{__CFString=}$kCMMetadataBaseDataType_JSON@^{__CFString=}$kCMMetadataBaseDataType_PNG@^{__CFString=}$kCMMetadataBaseDataType_PointF32@^{__CFString=}$kCMMetadataBaseDataType_PolygonF32@^{__CFString=}$kCMMetadataBaseDataType_PolylineF32@^{__CFString=}$kCMMetadataBaseDataType_RawData@^{__CFString=}$kCMMetadataBaseDataType_RectF32@^{__CFString=}$kCMMetadataBaseDataType_SInt16@^{__CFString=}$kCMMetadataBaseDataType_SInt32@^{__CFString=}$kCMMetadataBaseDataType_SInt64@^{__CFString=}$kCMMetadataBaseDataType_SInt8@^{__CFString=}$kCMMetadataBaseDataType_UInt16@^{__CFString=}$kCMMetadataBaseDataType_UInt32@^{__CFString=}$kCMMetadataBaseDataType_UInt64@^{__CFString=}$kCMMetadataBaseDataType_UInt8@^{__CFString=}$kCMMetadataBaseDataType_UTF16@^{__CFString=}$kCMMetadataBaseDataType_UTF8@^{__CFString=}$kCMMetadataDataType_QuickTimeMetadataDirection@^{__CFString=}$kCMMetadataDataType_QuickTimeMetadataLocation_ISO6709@^{__CFString=}$kCMMetadataFormatDescriptionKey_ConformingDataTypes@^{__CFString=}$kCMMetadataFormatDescriptionKey_DataType@^{__CFString=}$kCMMetadataFormatDescriptionKey_DataTypeNamespace@^{__CFString=}$kCMMetadataFormatDescriptionKey_LanguageTag@^{__CFString=}$kCMMetadataFormatDescriptionKey_LocalID@^{__CFString=}$kCMMetadataFormatDescriptionKey_Namespace@^{__CFString=}$kCMMetadataFormatDescriptionKey_SetupData@^{__CFString=}$kCMMetadataFormatDescriptionKey_StructuralDependency@^{__CFString=}$kCMMetadataFormatDescriptionKey_Value@^{__CFString=}$kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType@^{__CFString=}$kCMMetadataFormatDescriptionMetadataSpecificationKey_ExtendedLanguageTag@^{__CFString=}$kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier@^{__CFString=}$kCMMetadataForm
atDescriptionMetadataSpecificationKey_SetupData@^{__CFString=}$kCMMetadataFormatDescriptionMetadataSpecificationKey_StructuralDependency@^{__CFString=}$kCMMetadataFormatDescription_StructuralDependencyKey_DependencyIsInvalidFlag@^{__CFString=}$kCMMetadataIdentifier_QuickTimeMetadataDirection_Facing@^{__CFString=}$kCMMetadataIdentifier_QuickTimeMetadataLocation_ISO6709@^{__CFString=}$kCMMetadataIdentifier_QuickTimeMetadataPreferredAffineTransform@^{__CFString=}$kCMMetadataIdentifier_QuickTimeMetadataVideoOrientation@^{__CFString=}$kCMMetadataKeySpace_HLSDateRange@^{__CFString=}$kCMMetadataKeySpace_ID3@^{__CFString=}$kCMMetadataKeySpace_ISOUserData@^{__CFString=}$kCMMetadataKeySpace_Icy@^{__CFString=}$kCMMetadataKeySpace_QuickTimeMetadata@^{__CFString=}$kCMMetadataKeySpace_QuickTimeUserData@^{__CFString=}$kCMMetadataKeySpace_iTunes@^{__CFString=}$kCMSampleAttachmentKey_DependsOnOthers@^{__CFString=}$kCMSampleAttachmentKey_DisplayImmediately@^{__CFString=}$kCMSampleAttachmentKey_DoNotDisplay@^{__CFString=}$kCMSampleAttachmentKey_EarlierDisplayTimesAllowed@^{__CFString=}$kCMSampleAttachmentKey_HEVCStepwiseTemporalSubLayerAccess@^{__CFString=}$kCMSampleAttachmentKey_HEVCSyncSampleNALUnitType@^{__CFString=}$kCMSampleAttachmentKey_HEVCTemporalLevelInfo@^{__CFString=}$kCMSampleAttachmentKey_HEVCTemporalSubLayerAccess@^{__CFString=}$kCMSampleAttachmentKey_HasRedundantCoding@^{__CFString=}$kCMSampleAttachmentKey_IsDependedOnByOthers@^{__CFString=}$kCMSampleAttachmentKey_NotSync@^{__CFString=}$kCMSampleAttachmentKey_PartialSync@^{__CFString=}$kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix@^{__CFString=}$kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately@^{__CFString=}$kCMSampleBufferAttachmentKey_DrainAfterDecoding@^{__CFString=}$kCMSampleBufferAttachmentKey_DroppedFrameReason@^{__CFString=}$kCMSampleBufferAttachmentKey_DroppedFrameReasonInfo@^{__CFString=}$kCMSampleBufferAttachmentKey_EmptyMedia@^{__CFString=}$kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration@^{__CFString=}$kCMSampleBufferAttachmentKey_FillDiscontinuitiesWithSilence@^{__CFString=}$kCMSampleBufferAttachmentKey_ForceKeyFrame@^{__CFString=}$kCMSampleBufferAttachmentKey_GradualDecoderRefresh@^{__CFString=}$kCMSampleBufferAttachmentKey_PermanentEmptyMedia@^{__CFString=}$kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed@^{__CFString=}$kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding@^{__CFString=}$kCMSampleBufferAttachmentKey_ResumeOutput@^{__CFString=}$kCMSampleBufferAttachmentKey_Reverse@^{__CFString=}$kCMSampleBufferAttachmentKey_SampleReferenceByteOffset@^{__CFString=}$kCMSampleBufferAttachmentKey_SampleReferenceURL@^{__CFString=}$kCMSampleBufferAttachmentKey_SpeedMultiplier@^{__CFString=}$kCMSampleBufferAttachmentKey_StillImageLensStabilizationInfo@^{__CFString=}$kCMSampleBufferAttachmentKey_TransitionID@^{__CFString=}$kCMSampleBufferAttachmentKey_TrimDurationAtEnd@^{__CFString=}$kCMSampleBufferAttachmentKey_TrimDurationAtStart@^{__CFString=}$kCMSampleBufferConduitNotificationParameter_MaxUpcomingOutputPTS@^{__CFString=}$kCMSampleBufferConduitNotificationParameter_MinUpcomingOutputPTS@^{__CFString=}$kCMSampleBufferConduitNotificationParameter_ResumeTag@^{__CFString=}$kCMSampleBufferConduitNotificationParameter_UpcomingOutputPTSRangeMayOverlapQueuedOutputPTSRange@^{__CFString=}$kCMSampleBufferConduitNotification_InhibitOutputUntil@^{__CFString=}$kCMSampleBufferConduitNotification_ResetOutput@^{__CFString=}$kCMSampleBufferConduitNotification_UpcomingOutputPTSRangeChanged@^{__CFString=}$kCMSampleBufferCons
umerNotification_BufferConsumed@^{__CFString=}$kCMSampleBufferDroppedFrameReasonInfo_CameraModeSwitch@^{__CFString=}$kCMSampleBufferDroppedFrameReason_Discontinuity@^{__CFString=}$kCMSampleBufferDroppedFrameReason_FrameWasLate@^{__CFString=}$kCMSampleBufferDroppedFrameReason_OutOfBuffers@^{__CFString=}$kCMSampleBufferLensStabilizationInfo_Active@^{__CFString=}$kCMSampleBufferLensStabilizationInfo_Off@^{__CFString=}$kCMSampleBufferLensStabilizationInfo_OutOfRange@^{__CFString=}$kCMSampleBufferLensStabilizationInfo_Unavailable@^{__CFString=}$kCMSampleBufferNotificationParameter_OSStatus@^{__CFString=}$kCMSampleBufferNotification_DataBecameReady@^{__CFString=}$kCMSampleBufferNotification_DataFailed@^{__CFString=}$kCMSoundDescriptionFlavor_3GPFamily@^{__CFString=}$kCMSoundDescriptionFlavor_ISOFamily@^{__CFString=}$kCMSoundDescriptionFlavor_QuickTimeMovie@^{__CFString=}$kCMSoundDescriptionFlavor_QuickTimeMovieV2@^{__CFString=}$kCMTextFormatDescriptionColor_Alpha@^{__CFString=}$kCMTextFormatDescriptionColor_Blue@^{__CFString=}$kCMTextFormatDescriptionColor_Green@^{__CFString=}$kCMTextFormatDescriptionColor_Red@^{__CFString=}$kCMTextFormatDescriptionExtension_BackgroundColor@^{__CFString=}$kCMTextFormatDescriptionExtension_DefaultFontName@^{__CFString=}$kCMTextFormatDescriptionExtension_DefaultStyle@^{__CFString=}$kCMTextFormatDescriptionExtension_DefaultTextBox@^{__CFString=}$kCMTextFormatDescriptionExtension_DisplayFlags@^{__CFString=}$kCMTextFormatDescriptionExtension_FontTable@^{__CFString=}$kCMTextFormatDescriptionExtension_HorizontalJustification@^{__CFString=}$kCMTextFormatDescriptionExtension_TextJustification@^{__CFString=}$kCMTextFormatDescriptionExtension_VerticalJustification@^{__CFString=}$kCMTextFormatDescriptionRect_Bottom@^{__CFString=}$kCMTextFormatDescriptionRect_Left@^{__CFString=}$kCMTextFormatDescriptionRect_Right@^{__CFString=}$kCMTextFormatDescriptionRect_Top@^{__CFString=}$kCMTextFormatDescriptionStyle_Ascent@^{__CFString=}$kCMTextFormatDescriptionStyle_EndChar@^{__CFString=}$kCMTextFormatDescriptionStyle_Font@^{__CFString=}$kCMTextFormatDescriptionStyle_FontFace@^{__CFString=}$kCMTextFormatDescriptionStyle_FontSize@^{__CFString=}$kCMTextFormatDescriptionStyle_ForegroundColor@^{__CFString=}$kCMTextFormatDescriptionStyle_Height@^{__CFString=}$kCMTextFormatDescriptionStyle_StartChar@^{__CFString=}$kCMTextMarkupAlignmentType_End@^{__CFString=}$kCMTextMarkupAlignmentType_Left@^{__CFString=}$kCMTextMarkupAlignmentType_Middle@^{__CFString=}$kCMTextMarkupAlignmentType_Right@^{__CFString=}$kCMTextMarkupAlignmentType_Start@^{__CFString=}$kCMTextMarkupAttribute_Alignment@^{__CFString=}$kCMTextMarkupAttribute_BackgroundColorARGB@^{__CFString=}$kCMTextMarkupAttribute_BaseFontSizePercentageRelativeToVideoHeight@^{__CFString=}$kCMTextMarkupAttribute_BoldStyle@^{__CFString=}$kCMTextMarkupAttribute_CharacterBackgroundColorARGB@^{__CFString=}$kCMTextMarkupAttribute_CharacterEdgeStyle@^{__CFString=}$kCMTextMarkupAttribute_FontFamilyName@^{__CFString=}$kCMTextMarkupAttribute_ForegroundColorARGB@^{__CFString=}$kCMTextMarkupAttribute_GenericFontFamilyName@^{__CFString=}$kCMTextMarkupAttribute_ItalicStyle@^{__CFString=}$kCMTextMarkupAttribute_OrthogonalLinePositionPercentageRelativeToWritingDirection@^{__CFString=}$kCMTextMarkupAttribute_RelativeFontSize@^{__CFString=}$kCMTextMarkupAttribute_TextPositionPercentageRelativeToWritingDirection@^{__CFString=}$kCMTextMarkupAttribute_UnderlineStyle@^{__CFString=}$kCMTextMarkupAttribute_VerticalLayout@^{__CFString=}$kCMTextMarkupAttribute_WritingDirecti
onSizePercentage@^{__CFString=}$kCMTextMarkupCharacterEdgeStyle_Depressed@^{__CFString=}$kCMTextMarkupCharacterEdgeStyle_DropShadow@^{__CFString=}$kCMTextMarkupCharacterEdgeStyle_None@^{__CFString=}$kCMTextMarkupCharacterEdgeStyle_Raised@^{__CFString=}$kCMTextMarkupCharacterEdgeStyle_Uniform@^{__CFString=}$kCMTextMarkupGenericFontName_Casual@^{__CFString=}$kCMTextMarkupGenericFontName_Cursive@^{__CFString=}$kCMTextMarkupGenericFontName_Default@^{__CFString=}$kCMTextMarkupGenericFontName_Fantasy@^{__CFString=}$kCMTextMarkupGenericFontName_Monospace@^{__CFString=}$kCMTextMarkupGenericFontName_MonospaceSansSerif@^{__CFString=}$kCMTextMarkupGenericFontName_MonospaceSerif@^{__CFString=}$kCMTextMarkupGenericFontName_ProportionalSansSerif@^{__CFString=}$kCMTextMarkupGenericFontName_ProportionalSerif@^{__CFString=}$kCMTextMarkupGenericFontName_SansSerif@^{__CFString=}$kCMTextMarkupGenericFontName_Serif@^{__CFString=}$kCMTextMarkupGenericFontName_SmallCapital@^{__CFString=}$kCMTextVerticalLayout_LeftToRight@^{__CFString=}$kCMTextVerticalLayout_RightToLeft@^{__CFString=}$kCMTimeCodeFormatDescriptionExtension_SourceReferenceName@^{__CFString=}$kCMTimeCodeFormatDescriptionKey_LangCode@^{__CFString=}$kCMTimeCodeFormatDescriptionKey_Value@^{__CFString=}$kCMTimeEpochKey@^{__CFString=}$kCMTimeFlagsKey@^{__CFString=}$kCMTimeIndefinite@{_CMTime=qiIq}$kCMTimeInvalid@{_CMTime=qiIq}$kCMTimeMappingInvalid@{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}$kCMTimeMappingSourceKey@^{__CFString=}$kCMTimeMappingTargetKey@^{__CFString=}$kCMTimeNegativeInfinity@{_CMTime=qiIq}$kCMTimePositiveInfinity@{_CMTime=qiIq}$kCMTimeRangeDurationKey@^{__CFString=}$kCMTimeRangeInvalid@{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}$kCMTimeRangeStartKey@^{__CFString=}$kCMTimeRangeZero@{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}$kCMTimeScaleKey@^{__CFString=}$kCMTimeValueKey@^{__CFString=}$kCMTimeZero@{_CMTime=qiIq}$kCMTimebaseNotificationKey_EventTime@^{__CFString=}$kCMTimebaseNotification_EffectiveRateChanged@^{__CFString=}$kCMTimebaseNotification_TimeJumped@^{__CFString=}$kCMTimingInfoInvalid@{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}$''' -enums = 
'''$COREMEDIA_DECLARE_BRIDGED_TYPES@1$COREMEDIA_DECLARE_NULLABILITY@1$COREMEDIA_DECLARE_NULLABILITY_BEGIN_END@1$COREMEDIA_DECLARE_RETURNS_NOT_RETAINED_ON_PARAMETERS@1$COREMEDIA_DECLARE_RETURNS_RETAINED@1$COREMEDIA_DECLARE_RETURNS_RETAINED_ON_PARAMETERS@1$COREMEDIA_USE_ALIGNED_CMBASECLASS_VERSION@1$kCMAttachmentMode_ShouldNotPropagate@0$kCMAttachmentMode_ShouldPropagate@1$kCMAudioCodecType_AAC_AudibleProtected@1633771875$kCMAudioCodecType_AAC_LCProtected@1885430115$kCMAudioFormatDescriptionMask_All@15$kCMAudioFormatDescriptionMask_ChannelLayout@4$kCMAudioFormatDescriptionMask_Extensions@8$kCMAudioFormatDescriptionMask_MagicCookie@2$kCMAudioFormatDescriptionMask_StreamBasicDescription@1$kCMBlockBufferAlwaysCopyDataFlag@2$kCMBlockBufferAssureMemoryNowFlag@1$kCMBlockBufferBadCustomBlockSourceErr@-12702$kCMBlockBufferBadLengthParameterErr@-12704$kCMBlockBufferBadOffsetParameterErr@-12703$kCMBlockBufferBadPointerParameterErr@-12705$kCMBlockBufferBlockAllocationFailedErr@-12701$kCMBlockBufferCustomBlockSourceVersion@0$kCMBlockBufferDontOptimizeDepthFlag@4$kCMBlockBufferEmptyBBufErr@-12706$kCMBlockBufferInsufficientSpaceErr@-12708$kCMBlockBufferNoErr@0$kCMBlockBufferPermitEmptyReferenceFlag@8$kCMBlockBufferStructureAllocationFailedErr@-12700$kCMBlockBufferUnallocatedBlockErr@-12707$kCMBufferQueueError_AllocationFailed@-12760$kCMBufferQueueError_BadTriggerDuration@-12765$kCMBufferQueueError_CannotModifyQueueFromTriggerCallback@-12766$kCMBufferQueueError_EnqueueAfterEndOfData@-12763$kCMBufferQueueError_InvalidBuffer@-12769$kCMBufferQueueError_InvalidCMBufferCallbacksStruct@-12762$kCMBufferQueueError_InvalidTriggerCondition@-12767$kCMBufferQueueError_InvalidTriggerToken@-12768$kCMBufferQueueError_QueueIsFull@-12764$kCMBufferQueueError_RequiredParameterMissing@-12761$kCMBufferQueueTrigger_WhenBufferCountBecomesGreaterThan@11$kCMBufferQueueTrigger_WhenBufferCountBecomesLessThan@10$kCMBufferQueueTrigger_WhenDataBecomesReady@7$kCMBufferQueueTrigger_WhenDurationBecomesGreaterThan@3$kCMBufferQueueTrigger_WhenDurationBecomesGreaterThanOrEqualTo@4$kCMBufferQueueTrigger_WhenDurationBecomesGreaterThanOrEqualToAndBufferCountBecomesGreaterThan@12$kCMBufferQueueTrigger_WhenDurationBecomesLessThan@1$kCMBufferQueueTrigger_WhenDurationBecomesLessThanOrEqualTo@2$kCMBufferQueueTrigger_WhenEndOfDataReached@8$kCMBufferQueueTrigger_WhenMaxPresentationTimeStampChanges@6$kCMBufferQueueTrigger_WhenMinPresentationTimeStampChanges@5$kCMBufferQueueTrigger_WhenReset@9$kCMClockError_AllocationFailed@-12747$kCMClockError_InvalidParameter@-12746$kCMClockError_MissingRequiredParameter@-12745$kCMClockError_UnsupportedOperation@-12756$kCMClosedCaptionFormatType_ATSC@1635017571$kCMClosedCaptionFormatType_CEA608@1664495672$kCMClosedCaptionFormatType_CEA708@1664561208$kCMFormatDescriptionBridgeError_AllocationFailed@-12713$kCMFormatDescriptionBridgeError_IncompatibleFormatDescription@-12716$kCMFormatDescriptionBridgeError_InvalidFormatDescription@-12715$kCMFormatDescriptionBridgeError_InvalidParameter@-12712$kCMFormatDescriptionBridgeError_InvalidSerializedSampleDescription@-12714$kCMFormatDescriptionBridgeError_InvalidSlice@-12719$kCMFormatDescriptionBridgeError_UnsupportedSampleDescriptionFlavor@-12717$kCMFormatDescriptionError_AllocationFailed@-12711$kCMFormatDescriptionError_InvalidParameter@-12710$kCMFormatDescriptionError_ValueNotAvailable@-12718$kCMMPEG2VideoProfile_HDV_1080i50@1751414323$kCMMPEG2VideoProfile_HDV_1080i60@1751414322$kCMMPEG2VideoProfile_HDV_1080p24@1751414326$kCMMPEG2VideoProfile_HDV_1080p25@1751414327$kCMMPEG2Vide
oProfile_HDV_1080p30@1751414328$kCMMPEG2VideoProfile_HDV_720p24@1751414324$kCMMPEG2VideoProfile_HDV_720p25@1751414325$kCMMPEG2VideoProfile_HDV_720p30@1751414321$kCMMPEG2VideoProfile_HDV_720p50@1751414369$kCMMPEG2VideoProfile_HDV_720p60@1751414329$kCMMPEG2VideoProfile_XDCAM_EX_1080i50_VBR35@2019849827$kCMMPEG2VideoProfile_XDCAM_EX_1080i60_VBR35@2019849826$kCMMPEG2VideoProfile_XDCAM_EX_1080p24_VBR35@2019849828$kCMMPEG2VideoProfile_XDCAM_EX_1080p25_VBR35@2019849829$kCMMPEG2VideoProfile_XDCAM_EX_1080p30_VBR35@2019849830$kCMMPEG2VideoProfile_XDCAM_EX_720p24_VBR35@2019849780$kCMMPEG2VideoProfile_XDCAM_EX_720p25_VBR35@2019849781$kCMMPEG2VideoProfile_XDCAM_EX_720p30_VBR35@2019849777$kCMMPEG2VideoProfile_XDCAM_EX_720p50_VBR35@2019849825$kCMMPEG2VideoProfile_XDCAM_EX_720p60_VBR35@2019849785$kCMMPEG2VideoProfile_XDCAM_HD422_1080i50_CBR50@2019833187$kCMMPEG2VideoProfile_XDCAM_HD422_1080i60_CBR50@2019833186$kCMMPEG2VideoProfile_XDCAM_HD422_1080p24_CBR50@2019833188$kCMMPEG2VideoProfile_XDCAM_HD422_1080p25_CBR50@2019833189$kCMMPEG2VideoProfile_XDCAM_HD422_1080p30_CBR50@2019833190$kCMMPEG2VideoProfile_XDCAM_HD422_540p@2019846194$kCMMPEG2VideoProfile_XDCAM_HD422_720p24_CBR50@2019833140$kCMMPEG2VideoProfile_XDCAM_HD422_720p25_CBR50@2019833141$kCMMPEG2VideoProfile_XDCAM_HD422_720p30_CBR50@2019833137$kCMMPEG2VideoProfile_XDCAM_HD422_720p50_CBR50@2019833185$kCMMPEG2VideoProfile_XDCAM_HD422_720p60_CBR50@2019833145$kCMMPEG2VideoProfile_XDCAM_HD_1080i50_VBR35@2019849779$kCMMPEG2VideoProfile_XDCAM_HD_1080i60_VBR35@2019849778$kCMMPEG2VideoProfile_XDCAM_HD_1080p24_VBR35@2019849782$kCMMPEG2VideoProfile_XDCAM_HD_1080p25_VBR35@2019849783$kCMMPEG2VideoProfile_XDCAM_HD_1080p30_VBR35@2019849784$kCMMPEG2VideoProfile_XDCAM_HD_540p@2019846244$kCMMPEG2VideoProfile_XF@2019981873$kCMMediaType_Audio@1936684398$kCMMediaType_ClosedCaption@1668047728$kCMMediaType_Metadata@1835365473$kCMMediaType_Muxed@1836415096$kCMMediaType_Subtitle@1935832172$kCMMediaType_Text@1952807028$kCMMediaType_TimeCode@1953325924$kCMMediaType_Video@1986618469$kCMMetadataDataTypeRegistryError_AllocationFailed@-16310$kCMMetadataDataTypeRegistryError_BadDataTypeIdentifier@-16312$kCMMetadataDataTypeRegistryError_DataTypeAlreadyRegistered@-16313$kCMMetadataDataTypeRegistryError_MultipleConformingBaseTypes@-16315$kCMMetadataDataTypeRegistryError_RequiredParameterMissing@-16311$kCMMetadataDataTypeRegistryError_RequiresConformingBaseType@-16314$kCMMetadataFormatType_Boxed@1835360888$kCMMetadataFormatType_EMSG@1701671783$kCMMetadataFormatType_ICY@1768126752$kCMMetadataFormatType_ID3@1768174368$kCMMetadataIdentifierError_AllocationFailed@-16300$kCMMetadataIdentifierError_BadIdentifier@-16307$kCMMetadataIdentifierError_BadKey@-16302$kCMMetadataIdentifierError_BadKeyLength@-16303$kCMMetadataIdentifierError_BadKeySpace@-16306$kCMMetadataIdentifierError_BadKeyType@-16304$kCMMetadataIdentifierError_BadNumberKey@-16305$kCMMetadataIdentifierError_NoKeyValueAvailable@-16308$kCMMetadataIdentifierError_RequiredParameterMissing@-16301$kCMMuxedStreamType_DV@1685463072$kCMMuxedStreamType_MPEG1System@1836069235$kCMMuxedStreamType_MPEG2Program@1836069488$kCMMuxedStreamType_MPEG2Transport@1836069492$kCMPersistentTrackID_Invalid@0$kCMPixelFormat_16BE555@16$kCMPixelFormat_16BE565@1110783541$kCMPixelFormat_16LE555@1278555445$kCMPixelFormat_16LE5551@892679473$kCMPixelFormat_16LE565@1278555701$kCMPixelFormat_24RGB@24$kCMPixelFormat_32ARGB@32$kCMPixelFormat_32BGRA@1111970369$kCMPixelFormat_422YpCbCr10@1983000880$kCMPixelFormat_422YpCbCr16@1983000886$kCMPixelFormat_422YpCbCr8@846624121$kCM
PixelFormat_422YpCbCr8_yuvs@2037741171$kCMPixelFormat_4444YpCbCrA8@1983131704$kCMPixelFormat_444YpCbCr10@1983131952$kCMPixelFormat_444YpCbCr8@1983066168$kCMPixelFormat_8IndexedGray_WhiteIsZero@40$kCMSampleBufferError_AllocationFailed@-12730$kCMSampleBufferError_AlreadyHasDataBuffer@-12732$kCMSampleBufferError_ArrayTooSmall@-12737$kCMSampleBufferError_BufferHasNoSampleSizes@-12735$kCMSampleBufferError_BufferHasNoSampleTimingInfo@-12736$kCMSampleBufferError_BufferNotReady@-12733$kCMSampleBufferError_CannotSubdivide@-12739$kCMSampleBufferError_DataCanceled@-16751$kCMSampleBufferError_DataFailed@-16750$kCMSampleBufferError_InvalidEntryCount@-12738$kCMSampleBufferError_InvalidMediaFormat@-12743$kCMSampleBufferError_InvalidMediaTypeForOperation@-12741$kCMSampleBufferError_InvalidSampleData@-12742$kCMSampleBufferError_Invalidated@-12744$kCMSampleBufferError_RequiredParameterMissing@-12731$kCMSampleBufferError_SampleIndexOutOfRange@-12734$kCMSampleBufferError_SampleTimingInfoInvalid@-12740$kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment@1$kCMSimpleQueueError_AllocationFailed@-12770$kCMSimpleQueueError_ParameterOutOfRange@-12772$kCMSimpleQueueError_QueueIsFull@-12773$kCMSimpleQueueError_RequiredParameterMissing@-12771$kCMSubtitleFormatType_3GText@1954034535$kCMSubtitleFormatType_WebVTT@2004251764$kCMSyncError_AllocationFailed@-12754$kCMSyncError_InvalidParameter@-12753$kCMSyncError_MissingRequiredParameter@-12752$kCMSyncError_RateMustBeNonZero@-12755$kCMTextDisplayFlag_allSubtitlesForced@2147483648$kCMTextDisplayFlag_continuousKaraoke@2048$kCMTextDisplayFlag_fillTextRegion@262144$kCMTextDisplayFlag_forcedSubtitlesPresent@1073741824$kCMTextDisplayFlag_obeySubtitleFormatting@536870912$kCMTextDisplayFlag_scrollDirectionMask@384$kCMTextDisplayFlag_scrollDirection_bottomToTop@0$kCMTextDisplayFlag_scrollDirection_leftToRight@384$kCMTextDisplayFlag_scrollDirection_rightToLeft@128$kCMTextDisplayFlag_scrollDirection_topToBottom@256$kCMTextDisplayFlag_scrollIn@32$kCMTextDisplayFlag_scrollOut@64$kCMTextDisplayFlag_writeTextVertically@131072$kCMTextFormatType_3GText@1954034535$kCMTextFormatType_QTText@1952807028$kCMTextJustification_bottom_right@-1$kCMTextJustification_centered@1$kCMTextJustification_left_top@0$kCMTimeCodeFlag_24HourMax@2$kCMTimeCodeFlag_DropFrame@1$kCMTimeCodeFlag_NegTimesOK@4$kCMTimeCodeFormatType_Counter32@1668166450$kCMTimeCodeFormatType_Counter64@1668167220$kCMTimeCodeFormatType_TimeCode32@1953325924$kCMTimeCodeFormatType_TimeCode64@1952658996$kCMTimeFlags_HasBeenRounded@2$kCMTimeFlags_ImpliedValueFlagsMask@28$kCMTimeFlags_Indefinite@16$kCMTimeFlags_NegativeInfinity@8$kCMTimeFlags_PositiveInfinity@4$kCMTimeFlags_Valid@1$kCMTimeMaxTimescale@2147483647$kCMTimeRoundingMethod_Default@1$kCMTimeRoundingMethod_QuickTime@4$kCMTimeRoundingMethod_RoundAwayFromZero@3$kCMTimeRoundingMethod_RoundHalfAwayFromZero@1$kCMTimeRoundingMethod_RoundTowardNegativeInfinity@6$kCMTimeRoundingMethod_RoundTowardPositiveInfinity@5$kCMTimeRoundingMethod_RoundTowardZero@2$kCMTimebaseError_AllocationFailed@-12750$kCMTimebaseError_InvalidParameter@-12749$kCMTimebaseError_MissingRequiredParameter@-12748$kCMTimebaseError_ReadOnly@-12757$kCMTimebaseError_TimerIntervalTooShort@-12751$kCMTimebaseVeryLongCFTimeInterval@8073216000.0$kCMVideoCodecType_422YpCbCr8@846624121$kCMVideoCodecType_Animation@1919706400$kCMVideoCodecType_AppleProRes422@1634755438$kCMVideoCodecType_AppleProRes422HQ@1634755432$kCMVideoCodecType_AppleProRes422LT@1634755443$kCMVideoCodecType_AppleProRes422Proxy@1634755439$kCMVideoCodecType_AppleProR
es4444@1634743400$kCMVideoCodecType_AppleProRes4444XQ@1634743416$kCMVideoCodecType_AppleProResRAW@1634759278$kCMVideoCodecType_AppleProResRAWHQ@1634759272$kCMVideoCodecType_Cinepak@1668704612$kCMVideoCodecType_DVCNTSC@1685480224$kCMVideoCodecType_DVCPAL@1685480304$kCMVideoCodecType_DVCPROHD1080i50@1685481525$kCMVideoCodecType_DVCPROHD1080i60@1685481526$kCMVideoCodecType_DVCPROHD1080p25@1685481522$kCMVideoCodecType_DVCPROHD1080p30@1685481523$kCMVideoCodecType_DVCPROHD720p50@1685481585$kCMVideoCodecType_DVCPROHD720p60@1685481584$kCMVideoCodecType_DVCPro50NTSC@1685468526$kCMVideoCodecType_DVCPro50PAL@1685468528$kCMVideoCodecType_DVCProPAL@1685483632$kCMVideoCodecType_H263@1748121139$kCMVideoCodecType_H264@1635148593$kCMVideoCodecType_HEVC@1752589105$kCMVideoCodecType_JPEG@1785750887$kCMVideoCodecType_JPEG_OpenDML@1684890161$kCMVideoCodecType_MPEG1Video@1836069238$kCMVideoCodecType_MPEG2Video@1836069494$kCMVideoCodecType_MPEG4Video@1836070006$kCMVideoCodecType_SorensonVideo@1398165809$kCMVideoCodecType_SorensonVideo3@1398165811$''' -misc.update({}) -functions={'CMBlockBufferCreateEmpty': (sel32or64(b'l^{__CFAllocator=}II^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}II^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimebaseCreateWithMasterTimebase': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMTimebase=}^^{OpaqueCMTimebase=}', b'i^{__CFAllocator=}^{OpaqueCMTimebase=}^^{OpaqueCMTimebase=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMBufferQueueMarkEndOfData': (sel32or64(b'l^{opaqueCMBufferQueue=}', b'i^{opaqueCMBufferQueue=}'),), 'CMFormatDescriptionCreate': (sel32or64(b'l^{__CFAllocator=}LL^{__CFDictionary=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}II^{__CFDictionary=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMBufferQueueIsEmpty': (b'Z^{opaqueCMBufferQueue=}',), 'CMMetadataFormatDescriptionCreateFromBigEndianMetadataDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMAudioFormatDescriptionGetStreamBasicDescription': (sel32or64(b'^{AudioStreamBasicDescription=dLLLLLLLL}^{opaqueCMFormatDescription=}', b'^{AudioStreamBasicDescription=dIIIIIIII}^{opaqueCMFormatDescription=}'),), 'CMTimeMappingMakeFromDictionary': (b'{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}^{__CFDictionary=}',), 'CMBufferQueueEnqueue': (sel32or64(b'l^{opaqueCMBufferQueue=}@', b'i^{opaqueCMBufferQueue=}@'),), 'CMBufferQueueInstallTrigger': (sel32or64(b'l^{opaqueCMBufferQueue=}^?^vi{_CMTime=qiIq}^^{opaqueCMBufferQueueTriggerToken=}', b'i^{opaqueCMBufferQueue=}^?^vi{_CMTime=qiIq}^^{opaqueCMBufferQueueTriggerToken=}'), '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^{opaqueCMBufferQueueTriggerToken=}'}}}}, 5: {'type_modifier': 'o'}}}), 'CMTimebaseGetMasterClock': (b'^{OpaqueCMClock=}^{OpaqueCMTimebase=}',), 'CMTextFormatDescriptionGetDefaultStyle': (sel32or64(b'l^{opaqueCMFormatDescription=}^S^Z^Z^Z^f^f', 
b'i^{opaqueCMFormatDescription=}^S^Z^Z^Z^d^d'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}, 5: {'type_modifier': 'o'}, 6: {'c_array_of_fixed_length': 4, 'type_modifier': 'o'}}}), 'CMSampleBufferGetSampleTimingInfo': (sel32or64(b'l^{opaqueCMSampleBuffer=}l^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}', b'i^{opaqueCMSampleBuffer=}q^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CMBufferQueueGetMaxPresentationTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMBufferQueue=}',), 'CMTimebaseCopyMasterClock': (b'^{OpaqueCMClock=}^{OpaqueCMTimebase=}', '', {'retval': {'already_cfretained': True}}), 'CMClockMightDrift': (b'Z^{OpaqueCMClock=}^{OpaqueCMClock=}',), 'CMMetadataCreateIdentifierForKeyAndKeySpace': (sel32or64(b'l^{__CFAllocator=}@^{__CFString=}^^{__CFString=}', b'i^{__CFAllocator=}@^{__CFString=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSetAttachments': (b'v@^{__CFDictionary=}I',), 'CMTimebaseGetTimeAndRate': (sel32or64(b'l^{OpaqueCMTimebase=}^{_CMTime=qiIq}^d', b'i^{OpaqueCMTimebase=}^{_CMTime=qiIq}^d'),), 'CMMetadataDataTypeRegistryDataTypeConformsToDataType': (b'Z^{__CFString=}^{__CFString=}',), 'CMVideoFormatDescriptionGetDimensions': (b'{_CMVideoDimensions=ii}^{opaqueCMFormatDescription=}',), 'CMMemoryPoolInvalidate': (b'v^{OpaqueCMMemoryPool=}',), 'CMBufferQueueRemoveTrigger': (sel32or64(b'l^{opaqueCMBufferQueue=}^{opaqueCMBufferQueueTriggerToken=}', b'i^{opaqueCMBufferQueue=}^{opaqueCMBufferQueueTriggerToken=}'),), 'CMSampleBufferCreate': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}Z^?^v^{opaqueCMFormatDescription=}ll^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}l^L^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}Z^?^v^{opaqueCMFormatDescription=}qq^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}q^Q^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {8: {'c_array_length_in_arg': 7, 'type_modifier': 'n'}, 11: {'already_cfretained': True, 'type_modifier': 'o'}, 10: {'c_array_length_in_arg': 9, 'type_modifier': 'n'}, 3: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{opaqueCMSampleBuffer=}'}, 1: {'type': b'^v'}}}}}}), 'CMTimeCodeFormatDescriptionGetFrameQuanta': (b'I^{opaqueCMFormatDescription=}',), 'CMTimeCodeFormatDescriptionCopyAsBigEndianTimeCodeDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSampleBufferSetDataBufferFromAudioBufferList': (sel32or64(b'l^{opaqueCMSampleBuffer=}^{__CFAllocator=}^{__CFAllocator=}I^{AudioBufferList=L[1{AudioBuffer=LL^v}]}', b'i^{opaqueCMSampleBuffer=}^{__CFAllocator=}^{__CFAllocator=}I^{AudioBufferList=I[1{AudioBuffer=II^v}]}'),), 'CMSwapBigEndianTextDescriptionToHost': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMBufferQueueGetTotalSize': (sel32or64(b'L^{opaqueCMBufferQueue=}', b'Q^{opaqueCMBufferQueue=}'),), 'CMTimebaseGetTimeWithTimeScale': (b'{_CMTime=qiIq}^{OpaqueCMTimebase=}iI',), 'CMTimebaseCreateWithMasterClock': 
(sel32or64(b'l^{__CFAllocator=}^{OpaqueCMClock=}^^{OpaqueCMTimebase=}', b'i^{__CFAllocator=}^{OpaqueCMClock=}^^{OpaqueCMTimebase=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSwapHostEndianTextDescriptionToBig': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMSyncGetRelativeRateAndAnchorTime': (sel32or64(b'l@@^d^{_CMTime=qiIq}^{_CMTime=qiIq}', b'i@@^d^{_CMTime=qiIq}^{_CMTime=qiIq}'), '', {'arguments': {2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'CMTimeRangeFromTimeToTime': (b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTime=qiIq}{_CMTime=qiIq}',), 'CMClockMakeHostTimeFromSystemUnits': (b'{_CMTime=qiIq}Q',), 'CMSwapHostEndianTimeCodeDescriptionToBig': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMAudioFormatDescriptionEqual': (b'Z^{opaqueCMFormatDescription=}^{opaqueCMFormatDescription=}I^I', '', {'arguments': {3: {'type_modifier': 'o'}}}), 'CMAudioSampleBufferCreateWithPacketDescriptionsAndMakeDataReadyHandler': (b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}Z^{opaqueCMFormatDescription=}q{_CMTime=qiIq}^{AudioStreamPacketDescription=qII}^^{opaqueCMSampleBuffer=}@?', '', {'retval': {'already_cfretained': True}, 'arguments': {8: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^{OpaqueCMSampleBuffer=}'}}}}, 6: {'c_array_length_in_arg': 5, 'type_modifier': 'n'}, 7: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSampleBufferMakeDataReady': (sel32or64(b'l^{opaqueCMSampleBuffer=}', b'i^{opaqueCMSampleBuffer=}'),), 'CMClosedCaptionFormatDescriptionCreateFromBigEndianClosedCaptionDescriptionData': (sel32or64(b'l^{__CFAllocator=}^CL^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^CQ^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimeSubtract': (b'{_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}',), 'CMSampleBufferGetSampleTimingInfoArray': (sel32or64(b'l^{opaqueCMSampleBuffer=}l^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^l', b'i^{opaqueCMSampleBuffer=}q^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^q'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CMSampleBufferGetSampleAttachmentsArray': (b'^{__CFArray=}^{opaqueCMSampleBuffer=}Z',), 'CMTimeAbsoluteValue': (b'{_CMTime=qiIq}{_CMTime=qiIq}',), 'CMBufferQueueTestTrigger': (b'Z^{opaqueCMBufferQueue=}^{opaqueCMBufferQueueTriggerToken=}',), 'CMFormatDescriptionGetMediaType': (sel32or64(b'L^{opaqueCMFormatDescription=}', b'I^{opaqueCMFormatDescription=}'),), 'CMTimebaseCopyMaster': (b'@^{OpaqueCMTimebase=}', '', {'retval': {'already_cfretained': True}}), 'CMSimpleQueueCreate': (sel32or64(b'l^{__CFAllocator=}i^^{opaqueCMSimpleQueue=}', b'i^{__CFAllocator=}i^^{opaqueCMSimpleQueue=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMMetadataFormatDescriptionGetKeyWithLocalID': (sel32or64(b'^{__CFDictionary=}^{opaqueCMFormatDescription=}L', b'^{__CFDictionary=}^{opaqueCMFormatDescription=}I'),), 'CMMetadataDataTypeRegistryRegisterDataType': (sel32or64(b'l^{__CFString=}^{__CFString=}^{__CFArray=}', 
b'i^{__CFString=}^{__CFString=}^{__CFArray=}'),), 'CMBufferQueueGetTypeID': (sel32or64(b'L', b'Q'),), 'CMBlockBufferGetDataPointer': (sel32or64(b'l^{OpaqueCMBlockBuffer=}L^L^L^^c', b'i^{OpaqueCMBlockBuffer=}Q^Q^Q^^c'),), 'CMSampleBufferGetImageBuffer': (b'^{__CVBuffer=}^{opaqueCMSampleBuffer=}',), 'CMBufferQueueCallForEachBuffer': (sel32or64(b'l^{opaqueCMBufferQueue=}^?^v', b'i^{opaqueCMBufferQueue=}^?^v'), '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'^v'}}}}}}), 'CMMuxedFormatDescriptionCreate': (sel32or64(b'l^{__CFAllocator=}L^{__CFDictionary=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}I^{__CFDictionary=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSampleBufferCreateForImageBufferWithMakeDataReadyHandler': (b'i^{__CFAllocator=}^{__CVBuffer=}Z^{opaqueCMFormatDescription=}^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^^{opaqueCMSampleBuffer=}@?', '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'already_cfretained': True, 'type_modifier': 'o'}, 6: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^{OpaqueCMSampleBuffer=}'}}}}}}), 'CMSwapHostEndianSoundDescriptionToBig': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMSwapBigEndianSoundDescriptionToHost': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMTimebaseGetMaster': (b'@^{OpaqueCMTimebase=}',), 'CMSampleBufferTrackDataReadiness': (sel32or64(b'l^{opaqueCMSampleBuffer=}^{opaqueCMSampleBuffer=}', b'i^{opaqueCMSampleBuffer=}^{opaqueCMSampleBuffer=}'),), 'CMClockConvertHostTimeToSystemUnits': (b'Q{_CMTime=qiIq}',), 'CMBlockBufferFillDataBytes': (sel32or64(b'lc^{OpaqueCMBlockBuffer=}LL', b'ic^{OpaqueCMBlockBuffer=}QQ'),), 'CMBufferQueueGetBufferCount': (sel32or64(b'l^{opaqueCMBufferQueue=}', b'q^{opaqueCMBufferQueue=}'),), 'CMTimeMappingMake': (b'{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMSampleBufferCopyPCMDataIntoAudioBufferList': (sel32or64(b'l^{opaqueCMSampleBuffer=}ii^{AudioBufferList=L[1{AudioBuffer=LL^v}]}', b'i^{opaqueCMSampleBuffer=}ii^{AudioBufferList=I[1{AudioBuffer=II^v}]}'), '', {'retval': {'already_cfretained': True}}), 'CMTimeMultiplyByRatio': (b'{_CMTime=qiIq}{_CMTime=qiIq}ii',), 'CMTextFormatDescriptionGetFontName': (sel32or64(b'l^{opaqueCMFormatDescription=}S^^{__CFString=}', b'i^{opaqueCMFormatDescription=}S^^{__CFString=}'), '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimeRangeCopyDescription': (b'^{__CFString=}^{__CFAllocator=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}', '', {'retval': {'already_cfretained': True}}), 'CMBufferQueueGetFirstPresentationTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMBufferQueue=}',), 'CMSimpleQueueGetHead': (b'@^{opaqueCMSimpleQueue=}',), 'CMMetadataDataTypeRegistryDataTypeIsBaseDataType': (b'Z^{__CFString=}',), 'CMMetadataFormatDescriptionCreateWithKeys': (sel32or64(b'l^{__CFAllocator=}L^{__CFArray=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}I^{__CFArray=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 
'CMAudioDeviceClockSetAudioDeviceUID': (sel32or64(b'l^{OpaqueCMClock=}^{__CFString=}', b'i^{OpaqueCMClock=}^{__CFString=}'),), 'CMSwapHostEndianMetadataDescriptionToBig': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMBlockBufferGetDataLength': (sel32or64(b'L^{OpaqueCMBlockBuffer=}', b'Q^{OpaqueCMBlockBuffer=}'),), 'CMSampleBufferGetNumSamples': (sel32or64(b'l^{opaqueCMSampleBuffer=}', b'q^{opaqueCMSampleBuffer=}'),), 'CMSwapBigEndianClosedCaptionDescriptionToHost': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMSampleBufferCopySampleBufferForRange': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMSampleBuffer=}{_CFRange=ll}^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{opaqueCMSampleBuffer=}{_CFRange=qq}^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMGetAttachment': (b'@@^{__CFString=}^I', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CMSampleBufferGetDecodeTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMSampleBuffer=}',), 'CMTimeCodeFormatDescriptionCreateFromBigEndianTimeCodeDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMAudioFormatDescriptionCopyAsBigEndianSoundDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTextFormatDescriptionCreateFromBigEndianTextDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}L^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}I^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMBufferQueueGetFirstDecodeTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMBufferQueue=}',), 'CMSampleBufferCallForEachSample': (sel32or64(b'l^{opaqueCMSampleBuffer=}^?^v', b'i^{opaqueCMSampleBuffer=}^?^v'), '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{opaqueCMSampleBuffer=}'}, 1: {'type': b'q'}, 2: {'type': b'^v'}}}, 'callable_retained': False}}}), 'CMSwapHostEndianClosedCaptionDescriptionToBig': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMSampleBufferGetDuration': (b'{_CMTime=qiIq}^{opaqueCMSampleBuffer=}',), 'CMMetadataFormatDescriptionCreateWithMetadataSpecifications': (sel32or64(b'l^{__CFAllocator=}L^{__CFArray=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}I^{__CFArray=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSwapHostEndianImageDescriptionToBig': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMSimpleQueueGetCapacity': (b'i^{opaqueCMSimpleQueue=}',), 'CMBufferQueueContainsEndOfData': (b'Z^{opaqueCMBufferQueue=}',), 'CMTimeRangeMake': 
(b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTime=qiIq}{_CMTime=qiIq}',), 'CMTimeMappingShow': (b'v{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}',), 'CMMemoryPoolGetTypeID': (sel32or64(b'L', b'Q'),), 'CMVideoFormatDescriptionCreateFromBigEndianImageDescriptionData': (sel32or64(b'l^{__CFAllocator=}^CLL^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^CQI^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 5: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimebaseGetMasterTimebase': (b'^{OpaqueCMTimebase=}^{OpaqueCMTimebase=}',), 'CMTimeMakeWithSeconds': (b'{_CMTime=qiIq}di',), 'CMVideoFormatDescriptionGetExtensionKeysCommonWithImageBuffers': (b'^{__CFArray=}',), 'CMClockGetTypeID': (sel32or64(b'L', b'Q'),), 'CMTextFormatDescriptionGetJustification': (sel32or64(b'l^{opaqueCMFormatDescription=}^c^c', b'i^{opaqueCMFormatDescription=}^c^c'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CMSampleBufferHasDataFailed': (sel32or64(b'Z^{opaqueCMSampleBuffer=}^l', b'Z^{opaqueCMSampleBuffer=}^i'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CMTimeMappingCopyAsDictionary': (b'^{__CFDictionary=}{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'CMFormatDescriptionGetMediaSubType': (sel32or64(b'L^{opaqueCMFormatDescription=}', b'I^{opaqueCMFormatDescription=}'),), 'CMSwapBigEndianMetadataDescriptionToHost': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMBlockBufferAppendMemoryBlock': (sel32or64(b'l^{OpaqueCMBlockBuffer=}^vL^{__CFAllocator=}^{_CMBlockBufferCustomBlockSource=I^?^?^v}LLI', b'i^{OpaqueCMBlockBuffer=}^vQ^{__CFAllocator=}^{_CMBlockBufferCustomBlockSource=I^?^?^v}QQI'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'type_modifier': 'n'}}}), 'CMTimebaseRemoveTimer': (sel32or64(b'l^{OpaqueCMTimebase=}^{__CFRunLoopTimer=}', b'i^{OpaqueCMTimebase=}^{__CFRunLoopTimer=}'),), 'CMSimpleQueueEnqueue': (sel32or64(b'l^{opaqueCMSimpleQueue=}@', b'i^{opaqueCMSimpleQueue=}@'),), 'CMTimeFoldIntoRange': (b'{_CMTime=qiIq}{_CMTime=qiIq}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMFormatDescriptionGetExtension': (b'@^{opaqueCMFormatDescription=}^{__CFString=}',), 'CMAudioFormatDescriptionGetRichestDecodableFormat': (sel32or64(b'^{AudioFormatListItem={AudioStreamBasicDescription=dLLLLLLLL}L}^{opaqueCMFormatDescription=}', b'^{AudioFormatListItem={AudioStreamBasicDescription=dIIIIIIII}I}^{opaqueCMFormatDescription=}'),), 'CMSyncGetRelativeRate': (b'd@@',), 'CMMetadataCreateKeySpaceFromIdentifier': (sel32or64(b'l^{__CFAllocator=}^{__CFString=}^^{__CFString=}', b'i^{__CFAllocator=}^{__CFString=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimebaseCopyUltimateMasterClock': (b'^{OpaqueCMClock=}^{OpaqueCMTimebase=}', '', {'retval': {'already_cfretained': True}}), 'CMBufferQueueGetCallbacksForUnsortedSampleBuffers': (b'^{_CMBufferCallbacks=I^v^?^?^?^?^?^{__CFString=}^?}',), 'CMSampleBufferGetDataBuffer': (b'^{OpaqueCMBlockBuffer=}^{opaqueCMSampleBuffer=}',), 'CMSampleBufferInvalidate': (sel32or64(b'l^{opaqueCMSampleBuffer=}', b'i^{opaqueCMSampleBuffer=}'),), 
'CMBufferQueueGetDuration': (b'{_CMTime=qiIq}^{opaqueCMBufferQueue=}',), 'CMMetadataFormatDescriptionCopyAsBigEndianMetadataDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSetAttachment': (b'v@^{__CFString=}@I',), 'CMBufferQueueDequeueAndRetain': (b'@^{opaqueCMBufferQueue=}', '', {'retval': {'already_cfretained': True}}), 'CMTimebaseGetUltimateMasterClock': (b'^{OpaqueCMClock=}^{OpaqueCMTimebase=}',), 'CMTimeCopyDescription': (b'^{__CFString=}^{__CFAllocator=}{_CMTime=qiIq}', '', {'retval': {'already_cfretained': True}}), 'CMTimebaseRemoveTimerDispatchSource': (sel32or64(b'l^{OpaqueCMTimebase=}@', b'i^{OpaqueCMTimebase=}@'),), 'CMCopyDictionaryOfAttachments': (b'^{__CFDictionary=}^{__CFAllocator=}@I', '', {'retval': {'already_cfretained': True}}), 'CMBufferQueueSetValidationCallback': (sel32or64(b'l^{opaqueCMBufferQueue=}^?^v', b'i^{opaqueCMBufferQueue=}^?^v'), '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{opaqueCMBufferQueue=}'}, 1: {'type': b'@'}, 2: {'type': b'^v'}}}}}}), 'CMTimeGetSeconds': (b'd{_CMTime=qiIq}',), 'CMSampleBufferGetSampleSize': (sel32or64(b'L^{opaqueCMSampleBuffer=}l', b'Q^{opaqueCMSampleBuffer=}q'),), 'CMBufferQueueGetEndPresentationTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMBufferQueue=}',), 'CMSampleBufferGetTypeID': (sel32or64(b'L', b'Q'),), 'CMAudioDeviceClockGetAudioDevice': (sel32or64(b'l^{OpaqueCMClock=}^^{__CFString=}^L^Z', b'i^{OpaqueCMClock=}^^{__CFString=}^I^Z'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CMAudioDeviceClockSetAudioDeviceID': (sel32or64(b'l^{OpaqueCMClock=}L', b'i^{OpaqueCMClock=}I'),), 'CMTimeCodeFormatDescriptionGetFrameDuration': (b'{_CMTime=qiIq}^{opaqueCMFormatDescription=}',), 'CMTimebaseGetTime': (b'{_CMTime=qiIq}^{OpaqueCMTimebase=}',), 'CMSimpleQueueDequeue': (b'@^{opaqueCMSimpleQueue=}',), 'CMTimeRangeMakeFromDictionary': (b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^{__CFDictionary=}',), 'CMTimebaseGetEffectiveRate': (b'd^{OpaqueCMTimebase=}',), 'CMVideoFormatDescriptionMatchesImageBuffer': (b'Z^{opaqueCMFormatDescription=}^{__CVBuffer=}',), 'CMSampleBufferCreateReadyWithImageBuffer': (sel32or64(b'l^{__CFAllocator=}^{__CVBuffer=}^{opaqueCMFormatDescription=}^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{__CVBuffer=}^{opaqueCMFormatDescription=}^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'n'}, 4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimeMultiply': (b'{_CMTime=qiIq}{_CMTime=qiIq}i',), 'CMSampleBufferCreateCopy': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMSampleBuffer=}^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{opaqueCMSampleBuffer=}^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMMemoryPoolCreate': (b'^{OpaqueCMMemoryPool=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CMSampleBufferGetAudioStreamPacketDescriptions': (sel32or64(b'l^{opaqueCMSampleBuffer=}L^{AudioStreamPacketDescription=qLL}^L', 
b'i^{opaqueCMSampleBuffer=}Q^{AudioStreamPacketDescription=qII}^Q'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CMMetadataCreateKeyFromIdentifier': (sel32or64(b'l^{__CFAllocator=}^{__CFString=}^@', b'i^{__CFAllocator=}^{__CFString=}^@'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMBufferQueueInstallTriggerWithIntegerThreshold': (sel32or64(b'l^{opaqueCMBufferQueue=}^?^vil^^{opaqueCMBufferQueueTriggerToken=}', b'i^{opaqueCMBufferQueue=}^?^viq^^{opaqueCMBufferQueueTriggerToken=}'), '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^{opaqueCMBufferQueueTriggerToken=}'}}}}, 5: {'type_modifier': 'o'}}}), 'CMAudioClockCreate': (b'i^{__CFAllocator=}^^{OpaqueCMClock=}', '', {'retval': {'already_cfretained': True}}), 'CMMetadataFormatDescriptionCreateWithMetadataFormatDescriptionAndMetadataSpecifications': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFArray=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFArray=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMMemoryPoolGetAllocator': (b'^{__CFAllocator=}^{OpaqueCMMemoryPool=}',), 'CMSampleBufferCallBlockForEachSample': (sel32or64(b'l^{opaqueCMSampleBuffer=}@?', b'i^{opaqueCMSampleBuffer=}@?'), '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^{opaqueCMSampleBuffer=}'}, 2: {'type': 'q'}}}, 'block': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{opaqueCMSampleBuffer=}'}, 1: {'type': b'q'}}}}}}), 'CMTimeRangeShow': (b'v{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMBlockBufferCreateContiguous': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFAllocator=}^{_CMBlockBufferCustomBlockSource=I^?^?^v}LLI^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFAllocator=}^{_CMBlockBufferCustomBlockSource=I^?^?^v}QQI^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'n'}, 7: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMBufferQueueDequeueIfDataReadyAndRetain': (b'@^{opaqueCMBufferQueue=}', '', {'retval': {'already_cfretained': True}}), 'CMBlockBufferGetTypeID': (sel32or64(b'L', b'Q'),), 'CMTimeCodeFormatDescriptionCreateFromBigEndianTimeCodeDescriptionData': (sel32or64(b'l^{__CFAllocator=}^CL^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^CQ^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMBlockBufferReplaceDataBytes': (sel32or64(b'l^v^{OpaqueCMBlockBuffer=}LL', b'i^v^{OpaqueCMBlockBuffer=}QQ'), '', {'arguments': {0: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CMDoesBigEndianSoundDescriptionRequireLegacyCBRSampleTableLayout': (b'Z^{OpaqueCMBlockBuffer=}^{__CFString=}',), 'CMBufferQueueGetMinPresentationTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMBufferQueue=}',), 'CMTimebaseSetRateAndAnchorTime': (sel32or64(b'l^{OpaqueCMTimebase=}d{_CMTime=qiIq}{_CMTime=qiIq}', b'i^{OpaqueCMTimebase=}d{_CMTime=qiIq}{_CMTime=qiIq}'),), 'CMTimebaseSetRate': (sel32or64(b'l^{OpaqueCMTimebase=}d', b'i^{OpaqueCMTimebase=}d'),), 
'CMMetadataDataTypeRegistryGetBaseDataTypeForConformingDataType': (b'^{__CFString=}^{__CFString=}',), 'CMSimpleQueueGetCount': (b'i^{opaqueCMSimpleQueue=}',), 'CMSampleBufferSetDataBuffer': (sel32or64(b'l^{opaqueCMSampleBuffer=}^{OpaqueCMBlockBuffer=}', b'i^{opaqueCMSampleBuffer=}^{OpaqueCMBlockBuffer=}'),), 'CMBlockBufferIsEmpty': (b'Z^{OpaqueCMBlockBuffer=}',), 'CMSyncConvertTime': (b'{_CMTime=qiIq}{_CMTime=qiIq}@@',), 'CMSyncMightDrift': (b'Z@@',), 'CMTextFormatDescriptionCreateFromBigEndianTextDescriptionData': (sel32or64(b'l^{__CFAllocator=}^CL^{__CFString=}L^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^CQ^{__CFString=}I^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 5: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimeCompare': (b'i{_CMTime=qiIq}{_CMTime=qiIq}',), 'CMAudioFormatDescriptionGetMagicCookie': (sel32or64(b'^v^{opaqueCMFormatDescription=}^L', b'^v^{opaqueCMFormatDescription=}^Q'), '', {'retval': {'c_array_length_in_arg': 1}, 'arguments': {1: {'type_modifier': 'o'}}}), 'CMSwapBigEndianImageDescriptionToHost': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMSampleBufferSetDataReady': (sel32or64(b'l^{opaqueCMSampleBuffer=}', b'i^{opaqueCMSampleBuffer=}'),), 'CMMetadataDataTypeRegistryGetDataTypeDescription': (b'^{__CFString=}^{__CFString=}',), 'CMFormatDescriptionGetExtensions': (b'^{__CFDictionary=}^{opaqueCMFormatDescription=}',), 'CMMetadataFormatDescriptionCreateFromBigEndianMetadataDescriptionData': (sel32or64(b'l^{__CFAllocator=}^CL^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^CQ^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSampleBufferDataIsReady': (b'Z^{opaqueCMSampleBuffer=}',), 'CMBlockBufferCreateWithMemoryBlock': (sel32or64(b'l^{__CFAllocator=}^vL^{__CFAllocator=}^{_CMBlockBufferCustomBlockSource=I^?^?^v}LLI^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}^vQ^{__CFAllocator=}^{_CMBlockBufferCustomBlockSource=I^?^?^v}QQI^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {8: {'already_cfretained': True, 'type_modifier': 'o'}, 1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'type_modifier': 'n'}}}), 'CMBufferQueueGetHead': (b'@^{opaqueCMBufferQueue=}',), 'CMBlockBufferAppendBufferReference': (sel32or64(b'l^{OpaqueCMBlockBuffer=}^{OpaqueCMBlockBuffer=}LLI', b'i^{OpaqueCMBlockBuffer=}^{OpaqueCMBlockBuffer=}QQI'),), 'CMTimeMakeFromDictionary': (b'{_CMTime=qiIq}^{__CFDictionary=}',), 'CMSampleBufferSetInvalidateHandler': (sel32or64(b'l^{opaqueCMSampleBuffer=}@?', b'i^{opaqueCMSampleBuffer=}@?'), '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^{opaqueCMSampleBuffer=}'}}}}}}), 'CMTimebaseSetTimerNextFireTime': (sel32or64(b'l^{OpaqueCMTimebase=}^{__CFRunLoopTimer=}{_CMTime=qiIq}I', b'i^{OpaqueCMTimebase=}^{__CFRunLoopTimer=}{_CMTime=qiIq}I'),), 'CMTimeMappingMakeEmpty': (b'{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer': 
(sel32or64(b'l^{opaqueCMSampleBuffer=}^L^{AudioBufferList=L[1{AudioBuffer=LL^v}]}L^{__CFAllocator=}^{__CFAllocator=}I^^{OpaqueCMBlockBuffer=}', b'i^{opaqueCMSampleBuffer=}^Q^{AudioBufferList=I[1{AudioBuffer=II^v}]}Q^{__CFAllocator=}^{__CFAllocator=}I^^{OpaqueCMBlockBuffer=}'), '', {'arguments': {1: {'type_modifier': 'o'}, 7: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimeConvertScale': (b'{_CMTime=qiIq}{_CMTime=qiIq}iI',), 'CMMetadataDataTypeRegistryGetBaseDataTypes': (b'^{__CFArray=}',), 'CMFormatDescriptionGetTypeID': (sel32or64(b'L', b'Q'),), 'CMVideoFormatDescriptionCreateFromBigEndianImageDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}L^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}I^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMBufferQueueInstallTriggerHandler': (b'i@I{_CMTime=qiIq}^{opaqueCMBufferQueueTriggerToken=}@?', '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^{opaqueCMBufferQueueTriggerToken=}'}}}}}}), 'CMClockGetAnchorTime': (sel32or64(b'l^{OpaqueCMClock=}^{_CMTime=qiIq}^{_CMTime=qiIq}', b'i^{OpaqueCMClock=}^{_CMTime=qiIq}^{_CMTime=qiIq}'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CMBlockBufferCopyDataBytes': (sel32or64(b'l^{OpaqueCMBlockBuffer=}LL^v', b'i^{OpaqueCMBlockBuffer=}QQ^v'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'o'}}}), 'CMSampleBufferSetOutputPresentationTimeStamp': (sel32or64(b'l^{opaqueCMSampleBuffer=}{_CMTime=qiIq}', b'i^{opaqueCMSampleBuffer=}{_CMTime=qiIq}'),), 'CMBlockBufferIsRangeContiguous': (sel32or64(b'Z^{OpaqueCMBlockBuffer=}LL', b'Z^{OpaqueCMBlockBuffer=}QQ'),), 'CMMetadataCreateKeyFromIdentifierAsCFData': (sel32or64(b'l^{__CFAllocator=}^{__CFString=}^^{__CFData=}', b'i^{__CFAllocator=}^{__CFString=}^^{__CFData=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMAudioDeviceClockCreateFromAudioDeviceID': (sel32or64(b'l^{__CFAllocator=}L^^{OpaqueCMClock=}', b'i^{__CFAllocator=}I^^{OpaqueCMClock=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMBufferQueueInstallTriggerHandlerWithIntegerThreshold': (b'i@Iq^{opaqueCMBufferQueueTriggerToken=}@?', '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^{opaqueCMBufferQueueTriggerToken=}'}}}}}}), 'CMTimeRangeEqual': (b'Z{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMTimeRangeGetIntersection': (b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMClockGetHostTimeClock': (b'^{OpaqueCMClock=}',), 'CMTimeMapTimeFromRangeToRange': (b'{_CMTime=qiIq}{_CMTime=qiIq}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMBufferQueueReset': (sel32or64(b'l^{opaqueCMBufferQueue=}', b'i^{opaqueCMBufferQueue=}'),), 'CMTimeMapDurationFromRangeToRange': (b'{_CMTime=qiIq}{_CMTime=qiIq}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMTextFormatDescriptionGetDefaultTextBox': 
(sel32or64(b'l^{opaqueCMFormatDescription=}Zf^{CGRect={CGPoint=ff}{CGSize=ff}}', b'i^{opaqueCMFormatDescription=}Zd^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'CMTimeRangeGetEnd': (b'{_CMTime=qiIq}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMBufferQueueSetValidationHandler': (b'i@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}, 2: {'type': '@'}}}}}}), 'CMTimeAdd': (b'{_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}',), 'CMTimeRangeContainsTimeRange': (b'Z{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMSampleBufferSetDataFailed': (sel32or64(b'l^{opaqueCMSampleBuffer=}l', b'i^{opaqueCMSampleBuffer=}i'),), 'CMAudioFormatDescriptionGetMostCompatibleFormat': (sel32or64(b'^{AudioFormatListItem={AudioStreamBasicDescription=dLLLLLLLL}L}^{opaqueCMFormatDescription=}', b'^{AudioFormatListItem={AudioStreamBasicDescription=dIIIIIIII}I}^{opaqueCMFormatDescription=}'),), 'CMTimeClampToRange': (b'{_CMTime=qiIq}{_CMTime=qiIq}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMFormatDescriptionEqualIgnoringExtensionKeys': (b'Z^{opaqueCMFormatDescription=}^{opaqueCMFormatDescription=}@@',), 'CMSampleBufferIsValid': (b'Z^{opaqueCMSampleBuffer=}',), 'CMAudioSampleBufferCreateWithPacketDescriptions': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}Z^?^v^{opaqueCMFormatDescription=}l{_CMTime=qiIq}^{AudioStreamPacketDescription=qLL}^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}Z^?^v^{opaqueCMFormatDescription=}q{_CMTime=qiIq}^{AudioStreamPacketDescription=qII}^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {8: {'c_array_length_in_arg': 6, 'type_modifier': 'n'}, 9: {'already_cfretained': True, 'type_modifier': 'o'}, 3: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{opaqueCMSampleBuffer=}'}, 1: {'type': b'^v'}}}}}}), 'CMBufferQueueGetMinDecodeTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMBufferQueue=}',), 'CMMemoryPoolFlush': (b'v^{OpaqueCMMemoryPool=}',), 'CMMetadataFormatDescriptionGetIdentifiers': (b'^{__CFArray=}^{opaqueCMFormatDescription=}',), 'CMVideoFormatDescriptionCreateForImageBuffer': (sel32or64(b'l^{__CFAllocator=}^{__CVBuffer=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{__CVBuffer=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMAudioFormatDescriptionGetFormatList': (sel32or64(b'^{AudioFormatListItem={AudioStreamBasicDescription=dLLLLLLLL}L}^{opaqueCMFormatDescription=}^L', b'^{AudioFormatListItem={AudioStreamBasicDescription=dIIIIIIII}I}^{opaqueCMFormatDescription=}^Q'), '', {'retval': {'c_array_length_in_arg': 1}, 'arguments': {1: {'type_modifier': 'o'}}}), 'CMSampleBufferGetFormatDescription': (b'^{opaqueCMFormatDescription=}^{opaqueCMSampleBuffer=}',), 'CMTextFormatDescriptionCopyAsBigEndianTextDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSampleBufferGetOutputSampleTimingInfoArray': (sel32or64(b'l^{opaqueCMSampleBuffer=}l^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^l', 
b'i^{opaqueCMSampleBuffer=}q^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^q'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CMVideoFormatDescriptionGetHEVCParameterSetAtIndex': (sel32or64(b'l^{opaqueCMFormatDescription=}L^^C^L^L^i', b'i^{opaqueCMFormatDescription=}Q^^C^Q^Q^i'),), 'CMTimebaseSetTime': (sel32or64(b'l^{OpaqueCMTimebase=}{_CMTime=qiIq}', b'i^{OpaqueCMTimebase=}{_CMTime=qiIq}'),), 'CMVideoFormatDescriptionGetH264ParameterSetAtIndex': (sel32or64(b'l^{opaqueCMFormatDescription=}L^^C^L^L^i', b'i^{opaqueCMFormatDescription=}Q^^C^Q^Q^i'),), 'CMMetadataDataTypeRegistryGetConformingDataTypes': (b'^{__CFArray=}^{__CFString=}',), 'CMBufferQueueCreate': (sel32or64(b'l^{__CFAllocator=}l^{_CMBufferCallbacks=I^v^?^?^?^?^?^{__CFString=}^?}^^{opaqueCMBufferQueue=}', b'i^{__CFAllocator=}q^{_CMBufferCallbacks=I^v^?^?^?^?^?^{__CFString=}^?}^^{opaqueCMBufferQueue=}'), '', {'retval': {'already_cfretained': True}}), 'CMSyncGetTime': (b'{_CMTime=qiIq}@',), 'CMAudioFormatDescriptionCreateFromBigEndianSoundDescriptionData': (sel32or64(b'l^{__CFAllocator=}^CL^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^CQ^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMMetadataDataTypeRegistryDataTypeIsRegistered': (b'Z^{__CFString=}',), 'CMAudioSampleBufferCreateReadyWithPacketDescriptions': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{opaqueCMFormatDescription=}l{_CMTime=qiIq}^{AudioStreamPacketDescription=qLL}^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{opaqueCMFormatDescription=}q{_CMTime=qiIq}^{AudioStreamPacketDescription=qII}^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}, 6: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSampleBufferGetOutputDuration': (b'{_CMTime=qiIq}^{opaqueCMSampleBuffer=}',), 'CMVideoFormatDescriptionGetPresentationDimensions': (sel32or64(b'{CGSize=ff}^{opaqueCMFormatDescription=}ZZ', b'{CGSize=dd}^{opaqueCMFormatDescription=}ZZ'),), 'CMTimeMake': (b'{_CMTime=qiIq}qi',), 'CMTimebaseNotificationBarrier': (sel32or64(b'l^{OpaqueCMTimebase=}', b'i^{OpaqueCMTimebase=}'),), 'CMTimebaseSetTimerDispatchSourceNextFireTime': (sel32or64(b'l^{OpaqueCMTimebase=}@{_CMTime=qiIq}I', b'i^{OpaqueCMTimebase=}@{_CMTime=qiIq}I'),), 'CMClockGetTime': (b'{_CMTime=qiIq}^{OpaqueCMClock=}',), 'CMSampleBufferCreateCopyWithNewTiming': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMSampleBuffer=}l^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{opaqueCMSampleBuffer=}q^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimebaseSetTimerDispatchSourceToFireImmediately': (sel32or64(b'l^{OpaqueCMTimebase=}@', b'i^{OpaqueCMTimebase=}@'),), 'CMTimeRangeCopyAsDictionary': (b'^{__CFDictionary=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'CMTimeMultiplyByFloat64': (b'{_CMTime=qiIq}{_CMTime=qiIq}d',), 'CMBlockBufferAssureBlockMemory': (sel32or64(b'l^{OpaqueCMBlockBuffer=}', 
b'i^{OpaqueCMBlockBuffer=}'),), 'CMAudioDeviceClockCreate': (sel32or64(b'l^{__CFAllocator=}^{__CFString=}^^{OpaqueCMClock=}', b'i^{__CFAllocator=}^{__CFString=}^^{OpaqueCMClock=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSampleBufferGetTotalSampleSize': (sel32or64(b'L^{opaqueCMSampleBuffer=}', b'Q^{opaqueCMSampleBuffer=}'),), 'CMClockInvalidate': (b'v^{OpaqueCMClock=}',), 'CMTimebaseAddTimer': (sel32or64(b'l^{OpaqueCMTimebase=}^{__CFRunLoopTimer=}^{__CFRunLoop=}', b'i^{OpaqueCMTimebase=}^{__CFRunLoopTimer=}^{__CFRunLoop=}'),), 'CMTimeRangeGetUnion': (b'{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}',), 'CMSampleBufferGetPresentationTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMSampleBuffer=}',), 'CMClosedCaptionFormatDescriptionCopyAsBigEndianClosedCaptionDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}^{opaqueCMFormatDescription=}^{__CFString=}^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMBlockBufferAccessDataBytes': (sel32or64(b'l^{OpaqueCMBlockBuffer=}LL^v^^c', b'i^{OpaqueCMBlockBuffer=}QQ^v^^c'), '', {'suggestion': 'Use CMBlockBufferCopyDataBytes'}), 'CMTimeMappingCopyDescription': (b'^{__CFString=}^{__CFAllocator=}{_CMTimeMapping={_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}}', '', {'retval': {'already_cfretained': True}}), 'CMAudioFormatDescriptionCreateSummary': (sel32or64(b'l^{__CFAllocator=}^{__CFArray=}I^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{__CFArray=}I^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMAudioFormatDescriptionCreateFromBigEndianSoundDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMSampleBufferCreateForImageBuffer': (sel32or64(b'l^{__CFAllocator=}^{__CVBuffer=}Z^?^v^{opaqueCMFormatDescription=}^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{__CVBuffer=}Z^?^v^{opaqueCMFormatDescription=}^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{opaqueCMSampleBuffer=}'}, 1: {'type': b'^v'}}}}, 6: {'type_modifier': 'n'}, 7: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMAudioFormatDescriptionCreate': (sel32or64(b'l^{__CFAllocator=}^{AudioStreamBasicDescription=dLLLLLLLL}L^{AudioChannelLayout=LLL[1{AudioChannelDescription=LL[3f]}]}L^v^{__CFDictionary=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{AudioStreamBasicDescription=dIIIIIIII}Q^{AudioChannelLayout=III[1{AudioChannelDescription=II[3f]}]}Q^v^{__CFDictionary=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'n'}, 5: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}, 7: {'already_cfretained': True, 
'type_modifier': 'o'}}}), 'CMSampleBufferGetOutputPresentationTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMSampleBuffer=}',), 'CMBufferQueueGetCallbacksForSampleBuffersSortedByOutputPTS': (b'^{_CMBufferCallbacks=I^v^?^?^?^?^?^{__CFString=}^?}',), 'CMVideoFormatDescriptionCreate': (sel32or64(b'l^{__CFAllocator=}Lii^{__CFDictionary=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}Iii^{__CFDictionary=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimeRangeContainsTime': (b'Z{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}{_CMTime=qiIq}',), 'CMTimeCopyAsDictionary': (b'^{__CFDictionary=}{_CMTime=qiIq}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'CMMetadataFormatDescriptionCreateByMergingMetadataFormatDescriptions': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMFormatDescription=}^{opaqueCMFormatDescription=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{opaqueCMFormatDescription=}^{opaqueCMFormatDescription=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimebaseCopyMasterTimebase': (b'^{OpaqueCMTimebase=}^{OpaqueCMTimebase=}', '', {'retval': {'already_cfretained': True}}), 'CMVideoFormatDescriptionCopyAsBigEndianImageDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{opaqueCMFormatDescription=}L^{__CFString=}^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}^{opaqueCMFormatDescription=}I^{__CFString=}^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMFormatDescriptionEqual': (b'Z^{opaqueCMFormatDescription=}^{opaqueCMFormatDescription=}',), 'CMTimebaseSetAnchorTime': (sel32or64(b'l^{OpaqueCMTimebase=}{_CMTime=qiIq}{_CMTime=qiIq}', b'i^{OpaqueCMTimebase=}{_CMTime=qiIq}{_CMTime=qiIq}'),), 'CMSimpleQueueReset': (sel32or64(b'l^{opaqueCMSimpleQueue=}', b'i^{opaqueCMSimpleQueue=}'),), 'CMSampleBufferGetOutputDecodeTimeStamp': (b'{_CMTime=qiIq}^{opaqueCMSampleBuffer=}',), 'CMTimebaseGetTypeID': (sel32or64(b'L', b'Q'),), 'CMSampleBufferCreateReady': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{opaqueCMFormatDescription=}ll^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}l^L^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{opaqueCMFormatDescription=}qq^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}q^Q^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {8: {'already_cfretained': True, 'type_modifier': 'o'}, 5: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}, 7: {'c_array_length_in_arg': 6, 'type_modifier': 'n'}}}), 'CMTimeShow': (b'v{_CMTime=qiIq}',), 'CMSampleBufferSetInvalidateCallback': (sel32or64(b'l^{opaqueCMSampleBuffer=}^?Q', b'i^{opaqueCMSampleBuffer=}^?Q'), '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{opaqueCMSampleBuffer=}'}, 1: {'type': b'Q'}}}}}}), 'CMBufferQueueResetWithCallback': (sel32or64(b'l^{opaqueCMBufferQueue=}^?^v', b'i^{opaqueCMBufferQueue=}^?^v'), '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'^v'}}}, 'callable_retained': False}}}), 'CMTimeMakeWithEpoch': (b'{_CMTime=qiIq}qiq',), 'CMTimeCodeFormatDescriptionGetTimeCodeFlags': (b'I^{opaqueCMFormatDescription=}',), 
'CMClosedCaptionFormatDescriptionCreateFromBigEndianClosedCaptionDescriptionBlockBuffer': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{__CFString=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMVideoFormatDescriptionGetCleanAperture': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{opaqueCMFormatDescription=}Z', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{opaqueCMFormatDescription=}Z'),), 'CMSwapBigEndianTimeCodeDescriptionToHost': (sel32or64(b'l^CL', b'i^CQ'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'N'}}}), 'CMTimebaseAddTimerDispatchSource': (sel32or64(b'l^{OpaqueCMTimebase=}@', b'i^{OpaqueCMTimebase=}@'),), 'CMTimeMinimum': (b'{_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}',), 'CMAudioFormatDescriptionGetChannelLayout': (sel32or64(b'^{AudioChannelLayout=LLL[1{AudioChannelDescription=LL[3f]}]}^{opaqueCMFormatDescription=}^L', b'^{AudioChannelLayout=III[1{AudioChannelDescription=II[3f]}]}^{opaqueCMFormatDescription=}^Q'), '', {'retval': {'c_array_length_in_arg': 1}, 'arguments': {1: {'type_modifier': 'o'}}}), 'CMTextFormatDescriptionGetDisplayFlags': (sel32or64(b'l^{opaqueCMFormatDescription=}^I', b'i^{opaqueCMFormatDescription=}^I'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CMSimpleQueueGetTypeID': (sel32or64(b'L', b'Q'),), 'CMTimeMaximum': (b'{_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}',), 'CMSampleBufferCreateWithMakeDataReadyHandler': (b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}Z^{OpaqueCMFormatDescription=}ll^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}^^{opaqueCMSampleBuffer=}@?^^{opaqueCMSampleBuffer=}@?', '', {'retval': {'already_cfretained': True}, 'arguments': {8: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^{OpaqueCMSampleBuffer=}'}}}}, 6: {'c_array_length_in_arg': 5, 'type_modifier': 'n'}, 7: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMTimeCodeFormatDescriptionCreate': (sel32or64(b'l^{__CFAllocator=}L{_CMTime=qiIq}II^{__CFDictionary=}^^{opaqueCMFormatDescription=}', b'i^{__CFAllocator=}I{_CMTime=qiIq}II^{__CFDictionary=}^^{opaqueCMFormatDescription=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {6: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMRemoveAttachment': (b'v@^{__CFString=}',), 'CMPropagateAttachments': (b'v@@',), 'CMBlockBufferCreateWithBufferReference': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}LLI^^{OpaqueCMBlockBuffer=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}QQI^^{OpaqueCMBlockBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMRemoveAllAttachments': (b'v@',), 'CMTimebaseGetRate': (b'd^{OpaqueCMTimebase=}',), 'CMSampleBufferGetSampleSizeArray': (sel32or64(b'l^{opaqueCMSampleBuffer=}l^L^l', b'i^{opaqueCMSampleBuffer=}q^Q^q'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CMTimebaseSetTimerToFireImmediately': (sel32or64(b'l^{OpaqueCMTimebase=}^{__CFRunLoopTimer=}', b'i^{OpaqueCMTimebase=}^{__CFRunLoopTimer=}'),), 'CMBufferQueueIsAtEndOfData': (b'Z^{opaqueCMBufferQueue=}',)} -aliases = {'CMSubtitleFormatDescriptionGetFormatType': 'CMFormatDescriptionGetMediaSubType', 'COREMEDIA_DECLARE_BRIDGED_TYPES': 'COREMEDIA_TRUE', 'CMVideoFormatDescriptionGetCodecType': 
'CMFormatDescriptionGetMediaSubType', 'COREMEDIA_DECLARE_NULLABILITY_BEGIN_END': 'COREMEDIA_TRUE', 'kCMFormatDescriptionExtension_YCbCrMatrix': 'kCVImageBufferYCbCrMatrixKey', 'kCMFormatDescriptionExtension_FieldCount': 'kCVImageBufferFieldCountKey', 'CM_RETURNS_NOT_RETAINED_PARAMETER': 'CF_RETURNS_NOT_RETAINED', 'kCMFormatDescriptionExtension_GammaLevel': 'kCVImageBufferGammaLevelKey', 'kCMFormatDescriptionChromaLocation_Bottom': 'kCVImageBufferChromaLocation_Bottom', 'kCMFormatDescriptionKey_CleanApertureVerticalOffset': 'kCVImageBufferCleanApertureVerticalOffsetKey', 'CM_RETURNS_RETAINED': 'CF_RETURNS_RETAINED', 'kCMFormatDescriptionYCbCrMatrix_SMPTE_240M_1995': 'kCVImageBufferYCbCrMatrix_SMPTE_240M_1995', 'kCMFormatDescriptionExtension_ColorPrimaries': 'kCVImageBufferColorPrimariesKey', 'kCMFormatDescriptionYCbCrMatrix_ITU_R_601_4': 'kCVImageBufferYCbCrMatrix_ITU_R_601_4', 'kCMFormatDescriptionColorPrimaries_SMPTE_C': 'kCVImageBufferColorPrimaries_SMPTE_C', 'kCMFormatDescriptionChromaLocation_DV420': 'kCVImageBufferChromaLocation_DV420', 'CM_NULLABLE': '__nullable', 'COREMEDIA_DECLARE_RETURNS_NOT_RETAINED_ON_PARAMETERS': 'COREMEDIA_TRUE', 'kCMFormatDescriptionChromaLocation_Left': 'kCVImageBufferChromaLocation_Left', 'kCMTimeRoundingMethod_Default': 'kCMTimeRoundingMethod_RoundHalfAwayFromZero', 'kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing': 'kCVImageBufferPixelAspectRatioVerticalSpacingKey', 'CM_NONNULL': '__nonnull', 'kCMFormatDescriptionTransferFunction_UseGamma': 'kCVImageBufferTransferFunction_UseGamma', 'kCMVideoCodecType_422YpCbCr8': 'kCMPixelFormat_422YpCbCr8', 'kCMFormatDescriptionExtension_VerbatimImageDescription': 'kCMFormatDescriptionExtension_VerbatimSampleDescription', 'kCMFormatDescriptionExtension_ChromaLocationTopField': 'kCVImageBufferChromaLocationTopFieldKey', 'kCMFormatDescriptionExtension_PixelAspectRatio': 'kCVImageBufferPixelAspectRatioKey', 'kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing': 'kCVImageBufferPixelAspectRatioHorizontalSpacingKey', 'kCMFormatDescriptionTransferFunction_SMPTE_240M_1995': 'kCVImageBufferTransferFunction_SMPTE_240M_1995', 'kCMFormatDescriptionExtension_ChromaLocationBottomField': 'kCVImageBufferChromaLocationBottomFieldKey', 'kCMFormatDescriptionExtension_TransferFunction': 'kCVImageBufferTransferFunctionKey', 'kCMTimebaseFarFutureCFAbsoluteTime': 'kCMTimebaseVeryLongCFTimeInterval', 'CM_RETURNS_RETAINED_PARAMETER': 'CF_RETURNS_RETAINED', 'kCMFormatDescriptionKey_CleanApertureHorizontalOffset': 'kCVImageBufferCleanApertureHorizontalOffsetKey', 'kCMFormatDescriptionTransferFunction_ITU_R_709_2': 'kCVImageBufferTransferFunction_ITU_R_709_2', 'kCMFormatDescriptionColorPrimaries_EBU_3213': 'kCVImageBufferColorPrimaries_EBU_3213', 'COREMEDIA_DECLARE_NULLABILITY': 'COREMEDIA_TRUE', 'kCMFormatDescriptionKey_CleanApertureWidth': 'kCVImageBufferCleanApertureWidthKey', 'kCMFormatDescriptionExtension_FieldDetail': 'kCVImageBufferFieldDetailKey', 'kCMFormatDescriptionFieldDetail_SpatialFirstLineLate': 'kCVImageBufferFieldDetailSpatialFirstLineLate', 'kCMFormatDescriptionChromaLocation_Top': 'kCVImageBufferChromaLocation_Top', 'kCMFormatDescriptionColorPrimaries_ITU_R_709_2': 'kCVImageBufferColorPrimaries_ITU_R_709_2', 'COREMEDIA_USE_ALIGNED_CMBASECLASS_VERSION': 'COREMEDIA_TRUE', 'COREMEDIA_DECLARE_RETURNS_RETAINED_ON_PARAMETERS': 'COREMEDIA_TRUE', 'kCMFormatDescriptionChromaLocation_TopLeft': 'kCVImageBufferChromaLocation_TopLeft', 'kCMFormatDescriptionFieldDetail_SpatialFirstLineEarly': 
'kCVImageBufferFieldDetailSpatialFirstLineEarly', 'kCMFormatDescriptionFieldDetail_TemporalBottomFirst': 'kCVImageBufferFieldDetailTemporalBottomFirst', 'kCMFormatDescriptionExtension_CleanAperture': 'kCVImageBufferCleanApertureKey', 'kCMFormatDescriptionYCbCrMatrix_ITU_R_709_2': 'kCVImageBufferYCbCrMatrix_ITU_R_709_2', 'COREMEDIA_USE_DERIVED_ENUMS_FOR_CONSTANTS': 'COREMEDIA_TRUE', 'kCMFormatDescriptionKey_CleanApertureHeight': 'kCVImageBufferCleanApertureHeightKey', 'kCMFormatDescriptionFieldDetail_TemporalTopFirst': 'kCVImageBufferFieldDetailTemporalTopFirst', 'kCMFormatDescriptionChromaLocation_Center': 'kCVImageBufferChromaLocation_Center', 'kCMFormatDescriptionChromaLocation_BottomLeft': 'kCVImageBufferChromaLocation_BottomLeft', 'COREMEDIA_DECLARE_RETURNS_RETAINED': 'COREMEDIA_TRUE'} -cftypes=[('CMBufferQueueRef', b'^{opaqueCMBufferQueue=}', 'CMBufferQueueGetTypeID', None), ('CMMemoryPoolRef', b'^{opaqueCMMemoryPool=}', 'CMMemoryPoolGetTypeID', None), ('CMFormatDescriptionRef', b'^{opaqueCMFormatDescription=}', 'CMFormatDescriptionGetTypeID', None), ('CMTimebaseRef', b'^{opaqueCMTimebase=}', 'CMTimebaseGetTypeID', None), ('CMSimpleQueueRef', b'^{opaqueCMSimpleQueue=}', 'CMSimpleQueueGetTypeID', None), ('CMClockRef', b'^{opaqueCMClock=}', 'CMClockGetTypeID', None), ('CMBlockBufferRef', b'^{opaqueCMBlockBuffer=}', 'CMBlockBufferGetTypeID', None), ('CMSampleBufferRef', b'^{opaqueCMSampleBuffer=}', 'CMSampleBufferGetTypeID', None)] -misc.update({'CMBufferQueueTriggerToken': objc.createOpaquePointerType('CMBufferQueueTriggerToken', b'^{opaqueCMBufferQueueTriggerToken=}')}) -expressions = {'kCMTimebaseVeryLongCFTimeInterval': '(CFTimeInterval)(256.0 * 365.0 * 24.0 * 60.0 * 60.0)'} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreMedia/_metadata.pyc b/env/lib/python2.7/site-packages/CoreMedia/_metadata.pyc deleted file mode 100644 index 1cf164c1..00000000 Binary files a/env/lib/python2.7/site-packages/CoreMedia/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreMediaIO/_CoreMediaIO.so b/env/lib/python2.7/site-packages/CoreMediaIO/_CoreMediaIO.so deleted file mode 100755 index 81356720..00000000 Binary files a/env/lib/python2.7/site-packages/CoreMediaIO/_CoreMediaIO.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreMediaIO/__init__.py b/env/lib/python2.7/site-packages/CoreMediaIO/__init__.py deleted file mode 100644 index bbc9002e..00000000 --- a/env/lib/python2.7/site-packages/CoreMediaIO/__init__.py +++ /dev/null @@ -1,42 +0,0 @@ -''' -Python mapping for the CoreMediaIO framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from CoreMediaIO import _metadata - -sys.modules['CoreMediaIO'] = mod = objc.ObjCLazyModule( - "CoreMediaIO", - "com.apple.CoreMediaIO", - objc.pathForFramework("/System/Library/Frameworks/CoreMediaIO.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['CoreMediaIO._metadata'] - -from CoreMediaIO import _CoreMediaIO - -for nm in dir(_CoreMediaIO): - setattr(mod, nm, getattr(_CoreMediaIO, nm)) - -def CMIOGetNextSequenceNumber(value): - if value == 0xffffffffffffffff: - return 0 - return value + 1 -mod.CMIOGetNextSequenceNumber = CMIOGetNextSequenceNumber - -def CMIODiscontinuityFlagsHaveHardDiscontinuities(value): - return (value & mod.kCMIOSampleBufferDiscontinuityFlag_DurationWasExtended) != 0 - -mod.CMIODiscontinuityFlagsHaveHardDiscontinuities = CMIODiscontinuityFlagsHaveHardDiscontinuities diff --git a/env/lib/python2.7/site-packages/CoreMediaIO/__init__.pyc b/env/lib/python2.7/site-packages/CoreMediaIO/__init__.pyc deleted file mode 100644 index 27e186bc..00000000 Binary files a/env/lib/python2.7/site-packages/CoreMediaIO/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreMediaIO/_metadata.py b/env/lib/python2.7/site-packages/CoreMediaIO/_metadata.py deleted file mode 100644 index ecaaa120..00000000 --- a/env/lib/python2.7/site-packages/CoreMediaIO/_metadata.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Sep 9 13:27:52 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'CMIODeviceRS422Command': objc.createStructType('CMIODeviceRS422Command', sel32or64(b'{CMIODeviceRS422Command=^CL^CLL}', b'{CMIODeviceRS422Command=^CI^CII}'), ['mCommand', 'mCommandLength', 'mResponse', 'mResponseLength', 'mResponseUsed']), 'CMIODeviceAVCCommand': objc.createStructType('CMIODeviceAVCCommand', sel32or64(b'{CMIODeviceAVCCommand=^CL^CLL}', b'{CMIODeviceAVCCommand=^CI^CII}'), ['mCommand', 'mCommandLength', 'mResponse', 'mResponseLength', 'mResponseUsed']), 'CMIODeviceSMPTETimeCallback': objc.createStructType('CMIODeviceSMPTETimeCallback', b'{CMIODeviceSMPTETimeCallback=^?^v}', ['mGetSMPTETimeProc', 'mRefCon']), 'CMIOStreamScheduledOutputNotificationProcAndRefCon': objc.createStructType('CMIOStreamScheduledOutputNotificationProcAndRefCon', b'{CMIOStreamScheduledOutputNotificationProcAndRefCon=^?^?}', ['scheduledOutputNotificationProc', 'scheduledOutputNotificationRefCon']), 'CMIOStreamDeck': objc.createStructType('CMIOStreamDeck', sel32or64(b'{CMIOStreamDeck=LLL}', b'{CMIOStreamDeck=III}'), ['mStatus', 'mState', 'mState2']), 'CMIODeviceStreamConfiguration': objc.createStructType('CMIODeviceStreamConfiguration', sel32or64(b'{CMIODeviceStreamConfiguration=L^L}', b'{CMIODeviceStreamConfiguration=I^I}'), ['mNumberStreams', 'mNumberChannels']), 'CMIOObjectPropertyAddress': objc.createStructType('CMIOObjectPropertyAddress', sel32or64(b'{CMIOObjectPropertyAddress=LLL}', b'{CMIOObjectPropertyAddress=III}'), ['mSelector', 'mScope', 'mElement'])}) -constants = 
'''$kCMIOBlockBufferAttachmentKey_CVPixelBufferReference@^{__CFString=}$kCMIOSampleBufferAttachmentKey_CAAudioTimeStamp@^{__CFString=}$kCMIOSampleBufferAttachmentKey_ClientSequenceID@^{__CFString=}$kCMIOSampleBufferAttachmentKey_ClosedCaptionSampleBuffer@^{__CFString=}$kCMIOSampleBufferAttachmentKey_DiscontinuityFlags@^{__CFString=}$kCMIOSampleBufferAttachmentKey_HDV1_PackData@^{__CFString=}$kCMIOSampleBufferAttachmentKey_HDV2_VAUX@^{__CFString=}$kCMIOSampleBufferAttachmentKey_HostTime@^{__CFString=}$kCMIOSampleBufferAttachmentKey_MouseAndKeyboardModifiers@^{__CFString=}$kCMIOSampleBufferAttachmentKey_MuxedSourcePresentationTimeStamp@^{__CFString=}$kCMIOSampleBufferAttachmentKey_NativeSMPTEFrameCount@^{__CFString=}$kCMIOSampleBufferAttachmentKey_NoDataMarker@^{__CFString=}$kCMIOSampleBufferAttachmentKey_NumberOfVideoFramesInBuffer@^{__CFString=}$kCMIOSampleBufferAttachmentKey_NumberOfVideoFramesInGOP@^{__CFString=}$kCMIOSampleBufferAttachmentKey_PulldownCadenceInfo@^{__CFString=}$kCMIOSampleBufferAttachmentKey_RepeatedBufferContents@^{__CFString=}$kCMIOSampleBufferAttachmentKey_SMPTETime@^{__CFString=}$kCMIOSampleBufferAttachmentKey_SequenceNumber@^{__CFString=}$kCMIOSampleBufferAttachmentKey_SourceAudioFormatDescription@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_CursorFrameRect@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_CursorIsDrawnInFramebuffer@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_CursorIsVisible@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_CursorPositionX@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_CursorPositionY@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_CursorReference@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_CursorScale@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_CursorSeed@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_KeyboardModifiers@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_KeyboardModifiersEvent@^{__CFString=}$kCMIOSampleBufferAttachment_MouseAndKeyboardModifiersKey_MouseButtonState@^{__CFString=}$''' -enums = 
'''$kCMIOAVCDeviceType_DVCPro100_720p@1685481584$kCMIOAVCDeviceType_DVCPro100_NTSC@1685467502$kCMIOAVCDeviceType_DVCPro100_PAL@1685467504$kCMIOAVCDeviceType_DVCPro50_NTSC@1685468526$kCMIOAVCDeviceType_DVCPro50_PAL@1685468528$kCMIOAVCDeviceType_DVCProHD_1080i50@1685481525$kCMIOAVCDeviceType_DVCProHD_1080i60@1685481526$kCMIOAVCDeviceType_DVCPro_NTSC@1685483630$kCMIOAVCDeviceType_DVCPro_PAL@1685483632$kCMIOAVCDeviceType_DV_NTSC@1685480224$kCMIOAVCDeviceType_DV_PAL@1685480304$kCMIOAVCDeviceType_MPEG2@1836082994$kCMIOAVCDeviceType_Unknown@1970170734$kCMIOBacklightCompensationControlClassID@1651207284$kCMIOBlackLevelControlClassID@1651207286$kCMIOBooleanControlClassID@1953458028$kCMIOBooleanControlPropertyValue@1650685548$kCMIOBrightnessControlClassID@1651665268$kCMIOContrastControlClassID@1668576116$kCMIOControlClassID@1633907820$kCMIOControlPropertyElement@1667591277$kCMIOControlPropertyScope@1668506480$kCMIOControlPropertyVariant@1668702578$kCMIODataDestinationControlClassID@1684370292$kCMIODataSourceControlClassID@1685287523$kCMIODeckShuttlePause@0$kCMIODeckShuttlePlay1x@6$kCMIODeckShuttlePlayFast@7$kCMIODeckShuttlePlayFaster@8$kCMIODeckShuttlePlayFastest@9$kCMIODeckShuttlePlayHighSpeed@10$kCMIODeckShuttlePlayNextFrame@1$kCMIODeckShuttlePlayPreviousFrame@-1$kCMIODeckShuttlePlaySlow1@3$kCMIODeckShuttlePlaySlow2@4$kCMIODeckShuttlePlaySlow3@5$kCMIODeckShuttlePlaySlowest@2$kCMIODeckShuttleReverse1x@-6$kCMIODeckShuttleReverseFast@-7$kCMIODeckShuttleReverseFaster@-8$kCMIODeckShuttleReverseFastest@-9$kCMIODeckShuttleReverseHighSpeed@-10$kCMIODeckShuttleReverseSlow1@-3$kCMIODeckShuttleReverseSlow2@-4$kCMIODeckShuttleReverseSlow3@-5$kCMIODeckShuttleReverseSlowest@-2$kCMIODeckStateFastForward@6$kCMIODeckStateFastRewind@7$kCMIODeckStatePause@2$kCMIODeckStatePlay@1$kCMIODeckStatePlayReverse@5$kCMIODeckStatePlaySlow@3$kCMIODeckStateReverseSlow@4$kCMIODeckStateStop@0$kCMIODeckStatusBusy@1$kCMIODeckStatusLocal@2$kCMIODeckStatusNoDevice@7$kCMIODeckStatusNotThreaded@3$kCMIODeckStatusOpcode@5$kCMIODeckStatusSearchingForDevice@6$kCMIODeckStatusTapeInserted@4$kCMIODeviceAVCSignalMode8mmNTSC@6$kCMIODeviceAVCSignalMode8mmPAL@134$kCMIODeviceAVCSignalModeAudio@32$kCMIODeviceAVCSignalModeDVCPro100_50@240$kCMIODeviceAVCSignalModeDVCPro100_60@112$kCMIODeviceAVCSignalModeDVCPro25_525_60@120$kCMIODeviceAVCSignalModeDVCPro25_625_50@248$kCMIODeviceAVCSignalModeDVCPro50_525_60@116$kCMIODeviceAVCSignalModeDVCPro50_625_50@244$kCMIODeviceAVCSignalModeDVHS@1$kCMIODeviceAVCSignalModeHD1125_60@8$kCMIODeviceAVCSignalModeHD1250_50@136$kCMIODeviceAVCSignalModeHDV1_50@144$kCMIODeviceAVCSignalModeHDV1_60@16$kCMIODeviceAVCSignalModeHDV2_50@154$kCMIODeviceAVCSignalModeHDV2_60@26$kCMIODeviceAVCSignalModeHi8NTSC@14$kCMIODeviceAVCSignalModeHi8PAL@142$kCMIODeviceAVCSignalModeMPEG12Mbps_50@148$kCMIODeviceAVCSignalModeMPEG12Mbps_60@20$kCMIODeviceAVCSignalModeMPEG25Mbps_50@144$kCMIODeviceAVCSignalModeMPEG25Mbps_60@16$kCMIODeviceAVCSignalModeMPEG6Mbps_50@152$kCMIODeviceAVCSignalModeMPEG6Mbps_60@24$kCMIODeviceAVCSignalModeMicroMV12Mbps_50@164$kCMIODeviceAVCSignalModeMicroMV12Mbps_60@36$kCMIODeviceAVCSignalModeMicroMV6Mbps_50@168$kCMIODeviceAVCSignalModeMicroMV6Mbps_60@40$kCMIODeviceAVCSignalModeSD525_60@0$kCMIODeviceAVCSignalModeSD625_50@128$kCMIODeviceAVCSignalModeSDL525_60@4$kCMIODeviceAVCSignalModeSDL625_50@132$kCMIODeviceAVCSignalModeSVHS525_60@13$kCMIODeviceAVCSignalModeSVHS625_50@237$kCMIODeviceAVCSignalModeVHSMESECAM@213$kCMIODeviceAVCSignalModeVHSMPAL@37$kCMIODeviceAVCSignalModeVHSNPAL@181$kCMIODeviceAVCSignalModeVHSNTSC@5$kCMIODeviceAVC
SignalModeVHSPAL@165$kCMIODeviceAVCSignalModeVHSSECAM@197$kCMIODeviceClassID@1633969526$kCMIODevicePermissionsError@560492391$kCMIODevicePropertyAVCDeviceSignalMode@1886221165$kCMIODevicePropertyAVCDeviceType@1886216564$kCMIODevicePropertyCanProcessAVCCommand@1886216547$kCMIODevicePropertyCanProcessRS422Command@1916023346$kCMIODevicePropertyCanSwitchFrameRatesWithoutFrameDrops@1718775396$kCMIODevicePropertyClientSyncDiscontinuity@1886217075$kCMIODevicePropertyDeviceCanBeDefaultDevice@1684434036$kCMIODevicePropertyDeviceHasChanged@1684629094$kCMIODevicePropertyDeviceIsAlive@1818850926$kCMIODevicePropertyDeviceIsRunning@1735354734$kCMIODevicePropertyDeviceIsRunningSomewhere@1735356005$kCMIODevicePropertyDeviceMaster@1886219880$kCMIODevicePropertyDeviceUID@1969841184$kCMIODevicePropertyExcludeNonDALAccess@1769500257$kCMIODevicePropertyHogMode@1869180523$kCMIODevicePropertyIIDCCSRData@1668510308$kCMIODevicePropertyIIDCInitialUnitSpace@1769303667$kCMIODevicePropertyLatency@1819569763$kCMIODevicePropertyLinkedAndSyncedCoreAudioDeviceUID@1886155620$kCMIODevicePropertyLinkedCoreAudioDeviceUID@1886156132$kCMIODevicePropertyLocation@1684828003$kCMIODevicePropertyLocationBuiltInDisplay@1$kCMIODevicePropertyLocationExternalDevice@3$kCMIODevicePropertyLocationExternalDisplay@2$kCMIODevicePropertyLocationExternalWirelessDevice@4$kCMIODevicePropertyLocationUnknown@0$kCMIODevicePropertyModelUID@1836411236$kCMIODevicePropertyPlugIn@1886156135$kCMIODevicePropertySMPTETimeCallback@1886221155$kCMIODevicePropertyScopeInput@1768845428$kCMIODevicePropertyScopeOutput@1869968496$kCMIODevicePropertyScopePlayThrough@1886679669$kCMIODevicePropertyStreamConfiguration@1936482681$kCMIODevicePropertyStreams@1937009955$kCMIODevicePropertySuspendedByUser@1935833461$kCMIODevicePropertyTransportType@1953653102$kCMIODevicePropertyVideoDigitizerComponents@1986292071$kCMIODeviceUnknown@0$kCMIODeviceUnsupportedFormatError@560226676$kCMIODirectionControlClassID@1684632165$kCMIOExposureControlClassID@2020635506$kCMIOExposureControlPropertyConvergenceSpeed@1701016432$kCMIOExposureControlPropertyIntegrationTime@1701408372$kCMIOExposureControlPropertyLockThreshold@1701602155$kCMIOExposureControlPropertyMaximumGain@1701667192$kCMIOExposureControlPropertyRegionOfInterest@1701998441$kCMIOExposureControlPropertyStability@1702065273$kCMIOExposureControlPropertyStable@1702065250$kCMIOExposureControlPropertyTarget@1702127476$kCMIOExposureControlPropertyUnlockThreshold@1702194283$kCMIOFeatureControlClassID@1718903668$kCMIOFeatureControlPropertyAbsoluteNative@1717792353$kCMIOFeatureControlPropertyAbsoluteRange@1717789042$kCMIOFeatureControlPropertyAbsoluteUnitName@1717794158$kCMIOFeatureControlPropertyAbsoluteValue@1717789046$kCMIOFeatureControlPropertyAutomaticManual@1717789037$kCMIOFeatureControlPropertyConvertAbsoluteToNative@1717645934$kCMIOFeatureControlPropertyConvertNativeToAbsolute@1718497889$kCMIOFeatureControlPropertyNativeRange@1717792370$kCMIOFeatureControlPropertyNativeValue@1717792374$kCMIOFeatureControlPropertyOnOff@1717792623$kCMIOFeatureControlPropertyTune@1717793902$kCMIOFocusControlClassID@1717794163$kCMIOGainControlClassID@1734437230$kCMIOGammaControlClassID@1735224673$kCMIOHardwareBadDeviceError@560227702$kCMIOHardwareBadObjectError@560947818$kCMIOHardwareBadPropertySizeError@561211770$kCMIOHardwareBadStreamError@561214578$kCMIOHardwareIllegalOperationError@1852797029$kCMIOHardwareNoError@0$kCMIOHardwareNotRunningError@1937010544$kCMIOHardwareNotStoppedError@1920298528$kCMIOHardwarePropertyAllowScreenCaptureDevices@2036691
744$kCMIOHardwarePropertyAllowWirelessScreenCaptureDevices@2004050788$kCMIOHardwarePropertyDefaultInputDevice@1682533920$kCMIOHardwarePropertyDefaultOutputDevice@1682929012$kCMIOHardwarePropertyDeviceForUID@1685416292$kCMIOHardwarePropertyDevices@1684370979$kCMIOHardwarePropertyIsInitingOrExiting@1768845172$kCMIOHardwarePropertyPlugInForBundleID@1885954665$kCMIOHardwarePropertyProcessIsMaster@1835103092$kCMIOHardwarePropertySleepingIsAllowed@1936483696$kCMIOHardwarePropertySuspendedBySystem@1935833459$kCMIOHardwarePropertyUnloadingIsAllowed@1970170980$kCMIOHardwarePropertyUserSessionIsActiveOrHeadless@1970496882$kCMIOHardwareSuspendedBySystemError@1684369017$kCMIOHardwareUnknownPropertyError@2003332927$kCMIOHardwareUnspecifiedError@2003329396$kCMIOHardwareUnsupportedOperationError@1970171760$kCMIOHueControlClassID@1752524064$kCMIOInvalidSequenceNumber@18446744073709551615$kCMIOIrisControlClassID@1769105779$kCMIOJackControlClassID@1784767339$kCMIONoiseReductionControlClassID@1932684914$kCMIOObjectClassID@1634689642$kCMIOObjectClassIDWildcard@707406378$kCMIOObjectPropertyClass@1668047219$kCMIOObjectPropertyCreator@1869638759$kCMIOObjectPropertyElementCategoryName@1818452846$kCMIOObjectPropertyElementMaster@0$kCMIOObjectPropertyElementName@1818454126$kCMIOObjectPropertyElementNumberName@1818455662$kCMIOObjectPropertyElementWildcard@4294967295$kCMIOObjectPropertyListenerAdded@1818850145$kCMIOObjectPropertyListenerRemoved@1818850162$kCMIOObjectPropertyManufacturer@1819107691$kCMIOObjectPropertyName@1819173229$kCMIOObjectPropertyOwnedObjects@1870098020$kCMIOObjectPropertyOwner@1937007734$kCMIOObjectPropertyScopeGlobal@1735159650$kCMIOObjectPropertyScopeWildcard@707406378$kCMIOObjectPropertySelectorWildcard@707406378$kCMIOObjectSystemObject@1$kCMIOObjectUnknown@0$kCMIOOpticalFilterClassID@1869637236$kCMIOPanControlClassID@1885433376$kCMIOPanTiltAbsoluteControlClassID@1886675298$kCMIOPanTiltRelativeControlClassID@1886679660$kCMIOPlugInClassID@1634757735$kCMIOPlugInPropertyBundleID@1885956452$kCMIOPowerLineFrequencyControlClassID@1886873201$kCMIOSampleBufferDiscontinuityFlag_BufferOverrun@128$kCMIOSampleBufferDiscontinuityFlag_ClientSyncDiscontinuity@1024$kCMIOSampleBufferDiscontinuityFlag_CodecSettingsChanged@131072$kCMIOSampleBufferDiscontinuityFlag_DataFormatChanged@8192$kCMIOSampleBufferDiscontinuityFlag_DataWasDropped@64$kCMIOSampleBufferDiscontinuityFlag_DataWasFlushed@32$kCMIOSampleBufferDiscontinuityFlag_DiscontinuityInDTS@256$kCMIOSampleBufferDiscontinuityFlag_DurationWasExtended@32768$kCMIOSampleBufferDiscontinuityFlag_MalformedData@16$kCMIOSampleBufferDiscontinuityFlag_NoDataMarker@4096$kCMIOSampleBufferDiscontinuityFlag_PacketError@4$kCMIOSampleBufferDiscontinuityFlag_RelatedToDiscontinuity@512$kCMIOSampleBufferDiscontinuityFlag_SleepWakeCycle@65536$kCMIOSampleBufferDiscontinuityFlag_StreamDiscontinuity@8$kCMIOSampleBufferDiscontinuityFlag_TimecodeDiscontinuity@2$kCMIOSampleBufferDiscontinuityFlag_TimingReferenceJumped@16384$kCMIOSampleBufferDiscontinuityFlag_TrickPlay@2048$kCMIOSampleBufferDiscontinuityFlag_UnknownDiscontinuity@1$kCMIOSampleBufferNoDataEvent_DeviceDidNotSync@2$kCMIOSampleBufferNoDataEvent_DeviceInWrongMode@3$kCMIOSampleBufferNoDataEvent_NoMedia@1$kCMIOSampleBufferNoDataEvent_ProcessingError@4$kCMIOSampleBufferNoDataEvent_SleepWakeCycle@5$kCMIOSampleBufferNoDataEvent_Unknown@0$kCMIOSampleBufferNoDiscontinuities@0$kCMIOSaturationControlClassID@1935766645$kCMIOSelectorControlClassID@1936483188$kCMIOSelectorControlPropertyAvailableItems@1935892841$kCMIOSelectorControlProper
tyCurrentItem@1935893353$kCMIOSelectorControlPropertyItemName@1935894894$kCMIOSharpnessControlClassID@1936224880$kCMIOShutterControlClassID@1936225394$kCMIOStreamClassID@1634956402$kCMIOStreamPropertyCanProcessDeckCommand@1885627236$kCMIOStreamPropertyClock@1886217068$kCMIOStreamPropertyDeck@1684366187$kCMIOStreamPropertyDeckCueing@1668638051$kCMIOStreamPropertyDeckDropness@1685221232$kCMIOStreamPropertyDeckFrameNumber@1952673636$kCMIOStreamPropertyDeckLocal@1819239276$kCMIOStreamPropertyDeckThreaded@1953002084$kCMIOStreamPropertyDeviceSyncTimeoutInMSec@1886219826$kCMIOStreamPropertyDirection@1935960434$kCMIOStreamPropertyEndOfData@1886217572$kCMIOStreamPropertyFirstOutputPresentationTimeStamp@1886351476$kCMIOStreamPropertyFormatDescription@1885762592$kCMIOStreamPropertyFormatDescriptions@1885762657$kCMIOStreamPropertyFrameRate@1852207732$kCMIOStreamPropertyFrameRateRanges@1718776423$kCMIOStreamPropertyFrameRates@1852207651$kCMIOStreamPropertyInitialPresentationTimeStampForLinkedAndSyncedAudio@1768975475$kCMIOStreamPropertyLatency@1819569763$kCMIOStreamPropertyMinimumFrameRate@1835430516$kCMIOStreamPropertyNoDataEventCount@1886219827$kCMIOStreamPropertyNoDataTimeoutInMSec@1886219825$kCMIOStreamPropertyOutputBufferQueueSize@1886220145$kCMIOStreamPropertyOutputBufferRepeatCount@1886220146$kCMIOStreamPropertyOutputBufferUnderrunCount@1886220149$kCMIOStreamPropertyOutputBuffersNeededForThrottledPlayback@1835624038$kCMIOStreamPropertyOutputBuffersRequiredForStartup@1886220147$kCMIOStreamPropertyPreferredFormatDescription@1886545508$kCMIOStreamPropertyPreferredFrameRate@1886545522$kCMIOStreamPropertyScheduledOutputNotificationProc@1936682608$kCMIOStreamPropertyStartingChannel@1935894638$kCMIOStreamPropertyStillImage@1937010023$kCMIOStreamPropertyStillImageFormatDescriptions@1937008244$kCMIOStreamPropertyTerminalType@1952805485$kCMIOStreamUnknown@0$kCMIOSystemObjectClassID@1634957683$kCMIOTemperatureControlClassID@1952804208$kCMIOTiltControlClassID@1953066100$kCMIOWhiteBalanceControlClassID@2003329644$kCMIOWhiteBalanceUControlClassID@2003329653$kCMIOWhiteBalanceVControlClassID@2003329654$kCMIOWhiteLevelControlClassID@2003332214$kCMIOZoomControlClassID@2054123373$kCMIOZoomRelativeControlClassID@2054122866$''' -misc.update({}) -functions={'CMIOObjectShow': (sel32or64(b'vL', b'vI'),), 'CMIOStreamDeckJog': (sel32or64(b'lLl', b'iIi'),), 'CMIOObjectAddPropertyListenerBlock': (sel32or64(b'lL^{CMIOObjectPropertyAddress=LLL}@@?', b'iI^{CMIOObjectPropertyAddress=III}@@?'), '', {'arguments': {1: {'type_modifier': 'n'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'I'}, 2: {'type': '^{CMIOObjectPropertyAddress=III}', 'type_modifier': 'n', 'c_array_length_in_arg': 1}}}}}}), 'CMIOStreamDeckCueTo': (sel32or64(b'lLQZ', b'iIQZ'),), 'CMIOObjectGetPropertyData': (sel32or64(b'lL^{CMIOObjectPropertyAddress=LLL}L^vL^L^v', b'iI^{CMIOObjectPropertyAddress=III}I^vI^I^v'), '', {'arguments': {1: {'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 2, 'type_modifier': 'o'}, 5: {'type_modifier': 'o'}, 6: {'c_array_length_in_arg': (4, 5), 'type_modifier': 'o'}}}), 'CMIOSampleBufferCreateForImageBuffer': (sel32or64(b'l^{__CFAllocator=}^{__CVBuffer=}^{opaqueCMFormatDescription=}^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}QL^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{__CVBuffer=}^{opaqueCMFormatDescription=}^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}QI^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': 
{3: {'type_modifier': 'n'}, 6: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMIOObjectRemovePropertyListener': (sel32or64(b'lL^{CMIOObjectPropertyAddress=LLL}^?^v', b'iI^{CMIOObjectPropertyAddress=III}^?^v'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: {'type': b'^{CMIOObjectPropertyAddress=III}'}, 3: {'type': b'^v'}}}}}}), 'CMIOSampleBufferCreateNoDataMarker': (sel32or64(b'l^{__CFAllocator=}L^{opaqueCMFormatDescription=}QL^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}I^{opaqueCMFormatDescription=}QI^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMIOObjectGetPropertyDataSize': (sel32or64(b'lL^{CMIOObjectPropertyAddress=LLL}L^v^L', b'iI^{CMIOObjectPropertyAddress=III}I^v^I'), '', {'arguments': {1: {'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'type_modifier': 'o'}}}), 'CMIOStreamClockPostTimingEvent': (sel32or64(b'l{_CMTime=qiIq}QZ@', b'i{_CMTime=qiIq}QZ@'),), 'CMIODeviceStopStream': (sel32or64(b'lLL', b'iII'),), 'CMIODeviceStartStream': (sel32or64(b'lLL', b'iII'),), 'CMIOSampleBufferCreate': (sel32or64(b'l^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{opaqueCMFormatDescription=}LL^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}L^LQL^^{opaqueCMSampleBuffer=}', b'i^{__CFAllocator=}^{OpaqueCMBlockBuffer=}^{opaqueCMFormatDescription=}II^{_CMSampleTimingInfo={_CMTime=qiIq}{_CMTime=qiIq}{_CMTime=qiIq}}I^QQI^^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {10: {'already_cfretained': True, 'type_modifier': 'o'}, 5: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}, 7: {'c_array_length_in_arg': 6, 'type_modifier': 'n'}}}), 'CMIOStreamCopyBufferQueue': (sel32or64(b'lL^?^v^^{opaqueCMSimpleQueue=}', b'iI^?^v^^{opaqueCMSimpleQueue=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'^v'}, 2: {'type': b'^v'}}}}, 3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMIOObjectAddPropertyListener': (sel32or64(b'lL^{CMIOObjectPropertyAddress=LLL}^?^v', b'iI^{CMIOObjectPropertyAddress=III}^?^v'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: {'type': b'^{CMIOObjectPropertyAddress=III}'}, 3: {'type': b'^v'}}}}}}), 'CMIOObjectHasProperty': (sel32or64(b'ZL^{CMIOObjectPropertyAddress=LLL}', b'ZI^{CMIOObjectPropertyAddress=III}'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CMIOObjectIsPropertySettable': (sel32or64(b'lL^{CMIOObjectPropertyAddress=LLL}^Z', b'iI^{CMIOObjectPropertyAddress=III}^Z'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CMIOSampleBufferGetSequenceNumber': (b'Q^{opaqueCMSampleBuffer=}',), 'CMIOStreamClockCreate': (sel32or64(b'l^{__CFAllocator=}^{__CFString=}^v{_CMTime=qiIq}LL^@', b'i^{__CFAllocator=}^{__CFString=}^v{_CMTime=qiIq}II^@'), '', {'retval': {'already_cfretained': True}, 'arguments': {6: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CMIOStreamClockInvalidate': (sel32or64(b'l@', b'i@'),), 'CMIOObjectsPublishedAndDied': (sel32or64(b'l^^{CMIOHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}LL^LL^L', b'i^^{CMIOHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}II^II^I'),), 
'CMIOObjectSetPropertyData': (sel32or64(b'lL^{CMIOObjectPropertyAddress=LLL}L^vL^v', b'iI^{CMIOObjectPropertyAddress=III}I^vI^v'),), 'CMIOSampleBufferGetDiscontinuityFlags': (sel32or64(b'L^{opaqueCMSampleBuffer=}', b'I^{opaqueCMSampleBuffer=}'),), 'CMIOStreamDeckPlay': (sel32or64(b'lL', b'iI'),), 'CMIOSampleBufferSetDiscontinuityFlags': (sel32or64(b'v^{__CFAllocator=}^{opaqueCMSampleBuffer=}L', b'v^{__CFAllocator=}^{opaqueCMSampleBuffer=}I'),), 'CMIOStreamDeckStop': (sel32or64(b'lL', b'iI'),), 'CMIOObjectPropertiesChanged': (sel32or64(b'l^^{CMIOHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}LL^{CMIOObjectPropertyAddress=LLL}', b'i^^{CMIOHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}II^{CMIOObjectPropertyAddress=III}'),), 'CMIOSampleBufferSetSequenceNumber': (b'v^{__CFAllocator=}^{opaqueCMSampleBuffer=}Q',), 'CMIOSampleBufferCopySampleAttachments': (sel32or64(b'l^{opaqueCMSampleBuffer=}^{opaqueCMSampleBuffer=}', b'i^{opaqueCMSampleBuffer=}^{opaqueCMSampleBuffer=}'), '', {'retval': {'already_cfretained': True}}), 'CMIOObjectCreate': (sel32or64(b'l^^{CMIOHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}LL^L', b'i^^{CMIOHardwarePlugInInterface=^v^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?^?}II^I'), '', {'retval': {'already_cfretained': True}}), 'CMIOStreamClockConvertHostTimeToDeviceTime': (b'{_CMTime=qiIq}Q@',), 'CMIOObjectRemovePropertyListenerBlock': (sel32or64(b'lL^{CMIOObjectPropertyAddress=LLL}@@?', b'iI^{CMIOObjectPropertyAddress=III}@@?'), '', {'arguments': {1: {'type_modifier': 'n'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'I'}, 2: {'type': '^{CMIOObjectPropertyAddress=III}', 'type_modifier': 'n', 'c_array_length_in_arg': 1}}}}}}), 'CMIOSampleBufferCopyNonRequiredAttachments': (sel32or64(b'l^{opaqueCMSampleBuffer=}^{opaqueCMSampleBuffer=}I', b'i^{opaqueCMSampleBuffer=}^{opaqueCMSampleBuffer=}I'), '', {'retval': {'already_cfretained': True}})} -aliases = {'kCMIOStreamUnknown': 'kCMIOObjectUnknown', 'kCMIODeviceUnknown': 'kCMIOObjectUnknown'} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreMediaIO/_metadata.pyc b/env/lib/python2.7/site-packages/CoreMediaIO/_metadata.pyc deleted file mode 100644 index fd665a5a..00000000 Binary files a/env/lib/python2.7/site-packages/CoreMediaIO/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/CarbonCore/__init__.py b/env/lib/python2.7/site-packages/CoreServices/CarbonCore/__init__.py deleted file mode 100644 index ed2c07a5..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/CarbonCore/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the CoreServices/CarbonCore framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. - -Note that PyObjC only wrappers the non-deprecated parts of the CoreServices -framework. 
-''' -import sys -import objc - -from CoreServices.CarbonCore import _metadata -from CoreServices._inlines import _inline_list_ - -sys.modules['CoreServices.CarbonCore'] = mod = objc.ObjCLazyModule('CoreServices.CarbonCore', - "com.apple.CarbonCore", - objc.pathForFramework("/System/Library/Frameworks/CoreServices.framework"), - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ()) - -import sys -del sys.modules['CoreServices.CarbonCore._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreServices/CarbonCore/__init__.pyc b/env/lib/python2.7/site-packages/CoreServices/CarbonCore/__init__.pyc deleted file mode 100644 index e91ee64f..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/CarbonCore/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/CarbonCore/_metadata.py b/env/lib/python2.7/site-packages/CoreServices/CarbonCore/_metadata.py deleted file mode 100644 index ac8644ba..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/CarbonCore/_metadata.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sat Aug 18 17:26:59 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'ScriptCodeRun': objc.createStructType('ScriptCodeRun', sel32or64(b'{ScriptCodeRun=Ls}', b'{ScriptCodeRun=Qs}'), ['offset', 'script']), 'NumFormatStringRec': objc.createStructType('NumFormatStringRec', b'{NumFormatString=CC[254c]}', ['fLength', 'fVersion', 'data']), 'UnicodeMapping': objc.createStructType('UnicodeMapping', sel32or64(b'{UnicodeMapping=LLl}', b'{UnicodeMapping=IIi}'), ['unicodeEncoding', 'otherEncoding', 'mappingVersion']), 'TECConversionInfo': objc.createStructType('TECConversionInfo', sel32or64(b'{TECConversionInfo=LLSS}', b'{TECConversionInfo=IISS}'), ['sourceEncoding', 'destinationEncoding', 'reserved1', 'reserved2']), 'LocaleAndVariant': objc.createStructType('LocaleAndVariant', sel32or64(b'{LocaleAndVariant=^{OpaqueLocaleRef=}L}', b'{LocaleAndVariant=^{OpaqueLocaleRef=}I}'), ['locale', 'opVariant']), 'TextEncodingRun': objc.createStructType('TextEncodingRun', sel32or64(b'{TextEncodingRun=LL}', b'{TextEncodingRun=QI}'), ['offset', 'textEncoding']), 'TECInfo': objc.createStructType('TECInfo', sel32or64(b'{TECInfo=SSLLL[32C][32C]SS}', b'{TECInfo=SSIII[32C][32C]SS}'), ['format', 'tecVersion', 'tecTextConverterFeatures', 'tecUnicodeConverterFeatures', 'tecTextCommonFeatures', 'tecTextEncodingsFolderName', 'tecExtensionFileName', 'tecLowestTEFileVersion', 'tecHighestTEFileVersion']), 'Nanoseconds': objc.createStructType('Nanoseconds', sel32or64(b'{UnsignedWide=LL}', b'{UnsignedWide=II}'), ['lo', 'hi'])}) -constants = '''$$''' -enums = 
'''$_MixedModeMagic@43774$kBig5_BasicVariant@0$kBig5_DOSVariant@3$kBig5_ETenVariant@2$kBig5_StandardVariant@1$kCCRegisterCBit@16$kCCRegisterNBit@19$kCCRegisterVBit@17$kCCRegisterXBit@20$kCCRegisterZBit@18$kCFM68kRTA@16$kCSDiskSpaceRecoveryOptionNoUI@1$kCStackBased@1$kCallingConventionMask@15$kCallingConventionPhase@0$kCallingConventionWidth@4$kCurrentMixedModeStateRecord@1$kD0DispatchedCStackBased@9$kD0DispatchedPascalStackBased@8$kD1DispatchedPascalStackBased@12$kDOSJapanesePalmVariant@1$kDOSJapaneseStandardVariant@0$kDispatchedParameterPhase@8$kDispatchedSelectorSizePhase@6$kDispatchedSelectorSizeWidth@2$kDontPassSelector@8$kEUC_CN_BasicVariant@0$kEUC_CN_DOSVariant@1$kEUC_KR_BasicVariant@0$kEUC_KR_DOSVariant@1$kFSAllowConcurrentAsyncIOBit@3$kFourByteCode@3$kFragmentIsPrepared@0$kFragmentNeedsPreparing@2$kHebrewFigureSpaceVariant@1$kHebrewStandardVariant@0$kISOLatin1MusicCDVariant@1$kISOLatin1StandardVariant@0$kISOLatinArabicExplicitOrderVariant@2$kISOLatinArabicImplicitOrderVariant@0$kISOLatinArabicVisualOrderVariant@1$kISOLatinHebrewExplicitOrderVariant@2$kISOLatinHebrewImplicitOrderVariant@0$kISOLatinHebrewVisualOrderVariant@1$kJapaneseBasicVariant@2$kJapanesePostScriptPrintVariant@4$kJapanesePostScriptScrnVariant@3$kJapaneseStandardVariant@0$kJapaneseStdNoVerticalsVariant@1$kJapaneseVertAtKuPlusTenVariant@5$kLoadCFrag@1$kLocaleAllPartsMask@63$kLocaleAndVariantNameMask@3$kLocaleLanguageMask@1$kLocaleLanguageVariantMask@2$kLocaleNameMask@1$kLocaleOperationVariantNameMask@2$kLocaleRegionMask@16$kLocaleRegionVariantMask@32$kLocaleScriptMask@4$kLocaleScriptVariantMask@8$kM68kISA@0$kMacArabicAlBayanVariant@3$kMacArabicStandardVariant@0$kMacArabicThuluthVariant@2$kMacArabicTrueTypeVariant@1$kMacCroatianCurrencySignVariant@1$kMacCroatianDefaultVariant@0$kMacCroatianEuroSignVariant@2$kMacCyrillicCurrSignStdVariant@1$kMacCyrillicCurrSignUkrVariant@2$kMacCyrillicDefaultVariant@0$kMacCyrillicEuroSignVariant@3$kMacFarsiStandardVariant@0$kMacFarsiTrueTypeVariant@1$kMacGreekDefaultVariant@0$kMacGreekEuroSignVariant@2$kMacGreekNoEuroSignVariant@1$kMacHebrewFigureSpaceVariant@1$kMacHebrewStandardVariant@0$kMacIcelandicStandardVariant@0$kMacIcelandicStdCurrSignVariant@2$kMacIcelandicStdDefaultVariant@0$kMacIcelandicStdEuroSignVariant@4$kMacIcelandicTTCurrSignVariant@3$kMacIcelandicTTDefaultVariant@1$kMacIcelandicTTEuroSignVariant@5$kMacIcelandicTrueTypeVariant@1$kMacJapaneseBasicVariant@2$kMacJapanesePostScriptPrintVariant@4$kMacJapanesePostScriptScrnVariant@3$kMacJapaneseStandardVariant@0$kMacJapaneseStdNoVerticalsVariant@1$kMacJapaneseVertAtKuPlusTenVariant@5$kMacRomanCurrencySignVariant@1$kMacRomanDefaultVariant@0$kMacRomanEuroSignVariant@2$kMacRomanLatin1CroatianVariant@8$kMacRomanLatin1DefaultVariant@0$kMacRomanLatin1IcelandicVariant@11$kMacRomanLatin1RomanianVariant@14$kMacRomanLatin1StandardVariant@2$kMacRomanLatin1TurkishVariant@6$kMacRomanStandardVariant@0$kMacRomanianCurrencySignVariant@1$kMacRomanianDefaultVariant@0$kMacRomanianEuroSignVariant@2$kMacVT100CurrencySignVariant@1$kMacVT100DefaultVariant@0$kMacVT100EuroSignVariant@2$kNoByteCode@0$kOld68kRTA@0$kOneByteCode@1$kPascalStackBased@0$kPassSelector@0$kPowerPCISA@1$kPowerPCRTA@0$kProcDescriptorIsAbsolute@0$kProcDescriptorIsIndex@32$kProcDescriptorIsProcPtr@0$kProcDescriptorIsRelative@1$kRegisterA0@4$kRegisterA1@5$kRegisterA2@6$kRegisterA3@7$kRegisterA4@12$kRegisterA5@13$kRegisterA6@14$kRegisterBased@2$kRegisterD0@0$kRegisterD1@1$kRegisterD2@2$kRegisterD3@3$kRegisterD4@8$kRegisterD5@9$kRegisterD6@10$kRegisterD7@11$kRegisterParameterMask@214
7481600$kRegisterParameterPhase@11$kRegisterParameterSizePhase@0$kRegisterParameterSizeWidth@2$kRegisterParameterWhichPhase@2$kRegisterParameterWhichWidth@3$kRegisterParameterWidth@5$kRegisterResultLocationPhase@6$kRegisterResultLocationWidth@5$kResultSizeMask@48$kResultSizePhase@4$kResultSizeWidth@2$kRoutineDescriptorVersion@7$kRoutineIsDispatchedDefaultRoutine@16$kRoutineIsNotDispatchedDefaultRoutine@0$kSelectorsAreIndexable@1$kSelectorsAreNotIndexable@0$kShiftJIS_BasicVariant@0$kShiftJIS_DOSVariant@1$kShiftJIS_MusicCDVariant@2$kSpecialCase@15$kSpecialCaseCaretHook@0$kSpecialCaseDrawHook@4$kSpecialCaseEOLHook@1$kSpecialCaseGNEFilterProc@11$kSpecialCaseHighHook@0$kSpecialCaseHitTestHook@5$kSpecialCaseMBarHook@12$kSpecialCaseNWidthHook@3$kSpecialCaseProtocolHandler@7$kSpecialCaseSelectorMask@1008$kSpecialCaseSelectorPhase@4$kSpecialCaseSelectorWidth@6$kSpecialCaseSocketListener@8$kSpecialCaseTEDoText@10$kSpecialCaseTEFindWord@6$kSpecialCaseTERecalc@9$kSpecialCaseTextWidthHook@2$kSpecialCaseWidthHook@2$kStackDispatchedPascalStackBased@14$kStackParameterMask@-64$kStackParameterPhase@6$kStackParameterWidth@2$kTECAddFallbackInterruptBit@7$kTECAddFallbackInterruptMask@128$kTECAddForceASCIIChangesBit@4$kTECAddForceASCIIChangesMask@16$kTECAddTextRunHeuristicsBit@6$kTECAddTextRunHeuristicsMask@64$kTECChinesePluginSignature@1887070319$kTECDisableFallbacksBit@16$kTECDisableFallbacksMask@65536$kTECDisableLooseMappingsBit@17$kTECDisableLooseMappingsMask@131072$kTECFallbackTextLengthFixBit@1$kTECFallbackTextLengthFixMask@2$kTECInfoCurrentFormat@2$kTECInternetNameDefaultUsageMask@0$kTECInternetNameStrictUsageMask@1$kTECInternetNameTolerantUsageMask@2$kTECJapanesePluginSignature@1886023790$kTECKeepInfoFixBit@0$kTECKeepInfoFixMask@1$kTECKoreanPluginSignature@1886089074$kTECPreferredEncodingFixBit@5$kTECPreferredEncodingFixMask@32$kTECSignature@1701733238$kTECTextRunBitClearFixBit@2$kTECTextRunBitClearFixMask@4$kTECTextToUnicodeScanFixBit@3$kTECTextToUnicodeScanFixMask@8$kTECUnicodePluginSignature@1886744169$kTEC_MIBEnumDontCare@-1$kTextCenter@1$kTextEncodingANSEL@1537$kTextEncodingBaseName@1$kTextEncodingBig5@2563$kTextEncodingBig5_E@2569$kTextEncodingBig5_HKSCS_1999@2566$kTextEncodingCNS_11643_92_P1@1617$kTextEncodingCNS_11643_92_P2@1618$kTextEncodingCNS_11643_92_P3@1619$kTextEncodingDOSArabic@1049$kTextEncodingDOSBalticRim@1030$kTextEncodingDOSCanadianFrench@1048$kTextEncodingDOSChineseSimplif@1057$kTextEncodingDOSChineseTrad@1059$kTextEncodingDOSCyrillic@1043$kTextEncodingDOSGreek@1029$kTextEncodingDOSGreek1@1041$kTextEncodingDOSGreek2@1052$kTextEncodingDOSHebrew@1047$kTextEncodingDOSIcelandic@1046$kTextEncodingDOSJapanese@1056$kTextEncodingDOSKorean@1058$kTextEncodingDOSLatin1@1040$kTextEncodingDOSLatin2@1042$kTextEncodingDOSLatinUS@1024$kTextEncodingDOSNordic@1050$kTextEncodingDOSPortuguese@1045$kTextEncodingDOSRussian@1051$kTextEncodingDOSThai@1053$kTextEncodingDOSTurkish@1044$kTextEncodingDefaultFormat@0$kTextEncodingDefaultVariant@0$kTextEncodingEBCDIC_CP037@3074$kTextEncodingEBCDIC_LatinCore@3073$kTextEncodingEBCDIC_US@3073$kTextEncodingEUC_CN@2352$kTextEncodingEUC_JP@2336$kTextEncodingEUC_KR@2368$kTextEncodingEUC_TW@2353$kTextEncodingFormatName@3$kTextEncodingFullName@0$kTextEncodingGBK_95@1585$kTextEncodingGB_18030_2000@1586$kTextEncodingGB_18030_2005@1586$kTextEncodingGB_2312_80@1584$kTextEncodingHZ_GB_2312@2565$kTextEncodingISO10646_1993@257$kTextEncodingISOLatin1@513$kTextEncodingISOLatin10@528$kTextEncodingISOLatin2@514$kTextEncodingISOLatin3@515$kTextEncodingISOLatin4@516$kTextEncodingISOLa
tin5@521$kTextEncodingISOLatin6@522$kTextEncodingISOLatin7@525$kTextEncodingISOLatin8@526$kTextEncodingISOLatin9@527$kTextEncodingISOLatinArabic@518$kTextEncodingISOLatinCyrillic@517$kTextEncodingISOLatinGreek@519$kTextEncodingISOLatinHebrew@520$kTextEncodingISO_2022_CN@2096$kTextEncodingISO_2022_CN_EXT@2097$kTextEncodingISO_2022_JP@2080$kTextEncodingISO_2022_JP_1@2082$kTextEncodingISO_2022_JP_2@2081$kTextEncodingISO_2022_JP_3@2083$kTextEncodingISO_2022_KR@2112$kTextEncodingJIS_C6226_78@1572$kTextEncodingJIS_X0201_76@1568$kTextEncodingJIS_X0208_83@1569$kTextEncodingJIS_X0208_90@1570$kTextEncodingJIS_X0212_90@1571$kTextEncodingJIS_X0213_MenKuTen@1577$kTextEncodingKOI8_R@2562$kTextEncodingKOI8_U@2568$kTextEncodingKSC_5601_87@1600$kTextEncodingKSC_5601_92_Johab@1601$kTextEncodingMacArabic@4$kTextEncodingMacArmenian@24$kTextEncodingMacBengali@13$kTextEncodingMacBurmese@19$kTextEncodingMacCeltic@39$kTextEncodingMacCentralEurRoman@29$kTextEncodingMacChineseSimp@25$kTextEncodingMacChineseTrad@2$kTextEncodingMacCroatian@36$kTextEncodingMacCyrillic@7$kTextEncodingMacDevanagari@9$kTextEncodingMacDingbats@34$kTextEncodingMacEastEurRoman@29$kTextEncodingMacEthiopic@28$kTextEncodingMacExtArabic@31$kTextEncodingMacFarsi@140$kTextEncodingMacGaelic@40$kTextEncodingMacGeez@28$kTextEncodingMacGeorgian@23$kTextEncodingMacGreek@6$kTextEncodingMacGujarati@11$kTextEncodingMacGurmukhi@10$kTextEncodingMacHFS@255$kTextEncodingMacHebrew@5$kTextEncodingMacIcelandic@37$kTextEncodingMacInuit@236$kTextEncodingMacJapanese@1$kTextEncodingMacKannada@16$kTextEncodingMacKeyboardGlyphs@41$kTextEncodingMacKhmer@20$kTextEncodingMacKorean@3$kTextEncodingMacLaotian@22$kTextEncodingMacMalayalam@17$kTextEncodingMacMongolian@27$kTextEncodingMacOriya@12$kTextEncodingMacRSymbol@8$kTextEncodingMacRoman@0$kTextEncodingMacRomanLatin1@2564$kTextEncodingMacRomanian@38$kTextEncodingMacSimpChinese@25$kTextEncodingMacSinhalese@18$kTextEncodingMacSymbol@33$kTextEncodingMacTamil@14$kTextEncodingMacTelugu@15$kTextEncodingMacThai@21$kTextEncodingMacTibetan@26$kTextEncodingMacTradChinese@2$kTextEncodingMacTurkish@35$kTextEncodingMacUkrainian@152$kTextEncodingMacUnicode@126$kTextEncodingMacUninterp@32$kTextEncodingMacVT100@252$kTextEncodingMacVietnamese@30$kTextEncodingMultiRun@4095$kTextEncodingNextStepJapanese@2818$kTextEncodingNextStepLatin@2817$kTextEncodingShiftJIS@2561$kTextEncodingShiftJIS_X0213@1576$kTextEncodingShiftJIS_X0213_00@1576$kTextEncodingUS_ASCII@1536$kTextEncodingUnicodeDefault@256$kTextEncodingUnicodeV10_0@276$kTextEncodingUnicodeV11_0@277$kTextEncodingUnicodeV1_1@257$kTextEncodingUnicodeV2_0@259$kTextEncodingUnicodeV2_1@259$kTextEncodingUnicodeV3_0@260$kTextEncodingUnicodeV3_1@261$kTextEncodingUnicodeV3_2@262$kTextEncodingUnicodeV4_0@264$kTextEncodingUnicodeV5_0@266$kTextEncodingUnicodeV5_1@267$kTextEncodingUnicodeV6_0@269$kTextEncodingUnicodeV6_1@270$kTextEncodingUnicodeV6_3@272$kTextEncodingUnicodeV7_0@273$kTextEncodingUnicodeV8_0@274$kTextEncodingUnicodeV9_0@275$kTextEncodingUnknown@65535$kTextEncodingVISCII@2567$kTextEncodingVariantName@2$kTextEncodingWindowsANSI@1280$kTextEncodingWindowsArabic@1286$kTextEncodingWindowsBalticRim@1287$kTextEncodingWindowsCyrillic@1282$kTextEncodingWindowsGreek@1283$kTextEncodingWindowsHebrew@1285$kTextEncodingWindowsKoreanJohab@1296$kTextEncodingWindowsLatin1@1280$kTextEncodingWindowsLatin2@1281$kTextEncodingWindowsLatin5@1284$kTextEncodingWindowsVietnamese@1288$kTextFlushDefault@0$kTextFlushLeft@-2$kTextFlushRight@-1$kTextLanguageDontCare@-128$kTextRegionDontCare@-128$kTextScriptDontCare@-1
28$kThinkCStackBased@5$kTwoByteCode@2$kUCBidiCatArabicNumber@6$kUCBidiCatBlockSeparator@8$kUCBidiCatBoundaryNeutral@19$kUCBidiCatCommonNumberSeparator@7$kUCBidiCatEuroNumber@3$kUCBidiCatEuroNumberSeparator@4$kUCBidiCatEuroNumberTerminator@5$kUCBidiCatFirstStrongIsolate@22$kUCBidiCatLeftRight@1$kUCBidiCatLeftRightEmbedding@13$kUCBidiCatLeftRightIsolate@20$kUCBidiCatLeftRightOverride@15$kUCBidiCatNonSpacingMark@18$kUCBidiCatNotApplicable@0$kUCBidiCatOtherNeutral@11$kUCBidiCatPopDirectionalFormat@17$kUCBidiCatPopDirectionalIsolate@23$kUCBidiCatRightLeft@2$kUCBidiCatRightLeftArabic@12$kUCBidiCatRightLeftEmbedding@14$kUCBidiCatRightLeftIsolate@21$kUCBidiCatRightLeftOverride@16$kUCBidiCatSegmentSeparator@9$kUCBidiCatWhitespace@10$kUCCharPropTypeBidiCategory@3$kUCCharPropTypeCombiningClass@2$kUCCharPropTypeDecimalDigitValue@4$kUCCharPropTypeGenlCategory@1$kUCGenlCatLetterLowercase@15$kUCGenlCatLetterModifier@17$kUCGenlCatLetterOther@18$kUCGenlCatLetterTitlecase@16$kUCGenlCatLetterUppercase@14$kUCGenlCatMarkEnclosing@7$kUCGenlCatMarkNonSpacing@5$kUCGenlCatMarkSpacingCombining@6$kUCGenlCatNumberDecimalDigit@8$kUCGenlCatNumberLetter@9$kUCGenlCatNumberOther@10$kUCGenlCatOtherControl@1$kUCGenlCatOtherFormat@2$kUCGenlCatOtherNotAssigned@0$kUCGenlCatOtherPrivateUse@4$kUCGenlCatOtherSurrogate@3$kUCGenlCatPunctClose@23$kUCGenlCatPunctConnector@20$kUCGenlCatPunctDash@21$kUCGenlCatPunctFinalQuote@25$kUCGenlCatPunctInitialQuote@24$kUCGenlCatPunctOpen@22$kUCGenlCatPunctOther@26$kUCGenlCatSeparatorLine@12$kUCGenlCatSeparatorParagraph@13$kUCGenlCatSeparatorSpace@11$kUCGenlCatSymbolCurrency@29$kUCGenlCatSymbolMath@28$kUCGenlCatSymbolModifier@30$kUCGenlCatSymbolOther@31$kUCHighSurrogateRangeEnd@56319$kUCHighSurrogateRangeStart@55296$kUCLowSurrogateRangeEnd@57343$kUCLowSurrogateRangeStart@56320$kUnicode16BitFormat@0$kUnicode32BitFormat@3$kUnicodeByteOrderMark@65279$kUnicodeCanonicalCompVariant@3$kUnicodeCanonicalDecompVariant@2$kUnicodeFallbackInterruptSafeMask@4$kUnicodeFallbackSequencingMask@3$kUnicodeHFSPlusCompVariant@9$kUnicodeHFSPlusDecompVariant@8$kUnicodeMaxDecomposedVariant@2$kUnicodeNoCompatibilityVariant@1$kUnicodeNoComposedVariant@3$kUnicodeNoCorporateVariant@4$kUnicodeNoSubset@0$kUnicodeNormalizationFormC@3$kUnicodeNormalizationFormD@5$kUnicodeNotAChar@65535$kUnicodeObjectReplacement@65532$kUnicodeReplacementChar@65533$kUnicodeSCSUFormat@8$kUnicodeSwappedByteOrderMark@65534$kUnicodeUTF16BEFormat@4$kUnicodeUTF16Format@0$kUnicodeUTF16LEFormat@5$kUnicodeUTF32BEFormat@6$kUnicodeUTF32Format@3$kUnicodeUTF32LEFormat@7$kUnicodeUTF7Format@1$kUnicodeUTF8Format@2$kUseCurrentISA@0$kUseNativeISA@4$kWildcardCFragVersion@-1$kWindowsLatin1PalmVariant@1$kWindowsLatin1StandardVariant@0$kX86ISA@2$kX86RTA@32$''' -misc.update({'kTECMacOSXDispatchTableNameString': b'ConverterPluginGetPluginDispatchTable'}) -functions={'TECCountDirectTextEncodingConversions': (sel32or64(b'l^L', b'i^Q'), '', {'arguments': {0: {'type_modifier': 'o'}}}), 'TECConvertText': (sel32or64(b'l^{OpaqueTECObjectRef=}^CL^L^CL^L', b'i^{OpaqueTECObjectRef=}^CQ^Q^CQ^Q'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 3: {'type_modifier': 'o'}, 4: {'c_array_length_in_arg': (5, 6), 'type_modifier': 'o'}, 6: {'type_modifier': 'o'}}}), 'UCIsSurrogateLowCharacter': (b'ZT',), 'TECGetMailTextEncodings': (sel32or64(b'ls^LL^L', b'is^IQ^Q'), '', {'arguments': {1: {'c_array_length_in_arg': (2, 3), 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'UCGetUnicodeScalarValueForSurrogatePair': (sel32or64(b'LTT', b'ITT'),), 
'LocaleRefFromLocaleString': (sel32or64(b'l^t^^{OpaqueLocaleRef=}', b'i^t^^{OpaqueLocaleRef=}'), '', {'arguments': {0: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 1: {'type_modifier': 'o'}}}), 'GetTextEncodingBase': (sel32or64(b'LL', b'II'),), 'TECCreateConverterFromPath': (sel32or64(b'l^^{OpaqueTECObjectRef=}^LL', b'i^^{OpaqueTECObjectRef=}^IQ'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}, 1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'TECFlushText': (sel32or64(b'l^{OpaqueTECObjectRef=}^CL^L', b'i^{OpaqueTECObjectRef=}^CQ^Q'), '', {'arguments': {1: {'c_array_length_in_arg': (2, 3), 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'UpgradeScriptInfoToTextEncoding': (sel32or64(b'lsss^C^L', b'isss^C^I'), '', {'arguments': {3: {'type_modifier': 'n'}, 4: {'type_modifier': 'o'}}}), 'TECCountSubTextEncodings': (sel32or64(b'lL^L', b'iI^Q'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'TECGetDirectTextEncodingConversions': (sel32or64(b'l^{TECConversionInfo=LLSS}L^L', b'i^{TECConversionInfo=IISS}Q^Q'), '', {'arguments': {0: {'c_array_length_in_arg': (1, 2), 'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'GetTextEncodingName': (sel32or64(b'lLLsLL^L^s^L^t', b'iIIsIQ^Q^s^I^t'), '', {'arguments': {8: {'c_array_length_in_arg': (4, 5), 'type_modifier': 'o'}, 5: {'type_modifier': 'o'}, 6: {'type_modifier': 'o'}, 7: {'type_modifier': 'o'}}}), 'TECCreateConverter': (sel32or64(b'l^^{OpaqueTECObjectRef=}LL', b'i^^{OpaqueTECObjectRef=}II'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}}}), 'TECCountMailTextEncodings': (sel32or64(b'ls^L', b'is^Q'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'TECGetDestinationTextEncodings': (sel32or64(b'lL^LL^L', b'iI^IQ^Q'), '', {'arguments': {1: {'c_array_length_in_arg': (2, 3), 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'LocaleOperationGetName': (sel32or64(b'lL^{OpaqueLocaleRef=}L^L^t', b'iI^{OpaqueLocaleRef=}Q^Q^t'), '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'c_array_length_in_arg': (2, 3), 'type_modifier': 'o'}}}), 'LocaleOperationCountNames': (sel32or64(b'lL^L', b'iI^Q'),), 'TECDisposeConverter': (sel32or64(b'l^{OpaqueTECObjectRef=}', b'i^{OpaqueTECObjectRef=}'),), 'TECGetTextEncodingFromInternetName': (sel32or64(b'l^L^C', b'i^I^C'), '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}}}), 'TECFlushMultipleEncodings': (sel32or64(b'l^{OpaqueTECObjectRef=}^CL^L^{TextEncodingRun=LL}L^L', b'i^{OpaqueTECObjectRef=}^CQ^Q^{TextEncodingRun=QI}Q^Q'), '', {'arguments': {1: {'c_array_length_in_arg': (2, 3), 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'c_array_length_in_arg': (5, 6), 'type_modifier': 'o'}, 6: {'type_modifier': 'o'}}}), 'CSBackupIsItemExcluded': (b'Z^{__CFURL=}^Z', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CSDiskSpaceStartRecovery': (b'v^{__CFURL=}Qi^^{__CFUUID=}@@?', '', {'arguments': {3: {'type_modifier': 'o'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'Z'}, 2: {'type': 'Q'}, 3: {'type': '@'}}}}}}), 'GetTextEncodingFormat': (sel32or64(b'LL', b'II'),), 'TECCountAvailableTextEncodings': (sel32or64(b'l^L', b'i^Q'), '', {'arguments': {0: {'type_modifier': 'o'}}}), 'TECGetWebTextEncodings': (sel32or64(b'ls^LL^L', b'is^IQ^Q'), '', {'arguments': {1: {'c_array_length_in_arg': (2, 3), 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'TECCreateOneToManyConverter': (sel32or64(b'l^^{OpaqueTECObjectRef=}LL^L', b'i^^{OpaqueTECObjectRef=}IQ^I'), '', 
{'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}, 3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'TECGetInfo': (sel32or64(b'l^^^{TECInfo=SSLLL[32C][32C]SS}', b'i^^^{TECInfo=SSIII[32C][32C]SS}'), '', {'arguments': {0: {'type_modifier': 'o'}}}), 'RevertTextEncodingToScriptInfo': (sel32or64(b'lL^s^s[256C]', b'iI^s^s[256C]'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'TECCountAvailableSniffers': (sel32or64(b'l^L', b'i^Q'), '', {'arguments': {0: {'type_modifier': 'o'}}}), 'TECConvertTextToMultipleEncodings': (sel32or64(b'l^{OpaqueTECObjectRef=}^CL^L^CL^L^{TextEncodingRun=LL}L^L', b'i^{OpaqueTECObjectRef=}^CQ^Q^CQ^Q^{TextEncodingRun=QI}Q^Q'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 3: {'type_modifier': 'o'}, 4: {'c_array_length_in_arg': (5, 6), 'type_modifier': 'o'}, 6: {'type_modifier': 'o'}, 7: {'c_array_length_in_arg': (8, 9), 'type_modifier': 'o'}, 9: {'type_modifier': 'o'}}}), 'UCIsSurrogateHighCharacter': (b'ZT',), 'TECCopyTextEncodingInternetNameAndMIB': (sel32or64(b'lLL^^{__CFString=}^l', b'iII^^{__CFString=}^i'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'LocaleStringToLangAndRegionCodes': (sel32or64(b'l^t^s^s', b'i^t^s^s'), '', {'arguments': {0: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'TECCreateSniffer': (sel32or64(b'l^^{OpaqueTECSnifferObjectRef=}^LL', b'i^^{OpaqueTECSnifferObjectRef=}^IQ'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}, 1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'TECClearConverterContextInfo': (sel32or64(b'l^{OpaqueTECObjectRef=}', b'i^{OpaqueTECObjectRef=}'),), 'TECCountWebTextEncodings': (sel32or64(b'ls^L', b'is^Q'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'LocaleRefGetPartString': (sel32or64(b'l^{OpaqueLocaleRef=}LL^c', b'i^{OpaqueLocaleRef=}IQ^c'), '', {'arguments': {3: {'c_array_delimited_by_null': True, 'type_modifier': 'o'}}}), 'TECCountDestinationTextEncodings': (sel32or64(b'lL^L', b'iI^Q'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'TECSetBasicOptions': (sel32or64(b'l^{OpaqueTECObjectRef=}L', b'i^{OpaqueTECObjectRef=}I'),), 'CSBackupSetItemExcluded': (sel32or64(b'l^{__CFURL=}ZZ', b'i^{__CFURL=}ZZ'),), 'TECDisposeSniffer': (sel32or64(b'l^{OpaqueTECSnifferObjectRef=}', b'i^{OpaqueTECSnifferObjectRef=}'),), 'TECClearSnifferContextInfo': (sel32or64(b'l^{OpaqueTECSnifferObjectRef=}', b'i^{OpaqueTECSnifferObjectRef=}'),), 'UCGetCharProperty': (sel32or64(b'l^TLl^L', b'i^TQi^I'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'type_modifier': 'o'}}}), 'CSDiskSpaceGetRecoveryEstimate': (b'Q^{__CFURL=}',), 'NearestMacTextEncodings': (sel32or64(b'lL^L^L', b'iI^I^I'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CSDiskSpaceCancelRecovery': (b'v^{__CFUUID=}',), 'GetTextEncodingFromScriptInfo': (sel32or64(b'lsss^L', b'isss^I'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'TECGetTextEncodingFromInternetNameOrMIB': (sel32or64(b'l^LL^{__CFString=}l', b'i^II^{__CFString=}i'), '', {'arguments': {0: {'type_modifier': 'o'}, 2: {'type_modifier': 'n'}}}), 'TECSniffTextEncoding': (sel32or64(b'l^{OpaqueTECSnifferObjectRef=}^CL^LL^LL^LL', b'i^{OpaqueTECSnifferObjectRef=}^CQ^IQ^QQ^QQ'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 
'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 4, 'type_modifier': 'o'}, 5: {'c_array_length_in_arg': 6, 'type_modifier': 'o'}, 7: {'c_array_length_in_arg': 8, 'type_modifier': 'o'}}}), 'TECGetSubTextEncodings': (sel32or64(b'lL^LL^L', b'iI^IQ^Q'), '', {'arguments': {1: {'c_array_length_in_arg': (2, 3), 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'TECGetTextEncodingInternetName': (sel32or64(b'lL[256C]', b'iI[256C]'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'ResolveDefaultTextEncoding': (sel32or64(b'LL', b'II'),), 'TECGetAvailableSniffers': (sel32or64(b'l^LL^L', b'i^IQ^Q'), '', {'arguments': {0: {'c_array_length_in_arg': (1, 2), 'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'GetTextEncodingVariant': (sel32or64(b'LL', b'II'),), 'CreateTextEncoding': (sel32or64(b'LLLL', b'IIII'), '', {'retval': {'already_cfretained': True}}), 'LocaleRefFromLangOrRegionCode': (sel32or64(b'lss^^{OpaqueLocaleRef=}', b'iss^^{OpaqueLocaleRef=}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'TECGetAvailableTextEncodings': (sel32or64(b'l^LL^L', b'i^IQ^Q'), '', {'arguments': {0: {'c_array_length_in_arg': (1, 2), 'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'LocaleOperationGetIndName': (sel32or64(b'lLLL^L^t^^{OpaqueLocaleRef=}', b'iIQQ^Q^t^^{OpaqueLocaleRef=}'), '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'c_array_length_in_arg': (2, 3), 'type_modifier': 'o'}, 5: {'type_modifier': 'o'}}}), 'GetScriptInfoFromTextEncoding': (sel32or64(b'lL^s^s', b'iI^s^s'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}})} -cftypes=[('FSFileOperationRef', b'^{__FSFileOperation=}', None, None), ('FSFileSecurityRef', b'^{__FSFileSecurity=}', None, None)] -misc.update({'TECSnifferObjectRef': objc.createOpaquePointerType('TECSnifferObjectRef', b'^{OpaqueTECSnifferObjectRef=}'), 'TECObjectRef': objc.createOpaquePointerType('TECObjectRef', b'^{OpaqueTECObjectRef=}')}) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreServices/CarbonCore/_metadata.pyc b/env/lib/python2.7/site-packages/CoreServices/CarbonCore/_metadata.pyc deleted file mode 100644 index 5987a08e..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/CarbonCore/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/__init__.py b/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/__init__.py deleted file mode 100644 index de62ec6a..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the DictionaryServices framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import Foundation - -from CoreServices.DictionaryServices import _metadata - -sys.modules['CoreServices.DictionaryServices'] = mod = objc.ObjCLazyModule('DictionaryServices', - "com.apple.CoreServices", - objc.pathForFramework("/System/Library/Frameworks/CoreServices.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation,)) - -import sys -del sys.modules['CoreServices.DictionaryServices._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/__init__.pyc b/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/__init__.pyc deleted file mode 100644 index e0b400ee..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/_metadata.py b/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/_metadata.py deleted file mode 100644 index 58552746..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/_metadata.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Sep 19 17:27:15 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$$''' -misc.update({}) -functions={'DCSDictionaryGetTypeID': (b'l', '', {'comment': 'Function not present in header files'}), 'DCSGetTermRangeInString': (sel32or64(b'{_CFRange=ll}^{__DCSDictionary=}^{__CFString=}i', b'{_CFRange=qq}^{__DCSDictionary=}^{__CFString=}q'),), 'DCSCopyTextDefinition': (sel32or64(b'^{__CFString=}^{__DCSDictionary=}^{__CFString=}{_CFRange=ll}', b'^{__CFString=}^{__DCSDictionary=}^{__CFString=}{_CFRange=qq}'), '', {'retval': {'already_retained': True, 'already_cfretained': True}})} -cftypes=[('DCSDictionaryRef', b'^{__DCSDictionary=}', 'DCSDictionaryGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/_metadata.pyc b/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/_metadata.pyc deleted file mode 100644 index 62520946..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/DictionaryServices/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/LaunchServices/__init__.py b/env/lib/python2.7/site-packages/CoreServices/LaunchServices/__init__.py deleted file mode 100644 index 63512e91..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/LaunchServices/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the LaunchServices framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import objc, sys -import os -import Foundation - -from CoreServices.LaunchServices import _metadata - - -sys.modules['CoreServices.LaunchServices'] = mod = objc.ObjCLazyModule( - "LaunchServices", - "com.apple.CoreServices", - objc.pathForFramework('/System/Library/Frameworks/CoreServices.framework/CoreServices'), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['CoreServices.LaunchServices._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreServices/LaunchServices/__init__.pyc b/env/lib/python2.7/site-packages/CoreServices/LaunchServices/__init__.pyc deleted file mode 100644 index 1db6c9da..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/LaunchServices/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/LaunchServices/_metadata.py b/env/lib/python2.7/site-packages/CoreServices/LaunchServices/_metadata.py deleted file mode 100644 index 09a137e9..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/LaunchServices/_metadata.py +++ /dev/null @@ -1,28 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Oct 24 13:37:21 2016 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'LSItemInfoRecord': objc.createStructType('LSItemInfoRecord', sel32or64(b'{LSItemInfoRecord=LLL^{__CFString=}^{__CFString=}L}', b'{LSItemInfoRecord=III^{__CFString=}}'), sel32or64(['flags', 'filetype', 'creator', 'extension', 'iconFileName', 'kindID'], ['flags', 'filetype', 'creator', 'extension']), None, 2), 'LSLaunchFSRefSpec': objc.createStructType('LSLaunchFSRefSpec', sel32or64(b'{LSLaunchFSRefSpec=^{FSRef=[80C]}L^{FSRef=[80C]}^{AEDesc=L^^{OpaqueAEDataStorageType=}}L^v}', b'{LSLaunchFSRefSpec=^{FSRef=[80C]}Q^{FSRef=[80C]}^{AEDesc=I^^{OpaqueAEDataStorageType=}}I^v}'), ['appRef', 'numDocs', 'itemRefs', 'passThruParams', 'launchFlags', 'asyncRefCon'], None, 2), 'LSApplicationParameters': objc.createStructType('LSApplicationParameters', sel32or64(b'{LSApplicationParameters=lL^{FSRef=[80C]}^v^{__CFDictionary=}^{__CFArray=}^{AEDesc=L^^{OpaqueAEDataStorageType=}}}', b'{LSApplicationParameters=qI^{FSRef=[80C]}^v^{__CFDictionary=}^{__CFArray=}^{AEDesc=I^^{OpaqueAEDataStorageType=}}}'), ['version', 'flags', 'application', 'asyncLaunchRefCon', 'environment', 'argv', 'initialEvent']), 'LSLaunchURLSpec': objc.createStructType('LSLaunchURLSpec', sel32or64(b'{LSLaunchURLSpec=^{__CFURL=}^{__CFArray=}^{AEDesc=L^^{OpaqueAEDataStorageType=}}L^v}', b'{LSLaunchURLSpec=^{__CFURL=}^{__CFArray=}^{AEDesc=I^^{OpaqueAEDataStorageType=}}I^v}'), ['appURL', 'itemURLs', 'passThruParams', 'launchFlags', 'asyncRefCon'], None, 2)}) -constants = 
'''$kLSItemContentType@^{__CFString=}$kLSItemDisplayKind@^{__CFString=}$kLSItemDisplayName@^{__CFString=}$kLSItemExtension@^{__CFString=}$kLSItemExtensionIsHidden@^{__CFString=}$kLSItemFileCreator@^{__CFString=}$kLSItemFileType@^{__CFString=}$kLSItemIsInvisible@^{__CFString=}$kLSItemQuarantineProperties@^{__CFString=}$kLSItemRoleHandlerDisplayName@^{__CFString=}$kLSQuarantineAgentBundleIdentifierKey@^{__CFString=}$kLSQuarantineAgentNameKey@^{__CFString=}$kLSQuarantineDataURLKey@^{__CFString=}$kLSQuarantineOriginURLKey@^{__CFString=}$kLSQuarantineTimeStampKey@^{__CFString=}$kLSQuarantineTypeCalendarEventAttachment@^{__CFString=}$kLSQuarantineTypeEmailAttachment@^{__CFString=}$kLSQuarantineTypeInstantMessageAttachment@^{__CFString=}$kLSQuarantineTypeKey@^{__CFString=}$kLSQuarantineTypeOtherAttachment@^{__CFString=}$kLSQuarantineTypeOtherDownload@^{__CFString=}$kLSQuarantineTypeWebDownload@^{__CFString=}$kLSSharedFileListFavoriteItems@^{__CFString=}$kLSSharedFileListFavoriteVolumes@^{__CFString=}$kLSSharedFileListGlobalLoginItems@^{__CFString=}$kLSSharedFileListItemBeforeFirst@==^{OpaqueLSSharedFileListItemRef=}$kLSSharedFileListItemHidden@^{__CFString=}$kLSSharedFileListItemLast@==^{OpaqueLSSharedFileListItemRef=}$kLSSharedFileListLoginItemHidden@^{__CFString=}$kLSSharedFileListRecentApplicationItems@^{__CFString=}$kLSSharedFileListRecentDocumentItems@^{__CFString=}$kLSSharedFileListRecentItemsMaxAmount@^{__CFString=}$kLSSharedFileListRecentServerItems@^{__CFString=}$kLSSharedFileListSessionLoginItems@^{__CFString=}$kLSSharedFileListVolumesComputerVisible@^{__CFString=}$kLSSharedFileListVolumesIDiskVisible@^{__CFString=}$kLSSharedFileListVolumesNetworkVisible@^{__CFString=}$kUTExportedTypeDeclarationsKey@^{__CFString=}$kUTImportedTypeDeclarationsKey@^{__CFString=}$kUTTagClassFilenameExtension@^{__CFString=}$kUTTagClassMIMEType@^{__CFString=}$kUTTagClassNSPboardType@^{__CFString=}$kUTTagClassOSType@^{__CFString=}$kUTType3DContent@^{__CFString=}$kUTTypeAVIMovie@^{__CFString=}$kUTTypeAliasFile@^{__CFString=}$kUTTypeAliasRecord@^{__CFString=}$kUTTypeAppleICNS@^{__CFString=}$kUTTypeAppleProtectedMPEG4Audio@^{__CFString=}$kUTTypeAppleProtectedMPEG4Video@^{__CFString=}$kUTTypeAppleScript@^{__CFString=}$kUTTypeApplication@^{__CFString=}$kUTTypeApplicationBundle@^{__CFString=}$kUTTypeApplicationFile@^{__CFString=}$kUTTypeArchive@^{__CFString=}$kUTTypeAssemblyLanguageSource@^{__CFString=}$kUTTypeAudio@^{__CFString=}$kUTTypeAudioInterchangeFileFormat@^{__CFString=}$kUTTypeAudiovisualContent@^{__CFString=}$kUTTypeBMP@^{__CFString=}$kUTTypeBinaryPropertyList@^{__CFString=}$kUTTypeBookmark@^{__CFString=}$kUTTypeBundle@^{__CFString=}$kUTTypeBzip2Archive@^{__CFString=}$kUTTypeCHeader@^{__CFString=}$kUTTypeCPlusPlusHeader@^{__CFString=}$kUTTypeCPlusPlusSource@^{__CFString=}$kUTTypeCSource@^{__CFString=}$kUTTypeCalendarEvent@^{__CFString=}$kUTTypeCommaSeparatedText@^{__CFString=}$kUTTypeCompositeContent@^{__CFString=}$kUTTypeConformsToKey@^{__CFString=}$kUTTypeContact@^{__CFString=}$kUTTypeContent@^{__CFString=}$kUTTypeData@^{__CFString=}$kUTTypeDatabase@^{__CFString=}$kUTTypeDelimitedText@^{__CFString=}$kUTTypeDescriptionKey@^{__CFString=}$kUTTypeDirectory@^{__CFString=}$kUTTypeDiskImage@^{__CFString=}$kUTTypeElectronicPublication@^{__CFString=}$kUTTypeEmailMessage@^{__CFString=}$kUTTypeExecutable@^{__CFString=}$kUTTypeFileURL@^{__CFString=}$kUTTypeFlatRTFD@^{__CFString=}$kUTTypeFolder@^{__CFString=}$kUTTypeFont@^{__CFString=}$kUTTypeFramework@^{__CFString=}$kUTTypeGIF@^{__CFString=}$kUTTypeGNUZipArchive@^{__
CFString=}$kUTTypeHTML@^{__CFString=}$kUTTypeICO@^{__CFString=}$kUTTypeIconFileKey@^{__CFString=}$kUTTypeIdentifierKey@^{__CFString=}$kUTTypeImage@^{__CFString=}$kUTTypeInkText@^{__CFString=}$kUTTypeInternetLocation@^{__CFString=}$kUTTypeItem@^{__CFString=}$kUTTypeJPEG@^{__CFString=}$kUTTypeJPEG2000@^{__CFString=}$kUTTypeJSON@^{__CFString=}$kUTTypeJavaArchive@^{__CFString=}$kUTTypeJavaClass@^{__CFString=}$kUTTypeJavaScript@^{__CFString=}$kUTTypeJavaSource@^{__CFString=}$kUTTypeLog@^{__CFString=}$kUTTypeM3UPlaylist@^{__CFString=}$kUTTypeMIDIAudio@^{__CFString=}$kUTTypeMP3@^{__CFString=}$kUTTypeMPEG@^{__CFString=}$kUTTypeMPEG2TransportStream@^{__CFString=}$kUTTypeMPEG2Video@^{__CFString=}$kUTTypeMPEG4@^{__CFString=}$kUTTypeMPEG4Audio@^{__CFString=}$kUTTypeMessage@^{__CFString=}$kUTTypeMountPoint@^{__CFString=}$kUTTypeMovie@^{__CFString=}$kUTTypeOSAScript@^{__CFString=}$kUTTypeOSAScriptBundle@^{__CFString=}$kUTTypeObjectiveCPlusPlusSource@^{__CFString=}$kUTTypeObjectiveCSource@^{__CFString=}$kUTTypePDF@^{__CFString=}$kUTTypePHPScript@^{__CFString=}$kUTTypePICT@^{__CFString=}$kUTTypePKCS12@^{__CFString=}$kUTTypePNG@^{__CFString=}$kUTTypePackage@^{__CFString=}$kUTTypePerlScript@^{__CFString=}$kUTTypePlainText@^{__CFString=}$kUTTypePlaylist@^{__CFString=}$kUTTypePluginBundle@^{__CFString=}$kUTTypePresentation@^{__CFString=}$kUTTypePropertyList@^{__CFString=}$kUTTypePythonScript@^{__CFString=}$kUTTypeQuickLookGenerator@^{__CFString=}$kUTTypeQuickTimeImage@^{__CFString=}$kUTTypeQuickTimeMovie@^{__CFString=}$kUTTypeRTF@^{__CFString=}$kUTTypeRTFD@^{__CFString=}$kUTTypeRawImage@^{__CFString=}$kUTTypeReferenceURLKey@^{__CFString=}$kUTTypeResolvable@^{__CFString=}$kUTTypeRubyScript@^{__CFString=}$kUTTypeScalableVectorGraphics@^{__CFString=}$kUTTypeScript@^{__CFString=}$kUTTypeShellScript@^{__CFString=}$kUTTypeSourceCode@^{__CFString=}$kUTTypeSpotlightImporter@^{__CFString=}$kUTTypeSpreadsheet@^{__CFString=}$kUTTypeSwiftSource$kUTTypeSymLink@^{__CFString=}$kUTTypeSystemPreferencesPane@^{__CFString=}$kUTTypeTIFF@^{__CFString=}$kUTTypeTXNTextAndMultimediaData@^{__CFString=}$kUTTypeTabSeparatedText@^{__CFString=}$kUTTypeTagSpecificationKey@^{__CFString=}$kUTTypeText@^{__CFString=}$kUTTypeToDoItem@^{__CFString=}$kUTTypeURL@^{__CFString=}$kUTTypeURLBookmarkData@^{__CFString=}$kUTTypeUTF16ExternalPlainText@^{__CFString=}$kUTTypeUTF16PlainText@^{__CFString=}$kUTTypeUTF8PlainText@^{__CFString=}$kUTTypeUTF8TabSeparatedText@^{__CFString=}$kUTTypeUnixExecutable@^{__CFString=}$kUTTypeVCard@^{__CFString=}$kUTTypeVersionKey@^{__CFString=}$kUTTypeVideo@^{__CFString=}$kUTTypeVolume@^{__CFString=}$kUTTypeWaveformAudio@^{__CFString=}$kUTTypeWebArchive@^{__CFString=}$kUTTypeWindowsExecutable@^{__CFString=}$kUTTypeX509Certificate@^{__CFString=}$kUTTypeXML@^{__CFString=}$kUTTypeXMLPropertyList@^{__CFString=}$kUTTypeXPCService@^{__CFString=}$kUTTypeZipArchive@^{__CFString=}$''' -enums = 
'''$appleMenuFolderIconResource@-3982$controlPanelFolderIconResource@-3976$desktopIconResource@-3992$dropFolderIconResource@-3979$extensionsFolderIconResource@-3973$floppyIconResource@-3998$fontsFolderIconResource@-3968$fullTrashIconResource@-3984$genericApplicationIconResource@-3996$genericCDROMIconResource@-3987$genericDeskAccessoryIconResource@-3991$genericDocumentIconResource@-4000$genericEditionFileIconResource@-3989$genericExtensionIconResource@-16415$genericFileServerIconResource@-3972$genericFolderIconResource@-3999$genericHardDiskIconResource@-3995$genericMoverObjectIconResource@-3969$genericPreferencesIconResource@-3971$genericQueryDocumentIconResource@-16506$genericRAMDiskIconResource@-3988$genericStationeryIconResource@-3985$genericSuitcaseIconResource@-3970$kAFPServerIcon@1634103411$kAlertCautionBadgeIcon@1667392615$kAlertCautionIcon@1667331444$kAlertNoteIcon@1852798053$kAlertStopIcon@1937010544$kAliasBadgeIcon@1633838183$kAppearanceFolderIcon@1634758770$kAppleExtrasFolderIcon@1634040004$kAppleLogoIcon@1667330156$kAppleMenuFolderIcon@1634561653$kAppleMenuFolderIconResource@-3982$kAppleMenuIcon@1935765612$kAppleScriptBadgeIcon@1935897200$kAppleTalkIcon@1635019883$kAppleTalkZoneIcon@1635023470$kApplicationSupportFolderIcon@1634956656$kApplicationsFolderIcon@1634758771$kAssistantsFolderIcon@1634956484$kBackwardArrowIcon@1650553455$kBurningIcon@1651864174$kClipboardIcon@1129072976$kClippingPictureTypeIcon@1668051056$kClippingSoundTypeIcon@1668051059$kClippingTextTypeIcon@1668051060$kClippingUnknownTypeIcon@1668051061$kColorSyncFolderIcon@1886547814$kComputerIcon@1919905652$kConnectToIcon@1668178804$kContextualMenuItemsFolderIcon@1668116085$kControlPanelDisabledFolderIcon@1668575812$kControlPanelFolderIcon@1668575852$kControlPanelFolderIconResource@-3976$kControlStripModulesFolderIcon@1935963844$kDeleteAliasIcon@1684106345$kDesktopIcon@1684370283$kDesktopIconResource@-3992$kDocumentsFolderIcon@1685021555$kDropFolderIcon@1684172664$kDropFolderIconResource@-3979$kEjectMediaIcon@1701471587$kExtensionsDisabledFolderIcon@1702392900$kExtensionsFolderIcon@1702392942$kExtensionsFolderIconResource@-3973$kFTPServerIcon@1718906995$kFavoriteItemsIcon@1717663346$kFavoritesFolderIcon@1717663347$kFinderIcon@1179534418$kFloppyIconResource@-3998$kFontSuitcaseIcon@1179011404$kFontsFolderIcon@1718578804$kFontsFolderIconResource@-3968$kForwardArrowIcon@1717662319$kFullTrashIcon@1718907496$kFullTrashIconResource@-3984$kGenericApplicationIcon@1095782476$kGenericApplicationIconResource@-3996$kGenericCDROMIcon@1667523698$kGenericCDROMIconResource@-3987$kGenericComponentIcon@1953001063$kGenericControlPanelIcon@1095782467$kGenericControlStripModuleIcon@1935959414$kGenericDeskAccessoryIcon@1095782468$kGenericDeskAccessoryIconResource@-3991$kGenericDocumentIcon@1685021557$kGenericDocumentIconResource@-4000$kGenericEditionFileIcon@1701082214$kGenericEditionFileIconResource@-3989$kGenericExtensionIcon@1229867348$kGenericExtensionIconResource@-16415$kGenericFileServerIcon@1936881266$kGenericFileServerIconResource@-3972$kGenericFloppyIcon@1718382713$kGenericFolderIcon@1718379634$kGenericFolderIconResource@-3999$kGenericFontIcon@1717987692$kGenericFontScalerIcon@1935895666$kGenericHardDiskIcon@1751413611$kGenericHardDiskIconResource@-3995$kGenericIDiskIcon@1768190827$kGenericMoverObjectIcon@1836021362$kGenericMoverObjectIconResource@-3969$kGenericNetworkIcon@1735288180$kGenericPCCardIcon@1885564259$kGenericPreferencesIcon@1886545254$kGenericPreferencesIconResource@-3971$kGenericQueryDocumentIcon@1902473849$kGeneric
QueryDocumentIconResource@-16506$kGenericRAMDiskIcon@1918987620$kGenericRAMDiskIconResource@-3988$kGenericRemovableMediaIcon@1919774582$kGenericSharedLibaryIcon@1936223330$kGenericStationeryIcon@1935961955$kGenericStationeryIconResource@-3985$kGenericSuitcaseIcon@1937074548$kGenericSuitcaseIconResource@-3970$kGenericURLIcon@1735750252$kGenericWORMIcon@2003792493$kGenericWindowIcon@1735879022$kGridIcon@1735551332$kGroupIcon@1735554416$kGuestUserIcon@1735750514$kHTTPServerIcon@1752461427$kHelpFolderIcon@-999789456$kHelpIcon@1751477360$kHelpIconResource@-20271$kIPFileServerIcon@1769173622$kIconServicesCatalogInfoMask@531550$kIconServicesNoBadgeFlag@1$kIconServicesNormalUsageFlag@0$kIconServicesUpdateIfNeededFlag@2$kInternationResourcesIcon@1768319340$kInternationalResourcesIcon@1768319340$kInternetFolderIcon@1768846532$kInternetLocationAppleShareIcon@1768710502$kInternetLocationAppleTalkZoneIcon@1768710516$kInternetLocationFTPIcon@1768711796$kInternetLocationFileIcon@1768711785$kInternetLocationGenericIcon@1768712037$kInternetLocationHTTPIcon@1768712308$kInternetLocationMailIcon@1768713569$kInternetLocationNSLNeighborhoodIcon@1768713843$kInternetLocationNewsIcon@1768713847$kInternetPlugInFolderIcon@-999398028$kInternetSearchSitesFolderIcon@1769173862$kKeepArrangedIcon@1634889319$kKeyboardLayoutIcon@1801873772$kLSAcceptAllowLoginUI@2$kLSAcceptDefault@1$kLSAppDoesNotClaimTypeErr@-10820$kLSAppDoesNotSupportSchemeWarning@-10821$kLSAppInTrashErr@-10660$kLSApplicationNotFoundErr@-10814$kLSAttributeNotFoundErr@-10662$kLSAttributeNotSettableErr@-10663$kLSCannotSetInfoErr@-10823$kLSDataErr@-10817$kLSDataTooOldErr@-10816$kLSDataUnavailableErr@-10813$kLSExecutableIncorrectFormat@-10661$kLSHandlerOptionsDefault@0$kLSHandlerOptionsIgnoreCreator@1$kLSIncompatibleApplicationVersionErr@-10664$kLSIncompatibleSystemVersionErr@-10825$kLSInitializeDefaults@1$kLSItemInfoAppIsScriptable@2048$kLSItemInfoAppPrefersClassic@1024$kLSItemInfoAppPrefersNative@512$kLSItemInfoExtensionIsHidden@1048576$kLSItemInfoIsAliasFile@16$kLSItemInfoIsApplication@4$kLSItemInfoIsClassicApp@256$kLSItemInfoIsContainer@8$kLSItemInfoIsInvisible@64$kLSItemInfoIsNativeApp@128$kLSItemInfoIsPackage@2$kLSItemInfoIsPlainFile@1$kLSItemInfoIsSymlink@32$kLSItemInfoIsVolume@4096$kLSLaunchAndDisplayErrors@64$kLSLaunchAndHide@1048576$kLSLaunchAndHideOthers@2097152$kLSLaunchAndPrint@2$kLSLaunchAsync@65536$kLSLaunchDefaults@1$kLSLaunchDontAddToRecents@256$kLSLaunchDontSwitch@512$kLSLaunchHasUntrustedContents@4194304$kLSLaunchInClassic@262144$kLSLaunchInProgressErr@-10818$kLSLaunchInhibitBGOnly@128$kLSLaunchNewInstance@524288$kLSLaunchNoParams@2048$kLSLaunchReserved2@4$kLSLaunchReserved3@8$kLSLaunchReserved4@16$kLSLaunchReserved5@32$kLSLaunchStartClassic@131072$kLSMinCatInfoBitmap@6154$kLSMultipleSessionsNotSupportedErr@-10829$kLSNoClassicEnvironmentErr@-10828$kLSNoExecutableErr@-10827$kLSNoLaunchPermissionErr@-10826$kLSNoRegistrationInfoErr@-10824$kLSNoRosettaEnvironmentErr@-10665$kLSNotAnApplicationErr@-10811$kLSNotInitializedErr@-10812$kLSNotRegisteredErr@-10819$kLSRequestAllFlags@16$kLSRequestAllInfo@4294967295$kLSRequestAppTypeFlags@8$kLSRequestBasicFlagsOnly@4$kLSRequestExtension@1$kLSRequestExtensionFlagsOnly@64$kLSRequestIconAndKind@32$kLSRequestTypeCreator@2$kLSRolesAll@4294967295$kLSRolesEditor@4$kLSRolesNone@1$kLSRolesShell@8$kLSRolesViewer@2$kLSServerCommunicationErr@-10822$kLSSharedFileListDoNotMountVolumes@2$kLSSharedFileListNoUserInteraction@1$kLSUnknownCreator@0$kLSUnknownErr@-10810$kLSUnknownKindID@0$kLSUnknownType@0$kLSUnknownTypeErr@-10
815$kLocalesFolderIcon@-999526557$kLockedBadgeIcon@1818387559$kLockedIcon@1819239275$kMacOSReadMeFolderIcon@1836020420$kMountedBadgeIcon@1835164775$kMountedFolderIcon@1835955300$kMountedFolderIconResource@-3977$kNoFilesIcon@1852205420$kNoFolderIcon@1852206180$kNoWriteIcon@1853321844$kOpenFolderIcon@1868983396$kOpenFolderIconResource@-3997$kOwnedFolderIcon@1870098020$kOwnedFolderIconResource@-3980$kOwnerIcon@1937077106$kPreferencesFolderIcon@1886545604$kPreferencesFolderIconResource@-3974$kPrintMonitorFolderIcon@1886547572$kPrintMonitorFolderIconResource@-3975$kPrinterDescriptionFolderIcon@1886413926$kPrinterDriverFolderIcon@-999263644$kPrivateFolderIcon@1886549606$kPrivateFolderIconResource@-3994$kProtectedApplicationFolderIcon@1885433968$kProtectedSystemFolderIcon@1886615923$kPublicFolderIcon@1886741094$kQuestionMarkIcon@1903519091$kRecentApplicationsFolderIcon@1918988400$kRecentDocumentsFolderIcon@1919184739$kRecentItemsIcon@1919118964$kRecentServersFolderIcon@1920168566$kRightContainerArrowIcon@1919115634$kScriptingAdditionsFolderIcon@-999070862$kScriptsFolderIcon@1935897284$kSharedBadgeIcon@1935828071$kSharedFolderIcon@1936221804$kSharedFolderIconResource@-3978$kSharedLibrariesFolderIcon@-999528094$kSharingPrivsNotApplicableIcon@1936223841$kSharingPrivsReadOnlyIcon@1936224879$kSharingPrivsReadWriteIcon@1936224887$kSharingPrivsUnknownIcon@1936225643$kSharingPrivsWritableIcon@2003986804$kShortcutIcon@1936224884$kShutdownItemsDisabledFolderIcon@1936221252$kShutdownItemsFolderIcon@1936221286$kSortAscendingIcon@1634954852$kSortDescendingIcon@1685286500$kSoundFileIcon@1936091500$kSpeakableItemsFolder@1936747369$kStartupFolderIconResource@-3981$kStartupItemsDisabledFolderIcon@1937011268$kStartupItemsFolderIcon@1937011316$kSystemExtensionDisabledFolderIcon@1835098948$kSystemFolderIcon@1835098995$kSystemFolderIconResource@-3983$kSystemIconsCreator@1835098995$kSystemSuitcaseIcon@2054388083$kTextEncodingsFolderIcon@-999004808$kToolbarAdvancedIcon@1952604534$kToolbarApplicationsFolderIcon@1950445683$kToolbarCustomizeIcon@1952675187$kToolbarDeleteIcon@1952736620$kToolbarDesktopFolderIcon@1950643051$kToolbarDocumentsFolderIcon@1950642019$kToolbarDownloadsFolderIcon@1950644078$kToolbarFavoritesIcon@1952866678$kToolbarHomeIcon@1953001325$kToolbarInfoIcon@1952606574$kToolbarLabelsIcon@1952607330$kToolbarLibraryFolderIcon@1951164770$kToolbarMovieFolderIcon@1951231862$kToolbarMusicFolderIcon@1951233395$kToolbarPicturesFolderIcon@1951426915$kToolbarPublicFolderIcon@1951429986$kToolbarSitesFolderIcon@1951626355$kToolbarUtilitiesFolderIcon@1951757420$kTrashIcon@1953657704$kTrashIconResource@-3993$kTrueTypeFlatFontIcon@1936092788$kTrueTypeFontIcon@1952868716$kTrueTypeMultiFlatFontIcon@1953784678$kUnknownFSObjectIcon@1970169459$kUnlockedIcon@1970037611$kUserFolderIcon@1969646692$kUserIDiskIcon@1969517419$kUserIcon@1970496882$kUsersFolderIcon@1970500292$kUtilitiesFolderIcon@1970563524$kVoicesFolderIcon@1719037795$kWorkgroupFolderIcon@2003201124$mountedFolderIconResource@-3977$openFolderIconResource@-3997$ownedFolderIconResource@-3980$preferencesFolderIconResource@-3974$printMonitorFolderIconResource@-3975$privateFolderIconResource@-3994$sharedFolderIconResource@-3978$startupFolderIconResource@-3981$systemFolderIconResource@-3983$trashIconResource@-3993$''' -misc.update({}) -functions={'LSSharedFileListItemCopyDisplayName': (b'^{__CFString=}^{OpaqueLSSharedFileListItemRef=}', '', {'retval': {'already_cfretained': True}}), '_LSCopyAllApplicationURLs': (b'v^@', '', {'arguments': {0: {'already_retained': True, 
'type_modifier': 'o'}}}), 'LSCopyItemInfoForRef': (sel32or64(b'l^{FSRef=[80C]}L^{LSItemInfoRecord=LLL^{__CFString=}^{__CFString=}L}', b'i^{FSRef=[80C]}I^{LSItemInfoRecord=III^{__CFString=}}'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'GetIconRefFromTypeInfo': (sel32or64(b'sLL^{__CFString=}^{__CFString=}L^^{OpaqueIconRef=}', b'sII^{__CFString=}^{__CFString=}I^^{OpaqueIconRef=}'), '', {'arguments': {5: {'type_modifier': 'o'}}}), 'ReadIconFromFSRef': (sel32or64(b'l^{FSRef=[80C]}^^^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}', b'i^{FSRef=[80C]}^^^{IconFamilyResource=Ii[1{IconFamilyElement=Ii[1C]}]}'), '', {'arguments': {0: {'type_modifier': 'n'}}}), 'LSSharedFileListRemoveAllItems': (sel32or64(b'l^{OpaqueLSSharedFileListRef=}', b'i^{OpaqueLSSharedFileListRef=}'),), 'LSCopyItemAttribute': (sel32or64(b'l^{FSRef=[80C]}L^{__CFString=}^@', b'i^{FSRef=[80C]}I^{__CFString=}^@'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}, 3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'LSSharedFileListItemMove': (sel32or64(b'l^{OpaqueLSSharedFileListRef=}^{OpaqueLSSharedFileListItemRef=}^{OpaqueLSSharedFileListItemRef=}', b'i^{OpaqueLSSharedFileListRef=}^{OpaqueLSSharedFileListItemRef=}^{OpaqueLSSharedFileListItemRef=}'),), 'LSSetDefaultRoleHandlerForContentType': (sel32or64(b'l^{__CFString=}L^{__CFString=}', b'i^{__CFString=}I^{__CFString=}'),), 'LSSetHandlerOptionsForContentType': (sel32or64(b'l^{__CFString=}L', b'i^{__CFString=}I'),), 'LSSharedFileListGetTypeID': (sel32or64(b'L', b'Q'),), 'LSInit': (sel32or64(b'lL', b'iI'),), 'LSCopyDefaultHandlerForURLScheme': (b'^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'LSCopyAllRoleHandlersForContentType': (sel32or64(b'^{__CFArray=}^{__CFString=}L', b'^{__CFArray=}^{__CFString=}I'), '', {'retval': {'already_cfretained': True}}), 'IsDataAvailableInIconRef': (sel32or64(b'ZL^{OpaqueIconRef=}', b'ZI^{OpaqueIconRef=}'),), 'IsValidIconRef': (b'Z^{OpaqueIconRef=}',), 'LSCanRefAcceptItem': (sel32or64(b'l^{FSRef=[80C]}^{FSRef=[80C]}LL^Z', b'i^{FSRef=[80C]}^{FSRef=[80C]}II^Z'), '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'type_modifier': 'n'}, 4: {'type_modifier': 'o'}}}), 'LSCopyKindStringForTypeInfo': (sel32or64(b'lLL^{__CFString=}^^{__CFString=}', b'iII^{__CFString=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'UTTypeCopyPreferredTagWithClass': (b'^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'FlushIconRefs': (b'sLL',), 'LSSetExtensionHiddenForURL': (sel32or64(b'l^{__CFURL=}Z', b'i^{__CFURL=}Z'),), 'LSOpenFSRef': (sel32or64(b'l^{FSRef=[80C]}^{FSRef=[80C]}', b'i^{FSRef=[80C]}^{FSRef=[80C]}'), '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'type_modifier': 'o'}}}), 'LSSharedFileListInsertItemFSRef': (b'^{OpaqueLSSharedFileListItemRef=}^{OpaqueLSSharedFileListRef=}^{OpaqueLSSharedFileListItemRef=}^{__CFString=}^{OpaqueIconRef=}^{FSRef=[80C]}^{__CFDictionary=}^{__CFArray=}', '', {'arguments': {4: {'type_modifier': 'n'}}}), 'RegisterIconRefFromIconFile': (b'sLL^{FSSpec=sl[64C]}^^{OpaqueIconRef=}', '', {'arguments': {3: {'type_modifier': 'o'}}}), 'LSCopyItemAttributes': (sel32or64(b'l^{FSRef=[80C]}L^{__CFArray=}^^{__CFDictionary=}', b'i^{FSRef=[80C]}I^{__CFArray=}^^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}, 3: 
{'type_modifier': 'o'}}}), 'LSSharedFileListItemSetProperty': (sel32or64(b'l^{OpaqueLSSharedFileListItemRef=}^{__CFString=}@', b'i^{OpaqueLSSharedFileListItemRef=}^{__CFString=}@'),), 'UTTypeCreateAllIdentifiersForTag': (b'^{__CFArray=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'UTTypeCopyDeclaringBundleURL': (b'^{__CFURL=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'LSGetHandlerOptionsForContentType': (sel32or64(b'L^{__CFString=}', b'I^{__CFString=}'),), 'LSTerm': (sel32or64(b'l', b'i'),), 'LSSharedFileListItemCopyProperty': (b'@^{OpaqueLSSharedFileListItemRef=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'UpdateIconRef': (b's^{OpaqueIconRef=}',), 'LSGetApplicationForInfo': (sel32or64(b'lLL^{__CFString=}L^{FSRef=[80C]}^^{__CFURL=}', b'iII^{__CFString=}I^{FSRef=[80C]}^^{__CFURL=}'), '', {'arguments': {4: {'type_modifier': 'o'}, 5: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'LSSharedFileListItemGetTypeID': (sel32or64(b'L', b'Q'),), 'GetIconRefFromComponent': (sel32or64(b'l^{ComponentRecord=[1l]}^^{OpaqueIconRef=}', b'i^{ComponentRecord=[1q]}^^{OpaqueIconRef=}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'UTTypeCopyDeclaration': (b'^{__CFDictionary=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'LSSharedFileListSetProperty': (sel32or64(b'l^{OpaqueLSSharedFileListRef=}^{__CFString=}@', b'i^{OpaqueLSSharedFileListRef=}^{__CFString=}@'),), 'LSSharedFileListItemCopyResolvedURL': (sel32or64(b'^{__CFURL=}^{OpaqueLSSharedFileListItemRef=}L^^{__CFError=}', b'^{__CFURL=}^{OpaqueLSSharedFileListItemRef=}I^^{__CFError=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'LSSharedFileListRemoveObserver': (b'v^{OpaqueLSSharedFileListRef=}^{__CFRunLoop=}^{__CFString=}^?^v', '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{OpaqueLSSharedFileListRef=}'}, 1: {'type': b'^v'}}}}}}), 'RegisterIconRefFromFSRef': (sel32or64(b'lLL^{FSRef=[80C]}^^{OpaqueIconRef=}', b'iII^{FSRef=[80C]}^^{OpaqueIconRef=}'), '', {'arguments': {2: {'type_modifier': 'n'}}}), 'LSCopyApplicationForMIMEType': (sel32or64(b'l^{__CFString=}L^^{__CFURL=}', b'i^{__CFString=}I^^{__CFURL=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'ReleaseIconRef': (b's^{OpaqueIconRef=}',), 'UTTypeCreatePreferredIdentifierForTag': (b'^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'LSSharedFileListItemResolve': (sel32or64(b'l^{OpaqueLSSharedFileListItemRef=}L^^{__CFURL=}^{FSRef=[80C]}', b'i^{OpaqueLSSharedFileListItemRef=}I^^{__CFURL=}^{FSRef=[80C]}'), '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'GetIconRefFromIconFamilyPtr': (sel32or64(b'l^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}l^^{OpaqueIconRef=}', b'i^{IconFamilyResource=Ii[1{IconFamilyElement=Ii[1C]}]}q^^{OpaqueIconRef=}'), '', {'arguments': {0: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'LSSharedFileListCreate': (b'^{OpaqueLSSharedFileListRef=}^{__CFAllocator=}^{__CFString=}@', '', {'retval': {'already_cfretained': True}}), 'WriteIconFile': (b's^^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}^{FSSpec=sl[64C]}',), 'OverrideIconRef': (b's^{OpaqueIconRef=}^{OpaqueIconRef=}',), 'LSSharedFileListCopyProperty': 
(b'@^{OpaqueLSSharedFileListRef=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'IsIconRefComposite': (b's^{OpaqueIconRef=}^^{OpaqueIconRef=}^^{OpaqueIconRef=}', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'LSCanURLAcceptURL': (sel32or64(b'l^{__CFURL=}^{__CFURL=}LL^Z', b'i^{__CFURL=}^{__CFURL=}II^Z'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'GetIconRefFromFile': (b's^{FSSpec=sl[64C]}^^{OpaqueIconRef=}^s', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'RemoveIconRefOverride': (b's^{OpaqueIconRef=}',), 'LSSharedFileListSetAuthorization': (sel32or64(b'l^{OpaqueLSSharedFileListRef=}^{AuthorizationOpaqueRef=}', b'i^{OpaqueLSSharedFileListRef=}^{AuthorizationOpaqueRef=}'),), 'LSOpenItemsWithRole': (sel32or64(b'l^{FSRef=[80C]}lL^{AEKeyDesc=L{AEDesc=L^^{OpaqueAEDataStorageType=}}}^{LSApplicationParameters=lL^{FSRef=[80C]}^v^{__CFDictionary=}^{__CFArray=}^{AEDesc=L^^{OpaqueAEDataStorageType=}}}^{ProcessSerialNumber=LL}l', b'i^{FSRef=[80C]}qI^{AEKeyDesc=I{AEDesc=I^^{OpaqueAEDataStorageType=}}}^{LSApplicationParameters=qI^{FSRef=[80C]}^v^{__CFDictionary=}^{__CFArray=}^{AEDesc=I^^{OpaqueAEDataStorageType=}}}^{ProcessSerialNumber=II}q'), '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'type_modifier': 'n'}, 4: {'type_modifier': 'n'}, 5: {'c_array_length_in_arg': 6, 'type_modifier': 'o'}}}), 'RegisterIconRefFromResource': (b'sLL^{FSSpec=sl[64C]}s^^{OpaqueIconRef=}',), 'LSSharedFileListGetSeedValue': (sel32or64(b'L^{OpaqueLSSharedFileListRef=}', b'I^{OpaqueLSSharedFileListRef=}'),), 'LSOpenApplication': (sel32or64(b'l^{LSApplicationParameters=lL^{FSRef=[80C]}^v^{__CFDictionary=}^{__CFArray=}^{AEDesc=L^^{OpaqueAEDataStorageType=}}}^{ProcessSerialNumber=LL}', b'i^{LSApplicationParameters=qI^{FSRef=[80C]}^v^{__CFDictionary=}^{__CFArray=}^{AEDesc=I^^{OpaqueAEDataStorageType=}}}^{ProcessSerialNumber=II}'), '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'type_modifier': 'o'}}}), 'LSGetApplicationForItem': (sel32or64(b'l^{FSRef=[80C]}L^{FSRef=[80C]}^^{__CFURL=}', b'i^{FSRef=[80C]}I^{FSRef=[80C]}^^{__CFURL=}'), '', {'arguments': {0: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}, 3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'LSSetDefaultHandlerForURLScheme': (sel32or64(b'l^{__CFString=}^{__CFString=}', b'i^{__CFString=}^{__CFString=}'),), 'GetIconRef': (sel32or64(b'ssLL^^{OpaqueIconRef=}', b'ssII^^{OpaqueIconRef=}'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'LSRegisterURL': (sel32or64(b'l^{__CFURL=}Z', b'i^{__CFURL=}Z'),), 'GetIconRefOwners': (b's^{OpaqueIconRef=}^S', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'LSCopyAllHandlersForURLScheme': (b'^{__CFArray=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'LSCopyDefaultApplicationURLForContentType': (sel32or64(b'^{__CFURL=}^{__CFString=}L^^{__CFError=}', b'^{__CFURL=}^{__CFString=}I^^{__CFError=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'UTTypeIsDynamic': (b'Z^{__CFString=}',), 'AcquireIconRef': (b's^{OpaqueIconRef=}',), 'ReadIconFile': (b's^{FSSpec=sl[64C]}^^^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}',), 'LSSharedFileListItemCopyIconRef': (b'^{OpaqueIconRef=}^{OpaqueLSSharedFileListItemRef=}', '', {'retval': {'already_cfretained': True}}), 'UTGetOSTypeFromString': (sel32or64(b'L^{__CFString=}', b'I^{__CFString=}'),), 'LSGetApplicationForURL': (sel32or64(b'l^{__CFURL=}L^{FSRef=[80C]}^^{__CFURL=}', 
b'i^{__CFURL=}I^{FSRef=[80C]}^^{__CFURL=}'), '', {'arguments': {2: {'type_modifier': 'o'}, 3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'LSSharedFileListInsertItemURL': (b'^{OpaqueLSSharedFileListItemRef=}^{OpaqueLSSharedFileListRef=}^{OpaqueLSSharedFileListItemRef=}^{__CFString=}^{OpaqueIconRef=}^{__CFURL=}^{__CFDictionary=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'LSOpenURLsWithRole': (sel32or64(b'l^{__CFArray=}L^{AEKeyDesc=L{AEDesc=L^^{OpaqueAEDataStorageType=}}}^{LSApplicationParameters=lL^{FSRef=[80C]}^v^{__CFDictionary=}^{__CFArray=}^{AEDesc=L^^{OpaqueAEDataStorageType=}}}^{ProcessSerialNumber=LL}l', b'i^{__CFArray=}I^{AEKeyDesc=I{AEDesc=I^^{OpaqueAEDataStorageType=}}}^{LSApplicationParameters=qI^{FSRef=[80C]}^v^{__CFDictionary=}^{__CFArray=}^{AEDesc=I^^{OpaqueAEDataStorageType=}}}^{ProcessSerialNumber=II}q'), '', {'arguments': {2: {'type_modifier': 'n'}, 3: {'type_modifier': 'n'}, 4: {'c_array_length_in_arg': 5, 'type_modifier': 'o'}}}), 'LSCopyDefaultRoleHandlerForContentType': (sel32or64(b'^{__CFString=}^{__CFString=}L', b'^{__CFString=}^{__CFString=}I'), '', {'retval': {'already_cfretained': True}}), 'UnregisterIconRef': (sel32or64(b'sLL', b'sII'),), 'LSOpenFromURLSpec': (sel32or64(b'l^{LSLaunchURLSpec=^{__CFURL=}^{__CFArray=}^{AEDesc=L^^{OpaqueAEDataStorageType=}}L^v}^^{__CFURL=}', b'i^{LSLaunchURLSpec=^{__CFURL=}^{__CFArray=}^{AEDesc=I^^{OpaqueAEDataStorageType=}}I^v}^^{__CFURL=}'), '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'UTTypeConformsTo': (b'Z^{__CFString=}^{__CFString=}',), 'SetCustomIconsEnabled': (b'ssZ',), 'LSRegisterFSRef': (sel32or64(b'l^{FSRef=[80C]}Z', b'i^{FSRef=[80C]}Z'), '', {'arguments': {0: {'type_modifier': 'n'}}}), 'LSSetItemAttribute': (sel32or64(b'l^{FSRef=[80C]}L^{__CFString=}@', b'i^{FSRef=[80C]}I^{__CFString=}@'), '', {'arguments': {0: {'type_modifier': 'n'}}}), 'UTCreateStringForOSType': (sel32or64(b'^{__CFString=}L', b'^{__CFString=}I'), '', {'retval': {'already_cfretained': True}}), 'LSCopyKindStringForRef': (sel32or64(b'l^{FSRef=[80C]}^^{__CFString=}', b'i^{FSRef=[80C]}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}, 1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'LSCopyDisplayNameForRef': (sel32or64(b'l^{FSRef=[80C]}^^{__CFString=}', b'i^{FSRef=[80C]}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}, 1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CompositeIconRef': (b's^{OpaqueIconRef=}^{OpaqueIconRef=}^^{OpaqueIconRef=}', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'UTTypeEqual': (b'Z^{__CFString=}^{__CFString=}',), 'UTTypeCopyAllTagsWithClass': (b'^{__CFArray=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'LSCopyKindStringForMIMEType': (sel32or64(b'l^{__CFString=}^^{__CFString=}', b'i^{__CFString=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'RegisterIconRefFromIconFamily': (sel32or64(b'sLL^^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}^^{OpaqueIconRef=}', b'sII^^{IconFamilyResource=Ii[1{IconFamilyElement=Ii[1C]}]}^^{OpaqueIconRef=}'),), 'UTTypeIsDeclared': (b'Z^{__CFString=}',), 'LSCopyDisplayNameForURL': (sel32or64(b'l^{__CFURL=}^^{__CFString=}', b'i^{__CFURL=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 
'o'}}}), 'GetIconRefFromFolder': (sel32or64(b'ssllcc^^{OpaqueIconRef=}', b'ssiicc^^{OpaqueIconRef=}'), '', {'arguments': {5: {'type_modifier': 'o'}}}), 'LSSetExtensionHiddenForRef': (sel32or64(b'l^{FSRef=[80C]}Z', b'i^{FSRef=[80C]}Z'), '', {'arguments': {0: {'type_modifier': 'n'}}}), 'OverrideIconRefFromResource': (b's^{OpaqueIconRef=}^{FSSpec=sl[64C]}s',), 'LSGetExtensionInfo': (sel32or64(b'lL^T^L', b'iQ^T^Q'), '', {'arguments': {1: {'c_array_length_in_arg': 0, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'LSCopyDefaultApplicationURLForURL': (sel32or64(b'^{__CFURL=}^{__CFURL=}L^^{__CFError=}', b'^{__CFURL=}^{__CFURL=}I^^{__CFError=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'GetIconRefFromFileInfo': (sel32or64(b'l^{FSRef=[80C]}L^TL^{FSCatalogInfo=SsLLCCCC{UTCDateTime=SLS}{UTCDateTime=SLS}{UTCDateTime=SLS}{UTCDateTime=SLS}{UTCDateTime=SLS}[4L][16C][16C]QQQQLL}L^^{OpaqueIconRef=}^s', b'i^{FSRef=[80C]}Q^TI^{FSCatalogInfo=SsIICCCC{UTCDateTime=SIS}{UTCDateTime=SIS}{UTCDateTime=SIS}{UTCDateTime=SIS}{UTCDateTime=SIS}{FSPermissionInfo=IICCS^{__FSFileSecurity=}}[16C][16C]QQQQII}I^^{OpaqueIconRef=}^s'), '', {'arguments': {0: {'type_modifier': 'n'}, 2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 4: {'null_accepted': True, 'type_modifier': 'n'}, 6: {'type_modifier': 'o'}, 7: {'type_modifier': 'o'}}}), 'UTTypeCopyDescription': (b'^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'LSCopyApplicationURLsForBundleIdentifier': (b'^{__CFArray=}^{__CFString=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'FlushIconRefsByVolume': (b'ss',), 'GetCustomIconsEnabled': (b'ss^Z', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'LSCopyItemInfoForURL': (sel32or64(b'l^{__CFURL=}L^{LSItemInfoRecord=LLL^{__CFString=}^{__CFString=}L}', b'i^{__CFURL=}I^{LSItemInfoRecord=III^{__CFString=}}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'o'}}}), 'LSFindApplicationForInfo': (sel32or64(b'lL^{__CFString=}^{__CFString=}^{FSRef=[80C]}^^{__CFURL=}', b'iI^{__CFString=}^{__CFString=}^{FSRef=[80C]}^^{__CFURL=}'), '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'LSOpenFromRefSpec': (sel32or64(b'l^{LSLaunchFSRefSpec=^{FSRef=[80C]}L^{FSRef=[80C]}^{AEDesc=L^^{OpaqueAEDataStorageType=}}L^v}^{FSRef=[80C]}', b'i^{LSLaunchFSRefSpec=^{FSRef=[80C]}Q^{FSRef=[80C]}^{AEDesc=I^^{OpaqueAEDataStorageType=}}I^v}^{FSRef=[80C]}'), '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'type_modifier': 'o'}}}), 'LSSharedFileListItemRemove': (sel32or64(b'l^{OpaqueLSSharedFileListRef=}^{OpaqueLSSharedFileListItemRef=}', b'i^{OpaqueLSSharedFileListRef=}^{OpaqueLSSharedFileListItemRef=}'),), 'LSCopyKindStringForURL': (sel32or64(b'l^{__CFURL=}^^{__CFString=}', b'i^{__CFURL=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'LSCopyApplicationURLsForURL': (sel32or64(b'^{__CFArray=}^{__CFURL=}L', b'^{__CFArray=}^{__CFURL=}I'), '', {'retval': {'already_cfretained': True}}), 'LSOpenCFURLRef': (sel32or64(b'l^{__CFURL=}^^{__CFURL=}', b'i^{__CFURL=}^^{__CFURL=}'), '', {'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'LSSharedFileListCopySnapshot': (sel32or64(b'^{__CFArray=}^{OpaqueLSSharedFileListRef=}^L', 
b'^{__CFArray=}^{OpaqueLSSharedFileListRef=}^I'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}}}), 'LSSharedFileListAddObserver': (b'v^{OpaqueLSSharedFileListRef=}^{__CFRunLoop=}^{__CFString=}^?^v', '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{OpaqueLSSharedFileListRef=}'}, 1: {'type': b'^v'}}}, 'callable_retained': True}}}), 'LSSharedFileListItemGetID': (sel32or64(b'L^{OpaqueLSSharedFileListItemRef=}', b'I^{OpaqueLSSharedFileListItemRef=}'),)} -aliases = {'mountedFolderIconResource': 'kMountedFolderIconResource', 'genericFolderIconResource': 'kGenericFolderIconResource', 'genericApplicationIconResource': 'kGenericApplicationIconResource', 'genericFileServerIconResource': 'kGenericFileServerIconResource', 'printMonitorFolderIconResource': 'kPrintMonitorFolderIconResource', 'sharedFolderIconResource': 'kSharedFolderIconResource', 'openFolderIconResource': 'kOpenFolderIconResource', 'controlPanelFolderIconResource': 'kControlPanelFolderIconResource', 'desktopIconResource': 'kDesktopIconResource', 'floppyIconResource': 'kFloppyIconResource', 'genericSuitcaseIconResource': 'kGenericSuitcaseIconResource', 'fontsFolderIconResource': 'kFontsFolderIconResource', 'kLSInvalidExtensionIndex': 'ULONG_MAX', 'genericEditionFileIconResource': 'kGenericEditionFileIconResource', 'genericQueryDocumentIconResource': 'kGenericQueryDocumentIconResource', 'genericMoverObjectIconResource': 'kGenericMoverObjectIconResource', 'extensionsFolderIconResource': 'kExtensionsFolderIconResource', 'genericRAMDiskIconResource': 'kGenericRAMDiskIconResource', 'dropFolderIconResource': 'kDropFolderIconResource', 'genericHardDiskIconResource': 'kGenericHardDiskIconResource', 'genericDocumentIconResource': 'kGenericDocumentIconResource', 'appleMenuFolderIconResource': 'kAppleMenuFolderIconResource', 'systemFolderIconResource': 'kSystemFolderIconResource', 'genericDeskAccessoryIconResource': 'kGenericDeskAccessoryIconResource', 'privateFolderIconResource': 'kPrivateFolderIconResource', 'preferencesFolderIconResource': 'kPreferencesFolderIconResource', 'fullTrashIconResource': 'kFullTrashIconResource', 'trashIconResource': 'kTrashIconResource', 'genericPreferencesIconResource': 'kGenericPreferencesIconResource', 'genericStationeryIconResource': 'kGenericStationeryIconResource', 'genericExtensionIconResource': 'kGenericExtensionIconResource', 'ownedFolderIconResource': 'kOwnedFolderIconResource', 'startupFolderIconResource': 'kStartupFolderIconResource', 'genericCDROMIconResource': 'kGenericCDROMIconResource', 'kInternationResourcesIcon': 'kInternationalResourcesIcon'} -cftypes=[('LSSharedFileListItemRef', b'^{OpaqueLSSharedFileListItemRef=}', 'LSSharedFileListItemGetTypeID', None), ('LSSharedFileListRef', b'^{OpaqueLSSharedFileListRef=}', 'LSSharedFileListGetTypeID', None)] -misc.update({'IconRef': objc.createOpaquePointerType('IconRef', b'^{OpaqueIconRef=}')}) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreServices/LaunchServices/_metadata.pyc b/env/lib/python2.7/site-packages/CoreServices/LaunchServices/_metadata.pyc deleted file mode 100644 index b91e6128..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/LaunchServices/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/Metadata/__init__.py b/env/lib/python2.7/site-packages/CoreServices/Metadata/__init__.py deleted file mode 100644 index b878c6f3..00000000 --- 
a/env/lib/python2.7/site-packages/CoreServices/Metadata/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the CoreServices/Metadata framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. - -Note that PyObjC only wrappers the non-deprecated parts of the CoreServices -framework. -''' -import sys -import objc - -from CoreServices.Metadata import _metadata - -sys.modules['CoreServices.Metadata'] = mod = objc.ObjCLazyModule('CoreServices.Metadata', - "com.apple.Metadata", - objc.pathForFramework("/System/Library/Frameworks/CoreServices.framework"), - _metadata.__dict__, - None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ()) - -import sys -del sys.modules['CoreServices.Metadata._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreServices/Metadata/__init__.pyc b/env/lib/python2.7/site-packages/CoreServices/Metadata/__init__.pyc deleted file mode 100644 index f7120125..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/Metadata/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/Metadata/_metadata.py b/env/lib/python2.7/site-packages/CoreServices/Metadata/_metadata.py deleted file mode 100644 index f68f45f4..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/Metadata/_metadata.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Aug 13 10:46:22 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'MDQueryBatchingParams': objc.createStructType('MDQueryBatchingParams', sel32or64(b'{_MDQueryBatchingParams=LLLLLL}', b'{_MDQueryBatchingParams=QQQQQQ}'), ['first_max_num', 'first_max_ms', 'progress_max_num', 'progress_max_ms', 'update_max_num', 'update_max_ms'])}) -constants = 
'''$kMDAttributeAllValues@^{__CFString=}$kMDAttributeDisplayValues@^{__CFString=}$kMDAttributeMultiValued@^{__CFString=}$kMDAttributeName@^{__CFString=}$kMDAttributeReadOnlyValues@^{__CFString=}$kMDAttributeType@^{__CFString=}$kMDExporterAvaliable@^{__CFString=}$kMDItemAcquisitionMake@^{__CFString=}$kMDItemAcquisitionModel@^{__CFString=}$kMDItemAlbum@^{__CFString=}$kMDItemAltitude@^{__CFString=}$kMDItemAperture@^{__CFString=}$kMDItemAppleLoopDescriptors@^{__CFString=}$kMDItemAppleLoopsKeyFilterType@^{__CFString=}$kMDItemAppleLoopsLoopMode@^{__CFString=}$kMDItemAppleLoopsRootKey@^{__CFString=}$kMDItemApplicationCategories@^{__CFString=}$kMDItemAttributeChangeDate@^{__CFString=}$kMDItemAudiences@^{__CFString=}$kMDItemAudioBitRate@^{__CFString=}$kMDItemAudioChannelCount@^{__CFString=}$kMDItemAudioEncodingApplication@^{__CFString=}$kMDItemAudioSampleRate@^{__CFString=}$kMDItemAudioTrackNumber@^{__CFString=}$kMDItemAuthorAddresses@^{__CFString=}$kMDItemAuthorEmailAddresses@^{__CFString=}$kMDItemAuthors@^{__CFString=}$kMDItemBitsPerSample@^{__CFString=}$kMDItemCFBundleIdentifier@^{__CFString=}$kMDItemCameraOwner@^{__CFString=}$kMDItemCity@^{__CFString=}$kMDItemCodecs@^{__CFString=}$kMDItemColorSpace@^{__CFString=}$kMDItemComment@^{__CFString=}$kMDItemComposer@^{__CFString=}$kMDItemContactKeywords@^{__CFString=}$kMDItemContentCreationDate@^{__CFString=}$kMDItemContentModificationDate@^{__CFString=}$kMDItemContentType@^{__CFString=}$kMDItemContentTypeTree@^{__CFString=}$kMDItemContributors@^{__CFString=}$kMDItemCopyright@^{__CFString=}$kMDItemCountry@^{__CFString=}$kMDItemCoverage@^{__CFString=}$kMDItemCreator@^{__CFString=}$kMDItemDateAdded@^{__CFString=}$kMDItemDeliveryType@^{__CFString=}$kMDItemDescription@^{__CFString=}$kMDItemDirector@^{__CFString=}$kMDItemDisplayName@^{__CFString=}$kMDItemDownloadedDate@^{__CFString=}$kMDItemDueDate@^{__CFString=}$kMDItemDurationSeconds@^{__CFString=}$kMDItemEXIFGPSVersion@^{__CFString=}$kMDItemEXIFVersion@^{__CFString=}$kMDItemEditors@^{__CFString=}$kMDItemEmailAddresses@^{__CFString=}$kMDItemEncodingApplications@^{__CFString=}$kMDItemExecutableArchitectures@^{__CFString=}$kMDItemExecutablePlatform@^{__CFString=}$kMDItemExposureMode@^{__CFString=}$kMDItemExposureProgram@^{__CFString=}$kMDItemExposureTimeSeconds@^{__CFString=}$kMDItemExposureTimeString@^{__CFString=}$kMDItemFNumber@^{__CFString=}$kMDItemFSContentChangeDate@^{__CFString=}$kMDItemFSCreationDate@^{__CFString=}$kMDItemFSExists@^{__CFString=}$kMDItemFSHasCustomIcon@^{__CFString=}$kMDItemFSInvisible@^{__CFString=}$kMDItemFSIsExtensionHidden@^{__CFString=}$kMDItemFSIsReadable@^{__CFString=}$kMDItemFSIsStationery@^{__CFString=}$kMDItemFSIsWriteable@^{__CFString=}$kMDItemFSLabel@^{__CFString=}$kMDItemFSName@^{__CFString=}$kMDItemFSNodeCount@^{__CFString=}$kMDItemFSOwnerGroupID@^{__CFString=}$kMDItemFSOwnerUserID@^{__CFString=}$kMDItemFSSize@^{__CFString=}$kMDItemFinderComment@^{__CFString=}$kMDItemFlashOnOff@^{__CFString=}$kMDItemFocalLength@^{__CFString=}$kMDItemFocalLength35mm@^{__CFString=}$kMDItemFonts@^{__CFString=}$kMDItemGPSAreaInformation@^{__CFString=}$kMDItemGPSDOP@^{__CFString=}$kMDItemGPSDateStamp@^{__CFString=}$kMDItemGPSDestBearing@^{__CFString=}$kMDItemGPSDestDistance@^{__CFString=}$kMDItemGPSDestLatitude@^{__CFString=}$kMDItemGPSDestLongitude@^{__CFString=}$kMDItemGPSDifferental@^{__CFString=}$kMDItemGPSMapDatum@^{__CFString=}$kMDItemGPSMeasureMode@^{__CFString=}$kMDItemGPSProcessingMethod@^{__CFString=}$kMDItemGPSStatus@^{__CFString=}$kMDItemGPSTrack@^{__CFString=}$kMDItemGenre@^{__CF
String=}$kMDItemHTMLContent@^{__CFString=}$kMDItemHasAlphaChannel@^{__CFString=}$kMDItemHeadline@^{__CFString=}$kMDItemISOSpeed@^{__CFString=}$kMDItemIdentifier@^{__CFString=}$kMDItemImageDirection@^{__CFString=}$kMDItemInformation@^{__CFString=}$kMDItemInstantMessageAddresses@^{__CFString=}$kMDItemInstructions@^{__CFString=}$kMDItemIsApplicationManaged@^{__CFString=}$kMDItemIsGeneralMIDISequence@^{__CFString=}$kMDItemIsLikelyJunk@^{__CFString=}$kMDItemKeySignature@^{__CFString=}$kMDItemKeywords@^{__CFString=}$kMDItemKind@^{__CFString=}$kMDItemLabelID@^{__CFString=}$kMDItemLabelIcon@^{__CFString=}$kMDItemLabelKind@^{__CFString=}$kMDItemLabelUUID@^{__CFString=}$kMDItemLanguages@^{__CFString=}$kMDItemLastUsedDate@^{__CFString=}$kMDItemLatitude@^{__CFString=}$kMDItemLayerNames@^{__CFString=}$kMDItemLensModel@^{__CFString=}$kMDItemLongitude@^{__CFString=}$kMDItemLyricist@^{__CFString=}$kMDItemMaxAperture@^{__CFString=}$kMDItemMediaTypes@^{__CFString=}$kMDItemMeteringMode@^{__CFString=}$kMDItemMusicalGenre@^{__CFString=}$kMDItemMusicalInstrumentCategory@^{__CFString=}$kMDItemMusicalInstrumentName@^{__CFString=}$kMDItemNamedLocation@^{__CFString=}$kMDItemNumberOfPages@^{__CFString=}$kMDItemOrganizations@^{__CFString=}$kMDItemOrientation@^{__CFString=}$kMDItemOriginalFormat@^{__CFString=}$kMDItemOriginalSource@^{__CFString=}$kMDItemPageHeight@^{__CFString=}$kMDItemPageWidth@^{__CFString=}$kMDItemParticipants@^{__CFString=}$kMDItemPath@^{__CFString=}$kMDItemPerformers@^{__CFString=}$kMDItemPhoneNumbers@^{__CFString=}$kMDItemPixelCount@^{__CFString=}$kMDItemPixelHeight@^{__CFString=}$kMDItemPixelWidth@^{__CFString=}$kMDItemProducer@^{__CFString=}$kMDItemProfileName@^{__CFString=}$kMDItemProjects@^{__CFString=}$kMDItemPublishers@^{__CFString=}$kMDItemRecipientAddresses@^{__CFString=}$kMDItemRecipientEmailAddresses@^{__CFString=}$kMDItemRecipients@^{__CFString=}$kMDItemRecordingDate@^{__CFString=}$kMDItemRecordingYear@^{__CFString=}$kMDItemRedEyeOnOff@^{__CFString=}$kMDItemResolutionHeightDPI@^{__CFString=}$kMDItemResolutionWidthDPI@^{__CFString=}$kMDItemRights@^{__CFString=}$kMDItemSecurityMethod@^{__CFString=}$kMDItemSpeed@^{__CFString=}$kMDItemStarRating@^{__CFString=}$kMDItemStateOrProvince@^{__CFString=}$kMDItemStreamable@^{__CFString=}$kMDItemSubject@^{__CFString=}$kMDItemSupportFileType@^{__CFString=}$kMDItemTempo@^{__CFString=}$kMDItemTextContent@^{__CFString=}$kMDItemTheme@^{__CFString=}$kMDItemTimeSignature@^{__CFString=}$kMDItemTimestamp@^{__CFString=}$kMDItemTitle@^{__CFString=}$kMDItemTotalBitRate@^{__CFString=}$kMDItemURL@^{__CFString=}$kMDItemVersion@^{__CFString=}$kMDItemVideoBitRate@^{__CFString=}$kMDItemWhereFroms@^{__CFString=}$kMDItemWhiteBalance@^{__CFString=}$kMDLabelAddedNotification@^{__CFString=}$kMDLabelBundleURL@^{__CFString=}$kMDLabelChangedNotification@^{__CFString=}$kMDLabelContentChangeDate@^{__CFString=}$kMDLabelDisplayName@^{__CFString=}$kMDLabelIconData@^{__CFString=}$kMDLabelIconUUID@^{__CFString=}$kMDLabelIsMutuallyExclusiveSetMember@^{__CFString=}$kMDLabelKind@^{__CFString=}$kMDLabelKindIsMutuallyExclusiveSetKey@^{__CFString=}$kMDLabelKindVisibilityKey@^{__CFString=}$kMDLabelRemovedNotification@^{__CFString=}$kMDLabelSetsFinderColor@^{__CFString=}$kMDLabelUUID@^{__CFString=}$kMDLabelVisibility@^{__CFString=}$kMDPrivateVisibility@^{__CFString=}$kMDPublicVisibility@^{__CFString=}$kMDQueryDidFinishNotification@^{__CFString=}$kMDQueryDidUpdateNotification@^{__CFString=}$kMDQueryProgressNotification@^{__CFString=}$kMDQueryResultContentRelevance@^{__CFString=}$kMDQuerySc
opeAllIndexed@^{__CFString=}$kMDQueryScopeComputer@^{__CFString=}$kMDQueryScopeComputerIndexed@^{__CFString=}$kMDQueryScopeHome@^{__CFString=}$kMDQueryScopeNetwork@^{__CFString=}$kMDQueryScopeNetworkIndexed@^{__CFString=}$kMDQueryUpdateAddedItems@^{__CFString=}$kMDQueryUpdateChangedItems@^{__CFString=}$kMDQueryUpdateRemovedItems@^{__CFString=}$''' -enums = '''$kMDLabelLocalDomain@1$kMDLabelUserDomain@0$kMDQueryAllowFSTranslation@8$kMDQueryReverseSortOrderFlag@1$kMDQuerySynchronous@1$kMDQueryWantsUpdates@4$''' -misc.update({}) -functions={'MDQueryCreate': (b'^{__MDQuery=}^{__CFAllocator=}^{__CFString=}^{__CFArray=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'MDItemCopyAttributes': (b'^{__CFDictionary=}^{__MDItem=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'MDLabelCreate': (b'^{__MDLabel=}^{__CFAllocator=}^{__CFString=}^{__CFString=}I', '', {'retval': {'already_cfretained': True}}), 'MDLabelGetTypeID': (sel32or64(b'L', b'Q'),), 'MDQueryGetAttributeValueOfResultAtIndex': (sel32or64(b'@^{__MDQuery=}^{__CFString=}l', b'@^{__MDQuery=}^{__CFString=}q'),), 'MDQueryCreateForItems': (b'^{__MDQuery=}^{__CFAllocator=}^{__CFString=}^{__CFArray=}^{__CFArray=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryCopyValueListAttributes': (b'^{__CFArray=}^{__MDQuery=}', '', {'retval': {'already_cfretained': True}}), 'MDLabelCopyAttributeName': (b'^{__CFString=}^{__MDLabel=}', '', {'retval': {'already_cfretained': True}}), 'MDCopyLabelsWithKind': (b'^{__CFArray=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryGetIndexOfResult': (sel32or64(b'l^{__MDQuery=}@', b'q^{__MDQuery=}@'),), 'MDLabelDelete': (b'Z^{__MDLabel=}',), 'MDSchemaCopyDisplayNameForAttribute': (b'^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'MDItemCopyAttribute': (b'@^{__MDItem=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryDisableUpdates': (b'v^{__MDQuery=}',), 'MDItemCopyAttributeList': (b'^{__CFDictionary=}^{__MDItem=}', '', {'retval': {'already_cfretained': True}, 'variadic': True}), 'MDItemCreate': (b'^{__MDItem=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryGetSortOptionFlagsForAttribute': (b'I^{__MDQuery=}^{__CFString=}',), 'MDQueryGetBatchingParameters': (sel32or64(b'{_MDQueryBatchingParams=LLLLLL}^{__MDQuery=}', b'{_MDQueryBatchingParams=QQQQQQ}^{__MDQuery=}'),), 'MDCopyLabelWithUUID': (b'^{__MDLabel=}^{__CFUUID=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryEnableUpdates': (b'v^{__MDQuery=}',), 'MDQuerySetMaxCount': (sel32or64(b'v^{__MDQuery=}l', b'v^{__MDQuery=}q'),), 'MDQueryCopyValuesOfAttribute': (b'^{__CFArray=}^{__MDQuery=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'MDItemsCreateWithURLs': (b'^{__CFArray=}^{__CFAllocator=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'MDQuerySetCreateValueFunction': (sel32or64(b'v^{__MDQuery=}^?^v^{_CFArrayCallBacks=l^?^?^?^?}', b'v^{__MDQuery=}^?^v^{_CFArrayCallBacks=q^?^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'^v'}, 'arguments': {0: {'type': b'^{__MDQuery=}'}, 1: {'type': b'^{__CFString=}'}, 2: {'type': b'@'}, 3: {'type': b'^v'}}}}}}), 'MDSchemaCopyDisplayDescriptionForAttribute': (b'^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'MDCopyLabelsMatchingExpression': (b'^{__CFArray=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'MDQuerySetSortOrder': 
(b'Z^{__MDQuery=}^{__CFArray=}',), 'MDQuerySetSortComparator': (b'v^{__MDQuery=}^?^v', '', {'arguments': {1: {'callable': {'retval': {'type': sel32or64(b'i',b'q')}, 'arguments': {0: {'type': b'^@', 'type_modifier': 'n', 'c_array_of_variable_length': True}, 1: {'type': b'^@', 'type_modifier': 'n', 'c_array_of_variable_length': True}, 2: {'type': b'^v'}}}}}}), 'MDSchemaCopyAllAttributes': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryStop': (b'v^{__MDQuery=}',), 'MDQuerySetSearchScope': (sel32or64(b'v^{__MDQuery=}^{__CFArray=}L', b'v^{__MDQuery=}^{__CFArray=}I'),), 'MDQueryIsGatheringComplete': (b'Z^{__MDQuery=}',), 'MDQuerySetCreateResultFunction': (sel32or64(b'v^{__MDQuery=}^?^v^{_CFArrayCallBacks=l^?^?^?^?}', b'v^{__MDQuery=}^?^v^{_CFArrayCallBacks=q^?^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'^v'}, 'arguments': {0: {'type': b'^{__MDQuery=}'}, 1: {'type': b'^{__MDItem=}'}, 2: {'type': b'^v'}}}}}}), 'MDItemCopyLabels': (b'^{__CFArray=}^{__MDItem=}', '', {'retval': {'already_cfretained': True}}), 'MDSchemaCopyAttributesForContentType': (b'^{__CFDictionary=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'MDItemSetLabel': (b'Z^{__MDItem=}^{__MDLabel=}',), 'MDQueryExecute': (sel32or64(b'Z^{__MDQuery=}L', b'Z^{__MDQuery=}Q'),), 'MDQuerySetDispatchQueue': (b'v^{__MDQuery=}@',), 'MDLabelSetAttributes': (b'Z^{__MDLabel=}^{__CFDictionary=}',), 'MDItemGetTypeID': (sel32or64(b'L', b'Q'),), 'MDQueryCopyQueryString': (b'^{__CFString=}^{__MDQuery=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryCopySortingAttributes': (b'^{__CFArray=}^{__MDQuery=}', '', {'retval': {'already_cfretained': True}}), 'MDQuerySetSortOptionFlagsForAttribute': (b'Z^{__MDQuery=}^{__CFString=}I',), 'MDQuerySetBatchingParameters': (sel32or64(b'v^{__MDQuery=}{_MDQueryBatchingParams=LLLLLL}', b'v^{__MDQuery=}{_MDQueryBatchingParams=QQQQQQ}'),), 'MDQueryCreateSubset': (b'^{__MDQuery=}^{__CFAllocator=}^{__MDQuery=}^{__CFString=}^{__CFArray=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryGetTypeID': (sel32or64(b'L', b'Q'),), 'MDItemCreateWithURL': (b'^{__MDItem=}^{__CFAllocator=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'MDQuerySetSortComparatorBlock': (b'v^{__MDQuery=}@?', '', {'arguments': {1: {'callable': {'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^@', 'type_modifier': 'n', 'c_array_of_variable_length': True}, 2: {'type': '^@', 'type_modifier': 'n', 'c_array_of_variable_length': True}}}, 'block': {'retval': {'type': b'l'}, 'arguments': {0: {'type': b'^@'}, 1: {'type': b'^@'}}}}}}), 'MDItemCopyAttributeNames': (b'^{__CFArray=}^{__MDItem=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryGetCountOfResultsWithAttributeValue': (sel32or64(b'l^{__MDQuery=}^{__CFString=}@', b'q^{__MDQuery=}^{__CFString=}@'),), 'MDItemsCopyAttributes': (b'^{__CFArray=}^{__CFArray=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'MDQueryGetResultCount': (sel32or64(b'l^{__MDQuery=}', b'q^{__MDQuery=}'),), 'MDCopyLabelKinds': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'MDItemRemoveLabel': (b'Z^{__MDItem=}^{__MDLabel=}',), 'MDQueryGetResultAtIndex': (sel32or64(b'@^{__MDQuery=}l', b'@^{__MDQuery=}q'),), 'MDSchemaCopyMetaAttributesForAttribute': (b'^{__CFDictionary=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'MDLabelCopyAttribute': (b'@^{__MDLabel=}^{__CFString=}', '', {'retval': 
{'already_cfretained': True}})} -aliases = {'MD_AVAIL': 'AVAILABLE_MAC_OS_X_VERSION_10_4_AND_LATER', 'MD_AVAIL_LEOPARD': 'AVAILABLE_MAC_OS_X_VERSION_10_5_AND_LATER'} -cftypes=[('MDItemRef', b'^{__MDItem=}', None, None), ('MDLabelRef', b'^{__MDLabel=}', None, None), ('MDQueryRef', b'^{__MDQuery=}', None, None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreServices/Metadata/_metadata.pyc b/env/lib/python2.7/site-packages/CoreServices/Metadata/_metadata.pyc deleted file mode 100644 index 0481d6b3..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/Metadata/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/SearchKit/__init__.py b/env/lib/python2.7/site-packages/CoreServices/SearchKit/__init__.py deleted file mode 100644 index 3b312a40..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/SearchKit/__init__.py +++ /dev/null @@ -1,125 +0,0 @@ -''' -Python mapping for the SearchKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import objc, sys - -import CoreFoundation -from CoreServices.SearchKit import _metadata - -mod = objc.ObjCLazyModule( - "SearchKit", "com.apple.SearchKit", - objc.pathForFramework( - "/System/Library/Frameworks/CoreServices.framework/Frameworks/SearchKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (CoreFoundation,)) - -import sys -del sys.modules['CoreServices.SearchKit._metadata'] - - - -# SKIndexGetTypeID is documented, but not actually exported by Leopard. Try to -# emulate the missing functionality. -# -# See also radar:6525606. -# -# UPDATE 20151123: The workaround is still necessary on OSX 10.11 -def workaround(): - from Foundation import NSMutableData, NSAutoreleasePool - - pool = NSAutoreleasePool.alloc().init() - try: - rI = mod.SKIndexCreateWithMutableData(NSMutableData.data(), - None, mod.kSKIndexInverted, None) - - indexID = mod.CFGetTypeID(rI) - - r = mod.SKIndexDocumentIteratorCreate(rI, None) - iterID = mod.CFGetTypeID(r) - del r - - r = mod.SKSearchGroupCreate([rI]) - groupID = mod.CFGetTypeID(r) - - r = mod.SKSearchResultsCreateWithQuery(r, ".*", mod.kSKSearchRanked, 1, None, None) - resultID = mod.CFGetTypeID(r) - - if mod.SKSearchGetTypeID() == 0: - # Type doesn't get registered unless you try to use it. 
- # That's no good for PyObjC, therefore forcefully create - # a SKSearch object - mod.SKSearchCreate(rI, "q", 0) - searchref = objc.registerCFSignature( - "SKSearchRef", b"^{__SKSearch=}", mod.SKSearchGetTypeID()) - else: - searchref = mod.SKSearchRef - - del r - del rI - - r = mod.SKSummaryCreateWithString("foo") - summaryID = mod.CFGetTypeID(r) - del r - - finally: - del pool - - def SKIndexGetTypeID(): - return indexID - - def SKIndexDocumentIteratorGetTypeID(): - return iterID - - def SKSearchGroupGetTypeID(): - return groupID - - def SKSearchResultsGetTypeID(): - return resultID - - def SKSummaryGetTypeID(): - return summaryID - - indexType = objc.registerCFSignature( - "SKIndexRef", b"^{__SKIndex=}", indexID) - iterType = objc.registerCFSignature( - "SKIndexDocumentIteratorRef", b"^{__SKIndexDocumentIterator=}", iterID) - groupType = objc.registerCFSignature( - "SKSearchGroupRef", b"^{__SKSearchGroup=}", groupID) - resultType = objc.registerCFSignature( - "SKSearchResultsRef", b"^{__SKSearchResults=}", resultID) - summaryType = objc.registerCFSignature( - "SKSummaryRef", b"^{__SKSummary=}", summaryID) - - - # For some reason SKDocumentGetTypeID doesn't return the right value - # when the framework loader calls it the first time around, - # by this time the framework is fully initialized and we get - # the correct result. - SKDocumentRef = objc.registerCFSignature( - "SKDocumentRef", b"@", mod.SKDocumentGetTypeID()) - - - return (SKIndexGetTypeID, indexType, SKIndexDocumentIteratorGetTypeID, iterType, - SKSearchGroupGetTypeID, groupType, SKSearchResultsGetTypeID, resultType, - SKSummaryGetTypeID, summaryType, iterType, - SKDocumentRef, searchref) - -(mod.SKIndexGetTypeID, mod.SKIndexRef, - mod.SKIndexDocumentIteratorGetTypeID, mod.SKIndexDocumentRef, - mod.SKSearchGroupGetTypeID, mod.SKSearchGroupRef, - mod.SKSearchResultsGetTypeID, mod.SKSearchResultsRef, - mod.SKSummaryGetTypeID, mod.SKSummaryRef, - mod.SKIndexDocumentIteratorRef, - mod.SKDocumentRef, mod.SKSearchRef, -) = workaround() - -del workaround - -sys.modules['CoreServices.SearchKit'] = mod diff --git a/env/lib/python2.7/site-packages/CoreServices/SearchKit/__init__.pyc b/env/lib/python2.7/site-packages/CoreServices/SearchKit/__init__.pyc deleted file mode 100644 index 434157f6..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/SearchKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/SearchKit/_metadata.py b/env/lib/python2.7/site-packages/CoreServices/SearchKit/_metadata.py deleted file mode 100644 index a00dadda..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/SearchKit/_metadata.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Sep 24 11:16:09 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$kSKEndTermChars@^{__CFString=}$kSKLanguageTypes@^{__CFString=}$kSKMaximumTerms@^{__CFString=}$kSKMinTermLength@^{__CFString=}$kSKProximityIndexing@^{__CFString=}$kSKStartTermChars@^{__CFString=}$kSKStopWords@^{__CFString=}$kSKSubstitutions@^{__CFString=}$kSKTermChars@^{__CFString=}$''' -enums = 
'''$kSKDocumentStateAddPending@2$kSKDocumentStateDeletePending@3$kSKDocumentStateIndexed@1$kSKDocumentStateNotIndexed@0$kSKIndexInverted@1$kSKIndexInvertedVector@3$kSKIndexUnknown@0$kSKIndexVector@2$kSKSearchBooleanRanked@1$kSKSearchOptionDefault@0$kSKSearchOptionFindSimilar@4$kSKSearchOptionNoRelevanceScores@1$kSKSearchOptionSpaceMeansOR@2$kSKSearchPrefixRanked@3$kSKSearchRanked@0$kSKSearchRequiredRanked@2$''' -misc.update({}) -functions={'SKIndexGetMaximumTermID': (sel32or64(b'l^{__SKIndex=}', b'q^{__SKIndex=}'),), 'SKDocumentGetName': (b'^{__CFString=}@',), 'SKIndexRemoveDocument': (b'Z^{__SKIndex=}@',), 'SKIndexCopyTermIDArrayForDocumentID': (sel32or64(b'^{__CFArray=}^{__SKIndex=}l', b'^{__CFArray=}^{__SKIndex=}q'), '', {'retval': {'already_cfretained': True}}), 'SKSearchGetTypeID': (sel32or64(b'L', b'Q'),), 'SKIndexDocumentIteratorCreate': (b'^{__SKIndexDocumentIterator=}^{__SKIndex=}@', '', {'retval': {'already_cfretained': True}}), 'SKDocumentGetTypeID': (sel32or64(b'L', b'Q'),), 'SKSummaryGetParagraphSummaryInfo': (sel32or64(b'l^{__SKSummary=}l^l^l', b'q^{__SKSummary=}q^q^q'), '', {'arguments': {2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'SKSummaryGetParagraphCount': (sel32or64(b'l^{__SKSummary=}', b'q^{__SKSummary=}'),), 'SKIndexGetMaximumDocumentID': (sel32or64(b'l^{__SKIndex=}', b'q^{__SKIndex=}'),), 'SKSearchGroupCreate': (b'^{__SKSearchGroup=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'SKIndexFlush': (b'Z^{__SKIndex=}',), 'SKIndexCreateWithURL': (b'^{__SKIndex=}^{__CFURL=}^{__CFString=}i^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'SKSummaryGetSentenceCount': (sel32or64(b'l^{__SKSummary=}', b'q^{__SKSummary=}'),), 'SKIndexGetMaximumBytesBeforeFlush': (sel32or64(b'l^{__SKIndex=}', b'q^{__SKIndex=}'),), 'SKIndexCopyDocumentIDArrayForTermID': (sel32or64(b'^{__CFArray=}^{__SKIndex=}l', b'^{__CFArray=}^{__SKIndex=}q'), '', {'retval': {'already_cfretained': True}}), 'SKSearchResultsCreateWithDocuments': (sel32or64(b'^{__SKSearchResults=}^{__SKSearchGroup=}^{__CFArray=}l^v^?', b'^{__SKSearchResults=}^{__SKSearchGroup=}^{__CFArray=}q^v^?'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^{__SKIndex=}'}, 1: {'type': b'@'}, 2: {'type': b'^v'}}}}}}), 'SKSummaryCopyParagraphAtIndex': (sel32or64(b'^{__CFString=}^{__SKSummary=}l', b'^{__CFString=}^{__SKSummary=}q'), '', {'retval': {'already_cfretained': True}}), 'SKIndexGetDocumentID': (sel32or64(b'l^{__SKIndex=}@', b'q^{__SKIndex=}@'),), 'SKIndexOpenWithURL': (b'^{__SKIndex=}^{__CFURL=}^{__CFString=}Z',), 'SKIndexCreateWithMutableData': (b'^{__SKIndex=}^{__CFData=}^{__CFString=}i^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'SKIndexCopyDocumentURLsForDocumentIDs': (sel32or64(b'v^{__SKIndex=}l^l^^{__CFURL=}', b'v^{__SKIndex=}q^q^^{__CFURL=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'already_cfretained': True, 'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'SKSearchResultsCreateWithQuery': (sel32or64(b'^{__SKSearchResults=}^{__SKSearchGroup=}^{__CFString=}il^v^?', b'^{__SKSearchResults=}^{__SKSearchGroup=}^{__CFString=}iq^v^?'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^{__SKIndex=}'}, 1: {'type': b'@'}, 2: {'type': b'^v'}}}}}}), 'SKIndexCompact': (b'Z^{__SKIndex=}',), 'SKSearchGroupGetTypeID': (sel32or64(b'L', 
b'Q'),), 'SKSummaryCopySentenceAtIndex': (sel32or64(b'^{__CFString=}^{__SKSummary=}l', b'^{__CFString=}^{__SKSummary=}q'), '', {'retval': {'already_cfretained': True}}), 'SKSearchGroupCopyIndexes': (b'^{__CFArray=}^{__SKSearchGroup=}', '', {'retval': {'already_cfretained': True}}), 'SKLoadDefaultExtractorPlugIns': (b'v',), 'SKIndexMoveDocument': (b'Z^{__SKIndex=}@@',), 'SKIndexOpenWithData': (b'^{__SKIndex=}^{__CFData=}^{__CFString=}',), 'SKIndexCopyDocumentProperties': (b'^{__CFDictionary=}^{__SKIndex=}@', '', {'retval': {'already_cfretained': True}}), 'SKIndexCopyDocumentRefsForDocumentIDs': (sel32or64(b'v^{__SKIndex=}l^l^@', b'v^{__SKIndex=}q^q^@'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'n', 'c_array_length_in_arg': 1}, 3: {'type_modifier': 'o', 'c_array_length_in_arg': 1}}}), 'SKIndexOpenWithMutableData': (b'^{__SKIndex=}^{__CFData=}^{__CFString=}',), 'SKSearchResultsCopyMatchingTerms': (sel32or64(b'^{__CFArray=}^{__SKSearchResults=}l', b'^{__CFArray=}^{__SKSearchResults=}q'), '', {'retval': {'already_cfretained': True}}), 'SKDocumentGetParent': (b'@@',), 'SKSearchResultsGetTypeID': (sel32or64(b'L', b'Q'),), 'SKSummaryCreateWithString': (b'^{__SKSummary=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SKIndexGetDocumentTermCount': (sel32or64(b'l^{__SKIndex=}l', b'q^{__SKIndex=}q'),), 'SKIndexSetDocumentProperties': (b'v^{__SKIndex=}@^{__CFDictionary=}',), 'SKSummaryGetSentenceSummaryInfo': (sel32or64(b'l^{__SKSummary=}l^l^l^l', b'q^{__SKSummary=}q^q^q^q'), '', {'arguments': {2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'SKDocumentCreate': (b'@^{__CFString=}@^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SKDocumentCreateWithURL': (b'@^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'SKIndexClose': (b'v^{__SKIndex=}',), 'SKIndexGetDocumentTermFrequency': (sel32or64(b'l^{__SKIndex=}ll', b'q^{__SKIndex=}qq'),), 'SKIndexDocumentIteratorCopyNext': (b'@^{__SKIndexDocumentIterator=}', '', {'retval': {'already_cfretained': True}}), 'SKIndexRenameDocument': (b'Z^{__SKIndex=}@^{__CFString=}',), 'SKSummaryGetTypeID': (sel32or64(b'L', b'Q'),), 'SKDocumentGetSchemeName': (b'^{__CFString=}@',), 'SKDocumentCopyURL': (b'^{__CFURL=}@', '', {'retval': {'already_cfretained': True}}), 'SKIndexGetTypeID': (sel32or64(b'L', b'Q'),), 'SKSearchCancel': (b'v^{__SKSearch=}',), 'SKIndexGetDocumentCount': (sel32or64(b'l^{__SKIndex=}', b'q^{__SKIndex=}'),), 'SKSummaryCopySentenceSummaryString': (sel32or64(b'^{__CFString=}^{__SKSummary=}l', b'^{__CFString=}^{__SKSummary=}q'), '', {'retval': {'already_cfretained': True}}), 'SKIndexGetTermDocumentCount': (sel32or64(b'l^{__SKIndex=}l', b'q^{__SKIndex=}q'),), 'SKIndexGetTermIDForTermString': (sel32or64(b'l^{__SKIndex=}^{__CFString=}', b'q^{__SKIndex=}^{__CFString=}'),), 'SKSummaryCopyParagraphSummaryString': (sel32or64(b'^{__CFString=}^{__SKSummary=}l', b'^{__CFString=}^{__SKSummary=}q'), '', {'retval': {'already_cfretained': True}}), 'SKIndexGetDocumentState': (b'i^{__SKIndex=}@',), 'SKSearchCreate': (sel32or64(b'^{__SKSearch=}^{__SKIndex=}^{__CFString=}L', b'^{__SKSearch=}^{__SKIndex=}^{__CFString=}I'), '', {'retval': {'already_cfretained': True}}), 'SKIndexAddDocumentWithText': (b'Z^{__SKIndex=}@^{__CFString=}Z',), 'SKIndexAddDocument': (b'Z^{__SKIndex=}@^{__CFString=}Z',), 'SKIndexSetMaximumBytesBeforeFlush': (sel32or64(b'v^{__SKIndex=}l', b'v^{__SKIndex=}q'),), 'SKSearchFindMatches': (sel32or64(b'Z^{__SKSearch=}l^l^fd^l', 
b'Z^{__SKSearch=}q^q^fd^q'), '', {'arguments': {2: {'c_array_length_in_arg': (1, 5), 'type_modifier': 'o'}, 3: {'c_array_length_in_arg': (1, 5), 'type_modifier': 'o'}, 5: {'type_modifier': 'o'}}}), 'SKIndexCopyTermStringForTermID': (sel32or64(b'^{__CFString=}^{__SKIndex=}l', b'^{__CFString=}^{__SKIndex=}q'), '', {'retval': {'already_cfretained': True}}), 'SKIndexGetAnalysisProperties': (b'^{__CFDictionary=}^{__SKIndex=}',), 'SKSearchResultsGetCount': (sel32or64(b'l^{__SKSearchResults=}', b'q^{__SKSearchResults=}'),), 'SKSearchResultsGetInfoInRange': (sel32or64(b'l^{__SKSearchResults=}{_CFRange=ll}^@^^{__SKIndex=}^f', b'q^{__SKSearchResults=}{_CFRange=qq}^@^^{__SKIndex=}^f'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'c_array_length_in_result': True, 'type_modifier': 'o'}, 3: {'c_array_length_in_arg': 1, 'c_array_length_in_result': True, 'type_modifier': 'o'}, 4: {'c_array_length_in_arg': 1, 'c_array_length_in_result': True, 'type_modifier': 'o'}}}), 'SKIndexCopyDocumentForDocumentID': (sel32or64(b'@^{__SKIndex=}l', b'@^{__SKIndex=}q'), '', {'retval': {'already_cfretained': True}}), 'SKIndexCopyInfoForDocumentIDs': (sel32or64(b'v^{__SKIndex=}l^l^^{__CFString=}^l', b'v^{__SKIndex=}q^q^^{__CFString=}^q'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'already_cfretained': True, 'c_array_length_in_arg': 1, 'type_modifier': 'o'}, 4: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'SKIndexGetIndexType': (b'i^{__SKIndex=}',), 'SKIndexDocumentIteratorGetTypeID': (sel32or64(b'L', b'Q'),)} -cftypes=[('SKIndexDocumentIteratorRef', b'^{__SKIndexDocumentIterator=}', 'SKIndexDocumentIteratorGetTypeID', None), ('SKIndexRef', b'^{__SKIndex=}', 'SKIndexGetTypeID', None), ('SKSearchGroupRef', b'^{__SKSearchGroup=}', 'SKSearchGroupGetTypeID', None), ('SKSearchRef', b'^{__SKSearch=}', 'SKSearchGetTypeID', None), ('SKSearchResultsRef', b'^{__SKSearchResults=}', 'SKSearchResultsGetTypeID', None), ('SKSummaryRef', b'^{__SKSummary=}', 'SKSummaryGetTypeID', None), ('SKDocumentRef', b'@', 'SKDocumentGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreServices/SearchKit/_metadata.pyc b/env/lib/python2.7/site-packages/CoreServices/SearchKit/_metadata.pyc deleted file mode 100644 index ab70058b..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/SearchKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/__init__.py b/env/lib/python2.7/site-packages/CoreServices/__init__.py deleted file mode 100644 index 509c4b36..00000000 --- a/env/lib/python2.7/site-packages/CoreServices/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -''' -Python mapping for the CoreServices framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. - -Note that PyObjC only wrappers the non-deprecated parts of the CoreServices -framework. 
-''' -import sys -import objc -import FSEvents - -from CoreServices import DictionaryServices -from CoreServices import LaunchServices -from CoreServices import Metadata -from CoreServices import CarbonCore -from CoreServices import SearchKit - - -sys.modules['CoreServices'] = mod = objc.ObjCLazyModule('CoreServices', - "com.apple.CoreServices", - objc.pathForFramework("/System/Library/Frameworks/CoreServices.framework"), - {}, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, (FSEvents, DictionaryServices, LaunchServices, SearchKit, Metadata, CarbonCore)) diff --git a/env/lib/python2.7/site-packages/CoreServices/__init__.pyc b/env/lib/python2.7/site-packages/CoreServices/__init__.pyc deleted file mode 100644 index 8165ca9d..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreServices/_inlines.so b/env/lib/python2.7/site-packages/CoreServices/_inlines.so deleted file mode 100755 index 40ad8506..00000000 Binary files a/env/lib/python2.7/site-packages/CoreServices/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreSpotlight/_CoreSpotlight.so b/env/lib/python2.7/site-packages/CoreSpotlight/_CoreSpotlight.so deleted file mode 100755 index b7fdae72..00000000 Binary files a/env/lib/python2.7/site-packages/CoreSpotlight/_CoreSpotlight.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreSpotlight/__init__.py b/env/lib/python2.7/site-packages/CoreSpotlight/__init__.py deleted file mode 100644 index aa6b9a58..00000000 --- a/env/lib/python2.7/site-packages/CoreSpotlight/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the CoreSpotlight framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from CoreSpotlight import _metadata -from CoreSpotlight._CoreSpotlight import * - - -sys.modules['CoreSpotlight'] = mod = objc.ObjCLazyModule( - "CoreSpotlight", - "com.apple.CoreSpotlight", - objc.pathForFramework("/System/Library/Frameworks/CoreSpotlight.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['CoreSpotlight._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreSpotlight/__init__.pyc b/env/lib/python2.7/site-packages/CoreSpotlight/__init__.pyc deleted file mode 100644 index aea73ece..00000000 Binary files a/env/lib/python2.7/site-packages/CoreSpotlight/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreSpotlight/_metadata.py b/env/lib/python2.7/site-packages/CoreSpotlight/_metadata.py deleted file mode 100644 index 4e17f1c0..00000000 --- a/env/lib/python2.7/site-packages/CoreSpotlight/_metadata.py +++ /dev/null @@ -1,53 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 5 10:07:49 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$CSIndexErrorDomain$CSMailboxArchive$CSMailboxDrafts$CSMailboxInbox$CSMailboxJunk$CSMailboxSent$CSMailboxTrash$CSQueryContinuationActionType$CSSearchQueryErrorDomain$CSSearchQueryString$CSSearchableItemActionType$CSSearchableItemActivityIdentifier$CoreSpotlightVersionNumber@d$CoreSpotlightVersionString@*$''' -enums = '''$CSIndexErrorCodeIndexUnavailableError@-1000$CSIndexErrorCodeIndexingUnsupported@-1005$CSIndexErrorCodeInvalidClientStateError@-1002$CSIndexErrorCodeInvalidItemError@-1001$CSIndexErrorCodeQuotaExceeded@-1004$CSIndexErrorCodeRemoteConnectionError@-1003$CSIndexErrorCodeUnknownError@-1$CSSearchQueryErrorCodeCancelled@-2003$CSSearchQueryErrorCodeIndexUnreachable@-2001$CSSearchQueryErrorCodeInvalidQuery@-2002$CSSearchQueryErrorCodeUnknown@-2000$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CSCustomAttributeKey', b'initWithKeyName:searchable:searchableByDefault:unique:multiValued:', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}, 5: {'type': 'Z'}, 6: {'type': 'Z'}}}) - r(b'CSCustomAttributeKey', b'isMultiValued', {'retval': {'type': 'Z'}}) - r(b'CSCustomAttributeKey', b'isSearchable', {'retval': {'type': 'Z'}}) - r(b'CSCustomAttributeKey', b'isSearchableByDefault', {'retval': {'type': 'Z'}}) - r(b'CSCustomAttributeKey', b'isUnique', {'retval': {'type': 'Z'}}) - r(b'CSCustomAttributeKey', b'setMultiValued:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CSCustomAttributeKey', b'setSearchable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CSCustomAttributeKey', b'setSearchableByDefault:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CSCustomAttributeKey', b'setUnique:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CSSearchQuery', b'completionHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CSSearchQuery', b'foundItemsHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'CSSearchQuery', b'isCancelled', {'retval': {'type': 'Z'}}) - r(b'CSSearchQuery', b'setCompletionHandler:', 
{'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CSSearchQuery', b'setFoundItemsHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CSSearchableIndex', b'deleteAllSearchableItemsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CSSearchableIndex', b'deleteSearchableItemsWithDomainIdentifiers:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CSSearchableIndex', b'deleteSearchableItemsWithIdentifiers:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CSSearchableIndex', b'endIndexBatchWithClientState:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CSSearchableIndex', b'fetchLastClientStateWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'CSSearchableIndex', b'indexSearchableItems:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'CSSearchableIndex', b'isIndexingAvailable', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'dataForSearchableIndex:itemIdentifier:typeIdentifier:error:', {'arguments': {5: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'fileURLForSearchableIndex:itemIdentifier:typeIdentifier:inPlace:error:', {'arguments': {5: {'type': 'Z'}, 6: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'searchableIndex:reindexAllSearchableItemsWithAcknowledgementHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NSObject', b'searchableIndex:reindexSearchableItemsWithIdentifiers:acknowledgementHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreSpotlight/_metadata.pyc b/env/lib/python2.7/site-packages/CoreSpotlight/_metadata.pyc deleted file mode 100644 index 2ad01f31..00000000 Binary files a/env/lib/python2.7/site-packages/CoreSpotlight/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreText/__init__.py b/env/lib/python2.7/site-packages/CoreText/__init__.py deleted file mode 100644 index fbdc42a7..00000000 --- a/env/lib/python2.7/site-packages/CoreText/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -''' -Python mapping for the CoreText framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import CoreFoundation -import Quartz -import CoreText._manual - -from CoreText import _metadata - -sys.modules['CoreText'] = mod = objc.ObjCLazyModule('CoreText', - "com.apple.CoreText", - objc.pathForFramework("/System/Library/Frameworks/ApplicationServices.framework/Frameworks/CoreText.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( CoreFoundation, Quartz, CoreText._manual,)) - -import CoreText._manual as m -for nm in dir(m): - setattr(mod, nm, getattr(m, nm)) - -import sys -del sys.modules['CoreText._metadata'] diff --git a/env/lib/python2.7/site-packages/CoreText/__init__.pyc b/env/lib/python2.7/site-packages/CoreText/__init__.pyc deleted file mode 100644 index e9053057..00000000 Binary files a/env/lib/python2.7/site-packages/CoreText/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreText/_manual.so b/env/lib/python2.7/site-packages/CoreText/_manual.so deleted file mode 100755 index ee38a9f8..00000000 Binary files a/env/lib/python2.7/site-packages/CoreText/_manual.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreText/_metadata.py b/env/lib/python2.7/site-packages/CoreText/_metadata.py deleted file mode 100644 index 3950f993..00000000 --- a/env/lib/python2.7/site-packages/CoreText/_metadata.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Thu Jul 5 22:44:34 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'ROTAHeader': objc.createStructType('ROTAHeader', sel32or64(b'{ROTAHeader=lSSSS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', b'{ROTAHeader=iSSSS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}'), ['Version', 'Flags', 'NMasters', 'FirstGlyph', 'LastGlyph', 'lookup']), 'MorxContextualSubtable': objc.createStructType('MorxContextualSubtable', sel32or64(b'{MorxContextualSubtable={STXHeader=LLLL}L}', b'{MorxContextualSubtable={STXHeader=IIII}I}'), ['header', 'substitutionTableOffset']), 'SFNTLookupSingleHeader': objc.createStructType('SFNTLookupSingleHeader', b'{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}', ['binSearch', 'entries']), 'MortLigatureSubtable': objc.createStructType('MortLigatureSubtable', b'{MortLigatureSubtable={STHeader=CCSSS}SSS}', ['header', 'ligatureActionTableOffset', 'componentTableOffset', 'ligatureTableOffset']), 'MortSubtable': objc.createStructType('MortSubtable', 
sel32or64(b'{MortSubtable=SSL(MortSpecificSubtable={MortRearrangementSubtable={STHeader=CCSSS}}{MortContextualSubtable={STHeader=CCSSS}S}{MortLigatureSubtable={STHeader=CCSSS}SSS}{MortSwashSubtable={SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}{MortInsertionSubtable={STHeader=CCSSS}})}', b'{MortSubtable=SSI(MortSpecificSubtable={MortRearrangementSubtable={STHeader=CCSSS}}{MortContextualSubtable={STHeader=CCSSS}S}{MortLigatureSubtable={STHeader=CCSSS}SSS}{MortSwashSubtable={SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}{MortInsertionSubtable={STHeader=CCSSS}})}'), ['length', 'coverage', 'flags', 'u']), 'sfntFeatureName': objc.createStructType('sfntFeatureName', sel32or64(b'{sfntFeatureName=SSlSs}', b'{sfntFeatureName=SSiSs}'), ['featureType', 'settingCount', 'offsetToSettings', 'featureFlags', 'nameID']), 'sfntNameRecord': objc.createStructType('sfntNameRecord', b'{sfntNameRecord=SSSSSS}', ['platformID', 'scriptID', 'languageID', 'nameID', 'length', 'offset']), 'KerxStateEntry': objc.createStructType('KerxStateEntry', b'{KerxStateEntry=SSS}', ['newState', 'flags', 'valueIndex']), 'CTParagraphStyleSetting': objc.createStructType('CTParagraphStyleSetting', sel32or64(b'{CTParagraphStyleSetting=IL^v}', b'{CTParagraphStyleSetting=IQ^v}'), ['spec', 'valueSize', 'value']), 'STEntryTwo': objc.createStructType('STEntryTwo', b'{STEntryTwo=SSSS}', ['newState', 'flags', 'offset1', 'offset2']), 'SFNTLookupBinarySearchHeader': objc.createStructType('SFNTLookupBinarySearchHeader', b'{SFNTLookupBinarySearchHeader=SSSSS}', ['unitSize', 'nUnits', 'searchRange', 'entrySelector', 'rangeShift']), 'OpbdTable': objc.createStructType('OpbdTable', sel32or64(b'{OpbdTable=lS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', b'{OpbdTable=iS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}'), ['version', 'format', 'lookupTable']), 'BslnFormat2Part': objc.createStructType('BslnFormat2Part', b'{BslnFormat2Part=S[32s]}', ['stdGlyph', 'ctlPoints']), 'KerxIndexArrayHeader': objc.createStructType('KerxIndexArrayHeader', b'{KerxIndexArrayHeader=SSSSS[1s][1S][1S][1S]}', ['glyphCount', 'kernValueCount', 'leftClassCount', 'rightClassCount', 'flags', 'kernValue', 'leftClass', 'rightClass', 'kernIndex']), 'TrakTable': objc.createStructType('TrakTable', sel32or64(b'{TrakTable=lSSS}', b'{TrakTable=iSSS}'), ['version', 'format', 'horizOffset', 'vertOffset']), 'KerxKerningPair': objc.createStructType('KerxKerningPair', b'{KerxKerningPair=SS}', ['left', 'right']), 'KernIndexArrayHeader': objc.createStructType('KernIndexArrayHeader', 
b'{KernIndexArrayHeader=SCCCC[1s][1C][1C][1C]}', ['glyphCount', 'kernValueCount', 'leftClassCount', 'rightClassCount', 'flags', 'kernValue', 'leftClass', 'rightClass', 'kernIndex']), 'KernVersion0SubtableHeader': objc.createStructType('KernVersion0SubtableHeader', b'{KernVersion0SubtableHeader=SSS(KernFormatSpecificHeader={KernOrderedListHeader=SSSS[1S]}{KernStateHeader={STHeader=CCSSS}S[1C]}{KernSimpleArrayHeader=SSSS[1S]}{KernIndexArrayHeader=SCCCC[1s][1C][1C][1C]})}', ['version', 'length', 'stInfo', 'fsHeader']), 'STClassTable': objc.createStructType('STClassTable', b'{STClassTable=SS[1C]}', ['firstGlyph', 'nGlyphs', 'classes']), 'sfntDirectoryEntry': objc.createStructType('sfntDirectoryEntry', sel32or64(b'{sfntDirectoryEntry=LLLL}', b'{sfntDirectoryEntry=IIII}'), ['tableTag', 'checkSum', 'offset', 'length']), 'TrakTableEntry': objc.createStructType('TrakTableEntry', sel32or64(b'{TrakTableEntry=lSS}', b'{TrakTableEntry=iSS}'), ['track', 'nameTableIndex', 'sizesOffset']), 'KernOffsetTable': objc.createStructType('KernOffsetTable', b'{KernOffsetTable=SS[1S]}', ['firstGlyph', 'nGlyphs', 'offsetTable']), 'KernSimpleArrayHeader': objc.createStructType('KernSimpleArrayHeader', b'{KernSimpleArrayHeader=SSSS[1S]}', ['rowWidth', 'leftOffsetTable', 'rightOffsetTable', 'theArray', 'firstTable']), 'STXEntryZero': objc.createStructType('STXEntryZero', b'{STXEntryZero=SS}', ['newState', 'flags']), 'JustPCGlyphRepeatAddAction': objc.createStructType('JustPCGlyphRepeatAddAction', b'{JustPCGlyphRepeatAddAction=SS}', ['flags', 'glyph']), 'sfntCMapExtendedSubHeader': objc.createStructType('sfntCMapExtendedSubHeader', sel32or64(b'{sfntCMapExtendedSubHeader=SSLL}', b'{sfntCMapExtendedSubHeader=SSII}'), ['format', 'reserved', 'length', 'language']), 'MorxRearrangementSubtable': objc.createStructType('MorxRearrangementSubtable', sel32or64(b'{MorxRearrangementSubtable={STXHeader=LLLL}}', b'{MorxRearrangementSubtable={STXHeader=IIII}}'), ['header']), 'sfntVariationAxis': objc.createStructType('sfntVariationAxis', sel32or64(b'{sfntVariationAxis=Llllss}', b'{sfntVariationAxis=Iiiiss}'), ['axisTag', 'minValue', 'defaultValue', 'maxValue', 'flags', 'nameID']), 'MortRearrangementSubtable': objc.createStructType('MortRearrangementSubtable', b'{MortRearrangementSubtable={STHeader=CCSSS}}', ['header']), 'MortChain': objc.createStructType('MortChain', sel32or64(b'{MortChain=LLSS[1{MortFeatureEntry=SSLL}]}', b'{MortChain=IISS[1{MortFeatureEntry=SSII}]}'), ['defaultFlags', 'length', 'nFeatures', 'nSubtables', 'featureEntries']), 'sfntCMapEncoding': objc.createStructType('sfntCMapEncoding', sel32or64(b'{sfntCMapEncoding=SSL}', b'{sfntCMapEncoding=SSI}'), ['platformID', 'scriptID', 'offset']), 'MortInsertionSubtable': objc.createStructType('MortInsertionSubtable', b'{MortInsertionSubtable={STHeader=CCSSS}}', ['header']), 'SFNTLookupSegment': objc.createStructType('SFNTLookupSegment', b'{SFNTLookupSegment=SS[1S]}', ['lastGlyph', 'firstGlyph', 'value']), 'KernStateHeader': objc.createStructType('KernStateHeader', b'{KernStateHeader={STHeader=CCSSS}S[1C]}', ['header', 'valueTable', 'firstTable']), 'sfntFontRunFeature': objc.createStructType('sfntFontRunFeature', b'{sfntFontRunFeature=SS}', ['featureType', 'setting']), 'KerxCoordinateAction': objc.createStructType('KerxCoordinateAction', b'{KerxCoordinateAction=SSSS}', ['markX', 'markY', 'currX', 'currY']), 'JustPCDuctilityAction': objc.createStructType('JustPCDuctilityAction', sel32or64(b'{JustPCDuctilityAction=Llll}', b'{JustPCDuctilityAction=Iiii}'), ['ductilityAxis', 
'minimumLimit', 'noStretchValue', 'maximumLimit']), 'sfntCMapSubHeader': objc.createStructType('sfntCMapSubHeader', b'{sfntCMapSubHeader=SSS}', ['format', 'length', 'languageID']), 'AnchorPointTable': objc.createStructType('AnchorPointTable', sel32or64(b'{AnchorPointTable=L[1{AnchorPoint=ss}]}', b'{AnchorPointTable=I[1{AnchorPoint=ss}]}'), ['nPoints', 'points']), 'MorxChain': objc.createStructType('MorxChain', sel32or64(b'{MorxChain=LLLL[1{MortFeatureEntry=SSLL}]}', b'{MorxChain=IIII[1{MortFeatureEntry=SSII}]}'), ['defaultFlags', 'length', 'nFeatures', 'nSubtables', 'featureEntries']), 'KerxTableHeader': objc.createStructType('KerxTableHeader', sel32or64(b'{KerxTableHeader=lL[1L]}', b'{KerxTableHeader=iI[1I]}'), ['version', 'nTables', 'firstSubtable']), 'STEntryOne': objc.createStructType('STEntryOne', b'{STEntryOne=SSS}', ['newState', 'flags', 'offset1']), 'KernVersion0Header': objc.createStructType('KernVersion0Header', b'{KernVersion0Header=SS[1S]}', ['version', 'nTables', 'firstSubtable']), 'SFNTLookupTable': objc.createStructType('SFNTLookupTable', b'{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}', ['format', 'fsHeader']), 'JustPCDecompositionAction': objc.createStructType('JustPCDecompositionAction', sel32or64(b'{JustPCDecompositionAction=llSS[1S]}', b'{JustPCDecompositionAction=iiSS[1S]}'), ['lowerLimit', 'upperLimit', 'order', 'count', 'glyphs']), 'STXEntryTwo': objc.createStructType('STXEntryTwo', b'{STXEntryTwo=SSSS}', ['newState', 'flags', 'index1', 'index2']), 'KerxControlPointHeader': objc.createStructType('KerxControlPointHeader', sel32or64(b'{KerxControlPointHeader={STXHeader=LLLL}L[1C]}', b'{KerxControlPointHeader={STXHeader=IIII}I[1C]}'), ['header', 'flags', 'firstTable']), 'KerxAnchorPointAction': objc.createStructType('KerxAnchorPointAction', b'{KerxAnchorPointAction=SS}', ['markAnchorPoint', 'currAnchorPoint']), 'SFNTLookupSingle': objc.createStructType('SFNTLookupSingle', b'{SFNTLookupSingle=S[1S]}', ['glyph', 'value']), 'LtagTable': objc.createStructType('LtagTable', sel32or64(b'{LtagTable=LLL[1{LtagStringRange=SS}]}', b'{LtagTable=III[1{LtagStringRange=SS}]}'), ['version', 'flags', 'numTags', 'tagRange']), 'LcarCaretTable': objc.createStructType('LcarCaretTable', sel32or64(b'{LcarCaretTable=lS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', b'{LcarCaretTable=iS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}'), ['version', 'format', 'lookup']), 'JustWidthDeltaGroup': objc.createStructType('JustWidthDeltaGroup', sel32or64(b'{JustWidthDeltaGroup=L[1{JustWidthDeltaEntry=LllllSS}]}', b'{JustWidthDeltaGroup=I[1{JustWidthDeltaEntry=IiiiiSS}]}'), ['count', 'entries']), 'KerxSimpleArrayHeader': objc.createStructType('KerxSimpleArrayHeader', sel32or64(b'{KerxSimpleArrayHeader=LLLL[1L]}', b'{KerxSimpleArrayHeader=IIII[1I]}'), ['rowWidth', 
'leftOffsetTable', 'rightOffsetTable', 'theArray', 'firstTable']), 'MorxTable': objc.createStructType('MorxTable', sel32or64(b'{MorxTable=lL[1{MorxChain=LLLL[1{MortFeatureEntry=SSLL}]}]}', b'{MorxTable=iI[1{MorxChain=IIII[1{MortFeatureEntry=SSII}]}]}'), ['version', 'nChains', 'chains']), 'STEntryZero': objc.createStructType('STEntryZero', b'{STEntryZero=SS}', ['newState', 'flags']), 'MortTable': objc.createStructType('MortTable', sel32or64(b'{MortTable=lL[1{MortChain=LLSS[1{MortFeatureEntry=SSLL}]}]}', b'{MortTable=iI[1{MortChain=IISS[1{MortFeatureEntry=SSII}]}]}'), ['version', 'nChains', 'chains']), 'AnchorPoint': objc.createStructType('AnchorPoint', b'{AnchorPoint=ss}', ['x', 'y']), 'JustTable': objc.createStructType('JustTable', sel32or64(b'{JustTable=lSSS}', b'{JustTable=iSSS}'), ['version', 'format', 'horizHeaderOffset', 'vertHeaderOffset']), 'sfntVariationHeader': objc.createStructType('sfntVariationHeader', sel32or64(b'{sfntVariationHeader=lSSSSSS[1{sfntVariationAxis=Llllss}][1{sfntInstance=ss[1l]}]}', b'{sfntVariationHeader=iSSSSSS[1{sfntVariationAxis=Iiiiss}][1{sfntInstance=ss[1i]}]}'), ['version', 'offsetToData', 'countSizePairs', 'axisCount', 'axisSize', 'instanceCount', 'instanceSize', 'axis', 'instance']), 'KerxStateHeader': objc.createStructType('KerxStateHeader', sel32or64(b'{KerxStateHeader={STXHeader=LLLL}L[1C]}', b'{KerxStateHeader={STXHeader=IIII}I[1C]}'), ['header', 'valueTable', 'firstTable']), 'KerxOrderedListEntry': objc.createStructType('KerxOrderedListEntry', b'{KerxOrderedListEntry={KerxKerningPair=SS}s}', ['pair', 'value']), 'KerxSubtableHeader': objc.createStructType('KerxSubtableHeader', sel32or64(b'{KerxSubtableHeader=LLL(KerxFormatSpecificHeader={KerxOrderedListHeader=IIII[1L]}{KerxStateHeader={STXHeader=IIII}I[1C]}{KerxSimpleArrayHeader=IIII[1L]}{KerxIndexArrayHeader=SSSSS[1s][1S][1S][1S]}{KerxControlPointHeader={STXHeader=IIII}I[1C]})}', b'{KerxSubtableHeader=III(KerxFormatSpecificHeader={KerxOrderedListHeader=IIII[1I]}{KerxStateHeader={STXHeader=IIII}I[1C]}{KerxSimpleArrayHeader=IIII[1I]}{KerxIndexArrayHeader=SSSSS[1s][1S][1S][1S]}{KerxControlPointHeader={STXHeader=IIII}I[1C]})}'), ['length', 'stInfo', 'tupleIndex', 'fsHeader']), 'LtagStringRange': objc.createStructType('LtagStringRange', b'{LtagStringRange=SS}', ['offset', 'length']), 'KernOrderedListHeader': objc.createStructType('KernOrderedListHeader', b'{KernOrderedListHeader=SSSS[1S]}', ['nPairs', 'searchRange', 'entrySelector', 'rangeShift', 'table']), 'PropTable': objc.createStructType('PropTable', sel32or64(b'{PropTable=lSS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', b'{PropTable=iSS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}'), ['version', 'format', 'defaultProps', 'lookup']), 'KernOrderedListEntry': objc.createStructType('KernOrderedListEntry', b'{KernOrderedListEntry={KernKerningPair=SS}s}', ['pair', 'value']), 'JustPCActionSubrecord': objc.createStructType('JustPCActionSubrecord', sel32or64(b'{JustPCActionSubrecord=SSLL}', b'{JustPCActionSubrecord=SSII}'), ['theClass', 'theType', 'length', 
'data']), 'JustPostcompTable': objc.createStructType('JustPostcompTable', b'{JustPostcompTable={SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', ['lookupTable']), 'TrakTableData': objc.createStructType('TrakTableData', sel32or64(b'{TrakTableData=SSL[1{TrakTableEntry=lSS}]}', b'{TrakTableData=SSI[1{TrakTableEntry=iSS}]}'), ['nTracks', 'nSizes', 'sizeTableOffset', 'trakTable']), 'KerxControlPointAction': objc.createStructType('KerxControlPointAction', b'{KerxControlPointAction=SS}', ['markControlPoint', 'currControlPoint']), 'KernKerningPair': objc.createStructType('KernKerningPair', b'{KernKerningPair=SS}', ['left', 'right']), 'JustPCConditionalAddAction': objc.createStructType('JustPCConditionalAddAction', sel32or64(b'{JustPCConditionalAddAction=lSS}', b'{JustPCConditionalAddAction=iSS}'), ['substThreshold', 'addGlyph', 'substGlyph']), 'BslnFormat3Part': objc.createStructType('BslnFormat3Part', b'{BslnFormat3Part=S[32s]{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', ['stdGlyph', 'ctlPoints', 'mappingData']), 'MortContextualSubtable': objc.createStructType('MortContextualSubtable', b'{MortContextualSubtable={STHeader=CCSSS}S}', ['header', 'substitutionTableOffset']), 'MortSwashSubtable': objc.createStructType('MortSwashSubtable', b'{MortSwashSubtable={SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', ['lookup']), 'MortFeatureEntry': objc.createStructType('MortFeatureEntry', sel32or64(b'{MortFeatureEntry=SSLL}', b'{MortFeatureEntry=SSII}'), ['featureType', 'featureSelector', 'enableFlags', 'disableFlags']), 'AnkrTable': objc.createStructType('AnkrTable', sel32or64(b'{AnkrTable=SSLL}', b'{AnkrTable=SSII}'), ['version', 'flags', 'lookupTableOffset', 'anchorPointTableOffset']), 'FontVariation': objc.createStructType('FontVariation', sel32or64(b'{FontVariation=Ll}', b'{FontVariation=Ii}'), ['name', 'value']), 'MorxSubtable': objc.createStructType('MorxSubtable', sel32or64(b'{MorxSubtable=LLL(MorxSpecificSubtable={MorxRearrangementSubtable={STXHeader=IIII}}{MorxContextualSubtable={STXHeader=IIII}I}{MorxLigatureSubtable={STXHeader=IIII}III}{MortSwashSubtable={SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}{MorxInsertionSubtable={STXHeader=IIII}I})}', 
b'{MorxSubtable=III(MorxSpecificSubtable={MorxRearrangementSubtable={STXHeader=IIII}}{MorxContextualSubtable={STXHeader=IIII}I}{MorxLigatureSubtable={STXHeader=IIII}III}{MortSwashSubtable={SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}{MorxInsertionSubtable={STXHeader=IIII}I})}'), ['length', 'coverage', 'flags', 'u']), 'MorxInsertionSubtable': objc.createStructType('MorxInsertionSubtable', sel32or64(b'{MorxInsertionSubtable={STXHeader=LLLL}L}', b'{MorxInsertionSubtable={STXHeader=IIII}I}'), ['header', 'insertionGlyphTableOffset']), 'BslnFormat0Part': objc.createStructType('BslnFormat0Part', b'{BslnFormat0Part=[32s]}', ['deltas']), 'JustWidthDeltaEntry': objc.createStructType('JustWidthDeltaEntry', sel32or64(b'{JustWidthDeltaEntry=LllllSS}', b'{JustWidthDeltaEntry=IiiiiSS}'), ['justClass', 'beforeGrowLimit', 'beforeShrinkLimit', 'afterGrowLimit', 'afterShrinkLimit', 'growFlags', 'shrinkFlags']), 'LcarCaretClassEntry': objc.createStructType('LcarCaretClassEntry', b'{LcarCaretClassEntry=S[1S]}', ['count', 'partials']), 'sfntFontFeatureSetting': objc.createStructType('sfntFontFeatureSetting', b'{sfntFontFeatureSetting=Ss}', ['setting', 'nameID']), 'ALMXGlyphEntry': objc.createStructType('ALMXGlyphEntry', b'{ALMXGlyphEntry=sssss}', ['GlyphIndexOffset', 'HorizontalAdvance', 'XOffsetToHOrigin', 'VerticalAdvance', 'YOffsetToVOrigin']), 'sfntNameHeader': objc.createStructType('sfntNameHeader', b'{sfntNameHeader=SSS[1{sfntNameRecord=SSSSSS}]}', ['format', 'count', 'stringOffset', 'rec']), 'KernSubtableHeader': objc.createStructType('KernSubtableHeader', sel32or64(b'{KernSubtableHeader=lSs(KernFormatSpecificHeader={KernOrderedListHeader=SSSS[1S]}{KernStateHeader={STHeader=CCSSS}S[1C]}{KernSimpleArrayHeader=SSSS[1S]}{KernIndexArrayHeader=SCCCC[1s][1C][1C][1C]})}', b'{KernSubtableHeader=iSs(KernFormatSpecificHeader={KernOrderedListHeader=SSSS[1S]}{KernStateHeader={STHeader=CCSSS}S[1C]}{KernSimpleArrayHeader=SSSS[1S]}{KernIndexArrayHeader=SCCCC[1s][1C][1C][1C]})}'), ['length', 'stInfo', 'tupleIndex', 'fsHeader']), 'sfntDirectory': objc.createStructType('sfntDirectory', sel32or64(b'{sfntDirectory=LSSSS[1{sfntDirectoryEntry=LLLL}]}', b'{sfntDirectory=ISSSS[1{sfntDirectoryEntry=IIII}]}'), ['format', 'numOffsets', 'searchRange', 'entrySelector', 'rangeShift', 'table']), 'sfntDescriptorHeader': objc.createStructType('sfntDescriptorHeader', sel32or64(b'{sfntDescriptorHeader=ll[1{sfntFontDescriptor=Ll}]}', b'{sfntDescriptorHeader=ii[1{sfntFontDescriptor=Ii}]}'), ['version', 'descriptorCount', 'descriptor']), 'OpbdSideValues': objc.createStructType('OpbdSideValues', b'{OpbdSideValues=ssss}', ['leftSideShift', 'topSideShift', 'rightSideShift', 'bottomSideShift']), 'JustPCAction': objc.createStructType('JustPCAction', sel32or64(b'{JustPCAction=L[1{JustPCActionSubrecord=SSLL}]}', b'{JustPCAction=I[1{JustPCActionSubrecord=SSII}]}'), ['actionCount', 'actions']), 'KernTableHeader': objc.createStructType('KernTableHeader', sel32or64(b'{KernTableHeader=ll[1S]}', b'{KernTableHeader=ii[1S]}'), ['version', 'nTables', 'firstSubtable']), 'PropLookupSegment': objc.createStructType('PropLookupSegment', b'{PropLookupSegment=SSS}', ['lastGlyph', 'firstGlyph', 'value']), 'BslnFormat1Part': objc.createStructType('BslnFormat1Part', 
b'{BslnFormat1Part=[32s]{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', ['deltas', 'mappingData']), 'sfntInstance': objc.createStructType('sfntInstance', sel32or64(b'{sfntInstance=ss[1l]}', b'{sfntInstance=ss[1i]}'), ['nameID', 'flags', 'coord']), 'sfntFontDescriptor': objc.createStructType('sfntFontDescriptor', sel32or64(b'{sfntFontDescriptor=Ll}', b'{sfntFontDescriptor=Ii}'), ['name', 'value']), 'KernStateEntry': objc.createStructType('KernStateEntry', b'{KernStateEntry=SS}', ['newState', 'flags']), 'ALMXHeader': objc.createStructType('ALMXHeader', sel32or64(b'{ALMXHeader=lSSSS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', b'{ALMXHeader=iSSSS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}'), ['Version', 'Flags', 'NMasters', 'FirstGlyph', 'LastGlyph', 'lookup']), 'MorxLigatureSubtable': objc.createStructType('MorxLigatureSubtable', sel32or64(b'{MorxLigatureSubtable={STXHeader=LLLL}LLL}', b'{MorxLigatureSubtable={STXHeader=IIII}III}'), ['header', 'ligatureActionTableOffset', 'componentTableOffset', 'ligatureTableOffset']), 'KerxOrderedListHeader': objc.createStructType('KerxOrderedListHeader', sel32or64(b'{KerxOrderedListHeader=LLLL[1L]}', b'{KerxOrderedListHeader=IIII[1I]}'), ['nPairs', 'searchRange', 'entrySelector', 'rangeShift', 'table']), 'STXEntryOne': objc.createStructType('STXEntryOne', b'{STXEntryOne=SSS}', ['newState', 'flags', 'index1']), 'STXClassTable': objc.createStructType('STXClassTable', b'{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}', ['format', 'fsHeader']), 'SFNTLookupTrimmedArrayHeader': objc.createStructType('SFNTLookupTrimmedArrayHeader', b'{SFNTLookupTrimmedArrayHeader=SS[1S]}', ['firstGlyph', 'count', 'valueArray']), 'sfntCMapHeader': objc.createStructType('sfntCMapHeader', sel32or64(b'{sfntCMapHeader=SS[1{sfntCMapEncoding=SSL}]}', b'{sfntCMapHeader=SS[1{sfntCMapEncoding=SSI}]}'), ['version', 'numTables', 'encoding']), 'KerxControlPointEntry': objc.createStructType('KerxControlPointEntry', b'{KerxControlPointEntry=SSS}', ['newState', 'flags', 'actionIndex']), 'sfntFeatureHeader': objc.createStructType('sfntFeatureHeader', sel32or64(b'{sfntFeatureHeader=lSSl[1{sfntFeatureName=SSlSs}][1{sfntFontFeatureSetting=Ss}][1{sfntFontRunFeature=SS}]}', b'{sfntFeatureHeader=iSSi[1{sfntFeatureName=SSiSs}][1{sfntFontFeatureSetting=Ss}][1{sfntFontRunFeature=SS}]}'), ['version', 'featureNameCount', 'featureSetCount', 'reserved', 'names', 'settings', 'runs']), 'STXHeader': objc.createStructType('STXHeader', sel32or64(b'{STXHeader=LLLL}', b'{STXHeader=IIII}'), ['nClasses', 'classTableOffset', 
'stateArrayOffset', 'entryTableOffset']), 'STHeader': objc.createStructType('STHeader', b'{STHeader=CCSSS}', ['filler', 'nClasses', 'classTableOffset', 'stateArrayOffset', 'entryTableOffset']), 'PropLookupSingle': objc.createStructType('PropLookupSingle', b'{PropLookupSingle=SS}', ['glyph', 'props']), 'JustDirectionTable': objc.createStructType('JustDirectionTable', b'{JustDirectionTable=SSS{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}', ['justClass', 'widthDeltaClusters', 'postcomp', 'lookup']), 'SFNTLookupArrayHeader': objc.createStructType('SFNTLookupArrayHeader', b'{SFNTLookupArrayHeader=[1S]}', ['lookupValues']), 'SFNTLookupSegmentHeader': objc.createStructType('SFNTLookupSegmentHeader', b'{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}', ['binSearch', 'segments']), 'ROTAGlyphEntry': objc.createStructType('ROTAGlyphEntry', b'{ROTAGlyphEntry=sss}', ['GlyphIndexOffset', 'HBaselineOffset', 'VBaselineOffset']), 'BslnTable': objc.createStructType('BslnTable', sel32or64(b'{BslnTable=lSS(BslnFormatUnion={BslnFormat0Part=[32s]}{BslnFormat1Part=[32s]{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}{BslnFormat2Part=S[32s]}{BslnFormat3Part=S[32s]{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}})}', b'{BslnTable=iSS(BslnFormatUnion={BslnFormat0Part=[32s]}{BslnFormat1Part=[32s]{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}}{BslnFormat2Part=S[32s]}{BslnFormat3Part=S[32s]{SFNTLookupTable=S(SFNTLookupFormatSpecificHeader={SFNTLookupArrayHeader=[1S]}{SFNTLookupSegmentHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSegment=SS[1S]}]}{SFNTLookupSingleHeader={SFNTLookupBinarySearchHeader=SSSSS}[1{SFNTLookupSingle=S[1S]}]}{SFNTLookupTrimmedArrayHeader=SS[1S]})}})}'), ['version', 'format', 'defaultBaseline', 'parts'])}) -constants = 
'''$kCTBackgroundColorAttributeName$kCTBaselineClassAttributeName@^{__CFString=}$kCTBaselineClassHanging@^{__CFString=}$kCTBaselineClassIdeographicCentered@^{__CFString=}$kCTBaselineClassIdeographicHigh@^{__CFString=}$kCTBaselineClassIdeographicLow@^{__CFString=}$kCTBaselineClassMath@^{__CFString=}$kCTBaselineClassRoman@^{__CFString=}$kCTBaselineInfoAttributeName@^{__CFString=}$kCTBaselineOffsetAttributeName$kCTBaselineOriginalFont@^{__CFString=}$kCTBaselineReferenceFont@^{__CFString=}$kCTBaselineReferenceInfoAttributeName@^{__CFString=}$kCTCharacterShapeAttributeName@^{__CFString=}$kCTFontAttributeName@^{__CFString=}$kCTFontBaselineAdjustAttribute@^{__CFString=}$kCTFontCascadeListAttribute@^{__CFString=}$kCTFontCharacterSetAttribute@^{__CFString=}$kCTFontCollectionDisallowAutoActivationOption@^{__CFString=}$kCTFontCollectionIncludeDisabledFontsOption@^{__CFString=}$kCTFontCollectionRemoveDuplicatesOption@^{__CFString=}$kCTFontCopyrightNameKey@^{__CFString=}$kCTFontDescriptionNameKey@^{__CFString=}$kCTFontDescriptorMatchingCurrentAssetSize@^{__CFString=}$kCTFontDescriptorMatchingDescriptors@^{__CFString=}$kCTFontDescriptorMatchingError@^{__CFString=}$kCTFontDescriptorMatchingPercentage@^{__CFString=}$kCTFontDescriptorMatchingResult@^{__CFString=}$kCTFontDescriptorMatchingSourceDescriptor@^{__CFString=}$kCTFontDescriptorMatchingTotalAssetSize@^{__CFString=}$kCTFontDescriptorMatchingTotalDownloadedSize@^{__CFString=}$kCTFontDesignerNameKey@^{__CFString=}$kCTFontDesignerURLNameKey@^{__CFString=}$kCTFontDisplayNameAttribute@^{__CFString=}$kCTFontDownloadableAttribute@^{__CFString=}$kCTFontDownloadedAttribute@^{__CFString=}$kCTFontEnabledAttribute@^{__CFString=}$kCTFontFamilyNameAttribute@^{__CFString=}$kCTFontFamilyNameKey@^{__CFString=}$kCTFontFeatureSelectorDefaultKey@^{__CFString=}$kCTFontFeatureSelectorIdentifierKey@^{__CFString=}$kCTFontFeatureSelectorNameKey@^{__CFString=}$kCTFontFeatureSelectorSettingKey@^{__CFString=}$kCTFontFeatureSettingsAttribute@^{__CFString=}$kCTFontFeatureTypeExclusiveKey@^{__CFString=}$kCTFontFeatureTypeIdentifierKey@^{__CFString=}$kCTFontFeatureTypeNameKey@^{__CFString=}$kCTFontFeatureTypeSelectorsKey@^{__CFString=}$kCTFontFeaturesAttribute@^{__CFString=}$kCTFontFixedAdvanceAttribute@^{__CFString=}$kCTFontFormatAttribute@^{__CFString=}$kCTFontFullNameKey@^{__CFString=}$kCTFontLanguagesAttribute@^{__CFString=}$kCTFontLicenseNameKey@^{__CFString=}$kCTFontLicenseURLNameKey@^{__CFString=}$kCTFontMacintoshEncodingsAttribute@^{__CFString=}$kCTFontManagerBundleIdentifier@^{__CFString=}$kCTFontManagerErrorDomain@^{__CFString=}$kCTFontManagerErrorFontURLsKey@^{__CFString=}$kCTFontManagerRegisteredFontsChangedNotification@^{__CFString=}$kCTFontManufacturerNameKey@^{__CFString=}$kCTFontMatrixAttribute@^{__CFString=}$kCTFontNameAttribute@^{__CFString=}$kCTFontOpenTypeFeatureTag@^{__CFString=}$kCTFontOpenTypeFeatureValue@^{__CFString=}$kCTFontOrientationAttribute@^{__CFString=}$kCTFontPostScriptCIDNameKey@^{__CFString=}$kCTFontPostScriptNameKey@^{__CFString=}$kCTFontPriorityAttribute@^{__CFString=}$kCTFontRegistrationScopeAttribute@^{__CFString=}$kCTFontSampleTextNameKey@^{__CFString=}$kCTFontSizeAttribute@^{__CFString=}$kCTFontSlantTrait@^{__CFString=}$kCTFontStyleNameAttribute@^{__CFString=}$kCTFontStyleNameKey@^{__CFString=}$kCTFontSubFamilyNameKey@^{__CFString=}$kCTFontSymbolicTrait@^{__CFString=}$kCTFontTrademarkNameKey@^{__CFString=}$kCTFontTraitsAttribute@^{__CFString=}$kCTFontURLAttribute@^{__CFString=}$kCTFontUniqueNameKey@^{__CFString=}$kCTFontVariationAttribute@^{
__CFString=}$kCTFontVariationAxisDefaultValueKey@^{__CFString=}$kCTFontVariationAxisHiddenKey$kCTFontVariationAxisIdentifierKey@^{__CFString=}$kCTFontVariationAxisMaximumValueKey@^{__CFString=}$kCTFontVariationAxisMinimumValueKey@^{__CFString=}$kCTFontVariationAxisNameKey@^{__CFString=}$kCTFontVendorURLNameKey@^{__CFString=}$kCTFontVersionNameKey@^{__CFString=}$kCTFontWeightTrait@^{__CFString=}$kCTFontWidthTrait@^{__CFString=}$kCTForegroundColorAttributeName@^{__CFString=}$kCTForegroundColorFromContextAttributeName@^{__CFString=}$kCTFrameClippingPathsAttributeName@^{__CFString=}$kCTFramePathClippingPathAttributeName@^{__CFString=}$kCTFramePathFillRuleAttributeName@^{__CFString=}$kCTFramePathWidthAttributeName@^{__CFString=}$kCTFrameProgressionAttributeName@^{__CFString=}$kCTGlyphInfoAttributeName@^{__CFString=}$kCTHorizontalInVerticalFormsAttributeName$kCTKernAttributeName@^{__CFString=}$kCTLanguageAttributeName@^{__CFString=}$kCTLigatureAttributeName@^{__CFString=}$kCTParagraphStyleAttributeName@^{__CFString=}$kCTRubyAnnotationAttributeName@^{__CFString=}$kCTRubyAnnotationScaleToFitAttributeName$kCTRubyAnnotationSizeFactorAttributeName$kCTRunDelegateAttributeName@^{__CFString=}$kCTStrokeColorAttributeName@^{__CFString=}$kCTStrokeWidthAttributeName@^{__CFString=}$kCTSuperscriptAttributeName@^{__CFString=}$kCTTabColumnTerminatorsAttributeName@^{__CFString=}$kCTTypesetterOptionAllowUnboundedLayout$kCTTypesetterOptionDisableBidiProcessing@^{__CFString=}$kCTTypesetterOptionForcedEmbeddingLevel@^{__CFString=}$kCTUnderlineColorAttributeName@^{__CFString=}$kCTUnderlineStyleAttributeName@^{__CFString=}$kCTVerticalFormsAttributeName@^{__CFString=}$kCTWritingDirectionAttributeName@^{__CFString=}$''' -enums = '''$cmapFontTableTag@1668112752$descriptorFontTableTag@1717859171$featureFontTableTag@1717920116$kANKRCurrentVersion@0$kAbbrevSquaredLigaturesOffSelector@15$kAbbrevSquaredLigaturesOnSelector@14$kAllCapsSelector@1$kAllLowerCaseSelector@2$kAllTypeFeaturesOffSelector@1$kAllTypeFeaturesOnSelector@0$kAllTypographicFeaturesType@0$kAltHalfWidthTextSelector@6$kAltProportionalTextSelector@5$kAlternateHorizKanaOffSelector@1$kAlternateHorizKanaOnSelector@0$kAlternateKanaType@34$kAlternateVertKanaOffSelector@3$kAlternateVertKanaOnSelector@2$kAnnotationType@24$kAsteriskToMultiplyOffSelector@3$kAsteriskToMultiplyOnSelector@2$kBSLNControlPointFormatNoMap@2$kBSLNControlPointFormatWithMap@3$kBSLNCurrentVersion@65536$kBSLNDistanceFormatNoMap@0$kBSLNDistanceFormatWithMap@1$kBSLNHangingBaseline@3$kBSLNIdeographicCenterBaseline@1$kBSLNIdeographicHighBaseline@5$kBSLNIdeographicLowBaseline@2$kBSLNLastBaseline@31$kBSLNMathBaseline@4$kBSLNNoBaseline@255$kBSLNNoBaselineOverride@255$kBSLNNumBaselineClasses@32$kBSLNRomanBaseline@0$kBSLNTag@1651731566$kBoxAnnotationSelector@1$kCJKItalicRomanOffSelector@3$kCJKItalicRomanOnSelector@2$kCJKItalicRomanSelector@1$kCJKRomanSpacingType@103$kCJKSymbolAltFiveSelector@5$kCJKSymbolAltFourSelector@4$kCJKSymbolAltOneSelector@1$kCJKSymbolAltThreeSelector@3$kCJKSymbolAltTwoSelector@2$kCJKSymbolAlternativesType@29$kCJKVerticalRomanCenteredSelector@0$kCJKVerticalRomanHBaselineSelector@1$kCJKVerticalRomanPlacementType@31$kCTAdobeCNS1CharacterCollection@1$kCTAdobeGB1CharacterCollection@2$kCTAdobeJapan1CharacterCollection@3$kCTAdobeJapan2CharacterCollection@4$kCTAdobeKorea1CharacterCollection@5$kCTCenterTextAlignment@2$kCTCharacterCollectionAdobeCNS1@1$kCTCharacterCollectionAdobeGB1@2$kCTCharacterCollectionAdobeJapan1@3$kCTCharacterCollectionAdobeJapan2@4$kCTCharacterCollectionAdobeKorea1@5$kCTC
haracterCollectionIdentityMapping@0$kCTFontAlertHeaderFontType@18$kCTFontApplicationFontType@9$kCTFontBoldTrait@2$kCTFontClarendonSerifsClass@1073741824$kCTFontClassClarendonSerifs@1073741824$kCTFontClassFreeformSerifs@1879048192$kCTFontClassMaskShift@28$kCTFontClassMaskTrait@4026531840$kCTFontClassModernSerifs@805306368$kCTFontClassOldStyleSerifs@268435456$kCTFontClassOrnamentals@2415919104$kCTFontClassSansSerif@2147483648$kCTFontClassScripts@2684354560$kCTFontClassSlabSerifs@1342177280$kCTFontClassSymbolic@3221225472$kCTFontClassTransitionalSerifs@536870912$kCTFontClassUnknown@0$kCTFontCollectionCopyDefaultOptions@0$kCTFontCollectionCopyStandardSort@2$kCTFontCollectionCopyUnique@1$kCTFontColorGlyphsTrait@8192$kCTFontCompositeTrait@16384$kCTFontCondensedTrait@64$kCTFontControlContentFontType@26$kCTFontDefaultOrientation@0$kCTFontDescriptorMatchingDidBegin@0$kCTFontDescriptorMatchingDidFailWithError@8$kCTFontDescriptorMatchingDidFinish@1$kCTFontDescriptorMatchingDidFinishDownloading@6$kCTFontDescriptorMatchingDidMatch@7$kCTFontDescriptorMatchingDownloading@5$kCTFontDescriptorMatchingStalled@3$kCTFontDescriptorMatchingWillBeginDownloading@4$kCTFontDescriptorMatchingWillBeginQuerying@2$kCTFontEmphasizedSystemDetailFontType@20$kCTFontEmphasizedSystemFontType@3$kCTFontExpandedTrait@32$kCTFontFormatBitmap@5$kCTFontFormatOpenTypePostScript@1$kCTFontFormatOpenTypeTrueType@2$kCTFontFormatPostScript@4$kCTFontFormatTrueType@3$kCTFontFormatUnrecognized@0$kCTFontFreeformSerifsClass@1879048192$kCTFontHorizontalOrientation@1$kCTFontItalicTrait@1$kCTFontLabelFontType@10$kCTFontManagerAutoActivationDefault@0$kCTFontManagerAutoActivationDisabled@1$kCTFontManagerAutoActivationEnabled@2$kCTFontManagerAutoActivationPromptUser@3$kCTFontManagerErrorAlreadyRegistered@105$kCTFontManagerErrorFileNotFound@101$kCTFontManagerErrorInUse@202$kCTFontManagerErrorInsufficientPermissions@102$kCTFontManagerErrorInvalidFontData@104$kCTFontManagerErrorNotRegistered@201$kCTFontManagerErrorSystemRequired@203$kCTFontManagerErrorUnrecognizedFormat@103$kCTFontManagerScopeNone@0$kCTFontManagerScopeProcess@1$kCTFontManagerScopeSession@3$kCTFontManagerScopeUser@2$kCTFontMenuItemCmdKeyFontType@14$kCTFontMenuItemFontType@12$kCTFontMenuItemMarkFontType@13$kCTFontMenuTitleFontType@11$kCTFontMessageFontType@23$kCTFontMiniEmphasizedSystemFontType@7$kCTFontMiniSystemFontType@6$kCTFontModernSerifsClass@805306368$kCTFontMonoSpaceTrait@1024$kCTFontNoFontType@4294967295$kCTFontOldStyleSerifsClass@268435456$kCTFontOptionsDefault@0$kCTFontOptionsPreferSystemFont@4$kCTFontOptionsPreventAutoActivation@1$kCTFontOrientationDefault@0$kCTFontOrientationHorizontal@1$kCTFontOrientationVertical@2$kCTFontOrnamentalsClass@2415919104$kCTFontPaletteFontType@24$kCTFontPriorityComputer@30000$kCTFontPriorityDynamic@50000$kCTFontPriorityNetwork@20000$kCTFontPriorityProcess@60000$kCTFontPrioritySystem@10000$kCTFontPriorityUser@40000$kCTFontPushButtonFontType@16$kCTFontSansSerifClass@2147483648$kCTFontScriptsClass@2684354560$kCTFontSlabSerifsClass@1342177280$kCTFontSmallEmphasizedSystemFontType@5$kCTFontSmallSystemFontType@4$kCTFontSmallToolbarFontType@22$kCTFontSymbolicClass@3221225472$kCTFontSystemDetailFontType@19$kCTFontSystemFontType@2$kCTFontTableAcnt@1633906292$kCTFontTableAnkr@1634626418$kCTFontTableAvar@1635148146$kCTFontTableBASE@1111577413$kCTFontTableBdat@1650745716$kCTFontTableBhed@1651008868$kCTFontTableBloc@1651273571$kCTFontTableBsln@1651731566$kCTFontTableCBDT@1128416340$kCTFontTableCBLC@1128418371$kCTFontTableCFF@1128678944$kCTFontTableCFF2@1128678
962$kCTFontTableCOLR@1129270354$kCTFontTableCPAL@1129333068$kCTFontTableCidg@1667851367$kCTFontTableCmap@1668112752$kCTFontTableCvar@1668702578$kCTFontTableCvt@1668707360$kCTFontTableDSIG@1146308935$kCTFontTableEBDT@1161970772$kCTFontTableEBLC@1161972803$kCTFontTableEBSC@1161974595$kCTFontTableFdsc@1717859171$kCTFontTableFeat@1717920116$kCTFontTableFmtx@1718449272$kCTFontTableFond@1718578788$kCTFontTableFpgm@1718642541$kCTFontTableFvar@1719034226$kCTFontTableGDEF@1195656518$kCTFontTableGPOS@1196445523$kCTFontTableGSUB@1196643650$kCTFontTableGasp@1734439792$kCTFontTableGlyf@1735162214$kCTFontTableGvar@1735811442$kCTFontTableHVAR@1213612370$kCTFontTableHdmx@1751412088$kCTFontTableHead@1751474532$kCTFontTableHhea@1751672161$kCTFontTableHmtx@1752003704$kCTFontTableHsty@1752396921$kCTFontTableJSTF@1246975046$kCTFontTableJust@1786082164$kCTFontTableKern@1801810542$kCTFontTableKerx@1801810552$kCTFontTableLTSH@1280594760$kCTFontTableLcar@1818452338$kCTFontTableLoca@1819239265$kCTFontTableLtag@1819566439$kCTFontTableMATH@1296127048$kCTFontTableMERG@1296388679$kCTFontTableMVAR@1297498450$kCTFontTableMaxp@1835104368$kCTFontTableMeta@1835365473$kCTFontTableMort@1836020340$kCTFontTableMorx@1836020344$kCTFontTableName@1851878757$kCTFontTableOS2@1330851634$kCTFontTableOpbd@1869636196$kCTFontTableOptionExcludeSynthetic@1$kCTFontTableOptionNoOptions@0$kCTFontTablePCLT@1346587732$kCTFontTablePost@1886352244$kCTFontTablePrep@1886545264$kCTFontTableProp@1886547824$kCTFontTableSTAT@1398030676$kCTFontTableSVG@1398163232$kCTFontTableSbit@1935829364$kCTFontTableSbix@1935829368$kCTFontTableTrak@1953653099$kCTFontTableVDMX@1447316824$kCTFontTableVORG@1448038983$kCTFontTableVVAR@1448493394$kCTFontTableVhea@1986553185$kCTFontTableVmtx@1986884728$kCTFontTableXref@2020762982$kCTFontTableZapf@1516335206$kCTFontToolTipFontType@25$kCTFontToolbarFontType@21$kCTFontTraitBold@2$kCTFontTraitClassMask@4026531840$kCTFontTraitColorGlyphs@8192$kCTFontTraitComposite@16384$kCTFontTraitCondensed@64$kCTFontTraitExpanded@32$kCTFontTraitItalic@1$kCTFontTraitMonoSpace@1024$kCTFontTraitUIOptimized@4096$kCTFontTraitVertical@2048$kCTFontTransitionalSerifsClass@536870912$kCTFontUIFontAlertHeader@18$kCTFontUIFontApplication@9$kCTFontUIFontControlContent@26$kCTFontUIFontEmphasizedSystem@3$kCTFontUIFontEmphasizedSystemDetail@20$kCTFontUIFontLabel@10$kCTFontUIFontMenuItem@12$kCTFontUIFontMenuItemCmdKey@14$kCTFontUIFontMenuItemMark@13$kCTFontUIFontMenuTitle@11$kCTFontUIFontMessage@23$kCTFontUIFontMiniEmphasizedSystem@7$kCTFontUIFontMiniSystem@6$kCTFontUIFontNone@4294967295$kCTFontUIFontPalette@24$kCTFontUIFontPushButton@16$kCTFontUIFontSmallEmphasizedSystem@5$kCTFontUIFontSmallSystem@4$kCTFontUIFontSmallToolbar@22$kCTFontUIFontSystem@2$kCTFontUIFontSystemDetail@19$kCTFontUIFontToolTip@25$kCTFontUIFontToolbar@21$kCTFontUIFontUser@0$kCTFontUIFontUserFixedPitch@1$kCTFontUIFontUtilityWindowTitle@17$kCTFontUIFontViews@8$kCTFontUIFontWindowTitle@15$kCTFontUIOptimizedTrait@4096$kCTFontUnknownClass@0$kCTFontUserFixedPitchFontType@1$kCTFontUserFontType@0$kCTFontUtilityWindowTitleFontType@17$kCTFontVerticalOrientation@2$kCTFontVerticalTrait@2048$kCTFontViewsFontType@8$kCTFontWindowTitleFontType@15$kCTFramePathFillEvenOdd@0$kCTFramePathFillWindingNumber@1$kCTFrameProgressionLeftToRight@2$kCTFrameProgressionRightToLeft@1$kCTFrameProgressionTopToBottom@0$kCTIdentityMappingCharacterCollection@0$kCTJustifiedTextAlignment@3$kCTLeftTextAlignment@0$kCTLineBoundsExcludeTypographicLeading@1$kCTLineBoundsExcludeTypographicShifts@2$kCTLineBoundsIncludeLanguageExtents@
32$kCTLineBoundsUseGlyphPathBounds@8$kCTLineBoundsUseHangingPunctuation@4$kCTLineBoundsUseOpticalBounds@16$kCTLineBreakByCharWrapping@1$kCTLineBreakByClipping@2$kCTLineBreakByTruncatingHead@3$kCTLineBreakByTruncatingMiddle@5$kCTLineBreakByTruncatingTail@4$kCTLineBreakByWordWrapping@0$kCTLineTruncationEnd@1$kCTLineTruncationMiddle@2$kCTLineTruncationStart@0$kCTNaturalTextAlignment@4$kCTParagraphStyleSpecifierAlignment@0$kCTParagraphStyleSpecifierBaseWritingDirection@13$kCTParagraphStyleSpecifierCount@18$kCTParagraphStyleSpecifierDefaultTabInterval@5$kCTParagraphStyleSpecifierFirstLineHeadIndent@1$kCTParagraphStyleSpecifierHeadIndent@2$kCTParagraphStyleSpecifierLineBoundsOptions@17$kCTParagraphStyleSpecifierLineBreakMode@6$kCTParagraphStyleSpecifierLineHeightMultiple@7$kCTParagraphStyleSpecifierLineSpacing@10$kCTParagraphStyleSpecifierLineSpacingAdjustment@16$kCTParagraphStyleSpecifierMaximumLineHeight@8$kCTParagraphStyleSpecifierMaximumLineSpacing@14$kCTParagraphStyleSpecifierMinimumLineHeight@9$kCTParagraphStyleSpecifierMinimumLineSpacing@15$kCTParagraphStyleSpecifierParagraphSpacing@11$kCTParagraphStyleSpecifierParagraphSpacingBefore@12$kCTParagraphStyleSpecifierTabStops@4$kCTParagraphStyleSpecifierTailIndent@3$kCTRightTextAlignment@1$kCTRubyAlignmentAuto@0$kCTRubyAlignmentCenter@2$kCTRubyAlignmentDistributeLetter@4$kCTRubyAlignmentDistributeSpace@5$kCTRubyAlignmentEnd@3$kCTRubyAlignmentInvalid@255$kCTRubyAlignmentLineEdge@6$kCTRubyAlignmentStart@1$kCTRubyOverhangAuto@0$kCTRubyOverhangEnd@2$kCTRubyOverhangInvalid@255$kCTRubyOverhangNone@3$kCTRubyOverhangStart@1$kCTRubyPositionAfter@1$kCTRubyPositionBefore@0$kCTRubyPositionCount@4$kCTRubyPositionInline@3$kCTRubyPositionInterCharacter@2$kCTRunDelegateCurrentVersion@1$kCTRunDelegateVersion1@1$kCTRunStatusHasNonIdentityMatrix@4$kCTRunStatusNoStatus@0$kCTRunStatusNonMonotonic@2$kCTRunStatusRightToLeft@1$kCTTextAlignmentCenter@2$kCTTextAlignmentJustified@3$kCTTextAlignmentLeft@0$kCTTextAlignmentNatural@4$kCTTextAlignmentRight@1$kCTUnderlinePatternDash@512$kCTUnderlinePatternDashDot@768$kCTUnderlinePatternDashDotDot@1024$kCTUnderlinePatternDot@256$kCTUnderlinePatternSolid@0$kCTUnderlineStyleDouble@9$kCTUnderlineStyleNone@0$kCTUnderlineStyleSingle@1$kCTUnderlineStyleThick@2$kCTVersionNumber10_10@458752$kCTVersionNumber10_11@524288$kCTVersionNumber10_12@589824$kCTVersionNumber10_13@655360$kCTVersionNumber10_14@720896$kCTVersionNumber10_5@131072$kCTVersionNumber10_5_2@131073$kCTVersionNumber10_5_3@131074$kCTVersionNumber10_5_5@131075$kCTVersionNumber10_6@196608$kCTVersionNumber10_6_7@196615$kCTVersionNumber10_7@262144$kCTVersionNumber10_8@327680$kCTVersionNumber10_9@393216$kCTWritingDirectionEmbedding@0$kCTWritingDirectionLeftToRight@0$kCTWritingDirectionNatural@-1$kCTWritingDirectionOverride@2$kCTWritingDirectionRightToLeft@1$kCanonicalCompositionOffSelector@1$kCanonicalCompositionOnSelector@0$kCaseSensitiveLayoutOffSelector@1$kCaseSensitiveLayoutOnSelector@0$kCaseSensitiveLayoutType@33$kCaseSensitiveSpacingOffSelector@3$kCaseSensitiveSpacingOnSelector@2$kCharacterAlternativesType@17$kCharacterShapeType@20$kCircleAnnotationSelector@3$kCommonLigaturesOffSelector@3$kCommonLigaturesOnSelector@2$kCompatibilityCompositionOffSelector@3$kCompatibilityCompositionOnSelector@2$kContextualAlternatesOffSelector@1$kContextualAlternatesOnSelector@0$kContextualAlternatesType@36$kContextualLigaturesOffSelector@19$kContextualLigaturesOnSelector@18$kContextualSwashAlternatesOffSelector@5$kContextualSwashAlternatesOnSelector@4$kCursiveConnectionType@2$kCursiveSelecto
r@2$kDecomposeDiacriticsSelector@2$kDecorativeBordersSelector@4$kDefaultCJKRomanSelector@2$kDefaultLowerCaseSelector@0$kDefaultUpperCaseSelector@0$kDesignComplexityType@18$kDesignLevel1Selector@0$kDesignLevel2Selector@1$kDesignLevel3Selector@2$kDesignLevel4Selector@3$kDesignLevel5Selector@4$kDiacriticsType@9$kDiagonalFractionsSelector@2$kDiamondAnnotationSelector@8$kDingbatsSelector@1$kDiphthongLigaturesOffSelector@11$kDiphthongLigaturesOnSelector@10$kDisplayTextSelector@1$kEngravedTextSelector@2$kExpertCharactersSelector@10$kExponentsOffSelector@9$kExponentsOnSelector@8$kFleuronsSelector@3$kFontAlbanianLanguage@36$kFontAmharicLanguage@85$kFontAmharicScript@28$kFontArabicLanguage@12$kFontArabicScript@4$kFontArmenianLanguage@51$kFontArmenianScript@24$kFontAssameseLanguage@68$kFontAymaraLanguage@134$kFontAzerbaijanArLanguage@50$kFontAzerbaijaniLanguage@49$kFontBasqueLanguage@129$kFontBengaliLanguage@67$kFontBengaliScript@13$kFontBulgarianLanguage@44$kFontBurmeseLanguage@77$kFontBurmeseScript@19$kFontByelorussianLanguage@46$kFontCatalanLanguage@130$kFontChewaLanguage@92$kFontChineseScript@2$kFontCopyrightName@0$kFontCroatianLanguage@18$kFontCustom16BitScript@2$kFontCustom816BitScript@1$kFontCustom8BitScript@0$kFontCustomPlatform@4$kFontCyrillicScript@7$kFontCzechLanguage@38$kFontDanishLanguage@7$kFontDescriptionName@10$kFontDesignerName@9$kFontDesignerURLName@12$kFontDevanagariScript@9$kFontDutchLanguage@4$kFontDzongkhaLanguage@137$kFontEastEuropeanRomanScript@29$kFontEnglishLanguage@0$kFontEsperantoLanguage@94$kFontEstonianLanguage@27$kFontEthiopicScript@28$kFontExtendedArabicScript@31$kFontFaeroeseLanguage@30$kFontFamilyName@1$kFontFarsiLanguage@31$kFontFinnishLanguage@13$kFontFlemishLanguage@34$kFontFrenchLanguage@1$kFontFullName@4$kFontGallaLanguage@87$kFontGeezScript@28$kFontGeorgianLanguage@52$kFontGeorgianScript@23$kFontGermanLanguage@2$kFontGreekLanguage@14$kFontGreekScript@6$kFontGuaraniLanguage@133$kFontGujaratiLanguage@69$kFontGujaratiScript@11$kFontGurmukhiScript@10$kFontHebrewLanguage@10$kFontHebrewScript@5$kFontHindiLanguage@21$kFontHungarianLanguage@26$kFontISO10646_1993Semantics@2$kFontIcelandicLanguage@15$kFontIndonesianLanguage@81$kFontIrishLanguage@35$kFontItalianLanguage@3$kFontJapaneseLanguage@11$kFontJapaneseScript@1$kFontJavaneseRomLanguage@138$kFontKannadaLanguage@73$kFontKannadaScript@16$kFontKashmiriLanguage@61$kFontKazakhLanguage@48$kFontKhmerLanguage@78$kFontKhmerScript@20$kFontKirghizLanguage@54$kFontKoreanLanguage@23$kFontKoreanScript@3$kFontKurdishLanguage@60$kFontLaoLanguage@79$kFontLaotianScript@22$kFontLappishLanguage@29$kFontLastReservedName@255$kFontLatinLanguage@131$kFontLatvianLanguage@28$kFontLettishLanguage@28$kFontLicenseDescriptionName@13$kFontLicenseInfoURLName@14$kFontLithuanianLanguage@24$kFontMacCompatibleFullName@18$kFontMacedonianLanguage@43$kFontMacintoshPlatform@1$kFontMalagasyLanguage@93$kFontMalayArabicLanguage@84$kFontMalayRomanLanguage@83$kFontMalayalamLanguage@72$kFontMalayalamScript@17$kFontMalteseLanguage@16$kFontManufacturerName@8$kFontMarathiLanguage@66$kFontMicrosoftPlatform@3$kFontMicrosoftStandardScript@1$kFontMicrosoftSymbolScript@0$kFontMicrosoftUCS4Script@10$kFontMoldavianLanguage@53$kFontMongolianCyrLanguage@58$kFontMongolianLanguage@57$kFontMongolianScript@27$kFontNepaliLanguage@64$kFontNoLanguageCode@4294967295$kFontNoNameCode@4294967295$kFontNoPlatformCode@4294967295$kFontNoScriptCode@4294967295$kFontNorwegianLanguage@9$kFontOriyaLanguage@71$kFontOriyaScript@12$kFontOromoLanguage@87$kFontPashtoLanguage@59$kFontPersianLanguage
@31$kFontPolishLanguage@25$kFontPortugueseLanguage@8$kFontPostScriptCIDName@20$kFontPostscriptName@6$kFontPreferredFamilyName@16$kFontPreferredSubfamilyName@17$kFontPunjabiLanguage@70$kFontQuechuaLanguage@132$kFontRSymbolScript@8$kFontReservedPlatform@2$kFontRomanScript@0$kFontRomanianLanguage@37$kFontRuandaLanguage@90$kFontRundiLanguage@91$kFontRussian@7$kFontRussianLanguage@32$kFontSaamiskLanguage@29$kFontSampleTextName@19$kFontSanskritLanguage@65$kFontSerbianLanguage@42$kFontSimpChineseLanguage@33$kFontSimpleChineseScript@25$kFontSindhiLanguage@62$kFontSindhiScript@31$kFontSinhaleseLanguage@76$kFontSinhaleseScript@18$kFontSlavicScript@29$kFontSlovakLanguage@39$kFontSlovenianLanguage@40$kFontSomaliLanguage@88$kFontSpanishLanguage@6$kFontStyleName@2$kFontSundaneseRomLanguage@139$kFontSwahiliLanguage@89$kFontSwedishLanguage@5$kFontTagalogLanguage@82$kFontTajikiLanguage@55$kFontTamilLanguage@74$kFontTamilScript@14$kFontTatarLanguage@135$kFontTeluguLanguage@75$kFontTeluguScript@15$kFontThaiLanguage@22$kFontThaiScript@21$kFontTibetanLanguage@63$kFontTibetanScript@26$kFontTigrinyaLanguage@86$kFontTradChineseLanguage@19$kFontTrademarkName@7$kFontTraditionalChineseScript@2$kFontTurkishLanguage@17$kFontTurkmenLanguage@56$kFontUighurLanguage@136$kFontUkrainianLanguage@45$kFontUnicodeDefaultSemantics@0$kFontUnicodePlatform@0$kFontUnicodeV1_1Semantics@1$kFontUnicodeV2_0BMPOnlySemantics@3$kFontUnicodeV2_0FullCoverageSemantics@4$kFontUnicodeV4_0VariationSequenceSemantics@5$kFontUnicode_FullRepertoire@6$kFontUninterpretedScript@32$kFontUniqueName@3$kFontUrduLanguage@20$kFontUzbekLanguage@47$kFontVendorURLName@11$kFontVersionName@5$kFontVietnameseLanguage@80$kFontVietnameseScript@30$kFontWelshLanguage@128$kFontYiddishLanguage@41$kFormInterrobangOffSelector@7$kFormInterrobangOnSelector@6$kFractionsType@11$kFullWidthCJKRomanSelector@3$kFullWidthIdeographsSelector@0$kFullWidthKanaSelector@0$kHalfWidthCJKRomanSelector@0$kHalfWidthIdeographsSelector@2$kHalfWidthTextSelector@2$kHanjaToHangulAltOneSelector@7$kHanjaToHangulAltThreeSelector@9$kHanjaToHangulAltTwoSelector@8$kHanjaToHangulSelector@1$kHideDiacriticsSelector@1$kHiraganaToKatakanaSelector@2$kHistoricalLigaturesOffSelector@21$kHistoricalLigaturesOnSelector@20$kHojoCharactersSelector@12$kHyphenToEnDashOffSelector@3$kHyphenToEnDashOnSelector@2$kHyphenToMinusOffSelector@1$kHyphenToMinusOnSelector@0$kHyphensToEmDashOffSelector@1$kHyphensToEmDashOnSelector@0$kIdeographicAltFiveSelector@5$kIdeographicAltFourSelector@4$kIdeographicAltOneSelector@1$kIdeographicAltThreeSelector@3$kIdeographicAltTwoSelector@2$kIdeographicAlternativesType@30$kIdeographicSpacingType@26$kIlluminatedCapsSelector@3$kInequalityLigaturesOffSelector@7$kInequalityLigaturesOnSelector@6$kInferiorsSelector@2$kInitialCapsAndSmallCapsSelector@5$kInitialCapsSelector@4$kInternationalSymbolsSelector@5$kInvertedBoxAnnotationSelector@9$kInvertedCircleAnnotationSelector@4$kInvertedRoundedBoxAnnotationSelector@10$kItalicCJKRomanType@32$kJIS1978CharactersSelector@2$kJIS1983CharactersSelector@3$kJIS1990CharactersSelector@4$kJIS2004CharactersSelector@11$kJUSTCurrentVersion@65536$kJUSTKashidaPriority@0$kJUSTLetterPriority@2$kJUSTNullPriority@3$kJUSTOverrideLimits@16384$kJUSTOverridePriority@32768$kJUSTOverrideUnlimited@8192$kJUSTPriorityCount@4$kJUSTPriorityMask@3$kJUSTSpacePriority@1$kJUSTStandardFormat@0$kJUSTTag@1786082164$kJUSTUnlimited@4096$kJUSTnoGlyphcode@65535$kJUSTpcConditionalAddAction@2$kJUSTpcDecompositionAction@0$kJUSTpcDuctilityAction@4$kJUSTpcGlyphRepeatAddAction@5$kJUSTpcGlyphStretchActi
on@3$kJUSTpcUnconditionalAddAction@1$kKERNCrossStream@16384$kKERNCrossStreamResetNote@2$kKERNCurrentVersion@65536$kKERNFormatMask@255$kKERNIndexArray@3$kKERNLineEndKerning@2$kKERNLineStart@1$kKERNNoCrossKerning@4$kKERNNoStakeNote@1$kKERNNotApplied@1$kKERNNotesRequested@8$kKERNOrderedList@0$kKERNResetCrossStream@32768$kKERNSimpleArray@2$kKERNStateTable@1$kKERNTag@1801810542$kKERNUnusedBits@7936$kKERNVariation@8192$kKERNVertical@32768$kKERXActionOffsetMask@16777215$kKERXActionTypeAnchorPoints@1073741824$kKERXActionTypeControlPoints@0$kKERXActionTypeCoordinates@2147483648$kKERXActionTypeMask@3221225472$kKERXControlPoint@4$kKERXCrossStream@1073741824$kKERXCrossStreamResetNote@2$kKERXCurrentVersion@131072$kKERXDescending@268435456$kKERXFormatMask@255$kKERXIndexArray@3$kKERXLineEndKerning@2$kKERXLineStart@1$kKERXNoCrossKerning@4$kKERXNoStakeNote@1$kKERXNotApplied@1$kKERXNotesRequested@8$kKERXOrderedList@0$kKERXResetCrossStream@32768$kKERXSimpleArray@2$kKERXStateTable@1$kKERXTag@1801810552$kKERXUnusedBits@268435200$kKERXUnusedFlags@1056964608$kKERXVariation@536870912$kKERXVertical@-2147483648$kKanaSpacingType@25$kKanaToRomanizationSelector@4$kKatakanaToHiraganaSelector@3$kLCARCtlPointFormat@1$kLCARCurrentVersion@65536$kLCARLinearFormat@0$kLCARTag@1818452338$kLTAGCurrentVersion@1$kLanguageTagType@39$kLastFeatureType@-1$kLetterCaseType@3$kLigaturesType@1$kLineFinalSwashesOffSelector@7$kLineFinalSwashesOnSelector@6$kLineInitialSwashesOffSelector@5$kLineInitialSwashesOnSelector@4$kLinguisticRearrangementOffSelector@1$kLinguisticRearrangementOnSelector@0$kLinguisticRearrangementType@5$kLogosOffSelector@7$kLogosOnSelector@6$kLowerCaseNumbersSelector@0$kLowerCasePetiteCapsSelector@2$kLowerCaseSmallCapsSelector@1$kLowerCaseType@37$kMORTContextualType@1$kMORTCoverDescending@16384$kMORTCoverIgnoreVertical@8192$kMORTCoverTypeMask@15$kMORTCoverVertical@32768$kMORTCurrInsertBefore@2048$kMORTCurrInsertCountMask@992$kMORTCurrInsertCountShift@5$kMORTCurrInsertKashidaLike@8192$kMORTCurrJustTableCountMask@127$kMORTCurrJustTableCountShift@0$kMORTCurrentVersion@65536$kMORTDoInsertionsBefore@128$kMORTInsertionType@5$kMORTInsertionsCountMask@63$kMORTIsSplitVowelPiece@64$kMORTLigFormOffsetMask@1073741823$kMORTLigFormOffsetShift@2$kMORTLigLastAction@-2147483648$kMORTLigStoreLigature@1073741824$kMORTLigatureType@2$kMORTMarkInsertBefore@1024$kMORTMarkInsertCountMask@31$kMORTMarkInsertCountShift@0$kMORTMarkInsertKashidaLike@4096$kMORTMarkJustTableCountMask@16256$kMORTMarkJustTableCountShift@7$kMORTRearrangementType@0$kMORTSwashType@4$kMORTTag@1836020340$kMORTraCDx@6$kMORTraCDxA@8$kMORTraCDxAB@12$kMORTraCDxBA@13$kMORTraDCx@7$kMORTraDCxA@9$kMORTraDCxAB@14$kMORTraDCxBA@15$kMORTraDx@2$kMORTraDxA@3$kMORTraDxAB@10$kMORTraDxBA@11$kMORTraNoAction@0$kMORTraxA@1$kMORTraxAB@4$kMORTraxBA@5$kMORXCoverDescending@1073741824$kMORXCoverIgnoreVertical@536870912$kMORXCoverTypeMask@255$kMORXCoverVertical@-2147483648$kMORXCurrentVersion@131072$kMORXTag@1836020344$kMathSymbolsSelector@6$kMathematicalExtrasType@15$kMathematicalGreekOffSelector@11$kMathematicalGreekOnSelector@10$kMonospacedNumbersSelector@0$kMonospacedTextSelector@1$kNLCCharactersSelector@13$kNoAlternatesSelector@0$kNoAnnotationSelector@0$kNoCJKItalicRomanSelector@0$kNoCJKSymbolAlternativesSelector@0$kNoFractionsSelector@0$kNoIdeographicAlternativesSelector@0$kNoOrnamentsSelector@0$kNoRubyKanaSelector@0$kNoStyleOptionsSelector@0$kNoStylisticAlternatesSelector@0$kNoTransliterationSelector@0$kNonFinalSwashesOffSelector@9$kNonFinalSwashesOnSelector@8$kNormalPositionSelector@0$kNumber
CaseType@21$kNumberSpacingType@6$kOPBDControlPointFormat@1$kOPBDCurrentVersion@65536$kOPBDDistanceFormat@0$kOPBDTag@1869636196$kOrdinalsSelector@3$kOrnamentSetsType@16$kOverlappingCharactersType@13$kPROPALDirectionClass@2$kPROPANDirectionClass@6$kPROPBNDirectionClass@19$kPROPCSDirectionClass@7$kPROPCanHangLTMask@16384$kPROPCanHangRBMask@8192$kPROPCurrentVersion@196608$kPROPDirectionMask@31$kPROPENDirectionClass@3$kPROPESDirectionClass@4$kPROPETDirectionClass@5$kPROPIsFloaterMask@32768$kPROPLDirectionClass@0$kPROPLREDirectionClass@13$kPROPLRODirectionClass@14$kPROPNSMDirectionClass@18$kPROPNumDirectionClasses@20$kPROPONDirectionClass@11$kPROPPDFDirectionClass@17$kPROPPSDirectionClass@8$kPROPPairOffsetMask@3840$kPROPPairOffsetShift@8$kPROPPairOffsetSign@7$kPROPRDirectionClass@1$kPROPRLEDirectionClass@15$kPROPRLODirectionClass@16$kPROPRightConnectMask@128$kPROPSDirectionClass@9$kPROPSENDirectionClass@12$kPROPTag@1886547824$kPROPUseRLPairMask@4096$kPROPWSDirectionClass@10$kPROPZeroReserved@96$kParenthesisAnnotationSelector@5$kPartiallyConnectedSelector@1$kPeriodAnnotationSelector@6$kPeriodsToEllipsisOffSelector@11$kPeriodsToEllipsisOnSelector@10$kPiCharactersSelector@2$kPreventOverlapOffSelector@1$kPreventOverlapOnSelector@0$kProportionalCJKRomanSelector@1$kProportionalIdeographsSelector@1$kProportionalKanaSelector@1$kProportionalNumbersSelector@1$kProportionalTextSelector@0$kQuarterWidthNumbersSelector@3$kQuarterWidthTextSelector@4$kRareLigaturesOffSelector@5$kRareLigaturesOnSelector@4$kRebusPicturesOffSelector@9$kRebusPicturesOnSelector@8$kRequiredLigaturesOffSelector@1$kRequiredLigaturesOnSelector@0$kRomanNumeralAnnotationSelector@7$kRomanizationToHiraganaSelector@5$kRomanizationToKatakanaSelector@6$kRoundedBoxAnnotationSelector@2$kRubyKanaOffSelector@3$kRubyKanaOnSelector@2$kRubyKanaSelector@1$kRubyKanaType@28$kSFNTLookupSegmentArray@4$kSFNTLookupSegmentSingle@2$kSFNTLookupSimpleArray@0$kSFNTLookupSingleTable@6$kSFNTLookupTrimmedArray@8$kSTClassDeletedGlyph@2$kSTClassEndOfLine@3$kSTClassEndOfText@0$kSTClassOutOfBounds@1$kSTKCrossStreamReset@8192$kSTLigActionMask@16383$kSTMarkEnd@8192$kSTNoAdvance@16384$kSTRearrVerbMask@15$kSTSetMark@32768$kSTXHasLigAction@8192$kScientificInferiorsSelector@4$kShowDiacriticsSelector@0$kSimplifiedCharactersSelector@1$kSlashToDivideOffSelector@5$kSlashToDivideOnSelector@4$kSlashedZeroOffSelector@5$kSlashedZeroOnSelector@4$kSmallCapsSelector@3$kSmartQuotesOffSelector@9$kSmartQuotesOnSelector@8$kSmartSwashType@8$kSquaredLigaturesOffSelector@13$kSquaredLigaturesOnSelector@12$kStyleOptionsType@19$kStylisticAltEightOffSelector@17$kStylisticAltEightOnSelector@16$kStylisticAltEighteenOffSelector@37$kStylisticAltEighteenOnSelector@36$kStylisticAltElevenOffSelector@23$kStylisticAltElevenOnSelector@22$kStylisticAltFifteenOffSelector@31$kStylisticAltFifteenOnSelector@30$kStylisticAltFiveOffSelector@11$kStylisticAltFiveOnSelector@10$kStylisticAltFourOffSelector@9$kStylisticAltFourOnSelector@8$kStylisticAltFourteenOffSelector@29$kStylisticAltFourteenOnSelector@28$kStylisticAltNineOffSelector@19$kStylisticAltNineOnSelector@18$kStylisticAltNineteenOffSelector@39$kStylisticAltNineteenOnSelector@38$kStylisticAltOneOffSelector@3$kStylisticAltOneOnSelector@2$kStylisticAltSevenOffSelector@15$kStylisticAltSevenOnSelector@14$kStylisticAltSeventeenOffSelector@35$kStylisticAltSeventeenOnSelector@34$kStylisticAltSixOffSelector@13$kStylisticAltSixOnSelector@12$kStylisticAltSixteenOffSelector@33$kStylisticAltSixteenOnSelector@32$kStylisticAltTenOffSelector@21$kStylisticAltTenOnSelector@2
0$kStylisticAltThirteenOffSelector@27$kStylisticAltThirteenOnSelector@26$kStylisticAltThreeOffSelector@7$kStylisticAltThreeOnSelector@6$kStylisticAltTwelveOffSelector@25$kStylisticAltTwelveOnSelector@24$kStylisticAltTwentyOffSelector@41$kStylisticAltTwentyOnSelector@40$kStylisticAltTwoOffSelector@5$kStylisticAltTwoOnSelector@4$kStylisticAlternativesType@35$kSubstituteVerticalFormsOffSelector@1$kSubstituteVerticalFormsOnSelector@0$kSuperiorsSelector@1$kSwashAlternatesOffSelector@3$kSwashAlternatesOnSelector@2$kSymbolLigaturesOffSelector@17$kSymbolLigaturesOnSelector@16$kTRAKCurrentVersion@65536$kTRAKTag@1953653099$kTRAKUniformFormat@0$kTallCapsSelector@5$kTextSpacingType@22$kThirdWidthNumbersSelector@2$kThirdWidthTextSelector@3$kTitlingCapsSelector@4$kTraditionalAltFiveSelector@9$kTraditionalAltFourSelector@8$kTraditionalAltOneSelector@5$kTraditionalAltThreeSelector@7$kTraditionalAltTwoSelector@6$kTraditionalCharactersSelector@0$kTraditionalNamesCharactersSelector@14$kTranscodingCompositionOffSelector@5$kTranscodingCompositionOnSelector@4$kTransliterationType@23$kTypographicExtrasType@14$kUnconnectedSelector@0$kUnicodeDecompositionType@27$kUpperAndLowerCaseSelector@0$kUpperCaseNumbersSelector@1$kUpperCasePetiteCapsSelector@2$kUpperCaseSmallCapsSelector@1$kUpperCaseType@38$kVerticalFractionsSelector@1$kVerticalPositionType@10$kVerticalSubstitutionType@4$kWordFinalSwashesOffSelector@3$kWordFinalSwashesOnSelector@2$kWordInitialSwashesOffSelector@1$kWordInitialSwashesOnSelector@0$nameFontTableTag@1851878757$nonGlyphID@65535$os2FontTableTag@1330851634$sizeof_sfntCMapEncoding@8$sizeof_sfntCMapExtendedSubHeader@12$sizeof_sfntCMapHeader@4$sizeof_sfntCMapSubHeader@6$sizeof_sfntDescriptorHeader@8$sizeof_sfntDirectory@12$sizeof_sfntInstance@4$sizeof_sfntNameHeader@6$sizeof_sfntNameRecord@12$sizeof_sfntVariationAxis@20$sizeof_sfntVariationHeader@16$variationFontTableTag@1719034226$''' -misc.update({}) -functions={'CTFontManagerCreateFontDescriptorsFromURL': (b'^{__CFArray=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CTLineCreateTruncatedLine': (b'^{__CTLine=}^{__CTLine=}dI^{__CTLine=}', '', {'retval': {'already_cfretained': True}}), 'CTLineEnumerateCaretOffsets': (b'v^{__CTLine=}@?', '', {'retval': {'type': 'v'}, 'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'd'}, 2: {'type': 'L'}, 3: {'type': 'B'}, 4: {'type': 'o^B'}}}}}}), 'CTFramesetterCreateFrame': (sel32or64(b'^{__CTFrame=}^{__CTFramesetter=}{_CFRange=ll}^{CGPath=}^{__CFDictionary=}', b'^{__CTFrame=}^{__CTFramesetter=}{_CFRange=qq}^{CGPath=}^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CTTypesetterSuggestClusterBreak': (sel32or64(b'l^{__CTTypesetter=}ld', b'q^{__CTTypesetter=}qd'),), 'CTFontCreateCopyWithFamily': (sel32or64(b'^{__CTFont=}^{__CTFont=}f^{CGAffineTransform=ffffff}^{__CFString=}', b'^{__CTFont=}^{__CTFont=}d^{CGAffineTransform=dddddd}^{__CFString=}'), '', {'retval': {'already_cfretained': True}}), 'CTFontGetGlyphsForCharacters': (sel32or64(b'B^{__CTFont=}^T^Sl', b'B^{__CTFont=}^T^Sq'), '', {'arguments': {1: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}, 2: {'c_array_length_in_arg': 3, 'type_modifier': 'o'}}}), 'CTLineGetPenOffsetForFlush': (sel32or64(b'd^{__CTLine=}fd', b'd^{__CTLine=}dd'),), 'CTTypesetterSuggestLineBreak': (sel32or64(b'l^{__CTTypesetter=}ld', b'q^{__CTTypesetter=}qd'),), 'CTFontCreateWithGraphicsFont': (sel32or64(b'^{__CTFont=}^{CGFont=}f^{CGAffineTransform=ffffff}^{__CTFontDescriptor=}', 
b'^{__CTFont=}^{CGFont=}d^{CGAffineTransform=dddddd}^{__CTFontDescriptor=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'n'}}}), 'CTRunGetStringRange': (sel32or64(b'{_CFRange=ll}^{__CTRun=}', b'{_CFRange=qq}^{__CTRun=}'),), 'CTFontCreateWithQuickdrawInstance': (sel32or64(b'^{__CTFont=}*sCf', b'^{__CTFont=}*sCd'), '', {'retval': {'already_cfretained': True}}), 'CTFontManagerUnregisterFontsForURL': (b'B^{__CFURL=}I^^{__CFError=}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CTFontManagerCreateFontRequestRunLoopSource': (sel32or64(b'^{__CFRunLoopSource=}l@?', b'^{__CFRunLoopSource=}q@?'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}, 2: {'type': 'i'}}}}}}), 'CTFontManagerSetAutoActivationSetting': (b'v^{__CFString=}I',), 'CTRubyAnnotationCreateWithAttributes': (b'@LLL@@', '', {'retval': {'already_cfretained': True}}), 'CTFontDescriptorMatchFontDescriptorsWithProgressHandler': (b'B^{__CFArray=}^{__CFSet=}@?',), 'CTRunGetStringIndices': (sel32or64(b'v^{__CTRun=}{_CFRange=ll}^l', b'v^{__CTRun=}{_CFRange=qq}^q'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'CTRunDelegateGetTypeID': (sel32or64(b'L', b'Q'),), 'CTFontCopyAvailableTables': (b'^{__CFArray=}^{__CTFont=}I', '', {'retval': {'already_cfretained': True}}), 'CTFontManagerCompareFontFamilyNames': (sel32or64(b'l^{__CFString=}^{__CFString=}^v', b'q^{__CFString=}^{__CFString=}^v'),), 'CTRubyAnnotationCreate': (sel32or64(b'^{__CTRubyAnnotation=}CCf[4^{__CFString=}]', b'^{__CTRubyAnnotation=}CCd[4^{__CFString=}]'), '', {'retval': {'already_cfretained': True}}), 'CTRunGetStringIndicesPtr': (sel32or64(b'r^i^{__CTRun=}', b'r^q^{__CTRun=}'), '', {'retval': {'c_array_of_variable_length': True}}), 'CTFontGetAscent': (sel32or64(b'f^{__CTFont=}', b'd^{__CTFont=}'),), 'CTFontManagerRegisterGraphicsFont': (b'B^{CGFont=}^^{__CFError=}', '', {'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CTFontCollectionCopyQueryDescriptors': (b'^{__CFArray=}^{__CTFontCollection=}', '', {'retval': {'already_cfretained': True}}), 'CTTypesetterCreateLine': (sel32or64(b'^{__CTLine=}^{__CTTypesetter=}{_CFRange=ll}', b'^{__CTLine=}^{__CTTypesetter=}{_CFRange=qq}'), '', {'retval': {'already_cfretained': True}}), 'CTFontGetDescent': (sel32or64(b'f^{__CTFont=}', b'd^{__CTFont=}'),), 'CTFontCreateWithFontDescriptor': (sel32or64(b'^{__CTFont=}^{__CTFontDescriptor=}f^{CGAffineTransform=ffffff}', b'^{__CTFont=}^{__CTFontDescriptor=}d^{CGAffineTransform=dddddd}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'n'}}}), 'CTRunGetAttributes': (b'^{__CFDictionary=}^{__CTRun=}',), 'CTFontCopySupportedLanguages': (b'^{__CFArray=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCopyVariationAxes': (b'^{__CFArray=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTTextTabGetLocation': (b'd^{__CTTextTab=}',), 'CTFontCopyPostScriptName': (b'^{__CFString=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCopyDefaultCascadeListForLanguages': (b'^{__CFArray=}^{__CTFont=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CTFontGetVerticalTranslationsForGlyphs': (sel32or64(b'v^{__CTFont=}^S^{_NSSize=ff}l', b'v^{__CTFont=}^S^{CGSize=dd}q'), '', {'arguments': {1: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}, 2: 
{'c_array_length_in_arg': 3, 'type_modifier': 'o'}}}), 'CTFontGetTypeID': (sel32or64(b'L', b'Q'),), 'CTFontCollectionCreateMatchingFontDescriptorsWithOptions': (b'^{__CFArray=}^{__CTFontCollection=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CTFramesetterCreateWithAttributedString': (b'^{__CTFramesetter=}^{__CFAttributedString=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCreatePathForGlyph': (sel32or64(b'^{CGPath=}^{__CTFont=}S^{CGAffineTransform=ffffff}', b'^{CGPath=}^{__CTFont=}S^{CGAffineTransform=dddddd}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'n'}}}), 'CTFrameDraw': (b'v^{__CTFrame=}^{CGContext=}',), 'CTFontCollectionCopyExclusionDescriptors': (b'^{__CFArray=}^{__CTFontCollection=}', '', {'retval': {'already_cfretained': True}}), 'CTFontManagerGetAutoActivationSetting': (b'I^{__CFString=}',), 'CTFontGetOpticalBoundsForGlyphs': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{__CTFont=}^S^{_NSRect={_NSPoint=ff}{_NSSize=ff}}lL', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{__CTFont=}^S^{CGRect={CGPoint=dd}{CGSize=dd}}qQ'), '', {'arguments': {1: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}, 2: {'c_array_length_in_arg': 3, 'type_modifier': 'o'}}}), 'CTFontDescriptorCopyAttribute': (b'@^{__CTFontDescriptor=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCreateWithFontDescriptorAndOptions': (sel32or64(b'^{__CTFont=}^{__CTFontDescriptor=}f^{CGAffineTransform=ffffff}L', b'^{__CTFont=}^{__CTFontDescriptor=}d^{CGAffineTransform=dddddd}Q'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'n'}}}), 'CTFontGetMatrix': (sel32or64(b'{CGAffineTransform=ffffff}^{__CTFont=}', b'{CGAffineTransform=dddddd}^{__CTFont=}'),), 'CTFontGetSymbolicTraits': (b'I^{__CTFont=}',), 'CTFontCreateCopyWithAttributes': (sel32or64(b'^{__CTFont=}^{__CTFont=}f^{CGAffineTransform=ffffff}^{__CTFontDescriptor=}', b'^{__CTFont=}^{__CTFont=}d^{CGAffineTransform=dddddd}^{__CTFontDescriptor=}'), '', {'retval': {'already_cfretained': True}}), 'CTRubyAnnotationGetSizeFactor': (sel32or64(b'f^{__CTRubyAnnotation=}', b'd^{__CTRubyAnnotation=}'),), 'CTFontCollectionCopyFontAttribute': (b'^{__CFArray=}^{__CTFontCollection=}^{__CFString=}I', '', {'retval': {'already_cfretained': True}}), 'CTFontCopyFamilyName': (b'^{__CFString=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTGlyphInfoGetTypeID': (sel32or64(b'L', b'Q'),), 'CTParagraphStyleCreate': (sel32or64(b'^{__CTParagraphStyle=}^{CTParagraphStyleSetting=II^v}L', b'^{__CTParagraphStyle=}^{CTParagraphStyleSetting=IQ^v}Q'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'CTRunGetImageBounds': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{__CTRun=}^{CGContext=}{_CFRange=ll}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{__CTRun=}^{CGContext=}{_CFRange=qq}'),), 'CTFontManagerIsSupportedFont': (b'B^{__CFURL=}',), 'CTRunGetAdvancesPtr': (sel32or64(b'^{CGSize=ff}^{__CTRun=}', b'^{CGSize=dd}^{__CTRun=}'), '', {'retval': {'c_array_of_variable_length': True}}), 'CTRunGetStatus': (b'I^{__CTRun=}',), 'CTGlyphInfoGetCharacterIdentifier': (b'S^{__CTGlyphInfo=}',), 'CTFontGetUnitsPerEm': (b'I^{__CTFont=}',), 'CTFontCopyVariation': (b'^{__CFDictionary=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTFrameGetFrameAttributes': (b'^{__CFDictionary=}^{__CTFrame=}',), 'CTFramesetterCreateWithTypesetter': (b'@@', '', {'retval': {'already_cfretained': True}}), 'CTTextTabCreate': 
(b'^{__CTTextTab=}Cd^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCollectionSetExclusionDescriptors': (b'v^{__CTFontCollection=}^{__CFArray=}',), 'CTFrameGetPath': (b'^{CGPath=}^{__CTFrame=}',), 'CTFrameGetTypeID': (sel32or64(b'L', b'Q'),), 'CTFramesetterGetTypeID': (sel32or64(b'L', b'Q'),), 'CTFontCollectionCreateFromAvailableFonts': (b'^{__CTFontCollection=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CTRunGetGlyphsPtr': (b'r^S^{__CTRun=}', '', {'retval': {'c_array_of_variable_length': True}}), 'CTFontDrawGlyphs': (sel32or64(b'v^{__CTFont=}^S^{CGPoint=ff}L^{CGContext=}', b'v^{__CTFont=}^S^{CGPoint=dd}Q^{CGContext=}'), '', {'arguments': {1: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}, 2: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CTFontGetGlyphCount': (sel32or64(b'l^{__CTFont=}', b'q^{__CTFont=}'),), 'CTFontManagerCreateFontDescriptorFromData': (b'^{__CTFontDescriptor=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CTGlyphInfoGetCharacterCollection': (b'S^{__CTGlyphInfo=}',), 'CTFontCopyAttribute': (b'@^{__CTFont=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CTFontGetBoundingRectsForGlyphs': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{__CTFont=}I^S^{_NSRect={_NSPoint=ff}{_NSSize=ff}}l', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{__CTFont=}I^S^{CGRect={CGPoint=dd}{CGSize=dd}}q'), '', {'arguments': {2: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 4, 'type_modifier': 'o'}}}), 'CTFontGetBoundingBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{__CTFont=}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{__CTFont=}'),), 'CTTypesetterCreateWithAttributedStringAndOptions': (b'^{__CTTypesetter=}^{__CFAttributedString=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CTLineGetImageBounds': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{__CTLine=}^{CGContext=}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{__CTLine=}^{CGContext=}'),), 'CTFontCopyDisplayName': (b'^{__CFString=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTGetCoreTextVersion': (b'I',), 'CTParagraphStyleCreateCopy': (b'^{__CTParagraphStyle=}^{__CTParagraphStyle=}', '', {'retval': {'already_cfretained': True}}), 'CTFontGetAdvancesForGlyphs': (sel32or64(b'd^{__CTFont=}I^S^{_NSSize=ff}l', b'd^{__CTFont=}I^S^{CGSize=dd}q'), '', {'arguments': {2: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 4, 'type_modifier': 'o'}}}), 'CTTextTabGetOptions': (b'^{__CFDictionary=}^{__CTTextTab=}',), 'CTGlyphInfoCreateWithGlyph': (b'^{__CTGlyphInfo=}S^{__CTFont=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCreateWithPlatformFont': (sel32or64(b'^{__CTFont=}Lf^{CGAffineTransform=ffffff}^{__CTFontDescriptor=}', b'^{__CTFont=}Id^{CGAffineTransform=dddddd}^{__CTFontDescriptor=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'n'}}}), 'CTFontManagerUnregisterGraphicsFont': (b'B^{CGFont=}^^{__CFError=}', '', {'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CTRubyAnnotationCreateCopy': (b'^{__CTRubyAnnotation=}^{__CTRubyAnnotation=}', '', {'retval': {'already_cfretained': True}}), 'CTTypesetterSuggestClusterBreakWithOffset': (sel32or64(b'l^{__CTTypesetter=}ldd', b'q^{__CTTypesetter=}qdd'),), 'CTRunGetTypeID': (sel32or64(b'L', b'Q'),), 'CTRubyAnnotationGetTextForPosition': (b'^{__CFString=}^{__CTRubyAnnotation=}C',), 'CTLineGetTypographicBounds': 
(sel32or64(b'd^{__CTLine=}^f^f^f', b'd^{__CTLine=}^d^d^d'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CTFontGetPlatformFont': (sel32or64(b'L^{__CTFont=}^^{__CTFontDescriptor}', b'I^{__CTFont=}^^{__CTFontDescriptor}'), '', {'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CTLineGetTrailingWhitespaceWidth': (b'd^{__CTLine=}',), 'CTFontManagerRegisterFontsForURL': (b'B^{__CFURL=}I^^{__CFError=}', '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CTFontCopyTable': (sel32or64(b'^{__CFData=}^{__CTFont=}LI', b'^{__CFData=}^{__CTFont=}II'), '', {'retval': {'already_cfretained': True}}), 'CTTypesetterSuggestLineBreakWithOffset': (sel32or64(b'l^{__CTTypesetter=}ldd', b'q^{__CTTypesetter=}qdd'),), 'CTGlyphInfoCreateWithCharacterIdentifier': (b'^{__CTGlyphInfo=}SS^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCopyCharacterSet': (b'^{__CFCharacterSet=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTFontGetStringEncoding': (sel32or64(b'L^{__CTFont=}', b'I^{__CTFont=}'),), 'CTRunGetAdvances': (sel32or64(b'v^{__CTRun=}{_CFRange=ll}^{_NSSize=ff}', b'v^{__CTRun=}{_CFRange=qq}^{CGSize=dd}'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'CTFontCollectionCreateMatchingFontDescriptorsSortedWithCallback': (b'^{__CFArray=}^{__CTFontCollection=}^?@', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{__CTFontDescriptor=}'}, 1: {'type': b'^{__CTFontDescriptor=}'}, 2: {'type': b'@'}}}, 'callable_retained': False}}}), 'CTFontCopyFullName': (b'^{__CFString=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTParagraphStyleGetValueForSpecifier': (sel32or64(b'B^{__CTParagraphStyle=}IL^v', b'B^{__CTParagraphStyle=}IQ^v'), '', {'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'o'}}}), 'CTLineGetOffsetForStringIndex': (sel32or64(b'f^{__CTLine=}l^f', b'd^{__CTLine=}q^d'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CTFontManagerEnableFontDescriptors': (b'v^{__CFArray=}B',), 'CTRubyAnnotationGetAlignment': (b'C^{__CTRubyAnnotation=}',), 'CTFontCopyLocalizedName': (b'^{__CFString=}^{__CTFont=}^{__CFString=}^^{__CFString}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'o'}}}), 'CTFontDescriptorCreateCopyWithFamily': (b'^{__CTFontDescriptor=}^{__CTFontDescriptor=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CTFontManagerGetScopeForURL': (b'I^{__CFURL=}',), 'CTFontGetSize': (sel32or64(b'f^{__CTFont=}', b'd^{__CTFont=}'),), 'CTFontCollectionGetTypeID': (sel32or64(b'L', b'Q'),), 'CTFontGetGlyphWithName': (b'S^{__CTFont=}^{__CFString=}',), 'CTLineGetGlyphRuns': (b'^{__CFArray=}^{__CTLine=}',), 'CTFontCreateWithNameAndOptions': (sel32or64(b'^{__CTFont=}^{__CFString=}f^{CGAffineTransform=ffffff}L', b'^{__CTFont=}^{__CFString=}d^{CGAffineTransform=dddddd}Q'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'n'}}}), 'CTFontDescriptorCreateCopyWithAttributes': (b'^{__CTFontDescriptor=}^{__CTFontDescriptor=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCopyFontDescriptor': (b'^{__CTFontDescriptor=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTFontGetCapHeight': (sel32or64(b'f^{__CTFont=}', b'd^{__CTFont=}'),), 'CTFontGetUnderlineThickness': (sel32or64(b'f^{__CTFont=}', 
b'd^{__CTFont=}'),), 'CTFontManagerCopyAvailableFontURLs': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCopyFeatureSettings': (b'^{__CFArray=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTFontDescriptorCreateMatchingFontDescriptor': (b'^{__CTFontDescriptor=}^{__CTFontDescriptor=}^{__CFSet=}', '', {'retval': {'already_cfretained': True}}), 'CTLineGetGlyphCount': (sel32or64(b'l^{__CTLine=}', b'q^{__CTLine=}'),), 'CTLineDraw': (b'v^{__CTLine=}^{CGContext=}',), 'CTFontDescriptorCreateCopyWithFeature': (b'^{__CTFontDescriptor=}^{__CTFontDescriptor=}^{__CFNumber=}^{__CFNumber=}', '', {'retval': {'already_cfretained': True}}), 'CTRubyAnnotationGetTypeID': (sel32or64(b'L', b'Q'),), 'CTTypesetterGetTypeID': (sel32or64(b'L', b'Q'),), 'CTRunGetTextMatrix': (sel32or64(b'{CGAffineTransform=ffffff}^{__CTRun=}', b'{CGAffineTransform=dddddd}^{__CTRun=}'),), 'CTFontGetLigatureCaretPositions': (sel32or64(b'l^{__CTFont=}S^fl', b'q^{__CTFont=}S^dq'), '', {'arguments': {2: {'c_array_length_in_arg': 3, 'type_modifier': 'o'}}}), 'CTFontCollectionCreateMutableCopy': (b'^{__CTFontCollection=}^{__CTFontCollection=}', '', {'retval': {'already_cfretained': True}}), 'CTFontDescriptorCreateWithNameAndSize': (sel32or64(b'^{__CTFontDescriptor=}^{__CFString=}f', b'^{__CTFontDescriptor=}^{__CFString=}d'), '', {'retval': {'already_cfretained': True}}), 'CTLineGetStringRange': (sel32or64(b'{_CFRange=ll}^{__CTLine=}', b'{_CFRange=qq}^{__CTLine=}'),), 'CTFontManagerCopyAvailablePostScriptNames': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CTRunDelegateGetRefCon': (b'^v^{__CTRunDelegate=}',), 'CTLineCreateJustifiedLine': (sel32or64(b'^{__CTLine=}^{__CTLine=}fd', b'^{__CTLine=}^{__CTLine=}dd'), '', {'retval': {'already_cfretained': True}}), 'CTFrameGetLines': (b'^{__CFArray=}^{__CTFrame=}',), 'CTFontCollectionCreateCopyWithFontDescriptors': (b'^{__CTFontCollection=}^{__CTFontCollection=}^{__CFArray=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CTRunGetGlyphCount': (sel32or64(b'l^{__CTRun=}', b'q^{__CTRun=}'),), 'CTFontDescriptorCreateMatchingFontDescriptors': (b'^{__CFArray=}^{__CTFontDescriptor=}^{__CFSet=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCollectionSetQueryDescriptors': (b'v^{__CTFontCollection=}^{__CFArray=}',), 'CTFontDescriptorCopyLocalizedAttribute': (b'@^{__CTFontDescriptor=}^{__CFString=}^^{__CFString}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'o'}}}), 'CTFrameGetStringRange': (sel32or64(b'{_CFRange=ll}^{__CTFrame=}', b'{_CFRange=qq}^{__CTFrame=}'),), 'CTFrameGetLineOrigins': (sel32or64(b'v^{__CTFrame=}{_CFRange=ll}^{_NSPoint=ff}', b'v^{__CTFrame=}{_CFRange=qq}^{CGPoint=dd}'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'CTFontCreateWithName': (sel32or64(b'^{__CTFont=}^{__CFString=}f^{CGAffineTransform=ffffff}', b'^{__CTFont=}^{__CFString=}d^{CGAffineTransform=dddddd}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'n'}}}), 'CTFramesetterGetTypesetter': (b'^{__CTTypesetter=}^{__CTFramesetter=}',), 'CTGlyphInfoCreateWithGlyphName': (b'^{__CTGlyphInfo=}^{__CFString=}^{__CTFont=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CTFontDescriptorCreateCopyWithSymbolicTraits': (b'^{__CTFontDescriptor=}^{__CTFontDescriptor=}II', '', {'retval': {'already_cfretained': True}}), 'CTLineGetBoundsWithOptions': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{__CTLine=}L', 
b'{CGRect={CGPoint=dd}{CGSize=dd}}^{__CTLine=}Q'),), 'CTFontCopyGraphicsFont': (b'^{CGFont=}^{__CTFont=}^^{__CTFontDescriptor}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CTFontCollectionCreateMatchingFontDescriptorsForFamily': (b'^{__CFArray=}^{__CTFontCollection=}^{__CFString=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CTFontGetXHeight': (sel32or64(b'f^{__CTFont=}', b'd^{__CTFont=}'),), 'CTRunGetPositions': (sel32or64(b'v^{__CTRun=}{_CFRange=ll}^{_NSPoint=ff}', b'v^{__CTRun=}{_CFRange=qq}^{CGPoint=dd}'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'CTFontDescriptorCreateCopyWithVariation': (sel32or64(b'^{__CTFontDescriptor=}^{__CTFontDescriptor=}^{__CFNumber=}f', b'^{__CTFontDescriptor=}^{__CTFontDescriptor=}^{__CFNumber=}d'), '', {'retval': {'already_cfretained': True}}), 'CTFontDescriptorCreateWithAttributes': (b'^{__CTFontDescriptor=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CTFontDescriptorGetTypeID': (sel32or64(b'L', b'Q'),), 'CTFontCollectionCreateMatchingFontDescriptors': (b'^{__CFArray=}^{__CTFontCollection=}', '', {'retval': {'already_cfretained': True}}), 'CTTextTabGetTypeID': (sel32or64(b'L', b'Q'),), 'CTFontManagerUnregisterFontsForURLs': (b'B^{__CFArray=}I^^{__CFArray=}', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CTFontCreateCopyWithSymbolicTraits': (sel32or64(b'^{__CTFont=}^{__CTFont=}f^{CGAffineTransform=ffffff}II', b'^{__CTFont=}^{__CTFont=}d^{CGAffineTransform=dddddd}II'), '', {'retval': {'already_cfretained': True}}), 'CTFontCopyTraits': (b'^{__CFDictionary=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTRunDraw': (sel32or64(b'v^{__CTRun=}^{CGContext=}{_CFRange=ll}', b'v^{__CTRun=}^{CGContext=}{_CFRange=qq}'),), 'CTLineGetStringIndexForPosition': (sel32or64(b'l^{__CTLine=}{CGPoint=ff}', b'q^{__CTLine=}{CGPoint=dd}'),), 'CTFontDescriptorCopyAttributes': (b'^{__CFDictionary=}^{__CTFontDescriptor=}', '', {'retval': {'already_cfretained': True}}), 'CTFontGetLeading': (sel32or64(b'f^{__CTFont=}', b'd^{__CTFont=}'),), 'CTRunGetGlyphs': (sel32or64(b'v^{__CTRun=}{_CFRange=ll}^S', b'v^{__CTRun=}{_CFRange=qq}^S'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}}}), 'CTFontCollectionCreateWithFontDescriptors': (b'^{__CTFontCollection=}^{__CFArray=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CTRunDelegateCreate': (sel32or64(b'^{__CTRunDelegate=}^{_CTRunDelegateCallbacks=l^?^?^?^?}^v', b'^{__CTRunDelegate=}^{_CTRunDelegateCallbacks=q^?^?^?^?}^v'), '', {'retval': {'already_cfretained': True}}), 'CTTypesetterCreateLineWithOffset': (sel32or64(b'^{__CTLine=}^{__CTTypesetter=}{_CFRange=ll}d', b'^{__CTLine=}^{__CTTypesetter=}{_CFRange=qq}d'), '', {'retval': {'already_cfretained': True}}), 'CTFontGetUnderlinePosition': (sel32or64(b'f^{__CTFont=}', b'd^{__CTFont=}'),), 'CTRunGetTypographicBounds': (sel32or64(b'd^{__CTRun=}{_CFRange=ll}^f^f^f', b'd^{__CTRun=}{_CFRange=qq}^d^d^d'), '', {'arguments': {2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'CTTypesetterCreateWithAttributedString': (b'^{__CTTypesetter=}^{__CFAttributedString=}', '', {'retval': {'already_cfretained': True}}), 'CTLineCreateWithAttributedString': (b'^{__CTLine=}^{__CFAttributedString=}', '', {'retval': {'already_cfretained': True}}), 'CTTextTabGetAlignment': (b'C^{__CTTextTab=}',), 'CTFontCopyName': (b'^{__CFString=}^{__CTFont=}^{__CFString=}', '', 
{'retval': {'already_cfretained': True}}), 'CTFontGetSlantAngle': (sel32or64(b'f^{__CTFont=}', b'd^{__CTFont=}'),), 'CTFramesetterSuggestFrameSizeWithConstraints': (sel32or64(b'{CGSize=ff}^{__CTTypesetter=}{_CFRange=ll}@{CGSize=ff}^{_CFRange=ll}', b'{CGSize=dd}^{__CTTypesetter=}{_CFRange=qq}@{CGSize=dd}^{_CFRange=qq}'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'CTFontCollectionCopyFontAttributes': (b'^{__CFArray=}^{__CTFontCollection=}^{__CFSet=}I', '', {'retval': {'already_cfretained': True}}), 'CTFontManagerRegisterFontsForURLs': (b'B^{__CFArray=}I^^{__CFArray=}', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CTRubyAnnotationGetOverhang': (b'C^{__CTRubyAnnotation=}',), 'CTFontCopyFeatures': (b'^{__CFArray=}^{__CTFont=}', '', {'retval': {'already_cfretained': True}}), 'CTFontCreateForString': (sel32or64(b'^{__CTFont=}^{__CTFont=}^{__CFString=}{_CFRange=ll}', b'^{__CTFont=}^{__CTFont=}^{__CFString=}{_CFRange=qq}'), '', {'retval': {'already_cfretained': True}}), 'CTGlyphInfoGetGlyphName': (b'^{__CFString=}^{__CTGlyphInfo=}',), 'CTParagraphStyleGetTypeID': (sel32or64(b'L', b'Q'),), 'CTFontCreateUIFontForLanguage': (sel32or64(b'^{__CTFont=}If^{__CFString=}', b'^{__CTFont=}Id^{__CFString=}'), '', {'retval': {'already_cfretained': True}}), 'CTFontManagerCopyAvailableFontFamilyNames': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CTFrameGetVisibleStringRange': (sel32or64(b'{_CFRange=ll}^{__CTFrame=}', b'{_CFRange=qq}^{__CTFrame=}'),), 'CTLineGetTypeID': (sel32or64(b'L', b'Q'),), 'CTRunGetPositionsPtr': (sel32or64(b'r^{_NSPoint=ff}^{__CTRun=}', b'r^{CGPoint=dd}^{__CTRun=}'), '', {'retval': {'c_array_of_variable_length': True}})} -aliases = {'kCTFontItalicTrait': 'kCTFontTraitItalic', 'kCTFontMessageFontType': 'kCTFontUIFontMessage', 'kCTNaturalTextAlignment': 'kCTTextAlignmentNatural', 'kCTFontDefaultOrientation': 'kCTFontOrientationDefault', 'kCTFontVerticalTrait': 'kCTFontTraitVertical', 'kFontChineseScript': 'kFontTraditionalChineseScript', 'kCTFontToolbarFontType': 'kCTFontUIFontToolbar', 'kCTFontClarendonSerifsClass': 'kCTFontClassClarendonSerifs', 'kCTFontCondensedTrait': 'kCTFontTraitCondensed', 'kCTIdentityMappingCharacterCollection': 'kCTCharacterCollectionIdentityMapping', 'kFontEthiopicScript': 'kFontGeezScript', 'kCTFontEmphasizedSystemFontType': 'kCTFontUIFontEmphasizedSystem', 'kCTFontSlabSerifsClass': 'kCTFontClassSlabSerifs', 'CT_AVAILABLE_BUT_DEPRECATED': '__OSX_AVAILABLE_BUT_DEPRECATED', 'CT_AVAILABLE_STARTING': '__OSX_AVAILABLE_STARTING', 'kCTFontVerticalOrientation': 'kCTFontOrientationVertical', 'kCTFontEmphasizedSystemDetailFontType': 'kCTFontUIFontEmphasizedSystemDetail', 'kCTFontWindowTitleFontType': 'kCTFontUIFontWindowTitle', 'kCTFontOldStyleSerifsClass': 'kCTFontClassOldStyleSerifs', 'kCTFontExpandedTrait': 'kCTFontTraitExpanded', 'kCTAdobeGB1CharacterCollection': 'kCTCharacterCollectionAdobeGB1', 'kCTFontUtilityWindowTitleFontType': 'kCTFontUIFontUtilityWindowTitle', 'kCTFontColorGlyphsTrait': 'kCTFontTraitColorGlyphs', 'kCTFontUserFontType': 'kCTFontUIFontUser', 'kCTFontModernSerifsClass': 'kCTFontClassModernSerifs', 'kCTFontMiniEmphasizedSystemFontType': 'kCTFontUIFontMiniEmphasizedSystem', 'kCTFontApplicationFontType': 'kCTFontUIFontApplication', 'CT_DEPRECATED_ENUMERATOR': '__CT_DEPRECATED_ENUMERATOR', 'kCTFontScriptsClass': 'kCTFontClassScripts', 'kCTFontFreeformSerifsClass': 'kCTFontClassFreeformSerifs', 'kCTFontMiniSystemFontType': 'kCTFontUIFontMiniSystem', 'kCTFontSystemDetailFontType': 'kCTFontUIFontSystemDetail', 
'kCTFontMenuItemMarkFontType': 'kCTFontUIFontMenuItemMark', 'kFontSindhiScript': 'kFontExtendedArabicScript', 'kCTRunDelegateCurrentVersion': 'kCTRunDelegateVersion1', 'kCTFontOrnamentalsClass': 'kCTFontClassOrnamentals', 'kCTFontPaletteFontType': 'kCTFontUIFontPalette', 'kCTFontControlContentFontType': 'kCTFontUIFontControlContent', 'kCTFontMenuTitleFontType': 'kCTFontUIFontMenuTitle', 'kFontRussian': 'kFontCyrillicScript', 'kCTFontToolTipFontType': 'kCTFontUIFontToolTip', 'kCTFontTransitionalSerifsClass': 'kCTFontClassTransitionalSerifs', 'kCTFontLabelFontType': 'kCTFontUIFontLabel', 'kCTLeftTextAlignment': 'kCTTextAlignmentLeft', 'kCTAdobeKorea1CharacterCollection': 'kCTCharacterCollectionAdobeKorea1', 'kCTFontNoFontType': 'kCTFontUIFontNone', 'kCTFontUserFixedPitchFontType': 'kCTFontUIFontUserFixedPitch', 'kCTCenterTextAlignment': 'kCTTextAlignmentCenter', 'kCTAdobeJapan2CharacterCollection': 'kCTCharacterCollectionAdobeJapan2', 'kCTFontSmallSystemFontType': 'kCTFontUIFontSmallSystem', 'kCTFontMonoSpaceTrait': 'kCTFontTraitMonoSpace', 'kFontLatvianLanguage': 'kFontLettishLanguage', 'kCTFontSansSerifClass': 'kCTFontClassSansSerif', 'kCTJustifiedTextAlignment': 'kCTTextAlignmentJustified', 'kFontPersianLanguage': 'kFontFarsiLanguage', 'kCTFontAlertHeaderFontType': 'kCTFontUIFontAlertHeader', 'kCTFontBoldTrait': 'kCTFontTraitBold', 'kFontLappishLanguage': 'kFontSaamiskLanguage', 'kCTFontSmallEmphasizedSystemFontType': 'kCTFontUIFontSmallEmphasizedSystem', 'kCTFontSymbolicClass': 'kCTFontClassSymbolic', 'kCTFontMenuItemCmdKeyFontType': 'kCTFontUIFontMenuItemCmdKey', 'kCTAdobeCNS1CharacterCollection': 'kCTCharacterCollectionAdobeCNS1', 'kCTFontCompositeTrait': 'kCTFontTraitComposite', 'kCTFontUnknownClass': 'kCTFontClassUnknown', 'kCTFontUIOptimizedTrait': 'kCTFontTraitUIOptimized', 'kCTFontClassMaskTrait': 'kCTFontTraitClassMask', 'kCTFontMenuItemFontType': 'kCTFontUIFontMenuItem', 'kCTAdobeJapan1CharacterCollection': 'kCTCharacterCollectionAdobeJapan1', 'kCTFontPushButtonFontType': 'kCTFontUIFontPushButton', 'kCTFontSystemFontType': 'kCTFontUIFontSystem', 'kFontEastEuropeanRomanScript': 'kFontSlavicScript', 'kCTFontSmallToolbarFontType': 'kCTFontUIFontSmallToolbar', 'kCTFontHorizontalOrientation': 'kCTFontOrientationHorizontal', 'kFontOromoLanguage': 'kFontGallaLanguage', 'kCTRightTextAlignment': 'kCTTextAlignmentRight', 'kFontAmharicScript': 'kFontGeezScript', 'kCTFontViewsFontType': 'kCTFontUIFontViews'} -cftypes=[('CTFontCollectionRef', b'^{__CTFontCollection=}', 'CTFontCollectionGetTypeID', 'NSCTFontCollection'), ('CTFontDescriptorRef', b'^{__CTFontDescriptor=}', 'CTFontDescriptorGetTypeID', 'NSCTFontDescriptor'), ('CTFontRef', b'^{__CTFont=}', 'CTFontGetTypeID', 'NSCTFont'), ('CTFrameRef', b'^{__CTFrame=}', 'CTFrameGetTypeID', None), ('CTFramesetterRef', b'^{__CTFramesetter=}', 'CTFramesetterGetTypeID', None), ('CTGlyphInfoRef', b'^{__CTGlyphInfo=}', 'CTGlyphInfoGetTypeID', 'NSCTGlyphInfo'), ('CTLineRef', b'^{__CTLine=}', 'CTLineGetTypeID', None), ('CTParagraphStyleRef', b'^{__CTParagraphStyle=}', 'CTParagraphStyleGetTypeID', None), ('CTRubyAnnotationRef', b'^{__CTRubyAnnotation=}', None, None), ('CTRunDelegateRef', b'^{__CTRunDelegate=}', 'CTRunDelegateGetTypeID', None), ('CTRunRef', b'^{__CTRun=}', 'CTRunGetTypeID', None), ('CTTextTabRef', b'^{__CTTextTab=}', 'CTTextTabGetTypeID', None), ('CTTypesetterRef', b'^{__CTTypesetter=}', 'CTTypesetterGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreText/_metadata.pyc 
b/env/lib/python2.7/site-packages/CoreText/_metadata.pyc deleted file mode 100644 index 3eda8ef5..00000000 Binary files a/env/lib/python2.7/site-packages/CoreText/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreWLAN/_CoreWLAN.so b/env/lib/python2.7/site-packages/CoreWLAN/_CoreWLAN.so deleted file mode 100755 index cdf22d0c..00000000 Binary files a/env/lib/python2.7/site-packages/CoreWLAN/_CoreWLAN.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreWLAN/__init__.py b/env/lib/python2.7/site-packages/CoreWLAN/__init__.py deleted file mode 100644 index 939d48da..00000000 --- a/env/lib/python2.7/site-packages/CoreWLAN/__init__.py +++ /dev/null @@ -1,114 +0,0 @@ -''' -Python mapping for the CoreWLAN framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from CoreWLAN import _metadata, _CoreWLAN - -def _CW8021XProfile__eq__(self, other): - if not isinstance(other, type(self)): - return False - - return self.isEqualToProfile_(other) - -def _CW8021XProfile__ne__(self, other): - if not isinstance(other, type(self)): - return True - - return not self.isEqualToProfile_(other) - -objc.addConvenienceForClass('CW8021XProfile', ( - ('__eq__', _CW8021XProfile__eq__), - ('__ne__', _CW8021XProfile__ne__), -)) - - -def _CWChannel__eq__(self, other): - if not isinstance(other, type(self)): - return False - - return self.isEqualToChannel_(other) - -def _CWChannel__ne__(self, other): - if not isinstance(other, type(self)): - return True - - return not self.isEqualToChannel_(other) - -objc.addConvenienceForClass('CWChannel', ( - ('__eq__', _CWChannel__eq__), - ('__ne__', _CWChannel__ne__), -)) - -def _CWConfiguration__eq__(self, other): - if not isinstance(other, type(self)): - return False - - return self.isEqualToConfiguration_(other) - -def _CWConfiguration__ne__(self, other): - if not isinstance(other, type(self)): - return True - - return not self.isEqualToConfiguration_(other) - -objc.addConvenienceForClass('CWConfiguration', ( - ('__eq__', _CWConfiguration__eq__), - ('__ne__', _CWConfiguration__ne__), -)) - -def _CWNetwork__eq__(self, other): - if not isinstance(other, type(self)): - return False - - return self.isEqualToNetwork_(other) - -def _CWNetwork__ne__(self, other): - if not isinstance(other, type(self)): - return True - - return not self.isEqualToNetwork_(other) - -objc.addConvenienceForClass('CWNetwork', ( - ('__eq__', _CWNetwork__eq__), - ('__ne__', _CWNetwork__ne__), -)) - -def _CWNetworkProfile__eq__(self, other): - if not isinstance(other, type(self)): - return False - - return self.isEqualToNetworkProfile_(other) - -def _CWNetworkProfile__ne__(self, other): - if not isinstance(other, type(self)): - return True - - return not self.isEqualToNetworkProfile_(other) - -objc.addConvenienceForClass('CWNetworkProfile', ( - ('__eq__', _CWNetworkProfile__eq__), - ('__ne__', _CWNetworkProfile__ne__), -)) - - -sys.modules['CoreWLAN'] = mod = objc.ObjCLazyModule( - "CoreWLAN", - "com.apple.framework.CoreWLAN", - objc.pathForFramework("/System/Library/Frameworks/CoreWLAN.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['CoreWLAN._metadata'] -del sys.modules['CoreWLAN._CoreWLAN'] diff --git 
a/env/lib/python2.7/site-packages/CoreWLAN/__init__.pyc b/env/lib/python2.7/site-packages/CoreWLAN/__init__.pyc deleted file mode 100644 index 430ee1aa..00000000 Binary files a/env/lib/python2.7/site-packages/CoreWLAN/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CoreWLAN/_metadata.py b/env/lib/python2.7/site-packages/CoreWLAN/_metadata.py deleted file mode 100644 index 8f15d049..00000000 --- a/env/lib/python2.7/site-packages/CoreWLAN/_metadata.py +++ /dev/null @@ -1,114 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sat Jul 22 13:49:51 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$CWBSSIDDidChangeNotification$CWCountryCodeDidChangeNotification$CWErrorDomain$CWLinkDidChangeNotification$CWLinkQualityDidChangeNotification$CWLinkQualityNotificationRSSIKey$CWLinkQualityNotificationTransmitRateKey$CWModeDidChangeNotification$CWPowerDidChangeNotification$CWSSIDDidChangeNotification$CWScanCacheDidUpdateNotification$CWServiceDidChangeNotification$CoreWLANFrameworkVersionNumber@d$kCWAssocKey8021XProfile$kCWAssocKeyPassphrase$kCWBSSIDDidChangeNotification$kCWCountryCodeDidChangeNotification$kCWErrorDomain$kCWIBSSKeyChannel$kCWIBSSKeyPassphrase$kCWIBSSKeySSID$kCWLinkDidChangeNotification$kCWModeDidChangeNotification$kCWPowerDidChangeNotification$kCWSSIDDidChangeNotification$kCWScanKeyBSSID$kCWScanKeyDwellTime$kCWScanKeyMerge$kCWScanKeyRestTime$kCWScanKeySSID$kCWScanKeyScanType$''' -enums = '''$CWEventTypeBSSIDDidChange@3$CWEventTypeCountryCodeDidChange@4$CWEventTypeLinkDidChange@5$CWEventTypeLinkQualityDidChange@6$CWEventTypeModeDidChange@7$CWEventTypeNone@0$CWEventTypePowerDidChange@1$CWEventTypeRangingReportEvent@10$CWEventTypeSSIDDidChange@2$CWEventTypeScanCacheUpdated@8$CWEventTypeVirtualInterfaceStateChanged@9$CoreWLANFrameworkVersionNumber2_0@200$kCWAPFullErr@-3913$kCWAssociationDeniedErr@-3909$kCWAuthAlgUnsupportedErr@-3910$kCWAuthenticationAlgorithmUnsupportedErr@-3910$kCWChallengeFailureErr@-3912$kCWChannelBand2GHz@1$kCWChannelBand5GHz@2$kCWChannelBandUnknown@0$kCWChannelWidth160MHz@4$kCWChannelWidth20MHz@1$kCWChannelWidth40MHz@2$kCWChannelWidth80MHz@3$kCWChannelWidthUnknown@0$kCWCipherKeyFlagsMulticast@4$kCWCipherKeyFlagsNone@0$kCWCipherKeyFlagsRx@16$kCWCipherKeyFlagsTx@8$kCWCipherKeyFlagsUnicast@2$kCWCipherSuiteRejectedErr@-3923$kCWDSSSOFDMUnsupportedErr@-3916$kCWEAPOLErr@1$kCWErr@-3931$kCWError@-3931$kCWFormatErr@-3904$kCWHTFeaturesNotSupported@-3926$kCWHTFeaturesNotSupportedErr@-3926$kCWIBSSModeSecurityNone@0$kCWIBSSModeSecurityWEP104@2$kCWIBSSModeSecurityWEP40@1$kCWIPCError@-3929$kCWIPCFailureErr@-3929$kCWInterfaceModeHostAP@3$kCWInterfaceModeIBSS@2$kCWInterfaceModeNone@0$kCWInterfaceModeStation@1$kCWInterfaceStateAssociating@3$kCWInterfaceStateAuthenticating@2$kCWInterfaceStateInactive@0$kCWInterfaceStateRunning@4$kCWInterfaceStateScanning@1$kCWInvalidAKMPErr@-3920$kCWInvalidAuthSeqNumErr@-3911$kCWInvalidAuthenticationSequenceNumberErr@-3911$kCWInvalidFormatErr@-3904$kCWInvalidGroupCipherErr@-3918$kCWInvalidInfoElementErr@-3917$kCWInvalidInformationElementErr@-3917$kCWInvalidPMKErr@-3924$kCWInvalidPairwiseCipherErr@-3919$kCWInvalidParameterErr@-3900$kCWInvalidRSNCapabilitiesErr@-3922$kCWKeychainDomainNone@0$kCWKeychainDomainSystem@2$kCWKeychainDomainUser@1$kCWNoErr@0$kCWNoMemErr@-3901$kCWNoMemoryErr@-3901$kCWNotSu
pportedErr@-3903$kCWOpModeHostAP@2$kCWOpModeIBSS@1$kCWOpModeMonitorMode@3$kCWOpModeStation@0$kCWOpNotPermitted@-3930$kCWOperationNotPermittedErr@-3930$kCWPCOTransitionTimeNotSupported@-3927$kCWPCOTransitionTimeNotSupportedErr@-3927$kCWPHYMode11A@0$kCWPHYMode11B@1$kCWPHYMode11G@2$kCWPHYMode11N@3$kCWPHYMode11a@1$kCWPHYMode11ac@5$kCWPHYMode11b@2$kCWPHYMode11g@3$kCWPHYMode11n@4$kCWPHYModeNone@0$kCWParamErr@-3900$kCWReassociationDeniedErr@-3908$kCWRefNotBoundErr@-3928$kCWReferenceNotBoundErr@-3928$kCWScanTypeActive@0$kCWScanTypeFast@2$kCWScanTypePassive@1$kCWSecurityDynamicWEP@6$kCWSecurityEnterprise@10$kCWSecurityModeDynamicWEP@4$kCWSecurityModeOpen@0$kCWSecurityModeWEP@1$kCWSecurityModeWPA2_Enterprise@6$kCWSecurityModeWPA2_PSK@3$kCWSecurityModeWPA_Enterprise@5$kCWSecurityModeWPA_PSK@2$kCWSecurityModeWPS@7$kCWSecurityNone@0$kCWSecurityPersonal@5$kCWSecurityWEP@1$kCWSecurityWPA2Enterprise@9$kCWSecurityWPA2Personal@4$kCWSecurityWPAEnterprise@7$kCWSecurityWPAEnterpriseMixed@8$kCWSecurityWPAPersonal@2$kCWSecurityWPAPersonalMixed@3$kCWShortSlotUnsupportedErr@-3915$kCWSupplicantTimeoutErr@-3925$kCWTimeoutErr@-3905$kCWUknownErr@-3902$kCWUnknownErr@-3902$kCWUnspecifiedFailureErr@-3906$kCWUnsupportedCapabilitiesErr@-3907$kCWUnsupportedRSNVersionErr@-3921$kCWUnsupportedRateSetErr@-3914$''' -misc.update({'kCWSecurityUnknown': sel32or64(2147483647, 9223372036854775807), 'CWEventTypeUnknown': sel32or64(2147483647, 9223372036854775807)}) -misc.update({}) -functions={'CWKeychainFindWiFiPassword': (sel32or64(b'li@^@', b'iq@^@'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CWKeychainSetEAPIdentity': (sel32or64(b'l^{__CFData=}^{OpaqueSecIdentityRef=}', b'i^{__CFData=}^{OpaqueSecIdentityRef=}'),), 'CWKeychainSetEAPUsernameAndPassword': (sel32or64(b'l^{__CFData=}^{__CFString=}^{__CFString=}', b'i^{__CFData=}^{__CFString=}^{__CFString=}'),), 'CWKeychainDeleteWiFiEAPUsernameAndPassword': (sel32or64(b'li@', b'iq@'),), 'CWKeychainDeletePassword': (sel32or64(b'l^{__CFData=}', b'i^{__CFData=}'),), 'CWKeychainCopyEAPIdentityList': (sel32or64(b'l^^{__CFArray=}', b'i^^{__CFArray=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CWKeychainCopyWiFiEAPIdentity': (sel32or64(b'li@^^{OpaqueSecIdentityRef=}', b'iq@^^{OpaqueSecIdentityRef=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CWKeychainSetWiFiEAPUsernameAndPassword': (sel32or64(b'li@@@', b'iq@@@'),), 'CWKeychainFindWiFiEAPUsernameAndPassword': (sel32or64(b'li@^@^@', b'iq@^@^@'), '', {'arguments': {2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CWKeychainCopyEAPUsernameAndPassword': (sel32or64(b'l^{__CFData=}^^{__CFString=}^^{__CFString=}', b'i^{__CFData=}^^{__CFString=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}, 2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CWKeychainSetPassword': (sel32or64(b'l^{__CFData=}^{__CFString=}', b'i^{__CFData=}^{__CFString=}'),), 'CWKeychainCopyPassword': (sel32or64(b'l^{__CFData=}^^{__CFString=}', b'i^{__CFData=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CWKeychainDeleteEAPUsernameAndPassword': (sel32or64(b'l^{__CFData=}', b'i^{__CFData=}'),), 'CWKeychainCopyEAPIdentity': (sel32or64(b'l^{__CFData=}^^{OpaqueSecIdentityRef=}', b'i^{__CFData=}^^{OpaqueSecIdentityRef=}'), '', {'retval': 
{'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CWKeychainDeleteWiFiPassword': (sel32or64(b'li@', b'iq@'),), 'CWMergeNetworks': (b'@@',), 'CWKeychainSetWiFiPassword': (sel32or64(b'li@@', b'iq@@'),), 'CWKeychainSetWiFiEAPIdentity': (sel32or64(b'li@^{OpaqueSecIdentityRef=}', b'iq@^{OpaqueSecIdentityRef=}'),)} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CW8021XProfile', b'alwaysPromptForPassword', {'retval': {'type': b'Z'}}) - r(b'CW8021XProfile', b'isEqualToProfile:', {'retval': {'type': b'Z'}}) - r(b'CW8021XProfile', b'setAlwaysPromptForPassword:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWChannel', b'isEqualToChannel:', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'alwaysRememberNetworks', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'disconnectOnLogout', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'isEqualToConfiguration:', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'rememberJoinedNetworks', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'requireAdminForIBSSCreation', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'requireAdminForNetworkChange', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'requireAdminForPowerChange', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'requireAdministratorForAssociation', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'requireAdministratorForIBSSMode', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'requireAdministratorForPower', {'retval': {'type': b'Z'}}) - r(b'CWConfiguration', b'setAlwaysRememberNetworks:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWConfiguration', b'setDisconnectOnLogout:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWConfiguration', b'setRequireAdminForIBSSCreation:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWConfiguration', b'setRequireAdminForNetworkChange:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWConfiguration', b'setRequireAdminForPowerChange:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWInterface', b'associateToEnterpriseNetwork:identity:username:password:error:', {'retval': {'type': b'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'associateToNetwork:parameters:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'associateToNetwork:password:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'commitConfiguration:authorization:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'commitConfiguration:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'deviceAttached', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'enableIBSSWithParameters:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'isEqualToInterface:', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'power', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'powerOn', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'powerSave', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'scanForNetworksWithName:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'scanForNetworksWithName:includeHidden:error:', {'arguments': {3: {'type': 'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'scanForNetworksWithParameters:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CWInterface', 
b'scanForNetworksWithSSID:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'scanForNetworksWithSSID:includeHidden:error:', {'arguments': {3: {'type': 'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'serviceActive', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'setChannel:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'setPairwiseMasterKey:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'setPower:error:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'Z'}, 3: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'setWEPKey:flags:index:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'setWLANChannel:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'startIBSSModeWithSSID:security:channel:password:error:', {'retval': {'type': b'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'CWInterface', b'supportsAES_CCM', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsHostAP', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsIBSS', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsMonitorMode', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsPMGT', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsShortGI20MHz', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsShortGI40MHz', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsTKIP', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsTSN', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsWEP', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsWME', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsWPA', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsWPA2', {'retval': {'type': b'Z'}}) - r(b'CWInterface', b'supportsWoW', {'retval': {'type': b'Z'}}) - r(b'CWMutableConfiguration', b'rememberJoinedNetworks', {'retval': {'type': b'Z'}}) - r(b'CWMutableConfiguration', b'requireAdministratorForAssociation', {'retval': {'type': b'Z'}}) - r(b'CWMutableConfiguration', b'requireAdministratorForIBSSMode', {'retval': {'type': b'Z'}}) - r(b'CWMutableConfiguration', b'requireAdministratorForPower', {'retval': {'type': b'Z'}}) - r(b'CWMutableConfiguration', b'setRememberJoinedNetworks:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWMutableConfiguration', b'setRequireAdministratorForAssociation:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWMutableConfiguration', b'setRequireAdministratorForIBSSMode:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWMutableConfiguration', b'setRequireAdministratorForPower:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CWNetwork', b'ibss', {'retval': {'type': b'Z'}}) - r(b'CWNetwork', b'isEqualToNetwork:', {'retval': {'type': b'Z'}}) - r(b'CWNetwork', b'isIBSS', {'retval': {'type': b'Z'}}) - r(b'CWNetwork', b'supportsPHYMode:', {'retval': {'type': b'Z'}}) - r(b'CWNetwork', b'supportsSecurity:', {'retval': {'type': b'Z'}}) - r(b'CWNetworkProfile', b'isEqualToNetworkProfile:', {'retval': {'type': b'Z'}}) - r(b'CWWiFiClient', b'startMonitoringEventWithType:error:', {'retval': {'type': b'Z'}}) - r(b'CWWiFiClient', b'stopMonitoringAllEventsAndReturnError:', {'retval': {'type': b'Z'}}) - r(b'CWWiFiClient', b'stopMonitoringEventWithType:error:', {'retval': {'type': b'Z'}}) - r(b'CWWirelessProfile', b'isEqualToProfile:', {'retval': {'type': b'Z'}}) - r(b'NSObject', 
b'bssidDidChangeForWiFiInterfaceWithName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'clientConnectionInterrupted', {'required': False, 'retval': {'type': b'v'}}) - r(b'NSObject', b'clientConnectionInvalidated', {'required': False, 'retval': {'type': b'v'}}) - r(b'NSObject', b'countryCodeDidChangeForWiFiInterfaceWithName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'linkDidChangeForWiFiInterfaceWithName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'linkQualityDidChangeForWiFiInterfaceWithName:rssi:transmitRate:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': b'd'}}}) - r(b'NSObject', b'modeDidChangeForWiFiInterfaceWithName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'powerStateDidChangeForWiFiInterfaceWithName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scanCacheUpdatedForWiFiInterfaceWithName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'ssidDidChangeForWiFiInterfaceWithName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CoreWLAN/_metadata.pyc b/env/lib/python2.7/site-packages/CoreWLAN/_metadata.pyc deleted file mode 100644 index e00b168a..00000000 Binary files a/env/lib/python2.7/site-packages/CoreWLAN/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CryptoTokenKit/_CryptoTokenKit.so b/env/lib/python2.7/site-packages/CryptoTokenKit/_CryptoTokenKit.so deleted file mode 100755 index f254f91a..00000000 Binary files a/env/lib/python2.7/site-packages/CryptoTokenKit/_CryptoTokenKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CryptoTokenKit/__init__.py b/env/lib/python2.7/site-packages/CryptoTokenKit/__init__.py deleted file mode 100644 index 7a512353..00000000 --- a/env/lib/python2.7/site-packages/CryptoTokenKit/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the CryptoTokenKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from CryptoTokenKit import _metadata -from CryptoTokenKit._CryptoTokenKit import * - -sys.modules['CryptoTokenKit'] = mod = objc.ObjCLazyModule( - "CryptoTokenKit", - "com.apple.CryptoTokenKit", - objc.pathForFramework("/System/Library/Frameworks/CryptoTokenKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['CryptoTokenKit._metadata'] diff --git a/env/lib/python2.7/site-packages/CryptoTokenKit/__init__.pyc b/env/lib/python2.7/site-packages/CryptoTokenKit/__init__.pyc deleted file mode 100644 index d210db5a..00000000 Binary files a/env/lib/python2.7/site-packages/CryptoTokenKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/CryptoTokenKit/_metadata.py b/env/lib/python2.7/site-packages/CryptoTokenKit/_metadata.py deleted file mode 100644 index 3e0a12fa..00000000 --- a/env/lib/python2.7/site-packages/CryptoTokenKit/_metadata.py +++ /dev/null @@ -1,68 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jun 11 11:15:38 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$TKErrorDomain$''' -enums = '''$TKErrorAuthenticationFailed@-5$TKErrorCodeAuthenticationFailed@-5$TKErrorCodeAuthenticationNeeded@-9$TKErrorCodeBadParameter@-8$TKErrorCodeCanceledByUser@-4$TKErrorCodeCommunicationError@-2$TKErrorCodeCorruptedData@-3$TKErrorCodeNotImplemented@-1$TKErrorCodeObjectNotFound@-6$TKErrorCodeTokenNotFound@-7$TKErrorObjectNotFound@-6$TKErrorTokenNotFound@-7$TKSmartCardNoSlot@0$TKSmartCardPINCharsetAlphanumeric@1$TKSmartCardPINCharsetNumeric@0$TKSmartCardPINCharsetUpperAlphanumeric@2$TKSmartCardPINCompletionKey@2$TKSmartCardPINCompletionMaxLength@1$TKSmartCardPINCompletionTimeout@4$TKSmartCardPINConfirmationCurrent@2$TKSmartCardPINConfirmationNew@1$TKSmartCardPINConfirmationNone@0$TKSmartCardPINEncodingASCII@1$TKSmartCardPINEncodingBCD@2$TKSmartCardPINEncodingBinary@0$TKSmartCardPINJustificationLeft@0$TKSmartCardPINJustificationRight@1$TKSmartCardProtocolAny@65535$TKSmartCardProtocolNone@0$TKSmartCardProtocolT0@1$TKSmartCardProtocolT1@2$TKSmartCardProtocolT15@32768$TKSmartCardSlotEmpty@1$TKSmartCardSlotMuteCard@3$TKSmartCardSlotProbing@2$TKSmartCardSlotStateEmpty@1$TKSmartCardSlotStateMissing@0$TKSmartCardSlotStateMuteCard@3$TKSmartCardSlotStateProbing@2$TKSmartCardSlotStateValidCard@4$TKSmartCardSlotValidCard@4$TKTokenOperationDecryptData@3$TKTokenOperationNone@0$TKTokenOperationPerformKeyExchange@4$TKTokenOperationReadData@1$TKTokenOperationSignData@2$''' -misc.update({}) -aliases = {'TKSmartCardNoSlot': 'TKSmartCardSlotStateMissing', 'TKSmartCardSlotMuteCard': 'TKSmartCardSlotStateMuteCard', 'TKSmartCardSlotEmpty': 'TKSmartCardSlotStateEmpty', 'TKSmartCardSlotValidCard': 'TKSmartCardSlotStateValidCard', 'TKSmartCardSlotProbing': 'TKSmartCardSlotStateProbing'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'token:createSessionWithError:', {'arguments': {3: {'type': 'o^@'}}}) - r(b'NSObject', b'tokenDriver:createTokenForSmartCard:AID:error:', {'arguments': {5: {'type': 'o^@'}}}) - r(b'NSObject', b'tokenSession:beginAuthForOperation:constraint:error:', {'arguments': {5: {'type': 
'o^@'}}}) - r(b'NSObject', b'tokenSession:decryptData:usingKey:algorithm:error:', {'arguments': {6: {'type': 'o^@'}}}) - r(b'NSObject', b'tokenSession:performKeyExchangeWithPublicKey:usingKey:algorithm:parameters:error:', {'arguments': {7: {'type': 'o^@'}}}) - r(b'NSObject', b'tokenSession:signData:usingKey:algorithm:error:', {'arguments': {6: {'type': 'o^@'}}}) - r(b'NSObject', b'tokenSession:supportsOperation:usingKey:algorithm:', {'retval': {'type': 'Z'}}) - r(b'TKSmartCard', b'beginSessionWithReply:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'TKSmartCard', b'inSessionWithError:executeBlock:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}, 3: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'o^@'}}}}}}) - r(b'TKSmartCard', b'sendIns:p1:p2:data:le:reply:', {'arguments': {7: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'S'}, 3: {'type': b'@'}}}}}}) - r(b'TKSmartCard', b'sendIns:p1:p2:data:le:sw:error:', {'arguments': {7: {'type_modifier': b'o'}, 8: {'type_modifier': b'o'}}}) - r(b'TKSmartCard', b'sensitive', {'retval': {'type': b'Z'}}) - r(b'TKSmartCard', b'setSensitive:', {'arguments': {2: {'type': b'Z'}}}) - r(b'TKSmartCard', b'setUseExtendedLength:', {'arguments': {2: {'type': b'Z'}}}) - r(b'TKSmartCard', b'transmitRequest:reply:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'TKSmartCard', b'useExtendedLength', {'retval': {'type': b'Z'}}) - r(b'TKSmartCard', b'valid', {'retval': {'type': b'Z'}}) - r(b'TKSmartCardATR', b'initWithSource:', {'arguments': {2: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'TKSmartCardSlotManager', b'getSlotWithName:reply:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'TKSmartCardUserInteraction', b'cancel', {'retval': {'type': 'Z'}}) - r(b'TKSmartCardUserInteraction', b'runWithReply:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'TKSmartCardUserInteractionForConfirmation', b'result', {'retval': {'type': 'Z'}}) - r(b'TKSmartCardUserInteractionForConfirmation', b'setResult:', {'arguments': {2: {'type': 'Z'}}}) - r(b'TKTokenAuthOperation', b'finishWithError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'TKTokenKeyAlgorithm', b'isAlgorithm:', {'retval': {'type': 'Z'}}) - r(b'TKTokenKeyAlgorithm', b'supportsAlgorithm:', {'retval': {'type': 'Z'}}) - r(b'TKTokenKeychainContents', b'certificateForObjectID:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'TKTokenKeychainContents', b'keyForObjectID:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'TKTokenKeychainKey', b'canDecrypt', {'retval': {'type': 'Z'}}) - r(b'TKTokenKeychainKey', b'canPerformKeyExchange', {'retval': {'type': 'Z'}}) - r(b'TKTokenKeychainKey', b'canSign', {'retval': {'type': 'Z'}}) - r(b'TKTokenKeychainKey', b'isSuitableForLogin', {'retval': {'type': 'Z'}}) - r(b'TKTokenKeychainKey', b'setCanDecrypt:', {'arguments': {2: {'type': 'Z'}}}) - r(b'TKTokenKeychainKey', b'setCanPerformKeyExchange:', {'arguments': {2: {'type': 'Z'}}}) - r(b'TKTokenKeychainKey', b'setCanSign:', {'arguments': {2: {'type': 'Z'}}}) - 
r(b'TKTokenKeychainKey', b'setSuitableForLogin:', {'arguments': {2: {'type': 'Z'}}}) - r(b'TKTokenWatcher', b'addRemovalHandler:forTokenID:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'TKTokenWatcher', b'initWithInsertionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'TKTokenWatcher', b'setInsertionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/CryptoTokenKit/_metadata.pyc b/env/lib/python2.7/site-packages/CryptoTokenKit/_metadata.pyc deleted file mode 100644 index fdc060c5..00000000 Binary files a/env/lib/python2.7/site-packages/CryptoTokenKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DVDPlayback/__init__.py b/env/lib/python2.7/site-packages/DVDPlayback/__init__.py deleted file mode 100644 index 49c4526f..00000000 --- a/env/lib/python2.7/site-packages/DVDPlayback/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the DVDPlayback framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from DVDPlayback import _metadata - -sys.modules['DVDPlayback'] = mod = objc.ObjCLazyModule( - "DVDPlayback", - "com.apple.dvdplayback", - objc.pathForFramework("/System/Library/Frameworks/DVDPlayback.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['DVDPlayback._metadata'] diff --git a/env/lib/python2.7/site-packages/DVDPlayback/__init__.pyc b/env/lib/python2.7/site-packages/DVDPlayback/__init__.pyc deleted file mode 100644 index a83dc34f..00000000 Binary files a/env/lib/python2.7/site-packages/DVDPlayback/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DVDPlayback/_metadata.py b/env/lib/python2.7/site-packages/DVDPlayback/_metadata.py deleted file mode 100644 index d62a8b71..00000000 --- a/env/lib/python2.7/site-packages/DVDPlayback/_metadata.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jul 1 19:04:15 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = 
'''$kDVDAMGMDomain@5$kDVDAspectRatio16x9@3$kDVDAspectRatio4x3@1$kDVDAspectRatio4x3PanAndScan@2$kDVDAspectRatioLetterBox@4$kDVDAspectRatioUninitialized@0$kDVDAudioAC3Format@1$kDVDAudioDDPlusFormat@8$kDVDAudioDTSFormat@5$kDVDAudioDTSHDFormat@9$kDVDAudioExtensionCodeDirectorsComment1@3$kDVDAudioExtensionCodeDirectorsComment2@4$kDVDAudioExtensionCodeNVisualImpaired@2$kDVDAudioExtensionCodeNormalCaptions@1$kDVDAudioExtensionCodeNotSpecified@0$kDVDAudioMLPFormat@7$kDVDAudioMPEG1Format@2$kDVDAudioMPEG2Format@3$kDVDAudioModeProLogic@1$kDVDAudioModeSPDIF@2$kDVDAudioModeUninitialized@0$kDVDAudioPCMFormat@4$kDVDAudioSDDSFormat@6$kDVDAudioUnknownFormat@0$kDVDButtonIndexNone@-1$kDVDErrorAlreadyPlaying@-70006$kDVDErrorAuthentification@-70025$kDVDErrorDisplayAuthentification@-70034$kDVDErrorDontNeedWakeup@-70009$kDVDErrorGraphicsDevice@-70018$kDVDErrorInitializingLib@-70002$kDVDErrorInvalidBookmarkForMedia@-70032$kDVDErrorInvalidBookmarkSize@-70031$kDVDErrorInvalidBookmarkVersion@-70030$kDVDErrorInvalidRegionCode@-70020$kDVDErrorIsAlreadySleeping@-70008$kDVDErrorMismatchedRegionCode@-70022$kDVDErrorMissingDrive@-70012$kDVDErrorMissingGraphicsDevice@-70017$kDVDErrorNavigation@-70029$kDVDErrorNoAudioOutputDevice@-70027$kDVDErrorNoFatalErrCallBack@-70007$kDVDErrorNoMoreRegionSets@-70023$kDVDErrorNoValidBookmarkForLastPlay@-70033$kDVDErrorNoValidMedia@-70015$kDVDErrorNotAllowedDuringPlayback@-70004$kDVDErrorNotSupportedConfiguration@-70013$kDVDErrorNotSupportedFunction@-70014$kDVDErrorOutOfVideoMemory@-70026$kDVDErrorPlaybackOpen@-70019$kDVDErrorRgnMgrInstall@-70021$kDVDErrorSystem@-70028$kDVDErrorTimeOutOfRange@-70010$kDVDErrorUnassignedGrafPort@-70005$kDVDErrorUninitializedLib@-70003$kDVDErrorUnknown@-70001$kDVDErrorUserActionNoOp@-70011$kDVDErrorWrongParam@-70016$kDVDErrordRegionCodeUninitialized@-70024$kDVDEventAngle@4$kDVDEventAngleNumbers@23$kDVDEventAudioStream@5$kDVDEventAudioStreamNumbers@22$kDVDEventBitrate@9$kDVDEventCCInfo@25$kDVDEventChapterTime@26$kDVDEventDisplayMode@7$kDVDEventDomain@8$kDVDEventError@24$kDVDEventGPRM@18$kDVDEventMenuCalled@15$kDVDEventPGC@17$kDVDEventPTT@2$kDVDEventParental@16$kDVDEventPlayback@11$kDVDEventRegionMismatch@19$kDVDEventScanSpeed@14$kDVDEventStill@10$kDVDEventStreams@13$kDVDEventSubpictureStream@6$kDVDEventSubpictureStreamNumbers@21$kDVDEventTitle@1$kDVDEventTitleTime@20$kDVDEventValidUOP@3$kDVDEventVideoStandard@12$kDVDFPDomain@0$kDVDFormatNTSC@1$kDVDFormatNTSC_HDTV@3$kDVDFormatPAL@2$kDVDFormatPAL_HDTV@4$kDVDFormatUninitialized@0$kDVDLanguageCodeAbkhazian@1633820704$kDVDLanguageCodeAfar@1633755168$kDVDLanguageCodeAfrikaans@1634082848$kDVDLanguageCodeAlbanian@1936793632$kDVDLanguageCodeAmharic@1634541600$kDVDLanguageCodeArabic@1634869280$kDVDLanguageCodeArmenian@1752768544$kDVDLanguageCodeAssamese@1634934816$kDVDLanguageCodeAymara@1635328032$kDVDLanguageCodeAzerbaijani@1635393568$kDVDLanguageCodeBashkir@1650532384$kDVDLanguageCodeBasque@1702174752$kDVDLanguageCodeBengali@1651384352$kDVDLanguageCodeBhutani@1685725216$kDVDLanguageCodeBihari@1650991136$kDVDLanguageCodeBislama@1651056672$kDVDLanguageCodeBreton@1651646496$kDVDLanguageCodeBulgarian@1650925600$kDVDLanguageCodeBurmese@1836654624$kDVDLanguageCodeByelorussian@1650794528$kDVDLanguageCodeCambodian@1802313760$kDVDLanguageCodeCatalan@1667309600$kDVDLanguageCodeChinese@2053644320$kDVDLanguageCodeCorsican@1668227104$kDVDLanguageCodeCroatian@1752309792$kDVDLanguageCodeCzech@1668489248$kDVDLanguageCodeDanish@1684086816$kDVDLanguageCodeDutch@1852579872$kDVDLanguageCodeEnglish@1701716000$kDVDLanguageCodeEsperanto@170
1781536$kDVDLanguageCodeEstonian@1702109216$kDVDLanguageCodeFaeroese@1718558752$kDVDLanguageCodeFiji@1718231072$kDVDLanguageCodeFinnish@1718165536$kDVDLanguageCodeFrench@1718755360$kDVDLanguageCodeFrisian@1719214112$kDVDLanguageCodeGalician@1735139360$kDVDLanguageCodeGeorgian@1801527328$kDVDLanguageCodeGerman@1684348960$kDVDLanguageCodeGreek@1701584928$kDVDLanguageCodeGreenlandic@1802248224$kDVDLanguageCodeGuarani@1735270432$kDVDLanguageCodeGujarati@1735729184$kDVDLanguageCodeHausa@1751195680$kDVDLanguageCodeHebrew@1769414688$kDVDLanguageCodeHindi@1751719968$kDVDLanguageCodeHungarian@1752506400$kDVDLanguageCodeIcelandic@1769152544$kDVDLanguageCodeIndonesian@1768824864$kDVDLanguageCodeInterlingua@1767972896$kDVDLanguageCodeInterlingue@1768235040$kDVDLanguageCodeInupiak@1768628256$kDVDLanguageCodeIrish@1734418464$kDVDLanguageCodeItalian@1769218080$kDVDLanguageCodeJapanese@1784750112$kDVDLanguageCodeJavanese@1786191904$kDVDLanguageCodeKannada@1802379296$kDVDLanguageCodeKashmiri@1802706976$kDVDLanguageCodeKazakh@1802182688$kDVDLanguageCodeKinyarwanda@1920409632$kDVDLanguageCodeKirghiz@1803100192$kDVDLanguageCodeKirundi@1919819808$kDVDLanguageCodeKorean@1802444832$kDVDLanguageCodeKurdish@1802838048$kDVDLanguageCodeLaothian@1819222048$kDVDLanguageCodeLatin@1818304544$kDVDLanguageCodeLatvian@1819680800$kDVDLanguageCodeLingala@1819156512$kDVDLanguageCodeLithuanian@1819549728$kDVDLanguageCodeMacedonian@1835737120$kDVDLanguageCodeMalagasy@1835474976$kDVDLanguageCodeMalay@1836261408$kDVDLanguageCodeMalayalam@1835802656$kDVDLanguageCodeMaltese@1836326944$kDVDLanguageCodeMaori@1835606048$kDVDLanguageCodeMarathi@1836195872$kDVDLanguageCodeMoldavian@1835999264$kDVDLanguageCodeMongolian@1835933728$kDVDLanguageCodeNauru@1851858976$kDVDLanguageCodeNepali@1852121120$kDVDLanguageCodeNone@808460320$kDVDLanguageCodeNorwegian@1852776480$kDVDLanguageCodeOccitan@1868767264$kDVDLanguageCodeOriya@1869750304$kDVDLanguageCodeOromo@1869422624$kDVDLanguageCodePashto@1886593056$kDVDLanguageCodePersian@1717641248$kDVDLanguageCodePolish@1886134304$kDVDLanguageCodePortugese@1886658592$kDVDLanguageCodePunjabi@1885413408$kDVDLanguageCodeQuechua@1903501344$kDVDLanguageCodeRhaetoRomance@1919754272$kDVDLanguageCodeRomanian@1919885344$kDVDLanguageCodeRussian@1920278560$kDVDLanguageCodeSamoan@1936531488$kDVDLanguageCodeSangro@1936138272$kDVDLanguageCodeSanskrit@1935745056$kDVDLanguageCodeScotsGaelic@1734615072$kDVDLanguageCodeSerbian@1936859168$kDVDLanguageCodeSerboCroatian@1936203808$kDVDLanguageCodeSesotho@1936990240$kDVDLanguageCodeSetswana@1953374240$kDVDLanguageCodeShona@1936597024$kDVDLanguageCodeSindhi@1935941664$kDVDLanguageCodeSinghalese@1936269344$kDVDLanguageCodeSiswati@1936924704$kDVDLanguageCodeSlovak@1936400416$kDVDLanguageCodeSlovenian@1936465952$kDVDLanguageCodeSomali@1936662560$kDVDLanguageCodeSpanish@1702043680$kDVDLanguageCodeSudanese@1937055776$kDVDLanguageCodeSwahili@1937186848$kDVDLanguageCodeSwedish@1937121312$kDVDLanguageCodeTagalog@1953243168$kDVDLanguageCodeTajik@1952915488$kDVDLanguageCodeTamil@1952522272$kDVDLanguageCodeTatar@1953767456$kDVDLanguageCodeTelugu@1952784416$kDVDLanguageCodeThai@1952981024$kDVDLanguageCodeTibetan@1651449888$kDVDLanguageCodeTigrinya@1953046560$kDVDLanguageCodeTonga@1953439776$kDVDLanguageCodeTsonga@1953701920$kDVDLanguageCodeTurkish@1953636384$kDVDLanguageCodeTurkmen@1953177632$kDVDLanguageCodeTwi@1953964064$kDVDLanguageCodeUkranian@1969954848$kDVDLanguageCodeUninitialized@1061101600$kDVDLanguageCodeUrdu@1970413600$kDVDLanguageCodeUzbek@1970937888$kDVDLanguageCodeVietnamese@19
86600992$kDVDLanguageCodeVolapuk@1986994208$kDVDLanguageCodeWelsh@1668882464$kDVDLanguageCodeWolof@2003771424$kDVDLanguageCodeXhosa@2020089888$kDVDLanguageCodeYiddish@1785274400$kDVDLanguageCodeYoruba@2037325856$kDVDLanguageCodeZulu@2054496288$kDVDLanguageNoPreference@707403808$kDVDMenuAngle@4$kDVDMenuAudio@3$kDVDMenuNone@6$kDVDMenuPTT@5$kDVDMenuRoot@1$kDVDMenuSubPicture@2$kDVDMenuTitle@0$kDVDRegionCode1@254$kDVDRegionCode2@253$kDVDRegionCode3@251$kDVDRegionCode4@247$kDVDRegionCode5@239$kDVDRegionCode6@223$kDVDRegionCode7@191$kDVDRegionCode8@127$kDVDRegionCodeUninitialized@255$kDVDSTOPDomain@4$kDVDScanDirectionBackward@1$kDVDScanDirectionForward@0$kDVDScanRate16x@16$kDVDScanRate1x@1$kDVDScanRate2x@2$kDVDScanRate32x@32$kDVDScanRate4x@4$kDVDScanRate8x@8$kDVDScanRateOneEigth@-8$kDVDScanRateOneFourth@-4$kDVDScanRateOneHalf@-2$kDVDStateIdle@6$kDVDStatePaused@3$kDVDStatePlaying@1$kDVDStatePlayingSlow@7$kDVDStatePlayingStill@2$kDVDStateScanning@5$kDVDStateStopped@4$kDVDStateUnknown@0$kDVDSubpictureExtensionCodeCaption4Children@3$kDVDSubpictureExtensionCodeCaptionBiggerSize@2$kDVDSubpictureExtensionCodeCaptionNormalSize@1$kDVDSubpictureExtensionCodeClosedCaption4Children@7$kDVDSubpictureExtensionCodeClosedCaptionBiggerSize@6$kDVDSubpictureExtensionCodeClosedCaptionNormalSize@5$kDVDSubpictureExtensionCodeForcedCaption@9$kDVDSubpictureExtensionCodeNotSpecified@0$kDVDSubpictureExtensionDirectorsComment4Children@15$kDVDSubpictureExtensionDirectorsCommentBiggerSize@14$kDVDSubpictureExtensionDirectorsCommentNormalSize@13$kDVDTTDomain@3$kDVDTTGRDomain@6$kDVDTimeCodeChapterDurationSeconds@6$kDVDTimeCodeChapterElapsedSeconds@4$kDVDTimeCodeChapterRemainingSeconds@5$kDVDTimeCodeElapsedSeconds@1$kDVDTimeCodeRemainingSeconds@2$kDVDTimeCodeTitleDurationSeconds@3$kDVDTimeCodeUninitialized@0$kDVDUOPAngleChange@4194304$kDVDUOPAudioStreamChange@1048576$kDVDUOPBackwardScan@512$kDVDUOPButton@131072$kDVDUOPForwardScan@256$kDVDUOPGoUp@16$kDVDUOPKaraokeModeChange@8388608$kDVDUOPMenuCallAngle@16384$kDVDUOPMenuCallAudio@8192$kDVDUOPMenuCallPTT@32768$kDVDUOPMenuCallRoot@2048$kDVDUOPMenuCallSubPicture@4096$kDVDUOPMenuCallTitle@1024$kDVDUOPNextPGSearch@128$kDVDUOPPTTPlaySearch@2$kDVDUOPPauseOff@67108864$kDVDUOPPauseOn@524288$kDVDUOPPrevTopPGSearch@64$kDVDUOPResume@65536$kDVDUOPScanOff@33554432$kDVDUOPStillOff@262144$kDVDUOPStop@8$kDVDUOPSubPictureStreamChange@2097152$kDVDUOPTimePTTSearch@32$kDVDUOPTimePlaySearch@1$kDVDUOPTitlePlay@4$kDVDUOPVideoModeChange@16777216$kDVDUserNavigationEnter@5$kDVDUserNavigationMoveDown@2$kDVDUserNavigationMoveLeft@3$kDVDUserNavigationMoveRight@4$kDVDUserNavigationMoveUp@1$kDVDVMGMDomain@1$kDVDVTSMDomain@2$''' -misc.update({}) -functions={'DVDIsDisplayingSubPicture': (b'i^Z', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGoToMenu': (b'iI',), 'DVDSetDefaultSubPictureLanguageCode': (b'iII',), 'DVDDoUserNavigation': (b'iI',), 'DVDOpenMediaVolume': (b'i^{FSRef=[80C]}', '', {'arguments': {0: {'type_modifier': 'n'}}}), 'DVDDoButtonActivate': (b'ii',), 'DVDGetTimeEventRate': (b'i^I', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetDiscRegionCode': (b'i^I', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDSetChapter': (b'iS',), 'DVDSetVideoDisplay': (b'iI',), 'DVDSetFatalErrorCallBack': (b'i^?^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'i'}, 1: {'type': b'^v'}}}}}}), 'DVDOpenMediaFileWithURL': (b'i^{__CFURL=}',), 'DVDGetNumAngles': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDSleep': (b'i',), 'DVDStepFrame': (b'ic',), 
'DVDIsOnMenu': (b'i^Z^I', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}}}), 'DVDOpenMediaFile': (b'i^{FSRef=[80C]}', '', {'arguments': {0: {'type_modifier': 'n'}}}), 'DVDHasPreviousChapter': (b'i^Z', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetTitle': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetAngle': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetAudioOutputModeCapabilities': (b'i^i', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDUpdateVideo': (b'i', '', {'variadic': False}), 'DVDIsSupportedDisplay': (b'iI^Z', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'DVDGetButtonPosition': (b'iI^{CGRect={CGPoint=dd}{CGSize=dd}}^I', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'DVDGetVideoWindowID': (b'i^I', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDIsPlaying': (b'i^Z', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDSetVideoWindowRef': (b'i^{OpaqueWindowPtr=}',), 'DVDGetAudioLanguageCodeByStream': (b'iS^I^I', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'DVDScan': (b'isc',), 'DVDGetVideoDisplay': (b'i^I', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetDriveRegionCode': (b'i^I^s', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}}}), 'DVDSetTitle': (b'iS',), 'DVDIsMuted': (b'i^Z', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDHasMenu': (b'iI^Z', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'DVDSetTimeEventRate': (b'iI',), 'DVDGetNumTitles': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetAspectRatio': (b'i^s', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetAudioOutputMode': (b'i^i', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGotoBookmark': (b'i^vI', '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'DVDGetNativeVideoSize': (b'i^S^S', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}}}), 'DVDDispose': (b'i', '', {'variadic': False}), 'DVDGetChapter': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetState': (b'i^i', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetVideoWindowRef': (b'i^^{OpaqueWindowPtr=}', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDOpenMediaVolumeWithURL': (b'i^{__CFURL=}',), 'DVDMute': (b'iZ',), 'DVDGetSPDIFDataOutDevice': (b'i^I', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetVideoCGBounds': (b'i^{CGRect={CGPoint=dd}{CGSize=dd}}', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetSPDIFDataOutDeviceCount': (b'i^I', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetLastPlayBookmark': (b'i^v^I', '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}, 1: {'type_modifier': 'N'}}}), 'DVDSwitchToDisplay': (b'iI^Z', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'DVDIsPaused': (b'i^Z', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDIsRegisteredEventCallBack': (b'Z^v', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDClearLastPlayBookmark': (b'i', '', {'variadic': False}), 'DVDSetAngle': (b'iS',), 'DVDGetSubPictureLanguageCodeByStream': (b'iS^I^I', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'DVDGetSPDIFDataOutDeviceCFName': (b'iI^^{__CFString=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'DVDEnableWebAccess': (b'iZ',), 'DVDSetLastPlayBookmark': (b'i^vI', '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'DVDGetSubPictureLanguageCode': (b'i^I^I', '', 
{'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}}}), 'DVDSetSubPictureStream': (b'iS',), 'DVDPreviousChapter': (b'i', '', {'variadic': False}), 'DVDIdle': (b'i', '', {'variadic': False}), 'DVDDoMenuCGClick': (b'i^{CGPoint=dd}^i', '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'type_modifier': 'o'}}}), 'DVDGetMediaUniqueID': (b'i[8C]',), 'DVDGetAudioStream': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetAudioLanguageCode': (b'i^I^I', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}}}), 'DVDSetTime': (b'isIS',), 'DVDReturnToTitle': (b'i', '', {'variadic': False}), 'DVDGetFormatStandard': (b'i^s', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDInitialize': (b'i', '', {'variadic': False}), 'DVDUnregisterEventCallBack': (b'i^v',), 'DVDSetAspectRatio': (b'is',), 'DVDPlay': (b'i', '', {'variadic': False}), 'DVDResume': (b'i', '', {'variadic': False}), 'DVDHasMedia': (b'i^Z', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDSetDefaultMenuLanguageCode': (b'iI',), 'DVDIsValidMediaRef': (b'i^{FSRef=[80C]}^Z', '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'type_modifier': 'o'}}}), 'DVDDisplaySubPicture': (b'iZ',), 'DVDGetScanRate': (b'i^s^c', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}}}), 'DVDRegisterEventCallBack': (b'i^?^II^v^^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'L'}, 2: {'type': b'L'}, 3: {'type': b'^v'}}}}, 1: {'type_modifier': 'n'}}}), 'DVDSetAudioOutputMode': (b'ii',), 'DVDSetDriveRegionCode': (b'iI^{AuthorizationOpaqueRef=}',), 'DVDIsValidMediaURL': (b'i^{__CFURL=}^Z', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'DVDGetAudioVolume': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDSetDefaultAudioLanguageCode': (b'iII',), 'DVDSetAudioStream': (b'iS',), 'DVDGetGPRMValue': (b'iI^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'DVDGetNumAudioStreams': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDCloseMediaFile': (b'i', '', {'variadic': False}), 'DVDCloseMediaVolume': (b'i', '', {'variadic': False}), 'DVDWakeUp': (b'i',), 'DVDGetAudioStreamFormatByStream': (b'iI^s^I^I^I', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'DVDSetVideoCGBounds': (b'i^{CGRect={CGPoint=dd}{CGSize=dd}}', '', {'arguments': {0: {'type_modifier': 'n'}}}), 'DVDPause': (b'i', '', {'variadic': False}), 'DVDGetAudioStreamFormat': (b'i^s^I^I^I', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'DVDGetBookmark': (b'i^v^I', '', {'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'o'}, 1: {'type_modifier': 'N'}}}), 'DVDGetNumSubPictureStreams': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGetButtoninfo': (b'i^I^I^I^I^I', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'DVDGetMenuLanguageCode': (b'i^I', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDSetSPDIFDataOutDevice': (b'iI',), 'DVDSetVideoWindowID': (b'iI',), 'DVDDoMenuCGMouseOver': (b'i^{CGPoint=dd}^i', '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'type_modifier': 'o'}}}), 'DVDNextChapter': (b'i', '', {'variadic': False}), 'DVDGetTime': (b'is^I^S', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'DVDGetAudioVolumeInfo': 
(b'i^S^S^S', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'DVDGetMediaVolumeCFName': (b'i^^{__CFString=}', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDHasNextChapter': (b'i^Z', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDGoBackOneLevel': (b'i', '', {'variadic': False}), 'DVDGetSubPictureStream': (b'i^S', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'DVDStop': (b'i', '', {'variadic': False}), 'DVDSetAudioVolume': (b'iS',), 'DVDGetNumChapters': (b'iS^S', '', {'arguments': {1: {'type_modifier': 'o'}}})} -aliases = {'DVD_NONNULL': '__nonnull', 'DVD_NULLABLE': '__nullable'} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/DVDPlayback/_metadata.pyc b/env/lib/python2.7/site-packages/DVDPlayback/_metadata.pyc deleted file mode 100644 index 045c3cc7..00000000 Binary files a/env/lib/python2.7/site-packages/DVDPlayback/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DictionaryServices/__init__.py b/env/lib/python2.7/site-packages/DictionaryServices/__init__.py deleted file mode 100644 index eaa9babd..00000000 --- a/env/lib/python2.7/site-packages/DictionaryServices/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -''' -Python mapping for the DictionaryServices framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import CoreServices - -import warnings -warnings.warn("pyobjc-framework-DictionaryServices is deprecated, use 'import CoreServices' instead", DeprecationWarning) - -sys.modules['DictionaryServices'] = mod = objc.ObjCLazyModule('DictionaryServices', - "com.apple.CoreServices", - objc.pathForFramework("/System/Library/Frameworks/CoreServices.framework"), - None, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( CoreServices,)) diff --git a/env/lib/python2.7/site-packages/DictionaryServices/__init__.pyc b/env/lib/python2.7/site-packages/DictionaryServices/__init__.pyc deleted file mode 100644 index f85e6117..00000000 Binary files a/env/lib/python2.7/site-packages/DictionaryServices/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DiscRecording/_DiscRecording.so b/env/lib/python2.7/site-packages/DiscRecording/_DiscRecording.so deleted file mode 100755 index ff8479d1..00000000 Binary files a/env/lib/python2.7/site-packages/DiscRecording/_DiscRecording.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DiscRecording/__init__.py b/env/lib/python2.7/site-packages/DiscRecording/__init__.py deleted file mode 100644 index 832bd416..00000000 --- a/env/lib/python2.7/site-packages/DiscRecording/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -''' -Python mapping for the DiscRecording framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from DiscRecording import _metadata -from DiscRecording import _DiscRecording - - -sys.modules['DiscRecording'] = mod = objc.ObjCLazyModule( - "DiscRecording", - "com.apple.DiscRecording", - objc.pathForFramework("/System/Library/Frameworks/DiscRecording.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['DiscRecording._metadata'] - - -from math import floor -def DRDeviceKPSForCDXFactor(xfactor): return float(xfactor) * mod.kDRDeviceBurnSpeedCD1x -def DRDeviceKPSForDVDXFactor(xfactor): return float(xfactor) * mod.kDRDeviceBurnSpeedDVD1x -def DRDeviceCDXFactorForKPS(kps): return floor(kps/mod.kDRDeviceBurnSpeedCD1x + 0.5) -def DRDeviceDVDXFactorForKPS(kps): return floor(kps/mod.kDRDeviceBurnSpeedDVD1x + 0.5) - -mod.DRDeviceKPSForCDXFactor = DRDeviceKPSForCDXFactor -mod.DRDeviceKPSForDVDXFactor = DRDeviceKPSForDVDXFactor -mod.DRDeviceCDXFactorForKPS = DRDeviceCDXFactorForKPS -mod.DRDeviceDVDXFactorForKPS = DRDeviceDVDXFactorForKPS diff --git a/env/lib/python2.7/site-packages/DiscRecording/__init__.pyc b/env/lib/python2.7/site-packages/DiscRecording/__init__.pyc deleted file mode 100644 index 629f17dc..00000000 Binary files a/env/lib/python2.7/site-packages/DiscRecording/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DiscRecording/_metadata.py b/env/lib/python2.7/site-packages/DiscRecording/_metadata.py deleted file mode 100644 index 57340e0c..00000000 --- a/env/lib/python2.7/site-packages/DiscRecording/_metadata.py +++ /dev/null @@ -1,72 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jul 1 12:22:14 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'DRFileForkSizeInfo': objc.createStructType('DRFileForkSizeInfo', sel32or64(b'{DRFileForkSizeInfo=LLQ}', b'{DRFileForkSizeInfo=IIQ}'), ['fork', 'query', 'size']), 'DRFileProductionInfo': objc.createStructType('DRFileProductionInfo', sel32or64(b'{DRFileProductionInfo=Q^vLLLL}', b'{DRFileProductionInfo=Q^vIIII}'), ['requestedAddress', 'buffer', 'reqCount', 'actCount', 'blockSize', 'fork']), 'DRTrackProductionInfo': objc.createStructType('DRTrackProductionInfo', sel32or64(b'{DRTrackProductionInfo=^vLLLLQ}', b'{DRTrackProductionInfo=^vIIIIQ}'), ['buffer', 'reqCount', 'actCount', 'flags', 'blockSize', 'requestedAddress'])}) -constants = 
'''$DRAbstractFile$DRAccessDate$DRAllFilesystems$DRApplicationIdentifier$DRAttributeModificationDate$DRAudioFourChannelKey$DRAudioPreEmphasisKey$DRBackupDate$DRBibliographicFile$DRBlockSize$DRBlockSizeKey$DRBlockTypeKey$DRBurnAppendableKey$DRBurnCompletionActionEject$DRBurnCompletionActionKey$DRBurnCompletionActionMount$DRBurnDoubleLayerL0DataZoneBlocksKey$DRBurnFailureActionEject$DRBurnFailureActionKey$DRBurnFailureActionNone$DRBurnOverwriteDiscKey$DRBurnRequestedSpeedKey$DRBurnStatusChangedNotification$DRBurnStrategyBDDAO$DRBurnStrategyCDSAO$DRBurnStrategyCDTAO$DRBurnStrategyDVDDAO$DRBurnStrategyIsRequiredKey$DRBurnStrategyKey$DRBurnTestingKey$DRBurnUnderrunProtectionKey$DRBurnVerifyDiscKey$DRCDTextArrangerKey$DRCDTextCharacterCodeKey$DRCDTextClosedKey$DRCDTextComposerKey$DRCDTextCopyrightAssertedForNamesKey$DRCDTextCopyrightAssertedForSpecialMessagesKey$DRCDTextCopyrightAssertedForTitlesKey$DRCDTextDiscIdentKey$DRCDTextGenreCodeKey$DRCDTextGenreKey$DRCDTextKey$DRCDTextLanguageKey$DRCDTextMCNISRCKey$DRCDTextNSStringEncodingKey$DRCDTextPerformerKey$DRCDTextSizeKey$DRCDTextSongwriterKey$DRCDTextSpecialMessageKey$DRCDTextTOC2Key$DRCDTextTOCKey$DRCDTextTitleKey$DRContentModificationDate$DRCopyrightFile$DRCreationDate$DRDVDCopyrightInfoKey$DRDVDTimestampKey$DRDataFormKey$DRDataPreparer$DRDefaultDate$DRDeviceAppearedNotification$DRDeviceBurnSpeedBD1x@f$DRDeviceBurnSpeedCD1x@f$DRDeviceBurnSpeedDVD1x@f$DRDeviceBurnSpeedHDDVD1x@f$DRDeviceBurnSpeedMax@f$DRDeviceBurnSpeedsKey$DRDeviceCanTestWriteCDKey$DRDeviceCanTestWriteDVDKey$DRDeviceCanUnderrunProtectCDKey$DRDeviceCanUnderrunProtectDVDKey$DRDeviceCanWriteBDKey$DRDeviceCanWriteBDREKey$DRDeviceCanWriteBDRKey$DRDeviceCanWriteCDKey$DRDeviceCanWriteCDRKey$DRDeviceCanWriteCDRWKey$DRDeviceCanWriteCDRawKey$DRDeviceCanWriteCDSAOKey$DRDeviceCanWriteCDTAOKey$DRDeviceCanWriteCDTextKey$DRDeviceCanWriteDVDDAOKey$DRDeviceCanWriteDVDKey$DRDeviceCanWriteDVDPlusRDoubleLayerKey$DRDeviceCanWriteDVDPlusRKey$DRDeviceCanWriteDVDPlusRWDoubleLayerKey$DRDeviceCanWriteDVDPlusRWKey$DRDeviceCanWriteDVDRAMKey$DRDeviceCanWriteDVDRDualLayerKey$DRDeviceCanWriteDVDRKey$DRDeviceCanWriteDVDRWDualLayerKey$DRDeviceCanWriteDVDRWKey$DRDeviceCanWriteHDDVDKey$DRDeviceCanWriteHDDVDRAMKey$DRDeviceCanWriteHDDVDRDualLayerKey$DRDeviceCanWriteHDDVDRKey$DRDeviceCanWriteHDDVDRWDualLayerKey$DRDeviceCanWriteHDDVDRWKey$DRDeviceCanWriteISRCKey$DRDeviceCanWriteIndexPointsKey$DRDeviceCanWriteKey$DRDeviceCurrentWriteSpeedKey$DRDeviceDisappearedNotification$DRDeviceFirmwareRevisionKey$DRDeviceIORegistryEntryPathKey$DRDeviceIsBusyKey$DRDeviceIsTrayOpenKey$DRDeviceLoadingMechanismCanEjectKey$DRDeviceLoadingMechanismCanInjectKey$DRDeviceLoadingMechanismCanOpenKey$DRDeviceMaximumWriteSpeedKey$DRDeviceMediaBSDNameKey$DRDeviceMediaBlocksFreeKey$DRDeviceMediaBlocksOverwritableKey$DRDeviceMediaBlocksUsedKey$DRDeviceMediaClassBD$DRDeviceMediaClassCD$DRDeviceMediaClassDVD$DRDeviceMediaClassHDDVD$DRDeviceMediaClassKey$DRDeviceMediaClassUnknown$DRDeviceMediaDoubleLayerL0DataZoneBlocksKey$DRDeviceMediaFreeSpaceKey$DRDeviceMediaInfoKey$DRDeviceMediaIsAppendableKey$DRDeviceMediaIsBlankKey$DRDeviceMediaIsErasableKey$DRDeviceMediaIsOverwritableKey$DRDeviceMediaIsReservedKey$DRDeviceMediaOverwritableSpaceKey$DRDeviceMediaSessionCountKey$DRDeviceMediaStateInTransition$DRDeviceMediaStateKey$DRDeviceMediaStateMediaPresent$DRDeviceMediaStateNone$DRDeviceMediaTrackCountKey$DRDeviceMediaTypeBDR$DRDeviceMediaTypeBDRE$DRDeviceMediaTypeBDROM$DRDeviceMediaTypeCDR$DRDeviceMediaTypeCDROM$DRDeviceMediaTypeCDRW$DRDeviceMediaTypeDVDPlu
sR$DRDeviceMediaTypeDVDPlusRDoubleLayer$DRDeviceMediaTypeDVDPlusRW$DRDeviceMediaTypeDVDPlusRWDoubleLayer$DRDeviceMediaTypeDVDR$DRDeviceMediaTypeDVDRAM$DRDeviceMediaTypeDVDRDualLayer$DRDeviceMediaTypeDVDROM$DRDeviceMediaTypeDVDRW$DRDeviceMediaTypeDVDRWDualLayer$DRDeviceMediaTypeHDDVDR$DRDeviceMediaTypeHDDVDRAM$DRDeviceMediaTypeHDDVDRDualLayer$DRDeviceMediaTypeHDDVDROM$DRDeviceMediaTypeHDDVDRW$DRDeviceMediaTypeHDDVDRWDualLayer$DRDeviceMediaTypeKey$DRDeviceMediaTypeUnknown$DRDeviceMediaUsedSpaceKey$DRDevicePhysicalInterconnectATAPI$DRDevicePhysicalInterconnectFibreChannel$DRDevicePhysicalInterconnectFireWire$DRDevicePhysicalInterconnectKey$DRDevicePhysicalInterconnectLocationExternal$DRDevicePhysicalInterconnectLocationInternal$DRDevicePhysicalInterconnectLocationKey$DRDevicePhysicalInterconnectLocationUnknown$DRDevicePhysicalInterconnectSCSI$DRDevicePhysicalInterconnectUSB$DRDeviceProductNameKey$DRDeviceStatusChangedNotification$DRDeviceSupportLevelAppleShipping$DRDeviceSupportLevelAppleSupported$DRDeviceSupportLevelKey$DRDeviceSupportLevelNone$DRDeviceSupportLevelUnsupported$DRDeviceSupportLevelVendorSupported$DRDeviceTrackInfoKey$DRDeviceTrackRefsKey$DRDeviceVendorNameKey$DRDeviceWriteBufferSizeKey$DRDeviceWriteCapabilitiesKey$DREffectiveDate$DREraseStatusChangedNotification$DREraseTypeComplete$DREraseTypeKey$DREraseTypeQuick$DRErrorStatusAdditionalSenseStringKey$DRErrorStatusErrorInfoStringKey$DRErrorStatusErrorKey$DRErrorStatusErrorStringKey$DRErrorStatusKey$DRErrorStatusSenseCodeStringKey$DRErrorStatusSenseKey$DRExpirationDate$DRFreeBlocksKey$DRHFSPlus$DRHFSPlusCatalogNodeID$DRHFSPlusTextEncodingHint$DRISO9660$DRISO9660LevelOne$DRISO9660LevelTwo$DRISO9660VersionNumber$DRISOLevel$DRISOMacExtensions$DRISORockRidgeExtensions$DRIndexPointsKey$DRInvisible$DRJoliet$DRLinkTypeFinderAlias$DRLinkTypeHardLink$DRLinkTypeSymbolicLink$DRMacExtendedFinderFlags$DRMacFileCreator$DRMacFileType$DRMacFinderFlags$DRMacFinderHideExtension$DRMacIconLocation$DRMacScrollPosition$DRMacWindowBounds$DRMacWindowView$DRMaxBurnSpeedKey$DRMediaCatalogNumberKey$DRNextWritableAddressKey$DRPosixFileMode$DRPosixGID$DRPosixUID$DRPreGapIsRequiredKey$DRPreGapLengthKey$DRPublisher$DRRecordingDate$DRSCMSCopyrightFree$DRSCMSCopyrightProtectedCopy$DRSCMSCopyrightProtectedOriginal$DRSerialCopyManagementStateKey$DRSessionFormatKey$DRSessionNumberKey$DRStatusCurrentSessionKey$DRStatusCurrentSpeedKey$DRStatusCurrentTrackKey$DRStatusEraseTypeKey$DRStatusPercentCompleteKey$DRStatusProgressCurrentKPS$DRStatusProgressCurrentXFactor$DRStatusProgressInfoKey$DRStatusStateDone$DRStatusStateErasing$DRStatusStateFailed$DRStatusStateFinishing$DRStatusStateKey$DRStatusStateNone$DRStatusStatePreparing$DRStatusStateSessionClose$DRStatusStateSessionOpen$DRStatusStateTrackClose$DRStatusStateTrackOpen$DRStatusStateTrackWrite$DRStatusStateVerifying$DRStatusTotalSessionsKey$DRStatusTotalTracksKey$DRSubchannelDataFormKey$DRSubchannelDataFormNone$DRSubchannelDataFormPack$DRSubchannelDataFormRaw$DRSuppressMacSpecificFiles$DRSynchronousBehaviorKey$DRSystemIdentifier$DRTrackISRCKey$DRTrackIsEmptyKey$DRTrackLengthKey$DRTrackModeKey$DRTrackNumberKey$DRTrackPacketSizeKey$DRTrackPacketTypeFixed$DRTrackPacketTypeKey$DRTrackPacketTypeVariable$DRTrackStartAddressKey$DRTrackTypeClosed$DRTrackTypeIncomplete$DRTrackTypeInvisible$DRTrackTypeKey$DRTrackTypeReserved$DRUDF$DRUDFApplicationIdentifierSuffix$DRUDFExtendedFilePermissions$DRUDFInterchangeLevel$DRUDFMaxInterchangeLevel$DRUDFMaxVolumeSequenceNumber$DRUDFPrimaryVolumeDescriptorNumber$DRUDFRealTimeFile$DRUDFVers
ion102$DRUDFVersion150$DRUDFVolumeSequenceNumber$DRUDFVolumeSetIdentifier$DRUDFVolumeSetImplementationUse$DRUDFVolumeSetTimestamp$DRUDFWriteVersion$DRVerificationTypeChecksum$DRVerificationTypeKey$DRVerificationTypeNone$DRVerificationTypeProduceAgain$DRVerificationTypeReceiveData$DRVolumeCheckedDate$DRVolumeCreationDate$DRVolumeEffectiveDate$DRVolumeExpirationDate$DRVolumeModificationDate$DRVolumeSet$kDRAbstractFile@^{__CFString=}$kDRAccessDate@^{__CFString=}$kDRAllFilesystems@^{__CFString=}$kDRApplicationIdentifier@^{__CFString=}$kDRAttributeModificationDate@^{__CFString=}$kDRAudioFourChannelKey@^{__CFString=}$kDRAudioPreEmphasisKey@^{__CFString=}$kDRBackupDate@^{__CFString=}$kDRBibliographicFile@^{__CFString=}$kDRBlockSize@^{__CFString=}$kDRBlockSizeKey@^{__CFString=}$kDRBlockTypeKey@^{__CFString=}$kDRBufferZone1DataKey@^{__CFString=}$kDRBurnAppendableKey@^{__CFString=}$kDRBurnCompletionActionEject@^{__CFString=}$kDRBurnCompletionActionKey@^{__CFString=}$kDRBurnCompletionActionMount@^{__CFString=}$kDRBurnDoubleLayerL0DataZoneBlocksKey@^{__CFString=}$kDRBurnFailureActionEject@^{__CFString=}$kDRBurnFailureActionKey@^{__CFString=}$kDRBurnFailureActionNone@^{__CFString=}$kDRBurnKey@^{__CFString=}$kDRBurnOverwriteDiscKey@^{__CFString=}$kDRBurnRequestedSpeedKey@^{__CFString=}$kDRBurnStatusChangedNotification@^{__CFString=}$kDRBurnStrategyBDDAO@^{__CFString=}$kDRBurnStrategyCDSAO@^{__CFString=}$kDRBurnStrategyCDTAO@^{__CFString=}$kDRBurnStrategyDVDDAO@^{__CFString=}$kDRBurnStrategyIsRequiredKey@^{__CFString=}$kDRBurnStrategyKey@^{__CFString=}$kDRBurnTestingKey@^{__CFString=}$kDRBurnUnderrunProtectionKey@^{__CFString=}$kDRBurnVerifyDiscKey@^{__CFString=}$kDRCDTextArrangerKey@^{__CFString=}$kDRCDTextCFStringEncodingKey@^{__CFString=}$kDRCDTextCharacterCodeKey@^{__CFString=}$kDRCDTextClosedKey@^{__CFString=}$kDRCDTextComposerKey@^{__CFString=}$kDRCDTextCopyrightAssertedForNamesKey@^{__CFString=}$kDRCDTextCopyrightAssertedForSpecialMessagesKey@^{__CFString=}$kDRCDTextCopyrightAssertedForTitlesKey@^{__CFString=}$kDRCDTextDiscIdentKey@^{__CFString=}$kDRCDTextGenreCodeKey@^{__CFString=}$kDRCDTextGenreKey@^{__CFString=}$kDRCDTextKey@^{__CFString=}$kDRCDTextLanguageKey@^{__CFString=}$kDRCDTextMCNISRCKey@^{__CFString=}$kDRCDTextPerformerKey@^{__CFString=}$kDRCDTextSizeKey@^{__CFString=}$kDRCDTextSongwriterKey@^{__CFString=}$kDRCDTextSpecialMessageKey@^{__CFString=}$kDRCDTextTOC2Key@^{__CFString=}$kDRCDTextTOCKey@^{__CFString=}$kDRCDTextTitleKey@^{__CFString=}$kDRContentModificationDate@^{__CFString=}$kDRCopyrightFile@^{__CFString=}$kDRCreationDate@^{__CFString=}$kDRDVDCopyrightInfoKey@^{__CFString=}$kDRDVDTimestampKey@^{__CFString=}$kDRDataFormKey@^{__CFString=}$kDRDataPreparer@^{__CFString=}$kDRDefaultDate@^{__CFString=}$kDRDeviceAppearedNotification@^{__CFString=}$kDRDeviceBurnSpeedBD1x@f$kDRDeviceBurnSpeedCD1x@f$kDRDeviceBurnSpeedDVD1x@f$kDRDeviceBurnSpeedHDDVD1x@f$kDRDeviceBurnSpeedMax@f$kDRDeviceBurnSpeedsKey@^{__CFString=}$kDRDeviceCanTestWriteCDKey@^{__CFString=}$kDRDeviceCanTestWriteDVDKey@^{__CFString=}$kDRDeviceCanUnderrunProtectCDKey@^{__CFString=}$kDRDeviceCanUnderrunProtectDVDKey@^{__CFString=}$kDRDeviceCanWriteBDKey@^{__CFString=}$kDRDeviceCanWriteBDREKey@^{__CFString=}$kDRDeviceCanWriteBDRKey@^{__CFString=}$kDRDeviceCanWriteCDKey@^{__CFString=}$kDRDeviceCanWriteCDRKey@^{__CFString=}$kDRDeviceCanWriteCDRWKey@^{__CFString=}$kDRDeviceCanWriteCDRawKey@^{__CFString=}$kDRDeviceCanWriteCDSAOKey@^{__CFString=}$kDRDeviceCanWriteCDTAOKey@^{__CFString=}$kDRDeviceCanWriteCDTextKey@^{__CFString=}$kDRDev
iceCanWriteDVDDAOKey@^{__CFString=}$kDRDeviceCanWriteDVDKey@^{__CFString=}$kDRDeviceCanWriteDVDPlusRDoubleLayerKey@^{__CFString=}$kDRDeviceCanWriteDVDPlusRKey@^{__CFString=}$kDRDeviceCanWriteDVDPlusRWDoubleLayerKey@^{__CFString=}$kDRDeviceCanWriteDVDPlusRWKey@^{__CFString=}$kDRDeviceCanWriteDVDRAMKey@^{__CFString=}$kDRDeviceCanWriteDVDRDualLayerKey@^{__CFString=}$kDRDeviceCanWriteDVDRKey@^{__CFString=}$kDRDeviceCanWriteDVDRWDualLayerKey@^{__CFString=}$kDRDeviceCanWriteDVDRWKey@^{__CFString=}$kDRDeviceCanWriteHDDVDKey@^{__CFString=}$kDRDeviceCanWriteHDDVDRAMKey@^{__CFString=}$kDRDeviceCanWriteHDDVDRDualLayerKey@^{__CFString=}$kDRDeviceCanWriteHDDVDRKey@^{__CFString=}$kDRDeviceCanWriteHDDVDRWDualLayerKey@^{__CFString=}$kDRDeviceCanWriteHDDVDRWKey@^{__CFString=}$kDRDeviceCanWriteISRCKey@^{__CFString=}$kDRDeviceCanWriteIndexPointsKey@^{__CFString=}$kDRDeviceCanWriteKey@^{__CFString=}$kDRDeviceCurrentWriteSpeedKey@^{__CFString=}$kDRDeviceDisappearedNotification@^{__CFString=}$kDRDeviceFirmwareRevisionKey@^{__CFString=}$kDRDeviceIORegistryEntryPathKey@^{__CFString=}$kDRDeviceIsBusyKey@^{__CFString=}$kDRDeviceIsTrayOpenKey@^{__CFString=}$kDRDeviceLoadingMechanismCanEjectKey@^{__CFString=}$kDRDeviceLoadingMechanismCanInjectKey@^{__CFString=}$kDRDeviceLoadingMechanismCanOpenKey@^{__CFString=}$kDRDeviceMaximumWriteSpeedKey@^{__CFString=}$kDRDeviceMediaBSDNameKey@^{__CFString=}$kDRDeviceMediaBlocksFreeKey@^{__CFString=}$kDRDeviceMediaBlocksOverwritableKey@^{__CFString=}$kDRDeviceMediaBlocksUsedKey@^{__CFString=}$kDRDeviceMediaClassBD@^{__CFString=}$kDRDeviceMediaClassCD@^{__CFString=}$kDRDeviceMediaClassDVD@^{__CFString=}$kDRDeviceMediaClassHDDVD@^{__CFString=}$kDRDeviceMediaClassKey@^{__CFString=}$kDRDeviceMediaClassUnknown@^{__CFString=}$kDRDeviceMediaDoubleLayerL0DataZoneBlocksKey@^{__CFString=}$kDRDeviceMediaInfoKey@^{__CFString=}$kDRDeviceMediaIsAppendableKey@^{__CFString=}$kDRDeviceMediaIsBlankKey@^{__CFString=}$kDRDeviceMediaIsErasableKey@^{__CFString=}$kDRDeviceMediaIsOverwritableKey@^{__CFString=}$kDRDeviceMediaIsReservedKey@^{__CFString=}$kDRDeviceMediaSessionCountKey@^{__CFString=}$kDRDeviceMediaStateInTransition@^{__CFString=}$kDRDeviceMediaStateKey@^{__CFString=}$kDRDeviceMediaStateMediaPresent@^{__CFString=}$kDRDeviceMediaStateNone@^{__CFString=}$kDRDeviceMediaTrackCountKey@^{__CFString=}$kDRDeviceMediaTypeBDR@^{__CFString=}$kDRDeviceMediaTypeBDRE@^{__CFString=}$kDRDeviceMediaTypeBDROM@^{__CFString=}$kDRDeviceMediaTypeCDR@^{__CFString=}$kDRDeviceMediaTypeCDROM@^{__CFString=}$kDRDeviceMediaTypeCDRW@^{__CFString=}$kDRDeviceMediaTypeDVDPlusR@^{__CFString=}$kDRDeviceMediaTypeDVDPlusRDoubleLayer@^{__CFString=}$kDRDeviceMediaTypeDVDPlusRW@^{__CFString=}$kDRDeviceMediaTypeDVDPlusRWDoubleLayer@^{__CFString=}$kDRDeviceMediaTypeDVDR@^{__CFString=}$kDRDeviceMediaTypeDVDRAM@^{__CFString=}$kDRDeviceMediaTypeDVDRDualLayer@^{__CFString=}$kDRDeviceMediaTypeDVDROM@^{__CFString=}$kDRDeviceMediaTypeDVDRW@^{__CFString=}$kDRDeviceMediaTypeDVDRWDualLayer@^{__CFString=}$kDRDeviceMediaTypeHDDVDR@^{__CFString=}$kDRDeviceMediaTypeHDDVDRAM@^{__CFString=}$kDRDeviceMediaTypeHDDVDRDualLayer@^{__CFString=}$kDRDeviceMediaTypeHDDVDROM@^{__CFString=}$kDRDeviceMediaTypeHDDVDRW@^{__CFString=}$kDRDeviceMediaTypeHDDVDRWDualLayer@^{__CFString=}$kDRDeviceMediaTypeKey@^{__CFString=}$kDRDeviceMediaTypeUnknown@^{__CFString=}$kDRDevicePhysicalInterconnectATAPI@^{__CFString=}$kDRDevicePhysicalInterconnectFibreChannel@^{__CFString=}$kDRDevicePhysicalInterconnectFireWire@^{__CFString=}$kDRDevicePhysicalInterconnectKey@^{__CFString=}$
kDRDevicePhysicalInterconnectLocationExternal@^{__CFString=}$kDRDevicePhysicalInterconnectLocationInternal@^{__CFString=}$kDRDevicePhysicalInterconnectLocationKey@^{__CFString=}$kDRDevicePhysicalInterconnectLocationUnknown@^{__CFString=}$kDRDevicePhysicalInterconnectSCSI@^{__CFString=}$kDRDevicePhysicalInterconnectUSB@^{__CFString=}$kDRDeviceProductNameKey@^{__CFString=}$kDRDeviceStatusChangedNotification@^{__CFString=}$kDRDeviceSupportLevelAppleShipping@^{__CFString=}$kDRDeviceSupportLevelAppleSupported@^{__CFString=}$kDRDeviceSupportLevelKey@^{__CFString=}$kDRDeviceSupportLevelNone@^{__CFString=}$kDRDeviceSupportLevelUnsupported@^{__CFString=}$kDRDeviceSupportLevelVendorSupported@^{__CFString=}$kDRDeviceTrackInfoKey@^{__CFString=}$kDRDeviceTrackRefsKey@^{__CFString=}$kDRDeviceVendorNameKey@^{__CFString=}$kDRDeviceWriteBufferSizeKey@^{__CFString=}$kDRDeviceWriteCapabilitiesKey@^{__CFString=}$kDREffectiveDate@^{__CFString=}$kDREraseStatusChangedNotification@^{__CFString=}$kDREraseTypeComplete@^{__CFString=}$kDREraseTypeKey@^{__CFString=}$kDREraseTypeQuick@^{__CFString=}$kDRErrorStatusAdditionalSenseStringKey@^{__CFString=}$kDRErrorStatusErrorInfoStringKey@^{__CFString=}$kDRErrorStatusErrorKey@^{__CFString=}$kDRErrorStatusErrorStringKey@^{__CFString=}$kDRErrorStatusKey@^{__CFString=}$kDRErrorStatusSenseCodeStringKey@^{__CFString=}$kDRErrorStatusSenseKey@^{__CFString=}$kDRExpirationDate@^{__CFString=}$kDRFreeBlocksKey@^{__CFString=}$kDRHFSPlus@^{__CFString=}$kDRHFSPlusCatalogNodeID@^{__CFString=}$kDRHFSPlusTextEncodingHint@^{__CFString=}$kDRISO9660@^{__CFString=}$kDRISO9660LevelOne@^{__CFString=}$kDRISO9660LevelTwo@^{__CFString=}$kDRISO9660VersionNumber@^{__CFString=}$kDRISOLevel@^{__CFString=}$kDRISOMacExtensions@^{__CFString=}$kDRISORockRidgeExtensions@^{__CFString=}$kDRIndexPointsKey@^{__CFString=}$kDRInvisible@^{__CFString=}$kDRJoliet@^{__CFString=}$kDRMacExtendedFinderFlags@^{__CFString=}$kDRMacFileCreator@^{__CFString=}$kDRMacFileType@^{__CFString=}$kDRMacFinderFlags@^{__CFString=}$kDRMacFinderHideExtension@^{__CFString=}$kDRMacIconLocation@^{__CFString=}$kDRMacScrollPosition@^{__CFString=}$kDRMacWindowBounds@^{__CFString=}$kDRMacWindowView@^{__CFString=}$kDRMaxBurnSpeedKey@^{__CFString=}$kDRMediaCatalogNumberKey@^{__CFString=}$kDRNextWritableAddressKey@^{__CFString=}$kDRPosixFileMode@^{__CFString=}$kDRPosixGID@^{__CFString=}$kDRPosixUID@^{__CFString=}$kDRPreGapIsRequiredKey@^{__CFString=}$kDRPreGapLengthKey@^{__CFString=}$kDRPublisher@^{__CFString=}$kDRRecordingDate@^{__CFString=}$kDRSCMSCopyrightFree@^{__CFString=}$kDRSCMSCopyrightProtectedCopy@^{__CFString=}$kDRSCMSCopyrightProtectedOriginal@^{__CFString=}$kDRSerialCopyManagementStateKey@^{__CFString=}$kDRSessionFormatKey@^{__CFString=}$kDRSessionNumberKey@^{__CFString=}$kDRStatusCurrentSessionKey@^{__CFString=}$kDRStatusCurrentSpeedKey@^{__CFString=}$kDRStatusCurrentTrackKey@^{__CFString=}$kDRStatusEraseTypeKey@^{__CFString=}$kDRStatusPercentCompleteKey@^{__CFString=}$kDRStatusProgressCurrentKPS@^{__CFString=}$kDRStatusProgressCurrentXFactor@^{__CFString=}$kDRStatusProgressInfoKey@^{__CFString=}$kDRStatusStateDone@^{__CFString=}$kDRStatusStateErasing@^{__CFString=}$kDRStatusStateFailed@^{__CFString=}$kDRStatusStateFinishing@^{__CFString=}$kDRStatusStateKey@^{__CFString=}$kDRStatusStateNone@^{__CFString=}$kDRStatusStatePreparing@^{__CFString=}$kDRStatusStateSessionClose@^{__CFString=}$kDRStatusStateSessionOpen@^{__CFString=}$kDRStatusStateTrackClose@^{__CFString=}$kDRStatusStateTrackOpen@^{__CFString=}$kDRStatusStateTrackWrite@^{__CF
String=}$kDRStatusStateVerifying@^{__CFString=}$kDRStatusTotalSessionsKey@^{__CFString=}$kDRStatusTotalTracksKey@^{__CFString=}$kDRSubchannelDataFormKey@^{__CFString=}$kDRSubchannelDataFormNone@^{__CFString=}$kDRSubchannelDataFormPack@^{__CFString=}$kDRSubchannelDataFormRaw@^{__CFString=}$kDRSuppressMacSpecificFiles@^{__CFString=}$kDRSynchronousBehaviorKey@^{__CFString=}$kDRSystemIdentifier@^{__CFString=}$kDRTrackISRCKey@^{__CFString=}$kDRTrackIsEmptyKey@^{__CFString=}$kDRTrackLengthKey@^{__CFString=}$kDRTrackModeKey@^{__CFString=}$kDRTrackNumberKey@^{__CFString=}$kDRTrackPacketSizeKey@^{__CFString=}$kDRTrackPacketTypeFixed@^{__CFString=}$kDRTrackPacketTypeKey@^{__CFString=}$kDRTrackPacketTypeVariable@^{__CFString=}$kDRTrackStartAddressKey@^{__CFString=}$kDRTrackTypeClosed@^{__CFString=}$kDRTrackTypeIncomplete@^{__CFString=}$kDRTrackTypeInvisible@^{__CFString=}$kDRTrackTypeKey@^{__CFString=}$kDRTrackTypeReserved@^{__CFString=}$kDRUDF@^{__CFString=}$kDRUDFApplicationIdentifierSuffix@^{__CFString=}$kDRUDFExtendedFilePermissions@^{__CFString=}$kDRUDFInterchangeLevel@^{__CFString=}$kDRUDFMaxInterchangeLevel@^{__CFString=}$kDRUDFMaxVolumeSequenceNumber@^{__CFString=}$kDRUDFPrimaryVolumeDescriptorNumber@^{__CFString=}$kDRUDFRealTimeFile@^{__CFString=}$kDRUDFVersion102@^{__CFString=}$kDRUDFVersion150@^{__CFString=}$kDRUDFVolumeSequenceNumber@^{__CFString=}$kDRUDFVolumeSetIdentifier@^{__CFString=}$kDRUDFVolumeSetImplementationUse@^{__CFString=}$kDRUDFVolumeSetTimestamp@^{__CFString=}$kDRUDFWriteVersion@^{__CFString=}$kDRVerificationTypeChecksum@^{__CFString=}$kDRVerificationTypeKey@^{__CFString=}$kDRVerificationTypeNone@^{__CFString=}$kDRVerificationTypeProduceAgain@^{__CFString=}$kDRVerificationTypeReceiveData@^{__CFString=}$kDRVolumeCheckedDate@^{__CFString=}$kDRVolumeCreationDate@^{__CFString=}$kDRVolumeEffectiveDate@^{__CFString=}$kDRVolumeExpirationDate@^{__CFString=}$kDRVolumeModificationDate@^{__CFString=}$kDRVolumeSet@^{__CFString=}$''' -constants = constants + '$kDRRefConCFTypeCallbacks@%s$'%(sel32or64('{DRRefConCallbacks=L^?^?}', '{DRRefConCallbacks=Q^?^?}'),) -enums = 
'''$DRCDTextEncodingASCII@1$DRCDTextEncodingISOLatin1Modified@5$DRCDTextGenreCodeAdultContemporary@2$DRCDTextGenreCodeAlternativeRock@3$DRCDTextGenreCodeChildrens@4$DRCDTextGenreCodeClassical@5$DRCDTextGenreCodeContemporaryChristian@6$DRCDTextGenreCodeCountry@7$DRCDTextGenreCodeDance@8$DRCDTextGenreCodeEasyListening@9$DRCDTextGenreCodeErotic@10$DRCDTextGenreCodeFolk@11$DRCDTextGenreCodeGospel@12$DRCDTextGenreCodeHipHop@13$DRCDTextGenreCodeJazz@14$DRCDTextGenreCodeLatin@15$DRCDTextGenreCodeMusical@16$DRCDTextGenreCodeNewAge@17$DRCDTextGenreCodeOpera@18$DRCDTextGenreCodeOperetta@19$DRCDTextGenreCodePop@20$DRCDTextGenreCodeRap@21$DRCDTextGenreCodeReggae@22$DRCDTextGenreCodeRhythmAndBlues@24$DRCDTextGenreCodeRock@23$DRCDTextGenreCodeSoundEffects@25$DRCDTextGenreCodeSoundtrack@26$DRCDTextGenreCodeSpokenWord@27$DRCDTextGenreCodeUnknown@1$DRCDTextGenreCodeWorldMusic@28$DRFileForkData@0$DRFileForkResource@1$DRFilesystemInclusionMaskHFSPlus@8$DRFilesystemInclusionMaskISO9660@1$DRFilesystemInclusionMaskJoliet@2$DRFilesystemInclusionMaskUDF@4$DRFlagSubchannelDataRequested@2$kDRAudioFileNotSupportedErr@2147614828$kDRBadLayoutErr@2147614821$kDRBlockSizeAudio@2352$kDRBlockSizeDVDData@2048$kDRBlockSizeMode1Data@2048$kDRBlockSizeMode2Data@2332$kDRBlockSizeMode2Form1Data@2048$kDRBlockSizeMode2Form2Data@2324$kDRBlockTypeAudio@0$kDRBlockTypeDVDData@8$kDRBlockTypeMode1Data@8$kDRBlockTypeMode2Data@13$kDRBlockTypeMode2Form1Data@10$kDRBlockTypeMode2Form2Data@12$kDRBurnMediaWriteFailureErr@2147614830$kDRBurnNotAllowedErr@2147614817$kDRBurnPowerCalibrationErr@2147614829$kDRBurnUnderrunErr@2147614816$kDRCDTextEncodingASCII@1536$kDRCDTextEncodingISOLatin1Modified@513$kDRCDTextGenreCodeAdultContemporary@2$kDRCDTextGenreCodeAlternativeRock@3$kDRCDTextGenreCodeChildrens@4$kDRCDTextGenreCodeClassical@5$kDRCDTextGenreCodeContemporaryChristian@6$kDRCDTextGenreCodeCountry@7$kDRCDTextGenreCodeDance@8$kDRCDTextGenreCodeEasyListening@9$kDRCDTextGenreCodeErotic@10$kDRCDTextGenreCodeFolk@11$kDRCDTextGenreCodeGospel@12$kDRCDTextGenreCodeHipHop@13$kDRCDTextGenreCodeJazz@14$kDRCDTextGenreCodeLatin@15$kDRCDTextGenreCodeMusical@16$kDRCDTextGenreCodeNewAge@17$kDRCDTextGenreCodeOpera@18$kDRCDTextGenreCodeOperetta@19$kDRCDTextGenreCodePop@20$kDRCDTextGenreCodeRap@21$kDRCDTextGenreCodeReggae@22$kDRCDTextGenreCodeRhythmAndBlues@24$kDRCDTextGenreCodeRock@23$kDRCDTextGenreCodeSoundEffects@25$kDRCDTextGenreCodeSoundtrack@26$kDRCDTextGenreCodeSpokenWord@27$kDRCDTextGenreCodeUnknown@1$kDRCDTextGenreCodeWorldMusic@28$kDRDataFormAudio@0$kDRDataFormDVDData@16$kDRDataFormMode1Data@16$kDRDataFormMode2Data@32$kDRDataFormMode2Form1Data@32$kDRDataFormMode2Form2Data@32$kDRDataProductionErr@2147614818$kDRDeviceAccessErr@2147614752$kDRDeviceBurnStrategyNotAvailableErr@2147615232$kDRDeviceBusyErr@2147614753$kDRDeviceCantWriteCDTextErr@2147615233$kDRDeviceCantWriteISRCErr@2147615235$kDRDeviceCantWriteIndexPointsErr@2147615234$kDRDeviceCantWriteSCMSErr@2147615236$kDRDeviceCommunicationErr@2147614754$kDRDeviceInvalidErr@2147614755$kDRDeviceNotReadyErr@2147614756$kDRDeviceNotSupportedErr@2147614757$kDRDevicePreGapLengthNotValidErr@2147615237$kDRDoubleLayerL0AlreadySpecifiedErr@2147614827$kDRDoubleLayerL0DataZoneBlocksParamErr@2147614826$kDRFileForkData@0$kDRFileForkResource@1$kDRFileForkSizeActual@0$kDRFileForkSizeEstimate@1$kDRFileLocationConflictErr@2147614977$kDRFileMessageForkSize@1718839674$kDRFileMessagePostBurn@1886352244$kDRFileMessagePreBurn@1886545184$kDRFileMessageProduceData@1886547812$kDRFileMessageRelease@1652122912$kDRFileMessageVerificationSta
rting@1987208825$kDRFileModifiedDuringBurnErr@2147614976$kDRFilesystemMaskDefault@4294967295$kDRFilesystemMaskHFSPlus@8$kDRFilesystemMaskISO9660@1$kDRFilesystemMaskJoliet@2$kDRFilesystemMaskUDF@4$kDRFilesystemsNotSupportedErr@2147614979$kDRFirstErr@2147614720$kDRFlagNoMoreData@1$kDRFlagSubchannelDataRequested@2$kDRFunctionNotSupportedErr@2147614823$kDRInternalErr@2147614720$kDRInvalidIndexPointsErr@2147614825$kDRLinkTypeFinderAlias@3$kDRLinkTypeHardLink@1$kDRLinkTypeSymbolicLink@2$kDRMediaBusyErr@2147614784$kDRMediaInvalidErr@2147614790$kDRMediaNotBlankErr@2147614788$kDRMediaNotErasableErr@2147614789$kDRMediaNotPresentErr@2147614785$kDRMediaNotSupportedErr@2147614787$kDRMediaNotWritableErr@2147614786$kDRSessionFormatAudio@0$kDRSessionFormatCDI@16$kDRSessionFormatCDXA@32$kDRSessionFormatDVDData@0$kDRSessionFormatMode1Data@0$kDRSpeedTestAlreadyRunningErr@2147614824$kDRTooManyNameConflictsErr@2147614978$kDRTooManyTracksForDVDErr@2147614820$kDRTrackMessageEstimateLength@1702065257$kDRTrackMessagePostBurn@1886352244$kDRTrackMessagePreBurn@1886545184$kDRTrackMessageProduceData@1886547812$kDRTrackMessageProducePreGap@1886548082$kDRTrackMessageVerificationDone@1986293614$kDRTrackMessageVerificationStarting@1987277938$kDRTrackMessageVerifyData@1987208825$kDRTrackMessageVerifyPreGap@1987211378$kDRTrackMode1Data@4$kDRTrackMode2Data@4$kDRTrackMode2Form1Data@4$kDRTrackMode2Form2Data@4$kDRTrackModeAudio@0$kDRTrackModeDVDData@5$kDRTrackReusedErr@2147614831$kDRUserCanceledErr@2147614822$kDRVerificationFailedErr@2147614819$''' -misc.update({}) -functions={'DREraseCreate': (b'^{__DRErase=}^{__DRDevice=}', '', {'retval': {'already_cfretained': True}}), 'DRDeviceCopyStatus': (b'^{__CFDictionary=}^{__DRDevice=}', '', {'retval': {'already_cfretained': True}}), 'DRFilesystemTrackCreate': (b'^{__DRTrack=}^{__DRFolder=}', '', {'retval': {'already_cfretained': True}}), 'DRNotificationCenterCreateRunLoopSource': (b'^{__CFRunLoopSource=}^{__DRNotificationCenter=}', '', {'retval': {'already_cfretained': True}}), 'DRCDTextBlockGetValue': (sel32or64(b'@^{__DRCDTextBlock=}l^{__CFString=}', b'@^{__DRCDTextBlock=}q^{__CFString=}'),), 'DRDeviceAcquireExclusiveAccess': (sel32or64(b'l^{__DRDevice=}', b'i^{__DRDevice=}'),), 'DRNotificationCenterCreate': (b'^{__DRNotificationCenter=}', '', {'retval': {'already_cfretained': True}}), 'DRBurnGetDevice': (b'^{__DRDevice=}^{__DRBurn=}',), 'DREraseCopyStatus': (b'^{__CFDictionary=}^{__DRErase=}', '', {'retval': {'already_cfretained': True}}), 'DRFilesystemTrackEstimateOverhead': (sel32or64(b'QQLL', b'QQII'),), 'DRFSObjectSetBaseName': (b'v@^{__CFString=}',), 'DRDeviceGetTypeID': (sel32or64(b'L', b'Q'),), 'DRBurnGetProperties': (b'^{__CFDictionary=}^{__DRBurn=}',), 'DREraseGetDevice': (b'^{__DRDevice=}^{__DRErase=}',), 'DRCopyLocalizedStringForAdditionalSense': (b'^{__CFString=}CC', '', {'retval': {'already_cfretained': True}}), 'DRFSObjectIsVirtual': (b'Z@',), 'DRNotificationCenterAddObserver': (b'v^{__DRNotificationCenter=}^v^?^{__CFString=}@', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DRNotificationCenter=}'}, 1: {'type': b'^v'}, 2: {'type': b'^{__CFString=}'}, 3: {'type': b'@'}, 4: {'type': b'^{__CFDictionary=}'}}}}}}), 'DRFolderCopyChildren': (b'^{__CFArray=}^{__DRFolder=}', '', {'retval': {'already_cfretained': True}}), 'DRDeviceIsValid': (b'Z^{__DRDevice=}',), 'DRCopyLocalizedStringForValue': (b'^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'DRNotificationCenterRemoveObserver': 
(b'v^{__DRNotificationCenter=}^v^{__CFString=}@',), 'DRBurnSetProperties': (b'v^{__DRBurn=}^{__CFDictionary=}',), 'DRBurnGetTypeID': (sel32or64(b'L', b'Q'),), 'DRFSObjectSetSpecificName': (b'v@^{__CFString=}^{__CFString=}',), 'DRFolderCreateReal': (b'^{__DRFolder=}^{FSRef=[80C]}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}}}), 'DRTrackSpeedTest': (sel32or64(b'f^{__DRTrack=}LL', b'f^{__DRTrack=}II'),), 'DRCDTextBlockCreateArrayFromPackList': (b'^{__CFArray=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'DRCDTextBlockSetTrackDictionaries': (b'v^{__DRCDTextBlock=}^{__CFArray=}',), 'DRDeviceReleaseMediaReservation': (b'v^{__DRDevice=}',), 'DRCDTextBlockGetProperties': (b'^{__CFDictionary=}^{__DRCDTextBlock=}',), 'DRDeviceCloseTray': (sel32or64(b'l^{__DRDevice=}', b'i^{__DRDevice=}'),), 'DRBurnCopyStatus': (b'^{__CFDictionary=}^{__DRBurn=}', '', {'retval': {'already_cfretained': True}}), 'DRDeviceKPSForXFactor': (b'f@f',), 'DRFSObjectGetFilesystemMask': (sel32or64(b'L@^L^L', b'I@^I^I'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'DRFolderCreateVirtual': (b'^{__DRFolder=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'DREraseGetTypeID': (sel32or64(b'L', b'Q'),), 'DRDeviceOpenTray': (sel32or64(b'l^{__DRDevice=}', b'i^{__DRDevice=}'),), 'DRFSObjectSetFilesystemProperties': (b'v@^{__CFString=}^{__CFDictionary=}',), 'DRFSObjectCopyBaseName': (b'^{__CFString=}@', '', {'retval': {'already_cfretained': True}}), 'DRFileCreateRealWithURL': (b'^{__DRFile=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'DRDeviceXFactorForKPS': (b'f@f',), 'DRFSObjectCopySpecificNames': (b'^{__CFDictionary=}@', '', {'retval': {'already_cfretained': True}}), 'DRFolderConvertRealToVirtual': (b'v^{__DRFolder=}',), 'DRCopyDeviceArray': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'DRCDTextBlockGetTrackDictionaries': (b'^{__CFArray=}^{__DRCDTextBlock=}',), 'DRFSObjectSetFilesystemProperty': (b'v@^{__CFString=}^{__CFString=}@',), 'DRFileCreateVirtualWithData': (sel32or64(b'^{__DRFile=}^{__CFString=}^vL', b'^{__DRFile=}^{__CFString=}^vI'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'DREraseGetProperties': (b'^{__CFDictionary=}^{__DRErase=}',), 'DRDeviceCopyDeviceForIORegistryEntryPath': (b'^{__DRDevice=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'DRFSObjectCopyFilesystemProperty': (b'@@^{__CFString=}^{__CFString=}Z', '', {'retval': {'already_cfretained': True}}), 'DRFileCreateReal': (b'^{__DRFile=}^{FSRef=[80C]}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}}}), 'DRCopyLocalizedStringForSenseCode': (b'^{__CFString=}C', '', {'retval': {'already_cfretained': True}}), 'DRCDTextBlockFlatten': (sel32or64(b'L^{__DRCDTextBlock=}', b'I^{__DRCDTextBlock=}'),), 'DRSetRefCon': (sel32or64(b'v@^v^{DRRefConCallbacks=L^?^?}', b'v@^v^{DRRefConCallbacks=Q^?^?}'),), 'DRFSObjectCopyRealURL': (b'^{__CFURL=}@', '', {'retval': {'already_cfretained': True}}), 'DRFileCreateVirtualLink': (sel32or64(b'^{__DRFile=}@L^{__CFString=}', b'^{__DRFile=}@I^{__CFString=}'), '', {'retval': {'already_cfretained': True}}), 'DRFSObjectGetRealFSRef': (b'v@^{FSRef=[80C]}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'DRDeviceEjectMedia': (sel32or64(b'l^{__DRDevice=}', b'i^{__DRDevice=}'),), 'DRAudioTrackCreateWithURL': (b'^{__DRTrack=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 
'DRCopyLocalizedStringForDiscRecordingError': (sel32or64(b'^{__CFString=}l', b'^{__CFString=}i'), '', {'retval': {'already_cfretained': True}}), 'DRFSObjectCopyFilesystemProperties': (b'^{__CFDictionary=}@^{__CFString=}Z', '', {'retval': {'already_cfretained': True}}), 'DRTrackGetProperties': (b'^{__CFDictionary=}^{__DRTrack=}',), 'DRFolderAddChild': (b'v^{__DRFolder=}@',), 'DRNotificationCenterGetTypeID': (sel32or64(b'L', b'Q'),), 'DRFolderCountChildren': (sel32or64(b'L^{__DRFolder=}', b'I^{__DRFolder=}'),), 'DRFileCreateVirtualWithCallback': (b'^{__DRFile=}^{__CFString=}^?^v', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^{__DRFile=}'}, 2: {'type': b'I'}, 3: {'type': b'^v'}}}}}}), 'DRBurnCreate': (b'^{__DRBurn=}^{__DRDevice=}', '', {'retval': {'already_cfretained': True}}), 'DRDeviceReleaseExclusiveAccess': (b'v^{__DRDevice=}',), 'DRGetRefCon': (b'^v@',), 'DRGetVersion': (b'{NumVersion=CCCC}',), 'DRFSObjectSetSpecificNames': (b'v@^{__CFDictionary=}',), 'DRDeviceAcquireMediaReservation': (b'v^{__DRDevice=}',), 'DRTrackSetProperties': (b'v^{__DRTrack=}^{__CFDictionary=}',), 'DRFSObjectCopySpecificName': (b'^{__CFString=}@^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'DRCDTextBlockSetValue': (sel32or64(b'v^{__DRCDTextBlock=}l^{__CFString=}@', b'v^{__DRCDTextBlock=}q^{__CFString=}@'),), 'DREraseSetProperties': (b'v^{__DRErase=}^{__CFDictionary=}',), 'DREraseStart': (sel32or64(b'l^{__DRErase=}', b'i^{__DRErase=}'),), 'DRFolderRemoveChild': (b'v^{__DRFolder=}@',), 'DRFolderGetTypeID': (sel32or64(b'L', b'Q'),), 'DRBurnWriteLayout': (sel32or64(b'l^{__DRBurn=}@', b'i^{__DRBurn=}@'),), 'DRTrackEstimateLength': (b'Q^{__DRTrack=}',), 'DRBurnAbort': (b'v^{__DRBurn=}',), 'DRFSObjectGetParent': (b'^{__DRFolder=}@',), 'DRFSObjectSetFilesystemMask': (sel32or64(b'v@L', b'v@I'),), 'DRFileGetTypeID': (sel32or64(b'L', b'Q'),), 'DRCDTextBlockSetProperties': (b'v^{__DRCDTextBlock=}^{__CFDictionary=}',), 'DRDeviceCopyDeviceForBSDName': (b'^{__DRDevice=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'DRFSObjectCopyMangledNames': (b'^{__CFDictionary=}@', '', {'retval': {'already_cfretained': True}}), 'DRCDTextBlockGetTypeID': (sel32or64(b'L', b'Q'),), 'DRAudioTrackCreate': (b'^{__DRTrack=}^{FSRef=[80C]}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}}}), 'DRTrackCreate': (b'^{__DRTrack=}^{__CFDictionary=}^?', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{__DRTrack=}'}, 1: {'type': b'I'}, 2: {'type': b'^v'}}}}}}), 'DRFolderCreateRealWithURL': (b'^{__DRFolder=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'DRCDTextBlockCreate': (sel32or64(b'^{__DRCDTextBlock=}^{__CFString=}L', b'^{__DRCDTextBlock=}^{__CFString=}I'), '', {'retval': {'already_cfretained': True}}), 'DRFSObjectCopyMangledName': (b'^{__CFString=}@^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'DRDeviceCopyInfo': (b'^{__CFDictionary=}^{__DRDevice=}', '', {'retval': {'already_cfretained': True}}), 'DRTrackGetTypeID': (sel32or64(b'L', b'Q'),)} -aliases = {'DRAudioTrackRef': 'DRTrackRef', 'DRCDTextEncodingASCII': 'NSASCIIStringEncoding', 'kDRCDTextEncodingASCII': 'kCFStringEncodingASCII', 'DRCDTextEncodingISOLatin1Modified': 'NSISOLatin1StringEncoding', 'kDRInternalErr': 'kDRFirstErr', 'kDRCDTextEncodingISOLatin1Modified': 'kCFStringEncodingISOLatin1', 
'DRFilesystemTrackRef': 'DRTrackRef'} -cftypes=[('DRBurnRef', b'^{__DRBurn=}', None, None), ('DRCDTextBlockRef', b'^{__DRCDTextBlock=}', None, None), ('DRDeviceRef', b'^{__DRDevice=}', None, None), ('DREraseRef', b'^{__DRErase=}', None, None), ('DRFileRef', b'^{__DRFile=}', None, None), ('DRFolderRef', b'^{__DRFolder=}', None, None), ('DRNotificationCenterRef', b'^{__DRNotificationCenter=}', None, None), ('DRTrackRef', b'^{__DRTrack=}', None, None)] -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'DRBurn', b'appendable', {'retval': {'type': b'Z'}}) - r(b'DRBurn', b'setAppendable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'DRBurn', b'setVerifyDisc:', {'arguments': {2: {'type': b'Z'}}}) - r(b'DRBurn', b'verifyDisc', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'acquireExclusiveAccess', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'closeTray', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'ejectMedia', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'isEqualToDevice:', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'isValid', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'mediaIsAppendable', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'mediaIsBlank', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'mediaIsBusy', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'mediaIsErasable', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'mediaIsOverwritable', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'mediaIsPresent', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'mediaIsReserved', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'mediaIsTransitioning', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'openTray', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'trayIsOpen', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'writesCD', {'retval': {'type': b'Z'}}) - r(b'DRDevice', b'writesDVD', {'retval': {'type': b'Z'}}) - r(b'DRFSObject', b'isVirtual', {'retval': {'type': b'Z'}}) - r(b'DRFSObject', b'propertiesForFilesystem:mergeWithOtherFilesystems:', {'arguments': {3: {'type': b'Z'}}}) - r(b'DRFSObject', b'propertyForKey:inFilesystem:mergeWithOtherFilesystems:', {'arguments': {4: {'type': b'Z'}}}) - r(b'DRMSF', b'isEqualToMSF:', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'calculateSizeOfFile:fork:estimating:', {'required': True, 'retval': {'type': b'Q'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'L', b'I')}, 4: {'type': b'Z'}}}) - r(b'NSObject', b'cleanupFileAfterBurn:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'cleanupTrackAfterBurn:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'cleanupTrackAfterVerification:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'estimateLengthOfTrack:', {'required': True, 'retval': {'type': b'Q'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'prepareFileForBurn:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'prepareFileForVerification:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'prepareTrack:forBurn:toMedia:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'prepareTrackForVerification:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'produceDataForTrack:intoBuffer:length:atAddress:blockSize:ioFlags:', {'required': True, 
'retval': {'type': b'I'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^v', 'type_modifier': b'o', 'c_array_length_in_arg': 4}, 4: {'type': b'I'}, 5: {'type': b'Q'}, 6: {'type': b'I'}, 7: {'type': b'^I', 'type_modifier': b'N'}}}) - r(b'NSObject', b'produceFile:fork:intoBuffer:length:atAddress:blockSize:', {'required': True, 'retval': {'type': b'I'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'L', b'I')}, 4: {'type': '^v', 'type_modifier': b'o', 'c_array_length_in_arg': 5}, 5: {'type': b'I'}, 6: {'type': b'Q'}, 7: {'type': b'I'}}}) - r(b'NSObject', b'producePreGapForTrack:intoBuffer:length:atAddress:blockSize:ioFlags:', {'required': True, 'retval': {'type': b'I'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^v', 'type_modifier': b'o', 'c_array_length_in_arg': 4}, 4: {'type': b'I'}, 5: {'type': b'Q'}, 6: {'type': b'I'}, 7: {'type': b'^I', 'type_modifier': b'N'}}}) - r(b'NSObject', b'verifyDataForTrack:inBuffer:length:atAddress:blockSize:ioFlags:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 4}, 4: {'type': b'I'}, 5: {'type': b'Q'}, 6: {'type': b'I'}, 7: {'type': b'^I', 'type_modifier': b'N'}}}) - r(b'NSObject', b'verifyPreGapForTrack:inBuffer:length:atAddress:blockSize:ioFlags:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 4}, 4: {'type': b'I'}, 5: {'type': b'Q'}, 6: {'type': b'I'}, 7: {'type': b'^I', 'type_modifier': b'N'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/DiscRecording/_metadata.pyc b/env/lib/python2.7/site-packages/DiscRecording/_metadata.pyc deleted file mode 100644 index 14ecb931..00000000 Binary files a/env/lib/python2.7/site-packages/DiscRecording/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DiscRecordingUI/__init__.py b/env/lib/python2.7/site-packages/DiscRecordingUI/__init__.py deleted file mode 100644 index 5ae0d37d..00000000 --- a/env/lib/python2.7/site-packages/DiscRecordingUI/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the DiscRecordingUI framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation -import DiscRecording - -from DiscRecordingUI import _metadata - - -sys.modules['DiscRecordingUI'] = mod = objc.ObjCLazyModule( - "DiscRecordingUI", - "com.apple.DiscRecordingUI", - objc.pathForFramework("/System/Library/Frameworks/DiscRecordingUI.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (DiscRecording, Foundation,)) - -import sys -del sys.modules['DiscRecordingUI._metadata'] diff --git a/env/lib/python2.7/site-packages/DiscRecordingUI/__init__.pyc b/env/lib/python2.7/site-packages/DiscRecordingUI/__init__.pyc deleted file mode 100644 index dd60e4f3..00000000 Binary files a/env/lib/python2.7/site-packages/DiscRecordingUI/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DiscRecordingUI/_metadata.py b/env/lib/python2.7/site-packages/DiscRecordingUI/_metadata.py deleted file mode 100644 index 1aa30dfd..00000000 --- a/env/lib/python2.7/site-packages/DiscRecordingUI/_metadata.py +++ /dev/null @@ -1,48 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jul 1 17:39:54 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'DRBurnSessionProgressDialogOptions': objc.createStructType('DRBurnSessionProgressDialogOptions', sel32or64(b'{DRBurnSessionProgressDialogOptions=LL^{__CFString=}}', b'{DRBurnSessionProgressDialogOptions=II^{__CFString=}}'), ['version', 'dialogOptionFlags', 'description']), 'DREraseSessionSetupDialogOptions': objc.createStructType('DREraseSessionSetupDialogOptions', sel32or64(b'{DREraseSessionSetupDialogOptions=LL}', b'{DREraseSessionSetupDialogOptions=II}'), ['version', 'dialogOptionFlags']), 'DREraseSessionProgressDialogOptions': objc.createStructType('DREraseSessionProgressDialogOptions', sel32or64(b'{DREraseSessionProgressDialogOptions=LL^{__CFString=}}', b'{DREraseSessionProgressDialogOptions=II^{__CFString=}}'), ['version', 'dialogOptionFlags', 'description']), 'DRBurnSessionSetupDialogOptions': objc.createStructType('DRBurnSessionSetupDialogOptions', sel32or64(b'{DRBurnSessionSetupDialogOptions=LL^{__CFString=}}', b'{DRBurnSessionSetupDialogOptions=II^{__CFString=}}'), ['version', 'dialogOptionFlags', 'defaultButtonTitle'])}) -constants = '''$DRBurnIcon$DRBurnProgressPanelDidFinishNotification$DRBurnProgressPanelWillBeginNotification$DRBurnSetupPanelDefaultButtonDefaultTitle$DREraseIcon$DREraseProgressPanelDidFinishNotification$DREraseProgressPanelWillBeginNotification$DRSetupPanelDeviceSelectionChangedNotification$DRSetupPanelSelectedDeviceKey$''' -enums = 
'''$kBurnSessionProgressDialogDefaultOptions@0$kBurnSessionProgressDialogDisplayVerboseProgress@1$kBurnSessionProgressDialogOptionsCurrentVersion@1$kBurnSessionSetupDialogAllowTestBurns@2147483652$kBurnSessionSetupDialogDefaultOptions@0$kBurnSessionSetupDialogDontHandleReservations@2$kBurnSessionSetupDialogForceClosedDiscs@1$kBurnSessionSetupDialogOptionsCurrentVersion@1$kDRBurnProgressSetupCallbacksCurrentVersion@1$kDRBurnSessionCancel@0$kDRBurnSessionOK@1$kDRBurnSessionSetupCallbacksCurrentVersion@1$kDREraseProgressSetupCallbacksCurrentVersion@1$kDREraseSessionCancel@0$kDREraseSessionOK@1$kDREraseSessionSetupCallbacksCurrentVersion@1$kEraseSessionProgressDialogDefaultOptions@0$kEraseSessionProgressDialogOptionsCurrentVersion@1$kEraseSessionSetupDialogDefaultOptions@0$kEraseSessionSetupDialogDontHandleReservations@1$kEraseSessionSetupDialogOptionsCurrentVersion@1$''' -misc.update({}) -functions={'DRBurnSessionSetBurn': (b'v^{__DRBurnSession=}^{__DRBurn=}',), 'DRBurnSessionCreate': (b'^{__DRBurnSession=}', '', {'retval': {'already_cfretained': True}}), 'DREraseSessionSetupDialog': (sel32or64(b'c^{__DREraseSession=}^{DREraseSessionSetupDialogOptions=LL}^{DREraseSessionSetupCallbacks=L^?^?^?}', b'c^{__DREraseSession=}^{DREraseSessionSetupDialogOptions=II}^{DREraseSessionSetupCallbacks=I^?^?^?}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'DRBurnSessionGetTypeID': (sel32or64(b'L', b'Q'),), 'DREraseSessionCreate': (b'^{__DREraseSession=}', '', {'retval': {'already_cfretained': True}}), 'DREraseSessionGetTypeID': (sel32or64(b'L', b'Q'),), 'DRBurnSessionSetupDialog': (sel32or64(b'c^{__DRBurnSession=}^{DRBurnSessionSetupDialogOptions=LL^{__CFString=}}^{DRBurnSessionSetupCallbacks=L^?^?^?}', b'c^{__DRBurnSession=}^{DRBurnSessionSetupDialogOptions=II^{__CFString=}}^{DRBurnSessionSetupCallbacks=I^?^?^?}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'DRBurnSessionGetBurn': (b'^{__DRBurn=}^{__DRBurnSession=}',), 'DREraseSessionGetErase': (b'^{__DRErase=}^{__DREraseSession=}',), 'DREraseSessionBeginProgressDialog': (sel32or64(b'v^{__DREraseSession=}^{DREraseSessionProgressDialogOptions=LL^{__CFString=}}^{DREraseSessionProgressCallbacks=L^?^?^?}', b'v^{__DREraseSession=}^{DREraseSessionProgressDialogOptions=II^{__CFString=}}^{DREraseSessionProgressCallbacks=I^?^?^?}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'DREraseSessionSetErase': (b'v^{__DREraseSession=}^{__DRErase=}',), 'DRBurnSessionBeginProgressDialog': (sel32or64(b'v^{__DRBurnSession=}@^{DRBurnSessionProgressDialogOptions=LL^{__CFString=}}^{DRBurnSessionProgressCallbacks=L^?^?^?}', b'v^{__DRBurnSession=}@^{DRBurnSessionProgressDialogOptions=II^{__CFString=}}^{DRBurnSessionProgressCallbacks=I^?^?^?}'), '', {'arguments': {2: {'type_modifier': 'n'}, 3: {'type_modifier': 'n'}}})} -cftypes=[('DRBurnSessionRef', b'^{__DRBurnSession=}', None, None), ('DREraseSessionRef', b'^{__DREraseSession=}', None, None)] -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'DRBurnProgressPanel', b'setVerboseProgressStatus:', {'arguments': {2: {'type': b'Z'}}}) - r(b'DRBurnProgressPanel', b'verboseProgressStatus', {'retval': {'type': b'Z'}}) - r(b'DRBurnSetupPanel', b'setCanSelectAppendableMedia:', {'arguments': {2: {'type': b'Z'}}}) - r(b'DRBurnSetupPanel', b'setCanSelectTestBurn:', {'arguments': {2: {'type': b'Z'}}}) - r(b'DRSetupPanel', b'mediaStateChanged:', {'retval': {'type': b'Z'}}) - r(b'NSObject', 
b'burnProgressPanel:burnDidFinish:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'burnProgressPanelDidFinish:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'burnProgressPanelWillBegin:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'eraseProgressPanel:eraseDidFinish:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'eraseProgressPanelDidFinish:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'eraseProgressPanelWillBegin:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setupPanel:determineBestDeviceOfA:orB:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'setupPanel:deviceContainsSuitableMedia:promptString:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'^@'}}}) - r(b'NSObject', b'setupPanel:deviceCouldBeTarget:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'setupPanelDeviceSelectionChanged:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setupPanelShouldHandleMediaReservations:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -protocols={'DRBurnProgressPanelDelegateMethods': objc.informal_protocol('DRBurnProgressPanelDelegateMethods', [objc.selector(None, b'burnProgressPanelDidFinish:', b'v@:@', isRequired=False), objc.selector(None, b'burnProgressPanelWillBegin:', b'v@:@', isRequired=False), objc.selector(None, b'burnProgressPanel:burnDidFinish:', b'Z@:@@', isRequired=False)]), 'DRSetupPanelDelegate': objc.informal_protocol('DRSetupPanelDelegate', [objc.selector(None, b'setupPanel:deviceContainsSuitableMedia:promptString:', b'Z@:@@^@', isRequired=False), objc.selector(None, b'setupPanel:deviceCouldBeTarget:', b'Z@:@@', isRequired=False), objc.selector(None, b'setupPanelShouldHandleMediaReservations:', b'Z@:@', isRequired=False), objc.selector(None, b'setupPanelDeviceSelectionChanged:', b'v@:@', isRequired=False), objc.selector(None, b'setupPanel:determineBestDeviceOfA:orB:', b'@@:@@@', isRequired=False)]), 'DREraseProgressPanelDelegateMethods': objc.informal_protocol('DREraseProgressPanelDelegateMethods', [objc.selector(None, b'eraseProgressPanelWillBegin:', b'v@:@', isRequired=False), objc.selector(None, b'eraseProgressPanel:eraseDidFinish:', b'Z@:@@', isRequired=False), objc.selector(None, b'eraseProgressPanelDidFinish:', b'v@:@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/DiscRecordingUI/_metadata.pyc b/env/lib/python2.7/site-packages/DiscRecordingUI/_metadata.pyc deleted file mode 100644 index b00d05d4..00000000 Binary files a/env/lib/python2.7/site-packages/DiscRecordingUI/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DiskArbitration/__init__.py b/env/lib/python2.7/site-packages/DiskArbitration/__init__.py deleted file mode 100644 index 4b50a271..00000000 --- a/env/lib/python2.7/site-packages/DiskArbitration/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the DiskArbitration framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import CoreFoundation - -from DiskArbitration import _metadata - -sys.modules['DiskArbitration'] = mod = objc.ObjCLazyModule( - "DiskArbitration", - "com.apple.DiskArbitration", - objc.pathForFramework("/System/Library/Frameworks/DiskArbitration.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (CoreFoundation,)) - -import sys -del sys.modules['DiskArbitration._metadata'] diff --git a/env/lib/python2.7/site-packages/DiskArbitration/__init__.pyc b/env/lib/python2.7/site-packages/DiskArbitration/__init__.pyc deleted file mode 100644 index 5ba2e155..00000000 Binary files a/env/lib/python2.7/site-packages/DiskArbitration/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/DiskArbitration/_metadata.py b/env/lib/python2.7/site-packages/DiskArbitration/_metadata.py deleted file mode 100644 index cd1a5dda..00000000 --- a/env/lib/python2.7/site-packages/DiskArbitration/_metadata.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Feb 10 11:03:03 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$kDADiskDescriptionBusNameKey@^{__CFString=}$kDADiskDescriptionBusPathKey@^{__CFString=}$kDADiskDescriptionDeviceGUIDKey@^{__CFString=}$kDADiskDescriptionDeviceInternalKey@^{__CFString=}$kDADiskDescriptionDeviceModelKey@^{__CFString=}$kDADiskDescriptionDevicePathKey@^{__CFString=}$kDADiskDescriptionDeviceProtocolKey@^{__CFString=}$kDADiskDescriptionDeviceRevisionKey@^{__CFString=}$kDADiskDescriptionDeviceTDMLockedKey$kDADiskDescriptionDeviceUnitKey@^{__CFString=}$kDADiskDescriptionDeviceVendorKey@^{__CFString=}$kDADiskDescriptionMatchMediaUnformatted@^{__CFDictionary=}$kDADiskDescriptionMatchMediaWhole@^{__CFDictionary=}$kDADiskDescriptionMatchVolumeMountable@^{__CFDictionary=}$kDADiskDescriptionMatchVolumeUnrecognized@^{__CFDictionary=}$kDADiskDescriptionMediaBSDMajorKey@^{__CFString=}$kDADiskDescriptionMediaBSDMinorKey@^{__CFString=}$kDADiskDescriptionMediaBSDNameKey@^{__CFString=}$kDADiskDescriptionMediaBSDUnitKey@^{__CFString=}$kDADiskDescriptionMediaBlockSizeKey@^{__CFString=}$kDADiskDescriptionMediaContentKey@^{__CFString=}$kDADiskDescriptionMediaEjectableKey@^{__CFString=}$kDADiskDescriptionMediaEncryptedKey$kDADiskDescriptionMediaEncryptionDetailKey$kDADiskDescriptionMediaIconKey@^{__CFString=}$kDADiskDescriptionMediaKindKey@^{__CFString=}$kDADiskDescriptionMediaLeafKey@^{__CFString=}$kDADiskDescriptionMediaNameKey@^{__CFString=}$kDADiskDescriptionMediaPathKey@^{__CFString=}$kDADiskDescriptionMediaRemovableKey@^{__CFString=}$kDADiskDescriptionMediaSizeKey@^{__CFString=}$kDADiskDescriptionMediaTypeKey@^{__CFString=}$kDADiskDescriptionMediaUUIDKey@^{__CFString=}$kDADiskDescriptionMediaWholeKey@^{__CFString=}$kDADiskDescriptionMediaWritableKey@^{__CFString=}$kDADiskDescriptionVolumeKindKey@^{__CFString=}$kDADiskDescriptionVolumeMountableKey@^{__CFString=}$kDADiskDescriptionVolumeNameKey@^{__CFString=}$kDADiskDescriptionVolumeNetworkKey@^{__CFString=}$kDADiskDescriptionVolumePathKey@^{__CFString=}$kDADiskDescriptionVolumeUUIDKey@^{__CFString=}$kDADiskDescriptionWatchVolumeName@^{__CFArray=}$kDADiskDescriptionWatchVolumePath@^{__CFArray=}$''' -enums = 
'''$err_local_diskarbitration@14286848$kDADiskClaimOptionDefault@0$kDADiskEjectOptionDefault@0$kDADiskMountOptionDefault@0$kDADiskMountOptionWhole@1$kDADiskOptionDefault@0$kDADiskOptionEjectUponLogout@1$kDADiskOptionMountAutomatic@16$kDADiskOptionMountAutomaticNoDefer@32$kDADiskOptionPrivate@256$kDADiskRenameOptionDefault@0$kDADiskUnmountOptionDefault@0$kDADiskUnmountOptionForce@524288$kDADiskUnmountOptionWhole@1$kDAReturnBadArgument@4175036419$kDAReturnBusy@4175036418$kDAReturnError@4175036417$kDAReturnExclusiveAccess@4175036420$kDAReturnNoResources@4175036421$kDAReturnNotFound@4175036422$kDAReturnNotMounted@4175036423$kDAReturnNotPermitted@4175036424$kDAReturnNotPrivileged@4175036425$kDAReturnNotReady@4175036426$kDAReturnNotWritable@4175036427$kDAReturnSuccess@0$kDAReturnUnsupported@4175036428$''' -misc.update({}) -functions={'DASessionGetTypeID': (sel32or64(b'L', b'Q'),), 'DADiskClaim': (sel32or64(b'v^{__DADisk=}L^?^v^?^v', b'v^{__DADisk=}I^?^v^?^v'), '', {'arguments': {2: {'callable': {'retval': {'type': b'^{__DADissenter=}'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^v'}}}}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^{__DADissenter=}'}, 2: {'type': b'^v'}}}}}}), 'DARegisterDiskDescriptionChangedCallback': (b'v^{__DASession=}^{__CFDictionary=}^{__CFArray=}^?^v', '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^{__CFArray=}'}, 2: {'type': b'^v'}}}}}}), 'DADiskCreateFromBSDName': (b'^{__DADisk=}^{__CFAllocator=}^{__DASession=}^t', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'DAApprovalSessionCreate': (b'^{__DASession=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'DADiskGetBSDName': (b'^t^{__DADisk=}', '', {'retval': {'c_array_delimited_by_null': True}}), 'DARegisterDiskDisappearedCallback': (b'v^{__DASession=}^{__CFDictionary=}^?^v', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^v'}}}}}}), 'DASessionCreate': (b'^{__DASession=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'DARegisterDiskAppearedCallback': (b'v^{__DASession=}^{__CFDictionary=}^?^v', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^v'}}}}}}), 'DASessionUnscheduleFromRunLoop': (b'v^{__DASession=}^{__CFRunLoop=}^{__CFString=}',), 'DADiskGetTypeID': (sel32or64(b'L', b'Q'),), 'DADiskCopyIOMedia': (b'I^{__DADisk=}', '', {'retval': {'already_cfretained': True}}), 'DADissenterCreate': (b'^{__DADissenter=}^{__CFAllocator=}i^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'DADiskCreateFromIOMedia': (b'^{__DADisk=}^{__CFAllocator=}^{__DASession=}I', '', {'retval': {'already_cfretained': True}}), 'DASessionScheduleWithRunLoop': (b'v^{__DASession=}^{__CFRunLoop=}^{__CFString=}',), 'DAUnregisterCallback': (b'v^{__DASession=}^v^v',), 'DADissenterGetStatusString': (b'^{__CFString=}^{__DADissenter=}',), 'DADiskSetOptions': (sel32or64(b'i^{__DADisk=}LZ', b'i^{__DADisk=}IZ'),), 'DAApprovalSessionUnscheduleFromRunLoop': (b'v^{__DASession=}^{__CFRunLoop=}^{__CFString=}',), 'DASessionSetDispatchQueue': (b'v^{__DASession=}^{dispatch_queue_s=}',), 'DADiskCreateFromVolumePath': (b'^{__DADisk=}^{__CFAllocator=}^{__DASession=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'DADiskCopyDescription': 
(b'^{__CFDictionary=}^{__DADisk=}', '', {'retval': {'already_cfretained': True}}), 'DADiskIsClaimed': (b'Z^{__DADisk=}',), 'DADiskGetOptions': (sel32or64(b'L^{__DADisk=}', b'I^{__DADisk=}'),), 'DADiskMountWithArguments': (sel32or64(b'v^{__DADisk=}^{__CFURL=}L^?^v^^{__CFString=}', b'v^{__DADisk=}^{__CFURL=}I^?^v^^{__CFString=}'), '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^{__DADissenter=}'}, 2: {'type': b'^v'}}}}, 4: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'DAApprovalSessionScheduleWithRunLoop': (b'v^{__DASession=}^{__CFRunLoop=}^{__CFString=}',), 'DAApprovalSessionGetTypeID': (sel32or64(b'L', b'Q'),), 'DADiskRename': (sel32or64(b'v^{__DADisk=}^{__CFString=}L^?^v', b'v^{__DADisk=}^{__CFString=}I^?^v'), '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^{__DADissenter=}'}, 2: {'type': b'^v'}}}}}}), 'DADissenterGetStatus': (b'i^{__DADissenter=}',), 'DARegisterDiskPeekCallback': (sel32or64(b'v^{__DASession=}^{__CFDictionary=}l^?^v', b'v^{__DASession=}^{__CFDictionary=}q^?^v'), '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^v'}}}}}}), 'DADiskUnclaim': (b'v^{__DADisk=}',), 'DADiskEject': (sel32or64(b'v^{__DADisk=}L^?^v', b'v^{__DADisk=}I^?^v'), '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^{__DADissenter=}'}, 2: {'type': b'^v'}}}}}}), 'DARegisterDiskMountApprovalCallback': (b'v^{__DASession=}^{__CFDictionary=}^?^v', '', {'arguments': {2: {'callable': {'retval': {'type': b'^{__DADissenter=}'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^v'}}}}}}), 'DAUnregisterApprovalCallback': (b'v^{__DASession=}^v^v',), 'DARegisterDiskEjectApprovalCallback': (b'v^{__DASession=}^{__CFDictionary=}^?^v', '', {'arguments': {2: {'callable': {'retval': {'type': b'^{__DADissenter=}'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^v'}}}}}}), 'DARegisterDiskUnmountApprovalCallback': (b'v^{__DASession=}^{__CFDictionary=}^?^v', '', {'arguments': {2: {'callable': {'retval': {'type': b'^{__DADissenter=}'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^v'}}}}}}), 'DADiskCopyWholeDisk': (b'^{__DADisk=}^{__DADisk=}', '', {'retval': {'already_cfretained': True}}), 'DADiskUnmount': (sel32or64(b'v^{__DADisk=}L^?^v', b'v^{__DADisk=}I^?^v'), '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^{__DADissenter=}'}, 2: {'type': b'^v'}}}}}}), 'DADiskMount': (sel32or64(b'v^{__DADisk=}^{__CFURL=}L^?^v', b'v^{__DADisk=}^{__CFURL=}I^?^v'), '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__DADisk=}'}, 1: {'type': b'^{__DADissenter=}'}, 2: {'type': b'^v'}}}}}})} -cftypes=[('DAApprovalSessionRef', b'^{__DASession=}', 'DAApprovalSessionGetTypeID', None), ('DADiskRef', b'^{__DADisk=}', 'DADiskGetTypeID', None), ('DADissenterRef', b'^{__DADissenter=}', 'DADissenterGetTypeID', None), ('DASessionRef', b'^{__DASession=}', 'DASessionGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/DiskArbitration/_metadata.pyc b/env/lib/python2.7/site-packages/DiskArbitration/_metadata.pyc deleted file mode 100644 index 7e82087b..00000000 Binary files a/env/lib/python2.7/site-packages/DiskArbitration/_metadata.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/EventKit/__init__.py b/env/lib/python2.7/site-packages/EventKit/__init__.py deleted file mode 100644 index d16e0e88..00000000 --- a/env/lib/python2.7/site-packages/EventKit/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the EventKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from EventKit import _metadata - -sys.modules['EventKit'] = mod = objc.ObjCLazyModule( - "EventKit", - "com.apple.ical.EventKit", - objc.pathForFramework("/System/Library/Frameworks/EventKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['EventKit._metadata'] diff --git a/env/lib/python2.7/site-packages/EventKit/__init__.pyc b/env/lib/python2.7/site-packages/EventKit/__init__.pyc deleted file mode 100644 index 2c312b3e..00000000 Binary files a/env/lib/python2.7/site-packages/EventKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/EventKit/_metadata.py b/env/lib/python2.7/site-packages/EventKit/_metadata.py deleted file mode 100644 index d3e5b79b..00000000 --- a/env/lib/python2.7/site-packages/EventKit/_metadata.py +++ /dev/null @@ -1,57 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jul 21 22:44:45 2015 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$EKErrorDomain$EKEventStoreChangedNotification$''' -enums = 
'''$EKAlarmProximityEnter@1$EKAlarmProximityLeave@2$EKAlarmProximityNone@0$EKAlarmTypeAudio@1$EKAlarmTypeDisplay@0$EKAlarmTypeEmail@3$EKAlarmTypeProcedure@2$EKAuthorizationStatusAuthorized@3$EKAuthorizationStatusDenied@2$EKAuthorizationStatusNotDetermined@0$EKAuthorizationStatusRestricted@1$EKCalendarEventAvailabilityBusy@1$EKCalendarEventAvailabilityFree@2$EKCalendarEventAvailabilityNone@0$EKCalendarEventAvailabilityTentative@4$EKCalendarEventAvailabilityUnavailable@8$EKCalendarTypeBirthday@4$EKCalendarTypeCalDAV@1$EKCalendarTypeExchange@2$EKCalendarTypeLocal@0$EKCalendarTypeSubscription@3$EKEntityMaskEvent@1$EKEntityMaskReminder@2$EKEntityTypeEvent@0$EKEntityTypeReminder@1$EKErrorAlarmGreaterThanRecurrence@8$EKErrorAlarmProximityNotSupported@21$EKErrorCalendarDoesNotAllowEvents@22$EKErrorCalendarDoesNotAllowReminders@23$EKErrorCalendarHasNoSource@14$EKErrorCalendarIsImmutable@16$EKErrorCalendarReadOnly@6$EKErrorCalendarSourceCannotBeModified@15$EKErrorDatesInverted@4$EKErrorDurationGreaterThanRecurrence@7$EKErrorEventNotMutable@0$EKErrorEventStoreNotAuthorized@29$EKErrorInternalFailure@5$EKErrorInvalidEntityType@27$EKErrorInvalidSpan@13$EKErrorInvitesCannotBeMoved@12$EKErrorLast@31$EKErrorNoCalendar@1$EKErrorNoEndDate@3$EKErrorNoStartDate@2$EKErrorOSNotSupported@30$EKErrorObjectBelongsToDifferentStore@11$EKErrorPriorityIsInvalid@26$EKErrorProcedureAlarmsNotMutable@28$EKErrorRecurringReminderRequiresDueDate@18$EKErrorReminderLocationsNotSupported@20$EKErrorSourceDoesNotAllowCalendarAddDelete@17$EKErrorSourceDoesNotAllowEvents@25$EKErrorSourceDoesNotAllowReminders@24$EKErrorStartDateCollidesWithOtherOccurrence@10$EKErrorStartDateTooFarInFuture@9$EKErrorStructuredLocationsNotSupported@19$EKEventAvailabilityBusy@0$EKEventAvailabilityFree@1$EKEventAvailabilityNotSupported@-1$EKEventAvailabilityTentative@2$EKEventAvailabilityUnavailable@3$EKEventStatusCanceled@3$EKEventStatusConfirmed@1$EKEventStatusNone@0$EKEventStatusTentative@2$EKFriday@6$EKMonday@2$EKParticipantRoleChair@3$EKParticipantRoleNonParticipant@4$EKParticipantRoleOptional@2$EKParticipantRoleRequired@1$EKParticipantRoleUnknown@0$EKParticipantScheduleStatusCannotDeliver@7$EKParticipantScheduleStatusDelivered@3$EKParticipantScheduleStatusDeliveryFailed@6$EKParticipantScheduleStatusNoPrivileges@5$EKParticipantScheduleStatusNone@0$EKParticipantScheduleStatusPending@1$EKParticipantScheduleStatusRecipientNotAllowed@8$EKParticipantScheduleStatusRecipientNotRecognized@4$EKParticipantScheduleStatusSent@2$EKParticipantStatusAccepted@2$EKParticipantStatusCompleted@6$EKParticipantStatusDeclined@3$EKParticipantStatusDelegated@5$EKParticipantStatusInProcess@7$EKParticipantStatusPending@1$EKParticipantStatusTentative@4$EKParticipantStatusUnknown@0$EKParticipantTypeGroup@4$EKParticipantTypePerson@1$EKParticipantTypeResource@3$EKParticipantTypeRoom@2$EKParticipantTypeUnknown@0$EKRecurrenceFrequencyDaily@0$EKRecurrenceFrequencyMonthly@2$EKRecurrenceFrequencyWeekly@1$EKRecurrenceFrequencyYearly@3$EKReminderPriorityHigh@1$EKReminderPriorityLow@9$EKReminderPriorityMedium@5$EKReminderPriorityNone@0$EKSaturday@7$EKSourceTypeBirthdays@5$EKSourceTypeCalDAV@2$EKSourceTypeExchange@1$EKSourceTypeLocal@0$EKSourceTypeMobileMe@3$EKSourceTypeSubscribed@4$EKSpanFutureEvents@1$EKSpanThisEvent@0$EKSunday@1$EKThursday@5$EKTuesday@3$EKWednesday@4$EKWeekdayFriday@6$EKWeekdayMonday@2$EKWeekdaySaturday@7$EKWeekdaySunday@1$EKWeekdayThursday@5$EKWeekdayTuesday@3$EKWeekdayWednesday@4$''' -misc.update({}) -functions={'DATE_COMPONENTS_DO_NOT_USE': (b'v',), 
'DATETIME_COMPONENTS_DO_NOT_USE': (b'v',), 'EK_LOSE_FRACTIONAL_SECONDS_DO_NOT_USE': (b'v',)} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'EKCalendar', b'allowsContentModifications', {'retval': {'type': b'Z'}}) - r(b'EKCalendar', b'isImmutable', {'retval': {'type': b'Z'}}) - r(b'EKCalendar', b'isSubscribed', {'retval': {'type': b'Z'}}) - r(b'EKCalendarItem', b'hasAlarms', {'retval': {'type': b'Z'}}) - r(b'EKCalendarItem', b'hasAttendees', {'retval': {'type': b'Z'}}) - r(b'EKCalendarItem', b'hasNotes', {'retval': {'type': b'Z'}}) - r(b'EKCalendarItem', b'hasRecurrenceRules', {'retval': {'type': b'Z'}}) - r(b'EKCalendarItem', b'isAllDay', {'retval': {'type': 'Z'}}) - r(b'EKEvent', b'isAllDay', {'retval': {'type': b'Z'}}) - r(b'EKEvent', b'isDetached', {'retval': {'type': b'Z'}}) - r(b'EKEvent', b'refresh', {'retval': {'type': b'Z'}}) - r(b'EKEvent', b'setAllDay:', {'arguments': {2: {'type': b'Z'}}}) - r(b'EKEventStore', b'commit:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'EKEventStore', b'enumerateEventsMatchingPredicate:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^B'}}}}}}) - r(b'EKEventStore', b'fetchRemindersMatchingPredicate:completion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'EKEventStore', b'removeCalendar:commit:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type': b'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'EKEventStore', b'removeEvent:span:commit:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type': b'Z'}, 5: {'type_modifier': b'o'}}}) - r(b'EKEventStore', b'removeReminder:commit:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type': b'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'EKEventStore', b'requestAccessToEntityType:completion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'EKEventStore', b'saveCalendar:commit:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type': b'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'EKEventStore', b'saveEvent:span:commit:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type': b'Z'}, 5: {'type_modifier': b'o'}}}) - r(b'EKEventStore', b'saveReminder:commit:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type': b'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'EKObject', b'hasChanges', {'retval': {'type': b'Z'}}) - r(b'EKObject', b'isNew', {'retval': {'type': b'Z'}}) - r(b'EKObject', b'refresh', {'retval': {'type': b'Z'}}) - r(b'EKParticipant', b'isCurrentUser', {'retval': {'type': b'Z'}}) - r(b'EKReminder', b'isCompleted', {'retval': {'type': b'Z'}}) - r(b'EKReminder', b'setCompleted:', {'arguments': {2: {'type': b'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/EventKit/_metadata.pyc b/env/lib/python2.7/site-packages/EventKit/_metadata.pyc deleted file mode 100644 index 43a27c93..00000000 Binary files a/env/lib/python2.7/site-packages/EventKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ExceptionHandling/__init__.py b/env/lib/python2.7/site-packages/ExceptionHandling/__init__.py deleted file mode 100644 index a71fdb5f..00000000 --- a/env/lib/python2.7/site-packages/ExceptionHandling/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the ExceptionHandling 
framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Foundation - -from ExceptionHandling import _metadata - -sys.modules['ExceptionHandling'] = mod = objc.ObjCLazyModule('ExceptionHandling', - "com.apple.ExceptionHandling", - objc.pathForFramework("/System/Library/Frameworks/ExceptionHandling.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation,)) - -import sys -del sys.modules['ExceptionHandling._metadata'] diff --git a/env/lib/python2.7/site-packages/ExceptionHandling/__init__.pyc b/env/lib/python2.7/site-packages/ExceptionHandling/__init__.pyc deleted file mode 100644 index e68c1317..00000000 Binary files a/env/lib/python2.7/site-packages/ExceptionHandling/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ExceptionHandling/_metadata.py b/env/lib/python2.7/site-packages/ExceptionHandling/_metadata.py deleted file mode 100644 index 4995e316..00000000 --- a/env/lib/python2.7/site-packages/ExceptionHandling/_metadata.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Sep 19 17:33:23 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$NSStackTraceKey$NSUncaughtRuntimeErrorException$NSUncaughtSystemExceptionException$''' -enums = '''$NSHandleOtherExceptionMask@512$NSHandleTopLevelExceptionMask@128$NSHandleUncaughtExceptionMask@2$NSHandleUncaughtRuntimeErrorMask@32$NSHandleUncaughtSystemExceptionMask@8$NSHangOnOtherExceptionMask@16$NSHangOnTopLevelExceptionMask@8$NSHangOnUncaughtExceptionMask@1$NSHangOnUncaughtRuntimeErrorMask@4$NSHangOnUncaughtSystemExceptionMask@2$NSLogOtherExceptionMask@256$NSLogTopLevelExceptionMask@64$NSLogUncaughtExceptionMask@1$NSLogUncaughtRuntimeErrorMask@16$NSLogUncaughtSystemExceptionMask@4$''' -misc.update({}) -functions={'NSExceptionHandlerResume': (b'v',)} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'exceptionHandler:shouldHandleException:mask:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'exceptionHandler:shouldLogException:mask:', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'exceptionHandler:shouldHandleException:mask:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'exceptionHandler:shouldLogException:mask:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) -finally: - objc._updatingMetadata(False) -protocols={'NSExceptionHandlerDelegate': objc.informal_protocol('NSExceptionHandlerDelegate', [objc.selector(None, b'exceptionHandler:shouldLogException:mask:', sel32or64(b'Z@:@@I', b'Z@:@@Q'), isRequired=False), objc.selector(None, b'exceptionHandler:shouldHandleException:mask:', sel32or64(b'Z@:@@I', b'Z@:@@Q'), isRequired=False)])} -expressions = {'NSHangOnEveryExceptionMask': 
'(NSHangOnUncaughtExceptionMask|NSHangOnUncaughtSystemExceptionMask|NSHangOnUncaughtRuntimeErrorMask|NSHangOnTopLevelExceptionMask|NSHangOnOtherExceptionMask)', 'NSLogAndHandleEveryExceptionMask': '(NSLogUncaughtExceptionMask|NSLogUncaughtSystemExceptionMask|NSLogUncaughtRuntimeErrorMask|NSHandleUncaughtExceptionMask|NSHandleUncaughtSystemExceptionMask|NSHandleUncaughtRuntimeErrorMask|NSLogTopLevelExceptionMask|NSHandleTopLevelExceptionMask|NSLogOtherExceptionMask|NSHandleOtherExceptionMask)'} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/ExceptionHandling/_metadata.pyc b/env/lib/python2.7/site-packages/ExceptionHandling/_metadata.pyc deleted file mode 100644 index e68c1890..00000000 Binary files a/env/lib/python2.7/site-packages/ExceptionHandling/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ExternalAccessory/_ExternalAccessory.so b/env/lib/python2.7/site-packages/ExternalAccessory/_ExternalAccessory.so deleted file mode 100755 index e40650f9..00000000 Binary files a/env/lib/python2.7/site-packages/ExternalAccessory/_ExternalAccessory.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ExternalAccessory/__init__.py b/env/lib/python2.7/site-packages/ExternalAccessory/__init__.py deleted file mode 100644 index 22528ac7..00000000 --- a/env/lib/python2.7/site-packages/ExternalAccessory/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the ExternalAccessory framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from ExternalAccessory import _metadata -from ExternalAccessory._ExternalAccessory import * - -sys.modules['ExternalAccessory'] = mod = objc.ObjCLazyModule( - "ExternalAccessory", - "com.apple.externalaccessory", - objc.pathForFramework("/System/Library/Frameworks/ExternalAccessory.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['ExternalAccessory._metadata'] diff --git a/env/lib/python2.7/site-packages/ExternalAccessory/__init__.pyc b/env/lib/python2.7/site-packages/ExternalAccessory/__init__.pyc deleted file mode 100644 index 4ae53419..00000000 Binary files a/env/lib/python2.7/site-packages/ExternalAccessory/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ExternalAccessory/_metadata.py b/env/lib/python2.7/site-packages/ExternalAccessory/_metadata.py deleted file mode 100644 index a0263425..00000000 --- a/env/lib/python2.7/site-packages/ExternalAccessory/_metadata.py +++ /dev/null @@ -1,31 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jul 2 20:58:25 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$EAAccessoryDidConnectNotification$EAAccessoryDidDisconnectNotification$EAAccessoryKey$EAAccessorySelectedKey$EABluetoothAccessoryPickerErrorDomain$''' -enums = 
'''$EABluetoothAccessoryPickerAlreadyConnected@0$EABluetoothAccessoryPickerResultCancelled@2$EABluetoothAccessoryPickerResultFailed@3$EABluetoothAccessoryPickerResultNotFound@1$EAConnectionIDNone@0$EAWiFiUnconfiguredAccessoryBrowserStateConfiguring@3$EAWiFiUnconfiguredAccessoryBrowserStateSearching@2$EAWiFiUnconfiguredAccessoryBrowserStateStopped@1$EAWiFiUnconfiguredAccessoryBrowserStateWiFiUnavailable@0$EAWiFiUnconfiguredAccessoryConfigurationStatusFailed@2$EAWiFiUnconfiguredAccessoryConfigurationStatusSuccess@0$EAWiFiUnconfiguredAccessoryConfigurationStatusUserCancelledConfiguration@1$EAWiFiUnconfiguredAccessoryPropertySupportsAirPlay@1$EAWiFiUnconfiguredAccessoryPropertySupportsAirPrint@2$EAWiFiUnconfiguredAccessoryPropertySupportsHomeKit@4$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'EAAccessory', b'isConnected', {'retval': {'type': 'Z'}}) - r(b'EAAccessory', b'setConnected:', {'arguments': {2: {'type': 'Z'}}}) - r(b'EAAccessoryManager', b'showBluetoothAccessoryPickerWithNameFilter:completion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/ExternalAccessory/_metadata.pyc b/env/lib/python2.7/site-packages/ExternalAccessory/_metadata.pyc deleted file mode 100644 index 427ca5fe..00000000 Binary files a/env/lib/python2.7/site-packages/ExternalAccessory/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/FSEvents/__init__.py b/env/lib/python2.7/site-packages/FSEvents/__init__.py deleted file mode 100644 index 83afd5a6..00000000 --- a/env/lib/python2.7/site-packages/FSEvents/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the FSEvents framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import Foundation -import FSEvents._callbacks - -from FSEvents import _metadata - -sys.modules['FSEvents'] = mod = objc.ObjCLazyModule('FSEvents', - "com.apple.CoreServices", - objc.pathForFramework("/System/Library/Frameworks/CoreServices.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( FSEvents._callbacks, Foundation)) - -import sys -del sys.modules['FSEvents._metadata'] diff --git a/env/lib/python2.7/site-packages/FSEvents/__init__.pyc b/env/lib/python2.7/site-packages/FSEvents/__init__.pyc deleted file mode 100644 index 2385fe94..00000000 Binary files a/env/lib/python2.7/site-packages/FSEvents/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/FSEvents/_callbacks.so b/env/lib/python2.7/site-packages/FSEvents/_callbacks.so deleted file mode 100755 index 7e56f34c..00000000 Binary files a/env/lib/python2.7/site-packages/FSEvents/_callbacks.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/FSEvents/_metadata.py b/env/lib/python2.7/site-packages/FSEvents/_metadata.py deleted file mode 100644 index 5d77d12f..00000000 --- a/env/lib/python2.7/site-packages/FSEvents/_metadata.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jul 2 12:31:23 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'FSEventStreamContext': objc.createStructType('FSEventStreamContext', b'{FSEventStreamContext=l^v^?^?^?}', [])}) -constants = '''$$''' -enums = '''$kFSEventStreamCreateFlagFileEvents@16$kFSEventStreamCreateFlagIgnoreSelf@8$kFSEventStreamCreateFlagMarkSelf@32$kFSEventStreamCreateFlagNoDefer@2$kFSEventStreamCreateFlagNone@0$kFSEventStreamCreateFlagUseCFTypes@1$kFSEventStreamCreateFlagUseExtendedData@64$kFSEventStreamCreateFlagWatchRoot@4$kFSEventStreamEventFlagEventIdsWrapped@8$kFSEventStreamEventFlagHistoryDone@16$kFSEventStreamEventFlagItemChangeOwner@16384$kFSEventStreamEventFlagItemCloned@4194304$kFSEventStreamEventFlagItemCreated@256$kFSEventStreamEventFlagItemFinderInfoMod@8192$kFSEventStreamEventFlagItemInodeMetaMod@1024$kFSEventStreamEventFlagItemIsDir@131072$kFSEventStreamEventFlagItemIsFile@65536$kFSEventStreamEventFlagItemIsHardlink@1048576$kFSEventStreamEventFlagItemIsLastHardlink@2097152$kFSEventStreamEventFlagItemIsSymlink@262144$kFSEventStreamEventFlagItemModified@4096$kFSEventStreamEventFlagItemRemoved@512$kFSEventStreamEventFlagItemRenamed@2048$kFSEventStreamEventFlagItemXattrMod@32768$kFSEventStreamEventFlagKernelDropped@4$kFSEventStreamEventFlagMount@64$kFSEventStreamEventFlagMustScanSubDirs@1$kFSEventStreamEventFlagNone@0$kFSEventStreamEventFlagOwnEvent@524288$kFSEventStreamEventFlagRootChanged@32$kFSEventStreamEventFlagUnmount@128$kFSEventStreamEventFlagUserDropped@2$kFSEventStreamEventIdSinceNow@18446744073709551615$''' -misc.update({'kFSEventStreamEventExtendedDataPathKey': b'path'.decode("utf-8"), 'kFSEventStreamEventExtendedFileIDKey': b'fileID'.decode("utf-8")}) -functions={'FSEventStreamShow': (b'v^{__FSEventStream=}',), 'FSEventStreamGetLatestEventId': (b'Q^{__FSEventStream=}',), 'FSEventStreamRetain': (b'v^{__FSEventStream=}',), 'FSEventStreamSetDispatchQueue': (b'v^{__FSEventStream=}^{dispatch_queue_s=}',), 
'FSEventsCopyUUIDForDevice': (b'^{__CFUUID=}i', '', {'retval': {'already_retained': True}}), 'FSEventStreamSetExclusionPaths': (b'v^{__FSEventStream=}^{__CFArray=}', '', {'retval': {'type': 'Z'}}), 'FSEventStreamScheduleWithRunLoop': (b'v^{__FSEventStream=}^{__CFRunLoop=}^{__CFString=}',), 'FSEventStreamInvalidate': (b'v^{__FSEventStream=}',), 'FSEventStreamStop': (b'v^{__FSEventStream=}',), 'FSEventsPurgeEventsForDeviceUpToEventId': (b'ZiQ',), 'FSEventStreamGetDeviceBeingWatched': (b'i^{__FSEventStream=}',), 'FSEventStreamCopyDescription': (b'^{__CFString=}^{__FSEventStream=}', '', {'retval': {'already_retained': True}}), 'FSEventStreamCopyPathsBeingWatched': (b'^{__CFArray=}^{__FSEventStream=}', '', {'retval': {'already_cfretained': True}}), 'FSEventStreamUnscheduleFromRunLoop': (b'v^{__FSEventStream=}^{__CFRunLoop=}^{__CFString=}',), 'FSEventStreamRelease': (b'v^{__FSEventStream=}',), 'FSEventStreamStart': (b'Z^{__FSEventStream=}',), 'FSEventStreamFlushSync': (b'v^{__FSEventStream=}',), 'FSEventsGetLastEventIdForDeviceBeforeTime': (b'Qid',), 'FSEventStreamFlushAsync': (b'Q^{__FSEventStream=}',), 'FSEventsGetCurrentEventId': (b'Q',)} -misc.update({'FSEventStreamRef': objc.createOpaquePointerType('FSEventStreamRef', b'^{__FSEventStream=}')}) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/FSEvents/_metadata.pyc b/env/lib/python2.7/site-packages/FSEvents/_metadata.pyc deleted file mode 100644 index f13fe9fa..00000000 Binary files a/env/lib/python2.7/site-packages/FSEvents/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/FinderSync/__init__.py b/env/lib/python2.7/site-packages/FinderSync/__init__.py deleted file mode 100644 index d2dd9e23..00000000 --- a/env/lib/python2.7/site-packages/FinderSync/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the FinderSync framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from FinderSync import _metadata - -sys.modules['FinderSync'] = mod = objc.ObjCLazyModule( - "FinderSync", - "com.apple.FinderSync", - objc.pathForFramework("/System/Library/Frameworks/FinderSync.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['FinderSync._metadata'] diff --git a/env/lib/python2.7/site-packages/FinderSync/__init__.pyc b/env/lib/python2.7/site-packages/FinderSync/__init__.pyc deleted file mode 100644 index 8b173703..00000000 Binary files a/env/lib/python2.7/site-packages/FinderSync/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/FinderSync/_metadata.py b/env/lib/python2.7/site-packages/FinderSync/_metadata.py deleted file mode 100644 index 23f9fa8f..00000000 --- a/env/lib/python2.7/site-packages/FinderSync/_metadata.py +++ /dev/null @@ -1,40 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jul 31 22:06:04 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$FIMenuKindContextualMenuForContainer@1$FIMenuKindContextualMenuForItems@0$FIMenuKindContextualMenuForSidebar@2$FIMenuKindToolbarItemMenu@3$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'FIFinderSyncController', b'isExtensionEnabled', {'retval': {'type': 'Z'}}) - r(b'FIFinderSyncController', b'setLastUsedDate:forItemWithURL:completion:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'FIFinderSyncController', b'setTagData:forItemWithURL:completion:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSObject', b'beginObservingDirectoryAtURL:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'endObservingDirectoryAtURL:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'makeListenerEndpointForServiceName:itemURL:andReturnError:', {'arguments': {4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'menuForMenuKind:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'Q'}}}) - r(b'NSObject', b'requestBadgeIdentifierForURL:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'toolbarItemImage', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'toolbarItemName', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'toolbarItemToolTip', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'valuesForAttributes:forItemWithURL:completion:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/FinderSync/_metadata.pyc b/env/lib/python2.7/site-packages/FinderSync/_metadata.pyc deleted file mode 100644 index dfe44f32..00000000 Binary files a/env/lib/python2.7/site-packages/FinderSync/_metadata.pyc and /dev/null 
differ diff --git a/env/lib/python2.7/site-packages/Foundation/_Foundation.so b/env/lib/python2.7/site-packages/Foundation/_Foundation.so deleted file mode 100755 index a235fd61..00000000 Binary files a/env/lib/python2.7/site-packages/Foundation/_Foundation.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Foundation/__init__.py b/env/lib/python2.7/site-packages/Foundation/__init__.py deleted file mode 100644 index 3402f785..00000000 --- a/env/lib/python2.7/site-packages/Foundation/__init__.py +++ /dev/null @@ -1,221 +0,0 @@ -''' -Python mapping for the Foundation framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import CoreFoundation - -from Foundation import _metadata -from Foundation._inlines import _inline_list_ - -objc.addConvenienceForClass('NSAttributedString', ( - ('__len__', lambda self: self.length()), -)) - -# XXX -objc.addConvenienceForBasicMapping('NSMergeConflict', True) -objc.addConvenienceForBasicMapping('NSUbiquitousKeyValueStore', False) -objc.addConvenienceForBasicMapping('NSUserDefaults', False) - - -def _setup_conveniences(): - NSNull = objc.lookUpClass('NSNull') - def nscache_getitem(self, key): - value = self.objectForKey_(key) - if value is None: - raise KeyError(key) - - elif value is NSNull.null(): - return None - - else: - return value - - def nscache_get(self, key, default=None): - value = self.objectForKey_(key) - if value is None: - return default - elif value is NSNull.null(): - return None - return value - - def nscache_setitem(self, key, value): - if value is None: - value = NSNull.null() - self.setObject_forKey_(value, key) - - objc.addConvenienceForClass('NSCache', ( - ('__getitem__', nscache_getitem), - ('get', nscache_get), - ('__setitem__', nscache_setitem), - ('__delitem__', lambda self, key: self.removeObjectForKey_(key)), - ('clear', lambda self: self.removeAllObjects()), - )) - - - def hash_add(self, value): - if value is None: - value = NSNull.null() - self.addObject_(value) - - def hash_contains(self, value): - if value is None: - value = NSNull.null() - return self.containsObject_(value) - - def hash_remove(self, value): - if value is None: - value = NSNull.null() - self.removeObject_(value) - - def hash_pop(self): - value = self.anyObject() - self.removeObject_(value) - if value is NSNull.null(): - return None - else: - return value - - # XXX: add more of the set interface - objc.addConvenienceForClass('NSHashTable', ( - ('__len__', lambda self: self.count()), - ('clear', lambda self: self.removeAllObjects()), - ('__iter__', lambda self: iter(self.objectEnumerator())), - ('add', hash_add), - ('remove', hash_remove), - ('__contains__', hash_contains), - ('pop', hash_pop), - )) - - # XXX: These convenience wrappers don't work due to type issues - #def charset_contains(self, value): - # try: - # return self.characterIsMember_(value) - # except ValueErorr: - # # Wrong type - # return False - - #objc.addConvenienceForClass('NSCharacterSet', ( - # ('__len__', lambda self: self.count()), - # ('__contains__', charset_contains), - #)) - - - # XXX: add full set interface (even if other value can only be a set) - #objc.addConvenienceForClass('NSMutableCharacterSet', ( - - # XXX: add __new__, __getitem__ and __iter__ as well - objc.addConvenienceForClass('NSIndexPath', ( - ('__len__', lambda self: self.count()), - )) - - if sys.maxsize > 2 ** 32: - NSNotFound = 0x7fffffffffffffff - else: - 
NSNotFound = 0x7fffffff - - def indexset_iter(self): - value = self.firstIndex() - while value != NSNotFound: - yield value - value = self.indexGreaterThanIndex_(value) - - def indexset_reversed(self): - value = self.lastIndex() - while value != NSNotFound: - yield value - value = self.indexLessThanIndex_(value) - - NSIndexSet = objc.lookUpClass('NSIndexSet') - def indexset_eq(self, other): - if not isinstance(other, NSIndexSet): - return False - - return self.isEqualToIndexSet_(other) - - def indexset_ne(self, other): - if not isinstance(other, NSIndexSet): - return True - - return not self.isEqualToIndexSet_(other) - - def indexset_contains(self, value): - try: - return self.containsIndex_(value) - except ValueError: - return False - - - - # XXX: Add __new__ - objc.addConvenienceForClass('NSIndexSet', ( - ('__len__', lambda self: self.count()), - ('__iter__', indexset_iter), - ('__reversed__', indexset_reversed), - ('__eq__', indexset_eq), - ('__ne__', indexset_ne), - ('__contains__', indexset_contains), - - )) - - # Add 'update', '-=', '+=' - objc.addConvenienceForClass('NSMutableIndexSet', ( - ('clear', lambda self: self.removeAllIndexes()), - ('add', lambda self, value: self.addIndex_(value)), - ('remove', lambda self, value: self.removeIndex_(value)), - )) - - - objc.addConvenienceForClass('NSLocale', ( - ('__getitem__', lambda self, key: self.objectForKey_(key)), - )) - - -_setup_conveniences() - -sys.modules['Foundation'] = mod = objc.ObjCLazyModule('Foundation', - 'com.apple.Foundation', - objc.pathForFramework("/System/Library/Frameworks/Foundation.framework"), - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - 'objc': objc, - 'YES': objc.YES, - 'NO': objc.NO, - 'NSMaximumStringLength': sys.maxsize - 1, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (CoreFoundation,)) - -import sys -del sys.modules['Foundation._metadata'] - -import Foundation._Foundation -for nm in dir(Foundation._Foundation): - if nm.startswith('_'): continue - setattr(mod, nm, getattr(Foundation._Foundation, nm)) - -import objc -mod.NSDecimal = objc.NSDecimal - -import Foundation._nsobject -import Foundation._nsindexset - - -import Foundation._functiondefines -for nm in dir(Foundation._functiondefines): - setattr(mod, nm, getattr(Foundation._functiondefines, nm)) - - -# XXX: This is suboptimal, could calculate this in the metadata -# generator. 
-import sys -mod.NSIntegerMax = sys.maxsize -mod.NSIntegerMin = - sys.maxsize - 1 -mod.NSUIntegerMax = (sys.maxsize * 2) + 1 - -import Foundation._context -for nm in dir(Foundation._context): - setattr(mod, nm, getattr(Foundation._context, nm)) diff --git a/env/lib/python2.7/site-packages/Foundation/__init__.pyc b/env/lib/python2.7/site-packages/Foundation/__init__.pyc deleted file mode 100644 index 1623692a..00000000 Binary files a/env/lib/python2.7/site-packages/Foundation/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Foundation/_context.py b/env/lib/python2.7/site-packages/Foundation/_context.py deleted file mode 100644 index 8bd6ab7f..00000000 --- a/env/lib/python2.7/site-packages/Foundation/_context.py +++ /dev/null @@ -1,24 +0,0 @@ -import Foundation - -class NSDisabledAutomaticTermination (object): - def __init__(self, reason): - self._reason = reason - self._info = Foundation.NSProcessInfo.processInfo() - - def __enter__(self): - self._info.disableAutomaticTermination_(self._reason) - - def __exit__(self, exc_type, exc_val, exc_tb): - self._info.enableAutomaticTermination_(self._reason) - return False - -class NSDisabledSuddenTermination (object): - def __init__(self): - self._info = Foundation.NSProcessInfo.processInfo() - - def __enter__(self): - self._info.disableSuddenTermination() - - def __exit__(self, exc_type, exc_val, exc_tb): - self._info.enableSuddenTermination() - return False diff --git a/env/lib/python2.7/site-packages/Foundation/_context.pyc b/env/lib/python2.7/site-packages/Foundation/_context.pyc deleted file mode 100644 index 0b072a21..00000000 Binary files a/env/lib/python2.7/site-packages/Foundation/_context.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Foundation/_functiondefines.py b/env/lib/python2.7/site-packages/Foundation/_functiondefines.py deleted file mode 100644 index 217ceefa..00000000 --- a/env/lib/python2.7/site-packages/Foundation/_functiondefines.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -Port of "function defines". 
-"""
-from Foundation import NSBundle, NSProcessInfo
-
-def NSLocalizedString(key, comment):
-    return NSBundle.mainBundle().localizedStringForKey_value_table_(key, '', None)
-
-def NSLocalizedStringFromTable(key, tbl, comment):
-    return NSBundle.mainBundle().localizedStringForKey_value_table_(key, '', tbl)
-
-def NSLocalizedStringFromTableInBundle(key, tbl, bundle, comment):
-    return bundle.localizedStringForKey_value_table_(key, '', tbl)
-
-def NSLocalizedStringWithDefaultValue(key, tbl, bundle, val, comment):
-    return bundle.localizedStringForKey_value_table_(key, val, tbl)
-
-
-def MIN(a, b):
-    if a < b:
-        return a
-    else:
-        return b
-
-def MAX(a, b):
-    if a < b:
-        return b
-    else:
-        return a
-
-ABS = abs
diff --git a/env/lib/python2.7/site-packages/Foundation/_functiondefines.pyc b/env/lib/python2.7/site-packages/Foundation/_functiondefines.pyc
deleted file mode 100644
index cf886775..00000000
Binary files a/env/lib/python2.7/site-packages/Foundation/_functiondefines.pyc and /dev/null differ
diff --git a/env/lib/python2.7/site-packages/Foundation/_inlines.so
deleted file mode 100755
index 9d3baa5c..00000000
Binary files a/env/lib/python2.7/site-packages/Foundation/_inlines.so and /dev/null differ
diff --git a/env/lib/python2.7/site-packages/Foundation/_metadata.py
deleted file mode 100644
index 212eee26..00000000
--- a/env/lib/python2.7/site-packages/Foundation/_metadata.py
+++ /dev/null
@@ -1,1443 +0,0 @@
-# This file is generated by objective.metadata
-#
-# Last update: Tue Jun 5 20:21:27 2018
-
-import objc, sys
-
-if sys.maxsize > 2 ** 32:
-    def sel32or64(a, b): return b
-else:
-    def sel32or64(a, b): return a
-if sys.byteorder == 'little':
-    def littleOrBig(a, b): return a
-else:
-    def littleOrBig(a, b): return b
-
-misc = {
-}
-misc.update({'NSEdgeInsets': objc.createStructType('NSEdgeInsets', sel32or64(b'{NSEdgeInsets=ffff}', b'{NSEdgeInsets=dddd}'), ['top', 'left', 'bottom', 'right']), 'NSHashEnumerator': objc.createStructType('NSHashEnumerator', sel32or64(b'{_NSHashEnumerator=II^v}', b'{_NSHashEnumerator=QQ^v}'), ['_pi', '_si', '_bs']), 'NSAffineTransformStruct': objc.createStructType('NSAffineTransformStruct', sel32or64(b'{_NSAffineTransformStruct=ffffff}', b'{_NSAffineTransformStruct=dddddd}'), ['m11', 'm12', 'm21', 'm22', 'tX', 'tY']), 'NSRect': objc.createStructType('NSRect', sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}'), ['origin', 'size']), 'NSRange': objc.createStructType('NSRange', sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}'), ['location', 'length']), 'NSZone': objc.createStructType('NSZone', b'{_NSZone=}', []), 'NSDecimal': objc.createStructType('NSDecimal', b'{_NSDecimal=b8b4b1b1b18[8S]}', ['_exponent', '_length', '_isNegative', '_isCompact', '_reserved', '_mantissa']), 'NSSize': objc.createStructType('NSSize', sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}'), ['width', 'height']), 'NSPoint': objc.createStructType('NSPoint', sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}'), ['x', 'y']), 'NSSwappedDouble': objc.createStructType('NSSwappedDouble', b'{_NSSwappedDouble=Q}', ['v']), 'NSMapEnumerator': objc.createStructType('NSMapEnumerator', sel32or64(b'{_NSMapEnumerator=II^v}', b'{_NSMapEnumerator=QQ^v}'), ['_pi', '_si', '_bs']), 'NSSwappedFloat': objc.createStructType('NSSwappedFloat', b'{_NSSwappedFloat=I}', ['v']), 'NSOperatingSystemVersion': objc.createStructType('NSOperatingSystemVersion',
sel32or64(b'{_NSOperatingSystemVersion=iii}', b'{_NSOperatingSystemVersion=qqq}'), ['majorVersion', 'minorVersion', 'patchVersion'])}) -constants = '''$NSAMPMDesignation$NSAppleEventManagerWillProcessFirstEventNotification$NSAppleEventTimeOutDefault@d$NSAppleEventTimeOutNone@d$NSAppleScriptErrorAppName$NSAppleScriptErrorBriefMessage$NSAppleScriptErrorMessage$NSAppleScriptErrorNumber$NSAppleScriptErrorRange$NSArgumentDomain$NSAssertionHandlerKey$NSAverageKeyValueOperator$NSBuddhistCalendar$NSBundleDidLoadNotification$NSCalendarDayChangedNotification$NSCalendarIdentifierBuddhist$NSCalendarIdentifierChinese$NSCalendarIdentifierCoptic$NSCalendarIdentifierEthiopicAmeteAlem$NSCalendarIdentifierEthiopicAmeteMihret$NSCalendarIdentifierGregorian$NSCalendarIdentifierHebrew$NSCalendarIdentifierISO8601$NSCalendarIdentifierIndian$NSCalendarIdentifierIslamic$NSCalendarIdentifierIslamicCivil$NSCalendarIdentifierIslamicTabular$NSCalendarIdentifierIslamicUmmAlQura$NSCalendarIdentifierJapanese$NSCalendarIdentifierPersian$NSCalendarIdentifierRepublicOfChina$NSCharacterConversionException$NSChineseCalendar$NSClassDescriptionNeededForClassNotification$NSCocoaErrorDomain$NSConnectionDidDieNotification$NSConnectionDidInitializeNotification$NSConnectionReplyMode$NSCountKeyValueOperator$NSCurrencySymbol$NSCurrentLocaleDidChangeNotification$NSDateFormatString$NSDateTimeOrdering$NSDeallocateZombies@Z$NSDebugEnabled@Z$NSDecimalDigits$NSDecimalNumberDivideByZeroException$NSDecimalNumberExactnessException$NSDecimalNumberOverflowException$NSDecimalNumberUnderflowException$NSDecimalSeparator$NSDefaultRunLoopMode$NSDestinationInvalidException$NSDidBecomeSingleThreadedNotification$NSDistinctUnionOfArraysKeyValueOperator$NSDistinctUnionOfObjectsKeyValueOperator$NSDistinctUnionOfSetsKeyValueOperator$NSEarlierTimeDesignations$NSErrorFailingURLStringKey$NSExtensionItemAttachmentsKey$NSExtensionItemAttributedContentTextKey$NSExtensionItemAttributedTitleKey$NSExtensionItemsAndErrorsKey$NSExtensionJavaScriptFinalizeArgumentKey$NSExtensionJavaScriptPreprocessingResultsKey$NSFTPPropertyActiveTransferModeKey$NSFTPPropertyFTPProxy$NSFTPPropertyFileOffsetKey$NSFTPPropertyUserLoginKey$NSFTPPropertyUserPasswordKey$NSFailedAuthenticationException$NSFileAppendOnly$NSFileBusy$NSFileCreationDate$NSFileDeviceIdentifier$NSFileExtensionHidden$NSFileGroupOwnerAccountID$NSFileGroupOwnerAccountName$NSFileHFSCreatorCode$NSFileHFSTypeCode$NSFileHandleConnectionAcceptedNotification$NSFileHandleDataAvailableNotification$NSFileHandleNotificationDataItem$NSFileHandleNotificationFileHandleItem$NSFileHandleNotificationMonitorModes$NSFileHandleOperationException$NSFileHandleReadCompletionNotification$NSFileHandleReadToEndOfFileCompletionNotification$NSFileImmutable$NSFileManagerUnmountDissentingProcessIdentifierErrorKey$NSFileModificationDate$NSFileOwnerAccountID$NSFileOwnerAccountName$NSFilePathErrorKey$NSFilePosixPermissions$NSFileProtectionComplete$NSFileProtectionCompleteUnlessOpen$NSFileProtectionCompleteUntilFirstUserAuthentication$NSFileProtectionKey$NSFileProtectionNone$NSFileReferenceCount$NSFileSize$NSFileSystemFileNumber$NSFileSystemFreeNodes$NSFileSystemFreeSize$NSFileSystemNodes$NSFileSystemNumber$NSFileSystemSize$NSFileType$NSFileTypeBlockSpecial$NSFileTypeCharacterSpecial$NSFileTypeDirectory$NSFileTypeRegular$NSFileTypeSocket$NSFileTypeSymbolicLink$NSFileTypeUnknown$NSFoundationVersionNumber@d$NSGenericException$NSGlobalDomain$NSGrammarCorrections$NSGrammarRange$NSGrammarUserDescription$NSGregorianCalendar$NSHTTPCookieComment$NSHTTPCookieCom
mentURL$NSHTTPCookieDiscard$NSHTTPCookieDomain$NSHTTPCookieExpires$NSHTTPCookieManagerAcceptPolicyChangedNotification$NSHTTPCookieManagerCookiesChangedNotification$NSHTTPCookieMaximumAge$NSHTTPCookieName$NSHTTPCookieOriginURL$NSHTTPCookiePath$NSHTTPCookiePort$NSHTTPCookieSecure$NSHTTPCookieValue$NSHTTPCookieVersion$NSHTTPPropertyErrorPageDataKey$NSHTTPPropertyHTTPProxy$NSHTTPPropertyRedirectionHeadersKey$NSHTTPPropertyServerHTTPVersionKey$NSHTTPPropertyStatusCodeKey$NSHTTPPropertyStatusReasonKey$NSHangOnUncaughtException@Z$NSHebrewCalendar$NSHelpAnchorErrorKey$NSHourNameDesignations$NSISO8601Calendar$NSInconsistentArchiveException$NSIndianCalendar$NSInternalInconsistencyException$NSInternationalCurrencyString$NSInvalidArchiveOperationException$NSInvalidArgumentException$NSInvalidReceivePortException$NSInvalidSendPortException$NSInvalidUnarchiveOperationException$NSInvocationOperationCancelledException$NSInvocationOperationVoidResultException$NSIsNilTransformerName$NSIsNotNilTransformerName$NSIslamicCalendar$NSIslamicCivilCalendar$NSItemProviderErrorDomain$NSItemProviderPreferredImageSizeKey$NSJapaneseCalendar$NSJavaClasses$NSJavaDidCreateVirtualMachineNotification$NSJavaDidSetupVirtualMachineNotification$NSJavaLibraryPath$NSJavaOwnVirtualMachine$NSJavaPath$NSJavaPathSeparator$NSJavaRoot$NSJavaUserPath$NSJavaWillCreateVirtualMachineNotification$NSJavaWillSetupVirtualMachineNotification$NSKeepAllocationStatistics@Z$NSKeyValueChangeIndexesKey$NSKeyValueChangeKindKey$NSKeyValueChangeNewKey$NSKeyValueChangeNotificationIsPriorKey$NSKeyValueChangeOldKey$NSKeyedArchiveRootObjectKey$NSKeyedUnarchiveFromDataTransformerName$NSLaterTimeDesignations$NSLinguisticTagAdjective$NSLinguisticTagAdverb$NSLinguisticTagClassifier$NSLinguisticTagCloseParenthesis$NSLinguisticTagCloseQuote$NSLinguisticTagConjunction$NSLinguisticTagDash$NSLinguisticTagDeterminer$NSLinguisticTagIdiom$NSLinguisticTagInterjection$NSLinguisticTagNoun$NSLinguisticTagNumber$NSLinguisticTagOpenParenthesis$NSLinguisticTagOpenQuote$NSLinguisticTagOrganizationName$NSLinguisticTagOther$NSLinguisticTagOtherPunctuation$NSLinguisticTagOtherWhitespace$NSLinguisticTagOtherWord$NSLinguisticTagParagraphBreak$NSLinguisticTagParticle$NSLinguisticTagPersonalName$NSLinguisticTagPlaceName$NSLinguisticTagPreposition$NSLinguisticTagPronoun$NSLinguisticTagPunctuation$NSLinguisticTagSchemeLanguage$NSLinguisticTagSchemeLemma$NSLinguisticTagSchemeLexicalClass$NSLinguisticTagSchemeNameType$NSLinguisticTagSchemeNameTypeOrLexicalClass$NSLinguisticTagSchemeScript$NSLinguisticTagSchemeTokenType$NSLinguisticTagSentenceTerminator$NSLinguisticTagVerb$NSLinguisticTagWhitespace$NSLinguisticTagWord$NSLinguisticTagWordJoiner$NSLoadedClasses$NSLocalNotificationCenterType$NSLocaleAlternateQuotationBeginDelimiterKey$NSLocaleAlternateQuotationEndDelimiterKey$NSLocaleCalendar$NSLocaleCollationIdentifier$NSLocaleCollatorIdentifier$NSLocaleCountryCode$NSLocaleCurrencyCode$NSLocaleCurrencySymbol$NSLocaleDecimalSeparator$NSLocaleExemplarCharacterSet$NSLocaleGroupingSeparator$NSLocaleIdentifier$NSLocaleLanguageCode$NSLocaleMeasurementSystem$NSLocaleQuotationBeginDelimiterKey$NSLocaleQuotationEndDelimiterKey$NSLocaleScriptCode$NSLocaleUsesMetricSystem$NSLocaleVariantCode$NSLocalizedDescriptionKey$NSLocalizedFailureErrorKey$NSLocalizedFailureReasonErrorKey$NSLocalizedRecoveryOptionsErrorKey$NSLocalizedRecoverySuggestionErrorKey$NSMachErrorDomain$NSMallocException$NSMaximumKeyValueOperator$NSMetadataItemAcquisitionMakeKey$NSMetadataItemAcquisitionModelKey$NSMetadataItemAlbumKey$NSMetad
ataItemAltitudeKey$NSMetadataItemApertureKey$NSMetadataItemAppleLoopDescriptorsKey$NSMetadataItemAppleLoopsKeyFilterTypeKey$NSMetadataItemAppleLoopsLoopModeKey$NSMetadataItemAppleLoopsRootKeyKey$NSMetadataItemApplicationCategoriesKey$NSMetadataItemAttributeChangeDateKey$NSMetadataItemAudiencesKey$NSMetadataItemAudioBitRateKey$NSMetadataItemAudioChannelCountKey$NSMetadataItemAudioEncodingApplicationKey$NSMetadataItemAudioSampleRateKey$NSMetadataItemAudioTrackNumberKey$NSMetadataItemAuthorAddressesKey$NSMetadataItemAuthorEmailAddressesKey$NSMetadataItemAuthorsKey$NSMetadataItemBitsPerSampleKey$NSMetadataItemCFBundleIdentifierKey$NSMetadataItemCameraOwnerKey$NSMetadataItemCityKey$NSMetadataItemCodecsKey$NSMetadataItemColorSpaceKey$NSMetadataItemCommentKey$NSMetadataItemComposerKey$NSMetadataItemContactKeywordsKey$NSMetadataItemContentCreationDateKey$NSMetadataItemContentModificationDateKey$NSMetadataItemContentTypeKey$NSMetadataItemContentTypeTreeKey$NSMetadataItemContributorsKey$NSMetadataItemCopyrightKey$NSMetadataItemCountryKey$NSMetadataItemCoverageKey$NSMetadataItemCreatorKey$NSMetadataItemDateAddedKey$NSMetadataItemDeliveryTypeKey$NSMetadataItemDescriptionKey$NSMetadataItemDirectorKey$NSMetadataItemDisplayNameKey$NSMetadataItemDownloadedDateKey$NSMetadataItemDueDateKey$NSMetadataItemDurationSecondsKey$NSMetadataItemEXIFGPSVersionKey$NSMetadataItemEXIFVersionKey$NSMetadataItemEditorsKey$NSMetadataItemEmailAddressesKey$NSMetadataItemEncodingApplicationsKey$NSMetadataItemExecutableArchitecturesKey$NSMetadataItemExecutablePlatformKey$NSMetadataItemExposureModeKey$NSMetadataItemExposureProgramKey$NSMetadataItemExposureTimeSecondsKey$NSMetadataItemExposureTimeStringKey$NSMetadataItemFNumberKey$NSMetadataItemFSContentChangeDateKey$NSMetadataItemFSCreationDateKey$NSMetadataItemFSNameKey$NSMetadataItemFSSizeKey$NSMetadataItemFinderCommentKey$NSMetadataItemFlashOnOffKey$NSMetadataItemFocalLength35mmKey$NSMetadataItemFocalLengthKey$NSMetadataItemFontsKey$NSMetadataItemGPSAreaInformationKey$NSMetadataItemGPSDOPKey$NSMetadataItemGPSDateStampKey$NSMetadataItemGPSDestBearingKey$NSMetadataItemGPSDestDistanceKey$NSMetadataItemGPSDestLatitudeKey$NSMetadataItemGPSDestLongitudeKey$NSMetadataItemGPSDifferentalKey$NSMetadataItemGPSMapDatumKey$NSMetadataItemGPSMeasureModeKey$NSMetadataItemGPSProcessingMethodKey$NSMetadataItemGPSStatusKey$NSMetadataItemGPSTrackKey$NSMetadataItemGenreKey$NSMetadataItemHasAlphaChannelKey$NSMetadataItemHeadlineKey$NSMetadataItemISOSpeedKey$NSMetadataItemIdentifierKey$NSMetadataItemImageDirectionKey$NSMetadataItemInformationKey$NSMetadataItemInstantMessageAddressesKey$NSMetadataItemInstructionsKey$NSMetadataItemIsApplicationManagedKey$NSMetadataItemIsGeneralMIDISequenceKey$NSMetadataItemIsLikelyJunkKey$NSMetadataItemIsUbiquitousKey$NSMetadataItemKeySignatureKey$NSMetadataItemKeywordsKey$NSMetadataItemKindKey$NSMetadataItemLanguagesKey$NSMetadataItemLastUsedDateKey$NSMetadataItemLatitudeKey$NSMetadataItemLayerNamesKey$NSMetadataItemLensModelKey$NSMetadataItemLongitudeKey$NSMetadataItemLyricistKey$NSMetadataItemMaxApertureKey$NSMetadataItemMediaTypesKey$NSMetadataItemMeteringModeKey$NSMetadataItemMusicalGenreKey$NSMetadataItemMusicalInstrumentCategoryKey$NSMetadataItemMusicalInstrumentNameKey$NSMetadataItemNamedLocationKey$NSMetadataItemNumberOfPagesKey$NSMetadataItemOrganizationsKey$NSMetadataItemOrientationKey$NSMetadataItemOriginalFormatKey$NSMetadataItemOriginalSourceKey$NSMetadataItemPageHeightKey$NSMetadataItemPageWidthKey$NSMetadataItemParticipantsKey$NSMetadataItemPathKey$NSM
etadataItemPerformersKey$NSMetadataItemPhoneNumbersKey$NSMetadataItemPixelCountKey$NSMetadataItemPixelHeightKey$NSMetadataItemPixelWidthKey$NSMetadataItemProducerKey$NSMetadataItemProfileNameKey$NSMetadataItemProjectsKey$NSMetadataItemPublishersKey$NSMetadataItemRecipientAddressesKey$NSMetadataItemRecipientEmailAddressesKey$NSMetadataItemRecipientsKey$NSMetadataItemRecordingDateKey$NSMetadataItemRecordingYearKey$NSMetadataItemRedEyeOnOffKey$NSMetadataItemResolutionHeightDPIKey$NSMetadataItemResolutionWidthDPIKey$NSMetadataItemRightsKey$NSMetadataItemSecurityMethodKey$NSMetadataItemSpeedKey$NSMetadataItemStarRatingKey$NSMetadataItemStateOrProvinceKey$NSMetadataItemStreamableKey$NSMetadataItemSubjectKey$NSMetadataItemTempoKey$NSMetadataItemTextContentKey$NSMetadataItemThemeKey$NSMetadataItemTimeSignatureKey$NSMetadataItemTimestampKey$NSMetadataItemTitleKey$NSMetadataItemTotalBitRateKey$NSMetadataItemURLKey$NSMetadataItemVersionKey$NSMetadataItemVideoBitRateKey$NSMetadataItemWhereFromsKey$NSMetadataItemWhiteBalanceKey$NSMetadataQueryAccessibleUbiquitousExternalDocumentsScope$NSMetadataQueryDidFinishGatheringNotification$NSMetadataQueryDidStartGatheringNotification$NSMetadataQueryDidUpdateNotification$NSMetadataQueryGatheringProgressNotification$NSMetadataQueryIndexedLocalComputerScope$NSMetadataQueryIndexedNetworkScope$NSMetadataQueryLocalComputerScope$NSMetadataQueryLocalDocumentsScope$NSMetadataQueryNetworkScope$NSMetadataQueryResultContentRelevanceAttribute$NSMetadataQueryUbiquitousDataScope$NSMetadataQueryUbiquitousDocumentsScope$NSMetadataQueryUpdateAddedItemsKey$NSMetadataQueryUpdateChangedItemsKey$NSMetadataQueryUpdateRemovedItemsKey$NSMetadataQueryUserHomeScope$NSMetadataUbiquitousItemContainerDisplayNameKey$NSMetadataUbiquitousItemDownloadRequestedKey$NSMetadataUbiquitousItemDownloadingErrorKey$NSMetadataUbiquitousItemDownloadingStatusCurrent$NSMetadataUbiquitousItemDownloadingStatusDownloaded$NSMetadataUbiquitousItemDownloadingStatusKey$NSMetadataUbiquitousItemDownloadingStatusNotDownloaded$NSMetadataUbiquitousItemHasUnresolvedConflictsKey$NSMetadataUbiquitousItemIsDownloadedKey$NSMetadataUbiquitousItemIsDownloadingKey$NSMetadataUbiquitousItemIsExternalDocumentKey$NSMetadataUbiquitousItemIsUploadedKey$NSMetadataUbiquitousItemIsUploadingKey$NSMetadataUbiquitousItemPercentDownloadedKey$NSMetadataUbiquitousItemPercentUploadedKey$NSMetadataUbiquitousItemURLInLocalContainerKey$NSMetadataUbiquitousItemUploadingErrorKey$NSMetadataUbiquitousSharedItemCurrentUserPermissionsKey$NSMetadataUbiquitousSharedItemCurrentUserRoleKey$NSMetadataUbiquitousSharedItemMostRecentEditorNameComponentsKey$NSMetadataUbiquitousSharedItemOwnerNameComponentsKey$NSMetadataUbiquitousSharedItemPermissionsReadOnly$NSMetadataUbiquitousSharedItemPermissionsReadWrite$NSMetadataUbiquitousSharedItemRoleOwner$NSMetadataUbiquitousSharedItemRoleParticipant$NSMinimumKeyValueOperator$NSMonthNameArray$NSNegateBooleanTransformerName$NSNegativeCurrencyFormatString$NSNetServicesErrorCode$NSNetServicesErrorDomain$NSNextDayDesignations$NSNextNextDayDesignations$NSOSStatusErrorDomain$NSObjectInaccessibleException$NSObjectNotAvailableException$NSOldStyleException$NSOperationNotSupportedForKeyException$NSPOSIXErrorDomain$NSParseErrorException$NSPersianCalendar$NSPersonNameComponentDelimiter$NSPersonNameComponentFamilyName$NSPersonNameComponentGivenName$NSPersonNameComponentKey$NSPersonNameComponentMiddleName$NSPersonNameComponentNickname$NSPersonNameComponentPrefix$NSPersonNameComponentSuffix$NSPortDidBecomeInvalidNotification$NSPortRec
eiveException$NSPortSendException$NSPortTimeoutException$NSPositiveCurrencyFormatString$NSPriorDayDesignations$NSProcessInfoThermalStateDidChangeNotification$NSProgressEstimatedTimeRemainingKey$NSProgressFileAnimationImageKey$NSProgressFileAnimationImageOriginalRectKey$NSProgressFileCompletedCountKey$NSProgressFileIconKey$NSProgressFileOperationKindCopying$NSProgressFileOperationKindDecompressingAfterDownloading$NSProgressFileOperationKindDownloading$NSProgressFileOperationKindKey$NSProgressFileOperationKindReceiving$NSProgressFileTotalCountKey$NSProgressFileURLKey$NSProgressKindFile$NSProgressThroughputKey$NSRangeException$NSRecoveryAttempterErrorKey$NSRegistrationDomain$NSRepublicOfChinaCalendar$NSRunLoopCommonModes$NSSecureUnarchiveFromDataTransformerName$NSShortDateFormatString$NSShortMonthNameArray$NSShortTimeDateFormatString$NSShortWeekDayNameArray$NSStreamDataWrittenToMemoryStreamKey$NSStreamFileCurrentOffsetKey$NSStreamNetworkServiceType$NSStreamNetworkServiceTypeBackground$NSStreamNetworkServiceTypeCallSignaling$NSStreamNetworkServiceTypeVideo$NSStreamNetworkServiceTypeVoIP$NSStreamNetworkServiceTypeVoice$NSStreamSOCKSErrorDomain$NSStreamSOCKSProxyConfigurationKey$NSStreamSOCKSProxyHostKey$NSStreamSOCKSProxyPasswordKey$NSStreamSOCKSProxyPortKey$NSStreamSOCKSProxyUserKey$NSStreamSOCKSProxyVersion4$NSStreamSOCKSProxyVersion5$NSStreamSOCKSProxyVersionKey$NSStreamSocketSSLErrorDomain$NSStreamSocketSecurityLevelKey$NSStreamSocketSecurityLevelNegotiatedSSL$NSStreamSocketSecurityLevelNone$NSStreamSocketSecurityLevelSSLv2$NSStreamSocketSecurityLevelSSLv3$NSStreamSocketSecurityLevelTLSv1$NSStringEncodingDetectionAllowLossyKey$NSStringEncodingDetectionDisallowedEncodingsKey$NSStringEncodingDetectionFromWindowsKey$NSStringEncodingDetectionLikelyLanguageKey$NSStringEncodingDetectionLossySubstitutionKey$NSStringEncodingDetectionSuggestedEncodingsKey$NSStringEncodingDetectionUseOnlySuggestedEncodingsKey$NSStringEncodingErrorKey$NSStringTransformFullwidthToHalfwidth$NSStringTransformHiraganaToKatakana$NSStringTransformLatinToArabic$NSStringTransformLatinToCyrillic$NSStringTransformLatinToGreek$NSStringTransformLatinToHangul$NSStringTransformLatinToHebrew$NSStringTransformLatinToHiragana$NSStringTransformLatinToKatakana$NSStringTransformLatinToThai$NSStringTransformMandarinToLatin$NSStringTransformStripCombiningMarks$NSStringTransformStripDiacritics$NSStringTransformToLatin$NSStringTransformToUnicodeName$NSStringTransformToXMLHex$NSSumKeyValueOperator$NSSystemClockDidChangeNotification$NSSystemTimeZoneDidChangeNotification$NSTaskDidTerminateNotification$NSTextCheckingAirlineKey$NSTextCheckingCityKey$NSTextCheckingCountryKey$NSTextCheckingFlightKey$NSTextCheckingJobTitleKey$NSTextCheckingNameKey$NSTextCheckingOrganizationKey$NSTextCheckingPhoneKey$NSTextCheckingStateKey$NSTextCheckingStreetKey$NSTextCheckingZIPKey$NSThisDayDesignations$NSThousandsSeparator$NSThreadWillExitNotification$NSThumbnail1024x1024SizeKey$NSTimeDateFormatString$NSTimeFormatString$NSURLAddedToDirectoryDateKey$NSURLApplicationIsScriptableKey$NSURLAttributeModificationDateKey$NSURLAuthenticationMethodClientCertificate$NSURLAuthenticationMethodDefault$NSURLAuthenticationMethodHTMLForm$NSURLAuthenticationMethodHTTPBasic$NSURLAuthenticationMethodHTTPDigest$NSURLAuthenticationMethodNTLM$NSURLAuthenticationMethodNegotiate$NSURLAuthenticationMethodServerTrust$NSURLCanonicalPathKey$NSURLContentAccessDateKey$NSURLContentModificationDateKey$NSURLCreationDateKey$NSURLCredentialStorageChangedNotification$NSURLCredentialStorageRemoveSynchr
onizableCredentials$NSURLCustomIconKey$NSURLDocumentIdentifierKey$NSURLEffectiveIconKey$NSURLErrorBackgroundTaskCancelledReasonKey$NSURLErrorDomain$NSURLErrorFailingURLErrorKey$NSURLErrorFailingURLPeerTrustErrorKey$NSURLErrorFailingURLStringErrorKey$NSURLErrorKey$NSURLFileAllocatedSizeKey$NSURLFileResourceIdentifierKey$NSURLFileResourceTypeBlockSpecial$NSURLFileResourceTypeCharacterSpecial$NSURLFileResourceTypeDirectory$NSURLFileResourceTypeKey$NSURLFileResourceTypeNamedPipe$NSURLFileResourceTypeRegular$NSURLFileResourceTypeSocket$NSURLFileResourceTypeSymbolicLink$NSURLFileResourceTypeUnknown$NSURLFileScheme$NSURLFileSecurityKey$NSURLFileSizeKey$NSURLGenerationIdentifierKey$NSURLHasHiddenExtensionKey$NSURLIsAliasFileKey$NSURLIsApplicationKey$NSURLIsDirectoryKey$NSURLIsExcludedFromBackupKey$NSURLIsExecutableKey$NSURLIsHiddenKey$NSURLIsMountTriggerKey$NSURLIsPackageKey$NSURLIsReadableKey$NSURLIsRegularFileKey$NSURLIsSymbolicLinkKey$NSURLIsSystemImmutableKey$NSURLIsUbiquitousItemKey$NSURLIsUserImmutableKey$NSURLIsVolumeKey$NSURLIsWritableKey$NSURLKeysOfUnsetValuesKey$NSURLLabelColorKey$NSURLLabelNumberKey$NSURLLinkCountKey$NSURLLocalizedLabelKey$NSURLLocalizedNameKey$NSURLLocalizedTypeDescriptionKey$NSURLNameKey$NSURLParentDirectoryURLKey$NSURLPathKey$NSURLPreferredIOBlockSizeKey$NSURLProtectionSpaceFTP$NSURLProtectionSpaceFTPProxy$NSURLProtectionSpaceHTTP$NSURLProtectionSpaceHTTPProxy$NSURLProtectionSpaceHTTPS$NSURLProtectionSpaceHTTPSProxy$NSURLProtectionSpaceSOCKSProxy$NSURLQuarantinePropertiesKey$NSURLSessionDownloadTaskResumeData$NSURLSessionTaskPriorityDefault@f$NSURLSessionTaskPriorityHigh@f$NSURLSessionTaskPriorityLow@f$NSURLSessionTransferSizeUnknown@q$NSURLTagNamesKey$NSURLThumbnailDictionaryKey$NSURLThumbnailKey$NSURLTotalFileAllocatedSizeKey$NSURLTotalFileSizeKey$NSURLTypeIdentifierKey$NSURLUbiquitousItemContainerDisplayNameKey$NSURLUbiquitousItemDownloadRequestedKey$NSURLUbiquitousItemDownloadingErrorKey$NSURLUbiquitousItemDownloadingStatusCurrent$NSURLUbiquitousItemDownloadingStatusDownloaded$NSURLUbiquitousItemDownloadingStatusKey$NSURLUbiquitousItemDownloadingStatusNotDownloaded$NSURLUbiquitousItemHasUnresolvedConflictsKey$NSURLUbiquitousItemIsDownloadedKey$NSURLUbiquitousItemIsDownloadingKey$NSURLUbiquitousItemIsSharedKey$NSURLUbiquitousItemIsUploadedKey$NSURLUbiquitousItemIsUploadingKey$NSURLUbiquitousItemPercentDownloadedKey$NSURLUbiquitousItemPercentUploadedKey$NSURLUbiquitousItemUploadingErrorKey$NSURLUbiquitousSharedItemCurrentUserPermissionsKey$NSURLUbiquitousSharedItemCurrentUserRoleKey$NSURLUbiquitousSharedItemMostRecentEditorNameComponentsKey$NSURLUbiquitousSharedItemOwnerNameComponentsKey$NSURLUbiquitousSharedItemPermissionsReadOnly$NSURLUbiquitousSharedItemPermissionsReadWrite$NSURLUbiquitousSharedItemRoleOwner$NSURLUbiquitousSharedItemRoleParticipant$NSURLVolumeAvailableCapacityForImportantUsageKey$NSURLVolumeAvailableCapacityForOpportunisticUsageKey$NSURLVolumeAvailableCapacityKey$NSURLVolumeCreationDateKey$NSURLVolumeIdentifierKey$NSURLVolumeIsAutomountedKey$NSURLVolumeIsBrowsableKey$NSURLVolumeIsEjectableKey$NSURLVolumeIsEncryptedKey$NSURLVolumeIsInternalKey$NSURLVolumeIsJournalingKey$NSURLVolumeIsLocalKey$NSURLVolumeIsReadOnlyKey$NSURLVolumeIsRemovableKey$NSURLVolumeIsRootFileSystemKey$NSURLVolumeLocalizedFormatDescriptionKey$NSURLVolumeLocalizedNameKey$NSURLVolumeMaximumFileSizeKey$NSURLVolumeNameKey$NSURLVolumeResourceCountKey$NSURLVolumeSupportsAccessPermissionsKey$NSURLVolumeSupportsAdvisoryFileLockingKey$NSURLVolumeSupportsCasePreservedNamesKey$NSURLVolume
SupportsCaseSensitiveNamesKey$NSURLVolumeSupportsCompressionKey$NSURLVolumeSupportsExclusiveRenamingKey$NSURLVolumeSupportsExtendedSecurityKey$NSURLVolumeSupportsFileCloningKey$NSURLVolumeSupportsHardLinksKey$NSURLVolumeSupportsImmutableFilesKey$NSURLVolumeSupportsJournalingKey$NSURLVolumeSupportsPersistentIDsKey$NSURLVolumeSupportsRenamingKey$NSURLVolumeSupportsRootDirectoryDatesKey$NSURLVolumeSupportsSparseFilesKey$NSURLVolumeSupportsSwapRenamingKey$NSURLVolumeSupportsSymbolicLinksKey$NSURLVolumeSupportsVolumeSizesKey$NSURLVolumeSupportsZeroRunsKey$NSURLVolumeTotalCapacityKey$NSURLVolumeURLForRemountingKey$NSURLVolumeURLKey$NSURLVolumeUUIDStringKey$NSUbiquitousKeyValueStoreChangeReasonKey$NSUbiquitousKeyValueStoreChangedKeysKey$NSUbiquitousKeyValueStoreDidChangeExternallyNotification$NSUbiquityIdentityDidChangeNotification$NSUnarchiveFromDataTransformerName$NSUndefinedKeyException$NSUnderlyingErrorKey$NSUndoManagerCheckpointNotification$NSUndoManagerDidCloseUndoGroupNotification$NSUndoManagerDidOpenUndoGroupNotification$NSUndoManagerDidRedoChangeNotification$NSUndoManagerDidUndoChangeNotification$NSUndoManagerGroupIsDiscardableKey$NSUndoManagerWillCloseUndoGroupNotification$NSUndoManagerWillRedoChangeNotification$NSUndoManagerWillUndoChangeNotification$NSUnionOfArraysKeyValueOperator$NSUnionOfObjectsKeyValueOperator$NSUnionOfSetsKeyValueOperator$NSUserActivityTypeBrowsingWeb$NSUserDefaultsDidChangeNotification$NSUserNotificationDefaultSoundName$NSWeekDayNameArray$NSWillBecomeMultiThreadedNotification$NSXMLParserErrorDomain$NSYearMonthWeekDesignations$NSZombieEnabled@Z$_NSConstantStringClassReference@^v$''' -constants = constants + '$NSZeroPoint@%s$'%(sel32or64('{_NSPoint=ff}', '{CGPoint=dd}'),) -constants = constants + '$NSEdgeInsetsZero@%s$'%(sel32or64('{NSEdgeInsets=ffff}', '{NSEdgeInsets=dddd}'),) -constants = constants + '$NSZeroSize@%s$'%(sel32or64('{_NSSize=ff}', '{CGSize=dd}'),) -constants = constants + '$NSZeroRect@%s$'%(sel32or64('{_NSRect={_NSPoint=ff}{_NSSize=ff}}', '{CGRect={CGPoint=dd}{CGSize=dd}}'),) -enums = 
'''$NSASCIIStringEncoding@1$NSActivityAutomaticTerminationDisabled@32768$NSActivityBackground@255$NSActivityIdleDisplaySleepDisabled@1099511627776$NSActivityIdleSystemSleepDisabled@1048576$NSActivityLatencyCritical@1095216660480$NSActivitySuddenTerminationDisabled@16384$NSActivityUserInitiated@16777215$NSActivityUserInitiatedAllowingIdleSystemSleep@15728639$NSAdminApplicationDirectory@4$NSAggregateExpressionType@14$NSAlignAllEdgesInward@15$NSAlignAllEdgesNearest@983040$NSAlignAllEdgesOutward@3840$NSAlignHeightInward@32$NSAlignHeightNearest@2097152$NSAlignHeightOutward@8192$NSAlignMaxXInward@4$NSAlignMaxXNearest@262144$NSAlignMaxXOutward@1024$NSAlignMaxYInward@8$NSAlignMaxYNearest@524288$NSAlignMaxYOutward@2048$NSAlignMinXInward@1$NSAlignMinXNearest@65536$NSAlignMinXOutward@256$NSAlignMinYInward@2$NSAlignMinYNearest@131072$NSAlignMinYOutward@512$NSAlignRectFlipped@9223372036854775808$NSAlignWidthInward@16$NSAlignWidthNearest@1048576$NSAlignWidthOutward@4096$NSAllApplicationsDirectory@100$NSAllDomainsMask@65535$NSAllLibrariesDirectory@101$NSAllPredicateModifier@1$NSAnchoredSearch@8$NSAndPredicateType@1$NSAnyKeyExpressionType@15$NSAnyPredicateModifier@2$NSAppleEventSendAlwaysInteract@48$NSAppleEventSendCanInteract@32$NSAppleEventSendCanSwitchLayer@64$NSAppleEventSendDontAnnotate@65536$NSAppleEventSendDontExecute@8192$NSAppleEventSendDontRecord@4096$NSAppleEventSendNeverInteract@16$NSAppleEventSendNoReply@1$NSAppleEventSendQueueReply@2$NSAppleEventSendWaitForReply@3$NSApplicationDirectory@1$NSApplicationScriptsDirectory@23$NSApplicationSupportDirectory@14$NSArgumentEvaluationScriptError@3$NSArgumentsWrongScriptError@6$NSAtomicWrite@1$NSAttributedStringEnumerationLongestEffectiveRangeNotRequired@1048576$NSAttributedStringEnumerationReverse@2$NSAutosavedInformationDirectory@11$NSBackgroundActivityResultDeferred@2$NSBackgroundActivityResultFinished@1$NSBackwardsSearch@4$NSBeginsWithComparison@5$NSBeginsWithPredicateOperatorType@8$NSBetweenPredicateOperatorType@100$NSBinarySearchingFirstEqual@256$NSBinarySearchingInsertionIndex@1024$NSBinarySearchingLastEqual@512$NSBlockExpressionType@19$NSBundleErrorMaximum@5119$NSBundleErrorMinimum@4992$NSBundleExecutableArchitectureI386@7$NSBundleExecutableArchitecturePPC@18$NSBundleExecutableArchitecturePPC64@16777234$NSBundleExecutableArchitectureX86_64@16777223$NSByteCountFormatterCountStyleBinary@3$NSByteCountFormatterCountStyleDecimal@2$NSByteCountFormatterCountStyleFile@0$NSByteCountFormatterCountStyleMemory@1$NSByteCountFormatterUseAll@65535$NSByteCountFormatterUseBytes@1$NSByteCountFormatterUseDefault@0$NSByteCountFormatterUseEB@64$NSByteCountFormatterUseGB@8$NSByteCountFormatterUseKB@2$NSByteCountFormatterUseMB@4$NSByteCountFormatterUsePB@32$NSByteCountFormatterUseTB@16$NSByteCountFormatterUseYBOrHigher@65280$NSByteCountFormatterUseZB@128$NSCachesDirectory@13$NSCalculationDivideByZero@4$NSCalculationLossOfPrecision@1$NSCalculationNoError@0$NSCalculationOverflow@3$NSCalculationUnderflow@2$NSCalendarCalendarUnit@1048576$NSCalendarMatchFirst@4096$NSCalendarMatchLast@8192$NSCalendarMatchNextTime@1024$NSCalendarMatchNextTimePreservingSmallerUnits@512$NSCalendarMatchPreviousTimePreservingSmallerUnits@256$NSCalendarMatchStrictly@2$NSCalendarSearchBackwards@4$NSCalendarUnitCalendar@1048576$NSCalendarUnitDay@16$NSCalendarUnitEra@2$NSCalendarUnitHour@32$NSCalendarUnitMinute@64$NSCalendarUnitMonth@8$NSCalendarUnitNanosecond@32768$NSCalendarUnitQuarter@2048$NSCalendarUnitSecond@128$NSCalendarUnitTimeZone@2097152$NSCalendarUnitWeekOfMonth@4096$NSCalendarUnitWeekOfYe
ar@8192$NSCalendarUnitWeekday@512$NSCalendarUnitWeekdayOrdinal@1024$NSCalendarUnitYear@4$NSCalendarUnitYearForWeekOfYear@16384$NSCalendarWrapComponents@1$NSCannotCreateScriptCommandError@10$NSCaseInsensitivePredicateOption@1$NSCaseInsensitiveSearch@1$NSCloudSharingConflictError@5123$NSCloudSharingErrorMaximum@5375$NSCloudSharingErrorMinimum@5120$NSCloudSharingNetworkFailureError@5120$NSCloudSharingNoPermissionError@5124$NSCloudSharingOtherError@5375$NSCloudSharingQuotaExceededError@5121$NSCloudSharingTooManyParticipantsError@5122$NSCoderErrorMaximum@4991$NSCoderErrorMinimum@4864$NSCoderInvalidValueError@4866$NSCoderReadCorruptError@4864$NSCoderValueNotFoundError@4865$NSCollectorDisabledOption@2$NSConditionalExpressionType@20$NSConstantValueExpressionType@0$NSContainerSpecifierError@2$NSContainsComparison@7$NSContainsPredicateOperatorType@99$NSCoreServiceDirectory@10$NSCustomSelectorPredicateOperatorType@11$NSDataBase64DecodingIgnoreUnknownCharacters@1$NSDataBase64Encoding64CharacterLineLength@1$NSDataBase64Encoding76CharacterLineLength@2$NSDataBase64EncodingEndLineWithCarriageReturn@16$NSDataBase64EncodingEndLineWithLineFeed@32$NSDataReadingMapped@1$NSDataReadingMappedAlways@8$NSDataReadingMappedIfSafe@1$NSDataReadingUncached@2$NSDataSearchAnchored@2$NSDataSearchBackwards@1$NSDataWritingAtomic@1$NSDataWritingFileProtectionComplete@536870912$NSDataWritingFileProtectionCompleteUnlessOpen@805306368$NSDataWritingFileProtectionCompleteUntilFirstUserAuthentication@1073741824$NSDataWritingFileProtectionMask@4026531840$NSDataWritingFileProtectionNone@268435456$NSDataWritingWithoutOverwriting@2$NSDateComponentsFormatterUnitsStyleAbbreviated@1$NSDateComponentsFormatterUnitsStyleBrief@5$NSDateComponentsFormatterUnitsStyleFull@3$NSDateComponentsFormatterUnitsStylePositional@0$NSDateComponentsFormatterUnitsStyleShort@2$NSDateComponentsFormatterUnitsStyleSpellOut@4$NSDateComponentsFormatterZeroFormattingBehaviorDefault@1$NSDateComponentsFormatterZeroFormattingBehaviorDropAll@14$NSDateComponentsFormatterZeroFormattingBehaviorDropLeading@2$NSDateComponentsFormatterZeroFormattingBehaviorDropMiddle@4$NSDateComponentsFormatterZeroFormattingBehaviorDropTrailing@8$NSDateComponentsFormatterZeroFormattingBehaviorNone@0$NSDateComponentsFormatterZeroFormattingBehaviorPad@65536$NSDateFormatterBehavior10_0@1000$NSDateFormatterBehavior10_4@1040$NSDateFormatterBehaviorDefault@0$NSDateFormatterFullStyle@4$NSDateFormatterLongStyle@3$NSDateFormatterMediumStyle@2$NSDateFormatterNoStyle@0$NSDateFormatterShortStyle@1$NSDateIntervalFormatterFullStyle@4$NSDateIntervalFormatterLongStyle@3$NSDateIntervalFormatterMediumStyle@2$NSDateIntervalFormatterNoStyle@0$NSDateIntervalFormatterShortStyle@1$NSDayCalendarUnit@16$NSDecimalMaxSize@8$NSDecodingFailurePolicyRaiseException@0$NSDecodingFailurePolicySetErrorAndReturn@1$NSDemoApplicationDirectory@2$NSDesktopDirectory@12$NSDeveloperApplicationDirectory@3$NSDeveloperDirectory@6$NSDiacriticInsensitivePredicateOption@2$NSDiacriticInsensitiveSearch@128$NSDirectPredicateModifier@0$NSDirectoryEnumerationSkipsHiddenFiles@4$NSDirectoryEnumerationSkipsPackageDescendants@2$NSDirectoryEnumerationSkipsSubdirectoryDescendants@1$NSDistributedNotificationDeliverImmediately@1$NSDistributedNotificationPostToAllSessions@2$NSDocumentDirectory@9$NSDocumentationDirectory@8$NSDownloadsDirectory@15$NSEDGEINSETS_DEFINED@1$NSEndsWithComparison@6$NSEndsWithPredicateOperatorType@9$NSEnergyFormatterUnitCalorie@1793$NSEnergyFormatterUnitJoule@11$NSEnergyFormatterUnitKilocalorie@1794$NSEnergyFormatterUnitKilojoule@
14$NSEnumerationConcurrent@1$NSEnumerationReverse@2$NSEqualToComparison@0$NSEqualToPredicateOperatorType@4$NSEraCalendarUnit@2$NSEvaluatedObjectExpressionType@1$NSEverySubelement@1$NSExecutableArchitectureMismatchError@3585$NSExecutableErrorMaximum@3839$NSExecutableErrorMinimum@3584$NSExecutableLinkError@3588$NSExecutableLoadError@3587$NSExecutableNotLoadableError@3584$NSExecutableRuntimeMismatchError@3586$NSFeatureUnsupportedError@3328$NSFileCoordinatorReadingForUploading@8$NSFileCoordinatorReadingImmediatelyAvailableMetadataOnly@4$NSFileCoordinatorReadingResolvesSymbolicLink@2$NSFileCoordinatorReadingWithoutChanges@1$NSFileCoordinatorWritingContentIndependentMetadataOnly@16$NSFileCoordinatorWritingForDeleting@1$NSFileCoordinatorWritingForMerging@4$NSFileCoordinatorWritingForMoving@2$NSFileCoordinatorWritingForReplacing@8$NSFileErrorMaximum@1023$NSFileErrorMinimum@0$NSFileLockingError@255$NSFileManagerItemReplacementUsingNewMetadataOnly@1$NSFileManagerItemReplacementWithoutDeletingBackupItem@2$NSFileManagerUnmountAllPartitionsAndEjectDisk@1$NSFileManagerUnmountBusyError@769$NSFileManagerUnmountUnknownError@768$NSFileManagerUnmountWithoutUI@2$NSFileNoSuchFileError@4$NSFileReadCorruptFileError@259$NSFileReadInapplicableStringEncodingError@261$NSFileReadInvalidFileNameError@258$NSFileReadNoPermissionError@257$NSFileReadNoSuchFileError@260$NSFileReadTooLargeError@263$NSFileReadUnknownError@256$NSFileReadUnknownStringEncodingError@264$NSFileReadUnsupportedSchemeError@262$NSFileVersionAddingByMoving@1$NSFileVersionReplacingByMoving@1$NSFileWrapperReadingImmediate@1$NSFileWrapperReadingWithoutMapping@2$NSFileWrapperWritingAtomic@1$NSFileWrapperWritingWithNameUpdating@2$NSFileWriteFileExistsError@516$NSFileWriteInapplicableStringEncodingError@517$NSFileWriteInvalidFileNameError@514$NSFileWriteNoPermissionError@513$NSFileWriteOutOfSpaceError@640$NSFileWriteUnknownError@512$NSFileWriteUnsupportedSchemeError@518$NSFileWriteVolumeReadOnlyError@642$NSForcedOrderingSearch@512$NSFormattingContextBeginningOfSentence@4$NSFormattingContextDynamic@1$NSFormattingContextListItem@3$NSFormattingContextMiddleOfSentence@5$NSFormattingContextStandalone@2$NSFormattingContextUnknown@0$NSFormattingError@2048$NSFormattingErrorMaximum@2559$NSFormattingErrorMinimum@2048$NSFormattingUnitStyleLong@3$NSFormattingUnitStyleMedium@2$NSFormattingUnitStyleShort@1$NSFoundationVersionNumber10_10@1151.16$NSFoundationVersionNumber10_10_1@1151.16$NSFoundationVersionNumber10_10_2@1152.14$NSFoundationVersionNumber10_10_3@1153.2$NSFoundationVersionNumber10_10_4@1153.2$NSFoundationVersionNumber10_10_5@1154.0$NSFoundationVersionNumber10_10_Max@1199.0$NSFoundationVersionNumber10_11@1252.0$NSFoundationVersionNumber10_11_1@1255.1$NSFoundationVersionNumber10_11_2@1256.1$NSFoundationVersionNumber10_11_3@1256.1$NSFoundationVersionNumber10_11_4@1258.0$NSFoundationVersionNumber10_11_Max@1299.0$NSFoundationVersionNumber10_8@945.0$NSFoundationVersionNumber10_8_1@945.0$NSFoundationVersionNumber10_8_2@945.11$NSFoundationVersionNumber10_8_3@945.16$NSFoundationVersionNumber10_8_4@945.18$NSFoundationVersionNumber10_9@1056$NSFoundationVersionNumber10_9_1@1056$NSFoundationVersionNumber10_9_2@1056.13$NSFoundationVersionWithFileManagerResourceForkSupport@412$NSFunctionExpressionType@4$NSGEOMETRY_TYPES_SAME_AS_CGGEOMETRY_TYPES@1$NSGreaterThanComparison@4$NSGreaterThanOrEqualToComparison@3$NSGreaterThanOrEqualToPredicateOperatorType@3$NSGreaterThanPredicateOperatorType@2$NSHPUXOperatingSystem@4$NSHTTPCookieAcceptPolicyAlways@0$NSHTTPCookieAcceptPolicyNever@1$
NSHTTPCookieAcceptPolicyOnlyFromMainDocumentDomain@2$NSHashTableCopyIn@65536$NSHashTableObjectPointerPersonality@512$NSHashTableStrongMemory@0$NSHashTableWeakMemory@5$NSHashTableZeroingWeakMemory@1$NSHourCalendarUnit@32$NSINTEGER_DEFINED@1$NSISO2022JPStringEncoding@21$NSISOLatin1StringEncoding@5$NSISOLatin2StringEncoding@9$NSInPredicateOperatorType@10$NSIndexSubelement@0$NSInputMethodsDirectory@16$NSInternalScriptError@8$NSInternalSpecifierError@5$NSIntersectSetExpressionType@6$NSInvalidIndexSpecifierError@4$NSItemProviderFileOptionOpenInPlace@1$NSItemProviderItemUnavailableError@-1000$NSItemProviderRepresentationVisibilityAll@0$NSItemProviderRepresentationVisibilityGroup@2$NSItemProviderRepresentationVisibilityOwnProcess@3$NSItemProviderUnavailableCoercionError@-1200$NSItemProviderUnexpectedValueClassError@-1100$NSItemProviderUnknownError@-1$NSItemReplacementDirectory@99$NSJSONReadingAllowFragments@4$NSJSONReadingMutableContainers@1$NSJSONReadingMutableLeaves@2$NSJSONWritingPrettyPrinted@1$NSJSONWritingSortedKeys@2$NSJapaneseEUCStringEncoding@3$NSKeyPathExpressionType@3$NSKeySpecifierEvaluationScriptError@2$NSKeyValueChangeInsertion@2$NSKeyValueChangeRemoval@3$NSKeyValueChangeReplacement@4$NSKeyValueChangeSetting@1$NSKeyValueIntersectSetMutation@3$NSKeyValueMinusSetMutation@2$NSKeyValueObservingOptionInitial@4$NSKeyValueObservingOptionNew@1$NSKeyValueObservingOptionOld@2$NSKeyValueObservingOptionPrior@8$NSKeyValueSetSetMutation@4$NSKeyValueUnionSetMutation@1$NSKeyValueValidationError@1024$NSLengthFormatterUnitCentimeter@9$NSLengthFormatterUnitFoot@1282$NSLengthFormatterUnitInch@1281$NSLengthFormatterUnitKilometer@14$NSLengthFormatterUnitMeter@11$NSLengthFormatterUnitMile@1284$NSLengthFormatterUnitMillimeter@8$NSLengthFormatterUnitYard@1283$NSLessThanComparison@2$NSLessThanOrEqualToComparison@1$NSLessThanOrEqualToPredicateOperatorType@1$NSLessThanPredicateOperatorType@0$NSLibraryDirectory@5$NSLikePredicateOperatorType@7$NSLinguisticTaggerJoinNames@16$NSLinguisticTaggerOmitOther@8$NSLinguisticTaggerOmitPunctuation@2$NSLinguisticTaggerOmitWhitespace@4$NSLinguisticTaggerOmitWords@1$NSLinguisticTaggerUnitDocument@3$NSLinguisticTaggerUnitParagraph@2$NSLinguisticTaggerUnitSentence@1$NSLinguisticTaggerUnitWord@0$NSLiteralSearch@2$NSLocalDomainMask@2$NSLocaleLanguageDirectionBottomToTop@4$NSLocaleLanguageDirectionLeftToRight@1$NSLocaleLanguageDirectionRightToLeft@2$NSLocaleLanguageDirectionTopToBottom@3$NSLocaleLanguageDirectionUnknown@0$NSMACHOperatingSystem@5$NSMacOSRomanStringEncoding@30$NSMachPortDeallocateNone@0$NSMachPortDeallocateReceiveRight@2$NSMachPortDeallocateSendRight@1$NSMapTableCopyIn@65536$NSMapTableObjectPointerPersonality@512$NSMapTableStrongMemory@0$NSMapTableWeakMemory@5$NSMapTableZeroingWeakMemory@1$NSMappedRead@1$NSMassFormatterUnitGram@11$NSMassFormatterUnitKilogram@14$NSMassFormatterUnitOunce@1537$NSMassFormatterUnitPound@1538$NSMassFormatterUnitStone@1539$NSMatchesPredicateOperatorType@6$NSMatchingAnchored@4$NSMatchingCompleted@2$NSMatchingHitEnd@4$NSMatchingInternalError@16$NSMatchingProgress@1$NSMatchingReportCompletion@2$NSMatchingReportProgress@1$NSMatchingRequiredEnd@8$NSMatchingWithTransparentBounds@8$NSMatchingWithoutAnchoringBounds@16$NSMaxXEdge@2$NSMaxYEdge@3$NSMeasurementFormatterUnitOptionsNaturalScale@2$NSMeasurementFormatterUnitOptionsProvidedUnit@1$NSMeasurementFormatterUnitOptionsTemperatureWithoutUnit@4$NSMiddleSubelement@2$NSMinXEdge@0$NSMinYEdge@1$NSMinusSetExpressionType@7$NSMinuteCalendarUnit@64$NSMonthCalendarUnit@8$NSMoviesDirectory@17$NSMusicDirectory
@18$NSNEXTSTEPStringEncoding@2$NSNetServiceListenForConnections@2$NSNetServiceNoAutoRename@1$NSNetServicesActivityInProgress@-72003$NSNetServicesBadArgumentError@-72004$NSNetServicesCancelledError@-72005$NSNetServicesCollisionError@-72001$NSNetServicesInvalidError@-72006$NSNetServicesNotFoundError@-72002$NSNetServicesTimeoutError@-72007$NSNetServicesUnknownError@-72000$NSNetworkDomainMask@4$NSNoScriptError@0$NSNoSpecifierError@0$NSNoSubelement@4$NSNoTopLevelContainersSpecifierError@1$NSNonLossyASCIIStringEncoding@7$NSNormalizedPredicateOption@4$NSNotEqualToPredicateOperatorType@5$NSNotPredicateType@0$NSNotificationCoalescingOnName@1$NSNotificationCoalescingOnSender@2$NSNotificationDeliverImmediately@1$NSNotificationNoCoalescing@0$NSNotificationPostToAllSessions@2$NSNotificationSuspensionBehaviorCoalesce@2$NSNotificationSuspensionBehaviorDeliverImmediately@4$NSNotificationSuspensionBehaviorDrop@1$NSNotificationSuspensionBehaviorHold@3$NSNumberFormatterBehavior10_0@1000$NSNumberFormatterBehavior10_4@1040$NSNumberFormatterBehaviorDefault@0$NSNumberFormatterCurrencyStyle@2$NSNumberFormatterDecimalStyle@1$NSNumberFormatterNoStyle@0$NSNumberFormatterPadAfterPrefix@1$NSNumberFormatterPadAfterSuffix@3$NSNumberFormatterPadBeforePrefix@0$NSNumberFormatterPadBeforeSuffix@2$NSNumberFormatterPercentStyle@3$NSNumberFormatterRoundCeiling@0$NSNumberFormatterRoundDown@2$NSNumberFormatterRoundFloor@1$NSNumberFormatterRoundHalfDown@5$NSNumberFormatterRoundHalfEven@4$NSNumberFormatterRoundHalfUp@6$NSNumberFormatterRoundUp@3$NSNumberFormatterScientificStyle@4$NSNumberFormatterSpellOutStyle@5$NSNumericSearch@64$NSOSF1OperatingSystem@7$NSObjectAutoreleasedEvent@3$NSObjectExtraRefDecrementedEvent@5$NSObjectExtraRefIncrementedEvent@4$NSObjectInternalRefDecrementedEvent@7$NSObjectInternalRefIncrementedEvent@6$NSOpenStepUnicodeReservedBase@62464$NSOperationNotSupportedForKeyScriptError@9$NSOperationNotSupportedForKeySpecifierError@6$NSOperationQueueDefaultMaxConcurrentOperationCount@-1$NSOperationQueuePriorityHigh@4$NSOperationQueuePriorityLow@-4$NSOperationQueuePriorityNormal@0$NSOperationQueuePriorityVeryHigh@8$NSOperationQueuePriorityVeryLow@-8$NSOrPredicateType@2$NSOrderedAscending@-1$NSOrderedDescending@1$NSOrderedSame@0$NSPersonNameComponentsFormatterPhonetic@2$NSPersonNameComponentsFormatterStyleAbbreviated@4$NSPersonNameComponentsFormatterStyleDefault@0$NSPersonNameComponentsFormatterStyleLong@3$NSPersonNameComponentsFormatterStyleMedium@2$NSPersonNameComponentsFormatterStyleShort@1$NSPicturesDirectory@19$NSPointerFunctionsCStringPersonality@768$NSPointerFunctionsCopyIn@65536$NSPointerFunctionsIntegerPersonality@1280$NSPointerFunctionsMachVirtualMemory@4$NSPointerFunctionsMallocMemory@3$NSPointerFunctionsObjectPersonality@0$NSPointerFunctionsObjectPointerPersonality@512$NSPointerFunctionsOpaqueMemory@2$NSPointerFunctionsOpaquePersonality@256$NSPointerFunctionsStrongMemory@0$NSPointerFunctionsStructPersonality@1024$NSPointerFunctionsWeakMemory@5$NSPointerFunctionsZeroingWeakMemory@1$NSPositionAfter@0$NSPositionBefore@1$NSPositionBeginning@2$NSPositionEnd@3$NSPositionReplace@4$NSPostASAP@2$NSPostNow@3$NSPostWhenIdle@1$NSPreferencePanesDirectory@22$NSPrinterDescriptionDirectory@20$NSProcessInfoThermalStateCritical@3$NSProcessInfoThermalStateFair@1$NSProcessInfoThermalStateNominal@0$NSProcessInfoThermalStateSerious@2$NSPropertyListBinaryFormat_v1_0@200$NSPropertyListErrorMaximum@4095$NSPropertyListErrorMinimum@3840$NSPropertyListImmutable@0$NSPropertyListMutableContainers@1$NSPropertyListMutableContainersAndLeave
s@2$NSPropertyListOpenStepFormat@1$NSPropertyListReadCorruptError@3840$NSPropertyListReadStreamError@3842$NSPropertyListReadUnknownVersionError@3841$NSPropertyListWriteInvalidError@3852$NSPropertyListWriteStreamError@3851$NSPropertyListXMLFormat_v1_0@100$NSProprietaryStringEncoding@65536$NSQualityOfServiceBackground@9$NSQualityOfServiceDefault@-1$NSQualityOfServiceUserInitiated@25$NSQualityOfServiceUserInteractive@33$NSQualityOfServiceUtility@17$NSQuarterCalendarUnit@2048$NSRandomSubelement@3$NSReceiverEvaluationScriptError@1$NSReceiversCantHandleCommandScriptError@4$NSRegularExpressionAllowCommentsAndWhitespace@2$NSRegularExpressionAnchorsMatchLines@16$NSRegularExpressionCaseInsensitive@1$NSRegularExpressionDotMatchesLineSeparators@8$NSRegularExpressionIgnoreMetacharacters@4$NSRegularExpressionSearch@1024$NSRegularExpressionUseUnicodeWordBoundaries@64$NSRegularExpressionUseUnixLineSeparators@32$NSRelativeAfter@0$NSRelativeBefore@1$NSRequiredArgumentsMissingScriptError@5$NSRoundBankers@3$NSRoundDown@1$NSRoundPlain@0$NSRoundUp@2$NSSaveOptionsAsk@2$NSSaveOptionsNo@1$NSSaveOptionsYes@0$NSScannedOption@1$NSSecondCalendarUnit@128$NSSharedPublicDirectory@21$NSShiftJISStringEncoding@8$NSSolarisOperatingSystem@3$NSSortConcurrent@1$NSSortStable@16$NSStreamEventEndEncountered@16$NSStreamEventErrorOccurred@8$NSStreamEventHasBytesAvailable@2$NSStreamEventHasSpaceAvailable@4$NSStreamEventNone@0$NSStreamEventOpenCompleted@1$NSStreamStatusAtEnd@5$NSStreamStatusClosed@6$NSStreamStatusError@7$NSStreamStatusNotOpen@0$NSStreamStatusOpen@2$NSStreamStatusOpening@1$NSStreamStatusReading@3$NSStreamStatusWriting@4$NSStringEncodingConversionAllowLossy@1$NSStringEncodingConversionExternalRepresentation@2$NSStringEnumerationByComposedCharacterSequences@2$NSStringEnumerationByLines@0$NSStringEnumerationByParagraphs@1$NSStringEnumerationBySentences@4$NSStringEnumerationByWords@3$NSStringEnumerationLocalized@1024$NSStringEnumerationReverse@256$NSStringEnumerationSubstringNotRequired@512$NSSubqueryExpressionType@13$NSSunOSOperatingSystem@6$NSSymbolStringEncoding@6$NSSystemDomainMask@8$NSTaskTerminationReasonExit@1$NSTaskTerminationReasonUncaughtSignal@2$NSTextCheckingAllCustomTypes@18446744069414584320$NSTextCheckingAllSystemTypes@4294967295$NSTextCheckingAllTypes@18446744073709551615$NSTextCheckingTypeAddress@16$NSTextCheckingTypeCorrection@512$NSTextCheckingTypeDash@128$NSTextCheckingTypeDate@8$NSTextCheckingTypeGrammar@4$NSTextCheckingTypeLink@32$NSTextCheckingTypeOrthography@1$NSTextCheckingTypePhoneNumber@2048$NSTextCheckingTypeQuote@64$NSTextCheckingTypeRegularExpression@1024$NSTextCheckingTypeReplacement@256$NSTextCheckingTypeSpelling@2$NSTextCheckingTypeTransitInformation@4096$NSTimeZoneCalendarUnit@2097152$NSTimeZoneNameStyleDaylightSaving@2$NSTimeZoneNameStyleGeneric@4$NSTimeZoneNameStyleShortDaylightSaving@3$NSTimeZoneNameStyleShortGeneric@5$NSTimeZoneNameStyleShortStandard@1$NSTimeZoneNameStyleStandard@0$NSTrashDirectory@102$NSURLBookmarkCreationMinimalBookmark@512$NSURLBookmarkCreationPreferFileIDResolution@256$NSURLBookmarkCreationSecurityScopeAllowOnlyReadAccess@4096$NSURLBookmarkCreationSuitableForBookmarkFile@1024$NSURLBookmarkCreationWithSecurityScope@2048$NSURLBookmarkResolutionWithSecurityScope@1024$NSURLBookmarkResolutionWithoutMounting@512$NSURLBookmarkResolutionWithoutUI@256$NSURLCacheStorageAllowed@0$NSURLCacheStorageAllowedInMemoryOnly@1$NSURLCacheStorageNotAllowed@2$NSURLCredentialPersistenceForSession@1$NSURLCredentialPersistenceNone@0$NSURLCredentialPersistencePermanent@2$NSURLCredentialPersis
tenceSynchronizable@3$NSURLErrorAppTransportSecurityRequiresSecureConnection@-1022$NSURLErrorBackgroundSessionInUseByAnotherProcess@-996$NSURLErrorBackgroundSessionRequiresSharedContainer@-995$NSURLErrorBackgroundSessionWasDisconnected@-997$NSURLErrorBadServerResponse@-1011$NSURLErrorBadURL@-1000$NSURLErrorCallIsActive@-1019$NSURLErrorCancelled@-999$NSURLErrorCancelledReasonBackgroundUpdatesDisabled@1$NSURLErrorCancelledReasonInsufficientSystemResources@2$NSURLErrorCancelledReasonUserForceQuitApplication@0$NSURLErrorCannotCloseFile@-3002$NSURLErrorCannotConnectToHost@-1004$NSURLErrorCannotCreateFile@-3000$NSURLErrorCannotDecodeContentData@-1016$NSURLErrorCannotDecodeRawData@-1015$NSURLErrorCannotFindHost@-1003$NSURLErrorCannotLoadFromNetwork@-2000$NSURLErrorCannotMoveFile@-3005$NSURLErrorCannotOpenFile@-3001$NSURLErrorCannotParseResponse@-1017$NSURLErrorCannotRemoveFile@-3004$NSURLErrorCannotWriteToFile@-3003$NSURLErrorClientCertificateRejected@-1205$NSURLErrorClientCertificateRequired@-1206$NSURLErrorDNSLookupFailed@-1006$NSURLErrorDataLengthExceedsMaximum@-1103$NSURLErrorDataNotAllowed@-1020$NSURLErrorDownloadDecodingFailedMidStream@-3006$NSURLErrorDownloadDecodingFailedToComplete@-3007$NSURLErrorFileDoesNotExist@-1100$NSURLErrorFileIsDirectory@-1101$NSURLErrorFileOutsideSafeArea@-1104$NSURLErrorHTTPTooManyRedirects@-1007$NSURLErrorInternationalRoamingOff@-1018$NSURLErrorNetworkConnectionLost@-1005$NSURLErrorNoPermissionsToReadFile@-1102$NSURLErrorNotConnectedToInternet@-1009$NSURLErrorRedirectToNonExistentLocation@-1010$NSURLErrorRequestBodyStreamExhausted@-1021$NSURLErrorResourceUnavailable@-1008$NSURLErrorSecureConnectionFailed@-1200$NSURLErrorServerCertificateHasBadDate@-1201$NSURLErrorServerCertificateHasUnknownRoot@-1203$NSURLErrorServerCertificateNotYetValid@-1204$NSURLErrorServerCertificateUntrusted@-1202$NSURLErrorTimedOut@-1001$NSURLErrorUnknown@-1$NSURLErrorUnsupportedURL@-1002$NSURLErrorUserAuthenticationRequired@-1013$NSURLErrorUserCancelledAuthentication@-1012$NSURLErrorZeroByteResource@-1014$NSURLHandleLoadFailed@3$NSURLHandleLoadInProgress@2$NSURLHandleLoadSucceeded@1$NSURLHandleNotLoaded@0$NSURLNetworkServiceTypeBackground@3$NSURLNetworkServiceTypeCallSignaling@11$NSURLNetworkServiceTypeDefault@0$NSURLNetworkServiceTypeResponsiveData@6$NSURLNetworkServiceTypeVideo@2$NSURLNetworkServiceTypeVoIP@1$NSURLNetworkServiceTypeVoice@4$NSURLRelationshipContains@0$NSURLRelationshipOther@2$NSURLRelationshipSame@1$NSURLRequestReloadIgnoringCacheData@1$NSURLRequestReloadIgnoringLocalAndRemoteCacheData@4$NSURLRequestReloadIgnoringLocalCacheData@1$NSURLRequestReloadRevalidatingCacheData@5$NSURLRequestReturnCacheDataDontLoad@3$NSURLRequestReturnCacheDataElseLoad@2$NSURLRequestUseProtocolCachePolicy@0$NSURLResponseUnknownLength@-1$NSURLSessionAuthChallengeCancelAuthenticationChallenge@2$NSURLSessionAuthChallengePerformDefaultHandling@1$NSURLSessionAuthChallengeRejectProtectionSpace@3$NSURLSessionAuthChallengeUseCredential@0$NSURLSessionDelayedRequestCancel@2$NSURLSessionDelayedRequestContinueLoading@0$NSURLSessionDelayedRequestUseNewRequest@1$NSURLSessionMultipathServiceTypeAggregate@3$NSURLSessionMultipathServiceTypeHandover@1$NSURLSessionMultipathServiceTypeInteractive@2$NSURLSessionMultipathServiceTypeNone@0$NSURLSessionResponseAllow@1$NSURLSessionResponseBecomeDownload@2$NSURLSessionResponseBecomeStream@3$NSURLSessionResponseCancel@0$NSURLSessionTaskMetricsResourceFetchTypeLocalCache@3$NSURLSessionTaskMetricsResourceFetchTypeNetworkLoad@1$NSURLSessionTaskMetricsResourceFetchTypeServerP
ush@2$NSURLSessionTaskMetricsResourceFetchTypeUnknown@0$NSURLSessionTaskStateCanceling@2$NSURLSessionTaskStateCompleted@3$NSURLSessionTaskStateRunning@0$NSURLSessionTaskStateSuspended@1$NSUTF16BigEndianStringEncoding@2415919360$NSUTF16LittleEndianStringEncoding@2483028224$NSUTF16StringEncoding@10$NSUTF32BigEndianStringEncoding@2550137088$NSUTF32LittleEndianStringEncoding@2617245952$NSUTF32StringEncoding@2348810496$NSUTF8StringEncoding@4$NSUbiquitousFileErrorMaximum@4607$NSUbiquitousFileErrorMinimum@4352$NSUbiquitousFileNotUploadedDueToQuotaError@4354$NSUbiquitousFileUbiquityServerNotAvailable@4355$NSUbiquitousFileUnavailableError@4353$NSUbiquitousKeyValueStoreAccountChange@3$NSUbiquitousKeyValueStoreInitialSyncChange@1$NSUbiquitousKeyValueStoreQuotaViolationChange@2$NSUbiquitousKeyValueStoreServerChange@0$NSUncachedRead@2$NSUndoCloseGroupingRunLoopOrdering@350000$NSUnicodeStringEncoding@10$NSUnionSetExpressionType@5$NSUnknownKeyScriptError@7$NSUnknownKeySpecifierError@3$NSUserActivityConnectionUnavailableError@4609$NSUserActivityErrorMaximum@4863$NSUserActivityErrorMinimum@4608$NSUserActivityHandoffFailedError@4608$NSUserActivityHandoffUserInfoTooLargeError@4611$NSUserActivityRemoteApplicationTimedOutError@4610$NSUserCancelledError@3072$NSUserDirectory@7$NSUserDomainMask@1$NSUserNotificationActivationTypeActionButtonClicked@2$NSUserNotificationActivationTypeAdditionalActionClicked@4$NSUserNotificationActivationTypeContentsClicked@1$NSUserNotificationActivationTypeNone@0$NSUserNotificationActivationTypeReplied@3$NSValidationErrorMaximum@2047$NSValidationErrorMinimum@1024$NSVariableExpressionType@2$NSVolumeEnumerationProduceFileReferenceURLs@4$NSVolumeEnumerationSkipHiddenVolumes@2$NSWeekCalendarUnit@256$NSWeekOfMonthCalendarUnit@4096$NSWeekOfYearCalendarUnit@8192$NSWeekdayCalendarUnit@512$NSWeekdayOrdinalCalendarUnit@1024$NSWidthInsensitiveSearch@256$NSWindows95OperatingSystem@2$NSWindowsCP1250StringEncoding@15$NSWindowsCP1251StringEncoding@11$NSWindowsCP1252StringEncoding@12$NSWindowsCP1253StringEncoding@13$NSWindowsCP1254StringEncoding@14$NSWindowsNTOperatingSystem@1$NSWrapCalendarComponents@1$NSXMLAttributeCDATAKind@6$NSXMLAttributeDeclarationKind@10$NSXMLAttributeEntitiesKind@11$NSXMLAttributeEntityKind@10$NSXMLAttributeEnumerationKind@14$NSXMLAttributeIDKind@7$NSXMLAttributeIDRefKind@8$NSXMLAttributeIDRefsKind@9$NSXMLAttributeKind@3$NSXMLAttributeNMTokenKind@12$NSXMLAttributeNMTokensKind@13$NSXMLAttributeNotationKind@15$NSXMLCommentKind@6$NSXMLDTDKind@8$NSXMLDocumentHTMLKind@2$NSXMLDocumentIncludeContentTypeDeclaration@262144$NSXMLDocumentKind@1$NSXMLDocumentTextKind@3$NSXMLDocumentTidyHTML@512$NSXMLDocumentTidyXML@1024$NSXMLDocumentValidate@8192$NSXMLDocumentXHTMLKind@1$NSXMLDocumentXInclude@65536$NSXMLDocumentXMLKind@0$NSXMLElementDeclarationAnyKind@18$NSXMLElementDeclarationElementKind@20$NSXMLElementDeclarationEmptyKind@17$NSXMLElementDeclarationKind@11$NSXMLElementDeclarationMixedKind@19$NSXMLElementDeclarationUndefinedKind@16$NSXMLElementKind@2$NSXMLEntityDeclarationKind@9$NSXMLEntityGeneralKind@1$NSXMLEntityParameterKind@4$NSXMLEntityParsedKind@2$NSXMLEntityPredefined@5$NSXMLEntityUnparsedKind@3$NSXMLInvalidKind@0$NSXMLNamespaceKind@4$NSXMLNodeCompactEmptyElement@4$NSXMLNodeExpandEmptyElement@2$NSXMLNodeIsCDATA@1$NSXMLNodeLoadExternalEntitiesAlways@16384$NSXMLNodeLoadExternalEntitiesNever@524288$NSXMLNodeLoadExternalEntitiesSameOriginOnly@32768$NSXMLNodeNeverEscapeContents@32$NSXMLNodeOptionsNone@0$NSXMLNodePreserveAll@4293918750$NSXMLNodePreserveAttributeOrder@2097152$NSXMLNodeP
reserveCDATA@16777216$NSXMLNodePreserveCharacterReferences@134217728$NSXMLNodePreserveDTD@67108864$NSXMLNodePreserveEmptyElements@6$NSXMLNodePreserveEntities@4194304$NSXMLNodePreserveNamespaceOrder@1048576$NSXMLNodePreservePrefixes@8388608$NSXMLNodePreserveQuotes@24$NSXMLNodePreserveWhitespace@33554432$NSXMLNodePrettyPrint@131072$NSXMLNodePromoteSignificantWhitespace@268435456$NSXMLNodeUseDoubleQuotes@16$NSXMLNodeUseSingleQuotes@8$NSXMLNotationDeclarationKind@12$NSXMLParserAttributeHasNoValueError@41$NSXMLParserAttributeListNotFinishedError@51$NSXMLParserAttributeListNotStartedError@50$NSXMLParserAttributeNotFinishedError@40$NSXMLParserAttributeNotStartedError@39$NSXMLParserAttributeRedefinedError@42$NSXMLParserCDATANotFinishedError@63$NSXMLParserCharacterRefAtEOFError@10$NSXMLParserCharacterRefInDTDError@13$NSXMLParserCharacterRefInEpilogError@12$NSXMLParserCharacterRefInPrologError@11$NSXMLParserCommentContainsDoubleHyphenError@80$NSXMLParserCommentNotFinishedError@45$NSXMLParserConditionalSectionNotFinishedError@59$NSXMLParserConditionalSectionNotStartedError@58$NSXMLParserDOCTYPEDeclNotFinishedError@61$NSXMLParserDelegateAbortedParseError@512$NSXMLParserDocumentStartError@3$NSXMLParserElementContentDeclNotFinishedError@55$NSXMLParserElementContentDeclNotStartedError@54$NSXMLParserEmptyDocumentError@4$NSXMLParserEncodingNotSupportedError@32$NSXMLParserEntityBoundaryError@90$NSXMLParserEntityIsExternalError@29$NSXMLParserEntityIsParameterError@30$NSXMLParserEntityNotFinishedError@37$NSXMLParserEntityNotStartedError@36$NSXMLParserEntityRefAtEOFError@14$NSXMLParserEntityRefInDTDError@17$NSXMLParserEntityRefInEpilogError@16$NSXMLParserEntityRefInPrologError@15$NSXMLParserEntityRefLoopError@89$NSXMLParserEntityReferenceMissingSemiError@23$NSXMLParserEntityReferenceWithoutNameError@22$NSXMLParserEntityValueRequiredError@84$NSXMLParserEqualExpectedError@75$NSXMLParserExternalStandaloneEntityError@82$NSXMLParserExternalSubsetNotFinishedError@60$NSXMLParserExtraContentError@86$NSXMLParserGTRequiredError@73$NSXMLParserInternalError@1$NSXMLParserInvalidCharacterError@9$NSXMLParserInvalidCharacterInEntityError@87$NSXMLParserInvalidCharacterRefError@8$NSXMLParserInvalidConditionalSectionError@83$NSXMLParserInvalidDecimalCharacterRefError@7$NSXMLParserInvalidEncodingError@81$NSXMLParserInvalidEncodingNameError@79$NSXMLParserInvalidHexCharacterRefError@6$NSXMLParserInvalidURIError@91$NSXMLParserLTRequiredError@72$NSXMLParserLTSlashRequiredError@74$NSXMLParserLessThanSymbolInAttributeError@38$NSXMLParserLiteralNotFinishedError@44$NSXMLParserLiteralNotStartedError@43$NSXMLParserMisplacedCDATAEndStringError@62$NSXMLParserMisplacedXMLDeclarationError@64$NSXMLParserMixedContentDeclNotFinishedError@53$NSXMLParserMixedContentDeclNotStartedError@52$NSXMLParserNAMERequiredError@68$NSXMLParserNMTOKENRequiredError@67$NSXMLParserNamespaceDeclarationError@35$NSXMLParserNoDTDError@94$NSXMLParserNotWellBalancedError@85$NSXMLParserNotationNotFinishedError@49$NSXMLParserNotationNotStartedError@48$NSXMLParserOutOfMemoryError@2$NSXMLParserPCDATARequiredError@69$NSXMLParserParsedEntityRefAtEOFError@18$NSXMLParserParsedEntityRefInEpilogError@20$NSXMLParserParsedEntityRefInInternalError@88$NSXMLParserParsedEntityRefInInternalSubsetError@21$NSXMLParserParsedEntityRefInPrologError@19$NSXMLParserParsedEntityRefMissingSemiError@25$NSXMLParserParsedEntityRefNoNameError@24$NSXMLParserPrematureDocumentEndError@5$NSXMLParserProcessingInstructionNotFinishedError@47$NSXMLParserProcessingInstructionNotStartedError@46$NSXMLParserPublic
IdentifierRequiredError@71$NSXMLParserResolveExternalEntitiesAlways@3$NSXMLParserResolveExternalEntitiesNever@0$NSXMLParserResolveExternalEntitiesNoNetwork@1$NSXMLParserResolveExternalEntitiesSameOriginOnly@2$NSXMLParserSeparatorRequiredError@66$NSXMLParserSpaceRequiredError@65$NSXMLParserStandaloneValueError@78$NSXMLParserStringNotClosedError@34$NSXMLParserStringNotStartedError@33$NSXMLParserTagNameMismatchError@76$NSXMLParserURIFragmentError@92$NSXMLParserURIRequiredError@70$NSXMLParserUndeclaredEntityError@26$NSXMLParserUnfinishedTagError@77$NSXMLParserUnknownEncodingError@31$NSXMLParserUnparsedEntityError@28$NSXMLParserXMLDeclNotFinishedError@57$NSXMLParserXMLDeclNotStartedError@56$NSXMLProcessingInstructionKind@5$NSXMLTextKind@7$NSXPCConnectionErrorMaximum@4224$NSXPCConnectionErrorMinimum@4096$NSXPCConnectionInterrupted@4097$NSXPCConnectionInvalid@4099$NSXPCConnectionPrivileged@4096$NSXPCConnectionReplyInvalid@4101$NSYearCalendarUnit@4$NSYearForWeekOfYearCalendarUnit@16384$NS_BLOCKS_AVAILABLE@1$NS_BigEndian@2$NS_LittleEndian@1$NS_UNICHAR_IS_EIGHT_BIT@0$NS_UnknownByteOrder@0$''' -misc.update({'NSUndefinedDateComponent': sel32or64(2147483647, 9223372036854775807), 'NSDateComponentUndefined': sel32or64(2147483647, 9223372036854775807), 'NSMaximumStringLength': sel32or64(2147483647, 9223372036854775807), 'NSNotFound': sel32or64(2147483647, 9223372036854775807)}) -misc.update({'NSFoundationVersionNumber10_2_3': 462.0, 'NSFoundationVersionNumber10_2_2': 462.0, 'NSFoundationVersionNumber10_2_1': 462.0, 'NSFoundationVersionNumber10_2_7': 462.7, 'NSFoundationVersionNumber10_2_6': 462.0, 'NSFoundationVersionNumber10_2_5': 462.0, 'NSFoundationVersionNumber10_2_4': 462.0, 'NSFoundationVersionNumber10_1_4': 425.0, 'NSFoundationVersionNumber10_4_4_Intel': 567.23, 'NSFoundationVersionNumber10_2_8': 462.7, 'NSFoundationVersionNumber10_1_1': 425.0, 'NSFoundationVersionNumber10_1_2': 425.0, 'NSFoundationVersionNumber10_1_3': 425.0, 'NSFoundationVersionNumber10_4_9': 567.29, 'NSFoundationVersionNumber10_4_1': 567.0, 'NSFoundationVersionNumber10_3_8': 500.56, 'NSFoundationVersionNumber10_3_9': 500.58, 'NSFoundationVersionNumber10_5_4': 677.19, 'NSFoundationVersionNumber10_5_5': 677.21, 'NSFoundationVersionNumber10_5_6': 677.22, 'NSFoundationVersionNumber10_5_7': 677.24, 'NSFoundationVersionNumber10_3_2': 500.3, 'NSFoundationVersionNumber10_3_3': 500.54, 'NSFoundationVersionNumber10_4_3': 567.21, 'NSFoundationVersionNumber10_3_1': 500.0, 'NSFoundationVersionNumber10_3_6': 500.56, 'NSFoundationVersionNumber10_3_7': 500.56, 'NSFoundationVersionNumber10_3_4': 500.56, 'NSFoundationVersionNumber10_3_5': 500.56, 'NSFoundationVersionNumber10_4_2': 567.12, 'NSFoundationVersionNumber10_4_5': 567.25, 'NSFoundationVersionNumber10_6': 751.0, 'NSFoundationVersionNumber10_7': 833.1, 'NSFoundationVersionNumber10_4': 567.0, 'NSFoundationVersionNumber10_5': 677.0, 'NSFoundationVersionNumber10_2': 462.0, 'NSFoundationVersionNumber10_4_7': 567.27, 'NSFoundationVersionNumber10_0': 397.4, 'NSFoundationVersionNumber10_1': 425.0, 'NSFoundationVersionNumber10_4_6': 567.26, 'NSFoundationVersionNumber10_8': 945.0, 'NSFoundationVersionNumber10_3': 500.0, 'NSFoundationVersionNumber10_4_4_PowerPC': 567.21, 'NSFoundationVersionNumber10_4_11': 567.36, 'NSFoundationVersionNumber10_4_10': 567.29, 'NSFoundationVersionNumber10_9_2': 1056.13, 'NSFoundationVersionNumber10_8_4': 945.18, 'NSFoundationVersionNumber10_8_1': 945.0, 'NSFoundationVersionNumber10_8_3': 945.16, 'NSFoundationVersionNumber10_8_2': 945.11, 
'NSFoundationVersionNumber10_4_8': 567.28, 'NSFoundationVersionNumber10_5_8': 677.26, 'NSTimeIntervalSince1970': 978307200.0, 'NSFoundationVersionNumber10_6_7': 751.53, 'NSFoundationVersionNumber10_6_6': 751.53, 'NSFoundationVersionNumber10_6_5': 751.42, 'NSFoundationVersionNumber10_6_4': 751.29, 'NSFoundationVersionNumber10_6_3': 751.21, 'NSFoundationVersionNumber10_6_2': 751.14, 'NSFoundationVersionNumber10_6_1': 751.0, 'NSFoundationVersionNumber10_5_1': 677.1, 'NSFoundationVersionNumber10_5_2': 677.15, 'NSFoundationVersionNumber10_6_8': 751.62, 'NSFoundationVersionNumber10_5_3': 677.19, 'NSFoundationVersionNumber10_7_4': 833.25, 'NSFoundationVersionNumber10_7_2': 833.2, 'NSFoundationVersionNumber10_7_3': 833.24, 'NSFoundationVersionNumber10_7_1': 833.1}) -functions={'NSSwapShort': (b'SS',), 'NSJavaClassesForBundle': (b'@@Z^@', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'NSDecimalIsNotANumber': (b'Z^{_NSDecimal=b8b4b1b1b18[8S]}', '', {'arguments': {0: {'type_modifier': 'n'}}}), 'NSSwapHostIntToBig': (b'II',), 'NSDecimalDivide': (sel32or64(b'I^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}I', b'Q^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}Q'), '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'NSEndMapTableEnumeration': (sel32or64(b'v^{_NSMapEnumerator=II^v}', b'v^{_NSMapEnumerator=QQ^v}'),), 'NSEqualRects': (sel32or64(b'Z{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'Z{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSIntegralRect': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSEqualSizes': (sel32or64(b'Z{_NSSize=ff}{_NSSize=ff}', b'Z{CGSize=dd}{CGSize=dd}'),), 'NSSwapHostLongToLittle': (sel32or64(b'LL', b'QQ'),), 'NSSwapLittleDoubleToHost': (b'd{_NSSwappedDouble=Q}',), 'NSSizeFromCGSize': (sel32or64(b'{_NSSize=ff}{CGSize=ff}', b'{CGSize=dd}{CGSize=dd}'),), 'NSDecimalCompact': (b'v^{_NSDecimal=b8b4b1b1b18[8S]}', '', {'arguments': {0: {'type_modifier': 'N'}}}), 'NSCreateHashTable': (sel32or64(b'@{_NSHashTableCallBacks=^?^?^?^?^?}I', b'@{_NSHashTableCallBacks=^?^?^?^?^?}Q'), '', {'retval': {'already_cfretained': True}}), 'NSOpenStepRootDirectory': (b'@',), 'NSRoundDownToMultipleOfPageSize': (sel32or64(b'II', b'QQ'),), 'NSMapInsertIfAbsent': (b'^v@^v^v',), 'NSJavaNeedsToLoadClasses': (b'Z@',), 'NSFileTypeForHFSTypeCode': (sel32or64(b'@L', b'@I'),), 'NSEqualRanges': (sel32or64(b'Z{_NSRange=II}{_NSRange=II}', b'Z{_NSRange=QQ}{_NSRange=QQ}'),), 'NSDecimalNormalize': (sel32or64(b'I^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}I', b'Q^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}Q'), '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}}}), 'NSFreeHashTable': (b'v@',), 'NSHostByteOrder': (sel32or64(b'l', b'q'),), 'NSGetUncaughtExceptionHandler': (b'^?', '', {'retval': {'callable': {'retval': {'type': 'v'}, 'arguments': {0: {'type': '@'}}}}}), '_NSAutoreleaseNoPool': (b'v^v',), 'NSStringFromMapTable': (b'@@',), 'NSJavaNeedsVirtualMachine': (b'Z@',), 'NSPointFromString': (sel32or64(b'{_NSPoint=ff}@', b'{CGPoint=dd}@'),), 'NSEnumerateMapTable': (sel32or64(b'{_NSMapEnumerator=II^v}@', b'{_NSMapEnumerator=QQ^v}@'),), 'NSIsEmptyRect': (sel32or64(b'Z{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'Z{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSHeight': 
(sel32or64(b'f{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSHomeDirectory': (b'@',), 'NSResetMapTable': (b'v@',), 'NSMinY': (sel32or64(b'f{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSPageSize': (sel32or64(b'I', b'Q'),), 'NSUserName': (b'@',), 'NSMapInsert': (b'v@^v^v',), 'NSDeallocateObject': (b'v@',), 'NSDefaultMallocZone': (b'^{_NSZone=}',), 'NSRecordAllocationEvent': (b'vi@',), 'NSDecimalPower': (sel32or64(b'I^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}II', b'Q^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}QQ'), '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}}}), '_NSAutoreleaseHighWaterLog': (b'vI',), 'NSMaxRange': (sel32or64(b'I{_NSRange=II}', b'Q{_NSRange=QQ}'),), 'NSMinX': (sel32or64(b'f{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSLogPageSize': (sel32or64(b'I', b'Q'),), 'NSMouseInRect': (sel32or64(b'Z{_NSPoint=ff}{_NSRect={_NSPoint=ff}{_NSSize=ff}}Z', b'Z{CGPoint=dd}{CGRect={CGPoint=dd}{CGSize=dd}}Z'),), 'NSDecimalCompare': (sel32or64(b'i^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}', b'q^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}'), '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'type_modifier': 'n'}}}), 'NSAllMapTableValues': (b'@@',), 'NSProtocolFromString': (b'@@',), 'NSPointInRect': (sel32or64(b'Z{_NSPoint=ff}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'Z{CGPoint=dd}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSSetZoneName': (b'v^{_NSZone=}@',), 'CFBridgingRetain': (b'@@',), 'NSCopyObject': (sel32or64(b'@@I^{_NSZone=}', b'@@Q^{_NSZone=}'), '', {'retval': {'already_cfretained': True}}), 'NSMidY': (sel32or64(b'f{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSSwapLongLong': (b'QQ',), 'NSDecrementExtraRefCountWasZero': (b'Z@',), 'NSSwapBigLongToHost': (sel32or64(b'LL', b'QQ'),), 'NSDecimalMultiply': (sel32or64(b'I^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}I', b'Q^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}Q'), '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'NSSwapBigLongLongToHost': (b'QQ',), 'NSShouldRetainWithZone': (b'Z@^{_NSZone=}',), 'NSStringFromRange': (sel32or64(b'@{_NSRange=II}', b'@{_NSRange=QQ}'),), 'NSHashGet': (b'^v@^v',), 'NSStringFromClass': (b'@#',), 'NSPointToCGPoint': (sel32or64(b'{CGPoint=ff}{_NSPoint=ff}', b'{CGPoint=dd}{CGPoint=dd}'),), 'NSUnionRect': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSRectToCGRect': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSCopyHashTableWithZone': (b'@@^{_NSZone=}', '', {'retval': {'already_cfretained': True}}), 'NSSwapBigShortToHost': (b'SS',), 'NSSwapHostShortToBig': (b'SS',), 'NSStringFromPoint': (sel32or64(b'@{_NSPoint=ff}', b'@{CGPoint=dd}'),), 'NSWidth': (sel32or64(b'f{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSRealMemoryAvailable': (sel32or64(b'I', b'Q'),), 'NSNextMapEnumeratorPair': (sel32or64(b'Z^{_NSMapEnumerator=II^v}^^v^^v', b'Z^{_NSMapEnumerator=QQ^v}^^v^^v'),), 'NSAllHashTableObjects': (b'@@',), 'NSPointFromCGPoint': 
(sel32or64(b'{_NSPoint=ff}{CGPoint=ff}', b'{CGPoint=dd}{CGPoint=dd}'),), 'NSSizeToCGSize': (sel32or64(b'{CGSize=ff}{_NSSize=ff}', b'{CGSize=dd}{CGSize=dd}'),), 'NSHashInsertKnownAbsent': (b'v@^v',), 'NSNextHashEnumeratorItem': (sel32or64(b'^v^{_NSHashEnumerator=II^v}', b'^v^{_NSHashEnumerator=QQ^v}'),), 'NSSwapHostLongLongToLittle': (b'QQ',), 'NSClassFromString': (b'#@',), 'NSSwapLittleLongToHost': (sel32or64(b'LL', b'QQ'),), 'NSMakePoint': (sel32or64(b'{_NSPoint=ff}ff', b'{CGPoint=dd}dd'),), 'NSSizeFromString': (sel32or64(b'{_NSSize=ff}@', b'{CGSize=dd}@'),), '_NSAutoreleaseFreedObject': (b'v^v',), 'NSConvertHostFloatToSwapped': (b'{_NSSwappedFloat=I}f',), 'NSIntersectsRect': (sel32or64(b'Z{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'Z{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSEdgeInsetsMake': (sel32or64(b'{NSEdgeInsets=ffff}ffff', b'{NSEdgeInsets=dddd}dddd'),), 'NSIntersectionRect': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSDecimalAdd': (sel32or64(b'I^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}I', b'Q^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}Q'), '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'NSCreateHashTableWithZone': (sel32or64(b'@{_NSHashTableCallBacks=^?^?^?^?^?}I^{_NSZone=}', b'@{_NSHashTableCallBacks=^?^?^?^?^?}Q^{_NSZone=}'), '', {'retval': {'already_cfretained': True}}), 'NSSwapFloat': (b'{_NSSwappedFloat=I}{_NSSwappedFloat=I}',), 'NSDecimalSubtract': (sel32or64(b'I^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}I', b'Q^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}Q'), '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'NSSetUncaughtExceptionHandler': (b'v^?', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'typestr': '@'}}}, 'callable_retained': True}}}), 'NSFreeMapTable': (b'v@',), 'NSMapRemove': (b'v@^v',), 'NSLocationInRange': (sel32or64(b'ZI{_NSRange=II}', b'ZQ{_NSRange=QQ}'),), 'NSFullUserName': (b'@',), 'NSSwapLittleShortToHost': (b'SS',), 'NSSwapLong': (sel32or64(b'LL', b'QQ'),), 'NSJavaSetupVirtualMachine': (b'@',), 'NSResetHashTable': (b'v@',), 'NSStringFromRect': (sel32or64(b'@{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'@{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSSwapLittleLongLongToHost': (b'QQ',), 'NSSwapLittleFloatToHost': (b'f{_NSSwappedFloat=I}',), 'NSOffsetRect': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}ff', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}dd'),), 'NSCountMapTable': (sel32or64(b'I@', b'Q@'),), 'NSHFSTypeOfFile': (b'@@',), 'NSHashInsertIfAbsent': (b'^v@^v',), 'NSSwapBigIntToHost': (b'II',), 'NSRecycleZone': (b'v^{_NSZone=}',), 'NSStringFromProtocol': (b'@@',), 'NSFrameAddress': (sel32or64(b'^vI', b'^vQ'),), 'NSCountFrames': (sel32or64(b'I', b'Q'),), 'CFBridgingRelease': (b'@@',), 'NSMapMember': (b'Z@^v^^v^^v',), 'NSDivideRect': (sel32or64(b'v{_NSRect={_NSPoint=ff}{_NSSize=ff}}^{_NSRect={_NSPoint=ff}{_NSSize=ff}}^{_NSRect={_NSPoint=ff}{_NSSize=ff}}fI', 
b'v{CGRect={CGPoint=dd}{CGSize=dd}}^{CGRect={CGPoint=dd}{CGSize=dd}}^{CGRect={CGPoint=dd}{CGSize=dd}}dQ'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'NSRangeFromString': (sel32or64(b'{_NSRange=II}@', b'{_NSRange=QQ}@'),), 'NSMapGet': (b'^v@^v',), 'NSHashInsert': (b'v@^v',), 'NSSwapHostIntToLittle': (b'II',), 'NSEndHashTableEnumeration': (sel32or64(b'v^{_NSHashEnumerator=II^v}', b'v^{_NSHashEnumerator=QQ^v}'),), 'NSJavaBundleCleanup': (b'v@@',), 'NSSwapHostFloatToBig': (b'{_NSSwappedFloat=I}f',), 'NSTemporaryDirectory': (b'@',), 'NSDecimalMultiplyByPowerOf10': (sel32or64(b'I^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}sI', b'Q^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}sQ'), '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}}}), 'NSCompareHashTables': (b'Z@@',), 'NSMakeRect': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}ffff', b'{CGRect={CGPoint=dd}{CGSize=dd}}dddd'),), 'NSMakeCollectable': (b'@@',), 'NSGetSizeAndAlignment': (sel32or64(b'^c^c^I^I', b'^c^c^Q^Q'), '', {'retval': {'c_array_delimited_by_null': True}, 'arguments': {0: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'NSDecimalRound': (sel32or64(b'v^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}iI', b'v^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}qQ'), '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}}}), 'NSInsetRect': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}ff', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}dd'),), 'NSAllocateObject': (sel32or64(b'@#I^{_NSZone=}', b'@#Q^{_NSZone=}'),), 'NSSwapInt': (b'II',), 'NSUnionRange': (sel32or64(b'{_NSRange=II}{_NSRange=II}{_NSRange=II}', b'{_NSRange=QQ}{_NSRange=QQ}{_NSRange=QQ}'),), 'NSSelectorFromString': (b':@',), 'NSStringFromHashTable': (b'@@',), 'NSZoneName': (b'@^{_NSZone=}',), 'NSHFSTypeCodeFromFileType': (sel32or64(b'L@', b'I@'),), 'NSSwapDouble': (b'{_NSSwappedDouble=Q}{_NSSwappedDouble=Q}',), 'NSLog': (b'v@', '', {'arguments': {0: {'printf_format': True}}, 'variadic': True}), 'NSMakeSize': (sel32or64(b'{_NSSize=ff}ff', b'{CGSize=dd}dd'),), 'NSSwapHostDoubleToLittle': (b'{_NSSwappedDouble=Q}d',), 'NSRectFromString': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}@', b'{CGRect={CGPoint=dd}{CGSize=dd}}@'),), 'NSDecimalString': (b'@^{_NSDecimal=b8b4b1b1b18[8S]}@', '', {'arguments': {0: {'type_modifier': 'n'}}}), 'NSCreateZone': (sel32or64(b'^{_NSZone=}IIZ', b'^{_NSZone=}QQZ'), '', {'retval': {'already_cfretained': True}}), 'NSAllMapTableKeys': (b'@@',), 'NSIncrementExtraRefCount': (b'v@',), 'NSDecimalCopy': (b'v^{_NSDecimal=b8b4b1b1b18[8S]}^{_NSDecimal=b8b4b1b1b18[8S]}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'n'}}}), 'NSStringFromSelector': (b'@:',), 'NSMakeRange': (sel32or64(b'{_NSRange=II}II', b'{_NSRange=QQ}QQ'),), 'NSConvertSwappedFloatToHost': (b'f{_NSSwappedFloat=I}',), 'NSRoundUpToMultipleOfPageSize': (sel32or64(b'II', b'QQ'),), 'NSContainsRect': (sel32or64(b'Z{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'Z{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSSwapBigDoubleToHost': (b'd{_NSSwappedDouble=Q}',), 'NSIntersectionRange': (sel32or64(b'{_NSRange=II}{_NSRange=II}{_NSRange=II}', b'{_NSRange=QQ}{_NSRange=QQ}{_NSRange=QQ}'),), 'NSSwapHostDoubleToBig': (b'{_NSSwappedDouble=Q}d',), 'NSJavaSetup': 
(b'@@',), 'NSConvertHostDoubleToSwapped': (b'{_NSSwappedDouble=Q}d',), 'NSSwapHostLongToBig': (sel32or64(b'LL', b'QQ'),), 'NSJavaClassesFromPath': (b'@@@Z^@', '', {'arguments': {3: {'type_modifier': 'o'}}}), 'NSMaxY': (sel32or64(b'f{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSMaxX': (sel32or64(b'f{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSCreateMapTableWithZone': (sel32or64(b'@{_NSMapTableKeyCallBacks=^?^?^?^?^?^v}{_NSMapTableValueCallBacks=^?^?^?}I^{_NSZone=}', b'@{_NSMapTableKeyCallBacks=^?^?^?^?^?^v}{_NSMapTableValueCallBacks=^?^?^?}Q^{_NSZone=}'), '', {'retval': {'already_cfretained': True}}), 'NSExtraRefCount': (sel32or64(b'I@', b'Q@'),), 'NSRectFromCGRect': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSIntegralRectWithOptions': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}Q', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}Q'),), 'NSStringFromSize': (sel32or64(b'@{_NSSize=ff}', b'@{CGSize=dd}'),), 'NSJavaProvidesClasses': (b'Z@',), 'NSHomeDirectoryForUser': (b'@@',), 'NSIsFreedObject': (b'Z@',), 'NSSwapBigFloatToHost': (b'f{_NSSwappedFloat=I}',), 'NSConvertSwappedDoubleToHost': (b'd{_NSSwappedDouble=Q}',), 'NSMidX': (sel32or64(b'f{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'NSReturnAddress': (sel32or64(b'^vI', b'^vQ'),), 'NSEqualPoints': (sel32or64(b'Z{_NSPoint=ff}{_NSPoint=ff}', b'Z{CGPoint=dd}{CGPoint=dd}'),), 'NSSwapHostLongLongToBig': (b'QQ',), 'NSCompareMapTables': (b'Z@@',), 'NSJavaBundleSetup': (b'@@@',), 'NSHashRemove': (b'v@^v',), 'NSSwapLittleIntToHost': (b'II',), 'NSCountHashTable': (sel32or64(b'I@', b'Q@'),), 'NSJavaObjectNamedInPath': (b'@@@',), 'NSMapInsertKnownAbsent': (b'v@^v^v',), 'NSCreateMapTable': (sel32or64(b'@{_NSMapTableKeyCallBacks=^?^?^?^?^?^v}{_NSMapTableValueCallBacks=^?^?^?}I', b'@{_NSMapTableKeyCallBacks=^?^?^?^?^?^v}{_NSMapTableValueCallBacks=^?^?^?}Q'), '', {'retval': {'already_cfretained': True}}), 'NSSwapHostFloatToLittle': (b'{_NSSwappedFloat=I}f',), 'NSEdgeInsetsEqual': (sel32or64(b'Z{NSEdgeInsets=ffff}{NSEdgeInsets=ffff}', b'Z{NSEdgeInsets=dddd}{NSEdgeInsets=dddd}'),), 'NSEnumerateHashTable': (sel32or64(b'{_NSHashEnumerator=II^v}@', b'{_NSHashEnumerator=QQ^v}@'),), 'NXReadNSObjectFromCoder': (b'@@',), 'NSCopyMapTableWithZone': (b'@@^{_NSZone=}', '', {'retval': {'already_cfretained': True}}), 'NSSwapHostShortToLittle': (b'SS',), 'NSSearchPathForDirectoriesInDomains': (sel32or64(b'@IIZ', b'@QQZ'),)} -aliases = {'NSCalendarUnitYear': 'kCFCalendarUnitYear', 'NSURLErrorBadURL': 'kCFURLErrorBadURL', 'NSWeekCalendarUnit': 'kCFCalendarUnitWeek', 'NSURLErrorCannotCreateFile': 'kCFURLErrorCannotCreateFile', 'NSWeekdayCalendarUnit': 'NSCalendarUnitWeekday', 'NSURLErrorFileIsDirectory': 'kCFURLErrorFileIsDirectory', 'NSPropertyListXMLFormat_v1_0': 'kCFPropertyListXMLFormat_v1_0', 'NSHashTableZeroingWeakMemory': 'NSPointerFunctionsZeroingWeakMemory', 'NSNumberFormatterPadBeforeSuffix': 'kCFNumberFormatterPadBeforeSuffix', 'NSCalendarUnitWeekdayOrdinal': 'kCFCalendarUnitWeekdayOrdinal', 'NSNumberFormatterDecimalStyle': 'kCFNumberFormatterDecimalStyle', 'NSMinuteCalendarUnit': 'NSCalendarUnitMinute', 'NSURLErrorRequestBodyStreamExhausted': 'kCFURLErrorRequestBodyStreamExhausted', 'NSISO8601DateFormatWithDay': 'kCFISO8601DateFormatWithDay', 'NSHashTableCopyIn': 'NSPointerFunctionsCopyIn', 
'NSMaxXEdge': 'CGRectMaxXEdge', 'NSISO8601DateFormatWithYear': 'kCFISO8601DateFormatWithYear', 'NSNumberFormatterCurrencyAccountingStyle': 'kCFNumberFormatterCurrencyAccountingStyle', 'NSDateFormatterNoStyle': 'kCFDateFormatterNoStyle', 'NSTimeZoneCalendarUnit': 'NSCalendarUnitTimeZone', 'NSNumberFormatterSpellOutStyle': 'kCFNumberFormatterSpellOutStyle', 'NSNumberFormatterCurrencyPluralStyle': 'kCFNumberFormatterCurrencyPluralStyle', 'NSURLErrorHTTPTooManyRedirects': 'kCFURLErrorHTTPTooManyRedirects', 'NSURLErrorDataNotAllowed': 'kCFURLErrorDataNotAllowed', 'NSURLErrorDownloadDecodingFailedToComplete': 'kCFURLErrorDownloadDecodingFailedToComplete', 'NS_UnknownByteOrder': 'CFByteOrderUnknown', 'NSPropertyListOpenStepFormat': 'kCFPropertyListOpenStepFormat', 'NSURLErrorCallIsActive': 'kCFURLErrorCallIsActive', 'NSISO8601DateFormatWithDashSeparatorInDate': 'kCFISO8601DateFormatWithDashSeparatorInDate', 'NSCalendarUnitHour': 'kCFCalendarUnitHour', 'NSURLErrorSecureConnectionFailed': 'kCFURLErrorSecureConnectionFailed', 'NSRectEdgeMaxX': 'NSMaxXEdge', 'NSRectEdgeMaxY': 'NSMaxYEdge', 'NSNumberFormatterRoundCeiling': 'kCFNumberFormatterRoundCeiling', 'NSURLErrorServerCertificateUntrusted': 'kCFURLErrorServerCertificateUntrusted', 'NSURLErrorCannotFindHost': 'kCFURLErrorCannotFindHost', 'NSLocaleLanguageDirectionTopToBottom': 'kCFLocaleLanguageDirectionTopToBottom', 'NSNumberFormatterPadAfterPrefix': 'kCFNumberFormatterPadAfterPrefix', 'NSURLErrorNoPermissionsToReadFile': 'kCFURLErrorNoPermissionsToReadFile', 'NSQuarterCalendarUnit': 'NSCalendarUnitQuarter', 'NSNumberFormatterPercentStyle': 'kCFNumberFormatterPercentStyle', 'NSISO8601DateFormatWithFullTime': 'kCFISO8601DateFormatWithFullTime', 'NSIntegerMin': 'LONG_MIN', 'NSLocaleLanguageDirectionLeftToRight': 'kCFLocaleLanguageDirectionLeftToRight', 'NSNumberFormatterPadAfterSuffix': 'kCFNumberFormatterPadAfterSuffix', 'NSURLErrorClientCertificateRequired': 'kCFURLErrorClientCertificateRequired', 'NSSecondCalendarUnit': 'NSCalendarUnitSecond', 'NSURLErrorCannotConnectToHost': 'kCFURLErrorCannotConnectToHost', 'NSURLErrorDNSLookupFailed': 'kCFURLErrorDNSLookupFailed', 'NSURLErrorZeroByteResource': 'kCFURLErrorZeroByteResource', 'NSMonthCalendarUnit': 'NSCalendarUnitMonth', 'NSNumberFormatterNoStyle': 'kCFNumberFormatterNoStyle', 'NSMinYEdge': 'CGRectMinYEdge', 'NSHashTableWeakMemory': 'NSPointerFunctionsWeakMemory', 'NS_NONATOMIC_IOSONLY': 'atomic', 'NSURLErrorClientCertificateRejected': 'kCFURLErrorClientCertificateRejected', 'NSURLErrorUserCancelledAuthentication': 'kCFURLErrorUserCancelledAuthentication', 'NSCalendarUnitWeekOfYear': 'kCFCalendarUnitWeekOfYear', 'NSDateFormatterLongStyle': 'kCFDateFormatterLongStyle', 'NSMaxYEdge': 'CGRectMaxYEdge', 'NSWeekdayOrdinalCalendarUnit': 'NSCalendarUnitWeekdayOrdinal', 'NSURLErrorResourceUnavailable': 'kCFURLErrorResourceUnavailable', 'NSURLErrorNetworkConnectionLost': 'kCFURLErrorNetworkConnectionLost', 'NS_LittleEndian': 'CFByteOrderLittleEndian', 'NSEraCalendarUnit': 'NSCalendarUnitEra', 'NSISO8601DateFormatWithColonSeparatorInTime': 'kCFISO8601DateFormatWithColonSeparatorInTime', 'NSPropertyListMutableContainers': 'kCFPropertyListMutableContainers', 'NSHashTableObjectPointerPersonality': 'NSPointerFunctionsObjectPointerPersonality', 'NS_VOIDRETURN': 'return', 'NSOperationQualityOfServiceUtility': 'NSQualityOfServiceUtility', 'NSWeekOfMonthCalendarUnit': 'NSCalendarUnitWeekOfMonth', 'NSPropertyListBinaryFormat_v1_0': 'kCFPropertyListBinaryFormat_v1_0', 'NSNumberFormatterOrdinalStyle': 
'kCFNumberFormatterOrdinalStyle', 'NSYearCalendarUnit': 'NSCalendarUnitYear', 'NS_NONATOMIC_IPHONEONLY': 'NS_NONATOMIC_IOSONLY', 'NSURLErrorRedirectToNonExistentLocation': 'kCFURLErrorRedirectToNonExistentLocation', 'NSURLErrorNotConnectedToInternet': 'kCFURLErrorNotConnectedToInternet', 'NSDataReadingMapped': 'NSDataReadingMappedIfSafe', 'NSURLErrorCannotDecodeRawData': 'kCFURLErrorCannotDecodeRawData', 'NSMapTableObjectPointerPersonality': 'NSPointerFunctionsObjectPointerPersonality', 'NSURLErrorCannotMoveFile': 'kCFURLErrorCannotMoveFile', 'NSPropertyListMutableContainersAndLeaves': 'kCFPropertyListMutableContainersAndLeaves', 'NSURLErrorCancelled': 'kCFURLErrorCancelled', 'NSRectEdgeMinX': 'NSMinXEdge', 'NSURLErrorTimedOut': 'kCFURLErrorTimedOut', 'NSPropertyListImmutable': 'kCFPropertyListImmutable', 'NSCalendarUnitYearForWeekOfYear': 'kCFCalendarUnitYearForWeekOfYear', 'NSCalendarCalendarUnit': 'NSCalendarUnitCalendar', 'NSURLErrorDownloadDecodingFailedMidStream': 'kCFURLErrorDownloadDecodingFailedMidStream', 'NSRectEdgeMinY': 'NSMinYEdge', 'NSISO8601DateFormatWithFullDate': 'kCFISO8601DateFormatWithFullDate', 'NSNumberFormatterRoundFloor': 'kCFNumberFormatterRoundFloor', 'NSOperationQualityOfServiceUserInitiated': 'NSQualityOfServiceUserInitiated', 'NSCalendarUnitWeekday': 'kCFCalendarUnitWeekday', 'NS_BigEndian': 'CFByteOrderBigEndian', 'NSMapTableZeroingWeakMemory': 'NSPointerFunctionsZeroingWeakMemory', 'NSMinXEdge': 'CGRectMinXEdge', 'NSOperationQualityOfServiceUserInteractive': 'NSQualityOfServiceUserInteractive', 'NSURLErrorCannotDecodeContentData': 'kCFURLErrorCannotDecodeContentData', 'NSUTF16StringEncoding': 'NSUnicodeStringEncoding', 'NSNumberFormatterRoundDown': 'kCFNumberFormatterRoundDown', 'NSISO8601DateFormatWithSpaceBetweenDateAndTime': 'kCFISO8601DateFormatWithSpaceBetweenDateAndTime', 'NSNumberFormatterRoundHalfUp': 'kCFNumberFormatterRoundHalfUp', 'NSISO8601DateFormatWithInternetDateTime': 'kCFISO8601DateFormatWithInternetDateTime', 'NSCalendarUnitMinute': 'kCFCalendarUnitMinute', 'NSISO8601DateFormatWithMonth': 'kCFISO8601DateFormatWithMonth', 'NSNumberFormatterScientificStyle': 'kCFNumberFormatterScientificStyle', 'NS_UNAVAILABLE': 'UNAVAILABLE_ATTRIBUTE', 'NSURLErrorInternationalRoamingOff': 'kCFURLErrorInternationalRoamingOff', 'NSCalendarUnitWeekOfMonth': 'kCFCalendarUnitWeekOfMonth', 'NSLocaleLanguageDirectionUnknown': 'kCFLocaleLanguageDirectionUnknown', 'NSCalendarUnitSecond': 'kCFCalendarUnitSecond', 'NSURLErrorCannotParseResponse': 'kCFURLErrorCannotParseResponse', 'NSOperationQualityOfServiceBackground': 'NSQualityOfServiceBackground', 'NSMapTableCopyIn': 'NSPointerFunctionsCopyIn', 'NSCalendarUnitMonth': 'kCFCalendarUnitMonth', 'NSURLErrorCannotWriteToFile': 'kCFURLErrorCannotWriteToFile', 'NSURLErrorServerCertificateHasBadDate': 'kCFURLErrorServerCertificateHasBadDate', 'NSURLErrorDataLengthExceedsMaximum': 'kCFURLErrorDataLengthExceedsMaximum', 'NSCalendarUnitEra': 'kCFCalendarUnitEra', 'NSDateFormatterFullStyle': 'kCFDateFormatterFullStyle', 'NSISO8601DateFormatWithColonSeparatorInTimeZone': 'kCFISO8601DateFormatWithColonSeparatorInTimeZone', 'NSURLErrorCannotOpenFile': 'kCFURLErrorCannotOpenFile', 'NSDateFormatterShortStyle': 'kCFDateFormatterShortStyle', 'NSDecimalNoScale': 'SHRT_MAX', 'NSLocaleLanguageDirectionRightToLeft': 'kCFLocaleLanguageDirectionRightToLeft', 'NSISO8601DateFormatWithTime': 'kCFISO8601DateFormatWithTime', 'NSNumberFormatterCurrencyISOCodeStyle': 'kCFNumberFormatterCurrencyISOCodeStyle', 'NSCalendarUnitQuarter': 
'kCFCalendarUnitQuarter', 'NSURLErrorUserAuthenticationRequired': 'kCFURLErrorUserAuthenticationRequired', 'NSURLErrorCannotLoadFromNetwork': 'kCFURLErrorCannotLoadFromNetwork', 'NSNumberFormatterCurrencyStyle': 'kCFNumberFormatterCurrencyStyle', 'NSWeekOfYearCalendarUnit': 'NSCalendarUnitWeekOfYear', 'NSURLErrorServerCertificateNotYetValid': 'kCFURLErrorServerCertificateNotYetValid', 'NSMapTableWeakMemory': 'NSPointerFunctionsWeakMemory', 'NSURLErrorCannotRemoveFile': 'kCFURLErrorCannotRemoveFile', 'NSWrapCalendarComponents': 'NSCalendarWrapComponents', 'NSURLErrorFileDoesNotExist': 'kCFURLErrorFileDoesNotExist', 'NSLocaleLanguageDirectionBottomToTop': 'kCFLocaleLanguageDirectionBottomToTop', 'NSUncachedRead': 'NSDataReadingUncached', 'NSIntegerMax': 'LONG_MAX', 'NSDateFormatterMediumStyle': 'kCFDateFormatterMediumStyle', 'NSURLErrorUnsupportedURL': 'kCFURLErrorUnsupportedURL', 'NSNumberFormatterRoundHalfEven': 'kCFNumberFormatterRoundHalfEven', 'NSISO8601DateFormatWithWeekOfYear': 'kCFISO8601DateFormatWithWeekOfYear', 'NSDayCalendarUnit': 'NSCalendarUnitDay', 'NSISO8601DateFormatWithFractionalSeconds': 'kCFISO8601DateFormatWithFractionalSeconds', 'NSYearForWeekOfYearCalendarUnit': 'NSCalendarUnitYearForWeekOfYear', 'NSNumberFormatterPadBeforePrefix': 'kCFNumberFormatterPadBeforePrefix', 'NSUndefinedDateComponent': 'NSDateComponentUndefined', 'NSURLErrorServerCertificateHasUnknownRoot': 'kCFURLErrorServerCertificateHasUnknownRoot', 'NSURLErrorBadServerResponse': 'kCFURLErrorBadServerResponse', 'NSMappedRead': 'NSDataReadingMapped', 'NSUIntegerMax': 'ULONG_MAX', 'NSHourCalendarUnit': 'NSCalendarUnitHour', 'NSURLRequestReloadIgnoringCacheData': 'NSURLRequestReloadIgnoringLocalCacheData', 'NSNumberFormatterRoundUp': 'kCFNumberFormatterRoundUp', 'NSISO8601DateFormatWithTimeZone': 'kCFISO8601DateFormatWithTimeZone', 'NSURLErrorCannotCloseFile': 'kCFURLErrorCannotCloseFile', 'NSCalendarUnitDay': 'kCFCalendarUnitDay', 'NSAtomicWrite': 'NSDataWritingAtomic', 'NSNumberFormatterRoundHalfDown': 'kCFNumberFormatterRoundHalfDown', 'NSOperationQualityOfService': 'NSQualityOfService'} -misc.update({'NSAppleEventManagerSuspensionID': objc.createOpaquePointerType('NSAppleEventManagerSuspensionID', b'^{__NSAppleEventManagerSuspension=}'), 'NSZonePtr': objc.createOpaquePointerType('NSZonePtr', b'^{_NSZone=}')}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSAffineTransform', b'setTransformStruct:', {'arguments': {2: {'type': sel32or64(b'{_NSAffineTransformStruct=ffffff}', b'{_NSAffineTransformStruct=dddddd}')}}}) - r(b'NSAffineTransform', b'transformPoint:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSAffineTransform', b'transformSize:', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSAffineTransform', b'transformStruct', {'retval': {'type': sel32or64(b'{_NSAffineTransformStruct=ffffff}', b'{_NSAffineTransformStruct=dddddd}')}}) - r(b'NSAppleEventDescriptor', b'aeDesc', {'retval': {'type': 'r^{AEDesc=I^^{OpaqueAEDataStorageType}}'}}) - r(b'NSAppleEventDescriptor', b'booleanValue', {'retval': {'type': 'Z'}}) - r(b'NSAppleEventDescriptor', b'descriptorWithBoolean:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSAppleEventDescriptor', b'descriptorWithDescriptorType:bytes:length:', {'arguments': {3: {'type_modifier': b'n', 'c_array_length_in_arg': 4}}}) - 
r(b'NSAppleEventDescriptor', b'dispatchRawAppleEvent:withRawReply:handlerRefCon:', {'retval': {'type': 's'}, 'arguments': {4: {'type': 'l'}}}) - r(b'NSAppleEventDescriptor', b'initWithAEDescNoCopy:', {'arguments': {2: {'type': 'r^{AEDesc=I^^{OpaqueAEDataStorageType}}', 'type_modifier': b'n'}}}) - r(b'NSAppleEventDescriptor', b'initWithDescriptorType:bytes:length:', {'arguments': {3: {'type_modifier': b'n', 'c_array_length_in_arg': 4}}}) - r(b'NSAppleEventDescriptor', b'isRecordDescriptor', {'retval': {'type': 'Z'}}) - r(b'NSAppleEventDescriptor', b'setEventHandler:andSelector:forEventClass:andEventID:', {'arguments': {3: {'sel_of_type': b'v@:@@'}}}) - r(b'NSAppleEventManager', b'dispatchRawAppleEvent:withRawReply:handlerRefCon:', {'arguments': {2: {'type': 'r^{AEDesc=I^^{OpaqueAEDataStorageType}}', 'type_modifier': b'n'}, 3: {'type': 'r^{AEDesc=I^^{OpaqueAEDataStorageType}}', 'type_modifier': b'o'}}}) - r(b'NSAppleEventManager', b'setEventHandler:andSelector:forEventClass:andEventID:', {'arguments': {3: {'sel_of_type': b'v@:@@'}}}) - r(b'NSAppleScript', b'compileAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSAppleScript', b'executeAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSAppleScript', b'executeAppleEvent:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSAppleScript', b'initWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSAppleScript', b'isCompiled', {'retval': {'type': 'Z'}}) - r(b'NSArchiver', b'archiveRootObject:toFile:', {'retval': {'type': 'Z'}}) - r(b'NSArray', b'addObserver:forKeyPath:options:context:', {'arguments': {5: {'type': '^v'}}}) - r(b'NSArray', b'addObserver:toObjectsAtIndexes:forKeyPath:options:context:', {'arguments': {6: {'type': '^v'}}}) - r(b'NSArray', b'arrayWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSArray', b'arrayWithObjects:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'NSArray', b'arrayWithObjects:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSArray', b'containsObject:', {'retval': {'type': 'Z'}}) - r(b'NSArray', b'context:', {'arguments': {2: {'type': '^v'}}}) - r(b'NSArray', b'context:hint:', {'arguments': {2: {'type': '^v'}}}) - r(b'NSArray', b'enumerateObjectsAtIndexes:options:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSArray', b'enumerateObjectsUsingBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSArray', b'enumerateObjectsWithOptions:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSArray', b'getObjects:', {'arguments': {2: {'type': '^@'}}, 'suggestion': 'convert to Python list instead'}) - r(b'NSArray', b'getObjects:range:', {'retval': {'type': 'v'}, 'arguments': {2: {'type_modifier': b'o', 'c_array_length_in_arg': 3}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSArray', b'indexOfObject:inRange:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSArray', 
b'indexOfObject:inSortedRange:options:usingComparator:', {'arguments': {5: {'callable': {'retval': {'type': b'l'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSArray', b'indexOfObjectAtIndexes:options:passingTest:', {'arguments': {4: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSArray', b'indexOfObjectIdenticalTo:inRange:', {'arguments': {3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSArray', b'indexOfObjectPassingTest:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSArray', b'indexOfObjectWithOptions:passingTest:', {'arguments': {3: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSArray', b'indexesOfObjectsAtIndexes:options:passingTest:', {'arguments': {4: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSArray', b'indexesOfObjectsPassingTest:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSArray', b'indexesOfObjectsWithOptions:passingTest:', {'arguments': {3: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSArray', b'initWithArray:copyItems:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSArray', b'initWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSArray', b'initWithObjects:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'NSArray', b'initWithObjects:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSArray', b'isEqualToArray:', {'retval': {'type': 'Z'}}) - r(b'NSArray', b'makeObjectsPerformSelector:', {'arguments': {2: {'sel_of_type': b'v@:'}}}) - r(b'NSArray', b'makeObjectsPerformSelector:withObject:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSArray', b'sortedArrayUsingComparator:', {'arguments': {2: {'callable': {'retval': {'type': b'l'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSArray', b'sortedArrayUsingFunction:context:', {'arguments': {2: {'callable': {'retval': {'type': b'l'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'callable_retained': False}, 3: {'type': '@'}}}) - r(b'NSArray', b'sortedArrayUsingFunction:context:hint:', {'arguments': {2: {'callable': {'retval': {'type': b'l'}, 'arguments': {0: {'type': b'@'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'callable_retained': False}, 3: {'type': '@'}}}) - r(b'NSArray', b'sortedArrayUsingSelector:', {'arguments': {2: {'sel_of_type': b'i@:@'}}}) - r(b'NSArray', b'sortedArrayWithOptions:usingComparator:', {'arguments': {3: {'callable': {'retval': {'type': b'l'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSArray', b'subarrayWithRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSArray', 
b'writeToFile:atomically:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}}}) - r(b'NSArray', b'writeToURL:atomically:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}}}) - r(b'NSArray', b'writeToURL:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSAssertionHandler', b'handleFailureInFunction:file:lineNumber:description:', {'arguments': {5: {'printf_format': True, 'type': '@'}}, 'variadic': True}) - r(b'NSAssertionHandler', b'handleFailureInMethod:object:file:lineNumber:description:', {'arguments': {2: {'type': ':'}, 6: {'printf_format': True, 'type': '@'}}, 'variadic': True}) - r(b'NSAttributedString', b'attribute:atIndex:effectiveRange:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSAttributedString', b'attribute:atIndex:longestEffectiveRange:inRange:', {'arguments': {4: {'type_modifier': b'o'}, 5: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSAttributedString', b'attributedSubstringFromRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSAttributedString', b'attributesAtIndex:effectiveRange:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSAttributedString', b'attributesAtIndex:longestEffectiveRange:inRange:', {'arguments': {3: {'type_modifier': b'o'}, 4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSAttributedString', b'enumerateAttribute:inRange:options:usingBlock:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSAttributedString', b'enumerateAttributesInRange:options:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSAttributedString', b'isEqualToAttributedString:', {'retval': {'type': 'Z'}}) - r(b'NSAutoreleasePool', b'enableFreedObjectCheck:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSAutoreleasePool', b'enableRelease:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSBackgroundActivityScheduler', b'repeats', {'retval': {'type': b'Z'}}) - r(b'NSBackgroundActivityScheduler', b'scheduleWithBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'NSBackgroundActivityScheduler', b'setRepeats:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSBackgroundActivityScheduler', b'shouldDefer', {'retval': {'type': b'Z'}}) - r(b'NSBlockOperation', b'addExecutionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSBlockOperation', b'blockOperationWithBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSBundle', b'isLoaded', {'retval': {'type': 'Z'}}) - r(b'NSBundle', b'load', {'retval': {'type': 'Z'}}) - r(b'NSBundle', b'loadAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSBundle', b'preflightAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSBundle', b'unload', {'retval': {'type': 'Z'}}) - r(b'NSByteCountFormatter', b'allowsNonnumericFormatting', {'retval': {'type': b'Z'}}) - r(b'NSByteCountFormatter', b'includesActualByteCount', {'retval': {'type': b'Z'}}) - r(b'NSByteCountFormatter', 
b'includesCount', {'retval': {'type': b'Z'}}) - r(b'NSByteCountFormatter', b'includesUnit', {'retval': {'type': b'Z'}}) - r(b'NSByteCountFormatter', b'isAdaptive', {'retval': {'type': b'Z'}}) - r(b'NSByteCountFormatter', b'setAdaptive:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSByteCountFormatter', b'setAllowsNonnumericFormatting:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSByteCountFormatter', b'setIncludesActualByteCount:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSByteCountFormatter', b'setIncludesCount:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSByteCountFormatter', b'setIncludesUnit:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSByteCountFormatter', b'setZeroPadsFractionDigits:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSByteCountFormatter', b'zeroPadsFractionDigits', {'retval': {'type': b'Z'}}) - r(b'NSCache', b'evictsObjectsWithDiscardedContent', {'retval': {'type': 'Z'}}) - r(b'NSCache', b'setEvictsObjectsWithDiscardedContent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSCalendar', b'date:matchesComponents:', {'retval': {'type': b'Z'}}) - r(b'NSCalendar', b'enumerateDatesStartingAfterDate:matchingComponents:options:usingBlock:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSCalendar', b'isDate:equalToDate:toUnitGranularity:', {'retval': {'type': b'Z'}}) - r(b'NSCalendar', b'isDate:inSameDayAsDate:', {'retval': {'type': b'Z'}}) - r(b'NSCalendar', b'isDateInToday:', {'retval': {'type': b'Z'}}) - r(b'NSCalendar', b'isDateInTomorrow:', {'retval': {'type': b'Z'}}) - r(b'NSCalendar', b'isDateInWeekend:', {'retval': {'type': b'Z'}}) - r(b'NSCalendar', b'isDateInYesterday:', {'retval': {'type': b'Z'}}) - r(b'NSCalendar', b'maximumRangeOfUnit:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSCalendar', b'minimumRangeOfUnit:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSCalendar', b'nextWeekendStartDate:interval:options:afterDate:', {'retval': {'type': b'Z'}}) - r(b'NSCalendar', b'rangeOfUnit:inUnit:forDate:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSCalendar', b'rangeOfUnit:startDate:interval:forDate:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - r(b'NSCalendar', b'rangeOfWeekendStartDate:interval:containingDate:', {'retval': {'type': b'Z'}}) - r(b'NSCalendarDate', b'years:months:days:hours:minutes:seconds:sinceDate:', {'retval': {'type': 'v'}, 'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}, 5: {'type_modifier': b'o'}, 6: {'type_modifier': b'o'}, 7: {'type_modifier': b'o'}, 8: {'type': '@'}}}) - r(b'NSCharacterSet', b'characterIsMember:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': 'S'}}}) - r(b'NSCharacterSet', b'characterSetWithRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSCharacterSet', b'hasMemberInPlane:', {'retval': {'type': 'Z'}}) - r(b'NSCharacterSet', b'isSupersetOfSet:', {'retval': {'type': 'Z'}}) - r(b'NSCharacterSet', b'longCharacterIsMember:', {'retval': {'type': 'Z'}}) - r(b'NSCoder', b'allowsKeyedCoding', {'retval': {'type': 'Z'}}) - r(b'NSCoder', b'containsValueForKey:', {'retval': {'type': 'Z'}}) - r(b'NSCoder', b'decodeArrayOfObjCType:count:at:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': 'r*'}, 4: {'type_modifier': b'o', 
'c_array_of_variable_length': True}}}) - r(b'NSCoder', b'decodeBoolForKey:', {'retval': {'type': 'Z'}}) - r(b'NSCoder', b'decodeBytesForKey:returnedLength:', {'retval': {'c_array_delimited_by_null': True, 'type': '^v', 'c_array_length_in_arg': 3}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSCoder', b'decodeBytesWithReturnedLength:', {'retval': {'c_array_length_in_arg': 2}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSCoder', b'decodePoint', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}) - r(b'NSCoder', b'decodePointForKey:', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 'arguments': {2: {'type': '@'}}}) - r(b'NSCoder', b'decodeRect', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSCoder', b'decodeRectForKey:', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': '@'}}}) - r(b'NSCoder', b'decodeSize', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSCoder', b'decodeSizeForKey:', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': '@'}}}) - r(b'NSCoder', b'decodeTopLevelObjectAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSCoder', b'decodeTopLevelObjectOfClass:forKey:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSCoder', b'decodeValueOfObjCType:at:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}, 3: {'type': '^v', 'c_array_of_variable_length': True}}}) - r(b'NSCoder', b'decodeValuesOfObjCTypes:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}}, 'variadic': True}) - r(b'NSCoder', b'encodeArrayOfObjCType:count:at:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}, 4: {'type': '^v', 'type_modifier': b'n', 'c_array_of_variable_length': True}}}) - r(b'NSCoder', b'encodeBool:forKey:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSCoder', b'encodeBytes:length:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSCoder', b'encodeBytes:length:forKey:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSCoder', b'encodePoint:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSCoder', b'encodePoint:forKey:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSCoder', b'encodeRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSCoder', b'encodeRect:forKey:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSCoder', b'encodeSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSCoder', b'encodeSize:forKey:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSCoder', b'encodeValueOfObjCType:at:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}, 3: {'type': '^v', 'type_modifier': b'n', 'c_array_of_variable_length': True}}}) - r(b'NSCoder', b'encodeValuesOfObjCTypes:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}}, 'variadic': True}) - r(b'NSCoder', b'requiresSecureCoding', {'retval': {'type': 
b'Z'}}) - r(b'NSComparisonPredicate', b'initWithLeftExpression:rightExpression:customSelector:', {'arguments': {4: {'sel_of_type': b'Z@:@'}}}) - r(b'NSComparisonPredicate', b'predicateWithLeftExpression:rightExpression:customSelector:', {'arguments': {4: {'sel_of_type': b'Z@:@'}}}) - r(b'NSCondition', b'waitUntilDate:', {'retval': {'type': 'Z'}}) - r(b'NSConditionLock', b'lockBeforeDate:', {'retval': {'type': 'Z'}}) - r(b'NSConditionLock', b'lockWhenCondition:beforeDate:', {'retval': {'type': 'Z'}}) - r(b'NSConditionLock', b'tryLock', {'retval': {'type': 'Z'}}) - r(b'NSConditionLock', b'tryLockWhenCondition:', {'retval': {'type': 'Z'}}) - r(b'NSConnection', b'independentConversationQueueing', {'retval': {'type': 'Z'}}) - r(b'NSConnection', b'isValid', {'retval': {'type': 'Z'}}) - r(b'NSConnection', b'multipleThreadsEnabled', {'retval': {'type': 'Z'}}) - r(b'NSConnection', b'registerName:', {'retval': {'type': 'Z'}}) - r(b'NSConnection', b'registerName:withNameServer:', {'retval': {'type': 'Z'}}) - r(b'NSConnection', b'setIndependentConversationQueueing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSData', b'bytes', {'retval': {'c_array_of_variable_length': True}}) - r(b'NSData', b'dataWithBytes:length:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSData', b'dataWithBytesNoCopy:length:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSData', b'dataWithBytesNoCopy:length:freeWhenDone:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type': 'Z'}}}) - r(b'NSData', b'dataWithContentsOfFile:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSData', b'dataWithContentsOfURL:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSData', b'enumerateByteRangesUsingBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v', 'type_modifier': 'n', 'c_array_length_in_arg': 2}, 2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSData', b'getBytes:', {'arguments': {2: {'type': '^v'}}, 'suggestion': 'use -bytes instead'}) - r(b'NSData', b'getBytes:length:', {'arguments': {2: {'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'NSData', b'getBytes:range:', {'arguments': {2: {'type_modifier': b'o', 'c_array_length_in_arg': 3}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSData', b'initWithBytes:length:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSData', b'initWithBytesNoCopy:length:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSData', b'initWithBytesNoCopy:length:deallocator:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v', 'type_modifier': 'n', 'c_array_length_in_arg': 2}, 2: {'type': sel32or64(b'I', b'Q')}}}}}}) - r(b'NSData', b'initWithBytesNoCopy:length:freeWhenDone:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type': 'Z'}}}) - r(b'NSData', b'initWithContentsOfFile:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSData', b'initWithContentsOfURL:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSData', b'isEqualToData:', {'retval': {'type': 'Z'}}) - r(b'NSData', b'rangeOfData:options:range:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - 
-    [… remainder of this deletion hunk: several hundred further r(b'NS…', b'…', {...}) PyObjC selector-metadata registrations for the Foundation framework bindings, covering NSData through NSObject, removed verbatim and unmodified …]
- r(b'NSObject', b'hash', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'indicesOfObjectsByEvaluatingObjectSpecifier:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'initWithCoder:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'initWithItemProviderData:typeIdentifier:error:', {'arguments': {4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'initialize', {'retval': {'type': 'v'}}) - r(b'NSObject', b'insertValue:atIndex:inPropertyWithKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': b'@'}}}) - r(b'NSObject', b'insertValue:inPropertyWithKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'instanceMethodForSelector:', {'retval': {'type': '^?'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'instanceMethodSignatureForSelector:', {'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'instancesRespondToSelector:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'inverseForRelationshipKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isCaseInsensitiveLike:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isContentDiscarded', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'isEqual:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isEqualTo:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isGreaterThan:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isGreaterThanOrEqualTo:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isKindOfClass:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': '#'}}}) - r(b'NSObject', b'isLessThan:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isLessThanOrEqualTo:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isLike:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isMemberOfClass:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': '#'}}}) - r(b'NSObject', b'isNotEqualTo:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isProxy', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'isSubclassOfClass:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '#'}}}) - r(b'NSObject', b'itemProviderVisibilityForRepresentationWithTypeIdentifier:', {'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'keyPathsForValuesAffectingValueForKey:', {'retval': {'type': '@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'listener:shouldAcceptNewConnection:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'load', {'retval': {'type': 'v'}}) - r(b'NSObject', b'loadDataWithTypeIdentifier:forItemProviderCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'lock', {'required': True, 'retval': {'type': 'v'}}) - r(b'NSObject', b'makeNewConnection:sender:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', 
b'metadataQuery:replacementObjectForResultObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'metadataQuery:replacementValueForAttribute:value:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'methodForSelector:', {'retval': {'type': '^?'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'methodSignatureForSelector:', {'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'mutableArrayValueForKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mutableArrayValueForKeyPath:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mutableCopy', {'retval': {'already_retained': True, 'type': '@'}}) - r(b'NSObject', b'mutableCopyWithZone:', {'required': True, 'retval': {'already_retained': True, 'type': '@'}, 'arguments': {2: {'type': '^{_NSZone=}'}}}) - r(b'NSObject', b'mutableOrderedSetValueForKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mutableOrderedSetValueForKeyPath:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mutableSetValueForKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mutableSetValueForKeyPath:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'netService:didAcceptConnectionWithInputStream:outputStream:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'netService:didNotPublish:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'netService:didNotResolve:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'netService:didUpdateTXTRecordData:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'netServiceBrowser:didFindDomain:moreComing:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'netServiceBrowser:didFindService:moreComing:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'netServiceBrowser:didNotSearch:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'netServiceBrowser:didRemoveDomain:moreComing:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'netServiceBrowser:didRemoveService:moreComing:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'netServiceBrowserDidStopSearch:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'netServiceBrowserWillSearch:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'netServiceDidPublish:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'netServiceDidResolveAddress:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'netServiceDidStop:', {'required': False, 'retval': 
{'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'netServiceWillPublish:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'netServiceWillResolve:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'newScriptingObjectOfClass:forValueForKey:withContentsValue:properties:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': '#'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'objectSpecifier', {'retval': {'type': b'@'}}) - r(b'NSObject', b'objectWithItemProviderData:typeIdentifier:error:', {'arguments': {4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'observationInfo', {'retval': {'type': '^v'}}) - r(b'NSObject', b'observeValueForKeyPath:ofObject:change:context:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': '^v'}}}) - r(b'NSObject', b'parser:didEndElement:namespaceURI:qualifiedName:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'parser:didEndMappingPrefix:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'parser:didStartElement:namespaceURI:qualifiedName:attributes:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'parser:didStartMappingPrefix:toURI:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundAttributeDeclarationWithName:forElement:type:defaultValue:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundCDATA:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundCharacters:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundComment:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundElementDeclarationWithName:model:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundExternalEntityDeclarationWithName:publicID:systemID:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundIgnorableWhitespace:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundInternalEntityDeclarationWithName:value:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundNotationDeclarationWithName:publicID:systemID:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'parser:foundProcessingInstructionWithTarget:data:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) 
- r(b'NSObject', b'parser:foundUnparsedEntityDeclarationWithName:publicID:systemID:notationName:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'parser:parseErrorOccurred:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'parser:resolveExternalEntityName:systemID:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'parser:validationErrorOccurred:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'parserDidEndDocument:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'parserDidStartDocument:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'performDefaultHandlingForAuthenticationChallenge:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'performSelector:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'performSelector:onThread:withObject:waitUntilDone:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b':', 'sel_of_type': b'v@:@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': 'Z'}}}) - r(b'NSObject', b'performSelector:onThread:withObject:waitUntilDone:modes:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b':', 'sel_of_type': b'v@:@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': 'Z'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'performSelector:withObject:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': ':'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'performSelector:withObject:afterDelay:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b':', 'sel_of_type': b'v@:@'}, 3: {'type': b'@'}, 4: {'type': 'd'}}}) - r(b'NSObject', b'performSelector:withObject:afterDelay:inModes:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b':', 'sel_of_type': b'v@:@'}, 3: {'type': b'@'}, 4: {'type': 'd'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'performSelector:withObject:withObject:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': ':'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'performSelectorInBackground:withObject:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b':', 'sel_of_type': b'v@:@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'performSelectorOnMainThread:withObject:waitUntilDone:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b':', 'sel_of_type': b'v@:@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'performSelectorOnMainThread:withObject:waitUntilDone:modes:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b':', 'sel_of_type': b'v@:@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'poseAsClass:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': '#'}}}) - r(b'NSObject', b'presentedItemDidChange', {'required': False, 'retval': {'type': b'v'}}) - r(b'NSObject', b'presentedItemDidGainVersion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'presentedItemDidLoseVersion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'presentedItemDidMoveToURL:', {'required': False, 'retval': {'type': b'v'}, 
'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'presentedItemDidResolveConflictVersion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'presentedItemOperationQueue', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'presentedItemURL', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'presentedSubitemAtURL:didGainVersion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'presentedSubitemAtURL:didLoseVersion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'presentedSubitemAtURL:didMoveToURL:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'presentedSubitemAtURL:didResolveConflictVersion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'presentedSubitemDidAppearAtURL:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'presentedSubitemDidChangeAtURL:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'primaryPresentedItemURL', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'rejectProtectionSpaceAndContinueWithChallenge:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'release', {'required': True, 'retval': {'type': 'Vv'}}) - r(b'NSObject', b'relinquishPresentedItemToReader:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'callable': {'retval': {'type': 'v'}, 'arguments': {0: {'type': '^v'}}}, 'type': b'@?'}}}, 'type': '@?'}}}) - r(b'NSObject', b'relinquishPresentedItemToWriter:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'callable': {'retval': {'type': 'v'}, 'arguments': {0: {'type': '^v'}}}, 'type': b'@?'}}}, 'type': '@?'}}}) - r(b'NSObject', b'remoteObjectProxy', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'remoteObjectProxyWithErrorHandler:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': b'@?'}}}) - r(b'NSObject', b'removeObserver:forKeyPath:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'removeObserver:forKeyPath:context:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'^v'}}}) - r(b'NSObject', b'removeValueAtIndex:fromPropertyWithKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'@'}}}) - r(b'NSObject', b'replaceValueAtIndex:inPropertyWithKey:withValue:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'replacementObjectForArchiver:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'replacementObjectForCoder:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'replacementObjectForKeyedArchiver:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'replacementObjectForPortCoder:', {'retval': {'type': 
b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'resolveClassMethod:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'resolveInstanceMethod:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'respondsToSelector:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'retain', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'retainCount', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'retainWeakReference', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'roundingMode', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'savePresentedItemChangesWithCompletionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'scale', {'required': True, 'retval': {'type': 's'}}) - r(b'NSObject', b'scriptingBeginsWith:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scriptingContains:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scriptingEndsWith:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scriptingIsEqualTo:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scriptingIsGreaterThan:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scriptingIsGreaterThanOrEqualTo:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scriptingIsLessThan:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scriptingIsLessThanOrEqualTo:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scriptingProperties', {'retval': {'type': b'@'}}) - r(b'NSObject', b'scriptingValueForSpecifier:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'self', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'setKeys:triggerChangeNotificationsForDependentKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'setNilValueForKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setObservationInfo:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': '^v'}}}) - r(b'NSObject', b'setPresentedItemOperationQueue:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setPresentedItemURL:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setPrimaryPresentedItemURL:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setScriptingProperties:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setValue:forKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'setValue:forKeyPath:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'setValue:forUndefinedKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'setValuesForKeysWithDictionary:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setVersion:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', 
b'spellServer:checkGrammarInString:language:details:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'spellServer:checkString:offset:types:options:orthography:wordCount:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': sel32or64(b'i', b'q')}, 6: {'type': b'@'}, 7: {'type': b'@'}, 8: {'type': sel32or64(b'^i', b'^q'), 'type_modifier': b'o'}}}) - r(b'NSObject', b'spellServer:didForgetWord:inLanguage:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'spellServer:didLearnWord:inLanguage:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'spellServer:findMisspelledWordInString:language:wordCount:countOnly:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'^i', b'^q'), 'type_modifier': b'o'}, 6: {'type': 'Z'}}}) - r(b'NSObject', b'spellServer:recordResponse:toCorrection:forWord:language:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'spellServer:suggestCompletionsForPartialWordRange:inString:language:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'spellServer:suggestGuessesForWord:inLanguage:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'storedValueForKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'stream:handleEvent:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'superclass', {'required': True, 'retval': {'type': '#'}}) - r(b'NSObject', b'supportsSecureCoding', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'takeStoredValue:forKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'takeValue:forKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'takeValue:forKeyPath:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'takeValuesFromDictionary:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'toManyRelationshipKeys', {'retval': {'type': b'@'}}) - r(b'NSObject', b'toOneRelationshipKeys', {'retval': {'type': b'@'}}) - r(b'NSObject', b'unableToSetNilForKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'unarchiver:cannotDecodeObjectOfClassName:originalClasses:', {'required': False, 'retval': {'type': '#'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'unarchiver:didDecodeObject:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', 
b'unarchiver:willReplaceObject:withObject:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'unarchiverDidFinish:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'unarchiverWillFinish:', {'required': False, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'unlock', {'required': True, 'retval': {'type': 'v'}}) - r(b'NSObject', b'useCredential:forAuthenticationChallenge:', {'required': True, 'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'useStoredAccessor', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'userActivity:didReceiveInputStream:outputStream:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'userActivityWasContinued:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'userActivityWillSave:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'userNotificationCenter:didActivateNotification:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'userNotificationCenter:didDeliverNotification:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'userNotificationCenter:shouldPresentNotification:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'validateValue:forKey:error:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^@', 'type_modifier': b'N'}, 3: {'type': b'@'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'validateValue:forKeyPath:error:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^@', 'type_modifier': b'N'}, 3: {'type': '@'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'valueAtIndex:inPropertyWithKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'@'}}}) - r(b'NSObject', b'valueForKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'valueForKeyPath:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'valueForUndefinedKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'valueWithName:inPropertyWithKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'valueWithUniqueID:inPropertyWithKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'valuesForKeys:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'version', {'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'willChange:valuesAtIndexes:forKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': 'I'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'willChangeValueForKey:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'willChangeValueForKey:withSetMutation:usingObjects:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 'I'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'zone', {'required': True, 'retval': {'type': b'^{_NSZone=}'}}) - r(b'NSOperation', b'completionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 
'arguments': {0: {'type': b'^v'}}}}}) - r(b'NSOperation', b'isAsynchronous', {'retval': {'type': b'Z'}}) - r(b'NSOperation', b'isCancelled', {'retval': {'type': 'Z'}}) - r(b'NSOperation', b'isConcurrent', {'retval': {'type': 'Z'}}) - r(b'NSOperation', b'isExecuting', {'retval': {'type': 'Z'}}) - r(b'NSOperation', b'isFinished', {'retval': {'type': 'Z'}}) - r(b'NSOperation', b'isReady', {'retval': {'type': 'Z'}}) - r(b'NSOperation', b'setCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSOperationQueue', b'addOperationWithBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSOperationQueue', b'addOperations:waitUntilFinished:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOperationQueue', b'isSuspended', {'retval': {'type': 'Z'}}) - r(b'NSOperationQueue', b'setSuspended:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'containsObject:', {'retval': {'type': 'Z'}}) - r(b'NSOrderedSet', b'enumerateObjectsAtIndexes:options:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSOrderedSet', b'enumerateObjectsUsingBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSOrderedSet', b'enumerateObjectsWithOptions:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSOrderedSet', b'indexOfObject:inSortedRange:options:usingComparator:', {'arguments': {5: {'callable': {'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSOrderedSet', b'indexOfObjectAtIndexes:options:passingTest:', {'arguments': {4: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSOrderedSet', b'indexOfObjectPassingTest:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSOrderedSet', b'indexOfObjectWithOptions:passingTest:', {'arguments': {3: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSOrderedSet', b'indexesOfObjecstWithOptions:passingTest:', {'arguments': {3: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSOrderedSet', b'indexesOfObjectsAtIndexes:options:passingTest:', {'arguments': {4: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSOrderedSet', b'indexesOfObjectsPassingTest:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 
'o'}}}}}}) - r(b'NSOrderedSet', b'indexesOfObjectsWithOptions:passingTest:', {'arguments': {3: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSOrderedSet', b'initWithArray:copyItems:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'initWithArray:range:copyItems:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'initWithObjects:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'NSOrderedSet', b'initWithObjects:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSOrderedSet', b'initWithOrderedSet:copyItems:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'initWithOrderedSet:range:copyItems:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'initWithSet:copyItems:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'insersectsSet:', {'retval': {'type': 'Z'}}) - r(b'NSOrderedSet', b'intersectsOrderedSet:', {'retval': {'type': 'Z'}}) - r(b'NSOrderedSet', b'intersectsSet:', {'retval': {'type': b'Z'}}) - r(b'NSOrderedSet', b'isEqualToOrderedSet:', {'retval': {'type': 'Z'}}) - r(b'NSOrderedSet', b'isSubsetOfOrderedSet:', {'retval': {'type': 'Z'}}) - r(b'NSOrderedSet', b'isSubsetOfSet:', {'retval': {'type': 'Z'}}) - r(b'NSOrderedSet', b'orderedSetWithArray:copyItems:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'orderedSetWithArray:range:copyItems:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'orderedSetWithObjects:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'NSOrderedSet', b'orderedSetWithObjects:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSOrderedSet', b'orderedSetWithOrderedSet:copyItems:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'orderedSetWithOrderedSet:range:copyItems:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'orderedSetWithSet:copyItems:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOrderedSet', b'sortedArrayUsingComparator:', {'arguments': {2: {'callable': {'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSOrderedSet', b'sortedArrayWithOptions:usingComparator:', {'arguments': {3: {'callable': {'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSOutputStream', b'hasSpaceAvailable', {'retval': {'type': 'Z'}}) - r(b'NSOutputStream', b'initToBuffer:capacity:', {'arguments': {2: {'type': '^v', 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'NSOutputStream', b'initToFileAtPath:append:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOutputStream', b'initWithURL:append:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOutputStream', b'outputStreamToBuffer:capacity:', {'arguments': {2: {'type': '^v', 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'NSOutputStream', b'outputStreamToFileAtPath:append:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOutputStream', b'outputStreamWithURL:append:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSOutputStream', b'write:maxLength:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSPersonNameComponentsFormatter', b'getObjectValue:forString:errorDescription:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - 
r(b'NSPersonNameComponentsFormatter', b'isPhonetic', {'retval': {'type': 'Z'}}) - r(b'NSPersonNameComponentsFormatter', b'setPhonetic:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPointerArray', b'addPointer:', {'arguments': {2: {'type': '@'}}, 'suggestion': 'use NSMutableArray'}) - r(b'NSPointerArray', b'insertPointer:atIndex:', {'arguments': {2: {'type': '@'}}, 'suggestion': 'use NSMutableArray'}) - r(b'NSPointerArray', b'pointerAtIndex:', {'retval': {'type': '@'}, 'suggestion': 'use NSMutableArray'}) - r(b'NSPointerArray', b'replacePointerAtIndex:withPointer:', {'arguments': {3: {'type': '@'}}, 'suggestion': 'use NSMutableArray'}) - r(b'NSPointerFunctions', b'acquireFunction', {'retval': {'type': '^v'}}) - r(b'NSPointerFunctions', b'setAcquireFunction:', {'arguments': {2: {'type': '^v'}}}) - r(b'NSPointerFunctions', b'setUsesStrongWriteBarrier:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPointerFunctions', b'setUsesWeakReadAndWriteBarriers:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSPointerFunctions', b'usesStrongWriteBarrier', {'retval': {'type': 'Z'}}) - r(b'NSPointerFunctions', b'usesWeakReadAndWriteBarriers', {'retval': {'type': 'Z'}}) - r(b'NSPort', b'isValid', {'retval': {'type': 'Z'}}) - r(b'NSPort', b'sendBeforeDate:components:from:reserved:', {'retval': {'type': 'Z'}}) - r(b'NSPort', b'sendBeforeDate:msgid:components:from:reserved:', {'retval': {'type': 'Z'}}) - r(b'NSPortCoder', b'isBycopy', {'retval': {'type': 'Z'}}) - r(b'NSPortCoder', b'isByref', {'retval': {'type': 'Z'}}) - r(b'NSPortMessage', b'sendBeforeDate:', {'retval': {'type': 'Z'}}) - r(b'NSPortNameServer', b'registerPort:name:', {'retval': {'type': 'Z'}}) - r(b'NSPortNameServer', b'removePortForName:', {'retval': {'type': 'Z'}}) - r(b'NSPositionalSpecifier', b'insertionReplaces', {'retval': {'type': 'Z'}}) - r(b'NSPredicate', b'evaluateWithObject:', {'retval': {'type': 'Z'}}) - r(b'NSPredicate', b'evaluateWithObject:substitutionVariables:', {'retval': {'type': 'Z'}}) - r(b'NSPredicate', b'predicateWithBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSPredicate', b'predicateWithFormat:', {'arguments': {2: {'printf_format': True, 'type': '@'}}, 'variadic': True}) - r(b'NSPredicate', b'predicateWithFormat:arguments:', {'suggestion': 'use +predicateWithFormat:'}) - r(b'NSPredicate', b'predicateWithValue:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSProcessInfo', b'automaticTerminationSupportEnabled', {'retval': {'type': b'Z'}}) - r(b'NSProcessInfo', b'isOperatingSystemAtLeastVersion:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': sel32or64(b'{_NSOperatingSystemVersion=iii}', b'{_NSOperatingSystemVersion=qqq}')}}}) - r(b'NSProcessInfo', b'operatingSystemVersion', {'retval': {'type': sel32or64(b'{_NSOperatingSystemVersion=iii}', b'{_NSOperatingSystemVersion=qqq}')}}) - r(b'NSProcessInfo', b'performActivityWithOptions:reason:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSProcessInfo', b'setAutomaticTerminationSupportEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSProgress', b'addSubscriberForFileURL:withPublishingHandler:', {'arguments': {3: {'callable': {'retval': {'callable': {'retval': {'type': 'v'}, 'arguments': {0: {'type': '^v'}}}, 'type': b'@?'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSProgress', b'cancellationHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: 
{'type': b'^v'}}}}}) - r(b'NSProgress', b'isCancellable', {'retval': {'type': b'Z'}}) - r(b'NSProgress', b'isCancelled', {'retval': {'type': b'Z'}}) - r(b'NSProgress', b'isIndeterminate', {'retval': {'type': b'Z'}}) - r(b'NSProgress', b'isOld', {'retval': {'type': b'Z'}}) - r(b'NSProgress', b'isPausable', {'retval': {'type': b'Z'}}) - r(b'NSProgress', b'isPaused', {'retval': {'type': b'Z'}}) - r(b'NSProgress', b'pausingHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'NSProgress', b'performAsCurrentWithPendingUnitCount:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSProgress', b'resumingHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'NSProgress', b'setCancellable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSProgress', b'setCancellationHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSProgress', b'setPausable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSProgress', b'setPausingHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSProgress', b'setResumingHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSPropertyListSerialization', b'dataFromPropertyList:format:errorDescription:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSPropertyListSerialization', b'dataWithPropertyList:format:options:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSPropertyListSerialization', b'propertyList:isValidForFormat:', {'retval': {'type': 'Z'}}) - r(b'NSPropertyListSerialization', b'propertyListFromData:mutabilityOption:format:errorDescription:', {'arguments': {4: {'type_modifier': b'o'}, 5: {'type_modifier': b'o'}}}) - r(b'NSPropertyListSerialization', b'propertyListWithData:options:format:error:', {'arguments': {4: {'type_modifier': b'o'}, 5: {'type_modifier': b'o'}}}) - r(b'NSPropertyListSerialization', b'propertyListWithStream:options:format:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSPropertyListSerialization', b'writePropertyList:toStream:format:options:error:', {'arguments': {6: {'type_modifier': b'o'}}}) - r(b'NSProxy', b'allowsWeakReference', {'retval': {'type': 'Z'}}) - r(b'NSProxy', b'methodSignatureForSelector:', {'arguments': {2: {'type': ':'}}}) - r(b'NSProxy', b'respondsToSelector:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSProxy', b'retainWeakReference', {'retval': {'type': 'Z'}}) - r(b'NSRecursiveLock', b'lockBeforeDate:', {'retval': {'type': 'Z'}}) - r(b'NSRecursiveLock', b'tryLock', {'retval': {'type': 'Z'}}) - r(b'NSRegularExpression', b'enumerateMatchesInString:options:range:usingBlock:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSRegularExpression', b'initWithPattern:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSRegularExpression', b'regularExpressionWithPattern:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSRunLoop', b'cancelPerformSelector:target:argument:', {'arguments': {2: {'type': ':', 'sel_of_type': b'v@:@'}}}) - r(b'NSRunLoop', b'performBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - 
r(b'NSRunLoop', b'performInModes:block:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSRunLoop', b'performSelector:target:argument:order:modes:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSRunLoop', b'runMode:beforeDate:', {'retval': {'type': 'Z'}}) - r(b'NSScanner', b'caseSensitive', {'retval': {'type': 'Z'}}) - r(b'NSScanner', b'isAtEnd', {'retval': {'type': 'Z'}}) - r(b'NSScanner', b'scanCharactersFromSet:intoString:', {'retval': {'type': 'Z'}, 'arguments': {3: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanDecimal:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type': b'^{_NSDecimal=b8b4b1b1b18[8S]}', 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanDouble:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanFloat:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanHexDouble:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type': '^d', 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanHexFloat:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type': '^f', 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanHexInt:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanHexLongLong:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type': '^Q', 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanInt:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanInteger:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanLongLong:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanString:intoString:', {'retval': {'type': 'Z'}, 'arguments': {3: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanUnsignedLongLong:', {'retval': {'type': 'Z'}, 'arguments': {2: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanUpToCharactersFromSet:intoString:', {'retval': {'type': 'Z'}, 'arguments': {3: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'scanUpToString:intoString:', {'retval': {'type': 'Z'}, 'arguments': {3: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSScanner', b'setCaseSensitive:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSScriptClassDescription', b'hasOrderedToManyRelationshipForKey:', {'retval': {'type': 'Z'}}) - r(b'NSScriptClassDescription', b'hasPropertyForKey:', {'retval': {'type': 'Z'}}) - r(b'NSScriptClassDescription', b'hasReadablePropertyForKey:', {'retval': {'type': 'Z'}}) - r(b'NSScriptClassDescription', b'hasWritablePropertyForKey:', {'retval': {'type': 'Z'}}) - r(b'NSScriptClassDescription', b'isLocationRequiredToCreateForKey:', {'retval': {'type': 'Z'}}) - r(b'NSScriptClassDescription', b'isReadOnlyKey:', {'retval': {'type': 'Z'}}) - r(b'NSScriptClassDescription', b'matchesAppleEventCode:', {'retval': {'type': 'Z'}}) - r(b'NSScriptClassDescription', b'supportsCommand:', {'retval': {'type': 'Z'}}) - r(b'NSScriptCoercionHandler', b'registerCoercer:selector:toConvertFromClass:toClass:', {'arguments': {3: {'sel_of_type': b'@@:@#'}}}) - r(b'NSScriptCommand', b'isWellFormed', {'retval': {'type': 'Z'}}) - 
r(b'NSScriptCommandDescription', b'isOptionalArgumentWithName:', {'retval': {'type': 'Z'}}) - r(b'NSScriptObjectSpecifier', b'containerIsObjectBeingTested', {'retval': {'type': 'Z'}}) - r(b'NSScriptObjectSpecifier', b'containerIsRangeContainerObject', {'retval': {'type': 'Z'}}) - r(b'NSScriptObjectSpecifier', b'indicesOfObjectsByEvaluatingWithContainer:count:', {'retval': {'c_array_length_in_arg': 3}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSScriptObjectSpecifier', b'setContainerIsObjectBeingTested:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSScriptObjectSpecifier', b'setContainerIsRangeContainerObject:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSScriptWhoseTest', b'isTrue', {'retval': {'type': 'Z'}}) - r(b'NSSet', b'addObserver:forKeyPath:options:context:', {'arguments': {5: {'type': '^v'}}}) - r(b'NSSet', b'containsObject:', {'retval': {'type': 'Z'}}) - r(b'NSSet', b'enumerateObjectsUsingBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSSet', b'enumerateObjectsWithOptions:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSSet', b'initWithObjects:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'NSSet', b'initWithObjects:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSSet', b'initWithSet:copyItems:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSSet', b'intersectsSet:', {'retval': {'type': 'Z'}}) - r(b'NSSet', b'isEqualToSet:', {'retval': {'type': 'Z'}}) - r(b'NSSet', b'isSubsetOfSet:', {'retval': {'type': 'Z'}}) - r(b'NSSet', b'makeObjectsPerformSelector:', {'arguments': {2: {'sel_of_type': b'v@:'}}}) - r(b'NSSet', b'makeObjectsPerformSelector:withObject:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSSet', b'objectsPassingTest:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSSet', b'objectsWithOptions:passingTest:', {'arguments': {3: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSSet', b'setWithObjects:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'NSSet', b'setWithObjects:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSSocketPortNameServer', b'registerPort:name:', {'retval': {'type': 'Z'}}) - r(b'NSSocketPortNameServer', b'registerPort:name:nameServerPortNumber:', {'retval': {'type': 'Z'}}) - r(b'NSSocketPortNameServer', b'removePortForName:', {'retval': {'type': 'Z'}}) - r(b'NSSortDescriptor', b'ascending', {'retval': {'type': 'Z'}}) - r(b'NSSortDescriptor', b'comparator', {'retval': {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'NSSortDescriptor', b'initWithKey:ascending:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSSortDescriptor', b'initWithKey:ascending:comparator:', {'arguments': {3: {'type': 'Z'}, 4: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSSortDescriptor', b'initWithKey:ascending:selector:', {'arguments': {3: {'type': 'Z'}, 4: {'sel_of_type': b'i@:@'}}}) - r(b'NSSortDescriptor', b'sortDescriptorWithKey:ascending:', 
{'arguments': {3: {'type': 'Z'}}}) - r(b'NSSortDescriptor', b'sortDescriptorWithKey:ascending:comparator:', {'arguments': {3: {'type': 'Z'}, 4: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSSortDescriptor', b'sortDescriptorWithKey:ascending:selector:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSSpellServer', b'isWordInUserDictionaries:caseSensitive:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}}}) - r(b'NSSpellServer', b'registerLanguage:byVendor:', {'retval': {'type': 'Z'}}) - r(b'NSStream', b'getBoundStreamsWithBufferSize:inputStream:outputStream:', {'arguments': {3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - r(b'NSStream', b'getStreamsToHost:port:inputStream:outputStream:', {'arguments': {4: {'null_accepted': False, 'type_modifier': b'o'}, 5: {'null_accepted': False, 'type_modifier': b'o'}}}) - r(b'NSStream', b'getStreamsToHostWithName:port:inputStream:outputStream:', {'arguments': {4: {'type_modifier': b'o'}, 5: {'type_modifier': b'o'}}}) - r(b'NSStream', b'setProperty:forKey:', {'retval': {'type': 'Z'}}) - r(b'NSString', b'', {'retval': {'type': '*'}}) - r(b'NSString', b'UTF8String', {'retval': {'c_array_delimited_by_null': True, 'type': '^t'}}) - r(b'NSString', b'availableStringEncodings', {'retval': {'c_array_delimited_by_null': True, 'type': sel32or64(b'r^I', b'r^Q')}}) - r(b'NSString', b'boolValue', {'retval': {'type': 'Z'}}) - r(b'NSString', b'cString', {'retval': {'c_array_delimited_by_null': True, 'type': '^t'}}) - r(b'NSString', b'cStringUsingEncoding:', {'retval': {'c_array_delimited_by_null': True, 'type': '^v'}}) - r(b'NSString', b'canBeConvertedToEncoding:', {'retval': {'type': 'Z'}}) - r(b'NSString', b'characterAtIndex:', {'retval': {'type': 'T'}}) - r(b'NSString', b'compare:options:range:', {'arguments': {4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'compare:options:range:locale:', {'arguments': {4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'completePathIntoString:caseSensitive:matchesIntoArray:filterTypes:', {'arguments': {2: {'type_modifier': b'o'}, 3: {'type': 'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'NSString', b'containsString:', {'retval': {'type': b'Z'}}) - r(b'NSString', b'dataUsingEncoding:allowLossyConversion:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSString', b'enumerateLinesUsingBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSString', b'enumerateLinguisticTagsInRange:scheme:options:orthography:usingBlock:', {'arguments': {6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSString', b'enumerateSubstringsInRange:options:usingBlock:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSString', b'enumeratorLinguisticTagsInRange:scheme:options:orthography:usingBlock:', {'arguments': {6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 
1: {'type': b'@'}, 2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 4: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'NSString', b'fileSystemRepresentation', {'retval': {'c_array_delimited_by_null': True, 'type': '^t'}}) - r(b'NSString', b'getBytes:maxLength:usedLength:encoding:options:range:remainingRange:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^v', 'type_modifier': b'o', 'c_array_length_in_arg': (3, 4)}, 4: {'type': sel32or64(b'^I', b'^Q'), 'type_modifier': b'o'}, 7: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 8: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}'), 'type_modifier': b'o'}}, 'suggestion': 'do not use'}) - r(b'NSString', b'getCString:', {'arguments': {2: {'type': '*'}}, 'suggestion': 'use -cString'}) - r(b'NSString', b'getCString:maxLength:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^v', 'type_modifier': b'o', 'c_array_length_in_arg': 3}}, 'suggestion': 'use -cString instead'}) - r(b'NSString', b'getCString:maxLength:encoding:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^v', 'type_modifier': b'o', 'c_array_length_in_arg': 3}}, 'suggestion': 'use -cString instead'}) - r(b'NSString', b'getCString:maxLength:range:remainingRange:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^v', 'type_modifier': b'o', 'c_array_length_in_arg': 3}, 5: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}'), 'type_modifier': b'o'}}, 'suggestion': 'use -cString instead'}) - r(b'NSString', b'getCharacters:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': '^T', 'type_modifier': b'o', 'c_array_of_variable_length': True}}}) - r(b'NSString', b'getCharacters:range:', {'retval': {'type': 'v'}, 'arguments': {2: {'type': '^T', 'type_modifier': b'o', 'c_array_length_in_arg': 3}, 3: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'getFileSystemRepresentation:maxLength:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^t', 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'NSString', b'getLineStart:end:contentsEnd:forRange:', {'retval': {'type': 'v'}, 'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}, 5: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'getParagraphStart:end:contentsEnd:forRange:', {'retval': {'type': 'v'}, 'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}, 5: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'hasPrefix:', {'retval': {'type': 'Z'}}) - r(b'NSString', b'hasSuffix:', {'retval': {'type': 'Z'}}) - r(b'NSString', b'initWithBytes:length:encoding:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSString', b'initWithBytesNoCopy:length:encoding:freeWhenDone:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}, 5: {'type': 'Z'}}, 'suggestion': 'use -initWithBytes:length:encoding instead'}) - r(b'NSString', b'initWithCString:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^v', 'type_modifier': b'n'}}}) - r(b'NSString', b'initWithCString:encoding:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}}}) - r(b'NSString', b'initWithCString:length:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSString', b'initWithCStringNoCopy:length:freeWhenDone:', {'arguments': {2: {'type': '^v', 
'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type': 'Z'}}, 'suggestion': 'use -initWithCString:length: instead'}) - r(b'NSString', b'initWithCharacters:length:', {'arguments': {2: {'type': '^T', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSString', b'initWithCharactersNoCopy:length:freeWhenDone:', {'retval': {'type': '@'}, 'arguments': {2: {'type': '^T', 'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type': 'Z'}}, 'suggestion': 'use -initWithCharacters:length: instead'}) - r(b'NSString', b'initWithContentsOfFile:encoding:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSString', b'initWithContentsOfFile:usedEncoding:error:', {'arguments': {3: {'type': sel32or64(b'r^I', b'r^Q'), 'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - r(b'NSString', b'initWithContentsOfURL:', {'arguments': {2: {'type': '@'}}}) - r(b'NSString', b'initWithContentsOfURL:encoding:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSString', b'initWithContentsOfURL:usedEncoding:error:', {'arguments': {3: {'type': sel32or64(b'r^I', b'r^Q'), 'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - r(b'NSString', b'initWithFormat:', {'arguments': {2: {'printf_format': True, 'type': '@'}}, 'variadic': True}) - r(b'NSString', b'initWithFormat:arguments:', {'arguments': {3: {'type': sel32or64(b'*', b'[1{?=II^v^v}]')}}, 'suggestion': 'use -initWithFormat:'}) - r(b'NSString', b'initWithFormat:locale:', {'arguments': {2: {'printf_format': True, 'type': '@'}}, 'variadic': True}) - r(b'NSString', b'initWithFormat:locale:arguments:', {'arguments': {4: {'type': sel32or64(b'*', b'[1{?=II^v^v}]')}}, 'suggestion': 'use -initWithFormat:locale:'}) - r(b'NSString', b'initWithUTF8String:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}}}) - r(b'NSString', b'isAbsolutePath', {'retval': {'type': 'Z'}}) - r(b'NSString', b'isEqualToString:', {'retval': {'type': 'Z'}}) - r(b'NSString', b'lineRangeForRange:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'linguisticTagsInRange:scheme:options:orthography:tokenRanges:', {'arguments': {6: {'type_modifier': b'o'}}}) - r(b'NSString', b'localizedCaseInsensitiveContainsString:', {'retval': {'type': b'Z'}}) - r(b'NSString', b'localizedStandardContainsString:', {'retval': {'type': 'Z'}}) - r(b'NSString', b'localizedStringWithFormat:', {'arguments': {2: {'printf_format': True, 'type': '@'}}, 'variadic': True}) - r(b'NSString', b'lossyCString', {'retval': {'c_array_delimited_by_null': True, 'type': '^t'}}) - r(b'NSString', b'paragraphRangeForRange:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'rangeOfCharacterFromSet:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSString', b'rangeOfCharacterFromSet:options:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSString', b'rangeOfCharacterFromSet:options:range:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'rangeOfComposedCharacterSequenceAtIndex:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSString', b'rangeOfComposedCharacterSequencesForRange:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 
'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'rangeOfString:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSString', b'rangeOfString:options:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSString', b'rangeOfString:options:range:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'rangeOfString:options:range:locale:', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}, 'arguments': {4: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'stringByAppendingFormat:', {'arguments': {2: {'printf_format': True, 'type': '@'}}, 'variadic': True}) - r(b'NSString', b'stringByApplyingTransform:reverse:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSString', b'stringByReplacingCharactersInRange:withString:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'stringByReplacingOccurrencesOfString:withString:options:range:', {'arguments': {5: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'stringEncodingForData:encodingOptions:convertedString:usedLossyConversion:', {'arguments': {4: {'type_modifier': b'o'}, 5: {'type': b'^Z', 'type_modifier': b'o'}}}) - r(b'NSString', b'stringWithCString:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^v', 'type_modifier': b'n'}}}) - r(b'NSString', b'stringWithCString:encoding:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}}}) - r(b'NSString', b'stringWithCString:length:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSString', b'stringWithCharacters:length:', {'arguments': {2: {'type': 'r^T', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSString', b'stringWithContentsOfFile:encoding:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSString', b'stringWithContentsOfFile:usedEncoding:error:', {'arguments': {3: {'type': sel32or64(b'r^I', b'r^Q'), 'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - r(b'NSString', b'stringWithContentsOfURL:encoding:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSString', b'stringWithContentsOfURL:usedEncoding:error:', {'arguments': {3: {'type': sel32or64(b'r^I', b'r^Q'), 'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - r(b'NSString', b'stringWithFormat:', {'arguments': {2: {'printf_format': True, 'type': '@'}}, 'variadic': True}) - r(b'NSString', b'stringWithUTF8String:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}}}) - r(b'NSString', b'substringWithRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSString', b'writeToFile:atomically:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}}}) - r(b'NSString', b'writeToFile:atomically:encoding:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}, 5: {'type_modifier': b'o'}}}) - r(b'NSString', b'writeToURL:atomically:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}}}) - r(b'NSString', b'writeToURL:atomically:encoding:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}, 5: {'type_modifier': b'o'}}}) - r(b'NSTask', b'isRunning', {'retval': {'type': 'Z'}}) - r(b'NSTask', b'launchedTaskWithExecutableURL:arguments:error:terminationHandler:', {'arguments': {4: {'type_modifier': 
b'o'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSTask', b'resume', {'retval': {'type': 'Z'}}) - r(b'NSTask', b'setTerminationHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSTask', b'suspend', {'retval': {'type': 'Z'}}) - r(b'NSTask', b'terminationHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'NSTextCheckingResult', b'addressCheckingResultWithRange:components:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'correctionCheckingResultWithRange:replacementString:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'dashCheckingResultWithRange:replacementString:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'dateCheckingResultWithRange:date:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'dateCheckingResultWithRange:date:timeZone:duration:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'grammarCheckingResultWithRange:details:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'linkCheckingResultWithRange:URL:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'orthographyCheckingResultWithRange:orthography:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'phoneNumberCheckingResultWithRange:phoneNumber:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'quoteCheckingResultWithRange:replacementString:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'regularExpressionCheckingResultWithRanges:count:regularExpression:', {'arguments': {2: {'type': sel32or64(b'^{_NSRange=II}', b'^{_NSRange=QQ}'), 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'NSTextCheckingResult', b'replacementCheckingResultWithRange:replacementString:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'spellCheckingResultWithRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSTextCheckingResult', b'transitInformationCheckingResultWithRange:components:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSThread', b'detachNewThreadSelector:toTarget:withObject:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'NSThread', b'detachNewThreadWithBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSThread', b'initWithBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSThread', b'initWithTarget:selector:object:', {'arguments': {3: {'sel_of_type': b'v@:@'}}}) - r(b'NSThread', b'isCancelled', {'retval': {'type': 'Z'}}) - r(b'NSThread', b'isExecuting', {'retval': {'type': 'Z'}}) - r(b'NSThread', b'isFinished', {'retval': {'type': 'Z'}}) - r(b'NSThread', b'isMainThread', {'retval': {'type': 'Z'}}) - r(b'NSThread', b'isMultiThreaded', {'retval': {'type': 'Z'}}) 
- r(b'NSThread', b'setThreadPriority:', {'retval': {'type': 'Z'}}) - r(b'NSTimeZone', b'isDaylightSavingTime', {'retval': {'type': 'Z'}}) - r(b'NSTimeZone', b'isDaylightSavingTimeForDate:', {'retval': {'type': 'Z'}}) - r(b'NSTimeZone', b'isEqualToTimeZone:', {'retval': {'type': 'Z'}}) - r(b'NSTimer', b'initWithFireDate:interval:repeats:block:', {'arguments': {4: {'type': 'Z'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSTimer', b'initWithFireDate:interval:target:selector:userInfo:repeats:', {'arguments': {5: {'sel_of_type': b'v@:@'}, 7: {'type': 'Z'}}}) - r(b'NSTimer', b'isValid', {'retval': {'type': 'Z'}}) - r(b'NSTimer', b'scheduledTimerWithTimeInterval:invocation:repeats:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSTimer', b'scheduledTimerWithTimeInterval:repeats:block:', {'arguments': {3: {'type': 'Z'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSTimer', b'scheduledTimerWithTimeInterval:target:selector:userInfo:repeats:', {'arguments': {4: {'sel_of_type': b'v@:@'}, 6: {'type': 'Z'}}}) - r(b'NSTimer', b'timerWithTimeInterval:invocation:repeats:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSTimer', b'timerWithTimeInterval:repeats:block:', {'arguments': {3: {'type': 'Z'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSTimer', b'timerWithTimeInterval:target:selector:userInfo:repeats:', {'arguments': {4: {'sel_of_type': b'v@:@'}, 6: {'type': 'Z'}}}) - r(b'NSURL', b'URLByAppendingPathComponent:isDirectory:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSURL', b'URLByResolvingAliasFileAtURL:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSURL', b'URLByResolvingBookmarkData:options:relativeToURL:bookmarkDataIsStale:error:', {'arguments': {5: {'type': '^Z', 'type_modifier': b'o'}, 6: {'type_modifier': b'o'}}}) - r(b'NSURL', b'URLHandleUsingCache:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSURL', b'bookmarkDataWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSURL', b'bookmarkDataWithOptions:includingResourceValuesForKeys:relativeToURL:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSURL', b'checkPromisedItemIsReachableAndReturnError:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSURL', b'checkResourceIsReachableAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSURL', b'fileURLWithFileSystemRepresentation:isDirectory:relativeToURL:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type_modifier': b'n'}, 3: {'type': 'Z'}}}) - r(b'NSURL', b'fileURLWithPath:isDirectory:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSURL', b'fileURLWithPath:isDirectory:relativeToURL:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSURL', b'getFileSystemRepresentation:maxLength:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^t', 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'NSURL', b'getPromisedItemResourceValue:forKey:error:', {'retval': {'type': b'Z'}}) - r(b'NSURL', b'getResourceValue:forKey:error:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - r(b'NSURL', b'hasDirectoryPath', {'retval': {'type': 'Z'}}) - r(b'NSURL', b'initByResolvingBookmarkData:options:relativeToURL:bookmarkDataIsStale:error:', {'arguments': {5: {'type': '^Z', 'type_modifier': b'o'}, 6: {'type_modifier': b'o'}}}) - r(b'NSURL', 
b'initFileURLWithFileSystemRepresentation:isDirectory:relativeToURL:', {'arguments': {2: {'c_array_delimited_by_null': True, 'type_modifier': b'n'}, 3: {'type': 'Z'}}}) - r(b'NSURL', b'initFileURLWithPath:isDirectory:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSURL', b'initFileURLWithPath:isDirectory:relativeToURL:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSURL', b'initWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSURL', b'isFileReferenceURL', {'retval': {'type': 'Z'}}) - r(b'NSURL', b'isFileURL', {'retval': {'type': 'Z'}}) - r(b'NSURL', b'loadResourceDataNotifyingClient:usingCache:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSURL', b'promisedItemResourceValuesForKeys:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSURL', b'resourceDataUsingCache:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSURL', b'resourceValuesForKeys:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSURL', b'setProperty:forKey:', {'retval': {'type': 'Z'}}) - r(b'NSURL', b'setResourceData:', {'retval': {'type': 'Z'}}) - r(b'NSURL', b'setResourceValue:forKey:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSURL', b'setResourceValues:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSURL', b'startAccessingSecurityScopedResource', {'retval': {'type': b'Z'}}) - r(b'NSURL', b'writeBookmarkData:toURL:options:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'NSURL', b'writeToURL:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSURLCache', b'getCachedResponseForDataTask:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSURLComponents', b'componentsWithURL:resolvingAgainstBaseURL:', {'arguments': {3: {'type': b'Z'}}}) - r(b'NSURLComponents', b'initWithURL:resolvingAgainstBaseURL:', {'arguments': {3: {'type': b'Z'}}}) - r(b'NSURLConnection', b'canHandleRequest:', {'retval': {'type': 'Z'}}) - r(b'NSURLConnection', b'initWithRequest:delegate:startImmediately:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSURLConnection', b'sendAsynchronousRequest:queue:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSURLConnection', b'sendSynchronousRequest:returningResponse:error:', {'arguments': {3: {'type_modifier': b'o'}, 4: {'type_modifier': b'o'}}}) - r(b'NSURLCredential', b'hasPassword', {'retval': {'type': 'Z'}}) - r(b'NSURLCredentialStorage', b'getCredentialsForProtectionSpace:task:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSURLCredentialStorage', b'getDefaultCredentialForProtectionSpace:task:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSURLDownload', b'canResumeDownloadDecodedWithEncodingMIMEType:', {'retval': {'type': 'Z'}}) - r(b'NSURLDownload', b'deletesFileUponFailure', {'retval': {'type': 'Z'}}) - r(b'NSURLDownload', b'setDeletesFileUponFailure:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSURLDownload', b'setDestination:allowOverwrite:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSURLHandle', b'canInitWithURL:', {'retval': {'type': 'Z'}}) - r(b'NSURLHandle', b'didLoadBytes:loadComplete:', {'arguments': {3: {'type': 'Z'}}}) - 
r(b'NSURLHandle', b'initWithURL:cached:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSURLHandle', b'writeData:', {'retval': {'type': 'Z'}}) - r(b'NSURLHandle', b'writeProperty:forKey:', {'retval': {'type': 'Z'}}) - r(b'NSURLProtectionSpace', b'isProxy', {'retval': {'type': 'Z'}}) - r(b'NSURLProtectionSpace', b'receivesCredentialSecurely', {'retval': {'type': 'Z'}}) - r(b'NSURLProtocol', b'canInitWithRequest:', {'retval': {'type': 'Z'}}) - r(b'NSURLProtocol', b'canInitWithTask:', {'retval': {'type': b'Z'}}) - r(b'NSURLProtocol', b'registerClass:', {'retval': {'type': 'Z'}}) - r(b'NSURLProtocol', b'requestIsCacheEquivalent:toRequest:', {'retval': {'type': 'Z'}}) - r(b'NSURLRequest', b'HTTPShouldHandleCookies', {'retval': {'type': 'Z'}}) - r(b'NSURLRequest', b'HTTPShouldUsePipelining', {'retval': {'type': 'Z'}}) - r(b'NSURLRequest', b'allowsCellularAccess', {'retval': {'type': b'Z'}}) - r(b'NSURLRequest', b'supportsSecureCoding', {'retval': {'type': b'Z'}}) - r(b'NSURLSession', b'dataTaskWithRequest:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'NSURLSession', b'dataTaskWithURL:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'NSURLSession', b'downloadTaskWithResumeData:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'NSURLSession', b'downloadTaskWithURL:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'NSURLSession', b'flushWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSURLSession', b'getAllTasksWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSURLSession', b'getTasksWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'NSURLSession', b'resetWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSURLSession', b'uploadTaskWithRequest:fromData:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'NSURLSession', b'uploadTaskWithRequest:fromFile:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'NSURLSessionConfiguration', b'HTTPShouldSetCookies', {'retval': {'type': b'Z'}}) - r(b'NSURLSessionConfiguration', b'HTTPShouldUsePipelining', {'retval': {'type': b'Z'}}) - r(b'NSURLSessionConfiguration', b'allowsCellularAccess', {'retval': {'type': b'Z'}}) - r(b'NSURLSessionConfiguration', b'isDiscretionary', {'retval': {'type': 'Z'}}) - r(b'NSURLSessionConfiguration', b'sessionSendsLaunchEvents', {'retval': {'type': b'Z'}}) - r(b'NSURLSessionConfiguration', b'setAllowsCellularAccess:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSURLSessionConfiguration', 
b'setDiscretionary:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSURLSessionConfiguration', b'setHTTPShouldSetCookies:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSURLSessionConfiguration', b'setHTTPShouldUsePipelining:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSURLSessionConfiguration', b'setSessionSendsLaunchEvents:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSURLSessionConfiguration', b'setShouldUseExtendedBackgroundIdleMode:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSURLSessionConfiguration', b'setWaitsForConnectivity:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSURLSessionConfiguration', b'shouldUseExtendedBackgroundIdleMode', {'retval': {'type': 'Z'}}) - r(b'NSURLSessionConfiguration', b'waitsForConnectivity', {'retval': {'type': 'Z'}}) - r(b'NSURLSessionDownloadTask', b'cancelByProducingResumeData:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSURLSessionStreamTask', b'readDataOfMinLength:maxLength:timeout:completionHandler:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'f', b'd')}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}, 3: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSURLSessionStreamTask', b'writeData:timeout:completionHandler:', {'arguments': {3: {'type': sel32or64(b'f', b'd')}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSURLSessionTaskTransactionMetrics', b'isProxyConnection', {'retval': {'type': 'Z'}}) - r(b'NSURLSessionTaskTransactionMetrics', b'isReusedConnection', {'retval': {'type': 'Z'}}) - r(b'NSUUID', b'getUUIDBytes:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSUbiquitousKeyValueStore', b'boolForKey:', {'retval': {'type': 'Z'}}) - r(b'NSUbiquitousKeyValueStore', b'setBool:forKey:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSUbiquitousKeyValueStore', b'synchronize', {'retval': {'type': b'Z'}}) - r(b'NSUbiquitousKeyValueStore', b'synchronize:', {'retval': {'type': 'Z'}}) - r(b'NSUnarchiver', b'isAtEnd', {'retval': {'type': 'Z'}}) - r(b'NSUndoManager', b'canRedo', {'retval': {'type': 'Z'}}) - r(b'NSUndoManager', b'canUndo', {'retval': {'type': 'Z'}}) - r(b'NSUndoManager', b'groupsByEvent', {'retval': {'type': 'Z'}}) - r(b'NSUndoManager', b'isRedoing', {'retval': {'type': 'Z'}}) - r(b'NSUndoManager', b'isUndoRegistrationEnabled', {'retval': {'type': 'Z'}}) - r(b'NSUndoManager', b'isUndoing', {'retval': {'type': 'Z'}}) - r(b'NSUndoManager', b'redoActionIsDiscardable', {'retval': {'type': 'Z'}}) - r(b'NSUndoManager', b'redoMenuTitleForUndoActionName:', {'arguments': {2: {'type': '@'}}}) - r(b'NSUndoManager', b'registerUndoWithTarget:handler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSUndoManager', b'registerUndoWithTarget:selector:object:', {'arguments': {3: {'sel_of_type': b'v@:@'}}}) - r(b'NSUndoManager', b'setActionIsDiscardable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSUndoManager', b'setGroupsByEvent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSUndoManager', b'undoActionIsDiscardable', {'retval': {'type': 'Z'}}) - r(b'NSUserActivity', b'getContinuationStreamsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSUserActivity', 
b'isEligibleForHandoff', {'retval': {'type': 'Z'}}) - r(b'NSUserActivity', b'isEligibleForPublicIndexing', {'retval': {'type': 'Z'}}) - r(b'NSUserActivity', b'isEligibleForSearch', {'retval': {'type': 'Z'}}) - r(b'NSUserActivity', b'needsSave', {'retval': {'type': b'Z'}}) - r(b'NSUserActivity', b'setEligibleForHandoff:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSUserActivity', b'setEligibleForPublicIndexing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSUserActivity', b'setEligibleForSearch:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSUserActivity', b'setNeedsSave:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSUserActivity', b'setSupportsContinuationStreams:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSUserActivity', b'supportsContinuationStreams', {'retval': {'type': b'Z'}}) - r(b'NSUserAppleScriptTask', b'executeWithAppleEvent:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'NSUserAutomatorTask', b'executeWithInput:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSUserDefaults', b'boolForKey:', {'retval': {'type': 'Z'}}) - r(b'NSUserDefaults', b'objectIsForcedForKey:', {'retval': {'type': 'Z'}}) - r(b'NSUserDefaults', b'objectIsForcedForKey:inDomain:', {'retval': {'type': 'Z'}}) - r(b'NSUserDefaults', b'setBool:forKey:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSUserDefaults', b'synchronize', {'retval': {'type': 'Z'}}) - r(b'NSUserNotification', b'hasActionButton', {'retval': {'type': b'Z'}}) - r(b'NSUserNotification', b'hasReplyButton', {'retval': {'type': b'Z'}}) - r(b'NSUserNotification', b'isPresented', {'retval': {'type': b'Z'}}) - r(b'NSUserNotification', b'isRemote', {'retval': {'type': b'Z'}}) - r(b'NSUserNotification', b'setHasActionButton:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSUserNotification', b'setHasReplyButton:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSUserScriptTask', b'executeWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSUserScriptTask', b'initWithURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSUserUnixTask', b'executeWithArguments:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSValue', b'getValue:', {'arguments': {2: {'type': '^v'}}, 'suggestion': 'use another method'}) - r(b'NSValue', b'initWithBytes:objCType:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_of_variable_length': True}, 3: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}}, 'suggestion': 'use something else'}) - r(b'NSValue', b'isEqualToValue:', {'retval': {'type': 'Z'}}) - r(b'NSValue', b'objCType', {'retval': {'c_array_delimited_by_null': True, 'type': '^t'}}) - r(b'NSValue', b'pointValue', {'retval': {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}) - r(b'NSValue', b'pointerValue', {'retval': {'type': '^v'}, 'suggestion': 'use something else'}) - r(b'NSValue', b'rangeValue', {'retval': {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}) - r(b'NSValue', b'rectValue', {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSValue', b'sizeValue', {'retval': {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}) - r(b'NSValue', b'value:withObjCType:', 
{'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_of_variable_length': True}, 3: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}}, 'suggestion': 'use something else'}) - r(b'NSValue', b'valueWithBytes:objCType:', {'arguments': {2: {'type': '^v', 'type_modifier': b'n', 'c_array_of_variable_length': True}, 3: {'c_array_delimited_by_null': True, 'type': '^t', 'type_modifier': b'n'}}, 'suggestion': 'use something else'}) - r(b'NSValue', b'valueWithPoint:', {'arguments': {2: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSValue', b'valueWithPointer:', {'arguments': {2: {'type': '^v'}}, 'suggestion': 'use some other method'}) - r(b'NSValue', b'valueWithRange:', {'arguments': {2: {'type': sel32or64(b'{_NSRange=II}', b'{_NSRange=QQ}')}}}) - r(b'NSValue', b'valueWithRect:', {'arguments': {2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSValue', b'valueWithSize:', {'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) - r(b'NSValueTransformer', b'allowsReverseTransformation', {'retval': {'type': 'Z'}}) - r(b'NSXMLDTD', b'initWithContentsOfURL:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSXMLDTD', b'initWithData:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSXMLDTDNode', b'isExternal', {'retval': {'type': 'Z'}}) - r(b'NSXMLDocument', b'initWithContentsOfURL:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSXMLDocument', b'initWithData:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSXMLDocument', b'initWithXMLString:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSXMLDocument', b'isStandalone', {'retval': {'type': 'Z'}}) - r(b'NSXMLDocument', b'objectByApplyingXSLT:arguments:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSXMLDocument', b'objectByApplyingXSLTAtURL:arguments:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSXMLDocument', b'objectByApplyingXSLTString:arguments:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSXMLDocument', b'setStandalone:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSXMLDocument', b'validateAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSXMLElement', b'initWithXMLString:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSXMLElement', b'normalizeAdjacentTextNodesPreservingCDATA:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSXMLNode', b'canonicalXMLStringPreservingComments:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSXMLNode', b'nodesForXPath:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSXMLNode', b'objectsForXQuery:constants:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSXMLNode', b'objectsForXQuery:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSXMLNode', b'setStringValue:resolvingEntities:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSXMLParser', b'parse', {'retval': {'type': 'Z'}}) - r(b'NSXMLParser', b'setShouldProcessNamespaces:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSXMLParser', b'setShouldReportNamespacePrefixes:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSXMLParser', b'setShouldResolveExternalEntities:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSXMLParser', b'shouldProcessNamespaces', {'retval': {'type': 'Z'}}) - r(b'NSXMLParser', b'shouldReportNamespacePrefixes', {'retval': {'type': 'Z'}}) - r(b'NSXMLParser', b'shouldResolveExternalEntities', {'retval': {'type': 'Z'}}) - 
r(b'NSXPCConnection', b'interruptionHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'NSXPCConnection', b'invalidationHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'NSXPCConnection', b'setInterruptionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSXPCConnection', b'setInvalidationHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'NSXPCConnection', b'synchronousRemoteObjectProxyWithErrorHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'NSXPCInterface', b'classesForSelector:argumentIndex:ofReply:', {'arguments': {4: {'type': b'Z'}}}) - r(b'NSXPCInterface', b'interfaceForSelector:argumentIndex:ofReply:', {'arguments': {4: {'type': b'Z'}}}) - r(b'NSXPCInterface', b'setClasses:forSelector:argumentIndex:ofReply:', {'arguments': {5: {'type': b'Z'}}}) - r(b'NSXPCInterface', b'setInterface:forSelector:argumentIndex:ofReply:', {'arguments': {5: {'type': b'Z'}}}) -finally: - objc._updatingMetadata(False) -protocols={'NSMachPortDelegateMethods': objc.informal_protocol('NSMachPortDelegateMethods', [objc.selector(None, b'handleMachMessage:', b'v@:^v', isRequired=False)]), 'NSCoderMethods': objc.informal_protocol('NSCoderMethods', [objc.selector(None, b'classForCoder', b'#@:', isRequired=False), objc.selector(None, b'version', sel32or64(b'i@:', b'q@:'), isRequired=False), objc.selector(None, b'setVersion:', sel32or64(b'v@:i', b'v@:q'), isRequired=False), objc.selector(None, b'replacementObjectForCoder:', b'@@:@', isRequired=False), objc.selector(None, b'awakeAfterUsingCoder:', b'@@:@', isRequired=False)]), 'NSStreamDelegateEventExtensions': objc.informal_protocol('NSStreamDelegateEventExtensions', [objc.selector(None, b'stream:handleEvent:', b'v@:@I', isRequired=False)]), 'NSCopyLinkMoveHandler': objc.informal_protocol('NSCopyLinkMoveHandler', [objc.selector(None, b'fileManager:shouldProceedAfterError:', b'Z@:@@', isRequired=False), objc.selector(None, b'fileManager:willProcessPath:', b'v@:@@', isRequired=False)]), 'NSScriptClassDescription': objc.informal_protocol('NSScriptClassDescription', [objc.selector(None, b'className', b'@@:', isRequired=False), objc.selector(None, b'classCode', sel32or64(b'L@:', b'I@:'), isRequired=False)]), 'NSKeyValueObserverNotification': objc.informal_protocol('NSKeyValueObserverNotification', [objc.selector(None, b'didChange:valuesAtIndexes:forKey:', sel32or64(b'v@:I@@', b'v@:Q@@'), isRequired=False), objc.selector(None, b'didChangeValueForKey:', b'v@:@', isRequired=False), objc.selector(None, b'willChange:valuesAtIndexes:forKey:', sel32or64(b'v@:I@@', b'v@:Q@@'), isRequired=False), objc.selector(None, b'willChangeValueForKey:', b'v@:@', isRequired=False), objc.selector(None, b'didChangeValueForKey:withSetMutation:usingObjects:', sel32or64(b'v@:@I@', b'v@:@Q@'), isRequired=False), objc.selector(None, b'willChangeValueForKey:withSetMutation:usingObjects:', sel32or64(b'v@:@I@', b'v@:@Q@'), isRequired=False)]), 'NSKeyValueCoding': objc.informal_protocol('NSKeyValueCoding', [objc.selector(None, b'mutableOrderedSetValueForKeyPath:', b'@@:@', isRequired=False), objc.selector(None, b'mutableSetValueForKey:', b'@@:@', isRequired=False), objc.selector(None, b'accessInstanceVariablesDirectly', b'Z@:', isRequired=False), objc.selector(None, b'valueForKey:', 
b'@@:@', isRequired=False), objc.selector(None, b'mutableArrayValueForKey:', b'@@:@', isRequired=False), objc.selector(None, b'dictionaryWithValuesForKeys:', b'@@:@', isRequired=False), objc.selector(None, b'mutableOrderedSetValueForKey:', b'@@:@', isRequired=False), objc.selector(None, b'setValue:forKey:', b'v@:@@', isRequired=False), objc.selector(None, b'validateValue:forKeyPath:error:', b'Z@:^@@^@', isRequired=False), objc.selector(None, b'validateValue:forKey:error:', b'Z@:^@@^@', isRequired=False), objc.selector(None, b'valueForKeyPath:', b'@@:@', isRequired=False), objc.selector(None, b'valueForUndefinedKey:', b'@@:@', isRequired=False), objc.selector(None, b'mutableArrayValueForKeyPath:', b'@@:@', isRequired=False), objc.selector(None, b'setNilValueForKey:', b'v@:@', isRequired=False), objc.selector(None, b'setValue:forKeyPath:', b'v@:@@', isRequired=False), objc.selector(None, b'setValuesForKeysWithDictionary:', b'v@:@', isRequired=False), objc.selector(None, b'setValue:forUndefinedKey:', b'v@:@@', isRequired=False), objc.selector(None, b'mutableSetValueForKeyPath:', b'@@:@', isRequired=False)]), 'NSDeprecatedMethods': objc.informal_protocol('NSDeprecatedMethods', [objc.selector(None, b'poseAsClass:', b'v@:#', isRequired=False)]), 'NSDistantObjectRequestMethods': objc.informal_protocol('NSDistantObjectRequestMethods', [objc.selector(None, b'connection:handleRequest:', b'Z@:@@', isRequired=False)]), 'NSScriptKeyValueCoding': objc.informal_protocol('NSScriptKeyValueCoding', [objc.selector(None, b'removeValueAtIndex:fromPropertyWithKey:', sel32or64(b'v@:I@', b'v@:Q@'), isRequired=False), objc.selector(None, b'insertValue:inPropertyWithKey:', b'v@:@@', isRequired=False), objc.selector(None, b'valueWithUniqueID:inPropertyWithKey:', b'@@:@@', isRequired=False), objc.selector(None, b'insertValue:atIndex:inPropertyWithKey:', sel32or64(b'v@:@I@', b'v@:@Q@'), isRequired=False), objc.selector(None, b'coerceValue:forKey:', b'@@:@@', isRequired=False), objc.selector(None, b'replaceValueAtIndex:inPropertyWithKey:withValue:', sel32or64(b'v@:I@@', b'v@:Q@@'), isRequired=False), objc.selector(None, b'valueAtIndex:inPropertyWithKey:', sel32or64(b'@@:I@', b'@@:Q@'), isRequired=False), objc.selector(None, b'valueWithName:inPropertyWithKey:', b'@@:@@', isRequired=False)]), 'NSDiscardableContentProxy': objc.informal_protocol('NSDiscardableContentProxy', [objc.selector(None, b'autoContentAccessingProxy', b'@@:', isRequired=False)]), 'NSSpellServerDelegate': objc.informal_protocol('NSSpellServerDelegate', [objc.selector(None, b'spellServer:findMisspelledWordInString:language:wordCount:countOnly:', b'{_NSRange=II}@:@@@^iZ', isRequired=False), objc.selector(None, b'spellServer:suggestGuessesForWord:inLanguage:', b'@@:@@@', isRequired=False), objc.selector(None, b'spellServer:didLearnWord:inLanguage:', b'v@:@@@', isRequired=False), objc.selector(None, b'spellServer:didForgetWord:inLanguage:', b'v@:@@@', isRequired=False), objc.selector(None, b'spellServer:suggestCompletionsForPartialWordRange:inString:language:', b'@@:@{_NSRange=II}@@', isRequired=False), objc.selector(None, b'spellServer:checkGrammarInString:language:details:', b'{_NSRange=II}@:@@@^@', isRequired=False)]), 'NSDeprecatedKeyValueObservingCustomization': objc.informal_protocol('NSDeprecatedKeyValueObservingCustomization', [objc.selector(None, b'setKeys:triggerChangeNotificationsForDependentKey:', b'v@:@@', isRequired=False)]), 'NSFileManagerFileOperationAdditions': objc.informal_protocol('NSFileManagerFileOperationAdditions', 
[objc.selector(None, b'fileManager:shouldProceedAfterError:linkingItemAtURL:toURL:', b'Z@:@@@@', isRequired=False), objc.selector(None, b'fileManager:shouldRemoveItemAtURL:', b'Z@:@@', isRequired=False), objc.selector(None, b'fileManager:shouldCopyItemAtPath:toPath:', b'Z@:@@@', isRequired=False), objc.selector(None, b'fileManager:shouldRemoveItemAtPath:', b'Z@:@@', isRequired=False), objc.selector(None, b'fileManager:shouldProceedAfterError:movingItemAtURL:toURL:', b'Z@:@@@@', isRequired=False), objc.selector(None, b'fileManager:shouldProceedAfterError:movingItemAtPath:toPath:', b'Z@:@@@@', isRequired=False), objc.selector(None, b'fileManager:shouldMoveItemAtURL:toURL:', b'Z@:@@@', isRequired=False), objc.selector(None, b'fileManager:shouldLinkItemAtURL:toURL:', b'Z@:@@@', isRequired=False), objc.selector(None, b'fileManager:shouldProceedAfterError:removingItemAtPath:', b'Z@:@@@', isRequired=False), objc.selector(None, b'fileManager:shouldProceedAfterError:removingItemAtURL:', b'Z@:@@@', isRequired=False), objc.selector(None, b'fileManager:shouldMoveItemAtPath:toPath:', b'Z@:@@@', isRequired=False), objc.selector(None, b'fileManager:shouldProceedAfterError:linkingItemAtPath:toPath:', b'Z@:@@@@', isRequired=False), objc.selector(None, b'fileManager:shouldLinkItemAtPath:toPath:', b'Z@:@@@', isRequired=False), objc.selector(None, b'fileManager:shouldCopyItemAtURL:toURL:', b'Z@:@@@', isRequired=False), objc.selector(None, b'fileManager:shouldProceedAfterError:copyingItemAtPath:toPath:', b'Z@:@@@@', isRequired=False), objc.selector(None, b'fileManager:shouldProceedAfterError:copyingItemAtURL:toURL:', b'Z@:@@@@', isRequired=False)]), 'NSURLDownloadDelegate': objc.informal_protocol('NSURLDownloadDelegate', [objc.selector(None, b'download:didReceiveAuthenticationChallenge:', b'v@:@@', isRequired=False), objc.selector(None, b'downloadShouldUseCredentialStorage:', b'Z@:@', isRequired=False), objc.selector(None, b'download:didFailWithError:', b'v@:@@', isRequired=False), objc.selector(None, b'download:didCancelAuthenticationChallenge:', b'v@:@@', isRequired=False), objc.selector(None, b'download:canAuthenticateAgainstProtectionSpace:', b'Z@:@@', isRequired=False), objc.selector(None, b'download:shouldDecodeSourceDataOfMIMEType:', b'Z@:@@', isRequired=False), objc.selector(None, b'downloadDidBegin:', b'v@:@', isRequired=False), objc.selector(None, b'download:willSendRequest:redirectResponse:', b'@@:@@@', isRequired=False), objc.selector(None, b'download:didCreateDestination:', b'v@:@@', isRequired=False), objc.selector(None, b'downloadDidFinish:', b'v@:@', isRequired=False), objc.selector(None, b'download:didReceiveResponse:', b'v@:@@', isRequired=False), objc.selector(None, b'download:didReceiveDataOfLength:', sel32or64(b'v@:@I', b'v@:@Q'), isRequired=False), objc.selector(None, b'download:willResumeWithResponse:fromByte:', b'v@:@@q', isRequired=False), objc.selector(None, b'download:decideDestinationWithSuggestedFilename:', b'v@:@@', isRequired=False)]), 'NSComparisonMethods': objc.informal_protocol('NSComparisonMethods', [objc.selector(None, b'isCaseInsensitiveLike:', b'Z@:@', isRequired=False), objc.selector(None, b'isLessThan:', b'Z@:@', isRequired=False), objc.selector(None, b'isGreaterThanOrEqualTo:', b'Z@:@', isRequired=False), objc.selector(None, b'isNotEqualTo:', b'Z@:@', isRequired=False), objc.selector(None, b'isGreaterThan:', b'Z@:@', isRequired=False), objc.selector(None, b'isLike:', b'Z@:@', isRequired=False), objc.selector(None, b'isEqualTo:', b'Z@:@', isRequired=False), 
objc.selector(None, b'doesContain:', b'Z@:@', isRequired=False), objc.selector(None, b'isLessThanOrEqualTo:', b'Z@:@', isRequired=False)]), 'NSDeprecatedKeyValueCoding': objc.informal_protocol('NSDeprecatedKeyValueCoding', [objc.selector(None, b'valuesForKeys:', b'@@:@', isRequired=False), objc.selector(None, b'takeStoredValue:forKey:', b'v@:@@', isRequired=False), objc.selector(None, b'takeValue:forKey:', b'v@:@@', isRequired=False), objc.selector(None, b'storedValueForKey:', b'@@:@', isRequired=False), objc.selector(None, b'handleTakeValue:forUnboundKey:', b'v@:@@', isRequired=False), objc.selector(None, b'useStoredAccessor', b'Z@:', isRequired=False), objc.selector(None, b'takeValuesFromDictionary:', b'v@:@', isRequired=False), objc.selector(None, b'handleQueryWithUnboundKey:', b'@@:@', isRequired=False), objc.selector(None, b'takeValue:forKeyPath:', b'v@:@@', isRequired=False), objc.selector(None, b'unableToSetNilForKey:', b'v@:@', isRequired=False)]), 'NSScripting': objc.informal_protocol('NSScripting', [objc.selector(None, b'newScriptingObjectOfClass:forValueForKey:withContentsValue:properties:', b'@@:#@@@', isRequired=False), objc.selector(None, b'setScriptingProperties:', b'v@:@', isRequired=False), objc.selector(None, b'scriptingValueForSpecifier:', b'@@:@', isRequired=False), objc.selector(None, b'copyScriptingValue:forKey:withProperties:', b'@@:@@@', isRequired=False), objc.selector(None, b'scriptingProperties', b'@@:', isRequired=False)]), 'NSKeyValueObserving': objc.informal_protocol('NSKeyValueObserving', [objc.selector(None, b'observeValueForKeyPath:ofObject:change:context:', b'v@:@@@^v', isRequired=False)]), 'NSPortDelegateMethods': objc.informal_protocol('NSPortDelegateMethods', [objc.selector(None, b'handlePortMessage:', b'v@:@', isRequired=False)]), 'NSArchiverCallback': objc.informal_protocol('NSArchiverCallback', [objc.selector(None, b'classForArchiver', b'#@:', isRequired=False), objc.selector(None, b'replacementObjectForArchiver:', b'@@:@', isRequired=False)]), 'NSThreadPerformAdditions': objc.informal_protocol('NSThreadPerformAdditions', [objc.selector(None, b'performSelector:onThread:withObject:waitUntilDone:', b'v@::@@Z', isRequired=False), objc.selector(None, b'performSelectorOnMainThread:withObject:waitUntilDone:', b'v@::@Z', isRequired=False), objc.selector(None, b'performSelectorInBackground:withObject:', b'v@::@', isRequired=False), objc.selector(None, b'performSelector:onThread:withObject:waitUntilDone:modes:', b'v@::@@Z@', isRequired=False), objc.selector(None, b'performSelectorOnMainThread:withObject:waitUntilDone:modes:', b'v@::@Z@', isRequired=False)]), 'NSKeyedUnarchiverObjectSubstitution': objc.informal_protocol('NSKeyedUnarchiverObjectSubstitution', [objc.selector(None, b'classForKeyedUnarchiver', b'#@:', isRequired=False)]), 'NSScriptingComparisonMethods': objc.informal_protocol('NSScriptingComparisonMethods', [objc.selector(None, b'scriptingContains:', b'Z@:@', isRequired=False), objc.selector(None, b'scriptingIsGreaterThan:', b'Z@:@', isRequired=False), objc.selector(None, b'scriptingEndsWith:', b'Z@:@', isRequired=False), objc.selector(None, b'scriptingIsLessThan:', b'Z@:@', isRequired=False), objc.selector(None, b'scriptingBeginsWith:', b'Z@:@', isRequired=False), objc.selector(None, b'scriptingIsGreaterThanOrEqualTo:', b'Z@:@', isRequired=False), objc.selector(None, b'scriptingIsEqualTo:', b'Z@:@', isRequired=False), objc.selector(None, b'scriptingIsLessThanOrEqualTo:', b'Z@:@', isRequired=False)]), 'NSURLConnectionDelegate': 
objc.informal_protocol('NSURLConnectionDelegate', [objc.selector(None, b'connection:didReceiveData:', b'v@:@@', isRequired=False), objc.selector(None, b'connection:needNewBodyStream:', b'@@:@@', isRequired=False), objc.selector(None, b'connection:didSendBodyData:totalBytesWritten:totalBytesExpectedToWrite:', sel32or64(b'v@:@iii', b'v@:@qqq'), isRequired=False), objc.selector(None, b'connection:canAuthenticateAgainstProtectionSpace:', b'Z@:@@', isRequired=False), objc.selector(None, b'connection:willCacheResponse:', b'@@:@@', isRequired=False), objc.selector(None, b'connection:didReceiveAuthenticationChallenge:', b'v@:@@', isRequired=False), objc.selector(None, b'connectionDidFinishLoading:', b'v@:@', isRequired=False), objc.selector(None, b'connection:willSendRequest:redirectResponse:', b'@@:@@@', isRequired=False), objc.selector(None, b'connection:didReceiveResponse:', b'v@:@@', isRequired=False), objc.selector(None, b'connection:didFailWithError:', b'v@:@@', isRequired=False), objc.selector(None, b'connectionShouldUseCredentialStorage:', b'Z@:@', isRequired=False), objc.selector(None, b'connection:didCancelAuthenticationChallenge:', b'v@:@@', isRequired=False)]), 'NSNetServiceDelegateMethods': objc.informal_protocol('NSNetServiceDelegateMethods', [objc.selector(None, b'netServiceWillPublish:', b'v@:@', isRequired=False), objc.selector(None, b'netServiceDidPublish:', b'v@:@', isRequired=False), objc.selector(None, b'netService:didNotPublish:', b'v@:@@', isRequired=False), objc.selector(None, b'netServiceWillResolve:', b'v@:@', isRequired=False), objc.selector(None, b'netServiceDidResolveAddress:', b'v@:@', isRequired=False), objc.selector(None, b'netService:didNotResolve:', b'v@:@@', isRequired=False), objc.selector(None, b'netServiceDidStop:', b'v@:@', isRequired=False), objc.selector(None, b'netService:didUpdateTXTRecordData:', b'v@:@@', isRequired=False)]), 'NSKeyedArchiverDelegate': objc.informal_protocol('NSKeyedArchiverDelegate', [objc.selector(None, b'archiver:willEncodeObject:', b'@@:@@', isRequired=False), objc.selector(None, b'archiver:didEncodeObject:', b'v@:@@', isRequired=False), objc.selector(None, b'archiver:willReplaceObject:withObject:', b'v@:@@@', isRequired=False), objc.selector(None, b'archiverWillFinish:', b'v@:@', isRequired=False), objc.selector(None, b'archiverDidFinish:', b'v@:@', isRequired=False)]), 'NSDistributedObjects': objc.informal_protocol('NSDistributedObjects', [objc.selector(None, b'replacementObjectForPortCoder:', b'@@:@', isRequired=False), objc.selector(None, b'classForPortCoder', b'#@:', isRequired=False)]), 'NSKeyValueObserverRegistration': objc.informal_protocol('NSKeyValueObserverRegistration', [objc.selector(None, b'removeObserver:forKeyPath:context:', b'v@:@@^v', isRequired=False), objc.selector(None, b'addObserver:forKeyPath:options:context:', sel32or64(b'v@:@@I^v', b'v@:@@Q^v'), isRequired=False), objc.selector(None, b'removeObserver:forKeyPath:', b'v@:@@', isRequired=False)]), 'NSScriptObjectSpecifiers': objc.informal_protocol('NSScriptObjectSpecifiers', [objc.selector(None, b'objectSpecifier', b'@@:', isRequired=False), objc.selector(None, b'indicesOfObjectsByEvaluatingObjectSpecifier:', b'@@:@', isRequired=False)]), 'NSErrorRecoveryAttempting': objc.informal_protocol('NSErrorRecoveryAttempting', [objc.selector(None, b'attemptRecoveryFromError:optionIndex:delegate:didRecoverSelector:contextInfo:', sel32or64(b'v@:@I@:^v', b'v@:@Q@:^v'), isRequired=False), objc.selector(None, b'attemptRecoveryFromError:optionIndex:', sel32or64(b'Z@:@I', 
b'Z@:@Q'), isRequired=False)]), 'NSXMLParserDelegateEventAdditions': objc.informal_protocol('NSXMLParserDelegateEventAdditions', [objc.selector(None, b'parserDidStartDocument:', b'v@:@', isRequired=False), objc.selector(None, b'parserDidEndDocument:', b'v@:@', isRequired=False), objc.selector(None, b'parser:foundNotationDeclarationWithName:publicID:systemID:', b'v@:@@@@', isRequired=False), objc.selector(None, b'parser:foundUnparsedEntityDeclarationWithName:publicID:systemID:notationName:', b'v@:@@@@@', isRequired=False), objc.selector(None, b'parser:foundAttributeDeclarationWithName:forElement:type:defaultValue:', b'v@:@@@@@', isRequired=False), objc.selector(None, b'parser:foundElementDeclarationWithName:model:', b'v@:@@@', isRequired=False), objc.selector(None, b'parser:foundInternalEntityDeclarationWithName:value:', b'v@:@@@', isRequired=False), objc.selector(None, b'parser:foundExternalEntityDeclarationWithName:publicID:systemID:', b'v@:@@@@', isRequired=False), objc.selector(None, b'parser:didStartElement:namespaceURI:qualifiedName:attributes:', b'v@:@@@@@', isRequired=False), objc.selector(None, b'parser:didEndElement:namespaceURI:qualifiedName:', b'v@:@@@@', isRequired=False), objc.selector(None, b'parser:didStartMappingPrefix:toURI:', b'v@:@@@', isRequired=False), objc.selector(None, b'parser:didEndMappingPrefix:', b'v@:@@', isRequired=False), objc.selector(None, b'parser:foundCharacters:', b'v@:@@', isRequired=False), objc.selector(None, b'parser:foundIgnorableWhitespace:', b'v@:@@', isRequired=False), objc.selector(None, b'parser:foundProcessingInstructionWithTarget:data:', b'v@:@@@', isRequired=False), objc.selector(None, b'parser:foundComment:', b'v@:@@', isRequired=False), objc.selector(None, b'parser:foundCDATA:', b'v@:@@', isRequired=False), objc.selector(None, b'parser:resolveExternalEntityName:systemID:', b'@@:@@@', isRequired=False), objc.selector(None, b'parser:parseErrorOccurred:', b'v@:@@', isRequired=False), objc.selector(None, b'parser:validationErrorOccurred:', b'v@:@@', isRequired=False)]), 'NSClassDescriptionPrimitives': objc.informal_protocol('NSClassDescriptionPrimitives', [objc.selector(None, b'inverseForRelationshipKey:', b'@@:@', isRequired=False), objc.selector(None, b'attributeKeys', b'@@:', isRequired=False), objc.selector(None, b'toOneRelationshipKeys', b'@@:', isRequired=False), objc.selector(None, b'classDescription', b'@@:', isRequired=False), objc.selector(None, b'toManyRelationshipKeys', b'@@:', isRequired=False)]), 'NSNetServiceBrowserDelegateMethods': objc.informal_protocol('NSNetServiceBrowserDelegateMethods', [objc.selector(None, b'netServiceBrowserWillSearch:', b'v@:@', isRequired=False), objc.selector(None, b'netServiceBrowserDidStopSearch:', b'v@:@', isRequired=False), objc.selector(None, b'netServiceBrowser:didNotSearch:', b'v@:@@', isRequired=False), objc.selector(None, b'netServiceBrowser:didFindDomain:moreComing:', b'v@:@@Z', isRequired=False), objc.selector(None, b'netServiceBrowser:didFindService:moreComing:', b'v@:@@Z', isRequired=False), objc.selector(None, b'netServiceBrowser:didRemoveDomain:moreComing:', b'v@:@@Z', isRequired=False), objc.selector(None, b'netServiceBrowser:didRemoveService:moreComing:', b'v@:@@Z', isRequired=False)]), 'NSConnectionDelegateMethods': objc.informal_protocol('NSConnectionDelegateMethods', [objc.selector(None, b'makeNewConnection:sender:', b'Z@:@@', isRequired=False), objc.selector(None, b'connection:shouldMakeNewConnection:', b'Z@:@@', isRequired=False), objc.selector(None, 
b'authenticationDataForComponents:', b'@@:@', isRequired=False), objc.selector(None, b'authenticateComponents:withData:', b'Z@:@@', isRequired=False), objc.selector(None, b'createConversationForConnection:', b'@@:@', isRequired=False)]), 'NSURLClient': objc.informal_protocol('NSURLClient', [objc.selector(None, b'URLResourceDidFinishLoading:', b'v@:@', isRequired=False), objc.selector(None, b'URLResourceDidCancelLoading:', b'v@:@', isRequired=False), objc.selector(None, b'URL:resourceDataDidBecomeAvailable:', b'v@:@@', isRequired=False), objc.selector(None, b'URL:resourceDidFailLoadingWithReason:', b'v@:@@', isRequired=False)]), 'NSKeyValueObservingCustomization': objc.informal_protocol('NSKeyValueObservingCustomization', [objc.selector(None, b'observationInfo', b'^v@:', isRequired=False), objc.selector(None, b'setObservationInfo:', b'v@:^v', isRequired=False), objc.selector(None, b'keyPathsForValuesAffectingValueForKey:', b'@@:@', isRequired=False), objc.selector(None, b'automaticallyNotifiesObserversForKey:', b'Z@:@', isRequired=False)]), 'NSDelayedPerforming': objc.informal_protocol('NSDelayedPerforming', [objc.selector(None, b'performSelector:withObject:afterDelay:', b'v@::@d', isRequired=False), objc.selector(None, b'cancelPreviousPerformRequestsWithTarget:', b'v@:@', isRequired=False), objc.selector(None, b'cancelPreviousPerformRequestsWithTarget:selector:object:', b'v@:@:@', isRequired=False), objc.selector(None, b'performSelector:withObject:afterDelay:inModes:', b'v@::@d@', isRequired=False)]), 'NSMetadataQueryDelegate': objc.informal_protocol('NSMetadataQueryDelegate', [objc.selector(None, b'metadataQuery:replacementObjectForResultObject:', b'@@:@@', isRequired=False), objc.selector(None, b'metadataQuery:replacementValueForAttribute:value:', b'@@:@@@', isRequired=False)]), 'NSKeyedArchiverObjectSubstitution': objc.informal_protocol('NSKeyedArchiverObjectSubstitution', [objc.selector(None, b'classForKeyedArchiver', b'#@:', isRequired=False), objc.selector(None, b'replacementObjectForKeyedArchiver:', b'@@:@', isRequired=False), objc.selector(None, b'classFallbacksForKeyedArchiver', b'@@:', isRequired=False)]), 'NSKeyedUnarchiverDelegate': objc.informal_protocol('NSKeyedUnarchiverDelegate', [objc.selector(None, b'unarchiver:cannotDecodeObjectOfClassName:originalClasses:', b'#@:@@@', isRequired=False), objc.selector(None, b'unarchiver:didDecodeObject:', b'@@:@@', isRequired=False), objc.selector(None, b'unarchiver:willReplaceObject:withObject:', b'v@:@@@', isRequired=False), objc.selector(None, b'unarchiverWillFinish:', b'v@:@', isRequired=False), objc.selector(None, b'unarchiverDidFinish:', b'v@:@', isRequired=False)])} -expressions = {'NSAppleEventSendDefaultOptions': 'NSAppleEventSendWaitForReply | NSAppleEventSendCanInteract'} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Foundation/_metadata.pyc b/env/lib/python2.7/site-packages/Foundation/_metadata.pyc deleted file mode 100644 index 4bc7ab92..00000000 Binary files a/env/lib/python2.7/site-packages/Foundation/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Foundation/_nsindexset.py b/env/lib/python2.7/site-packages/Foundation/_nsindexset.py deleted file mode 100644 index 2f4105c8..00000000 --- a/env/lib/python2.7/site-packages/Foundation/_nsindexset.py +++ /dev/null @@ -1,20 +0,0 @@ -import objc - - -def __len__(self): - return self.length() - -def __getitem__(self, idx): - if isinstance(idx, slice): - raise ValueError(idx) - return self.indexAtPosition_(idx) - -def __add__(self, 
value): - return self.indexPathByAddingIndex_(value) - - -objc.addConvenienceForClass("NSIndexPath", ( - ('__len__', __len__), - ('__getitem__', __getitem__), - ('__add__', __add__), -)) diff --git a/env/lib/python2.7/site-packages/Foundation/_nsindexset.pyc b/env/lib/python2.7/site-packages/Foundation/_nsindexset.pyc deleted file mode 100644 index d34f427b..00000000 Binary files a/env/lib/python2.7/site-packages/Foundation/_nsindexset.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Foundation/_nsobject.py b/env/lib/python2.7/site-packages/Foundation/_nsobject.py deleted file mode 100644 index f937e9a4..00000000 --- a/env/lib/python2.7/site-packages/Foundation/_nsobject.py +++ /dev/null @@ -1,208 +0,0 @@ -""" -Define a category on NSObject with some useful methods. -""" -import objc -import sys - -if sys.version_info[0] == 2: - def _str(v): return v - exec("""\ -def _raise(exc_type, exc_value, exc_trace): - raise exc_type, exc_value, exc_trace -""") -else: - def _str(v): - if isinstance(v, str): - return v - return v.decode('ascii') - - def _raise(exc_type, exc_value, exc_trace): - raise exc_type(exc_value).with_traceback(exc_trace) - - -NSObject = objc.lookUpClass('NSObject') - -class NSObject (objc.Category(NSObject)): - - @objc.namedSelector(b"_pyobjc_performOnThread:") - def _pyobjc_performOnThread_(self, callinfo): - try: - sel, arg = callinfo - # XXX: PyObjC's methodForSelector implementation doesn't work - # with Python methods, using getattr instead - #m = self.methodForSelector_(sel) - m = getattr(self, _str(sel)) - m(arg) - except: - import traceback - traceback.print_exc(file=sys.stderr) - - @objc.namedSelector(b"_pyobjc_performOnThreadWithResult:") - def _pyobjc_performOnThreadWithResult_(self, callinfo): - try: - sel, arg, result = callinfo - #m = self.methodForSelector_(sel) - m = getattr(self, _str(sel)) - r = m(arg) - result.append((True, r)) - except: - result.append((False, sys.exc_info())) - - - if hasattr(NSObject, "performSelector_onThread_withObject_waitUntilDone_"): - @objc.namedSelector(b"pyobjc_performSelector:onThread:withObject:waitUntilDone:") - def pyobjc_performSelector_onThread_withObject_waitUntilDone_( - self, aSelector, thread, arg, wait): - """ - A version of performSelector:onThread:withObject:waitUntilDone: that - will log exceptions in the called method (instead of aborting the - NSRunLoop on the other thread). - """ - self.performSelector_onThread_withObject_waitUntilDone_( - b'_pyobjc_performOnThread:', thread, (aSelector, arg), wait) - - @objc.namedSelector(b"pyobjc_performSelector:onThread:withObject:waitUntilDone:modes:") - def pyobjc_performSelector_onThread_withObject_waitUntilDone_modes_( - self, aSelector, thread, arg, wait, modes): - """ - A version of performSelector:onThread:withObject:waitUntilDone:modes: - that will log exceptions in the called method (instead of aborting the - NSRunLoop on the other thread). - """ - self.performSelector_onThread_withObject_waitUntilDone_modes_( - b'_pyobjc_performOnThread:', thread, (aSelector, arg), wait, modes) - - @objc.namedSelector(b"pyobjc_performSelector:withObject:afterDelay:") - def pyobjc_performSelector_withObject_afterDelay_( - self, aSelector, arg, delay): - """ - A version of performSelector:withObject:afterDelay: - that will log exceptions in the called method (instead of aborting the - NSRunLoop). 
- """ - self.performSelector_withObject_afterDelay_( - b'_pyobjc_performOnThread:', (aSelector, arg), delay) - - @objc.namedSelector(b"pyobjc_performSelector:withObject:afterDelay:inModes:") - def pyobjc_performSelector_withObject_afterDelay_inModes_( - self, aSelector, arg, delay, modes): - """ - A version of performSelector:withObject:afterDelay:inModes: - that will log exceptions in the called method (instead of aborting the - NSRunLoop). - """ - self.performSelector_withObject_afterDelay_inModes_( - b'_pyobjc_performOnThread:', (aSelector, arg), delay, modes) - - if hasattr(NSObject, "performSelectorInBackground_withObject_"): - @objc.namedSelector(b"pyobjc_performSelectorInBackground:withObject:") - def pyobjc_performSelectorInBackground_withObject_( - self, aSelector, arg): - """ - A version of performSelectorInBackground:withObject: - that will log exceptions in the called method (instead of aborting the - NSRunLoop). - """ - self.performSelectorInBackground_withObject_( - b'_pyobjc_performOnThread:', (aSelector, arg)) - - @objc.namedSelector(b"pyobjc_performSelectorOnMainThread:withObject:waitUntilDone:") - def pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_( - self, aSelector, arg, wait): - """ - A version of performSelectorOnMainThread:withObject:waitUntilDone: - that will log exceptions in the called method (instead of aborting the - NSRunLoop in the main thread). - """ - self.performSelectorOnMainThread_withObject_waitUntilDone_( - b'_pyobjc_performOnThread:', (aSelector, arg), wait) - - @objc.namedSelector(b"pyobjc_performSelectorOnMainThread:withObject:waitUntilDone:modes:") - def pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_modes_( - self, aSelector, arg, wait, modes): - """ - A version of performSelectorOnMainThread:withObject:waitUntilDone:modes: - that will log exceptions in the called method (instead of aborting the - NSRunLoop in the main thread). - """ - self.performSelectorOnMainThread_withObject_waitUntilDone_modes_( - b'_pyobjc_performOnThread:', (aSelector, arg), wait, modes) - - - # And some a some versions that return results - - @objc.namedSelector(b"pyobjc_performSelectorOnMainThread:withObject:modes:") - def pyobjc_performSelectorOnMainThread_withObject_modes_( - self, aSelector, arg, modes): - """ - Simular to performSelectorOnMainThread:withObject:waitUntilDone:modes:, - but: - - - always waits until done - - returns the return value of the called method - - if the called method raises an exception, this will raise the same - exception - """ - result = [] - self.performSelectorOnMainThread_withObject_waitUntilDone_modes_( - b'_pyobjc_performOnThreadWithResult:', - (aSelector, arg, result), True, modes) - isOK, result = result[0] - - if isOK: - return result - else: - exc_type, exc_value, exc_trace = result - _raise(exc_type, exc_value, exc_trace) - - @objc.namedSelector(b"pyobjc_performSelectorOnMainThread:withObject:") - def pyobjc_performSelectorOnMainThread_withObject_( - self, aSelector, arg): - result = [] - self.performSelectorOnMainThread_withObject_waitUntilDone_( - b'_pyobjc_performOnThreadWithResult:', - (aSelector, arg, result), True) - isOK, result = result[0] - - if isOK: - return result - else: - exc_type, exc_value, exc_trace = result - _raise(exc_type, exc_value, exc_trace) - - if hasattr(NSObject, "performSelector_onThread_withObject_waitUntilDone_"): - # These methods require Leopard, don't define them if the - # platform functionality isn't present. 
- - @objc.namedSelector(b"pyobjc_performSelector:onThread:withObject:modes:") - def pyobjc_performSelector_onThread_withObject_modes_( - self, aSelector, thread, arg, modes): - result = [] - self.performSelector_onThread_withObject_waitUntilDone_modes_( - b'_pyobjc_performOnThreadWithResult:', thread, - (aSelector, arg, result), True, modes) - isOK, result = result[0] - - if isOK: - return result - else: - exc_type, exc_value, exc_trace = result - _raise(exc_type, exc_value, exc_trace) - - @objc.namedSelector(b"pyobjc_performSelector:onThread:withObject:") - def pyobjc_performSelector_onThread_withObject_( - self, aSelector, thread, arg): - result = [] - self.performSelector_onThread_withObject_waitUntilDone_( - b'_pyobjc_performOnThreadWithResult:', thread, - (aSelector, arg, result), True) - isOK, result = result[0] - - if isOK: - return result - else: - exc_type, exc_value, exc_trace = result - _raise(exc_type, exc_value, exc_trace) - - -del NSObject diff --git a/env/lib/python2.7/site-packages/Foundation/_nsobject.pyc b/env/lib/python2.7/site-packages/Foundation/_nsobject.pyc deleted file mode 100644 index 804de166..00000000 Binary files a/env/lib/python2.7/site-packages/Foundation/_nsobject.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameCenter/_GameCenter.so b/env/lib/python2.7/site-packages/GameCenter/_GameCenter.so deleted file mode 100755 index ccb9b985..00000000 Binary files a/env/lib/python2.7/site-packages/GameCenter/_GameCenter.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameCenter/__init__.py b/env/lib/python2.7/site-packages/GameCenter/__init__.py deleted file mode 100644 index 174aa4dd..00000000 --- a/env/lib/python2.7/site-packages/GameCenter/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -''' -Python mapping for the GameCenter framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Cocoa - -from GameCenter import _metadata, _GameCenter - -try: - long -except NameError: - long = int - -sys.modules['GameCenter'] = mod = objc.ObjCLazyModule( - "GameCenter", - "com.apple.GameKit", - objc.pathForFramework("/System/Library/Frameworks/GameKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa,)) - -import sys -del sys.modules['GameCenter._metadata'] diff --git a/env/lib/python2.7/site-packages/GameCenter/__init__.pyc b/env/lib/python2.7/site-packages/GameCenter/__init__.pyc deleted file mode 100644 index 40b1f5b4..00000000 Binary files a/env/lib/python2.7/site-packages/GameCenter/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameCenter/_metadata.py b/env/lib/python2.7/site-packages/GameCenter/_metadata.py deleted file mode 100644 index 2a92c6da..00000000 --- a/env/lib/python2.7/site-packages/GameCenter/_metadata.py +++ /dev/null @@ -1,163 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Aug 2 11:55:56 2015 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$GKErrorDomain$GKExchangeTimeoutDefault@d$GKExchangeTimeoutNone@d$GKPlayerAuthenticationDidChangeNotificationName$GKSessionErrorDomain$GKTurnTimeoutDefault@d$GKTurnTimeoutNone@d$GKVoiceChatServiceErrorDomain$''' -enums = '''$GKChallengeStateCompleted@2$GKChallengeStateDeclined@3$GKChallengeStateInvalid@0$GKChallengeStatePending@1$GKErrorAuthenticationInProgress@7$GKErrorCancelled@2$GKErrorChallengeInvalid@19$GKErrorCommunicationsFailure@3$GKErrorGameUnrecognized@15$GKErrorInvalidCredentials@5$GKErrorInvalidParameter@17$GKErrorInvalidPlayer@8$GKErrorInvitationsDisabled@25$GKErrorMatchRequestInvalid@13$GKErrorNotAuthenticated@6$GKErrorNotSupported@16$GKErrorParentalControlsBlocked@10$GKErrorPlayerPhotoFailure@26$GKErrorPlayerStatusExceedsMaximumLength@11$GKErrorPlayerStatusInvalid@12$GKErrorScoreNotSet@9$GKErrorTurnBasedInvalidParticipant@22$GKErrorTurnBasedInvalidState@24$GKErrorTurnBasedInvalidTurn@23$GKErrorTurnBasedMatchDataTooLarge@20$GKErrorTurnBasedTooManySessions@21$GKErrorUbiquityContainerUnavailable@27$GKErrorUnderage@14$GKErrorUnexpectedConnection@18$GKErrorUnknown@1$GKErrorUserDenied@4$GKGameCenterViewControllerStateAchievements@1$GKGameCenterViewControllerStateChallenges@2$GKGameCenterViewControllerStateDefault@-1$GKGameCenterViewControllerStateLeaderboards@0$GKInviteRecipientResponseAccepted@0$GKInviteRecipientResponseDeclined@1$GKInviteRecipientResponseFailed@2$GKInviteRecipientResponseIncompatible@3$GKInviteRecipientResponseNoAnswer@5$GKInviteRecipientResponseUnableToConnect@4$GKInviteeResponseAccepted@0$GKInviteeResponseDeclined@1$GKInviteeResponseFailed@2$GKInviteeResponseIncompatible@3$GKInviteeResponseNoAnswer@5$GKInviteeResponseUnableToConnect@4$GKLeaderboardPlayerScopeFriendsOnly@1$GKLeaderboardPlayerScopeGlobal@0$GKLeaderboardTimeScopeAllTime@2$GKLeaderboardTimeScopeToday@0$GKLeaderboardTimeScopeWeek@1$GKMatchSendDataReliable@0$GKMatchSendDataUnreliable@1$GKMatchTypeHosted@1$GKMatchTypePeerToPeer@0$GKMatchTypeTurnBased@2$GKPeerStateAvailable@0$GKPeerStateConnected@2$GKPeerStateConnecting@4$GKPeerStateDisconnected@3$GKPeerStateUnavailable@1$GKPlayerStateConnected@1$GKPlayerStateDisconn
ected@2$GKPlayerStateUnknown@0$GKSendDataReliable@0$GKSendDataUnreliable@1$GKSessionCancelledError@30504$GKSessionCannotEnableError@30509$GKSessionConnectionClosedError@30506$GKSessionConnectionFailedError@30505$GKSessionConnectivityError@30201$GKSessionDataTooBigError@30507$GKSessionDeclinedError@30502$GKSessionInProgressError@30510$GKSessionInternalError@30203$GKSessionInvalidParameterError@30500$GKSessionModeClient@1$GKSessionModePeer@2$GKSessionModeServer@0$GKSessionNotConnectedError@30508$GKSessionPeerNotFoundError@30501$GKSessionSystemError@30205$GKSessionTimedOutError@30503$GKSessionTransportError@30202$GKSessionUnknownError@30204$GKTurnBasedExchangeStatusActive@1$GKTurnBasedExchangeStatusCanceled@4$GKTurnBasedExchangeStatusComplete@2$GKTurnBasedExchangeStatusResolved@3$GKTurnBasedExchangeStatusUnknown@0$GKTurnBasedMatchOutcomeCustomRange@16711680$GKTurnBasedMatchOutcomeFirst@6$GKTurnBasedMatchOutcomeFourth@9$GKTurnBasedMatchOutcomeLost@3$GKTurnBasedMatchOutcomeNone@0$GKTurnBasedMatchOutcomeQuit@1$GKTurnBasedMatchOutcomeSecond@7$GKTurnBasedMatchOutcomeThird@8$GKTurnBasedMatchOutcomeTied@4$GKTurnBasedMatchOutcomeTimeExpired@5$GKTurnBasedMatchOutcomeWon@2$GKTurnBasedMatchStatusEnded@2$GKTurnBasedMatchStatusMatching@3$GKTurnBasedMatchStatusOpen@1$GKTurnBasedMatchStatusUnknown@0$GKTurnBasedParticipantStatusActive@4$GKTurnBasedParticipantStatusDeclined@2$GKTurnBasedParticipantStatusDone@5$GKTurnBasedParticipantStatusInvited@1$GKTurnBasedParticipantStatusMatching@3$GKTurnBasedParticipantStatusUnknown@0$GKVoiceChatPlayerConnected@0$GKVoiceChatPlayerConnecting@4$GKVoiceChatPlayerDisconnected@1$GKVoiceChatPlayerSilent@3$GKVoiceChatPlayerSpeaking@2$GKVoiceChatServiceAudioUnavailableError@32005$GKVoiceChatServiceClientMissingRequiredMethodsError@32007$GKVoiceChatServiceInternalError@32000$GKVoiceChatServiceInvalidCallIDError@32004$GKVoiceChatServiceInvalidParameterError@32016$GKVoiceChatServiceMethodCurrentlyInvalidError@32012$GKVoiceChatServiceNetworkConfigurationError@32013$GKVoiceChatServiceNoRemotePacketsError@32001$GKVoiceChatServiceOutOfMemoryError@32015$GKVoiceChatServiceRemoteParticipantBusyError@32008$GKVoiceChatServiceRemoteParticipantCancelledError@32009$GKVoiceChatServiceRemoteParticipantDeclinedInviteError@32011$GKVoiceChatServiceRemoteParticipantHangupError@32003$GKVoiceChatServiceRemoteParticipantResponseInvalidError@32010$GKVoiceChatServiceUnableToConnectError@32002$GKVoiceChatServiceUninitializedClientError@32006$GKVoiceChatServiceUnsupportedRemoteVersionError@32014$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'GKAchievement', b'challengeComposeControllerWithMessage:players:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKAchievement', b'isCompleted', {'retval': {'type': 'Z'}}) - r(b'GKAchievement', b'isHidden', {'retval': {'type': 'Z'}}) - r(b'GKAchievement', b'loadAchievementsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKAchievement', b'reportAchievementWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKAchievement', b'reportAchievements:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': 
b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKAchievement', b'reportAchievements:withEligibleChallenges:withCompletionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKAchievement', b'resetAchievementsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKAchievement', b'selectChallengeablePlayerIDs:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKAchievement', b'selectChallengeablePlayers:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKAchievement', b'setShowsCompletionBanner:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKAchievement', b'showsCompletionBanner', {'retval': {'type': 'Z'}}) - r(b'GKAchievementDescription', b'isHidden', {'retval': {'type': 'Z'}}) - r(b'GKAchievementDescription', b'isReplayable', {'retval': {'type': 'Z'}}) - r(b'GKAchievementDescription', b'loadAchievementDescriptionsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKAchievementDescription', b'loadImageWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKChallenge', b'loadReceivedChallengesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKDialogController', b'presentViewController:', {'retval': {'type': 'Z'}}) - r(b'GKInvite', b'isHosted', {'retval': {'type': 'Z'}}) - r(b'GKInvite', b'setHosted:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKLeaderboard', b'isLoading', {'retval': {'type': 'Z'}}) - r(b'GKLeaderboard', b'loadCategoriesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLeaderboard', b'loadImageWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLeaderboard', b'loadLeaderboardsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLeaderboard', b'loadScoresWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLeaderboard', b'setDefaultLeaderboard:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLeaderboardSet', b'loadImageWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLeaderboardSet', b'loadLeaderboardSetsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': 
{'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLeaderboardSet', b'loadLeaderboardsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'authenticateHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}) - r(b'GKLocalPlayer', b'authenticateWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'deleteSavedGamesWithName:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'fetchSavedGamesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'generateIdentityVerificationSignatureWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'isAuthenticated', {'retval': {'type': 'Z'}}) - r(b'GKLocalPlayer', b'loadDefaultLeaderboardCategoryIDWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'loadDefaultLeaderboardIdentifierWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'loadFriendPlayersWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'loadFriendsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'loadLeaderboardSetsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'resolveConflictingSavedGames:withData:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'saveGameData:withName:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'setAuthenticateHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'setDefaultLeaderboardCategoryID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLocalPlayer', b'setDefaultLeaderboardIdentifier:completionHandler:', {'arguments': {3: 
{'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatch', b'chooseBestHostPlayerWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatch', b'chooseBestHostingPlayerWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatch', b'rematchWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatch', b'sendData:toPlayers:dataMode:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'GKMatch', b'sendData:toPlayers:withDataMode:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'GKMatch', b'sendDataToAllPlayers:withDataMode:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'GKMatchRequest', b'inviteeResponseHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}}}, 'type': '@?'}}) - r(b'GKMatchRequest', b'recipientResponseHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}}}, 'type': '@?'}}) - r(b'GKMatchRequest', b'setInviteeResponseHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}}}, 'type': '@?'}}}) - r(b'GKMatchRequest', b'setRecipientResponseHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'addPlayersToMatch:matchRequest:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'findMatchForRequest:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'findPlayersForHostedMatchRequest:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'findPlayersForHostedRequest:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'inviteHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}) - r(b'GKMatchmaker', b'matchForInvite:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'queryActivityWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'queryPlayerGroupActivity:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': 
{'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'setInviteHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'startBrowsingForNearbyPlayersWithHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'GKMatchmaker', b'startBrowsingForNearbyPlayersWithReachableHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'GKMatchmakerViewController', b'isHosted', {'retval': {'type': 'Z'}}) - r(b'GKMatchmakerViewController', b'setHosted:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKMatchmakerViewController', b'setHostedPlayer:connected:', {'arguments': {3: {'type': 'Z'}}}) - r(b'GKMatchmakerViewController', b'setHostedPlayer:didConnect:', {'arguments': {3: {'type': 'Z'}}}) - r(b'GKMatchmakerViewController', b'setShowExistingMatches:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKMatchmakerViewController', b'showExistingMatches', {'retval': {'type': 'Z'}}) - r(b'GKNotificationBanner', b'showBannerWithTitle:message:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'GKNotificationBanner', b'showBannerWithTitle:message:duration:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'GKPlayer', b'loadPhotoForSize:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKPlayer', b'loadPlayersForIdentifiers:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKSavedGame', b'loadDataWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKScore', b'challengeComposeControllerWithMessage:players:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKScore', b'reportScoreWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKScore', b'reportScores:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKScore', b'reportScores:withEligibleChallenges:withCompletionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKScore', b'setShouldSetDefaultLeaderboard:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKScore', b'shouldSetDefaultLeaderboard', {'retval': {'type': 'Z'}}) - r(b'GKSession', b'acceptConnectionFromPeer:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'GKSession', b'isActive', {'retval': {'type': 'Z'}}) - r(b'GKSession', b'isAvailable', {'retval': {'type': 'Z'}}) - 
r(b'GKSession', b'sendData:toPeers:withDataMode:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'GKSession', b'sendDataToAllPeers:withDataMode:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'GKSession', b'setAvailable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKSession', b'setIsActive:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKTurnBasedExchange', b'cancelWithLocalizableMessageKey:arguments:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedExchange', b'replyWithLocalizableMessageKey:arguments:data:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedExchange', b'setShowExistingMatches:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKTurnBasedExchange', b'showExistingMatches', {'retval': {'type': 'Z'}}) - r(b'GKTurnBasedMatch', b'acceptInviteWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'declineInviteWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'endMatchInTurnWithMatchData:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'endMatchInTurnWithMatchData:scores:achievements:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'endTurnWithNextParticipant:matchData:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'endTurnWithNextParticipants:turnTimeout:matchData:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'findMatchForRequest:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'loadMatchDataWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'loadMatchWithID:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'loadMatchesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'participantQuitInTurnWithOutcome:nextParticipant:matchData:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'participantQuitInTurnWithOutcome:nextParticipants:turnTimeout:matchData:completionHandler:', {'arguments': {6: {'callable': 
{'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'participantQuitOutOfTurnWithOutcome:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'rematchWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'removeWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'saveCurrentTurnWithMatchData:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'saveMergedMatchData:withResolvedExchanges:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'sendExchangeToParticipants:data:localizableMessageKey:arguments:timeout:completionHandler:', {'arguments': {7: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatch', b'sendReminderToParticipants:localizableMessageKey:arguments:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKTurnBasedMatchmakerViewController', b'setShowExistingMatches:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKTurnBasedMatchmakerViewController', b'showExistingMatches', {'retval': {'type': 'Z'}}) - r(b'GKVoiceChat', b'isActive', {'retval': {'type': 'Z'}}) - r(b'GKVoiceChat', b'isVoIPAllowed', {'retval': {'type': 'Z'}}) - r(b'GKVoiceChat', b'playerStateUpdateHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}}}, 'type': '@?'}}) - r(b'GKVoiceChat', b'playerVoiceChatStateDidChangeHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}) - r(b'GKVoiceChat', b'setActive:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKVoiceChat', b'setMute:forPlayer:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GKVoiceChat', b'setPlayer:muted:', {'arguments': {3: {'type': 'Z'}}}) - r(b'GKVoiceChat', b'setPlayerStateUpdateHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}}}, 'type': '@?'}}}) - r(b'GKVoiceChat', b'setPlayerVoiceChatStateDidChangeHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'handleTurnEventForMatch:didBecomeActive:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSObject', b'match:player:didChangeConnectionState:', {'arguments': {4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'match:player:didChangeState:', {'arguments': {4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'match:shouldReinviteDisconnectedPlayer:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'match:shouldReinvitePlayer:', {'retval': {'type': 'Z'}}) - r(b'NSObject', 
b'player:receivedTurnEventForMatch:didBecomeActive:', {'arguments': {4: {'type': 'Z'}}}) - r(b'NSObject', b'session:peer:didChangeState:', {'arguments': {4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'shouldShowBannerForLocallyCompletedChallenge:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'shouldShowBannerForLocallyReceivedChallenge:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'shouldShowBannerForRemotelyCompletedChallenge:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'voiceChatService:didReceiveInvitationFromParticipantID:callID:', {'arguments': {4: {'type': sel32or64(b'I', b'Q')}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/GameCenter/_metadata.pyc b/env/lib/python2.7/site-packages/GameCenter/_metadata.pyc deleted file mode 100644 index 39056046..00000000 Binary files a/env/lib/python2.7/site-packages/GameCenter/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameController/__init__.py b/env/lib/python2.7/site-packages/GameController/__init__.py deleted file mode 100644 index a976fa38..00000000 --- a/env/lib/python2.7/site-packages/GameController/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the GameController framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Cocoa - -from GameController import _metadata - -sys.modules['GameController'] = mod = objc.ObjCLazyModule( - "GameController", - "com.apple.GameController", - objc.pathForFramework("/System/Library/Frameworks/GameController.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa,)) - -import sys -del sys.modules['GameController._metadata'] diff --git a/env/lib/python2.7/site-packages/GameController/__init__.pyc b/env/lib/python2.7/site-packages/GameController/__init__.pyc deleted file mode 100644 index 7ef17a4d..00000000 Binary files a/env/lib/python2.7/site-packages/GameController/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameController/_metadata.py b/env/lib/python2.7/site-packages/GameController/_metadata.py deleted file mode 100644 index 93c74e1a..00000000 --- a/env/lib/python2.7/site-packages/GameController/_metadata.py +++ /dev/null @@ -1,62 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Feb 18 19:20:58 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'GCMicroGamepadSnapshotData': objc.createStructType('GCMicroGamepadSnapshotData', b'{_GCMicroGamepadSnapshotData=SSffff}', ['version', 'size', 'dpadX', 'dpadY', 'buttonA', 'buttonX'], None, 1), 'GCQuaternion': objc.createStructType('GCQuaternion', b'{GCQuaternion=dddd}', ['x', 'y', 'z', 'w']), 'GCExtendedGamepadValueChangedHandler': objc.createStructType('GCExtendedGamepadValueChangedHandler', b'{_GCGamepadSnapShotDataV100=SSffffffff}', ['version', 'size', 'dpadX', 'dpadY', 'buttonA', 'buttonB', 'buttonX', 'buttonY', 'leftShoulder', 'rightShoulder']), 'GCAcceleration': objc.createStructType('GCAcceleration', b'{_GCAcceleration=ddd}', ['x', 'y', 'z']), 'GCGamepadSnapShotDataV100': 
objc.createStructType('GCGamepadSnapShotDataV100', b'{_GCGamepadSnapShotDataV100=SSffffffff}', ['version', 'size', 'dpadX', 'dpadY', 'buttonA', 'buttonB', 'buttonX', 'buttonY', 'leftShoulder', 'rightShoulder'], None, 1), 'GCEulerAngles': objc.createStructType('GCEulerAngles', b'{_GCEulerAngles=ddd}', ['pitch', 'yaw', 'roll']), 'GCExtendedGamepadSnapShotDataV100': objc.createStructType('GCExtendedGamepadSnapShotDataV100', b'{_GCExtendedGamepadSnapShotDataV100=SSffffffffffffff}', ['version', 'size', 'dpadX', 'dpadY', 'buttonA', 'buttonB', 'buttonX', 'buttonY', 'leftShoulder', 'rightShoulder', 'leftThumbstickX', 'leftThumbstickY', 'rightThumbstickX', 'rightThumbstickY', 'leftTrigger', 'rightTrigger'], None, 1), 'GCRotationRate': objc.createStructType('GCRotationRate', b'{_GCRotationRate=ddd}', ['x', 'y', 'z']), 'GCMicroGamepadSnapShotDataV100': objc.createStructType('GCMicroGamepadSnapShotDataV100', b'{_GCMicroGamepadSnapShotDataV100=SSffff}', ['version', 'size', 'dpadX', 'dpadY', 'buttonA', 'buttonX'], None, 1), 'GCExtendedGamepadSnapshotData': objc.createStructType('GCExtendedGamepadSnapshotData', b'{_GCExtendedGamepadSnapshotData=SSffffffffffffffZZ}', ['version', 'size', 'dpadX', 'dpadY', 'buttonA', 'buttonB', 'buttonX', 'buttonY', 'leftShoulder', 'rightShoulder', 'leftThumbstickX', 'leftThumbstickY', 'rightThumbstickX', 'rightThumbstickY', 'leftTrigger', 'rightTrigger', 'leftThumbstickButton', 'rightThumbstickButton'], None, 1)}) -constants = '''$GCControllerDidConnectNotification$GCControllerDidDisconnectNotification$GCCurrentExtendedGamepadSnapshotDataVersion@q$GCCurrentMicroGamepadSnapshotDataVersion@q$''' -enums = '''$GCControllerPlayerIndex1@0$GCControllerPlayerIndex2@1$GCControllerPlayerIndex3@2$GCControllerPlayerIndex4@3$GCControllerPlayerIndexUnset@-1$GCExtendedGamepadSnapshotDataVersion1@256$GCExtendedGamepadSnapshotDataVersion2@257$GCMicroGamepadSnapshotDataVersion1@256$''' -misc.update({}) -functions={'GCExtendedGamepadSnapshotDataFromNSData': (b'Z^{_GCExtendedGamepadSnapshotData=SSffffffffffffffZZ}@', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'NSDataFromGCMicroGamepadSnapShotDataV100': (b'@^{_GCMicroGamepadSnapShotDataV100=SSffff}', '', {'arguments': {0: {'type_modifier': 'n'}}}), 'GCGamepadSnapShotDataV100FromNSData': (b'Z^{_GCGamepadSnapShotDataV100=SSffffffff}@', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'NSDataFromGCMicroGamepadSnapshotData': (b'@^{_GCMicroGamepadSnapshotData=SSffff}', '', {'arguments': {0: {'type_modifier': 'n'}}}), 'NSDataFromGCGamepadSnapShotDataV100': (b'@^{_GCGamepadSnapShotDataV100=SSffffffff}', '', {'arguments': {0: {'type_modifier': 'n'}}}), 'GCMicroGamepadSnapshotDataFromNSData': (b'Z^{_GCMicroGamepadSnapshotData=SSffff}@', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'GCExtendedGamepadSnapShotDataV100FromNSData': (b'Z^{_GCExtendedGamepadSnapShotDataV100=SSffffffffffffff}@', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'GCMicroGamepadSnapShotDataV100FromNSData': (b'Z^{_GCMicroGamepadSnapShotDataV100=SSffff}@', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'NSDataFromGCExtendedGamepadSnapShotDataV100': (b'@^{_GCExtendedGamepadSnapShotDataV100=SSffffffffffffff}',), 'NSDataFromGCExtendedGamepadSnapshotData': (b'@^{_GCExtendedGamepadSnapshotData=SSffffffffffffffZZ}', '', {'arguments': {0: {'type_modifier': 'n'}}})} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'GCController', b'controllerPausedHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: 
{'type': b'@'}}}}}) - r(b'GCController', b'isAttachedToDevice', {'retval': {'type': b'Z'}}) - r(b'GCController', b'setControllerPausedHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GCController', b'startWirelessControllerDiscoveryWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'GCControllerAxisInput', b'setValueChangedHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'f'}}}}}}) - r(b'GCControllerAxisInput', b'valueChangedHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'f'}}}}}) - r(b'GCControllerButtonInput', b'isPressed', {'retval': {'type': b'Z'}}) - r(b'GCControllerButtonInput', b'pressedChangedHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'f'}, 3: {'type': b'Z'}}}}}) - r(b'GCControllerButtonInput', b'setPressedChangedHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'f'}, 3: {'type': b'Z'}}}}}}) - r(b'GCControllerButtonInput', b'setValueChangedHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'f'}, 3: {'type': b'Z'}}}}}}) - r(b'GCControllerButtonInput', b'valueChangedHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'f'}, 3: {'type': b'Z'}}}}}) - r(b'GCControllerDirectionPad', b'setValueChangedHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'f'}, 3: {'type': b'f'}}}}}}) - r(b'GCControllerDirectionPad', b'valueChangedHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'f'}, 3: {'type': b'f'}}}}}) - r(b'GCControllerElement', b'isAnalog', {'retval': {'type': b'Z'}}) - r(b'GCEventViewController', b'controllerUserInteractionEnabled', {'retval': {'type': 'Z'}}) - r(b'GCEventViewController', b'setControllerUserInteractionEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GCExtendedGamepad', b'setValueChangedHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GCExtendedGamepad', b'valueChangedHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'GCGamepad', b'setValueChangedHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GCGamepad', b'valueChangedHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'GCMicroGamepad', b'allowsRotation', {'retval': {'type': 'Z'}}) - r(b'GCMicroGamepad', b'reportsAbsoluteDpadValues', {'retval': {'type': 'Z'}}) - r(b'GCMicroGamepad', b'setAllowsRotation:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GCMicroGamepad', b'setReportsAbsoluteDpadValues:', {'arguments': {2: {'type': 'Z'}}}) - r(b'GCMicroGamepad', b'setValueChangedHandler:', {'arguments': {2: {'callable': {'retval': {'type': 
b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GCMicroGamepad', b'valueChangedHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'GCMotion', b'gravity', {'retval': {'type': b'{_GCAcceleration=ddd}'}}) - r(b'GCMotion', b'hasAttitudeAndRotationRate', {'retval': {'type': 'Z'}}) - r(b'GCMotion', b'rotationRate', {'retval': {'type': b'{_GCRotationRate=ddd}'}}) - r(b'GCMotion', b'setValueChangedHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GCMotion', b'userAcceleration', {'retval': {'type': b'{_GCAcceleration=ddd}'}}) - r(b'GCMotion', b'valueChangedHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/GameController/_metadata.pyc b/env/lib/python2.7/site-packages/GameController/_metadata.pyc deleted file mode 100644 index 3c75347c..00000000 Binary files a/env/lib/python2.7/site-packages/GameController/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameKit/_GameKit.so b/env/lib/python2.7/site-packages/GameKit/_GameKit.so deleted file mode 100755 index 56bf0c76..00000000 Binary files a/env/lib/python2.7/site-packages/GameKit/_GameKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameKit/__init__.py b/env/lib/python2.7/site-packages/GameKit/__init__.py deleted file mode 100644 index a9f03e18..00000000 --- a/env/lib/python2.7/site-packages/GameKit/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the GameKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Cocoa - -from GameKit import _metadata -from GameKit._GameKit import * - - -sys.modules['GameKit'] = mod = objc.ObjCLazyModule( - "GameKit", - "com.apple.GameKit", - objc.pathForFramework("/System/Library/Frameworks/GameKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa,)) - -import sys -del sys.modules['GameKit._metadata'] diff --git a/env/lib/python2.7/site-packages/GameKit/__init__.pyc b/env/lib/python2.7/site-packages/GameKit/__init__.pyc deleted file mode 100644 index 3c041d1e..00000000 Binary files a/env/lib/python2.7/site-packages/GameKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameKit/_metadata.py b/env/lib/python2.7/site-packages/GameKit/_metadata.py deleted file mode 100644 index b382470c..00000000 --- a/env/lib/python2.7/site-packages/GameKit/_metadata.py +++ /dev/null @@ -1,242 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Feb 4 13:19:39 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$GKErrorDomain$GKExchangeTimeoutDefault@d$GKExchangeTimeoutNone@d$GKGameSessionErrorDomain$GKPlayerAuthenticationDidChangeNotificationName$GKPlayerDidChangeNotificationName$GKSessionErrorDomain$GKTurnTimeoutDefault@d$GKTurnTimeoutNone@d$GKVoiceChatServiceErrorDomain$''' -enums = '''$GKAuthenticatingWithAuthKitInvocation@2$GKAuthenticatingWithGreenBuddyUI@1$GKAuthenticatingWithoutUI@0$GKChallengeStateCompleted@2$GKChallengeStateDeclined@3$GKChallengeStateInvalid@0$GKChallengeStatePending@1$GKConnectionStateConnected@1$GKConnectionStateNotConnected@0$GKErrorAuthenticationInProgress@7$GKErrorCancelled@2$GKErrorChallengeInvalid@19$GKErrorCommunicationsFailure@3$GKErrorGameSessionRequestInvalid@29$GKErrorGameUnrecognized@15$GKErrorInvalidCredentials@5$GKErrorInvalidParameter@17$GKErrorInvalidPlayer@8$GKErrorInvitationsDisabled@25$GKErrorMatchNotConnected@28$GKErrorMatchRequestInvalid@13$GKErrorNotAuthenticated@6$GKErrorNotSupported@16$GKErrorParentalControlsBlocked@10$GKErrorPlayerPhotoFailure@26$GKErrorPlayerStatusExceedsMaximumLength@11$GKErrorPlayerStatusInvalid@12$GKErrorScoreNotSet@9$GKErrorTurnBasedInvalidParticipant@22$GKErrorTurnBasedInvalidState@24$GKErrorTurnBasedInvalidTurn@23$GKErrorTurnBasedMatchDataTooLarge@20$GKErrorTurnBasedTooManySessions@21$GKErrorUbiquityContainerUnavailable@27$GKErrorUnderage@14$GKErrorUnexpectedConnection@18$GKErrorUnknown@1$GKErrorUserDenied@4$GKGameCenterViewControllerStateAchievements@1$GKGameCenterViewControllerStateChallenges@2$GKGameCenterViewControllerStateDefault@-1$GKGameCenterViewControllerStateLeaderboards@0$GKGameSessionErrorBadContainer@12$GKGameSessionErrorCloudDriveDisabled@15$GKGameSessionErrorCloudQuotaExceeded@13$GKGameSessionErrorConnectionCancelledByUser@5$GKGameSessionErrorConnectionFailed@6$GKGameSessionErrorInvalidSession@16$GKGameSessionErrorNetworkFailure@14$GKGameSessionErrorNotAuthenticated@2$GKGameSessionErrorSendDataNoRecipients@9$GKGameSessionErrorSendDataNotConnected@8$GKGameSessionErrorSendDataNotReachable@10$GKGameSessionErrorSendRateLimitReached@11$GKGameSessionErrorSessionConflict@3$GKGameSessionErrorSessionHasMaxConnectedPlayers@7$GKGameSessionErrorSessionNotShared@4$GKGameSessionErrorUnknown
@1$GKInviteRecipientResponseAccepted@0$GKInviteRecipientResponseDeclined@1$GKInviteRecipientResponseFailed@2$GKInviteRecipientResponseIncompatible@3$GKInviteRecipientResponseNoAnswer@5$GKInviteRecipientResponseUnableToConnect@4$GKInviteeResponseAccepted@0$GKInviteeResponseDeclined@1$GKInviteeResponseFailed@2$GKInviteeResponseIncompatible@3$GKInviteeResponseNoAnswer@5$GKInviteeResponseUnableToConnect@4$GKLeaderboardPlayerScopeFriendsOnly@1$GKLeaderboardPlayerScopeGlobal@0$GKLeaderboardTimeScopeAllTime@2$GKLeaderboardTimeScopeToday@0$GKLeaderboardTimeScopeWeek@1$GKMatchSendDataReliable@0$GKMatchSendDataUnreliable@1$GKMatchTypeHosted@1$GKMatchTypePeerToPeer@0$GKMatchTypeTurnBased@2$GKPeerStateAvailable@0$GKPeerStateConnected@2$GKPeerStateConnecting@4$GKPeerStateDisconnected@3$GKPeerStateUnavailable@1$GKPhotoSizeNormal@1$GKPhotoSizeSmall@0$GKPlayerStateConnected@1$GKPlayerStateDisconnected@2$GKPlayerStateUnknown@0$GKSendDataReliable@0$GKSendDataUnreliable@1$GKSessionCancelledError@30504$GKSessionCannotEnableError@30509$GKSessionConnectionClosedError@30506$GKSessionConnectionFailedError@30505$GKSessionConnectivityError@30201$GKSessionDataTooBigError@30507$GKSessionDeclinedError@30502$GKSessionInProgressError@30510$GKSessionInternalError@30203$GKSessionInvalidParameterError@30500$GKSessionModeClient@1$GKSessionModePeer@2$GKSessionModeServer@0$GKSessionNotConnectedError@30508$GKSessionPeerNotFoundError@30501$GKSessionSystemError@30205$GKSessionTimedOutError@30503$GKSessionTransportError@30202$GKSessionUnknownError@30204$GKTransportTypeReliable@1$GKTransportTypeUnreliable@0$GKTurnBasedExchangeStatusActive@1$GKTurnBasedExchangeStatusCanceled@4$GKTurnBasedExchangeStatusComplete@2$GKTurnBasedExchangeStatusResolved@3$GKTurnBasedExchangeStatusUnknown@0$GKTurnBasedMatchOutcomeCustomRange@16711680$GKTurnBasedMatchOutcomeFirst@6$GKTurnBasedMatchOutcomeFourth@9$GKTurnBasedMatchOutcomeLost@3$GKTurnBasedMatchOutcomeNone@0$GKTurnBasedMatchOutcomeQuit@1$GKTurnBasedMatchOutcomeSecond@7$GKTurnBasedMatchOutcomeThird@8$GKTurnBasedMatchOutcomeTied@4$GKTurnBasedMatchOutcomeTimeExpired@5$GKTurnBasedMatchOutcomeWon@2$GKTurnBasedMatchStatusEnded@2$GKTurnBasedMatchStatusMatching@3$GKTurnBasedMatchStatusOpen@1$GKTurnBasedMatchStatusUnknown@0$GKTurnBasedParticipantStatusActive@4$GKTurnBasedParticipantStatusDeclined@2$GKTurnBasedParticipantStatusDone@5$GKTurnBasedParticipantStatusInvited@1$GKTurnBasedParticipantStatusMatching@3$GKTurnBasedParticipantStatusUnknown@0$GKVoiceChatPlayerConnected@0$GKVoiceChatPlayerConnecting@4$GKVoiceChatPlayerDisconnected@1$GKVoiceChatPlayerSilent@3$GKVoiceChatPlayerSpeaking@2$GKVoiceChatServiceAudioUnavailableError@32005$GKVoiceChatServiceClientMissingRequiredMethodsError@32007$GKVoiceChatServiceInternalError@32000$GKVoiceChatServiceInvalidCallIDError@32004$GKVoiceChatServiceInvalidParameterError@32016$GKVoiceChatServiceMethodCurrentlyInvalidError@32012$GKVoiceChatServiceNetworkConfigurationError@32013$GKVoiceChatServiceNoRemotePacketsError@32001$GKVoiceChatServiceOutOfMemoryError@32015$GKVoiceChatServiceRemoteParticipantBusyError@32008$GKVoiceChatServiceRemoteParticipantCancelledError@32009$GKVoiceChatServiceRemoteParticipantDeclinedInviteError@32011$GKVoiceChatServiceRemoteParticipantHangupError@32003$GKVoiceChatServiceRemoteParticipantResponseInvalidError@32010$GKVoiceChatServiceUnableToConnectError@32002$GKVoiceChatServiceUninitializedClientError@32006$GKVoiceChatServiceUnsupportedRemoteVersionError@32014$''' -misc.update({}) -aliases = {'GKInviteeResponseAccepted': 
'GKInviteRecipientResponseAccepted', 'GKInviteeResponseNoAnswer': 'GKInviteRecipientResponseNoAnswer', 'GKInviteeResponseFailed': 'GKInviteRecipientResponseFailed', 'GKInviteeResponseIncompatible': 'GKInviteRecipientResponseIncompatible', 'GKInviteeResponseDeclined': 'GKInviteRecipientResponseDeclined', 'GKInviteeResponseUnableToConnect': 'GKInviteRecipientResponseUnableToConnect'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'GKAchievement', b'challengeComposeControllerWithMessage:players:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}, 3: {'type': b'@'}}}}}}) - r(b'GKAchievement', b'isCompleted', {'retval': {'type': b'Z'}}) - r(b'GKAchievement', b'isHidden', {'retval': {'type': b'Z'}}) - r(b'GKAchievement', b'loadAchievementsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKAchievement', b'reportAchievementWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKAchievement', b'reportAchievements:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKAchievement', b'reportAchievements:withEligibleChallenges:withCompletionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKAchievement', b'resetAchievementsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKAchievement', b'selectChallengeablePlayerIDs:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKAchievement', b'selectChallengeablePlayers:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKAchievement', b'setShowsCompletionBanner:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKAchievement', b'showsCompletionBanner', {'retval': {'type': b'Z'}}) - r(b'GKAchievementDescription', b'isHidden', {'retval': {'type': b'Z'}}) - r(b'GKAchievementDescription', b'isReplayable', {'retval': {'type': b'Z'}}) - r(b'GKAchievementDescription', b'loadAchievementDescriptionsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKAchievementDescription', b'loadImageWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKChallenge', b'challengeComposeControllerWithMessage:players:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}, 3: {'type': b'@'}}}}}}) - r(b'GKChallenge', b'loadReceivedChallengesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKChallenge', b'reportScores:withEligibleChallenges:withCompletionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': 
{0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKCloudPlayer', b'getCurrentSignedInPlayerForContainer:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKDialogController', b'presentViewController:', {'retval': {'type': b'Z'}}) - r(b'GKGameSession', b'clearBadgeForPlayers:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'createSessionInContainer:withTitle:maxConnectedPlayers:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'getShareURLWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'loadDataWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'loadSessionWithIdentifier:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'loadSessionsInContainer:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'removeSessionWithIdentifier:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'saveData:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'sendData:withTransportType:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'sendMessageWithLocalizedFormatKey:arguments:data:toPlayers:badgePlayers:completionHandler:', {'arguments': {6: {'type': b'Z'}, 7: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKGameSession', b'setConnectionState:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKInvite', b'isHosted', {'retval': {'type': b'Z'}}) - r(b'GKLeaderboard', b'isLoading', {'retval': {'type': b'Z'}}) - r(b'GKLeaderboard', b'loadCategoriesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) - r(b'GKLeaderboard', b'loadImageWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLeaderboard', b'loadLeaderboardsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLeaderboard', b'loadScoresWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLeaderboard', b'setDefaultLeaderboard:withCompletionHandler:', 
{'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKLeaderboardSet', b'loadImageWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'GKLeaderboardSet', b'loadLeaderboardSetsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLeaderboardSet', b'loadLeaderboardsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'authenticateHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'GKLocalPlayer', b'authenticateWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'deleteSavedGamesWithName:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'fetchSavedGamesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'generateIdentityVerificationSignatureWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'Q'}, 5: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'isAuthenticated', {'retval': {'type': b'Z'}}) - r(b'GKLocalPlayer', b'isUnderage', {'retval': {'type': b'Z'}}) - r(b'GKLocalPlayer', b'loadDefaultLeaderboardCategoryIDWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'loadDefaultLeaderboardIdentifierWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'loadFriendPlayersWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'loadFriendsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'loadRecentPlayersWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'resolveConflictingSavedGames:withData:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'saveGameData:withName:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'setAuthenticateHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', 
b'setDefaultLeaderboardCategoryID:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKLocalPlayer', b'setDefaultLeaderboardIdentifier:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKMatch', b'chooseBestHostPlayerWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKMatch', b'chooseBestHostingPlayerWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKMatch', b'rematchWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKMatch', b'sendData:toPlayers:dataMode:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'GKMatch', b'sendData:toPlayers:withDataMode:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'GKMatch', b'sendDataToAllPlayers:withDataMode:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'GKMatchRequest', b'inviteeResponseHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'i', b'q')}}}}}) - r(b'GKMatchRequest', b'recipientResponseHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'i', b'q')}}}}}) - r(b'GKMatchRequest', b'setInviteeResponseHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'GKMatchRequest', b'setRecipientResponseHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'GKMatchmaker', b'addPlayersToMatch:matchRequest:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKMatchmaker', b'findMatchForRequest:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKMatchmaker', b'findPlayersForHostedMatchRequest:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKMatchmaker', b'findPlayersForHostedRequest:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKMatchmaker', b'inviteHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'GKMatchmaker', b'matchForInvite:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKMatchmaker', b'queryActivityWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}, 2: {'type': b'@'}}}}}}) - r(b'GKMatchmaker', 
b'queryPlayerGroupActivity:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}, 2: {'type': b'@'}}}}}}) - r(b'GKMatchmaker', b'setInviteHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKMatchmaker', b'startBrowsingForNearbyPlayersWithHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}}}}}}) - r(b'GKMatchmaker', b'startBrowsingForNearbyPlayersWithReachableHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}}}}}}) - r(b'GKMatchmakerViewController', b'isHosted', {'retval': {'type': b'Z'}}) - r(b'GKMatchmakerViewController', b'setHosted:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKMatchmakerViewController', b'setHostedPlayer:connected:', {'arguments': {3: {'type': b'Z'}}}) - r(b'GKMatchmakerViewController', b'setHostedPlayer:didConnect:', {'arguments': {3: {'type': b'Z'}}}) - r(b'GKNotificationBanner', b'showBannerWithTitle:message:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'GKNotificationBanner', b'showBannerWithTitle:message:duration:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'GKPlayer', b'isFriend', {'retval': {'type': b'Z'}}) - r(b'GKPlayer', b'loadPhotoForSize:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKPlayer', b'loadPlayersForIdentifiers:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKSavedGame', b'loadDataWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKScore', b'challengeComposeControllerWithMessage:players:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'Z'}, 3: {'type': b'@'}}}}}}) - r(b'GKScore', b'reportScoreWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKScore', b'reportScores:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKScore', b'reportScores:withEligibleChallenges:withCompletionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKScore', b'setShouldSetDefaultLeaderboard:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKScore', b'shouldSetDefaultLeaderboard', {'retval': {'type': b'Z'}}) - r(b'GKSession', b'acceptConnectionFromPeer:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'GKSession', b'isAvailable', {'retval': {'type': b'Z'}}) - r(b'GKSession', b'sendData:toPeers:withDataMode:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'GKSession', b'sendDataToAllPeers:withDataMode:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': 
b'o'}}}) - r(b'GKSession', b'setAvailable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKSession', b'setDataReceiveHandler:withContext:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'GKTurnBasedExchange', b'cancelWithLocalizableMessageKey:arguments:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedExchange', b'replyWithLocalizableMessageKey:arguments:data:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'acceptInviteWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'cancelWithLocalizableMessageKey:arguments:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'declineInviteWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'endMatchInTurnWithMatchData:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'endMatchInTurnWithMatchData:scores:achievements:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'endTurnWithNextParticipant:matchData:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'endTurnWithNextParticipants:turnTimeout:matchData:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'findMatchForRequest:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'loadMatchDataWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'loadMatchWithID:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'loadMatchesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'participantQuitInTurnWithOutcome:nextParticipant:matchData:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'participantQuitInTurnWithOutcome:nextParticipants:turnTimeout:matchData:completionHandler:', {'arguments': {6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'participantQuitOutOfTurnWithOutcome:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'rematchWithCompletionHandler:', {'arguments': 
{2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'removeWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'replyWithLocalizableMessageKey:arguments:data:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'saveCurrentTurnWithMatchData:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'saveMergedMatchData:withResolvedExchanges:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'sendExchangeToParticipants:data:localizableMessageKey:arguments:timeout:completionHandler:', {'arguments': {7: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatch', b'sendReminderToParticipants:localizableMessageKey:arguments:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKTurnBasedMatchmakerViewController', b'setShowExistingMatches:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKTurnBasedMatchmakerViewController', b'showExistingMatches', {'retval': {'type': b'Z'}}) - r(b'GKVoiceChat', b'isActive', {'retval': {'type': b'Z'}}) - r(b'GKVoiceChat', b'isVoIPAllowed', {'retval': {'type': b'Z'}}) - r(b'GKVoiceChat', b'playerStateUpdateHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'i', b'q')}}}}}) - r(b'GKVoiceChat', b'playerVoiceChatStateDidChangeHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'i', b'q')}}}}}) - r(b'GKVoiceChat', b'setActive:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKVoiceChat', b'setMute:forPlayer:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKVoiceChat', b'setPlayer:muted:', {'arguments': {3: {'type': b'Z'}}}) - r(b'GKVoiceChat', b'setPlayerStateUpdateHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'GKVoiceChat', b'setPlayerVoiceChatStateDidChangeHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'GKVoiceChatService', b'acceptCallID:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'GKVoiceChatService', b'isInputMeteringEnabled', {'retval': {'type': b'Z'}}) - r(b'GKVoiceChatService', b'isMicrophoneMuted', {'retval': {'type': b'Z'}}) - r(b'GKVoiceChatService', b'isOutputMeteringEnabled', {'retval': {'type': b'Z'}}) - r(b'GKVoiceChatService', b'isVoIPAllowed', {'retval': {'type': b'Z'}}) - r(b'GKVoiceChatService', b'setInputMeteringEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKVoiceChatService', b'setMicrophoneMuted:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKVoiceChatService', b'setOutputMeteringEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKVoiceChatService', b'startVoiceChatWithParticipantID:error:', {'retval': 
{'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NSObject', b'achievementViewControllerDidFinish:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'challengesViewControllerDidFinish:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'friendRequestComposeViewControllerDidFinish:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'gameCenterViewControllerDidFinish:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'handleInviteFromGameCenter:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'handleMatchEnded:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'handleTurnEventForMatch:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'handleTurnEventForMatch:didBecomeActive:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'Z'}}}) - r(b'NSObject', b'leaderboardViewControllerDidFinish:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'localPlayerDidCompleteChallenge:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'localPlayerDidReceiveChallenge:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'localPlayerDidSelectChallenge:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'match:didFailWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'match:didReceiveData:forRecipient:fromRemotePlayer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'match:didReceiveData:fromPlayer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'match:didReceiveData:fromRemotePlayer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'match:player:didChangeConnectionState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'match:player:didChangeState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'match:shouldReinviteDisconnectedPlayer:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'match:shouldReinvitePlayer:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'matchmakerViewController:didFailWithError:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'matchmakerViewController:didFindHostedPlayers:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'matchmakerViewController:didFindMatch:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': 
b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'matchmakerViewController:didFindPlayers:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'matchmakerViewController:didReceiveAcceptFromHostedPlayer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'matchmakerViewController:hostedPlayerDidAccept:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'matchmakerViewControllerWasCancelled:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'participantID', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'player:didAcceptInvite:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'player:didCompleteChallenge:issuedByFriend:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'player:didModifySavedGame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'player:didReceiveChallenge:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'player:didRequestMatchWithOtherPlayers:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'player:didRequestMatchWithPlayers:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'player:didRequestMatchWithRecipients:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'player:hasConflictingSavedGames:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'player:issuedChallengeWasCompleted:byFriend:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'player:matchEnded:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'player:receivedExchangeCancellation:forMatch:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'player:receivedExchangeReplies:forCompletedExchange:forMatch:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'player:receivedExchangeRequest:forMatch:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'player:receivedTurnEventForMatch:didBecomeActive:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'Z'}}}) - r(b'NSObject', b'player:wantsToPlayChallenge:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'player:wantsToQuitMatch:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'remotePlayerDidCompleteChallenge:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - 
r(b'NSObject', b'session:connectionWithPeerFailed:withError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'session:didAddPlayer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'session:didFailWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'session:didReceiveConnectionRequestFromPeer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'session:didReceiveData:fromPlayer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'session:didReceiveMessage:withData:fromPlayer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'session:didRemovePlayer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'session:peer:didChangeState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'i'}}}) - r(b'NSObject', b'session:player:didChangeConnectionState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'session:player:didSaveData:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'shouldShowBannerForLocallyCompletedChallenge:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'shouldShowBannerForLocallyReceivedChallenge:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'shouldShowBannerForRemotelyCompletedChallenge:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'turnBasedMatchmakerViewController:didFailWithError:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'turnBasedMatchmakerViewController:didFindMatch:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'turnBasedMatchmakerViewController:playerQuitForMatch:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'turnBasedMatchmakerViewControllerWasCancelled:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'voiceChatService:didNotStartWithParticipantID:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'voiceChatService:didReceiveInvitationFromParticipantID:callID:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'voiceChatService:didStartWithParticipantID:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'voiceChatService:didStopWithParticipantID:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 
4: {'type': b'@'}}}) - r(b'NSObject', b'voiceChatService:sendData:toParticipantID:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'voiceChatService:sendRealTimeData:toParticipantID:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/GameKit/_metadata.pyc b/env/lib/python2.7/site-packages/GameKit/_metadata.pyc deleted file mode 100644 index 9f57eb97..00000000 Binary files a/env/lib/python2.7/site-packages/GameKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameplayKit/_GameplayKit.so b/env/lib/python2.7/site-packages/GameplayKit/_GameplayKit.so deleted file mode 100755 index 83b6ea70..00000000 Binary files a/env/lib/python2.7/site-packages/GameplayKit/_GameplayKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameplayKit/__init__.py b/env/lib/python2.7/site-packages/GameplayKit/__init__.py deleted file mode 100644 index 782bfded..00000000 --- a/env/lib/python2.7/site-packages/GameplayKit/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -''' -Python mapping for the GameplayKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Cocoa -import SpriteKit - -from GameplayKit import _metadata -from GameplayKit._GameplayKit import * - - -sys.modules['GameplayKit'] = mod = objc.ObjCLazyModule( - "GameplayKit", - "com.apple.GameplayKit", - objc.pathForFramework("/System/Library/Frameworks/GameplayKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa, SpriteKit)) - -import sys -del sys.modules['GameplayKit._metadata'] diff --git a/env/lib/python2.7/site-packages/GameplayKit/__init__.pyc b/env/lib/python2.7/site-packages/GameplayKit/__init__.pyc deleted file mode 100644 index b0695dd9..00000000 Binary files a/env/lib/python2.7/site-packages/GameplayKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/GameplayKit/_metadata.py b/env/lib/python2.7/site-packages/GameplayKit/_metadata.py deleted file mode 100644 index ca42a089..00000000 --- a/env/lib/python2.7/site-packages/GameplayKit/_metadata.py +++ /dev/null @@ -1,96 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Sep 30 22:14:19 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'GKBox': objc.createStructType('GKBox', b'{GKBox=}', ['boxMin', 'boxMax']), 'GKQuad': objc.createStructType('GKQuad', b'{GKQuad=}', ['quadMin', 'quadMax'])}) -constants = '''$$''' -enums = '''$GKGameModelMaxScore@16777216$GKGameModelMinScore@-16777216$GKMeshGraphTriangulationModeCenters@2$GKMeshGraphTriangulationModeEdgeMidpoints@4$GKMeshGraphTriangulationModeVertices@1$GKRTreeSplitStrategyHalve@0$GKRTreeSplitStrategyLinear@1$GKRTreeSplitStrategyQuadratic@2$GKRTreeSplitStrategyReduceOverlap@3$GK_VERSION@80000000$''' -misc.update({}) -aliases = {'GK_AVAILABLE': '__OSX_AVAILABLE_STARTING'} -r = 
objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'GKAgent3D', b'rightHanded', {'retval': {'type': b'Z'}}) - r(b'GKAgent3D', b'rotation', {'retval': {'type': b'{_matrix_float3x3=?}'}}) - r(b'GKAgent3D', b'setRightHanded:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKAgent3D', b'setRotation:', {'arguments': {2: {'type': b'{_matrix_float3x3=?}'}}}) - r(b'GKDecisionTree', b'exportToURL:error:', {'retval': {'type': b'Z'}}) - r(b'GKGoal', b'goalToFollowPath:maxPredictionTime:forward:', {'arguments': {4: {'type': b'Z'}}}) - r(b'GKGraph', b'connectNodeToLowestCostNode:bidirectional:', {'arguments': {3: {'type': b'Z'}}}) - r(b'GKGraphNode', b'addConnectionsToNodes:bidirectional:', {'arguments': {3: {'type': b'Z'}}}) - r(b'GKGraphNode', b'removeConnectionsToNodes:bidirectional:', {'arguments': {3: {'type': b'Z'}}}) - r(b'GKGridGraph', b'diagonalsAllowed', {'retval': {'type': b'Z'}}) - r(b'GKGridGraph', b'graphFromGridStartingAt:width:height:diagonalsAllowed:', {'arguments': {5: {'type': b'Z'}}}) - r(b'GKGridGraph', b'graphFromGridStartingAt:width:height:diagonalsAllowed:nodeClass:', {'arguments': {5: {'type': b'Z'}}}) - r(b'GKGridGraph', b'initFromGridStartingAt:width:height:diagonalsAllowed:', {'arguments': {5: {'type': b'Z'}}}) - r(b'GKGridGraph', b'initFromGridStartingAt:width:height:diagonalsAllowed:nodeClass:', {'arguments': {5: {'type': b'Z'}}}) - r(b'GKNSPredicateRule', b'evaluatePredicateWithSystem:', {'retval': {'type': b'Z'}}) - r(b'GKNoise', b'remapValuesToTerracesWithPeaks:terracesInverted:', {'arguments': {3: {'type': b'Z'}}}) - r(b'GKNoiseMap', b'initWithNoise:size:origin:sampleCount:seamless:', {'arguments': {6: {'type': b'Z'}}}) - r(b'GKNoiseMap', b'isSeamless', {'retval': {'type': b'Z'}}) - r(b'GKNoiseMap', b'noiseMapWithNoise:size:origin:sampleCount:seamless:', {'arguments': {6: {'type': b'Z'}}}) - r(b'GKObstacleGraph', b'isConnectionLockedFromNode:toNode:', {'retval': {'type': b'Z'}}) - r(b'GKOctree', b'removeElement:', {'retval': {'type': b'Z'}}) - r(b'GKOctree', b'removeElement:withNode:', {'retval': {'type': b'Z'}}) - r(b'GKPath', b'initWithFloat3Points:count:radius:cyclical:', {'arguments': {2: {'type': 'n', 'arg_size_in_arg': 1}, 5: {'type': b'Z'}}}) - r(b'GKPath', b'initWithPoints:count:radius:cyclical:', {'arguments': {2: {'type': 'n', 'arg_size_in_arg': 1}, 5: {'type': b'Z'}}}) - r(b'GKPath', b'isCyclical', {'retval': {'type': b'Z'}}) - r(b'GKPath', b'pathWithFloat3Points:count:radius:cyclical:', {'arguments': {2: {'type': 'n', 'arg_size_in_arg': 1}, 5: {'type': b'Z'}}}) - r(b'GKPath', b'pathWithPoints:count:radius:cyclical:', {'arguments': {2: {'type': 'n', 'arg_size_in_arg': 1}, 5: {'type': b'Z'}}}) - r(b'GKPath', b'pointAtIndex:', {'deprecated': 1012}) - r(b'GKPath', b'setCyclical:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKPolygonObstacle', b'initWithPoints:count:', {'arguments': {2: {'type': 'n', 'arg_size_in_arg': 1}}}) - r(b'GKPolygonObstacle', b'obstacleWithPoints:count:', {'arguments': {2: {'type': 'n', 'arg_size_in_arg': 1}}}) - r(b'GKQuadtree', b'removeElement:', {'retval': {'type': b'Z'}}) - r(b'GKQuadtree', b'removeElement:withNode:', {'retval': {'type': b'Z'}}) - r(b'GKRandomDistribution', b'nextBool', {'retval': {'type': b'Z'}}) - r(b'GKRule', b'evaluatePredicateWithSystem:', {'retval': {'type': b'Z'}}) - r(b'GKRule', b'ruleWithBlockPredicate:action:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: 
{'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'GKState', b'isValidNextState:', {'retval': {'type': b'Z'}}) - r(b'GKStateMachine', b'canEnterState:', {'retval': {'type': b'Z'}}) - r(b'GKStateMachine', b'enterState:', {'retval': {'type': b'Z'}}) - r(b'GKVoronoiNoiseSource', b'initWithFrequency:displacement:distanceEnabled:seed:', {'arguments': {4: {'type': b'Z'}}}) - r(b'GKVoronoiNoiseSource', b'isDistanceEnabled', {'retval': {'type': b'Z'}}) - r(b'GKVoronoiNoiseSource', b'setDistanceEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'GKVoronoiNoiseSource', b'voronoiNoiseWithFrequency:displacement:distanceEnabled:seed:', {'arguments': {4: {'type': b'Z'}}}) - r(b'NSObject', b'activePlayer', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'agentDidUpdate:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'agentWillUpdate:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'applyGameModelUpdate:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'bestMoveForActivePlayer', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'gameModel', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'gameModelUpdatesForPlayer:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isLossForPlayer:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isWinForPlayer:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'nextBool', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'nextInt', {'required': True, 'retval': {'type': b'q'}}) - r(b'NSObject', b'nextIntWithUpperBound:', {'required': True, 'retval': {'type': b'Q'}, 'arguments': {2: {'type': b'Q'}}}) - r(b'NSObject', b'nextUniform', {'required': True, 'retval': {'type': b'f'}}) - r(b'NSObject', b'playerId', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'players', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'randomSource', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'scoreForPlayer:', {'required': False, 'retval': {'type': b'q'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setGameModel:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setPlayerId:', {'arguments': {2: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'setRandomSource:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setValue:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'q'}}}) - r(b'NSObject', b'unapplyGameModelUpdate:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'value', {'required': True, 'retval': {'type': b'q'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/GameplayKit/_metadata.pyc b/env/lib/python2.7/site-packages/GameplayKit/_metadata.pyc deleted file mode 100644 index 2873ffbb..00000000 Binary files a/env/lib/python2.7/site-packages/GameplayKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/HIServices/__init__.py b/env/lib/python2.7/site-packages/HIServices/__init__.py deleted file mode 100644 index e09c26e9..00000000 --- 
a/env/lib/python2.7/site-packages/HIServices/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the HIServices framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Cocoa - -from HIServices import _metadata - -sys.modules['HIServices'] = mod = objc.ObjCLazyModule('HIServices', - "com.apple.ApplicationServices", - objc.pathForFramework("/System/Library/Frameworks/ApplicationServices.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Cocoa, )) - -import sys -del sys.modules['HIServices._metadata'] diff --git a/env/lib/python2.7/site-packages/HIServices/__init__.pyc b/env/lib/python2.7/site-packages/HIServices/__init__.pyc deleted file mode 100644 index fb998837..00000000 Binary files a/env/lib/python2.7/site-packages/HIServices/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/HIServices/_metadata.py b/env/lib/python2.7/site-packages/HIServices/_metadata.py deleted file mode 100644 index 437d3271..00000000 --- a/env/lib/python2.7/site-packages/HIServices/_metadata.py +++ /dev/null @@ -1,28 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Feb 17 22:42:31 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'ProcessInfoExtendedRec': objc.createStructType('ProcessInfoExtendedRec', sel32or64(b'{ProcessInfoExtendedRec=L^C{ProcessSerialNumber=LL}LLL^cLL{ProcessSerialNumber=LL}LL^{FSSpec=sl[64C]}LL}', b'{ProcessInfoExtendedRec=I^C{ProcessSerialNumber=II}III^cII{ProcessSerialNumber=II}II^{FSRef=[80C]}II}'), ['processInfoLength', 'processName', 'processNumber', 'processType', 'processSignature', 'processMode', 'processLocation', 'processSize', 'processFreeMem', 'processLauncher', 'processLaunchDate', 'processActiveTime', 'processAppSpec', 'processTempMemTotal', 'processPurgeableTempMemTotal']), 'ProcessInfoRec': objc.createStructType('ProcessInfoRec', sel32or64(b'{ProcessInfoRec=L^C{ProcessSerialNumber=LL}LLL^cLL{ProcessSerialNumber=LL}LL^{FSSpec=sl[64C]}}', b'{ProcessInfoRec=I^C{ProcessSerialNumber=II}III^cII{ProcessSerialNumber=II}II^{FSRef=[80C]}}'), ['processInfoLength', 'processName', 'processNumber', 'processType', 'processSignature', 'processMode', 'processLocation', 'processSize', 'processFreeMem', 'processLauncher', 'processLaunchDate', 'processActiveTime', 'processAppSpec']), 'LaunchParamBlockRec': objc.createStructType('LaunchParamBlockRec', sel32or64(b'{LaunchParamBlockRec=LSSLSS^{FSSpec=sl[64C]}{ProcessSerialNumber=LL}LLL^{AppParameters={_struct AppParameters::=SLL{Point=ss}S}{=SLL{Point=ss}S}LL}}', b'{LaunchParamBlockRec=ISSISS^{FSRef=[80C]}{ProcessSerialNumber=II}III^{AppParameters={_struct AppParameters::=SII{Point=ss}S}{=SII{Point=ss}S}II}}'), ['reserved1', 'reserved2', 'launchBlockID', 'launchEPBLength', 'launchFileFlags', 'launchControlFlags', 'launchAppSpec', 'launchProcessSN', 'launchPreferredSize', 'launchMinimumSize', 'launchAvailableSize', 'launchAppParameters']), 'SizeResourceRec': objc.createStructType('SizeResourceRec', sel32or64(b'{SizeResourceRec=SLL}', b'{SizeResourceRec=SII}'), ['flags', 'preferredHeapSize', 'minimumHeapSize']), 'CIcon': 
objc.createStructType('CIcon', b'{CIcon={PixMap=^cs{Rect=ssss}sslllssssL^^{ColorTable=lss[1{ColorSpec=s{RGBColor=SSS}}]}^v}{BitMap=^cs{Rect=ssss}}{BitMap=^cs{Rect=ssss}}^^c[1s]}', ['iconPMap', 'iconMask', 'iconBMap', 'iconData', 'iconMaskData']), 'AppParameters': objc.createStructType('AppParameters', sel32or64(b'{AppParameters={_struct AppParameters::=SLL{Point=ss}S}{=SLL{Point=ss}S}LL}', b'{AppParameters={_struct AppParameters::=SII{Point=ss}S}{=SII{Point=ss}S}II}'), ['theMsgEvent', 'eventRefCon', 'messageLength'])}) -constants = '''$kAXAttachmentTextAttribute@^{__CFString=}$kAXAutocorrectedTextAttribute@^{__CFString=}$kAXBackgroundColorTextAttribute@^{__CFString=}$kAXFontFamilyKey@^{__CFString=}$kAXFontNameKey@^{__CFString=}$kAXFontSizeKey@^{__CFString=}$kAXFontTextAttribute@^{__CFString=}$kAXForegoundColorTextAttribute@^{__CFString=}$kAXForegroundColorTextAttribute@^{__CFString=}$kAXLinkTextAttribute@^{__CFString=}$kAXMarkedMisspelledTextAttribute@^{__CFString=}$kAXMisspelledTextAttribute@^{__CFString=}$kAXNaturalLanguageTextAttribute@^{__CFString=}$kAXReplacementStringTextAttribute@^{__CFString=}$kAXShadowTextAttribute@^{__CFString=}$kAXStrikethroughColorTextAttribute@^{__CFString=}$kAXStrikethroughTextAttribute@^{__CFString=}$kAXSuperscriptTextAttribute@^{__CFString=}$kAXTrustedCheckOptionPrompt@^{__CFString=}$kAXUnderlineColorTextAttribute@^{__CFString=}$kAXUnderlineTextAttribute@^{__CFString=}$kAXVisibleNameKey@^{__CFString=}$''' -enums = '''$AX_ALLOW_OLD_SECURITY_METHOD@0$activDev@5$atAbsoluteCenter@5$atBottom@3$atBottomLeft@11$atBottomRight@15$atCenterBottom@7$atCenterLeft@9$atCenterRight@13$atCenterTop@6$atHorizontalCenter@4$atLeft@8$atNone@0$atRight@12$atTop@2$atTopLeft@10$atTopRight@14$atVerticalCenter@1$badPasteboardFlavorErr@-25133$badPasteboardIndexErr@-25131$badPasteboardItemErr@-25132$badPasteboardSyncErr@-25130$badTranslationRefErr@-3031$cdevGenErr@-1$cdevMemErr@0$cdevResErr@1$cdevUnset@3$clearDev@13$closeDev@2$copyDev@11$cursorDev@14$cutDev@10$deactivDev@6$duplicatePasteboardFlavorErr@-25134$extendedBlock@19523$hitDev@1$initDev@0$kAXCopyMultipleAttributeOptionStopOnError@1$kAXErrorAPIDisabled@-25211$kAXErrorActionUnsupported@-25206$kAXErrorAttributeUnsupported@-25205$kAXErrorCannotComplete@-25204$kAXErrorFailure@-25200$kAXErrorIllegalArgument@-25201$kAXErrorInvalidUIElement@-25202$kAXErrorInvalidUIElementObserver@-25203$kAXErrorNoValue@-25212$kAXErrorNotEnoughPrecision@-25214$kAXErrorNotImplemented@-25208$kAXErrorNotificationAlreadyRegistered@-25209$kAXErrorNotificationNotRegistered@-25210$kAXErrorNotificationUnsupported@-25207$kAXErrorParameterizedAttributeUnsupported@-25213$kAXErrorSuccess@0$kAXMenuItemModifierControl@4$kAXMenuItemModifierNoCommand@8$kAXMenuItemModifierNone@0$kAXMenuItemModifierOption@2$kAXMenuItemModifierShift@1$kAXPriorityHigh@90$kAXPriorityLow@10$kAXPriorityMedium@50$kAXUnderlineStyleDouble@9$kAXUnderlineStyleNone@0$kAXUnderlineStyleSingle@1$kAXUnderlineStyleThick@2$kAXValueAXErrorType@5$kAXValueCFRangeType@4$kAXValueCGPointType@1$kAXValueCGRectType@3$kAXValueCGSizeType@2$kAXValueIllegalType@0$kAlignAbsoluteCenter@5$kAlignBottom@3$kAlignBottomLeft@11$kAlignBottomRight@15$kAlignCenterBottom@7$kAlignCenterLeft@9$kAlignCenterRight@13$kAlignCenterTop@6$kAlignHorizontalCenter@4$kAlignLeft@8$kAlignNone@0$kAlignRight@12$kAlignTop@2$kAlignTopLeft@10$kAlignTopRight@14$kAlignVerticalCenter@1$kCurrentProcess@2$kHIShapeEnumerateInit@1$kHIShapeEnumerateRect@2$kHIShapeEnumerateTerminate@3$kHIShapeParseFromBottom@1$kHIShapeParseFromBottomRight@3$kHIShapeParseFromL
eft@0$kHIShapeParseFromRight@2$kHIShapeParseFromTop@0$kHIShapeParseFromTopLeft@0$kNoProcess@0$kPasteboardClientIsOwner@2$kPasteboardFlavorNoFlags@0$kPasteboardFlavorNotSaved@4$kPasteboardFlavorPromised@512$kPasteboardFlavorRequestOnly@8$kPasteboardFlavorSenderOnly@1$kPasteboardFlavorSenderTranslated@2$kPasteboardFlavorSystemTranslated@256$kPasteboardModified@1$kPasteboardStandardLocationTrash@1953657704$kPasteboardStandardLocationUnknown@1970170734$kPlotIconRefNoImage@2$kPlotIconRefNoMask@4$kPlotIconRefNormalFlags@0$kProcessDictionaryIncludeAllInformationMask@-1$kProcessTransformToBackgroundApplication@2$kProcessTransformToForegroundApplication@1$kProcessTransformToUIElementApplication@4$kQuitAtNormalTimeMask@2$kQuitBeforeFBAsQuitMask@4$kQuitBeforeNormalTimeMask@1$kQuitBeforeShellQuitsMask@8$kQuitBeforeTerminatorAppQuitsMask@16$kQuitNeverMask@32$kQuitNotQuitDuringInstallMask@256$kQuitNotQuitDuringLogoutMask@512$kQuitOptionsMask@127$kSelectorAll1BitData@16843009$kSelectorAll32BitData@134219784$kSelectorAll4BitData@33686018$kSelectorAll8BitData@67372036$kSelectorAllAvailableData@4294967295$kSelectorAllHugeData@4278190080$kSelectorAllLargeData@255$kSelectorAllMiniData@16711680$kSelectorAllSmallData@65280$kSelectorHuge1Bit@16777216$kSelectorHuge32Bit@134217728$kSelectorHuge4Bit@33554432$kSelectorHuge8Bit@67108864$kSelectorHuge8BitMask@268435456$kSelectorLarge1Bit@1$kSelectorLarge32Bit@8$kSelectorLarge4Bit@2$kSelectorLarge8Bit@4$kSelectorLarge8BitMask@16$kSelectorMini1Bit@65536$kSelectorMini4Bit@131072$kSelectorMini8Bit@262144$kSelectorSmall1Bit@256$kSelectorSmall32Bit@2048$kSelectorSmall4Bit@512$kSelectorSmall8Bit@1024$kSelectorSmall8BitMask@4096$kSetFrontProcessCausedByUser@2$kSetFrontProcessFrontWindowOnly@1$kSystemProcess@1$kTransformDisabled@1$kTransformLabel1@256$kTransformLabel2@512$kTransformLabel3@768$kTransformLabel4@1024$kTransformLabel5@1280$kTransformLabel6@1536$kTransformLabel7@1792$kTransformNone@0$kTransformOffline@2$kTransformOpen@3$kTransformSelected@16384$kTransformSelectedDisabled@16385$kTransformSelectedOffline@16386$kTransformSelectedOpen@16387$kTranslationDataTranslation@1$kTranslationFileTranslation@2$kUAZoomFocusTypeInsertionPoint@1$kUAZoomFocusTypeOther@0$keyEvtDev@7$launchAllow24Bit@256$launchContinue@16384$launchDontSwitch@512$launchInhibitDaemon@128$launchNoFileFlags@2048$launchUseMinimum@1024$macDev@8$mode32BitCompatible@128$modeCanBackground@4096$modeControlPanel@524288$modeDeskAccessory@131072$modeDisplayManagerAware@4$modeDoesActivateOnFGSwitch@2048$modeGetAppDiedMsg@256$modeGetFrontClicks@512$modeHighLevelEventAware@64$modeLaunchDontSwitch@262144$modeLocalAndRemoteHLEvents@32$modeMultiLaunch@65536$modeNeedSuspendResume@16384$modeOnlyBackground@1024$modeReserved@16777216$modeStationeryAware@16$modeUseTextEditServices@8$noPasteboardPromiseKeeperErr@-25136$notPasteboardOwnerErr@-25135$nulDev@3$pasteDev@12$svAll1BitData@16843009$svAll4BitData@33686018$svAll8BitData@67372036$svAllAvailableData@4294967295$svAllLargeData@255$svAllMiniData@16711680$svAllSmallData@65280$svLarge1Bit@1$svLarge4Bit@2$svLarge8Bit@4$svMini1Bit@65536$svMini4Bit@131072$svMini8Bit@262144$svSmall1Bit@256$svSmall4Bit@512$svSmall8Bit@1024$ttDisabled@1$ttLabel1@256$ttLabel2@512$ttLabel3@768$ttLabel4@1024$ttLabel5@1280$ttLabel6@1536$ttLabel7@1792$ttNone@0$ttOffline@2$ttOpen@3$ttSelected@16384$ttSelectedDisabled@16385$ttSelectedOffline@16386$ttSelectedOpen@16387$undoDev@9$updateDev@4$''' -misc.update({'extendedBlockLen': sel32or64(32, 40)}) -misc.update({'kAXTableRole': b'AXTable'.decode("utf-8"), 
'kAXAttributedStringForRangeParameterizedAttribute': b'AXAttributedStringForRange'.decode("utf-8"), 'kAXStandardWindowSubrole': b'AXStandardWindow'.decode("utf-8"), 'kAXFloatingWindowSubrole': b'AXFloatingWindow'.decode("utf-8"), 'kAXHiddenAttribute': b'AXHidden'.decode("utf-8"), 'kAXSharedCharacterRangeAttribute': b'AXSharedCharacterRange'.decode("utf-8"), 'kAXResizedNotification': b'AXResized'.decode("utf-8"), 'kAXMinValueAttribute': b'AXMinValue'.decode("utf-8"), 'kAXOutlineRole': b'AXOutline'.decode("utf-8"), 'kAXDockItemRole': b'AXDockItem'.decode("utf-8"), 'kAXFocusedWindowChangedNotification': b'AXFocusedWindowChanged'.decode("utf-8"), 'kAXPopUpButtonRole': b'AXPopUpButton'.decode("utf-8"), 'kAXWindowsAttribute': b'AXWindows'.decode("utf-8"), 'kAXMinimizedWindowDockItemSubrole': b'AXMinimizedWindowDockItem'.decode("utf-8"), 'kAXScreenSizeForLayoutSizeParameterizedAttribute': b'AXScreenSizeForLayoutSize'.decode("utf-8"), 'kAXGrowAreaAttribute': b'AXGrowArea'.decode("utf-8"), 'kAXDescendingSortDirectionValue': b'AXDescendingSortDirection'.decode("utf-8"), 'kAXMenuItemCmdCharAttribute': b'AXMenuItemCmdChar'.decode("utf-8"), 'kAXLevelIndicatorRole': b'AXLevelIndicator'.decode("utf-8"), 'kAXRoleDescriptionAttribute': b'AXRoleDescription'.decode("utf-8"), 'kAXSystemFloatingWindowSubrole': b'AXSystemFloatingWindow'.decode("utf-8"), 'kAXRowRole': b'AXRow'.decode("utf-8"), 'kAXToolbarRole': b'AXToolbar'.decode("utf-8"), 'kAXVerticalUnitsAttribute': b'AXVerticalUnits'.decode("utf-8"), 'kAXTitleUIElementAttribute': b'AXTitleUIElement'.decode("utf-8"), 'kAXPreviousContentsAttribute': b'AXPreviousContents'.decode("utf-8"), 'kAXSelectedAttribute': b'AXSelected'.decode("utf-8"), 'kAXWindowAttribute': b'AXWindow'.decode("utf-8"), 'kAXTabGroupRole': b'AXTabGroup'.decode("utf-8"), 'kAXValueWrapsAttribute': b'AXValueWraps'.decode("utf-8"), 'kAXSizeAttribute': b'AXSize'.decode("utf-8"), 'kAXSelectedTextRangesAttribute': b'AXSelectedTextRanges'.decode("utf-8"), 'kAXRangeForIndexParameterizedAttribute': b'AXRangeForIndex'.decode("utf-8"), 'kAXSelectedTextChangedNotification': b'AXSelectedTextChanged'.decode("utf-8"), 'kAXURLAttribute': b'AXURL'.decode("utf-8"), 'kAXMenuItemRole': b'AXMenuItem'.decode("utf-8"), 'kAXHelpTagRole': b'AXHelpTag'.decode("utf-8"), 'kAXParentAttribute': b'AXParent'.decode("utf-8"), 'kAXTimelineSubrole': b'AXTimeline'.decode("utf-8"), 'kAXDecrementButtonAttribute': b'AXDecrementButton'.decode("utf-8"), 'kAXFocusedApplicationAttribute': b'AXFocusedApplication'.decode("utf-8"), 'kAXMovedNotification': b'AXMoved'.decode("utf-8"), 'kAXBoundsForRangeParameterizedAttribute': b'AXBoundsForRange'.decode("utf-8"), 'kAXIncrementPageSubrole': b'AXIncrementPage'.decode("utf-8"), 'kAXFocusedUIElementAttribute': b'AXFocusedUIElement'.decode("utf-8"), 'kAXModalAttribute': b'AXModal'.decode("utf-8"), 'kAXMenuItemPrimaryUIElementAttribute': b'AXMenuItemPrimaryUIElement'.decode("utf-8"), 'kAXTimeFieldRole': b'AXTimeField'.decode("utf-8"), 'kAXRulerRole': b'AXRuler'.decode("utf-8"), 'kAXIncrementButtonAttribute': b'AXIncrementButton'.decode("utf-8"), 'kAXMenuBarAttribute': b'AXMenuBar'.decode("utf-8"), 'kAXMainWindowChangedNotification': b'AXMainWindowChanged'.decode("utf-8"), 'kAXSelectedChildrenMovedNotification': b'AXSelectedChildrenMoved'.decode("utf-8"), 'kAXColumnCountAttribute': b'AXColumnCount'.decode("utf-8"), 'kAXNextContentsAttribute': b'AXNextContents'.decode("utf-8"), 'kAXFrontmostAttribute': b'AXFrontmost'.decode("utf-8"), 'kAXShownMenuUIElementAttribute': 
b'AXShownMenuUIElement'.decode("utf-8"), 'kAXColumnTitleAttribute': b'AXColumnTitles'.decode("utf-8"), 'kAXAlternateUIVisibleAttribute': b'AXAlternateUIVisible'.decode("utf-8"), 'kAXRangeForLineParameterizedAttribute': b'AXRangeForLine'.decode("utf-8"), 'kAXSplittersAttribute': b'AXSplitters'.decode("utf-8"), 'kAXPickAction': b'AXPick'.decode("utf-8"), 'kAXOrderedByRowAttribute': b'AXOrderedByRow'.decode("utf-8"), 'kAXHelpTagCreatedNotification': b'AXHelpTagCreated'.decode("utf-8"), 'kAXStringForRangeParameterizedAttribute': b'AXStringForRange'.decode("utf-8"), 'kAXStaticTextRole': b'AXStaticText'.decode("utf-8"), 'kAXColumnRole': b'AXColumn'.decode("utf-8"), 'kAXMinimizeButtonAttribute': b'AXMinimizeButton'.decode("utf-8"), 'kAXMenuOpenedNotification': b'AXMenuOpened'.decode("utf-8"), 'kAXCellForColumnAndRowParameterizedAttribute': b'AXCellForColumnAndRow'.decode("utf-8"), 'kAXContentsAttribute': b'AXContents'.decode("utf-8"), 'kAXRadioGroupRole': b'AXRadioGroup'.decode("utf-8"), 'kAXDrawerCreatedNotification': b'AXDrawerCreated'.decode("utf-8"), 'kAXSplitGroupRole': b'AXSplitGroup'.decode("utf-8"), 'kAXDialogSubrole': b'AXDialog'.decode("utf-8"), 'kAXDisclosureLevelAttribute': b'AXDisclosureLevel'.decode("utf-8"), 'kAXMinimizedAttribute': b'AXMinimized'.decode("utf-8"), 'kAXFocusedAttribute': b'AXFocused'.decode("utf-8"), 'kAXServesAsTitleForUIElementsAttribute': b'AXServesAsTitleForUIElements'.decode("utf-8"), 'kAXMarkerUIElementsAttribute': b'AXMarkerUIElements'.decode("utf-8"), 'kAXFocusedUIElementChangedNotification': b'AXFocusedUIElementChanged'.decode("utf-8"), 'kAXDescription': b'AXDescription'.decode("utf-8"), 'kAXAnnouncementRequestedNotification': b'AXAnnouncementRequested'.decode("utf-8"), 'kAXMatteHoleAttribute': b'AXMatteHole'.decode("utf-8"), 'kAXMarkerTypeAttribute': b'AXMarkerType'.decode("utf-8"), 'kAXToggleSubrole': b'AXToggle'.decode("utf-8"), 'kAXNumberOfCharactersAttribute': b'AXNumberOfCharacters'.decode("utf-8"), 'kAXImageRole': b'AXImage'.decode("utf-8"), 'kAXFocusedWindowAttribute': b'AXFocusedWindow'.decode("utf-8"), 'kAXYearFieldAttribute': b'AXYearField'.decode("utf-8"), 'kAXMenuRole': b'AXMenu'.decode("utf-8"), 'kAXVisibleColumnsAttribute': b'AXVisibleColumns'.decode("utf-8"), 'kAXIncrementorRole': b'AXIncrementor'.decode("utf-8"), 'kAXHourFieldAttribute': b'AXHourField'.decode("utf-8"), 'kAXFolderDockItemSubrole': b'AXFolderDockItem'.decode("utf-8"), 'kAXMenuBarItemRole': b'AXMenuBarItem'.decode("utf-8"), 'kAXLayoutChangedNotification': b'AXLayoutChanged'.decode("utf-8"), 'kAXDefaultButtonAttribute': b'AXDefaultButton'.decode("utf-8"), 'kAXSearchButtonAttribute': b'AXSearchButton'.decode("utf-8"), 'kAXEnabledAttribute': b'AXEnabled'.decode("utf-8"), 'kAXRowCountAttribute': b'AXRowCount'.decode("utf-8"), 'kAXCancelButtonAttribute': b'AXCancelButton'.decode("utf-8"), 'kAXIncrementAction': b'AXIncrement'.decode("utf-8"), 'kAXRoleAttribute': b'AXRole'.decode("utf-8"), 'kAXWindowRole': b'AXWindow'.decode("utf-8"), 'kAXExpandedAttribute': b'AXExpanded'.decode("utf-8"), 'kAXTitleAttribute': b'AXTitle'.decode("utf-8"), 'kAXSubroleAttribute': b'AXSubrole'.decode("utf-8"), 'kAXSelectedChildrenAttribute': b'AXSelectedChildren'.decode("utf-8"), 'kAXPressAction': b'AXPress'.decode("utf-8"), 'kAXUnknownOrientationValue': b'AXUnknownOrientation'.decode("utf-8"), 'kAXApplicationShownNotification': b'AXApplicationShown'.decode("utf-8"), 'kAXSelectedRowsChangedNotification': b'AXSelectedRowsChanged'.decode("utf-8"), 'kPasteboardTypeFileURLPromise': 
b'com.apple.pasteboard.promised-file-url', 'kAXDockExtraDockItemSubrole': b'AXDockExtraDockItem'.decode("utf-8"), 'kAXCreatedNotification': b'AXCreated'.decode("utf-8"), 'kAXSecureTextFieldSubrole': b'AXSecureTextField'.decode("utf-8"), 'kAXHandleRole': b'AXHandle'.decode("utf-8"), 'kAXDisclosedRowsAttribute': b'AXDisclosedRows'.decode("utf-8"), 'kPasteboardClipboard': b'com.apple.pasteboard.clipboard', 'kPasteboardFind': b'com.apple.pasteboard.find', 'kAXTabsAttribute': b'AXTabs'.decode("utf-8"), 'kAXWindowMiniaturizedNotification': b'AXWindowMiniaturized'.decode("utf-8"), 'kAXOrientationAttribute': b'AXOrientation'.decode("utf-8"), 'kAXDefinitionListSubrole': b'AXDefinitionList'.decode("utf-8"), 'kAXTableRowSubrole': b'AXTableRow'.decode("utf-8"), 'kAXWindowMovedNotification': b'AXWindowMoved'.decode("utf-8"), 'kAXUnitDescriptionAttribute': b'AXUnitDescription'.decode("utf-8"), 'kAXSharedFocusElementsAttribute': b'AXSharedFocusElements'.decode("utf-8"), 'kAXStyleRangeForIndexParameterizedAttribute': b'AXStyleRangeForIndex'.decode("utf-8"), 'kAXSheetRole': b'AXSheet'.decode("utf-8"), 'kAXShowMenuAction': b'AXShowMenu'.decode("utf-8"), 'kAXMatteRole': b'AXMatte'.decode("utf-8"), 'kAXMatteContentUIElementAttribute': b'AXMatteContentUIElement'.decode("utf-8"), 'kAXBusyIndicatorRole': b'AXBusyIndicator'.decode("utf-8"), 'kAXRatingIndicatorSubrole': b'AXRatingIndicator'.decode("utf-8"), 'kAXRowExpandedNotification': b'AXRowExpanded'.decode("utf-8"), 'kAXScrollAreaRole': b'AXScrollArea'.decode("utf-8"), 'kAXVisibleCellsAttribute': b'AXVisibleCells'.decode("utf-8"), 'kAXExtrasMenuBarAttribute': b'AXExtrasMenuBar'.decode("utf-8"), 'kAXApplicationActivatedNotification': b'AXApplicationActivated'.decode("utf-8"), 'kAXUnitsAttribute': b'AXUnits'.decode("utf-8"), 'kAXRulerMarkerRole': b'AXRulerMarker'.decode("utf-8"), 'kAXSystemWideRole': b'AXSystemWide'.decode("utf-8"), 'kAXHandlesAttribute': b'AXHandles'.decode("utf-8"), 'kAXIsApplicationRunningAttribute': b'AXIsApplicationRunning'.decode("utf-8"), 'kAXMenuBarRole': b'AXMenuBar'.decode("utf-8"), 'kAXShowAlternateUIAction': b'AXShowAlternateUI'.decode("utf-8"), 'kAXSortButtonSubrole': b'AXSortButton'.decode("utf-8"), 'kAXUnknownSubrole': b'AXUnknown'.decode("utf-8"), 'kAXLabelUIElementsAttribute': b'AXLabelUIElements'.decode("utf-8"), 'kAXIncrementorAttribute': b'AXIncrementor'.decode("utf-8"), 'kAXVisibleChildrenAttribute': b'AXVisibleChildren'.decode("utf-8"), 'kAXCheckBoxRole': b'AXCheckBox'.decode("utf-8"), 'kAXSelectedCellsChangedNotification': b'AXSelectedCellsChanged'.decode("utf-8"), 'kAXCloseButtonSubrole': b'AXCloseButton'.decode("utf-8"), 'kAXColumnTitlesAttribute': b'AXColumnTitles'.decode("utf-8"), 'kAXAMPMFieldAttribute': b'AXAMPMField'.decode("utf-8"), 'kAXPopoverRole': b'AXPopover'.decode("utf-8"), 'kAXAnnouncementKey': b'AXAnnouncementKey'.decode("utf-8"), 'kAXValueIndicatorRole': b'AXValueIndicator'.decode("utf-8"), 'kAXValueChangedNotification': b'AXValueChanged'.decode("utf-8"), 'kAXSelectedColumnsChangedNotification': b'AXSelectedColumnsChanged'.decode("utf-8"), 'kAXApplicationRole': b'AXApplication'.decode("utf-8"), 'kAXColumnHeaderUIElementsAttribute': b'AXColumnHeaderUIElements'.decode("utf-8"), 'kAXMenuButtonRole': b'AXMenuButton'.decode("utf-8"), 'kAXBrowserRole': b'AXBrowser'.decode("utf-8"), 'kAXRowsAttribute': b'AXRows'.decode("utf-8"), 'kAXTextFieldRole': b'AXTextField'.decode("utf-8"), 'kAXFullScreenButtonSubrole': b'AXFullScreenButton'.decode("utf-8"), 'kAXSplitterRole': b'AXSplitter'.decode("utf-8"), 
'kAXVisibleCharacterRangeAttribute': b'AXVisibleCharacterRange'.decode("utf-8"), 'kAXUnitsChangedNotification': b'AXUnitsChanged'.decode("utf-8"), 'kAXGroupRole': b'AXGroup'.decode("utf-8"), 'kAXMinimizeButtonSubrole': b'AXMinimizeButton'.decode("utf-8"), 'kAXDecrementPageSubrole': b'AXDecrementPage'.decode("utf-8"), 'kAXElementBusyChangedNotification': b'AXElementBusyChanged'.decode("utf-8"), 'kAXTitleChangedNotification': b'AXTitleChanged'.decode("utf-8"), 'kAXToolbarButtonAttribute': b'AXToolbarButton'.decode("utf-8"), 'kAXDecrementAction': b'AXDecrement'.decode("utf-8"), 'kAXHorizontalOrientationValue': b'AXHorizontalOrientation'.decode("utf-8"), 'kAXDocumentDockItemSubrole': b'AXDocumentDockItem'.decode("utf-8"), 'kAXSearchFieldSubrole': b'AXSearchField'.decode("utf-8"), 'kAXVerticalScrollBarAttribute': b'AXVerticalScrollBar'.decode("utf-8"), 'kAXVisibleTextAttribute': b'AXVisibleText'.decode("utf-8"), 'kAXRadioButtonRole': b'AXRadioButton'.decode("utf-8"), 'kAXValueAttribute': b'AXValue'.decode("utf-8"), 'kAXDisclosedByRowAttribute': b'AXDisclosedByRow'.decode("utf-8"), 'kAXSystemDialogSubrole': b'AXSystemDialog'.decode("utf-8"), 'kAXSeparatorDockItemSubrole': b'AXSeparatorDockItem'.decode("utf-8"), 'kAXApplicationHiddenNotification': b'AXApplicationHidden'.decode("utf-8"), 'kAXRelevanceIndicatorRole': b'AXRelevanceIndicator'.decode("utf-8"), 'kAXMonthFieldAttribute': b'AXMonthField'.decode("utf-8"), 'kAXVerticalUnitDescriptionAttribute': b'AXVerticalUnitDescription'.decode("utf-8"), 'kAXValueIncrementAttribute': b'AXValueIncrement'.decode("utf-8"), 'kAXCriticalValueAttribute': b'AXCriticalValue'.decode("utf-8"), 'kAXValueDescriptionAttribute': b'AXValueDescription'.decode("utf-8"), 'kAXShowDefaultUIAction': b'AXShowDefaultUI'.decode("utf-8"), 'kAXProxyAttribute': b'AXProxy'.decode("utf-8"), 'kAXComboBoxRole': b'AXComboBox'.decode("utf-8"), 'kAXMenuItemMarkCharAttribute': b'AXMenuItemMarkChar'.decode("utf-8"), 'kAXDayFieldAttribute': b'AXDayField'.decode("utf-8"), 'kAXSelectedColumnsAttribute': b'AXSelectedColumns'.decode("utf-8"), 'kAXGrowAreaRole': b'AXGrowArea'.decode("utf-8"), 'kAXDrawerRole': b'AXDrawer'.decode("utf-8"), 'kAXLinkedUIElementsAttribute': b'AXLinkedUIElements'.decode("utf-8"), 'kAXHorizontalScrollBarAttribute': b'AXHorizontalScrollBar'.decode("utf-8"), 'kAXListRole': b'AXList'.decode("utf-8"), 'kAXMarkerTypeDescriptionAttribute': b'AXMarkerTypeDescription'.decode("utf-8"), 'kAXSecondFieldAttribute': b'AXSecondField'.decode("utf-8"), 'kAXSheetCreatedNotification': b'AXSheetCreated'.decode("utf-8"), 'kAXWarningValueAttribute': b'AXWarningValue'.decode("utf-8"), 'kAXDisclosureTriangleRole': b'AXDisclosureTriangle'.decode("utf-8"), 'kAXAllowedValuesAttribute': b'AXAllowedValues'.decode("utf-8"), 'kAXRowHeaderUIElementsAttribute': b'AXRowHeaderUIElements'.decode("utf-8"), 'kAXHorizontalUnitDescriptionAttribute': b'AXHorizontalUnitDescription'.decode("utf-8"), 'kAXOverflowButtonAttribute': b'AXOverflowButton'.decode("utf-8"), 'kAXCloseButtonAttribute': b'AXCloseButton'.decode("utf-8"), 'kAXColumnsAttribute': b'AXColumns'.decode("utf-8"), 'kAXRangeForPositionParameterizedAttribute': b'AXRangeForPosition'.decode("utf-8"), 'kAXAscendingSortDirectionValue': b'AXAscendingSortDirection'.decode("utf-8"), 'kAXPriorityKey': b'AXPriorityKey'.decode("utf-8"), 'kAXToolbarButtonSubrole': b'AXToolbarButton'.decode("utf-8"), 'kAXZoomButtonAttribute': b'AXZoomButton'.decode("utf-8"), 'kAXSliderRole': b'AXSlider'.decode("utf-8"), 'kAXSelectedCellsAttribute': 
b'AXSelectedCells'.decode("utf-8"), 'kAXRaiseAction': b'AXRaise'.decode("utf-8"), 'kAXIncrementArrowSubrole': b'AXIncrementArrow'.decode("utf-8"), 'kAXLayoutAreaRole': b'AXLayoutArea'.decode("utf-8"), 'kAXUIElementDestroyedNotification': b'AXUIElementDestroyed'.decode("utf-8"), 'kAXMenuClosedNotification': b'AXMenuClosed'.decode("utf-8"), 'kAXHeaderAttribute': b'AXHeader'.decode("utf-8"), 'kAXDateFieldRole': b'AXDateField'.decode("utf-8"), 'kAXTextAreaRole': b'AXTextArea'.decode("utf-8"), 'kAXContentListSubrole': b'AXContentList'.decode("utf-8"), 'kAXOutlineRowSubrole': b'AXOutlineRow'.decode("utf-8"), 'kAXUIElementsKey': b'AXUIElementsKey'.decode("utf-8"), 'kAXUnknownSortDirectionValue': b'AXUnknownSortDirection'.decode("utf-8"), 'kAXLayoutItemRole': b'AXLayoutItem'.decode("utf-8"), 'kAXClearButtonAttribute': b'AXClearButton'.decode("utf-8"), 'kAXDescriptionListSubrole': b'AXDescriptionList'.decode("utf-8"), 'kAXDecrementArrowSubrole': b'AXDecrementArrow'.decode("utf-8"), 'kAXMinuteFieldAttribute': b'AXMinuteField'.decode("utf-8"), 'kAXInsertionPointLineNumberAttribute': b'AXInsertionPointLineNumber'.decode("utf-8"), 'kAXMenuItemSelectedNotification': b'AXMenuItemSelected'.decode("utf-8"), 'kAXUnknownRole': b'AXUnknown'.decode("utf-8"), 'kAXHorizontalUnitsAttribute': b'AXHorizontalUnits'.decode("utf-8"), 'kAXIdentifierAttribute': b'AXIdentifier'.decode("utf-8"), 'kAXApplicationDockItemSubrole': b'AXApplicationDockItem'.decode("utf-8"), 'kAXLineForIndexParameterizedAttribute': b'AXLineForIndex'.decode("utf-8"), 'kAXElementBusyAttribute': b'AXElementBusy'.decode("utf-8"), 'kAXTrashDockItemSubrole': b'AXTrashDockItem'.decode("utf-8"), 'kAXLayoutSizeForScreenSizeParameterizedAttribute': b'AXLayoutSizeForScreenSize'.decode("utf-8"), 'kAXMainAttribute': b'AXMain'.decode("utf-8"), 'kAXChildrenAttribute': b'AXChildren'.decode("utf-8"), 'kAXRowIndexRangeAttribute': b'AXRowIndexRange'.decode("utf-8"), 'kAXFilenameAttribute': b'AXFilename'.decode("utf-8"), 'kAXMenuItemCmdVirtualKeyAttribute': b'AXMenuItemCmdVirtualKey'.decode("utf-8"), 'kAXWindowCreatedNotification': b'AXWindowCreated'.decode("utf-8"), 'kAXMenuItemCmdModifiersAttribute': b'AXMenuItemCmdModifiers'.decode("utf-8"), 'kAXPositionAttribute': b'AXPosition'.decode("utf-8"), 'kAXVisibleRowsAttribute': b'AXVisibleRows'.decode("utf-8"), 'kAXVerticalOrientationValue': b'AXVerticalOrientation'.decode("utf-8"), 'kAXSelectedChildrenChangedNotification': b'AXSelectedChildrenChanged'.decode("utf-8"), 'kAXPlaceholderValueAttribute': b'AXPlaceholderValue'.decode("utf-8"), 'kAXFullScreenButtonAttribute': b'AXFullScreenButton'.decode("utf-8"), 'kAXRowCountChangedNotification': b'AXRowCountChanged'.decode("utf-8"), 'kAXMainWindowAttribute': b'AXMainWindow'.decode("utf-8"), 'kAXSelectedTextRangeAttribute': b'AXSelectedTextRange'.decode("utf-8"), 'kAXSelectedTextAttribute': b'AXSelectedText'.decode("utf-8"), 'kAXCellRole': b'AXCell'.decode("utf-8"), 'kAXSharedTextUIElementsAttribute': b'AXSharedTextUIElements'.decode("utf-8"), 'kAXScreenPointForLayoutPointParameterizedAttribute': b'AXScreenPointForLayoutPoint'.decode("utf-8"), 'kAXGridRole': b'AXGrid'.decode("utf-8"), 'kAXTopLevelUIElementAttribute': b'AXTopLevelUIElement'.decode("utf-8"), 'kAXDisclosingAttribute': b'AXDisclosing'.decode("utf-8"), 'kAXProcessSwitcherListSubrole': b'AXProcessSwitcherList'.decode("utf-8"), 'kAXApplicationDeactivatedNotification': b'AXApplicationDeactivated'.decode("utf-8"), 'kAXIsEditableAttribute': b'AXIsEditable'.decode("utf-8"), 'kAXWindowDeminiaturizedNotification': 
b'AXWindowDeminiaturized'.decode("utf-8"), 'kAXScrollBarRole': b'AXScrollBar'.decode("utf-8"), 'kAXCancelAction': b'AXCancel'.decode("utf-8"), 'kAXColorWellRole': b'AXColorWell'.decode("utf-8"), 'kAXLayoutPointForScreenPointParameterizedAttribute': b'AXLayoutPointForScreenPoint'.decode("utf-8"), 'kAXMaxValueAttribute': b'AXMaxValue'.decode("utf-8"), 'kPasteboardTypeFilePromiseContent': b'com.apple.pasteboard.promised-file-content-type', 'kAXConfirmAction': b'AXConfirm'.decode("utf-8"), 'kAXEditedAttribute': b'AXEdited'.decode("utf-8"), 'kAXProgressIndicatorRole': b'AXProgressIndicator'.decode("utf-8"), 'kAXRowCollapsedNotification': b'AXRowCollapsed'.decode("utf-8"), 'kAXTextAttribute': b'AXText'.decode("utf-8"), 'kAXSortDirectionAttribute': b'AXSortDirection'.decode("utf-8"), 'kAXIndexAttribute': b'AXIndex'.decode("utf-8"), 'kAXURLDockItemSubrole': b'AXURLDockItem'.decode("utf-8"), 'kAXDocumentAttribute': b'AXDocument'.decode("utf-8"), 'kAXDescriptionAttribute': b'AXDescription'.decode("utf-8"), 'kAXWindowResizedNotification': b'AXWindowResized'.decode("utf-8"), 'kAXMenuItemCmdGlyphAttribute': b'AXMenuItemCmdGlyph'.decode("utf-8"), 'kAXSelectedRowsAttribute': b'AXSelectedRows'.decode("utf-8"), 'kAXRTFForRangeParameterizedAttribute': b'AXRTFForRange'.decode("utf-8"), 'kAXLabelValueAttribute': b'AXLabelValue'.decode("utf-8"), 'kAXSwitchSubrole': b'AXSwitch'.decode("utf-8"), 'kAXHelpAttribute': b'AXHelp'.decode("utf-8"), 'kAXZoomButtonSubrole': b'AXZoomButton'.decode("utf-8"), 'kAXButtonRole': b'AXButton'.decode("utf-8"), 'kAXColumnIndexRangeAttribute': b'AXColumnIndexRange'.decode("utf-8")}) -functions={'HIShapeUnion': (sel32or64(b'l^{__HIShape=}^{__HIShape=}^{__HIShape=}', b'i^{__HIShape=}^{__HIShape=}^{__HIShape=}'),), 'AXObserverCreateWithInfoCallback': (sel32or64(b'li^?^^{__AXObserver=}', b'ii^?^^{__AXObserver=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__AXObserver=}'}, 1: {'type': b'^{__AXUIElement=}'}, 2: {'type': b'^{__CFString=}'}, 3: {'type': b'^{__CFDictionary=}'}, 4: {'type': b'^v'}}}}, 2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'TranslationCopySourceType': (sel32or64(b'l^{OpaqueTranslationRef=}^^{__CFString=}', b'i^{OpaqueTranslationRef=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'GetIconFamilyData': (sel32or64(b's^^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}L^^c', b's^^{IconFamilyResource=Ii[1{IconFamilyElement=Ii[1C]}]}I^^c'),), 'IconRefIntersectsCGRect': (sel32or64(b'Z^{CGRect={CGPoint=ff}{CGSize=ff}}^{CGRect={CGPoint=ff}{CGSize=ff}}sL^{OpaqueIconRef=}', b'Z^{CGRect={CGPoint=dd}{CGSize=dd}}^{CGRect={CGPoint=dd}{CGSize=dd}}sI^{OpaqueIconRef=}'),), 'DisposeCIcon': (b'v^^{CIcon={PixMap=^cs{Rect=ssss}sslllssssL^^{ColorTable=lss[1{ColorSpec=s{RGBColor=SSS}}]}^v}{BitMap=^cs{Rect=ssss}}{BitMap=^cs{Rect=ssss}}^^c[1s]}',), 'DisposeIconSuite': (b's^^cZ',), 'HIShapeCreateMutableWithRect': (sel32or64(b'^{__HIShape=}^{CGRect={CGPoint=ff}{CGSize=ff}}', b'^{__HIShape=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}}}), 'PlotIconRefInContext': (sel32or64(b'l^{CGContext=}^{CGRect={CGPoint=ff}{CGSize=ff}}ss^{RGBColor=SSS}L^{OpaqueIconRef=}', b'i^{CGContext=}^{CGRect={CGPoint=dd}{CGSize=dd}}ss^{RGBColor=SSS}I^{OpaqueIconRef=}'),), 'AXUIElementPerformAction': (sel32or64(b'l^{__AXUIElement=}^{__CFString=}', 
b'i^{__AXUIElement=}^{__CFString=}'),), 'AXUIElementPostKeyboardEvent': (sel32or64(b'l^{__AXUIElement=}SSZ', b'i^{__AXUIElement=}SSZ'),), 'AXUIElementIsAttributeSettable': (sel32or64(b'l^{__AXUIElement=}^{__CFString=}^Z', b'i^{__AXUIElement=}^{__CFString=}^Z'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'GetCurrentProcess': (sel32or64(b's^{ProcessSerialNumber=LL}', b's^{ProcessSerialNumber=II}'),), 'HIShapeCreateCopy': (b'^{__HIShape=}^{__HIShape=}', '', {'retval': {'already_cfretained': True}}), 'AddIconToSuite': (b's^^c^^cL',), 'AXUIElementCopyParameterizedAttributeValue': (sel32or64(b'l^{__AXUIElement=}^{__CFString=}@^@', b'i^{__AXUIElement=}^{__CFString=}@^@'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PasteboardResolvePromises': (sel32or64(b'l^{OpaquePasteboardRef=}', b'i^{OpaquePasteboardRef=}'),), 'GetSuiteLabel': (b's^^c',), 'NewIconActionUPP': (b'^?^?', '', {'arguments': {0: {'callable': {'retval': {'type': b's'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'^^^c'}, 2: {'type': b'^v'}}}}}}), 'AXUIElementCreateSystemWide': (b'^{__AXUIElement=}', '', {'retval': {'already_cfretained': True}}), 'PtInIconRef': (b'Z^{Point=ss}^{Rect=ssss}sL^{OpaqueIconRef=}',), 'SetIconCacheData': (b's^^c^v',), 'RectInIconRef': (b'Z^{Rect=ssss}^{Rect=ssss}sL^{OpaqueIconRef=}',), 'PasteboardCopyItemFlavors': (sel32or64(b'l^{OpaquePasteboardRef=}^v^^{__CFArray=}', b'i^{OpaquePasteboardRef=}^v^^{__CFArray=}'), '', {'retval': {'already_cfretained': True}}), 'AXUIElementSetAttributeValue': (sel32or64(b'l^{__AXUIElement=}^{__CFString=}@', b'i^{__AXUIElement=}^{__CFString=}@'),), 'SetSuiteLabel': (b's^^cs',), 'NewIconGetterUPP': (b'^?^?', '', {'arguments': {0: {'callable': {'retval': {'type': b'^^c'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'^v'}}}}}}), 'GetProcessInformation': (sel32or64(b's^{ProcessSerialNumber=LL}^{ProcessInfoRec=L^C{ProcessSerialNumber=LL}LLL^cLL{ProcessSerialNumber=LL}LL^{FSSpec=sl[64C]}}', b's^{ProcessSerialNumber=II}^{ProcessInfoRec=I^C{ProcessSerialNumber=II}III^cII{ProcessSerialNumber=II}II^{FSRef=[80C]}}'),), 'ProcessInformationCopyDictionary': (sel32or64(b'^{__CFDictionary=}^{ProcessSerialNumber=LL}L', b'^{__CFDictionary=}^{ProcessSerialNumber=II}I'), '', {'retval': {'already_cfretained': True}}), 'PlotCIconHandle': (b's^{Rect=ssss}ss^^{CIcon={PixMap=^cs{Rect=ssss}sslllssssL^^{ColorTable=lss[1{ColorSpec=s{RGBColor=SSS}}]}^v}{BitMap=^cs{Rect=ssss}}{BitMap=^cs{Rect=ssss}}^^c[1s]}',), 'UAZoomChangeFocus': (sel32or64(b'l^{CGRect={CGPoint=ff}{CGSize=ff}}^{CGRect={CGPoint=ff}{CGSize=ff}}L', b'i^{CGRect={CGPoint=dd}{CGSize=dd}}^{CGRect={CGPoint=dd}{CGSize=dd}}I'), '', {'arguments': {0: {'type_modifier': 'n'}, 1: {'type_modifier': 'n'}}}), 'HIShapeInset': (sel32or64(b'l^{__HIShape=}ff', b'i^{__HIShape=}dd'),), 'AXUIElementCopyActionNames': (sel32or64(b'l^{__AXUIElement=}^^{__CFArray=}', b'i^{__AXUIElement=}^^{__CFArray=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'HIShapeIntersectsRect': (sel32or64(b'Z^{__HIShape=}^{CGRect={CGPoint=ff}{CGSize=ff}}', b'Z^{__HIShape=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'retval': {'c_array_of_fixed_length': 1}, 'arguments': {1: {'type_modifier': 'n'}}}), 'HIShapeEnumerate': (sel32or64(b'l^{__HIShape=}L^?^v', b'i^{__HIShape=}I^?^v'), '', {'arguments': {2: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'i'}, 1: {'type': b'^{__HIShape=}'}, 2: {'type': 
sel32or64(b'^{CGRect={CGPoint=ff}{CGSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}'), 'type_modifier': 'n'}, 3: {'type': b'^v'}}}, 'callable_retained': False}}}), 'AXUIElementCopyAttributeNames': (sel32or64(b'l^{__AXUIElement=}^^{__CFArray=}', b'i^{__AXUIElement=}^^{__CFArray=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'SameProcess': (sel32or64(b's^{ProcessSerialNumber=LL}^{ProcessSerialNumber=LL}^Z', b's^{ProcessSerialNumber=II}^{ProcessSerialNumber=II}^Z'),), 'TransformProcessType': (sel32or64(b'l^{ProcessSerialNumber=LL}L', b'i^{ProcessSerialNumber=II}I'),), 'PtInIconID': (b'Z{Point=ss}^{Rect=ssss}ss',), 'PasteboardSynchronize': (sel32or64(b'L^{OpaquePasteboardRef=}', b'I^{OpaquePasteboardRef=}'),), 'TranslationGetTranslationFlags': (sel32or64(b'l^{OpaqueTranslationRef=}^L', b'i^{OpaqueTranslationRef=}^I'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'TranslationCopyDestinationType': (sel32or64(b'l^{OpaqueTranslationRef=}^^{__CFString=}', b'i^{OpaqueTranslationRef=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'SetIconCacheProc': (b's^^c^?',), 'IconIDToRgn': (b's^{OpaqueRgnHandle=}^{Rect=ssss}ss',), 'IconRefContainsCGPoint': (sel32or64(b'Z^{CGPoint=ff}^{CGRect={CGPoint=ff}{CGSize=ff}}sL^{OpaqueIconRef=}', b'Z^{CGPoint=dd}^{CGRect={CGPoint=dd}{CGSize=dd}}sI^{OpaqueIconRef=}'),), 'LaunchApplication': (sel32or64(b's^{LaunchParamBlockRec=LSSLSS^{FSSpec=sl[64C]}{ProcessSerialNumber=LL}LLL^{AppParameters={_struct AppParameters::=SLL{Point=ss}S}{=SLL{Point=ss}S}LL}}', b's^{LaunchParamBlockRec=ISSISS^{FSRef=[80C]}{ProcessSerialNumber=II}III^{AppParameters={_struct AppParameters::=SII{Point=ss}S}{=SII{Point=ss}S}II}}'),), 'WakeUpProcess': (sel32or64(b's^{ProcessSerialNumber=LL}', b's^{ProcessSerialNumber=II}'),), 'ForEachIconDo': (b's^^cL^?^v',), 'GetIcon': (b'^^cs',), 'AXUIElementCopyParameterizedAttributeNames': (sel32or64(b'l^{__AXUIElement=}^^{__CFArray=}', b'i^{__AXUIElement=}^^{__CFArray=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PasteboardCreate': (sel32or64(b'l^{__CFString=}^^{OpaquePasteboardRef=}', b'i^{__CFString=}^^{OpaquePasteboardRef=}'), '', {'retval': {'already_cfretained': True}}), 'SetFrontProcessWithOptions': (sel32or64(b'l^{ProcessSerialNumber=LL}L', b'i^{ProcessSerialNumber=II}I'),), 'GetIconCacheProc': (b's^^c^^?',), 'MakeIconCache': (b's^^^c^?^v',), 'InvokeIconActionUPP': (sel32or64(b'sL^^^c^v^?', b'sI^^^c^v^?'),), 'IconFamilyToIconSuite': (b's^^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}L^^^c',), 'TranslationGetTypeID': (sel32or64(b'L', b'Q'),), 'PlotIconID': (b's^{Rect=ssss}sss',), 'CopyProcessName': (sel32or64(b'l^{ProcessSerialNumber=LL}^^{__CFString=}', b'i^{ProcessSerialNumber=II}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}}), 'GetIconFromSuite': (b's^^^c^^cL',), 'GetIconCacheData': (b's^^c^^v',), 'PasteboardGetItemIdentifier': (sel32or64(b'l^{OpaquePasteboardRef=}l^^v', b'i^{OpaquePasteboardRef=}q^^v'),), 'IconRefToHIShape': (sel32or64(b'^{__HIShape=}^{CGRect={CGPoint=ff}{CGSize=ff}}sL^{OpaqueIconRef=}', b'^{__HIShape=}^{CGRect={CGPoint=dd}{CGSize=dd}}sI^{OpaqueIconRef=}'),), 'GetIconRefVariant': (sel32or64(b'^{OpaqueIconRef=}^{OpaqueIconRef=}L^s', b'^{OpaqueIconRef=}^{OpaqueIconRef=}I^s'),), 'IsProcessVisible': (sel32or64(b'Z^{ProcessSerialNumber=LL}', b'Z^{ProcessSerialNumber=II}'),), 
'HIShapeIsRectangular': (b'Z^{__HIShape=}',), 'HIShapeCreateWithRect': (sel32or64(b'^{__HIShape=}^{CGRect={CGPoint=ff}{CGSize=ff}}', b'^{__HIShape=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'n'}}}), 'AXUIElementCopyActionDescription': (sel32or64(b'l^{__AXUIElement=}^{__CFString=}^^{__CFString=}', b'i^{__AXUIElement=}^{__CFString=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'AXObserverGetRunLoopSource': (b'^{__CFRunLoopSource=}^{__AXObserver=}',), 'PlotIconRef': (b's^{Rect=ssss}ssL^{OpaqueIconRef=}',), 'GetNextProcess': (sel32or64(b's^{ProcessSerialNumber=LL}', b's^{ProcessSerialNumber=II}'),), 'GetIconSuite': (b's^^^csL',), 'LoadIconCache': (b's^{Rect=ssss}ss^^c',), 'AXUIElementGetAttributeValueCount': (sel32or64(b'l^{__AXUIElement=}^{__CFString=}^l', b'i^{__AXUIElement=}^{__CFString=}^q'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'TranslationPerformForURL': (sel32or64(b'l^{OpaqueTranslationRef=}^{__CFURL=}^{__CFURL=}^^{__CFURL=}', b'i^{OpaqueTranslationRef=}^{__CFURL=}^{__CFURL=}^^{__CFURL=}'), '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'AXValueGetType': (b'I^{__AXValue=}',), 'UAZoomEnabled': (b'Z',), 'GetProcessPID': (sel32or64(b'l^{ProcessSerialNumber=LL}^i', b'i^{ProcessSerialNumber=II}^i'),), 'PtInIconMethod': (b'Z{Point=ss}^{Rect=ssss}s^?^v',), 'AXUIElementGetPid': (sel32or64(b'l^{__AXUIElement=}^i', b'i^{__AXUIElement=}^i'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'RectInIconID': (b'Z^{Rect=ssss}^{Rect=ssss}ss',), 'GetProcessForPID': (sel32or64(b'li^{ProcessSerialNumber=LL}', b'ii^{ProcessSerialNumber=II}'),), 'TranslationCreate': (sel32or64(b'l^{__CFString=}^{__CFString=}L^^{OpaqueTranslationRef=}', b'i^{__CFString=}^{__CFString=}I^^{OpaqueTranslationRef=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'HIShapeUnionWithRect': (sel32or64(b'l^{__HIShape=}^{CGRect={CGPoint=ff}{CGSize=ff}}', b'i^{__HIShape=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'PasteboardGetItemFlavorFlags': (sel32or64(b'l^{OpaquePasteboardRef=}^v^{__CFString=}^L', b'i^{OpaquePasteboardRef=}^v^{__CFString=}^I'),), 'PasteboardCopyPasteLocation': (sel32or64(b'l^{OpaquePasteboardRef=}^^{__CFURL=}', b'i^{OpaquePasteboardRef=}^^{__CFURL=}'), '', {'retval': {'already_cfretained': True}}), 'PasteboardSetPromiseKeeper': (sel32or64(b'l^{OpaquePasteboardRef=}^?^v', b'i^{OpaquePasteboardRef=}^?^v'), '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{OpaquePasteboardRef=}'}, 1: {'type': b'^v'}, 2: {'type': b'^{__CFString=}'}, 3: {'type': b'^v'}}}}}}), 'PlotIconMethod': (b's^{Rect=ssss}ss^?^v',), 'IconSuiteToIconFamily': (b's^^cL^^^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}',), 'HIShapeCreateUnion': (b'^{__HIShape=}^{__HIShape=}^{__HIShape=}', '', {'retval': {'already_cfretained': True}}), 'TranslationPerformForData': (sel32or64(b'l^{OpaqueTranslationRef=}^{__CFData=}^^{__CFData=}', b'i^{OpaqueTranslationRef=}^{__CFData=}^^{__CFData=}'), '', {'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'AXMakeProcessTrusted': (sel32or64(b'l^{__CFString=}', b'i^{__CFString=}'),), 'AXIsProcessTrustedWithOptions': (b'Z^{__CFDictionary=}', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'HIShapeOffset': (sel32or64(b'l^{__HIShape=}ff', 
b'i^{__HIShape=}dd'),), 'NewIconSuite': (b's^^^c',), 'AXUIElementCopyAttributeValues': (sel32or64(b'l^{__AXUIElement=}^{__CFString=}ll^^{__CFArray=}', b'i^{__AXUIElement=}^{__CFString=}qq^^{__CFArray=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PtInIconSuite': (b'Z{Point=ss}^{Rect=ssss}s^^c',), 'SetIconFamilyData': (sel32or64(b's^^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}L^^c', b's^^{IconFamilyResource=Ii[1{IconFamilyElement=Ii[1C]}]}I^^c'),), 'HIShapeIntersect': (sel32or64(b'l^{__HIShape=}^{__HIShape=}^{__HIShape=}', b'i^{__HIShape=}^{__HIShape=}^{__HIShape=}'),), 'IconRefToIconFamily': (sel32or64(b's^{OpaqueIconRef=}L^^^{IconFamilyResource=Ll[1{IconFamilyElement=Ll[1C]}]}', b's^{OpaqueIconRef=}I^^^{IconFamilyResource=Ii[1{IconFamilyElement=Ii[1C]}]}'),), 'TranslationCreateWithSourceArray': (sel32or64(b'l^{__CFArray=}L^^{__CFArray=}^^{__CFDictionary=}', b'i^{__CFArray=}I^^{__CFArray=}^^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}, 3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'KillProcess': (sel32or64(b's^{ProcessSerialNumber=LL}', b's^{ProcessSerialNumber=II}'),), 'RectInIconSuite': (b'Z^{Rect=ssss}^{Rect=ssss}s^^c',), 'SetFrontProcess': (sel32or64(b's^{ProcessSerialNumber=LL}', b's^{ProcessSerialNumber=II}'),), 'DisposeIconActionUPP': (b'v^?',), 'InvokeIconGetterUPP': (sel32or64(b'^^cL^v^?', b'^^cI^v^?'),), 'PasteboardCopyItemFlavorData': (sel32or64(b'l^{OpaquePasteboardRef=}^v^{__CFString=}^^{__CFData=}', b'i^{OpaquePasteboardRef=}^v^{__CFString=}^^{__CFData=}'), '', {'retval': {'already_cfretained': True}}), 'AXUIElementCreateApplication': (b'^{__AXUIElement=}i', '', {'retval': {'already_cfretained': True}}), 'PasteboardGetTypeID': (sel32or64(b'L', b'Q'),), 'GetProcessBundleLocation': (sel32or64(b'l^{ProcessSerialNumber=LL}^{FSRef=[80C]}', b'i^{ProcessSerialNumber=II}^{FSRef=[80C]}'),), 'RectInIconMethod': (b'Z^{Rect=ssss}^{Rect=ssss}s^?^v',), 'HIShapeContainsPoint': (sel32or64(b'Z^{__HIShape=}^{CGPoint=ff}', b'Z^{__HIShape=}^{CGPoint=dd}'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'DisposeIconGetterUPP': (b'v^?',), 'HIShapeGetBounds': (sel32or64(b'^{CGRect={CGPoint=ff}{CGSize=ff}}^{__HIShape=}^{CGRect={CGPoint=ff}{CGSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}^{__HIShape=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'AXObserverAddNotification': (sel32or64(b'l^{__AXObserver=}^{__AXUIElement=}^{__CFString=}^v', b'i^{__AXObserver=}^{__AXUIElement=}^{__CFString=}^v'),), 'IconRefToRgn': (b's^{OpaqueRgnHandle=}^{Rect=ssss}sL^{OpaqueIconRef=}',), 'IconSuiteToRgn': (b's^{OpaqueRgnHandle=}^{Rect=ssss}s^^c',), 'IconMethodToRgn': (b's^{OpaqueRgnHandle=}^{Rect=ssss}s^?^v',), 'HIShapeCreateIntersection': (b'^{__HIShape=}^{__HIShape=}^{__HIShape=}', '', {'retval': {'already_cfretained': True}}), 'GetIconSizesFromIconRef': (b'sL^LL^{OpaqueIconRef=}',), 'HIShapeCreateMutableCopy': (b'^{__HIShape=}^{__HIShape=}', '', {'retval': {'already_cfretained': True}}), 'AXUIElementCopyAttributeValue': (sel32or64(b'l^{__AXUIElement=}^{__CFString=}^@', b'i^{__AXUIElement=}^{__CFString=}^@'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'GetFrontProcess': (sel32or64(b's^{ProcessSerialNumber=LL}', b's^{ProcessSerialNumber=II}'),), 'HIShapeCreateEmpty': (b'^{__HIShape=}', '', {'retval': 
{'already_cfretained': True}}), 'PlotIcon': (b'v^{Rect=ssss}^^c',), 'AXObserverGetTypeID': (sel32or64(b'L', b'Q'),), 'PlotCIcon': (b'v^{Rect=ssss}^^{CIcon={PixMap=^cs{Rect=ssss}sslllssssL^^{ColorTable=lss[1{ColorSpec=s{RGBColor=SSS}}]}^v}{BitMap=^cs{Rect=ssss}}{BitMap=^cs{Rect=ssss}}^^c[1s]}',), 'AXValueGetValue': (b'Z^{__AXValue=}I^v',), 'HIShapeGetTypeID': (sel32or64(b'L', b'Q'),), 'AXObserverCreate': (sel32or64(b'li^?^^{__AXObserver=}', b'ii^?^^{__AXObserver=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__AXObserver=}'}, 1: {'type': b'^{__AXUIElement=}'}, 2: {'type': b'^{__CFString=}'}, 3: {'type': b'^v'}}}}, 2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PlotIconSuite': (b's^{Rect=ssss}ss^^c',), 'PasteboardPutItemFlavor': (sel32or64(b'l^{OpaquePasteboardRef=}^v^{__CFString=}^{__CFData=}L', b'i^{OpaquePasteboardRef=}^v^{__CFString=}^{__CFData=}I'),), 'AXAPIEnabled': (b'Z',), 'AXObserverRemoveNotification': (sel32or64(b'l^{__AXObserver=}^{__AXUIElement=}^{__CFString=}', b'i^{__AXObserver=}^{__AXUIElement=}^{__CFString=}'),), 'PasteboardClear': (sel32or64(b'l^{OpaquePasteboardRef=}', b'i^{OpaquePasteboardRef=}'),), 'TranslationPerformForFile': (sel32or64(b'l^{OpaqueTranslationRef=}^{FSRef=[80C]}^{FSRef=[80C]}^{__CFString=}^{FSRef=[80C]}', b'i^{OpaqueTranslationRef=}^{FSRef=[80C]}^{FSRef=[80C]}^{__CFString=}^{FSRef=[80C]}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}, 4: {'type_modifier': 'o'}}}), 'IsIconRefMaskEmpty': (b'Z^{OpaqueIconRef=}',), 'GetLabel': (b'ss^{RGBColor=SSS}[256C]',), 'PasteboardGetItemCount': (sel32or64(b'l^{OpaquePasteboardRef=}^L', b'i^{OpaquePasteboardRef=}^Q'),), 'PlotSICNHandle': (b's^{Rect=ssss}ss^^c',), 'GetCIcon': (b'^^{CIcon={PixMap=^cs{Rect=ssss}sslllssssL^^{ColorTable=lss[1{ColorSpec=s{RGBColor=SSS}}]}^v}{BitMap=^cs{Rect=ssss}}{BitMap=^cs{Rect=ssss}}^^c[1s]}s',), 'PlotIconHandle': (b's^{Rect=ssss}ss^^c',), 'HIShapeSetWithShape': (sel32or64(b'l^{__HIShape=}^{__HIShape=}', b'i^{__HIShape=}^{__HIShape=}'),), 'PasteboardSetPasteLocation': (sel32or64(b'l^{OpaquePasteboardRef=}^{__CFURL=}', b'i^{OpaquePasteboardRef=}^{__CFURL=}'),), 'HIShapeXor': (sel32or64(b'l^{__HIShape=}^{__HIShape=}^{__HIShape=}', b'i^{__HIShape=}^{__HIShape=}^{__HIShape=}'),), 'HIShapeCreateMutable': (b'^{__HIShape=}', '', {'retval': {'already_cfretained': True}}), 'ShowHideProcess': (sel32or64(b's^{ProcessSerialNumber=LL}Z', b's^{ProcessSerialNumber=II}Z'),), 'HIShapeIsEmpty': (b'Z^{__HIShape=}',), 'AXUIElementSetMessagingTimeout': (sel32or64(b'l^{__AXUIElement=}f', b'i^{__AXUIElement=}f'),), 'ExitToShell': (b'v',), 'AXIsProcessTrusted': (b'Z',), 'AXUIElementGetTypeID': (sel32or64(b'L', b'Q'),), 'PasteboardCopyName': (sel32or64(b'l^{OpaquePasteboardRef=}^^{__CFString=}', b'i^{OpaquePasteboardRef=}^^{__CFString=}'), '', {'retval': {'already_cfretained': True}}), 'HIShapeDifference': (sel32or64(b'l^{__HIShape=}^{__HIShape=}^{__HIShape=}', b'i^{__HIShape=}^{__HIShape=}^{__HIShape=}'),), 'AXValueGetTypeID': (sel32or64(b'L', b'Q'), '', {'variadic': False}), 'AXUIElementCopyElementAtPosition': (sel32or64(b'l^{__AXUIElement=}ff^^{__AXUIElement=}', b'i^{__AXUIElement=}ff^^{__AXUIElement=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'AXUIElementCopyMultipleAttributeValues': (sel32or64(b'l^{__AXUIElement=}^{__CFArray=}L^^{__CFArray=}', b'i^{__AXUIElement=}^{__CFArray=}I^^{__CFArray=}'), '', 
{'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'AXValueCreate': (b'^{__AXValue=}I^v', '', {'retval': {'already_cfretained': True}}), 'HIShapeSetEmpty': (sel32or64(b'l^{__HIShape=}', b'i^{__HIShape=}'),), 'HIShapeReplacePathInCGContext': (sel32or64(b'l^{__HIShape=}^{CGContext=}', b'i^{__HIShape=}^{CGContext=}'),), 'HIShapeCreateDifference': (b'^{__HIShape=}^{__HIShape=}^{__HIShape=}', '', {'retval': {'already_cfretained': True}}), 'HIShapeCreateXor': (b'^{__HIShape=}^{__HIShape=}^{__HIShape=}', '', {'retval': {'already_cfretained': True}})} -aliases = {'svSmall4Bit': 'kSelectorSmall4Bit', 'svLarge8Bit': 'kSelectorLarge8Bit', 'atTopLeft': 'kAlignTopLeft', 'ttOpen': 'kTransformOpen', 'ttDisabled': 'kTransformDisabled', 'svAllAvailableData': 'kSelectorAllAvailableData', 'ttSelected': 'kTransformSelected', 'atBottomRight': 'kAlignBottomRight', 'svMini1Bit': 'kSelectorMini1Bit', 'atAbsoluteCenter': 'kAlignAbsoluteCenter', 'ttLabel6': 'kTransformLabel6', 'ttLabel7': 'kTransformLabel7', 'atVerticalCenter': 'kAlignVerticalCenter', 'ttLabel5': 'kTransformLabel5', 'ttLabel2': 'kTransformLabel2', 'ttLabel3': 'kTransformLabel3', 'ttLabel1': 'kTransformLabel1', 'svSmall1Bit': 'kSelectorSmall1Bit', 'atLeft': 'kAlignLeft', 'atCenterRight': 'kAlignCenterRight', 'ttSelectedDisabled': 'kTransformSelectedDisabled', 'atHorizontalCenter': 'kAlignHorizontalCenter', 'svAllLargeData': 'kSelectorAllLargeData', 'atRight': 'kAlignRight', 'ttSelectedOpen': 'kTransformSelectedOpen', 'svAll4BitData': 'kSelectorAll4BitData', 'svMini8Bit': 'kSelectorMini8Bit', 'atCenterLeft': 'kAlignCenterLeft', 'svSmall8Bit': 'kSelectorSmall8Bit', 'MacGetCurrentProcess': 'GetCurrentProcess', 'ttLabel4': 'kTransformLabel4', 'svLarge1Bit': 'kSelectorLarge1Bit', 'kPasteboardUniqueName': 'NULL', 'atCenterTop': 'kAlignCenterTop', 'svAllSmallData': 'kSelectorAllSmallData', 'svAll8BitData': 'kSelectorAll8BitData', 'ttSelectedOffline': 'kTransformSelectedOffline', 'atTop': 'kAlignTop', 'svAll1BitData': 'kSelectorAll1BitData', 'svMini4Bit': 'kSelectorMini4Bit', 'atBottom': 'kAlignBottom', 'atTopRight': 'kAlignTopRight', 'kICComponentInterfaceVersion': 'kICComponentInterfaceVersion4', 'svAllMiniData': 'kSelectorAllMiniData', 'atNone': 'kAlignNone', 'atBottomLeft': 'kAlignBottomLeft', 'kPasteboardPromisedData': 'NULL', 'ttNone': 'kTransformNone', 'atCenterBottom': 'kAlignCenterBottom', 'ttOffline': 'kTransformOffline', 'svLarge4Bit': 'kSelectorLarge4Bit', 'kPasteboardResolveAllPromises': 'NULL'} -cftypes=[('AXObserverRef', b'^{__AXObserver=}', 'AXObserverGetTypeID', None), ('AXUIElementRef', b'^{__AXUIElement=}', 'AXUIElementGetTypeID', None), ('AXValueRef', b'^{__AXValue=}', 'AXValueGetTypeID', None), ('HIMutableShapeRef', b'^{__HIShape=}', None, None), ('HIShapeRef', b'^{__HIShape=}', 'HIShapeGetTypeID', None), ('TranslationRef', b'^{OpaqueTranslationRef=}', 'TranslationGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/HIServices/_metadata.pyc b/env/lib/python2.7/site-packages/HIServices/_metadata.pyc deleted file mode 100644 index f48ca75f..00000000 Binary files a/env/lib/python2.7/site-packages/HIServices/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/IMServicePlugIn/_IMServicePlugIn.so b/env/lib/python2.7/site-packages/IMServicePlugIn/_IMServicePlugIn.so deleted file mode 100755 index b742ff60..00000000 Binary files a/env/lib/python2.7/site-packages/IMServicePlugIn/_IMServicePlugIn.so and /dev/null 
differ diff --git a/env/lib/python2.7/site-packages/IMServicePlugIn/__init__.py b/env/lib/python2.7/site-packages/IMServicePlugIn/__init__.py deleted file mode 100644 index 4742923c..00000000 --- a/env/lib/python2.7/site-packages/IMServicePlugIn/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -''' -Python mapping for the IMServicePlugIn framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Cocoa - -from IMServicePlugIn import _metadata, _IMServicePlugIn - -try: - long -except NameError: - long = int - -sys.modules['IMServicePlugIn'] = mod = objc.ObjCLazyModule( - "IMServicePlugIn", - "com.apple.GameKit", - objc.pathForFramework("/System/Library/Frameworks/GameKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa,)) - -import sys -del sys.modules['IMServicePlugIn._metadata'] -del sys.modules['IMServicePlugIn._IMServicePlugIn'] diff --git a/env/lib/python2.7/site-packages/IMServicePlugIn/__init__.pyc b/env/lib/python2.7/site-packages/IMServicePlugIn/__init__.pyc deleted file mode 100644 index 5a1473f2..00000000 Binary files a/env/lib/python2.7/site-packages/IMServicePlugIn/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/IMServicePlugIn/_metadata.py b/env/lib/python2.7/site-packages/IMServicePlugIn/_metadata.py deleted file mode 100644 index b3fd4b59..00000000 --- a/env/lib/python2.7/site-packages/IMServicePlugIn/_metadata.py +++ /dev/null @@ -1,29 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Aug 2 18:47:32 2015 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$IMAccountSettingLoginHandle$IMAccountSettingPassword$IMAccountSettingServerHost$IMAccountSettingServerPort$IMAccountSettingUsesSSL$IMAttributeBackgroundColor$IMAttributeBaseWritingDirection$IMAttributeBold$IMAttributeFontFamily$IMAttributeFontSize$IMAttributeForegroundColor$IMAttributeItalic$IMAttributeLink$IMAttributeMessageBackgroundColor$IMAttributePreformatted$IMAttributeStrikethrough$IMAttributeUnderline$IMGroupListDefaultGroup$IMGroupListHandlesKey$IMGroupListNameKey$IMGroupListPermissionsKey$IMHandleCapabilityChatRoom$IMHandleCapabilityFileTransfer$IMHandleCapabilityHandlePicture$IMHandleCapabilityMessaging$IMHandleCapabilityOfflineMessaging$IMHandlePropertyAlias$IMHandlePropertyAuthorizationStatus$IMHandlePropertyAvailability$IMHandlePropertyCapabilities$IMHandlePropertyEmailAddress$IMHandlePropertyFirstName$IMHandlePropertyIdleDate$IMHandlePropertyLastName$IMHandlePropertyPictureData$IMHandlePropertyPictureIdentifier$IMHandlePropertyStatusMessage$IMSessionPropertyAvailability$IMSessionPropertyIdleDate$IMSessionPropertyIsInvisible$IMSessionPropertyPictureData$IMSessionPropertyStatusMessage$''' -enums = 
'''$IMGroupListCanAddNewMembers@4$IMGroupListCanRemoveMembers@8$IMGroupListCanRenameGroup@2$IMGroupListCanReorderGroup@1$IMGroupListCanReorderMembers@16$IMHandleAuthorizationStatusAccepted@0$IMHandleAuthorizationStatusDeclined@2$IMHandleAuthorizationStatusPending@1$IMHandleAvailabilityAvailable@1$IMHandleAvailabilityAway@0$IMHandleAvailabilityOffline@-1$IMHandleAvailabilityUnknown@-2$IMSessionAvailabilityAvailable@1$IMSessionAvailabilityAway@0$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'plugInDidLogOutWithError:reconnect:', {'arguments': {3: {'type': 'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/IMServicePlugIn/_metadata.pyc b/env/lib/python2.7/site-packages/IMServicePlugIn/_metadata.pyc deleted file mode 100644 index eb819a6a..00000000 Binary files a/env/lib/python2.7/site-packages/IMServicePlugIn/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/IOSurface/__init__.py b/env/lib/python2.7/site-packages/IOSurface/__init__.py deleted file mode 100644 index 68b6be83..00000000 --- a/env/lib/python2.7/site-packages/IOSurface/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the IOSurface framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from IOSurface import _metadata - -sys.modules['IOSurface'] = mod = objc.ObjCLazyModule( - "IOSurface", - "com.apple.IOSurface", - objc.pathForFramework("/System/Library/Frameworks/IOSurface.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['IOSurface._metadata'] diff --git a/env/lib/python2.7/site-packages/IOSurface/__init__.pyc b/env/lib/python2.7/site-packages/IOSurface/__init__.pyc deleted file mode 100644 index 08c0762d..00000000 Binary files a/env/lib/python2.7/site-packages/IOSurface/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/IOSurface/_metadata.py b/env/lib/python2.7/site-packages/IOSurface/_metadata.py deleted file mode 100644 index e6ce1f4f..00000000 --- a/env/lib/python2.7/site-packages/IOSurface/_metadata.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 5 23:25:02 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$IOSurfacePropertyAllocSizeKey$IOSurfacePropertyKeyAllocSize$IOSurfacePropertyKeyBytesPerElement$IOSurfacePropertyKeyBytesPerRow$IOSurfacePropertyKeyCacheMode$IOSurfacePropertyKeyElementHeight$IOSurfacePropertyKeyElementWidth$IOSurfacePropertyKeyHeight$IOSurfacePropertyKeyOffset$IOSurfacePropertyKeyPixelFormat$IOSurfacePropertyKeyPixelSizeCastingAllowed$IOSurfacePropertyKeyPlaneBase$IOSurfacePropertyKeyPlaneBytesPerElement$IOSurfacePropertyKeyPlaneBytesPerRow$IOSurfacePropertyKeyPlaneElementHeight$IOSurfacePropertyKeyPlaneElementWidth$IOSurfacePropertyKeyPlaneHeight$IOSurfacePropertyKeyPlaneInfo$IOSurfacePropertyKeyPlaneOffset$IOSurfacePropertyKeyPlaneSize$IOSurfacePropertyKeyPlaneWidth$IOSurfacePropertyKeyWidth$kIOSurfaceAllocSize@^{__CFString=}$kIOSurfaceBytesPerElement@^{__CFString=}$kIOSurfaceBytesPerRow@^{__CFString=}$kIOSurfaceCacheMode@^{__CFString=}$kIOSurfaceElementHeight@^{__CFString=}$kIOSurfaceElementWidth@^{__CFString=}$kIOSurfaceHeight@^{__CFString=}$kIOSurfaceIsGlobal@^{__CFString=}$kIOSurfaceOffset@^{__CFString=}$kIOSurfacePixelFormat@^{__CFString=}$kIOSurfacePixelSizeCastingAllowed$kIOSurfacePlaneBase@^{__CFString=}$kIOSurfacePlaneBitsPerElement$kIOSurfacePlaneBytesPerElement@^{__CFString=}$kIOSurfacePlaneBytesPerRow@^{__CFString=}$kIOSurfacePlaneComponentBitDepths$kIOSurfacePlaneComponentBitOffsets$kIOSurfacePlaneComponentNames$kIOSurfacePlaneElementHeight@^{__CFString=}$kIOSurfacePlaneElementWidth@^{__CFString=}$kIOSurfacePlaneHeight@^{__CFString=}$kIOSurfacePlaneInfo@^{__CFString=}$kIOSurfacePlaneOffset@^{__CFString=}$kIOSurfacePlaneSize@^{__CFString=}$kIOSurfacePlaneWidth@^{__CFString=}$kIOSurfaceSubsampling$kIOSurfaceWidth@^{__CFString=}$''' -enums = '''$_IOSURFACE_API_H@1$_IOSURFACE_H@1$kIOSurfaceComponentNameAlpha@1$kIOSurfaceComponentNameBlue@4$kIOSurfaceComponentNameChromaBlue@7$kIOSurfaceComponentNameChromaRed@6$kIOSurfaceComponentNameGreen@3$kIOSurfaceComponentNameLuma@5$kIOSurfaceComponentNameRed@2$kIOSurfaceComponentNameUnknown@0$kIOSurfaceComponentRangeFullRange@1$kIOSurfaceComponentRangeUnknown@0$kIOSurfaceComponentRangeVideoRange@2$kIOSurfaceComponentRangeWideRange@3$kIOSurfaceComponentTypeFloat@3$kIOSurfaceComponentTypeSignedInteger@2$kIOSurfaceComponentTypeUnknown@0$kIOSurfaceComponentTypeUnsignedInteger@1$kIOSurfaceCopybackCache@3$kIOSurfaceCopybackInnerCache@5$kIOSurfaceDefaultCache@0$kIOSurfaceInhibitCache@1$kIOSurfaceLockAvoidSync@2$kIOSurfaceLockReadOnly@1$kIOSurfaceMapCacheShift@8$kIOSurfaceMapCopybackCache@768$kIOSurfaceMapCopybackInnerCache@1280$kIOSurfaceMapDefaultCache@0$kIOSurfaceMapInhibitCache@256$kIOSurfaceMapWriteCombineCache@1024$kIOSurfaceMapWriteThruCache@512$kIOSurfacePurgeableEmpty@2$kIOSurfacePurgeableKeepCurrent@3$kIOSurfacePurgeableNonVolatile@0$kIOSurfacePurgeableVolatile@1$kIOSurfaceSubsampling411@4$kIOSurfaceSubsampling420@3$kIOSurfaceSubsampling422@2$kIOSurfaceSubsamplingNone@1$kIOSurfaceSubsamplingUnknown@0$kIOSurfaceWriteCombineCache@4$kIOSurfaceWriteThruCache@2$''' -misc.update({}) -functions={'IOSurfaceGetSeed': (b'I^{__IOSurface=}',), 'IOSurfaceGetRangeOfComponentOfPlane': (b'i^{__IOSurface=}II',), 'IOSurfaceLookupFromMachPort': (b'^{__IOSurface=}I', '', {'retval': {'already_cfretained': True}}), 'IOSurfaceGetBytesPerRow': (sel32or64(b'L^{__IOSurface=}', b'Q^{__IOSurface=}'),), 'IOSurfaceGetUseCount': (b'i^{__IOSurface=}',), 'IOSurfaceSetValue': (b'v^{__IOSurface=}^{__CFString=}@',), 'IOSurfaceGetPlaneCount': (sel32or64(b'L^{__IOSurface=}', b'Q^{__IOSurface=}'),), 'IOSurfaceLock': (b'i^{__IOSurface=}I^I', '', 
{'arguments': {2: {'type_modifier': 'N'}}}), 'IOSurfaceDecrementUseCount': (b'v^{__IOSurface=}',), 'IOSurfaceGetTypeOfComponentOfPlane': (b'i^{__IOSurface=}II',), 'IOSurfaceLookupFromXPCObject': (b'^{__IOSurface=}@', '', {'retval': {'already_cfretained': True}}), 'IOSurfaceGetElementHeight': (sel32or64(b'L^{__IOSurface=}', b'Q^{__IOSurface=}'),), 'IOSurfaceGetBaseAddressOfPlane': (sel32or64(b'^v^{__IOSurface=}L', b'^v^{__IOSurface=}Q'), '', {'retval': {'c_array_of_variable_length': True}}), 'IOSurfaceGetSubsampling': (b'i^{__IOSurface=}',), 'IOSurfaceLookup': (b'^{__IOSurface=}I', '', {'retval': {'already_cfretained': True}}), 'IOSurfaceGetPixelFormat': (sel32or64(b'L^{__IOSurface=}', b'I^{__IOSurface=}'),), 'IOSurfaceGetBitOffsetOfComponentOfPlane': (b'I^{__IOSurface=}II',), 'IOSurfaceCopyValue': (b'@^{__IOSurface=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'IOSurfaceIncrementUseCount': (b'v^{__IOSurface=}',), 'IOSurfaceGetElementWidthOfPlane': (sel32or64(b'L^{__IOSurface=}L', b'Q^{__IOSurface=}Q'),), 'IOSurfaceGetID': (b'I^{__IOSurface=}',), 'IOSurfaceSetValues': (b'v^{__IOSurface=}^{__CFDictionary=}',), 'IOSurfaceRemoveAllValues': (b'v^{__IOSurface=}',), 'IOSurfaceGetTypeID': (sel32or64(b'L', b'Q'),), 'IOSurfaceCreateXPCObject': (b'@^{__IOSurface=}', '', {'retval': {'already_cfretained': True}}), 'IOSurfaceGetAllocSize': (sel32or64(b'L^{__IOSurface=}', b'Q^{__IOSurface=}'),), 'IOSurfaceGetBitDepthOfComponentOfPlane': (b'I^{__IOSurface=}II',), 'IOSurfaceGetElementWidth': (sel32or64(b'L^{__IOSurface=}', b'Q^{__IOSurface=}'),), 'IOSurfaceGetBytesPerElementOfPlane': (sel32or64(b'L^{__IOSurface=}L', b'Q^{__IOSurface=}Q'),), 'IOSurfaceCreateMachPort': (b'I^{__IOSurface=}', '', {'retval': {'already_cfretained': True}}), 'IOSurfaceGetNumberOfComponentsOfPlane': (b'I^{__IOSurface=}I',), 'IOSurfaceGetWidth': (sel32or64(b'L^{__IOSurface=}', b'Q^{__IOSurface=}'),), 'IOSurfaceRemoveValue': (b'v^{__IOSurface=}^{__CFString=}',), 'IOSurfaceGetHeightOfPlane': (sel32or64(b'L^{__IOSurface=}L', b'Q^{__IOSurface=}Q'),), 'IOSurfaceGetHeight': (sel32or64(b'L^{__IOSurface=}', b'Q^{__IOSurface=}'),), 'IOSurfaceGetBaseAddress': (b'^v^{__IOSurface=}', '', {'retval': {'c_array_of_variable_length': True}}), 'IOSurfaceAlignProperty': (sel32or64(b'L^{__CFString=}L', b'Q^{__CFString=}Q'),), 'IOSurfaceGetBytesPerRowOfPlane': (sel32or64(b'L^{__IOSurface=}L', b'Q^{__IOSurface=}Q'),), 'IOSurfaceCreate': (b'^{__IOSurface=}^{__CFDictionary=}', '', {'retval': {'already_retained': True, 'already_cfretained': True}}), 'IOSurfaceGetElementHeightOfPlane': (sel32or64(b'L^{__IOSurface=}L', b'Q^{__IOSurface=}Q'),), 'IOSurfaceGetBytesPerElement': (sel32or64(b'L^{__IOSurface=}', b'Q^{__IOSurface=}'),), 'IOSurfaceGetPropertyAlignment': (sel32or64(b'L^{__CFString=}', b'Q^{__CFString=}'),), 'IOSurfaceGetNameOfComponentOfPlane': (b'i^{__IOSurface=}II',), 'IOSurfaceUnlock': (b'i^{__IOSurface=}I^I', '', {'arguments': {2: {'type_modifier': 'N'}}}), 'IOSurfaceGetPropertyMaximum': (sel32or64(b'L^{__CFString=}', b'Q^{__CFString=}'),), 'IOSurfaceGetWidthOfPlane': (sel32or64(b'L^{__IOSurface=}L', b'Q^{__IOSurface=}Q'),), 'IOSurfaceIsInUse': (b'Z^{__IOSurface=}',), 'IOSurfaceAllowsPixelSizeCasting': (b'Z^{__IOSurface=}',), 'IOSurfaceCopyAllValues': (b'^{__CFDictionary=}^{__IOSurface=}', '', {'retval': {'already_cfretained': True}})} -aliases = {'IOSFC_AVAILABLE_BUT_DEPRECATED': '__OSX_AVAILABLE_BUT_DEPRECATED', 'IOSFC_AVAILABLE_STARTING': '__OSX_AVAILABLE_STARTING', 'IOSFC_DEPRECATED': 'DEPRECATED_ATTRIBUTE'} 
-cftypes=[('IOSurfaceRef', b'^{__IOSurface=}', 'IOSurfaceGetTypeID', None)] -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'IOSurface', b'allowsPixelSizeCasting', {'retval': {'type': 'Z'}}) - r(b'IOSurface', b'baseAddressOfPlaneAtIndex:', {'retval': {'c_array_of_variable_length': True}}) - r(b'IOSurface', b'isInUse', {'retval': {'type': 'Z'}}) - r(b'IOSurface', b'lockWithOptions:seed:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'IOSurface', b'setPurgeable:oldState:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'IOSurface', b'unlockWithOptions:seed:', {'arguments': {3: {'type_modifier': b'o'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/IOSurface/_metadata.pyc b/env/lib/python2.7/site-packages/IOSurface/_metadata.pyc deleted file mode 100644 index f89c3436..00000000 Binary files a/env/lib/python2.7/site-packages/IOSurface/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ImageCaptureCore/_ImageCaptureCore.so b/env/lib/python2.7/site-packages/ImageCaptureCore/_ImageCaptureCore.so deleted file mode 100755 index 67feed93..00000000 Binary files a/env/lib/python2.7/site-packages/ImageCaptureCore/_ImageCaptureCore.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ImageCaptureCore/__init__.py b/env/lib/python2.7/site-packages/ImageCaptureCore/__init__.py deleted file mode 100644 index 73f43534..00000000 --- a/env/lib/python2.7/site-packages/ImageCaptureCore/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the ImageCaptureCore framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import Foundation - -from ImageCaptureCore import _metadata, _ImageCaptureCore - -sys.modules['ImageCaptureCore'] = mod = objc.ObjCLazyModule('ImageCaptureCore', - "com.apple.ImageCaptureCoreFramework", - objc.pathForFramework("/System/Library/Frameworks/ImageCaptureCore.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation,)) - -import sys -del sys.modules['ImageCaptureCore._metadata'] diff --git a/env/lib/python2.7/site-packages/ImageCaptureCore/__init__.pyc b/env/lib/python2.7/site-packages/ImageCaptureCore/__init__.pyc deleted file mode 100644 index 707bf7ae..00000000 Binary files a/env/lib/python2.7/site-packages/ImageCaptureCore/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ImageCaptureCore/_metadata.py b/env/lib/python2.7/site-packages/ImageCaptureCore/_metadata.py deleted file mode 100644 index 4391199f..00000000 --- a/env/lib/python2.7/site-packages/ImageCaptureCore/_metadata.py +++ /dev/null @@ -1,98 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Jun 6 22:36:21 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$ICButtonTypeCopy$ICButtonTypeMail$ICButtonTypePrint$ICButtonTypeScan$ICButtonTypeTransfer$ICButtonTypeWeb$ICCameraDeviceCanAcceptPTPCommands$ICCameraDeviceCanDeleteAllFiles$ICCameraDeviceCanDeleteOneFile$ICCameraDeviceCanReceiveFile$ICCameraDeviceCanSyncClock$ICCameraDeviceCanTakePicture$ICCameraDeviceCanTakePictureUsingShutterReleaseOnCamera$ICCameraDeviceSupportsFastPTP$ICDeleteAfterSuccessfulDownload$ICDeviceCanEjectOrDisconnect$ICDeviceLocationDescriptionBluetooth$ICDeviceLocationDescriptionFireWire$ICDeviceLocationDescriptionMassStorage$ICDeviceLocationDescriptionUSB$ICDownloadSidecarFiles$ICDownloadsDirectoryURL$ICLocalizedStatusNotificationKey$ICOverwrite$ICSaveAsFilename$ICSavedAncillaryFiles$ICSavedFilename$ICScannerStatusRequestsOverviewScan$ICScannerStatusWarmUpDone$ICScannerStatusWarmingUp$ICStatusCodeKey$ICStatusNotificationKey$ICTransportTypeBluetooth$ICTransportTypeFireWire$ICTransportTypeMassStorage$ICTransportTypeTCPIP$ICTransportTypeUSB$''' -enums = 
'''$ICDeviceLocationTypeBluetooth@2048$ICDeviceLocationTypeBonjour@1024$ICDeviceLocationTypeLocal@256$ICDeviceLocationTypeMaskBluetooth@2048$ICDeviceLocationTypeMaskBonjour@1024$ICDeviceLocationTypeMaskLocal@256$ICDeviceLocationTypeMaskRemote@65024$ICDeviceLocationTypeMaskShared@512$ICDeviceLocationTypeShared@512$ICDeviceTypeCamera@1$ICDeviceTypeMaskCamera@1$ICDeviceTypeMaskScanner@2$ICDeviceTypeScanner@2$ICEXIFOrientation1@1$ICEXIFOrientation2@2$ICEXIFOrientation3@3$ICEXIFOrientation4@4$ICEXIFOrientation5@5$ICEXIFOrientation6@6$ICEXIFOrientation7@7$ICEXIFOrientation8@8$ICReturnCodeDeleteOffset@-21150$ICReturnCodeDownloadOffset@-21100$ICReturnCodeExFATOffset@-21200$ICReturnCodeMetadataOffset@-21050$ICReturnCodePTPOffset@-21250$ICReturnCodeSystemOffset@-21300$ICReturnCodeThumbnailOffset@-21000$ICReturnCommunicationTimedOut@-9923$ICReturnDeleteFilesCanceled@-9942$ICReturnDeleteFilesFailed@-9941$ICReturnDeviceCommandGeneralFailure@-9955$ICReturnDeviceCouldNotPair@-9951$ICReturnDeviceCouldNotUnpair@-9952$ICReturnDeviceFailedToCloseSession@-9928$ICReturnDeviceFailedToCompleteTransfer@-9956$ICReturnDeviceFailedToOpenSession@-9927$ICReturnDeviceFailedToSendData@-9957$ICReturnDeviceFailedToTakePicture@-9944$ICReturnDeviceIsBusyEnumerating@-9954$ICReturnDeviceIsPasscodeLocked@-9943$ICReturnDeviceNeedsCredentials@-9953$ICReturnDeviceSoftwareInstallationCanceled@-9948$ICReturnDeviceSoftwareInstallationCompleted@-9947$ICReturnDeviceSoftwareInstallationFailed@-9949$ICReturnDeviceSoftwareIsBeingInstalled@-9946$ICReturnDeviceSoftwareNotAvailable@-9950$ICReturnDeviceSoftwareNotInstalled@-9945$ICReturnDownloadCanceled@-9937$ICReturnDownloadFailed@-9934$ICReturnErrorDeviceEjected@-21300$ICReturnFailedToCompletePassThroughCommand@-9936$ICReturnFailedToCompleteSendMessageRequest@-9940$ICReturnFailedToDisabeTethering@-9939$ICReturnFailedToEnabeTethering@-9938$ICReturnInvalidParam@-9922$ICReturnMetadataAlreadyFetching@-21051$ICReturnMetadataCanceled@-21052$ICReturnMetadataInvalid@-21053$ICReturnMetadataNotAvailable@-21050$ICReturnMultiErrorDictionary@-30000$ICReturnReceivedUnsolicitedScannerErrorInfo@-9933$ICReturnReceivedUnsolicitedScannerStatusInfo@-9932$ICReturnScanOperationCanceled@-9924$ICReturnScannerFailedToCompleteOverviewScan@-9930$ICReturnScannerFailedToCompleteScan@-9931$ICReturnScannerFailedToSelectFunctionalUnit@-9929$ICReturnScannerInUseByLocalUser@-9925$ICReturnScannerInUseByRemoteUser@-9926$ICReturnSessionNotOpened@-9958$ICReturnSuccess@0$ICReturnThumbnailAlreadyFetching@-21001$ICReturnThumbnailCanceled@-21002$ICReturnThumbnailInvalid@-21003$ICReturnThumbnailNotAvailable@-21000$ICReturnUploadFailed@-9935$ICScannerBitDepth16Bits@16$ICScannerBitDepth1Bit@1$ICScannerBitDepth8Bits@8$ICScannerColorDataFormatTypeChunky@0$ICScannerColorDataFormatTypePlanar@1$ICScannerDocumentType10@25$ICScannerDocumentType10R@67$ICScannerDocumentType110@72$ICScannerDocumentType11R@69$ICScannerDocumentType12R@70$ICScannerDocumentType135@76$ICScannerDocumentType2A0@18$ICScannerDocumentType3R@61$ICScannerDocumentType4A0@17$ICScannerDocumentType4R@62$ICScannerDocumentType5R@63$ICScannerDocumentType6R@64$ICScannerDocumentType8R@65$ICScannerDocumentTypeA0@19$ICScannerDocumentTypeA1@20$ICScannerDocumentTypeA2@21$ICScannerDocumentTypeA3@11$ICScannerDocumentTypeA4@1$ICScannerDocumentTypeA5@5$ICScannerDocumentTypeA6@13$ICScannerDocumentTypeA7@22$ICScannerDocumentTypeA8@23$ICScannerDocumentTypeA9@24$ICScannerDocumentTypeAPSC@74$ICScannerDocumentTypeAPSH@73$ICScannerDocumentTypeAPSP@75$ICScannerDocumentTypeB5@2$ICScannerDocumentTyp
eBusinessCard@53$ICScannerDocumentTypeC0@44$ICScannerDocumentTypeC1@45$ICScannerDocumentTypeC10@51$ICScannerDocumentTypeC2@46$ICScannerDocumentTypeC3@47$ICScannerDocumentTypeC4@14$ICScannerDocumentTypeC5@15$ICScannerDocumentTypeC6@16$ICScannerDocumentTypeC7@48$ICScannerDocumentTypeC8@49$ICScannerDocumentTypeC9@50$ICScannerDocumentTypeDefault@0$ICScannerDocumentTypeE@60$ICScannerDocumentTypeISOB0@26$ICScannerDocumentTypeISOB1@27$ICScannerDocumentTypeISOB10@33$ICScannerDocumentTypeISOB2@28$ICScannerDocumentTypeISOB3@12$ICScannerDocumentTypeISOB4@6$ICScannerDocumentTypeISOB5@29$ICScannerDocumentTypeISOB6@7$ICScannerDocumentTypeISOB7@30$ICScannerDocumentTypeISOB8@31$ICScannerDocumentTypeISOB9@32$ICScannerDocumentTypeJISB0@34$ICScannerDocumentTypeJISB1@35$ICScannerDocumentTypeJISB10@43$ICScannerDocumentTypeJISB2@36$ICScannerDocumentTypeJISB3@37$ICScannerDocumentTypeJISB4@38$ICScannerDocumentTypeJISB6@39$ICScannerDocumentTypeJISB7@40$ICScannerDocumentTypeJISB8@41$ICScannerDocumentTypeJISB9@42$ICScannerDocumentTypeLF@78$ICScannerDocumentTypeMF@77$ICScannerDocumentTypeS10R@68$ICScannerDocumentTypeS12R@71$ICScannerDocumentTypeS8R@66$ICScannerDocumentTypeUSExecutive@10$ICScannerDocumentTypeUSLedger@9$ICScannerDocumentTypeUSLegal@4$ICScannerDocumentTypeUSLetter@3$ICScannerDocumentTypeUSStatement@52$ICScannerFeatureTypeBoolean@2$ICScannerFeatureTypeEnumeration@0$ICScannerFeatureTypeRange@1$ICScannerFeatureTypeTemplate@3$ICScannerFunctionalUnitStateOverviewScanInProgress@4$ICScannerFunctionalUnitStateReady@1$ICScannerFunctionalUnitStateScanInProgress@2$ICScannerFunctionalUnitTypeDocumentFeeder@3$ICScannerFunctionalUnitTypeFlatbed@0$ICScannerFunctionalUnitTypeNegativeTransparency@2$ICScannerFunctionalUnitTypePositiveTransparency@1$ICScannerMeasurementUnitCentimeters@1$ICScannerMeasurementUnitInches@0$ICScannerMeasurementUnitPicas@2$ICScannerMeasurementUnitPixels@5$ICScannerMeasurementUnitPoints@3$ICScannerMeasurementUnitTwips@4$ICScannerPixelDataTypeBW@0$ICScannerPixelDataTypeCIEXYZ@8$ICScannerPixelDataTypeCMY@4$ICScannerPixelDataTypeCMYK@5$ICScannerPixelDataTypeGray@1$ICScannerPixelDataTypePalette@3$ICScannerPixelDataTypeRGB@2$ICScannerPixelDataTypeYUV@6$ICScannerPixelDataTypeYUVK@7$ICScannerTransferModeFileBased@0$ICScannerTransferModeMemoryBased@1$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'ICCameraDevice', b'batteryLevelAvailable', {'retval': {'type': 'Z'}}) - r(b'ICCameraDevice', b'isAccessRestrictedAppleDevice', {'retval': {'type': 'Z'}}) - r(b'ICCameraDevice', b'requestDownloadFile:options:downloadDelegate:didDownloadSelector:contextInfo:', {'arguments': {5: {'sel_of_type': b'v@:@@@^v'}}}) - r(b'ICCameraDevice', b'requestReadDataFromFile:atOffset:length:readDelegate:didReadDataSelector:contextInfo:', {'arguments': {6: {'sel_of_type': b'v@:@@@^v'}}}) - r(b'ICCameraDevice', b'requestSendPTPCommand:outData:sendCommandDelegate:didSendCommandSelector:contextInfo:', {'arguments': {5: {'sel_of_type': b'v@:@@@@^v'}}}) - r(b'ICCameraDevice', b'requestUploadFile:options:uploadDelegate:didUploadSelector:contextInfo:', {'arguments': {5: {'sel_of_type': b'v@:@@^v'}}}) - r(b'ICCameraDevice', b'setTetheredCaptureEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ICCameraDevice', b'tetheredCaptureEnabled', {'retval': {'type': 'Z'}}) - r(b'ICCameraItem', b'isInTemporaryStore', {'retval': {'type': 'Z'}}) - r(b'ICCameraItem', b'isLocked', {'retval': {'type': 'Z'}}) - r(b'ICCameraItem', b'isRaw', {'retval': {'type': 'Z'}}) - r(b'ICCameraItem', 
b'wasAddedAfterContentCatalogCompleted', {'retval': {'type': 'Z'}}) - r(b'ICDevice', b'hasConfigurableWiFiInterface', {'retval': {'type': 'Z'}}) - r(b'ICDevice', b'hasOpenSession', {'retval': {'type': 'Z'}}) - r(b'ICDevice', b'isRemote', {'retval': {'type': 'Z'}}) - r(b'ICDevice', b'isShared', {'retval': {'type': 'Z'}}) - r(b'ICDevice', b'requestSendMessage:outData:maxReturnedDataSize:sendMessageDelegate:didSendMessageSelector:contextInfo:', {'arguments': {6: {'sel_of_type': b'v@:I@@^v'}}}) - r(b'ICDeviceBrowser', b'isBrowsing', {'retval': {'type': 'Z'}}) - r(b'ICScannerBandData', b'isBigEndian', {'retval': {'type': 'Z'}}) - r(b'ICScannerFeatureBoolean', b'setValue:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ICScannerFeatureBoolean', b'value', {'retval': {'type': 'Z'}}) - r(b'ICScannerFunctionalUnit', b'acceptsThresholdForBlackAndWhiteScanning', {'retval': {'type': 'Z'}}) - r(b'ICScannerFunctionalUnit', b'canPerformOverviewScan', {'retval': {'type': 'Z'}}) - r(b'ICScannerFunctionalUnit', b'overviewScanInProgress', {'retval': {'type': 'Z'}}) - r(b'ICScannerFunctionalUnit', b'scanInProgress', {'retval': {'type': 'Z'}}) - r(b'ICScannerFunctionalUnit', b'setUsesThresholdForBlackAndWhiteScanning:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ICScannerFunctionalUnit', b'usesThresholdForBlackAndWhiteScanning', {'retval': {'type': 'Z'}}) - r(b'ICScannerFunctionalUnitDocumentFeeder', b'documentLoaded', {'retval': {'type': 'Z'}}) - r(b'ICScannerFunctionalUnitDocumentFeeder', b'duplexScanningEnabled', {'retval': {'type': 'Z'}}) - r(b'ICScannerFunctionalUnitDocumentFeeder', b'reverseFeederPageOrder', {'retval': {'type': 'Z'}}) - r(b'ICScannerFunctionalUnitDocumentFeeder', b'setDuplexScanningEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ICScannerFunctionalUnitDocumentFeeder', b'supportsDuplexScanning', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'cameraDevice:didAddItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:didAddItems:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:didCompleteDeleteFilesWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:didReceiveMetadataForItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:didReceivePTPEvent:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:didReceiveThumbnailForItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:didRemoveItem:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:didRemoveItems:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:didRenameItems:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:shouldGetMetadataOfItem:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDevice:shouldGetThumbnailOfItem:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) 
- r(b'NSObject', b'cameraDeviceDidChangeCapability:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'device:didCloseSessionWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'device:didEncounterError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'device:didOpenSessionWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'device:didReceiveButtonPress:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'device:didReceiveCustomNotification:data:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'device:didReceiveStatusInformation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'deviceBrowser:deviceDidChangeName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'deviceBrowser:deviceDidChangeSharingState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'deviceBrowser:didAddDevice:moreComing:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'deviceBrowser:didRemoveDevice:moreGoing:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'deviceBrowser:requestsSelectDevice:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'deviceBrowserDidEnumerateLocalDevices:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'deviceDidBecomeReady:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'deviceDidBecomeReadyWithCompleteContentCatalog:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'deviceDidChangeName:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'deviceDidChangeSharingState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'didDownloadFile:error:options:contextInfo:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': '^v'}}}) - r(b'NSObject', b'didReceiveDownloadProgressForFile:downloadedBytes:maxBytes:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'q'}, 4: {'type': b'q'}}}) - r(b'NSObject', b'didRemoveDevice:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'scannerDevice:didCompleteOverviewScanWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'scannerDevice:didCompleteScanWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'scannerDevice:didScanToBandData:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: 
{'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'scannerDevice:didScanToURL:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'scannerDevice:didScanToURL:data:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'scannerDevice:didSelectFunctionalUnit:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'scannerDeviceDidBecomeAvailable:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/ImageCaptureCore/_metadata.pyc b/env/lib/python2.7/site-packages/ImageCaptureCore/_metadata.pyc deleted file mode 100644 index 8a47d3db..00000000 Binary files a/env/lib/python2.7/site-packages/ImageCaptureCore/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/InputMethodKit/_InputMethodKit.so b/env/lib/python2.7/site-packages/InputMethodKit/_InputMethodKit.so deleted file mode 100755 index f534fc8e..00000000 Binary files a/env/lib/python2.7/site-packages/InputMethodKit/_InputMethodKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/InputMethodKit/__init__.py b/env/lib/python2.7/site-packages/InputMethodKit/__init__.py deleted file mode 100644 index c35b3f28..00000000 --- a/env/lib/python2.7/site-packages/InputMethodKit/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the InputMethodKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import Foundation - -from InputMethodKit import _metadata -from InputMethodKit._InputMethodKit import * - -sys.modules['InputMethodKit'] = mod = objc.ObjCLazyModule('InputMethodKit', - "com.apple.InputMethodKit", - objc.pathForFramework("/System/Library/Frameworks/InputMethodKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation,)) - -import sys -del sys.modules['InputMethodKit._metadata'] diff --git a/env/lib/python2.7/site-packages/InputMethodKit/__init__.pyc b/env/lib/python2.7/site-packages/InputMethodKit/__init__.pyc deleted file mode 100644 index 1b72b9d4..00000000 Binary files a/env/lib/python2.7/site-packages/InputMethodKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/InputMethodKit/_metadata.py b/env/lib/python2.7/site-packages/InputMethodKit/_metadata.py deleted file mode 100644 index 9e1280a4..00000000 --- a/env/lib/python2.7/site-packages/InputMethodKit/_metadata.py +++ /dev/null @@ -1,73 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Dec 31 16:50:47 2014 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$IMKCandidatesOpacityAttributeName$IMKCandidatesSendServerKeyEventFirst$IMKControllerClass$IMKDelegateClass$IMKModeDictionary$kIMKCommandClientName$kIMKCommandMenuItemName$''' -enums = '''$kIMKAnnotation@1$kIMKLocateCandidatesAboveHint@1$kIMKLocateCandidatesBelowHint@2$kIMKLocateCandidatesLeftHint@3$kIMKLocateCandidatesRightHint@4$kIMKMain@0$kIMKScrollingGridCandidatePanel@2$kIMKSingleColumnScrollingCandidatePanel@1$kIMKSingleRowSteppingCandidatePanel@3$kIMKSubList@2$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'IMKCandidates', b'dismissesAutomatically', {'retval': {'type': 'Z'}}) - r(b'IMKCandidates', b'isVisible', {'retval': {'type': 'Z'}}) - r(b'IMKCandidates', b'selectCandidateWithIdentifier:', {'retval': {'type': b'Z'}}) - r(b'IMKCandidates', b'selectionKeysKeylayout', {'retval': {'type': '^{__TISInputSource=}'}}) - r(b'IMKCandidates', b'setDismissesAutomatically:', {'arguments': {2: {'type': 'Z'}}}) - r(b'IMKCandidates', b'setSelectionKeysKeylayout:', {'arguments': {2: {'type': '^{__TISInputSource=}'}}}) - r(b'IMKInputController', b'doCommandBySelector:commandDictionary:', {'arguments': {2: {'type': ':', 'sel_of_type': b'v@:@'}}}) - r(b'IMKServer', b'lastKeyEventWasDeadKey', {'retval': {'type': b'Z'}}) - r(b'IMKServer', b'paletteWillTerminate', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'didCommandBySelector:client:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'handleEvent:client:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'inputText:client:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'inputText:key:modifiers:client:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'mouseDownOnCharacterIndex:coordinate:withModifier:continueTracking:client:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': '^Z', 'type_modifier': b'o'}}}) - r(b'NSObject', b'mouseMovedOnCharacterIndex:coordinate:withModifier:client:', {'retval': {'type': 'Z'}, 
'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'mouseUpOnCharacterIndex:coordinate:withModifier:client:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'I', b'Q')}}}) -finally: - objc._updatingMetadata(False) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'activateServer:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'deactivateServer:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'modes:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mouseDownOnCharacterIndex:coordinate:withModifier:continueTracking:client:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': '^Z', 'type_modifier': b'o'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'mouseMovedOnCharacterIndex:coordinate:withModifier:client:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'mouseUpOnCharacterIndex:coordinate:withModifier:client:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'recognizedEvents:', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setValue:forTag:client:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'l', b'q')}, 4: {'type': b'@'}}}) - r(b'NSObject', b'showPreferences:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'valueForTag:client:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'l', b'q')}, 3: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'candidates:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'commitComposition:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'composedString:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'didCommandBySelector:client:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': ':'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'handleEvent:client:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'inputText:client:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'inputText:key:modifiers:client:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'originalString:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -protocols={'IMKServerInput': 
objc.informal_protocol('IMKServerInput', [objc.selector(None, b'inputText:client:', b'Z@:@@', isRequired=False), objc.selector(None, b'candidates:', b'@@:@', isRequired=False), objc.selector(None, b'didCommandBySelector:client:', b'Z@::@', isRequired=False), objc.selector(None, b'handleEvent:client:', b'Z@:@@', isRequired=False), objc.selector(None, b'composedString:', b'@@:@', isRequired=False), objc.selector(None, b'inputText:key:modifiers:client:', sel32or64(b'Z@:@iI@', b'Z@:@qQ@'), isRequired=False), objc.selector(None, b'commitComposition:', b'v@:@', isRequired=False), objc.selector(None, b'originalString:', b'@@:@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/InputMethodKit/_metadata.pyc b/env/lib/python2.7/site-packages/InputMethodKit/_metadata.pyc deleted file mode 100644 index 47bd8deb..00000000 Binary files a/env/lib/python2.7/site-packages/InputMethodKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/InstallerPlugins/__init__.py b/env/lib/python2.7/site-packages/InstallerPlugins/__init__.py deleted file mode 100644 index 05e5e0eb..00000000 --- a/env/lib/python2.7/site-packages/InstallerPlugins/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the InstallerPlugins framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import AppKit - -from InstallerPlugins import _metadata - -sys.modules['InstallerPlugins'] = mod = objc.ObjCLazyModule('InstallerPlugins', - "com.apple.InstallerPlugins", - objc.pathForFramework("/System/Library/Frameworks/InstallerPlugins.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( AppKit,)) - -import sys -del sys.modules['InstallerPlugins._metadata'] diff --git a/env/lib/python2.7/site-packages/InstallerPlugins/__init__.pyc b/env/lib/python2.7/site-packages/InstallerPlugins/__init__.pyc deleted file mode 100644 index 4ad79f59..00000000 Binary files a/env/lib/python2.7/site-packages/InstallerPlugins/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/InstallerPlugins/_metadata.py b/env/lib/python2.7/site-packages/InstallerPlugins/_metadata.py deleted file mode 100644 index d683f9d6..00000000 --- a/env/lib/python2.7/site-packages/InstallerPlugins/_metadata.py +++ /dev/null @@ -1,41 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Fri Sep 21 15:13:54 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'InstallerSection_Private': objc.createStructType('InstallerSection_Private', b'{InstallerSection_Private=}', []), 'InstallerState_Private': objc.createStructType('InstallerState_Private', b'{InstallerState_Private=}', []), 'InstallerPane_Private': objc.createStructType('InstallerPane_Private', b'{InstallerPane_Private=}', [])}) -constants = '''$InstallerState_Choice_CustomLocation$InstallerState_Choice_Identifier$InstallerState_Choice_Installed$''' -enums = '''$InstallerDirectionBackward@1$InstallerDirectionForward@0$InstallerDirectionUndefined@2$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - 
r(b'InstallerPane', b'gotoNextPane', {'retval': {'type': 'Z'}}) - r(b'InstallerPane', b'gotoPreviousPane', {'retval': {'type': 'Z'}}) - r(b'InstallerPane', b'nextEnabled', {'retval': {'type': 'Z'}}) - r(b'InstallerPane', b'previousEnabled', {'retval': {'type': 'Z'}}) - r(b'InstallerPane', b'setNextEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'InstallerPane', b'setPreviousEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'InstallerPane', b'shouldExitPane:', {'retval': {'type': 'Z'}}) - r(b'InstallerSection', b'gotoPane:', {'retval': {'type': 'Z'}}) - r(b'InstallerSection', b'shouldLoad', {'retval': {'type': 'Z'}}) - r(b'InstallerState', b'installStarted', {'retval': {'type': 'Z'}}) - r(b'InstallerState', b'installSucceeded', {'retval': {'type': 'Z'}}) - r(b'InstallerState', b'licenseAgreed', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/InstallerPlugins/_metadata.pyc b/env/lib/python2.7/site-packages/InstallerPlugins/_metadata.pyc deleted file mode 100644 index 446ba6ad..00000000 Binary files a/env/lib/python2.7/site-packages/InstallerPlugins/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/InstantMessage/__init__.py b/env/lib/python2.7/site-packages/InstantMessage/__init__.py deleted file mode 100644 index 3fb7e89a..00000000 --- a/env/lib/python2.7/site-packages/InstantMessage/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the InstantMessage framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Foundation -import Quartz - -from InstantMessage import _metadata - -sys.modules['InstantMessage'] = mod = objc.ObjCLazyModule('InstantMessage', - "com.apple.IMFramework", - objc.pathForFramework("/System/Library/Frameworks/InstantMessage.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation, Quartz,)) - -import sys -del sys.modules['InstantMessage._metadata'] diff --git a/env/lib/python2.7/site-packages/InstantMessage/__init__.pyc b/env/lib/python2.7/site-packages/InstantMessage/__init__.pyc deleted file mode 100644 index 29a0f035..00000000 Binary files a/env/lib/python2.7/site-packages/InstantMessage/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/InstantMessage/_metadata.py b/env/lib/python2.7/site-packages/InstantMessage/_metadata.py deleted file mode 100644 index d749d2c9..00000000 --- a/env/lib/python2.7/site-packages/InstantMessage/_metadata.py +++ /dev/null @@ -1,46 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Dec 31 16:26:55 2014 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$IMAVManagerStateChangedNotification$IMAVManagerURLToShareChangedNotification$IMCapabilityAudioConference$IMCapabilityDirectIM$IMCapabilityFileSharing$IMCapabilityFileTransfer$IMCapabilityText$IMCapabilityVideoConference$IMMyStatusChangedNotification$IMPersonAVBusyKey$IMPersonCapabilitiesKey$IMPersonEmailKey$IMPersonFirstNameKey$IMPersonIdleSinceKey$IMPersonInfoChangedNotification$IMPersonLastNameKey$IMPersonPictureDataKey$IMPersonScreenNameKey$IMPersonServiceNameKey$IMPersonStatusChangedNotification$IMPersonStatusKey$IMPersonStatusMessageKey$IMServiceStatusChangedNotification$IMStatusImagesChangedAppearanceNotification$''' -enums = '''$IMAVInactive@0$IMAVPending@4$IMAVRequested@1$IMAVRunning@5$IMAVShuttingDown@2$IMAVStartingUp@3$IMPersonStatusAvailable@4$IMPersonStatusAway@3$IMPersonStatusIdle@2$IMPersonStatusNoStatus@5$IMPersonStatusOffline@1$IMPersonStatusUnknown@0$IMServiceStatusDisconnected@1$IMServiceStatusLoggedIn@4$IMServiceStatusLoggedOut@0$IMServiceStatusLoggingIn@3$IMServiceStatusLoggingOut@2$IMVideoOptimizationDefault@0$IMVideoOptimizationReplacement@2$IMVideoOptimizationStills@1$''' -misc.update({}) -functions={'IMComparePersonStatus': (sel32or64(b'iII', b'qQQ'),)} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'IMAVControl', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'IMAVControl', b'setAction:', {'arguments': {2: {'sel_of_type': b'v@:@'}}}) - r(b'IMAVControl', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'getOpenGLBufferContext:pixelFormat:', {'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}}}) - r(b'NSObject', b'getPixelBufferPixelFormat:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NSObject', b'renderIntoOpenGLBuffer:onScreen:forTime:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^{__CVBuffer=}'}, 3: {'type_modifier': b'n'}, 4: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}}) - r(b'NSObject', b'renderIntoPixelBuffer:forTime:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^{__CVBuffer=}'}, 3: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}}) -finally: - objc._updatingMetadata(False) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'getOpenGLBufferContext:pixelFormat:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'^^{_CGLContextObject=}', 'type_modifier': b'o'}, 3: {'type': b'^^{_CGLPixelFormatObject=}', 'type_modifier': b'o'}}}) - r(b'NSObject', b'getPixelBufferPixelFormat:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'^I', 'type_modifier': b'o'}}}) - r(b'NSObject', b'renderIntoOpenGLBuffer:onScreen:forTime:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^{__CVBuffer=}'}, 3: {'type': b'^i', 'type_modifier': b'n'}, 4: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}}) - r(b'NSObject', b'renderIntoPixelBuffer:forTime:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^{__CVBuffer=}'}, 3: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}}) -finally: - objc._updatingMetadata(False) -protocols={'IMVideoDataSource': objc.informal_protocol('IMVideoDataSource', [objc.selector(None, 
b'renderIntoPixelBuffer:forTime:', sel32or64(b'Z@:^{__CVBuffer=}^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'Z@:^{__CVBuffer=}^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), isRequired=False), objc.selector(None, b'getPixelBufferPixelFormat:', b'v@:^I', isRequired=False), objc.selector(None, b'getOpenGLBufferContext:pixelFormat:', b'v@:^^{_CGLContextObject=}^^{_CGLPixelFormatObject=}', isRequired=False), objc.selector(None, b'renderIntoOpenGLBuffer:onScreen:forTime:', sel32or64(b'Z@:^{__CVBuffer=}^i^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'Z@:^{__CVBuffer=}^i^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/InstantMessage/_metadata.pyc b/env/lib/python2.7/site-packages/InstantMessage/_metadata.pyc deleted file mode 100644 index a8b996ef..00000000 Binary files a/env/lib/python2.7/site-packages/InstantMessage/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Intents/_Intents.so b/env/lib/python2.7/site-packages/Intents/_Intents.so deleted file mode 100755 index 4a5cf761..00000000 Binary files a/env/lib/python2.7/site-packages/Intents/_Intents.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Intents/__init__.py b/env/lib/python2.7/site-packages/Intents/__init__.py deleted file mode 100644 index 1c3e90d3..00000000 --- a/env/lib/python2.7/site-packages/Intents/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the Intents framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from Intents import _metadata -from Intents._Intents import * - - -sys.modules['Intents'] = mod = objc.ObjCLazyModule( - "Intents", - "com.apple.Intents", - objc.pathForFramework("/System/Library/Frameworks/Intents.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['Intents._metadata'] diff --git a/env/lib/python2.7/site-packages/Intents/__init__.pyc b/env/lib/python2.7/site-packages/Intents/__init__.pyc deleted file mode 100644 index 3d83be73..00000000 Binary files a/env/lib/python2.7/site-packages/Intents/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Intents/_metadata.py b/env/lib/python2.7/site-packages/Intents/_metadata.py deleted file mode 100644 index 0ae92e5d..00000000 --- a/env/lib/python2.7/site-packages/Intents/_metadata.py +++ /dev/null @@ -1,327 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Oct 31 11:38:30 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$INCancelWorkoutIntentIdentifier$INEndWorkoutIntentIdentifier$INGetRideStatusIntentIdentifier$INIntentErrorDomain$INListRideOptionsIntentIdentifier$INPauseWorkoutIntentIdentifier$INPersonHandleLabelHome$INPersonHandleLabelHomeFax$INPersonHandleLabelMain$INPersonHandleLabelMobile$INPersonHandleLabelOther$INPersonHandleLabelPager$INPersonHandleLabelWork$INPersonHandleLabelWorkFax$INPersonHandleLabeliPhone$INPersonRelationshipAssistant$INPersonRelationshipBrother$INPersonRelationshipChild$INPersonRelationshipFather$INPersonRelationshipFriend$INPersonRelationshipManager$INPersonRelationshipMother$INPersonRelationshipParent$INPersonRelationshipPartner$INPersonRelationshipSister$INPersonRelationshipSpouse$INRequestPaymentIntentIdentifier$INRequestRideIntentIdentifier$INResumeWorkoutIntentIdentifier$INSaveProfileInCarIntentIdentifier$INSearchCallHistoryIntentIdentifier$INSearchForMessagesIntentIdentifier$INSearchForPhotosIntentIdentifier$INSendMessageIntentIdentifier$INSendPaymentIntentIdentifier$INSetAudioSourceInCarIntentIdentifier$INSetClimateSettingsInCarIntentIdentifier$INSetDefrosterSettingsInCarIntentIdentifier$INSetMessageAttributeIntentIdentifier$INSetProfileInCarIntentIdentifier$INSetRadioStationIntentIdentifier$INSetSeatSettingsInCarIntentIdentifier$INStartAudioCallIntentIdentifier$INStartPhotoPlaybackIntentIdentifier$INStartVideoCallIntentIdentifier$INStartWorkoutIntentIdentifier$INWorkoutNameIdentifierCrosstraining$INWorkoutNameIdentifierCycle$INWorkoutNameIdentifierDance$INWorkoutNameIdentifierElliptical$INWorkoutNameIdentifierExercise$INWorkoutNameIdentifierHighIntensityIntervalTraining$INWorkoutNameIdentifierHike$INWorkoutNameIdentifierIndoorcycle$INWorkoutNameIdentifierIndoorrun$INWorkoutNameIdentifierIndoorwalk$INWorkoutNameIdentifierMove$INWorkoutNameIdentifierOther$INWorkoutNameIdentifierRower$INWorkoutNameIdentifierRun$INWorkoutNameIdentifierSit$INWorkoutNameIdentifierStairs$INWorkoutNameIdentifierStand$INWorkoutNameIdentifierSteps$INWorkoutNameIdentifierSwim$INWorkoutNameIdentifierWalk$INWorkoutNameIdentifierYoga$IntentsVersionNumber@d$IntentsVersionString@^C$''' -enums = 
'''$INAccountTypeChecking@1$INAccountTypeCredit@2$INAccountTypeDebit@3$INAccountTypeInvestment@4$INAccountTypeMortgage@5$INAccountTypePrepaid@6$INAccountTypeSaving@7$INAccountTypeUnknown@0$INActivateCarSignalIntentResponseCodeFailure@4$INActivateCarSignalIntentResponseCodeFailureRequiringAppLaunch@5$INActivateCarSignalIntentResponseCodeInProgress@2$INActivateCarSignalIntentResponseCodeReady@1$INActivateCarSignalIntentResponseCodeSuccess@3$INActivateCarSignalIntentResponseCodeUnspecified@0$INAddTasksIntentResponseCodeFailure@4$INAddTasksIntentResponseCodeFailureRequiringAppLaunch@5$INAddTasksIntentResponseCodeInProgress@2$INAddTasksIntentResponseCodeReady@1$INAddTasksIntentResponseCodeSuccess@3$INAddTasksIntentResponseCodeUnspecified@0$INAmountTypeAmountDue@2$INAmountTypeCurrentBalance@3$INAmountTypeMaximumTransferAmount@4$INAmountTypeMinimumDue@1$INAmountTypeMinimumTransferAmount@5$INAmountTypeStatementBalance@6$INAmountTypeUnknown@0$INAppendToNoteIntentResponseCodeFailure@4$INAppendToNoteIntentResponseCodeFailureCannotUpdatePasswordProtectedNote@6$INAppendToNoteIntentResponseCodeFailureRequiringAppLaunch@5$INAppendToNoteIntentResponseCodeInProgress@2$INAppendToNoteIntentResponseCodeReady@1$INAppendToNoteIntentResponseCodeSuccess@3$INAppendToNoteIntentResponseCodeUnspecified@0$INBalanceTypeMiles@3$INBalanceTypeMoney@1$INBalanceTypePoints@2$INBalanceTypeUnknown@0$INBillTypeAutoInsurance@1$INBillTypeCable@2$INBillTypeCarLease@3$INBillTypeCarLoan@4$INBillTypeCreditCard@5$INBillTypeElectricity@6$INBillTypeGarbageAndRecycling@8$INBillTypeGas@7$INBillTypeHealthInsurance@9$INBillTypeHomeInsurance@10$INBillTypeInternet@11$INBillTypeLifeInsurance@12$INBillTypeMortgage@13$INBillTypeMusicStreaming@14$INBillTypePhone@15$INBillTypeRent@16$INBillTypeSewer@17$INBillTypeStudentLoan@18$INBillTypeTrafficTicket@19$INBillTypeTuition@20$INBillTypeUnknown@0$INBillTypeUtilities@21$INBillTypeWater@22$INBookRestaurantReservationIntentCodeDenied@1$INBookRestaurantReservationIntentCodeFailure@2$INBookRestaurantReservationIntentCodeFailureRequiringAppLaunch@3$INBookRestaurantReservationIntentCodeFailureRequiringAppLaunchMustVerifyCredentials@4$INBookRestaurantReservationIntentCodeFailureRequiringAppLaunchServiceTemporarilyUnavailable@5$INBookRestaurantReservationIntentCodeSuccess@0$INCallCapabilityAudioCall@1$INCallCapabilityOptionAudioCall@1$INCallCapabilityOptionVideoCall@2$INCallCapabilityUnknown@0$INCallCapabilityVideoCall@2$INCallDestinationTypeEmergency@2$INCallDestinationTypeEmergencyDestination@2$INCallDestinationTypeNormal@1$INCallDestinationTypeNormalDestination@1$INCallDestinationTypeRedial@4$INCallDestinationTypeRedialDestination@4$INCallDestinationTypeUnknown@0$INCallDestinationTypeVoicemail@3$INCallDestinationTypeVoicemailDestination@3$INCallRecordTypeLatest@4$INCallRecordTypeMissed@2$INCallRecordTypeOptionLatest@8$INCallRecordTypeOptionMissed@2$INCallRecordTypeOptionOutgoing@1$INCallRecordTypeOptionReceived@4$INCallRecordTypeOptionVoicemail@16$INCallRecordTypeOutgoing@1$INCallRecordTypeReceived@3$INCallRecordTypeUnknown@0$INCallRecordTypeVoicemail@5$INCancelRideIntentResponseCodeFailure@3$INCancelRideIntentResponseCodeReady@1$INCancelRideIntentResponseCodeSuccess@2$INCancelRideIntentResponseCodeUnspecified@0$INCancelWorkoutIntentResponseCodeContinueInApp@2$INCancelWorkoutIntentResponseCodeFailure@3$INCancelWorkoutIntentResponseCodeFailureNoMatchingWorkout@5$INCancelWorkoutIntentResponseCodeFailureRequiringAppLaunch@4$INCancelWorkoutIntentResponseCodeHandleInApp@7$INCancelWorkoutIntentResponseCodeReady@1
$INCancelWorkoutIntentResponseCodeSuccess@6$INCancelWorkoutIntentResponseCodeUnspecified@0$INCarAirCirculationModeFreshAir@1$INCarAirCirculationModeRecirculateAir@2$INCarAirCirculationModeUnknown@0$INCarAudioSourceAUX@5$INCarAudioSourceBluetooth@4$INCarAudioSourceCarPlay@1$INCarAudioSourceHardDrive@9$INCarAudioSourceMemoryCard@7$INCarAudioSourceOpticalDrive@8$INCarAudioSourceRadio@3$INCarAudioSourceUSB@6$INCarAudioSourceUnknown@0$INCarAudioSourceiPod@2$INCarDefrosterAll@3$INCarDefrosterFront@1$INCarDefrosterRear@2$INCarDefrosterUnknown@0$INCarSeatAll@12$INCarSeatDriver@1$INCarSeatFront@5$INCarSeatFrontLeft@3$INCarSeatFrontRight@4$INCarSeatPassenger@2$INCarSeatRear@8$INCarSeatRearLeft@6$INCarSeatRearRight@7$INCarSeatThirdRow@11$INCarSeatThirdRowLeft@9$INCarSeatThirdRowRight@10$INCarSeatUnknown@0$INCarSignalOptionAudible@1$INCarSignalOptionVisible@2$INConditionalOperatorAll@0$INConditionalOperatorAny@1$INConditionalOperatorNone@2$INCreateNoteIntentResponseCodeFailure@4$INCreateNoteIntentResponseCodeFailureRequiringAppLaunch@5$INCreateNoteIntentResponseCodeInProgress@2$INCreateNoteIntentResponseCodeReady@1$INCreateNoteIntentResponseCodeSuccess@3$INCreateNoteIntentResponseCodeUnspecified@0$INCreateTaskListIntentResponseCodeFailure@4$INCreateTaskListIntentResponseCodeFailureRequiringAppLaunch@5$INCreateTaskListIntentResponseCodeInProgress@2$INCreateTaskListIntentResponseCodeReady@1$INCreateTaskListIntentResponseCodeSuccess@3$INCreateTaskListIntentResponseCodeUnspecified@0$INDailyRoutineSituationEvening@1$INDailyRoutineSituationGym@5$INDailyRoutineSituationHome@2$INDailyRoutineSituationMorning@0$INDailyRoutineSituationSchool@4$INDailyRoutineSituationWork@3$INDateSearchTypeByCreatedDate@3$INDateSearchTypeByDueDate@1$INDateSearchTypeByModifiedDate@2$INDateSearchTypeUnknown@0$INEndWorkoutIntentResponseCodeContinueInApp@2$INEndWorkoutIntentResponseCodeFailure@3$INEndWorkoutIntentResponseCodeFailureNoMatchingWorkout@5$INEndWorkoutIntentResponseCodeFailureRequiringAppLaunch@4$INEndWorkoutIntentResponseCodeHandleInApp@7$INEndWorkoutIntentResponseCodeReady@1$INEndWorkoutIntentResponseCodeSuccess@6$INEndWorkoutIntentResponseCodeUnspecified@0$INGetAvailableRestaurantReservationBookingDefaultsIntentResponseCodeFailure@1$INGetAvailableRestaurantReservationBookingDefaultsIntentResponseCodeSuccess@0$INGetAvailableRestaurantReservationBookingDefaultsIntentResponseCodeUnspecified@2$INGetAvailableRestaurantReservationBookingsIntentCodeFailure@1$INGetAvailableRestaurantReservationBookingsIntentCodeFailureRequestUnsatisfiable@2$INGetAvailableRestaurantReservationBookingsIntentCodeFailureRequestUnspecified@3$INGetAvailableRestaurantReservationBookingsIntentCodeSuccess@0$INGetCarLockStatusIntentResponseCodeFailure@4$INGetCarLockStatusIntentResponseCodeFailureRequiringAppLaunch@5$INGetCarLockStatusIntentResponseCodeInProgress@2$INGetCarLockStatusIntentResponseCodeReady@1$INGetCarLockStatusIntentResponseCodeSuccess@3$INGetCarLockStatusIntentResponseCodeUnspecified@0$INGetCarPowerLevelStatusIntentResponseCodeFailure@4$INGetCarPowerLevelStatusIntentResponseCodeFailureRequiringAppLaunch@5$INGetCarPowerLevelStatusIntentResponseCodeInProgress@2$INGetCarPowerLevelStatusIntentResponseCodeReady@1$INGetCarPowerLevelStatusIntentResponseCodeSuccess@3$INGetCarPowerLevelStatusIntentResponseCodeUnspecified@0$INGetRestaurantGuestIntentResponseCodeFailure@1$INGetRestaurantGuestIntentResponseCodeSuccess@0$INGetRideStatusIntentResponseCodeFailure@4$INGetRideStatusIntentResponseCodeFailureRequiringAppLaunch@5$INGetRideStatusIntentRespons
eCodeFailureRequiringAppLaunchMustVerifyCredentials@6$INGetRideStatusIntentResponseCodeFailureRequiringAppLaunchServiceTemporarilyUnavailable@7$INGetRideStatusIntentResponseCodeInProgress@2$INGetRideStatusIntentResponseCodeReady@1$INGetRideStatusIntentResponseCodeSuccess@3$INGetRideStatusIntentResponseCodeUnspecified@0$INGetUserCurrentRestaurantReservationBookingsIntentResponseCodeFailure@1$INGetUserCurrentRestaurantReservationBookingsIntentResponseCodeFailureRequestUnsatisfiable@2$INGetUserCurrentRestaurantReservationBookingsIntentResponseCodeSuccess@0$INGetUserCurrentRestaurantReservationBookingsIntentResponseCodeUnspecified@3$INGetVisualCodeIntentResponseCodeContinueInApp@2$INGetVisualCodeIntentResponseCodeFailure@5$INGetVisualCodeIntentResponseCodeFailureAppConfigurationRequired@7$INGetVisualCodeIntentResponseCodeFailureRequiringAppLaunch@6$INGetVisualCodeIntentResponseCodeInProgress@3$INGetVisualCodeIntentResponseCodeReady@1$INGetVisualCodeIntentResponseCodeSuccess@4$INGetVisualCodeIntentResponseCodeUnspecified@0$INIntentErrorDecodingGeneric@9000$INIntentErrorDeletingAllInteractions@1902$INIntentErrorDeletingInteractionWithGroupIdentifier@1904$INIntentErrorDeletingInteractionWithIdentifiers@1903$INIntentErrorDonatingInteraction@1901$INIntentErrorEncodingFailed@8001$INIntentErrorEncodingGeneric@8000$INIntentErrorExtensionBringUpFailed@5001$INIntentErrorExtensionLaunchingTimeout@5000$INIntentErrorImageGeneric@6000$INIntentErrorImageLoadingFailed@6003$INIntentErrorImageNoServiceAvailable@6001$INIntentErrorImageProxyInvalid@6006$INIntentErrorImageProxyLoop@6005$INIntentErrorImageProxyTimeout@6007$INIntentErrorImageRetrievalFailed@6004$INIntentErrorImageScalingFailed@6009$INIntentErrorImageServiceFailure@6008$INIntentErrorImageStorageFailed@6002$INIntentErrorIntentSupportedByMultipleExtension@2001$INIntentErrorInteractionOperationNotSupported@1900$INIntentErrorInvalidIntentName@2004$INIntentErrorInvalidUserVocabularyFileLocation@4000$INIntentErrorMissingInformation@3002$INIntentErrorNoAppAvailable@2005$INIntentErrorNoHandlerProvidedForIntent@2003$INIntentErrorPermissionDenied@6010$INIntentErrorRequestTimedOut@3001$INIntentErrorRestrictedIntentsNotSupportedByExtension@2002$INIntentErrorVoiceShortcutCreationFailed@7000$INIntentErrorVoiceShortcutDeleteFailed@7002$INIntentErrorVoiceShortcutGetFailed@7001$INIntentHandlingStatusDeferredToApplication@5$INIntentHandlingStatusFailure@4$INIntentHandlingStatusInProgress@2$INIntentHandlingStatusReady@1$INIntentHandlingStatusSuccess@3$INIntentHandlingStatusUnspecified@0$INIntentHandlingStatusUserConfirmationRequired@6$INInteractionDirectionIncoming@2$INInteractionDirectionOutgoing@1$INInteractionDirectionUnspecified@0$INListRideOptionsIntentResponseCodeFailure@4$INListRideOptionsIntentResponseCodeFailurePreviousRideNeedsFeedback@10$INListRideOptionsIntentResponseCodeFailureRequiringAppLaunch@5$INListRideOptionsIntentResponseCodeFailureRequiringAppLaunchMustVerifyCredentials@6$INListRideOptionsIntentResponseCodeFailureRequiringAppLaunchNoServiceInArea@7$INListRideOptionsIntentResponseCodeFailureRequiringAppLaunchPreviousRideNeedsCompletion@9$INListRideOptionsIntentResponseCodeFailureRequiringAppLaunchServiceTemporarilyUnavailable@8$INListRideOptionsIntentResponseCodeInProgress@2$INListRideOptionsIntentResponseCodeReady@1$INListRideOptionsIntentResponseCodeSuccess@3$INListRideOptionsIntentResponseCodeUnspecified@0$INLocationSearchTypeByLocationTrigger@1$INLocationSearchTypeUnknown@0$INMediaItemTypeAlbum@2$INMediaItemTypeArtist@3$INMediaItemTypeAudioBook@1
0$INMediaItemTypeGenre@4$INMediaItemTypeMovie@11$INMediaItemTypeMusicStation@9$INMediaItemTypeMusicVideo@14$INMediaItemTypePlaylist@5$INMediaItemTypePodcastEpisode@7$INMediaItemTypePodcastPlaylist@8$INMediaItemTypePodcastShow@6$INMediaItemTypeSong@1$INMediaItemTypeTVShow@12$INMediaItemTypeTVShowEpisode@13$INMediaItemTypeUnknown@0$INMessageAttributeFlagged@3$INMessageAttributeOptionFlagged@4$INMessageAttributeOptionPlayed@16$INMessageAttributeOptionRead@1$INMessageAttributeOptionUnflagged@8$INMessageAttributeOptionUnread@2$INMessageAttributePlayed@5$INMessageAttributeRead@1$INMessageAttributeUnflagged@4$INMessageAttributeUnknown@0$INMessageAttributeUnread@2$INMessageTypeActivitySnippet@23$INMessageTypeAnimoji@22$INMessageTypeAudio@2$INMessageTypeDigitalTouch@3$INMessageTypeFile@24$INMessageTypeHandwriting@4$INMessageTypeLink@25$INMessageTypeMediaAddressCard@14$INMessageTypeMediaAudio@18$INMessageTypeMediaCalendar@12$INMessageTypeMediaImage@15$INMessageTypeMediaLocation@13$INMessageTypeMediaPass@17$INMessageTypeMediaVideo@16$INMessageTypePaymentNote@21$INMessageTypePaymentRequest@20$INMessageTypePaymentSent@19$INMessageTypeSticker@5$INMessageTypeTapbackDisliked@7$INMessageTypeTapbackEmphasized@8$INMessageTypeTapbackLaughed@11$INMessageTypeTapbackLiked@6$INMessageTypeTapbackLoved@9$INMessageTypeTapbackQuestioned@10$INMessageTypeText@1$INMessageTypeUnspecified@0$INNoteContentTypeImage@2$INNoteContentTypeText@1$INNoteContentTypeUnknown@0$INNotebookItemTypeNote@1$INNotebookItemTypeTask@3$INNotebookItemTypeTaskList@2$INNotebookItemTypeUnknown@0$INPauseWorkoutIntentResponseCodeContinueInApp@2$INPauseWorkoutIntentResponseCodeFailure@3$INPauseWorkoutIntentResponseCodeFailureNoMatchingWorkout@5$INPauseWorkoutIntentResponseCodeFailureRequiringAppLaunch@4$INPauseWorkoutIntentResponseCodeHandleInApp@7$INPauseWorkoutIntentResponseCodeReady@1$INPauseWorkoutIntentResponseCodeSuccess@6$INPauseWorkoutIntentResponseCodeUnspecified@0$INPayBillIntentResponseCodeFailure@4$INPayBillIntentResponseCodeFailureCredentialsUnverified@6$INPayBillIntentResponseCodeFailureInsufficientFunds@7$INPayBillIntentResponseCodeFailureRequiringAppLaunch@5$INPayBillIntentResponseCodeInProgress@2$INPayBillIntentResponseCodeReady@1$INPayBillIntentResponseCodeSuccess@3$INPayBillIntentResponseCodeUnspecified@0$INPaymentMethodTypeApplePay@8$INPaymentMethodTypeBrokerage@3$INPaymentMethodTypeChecking@1$INPaymentMethodTypeCredit@5$INPaymentMethodTypeDebit@4$INPaymentMethodTypePrepaid@6$INPaymentMethodTypeSavings@2$INPaymentMethodTypeStore@7$INPaymentMethodTypeUnknown@0$INPaymentStatusCanceled@3$INPaymentStatusCompleted@2$INPaymentStatusFailed@4$INPaymentStatusPending@1$INPaymentStatusUnknown@0$INPaymentStatusUnpaid@5$INPersonHandleTypeEmailAddress@1$INPersonHandleTypePhoneNumber@2$INPersonHandleTypeUnknown@0$INPersonSuggestionTypeInstantMessageAddress@2$INPersonSuggestionTypeNone@0$INPersonSuggestionTypeSocialProfile@1$INPhotoAttributeOptionBouncePhoto@134217728$INPhotoAttributeOptionBurstPhoto@1024$INPhotoAttributeOptionChromeFilter@131072$INPhotoAttributeOptionFadeFilter@4194304$INPhotoAttributeOptionFavorite@64$INPhotoAttributeOptionFlash@8$INPhotoAttributeOptionFrontFacingCamera@256$INPhotoAttributeOptionGIF@4$INPhotoAttributeOptionHDRPhoto@2048$INPhotoAttributeOptionInstantFilter@262144$INPhotoAttributeOptionLandscapeOrientation@16$INPhotoAttributeOptionLivePhoto@33554432$INPhotoAttributeOptionLongExposurePhoto@268435456$INPhotoAttributeOptionLoopPhoto@67108864$INPhotoAttributeOptionMonoFilter@2097152$INPhotoAttributeOptionNoirFilter@65
536$INPhotoAttributeOptionPanoramaPhoto@8192$INPhotoAttributeOptionPhoto@1$INPhotoAttributeOptionPortraitOrientation@32$INPhotoAttributeOptionPortraitPhoto@16777216$INPhotoAttributeOptionProcessFilter@8388608$INPhotoAttributeOptionScreenshot@512$INPhotoAttributeOptionSelfie@128$INPhotoAttributeOptionSlowMotionVideo@32768$INPhotoAttributeOptionSquarePhoto@4096$INPhotoAttributeOptionTimeLapseVideo@16384$INPhotoAttributeOptionTonalFilter@524288$INPhotoAttributeOptionTransferFilter@1048576$INPhotoAttributeOptionVideo@2$INPlayMediaIntentResponseCodeContinueInApp@2$INPlayMediaIntentResponseCodeFailure@6$INPlayMediaIntentResponseCodeFailureNoUnplayedContent@9$INPlayMediaIntentResponseCodeFailureRequiringAppLaunch@7$INPlayMediaIntentResponseCodeFailureRestrictedContent@10$INPlayMediaIntentResponseCodeFailureUnknownMediaType@8$INPlayMediaIntentResponseCodeHandleInApp@5$INPlayMediaIntentResponseCodeInProgress@3$INPlayMediaIntentResponseCodeReady@1$INPlayMediaIntentResponseCodeSuccess@4$INPlayMediaIntentResponseCodeUnspecified@0$INPlaybackRepeatModeAll@2$INPlaybackRepeatModeNone@1$INPlaybackRepeatModeOne@3$INPlaybackRepeatModeUnknown@0$INRadioTypeAM@1$INRadioTypeDAB@5$INRadioTypeFM@2$INRadioTypeHD@3$INRadioTypeSatellite@4$INRadioTypeUnknown@0$INRecurrenceFrequencyDaily@3$INRecurrenceFrequencyHourly@2$INRecurrenceFrequencyMinute@1$INRecurrenceFrequencyMonthly@5$INRecurrenceFrequencyUnknown@0$INRecurrenceFrequencyWeekly@4$INRecurrenceFrequencyYearly@6$INRelativeReferenceNext@1$INRelativeReferencePrevious@2$INRelativeReferenceUnknown@0$INRelativeSettingHigher@3$INRelativeSettingHighest@4$INRelativeSettingLower@2$INRelativeSettingLowest@1$INRelativeSettingUnknown@0$INRequestPaymentCurrencyAmountUnsupportedReasonPaymentsAmountAboveMaximum@2$INRequestPaymentCurrencyAmountUnsupportedReasonPaymentsAmountBelowMinimum@1$INRequestPaymentCurrencyAmountUnsupportedReasonPaymentsCurrencyUnsupported@3$INRequestPaymentIntentResponseCodeFailure@4$INRequestPaymentIntentResponseCodeFailureCredentialsUnverified@6$INRequestPaymentIntentResponseCodeFailureNoBankAccount@10$INRequestPaymentIntentResponseCodeFailureNotEligible@11$INRequestPaymentIntentResponseCodeFailurePaymentsAmountAboveMaximum@8$INRequestPaymentIntentResponseCodeFailurePaymentsAmountBelowMinimum@7$INRequestPaymentIntentResponseCodeFailurePaymentsCurrencyUnsupported@9$INRequestPaymentIntentResponseCodeFailureRequiringAppLaunch@5$INRequestPaymentIntentResponseCodeFailureTermsAndConditionsAcceptanceRequired@12$INRequestPaymentIntentResponseCodeInProgress@2$INRequestPaymentIntentResponseCodeReady@1$INRequestPaymentIntentResponseCodeSuccess@3$INRequestPaymentIntentResponseCodeUnspecified@0$INRequestPaymentPayerUnsupportedReasonCredentialsUnverified@1$INRequestPaymentPayerUnsupportedReasonNoAccount@2$INRequestPaymentPayerUnsupportedReasonNoValidHandle@3$INRequestRideIntentResponseCodeFailure@4$INRequestRideIntentResponseCodeFailureRequiringAppLaunch@5$INRequestRideIntentResponseCodeFailureRequiringAppLaunchMustVerifyCredentials@6$INRequestRideIntentResponseCodeFailureRequiringAppLaunchNoServiceInArea@7$INRequestRideIntentResponseCodeFailureRequiringAppLaunchPreviousRideNeedsCompletion@9$INRequestRideIntentResponseCodeFailureRequiringAppLaunchServiceTemporarilyUnavailable@8$INRequestRideIntentResponseCodeInProgress@2$INRequestRideIntentResponseCodeReady@1$INRequestRideIntentResponseCodeSuccess@3$INRequestRideIntentResponseCodeUnspecified@0$INRestaurantReservationUserBookingStatusConfirmed@1$INRestaurantReservationUserBookingStatusDenied@2$INRestaurantReservationUs
erBookingStatusPending@0$INResumeWorkoutIntentResponseCodeContinueInApp@2$INResumeWorkoutIntentResponseCodeFailure@3$INResumeWorkoutIntentResponseCodeFailureNoMatchingWorkout@5$INResumeWorkoutIntentResponseCodeFailureRequiringAppLaunch@4$INResumeWorkoutIntentResponseCodeHandleInApp@7$INResumeWorkoutIntentResponseCodeReady@1$INResumeWorkoutIntentResponseCodeSuccess@6$INResumeWorkoutIntentResponseCodeUnspecified@0$INRideFeedbackTypeOptionRate@1$INRideFeedbackTypeOptionTip@2$INRidePhaseApproachingPickup@5$INRidePhaseCompleted@4$INRidePhaseConfirmed@2$INRidePhaseOngoing@3$INRidePhasePickup@6$INRidePhaseReceived@1$INRidePhaseUnknown@0$INSaveProfileInCarIntentResponseCodeFailure@4$INSaveProfileInCarIntentResponseCodeFailureRequiringAppLaunch@5$INSaveProfileInCarIntentResponseCodeInProgress@2$INSaveProfileInCarIntentResponseCodeReady@1$INSaveProfileInCarIntentResponseCodeSuccess@3$INSaveProfileInCarIntentResponseCodeUnspecified@0$INSearchCallHistoryIntentResponseCodeContinueInApp@2$INSearchCallHistoryIntentResponseCodeFailure@3$INSearchCallHistoryIntentResponseCodeFailureAppConfigurationRequired@5$INSearchCallHistoryIntentResponseCodeFailureRequiringAppLaunch@4$INSearchCallHistoryIntentResponseCodeInProgress@6$INSearchCallHistoryIntentResponseCodeReady@1$INSearchCallHistoryIntentResponseCodeSuccess@7$INSearchCallHistoryIntentResponseCodeUnspecified@0$INSearchForAccountsIntentResponseCodeFailure@4$INSearchForAccountsIntentResponseCodeFailureAccountNotFound@7$INSearchForAccountsIntentResponseCodeFailureCredentialsUnverified@6$INSearchForAccountsIntentResponseCodeFailureNotEligible@9$INSearchForAccountsIntentResponseCodeFailureRequiringAppLaunch@5$INSearchForAccountsIntentResponseCodeFailureTermsAndConditionsAcceptanceRequired@8$INSearchForAccountsIntentResponseCodeInProgress@2$INSearchForAccountsIntentResponseCodeReady@1$INSearchForAccountsIntentResponseCodeSuccess@3$INSearchForAccountsIntentResponseCodeUnspecified@0$INSearchForBillsIntentResponseCodeFailure@4$INSearchForBillsIntentResponseCodeFailureBillNotFound@7$INSearchForBillsIntentResponseCodeFailureCredentialsUnverified@6$INSearchForBillsIntentResponseCodeFailureRequiringAppLaunch@5$INSearchForBillsIntentResponseCodeInProgress@2$INSearchForBillsIntentResponseCodeReady@1$INSearchForBillsIntentResponseCodeSuccess@3$INSearchForBillsIntentResponseCodeUnspecified@0$INSearchForMessagesIntentResponseCodeFailure@4$INSearchForMessagesIntentResponseCodeFailureMessageServiceNotAvailable@6$INSearchForMessagesIntentResponseCodeFailureMessageTooManyResults@7$INSearchForMessagesIntentResponseCodeFailureRequiringAppLaunch@5$INSearchForMessagesIntentResponseCodeInProgress@2$INSearchForMessagesIntentResponseCodeReady@1$INSearchForMessagesIntentResponseCodeSuccess@3$INSearchForMessagesIntentResponseCodeUnspecified@0$INSearchForNotebookItemsIntentResponseCodeFailure@4$INSearchForNotebookItemsIntentResponseCodeFailureRequiringAppLaunch@5$INSearchForNotebookItemsIntentResponseCodeInProgress@2$INSearchForNotebookItemsIntentResponseCodeReady@1$INSearchForNotebookItemsIntentResponseCodeSuccess@3$INSearchForNotebookItemsIntentResponseCodeUnspecified@0$INSearchForPhotosIntentResponseCodeContinueInApp@2$INSearchForPhotosIntentResponseCodeFailure@3$INSearchForPhotosIntentResponseCodeFailureAppConfigurationRequired@5$INSearchForPhotosIntentResponseCodeFailureRequiringAppLaunch@4$INSearchForPhotosIntentResponseCodeReady@1$INSearchForPhotosIntentResponseCodeUnspecified@0$INSendMessageIntentResponseCodeFailure@4$INSendMessageIntentResponseCodeFailureMessageServiceNotAvailabl
e@6$INSendMessageIntentResponseCodeFailureRequiringAppLaunch@5$INSendMessageIntentResponseCodeInProgress@2$INSendMessageIntentResponseCodeReady@1$INSendMessageIntentResponseCodeSuccess@3$INSendMessageIntentResponseCodeUnspecified@0$INSendMessageRecipientUnsupportedReasonMessagingServiceNotEnabledForRecipient@3$INSendMessageRecipientUnsupportedReasonNoAccount@1$INSendMessageRecipientUnsupportedReasonNoHandleForLabel@6$INSendMessageRecipientUnsupportedReasonNoValidHandle@4$INSendMessageRecipientUnsupportedReasonOffline@2$INSendMessageRecipientUnsupportedReasonRequestedHandleInvalid@5$INSendPaymentCurrencyAmountUnsupportedReasonPaymentsAmountAboveMaximum@2$INSendPaymentCurrencyAmountUnsupportedReasonPaymentsAmountBelowMinimum@1$INSendPaymentCurrencyAmountUnsupportedReasonPaymentsCurrencyUnsupported@3$INSendPaymentIntentResponseCodeFailure@4$INSendPaymentIntentResponseCodeFailureCredentialsUnverified@6$INSendPaymentIntentResponseCodeFailureInsufficientFunds@10$INSendPaymentIntentResponseCodeFailureNoBankAccount@11$INSendPaymentIntentResponseCodeFailureNotEligible@12$INSendPaymentIntentResponseCodeFailurePaymentsAmountAboveMaximum@8$INSendPaymentIntentResponseCodeFailurePaymentsAmountBelowMinimum@7$INSendPaymentIntentResponseCodeFailurePaymentsCurrencyUnsupported@9$INSendPaymentIntentResponseCodeFailureRequiringAppLaunch@5$INSendPaymentIntentResponseCodeFailureTermsAndConditionsAcceptanceRequired@13$INSendPaymentIntentResponseCodeInProgress@2$INSendPaymentIntentResponseCodeReady@1$INSendPaymentIntentResponseCodeSuccess@3$INSendPaymentIntentResponseCodeUnspecified@0$INSendPaymentPayeeUnsupportedReasonCredentialsUnverified@1$INSendPaymentPayeeUnsupportedReasonInsufficientFunds@2$INSendPaymentPayeeUnsupportedReasonNoAccount@3$INSendPaymentPayeeUnsupportedReasonNoValidHandle@4$INSendRideFeedbackIntentResponseCodeFailure@3$INSendRideFeedbackIntentResponseCodeReady@1$INSendRideFeedbackIntentResponseCodeSuccess@2$INSendRideFeedbackIntentResponseCodeUnspecified@0$INSetAudioSourceInCarIntentResponseCodeFailure@4$INSetAudioSourceInCarIntentResponseCodeFailureRequiringAppLaunch@5$INSetAudioSourceInCarIntentResponseCodeInProgress@2$INSetAudioSourceInCarIntentResponseCodeReady@1$INSetAudioSourceInCarIntentResponseCodeSuccess@3$INSetAudioSourceInCarIntentResponseCodeUnspecified@0$INSetCarLockStatusIntentResponseCodeFailure@4$INSetCarLockStatusIntentResponseCodeFailureRequiringAppLaunch@5$INSetCarLockStatusIntentResponseCodeInProgress@2$INSetCarLockStatusIntentResponseCodeReady@1$INSetCarLockStatusIntentResponseCodeSuccess@3$INSetCarLockStatusIntentResponseCodeUnspecified@0$INSetClimateSettingsInCarIntentResponseCodeFailure@4$INSetClimateSettingsInCarIntentResponseCodeFailureRequiringAppLaunch@5$INSetClimateSettingsInCarIntentResponseCodeInProgress@2$INSetClimateSettingsInCarIntentResponseCodeReady@1$INSetClimateSettingsInCarIntentResponseCodeSuccess@3$INSetClimateSettingsInCarIntentResponseCodeUnspecified@0$INSetDefrosterSettingsInCarIntentResponseCodeFailure@4$INSetDefrosterSettingsInCarIntentResponseCodeFailureRequiringAppLaunch@5$INSetDefrosterSettingsInCarIntentResponseCodeInProgress@2$INSetDefrosterSettingsInCarIntentResponseCodeReady@1$INSetDefrosterSettingsInCarIntentResponseCodeSuccess@3$INSetDefrosterSettingsInCarIntentResponseCodeUnspecified@0$INSetMessageAttributeIntentResponseCodeFailure@4$INSetMessageAttributeIntentResponseCodeFailureMessageAttributeNotSet@7$INSetMessageAttributeIntentResponseCodeFailureMessageNotFound@6$INSetMessageAttributeIntentResponseCodeFailureRequiringAppLaunch@5$INSetMess
ageAttributeIntentResponseCodeInProgress@2$INSetMessageAttributeIntentResponseCodeReady@1$INSetMessageAttributeIntentResponseCodeSuccess@3$INSetMessageAttributeIntentResponseCodeUnspecified@0$INSetProfileInCarIntentResponseCodeFailure@4$INSetProfileInCarIntentResponseCodeFailureRequiringAppLaunch@5$INSetProfileInCarIntentResponseCodeInProgress@2$INSetProfileInCarIntentResponseCodeReady@1$INSetProfileInCarIntentResponseCodeSuccess@3$INSetProfileInCarIntentResponseCodeUnspecified@0$INSetRadioStationIntentResponseCodeFailure@4$INSetRadioStationIntentResponseCodeFailureNotSubscribed@6$INSetRadioStationIntentResponseCodeFailureRequiringAppLaunch@5$INSetRadioStationIntentResponseCodeInProgress@2$INSetRadioStationIntentResponseCodeReady@1$INSetRadioStationIntentResponseCodeSuccess@3$INSetRadioStationIntentResponseCodeUnspecified@0$INSetSeatSettingsInCarIntentResponseCodeFailure@4$INSetSeatSettingsInCarIntentResponseCodeFailureRequiringAppLaunch@5$INSetSeatSettingsInCarIntentResponseCodeInProgress@2$INSetSeatSettingsInCarIntentResponseCodeReady@1$INSetSeatSettingsInCarIntentResponseCodeSuccess@3$INSetSeatSettingsInCarIntentResponseCodeUnspecified@0$INSetTaskAttributeIntentResponseCodeFailure@4$INSetTaskAttributeIntentResponseCodeFailureRequiringAppLaunch@5$INSetTaskAttributeIntentResponseCodeInProgress@2$INSetTaskAttributeIntentResponseCodeReady@1$INSetTaskAttributeIntentResponseCodeSuccess@3$INSetTaskAttributeIntentResponseCodeUnspecified@0$INSiriAuthorizationStatusAuthorized@3$INSiriAuthorizationStatusDenied@2$INSiriAuthorizationStatusNotDetermined@0$INSiriAuthorizationStatusRestricted@1$INSortTypeAsIs@1$INSortTypeByDate@2$INSortTypeUnknown@0$INSpatialEventArrive@1$INSpatialEventDepart@2$INSpatialEventUnknown@0$INStartAudioCallIntentResponseCodeContinueInApp@2$INStartAudioCallIntentResponseCodeFailure@3$INStartAudioCallIntentResponseCodeFailureAppConfigurationRequired@5$INStartAudioCallIntentResponseCodeFailureCallingServiceNotAvailable@6$INStartAudioCallIntentResponseCodeFailureContactNotSupportedByApp@7$INStartAudioCallIntentResponseCodeFailureNoValidNumber@8$INStartAudioCallIntentResponseCodeFailureRequiringAppLaunch@4$INStartAudioCallIntentResponseCodeReady@1$INStartAudioCallIntentResponseCodeUnspecified@0$INStartPhotoPlaybackIntentResponseCodeContinueInApp@2$INStartPhotoPlaybackIntentResponseCodeFailure@3$INStartPhotoPlaybackIntentResponseCodeFailureAppConfigurationRequired@5$INStartPhotoPlaybackIntentResponseCodeFailureRequiringAppLaunch@4$INStartPhotoPlaybackIntentResponseCodeReady@1$INStartPhotoPlaybackIntentResponseCodeUnspecified@0$INStartVideoCallIntentResponseCodeContinueInApp@2$INStartVideoCallIntentResponseCodeFailure@3$INStartVideoCallIntentResponseCodeFailureAppConfigurationRequired@5$INStartVideoCallIntentResponseCodeFailureCallingServiceNotAvailable@6$INStartVideoCallIntentResponseCodeFailureContactNotSupportedByApp@7$INStartVideoCallIntentResponseCodeFailureInvalidNumber@8$INStartVideoCallIntentResponseCodeFailureRequiringAppLaunch@4$INStartVideoCallIntentResponseCodeReady@1$INStartVideoCallIntentResponseCodeUnspecified@0$INStartWorkoutIntentResponseCodeContinueInApp@2$INStartWorkoutIntentResponseCodeFailure@3$INStartWorkoutIntentResponseCodeFailureNoMatchingWorkout@6$INStartWorkoutIntentResponseCodeFailureOngoingWorkout@5$INStartWorkoutIntentResponseCodeFailureRequiringAppLaunch@4$INStartWorkoutIntentResponseCodeHandleInApp@8$INStartWorkoutIntentResponseCodeReady@1$INStartWorkoutIntentResponseCodeSuccess@7$INStartWorkoutIntentResponseCodeUnspecified@0$INTaskStatusCompleted@2$I
NTaskStatusNotCompleted@1$INTaskStatusUnknown@0$INTaskTypeCompletable@2$INTaskTypeNotCompletable@1$INTaskTypeUnknown@0$INTransferMoneyIntentResponseCodeFailure@4$INTransferMoneyIntentResponseCodeFailureCredentialsUnverified@6$INTransferMoneyIntentResponseCodeFailureInsufficientFunds@7$INTransferMoneyIntentResponseCodeFailureRequiringAppLaunch@5$INTransferMoneyIntentResponseCodeInProgress@2$INTransferMoneyIntentResponseCodeReady@1$INTransferMoneyIntentResponseCodeSuccess@3$INTransferMoneyIntentResponseCodeUnspecified@0$INUpcomingMediaPredictionModeDefault@0$INUpcomingMediaPredictionModeOnlyPredictSuggestedIntents@1$INVisualCodeTypeBus@5$INVisualCodeTypeContact@1$INVisualCodeTypeRequestPayment@2$INVisualCodeTypeSendPayment@3$INVisualCodeTypeSubway@6$INVisualCodeTypeTransit@4$INVisualCodeTypeUnknown@0$INVocabularyStringTypeCarName@301$INVocabularyStringTypeCarProfileName@300$INVocabularyStringTypeContactGroupName@2$INVocabularyStringTypeContactName@1$INVocabularyStringTypeNotebookItemGroupName@501$INVocabularyStringTypeNotebookItemTitle@500$INVocabularyStringTypePaymentsAccountNickname@401$INVocabularyStringTypePaymentsOrganizationName@400$INVocabularyStringTypePhotoAlbumName@101$INVocabularyStringTypePhotoTag@100$INVocabularyStringTypeWorkoutActivityName@200$INWorkoutGoalUnitTypeFoot@3$INWorkoutGoalUnitTypeHour@8$INWorkoutGoalUnitTypeInch@1$INWorkoutGoalUnitTypeJoule@9$INWorkoutGoalUnitTypeKiloCalorie@10$INWorkoutGoalUnitTypeMeter@2$INWorkoutGoalUnitTypeMile@4$INWorkoutGoalUnitTypeMinute@7$INWorkoutGoalUnitTypeSecond@6$INWorkoutGoalUnitTypeUnknown@0$INWorkoutGoalUnitTypeYard@5$INWorkoutLocationTypeIndoor@2$INWorkoutLocationTypeOutdoor@1$INWorkoutLocationTypeUnknown@0$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'INBooleanResolutionResult', b'successWithResolvedValue:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INInteraction', b'deleteAllInteractionsWithCompletion:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'INInteraction', b'deleteInteractionsWithGroupIdentifier:completion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'INInteraction', b'deleteInteractionsWithIdentifiers:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'INInteraction', b'donateInteractionWithCompletion:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'INParameter', b'isEqualToParameter:', {'retval': {'type': 'Z'}}) - r(b'INPerson', b'initWithPersonHandle:nameComponents:displayName:image:contactIdentifier:customIdentifier:isMe:', {'arguments': {8: {'type': 'Z'}}}) - r(b'INPerson', b'isMe', {'retval': {'type': b'Z'}}) - r(b'INRestaurantGuestDisplayPreferences', b'emailAddressEditable', {'retval': {'type': b'Z'}}) - r(b'INRestaurantGuestDisplayPreferences', b'emailAddressFieldShouldBeDisplayed', {'retval': {'type': b'Z'}}) - r(b'INRestaurantGuestDisplayPreferences', b'nameEditable', {'retval': {'type': b'Z'}}) - r(b'INRestaurantGuestDisplayPreferences', b'nameFieldFirstNameOptional', {'retval': {'type': b'Z'}}) - r(b'INRestaurantGuestDisplayPreferences', b'nameFieldLastNameOptional', {'retval': {'type': b'Z'}}) - r(b'INRestaurantGuestDisplayPreferences', b'nameFieldShouldBeDisplayed', {'retval': {'type': b'Z'}}) - r(b'INRestaurantGuestDisplayPreferences', 
b'phoneNumberEditable', {'retval': {'type': b'Z'}}) - r(b'INRestaurantGuestDisplayPreferences', b'phoneNumberFieldShouldBeDisplayed', {'retval': {'type': b'Z'}}) - r(b'INRestaurantGuestDisplayPreferences', b'setEmailAddressEditable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantGuestDisplayPreferences', b'setEmailAddressFieldShouldBeDisplayed:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantGuestDisplayPreferences', b'setNameEditable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantGuestDisplayPreferences', b'setNameFieldFirstNameOptional:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantGuestDisplayPreferences', b'setNameFieldLastNameOptional:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantGuestDisplayPreferences', b'setNameFieldShouldBeDisplayed:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantGuestDisplayPreferences', b'setPhoneNumberEditable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantGuestDisplayPreferences', b'setPhoneNumberFieldShouldBeDisplayed:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantReservationBooking', b'isBookingAvailable', {'retval': {'type': b'Z'}}) - r(b'INRestaurantReservationBooking', b'requiresEmailAddress', {'retval': {'type': b'Z'}}) - r(b'INRestaurantReservationBooking', b'requiresManualRequest', {'retval': {'type': b'Z'}}) - r(b'INRestaurantReservationBooking', b'requiresName', {'retval': {'type': b'Z'}}) - r(b'INRestaurantReservationBooking', b'requiresPhoneNumber', {'retval': {'type': b'Z'}}) - r(b'INRestaurantReservationBooking', b'setBookingAvailable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantReservationBooking', b'setRequiresEmailAddress:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantReservationBooking', b'setRequiresManualRequest:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantReservationBooking', b'setRequiresName:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRestaurantReservationBooking', b'setRequiresPhoneNumber:', {'arguments': {2: {'type': b'Z'}}}) - r(b'INRideCompletionStatus', b'isCanceled', {'retval': {'type': b'Z'}}) - r(b'INRideCompletionStatus', b'isCompleted', {'retval': {'type': b'Z'}}) - r(b'INRideCompletionStatus', b'isMissedPickup', {'retval': {'type': b'Z'}}) - r(b'INRideCompletionStatus', b'isOutstanding', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'alternativeSpeakableMatches', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'confirmActivateCarSignal:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmAddTasks:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmAppendToNote:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmBookRestaurantReservation:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmCancelRide:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmCancelWorkout:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmCreateNote:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', 
b'confirmCreateTaskList:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmEndWorkout:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmGetAvailableRestaurantReservationBookingDefaults:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmGetAvailableRestaurantReservationBookings:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmGetCarLockStatus:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmGetCarPowerLevelStatus:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmGetRestaurantGuest:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmGetRideStatus:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmGetUserCurrentRestaurantReservationBookings:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmGetVisualCode:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmListRideOptions:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmPauseWorkout:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmPayBill:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmPlayMedia:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmRequestPayment:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmRequestRide:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmResumeWorkout:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSaveProfileInCar:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSearchCallHistory:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'confirmSearchForAccounts:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSearchForBills:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSearchForMessages:completion:', {'required': False, 'retval': {'type': b'v'}, 
'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'confirmSearchForNotebookItems:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSearchForPhotos:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSendMessage:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'confirmSendPayment:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSendRideFeedback:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSetAudioSourceInCar:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSetCarLockStatus:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSetClimateSettingsInCar:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSetDefrosterSettingsInCar:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSetMessageAttribute:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSetProfileInCar:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSetRadioStation:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSetSeatSettingsInCar:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmSetTaskAttribute:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmStartAudioCall:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'confirmStartPhotoPlayback:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmStartVideoCall:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'confirmStartWorkout:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'confirmTransferMoney:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'getRideStatusResponseDidUpdate:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: 
{'type': b'@'}}}) - r(b'NSObject', b'handleActivateCarSignal:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleAddTasks:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleAppendToNote:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleBookRestaurantReservation:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleCancelRide:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleCancelWorkout:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleCreateNote:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleCreateTaskList:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleEndWorkout:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleGetAvailableRestaurantReservationBookingDefaults:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleGetAvailableRestaurantReservationBookings:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleGetCarLockStatus:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleGetCarPowerLevelStatus:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleGetRestaurantGuest:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleGetRideStatus:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleGetUserCurrentRestaurantReservationBookings:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleGetVisualCode:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleListRideOptions:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handlePauseWorkout:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handlePayBill:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handlePlayMedia:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleRequestPayment:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleRequestRide:completion:', {'required': 
True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleResumeWorkout:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSaveProfileInCar:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSearchCallHistory:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'handleSearchForAccounts:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSearchForBills:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSearchForMessages:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'handleSearchForNotebookItems:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSearchForPhotos:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSendMessage:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'handleSendPayment:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSendRideFeedback:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSetAudioSourceInCar:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSetCarLockStatus:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSetClimateSettingsInCar:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSetDefrosterSettingsInCar:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSetMessageAttribute:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSetProfileInCar:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSetRadioStation:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSetSeatSettingsInCar:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleSetTaskAttribute:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', 
b'handleStartAudioCall:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'handleStartPhotoPlayback:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleStartVideoCall:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'handleStartWorkout:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handleTransferMoney:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'handlerForIntent:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'identifier', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'pronunciationHint', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'resolveAccountNicknameForSearchForAccounts:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveAccountTypeForSearchForAccounts:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveAirCirculationModeForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveAlbumNameForSearchForPhotos:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveAlbumNameForStartPhotoPlayback:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveAttributeForSetMessageAttribute:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveAttributesForSearchForMessages:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveAudioSourceForSetAudioSourceInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveBillPayeeForPayBill:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveBillPayeeForSearchForBills:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveBillTypeForPayBill:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveBillTypeForSearchForBills:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveBookingDateComponentsForBookRestaurantReservation:withCompletion:', {'required': False, 'retval': {'type': 
b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveCallCapabilitiesForSearchCallHistory:withCompletion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveCallTypeForSearchCallHistory:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveCallTypesForSearchCallHistory:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveCarNameForActivateCarSignal:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveCarNameForGetCarLockStatus:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveCarNameForGetCarPowerLevelStatus:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveCarNameForSetCarLockStatus:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveChannelForSetRadioStation:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveClimateZoneForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveContactsForStartAudioCall:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveContactsForStartVideoCall:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveContentForAppendToNote:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveContentForCreateNote:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveContentForSearchForNotebookItems:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveContentForSendMessage:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveCurrencyAmountForRequestPayment:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveCurrencyAmountForRequestPayment:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', 
b'resolveCurrencyAmountForSendPayment:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveCurrencyAmountForSendPayment:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDateCreatedForSearchCallHistory:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveDateCreatedForSearchForPhotos:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDateCreatedForStartPhotoPlayback:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDateSearchTypeForSearchForNotebookItems:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDateTimeForSearchForNotebookItems:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDateTimeRangeForSearchForMessages:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveDefaultProfileForSetProfileInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDefrosterForSetDefrosterSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDestinationTypeForStartAudioCall:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveDropOffLocationForListRideOptions:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDropOffLocationForRequestRide:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDueDateForPayBill:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveDueDateRangeForSearchForBills:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveEnableAirConditionerForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveEnableAutoModeForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveEnableClimateControlForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', 
b'resolveEnableCoolingForSetSeatSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveEnableFanForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveEnableForSetDefrosterSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveEnableHeatingForSetSeatSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveEnableMassageForSetSeatSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveFanSpeedIndexForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveFanSpeedPercentageForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveFrequencyForSetRadioStation:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveFromAccountForPayBill:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveFromAccountForTransferMoney:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveGoalValueForStartWorkout:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveGroupNameForCreateNote:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveGroupNameForCreateTaskList:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveGroupNameForSendMessage:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveGroupNamesForSearchForMessages:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveGuestForBookRestaurantReservation:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveGuestProvidedSpecialRequestTextForBookRestaurantReservation:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveIdentifiersForSearchForMessages:withCompletion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveIsOpenEndedForStartWorkout:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': 
b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveItemTypeForSearchForNotebookItems:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveLevelForSetSeatSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveLocationCreatedForSearchForPhotos:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveLocationCreatedForStartPhotoPlayback:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveLocationForSearchForNotebookItems:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveLocationSearchTypeForSearchForNotebookItems:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveLockedForSetCarLockStatus:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveNoteForRequestPayment:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveNoteForSendPayment:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveNotificationIdentifiersForSearchForMessages:withCompletion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveOrganizationNameForSearchForAccounts:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePartySizeForBookRestaurantReservation:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePartySizeForGetAvailableRestaurantReservationBookings:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePartySizeForRequestRide:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePayeeForSendPayment:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePayeeForSendPayment:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePayerForRequestPayment:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePayerForRequestPayment:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePaymentDateRangeForSearchForBills:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePeopleInPhotoForSearchForPhotos:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': 
b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePeopleInPhotoForStartPhotoPlayback:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePickupLocationForListRideOptions:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePickupLocationForRequestRide:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePreferredBookingDateComponentsForGetAvailableRestaurantReservationBookings:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolvePresetNumberForSetRadioStation:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveProfileNameForSaveProfileInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveProfileNameForSetProfileInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveProfileNumberForSaveProfileInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveProfileNumberForSetProfileInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRadioTypeForSetRadioStation:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRecipientForSearchCallHistory:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveRecipientsForSearchForMessages:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveRecipientsForSendMessage:completion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveRecipientsForSendMessage:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveRelativeAudioSourceReferenceForSetAudioSourceInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRelativeFanSpeedSettingForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRelativeLevelSettingForSetSeatSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', 
b'resolveRelativeTemperatureSettingForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRequestedBalanceTypeForSearchForAccounts:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRestaurantForBookRestaurantReservation:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRestaurantForGetAvailableRestaurantReservationBookingDefaults:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRestaurantForGetAvailableRestaurantReservationBookings:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRestaurantForGetUserCurrentRestaurantReservationBookings:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveRideOptionNameForRequestRide:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveScheduledPickupTimeForRequestRide:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveSearchTermsForSearchForMessages:withCompletion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveSearchTermsForSearchForPhotos:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveSeatForSetSeatSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveSenderForSendMessage:withCompletion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveSendersForSearchForMessages:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveServiceNameForSendMessage:withCompletion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveSignalsForActivateCarSignal:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveSpatialEventTriggerForAddTasks:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveSpatialEventTriggerForSetTaskAttribute:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveSpeakableGroupNameForSendMessage:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', 
b'resolveSpeakableGroupNamesForSearchForMessages:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveStationNameForSetRadioStation:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveStatusForSearchForBills:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveStatusForSearchForNotebookItems:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveStatusForSetTaskAttribute:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTargetNoteForAppendToNote:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTargetTaskForSetTaskAttribute:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTargetTaskListForAddTasks:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTaskTitlesForAddTasks:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTaskTitlesForCreateTaskList:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTemperatureForSetClimateSettingsInCar:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTemporalEventTriggerForAddTasks:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTemporalEventTriggerForSetTaskAttribute:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTitleForCreateNote:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTitleForCreateTaskList:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTitleForSearchForNotebookItems:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveToAccountForTransferMoney:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTransactionAmountForPayBill:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTransactionAmountForTransferMoney:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTransactionNoteForPayBill:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 
b'@?'}}}) - r(b'NSObject', b'resolveTransactionNoteForTransferMoney:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTransactionScheduledDateForPayBill:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveTransactionScheduledDateForTransferMoney:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveUnseenForSearchCallHistory:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'resolveVisualCodeTypeForGetVisualCode:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveWorkoutGoalUnitTypeForStartWorkout:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveWorkoutLocationTypeForStartWorkout:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveWorkoutNameForCancelWorkout:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveWorkoutNameForEndWorkout:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveWorkoutNameForPauseWorkout:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveWorkoutNameForResumeWorkout:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'resolveWorkoutNameForStartWorkout:withCompletion:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@?'}}}) - r(b'NSObject', b'spokenPhrase', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'startSendingUpdatesForGetRideStatus:toObserver:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'stopSendingUpdatesForGetRideStatus:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'vocabularyIdentifier', {'required': True, 'retval': {'type': b'@'}}) - r(b'null', b'deferredLocalizedIntentsStringWithFormat:', {'variadic': True}) - r(b'null', b'deferredLocalizedIntentsStringWithFormat:fromTable:', {'variadic': True}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Intents/_metadata.pyc b/env/lib/python2.7/site-packages/Intents/_metadata.pyc deleted file mode 100644 index 11afc628..00000000 Binary files a/env/lib/python2.7/site-packages/Intents/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/JavaScriptCore/__init__.py b/env/lib/python2.7/site-packages/JavaScriptCore/__init__.py deleted file mode 100644 index 766018c7..00000000 --- a/env/lib/python2.7/site-packages/JavaScriptCore/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the JavaScriptCore framework. 
- -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import objc -import sys -import CoreFoundation - -from JavaScriptCore import _metadata - -sys.modules['JavaScriptCore'] = mod = objc.ObjCLazyModule( - "JavaScriptCore", - "com.apple.JavaScriptCore", - objc.pathForFramework("/System/Library/Frameworks/JavaScriptCore.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (CoreFoundation,)) - -import sys -del sys.modules['JavaScriptCore._metadata'] - -import JavaScriptCore._util -mod.autoreleasing = JavaScriptCore._util.autoreleasing diff --git a/env/lib/python2.7/site-packages/JavaScriptCore/__init__.pyc b/env/lib/python2.7/site-packages/JavaScriptCore/__init__.pyc deleted file mode 100644 index 9776d8ec..00000000 Binary files a/env/lib/python2.7/site-packages/JavaScriptCore/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/JavaScriptCore/_metadata.py b/env/lib/python2.7/site-packages/JavaScriptCore/_metadata.py deleted file mode 100644 index 43db2f26..00000000 --- a/env/lib/python2.7/site-packages/JavaScriptCore/_metadata.py +++ /dev/null @@ -1,49 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Feb 20 11:33:43 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$JSPropertyDescriptorConfigurableKey$JSPropertyDescriptorEnumerableKey$JSPropertyDescriptorGetKey$JSPropertyDescriptorSetKey$JSPropertyDescriptorValueKey$JSPropertyDescriptorWritableKey$''' -enums = '''$WEBKIT_VERSION_1_0@256$WEBKIT_VERSION_1_1@272$WEBKIT_VERSION_1_2@288$WEBKIT_VERSION_1_3@304$WEBKIT_VERSION_2_0@512$WEBKIT_VERSION_3_0@768$WEBKIT_VERSION_3_1@784$WEBKIT_VERSION_4_0@1024$WEBKIT_VERSION_LATEST@39321$kJSClassAttributeNoAutomaticPrototype@2$kJSClassAttributeNone@0$kJSPropertyAttributeDontDelete@8$kJSPropertyAttributeDontEnum@4$kJSPropertyAttributeNone@0$kJSPropertyAttributeReadOnly@2$kJSTypeBoolean@2$kJSTypeNull@1$kJSTypeNumber@3$kJSTypeObject@5$kJSTypeString@4$kJSTypeSymbol@6$kJSTypeUndefined@0$kJSTypedArrayTypeArrayBuffer@9$kJSTypedArrayTypeFloat32Array@7$kJSTypedArrayTypeFloat64Array@8$kJSTypedArrayTypeInt16Array@1$kJSTypedArrayTypeInt32Array@2$kJSTypedArrayTypeInt8Array@0$kJSTypedArrayTypeNone@10$kJSTypedArrayTypeUint16Array@5$kJSTypedArrayTypeUint32Array@6$kJSTypedArrayTypeUint8Array@3$kJSTypedArrayTypeUint8ClampedArray@4$''' -misc.update({'JSC_OBJC_API_ENABLED': sel32or64(0, 1)}) -misc.update({}) -functions={'JSClassRetain': (b'^{OpaqueJSClass=}^{OpaqueJSClass=}',), 'JSValueCreateJSONString': (b'^{OpaqueJSString=}^{OpaqueJSContext=}^{OpaqueJSValue=}I^^{OpaqueJSValue=}', '', {'retval': {'already_cfretained': True}}), 'JSValueGetTypedArrayType': (b'I^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'JSStringCreateWithCFString': (b'^{OpaqueJSString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'JSValueToBoolean': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSStringCopyCFString': (b'^{__CFString=}^{__CFAllocator=}^{OpaqueJSString=}', '', {'retval': {'already_cfretained': True}}), 'JSObjectMakeTypedArray': 
(sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}IL^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}IQ^^{OpaqueJSValue=}'), '', {'arguments': {3: {'type_modifier': 'o'}}}), 'JSValueMakeString': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSString=}',), 'JSObjectGetArrayBufferBytesPtr': (b'^v^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}', '', {'retval': {'c_array_of_variable_length': True}, 'arguments': {2: {'type_modifier': 'o'}}}), 'JSObjectGetTypedArrayByteOffset': (sel32or64(b'L^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'Q^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'JSValueMakeNumber': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}d',), 'JSObjectMakeError': (sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}L^^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}Q^^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'type_modifier': 'o'}}}), 'JSValueProtect': (b'v^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSValueIsDate': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSObjectGetTypedArrayBytesPtr': (b'^v^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}', '', {'retval': {'c_array_of_variable_length': True}, 'arguments': {2: {'type_modifier': 'o'}}}), 'JSValueToStringCopy': (b'^{OpaqueJSString=}^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}', '', {'retval': {'already_cfretained': True}}), 'JSValueIsNumber': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSObjectHasProperty': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSString=}',), 'JSValueIsEqual': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSValue=}^^{OpaqueJSValue=}',), 'JSValueIsArray': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSValueToNumber': (b'd^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}',), 'JSObjectGetTypedArrayByteLength': (sel32or64(b'L^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'Q^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'JSValueUnprotect': (b'v^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSObjectSetPropertyAtIndex': (b'v^{OpaqueJSContext=}^{OpaqueJSValue=}I^{OpaqueJSValue=}^^{OpaqueJSValue=}', '', {'arguments': {4: {'type_modifier': 'o'}}}), 'JSObjectGetTypedArrayLength': (sel32or64(b'L^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'Q^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'JSObjectCallAsFunction': (sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSValue=}L^^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSValue=}Q^^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {4: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}, 5: {'type_modifier': 'o'}}}), 'JSPropertyNameArrayGetCount': (sel32or64(b'L^{OpaqueJSPropertyNameArray=}', b'Q^{OpaqueJSPropertyNameArray=}'),), 'JSStringGetLength': (sel32or64(b'L^{OpaqueJSString=}', b'Q^{OpaqueJSString=}'),), 'JSValueToObject': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}',), 'JSValueIsString': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSObjectCopyPropertyNames': (b'^{OpaqueJSPropertyNameArray=}^{OpaqueJSContext=}^{OpaqueJSValue=}', '', {'retval': {'already_cfretained': True}}), 'JSGlobalContextCopyName': (b'^{OpaqueJSString=}^{OpaqueJSContext=}', '', {'retval': {'already_cfretained': True}}), 'JSClassRelease': 
(b'v^{OpaqueJSClass=}',), 'JSValueMakeUndefined': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}',), 'JSObjectMakeArray': (sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}L^^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}Q^^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'type_modifier': 'o'}}}), 'JSGlobalContextSetName': (b'v^{OpaqueJSContext=}^{OpaqueJSString=}',), 'JSStringGetMaximumUTF8CStringSize': (sel32or64(b'L^{OpaqueJSString=}', b'Q^{OpaqueJSString=}'),), 'JSStringCreateWithCharacters': (sel32or64(b'^{OpaqueJSString=}^TL', b'^{OpaqueJSString=}^TQ'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'JSObjectMakeTypedArrayWithArrayBufferAndOffset': (sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}I^{OpaqueJSValue=}LL^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}I^{OpaqueJSValue=}QQ^^{OpaqueJSValue=}'), '', {'arguments': {5: {'type_modifier': 'o'}}}), 'JSObjectCallAsConstructor': (sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSValue=}L^^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSValue=}Q^^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 4: {'type_modifier': 'o'}}}), 'JSObjectIsFunction': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSValueIsInstanceOfConstructor': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSValue=}^^{OpaqueJSValue=}',), 'JSCheckScriptSyntax': (b'B^{OpaqueJSContext=}^{OpaqueJSString=}^{OpaqueJSString=}i^^{OpaqueJSValue=}', '', {'arguments': {4: {'type_modifier': 'o'}}}), 'JSObjectIsConstructor': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSPropertyNameArrayRelease': (b'v^{OpaqueJSPropertyNameArray=}',), 'JSValueIsBoolean': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSContextGroupRetain': (b'^{OpaqueJSContextGroup=}^{OpaqueJSContextGroup=}',), 'JSGlobalContextRetain': (b'^{OpaqueJSContext=}^{OpaqueJSContext=}',), 'JSStringRelease': (b'v^{OpaqueJSString=}',), 'JSObjectMakeFunctionWithCallback': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSString=}^?', '', {'arguments': {2: {'callable': {'retval': {'type': b'^{OpaqueJSValue=}'}, 'arguments': {0: {'type': b'^{OpaqueJSContext=}'}, 1: {'type': b'^{OpaqueJSValue=}'}, 2: {'type': b'^{OpaqueJSValue=}'}, 3: {'type': b'L'}, 4: {'type': b'^^{OpaqueJSValue=}', 'type_modifier': 'n', 'c_array_length_in_arg': 3}, 5: {'type': b'^^{OpaqueJSValue=}', 'type_modifier': 'o'}}}}}}), 'JSValueIsObject': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSStringGetUTF8CString': (sel32or64(b'L^{OpaqueJSString=}^tL', b'Q^{OpaqueJSString=}^tQ'), '', {'arguments': {1: {'c_array_length_in_result': True, 'type_modifier': 'o', 'c_array_length_in_arg': 2}}}), 'JSGlobalContextCreate': (b'^{OpaqueJSContext=}^{OpaqueJSClass=}', '', {'retval': {'already_cfretained': True}}), 'JSObjectSetProperty': (b'v^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSString=}^{OpaqueJSValue=}I^^{OpaqueJSValue=}', '', {'arguments': {5: {'type_modifier': 'o'}}}), 'JSValueMakeBoolean': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}B',), 'JSGlobalContextRelease': (b'v^{OpaqueJSContext=}',), 'JSObjectMakeRegExp': (sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}L^^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}Q^^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'type_modifier': 'o'}}}), 
'JSStringIsEqualToUTF8CString': (b'B^{OpaqueJSString=}^t', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'JSObjectGetProperty': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSString=}^^{OpaqueJSValue=}', '', {'arguments': {3: {'type_modifier': 'o'}}}), 'JSObjectMakeArrayBufferWithBytesNoCopy': (sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}n^vL^?^v^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}n^vQ^?^v^^{OpaqueJSValue=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_lenght_in_arg': '2'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v'}}}, 'callable_retained': True}, 5: {'type_modifier': 'o'}}}), 'JSValueIsNull': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSPropertyNameAccumulatorAddName': (b'v^{OpaqueJSPropertyNameAccumulator=}^{OpaqueJSString=}',), 'JSStringRetain': (b'^{OpaqueJSString=}^{OpaqueJSString=}',), 'JSObjectMakeConstructor': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSClass=}^?', '', {'arguments': {2: {'callable': {'retval': {'type': b'^{OpaqueJSValue=}'}, 'arguments': {0: {'type': b'^{OpaqueJSContext=}'}, 1: {'type': b'^{OpaqueJSValue=}'}, 2: {'type': b'L'}, 3: {'type': b'^^{OpaqueJSValue=}', 'type_modifier': 'n', 'c_array_length_in_arg': 2}, 4: {'type': b'^^{OpaqueJSValue=}', 'type_modifier': 'o'}}}}}}), 'JSObjectSetPrivate': (b'B^{OpaqueJSValue=}^v',), 'JSObjectMakeTypedArrayWithBytesNoCopy': (sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}In^vL^?^v^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}In^vQ^?^v^^{OpaqueJSValue=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'c_array_lenght_in_arg': 3, 'type_modifier': 'n'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v'}}}}, 6: {'type_modifier': 'o'}}}), 'JSValueMakeFromJSONString': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSString=}',), 'JSEvaluateScript': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSString=}^{OpaqueJSValue=}^{OpaqueJSString=}i^^{OpaqueJSValue=}', '', {'arguments': {5: {'type_modifier': 'o'}}}), 'JSContextGetGlobalContext': (b'^{OpaqueJSContext=}^{OpaqueJSContext=}',), 'JSStringCreateWithUTF8CString': (b'^{OpaqueJSString=}^t', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'JSObjectGetTypedArrayBuffer': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'JSObjectGetArrayBufferByteLength': (sel32or64(b'L^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'Q^{OpaqueJSContext=}^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'JSContextGetGlobalObject': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}',), 'JSPropertyNameArrayRetain': (b'^{OpaqueJSPropertyNameArray=}^{OpaqueJSPropertyNameArray=}',), 'JSObjectMake': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSClass=}^v',), 'JSObjectGetPrivate': (b'^v^{OpaqueJSValue=}',), 'JSValueIsUndefined': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSValueIsObjectOfClass': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSClass=}',), 'JSObjectGetPrototype': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSValue=}',), 'JSPropertyNameArrayGetNameAtIndex': (sel32or64(b'^{OpaqueJSString=}^{OpaqueJSPropertyNameArray=}L', b'^{OpaqueJSString=}^{OpaqueJSPropertyNameArray=}Q'),), 'JSValueMakeNull': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}',), 
'JSContextGetGroup': (b'^{OpaqueJSContextGroup=}^{OpaqueJSContext=}',), 'JSContextGroupCreate': (b'^{OpaqueJSContextGroup=}', '', {'retval': {'already_cfretained': True}}), 'JSObjectDeleteProperty': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSString=}^^{OpaqueJSValue=}', '', {'arguments': {3: {'type_modifier': 'o'}}}), 'JSObjectMakeFunction': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSString=}I^^{OpaqueJSString=}^{OpaqueJSString=}^{OpaqueJSString=}i^^{OpaqueJSValue=}', '', {'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 7: {'type_modifier': 'o'}}}), 'JSObjectGetPropertyAtIndex': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSValue=}I^^{OpaqueJSValue=}', '', {'arguments': {3: {'type_modifier': 'o'}}}), 'JSStringIsEqual': (b'B^{OpaqueJSString=}^{OpaqueJSString=}',), 'JSObjectSetPrototype': (b'v^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSValue=}',), 'JSContextGroupRelease': (b'v^{OpaqueJSContextGroup=}',), 'JSValueIsStrictEqual': (b'B^{OpaqueJSContext=}^{OpaqueJSValue=}^{OpaqueJSValue=}',), 'JSGlobalContextCreateInGroup': (b'^{OpaqueJSContext=}^{OpaqueJSContextGroup=}^{OpaqueJSClass=}', '', {'retval': {'already_cfretained': True}}), 'JSObjectMakeDate': (sel32or64(b'^{OpaqueJSValue=}^{OpaqueJSContext=}L^^{OpaqueJSValue=}^^{OpaqueJSValue=}', b'^{OpaqueJSValue=}^{OpaqueJSContext=}Q^^{OpaqueJSValue=}^^{OpaqueJSValue=}'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'type_modifier': 'o'}}}), 'JSObjectMakeTypedArrayWithArrayBuffer': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}I^{OpaqueJSValue=}^^{OpaqueJSValue=}', '', {'arguments': {3: {'type_modifier': 'o'}}}), 'JSValueMakeSymbol': (b'^{OpaqueJSValue=}^{OpaqueJSContext=}^{OpaqueJSString=}',), 'JSGarbageCollect': (b'v^{OpaqueJSContext=}',), 'JSValueGetType': (b'I^{OpaqueJSContext=}^{OpaqueJSValue=}',)} -aliases = {'JSObjectRef': 'JSValueRef', 'JSGlobalContextRef': 'JSContextRef'} -misc.update({'JSValueRef': objc.createOpaquePointerType('JSValueRef', b'^{OpaqueJSValue=}'), 'JSStringRef': objc.createOpaquePointerType('JSStringRef', b'^{OpaqueJSString=}'), 'JSContextRef': objc.createOpaquePointerType('JSContextRef', b'^{OpaqueJSContext=}'), 'JSPropertyNameArrayRef': objc.createOpaquePointerType('JSPropertyNameArrayRef', b'^{OpaqueJSPropertyNameArray=}'), 'JSClassRef': objc.createOpaquePointerType('JSClassRef', b'^{OpaqueJSClass=}'), 'JSContextGroupRef': objc.createOpaquePointerType('JSContextGroupRef', b'^{OpaqueJSContextGroup=}'), 'JSPropertyNameAccumulatorRef': objc.createOpaquePointerType('JSPropertyNameAccumulatorRef', b'^{OpaqueJSPropertyNameAccumulator=}')}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'JSContext', b'exceptionHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'JSContext', b'setExceptionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'JSValue', b'deleteProperty:', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'hasProperty:', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isArray', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isBoolean', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isDate', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isEqualToObject:', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isEqualWithTypeCoercionToObject:', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isInstanceOf:', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isNull', 
{'retval': {'type': 'Z'}}) - r(b'JSValue', b'isNumber', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isObject', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isString', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'isUndefined', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'toBool', {'retval': {'type': 'Z'}}) - r(b'JSValue', b'valueWithBool:inContext:', {'arguments': {2: {'type': b'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/JavaScriptCore/_metadata.pyc b/env/lib/python2.7/site-packages/JavaScriptCore/_metadata.pyc deleted file mode 100644 index fd645c13..00000000 Binary files a/env/lib/python2.7/site-packages/JavaScriptCore/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/JavaScriptCore/_util.py b/env/lib/python2.7/site-packages/JavaScriptCore/_util.py deleted file mode 100644 index 5ede3527..00000000 --- a/env/lib/python2.7/site-packages/JavaScriptCore/_util.py +++ /dev/null @@ -1,25 +0,0 @@ -import JavaScriptCore -import contextlib - -@contextlib.contextmanager -def autoreleasing(value): - try: - yield value - - finally: - if isinstance(value, JavaScriptCore.JSContextGroupRef): - JavaScriptCore.JSContextGroupRelease(value) - - elif isinstance(value, JavaScriptCore.JSGlobalContextRef): - JavaScriptCore.JSGlobalContextRelease(value) - - elif isinstance(value, JavaScriptCore.JSClassRef): - JavaScriptCore.JSClassRelease(value) - - elif isinstance(value, JavaScriptCore.JSPropertyNameArrayRef): - JavaScriptCore.JSPropertyNameArrayRelease(value) - - elif isinstance(value, JavaScriptCore.JSStringRef): - JavaScriptCore.JSStringRelease(value) - - diff --git a/env/lib/python2.7/site-packages/JavaScriptCore/_util.pyc b/env/lib/python2.7/site-packages/JavaScriptCore/_util.pyc deleted file mode 100644 index ff8ce882..00000000 Binary files a/env/lib/python2.7/site-packages/JavaScriptCore/_util.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/LatentSemanticMapping/__init__.py b/env/lib/python2.7/site-packages/LatentSemanticMapping/__init__.py deleted file mode 100644 index b224c0a4..00000000 --- a/env/lib/python2.7/site-packages/LatentSemanticMapping/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the LatentSemanticMapping framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import Foundation - -from LatentSemanticMapping import _metadata - -sys.modules['LatentSemanticMapping'] = mod = objc.ObjCLazyModule('LatentSemanticMapping', - "com.apple.speech.LatentSemanticMappingFramework", - objc.pathForFramework("/System/Library/Frameworks/LatentSemanticMapping.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation,)) - -import sys -del sys.modules['LatentSemanticMapping._metadata'] diff --git a/env/lib/python2.7/site-packages/LatentSemanticMapping/__init__.pyc b/env/lib/python2.7/site-packages/LatentSemanticMapping/__init__.pyc deleted file mode 100644 index 78ddf7c6..00000000 Binary files a/env/lib/python2.7/site-packages/LatentSemanticMapping/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/LatentSemanticMapping/_metadata.py b/env/lib/python2.7/site-packages/LatentSemanticMapping/_metadata.py deleted file mode 100644 index 6377bdba..00000000 --- a/env/lib/python2.7/site-packages/LatentSemanticMapping/_metadata.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Fri Sep 21 15:42:58 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$kLSMClusterAgglomerative@4$kLSMClusterCategories@0$kLSMClusterKMeans@0$kLSMClusterTokens@2$kLSMClusterWords@1$kLSMMapBadCluster@-6644$kLSMMapBadPath@-6643$kLSMMapDiscardCounts@1$kLSMMapHashText@256$kLSMMapLoadMutable@2$kLSMMapNoSuchCategory@-6641$kLSMMapOutOfState@-6640$kLSMMapOverflow@-6645$kLSMMapPairs@1$kLSMMapTriplets@2$kLSMMapWriteError@-6642$kLSMResultBestWords@1$kLSMTextApplySpamHeuristics@4$kLSMTextPreserveAcronyms@2$kLSMTextPreserveCase@1$''' -misc.update({'kLSMAlgorithmDense': b'LSMAlgorithmDense'.decode("utf-8"), 'kLSMPrecisionFloat': b'LSMPrecisionFloat'.decode("utf-8"), 'kLSMSweepCutoffKey': b'LSMSweepCutoff'.decode("utf-8"), 'kLSMAlgorithmSparse': b'LSMAlgorithmSparse'.decode("utf-8"), 'kLSMDimensionKey': b'LSMDimension'.decode("utf-8"), 'kLSMAlgorithmKey': b'LSMAlgorithm'.decode("utf-8"), 'kLSMPrecisionKey': b'LSMPrecision'.decode("utf-8"), 'kLSMPrecisionDouble': b'LSMPrecisionDouble'.decode("utf-8"), 'kLSMSweepAgeKey': b'LSMSweepAge'.decode("utf-8"), 'kLSMIterationsKey': b'LSMIterations'.decode("utf-8")}) -functions={'LSMMapGetCategoryCount': (sel32or64(b'l^{__LSMMap=}', b'q^{__LSMMap=}'),), 'LSMMapAddTextWithWeight': (sel32or64(b'l^{__LSMMap=}^{__LSMText=}If', b'i^{__LSMMap=}^{__LSMText=}If'),), 'LSMTextAddToken': (sel32or64(b'l^{__LSMText=}^{__CFData=}', b'i^{__LSMText=}^{__CFData=}'),), 'LSMMapSetStopWords': (sel32or64(b'l^{__LSMMap=}^{__LSMText=}', b'i^{__LSMMap=}^{__LSMText=}'),), 'LSMTextGetTypeID': (sel32or64(b'L', b'Q'),), 'LSMMapStartTraining': (sel32or64(b'l^{__LSMMap=}', b'i^{__LSMMap=}'),), 'LSMMapCompile': (sel32or64(b'l^{__LSMMap=}', b'i^{__LSMMap=}'),), 'LSMMapWriteToStream': (sel32or64(b'l^{__LSMMap=}^{__LSMText=}^{__CFWriteStream=}L', b'i^{__LSMMap=}^{__LSMText=}^{__CFWriteStream=}Q'),), 'LSMResultCopyWordCluster': (sel32or64(b'^{__CFArray=}^{__LSMResult=}l', b'^{__CFArray=}^{__LSMResult=}q'), '', {'retval': {'already_cfretained': True}}), 'LSMResultGetCount': (sel32or64(b'l^{__LSMResult=}', b'q^{__LSMResult=}'),), 'LSMResultGetCategory': 
(sel32or64(b'I^{__LSMResult=}l', b'I^{__LSMResult=}q'),), 'LSMMapCreate': (sel32or64(b'^{__LSMMap=}^{__CFAllocator=}L', b'^{__LSMMap=}^{__CFAllocator=}Q'), '', {'retval': {'already_cfretained': True}}), 'LSMResultGetTypeID': (sel32or64(b'L', b'Q'),), 'LSMResultCreate': (sel32or64(b'^{__LSMResult=}^{__CFAllocator=}^{__LSMMap=}^{__LSMText=}lL', b'^{__LSMResult=}^{__CFAllocator=}^{__LSMMap=}^{__LSMText=}qQ'), '', {'retval': {'already_cfretained': True}}), 'LSMResultCopyWord': (sel32or64(b'^{__CFString=}^{__LSMResult=}l', b'^{__CFString=}^{__LSMResult=}q'), '', {'retval': {'already_cfretained': True}}), 'LSMMapCreateClusters': (sel32or64(b'^{__CFArray=}^{__CFAllocator=}^{__LSMMap=}^{__CFArray=}lL', b'^{__CFArray=}^{__CFAllocator=}^{__LSMMap=}^{__CFArray=}qQ'), '', {'retval': {'already_cfretained': True}}), 'LSMMapApplyClusters': (sel32or64(b'l^{__LSMMap=}^{__CFArray=}', b'i^{__LSMMap=}^{__CFArray=}'),), 'LSMTextAddWords': (sel32or64(b'l^{__LSMText=}^{__CFString=}^{__CFLocale=}L', b'i^{__LSMText=}^{__CFString=}^{__CFLocale=}Q'),), 'LSMResultCopyTokenCluster': (sel32or64(b'^{__CFArray=}^{__LSMResult=}l', b'^{__CFArray=}^{__LSMResult=}q'), '', {'retval': {'already_cfretained': True}}), 'LSMMapAddText': (sel32or64(b'l^{__LSMMap=}^{__LSMText=}I', b'i^{__LSMMap=}^{__LSMText=}I'),), 'LSMTextCreate': (b'^{__LSMText=}^{__CFAllocator=}^{__LSMMap=}', '', {'retval': {'already_cfretained': True}}), 'LSMMapWriteToURL': (sel32or64(b'l^{__LSMMap=}^{__CFURL=}L', b'i^{__LSMMap=}^{__CFURL=}Q'),), 'LSMResultGetScore': (sel32or64(b'f^{__LSMResult=}l', b'f^{__LSMResult=}q'),), 'LSMMapCreateFromURL': (sel32or64(b'^{__LSMMap=}^{__CFAllocator=}^{__CFURL=}L', b'^{__LSMMap=}^{__CFAllocator=}^{__CFURL=}Q'), '', {'retval': {'already_cfretained': True}}), 'LSMMapSetProperties': (b'v^{__LSMMap=}^{__CFDictionary=}',), 'LSMResultCopyToken': (sel32or64(b'^{__CFData=}^{__LSMResult=}l', b'^{__CFData=}^{__LSMResult=}q'), '', {'retval': {'already_cfretained': True}}), 'LSMMapGetProperties': (b'^{__CFDictionary=}^{__LSMMap=}',), 'LSMMapAddCategory': (b'I^{__LSMMap=}',), 'LSMTextAddWord': (sel32or64(b'l^{__LSMText=}^{__CFString=}', b'i^{__LSMText=}^{__CFString=}'),), 'LSMMapGetTypeID': (sel32or64(b'L', b'Q'),)} -cftypes=[('LSMMapRef', b'^{__LSMMap=}', 'LSMMapGetTypeID', None), ('LSMResultRef', b'^{__LSMResult=}', 'LSMResultGetTypeID', None), ('LSMTextRef', b'^{__LSMText=}', 'LSMTextGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/LatentSemanticMapping/_metadata.pyc b/env/lib/python2.7/site-packages/LatentSemanticMapping/_metadata.pyc deleted file mode 100644 index e182bdd7..00000000 Binary files a/env/lib/python2.7/site-packages/LatentSemanticMapping/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/LaunchServices/__init__.py b/env/lib/python2.7/site-packages/LaunchServices/__init__.py deleted file mode 100644 index 75a9b364..00000000 --- a/env/lib/python2.7/site-packages/LaunchServices/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -''' -Python mapping for the LaunchServices framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import objc, sys -import os -import CoreServices - -import warnings -warnings.warn("pyobjc-framework-LaunchServices is deprecated, use 'import CoreServices' instead", DeprecationWarning) - -sys.modules['LaunchServices'] = mod = objc.ObjCLazyModule( - "LaunchServices", - "com.apple.CoreServices", - objc.pathForFramework('/System/Library/Frameworks/CoreServices.framework/CoreServices'), - {}, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (CoreServices,)) diff --git a/env/lib/python2.7/site-packages/LaunchServices/__init__.pyc b/env/lib/python2.7/site-packages/LaunchServices/__init__.pyc deleted file mode 100644 index d774ed2d..00000000 Binary files a/env/lib/python2.7/site-packages/LaunchServices/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/LocalAuthentication/__init__.py b/env/lib/python2.7/site-packages/LocalAuthentication/__init__.py deleted file mode 100644 index 51c4d744..00000000 --- a/env/lib/python2.7/site-packages/LocalAuthentication/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the CloudKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from LocalAuthentication import _metadata - -sys.modules['LocalAuthentication'] = mod = objc.ObjCLazyModule( - "LocalAuthentication", - "com.apple.LocalAuthentication", - objc.pathForFramework("/System/Library/Frameworks/LocalAuthentication.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['LocalAuthentication._metadata'] diff --git a/env/lib/python2.7/site-packages/LocalAuthentication/__init__.pyc b/env/lib/python2.7/site-packages/LocalAuthentication/__init__.pyc deleted file mode 100644 index be71bd3e..00000000 Binary files a/env/lib/python2.7/site-packages/LocalAuthentication/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/LocalAuthentication/_metadata.py b/env/lib/python2.7/site-packages/LocalAuthentication/_metadata.py deleted file mode 100644 index 41bf821b..00000000 --- a/env/lib/python2.7/site-packages/LocalAuthentication/_metadata.py +++ /dev/null @@ -1,40 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Nov 20 13:25:40 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$LAErrorDomain$LATouchIDAuthenticationMaximumAllowableReuseDuration@d$''' -enums = 
'''$LAAccessControlOperationCreateItem@0$LAAccessControlOperationCreateKey@2$LAAccessControlOperationUseItem@1$LAAccessControlOperationUseKeyDecrypt@4$LAAccessControlOperationUseKeyKeyExchange@5$LAAccessControlOperationUseKeySign@3$LABiometryNone@0$LABiometryTypeNone@0$LABiometryTypeTouchID@1$LACredentialTypeApplicationPassword@0$LAErrorAppCancel@-9$LAErrorAuthenticationFailed@-1$LAErrorInvalidContext@-10$LAErrorNotInteractive@-1004$LAErrorPasscodeNotSet@-5$LAErrorSystemCancel@-4$LAErrorTouchIDLockout@-8$LAErrorTouchIDNotAvailable@-6$LAErrorTouchIDNotEnrolled@-7$LAErrorUserCancel@-2$LAErrorUserFallback@-3$LAPolicyDeviceOwnerAuthentication@2$LAPolicyDeviceOwnerAuthenticationWithBiometrics@1$kLACredentialCTKPIN@-3$kLACredentialTypeApplicationPassword@0$kLACredentialTypePasscode@-1$kLACredentialTypePassphrase@-2$kLAErrorAppCancel@-9$kLAErrorAuthenticationFailed@-1$kLAErrorInvalidContext@-10$kLAErrorNotInteractive@-1004$kLAErrorPasscodeNotSet@-5$kLAErrorSystemCancel@-4$kLAErrorTouchIDLockout@-8$kLAErrorTouchIDNotAvailable@-6$kLAErrorTouchIDNotEnrolled@-7$kLAErrorUserCancel@-2$kLAErrorUserFallback@-3$kLAOptionAuthenticationReason@2$kLAOptionUserFallback@1$kLAPolicyDeviceOwnerAuthentication@2$kLAPolicyDeviceOwnerAuthenticationWithBiometrics@1$''' -misc.update({'kLAErrorDomain': b'com.apple.LocalAuthentication'.decode("utf-8")}) -aliases = {'kLAErrorBiometryNotAvailable': 'kLAErrorTouchIDNotAvailable', 'LAErrorBiometryNotEnrolled': 'kLAErrorBiometryNotEnrolled', 'kLAErrorBiometryNotEnrolled': 'kLAErrorTouchIDNotEnrolled', 'LAErrorBiometryLockout': 'kLAErrorBiometryLockout', 'LAErrorBiometryNotAvailable': 'kLAErrorBiometryNotAvailable', 'kLAErrorBiometryLockout': 'kLAErrorTouchIDLockout'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'LAContext', b'canEvaluatePolicy:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'LAContext', b'evaluatePolicy:localizedReason:reply:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'LAContext', b'evaluatePolicy:operation:localizedReason:reply:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'LAContext', b'interactionNotAllowed', {'retval': {'type': 'Z'}}) - r(b'LAContext', b'isCancelButtonVisible', {'retval': {'type': 'Z'}}) - r(b'LAContext', b'isCredentialSet:', {'retval': {'type': 'Z'}}) - r(b'LAContext', b'isFallbackButtonVisible', {'retval': {'type': 'Z'}}) - r(b'LAContext', b'setCancelButtonVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'LAContext', b'setCredential:type:', {'retval': {'type': 'Z'}}) - r(b'LAContext', b'setFallbackButtonVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'LAContext', b'setInteractionNotAllowed:', {'arguments': {2: {'type': 'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/LocalAuthentication/_metadata.pyc b/env/lib/python2.7/site-packages/LocalAuthentication/_metadata.pyc deleted file mode 100644 index e94fd826..00000000 Binary files a/env/lib/python2.7/site-packages/LocalAuthentication/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MapKit/_MapKit.so b/env/lib/python2.7/site-packages/MapKit/_MapKit.so deleted file mode 100755 index 3aa12740..00000000 Binary files a/env/lib/python2.7/site-packages/MapKit/_MapKit.so and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/MapKit/__init__.py b/env/lib/python2.7/site-packages/MapKit/__init__.py deleted file mode 100644 index e81fb345..00000000 --- a/env/lib/python2.7/site-packages/MapKit/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -''' -Python mapping for the MapKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Cocoa -import CoreLocation -import Quartz - -from MapKit import _metadata, _MapKit -from MapKit._inlines import _inline_list_ - -sys.modules['MapKit'] = mod = objc.ObjCLazyModule( - "MapKit", - "com.apple.MapKit", - objc.pathForFramework("/System/Library/Frameworks/MapKit.framework"), - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa, CoreLocation, Quartz)) - -import sys -del sys.modules['MapKit._metadata'] -del sys.modules['MapKit._MapKit'] diff --git a/env/lib/python2.7/site-packages/MapKit/__init__.pyc b/env/lib/python2.7/site-packages/MapKit/__init__.pyc deleted file mode 100644 index 4ba1af5f..00000000 Binary files a/env/lib/python2.7/site-packages/MapKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MapKit/_inlines.so b/env/lib/python2.7/site-packages/MapKit/_inlines.so deleted file mode 100755 index 9e2c4663..00000000 Binary files a/env/lib/python2.7/site-packages/MapKit/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MapKit/_metadata.py b/env/lib/python2.7/site-packages/MapKit/_metadata.py deleted file mode 100644 index d4d749a6..00000000 --- a/env/lib/python2.7/site-packages/MapKit/_metadata.py +++ /dev/null @@ -1,180 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jun 11 11:36:52 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'MKMapRect': objc.createStructType('MKMapRect', b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}', ['origin', 'size']), 'MKMapSize': objc.createStructType('MKMapSize', b'{_MKMapSize=dd}', ['width', 'height']), 'MKCoordinateRegion': objc.createStructType('MKCoordinateRegion', b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}', ['center', 'span']), 'MKCoordinateSpan': objc.createStructType('MKCoordinateSpan', b'{_MKCoordinateSpan=dd}', ['latitudeDelta', 'longitudeDelta']), 'MKTileOverlayPath': objc.createStructType('MKTileOverlayPath', b'{_MKTileOverlayPath=qqqd}', ['x', 'y', 'z', 'contentScaleFactor']), 'MKMapPoint': objc.createStructType('MKMapPoint', b'{_MKMapPoint=dd}', ['x', 'y'])}) -constants = '''$MKAnnotationCalloutInfoDidChangeNotification$MKErrorDomain$MKLaunchOptionsCameraKey$MKLaunchOptionsDirectionsModeDefault$MKLaunchOptionsDirectionsModeDriving$MKLaunchOptionsDirectionsModeKey$MKLaunchOptionsDirectionsModeTransit$MKLaunchOptionsDirectionsModeWalking$MKLaunchOptionsMapCenterKey$MKLaunchOptionsMapSpanKey$MKLaunchOptionsMapTypeKey$MKLaunchOptionsShowsTrafficKey$MKMapItemTypeIdentifier$MKMapRectNull@{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}$MKMapRectWorld@{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}$MKMapSizeWorld@{_MKMapSize=dd}$MKMapViewDefaultAnnotationViewReuseIdentifier$MKMapViewDefaultClusterAnnotationViewReuseIdentifier$''' -enums = 
'''$MKAnnotationViewCollisionModeCircle@1$MKAnnotationViewCollisionModeRectangle@0$MKAnnotationViewDragStateCanceling@3$MKAnnotationViewDragStateDragging@2$MKAnnotationViewDragStateEnding@4$MKAnnotationViewDragStateNone@0$MKAnnotationViewDragStateStarting@1$MKDirectionsTransportTypeAny@268435455$MKDirectionsTransportTypeAutomobile@1$MKDirectionsTransportTypeTransit@4$MKDirectionsTransportTypeWalking@2$MKDistanceFormatterUnitStyleAbbreviated@1$MKDistanceFormatterUnitStyleDefault@0$MKDistanceFormatterUnitStyleFull@2$MKDistanceFormatterUnitsDefault@0$MKDistanceFormatterUnitsImperial@2$MKDistanceFormatterUnitsImperialWithYards@3$MKDistanceFormatterUnitsMetric@1$MKErrorDirectionsNotFound@5$MKErrorLoadingThrottled@3$MKErrorPlacemarkNotFound@4$MKErrorServerFailure@2$MKErrorUnknown@1$MKFeatureDisplayPriorityDefaultHigh@750$MKFeatureDisplayPriorityDefaultLow@250$MKFeatureDisplayPriorityRequired@1000$MKFeatureVisibilityAdaptive@0$MKFeatureVisibilityHidden@1$MKFeatureVisibilityVisible@2$MKMapTypeHybrid@2$MKMapTypeHybridFlyover@4$MKMapTypeMutedStandard@5$MKMapTypeSatellite@1$MKMapTypeSatelliteFlyover@3$MKMapTypeStandard@0$MKOverlayLevelAboveLabels@1$MKOverlayLevelAboveRoads@0$MKPinAnnotationColorGreen@1$MKPinAnnotationColorPurple@2$MKPinAnnotationColorRed@0$MKSearchCompletionFilterTypeLocationsAndQueries@0$MKSearchCompletionFilterTypeLocationsOnly@1$MKUserTrackingModeFollow@1$MKUserTrackingModeFollowWithHeading@2$MKUserTrackingModeNone@0$''' -misc.update({}) -functions={'MKMapRectOffset': (b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}dd',), 'MKMapRectIsEmpty': (b'Z{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapRectGetMidX': (b'd{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapRectGetMidY': (b'd{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapRectGetMinX': (b'd{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapRectGetWidth': (b'd{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKStringFromMapRect': (b'@{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKCoordinateSpanMake': (b'{_MKCoordinateSpan=dd}dd',), 'MKMapRectGetMaxX': (b'd{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapRectGetMaxY': (b'd{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapSizeEqualToSize': (b'Z{_MKMapSize=dd}{_MKMapSize=dd}',), 'MKMapRectIsNull': (b'Z{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKStringFromMapPoint': (b'@{_MKMapPoint=dd}',), 'MKMapRectDivide': (b'v{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}^{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}^{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}dI', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'MKMetersPerMapPointAtLatitude': (b'dd',), 'MKCoordinateRegionMakeWithDistance': (b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}{_CLLocationCoordinate2D=dd}dd',), 'MKMapRectMake': (b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}dddd',), 'MKMapPointEqualToPoint': (b'Z{_MKMapPoint=dd}{_MKMapPoint=dd}',), 'MKMapRectContainsPoint': (b'Z{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapPoint=dd}',), 'MKMapRectUnion': (b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKCoordinateForMapPoint': (b'{_CLLocationCoordinate2D=dd}{_MKMapPoint=dd}',), 'MKMapRectRemainder': (b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKCoordinateRegionMake': 
(b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}{_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}',), 'MKStringFromMapSize': (b'@{_MKMapSize=dd}',), 'MKCoordinateRegionForMapRect': (b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapSizeMake': (b'{_MKMapSize=dd}dd',), 'MKMapRectIntersection': (b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapRectInset': (b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}dd',), 'MKMapRectContainsRect': (b'Z{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapRectGetMinY': (b'd{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKRoadWidthAtZoomScale': (b'dd',), 'MKMapRectSpans180thMeridian': (b'Z{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapPointForCoordinate': (b'{_MKMapPoint=dd}{_CLLocationCoordinate2D=dd}',), 'MKMapPointsPerMeterAtLatitude': (b'dd',), 'MKMapRectIntersectsRect': (b'Z{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMetersBetweenMapPoints': (b'd{_MKMapPoint=dd}{_MKMapPoint=dd}',), 'MKMapPointMake': (b'{_MKMapPoint=dd}dd',), 'MKMapRectEqualToRect': (b'Z{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',), 'MKMapRectGetHeight': (b'd{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}',)} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'MKAnnotationView', b'canShowCallout', {'retval': {'type': b'Z'}}) - r(b'MKAnnotationView', b'isDraggable', {'retval': {'type': b'Z'}}) - r(b'MKAnnotationView', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'MKAnnotationView', b'isHighlighted', {'retval': {'type': b'Z'}}) - r(b'MKAnnotationView', b'isSelected', {'retval': {'type': b'Z'}}) - r(b'MKAnnotationView', b'setCanShowCallout:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKAnnotationView', b'setDragState:animated:', {'arguments': {3: {'type': b'Z'}}}) - r(b'MKAnnotationView', b'setDraggable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKAnnotationView', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKAnnotationView', b'setHighlighted:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKAnnotationView', b'setSelected:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKAnnotationView', b'setSelected:animated:', {'arguments': {2: {'type': b'Z'}, 3: {'type': b'Z'}}}) - r(b'MKCircle', b'boundingMapRect', {'retval': {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}) - r(b'MKCircle', b'circleWithCenterCoordinate:radius:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'MKCircle', b'circleWithMapRect:', {'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKCircle', b'coordinate', {'retval': {'type': b'{_CLLocationCoordinate2D=dd}'}}) - r(b'MKDirections', b'calculateDirectionsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'MKDirections', b'calculateETAWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'MKDirections', b'isCalculating', {'retval': {'type': b'Z'}}) - r(b'MKDirectionsRequest', b'isDirectionsRequestURL:', {'retval': {'type': b'Z'}}) - r(b'MKDirectionsRequest', 
b'requestsAlternateRoutes', {'retval': {'type': b'Z'}}) - r(b'MKDirectionsRequest', b'setRequestsAlternateRoutes:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKGeodesicPolyline', b'polylineWithCoordinates:count:', {'arguments': {2: {'type': b'^{_CLLocationCoordinate2D=dd}', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MKGeodesicPolyline', b'polylineWithPoints:count:', {'arguments': {2: {'type': b'^{_MKMapPoint=dd}', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MKLocalSearch', b'isSearching', {'retval': {'type': b'Z'}}) - r(b'MKLocalSearch', b'startWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'MKLocalSearchCompleter', b'isSearching', {'retval': {'type': 'Z'}}) - r(b'MKLocalSearchRequest', b'region', {'retval': {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}) - r(b'MKLocalSearchRequest', b'setRegion:', {'arguments': {2: {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}}) - r(b'MKLocalSearchResponse', b'boundingRegion', {'retval': {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}) - r(b'MKMapCamera', b'cameraLookingAtCenterCoordinate:fromEyeCoordinate:eyeAltitude:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}, 3: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'MKMapCamera', b'centerCoordinate', {'retval': {'type': b'{_CLLocationCoordinate2D=dd}'}}) - r(b'MKMapCamera', b'setCenterCoordinate:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'MKMapItem', b'isCurrentLocation', {'retval': {'type': b'Z'}}) - r(b'MKMapItem', b'openInMapsWithLaunchOptions:', {'retval': {'type': b'Z'}}) - r(b'MKMapItem', b'openMapsWithItems:launchOptions:', {'retval': {'type': b'Z'}}) - r(b'MKMapSnapshot', b'pointForCoordinate:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'MKMapSnapshotOptions', b'mapRect', {'retval': {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}) - r(b'MKMapSnapshotOptions', b'region', {'retval': {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}) - r(b'MKMapSnapshotOptions', b'setMapRect:', {'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKMapSnapshotOptions', b'setRegion:', {'arguments': {2: {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}}) - r(b'MKMapSnapshotOptions', b'setShowsBuildings:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapSnapshotOptions', b'setShowsPointsOfInterest:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapSnapshotOptions', b'showsBuildings', {'retval': {'type': b'Z'}}) - r(b'MKMapSnapshotOptions', b'showsPointsOfInterest', {'retval': {'type': b'Z'}}) - r(b'MKMapSnapshotter', b'isLoading', {'retval': {'type': b'Z'}}) - r(b'MKMapSnapshotter', b'startWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'MKMapSnapshotter', b'startWithQueue:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'MKMapView', b'annotationsInMapRect:', {'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKMapView', b'centerCoordinate', {'retval': {'type': b'{_CLLocationCoordinate2D=dd}'}}) - r(b'MKMapView', 
b'convertCoordinate:toPointToView:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'MKMapView', b'convertPoint:toCoordinateFromView:', {'retval': {'type': b'{_CLLocationCoordinate2D=dd}'}}) - r(b'MKMapView', b'convertRect:toRegionFromView:', {'retval': {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}) - r(b'MKMapView', b'convertRegion:toRectToView:', {'arguments': {2: {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}}) - r(b'MKMapView', b'deselectAnnotation:animated:', {'arguments': {3: {'type': b'Z'}}}) - r(b'MKMapView', b'isPitchEnabled', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'isRotateEnabled', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'isScrollEnabled', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'isUserLocationVisible', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'isZoomEnabled', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'mapRectThatFits:', {'retval': {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}, 'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKMapView', b'mapRectThatFits:edgePadding:', {'retval': {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}, 'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}, 3: {'type': b'{_NSEdgeInsets=dddd}'}}}) - r(b'MKMapView', b'region', {'retval': {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}) - r(b'MKMapView', b'regionThatFits:', {'retval': {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}, 'arguments': {2: {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}}) - r(b'MKMapView', b'selectAnnotation:animated:', {'arguments': {3: {'type': b'Z'}}}) - r(b'MKMapView', b'setCamera:animated:', {'arguments': {3: {'type': b'Z'}}}) - r(b'MKMapView', b'setCenterCoordinate:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'MKMapView', b'setCenterCoordinate:animated:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}, 3: {'type': b'Z'}}}) - r(b'MKMapView', b'setPitchEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'setRegion:', {'arguments': {2: {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}}}) - r(b'MKMapView', b'setRegion:animated:', {'arguments': {2: {'type': b'{_MKCoordinateRegion={_CLLocationCoordinate2D=dd}{_MKCoordinateSpan=dd}}'}, 3: {'type': b'Z'}}}) - r(b'MKMapView', b'setRotateEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'setScrollEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'setShowsBuildings:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'setShowsCompass:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'setShowsPointsOfInterest:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'setShowsScale:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'setShowsTraffic:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MKMapView', b'setShowsUserLocation:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'setShowsZoomControls:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'setVisibleMapRect:', {'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKMapView', b'setVisibleMapRect:animated:', {'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}, 3: {'type': b'Z'}}}) - r(b'MKMapView', b'setVisibleMapRect:edgePadding:animated:', 
{'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}, 3: {'type': b'{_NSEdgeInsets=dddd}'}, 4: {'type': b'Z'}}}) - r(b'MKMapView', b'setZoomEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKMapView', b'showAnnotations:animated:', {'arguments': {3: {'type': b'Z'}}}) - r(b'MKMapView', b'showsBuildings', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'showsCompass', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'showsPointsOfInterest', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'showsScale', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'showsTraffic', {'retval': {'type': 'Z'}}) - r(b'MKMapView', b'showsUserLocation', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'showsZoomControls', {'retval': {'type': b'Z'}}) - r(b'MKMapView', b'visibleMapRect', {'retval': {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}) - r(b'MKMultiPoint', b'getCoordinates:range:', {'arguments': {2: {'type': b'^{_CLLocationCoordinate2D=dd}', 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MKMultiPoint', b'points', {'retval': {'type': b'^{_MKMapPoint=dd}', 'c_array_of_variable_length': True}}) - r(b'MKOverlayRenderer', b'canDrawMapRect:zoomScale:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKOverlayRenderer', b'drawMapRect:zoomScale:inContext:', {'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKOverlayRenderer', b'mapPointForPoint:', {'retval': {'type': b'{_MKMapPoint=dd}'}}) - r(b'MKOverlayRenderer', b'mapRectForRect:', {'retval': {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}) - r(b'MKOverlayRenderer', b'pointForMapPoint:', {'arguments': {2: {'type': b'{_MKMapPoint=dd}'}}}) - r(b'MKOverlayRenderer', b'rectForMapRect:', {'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKOverlayRenderer', b'setNeedsDisplayInMapRect:', {'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKOverlayRenderer', b'setNeedsDisplayInMapRect:zoomScale:', {'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'MKPinAnnotationView', b'animatesDrop', {'retval': {'type': b'Z'}}) - r(b'MKPinAnnotationView', b'setAnimatesDrop:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKPlacemark', b'initWithCoordinate:addressDictionary:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'MKPointAnnotation', b'coordinate', {'retval': {'type': b'{_CLLocationCoordinate2D=dd}'}}) - r(b'MKPointAnnotation', b'setCoordinate:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'MKPolygon', b'polygonWithCoordinates:count:', {'arguments': {2: {'type': b'^{_CLLocationCoordinate2D=dd}', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MKPolygon', b'polygonWithCoordinates:count:interiorPolygons:', {'arguments': {2: {'type': b'^{_CLLocationCoordinate2D=dd}', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MKPolygon', b'polygonWithPoints:count:', {'arguments': {2: {'type': b'^{_MKMapPoint=dd}', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MKPolygon', b'polygonWithPoints:count:interiorPolygons:', {'arguments': {2: {'type': b'^{_MKMapPoint=dd}', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MKPolyline', b'polylineWithCoordinates:count:', {'arguments': {2: {'type': b'^{_CLLocationCoordinate2D=dd}', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MKPolyline', b'polylineWithPoints:count:', {'arguments': {2: {'type': 
b'^{_MKMapPoint=dd}', 'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MKTileOverlay', b'URLForTilePath:', {'arguments': {2: {'type': b'{_MKTileOverlayPath=qqqd}'}}}) - r(b'MKTileOverlay', b'canReplaceMapContent', {'retval': {'type': b'Z'}}) - r(b'MKTileOverlay', b'isGeometryFlipped', {'retval': {'type': b'Z'}}) - r(b'MKTileOverlay', b'loadTileAtPath:result:', {'arguments': {2: {'type': b'{_MKTileOverlayPath=qqqd}'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'MKTileOverlay', b'setCanReplaceMapContent:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKTileOverlay', b'setGeometryFlipped:', {'arguments': {2: {'type': b'Z'}}}) - r(b'MKUserLocation', b'isUpdating', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'boundingMapRect', {'required': True, 'retval': {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}) - r(b'NSObject', b'canReplaceMapContent', {'required': False, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'coordinate', {'required': True, 'retval': {'type': b'{_CLLocationCoordinate2D=dd}'}}) - r(b'NSObject', b'intersectsMapRect:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'{_MKMapRect={_MKMapPoint=dd}{_MKMapSize=dd}}'}}}) - r(b'NSObject', b'mapView:annotationView:didChangeDragState:fromOldState:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'Q'}, 5: {'type': b'Q'}}}) - r(b'NSObject', b'mapView:didAddAnnotationViews:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'mapView:didAddOverlayRenderers:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'mapView:didDeselectAnnotationView:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'mapView:didFailToLocateUserWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'mapView:didSelectAnnotationView:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'mapView:didUpdateUserLocation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'mapView:regionDidChangeAnimated:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'Z'}}}) - r(b'NSObject', b'mapView:regionWillChangeAnimated:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'Z'}}}) - r(b'NSObject', b'mapView:rendererForOverlay:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'mapView:viewForAnnotation:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'mapViewDidFailLoadingMap:withError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'mapViewDidFinishLoadingMap:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mapViewDidFinishRenderingMap:fullyRendered:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'Z'}}}) - r(b'NSObject', b'mapViewDidStopLocatingUser:', {'required': False, 'retval': 
{'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mapViewWillStartLoadingMap:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mapViewWillStartLocatingUser:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'mapViewWillStartRenderingMap:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setCoordinate:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'NSObject', b'subtitle', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'title', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSValue', b'MKCoordinateSpanValue', {'retval': {'type': b'{_MKCoordinateSpan=dd}'}}) - r(b'NSValue', b'MKCoordinateValue', {'retval': {'type': b'{_CLLocationCoordinate2D=dd}'}}) - r(b'NSValue', b'valueWithMKCoordinate:', {'arguments': {2: {'type': b'{_CLLocationCoordinate2D=dd}'}}}) - r(b'NSValue', b'valueWithMKCoordinateSpan:', {'arguments': {2: {'type': b'{_MKCoordinateSpan=dd}'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/MapKit/_metadata.pyc b/env/lib/python2.7/site-packages/MapKit/_metadata.pyc deleted file mode 100644 index a72feeb5..00000000 Binary files a/env/lib/python2.7/site-packages/MapKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MediaAccessibility/__init__.py b/env/lib/python2.7/site-packages/MediaAccessibility/__init__.py deleted file mode 100644 index 3d4e3ff2..00000000 --- a/env/lib/python2.7/site-packages/MediaAccessibility/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the AddressBook framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Cocoa - -from MediaAccessibility import _metadata - -sys.modules['MediaAccessibility'] = mod = objc.ObjCLazyModule( - "MediaAccessibility", - "com.apple.MediaAccessibility", - objc.pathForFramework("/System/Library/Frameworks/MediaAccessibility.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa,)) - -import sys -del sys.modules['MediaAccessibility._metadata'] diff --git a/env/lib/python2.7/site-packages/MediaAccessibility/__init__.pyc b/env/lib/python2.7/site-packages/MediaAccessibility/__init__.pyc deleted file mode 100644 index c76a9438..00000000 Binary files a/env/lib/python2.7/site-packages/MediaAccessibility/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MediaAccessibility/_metadata.py b/env/lib/python2.7/site-packages/MediaAccessibility/_metadata.py deleted file mode 100644 index e45e16d1..00000000 --- a/env/lib/python2.7/site-packages/MediaAccessibility/_metadata.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Dec 30 22:07:41 2014 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$MAMediaCharacteristicDescribesMusicAndSoundForAccessibility@^{__CFString=}$MAMediaCharacteristicDescribesVideoForAccessibility@^{__CFString=}$MAMediaCharacteristicTranscribesSpokenDialogForAccessibility@^{__CFString=}$kMAAudibleMediaSettingsChangedNotification@^{__CFString=}$kMACaptionAppearanceSettingsChangedNotification@^{__CFString=}$''' -enums = '''$kMACaptionAppearanceBehaviorUseContentIfAvailable@1$kMACaptionAppearanceBehaviorUseValue@0$kMACaptionAppearanceDisplayTypeAlwaysOn@2$kMACaptionAppearanceDisplayTypeAutomatic@1$kMACaptionAppearanceDisplayTypeForcedOnly@0$kMACaptionAppearanceDomainDefault@0$kMACaptionAppearanceDomainUser@1$kMACaptionAppearanceFontStyleCasual@5$kMACaptionAppearanceFontStyleCursive@6$kMACaptionAppearanceFontStyleDefault@0$kMACaptionAppearanceFontStyleMonospacedWithSerif@1$kMACaptionAppearanceFontStyleMonospacedWithoutSerif@3$kMACaptionAppearanceFontStyleProportionalWithSerif@2$kMACaptionAppearanceFontStyleProportionalWithoutSerif@4$kMACaptionAppearanceFontStyleSmallCapital@7$kMACaptionAppearanceTextEdgeStyleDepressed@3$kMACaptionAppearanceTextEdgeStyleDropShadow@5$kMACaptionAppearanceTextEdgeStyleNone@1$kMACaptionAppearanceTextEdgeStyleRaised@2$kMACaptionAppearanceTextEdgeStyleUndefined@0$kMACaptionAppearanceTextEdgeStyleUniform@4$''' -misc.update({}) -functions={'MACaptionAppearanceCopyFontDescriptorForStyle': (sel32or64(b'^{__CTFontDescriptor=}l^ll', b'^{__CTFontDescriptor=}q^qq'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 'MACaptionAppearanceCopyForegroundColor': (sel32or64(b'^{CGColor=}l^l', b'^{CGColor=}q^q'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 'MACaptionAppearanceCopySelectedLanguages': (sel32or64(b'^{__CFArray=}l', b'^{__CFArray=}q'), '', {'retval': {'already_cfretained': True}}), 'MACaptionAppearanceCopyWindowColor': (sel32or64(b'^{CGColor=}l^l', b'^{CGColor=}q^q'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 
'MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics': (sel32or64(b'^{__CFArray=}l', b'^{__CFArray=}q'), '', {'retval': {'already_cfretained': True}}), 'MACaptionAppearanceGetDisplayType': (sel32or64(b'll', b'qq'),), 'MACaptionAppearanceGetWindowRoundedCornerRadius': (sel32or64(b'fl^l', b'dq^q'), '', {'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 'MACaptionAppearanceSetDisplayType': (sel32or64(b'vll', b'vqq'),), 'MAAudibleMediaCopyPreferredCharacteristics': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}, 'variadic': False}), 'MACaptionAppearanceGetRelativeCharacterSize': (sel32or64(b'fl^l', b'dq^q'), '', {'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 'MACaptionAppearanceGetForegroundOpacity': (sel32or64(b'fl^l', b'dq^q'), '', {'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 'MACaptionAppearanceCopyBackgroundColor': (sel32or64(b'^{CGColor=}l^l', b'^{CGColor=}q^q'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 'MACaptionAppearanceGetBackgroundOpacity': (sel32or64(b'fl^l', b'dq^q'), '', {'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 'MACaptionAppearanceGetTextEdgeStyle': (sel32or64(b'll^l', b'qq^q'), '', {'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 'MACaptionAppearanceAddSelectedLanguage': (sel32or64(b'Bl^{__CFString=}', b'Bq^{__CFString=}'), '', {'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}}), 'MACaptionAppearanceGetWindowOpacity': (sel32or64(b'fl^l', b'dq^q'), '', {'arguments': {1: {'null_accepted': True, 'type_modifier': 'o'}}})} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/MediaAccessibility/_metadata.pyc b/env/lib/python2.7/site-packages/MediaAccessibility/_metadata.pyc deleted file mode 100644 index 270212dc..00000000 Binary files a/env/lib/python2.7/site-packages/MediaAccessibility/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MediaLibrary/__init__.py b/env/lib/python2.7/site-packages/MediaLibrary/__init__.py deleted file mode 100644 index cdf457c0..00000000 --- a/env/lib/python2.7/site-packages/MediaLibrary/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the MediaLibrary framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Cocoa -import Quartz - -from MediaLibrary import _metadata - -sys.modules['MediaLibrary'] = mod = objc.ObjCLazyModule( - "MediaLibrary", - "com.apple.MediaLibrary", - objc.pathForFramework("/System/Library/Frameworks/MediaLibrary.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa, Quartz)) - -import sys -del sys.modules['MediaLibrary._metadata'] diff --git a/env/lib/python2.7/site-packages/MediaLibrary/__init__.pyc b/env/lib/python2.7/site-packages/MediaLibrary/__init__.pyc deleted file mode 100644 index 96ba07f8..00000000 Binary files a/env/lib/python2.7/site-packages/MediaLibrary/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MediaLibrary/_metadata.py b/env/lib/python2.7/site-packages/MediaLibrary/_metadata.py deleted file mode 100644 index 5ff0b13f..00000000 --- a/env/lib/python2.7/site-packages/MediaLibrary/_metadata.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sat Jul 22 14:23:55 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$MLApertureAllPhotosTypeIdentifier$MLApertureAllProjectsTypeIdentifier$MLApertureFacebookAlbumTypeIdentifier$MLApertureFacebookGroupTypeIdentifier$MLApertureFacesAlbumTypeIdentifier$MLApertureFlaggedTypeIdentifier$MLApertureFlickrAlbumTypeIdentifier$MLApertureFlickrGroupTypeIdentifier$MLApertureFolderAlbumTypeIdentifier$MLApertureLastImportAlbumTypeIdentifier$MLApertureLastNMonthsAlbumTypeIdentifier$MLApertureLastViewedEventAlbumTypeIdentifier$MLApertureLightTableTypeIdentifier$MLAperturePhotoStreamAlbumTypeIdentifier$MLAperturePlacesAlbumTypeIdentifier$MLAperturePlacesCityAlbumTypeIdentifier$MLAperturePlacesCountryAlbumTypeIdentifier$MLAperturePlacesPointOfInterestAlbumTypeIdentifier$MLAperturePlacesProvinceAlbumTypeIdentifier$MLApertureProjectAlbumTypeIdentifier$MLApertureProjectFolderAlbumTypeIdentifier$MLApertureRootGroupTypeIdentifier$MLApertureSlideShowTypeIdentifier$MLApertureSmugMugAlbumTypeIdentifier$MLApertureSmugMugGroupTypeIdentifier$MLApertureUserAlbumTypeIdentifier$MLApertureUserSmartAlbumTypeIdentifier$MLFinalCutEventCalendarGroupTypeIdentifier$MLFinalCutEventGroupTypeIdentifier$MLFinalCutEventLibraryGroupTypeIdentifier$MLFinalCutFolderGroupTypeIdentifier$MLFinalCutProjectGroupTypeIdentifier$MLFinalCutRootGroupTypeIdentifier$MLFolderGroupTypeIdentifier$MLFolderRootGroupTypeIdentifier$MLGarageBandFolderGroupTypeIdentifier$MLGarageBandRootGroupTypeIdentifier$MLLogicBouncesGroupTypeIdentifier$MLLogicProjectTypeIdentifier$MLLogicProjectsGroupTypeIdentifier$MLLogicRootGroupTypeIdentifier$MLMediaLoadAppFoldersKey$MLMediaLoadAppleLoops$MLMediaLoadExcludeSourcesKey$MLMediaLoadFoldersKey$MLMediaLoadIncludeSourcesKey$MLMediaLoadMoviesFolder$MLMediaLoadSourceTypesKey$MLMediaObjectAlbumKey$MLMediaObjectArtistKey$MLMediaObjectBitRateKey$MLMediaObjectChannelCountKey$MLMediaObjectCommentsKey$MLMediaObjectDurationKey$MLMediaObjectGenreKey$MLMediaObjectKeywordsKey$MLMediaObjectKindKey$MLMediaObjectProtectedKey$MLMediaObjectResolutionStringKey$MLMediaObjectSampleRateKey$MLMediaObjectTrackNumberKey$MLMediaSourceApertureIdentifier$MLMediaSourceAppDefinedFoldersIdentifier$MLMediaSourceCustomFoldersIdentifier$MLMediaSou
rceFinalCutIdentifier$MLMediaSourceGarageBandIdentifier$MLMediaSourceLogicIdentifier$MLMediaSourceMoviesFolderIdentifier$MLMediaSourcePhotoBoothIdentifier$MLMediaSourcePhotosIdentifier$MLMediaSourceiMovieIdentifier$MLMediaSourceiPhotoIdentifier$MLMediaSourceiTunesIdentifier$MLPhotosAlbumTypeIdentifier$MLPhotosAlbumsGroupTypeIdentifier$MLPhotosAllCollectionsGroupTypeIdentifier$MLPhotosAllMomentsGroupTypeIdentifier$MLPhotosAllPhotosAlbumTypeIdentifier$MLPhotosAllYearsGroupTypeIdentifier$MLPhotosAnimatedGroupTypeIdentifier$MLPhotosBurstGroupTypeIdentifier$MLPhotosCollectionGroupTypeIdentifier$MLPhotosDepthEffectGroupTypeIdentifier$MLPhotosFacesAlbumTypeIdentifier$MLPhotosFavoritesGroupTypeIdentifier$MLPhotosFolderTypeIdentifier$MLPhotosFrontCameraGroupTypeIdentifier$MLPhotosLastImportGroupTypeIdentifier$MLPhotosLivePhotosGroupTypeIdentifier$MLPhotosLongExposureGroupTypeIdentifier$MLPhotosMomentGroupTypeIdentifier$MLPhotosMyPhotoStreamTypeIdentifier$MLPhotosPanoramasGroupTypeIdentifier$MLPhotosPublishedAlbumTypeIdentifier$MLPhotosRootGroupTypeIdentifier$MLPhotosScreenshotGroupTypeIdentifier$MLPhotosSharedGroupTypeIdentifier$MLPhotosSharedPhotoStreamTypeIdentifier$MLPhotosSloMoGroupTypeIdentifier$MLPhotosSmartAlbumTypeIdentifier$MLPhotosTimelapseGroupTypeIdentifier$MLPhotosVideosGroupTypeIdentifier$MLPhotosYearGroupTypeIdentifier$MLiMovieEventCalendarGroupTypeIdentifier$MLiMovieEventGroupTypeIdentifier$MLiMovieEventLibraryGroupTypeIdentifier$MLiMovieFolderGroupTypeIdentifier$MLiMovieProjectGroupTypeIdentifier$MLiMovieRootGroupTypeIdentifier$MLiPhotoAlbumTypeIdentifier$MLiPhotoEventAlbumTypeIdentifier$MLiPhotoEventsFolderTypeIdentifier$MLiPhotoFacebookAlbumTypeIdentifier$MLiPhotoFacebookGroupTypeIdentifier$MLiPhotoFacesAlbumTypeIdentifier$MLiPhotoFlaggedAlbumTypeIdentifier$MLiPhotoFlickrAlbumTypeIdentifier$MLiPhotoFlickrGroupTypeIdentifier$MLiPhotoFolderAlbumTypeIdentifier$MLiPhotoLastImportAlbumTypeIdentifier$MLiPhotoLastNMonthsAlbumTypeIdentifier$MLiPhotoLastViewedEventAlbumTypeIdentifier$MLiPhotoLibraryAlbumTypeIdentifier$MLiPhotoPhotoStreamAlbumTypeIdentifier$MLiPhotoPlacesAlbumTypeIdentifier$MLiPhotoPlacesCityAlbumTypeIdentifier$MLiPhotoPlacesCountryAlbumTypeIdentifier$MLiPhotoPlacesPointOfInterestAlbumTypeIdentifier$MLiPhotoPlacesProvinceAlbumTypeIdentifier$MLiPhotoRootGroupTypeIdentifier$MLiPhotoSlideShowAlbumTypeIdentifier$MLiPhotoSmartAlbumTypeIdentifier$MLiPhotoSubscribedAlbumTypeIdentifier$MLiTunesAudioBooksPlaylistTypeIdentifier$MLiTunesFolderPlaylistTypeIdentifier$MLiTunesGeniusPlaylistTypeIdentifier$MLiTunesMoviesPlaylistTypeIdentifier$MLiTunesMusicPlaylistTypeIdentifier$MLiTunesMusicVideosPlaylistTypeIdentifier$MLiTunesPlaylistTypeIdentifier$MLiTunesPodcastPlaylistTypeIdentifier$MLiTunesPurchasedPlaylistTypeIdentifier$MLiTunesRootGroupTypeIdentifier$MLiTunesSavedGeniusPlaylistTypeIdentifier$MLiTunesSmartPlaylistTypeIdentifier$MLiTunesTVShowsPlaylistTypeIdentifier$MLiTunesVideoPlaylistTypeIdentifier$MLiTunesiTunesUPlaylistTypeIdentifier$''' -enums = '''$MLMediaSourceTypeAudio@1$MLMediaSourceTypeImage@2$MLMediaSourceTypeMovie@4$MLMediaTypeAudio@1$MLMediaTypeImage@2$MLMediaTypeMovie@4$''' -misc.update({}) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/MediaLibrary/_metadata.pyc b/env/lib/python2.7/site-packages/MediaLibrary/_metadata.pyc deleted file mode 100644 index 263e9866..00000000 Binary files a/env/lib/python2.7/site-packages/MediaLibrary/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MediaPlayer/__init__.py 
b/env/lib/python2.7/site-packages/MediaPlayer/__init__.py deleted file mode 100644 index c0f9bd79..00000000 --- a/env/lib/python2.7/site-packages/MediaPlayer/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the MediaPlayer framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import AVFoundation - -from MediaPlayer import _metadata - -sys.modules['MediaPlayer'] = mod = objc.ObjCLazyModule( - "MediaPlayer", - "com.apple.MediaPlayer", - objc.pathForFramework("/System/Library/Frameworks/MediaPlayer.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (AVFoundation,)) - -import sys -del sys.modules['MediaPlayer._metadata'] diff --git a/env/lib/python2.7/site-packages/MediaPlayer/__init__.pyc b/env/lib/python2.7/site-packages/MediaPlayer/__init__.pyc deleted file mode 100644 index fcd4b157..00000000 Binary files a/env/lib/python2.7/site-packages/MediaPlayer/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MediaPlayer/_metadata.py b/env/lib/python2.7/site-packages/MediaPlayer/_metadata.py deleted file mode 100644 index f981a865..00000000 --- a/env/lib/python2.7/site-packages/MediaPlayer/_metadata.py +++ /dev/null @@ -1,70 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jun 24 17:27:56 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$MPErrorDomain$MPLanguageOptionCharacteristicContainsOnlyForcedSubtitles$MPLanguageOptionCharacteristicDescribesMusicAndSound$MPLanguageOptionCharacteristicDescribesVideo$MPLanguageOptionCharacteristicDubbedTranslation$MPLanguageOptionCharacteristicEasyToRead$MPLanguageOptionCharacteristicIsAuxiliaryContent$MPLanguageOptionCharacteristicIsMainProgramContent$MPLanguageOptionCharacteristicLanguageTranslation$MPLanguageOptionCharacteristicTranscribesSpokenDialog$MPLanguageOptionCharacteristicVoiceOverTranslation$MPMediaEntityPropertyPersistentID$MPMediaItemPropertyAlbumArtist$MPMediaItemPropertyAlbumArtistPersistentID$MPMediaItemPropertyAlbumPersistentID$MPMediaItemPropertyAlbumTitle$MPMediaItemPropertyAlbumTrackCount$MPMediaItemPropertyAlbumTrackNumber$MPMediaItemPropertyArtist$MPMediaItemPropertyArtistPersistentID$MPMediaItemPropertyArtwork$MPMediaItemPropertyAssetURL$MPMediaItemPropertyBeatsPerMinute$MPMediaItemPropertyBookmarkTime$MPMediaItemPropertyComments$MPMediaItemPropertyComposer$MPMediaItemPropertyComposerPersistentID$MPMediaItemPropertyDateAdded$MPMediaItemPropertyDiscCount$MPMediaItemPropertyDiscNumber$MPMediaItemPropertyGenre$MPMediaItemPropertyGenrePersistentID$MPMediaItemPropertyHasProtectedAsset$MPMediaItemPropertyIsCloudItem$MPMediaItemPropertyIsCompilation$MPMediaItemPropertyIsExplicit$MPMediaItemPropertyLastPlayedDate$MPMediaItemPropertyLyrics$MPMediaItemPropertyMediaType$MPMediaItemPropertyPersistentID$MPMediaItemPropertyPlayCount$MPMediaItemPropertyPlaybackDuration$MPMediaItemPropertyPlaybackStoreID$MPMediaItemPropertyPodcastPersistentID$MPMediaItemPropertyPodcastTitle$MPMediaItemPropertyRating$MPMediaItemPropertyReleaseDate$MPMediaItemPropertySkipCount$MPMediaItemPropertyTitle$MPMediaItemPropertyUserGrouping$MPNowPlayingInfoCollectionIdentifier$MPNowPlayingInfoPropertyAssetURL$MPNowPlayingInfoPropertyAvailableLanguageOptions$MPNowPlayingInfoPropertyChapterCount$MPNowPlayingInfoPropertyChapterNumber$MPNowPlayingInfoPropertyCurrentLanguageOptions$MPNowPlayingInfoPropertyCurrentPlaybackDate$MPNowPlayingInfoPropertyDefaultPlaybackRate$MPNowPlayingInfoPropertyElapsedPlaybackTime$MPNowPlayingInfoPropertyExternalContentIdentifier$MPNowPlayingInfoPropertyExternalUserProfileIdentifier$MPNowPlayingInfoPropertyIsLiveStream$MPNowPlayingInfoPropertyMediaType$MPNowPlayingInfoPropertyPlaybackProgress$MPNowPlayingInfoPropertyPlaybackQueueCount$MPNowPlayingInfoPropertyPlaybackQueueIndex$MPNowPlayingInfoPropertyPlaybackRate$MPNowPlayingInfoPropertyServiceIdentifier$MPNowPlayingInfoPropertyServiceIdentifier,$''' -enums = 
'''$MPChangeLanguageOptionSettingNone@0$MPChangeLanguageOptionSettingNowPlayingItemOnly@1$MPChangeLanguageOptionSettingPermanent@2$MPErrorCancelled@6$MPErrorCloudServiceCapabilityMissing@2$MPErrorNetworkConnectionFailed@3$MPErrorNotFound@4$MPErrorNotSupported@5$MPErrorPermissionDenied@1$MPErrorRequestTimedOut@7$MPErrorUnknown@0$MPMediaTypeAny@18446744073709551615$MPMediaTypeAnyAudio@255$MPMediaTypeAnyVideo@65280$MPMediaTypeAudioBook@4$MPMediaTypeAudioITunesU@8$MPMediaTypeHomeVideo@8192$MPMediaTypeMovie@256$MPMediaTypeMusic@1$MPMediaTypeMusicVideo@2048$MPMediaTypePodcast@2$MPMediaTypeTVShow@512$MPMediaTypeVideoITunesU@4096$MPMediaTypeVideoPodcast@1024$MPNowPlayingInfoLanguageOptionTypeAudible@0$MPNowPlayingInfoLanguageOptionTypeLegible@1$MPNowPlayingInfoMediaTypeAudio@1$MPNowPlayingInfoMediaTypeNone@0$MPNowPlayingInfoMediaTypeVideo@2$MPNowPlayingPlaybackStateInterrupted@4$MPNowPlayingPlaybackStatePaused@2$MPNowPlayingPlaybackStatePlaying@1$MPNowPlayingPlaybackStateStopped@3$MPNowPlayingPlaybackStateUnknown@0$MPRemoteCommandHandlerStatusCommandFailed@200$MPRemoteCommandHandlerStatusDeviceNotFound@120$MPRemoteCommandHandlerStatusNoActionableNowPlayingItem@110$MPRemoteCommandHandlerStatusNoSuchContent@100$MPRemoteCommandHandlerStatusSuccess@0$MPRepeatTypeAll@2$MPRepeatTypeOff@0$MPRepeatTypeOne@1$MPSeekCommandEventTypeBeginSeeking@0$MPSeekCommandEventTypeEndSeeking@1$MPShuffleTypeCollections@2$MPShuffleTypeItems@1$MPShuffleTypeOff@0$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'MPChangeRepeatModeCommandEvent', b'preservesRepeatMode', {'retval': {'type': 'Z'}}) - r(b'MPChangeRepeatModeCommandEvent', b'setPreservesRepeatMode:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPChangeShuffleModeCommandEvent', b'preservesShuffleMode', {'retval': {'type': 'Z'}}) - r(b'MPChangeShuffleModeCommandEvent', b'setPreservesShuffleMode:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPContentItem', b'isContainer', {'retval': {'type': 'Z'}}) - r(b'MPContentItem', b'isExplicitContent', {'retval': {'type': 'Z'}}) - r(b'MPContentItem', b'isPlayable', {'retval': {'type': 'Z'}}) - r(b'MPContentItem', b'isStreamingContent', {'retval': {'type': 'Z'}}) - r(b'MPContentItem', b'setContainer:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPContentItem', b'setExplicitContent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPContentItem', b'setPlayable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPContentItem', b'setStreamingContent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPFeedbackCommand', b'isActive', {'retval': {'type': 'Z'}}) - r(b'MPFeedbackCommand', b'setActive:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPFeedbackCommandEvent', b'isNegative', {'retval': {'type': 'Z'}}) - r(b'MPFeedbackCommandEvent', b'setNegative:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPMediaEntity', b'canFilterByProperty:', {'retval': {'type': 'Z'}}) - r(b'MPMediaEntity', b'enumerateValuesForProperties:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}}}}}}) - r(b'MPMediaEntity', b'enumerateValuesForProperties:usingBlock::', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}}}}}}) - r(b'MPMediaItem', b'hasProtectedAsset', {'retval': {'type': 'Z'}}) - r(b'MPMediaItem', b'isCloudItem', {'retval': {'type': 'Z'}}) - r(b'MPMediaItem', b'isCompilation', {'retval': {'type': 'Z'}}) - r(b'MPMediaItem', 
b'isExplicitItem', {'retval': {'type': 'Z'}}) - r(b'MPMediaItem', b'setCloudItem:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPMediaItem', b'setCompilation:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPMediaItem', b'setExplicitItem:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPMediaItem', b'setHasProtectedAsset:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPMediaItemArtwork', b'initWithBoundsSize:requestHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'{CGSize=ff}', b'{CGSize=dd}')}}}}}}) - r(b'MPNowPlayingInfoLanguageOption', b'isAutomaticAudibleLanguageOption', {'retval': {'type': 'Z'}}) - r(b'MPNowPlayingInfoLanguageOption', b'isAutomaticLegibleLanguageOption', {'retval': {'type': 'Z'}}) - r(b'MPNowPlayingInfoLanguageOptionGroup', b'allowEmptySelection', {'retval': {'type': 'Z'}}) - r(b'MPNowPlayingInfoLanguageOptionGroup', b'initWithLanguageOptions:defaultLanguageOption:allowEmptySelection:', {'arguments': {4: {'type': 'Z'}}}) - r(b'MPNowPlayingInfoLanguageOptionGroup', b'setAllowEmptySelection:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPPlayableContentManagerContext', b'contentLimitsEnabled', {'retval': {'type': 'Z'}}) - r(b'MPPlayableContentManagerContext', b'contentLimitsEnforced', {'retval': {'type': 'Z'}}) - r(b'MPPlayableContentManagerContext', b'endpointAvailable', {'retval': {'type': 'Z'}}) - r(b'MPPlayableContentManagerContext', b'setContentLimitsEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPPlayableContentManagerContext', b'setContentLimitsEnforced:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPPlayableContentManagerContext', b'setEndpointAvailable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MPRemoteCommand', b'addTargetWithHandler:', {'arguments': {2: {'callable': {'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'MPRemoteCommand', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'MPRemoteCommand', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/MediaPlayer/_metadata.pyc b/env/lib/python2.7/site-packages/MediaPlayer/_metadata.pyc deleted file mode 100644 index ee5d34ab..00000000 Binary files a/env/lib/python2.7/site-packages/MediaPlayer/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MediaToolbox/_MediaToolbox.so b/env/lib/python2.7/site-packages/MediaToolbox/_MediaToolbox.so deleted file mode 100755 index e439edbd..00000000 Binary files a/env/lib/python2.7/site-packages/MediaToolbox/_MediaToolbox.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MediaToolbox/__init__.py b/env/lib/python2.7/site-packages/MediaToolbox/__init__.py deleted file mode 100644 index 7d48bd7d..00000000 --- a/env/lib/python2.7/site-packages/MediaToolbox/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -''' -Python mapping for the MediaToolbox framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from MediaToolbox import _metadata - -sys.modules['MediaToolbox'] = mod = objc.ObjCLazyModule( - "MediaToolbox", - "com.apple.MediaToolbox", - objc.pathForFramework("/System/Library/Frameworks/MediaToolbox.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import MediaToolbox._MediaToolbox -for nm in dir(MediaToolbox._MediaToolbox): - setattr(mod, nm, getattr(MediaToolbox._MediaToolbox, nm)) - -import sys -del sys.modules['MediaToolbox._metadata'] diff --git a/env/lib/python2.7/site-packages/MediaToolbox/__init__.pyc b/env/lib/python2.7/site-packages/MediaToolbox/__init__.pyc deleted file mode 100644 index bb914bb5..00000000 Binary files a/env/lib/python2.7/site-packages/MediaToolbox/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MediaToolbox/_metadata.py b/env/lib/python2.7/site-packages/MediaToolbox/_metadata.py deleted file mode 100644 index 6e1b3073..00000000 --- a/env/lib/python2.7/site-packages/MediaToolbox/_metadata.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Sep 4 22:31:30 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$kMTAudioProcessingTapCallbacksVersion_0@0$kMTAudioProcessingTapCreationFlag_PostEffects@2$kMTAudioProcessingTapCreationFlag_PreEffects@1$kMTAudioProcessingTapFlag_EndOfStream@512$kMTAudioProcessingTapFlag_StartOfStream@256$''' -misc.update({}) -functions={'MTAudioProcessingTapGetSourceAudio': (sel32or64(b'l^{opaqueMTAudioProcessingTap=}l^{AudioBufferList=L[1{AudioBuffer=LL^v}]}^I^{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^l', b'i^{opaqueMTAudioProcessingTap=}q^{AudioBufferList=I[1{AudioBuffer=II^v}]}^I^{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^q'), '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}, 5: {'type_modifier': 'o'}}}), 'MTAudioProcessingTapGetTypeID': (sel32or64(b'L', b'Q'),), 'MTRegisterProfessionalVideoWorkflowFormatReaders': (b'v',), 'MTCopyLocalizedNameForMediaType': (sel32or64(b'^{__CFString=}L', b'^{__CFString=}I'), '', {'retval': {'already_cfretained': True}}), 'MTCopyLocalizedNameForMediaSubType': (sel32or64(b'^{__CFString=}LL', b'^{__CFString=}II'), '', {'retval': {'already_cfretained': True}})} -cftypes=[('MTAudioProcessingTapRef', b'^{opaqueMTAudioProcessingTap=}', 'MTAudioProcessingTapGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/MediaToolbox/_metadata.pyc b/env/lib/python2.7/site-packages/MediaToolbox/_metadata.pyc deleted file mode 100644 index ac51323a..00000000 Binary files a/env/lib/python2.7/site-packages/MediaToolbox/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ModelIO/_ModelIO.so b/env/lib/python2.7/site-packages/ModelIO/_ModelIO.so deleted file mode 100755 index b3fecb8b..00000000 Binary files a/env/lib/python2.7/site-packages/ModelIO/_ModelIO.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ModelIO/__init__.py b/env/lib/python2.7/site-packages/ModelIO/__init__.py deleted file mode 100644 index 5579fbf4..00000000 --- a/env/lib/python2.7/site-packages/ModelIO/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -''' 
-Python mapping for the ModelIO framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Quartz -import Cocoa - -from ModelIO import _metadata -from ModelIO import _ModelIO - -sys.modules['ModelIO'] = mod = objc.ObjCLazyModule( - "ModelIO", - "com.apple.ModelIO", - objc.pathForFramework("/System/Library/Frameworks/ModelIO.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa, Quartz)) - - -import sys -del sys.modules['ModelIO._metadata'] diff --git a/env/lib/python2.7/site-packages/ModelIO/__init__.pyc b/env/lib/python2.7/site-packages/ModelIO/__init__.pyc deleted file mode 100644 index 4e38ceb7..00000000 Binary files a/env/lib/python2.7/site-packages/ModelIO/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ModelIO/_metadata.py b/env/lib/python2.7/site-packages/ModelIO/_metadata.py deleted file mode 100644 index 52839848..00000000 --- a/env/lib/python2.7/site-packages/ModelIO/_metadata.py +++ /dev/null @@ -1,183 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Aug 22 22:39:24 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$MDLVertexAttributeAnisotropy$MDLVertexAttributeBinormal$MDLVertexAttributeBitangent$MDLVertexAttributeColor$MDLVertexAttributeEdgeCrease$MDLVertexAttributeJointIndices$MDLVertexAttributeJointWeights$MDLVertexAttributeNormal$MDLVertexAttributeOcclusionValue$MDLVertexAttributePosition$MDLVertexAttributeShadingBasisU$MDLVertexAttributeShadingBasisV$MDLVertexAttributeSubdivisionStencil$MDLVertexAttributeTangent$MDLVertexAttributeTextureCoordinate$kUTType3dObject$kUTTypeAlembic$kUTTypePolygon$kUTTypeStereolithography$kUTTypeUniversalSceneDescription$''' -enums = 
'''$MDLAnimatedValueInterpolationConstant@0$MDLAnimatedValueInterpolationLinear@1$MDLCameraProjectionOrthographic@1$MDLCameraProjectionPerspective@0$MDLDataPrecisionDouble@2$MDLDataPrecisionFloat@1$MDLDataPrecisionUndefined@0$MDLGeometryTypeLines@1$MDLGeometryTypePoints@0$MDLGeometryTypeQuads@4$MDLGeometryTypeTriangleStrips@3$MDLGeometryTypeTriangles@2$MDLGeometryTypeVariableTopology@5$MDLIndexBitDepthInvalid@0$MDLIndexBitDepthUInt16@16$MDLIndexBitDepthUInt32@32$MDLIndexBitDepthUInt8@8$MDLIndexBitDepthUint16@16$MDLIndexBitDepthUint32@32$MDLIndexBitDepthUint8@8$MDLLightTypeAmbient@1$MDLLightTypeDirectional@2$MDLLightTypeDiscArea@6$MDLLightTypeEnvironment@11$MDLLightTypeLinear@5$MDLLightTypePhotometric@9$MDLLightTypePoint@4$MDLLightTypeProbe@10$MDLLightTypeRectangularArea@7$MDLLightTypeSpot@3$MDLLightTypeSuperElliptical@8$MDLLightTypeUnknown@0$MDLMaterialFaceBack@1$MDLMaterialFaceDoubleSided@2$MDLMaterialFaceFront@0$MDLMaterialMipMapFilterModeLinear@1$MDLMaterialMipMapFilterModeNearest@0$MDLMaterialPropertyTypeColor@4$MDLMaterialPropertyTypeFloat@5$MDLMaterialPropertyTypeFloat2@6$MDLMaterialPropertyTypeFloat3@7$MDLMaterialPropertyTypeFloat4@8$MDLMaterialPropertyTypeMatrix44@9$MDLMaterialPropertyTypeNone@0$MDLMaterialPropertyTypeString@1$MDLMaterialPropertyTypeTexture@3$MDLMaterialPropertyTypeURL@2$MDLMaterialSemanticAmbientOcclusion@22$MDLMaterialSemanticAmbientOcclusionScale@23$MDLMaterialSemanticAnisotropic@7$MDLMaterialSemanticAnisotropicRotation@8$MDLMaterialSemanticBaseColor@0$MDLMaterialSemanticBump@14$MDLMaterialSemanticClearcoat@11$MDLMaterialSemanticClearcoatGloss@12$MDLMaterialSemanticDisplacement@20$MDLMaterialSemanticDisplacementScale@21$MDLMaterialSemanticEmission@13$MDLMaterialSemanticInterfaceIndexOfRefraction@16$MDLMaterialSemanticMaterialIndexOfRefraction@17$MDLMaterialSemanticMetallic@2$MDLMaterialSemanticNone@32768$MDLMaterialSemanticObjectSpaceNormal@18$MDLMaterialSemanticOpacity@15$MDLMaterialSemanticRoughness@6$MDLMaterialSemanticSheen@9$MDLMaterialSemanticSheenTint@10$MDLMaterialSemanticSpecular@3$MDLMaterialSemanticSpecularExponent@4$MDLMaterialSemanticSpecularTint@5$MDLMaterialSemanticSubsurface@1$MDLMaterialSemanticTangentSpaceNormal@19$MDLMaterialSemanticUserDefined@32769$MDLMaterialTextureFilterModeLinear@1$MDLMaterialTextureFilterModeNearest@0$MDLMaterialTextureWrapModeClamp@0$MDLMaterialTextureWrapModeMirror@2$MDLMaterialTextureWrapModeRepeat@1$MDLMeshBufferTypeIndex@2$MDLMeshBufferTypeVertex@1$MDLPrimitiveTypeCapsule@3$MDLPrimitiveTypeCone@2$MDLPrimitiveTypeCube@0$MDLPrimitiveTypeCylinder@4$MDLPrimitiveTypeNone@5$MDLPrimitiveTypeSphere@1$MDLProbePlacementIrradianceDistribution@1$MDLProbePlacementUniformGrid@0$MDLTextureChannelEncodingFloat16@258$MDLTextureChannelEncodingFloat16SR@770$MDLTextureChannelEncodingFloat32@260$MDLTextureChannelEncodingUInt16@2$MDLTextureChannelEncodingUInt24@3$MDLTextureChannelEncodingUInt32@4$MDLTextureChannelEncodingUInt8@1$MDLTextureChannelEncodingUint16@2$MDLTextureChannelEncodingUint24@3$MDLTextureChannelEncodingUint32@4$MDLTextureChannelEncodingUint8@1$MDLTransformOpRotationOrderXYZ@1$MDLTransformOpRotationOrderXZY@2$MDLTransformOpRotationOrderYXZ@3$MDLTransformOpRotationOrderYZX@4$MDLTransformOpRotationOrderZXY@5$MDLTransformOpRotationOrderZYX@6$MDLVertexFormatChar@131073$MDLVertexFormatChar2@131074$MDLVertexFormatChar2Normalized@262146$MDLVertexFormatChar3@131075$MDLVertexFormatChar3Normalized@262147$MDLVertexFormatChar4@131076$MDLVertexFormatChar4Normalized@262148$MDLVertexFormatCharBits@131072$MDLVertexFormatCharNormalized@26
2145$MDLVertexFormatCharNormalizedBits@262144$MDLVertexFormatFloat@786433$MDLVertexFormatFloat2@786434$MDLVertexFormatFloat3@786435$MDLVertexFormatFloat4@786436$MDLVertexFormatFloatBits@786432$MDLVertexFormatHalf@720897$MDLVertexFormatHalf2@720898$MDLVertexFormatHalf3@720899$MDLVertexFormatHalf4@720900$MDLVertexFormatHalfBits@720896$MDLVertexFormatInt@655361$MDLVertexFormatInt1010102Normalized@659460$MDLVertexFormatInt2@655362$MDLVertexFormatInt3@655363$MDLVertexFormatInt4@655364$MDLVertexFormatIntBits@655360$MDLVertexFormatInvalid@0$MDLVertexFormatPackedBit@4096$MDLVertexFormatShort@393217$MDLVertexFormatShort2@393218$MDLVertexFormatShort2Normalized@524290$MDLVertexFormatShort3@393219$MDLVertexFormatShort3Normalized@524291$MDLVertexFormatShort4@393220$MDLVertexFormatShort4Normalized@524292$MDLVertexFormatShortBits@393216$MDLVertexFormatShortNormalized@524289$MDLVertexFormatShortNormalizedBits@524288$MDLVertexFormatUChar@65537$MDLVertexFormatUChar2@65538$MDLVertexFormatUChar2Normalized@196610$MDLVertexFormatUChar3@65539$MDLVertexFormatUChar3Normalized@196611$MDLVertexFormatUChar4@65540$MDLVertexFormatUChar4Normalized@196612$MDLVertexFormatUCharBits@65536$MDLVertexFormatUCharNormalized@196609$MDLVertexFormatUCharNormalizedBits@196608$MDLVertexFormatUInt@589825$MDLVertexFormatUInt1010102Normalized@593924$MDLVertexFormatUInt2@589826$MDLVertexFormatUInt3@589827$MDLVertexFormatUInt4@589828$MDLVertexFormatUIntBits@589824$MDLVertexFormatUShort@327681$MDLVertexFormatUShort2@327682$MDLVertexFormatUShort2Normalized@458754$MDLVertexFormatUShort3@327683$MDLVertexFormatUShort3Normalized@458755$MDLVertexFormatUShort4@327684$MDLVertexFormatUShort4Normalized@458756$MDLVertexFormatUShortBits@327680$MDLVertexFormatUShortNormalized@458753$MDLVertexFormatUShortNormalizedBits@458752$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'MDLAnimatedMatrix4x4', b'copyDouble4x4ArrayInto:maxCount:', {'arguments': {2: {'c_array_lengt_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedMatrix4x4', b'copyFloat4x4ArrayInto:maxCount:', {'arguments': {2: {'c_array_lengt_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedMatrix4x4', b'getDouble4x4Array:maxCount:', {'arguments': {2: {'c_array_lengt_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedMatrix4x4', b'getFloat4x4Array:maxCount:', {'arguments': {2: {'c_array_lengt_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedMatrix4x4', b'resetWithDouble4x4Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedMatrix4x4', b'resetWithFloat4x4Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedQuaternionArray', b'getDoubleQuaternionArray:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedQuaternionArray', b'getDoubleQuaternionArray:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedQuaternionArray', b'getFloatQuaternionArray:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - 
r(b'MDLAnimatedQuaternionArray', b'getFloatQuaternionArray:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedQuaternionArray', b'resetWithDoubleQuaternionArray:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedQuaternionArray', b'resetWithFloatQuaternionArray:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedQuaternionArray', b'setDoubleQuaternionArray:count:atTime:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedQuaternionArray', b'setFloatQuaternionArray:count:atTime:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalar', b'copyDoubleArrayInto:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalar', b'copyFloatArrayInto:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalar', b'getDoubleArray:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalar', b'getFloatArray:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalar', b'resetWithDoubleArray:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedScalar', b'resetWithFloatArray:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 4}, 3: {'type_modifier': b'n', 'c_array_length_in_arg': 4}}}) - r(b'MDLAnimatedScalar', b'resetWithDoubleArray:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 4}, 3: {'type_modifier': b'n', 'c_array_length_in_arg': 4}}}) - r(b'MDLAnimatedScalarArray', b'resetWithFloatArray:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedScalarArray', b'copyDoubleArrayInto:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalarArray', b'copyDoubleArrayInto:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalarArray', b'copyFloatArrayInto:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalarArray', b'copyFloatArrayInto:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalarArray', b'getFloatArray:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalarArray', b'getDoubleArray:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalarArray', b'getFloatArray:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 
'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalarArray', b'getDoubleArray:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalarArray', b'resetWithDoubleArray:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedScalarArray', b'resetWithFloatArray:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedScalarArray', b'setDoubleArray:count:atTime:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedScalarArray', b'setFloatArray:count:atTime:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedValue', b'copyTimesInto:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedValue', b'getTimes:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedValue', b'isAnimated', {'retval': {'type': 'Z'}}) - r(b'MDLAnimatedVector2', b'copyDouble2ArrayInto:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector2', b'copyFloat2ArrayInto:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector2', b'getDouble2Array:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector2', b'getFloat2Array:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector2', b'resetWithDouble2Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedVector2', b'resetWithFloat2Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedVector3', b'copyDouble3ArrayInto:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector3', b'copyFloat3ArrayInto:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector3', b'getDouble3Array:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector3', b'getFloat3Array:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector3', b'resetWithDouble3Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedVector3', b'resetWithFloat3Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedVector3Array', b'getDouble3Array:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': 
b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector3Array', b'getDouble3Array:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector3Array', b'getFloat3Array:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector3Array', b'getFloat3Array:maxCount:atTime:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector3Array', b'resetWithDouble3Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedVector3Array', b'resetWithFloat3Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedVector3Array', b'setDouble3Array:count:atTime:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector3Array', b'setFloat3Array:count:atTime:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector4', b'copyDouble4ArrayInto:maxCount:', {'arguments': {2: {'c_array_lengt_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector4', b'copyFloat4ArrayInto:maxCount:', {'arguments': {2: {'c_array_lengt_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector4', b'getDouble4Array:maxCount:', {'arguments': {2: {'c_array_lengt_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector4', b'getFloat4Array:maxCount:', {'arguments': {2: {'c_array_lengt_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLAnimatedVector4', b'resetWithDouble4Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAnimatedVector4', b'resetWithFloat4Array:count:atTimes:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}, 4: {'type_modifier': b'n', 'c_array_length_in_arg': 5}}}) - r(b'MDLAsset', b'canExportFileExtension:', {'retval': {'type': 'Z'}}) - r(b'MDLAsset', b'canImportFileExtension:', {'retval': {'type': 'Z'}}) - r(b'MDLAsset', b'exportAssetToURL:', {'retval': {'type': 'Z'}}) - r(b'MDLAsset', b'exportAssetToURL:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'MDLAsset', b'initWithURL:bufferAllocator:preserveIndexing:error:', {'arguments': {4: {'type': 'Z'}, 5: {'type_modifier': b'o'}}}) - r(b'MDLAsset', b'initWithURL:vertexDescriptor:bufferAllocator:preserveTopology:error:', {'arguments': {5: {'type': 'Z'}, 6: {'type_modifier': b'o'}}}) - r(b'MDLCamera', b'frameBoundingBox:setNearAndFar:', {'arguments': {3: {'type': 'Z'}}}) - r(b'MDLMaterialPropertyNode', b'evaluationFunction', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}) - r(b'MDLMaterialPropertyNode', b'initWithInputs:outputs:evaluationFunction:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'MDLMaterialPropertyNode', b'setEvaluationFunction:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - 
r(b'MDLMatrix4x4Array', b'getDouble4x4Array:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLMatrix4x4Array', b'getFloat4x4Array:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLMatrix4x4Array', b'setDouble4x4Array:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MDLMatrix4x4Array', b'setFloat4x4Array:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'MDLMesh', b'generateAmbientOcclusionTextureWithQuality:attenuationFactor:objectsToConsider:vertexAttributeNamed:materialPropertyNamed:', {'retval': {'type': 'Z'}}) - r(b'MDLMesh', b'generateAmbientOcclusionTextureWithSize:raysPerSample:attenuationFactor:objectsToConsider:vertexAttributeNamed:materialPropertyNamed:', {'retval': {'type': 'Z'}}) - r(b'MDLMesh', b'generateAmbientOcclusionVertexColorsWithQuality:attenuationFactor:objectsToConsider:vertexAttributeNamed:', {'retval': {'type': 'Z'}}) - r(b'MDLMesh', b'generateAmbientOcclusionVertexColorsWithRaysPerSample:attenuationFactor:objectsToConsider:vertexAttributeNamed:', {'retval': {'type': 'Z'}}) - r(b'MDLMesh', b'generateLightMapTextureWithQuality:lightsToCondider:objectsToConsider:vertexAttributeNamed:materialPropertyNamed:', {'retval': {'type': 'Z'}}) - r(b'MDLMesh', b'generateLightMapTextureWithTextureSize:lightsToCondider:objectsToConsider:vertexAttributeNamed:materialPropertyNamed:', {'retval': {'type': 'Z'}}) - r(b'MDLMesh', b'generateLightMapVertexColorsWithLightsToConsider:objectsToConsider:vertexAttributeNamed:', {'retval': {'type': 'Z'}}) - r(b'MDLMesh', b'initBoxWithExtent:segments:inwardNormals:geometryType:allocator:', {'arguments': {4: {'type': 'Z'}}}) - r(b'MDLMesh', b'initCapsuleWithExtent:cylinderSegments:hemisphereSegments:inwardNormals:geometryType:allocator:', {'arguments': {5: {'type': 'Z'}}}) - r(b'MDLMesh', b'initConeWithExtent:segments:inwardNormals:cap:geometryType:allocator:', {'arguments': {4: {'type': 'Z'}, 5: {'type': 'Z'}}}) - r(b'MDLMesh', b'initCylinderWithExtent:segments:inwardNormals:topCap:bottomCap:cap:geometryType:allocator:', {'arguments': {4: {'type': 'Z'}, 5: {'type': 'Z'}, 6: {'type': 'Z'}}}) - r(b'MDLMesh', b'initHemisphereWithExtent:segments:inwardNormals:cap:geometryType:allocator:', {'arguments': {4: {'type': 'Z'}, 5: {'type': 'Z'}}}) - r(b'MDLMesh', b'initIcosahedronWithExtent:inwardNormals:segments:geometryType:allocator:', {'arguments': {3: {'type': 'Z'}}}) - r(b'MDLMesh', b'initMeshWithPrimitive:segments:inwardNormals:geometryType:allocator:', {'arguments': {4: {'type': 'Z'}}}) - r(b'MDLMesh', b'initSphereWithExtent:segments:inwardNormals:geometryType:allocator:', {'arguments': {4: {'type': 'Z'}}}) - r(b'MDLMesh', b'makeVerticesUniqueAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'MDLMesh', b'newBoxWithDimensions:segments:geometryType:inwardNormals:allocator:', {'arguments': {5: {'type': 'Z'}}}) - r(b'MDLMesh', b'newCapsuleWithHeight:radii:radialSegments:verticalSegments:hemisphereSegments:geometryType:inwardNormals:allocator:', {'arguments': {8: {'type': 'Z'}}}) - r(b'MDLMesh', b'newCylinderWithHeight:radii:radialSegments:verticalSegments:geometryType:inwardNormals:allocator:', {'arguments': {7: {'type': 'Z'}}}) - r(b'MDLMesh', b'newEllipsoidWithRadii:radialSegments:verticalSegments:geometryType:inwardNormals:hemisphere:allocator:', {'arguments': {6: {'type': 'Z'}, 
7: {'type': 'Z'}}}) - r(b'MDLMesh', b'newEllipticalConeWithHeight:radii:radialSegments:verticalSegments:geometryType:inwardNormals:allocator:', {'arguments': {7: {'type': 'Z'}}}) - r(b'MDLMesh', b'newIcosahedronWithRadius:inwardNormals:allocator:', {'arguments': {3: {'type': 'Z'}}}) - r(b'MDLMesh', b'newMeshWithPrimitive:segments:inwardNormals:geometryType:allocator:', {'arguments': {4: {'type': 'Z'}}}) - r(b'MDLMeshBufferMap', b'bytes', {'retval': {'c_array_of_variable_length': True}}) - r(b'MDLMeshBufferMap', b'initWithBytes:deallocator:', {'arguments': {2: {'type_modifier': b'n', 'c_array_of_variable_length': True}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'MDLMorphDeformer', b'copyShapeSetTargetCountsInto:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLMorphDeformer', b'copyShapeSetTargetWeightsInto:maxCount:', {'arguments': {2: {'c_array_length_in_result': True, 'type_modifier': b'o', 'c_array_length_in_arg': 3}}}) - r(b'MDLMorphDeformer', b'initWithTargetShapes:shapeSetTargetWeights:count:shapeSetTargetCounts:count:', {'arguments': {3: {'type_modifier': b'n', 'c_array_length_in_arg': 4}, 5: {'type_modifier': b'n', 'c_array_length_in_arg': 6}}}) - r(b'MDLNoiseTexture', b'initScalarNoiseWithSmoothness:name:textureDimensions:channelCount:channelEncoding:grayScale:', {'arguments': {7: {'type': 'Z'}}}) - r(b'MDLObject', b'enumerateChildObjectsOfClass:root:usingBlock:stopPointer:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^Z'}}}}, 5: {'type': 'N^Z'}}}) - r(b'MDLObject', b'hidden', {'retval': {'type': 'Z'}}) - r(b'MDLObject', b'resetsTransform', {'retval': {'type': 'Z'}}) - r(b'MDLObject', b'setHidden:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MDLObject', b'setResetsTransform:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MDLSkinDeformer', b'copyJointBindTransformsInto:maxCount:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'comment': 'matrix_float4x4', 'type': '?'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'MDLTexture', b'hasAlphaValues', {'retval': {'type': 'Z'}}) - r(b'MDLTexture', b'initWithData:topLeftOrigin:name:dimensions:rowStride:channelCount:channelEncoding:isCube:', {'arguments': {3: {'type': 'Z'}, 9: {'type': 'Z'}}}) - r(b'MDLTexture', b'isCube', {'retval': {'type': 'Z'}}) - r(b'MDLTexture', b'setHasAlphaValues:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MDLTexture', b'setIsCube:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MDLTexture', b'texelDataWithBottomLeftOriginAtMipLevel:create:', {'arguments': {3: {'type': 'Z'}}}) - r(b'MDLTexture', b'texelDataWithTopLeftOriginAtMipLevel:create:', {'arguments': {3: {'type': 'Z'}}}) - r(b'MDLTexture', b'writeToURL:', {'retval': {'type': 'Z'}}) - r(b'MDLTexture', b'writeToURL:level:', {'retval': {'type': 'Z'}}) - r(b'MDLTexture', b'writeToURL:type:', {'retval': {'type': 'Z'}}) - r(b'MDLTexture', b'writeToURL:type:level:', {'retval': {'type': 'Z'}}) - r(b'MDLTransform', b'initWithMatrix:resetsTransform:', {'arguments': {3: {'type': 'Z'}}}) - r(b'MDLTransform', b'initWithTransformComponent:resetsTransform:', {'arguments': {3: {'type': 'Z'}}}) - r(b'MDLVertexAttributeData', b'dataStart', {'retval': {'c_array_of_variable_length': True}}) - r(b'MDLVertexAttributeData', b'setDataStart:', {'arguments': {2: {'c_array_of_variable_length': True}}}) - r(b'MDLVoxelArray', b'isValidSignedShellField', {'retval': {'type': 'Z'}}) - 
r(b'MDLVoxelArray', b'setIsValidSignedShellField:', {'arguments': {2: {'type': 'Z'}}}) - r(b'MDLVoxelArray', b'voxelExistsAtIndex:allowAnyX:allowAnyY:allowAnyZ:allowAnyShell:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}, 5: {'type': 'Z'}, 6: {'type': 'Z'}}}) - r(b'NSObject', b'canResolveAssetNamed:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'capacity', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'copyJointBindTransformsInto:maxCount:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'comment': 'matrix_float4x4', 'type': '?'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'fillData:offset:', {'arguments': {3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'globalTransformWithObject:atTime', {'arguments': {3: {'type': 'd'}}}) - r(b'NSObject', b'length', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'localTransformAtTime:', {'arguments': {2: {'type': 'd'}}}) - r(b'NSObject', b'maximumTime', {'retval': {'type': 'd'}}) - r(b'NSObject', b'minimumTime', {'retval': {'type': 'd'}}) - r(b'NSObject', b'newBuffer:type:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'newBufferFromZone:data:type:', {'arguments': {4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'newBufferFromZone:length:type:', {'arguments': {3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'newBufferWithData:type:', {'arguments': {3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'newZone:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'objectAtIndexedSubscript:', {'arguments': {2: {'type': 'Q'}}}) - r(b'NSObject', b'primitiveType', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'resetsTransform', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'setLocalTransform:forTime:', {'arguments': {3: {'type': 'd'}}}) - r(b'NSObject', b'setMaximumTime:', {'arguments': {2: {'type': 'd'}}}) - r(b'NSObject', b'setMinimumTime:', {'arguments': {2: {'type': 'd'}}}) - r(b'NSObject', b'setPrimitiveType:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'setResetsTransform:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSObject', b'setSphericalHarmonicsLevel:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'sphericalHarmonicsLevel', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'type', {'retval': {'type': sel32or64(b'I', b'Q')}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/ModelIO/_metadata.pyc b/env/lib/python2.7/site-packages/ModelIO/_metadata.pyc deleted file mode 100644 index 4ded2fcd..00000000 Binary files a/env/lib/python2.7/site-packages/ModelIO/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MultipeerConnectivity/_MultipeerConnectivity.so b/env/lib/python2.7/site-packages/MultipeerConnectivity/_MultipeerConnectivity.so deleted file mode 100755 index 95a0ef39..00000000 Binary files a/env/lib/python2.7/site-packages/MultipeerConnectivity/_MultipeerConnectivity.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MultipeerConnectivity/__init__.py b/env/lib/python2.7/site-packages/MultipeerConnectivity/__init__.py deleted file mode 100644 index 4690de61..00000000 --- a/env/lib/python2.7/site-packages/MultipeerConnectivity/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the 
MultipeerConnectivity framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from MultipeerConnectivity import _metadata -import MultipeerConnectivity._MultipeerConnectivity - -sys.modules['MultipeerConnectivity'] = mod = objc.ObjCLazyModule( - "MultipeerConnectivity", - "com.apple.MultipeerConnectivity", - objc.pathForFramework("/System/Library/Frameworks/MultipeerConnectivity.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['MultipeerConnectivity._metadata'] -del sys.modules['MultipeerConnectivity._MultipeerConnectivity'] diff --git a/env/lib/python2.7/site-packages/MultipeerConnectivity/__init__.pyc b/env/lib/python2.7/site-packages/MultipeerConnectivity/__init__.pyc deleted file mode 100644 index 7f37be3b..00000000 Binary files a/env/lib/python2.7/site-packages/MultipeerConnectivity/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/MultipeerConnectivity/_metadata.py b/env/lib/python2.7/site-packages/MultipeerConnectivity/_metadata.py deleted file mode 100644 index 1509b25c..00000000 --- a/env/lib/python2.7/site-packages/MultipeerConnectivity/_metadata.py +++ /dev/null @@ -1,56 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Sep 22 23:54:15 2015 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$MCErrorDomain$kMCSessionMaximumNumberOfPeers@Q$kMCSessionMinimumNumberOfPeers@Q$''' -enums = '''$MCEncryptionNone@2$MCEncryptionOptional@0$MCEncryptionRequired@1$MCErrorCancelled@5$MCErrorInvalidParameter@2$MCErrorNotConnected@1$MCErrorTimedOut@4$MCErrorUnavailable@6$MCErrorUnknown@0$MCErrorUnsupported@3$MCSessionSendDataReliable@0$MCSessionSendDataUnreliable@1$MCSessionStateConnected@2$MCSessionStateConnecting@1$MCSessionStateNotConnected@0$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'MCSession', b'nearbyConnectionDataForPeer:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'MCSession', b'sendData:toPeers:withMode:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'MCSession', b'sendResourceAtURL:withName:toPeer:withCompletionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'MCSession', b'startStreamWithName:toPeer:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'NSObject', b'advertiser:didReceiveInvitationFromPeer:withContext:invitationHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'session:didReceiveCertificate:fromPeer:certificateHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'NSObject', b'session:peer:didChangeState:', {'arguments': {4: {'type': 'Q'}}}) -finally: - objc._updatingMetadata(False) -r 
= objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'advertiser:didNotStartAdvertisingPeer:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'advertiser:didReceiveInvitationFromPeer:withContext:invitationHandler:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'advertiserAssistantDidDismissInvitation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'advertiserAssistantWillPresentInvitation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'browser:didNotStartBrowsingForPeers:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'browser:foundPeer:withDiscoveryInfo:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'browser:lostPeer:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'browserViewController:shouldPresentNearbyPeer:withDiscoveryInfo:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'browserViewControllerDidFinish:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'browserViewControllerWasCancelled:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'session:didFinishReceivingResourceWithName:fromPeer:atURL:withError:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'session:didReceiveCertificate:fromPeer:certificateHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'NSObject', b'session:didReceiveData:fromPeer:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'session:didReceiveStream:withName:fromPeer:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'session:didStartReceivingResourceWithName:fromPeer:withProgress:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'session:peer:didChangeState:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Q'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/MultipeerConnectivity/_metadata.pyc b/env/lib/python2.7/site-packages/MultipeerConnectivity/_metadata.pyc deleted file mode 100644 index 12a16fa3..00000000 Binary files a/env/lib/python2.7/site-packages/MultipeerConnectivity/_metadata.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/NetFS/__init__.py b/env/lib/python2.7/site-packages/NetFS/__init__.py deleted file mode 100644 index 4b2c08fe..00000000 --- a/env/lib/python2.7/site-packages/NetFS/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -''' -Python mapping for the NetFS framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from NetFS import _metadata - -try: - long -except NameError: - long = int - -sys.modules['NetFS'] = mod = objc.ObjCLazyModule( - "NetFS", - "com.apple.NetFS", - objc.pathForFramework("/System/Library/Frameworks/NetFS.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['NetFS._metadata'] diff --git a/env/lib/python2.7/site-packages/NetFS/__init__.pyc b/env/lib/python2.7/site-packages/NetFS/__init__.pyc deleted file mode 100644 index aacb8b07..00000000 Binary files a/env/lib/python2.7/site-packages/NetFS/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/NetFS/_metadata.py b/env/lib/python2.7/site-packages/NetFS/_metadata.py deleted file mode 100644 index a25696e1..00000000 --- a/env/lib/python2.7/site-packages/NetFS/_metadata.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Jul 18 12:12:04 2016 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$ENETFSACCOUNTRESTRICTED@-5999$ENETFSNOAUTHMECHSUPP@-5997$ENETFSNOPROTOVERSSUPP@-5996$ENETFSNOSHARESAVAIL@-5998$ENETFSPWDNEEDSCHANGE@-5045$ENETFSPWDPOLICY@-5046$''' -misc.update({'kNetFSOpenURLMountKey': b'OpenURLMount'.decode("utf-8"), 'kNetFSMountPathKey': b'MountPath'.decode("utf-8"), 'kNAUIOptionNoUI': b'NoUI'.decode("utf-8"), 'kNetFSServerDisplayNameKey': b'ServerDisplayName'.decode("utf-8"), 'kNetFSAuthorityParamsKey': b'AuthorityParams'.decode("utf-8"), 'kNetFSMountFlagsKey': b'MountFlags'.decode("utf-8"), 'kNetFSSupportsKerberosKey': b'SupportsKerberos'.decode("utf-8"), 'kNetFSMountedMultiUserKey': b'MountedMultiUser'.decode("utf-8"), 'kNetFSHostKey': b'Host'.decode("utf-8"), 'kNetFSConnectedAsUserKey': b'MountedByUser'.decode("utf-8"), 'kNetFSHasPasswordKey': b'HasPassword'.decode("utf-8"), 'kNetFSSupportsGuestKey': b'SupportsGuest'.decode("utf-8"), 'kNetFSAlreadyMountedKey': b'AlreadyMounted'.decode("utf-8"), 'kNAUIOptionAllowUI': b'AllowUI'.decode("utf-8"), 'kNetFSMechTypesSupportedKey': b'MechTypesSupported'.decode("utf-8"), 'kNetFSMountedByUserKey': b'MountedByUser'.decode("utf-8"), 'kNetFSMountedWithAuthenticationInfoKey': b'MountedWithAuthenticationInfo'.decode("utf-8"), 'kNetFSAuthenticationInfoKey': b'AuthenticationInfo'.decode("utf-8"), 'kNetFSNoMountAuthenticationKey': b'NoMountAuthentication'.decode("utf-8"), 'kNetFSPrinterShareKey': b'PrinterShare'.decode("utf-8"), 'kNetFSUseKerberosKey': b'Kerberos'.decode("utf-8"), 'kNetFSMountAtMountDirKey': b'MountAtMountDir'.decode("utf-8"), 'kNAUIOptionForceUI': b'ForceUI'.decode("utf-8"), 'kNetFSGetAccessRightsKey': b'GetAccessRights'.decode("utf-8"), 'kNetFSPasswordKey': b'Password'.decode("utf-8"), 'kNetFSMountedByKerberosKey': 
b'MountedByKerberos'.decode("utf-8"), 'kNetFSSchemeKey': b'Scheme'.decode("utf-8"), 'kNetFSConnectedMultiUserKey': b'ConnectedMultiUser'.decode("utf-8"), 'kNetFSAccessRightsKey': b'AccessRights'.decode("utf-8"), 'kNetFSMountedByGuestKey': b'MountedByGuest'.decode("utf-8"), 'kNetFSSoftMountKey': b'SoftMount'.decode("utf-8"), 'kNetFSChangePasswordKey': b'ChangePassword'.decode("utf-8"), 'kNetFSUseGuestKey': b'Guest'.decode("utf-8"), 'kNetFSUseAuthenticationInfoKey': b'UseAuthenticationInfo'.decode("utf-8"), 'kNetFSPathKey': b'Path'.decode("utf-8"), 'kNetFSSupportsChangePasswordKey': b'SupportsChangePassword'.decode("utf-8"), 'kNetFSNoUserPreferencesKey': b'NoUserPreferences'.decode("utf-8"), 'kNetFSForceNewSessionKey': b'ForceNewSession'.decode("utf-8"), 'kNAUIOptionKey': b'UIOption'.decode("utf-8"), 'kNetFSConnectedAsGuestKey': b'MountedByGuest'.decode("utf-8"), 'kNetFSConnectedWithAuthenticationInfoKey': b'ConnectedWithAuthenticationInfo'.decode("utf-8"), 'kNetFSDisplayNameKey': b'DisplayName'.decode("utf-8"), 'kNetFSAlternatePortKey': b'AlternatePort'.decode("utf-8"), 'kNetFSAllowLoopbackKey': b'AllowLoopback'.decode("utf-8"), 'kNetFSMountedURLKey': b'MountedURL'.decode("utf-8"), 'kNetFSGuestOnlyKey': b'GuestOnly'.decode("utf-8"), 'kNetFSAllowSubMountsKey': b'AllowSubMounts'.decode("utf-8"), 'kNetFSUserNameKey': b'UserName'.decode("utf-8"), 'kNetFSIsHiddenKey': b'IsHidden'.decode("utf-8")}) -functions={'NetFSMountURLProbe': (b'^{__CFString=}^{__CFString=}',), 'NetFSMountURLAsync': (b'i^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFDictionary=}^{__CFDictionary=}^^v^{dispatch_queue_s=}@?', '', {'arguments': {8: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'i'}, 2: {'type': '^v'}, 3: {'type': '@'}}}}, 6: {'type_modifier': 'o'}}}), 'NetFSMountURLSync': (b'i^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFDictionary=}^{__CFDictionary=}^^{__CFArray=}', '', {'arguments': {6: {'type_modifier': 'o'}}}), 'NetFSMountURLCancel': (b'i^v',), 'NetFSCopyURLForRemountingVolume': (b'^{__CFURL=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'NetFSCFStringtoCString': (b'^t^{__CFString=}', '', {'retval': {'c_array_delimited_by_null': True}})} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/NetFS/_metadata.pyc b/env/lib/python2.7/site-packages/NetFS/_metadata.pyc deleted file mode 100644 index 7dba1bf8..00000000 Binary files a/env/lib/python2.7/site-packages/NetFS/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/NetworkExtension/_NetworkExtension.so b/env/lib/python2.7/site-packages/NetworkExtension/_NetworkExtension.so deleted file mode 100755 index 75afba84..00000000 Binary files a/env/lib/python2.7/site-packages/NetworkExtension/_NetworkExtension.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/NetworkExtension/__init__.py b/env/lib/python2.7/site-packages/NetworkExtension/__init__.py deleted file mode 100644 index eef38890..00000000 --- a/env/lib/python2.7/site-packages/NetworkExtension/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the NetworkExtension framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from NetworkExtension import _metadata, _NetworkExtension - -sys.modules['NetworkExtension'] = mod = objc.ObjCLazyModule( - "NetworkExtension", - "com.apple.NetworkExtension", - objc.pathForFramework("/System/Library/Frameworks/NetworkExtension.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['NetworkExtension._metadata'] diff --git a/env/lib/python2.7/site-packages/NetworkExtension/__init__.pyc b/env/lib/python2.7/site-packages/NetworkExtension/__init__.pyc deleted file mode 100644 index efc12b9e..00000000 Binary files a/env/lib/python2.7/site-packages/NetworkExtension/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/NetworkExtension/_metadata.py b/env/lib/python2.7/site-packages/NetworkExtension/_metadata.py deleted file mode 100644 index 98e91df4..00000000 --- a/env/lib/python2.7/site-packages/NetworkExtension/_metadata.py +++ /dev/null @@ -1,119 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Sep 30 22:02:02 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$NEAppProxyErrorDomain$NEFilterConfigurationDidChangeNotification$NEFilterErrorDomain$NETunnelProviderErrorDomain$NEVPNConfigurationChangeNotification$NEVPNConnectionStartOptionPassword$NEVPNConnectionStartOptionUsername$NEVPNErrorDomain$NEVPNStatusDidChangeNotification$''' -enums = '''$NEAppProxyFlowErrorAborted@5$NEAppProxyFlowErrorDatagramTooLarge@9$NEAppProxyFlowErrorHostUnreachable@3$NEAppProxyFlowErrorInternal@8$NEAppProxyFlowErrorInvalidArgument@4$NEAppProxyFlowErrorNotConnected@1$NEAppProxyFlowErrorPeerReset@2$NEAppProxyFlowErrorReadAlreadyPending@10$NEAppProxyFlowErrorRefused@6$NEAppProxyFlowErrorTimedOut@7$NEEvaluateConnectionRuleActionConnectIfNeeded@1$NEEvaluateConnectionRuleActionNeverConnect@2$NEFilterFlowBytesMax@18446744073709551615$NEFilterManagerErrorConfigurationCannotBeRemoved@4$NEFilterManagerErrorConfigurationDisabled@2$NEFilterManagerErrorConfigurationInvalid@1$NEFilterManagerErrorConfigurationStale@3$NEOnDemandRuleActionConnect@1$NEOnDemandRuleActionDisconnect@2$NEOnDemandRuleActionEvaluateConnection@3$NEOnDemandRuleActionIgnore@4$NEOnDemandRuleInterfaceTypeAny@0$NEOnDemandRuleInterfaceTypeEthernet@1$NEOnDemandRuleInterfaceTypeWiFi@2$NEProviderStopReasonAuthenticationCanceled@6$NEProviderStopReasonConfigurationDisabled@9$NEProviderStopReasonConfigurationFailed@7$NEProviderStopReasonConfigurationRemoved@10$NEProviderStopReasonConnectionFailed@14$NEProviderStopReasonIdleTimeout@8$NEProviderStopReasonNoNetworkAvailable@3$NEProviderStopReasonNone@0$NEProviderStopReasonProviderDisabled@5$NEProviderStopReasonProviderFailed@2$NEProviderStopReasonSuperceded@11$NEProviderStopReasonUnrecoverableNetworkChange@4$NEProviderStopReasonUserInitiated@1$NEProviderStopReasonUserLogout@12$NEProviderStopReasonUserSwitch@13$NETunnelProviderErrorNetworkSettingsCanceled@2$NETunnelProviderErrorNetworkSettingsFailed@3$NETunnelProviderErrorNetworkSettingsInvalid@1$NETunnelProviderRoutingMethodDestinationIP@1$NETunnelProviderRoutingMethodSourceApplication@2$NEVPNErrorConfigurationDisabled@2$NEVPNErrorConfigurationInvalid@1$NEVPNErrorConfigurationReadWriteFailed@
5$NEVPNErrorConfigurationStale@4$NEVPNErrorConfigurationUnknown@6$NEVPNErrorConnectionFailed@3$NEVPNIKEAuthenticationMethodCertificate@1$NEVPNIKEAuthenticationMethodNone@0$NEVPNIKEAuthenticationMethodSharedSecret@2$NEVPNIKEv2CertificateTypeECDSA256@2$NEVPNIKEv2CertificateTypeECDSA384@3$NEVPNIKEv2CertificateTypeECDSA521@4$NEVPNIKEv2CertificateTypeRSA@1$NEVPNIKEv2DeadPeerDetectionRateHigh@3$NEVPNIKEv2DeadPeerDetectionRateLow@1$NEVPNIKEv2DeadPeerDetectionRateMedium@2$NEVPNIKEv2DeadPeerDetectionRateNone@0$NEVPNIKEv2DiffieHellmanGroup0@0$NEVPNIKEv2DiffieHellmanGroup1@1$NEVPNIKEv2DiffieHellmanGroup14@14$NEVPNIKEv2DiffieHellmanGroup15@15$NEVPNIKEv2DiffieHellmanGroup16@16$NEVPNIKEv2DiffieHellmanGroup17@17$NEVPNIKEv2DiffieHellmanGroup18@18$NEVPNIKEv2DiffieHellmanGroup19@19$NEVPNIKEv2DiffieHellmanGroup2@2$NEVPNIKEv2DiffieHellmanGroup20@20$NEVPNIKEv2DiffieHellmanGroup21@21$NEVPNIKEv2DiffieHellmanGroup5@5$NEVPNIKEv2DiffieHellmanGroupInvalid@0$NEVPNIKEv2EncryptionAlgorithm3DES@2$NEVPNIKEv2EncryptionAlgorithmAES128@3$NEVPNIKEv2EncryptionAlgorithmAES128GCM@5$NEVPNIKEv2EncryptionAlgorithmAES256@4$NEVPNIKEv2EncryptionAlgorithmAES256GCM@6$NEVPNIKEv2EncryptionAlgorithmDES@1$NEVPNIKEv2IntegrityAlgorithmSHA160@2$NEVPNIKEv2IntegrityAlgorithmSHA256@3$NEVPNIKEv2IntegrityAlgorithmSHA384@4$NEVPNIKEv2IntegrityAlgorithmSHA512@5$NEVPNIKEv2IntegrityAlgorithmSHA96@1$NEVPNIKEv2TLSVersion1_0@1$NEVPNIKEv2TLSVersion1_1@2$NEVPNIKEv2TLSVersion1_2@3$NEVPNIKEv2TLSVersionDefault@0$NEVPNStatusConnected@3$NEVPNStatusConnecting@2$NEVPNStatusDisconnected@1$NEVPNStatusDisconnecting@5$NEVPNStatusInvalid@0$NEVPNStatusReasserting@4$NWPathStatusInvalid@0$NWPathStatusSatisfiable@3$NWPathStatusSatisfied@1$NWPathStatusUnsatisfied@2$NWTCPConnectionStateCancelled@5$NWTCPConnectionStateConnected@3$NWTCPConnectionStateConnecting@1$NWTCPConnectionStateDisconnected@4$NWTCPConnectionStateInvalid@0$NWTCPConnectionStateWaiting@2$NWUDPSessionStateCancelled@5$NWUDPSessionStateFailed@4$NWUDPSessionStateInvalid@0$NWUDPSessionStatePreparing@2$NWUDPSessionStateReady@3$NWUDPSessionStateWaiting@1$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NEAppProxyFlow', b'openWithLocalEndpoint:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEAppProxyProvider', b'handleNewFlow:', {'retval': {'type': 'Z'}}) - r(b'NEAppProxyProvider', b'startProxyWithOptions:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEAppProxyProvider', b'stopProxyWithReason:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NEAppProxyProviderManager', b'loadAllFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEAppProxyTCPFlow', b'readDataWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEAppProxyTCPFlow', b'writeData:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEAppProxyUDPFlow', b'readDatagramsWithCompletionHandler:', {'arguments': {2: {'callable': 
{'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEAppProxyUDPFlow', b'writeDatagrams:sentByEndpoints:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEDNSSettings', b'matchDomainsNoSearch', {'retval': {'type': 'Z'}}) - r(b'NEDNSSettings', b'setMatchDomainsNoSearch:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEFilterControlProvider', b'handleNewFlow:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEFilterControlProvider', b'handleRemediationForFlow:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEFilterManager', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'NEFilterManager', b'loadFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEFilterManager', b'removeFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEFilterManager', b'saveToPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEFilterManager', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEFilterProviderConfiguration', b'filterBrowsers', {'retval': {'type': 'Z'}}) - r(b'NEFilterProviderConfiguration', b'filterSockets', {'retval': {'type': 'Z'}}) - r(b'NEFilterProviderConfiguration', b'setFilterBrowsers:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEFilterProviderConfiguration', b'setFilterSockets:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEPacketTunnelFlow', b'readPacketObjectsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEPacketTunnelFlow', b'readPacketsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEPacketTunnelFlow', b'writePacketObjects:', {'retval': {'type': 'Z'}}) - r(b'NEPacketTunnelFlow', b'writePackets:withProtocols:', {'retval': {'type': 'Z'}}) - r(b'NEPacketTunnelProvider', b'createTCPConnectionThroughTunnelToEndpoint:enableTLS:TLSParameters:delegate:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NEPacketTunnelProvider', b'startTunnelWithOptions:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEPacketTunnelProvider', b'stopTunnelWithReason:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NEProvider', b'createTCPConnectionToEndpoint:enableTLS:TLSParameters:delegate:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NEProvider', b'displayMessage:completionHandler:', {'deprecated': 1014, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'NEProvider', b'sleepWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': 
{'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NEProxyServer', b'authenticationRequired', {'retval': {'type': 'Z'}}) - r(b'NEProxyServer', b'setAuthenticationRequired:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEProxySettings', b'HTTPEnabled', {'retval': {'type': 'Z'}}) - r(b'NEProxySettings', b'HTTPSEnabled', {'retval': {'type': 'Z'}}) - r(b'NEProxySettings', b'autoProxyConfigurationEnabled', {'retval': {'type': 'Z'}}) - r(b'NEProxySettings', b'excludeSimpleHostnames', {'retval': {'type': 'Z'}}) - r(b'NEProxySettings', b'setAutoProxyConfigurationEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEProxySettings', b'setExcludeSimpleHostnames:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEProxySettings', b'setHTTPEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEProxySettings', b'setHTTPSEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NETunnelProvider', b'handleAppMessage:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NETunnelProvider', b'reasserting', {'retval': {'type': 'Z'}}) - r(b'NETunnelProvider', b'setReasserting:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NETunnelProvider', b'setTunnelNetworkSettings:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NETunnelProviderManager', b'loadAllFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NETunnelProviderSession', b'sendProviderMessage:returnError:responseHandler:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NETunnelProviderSession', b'startTunnelWithOptions:andReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NEVPNConnection', b'startVPNTunnelAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'NEVPNConnection', b'startVPNTunnelWithOptions:andReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'NEVPNManager', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'NEVPNManager', b'isOnDemandEnabled', {'retval': {'type': 'Z'}}) - r(b'NEVPNManager', b'loadFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEVPNManager', b'protocol', {'deprecated': 1011}) - r(b'NEVPNManager', b'removeFromPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEVPNManager', b'saveToPreferencesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NEVPNManager', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNManager', b'setOnDemandEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNManager', b'setProtocol:', {'deprecated': 1011}) - r(b'NEVPNProtocol', b'disconnectOnSleep', {'retval': {'type': 'Z'}}) - r(b'NEVPNProtocol', b'setDisconnectOnSleep:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNProtocolIKEv2', b'disableMOBIKE', {'retval': {'type': 'Z'}}) - 
r(b'NEVPNProtocolIKEv2', b'disableRedirect', {'retval': {'type': 'Z'}}) - r(b'NEVPNProtocolIKEv2', b'enablePFS', {'retval': {'type': 'Z'}}) - r(b'NEVPNProtocolIKEv2', b'enableRevocationCheck', {'retval': {'type': 'Z'}}) - r(b'NEVPNProtocolIKEv2', b'setDisableMOBIKE:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNProtocolIKEv2', b'setDisableRedirect:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNProtocolIKEv2', b'setEnablePFS:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNProtocolIKEv2', b'setEnableRevocationCheck:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNProtocolIKEv2', b'setStrictRevocationCheck:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNProtocolIKEv2', b'setUseConfigurationAttributeInternalIPSubnet:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNProtocolIKEv2', b'strictRevocationCheck', {'retval': {'type': 'Z'}}) - r(b'NEVPNProtocolIKEv2', b'useConfigurationAttributeInternalIPSubnet', {'retval': {'type': 'Z'}}) - r(b'NEVPNProtocolIPSec', b'setUseExtendedAuthentication:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NEVPNProtocolIPSec', b'useExtendedAuthentication', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'evaluateTrustForConnection:peerCertificateChain:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'provideIdentityForConnection:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'shouldEvaluateTrustForConnection:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'shouldProvideIdentityForConnection:', {'retval': {'type': 'Z'}}) - r(b'NWPath', b'isEqualToPath:', {'retval': {'type': 'Z'}}) - r(b'NWPath', b'isExpensive', {'retval': {'type': 'Z'}}) - r(b'NWTCPConnection', b'hasBetterPath', {'retval': {'type': 'Z'}}) - r(b'NWTCPConnection', b'isViable', {'retval': {'type': 'Z'}}) - r(b'NWTCPConnection', b'readLength:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NWTCPConnection', b'readMinimumLength:maximumLength:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NWTCPConnection', b'write:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NWUDPSession', b'hasBetterPath', {'retval': {'type': 'Z'}}) - r(b'NWUDPSession', b'isViable', {'retval': {'type': 'Z'}}) - r(b'NWUDPSession', b'setReadHandler:maxDatagrams:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NWUDPSession', b'writeDatagram:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/NetworkExtension/_metadata.pyc b/env/lib/python2.7/site-packages/NetworkExtension/_metadata.pyc deleted file mode 100644 index cb54440a..00000000 Binary files a/env/lib/python2.7/site-packages/NetworkExtension/_metadata.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/NotificationCenter/_NotificationCenter.so b/env/lib/python2.7/site-packages/NotificationCenter/_NotificationCenter.so deleted file mode 100755 index 8d1545d8..00000000 Binary files a/env/lib/python2.7/site-packages/NotificationCenter/_NotificationCenter.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/NotificationCenter/__init__.py b/env/lib/python2.7/site-packages/NotificationCenter/__init__.py deleted file mode 100644 index 5a84c771..00000000 --- a/env/lib/python2.7/site-packages/NotificationCenter/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the NotificationCenter framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Cocoa - -from NotificationCenter import _metadata, _NotificationCenter - -sys.modules['NotificationCenter'] = mod = objc.ObjCLazyModule( - "NotificationCenter", - "com.apple.notificationcenter", - objc.pathForFramework("/System/Library/Frameworks/NotificationCenter.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa, )) - -import sys -del sys.modules['NotificationCenter._metadata'] -del sys.modules['NotificationCenter._NotificationCenter'] diff --git a/env/lib/python2.7/site-packages/NotificationCenter/__init__.pyc b/env/lib/python2.7/site-packages/NotificationCenter/__init__.pyc deleted file mode 100644 index cd924115..00000000 Binary files a/env/lib/python2.7/site-packages/NotificationCenter/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/NotificationCenter/_metadata.py b/env/lib/python2.7/site-packages/NotificationCenter/_metadata.py deleted file mode 100644 index 2bcc54b9..00000000 --- a/env/lib/python2.7/site-packages/NotificationCenter/_metadata.py +++ /dev/null @@ -1,46 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Sep 30 22:16:09 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$NCUpdateResultFailed@2$NCUpdateResultNewData@0$NCUpdateResultNoData@1$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NCWidgetController', b'defaultWidgetController', {'deprecated': 1010}) - r(b'NCWidgetController', b'setHasContent:forWidgetWithBundleIdentifier:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NCWidgetListViewController', b'editing', {'retval': {'type': 'Z'}}) - r(b'NCWidgetListViewController', b'hasDividerLines', {'retval': {'type': 'Z'}}) - r(b'NCWidgetListViewController', b'setEditing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NCWidgetListViewController', b'setHasDividerLines:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NCWidgetListViewController', b'setShowsAddButtonWhenEditing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NCWidgetListViewController', b'showsAddButtonWhenEditing', {'retval': {'type': 'Z'}}) - r(b'NCWidgetListViewController', b'viewControllerAtRow:makeIfNecessary:', {'arguments': {3: {'type': 'Z'}}}) - r(b'NSObject', b'widgetAllowsEditing', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'widgetList:didRemoveRow:', {'arguments': {3: {'type': 'Q'}}}) - r(b'NSObject', 
b'widgetList:didReorderRow:toRow:', {'arguments': {3: {'type': 'Q'}, 4: {'type': 'Q'}}}) - r(b'NSObject', b'widgetList:shouldRemoveRow:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Q'}}}) - r(b'NSObject', b'widgetList:shouldReorderRow:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Q'}}}) - r(b'NSObject', b'widgetList:viewControllerForRow:', {'arguments': {3: {'type': 'Q'}}}) - r(b'NSObject', b'widgetMarginInsetsForProposedMarginInsets:', {'retval': {'type': '{NSEdgeInsets=dddd}'}, 'arguments': {2: {'type': '{NSEdgeInsets=dddd}'}}}) - r(b'NSObject', b'widgetPerformUpdateWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'I', b'Q')}}}, 'type': '@?'}}}) - r(b'NSObject', b'widgetSearch:searchForTerm:maxResults:', {'arguments': {4: {'type': sel32or64(b'I', b'Q')}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/NotificationCenter/_metadata.pyc b/env/lib/python2.7/site-packages/NotificationCenter/_metadata.pyc deleted file mode 100644 index 7c47f631..00000000 Binary files a/env/lib/python2.7/site-packages/NotificationCenter/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/OSAKit/__init__.py b/env/lib/python2.7/site-packages/OSAKit/__init__.py deleted file mode 100644 index fde4253c..00000000 --- a/env/lib/python2.7/site-packages/OSAKit/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the OSAKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Cocoa - -from OSAKit import _metadata - - -sys.modules['OSAKit'] = mod = objc.ObjCLazyModule( - "OSAKit", - "com.apple.OSAKit", - objc.pathForFramework("/System/Library/Frameworks/OSAKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa,)) - -import sys -del sys.modules['OSAKit._metadata'] diff --git a/env/lib/python2.7/site-packages/OSAKit/__init__.pyc b/env/lib/python2.7/site-packages/OSAKit/__init__.pyc deleted file mode 100644 index 89d56f51..00000000 Binary files a/env/lib/python2.7/site-packages/OSAKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/OSAKit/_metadata.py b/env/lib/python2.7/site-packages/OSAKit/_metadata.py deleted file mode 100644 index 2fca3f21..00000000 --- a/env/lib/python2.7/site-packages/OSAKit/_metadata.py +++ /dev/null @@ -1,53 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Sep 30 22:04:46 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$OSAScriptErrorAppAddressKey$OSAScriptErrorAppName$OSAScriptErrorAppNameKey$OSAScriptErrorBriefMessage$OSAScriptErrorBriefMessageKey$OSAScriptErrorExpectedTypeKey$OSAScriptErrorMessage$OSAScriptErrorMessageKey$OSAScriptErrorNumber$OSAScriptErrorNumberKey$OSAScriptErrorOffendingObjectKey$OSAScriptErrorPartialResultKey$OSAScriptErrorRange$OSAScriptErrorRangeKey$OSAStorageApplicationBundleType$OSAStorageApplicationType$OSAStorageScriptBundleType$OSAStorageScriptType$OSAStorageTextType$''' -enums = 
'''$OSACompileIntoContext@2$OSADontSetScriptLocation@16777216$OSANull@0$OSAPreventGetSource@1$OSAScriptRecording@2$OSAScriptRunning@1$OSAScriptStopped@0$OSAShowStartupScreen@536870912$OSAStayOpenApplet@268435456$OSASupportsAECoercion@8$OSASupportsAESending@16$OSASupportsCompiling@2$OSASupportsConvenience@64$OSASupportsDialects@128$OSASupportsEventHandling@256$OSASupportsGetSource@4$OSASupportsRecording@32$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'OSALanguage', b'isThreadSafe', {'retval': {'type': b'Z'}}) - r(b'OSAScript', b'compileAndReturnError:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'compiledDataForType:usingStorageOptions:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'executeAndReturnDisplayValue:error:', {'arguments': {2: {'type_modifier': b'o'}, 3: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'executeAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'executeAppleEvent:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'executeHandlerWithName:arguments:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'initWithCompiledData:error:', {'deprecated': 1006, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'initWithCompiledData:fromURL:usingStorageOptions:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'initWithContentsOfURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'initWithContentsOfURL:language:error:', {'deprecated': 1006, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'initWithContentsOfURL:languageInstance:usingStorageOptions:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'initWithScriptDataDescriptor:fromURL:languageInstance:usingStorageOptions:error:', {'arguments': {6: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'isCompiled', {'retval': {'type': b'Z'}}) - r(b'OSAScript', b'writeToURL:ofType:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'OSAScript', b'writeToURL:ofType:usingStorageOptions:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'OSAScriptController', b'isCompiling', {'retval': {'type': b'Z'}}) - r(b'OSAScriptView', b'indentsWrappedLines', {'retval': {'type': b'Z'}}) - r(b'OSAScriptView', b'setIndentsWrappedLines:', {'arguments': {2: {'type': b'Z'}}}) - r(b'OSAScriptView', b'setUsesScriptAssistant:', {'arguments': {2: {'type': b'Z'}}}) - r(b'OSAScriptView', b'setUsesTabs:', {'arguments': {2: {'type': b'Z'}}}) - r(b'OSAScriptView', b'setWrapsLines:', {'arguments': {2: {'type': b'Z'}}}) - r(b'OSAScriptView', b'usesScriptAssistant', {'retval': {'type': b'Z'}}) - r(b'OSAScriptView', b'usesTabs', {'retval': {'type': b'Z'}}) - r(b'OSAScriptView', b'wrapsLines', {'retval': {'type': b'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/OSAKit/_metadata.pyc b/env/lib/python2.7/site-packages/OSAKit/_metadata.pyc deleted file mode 100644 index 8df6e669..00000000 Binary files a/env/lib/python2.7/site-packages/OSAKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/OpenDirectory/__init__.py b/env/lib/python2.7/site-packages/OpenDirectory/__init__.py deleted file mode 100644 index f9b53cae..00000000 --- a/env/lib/python2.7/site-packages/OpenDirectory/__init__.py +++ /dev/null @@ -1,21 
+0,0 @@ -""" -Wrappers for the OpenDirectory framework -""" -import sys -import objc -import CFOpenDirectory - -from OpenDirectory import _metadata - -sys.modules['OpenDirectory'] = mod = objc.ObjCLazyModule('OpenDirectory', - "com.apple.OpenDirectory", - objc.pathForFramework("/System/Library/Frameworks/OpenDirectory.framework/Frameworks/OpenDirectory.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, (CFOpenDirectory,)) - -import sys -del sys.modules['OpenDirectory._metadata'] diff --git a/env/lib/python2.7/site-packages/OpenDirectory/__init__.pyc b/env/lib/python2.7/site-packages/OpenDirectory/__init__.pyc deleted file mode 100644 index b08a7fe6..00000000 Binary files a/env/lib/python2.7/site-packages/OpenDirectory/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/OpenDirectory/_metadata.py b/env/lib/python2.7/site-packages/OpenDirectory/_metadata.py deleted file mode 100644 index 2aac0026..00000000 --- a/env/lib/python2.7/site-packages/OpenDirectory/_metadata.py +++ /dev/null @@ -1,108 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Jun 11 10:49:58 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$ODFrameworkErrorDomain$ODSessionProxyAddress$ODSessionProxyPassword$ODSessionProxyPort$ODSessionProxyUsername$ODTrustTypeAnonymous$ODTrustTypeJoined$ODTrustTypeUsingCredentials$kODAttributeTypeNodeSASLRealm$kODAttributeTypeProfiles$kODAttributeTypeProfilesTimestamp$kODModuleConfigOptionConnectionIdleDisconnect@^{__CFString=}$kODModuleConfigOptionConnectionSetupTimeout@^{__CFString=}$kODModuleConfigOptionManInTheMiddle@^{__CFString=}$kODModuleConfigOptionPacketEncryption@^{__CFString=}$kODModuleConfigOptionPacketSigning@^{__CFString=}$kODModuleConfigOptionQueryTimeout@^{__CFString=}$kODPolicyAttributeCreationTime$kODPolicyAttributeCurrentDate$kODPolicyAttributeCurrentDayOfWeek$kODPolicyAttributeCurrentTime$kODPolicyAttributeCurrentTimeOfDay$kODPolicyAttributeDaysUntilExpiration$kODPolicyAttributeEnableAtTimeOfDay$kODPolicyAttributeEnableOnDate$kODPolicyAttributeEnableOnDayOfWeek$kODPolicyAttributeExpiresAtTimeOfDay$kODPolicyAttributeExpiresEveryNDays$kODPolicyAttributeExpiresOnDate$kODPolicyAttributeExpiresOnDayOfWeek$kODPolicyAttributeFailedAuthentications$kODPolicyAttributeLastAuthenticationTime$kODPolicyAttributeLastFailedAuthenticationTime$kODPolicyAttributeLastPasswordChangeTime$kODPolicyAttributeMaximumFailedAuthentications$kODPolicyAttributeNewPasswordRequiredTime$kODPolicyAttributePassword$kODPolicyAttributePasswordHashes$kODPolicyAttributePasswordHistory$kODPolicyAttributePasswordHistoryDepth$kODPolicyAttributeRecordName$kODPolicyAttributeRecordType$kODPolicyCategoryAuthentication$kODPolicyCategoryPasswordChange$kODPolicyCategoryPasswordContent$kODPolicyKeyContent$kODPolicyKeyIdentifier$kODPolicyKeyParameters$kODPolicyTypeAccountExpiresOnDate@^{__CFString=}$kODPolicyTypeAccountMaximumFailedLogins@^{__CFString=}$kODPolicyTypeAccountMaximumMinutesOfNonUse@^{__CFString=}$kODPolicyTypeAccountMaximumMinutesUntilDisabled@^{__CFString=}$kODPolicyTypeAccountMinutesUntilFailedLoginReset@^{__CFString=}$kODPolicyTypePasswordCannotBeAccountName@^{__CFString=}$kODPolicyTypePasswordChangeRequired@^{__CFString=}$kODPolicyTyp
ePasswordHistory@^{__CFString=}$kODPolicyTypePasswordMaximumAgeInMinutes@^{__CFString=}$kODPolicyTypePasswordMaximumNumberOfCharacters@^{__CFString=}$kODPolicyTypePasswordMinimumNumberOfCharacters@^{__CFString=}$kODPolicyTypePasswordRequiresAlpha@^{__CFString=}$kODPolicyTypePasswordRequiresMixedCase@^{__CFString=}$kODPolicyTypePasswordRequiresNumeric@^{__CFString=}$kODPolicyTypePasswordRequiresSymbol@^{__CFString=}$kODPolicyTypePasswordSelfModification@^{__CFString=}$''' -enums = '''$ODPacketEncryptionAllow@1$ODPacketEncryptionDisabled@0$ODPacketEncryptionRequired@2$ODPacketEncryptionSSL@3$ODPacketSigningAllow@1$ODPacketSigningDisabled@0$ODPacketSigningRequired@2$kODErrorPolicyOutOfRange@6001$kODErrorPolicyUnsupported@6000$kODExpirationTimeExpired@0$kODExpirationTimeNeverExpires@-1$''' -misc.update({}) -functions={'ODNodeCustomFunction': (b'@^{__ODNode=}^{__CFString=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordSetPolicy': (b'B^{__ODRecord=}^{__CFString=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeCopySupportedPolicies': (b'^{__CFDictionary=}^{__ODNode=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordCopySupportedPolicies': (b'^{__CFDictionary=}^{__ODRecord=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeRemovePolicy': (b'B^{__ODNode=}^{__CFString=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordSecondsUntilPasswordExpires': (b'q^{__ODRecord=}',), 'ODRecordPasswordChangeAllowed': (b'B^{__ODRecord=}^{__CFString=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordSetPolicies': (b'B^{__ODRecord=}^{__CFDictionary=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodePasswordContentCheck': (b'B^{__ODNode=}^{__CFString=}^{__CFString=}^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordCopyAccountPolicies': (b'^{__CFDictionary=}^{__ODRecord=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordSecondsUntilAuthenticationsExpire': (b'q^{__ODRecord=}',), 'ODRecordCopyPolicies': (b'^{__CFDictionary=}^{__ODRecord=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordRemovePolicy': (b'B^{__ODRecord=}^{__CFString=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordAddAccountPolicy': (b'B^{__ODRecord=}^{__CFDictionary=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordWillPasswordExpire': (b'B^{__ODRecord=}Q',), 'ODNodeRemoveAccountPolicy': (b'B^{__ODNode=}^{__CFDictionary=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeCopyAccountPolicies': 
(b'^{__CFDictionary=}^{__ODNode=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeSetAccountPolicies': (b'B^{__ODNode=}^{__CFDictionary=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordWillAuthenticationsExpire': (b'B^{__ODRecord=}Q',), 'ODNodeCopyPolicies': (b'^{__CFDictionary=}^{__ODNode=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeSetPolicies': (b'B^{__ODNode=}^{__CFDictionary=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordRemoveAccountPolicy': (b'B^{__ODRecord=}^{__CFDictionary=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordCopyEffectivePolicies': (b'^{__CFDictionary=}^{__ODRecord=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordSetAccountPolicies': (b'B^{__ODRecord=}^{__CFDictionary=}^^{__CFError=}', '', {'arguments': {2: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODRecordAuthenticationAllowed': (b'B^{__ODRecord=}^^{__CFError=}', '', {'arguments': {1: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}}), 'ODNodeAddAccountPolicy': (b'B^{__ODNode=}^{__CFDictionary=}@^^{__CFError=}', '', {'arguments': {3: {'null_accepted': True, 'already_cfretained': True, 'type_modifier': 'o'}}})} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'query:foundResults:error:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'ODConfiguration', b'addTrustType:trustAccount:trustPassword:username:password:joinExisting:error:', {'retval': {'type': b'Z'}, 'arguments': {7: {'type': b'Z'}, 8: {'type_modifier': b'o'}}}) - r(b'ODConfiguration', b'hideRegistration', {'retval': {'type': b'Z'}}) - r(b'ODConfiguration', b'manInTheMiddleProtection', {'retval': {'type': b'Z'}}) - r(b'ODConfiguration', b'removeTrustUsingUsername:password:deleteTrustAccount:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type': b'Z'}, 5: {'type_modifier': b'o'}}}) - r(b'ODConfiguration', b'saveUsingAuthorization:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODConfiguration', b'setHideRegistration:', {'arguments': {2: {'type': b'Z'}}}) - r(b'ODConfiguration', b'setManInTheMiddleProtection:', {'arguments': {2: {'type': b'Z'}}}) - r(b'ODConfiguration', b'trustUsesKerberosKeytab', {'retval': {'type': b'Z'}}) - r(b'ODConfiguration', b'trustUsesMutualAuthentication', {'retval': {'type': b'Z'}}) - r(b'ODConfiguration', b'trustUsesSystemKeychain', {'retval': {'type': b'Z'}}) - r(b'ODNode', b'accountPoliciesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODNode', b'addAccountPolicy:toCategory:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'createRecordWithRecordType:name:attributes:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'ODNode', b'customCall:sendData:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'customFunction:payload:error:', 
{'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'initWithSession:name:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'initWithSession:type:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'nodeDetailsForKeys:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODNode', b'nodeWithSession:name:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'nodeWithSession:type:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'passwordContentCheck:forRecordName:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'policiesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODNode', b'recordWithRecordType:name:attributes:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'ODNode', b'removeAccountPolicy:fromCategory:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'removePolicy:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODNode', b'setAccountPolicies:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODNode', b'setCredentialsUsingKerberosCache:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODNode', b'setCredentialsWithRecordType:authenticationType:authenticationItems:continueItems:context:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}, 6: {'type_modifier': b'o'}, 7: {'type_modifier': b'o'}}}) - r(b'ODNode', b'setCredentialsWithRecordType:recordName:password:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'ODNode', b'setPolicies:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODNode', b'setPolicy:value:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODNode', b'subnodeNamesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODNode', b'supportedAttributesForRecordType:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODNode', b'supportedPoliciesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODNode', b'supportedRecordTypesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODNode', b'unreachableSubnodeNamesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODQuery', b'initWithNode:forRecordTypes:attribute:matchType:queryValues:returnAttributes:maximumResults:error:', {'arguments': {9: {'type_modifier': b'o'}}}) - r(b'ODQuery', b'queryWithNode:forRecordTypes:attribute:matchType:queryValues:returnAttributes:maximumResults:error:', {'arguments': {9: {'type_modifier': b'o'}}}) - r(b'ODQuery', b'resultsAllowingPartial:error:', {'arguments': {2: {'type': b'Z'}, 3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'accountPoliciesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'addAccountPolicy:toCategory:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'addMemberRecord:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'addValue:toAttribute:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'authenticationAllowedAndReturnError:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'changePassword:toPassword:error:', {'retval': {'type': b'Z'}, 'arguments': {4: 
{'type_modifier': b'o'}}}) - r(b'ODRecord', b'deleteRecordAndReturnError:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'effectivePoliciesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'isMemberRecord:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'passwordChangeAllowed:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'passwordPolicyAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'policiesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'recordDetailsForAttributes:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'removeAccountPolicy:fromCategory:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'removeMemberRecord:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'removePolicy:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'removeValue:fromAttribute:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'removeValuesForAttribute:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'setAccountPolicies:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'setNodeCredentials:password:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'setNodeCredentialsUsingKerberosCache:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'setNodeCredentialsWithRecordType:authenticationType:authenticationItems:continueItems:context:error:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type_modifier': b'o'}, 6: {'type_modifier': b'o'}, 7: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'setPolicies:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'setPolicy:value:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'setValue:forAttribute:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'supportedPoliciesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'synchronizeAndReturnError:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'valuesForAttribute:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'verifyExtendedWithAuthenticationType:authenticationItems:continueItems:context:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}, 5: {'type_modifier': b'o'}, 6: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'verifyPassword:error:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODRecord', b'willAuthenticationsExpire:', {'retval': {'type': b'Z'}}) - r(b'ODRecord', b'willPasswordExpire:', {'retval': {'type': b'Z'}}) - r(b'ODSession', b'addConfiguration:authorization:error:', {'retval': {'type': b'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODSession', b'configurationAuthorizationAllowingUserInteraction:error:', {'arguments': {2: {'type': b'Z'}, 3: {'type_modifier': b'o'}}}) - r(b'ODSession', b'deleteConfiguration:authorization:error:', {'retval': {'type': 'Z'}, 'arguments': {4: 
{'type_modifier': b'o'}}}) - r(b'ODSession', b'deleteConfigurationWithNodename:authorization:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ODSession', b'initWithOptions:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ODSession', b'nodeNamesAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ODSession', b'sessionWithOptions:error:', {'arguments': {3: {'type_modifier': b'o'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/OpenDirectory/_metadata.pyc b/env/lib/python2.7/site-packages/OpenDirectory/_metadata.pyc deleted file mode 100644 index 9c90405f..00000000 Binary files a/env/lib/python2.7/site-packages/OpenDirectory/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Photos/_Photos.so b/env/lib/python2.7/site-packages/Photos/_Photos.so deleted file mode 100755 index c3d8d9dc..00000000 Binary files a/env/lib/python2.7/site-packages/Photos/_Photos.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Photos/__init__.py b/env/lib/python2.7/site-packages/Photos/__init__.py deleted file mode 100644 index 66c38209..00000000 --- a/env/lib/python2.7/site-packages/Photos/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the Photos framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from Photos import _metadata -import Photos._Photos - -sys.modules['Photos'] = mod = objc.ObjCLazyModule( - "Photos", - "com.apple.photos", - objc.pathForFramework("/System/Library/Frameworks/Photos.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['Photos._metadata'] diff --git a/env/lib/python2.7/site-packages/Photos/__init__.pyc b/env/lib/python2.7/site-packages/Photos/__init__.pyc deleted file mode 100644 index 82cf81d3..00000000 Binary files a/env/lib/python2.7/site-packages/Photos/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Photos/_metadata.py b/env/lib/python2.7/site-packages/Photos/_metadata.py deleted file mode 100644 index 0599a8ff..00000000 --- a/env/lib/python2.7/site-packages/Photos/_metadata.py +++ /dev/null @@ -1,63 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 5 13:02:38 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$PHImageCancelledKey$PHImageErrorKey$PHImageManagerMaximumSize@{CGSize=dd}$PHImageResultIsDegradedKey$PHImageResultIsInCloudKey$PHImageResultRequestIDKey$PHLivePhotoEditingErrorDomain$PHLivePhotoShouldRenderAtPlaybackTime$PHLocalIdentifierNotFound$''' -enums = 
'''$PHAssetBurstSelectionTypeAutoPick@1$PHAssetBurstSelectionTypeNone@0$PHAssetBurstSelectionTypeUserPick@2$PHAssetCollectionSubtypeAlbumCloudShared@101$PHAssetCollectionSubtypeAlbumImported@6$PHAssetCollectionSubtypeAlbumMyPhotoStream@100$PHAssetCollectionSubtypeAlbumRegular@2$PHAssetCollectionSubtypeAlbumSyncedAlbum@5$PHAssetCollectionSubtypeAlbumSyncedEvent@3$PHAssetCollectionSubtypeAlbumSyncedFaces@4$PHAssetCollectionSubtypeSmartAlbumAllHidden@205$PHAssetCollectionSubtypeSmartAlbumBursts@207$PHAssetCollectionSubtypeSmartAlbumDepthEffect@212$PHAssetCollectionSubtypeSmartAlbumFavorites@203$PHAssetCollectionSubtypeSmartAlbumGeneric@200$PHAssetCollectionSubtypeSmartAlbumLivePhotos@213$PHAssetCollectionSubtypeSmartAlbumPanoramas@201$PHAssetCollectionSubtypeSmartAlbumRecentlyAdded@206$PHAssetCollectionSubtypeSmartAlbumScreenshots@211$PHAssetCollectionSubtypeSmartAlbumSelfPortraits@210$PHAssetCollectionSubtypeSmartAlbumSlomoVideos@208$PHAssetCollectionSubtypeSmartAlbumTimelapses@204$PHAssetCollectionSubtypeSmartAlbumUserLibrary@209$PHAssetCollectionSubtypeSmartAlbumVideos@202$PHAssetCollectionTypeAlbum@1$PHAssetCollectionTypeMoment@3$PHAssetCollectionTypeSmartAlbum@2$PHAssetEditOperationContent@2$PHAssetEditOperationDelete@1$PHAssetEditOperationProperties@3$PHAssetMediaSubtypeNone@0$PHAssetMediaSubtypePhotoDepthEffect@16$PHAssetMediaSubtypePhotoHDR@2$PHAssetMediaSubtypePhotoLive@8$PHAssetMediaSubtypePhotoPanorama@1$PHAssetMediaSubtypePhotoScreenshot@4$PHAssetMediaSubtypeVideoHighFrameRate@131072$PHAssetMediaSubtypeVideoStreamed@65536$PHAssetMediaSubtypeVideoTimelapse@262144$PHAssetMediaTypeAudio@3$PHAssetMediaTypeImage@1$PHAssetMediaTypeUnknown@0$PHAssetMediaTypeVideo@2$PHAssetPlaybackStyleImage@1$PHAssetPlaybackStyleImageAnimated@2$PHAssetPlaybackStyleLivePhoto@3$PHAssetPlaybackStyleUnsupported@0$PHAssetPlaybackStyleVideo@4$PHAssetPlaybackStyleVideoLooping@5$PHAssetResourceTypeAdjustmentBasePhoto@8$PHAssetResourceTypeAdjustmentData@7$PHAssetResourceTypeAlternatePhoto@4$PHAssetResourceTypeAudio@3$PHAssetResourceTypeFullSizePhoto@5$PHAssetResourceTypeFullSizeVideo@6$PHAssetResourceTypePairedVideo@9$PHAssetResourceTypePhoto@1$PHAssetResourceTypeVideo@2$PHAssetSourceTypeCloudShared@2$PHAssetSourceTypeNone@0$PHAssetSourceTypeUserLibrary@1$PHAssetSourceTypeiTunesSynced@4$PHAuthorizationStatusAuthorized@3$PHAuthorizationStatusDenied@2$PHAuthorizationStatusNotDetermined@0$PHAuthorizationStatusRestricted@1$PHCollectionEditOperationAddContent@3$PHCollectionEditOperationCreateContent@4$PHCollectionEditOperationDelete@6$PHCollectionEditOperationDeleteContent@1$PHCollectionEditOperationRearrangeContent@5$PHCollectionEditOperationRemoveContent@2$PHCollectionEditOperationRename@7$PHCollectionListSubtypeMomentListCluster@1$PHCollectionListSubtypeMomentListYear@2$PHCollectionListSubtypeRegularFolder@100$PHCollectionListSubtypeSmartFolderEvents@200$PHCollectionListSubtypeSmartFolderFaces@201$PHCollectionListTypeFolder@2$PHCollectionListTypeMomentList@1$PHCollectionListTypeSmartFolder@3$PHImageContentModeAspectFill@1$PHImageContentModeAspectFit@0$PHImageRequestOptionsDeliveryModeFastFormat@2$PHImageRequestOptionsDeliveryModeHighQualityFormat@1$PHImageRequestOptionsDeliveryModeOpportunistic@0$PHImageRequestOptionsResizeModeExact@2$PHImageRequestOptionsResizeModeFast@1$PHImageRequestOptionsResizeModeNone@0$PHImageRequestOptionsVersionCurrent@0$PHImageRequestOptionsVersionOriginal@2$PHImageRequestOptionsVersionUnadjusted@1$PHInvalidImageRequestID@0$PHLivePhotoEditingErrorCodeAborted@1$PHLivePhotoEditingErrorCodeUn
known@0$PHLivePhotoFrameTypePhoto@0$PHLivePhotoFrameTypeVideo@1$''' -misc.update({}) -aliases = {'PHCollectionListSubtypeAny': 'NSIntegerMax', 'PHAssetCollectionSubtypeAny': 'NSIntegerMax', 'PHImageContentModeDefault': 'PHImageContentModeAspectFit'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'renderScale', {'retval': {'type': sel32or64(b'f', b'd')}}) - r(b'NSObject', b'time', {'retval': {'type': '{_CMTime=qiIq}'}}) - r(b'NSObject', b'type', {'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'PHAsset', b'isFavorite', {'retval': {'type': 'Z'}}) - r(b'PHAsset', b'isHidden', {'retval': {'type': 'Z'}}) - r(b'PHAsset', b'isSyncFailureHidden', {'retval': {'type': 'Z'}}) - r(b'PHFetchOptions', b'includeHiddenAssets', {'retval': {'type': 'Z'}}) - r(b'PHFetchOptions', b'setIncludeHiddenAssets:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PHFetchOptions', b'setWantsIncrementalChangeDetails:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PHFetchOptions', b'wantsIncrementalChangeDetails', {'retval': {'type': 'Z'}}) - r(b'PHFetchResult', b'containsObject:', {'retval': {'type': 'Z'}}) - r(b'PHFetchResult', b'enumerateObjectsAtIndexes:options:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'o^Z'}}}}}}) - r(b'PHFetchResult', b'enumerateObjectsUsingBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'o^Z'}}}}}}) - r(b'PHFetchResult', b'enumerateObjectsWithOptions:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'o^Z'}}}}}}) - r(b'PHFetchResultChangeDetails', b'hasIncrementalChanges', {'retval': {'type': 'Z'}}) - r(b'PHFetchResultChangeDetails', b'hasMoves', {'retval': {'type': 'Z'}}) - r(b'PHImageManager', b'requestImageDataForAsset:options:resultHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'I'}, 4: {'type': b'@'}}}}}}) - r(b'PHImageManager', b'requestImageForAsset:targetSize:contentMode:options:resultHandler:', {'arguments': {6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'PHImageRequestOptions', b'isNetworkAccessAllowed', {'retval': {'type': 'Z'}}) - r(b'PHImageRequestOptions', b'isSynchronous', {'retval': {'type': 'Z'}}) - r(b'PHImageRequestOptions', b'progressHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'd'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}, 4: {'type': b'@'}}}}}) - r(b'PHImageRequestOptions', b'setNetworkAccessAllowed:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PHImageRequestOptions', b'setProgressHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'd'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}, 4: {'type': b'@'}}}}}}) - r(b'PHImageRequestOptions', b'setSynchronous:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PHLivePhotoEditingContext', b'frameProcessor', {'retval': {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^@'}}}}}) - r(b'PHLivePhotoEditingContext', b'prepareLivePhotoForPlaybackWithTargetSize:options:completionHandler:', 
{'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'PHLivePhotoEditingContext', b'saveLivePhotoToOutput:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'PHLivePhotoEditingContext', b'setFrameProcessor:', {'arguments': {2: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^@'}}}}}}) - r(b'PHObjectChangeDetails', b'assetContentChanged', {'retval': {'type': 'Z'}}) - r(b'PHObjectChangeDetails', b'objectWasDeleted', {'retval': {'type': 'Z'}}) - r(b'PHPhotoLibrary', b'performChanges:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}}}}) - r(b'PHPhotoLibrary', b'performChangesAndWait:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'PHPhotoLibrary', b'requestAuthorization:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'PHProject', b'hasProjectPreview', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Photos/_metadata.pyc b/env/lib/python2.7/site-packages/Photos/_metadata.pyc deleted file mode 100644 index 2b53b648..00000000 Binary files a/env/lib/python2.7/site-packages/Photos/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PhotosUI/_PhotosUI.so b/env/lib/python2.7/site-packages/PhotosUI/_PhotosUI.so deleted file mode 100755 index d9501614..00000000 Binary files a/env/lib/python2.7/site-packages/PhotosUI/_PhotosUI.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PhotosUI/__init__.py b/env/lib/python2.7/site-packages/PhotosUI/__init__.py deleted file mode 100644 index f9636e90..00000000 --- a/env/lib/python2.7/site-packages/PhotosUI/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the Photos framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation - -from PhotosUI import _metadata -import PhotosUI._PhotosUI - -sys.modules['PhotosUI'] = mod = objc.ObjCLazyModule( - "PhotosUI", - "com.apple.photosui", - objc.pathForFramework("/System/Library/Frameworks/PhotosUI.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['PhotosUI._metadata'] diff --git a/env/lib/python2.7/site-packages/PhotosUI/__init__.pyc b/env/lib/python2.7/site-packages/PhotosUI/__init__.pyc deleted file mode 100644 index 0fed776e..00000000 Binary files a/env/lib/python2.7/site-packages/PhotosUI/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PhotosUI/_metadata.py b/env/lib/python2.7/site-packages/PhotosUI/_metadata.py deleted file mode 100644 index 48f33c45..00000000 --- a/env/lib/python2.7/site-packages/PhotosUI/_metadata.py +++ /dev/null @@ -1,57 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Feb 10 12:08:33 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$PHProjectCategoryBook$PHProjectCategoryCalendar$PHProjectCategoryCard$PHProjectCategoryOther$PHProjectCategoryPrints$PHProjectCategorySlideshow$PHProjectCategoryUndefined$PHProjectCategoryWallDecor$PHProjectTypeUndefined$''' -enums = '''$PHLivePhotoViewContentModeAspectFill@1$PHLivePhotoViewContentModeAspectFit@0$PHLivePhotoViewPlaybackStyleFull@1$PHLivePhotoViewPlaybackStyleHint@2$PHLivePhotoViewPlaybackStyleUndefined@0$PHProjectCreationSourceAlbum@2$PHProjectCreationSourceMemory@3$PHProjectCreationSourceMoment@4$PHProjectCreationSourceProject@20$PHProjectCreationSourceProjectBook@21$PHProjectCreationSourceProjectCalendar@22$PHProjectCreationSourceProjectCard@23$PHProjectCreationSourceProjectExtension@26$PHProjectCreationSourceProjectPrintOrder@24$PHProjectCreationSourceProjectSlideshow@25$PHProjectCreationSourceUndefined@0$PHProjectCreationSourceUserSelection@1$PHProjectSectionTypeAuxiliary@3$PHProjectSectionTypeContent@2$PHProjectSectionTypeCover@1$PHProjectSectionTypeUndefined@0$PHProjectTextElementTypeBody@0$PHProjectTextElementTypeSubtitle@2$PHProjectTextElementTypeTitle@1$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'beginProjectWithExtensionContext:projectInfo:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'canHandleAdjustmentData:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'cancelContentEditing', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'extensionWillDiscardDataSource', {'required': False, 'retval': {'type': b'v'}}) - r(b'NSObject', b'finishContentEditingWithCompletionHandler:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'finishProjectWithCompletionHandler:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'callable': {'retval': 
{'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'footerTextForSubtypesOfProjectType:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'invalidateFooterTextForSubtypesOfProjectType:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'invalidateTypeDescriptionForProjectType:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'livePhotoView:didEndPlaybackWithStyle:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'livePhotoView:willBeginPlaybackWithStyle:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'resumeProjectWithExtensionContext:completion:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'shouldShowCancelConfirmation', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'startContentEditingWithInput:placeholderImage:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'subtypesForProjectType:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'supportedProjectTypes', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'typeDescriptionDataSourceForCategory:invalidator:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'typeDescriptionForProjectType:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'PHLivePhotoView', b'isMuted', {'retval': {'type': 'Z'}}) - r(b'PHLivePhotoView', b'setMuted:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PHLivePhotoView', b'stopPlaybackAnimated:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PHProjectAssetElement', b'horizontallyFlipped', {'retval': {'type': b'Z'}}) - r(b'PHProjectAssetElement', b'verticallyFlipped', {'retval': {'type': b'Z'}}) - r(b'PHProjectExtensionContext', b'updatedProjectInfoFromProjectInfo:completion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'PHProjectInfo', b'brandingEnabled', {'retval': {'type': b'Z'}}) - r(b'PHProjectInfo', b'pageNumbersEnabled', {'retval': {'type': b'Z'}}) - r(b'PHProjectTypeDescription', b'canProvideSubtypes', {'retval': {'type': b'Z'}}) - r(b'PHProjectTypeDescription', b'initWithProjectType:title:attributedDescription:image:canProvideSubtypes:', {'arguments': {6: {'type': b'Z'}}}) - r(b'PHProjectTypeDescription', b'initWithProjectType:title:description:image:canProvideSubtypes:', {'arguments': {6: {'type': b'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/PhotosUI/_metadata.pyc b/env/lib/python2.7/site-packages/PhotosUI/_metadata.pyc deleted file mode 100644 index 7a3546bb..00000000 Binary files a/env/lib/python2.7/site-packages/PhotosUI/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PreferencePanes/__init__.py b/env/lib/python2.7/site-packages/PreferencePanes/__init__.py deleted file mode 100644 index a0a6465a..00000000 --- 
a/env/lib/python2.7/site-packages/PreferencePanes/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the PreferencePanes framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import AppKit - -from PreferencePanes import _metadata - -sys.modules['PreferencePanes'] = mod = objc.ObjCLazyModule('PreferencePanes', - "com.apple.frameworks.preferencepanes", - objc.pathForFramework("/System/Library/Frameworks/PreferencePanes.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( AppKit,)) - -import sys -del sys.modules['PreferencePanes._metadata'] diff --git a/env/lib/python2.7/site-packages/PreferencePanes/__init__.pyc b/env/lib/python2.7/site-packages/PreferencePanes/__init__.pyc deleted file mode 100644 index 45209bd5..00000000 Binary files a/env/lib/python2.7/site-packages/PreferencePanes/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PreferencePanes/_metadata.py b/env/lib/python2.7/site-packages/PreferencePanes/_metadata.py deleted file mode 100644 index a1ef1850..00000000 --- a/env/lib/python2.7/site-packages/PreferencePanes/_metadata.py +++ /dev/null @@ -1,31 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Jul 18 12:18:57 2016 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$NSPrefPaneHelpMenuAnchorKey$NSPrefPaneHelpMenuInfoPListKey$NSPrefPaneHelpMenuTitleKey$NSPreferencePaneCancelUnselectNotification$NSPreferencePaneDoUnselectNotification$NSPreferencePaneSwitchToPaneNotification$NSPreferencePaneUpdateHelpMenuNotification$NSPreferencePrefPaneIsAvailableNotification$''' -enums = '''$NSUnselectCancel@0$NSUnselectLater@2$NSUnselectNow@1$''' -misc.update({'kNSPrefPaneHelpMenuAnchorKey': b'anchor'.decode("utf-8"), 'kNSPrefPaneHelpMenuTitleKey': b'title'.decode("utf-8"), 'kNSPrefPaneHelpMenuInfoPListKey': b'NSPrefPaneHelpAnchors'.decode("utf-8")}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSPreferencePane', b'autoSaveTextFields', {'retval': {'type': 'Z'}}) - r(b'NSPreferencePane', b'isSelected', {'retval': {'type': 'Z'}}) - r(b'NSPreferencePane', b'replyToShouldUnselect:', {'arguments': {2: {'type': 'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/PreferencePanes/_metadata.pyc b/env/lib/python2.7/site-packages/PreferencePanes/_metadata.pyc deleted file mode 100644 index 3bc0f8f2..00000000 Binary files a/env/lib/python2.7/site-packages/PreferencePanes/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PrintCore/__init__.py b/env/lib/python2.7/site-packages/PrintCore/__init__.py deleted file mode 100644 index 13157147..00000000 --- a/env/lib/python2.7/site-packages/PrintCore/__init__.py +++ /dev/null @@ -1,47 +0,0 @@ -''' -Python mapping for the PrintCore framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import Cocoa - -from PrintCore import _metadata - -sys.modules['PrintCore'] = mod = objc.ObjCLazyModule('PrintCore', - "com.apple.ApplicationServices", - objc.pathForFramework("/System/Library/Frameworks/ApplicationServices.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Cocoa, )) - -import sys -import functools -del sys.modules['PrintCore._metadata'] - -# -# PMRetain and PMRelease are "generic" functions -# where the argument can be an instance of a number -# of PrintCore types. -# -# The code below ensures these functions actually -# work as expected. -# -_PMRetain = mod.PMRetain -_PMRelease = mod.PMRelease - -@functools.wraps(_PMRetain) -def PMRetain(value): - return _PMRetain(value.__pointer__) - -@functools.wraps(_PMRelease) -def PMRelease(value): - return _PMRelease(value.__pointer__) - -mod.PMRetain = PMRetain -mod.PMRelease = PMRelease diff --git a/env/lib/python2.7/site-packages/PrintCore/__init__.pyc b/env/lib/python2.7/site-packages/PrintCore/__init__.pyc deleted file mode 100644 index c46633d7..00000000 Binary files a/env/lib/python2.7/site-packages/PrintCore/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PrintCore/_metadata.py b/env/lib/python2.7/site-packages/PrintCore/_metadata.py deleted file mode 100644 index 3f811fd0..00000000 --- a/env/lib/python2.7/site-packages/PrintCore/_metadata.py +++ /dev/null @@ -1,40 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Feb 17 23:48:47 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'PMPaperMargins': objc.createStructType('PMPaperMargins', b'{PMRect=dddd}', ['top', 'left', 'bottom', 'right']), 'PMResolution': objc.createStructType('PMResolution', b'{PMResolution=dd}', ['hRes', 'vRes']), 'PMLanguageInfo': objc.createStructType('PMLanguageInfo', b'{PMLanguageInfo=[33C][33C][33C]}', ['level', 'version', 'release']), 'PMRect': objc.createStructType('PMRect', b'{PMRect=dddd}', ['top', 'left', 'bottom', 'right'])}) -constants = '''$$''' -enums = 
'''$kAllPPDDomains@1$kCUPSPPDDomain@6$kLocalPPDDomain@3$kNetworkPPDDomain@4$kPMAllocationFailure@-108$kPMBorderDoubleHairline@2$kPMBorderDoubleThickline@4$kPMBorderSingleHairline@1$kPMBorderSingleThickline@3$kPMCMYKColorSpaceModel@3$kPMCVMSymbolNotFound@-9662$kPMCancel@128$kPMCloseFailed@-9785$kPMColorSpaceModelCount@4$kPMCoverPageAfter@3$kPMCoverPageBefore@2$kPMCoverPageNone@1$kPMCreateMessageFailed@-9620$kPMDataFormatXMLCompressed@2$kPMDataFormatXMLDefault@0$kPMDataFormatXMLMinimal@1$kPMDeleteSubTicketFailed@-9585$kPMDestinationFax@3$kPMDestinationFile@2$kPMDestinationInvalid@0$kPMDestinationPreview@4$kPMDestinationPrinter@1$kPMDestinationProcessPDF@5$kPMDevNColorSpaceModel@4$kPMDocumentNotFound@-9644$kPMDontSwitchPDEError@-9531$kPMDuplexNoTumble@2$kPMDuplexNone@1$kPMDuplexTumble@3$kPMEditRequestFailed@-9544$kPMFeatureNotInstalled@-9533$kPMFileOrDirOperationFailed@-9634$kPMFontNameTooLong@-9704$kPMFontNotFound@-9703$kPMGeneralCGError@-9705$kPMGeneralError@-30870$kPMGrayColorSpaceModel@1$kPMHideInlineItems@0$kPMIOAttrNotAvailable@-9787$kPMIOMSymbolNotFound@-9661$kPMInternalError@-30870$kPMInvalidAllocator@-30890$kPMInvalidCVMContext@-9665$kPMInvalidCalibrationTarget@-30898$kPMInvalidConnection@-30887$kPMInvalidFileType@-30895$kPMInvalidIOMContext@-9664$kPMInvalidIndex@-30882$kPMInvalidItem@-30892$kPMInvalidJobID@-9666$kPMInvalidJobTemplate@-30885$kPMInvalidKey@-30888$kPMInvalidLookupSpec@-9542$kPMInvalidObject@-30896$kPMInvalidPBMRef@-9540$kPMInvalidPDEContext@-9530$kPMInvalidPMContext@-9663$kPMInvalidPageFormat@-30876$kPMInvalidPaper@-30897$kPMInvalidParameter@-50$kPMInvalidPreset@-30899$kPMInvalidPrintSession@-30879$kPMInvalidPrintSettings@-30875$kPMInvalidPrinter@-30880$kPMInvalidPrinterAddress@-9780$kPMInvalidPrinterInfo@-30886$kPMInvalidReply@-30894$kPMInvalidState@-9706$kPMInvalidSubTicket@-9584$kPMInvalidTicket@-30891$kPMInvalidType@-30893$kPMInvalidValue@-30889$kPMItemIsLocked@-9586$kPMJobBusy@-9642$kPMJobCanceled@-9643$kPMJobGetTicketBadFormatError@-9672$kPMJobGetTicketReadError@-9673$kPMJobManagerAborted@-9671$kPMJobNotFound@-9641$kPMJobStreamEndError@-9670$kPMJobStreamOpenFailed@-9668$kPMJobStreamReadFailed@-9669$kPMKeyNotFound@-9589$kPMKeyNotUnique@-9590$kPMKeyOrValueNotFound@-9623$kPMLandscape@2$kPMLastErrorCodeToMakeMaintenanceOfThisListEasier@-9799$kPMLayoutBottomTopLeftRight@7$kPMLayoutBottomTopRightLeft@8$kPMLayoutLeftRightBottomTop@2$kPMLayoutLeftRightTopBottom@1$kPMLayoutRightLeftBottomTop@4$kPMLayoutRightLeftTopBottom@3$kPMLayoutTopBottomLeftRight@5$kPMLayoutTopBottomRightLeft@6$kPMLockIgnored@-30878$kPMMessagingError@-9624$kPMNoDefaultItem@-9500$kPMNoDefaultPrinter@-30872$kPMNoDefaultSettings@-9501$kPMNoError@0$kPMNoPrinterJobID@-9667$kPMNoSelectedPrinters@-9541$kPMNoSuchEntry@-30874$kPMNotImplemented@-30873$kPMObjectInUse@-30881$kPMOpenFailed@-9781$kPMOutOfScope@-30871$kPMPMSymbolNotFound@-9660$kPMPageToPaperMappingNone@1$kPMPageToPaperMappingScaleToFit@2$kPMPaperTypeCoated@2$kPMPaperTypeGlossy@4$kPMPaperTypePlain@1$kPMPaperTypePremium@3$kPMPaperTypeTShirt@6$kPMPaperTypeTransparency@5$kPMPaperTypeUnknown@0$kPMPermissionError@-9636$kPMPluginNotFound@-9701$kPMPluginRegisterationFailed@-9702$kPMPortrait@1$kPMPrBrowserNoUI@-9545$kPMPrintAllPages@-1$kPMPrinterIdle@3$kPMPrinterProcessing@4$kPMPrinterStopped@5$kPMQualityBest@13$kPMQualityDraft@4$kPMQualityHighest@15$kPMQualityInkSaver@1$kPMQualityLowest@0$kPMQualityNormal@8$kPMQualityPhoto@11$kPMQueueAlreadyExists@-9639$kPMQueueJobFailed@-9640$kPMQueueNotFound@-9638$kPMRGBColorSpaceModel@2$kPMReadFailed@-9782$kPMReadGotZeroD
ata@-9788$kPMReverseLandscape@4$kPMReversePortrait@3$kPMScalingCenterOnImgArea@6$kPMScalingCenterOnPaper@5$kPMScalingPinBottomLeft@3$kPMScalingPinBottomRight@4$kPMScalingPinTopLeft@1$kPMScalingPinTopRight@2$kPMServerAlreadyRunning@-9631$kPMServerAttributeRestricted@-9633$kPMServerCommunicationFailed@-9621$kPMServerNotFound@-9630$kPMServerSuspended@-9632$kPMShowDefaultInlineItems@32768$kPMShowInlineCopies@1$kPMShowInlineOrientation@8$kPMShowInlinePageRange@2$kPMShowInlinePageRangeWithSelection@64$kPMShowInlinePaperSize@4$kPMShowInlineScale@128$kPMShowPageAttributesPDE@256$kPMSimplexTumble@4$kPMStatusFailed@-9784$kPMStringConversionFailure@-30883$kPMSubTicketNotFound@-9583$kPMSyncRequestFailed@-9543$kPMTemplateIsLocked@-9588$kPMTicketIsLocked@-9587$kPMTicketTypeNotFound@-9580$kPMUnableToFindProcess@-9532$kPMUnexpectedImagingError@-9707$kPMUnknownColorSpaceModel@0$kPMUnknownDataType@-9591$kPMUnknownMessage@-9637$kPMUnlocked@0$kPMUnsupportedConnection@-9786$kPMUpdateTicketFailed@-9581$kPMUserOrGroupNotFound@-9635$kPMValidateTicketFailed@-9582$kPMValueOutOfRange@-30877$kPMWriteFailed@-9783$kPMXMLParseError@-30884$kSystemPPDDomain@2$kUserPPDDomain@5$''' -misc.update({'kPMNoPrintSettings': None, 'kPMMirrorStr': b'mirror', 'kPMFaxCoverSheetMessageStr': b'faxCoverSheetMessage', 'kPMUseOptionalAccountIDStr': b'com.apple.print.PrintSettings.PMUseOptionalAccountID', 'kPMSecondaryPaperFeedStr': b'com.apple.print.PrintSettings.PMSecondaryPaperFeed', 'kPMTotalSidesImagedStr': b'com.apple.print.PrintSettings.PMTotalSidesImaged', 'kPMSchedulerPDEKindID': b'com.apple.print.pde.SchedulerKind'.decode("utf-8"), 'kPMServerLocal': None, 'kPMJobPINPDEKindID': b'com.apple.print.pde.jobPIN'.decode("utf-8"), 'kPMPageToPaperMappingTypeStr': b'com.apple.print.PageToPaperMappingType', 'SUMMARY_DISPLAY_ORDER': b'Summary, Display, Order'.decode("utf-8"), 'kPMColorPDEKindID': b'com.apple.print.pde.ColorKind'.decode("utf-8"), 'kPMBorderTypeStr': b'com.apple.print.PrintSettings.PMBorderType', 'kPMPrinterFeaturesPDEKindID': b'com.apple.print.pde.PrinterFeaturesKind'.decode("utf-8"), 'kPMGraphicsContextCoreGraphics': b'com.apple.graphicscontext.coregraphics'.decode("utf-8"), 'kPMColorSyncProfileIDStr': b'com.apple.print.PrintSettings.PMColorSyncProfileID', 'kPMFaxToneDialingStr': b'faxToneDialing', 'kPDFWorkflowItemsKey': b'items'.decode("utf-8"), 'kPMCoverPagePDEKindID': b'com.apple.print.pde.CoverPageKind'.decode("utf-8"), 'kPMFaxSubjectLabelStr': b'faxSubjectLabel', 'kGeneralPageSetupDialogTypeIDStr': b'6E6ED964-B738-11D3-952F-0050E4603277'.decode("utf-8"), 'kPMCopiesAndPagesPDEKindID': b'com.apple.print.pde.CopiesAndPagesKind'.decode("utf-8"), 'kAppPageSetupDialogTypeIDStr': b'B9A0DA98-E57F-11D3-9E83-0050E4603277'.decode("utf-8"), 'kPMDocumentFormatDefault': b'com.apple.documentformat.default'.decode("utf-8"), 'kPDFWorkflowDisplayNameKey': b'displayName'.decode("utf-8"), 'kPMNoPageFormat': None, 'kPMImagingOptionsPDEKindID': b'com.apple.print.pde.ImagingOptionsKind'.decode("utf-8"), 'kPDFWorkflowModifiedKey': b'wasModifiedInline'.decode("utf-8"), 'kPMPrintSelectionOnlyStr': b'com.apple.print.PrintSettings.PMPrintSelectionOnly', 'kPMDestinationPrinterIDStr': b'DestinationPrinterID', 'kPMJobPriorityStr': b'com.apple.print.PrintSettings.PMJobPriority', 'kPMLayoutTileOrientationStr': b'com.apple.print.PrintSettings.PMLayoutTileOrientation', 'kPrinterModuleTypeIDStr': b'BDB091F4-E57F-11D3-B5CC-0050E4603277'.decode("utf-8"), 'kPMInkPDEKindID': b'com.apple.print.pde.InkKind'.decode("utf-8"), 'kPMSandboxCompatiblePDEs': 
b'PMSandboxCompatiblePDEs'.decode("utf-8"), 'kPMPrimaryPaperFeedStr': b'com.apple.print.PrintSettings.PMPrimaryPaperFeed', 'kPMFaxUseSoundStr': b'faxUseSound', 'kPMPSErrorHandlerStr': b'com.apple.print.PrintSettings.PMPSErrorHandler', 'kPMTotalBeginPagesStr': b'com.apple.print.PrintSettings.PMTotalBeginPages', 'kPMBorderStr': b'com.apple.print.PrintSettings.PMBorder', 'kPMGraphicsContextDefault': b'com.apple.graphicscontext.default'.decode("utf-8"), 'kPMPrintSelectionTitleKey': b'com.apple.printSelection.title'.decode("utf-8"), 'kDialogExtensionIntfIDStr': b'A996FD7E-B738-11D3-8519-0050E4603277'.decode("utf-8"), 'kPMInlineWorkflowStr': b'inlineWorkflow', 'kPMLayoutColumnsStr': b'com.apple.print.PrintSettings.PMLayoutColumns', 'kPMPageSetStr': b'page-set', 'kPMFaxModemPDEKindID': b'com.apple.print.pde.FaxModemKind'.decode("utf-8"), 'kPMPageToPaperMediaNameStr': b'com.apple.print.PageToPaperMappingMediaName', 'kPMPageAttributesKindID': b'com.apple.print.pde.PageAttributesKind'.decode("utf-8"), 'kPMLayoutNUpStr': b'com.apple.print.PrintSettings.PMLayoutNUp', 'kPMLayoutPDEKindID': b'com.apple.print.pde.LayoutUserOptionKind'.decode("utf-8"), 'kAppPrintDialogTypeIDStr': b'BCB07250-E57F-11D3-8CA6-0050E4603277'.decode("utf-8"), 'kPMUseOptionalPINStr': b'com.apple.print.PrintSettings.PMUseOptionalPIN', 'kPMErrorHandlingPDEKindID': b'com.apple.print.pde.ErrorHandlingKind'.decode("utf-8"), 'kPMApplicationColorMatchingStr': b'AP_ApplicationColorMatching', 'kPMPresetGraphicsTypeAll': b'All'.decode("utf-8"), 'kPMCopyCollateStr': b'com.apple.print.PrintSettings.PMCopyCollate', 'kPMPSTraySwitchStr': b'com.apple.print.PrintSettings.PMPSTraySwitch', 'kPMOutputFilenameStr': b'com.apple.print.PrintSettings.PMOutputFilename', 'kPMUnsupportedPDEKindID': b'com.apple.print.pde.UnsupportedPDEKind'.decode("utf-8"), 'kPMCustomProfilePathStr': b'PMCustomProfilePath', 'kPMLayoutDirectionStr': b'com.apple.print.PrintSettings.PMLayoutDirection', 'kPMFaxCoverPagePDEKindID': b'com.apple.print.pde.FaxCoverPageKind'.decode("utf-8"), 'kPMFaxSheetsLabelStr': b'faxSheetsLabel', 'kPMFaxToLabelStr': b'faxToLabel', 'kPMFaxWaitForDialToneStr': b'faxWaitForDialTone', 'kGeneralPrintDialogTypeIDStr': b'C1BF838E-B72A-11D3-9644-0050E4603277'.decode("utf-8"), 'kPMJobStateStr': b'com.apple.print.PrintSettings.PMJobState', 'kPMLayoutRowsStr': b'com.apple.print.PrintSettings.PMLayoutRows', 'kPMDocumentFormatPDF': b'application/pdf'.decode("utf-8"), 'kPMOutputOptionsPDEKindID': b'com.apple.print.pde.OutputOptionsKind'.decode("utf-8"), 'kPDFWorkflowItemURLKey': b'itemURL'.decode("utf-8"), 'kPMPPDDescriptionType': b'PMPPDDescriptionType'.decode("utf-8"), 'kPMPresetGraphicsTypeNone': b'None'.decode("utf-8"), 'kPMCopiesStr': b'com.apple.print.PrintSettings.PMCopies', 'kPMPresetGraphicsTypeGeneral': b'General'.decode("utf-8"), 'kPMColorMatchingModeStr': b'AP_ColorMatchingMode', 'kAppPrintThumbnailTypeIDStr': b'9320FE03-B5D5-11D5-84D1-003065D6135E'.decode("utf-8"), 'kPMFaxAddressesPDEKindID': b'com.apple.print.pde.FaxAddressesKind'.decode("utf-8"), 'kPMFitToPageStr': b'fit-to-page', 'kPMFaxSubjectStr': b'faxSubject', 'kPMPresetGraphicsTypeKey': b'com.apple.print.preset.graphicsType'.decode("utf-8"), 'kPMCustomPaperSizePDEKindID': b'com.apple.print.pde.CustomPaperSizeKind'.decode("utf-8"), 'kPMJobHoldUntilTimeStr': b'com.apple.print.PrintSettings.PMJobHoldUntilTime', 'kPMCoverPageStr': b'com.apple.print.PrintSettings.PMCoverPage', 'kPMDuplexPDEKindID': b'com.apple.print.pde.DuplexKind'.decode("utf-8"), 'kPMFaxDateLabelStr': b'faxDateLabel', 
'kPMPageToPaperMappingAllowScalingUpStr': b'com.apple.print.PageToPaperMappingAllowScalingUp', 'kPMFaxNumberStr': b'phone', 'kPMCoverPageSourceStr': b'com.apple.print.PrintSettings.PMCoverPageSource', 'kPMDestinationTypeStr': b'com.apple.print.PrintSettings.PMDestinationType', 'kPMFaxPrefixStr': b'faxPrefix', 'kPMVendorColorMatchingStr': b'AP_VendorColorMatching', 'kPMPriorityPDEKindID': b'com.apple.print.pde.PriorityKind'.decode("utf-8"), 'kPMPaperSourcePDEKindID': b'com.apple.print.pde.PaperSourceKind'.decode("utf-8"), 'kPMPaperFeedPDEKindID': b'com.apple.print.pde.PaperFeedKind'.decode("utf-8"), 'kPMUniPrinterPDEKindID': b'com.apple.print.pde.UniPrinterKind'.decode("utf-8"), 'kPMColorMatchingPDEKindID': b'com.apple.print.pde.ColorMatchingKind'.decode("utf-8"), 'kPMFaxFromLabelStr': b'faxFromLabel', 'kPMMediaQualityPDEKindID': b'com.apple.print.pde.MediaQualityPDEKind'.decode("utf-8"), 'kPMDocumentFormatPostScript': b'application/postscript'.decode("utf-8"), 'kPMPresetGraphicsTypePhoto': b'Photo'.decode("utf-8"), 'kPMDuplexingStr': b'com.apple.print.PrintSettings.PMDuplexing', 'kPMPaperHandlingPDEKindID': b'com.apple.print.pde.PaperHandlingKind'.decode("utf-8"), 'kPMFaxToStr': b'faxTo', 'kPMSummaryPanelKindID': b'com.apple.print.pde.SummaryKind'.decode("utf-8"), 'kPMRotationScalingPDEKindID': b'com.apple.print.pde.RotationScalingKind'.decode("utf-8"), 'kPMPDFEffectsPDEKindID': b'com.apple.print.pde.PDFEffects'.decode("utf-8"), 'kPMOutputOrderStr': b'OutputOrder', 'kPMFaxCoverSheetStr': b'faxCoverSheet'}) -functions={'PMSetPageRange': (b'i^{OpaquePMPrintSettings=}II',), 'PMPaperGetPPDPaperName': (b'i^{OpaquePMPaper=}^^{__CFString=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMGetCopies': (b'i^{OpaquePMPrintSettings=}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPrinterGetOutputResolution': (b'i^{OpaquePMPrinter=}^{OpaquePMPrintSettings=}^{PMResolution=dd}', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'PMPrinterCopyPresets': (b'i^{OpaquePMPrinter=}^^{__CFArray=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMGetLastPage': (b'i^{OpaquePMPrintSettings=}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPaperGetWidth': (b'i^{OpaquePMPaper=}^d', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMGetPageFormatPaper': (b'i^{OpaquePMPageFormat=}^^{OpaquePMPaper=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPrinterIsDefault': (b'Z^{OpaquePMPrinter=}',), 'PMGetPageFormatExtendedData': (b'i^{OpaquePMPageFormat=}I^I^v', '', {'arguments': {2: {'type_modifier': 'N'}, 3: {'c_array_length_in_arg': 2, 'type_modifier': 'o'}}}), 'PMPaperCreateLocalizedName': (b'i^{OpaquePMPaper=}^{OpaquePMPrinter=}^^{__CFString=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMSessionError': (b'i^{OpaquePMPrintSession=}',), 'PMPresetGetAttributes': (b'i^{OpaquePMPreset=}^^{__CFDictionary=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPrinterGetPaperList': (b'i^{OpaquePMPrinter=}^^{__CFArray=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSessionCopyDestinationFormat': (b'i^{OpaquePMPrintSession=}^{OpaquePMPrintSettings=}^^{__CFString=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMPrinterSendCommand': (b'i^{OpaquePMPrinter=}^{__CFString=}^{__CFString=}^{__CFDictionary=}',), 'PMPrinterIsPostScriptPrinter': (b'i^{OpaquePMPrinter=}^Z', '', 
{'arguments': {1: {'type_modifier': 'o'}}}), 'PMSessionCreatePrinterList': (b'i^{OpaquePMPrintSession=}^^{__CFArray=}^q^^{OpaquePMPrinter=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'PMGetFirstPage': (b'i^{OpaquePMPrintSettings=}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMWorkflowSubmitPDFWithSettings': (b'i^{__CFURL=}^{OpaquePMPrintSettings=}^{__CFURL=}',), 'PMCGImageCreateWithEPSDataProvider': (b'^{CGImage=}^{CGDataProvider=}^{CGImage=}', '', {'retval': {'already_cfretained': True}}), 'PMPrintSettingsCopyAsDictionary': (b'i^{OpaquePMPrintSettings=}^^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMPrinterPrintWithProvider': (b'i^{OpaquePMPrinter=}^{OpaquePMPrintSettings=}^{OpaquePMPageFormat=}^{__CFString=}^{CGDataProvider=}',), 'PMGetPageRange': (b'i^{OpaquePMPrintSettings=}^I^I', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'PMSessionEndPageNoDialog': (b'i^{OpaquePMPrintSession=}',), 'PMGetOrientation': (b'i^{OpaquePMPageFormat=}^S', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPageFormatCreateWithDataRepresentation': (b'i^{__CFData=}^^{OpaquePMPageFormat=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}}}), 'PMGetCollate': (b'i^{OpaquePMPrintSettings=}^Z', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPresetCreatePrintSettings': (b'i^{OpaquePMPreset=}^{OpaquePMPrintSession=}^^{OpaquePMPrintSettings=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'type_modifier': 'o'}}}), 'PMSetCollate': (b'i^{OpaquePMPrintSettings=}Z',), 'PMPrinterIsRemote': (b'i^{OpaquePMPrinter=}^Z', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPrintSettingsGetJobName': (b'i^{OpaquePMPrintSettings=}^^{__CFString=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSetDuplex': (b'i^{OpaquePMPrintSettings=}I',), 'PMPrinterGetMimeTypes': (b'i^{OpaquePMPrinter=}^{OpaquePMPrintSettings=}^^{__CFArray=}', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'PMPrinterIsPostScriptCapable': (b'Z^{OpaquePMPrinter=}',), 'PMPrintSettingsSetJobName': (b'i^{OpaquePMPrintSettings=}^{__CFString=}',), 'PMSessionCopyOutputFormatList': (b'i^{OpaquePMPrintSession=}S^^{__CFArray=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMSetPageFormatExtendedData': (b'i^{OpaquePMPageFormat=}II^v', '', {'arguments': {3: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'PMPrinterCreateFromPrinterID': (b'^{OpaquePMPrinter=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'PMPrinterIsFavorite': (b'Z^{OpaquePMPrinter=}',), 'PMPrintSettingsCreateWithDataRepresentation': (b'i^{__CFData=}^^{OpaquePMPrintSettings=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}}}), 'PMPageFormatCreateDataRepresentation': (b'i^{OpaquePMPageFormat=}^^{__CFData=}I', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMSessionDefaultPageFormat': (b'i^{OpaquePMPrintSession=}^{OpaquePMPageFormat=}',), 'PMCopyPrintSettings': (b'i^{OpaquePMPrintSettings=}^{OpaquePMPrintSettings=}', '', {'retval': {'already_cfretained': True}}), 'PMPrinterGetCommInfo': (b'i^{OpaquePMPrinter=}^Z^Z', '', {'arguments': {1: {'type_modifier': 'o'}, 2: 
{'type_modifier': 'o'}}}), 'PMPrinterGetLocation': (b'^{__CFString=}^{OpaquePMPrinter=}',), 'PMPageFormatGetPrinterID': (b'i^{OpaquePMPageFormat=}^^{__CFString=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMRetain': (b'i^v',), 'PMSessionGetCGGraphicsContext': (b'i^{OpaquePMPrintSession=}^^{CGContext=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSessionSetCurrentPMPrinter': (b'i^{OpaquePMPrintSession=}^{OpaquePMPrinter=}',), 'PMPrinterGetID': (b'^{__CFString=}^{OpaquePMPrinter=}',), 'PMPaperIsCustom': (b'Z^{OpaquePMPaper=}',), 'PMGetUnadjustedPageRect': (b'i^{OpaquePMPageFormat=}^{PMRect=dddd}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSetOrientation': (b'i^{OpaquePMPageFormat=}SZ',), 'PMCreateGenericPrinter': (b'i^^{OpaquePMPrinter=}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}}}), 'PMGetAdjustedPaperRect': (b'i^{OpaquePMPageFormat=}^{PMRect=dddd}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMCopyLocalizedPPD': (b'i^{__CFURL=}^^{__CFURL=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMPaperGetID': (b'i^{OpaquePMPaper=}^^{__CFString=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPaperGetHeight': (b'i^{OpaquePMPaper=}^d', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPrinterCopyDeviceURI': (b'i^{OpaquePMPrinter=}^^{__CFURL=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMPaperCreateCustom': (b'i^{OpaquePMPrinter=}^{__CFString=}^{__CFString=}dd^{PMRect=dddd}^^{OpaquePMPaper=}', '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'type_modifier': 'n'}, 6: {'type_modifier': 'o'}}}), 'PMSessionEndDocumentNoDialog': (b'i^{OpaquePMPrintSession=}',), 'PMServerCreatePrinterList': (b'i^{OpaquePMServer=}^^{__CFArray=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMPrinterGetDriverCreator': (b'i^{OpaquePMPrinter=}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSessionCopyDestinationLocation': (b'i^{OpaquePMPrintSession=}^{OpaquePMPrintSettings=}^^{__CFURL=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMPrinterGetIndexedPrinterResolution': (b'i^{OpaquePMPrinter=}I^{PMResolution=dd}', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'PMSessionCreatePageFormatList': (b'i^{OpaquePMPrintSession=}^{OpaquePMPrinter=}^^{__CFArray=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMPrinterCopyHostName': (b'i^{OpaquePMPrinter=}^^{__CFString=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMSetScale': (b'i^{OpaquePMPageFormat=}d',), 'PMPrinterPrintWithFile': (b'i^{OpaquePMPrinter=}^{OpaquePMPrintSettings=}^{OpaquePMPageFormat=}^{__CFString=}^{__CFURL=}',), 'PMSessionSetDataInSession': (b'i^{OpaquePMPrintSession=}^{__CFString=}@',), 'PMPaperGetPrinterID': (b'i^{OpaquePMPaper=}^^{__CFString=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSessionValidatePrintSettings': (b'i^{OpaquePMPrintSession=}^{OpaquePMPrintSettings=}^Z', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'PMPaperGetMargins': (b'i^{OpaquePMPaper=}^{PMRect=dddd}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPrinterCopyState': (b'i^{OpaquePMPrinter=}^^{__CFDictionary=}', '', 
{'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMPrintSettingsGetValue': (b'i^{OpaquePMPrintSettings=}^{__CFString=}^@', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'PMCreatePageFormat': (b'i^^{OpaquePMPageFormat=}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}}}), 'PMSessionBeginPageNoDialog': (b'i^{OpaquePMPrintSession=}^{OpaquePMPageFormat=}^{PMRect=dddd}',), 'PMPrinterGetLanguageInfo': (b'i^{OpaquePMPrinter=}^{PMLanguageInfo=[33C][33C][33C]}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSetLastPage': (b'i^{OpaquePMPrintSettings=}IZ',), 'PMCopyPPDData': (b'i^{__CFURL=}^^{__CFData=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMSessionGetDataFromSession': (b'i^{OpaquePMPrintSession=}^{__CFString=}^@', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'PMPrintSettingsCreateDataRepresentation': (b'i^{OpaquePMPrintSettings=}^^{__CFData=}I', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMPrinterGetDriverReleaseInfo': (b'i^{OpaquePMPrinter=}^{VersRec={NumVersion=CCCC}s[256C][256C]}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSessionSetError': (b'i^{OpaquePMPrintSession=}i',), 'PMSessionSetDestination': (b'i^{OpaquePMPrintSession=}^{OpaquePMPrintSettings=}S^{__CFString=}^{__CFURL=}',), 'PMSessionBeginCGDocumentNoDialog': (b'i^{OpaquePMPrintSession=}^{OpaquePMPrintSettings=}^{OpaquePMPageFormat=}',), 'PMSessionGetDestinationType': (b'i^{OpaquePMPrintSession=}^{OpaquePMPrintSettings=}^S', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'PMCreateSession': (b'i^^{OpaquePMPrintSession=}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}}}), 'PMServerLaunchPrinterBrowser': (b'i^{OpaquePMServer=}^{__CFDictionary=}',), 'PMCopyPageFormat': (b'i^{OpaquePMPageFormat=}^{OpaquePMPageFormat=}', '', {'retval': {'already_cfretained': True}}), 'PMPrinterGetName': (b'^{__CFString=}^{OpaquePMPrinter=}',), 'PMCopyAvailablePPDs': (b'iS^^{__CFArray=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMSessionDefaultPrintSettings': (b'i^{OpaquePMPrintSession=}^{OpaquePMPrintSettings=}',), 'PMPrinterWritePostScriptToURL': (b'i^{OpaquePMPrinter=}^{OpaquePMPrintSettings=}^{OpaquePMPageFormat=}^{__CFString=}^{__CFURL=}^{__CFURL=}',), 'PMPrinterSetDefault': (b'i^{OpaquePMPrinter=}',), 'PMPrinterSetOutputResolution': (b'i^{OpaquePMPrinter=}^{OpaquePMPrintSettings=}^{PMResolution=dd}', '', {'arguments': {2: {'type_modifier': 'n'}}}), 'PMPrinterGetState': (b'i^{OpaquePMPrinter=}^S', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMWorkflowSubmitPDFWithOptions': (b'i^{__CFURL=}^{__CFString=}^c^{__CFURL=}', '', {'arguments': {2: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'PMGetScale': (b'i^{OpaquePMPageFormat=}^d', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSetFirstPage': (b'i^{OpaquePMPrintSettings=}IZ',), 'PMWorkflowCopyItems': (b'i^^{__CFArray=}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMGetAdjustedPageRect': (b'i^{OpaquePMPageFormat=}^{PMRect=dddd}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPrinterCopyDescriptionURL': (b'i^{OpaquePMPrinter=}^{__CFString=}^^{__CFURL=}', '', {'retval': {'already_cfretained': True}, 'arguments': 
{2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMCreatePrintSettings': (b'i^^{OpaquePMPrintSettings=}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}}}), 'PMPrintSettingsSetValue': (b'i^{OpaquePMPrintSettings=}^{__CFString=}@Z',), 'PMGetDuplex': (b'i^{OpaquePMPrintSettings=}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPrinterGetMakeAndModelName': (b'i^{OpaquePMPrinter=}^^{__CFString=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPresetCopyName': (b'i^{OpaquePMPreset=}^^{__CFString=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMSessionGetCurrentPrinter': (b'i^{OpaquePMPrintSession=}^^{OpaquePMPrinter=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMPrinterGetPrinterResolutionCount': (b'i^{OpaquePMPrinter=}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSetCopies': (b'i^{OpaquePMPrintSettings=}IZ',), 'PMGetUnadjustedPaperRect': (b'i^{OpaquePMPageFormat=}^{PMRect=dddd}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'PMSessionValidatePageFormat': (b'i^{OpaquePMPrintSession=}^{OpaquePMPageFormat=}^Z', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'PMPrintSettingsCopyKeys': (b'i^{OpaquePMPrintSettings=}^^{__CFArray=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'PMCreatePageFormatWithPMPaper': (b'i^^{OpaquePMPageFormat=}^{OpaquePMPaper=}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'type_modifier': 'o'}}}), 'PMRelease': (b'i^v',)} -aliases = {'kPMPrintTimeAEType': 'cLongDateTime', 'kPMDontWantData': 'objc.NULL', 'kPMNoReference': 'objc.NULL', 'kPMLastPageAEType': 'typeSInt32', 'kPMFeatureAEType': 'typeAEList', 'kPMDestinationTypeDefault': 'kPMDestinationPrinter', 'kPMAllocationFailure': 'memFullErr', 'kPMNoError': 'noErr', 'kPMInternalError': 'kPMGeneralError', 'kPMPDFWorkFlowAEType': 'typeUTF8Text', 'kPMFaxNumberAEType': 'typeChar', 'kPMErrorHandlingAEType': 'typeEnumerated', 'kPMSaveAsPDFAEType': 'typeFileURL', 'kPMDuplexDefault': 'kPMDuplexNone', 'kPMDontWantBoolean': 'objc.NULL', 'kPMTargetPrinterAEType': 'typeChar', 'kPMCollateAEType': 'typeBoolean', 'kPMCopieAEType': 'typeSInt32', 'kPMFirstPageAEType': 'typeSInt32', 'kPMLayoutDownAEType': 'typeSInt32', 'kPMNoData': 'objc.NULL', 'kPMDontWantSize': 'objc.NULL', 'kPMSaveAsPSAEType': 'typeFileURL', 'kPMPresetAEType': 'typeUTF8Text', 'kPMLayoutAcrossAEType': 'typeSInt32', 'kPMInvalidParameter': 'paramErr'} -misc.update({'PMPrintSettings': objc.createOpaquePointerType('PMPrintSettings', b'^{OpaquePMPrintSettings}'), 'PMPrintSession': objc.createOpaquePointerType('PMPrintSession', b'^{OpaquePMPrintSession}'), 'PMPageFormat': objc.createOpaquePointerType('PMPageFormat', b'^{OpaquePMPageFormat}'), 'PMPaper': objc.createOpaquePointerType('PMPaper', b'^{OpaquePMPaper}'), 'PMPreset': objc.createOpaquePointerType('PMPreset', b'^{OpaquePMPreset}'), 'PMPrinter': objc.createOpaquePointerType('PMPrinter', b'^{OpaquePMPrinter}'), 'PMServer': objc.createOpaquePointerType('PMServer', b'^{OpaquePMServer}')}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'initWithBundle:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'printWindowWillClose:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSObject', b'restoreValuesAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'saveValuesAndReturnError:', 
{'retval': {'type': 'Z'}, 'arguments': {2: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'shouldHide', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'shouldPrint', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'shouldShowHelp', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'willChangePPDOptionKeyValue:ppdChoice:', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {'kPMLayoutTileOrientationKey': "kPMLayoutTileOrientationStr.decode('utf-8')", 'kPMColorSyncProfileIDKey': "kPMColorSyncProfileIDStr.decode('utf-8')", 'kPMFaxSheetsLabelKey': "kPMFaxSheetsLabelStr.decode('utf-8')", 'kPMUseOptionalAccountIDKey': "kPMUseOptionalAccountIDStr.decode('utf-8')", 'kPMJobPriorityKey': "kPMJobPriorityStr.decode('utf-8')", 'kPMLayoutNUpKey': "kPMLayoutNUpStr.decode('utf-8')", 'kPMCoverPageDefault': '(kPMCoverPageNone)', 'kPMPSTraySwitchKey': "kPMPSTraySwitchStr.decode('utf-8')", 'kPMColorMatchingModeKey': "kPMColorMatchingModeStr.decode('utf-8')", 'kPMJobStateKey': "kPMJobStateStr.decode('utf-8')", 'kPMPrintSelectionOnlyKey': "kPMPrintSelectionOnlyStr.decode('utf-8')", 'kPMFaxNumberKey': "kPMFaxNumberStr.decode('utf-8')", 'kPMLayoutRowsKey': "kPMLayoutRowsStr.decode('utf-8')", 'kPMFitToPageKey': "kPMFitToPageStr.decode('utf-8')", 'kPMDestinationPrinterIDKey': "kPMDestinationPrinterIDStr.decode('utf-8')", 'kPMFaxDateLabelKey': "kPMFaxDateLabelStr.decode('utf-8')", 'kPMFaxSubjectLabelKey': "kPMFaxSubjectLabelStr.decode('utf-8')", 'kPMBorderTypeKey': "kPMBorderTypeStr.decode('utf-8')", 'kPMUseOptionalPINKey': "kPMUseOptionalPINStr.decode('utf-8')", 'kPMFaxSubjectKey': "kPMFaxSubjectStr.decode('utf-8')", 'kPMTotalSidesImagedKey': "kPMTotalSidesImagedStr.decode('utf-8')", 'kPMPageSetKey': 'CFSTR(kPMPageSetStr)', 'kPMCoverPageKey': "kPMCoverPageStr.decode('utf-8')", 'kPMFaxFromLabelKey': "kPMFaxFromLabelStr.decode('utf-8')", 'kPMSecondaryPaperFeedKey': "kPMSecondaryPaperFeedStr.decode('utf-8')", 'kPMFaxWaitForDialToneKey': "kPMFaxWaitForDialToneStr.decode('utf-8')", 'kPMLayoutDirectionKey': "kPMLayoutDirectionStr.decode('utf-8')", 'kPMDestinationTypeKey': "kPMDestinationTypeStr.decode('utf-8')", 'kPMDuplexingKey': "kPMDuplexingStr.decode('utf-8')", 'kPMCopiesKey': "kPMCopiesStr.decode('utf-8')", 'kPMVendorColorMatching': "kPMVendorColorMatchingStr.decode('utf-8')", 'kPMFaxPrefixKey': "kPMFaxPrefixStr.decode('utf-8')", 'kPMFaxCoverSheetMessageKey': "kPMFaxCoverSheetMessageStr.decode('utf-8')", 'kPMFaxToLabelKey': "kPMFaxToLabelStr.decode('utf-8')", 'kPMFaxToneDialingKey': "kPMFaxToneDialingStr.decode('utf-8')", 'kPMPrimaryPaperFeedKey': "kPMPrimaryPaperFeedStr.decode('utf-8')", 'kPMCoverPageSourceKey': "kPMCoverPageSourceStr.decode('utf-8')", 'kPMOutputFilenameKey': "kPMOutputFilenameStr.decode('utf-8')", 'kPMOutputOrderKey': 'CFSTR(kPMOutputOrderStr)', 'kPMCopyCollateKey': "kPMCopyCollateStr.decode('utf-8')", 'kPMInlineWorkflowKey': "kPMInlineWorkflowStr.decode('utf-8')", 'kPMPageToPaperMappingAllowScalingUpKey': 'CFSTR(kPMPageToPaperMappingAllowScalingUpStr)', 'kPMFaxUseSoundKey': "kPMFaxUseSoundStr.decode('utf-8')", 'kPMBorderKey': "kPMBorderStr.decode('utf-8')", 'kPMFaxToKey': "kPMFaxToStr.decode('utf-8')", 'kPMApplicationColorMatching': "kPMApplicationColorMatchingStr.decode('utf-8')", 'kPMFaxCoverSheetKey': "kPMFaxCoverSheetStr.decode('utf-8')", 'kPMJobHoldUntilTimeKey': "kPMJobHoldUntilTimeStr.decode('utf-8')", 'kPMMirrorKey': 'CFSTR(kPMMirrorStr)', 'kPMLayoutColumnsKey': "kPMLayoutColumnsStr.decode('utf-8')", 'kPMTotalBeginPagesKey': 
"kPMTotalBeginPagesStr.decode('utf-8')", 'kPMPageToPaperMediaNameKey': 'CFSTR(kPMPageToPaperMediaNameStr)', 'kPMCustomProfilePathKey': "kPMCustomProfilePathStr.decode('utf-8')", 'kPMPSErrorHandlerKey': "kPMPSErrorHandlerStr.decode('utf-8')", 'kPMPageToPaperMappingTypeKey': 'CFSTR(kPMPageToPaperMappingTypeStr)'} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/PrintCore/_metadata.pyc b/env/lib/python2.7/site-packages/PrintCore/_metadata.pyc deleted file mode 100644 index d3768437..00000000 Binary files a/env/lib/python2.7/site-packages/PrintCore/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PubSub/__init__.py b/env/lib/python2.7/site-packages/PubSub/__init__.py deleted file mode 100644 index 2683caf2..00000000 --- a/env/lib/python2.7/site-packages/PubSub/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the PubSub framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Foundation - -from PubSub import _metadata - -sys.modules['PubSub'] = mod = objc.ObjCLazyModule('PubSub', - "com.apple.PubSub", - objc.pathForFramework("/System/Library/Frameworks/PubSub.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation,)) - -import sys -del sys.modules['PubSub._metadata'] diff --git a/env/lib/python2.7/site-packages/PubSub/__init__.pyc b/env/lib/python2.7/site-packages/PubSub/__init__.pyc deleted file mode 100644 index 22e4c3e6..00000000 Binary files a/env/lib/python2.7/site-packages/PubSub/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PubSub/_metadata.py b/env/lib/python2.7/site-packages/PubSub/_metadata.py deleted file mode 100644 index 2b07d3e7..00000000 --- a/env/lib/python2.7/site-packages/PubSub/_metadata.py +++ /dev/null @@ -1,59 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Sep 24 10:35:14 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$PSEnclosureDownloadStateDidChangeNotification$PSErrorDomain$PSFeedAddedEntriesKey$PSFeedDidChangeEntryFlagsKey$PSFeedEntriesChangedNotification$PSFeedRefreshingNotification$PSFeedRemovedEntriesKey$PSFeedUpdatedEntriesKey$''' -enums = '''$PSAtomFormat@2$PSEnclosureDownloadDidFail@4$PSEnclosureDownloadDidFinish@3$PSEnclosureDownloadIsActive@2$PSEnclosureDownloadIsIdle@0$PSEnclosureDownloadIsQueued@1$PSEnclosureDownloadWasDeleted@5$PSFeedSettingsUnlimitedSize@0$PSInternalError@1$PSLinkToAlternate@7$PSLinkToAtom@2$PSLinkToAtomService@3$PSLinkToFOAF@4$PSLinkToOther@0$PSLinkToRSD@5$PSLinkToRSS@1$PSLinkToSelf@6$PSNotAFeedError@2$PSRSSFormat@1$PSUnknownFormat@0$''' -misc.update({'PSFeedSettingsIntervalNever': -1.0, 'PSFeedSettingsAllTypes': None, 'PSFeedSettingsIntervalDefault': 0.0}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'PSClient', b'addFeed:', {'retval': {'type': 'Z'}}) - r(b'PSClient', b'isPrivate', {'retval': {'type': 'Z'}}) - r(b'PSClient', b'removeFeed:', {'retval': {'type': 'Z'}}) - r(b'PSClient', b'setPrivate:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PSEnclosure', b'download:', {'retval': {'type': 'Z'}, 'arguments': {2: 
{'type_modifier': b'o'}}}) - r(b'PSEntry', b'isCurrent', {'retval': {'type': 'Z'}}) - r(b'PSEntry', b'isFlagged', {'retval': {'type': 'Z'}}) - r(b'PSEntry', b'isRead', {'retval': {'type': 'Z'}}) - r(b'PSEntry', b'setCurrent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PSEntry', b'setFlagged:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PSEntry', b'setRead:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PSFeed', b'XMLRepresentationWithEntries:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PSFeed', b'isRefreshing', {'retval': {'type': 'Z'}}) - r(b'PSFeed', b'refresh:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'PSFeedSettings', b'downloadsEnclosures', {'retval': {'type': 'Z'}}) - r(b'PSFeedSettings', b'refreshesInBackground', {'retval': {'type': 'Z'}}) - r(b'PSFeedSettings', b'setDownloadsEnclosures:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PSFeedSettings', b'setRefreshesInBackground:', {'arguments': {2: {'type': 'Z'}}}) -finally: - objc._updatingMetadata(False) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'enclosure:downloadStateDidChange:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'i'}}}) - r(b'NSObject', b'feed:didAddEntries:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'feed:didChangeFlagsInEntries:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'feed:didRemoveEntriesWithIdentifiers:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'feed:didUpdateEntries:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'feedDidBeginRefresh:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'feedDidEndRefresh:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -protocols={'PSClientDelegate': objc.informal_protocol('PSClientDelegate', [objc.selector(None, b'feed:didUpdateEntries:', b'v@:@@', isRequired=False), objc.selector(None, b'feedDidBeginRefresh:', b'v@:@', isRequired=False), objc.selector(None, b'enclosure:downloadStateDidChange:', b'v@:@i', isRequired=False), objc.selector(None, b'feed:didRemoveEntriesWithIdentifiers:', b'v@:@@', isRequired=False), objc.selector(None, b'feedDidEndRefresh:', b'v@:@', isRequired=False), objc.selector(None, b'feed:didAddEntries:', b'v@:@@', isRequired=False), objc.selector(None, b'feed:didChangeFlagsInEntries:', b'v@:@@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/PubSub/_metadata.pyc b/env/lib/python2.7/site-packages/PubSub/_metadata.pyc deleted file mode 100644 index 265b8987..00000000 Binary files a/env/lib/python2.7/site-packages/PubSub/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/AppCategories.py b/env/lib/python2.7/site-packages/PyObjCTools/AppCategories.py deleted file mode 100644 index b76dc13d..00000000 --- a/env/lib/python2.7/site-packages/PyObjCTools/AppCategories.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -A number of usefull categories on AppKit classes -""" -__all__ = () -import objc -from AppKit import NSGraphicsContext, NSAnimationContext - -class _ctxHelper(object): - def __enter__(self): - NSGraphicsContext.saveGraphicsState() - - def __exit__(self, exc_type, exc_value, exc_tb): - NSGraphicsContext.restoreGraphicsState() - return False - - -class 
NSGraphicsContext (objc.Category(NSGraphicsContext)): - @classmethod - def savedGraphicsState(self): - return _ctxHelper() - - -@objc.python_method -def __enter__(cls): - cls.beginGrouping() - -@objc.python_method -def __exit__(cls, exc_type, exc_value, exc_tb): - cls.endGrouping() - - -# Cannot use a category here because these special methods -# must be defined on the metaclass. -type(NSAnimationContext).__enter__ = __enter__ -type(NSAnimationContext).__exit__ = __exit__ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/AppCategories.pyc b/env/lib/python2.7/site-packages/PyObjCTools/AppCategories.pyc deleted file mode 100644 index 3d23078f..00000000 Binary files a/env/lib/python2.7/site-packages/PyObjCTools/AppCategories.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/AppHelper.py b/env/lib/python2.7/site-packages/PyObjCTools/AppHelper.py deleted file mode 100644 index ccc65971..00000000 --- a/env/lib/python2.7/site-packages/PyObjCTools/AppHelper.py +++ /dev/null @@ -1,330 +0,0 @@ -"""AppKit helpers. - -Exported functions: -* runEventLoop - run NSApplicationMain in a safer way -* runConsoleEventLoop - run NSRunLoop.run() in a stoppable manner -* stopEventLoop - stops the event loop or terminates the application -* endSheetMethod - set correct signature for NSSheet callbacks -* callAfter - call a function on the main thread (async) -* callLater - call a function on the main thread after a delay (async) -""" - -__all__ = ( 'runEventLoop', 'runConsoleEventLoop', 'stopEventLoop', 'endSheetMethod', 'callAfter', 'callLater' ) - -from AppKit import (NSApp, NSRunAlertPanel, NSApplicationMain, - NSApplicationDidFinishLaunchingNotification) - -from Foundation import ( - NSAutoreleasePool, - NSDate, - NSDefaultRunLoopMode, - NSLog, - NSNotificationCenter, - NSObject, - NSRunLoop, - NSTimer, - NSThread, -) - -import os -import sys -import traceback -import objc -from objc import super - -PY3K = (sys.version_info[0] == 3) - -class PyObjCMessageRunner(NSObject): - """ - Wraps a Python function and its arguments and allows it to be posted to the - MainThread's `NSRunLoop`. - """ - def initWithPayload_(self, payload): - """ - Designated initializer. - """ - self = super(PyObjCMessageRunner, self).init() - if not self: - return None - - self._payload = payload - - return self - - def callAfter(self): - """ - Posts a message to the Main thread, to be executed immediately. - """ - self.performSelectorOnMainThread_withObject_waitUntilDone_( - self.scheduleCallWithDelay_, None, False, - ) - - def callLater_(self, delay): - """ - Posts a message to the Main thread, to be executed after the given - delay, in seconds. - """ - self.performSelectorOnMainThread_withObject_waitUntilDone_( - self.scheduleCallWithDelay_, delay, False, - ) - - def scheduleCallWithDelay_(self, delay): - """ - This is run once we're on the Main thread. - """ - assert NSThread.isMainThread(), "Call is not executing on the Main thread!" - - # There's no delay, just run the call now. - if not delay: - self.performCall() - return - - # There's a delay, schedule it for later. - self.performSelector_withObject_afterDelay_( - self.performCall, None, delay, - ) - - def performCall(self): - """ - Actually runs the payload. - """ - assert NSThread.isMainThread(), "Call is not executing on the Main thread!" - - # Unpack the payload. - (func, args, kwargs) = self._payload - - # Run it. - func(*args, **kwargs) - -def callAfter(func, *args, **kwargs): - """ - Call a function on the Main thread (async). 
- """ - pool = NSAutoreleasePool.alloc().init() - runner = PyObjCMessageRunner.alloc().initWithPayload_((func, args, kwargs)) - runner.callAfter() - del runner - del pool - -def callLater(delay, func, *args, **kwargs): - """ - Call a function on the Main thread after a delay (async). - """ - pool = NSAutoreleasePool.alloc().init() - runner = PyObjCMessageRunner.alloc().initWithPayload_((func, args, kwargs)) - runner.callLater_(delay) - del runner - del pool - -class PyObjCAppHelperApplicationActivator(NSObject): - - def activateNow_(self, aNotification): - NSApp().activateIgnoringOtherApps_(True) - - -class PyObjCAppHelperRunLoopStopper(NSObject): - singletons = {} - - def currentRunLoopStopper(cls): - runLoop = NSRunLoop.currentRunLoop() - return cls.singletons.get(runLoop) - currentRunLoopStopper = classmethod(currentRunLoopStopper) - - def init(self): - self = super(PyObjCAppHelperRunLoopStopper, self).init() - self.shouldStop = False - return self - - def shouldRun(self): - return not self.shouldStop - - def addRunLoopStopper_toRunLoop_(cls, runLoopStopper, runLoop): - if runLoop in cls.singletons: - raise ValueError("Stopper already registered for this runLoop") - cls.singletons[runLoop] = runLoopStopper - addRunLoopStopper_toRunLoop_ = classmethod(addRunLoopStopper_toRunLoop_) - - def removeRunLoopStopperFromRunLoop_(cls, runLoop): - if runLoop not in cls.singletons: - raise ValueError("Stopper not registered for this runLoop") - del cls.singletons[runLoop] - removeRunLoopStopperFromRunLoop_ = classmethod(removeRunLoopStopperFromRunLoop_) - - def stop(self): - self.shouldStop = True - # this should go away when/if runEventLoop uses - # runLoop iteration - if NSApp() is not None: - NSApp().terminate_(self) - - def performStop_(self, sender): - self.stop() - - -def stopEventLoop(): - """ - Stop the current event loop if possible - returns True if it expects that it was successful, False otherwise - """ - stopper = PyObjCAppHelperRunLoopStopper.currentRunLoopStopper() - if stopper is None: - if NSApp() is not None: - NSApp().terminate_(None) - return True - return False - NSTimer.scheduledTimerWithTimeInterval_target_selector_userInfo_repeats_( - 0.0, - stopper, - 'performStop:', - None, - False) - return True - - -def endSheetMethod(meth): - """ - Return a selector that can be used as the delegate callback for - sheet methods - """ - return objc.selector(meth, signature=b'v@:@' + objc._C_NSInteger + objc._C_NSInteger) - - -def unexpectedErrorAlertPanel(): - exceptionInfo = traceback.format_exception_only( - *sys.exc_info()[:2])[0].strip() - return NSRunAlertPanel("An unexpected error has occurred", - "%@", - "Continue", "Quit", None, "(%s)" % exceptionInfo) - - -def unexpectedErrorAlertPdb(): - import pdb - traceback.print_exc() - pdb.post_mortem(sys.exc_info()[2]) - return True - - -def machInterrupt(signum): - stopper = PyObjCAppHelperRunLoopStopper.currentRunLoopStopper() - if stopper is not None: - stopper.stop() - elif NSApp() is not None: - NSApp().terminate_(None) - else: - import os - os._exit(1) - - -def installMachInterrupt(): - try: - import signal - from PyObjCTools import MachSignals - except: - return - MachSignals.signal(signal.SIGINT, machInterrupt) - - -def runConsoleEventLoop(argv=None, installInterrupt=False, mode=NSDefaultRunLoopMode, maxTimeout=3.0): - if argv is None: - argv = sys.argv - if installInterrupt: - installMachInterrupt() - runLoop = NSRunLoop.currentRunLoop() - stopper = PyObjCAppHelperRunLoopStopper.alloc().init() - 
PyObjCAppHelperRunLoopStopper.addRunLoopStopper_toRunLoop_(stopper, runLoop) - try: - - while stopper.shouldRun(): - nextfire = runLoop.limitDateForMode_(mode) - if not stopper.shouldRun(): - break - - soon = NSDate.dateWithTimeIntervalSinceNow_(maxTimeout) - nextfire = nextfire.earlierDate_(soon) - if not runLoop.runMode_beforeDate_(mode, nextfire): - stopper.stop() - - finally: - PyObjCAppHelperRunLoopStopper.removeRunLoopStopperFromRunLoop_(runLoop) - - -RAISETHESE = (SystemExit, MemoryError, KeyboardInterrupt) - - -def runEventLoop(argv=None, unexpectedErrorAlert=None, installInterrupt=None, pdb=None, main=NSApplicationMain): - """Run the event loop, ask the user if we should continue if an - exception is caught. Use this function instead of NSApplicationMain(). - """ - if argv is None: - argv = sys.argv - - if pdb is None: - pdb = 'USE_PDB' in os.environ - - if pdb: - from PyObjCTools import Debugging - Debugging.installVerboseExceptionHandler() - # bring it to the front, starting from terminal - # often won't - activator = PyObjCAppHelperApplicationActivator.alloc().init() - NSNotificationCenter.defaultCenter().addObserver_selector_name_object_( - activator, - 'activateNow:', - NSApplicationDidFinishLaunchingNotification, - None, - ) - else: - Debugging = None - - if installInterrupt is None and pdb: - installInterrupt = True - - if unexpectedErrorAlert is None: - if pdb: - unexpectedErrorAlert = unexpectedErrorAlertPdb - else: - unexpectedErrorAlert = unexpectedErrorAlertPanel - - runLoop = NSRunLoop.currentRunLoop() - stopper = PyObjCAppHelperRunLoopStopper.alloc().init() - PyObjCAppHelperRunLoopStopper.addRunLoopStopper_toRunLoop_(stopper, runLoop) - - firstRun = NSApp() is None - try: - - while stopper.shouldRun(): - try: - if firstRun: - firstRun = False - if installInterrupt: - installMachInterrupt() - main(argv) - else: - NSApp().run() - except RAISETHESE: - traceback.print_exc() - break - except: - exctype, e, tb = sys.exc_info() - objc_exception = False - if isinstance(e, objc.error): - if PY3K: - error_str = str(e) - else: - error_str = unicode(str(e), 'utf-8', 'replace') - - NSLog("%@", error_str) - elif not unexpectedErrorAlert(): - NSLog("%@", "An exception has occured:") - traceback.print_exc() - sys.exit(0) - else: - NSLog("%@", "An exception has occured:") - traceback.print_exc() - else: - break - - finally: - if Debugging is not None: - Debugging.removeExceptionHandler() - PyObjCAppHelperRunLoopStopper.removeRunLoopStopperFromRunLoop_(runLoop) diff --git a/env/lib/python2.7/site-packages/PyObjCTools/AppHelper.pyc b/env/lib/python2.7/site-packages/PyObjCTools/AppHelper.pyc deleted file mode 100644 index d0c801a7..00000000 Binary files a/env/lib/python2.7/site-packages/PyObjCTools/AppHelper.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/Conversion.py b/env/lib/python2.7/site-packages/PyObjCTools/Conversion.py deleted file mode 100644 index 36b07b85..00000000 --- a/env/lib/python2.7/site-packages/PyObjCTools/Conversion.py +++ /dev/null @@ -1,200 +0,0 @@ -""" -Conversion.py -- Tools for converting between Python and Objective-C objects. - -Conversion offers API to convert between Python and Objective-C instances of -various classes. Currently, the focus is on Python and Objective-C -collections. 
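For reference, a minimal sketch of how the thread-marshalling helpers from the deleted PyObjCTools.AppHelper module were typically used. It assumes the pyobjc package is installed and a Cocoa run loop is active on the main thread; update_label() is a made-up callback used only for illustration.

    from PyObjCTools import AppHelper

    def update_label(text):
        # Hypothetical callback that must run on the main (UI) thread
        print("main thread got:", text)

    # From a worker thread, schedule calls back onto the main thread:
    AppHelper.callAfter(update_label, "done")        # run as soon as possible
    AppHelper.callLater(2.0, update_label, "later")  # run after a 2 second delay

    # A Cocoa application would normally be started with AppHelper.runEventLoop()
    # instead of calling NSApplicationMain() directly.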
-""" - -__all__ = [ - 'pythonCollectionFromPropertyList', 'propertyListFromPythonCollection', - 'serializePropertyList', 'deserializePropertyList', - 'toPythonDecimal', 'fromPythonDecimal', -] - -import Foundation -import datetime -import time -import sys -try: - import decimal -except ImportError: - decimal = None - -try: - unicode -except NameError: - unicode = str - -PY3K = (sys.version_info[0] == 3) - -try: - PYTHON_TYPES = ( - basestring, bool, int, float, long, list, tuple, dict, set, - datetime.date, datetime.datetime, bool, buffer, type(None), - ) -except NameError: - PYTHON_TYPES = ( - str, bool, int, float, list, tuple, dict, set, - datetime.date, datetime.datetime, bool, type(None), bytes, - ) - basestring = str - -DECIMAL_LOCALE = Foundation.NSDictionary.dictionaryWithObject_forKey_( - '.', 'NSDecimalSeparator') - -def toPythonDecimal(aNSDecimalNumber): - """ - Convert a NSDecimalNumber to a Python decimal.Decimal - """ - return decimal.Decimal( - aNSDecimalNumber.descriptionWithLocale_(DECIMAL_LOCALE)) - -def fromPythonDecimal(aPythonDecimal): - """ - Convert a Python decimal.Decimal to a NSDecimalNumber - """ - if PY3K: - value_str = str(aPythonDecimal) - else: - value_str = unicode(aPythonDecimal) - - return Foundation.NSDecimalNumber.decimalNumberWithString_locale_( - value_str, DECIMAL_LOCALE) - -FORMATS = dict( - xml=Foundation.NSPropertyListXMLFormat_v1_0, - binary=Foundation.NSPropertyListBinaryFormat_v1_0, - ascii=Foundation.NSPropertyListOpenStepFormat, # Not actually supported! -) - -def serializePropertyList(aPropertyList, format='xml'): - """ - Serialize a property list to an NSData object. Format is one of the - following strings: - - xml (default): - NSPropertyListXMLFormat_v1_0, the XML representation - - binary: - NSPropertyListBinaryFormat_v1_0, the efficient binary representation - - ascii: - NSPropertyListOpenStepFormat, the old-style ASCII property list - - It is expected that this property list is comprised of Objective-C - objects. In most cases Python data structures will work, but - decimal.Decimal and datetime.datetime objects are not transparently - bridged so it will fail in that case. If you expect to have these - objects in your property list, then use propertyListFromPythonCollection - before serializing it. - """ - try: - formatOption = FORMATS[format] - except KeyError: - raise ValueError("Invalid format: %s" % (format,)) - data, err = Foundation.NSPropertyListSerialization.dataFromPropertyList_format_errorDescription_(aPropertyList, formatOption, None) - if err is not None: - # braindead API! - errStr = err.encode('utf-8') - err.release() - raise ValueError(errStr) - return data - -def deserializePropertyList(propertyListData): - """ - Deserialize a property list from a NSData, str, unicode or buffer - - Returns an Objective-C property list. - """ - if isinstance(propertyListData, str): - propertyListData = buffer(propertyListData) - elif isinstance(propertyListData, unicode): - propertyListData = buffer(propertyListData.encode('utf-8')) - plist, fmt, err = Foundation.NSPropertyListSerialization.propertyListFromData_mutabilityOption_format_errorDescription_(propertyListData, Foundation.NSPropertyListMutableContainers, None, None) - if err is not None: - # braindead API! - errStr = err.encode('utf-8') - err.release() - raise ValueError(errStr) - return plist - -def propertyListFromPythonCollection(aPyCollection, conversionHelper=None): - """ - Convert a Python collection (dict, list, tuple, string) into an - Objective-C collection. 
- - If conversionHelper is defined, it must be a callable. It will be called - for any object encountered for which propertyListFromPythonCollection() - cannot automatically convert the object. The supplied helper function - should convert the object and return the converted form. If the conversion - helper cannot convert the type, it should raise an exception or return - None. - """ - if isinstance(aPyCollection, dict): - collection = Foundation.NSMutableDictionary.dictionary() - for aKey in aPyCollection: - if not isinstance(aKey, basestring): - raise TypeError("Property list keys must be strings") - convertedValue = propertyListFromPythonCollection( - aPyCollection[aKey], conversionHelper=conversionHelper) - collection[aKey] = convertedValue - return collection - elif isinstance(aPyCollection, (list, tuple)): - collection = Foundation.NSMutableArray.array() - for aValue in aPyCollection: - convertedValue = propertyListFromPythonCollection(aValue, - conversionHelper=conversionHelper) - collection.append(aValue) - return collection - elif isinstance(aPyCollection, (datetime.datetime, datetime.date)): - return Foundation.NSDate.dateWithTimeIntervalSince1970_( - time.mktime(aPyCollection.timetuple())) - elif decimal is not None and isinstance(aPyCollection, decimal.Decimal): - return fromPythonDecimal(aPyCollection) - elif isinstance(aPyCollection, PYTHON_TYPES): - # bridge will convert - return aPyCollection - elif conversionHelper is not None: - return conversionHelper(aPyCollection) - raise TypeError("Type '%s' encountered in Python collection; don't know how to convert." % type(aPyCollection)) - - -def pythonCollectionFromPropertyList(aCollection, conversionHelper=None): - """ - Converts a Foundation based property list into a Python - collection (all members will be instances or subclasses of standard Python - types) - - Like propertyListFromPythonCollection(), conversionHelper is an optional - callable that will be invoked any time an encountered object cannot be - converted. - """ - if isinstance(aCollection, Foundation.NSDictionary): - pyCollection = {} - for k in aCollection: - if not isinstance(k, basestring): - raise TypeError("Property list keys must be strings") - convertedValue = pythonCollectionFromPropertyList( - aCollection[k], conversionHelper) - pyCollection[k] = convertedValue - return pyCollection - elif isinstance(aCollection, Foundation.NSArray): - return [ - pythonCollectionFromPropertyList(item, conversionHelper) - for item in aCollection - ] - elif isinstance(aCollection, Foundation.NSData): - return buffer(aCollection) - elif isinstance(aCollection, Foundation.NSDate): - return datetime.datetime.fromtimestamp( - aCollection.timeIntervalSince1970()) - elif isinstance(aCollection, Foundation.NSDecimalNumber) and decimal is not None: - return toPythonDecimal(aCollection) - elif aCollection is Foundation.NSNull.null(): - return None - elif isinstance(aCollection, PYTHON_TYPES): - return aCollection - elif conversionHelper: - return conversionHelper(aCollection) - raise TypeError("Type '%s' encountered in ObjC collection; don't know how to convert." 
% type(aCollection)) diff --git a/env/lib/python2.7/site-packages/PyObjCTools/Conversion.pyc b/env/lib/python2.7/site-packages/PyObjCTools/Conversion.pyc deleted file mode 100644 index f8a57b84..00000000 Binary files a/env/lib/python2.7/site-packages/PyObjCTools/Conversion.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/Debugging.py b/env/lib/python2.7/site-packages/PyObjCTools/Debugging.py deleted file mode 100644 index cfd1c623..00000000 --- a/env/lib/python2.7/site-packages/PyObjCTools/Debugging.py +++ /dev/null @@ -1,195 +0,0 @@ -""" -Low level debugging helper for PyObjC. - -Allows you to log Python and ObjC (via atos) stack traces for NSExceptions -raised. - -General guidelines for use: - -- It's typically only useful when you log EVERY exception, because Foundation - and AppKit will swallow most of them. This means that you should never - use this module in a release build. - -- Typical use involves only calling installExceptionHandler or - installVerboseExceptionHandler. It may be removed at any time by calling - removeDebuggingHandler. -""" -from __future__ import print_function - -from Foundation import NSObject, NSLog -import objc -import os -import sys - -import traceback -from ExceptionHandling import NSExceptionHandler, NSLogUncaughtExceptionMask, NSLogAndHandleEveryExceptionMask, NSStackTraceKey - -DEFAULTMASK = NSLogUncaughtExceptionMask -EVERYTHINGMASK = NSLogAndHandleEveryExceptionMask - - -__all__ = [ - 'LOGSTACKTRACE', 'DEFAULTVERBOSITY', 'DEFAULTMASK', 'EVERYTHINGMASK', - 'installExceptionHandler', 'installVerboseExceptionHandler', - 'installPythonExceptionHandler', 'removeExceptionHandler', - 'handlerInstalled', -] - -def isPythonException(exception): - if hasattr(exception, '_pyobjc_info_'): - return False - - if not hasattr(exception, 'userInfo'): - return True - - return (exception.userInfo() or {}).get('__pyobjc_exc_type__') is not None - -def nsLogPythonException(exception): - userInfo = exception.userInfo() - NSLog('%@', '*** Python exception discarded!\n' + - ''.join(traceback.format_exception( - userInfo['__pyobjc_exc_type__'], - userInfo['__pyobjc_exc_value__'], - userInfo['__pyobjc_exc_traceback__'], - ))) - # we logged it, so don't log it for us - return False - - -_atos_command = None - -def _run_atos(stack): - global _atos_command - if _atos_command is None: - if os.path.exists('/usr/bin/atos'): - _atos_command = '/usr/bin/atos' - - if os.uname()[2].startswith('13.'): - # The atos command on OSX 10.9 gives a usage - # warning that's surpressed with the "-d" option. 
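The Conversion module removed above bridged Python collections and Cocoa property lists. A small, hypothetical round-trip sketch, assuming pyobjc is installed on macOS:

    import datetime
    from PyObjCTools import Conversion

    settings = {"interval": 5.0, "last_run": datetime.datetime(2021, 6, 20, 15, 12)}

    plist = Conversion.propertyListFromPythonCollection(settings)  # -> NSMutableDictionary
    data = Conversion.serializePropertyList(plist, format='xml')   # -> NSData (XML plist)
    roundtrip = Conversion.pythonCollectionFromPropertyList(
        Conversion.deserializePropertyList(data))                  # -> plain Python dict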
- _atos_command += ' -d' - - elif os.path.exists('/usr/bin/xcrun'): - _atos_command = '/usr/bin/xcrun atos' - - else: - return None - - return os.popen('%s -p %s %s'%(_atos_command, os.getpid(), stack)) - -def nsLogObjCException(exception): - stacktrace = None - - try: - stacktrace = exception.callStackSymbols() - - except AttributeError: - pass - - if stacktrace is None: - stack = exception.callStackReturnAddresses() - if stack: - pipe = _run_atos(" ".join(hex(v) for v in stack)) - if pipe is None: - return True - - stacktrace = pipe.readlines() - stacktrace.reverse() - pipe.close() - - if stacktrace is None: - userInfo = exception.userInfo() - stack = userInfo.get(NSStackTraceKey) - if not stack: - return True - - pipe = _run_atos(stack) - if pipe is None: - return True - - stacktrace = pipe.readlines() - stacktrace.reverse() - pipe.close() - - NSLog("%@", "*** ObjC exception '%s' (reason: '%s') discarded\n" % ( - exception.name(), exception.reason(), - ) + - 'Stack trace (most recent call last):\n' + - ''.join([(' '+line) for line in stacktrace]) - ) - return False - -LOGSTACKTRACE = 1 << 0 -DEFAULTVERBOSITY = 0 - -class PyObjCDebuggingDelegate(NSObject): - verbosity = objc.ivar('verbosity', b'i') - - def initWithVerbosity_(self, verbosity): - self = self.init() - self.verbosity = verbosity - return self - - @objc.typedSelector(b'c@:@@I') - def exceptionHandler_shouldLogException_mask_(self, sender, exception, aMask): - try: - if isPythonException(exception): - if self.verbosity & LOGSTACKTRACE: - nsLogObjCException(exception) - return nsLogPythonException(exception) - elif self.verbosity & LOGSTACKTRACE: - return nsLogObjCException(exception) - else: - return False - except: - print("*** Exception occurred during exception handler ***", - file=sys.stderr) - traceback.print_exc(sys.stderr) - return True - - @objc.typedSelector(b'c@:@@I') - def exceptionHandler_shouldHandleException_mask_(self, sender, exception, aMask): - return False - -def installExceptionHandler(verbosity=DEFAULTVERBOSITY, mask=DEFAULTMASK): - """ - Install the exception handling delegate that will log every exception - matching the given mask with the given verbosity. - """ - # we need to retain this, cause the handler doesn't - global _exceptionHandlerDelegate - delegate = PyObjCDebuggingDelegate.alloc().initWithVerbosity_(verbosity) - NSExceptionHandler.defaultExceptionHandler().setExceptionHandlingMask_(mask) - NSExceptionHandler.defaultExceptionHandler().setDelegate_(delegate) - _exceptionHandlerDelegate = delegate - -def installPythonExceptionHandler(): - """ - Install a verbose exception handling delegate that logs every exception - raised. - - Will log only Python stack traces, if available. - """ - installExceptionHandler(verbosity=DEFAULTVERBOSITY, mask=EVERYTHINGMASK) - -def installVerboseExceptionHandler(): - """ - Install a verbose exception handling delegate that logs every exception - raised. - - Will log both Python and ObjC stack traces, if available. - """ - installExceptionHandler(verbosity=LOGSTACKTRACE, mask=EVERYTHINGMASK) - -def removeExceptionHandler(): - """ - Remove the current exception handler delegate - """ - NSExceptionHandler.defaultExceptionHandler().setDelegate_(None) - NSExceptionHandler.defaultExceptionHandler().setExceptionHandlingMask_(0) - -def handlerInstalled(): - """ - Is an exception handler delegate currently installed? 
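As its docstring notes, the Debugging module is only meant for debug builds. A minimal sketch of how its hooks were used, assuming pyobjc and the ExceptionHandling framework bindings are available:

    from PyObjCTools import Debugging

    Debugging.installVerboseExceptionHandler()  # log Python and ObjC stack traces
    try:
        pass  # ... exercise Cocoa code that may raise or swallow NSExceptions ...
    finally:
        Debugging.removeExceptionHandler()      # uninstall the logging delegate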
- """ - return NSExceptionHandler.defaultExceptionHandler().delegate() is not None diff --git a/env/lib/python2.7/site-packages/PyObjCTools/Debugging.pyc b/env/lib/python2.7/site-packages/PyObjCTools/Debugging.pyc deleted file mode 100644 index 7e7f79fa..00000000 Binary files a/env/lib/python2.7/site-packages/PyObjCTools/Debugging.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/FndCategories.py b/env/lib/python2.7/site-packages/PyObjCTools/FndCategories.py deleted file mode 100644 index 881eceda..00000000 --- a/env/lib/python2.7/site-packages/PyObjCTools/FndCategories.py +++ /dev/null @@ -1,36 +0,0 @@ -""" -A number of usefull categories on Foundation classes -""" -__all__ = () -import objc -from Foundation import NSAffineTransform - - -class NSAffineTransform (objc.Category(NSAffineTransform)): - def rotateByDegrees_atPoint_(self, angle, point): - """ - Rotate the coordinatespace ``angle`` degrees around - ``point``. - """ - self.rotateByDegrees_(angle) - - tf = NSAffineTransform.transform() - tf.rotateByDegrees_(-angle) - oldPt = tf.transformPoint_(point) - oldPt.x -= point.x - oldPt.y -= point.y - self.translateXBy_yBy_(oldPt.x, oldPt.y) - - def rotateByRadians_atPoint_(self, angle, point): - """ - Rotate the coordinatespace ``angle`` radians around - ``point``. - """ - self.rotateByRadians_(angle) - - tf = NSAffineTransform.transform() - tf.rotateByRadians_(-angle) - oldPt = tf.transformPoint_(point) - oldPt.x -= point.x - oldPt.y -= point.y - self.translateXBy_yBy_(oldPt.x, oldPt.y) diff --git a/env/lib/python2.7/site-packages/PyObjCTools/FndCategories.pyc b/env/lib/python2.7/site-packages/PyObjCTools/FndCategories.pyc deleted file mode 100644 index ad96ec5d..00000000 Binary files a/env/lib/python2.7/site-packages/PyObjCTools/FndCategories.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/KeyValueCoding.py b/env/lib/python2.7/site-packages/PyObjCTools/KeyValueCoding.py deleted file mode 100644 index 1833145d..00000000 --- a/env/lib/python2.7/site-packages/PyObjCTools/KeyValueCoding.py +++ /dev/null @@ -1,390 +0,0 @@ -""" -Support for Key-Value Coding in Python. This provides a simple functional -interface to Cocoa's Key-Value coding that also works for regular Python -objects. - -Public API: - - setKey(obj, key, value) -> None - setKeyPath (obj, keypath, value) -> None - - getKey(obj, key) -> value - getKeyPath (obj, keypath) -> value - -A keypath is a string containing a sequence of keys seperated by dots. The -path is followed by repeated calls to 'getKey'. This can be used to easily -access nested attributes. - -This API is mirroring the 'getattr' and 'setattr' APIs in Python, this makes -it more natural to work with Key-Value coding from Python. It also doesn't -require changes to existing Python classes to make use of Key-Value coding, -making it easier to build applications as a platform independent core with -a Cocoa GUI layer. - -See the Cocoa documentation on the Apple developer website for more -information on Key-Value coding. The protocol is basicly used to enable -weaker coupling between the view and model layers. 
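The docstring above describes the functional Key-Value Coding API exposed by the deleted PyObjCTools.KeyValueCoding module. A minimal sketch of that API on a plain Python object; the Person and Address classes are made up for illustration, and pyobjc must be installed for the import to succeed:

    from PyObjCTools.KeyValueCoding import getKey, setKey, getKeyPath, setKeyPath

    class Address(object):
        def __init__(self):
            self.city = "Springfield"

    class Person(object):
        def __init__(self):
            self.name = "Alice"
            self.address = Address()

    p = Person()
    print(getKey(p, "name"))              # 'Alice', plain attribute access
    setKey(p, "name", "Bob")              # falls back to setattr for Python objects
    print(getKeyPath(p, "address.city"))  # 'Springfield', one key per dotted segment
    setKeyPath(p, "address.city", "Shelbyville")

The same calls forward to valueForKey_ and setValue_forKey_ when the target is an Objective-C object, which is what made the one API usable on both sides of the bridge.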
-""" -from __future__ import unicode_literals -import sys - -__all__ = ("getKey", "setKey", "getKeyPath", "setKeyPath") -if sys.version_info[0] == 2: # pragma: no 3.x cover; pragma: no branch - __all__ = tuple(str(x) for x in __all__) - - -import objc -import types -import sys -import warnings - -if sys.version_info[0] == 2: # pragma: no 3.x cover - from itertools import imap as map - import collections as collections_abc - -else: # pragma: no cover (py3k) - basestring = str - import collections.abc as collections_abc - -_null = objc.lookUpClass('NSNull').null() - -def keyCaps(s): - return s[:1].capitalize() + s[1:] - -# From http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/393090 -# Title: Binary floating point summation accurate to full precision -# Version no: 2.2 - -def msum(iterable): - "Full precision summation using multiple floats for intermediate values" - # sorted, non-overlapping partial sums - partials = [] - for x in iterable: - i = 0 - for y in partials: - if abs(x) < abs(y): - x, y = y, x - hi = x + y - lo = y - (hi - x) - if lo: - partials[i] = lo - i += 1 - x = hi - partials[i:] = [x] - return sum(partials, 0.0) - -class _ArrayOperators (object): - - @staticmethod - def avg(obj, segments): - path = '.'.join(segments) - lst = getKeyPath(obj, path) - count = len(lst) - if count == 0: - return 0.0 - return msum(float(x) if x is not _null else 0.0 for x in lst) / count - - @staticmethod - def count(obj, segments): - return len(obj) - - @staticmethod - def distinctUnionOfArrays(obj, segments): - path = '.'.join(segments) - rval = [] - s = set() - r = [] - for lst in obj: - for item in (getKeyPath(item, path) for item in lst): - try: - if item in s or item in r: - continue - rval.append(item) - s.add(item) - - except TypeError: - if item in rval: - continue - - rval.append(item) - r.append(item) - return rval - - @staticmethod - def distinctUnionOfSets(obj, segments): - path = '.'.join(segments) - rval = set() - for lst in obj: - for item in (getKeyPath(item, path) for item in lst): - rval.add(item) - return rval - - @staticmethod - def distinctUnionOfObjects(obj, segments): - path = '.'.join(segments) - rval = [] - s = set() - r = [] - for item in (getKeyPath(item, path) for item in obj): - try: - if item in s or item in r: - continue - - rval.append(item) - s.add(item) - - except TypeError: - if item in rval: - continue - - rval.append(item) - r.append(item) - return rval - - - @staticmethod - def max(obj, segments): - path = '.'.join(segments) - return max(x for x in getKeyPath(obj, path) if x is not _null) - - @staticmethod - def min(obj, segments): - path = '.'.join(segments) - return min(x for x in getKeyPath(obj, path) if x is not _null) - - @staticmethod - def sum(obj, segments): - path = '.'.join(segments) - lst = getKeyPath(obj, path) - return msum(float(x) if x is not _null else 0.0 for x in lst) - - @staticmethod - def unionOfArrays(obj, segments): - path = '.'.join(segments) - rval = [] - for lst in obj: - rval.extend(getKeyPath(item, path) for item in lst) - return rval - - - @staticmethod - def unionOfObjects(obj, segments): - path = '.'.join(segments) - return [ getKeyPath(item, path) for item in obj] - - -def getKey(obj, key): - """ - Get the attribute referenced by 'key'. The key is used - to build the name of an attribute, or attribute accessor method. 
- - The following attributes and accesors are tried (in this order): - - - Accessor 'getKey' - - Accesoor 'get_key' - - Accessor or attribute 'key' - - Accessor or attribute 'isKey' - - Attribute '_key' - - If none of these exist, raise KeyError - """ - if obj is None: - return None - if isinstance(obj, (objc.objc_object, objc.objc_class)): - return obj.valueForKey_(key) - - # check for dict-like objects - getitem = getattr(obj, '__getitem__', None) - if getitem is not None: - try: - return getitem(key) - except (KeyError, IndexError, TypeError): - pass - - # check for array-like objects - if isinstance(obj, (collections_abc.Sequence, collections_abc.Set)) and not isinstance(obj, (basestring, collections_abc.Mapping)): - def maybe_get(obj, key): - try: - return getKey(obj, key) - except KeyError: - return _null - return [maybe_get(obj, key) for obj in iter(obj)] - - try: - m = getattr(obj, "get" + keyCaps(key)) - except AttributeError: - pass - else: - return m() - - try: - m = getattr(obj, "get_" + key) - except AttributeError: - pass - else: - return m() - - for keyName in (key, "is" + keyCaps(key)): - try: - m = getattr(obj, keyName) - except AttributeError: - continue - - if isinstance(m, types.MethodType) and m.__self__ is obj: - return m() - - elif isinstance(m, types.BuiltinMethodType): - # Can't access the bound self of methods of builtin classes :-( - return m() - - elif isinstance(m, objc.selector) and m.self is obj: - return m() - - else: - return m - - try: - return getattr(obj, "_" + key) - except AttributeError: - raise KeyError("Key %s does not exist" % (key,)) - - -def setKey(obj, key, value): - """ - Set the attribute referenced by 'key' to 'value'. The key is used - to build the name of an attribute, or attribute accessor method. - - The following attributes and accessors are tried (in this order): - - Mapping access (that is __setitem__ for collection.Mapping instances) - - Accessor 'setKey_' - - Accessor 'setKey' - - Accessor 'set_key' - - Attribute '_key' - - Attribute 'key' - - Raises KeyError if the key doesn't exist. - """ - if obj is None: - return - if isinstance(obj, (objc.objc_object, objc.objc_class)): - obj.setValue_forKey_(value, key) - return - - if isinstance(obj, collections_abc.Mapping): - obj[key] = value - return - - aBase = 'set' + keyCaps(key) - for accessor in (aBase + '_', aBase, 'set_' + key): - m = getattr(obj, accessor, None) - if m is None: - continue - try: - m(value) - return - except TypeError: - pass - - try: - m = getattr(obj, key) - except AttributeError: - pass - - else: - if isinstance(m, types.MethodType) and m.__self__ is obj: - # This looks like a getter method, don't call setattr - pass - - else: - try: - setattr(obj, key, value) - return - except AttributeError: - raise KeyError("Key %s does not exist" % (key,)) - - try: - getattr(obj, "_" + key) - except AttributeError: - pass - else: - setattr(obj, "_" + key, value) - return - - try: - setattr(obj, key, value) - except AttributeError: - raise KeyError("Key %s does not exist" % (key,)) - -def getKeyPath(obj, keypath): - """ - Get the value for the keypath. Keypath is a string containing a - path of keys, path elements are seperated by dots. 
- """ - if not keypath: - raise KeyError - - if obj is None: - return None - - - if isinstance(obj, (objc.objc_object, objc.objc_class)): - return obj.valueForKeyPath_(keypath) - - elements = keypath.split('.') - cur = obj - elemiter = iter(elements) - for e in elemiter: - if e[:1] == '@': - try: - oper = getattr(_ArrayOperators, e[1:]) - except AttributeError: - raise KeyError("Array operator %s not implemented" % (e,)) - return oper(cur, elemiter) - cur = getKey(cur, e) - return cur - -def setKeyPath(obj, keypath, value): - """ - Set the value at 'keypath'. The keypath is a string containing a - path of keys, seperated by dots. - """ - if obj is None: - return - - if isinstance(obj, (objc.objc_object, objc.objc_class)): - return obj.setValue_forKeyPath_(value, keypath) - - elements = keypath.split('.') - cur = obj - for e in elements[:-1]: - cur = getKey(cur, e) - - return setKey(cur, elements[-1], value) - - -class kvc(object): - def __init__(self, obj): - self.__pyobjc_object__ = obj - - def __getattr__(self, attr): - return getKey(self.__pyobjc_object__, attr) - - def __repr__(self): - return repr(self.__pyobjc_object__) - - def __setattr__(self, attr, value): - if not attr.startswith('_'): - setKey(self.__pyobjc_object__, attr, value) - - else: - object.__setattr__(self, attr, value) - - def __getitem__(self, item): - if not isinstance(item, basestring): - raise TypeError('Keys must be strings') - return getKeyPath(self.__pyobjc_object__, item) - - def __setitem__(self, item, value): - if not isinstance(item, basestring): - raise TypeError('Keys must be strings') - setKeyPath(self.__pyobjc_object__, item, value) diff --git a/env/lib/python2.7/site-packages/PyObjCTools/KeyValueCoding.pyc b/env/lib/python2.7/site-packages/PyObjCTools/KeyValueCoding.pyc deleted file mode 100644 index 4a0a31f7..00000000 Binary files a/env/lib/python2.7/site-packages/PyObjCTools/KeyValueCoding.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/MachSignals.py b/env/lib/python2.7/site-packages/PyObjCTools/MachSignals.py deleted file mode 100644 index a2ab40f9..00000000 --- a/env/lib/python2.7/site-packages/PyObjCTools/MachSignals.py +++ /dev/null @@ -1,34 +0,0 @@ -""" -Substitute for the signal module when using a CFRunLoop. - -This module is generally only used to support: - PyObjCTools.AppHelper.installMachInterrupt() - -A mach port is opened and registered to the CFRunLoop. -When a signal occurs the signal number is sent in a mach -message to the CFRunLoop. The handler then causes Python -code to get executed. - -In other words, Python's signal handling code does not wake -reliably when not running Python code, but this does. -""" - -from objc import _machsignals -__all__ = ['getsignal', 'signal'] - -def getsignal(signum): - """ - Return the signal handler for signal ``signum``. Returns ``None`` when - there is no signal handler for the signal. - """ - return _machsignals._signalmapping.get(signum) - -def signal(signum, handler): - """ - Install a new signal handler for ``signum``. Returns the old signal - handler (``None`` when there is no previous handler. 
- """ - rval = getsignal(signum) - _machsignals._signalmapping[signum] = handler - _machsignals.handle_signal(signum) - return rval diff --git a/env/lib/python2.7/site-packages/PyObjCTools/MachSignals.pyc b/env/lib/python2.7/site-packages/PyObjCTools/MachSignals.pyc deleted file mode 100644 index c364ba72..00000000 Binary files a/env/lib/python2.7/site-packages/PyObjCTools/MachSignals.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/Signals.py b/env/lib/python2.7/site-packages/PyObjCTools/Signals.py deleted file mode 100644 index 317b6ab5..00000000 --- a/env/lib/python2.7/site-packages/PyObjCTools/Signals.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Signals.py -- dump a python stacktrace if something bad happens. - - DO NOT USE THIS MODULE IN PRODUCTION CODE - -This module has two functions in its public API: - -- dumpStackOnFatalSignal() - This function will install signal handlers that print a stacktrace and - then reraise the signal. - -- resetFatalSignals() - Restores the signal handlers to the state they had before the call to - dumpStackOnFatalSignal. - -This module is not designed to provide fine grained control over signal -handling. Nor is it intended to be terribly robust. It may give useful -information when your program gets unexpected signals, but it might just -as easily cause a crash when such a signal gets in. - - DO NOT USE THIS MODULE IN PRODUCTION CODE -""" -from __future__ import print_function -import signal -import traceback -import os - -__all__ = ["dumpStackOnFatalSignal", "resetFatalSignals"] - -originalHandlers = None - -def dumpHandler(signum, frame): - """ - the signal handler used in this module: print a stacktrace and - then re-raise the signal - """ - resetFatalSignals() - print("*** Handling fatal signal '%d'." % signum) - traceback.print_stack(frame) - print("*** Restored handlers and resignaling.") - os.kill(os.getpid(), signum) - -def installHandler(sig): - """ - Install our signal handler for a signal. The original handler - is saved in 'originalHandlers'. - """ - originalHandlers[sig] = signal.signal(sig, dumpHandler) - -def dumpStackOnFatalSignal(): - """ - Install signal handlers that might print a useful stack trace when - this process receives a fatal signal. - - NOTE: See module docstring - """ - - global originalHandlers - if not originalHandlers: - originalHandlers = {} - installHandler(signal.SIGQUIT) - installHandler(signal.SIGILL) - installHandler(signal.SIGTRAP) - installHandler(signal.SIGABRT) - installHandler(signal.SIGEMT) - installHandler(signal.SIGFPE) - installHandler(signal.SIGBUS) - installHandler(signal.SIGSEGV) - installHandler(signal.SIGSYS) - -def resetFatalSignals(): - """ - Restore the original signal handlers - """ - global originalHandlers - if originalHandlers: - for sig in originalHandlers: - signal.signal(sig, originalHandlers[sig]) - originalHandlers = None diff --git a/env/lib/python2.7/site-packages/PyObjCTools/Signals.pyc b/env/lib/python2.7/site-packages/PyObjCTools/Signals.pyc deleted file mode 100644 index 988fc656..00000000 Binary files a/env/lib/python2.7/site-packages/PyObjCTools/Signals.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/PyObjCTools/TestSupport.py b/env/lib/python2.7/site-packages/PyObjCTools/TestSupport.py deleted file mode 100644 index 77c8339e..00000000 --- a/env/lib/python2.7/site-packages/PyObjCTools/TestSupport.py +++ /dev/null @@ -1,1024 +0,0 @@ -""" -Helper code for implementing unittests. 
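The Signals module deleted above installs stack-dumping handlers for fatal signals; per its own warning it is a debugging aid only. A minimal usage sketch, assuming pyobjc is installed:

    from PyObjCTools import Signals

    Signals.dumpStackOnFatalSignal()  # install handlers for SIGSEGV, SIGBUS, SIGABRT, ...

    # ... run the code under investigation; if a fatal signal arrives, a Python
    # stack trace is printed and the signal is re-raised with the old handler ...

    Signals.resetFatalSignals()       # restore the original signal handlers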
- -This module is unsupported and is primairily used in the PyObjC -testsuite. -""" -from __future__ import print_function -import plistlib as _pl -import unittest as _unittest -import objc -import os as _os -import gc as _gc -import subprocess as _subprocess -import sys as _sys -import struct as _struct -from distutils.sysconfig import get_config_var as _get_config_var -import re as _re -import warnings -import contextlib - -# Ensure that methods in this module get filtered in the tracebacks -# from unittest -__unittest = False - -# Have a way to disable the autorelease pool behaviour -_usepool = not _os.environ.get('PYOBJC_NO_AUTORELEASE') - -# Python 2/3 Compatibility for the PyObjC Test Suite -try: - unicode -except NameError: - unicode = str - -try: - long -except NameError: - long = int - -try: - basestring -except NameError: - basestring = str - -try: - bytes -except NameError: - bytes = str - -try: - unichr -except NameError: - unichr = chr - -def _typemap(tp): # XXX: Is this needed? - if tp is None: return None - return tp.replace(b'_NSRect', b'CGRect').replace(b'_NSPoint', b'CGPoint').replace(b'_NSSize', b'CGSize') - -@contextlib.contextmanager -def pyobjc_options(**kwds): - orig = {} - try: - for k in kwds: - orig[k] = getattr(objc.options, k) - setattr(objc.options, k, kwds[k]) - - yield - - finally: - for k in orig: - setattr(objc.options, k, orig[k]) - - -def sdkForPython(_cache=[]): - """ - Return the SDK version used to compile Python itself, - or None if no framework was used - """ - if not _cache: - - cflags = _get_config_var('CFLAGS') - m = _re.search('-isysroot\s+([^ ]*)(\s|$)', cflags) - if m is None: - _cache.append(None) - return None - - - path = m.group(1) - if path == '/': - result = tuple(map(int, os_release().split('.'))) - _cache.append(result) - return result - - bn = _os.path.basename(path) - version = bn[6:-4] - if version.endswith('u'): - version = version[:-1] - - - result = tuple(map(int, version.split('.'))) - _cache.append(result) - return result - - return _cache[0] - -def fourcc(v): - """ - Decode four-character-code integer definition - - (e.g. 'abcd') - """ - return _struct.unpack('>i', v)[0] - -def cast_int(value): - """ - Cast value to 32bit integer - - Usage: - cast_int(1 << 31) == -1 - - (where as: 1 << 31 == 2147483648) - """ - value = value & 0xffffffff - if value & 0x80000000: - value = ~value + 1 & 0xffffffff - return -value - else: - return value - -def cast_longlong(value): - """ - Cast value to 64bit integer - - Usage: - cast_longlong(1 << 63) == -1 - """ - value = value & 0xffffffffffffffff - if value & 0x8000000000000000: - value = ~value + 1 & 0xffffffffffffffff - return -value - else: - return value - -def cast_uint(value): - """ - Cast value to 32bit integer - - Usage: - cast_int(1 << 31) == 2147483648 - - """ - value = value & 0xffffffff - return value - -def cast_ulonglong(value): - """ - Cast value to 64bit integer - """ - value = value & 0xffffffffffffffff - return value - -_os_release = None -def os_release(): - """ - Returns the release of macOS (for example 10.5.1). 
- """ - global _os_release - if _os_release is not None: - return _os_release - - if hasattr(_pl, 'load'): - with open('/System/Library/CoreServices/SystemVersion.plist', 'rb') as fp: - pl = _pl.load(fp) - else: - pl = _pl.readPlist('/System/Library/CoreServices/SystemVersion.plist') - v = pl['ProductVersion'] - return '.'.join(v.split('.')) - - -def is32Bit(): - """ - Return True if we're running in 32-bit mode - """ - if _sys.maxsize > 2 ** 32: - return False - return True - -def onlyIf(expr, message=None): - """ - Usage:: - - class Tests (unittest.TestCase): - - @onlyIf(1 == 2) - def testUnlikely(self): - pass - - The test only runs when the argument expression is true - """ - def callback(function): - if not expr: - if hasattr(_unittest, 'skip'): - return _unittest.skip(message)(function) - return lambda self: None # pragma: no cover (py2.6) - else: - return function - return callback - -def onlyPython2(function): - """ - Usage: - class Tests (unittest.TestCase): - - @onlyPython2 - def testPython2(self): - pass - - The test is only executed for Python 2.x - """ - return onlyIf(_sys.version_info[0] == 2, "python2.x only")(function) - -def onlyPython3(function): - """ - Usage: - class Tests (unittest.TestCase): - - @onlyPython3 - def testPython3(self): - pass - - The test is only executed for Python 3.x - """ - return onlyIf(_sys.version_info[0] == 3, "python3.x only")(function) - -def onlyOn32Bit(function): - """ - Usage:: - - class Tests (unittest.TestCase): - - @onlyOn32Bit - def test32BitOnly(self): - pass - - The test runs only on 32-bit systems - """ - return onlyIf(is32Bit(), "32-bit only")(function) - -def onlyOn64Bit(function): - """ - Usage:: - - class Tests (unittest.TestCase): - - @onlyOn64Bit - def test64BitOnly(self): - pass - - The test runs only on 64-bit systems - """ - return onlyIf(not is32Bit(), "64-bit only")(function) - -def min_python_release(version): - """ - Usage:: - - class Tests (unittest.TestCase): - - @min_python_release('3.2') - def test_python_3_2(self): - pass - """ - parts = tuple(map(int, version.split('.'))) - return onlyIf(_sys.version_info[:2] >= parts, "Requires Python %s or later"%(version,)) - -def _sort_key(version): - parts = version.split('.') - if len(parts) == 2: - parts.append('0') - - if len(parts) != 3: - raise ValueError("Invalid version: %r"%(version,)) - - return tuple(int(x) for x in parts) - - -def os_level_key(release): - """ - Return an object that can be used to compare two releases. 
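TestSupport, also removed here, provided the skip decorators and the metadata-aware TestCase used throughout the PyObjC test suite. A hypothetical test module showing the decorators, assuming pyobjc is installed on macOS:

    import unittest
    from PyObjCTools.TestSupport import TestCase, min_os_level, onlyOn64Bit

    class ExampleTests(TestCase):
        @min_os_level('10.10')   # run only on macOS 10.10 or later
        @onlyOn64Bit             # and only in a 64-bit interpreter
        def test_modern_api(self):
            self.assertTrue(True)

    if __name__ == "__main__":
        unittest.main()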
- """ - return _sort_key(release) - - -def min_sdk_level(release): - """ - Usage:: - - class Tests (unittest.TestCase): - @min_sdk_level('10.6') - def testSnowLeopardSDK(self): - pass - """ - v = (objc.PyObjC_BUILD_RELEASE // 100, objc.PyObjC_BUILD_RELEASE % 100, 0) - return onlyIf(v >= os_level_key(release), "Requires build with SDK %s or later"%(release,)) - -def max_sdk_level(release): - """ - Usage:: - - class Tests (unittest.TestCase): - @max_sdk_level('10.5') - def testUntilLeopardSDK(self): - pass - """ - v = (objc.PyObjC_BUILD_RELEASE // 100, objc.PyObjC_BUILD_RELEASE % 100, 0) - return onlyIf(v <= os_level_key(release), "Requires build with SDK %s or later"%(release,)) - -def min_os_level(release): - """ - Usage:: - - class Tests (unittest.TestCase): - - @min_os_level('10.6') - def testSnowLeopardCode(self): - pass - """ - return onlyIf(os_level_key(os_release()) >= os_level_key(release), "Requires OSX %s or later"%(release,)) - -def max_os_level(release): - """ - Usage:: - - class Tests (unittest.TestCase): - - @max_os_level('10.5') - def testUntilLeopard(self): - pass - """ - return onlyIf(os_level_key(os_release()) <= os_level_key(release), "Requires OSX upto %s"%(release,)) - -def os_level_between(min_release, max_release): - """ - Usage:: - - class Tests (unittest.TestCase): - - @os_level_between('10.5', '10.8') - def testUntilLeopard(self): - pass - """ - return onlyIf(os_level_key(min_release) <= os_level_key(os_release()) <= os_level_key(max_release), "Requires OSX %s upto %s"%(min_release, max_release)) - -_poolclass = objc.lookUpClass('NSAutoreleasePool') - -# NOTE: On at least OSX 10.8 there are multiple proxy classes for CFTypeRef... -_nscftype = tuple(cls for cls in objc.getClassList() if 'NSCFType' in cls.__name__) - -_typealias = {} - -if not is32Bit(): - _typealias[objc._C_LNG_LNG] = objc._C_LNG - _typealias[objc._C_ULNG_LNG] = objc._C_ULNG - -else: # pragma: no cover (32-bit) - _typealias[objc._C_LNG] = objc._C_INT - _typealias[objc._C_ULNG] = objc._C_UINT - -class TestCase (_unittest.TestCase): - """ - A version of TestCase that wraps every test into its own - autorelease pool. - - This also adds a number of useful assertion methods - """ - - - def assertIsCFType(self, tp, message = None): - if not isinstance(tp, objc.objc_class): - self.fail(message or "%r is not a CFTypeRef type"%(tp,)) - - if any(x is tp for x in _nscftype): - self.fail(message or "%r is not a unique CFTypeRef type"%(tp,)) - - for cls in tp.__bases__: - if 'NSCFType' in cls.__name__: - return - - self.fail(message or "%r is not a CFTypeRef type"%(tp,)) - - # NOTE: Don't test if this is a subclass of one of the known - # CF roots, this tests is mostly used to ensure that the - # type is distinct from one of those roots. - # XXX: With the next two lines enabled there are spurious test - # failures when a CF type is toll-free bridged to an - # (undocumented) Cocoa class. It might be worthwhile to - # look for these, but not in the test suite. 
- #if not issubclass(tp, _nscftype): - # self.fail(message or "%r is not a CFTypeRef subclass"%(tp,)) - - - def assertIsOpaquePointer(self, tp, message = None): - if not hasattr(tp, "__pointer__"): - self.fail(message or "%r is not an opaque-pointer"%(tp,)) - - if not hasattr(tp, "__typestr__"): - self.fail(message or "%r is not an opaque-pointer"%(tp,)) - - - def assertResultIsNullTerminated(self, method, message = None): - info = method.__metadata__() - if not info.get('retval', {}).get('c_array_delimited_by_null'): - self.fail(message or "result of %r is not a null-terminated array"%(method,)) - - def assertIsNullTerminated(self, method, message = None): - info = method.__metadata__() - if not info.get('c_array_delimited_by_null') or not info.get('variadic'): - self.fail(message or "%s is not a variadic function with a null-terminated list of arguments"%(method,)) - - def assertArgIsNullTerminated(self, method, argno, message = None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - try: - if not info['arguments'][argno+offset].get('c_array_delimited_by_null'): - self.fail(message or "argument %d of %r is not a null-terminated array"%(argno, method)) - except (KeyError, IndexError): - self.fail(message or "argument %d of %r is not a null-terminated array"%(argno, method)) - - def assertArgIsVariableSize(self, method, argno, message = None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - try: - if not info['arguments'][argno+offset].get('c_array_of_variable_length'): - self.fail(message or "argument %d of %r is not a variable sized array"%(argno, method,)) - except (KeyError, IndexError): - self.fail(message or "argument %d of %r is not a variable sized array"%(argno, method,)) - - def assertResultIsVariableSize(self, method, message = None): - info = method.__metadata__() - if not info.get('retval', {}).get('c_array_of_variable_length', False): - self.fail(message or "result of %r is not a variable sized array"%(method,)) - - def assertArgSizeInResult(self, method, argno, message = None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - try: - if not info['arguments'][argno+offset].get('c_array_length_in_result'): - self.fail(message or "argument %d of %r does not have size in result"%(argno, method)) - except (KeyError, IndexError): - self.fail(message or "argument %d of %r does not have size in result"%(argno, method)) - - def assertArgIsPrintf(self, method, argno, message = None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - if not info.get('variadic'): - self.fail(message or "%r is not a variadic function"%(method,)) - - try: - if not info['arguments'][argno+offset].get('printf_format'): - self.fail(message or "%r argument %d is not a printf format string"%(method, argno)) - except (KeyError, IndexError): - self.fail(message or "%r argument %d is not a printf format string"%(method, argno)) - - def assertArgIsCFRetained(self, method, argno, message = None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - - try: - if not info['arguments'][argno+offset]['already_cfretained']: - self.fail(message or "%r is not cfretained"%(method,)) - except (KeyError, IndexError): - self.fail(message or "%r is not cfretained"%(method,)) - - def assertArgIsNotCFRetained(self, method, argno, message = None): - 
if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - try: - if info['arguments'][argno+offset]['already_cfretained']: - self.fail(message or "%r is cfretained"%(method,)) - except (KeyError, IndexError): - pass - - def assertResultIsCFRetained(self, method, message = None): - info = method.__metadata__() - - if not info.get('retval', {}).get('already_cfretained', False): - self.fail(message or "%r is not cfretained"%(method,)) - - def assertResultIsNotCFRetained(self, method, message = None): - info = method.__metadata__() - if info.get('retval', {}).get('already_cfretained', False): - self.fail(message or "%r is cfretained"%(method,)) - - def assertArgIsRetained(self, method, argno, message = None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - - try: - if not info['arguments'][argno+offset]['already_retained']: - self.fail(message or "%r is not retained"%(method,)) - except (KeyError, IndexError): - self.fail(message or "%r is not retained"%(method,)) - - def assertArgIsNotRetained(self, method, argno, message = None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - try: - if info['arguments'][argno+offset]['already_retained']: - self.fail(message or "%r is retained"%(method,)) - except (KeyError, IndexError): - pass - - def assertResultIsRetained(self, method, message = None): - info = method.__metadata__() - if not info.get('retval', {}).get('already_retained', False): - self.fail(message or "%r is not retained"%(method,)) - - def assertResultIsNotRetained(self, method, message = None): - info = method.__metadata__() - if info.get('retval', {}).get('already_retained', False): - self.fail(message or "%r is retained"%(method,)) - - def assertResultHasType(self, method, tp, message=None): - info = method.__metadata__() - type = info.get('retval').get('type', b'v') - if type != tp and _typemap(type) != _typemap(tp) \ - and _typealias.get(type, type) != _typealias.get(tp, tp): - self.fail(message or "result of %r is not of type %r, but %r"%( - method, tp, type)) - - def assertArgHasType(self, method, argno, tp, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - try: - i = info['arguments'][argno+offset] - - except (KeyError, IndexError): - self.fail(message or "arg %d of %s has no metadata (or doesn't exist)"%(argno, method)) - - else: - type = i.get('type', b'@') - - if type != tp and _typemap(type) != _typemap(tp) \ - and _typealias.get(type, type) != _typealias.get(tp, tp): - self.fail(message or "arg %d of %s is not of type %r, but %r"%( - argno, method, tp, type)) - - - def assertArgIsFunction(self, method, argno, sel_type, retained, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - - try: - i = info['arguments'][argno+offset] - except (KeyError, IndexError): - self.fail(message or "arg %d of %s has no metadata (or doesn't exist)"%(argno, method)) - - else: - type = i.get('type', b'@') - - if type != b'^?': - self.fail(message or "arg %d of %s is not of type function_pointer"%( - argno, method)) - - st = i.get('callable') - if st is None: - self.fail(message or "arg %d of %s is not of type function_pointer"%( - argno, method)) - - try: - iface = st['retval']['type'] - for a in st['arguments']: - iface += a['type'] - except KeyError: - self.fail(message or "arg %d of %s is a 
function pointer with incomplete type information"%(argno, method)) - - if iface != sel_type: - self.fail(message or "arg %d of %s is not a function_pointer with type %r, but %r"%(argno, method, sel_type, iface)) - - - st = info['arguments'][argno+offset].get('callable_retained', False) - if bool(st) != bool(retained): - self.fail(message or "arg %d of %s; retained: %r, expected: %r"%( - argno, method, st, retained)) - - def assertResultIsFunction(self, method, sel_type, message=None): - info = method.__metadata__() - - try: - i = info['retval'] - except (KeyError, IndexError): - self.fail(message or "result of %s has no metadata (or doesn't exist)"%(method,)) - - else: - type = i.get('type', b'@') - - if type != b'^?': - self.fail(message or "result of %s is not of type function_pointer"%( - method, )) - - st = i.get('callable') - if st is None: - self.fail(message or "result of %s is not of type function_pointer"%( - method, )) - - try: - iface = st['retval']['type'] - for a in st['arguments']: - iface += a['type'] - except KeyError: - self.fail(message or "result of %s is a function pointer with incomplete type information"%(method,)) - - if iface != sel_type: - self.fail(message or "result of %s is not a function_pointer with type %r, but %r"%(method, sel_type, iface)) - - - def assertArgIsBlock(self, method, argno, sel_type, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - try: - type = info['arguments'][argno+offset]['type'] - except (IndexError, KeyError): - self.fail("arg %d of %s does not exist"%(argno, method)) - - if type != b'@?': - self.fail(message or "arg %d of %s is not of type block: %s"%( - argno, method, type)) - - st = info['arguments'][argno+offset].get('callable') - if st is None: - self.fail(message or "arg %d of %s is not of type block: no callable"%( - argno, method)) - - try: - iface = st['retval']['type'] - if st['arguments'][0]['type'] != b'^v': - self.fail(message or "arg %d of %s has an invalid block signature %r"%(argno, method, st['arguments'][0]['type'])) - for a in st['arguments'][1:]: - iface += a['type'] - except KeyError: - self.fail(message or "result of %s is a block pointer with incomplete type information"%(method,)) - - if iface != sel_type: - self.fail(message or "arg %d of %s is not a block with type %r, but %r"%(argno, method, sel_type, iface)) - - def assertResultIsBlock(self, method, sel_type, message=None): - info = method.__metadata__() - - try: - type = info['retval']['type'] - if type != b'@?': - self.fail(message or "result of %s is not of type block: %s"%( - method, type)) - except KeyError: - self.fail(message or "result of %s is not of type block: %s"%( - method, b'v')) - - st = info['retval'].get('callable') - if st is None: - self.fail(message or "result of %s is not of type block: no callable specified"%( - method)) - - try: - iface = st['retval']['type'] - if st['arguments'][0]['type'] != b'^v': - self.fail(message or "result %s has an invalid block signature %r"%(method, st['arguments'][0]['type'])) - for a in st['arguments'][1:]: - iface += a['type'] - except KeyError: - self.fail(message or "result of %s is a block pointer with incomplete type information"%(method,)) - - if iface != sel_type: - self.fail(message or "result of %s is not a block with type %r, but %r"%(method, sel_type, iface)) - - def assertArgIsSEL(self, method, argno, sel_type, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = 
method.__metadata__() - try: - i = info['arguments'][argno+offset] - except (KeyError, IndexError): - self.fail(message or "arg %d of %s has no metadata (or doesn't exist)"%(argno, method)) - - type = i.get('type', b'@') - if type != objc._C_SEL: - self.fail(message or "arg %d of %s is not of type SEL"%( - argno, method)) - - st = i.get('sel_of_type') - if st != sel_type and _typemap(st) != _typemap(sel_type): - self.fail(message or "arg %d of %s doesn't have sel_type %r but %r"%( - argno, method, sel_type, st)) - - def assertResultIsBOOL(self, method, message=None): - info = method.__metadata__() - type = info['retval']['type'] - if type != objc._C_NSBOOL: - self.fail(message or "result of %s is not of type BOOL, but %r"%( - method, type)) - - def assertArgIsBOOL(self, method, argno, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - type = info['arguments'][argno+offset]['type'] - if type != objc._C_NSBOOL: - self.fail(message or "arg %d of %s is not of type BOOL, but %r"%( - argno, method, type)) - - def assertArgIsFixedSize(self, method, argno, count, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - try: - cnt = info['arguments'][argno+offset]['c_array_of_fixed_length'] - if cnt != count: - self.fail(message or "arg %d of %s is not a C-array of length %d"%( - argno, method, count)) - except (KeyError, IndexError): - self.fail(message or "arg %d of %s is not a C-array of length %d"%( - argno, method, count)) - - def assertResultIsFixedSize(self, method, count, message=None): - info = method.__metadata__() - try: - cnt = info['retval']['c_array_of_fixed_length'] - if cnt != count: - self.fail(message or "result of %s is not a C-array of length %d"%( - method, count)) - except (KeyError, IndexError): - self.fail(message or "result of %s is not a C-array of length %d"%( - method, count)) - - def assertArgSizeInArg(self, method, argno, count, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - try: - cnt = info['arguments'][argno+offset]['c_array_length_in_arg'] - except (KeyError, IndexError): - self.fail(message or "arg %d of %s is not a C-array of with length in arg %s"%( - argno, method, count)) - - if isinstance(count, (list, tuple)): - count2 = tuple(x + offset for x in count) - else: - count2 = count + offset - if cnt != count2: - self.fail(message or "arg %d of %s is not a C-array of with length in arg %s"%( - argno, method, count)) - - def assertResultSizeInArg(self, method, count, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - cnt = info['retval']['c_array_length_in_arg'] - if cnt != count + offset: - self.fail(message or "result %s is not a C-array of with length in arg %d"%( - method, count)) - - - def assertArgIsOut(self, method, argno, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - type = info['arguments'][argno+offset]['type'] - if not type.startswith(b'o^') and not type.startswith(b'o*'): - self.fail(message or "arg %d of %s is not an 'out' argument"%( - argno, method)) - - def assertArgIsInOut(self, method, argno, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - type = info['arguments'][argno+offset]['type'] - if not type.startswith(b'N^') and 
not type.startswith(b'N*'): - self.fail(message or "arg %d of %s is not an 'inout' argument"%( - argno, method)) - - def assertArgIsIn(self, method, argno, message=None): - if isinstance(method, objc.selector): - offset = 2 - else: - offset = 0 - info = method.__metadata__() - type = info['arguments'][argno+offset]['type'] - if not type.startswith(b'n^') and not type.startswith(b'n*'): - self.fail(message or "arg %d of %s is not an 'in' argument"%( - argno, method)) - - - # - # Addition assert methods, all of them should only be necessary for - # python 2.7 or later - # - - if not hasattr(_unittest.TestCase, 'assertItemsEqual'): # pragma: no cover - def assertItemsEqual(self, seq1, seq2, message=None): - # This is based on unittest.util._count_diff_all_purpose from - # Python 2.7 - s, t = list(seq1), list(seq2) - m, n = len(s), len(t) - NULL = object() - result = [] - for i, elem in enumerate(s): - if elem is NULL: - continue - - cnt_s = cnt_t = 0 - for j in range(i, m): - if s[j] == elem: - cnt_s += 1 - s[j] = NULL - - for j, other_elem in enumerate(t): - if other_elem == elem: - cnt_t += 1 - t[j] = NULL - - if cnt_s != cnt_t: - result.append((cnt_s, cnt_t, elem)) - for i, elem in enumerate(t): - if elem is NULL: - continue - cnt_t = 0 - for j in range(i, n): - if t[j] == elem: - cnt_t += 1 - t[j] = NULL - - result.append((0, cnt_t, elem)) - - if result: - for actual, expected, value in result: - print("Seq1 %d, Seq2: %d value: %r"%(actual, expected, value)) - - self.fail(message or ("sequences do not contain the same items:" + - "\n".join(["Seq1 %d, Seq2: %d value: %r"%(item) for item in result]))) - - - - if not hasattr(_unittest.TestCase, 'assertStartswith'): - def assertStartswith(self, value, test, message = None): # pragma: no cover - if not value.startswith(test): - self.fail(message or "%r does not start with %r"%(value, test)) - - if not hasattr(_unittest.TestCase, 'assertIs'): # pragma: no cover - def assertIs(self, value, test, message = None): - if value is not test: - self.fail(message or "%r (id=%r) is not %r (id=%r) "%(value, id(value), test, id(test))) - - if not hasattr(_unittest.TestCase, 'assertIsNot'): # pragma: no cover - def assertIsNot(self, value, test, message = None): - if value is test: - self.fail(message or "%r is %r"%(value, test)) - - if not hasattr(_unittest.TestCase, 'assertIsNone'): # pragma: no cover - def assertIsNone(self, value, message = None): - self.assertIs(value, None) - - if not hasattr(_unittest.TestCase, 'assertIsNotNone'): # pragma: no cover - def assertIsNotNone(self, value, message = None): - if value is None: - sel.fail(message, "%r is not %r"%(value, test)) - - if not hasattr(_unittest.TestCase, 'assertStartsWith'): # pragma: no cover - def assertStartswith(self, value, check, message=None): - if not value.startswith(check): - self.fail(message or "not %r.startswith(%r)"%(value, check)) - - if not hasattr(_unittest.TestCase, 'assertHasAttr'): # pragma: no cover - def assertHasAttr(self, value, key, message=None): - if not hasattr(value, key): - self.fail(message or "%s is not an attribute of %r"%(key, value)) - - if not hasattr(_unittest.TestCase, 'assertNotHasAttr'): # pragma: no cover - def assertNotHasAttr(self, value, key, message=None): - if hasattr(value, key): - self.fail(message or "%s is an attribute of %r"%(key, value)) - - def assertIsSubclass(self, value, types, message=None): - if not issubclass(value, types): - self.fail(message or "%s is not a subclass of %r"%(value, types)) - - def assertIsNotSubclass(self, value, types, 
message=None): - if issubclass(value, types): - self.fail(message or "%s is a subclass of %r"%(value, types)) - - if not hasattr(_unittest.TestCase, 'assertIsInstance'): # pragma: no cover - def assertIsInstance(self, value, types, message=None): - if not isinstance(value, types): - self.fail(message or "%s is not an instance of %r but %s"%(value, types, type(value))) - - if not hasattr(_unittest.TestCase, 'assertIsNotInstance'): # pragma: no cover - def assertIsNotInstance(self, value, types, message=None): - if isinstance(value, types): - self.fail(message or "%s is an instance of %r"%(value, types)) - - if not hasattr(_unittest.TestCase, 'assertIn'): # pragma: no cover - def assertIn(self, value, seq, message=None): - if value not in seq: - self.fail(message or "%r is not in %r"%(value, seq)) - - if not hasattr(_unittest.TestCase, 'assertNotIn'): # pragma: no cover - def assertNotIn(self, value, seq, message=None): - if value in seq: - self.fail(message or "%r is in %r"%(value, seq)) - - - if not hasattr(_unittest.TestCase, 'assertGreaterThan'): # pragma: no cover - def assertGreaterThan(self, val, test, message=None): - if not (val > test): - self.fail(message or '%r <= %r'%(val, test)) - - if not hasattr(_unittest.TestCase, 'assertGreaterEqual'): # pragma: no cover - def assertGreaterEqual(self, val, test, message=None): - if not (val >= test): - self.fail(message or '%r < %r'%(val, test)) - - if not hasattr(_unittest.TestCase, 'assertLessThan'): # pragma: no cover - def assertLessThan(self, val, test, message=None): - if not (val < test): - self.fail(message or '%r >= %r'%(val, test)) - - if not hasattr(_unittest.TestCase, 'assertLessEqual'): # pragma: no cover - def assertLessEqual(self, val, test, message=None): - if not (val <= test): - self.fail(message or '%r > %r'%(val, test)) - - if not hasattr(_unittest.TestCase, "assertAlmostEquals"): # pragma: no cover - def assertAlmostEquals(self, val1, val2, message=None): - self.failUnless(abs (val1 - val2) < 0.00001, - message or 'abs(%r - %r) >= 0.00001'%(val1, val2)) - - - def run(self, *args): - """ - Run the test, same as unittest.TestCase.run, but every test is - run with a fresh autorelease pool. 
- """ - if _usepool: - p = _poolclass.alloc().init() - else: - p = 1 - - try: - _unittest.TestCase.run(self, *args) - finally: - _gc.collect() - del p - _gc.collect() - - -main = _unittest.main - -if hasattr(_unittest, 'expectedFailure'): - expectedFailure = _unittest.expectedFailure - -else: # pragma: no cover (py2.6) - - def expectedFailure(func): - def test(self): - try: - func(self) - - except AssertionError: - return - - self.fail("test unexpectedly passed") - test.__name__ == func.__name__ - - return test - -def expectedFailureIf(condition): - if condition: - return expectedFailure - else: - return lambda func: func diff --git a/env/lib/python2.7/site-packages/PyObjCTools/TestSupport.pyc b/env/lib/python2.7/site-packages/PyObjCTools/TestSupport.pyc deleted file mode 100644 index 1c34f9c5..00000000 Binary files a/env/lib/python2.7/site-packages/PyObjCTools/TestSupport.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/QTKit/_QTKit.so b/env/lib/python2.7/site-packages/QTKit/_QTKit.so deleted file mode 100755 index c77d0aca..00000000 Binary files a/env/lib/python2.7/site-packages/QTKit/_QTKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/QTKit/__init__.py b/env/lib/python2.7/site-packages/QTKit/__init__.py deleted file mode 100644 index 59562a37..00000000 --- a/env/lib/python2.7/site-packages/QTKit/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the QTKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Cocoa -import Quartz - -from QTKit import _metadata, _QTKit - -sys.modules['QTKit'] = mod = objc.ObjCLazyModule('QTKit', - "com.apple.QTKit", - objc.pathForFramework("/System/Library/Frameworks/QTKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Cocoa, Quartz,)) - -import sys -del sys.modules['QTKit._metadata'] -del sys.modules['QTKit._QTKit'] diff --git a/env/lib/python2.7/site-packages/QTKit/__init__.pyc b/env/lib/python2.7/site-packages/QTKit/__init__.pyc deleted file mode 100644 index 87707900..00000000 Binary files a/env/lib/python2.7/site-packages/QTKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/QTKit/_metadata.py b/env/lib/python2.7/site-packages/QTKit/_metadata.py deleted file mode 100644 index d637f801..00000000 --- a/env/lib/python2.7/site-packages/QTKit/_metadata.py +++ /dev/null @@ -1,206 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Aug 10 11:23:16 2015 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'QTTimeRange': objc.createStructType('QTTimeRange', sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}'), ['time', 'duration']), 'QTTime': objc.createStructType('QTTime', sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}'), ['timeValue', 'timeScale', 'flags'])}) -constants = 
'''$QTAddImageCodecQuality$QTAddImageCodecType$QTCaptureConnectionAttributeDidChangeNotification$QTCaptureConnectionAttributeWillChangeNotification$QTCaptureConnectionAudioAveragePowerLevelsAttribute$QTCaptureConnectionAudioMasterVolumeAttribute$QTCaptureConnectionAudioPeakHoldLevelsAttribute$QTCaptureConnectionAudioVolumesAttribute$QTCaptureConnectionChangedAttributeKey$QTCaptureConnectionEnabledAudioChannelsAttribute$QTCaptureConnectionFormatDescriptionDidChangeNotification$QTCaptureConnectionFormatDescriptionWillChangeNotification$QTCaptureDeviceAVCTransportControlsAttribute$QTCaptureDeviceAVCTransportControlsPlaybackModeKey$QTCaptureDeviceAVCTransportControlsSpeedKey$QTCaptureDeviceAttributeDidChangeNotification$QTCaptureDeviceAttributeWillChangeNotification$QTCaptureDeviceAvailableInputSourcesAttribute$QTCaptureDeviceChangedAttributeKey$QTCaptureDeviceFormatDescriptionsDidChangeNotification$QTCaptureDeviceFormatDescriptionsWillChangeNotification$QTCaptureDeviceInputSourceIdentifierAttribute$QTCaptureDeviceInputSourceIdentifierKey$QTCaptureDeviceInputSourceLocalizedDisplayNameKey$QTCaptureDeviceLegacySequenceGrabberAttribute$QTCaptureDeviceLinkedDevicesAttribute$QTCaptureDeviceSuspendedAttribute$QTCaptureDeviceWasConnectedNotification$QTCaptureDeviceWasDisconnectedNotification$QTCaptureSessionErrorKey$QTCaptureSessionRuntimeErrorNotification$QTDataReferenceTypeFile$QTDataReferenceTypeHandle$QTDataReferenceTypePointer$QTDataReferenceTypeResource$QTDataReferenceTypeURL$QTDisallowedForInitializationPurposeException$QTErrorCaptureInputKey$QTErrorCaptureOutputKey$QTErrorDeviceKey$QTErrorExcludingDeviceKey$QTErrorFileSizeKey$QTErrorRecordingSuccesfullyFinishedKey$QTErrorRecordingSuccessfullyFinishedKey$QTErrorTimeKey$QTExportOptionsAppleM4A$QTExportOptionsAppleM4V480pSD$QTExportOptionsAppleM4V720pHD$QTExportOptionsAppleM4VAppleTV$QTExportOptionsAppleM4VCellular$QTExportOptionsAppleM4VWiFi$QTExportOptionsAppleM4ViPod$QTExportOptionsQuickTimeMovie1080p$QTExportOptionsQuickTimeMovie480p$QTExportOptionsQuickTimeMovie720p$QTFormatDescriptionAudioChannelLayoutAttribute$QTFormatDescriptionAudioMagicCookieAttribute$QTFormatDescriptionAudioStreamBasicDescriptionAttribute$QTFormatDescriptionVideoCleanApertureDisplaySizeAttribute$QTFormatDescriptionVideoEncodedPixelsSizeAttribute$QTFormatDescriptionVideoProductionApertureDisplaySizeAttribute$QTKitErrorDomain$QTMediaCharacteristicAudio$QTMediaCharacteristicCanSendVideo$QTMediaCharacteristicCanStep$QTMediaCharacteristicHasNoDuration$QTMediaCharacteristicHasSkinData$QTMediaCharacteristicHasVideoFrameRate$QTMediaCharacteristicNonLinear$QTMediaCharacteristicProvidesActions$QTMediaCharacteristicProvidesKeyFocus$QTMediaCharacteristicVisual$QTMediaCreationTimeAttribute$QTMediaDurationAttribute$QTMediaModificationTimeAttribute$QTMediaQualityAttribute$QTMediaSampleCountAttribute$QTMediaTimeScaleAttribute$QTMediaType3D$QTMediaTypeAttribute$QTMediaTypeBase$QTMediaTypeClosedCaption$QTMediaTypeFlash$QTMediaTypeHint$QTMediaTypeMPEG$QTMediaTypeMovie$QTMediaTypeMusic$QTMediaTypeMuxed$QTMediaTypeQTVR$QTMediaTypeQuartzComposer$QTMediaTypeSkin$QTMediaTypeSound$QTMediaTypeSprite$QTMediaTypeStream$QTMediaTypeSubtitle$QTMediaTypeText$QTMediaTypeTimeCode$QTMediaTypeTween$QTMediaTypeVideo$QTMetadata3GPUserDataKeyAuthor$QTMetadata3GPUserDataKeyCopyright$QTMetadata3GPUserDataKeyDescription$QTMetadata3GPUserDataKeyGenre$QTMetadata3GPUserDataKeyLocation$QTMetadata3GPUserDataKeyPerformer$QTMetadata3GPUserDataKeyRecordingYear$QTMetadata3GPUserDataKeyTitle$QTMetadataCommonKeyAlbumName
$QTMetadataCommonKeyArtist$QTMetadataCommonKeyArtwork$QTMetadataCommonKeyAuthor$QTMetadataCommonKeyComment$QTMetadataCommonKeyContributor$QTMetadataCommonKeyCopyrights$QTMetadataCommonKeyCreationDate$QTMetadataCommonKeyCreator$QTMetadataCommonKeyDescription$QTMetadataCommonKeyFormat$QTMetadataCommonKeyIdentifier$QTMetadataCommonKeyLanguage$QTMetadataCommonKeyLastModifiedDate$QTMetadataCommonKeyLocation$QTMetadataCommonKeyMake$QTMetadataCommonKeyModel$QTMetadataCommonKeyPublisher$QTMetadataCommonKeyRelation$QTMetadataCommonKeySoftware$QTMetadataCommonKeySource$QTMetadataCommonKeySubject$QTMetadataCommonKeyTitle$QTMetadataCommonKeyType$QTMetadataFormatID3Metadata$QTMetadataFormatQuickTimeMetadata$QTMetadataFormatQuickTimeUserData$QTMetadataFormatiTunesMetadata$QTMetadataID3MetadataKeyAlbumSortOrder$QTMetadataID3MetadataKeyAlbumTitle$QTMetadataID3MetadataKeyAttachedPicture$QTMetadataID3MetadataKeyAudioEncryption$QTMetadataID3MetadataKeyAudioSeekPointIndex$QTMetadataID3MetadataKeyBand$QTMetadataID3MetadataKeyBeatsPerMinute$QTMetadataID3MetadataKeyComments$QTMetadataID3MetadataKeyCommercialInformation$QTMetadataID3MetadataKeyCommerical$QTMetadataID3MetadataKeyComposer$QTMetadataID3MetadataKeyConductor$QTMetadataID3MetadataKeyContentGroupDescription$QTMetadataID3MetadataKeyContentType$QTMetadataID3MetadataKeyCopyright$QTMetadataID3MetadataKeyCopyrightInformation$QTMetadataID3MetadataKeyDate$QTMetadataID3MetadataKeyEncodedBy$QTMetadataID3MetadataKeyEncodedWith$QTMetadataID3MetadataKeyEncodingTime$QTMetadataID3MetadataKeyEncryption$QTMetadataID3MetadataKeyEqualization$QTMetadataID3MetadataKeyEqualization2$QTMetadataID3MetadataKeyEventTimingCodes$QTMetadataID3MetadataKeyFileOwner$QTMetadataID3MetadataKeyFileType$QTMetadataID3MetadataKeyGeneralEncapsulatedObject$QTMetadataID3MetadataKeyGroupIdentifier$QTMetadataID3MetadataKeyInitialKey$QTMetadataID3MetadataKeyInternationalStandardRecordingCode$QTMetadataID3MetadataKeyInternetRadioStationName$QTMetadataID3MetadataKeyInternetRadioStationOwner$QTMetadataID3MetadataKeyInvolvedPeopleList_v23$QTMetadataID3MetadataKeyInvolvedPeopleList_v24$QTMetadataID3MetadataKeyLanguage$QTMetadataID3MetadataKeyLeadPerformer$QTMetadataID3MetadataKeyLength$QTMetadataID3MetadataKeyLink$QTMetadataID3MetadataKeyLyricist$QTMetadataID3MetadataKeyMPEGLocationLookupTable$QTMetadataID3MetadataKeyMediaType$QTMetadataID3MetadataKeyModifiedBy$QTMetadataID3MetadataKeyMood$QTMetadataID3MetadataKeyMusicCDIdentifier$QTMetadataID3MetadataKeyMusicianCreditsList$QTMetadataID3MetadataKeyOfficialArtistWebpage$QTMetadataID3MetadataKeyOfficialAudioFileWebpage$QTMetadataID3MetadataKeyOfficialAudioSourceWebpage$QTMetadataID3MetadataKeyOfficialInternetRadioStationHomepage$QTMetadataID3MetadataKeyOfficialPublisherWebpage$QTMetadataID3MetadataKeyOriginalAlbumTitle$QTMetadataID3MetadataKeyOriginalArtist$QTMetadataID3MetadataKeyOriginalFilename$QTMetadataID3MetadataKeyOriginalLyricist$QTMetadataID3MetadataKeyOriginalReleaseTime$QTMetadataID3MetadataKeyOriginalReleaseYear$QTMetadataID3MetadataKeyOwnership$QTMetadataID3MetadataKeyPartOfASet$QTMetadataID3MetadataKeyPayment$QTMetadataID3MetadataKeyPerformerSortOrder$QTMetadataID3MetadataKeyPlayCounter$QTMetadataID3MetadataKeyPlaylistDelay$QTMetadataID3MetadataKeyPopularimeter$QTMetadataID3MetadataKeyPositionSynchronization$QTMetadataID3MetadataKeyPrivate$QTMetadataID3MetadataKeyProducedNotice$QTMetadataID3MetadataKeyPublisher$QTMetadataID3MetadataKeyRecommendedBufferSize$QTMetadataID3MetadataKeyRecordingDates$QTMetadataID3MetadataKeyRecordingTime$QTMetadat
aID3MetadataKeyRelativeVolumeAdjustment$QTMetadataID3MetadataKeyRelativeVolumeAdjustment2$QTMetadataID3MetadataKeyReleaseTime$QTMetadataID3MetadataKeyReverb$QTMetadataID3MetadataKeySeek$QTMetadataID3MetadataKeySetSubtitle$QTMetadataID3MetadataKeySignature$QTMetadataID3MetadataKeySize$QTMetadataID3MetadataKeySubTitle$QTMetadataID3MetadataKeySynchronizedLyric$QTMetadataID3MetadataKeySynchronizedTempoCodes$QTMetadataID3MetadataKeyTaggingTime$QTMetadataID3MetadataKeyTermsOfUse$QTMetadataID3MetadataKeyTime$QTMetadataID3MetadataKeyTitleDescription$QTMetadataID3MetadataKeyTitleSortOrder$QTMetadataID3MetadataKeyTrackNumber$QTMetadataID3MetadataKeyUniqueFileIdentifier$QTMetadataID3MetadataKeyUnsynchronizedLyric$QTMetadataID3MetadataKeyUserText$QTMetadataID3MetadataKeyUserURL$QTMetadataID3MetadataKeyYear$QTMetadataISOUserDataKeyCopyright$QTMetadataKeySpaceCommon$QTMetadataKeySpaceID3$QTMetadataKeySpaceQuickTimeMetadata$QTMetadataKeySpaceQuickTimeUserData$QTMetadataKeySpaceiTunes$QTMetadataQuickTimeMetadataKeyAlbum$QTMetadataQuickTimeMetadataKeyArranger$QTMetadataQuickTimeMetadataKeyArtist$QTMetadataQuickTimeMetadataKeyArtwork$QTMetadataQuickTimeMetadataKeyAuthor$QTMetadataQuickTimeMetadataKeyComment$QTMetadataQuickTimeMetadataKeyComposer$QTMetadataQuickTimeMetadataKeyCopyright$QTMetadataQuickTimeMetadataKeyCreationDate$QTMetadataQuickTimeMetadataKeyCredits$QTMetadataQuickTimeMetadataKeyDescription$QTMetadataQuickTimeMetadataKeyDirector$QTMetadataQuickTimeMetadataKeyDisplayName$QTMetadataQuickTimeMetadataKeyEncodedBy$QTMetadataQuickTimeMetadataKeyGenre$QTMetadataQuickTimeMetadataKeyInformation$QTMetadataQuickTimeMetadataKeyKeywords$QTMetadataQuickTimeMetadataKeyLocationISO6709$QTMetadataQuickTimeMetadataKeyMake$QTMetadataQuickTimeMetadataKeyModel$QTMetadataQuickTimeMetadataKeyOriginalArtist$QTMetadataQuickTimeMetadataKeyPerformer$QTMetadataQuickTimeMetadataKeyPhonogramRights$QTMetadataQuickTimeMetadataKeyProducer$QTMetadataQuickTimeMetadataKeyPublisher$QTMetadataQuickTimeMetadataKeySoftware$QTMetadataQuickTimeMetadataKeyYear$QTMetadataQuickTimeMetadataKeyiXML$QTMetadataQuickTimeUserDataKeyAlbum$QTMetadataQuickTimeUserDataKeyArranger$QTMetadataQuickTimeUserDataKeyArtist$QTMetadataQuickTimeUserDataKeyAuthor$QTMetadataQuickTimeUserDataKeyChapter$QTMetadataQuickTimeUserDataKeyComment$QTMetadataQuickTimeUserDataKeyComposer$QTMetadataQuickTimeUserDataKeyCopyright$QTMetadataQuickTimeUserDataKeyCreationDate$QTMetadataQuickTimeUserDataKeyCredits$QTMetadataQuickTimeUserDataKeyDescription$QTMetadataQuickTimeUserDataKeyDirector$QTMetadataQuickTimeUserDataKeyDisclaimer$QTMetadataQuickTimeUserDataKeyEncodedBy$QTMetadataQuickTimeUserDataKeyFullName$QTMetadataQuickTimeUserDataKeyGenre$QTMetadataQuickTimeUserDataKeyHostComputer$QTMetadataQuickTimeUserDataKeyInformation$QTMetadataQuickTimeUserDataKeyKeywords$QTMetadataQuickTimeUserDataKeyLocationISO6709$QTMetadataQuickTimeUserDataKeyMake$QTMetadataQuickTimeUserDataKeyModel$QTMetadataQuickTimeUserDataKeyOriginalArtist$QTMetadataQuickTimeUserDataKeyOriginalFormat$QTMetadataQuickTimeUserDataKeyOriginalSource$QTMetadataQuickTimeUserDataKeyPerformers$QTMetadataQuickTimeUserDataKeyPhonogramRights$QTMetadataQuickTimeUserDataKeyProducer$QTMetadataQuickTimeUserDataKeyProduct$QTMetadataQuickTimeUserDataKeyPublisher$QTMetadataQuickTimeUserDataKeySoftware$QTMetadataQuickTimeUserDataKeySpecialPlaybackRequirements$QTMetadataQuickTimeUserDataKeyTrack$QTMetadataQuickTimeUserDataKeyTrackName$QTMetadataQuickTimeUserDataKeyURLLink$QTMetadataQuickTimeUserDataKeyWarning$QTMetadataQuickTime
UserDataKeyWriter$QTMetadataiTunesMetadataKeyAccountKind$QTMetadataiTunesMetadataKeyAcknowledgement$QTMetadataiTunesMetadataKeyAlbum$QTMetadataiTunesMetadataKeyAlbumArtist$QTMetadataiTunesMetadataKeyAppleID$QTMetadataiTunesMetadataKeyArranger$QTMetadataiTunesMetadataKeyArtDirector$QTMetadataiTunesMetadataKeyArtist$QTMetadataiTunesMetadataKeyArtistID$QTMetadataiTunesMetadataKeyAuthor$QTMetadataiTunesMetadataKeyBeatsPerMin$QTMetadataiTunesMetadataKeyComposer$QTMetadataiTunesMetadataKeyConductor$QTMetadataiTunesMetadataKeyContentRating$QTMetadataiTunesMetadataKeyCopyright$QTMetadataiTunesMetadataKeyCoverArt$QTMetadataiTunesMetadataKeyCredits$QTMetadataiTunesMetadataKeyDescription$QTMetadataiTunesMetadataKeyDirector$QTMetadataiTunesMetadataKeyDiscCompilation$QTMetadataiTunesMetadataKeyDiscNumber$QTMetadataiTunesMetadataKeyEQ$QTMetadataiTunesMetadataKeyEncodedBy$QTMetadataiTunesMetadataKeyEncodingTool$QTMetadataiTunesMetadataKeyExecProducer$QTMetadataiTunesMetadataKeyGenreID$QTMetadataiTunesMetadataKeyGrouping$QTMetadataiTunesMetadataKeyLinerNotes$QTMetadataiTunesMetadataKeyLyrics$QTMetadataiTunesMetadataKeyOnlineExtras$QTMetadataiTunesMetadataKeyOriginalArtist$QTMetadataiTunesMetadataKeyPerformer$QTMetadataiTunesMetadataKeyPhonogramRights$QTMetadataiTunesMetadataKeyPlaylistID$QTMetadataiTunesMetadataKeyPredefinedGenre$QTMetadataiTunesMetadataKeyProducer$QTMetadataiTunesMetadataKeyPublisher$QTMetadataiTunesMetadataKeyRecordCompany$QTMetadataiTunesMetadataKeyReleaseDate$QTMetadataiTunesMetadataKeySoloist$QTMetadataiTunesMetadataKeySongID$QTMetadataiTunesMetadataKeySongName$QTMetadataiTunesMetadataKeySoundEngineer$QTMetadataiTunesMetadataKeyThanks$QTMetadataiTunesMetadataKeyTrackNumber$QTMetadataiTunesMetadataKeyTrackSubTitle$QTMetadataiTunesMetadataKeyUserComment$QTMetadataiTunesMetadataKeyUserGenre$QTMovieActiveSegmentAttribute$QTMovieApertureModeAttribute$QTMovieApertureModeClassic$QTMovieApertureModeClean$QTMovieApertureModeDidChangeNotification$QTMovieApertureModeEncodedPixels$QTMovieApertureModeProduction$QTMovieAskUnresolvedDataRefsAttribute$QTMovieAutoAlternatesAttribute$QTMovieChapterDidChangeNotification$QTMovieChapterListDidChangeNotification$QTMovieChapterName$QTMovieChapterStartTime$QTMovieChapterTargetTrackAttribute$QTMovieCloseWindowRequestNotification$QTMovieCopyrightAttribute$QTMovieCreationTimeAttribute$QTMovieCurrentSizeAttribute$QTMovieCurrentTimeAttribute$QTMovieDataAttribute$QTMovieDataReferenceAttribute$QTMovieDataSizeAttribute$QTMovieDelegateAttribute$QTMovieDidEndNotification$QTMovieDisplayNameAttribute$QTMovieDontInteractWithUserAttribute$QTMovieDurationAttribute$QTMovieEditabilityDidChangeNotification$QTMovieEditableAttribute$QTMovieEditedNotification$QTMovieEnterFullScreenRequestNotification$QTMovieExitFullScreenRequestNotification$QTMovieExport$QTMovieExportManufacturer$QTMovieExportSettings$QTMovieExportType$QTMovieFileNameAttribute$QTMovieFileOffsetAttribute$QTMovieFlatten$QTMovieFrameImageDeinterlaceFields$QTMovieFrameImageHighQuality$QTMovieFrameImageOpenGLContext$QTMovieFrameImagePixelFormat$QTMovieFrameImageRepresentationsType$QTMovieFrameImageSessionMode$QTMovieFrameImageSingleField$QTMovieFrameImageSize$QTMovieFrameImageType$QTMovieFrameImageTypeCGImageRef$QTMovieFrameImageTypeCIImage$QTMovieFrameImageTypeCVOpenGLTextureRef$QTMovieFrameImageTypeCVPixelBufferRef$QTMovieFrameImageTypeNSImage$QTMovieHasApertureModeDimensionsAttribute$QTMovieHasAudioAttribute$QTMovieHasDurationAttribute$QTMovieHasVideoAttribute$QTMovieIsActiveAttribute$QTMovieIsInteractiveAttribute
$QTMovieIsLinearAttribute$QTMovieIsSteppableAttribute$QTMovieLoadStateAttribute$QTMovieLoadStateDidChangeNotification$QTMovieLoadStateErrorAttribute$QTMovieLoopModeDidChangeNotification$QTMovieLoopsAttribute$QTMovieLoopsBackAndForthAttribute$QTMovieMessageNotificationParameter$QTMovieMessageStringPostedNotification$QTMovieModernizerOutputFormat_AppleProRes422$QTMovieModernizerOutputFormat_AppleProRes4444$QTMovieModernizerOutputFormat_H264$QTMovieModificationTimeAttribute$QTMovieMutedAttribute$QTMovieNaturalSizeAttribute$QTMovieNaturalSizeDidChangeNotification$QTMovieOpenAsyncOKAttribute$QTMovieOpenAsyncRequiredAttribute$QTMovieOpenForPlaybackAttribute$QTMoviePasteboardAttribute$QTMoviePasteboardType$QTMoviePlaysAllFramesAttribute$QTMoviePlaysSelectionOnlyAttribute$QTMoviePosterTimeAttribute$QTMoviePreferredMutedAttribute$QTMoviePreferredRateAttribute$QTMoviePreferredVolumeAttribute$QTMoviePreviewModeAttribute$QTMoviePreviewRangeAttribute$QTMovieRateAttribute$QTMovieRateChangesPreservePitchAttribute$QTMovieRateDidChangeNotification$QTMovieRateDidChangeNotificationParameter$QTMovieResolveDataRefsAttribute$QTMovieSelectionAttribute$QTMovieSelectionDidChangeNotification$QTMovieSizeDidChangeNotification$QTMovieStatusCodeNotificationParameter$QTMovieStatusFlagsNotificationParameter$QTMovieStatusStringNotificationParameter$QTMovieStatusStringPostedNotification$QTMovieTargetIDNotificationParameter$QTMovieTargetNameNotificationParameter$QTMovieTimeDidChangeNotification$QTMovieTimeScaleAttribute$QTMovieURLAttribute$QTMovieUneditableException$QTMovieViewControllerVisibleBinding$QTMovieViewFillColorBinding$QTMovieViewMovieBinding$QTMovieViewPreservesAspectRatioBinding$QTMovieVolumeAttribute$QTMovieVolumeDidChangeNotification$QTSampleBufferDateRecordedAttribute$QTSampleBufferExplicitSceneChange$QTSampleBufferHostTimeAttribute$QTSampleBufferSMPTETimeAttribute$QTSampleBufferSceneChangeTypeAttribute$QTSampleBufferTimeStampDiscontinuitySceneChange$QTTrackBoundsAttribute$QTTrackCreationTimeAttribute$QTTrackDimensionsAttribute$QTTrackDisplayNameAttribute$QTTrackEnabledAttribute$QTTrackFormatSummaryAttribute$QTTrackHasApertureModeDimensionsAttribute$QTTrackIDAttribute$QTTrackIsChapterTrackAttribute$QTTrackLayerAttribute$QTTrackMediaTypeAttribute$QTTrackModificationTimeAttribute$QTTrackRangeAttribute$QTTrackTimeScaleAttribute$QTTrackUsageInMovieAttribute$QTTrackUsageInPosterAttribute$QTTrackUsageInPreviewAttribute$QTTrackVolumeAttribute$''' -constants = constants + '$QTIndefiniteTime@%s$'%(sel32or64('{_QTTime=qll}', '{_QTTime=qqq}'),) -constants = constants + '$QTZeroTime@%s$'%(sel32or64('{_QTTime=qll}', '{_QTTime=qqq}'),) -enums = '''$AliasDataHandlerSubType@'alis'$AudioMediaCharacteristic@'ears'$BaseMediaType@'gnrc'$DataHandlerType@'dhlr'$FlashMediaType@'flsh'$FreeAtomType@'free'$HandleDataHandlerSubType@'hndl'$MAC_OS_X_VERSION_10_4@1040$MAC_OS_X_VERSION_10_5@1050$MAC_OS_X_VERSION_10_6@1060$MAC_OS_X_VERSION_10_7@1070$MPEGMediaType@'MPEG'$MediaHandlerType@'mhlr'$MovieBackwardPointerResourceType@'back'$MovieDataAtomType@'mdat'$MovieFileType@'MooV'$MovieForwardPointerResourceType@'fore'$MovieMediaType@'moov'$MovieResourceAtomType@'moov'$MovieResourceType@'moov'$MovieScrapType@'moov'$MusicMediaType@'musi'$NSINTEGER_DEFINED@1$NullDataHandlerSubType@'null'$PointerDataHandlerSubType@'ptr 
'$QTCaptureDeviceAVCTransportControlsFastForwardSpeed@13000$QTCaptureDeviceAVCTransportControlsFastReverseSpeed@-13000$QTCaptureDeviceAVCTransportControlsFastestForwardSpeed@19000$QTCaptureDeviceAVCTransportControlsFastestReverseSpeed@-19000$QTCaptureDeviceAVCTransportControlsNormalForwardSpeed@10000$QTCaptureDeviceAVCTransportControlsNormalReverseSpeed@-10000$QTCaptureDeviceAVCTransportControlsNotPlayingMode@0$QTCaptureDeviceAVCTransportControlsPlayingMode@1$QTCaptureDeviceAVCTransportControlsSlowForwardSpeed@7000$QTCaptureDeviceAVCTransportControlsSlowReverseSpeed@-7000$QTCaptureDeviceAVCTransportControlsSlowestForwardSpeed@1000$QTCaptureDeviceAVCTransportControlsSlowestReverseSpeed@-1000$QTCaptureDeviceAVCTransportControlsStoppedSpeed@0$QTCaptureDeviceAVCTransportControlsVeryFastForwardSpeed@16000$QTCaptureDeviceAVCTransportControlsVeryFastReverseSpeed@-16000$QTCaptureDeviceAVCTransportControlsVerySlowForwardSpeed@4000$QTCaptureDeviceAVCTransportControlsVerySlowReverseSpeed@-4000$QTCaptureFileOutputBufferDestinationNewFile@1$QTCaptureFileOutputBufferDestinationOldFile@2$QTErrorDeviceAlreadyUsedbyAnotherSession@1101$QTErrorDeviceExcludedByAnotherDevice@1302$QTErrorDeviceInUseByAnotherApplication@1301$QTErrorDeviceNotConnected@1300$QTErrorDeviceWasDisconnected@1203$QTErrorDiskFull@1202$QTErrorExportExecutionFailed@1503$QTErrorExportIOError@1506$QTErrorExportInsufficientSpaceOnDevice@1504$QTErrorExportNoSuchDirectoryOrFile@1505$QTErrorIncompatibleInput@1002$QTErrorIncompatibleOutput@1003$QTErrorInvalidDestinationFileTypeForExport@1501$QTErrorInvalidInputsOrOutputs@1100$QTErrorInvalidSourceFileTypeForExport@1502$QTErrorMaximumDurationReached@1205$QTErrorMaximumFileSizeReached@1206$QTErrorMaximumNumberOfSamplesForFileFormatReached@1208$QTErrorMediaChanged@1204$QTErrorMediaDiscontinuity@1207$QTErrorNoDataCaptured@1200$QTErrorSessionConfigurationChanged@1201$QTErrorUnknown@-1$QTIncludeAggressiveTypes@4$QTIncludeAllTypes@65535$QTIncludeCommonTypes@0$QTIncludeDynamicTypes@8$QTIncludeStillImageTypes@1$QTIncludeTranslatableTypes@2$QTKIT_VERSION_7_0@70000$QTKIT_VERSION_7_2@70200$QTKIT_VERSION_7_6_3@70603$QTKIT_VERSION_7_6_6@70606$QTKIT_VERSION_7_7@70700$QTKIT_VERSION_7_7_2@70702$QTKIT_VERSION_7_7_3@70703$QTMovieLoadStateComplete@100000$QTMovieLoadStateError@-1$QTMovieLoadStateLoaded@2000$QTMovieLoadStateLoading@1000$QTMovieLoadStatePlayable@10000$QTMovieLoadStatePlaythroughOK@20000$QTMovieModernizerStatusCancelled@3$QTMovieModernizerStatusCompletedWithSuccess@5$QTMovieModernizerStatusFailed@4$QTMovieModernizerStatusNotRequired@6$QTMovieModernizerStatusPreparing@1$QTMovieModernizerStatusRunning@2$QTMovieModernizerStatusUnknown@0$QTMovieOperationBeginPhase@0$QTMovieOperationEndPhase@2$QTMovieOperationUpdatePercentPhase@1$QTSampleBufferAudioBufferListOptionAssure16ByteAlignment@1$ResourceDataHandlerSubType@'rsrc'$SkinMediaType@'skin'$SkipAtomType@'skip'$SoundMediaType@'soun'$SpriteMediaType@'sprt'$TextMediaType@'text'$ThreeDeeMediaType@'qd3d'$TimeCode64MediaType@'tc64'$TimeCodeMediaType@'tmcd'$TweenMediaType@'twen'$URLDataHandlerSubType@'url 
'$VideoMediaType@'vide'$VisualMediaCharacteristic@'eyes'$WideAtomPlaceholderType@'wide'$WiredActionHandlerType@'wire'$codecHighQuality@768$codecLosslessQuality@1024$codecLowQuality@256$codecMaxQuality@1023$codecMinQuality@0$codecNormalQuality@512$graphicsModeComposition@259$graphicsModePerComponentAlpha@272$graphicsModePreBlackAlpha@258$graphicsModePreMulColorAlpha@261$graphicsModePreWhiteAlpha@257$graphicsModeStraightAlpha@256$graphicsModeStraightAlphaBlend@260$k16GrayCodecType@'b16g'$k32AlphaGrayCodecType@'b32a'$k422YpCbCr10CodecType@'v210'$k422YpCbCr16CodecType@'v216'$k422YpCbCr8CodecType@'2vuy'$k4444YpCbCrA8CodecType@'v408'$k4444YpCbCrA8RCodecType@'r408'$k444YpCbCr10CodecType@'v410'$k444YpCbCr8CodecType@'v308'$k48RGBCodecType@'b48r'$k64ARGBCodecType@'b64a'$kAVRJPEGCodecType@'avr '$kAnimationCodecType@'rle '$kBMPCodecType@'WRLE'$kBaseCodecType@'base'$kCMYKCodecType@'cmyk'$kCharacteristicCanSendVideo@'vsnd'$kCharacteristicCanStep@'step'$kCharacteristicHasNoDuration@'noti'$kCharacteristicHasSkinData@'skin'$kCharacteristicNonLinear@'nonl'$kCharacteristicProvidesActions@'actn'$kCharacteristicProvidesKeyFocus@'keyf'$kCharacteristicSupportsDisplayOffsets@'dtdd'$kCinepakCodecType@'cvid'$kCloudCodecType@'clou'$kComponentVideoCodecType@'yuv2'$kComponentVideoSigned@'yuvu'$kComponentVideoUnsigned@'yuvs'$kDVCNTSCCodecType@'dvc '$kDVCPALCodecType@'dvcp'$kDVCPROHD1080i50CodecType@'dvh5'$kDVCPROHD1080i60CodecType@'dvh6'$kDVCPROHD1080p25CodecType@'dvh2'$kDVCPROHD1080p30CodecType@'dvh3'$kDVCPROHD720p50CodecType@'dvhq'$kDVCPROHD720p60CodecType@'dvhp'$kDVCPROHD720pCodecType@'dvhp'$kDVCPro100NTSCCodecType@'dv1n'$kDVCPro100PALCodecType@'dv1p'$kDVCPro50NTSCCodecType@'dv5n'$kDVCPro50PALCodecType@'dv5p'$kDVCProPALCodecType@'dvpp'$kFLCCodecType@'flic'$kFireCodecType@'fire'$kGIFCodecType@'gif '$kGraphicsCodecType@'smc '$kH261CodecType@'h261'$kH263CodecType@'h263'$kH264CodecType@'avc1'$kIndeo4CodecType@'IV41'$kJPEG2000CodecType@'mjp2'$kJPEGCodecType@'jpeg'$kMPEG4VisualCodecType@'mp4v'$kMacPaintCodecType@'PNTG'$kMicrosoftVideo1CodecType@'msvc'$kMotionJPEGACodecType@'mjpa'$kMotionJPEGBCodecType@'mjpb'$kMpegYUV420CodecType@'myuv'$kOpenDMLJPEGCodecType@'dmb1'$kPNGCodecType@'png '$kPhotoCDCodecType@'kpcd'$kPixletCodecType@'pxlt'$kPlanarRGBCodecType@'8BPS'$kQTAnimatedGIFLoopCountInfinite@0$kQTFileType3DMF@860114246$kQTFileType3GP2@862416946$kQTFileType3GPP@862417008$kQTFileTypeAIFC@1095321155$kQTFileTypeAIFF@1095321158$kQTFileTypeAMC@1634558752$kQTFileTypeAMR@1634562592$kQTFileTypeAVI@1449547552$kQTFileTypeAudioCDTrack@1953653099$kQTFileTypeBMP@1112363110$kQTFileTypeDVC@1685480225$kQTFileTypeFLC@1179403040$kQTFileTypeFlash@1398228556$kQTFileTypeFlashPix@1179675000$kQTFileTypeGIF@1195984486$kQTFileTypeJFIF@1246774599$kQTFileTypeJPEG@1246774599$kQTFileTypeJPEG2000@1785737760$kQTFileTypeMIDI@1298752617$kQTFileTypeMP4@1836082996$kQTFileTypeMacPaint@1347310663$kQTFileTypeMovie@1299148630$kQTFileTypeMuLaw@1431060823$kQTFileTypePDF@1346651680$kQTFileTypePICS@1346978643$kQTFileTypePNG@1347307366$kQTFileTypePhotoShop@943870035$kQTFileTypePicture@1346978644$kQTFileTypeQuickDrawGXPicture@1902405496$kQTFileTypeQuickTimeImage@1903454566$kQTFileTypeSDV@1935963680$kQTFileTypeSGIImage@777209673$kQTFileTypeSoundDesignerII@1399075430$kQTFileTypeSystemSevenSound@1936091500$kQTFileTypeTIFF@1414088262$kQTFileTypeTargaImage@1414547779$kQTFileTypeText@1413830740$kQTFileTypeWave@1463899717$kQTQuartzComposerMediaType@'qtz '$kQTTimeIsIndefinite@1$kQuickDrawCodecType@'qdrw'$kQuickDrawGXCodecType@'qdgx'$kRawCodecType@'raw 
'$kSGICodecType@'.SGI'$kSorenson3CodecType@'SVQ3'$kSorensonCodecType@'SVQ1'$kSorensonYUV9CodecType@'syv9'$kTIFFCodecType@'tiff'$kTargaCodecType@'tga '$kUserDataAnimatedGIFBufferingSize@'gifb'$kUserDataAnimatedGIFLoopCount@'gifc'$kUserDataMovieControllerType@'ctyp'$kUserDataName@'name'$kUserDataTextAlbum@2841734242$kUserDataTextArtist@2839630420$kUserDataTextAuthor@2841736564$kUserDataTextChapter@2841864304$kUserDataTextComment@2841865588$kUserDataTextComposer@2841866093$kUserDataTextCopyright@2841866361$kUserDataTextCreationDate@2841928057$kUserDataTextDescription@2841929075$kUserDataTextDirector@2841930098$kUserDataTextDisclaimer@2841930099$kUserDataTextEditDate1@2841994289$kUserDataTextEncodedBy@2841996899$kUserDataTextFullName@2842583405$kUserDataTextGenre@2842125678$kUserDataTextHostComputer@2842194804$kUserDataTextInformation@2842259046$kUserDataTextKeywords@2842387833$kUserDataTextMake@2842517867$kUserDataTextModel@2842521444$kUserDataTextOriginalArtist@2842652773$kUserDataTextOriginalFormat@2842062196$kUserDataTextOriginalSource@2842915427$kUserDataTextPerformers@2842718822$kUserDataTextProducer@2842718820$kUserDataTextProduct@2840613444$kUserDataTextPublisher@2842719586$kUserDataTextSoftware@2842916722$kUserDataTextSpecialPlaybackRequirements@2842846577$kUserDataTextTrack@2842980971$kUserDataTextURLLink@2843046508$kUserDataTextWarning@2843177582$kUserDataTextWriter@2843177588$kVectorCodecType@'path'$kVideoCodecType@'rpza'$kWaterRippleCodecType@'ripl'$kWindowsRawCodecType@'WRAW'$kYUV420CodecType@'y420'$''' -misc.update({}) -functions={'QTStringForOSType': (sel32or64(b'@L', b'@I'),), 'QTMakeTimeWithTimeRecord': (sel32or64(b'{_QTTime=qll}{TimeRecord={wide=Ll}l^{TimeBaseRecord=}}', b'{_QTTime=qqq}{TimeRecord={wide=Ii}i^{TimeBaseRecord=}}'),), 'QTMakeTimeRange': (sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}{_QTTime=qll}{_QTTime=qll}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}{_QTTime=qqq}{_QTTime=qqq}'),), 'QTStringFromTime': (sel32or64(b'@{_QTTime=qll}', b'@{_QTTime=qqq}'),), 'QTTimeCompare': (sel32or64(b'i{_QTTime=qll}{_QTTime=qll}', b'q{_QTTime=qqq}{_QTTime=qqq}'),), 'QTTimeDecrement': (sel32or64(b'{_QTTime=qll}{_QTTime=qll}{_QTTime=qll}', b'{_QTTime=qqq}{_QTTime=qqq}{_QTTime=qqq}'),), 'QTMakeTimeWithTimeInterval': (sel32or64(b'{_QTTime=qll}d', b'{_QTTime=qqq}d'),), 'QTStringFromTimeRange': (sel32or64(b'@{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'@{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}'),), 'QTOSTypeForString': (sel32or64(b'L@', b'I@'),), 'QTTimeRangeEnd': (sel32or64(b'{_QTTime=qll}{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTime=qqq}{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}'),), 'QTTimeFromString': (sel32or64(b'{_QTTime=qll}@', b'{_QTTime=qqq}@'),), 'QTGetTimeInterval': (sel32or64(b'Z{_QTTime=qll}^d', b'Z{_QTTime=qqq}^d'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'QTTimeInTimeRange': (sel32or64(b'Z{_QTTime=qll}{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'Z{_QTTime=qqq}{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}'),), 'QTUnionTimeRange': (sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}'),), 'QTMakeTimeScaled': (sel32or64(b'{_QTTime=qll}{_QTTime=qll}l', b'{_QTTime=qqq}{_QTTime=qqq}q'),), 'QTTimeIncrement': (sel32or64(b'{_QTTime=qll}{_QTTime=qll}{_QTTime=qll}', b'{_QTTime=qqq}{_QTTime=qqq}{_QTTime=qqq}'),), 'QTTimeIsIndefinite': 
(sel32or64(b'Z{_QTTime=qll}', b'Z{_QTTime=qqq}'),), 'QTStringFromSMPTETime': (sel32or64(b'@{SMPTETime=ssLLLssss}', b'@{SMPTETime=ssIIIssss}'),), 'QTMakeTime': (sel32or64(b'{_QTTime=qll}ql', b'{_QTTime=qqq}qq'),), 'QTTimeRangeFromString': (sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}@', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}@'),), 'QTEqualTimeRanges': (sel32or64(b'Z{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'Z{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}'),), 'QTIntersectionTimeRange': (sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}'),), 'QTGetTimeRecord': (sel32or64(b'Z{_QTTime=qll}^{TimeRecord={wide=Ii}i^{TimeBaseRecord}}', b'Z{_QTTime=qqq}^{TimeRecord={wide=Ii}i^{TimeBaseRecord}}'), '', {'arguments': {1: {'type_modifier': 'o'}}})} -aliases = {'AVAILABLE_QTKIT_VERSION_7_0_AND_LATER_BUT_DEPRECATED_IN_QTKIT_VERSION_7_2': 'AVAILABLE_QTKIT_VERSION_7_0_AND_LATER', 'AVAILABLE_QTKIT_VERSION_7_0_AND_LATER_BUT_DEPRECATED_IN_QTKIT_VERSION_7_6_3': 'AVAILABLE_QTKIT_VERSION_7_0_AND_LATER', 'AVAILABLE_QTKIT_VERSION_7_0_AND_LATER_BUT_DEPRECATED_IN_QTKIT_VERSION_7_7_3': 'AVAILABLE_QTKIT_VERSION_7_0_AND_LATER', 'QTMovieOperationEndPhase': 'movieProgressClose', 'AVAILABLE_QTKIT_VERSION_7_2_AND_LATER': 'WEAK_IMPORT_ATTRIBUTE', 'AVAILABLE_QTKIT_VERSION_7_6_3_AND_LATER_BUT_DEPRECATED_IN_QTKIT_VERSION_7_7_3': 'AVAILABLE_QTKIT_VERSION_7_6_3_AND_LATER', 'AVAILABLE_QTKIT_VERSION_7_7_AND_LATER': 'WEAK_IMPORT_ATTRIBUTE', 'NSIntegerMax': 'LONG_MAX', 'AVAILABLE_QTKIT_VERSION_7_2_AND_LATER_BUT_DEPRECATED_IN_QTKIT_VERSION_7_7': 'AVAILABLE_QTKIT_VERSION_7_2_AND_LATER', 'AVAILABLE_QTKIT_VERSION_7_6_6_AND_LATER_BUT_DEPRECATED_IN_QTKIT_VERSION_7_7_3': 'AVAILABLE_QTKIT_VERSION_7_6_6_AND_LATER', 'QTMovieOperationBeginPhase': 'movieProgressOpen', 'AVAILABLE_QTKIT_VERSION_7_2_AND_LATER_BUT_DEPRECATED_IN_QTKIT_VERSION_7_6_3': 'AVAILABLE_QTKIT_VERSION_7_2_AND_LATER', 'AVAILABLE_QTKIT_VERSION_7_7_2_AND_LATER': 'WEAK_IMPORT_ATTRIBUTE', 'AVAILABLE_QTKIT_VERSION_7_2_AND_LATER_BUT_DEPRECATED_IN_QTKIT_VERSION_7_7_3': 'AVAILABLE_QTKIT_VERSION_7_2_AND_LATER', 'AVAILABLE_QTKIT_VERSION_7_6_6_AND_LATER': 'WEAK_IMPORT_ATTRIBUTE', 'QTMovieOperationUpdatePercentPhase': 'movieProgressUpdatePercent', 'NSUIntegerMax': 'ULONG_MAX', 'QTKIT_VERSION_MIN_REQUIRED': 'QTKIT_VERSION_7_0', 'AVAILABLE_QTKIT_VERSION_7_7_3_AND_LATER': 'WEAK_IMPORT_ATTRIBUTE', 'NSIntegerMin': 'LONG_MIN', 'AVAILABLE_QTKIT_VERSION_7_6_3_AND_LATER': 'WEAK_IMPORT_ATTRIBUTE', 'AVAILABLE_QTKIT_VERSION_7_0_AND_LATER': 'WEAK_IMPORT_ATTRIBUTE', 'AVAILABLE_QTKIT_VERSION_7_7_AND_LATER_BUT_DEPRECATED_IN_QTKIT_VERSION_7_7_3': 'AVAILABLE_QTKIT_VERSION_7_7_AND_LATER'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSCoder', b'decodeQTTimeForKey:', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'NSCoder', b'decodeQTTimeRangeForKey:', {'retval': {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}) - r(b'NSCoder', b'encodeQTTime:forKey:', {'arguments': {2: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'NSCoder', b'encodeQTTimeRange:forKey:', {'arguments': {2: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', 
b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'NSObject', b'captureOutput:shouldChangeOutputFileAtURL:forConnections:dueToError:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'exportSession:didReachProgess:', {'arguments': {3: {'type': 'd'}}}) - r(b'NSObject', b'movie:linkToURL:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'movie:shouldContinueOperation:withPhase:atPercent:withAttributes:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type': 'i'}}}) - r(b'NSObject', b'movieShouldLoadData:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'movieShouldTask:', {'retval': {'type': 'Z'}}) - r(b'NSValue', b'QTTimeRangeValue', {'retval': {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}) - r(b'NSValue', b'QTTimeValue', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'NSValue', b'valueWithQTTime:', {'arguments': {2: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'NSValue', b'valueWithQTTimeRange:', {'arguments': {2: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'QTCaptureConnection', b'attributeIsReadOnly:', {'retval': {'type': 'Z'}}) - r(b'QTCaptureConnection', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'QTCaptureConnection', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTCaptureDecompressedVideoOutput', b'automaticallyDropsLateVideoFrames', {'retval': {'type': b'Z'}}) - r(b'QTCaptureDecompressedVideoOutput', b'setAutomaticallyDropsLateVideoFrames:', {'arguments': {2: {'type': b'Z'}}}) - r(b'QTCaptureDevice', b'attributeIsReadOnly:', {'retval': {'type': 'Z'}}) - r(b'QTCaptureDevice', b'hasMediaType:', {'retval': {'type': 'Z'}}) - r(b'QTCaptureDevice', b'isConnected', {'retval': {'type': 'Z'}}) - r(b'QTCaptureDevice', b'isInUseByAnotherApplication', {'retval': {'type': 'Z'}}) - r(b'QTCaptureDevice', b'isOpen', {'retval': {'type': 'Z'}}) - r(b'QTCaptureDevice', b'open:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'QTCaptureFileOutput', b'isRecordingPaused', {'retval': {'type': b'Z'}}) - r(b'QTCaptureFileOutput', b'maximumRecordedDuration', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTCaptureFileOutput', b'recordedDuration', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTCaptureFileOutput', b'setMaximumRecordedDuration:', {'arguments': {2: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTCaptureSession', b'addInput:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTCaptureSession', b'addOutput:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTCaptureSession', b'isRunning', {'retval': {'type': 'Z'}}) - r(b'QTCaptureView', b'preservesAspectRatio', {'retval': {'type': 'Z'}}) - r(b'QTCaptureView', b'setPreservesAspectRatio:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTCompressionOptions', b'isEqualToCompressionOptions:', {'retval': {'type': 'Z'}}) - r(b'QTDataReference', b'dataRef', {'retval': {'type': '^*'}}) - r(b'QTDataReference', b'dataReferenceWithDataRef:type:', {'arguments': {2: {'type': '^*'}}}) - r(b'QTDataReference', b'initWithDataRef:type:', {'arguments': {2: {'type': '^*'}}}) - r(b'QTDataReference', b'setDataRef:', {'arguments': {2: {'type': '^*'}}}) - r(b'QTExportSession', b'initWithMovie:exportOptions:outputURL:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'QTExportSession', b'isCancelled', 
{'retval': {'type': 'Z'}}) - r(b'QTExportSession', b'isFinished', {'retval': {'type': 'Z'}}) - r(b'QTExportSession', b'isRunning', {'retval': {'type': 'Z'}}) - r(b'QTExportSession', b'waitUntilFinished:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'QTFormatDescription', b'isEqualToFormatDescription:', {'retval': {'type': 'Z'}}) - r(b'QTMedia', b'hasCharacteristic:', {'retval': {'type': 'Z'}}) - r(b'QTMedia', b'initWithQuickTimeMedia:error:', {'arguments': {2: {'type': '^^{MediaType}'}, 3: {'type_modifier': b'o'}}}) - r(b'QTMedia', b'mediaWithQuickTimeMedia:error:', {'arguments': {2: {'type': '^^{MediaType}'}, 3: {'type_modifier': b'o'}}}) - r(b'QTMedia', b'quickTimeMedia', {'retval': {'type': '^^{MediaType}'}}) - r(b'QTMovie', b'addChapters:withAttributes:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'addImage:forDuration:withAttributes:', {'arguments': {3: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTMovie', b'attachToCurrentThread', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'canInitWithDataReference:', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'canInitWithFile:', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'canInitWithPasteboard:', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'canInitWithURL:', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'canUpdateMovieFile', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'chapterIndexForTime:', {'arguments': {2: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTMovie', b'currentTime', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTMovie', b'deleteSegment:', {'arguments': {2: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'QTMovie', b'detachFromCurrentThread', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'duration', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTMovie', b'frameImageAtTime:', {'arguments': {2: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTMovie', b'frameImageAtTime:withAttributes:error:', {'retval': {'type': '^v'}, 'arguments': {2: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}, 4: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'hasChapters', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'initToWritableData:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initToWritableDataReference:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initToWritableFile:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initWithAttributes:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initWithData:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initWithDataReference:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initWithFile:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initWithMovie:timeRange:error:', {'arguments': {3: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 4: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initWithPasteboard:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initWithQuickTimeMovie:disposeWhenDone:error:', {'arguments': {2: {'type': '^^{MovieType}'}, 3: {'type': 'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'initWithURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'insertEmptySegmentAt:', {'arguments': {2: 
{'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'QTMovie', b'insertSegmentOfMovie:fromRange:scaledToRange:', {'arguments': {3: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 4: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'QTMovie', b'insertSegmentOfMovie:timeRange:atTime:', {'arguments': {3: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 4: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTMovie', b'insertSegmentOfTrack:fromRange:scaledToRange:', {'arguments': {3: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 4: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'QTMovie', b'insertSegmentOfTrack:timeRange:atTime:', {'arguments': {3: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 4: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTMovie', b'isIdling', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'movieNamed:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'movieWithAttributes:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'movieWithData:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'movieWithDataReference:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'movieWithFile:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'movieWithPasteboard:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'movieWithQuickTimeMovie:disposeWhenDone:error:', {'arguments': {2: {'type': '^^{MovieType}'}, 3: {'type': 'Z'}, 4: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'movieWithTimeRange:error:', {'arguments': {2: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'movieWithURL:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovie', b'muted', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'quickTimeMovie', {'retval': {'type': '^^{MovieType}'}}) - r(b'QTMovie', b'quickTimeMovieController', {'retval': {'type': '^{ComponentInstanceRecord=[1l]}'}}) - r(b'QTMovie', b'removeChapters', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'scaleSegment:newDuration:', {'arguments': {2: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 3: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTMovie', b'selectionDuration', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTMovie', b'selectionEnd', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTMovie', b'selectionStart', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTMovie', b'setCurrentTime:', {'arguments': {2: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTMovie', b'setIdling:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovie', b'setMuted:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovie', b'setSelection:', {'arguments': {2: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - 
r(b'QTMovie', b'setVisualContext:', {'arguments': {2: {'type': '^{OpaqueQTVisualContext=}'}}}) - r(b'QTMovie', b'startTimeOfChapter:', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTMovie', b'trackByInsertingSegmentOfTrack:fromRange:scaledToRange:', {'arguments': {3: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 4: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'QTMovie', b'trackByInsertingSegmentOfTrack:timeRange:atTime:', {'arguments': {3: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 4: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTMovie', b'updateMovieFile', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'visualContext', {'retval': {'type': '^{OpaqueQTVisualContext=}'}}) - r(b'QTMovie', b'writeToFile:withAttributes:', {'retval': {'type': 'Z'}}) - r(b'QTMovie', b'writeToFile:withAttributes:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'QTMovieModernizer', b'modernizeWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'QTMovieModernizer', b'requiresModernization:error:', {'retval': {'type': b'Z'}}) - r(b'QTMovieModernizer', b'requiresModernization:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTMovieView', b'areStepButtonsVisible', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'areZoomButtonsVisible', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'isBackButtonVisible', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'isControllerVisible', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'isCustomButtonVisible', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'isEditable', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'isHotSpotButtonVisible', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'isTranslateButtonVisible', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'isVolumeButtonVisible', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'preservesAspectRatio', {'retval': {'type': 'Z'}}) - r(b'QTMovieView', b'setBackButtonVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setControllerVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setCustomButtonVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setEditable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setHotSpotButtonVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setPreservesAspectRatio:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setShowsResizeIndicator:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setStepButtonsVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setTranslateButtonVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setVolumeButtonVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTMovieView', b'setZoomButtonsVisible:', {'arguments': {2: {'type': 'Z'}}}) - r(b'QTSampleBuffer', b'decodeTime', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTSampleBuffer', b'duration', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) - r(b'QTSampleBuffer', b'getAudioStreamPacketDescriptions:inRange:', {'retval': {'type': 'Z'}}) - r(b'QTSampleBuffer', b'presentationTime', {'retval': {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}) 
- r(b'QTTrack', b'trackWithQuickTimeTrack:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTTrack', b'initWithQuickTimeTrack:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'QTTrack', b'addImage:forDuration:withAttributes:', {'arguments': {3: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTTrack', b'deleteSegment:', {'arguments': {2: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'QTTrack', b'insertEmptySegmentAt:', {'arguments': {2: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'QTTrack', b'insertSegmentOfTrack:fromRange:scaledToRange:', {'arguments': {3: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 4: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}}}) - r(b'QTTrack', b'insertSegmentOfTrack:timeRange:atTime:', {'arguments': {3: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 4: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTTrack', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'QTTrack', b'scaleSegment:newDuration:', {'arguments': {2: {'type': sel32or64(b'{_QTTimeRange={_QTTime=qll}{_QTTime=qll}}', b'{_QTTimeRange={_QTTime=qqq}{_QTTime=qqq}}')}, 3: {'type': sel32or64(b'{_QTTime=qll}', b'{_QTTime=qqq}')}}}) - r(b'QTTrack', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) -finally: - objc._updatingMetadata(False) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'captureOutput:didDropVideoFrameWithSampleBuffer:fromConnection:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didFinishRecordingToOutputFileAtURL:forConnections:dueToError:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didOutputAudioSampleBuffer:fromConnection:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didOutputSampleBuffer:fromConnection:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'^{__CVBuffer=}'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'^{__CVBuffer=}'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didPauseRecordingToOutputFileAtURL:forConnections:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didResumeRecordingToOutputFileAtURL:forConnections:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:didStartRecordingToOutputFileAtURL:forConnections:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:mustChangeOutputFileAtURL:forConnections:dueToError:', 
{'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:shouldChangeOutputFileAtURL:forConnections:dueToError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:willFinishRecordingToOutputFileAtURL:forConnections:dueToError:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'captureOutput:willStartRecordingToOutputFileAtURL:forConnections:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'delegate', {'retval': {'type': b'@'}}) - r(b'NSObject', b'menuForEventDelegate:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setDelegate:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'view:willDisplayImage:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'view:willDisplayImage:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -protocols={'QTCaptureDecompressedVideoOutput_Delegate': objc.informal_protocol('QTCaptureDecompressedVideoOutput_Delegate', [objc.selector(None, b'captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:', b'v@:@^{__CVBuffer=}@@', isRequired=False), objc.selector(None, b'captureOutput:didDropVideoFrameWithSampleBuffer:fromConnection:', b'v@:@@@', isRequired=False)]), 'QTMovieView_Delegate': objc.informal_protocol('QTMovieView_Delegate', [objc.selector(None, b'view:willDisplayImage:', b'@@:@@', isRequired=False), objc.selector(None, b'menuForEventDelegate:', b'@@:@', isRequired=False)]), 'QTCaptureVideoPreviewOutput_Delegate': objc.informal_protocol('QTCaptureVideoPreviewOutput_Delegate', [objc.selector(None, b'captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:', b'v@:@^{__CVBuffer=}@@', isRequired=False)]), 'QTCaptureView_Delegate': objc.informal_protocol('QTCaptureView_Delegate', [objc.selector(None, b'view:willDisplayImage:', b'@@:@@', isRequired=False)]), 'QTCaptureDecompressedAudioOutput_Delegate': objc.informal_protocol('QTCaptureDecompressedAudioOutput_Delegate', [objc.selector(None, b'captureOutput:didOutputAudioSampleBuffer:fromConnection:', b'v@:@@@', isRequired=False)]), 'QTCaptureFileOutput_Delegate': objc.informal_protocol('QTCaptureFileOutput_Delegate', [objc.selector(None, b'captureOutput:didOutputSampleBuffer:fromConnection:', b'v@:@@@', isRequired=False), objc.selector(None, b'captureOutput:didResumeRecordingToOutputFileAtURL:forConnections:', b'v@:@@@', isRequired=False), objc.selector(None, b'captureOutput:shouldChangeOutputFileAtURL:forConnections:dueToError:', b'Z@:@@@@', isRequired=False), objc.selector(None, b'captureOutput:didStartRecordingToOutputFileAtURL:forConnections:', b'v@:@@@', isRequired=False), objc.selector(None, b'captureOutput:willFinishRecordingToOutputFileAtURL:forConnections:dueToError:', b'v@:@@@@', isRequired=False), objc.selector(None, b'captureOutput:didPauseRecordingToOutputFileAtURL:forConnections:', b'v@:@@@', isRequired=False), objc.selector(None, b'captureOutput:mustChangeOutputFileAtURL:forConnections:dueToError:', b'v@:@@@@', isRequired=False), objc.selector(None, b'captureOutput:didFinishRecordingToOutputFileAtURL:forConnections:dueToError:', b'v@:@@@@', 
isRequired=False), objc.selector(None, b'captureOutput:willStartRecordingToOutputFileAtURL:forConnections:', b'v@:@@@', isRequired=False)]), 'QTMovie_Delegate': objc.informal_protocol('QTMovie_Delegate', [objc.selector(None, b'setDelegate:', b'v@:@', isRequired=False), objc.selector(None, b'delegate', b'@@:', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/QTKit/_metadata.pyc b/env/lib/python2.7/site-packages/QTKit/_metadata.pyc deleted file mode 100644 index 766ec0a0..00000000 Binary files a/env/lib/python2.7/site-packages/QTKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/__init__.py b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/__init__.py deleted file mode 100644 index e1ee0f22..00000000 --- a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/__init__.py +++ /dev/null @@ -1,112 +0,0 @@ -''' -Python mapping for the CoreGraphics framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import CoreFoundation -import Foundation - -from Quartz.CoreGraphics import _metadata -from Quartz.CoreGraphics._inlines import _inline_list_ - - -sys.modules['Quartz.CoreGraphics'] = mod = objc.ObjCLazyModule('Quartz.CoreGraphics', - "com.apple.CoreGraphics", - objc.pathForFramework("/System/Library/Frameworks/ApplicationServices.framework/Frameworks/CoreGraphics.framework"), - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( CoreFoundation,)) - -import sys -del sys.modules['Quartz.CoreGraphics._metadata'] - - -def _load(mod): - import Quartz - Quartz.CoreGraphics = mod - - # XXX: CGFLOAT_MIN is a #define for FLT_MIN or DBL_MIN, which isn't detected properly - # by the metadata script. 
- import sys - - if sys.maxsize > 1 <<32: - mod.CGFLOAT_MIN=1.1754943508222875e-38 - mod.CGFLOAT_MAX=3.4028234663852886e+38 - else: - mod.CGFLOAT_MIN=2.2250738585072014e-308 - mod.CGFLOAT_MAX=1.7976931348623157e+308 - - import Quartz.CoreGraphics._callbacks as m - for nm in dir(m): - if nm.startswith('_'): continue - setattr(mod, nm, getattr(m, nm)) - import Quartz.CoreGraphics._doubleindirect as m - for nm in dir(m): - if nm.startswith('_'): continue - setattr(mod, nm, getattr(m, nm)) - import Quartz.CoreGraphics._sortandmap as m - for nm in dir(m): - if nm.startswith('_'): continue - setattr(mod, nm, getattr(m, nm)) - import Quartz.CoreGraphics._coregraphics as m - for nm in dir(m): - if nm.startswith('_'): continue - setattr(mod, nm, getattr(m, nm)) - import Quartz.CoreGraphics._contextmanager as m - for nm in dir(m): - if nm.startswith('_'): continue - setattr(mod, nm, getattr(m, nm)) - - mod.setCGPathElement(mod.CGPathElement) - del mod.setCGPathElement - - # a #define - mod.kCGEventFilterMaskPermitAllEvents = ( - mod.kCGEventFilterMaskPermitLocalMouseEvents | - mod.kCGEventFilterMaskPermitLocalKeyboardEvents | - mod.kCGEventFilterMaskPermitSystemDefinedEvents) - - def CGEventMaskBit(eventType): - return (1 << eventType) - mod.CGEventMaskBit = CGEventMaskBit - - mod.kCGColorSpaceUserGray = "kCGColorSpaceUserGray" - mod.kCGColorSpaceUserRGB = "kCGColorSpaceUserRGB" - mod.kCGColorSpaceUserCMYK = "kCGColorSpaceUserCMYK" - - - # Some pseudo-constants - mod.kCGBaseWindowLevel = mod.CGWindowLevelForKey(mod.kCGBaseWindowLevelKey) - mod.kCGMinimumWindowLevel = mod.CGWindowLevelForKey(mod.kCGMinimumWindowLevelKey) - mod.kCGDesktopWindowLevel = mod.CGWindowLevelForKey(mod.kCGDesktopWindowLevelKey) - mod.kCGDesktopIconWindowLevel = mod.CGWindowLevelForKey(mod.kCGDesktopIconWindowLevelKey) - mod.kCGBackstopMenuLevel = mod.CGWindowLevelForKey(mod.kCGBackstopMenuLevelKey) - mod.kCGNormalWindowLevel = mod.CGWindowLevelForKey(mod.kCGNormalWindowLevelKey) - mod.kCGFloatingWindowLevel = mod.CGWindowLevelForKey(mod.kCGFloatingWindowLevelKey) - mod.kCGTornOffMenuWindowLevel = mod.CGWindowLevelForKey(mod.kCGTornOffMenuWindowLevelKey) - mod.kCGDockWindowLevel = mod.CGWindowLevelForKey(mod.kCGDockWindowLevelKey) - mod.kCGMainMenuWindowLevel = mod.CGWindowLevelForKey(mod.kCGMainMenuWindowLevelKey) - mod.kCGStatusWindowLevel = mod.CGWindowLevelForKey(mod.kCGStatusWindowLevelKey) - mod.kCGModalPanelWindowLevel = mod.CGWindowLevelForKey(mod.kCGModalPanelWindowLevelKey) - mod.kCGPopUpMenuWindowLevel = mod.CGWindowLevelForKey(mod.kCGPopUpMenuWindowLevelKey) - mod.kCGDraggingWindowLevel = mod.CGWindowLevelForKey(mod.kCGDraggingWindowLevelKey) - mod.kCGScreenSaverWindowLevel = mod.CGWindowLevelForKey(mod.kCGScreenSaverWindowLevelKey) - mod.kCGCursorWindowLevel = mod.CGWindowLevelForKey(mod.kCGCursorWindowLevelKey) - mod.kCGOverlayWindowLevel = mod.CGWindowLevelForKey(mod.kCGOverlayWindowLevelKey) - mod.kCGHelpWindowLevel = mod.CGWindowLevelForKey(mod.kCGHelpWindowLevelKey) - mod.kCGUtilityWindowLevel = mod.CGWindowLevelForKey(mod.kCGUtilityWindowLevelKey) - mod.kCGAssistiveTechHighWindowLevel = mod.CGWindowLevelForKey(mod.kCGAssistiveTechHighWindowLevelKey) - mod.kCGMaximumWindowLevel = mod.CGWindowLevelForKey(mod.kCGMaximumWindowLevelKey) - - mod.CGSetLocalEventsFilterDuringSupressionState = mod.CGSetLocalEventsFilterDuringSuppressionState - - mod.kCGAnyInputEventType = 0xffffffff - - -_load(mod) diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/__init__.pyc 
b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/__init__.pyc deleted file mode 100644 index 86e10b21..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_callbacks.so b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_callbacks.so deleted file mode 100755 index 7828ead5..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_callbacks.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_contextmanager.py b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_contextmanager.py deleted file mode 100644 index 4bb13913..00000000 --- a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_contextmanager.py +++ /dev/null @@ -1,95 +0,0 @@ -""" -This module defines a number of context managers. These are meant to be used -in the context of the with statement (introduced in Python 2.5). -""" -__all__ = ('CGSavedGState', 'CGTransparencyLayer', 'CGContextPage') -import Quartz.CoreGraphics as CG - -class CGSavedGState (object): - """ - Context manager for saving and restoring the graphics state. - - Usage:: - - with CGSavedGState(context): - statement - - This is equivalent to: - CGContextSaveGState(context) - try: - statement - - finally: - CGContextRestoreGState(context) - """ - def __init__(self, context): - self.context = context - - def __enter__(self): - CG.CGContextSaveGState(self.context) - return self - - def __exit__(self, exc_type, exc_value, exc_tp): - CG.CGContextRestoreGState(self.context) - return False - -class CGTransparencyLayer (object): - """ - Context manager for working in a transparancylayer. - - Usage:: - - with CGTransparencyLayer(context, info [, rect]): - statement - - This is equivalent to: - CGContextBeginTransparencyLayer(context, info) - try: - statement - - finally: - CGContextEndTransparencyLayer(context) - """ - def __init__(self, context, info, rect = None): - self.context = context - self.info = info - self.rect = rect - - def __enter__(self): - if self.rect is None: - result = CG.CGContextBeginTransparencyLayer(self.context, self.info) - else: - result = CG.CGContextBeginTransparencyLayerWithRect(self.context, self.rect, self.info) - return result - - def __exit__(self, exc_type, exc_value, exc_tp): - CG.CGContextEndTransparencyLayer(self.context) - return False - -class CGContextPage (object): - """ - Context manager for saving and restoring the graphics state. 
- - Usage:: - - with CGContextPage(context): - statement - - This is equivalent to: - CGContextBeginPage(context, None) - try: - statement - - finally: - CGContextEndPage(context) - """ - def __init__(self, context, mediaBox = None): - self.context = context - self.mediaBox = mediaBox - - def __enter__(self): - mediaRect = CG.CGContextBeginPage(self.context, self.mediaBox) - - def __exit__(self, exc_type, exc_value, exc_tp): - CG.CGContextEndPage(self.context) - return False diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_contextmanager.pyc b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_contextmanager.pyc deleted file mode 100644 index bea7abb8..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_contextmanager.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_coregraphics.so b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_coregraphics.so deleted file mode 100755 index d2d471d8..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_coregraphics.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_doubleindirect.so b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_doubleindirect.so deleted file mode 100755 index 7d38af15..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_doubleindirect.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_inlines.so b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_inlines.so deleted file mode 100755 index 0d82bb89..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_metadata.py b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_metadata.py deleted file mode 100644 index 66fd8734..00000000 --- a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_metadata.py +++ /dev/null @@ -1,35 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Aug 19 18:07:18 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'CGFunctionCallbacks': objc.createStructType('CGFunctionCallbacks', b'{CGFunctionCallbacks=I^?^?}', []), 'CGDeviceByteColor': objc.createStructType('CGDeviceByteColor', b'{CGDeviceByteColor=CCC}', ['red', 'green', 'blue']), 'CGPoint': objc.registerStructAlias(sel32or64(b'{CGPoint=ff}', b'{CGPoint=dd}'), objc._resolve_name('Foundation.NSPoint')), 'CGDataProviderCallbacks': objc.createStructType('CGDataProviderCallbacks', b'{CGDataProviderCallbacks=^?^?^?^?}', []), 'CGDataProviderSequentialCallbacks': objc.createStructType('CGDataProviderSequentialCallbacks', b'{CGDataProviderSequentialCallbacks=I^?^?^?^?}', []), 'CGPatternCallbacks': objc.createStructType('CGPatternCallbacks', b'{CGPatternCallbacks=I^?^?}', []), 'CGSize': objc.registerStructAlias(sel32or64(b'{CGSize=ff}', b'{CGSize=dd}'), objc._resolve_name('Foundation.NSSize')), 'CGDeviceColor': objc.createStructType('CGDeviceColor', b'{CGDeviceColor=fff}', ['red', 'green', 'blue']), 'CGDataProviderDirectAccessCallbacks': objc.createStructType('CGDataProviderDirectAccessCallbacks', b'{CGDataProviderDirectAccessCallbacks=^?^?^?^?}', []), 'CGAffineTransform': objc.createStructType('CGAffineTransform', 
sel32or64(b'{CGAffineTransform=ffffff}', b'{CGAffineTransform=dddddd}'), ['a', 'b', 'c', 'd', 'tx', 'ty']), 'CGDataConsumerCallbacks': objc.createStructType('CGDataConsumerCallbacks', b'{CGDataConsumerCallbacks=^?^?}', []), 'CGVector': objc.createStructType('CGVector', sel32or64(b'{CGVector=ff}', b'{CGVector=dd}'), ['dx', 'dy']), 'CGPathElement': objc.createStructType('CGPathElement', sel32or64(b'{CGPathElement=I^{CGPoint=ff}}', b'{CGPathElement=I^{CGPoint=dd}}'), ['type', 'points']), 'CGScreenUpdateMoveDelta': objc.createStructType('CGScreenUpdateMoveDelta', b'{CGScreenUpdateMoveDelta=ii}', ['dX', 'dY']), 'CGEventTapInformation': objc.createStructType('CGEventTapInformation', b'{__CGEventTapInformation=IIIQiiBfff}', ['eventTapID', 'tapPoint', 'options', 'eventsOfInterest', 'tappingProcess', 'processBeingTapped', 'enabled', 'minUsecLatency', 'avgUsecLatency', 'maxUsecLatency']), 'CGPSConverterCallbacks': objc.createStructType('CGPSConverterCallbacks', b'{CGPSConverterCallbacks=I^?^?^?^?^?^?^?}', []), 'CGRect': objc.registerStructAlias(sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}'), objc._resolve_name('Foundation.NSRect'))}) -constants = '''$kCGColorBlack@^{__CFString=}$kCGColorClear@^{__CFString=}$kCGColorConversionBlackPointCompensation$kCGColorConversionTRCSize$kCGColorSpaceACESCGLinear$kCGColorSpaceAdobeRGB1998@^{__CFString=}$kCGColorSpaceDCIP3$kCGColorSpaceDisplayP3$kCGColorSpaceExtendedGray$kCGColorSpaceExtendedLinearGray$kCGColorSpaceExtendedLinearSRGB$kCGColorSpaceExtendedSRGB$kCGColorSpaceGenericCMYK@^{__CFString=}$kCGColorSpaceGenericGray@^{__CFString=}$kCGColorSpaceGenericGrayGamma2_2@^{__CFString=}$kCGColorSpaceGenericLab$kCGColorSpaceGenericRGB@^{__CFString=}$kCGColorSpaceGenericRGBLinear@^{__CFString=}$kCGColorSpaceGenericXYZ$kCGColorSpaceITUR_2020$kCGColorSpaceITUR_709$kCGColorSpaceLinearGray$kCGColorSpaceLinearSRGB$kCGColorSpaceROMMRGB$kCGColorSpaceSRGB@^{__CFString=}$kCGColorWhite@^{__CFString=}$kCGDisplayShowDuplicateLowResolutionModes@^{__CFString=}$kCGDisplayStreamColorSpace@^{__CFString=}$kCGDisplayStreamDestinationRect@^{__CFString=}$kCGDisplayStreamMinimumFrameTime@^{__CFString=}$kCGDisplayStreamPreserveAspectRatio@^{__CFString=}$kCGDisplayStreamQueueDepth@^{__CFString=}$kCGDisplayStreamShowCursor@^{__CFString=}$kCGDisplayStreamSourceRect@^{__CFString=}$kCGDisplayStreamYCbCrMatrix@^{__CFString=}$kCGDisplayStreamYCbCrMatrix_ITU_R_601_4@^{__CFString=}$kCGDisplayStreamYCbCrMatrix_ITU_R_709_2@^{__CFString=}$kCGDisplayStreamYCbCrMatrix_SMPTE_240M_1995@^{__CFString=}$kCGFontVariationAxisDefaultValue@^{__CFString=}$kCGFontVariationAxisMaxValue@^{__CFString=}$kCGFontVariationAxisMinValue@^{__CFString=}$kCGFontVariationAxisName@^{__CFString=}$kCGPDFContextAccessPermissions$kCGPDFContextAllowsCopying@^{__CFString=}$kCGPDFContextAllowsPrinting@^{__CFString=}$kCGPDFContextArtBox@^{__CFString=}$kCGPDFContextAuthor@^{__CFString=}$kCGPDFContextBleedBox@^{__CFString=}$kCGPDFContextCreator@^{__CFString=}$kCGPDFContextCropBox@^{__CFString=}$kCGPDFContextEncryptionKeyLength@^{__CFString=}$kCGPDFContextKeywords@^{__CFString=}$kCGPDFContextMediaBox@^{__CFString=}$kCGPDFContextOutputIntent@^{__CFString=}$kCGPDFContextOutputIntents@^{__CFString=}$kCGPDFContextOwnerPassword@^{__CFString=}$kCGPDFContextSubject@^{__CFString=}$kCGPDFContextTitle@^{__CFString=}$kCGPDFContextTrimBox@^{__CFString=}$kCGPDFContextUserPassword@^{__CFString=}$kCGPDFOutlineChildren$kCGPDFOutlineDestination$kCGPDFOutlineDestinationRect$kCGPDFOutlineTitle$kCGPDFXDestinationOutputProfil
e@^{__CFString=}$kCGPDFXInfo@^{__CFString=}$kCGPDFXOutputCondition@^{__CFString=}$kCGPDFXOutputConditionIdentifier@^{__CFString=}$kCGPDFXOutputIntentSubtype@^{__CFString=}$kCGPDFXRegistryName@^{__CFString=}$kCGWindowAlpha@^{__CFString=}$kCGWindowBackingLocationVideoMemory@^{__CFString=}$kCGWindowBounds@^{__CFString=}$kCGWindowIsOnscreen@^{__CFString=}$kCGWindowLayer@^{__CFString=}$kCGWindowMemoryUsage@^{__CFString=}$kCGWindowName@^{__CFString=}$kCGWindowNumber@^{__CFString=}$kCGWindowOwnerName@^{__CFString=}$kCGWindowOwnerPID@^{__CFString=}$kCGWindowSharingState@^{__CFString=}$kCGWindowStoreType@^{__CFString=}$kCGWindowWorkspace@^{__CFString=}$''' -constants = constants + '$CGRectZero@%s$'%(sel32or64('{CGRect={CGPoint=ff}{CGSize=ff}}', '{CGRect={CGPoint=dd}{CGSize=dd}}'),) -constants = constants + '$CGAffineTransformIdentity@%s$'%(sel32or64('{CGAffineTransform=ffffff}', '{CGAffineTransform=dddddd}'),) -constants = constants + '$CGRectNull@%s$'%(sel32or64('{CGRect={CGPoint=ff}{CGSize=ff}}', '{CGRect={CGPoint=dd}{CGSize=dd}}'),) -constants = constants + '$CGPointZero@%s$'%(sel32or64('{CGPoint=ff}', '{CGPoint=dd}'),) -constants = constants + '$CGRectInfinite@%s$'%(sel32or64('{CGRect={CGPoint=ff}{CGSize=ff}}', '{CGRect={CGPoint=dd}{CGSize=dd}}'),) -constants = constants + '$CGSizeZero@%s$'%(sel32or64('{CGSize=ff}', '{CGSize=dd}'),) -enums = '''$CGFLOAT_DEFINED@1$CGGlyphMax@65534$CGGlyphMin@0$CGPDFDataFormatJPEG2000@2$CGPDFDataFormatJPEGEncoded@1$CGPDFDataFormatRaw@0$CGRectMaxXEdge@2$CGRectMaxYEdge@3$CGRectMinXEdge@0$CGRectMinYEdge@1$CGVECTOR_DEFINED@1$kCGAnnotatedSessionEventTap@2$kCGAssistiveTechHighWindowLevelKey@20$kCGBackingStoreBuffered@2$kCGBackingStoreNonretained@1$kCGBackingStoreRetained@0$kCGBackstopMenuLevelKey@3$kCGBaseWindowLevelKey@0$kCGBitmapAlphaInfoMask@31$kCGBitmapByteOrder16Big@12288$kCGBitmapByteOrder16Little@4096$kCGBitmapByteOrder32Big@16384$kCGBitmapByteOrder32Little@8192$kCGBitmapByteOrderDefault@0$kCGBitmapByteOrderMask@28672$kCGBitmapFloatComponents@256$kCGBlendModeClear@16$kCGBlendModeColor@14$kCGBlendModeColorBurn@7$kCGBlendModeColorDodge@6$kCGBlendModeCopy@17$kCGBlendModeDarken@4$kCGBlendModeDestinationAtop@24$kCGBlendModeDestinationIn@22$kCGBlendModeDestinationOut@23$kCGBlendModeDestinationOver@21$kCGBlendModeDifference@10$kCGBlendModeExclusion@11$kCGBlendModeHardLight@9$kCGBlendModeHue@12$kCGBlendModeLighten@5$kCGBlendModeLuminosity@15$kCGBlendModeMultiply@1$kCGBlendModeNormal@0$kCGBlendModeOverlay@3$kCGBlendModePlusDarker@26$kCGBlendModePlusLighter@27$kCGBlendModeSaturation@13$kCGBlendModeScreen@2$kCGBlendModeSoftLight@8$kCGBlendModeSourceAtop@20$kCGBlendModeSourceIn@18$kCGBlendModeSourceOut@19$kCGBlendModeXOR@25$kCGCaptureNoFill@1$kCGCaptureNoOptions@0$kCGColorConversionTransformApplySpace@2$kCGColorConversionTransformFromSpace@0$kCGColorConversionTransformToSpace@1$kCGColorConverterTransformApplySpace@2$kCGColorConverterTransformFromSpace@0$kCGColorConverterTransformToSpace@1$kCGColorSpaceModelCMYK@2$kCGColorSpaceModelDeviceN@4$kCGColorSpaceModelIndexed@5$kCGColorSpaceModelLab@3$kCGColorSpaceModelMonochrome@0$kCGColorSpaceModelPattern@6$kCGColorSpaceModelRGB@1$kCGColorSpaceModelUnknown@-1$kCGColorSpaceModelXYZ@7$kCGConfigureForAppOnly@0$kCGConfigureForSession@1$kCGConfigurePermanently@2$kCGCursorWindowLevelKey@19$kCGDesktopIconWindowLevelKey@18$kCGDesktopWindowLevelKey@2$kCGDisplayAddFlag@16$kCGDisplayBeginConfigurationFlag@1$kCGDisplayDesktopShapeChangedFlag@4096$kCGDisplayDisabledFlag@512$kCGDisplayEnabledFlag@256$kCGDisplayFadeReservationInvalidToken@0$kCGDis
playMirrorFlag@1024$kCGDisplayMovedFlag@2$kCGDisplayRemoveFlag@32$kCGDisplaySetMainFlag@4$kCGDisplaySetModeFlag@8$kCGDisplayStreamFrameStatusFrameBlank@2$kCGDisplayStreamFrameStatusFrameComplete@0$kCGDisplayStreamFrameStatusFrameIdle@1$kCGDisplayStreamFrameStatusStopped@3$kCGDisplayStreamUpdateDirtyRects@2$kCGDisplayStreamUpdateMovedRects@1$kCGDisplayStreamUpdateReducedDirtyRects@3$kCGDisplayStreamUpdateRefreshedRects@0$kCGDisplayUnMirrorFlag@2048$kCGDockWindowLevelKey@7$kCGDraggingWindowLevelKey@12$kCGEncodingFontSpecific@0$kCGEncodingMacRoman@1$kCGErrorApplicationAlreadyRunning@1025$kCGErrorApplicationCanOnlyBeRunInOneSessionAtATime@1026$kCGErrorApplicationIncorrectExecutableFormatFound@1023$kCGErrorApplicationIsLaunching@1024$kCGErrorApplicationNotPermittedToExecute@1016$kCGErrorApplicationRequiresNewerSystem@1015$kCGErrorCannotComplete@1004$kCGErrorClassicApplicationsMustBeLaunchedByClassic@1027$kCGErrorFailure@1000$kCGErrorFirst@1000$kCGErrorForkFailed@1028$kCGErrorIllegalArgument@1001$kCGErrorInvalidConnection@1002$kCGErrorInvalidContext@1003$kCGErrorInvalidOperation@1010$kCGErrorLast@1029$kCGErrorNameTooLong@1005$kCGErrorNoCurrentPoint@1009$kCGErrorNoneAvailable@1011$kCGErrorNotImplemented@1006$kCGErrorRangeCheck@1007$kCGErrorRetryRegistration@1029$kCGErrorSuccess@0$kCGErrorTypeCheck@1008$kCGEventFilterMaskPermitLocalKeyboardEvents@2$kCGEventFilterMaskPermitLocalMouseEvents@1$kCGEventFilterMaskPermitSystemDefinedEvents@4$kCGEventFlagMaskAlphaShift@65536$kCGEventFlagMaskAlternate@524288$kCGEventFlagMaskCommand@1048576$kCGEventFlagMaskControl@262144$kCGEventFlagMaskHelp@4194304$kCGEventFlagMaskNonCoalesced@256$kCGEventFlagMaskNumericPad@2097152$kCGEventFlagMaskSecondaryFn@8388608$kCGEventFlagMaskShift@131072$kCGEventFlagsChanged@12$kCGEventKeyDown@10$kCGEventKeyUp@11$kCGEventLeftMouseDown@1$kCGEventLeftMouseDragged@6$kCGEventLeftMouseUp@2$kCGEventMouseMoved@5$kCGEventMouseSubtypeDefault@0$kCGEventMouseSubtypeTabletPoint@1$kCGEventMouseSubtypeTabletProximity@2$kCGEventNull@0$kCGEventOtherMouseDown@25$kCGEventOtherMouseDragged@27$kCGEventOtherMouseUp@26$kCGEventRightMouseDown@3$kCGEventRightMouseDragged@7$kCGEventRightMouseUp@4$kCGEventScrollWheel@22$kCGEventSourceGroupID@44$kCGEventSourceStateCombinedSessionState@0$kCGEventSourceStateHIDSystemState@1$kCGEventSourceStateID@45$kCGEventSourceStatePrivate@-1$kCGEventSourceUnixProcessID@41$kCGEventSourceUserData@42$kCGEventSourceUserID@43$kCGEventSuppressionStateRemoteMouseDrag@1$kCGEventSuppressionStateSuppressionInterval@0$kCGEventTabletPointer@23$kCGEventTabletProximity@24$kCGEventTapDisabledByTimeout@4294967294$kCGEventTapDisabledByUserInput@4294967295$kCGEventTapOptionDefault@0$kCGEventTapOptionListenOnly@1$kCGEventTargetProcessSerialNumber@39$kCGEventTargetUnixProcessID@40$kCGFloatingWindowLevelKey@5$kCGFontIndexInvalid@65535$kCGFontIndexMax@65534$kCGFontPostScriptFormatType1@1$kCGFontPostScriptFormatType3@3$kCGFontPostScriptFormatType42@42$kCGGesturePhaseBegan@1$kCGGesturePhaseCancelled@8$kCGGesturePhaseChanged@2$kCGGesturePhaseEnded@4$kCGGesturePhaseMayBegin@128$kCGGesturePhaseNone@0$kCGGlyphMax@65534$kCGGradientDrawsAfterEndLocation@2$kCGGradientDrawsBeforeStartLocation@1$kCGHIDEventTap@0$kCGHeadInsertEventTap@0$kCGHelpWindowLevelKey@16$kCGImageAlphaFirst@4$kCGImageAlphaLast@3$kCGImageAlphaNone@0$kCGImageAlphaNoneSkipFirst@6$kCGImageAlphaNoneSkipLast@5$kCGImageAlphaOnly@7$kCGImageAlphaPremultipliedFirst@2$kCGImageAlphaPremultipliedLast@1$kCGImageByteOrder16Big@12288$kCGImageByteOrder16Little@4096$kCGImageByteOrder32Big@16384$kCGImag
eByteOrder32Little@8192$kCGImageByteOrderDefault@0$kCGImageByteOrderMask@28672$kCGImagePixelFormatMask@983040$kCGImagePixelFormatPacked@0$kCGImagePixelFormatRGB101010@196608$kCGImagePixelFormatRGB555@65536$kCGImagePixelFormatRGB565@131072$kCGImagePixelFormatRGBCIF10@262144$kCGInterpolationDefault@0$kCGInterpolationHigh@3$kCGInterpolationLow@2$kCGInterpolationMedium@4$kCGInterpolationNone@1$kCGKeyboardEventAutorepeat@8$kCGKeyboardEventKeyboardType@10$kCGKeyboardEventKeycode@9$kCGLineCapButt@0$kCGLineCapRound@1$kCGLineCapSquare@2$kCGLineJoinBevel@2$kCGLineJoinMiter@0$kCGLineJoinRound@1$kCGMainMenuWindowLevelKey@8$kCGMaximumWindowLevelKey@14$kCGMinimumWindowLevelKey@1$kCGModalPanelWindowLevelKey@10$kCGMomentumScrollPhaseBegin@1$kCGMomentumScrollPhaseContinue@2$kCGMomentumScrollPhaseEnd@3$kCGMomentumScrollPhaseNone@0$kCGMouseButtonCenter@2$kCGMouseButtonLeft@0$kCGMouseButtonRight@1$kCGMouseEventButtonNumber@3$kCGMouseEventClickState@1$kCGMouseEventDeltaX@4$kCGMouseEventDeltaY@5$kCGMouseEventInstantMouser@6$kCGMouseEventNumber@0$kCGMouseEventPressure@2$kCGMouseEventSubtype@7$kCGMouseEventWindowUnderMousePointer@91$kCGMouseEventWindowUnderMousePointerThatCanHandleThisEvent@92$kCGNormalWindowLevelKey@4$kCGNullDirectDisplay@0$kCGNullWindowID@0$kCGNumReservedWindowLevels@16$kCGNumberOfEventSuppressionStates@2$kCGNumberOfWindowLevelKeys@21$kCGOverlayWindowLevelKey@15$kCGPDFAllowsCommenting@64$kCGPDFAllowsContentAccessibility@32$kCGPDFAllowsContentCopying@16$kCGPDFAllowsDocumentAssembly@8$kCGPDFAllowsDocumentChanges@4$kCGPDFAllowsFormFieldEntry@128$kCGPDFAllowsHighQualityPrinting@2$kCGPDFAllowsLowQualityPrinting@1$kCGPDFArtBox@4$kCGPDFBleedBox@2$kCGPDFCropBox@1$kCGPDFMediaBox@0$kCGPDFObjectTypeArray@7$kCGPDFObjectTypeBoolean@2$kCGPDFObjectTypeDictionary@8$kCGPDFObjectTypeInteger@3$kCGPDFObjectTypeName@5$kCGPDFObjectTypeNull@1$kCGPDFObjectTypeReal@4$kCGPDFObjectTypeStream@9$kCGPDFObjectTypeString@6$kCGPDFTrimBox@3$kCGPathEOFill@1$kCGPathEOFillStroke@4$kCGPathElementAddCurveToPoint@3$kCGPathElementAddLineToPoint@1$kCGPathElementAddQuadCurveToPoint@2$kCGPathElementCloseSubpath@4$kCGPathElementMoveToPoint@0$kCGPathFill@0$kCGPathFillStroke@3$kCGPathStroke@2$kCGPatternTilingConstantSpacing@2$kCGPatternTilingConstantSpacingMinimalDistortion@1$kCGPatternTilingNoDistortion@0$kCGPopUpMenuWindowLevelKey@11$kCGRenderingIntentAbsoluteColorimetric@1$kCGRenderingIntentDefault@0$kCGRenderingIntentPerceptual@3$kCGRenderingIntentRelativeColorimetric@2$kCGRenderingIntentSaturation@4$kCGScreenSaverWindowLevelKey@13$kCGScreenUpdateOperationMove@1$kCGScreenUpdateOperationReducedDirtyRectangleCount@2147483648$kCGScreenUpdateOperationRefresh@0$kCGScrollEventUnitLine@1$kCGScrollEventUnitPixel@0$kCGScrollPhaseBegan@1$kCGScrollPhaseCancelled@8$kCGScrollPhaseChanged@2$kCGScrollPhaseEnded@4$kCGScrollPhaseMayBegin@128$kCGScrollWheelEventDeltaAxis1@11$kCGScrollWheelEventDeltaAxis2@12$kCGScrollWheelEventDeltaAxis3@13$kCGScrollWheelEventFixedPtDeltaAxis1@93$kCGScrollWheelEventFixedPtDeltaAxis2@94$kCGScrollWheelEventFixedPtDeltaAxis3@95$kCGScrollWheelEventInstantMouser@14$kCGScrollWheelEventIsContinuous@88$kCGScrollWheelEventMomentumPhase@123$kCGScrollWheelEventPointDeltaAxis1@96$kCGScrollWheelEventPointDeltaAxis2@97$kCGScrollWheelEventPointDeltaAxis3@98$kCGScrollWheelEventScrollCount@100$kCGScrollWheelEventScrollPhase@99$kCGSessionEventTap@1$kCGStatusWindowLevelKey@9$kCGTabletEventDeviceID@24$kCGTabletEventPointButtons@18$kCGTabletEventPointPressure@19$kCGTabletEventPointX@15$kCGTabletEventPointY@16$kCGTabletEventPointZ@17$kCGTabletE
ventRotation@22$kCGTabletEventTangentialPressure@23$kCGTabletEventTiltX@20$kCGTabletEventTiltY@21$kCGTabletEventVendor1@25$kCGTabletEventVendor2@26$kCGTabletEventVendor3@27$kCGTabletProximityEventCapabilityMask@36$kCGTabletProximityEventDeviceID@31$kCGTabletProximityEventEnterProximity@38$kCGTabletProximityEventPointerID@30$kCGTabletProximityEventPointerType@37$kCGTabletProximityEventSystemTabletID@32$kCGTabletProximityEventTabletID@29$kCGTabletProximityEventVendorID@28$kCGTabletProximityEventVendorPointerSerialNumber@34$kCGTabletProximityEventVendorPointerType@33$kCGTabletProximityEventVendorUniqueID@35$kCGTailAppendEventTap@1$kCGTextClip@7$kCGTextFill@0$kCGTextFillClip@4$kCGTextFillStroke@2$kCGTextFillStrokeClip@6$kCGTextInvisible@3$kCGTextStroke@1$kCGTextStrokeClip@5$kCGTornOffMenuWindowLevelKey@6$kCGUtilityWindowLevelKey@17$kCGWindowImageBestResolution@8$kCGWindowImageBoundsIgnoreFraming@1$kCGWindowImageDefault@0$kCGWindowImageNominalResolution@16$kCGWindowImageOnlyShadows@4$kCGWindowImageShouldBeOpaque@2$kCGWindowListExcludeDesktopElements@16$kCGWindowListOptionAll@0$kCGWindowListOptionIncludingWindow@8$kCGWindowListOptionOnScreenAboveWindow@2$kCGWindowListOptionOnScreenBelowWindow@4$kCGWindowListOptionOnScreenOnly@1$kCGWindowSharingNone@0$kCGWindowSharingReadOnly@1$kCGWindowSharingReadWrite@2$''' -misc.update({'CGFLOAT_IS_DOUBLE': sel32or64(0, 1), 'kCGEventMaskForAllEvents': sel32or64(4294967295, 18446744073709551615)}) -misc.update({'kCGDisplayModeIsTelevisionOutput': b'kCGDisplayModeIsTelevisionOutput'.decode("utf-8"), 'kCGSessionUserNameKey': b'kCGSSessionUserNameKey'.decode("utf-8"), 'kCGSessionOnConsoleKey': b'kCGSSessionOnConsoleKey'.decode("utf-8"), 'kCGDisplayIOFlags': b'IOFlags'.decode("utf-8"), 'kCGDisplayModeIsStretched': b'kCGDisplayModeIsStretched'.decode("utf-8"), 'kCGNotifyGUIConsoleSessionChanged': b'com.apple.coregraphics.GUIConsoleSessionChanged', 'kCGSessionUserIDKey': b'kCGSSessionUserIDKey'.decode("utf-8"), 'kCGDisplayModeIsInterlaced': b'kCGDisplayModeIsInterlaced'.decode("utf-8"), 'kCGDisplayBlendSolidColor': 1.0, 'kCGDisplayHeight': b'Height'.decode("utf-8"), 'kCGDisplayBitsPerSample': b'BitsPerSample'.decode("utf-8"), 'kCGDisplayBlendNormal': 0.0, 'kCGMouseDownEventMaskingDeadSwitchTimeout': 60.0, 'kCGMaxDisplayReservationInterval': 15.0, 'kCGDisplayWidth': b'Width'.decode("utf-8"), 'kCGDisplaySamplesPerPixel': b'SamplesPerPixel'.decode("utf-8"), 'kCGNotifyEventTapRemoved': b'com.apple.coregraphics.eventTapRemoved', 'kCGSessionConsoleSetKey': b'kCGSSessionConsoleSetKey'.decode("utf-8"), 'kCGDisplayRefreshRate': b'RefreshRate'.decode("utf-8"), 'kCGDisplayBytesPerRow': b'kCGDisplayBytesPerRow'.decode("utf-8"), 'kCGDisplayBitsPerPixel': b'BitsPerPixel'.decode("utf-8"), 'kCGDisplayModeUsableForDesktopGUI': b'UsableForDesktopGUI'.decode("utf-8"), 'kCGSessionLoginDoneKey': b'kCGSessionLoginDoneKey'.decode("utf-8"), 'kCGIODisplayModeID': b'IODisplayModeID'.decode("utf-8"), 'kCGDisplayMode': b'Mode'.decode("utf-8"), 'kCGNotifyEventTapAdded': b'com.apple.coregraphics.eventTapAdded', 'kCGDisplayModeIsSafeForHardware': b'kCGDisplayModeIsSafeForHardware'.decode("utf-8"), 'kCGNotifyGUISessionUserChanged': b'com.apple.coregraphics.GUISessionUserChanged'}) -functions={'CGPDFDocumentGetVersion': (b'v^{CGPDFDocument=}^i^i', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CGContextAddArcToPoint': (sel32or64(b'v^{CGContext=}fffff', b'v^{CGContext=}ddddd'),), 'CGRectIntersection': 
(sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGDataConsumerRetain': (b'^{CGDataConsumer=}^{CGDataConsumer=}',), 'CGColorSpaceCreateDeviceCMYK': (b'^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGGradientGetTypeID': (sel32or64(b'L', b'Q'),), 'CGColorConversionInfoCreate': (b'^{CGColorConversionInfo=}^{CGColorSpace=}^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGPDFPageGetDrawingTransform': (sel32or64(b'{CGAffineTransform=ffffff}^{CGPDFPage=}I{CGRect={CGPoint=ff}{CGSize=ff}}iB', b'{CGAffineTransform=dddddd}^{CGPDFPage=}I{CGRect={CGPoint=dd}{CGSize=dd}}iB'),), 'CGFontCanCreatePostScriptSubset': (b'B^{CGFont=}I', '', {'retval': {'already_cfretained': True}}), 'CGContextSetStrokeColorSpace': (b'v^{CGContext=}^{CGColorSpace=}',), 'CGContextPathContainsPoint': (sel32or64(b'B^{CGContext=}{CGPoint=ff}I', b'B^{CGContext=}{CGPoint=dd}I'),), 'CGAffineTransformRotate': (sel32or64(b'{CGAffineTransform=ffffff}{CGAffineTransform=ffffff}f', b'{CGAffineTransform=dddddd}{CGAffineTransform=dddddd}d'),), 'CGContextRelease': (b'v^{CGContext=}',), 'CGPDFArrayGetStream': (sel32or64(b'B^{CGPDFArray=}L^^{CGPDFStream=}', b'B^{CGPDFArray=}Q^^{CGPDFStream=}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CGEventKeyboardSetUnicodeString': (sel32or64(b'v^{__CGEvent=}L^T', b'v^{__CGEvent=}Q^T'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'CGDisplayModeGetRefreshRate': (b'd^{CGDisplayMode=}',), 'CGShieldingWindowID': (b'II',), 'CGAffineTransformMake': (sel32or64(b'{CGAffineTransform=ffffff}ffffff', b'{CGAffineTransform=dddddd}dddddd'),), 'CGWindowListCreateDescriptionFromArray': (b'^{__CFArray=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CGContextAddLines': (sel32or64(b'v^{CGContext=}^{CGPoint=ff}L', b'v^{CGContext=}^{CGPoint=dd}Q'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CGContextSetTextPosition': (sel32or64(b'v^{CGContext=}ff', b'v^{CGContext=}dd'),), 'CGContextGetTextPosition': (sel32or64(b'{CGPoint=ff}^{CGContext=}', b'{CGPoint=dd}^{CGContext=}'),), 'CGPDFPageGetRotationAngle': (b'i^{CGPDFPage=}',), 'CGContextGetPathBoundingBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGContext=}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGContext=}'),), 'CGRectContainsPoint': (sel32or64(b'B{CGRect={CGPoint=ff}{CGSize=ff}}{CGPoint=ff}', b'B{CGRect={CGPoint=dd}{CGSize=dd}}{CGPoint=dd}'),), 'CGPDFDictionaryGetCount': (sel32or64(b'L^{CGPDFDictionary=}', b'Q^{CGPDFDictionary=}'),), 'CGRectMake': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}ffff', b'{CGRect={CGPoint=dd}{CGSize=dd}}dddd'),), 'CGColorSpaceRetain': (b'^{CGColorSpace=}^{CGColorSpace=}',), 'CGPathCreateCopyByStrokingPath': (sel32or64(b'^{CGPath=}^{CGPath=}^{CGAffineTransform=ffffff}fIIf', b'^{CGPath=}^{CGPath=}^{CGAffineTransform=dddddd}dIId'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}}}), 'CGContextAddEllipseInRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGContextEndTransparencyLayer': (b'v^{CGContext=}',), 'CGContextSelectFont': (sel32or64(b'v^{CGContext=}^cfI', b'v^{CGContext=}^cdI'), '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CGLayerGetSize': (sel32or64(b'{CGSize=ff}^{CGLayer=}', 
b'{CGSize=dd}^{CGLayer=}'),), 'CGWindowListCreate': (b'^{__CFArray=}II', '', {'retval': {'already_cfretained': True}}), 'CGSizeEqualToSize': (sel32or64(b'B{CGSize=ff}{CGSize=ff}', b'B{CGSize=dd}{CGSize=dd}'),), 'CGColorConverterCreateSimple': (b'^{CGColorConversionInfo=}^{CGColorSpace=}^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGPDFStringCopyTextString': (b'^{__CFString=}^{CGPDFString=}', '', {'retval': {'already_cfretained': True}}), 'CGColorSpaceGetBaseColorSpace': (b'^{CGColorSpace=}^{CGColorSpace=}',), 'CGPathCreateMutable': (b'^{CGPath=}', '', {'retval': {'already_cfretained': True}}), 'CGPatternCreate': (sel32or64(b'^{CGPattern=}^v{CGRect={CGPoint=ff}{CGSize=ff}}{CGAffineTransform=ffffff}ffIB^{CGPatternCallbacks=I^?^?}', b'^{CGPattern=}^v{CGRect={CGPoint=dd}{CGSize=dd}}{CGAffineTransform=dddddd}ddIB^{CGPatternCallbacks=I^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CGBitmapContextGetHeight': (sel32or64(b'L^{CGContext=}', b'Q^{CGContext=}'),), 'CGPDFPageGetBoxRect': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGPDFPage=}I', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGPDFPage=}I'),), 'CGPDFStringCopyDate': (b'^{__CFDate=}^{CGPDFString=}', '', {'retval': {'already_cfretained': True}}), 'CGDisplayStreamUpdateGetDropCount': (sel32or64(b'L^{CGDisplayStreamUpdate=}', b'Q^{CGDisplayStreamUpdate=}'),), 'CGDisplayBestModeForParametersAndRefreshRate': (sel32or64(b'^{__CFDictionary=}ILLLd^i', b'^{__CFDictionary=}IQQQd^I'), '', {'arguments': {5: {'type_modifier': 'o'}}}), 'CGPDFScannerPopString': (b'B^{CGPDFScanner=}^^{CGPDFString=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGPDFPageGetTypeID': (sel32or64(b'L', b'Q'),), 'CGContextAddRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGDataProviderCreateWithURL': (b'^{CGDataProvider=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CGPDFScannerCreate': (b'^{CGPDFScanner=}^{CGPDFContentStream=}^{CGPDFOperatorTable=}^v', '', {'retval': {'already_cfretained': False}}), 'CGConfigureDisplayFadeEffect': (b'i^{_CGDisplayConfigRef=}fffff',), 'CGDisplayFade': (sel32or64(b'iIffffffi', b'iIffffffI'),), 'CGPDFArrayGetObject': (sel32or64(b'B^{CGPDFArray=}L^{CGPDFObject=}', b'B^{CGPDFArray=}Q^{CGPDFObject=}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CGEventSetType': (b'v^{__CGEvent=}I',), 'CGDataProviderCreateWithFilename': (b'^{CGDataProvider=}^c', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CGColorGetComponents': (sel32or64(b'^f^{CGColor=}', b'^d^{CGColor=}'), '', {'retval': {'c_array_of_variable_length': True}}), 'CGPDFContextSetOutline': (b'v^{CGContext=}^{__CFDictionary=}',), 'CGAffineTransformMakeTranslation': (sel32or64(b'{CGAffineTransform=ffffff}ff', b'{CGAffineTransform=dddddd}dd'),), 'CGSizeMake': (sel32or64(b'{CGSize=ff}ff', b'{CGSize=dd}dd'),), 'CGDisplayVendorNumber': (b'II',), 'CGPDFDocumentGetID': (b'^{CGPDFArray=}^{CGPDFDocument=}',), 'CGDataProviderCreateWithData': (sel32or64(b'^{CGDataProvider=}^v^vL^?', b'^{CGDataProvider=}^v^vQ^?'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v'}, 2: {'type': b'Q'}}}}}}), 'CGColorSpaceCreatePattern': (b'^{CGColorSpace=}^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGContextSynchronize': (b'v^{CGContext=}',), 'CGDisplayModeGetIODisplayModeID': 
(b'i^{CGDisplayMode=}',), 'CGFontGetGlyphBBoxes': (sel32or64(b'B^{CGFont=}^SL^{CGRect={CGPoint=ff}{CGSize=ff}}', b'B^{CGFont=}^SQ^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 2, 'type_modifier': 'o'}}}), 'CGPaletteCreateWithByteSamples': (b'^{_CGDirectPaletteRef=}^{CGDeviceByteColor=CCC}I', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'CGPDFContentStreamGetResource': (b'^{CGPDFObject=}^{CGPDFContentStream=}^c^c', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'CGAffineTransformMakeRotation': (sel32or64(b'{CGAffineTransform=ffffff}f', b'{CGAffineTransform=dddddd}d'),), 'CGGradientRetain': (b'^{CGGradient=}^{CGGradient=}',), 'CGPaletteCreateWithDisplay': (b'^{_CGDirectPaletteRef=}I', '', {'retval': {'already_cfretained': True}}), 'CGDisplayCreateImageForRect': (sel32or64(b'^{CGImage=}I{CGRect={CGPoint=ff}{CGSize=ff}}', b'^{CGImage=}I{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'retval': {'already_cfretained': True}}), 'CGImageGetWidth': (sel32or64(b'L^{CGImage=}', b'Q^{CGImage=}'),), 'CGPDFDocumentIsUnlocked': (b'B^{CGPDFDocument=}',), 'CGPathCreateWithRect': (sel32or64(b'^{CGPath=}{CGRect={CGPoint=ff}{CGSize=ff}}^{CGAffineTransform=ffffff}', b'^{CGPath=}{CGRect={CGPoint=dd}{CGSize=dd}}^{CGAffineTransform=dddddd}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}}}), 'CGImageGetBitmapInfo': (b'I^{CGImage=}',), 'CGEventSourceSetKeyboardType': (b'v^{__CGEventSource=}I',), 'CGDataProviderGetInfo': (b'v^{CGDataProvider=}',), 'CGContextSetAllowsFontSmoothing': (b'v^{CGContext=}B',), 'CGDisplayUsesOpenGLAcceleration': (sel32or64(b'iI', b'II'),), 'CGPointMakeWithDictionaryRepresentation': (sel32or64(b'B^{__CFDictionary=}^{CGPoint=ff}', b'B^{__CFDictionary=}^{CGPoint=dd}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGContextResetClip': (b'v^{CGContext=}',), 'CGPDFDictionaryApplyFunction': (b'v^{CGPDFDictionary=}^?^v', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^t', 'c_array_delimeted_by_null': True, 'type_modifier': 'n'}, 1: {'type': b'^{CGPDFObject=}'}, 2: {'type': b'^v'}}}}}}), 'CGWindowServerCreateServerPort': (b'^{__CFMachPort=}', '', {'retval': {'already_cfretained': True}}), 'CGPathAddEllipseInRect': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGPath=}^{CGAffineTransform=dddddd}{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGColorSpaceGetColorTableCount': (sel32or64(b'L^{CGColorSpace=}', b'Q^{CGColorSpace=}'),), 'CGWindowListCreateImage': (sel32or64(b'^{CGImage=}{CGRect={CGPoint=ff}{CGSize=ff}}III', b'^{CGImage=}{CGRect={CGPoint=dd}{CGSize=dd}}III'), '', {'retval': {'already_cfretained': True}}), 'CGContextClearRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGPDFDocumentGetAccessPermissions': (b'I^{CGPDFDocument=}',), 'CGBitmapContextGetBitmapInfo': (b'I^{CGContext=}',), 'CGPathAddQuadCurveToPoint': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}ffff', b'v^{CGPath=}^{CGAffineTransform=dddddd}dddd'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGColorSpaceCreateDeviceGray': (b'^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGRectIntersectsRect': 
(sel32or64(b'B{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}', b'B{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGPDFPageGetDocument': (b'^{CGPDFDocument=}^{CGPDFPage=}',), 'CGRestorePermanentDisplayConfiguration': (b'v',), 'CGImageGetTypeID': (sel32or64(b'L', b'Q'),), 'CGFontCreatePostScriptEncoding': (b'^{__CFData=}^{CGFont=}[256S]', '', {'retval': {'already_cfretained': True}}), 'CGFontGetStemV': (sel32or64(b'f^{CGFont=}', b'd^{CGFont=}'),), 'CGPointApplyAffineTransform': (sel32or64(b'{CGPoint=ff}{CGPoint=ff}{CGAffineTransform=ffffff}', b'{CGPoint=dd}{CGPoint=dd}{CGAffineTransform=dddddd}'),), 'CGEventSourceGetSourceStateID': (b'I^{__CGEventSource=}',), 'CGRectStandardize': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGPathAddLineToPoint': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}ff', b'v^{CGPath=}^{CGAffineTransform=dddddd}dd'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGDataProviderCopyData': (b'^{__CFData=}^{CGDataProvider=}', '', {'retval': {'already_cfretained': True}}), 'CGColorCreateGenericGray': (sel32or64(b'^{CGColor=}ff', b'^{CGColor=}dd'), '', {'retval': {'already_cfretained': True}}), 'CGColorSpaceIsWideGamutRGB': (b'B^{CGColorSpace=}',), 'CGPSConverterGetTypeID': (sel32or64(b'L', b'Q'),), 'CGColorRetain': (b'^{CGColor=}^{CGColor=}',), 'CGColorCreateGenericCMYK': (sel32or64(b'^{CGColor=}fffff', b'^{CGColor=}ddddd'), '', {'retval': {'already_cfretained': True}}), 'CGGLContextCreate': (sel32or64(b'^{CGContext=}^v{CGSize=ff}^{CGColorSpace=}', b'^{CGContext=}^v{CGSize=dd}^{CGColorSpace=}'), '', {'retval': {'already_cfretained': True}}), 'CGBeginDisplayConfiguration': (b'i^^{_CGDisplayConfigRef=}', '', {'arguments': {0: {'type_modifier': 'o'}}}), 'CGDisplayStreamGetTypeID': (sel32or64(b'L', b'Q'),), 'CGBitmapContextGetBitsPerPixel': (sel32or64(b'L^{CGContext=}', b'Q^{CGContext=}'),), 'CGPDFDictionaryGetArray': (b'B^{CGPDFDictionary=}^c^^{CGPDFArray=}', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CGColorSpaceCreateWithPlatformColorSpace': (b'^{CGColorSpace=}^v', '', {'retval': {'already_cfretained': True}}), 'CGContextSetCMYKStrokeColor': (sel32or64(b'v^{CGContext=}fffff', b'v^{CGContext=}ddddd'),), 'CGDisplayStreamGetRunLoopSource': (b'^{__CFRunLoopSource=}^{CGDisplayStream=}',), 'CGContextEndPage': (b'v^{CGContext=}',), 'CGUnregisterScreenRefreshCallback': (b'v^?^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'^{CGRect={CGPoint=dd}{CGSize=dd}}'}, 2: {'type': b'^v'}}}}}}), 'CGPathAddRect': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGPath=}^{CGAffineTransform=dddddd}{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGPDFContentStreamRelease': (b'v^{CGPDFContentStream=}',), 'CGPathApplyWithBlock': (b'v^{CGPath=}@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'typestr': '^v'}, 1: {'typestr': '^{CGPathElement=}'}}}}}}), 'CGContextGetCTM': (sel32or64(b'{CGAffineTransform=ffffff}^{CGContext=}', b'{CGAffineTransform=dddddd}^{CGContext=}'),), 'CGDisplayStreamUpdateGetRects': (sel32or64(b'^{CGRect={CGPoint=ff}{CGSize=ff}}^{CGDisplayStreamUpdate=}i^L', b'^{CGRect={CGPoint=dd}{CGSize=dd}}^{CGDisplayStreamUpdate=}i^Q'), '', {'retval': {'c_array_length_in_arg': 2}, 
'arguments': {2: {'type_modifier': 'o'}}}), 'CGPDFArrayGetName': (sel32or64(b'B^{CGPDFArray=}L^^c', b'B^{CGPDFArray=}Q^^c'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CGEventSourceGetPixelsPerLine': (b'd^{__CGEventSource=}',), 'CGDisplayStreamUpdateGetMovedRectsDelta': (sel32or64(b'v^{CGDisplayStreamUpdate=}^f^f', b'v^{CGDisplayStreamUpdate=}^d^d'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CGRectGetHeight': (sel32or64(b'f{CGRect={CGPoint=ff}{CGSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGEventSourceGetTypeID': (sel32or64(b'L', b'Q'),), 'CGPDFDictionaryGetStream': (b'B^{CGPDFDictionary=}^t^^{CGPDFStream=}', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CGDataProviderCreateWithCFData': (b'^{CGDataProvider=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CGShieldingWindowLevel': (b'i',), 'CGDisplaySetPalette': (b'iI^{_CGDirectPaletteRef=}',), 'CGDisplayIsOnline': (sel32or64(b'iI', b'II'),), 'CGDisplayStreamCreate': (sel32or64(b'^{CGDisplayStream=}ILLi^{__CFDictionary=}@?', b'^{CGDisplayStream=}IQQi^{__CFDictionary=}@?'), '', {'retval': {'already_cfretained': True}, 'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'I'}, 2: {'type': 'Q'}, 3: {'type': '@'}, 4: {'type': '@'}}}}}}), 'CGFontGetCapHeight': (b'i^{CGFont=}',), 'CGContextShowGlyphsWithAdvances': (sel32or64(b'v^{CGContext=}^S^{CGSize=ff}L', b'v^{CGContext=}^S^{CGSize=dd}Q'), '', {'arguments': {1: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}, 2: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CGDataConsumerCreate': (b'^{CGDataConsumer=}^v^{CGDataConsumerCallbacks=^?^?}', '', {'retval': {'already_cfretained': True}}), 'CGPDFArrayGetInteger': (sel32or64(b'B^{CGPDFArray=}L^l', b'B^{CGPDFArray=}Q^q'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CGPostScrollWheelEvent': (b'iIi', '', {'c_array_length_in_arg': 0, 'variadic': True}), 'CGColorCreateCopy': (b'^{CGColor=}^{CGColor=}', '', {'retval': {'already_cfretained': True}}), 'CGBitmapContextCreate': (sel32or64(b'^{CGContext=}^vLLLL^{CGColorSpace=}I', b'^{CGContext=}^vQQQQ^{CGColorSpace=}I'), '', {'retval': {'already_cfretained': True}}), 'CGPDFArrayApplyBlock': (b'v^{CGPDFArray=}@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'B'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'l'}, 2: {'type': '^{CGPDFObject=}'}, 3: {'type': '^v'}}}}}}), 'CGPathAddRelativeArc': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}fffff', b'v^{CGPath=}^{CGAffineTransform=dddddd}ddddd'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGDisplaySetStereoOperation': (sel32or64(b'iIiiI', b'iIIII'),), 'CGPaletteIsEqualToPalette': (b'B^{_CGDirectPaletteRef=}^{_CGDirectPaletteRef=}',), 'CGContextSetShouldAntialias': (b'v^{CGContext=}B',), 'CGDisplayModeGetHeight': (sel32or64(b'L^{CGDisplayMode=}', b'Q^{CGDisplayMode=}'),), 'CGContextSetFillColor': (sel32or64(b'v^{CGContext=}^f', b'v^{CGContext=}^d'), '', {'arguments': {1: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGImageRelease': (b'v^{CGImage=}',), 'CGInhibitLocalEvents': (sel32or64(b'ii', b'iI'),), 'CGContextSetGrayFillColor': (sel32or64(b'v^{CGContext=}ff', b'v^{CGContext=}dd'),), 'CGImageGetUTType': (b'@@',), 'CGPSConverterCreate': (b'^{CGPSConverter=}^v^{CGPSConverterCallbacks=I^?^?^?^?^?^?^?}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGDirectDisplayCopyCurrentMetalDevice': (b'@I', '', {'retval': 
{'already_cfretained': True}}), 'CGContextClipToMask': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}^{CGImage=}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}^{CGImage=}'),), 'CGDisplayCopyColorSpace': (b'^{CGColorSpace=}I', '', {'retval': {'already_cfretained': True}}), 'CGContextAddLineToPoint': (sel32or64(b'v^{CGContext=}ff', b'v^{CGContext=}dd'),), 'CGEventSourceGetLocalEventsSuppressionInterval': (b'd^{__CGEventSource=}',), 'CGColorSpaceGetTypeID': (sel32or64(b'L', b'Q'),), 'CGPathAddPath': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}^{CGPath=}', b'v^{CGPath=}^{CGAffineTransform=dddddd}^{CGPath=}'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGDataProviderRetain': (b'^{CGDataProvider=}^{CGDataProvider=}',), 'CGEventCreateFromData': (b'^{__CGEvent=}^{__CFAllocator=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CGDisplayPixelsHigh': (sel32or64(b'LI', b'QI'),), 'CGConfigureDisplayStereoOperation': (sel32or64(b'i^{_CGDisplayConfigRef=}Iii', b'i^{_CGDisplayConfigRef=}III'),), 'CGPDFOperatorTableCreate': (b'^{CGPDFOperatorTable=}', '', {'retval': {'already_cfretained': True}}), 'CGPDFContextAddDestinationAtPoint': (sel32or64(b'v^{CGContext=}^{__CFString=}{CGPoint=ff}', b'v^{CGContext=}^{__CFString=}{CGPoint=dd}'),), 'CGPDFScannerGetContentStream': (b'^{CGPDFContentStream=}^{CGPDFScanner=}',), 'CGContextSetShouldSubpixelQuantizeFonts': (b'v^{CGContext=}B',), 'CGColorCreateWithPattern': (sel32or64(b'^{CGColor=}^{CGColorSpace=}^{CGPattern=}^f', b'^{CGColor=}^{CGColorSpace=}^{CGPattern=}^d'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGPathContainsPoint': (sel32or64(b'B^{CGPath=}^{CGAffineTransform=ffffff}{CGPoint=ff}B', b'B^{CGPath=}^{CGAffineTransform=dddddd}{CGPoint=dd}B'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGSizeApplyAffineTransform': (sel32or64(b'{CGSize=ff}{CGSize=ff}{CGAffineTransform=ffffff}', b'{CGSize=dd}{CGSize=dd}{CGAffineTransform=dddddd}'),), 'CGRectIntegral': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGDisplayPrimaryDisplay': (b'II',), 'CGContextConcatCTM': (sel32or64(b'v^{CGContext=}{CGAffineTransform=ffffff}', b'v^{CGContext=}{CGAffineTransform=dddddd}'),), 'CGFunctionRelease': (b'v^{CGFunction=}',), 'CGPDFDocumentGetOutline': (b'^{__CFDictionary=}^{CGPDFDocument=}',), 'CGWindowListCreateImageFromArray': (sel32or64(b'^{CGImage=}{CGRect={CGPoint=ff}{CGSize=ff}}^{__CFArray=}I', b'^{CGImage=}{CGRect={CGPoint=dd}{CGSize=dd}}^{__CFArray=}I'), '', {'retval': {'already_cfretained': True}}), 'CGPatternRetain': (b'^{CGPattern=}^{CGPattern=}',), 'CGPaletteCreateWithSamples': (b'^{_CGDirectPaletteRef=}^{CGDeviceColor=fff}I', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'CGDataProviderGetTypeID': (sel32or64(b'L', b'Q'),), 'CGPaletteCreateWithCapacity': (b'^{_CGDirectPaletteRef=}I', '', {'retval': {'already_cfretained': True}}), 'CGDisplayBytesPerRow': (sel32or64(b'LI', b'QI'),), 'CGSetLocalEventsSuppressionInterval': (b'id',), 'CGPDFArrayGetCount': (sel32or64(b'L^{CGPDFArray=}', b'Q^{CGPDFArray=}'),), 'CGPDFContextClose': (b'v^{CGContext=}',), 'CGDisplayIsBuiltin': (sel32or64(b'iI', b'II'),), 'CGContextIsPathEmpty': (b'B^{CGContext=}',), 'CGContextSetShadow': (sel32or64(b'v^{CGContext=}{CGSize=ff}f', b'v^{CGContext=}{CGSize=dd}d'),), 'CGPathGetBoundingBox': 
(sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGPath=}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGPath=}'),), 'CGColorGetNumberOfComponents': (sel32or64(b'L^{CGColor=}', b'Q^{CGColor=}'),), 'CGColorSpaceRelease': (b'v^{CGColorSpace=}',), 'CGGetDisplayTransferByTable': (b'iII^f^f^f^I', '', {'arguments': {2: {'c_array_length_in_arg': (1, 5), 'type_modifier': 'o'}, 3: {'c_array_length_in_arg': (1, 5), 'type_modifier': 'o'}, 4: {'c_array_length_in_arg': (1, 5), 'type_modifier': 'o'}, 5: {'type_modifier': 'o'}}}), 'CGPDFDictionaryApplyBlock': (b'v^{CGPDFDictionary=}@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'c_array_delimited_by_null': True, 'type': 'n^t'}, 2: {'type': '^{CGPDFObject=}'}, 3: {'type': '^v'}}}}}}), 'CGContextShowGlyphsAtPoint': (sel32or64(b'v^{CGContext=}ff^SL', b'v^{CGContext=}dd^SQ'), '', {'arguments': {3: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}}}), 'CGPathAddLines': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}^{CGPoint=ff}L', b'v^{CGPath=}^{CGAffineTransform=dddddd}^{CGPoint=dd}Q'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CGColorCreateGenericRGB': (sel32or64(b'^{CGColor=}ffff', b'^{CGColor=}dddd'), '', {'retval': {'already_cfretained': True}}), 'CGContextDrawPDFPage': (b'v^{CGContext=}^{CGPDFPage=}',), 'CGDisplayModeRetain': (b'^{CGDisplayMode=}^{CGDisplayMode=}',), 'CGDisplayGammaTableCapacity': (b'II',), 'CGFontCreateWithFontName': (b'^{CGFont=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CGDisplayCopyAllDisplayModes': (b'^{__CFArray=}I^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGContextScaleCTM': (sel32or64(b'v^{CGContext=}ff', b'v^{CGContext=}dd'),), 'CGRectDivide': (sel32or64(b'v{CGRect={CGPoint=ff}{CGSize=ff}}^{CGRect={CGPoint=ff}{CGSize=ff}}^{CGRect={CGPoint=ff}{CGSize=ff}}fI', b'v{CGRect={CGPoint=dd}{CGSize=dd}}^{CGRect={CGPoint=dd}{CGSize=dd}}^{CGRect={CGPoint=dd}{CGSize=dd}}dI'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CGContextSetLineCap': (b'v^{CGContext=}I',), 'CGImageMaskCreate': (sel32or64(b'^{CGImage=}LLLLL^{CGDataProvider=}^fB', b'^{CGImage=}QQQQQ^{CGDataProvider=}^dB'), '', {'retval': {'already_cfretained': True}}), 'CGContextDrawRadialGradient': (sel32or64(b'v^{CGContext=}^{CGGradient=}{CGPoint=ff}f{CGPoint=ff}fI', b'v^{CGContext=}^{CGGradient=}{CGPoint=dd}d{CGPoint=dd}dI'),), 'CGFontCopyVariations': (b'^{__CFDictionary=}^{CGFont=}', '', {'retval': {'already_cfretained': True}}), 'CGConfigureDisplayMirrorOfDisplay': (b'i^{_CGDisplayConfigRef=}II',), 'CGEventSourceCreate': (b'^{__CGEventSource=}I', '', {'retval': {'already_cfretained': True}}), 'CGRectGetMidY': (sel32or64(b'f{CGRect={CGPoint=ff}{CGSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGContextFillEllipseInRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGContextSetAlpha': (sel32or64(b'v^{CGContext=}f', b'v^{CGContext=}d'),), 'CGContextAddQuadCurveToPoint': (sel32or64(b'v^{CGContext=}ffff', b'v^{CGContext=}dddd'),), 'CGColorSpaceGetNumberOfComponents': (sel32or64(b'L^{CGColorSpace=}', b'Q^{CGColorSpace=}'),), 'CGEventCreateKeyboardEvent': (b'^{__CGEvent=}^{__CGEventSource=}SB', '', {'retval': {'already_cfretained': True}}), 'CGPDFDocumentGetMediaBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGPDFDocument=}i', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGPDFDocument=}i'),), 'CGEventSetSource': 
(b'v^{__CGEvent=}^{__CGEventSource=}',), 'CGSetDisplayTransferByByteTable': (b'iII^z^z^z', '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 4: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'CGPDFDictionaryGetString': (b'B^{CGPDFDictionary=}^t^^{CGPDFString=}', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CGRegisterScreenRefreshCallback': (b'i^?^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'^{CGRect={CGPoint=dd}{CGSize=dd}}'}, 2: {'type': b'^v'}}}}}}), 'CGFontGetUnitsPerEm': (b'i^{CGFont=}',), 'CGContextEOClip': (b'v^{CGContext=}',), 'CGAcquireDisplayFadeReservation': (b'if^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGBitmapContextGetData': (b'^v^{CGContext=}', '', {'retval': {'c_array_of_variable_length': True}}), 'CGAffineTransformIsIdentity': (sel32or64(b'B{CGAffineTransform=ffffff}', b'B{CGAffineTransform=dddddd}'),), 'CGContextGetInterpolationQuality': (b'I^{CGContext=}',), 'CGPathGetPathBoundingBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGPath=}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGPath=}'),), 'CGContextRotateCTM': (sel32or64(b'v^{CGContext=}f', b'v^{CGContext=}d'),), 'CGImageCreateCopy': (b'^{CGImage=}^{CGImage=}', '', {'retval': {'already_cfretained': True}}), 'CGImageGetShouldInterpolate': (b'B^{CGImage=}',), 'CGContextStrokeRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGImageGetDecode': (sel32or64(b'^f^{CGImage=}', b'^d^{CGImage=}'), '', {'retval': {'c_array_of_variable_length': True}}), 'CGColorCreateCopyByMatchingToColorSpace': (b'^{CGColor=}^{CGColorSpace=}i^{CGColor=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGContextSetAllowsAntialiasing': (b'v^{CGContext=}B',), 'CGPDFScannerPopDictionary': (b'B^{CGPDFScanner=}^^{CGPDFDictionary=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGRectGetMidX': (sel32or64(b'f{CGRect={CGPoint=ff}{CGSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGFontCopyTableForTag': (b'^{__CFData=}^{CGFont=}I', '', {'retval': {'already_cfretained': True}}), 'CGEventSourceGetLocalEventsFilterDuringSuppressionState': (b'I^{__CGEventSource=}I',), 'CGColorGetTypeID': (sel32or64(b'L', b'Q'),), 'CGSetDisplayTransferByFormula': (b'iIfffffffff',), 'CGPDFStreamGetDictionary': (b'^{CGPDFDictionary=}^{CGPDFStream=}',), 'CGEventSourceSetLocalEventsFilterDuringSuppressionState': (b'v^{__CGEventSource=}II',), 'CGRectContainsRect': (sel32or64(b'B{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}', b'B{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGColorGetPattern': (b'^{CGPattern=}^{CGColor=}',), 'CGPaletteCreateDefaultColorPalette': (b'^{_CGDirectPaletteRef=}', '', {'retval': {'already_cfretained': True}}), 'CGFontCreatePostScriptSubset': (sel32or64(b'^{__CFData=}^{CGFont=}^{__CFString=}I^SL[256S]', b'^{__CFData=}^{CGFont=}^{__CFString=}I^SQ[256S]'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}, 5: {'type_modifier': 'n'}}}), 'CGPDFDocumentGetCatalog': (b'^{CGPDFDictionary=}^{CGPDFDocument=}',), 'CGColorSpaceGetModel': (b'i^{CGColorSpace=}',), 'CGImageGetColorSpace': (b'^{CGColorSpace=}^{CGImage=}',), 'CGPDFArrayGetString': (sel32or64(b'B^{CGPDFArray=}L^^{CGPDFString=}', b'B^{CGPDFArray=}Q^^{CGPDFString=}'), '', 
{'arguments': {2: {'type_modifier': 'o'}}}), 'CGPointMake': (sel32or64(b'{CGPoint=ff}ff', b'{CGPoint=dd}dd'),), 'CGPaletteRelease': (b'v^{_CGDirectPaletteRef=}',), 'CGPDFDictionaryGetObject': (b'B^{CGPDFDictionary=}^t^^{CGPDFObject=}', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CGEventTapCreateForPSN': (b'^{__CFMachPort=}^vIIQ^?^v', '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'callable': {'retval': {'type': b'^{__CGEvent=}'}, 'arguments': {0: {'type': b'^{__CGEventTapProxy=}'}, 1: {'type': b'I'}, 2: {'type': b'^{__CGEvent=}'}, 3: {'type': b'^v'}}}}}}), 'CGReleaseDisplayFadeReservation': (b'iI',), 'CGDisplayRegisterReconfigurationCallback': (b'i^?^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: {'type': b'^v'}}}}}}), 'CGPDFPageRetain': (b'^{CGPDFPage=}^{CGPDFPage=}',), 'CGLayerCreateWithContext': (sel32or64(b'^{CGLayer=}^{CGContext=}{CGSize=ff}^{__CFDictionary=}', b'^{CGLayer=}^{CGContext=}{CGSize=dd}^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CGContextStrokeLineSegments': (sel32or64(b'v^{CGContext=}^{CGPoint=ff}L', b'v^{CGContext=}^{CGPoint=dd}Q'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CGContextSetTextMatrix': (sel32or64(b'v^{CGContext=}{CGAffineTransform=ffffff}', b'v^{CGContext=}{CGAffineTransform=dddddd}'),), 'CGPDFObjectGetType': (b'I^{CGPDFObject=}',), 'CGEventSourceGetUserData': (b'q^{__CGEventSource=}',), 'CGContextSetStrokeColor': (sel32or64(b'v^{CGContext=}^f', b'v^{CGContext=}^d'), '', {'arguments': {1: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGPDFScannerPopBoolean': (b'B^{CGPDFScanner=}^C', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGGradientRelease': (b'v^{CGGradient=}',), 'CGConfigureDisplayMode': (b'i^{_CGDisplayConfigRef=}I^{__CFDictionary=}',), 'CGWarpMouseCursorPosition': (sel32or64(b'i{CGPoint=ff}', b'i{CGPoint=dd}'),), 'CGPathCreateWithRoundedRect': (sel32or64(b'^{CGPath=}{CGRect={CGPoint=ff}{CGSize=ff}}ff^{CGAffineTransform=ffffff}', b'^{CGPath=}{CGRect={CGPoint=dd}{CGSize=dd}}dd^{CGAffineTransform=dddddd}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'type_modifier': 'n'}}}), 'CGPDFDocumentGetInfo': (b'^{CGPDFDictionary=}^{CGPDFDocument=}',), 'CGContextSetStrokePattern': (sel32or64(b'v^{CGContext=}^{CGPattern=}^f', b'v^{CGContext=}^{CGPattern=}^d'), '', {'arguments': {2: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGDisplayCanSetPalette': (sel32or64(b'iI', b'II'),), 'CGRectApplyAffineTransform': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}{CGAffineTransform=ffffff}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}{CGAffineTransform=dddddd}'),), 'CGEventSetDoubleValueField': (b'v^{__CGEvent=}Id',), 'CGContextFlush': (b'v^{CGContext=}',), 'CGWindowListCopyWindowInfo': (b'^{__CFArray=}II', '', {'retval': {'already_cfretained': True}}), 'CGFontGetXHeight': (b'i^{CGFont=}',), 'CGPDFContextCreate': (sel32or64(b'^{CGContext=}^{CGDataConsumer=}^{CGRect={CGPoint=ff}{CGSize=ff}}^{__CFDictionary=}', b'^{CGContext=}^{CGDataConsumer=}^{CGRect={CGPoint=dd}{CGSize=dd}}^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}}}), 'CGPaletteGetIndexForColor': (b'I^{_CGDirectPaletteRef=}{CGDeviceColor=fff}',), 'CGImageCreateWithJPEGDataProvider': (sel32or64(b'^{CGImage=}^{CGDataProvider=}^fBI', 
b'^{CGImage=}^{CGDataProvider=}^dBI'), '', {'retval': {'already_cfretained': True}}), 'CGPDFDocumentCreateWithURL': (b'^{CGPDFDocument=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CGPathAddArcToPoint': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}fffff', b'v^{CGPath=}^{CGAffineTransform=dddddd}ddddd'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGVectorMake': (sel32or64(b'{CGVector=ff}ff', b'{CGVector=dd}dd'),), 'CGDisplayIsActive': (sel32or64(b'iI', b'II'),), 'CGPDFScannerScan': (b'B^{CGPDFScanner=}',), 'CGPathCreateMutableCopyByTransformingPath': (sel32or64(b'^{CGPath=}^{CGPath=}^{CGAffineTransform=ffffff}', b'^{CGPath=}^{CGPath=}^{CGAffineTransform=dddddd}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}}}), 'CGSetDisplayTransferByTable': (b'iII^f^f^f', '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 4: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'CGFontGetDescent': (b'i^{CGFont=}',), 'CGImageGetPixelFormatInfo': (b'I^{CGImage=}',), 'CGImageGetRenderingIntent': (b'I^{CGImage=}',), 'CGGLContextUpdateViewportSize': (sel32or64(b'v^{CGContext=}{CGSize=ff}', b'v^{CGContext=}{CGSize=dd}'),), 'CGGetEventTapList': (b'iI^{__CGEventTapInformation=IIIQiiBfff}^I', '', {'arguments': {1: {'c_array_length_in_arg': (0, 2), 'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CGEnableEventStateCombining': (sel32or64(b'ii', b'iI'),), 'CGColorSpaceCreateDeviceRGB': (b'^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGPathEqualToPath': (b'B^{CGPath=}^{CGPath=}',), 'CGPDFScannerPopObject': (b'B^{CGPDFScanner=}^^{CGPDFObject=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGDisplayIsCaptured': (sel32or64(b'iI', b'II'),), 'CGPDFPageRelease': (b'v^{CGPDFPage=}',), 'CGDataProviderCreate': (b'^{CGDataProvider=}^v^{CGDataProviderCallbacks=^?^?^?^?}', '', {'retval': {'already_cfretained': True}}), 'CGRectIsEmpty': (sel32or64(b'B{CGRect={CGPoint=ff}{CGSize=ff}}', b'B{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGDisplayMoveCursorToPoint': (sel32or64(b'iI{CGPoint=ff}', b'iI{CGPoint=dd}'),), 'CGPDFScannerPopInteger': (sel32or64(b'B^{CGPDFScanner=}^l', b'B^{CGPDFScanner=}^q'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGPaletteGetNumberOfSamples': (b'I^{_CGDirectPaletteRef=}',), 'CGContextStrokePath': (b'v^{CGContext=}',), 'CGEventSetLocation': (sel32or64(b'v^{__CGEvent=}{CGPoint=ff}', b'v^{__CGEvent=}{CGPoint=dd}'),), 'CGEventPostToPSN': (b'v^{ProcessSerialNumber=II}^{__CGEvent=}', '', {'arguments': {0: {'type_modifier': 'n'}}}), 'CGAffineTransformScale': (sel32or64(b'{CGAffineTransform=ffffff}{CGAffineTransform=ffffff}ff', b'{CGAffineTransform=dddddd}{CGAffineTransform=dddddd}dd'),), 'CGEventSourceSetUserData': (b'v^{__CGEventSource=}q',), 'CGLayerRelease': (b'v^{CGLayer=}',), 'CGPDFArrayGetArray': (sel32or64(b'B^{CGPDFArray=}L^^{CGPDFArray=}', b'B^{CGPDFArray=}Q^^{CGPDFArray=}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CGContextDrawLayerInRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}^{CGLayer=}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}^{CGLayer=}'),), 'CGDataProviderRelease': (b'v^{CGDataProvider=}',), 'CGEventPost': (b'vI^{__CGEvent=}',), 'CGMainDisplayID': (b'I',), 'CGFontGetTypeID': (sel32or64(b'L', b'Q'),), 'CGPathAddRoundedRect': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}{CGRect={CGPoint=ff}{CGSize=ff}}ff', 
b'v^{CGPath=}^{CGAffineTransform=dddddd}{CGRect={CGPoint=dd}{CGSize=dd}}dd'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGRectEqualToRect': (sel32or64(b'B{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}', b'B{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGRectGetMaxY': (sel32or64(b'f{CGRect={CGPoint=ff}{CGSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGBitmapContextGetWidth': (sel32or64(b'L^{CGContext=}', b'Q^{CGContext=}'),), 'CGShadingCreateRadial': (sel32or64(b'^{CGShading=}^{CGColorSpace=}{CGPoint=ff}f{CGPoint=ff}f^{CGFunction=}BB', b'^{CGShading=}^{CGColorSpace=}{CGPoint=dd}d{CGPoint=dd}d^{CGFunction=}BB'), '', {'retval': {'already_cfretained': True}}), 'CGPDFScannerRetain': (b'^{CGPDFScanner=}^{CGPDFScanner=}',), 'CGDisplayMirrorsDisplay': (b'II',), 'CGContextAddRects': (sel32or64(b'v^{CGContext=}^{CGRect={CGPoint=ff}{CGSize=ff}}L', b'v^{CGContext=}^{CGRect={CGPoint=dd}{CGSize=dd}}Q'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CGDataConsumerCreateWithURL': (b'^{CGDataConsumer=}^{__CFURL=}', '', {'retval': {'already_cfretained': True}}), 'CGContextConvertRectToUserSpace': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGGradientCreateWithColors': (sel32or64(b'^{CGGradient=}^{CGColorSpace=}^{__CFArray=}^f', b'^{CGGradient=}^{CGColorSpace=}^{__CFArray=}^d'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'CGPathApply': (b'v^{CGPath=}^v^?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^{CGPathElement=i^{CGPoint=dd}}'}}}}}}), 'CGGetDisplaysWithOpenGLDisplayMask': (b'iII^I^I', '', {'arguments': {2: {'c_array_length_in_arg': (1, 3), 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CGImageRetain': (b'^{CGImage=}^{CGImage=}',), 'CGContextAddArc': (sel32or64(b'v^{CGContext=}fffffi', b'v^{CGContext=}dddddi'),), 'CGFontCreateWithPlatformFont': (b'^{CGFont=}^v', '', {'retval': {'already_cfretained': True}}), 'CGPDFContextSetURLForRect': (sel32or64(b'v^{CGContext=}^{__CFURL=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}^{__CFURL=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGPDFContentStreamRetain': (b'^{CGPDFContentStream=}^{CGPDFContentStream=}',), 'CGCompleteDisplayConfiguration': (b'i^{_CGDisplayConfigRef=}I',), 'CGAffineTransformTranslate': (sel32or64(b'{CGAffineTransform=ffffff}{CGAffineTransform=ffffff}ff', b'{CGAffineTransform=dddddd}{CGAffineTransform=dddddd}dd'),), 'CGContextAddCurveToPoint': (sel32or64(b'v^{CGContext=}ffffff', b'v^{CGContext=}dddddd'),), 'CGPDFContentStreamCreateWithPage': (b'^{CGPDFContentStream=}^{CGPDFPage=}', '', {'retval': {'already_cfretained': True}}), 'CGEventSourceGetKeyboardType': (b'I^{__CGEventSource=}',), 'CGEventKeyboardGetUnicodeString': (sel32or64(b'v^{__CGEvent=}L^L^T', b'v^{__CGEvent=}Q^Q^T'), '', {'arguments': {2: {'type_modifier': 'o'}, 3: {'c_array_length_in_arg': (1, 2), 'type_modifier': 'o'}}}), 'CGDisplaySwitchToMode': (b'iI^{__CFDictionary=}',), 'CGEventSetIntegerValueField': (b'v^{__CGEvent=}Iq',), 'CGEventCreate': (b'^{__CGEvent=}^{__CGEventSource=}', '', {'retval': {'already_cfretained': True}}), 'CGContextSetFillColorSpace': (b'v^{CGContext=}^{CGColorSpace=}',), 'CGFontGetGlyphWithGlyphName': (b'S^{CGFont=}^{__CFString=}',), 'CGFunctionRetain': (b'^{CGFunction=}^{CGFunction=}',), 
'CGContextConvertPointToUserSpace': (sel32or64(b'{CGPoint=ff}^{CGContext=}{CGPoint=ff}', b'{CGPoint=dd}^{CGContext=}{CGPoint=dd}'),), 'CGColorSpaceCopyName': (b'^{__CFString=}^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGPatternRelease': (b'v^{CGPattern=}',), 'CGPointEqualToPoint': (sel32or64(b'B{CGPoint=ff}{CGPoint=ff}', b'B{CGPoint=dd}{CGPoint=dd}'),), 'CGCursorIsDrawnInFramebuffer': (sel32or64(b'i', b'I'),), 'CGEventSourceCounterForEventType': (b'III',), 'CGDisplayCaptureWithOptions': (b'iII',), 'CGDisplayIsStereo': (sel32or64(b'iI', b'II'),), 'CGBitmapContextGetBytesPerRow': (sel32or64(b'L^{CGContext=}', b'Q^{CGContext=}'),), 'CGContextBeginTransparencyLayer': (b'v^{CGContext=}^{__CFDictionary=}',), 'CGFontRetain': (b'^{CGFont=}^{CGFont=}',), 'CGContextSetLineDash': (sel32or64(b'v^{CGContext=}f^fL', b'v^{CGContext=}d^dQ'), '', {'arguments': {2: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CGColorSpaceCreateICCBased': (sel32or64(b'^{CGColorSpace=}L^f^{CGDataProvider=}^{CGColorSpace=}', b'^{CGColorSpace=}Q^d^{CGDataProvider=}^{CGColorSpace=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGContextSetGrayStrokeColor': (sel32or64(b'v^{CGContext=}ff', b'v^{CGContext=}dd'),), 'CGPDFOperatorTableRelease': (b'v^{CGPDFOperatorTable=}',), 'CGContextGetTypeID': (sel32or64(b'L', b'Q'),), 'CGRectOffset': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}ff', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}dd'),), 'CGLayerGetTypeID': (sel32or64(b'L', b'Q'),), 'CGColorSpaceCreateCalibratedGray': (sel32or64(b'^{CGColorSpace=}[3f][3f]f', b'^{CGColorSpace=}[3d][3d]d'), '', {'retval': {'already_cfretained': True}}), 'CGEventTapCreate': (b'^{__CFMachPort=}IIIQ^?^v', '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'callable': {'retval': {'type': b'^{__CGEvent=}'}, 'arguments': {0: {'type': b'^{__CGEventTapProxy=}'}, 1: {'type': b'I'}, 2: {'type': b'^{__CGEvent=}'}, 3: {'type': b'^v'}}}}}}), 'CGContextSetRenderingIntent': (b'v^{CGContext=}I',), 'CGDisplayCurrentMode': (b'^{__CFDictionary=}I',), 'CGConfigureDisplayWithDisplayMode': (b'i^{_CGDisplayConfigRef=}I^{CGDisplayMode=}^{__CFDictionary=}',), 'CGCursorIsVisible': (sel32or64(b'i', b'I'),), 'CGDisplayIsMain': (sel32or64(b'iI', b'II'),), 'CGSetLocalEventsFilterDuringSuppressionState': (b'iII',), 'CGPDFDictionaryGetNumber': (sel32or64(b'B^{CGPDFDictionary=}^t^f', b'B^{CGPDFDictionary=}^t^d'), '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CGAssociateMouseAndMouseCursorPosition': (sel32or64(b'ii', b'iI'),), 'CGEventGetDoubleValueField': (b'd^{__CGEvent=}I',), 'CGPDFDocumentAllowsPrinting': (b'B^{CGPDFDocument=}',), 'CGContextSetBlendMode': (b'v^{CGContext=}I',), 'CGFontGetGlyphAdvances': (sel32or64(b'B^{CGFont=}^SL^i', b'B^{CGFont=}^SQ^i'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 2, 'type_modifier': 'o'}}}), 'CGContextReplacePathWithStrokedPath': (b'v^{CGContext=}',), 'CGGetDisplayTransferByFormula': (b'iI^f^f^f^f^f^f^f^f^f', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}, 5: {'type_modifier': 'o'}, 6: {'type_modifier': 'o'}, 7: {'type_modifier': 'o'}, 8: {'type_modifier': 'o'}, 9: {'type_modifier': 'o'}}}), 'CGDisplayIsInHWMirrorSet': (sel32or64(b'iI', b'II'),), 
'CGDisplayCapture': (b'iI',), 'CGColorSpaceGetName': (b'^{__CFString=}^{CGColorSpace=}',), 'CGImageGetAlphaInfo': (b'I^{CGImage=}',), 'CGSizeCreateDictionaryRepresentation': (sel32or64(b'^{__CFDictionary=}{CGSize=ff}', b'^{__CFDictionary=}{CGSize=dd}'), '', {'retval': {'already_cfretained': True}}), 'CGPDFStringGetLength': (sel32or64(b'L^{CGPDFString=}', b'Q^{CGPDFString=}'),), 'CGScreenRegisterMoveCallback': (b'i^?^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'{CGScreenUpdateMoveDelta=ii}'}, 1: {'type': b'Q'}, 2: {'type': b'^{CGRect={CGPoint=dd}{CGSize=dd}}'}, 3: {'type': b'^v'}}}}}}), 'CGColorConversionInfoGetTypeID': (sel32or64(b'I', b'Q'),), 'CGPDFDocumentRetain': (b'^{CGPDFDocument=}^{CGPDFDocument=}',), 'CGWaitForScreenUpdateRects': (sel32or64(b'iI^I^^{CGRect={CGPoint=ff}{CGSize=ff}}^L^{CGScreenUpdateMoveDelta=ii}', b'iI^I^^{CGRect={CGPoint=dd}{CGSize=dd}}^Q^{CGScreenUpdateMoveDelta=ii}'),), 'CGDisplayBitsPerSample': (sel32or64(b'LI', b'QI'),), 'CGDisplayModeGetPixelHeight': (sel32or64(b'L^{CGDisplayMode=}', b'Q^{CGDisplayMode=}'),), 'CGContextDrawTiledImage': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}^{CGImage=}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}^{CGImage=}'),), 'CGBitmapContextGetBitsPerComponent': (sel32or64(b'L^{CGContext=}', b'Q^{CGContext=}'),), 'CGContextAddPath': (b'v^{CGContext=}^{CGPath=}',), 'CGConfigureDisplayOrigin': (b'i^{_CGDisplayConfigRef=}Iii',), 'CGContextSetCharacterSpacing': (sel32or64(b'v^{CGContext=}f', b'v^{CGContext=}d'),), 'CGDisplaySamplesPerPixel': (sel32or64(b'LI', b'QI'),), 'CGOpenGLDisplayMaskToDisplayID': (b'II',), 'CGPDFArrayGetNumber': (sel32or64(b'B^{CGPDFArray=}L^f', b'B^{CGPDFArray=}Q^d'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CGDisplaySetDisplayMode': (b'iI^{CGDisplayMode=}^{__CFDictionary=}',), 'CGRectIsNull': (sel32or64(b'B{CGRect={CGPoint=ff}{CGSize=ff}}', b'B{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGDataConsumerRelease': (b'v^{CGDataConsumer=}',), 'CGColorSpaceCreateWithICCProfile': (b'^{CGColorSpace=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CGDisplayModeGetWidth': (sel32or64(b'L^{CGDisplayMode=}', b'Q^{CGDisplayMode=}'),), 'CGCaptureAllDisplays': (b'i',), 'CGPDFScannerPopArray': (b'B^{CGPDFScanner=}^^{CGPDFArray=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGContextClipToRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGDisplayStreamUpdateCreateMergedUpdate': (b'^{CGDisplayStreamUpdate=}^{CGDisplayStreamUpdate=}^{CGDisplayStreamUpdate=}', '', {'retval': {'already_cfretained': True}}), 'CGDisplayHideCursor': (b'iI',), 'CGPDFDocumentGetPage': (sel32or64(b'^{CGPDFPage=}^{CGPDFDocument=}L', b'^{CGPDFPage=}^{CGPDFDocument=}Q'),), 'CGSessionCopyCurrentDictionary': (b'^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGFontGetFontBBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGFont=}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGFont=}'),), 'CGImageGetBitsPerComponent': (sel32or64(b'L^{CGImage=}', b'Q^{CGImage=}'),), 'CGFontCopyTableTags': (b'^{__CFArray=}^{CGFont=}', '', {'retval': {'already_cfretained': True}}), 'CGWaitForScreenRefreshRects': (sel32or64(b'i^^{CGRect={CGPoint=ff}{CGSize=ff}}^I', b'i^^{CGRect={CGPoint=dd}{CGSize=dd}}^I'),), 'CGDataProviderCreateDirect': (b'^{CGDataProvider=}^vq^{CGDataProviderDirectCallbacks=I^?^?^?^?}', '', {'retval': {'already_cfretained': True}}), 'CGDisplayStreamStop': 
(b'i^{CGDisplayStream=}',), 'CGShadingRetain': (b'^{CGShading=}^{CGShading=}',), 'CGBitmapContextGetColorSpace': (b'^{CGColorSpace=}^{CGContext=}',), 'CGContextShowTextAtPoint': (sel32or64(b'v^{CGContext=}ff^cL', b'v^{CGContext=}dd^cQ'), '', {'arguments': {3: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}}}), 'CGBitmapContextCreateImage': (b'^{CGImage=}^{CGContext=}', '', {'retval': {'already_cfretained': True}}), 'CGContextTranslateCTM': (sel32or64(b'v^{CGContext=}ff', b'v^{CGContext=}dd'),), 'CGDisplayModelNumber': (b'II',), 'CGPDFContextCreateWithURL': (sel32or64(b'^{CGContext=}^{__CFURL=}^{CGRect={CGPoint=ff}{CGSize=ff}}^{__CFDictionary=}', b'^{CGContext=}^{__CFURL=}^{CGRect={CGPoint=dd}{CGSize=dd}}^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}}}), 'CGColorSpaceCopyICCProfile': (b'^{__CFData=}^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGContextSetRGBStrokeColor': (sel32or64(b'v^{CGContext=}ffff', b'v^{CGContext=}dddd'),), 'CGBitmapContextCreateWithData': (sel32or64(b'^{CGContext=}^vLLLL^{CGColorSpace=}I^?^v', b'^{CGContext=}^vQQQQ^{CGColorSpace=}I^?^v'), '', {'retval': {'already_cfretained': True}, 'arguments': {7: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v'}}}}}}), 'CGDisplayModeRelease': (b'v^{CGDisplayMode=}',), 'CGAffineTransformConcat': (sel32or64(b'{CGAffineTransform=ffffff}{CGAffineTransform=ffffff}{CGAffineTransform=ffffff}', b'{CGAffineTransform=dddddd}{CGAffineTransform=dddddd}{CGAffineTransform=dddddd}'),), 'CGImageGetDataProvider': (b'^{CGDataProvider=}^{CGImage=}',), 'CGContextConvertRectToDeviceSpace': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGImageCreateWithMaskingColors': (sel32or64(b'^{CGImage=}^{CGImage=}^f', b'^{CGImage=}^{CGImage=}^d'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGPDFDictionaryGetInteger': (sel32or64(b'B^{CGPDFDictionary=}^t^l', b'B^{CGPDFDictionary=}^t^q'), '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CGColorGetAlpha': (sel32or64(b'f^{CGColor=}', b'd^{CGColor=}'),), 'CGContextSetAllowsFontSubpixelPositioning': (b'v^{CGContext=}B',), 'CGPDFDocumentIsEncrypted': (b'B^{CGPDFDocument=}',), 'CGDisplayBestModeForParameters': (sel32or64(b'^{__CFDictionary=}ILLL^i', b'^{__CFDictionary=}IQQQ^I'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'CGPDFArrayGetBoolean': (sel32or64(b'B^{CGPDFArray=}L^C', b'B^{CGPDFArray=}Q^C'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CGShadingCreateAxial': (sel32or64(b'^{CGShading=}^{CGColorSpace=}{CGPoint=ff}{CGPoint=ff}^{CGFunction=}BB', b'^{CGShading=}^{CGColorSpace=}{CGPoint=dd}{CGPoint=dd}^{CGFunction=}BB'), '', {'retval': {'already_cfretained': True}}), 'CGEventCreateData': (b'^{__CFData=}^{__CFAllocator=}^{__CGEvent=}', '', {'retval': {'already_cfretained': True}}), 'CGColorConverterRetain': (b'^{CGColorConversionInfo=}^{CGColorConversionInfo=}',), 'CGRectMakeWithDictionaryRepresentation': (sel32or64(b'B^{__CFDictionary=}^{CGRect={CGPoint=ff}{CGSize=ff}}', b'B^{__CFDictionary=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGFunctionCreate': (sel32or64(b'^{CGFunction=}^vL^fL^f^{CGFunctionCallbacks=I^?^?}', 
b'^{CGFunction=}^vQ^dQ^d^{CGFunctionCallbacks=I^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CGDataProviderCreateDirectAccess': (sel32or64(b'^{CGDataProvider=}^vL^{CGDataProviderDirectAccessCallbacks=^?^?^?^?}', b'^{CGDataProvider=}^vQ^{CGDataProviderDirectAccessCallbacks=^?^?^?^?}'), '', {'retval': {'already_cfretained': True}}), 'CGPaletteCreateFromPaletteBlendedWithColor': (b'^{_CGDirectPaletteRef=}^{_CGDirectPaletteRef=}f{CGDeviceColor=fff}', '', {'retval': {'already_cfretained': True}}), 'CGContextGetTextMatrix': (sel32or64(b'{CGAffineTransform=ffffff}^{CGContext=}', b'{CGAffineTransform=dddddd}^{CGContext=}'),), 'CGDisplayModeGetIOFlags': (b'I^{CGDisplayMode=}',), 'CGDisplayModeCopyPixelEncoding': (b'^{__CFString=}^{CGDisplayMode=}', '', {'retval': {'already_cfretained': True}}), 'CGGetDisplaysWithRect': (sel32or64(b'i{CGRect={CGPoint=ff}{CGSize=ff}}I^I^I', b'i{CGRect={CGPoint=dd}{CGSize=dd}}I^I^I'), '', {'arguments': {2: {'c_array_length_in_arg': (1, 3), 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CGFontCopyFullName': (b'^{__CFString=}^{CGFont=}', '', {'retval': {'already_cfretained': True}}), 'CGGetDisplaysWithPoint': (sel32or64(b'i{CGPoint=ff}I^I^I', b'i{CGPoint=dd}I^I^I'), '', {'arguments': {2: {'c_array_length_in_arg': (1, 3), 'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'CGPDFStreamCopyData': (b'^{__CFData=}^{CGPDFStream=}^I', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}}}), 'CGImageGetHeight': (sel32or64(b'L^{CGImage=}', b'Q^{CGImage=}'),), 'CGDisplayBitsPerPixel': (sel32or64(b'LI', b'QI'),), 'CGPDFDocumentGetTrimBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGPDFDocument=}i', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGPDFDocument=}i'),), 'CGGetActiveDisplayList': (b'iI^I^I', '', {'arguments': {1: {'c_array_length_in_arg': (0, 2), 'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CGContextGetClipBoundingBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGContext=}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGContext=}'),), 'CGContextRetain': (b'^{CGContext=}^{CGContext=}',), 'CGDisplayRemoveReconfigurationCallback': (b'i^?^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'I'}, 1: {'type': b'I'}, 2: {'type': b'^v'}}}}}}), 'CGPDFDocumentGetRotationAngle': (b'i^{CGPDFDocument=}i',), 'CGDisplayModeGetPixelWidth': (sel32or64(b'L^{CGDisplayMode=}', b'Q^{CGDisplayMode=}'),), 'CGContextClipToRects': (sel32or64(b'v^{CGContext=}^{CGRect={CGPoint=ff}{CGSize=ff}}L', b'v^{CGContext=}^{CGRect={CGPoint=dd}{CGSize=dd}}Q'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CGEventCreateCopy': (b'^{__CGEvent=}^{__CGEvent=}', '', {'retval': {'already_cfretained': True}}), 'CGEventSourceSetLocalEventsSuppressionInterval': (b'v^{__CGEventSource=}d',), 'CGPDFDocumentGetBleedBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGPDFDocument=}i', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGPDFDocument=}i'),), 'CGPathGetCurrentPoint': (sel32or64(b'{CGPoint=ff}^{CGPath=}', b'{CGPoint=dd}^{CGPath=}'),), 'CGContextShowGlyphsAtPositions': (sel32or64(b'v^{CGContext=}^S^{CGPoint=ff}L', b'v^{CGContext=}^S^{CGPoint=dd}Q'), '', {'arguments': {1: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}, 2: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CGEventCreateMouseEvent': (sel32or64(b'^{__CGEvent=}^{__CGEventSource=}I{CGPoint=ff}I', b'^{__CGEvent=}^{__CGEventSource=}I{CGPoint=dd}I'), '', {'retval': {'already_cfretained': True}}), 'CGContextSetStrokeColorWithColor': 
(b'v^{CGContext=}^{CGColor=}',), 'CGPaletteSetColorAtIndex': (b'v^{_CGDirectPaletteRef=}{CGDeviceColor=fff}I',), 'CGContextSetCMYKFillColor': (sel32or64(b'v^{CGContext=}fffff', b'v^{CGContext=}ddddd'),), 'CGImageCreateWithImageInRect': (sel32or64(b'^{CGImage=}^{CGImage=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'^{CGImage=}^{CGImage=}{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'retval': {'already_cfretained': True}}), 'CGContextBeginPage': (sel32or64(b'v^{CGContext=}^{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGImageCreateCopyWithColorSpace': (b'^{CGImage=}^{CGImage=}^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGContextDrawPDFDocument': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}^{CGPDFDocument=}i', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}^{CGPDFDocument=}i'),), 'CGEventGetUnflippedLocation': (sel32or64(b'{CGPoint=ff}^{__CGEvent=}', b'{CGPoint=dd}^{__CGEvent=}'),), 'CGFunctionGetTypeID': (sel32or64(b'L', b'Q'),), 'CGCaptureAllDisplaysWithOptions': (b'iI',), 'CGContextSetShadowWithColor': (sel32or64(b'v^{CGContext=}{CGSize=ff}f^{CGColor=}', b'v^{CGContext=}{CGSize=dd}d^{CGColor=}'),), 'CGContextSetInterpolationQuality': (b'v^{CGContext=}I',), 'CGPDFPageGetDictionary': (b'^{CGPDFDictionary=}^{CGPDFPage=}',), 'CGContextMoveToPoint': (sel32or64(b'v^{CGContext=}ff', b'v^{CGContext=}dd'),), 'CGContextDrawLayerAtPoint': (sel32or64(b'v^{CGContext=}{CGPoint=ff}^{CGLayer=}', b'v^{CGContext=}{CGPoint=dd}^{CGLayer=}'),), 'CGDisplayRestoreColorSyncSettings': (b'v',), 'CGPathCreateWithEllipseInRect': (sel32or64(b'^{CGPath=}{CGRect={CGPoint=ff}{CGSize=ff}}^{CGAffineTransform=ffffff}', b'^{CGPath=}{CGRect={CGPoint=dd}{CGSize=dd}}^{CGAffineTransform=dddddd}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}}}), 'CGPDFPageGetPageNumber': (sel32or64(b'L^{CGPDFPage=}', b'Q^{CGPDFPage=}'),), 'CGColorRelease': (b'v^{CGColor=}',), 'CGColorSpaceGetColorTable': (b'v^{CGColorSpace=}^C', '', {'arguments': {1: {'c_array_of_variable_length': True, 'type_modifier': 'o'}}}), 'CGColorSpaceCopyICCData': (b'^{__CFData=}^{CGColorSpace=}', '', {'retval': {'already_cfretained': True}}), 'CGDisplayIsAlwaysInMirrorSet': (sel32or64(b'iI', b'II'),), 'CGFontCopyPostScriptName': (b'^{__CFString=}^{CGFont=}', '', {'retval': {'already_cfretained': True}}), 'CGFontCreateWithDataProvider': (b'^{CGFont=}^{CGDataProvider=}', '', {'retval': {'already_cfretained': True}}), 'CGRectCreateDictionaryRepresentation': (sel32or64(b'^{__CFDictionary=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'^{__CFDictionary=}{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'retval': {'already_cfretained': True}}), 'CGRectInset': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}ff', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}dd'),), 'CGRectGetWidth': (sel32or64(b'f{CGRect={CGPoint=ff}{CGSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGContextStrokeRectWithWidth': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}f', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}d'),), 'CGEventGetTypeID': (sel32or64(b'L', b'Q'),), 'CGFontGetItalicAngle': (sel32or64(b'f^{CGFont=}', b'd^{CGFont=}'),), 'CGRectUnion': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGPathAddRects': 
(sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}^{CGRect={CGPoint=ff}{CGSize=ff}}L', b'v^{CGPath=}^{CGAffineTransform=dddddd}^{CGRect={CGPoint=dd}{CGSize=dd}}Q'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CGColorSpaceSupportsOutput': (b'B^{CGColorSpace=}',), 'CGPDFContextSetDestinationForRect': (sel32or64(b'v^{CGContext=}^{__CFString=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}^{__CFString=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGPDFOperatorTableRetain': (b'^{CGPDFOperatorTable=}^{CGPDFOperatorTable=}',), 'CGPDFScannerPopName': (b'B^{CGPDFScanner=}^^c', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGLayerGetContext': (b'^{CGContext=}^{CGLayer=}',), 'CGImageGetBitsPerPixel': (sel32or64(b'L^{CGImage=}', b'Q^{CGImage=}'),), 'CGPathAddArc': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}fffffB', b'v^{CGPath=}^{CGAffineTransform=dddddd}dddddB'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGContextDrawLinearGradient': (sel32or64(b'v^{CGContext=}^{CGGradient=}{CGPoint=ff}{CGPoint=ff}I', b'v^{CGContext=}^{CGGradient=}{CGPoint=dd}{CGPoint=dd}I'),), 'CGDataConsumerGetTypeID': (sel32or64(b'L', b'Q'),), 'CGDisplayModeIsUsableForDesktopGUI': (b'B^{CGDisplayMode=}',), 'CGRectIsInfinite': (sel32or64(b'B{CGRect={CGPoint=ff}{CGSize=ff}}', b'B{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGEventSetTimestamp': (b'v^{__CGEvent=}Q',), 'CGPDFContextAddDocumentMetadata': (b'v^{CGContext=}^{__CFData=}',), 'CGDisplayWaitForBeamPositionOutsideLines': (b'iIII',), 'CGColorSpaceCreateWithICCData': (b'@@', '', {'retval': {'already_cfretained': True}}), 'CGPDFContextBeginPage': (b'v^{CGContext=}^{__CFDictionary=}',), 'CGPSConverterIsConverting': (b'B^{CGPSConverter=}',), 'CGEventPostToPid': (b'vi^{__CGEvent=}',), 'CGPathIsEmpty': (b'B^{CGPath=}',), 'CGDisplayScreenSize': (sel32or64(b'{CGSize=ff}I', b'{CGSize=dd}I'),), 'CGDisplayIsInMirrorSet': (sel32or64(b'iI', b'II'),), 'CGFontCopyGlyphNameForGlyph': (b'^{__CFString=}^{CGFont=}S', '', {'retval': {'already_cfretained': True}}), 'CGBitmapContextGetAlphaInfo': (b'I^{CGContext=}',), 'CGFontCreateCopyWithVariations': (b'^{CGFont=}^{CGFont=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGPDFScannerRelease': (b'v^{CGPDFScanner=}',), 'CGContextCopyPath': (b'^{CGPath=}^{CGContext=}', '', {'retval': {'already_cfretained': True}}), 'CGShadingRelease': (b'v^{CGShading=}',), 'CGDisplayFadeOperationInProgress': (sel32or64(b'i', b'I'),), 'CGPostMouseEvent': (sel32or64(b'i{CGPoint=ff}iIi', b'i{CGPoint=dd}III'), '', {'c_array_length_in_arg': 2, 'variadic': True}), 'CGPDFContentStreamGetStreams': (b'^{__CFArray=}^{CGPDFContentStream=}',), 'CGContextSetMiterLimit': (sel32or64(b'v^{CGContext=}f', b'v^{CGContext=}d'),), 'CGPDFOperatorTableSetCallback': (b'v^{CGPDFOperatorTable=}^t^?', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{CGPDFScanner=}'}, 1: {'type': b'^v'}}}}}}), 'CGEventTapIsEnabled': (b'B^{__CFMachPort=}',), 'CGContextDrawPath': (b'v^{CGContext=}I',), 'CGWindowServerCFMachPort': (b'^{__CFMachPort=}',), 'CGColorEqualToColor': (b'B^{CGColor=}^{CGColor=}',), 'CGContextSetFontSize': (sel32or64(b'v^{CGContext=}f', b'v^{CGContext=}d'),), 'CGEventGetIntegerValueField': (b'q^{__CGEvent=}I',), 'CGContextShowText': (sel32or64(b'v^{CGContext=}^cL', b'v^{CGContext=}^cQ'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CGColorSpaceCreateWithName': 
(b'^{CGColorSpace=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CGRectGetMaxX': (sel32or64(b'f{CGRect={CGPoint=ff}{CGSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGImageCreateWithMask': (b'^{CGImage=}^{CGImage=}^{CGImage=}', '', {'retval': {'already_cfretained': True}}), 'CGImageCreateWithPNGDataProvider': (sel32or64(b'^{CGImage=}^{CGDataProvider=}^fBI', b'^{CGImage=}^{CGDataProvider=}^dBI'), '', {'retval': {'already_cfretained': True}}), 'CGContextSetTextDrawingMode': (b'v^{CGContext=}I',), 'CGContextGetUserSpaceToDeviceSpaceTransform': (sel32or64(b'{CGAffineTransform=ffffff}^{CGContext=}', b'{CGAffineTransform=dddddd}^{CGContext=}'),), 'CGDataConsumerCreateWithCFData': (b'^{CGDataConsumer=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CGColorSpaceCreateCalibratedRGB': (sel32or64(b'^{CGColorSpace=}[3f][3f][3f][9f]', b'^{CGColorSpace=}[3d][3d][3d][9d]'), '', {'retval': {'already_cfretained': True}}), 'CGContextSetLineJoin': (b'v^{CGContext=}I',), 'CGDataProviderCreateSequential': (b'^{CGDataProvider=}^v^{CGDataProviderSequentialCallbacks=I^?^?^?^?}', '', {'retval': {'already_cfretained': True}}), 'CGPDFArrayGetNull': (sel32or64(b'B^{CGPDFArray=}L', b'B^{CGPDFArray=}Q'),), 'CGEventSourceSecondsSinceLastEventType': (b'dII',), 'CGContextSetRGBFillColor': (sel32or64(b'v^{CGContext=}ffff', b'v^{CGContext=}dddd'),), 'CGPaletteCreateCopy': (b'^{_CGDirectPaletteRef=}^{_CGDirectPaletteRef=}', '', {'retval': {'already_cfretained': True}}), 'CGGetLastMouseDelta': (b'v^i^i', '', {'arguments': {0: {'type_modifier': 'o'}, 1: {'type_modifier': 'o'}}}), 'CGPDFArrayGetDictionary': (sel32or64(b'B^{CGPDFArray=}L^^{CGPDFDictionary=}', b'B^{CGPDFArray=}Q^^{CGPDFDictionary=}'), '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CGPathCloseSubpath': (b'v^{CGPath=}',), 'CGPDFContentStreamCreateWithStream': (b'^{CGPDFContentStream=}^{CGPDFStream=}^{CGPDFDictionary=}^{CGPDFContentStream=}', '', {'retval': {'already_cfretained': True}}), 'CGDisplaySerialNumber': (b'II',), 'CGGetOnlineDisplayList': (b'iI^I^I', '', {'arguments': {1: {'c_array_length_in_arg': (0, 2), 'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'CGCancelDisplayConfiguration': (b'i^{_CGDisplayConfigRef=}',), 'CGEventSourceSetPixelsPerLine': (b'v^{__CGEventSource=}d',), 'CGEventGetTimestamp': (b'Q^{__CGEvent=}',), 'CGContextSetShouldSubpixelPositionFonts': (b'v^{CGContext=}B',), 'CGContextSaveGState': (b'v^{CGContext=}',), 'CGReleaseAllDisplays': (b'i',), 'CGDisplayRelease': (b'iI',), 'CGContextConvertSizeToUserSpace': (sel32or64(b'{CGSize=ff}^{CGContext=}{CGSize=ff}', b'{CGSize=dd}^{CGContext=}{CGSize=dd}'),), 'CGColorCreateCopyWithAlpha': (sel32or64(b'^{CGColor=}^{CGColor=}f', b'^{CGColor=}^{CGColor=}d'), '', {'retval': {'already_cfretained': True}}), 'CGPDFScannerPopNumber': (sel32or64(b'B^{CGPDFScanner=}^f', b'B^{CGPDFScanner=}^d'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGContextEOFillPath': (b'v^{CGContext=}',), 'CGPathCreateCopy': (b'^{CGPath=}^{CGPath=}', '', {'retval': {'already_cfretained': True}}), 'CGPSConverterAbort': (b'B^{CGPSConverter=}',), 'CGContextFillPath': (b'v^{CGContext=}',), 'CGDisplayBaseAddress': (b'^vI', '', {'retval': {'c_array_of_variable_length': True}}), 'CGContextFillRects': (sel32or64(b'v^{CGContext=}^{CGRect={CGPoint=ff}{CGSize=ff}}L', b'v^{CGContext=}^{CGRect={CGPoint=dd}{CGSize=dd}}Q'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CGContextStrokeEllipseInRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', 
b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGEventGetFlags': (b'Q^{__CGEvent=}',), 'CGPDFContextEndPage': (b'v^{CGContext=}',), 'CGEventSourceButtonState': (b'BII',), 'CGContextConvertSizeToDeviceSpace': (sel32or64(b'{CGSize=ff}^{CGContext=}{CGSize=ff}', b'{CGSize=dd}^{CGContext=}{CGSize=dd}'),), 'CGReleaseScreenRefreshRects': (sel32or64(b'v^{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGContextSetFlatness': (sel32or64(b'v^{CGContext=}f', b'v^{CGContext=}d'),), 'CGContextDrawShading': (b'v^{CGContext=}^{CGShading=}',), 'CGWindowLevelForKey': (b'ii',), 'CGFontCopyVariationAxes': (b'^{__CFArray=}^{CGFont=}', '', {'retval': {'already_cfretained': True}}), 'CGContextDrawImage': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}^{CGImage=}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}^{CGImage=}'),), 'CGPDFDocumentCreateWithProvider': (b'^{CGPDFDocument=}^{CGDataProvider=}', '', {'retval': {'already_cfretained': True}}), 'CGPDFDocumentGetArtBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGPDFDocument=}i', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGPDFDocument=}i'),), 'CGContextBeginPath': (b'v^{CGContext=}',), 'CGEventCreateScrollWheelEvent2': (b'v^{__CGEventSource=}iIiii', '', {'retval': {'already_cfretained': True}, 'c_array_length_in_arg': 2, 'variadic': True}), 'CGFontGetNumberOfGlyphs': (sel32or64(b'L^{CGFont=}', b'Q^{CGFont=}'),), 'CGPathIsRect': (sel32or64(b'B^{CGPath=}^{CGRect={CGPoint=ff}{CGSize=ff}}', b'B^{CGPath=}^{CGRect={CGPoint=dd}{CGSize=dd}}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGPathCreateCopyByDashingPath': (sel32or64(b'^{CGPath=}^{CGPath=}^{CGAffineTransform=ffffff}f^fL', b'^{CGPath=}^{CGPath=}^{CGAffineTransform=dddddd}d^dQ'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}, 3: {'c_array_length_in_arg': 4, 'type_modifier': 'n'}}}), 'CGDisplayIsAsleep': (sel32or64(b'iI', b'II'),), 'CGPSConverterConvert': (b'B^{CGPSConverter=}^{CGDataProvider=}^{CGDataConsumer=}^{__CFDictionary=}',), 'CGDisplayAddressForPosition': (b'^vIii', '', {'retval': {'c_array_of_variable_length': True}}), 'CGContextConvertPointToDeviceSpace': (sel32or64(b'{CGPoint=ff}^{CGContext=}{CGPoint=ff}', b'{CGPoint=dd}^{CGContext=}{CGPoint=dd}'),), 'CGDisplayBounds': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}I', b'{CGRect={CGPoint=dd}{CGSize=dd}}I'),), 'CGEventTapEnable': (b'v^{__CFMachPort=}B',), 'CGColorGetConstantColor': (b'^{CGColor=}^{__CFString=}',), 'CGDisplayUnitNumber': (b'II',), 'CGContextClosePath': (b'v^{CGContext=}',), 'CGImageIsMask': (b'B^{CGImage=}',), 'CGDisplayCopyDisplayMode': (b'^{CGDisplayMode=}I', '', {'retval': {'already_cfretained': True}}), 'CGPDFDocumentAllowsCopying': (b'B^{CGPDFDocument=}', '', {'retval': {'already_cfretained': True}}), 'CGDisplayCreateImage': (b'^{CGImage=}I', '', {'retval': {'already_cfretained': True}}), 'CGDisplayRotation': (b'dI',), 'CGDisplayBestModeForParametersAndRefreshRateWithProperty': (sel32or64(b'^{__CFDictionary=}ILLLd^{__CFString=}^i', b'^{__CFDictionary=}IQQQd^{__CFString=}^I'), '', {'arguments': {6: {'type_modifier': 'o'}}}), 'CGDisplayIDToOpenGLDisplayMask': (b'II',), 'CGColorCreate': (sel32or64(b'^{CGColor=}^{CGColorSpace=}^f', b'^{CGColor=}^{CGColorSpace=}^d'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGEventTapPostEvent': (b'v^{__CGEventTapProxy=}^{__CGEvent=}',), 'CGImageGetByteOrderInfo': (b'I^{CGImage=}',), 'CGDisplayBeamPosition': (b'II',), 
'CGPDFDocumentGetTypeID': (sel32or64(b'L', b'Q'),), 'CGShadingGetTypeID': (sel32or64(b'L', b'Q'),), 'CGEventSourceKeyState': (b'BIS',), 'CGContextBeginTransparencyLayerWithRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}^{__CFDictionary=}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}^{__CFDictionary=}'),), 'CGPointCreateDictionaryRepresentation': (sel32or64(b'^{__CFDictionary=}{CGPoint=ff}', b'^{__CFDictionary=}{CGPoint=dd}'), '', {'retval': {'already_cfretained': True}}), 'CGAffineTransformEqualToTransform': (sel32or64(b'B{CGAffineTransform=ffffff}{CGAffineTransform=ffffff}', b'B{CGAffineTransform=dddddd}{CGAffineTransform=dddddd}'),), 'CGDisplayPixelsWide': (sel32or64(b'LI', b'QI'),), 'CGEventSourceFlagsState': (b'QI',), 'CGLayerRetain': (b'^{CGLayer=}^{CGLayer=}',), 'CGContextFillRect': (sel32or64(b'v^{CGContext=}{CGRect={CGPoint=ff}{CGSize=ff}}', b'v^{CGContext=}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGAffineTransformInvert': (sel32or64(b'{CGAffineTransform=ffffff}{CGAffineTransform=ffffff}', b'{CGAffineTransform=dddddd}{CGAffineTransform=dddddd}'),), 'CGPaletteGetColorAtIndex': (b'{CGDeviceColor=fff}^{_CGDirectPaletteRef=}I',), 'CGEventSetFlags': (b'v^{__CGEvent=}Q',), 'CGPDFDocumentGetCropBox': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{CGPDFDocument=}i', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{CGPDFDocument=}i'),), 'CGEventCreateScrollWheelEvent': (b'^{__CGEvent=}^{__CGEventSource=}IIi', '', {'retval': {'already_cfretained': True}, 'c_array_length_in_arg': 2, 'variadic': True}), 'CGPatternGetTypeID': (sel32or64(b'L', b'Q'),), 'CGImageGetBytesPerRow': (sel32or64(b'L^{CGImage=}', b'Q^{CGImage=}'),), 'CGPathRelease': (b'v^{CGPath=}',), 'CGDisplayModeGetTypeID': (sel32or64(b'L', b'Q'),), 'CGEventGetType': (b'I^{__CGEvent=}',), 'CGContextSetAllowsFontSubpixelQuantization': (b'v^{CGContext=}B',), 'CGPathMoveToPoint': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}ff', b'v^{CGPath=}^{CGAffineTransform=dddddd}dd'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGFontRelease': (b'v^{CGFont=}',), 'CGPathCreateCopyByTransformingPath': (sel32or64(b'^{CGPath=}^{CGPath=}^{CGAffineTransform=ffffff}', b'^{CGPath=}^{CGPath=}^{CGAffineTransform=dddddd}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}}}), 'CGPDFDocumentRelease': (b'v^{CGPDFDocument=}',), 'CGContextSetLineWidth': (sel32or64(b'v^{CGContext=}f', b'v^{CGContext=}d'),), 'CGDisplayStreamCreateWithDispatchQueue': (sel32or64(b'^{CGDisplayStream=}ILLi^{__CFDictionary=}^{dispatch_queue_s=}@?', b'^{CGDisplayStream=}IQQi^{__CFDictionary=}^{dispatch_queue_s=}@?'), '', {'retval': {'already_cfretained': True}, 'arguments': {6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'I'}, 2: {'type': 'Q'}, 3: {'type': '@'}, 4: {'type': '@'}}}}}}), 'CGContextGetPathCurrentPoint': (sel32or64(b'{CGPoint=ff}^{CGContext=}', b'{CGPoint=dd}^{CGContext=}'),), 'CGContextSetPatternPhase': (sel32or64(b'v^{CGContext=}{CGSize=ff}', b'v^{CGContext=}{CGSize=dd}'),), 'CGFontGetAscent': (b'i^{CGFont=}',), 'CGPDFDictionaryGetDictionary': (b'B^{CGPDFDictionary=}^t^^{CGPDFDictionary=}', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CGContextShowGlyphs': (sel32or64(b'v^{CGContext=}^SL', b'v^{CGContext=}^SQ'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'CGFontGetLeading': (b'i^{CGFont=}',), 'CGGradientCreateWithColorComponents': 
(sel32or64(b'^{CGGradient=}^{CGColorSpace=}^f^fL', b'^{CGGradient=}^{CGColorSpace=}^d^dQ'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}, 2: {'c_array_length_in_arg': 3, 'type_modifier': 'n'}}}), 'CGDisplayGetDrawingContext': (b'^{CGContext=}I',), 'CGEventGetLocation': (sel32or64(b'{CGPoint=ff}^{__CGEvent=}', b'{CGPoint=dd}^{__CGEvent=}'),), 'CGDisplayStreamUpdateGetTypeID': (sel32or64(b'L', b'Q'),), 'CGRectGetMinY': (sel32or64(b'f{CGRect={CGPoint=ff}{CGSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGRectGetMinX': (sel32or64(b'f{CGRect={CGPoint=ff}{CGSize=ff}}', b'd{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'CGContextSetFont': (b'v^{CGContext=}^{CGFont=}',), 'CGDisplayShowCursor': (b'iI',), 'CGContextRestoreGState': (b'v^{CGContext=}',), 'CGPathRetain': (b'^{CGPath=}^{CGPath=}',), 'CGPDFDocumentGetNumberOfPages': (sel32or64(b'L^{CGPDFDocument=}', b'Q^{CGPDFDocument=}'),), 'CGAffineTransformMakeScale': (sel32or64(b'{CGAffineTransform=ffffff}ff', b'{CGAffineTransform=dddddd}dd'),), 'CGPathAddCurveToPoint': (sel32or64(b'v^{CGPath=}^{CGAffineTransform=ffffff}ffffff', b'v^{CGPath=}^{CGAffineTransform=dddddd}dddddd'), '', {'arguments': {1: {'type_modifier': 'n'}}}), 'CGImageCreate': (sel32or64(b'^{CGImage=}LLLLL^{CGColorSpace=}I^{CGDataProvider=}^fBI', b'^{CGImage=}QQQQQ^{CGColorSpace=}I^{CGDataProvider=}^dBI'), '', {'retval': {'already_cfretained': True}, 'arguments': {8: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGContextSetShouldSmoothFonts': (b'v^{CGContext=}B',), 'CGPDFDictionaryGetBoolean': (b'B^{CGPDFDictionary=}^c^C', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CGPDFStringGetBytePtr': (b'^C^{CGPDFString=}', '', {'retval': {'c_array_of_variable_length': True}}), 'CGScreenUnregisterMoveCallback': (b'v^?^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'{CGScreenUpdateMoveDelta=ii}'}, 1: {'type': b'Q'}, 2: {'type': b'^{CGRect={CGPoint=dd}{CGSize=dd}}'}, 3: {'type': b'^v'}}}}}}), 'CGColorGetColorSpace': (b'^{CGColorSpace=}^{CGColor=}',), 'CGColorConverterRelease': (b'v^{CGColorConversionInfo=}',), 'CGDisplayStreamStart': (b'i^{CGDisplayStream=}',), 'CGDisplayAvailableModes': (b'^{__CFArray=}I',), 'CGEventCreateSourceFromEvent': (b'^{__CGEventSource=}^{__CGEvent=}', '', {'retval': {'already_cfretained': True}}), 'CGPostKeyboardEvent': (sel32or64(b'iSSi', b'iSSI'),), 'CGPDFScannerPopStream': (b'B^{CGPDFScanner=}^^{CGPDFStream=}', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CGPathCreateMutableCopy': (b'^{CGPath=}^{CGPath=}', '', {'retval': {'already_cfretained': True}}), 'CGPathGetTypeID': (sel32or64(b'L', b'Q'),), 'CGColorSpaceCreateIndexed': (sel32or64(b'^{CGColorSpace=}^{CGColorSpace=}L^C', b'^{CGColorSpace=}^{CGColorSpace=}Q^C'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGColorSpaceCreateLab': (sel32or64(b'^{CGColorSpace=}[3f][3f][4f]', b'^{CGColorSpace=}[3d][3d][4d]'), '', {'retval': {'already_cfretained': True}}), 'CGDisplayIOServicePort': (b'II',), 'CGContextSetFillPattern': (sel32or64(b'v^{CGContext=}^{CGPattern=}^f', b'v^{CGContext=}^{CGPattern=}^d'), '', {'arguments': {2: {'c_array_of_variable_length': True, 'type_modifier': 'n'}}}), 'CGSizeMakeWithDictionaryRepresentation': (sel32or64(b'B^{__CFDictionary=}^{CGSize=ff}', b'B^{__CFDictionary=}^{CGSize=dd}'), '', {'arguments': {1: 
{'type_modifier': 'o'}}}), 'CGPDFDocumentUnlockWithPassword': (b'B^{CGPDFDocument=}^c',), 'CGContextClip': (b'v^{CGContext=}',), 'CGContextSetFillColorWithColor': (b'v^{CGContext=}^{CGColor=}',), 'CGPDFDictionaryGetName': (b'B^{CGPDFDictionary=}^t^^c', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}})} -aliases = {'CGSizeApplyAffineTransform': '__CGSizeApplyAffineTransform', 'CGSizeEqualToSize': '__CGSizeEqualToSize', 'CGFLOAT_TYPE': 'double', 'CGPointApplyAffineTransform': '__CGPointApplyAffineTransform', 'CG_LOCAL_32': 'CG_LOCAL', 'CGColorConverterRef': 'CGColorConversionInfoRef', 'kCGBitmapByteOrder16Host': 'kCGBitmapByteOrder16Little', 'CGPointEqualToPoint': '__CGPointEqualToPoint', 'kCGEventSupressionStateSupressionInterval': 'kCGEventSuppressionStateSuppressionInterval', 'CGEventSupressionState': 'CGEventSuppressionState', 'CG_PRIVATE_EXTERN': 'CG_LOCAL', 'CG_AVAILABLE_STARTING': '__OSX_AVAILABLE_STARTING', 'CG_LOCAL': '__private_extern__', 'kCGGlyphMax': 'kCGFontIndexMax', 'CG_EXTERN_64': sel32or64('CG_LOCAL', 'CG_EXTERN'), 'kCGWindowBackingCFNumberType': 'kCFNumberSInt32Type', 'CGFLOAT_MIN': sel32or64('FLT_MIN', 'DBL_MIN'), 'kCGWindowIDCFNumberType': 'kCFNumberSInt32Type', 'CG_LOCAL_64': 'CG_LOCAL', 'CGEventNoErr': 'kCGErrorSuccess', 'CGAffineTransformMake': '__CGAffineTransformMake', 'CGFLOAT_MAX': sel32or64('FLT_MAX', 'DBL_MAX'), 'kCGEventSupressionStateRemoteMouseDrag': 'kCGEventSuppressionStateRemoteMouseDrag', 'CGDisplayNoErr': 'kCGErrorSuccess', 'CG_OBSOLETE': '__CG_DEPRECATED', 'kCGNumberOfEventSupressionStates': 'kCGNumberOfEventSuppressionStates', 'CG_AVAILABLE_BUT_DEPRECATED': '__OSX_AVAILABLE_BUT_DEPRECATED', 'CG_EXTERN_32': 'CG_EXTERN', 'kCGWindowSharingCFNumberType': 'kCFNumberSInt32Type', 'kCGBitmapByteOrder32Host': 'kCGBitmapByteOrder32Little'} -cftypes=[('CGEventRef', b'^{__CGEvent=}', 'CGEventGetTypeID', None), ('CGEventSourceRef', b'^{__CGEventSource=}', 'CGEventSourceGetTypeID', None), ('CGColorRef', b'^{CGColor=}', 'CGColorGetTypeID', None), ('CGFontRef', b'^{CGFont=}', 'CGFontGetTypeID', None), ('CGColorConversionInfoRef', b'^{CGColorConversionInfo=}', 'CGColorConversionInfoGetTypeID', None), ('CGDataConsumerRef', b'^{CGDataConsumer=}', 'CGDataConsumerGetTypeID', None), ('CGPathRef', b'^{CGPath=}', 'CGPathGetTypeID', None), ('CGDisplayModeRef', b'^{CGDisplayMode}', 'CGDisplayModeGetTypeID', None), ('CGGradientRef', b'^{CGGradient=}', 'CGGradientGetTypeID', None), ('CGDirectDisplay', b'^{CGDirectDisplay=}', 'CGDirectDisplayGetTypeID', None), ('CGColorSpaceRef', b'^{CGColorSpace=}', 'CGColorSpaceGetTypeID', None), ('CGPDFDocumentRef', b'^{CGPDFDocument=}', 'CGPDFDocumentGetTypeID', None), ('CGFunctionRef', b'^{CGFunction=}', 'CGFunctionGetTypeID', None), ('CGImageRef', b'^{CGImage=}', 'CGImageGetTypeID', None), ('CGDisplayStreamRef', b'^{CGDisplayStream=}', 'CGDisplayStreamGetTypeID', None), ('CGPDFPageRef', b'^{CGPDFPage=}', 'CGPDFPageGetTypeID', None), ('CGLayerRef', b'^{CGLayer=}', 'CGLayerGetTypeID', None), ('CGPatternRef', b'^{CGPattern=}', 'CGPatternGetTypeID', None), ('CGPSConverterRef', b'^{CGPSConverter=}', 'CGPSConverterGetTypeID', None), ('CGContextRef', b'^{CGContext=}', 'CGContextGetTypeID', None), ('CGShadingRef', b'^{CGShading=}', 'CGShadingGetTypeID', None), ('CGDataProviderRef', b'^{CGDataProvider=}', 'CGDataProviderGetTypeID', None), ('CGDisplayStreamUpdateRef', b'^{CGDisplayStreamUpdate=}', 'CGDisplayStreamUpdateGetTypeID', None)] -misc.update({'CGPDFArrayRef': 
objc.createOpaquePointerType('CGPDFArrayRef', b'^{CGPDFArray=}'), 'CGPDFScannerRef': objc.createOpaquePointerType('CGPDFScannerRef', b'^{CGPDFScanner=}'), 'CGDirectPaletteRef': objc.createOpaquePointerType('CGDirectPaletteRef', b'^{_CGDirectPaletteRef=}'), 'CGPDFStringRef': objc.createOpaquePointerType('CGPDFStringRef', b'^{CGPDFString=}'), 'CGPDFContentStreamRef': objc.createOpaquePointerType('CGPDFContentStreamRef', b'^{CGPDFContentStream=}'), 'CGPDFOperatorTableRef': objc.createOpaquePointerType('CGPDFOperatorTableRef', b'^{CGPDFOperatorTable}'), 'CGPDFObject': objc.createOpaquePointerType('CGPDFObject', b'^{CGPDFObject=}'), 'CGDisplayConfigRef': objc.createOpaquePointerType('CGDisplayConfigRef', b'^{_CGDisplayConfigRef=}'), 'CGPDFDictionaryRef': objc.createOpaquePointerType('CGPDFDictionaryRef', b'^{CGPDFDictionary=}'), 'CGPDFStreamRef': objc.createOpaquePointerType('CGPDFStreamRef', b'^{CGPDFStream=}'), 'CGEventTapProxy': objc.createOpaquePointerType('CGEventTapProxy', b'^{__CGEventTapProxy=}')}) -expressions = {'kCGOverlayWindowLevel': 'CGWindowLevelForKey(kCGOverlayWindowLevelKey)', 'kCGMainMenuWindowLevel': 'CGWindowLevelForKey(kCGMainMenuWindowLevelKey)', 'kCGBaseWindowLevel': 'CGWindowLevelForKey(kCGBaseWindowLevelKey)', 'kCGStatusWindowLevel': 'CGWindowLevelForKey(kCGStatusWindowLevelKey)', 'kCGHelpWindowLevel': 'CGWindowLevelForKey(kCGHelpWindowLevelKey)', 'kCGNormalWindowLevel': 'CGWindowLevelForKey(kCGNormalWindowLevelKey)', 'kCGScreenSaverWindowLevel': 'CGWindowLevelForKey(kCGScreenSaverWindowLevelKey)', 'kCGMaximumWindowLevel': 'CGWindowLevelForKey(kCGMaximumWindowLevelKey)', 'kCGUtilityWindowLevel': 'CGWindowLevelForKey(kCGUtilityWindowLevelKey)', 'kCGCursorWindowLevel': 'CGWindowLevelForKey(kCGCursorWindowLevelKey)', 'kCGAssistiveTechHighWindowLevel': 'CGWindowLevelForKey(kCGAssistiveTechHighWindowLevelKey)', 'kCGModalPanelWindowLevel': 'CGWindowLevelForKey(kCGModalPanelWindowLevelKey)', 'kCGEventFilterMaskPermitAllEvents': '(kCGEventFilterMaskPermitLocalMouseEvents | kCGEventFilterMaskPermitLocalKeyboardEvents | kCGEventFilterMaskPermitSystemDefinedEvents)', 'kCGMinimumWindowLevel': 'CGWindowLevelForKey(kCGMinimumWindowLevelKey)', 'kCGPopUpMenuWindowLevel': 'CGWindowLevelForKey(kCGPopUpMenuWindowLevelKey)', 'kCGDesktopWindowLevel': 'CGWindowLevelForKey(kCGDesktopWindowLevelKey)', 'kCGFloatingWindowLevel': 'CGWindowLevelForKey(kCGFloatingWindowLevelKey)', 'kCGDraggingWindowLevel': 'CGWindowLevelForKey(kCGDraggingWindowLevelKey)', 'kCGDockWindowLevel': 'CGWindowLevelForKey(kCGDockWindowLevelKey)', 'kCGDesktopIconWindowLevel': 'CGWindowLevelForKey(kCGDesktopIconWindowLevelKey)', 'kCGBackstopMenuLevel': 'CGWindowLevelForKey(kCGBackstopMenuLevelKey)', 'kCGTornOffMenuWindowLevel': 'CGWindowLevelForKey(kCGTornOffMenuWindowLevelKey)'} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_metadata.pyc b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_metadata.pyc deleted file mode 100644 index 44e155d2..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_sortandmap.so b/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_sortandmap.so deleted file mode 100755 index 02ac1ede..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreGraphics/_sortandmap.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreVideo/_CVPixelBuffer.so 
b/env/lib/python2.7/site-packages/Quartz/CoreVideo/_CVPixelBuffer.so deleted file mode 100755 index 9c000d8d..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreVideo/_CVPixelBuffer.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreVideo/__init__.py b/env/lib/python2.7/site-packages/Quartz/CoreVideo/__init__.py deleted file mode 100644 index b2883717..00000000 --- a/env/lib/python2.7/site-packages/Quartz/CoreVideo/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -''' -Python mapping for the CoreVideo framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import CoreFoundation - -from Quartz.CoreVideo import _metadata - -sys.modules['Quartz.CoreVideo'] = mod = objc.ObjCLazyModule('Quartz.CoreVideo', - "com.apple.CoreVideo", - objc.pathForFramework("/System/Library/Frameworks/CoreVideo.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( CoreFoundation, )) - -import sys -del sys.modules['Quartz.CoreVideo._metadata'] - -def _load(mod): - import Quartz - Quartz.CoreVideo = mod - import Quartz.CoreVideo._CVPixelBuffer as m - for nm in dir(m): - if nm.startswith('_'): continue - setattr(mod, nm, getattr(m, nm)) -_load(mod) diff --git a/env/lib/python2.7/site-packages/Quartz/CoreVideo/__init__.pyc b/env/lib/python2.7/site-packages/Quartz/CoreVideo/__init__.pyc deleted file mode 100644 index f57fd420..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreVideo/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/CoreVideo/_metadata.py b/env/lib/python2.7/site-packages/Quartz/CoreVideo/_metadata.py deleted file mode 100644 index f8645743..00000000 --- a/env/lib/python2.7/site-packages/Quartz/CoreVideo/_metadata.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Jul 4 16:00:05 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'CVTimeStamp': objc.createStructType('CVTimeStamp', sel32or64(b'{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), ['version', 'videoTimeScale', 'videoTime', 'hostTime', 'rateScalar', 'videoRefreshPeriod', 'smpteTime', 'flags', 'reserved']), 'CVPlanarPixelBufferInfo_YCbCrBiPlanar': objc.createStructType('CVPlanarPixelBufferInfo_YCbCrBiPlanar', b'{CVPlanarPixelBufferInfo_YCbCrBiPlanar={CVPlanarComponentInfo=iI}{CVPlanarComponentInfo=iI}}', ['componentInfoY', 'componentInfoCbCr']), 'CVPlanarPixelBufferInfo_YCbCrPlanar': objc.createStructType('CVPlanarPixelBufferInfo_YCbCrPlanar', b'{CVPlanarPixelBufferInfo_YCbCrPlanar={CVPlanarComponentInfo=iI}{CVPlanarComponentInfo=iI}{CVPlanarComponentInfo=iI}}', ['componentInfoY', 'componentInfoCb', 'componentInfoCr']), 'CVPlanarComponentInfo': objc.createStructType('CVPlanarComponentInfo', b'{CVPlanarComponentInfo=iI}', ['offset', 'rowBytes']), 'CVTime': objc.createStructType('CVTime', b'{_CVTime=qii}', ['timeValue', 'timeScale', 'flags']), 'CVSMPTETime': objc.createStructType('CVSMPTETime', sel32or64(b'{CVSMPTETime=ssLLLssss}', b'{CVSMPTETime=ssIIIssss}'), ['subframes', 'subframeDivisor', 'counter', 'type', 
'flags', 'hours', 'minutes', 'seconds', 'frames']), 'CVPlanarPixelBufferInfo': objc.createStructType('CVPlanarPixelBufferInfo', b'{CVPlanarPixelBufferInfo=[1{CVPlanarComponentInfo=iI}]}', ['componentInfo'])}) -constants = '''$kCVBufferMovieTimeKey@^{__CFString=}$kCVBufferNonPropagatedAttachmentsKey@^{__CFString=}$kCVBufferPropagatedAttachmentsKey@^{__CFString=}$kCVBufferTimeScaleKey@^{__CFString=}$kCVBufferTimeValueKey@^{__CFString=}$kCVImageBufferAlphaChannelIsOpaque@^{__CFString=}$kCVImageBufferCGColorSpaceKey@^{__CFString=}$kCVImageBufferChromaLocationBottomFieldKey@^{__CFString=}$kCVImageBufferChromaLocationTopFieldKey@^{__CFString=}$kCVImageBufferChromaLocation_Bottom@^{__CFString=}$kCVImageBufferChromaLocation_BottomLeft@^{__CFString=}$kCVImageBufferChromaLocation_Center@^{__CFString=}$kCVImageBufferChromaLocation_DV420@^{__CFString=}$kCVImageBufferChromaLocation_Left@^{__CFString=}$kCVImageBufferChromaLocation_Top@^{__CFString=}$kCVImageBufferChromaLocation_TopLeft@^{__CFString=}$kCVImageBufferChromaSubsamplingKey@^{__CFString=}$kCVImageBufferChromaSubsampling_411@^{__CFString=}$kCVImageBufferChromaSubsampling_420@^{__CFString=}$kCVImageBufferChromaSubsampling_422@^{__CFString=}$kCVImageBufferCleanApertureHeightKey@^{__CFString=}$kCVImageBufferCleanApertureHorizontalOffsetKey@^{__CFString=}$kCVImageBufferCleanApertureKey@^{__CFString=}$kCVImageBufferCleanApertureVerticalOffsetKey@^{__CFString=}$kCVImageBufferCleanApertureWidthKey@^{__CFString=}$kCVImageBufferColorPrimariesKey@^{__CFString=}$kCVImageBufferColorPrimaries_DCI_P3$kCVImageBufferColorPrimaries_EBU_3213@^{__CFString=}$kCVImageBufferColorPrimaries_ITU_R_2020$kCVImageBufferColorPrimaries_ITU_R_709_2@^{__CFString=}$kCVImageBufferColorPrimaries_P22@^{__CFString=}$kCVImageBufferColorPrimaries_P3_D65$kCVImageBufferColorPrimaries_SMPTE_C@^{__CFString=}$kCVImageBufferContentLightLevelInfoKey$kCVImageBufferDisplayDimensionsKey@^{__CFString=}$kCVImageBufferDisplayHeightKey@^{__CFString=}$kCVImageBufferDisplayWidthKey@^{__CFString=}$kCVImageBufferFieldCountKey@^{__CFString=}$kCVImageBufferFieldDetailKey@^{__CFString=}$kCVImageBufferFieldDetailSpatialFirstLineEarly@^{__CFString=}$kCVImageBufferFieldDetailSpatialFirstLineLate@^{__CFString=}$kCVImageBufferFieldDetailTemporalBottomFirst@^{__CFString=}$kCVImageBufferFieldDetailTemporalTopFirst@^{__CFString=}$kCVImageBufferGammaLevelKey@^{__CFString=}$kCVImageBufferICCProfileKey@^{__CFString=}$kCVImageBufferMasteringDisplayColorVolumeKey$kCVImageBufferPixelAspectRatioHorizontalSpacingKey@^{__CFString=}$kCVImageBufferPixelAspectRatioKey@^{__CFString=}$kCVImageBufferPixelAspectRatioVerticalSpacingKey@^{__CFString=}$kCVImageBufferPreferredCleanApertureKey@^{__CFString=}$kCVImageBufferTransferFunctionKey@^{__CFString=}$kCVImageBufferTransferFunction_EBU_3213@^{__CFString=}$kCVImageBufferTransferFunction_ITU_R_2020$kCVImageBufferTransferFunction_ITU_R_2100_HLG$kCVImageBufferTransferFunction_ITU_R_709_2@^{__CFString=}$kCVImageBufferTransferFunction_Linear$kCVImageBufferTransferFunction_SMPTE_240M_1995@^{__CFString=}$kCVImageBufferTransferFunction_SMPTE_C@^{__CFString=}$kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ$kCVImageBufferTransferFunction_SMPTE_ST_428_1$kCVImageBufferTransferFunction_UseGamma@^{__CFString=}$kCVImageBufferTransferFunction_sRGB$kCVImageBufferYCbCrMatrixKey@^{__CFString=}$kCVImageBufferYCbCrMatrix_ITU_R_2020$kCVImageBufferYCbCrMatrix_ITU_R_601_4@^{__CFString=}$kCVImageBufferYCbCrMatrix_ITU_R_709_2@^{__CFString=}$kCVImageBufferYCbCrMatrix_SMPTE_240M_1995@^{__CFString=}$kCVInd
efiniteTime@{_CVTime=qii}$kCVMetalTextureCacheMaximumTextureAgeKey$kCVMetalTextureUsage$kCVOpenGLBufferHeight@^{__CFString=}$kCVOpenGLBufferInternalFormat@^{__CFString=}$kCVOpenGLBufferMaximumMipmapLevel@^{__CFString=}$kCVOpenGLBufferPoolMaximumBufferAgeKey@^{__CFString=}$kCVOpenGLBufferPoolMinimumBufferCountKey@^{__CFString=}$kCVOpenGLBufferTarget@^{__CFString=}$kCVOpenGLBufferWidth@^{__CFString=}$kCVOpenGLTextureCacheChromaSamplingModeAutomatic@^{__CFString=}$kCVOpenGLTextureCacheChromaSamplingModeBestPerformance@^{__CFString=}$kCVOpenGLTextureCacheChromaSamplingModeHighestQuality@^{__CFString=}$kCVOpenGLTextureCacheChromaSamplingModeKey@^{__CFString=}$kCVPixelBufferBytesPerRowAlignmentKey@^{__CFString=}$kCVPixelBufferCGBitmapContextCompatibilityKey@^{__CFString=}$kCVPixelBufferCGImageCompatibilityKey@^{__CFString=}$kCVPixelBufferExtendedPixelsBottomKey@^{__CFString=}$kCVPixelBufferExtendedPixelsLeftKey@^{__CFString=}$kCVPixelBufferExtendedPixelsRightKey@^{__CFString=}$kCVPixelBufferExtendedPixelsTopKey@^{__CFString=}$kCVPixelBufferHeightKey@^{__CFString=}$kCVPixelBufferIOSurfaceCoreAnimationCompatibilityKey@^{__CFString=}$kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey@^{__CFString=}$kCVPixelBufferIOSurfaceOpenGLESTextureCompatibilityKey@^{__CFString=}$kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey@^{__CFString=}$kCVPixelBufferIOSurfaceOpenGLTextureCompatibilityKey@^{__CFString=}$kCVPixelBufferIOSurfacePropertiesKey@^{__CFString=}$kCVPixelBufferMemoryAllocatorKey@^{__CFString=}$kCVPixelBufferMetalCompatibilityKey@^{__CFString=}$kCVPixelBufferOpenGLCompatibilityKey@^{__CFString=}$kCVPixelBufferOpenGLESCompatibilityKey@^{__CFString=}$kCVPixelBufferOpenGLTextureCacheCompatibilityKey$kCVPixelBufferPixelFormatTypeKey@^{__CFString=}$kCVPixelBufferPlaneAlignmentKey@^{__CFString=}$kCVPixelBufferPoolAllocationThresholdKey@^{__CFString=}$kCVPixelBufferPoolFlushExcessBuffers$kCVPixelBufferPoolFreeBufferNotification@^{__CFString=}$kCVPixelBufferPoolMaximumBufferAgeKey@^{__CFString=}$kCVPixelBufferPoolMinimumBufferCountKey@^{__CFString=}$kCVPixelBufferWidthKey@^{__CFString=}$kCVPixelFormatBitsPerBlock@^{__CFString=}$kCVPixelFormatBlackBlock@^{__CFString=}$kCVPixelFormatBlockHeight@^{__CFString=}$kCVPixelFormatBlockHorizontalAlignment@^{__CFString=}$kCVPixelFormatBlockVerticalAlignment@^{__CFString=}$kCVPixelFormatBlockWidth@^{__CFString=}$kCVPixelFormatCGBitmapContextCompatibility@^{__CFString=}$kCVPixelFormatCGBitmapInfo@^{__CFString=}$kCVPixelFormatCGImageCompatibility@^{__CFString=}$kCVPixelFormatCodecType@^{__CFString=}$kCVPixelFormatComponentRange$kCVPixelFormatComponentRange_FullRange$kCVPixelFormatComponentRange_VideoRange$kCVPixelFormatComponentRange_WideRange$kCVPixelFormatConstant@^{__CFString=}$kCVPixelFormatContainsAlpha@^{__CFString=}$kCVPixelFormatContainsGrayscale$kCVPixelFormatContainsRGB@^{__CFString=}$kCVPixelFormatContainsYCbCr@^{__CFString=}$kCVPixelFormatFillExtendedPixelsCallback@^{__CFString=}$kCVPixelFormatFourCC@^{__CFString=}$kCVPixelFormatHorizontalSubsampling@^{__CFString=}$kCVPixelFormatName@^{__CFString=}$kCVPixelFormatOpenGLCompatibility@^{__CFString=}$kCVPixelFormatOpenGLESCompatibility@^{__CFString=}$kCVPixelFormatOpenGLFormat@^{__CFString=}$kCVPixelFormatOpenGLInternalFormat@^{__CFString=}$kCVPixelFormatOpenGLType@^{__CFString=}$kCVPixelFormatPlanes@^{__CFString=}$kCVPixelFormatQDCompatibility@^{__CFString=}$kCVPixelFormatVerticalSubsampling@^{__CFString=}$kCVZeroTime@{_CVTime=qii}$''' -enums = 
'''$kCVAttachmentMode_ShouldNotPropagate@0$kCVAttachmentMode_ShouldPropagate@1$kCVPixelBufferLock_ReadOnly@1$kCVPixelBufferPoolFlushExcessBuffers@1$kCVPixelFormatType_128RGBAFloat@1380410945$kCVPixelFormatType_14Bayer_BGGR@1650943796$kCVPixelFormatType_14Bayer_GBRG@1734505012$kCVPixelFormatType_14Bayer_GRBG@1735549492$kCVPixelFormatType_14Bayer_RGGB@1919379252$kCVPixelFormatType_16BE555@16$kCVPixelFormatType_16BE565@1110783541$kCVPixelFormatType_16Gray@1647392359$kCVPixelFormatType_16LE555@1278555445$kCVPixelFormatType_16LE5551@892679473$kCVPixelFormatType_16LE565@1278555701$kCVPixelFormatType_1IndexedGray_WhiteIsZero@33$kCVPixelFormatType_1Monochrome@1$kCVPixelFormatType_24BGR@842285639$kCVPixelFormatType_24RGB@24$kCVPixelFormatType_2Indexed@2$kCVPixelFormatType_2IndexedGray_WhiteIsZero@34$kCVPixelFormatType_30RGB@1378955371$kCVPixelFormatType_30RGBLEPackedWideGamut@1999843442$kCVPixelFormatType_32ABGR@1094862674$kCVPixelFormatType_32ARGB@32$kCVPixelFormatType_32AlphaGray@1647522401$kCVPixelFormatType_32BGRA@1111970369$kCVPixelFormatType_32RGBA@1380401729$kCVPixelFormatType_420YpCbCr10BiPlanarFullRange@2019963440$kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange@2016686640$kCVPixelFormatType_420YpCbCr8BiPlanarFullRange@875704422$kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange@875704438$kCVPixelFormatType_420YpCbCr8Planar@2033463856$kCVPixelFormatType_420YpCbCr8PlanarFullRange@1714696752$kCVPixelFormatType_422YpCbCr10@1983000880$kCVPixelFormatType_422YpCbCr10BiPlanarFullRange@2019963442$kCVPixelFormatType_422YpCbCr10BiPlanarVideoRange@2016686642$kCVPixelFormatType_422YpCbCr16@1983000886$kCVPixelFormatType_422YpCbCr8@846624121$kCVPixelFormatType_422YpCbCr8FullRange@2037741158$kCVPixelFormatType_422YpCbCr8_yuvs@2037741171$kCVPixelFormatType_422YpCbCr_4A_8BiPlanar@1630697081$kCVPixelFormatType_4444AYpCbCr16@2033463606$kCVPixelFormatType_4444AYpCbCr8@2033463352$kCVPixelFormatType_4444YpCbCrA8@1983131704$kCVPixelFormatType_4444YpCbCrA8R@1916022840$kCVPixelFormatType_444YpCbCr10@1983131952$kCVPixelFormatType_444YpCbCr10BiPlanarFullRange@2019963956$kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange@2016687156$kCVPixelFormatType_444YpCbCr8@1983066168$kCVPixelFormatType_48RGB@1647589490$kCVPixelFormatType_4Indexed@4$kCVPixelFormatType_4IndexedGray_WhiteIsZero@36$kCVPixelFormatType_64ARGB@1647719521$kCVPixelFormatType_64RGBAHalf@1380411457$kCVPixelFormatType_8Indexed@8$kCVPixelFormatType_8IndexedGray_WhiteIsZero@40$kCVPixelFormatType_ARGB2101010LEPacked@1815162994$kCVPixelFormatType_DepthFloat16@1751410032$kCVPixelFormatType_DepthFloat32@1717855600$kCVPixelFormatType_DisparityFloat16@1751411059$kCVPixelFormatType_DisparityFloat32@1717856627$kCVPixelFormatType_OneComponent16Half@1278226536$kCVPixelFormatType_OneComponent32Float@1278226534$kCVPixelFormatType_OneComponent8@1278226488$kCVPixelFormatType_TwoComponent16Half@843264104$kCVPixelFormatType_TwoComponent32Float@843264102$kCVPixelFormatType_TwoComponent8@843264056$kCVReturnAllocationFailed@-6662$kCVReturnDisplayLinkAlreadyRunning@-6671$kCVReturnDisplayLinkCallbacksNotSet@-6673$kCVReturnDisplayLinkNotRunning@-6672$kCVReturnError@-6660$kCVReturnFirst@-6660$kCVReturnInvalidArgument@-6661$kCVReturnInvalidDisplay@-6670$kCVReturnInvalidPixelBufferAttributes@-6682$kCVReturnInvalidPixelFormat@-6680$kCVReturnInvalidPoolAttributes@-6691$kCVReturnInvalidSize@-6681$kCVReturnLast@-6699$kCVReturnPixelBufferNotMetalCompatible@-6684$kCVReturnPixelBufferNotOpenGLCompatible@-6683$kCVReturnPoolAllocationFailed@-6690$kCVReturnRetry@-6692$kCVReturnSuccess@0$kCVReturnUns
upported@-6663$kCVReturnWouldExceedAllocationThreshold@-6689$kCVSMPTETimeRunning@2$kCVSMPTETimeType24@0$kCVSMPTETimeType25@1$kCVSMPTETimeType2997@4$kCVSMPTETimeType2997Drop@5$kCVSMPTETimeType30@3$kCVSMPTETimeType30Drop@2$kCVSMPTETimeType5994@7$kCVSMPTETimeType60@6$kCVSMPTETimeValid@1$kCVTimeIsIndefinite@1$kCVTimeStampBottomField@131072$kCVTimeStampHostTimeValid@2$kCVTimeStampIsInterlaced@196608$kCVTimeStampRateScalarValid@16$kCVTimeStampSMPTETimeValid@4$kCVTimeStampTopField@65536$kCVTimeStampVideoHostTimeValid@3$kCVTimeStampVideoRefreshPeriodValid@8$kCVTimeStampVideoTimeValid@1$kReturnRetry@-6692$''' -misc.update({}) -functions={'CVImageBufferGetEncodedSize': (sel32or64(b'{CGSize=ff}^{__CVBuffer=}', b'{CGSize=dd}^{__CVBuffer=}'),), 'CVOpenGLTextureRelease': (b'v^{__CVBuffer=}',), 'CVPixelBufferPoolRelease': (b'v^{__CVPixelBufferPool=}',), 'CVPixelBufferPoolGetTypeID': (sel32or64(b'L', b'Q'),), 'CVYCbCrMatrixGetIntegerCodePointForString': (b'i^{__CFString=}',), 'CVPixelBufferCreate': (sel32or64(b'i^{__CFAllocator=}LLL^{__CFDictionary=}^^{__CVBuffer=}', b'i^{__CFAllocator=}QQI^{__CFDictionary=}^^{__CVBuffer=}'), '', {'retval': {'already_cfretained': True}}), 'CVOpenGLBufferPoolGetTypeID': (sel32or64(b'L', b'Q'),), 'CVPixelBufferFillExtendedPixels': (b'i^{__CVBuffer=}',), 'CVOpenGLTextureCacheRetain': (b'^{__CVOpenGLTextureCache=}^{__CVOpenGLTextureCache=}',), 'CVOpenGLBufferPoolCreateOpenGLBuffer': (b'i^{__CFAllocator=}^{__CVOpenGLBufferPool=}^^{__CVBuffer=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVDisplayLinkSetCurrentCGDisplay': (b'i^{__CVDisplayLink=}I',), 'CVBufferSetAttachment': (b'v^{__CVBuffer=}^{__CFString=}@I',), 'CVGetCurrentHostTime': (b'Q', '', {'variadic': False}), 'CVPixelBufferPoolCreate': (b'i^{__CFAllocator=}^{__CFDictionary=}^{__CFDictionary=}^^{__CVPixelBufferPool=}', '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVPixelBufferGetHeightOfPlane': (sel32or64(b'L^{__CVBuffer=}L', b'Q^{__CVBuffer=}Q'),), 'CVBufferRetain': (b'^{__CVBuffer=}^{__CVBuffer=}',), 'CVDisplayLinkTranslateTime': (sel32or64(b'i^{__CVDisplayLink=}^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'i^{__CVDisplayLink=}^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'CVPixelBufferRetain': (b'^{__CVBuffer=}^{__CVBuffer=}',), 'CVPixelBufferGetPlaneCount': (sel32or64(b'L^{__CVBuffer=}', b'Q^{__CVBuffer=}'),), 'CVOpenGLTextureCacheRelease': (b'v^{__CVOpenGLTextureCache=}',), 'CVPixelBufferGetBaseAddress': (b'^v^{__CVBuffer=}', '', {'retval': {'c_array_of_variable_length': True}}), 'CVOpenGLBufferPoolRelease': (b'v^{__CVOpenGLBufferPool=}',), 'CVPixelBufferLockBaseAddress': (b'i^{__CVBuffer=}Q',), 'CVOpenGLTextureCacheGetTypeID': (sel32or64(b'L', b'Q'),), 'CVPixelBufferUnlockBaseAddress': (b'i^{__CVBuffer=}Q',), 'CVMetalTextureCacheCreateTextureFromImage': (b'i@@^{__CVBuffer=}@QLLL^^{__CVBuffer=}', '', {'arguments': {8: {'type_modifier': 'o'}}}), 'CVOpenGLTextureIsFlipped': (b'Z^{__CVBuffer=}',), 'CVMetalTextureCacheFlush': (b'v@Q',), 'CVPixelBufferPoolCreatePixelBuffer': (b'i^{__CFAllocator=}^{__CVPixelBufferPool=}^^{__CVBuffer=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVPixelBufferGetTypeID': (sel32or64(b'L', 
b'Q'),), 'CVDisplayLinkGetActualOutputVideoRefreshPeriod': (b'd^{__CVDisplayLink=}',), 'CVPixelBufferGetWidth': (sel32or64(b'L^{__CVBuffer=}', b'Q^{__CVBuffer=}'),), 'CVMetalTextureCacheGetTypeID': (b'L',), 'CVDisplayLinkCreateWithCGDisplay': (b'iI^^{__CVDisplayLink=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVBufferRelease': (b'v^{__CVBuffer=}',), 'CVDisplayLinkStart': (b'i^{__CVDisplayLink=}',), 'CVDisplayLinkGetCurrentTime': (sel32or64(b'i^{__CVDisplayLink=}^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'i^{__CVDisplayLink=}^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'CVPixelFormatDescriptionArrayCreateWithAllPixelFormatTypes': (b'^{__CFArray=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'CVPixelBufferPoolGetAttributes': (b'^{__CFDictionary=}^{__CVPixelBufferPool=}',), 'CVBufferGetAttachments': (b'^{__CFDictionary=}^{__CVBuffer=}I',), 'CVPixelFormatDescriptionRegisterDescriptionWithPixelFormatType': (sel32or64(b'v^{__CFDictionary=}L', b'v^{__CFDictionary=}I'),), 'CVOpenGLBufferPoolCreate': (b'i^{__CFAllocator=}^{__CFDictionary=}^{__CFDictionary=}^^{__CVOpenGLBufferPool=}', '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVDisplayLinkRetain': (b'^{__CVDisplayLink=}^{__CVDisplayLink=}',), 'CVPixelBufferCreateWithIOSurface': (b'i^{__CFAllocator=}^{__IOSurface=}^{__CFDictionary=}^^{__CVBuffer=}', '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVDisplayLinkCreateWithOpenGLDisplayMask': (b'iI^^{__CVDisplayLink=}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVDisplayLinkSetOutputHandler': (b'i^{__CVDisplayLink=}@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^{__CVDisplayLink=}'}, 2: {'type': '^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}', 'type_modifier': 'n'}, 3: {'type': '^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}', 'type_modifier': 'n'}, 4: {'type': 'Q'}, 5: {'type': 'o^Q'}}}}}}), 'CVYCbCrMatrixGetStringForIntegerCodePoint': (b'^{__CFString=}i',), 'CVOpenGLBufferCreate': (sel32or64(b'i^{__CFAllocator=}LL^{__CFDictionary=}^^{__CVBuffer=}', b'i^{__CFAllocator=}QQ^{__CFDictionary=}^^{__CVBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVPixelBufferPoolCreatePixelBufferWithAuxAttributes': (b'i^{__CFAllocator=}^{__CVPixelBufferPool=}^{__CFDictionary=}^^{__CVBuffer=}', '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVOpenGLTextureCacheFlush': (b'v^{__CVOpenGLTextureCache=}Q',), 'CVDisplayLinkCreateWithActiveCGDisplays': (b'i^^{__CVDisplayLink=}', '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVDisplayLinkGetNominalOutputVideoRefreshPeriod': (b'{_CVTime=qii}^{__CVDisplayLink=}',), 'CVPixelBufferCreateResolvedAttributesDictionary': (b'i^{__CFAllocator=}^{__CFArray=}^^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVDisplayLinkSetOutputCallback': (b'i^{__CVDisplayLink=}^?^v', '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 
'arguments': {0: {'type': b'^{__CVDisplayLink=}'}, 1: {'type': b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}', 'type_modifier': 'n'}, 2: {'type': b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}', 'type_modifier': 'N'}, 3: {'type': b'Q'}, 4: {'type': b'^Q', 'type_modifier': 'o'}, 5: {'type': b'^v'}}}}}}), 'CVOpenGLTextureGetName': (b'I^{__CVBuffer=}',), 'CVOpenGLBufferRelease': (b'v^{__CVBuffer=}',), 'CVOpenGLTextureRetain': (b'^{__CVBuffer=}^{__CVBuffer=}',), 'CVOpenGLBufferPoolGetAttributes': (b'^{__CFDictionary=}^{__CVOpenGLBufferPool=}',), 'CVPixelBufferGetWidthOfPlane': (sel32or64(b'L^{__CVBuffer=}L', b'Q^{__CVBuffer=}Q'),), 'CVBufferPropagateAttachments': (b'v^{__CVBuffer=}^{__CVBuffer=}',), 'CVPixelBufferPoolRetain': (b'^{__CVPixelBufferPool=}^{__CVPixelBufferPool=}',), 'CVPixelBufferGetHeight': (sel32or64(b'L^{__CVBuffer=}', b'Q^{__CVBuffer=}'),), 'CVColorPrimariesGetIntegerCodePointForString': (b'i^{__CFString=}',), 'CVOpenGLBufferGetTypeID': (sel32or64(b'L', b'Q'),), 'CVDisplayLinkRelease': (b'v^{__CVDisplayLink=}',), 'CVBufferGetAttachment': (b'@^{__CVBuffer=}^{__CFString=}^I', '', {'arguments': {2: {'type_modifier': 'o'}}}), 'CVDisplayLinkStop': (b'i^{__CVDisplayLink=}',), 'CVPixelFormatDescriptionCreateWithPixelFormatType': (sel32or64(b'^{__CFDictionary=}^{__CFAllocator=}L', b'^{__CFDictionary=}^{__CFAllocator=}I'), '', {'retval': {'already_cfretained': True}}), 'CVMetalTextureGetCleanTexCoords': (b'v^{__CVBuffer=}^f^f^f^f', '', {'arguments': {1: {'c_array_of_fixed_length': 2, 'type_modifier': 'o'}, 2: {'c_array_of_fixed_length': 2, 'type_modifier': 'o'}, 3: {'c_array_of_fixed_length': 2, 'type_modifier': 'o'}, 4: {'c_array_of_fixed_length': 2, 'type_modifier': 'o'}}}), 'CVPixelBufferGetIOSurface': (b'^{__IOSurface=}^{__CVBuffer=}',), 'CVOpenGLTextureCacheCreateTextureFromImage': (b'i^{__CFAllocator=}^{__CVOpenGLTextureCache=}^{__CVBuffer=}^{__CFDictionary=}^^{__CVBuffer=}', '', {'retval': {'already_cfretained': True}}), 'CVDisplayLinkCreateWithCGDisplays': (sel32or64(b'i^Il^^{__CVDisplayLink=}', b'i^Iq^^{__CVDisplayLink=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {0: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}, 2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'CVPixelBufferPoolGetPixelBufferAttributes': (b'^{__CFDictionary=}^{__CVPixelBufferPool=}',), 'CVOpenGLTextureGetTypeID': (sel32or64(b'L', b'Q'),), 'CVImageBufferIsFlipped': (b'Z^{__CVBuffer=}',), 'CVMetalTextureGetTexture': (b'@^{__CVBuffer=}',), 'CVPixelBufferIsPlanar': (b'Z^{__CVBuffer=}',), 'CVBufferRemoveAllAttachments': (b'v^{__CVBuffer=}',), 'CVTransferFunctionGetIntegerCodePointForString': (b'i^{__CFString=}',), 'CVPixelBufferCreateWithBytes': (sel32or64(b'i^{__CFAllocator=}LLL^vL^?^v^{__CFDictionary=}^^{__CVBuffer=}', b'i^{__CFAllocator=}QQI^vQ^?^v^{__CFDictionary=}^^{__CVBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v'}}}}}}), 'CVMetalTextureGetTypeID': (b'L',), 'CVOpenGLBufferPoolRetain': (b'^{__CVOpenGLBufferPool=}^{__CVOpenGLBufferPool=}',), 'CVPixelBufferCreateWithPlanarBytes': (sel32or64(b'i^{__CFAllocator=}LLL^vLL^^v^L^L^L^?^v^{__CFDictionary=}^^{__CVBuffer=}', b'i^{__CFAllocator=}QQI^vQQ^^v^Q^Q^Q^?^v^{__CFDictionary=}^^{__CVBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {11: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v'}, 2: {'type': b'Q'}, 3: {'type': b'Q'}, 4: {'type': 
b'^^v'}}}}}}), 'CVImageBufferGetCleanRect': (sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}^{__CVBuffer=}', b'{CGRect={CGPoint=dd}{CGSize=dd}}^{__CVBuffer=}'),), 'CVImageBufferCreateColorSpaceFromAttachments': (b'^{CGColorSpace=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CVPixelBufferGetBytesPerRowOfPlane': (sel32or64(b'L^{__CVBuffer=}L', b'Q^{__CVBuffer=}Q'),), 'CVColorPrimariesGetStringForIntegerCodePoint': (b'^{__CFString=}i',), 'CVDisplayLinkGetTypeID': (sel32or64(b'L', b'Q'),), 'CVImageBufferGetDisplaySize': (sel32or64(b'{CGSize=ff}^{__CVBuffer=}', b'{CGSize=dd}^{__CVBuffer=}'),), 'CVPixelBufferGetDataSize': (sel32or64(b'L^{__CVBuffer=}', b'Q^{__CVBuffer=}'),), 'CVOpenGLBufferPoolGetOpenGLBufferAttributes': (b'^{__CFDictionary=}^{__CVOpenGLBufferPool=}',), 'CVOpenGLBufferAttach': (b'i^{__CVBuffer=}^{_CGLContextObject=}Iii',), 'CVPixelBufferGetBaseAddressOfPlane': (sel32or64(b'^v^{__CVBuffer=}L', b'^v^{__CVBuffer=}Q'), '', {'retval': {'c_array_of_variable_length': True}}), 'CVDisplayLinkIsRunning': (b'Z^{__CVDisplayLink=}',), 'CVPixelBufferGetPixelFormatType': (sel32or64(b'L^{__CVBuffer=}', b'I^{__CVBuffer=}'),), 'CVBufferRemoveAttachment': (b'v^{__CVBuffer=}^{__CFString=}',), 'CVOpenGLBufferGetAttributes': (b'^{__CFDictionary=}^{__CVBuffer=}',), 'CVDisplayLinkGetOutputVideoLatency': (b'{_CVTime=qii}^{__CVDisplayLink=}',), 'CVPixelBufferGetBytesPerRow': (sel32or64(b'L^{__CVBuffer=}', b'Q^{__CVBuffer=}'),), 'CVMetalTextureCacheCreate': (b'i@@@@^{__CVBuffer=}',), 'CVPixelBufferGetExtendedPixels': (sel32or64(b'v^{__CVBuffer=}^L^L^L^L', b'v^{__CVBuffer=}^Q^Q^Q^Q'), '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}, 4: {'type_modifier': 'o'}}}), 'CVTransferFunctionGetStringForIntegerCodePoint': (b'^{__CFString=}i',), 'CVImageBufferGetColorSpace': (b'^{CGColorSpace=}^{__CVBuffer=}',), 'CVDisplayLinkGetCurrentCGDisplay': (b'I^{__CVDisplayLink=}',), 'CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext': (b'i^{__CVDisplayLink=}^{_CGLContextObject=}^{_CGLPixelFormatObject=}',), 'CVPixelBufferRelease': (b'v^{__CVBuffer=}',), 'CVBufferSetAttachments': (b'v^{__CVBuffer=}^{__CFDictionary=}I',), 'CVOpenGLTextureGetTarget': (b'I^{__CVBuffer=}',), 'CVGetHostClockFrequency': (b'd', '', {'variadic': False}), 'CVGetHostClockMinimumTimeDelta': (b'I', '', {'variadic': False}), 'CVOpenGLBufferRetain': (b'^{__CVBuffer=}^{__CVBuffer=}',), 'CVMetalTextureIsFlipped': (b'Z^{__CVBuffer=}',), 'CVOpenGLTextureCacheCreate': (b'i^{__CFAllocator=}^{__CFDictionary=}^{_CGLContextObject=}^{_CGLPixelFormatObject=}^{__CFDictionary=}^^{__CVOpenGLTextureCache=}', '', {'retval': {'already_cfretained': True}}), 'CVPixelBufferPoolFlush': (b'v^{__CVPixelBufferPool=}L',), 'CVOpenGLTextureGetCleanTexCoords': (b'v^{__CVBuffer=}[2f][2f][2f][2f]',)} -aliases = {'CV_INLINE': 'CF_INLINE', 'CVImageBufferRef': 'CVBufferRef', 'kCVReturnError': 'kCVReturnFirst'} -cftypes=[('CVBufferRef', b'^{__CVBuffer=}', 'CVBufferGetTypeID', None), ('CVDisplayLinkRef', b'^{__CVDisplayLink=}', 'CVDisplayLinkGetTypeID', None), ('CVOpenGLBufferPoolRef', b'^{__CVOpenGLBufferPool=}', 'CVOpenGLBufferPoolGetTypeID', None), ('CVOpenGLTextureCacheRef', b'^{__CVOpenGLTextureCache=}', 'CVOpenGLTextureCacheGetTypeID', None), ('CVPixelBufferPoolRef', b'^{__CVPixelBufferPool=}', 'CVPixelBufferPoolGetTypeID', None), ('CVOpenGLBufferRef', b'^{__CVOpenGLBuffer=}', 'CVOpenGLBufferGetTypeID', None), ('CVPixelBufferRef', b'^{__CVPixelBuffer=}', 'CVPixelBufferGetTypeID', None), ('CVOpenGLTextureRef', 
b'^{__CVOpenGLTexture=}', 'CVOpenGLTextureGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Quartz/CoreVideo/_metadata.pyc b/env/lib/python2.7/site-packages/Quartz/CoreVideo/_metadata.pyc deleted file mode 100644 index d828934e..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/CoreVideo/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/ImageIO/PyObjCOverrides.bridgesupport b/env/lib/python2.7/site-packages/Quartz/ImageIO/PyObjCOverrides.bridgesupport deleted file mode 100644 index a9b5d3c1..00000000 --- a/env/lib/python2.7/site-packages/Quartz/ImageIO/PyObjCOverrides.bridgesupport +++ /dev/null @@ -1,394 +0,0 @@ [394 deleted lines of bridgesupport XML not shown] diff --git a/env/lib/python2.7/site-packages/Quartz/ImageIO/__init__.py b/env/lib/python2.7/site-packages/Quartz/ImageIO/__init__.py deleted file mode 100644 index 2b910f84..00000000 --- a/env/lib/python2.7/site-packages/Quartz/ImageIO/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the ImageIO framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes.
-''' -import sys -import objc -import Quartz.CoreGraphics - -from Quartz.ImageIO import _metadata - -sys.modules['Quartz.ImageIO'] = mod = objc.ObjCLazyModule('Quartz.ImageIO', - "com.apple.ImageIO.framework", - objc.pathForFramework("/System/Library/Frameworks/ApplicationServices.framework/Frameworks/ImageIO.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Quartz.CoreGraphics,)) - -import sys -del sys.modules['Quartz.ImageIO._metadata'] diff --git a/env/lib/python2.7/site-packages/Quartz/ImageIO/__init__.pyc b/env/lib/python2.7/site-packages/Quartz/ImageIO/__init__.pyc deleted file mode 100644 index 86f490ca..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/ImageIO/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/ImageIO/_metadata.py b/env/lib/python2.7/site-packages/Quartz/ImageIO/_metadata.py deleted file mode 100644 index d908c0a4..00000000 --- a/env/lib/python2.7/site-packages/Quartz/ImageIO/_metadata.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Jul 4 16:14:03 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$kCFErrorDomainCGImageMetadata@^{__CFString=}$kCGImageAuxiliaryDataInfoData@^{__CFString=}$kCGImageAuxiliaryDataInfoDataDescription@^{__CFString=}$kCGImageAuxiliaryDataInfoMetadata@^{__CFString=}$kCGImageAuxiliaryDataTypeDepth@^{__CFString=}$kCGImageAuxiliaryDataTypeDisparity@^{__CFString=}$kCGImageAuxiliaryDataTypePortraitEffectsMatte@^{__CFString=}$kCGImageDestinationBackgroundColor@^{__CFString=}$kCGImageDestinationDateTime@^{__CFString=}$kCGImageDestinationEmbedThumbnail@^{__CFString=}$kCGImageDestinationImageMaxPixelSize@^{__CFString=}$kCGImageDestinationLossyCompressionQuality@^{__CFString=}$kCGImageDestinationMergeMetadata@^{__CFString=}$kCGImageDestinationMetadata@^{__CFString=}$kCGImageDestinationOptimizeColorForSharing@^{__CFString=}$kCGImageDestinationOrientation@^{__CFString=}$kCGImageMetadataEnumerateRecursively@^{__CFString=}$kCGImageMetadataNamespaceDublinCore@^{__CFString=}$kCGImageMetadataNamespaceExif@^{__CFString=}$kCGImageMetadataNamespaceExifAux@^{__CFString=}$kCGImageMetadataNamespaceExifEX@^{__CFString=}$kCGImageMetadataNamespaceIPTCCore@^{__CFString=}$kCGImageMetadataNamespaceIPTCExtension@^{__CFString=}$kCGImageMetadataNamespacePhotoshop@^{__CFString=}$kCGImageMetadataNamespaceTIFF@^{__CFString=}$kCGImageMetadataNamespaceXMPBasic@^{__CFString=}$kCGImageMetadataNamespaceXMPRights@^{__CFString=}$kCGImageMetadataPrefixDublinCore@^{__CFString=}$kCGImageMetadataPrefixExif@^{__CFString=}$kCGImageMetadataPrefixExifAux@^{__CFString=}$kCGImageMetadataPrefixExifEX@^{__CFString=}$kCGImageMetadataPrefixIPTCCore@^{__CFString=}$kCGImageMetadataPrefixIPTCExtension@^{__CFString=}$kCGImageMetadataPrefixPhotoshop@^{__CFString=}$kCGImageMetadataPrefixTIFF@^{__CFString=}$kCGImageMetadataPrefixXMPBasic@^{__CFString=}$kCGImageMetadataPrefixXMPRights@^{__CFString=}$kCGImageMetadataShouldExcludeGPS@^{__CFString=}$kCGImageMetadataShouldExcludeXMP@^{__CFString=}$kCGImageProperty8BIMDictionary@^{__CFString=}$kCGImageProperty8BIMLayerNames@^{__CFString=}$kCGImageProperty8BIMVersion@^{__CFString=}$kCGImagePropertyAPNGDelayTime@^{__CFString=}$kCGImageProper
tyAPNGLoopCount@^{__CFString=}$kCGImagePropertyAPNGUnclampedDelayTime@^{__CFString=}$kCGImagePropertyAuxiliaryData@^{__CFString=}$kCGImagePropertyAuxiliaryDataType@^{__CFString=}$kCGImagePropertyBytesPerRow@^{__CFString=}$kCGImagePropertyCIFFCameraSerialNumber@^{__CFString=}$kCGImagePropertyCIFFContinuousDrive@^{__CFString=}$kCGImagePropertyCIFFDescription@^{__CFString=}$kCGImagePropertyCIFFDictionary@^{__CFString=}$kCGImagePropertyCIFFFirmware@^{__CFString=}$kCGImagePropertyCIFFFlashExposureComp@^{__CFString=}$kCGImagePropertyCIFFFocusMode@^{__CFString=}$kCGImagePropertyCIFFImageFileName@^{__CFString=}$kCGImagePropertyCIFFImageName@^{__CFString=}$kCGImagePropertyCIFFImageSerialNumber@^{__CFString=}$kCGImagePropertyCIFFLensMaxMM@^{__CFString=}$kCGImagePropertyCIFFLensMinMM@^{__CFString=}$kCGImagePropertyCIFFLensModel@^{__CFString=}$kCGImagePropertyCIFFMeasuredEV@^{__CFString=}$kCGImagePropertyCIFFMeteringMode@^{__CFString=}$kCGImagePropertyCIFFOwnerName@^{__CFString=}$kCGImagePropertyCIFFRecordID@^{__CFString=}$kCGImagePropertyCIFFReleaseMethod@^{__CFString=}$kCGImagePropertyCIFFReleaseTiming@^{__CFString=}$kCGImagePropertyCIFFSelfTimingTime@^{__CFString=}$kCGImagePropertyCIFFShootingMode@^{__CFString=}$kCGImagePropertyCIFFWhiteBalanceIndex@^{__CFString=}$kCGImagePropertyColorModel@^{__CFString=}$kCGImagePropertyColorModelCMYK@^{__CFString=}$kCGImagePropertyColorModelGray@^{__CFString=}$kCGImagePropertyColorModelLab@^{__CFString=}$kCGImagePropertyColorModelRGB@^{__CFString=}$kCGImagePropertyDNGActiveArea@^{__CFString=}$kCGImagePropertyDNGAnalogBalance@^{__CFString=}$kCGImagePropertyDNGAntiAliasStrength@^{__CFString=}$kCGImagePropertyDNGAsShotICCProfile@^{__CFString=}$kCGImagePropertyDNGAsShotNeutral@^{__CFString=}$kCGImagePropertyDNGAsShotPreProfileMatrix@^{__CFString=}$kCGImagePropertyDNGAsShotProfileName@^{__CFString=}$kCGImagePropertyDNGAsShotWhiteXY@^{__CFString=}$kCGImagePropertyDNGBackwardVersion@^{__CFString=}$kCGImagePropertyDNGBaselineExposure@^{__CFString=}$kCGImagePropertyDNGBaselineExposureOffset@^{__CFString=}$kCGImagePropertyDNGBaselineNoise@^{__CFString=}$kCGImagePropertyDNGBaselineSharpness@^{__CFString=}$kCGImagePropertyDNGBayerGreenSplit@^{__CFString=}$kCGImagePropertyDNGBestQualityScale@^{__CFString=}$kCGImagePropertyDNGBlackLevel@^{__CFString=}$kCGImagePropertyDNGBlackLevelDeltaH@^{__CFString=}$kCGImagePropertyDNGBlackLevelDeltaV@^{__CFString=}$kCGImagePropertyDNGBlackLevelRepeatDim@^{__CFString=}$kCGImagePropertyDNGCFALayout@^{__CFString=}$kCGImagePropertyDNGCFAPlaneColor@^{__CFString=}$kCGImagePropertyDNGCalibrationIlluminant1@^{__CFString=}$kCGImagePropertyDNGCalibrationIlluminant2@^{__CFString=}$kCGImagePropertyDNGCameraCalibration1@^{__CFString=}$kCGImagePropertyDNGCameraCalibration2@^{__CFString=}$kCGImagePropertyDNGCameraCalibrationSignature@^{__CFString=}$kCGImagePropertyDNGCameraSerialNumber@^{__CFString=}$kCGImagePropertyDNGChromaBlurRadius@^{__CFString=}$kCGImagePropertyDNGColorMatrix1@^{__CFString=}$kCGImagePropertyDNGColorMatrix2@^{__CFString=}$kCGImagePropertyDNGColorimetricReference@^{__CFString=}$kCGImagePropertyDNGCurrentICCProfile@^{__CFString=}$kCGImagePropertyDNGCurrentPreProfileMatrix@^{__CFString=}$kCGImagePropertyDNGDefaultBlackRender@^{__CFString=}$kCGImagePropertyDNGDefaultCropOrigin@^{__CFString=}$kCGImagePropertyDNGDefaultCropSize@^{__CFString=}$kCGImagePropertyDNGDefaultScale@^{__CFString=}$kCGImagePropertyDNGDefaultUserCrop@^{__CFString=}$kCGImagePropertyDNGDictionary@^{__CFString=}$kCGImagePropertyDNGExtraCameraProfiles@^{__CFString=}$kCGIma
gePropertyDNGFixVignetteRadial@^{__CFString=}$kCGImagePropertyDNGForwardMatrix1@^{__CFString=}$kCGImagePropertyDNGForwardMatrix2@^{__CFString=}$kCGImagePropertyDNGLensInfo@^{__CFString=}$kCGImagePropertyDNGLinearResponseLimit@^{__CFString=}$kCGImagePropertyDNGLinearizationTable@^{__CFString=}$kCGImagePropertyDNGLocalizedCameraModel@^{__CFString=}$kCGImagePropertyDNGMakerNoteSafety@^{__CFString=}$kCGImagePropertyDNGMaskedAreas@^{__CFString=}$kCGImagePropertyDNGNewRawImageDigest@^{__CFString=}$kCGImagePropertyDNGNoiseProfile@^{__CFString=}$kCGImagePropertyDNGNoiseReductionApplied@^{__CFString=}$kCGImagePropertyDNGOpcodeList1@^{__CFString=}$kCGImagePropertyDNGOpcodeList2@^{__CFString=}$kCGImagePropertyDNGOpcodeList3@^{__CFString=}$kCGImagePropertyDNGOriginalBestQualityFinalSize@^{__CFString=}$kCGImagePropertyDNGOriginalDefaultCropSize@^{__CFString=}$kCGImagePropertyDNGOriginalDefaultFinalSize@^{__CFString=}$kCGImagePropertyDNGOriginalRawFileData@^{__CFString=}$kCGImagePropertyDNGOriginalRawFileDigest@^{__CFString=}$kCGImagePropertyDNGOriginalRawFileName@^{__CFString=}$kCGImagePropertyDNGPreviewApplicationName@^{__CFString=}$kCGImagePropertyDNGPreviewApplicationVersion@^{__CFString=}$kCGImagePropertyDNGPreviewColorSpace@^{__CFString=}$kCGImagePropertyDNGPreviewDateTime@^{__CFString=}$kCGImagePropertyDNGPreviewSettingsDigest@^{__CFString=}$kCGImagePropertyDNGPreviewSettingsName@^{__CFString=}$kCGImagePropertyDNGPrivateData@^{__CFString=}$kCGImagePropertyDNGProfileCalibrationSignature@^{__CFString=}$kCGImagePropertyDNGProfileCopyright@^{__CFString=}$kCGImagePropertyDNGProfileEmbedPolicy@^{__CFString=}$kCGImagePropertyDNGProfileHueSatMapData1@^{__CFString=}$kCGImagePropertyDNGProfileHueSatMapData2@^{__CFString=}$kCGImagePropertyDNGProfileHueSatMapDims@^{__CFString=}$kCGImagePropertyDNGProfileHueSatMapEncoding@^{__CFString=}$kCGImagePropertyDNGProfileLookTableData@^{__CFString=}$kCGImagePropertyDNGProfileLookTableDims@^{__CFString=}$kCGImagePropertyDNGProfileLookTableEncoding@^{__CFString=}$kCGImagePropertyDNGProfileName@^{__CFString=}$kCGImagePropertyDNGProfileToneCurve@^{__CFString=}$kCGImagePropertyDNGRawDataUniqueID@^{__CFString=}$kCGImagePropertyDNGRawImageDigest@^{__CFString=}$kCGImagePropertyDNGRawToPreviewGain@^{__CFString=}$kCGImagePropertyDNGReductionMatrix1@^{__CFString=}$kCGImagePropertyDNGReductionMatrix2@^{__CFString=}$kCGImagePropertyDNGRowInterleaveFactor@^{__CFString=}$kCGImagePropertyDNGShadowScale@^{__CFString=}$kCGImagePropertyDNGSubTileBlockSize@^{__CFString=}$kCGImagePropertyDNGUniqueCameraModel@^{__CFString=}$kCGImagePropertyDNGVersion@^{__CFString=}$kCGImagePropertyDNGWarpFisheye@^{__CFString=}$kCGImagePropertyDNGWarpRectilinear@^{__CFString=}$kCGImagePropertyDNGWhiteLevel@^{__CFString=}$kCGImagePropertyDPIHeight@^{__CFString=}$kCGImagePropertyDPIWidth@^{__CFString=}$kCGImagePropertyDepth@^{__CFString=}$kCGImagePropertyExifApertureValue@^{__CFString=}$kCGImagePropertyExifAuxDictionary@^{__CFString=}$kCGImagePropertyExifAuxFirmware@^{__CFString=}$kCGImagePropertyExifAuxFlashCompensation@^{__CFString=}$kCGImagePropertyExifAuxImageNumber@^{__CFString=}$kCGImagePropertyExifAuxLensID@^{__CFString=}$kCGImagePropertyExifAuxLensInfo@^{__CFString=}$kCGImagePropertyExifAuxLensModel@^{__CFString=}$kCGImagePropertyExifAuxLensSerialNumber@^{__CFString=}$kCGImagePropertyExifAuxOwnerName@^{__CFString=}$kCGImagePropertyExifAuxSerialNumber@^{__CFString=}$kCGImagePropertyExifBodySerialNumber@^{__CFString=}$kCGImagePropertyExifBrightnessValue@^{__CFString=}$kCGImagePropertyExifCFAPattern@^{__C
FString=}$kCGImagePropertyExifCameraOwnerName@^{__CFString=}$kCGImagePropertyExifColorSpace@^{__CFString=}$kCGImagePropertyExifComponentsConfiguration@^{__CFString=}$kCGImagePropertyExifCompressedBitsPerPixel@^{__CFString=}$kCGImagePropertyExifContrast@^{__CFString=}$kCGImagePropertyExifCustomRendered@^{__CFString=}$kCGImagePropertyExifDateTimeDigitized@^{__CFString=}$kCGImagePropertyExifDateTimeOriginal@^{__CFString=}$kCGImagePropertyExifDeviceSettingDescription@^{__CFString=}$kCGImagePropertyExifDictionary@^{__CFString=}$kCGImagePropertyExifDigitalZoomRatio@^{__CFString=}$kCGImagePropertyExifExposureBiasValue@^{__CFString=}$kCGImagePropertyExifExposureIndex@^{__CFString=}$kCGImagePropertyExifExposureMode@^{__CFString=}$kCGImagePropertyExifExposureProgram@^{__CFString=}$kCGImagePropertyExifExposureTime@^{__CFString=}$kCGImagePropertyExifFNumber@^{__CFString=}$kCGImagePropertyExifFileSource@^{__CFString=}$kCGImagePropertyExifFlash@^{__CFString=}$kCGImagePropertyExifFlashEnergy@^{__CFString=}$kCGImagePropertyExifFlashPixVersion@^{__CFString=}$kCGImagePropertyExifFocalLenIn35mmFilm@^{__CFString=}$kCGImagePropertyExifFocalLength@^{__CFString=}$kCGImagePropertyExifFocalPlaneResolutionUnit@^{__CFString=}$kCGImagePropertyExifFocalPlaneXResolution@^{__CFString=}$kCGImagePropertyExifFocalPlaneYResolution@^{__CFString=}$kCGImagePropertyExifGainControl@^{__CFString=}$kCGImagePropertyExifGamma@^{__CFString=}$kCGImagePropertyExifISOSpeed@^{__CFString=}$kCGImagePropertyExifISOSpeedLatitudeyyy@^{__CFString=}$kCGImagePropertyExifISOSpeedLatitudezzz@^{__CFString=}$kCGImagePropertyExifISOSpeedRatings@^{__CFString=}$kCGImagePropertyExifImageUniqueID@^{__CFString=}$kCGImagePropertyExifLensMake@^{__CFString=}$kCGImagePropertyExifLensModel@^{__CFString=}$kCGImagePropertyExifLensSerialNumber@^{__CFString=}$kCGImagePropertyExifLensSpecification@^{__CFString=}$kCGImagePropertyExifLightSource@^{__CFString=}$kCGImagePropertyExifMakerNote@^{__CFString=}$kCGImagePropertyExifMaxApertureValue@^{__CFString=}$kCGImagePropertyExifMeteringMode@^{__CFString=}$kCGImagePropertyExifOECF@^{__CFString=}$kCGImagePropertyExifPixelXDimension@^{__CFString=}$kCGImagePropertyExifPixelYDimension@^{__CFString=}$kCGImagePropertyExifRecommendedExposureIndex@^{__CFString=}$kCGImagePropertyExifRelatedSoundFile@^{__CFString=}$kCGImagePropertyExifSaturation@^{__CFString=}$kCGImagePropertyExifSceneCaptureType@^{__CFString=}$kCGImagePropertyExifSceneType@^{__CFString=}$kCGImagePropertyExifSensingMethod@^{__CFString=}$kCGImagePropertyExifSensitivityType@^{__CFString=}$kCGImagePropertyExifSharpness@^{__CFString=}$kCGImagePropertyExifShutterSpeedValue@^{__CFString=}$kCGImagePropertyExifSpatialFrequencyResponse@^{__CFString=}$kCGImagePropertyExifSpectralSensitivity@^{__CFString=}$kCGImagePropertyExifStandardOutputSensitivity@^{__CFString=}$kCGImagePropertyExifSubjectArea@^{__CFString=}$kCGImagePropertyExifSubjectDistRange@^{__CFString=}$kCGImagePropertyExifSubjectDistance@^{__CFString=}$kCGImagePropertyExifSubjectLocation@^{__CFString=}$kCGImagePropertyExifSubsecTime@^{__CFString=}$kCGImagePropertyExifSubsecTimeDigitized@^{__CFString=}$kCGImagePropertyExifSubsecTimeOrginal@^{__CFString=}$kCGImagePropertyExifSubsecTimeOriginal@^{__CFString=}$kCGImagePropertyExifUserComment@^{__CFString=}$kCGImagePropertyExifVersion@^{__CFString=}$kCGImagePropertyExifWhiteBalance@^{__CFString=}$kCGImagePropertyFileContentsDictionary@^{__CFString=}$kCGImagePropertyFileSize@^{__CFString=}$kCGImagePropertyGIFDelayTime@^{__CFString=}$kCGImagePropertyGIFDictionary@^{__CFSt
ring=}$kCGImagePropertyGIFHasGlobalColorMap@^{__CFString=}$kCGImagePropertyGIFImageColorMap@^{__CFString=}$kCGImagePropertyGIFLoopCount@^{__CFString=}$kCGImagePropertyGIFUnclampedDelayTime@^{__CFString=}$kCGImagePropertyGPSAltitude@^{__CFString=}$kCGImagePropertyGPSAltitudeRef@^{__CFString=}$kCGImagePropertyGPSAreaInformation@^{__CFString=}$kCGImagePropertyGPSDOP@^{__CFString=}$kCGImagePropertyGPSDateStamp@^{__CFString=}$kCGImagePropertyGPSDestBearing@^{__CFString=}$kCGImagePropertyGPSDestBearingRef@^{__CFString=}$kCGImagePropertyGPSDestDistance@^{__CFString=}$kCGImagePropertyGPSDestDistanceRef@^{__CFString=}$kCGImagePropertyGPSDestLatitude@^{__CFString=}$kCGImagePropertyGPSDestLatitudeRef@^{__CFString=}$kCGImagePropertyGPSDestLongitude@^{__CFString=}$kCGImagePropertyGPSDestLongitudeRef@^{__CFString=}$kCGImagePropertyGPSDictionary@^{__CFString=}$kCGImagePropertyGPSDifferental@^{__CFString=}$kCGImagePropertyGPSHPositioningError@^{__CFString=}$kCGImagePropertyGPSImgDirection@^{__CFString=}$kCGImagePropertyGPSImgDirectionRef@^{__CFString=}$kCGImagePropertyGPSLatitude@^{__CFString=}$kCGImagePropertyGPSLatitudeRef@^{__CFString=}$kCGImagePropertyGPSLongitude@^{__CFString=}$kCGImagePropertyGPSLongitudeRef@^{__CFString=}$kCGImagePropertyGPSMapDatum@^{__CFString=}$kCGImagePropertyGPSMeasureMode@^{__CFString=}$kCGImagePropertyGPSProcessingMethod@^{__CFString=}$kCGImagePropertyGPSSatellites@^{__CFString=}$kCGImagePropertyGPSSpeed@^{__CFString=}$kCGImagePropertyGPSSpeedRef@^{__CFString=}$kCGImagePropertyGPSStatus@^{__CFString=}$kCGImagePropertyGPSTimeStamp@^{__CFString=}$kCGImagePropertyGPSTrack@^{__CFString=}$kCGImagePropertyGPSTrackRef@^{__CFString=}$kCGImagePropertyGPSVersion@^{__CFString=}$kCGImagePropertyHasAlpha@^{__CFString=}$kCGImagePropertyHeight@^{__CFString=}$kCGImagePropertyIPTCActionAdvised@^{__CFString=}$kCGImagePropertyIPTCByline@^{__CFString=}$kCGImagePropertyIPTCBylineTitle@^{__CFString=}$kCGImagePropertyIPTCCaptionAbstract@^{__CFString=}$kCGImagePropertyIPTCCategory@^{__CFString=}$kCGImagePropertyIPTCCity@^{__CFString=}$kCGImagePropertyIPTCContact@^{__CFString=}$kCGImagePropertyIPTCContactInfoAddress@^{__CFString=}$kCGImagePropertyIPTCContactInfoCity@^{__CFString=}$kCGImagePropertyIPTCContactInfoCountry@^{__CFString=}$kCGImagePropertyIPTCContactInfoEmails@^{__CFString=}$kCGImagePropertyIPTCContactInfoPhones@^{__CFString=}$kCGImagePropertyIPTCContactInfoPostalCode@^{__CFString=}$kCGImagePropertyIPTCContactInfoStateProvince@^{__CFString=}$kCGImagePropertyIPTCContactInfoWebURLs@^{__CFString=}$kCGImagePropertyIPTCContentLocationCode@^{__CFString=}$kCGImagePropertyIPTCContentLocationName@^{__CFString=}$kCGImagePropertyIPTCCopyrightNotice@^{__CFString=}$kCGImagePropertyIPTCCountryPrimaryLocationCode@^{__CFString=}$kCGImagePropertyIPTCCountryPrimaryLocationName@^{__CFString=}$kCGImagePropertyIPTCCreatorContactInfo@^{__CFString=}$kCGImagePropertyIPTCCredit@^{__CFString=}$kCGImagePropertyIPTCDateCreated@^{__CFString=}$kCGImagePropertyIPTCDictionary@^{__CFString=}$kCGImagePropertyIPTCDigitalCreationDate@^{__CFString=}$kCGImagePropertyIPTCDigitalCreationTime@^{__CFString=}$kCGImagePropertyIPTCEditStatus@^{__CFString=}$kCGImagePropertyIPTCEditorialUpdate@^{__CFString=}$kCGImagePropertyIPTCExpirationDate@^{__CFString=}$kCGImagePropertyIPTCExpirationTime@^{__CFString=}$kCGImagePropertyIPTCExtAboutCvTerm@^{__CFString=}$kCGImagePropertyIPTCExtAboutCvTermCvId@^{__CFString=}$kCGImagePropertyIPTCExtAboutCvTermId@^{__CFString=}$kCGImagePropertyIPTCExtAboutCvTermName@^{__CFString=}$kCGImagePropertyIPTCExt
AboutCvTermRefinedAbout@^{__CFString=}$kCGImagePropertyIPTCExtAddlModelInfo@^{__CFString=}$kCGImagePropertyIPTCExtArtworkCircaDateCreated@^{__CFString=}$kCGImagePropertyIPTCExtArtworkContentDescription@^{__CFString=}$kCGImagePropertyIPTCExtArtworkContributionDescription@^{__CFString=}$kCGImagePropertyIPTCExtArtworkCopyrightNotice@^{__CFString=}$kCGImagePropertyIPTCExtArtworkCopyrightOwnerID@^{__CFString=}$kCGImagePropertyIPTCExtArtworkCopyrightOwnerName@^{__CFString=}$kCGImagePropertyIPTCExtArtworkCreator@^{__CFString=}$kCGImagePropertyIPTCExtArtworkCreatorID@^{__CFString=}$kCGImagePropertyIPTCExtArtworkDateCreated@^{__CFString=}$kCGImagePropertyIPTCExtArtworkLicensorID@^{__CFString=}$kCGImagePropertyIPTCExtArtworkLicensorName@^{__CFString=}$kCGImagePropertyIPTCExtArtworkOrObject@^{__CFString=}$kCGImagePropertyIPTCExtArtworkPhysicalDescription@^{__CFString=}$kCGImagePropertyIPTCExtArtworkSource@^{__CFString=}$kCGImagePropertyIPTCExtArtworkSourceInvURL@^{__CFString=}$kCGImagePropertyIPTCExtArtworkSourceInventoryNo@^{__CFString=}$kCGImagePropertyIPTCExtArtworkStylePeriod@^{__CFString=}$kCGImagePropertyIPTCExtArtworkTitle@^{__CFString=}$kCGImagePropertyIPTCExtAudioBitrate@^{__CFString=}$kCGImagePropertyIPTCExtAudioBitrateMode@^{__CFString=}$kCGImagePropertyIPTCExtAudioChannelCount@^{__CFString=}$kCGImagePropertyIPTCExtCircaDateCreated@^{__CFString=}$kCGImagePropertyIPTCExtContainerFormat@^{__CFString=}$kCGImagePropertyIPTCExtContainerFormatIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtContainerFormatName@^{__CFString=}$kCGImagePropertyIPTCExtContributor@^{__CFString=}$kCGImagePropertyIPTCExtContributorIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtContributorName@^{__CFString=}$kCGImagePropertyIPTCExtContributorRole@^{__CFString=}$kCGImagePropertyIPTCExtControlledVocabularyTerm@^{__CFString=}$kCGImagePropertyIPTCExtCopyrightYear@^{__CFString=}$kCGImagePropertyIPTCExtCreator@^{__CFString=}$kCGImagePropertyIPTCExtCreatorIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtCreatorName@^{__CFString=}$kCGImagePropertyIPTCExtCreatorRole@^{__CFString=}$kCGImagePropertyIPTCExtDataOnScreen@^{__CFString=}$kCGImagePropertyIPTCExtDataOnScreenRegion@^{__CFString=}$kCGImagePropertyIPTCExtDataOnScreenRegionD@^{__CFString=}$kCGImagePropertyIPTCExtDataOnScreenRegionH@^{__CFString=}$kCGImagePropertyIPTCExtDataOnScreenRegionText@^{__CFString=}$kCGImagePropertyIPTCExtDataOnScreenRegionUnit@^{__CFString=}$kCGImagePropertyIPTCExtDataOnScreenRegionW@^{__CFString=}$kCGImagePropertyIPTCExtDataOnScreenRegionX@^{__CFString=}$kCGImagePropertyIPTCExtDataOnScreenRegionY@^{__CFString=}$kCGImagePropertyIPTCExtDigitalImageGUID@^{__CFString=}$kCGImagePropertyIPTCExtDigitalSourceFileType@^{__CFString=}$kCGImagePropertyIPTCExtDigitalSourceType@^{__CFString=}$kCGImagePropertyIPTCExtDopesheet@^{__CFString=}$kCGImagePropertyIPTCExtDopesheetLink@^{__CFString=}$kCGImagePropertyIPTCExtDopesheetLinkLink@^{__CFString=}$kCGImagePropertyIPTCExtDopesheetLinkLinkQualifier@^{__CFString=}$kCGImagePropertyIPTCExtEmbdEncRightsExpr@^{__CFString=}$kCGImagePropertyIPTCExtEmbeddedEncodedRightsExpr@^{__CFString=}$kCGImagePropertyIPTCExtEmbeddedEncodedRightsExprLangID@^{__CFString=}$kCGImagePropertyIPTCExtEmbeddedEncodedRightsExprType@^{__CFString=}$kCGImagePropertyIPTCExtEpisode@^{__CFString=}$kCGImagePropertyIPTCExtEpisodeIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtEpisodeName@^{__CFString=}$kCGImagePropertyIPTCExtEpisodeNumber@^{__CFString=}$kCGImagePropertyIPTCExtEvent@^{__CFString=}$kCGImagePropertyIPTCExtExternalMetadataLink@^{__CFString=
}$kCGImagePropertyIPTCExtFeedIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtGenre@^{__CFString=}$kCGImagePropertyIPTCExtGenreCvId@^{__CFString=}$kCGImagePropertyIPTCExtGenreCvTermId@^{__CFString=}$kCGImagePropertyIPTCExtGenreCvTermName@^{__CFString=}$kCGImagePropertyIPTCExtGenreCvTermRefinedAbout@^{__CFString=}$kCGImagePropertyIPTCExtHeadline@^{__CFString=}$kCGImagePropertyIPTCExtIPTCLastEdited@^{__CFString=}$kCGImagePropertyIPTCExtLinkedEncRightsExpr@^{__CFString=}$kCGImagePropertyIPTCExtLinkedEncodedRightsExpr@^{__CFString=}$kCGImagePropertyIPTCExtLinkedEncodedRightsExprLangID@^{__CFString=}$kCGImagePropertyIPTCExtLinkedEncodedRightsExprType@^{__CFString=}$kCGImagePropertyIPTCExtLocationCity@^{__CFString=}$kCGImagePropertyIPTCExtLocationCountryCode@^{__CFString=}$kCGImagePropertyIPTCExtLocationCountryName@^{__CFString=}$kCGImagePropertyIPTCExtLocationCreated@^{__CFString=}$kCGImagePropertyIPTCExtLocationGPSAltitude@^{__CFString=}$kCGImagePropertyIPTCExtLocationGPSLatitude@^{__CFString=}$kCGImagePropertyIPTCExtLocationGPSLongitude@^{__CFString=}$kCGImagePropertyIPTCExtLocationIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtLocationLocationId@^{__CFString=}$kCGImagePropertyIPTCExtLocationLocationName@^{__CFString=}$kCGImagePropertyIPTCExtLocationProvinceState@^{__CFString=}$kCGImagePropertyIPTCExtLocationShown@^{__CFString=}$kCGImagePropertyIPTCExtLocationSublocation@^{__CFString=}$kCGImagePropertyIPTCExtLocationWorldRegion@^{__CFString=}$kCGImagePropertyIPTCExtMaxAvailHeight@^{__CFString=}$kCGImagePropertyIPTCExtMaxAvailWidth@^{__CFString=}$kCGImagePropertyIPTCExtModelAge@^{__CFString=}$kCGImagePropertyIPTCExtOrganisationInImageCode@^{__CFString=}$kCGImagePropertyIPTCExtOrganisationInImageName@^{__CFString=}$kCGImagePropertyIPTCExtPersonHeard@^{__CFString=}$kCGImagePropertyIPTCExtPersonHeardIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtPersonHeardName@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImage@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImageCharacteristic@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImageCvTermCvId@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImageCvTermId@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImageCvTermName@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImageCvTermRefinedAbout@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImageDescription@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImageId@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImageName@^{__CFString=}$kCGImagePropertyIPTCExtPersonInImageWDetails@^{__CFString=}$kCGImagePropertyIPTCExtProductInImage@^{__CFString=}$kCGImagePropertyIPTCExtProductInImageDescription@^{__CFString=}$kCGImagePropertyIPTCExtProductInImageGTIN@^{__CFString=}$kCGImagePropertyIPTCExtProductInImageName@^{__CFString=}$kCGImagePropertyIPTCExtPublicationEvent@^{__CFString=}$kCGImagePropertyIPTCExtPublicationEventDate@^{__CFString=}$kCGImagePropertyIPTCExtPublicationEventIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtPublicationEventName@^{__CFString=}$kCGImagePropertyIPTCExtRating@^{__CFString=}$kCGImagePropertyIPTCExtRatingRatingRegion@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionCity@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionCountryCode@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionCountryName@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionGPSAltitude@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionGPSLatitude@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionGPSLongitude@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionLocationId@
^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionLocationName@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionProvinceState@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionSublocation@^{__CFString=}$kCGImagePropertyIPTCExtRatingRegionWorldRegion@^{__CFString=}$kCGImagePropertyIPTCExtRatingScaleMaxValue@^{__CFString=}$kCGImagePropertyIPTCExtRatingScaleMinValue@^{__CFString=}$kCGImagePropertyIPTCExtRatingSourceLink@^{__CFString=}$kCGImagePropertyIPTCExtRatingValue@^{__CFString=}$kCGImagePropertyIPTCExtRatingValueLogoLink@^{__CFString=}$kCGImagePropertyIPTCExtRegistryEntryRole@^{__CFString=}$kCGImagePropertyIPTCExtRegistryID@^{__CFString=}$kCGImagePropertyIPTCExtRegistryItemID@^{__CFString=}$kCGImagePropertyIPTCExtRegistryOrganisationID@^{__CFString=}$kCGImagePropertyIPTCExtReleaseReady@^{__CFString=}$kCGImagePropertyIPTCExtSeason@^{__CFString=}$kCGImagePropertyIPTCExtSeasonIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtSeasonName@^{__CFString=}$kCGImagePropertyIPTCExtSeasonNumber@^{__CFString=}$kCGImagePropertyIPTCExtSeries@^{__CFString=}$kCGImagePropertyIPTCExtSeriesIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtSeriesName@^{__CFString=}$kCGImagePropertyIPTCExtShownEvent@^{__CFString=}$kCGImagePropertyIPTCExtShownEventIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtShownEventName@^{__CFString=}$kCGImagePropertyIPTCExtStorylineIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtStreamReady@^{__CFString=}$kCGImagePropertyIPTCExtStylePeriod@^{__CFString=}$kCGImagePropertyIPTCExtSupplyChainSource@^{__CFString=}$kCGImagePropertyIPTCExtSupplyChainSourceIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtSupplyChainSourceName@^{__CFString=}$kCGImagePropertyIPTCExtTemporalCoverage@^{__CFString=}$kCGImagePropertyIPTCExtTemporalCoverageFrom@^{__CFString=}$kCGImagePropertyIPTCExtTemporalCoverageTo@^{__CFString=}$kCGImagePropertyIPTCExtTranscript@^{__CFString=}$kCGImagePropertyIPTCExtTranscriptLink@^{__CFString=}$kCGImagePropertyIPTCExtTranscriptLinkLink@^{__CFString=}$kCGImagePropertyIPTCExtTranscriptLinkLinkQualifier@^{__CFString=}$kCGImagePropertyIPTCExtVideoBitrate@^{__CFString=}$kCGImagePropertyIPTCExtVideoBitrateMode@^{__CFString=}$kCGImagePropertyIPTCExtVideoDisplayAspectRatio@^{__CFString=}$kCGImagePropertyIPTCExtVideoEncodingProfile@^{__CFString=}$kCGImagePropertyIPTCExtVideoShotType@^{__CFString=}$kCGImagePropertyIPTCExtVideoShotTypeIdentifier@^{__CFString=}$kCGImagePropertyIPTCExtVideoShotTypeName@^{__CFString=}$kCGImagePropertyIPTCExtVideoStreamsCount@^{__CFString=}$kCGImagePropertyIPTCExtVisualColor@^{__CFString=}$kCGImagePropertyIPTCExtWorkflowTag@^{__CFString=}$kCGImagePropertyIPTCExtWorkflowTagCvId@^{__CFString=}$kCGImagePropertyIPTCExtWorkflowTagCvTermId@^{__CFString=}$kCGImagePropertyIPTCExtWorkflowTagCvTermName@^{__CFString=}$kCGImagePropertyIPTCExtWorkflowTagCvTermRefinedAbout@^{__CFString=}$kCGImagePropertyIPTCFixtureIdentifier@^{__CFString=}$kCGImagePropertyIPTCHeadline@^{__CFString=}$kCGImagePropertyIPTCImageOrientation@^{__CFString=}$kCGImagePropertyIPTCImageType@^{__CFString=}$kCGImagePropertyIPTCKeywords@^{__CFString=}$kCGImagePropertyIPTCLanguageIdentifier@^{__CFString=}$kCGImagePropertyIPTCObjectAttributeReference@^{__CFString=}$kCGImagePropertyIPTCObjectCycle@^{__CFString=}$kCGImagePropertyIPTCObjectName@^{__CFString=}$kCGImagePropertyIPTCObjectTypeReference@^{__CFString=}$kCGImagePropertyIPTCOriginalTransmissionReference@^{__CFString=}$kCGImagePropertyIPTCOriginatingProgram@^{__CFString=}$kCGImagePropertyIPTCProgramVersion@^{__CFString=}$kCGImagePropertyIPTCProvinceStat
e@^{__CFString=}$kCGImagePropertyIPTCReferenceDate@^{__CFString=}$kCGImagePropertyIPTCReferenceNumber@^{__CFString=}$kCGImagePropertyIPTCReferenceService@^{__CFString=}$kCGImagePropertyIPTCReleaseDate@^{__CFString=}$kCGImagePropertyIPTCReleaseTime@^{__CFString=}$kCGImagePropertyIPTCRightsUsageTerms@^{__CFString=}$kCGImagePropertyIPTCScene@^{__CFString=}$kCGImagePropertyIPTCSource@^{__CFString=}$kCGImagePropertyIPTCSpecialInstructions@^{__CFString=}$kCGImagePropertyIPTCStarRating@^{__CFString=}$kCGImagePropertyIPTCSubLocation@^{__CFString=}$kCGImagePropertyIPTCSubjectReference@^{__CFString=}$kCGImagePropertyIPTCSupplementalCategory@^{__CFString=}$kCGImagePropertyIPTCTimeCreated@^{__CFString=}$kCGImagePropertyIPTCUrgency@^{__CFString=}$kCGImagePropertyIPTCWriterEditor@^{__CFString=}$kCGImagePropertyImageCount@^{__CFString=}$kCGImagePropertyImages@^{__CFString=}$kCGImagePropertyIsFloat@^{__CFString=}$kCGImagePropertyIsIndexed@^{__CFString=}$kCGImagePropertyJFIFDensityUnit@^{__CFString=}$kCGImagePropertyJFIFDictionary@^{__CFString=}$kCGImagePropertyJFIFIsProgressive@^{__CFString=}$kCGImagePropertyJFIFVersion@^{__CFString=}$kCGImagePropertyJFIFXDensity@^{__CFString=}$kCGImagePropertyJFIFYDensity@^{__CFString=}$kCGImagePropertyMakerAppleDictionary@^{__CFString=}$kCGImagePropertyMakerCanonAspectRatioInfo@^{__CFString=}$kCGImagePropertyMakerCanonCameraSerialNumber@^{__CFString=}$kCGImagePropertyMakerCanonContinuousDrive@^{__CFString=}$kCGImagePropertyMakerCanonDictionary@^{__CFString=}$kCGImagePropertyMakerCanonFirmware@^{__CFString=}$kCGImagePropertyMakerCanonFlashExposureComp@^{__CFString=}$kCGImagePropertyMakerCanonImageSerialNumber@^{__CFString=}$kCGImagePropertyMakerCanonLensModel@^{__CFString=}$kCGImagePropertyMakerCanonOwnerName@^{__CFString=}$kCGImagePropertyMakerFujiDictionary@^{__CFString=}$kCGImagePropertyMakerMinoltaDictionary@^{__CFString=}$kCGImagePropertyMakerNikonCameraSerialNumber@^{__CFString=}$kCGImagePropertyMakerNikonColorMode@^{__CFString=}$kCGImagePropertyMakerNikonDictionary@^{__CFString=}$kCGImagePropertyMakerNikonDigitalZoom@^{__CFString=}$kCGImagePropertyMakerNikonFlashExposureComp@^{__CFString=}$kCGImagePropertyMakerNikonFlashSetting@^{__CFString=}$kCGImagePropertyMakerNikonFocusDistance@^{__CFString=}$kCGImagePropertyMakerNikonFocusMode@^{__CFString=}$kCGImagePropertyMakerNikonISOSelection@^{__CFString=}$kCGImagePropertyMakerNikonISOSetting@^{__CFString=}$kCGImagePropertyMakerNikonImageAdjustment@^{__CFString=}$kCGImagePropertyMakerNikonLensAdapter@^{__CFString=}$kCGImagePropertyMakerNikonLensInfo@^{__CFString=}$kCGImagePropertyMakerNikonLensType@^{__CFString=}$kCGImagePropertyMakerNikonQuality@^{__CFString=}$kCGImagePropertyMakerNikonSharpenMode@^{__CFString=}$kCGImagePropertyMakerNikonShootingMode@^{__CFString=}$kCGImagePropertyMakerNikonShutterCount@^{__CFString=}$kCGImagePropertyMakerNikonWhiteBalanceMode@^{__CFString=}$kCGImagePropertyMakerOlympusDictionary@^{__CFString=}$kCGImagePropertyMakerPentaxDictionary@^{__CFString=}$kCGImagePropertyNamedColorSpace@^{__CFString=}$kCGImagePropertyOpenEXRAspectRatio@^{__CFString=}$kCGImagePropertyOpenEXRDictionary@^{__CFString=}$kCGImagePropertyOrientation@^{__CFString=}$kCGImagePropertyPNGAuthor@^{__CFString=}$kCGImagePropertyPNGChromaticities@^{__CFString=}$kCGImagePropertyPNGComment@^{__CFString=}$kCGImagePropertyPNGCompressionFilter@^{__CFString=}$kCGImagePropertyPNGCopyright@^{__CFString=}$kCGImagePropertyPNGCreationTime@^{__CFString=}$kCGImagePropertyPNGDescription@^{__CFString=}$kCGImagePropertyPNGDictionary@^{__CFStrin
g=}$kCGImagePropertyPNGDisclaimer@^{__CFString=}$kCGImagePropertyPNGGamma@^{__CFString=}$kCGImagePropertyPNGInterlaceType@^{__CFString=}$kCGImagePropertyPNGModificationTime@^{__CFString=}$kCGImagePropertyPNGSoftware@^{__CFString=}$kCGImagePropertyPNGSource@^{__CFString=}$kCGImagePropertyPNGTitle@^{__CFString=}$kCGImagePropertyPNGWarning@^{__CFString=}$kCGImagePropertyPNGXPixelsPerMeter@^{__CFString=}$kCGImagePropertyPNGYPixelsPerMeter@^{__CFString=}$kCGImagePropertyPNGsRGBIntent@^{__CFString=}$kCGImagePropertyPixelFormat@^{__CFString=}$kCGImagePropertyPixelHeight@^{__CFString=}$kCGImagePropertyPixelWidth@^{__CFString=}$kCGImagePropertyPrimaryImage$kCGImagePropertyProfileName@^{__CFString=}$kCGImagePropertyRawDictionary@^{__CFString=}$kCGImagePropertyTIFFArtist@^{__CFString=}$kCGImagePropertyTIFFCompression@^{__CFString=}$kCGImagePropertyTIFFCopyright@^{__CFString=}$kCGImagePropertyTIFFDateTime@^{__CFString=}$kCGImagePropertyTIFFDictionary@^{__CFString=}$kCGImagePropertyTIFFDocumentName@^{__CFString=}$kCGImagePropertyTIFFHostComputer@^{__CFString=}$kCGImagePropertyTIFFImageDescription@^{__CFString=}$kCGImagePropertyTIFFMake@^{__CFString=}$kCGImagePropertyTIFFModel@^{__CFString=}$kCGImagePropertyTIFFOrientation@^{__CFString=}$kCGImagePropertyTIFFPhotometricInterpretation@^{__CFString=}$kCGImagePropertyTIFFPrimaryChromaticities@^{__CFString=}$kCGImagePropertyTIFFResolutionUnit@^{__CFString=}$kCGImagePropertyTIFFSoftware@^{__CFString=}$kCGImagePropertyTIFFTileLength@^{__CFString=}$kCGImagePropertyTIFFTileWidth@^{__CFString=}$kCGImagePropertyTIFFTransferFunction@^{__CFString=}$kCGImagePropertyTIFFWhitePoint@^{__CFString=}$kCGImagePropertyTIFFXResolution@^{__CFString=}$kCGImagePropertyTIFFYResolution@^{__CFString=}$kCGImagePropertyThumbnailImages@^{__CFString=}$kCGImagePropertyWidth@^{__CFString=}$kCGImageSourceCreateThumbnailFromImageAlways@^{__CFString=}$kCGImageSourceCreateThumbnailFromImageIfAbsent@^{__CFString=}$kCGImageSourceCreateThumbnailWithTransform@^{__CFString=}$kCGImageSourceShouldAllowFloat@^{__CFString=}$kCGImageSourceShouldCache@^{__CFString=}$kCGImageSourceShouldCacheImmediately@^{__CFString=}$kCGImageSourceSubsampleFactor@^{__CFString=}$kCGImageSourceThumbnailMaxPixelSize@^{__CFString=}$kCGImageSourceTypeIdentifierHint@^{__CFString=}$''' -enums = '''$IIO_HAS_IOSURFACE@1$IMAGEIO_PNG_ALL_FILTERS@248$IMAGEIO_PNG_FILTER_AVG@64$IMAGEIO_PNG_FILTER_NONE@8$IMAGEIO_PNG_FILTER_PAETH@128$IMAGEIO_PNG_FILTER_SUB@16$IMAGEIO_PNG_FILTER_UP@32$IMAGEIO_PNG_NO_FILTERS@0$kCGImageMetadataErrorBadArgument@2$kCGImageMetadataErrorConflictingArguments@3$kCGImageMetadataErrorPrefixConflict@4$kCGImageMetadataErrorUnknown@0$kCGImageMetadataErrorUnsupportedFormat@1$kCGImageMetadataTypeAlternateArray@4$kCGImageMetadataTypeAlternateText@5$kCGImageMetadataTypeArrayOrdered@3$kCGImageMetadataTypeArrayUnordered@2$kCGImageMetadataTypeDefault@0$kCGImageMetadataTypeInvalid@-1$kCGImageMetadataTypeString@1$kCGImageMetadataTypeStructure@6$kCGImagePropertyOrientationDown@3$kCGImagePropertyOrientationDownMirrored@4$kCGImagePropertyOrientationLeft@8$kCGImagePropertyOrientationLeftMirrored@5$kCGImagePropertyOrientationRight@6$kCGImagePropertyOrientationRightMirrored@7$kCGImagePropertyOrientationUp@1$kCGImagePropertyOrientationUpMirrored@2$kCGImageStatusComplete@0$kCGImageStatusIncomplete@-1$kCGImageStatusInvalidData@-4$kCGImageStatusReadingHeader@-2$kCGImageStatusUnexpectedEOF@-5$kCGImageStatusUnknownType@-3$''' -misc.update({}) -functions={'CGImageMetadataSetValueMatchingImageProperty': 
(b'B^{CGImageMetadata=}^{__CFString=}^{__CFString=}@',), 'CGImageMetadataCreateFromXMPData': (b'^{CGImageMetadata=}^{__CFData=}', '', {'retval': {'already_cfretained': True}}), 'CGImageSourceGetCount': (sel32or64(b'L^{CGImageSource=}', b'Q^{CGImageSource=}'),), 'CGImageSourceCreateWithData': (b'^{CGImageSource=}^{__CFData=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGImageSourceCopyTypeIdentifiers': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataCopyTagWithPath': (b'^{CGImageMetadataTag=}^{CGImageMetadata=}^{CGImageMetadataTag=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataGetTypeID': (sel32or64(b'L', b'Q'),), 'CGImageDestinationAddAuxiliaryDataInfo': (b'v^{CGImageDestination=}@@',), 'CGImageMetadataTagGetType': (b'i^{CGImageMetadataTag=}',), 'CGImageSourceGetStatus': (b'i^{CGImageSource=}',), 'CGImageMetadataCreateXMPData': (b'^{__CFData=}^{CGImageMetadata=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGImageSourceCreateImageAtIndex': (sel32or64(b'^{CGImage=}^{CGImageSource=}L^{__CFDictionary=}', b'^{CGImage=}^{CGImageSource=}Q^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataTagCopyNamespace': (b'^{__CFString=}^{CGImageMetadataTag=}', '', {'retval': {'already_cfretained': True}}), 'CGImageSourceCopyMetadataAtIndex': (sel32or64(b'^{CGImageMetadata=}^{CGImageSource=}L^{__CFDictionary=}', b'^{CGImageMetadata=}^{CGImageSource=}Q^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CGImageDestinationCreateWithURL': (sel32or64(b'^{CGImageDestination=}^{__CFURL=}^{__CFString=}L^{__CFDictionary=}', b'^{CGImageDestination=}^{__CFURL=}^{__CFString=}Q^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CGImageSourceRemoveCacheAtIndex': (sel32or64(b'v^{CGImageSource=}L', b'v^{CGImageSource=}Q'),), 'CGImageSourceCreateThumbnailAtIndex': (sel32or64(b'^{CGImage=}^{CGImageSource=}L^{__CFDictionary=}', b'^{CGImage=}^{CGImageSource=}Q^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CGImageSourceCreateIncremental': (b'^{CGImageSource=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGImageSourceCopyPropertiesAtIndex': (sel32or64(b'^{__CFDictionary=}^{CGImageSource=}L^{__CFDictionary=}', b'^{__CFDictionary=}^{CGImageSource=}Q^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CGImageSourceUpdateData': (b'v^{CGImageSource=}^{__CFData=}B',), 'CGImageDestinationSetProperties': (b'v^{CGImageDestination=}^{__CFDictionary=}',), 'CGImageMetadataSetTagWithPath': (b'B^{CGImageMetadata=}^{CGImageMetadataTag=}^{__CFString=}^{CGImageMetadataTag=}',), 'CGImageDestinationCreateWithDataConsumer': (sel32or64(b'^{CGImageDestination=}^{CGDataConsumer=}^{__CFString=}L^{__CFDictionary=}', b'^{CGImageDestination=}^{CGDataConsumer=}^{__CFString=}Q^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CGImageDestinationCopyTypeIdentifiers': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'CGImageDestinationCreateWithData': (sel32or64(b'^{CGImageDestination=}^{__CFData=}^{__CFString=}L^{__CFDictionary=}', b'^{CGImageDestination=}^{__CFData=}^{__CFString=}Q^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}}), 'CGImageSourceCopyAuxiliaryDataInfoAtIndex': (b'@^{CGImageSource=}L@', '', {'retval': {'already_cfretained': True}}), 'CGImageSourceGetTypeID': (sel32or64(b'L', b'Q'),), 'CGImageDestinationAddImageAndMetadata': 
(b'v^{CGImageDestination=}^{CGImage=}^{CGImageMetadata=}^{__CFDictionary=}',), 'CGImageMetadataTagCreate': (b'^{CGImageMetadataTag=}^{__CFString=}^{__CFString=}^{__CFString=}i@', '', {'retval': {'already_cfretained': True}}), 'CGImageDestinationCopyImageSource': (b'B^{CGImageDestination=}^{CGImageSource=}^{__CFDictionary=}^^{__CFError=}', '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CGImageSourceUpdateDataProvider': (b'v^{CGImageSource=}^{CGDataProvider=}B',), 'CGImageDestinationAddImage': (b'v^{CGImageDestination=}^{CGImage=}^{__CFDictionary=}',), 'CGImageMetadataRemoveTagWithPath': (b'B^{CGImageMetadata=}^{CGImageMetadataTag=}^{__CFString=}',), 'CGImageSourceGetStatusAtIndex': (sel32or64(b'i^{CGImageSource=}L', b'i^{CGImageSource=}Q'),), 'CGImageSourceCreateWithURL': (b'^{CGImageSource=}^{__CFURL=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataTagCopyValue': (b'@^{CGImageMetadataTag=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataCopyTags': (b'^{__CFArray=}^{CGImageMetadata=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataCopyStringValueWithPath': (b'^{__CFString=}^{CGImageMetadata=}^{CGImageMetadataTag=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CGImageSourceGetType': (b'^{__CFString=}^{CGImageSource=}',), 'CGImageDestinationAddImageFromSource': (sel32or64(b'v^{CGImageDestination=}^{CGImageSource=}L^{__CFDictionary=}', b'v^{CGImageDestination=}^{CGImageSource=}Q^{__CFDictionary=}'),), 'CGImageDestinationGetTypeID': (sel32or64(b'L', b'Q'),), 'CGImageDestinationFinalize': (b'B^{CGImageDestination=}',), 'CGImageSourceGetPrimaryImageIndex': (b'l@',), 'CGImageSourceCopyProperties': (b'^{__CFDictionary=}^{CGImageSource=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataTagGetTypeID': (sel32or64(b'L', b'Q'),), 'CGImageMetadataSetValueWithPath': (b'B^{CGImageMetadata=}^{CGImageMetadataTag=}^{__CFString=}@',), 'CGImageMetadataCopyTagMatchingImageProperty': (b'^{CGImageMetadataTag=}^{CGImageMetadata=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataRegisterNamespaceForPrefix': (b'B^{CGImageMetadata=}^{__CFString=}^{__CFString=}^^{__CFError=}', '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'CGImageMetadataTagCopyPrefix': (b'^{__CFString=}^{CGImageMetadataTag=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataTagCopyName': (b'^{__CFString=}^{CGImageMetadataTag=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataEnumerateTagsUsingBlock': (b'v^{CGImageMetadata=}^{__CFString=}^{__CFDictionary=}@?', '', {'arguments': {3: {'callable': {'retval': {'type': b'B'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}, 2: {'type': '@'}}}}}}), 'CGImageMetadataCreateMutable': (b'^{CGImageMetadata=}', '', {'retval': {'already_cfretained': True}}), 'CGImageSourceCreateWithDataProvider': (b'^{CGImageSource=}^{CGDataProvider=}^{__CFDictionary=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataTagCopyQualifiers': (b'^{__CFArray=}^{CGImageMetadataTag=}', '', {'retval': {'already_cfretained': True}}), 'CGImageMetadataCreateMutableCopy': (b'^{CGImageMetadata=}^{CGImageMetadata=}', '', {'retval': {'already_cfretained': True}})} -aliases = {'IMAGEIO_AVAILABLE_STARTING': '__OSX_AVAILABLE_STARTING', 'IMAGEIO_AVAILABLE_BUT_DEPRECATED': 
'__OSX_AVAILABLE_BUT_DEPRECATED', '_iio_Nullable': '_Nullable', '_iio_Nonnull': '_Nonnull'} -cftypes=[('CGImageSourceRef', b'^{CGImageSource=}', 'CGImageSourceGetTypeID', None), ('CGImageDestinationRef', b'^{CGImageDestination=}', 'CGImageDestinationGetTypeID', None), ('CGImageMetadataTagRef', b'^{CGImageMetadataTag=}', 'CGImageMetadataTagGetTypeID', None), ('CGImageMetadataRef', b'^{CGImageMetadata=}', 'CGImageMetadataGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Quartz/ImageIO/_metadata.pyc b/env/lib/python2.7/site-packages/Quartz/ImageIO/_metadata.pyc deleted file mode 100644 index 6b6dbf4c..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/ImageIO/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/ImageKit/__init__.py b/env/lib/python2.7/site-packages/Quartz/ImageKit/__init__.py deleted file mode 100644 index 13bc40d3..00000000 --- a/env/lib/python2.7/site-packages/Quartz/ImageKit/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -''' -Python mapping for the ImageKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Cocoa - -from Quartz.ImageKit import _metadata -import Quartz.ImageKit._imagekit - -objc.addConvenienceForBasicMapping('IKImageBrowserGridGroup', False) -objc.addConvenienceForBasicMapping('IKImageCell', False) -objc.addConvenienceForBasicMapping('IKImageState', False) -objc.addConvenienceForBasicSequence('IKLinkedList', True) - -sys.modules['Quartz.ImageKit'] = mod = objc.ObjCLazyModule('Quartz.ImageKit', - "com.apple.imageKit", - objc.pathForFramework("/System/Library/Frameworks/Quartz.framework/Frameworks/ImageKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Cocoa, )) - -import sys -del sys.modules['Quartz.ImageKit._metadata'] diff --git a/env/lib/python2.7/site-packages/Quartz/ImageKit/__init__.pyc b/env/lib/python2.7/site-packages/Quartz/ImageKit/__init__.pyc deleted file mode 100644 index 673da1b7..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/ImageKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/ImageKit/_imagekit.so b/env/lib/python2.7/site-packages/Quartz/ImageKit/_imagekit.so deleted file mode 100755 index 3075647e..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/ImageKit/_imagekit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/ImageKit/_metadata.py b/env/lib/python2.7/site-packages/Quartz/ImageKit/_metadata.py deleted file mode 100644 index 72dd41dd..00000000 --- a/env/lib/python2.7/site-packages/Quartz/ImageKit/_metadata.py +++ /dev/null @@ -1,149 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 26 07:59:02 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$IKFilterBrowserDefaultInputImage$IKFilterBrowserExcludeCategories$IKFilterBrowserExcludeFilters$IKFilterBrowserFilterDoubleClickNotification$IKFilterBrowserFilterSelectedNotification$IKFilterBrowserShowCategories$IKFilterBrowserShowPreview$IKFilterBrowserWillPreviewFilterNotification$IKImageBrowserBackgroundColorKey$IKImageBrowserCGImageRepresentationType$IKImageBrowserCGImageSourceRepresentationType$IKImageBrowserCellBackgroundLayer$IKImageBrowserCellForegroundLayer$IKImageBrowserCellLayerTypeBackground$IKImageBrowserCellLayerTypeForeground$IKImageBrowserCellLayerTypePlaceHolder$IKImageBrowserCellLayerTypeSelection$IKImageBrowserCellPlaceHolderLayer$IKImageBrowserCellSelectionLayer$IKImageBrowserCellsHighlightedTitleAttributesKey$IKImageBrowserCellsOutlineColorKey$IKImageBrowserCellsSubtitleAttributesKey$IKImageBrowserCellsTitleAttributesKey$IKImageBrowserGroupBackgroundColorKey$IKImageBrowserGroupFooterLayer$IKImageBrowserGroupHeaderLayer$IKImageBrowserGroupRangeKey$IKImageBrowserGroupStyleKey$IKImageBrowserGroupTitleKey$IKImageBrowserIconRefPathRepresentationType$IKImageBrowserIconRefRepresentationType$IKImageBrowserNSBitmapImageRepresentationType$IKImageBrowserNSDataRepresentationType$IKImageBrowserNSImageRepresentationType$IKImageBrowserNSURLRepresentationType$IKImageBrowserPDFPageRepresentationType$IKImageBrowserPathRepresentationType$IKImageBrowserQCCompositionPathRepresentationType$IKImageBrowserQCCompositionRepresentationType$IKImageBrowserQTMoviePathRepresentationType$IKImageBrowserQTMovieRepresentationType$IKImageBrowserQuickLookPathRepresentationType$IKImageBrowserSelectionColorKey$IKOverlayTypeBackground$IKOverlayTypeImage$IKPictureTakerAllowsEditingKey$IKPictureTakerAllowsFileChoosingKey$IKPictureTakerAllowsVideoCaptureKey$IKPictureTakerCropAreaSizeKey$IKPictureTakerImageTransformsKey$IKPictureTakerInformationalTextKey$IKPictureTakerOutputImageMaxSizeKey$IKPictureTakerRemainOpenAfterValidateKey$IKPictureTakerShowAddressBookPicture$IKPictureTakerShowAddressBookPictureKey$IKPictureTakerShowEffectsKey$IKPictureTakerShowEmptyPicture$IKPictureTakerShowEmptyPictureKey$IKPictureTakerShowRecentPictureKey$IKPictureTakerUpdateRecentPictureKey$IKSlideshowAudioFile$IKSlideshowModeImages$IKSlideshowModeOther$IKSlideshowModePDF$IKSlideshowPDFDisplayBox$IKSlideshowPDFDisplayMode$IKSlideshowPDFDisplaysAsBook$IKSlideshowScreen$IKSlideshowStartIndex$IKSlideshowStartPaused$IKSlideshowWrapAround$IKToolModeAnnotate$IKToolModeCrop$IKToolModeMove$IKToolModeNone$IKToolModeRotate$IKToolModeSelect$IKToolModeSelectEllipse$IKToolModeSelectLasso$IKToolModeSelectRect$IKUIFlavorAllowFallback$IKUISizeFlavor$IKUISizeMini$IKUISizeRegular$IKUISizeSmall$IKUImaxSize$IK_ApertureBundleIdentifier$IK_MailBundleIdentifier$IK_PhotosBundleIdentifier$IK_iPhotoBundleIdentifier$''' -enums = '''$IKCameraDeviceViewDisplayModeIcon@1$IKCameraDeviceViewDisplayModeTable@0$IKCameraDeviceViewTransferModeFileBased@0$IKCameraDeviceViewTransferModeMemoryBased@1$IKCellsStyleNone@0$IKCellsStyleOutlined@2$IKCellsStyleShadowed@1$IKCellsStyleSubtitled@8$IKCellsStyleTitled@4$IKDeviceBrowserViewDisplayModeIcon@2$IKDeviceBrowserViewDisplayModeOutline@1$IKDeviceBrowserViewDisplayModeTable@0$IKGroupBezelStyle@0$IKGroupDisclosureStyle@1$IKImageBrowserDropBefore@1$IKImageBrowserDropOn@0$IKImageStateInvalid@1$IKImageStateNoImage@0$IKImageStateReady@2$IKScannerDeviceViewDisplayModeAdvanced@1$IKScannerDeviceViewDisplayModeSimple@0$IKScannerDeviceViewTransferModeFileBased@0$IKScannerDeviceViewTransferModeMemoryBased@1$''' -misc.update({}) 
-aliases = {'IKImagePickerShowEffectsKey': 'IKPictureTakerShowEffectsKey', 'IKImagePickerOutputImageMaxSizeKey': 'IKPictureTakerOutputImageMaxSizeKey', 'IKImagePickerImageTransformsKey': 'IKPictureTakerImageTransformsKey', 'IKImagePickerAllowsFileChoosingKey': 'IKPictureTakerAllowsFileChoosingKey', 'IKImagePickerAllowsEditingKey': 'IKPictureTakerAllowsEditingKey', 'IKImagePickerInformationalTextKey': 'IKPictureTakerInformationalTextKey', 'IKImagePickerCropAreaSizeKey': 'IKPictureTakerCropAreaSizeKey', 'IKImagePickerAllowsVideoCaptureKey': 'IKPictureTakerAllowsVideoCaptureKey', 'IKImagePickerUpdateRecentPictureKey': 'IKPictureTakerUpdateRecentPictureKey', 'IKImagePickerShowRecentPictureKey': 'IKPictureTakerShowRecentPictureKey'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'IKCameraDeviceView', b'canDeleteSelectedItems', {'retval': {'type': b'Z'}}) - r(b'IKCameraDeviceView', b'canDownloadSelectedItems', {'retval': {'type': b'Z'}}) - r(b'IKCameraDeviceView', b'canRotateSelectedItemsLeft', {'retval': {'type': b'Z'}}) - r(b'IKCameraDeviceView', b'canRotateSelectedItemsRight', {'retval': {'type': b'Z'}}) - r(b'IKCameraDeviceView', b'displaysDownloadsDirectoryControl', {'retval': {'type': b'Z'}}) - r(b'IKCameraDeviceView', b'displaysPostProcessApplicationControl', {'retval': {'type': b'Z'}}) - r(b'IKCameraDeviceView', b'hasDisplayModeIcon', {'retval': {'type': b'Z'}}) - r(b'IKCameraDeviceView', b'hasDisplayModeTable', {'retval': {'type': b'Z'}}) - r(b'IKCameraDeviceView', b'selectIndexes:byExtendingSelection:', {'arguments': {3: {'type': b'Z'}}}) - r(b'IKCameraDeviceView', b'setDisplaysDownloadsDirectoryControl:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKCameraDeviceView', b'setDisplaysPostProcessApplicationControl:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKCameraDeviceView', b'setHasDisplayModeIcon:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKCameraDeviceView', b'setHasDisplayModeTable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKDeviceBrowserView', b'displaysLocalCameras', {'retval': {'type': b'Z'}}) - r(b'IKDeviceBrowserView', b'displaysLocalScanners', {'retval': {'type': b'Z'}}) - r(b'IKDeviceBrowserView', b'displaysNetworkCameras', {'retval': {'type': b'Z'}}) - r(b'IKDeviceBrowserView', b'displaysNetworkScanners', {'retval': {'type': b'Z'}}) - r(b'IKDeviceBrowserView', b'setDisplaysLocalCameras:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKDeviceBrowserView', b'setDisplaysLocalScanners:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKDeviceBrowserView', b'setDisplaysNetworkCameras:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKDeviceBrowserView', b'setDisplaysNetworkScanners:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKFilterBrowserPanel', b'beginSheetWithOptions:modalForWindow:modalDelegate:didEndSelector:contextInfo:', {'arguments': {5: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) - r(b'IKFilterBrowserPanel', b'beginWithOptions:modelessDelegate:didEndSelector:contextInfo:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) - r(b'IKFilterBrowserView', b'setPreviewState:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageBrowserCell', b'isSelected', {'retval': {'type': b'Z'}}) - r(b'IKImageBrowserView', b'allowsDroppingOnItems', {'retval': {'type': b'Z'}}) - r(b'IKImageBrowserView', b'allowsEmptySelection', {'retval': {'type': b'Z'}}) - r(b'IKImageBrowserView', b'allowsMultipleSelection', {'retval': {'type': b'Z'}}) - r(b'IKImageBrowserView', b'allowsReordering', {'retval': {'type': b'Z'}}) - 
r(b'IKImageBrowserView', b'animates', {'retval': {'type': b'Z'}}) - r(b'IKImageBrowserView', b'canControlQuickLookPanel', {'retval': {'type': b'Z'}}) - r(b'IKImageBrowserView', b'constrainsToOriginalSize', {'retval': {'type': b'Z'}}) - r(b'IKImageBrowserView', b'isGroupExpandedAtIndex:', {'retval': {'type': b'Z'}}) - r(b'IKImageBrowserView', b'setAllowsDroppingOnItems:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageBrowserView', b'setAllowsEmptySelection:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageBrowserView', b'setAllowsMultipleSelection:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageBrowserView', b'setAllowsReordering:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageBrowserView', b'setAnimates:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageBrowserView', b'setCanControlQuickLookPanel:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageBrowserView', b'setConstrainsToOriginalSize:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageBrowserView', b'setSelectionIndexes:byExtendingSelection:', {'arguments': {3: {'type': b'Z'}}}) - r(b'IKImagePicker', b'beginImagePickerSheetForWindow:withDelegate:didEndSelector:contextInfo:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@I^v', b'v@:@Q^v')}}}) - r(b'IKImagePicker', b'beginImagePickerWithDelegate:didEndSelector:contextInfo:', {'arguments': {3: {'sel_of_type': sel32or64(b'v@:@I^v', b'v@:@Q^v')}}}) - r(b'IKImageView', b'autohidesScrollers', {'retval': {'type': b'Z'}}) - r(b'IKImageView', b'autoresizes', {'retval': {'type': b'Z'}}) - r(b'IKImageView', b'doubleClickOpensImageEditPanel', {'retval': {'type': b'Z'}}) - r(b'IKImageView', b'editable', {'retval': {'type': b'Z'}}) - r(b'IKImageView', b'hasHorizontalScroller', {'retval': {'type': b'Z'}}) - r(b'IKImageView', b'hasVerticalScroller', {'retval': {'type': b'Z'}}) - r(b'IKImageView', b'setAutohidesScrollers:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageView', b'setAutoresizes:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageView', b'setDoubleClickOpensImageEditPanel:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageView', b'setEditable:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageView', b'setHasHorizontalScroller:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageView', b'setHasVerticalScroller:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageView', b'setSupportsDragAndDrop:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKImageView', b'supportsDragAndDrop', {'retval': {'type': b'Z'}}) - r(b'IKPictureTaker', b'beginPictureTakerSheetForWindow:withDelegate:didEndSelector:contextInfo:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) - r(b'IKPictureTaker', b'beginPictureTakerWithDelegate:didEndSelector:contextInfo:', {'arguments': {3: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) - r(b'IKPictureTaker', b'mirroring', {'retval': {'type': b'Z'}}) - r(b'IKPictureTaker', b'popUpRecentsMenuForView:withDelegate:didEndSelector:contextInfo:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) - r(b'IKPictureTaker', b'setMirroring:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKScannerDeviceView', b'displaysDownloadsDirectoryControl', {'retval': {'type': b'Z'}}) - r(b'IKScannerDeviceView', b'displaysPostProcessApplicationControl', {'retval': {'type': b'Z'}}) - r(b'IKScannerDeviceView', b'hasDisplayModeAdvanced', {'retval': {'type': b'Z'}}) - r(b'IKScannerDeviceView', b'hasDisplayModeSimple', {'retval': {'type': b'Z'}}) - r(b'IKScannerDeviceView', b'setDisplaysDownloadsDirectoryControl:', {'arguments': {2: 
{'type': b'Z'}}}) - r(b'IKScannerDeviceView', b'setDisplaysPostProcessApplicationControl:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKScannerDeviceView', b'setHasDisplayModeAdvanced:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKScannerDeviceView', b'setHasDisplayModeSimple:', {'arguments': {2: {'type': b'Z'}}}) - r(b'IKSlideshow', b'canExportToApplication:', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'cameraDeviceView:didDownloadFile:location:fileData:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'cameraDeviceView:didEncounterError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'cameraDeviceViewSelectionDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'canExportSlideshowItemAtIndex:toApplication:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'@'}}}) - r(b'NSObject', b'deviceBrowserView:didEncounterError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'deviceBrowserView:selectionDidChange:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'hasAdjustMode', {'required': False, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'hasDetailsMode', {'required': False, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'hasEffectsMode', {'required': False, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'image', {'required': True, 'retval': {'type': b'^{CGImage=}'}}) - r(b'NSObject', b'imageBrowser:backgroundWasRightClickedWithEvent:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'imageBrowser:cellAtIndex:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'L')}}}) - r(b'NSObject', b'imageBrowser:cellWasDoubleClickedAtIndex:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'imageBrowser:cellWasRightClickedAtIndex:withEvent:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': b'@'}}}) - r(b'NSObject', b'imageBrowser:groupAtIndex:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'imageBrowser:itemAtIndex:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'imageBrowser:moveCellsAtIndexes:toIndex:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'L')}}}) - r(b'NSObject', b'imageBrowser:moveItemsAtIndexes:toIndex:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'imageBrowser:removeCellsAtIndexes:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'imageBrowser:removeItemsAtIndexes:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'imageBrowser:writeCellsAtIndexes:toPasteboard:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', 
b'imageBrowser:writeItemsAtIndexes:toPasteboard:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'imageBrowserSelectionDidChange:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'imageProperties', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'imageRepresentation', {'retval': {'type': b'@'}}) - r(b'NSObject', b'imageRepresentationType', {'retval': {'type': b'@'}}) - r(b'NSObject', b'imageSubtitle', {'retval': {'type': b'@'}}) - r(b'NSObject', b'imageTitle', {'retval': {'type': b'@'}}) - r(b'NSObject', b'imageUID', {'retval': {'type': b'@'}}) - r(b'NSObject', b'imageVersion', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'isSelectable', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'nameOfSlideshowItemAtIndex:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'numberOfCellsInImageBrowser:', {'retval': {'type': sel32or64(b'I', b'L')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfGroupsInImageBrowser:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfItemsInImageBrowser:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfSlideshowItems', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'provideViewForUIConfiguration:excludedKeys:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'saveOptions:shouldShowUTType:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'scannerDeviceView:didEncounterError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'scannerDeviceView:didScanToBandData:scanInfo:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'scannerDeviceView:didScanToURL:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'scannerDeviceView:didScanToURL:fileData:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'setImage:imageProperties:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'^{CGImage=}'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'slideshowDidChangeCurrentIndex:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'slideshowDidStop', {'required': False, 'retval': {'type': b'v'}}) - r(b'NSObject', b'slideshowItemAtIndex:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'slideshowWillStart', {'required': False, 'retval': {'type': b'v'}}) - r(b'NSObject', b'thumbnailWithMaximumSize:', {'required': False, 'retval': {'type': b'^{CGImage=}'}, 'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) -finally: - objc._updatingMetadata(False) -protocols={'IKImageBrowserItem': objc.informal_protocol('IKImageBrowserItem', [objc.selector(None, b'imageTitle', b'@@:', isRequired=False), objc.selector(None, b'imageSubtitle', b'@@:', 
isRequired=False), objc.selector(None, b'imageRepresentationType', b'@@:', isRequired=False), objc.selector(None, b'imageUID', b'@@:', isRequired=False), objc.selector(None, b'isSelectable', b'Z@:', isRequired=False), objc.selector(None, b'imageVersion', sel32or64(b'I@:', b'Q@:'), isRequired=False), objc.selector(None, b'imageRepresentation', b'@@:', isRequired=False)]), 'IKImageBrowserDataSourceDeprecated': objc.informal_protocol('IKImageBrowserDataSourceDeprecated', [objc.selector(None, b'imageBrowser:moveCellsAtIndexes:toIndex:', sel32or64(b'Z@:@@I', b'Z@:@@L'), isRequired=False), objc.selector(None, b'imageBrowser:cellAtIndex:', sel32or64(b'@@:@I', b'@@:@L'), isRequired=False), objc.selector(None, b'numberOfCellsInImageBrowser:', sel32or64(b'I@:@', b'L@:@'), isRequired=False), objc.selector(None, b'imageBrowser:writeCellsAtIndexes:toPasteboard:', b'v@:@@@', isRequired=False), objc.selector(None, b'imageBrowser:removeCellsAtIndexes:', b'v@:@@', isRequired=False)]), 'IKSaveOptionsDelegate': objc.informal_protocol('IKSaveOptionsDelegate', [objc.selector(None, b'saveOptions:shouldShowUTType:', b'Z@:@@', isRequired=False)]), 'IKImageBrowserDelegate': objc.informal_protocol('IKImageBrowserDelegate', [objc.selector(None, b'imageBrowser:cellWasRightClickedAtIndex:withEvent:', sel32or64(b'v@:@I@', b'v@:@Q@'), isRequired=False), objc.selector(None, b'imageBrowserSelectionDidChange:', b'v@:@', isRequired=False), objc.selector(None, b'imageBrowser:cellWasDoubleClickedAtIndex:', sel32or64(b'v@:@I', b'v@:@Q'), isRequired=False), objc.selector(None, b'imageBrowser:backgroundWasRightClickedWithEvent:', b'v@:@@', isRequired=False)]), 'IKImageBrowserDataSource': objc.informal_protocol('IKImageBrowserDataSource', [objc.selector(None, b'imageBrowser:groupAtIndex:', sel32or64(b'@@:@I', b'@@:@Q'), isRequired=False), objc.selector(None, b'numberOfItemsInImageBrowser:', sel32or64(b'I@:@', b'Q@:@'), isRequired=False), objc.selector(None, b'imageBrowser:moveItemsAtIndexes:toIndex:', sel32or64(b'Z@:@@I', b'Z@:@@Q'), isRequired=False), objc.selector(None, b'numberOfGroupsInImageBrowser:', sel32or64(b'I@:@', b'Q@:@'), isRequired=False), objc.selector(None, b'imageBrowser:itemAtIndex:', sel32or64(b'@@:@I', b'@@:@Q'), isRequired=False), objc.selector(None, b'imageBrowser:removeItemsAtIndexes:', b'v@:@@', isRequired=False), objc.selector(None, b'imageBrowser:writeItemsAtIndexes:toPasteboard:', sel32or64(b'I@:@@@', b'Q@:@@@'), isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Quartz/ImageKit/_metadata.pyc b/env/lib/python2.7/site-packages/Quartz/ImageKit/_metadata.pyc deleted file mode 100644 index ab2aa9e1..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/ImageKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/PDFKit/_PDFKit.so b/env/lib/python2.7/site-packages/Quartz/PDFKit/_PDFKit.so deleted file mode 100755 index b5aa59de..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/PDFKit/_PDFKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/PDFKit/__init__.py b/env/lib/python2.7/site-packages/Quartz/PDFKit/__init__.py deleted file mode 100644 index fa5cd17b..00000000 --- a/env/lib/python2.7/site-packages/Quartz/PDFKit/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the PDFKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import AppKit - -from Quartz.PDFKit import _metadata -from Quartz.PDFKit import _PDFKit - -sys.modules['Quartz.PDFKit'] = mod = objc.ObjCLazyModule('Quartz.PDFKit', - "com.apple.PDFKit", - objc.pathForFramework("/System/Library/Frameworks/Quartz.framework/Frameworks/PDFKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( AppKit,)) - -import sys -del sys.modules['Quartz.PDFKit._metadata'] diff --git a/env/lib/python2.7/site-packages/Quartz/PDFKit/__init__.pyc b/env/lib/python2.7/site-packages/Quartz/PDFKit/__init__.pyc deleted file mode 100644 index 5b276b45..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/PDFKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/PDFKit/_metadata.py b/env/lib/python2.7/site-packages/Quartz/PDFKit/_metadata.py deleted file mode 100644 index f0fd3296..00000000 --- a/env/lib/python2.7/site-packages/Quartz/PDFKit/_metadata.py +++ /dev/null @@ -1,143 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 26 07:59:02 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$PDFAnnotationHighlightingModeInvert$PDFAnnotationHighlightingModeNone$PDFAnnotationHighlightingModeOutline$PDFAnnotationHighlightingModePush$PDFAnnotationKeyAction$PDFAnnotationKeyAdditionalActions$PDFAnnotationKeyAppearanceDictionary$PDFAnnotationKeyAppearanceState$PDFAnnotationKeyBorder$PDFAnnotationKeyBorderStyle$PDFAnnotationKeyColor$PDFAnnotationKeyContents$PDFAnnotationKeyDate$PDFAnnotationKeyDefaultAppearance$PDFAnnotationKeyDestination$PDFAnnotationKeyFlags$PDFAnnotationKeyHighlightingMode$PDFAnnotationKeyIconName$PDFAnnotationKeyInklist$PDFAnnotationKeyInteriorColor$PDFAnnotationKeyLineEndingStyles$PDFAnnotationKeyLinePoints$PDFAnnotationKeyName$PDFAnnotationKeyOpen$PDFAnnotationKeyPage$PDFAnnotationKeyParent$PDFAnnotationKeyPopup$PDFAnnotationKeyQuadPoints$PDFAnnotationKeyQuadding$PDFAnnotationKeyRect$PDFAnnotationKeySubtype$PDFAnnotationKeyTextLabel$PDFAnnotationKeyWidgetAppearanceDictionary$PDFAnnotationKeyWidgetBackgroundColor$PDFAnnotationKeyWidgetBorderColor$PDFAnnotationKeyWidgetCaption$PDFAnnotationKeyWidgetDefaultValue$PDFAnnotationKeyWidgetDownCaption$PDFAnnotationKeyWidgetFieldFlags$PDFAnnotationKeyWidgetFieldType$PDFAnnotationKeyWidgetMaxLen$PDFAnnotationKeyWidgetOptions$PDFAnnotationKeyWidgetRolloverCaption$PDFAnnotationKeyWidgetRotation$PDFAnnotationKeyWidgetTextLabelUI$PDFAnnotationKeyWidgetValue$PDFAnnotationLineEndingStyleCircle$PDFAnnotationLineEndingStyleClosedArrow$PDFAnnotationLineEndingStyleDiamond$PDFAnnotationLineEndingStyleNone$PDFAnnotationLineEndingStyleOpenArrow$PDFAnnotationLineEndingStyleSquare$PDFAnnotationSubtypeCircle$PDFAnnotationSubtypeFreeText$PDFAnnotationSubtypeHighlight$PDFAnnotationSubtypeInk$PDFAnnotationSubtypeLine$PDFAnnotationSubtypeLink$PDFAnnotationSubtypePopup$PDFAnnotationSubtypeSquare$PDFAnnotationSubtypeStamp$PDFAnnotationSubtypeStrikeOut$PDFAnnotationSubtypeText$PDFAnnotationSubtypeUnderline$PDFAnnotationSubtypeWidget$PDFAnnotationTextIconTypeComment$PDFAnnotationTextIconTypeHelp$PDFAnnotationTextIconTypeInsert$PDFAnnotationTextIconTypeKey$PDFAnnotationTextIconTypeNewParagraph$PDFAnnotationTextIconTypeNote$PDFAnnota
tionTextIconTypeParagraph$PDFAnnotationWidgetSubtypeButton$PDFAnnotationWidgetSubtypeChoice$PDFAnnotationWidgetSubtypeSignature$PDFAnnotationWidgetSubtypeText$PDFAppearanceCharacteristicsKeyBackgroundColor$PDFAppearanceCharacteristicsKeyBorderColor$PDFAppearanceCharacteristicsKeyCaption$PDFAppearanceCharacteristicsKeyDownCaption$PDFAppearanceCharacteristicsKeyRolloverCaption$PDFAppearanceCharacteristicsKeyRotation$PDFBorderKeyDashPattern$PDFBorderKeyLineWidth$PDFBorderKeyStyle$PDFDocumentAuthorAttribute$PDFDocumentCreationDateAttribute$PDFDocumentCreatorAttribute$PDFDocumentDidBeginFindNotification$PDFDocumentDidBeginPageFindNotification$PDFDocumentDidBeginPageWriteNotification$PDFDocumentDidBeginWriteNotification$PDFDocumentDidEndFindNotification$PDFDocumentDidEndPageFindNotification$PDFDocumentDidEndPageWriteNotification$PDFDocumentDidEndWriteNotification$PDFDocumentDidFindMatchNotification$PDFDocumentDidUnlockNotification$PDFDocumentKeywordsAttribute$PDFDocumentModificationDateAttribute$PDFDocumentOwnerPasswordOption$PDFDocumentProducerAttribute$PDFDocumentSubjectAttribute$PDFDocumentTitleAttribute$PDFDocumentUserPasswordOption$PDFThumbnailViewDocumentEditedNotification$PDFViewAnnotationHitNotification$PDFViewAnnotationWillHitNotification$PDFViewChangedHistoryNotification$PDFViewCopyPermissionNotification$PDFViewDisplayBoxChangedNotification$PDFViewDisplayModeChangedNotification$PDFViewDocumentChangedNotification$PDFViewPageChangedNotification$PDFViewPrintPermissionNotification$PDFViewScaleChangedNotification$PDFViewSelectionChangedNotification$PDFViewVisiblePagesChangedNotification$kPDFAnnotationKey_Action$kPDFAnnotationKey_AdditionalActions$kPDFAnnotationKey_AppearanceDictionary$kPDFAnnotationKey_AppearanceState$kPDFAnnotationKey_AppleExtras$kPDFAnnotationKey_Border$kPDFAnnotationKey_BorderStyle$kPDFAnnotationKey_Color$kPDFAnnotationKey_Contents$kPDFAnnotationKey_Date$kPDFAnnotationKey_DefaultAppearance$kPDFAnnotationKey_Destination$kPDFAnnotationKey_Flags$kPDFAnnotationKey_HighlightingMode$kPDFAnnotationKey_IconName$kPDFAnnotationKey_Inklist$kPDFAnnotationKey_InteriorColor$kPDFAnnotationKey_LineEndingStyles$kPDFAnnotationKey_LinePoints$kPDFAnnotationKey_Name$kPDFAnnotationKey_Open$kPDFAnnotationKey_Page$kPDFAnnotationKey_Parent$kPDFAnnotationKey_Popup$kPDFAnnotationKey_QuadPoints$kPDFAnnotationKey_Quadding$kPDFAnnotationKey_Rect$kPDFAnnotationKey_Subtype$kPDFAnnotationKey_TextLabel$kPDFAnnotationKey_WidgetAppearanceDictionary$kPDFAnnotationKey_WidgetDefaultValue$kPDFAnnotationKey_WidgetFieldFlags$kPDFAnnotationKey_WidgetFieldType$kPDFAnnotationKey_WidgetMaxLen$kPDFAnnotationKey_WidgetOptions$kPDFAnnotationKey_WidgetTextLabelUI$kPDFAnnotationKey_WidgetValue$''' -enums = 
'''$kPDFActionNamedFind@8$kPDFActionNamedFirstPage@3$kPDFActionNamedGoBack@5$kPDFActionNamedGoForward@6$kPDFActionNamedGoToPage@7$kPDFActionNamedLastPage@4$kPDFActionNamedNextPage@1$kPDFActionNamedNone@0$kPDFActionNamedPreviousPage@2$kPDFActionNamedPrint@9$kPDFActionNamedZoomIn@10$kPDFActionNamedZoomOut@11$kPDFAnnotationArea@4$kPDFBorderStyleBeveled@2$kPDFBorderStyleDashed@1$kPDFBorderStyleInset@3$kPDFBorderStyleSolid@0$kPDFBorderStyleUnderline@4$kPDFControlArea@16$kPDFDisplayBoxArtBox@4$kPDFDisplayBoxBleedBox@2$kPDFDisplayBoxCropBox@1$kPDFDisplayBoxMediaBox@0$kPDFDisplayBoxTrimBox@3$kPDFDisplayDirectionHorizontal@1$kPDFDisplayDirectionVertical@0$kPDFDisplaySinglePage@0$kPDFDisplaySinglePageContinuous@1$kPDFDisplayTwoUp@2$kPDFDisplayTwoUpContinuous@3$kPDFDocumentPermissionsNone@0$kPDFDocumentPermissionsOwner@2$kPDFDocumentPermissionsUser@1$kPDFIconArea@64$kPDFImageArea@256$kPDFInterpolationQualityHigh@2$kPDFInterpolationQualityLow@1$kPDFInterpolationQualityNone@0$kPDFLineStyleCircle@2$kPDFLineStyleClosedArrow@5$kPDFLineStyleDiamond@3$kPDFLineStyleNone@0$kPDFLineStyleOpenArrow@4$kPDFLineStyleSquare@1$kPDFLinkArea@8$kPDFMarkupTypeHighlight@0$kPDFMarkupTypeStrikeOut@1$kPDFMarkupTypeUnderline@2$kPDFNoArea@0$kPDFPageArea@1$kPDFPopupArea@128$kPDFPrintPageScaleDownToFit@2$kPDFPrintPageScaleNone@0$kPDFPrintPageScaleToFit@1$kPDFTextAnnotationIconComment@0$kPDFTextAnnotationIconHelp@3$kPDFTextAnnotationIconInsert@6$kPDFTextAnnotationIconKey@1$kPDFTextAnnotationIconNewParagraph@4$kPDFTextAnnotationIconNote@2$kPDFTextAnnotationIconParagraph@5$kPDFTextArea@2$kPDFTextFieldArea@32$kPDFWidgetCheckBoxControl@2$kPDFWidgetMixedState@-1$kPDFWidgetOffState@0$kPDFWidgetOnState@1$kPDFWidgetPushButtonControl@0$kPDFWidgetRadioButtonControl@1$kPDFWidgetUnknownControl@-1$''' -misc.update({}) -aliases = {'PDFKitPlatformViewController': 'NSViewController', 'PDFKitPlatformEvent': 'NSEvent', 'PDFKitPlatformControl': 'NSControl', 'PDFPointZero': 'NSZeroPoint', 'PDFRectZero': 'NSZeroRect', 'PDFKitPlatformButtonCell': 'NSButtonCell', 'PDFEdgeInsets': 'NSEdgeInsets', 'PDFKitPlatformButton': 'NSButton', 'PDFSize': 'NSSize', 'PDFRect': 'NSRect', 'kPDFImageArea': 'FLT_MX', 'PDFKitPlatformAccessibilityElement': 'NSAccessibilityElement', 'PDFKitPlatformTextView': 'NSTextView', 'PDFKitPlatformChoiceWidgetListView': 'NSTableView', 'PDFKitPlatformView': 'NSView', 'PDFSizeZero': 'NSZeroSize', 'kPDFDestinationUnspecifiedValue': 'FLT_MAX', 'PDFKitPlatformBezierPathElement': 'NSBezierPathElement', 'PDFKitPlatformColor': 'NSColor', 'PDFKitPlatformTextFieldCell': 'NSTextFieldCell', 'PDFKitPlatformScrollView': 'NSScrollView', 'PDFKitPlatformImageView': 'NSImageView', 'PDFKitPlatformTextFieldDidChangeText': 'NSControlTextDidChangeNotification', 'PDFPoint': 'NSPoint', 'PDFKitPlatformImage': 'NSImage', 'PDFKitPlatformChoiceWidgetComboBoxView': 'NSPopUpButton', 'PDFKitPlatformBezierPath': 'NSBezierPath', 'PDFKitPlatformTextFieldDidBeginEditing': 'NSControlTextDidBeginEditingNotification', 'PDFEdgeInsetsZero': 'NSEdgeInsetsZero', 'PDFKitPlatformTextFieldDidEndEditing': 'NSControlTextDidEndEditingNotification', 'PDFKitPlatformTextViewDidChangeSelection': 'NSTextViewDidChangeSelectionNotification', 'PDFKitPlatformTextField': 'NSTextField', 'PDFKitPlatformFont': 'NSFont'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'PDFViewOpenPDF:forRemoteGoToAction:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'PDFViewPerformFind:', 
{'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'PDFViewPerformGoToPage:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'PDFViewPerformPrint:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'PDFViewPrintJobTitle:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'PDFViewWillChangeScaleFactor:toScale:', {'required': False, 'retval': {'type': sel32or64(b'f', b'd')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'PDFViewWillClickOnLink:withURL:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'classForAnnotationClass:', {'required': False, 'retval': {'type': '#'}, 'arguments': {2: {'type': b'#'}}}) - r(b'NSObject', b'classForAnnotationType:', {'required': False, 'retval': {'type': '#'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'classForPage', {'required': False, 'retval': {'type': '#'}}) - r(b'NSObject', b'didMatchString:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'documentDidBeginDocumentFind:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'documentDidBeginPageFind:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'documentDidEndDocumentFind:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'documentDidEndPageFind:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'documentDidFindMatch:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'documentDidUnlock:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'PDFActionResetForm', b'fieldsIncludedAreCleared', {'retval': {'type': b'Z'}}) - r(b'PDFActionResetForm', b'setFieldsIncludedAreCleared:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotation', b'allowsToggleToOff', {'retval': {'type': 'Z'}}) - r(b'PDFAnnotation', b'hasAppearanceStream', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotation', b'hasComb', {'retval': {'type': 'Z'}}) - r(b'PDFAnnotation', b'isHighlighted', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotation', b'isListChoice', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotation', b'isMultiline', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotation', b'isOpen', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotation', b'isPasswordField', {'retval': {'type': 'Z'}}) - r(b'PDFAnnotation', b'isReadOnly', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotation', b'isSignature', {'retval': {'type': 'Z'}}) - r(b'PDFAnnotation', b'radiosInUnison', {'retval': {'type': 'Z'}}) - r(b'PDFAnnotation', b'setAllowsToggleToOff:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotation', b'setBoolean:forAnnotationKey:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': 'Z'}}}) - r(b'PDFAnnotation', b'setComb:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotation', b'setHighlighted:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PDFAnnotation', b'setListChoice:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotation', b'setMultiline:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PDFAnnotation', b'setOpen:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotation', b'setRadiosInUnison:', {'arguments': 
{2: {'type': 'Z'}}}) - r(b'PDFAnnotation', b'setReadOnly:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotation', b'setRect:forAnnotationKey:', {'retval': {'type': 'Z'}}) - r(b'PDFAnnotation', b'setShouldDisplay:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotation', b'setShouldPrint:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotation', b'setValue:forAnnotationKey:', {'retval': {'type': 'Z'}}) - r(b'PDFAnnotation', b'shouldDisplay', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotation', b'shouldPrint', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotationButtonWidget', b'allowsToggleToOff', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotationButtonWidget', b'isHighlighted', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotationButtonWidget', b'setAllowsToggleToOff:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotationButtonWidget', b'setHighlighted:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotationChoiceWidget', b'isListChoice', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotationChoiceWidget', b'setIsListChoice:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotationLink', b'setHighlighted:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotationPopup', b'isOpen', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotationPopup', b'setIsOpen:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFAnnotationStamp', b'isSignature', {'retval': {'type': 'Z'}}) - r(b'PDFAnnotationTextWidget', b'isMultiline', {'retval': {'type': b'Z'}}) - r(b'PDFAnnotationTextWidget', b'setIsMultiline:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFDocument', b'allowsCommenting', {'retval': {'type': 'Z'}}) - r(b'PDFDocument', b'allowsContentAccessibility', {'retval': {'type': 'Z'}}) - r(b'PDFDocument', b'allowsCopying', {'retval': {'type': b'Z'}}) - r(b'PDFDocument', b'allowsDocumentAssembly', {'retval': {'type': 'Z'}}) - r(b'PDFDocument', b'allowsDocumentChanges', {'retval': {'type': 'Z'}}) - r(b'PDFDocument', b'allowsFormFieldEntry', {'retval': {'type': 'Z'}}) - r(b'PDFDocument', b'allowsPrinting', {'retval': {'type': b'Z'}}) - r(b'PDFDocument', b'isEncrypted', {'retval': {'type': b'Z'}}) - r(b'PDFDocument', b'isFinding', {'retval': {'type': b'Z'}}) - r(b'PDFDocument', b'isLocked', {'retval': {'type': b'Z'}}) - r(b'PDFDocument', b'printOperationForPrintInfo:scalingMode:autoRotate:', {'arguments': {4: {'type': b'Z'}}}) - r(b'PDFDocument', b'unlockWithPassword:', {'retval': {'type': b'Z'}}) - r(b'PDFDocument', b'writeToFile:', {'retval': {'type': b'Z'}}) - r(b'PDFDocument', b'writeToFile:withOptions:', {'retval': {'type': b'Z'}}) - r(b'PDFDocument', b'writeToURL:', {'retval': {'type': b'Z'}}) - r(b'PDFDocument', b'writeToURL:withOptions:', {'retval': {'type': b'Z'}}) - r(b'PDFOutline', b'isOpen', {'retval': {'type': b'Z'}}) - r(b'PDFOutline', b'setIsOpen:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFPage', b'displaysAnnotations', {'retval': {'type': b'Z'}}) - r(b'PDFPage', b'setDisplaysAnnotations:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFSelection', b'drawForPage:active:', {'arguments': {3: {'type': b'Z'}}}) - r(b'PDFSelection', b'drawForPage:withBox:active:', {'arguments': {4: {'type': b'Z'}}}) - r(b'PDFThumbnailView', b'allowsDragging', {'retval': {'type': b'Z'}}) - r(b'PDFThumbnailView', b'allowsMultipleSelection', {'retval': {'type': b'Z'}}) - r(b'PDFThumbnailView', b'setAllowsDragging:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFThumbnailView', b'setAllowsMultipleSelection:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFView', b'acceptsDraggedFiles', {'retval': {'type': 'Z'}}) - r(b'PDFView', b'allowsDragging', 
{'retval': {'type': b'Z'}}) - r(b'PDFView', b'autoScales', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'canGoBack', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'canGoForward', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'canGoToFirstPage', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'canGoToLastPage', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'canGoToNextPage', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'canGoToPreviousPage', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'canZoomIn', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'canZoomOut', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'displaysAsBook', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'displaysPageBreaks', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'displaysRTL', {'retval': {'type': 'Z'}}) - r(b'PDFView', b'enableDataDetectors', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'enablePageShadows:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFView', b'pageForPoint:nearest:', {'arguments': {3: {'type': b'Z'}}}) - r(b'PDFView', b'pageShadowsEnabled', {'retval': {'type': b'Z'}}) - r(b'PDFView', b'printWithInfo:autoRotate:', {'arguments': {3: {'type': b'Z'}}}) - r(b'PDFView', b'printWithInfo:autoRotate:pageScaling:', {'arguments': {3: {'type': b'Z'}}}) - r(b'PDFView', b'setAcceptsDraggedFiles:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PDFView', b'setAllowsDragging:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFView', b'setAutoScales:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFView', b'setCurrentSelection:animate:', {'arguments': {3: {'type': b'Z'}}}) - r(b'PDFView', b'setDisplaysAsBook:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFView', b'setDisplaysPageBreaks:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFView', b'setDisplaysRTL:', {'arguments': {2: {'type': 'Z'}}}) - r(b'PDFView', b'setEnableDataDetectors:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFView', b'setShouldAntiAlias:', {'arguments': {2: {'type': b'Z'}}}) - r(b'PDFView', b'shouldAntiAlias', {'retval': {'type': b'Z'}}) -finally: - objc._updatingMetadata(False) -protocols={'PDFViewDelegate': objc.informal_protocol('PDFViewDelegate', [objc.selector(None, b'PDFViewWillClickOnLink:withURL:', b'v@:@@', isRequired=False), objc.selector(None, b'PDFViewOpenPDF:forRemoteGoToAction:', b'v@:@@', isRequired=False), objc.selector(None, b'PDFViewPerformFind:', b'v@:@', isRequired=False), objc.selector(None, b'PDFViewWillChangeScaleFactor:toScale:', sel32or64(b'f@:@f', b'd@:@d'), isRequired=False), objc.selector(None, b'PDFViewPerformPrint:', b'v@:@', isRequired=False), objc.selector(None, b'PDFViewPrintJobTitle:', b'@@:@', isRequired=False), objc.selector(None, b'PDFViewPerformGoToPage:', b'v@:@', isRequired=False)]), 'PDFDocumentDelegate': objc.informal_protocol('PDFDocumentDelegate', [objc.selector(None, b'classForPage', b'#@:', isRequired=False), objc.selector(None, b'classForAnnotationClass:', b'#@:#', isRequired=False), objc.selector(None, b'didMatchString:', b'v@:@', isRequired=False)]), 'PDFDocumentNotifications': objc.informal_protocol('PDFDocumentNotifications', [objc.selector(None, b'documentDidFindMatch:', b'v@:@', isRequired=False), objc.selector(None, b'documentDidBeginPageFind:', b'v@:@', isRequired=False), objc.selector(None, b'documentDidBeginDocumentFind:', b'v@:@', isRequired=False), objc.selector(None, b'documentDidUnlock:', b'v@:@', isRequired=False), objc.selector(None, b'documentDidEndPageFind:', b'v@:@', isRequired=False), objc.selector(None, b'documentDidEndDocumentFind:', b'v@:@', isRequired=False)])} -expressions = {} - -# END OF FILE diff 
--git a/env/lib/python2.7/site-packages/Quartz/PDFKit/_metadata.pyc b/env/lib/python2.7/site-packages/Quartz/PDFKit/_metadata.pyc deleted file mode 100644 index b4b8693b..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/PDFKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzComposer/__init__.py b/env/lib/python2.7/site-packages/Quartz/QuartzComposer/__init__.py deleted file mode 100644 index 925f262e..00000000 --- a/env/lib/python2.7/site-packages/Quartz/QuartzComposer/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the QuartzComposer framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Quartz.CoreGraphics -import Foundation - -from Quartz.QuartzComposer import _metadata - -sys.modules['Quartz.QuartzComposer'] = mod = objc.ObjCLazyModule('Quartz.QuartzComposer', - "com.apple.QuartzComposer", - objc.pathForFramework("/System/Library/Frameworks/Quartz.framework/Frameworks/QuartzComposer.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Quartz.CoreGraphics, Foundation,)) - -import sys -del sys.modules['Quartz.QuartzComposer._metadata'] diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzComposer/__init__.pyc b/env/lib/python2.7/site-packages/Quartz/QuartzComposer/__init__.pyc deleted file mode 100644 index 9d7d13f8..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuartzComposer/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzComposer/_metadata.py b/env/lib/python2.7/site-packages/Quartz/QuartzComposer/_metadata.py deleted file mode 100644 index e01618c1..00000000 --- a/env/lib/python2.7/site-packages/Quartz/QuartzComposer/_metadata.py +++ /dev/null @@ -1,105 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 26 07:59:02 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$QCCompositionAttributeBuiltInKey$QCCompositionAttributeCategoryKey$QCCompositionAttributeCopyrightKey$QCCompositionAttributeDescriptionKey$QCCompositionAttributeHasConsumersKey$QCCompositionAttributeIsTimeDependentKey$QCCompositionAttributeNameKey$QCCompositionCategoryDistortion$QCCompositionCategoryStylize$QCCompositionCategoryUtility$QCCompositionInputAudioPeakKey$QCCompositionInputAudioSpectrumKey$QCCompositionInputDestinationImageKey$QCCompositionInputImageKey$QCCompositionInputPaceKey$QCCompositionInputPreviewModeKey$QCCompositionInputPrimaryColorKey$QCCompositionInputRSSArticleDurationKey$QCCompositionInputRSSFeedURLKey$QCCompositionInputScreenImageKey$QCCompositionInputSecondaryColorKey$QCCompositionInputSourceImageKey$QCCompositionInputTrackInfoKey$QCCompositionInputTrackPositionKey$QCCompositionInputTrackSignalKey$QCCompositionInputXKey$QCCompositionInputYKey$QCCompositionOutputImageKey$QCCompositionOutputWebPageURLKey$QCCompositionPickerPanelDidSelectCompositionNotification$QCCompositionPickerViewDidSelectCompositionNotification$QCCompositionProtocolGraphicAnimation$QCCompositionProtocolGraphicTransition$QCCompositionProtocolImageFilter$QCCompositionProtocolMusicVisualizer$QCCompositionProtocolRSSVisualizer$QCCompositionProtocolScreenSaver$QCCompositionRepositoryDidUpdateNotification$QCPlugInAttributeCategoriesKey$QCPlugInAttributeCopyrightKey$QCPlugInAttributeDescriptionKey$QCPlugInAttributeExamplesKey$QCPlugInAttributeNameKey$QCPlugInExecutionArgumentEventKey$QCPlugInExecutionArgumentMouseLocationKey$QCPlugInPixelFormatARGB8$QCPlugInPixelFormatBGRA8$QCPlugInPixelFormatI8$QCPlugInPixelFormatIf$QCPlugInPixelFormatRGBAf$QCPortAttributeDefaultValueKey$QCPortAttributeMaximumValueKey$QCPortAttributeMenuItemsKey$QCPortAttributeMinimumValueKey$QCPortAttributeNameKey$QCPortAttributeTypeKey$QCPortTypeBoolean$QCPortTypeColor$QCPortTypeImage$QCPortTypeIndex$QCPortTypeNumber$QCPortTypeString$QCPortTypeStructure$QCRendererEventKey$QCRendererMouseLocationKey$QCViewDidStartRenderingNotification$QCViewDidStopRenderingNotification$''' -enums = '''$kQCPlugInExecutionModeConsumer@3$kQCPlugInExecutionModeProcessor@2$kQCPlugInExecutionModeProvider@1$kQCPlugInTimeModeIdle@1$kQCPlugInTimeModeNone@0$kQCPlugInTimeModeTimeBase@2$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'CGLContextObj', {'required': True, 'retval': {'type': b'^{_CGLContextObject=}'}}) - r(b'NSObject', b'attributes', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'bindTextureRepresentationToCGLContext:textureUnit:normalizeCoordinates:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'^{_CGLContextObject=}'}, 3: {'type': b'I'}, 4: {'type': b'Z'}}}) - r(b'NSObject', b'bounds', {'required': True, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSObject', b'bufferBaseAddress', {'required': True, 'retval': {'type': b'^v'}}) - r(b'NSObject', b'bufferBytesPerRow', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'bufferColorSpace', {'required': True, 'retval': {'type': b'^{CGColorSpace=}'}}) - r(b'NSObject', b'bufferPixelFormat', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'bufferPixelsHigh', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'bufferPixelsWide', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'canRenderWithCGLContext:', {'required': 
False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'^{_CGLContextObject=}'}}}) - r(b'NSObject', b'colorSpace', {'required': True, 'retval': {'type': b'^{CGColorSpace=}'}}) - r(b'NSObject', b'compositionParameterView:didChangeParameterWithKey:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'compositionParameterView:shouldDisplayParameterWithKey:attributes:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'compositionPickerView:didSelectComposition:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'compositionPickerViewDidStartAnimating:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'compositionPickerViewWillStopAnimating:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'compositionURL', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'copyRenderedTextureForCGLContext:pixelFormat:bounds:isFlipped:', {'required': False, 'retval': {'type': b'I'}, 'arguments': {2: {'type': b'^{_CGLContextObject=}'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 5: {'type': b'^Z'}}}) - r(b'NSObject', b'imageBounds', {'required': True, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSObject', b'imageColorSpace', {'required': True, 'retval': {'type': b'^{CGColorSpace=}'}}) - r(b'NSObject', b'inputKeys', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'lockBufferRepresentationWithPixelFormat:colorSpace:forBounds:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'^{CGColorSpace=}'}, 4: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'lockTextureRepresentationWithColorSpace:forBounds:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'^{CGColorSpace=}'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'logMessage:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}, 'variadic': True}) - r(b'NSObject', b'outputImageProviderFromBufferWithPixelFormat:pixelsWide:pixelsHigh:baseAddress:bytesPerRow:releaseCallback:releaseContext:colorSpace:shouldColorMatch:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'^v'}, 6: {'type': sel32or64(b'I', b'Q')}, 7: {'type': b'^?'}, 8: {'type': b'^v'}, 9: {'type': b'^{CGColorSpace=}'}, 10: {'type': b'Z'}}}) - r(b'NSObject', b'outputImageProviderFromTextureWithPixelFormat:pixelsWide:pixelsHigh:name:flipped:releaseCallback:releaseContext:colorSpace:shouldColorMatch:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'I', b'Q')}, 5: {'type': b'I'}, 6: {'type': b'Z'}, 7: {'type': b'^?'}, 8: {'type': b'^v'}, 9: {'type': b'^{CGColorSpace=}'}, 10: {'type': b'Z'}}}) - r(b'NSObject', b'outputKeys', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'propertyListFromInputValues', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'releaseRenderedTexture:forCGLContext:', {'required': False, 'retval': {'type': b'v'}, 
'arguments': {2: {'type': b'I'}, 3: {'type': b'^{_CGLContextObject=}'}}}) - r(b'NSObject', b'renderToBuffer:withBytesPerRow:pixelFormat:forBounds:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'^v'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'renderWithCGLContext:forBounds:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'^{_CGLContextObject=}'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'setInputValuesWithPropertyList:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setValue:forInputKey:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'shouldColorMatch', {'required': False, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'supportedBufferPixelFormats', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'supportedRenderedTexturePixelFormats', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'textureColorSpace', {'required': True, 'retval': {'type': b'^{CGColorSpace=}'}}) - r(b'NSObject', b'textureFlipped', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'textureMatrix', {'required': True, 'retval': {'type': b'^f'}}) - r(b'NSObject', b'textureName', {'required': True, 'retval': {'type': b'I'}}) - r(b'NSObject', b'texturePixelsHigh', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'texturePixelsWide', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'textureTarget', {'required': True, 'retval': {'type': b'I'}}) - r(b'NSObject', b'unbindTextureRepresentationFromCGLContext:textureUnit:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'^{_CGLContextObject=}'}, 3: {'type': b'I'}}}) - r(b'NSObject', b'unlockBufferRepresentation', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'unlockTextureRepresentation', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'userInfo', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'valueForInputKey:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'valueForOutputKey:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'valueForOutputKey:ofType:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'QCCompositionParameterView', b'drawsBackground', {'retval': {'type': b'Z'}}) - r(b'QCCompositionParameterView', b'hasParameters', {'retval': {'type': b'Z'}}) - r(b'QCCompositionParameterView', b'setDrawsBackground:', {'arguments': {2: {'type': b'Z'}}}) - r(b'QCCompositionPickerView', b'allowsEmptySelection', {'retval': {'type': b'Z'}}) - r(b'QCCompositionPickerView', b'drawsBackground', {'retval': {'type': b'Z'}}) - r(b'QCCompositionPickerView', b'isAnimating', {'retval': {'type': b'Z'}}) - r(b'QCCompositionPickerView', b'setAllowsEmptySelection:', {'arguments': {2: {'type': b'Z'}}}) - r(b'QCCompositionPickerView', b'setDrawsBackground:', {'arguments': {2: {'type': b'Z'}}}) - r(b'QCCompositionPickerView', b'setShowsCompositionNames:', {'arguments': {2: {'type': b'Z'}}}) - r(b'QCCompositionPickerView', b'showsCompositionNames', {'retval': {'type': b'Z'}}) - 
r(b'QCPlugIn', b'createViewController', {'retval': {'already_retained': True}}) - r(b'QCPlugIn', b'didValueForInputKeyChange:', {'retval': {'type': b'Z'}}) - r(b'QCPlugIn', b'execute:atTime:withArguments:', {'retval': {'type': b'Z'}}) - r(b'QCPlugIn', b'loadPlugInAtPath:', {'retval': {'type': b'Z'}}) - r(b'QCPlugIn', b'setValue:forOutputKey:', {'retval': {'type': b'Z'}}) - r(b'QCPlugIn', b'startExecution:', {'retval': {'type': b'Z'}}) - r(b'QCRenderer', b'renderAtTime:arguments:', {'retval': {'type': b'Z'}}) - r(b'QCView', b'autostartsRendering', {'retval': {'type': b'Z'}}) - r(b'QCView', b'isPausedRendering', {'retval': {'type': b'Z'}}) - r(b'QCView', b'isRendering', {'retval': {'type': b'Z'}}) - r(b'QCView', b'loadComposition:', {'retval': {'type': b'Z'}}) - r(b'QCView', b'loadCompositionFromFile:', {'retval': {'type': b'Z'}}) - r(b'QCView', b'renderAtTime:arguments:', {'retval': {'type': b'Z'}}) - r(b'QCView', b'setAutostartsRendering:', {'arguments': {2: {'type': b'Z'}}}) - r(b'QCView', b'startRendering', {'retval': {'type': b'Z'}}) -finally: - objc._updatingMetadata(False) -protocols={'QCCompositionPickerViewDelegate': objc.informal_protocol('QCCompositionPickerViewDelegate', [objc.selector(None, b'compositionPickerView:didSelectComposition:', b'v@:@@', isRequired=False), objc.selector(None, b'compositionPickerViewWillStopAnimating:', b'v@:@', isRequired=False), objc.selector(None, b'compositionPickerViewDidStartAnimating:', b'v@:@', isRequired=False)]), 'QCCompositionParameterViewDelegate': objc.informal_protocol('QCCompositionParameterViewDelegate', [objc.selector(None, b'compositionParameterView:didChangeParameterWithKey:', b'v@:@@', isRequired=False), objc.selector(None, b'compositionParameterView:shouldDisplayParameterWithKey:attributes:', b'Z@:@@@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzComposer/_metadata.pyc b/env/lib/python2.7/site-packages/Quartz/QuartzComposer/_metadata.pyc deleted file mode 100644 index c3312948..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuartzComposer/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzCore/__init__.py b/env/lib/python2.7/site-packages/Quartz/QuartzCore/__init__.py deleted file mode 100644 index fa449356..00000000 --- a/env/lib/python2.7/site-packages/Quartz/QuartzCore/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -''' -Python mapping for the QuartzCore framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Foundation - -from Quartz.QuartzCore import _metadata -import Quartz.QuartzCore._quartzcore - -# XXX: addConvenienceFor... 
should be metadata -def CIVector__getitem__(self, idx): - if isinstance(idx, slice): - start, stop, step = idx.indices(self.count()) - return [self[i] for i in range(start, stop, step)] - - if idx < 0: - new = self.count() + idx - if new < 0: - raise IndexError(idx) - idx = new - - return self.valueAtIndex_(idx) - -objc.addConvenienceForClass('CIVector', ( - ('__len__', lambda self: self.count()), - ('__getitem__', CIVector__getitem__), -)) - - -objc.addConvenienceForClass('CIContext', ( - ('__getitem__', lambda self, key: self.objectForKey_(key)), - ('__setitem__', lambda self, key, value: self.setObject_forKey_(value, key)), -)) -objc.addConvenienceForClass('CIContextImpl', ( - ('__getitem__', lambda self, key: self.objectForKey_(key)), - ('__setitem__', lambda self, key, value: self.setObject_forKey_(value, key)), -)) - -objc.addConvenienceForBasicSequence('QCStructure', True) - - -sys.modules['Quartz.QuartzCore'] = mod = objc.ObjCLazyModule('Quartz.QuartzCore', - "com.apple.QuartzCore", - objc.pathForFramework("/System/Library/Frameworks/QuartzCore.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, (Foundation,)) - -import sys -del sys.modules['Quartz.QuartzCore._metadata'] diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzCore/__init__.pyc b/env/lib/python2.7/site-packages/Quartz/QuartzCore/__init__.pyc deleted file mode 100644 index 675b8568..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuartzCore/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzCore/_metadata.py b/env/lib/python2.7/site-packages/Quartz/QuartzCore/_metadata.py deleted file mode 100644 index 097b95b8..00000000 --- a/env/lib/python2.7/site-packages/Quartz/QuartzCore/_metadata.py +++ /dev/null @@ -1,196 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 26 07:59:02 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'CATransform3D': objc.createStructType('CATransform3D', sel32or64(b'{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}'), ['m11', 'm12', 'm13', 'm14', 'm21', 'm22', 'm23', 'm24', 'm31', 'm32', 'm33', 'm34', 'm41', 'm42', 'm43', 'm44'])}) -constants = 
'''$CIDetectorAccuracy$CIDetectorAccuracyHigh$CIDetectorAccuracyLow$CIDetectorAspectRatio$CIDetectorEyeBlink$CIDetectorFocalLength$CIDetectorImageOrientation$CIDetectorMaxFeatureCount$CIDetectorMinFeatureSize$CIDetectorNumberOfAngles$CIDetectorReturnSubFeatures$CIDetectorSmile$CIDetectorTracking$CIDetectorTypeFace$CIDetectorTypeQRCode$CIDetectorTypeRectangle$CIDetectorTypeText$CIFeatureTypeFace$CIFeatureTypeQRCode$CIFeatureTypeRectangle$CIFeatureTypeText$kCAAlignmentCenter$kCAAlignmentJustified$kCAAlignmentLeft$kCAAlignmentNatural$kCAAlignmentRight$kCAAnimationCubic$kCAAnimationCubicPaced$kCAAnimationDiscrete$kCAAnimationLinear$kCAAnimationPaced$kCAAnimationRotateAuto$kCAAnimationRotateAutoReverse$kCAContentsFormatGray8Uint$kCAContentsFormatRGBA16Float$kCAContentsFormatRGBA8Uint$kCAEmitterBehaviorAlignToMotion$kCAEmitterBehaviorAttractor$kCAEmitterBehaviorColorOverLife$kCAEmitterBehaviorDrag$kCAEmitterBehaviorLight$kCAEmitterBehaviorSimpleAttractor$kCAEmitterBehaviorValueOverLife$kCAEmitterBehaviorWave$kCAEmitterLayerAdditive$kCAEmitterLayerBackToFront$kCAEmitterLayerCircle$kCAEmitterLayerCuboid$kCAEmitterLayerLine$kCAEmitterLayerOldestFirst$kCAEmitterLayerOldestLast$kCAEmitterLayerOutline$kCAEmitterLayerPoint$kCAEmitterLayerPoints$kCAEmitterLayerRectangle$kCAEmitterLayerSphere$kCAEmitterLayerSurface$kCAEmitterLayerUnordered$kCAEmitterLayerVolume$kCAFillModeBackwards$kCAFillModeBoth$kCAFillModeForwards$kCAFillModeFrozen$kCAFillModeRemoved$kCAFillRuleEvenOdd$kCAFillRuleNonZero$kCAFilterLinear$kCAFilterNearest$kCAFilterTrilinear$kCAGradientLayerAxial$kCAGradientLayerConic$kCAGradientLayerRadial$kCAGravityBottom$kCAGravityBottomLeft$kCAGravityBottomRight$kCAGravityCenter$kCAGravityLeft$kCAGravityResize$kCAGravityResizeAspect$kCAGravityResizeAspectFill$kCAGravityRight$kCAGravityTop$kCAGravityTopLeft$kCAGravityTopRight$kCALineCapButt$kCALineCapRound$kCALineCapSquare$kCALineJoinBevel$kCALineJoinMiter$kCALineJoinRound$kCAMediaTimingFunctionDefault$kCAMediaTimingFunctionEaseIn$kCAMediaTimingFunctionEaseInEaseOut$kCAMediaTimingFunctionEaseOut$kCAMediaTimingFunctionLinear$kCAOnOrderIn$kCAOnOrderOut$kCARendererColorSpace$kCARendererMetalCommandQueue$kCAScrollBoth$kCAScrollHorizontally$kCAScrollNone$kCAScrollVertically$kCATransactionAnimationDuration$kCATransactionAnimationTimingFunction$kCATransactionCompletionBlock$kCATransactionDisableActions$kCATransition$kCATransitionFade$kCATransitionFromBottom$kCATransitionFromLeft$kCATransitionFromRight$kCATransitionFromTop$kCATransitionMoveIn$kCATransitionPush$kCATransitionReveal$kCATruncationEnd$kCATruncationMiddle$kCATruncationNone$kCATruncationStart$kCAValueFunctionRotateX$kCAValueFunctionRotateY$kCAValueFunctionRotateZ$kCAValueFunctionScale$kCAValueFunctionScaleX$kCAValueFunctionScaleY$kCAValueFunctionScaleZ$kCAValueFunctionTranslate$kCAValueFunctionTranslateX$kCAValueFunctionTranslateY$kCAValueFunctionTranslateZ$kCIActiveKeys$kCIApplyOptionColorSpace$kCIApplyOptionDefinition$kCIApplyOptionExtent$kCIApplyOptionUserInfo$kCIAttributeClass$kCIAttributeDefault$kCIAttributeDescription$kCIAttributeDisplayName$kCIAttributeFilterAvailable_Mac$kCIAttributeFilterAvailable_iOS$kCIAttributeFilterCategories$kCIAttributeFilterDisplayName$kCIAttributeFilterName$kCIAttributeIdentity$kCIAttributeMax$kCIAttributeMin$kCIAttributeName$kCIAttributeReferenceDocumentation$kCIAttributeSliderMax$kCIAttributeSliderMin$kCIAttributeType$kCIAttributeTypeAngle$kCIAttributeTypeBoolean$kCIAttributeTypeColor$kCIAttributeTypeCount$kCIAttributeTypeDistance$kCIAttributeTypeGradient$kCIAttrib
uteTypeImage$kCIAttributeTypeInteger$kCIAttributeTypeOffset$kCIAttributeTypeOpaqueColor$kCIAttributeTypePosition$kCIAttributeTypePosition3$kCIAttributeTypeRectangle$kCIAttributeTypeScalar$kCIAttributeTypeTime$kCIAttributeTypeTransform$kCICategoryBlur$kCICategoryBuiltIn$kCICategoryColorAdjustment$kCICategoryColorEffect$kCICategoryCompositeOperation$kCICategoryDistortionEffect$kCICategoryFilterGenerator$kCICategoryGenerator$kCICategoryGeometryAdjustment$kCICategoryGradient$kCICategoryHalftoneEffect$kCICategoryHighDynamicRange$kCICategoryInterlaced$kCICategoryNonSquarePixels$kCICategoryReduction$kCICategorySharpen$kCICategoryStillImage$kCICategoryStylize$kCICategoryTileEffect$kCICategoryTransition$kCICategoryVideo$kCIContextCacheIntermediates$kCIContextHighQualityDownsample$kCIContextOutputColorSpace$kCIContextOutputPremultiplied$kCIContextPriorityRequestLow$kCIContextUseSoftwareRenderer$kCIContextWorkingColorSpace$kCIContextWorkingFormat$kCIFilterGeneratorExportedKey$kCIFilterGeneratorExportedKeyName$kCIFilterGeneratorExportedKeyTargetObject$kCIFormatA16@i$kCIFormatA8@i$kCIFormatABGR8@i$kCIFormatARGB8@i$kCIFormatAf@i$kCIFormatAh@i$kCIFormatBGRA8@i$kCIFormatL16@i$kCIFormatL8@i$kCIFormatLA16@i$kCIFormatLA8@i$kCIFormatLAf@i$kCIFormatLAh@i$kCIFormatLf@i$kCIFormatLh@i$kCIFormatR16@i$kCIFormatR8@i$kCIFormatRG16@i$kCIFormatRG8@i$kCIFormatRGBA16@i$kCIFormatRGBA8@i$kCIFormatRGBAf@i$kCIFormatRGBAh@i$kCIFormatRGf@i$kCIFormatRGh@i$kCIFormatRf@i$kCIFormatRh@i$kCIImageApplyOrientationProperty$kCIImageAutoAdjustCrop$kCIImageAutoAdjustEnhance$kCIImageAutoAdjustFeatures$kCIImageAutoAdjustLevel$kCIImageAutoAdjustRedEye$kCIImageAuxiliaryDepth$kCIImageAuxiliaryDisparity$kCIImageAuxiliaryPortraitEffectsMatte$kCIImageColorSpace$kCIImageNearestSampling$kCIImageProperties$kCIImageProviderTileSize$kCIImageProviderUserInfo$kCIImageRepresentationAVDepthData$kCIImageRepresentationAVPortraitEffectsMatte$kCIImageRepresentationDepthImage$kCIImageRepresentationDisparityImage$kCIImageRepresentationPortraitEffectsMatteImage$kCIImageTextureFormat$kCIImageTextureTarget$kCIInputAllowDraftModeKey$kCIInputAmountKey$kCIInputAngleKey$kCIInputAspectRatioKey$kCIInputBackgroundImageKey$kCIInputBaselineExposureKey$kCIInputBiasKey$kCIInputBoostKey$kCIInputBoostShadowAmountKey$kCIInputBrightnessKey$kCIInputCenterKey$kCIInputColorKey$kCIInputColorNoiseReductionAmountKey$kCIInputContrastKey$kCIInputDecoderVersionKey$kCIInputDepthImageKey$kCIInputDisableGamutMapKey$kCIInputDisparityImageKey$kCIInputEVKey$kCIInputEnableChromaticNoiseTrackingKey$kCIInputEnableSharpeningKey$kCIInputEnableVendorLensCorrectionKey$kCIInputExtentKey$kCIInputGradientImageKey$kCIInputIgnoreImageOrientationKey$kCIInputImageKey$kCIInputImageOrientationKey$kCIInputIntensityKey$kCIInputLinearSpaceFilter$kCIInputLuminanceNoiseReductionAmountKey$kCIInputMaskImageKey$kCIInputMatteImageKey$kCIInputMoireAmountKey$kCIInputNeutralChromaticityXKey$kCIInputNeutralChromaticityYKey$kCIInputNeutralLocationKey$kCIInputNeutralTemperatureKey$kCIInputNeutralTintKey$kCIInputNoiseReductionAmountKey$kCIInputNoiseReductionContrastAmountKey$kCIInputNoiseReductionDetailAmountKey$kCIInputNoiseReductionSharpnessAmountKey$kCIInputRadiusKey$kCIInputRefractionKey$kCIInputSaturationKey$kCIInputScaleFactorKey$kCIInputScaleKey$kCIInputShadingImageKey$kCIInputSharpnessKey$kCIInputTargetImageKey$kCIInputTimeKey$kCIInputTransformKey$kCIInputVersionKey$kCIInputWeightsKey$kCIInputWidthKey$kCIOutputImageKey$kCIOutputNativeSizeKey$kCISamplerAffineMatrix$kCISamplerColorSpace$kCISamplerFilterLinear$kCISampler
FilterMode$kCISamplerFilterNearest$kCISamplerWrapBlack$kCISamplerWrapClamp$kCISamplerWrapMode$kCISupportedDecoderVersionsKey$kCIUIParameterSet$kCIUISetAdvanced$kCIUISetBasic$kCIUISetDevelopment$kCIUISetIntermediate$''' -constants = constants + '$CATransform3DIdentity@%s$'%(sel32or64('{CATransform3D=ffffffffffffffff}', '{CATransform3D=dddddddddddddddd}'),) -enums = '''$CA_WARN_DEPRECATED@1$CIDataMatrixCodeECCVersion000@0$CIDataMatrixCodeECCVersion050@50$CIDataMatrixCodeECCVersion080@80$CIDataMatrixCodeECCVersion100@100$CIDataMatrixCodeECCVersion140@140$CIDataMatrixCodeECCVersion200@200$CIQRCodeErrorCorrectionLevelH@72$CIQRCodeErrorCorrectionLevelL@76$CIQRCodeErrorCorrectionLevelM@77$CIQRCodeErrorCorrectionLevelQ@81$CIRenderDestinationAlphaNone@0$CIRenderDestinationAlphaPremultiplied@1$CIRenderDestinationAlphaUnpremultiplied@2$kCAConstraintHeight@7$kCAConstraintMaxX@2$kCAConstraintMaxY@6$kCAConstraintMidX@1$kCAConstraintMidY@5$kCAConstraintMinX@0$kCAConstraintMinY@4$kCAConstraintWidth@3$kCALayerBottomEdge@4$kCALayerHeightSizable@16$kCALayerLeftEdge@1$kCALayerMaxXMargin@4$kCALayerMaxXMaxYCorner@8$kCALayerMaxXMinYCorner@2$kCALayerMaxYMargin@32$kCALayerMinXMargin@1$kCALayerMinXMaxYCorner@4$kCALayerMinXMinYCorner@1$kCALayerMinYMargin@8$kCALayerNotSizable@0$kCALayerRightEdge@2$kCALayerTopEdge@8$kCALayerWidthSizable@2$''' -misc.update({}) -functions={'CATransform3DIsAffine': (sel32or64(b'B{CATransform3D=ffffffffffffffff}', b'B{CATransform3D=dddddddddddddddd}'),), 'CATransform3DInvert': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}'),), 'CATransform3DIsIdentity': (sel32or64(b'B{CATransform3D=ffffffffffffffff}', b'B{CATransform3D=dddddddddddddddd}'),), 'CATransform3DMakeScale': (sel32or64(b'{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}ddd'),), 'CATransform3DTranslate': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}ddd'),), 'CATransform3DEqualToTransform': (sel32or64(b'B{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}', b'B{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}'),), 'CATransform3DRotate': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}ffff', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}dddd'),), 'CACurrentMediaTime': (b'd',), 'CATransform3DMakeRotation': (sel32or64(b'{CATransform3D=ffffffffffffffff}ffff', b'{CATransform3D=dddddddddddddddd}dddd'),), 'CATransform3DConcat': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}'),), 'CATransform3DScale': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}ddd'),), 'CATransform3DMakeTranslation': (sel32or64(b'{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}ddd'),), 'CATransform3DGetAffineTransform': (sel32or64(b'{CGAffineTransform=ffffff}{CATransform3D=ffffffffffffffff}', b'{CGAffineTransform=dddddd}{CATransform3D=dddddddddddddddd}'),), 'CATransform3DMakeAffineTransform': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CGAffineTransform=ffffff}', b'{CATransform3D=dddddddddddddddd}{CGAffineTransform=dddddd}'),)} -r = objc.registerMetaDataForSelector 
-objc._updatingMetadata(True) -try: - r(b'CAAnimation', b'isRemovedOnCompletion', {'retval': {'type': b'Z'}}) - r(b'CAAnimation', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CAAnimation', b'setRemovedOnCompletion:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CAAnimation', b'shouldArchiveValueForKey:', {'retval': {'type': b'Z'}}) - r(b'CAEmitterBehavior', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'CAEmitterBehavior', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CAEmitterCell', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'CAEmitterCell', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CAEmitterCell', b'shouldArchiveValueForKey:', {'retval': {'type': b'Z'}}) - r(b'CAEmitterLayer', b'preservesDepth', {'retval': {'type': b'Z'}}) - r(b'CAEmitterLayer', b'setPreservesDepth:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CALayer', b'allowsEdgeAntialiasing', {'retval': {'type': 'Z'}}) - r(b'CALayer', b'allowsGroupOpacity', {'retval': {'type': 'Z'}}) - r(b'CALayer', b'containsPoint:', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'contentsAreFlipped', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'drawsAsynchronously', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'isDoubleSided', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'isGeometryFlipped', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'isHidden', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'isOpaque', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'masksToBounds', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'needsDisplay', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'needsDisplayForKey:', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'needsDisplayOnBoundsChange', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'needsLayout', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'setAllowsEdgeAntialiasing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CALayer', b'setAllowsGroupOpacity:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CALayer', b'setDoubleSided:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CALayer', b'setDrawsAsynchronously:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CALayer', b'setGeometryFlipped:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CALayer', b'setHidden:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CALayer', b'setMasksToBounds:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CALayer', b'setNeedsDisplayOnBoundsChange:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CALayer', b'setOpaque:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CALayer', b'setShouldRasterize:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CALayer', b'setWantsExtendedDynamicRangeContent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CALayer', b'shouldArchiveValueForKey:', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'shouldRasterize', {'retval': {'type': b'Z'}}) - r(b'CALayer', b'wantsExtendedDynamicRangeContent', {'retval': {'type': 'Z'}}) - r(b'CAMetalLayer', b'allowsNextDrawableTimeout', {'retval': {'type': 'Z'}}) - r(b'CAMetalLayer', b'displaySyncEnabled', {'retval': {'type': 'Z'}}) - r(b'CAMetalLayer', b'framebufferOnly', {'retval': {'type': 'Z'}}) - r(b'CAMetalLayer', b'presentsWithTransaction', {'retval': {'type': 'Z'}}) - r(b'CAMetalLayer', b'setAllowsNextDrawableTimeout:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CAMetalLayer', b'setDisplaySyncEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CAMetalLayer', b'setFramebufferOnly:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CAMetalLayer', b'setPresentsWithTransaction:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CAMetalLayer', b'setWantsExtendedDynamicRangeContent:', {'arguments': {2: {'type': 
'Z'}}}) - r(b'CAMetalLayer', b'wantsExtendedDynamicRangeContent', {'retval': {'type': 'Z'}}) - r(b'CAOpenGLLayer', b'canDrawInCGLContext:pixelFormat:forLayerTime:displayTime:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}}) - r(b'CAOpenGLLayer', b'drawInCGLContext:pixelFormat:forLayerTime:displayTime:', {'arguments': {5: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}}) - r(b'CAOpenGLLayer', b'isAsynchronous', {'retval': {'type': b'Z'}}) - r(b'CAOpenGLLayer', b'setAsynchronous:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CAOpenGLLayer', b'setWantsExtendedDynamicRangeContent:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CAOpenGLLayer', b'wantsExtendedDynamicRangeContent', {'retval': {'type': 'Z'}}) - r(b'CAPropertyAnimation', b'isAdditive', {'retval': {'type': b'Z'}}) - r(b'CAPropertyAnimation', b'isCumulative', {'retval': {'type': b'Z'}}) - r(b'CAPropertyAnimation', b'setAdditive:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CAPropertyAnimation', b'setCumulative:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CARenderer', b'beginFrameAtTime:timeStamp:', {'arguments': {3: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}}) - r(b'CARenderer', b'rendererWithCGLContext:options:', {'arguments': {2: {'type': '^{_CGLContextObject=}'}}}) - r(b'CAReplicatorLayer', b'preservesDepth', {'retval': {'type': b'Z'}}) - r(b'CAReplicatorLayer', b'setPreservesDepth:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CATextLayer', b'allowsFontSubpixelQuantization', {'retval': {'type': 'Z'}}) - r(b'CATextLayer', b'font', {'retval': {'type': b'@'}}) - r(b'CATextLayer', b'isWrapped', {'retval': {'type': b'Z'}}) - r(b'CATextLayer', b'setAllowsFontSubpixelQuantization:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CATextLayer', b'setFont:', {'arguments': {2: {'type': b'@'}}}) - r(b'CATextLayer', b'setWrapped:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CATransaction', b'completionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'CATransaction', b'disableActions', {'retval': {'type': b'Z'}}) - r(b'CATransaction', b'setCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'CATransaction', b'setDisableActions:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CIAztecCodeDescriptor', b'isCompact', {'retval': {'type': 'Z'}}) - r(b'CIColor', b'components', {'retval': {'c_array_of_variable_length': True}}) - r(b'CIContext', b'createCGImage:fromRect:format:colorSpace:deferred:', {'retval': {'already_cfretained': True}, 'arguments': {6: {'type': 'Z'}}}) - r(b'CIContext', b'createCGLayerWithSize:info:', {'retval': {'already_cfretained': True}}) - r(b'CIContext', b'prepareRender:fromRect:toDestination:atPoint:error:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'CIContext', b'render:toBitmap:rowBytes:bounds:format:colorSpace:', {'arguments': {3: {'type_modifier': b'o', 'c_array_of_variable_length': True}}}) - r(b'CIContext', b'startTaskToClear:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'CIContext', b'startTaskToRender:fromRect:toDestination:atPoint:error:', {'arguments': {6: {'type_modifier': b'o'}}}) - r(b'CIContext', 
b'startTaskToRender:toDestination:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CIContext', b'writeHEIFRepresentationOfImage:toURL:format:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {7: {'type_modifier': b'o'}}}) - r(b'CIContext', b'writeJPEGRepresentationOfImage:toURL:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'CIContext', b'writePNGRepresentationOfImage:toURL:format:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {7: {'type_modifier': b'o'}}}) - r(b'CIFaceFeature', b'hasFaceAngle', {'retval': {'type': b'Z'}}) - r(b'CIFaceFeature', b'hasLeftEyePosition', {'retval': {'type': b'Z'}}) - r(b'CIFaceFeature', b'hasMouthPosition', {'retval': {'type': b'Z'}}) - r(b'CIFaceFeature', b'hasRightEyePosition', {'retval': {'type': b'Z'}}) - r(b'CIFaceFeature', b'hasSmile', {'retval': {'type': b'Z'}}) - r(b'CIFaceFeature', b'hasTrackingFrameCount', {'retval': {'type': b'Z'}}) - r(b'CIFaceFeature', b'hasTrackingID', {'retval': {'type': b'Z'}}) - r(b'CIFaceFeature', b'leftEyeClosed', {'retval': {'type': b'Z'}}) - r(b'CIFaceFeature', b'rightEyeClosed', {'retval': {'type': b'Z'}}) - r(b'CIFilter', b'apply:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'CIFilter', b'filterArrayFromSerializedXMP:inputImageExtent:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CIFilter', b'filterWithName:keysAndValues:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'CIFilter', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'CIFilter', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CIFilterGenerator', b'writeToURL:atomically:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type': b'Z'}}}) - r(b'CIFilterShape', b'transformBy:interior:', {'arguments': {3: {'type': b'Z'}}}) - r(b'CIImage', b'imageWithTexture:size:flipped:colorSpace:', {'arguments': {4: {'type': b'Z'}}}) - r(b'CIImage', b'imageWithTexture:size:flipped:options:', {'arguments': {4: {'type': b'Z'}}}) - r(b'CIImage', b'initWithTexture:size:flipped:colorSpace:', {'arguments': {4: {'type': b'Z'}}}) - r(b'CIImage', b'initWithTexture:size:flipped:options:', {'arguments': {4: {'type': b'Z'}}}) - r(b'CIImage', b'writeHEIFRepresentationOfImage:toURL:format:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {7: {'type_modifier': b'o'}}}) - r(b'CIImage', b'writeJPEGRepresentationOfImage:toURL:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}}) - r(b'CIImage', b'writePNGRepresentationOfImage:toURL:format:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {7: {'type_modifier': b'o'}}}) - r(b'CIImageProcessorKernel', b'applyWithExtent:inputs:arguments:error:', {'arguments': {5: {'type_modifier': b'o'}}}) - r(b'CIImageProcessorKernel', b'processWithInputs:arguments:output:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'CIKernel', b'applyWithExtent:roiCallback:arguments:', {'arguments': {3: {'callable': {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'i'}, 2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}}}}) - r(b'CIKernel', b'kernelWithFunctionName:fromMetalLibraryData:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'CIKernel', b'kernelWithFunctionName:fromMetalLibraryData:outputPixelFormat:error:', {'arguments': {5: {'type_modifier': 
b'o'}}}) - r(b'CIKernel', b'setROISelector:', {'arguments': {2: {'sel_of_type': sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}@:i{CGRect={CGPoint=ff}{CGSize=ff}}@', b'{CGRect={CGPoint=dd}{CGSize=dd}}@:i{CGRect={CGPoint=dd}{CGSize=dd}}@')}}}) - r(b'CIPDF417CodeDescriptor', b'isCompact', {'retval': {'type': 'Z'}}) - r(b'CIPlugIn', b'loadPlugIn:allowExecutableCode:', {'arguments': {3: {'type': b'Z'}}}) - r(b'CIPlugIn', b'loadPlugIn:allowNonExecutable:', {'arguments': {3: {'type': b'Z'}}}) - r(b'CIRenderDestination', b'blendsInDestinationColorSpace', {'retval': {'type': 'Z'}}) - r(b'CIRenderDestination', b'initWithWidth:height:pixelFormat:commandBuffer:mtlTextureProvider:', {'arguments': {6: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'CIRenderDestination', b'isClamped', {'retval': {'type': 'Z'}}) - r(b'CIRenderDestination', b'isDithered', {'retval': {'type': 'Z'}}) - r(b'CIRenderDestination', b'isFlipped', {'retval': {'type': 'Z'}}) - r(b'CIRenderDestination', b'setBlendsInDestinationColorSpace:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CIRenderDestination', b'setClamped:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CIRenderDestination', b'setDithered:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CIRenderDestination', b'setFlipped:', {'arguments': {2: {'type': 'Z'}}}) - r(b'CIRenderTask', b'waitUntilCompletedAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}}) - r(b'CISampler', b'initWithImage:keysAndValues:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'CISampler', b'samplerWithImage:keysAndValues:', {'c_array_delimited_by_null': True, 'variadic': True}) - r(b'CIVector', b'initWithValues:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'CIVector', b'vectorWithValues:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'CIWarpKernel', b'applyWithExtent:roiCallback:inputImage:arguments:', {'arguments': {3: {'callable': {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'i'}, 2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}}}}) - r(b'NSObject', b'actionForLayer:forKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'animationDidStart:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'animationDidStop:finished:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'Z'}}}) - r(b'NSObject', b'autoreverses', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'baseAddress', {'retval': {'type': '^v', 'c_array_of_variable_length': True}}) - r(b'NSObject', b'beginTime', {'required': True, 'retval': {'type': b'd'}}) - r(b'NSObject', b'bytesPerRow', {'retval': {'type': 'L'}}) - r(b'NSObject', b'displayLayer:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'drawLayer:inContext:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'^{CGContext=}'}}}) - r(b'NSObject', b'duration', {'required': True, 'retval': {'type': b'd'}}) - r(b'NSObject', b'fillMode', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'filterWithName:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'format', {'retval': {'type': sel32or64(b'i', b'q')}}) - r(b'NSObject', b'invalidateLayoutOfLayer:', {'retval': 
{'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'layoutSublayersOfLayer:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'load:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'^v'}}}) - r(b'NSObject', b'preferredSizeOfLayer:', {'retval': {'type': sel32or64(b'{CGSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'provideImageData:bytesPerRow:origin::size::userInfo:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'^v', 'type_modifier': b'o', 'c_array_of_variable_length': True}, 3: {'type': sel32or64(b'L', b'Q')}, 4: {'type': sel32or64(b'L', b'Q')}, 5: {'type': sel32or64(b'L', b'Q')}, 6: {'type': sel32or64(b'L', b'Q')}, 7: {'type': sel32or64(b'L', b'Q')}, 8: {'type': b'@'}}}) - r(b'NSObject', b'region', {'retval': {'type': sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}) - r(b'NSObject', b'repeatCount', {'required': True, 'retval': {'type': b'f'}}) - r(b'NSObject', b'repeatDuration', {'required': True, 'retval': {'type': b'd'}}) - r(b'NSObject', b'runActionForKey:object:arguments:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'setAutoreverses:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setBeginTime:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}}) - r(b'NSObject', b'setDuration:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}}) - r(b'NSObject', b'setFillMode:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setRepeatCount:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}}) - r(b'NSObject', b'setRepeatDuration:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}}) - r(b'NSObject', b'setSpeed:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}}) - r(b'NSObject', b'setTimeOffset:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}}) - r(b'NSObject', b'speed', {'required': True, 'retval': {'type': b'f'}}) - r(b'NSObject', b'timeOffset', {'required': True, 'retval': {'type': b'd'}}) -finally: - objc._updatingMetadata(False) -protocols={'CAAnimationDelegate': objc.informal_protocol('CAAnimationDelegate', [objc.selector(None, b'animationDidStart:', b'v@:@', isRequired=False), objc.selector(None, b'animationDidStop:finished:', b'v@:@Z', isRequired=False)]), 'CALayerDelegate': objc.informal_protocol('CALayerDelegate', [objc.selector(None, b'drawLayer:inContext:', b'v@:@^{CGContext=}', isRequired=False), objc.selector(None, b'actionForLayer:forKey:', b'@@:@@', isRequired=False), objc.selector(None, b'displayLayer:', b'v@:@', isRequired=False), objc.selector(None, b'layoutSublayersOfLayer:', b'v@:@', isRequired=False)]), 'CIImageProvider': objc.informal_protocol('CIImageProvider', [objc.selector(None, b'provideImageData:bytesPerRow:origin::size::userInfo:', sel32or64(b'v@:^vLLLLL@', b'v@:^vQQQQQ@'), isRequired=False)]), 'CALayoutManager': objc.informal_protocol('CALayoutManager', [objc.selector(None, b'preferredSizeOfLayer:', sel32or64(b'{CGSize=ff}@:@', b'{CGSize=dd}@:@'), isRequired=False), objc.selector(None, b'layoutSublayersOfLayer:', b'v@:@', isRequired=False), objc.selector(None, b'invalidateLayoutOfLayer:', b'v@:@', isRequired=False)])} -expressions = {} - -# END 
OF FILE diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzCore/_metadata.pyc b/env/lib/python2.7/site-packages/Quartz/QuartzCore/_metadata.pyc deleted file mode 100644 index 378a50b0..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuartzCore/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzCore/_quartzcore.so b/env/lib/python2.7/site-packages/Quartz/QuartzCore/_quartzcore.so deleted file mode 100755 index 9a293b7d..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuartzCore/_quartzcore.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzFilters/__init__.py b/env/lib/python2.7/site-packages/Quartz/QuartzFilters/__init__.py deleted file mode 100644 index 4c6f3ecf..00000000 --- a/env/lib/python2.7/site-packages/Quartz/QuartzFilters/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the QuartzCore framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Foundation - -from Quartz.QuartzFilters import _metadata - -sys.modules['Quartz.QuartzFilters'] = mod = objc.ObjCLazyModule('Quartz.QuartzFilters', - "com.apple.quartzfilters", - objc.pathForFramework("/System/Library/Frameworks/Quartz.framework/Frameworks/QuartzFilters.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation,)) - -import sys -del sys.modules['Quartz.QuartzFilters._metadata'] diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzFilters/__init__.pyc b/env/lib/python2.7/site-packages/Quartz/QuartzFilters/__init__.pyc deleted file mode 100644 index c0751cf2..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuartzFilters/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzFilters/_metadata.py b/env/lib/python2.7/site-packages/Quartz/QuartzFilters/_metadata.py deleted file mode 100644 index e2ec8068..00000000 --- a/env/lib/python2.7/site-packages/Quartz/QuartzFilters/_metadata.py +++ /dev/null @@ -1,35 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 26 07:59:02 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$globalUpdateOK@Z$kQuartzFilterApplicationDomain$kQuartzFilterManagerDidAddFilterNotification$kQuartzFilterManagerDidModifyFilterNotification$kQuartzFilterManagerDidRemoveFilterNotification$kQuartzFilterManagerDidSelectFilterNotification$kQuartzFilterPDFWorkflowDomain$kQuartzFilterPrintingDomain$''' -enums = '''$$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'quartzFilterManager:didAddFilter:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'quartzFilterManager:didModifyFilter:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'quartzFilterManager:didRemoveFilter:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'quartzFilterManager:didSelectFilter:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 
b'@'}}}) - r(b'QuartzFilter', b'applyToContext:', {'retval': {'type': b'Z'}}) - r(b'QuartzFilterManager', b'selectFilter:', {'retval': {'type': b'Z'}}) -finally: - objc._updatingMetadata(False) -protocols={'QuartzFilterManagerDelegate': objc.informal_protocol('QuartzFilterManagerDelegate', [objc.selector(None, b'quartzFilterManager:didSelectFilter:', b'v@:@@', isRequired=False), objc.selector(None, b'quartzFilterManager:didAddFilter:', b'v@:@@', isRequired=False), objc.selector(None, b'quartzFilterManager:didModifyFilter:', b'v@:@@', isRequired=False), objc.selector(None, b'quartzFilterManager:didRemoveFilter:', b'v@:@@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Quartz/QuartzFilters/_metadata.pyc b/env/lib/python2.7/site-packages/Quartz/QuartzFilters/_metadata.pyc deleted file mode 100644 index 93512cb8..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuartzFilters/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/_QuickLookUI.so b/env/lib/python2.7/site-packages/Quartz/QuickLookUI/_QuickLookUI.so deleted file mode 100755 index 205ddbc7..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/_QuickLookUI.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/__init__.py b/env/lib/python2.7/site-packages/Quartz/QuickLookUI/__init__.py deleted file mode 100644 index 6804dde8..00000000 --- a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the QuickLookUI framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import Cocoa - -from Quartz.QuickLookUI import _metadata -import Quartz.QuickLookUI._QuickLookUI - -sys.modules['Quartz.QuickLookUI'] = mod = objc.ObjCLazyModule('Quartz.QuickLookUI', - "com.apple.QuickLookUIFramework", - objc.pathForFramework("/System/Library/Frameworks/Quartz.framework/Frameworks/QuickLookUI.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Cocoa,)) - -import sys -del sys.modules['Quartz.QuickLookUI._metadata'] diff --git a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/__init__.pyc b/env/lib/python2.7/site-packages/Quartz/QuickLookUI/__init__.pyc deleted file mode 100644 index d11a0662..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/_metadata.py b/env/lib/python2.7/site-packages/Quartz/QuickLookUI/_metadata.py deleted file mode 100644 index b4ac34bd..00000000 --- a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/_metadata.py +++ /dev/null @@ -1,51 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Tue Jun 26 07:59:02 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$QLPreviewViewStyleCompact@1$QLPreviewViewStyleNormal@0$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'acceptsPreviewPanelControl:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'beginPreviewPanelControl:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'endPreviewPanelControl:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'numberOfPreviewItemsInPreviewPanel:', {'required': True, 'retval': {'type': sel32or64(b'i', b'q')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'preparePreviewOfSearchableItemWithIdentifier:queryString:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'previewItemDisplayState', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'previewItemTitle', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'previewItemURL', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'previewPanel:handleEvent:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'previewPanel:previewItemAtIndex:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'previewPanel:sourceFrameOnScreenForPreviewItem:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'previewPanel:transitionImageForPreviewItem:contentRect:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'^{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'^{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'setPreviewItemDisplayState:', 
{'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setPreviewItemTitle:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setPreviewItemURL:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'QLPreviewPanel', b'enterFullScreenMode:withOptions:', {'retval': {'type': b'Z'}}) - r(b'QLPreviewPanel', b'isInFullScreenMode', {'retval': {'type': b'Z'}}) - r(b'QLPreviewPanel', b'sharedPreviewPanelExists', {'retval': {'type': b'Z'}}) - r(b'QLPreviewView', b'autostarts', {'retval': {'type': b'Z'}}) - r(b'QLPreviewView', b'setAutostarts:', {'arguments': {2: {'type': b'Z'}}}) - r(b'QLPreviewView', b'setShouldCloseWithWindow:', {'arguments': {2: {'type': b'Z'}}}) - r(b'QLPreviewView', b'shouldCloseWithWindow', {'retval': {'type': b'Z'}}) -finally: - objc._updatingMetadata(False) -protocols={'QLPreviewPanelController': objc.informal_protocol('QLPreviewPanelController', [objc.selector(None, b'beginPreviewPanelControl:', b'v@:@', isRequired=False), objc.selector(None, b'acceptsPreviewPanelControl:', b'Z@:@', isRequired=False), objc.selector(None, b'endPreviewPanelControl:', b'v@:@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/_metadata.pyc b/env/lib/python2.7/site-packages/Quartz/QuickLookUI/_metadata.pyc deleted file mode 100644 index fd7c366e..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/QuickLookUI/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Quartz/__init__.py b/env/lib/python2.7/site-packages/Quartz/__init__.py deleted file mode 100644 index 1c389742..00000000 --- a/env/lib/python2.7/site-packages/Quartz/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -Helper module that makes it easier to import all of Quartz -""" -import sys -import objc -import Foundation -import AppKit - -def _load(): - submods = [] - sys.modules['Quartz'] = mod = objc.ObjCLazyModule('Quartz', - None, None, {}, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, submods) - - - try: - from Quartz import CoreGraphics as m - submods.append(m) - mod.CoreGraphics = m - except ImportError: - pass - - try: - from Quartz import ImageIO as m - submods.append(m) - mod.ImageIO = m - except ImportError: - pass - - try: - from Quartz import ImageKit as m - submods.append(m) - mod.ImageIO = m - except ImportError: - pass - - try: - from Quartz import CoreVideo as m - submods.append(m) - mod.CoreVideo = m - except ImportError: - pass - - try: - from Quartz import QuartzCore as m - submods.append(m) - mod.QuartCore = m - except ImportError: - pass - - try: - from Quartz import ImageIO as m - submods.append(m) - mod.ImageIO = m - except ImportError: - pass - - try: - from Quartz import PDFKit as m - submods.append(m) - mod.PDFKit = m - except ImportError: - pass - - try: - from Quartz import QuartzFilters as m - submods.append(m) - mod.QuartzFilters = m - except ImportError: - pass - - try: - from Quartz import QuickLookUI as m - submods.append(m) - mod.QuickLookUI = m - except ImportError: - pass - -_load() diff --git a/env/lib/python2.7/site-packages/Quartz/__init__.pyc b/env/lib/python2.7/site-packages/Quartz/__init__.pyc deleted file mode 100644 index 98cffbd7..00000000 Binary files a/env/lib/python2.7/site-packages/Quartz/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SafariServices/_SafariServices.so 
b/env/lib/python2.7/site-packages/SafariServices/_SafariServices.so deleted file mode 100755 index fb975f54..00000000 Binary files a/env/lib/python2.7/site-packages/SafariServices/_SafariServices.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SafariServices/__init__.py b/env/lib/python2.7/site-packages/SafariServices/__init__.py deleted file mode 100644 index 0b011dea..00000000 --- a/env/lib/python2.7/site-packages/SafariServices/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -''' -Python mapping for the SafariServices framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from SafariServices import _metadata -from SafariServices._SafariServices import * - - -sys.modules['SafariServices'] = mod = objc.ObjCLazyModule( - "SafariServices", - "com.apple.SafariServices", - objc.pathForFramework("/System/Library/Frameworks/SafariServices.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['SafariServices._metadata'] diff --git a/env/lib/python2.7/site-packages/SafariServices/__init__.pyc b/env/lib/python2.7/site-packages/SafariServices/__init__.pyc deleted file mode 100644 index 0f81208e..00000000 Binary files a/env/lib/python2.7/site-packages/SafariServices/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SafariServices/_metadata.py b/env/lib/python2.7/site-packages/SafariServices/_metadata.py deleted file mode 100644 index 26fdc3c5..00000000 --- a/env/lib/python2.7/site-packages/SafariServices/_metadata.py +++ /dev/null @@ -1,66 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Feb 18 22:50:41 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$SFErrorDomain$''' -enums = '''$SFErrorLoadingInterrupted@3$SFErrorNoAttachmentFound@2$SFErrorNoExtensionFound@1$SFSafariServicesVersion10_0@0$SFSafariServicesVersion10_1@1$SFSafariServicesVersion11_0@2$SFSafariServicesVersion12_0@3$SFSafariServicesVersion12_1@4$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'additionalRequestHeadersForURL:completionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'contextMenuItemSelectedWithCommand:inPage:userInfo:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'messageReceivedFromContainingAppWithName:userInfo:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'messageReceivedWithName:fromPage:userInfo:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'popoverDidCloseInWindow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'popoverViewController', {'required': False, 'retval': {'type': b'@'}}) 
- r(b'NSObject', b'popoverWillShowInWindow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'toolbarItemClickedInWindow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'validateContextMenuItemWithCommand:inPage:userInfo:validationHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'validateToolbarItemInWindow:validationHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'SFContentBlockerManager', b'getStateOfContentBlockerWithIdentifier:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'SFContentBlockerManager', b'reloadContentBlockerWithIdentifier:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFContentBlockerState', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'SFContentBlockerState', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SFSafariApplication', b'dispatchMessageWithName:toExtensionWithIdentifier:userInfo:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariApplication', b'getActiveWindowWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariApplication', b'getAllWindowsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariApplication', b'getHostApplicationWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariApplication', b'openWindowWithURL:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariApplication', b'showPreferencesForExtensionWithIdentifier:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariExtension', b'getBaseURIWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariExtensionManager', b'getStateOfSafariExtensionWithIdentifier:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'SFSafariExtensionState', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'SFSafariPage', b'getContainingTabWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariPage', b'getPagePropertiesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariPage', 
b'getScreenshotOfVisibleAreaWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariPageProperties', b'isActive', {'retval': {'type': 'Z'}}) - r(b'SFSafariPageProperties', b'usesPrivateBrowsing', {'retval': {'type': 'Z'}}) - r(b'SFSafariTab', b'activateWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SFSafariTab', b'getActivePageWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariTab', b'getContainingWindowWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SFSafariTab', b'getPagesWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariToolbarItem', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SFSafariToolbarItem', b'setEnabled:withBadgeText:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SFSafariWindow', b'getActiveTabWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariWindow', b'getAllTabsWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariWindow', b'getToolbarItemWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SFSafariWindow', b'openTabWithURL:makeActiveIfPossible:completionHandler:', {'arguments': {3: {'type': 'Z'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/SafariServices/_metadata.pyc b/env/lib/python2.7/site-packages/SafariServices/_metadata.pyc deleted file mode 100644 index deae9386..00000000 Binary files a/env/lib/python2.7/site-packages/SafariServices/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SceneKit/_SceneKit.so b/env/lib/python2.7/site-packages/SceneKit/_SceneKit.so deleted file mode 100755 index 720e6cf1..00000000 Binary files a/env/lib/python2.7/site-packages/SceneKit/_SceneKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SceneKit/__init__.py b/env/lib/python2.7/site-packages/SceneKit/__init__.py deleted file mode 100644 index 8ac9c744..00000000 --- a/env/lib/python2.7/site-packages/SceneKit/__init__.py +++ /dev/null @@ -1,47 +0,0 @@ -''' -Python mapping for the SceneKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Quartz -import Cocoa - -from SceneKit import _metadata -from SceneKit import _SceneKit -from SceneKit._inlines import _inline_list_ - - -sys.modules['SceneKit'] = mod = objc.ObjCLazyModule( - "SceneKit", - "com.apple.SceneKit", - objc.pathForFramework("/System/Library/Frameworks/SceneKit.framework"), - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa, Quartz)) - - -import sys -del sys.modules['SceneKit._metadata'] - -if not hasattr(mod, 'SCNMatrix4Identity'): - # Two "inline" functions that use a symbol that is available on 10.10 or later, - # avoid crashes by removing the inline function wrappers when that symbol - # is not available. - try: - mod.SCNMatrix4MakeTranslation - del mod.SCNMatrix4MakeTranslation - except AttributeError: - pass - - try: - mod.SCNMatrix4MakeScale - del mod.SCNMatrix4MakeScale - except AttributeError: - pass diff --git a/env/lib/python2.7/site-packages/SceneKit/__init__.pyc b/env/lib/python2.7/site-packages/SceneKit/__init__.pyc deleted file mode 100644 index 7c26711a..00000000 Binary files a/env/lib/python2.7/site-packages/SceneKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SceneKit/_inlines.so b/env/lib/python2.7/site-packages/SceneKit/_inlines.so deleted file mode 100755 index 2ef46f17..00000000 Binary files a/env/lib/python2.7/site-packages/SceneKit/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SceneKit/_metadata.py b/env/lib/python2.7/site-packages/SceneKit/_metadata.py deleted file mode 100644 index 31690879..00000000 --- a/env/lib/python2.7/site-packages/SceneKit/_metadata.py +++ /dev/null @@ -1,302 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Thu Jul 5 22:37:23 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'SCNVector4': objc.createStructType('SCNVector4', sel32or64(b'{SCNVector4=ffff}', b'{SCNVector4=dddd}'), ['x', 'y', 'z', 'w']), 'SCNVector3': objc.createStructType('SCNVector3', sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}'), ['x', 'y', 'z'])}) -constants = 
'''$SCNConsistencyElementIDErrorKey$SCNConsistencyElementTypeErrorKey$SCNConsistencyLineNumberErrorKey$SCNDetailedErrorsKey$SCNErrorDomain$SCNGeometrySourceSemanticBoneIndices$SCNGeometrySourceSemanticBoneWeights$SCNGeometrySourceSemanticColor$SCNGeometrySourceSemanticEdgeCrease$SCNGeometrySourceSemanticNormal$SCNGeometrySourceSemanticTangent$SCNGeometrySourceSemanticTexcoord$SCNGeometrySourceSemanticVertex$SCNGeometrySourceSemanticVertexCrease$SCNHitTestBackFaceCullingKey$SCNHitTestBoundingBoxOnlyKey$SCNHitTestClipToZRangeKey$SCNHitTestFirstFoundOnlyKey$SCNHitTestIgnoreChildNodesKey$SCNHitTestIgnoreHiddenNodesKey$SCNHitTestOptionCategoryBitMask$SCNHitTestOptionSearchMode$SCNHitTestRootNodeKey$SCNHitTestSortResultsKey$SCNLightAttenuationEndKey$SCNLightAttenuationFalloffExponentKey$SCNLightAttenuationStartKey$SCNLightShadowFarClippingKey$SCNLightShadowNearClippingKey$SCNLightSpotInnerAngleKey$SCNLightSpotOuterAngleKey$SCNLightTypeAmbient$SCNLightTypeDirectional$SCNLightTypeIES$SCNLightTypeOmni$SCNLightTypeProbe$SCNLightTypeSpot$SCNLightingModelBlinn$SCNLightingModelConstant$SCNLightingModelLambert$SCNLightingModelPhong$SCNLightingModelPhysicallyBased$SCNModelTransform$SCNModelViewProjectionTransform$SCNModelViewTransform$SCNNormalTransform$SCNParticlePropertyAngle$SCNParticlePropertyAngularVelocity$SCNParticlePropertyBounce$SCNParticlePropertyCharge$SCNParticlePropertyColor$SCNParticlePropertyContactNormal$SCNParticlePropertyContactPoint$SCNParticlePropertyFrame$SCNParticlePropertyFrameRate$SCNParticlePropertyFriction$SCNParticlePropertyLife$SCNParticlePropertyOpacity$SCNParticlePropertyPosition$SCNParticlePropertyRotationAxis$SCNParticlePropertySize$SCNParticlePropertyVelocity$SCNPhysicsShapeKeepAsCompoundKey$SCNPhysicsShapeOptionCollisionMargin$SCNPhysicsShapeScaleKey$SCNPhysicsShapeTypeBoundingBox$SCNPhysicsShapeTypeConcavePolyhedron$SCNPhysicsShapeTypeConvexHull$SCNPhysicsShapeTypeKey$SCNPhysicsTestBackfaceCullingKey$SCNPhysicsTestCollisionBitMaskKey$SCNPhysicsTestSearchModeAll$SCNPhysicsTestSearchModeAny$SCNPhysicsTestSearchModeClosest$SCNPhysicsTestSearchModeKey$SCNPreferLowPowerDeviceKey$SCNPreferredDeviceKey$SCNPreferredRenderingAPIKey$SCNProgramMappingChannelKey$SCNProjectionTransform$SCNSceneEndTimeAttributeKey$SCNSceneExportDestinationURL$SCNSceneFrameRateAttributeKey$SCNSceneSourceAnimationImportPolicyDoNotPlay$SCNSceneSourceAnimationImportPolicyKey$SCNSceneSourceAnimationImportPolicyPlay$SCNSceneSourceAnimationImportPolicyPlayRepeatedly$SCNSceneSourceAnimationImportPolicyPlayUsingSceneTimeBase$SCNSceneSourceAssetAuthorKey$SCNSceneSourceAssetAuthoringToolKey$SCNSceneSourceAssetContributorsKey$SCNSceneSourceAssetCreatedDateKey$SCNSceneSourceAssetDirectoryURLsKey$SCNSceneSourceAssetModifiedDateKey$SCNSceneSourceAssetUnitKey$SCNSceneSourceAssetUnitMeterKey$SCNSceneSourceAssetUnitNameKey$SCNSceneSourceAssetUpAxisKey$SCNSceneSourceCheckConsistencyKey$SCNSceneSourceConvertToYUpKey$SCNSceneSourceConvertUnitsToMetersKey$SCNSceneSourceCreateNormalsIfAbsentKey$SCNSceneSourceFlattenSceneKey$SCNSceneSourceLoadingOptionPreserveOriginalTopology$SCNSceneSourceOverrideAssetURLsKey$SCNSceneSourceStrictConformanceKey$SCNSceneSourceUseSafeModeKey$SCNSceneStartTimeAttributeKey$SCNSceneUpAxisAttributeKey$SCNShaderModifierEntryPointFragment$SCNShaderModifierEntryPointGeometry$SCNShaderModifierEntryPointLightingModel$SCNShaderModifierEntryPointSurface$SCNViewTransform$''' -constants = constants + '$SCNMatrix4Identity@%s$'%(sel32or64('{CATransform3D=ffffffffffffffff}', 
'{CATransform3D=dddddddddddddddd}'),) -constants = constants + '$SCNVector4Zero@%s$'%(sel32or64('{SCNVector4=ffff}', '{SCNVector4=dddd}'),) -constants = constants + '$SCNVector3Zero@%s$'%(sel32or64('{SCNVector3=fff}', '{SCNVector3=ddd}'),) -enums = '''$SCNActionTimingModeEaseIn@1$SCNActionTimingModeEaseInEaseOut@3$SCNActionTimingModeEaseOut@2$SCNActionTimingModeLinear@0$SCNAntialiasingModeMultisampling16X@4$SCNAntialiasingModeMultisampling2X@1$SCNAntialiasingModeMultisampling4X@2$SCNAntialiasingModeMultisampling8X@3$SCNAntialiasingModeNone@0$SCNBillboardAxisAll@7$SCNBillboardAxisX@1$SCNBillboardAxisY@2$SCNBillboardAxisZ@4$SCNBlendModeAdd@1$SCNBlendModeAlpha@0$SCNBlendModeMax@6$SCNBlendModeMultiply@3$SCNBlendModeReplace@5$SCNBlendModeScreen@4$SCNBlendModeSubtract@2$SCNBufferFrequencyPerFrame@0$SCNBufferFrequencyPerNode@1$SCNBufferFrequencyPerShadable@2$SCNCameraProjectionDirectionHorizontal@1$SCNCameraProjectionDirectionVertical@0$SCNChamferModeBack@2$SCNChamferModeBoth@0$SCNChamferModeFront@1$SCNClamp@1$SCNClampToBorder@3$SCNColorMaskAll@15$SCNColorMaskAlpha@1$SCNColorMaskBlue@2$SCNColorMaskGreen@4$SCNColorMaskNone@0$SCNColorMaskRed@8$SCNConsistencyInvalidArgumentError@1002$SCNConsistencyInvalidCountError@1001$SCNConsistencyInvalidURIError@1000$SCNConsistencyMissingAttributeError@1004$SCNConsistencyMissingElementError@1003$SCNConsistencyXMLSchemaValidationError@1005$SCNCullBack@0$SCNCullFront@1$SCNDebugOptionNone@0$SCNDebugOptionRenderAsWireframe@64$SCNDebugOptionShowBoundingBoxes@2$SCNDebugOptionShowCameras@1024$SCNDebugOptionShowConstraints@512$SCNDebugOptionShowCreases@256$SCNDebugOptionShowLightExtents@8$SCNDebugOptionShowLightInfluences@4$SCNDebugOptionShowPhysicsFields@16$SCNDebugOptionShowPhysicsShapes@1$SCNDebugOptionShowSkeletons@128$SCNDebugOptionShowWireframe@32$SCNFillModeFill@0$SCNFillModeLines@1$SCNFilterModeLinear@2$SCNFilterModeNearest@1$SCNFilterModeNone@0$SCNGeometryPrimitiveTypeLine@2$SCNGeometryPrimitiveTypePoint@3$SCNGeometryPrimitiveTypePolygon@4$SCNGeometryPrimitiveTypeTriangleStrip@1$SCNGeometryPrimitiveTypeTriangles@0$SCNHitTestSearchModeAll@1$SCNHitTestSearchModeAny@2$SCNHitTestSearchModeClosest@0$SCNInteractionModeFly@0$SCNInteractionModeOrbitAngleMapping@2$SCNInteractionModeOrbitArcball@4$SCNInteractionModeOrbitCenteredArcball@3$SCNInteractionModeOrbitTurntable@1$SCNInteractionModePan@5$SCNInteractionModeTruck@6$SCNLinearFiltering@2$SCNMirror@4$SCNMorpherCalculationModeAdditive@1$SCNMorpherCalculationModeNormalized@0$SCNMovabilityHintFixed@0$SCNMovabilityHintMovable@1$SCNNearestFiltering@1$SCNNoFiltering@0$SCNNodeFocusBehaviorFocusable@2$SCNNodeFocusBehaviorNone@0$SCNNodeFocusBehaviorOccluding@1$SCNParticleBirthDirectionConstant@0$SCNParticleBirthDirectionRandom@2$SCNParticleBirthDirectionSurfaceNormal@1$SCNParticleBirthLocationSurface@0$SCNParticleBirthLocationVertex@2$SCNParticleBirthLocationVolume@1$SCNParticleBlendModeAdditive@0$SCNParticleBlendModeAlpha@4$SCNParticleBlendModeMultiply@2$SCNParticleBlendModeReplace@5$SCNParticleBlendModeScreen@3$SCNParticleBlendModeSubtract@1$SCNParticleEventBirth@0$SCNParticleEventCollision@2$SCNParticleEventDeath@1$SCNParticleImageSequenceAnimationModeAutoReverse@2$SCNParticleImageSequenceAnimationModeClamp@1$SCNParticleImageSequenceAnimationModeRepeat@0$SCNParticleInputModeOverDistance@1$SCNParticleInputModeOverLife@0$SCNParticleInputModeOverOtherProperty@2$SCNParticleModifierStagePostCollision@3$SCNParticleModifierStagePostDynamics@1$SCNParticleModifierStagePreCollision@2$SCNParticleModifierStagePreDynamics@0$SCNParticleOr
ientationModeBillboardScreenAligned@0$SCNParticleOrientationModeBillboardViewAligned@1$SCNParticleOrientationModeBillboardYAligned@3$SCNParticleOrientationModeFree@2$SCNParticleSortingModeDistance@2$SCNParticleSortingModeNone@0$SCNParticleSortingModeOldestFirst@3$SCNParticleSortingModeProjectedDepth@1$SCNParticleSortingModeYoungestFirst@4$SCNPhysicsBodyTypeDynamic@1$SCNPhysicsBodyTypeKinematic@2$SCNPhysicsBodyTypeStatic@0$SCNPhysicsCollisionCategoryDefault@1$SCNPhysicsCollisionCategoryStatic@2$SCNPhysicsFieldScopeInsideExtent@0$SCNPhysicsFieldScopeOutsideExtent@1$SCNProgramCompilationError@1$SCNReferenceLoadingPolicyImmediate@0$SCNReferenceLoadingPolicyOnDemand@1$SCNRenderingAPIMetal@0$SCNRenderingAPIOpenGLCore32@2$SCNRenderingAPIOpenGLCore41@3$SCNRenderingAPIOpenGLLegacy@1$SCNRepeat@2$SCNSceneSourceStatusComplete@16$SCNSceneSourceStatusError@-1$SCNSceneSourceStatusParsing@4$SCNSceneSourceStatusProcessing@12$SCNSceneSourceStatusValidating@8$SCNShadowModeDeferred@1$SCNShadowModeForward@0$SCNShadowModeModulated@2$SCNTessellationSmoothingModeNone@0$SCNTessellationSmoothingModePNTriangles@1$SCNTessellationSmoothingModePhong@2$SCNTransparencyModeAOne@0$SCNTransparencyModeDefault@0$SCNTransparencyModeDualLayer@3$SCNTransparencyModeRGBZero@1$SCNTransparencyModeSingleLayer@2$SCNWrapModeClamp@1$SCNWrapModeClampToBorder@3$SCNWrapModeMirror@4$SCNWrapModeRepeat@2$''' -misc.update({'SCNPhysicsCollisionCategoryAll': sel32or64(4294967295, 18446744073709551615)}) -misc.update({}) -functions={'SCNMatrix4IsIdentity': (sel32or64(b'B{CATransform3D=ffffffffffffffff}', b'B{CATransform3D=dddddddddddddddd}'),), 'SCNVector4Make': (sel32or64(b'{SCNVector4=ffff}ffff', b'{SCNVector4=dddd}dddd'),), 'SCNMatrix4EqualToMatrix4': (sel32or64(b'B{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}', b'B{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}'),), 'SCNMatrix4MakeTranslation': (sel32or64(b'{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}ddd'),), 'SCNMatrix4FromMat4': (sel32or64(b'{CATransform3D=ffffffffffffffff}{_matrix_float4x4=[4$]}', b'{CATransform3D=dddddddddddddddd}{_matrix_float4x4=[4$]}'),), 'SCNMatrix4Scale': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}ddd'),), 'SCNMatrix4MakeRotation': (sel32or64(b'{CATransform3D=ffffffffffffffff}ffff', b'{CATransform3D=dddddddddddddddd}dddd'),), 'SCNVector4FromGLKVector4': (sel32or64(b'{SCNVector4=ffff}(_GLKVector4={=ffff}{=ffff}{=ffff}[4f])', b'{SCNVector4=dddd}(_GLKVector4={=ffff}{=ffff}{=ffff}[4f])'),), 'SCNExportJavaScriptModule': (b'v@',), 'SCNVector4ToFloat4': (sel32or64(b'${SCNVector4=ffff}', b'${SCNVector4=dddd}'),), 'SCNVector3EqualToVector3': (sel32or64(b'B{SCNVector3=fff}{SCNVector3=fff}', b'B{SCNVector3=ddd}{SCNVector3=ddd}'),), 'SCNMatrix4ToGLKMatrix4': (sel32or64(b'(_GLKMatrix4={=ffffffffffffffff}[16f]){CATransform3D=ffffffffffffffff}', b'(_GLKMatrix4={=ffffffffffffffff}[16f]){CATransform3D=dddddddddddddddd}'),), 'SCNMatrix4ToMat4': (sel32or64(b'{_matrix_float4x4=[4$]}{CATransform3D=ffffffffffffffff}', b'{_matrix_float4x4=[4$]}{CATransform3D=dddddddddddddddd}'),), 'SCNVector4EqualToVector4': (sel32or64(b'B{SCNVector4=ffff}{SCNVector4=ffff}', b'B{SCNVector4=dddd}{SCNVector4=dddd}'),), 'SCNMatrix4MakeScale': (sel32or64(b'{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}ddd'),), 'SCNMatrix4FromGLKMatrix4': 
(sel32or64(b'{CATransform3D=ffffffffffffffff}(_GLKMatrix4={=ffffffffffffffff}[16f])', b'{CATransform3D=dddddddddddddddd}(_GLKMatrix4={=ffffffffffffffff}[16f])'),), 'SCNMatrix4Invert': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}'),), 'SCNVector4ToGLKVector4': (sel32or64(b'(_GLKVector4={=ffff}{=ffff}{=ffff}[4f]){SCNVector4=ffff}', b'(_GLKVector4={=ffff}{=ffff}{=ffff}[4f]){SCNVector4=dddd}'),), 'SCNMatrix4Rotate': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}ffff', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}dddd'),), 'SCNVector3ToGLKVector3': (sel32or64(b'(_GLKVector3={=fff}{=fff}{=fff}[3f]){SCNVector3=fff}', b'(_GLKVector3={=fff}{=fff}{=fff}[3f]){SCNVector3=ddd}'),), 'SCNVector3Make': (sel32or64(b'{SCNVector3=fff}fff', b'{SCNVector3=ddd}ddd'),), 'SCNVector4FromFloat4': (sel32or64(b'{SCNVector4=ffff}$', b'{SCNVector4=dddd}$'),), 'SCNVector3FromGLKVector3': (sel32or64(b'{SCNVector3=fff}(_GLKVector3={=fff}{=fff}{=fff}[3f])', b'{SCNVector3=ddd}(_GLKVector3={=fff}{=fff}{=fff}[3f])'),), 'SCNMatrix4Mult': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}'),), 'SCNMatrix4Translate': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}ddd'),)} -aliases = {'SCNHitTestOptionSortResults': 'SCNHitTestSortResultsKey', 'SCNPhysicsTestOptionSearchMode': 'SCNPhysicsTestSearchModeKey', 'SCNHitTestOptionIgnoreHiddenNodes': 'SCNHitTestIgnoreHiddenNodesKey', 'SCNSceneAttributeFrameRate': 'SCNSceneFrameRateAttributeKey', 'SCNRepeat': 'SCNWrapModeRepeat', 'SCNLinearFiltering': 'SCNFilterModeLinear', 'SCNSceneSourceLoadingOptionStrictConformance': 'SCNSceneSourceStrictConformanceKey', 'SCNMirror': 'SCNWrapModeMirror', 'SCNQuaternion': 'SCNVector4', 'SCNSceneSourceLoadingOptionOverrideAssetURLs': 'SCNSceneSourceOverrideAssetURLsKey', 'SCNViewOptionPreferredRenderingAPI': 'SCNPreferredRenderingAPIKey', 'SCNSceneAttributeEndTime': 'SCNSceneEndTimeAttributeKey', 'SCNViewOptionPreferredDevice': 'SCNPreferredDeviceKey', 'SCNHitTestOptionFirstFoundOnly': 'SCNHitTestFirstFoundOnlyKey', 'SCNSceneSourceLoadingOptionCheckConsistency': 'SCNSceneSourceCheckConsistencyKey', 'SCNSceneSourceLoadingOptionFlattenScene': 'SCNSceneSourceFlattenSceneKey', 'SCNPhysicsShapeOptionKeepAsCompound': 'SCNPhysicsShapeKeepAsCompoundKey', 'SCNSceneSourceLoadingOptionAssetDirectoryURLs': 'SCNSceneSourceAssetDirectoryURLsKey', 'SCNPhysicsTestOptionBackfaceCulling': 'SCNPhysicsTestBackfaceCullingKey', 'SCNNoFiltering': 'SCNFilterModeNone', 'SCNPhysicsShapeOptionScale': 'SCNPhysicsShapeScaleKey', 'SCNViewOptionPreferLowPowerDevice': 'SCNPreferLowPowerDeviceKey', 'SCNHitTestOptionRootNode': 'SCNHitTestRootNodeKey', 'SCNSceneSourceLoadingOptionCreateNormalsIfAbsent': 'SCNSceneSourceCreateNormalsIfAbsentKey', 'SCNSceneSourceLoadingOptionConvertUnitsToMeters': 'SCNSceneSourceConvertUnitsToMetersKey', 'SCNSceneSourceLoadingOptionAnimationImportPolicy': 'SCNSceneSourceAnimationImportPolicyKey', 'SCNSceneSourceLoadingOptionUseSafeMode': 'SCNSceneSourceUseSafeModeKey', 'SCNClamp': 'SCNWrapModeClamp', 'SCNSceneAttributeStartTime': 'SCNSceneStartTimeAttributeKey', 'SCNPhysicsTestOptionCollisionBitMask': 'SCNPhysicsTestCollisionBitMaskKey', 
'SCNMatrix4': 'CATransform3D', 'SCNHitTestOptionClipToZRange': 'SCNHitTestClipToZRangeKey', 'SCNHitTestOptionBackFaceCulling': 'SCNHitTestBackFaceCullingKey', 'SCNHitTestOptionBoundingBoxOnly': 'SCNHitTestBoundingBoxOnlyKey', 'SCNSceneAttributeUpAxis': 'SCNSceneUpAxisAttributeKey', 'SCN_EXTERN': 'FOUNDATION_EXTERN', 'SCNClampToBorder': 'SCNWrapModeClampToBorder', 'SCNHitTestOptionIgnoreChildNodes': 'SCNHitTestIgnoreChildNodesKey', 'SCNSceneSourceLoadingOptionConvertToYUp': 'SCNSceneSourceConvertToYUpKey', 'SCNNearestFiltering': 'SCNFilterModeNearest', 'SCNPhysicsShapeOptionType': 'SCNPhysicsShapeTypeKey'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'CAAnimation', b'setUsesSceneTimeBase:', {'arguments': {2: {'type': b'Z'}}}) - r(b'CAAnimation', b'usesSceneTimeBase', {'retval': {'type': b'Z'}}) - r(b'NSObject', b'actionForKey:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'addAnimation:forKey:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'allowsTranslation', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'animationForKey:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'animationKeys', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'autoSwitchToFreeCamera', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'autoenablesDefaultLighting', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'colorPixelFormat', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'context', {'required': True, 'retval': {'type': b'^v'}}) - r(b'NSObject', b'currentTime', {'required': True, 'retval': {'type': b'd'}}) - r(b'NSObject', b'debugOptions', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'delegate', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'depthPixelFormat', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'flyModeVelocity', {'retval': {'type': sel32or64(b'f', b'd')}}) - r(b'NSObject', b'getBoundingBoxMin:max:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': sel32or64(b'^{SCNVector3=fff}', b'^{SCNVector3=ddd}'), 'type_modifier': b'o'}, 3: {'type': sel32or64(b'^{SCNVector3=fff}', b'^{SCNVector3=ddd}'), 'type_modifier': b'o'}}}) - r(b'NSObject', b'getBoundingSphereCenter:radius:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': sel32or64(b'^{SCNVector3=fff}', b'^{SCNVector3=ddd}'), 'type_modifier': b'o'}, 3: {'type': sel32or64(b'^f', b'^d'), 'type_modifier': b'o'}}}) - r(b'NSObject', b'handleBindingOfSymbol:usingBlock:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'I'}, 2: {'type': b'I'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}, 'type': b'@?'}}}) - r(b'NSObject', b'handleUnbindingOfSymbol:usingBlock:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'I'}, 2: {'type': b'I'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'hasActions', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'hitTest:options:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'{CGPoint=ff}', b'{CGPoint=dd}')}, 3: {'type': b'@'}}}) - r(b'NSObject', b'isAnimationForKeyPaused:', 
{'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'isJitteringEnabled', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'isNodeInsideFrustum:withPointOfView:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'isPlaying', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'loops', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'overlaySKScene', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'panSensitivity', {'retval': {'type': sel32or64(b'f', b'd')}}) - r(b'NSObject', b'pauseAnimationForKey:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'physicsWorld:didBeginContact:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'physicsWorld:didEndContact:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'physicsWorld:didUpdateContact:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'pointOfView', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'prepareObject:shouldAbortBlock:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NSObject', b'prepareObjects:withCompletionHandler:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'Z'}}}, 'type': '@?'}}}) - r(b'NSObject', b'presentScene:withTransition:incomingPointOfView:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NSObject', b'program', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'program:bindValueForSymbol:atLocation:programID:renderer:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'I'}, 5: {'type': b'I'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'program:handleError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'program:unbindValueForSymbol:atLocation:programID:renderer:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'I'}, 5: {'type': b'I'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'programIsOpaque:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'projectPoint:', {'required': True, 'retval': {'type': sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}')}, 'arguments': {2: {'type': sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}')}}}) - r(b'NSObject', b'removeActionForKey:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'removeAllActions', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'removeAllAnimations', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'removeAnimationForKey:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'removeAnimationForKey:blendOutDuration:', {'arguments': {3: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', 
b'removeAnimationForKey:fadeOutDuration:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'renderNode:renderer:arguments:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'renderer:didApplyAnimationsAtTime:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'd'}}}) - r(b'NSObject', b'renderer:didApplyConstraintsAtTime:atTime:', {'arguments': {4: {'type': 'd'}}}) - r(b'NSObject', b'renderer:didRenderScene:atTime:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'd'}}}) - r(b'NSObject', b'renderer:didSimulatePhysicsAtTime:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'd'}}}) - r(b'NSObject', b'renderer:updateAtTime:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'd'}}}) - r(b'NSObject', b'renderer:willRenderScene:atTime:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'd'}}}) - r(b'NSObject', b'renderingAPI', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'resumeAnimationForKey:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'rotationSensitivity', {'retval': {'type': sel32or64(b'f', b'd')}}) - r(b'NSObject', b'runAction:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'runAction:completionHandler:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NSObject', b'runAction:forKey:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'runAction:forKey:completionHandler:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NSObject', b'scene', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'sceneTime', {'required': True, 'retval': {'type': b'd'}}) - r(b'NSObject', b'setAllowsTranslation:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSObject', b'setAutoSwitchToFreeCamera:', {'arguments': {2: {'type': 'Z'}}}) - r(b'NSObject', b'setAutoenablesDefaultLighting:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setBoundingBoxMin:max:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'^{SCNVector3=fff}', b'^{SCNVector3=ddd}'), 'type_modifier': b'n'}, 3: {'type': sel32or64(b'^{SCNVector3=fff}', b'^{SCNVector3=ddd}'), 'type_modifier': b'n'}}}) - r(b'NSObject', b'setColorPixelFormat:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'setCurrentTime:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}}) - r(b'NSObject', b'setDebugOptions:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'setDelegate:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setDepthPixelFormat:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'setFlyModeVelocity:', {'arguments': {2: {'type': 
sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'setJitteringEnabled:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setLoops:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setOverlaySKScene:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setPanSensitivity:', {'arguments': {2: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'setPlaying:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setPointOfView:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setProgram:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setRenderingAPI:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'setRotationSensitivity:', {'arguments': {2: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'setScene:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setSceneTime:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}}) - r(b'NSObject', b'setShaderModifiers:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setShowsStatistics:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'NSObject', b'setSpeed:forAnimationKey:', {'arguments': {2: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'setStencilPixelFormat:', {'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'setTechnique:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setTruckSensitivity:', {'arguments': {2: {'type': sel32or64(b'f', b'd')}}}) - r(b'NSObject', b'shaderModifiers', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'showsStatistics', {'required': True, 'retval': {'type': b'Z'}}) - r(b'NSObject', b'stencilPixelFormat', {'retval': {'type': sel32or64(b'I', b'Q')}}) - r(b'NSObject', b'technique', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'truckSensitivity', {'retval': {'type': sel32or64(b'f', b'd')}}) - r(b'NSObject', b'unprojectPoint:', {'required': True, 'retval': {'type': sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}')}, 'arguments': {2: {'type': sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}')}}}) - r(b'NSObject', b'writeBytes:length:', {'arguments': {2: {'type': 'n^v', 'c_array_length_in_arg': 3}, 3: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'writeImage:withSceneDocumentURL:originalImageURL:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'SCNAction', b'customActionWithDuration:actionBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'f', b'd')}}}}}}) - r(b'SCNAction', b'playAudioSource:waitForCompletion:', {'arguments': {3: {'type': 'Z'}}}) - r(b'SCNAction', b'rotateToX:y:z:duration:shortestUnitArc:', {'arguments': {6: {'type': b'Z'}}}) - r(b'SCNAction', b'runBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'SCNAction', b'runBlock:queue:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - 
r(b'SCNAction', b'setTimingFunction:', {'arguments': {2: {'callable': {'retval': {'type': b'f'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'f'}}}}}}) - r(b'SCNAction', b'timingFunction', {'retval': {'callable': {'retval': {'type': b'f'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'f'}}}}}) - r(b'SCNAnimation', b'autoreverses', {'retval': {'type': 'Z'}}) - r(b'SCNAnimation', b'fillsBackward', {'retval': {'type': 'Z'}}) - r(b'SCNAnimation', b'fillsForward', {'retval': {'type': 'Z'}}) - r(b'SCNAnimation', b'isAdditive', {'retval': {'type': 'Z'}}) - r(b'SCNAnimation', b'isAppliedOnCompletion', {'retval': {'type': 'Z'}}) - r(b'SCNAnimation', b'isCumulative', {'retval': {'type': 'Z'}}) - r(b'SCNAnimation', b'isRemovedOnCompletion', {'retval': {'type': 'Z'}}) - r(b'SCNAnimation', b'setAdditive:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAnimation', b'setAppliedOnCompletion:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAnimation', b'setAutoreverses:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAnimation', b'setCumulative:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAnimation', b'setFillsBackward:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAnimation', b'setFillsForward:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAnimation', b'setRemovedOnCompletion:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAnimation', b'setUsesSceneTimeBase:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAnimation', b'usesSceneTimeBase', {'retval': {'type': 'Z'}}) - r(b'SCNAnimationEvent', b'animationEventWithKeyTime:block:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'Z'}}}}}}) - r(b'SCNAnimationPlayer', b'paused', {'retval': {'type': 'Z'}}) - r(b'SCNAnimationPlayer', b'setPaused:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAudioPlayer', b'didFinishPlayback', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'SCNAudioPlayer', b'setDidFinishPlayback:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SCNAudioPlayer', b'setWillStartPlayback:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SCNAudioPlayer', b'willStartPlayback', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'SCNAudioSource', b'didFinishPlayback', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'SCNAudioSource', b'isPositional', {'retval': {'type': 'Z'}}) - r(b'SCNAudioSource', b'setDidFinishPlayback:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SCNAudioSource', b'setPositional:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAudioSource', b'setShouldStream:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNAudioSource', b'setWillStartPlayback:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SCNAudioSource', b'shouldStream', {'retval': {'type': 'Z'}}) - r(b'SCNAudioSource', b'willStartPlayback', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'SCNCamera', b'automaticallyAdjustsZRange', {'retval': {'type': b'Z'}}) - r(b'SCNCamera', b'setAutomaticallyAdjustsZRange:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNCamera', b'setUsesOrthographicProjection:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNCamera', b'setWantsDepthOfField:', {'arguments': 
{2: {'type': 'Z'}}}) - r(b'SCNCamera', b'setWantsExposureAdaptation:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNCamera', b'setWantsHDR:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNCamera', b'usesOrthographicProjection', {'retval': {'type': b'Z'}}) - r(b'SCNCamera', b'wantsDepthOfField', {'retval': {'type': 'Z'}}) - r(b'SCNCamera', b'wantsExposureAdaptation', {'retval': {'type': 'Z'}}) - r(b'SCNCamera', b'wantsHDR', {'retval': {'type': 'Z'}}) - r(b'SCNCameraController', b'automaticTarget', {'retval': {'type': 'Z'}}) - r(b'SCNCameraController', b'inertiaEnabled', {'retval': {'type': 'Z'}}) - r(b'SCNCameraController', b'isInertiaRunning', {'retval': {'type': 'Z'}}) - r(b'SCNCameraController', b'setAutomaticTarget:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNCameraController', b'setInertiaEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNConstraint', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'SCNConstraint', b'isIncremental', {'retval': {'type': 'Z'}}) - r(b'SCNConstraint', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNConstraint', b'setIncremental:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNGeometry', b'setWantsAdaptiveSubdivision:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNGeometry', b'wantsAdaptiveSubdivision', {'retval': {'type': 'Z'}}) - r(b'SCNGeometrySource', b'floatComponents', {'retval': {'type': b'Z'}}) - r(b'SCNGeometrySource', b'geometrySourceWithData:semantic:vectorCount:floatComponents:componentsPerVector:bytesPerComponent:dataOffset:dataStride:', {'arguments': {5: {'type': b'Z'}}}) - r(b'SCNGeometrySource', b'geometrySourceWithNormals:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'SCNGeometrySource', b'geometrySourceWithTextureCoordinates:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'SCNGeometrySource', b'geometrySourceWithVertices:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'SCNGeometryTessellator', b'isAdaptive', {'retval': {'type': 'Z'}}) - r(b'SCNGeometryTessellator', b'isScreenSpace', {'retval': {'type': 'Z'}}) - r(b'SCNGeometryTessellator', b'setAdaptive:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNGeometryTessellator', b'setScreenSpace:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNLight', b'automaticallyAdjustsShadowProjection', {'retval': {'type': 'Z'}}) - r(b'SCNLight', b'castsShadow', {'retval': {'type': b'Z'}}) - r(b'SCNLight', b'forcesBackFaceCasters', {'retval': {'type': 'Z'}}) - r(b'SCNLight', b'sampleDistributedShadowMaps', {'retval': {'type': 'Z'}}) - r(b'SCNLight', b'setAutomaticallyAdjustsShadowProjection:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNLight', b'setCastsShadow:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNLight', b'setForcesBackFaceCasters:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNLight', b'setSampleDistributedShadowMaps:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNLookAtConstraint', b'gimbalLockEnabled', {'retval': {'type': b'Z'}}) - r(b'SCNLookAtConstraint', b'setGimbalLockEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNMaterial', b'isDoubleSided', {'retval': {'type': b'Z'}}) - r(b'SCNMaterial', b'isLitPerPixel', {'retval': {'type': b'Z'}}) - r(b'SCNMaterial', b'locksAmbientWithDiffuse', {'retval': {'type': b'Z'}}) - r(b'SCNMaterial', b'readsFromDepthBuffer', {'retval': {'type': b'Z'}}) - r(b'SCNMaterial', b'setDoubleSided:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNMaterial', b'setLitPerPixel:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNMaterial', 
b'setLocksAmbientWithDiffuse:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNMaterial', b'setReadsFromDepthBuffer:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNMaterial', b'setWritesToDepthBuffer:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNMaterial', b'writesToDepthBuffer', {'retval': {'type': b'Z'}}) - r(b'SCNMorpher', b'setUnifiesNormals:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNMorpher', b'unifiesNormals', {'retval': {'type': 'Z'}}) - r(b'SCNNode', b'castsShadow', {'retval': {'type': b'Z'}}) - r(b'SCNNode', b'childNodeWithName:recursively:', {'arguments': {3: {'type': b'Z'}}}) - r(b'SCNNode', b'childNodesPassingTest:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^Z'}}}}}}) - r(b'SCNNode', b'enumerateChildNodesUsingBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^Z'}}}}}}) - r(b'SCNNode', b'enumerateHierarchyUsingBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'o^Z'}}}}}}) - r(b'SCNNode', b'isHidden', {'retval': {'type': b'Z'}}) - r(b'SCNNode', b'isPaused', {'retval': {'type': b'Z'}}) - r(b'SCNNode', b'setCastsShadow:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNNode', b'setHidden:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNNode', b'setPaused:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNParticleSystem', b'addModifierForProperties:atStage:withBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^^v'}, 2: {'type': sel32or64(b'^I', b'^Q')}, 3: {'type': sel32or64(b'i', b'q')}, 4: {'type': sel32or64(b'i', b'q')}, 5: {'type': b'f'}}}}}}) - r(b'SCNParticleSystem', b'affectedByGravity', {'retval': {'type': b'Z'}}) - r(b'SCNParticleSystem', b'affectedByPhysicsFields', {'retval': {'type': b'Z'}}) - r(b'SCNParticleSystem', b'handleEvent:forProperties:withBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^^v'}, 2: {'type': sel32or64(b'^I', b'^Q')}, 3: {'type': sel32or64(b'^I', b'^Q')}, 4: {'type': sel32or64(b'i', b'q')}}}}}}) - r(b'SCNParticleSystem', b'isBlackPassEnabled', {'retval': {'type': b'Z'}}) - r(b'SCNParticleSystem', b'isLightingEnabled', {'retval': {'type': b'Z'}}) - r(b'SCNParticleSystem', b'isLocal', {'retval': {'type': b'Z'}}) - r(b'SCNParticleSystem', b'loops', {'retval': {'type': b'Z'}}) - r(b'SCNParticleSystem', b'particleDiesOnCollision', {'retval': {'type': b'Z'}}) - r(b'SCNParticleSystem', b'setAffectedByGravity:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNParticleSystem', b'setAffectedByPhysicsFields:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNParticleSystem', b'setBlackPassEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNParticleSystem', b'setLightingEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNParticleSystem', b'setLocal:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNParticleSystem', b'setLoops:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNParticleSystem', b'setParticleDiesOnCollision:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNPhysicsBody', b'allowsResting', {'retval': {'type': b'Z'}}) - r(b'SCNPhysicsBody', b'applyForce:atPosition:impulse:', {'arguments': {4: {'type': b'Z'}}}) - r(b'SCNPhysicsBody', b'applyForce:impulse:', {'arguments': {3: {'type': b'Z'}}}) - r(b'SCNPhysicsBody', b'applyTorque:impulse:', {'arguments': {3: {'type': b'Z'}}}) - 
r(b'SCNPhysicsBody', b'isAffectedByGravity', {'retval': {'type': 'Z'}}) - r(b'SCNPhysicsBody', b'isResting', {'retval': {'type': b'Z'}}) - r(b'SCNPhysicsBody', b'setAffectedByGravity:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNPhysicsBody', b'setAllowsResting:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNPhysicsBody', b'setResting:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNPhysicsBody', b'setUsesDefaultMomentOfInertia:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNPhysicsBody', b'usesDefaultMomentOfInertia', {'retval': {'type': 'Z'}}) - r(b'SCNPhysicsField', b'customFieldWithEvaluationBlock:', {'arguments': {2: {'callable': {'retval': {'type': sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}')}, 2: {'type': sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}')}, 3: {'type': b'f'}, 4: {'type': b'f'}, 5: {'type': b'd'}}}}}}) - r(b'SCNPhysicsField', b'isActive', {'retval': {'type': b'Z'}}) - r(b'SCNPhysicsField', b'isExclusive', {'retval': {'type': b'Z'}}) - r(b'SCNPhysicsField', b'setActive:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNPhysicsField', b'setExclusive:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNPhysicsField', b'setUsesEllipsoidalExtent:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNPhysicsField', b'usesEllipsoidalExtent', {'retval': {'type': b'Z'}}) - r(b'SCNProgram', b'handleBindingOfBufferNamed:frequency:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}}}}) - r(b'SCNProgram', b'isOpaque', {'retval': {'type': b'Z'}}) - r(b'SCNProgram', b'setOpaque:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNReferenceNode', b'isLoaded', {'retval': {'type': 'Z'}}) - r(b'SCNReplicatorConstraint', b'replicatesOrientation', {'retval': {'type': 'Z'}}) - r(b'SCNReplicatorConstraint', b'replicatesPosition', {'retval': {'type': 'Z'}}) - r(b'SCNReplicatorConstraint', b'replicatesScale', {'retval': {'type': 'Z'}}) - r(b'SCNReplicatorConstraint', b'setReplicatesOrientation:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNReplicatorConstraint', b'setReplicatesPosition:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNReplicatorConstraint', b'setReplicatesScale:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SCNScene', b'isPaused', {'retval': {'type': b'Z'}}) - r(b'SCNScene', b'sceneWithURL:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'SCNScene', b'setPaused:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNScene', b'writeToURL:options:delegate:progressHandler:', {'retval': {'type': b'Z'}, 'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'f'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}}}}}}) - r(b'SCNSceneSource', b'entriesPassingTest:', {'arguments': {2: {'callable': {'retval': {'type': b'Z'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}}}}}}) - r(b'SCNSceneSource', b'sceneWithOptions:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'SCNSceneSource', b'sceneWithOptions:statusHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'f'}, 2: {'type': sel32or64(b'i', b'q')}, 3: {'type': b'@'}, 4: {'type': b'o^Z'}}}}}}) - r(b'SCNSphere', b'isGeodesic', {'retval': {'type': b'Z'}}) - r(b'SCNSphere', b'setGeodesic:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNTechnique', 
b'handleBindingOfSymbol:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'I'}, 2: {'type': b'I'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}}}}) - r(b'SCNText', b'isWrapped', {'retval': {'type': b'Z'}}) - r(b'SCNText', b'setWrapped:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNTransaction', b'completionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}) - r(b'SCNTransaction', b'disableActions', {'retval': {'type': b'Z'}}) - r(b'SCNTransaction', b'setCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SCNTransaction', b'setDisableActions:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SCNTransformConstraint', b'orientationConstraintInWorldSpace:withBlock:', {'arguments': {2: {'type': 'Z'}, 3: {'callable': {'retval': {'type': sel32or64(b'{SCNVector4=ffff}', b'{SCNVector4=dddd}')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'{SCNVector4=ffff}', b'{SCNVector4=dddd}')}}}}}}) - r(b'SCNTransformConstraint', b'positionConstraintInWorldSpace:withBlock:', {'arguments': {2: {'type': 'Z'}, 3: {'callable': {'retval': {'type': sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'{SCNVector3=fff}', b'{SCNVector3=ddd}')}}}}}}) - r(b'SCNTransformConstraint', b'transformConstraintInWorldSpace:withBlock:', {'arguments': {2: {'type': b'Z'}, 3: {'callable': {'retval': {'type': sel32or64(b'{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}')}}}}}}) - r(b'SCNView', b'allowsCameraControl', {'retval': {'type': 'Z'}}) - r(b'SCNView', b'rendersContinuously', {'retval': {'type': 'Z'}}) - r(b'SCNView', b'setAllowsCameraControl:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'Z'}}}) - r(b'SCNView', b'setRendersContinuously:', {'arguments': {2: {'type': 'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/SceneKit/_metadata.pyc b/env/lib/python2.7/site-packages/SceneKit/_metadata.pyc deleted file mode 100644 index 8a6b5492..00000000 Binary files a/env/lib/python2.7/site-packages/SceneKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ScreenSaver/__init__.py b/env/lib/python2.7/site-packages/ScreenSaver/__init__.py deleted file mode 100644 index 5fbba288..00000000 --- a/env/lib/python2.7/site-packages/ScreenSaver/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the ScreenSaver framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import AppKit - -from ScreenSaver import _metadata -from ScreenSaver._inlines import _inline_list_ - -sys.modules['ScreenSaver'] = mod = objc.ObjCLazyModule('ScreenSaver', - "com.apple.ScreenSaver", - objc.pathForFramework("/System/Library/Frameworks/ScreenSaver.framework"), - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( AppKit,)) - -import sys -del sys.modules['ScreenSaver._metadata'] diff --git a/env/lib/python2.7/site-packages/ScreenSaver/__init__.pyc b/env/lib/python2.7/site-packages/ScreenSaver/__init__.pyc deleted file mode 100644 index 127b9123..00000000 Binary files a/env/lib/python2.7/site-packages/ScreenSaver/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ScreenSaver/_inlines.so b/env/lib/python2.7/site-packages/ScreenSaver/_inlines.so deleted file mode 100755 index 793f0549..00000000 Binary files a/env/lib/python2.7/site-packages/ScreenSaver/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ScreenSaver/_metadata.py b/env/lib/python2.7/site-packages/ScreenSaver/_metadata.py deleted file mode 100644 index 737f46cc..00000000 --- a/env/lib/python2.7/site-packages/ScreenSaver/_metadata.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Sep 24 11:07:50 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$$''' -misc.update({}) -functions={'SSRandomPointForSizeWithinRect': (sel32or64(b'{_NSPoint=ff}{_NSSize=ff}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGPoint=dd}{CGSize=dd}{CGRect={CGPoint=dd}{CGSize=dd}}'),), 'SSRandomIntBetween': (b'iii',), 'SSRandomFloatBetween': (sel32or64(b'fff', b'ddd'),), 'SSCenteredRectInRect': (sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}'),)} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'ScreenSaverView', b'hasConfigureSheet', {'retval': {'type': 'Z'}}) - r(b'ScreenSaverView', b'initWithFrame:isPreview:', {'arguments': {3: {'type': 'Z'}}}) - r(b'ScreenSaverView', b'isAnimating', {'retval': {'type': 'Z'}}) - r(b'ScreenSaverView', b'isPreview', {'retval': {'type': 'Z'}}) - r(b'ScreenSaverView', b'performGammaFade', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/ScreenSaver/_metadata.pyc b/env/lib/python2.7/site-packages/ScreenSaver/_metadata.pyc deleted file mode 100644 index 5f8f3c5b..00000000 Binary files a/env/lib/python2.7/site-packages/ScreenSaver/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ScriptingBridge/_ScriptingBridge.so b/env/lib/python2.7/site-packages/ScriptingBridge/_ScriptingBridge.so deleted file mode 100755 index 2096f06f..00000000 Binary files a/env/lib/python2.7/site-packages/ScriptingBridge/_ScriptingBridge.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ScriptingBridge/__init__.py b/env/lib/python2.7/site-packages/ScriptingBridge/__init__.py deleted file mode 100644 index a73a1822..00000000 --- 
a/env/lib/python2.7/site-packages/ScriptingBridge/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -''' -Python mapping for the ScriptingBridge framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Foundation - -from ScriptingBridge import _metadata -from ScriptingBridge._ScriptingBridge import * - -sys.modules['ScriptingBridge'] = mod = objc.ObjCLazyModule('ScriptingBridge', - "com.apple.ScriptingBridge", - objc.pathForFramework("/System/Library/Frameworks/ScriptingBridge.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation,)) - -import sys -del sys.modules['ScriptingBridge._metadata'] - -# Override the default behaviour of the bridge to ensure that we -# make the minimal amount of AppleScript calls. -import objc -objc.addConvenienceForClass('SBElementArray', [ - ('__iter__', lambda self: iter(self.objectEnumerator())), -]) diff --git a/env/lib/python2.7/site-packages/ScriptingBridge/__init__.pyc b/env/lib/python2.7/site-packages/ScriptingBridge/__init__.pyc deleted file mode 100644 index 79bde73e..00000000 Binary files a/env/lib/python2.7/site-packages/ScriptingBridge/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ScriptingBridge/_metadata.py b/env/lib/python2.7/site-packages/ScriptingBridge/_metadata.py deleted file mode 100644 index 539d933c..00000000 --- a/env/lib/python2.7/site-packages/ScriptingBridge/_metadata.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Mon Sep 24 11:13:32 2012 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'eventDidFail:withError:', {'arguments': {2: {'type': 'r^{AEDesc=I^^{OpaqueAEDataStorageType}}'}}}) - r(b'SBApplication', b'isRunning', {'retval': {'type': 'Z'}}) - r(b'SBElementArray', b'arrayByApplyingSelector:', {'arguments': {2: {'sel_of_type': b'@@:'}}}) - r(b'SBElementArray', b'arrayByApplyingSelector:withObject:', {'arguments': {2: {'sel_of_type': b'@@:@'}}}) - r(b'SBObject', b'sendEvent:id:parameters:', {'variadic': True}) -finally: - objc._updatingMetadata(False) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'eventDidFail:withError:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': 'r^{AEDesc=I^^{OpaqueAEDataStorageType}}'}, 3: {'type': b'@'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/ScriptingBridge/_metadata.pyc b/env/lib/python2.7/site-packages/ScriptingBridge/_metadata.pyc deleted file mode 100644 index ae4fa13a..00000000 Binary files a/env/lib/python2.7/site-packages/ScriptingBridge/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SearchKit/__init__.py b/env/lib/python2.7/site-packages/SearchKit/__init__.py deleted file mode 100644 index 419bb2ad..00000000 --- a/env/lib/python2.7/site-packages/SearchKit/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the SearchKit 
framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import objc, sys - -import CoreServices - -import warnings -warnings.warn("pyobjc-framework-SearchKit is deprecated, use 'import CoreServices' instead", DeprecationWarning) - - -sys.modules['SearchKit'] = objc.ObjCLazyModule( - "SearchKit", "com.apple.SearchKit", - objc.pathForFramework( - "/System/Library/Frameworks/CoreServices.framework/Frameworks/SearchKit.framework"), - {}, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (CoreServices,)) diff --git a/env/lib/python2.7/site-packages/SearchKit/__init__.pyc b/env/lib/python2.7/site-packages/SearchKit/__init__.pyc deleted file mode 100644 index 77b99bd3..00000000 Binary files a/env/lib/python2.7/site-packages/SearchKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Security/_Security.so b/env/lib/python2.7/site-packages/Security/_Security.so deleted file mode 100755 index 6024b8ee..00000000 Binary files a/env/lib/python2.7/site-packages/Security/_Security.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Security/__init__.py b/env/lib/python2.7/site-packages/Security/__init__.py deleted file mode 100644 index 0c584f80..00000000 --- a/env/lib/python2.7/site-packages/Security/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the Security framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from Security import _metadata -from Security import _Security - -sys.modules['Security'] = mod = objc.ObjCLazyModule( - "Security", - "com.apple.security", - objc.pathForFramework("/System/Library/Frameworks/Security.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (_Security, Foundation,)) - -import sys -del sys.modules['Security._metadata'] diff --git a/env/lib/python2.7/site-packages/Security/__init__.pyc b/env/lib/python2.7/site-packages/Security/__init__.pyc deleted file mode 100644 index d21cd47c..00000000 Binary files a/env/lib/python2.7/site-packages/Security/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Security/_metadata.py b/env/lib/python2.7/site-packages/Security/_metadata.py deleted file mode 100644 index cb146538..00000000 --- a/env/lib/python2.7/site-packages/Security/_metadata.py +++ /dev/null @@ -1,28 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Feb 17 11:24:45 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'AuthorizationExternalForm': objc.createStructType('AuthorizationExternalForm', b'{_AuthorizationExternalForm=[32C]}', ['bytes']), 'SecKeychainSettings': objc.createStructType('SecKeychainSettings', b'{SecKeychainSettings=IZZI}', ['version', 'lockOnSleep', 'useLockInterval', 'lockInterval']), 'SecItemImportExportKeyParameters': objc.createStructType('SecItemImportExportKeyParameters', b'{_SecItemImportExportKeyParameters=II@@@@@@}', ['version', 'flags', 'passphrase', 'alertTitle', 
'alertPrompt', 'accessRef', 'keyUsage', 'keyAttributes']), 'SecKeychainCallbackInfo': objc.createStructType('SecKeychainCallbackInfo', b'{SecKeychainCallbackInfo=I@@i}', ['version', 'item', 'keychain', 'pid']), 'AuthorizationItem': objc.createStructType('AuthorizationItem', b'{_AuthorizationItem=^cL^vI}', ['name', 'valueLength', 'value', 'flags'])}) -constants = '''$kCMSEncoderDigestAlgorithmSHA1$kCMSEncoderDigestAlgorithmSHA256$kSSLSessionConfig_3DES_fallback$kSSLSessionConfig_ATSv1$kSSLSessionConfig_ATSv1_noPFS$kSSLSessionConfig_RC4_fallback$kSSLSessionConfig_TLSv1_3DES_fallback$kSSLSessionConfig_TLSv1_RC4_fallback$kSSLSessionConfig_TLSv1_fallback$kSSLSessionConfig_anonymous$kSSLSessionConfig_default$kSSLSessionConfig_legacy$kSSLSessionConfig_legacy_DHE$kSSLSessionConfig_standard$kSecACLAuthorizationAny$kSecACLAuthorizationChangeACL$kSecACLAuthorizationChangeOwner$kSecACLAuthorizationDecrypt$kSecACLAuthorizationDelete$kSecACLAuthorizationDerive$kSecACLAuthorizationEncrypt$kSecACLAuthorizationExportClear$kSecACLAuthorizationExportWrapped$kSecACLAuthorizationGenKey$kSecACLAuthorizationImportClear$kSecACLAuthorizationImportWrapped$kSecACLAuthorizationIntegrity$kSecACLAuthorizationKeychainCreate$kSecACLAuthorizationKeychainDelete$kSecACLAuthorizationKeychainItemDelete$kSecACLAuthorizationKeychainItemInsert$kSecACLAuthorizationKeychainItemModify$kSecACLAuthorizationKeychainItemRead$kSecACLAuthorizationLogin$kSecACLAuthorizationMAC$kSecACLAuthorizationPartitionID$kSecACLAuthorizationSign$kSecAttrAccess$kSecAttrAccessControl$kSecAttrAccessGroup$kSecAttrAccessGroupToken$kSecAttrAccessible$kSecAttrAccessibleAfterFirstUnlock$kSecAttrAccessibleAfterFirstUnlockThisDeviceOnly$kSecAttrAccessibleAlways$kSecAttrAccessibleAlwaysThisDeviceOnly$kSecAttrAccessibleWhenPasscodeSetThisDeviceOnly$kSecAttrAccessibleWhenUnlocked$kSecAttrAccessibleWhenUnlockedThisDeviceOnly$kSecAttrAccount$kSecAttrApplicationLabel$kSecAttrApplicationTag$kSecAttrAuthenticationType$kSecAttrAuthenticationTypeDPA$kSecAttrAuthenticationTypeDefault$kSecAttrAuthenticationTypeHTMLForm$kSecAttrAuthenticationTypeHTTPBasic$kSecAttrAuthenticationTypeHTTPDigest$kSecAttrAuthenticationTypeMSN$kSecAttrAuthenticationTypeNTLM$kSecAttrAuthenticationTypeRPA$kSecAttrCanDecrypt$kSecAttrCanDerive$kSecAttrCanEncrypt$kSecAttrCanSign$kSecAttrCanUnwrap$kSecAttrCanVerify$kSecAttrCanWrap$kSecAttrCertificateEncoding$kSecAttrCertificateType$kSecAttrComment$kSecAttrCreationDate$kSecAttrCreator$kSecAttrDescription$kSecAttrEffectiveKeySize$kSecAttrGeneric$kSecAttrIsExtractable$kSecAttrIsInvisible$kSecAttrIsNegative$kSecAttrIsPermanent$kSecAttrIsSensitive$kSecAttrIssuer$kSecAttrKeyClass$kSecAttrKeyClassPrivate$kSecAttrKeyClassPublic$kSecAttrKeyClassSymmetric$kSecAttrKeySizeInBits$kSecAttrKeyType$kSecAttrKeyType3DES$kSecAttrKeyTypeAES$kSecAttrKeyTypeCAST$kSecAttrKeyTypeDES$kSecAttrKeyTypeDSA$kSecAttrKeyTypeEC$kSecAttrKeyTypeECDSA$kSecAttrKeyTypeECSECPrimeRandom$kSecAttrKeyTypeRC2$kSecAttrKeyTypeRC4$kSecAttrKeyTypeRSA$kSecAttrLabel$kSecAttrModificationDate$kSecAttrPRF$kSecAttrPRFHmacAlgSHA1$kSecAttrPRFHmacAlgSHA224$kSecAttrPRFHmacAlgSHA256$kSecAttrPRFHmacAlgSHA384$kSecAttrPRFHmacAlgSHA512$kSecAttrPath$kSecAttrPersistantReference$kSecAttrPersistentReference$kSecAttrPort$kSecAttrProtocol$kSecAttrProtocolAFP$kSecAttrProtocolAppleTalk$kSecAttrProtocolDAAP$kSecAttrProtocolEPPC$kSecAttrProtocolFTP$kSecAttrProtocolFTPAccount$kSecAttrProtocolFTPProxy$kSecAttrProtocolFTPS$kSecAttrProtocolHTTP$kSecAttrProtocolHTTPProxy$kSecAttrProtocolHTTPS$kSecAttrProtocolHTTPSProxy$kSecAttrPr
otocolIMAP$kSecAttrProtocolIMAPS$kSecAttrProtocolIPP$kSecAttrProtocolIRC$kSecAttrProtocolIRCS$kSecAttrProtocolLDAP$kSecAttrProtocolLDAPS$kSecAttrProtocolNNTP$kSecAttrProtocolNNTPS$kSecAttrProtocolPOP3$kSecAttrProtocolPOP3S$kSecAttrProtocolRTSP$kSecAttrProtocolRTSPProxy$kSecAttrProtocolSMB$kSecAttrProtocolSMTP$kSecAttrProtocolSOCKS$kSecAttrProtocolSSH$kSecAttrProtocolTelnet$kSecAttrProtocolTelnetS$kSecAttrPublicKeyHash$kSecAttrRounds$kSecAttrSalt$kSecAttrSecurityDomain$kSecAttrSerialNumber$kSecAttrServer$kSecAttrService$kSecAttrSubject$kSecAttrSubjectKeyID$kSecAttrSyncViewHint$kSecAttrSynchronizable$kSecAttrSynchronizableAny$kSecAttrTokenID$kSecAttrTokenIDSecureEnclave$kSecAttrType$kSecBase32Encoding$kSecBase64Encoding$kSecCFErrorArchitecture$kSecCFErrorGuestAttributes$kSecCFErrorInfoPlist$kSecCFErrorPath$kSecCFErrorPattern$kSecCFErrorRequirementSyntax$kSecCFErrorResourceAdded$kSecCFErrorResourceAltered$kSecCFErrorResourceMissing$kSecCFErrorResourceSeal$kSecCFErrorResourceSideband$kSecClass$kSecClassCertificate$kSecClassGenericPassword$kSecClassIdentity$kSecClassInternetPassword$kSecClassKey$kSecCodeAttributeArchitecture$kSecCodeAttributeBundleVersion$kSecCodeAttributeSubarchitecture$kSecCodeAttributeUniversalFileOffset$kSecCodeInfoCMS$kSecCodeInfoCdHashes$kSecCodeInfoCertificates$kSecCodeInfoChangedFiles$kSecCodeInfoDesignatedRequirement$kSecCodeInfoDigestAlgorithm$kSecCodeInfoDigestAlgorithms$kSecCodeInfoEntitlements$kSecCodeInfoEntitlementsDict$kSecCodeInfoFlags$kSecCodeInfoFormat$kSecCodeInfoIdentifier$kSecCodeInfoImplicitDesignatedRequirement$kSecCodeInfoMainExecutable$kSecCodeInfoPList$kSecCodeInfoPlatformIdentifier$kSecCodeInfoRequirementData$kSecCodeInfoRequirements$kSecCodeInfoSource$kSecCodeInfoStatus$kSecCodeInfoTeamIdentifier$kSecCodeInfoTime$kSecCodeInfoTimestamp$kSecCodeInfoTrust$kSecCodeInfoUnique$kSecCompressionRatio$kSecDecodeTypeAttribute$kSecDigestHMACKeyAttribute$kSecDigestHMACMD5$kSecDigestHMACSHA1$kSecDigestHMACSHA2$kSecDigestLengthAttribute$kSecDigestMD2$kSecDigestMD4$kSecDigestMD5$kSecDigestSHA1$kSecDigestSHA2$kSecDigestTypeAttribute$kSecEncodeLineLengthAttribute$kSecEncodeTypeAttribute$kSecEncryptKey$kSecEncryptionMode$kSecGuestAttributeArchitecture$kSecGuestAttributeAudit$kSecGuestAttributeCanonical$kSecGuestAttributeDynamicCode$kSecGuestAttributeDynamicCodeInfoPlist$kSecGuestAttributeHash$kSecGuestAttributeMachPort$kSecGuestAttributePid$kSecGuestAttributeSubarchitecture$kSecIVKey$kSecIdentityDomainDefault$kSecIdentityDomainKerberosKDC$kSecImportExportAccess$kSecImportExportKeychain$kSecImportExportPassphrase$kSecImportItemCertChain$kSecImportItemIdentity$kSecImportItemKeyID$kSecImportItemLabel$kSecImportItemTrust$kSecInputIsAttributeName$kSecInputIsDigest$kSecInputIsPlainText$kSecInputIsRaw$kSecKeyAlgorithmECDHKeyExchangeCofactor$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA1$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA224$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA256$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA384$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA512$kSecKeyAlgorithmECDHKeyExchangeStandard$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA1$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA224$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA256$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA384$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA512$kSecKeyAlgorithmECDSASignatureDigestX962$kSecKeyAlgorithmECDSASignatureDigestX962SHA1$kSecKeyAlgorithmECDSASignatureDigestX962SHA224$kSecKeyAlgorithmECDSASignatureDigestX962SHA256$kSecKeyAlgorithmECDSASignatureDig
estX962SHA384$kSecKeyAlgorithmECDSASignatureDigestX962SHA512$kSecKeyAlgorithmECDSASignatureMessageX962SHA1$kSecKeyAlgorithmECDSASignatureMessageX962SHA224$kSecKeyAlgorithmECDSASignatureMessageX962SHA256$kSecKeyAlgorithmECDSASignatureMessageX962SHA384$kSecKeyAlgorithmECDSASignatureMessageX962SHA512$kSecKeyAlgorithmECDSASignatureRFC4754$kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA224AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA256AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA384AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA512AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA1AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA224AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA256AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA384AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA512AESGCM$kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA224AESGCM$kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA256AESGCM$kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA384AESGCM$kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA512AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA1AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA224AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA256AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA384AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA512AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA1$kSecKeyAlgorithmRSAEncryptionOAEPSHA1AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA224$kSecKeyAlgorithmRSAEncryptionOAEPSHA224AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA256$kSecKeyAlgorithmRSAEncryptionOAEPSHA256AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA384$kSecKeyAlgorithmRSAEncryptionOAEPSHA384AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA512$kSecKeyAlgorithmRSAEncryptionOAEPSHA512AESGCM$kSecKeyAlgorithmRSAEncryptionPKCS1$kSecKeyAlgorithmRSAEncryptionRaw$kSecKeyAlgorithmRSASignatureDigestPKCS1v15Raw$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA1$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA224$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA256$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA384$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA512$kSecKeyAlgorithmRSASignatureDigestPSSSHA1$kSecKeyAlgorithmRSASignatureDigestPSSSHA224$kSecKeyAlgorithmRSASignatureDigestPSSSHA256$kSecKeyAlgorithmRSASignatureDigestPSSSHA384$kSecKeyAlgorithmRSASignatureDigestPSSSHA512$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA1$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA224$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA256$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA384$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA512$kSecKeyAlgorithmRSASignatureMessagePSSSHA1$kSecKeyAlgorithmRSASignatureMessagePSSSHA224$kSecKeyAlgorithmRSASignatureMessagePSSSHA256$kSecKeyAlgorithmRSASignatureMessagePSSSHA384$kSecKeyAlgorithmRSASignatureMessagePSSSHA512$kSecKeyAlgorithmRSASignatureRaw$kSecKeyAttributeName$kSecKeyKeyExchangeParameterRequestedSize$kSecKeyKeyExchangeParameterSharedInfo$kSecLineLength64$kSecLineLength76$kSecMatchCaseInsensitive$kSecMatchDiacriticInsensitive$kSecMatchEmailAddressIfPresent$kSecMatchIssuers$kSecMatchItemList$kSecMatchLimit$kSecMatchLimitAll$kSecMatchLimitOne$kSecMatchPolicy$kSecMatchSearchList$kSecMatchSubjectContains$kSecMatchSubjectEndsWith$kSecMatchSubjectStartsWith$kSecMatchSubjectWholeString$kSecMatchTrustedOnly$kSecMatchValidOnDate$kSecMatchWidthInsensitive$kSecModeCBCKey$kSecModeCFBKey$kSecModeECBKey$kSecModeNoneKey$kSecModeOFBKey$kSecOAEPEncodingParametersAttributeName
$kSecOAEPMGF1DigestAlgorithmAttributeName$kSecOAEPMessageLengthAttributeName$kSecOIDADC_CERT_POLICY$kSecOIDAPPLE_CERT_POLICY$kSecOIDAPPLE_EKU_CODE_SIGNING$kSecOIDAPPLE_EKU_CODE_SIGNING_DEV$kSecOIDAPPLE_EKU_ICHAT_ENCRYPTION$kSecOIDAPPLE_EKU_ICHAT_SIGNING$kSecOIDAPPLE_EKU_RESOURCE_SIGNING$kSecOIDAPPLE_EKU_SYSTEM_IDENTITY$kSecOIDAPPLE_EXTENSION$kSecOIDAPPLE_EXTENSION_AAI_INTERMEDIATE$kSecOIDAPPLE_EXTENSION_ADC_APPLE_SIGNING$kSecOIDAPPLE_EXTENSION_ADC_DEV_SIGNING$kSecOIDAPPLE_EXTENSION_APPLEID_INTERMEDIATE$kSecOIDAPPLE_EXTENSION_APPLE_SIGNING$kSecOIDAPPLE_EXTENSION_CODE_SIGNING$kSecOIDAPPLE_EXTENSION_INTERMEDIATE_MARKER$kSecOIDAPPLE_EXTENSION_ITMS_INTERMEDIATE$kSecOIDAPPLE_EXTENSION_WWDR_INTERMEDIATE$kSecOIDAuthorityInfoAccess$kSecOIDAuthorityKeyIdentifier$kSecOIDBasicConstraints$kSecOIDBiometricInfo$kSecOIDCSSMKeyStruct$kSecOIDCertIssuer$kSecOIDCertificatePolicies$kSecOIDClientAuth$kSecOIDCollectiveStateProvinceName$kSecOIDCollectiveStreetAddress$kSecOIDCommonName$kSecOIDCountryName$kSecOIDCrlDistributionPoints$kSecOIDCrlNumber$kSecOIDCrlReason$kSecOIDDOTMAC_CERT_EMAIL_ENCRYPT$kSecOIDDOTMAC_CERT_EMAIL_SIGN$kSecOIDDOTMAC_CERT_EXTENSION$kSecOIDDOTMAC_CERT_IDENTITY$kSecOIDDOTMAC_CERT_POLICY$kSecOIDDeltaCrlIndicator$kSecOIDDescription$kSecOIDEKU_IPSec$kSecOIDEmailAddress$kSecOIDEmailProtection$kSecOIDExtendedKeyUsage$kSecOIDExtendedKeyUsageAny$kSecOIDExtendedUseCodeSigning$kSecOIDGivenName$kSecOIDHoldInstructionCode$kSecOIDInvalidityDate$kSecOIDIssuerAltName$kSecOIDIssuingDistributionPoint$kSecOIDIssuingDistributionPoints$kSecOIDKERBv5_PKINIT_KP_CLIENT_AUTH$kSecOIDKERBv5_PKINIT_KP_KDC$kSecOIDKeyUsage$kSecOIDLocalityName$kSecOIDMS_NTPrincipalName$kSecOIDMicrosoftSGC$kSecOIDNameConstraints$kSecOIDNetscapeCertSequence$kSecOIDNetscapeCertType$kSecOIDNetscapeSGC$kSecOIDOCSPSigning$kSecOIDOrganizationName$kSecOIDOrganizationalUnitName$kSecOIDPolicyConstraints$kSecOIDPolicyMappings$kSecOIDPrivateKeyUsagePeriod$kSecOIDQC_Statements$kSecOIDSRVName$kSecOIDSerialNumber$kSecOIDServerAuth$kSecOIDStateProvinceName$kSecOIDStreetAddress$kSecOIDSubjectAltName$kSecOIDSubjectDirectoryAttributes$kSecOIDSubjectEmailAddress$kSecOIDSubjectInfoAccess$kSecOIDSubjectKeyIdentifier$kSecOIDSubjectPicture$kSecOIDSubjectSignatureBitmap$kSecOIDSurname$kSecOIDTimeStamping$kSecOIDTitle$kSecOIDUseExemptions$kSecOIDX509V1CertificateIssuerUniqueId$kSecOIDX509V1CertificateSubjectUniqueId$kSecOIDX509V1IssuerName$kSecOIDX509V1IssuerNameCStruct$kSecOIDX509V1IssuerNameLDAP$kSecOIDX509V1IssuerNameStd$kSecOIDX509V1SerialNumber$kSecOIDX509V1Signature$kSecOIDX509V1SignatureAlgorithm$kSecOIDX509V1SignatureAlgorithmParameters$kSecOIDX509V1SignatureAlgorithmTBS$kSecOIDX509V1SignatureCStruct$kSecOIDX509V1SignatureStruct$kSecOIDX509V1SubjectName$kSecOIDX509V1SubjectNameCStruct$kSecOIDX509V1SubjectNameLDAP$kSecOIDX509V1SubjectNameStd$kSecOIDX509V1SubjectPublicKey$kSecOIDX509V1SubjectPublicKeyAlgorithm$kSecOIDX509V1SubjectPublicKeyAlgorithmParameters$kSecOIDX509V1SubjectPublicKeyCStruct$kSecOIDX509V1ValidityNotAfter$kSecOIDX509V1ValidityNotBefore$kSecOIDX509V1Version$kSecOIDX509V3Certificate$kSecOIDX509V3CertificateCStruct$kSecOIDX509V3CertificateExtensionCStruct$kSecOIDX509V3CertificateExtensionCritical$kSecOIDX509V3CertificateExtensionId$kSecOIDX509V3CertificateExtensionStruct$kSecOIDX509V3CertificateExtensionType$kSecOIDX509V3CertificateExtensionValue$kSecOIDX509V3CertificateExtensionsCStruct$kSecOIDX509V3CertificateExtensionsStruct$kSecOIDX509V3CertificateNumberOfExtensions$kSecOIDX509V3SignedCertificate$kSecOIDX509V3SignedCertificateCStruct$kSe
cPaddingKey$kSecPaddingNoneKey$kSecPaddingOAEPKey$kSecPaddingPKCS1Key$kSecPaddingPKCS5Key$kSecPaddingPKCS7Key$kSecPolicyAppleCodeSigning$kSecPolicyAppleEAP$kSecPolicyAppleIDValidation$kSecPolicyAppleIPsec$kSecPolicyApplePKINITClient$kSecPolicyApplePKINITServer$kSecPolicyApplePassbookSigning$kSecPolicyApplePayIssuerEncryption$kSecPolicyAppleRevocation$kSecPolicyAppleSMIME$kSecPolicyAppleSSL$kSecPolicyAppleTimeStamping$kSecPolicyAppleX509Basic$kSecPolicyAppleiChat$kSecPolicyClient$kSecPolicyKU_CRLSign$kSecPolicyKU_DataEncipherment$kSecPolicyKU_DecipherOnly$kSecPolicyKU_DigitalSignature$kSecPolicyKU_EncipherOnly$kSecPolicyKU_KeyAgreement$kSecPolicyKU_KeyCertSign$kSecPolicyKU_KeyEncipherment$kSecPolicyKU_NonRepudiation$kSecPolicyMacAppStoreReceipt$kSecPolicyName$kSecPolicyOid$kSecPolicyRevocationFlags$kSecPolicyTeamIdentifier$kSecPrivateKeyAttrs$kSecPropertyKeyLabel$kSecPropertyKeyLocalizedLabel$kSecPropertyKeyType$kSecPropertyKeyValue$kSecPropertyTypeData$kSecPropertyTypeDate$kSecPropertyTypeError$kSecPropertyTypeSection$kSecPropertyTypeString$kSecPropertyTypeSuccess$kSecPropertyTypeTitle$kSecPropertyTypeURL$kSecPropertyTypeWarning$kSecPublicKeyAttrs$kSecRandomDefault$kSecReturnAttributes$kSecReturnData$kSecReturnPersistentRef$kSecReturnRef$kSecSignatureAttributeName$kSecTransformAbortAttributeName$kSecTransformAbortOriginatorKey$kSecTransformActionAttributeNotification$kSecTransformActionAttributeValidation$kSecTransformActionCanExecute$kSecTransformActionExternalizeExtraData$kSecTransformActionFinalize$kSecTransformActionInternalizeExtraData$kSecTransformActionProcessData$kSecTransformActionStartingExecution$kSecTransformDebugAttributeName$kSecTransformErrorDomain$kSecTransformInputAttributeName$kSecTransformOutputAttributeName$kSecTransformPreviousErrorKey$kSecTransformTransformName$kSecTrustCertificateTransparency$kSecTrustCertificateTransparencyWhiteList$kSecTrustEvaluationDate$kSecTrustExtendedValidation$kSecTrustOrganizationName$kSecTrustResultValue$kSecTrustRevocationChecked$kSecTrustRevocationValidUntilDate$kSecUseAuthenticationContext$kSecUseAuthenticationUI$kSecUseAuthenticationUIAllow$kSecUseAuthenticationUIFail$kSecUseAuthenticationUISkip$kSecUseItemList$kSecUseKeychain$kSecUseNoAuthenticationUI$kSecUseOperationPrompt$kSecValueData$kSecValuePersistentRef$kSecValueRef$kSecZLibEncoding$''' -enums = 
'''$SEC_KEYCHAIN_SETTINGS_VERS1@1$SEC_KEY_IMPORT_EXPORT_PARAMS_VERSION@0$SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA@17$SSL_DHE_DSS_WITH_3DES_EDE_CBC_SHA@19$SSL_DHE_DSS_WITH_DES_CBC_SHA@18$SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA@20$SSL_DHE_RSA_WITH_3DES_EDE_CBC_SHA@22$SSL_DHE_RSA_WITH_DES_CBC_SHA@21$SSL_DH_DSS_EXPORT_WITH_DES40_CBC_SHA@11$SSL_DH_DSS_WITH_3DES_EDE_CBC_SHA@13$SSL_DH_DSS_WITH_DES_CBC_SHA@12$SSL_DH_RSA_EXPORT_WITH_DES40_CBC_SHA@14$SSL_DH_RSA_WITH_3DES_EDE_CBC_SHA@16$SSL_DH_RSA_WITH_DES_CBC_SHA@15$SSL_DH_anon_EXPORT_WITH_DES40_CBC_SHA@25$SSL_DH_anon_EXPORT_WITH_RC4_40_MD5@23$SSL_DH_anon_WITH_3DES_EDE_CBC_SHA@27$SSL_DH_anon_WITH_DES_CBC_SHA@26$SSL_DH_anon_WITH_RC4_128_MD5@24$SSL_FORTEZZA_DMS_WITH_FORTEZZA_CBC_SHA@29$SSL_FORTEZZA_DMS_WITH_NULL_SHA@28$SSL_NO_SUCH_CIPHERSUITE@65535$SSL_NULL_WITH_NULL_NULL@0$SSL_RSA_EXPORT_WITH_DES40_CBC_SHA@8$SSL_RSA_EXPORT_WITH_RC2_CBC_40_MD5@6$SSL_RSA_EXPORT_WITH_RC4_40_MD5@3$SSL_RSA_WITH_3DES_EDE_CBC_MD5@65411$SSL_RSA_WITH_3DES_EDE_CBC_SHA@10$SSL_RSA_WITH_DES_CBC_MD5@65410$SSL_RSA_WITH_DES_CBC_SHA@9$SSL_RSA_WITH_IDEA_CBC_MD5@65409$SSL_RSA_WITH_IDEA_CBC_SHA@7$SSL_RSA_WITH_NULL_MD5@1$SSL_RSA_WITH_NULL_SHA@2$SSL_RSA_WITH_RC2_CBC_MD5@65408$SSL_RSA_WITH_RC4_128_MD5@4$SSL_RSA_WITH_RC4_128_SHA@5$TLS_AES_128_CCM_8_SHA256@4869$TLS_AES_128_CCM_SHA256@4868$TLS_AES_128_GCM_SHA256@4865$TLS_AES_256_GCM_SHA384@4866$TLS_CHACHA20_POLY1305_SHA256@4867$TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA@19$TLS_DHE_DSS_WITH_AES_128_CBC_SHA@50$TLS_DHE_DSS_WITH_AES_128_CBC_SHA256@64$TLS_DHE_DSS_WITH_AES_128_GCM_SHA256@162$TLS_DHE_DSS_WITH_AES_256_CBC_SHA@56$TLS_DHE_DSS_WITH_AES_256_CBC_SHA256@106$TLS_DHE_DSS_WITH_AES_256_GCM_SHA384@163$TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA@143$TLS_DHE_PSK_WITH_AES_128_CBC_SHA@144$TLS_DHE_PSK_WITH_AES_128_CBC_SHA256@178$TLS_DHE_PSK_WITH_AES_128_GCM_SHA256@170$TLS_DHE_PSK_WITH_AES_256_CBC_SHA@145$TLS_DHE_PSK_WITH_AES_256_CBC_SHA384@179$TLS_DHE_PSK_WITH_AES_256_GCM_SHA384@171$TLS_DHE_PSK_WITH_NULL_SHA@45$TLS_DHE_PSK_WITH_NULL_SHA256@180$TLS_DHE_PSK_WITH_NULL_SHA384@181$TLS_DHE_PSK_WITH_RC4_128_SHA@142$TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA@22$TLS_DHE_RSA_WITH_AES_128_CBC_SHA@51$TLS_DHE_RSA_WITH_AES_128_CBC_SHA256@103$TLS_DHE_RSA_WITH_AES_128_GCM_SHA256@158$TLS_DHE_RSA_WITH_AES_256_CBC_SHA@57$TLS_DHE_RSA_WITH_AES_256_CBC_SHA256@107$TLS_DHE_RSA_WITH_AES_256_GCM_SHA384@159$TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA@13$TLS_DH_DSS_WITH_AES_128_CBC_SHA@48$TLS_DH_DSS_WITH_AES_128_CBC_SHA256@62$TLS_DH_DSS_WITH_AES_128_GCM_SHA256@164$TLS_DH_DSS_WITH_AES_256_CBC_SHA@54$TLS_DH_DSS_WITH_AES_256_CBC_SHA256@104$TLS_DH_DSS_WITH_AES_256_GCM_SHA384@165$TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA@16$TLS_DH_RSA_WITH_AES_128_CBC_SHA@49$TLS_DH_RSA_WITH_AES_128_CBC_SHA256@63$TLS_DH_RSA_WITH_AES_128_GCM_SHA256@160$TLS_DH_RSA_WITH_AES_256_CBC_SHA@55$TLS_DH_RSA_WITH_AES_256_CBC_SHA256@105$TLS_DH_RSA_WITH_AES_256_GCM_SHA384@161$TLS_DH_anon_WITH_3DES_EDE_CBC_SHA@27$TLS_DH_anon_WITH_AES_128_CBC_SHA@52$TLS_DH_anon_WITH_AES_128_CBC_SHA256@108$TLS_DH_anon_WITH_AES_128_GCM_SHA256@166$TLS_DH_anon_WITH_AES_256_CBC_SHA@58$TLS_DH_anon_WITH_AES_256_CBC_SHA256@109$TLS_DH_anon_WITH_AES_256_GCM_SHA384@167$TLS_DH_anon_WITH_RC4_128_MD5@24$TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA@49160$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA@49161$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256@49187$TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256@49195$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA@49162$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384@49188$TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384@49196$TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256@52393$TLS_ECDHE_ECDSA_WITH_NULL_SHA@49158$
TLS_ECDHE_ECDSA_WITH_RC4_128_SHA@49159$TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA@49170$TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA@49171$TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256@49191$TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256@49199$TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA@49172$TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384@49192$TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384@49200$TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256@52392$TLS_ECDHE_RSA_WITH_NULL_SHA@49168$TLS_ECDHE_RSA_WITH_RC4_128_SHA@49169$TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA@49155$TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA@49156$TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256@49189$TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256@49197$TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA@49157$TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384@49190$TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384@49198$TLS_ECDH_ECDSA_WITH_NULL_SHA@49153$TLS_ECDH_ECDSA_WITH_RC4_128_SHA@49154$TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA@49165$TLS_ECDH_RSA_WITH_AES_128_CBC_SHA@49166$TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256@49193$TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256@49201$TLS_ECDH_RSA_WITH_AES_256_CBC_SHA@49167$TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384@49194$TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384@49202$TLS_ECDH_RSA_WITH_NULL_SHA@49163$TLS_ECDH_RSA_WITH_RC4_128_SHA@49164$TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA@49175$TLS_ECDH_anon_WITH_AES_128_CBC_SHA@49176$TLS_ECDH_anon_WITH_AES_256_CBC_SHA@49177$TLS_ECDH_anon_WITH_NULL_SHA@49173$TLS_ECDH_anon_WITH_RC4_128_SHA@49174$TLS_EMPTY_RENEGOTIATION_INFO_SCSV@255$TLS_NULL_WITH_NULL_NULL@0$TLS_PSK_WITH_3DES_EDE_CBC_SHA@139$TLS_PSK_WITH_AES_128_CBC_SHA@140$TLS_PSK_WITH_AES_128_CBC_SHA256@174$TLS_PSK_WITH_AES_128_GCM_SHA256@168$TLS_PSK_WITH_AES_256_CBC_SHA@141$TLS_PSK_WITH_AES_256_CBC_SHA384@175$TLS_PSK_WITH_AES_256_GCM_SHA384@169$TLS_PSK_WITH_NULL_SHA@44$TLS_PSK_WITH_NULL_SHA256@176$TLS_PSK_WITH_NULL_SHA384@177$TLS_PSK_WITH_RC4_128_SHA@138$TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA@147$TLS_RSA_PSK_WITH_AES_128_CBC_SHA@148$TLS_RSA_PSK_WITH_AES_128_CBC_SHA256@182$TLS_RSA_PSK_WITH_AES_128_GCM_SHA256@172$TLS_RSA_PSK_WITH_AES_256_CBC_SHA@149$TLS_RSA_PSK_WITH_AES_256_CBC_SHA384@183$TLS_RSA_PSK_WITH_AES_256_GCM_SHA384@173$TLS_RSA_PSK_WITH_NULL_SHA@46$TLS_RSA_PSK_WITH_NULL_SHA256@184$TLS_RSA_PSK_WITH_NULL_SHA384@185$TLS_RSA_PSK_WITH_RC4_128_SHA@146$TLS_RSA_WITH_3DES_EDE_CBC_SHA@10$TLS_RSA_WITH_AES_128_CBC_SHA@47$TLS_RSA_WITH_AES_128_CBC_SHA256@60$TLS_RSA_WITH_AES_128_GCM_SHA256@156$TLS_RSA_WITH_AES_256_CBC_SHA@53$TLS_RSA_WITH_AES_256_CBC_SHA256@61$TLS_RSA_WITH_AES_256_GCM_SHA384@157$TLS_RSA_WITH_NULL_MD5@1$TLS_RSA_WITH_NULL_SHA@2$TLS_RSA_WITH_NULL_SHA256@59$TLS_RSA_WITH_RC4_128_MD5@4$TLS_RSA_WITH_RC4_128_SHA@5$callerSecuritySession@-1$errAuthorizationBadAddress@-60033$errAuthorizationCanceled@-60006$errAuthorizationDenied@-60005$errAuthorizationExternalizeNotAllowed@-60009$errAuthorizationInteractionNotAllowed@-60007$errAuthorizationInternal@-60008$errAuthorizationInternalizeNotAllowed@-60010$errAuthorizationInvalidFlags@-60011$errAuthorizationInvalidPointer@-60004$errAuthorizationInvalidRef@-60002$errAuthorizationInvalidSet@-60001$errAuthorizationInvalidTag@-60003$errAuthorizationSuccess@0$errAuthorizationToolEnvironmentError@-60032$errAuthorizationToolExecuteFailure@-60031$errSSLBadCert@-9808$errSSLBadCertificateStatusResponse@-9862$errSSLBadCipherSuite@-9818$errSSLBadConfiguration@-9848$errSSLBadRecordMac@-9846$errSSLBufferOverflow@-9817$errSSLCertExpired@-9814$errSSLCertNotYetValid@-9815$errSSLCertificateRequired@-9863$errSSLClientCertRequested@-9842$errSSLClientHelloReceived@-9851$errSSLClosedAbort@-9806$errSSLClosedGraceful@-9805$errSSLClosedNoNotify@-981
6$errSSLConfigurationFailed@-9854$errSSLConnectionRefused@-9844$errSSLCrypto@-9809$errSSLDecodeError@-9859$errSSLDecompressFail@-9857$errSSLDecryptionFail@-9845$errSSLFatalAlert@-9802$errSSLHandshakeFail@-9858$errSSLHostNameMismatch@-9843$errSSLIllegalParam@-9830$errSSLInappropriateFallback@-9860$errSSLInternal@-9810$errSSLMissingExtension@-9861$errSSLModuleAttach@-9811$errSSLNegotiation@-9801$errSSLNetworkTimeout@-9853$errSSLNoRootCert@-9813$errSSLPeerAccessDenied@-9832$errSSLPeerAuthCompleted@-9841$errSSLPeerBadCert@-9825$errSSLPeerBadRecordMac@-9820$errSSLPeerCertExpired@-9828$errSSLPeerCertRevoked@-9827$errSSLPeerCertUnknown@-9829$errSSLPeerDecodeError@-9833$errSSLPeerDecompressFail@-9823$errSSLPeerDecryptError@-9834$errSSLPeerDecryptionFail@-9821$errSSLPeerExportRestriction@-9835$errSSLPeerHandshakeFail@-9824$errSSLPeerInsufficientSecurity@-9837$errSSLPeerInternalError@-9838$errSSLPeerNoRenegotiation@-9840$errSSLPeerProtocolVersion@-9836$errSSLPeerRecordOverflow@-9822$errSSLPeerUnexpectedMsg@-9819$errSSLPeerUnknownCA@-9831$errSSLPeerUnsupportedCert@-9826$errSSLPeerUserCancelled@-9839$errSSLProtocol@-9800$errSSLRecordOverflow@-9847$errSSLSessionNotFound@-9804$errSSLTransportReset@-9852$errSSLUnexpectedMessage@-9856$errSSLUnexpectedRecord@-9849$errSSLUnknownPSKIdentity@-9864$errSSLUnknownRootCert@-9812$errSSLUnrecognizedName@-9865$errSSLUnsupportedExtension@-9855$errSSLWeakPeerEphemeralDHKey@-9850$errSSLWouldBlock@-9803$errSSLXCertChainInvalid@-9807$errSecACLAddFailed@-67698$errSecACLChangeFailed@-67699$errSecACLDeleteFailed@-67696$errSecACLNotSimple@-25240$errSecACLReplaceFailed@-67697$errSecAddinLoadFailed@-67711$errSecAddinUnloadFailed@-67714$errSecAlgorithmMismatch@-67730$errSecAllocate@-108$errSecAlreadyLoggedIn@-67814$errSecAppleAddAppACLSubject@-67589$errSecAppleInvalidKeyEndDate@-67593$errSecAppleInvalidKeyStartDate@-67592$errSecApplePublicKeyIncomplete@-67590$errSecAppleSSLv2Rollback@-67595$errSecAppleSignatureMismatch@-67591$errSecAttachHandleBusy@-67728$errSecAttributeNotInContext@-67720$errSecAuthFailed@-25293$errSecBadReq@-909$errSecBlockSizeMismatch@-67810$errSecBufferTooSmall@-25301$errSecCRLAlreadySigned@-67684$errSecCRLBadURI@-67617$errSecCRLExpired@-67613$errSecCRLNotFound@-67615$errSecCRLNotTrusted@-67620$errSecCRLNotValidYet@-67614$errSecCRLPolicyFailed@-67621$errSecCRLServerDown@-67616$errSecCSAmbiguousBundleFormat@-67011$errSecCSBadBundleFormat@-67028$errSecCSBadCallbackValue@-67020$errSecCSBadDictionaryFormat@-67058$errSecCSBadDiskImageFormat@-67001$errSecCSBadFrameworkVersion@-67009$errSecCSBadLVArch@-67017$errSecCSBadMainExecutable@-67010$errSecCSBadNestedCode@-67021$errSecCSBadObjectFormat@-67049$errSecCSBadResource@-67054$errSecCSBadTeamIdentifier@-66997$errSecCSCMSTooLarge@-67036$errSecCSCancelled@-67006$errSecCSDBAccess@-67032$errSecCSDBDenied@-67033$errSecCSDSStoreSymlink@-67012$errSecCSDbCorrupt@-67024$errSecCSFileHardQuarantined@-67026$errSecCSGuestInvalid@-67063$errSecCSHelperFailed@-67019$errSecCSHostProtocolContradiction@-67043$errSecCSHostProtocolDedicationError@-67042$errSecCSHostProtocolInvalidAttribute@-67031$errSecCSHostProtocolInvalidHash@-67035$errSecCSHostProtocolNotProxy@-67041$errSecCSHostProtocolRelativePath@-67044$errSecCSHostProtocolStateError@-67040$errSecCSHostProtocolUnrelated@-67039$errSecCSHostReject@-67047$errSecCSInfoPlistFailed@-67030$errSecCSInternalError@-67048$errSecCSInvalidAssociatedFileData@-66999$errSecCSInvalidAttributeValues@-67066$errSecCSInvalidEntitlements@-66994$errSecCSInvalidFlags@-67070$errSecCSInvalidObjectRef@-6707
1$errSecCSInvalidPlatform@-67005$errSecCSInvalidRuntimeVersion@-66993$errSecCSInvalidSymlink@-67003$errSecCSInvalidTeamIdentifier@-66998$errSecCSMultipleGuests@-67064$errSecCSNoMainExecutable@-67029$errSecCSNoMatches@-67027$errSecCSNoSuchCode@-67065$errSecCSNotAHost@-67046$errSecCSNotAppLike@-67002$errSecCSNotSupported@-67037$errSecCSObjectRequired@-67069$errSecCSOutdated@-67025$errSecCSRegularFile@-67015$errSecCSReqFailed@-67050$errSecCSReqInvalid@-67052$errSecCSReqUnsupported@-67051$errSecCSResourceDirectoryFailed@-67023$errSecCSResourceNotSupported@-67016$errSecCSResourceRulesInvalid@-67053$errSecCSResourcesInvalid@-67055$errSecCSResourcesNotFound@-67056$errSecCSResourcesNotSealed@-67057$errSecCSRevokedNotarization@-66992$errSecCSSignatureFailed@-67061$errSecCSSignatureInvalid@-67045$errSecCSSignatureNotVerifiable@-67060$errSecCSSignatureUnsupported@-67059$errSecCSSignatureUntrusted@-66996$errSecCSStaticCodeChanged@-67034$errSecCSStaticCodeNotFound@-67068$errSecCSTooBig@-67004$errSecCSUnimplemented@-67072$errSecCSUnsealedAppRoot@-67014$errSecCSUnsealedFrameworkRoot@-67008$errSecCSUnsigned@-67062$errSecCSUnsignedNestedCode@-67022$errSecCSUnsupportedDigestAlgorithm@-67000$errSecCSUnsupportedGuestAttributes@-67067$errSecCSVetoed@-67018$errSecCSWeakResourceEnvelope@-67007$errSecCSWeakResourceRules@-67013$errSecCallbackFailed@-67695$errSecCertificateCannotOperate@-67817$errSecCertificateExpired@-67818$errSecCertificateNotValidYet@-67819$errSecCertificateRevoked@-67820$errSecCertificateSuspended@-67821$errSecCodeSigningBadCertChainLength@-67647$errSecCodeSigningBadPathLengthConstraint@-67649$errSecCodeSigningDevelopment@-67651$errSecCodeSigningNoBasicConstraints@-67648$errSecCodeSigningNoExtendedKeyUsage@-67650$errSecConversionError@-67594$errSecCoreFoundationUnknown@-4960$errSecCreateChainFailed@-25318$errSecDataNotAvailable@-25316$errSecDataNotModifiable@-25317$errSecDataTooLarge@-25302$errSecDatabaseLocked@-67869$errSecDatastoreIsOpen@-67870$errSecDecode@-26275$errSecDeviceError@-67727$errSecDeviceFailed@-67588$errSecDeviceReset@-67587$errSecDeviceVerifyFailed@-67812$errSecDiskFull@-34$errSecDskFull@-34$errSecDuplicateCallback@-25297$errSecDuplicateItem@-25299$errSecDuplicateKeychain@-25296$errSecEMMLoadFailed@-67709$errSecEMMUnloadFailed@-67710$errSecEndOfData@-67634$errSecEventNotificationCallbackNotFound@-67723$errSecExtendedKeyUsageNotCritical@-67881$errSecFieldSpecifiedMultiple@-67866$errSecFileTooBig@-67597$errSecFunctionFailed@-67677$errSecFunctionIntegrityFail@-67670$errSecHostNameMismatch@-67602$errSecIDPFailure@-67622$errSecIO@-36$errSecInDarkWake@-25320$errSecIncompatibleDatabaseBlob@-67600$errSecIncompatibleFieldFormat@-67867$errSecIncompatibleKeyBlob@-67601$errSecIncompatibleVersion@-67704$errSecIncompleteCertRevocationCheck@-67635$errSecInputLengthError@-67724$errSecInsufficientClientID@-67586$errSecInsufficientCredentials@-67822$errSecInteractionNotAllowed@-25308$errSecInteractionRequired@-25315$errSecInternalComponent@-2070$errSecInternalError@-67671$errSecInvaldCRLAuthority@-67827$errSecInvalidACL@-67702$errSecInvalidAccessCredentials@-67700$errSecInvalidAccessRequest@-67876$errSecInvalidAction@-67823$errSecInvalidAddinFunctionTable@-67716$errSecInvalidAlgorithm@-67747$errSecInvalidAlgorithmParms@-67770$errSecInvalidAttributeAccessCredentials@-67796$errSecInvalidAttributeBase@-67788$errSecInvalidAttributeBlockSize@-67764$errSecInvalidAttributeDLDBHandle@-67794$errSecInvalidAttributeEffectiveBits@-67778$errSecInvalidAttributeEndDate@-67782$errSecInvalidAttributeInitVector@-6
7750$errSecInvalidAttributeIterationCount@-67792$errSecInvalidAttributeKey@-67748$errSecInvalidAttributeKeyLength@-67762$errSecInvalidAttributeKeyType@-67774$errSecInvalidAttributeLabel@-67772$errSecInvalidAttributeMode@-67776$errSecInvalidAttributeOutputSize@-67766$errSecInvalidAttributePadding@-67754$errSecInvalidAttributePassphrase@-67760$errSecInvalidAttributePrime@-67786$errSecInvalidAttributePrivateKeyFormat@-67800$errSecInvalidAttributePublicKeyFormat@-67798$errSecInvalidAttributeRandom@-67756$errSecInvalidAttributeRounds@-67768$errSecInvalidAttributeSalt@-67752$errSecInvalidAttributeSeed@-67758$errSecInvalidAttributeStartDate@-67780$errSecInvalidAttributeSubprime@-67790$errSecInvalidAttributeSymmetricKeyFormat@-67802$errSecInvalidAttributeVersion@-67784$errSecInvalidAttributeWrappedKeyFormat@-67804$errSecInvalidAuthority@-67824$errSecInvalidAuthorityKeyID@-67606$errSecInvalidBaseACLs@-67851$errSecInvalidBundleInfo@-67857$errSecInvalidCRL@-67830$errSecInvalidCRLEncoding@-67828$errSecInvalidCRLGroup@-67816$errSecInvalidCRLIndex@-67858$errSecInvalidCRLType@-67829$errSecInvalidCallback@-25298$errSecInvalidCertAuthority@-67826$errSecInvalidCertificateGroup@-67691$errSecInvalidCertificateRef@-67690$errSecInvalidContext@-67746$errSecInvalidDBList@-67681$errSecInvalidDBLocation@-67875$errSecInvalidData@-67673$errSecInvalidDatabaseBlob@-67598$errSecInvalidDigestAlgorithm@-67815$errSecInvalidEncoding@-67853$errSecInvalidExtendedKeyUsage@-67609$errSecInvalidFormType@-67831$errSecInvalidGUID@-67679$errSecInvalidHandle@-67680$errSecInvalidHandleUsage@-67668$errSecInvalidID@-67832$errSecInvalidIDLinkage@-67610$errSecInvalidIdentifier@-67833$errSecInvalidIndex@-67834$errSecInvalidIndexInfo@-67877$errSecInvalidInputVector@-67744$errSecInvalidItemRef@-25304$errSecInvalidKeyAttributeMask@-67738$errSecInvalidKeyBlob@-67599$errSecInvalidKeyFormat@-67742$errSecInvalidKeyHierarchy@-67713$errSecInvalidKeyLabel@-67740$errSecInvalidKeyRef@-67712$errSecInvalidKeyUsageForPolicy@-67608$errSecInvalidKeyUsageMask@-67736$errSecInvalidKeychain@-25295$errSecInvalidLoginName@-67813$errSecInvalidModifyMode@-67879$errSecInvalidName@-67689$errSecInvalidNetworkAddress@-67683$errSecInvalidNewOwner@-67878$errSecInvalidNumberOfFields@-67685$errSecInvalidOutputVector@-67745$errSecInvalidOwnerEdit@-25244$errSecInvalidPVC@-67708$errSecInvalidParsingModule@-67868$errSecInvalidPassthroughID@-67682$errSecInvalidPasswordRef@-25261$errSecInvalidPointer@-67675$errSecInvalidPolicyIdentifiers@-67835$errSecInvalidPrefsDomain@-25319$errSecInvalidQuery@-67693$errSecInvalidReason@-67837$errSecInvalidRecord@-67701$errSecInvalidRequestInputs@-67838$errSecInvalidRequestor@-67855$errSecInvalidResponseVector@-67839$errSecInvalidRoot@-67612$errSecInvalidSampleValue@-67703$errSecInvalidScope@-67706$errSecInvalidSearchRef@-25305$errSecInvalidServiceMask@-67717$errSecInvalidSignature@-67688$errSecInvalidStopOnPolicy@-67840$errSecInvalidSubServiceID@-67719$errSecInvalidSubjectKeyID@-67607$errSecInvalidSubjectName@-67655$errSecInvalidTimeString@-67836$errSecInvalidTrustSetting@-25242$errSecInvalidTrustSettings@-25262$errSecInvalidTuple@-67841$errSecInvalidTupleCredendtials@-67852$errSecInvalidTupleGroup@-67850$errSecInvalidValidityPeriod@-67854$errSecInvalidValue@-67694$errSecItemNotFound@-25300$errSecKeyBlobTypeIncorrect@-67732$errSecKeyHeaderInconsistent@-67733$errSecKeyIsSensitive@-25258$errSecKeySizeNotAllowed@-25311$errSecKeyUsageIncorrect@-67731$errSecLibraryReferenceNotFound@-67715$errSecMDSError@-67674$errSecMemoryError@-67672$errSecMissing
AlgorithmParms@-67771$errSecMissingAttributeAccessCredentials@-67797$errSecMissingAttributeBase@-67789$errSecMissingAttributeBlockSize@-67765$errSecMissingAttributeDLDBHandle@-67795$errSecMissingAttributeEffectiveBits@-67779$errSecMissingAttributeEndDate@-67783$errSecMissingAttributeInitVector@-67751$errSecMissingAttributeIterationCount@-67793$errSecMissingAttributeKey@-67749$errSecMissingAttributeKeyLength@-67763$errSecMissingAttributeKeyType@-67775$errSecMissingAttributeLabel@-67773$errSecMissingAttributeMode@-67777$errSecMissingAttributeOutputSize@-67767$errSecMissingAttributePadding@-67755$errSecMissingAttributePassphrase@-67761$errSecMissingAttributePrime@-67787$errSecMissingAttributePrivateKeyFormat@-67801$errSecMissingAttributePublicKeyFormat@-67799$errSecMissingAttributeRandom@-67757$errSecMissingAttributeRounds@-67769$errSecMissingAttributeSalt@-67753$errSecMissingAttributeSeed@-67759$errSecMissingAttributeStartDate@-67781$errSecMissingAttributeSubprime@-67791$errSecMissingAttributeSymmetricKeyFormat@-67803$errSecMissingAttributeVersion@-67785$errSecMissingAttributeWrappedKeyFormat@-67805$errSecMissingEntitlement@-34018$errSecMissingRequiredExtension@-67880$errSecMissingValue@-67871$errSecMobileMeCSRVerifyFailure@-67665$errSecMobileMeFailedConsistencyCheck@-67666$errSecMobileMeNoRequestPending@-67664$errSecMobileMeRequestAlreadyPending@-67663$errSecMobileMeRequestQueued@-67657$errSecMobileMeRequestRedirected@-67658$errSecMobileMeServerAlreadyExists@-67661$errSecMobileMeServerError@-67659$errSecMobileMeServerNotAvailable@-67660$errSecMobileMeServerServiceErr@-67662$errSecModuleManagerInitializeFailed@-67721$errSecModuleManagerNotFound@-67722$errSecModuleManifestVerifyFailed@-67678$errSecModuleNotLoaded@-67718$errSecMultipleExecSegments@-66995$errSecMultiplePrivKeys@-25259$errSecMultipleValuesUnsupported@-67842$errSecNetworkFailure@-67636$errSecNoAccessForItem@-25243$errSecNoBasicConstraints@-67604$errSecNoBasicConstraintsCA@-67605$errSecNoCertificateModule@-25313$errSecNoDefaultAuthority@-67844$errSecNoDefaultKeychain@-25307$errSecNoFieldValues@-67859$errSecNoPolicyModule@-25314$errSecNoStorageModule@-25312$errSecNoSuchAttr@-25303$errSecNoSuchClass@-25306$errSecNoSuchKeychain@-25294$errSecNoTrustSettings@-25263$errSecNotAvailable@-25291$errSecNotInitialized@-67667$errSecNotLoggedIn@-67729$errSecNotSigner@-26267$errSecNotTrusted@-67843$errSecOCSPBadRequest@-67631$errSecOCSPBadResponse@-67630$errSecOCSPNoSigner@-67640$errSecOCSPNotTrustedToAnchor@-67637$errSecOCSPResponderInternalError@-67642$errSecOCSPResponderMalformedReq@-67641$errSecOCSPResponderSignatureRequired@-67644$errSecOCSPResponderTryLater@-67643$errSecOCSPResponderUnauthorized@-67645$errSecOCSPResponseNonceMismatch@-67646$errSecOCSPSignatureError@-67639$errSecOCSPStatusUnrecognized@-67633$errSecOCSPUnavailable@-67632$errSecOpWr@-49$errSecOutputLengthError@-67725$errSecPVCAlreadyConfigured@-67707$errSecPVCReferentNotFound@-67669$errSecParam@-50$errSecPassphraseRequired@-25260$errSecPathLengthConstraintExceeded@-67611$errSecPkcs12VerifyFailure@-25264$errSecPolicyNotFound@-25241$errSecPrivilegeNotGranted@-67705$errSecPrivilegeNotSupported@-67726$errSecPublicKeyInconsistent@-67811$errSecQuerySizeUnknown@-67809$errSecQuotaExceeded@-67596$errSecReadOnly@-25292$errSecReadOnlyAttr@-25309$errSecRecordModified@-67638$errSecRejectedForm@-67845$errSecRequestDescriptor@-67856$errSecRequestLost@-67846$errSecRequestRejected@-67847$errSecResourceSignBadCertChainLength@-67652$errSecResourceSignBadExtKeyUsage@-67653$errSecSMIMEBadExtendedK
eyUsage@-67624$errSecSMIMEBadKeyUsage@-67625$errSecSMIMEEmailAddressesNotFound@-67623$errSecSMIMEKeyUsageNotCritical@-67626$errSecSMIMENoEmailAddress@-67627$errSecSMIMESubjAltNameNotCritical@-67628$errSecSSLBadExtendedKeyUsage@-67629$errSecSelfCheckFailed@-67676$errSecServiceNotAvailable@-67585$errSecSigningTimeMissing@-67894$errSecStagedOperationInProgress@-67806$errSecStagedOperationNotStarted@-67807$errSecSuccess@0$errSecTagNotFound@-67692$errSecTimestampAddInfoNotAvailable@-67892$errSecTimestampBadAlg@-67886$errSecTimestampBadDataFormat@-67888$errSecTimestampBadRequest@-67887$errSecTimestampInvalid@-67883$errSecTimestampMissing@-67882$errSecTimestampNotTrusted@-67884$errSecTimestampRejection@-67895$errSecTimestampRevocationNotification@-67898$errSecTimestampRevocationWarning@-67897$errSecTimestampServiceNotAvailable@-67885$errSecTimestampSystemFailure@-67893$errSecTimestampTimeNotAvailable@-67889$errSecTimestampUnacceptedExtension@-67891$errSecTimestampUnacceptedPolicy@-67890$errSecTimestampWaiting@-67896$errSecTrustNotAvailable@-25245$errSecTrustSettingDeny@-67654$errSecUnimplemented@-4$errSecUnknownCRLExtension@-67619$errSecUnknownCertExtension@-67618$errSecUnknownCriticalExtensionFlag@-67603$errSecUnknownFormat@-25257$errSecUnknownQualifiedCertStatement@-67656$errSecUnknownTag@-67687$errSecUnsupportedAddressType@-67848$errSecUnsupportedFieldFormat@-67860$errSecUnsupportedFormat@-25256$errSecUnsupportedIndexInfo@-67861$errSecUnsupportedKeyAttributeMask@-67739$errSecUnsupportedKeyFormat@-67734$errSecUnsupportedKeyLabel@-67741$errSecUnsupportedKeySize@-67735$errSecUnsupportedKeyUsageMask@-67737$errSecUnsupportedLocality@-67862$errSecUnsupportedNumAttributes@-67863$errSecUnsupportedNumIndexes@-67864$errSecUnsupportedNumRecordTypes@-67865$errSecUnsupportedNumSelectionPreds@-67873$errSecUnsupportedOperator@-67874$errSecUnsupportedQueryLimits@-67872$errSecUnsupportedService@-67849$errSecUnsupportedVectorOfBuffers@-67743$errSecUserCanceled@-128$errSecVerificationFailure@-67686$errSecVerifyActionFailed@-67825$errSecVerifyFailed@-67808$errSecWrPerm@-61$errSecWrongSecVersion@-25310$errSecureDownloadInvalidDownload@-20053$errSecureDownloadInvalidTicket@-20052$errSessionAuthorizationDenied@-60502$errSessionInvalidAttributes@-60501$errSessionInvalidId@-60500$errSessionSuccess@0$errSessionValueNotSet@-60503$kAlwaysAuthenticate@1$kAuthorizationExternalFormLength@32$kAuthorizationFlagCanNotPreAuthorize@1$kAuthorizationFlagDefaults@0$kAuthorizationFlagDestroyRights@8$kAuthorizationFlagExtendRights@2$kAuthorizationFlagInteractionAllowed@1$kAuthorizationFlagNoData@1048576$kAuthorizationFlagPartialRights@4$kAuthorizationFlagPreAuthorize@16$kAuthorizationResultAllow@0$kAuthorizationResultDeny@1$kAuthorizationResultUndefined@2$kAuthorizationResultUserCanceled@3$kCMSAttrAppleCodesigningHashAgility@16$kCMSAttrAppleCodesigningHashAgilityV2@32$kCMSAttrAppleExpirationTime@64$kCMSAttrNone@0$kCMSAttrSigningTime@8$kCMSAttrSmimeCapabilities@1$kCMSAttrSmimeEncryptionKeyPrefs@2$kCMSAttrSmimeMSEncryptionKeyPrefs@4$kCMSCertificateChain@2$kCMSCertificateChainWithRoot@3$kCMSCertificateNone@0$kCMSCertificateSignerOnly@1$kCMSSignerInvalidCert@4$kCMSSignerInvalidIndex@5$kCMSSignerInvalidSignature@3$kCMSSignerNeedsDetachedContent@2$kCMSSignerUnsigned@0$kCMSSignerValid@1$kDTLSProtocol1@9$kNeverAuthenticate@0$kSSLAborted@4$kSSLCiphersuiteGroupATS@3$kSSLCiphersuiteGroupATSCompatibility@4$kSSLCiphersuiteGroupCompatibility@1$kSSLCiphersuiteGroupDefault@0$kSSLCiphersuiteGroupLegacy@2$kSSLClientCertNone@0$kSSLClientCertRejected@3$k
SSLClientCertRequested@1$kSSLClientCertSent@2$kSSLClientSide@1$kSSLClosed@3$kSSLConnected@2$kSSLDatagramType@1$kSSLHandshake@1$kSSLIdle@0$kSSLProtocol2@1$kSSLProtocol3@2$kSSLProtocol3Only@3$kSSLProtocolAll@6$kSSLProtocolUnknown@0$kSSLServerSide@0$kSSLSessionOptionAllowRenegotiation@8$kSSLSessionOptionAllowServerIdentityChange@5$kSSLSessionOptionBreakOnCertRequested@1$kSSLSessionOptionBreakOnClientAuth@2$kSSLSessionOptionBreakOnClientHello@7$kSSLSessionOptionBreakOnServerAuth@0$kSSLSessionOptionEnableSessionTickets@9$kSSLSessionOptionFallback@6$kSSLSessionOptionFalseStart@3$kSSLSessionOptionSendOneByteRecord@4$kSSLStreamType@0$kSec3DES192@192$kSecAES128@128$kSecAES192@192$kSecAES256@256$kSecAccessControlAnd@32768$kSecAccessControlApplicationPassword@2147483648$kSecAccessControlDevicePasscode@16$kSecAccessControlOr@16384$kSecAccessControlPrivateKeyUsage@1073741824$kSecAccessControlTouchIDAny@2$kSecAccessControlTouchIDCurrentSet@8$kSecAccessControlUserPresence@1$kSecAccountItemAttr@1633903476$kSecAddEvent@3$kSecAddEventMask@8$kSecAddressItemAttr@1633969266$kSecAlias@1634494835$kSecAppleSharePasswordItemClass@1634953328$kSecAuthenticationTypeAny@0$kSecAuthenticationTypeDPA@1633775716$kSecAuthenticationTypeDefault@1953261156$kSecAuthenticationTypeHTMLForm@1836216166$kSecAuthenticationTypeHTTPBasic@1886680168$kSecAuthenticationTypeHTTPDigest@1685353576$kSecAuthenticationTypeItemAttr@1635023216$kSecAuthenticationTypeMSN@1634628461$kSecAuthenticationTypeNTLM@1835824238$kSecAuthenticationTypeRPA@1633775730$kSecCSBasicValidateOnly@6$kSecCSCalculateCMSDigest@64$kSecCSCheckAllArchitectures@1$kSecCSCheckGatekeeperArchitectures@65$kSecCSCheckNestedCode@8$kSecCSCheckTrustedAnchors@134217728$kSecCSConsiderExpiration@2147483648$kSecCSContentInformation@16$kSecCSDedicatedHost@1$kSecCSDefaultFlags@0$kSecCSDoNotValidateExecutable@2$kSecCSDoNotValidateResources@4$kSecCSDynamicInformation@8$kSecCSEnforceRevocationChecks@1073741824$kSecCSFullReport@32$kSecCSGenerateGuestHash@2$kSecCSInternalInformation@1$kSecCSNoNetworkAccess@536870912$kSecCSQuickCheck@67108864$kSecCSReportProgress@268435456$kSecCSRequirementInformation@4$kSecCSRestrictSidebandData@512$kSecCSRestrictSymlinks@128$kSecCSRestrictToAppLike@256$kSecCSSigningInformation@2$kSecCSSingleThreaded@4096$kSecCSSkipResourceDirectory@32$kSecCSStrictValidate@16$kSecCSUseAllArchitectures@1$kSecCSUseSoftwareSigningCert@1024$kSecCSValidatePEH@2048$kSecCertificateEncoding@1667591779$kSecCertificateItemClass@2147487744$kSecCertificateType@1668577648$kSecCodeSignatureAdhoc@2$kSecCodeSignatureEnforcement@4096$kSecCodeSignatureForceExpiration@1024$kSecCodeSignatureForceHard@256$kSecCodeSignatureForceKill@512$kSecCodeSignatureHashSHA1@1$kSecCodeSignatureHashSHA256@2$kSecCodeSignatureHashSHA256Truncated@3$kSecCodeSignatureHashSHA384@4$kSecCodeSignatureHashSHA512@5$kSecCodeSignatureHost@1$kSecCodeSignatureLibraryValidation@8192$kSecCodeSignatureNoHash@0$kSecCodeSignatureRestrict@2048$kSecCodeSignatureRuntime@65536$kSecCodeStatusDebugged@268435456$kSecCodeStatusHard@256$kSecCodeStatusKill@512$kSecCodeStatusPlatform@67108864$kSecCodeStatusValid@1$kSecCommentItemAttr@1768123764$kSecCreationDateItemAttr@1667522932$kSecCreatorItemAttr@1668445298$kSecCredentialTypeDefault@0$kSecCredentialTypeNoUI@2$kSecCredentialTypeWithUI@1$kSecCrlEncoding@1668443747$kSecCrlType@1668445296$kSecCustomIconItemAttr@1668641641$kSecDataAccessEvent@10$kSecDataAccessEventMask@1024$kSecDefaultChangedEvent@9$kSecDefaultChangedEventMask@512$kSecDefaultKeySize@0$kSecDeleteEvent@4$kSecDeleteEventMask@16$kS
ecDescriptionItemAttr@1684370275$kSecDesignatedRequirementType@3$kSecEveryEventMask@4294967295$kSecFormatBSAFE@3$kSecFormatNetscapeCertSequence@13$kSecFormatOpenSSL@1$kSecFormatPEMSequence@10$kSecFormatPKCS12@12$kSecFormatPKCS7@11$kSecFormatRawKey@4$kSecFormatSSH@2$kSecFormatSSHv2@14$kSecFormatUnknown@0$kSecFormatWrappedLSH@8$kSecFormatWrappedOpenSSL@6$kSecFormatWrappedPKCS8@5$kSecFormatWrappedSSH@7$kSecFormatX509Cert@9$kSecGenericItemAttr@1734700641$kSecGenericPasswordItemClass@1734700656$kSecGuestRequirementType@2$kSecHonorRoot@256$kSecHostRequirementType@1$kSecInternetPasswordItemClass@1768842612$kSecInvalidRequirementType@6$kSecInvisibleItemAttr@1768846953$kSecItemPemArmour@1$kSecItemTypeAggregate@5$kSecItemTypeCertificate@4$kSecItemTypePrivateKey@1$kSecItemTypePublicKey@2$kSecItemTypeSessionKey@3$kSecItemTypeUnknown@0$kSecKeyAlias@2$kSecKeyAlwaysSensitive@15$kSecKeyApplicationTag@7$kSecKeyDecrypt@19$kSecKeyDerive@20$kSecKeyEffectiveKeySize@11$kSecKeyEncrypt@18$kSecKeyEndDate@13$kSecKeyExtractable@16$kSecKeyImportOnlyOne@1$kSecKeyKeyClass@0$kSecKeyKeyCreator@8$kSecKeyKeySizeInBits@10$kSecKeyKeyType@9$kSecKeyLabel@6$kSecKeyModifiable@5$kSecKeyNeverExtractable@17$kSecKeyNoAccessControl@4$kSecKeyOperationTypeDecrypt@3$kSecKeyOperationTypeEncrypt@2$kSecKeyOperationTypeKeyExchange@4$kSecKeyOperationTypeSign@0$kSecKeyOperationTypeVerify@1$kSecKeyPermanent@3$kSecKeyPrintName@1$kSecKeyPrivate@4$kSecKeySecurePassphrase@2$kSecKeySensitive@14$kSecKeySign@21$kSecKeySignRecover@23$kSecKeyStartDate@12$kSecKeyUnwrap@26$kSecKeyUsageAll@2147483647$kSecKeyUsageCRLSign@64$kSecKeyUsageContentCommitment@2$kSecKeyUsageCritical@2147483648$kSecKeyUsageDataEncipherment@8$kSecKeyUsageDecipherOnly@256$kSecKeyUsageDigitalSignature@1$kSecKeyUsageEncipherOnly@128$kSecKeyUsageKeyAgreement@16$kSecKeyUsageKeyCertSign@32$kSecKeyUsageKeyEncipherment@4$kSecKeyUsageNonRepudiation@2$kSecKeyUsageUnspecified@0$kSecKeyVerify@22$kSecKeyVerifyRecover@24$kSecKeyWrap@25$kSecKeychainListChangedEvent@11$kSecKeychainListChangedMask@2048$kSecKeychainPromptInvalid@64$kSecKeychainPromptInvalidAct@128$kSecKeychainPromptRequirePassphase@1$kSecKeychainPromptUnsigned@16$kSecKeychainPromptUnsignedAct@32$kSecLabelItemAttr@1818321516$kSecLibraryRequirementType@4$kSecLockEvent@1$kSecLockEventMask@2$kSecMatchBits@3$kSecModDateItemAttr@1835295092$kSecNegativeItemAttr@1852139361$kSecNoGuest@0$kSecPaddingNone@0$kSecPaddingOAEP@2$kSecPaddingPKCS1@1$kSecPaddingPKCS1MD2@32768$kSecPaddingPKCS1MD5@32769$kSecPaddingPKCS1SHA1@32770$kSecPaddingPKCS1SHA224@32771$kSecPaddingPKCS1SHA256@32772$kSecPaddingPKCS1SHA384@32773$kSecPaddingPKCS1SHA512@32774$kSecPaddingSigRaw@16384$kSecPasswordChangedEvent@6$kSecPasswordChangedEventMask@64$kSecPathItemAttr@1885434984$kSecPluginRequirementType@5$kSecPortItemAttr@1886351988$kSecPreferencesDomainCommon@2$kSecPreferencesDomainDynamic@3$kSecPreferencesDomainSystem@1$kSecPreferencesDomainUser@0$kSecPrivateKeyItemClass@16$kSecProtocolItemAttr@1886675820$kSecProtocolTypeAFP@1634103328$kSecProtocolTypeAny@0$kSecProtocolTypeAppleTalk@1635019883$kSecProtocolTypeCIFS@1667851891$kSecProtocolTypeCVSpserver@1668707184$kSecProtocolTypeDAAP@1684103536$kSecProtocolTypeEPPC@1701867619$kSecProtocolTypeFTP@1718906912$kSecProtocolTypeFTPAccount@1718906977$kSecProtocolTypeFTPProxy@1718907000$kSecProtocolTypeFTPS@1718906995$kSecProtocolTypeHTTP@1752462448$kSecProtocolTypeHTTPProxy@1752461432$kSecProtocolTypeHTTPS@1752461427$kSecProtocolTypeHTTPSProxy@1752462200$kSecProtocolTypeIMAP@1768776048$kSecProtocolTypeIMAPS@1768779891$kSecProtocolType
IPP@1768976416$kSecProtocolTypeIRC@1769104160$kSecProtocolTypeIRCS@1769104243$kSecProtocolTypeLDAP@1818517872$kSecProtocolTypeLDAPS@1818521715$kSecProtocolTypeNNTP@1852732528$kSecProtocolTypeNNTPS@1853124723$kSecProtocolTypePOP3@1886351411$kSecProtocolTypePOP3S@1886351475$kSecProtocolTypeRTSP@1920234352$kSecProtocolTypeRTSPProxy@1920234360$kSecProtocolTypeSMB@1936548384$kSecProtocolTypeSMTP@1936553072$kSecProtocolTypeSOCKS@1936685088$kSecProtocolTypeSSH@1936943136$kSecProtocolTypeSVN@1937141280$kSecProtocolTypeTelnet@1952803950$kSecProtocolTypeTelnetS@1952803955$kSecPublicKeyItemClass@15$kSecRSAMax@4096$kSecRSAMin@1024$kSecReadPermStatus@2$kSecRevocationCRLMethod@2$kSecRevocationNetworkAccessDisabled@16$kSecRevocationOCSPMethod@1$kSecRevocationPreferCRL@4$kSecRevocationRequirePositiveResponse@8$kSecRevocationUseAnyAvailableMethod@3$kSecScriptCodeItemAttr@1935897200$kSecSecurityDomainItemAttr@1935961454$kSecServerItemAttr@1936881266$kSecServiceItemAttr@1937138533$kSecSignatureItemAttr@1936943463$kSecSymmetricKeyItemClass@17$kSecTransformErrorAbortInProgress@19$kSecTransformErrorAborted@20$kSecTransformErrorAttributeNotFound@1$kSecTransformErrorInvalidAlgorithm@6$kSecTransformErrorInvalidConnection@15$kSecTransformErrorInvalidInput@10$kSecTransformErrorInvalidInputDictionary@5$kSecTransformErrorInvalidLength@7$kSecTransformErrorInvalidOperation@2$kSecTransformErrorInvalidType@8$kSecTransformErrorMissingParameter@14$kSecTransformErrorMoreThanOneOutput@4$kSecTransformErrorNameAlreadyRegistered@11$kSecTransformErrorNotInitializedCorrectly@3$kSecTransformErrorUnsupportedAttribute@12$kSecTransformInvalidArgument@21$kSecTransformInvalidOverride@17$kSecTransformMetaAttributeCanCycle@7$kSecTransformMetaAttributeDeferred@5$kSecTransformMetaAttributeExternalize@8$kSecTransformMetaAttributeHasInboundConnection@10$kSecTransformMetaAttributeHasOutboundConnections@9$kSecTransformMetaAttributeName@1$kSecTransformMetaAttributeRef@2$kSecTransformMetaAttributeRequired@3$kSecTransformMetaAttributeRequiresOutboundConnection@4$kSecTransformMetaAttributeStream@6$kSecTransformMetaAttributeValue@0$kSecTransformOperationNotSupportedOnGroup@13$kSecTransformTransformIsExecuting@16$kSecTransformTransformIsNotRegistered@18$kSecTrustOptionAllowExpired@1$kSecTrustOptionAllowExpiredRoot@8$kSecTrustOptionFetchIssuerFromNet@4$kSecTrustOptionImplicitAnchors@64$kSecTrustOptionLeafIsCA@2$kSecTrustOptionRequireRevPerCert@16$kSecTrustOptionUseTrustSettings@32$kSecTrustResultConfirm@2$kSecTrustResultDeny@3$kSecTrustResultFatalTrustFailure@6$kSecTrustResultInvalid@0$kSecTrustResultOtherError@7$kSecTrustResultProceed@1$kSecTrustResultRecoverableTrustFailure@5$kSecTrustResultUnspecified@4$kSecTrustSettingsChangedEvent@12$kSecTrustSettingsChangedEventMask@4096$kSecTrustSettingsDefaultRootCertSetting@-1$kSecTrustSettingsDomainAdmin@1$kSecTrustSettingsDomainSystem@2$kSecTrustSettingsDomainUser@0$kSecTrustSettingsKeyUseAny@4294967295$kSecTrustSettingsKeyUseEnDecryptData@2$kSecTrustSettingsKeyUseEnDecryptKey@4$kSecTrustSettingsKeyUseKeyExchange@32$kSecTrustSettingsKeyUseSignCert@8$kSecTrustSettingsKeyUseSignRevocation@16$kSecTrustSettingsKeyUseSignature@1$kSecTrustSettingsResultDeny@3$kSecTrustSettingsResultInvalid@0$kSecTrustSettingsResultTrustAsRoot@2$kSecTrustSettingsResultTrustRoot@1$kSecTrustSettingsResultUnspecified@4$kSecTypeItemAttr@1954115685$kSecUnlockEvent@2$kSecUnlockEventMask@4$kSecUnlockStateStatus@1$kSecUpdateEvent@5$kSecUpdateEventMask@32$kSecUseOnlyGID@2$kSecUseOnlyUID@1$kSecVolumeItemAttr@1986817381$kSecWritePermStatus@4$
kSecp192r1@192$kSecp256r1@256$kSecp384r1@384$kSecp521r1@521$kSecureDownloadDoNotEvaluateSigner@0$kSecureDownloadEvaluateSigner@1$kSecureDownloadFailEvaluation@2$kTLSProtocol1@4$kTLSProtocol11@7$kTLSProtocol12@8$kTLSProtocol13@10$kTLSProtocol1Only@5$kTLSProtocolMaxSupported@999$kTryAuthenticate@2$noSecuritySession@0$sessionHasGraphicAccess@16$sessionHasTTY@32$sessionIsRemote@4096$sessionIsRoot@1$sessionKeepCurrentBootstrap@32768$''' -misc.update({'kSecTrustSettingsAllowedError': b'kSecTrustSettingsAllowedError'.decode("utf-8"), 'kAuthorizationComment': b'comment', 'kAuthorizationEnvironmentIcon': b'icon', 'kAuthorizationRuleClassDeny': b'deny', 'kSecTrustSettingsPolicyString': b'kSecTrustSettingsPolicyString'.decode("utf-8"), 'kAuthorizationEnvironmentUsername': b'username', 'kAuthorizationRightExecute': b'system.privilege.admin', 'kAuthorizationRightRule': b'rule', 'kAuthorizationRuleIsAdmin': b'is-admin', 'kAuthorizationRuleClassAllow': b'allow', 'kAuthorizationEnvironmentPassword': b'password', 'kAuthorizationRuleAuthenticateAsAdmin': b'authenticate-admin', 'kSecTrustSettingsResult': b'kSecTrustSettingsResult'.decode("utf-8"), 'kAuthorizationEnvironmentPrompt': b'prompt', 'kAuthorizationRuleAuthenticateAsSessionUser': b'authenticate-session-owner', 'kSecTrustSettingsKeyUsage': b'kSecTrustSettingsKeyUsage'.decode("utf-8"), 'kAuthorizationEnvironmentShared': b'shared', 'kSecTrustSettingsPolicy': b'kSecTrustSettingsPolicy'.decode("utf-8"), 'kSecTrustSettingsApplication': b'kSecTrustSettingsApplication'.decode("utf-8")}) -functions={'CMSEncoderGetCertificateChainMode': (b'i@o^I',), 'SecKeyGeneratePair': (b'i@o^@o^@', '', {'arguments': {1: {'already_cfretained': True}, 2: {'already_cfretained': True}}}), 'SecCodeCopyPath': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecCertificateCopySerialNumber': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecKeyIsAlgorithmSupported': (sel32or64(b'Z@i@', b'Z@q@'),), 'SecTrustSetPolicies': (b'i@@',), 'SSLSetError': (b'i@i',), 'SecTransformCustomSetAttribute': (sel32or64(b'@^{OpaqueSecTransformImplementation=}@i@', b'@^{OpaqueSecTransformImplementation=}@q@'),), 'SSLGetSessionOption': (b'i@io^Z',), 'SecStaticCodeCreateWithPath': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecTrustGetCertificateAtIndex': (sel32or64(b'@@i', b'@@q'),), 'SecTransformSetTransformAction': (b'@^{OpaqueSecTransformImplementation=}@@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': '^v'}}}}}}), 'SSLGetSessionState': (b'i@o^i',), 'SecItemImport': (b'i@@N^IN^IIn^{_SecItemImportExportKeyParameters=II@@@@@@}@o^@', '', {'arguments': {7: {'already_cfretained': True}}}), 'SSLGetMaxDatagramRecordSize': (b'i@o^L',), 'SecTrustEvaluate': (b'i@o^I',), 'CMSDecoderIsContentEncrypted': (b'i@o^Z',), 'SecTaskCreateFromSelf': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecureDownloadCopyCreationDate': (b'i^{OpaqueSecureDownload=}o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecTransformSetAttributeAction': (b'@^{OpaqueSecTransformImplementation=}@@@?', '', {'arguments': {3: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}, 2: {'type': '@'}}}}}}), 'sec_certificate_copy_ref': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SSLCopyDistinguishedNames': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecTrustSetExceptions': (b'B@@',), 'SecItemAdd': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': 
True}}}), 'SecKeychainItemCopyKeychain': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecPolicyCreateRevocation': (sel32or64(b'@I', b'@Q'), '', {'retval': {'already_cfretained': True}}), 'SecKeyCreateEncryptedData': (b'@@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecCertificateCopyNormalizedSubjectContent': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecKeyCreateSignature': (b'@@@@o^@', '', {'retval': {'already_cfretained': True}}), 'CMSDecoderCreate': (b'io^@', '', {'arguments': {0: {'already_cfretained': True}}}), 'CMSDecoderCopyAllCerts': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecDigestTransformCreate': (sel32or64(b'@@io^@', b'@@qo^@'), '', {'retval': {'already_cfretained': True}}), 'SSLSetEncryptionCertificate': (b'i@@',), 'SecHostCreateGuest': (b'iII@@Io^I',), 'SecTrustSettingsCopyCertificates': (b'iIo^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeychainUnlock': (b'i@In^vZ', '', {'arguments': {2: {'c_array_length_in_arg': 1}}}), 'SSLSetSessionTicketsEnabled': (b'i@Z',), 'SecHostSelectGuest': (b'iII',), 'AuthorizationCopyPrivilegedReference': (b'io^^{AuthorizationOpaqueRef=}',), 'CMSDecoderSetDetachedContent': (b'i@@',), 'sec_identity_create': (b'@@', '', {'retval': {'already_retained': True}}), 'SSLAddDistinguishedName': (b'i@n^vL', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'SecKeychainItemCopyFromPersistentReference': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecTransformCopyExternalRepresentation': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecStaticCodeCheckValidityWithErrors': (b'i@I@o^@', '', {'arguments': {3: {'already_cfretained': True}}}), 'SSLGetNegotiatedProtocolVersion': (b'i@o^i',), 'sec_protocol_metadata_access_distinguished_names': (b'B@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}}}, 'callable_retained': True}}}), 'SecTransformPushbackAttribute': (b'@^{OpaqueSecTransformImplementation=}@@',), 'SecAccessCreateWithOwnerAndACL': (b'@III@o^@', '', {'retval': {'already_cfretained': True}}), 'SecTrustCopyResult': (b'@@', '', {'retval': {'already_cfretained': True}}), 'CMSDecoderCopySignerSigningTime': (b'i@Lo^d', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecTrustSetNetworkFetchAllowed': (b'i@Z',), 'SSLSetCertificate': (b'i@@',), 'SecACLSetContents': (b'i@@@S',), 'SecTrustGetCertificateCount': (sel32or64(b'i@', b'q@'),), 'SecPKCS12Import': (b'i@@o^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecTransformSetAttribute': (b'Z@@@o^@',), 'SecTrustSettingsSetTrustSettings': (b'i@I@',), 'SecKeyCopyExternalRepresentation': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecTransformCreateGroupTransform': (b'@', '', {'retval': {'already_cfretained': True}}), 'CMSDecoderSetSearchKeychain': (b'i@@',), 'SecTrustedApplicationSetData': (b'i@@',), 'SSLSetSessionOption': (b'i@iZ',), 'SecKeychainSetPreferenceDomain': (b'ii',), 'SecTransformCreateFromExternalRepresentation': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SSLGetBufferedReadSize': (b'i@o^L',), 'SecTrustSetVerifyDate': (b'i@@',), 'sec_trust_copy_ref': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecACLGetTypeID': (sel32or64(b'I', b'Q'),), 'SSLContextGetTypeID': (sel32or64(b'I', b'Q'),), 'SessionCreate': (b'iII',), 'SecAccessCopyOwnerAndACL': (b'i@o^Io^Io^Io^@', '', {'arguments': {4: {'already_cfretained': True}}}), 'SecPolicyCreateWithProperties': 
(b'@@@', '', {'retval': {'already_cfretained': True}}), 'SecKeychainLockAll': (b'i',), 'SSLGetPeerDomainName': (b'i@o^tN^L', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'sec_protocol_metadata_access_ocsp_response': (b'B@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}}}}}}), 'SecACLCopyContents': (b'i@o^@o^@o^S', '', {'arguments': {1: {'already_cfretained': True}, 2: {'already_cfretained': True}}}), 'SecCodeMapMemory': (b'i@I',), 'CMSDecoderCopyContent': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecAccessCreate': (b'i@@o^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecTransformExecute': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecCertificateCopyEmailAddresses': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'AuthorizationMakeExternalForm': (b'i^{AuthorizationOpaqueRef=}o^{_AuthorizationExternalForm=[32C]}',), 'SecCodeCheckValidityWithErrors': (b'i@I@o^@', '', {'arguments': {3: {'already_cfretained': True}}}), 'SecCodeCopyDesignatedRequirement': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecTransformNoData': (b'@',), 'SecTransformRegister': (b'Z@^?o^@', '', {'arguments': {1: {'callable': {'retval': {'callable': {'retval': {'type': '@'}, 'arguments': {0: {'type': '^v'}}}, 'type': b'@?'}, 'arguments': {0: {'type': '@'}, 1: {'type': '@'}, 2: {'type': '^{OpaqueSecTransformImplementation=}'}}}, 'callable_retained': True}}}), 'SecCodeCopyStaticCode': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'CMSEncoderAddSignedAttributes': (b'i@I',), 'SecIdentityCopySystemIdentity': (b'i@o^@o^@', '', {'arguments': {1: {'already_cfretained': True}, 2: {'already_cfretained': True}}}), 'SecureDownloadGetDownloadSize': (b'i^{OpaqueSecureDownload=}o^q',), 'SecKeychainItemDelete': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecStaticCodeCreateWithPathAndAttributes': (b'i@I@o^@', '', {'arguments': {3: {'already_cfretained': True}}}), 'sec_identity_create_with_certificates': (b'@@@', '', {'retval': {'already_retained': True}}), 'SSLCopyPeerTrust': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeyVerifySignature': (b'Z@@@@o^@',), 'AuthorizationRightGet': (b'i^t^@', '', {'arguments': {0: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 1: {'already_retained': True, 'type_modifier': 'o'}}}), 'SecDecryptTransformCreate': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecCertificateCopyNormalizedIssuerContent': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecPolicyCreateBasicX509': (b'@', '', {'retval': {'already_cfretained': True}}), 'SecKeychainLock': (b'i@',), 'SecTrustGetNetworkFetchAllowed': (b'i@o^Z',), 'SecureDownloadCreateWithTicket': (b'i@^?^v^?^vo^^{OpaqueSecureDownload=}', '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': '@'}, 1: {'type': '^v'}}}, 'callable_retained': True}, 3: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': '@'}, 1: {'type': 'i'}, 2: {'type': '^v'}}}, 'callable_retained': True}}}), 'CMSEncoderCopySupportingCerts': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeychainItemSetAccess': (b'i@@',), 'SSLHandshake': (b'i@',), 'SecKeychainAddCallback': (b'i^?I^v', '', {'arguments': {0: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': 'I'}, 1: {'type': 'n^{SecKeychainCallbackInfo=I@@i}'}, 2: {'type': '^v'}}}, 
'callable_retained': True}}}), 'SecureDownloadCopyURLs': (b'i^{OpaqueSecureDownload=}o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'CMSEncoderAddRecipients': (b'i@@',), 'SecTrustCopyPublicKey': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecKeyCreateWithData': (b'@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecKeychainDelete': (b'i@',), 'sec_identity_copy_certificates_ref': (b'@@', '', {'retval': {'already_cfretained': True}}), 'AuthorizationRightSet': (b'i^{AuthorizationOpaqueRef=}^t@@@@', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'SecACLRemove': (b'i@',), 'CMSDecoderCopyDetachedContent': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeychainCreate': (b'in^tIn^vZ@o^@', '', {'arguments': {0: {'c_array_delimited_by_null': True}, 2: {'c_array_length_in_arg': 1}, 5: {'already_cfretained': True}}}), 'SecEncryptTransformGetTypeID': (sel32or64(b'I', b'Q'),), 'SSLGetDiffieHellmanParams': (b'i@o^vN^L', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'SSLSetSessionConfig': (b'i@@',), 'SecCertificateCreateWithData': (b'@@@', '', {'retval': {'already_cfretained': True}}), 'SecRequirementCopyData': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'CMSDecoderCopyEncapsulatedContentType': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecRequirementCreateWithStringAndErrors': (b'i@Io^@o^@', '', {'arguments': {3: {'already_cfretained': True}}}), 'SSLReHandshake': (b'i@',), 'sec_protocol_metadata_get_negotiated_ciphersuite': (b'I@',), 'SSLCopyRequestedPeerName': (b'i@o^tN^L', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'CMSDecoderCopySignerCert': (b'i@Lo^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecTrustCopyExceptions': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecPolicyCreateSSL': (b'@Z@', '', {'retval': {'already_cfretained': True}}), 'SecKeychainItemCreatePersistentReference': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SSLCopyCertificateAuthorities': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecTransformConnectTransforms': (b'@@@@@@o^@',), 'SecAccessCopyACLList': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecHostSelectedGuest': (b'iIo^I',), 'SecAccessCopyMatchingACLList': (b'@@@', '', {'retval': {'already_cfretained': True}}), 'SecKeychainItemCreateCopy': (b'i@@@o^@', '', {'arguments': {3: {'already_cfretained': True}}}), 'SecKeyGeneratePairAsync': (b'v@^{dispatch_queue_s=}@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}, 2: {'type': '@'}, 3: {'type': '@'}}}}}}), 'SecCertificateCopyData': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecKeyGenerateSymmetric': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecHostSetGuestStatus': (b'iII@I',), 'CMSDecoderCopySignerStatus': (b'i@L@Zo^Io^i',), 'SecTrustCopyCustomAnchorCertificates': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'CMSEncoderCopySignerTimestampWithPolicy': (b'i@@Lo^d',), 'SecTrustSettingsImportExternalRepresentation': (b'iI@',), 'SecTrustCreateWithCertificates': (b'i@@o^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecHostSetHostingPort': (b'iII',), 'SecDecryptTransformGetTypeID': (sel32or64(b'I', b'Q'),), 'CMSEncoderUpdateContent': (b'i@n^vL', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'SSLGetNegotiatedCipher': (b'i@o^i',), 
'SecTrustCopyProperties': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecKeyCopyKeyExchangeResult': (b'@@@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecTrustEvaluateWithError': (b'B@o^@',), 'SecPolicyGetTypeID': (sel32or64(b'I', b'Q'),), 'SessionGetInfo': (b'iI^I^I', '', {'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}}}), 'sec_protocol_metadata_access_supported_signature_algorithms': (b'B@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'S'}}}}}}), 'SecTransformFindByName': (b'@@@',), 'SecIdentityCreateWithCertificate': (b'i@@o^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecTaskCreateWithAuditToken': (b'@@{?=[8I]}', '', {'retval': {'already_cfretained': True}}), 'SecCertificateCopyValues': (b'@@@o^@', '', {'retval': {'already_cfretained': True}}), 'AuthorizationCreateFromExternalForm': (b'in^{_AuthorizationExternalForm=[32C]}o^^{AuthorizationOpaqueRef=}',), 'CMSDecoderUpdateMessage': (b'i@n^vL', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'CMSEncoderCopyEncapsulatedContentType': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SSLGetConnection': (b'i@o^@',), 'SecKeychainSetUserInteractionAllowed': (b'iZ',), 'SecTrustSetAnchorCertificatesOnly': (b'i@Z',), 'SSLGetPeerID': (b'i@o^vN^L', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'SecTransformCreateReadTransformWithReadStream': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecRequirementGetTypeID': (sel32or64(b'I', b'Q'),), 'SSLCreateContext': (b'@@ii', '', {'retval': {'already_cfretained': True}}), 'CMSEncoderGetHasDetachedContent': (b'i@o^Z',), 'SSLSetConnection': (b'i@@',), 'SecKeychainRemoveCallback': (b'i^?', '', {'arguments': {0: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': 'I'}, 1: {'type': 'n^{SecKeychainCallbackInfo=I@@i}'}, 2: {'type': '^v'}}}, 'callable_retained': True}}}), 'SecCertificateCopyPublicKey': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecPolicyCopyProperties': (b'@@', '', {'retval': {'already_cfretained': True}}), 'CMSEncoderSetEncapsulatedContentTypeOID': (b'i@@',), 'SecDecodeTransformCreate': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'sec_protocol_metadata_get_negotiated_protocol': (b'^t@', '', {'retval': {'c_array_delimited_by_null': True}}), 'SSLSetALPNProtocols': (b'i@@',), 'SSLGetProtocolVersionMin': (b'i@o^i',), 'SSLSetPeerDomainName': (b'i@n^tL', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'SecSignTransformCreate': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecTransformGetTypeID': (sel32or64(b'I', b'Q'),), 'SecKeychainGetPath': (b'i@N^Io^t', '', {'arguments': {2: {'c_array_length_in_arg': 1}}}), 'SecCertificateCopySerialNumberData': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecCertificateCopyCommonName': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SSLGetSupportedCiphers': (b'i@o^iN^L', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'sec_trust_create': (b'@@', '', {'retval': {'already_retained': True}}), 'CMSEncoderSetSignerAlgorithm': (b'i@@',), 'SecCertificateAddToKeychain': (b'i@@',), 'SecKeyGetBlockSize': (b'L@',), 'SecIdentityCopyPrivateKey': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'CMSEncoderAddSupportingCerts': (b'i@@',), 'sec_certificate_create': (b'@@@', '', {'retval': {'already_retained': True}}), 'SSLSetMaxDatagramRecordSize': (b'i@L',), 'CMSDecoderCopySignerTimestamp': 
(b'i@Lo^d', '', {'arguments': {2: {'already_cfretained': True}}}), 'SSLSetDatagramHelloCookie': (b'i@n^vL', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'sec_identity_copy_ref': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecTaskGetTypeID': (sel32or64(b'I', b'Q'),), 'SSLSetIOFuncs': (b'i@^?^?', '', {'arguments': {1: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': '@'}, 1: {'type': 'o^v', 'c_array_length_in_arg': 2}, 2: {'type': 'N^L'}}}, 'callable_retained': True}, 2: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': '@'}, 1: {'type': 'n^v', 'c_array_length_in_arg': 2}, 2: {'type': 'N^L'}}}, 'callable_retained': True}}}), 'SecKeychainOpen': (b'in^to^@', '', {'arguments': {0: {'c_array_delimited_by_null': True}, 1: {'already_cfretained': True}}}), 'SecCodeGetTypeID': (sel32or64(b'I', b'Q'),), 'SecRequirementCreateWithData': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecCodeCopySigningInformation': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SSLSetProtocolVersionMax': (b'i@i',), 'SecKeychainGetStatus': (b'i@o^I',), 'SSLGetClientCertificateState': (b'i@o^i',), 'CMSDecoderCopySignerTimestampCertificates': (b'i@Lo^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecureDownloadFinished': (b'i^{OpaqueSecureDownload=}',), 'CMSEncoderSetCertificateChainMode': (b'i@I',), 'SecAccessControlCreateWithFlags': (sel32or64(b'@@@Io^@', b'@@@Qo^@'), '', {'retval': {'already_cfretained': True}}), 'SecTrustEvaluateAsync': (b'i@^{dispatch_queue_s}@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}, 2: {'type': 'I'}}}}}}), 'SecureDownloadCopyName': (b'i^{OpaqueSecureDownload=}o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SSLCopyALPNProtocols': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeychainCopySearchList': (b'io^@', '', {'arguments': {0: {'already_cfretained': True}}}), 'SecDigestTransformGetTypeID': (sel32or64(b'I', b'Q'),), 'SecTrustSetOptions': (b'i@I',), 'SSLGetNumberEnabledCiphers': (b'i@o^L',), 'SecIdentityGetTypeID': (sel32or64(b'I', b'Q'),), 'SecKeychainCopyAccess': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'CMSEncoderGetTypeID': (sel32or64(b'I', b'Q'),), 'SecTransformGetAttribute': (b'@@@',), 'CMSDecoderGetNumSigners': (b'i@o^L',), 'SecCertificateCopyPreferred': (b'@@@', '', {'retval': {'already_cfretained': True}}), 'SecRandomCopyBytes': (b'i^{__SecRandom=}L^v', '', {'arguments': {2: {'type_modifier': 'o', 'c_array_length_in_arg': 1}}}), 'CMSDecoderFinalizeMessage': (b'i@',), 'SecKeyWrapSymmetric': (b'@@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecVerifyTransformCreate': (b'@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecCodeCheckValidity': (b'i@I@',), 'CMSEncoderCopyEncodedContent': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'CMSEncoderAddSigners': (b'i@@',), 'sec_protocol_metadata_copy_peer_public_key': (b'@@@', '', {'retval': {'already_retained': True}}), 'AuthorizationFree': (b'i^{AuthorizationOpaqueRef=}I',), 'SecCopyErrorMessageString': (b'@i^v', '', {'retval': {'already_cfretained': True}}), 'CMSEncoderCopySigners': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecureDownloadRelease': (b'i^{OpaqueSecureDownload=}',), 'SecTrustSettingsCopyModificationDate': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecureDownloadUpdateWithData': 
(b'i^{OpaqueSecureDownload=}@',), 'SecKeychainCopyDomainDefault': (b'iio^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecItemDelete': (b'i@',), 'SecStaticCodeCheckValidity': (b'i@I@',), 'sec_protocol_metadata_create_secret_with_context': (b'@@L^tL^vL', '', {'retval': {'already_retained': True}, 'arguments': {2: {'type_modifier': 'n', 'c_array_length_in_arg': 1}, 4: {'type_modifier': 'n', 'c_array_length_in_arg': 3}}}), 'SecCodeCopyHost': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'sec_protocol_metadata_challenge_parameters_are_equal': (b'B@@',), 'SecTrustedApplicationGetTypeID': (sel32or64(b'I', b'Q'),), 'SecTransformSetDataAction': (b'@^{OpaqueSecTransformImplementation=}@@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}}}}}}), 'SecKeychainAddGenericPassword': (b'i@In^tIn^tIn^vo^@', '', {'arguments': {2: {'c_array_length_in_arg': 1}, 4: {'c_array_length_in_arg': 3}, 6: {'c_array_length_in_arg': 5}, 7: {'already_cfretained': True}}}), 'CMSDecoderCopySignerTimestampWithPolicy': (b'i@@Lo^d', '', {'arguments': {3: {'already_cfretained': True}}}), 'SSLRead': (b'i@o^vLo^L', '', {'arguments': {1: {'c_array_length_in_arg': (2, 3)}}}), 'SecTaskCopyValueForEntitlement': (b'@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SSLGetDatagramWriteSize': (b'i@o^L',), 'SecIdentitySetPreferred': (b'i@@@',), 'SecTrustCopyAnchorCertificates': (b'io^@', '', {'arguments': {0: {'already_cfretained': True}}}), 'SecKeychainSetDomainDefault': (b'ii@',), 'SecCertificateGetTypeID': (sel32or64(b'I', b'Q'),), 'SecCertificateCopyShortDescription': (b'@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SSLCopyRequestedPeerNameLength': (b'i@o^L',), 'CMSEncoderCopySignerTimestamp': (b'i@Lo^d',), 'SSLSetClientSideAuthenticate': (b'i@i',), 'sec_protocol_metadata_get_early_data_accepted': (b'B@',), 'SecCodeCopySelf': (b'iIo^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SSLGetNumberSupportedCiphers': (b'i@o^L',), 'SecIdentityCopyPreferred': (b'@@@@', '', {'retval': {'already_cfretained': True}}), 'SecACLUpdateAuthorizations': (b'i@@',), 'SecAccessGetTypeID': (sel32or64(b'I', b'Q'),), 'SecKeychainItemGetTypeID': (sel32or64(b'I', b'Q'),), 'SecTrustSetKeychains': (b'i@@',), 'SSLGetProtocolVersionMax': (b'i@o^i',), 'SecKeyCreateDecryptedData': (b'@@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecKeyCopyPublicKey': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecGroupTransformGetTypeID': (sel32or64(b'I', b'Q'),), 'SecKeychainCopySettings': (b'i@o^{SecKeychainSettings=IZZI}',), 'SecTrustSettingsCreateExternalRepresentation': (b'iIo^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecCertificateCopyKey': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecTrustGetTrustResult': (b'i@o^I',), 'SSLSetDiffieHellmanParams': (b'i@n^vL', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'SecTrustSettingsRemoveTrustSettings': (b'i@I',), 'SecRequirementCreateWithString': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecKeychainSetDomainSearchList': (b'ii@',), 'SecRequirementCopyString': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SSLWrite': (b'i@n^vLo^L', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'sec_protocol_metadata_peers_are_equal': (b'B@@',), 'SSLSetPeerID': (b'i@n^vL', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'SecTransformCreate': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 
'SecCertificateCopyNormalizedSubjectSequence': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SecKeychainCopyDomainSearchList': (b'iio^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeyCreateFromData': (b'@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecTaskCopyValuesForEntitlements': (b'@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecStaticCodeGetTypeID': (sel32or64(b'I', b'Q'),), 'SecItemExport': (b'i@IIn^{_SecItemImportExportKeyParameters=II@@@@@@}o^@', '', {'arguments': {4: {'already_cfretained': True}}}), 'SSLSetProtocolVersionMin': (b'i@i',), 'SecCertificateCopyLongDescription': (b'@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecKeychainSetAccess': (b'i@@',), 'SecKeychainSetSearchList': (b'i@',), 'CMSEncoderCopyRecipients': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecACLCopyAuthorizations': (b'@@', '', {'retval': {'already_cfretained': True}}), 'SSLGetEnabledCiphers': (b'i@o^iN^L', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'SecKeychainGetPreferenceDomain': (b'io^i',), 'SecKeychainGetVersion': (b'io^I',), 'SecKeyCreateRandomKey': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SSLGetPeerDomainNameLength': (b'i@o^L',), 'SecCertificateCopySubjectSummary': (b'@@', '', {'retval': {'already_cfretained': True}}), 'CMSDecoderCopySignerEmailAddress': (b'i@Lo^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecTrustSetSignedCertificateTimestamps': (b'i@@',), 'SecTrustSetOCSPResponse': (b'i@@',), 'SSLSetCertificateAuthorities': (b'i@@Z',), 'SecACLCreateWithSimpleContents': (b'i@@@So^@', '', {'arguments': {4: {'already_cfretained': True}}}), 'SecTrustGetTypeID': (sel32or64(b'I', b'Q'),), 'SecTrustedApplicationCreateFromPath': (b'i^to^@', '', {'arguments': {0: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 1: {'already_cfretained': True}}}), 'SSLSetOCSPResponse': (b'i@@',), 'SecTrustGetVerifyTime': (b'd@',), 'SecTransformExecuteAsync': (b'v@^{dispatch_queue_s=}@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}, 2: {'type': '@'}, 3: {'type': 'Z'}}}}}}), 'SecIdentityCopyCertificate': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeychainAddInternetPassword': (b'i@In^tIn^tIn^tIn^tSIIIn^vo^@', '', {'arguments': {2: {'c_array_length_in_arg': 1}, 4: {'c_array_length_in_arg': 3}, 6: {'c_array_length_in_arg': 5}, 8: {'c_array_length_in_arg': 7}, 13: {'c_array_length_in_arg': 12}, 14: {'already_cfretained': True}}}), 'SecKeychainSetSettings': (b'i@n^{SecKeychainSettings=IZZI}',), 'SecIdentitySetSystemIdentity': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'CMSEncoderSetHasDetachedContent': (b'i@Z',), 'SecEncodeTransformCreate': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecKeychainGetUserInteractionAllowed': (b'io^Z',), 'sec_protocol_metadata_create_secret': (b'@@L^tL', '', {'retval': {'already_retained': True}, 'arguments': {2: {'type_modifier': 'n', 'c_array_length_in_arg': 1}}}), 'SecTrustCopyPolicies': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeychainSetDefault': (b'i@',), 'SecCodeCopyGuestWithAttributes': (b'i@@Io^@', '', {'arguments': {3: {'already_cfretained': True}}}), 'SecTrustSetAnchorCertificates': (b'i@@',), 'SecKeychainGetTypeID': (sel32or64(b'I', b'Q'),), 'SecCertificateSetPreferred': (b'i@@@',), 'SecCertificateCopyNormalizedIssuerSequence': (b'@@', '', {'retval': {'already_cfretained': True}}), 
'SecKeyGetTypeID': (sel32or64(b'I', b'Q'),), 'sec_protocol_metadata_get_negotiated_protocol_version': (b'i@',), 'SecItemCopyMatching': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecItemUpdate': (b'i@@',), 'CMSDecoderGetTypeID': (sel32or64(b'I', b'Q'),), 'SecTrustedApplicationCopyData': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeychainItemCopyAccess': (b'i@o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecKeyUnwrapSymmetric': (b'@n^@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecTaskCopySigningIdentifier': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecureDownloadCopyTicketLocation': (b'i^{OpaqueSecureDownload=}o^@', '', {'arguments': {1: {'already_cfretained': True}}}), 'SecTranformCustomGetAttribute': (sel32or64(b'@^{OpaqueSecTransformImplementation=}@i', b'@^{OpaqueSecTransformImplementation=}@q'),), 'SecKeychainCopyDefault': (b'io^@', '', {'arguments': {0: {'already_cfretained': True}}}), 'SSLSetEnabledCiphers': (b'i@n^iL', '', {'arguments': {1: {'c_array_length_in_arg': 2}}}), 'sec_protocol_metadata_access_peer_certificate_chain': (b'B@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}}}}}}), 'SecHostRemoveGuest': (b'iIII',), 'SSLClose': (b'i@',), 'SecKeyDeriveFromPassword': (b'@@@o^@', '', {'retval': {'already_cfretained': True}}), 'SecAccessControlGetTypeID': (sel32or64(b'I', b'Q'),), 'SecKeyCopyAttributes': (b'@@', '', {'retval': {'already_cfretained': True}}), 'AuthorizationRightRemove': (b'i^{AuthorizationOpaqueRef=}^t', '', {'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'SecTrustSettingsCopyTrustSettings': (b'i@Io^@', '', {'arguments': {2: {'already_cfretained': True}}}), 'SecEncryptTransformCreate': (b'@@o^@', '', {'retval': {'already_cfretained': True}}), 'CMSEncoderCreate': (b'io^@', '', {'arguments': {0: {'already_cfretained': True}}}), 'CMSEncodeContent': (b'i@@@ZIn^vLo^@', '', {'arguments': {5: {'c_array_length_in_arg': 6}, 7: {'already_cfretained': True}}})} -aliases = {'SecTransformCustomGetAttribute': 'SecTranformCustomGetAttribute', 'errSessionInvalidFlags': 'errAuthorizationInvalidFlags', 'errSecCSSigDBAccess': 'errSecCSDBAccess', 'errSSLServerAuthCompleted': 'errSSLPeerAuthCompleted', 'errSSLLast': 'errSSLUnexpectedRecord', 'errSessionInternal': 'errAuthorizationInternal', 'kSecRequirementTypeCount': 'kSecInvalidRequirementType', 'errSSLClientAuthCompleted': 'errSSLPeerAuthCompleted', 'errSecCSSigDBDenied': 'errSecCSDBDenied'} -cftypes=[('SecKeyRef', b'^{OpaqueSecKeyRef=}', 'SecKeyGetTypeID', None), ('SecPasswordRef', b'^{OpaqueSecPasswordRef=}', 'SecPasswordGetTypeID', None), ('SecKeychainItemRef', b'^{OpaqueSecKeychainItemRef=}', 'SecKeyChainItemGetTypeID', None), ('SecTaskRef', b'^{__SecTask=}', 'SecTaskGetTypeID', None), ('SecCertificateRef', b'^{OpaqueSecCertificateRef=}', 'SecCertificateGetTypeID', None), ('SecTrustedApplicationRef', b'^{OpaqueSecTrustedApplicationRef=}', 'SecTrustedApplicationGetTypeID', None), ('CMSEncoderRef', b'^{_CMSEncoder=}', 'CMSEncoderGetTypeID', None), ('SecAccessControlRef', b'^{OpaqueSecAccessControlRef=}', 'SecAccessControlGetTypeID', None), ('SecCodeRef', b'^{__SecCode=}', 'SecCodeGetTypeID', None), ('CMSDecoderRef', b'^{_CMSDecoder=}', 'CMSDecoderGetTypeID', None), ('SecAccessRef', b'^{OpaqueSecAccessRef=}', 'SecAccessGetTypeID', None), ('SecIdentityRef', b'^{OpaqueSecIdentityRef=}', 'SecIdentityGetTypeID', None), ('SSLContextRef', 
b'^{SSLContext=}', 'SSLContextGetTypeID', None), ('SecRequirementRef', b'^{__SecRequirement=}', 'SecRequirementGetTypeID', None), ('SecPolicyRef', b'^{OpaqueSecPolicyRef=}', 'SecPolicyGetTypeID', None), ('SecTrustRef', b'^{__SecTrust=}', 'SecTrustGetTypeID', None), ('SecACLRef', b'^{OpaqueSecTrustRef=}', 'SecACLGetTypeID', None), ('SecKeychainRef', b'^{OpaqueSecKeychainRef=}', 'SecKeyChainGetTypeID', None)] -misc.update({'AuthorizationRef': objc.createOpaquePointerType('AuthorizationRef', b'^{AuthorizationOpaqueRef=}'), 'SecureDownloadRef': objc.createOpaquePointerType('SecureDownloadRef', b'^{OpaqueSecureDownload=}'), 'SecRandomRef': objc.createOpaquePointerType('SecRandomRef', b'^{__SecRandom=}'), 'SecTransformImplementationRef': objc.createOpaquePointerType('SecTransformImplementationRef', b'^{OpaqueSecTransformImplementation=}')}) -expressions = {'kAuthorizationEmptyEnvironment': 'None'} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Security/_metadata.pyc b/env/lib/python2.7/site-packages/Security/_metadata.pyc deleted file mode 100644 index bc6e4a5b..00000000 Binary files a/env/lib/python2.7/site-packages/Security/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SecurityFoundation/__init__.py b/env/lib/python2.7/site-packages/SecurityFoundation/__init__.py deleted file mode 100644 index be7ccf0f..00000000 --- a/env/lib/python2.7/site-packages/SecurityFoundation/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the SecurityFoundation framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation -import Security - -from SecurityFoundation import _metadata - -sys.modules['SecurityFoundation'] = mod = objc.ObjCLazyModule( - "SecurityFoundation", - "com.apple.securityfoundatio", - objc.pathForFramework("/System/Library/Frameworks/SecurityFoundation.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation, Security)) - -import sys -del sys.modules['SecurityFoundation._metadata'] diff --git a/env/lib/python2.7/site-packages/SecurityFoundation/__init__.pyc b/env/lib/python2.7/site-packages/SecurityFoundation/__init__.pyc deleted file mode 100644 index 77e57da4..00000000 Binary files a/env/lib/python2.7/site-packages/SecurityFoundation/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SecurityFoundation/_metadata.py b/env/lib/python2.7/site-packages/SecurityFoundation/_metadata.py deleted file mode 100644 index f97c4b6f..00000000 --- a/env/lib/python2.7/site-packages/SecurityFoundation/_metadata.py +++ /dev/null @@ -1,30 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Thu Nov 23 11:27:12 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'SFAuthorization', b'obtainWithRight:flags:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'SFAuthorization', b'obtainWithRights:flags:environment:authorizedRights:error:', {'retval': {'type': 'Z'}, 'arguments': {5: 
{'type_modifier': b'o'}, 6: {'type_modifier': b'o'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/SecurityFoundation/_metadata.pyc b/env/lib/python2.7/site-packages/SecurityFoundation/_metadata.pyc deleted file mode 100644 index bc8ea8cf..00000000 Binary files a/env/lib/python2.7/site-packages/SecurityFoundation/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SecurityInterface/_SecurityInterface.so b/env/lib/python2.7/site-packages/SecurityInterface/_SecurityInterface.so deleted file mode 100755 index f253fe26..00000000 Binary files a/env/lib/python2.7/site-packages/SecurityInterface/_SecurityInterface.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SecurityInterface/__init__.py b/env/lib/python2.7/site-packages/SecurityInterface/__init__.py deleted file mode 100644 index e520f86f..00000000 --- a/env/lib/python2.7/site-packages/SecurityInterface/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -''' -Python mapping for the SecurityInterface framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import AppKit -import Security - -from SecurityInterface import _metadata -import SecurityInterface._SecurityInterface - - -sys.modules['SecurityInterface'] = mod = objc.ObjCLazyModule( - "SecurityInterface", - "com.apple.securityinterface", - objc.pathForFramework("/System/Library/Frameworks/SecurityInterface.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (AppKit, Security)) - -import sys -del sys.modules['SecurityInterface._metadata'] diff --git a/env/lib/python2.7/site-packages/SecurityInterface/__init__.pyc b/env/lib/python2.7/site-packages/SecurityInterface/__init__.pyc deleted file mode 100644 index af299034..00000000 Binary files a/env/lib/python2.7/site-packages/SecurityInterface/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SecurityInterface/_metadata.py b/env/lib/python2.7/site-packages/SecurityInterface/_metadata.py deleted file mode 100644 index cef17292..00000000 --- a/env/lib/python2.7/site-packages/SecurityInterface/_metadata.py +++ /dev/null @@ -1,72 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Thu Nov 23 12:25:02 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$SFAuthorizationPluginViewUserNameKey$SFAuthorizationPluginViewUserShortNameKey$SFCertificateViewDisclosureStateDidChange$SFDisplayViewException$''' -enums = '''$SFAuthorizationStartupState@0$SFAuthorizationViewInProgressState@2$SFAuthorizationViewLockedState@1$SFAuthorizationViewUnlockedState@3$SFButtonTypeBack@0$SFButtonTypeCancel@0$SFButtonTypeLogin@1$SFButtonTypeOK@1$SFViewTypeCredentials@1$SFViewTypeIdentityAndCredentials@0$''' -misc.update({}) -aliases = {'SFButtonTypeBack': 'SFButtonTypeCancel', 'SFButtonTypeLogin': 'SFButtonTypeOK', 'SFButtonTypeOK': 'NSOKButton', 'SFButtonTypeCancel': 'NSCancelButton'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'authorizationViewCreatedAuthorization:', {'retval': {'type': b'v'}, 'arguments': {2: 
{'type': b'@'}}}) - r(b'NSObject', b'authorizationViewDidAuthorize:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'authorizationViewDidDeauthorize:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'authorizationViewDidHide:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'authorizationViewReleasedAuthorization:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'authorizationViewShouldDeauthorize:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'certificatePanelShowHelp:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'chooseIdentityPanelShowHelp:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'SFAuthorizationPluginView', b'setButton:enabled:', {'arguments': {3: {'type': b'Z'}}}) - r(b'SFAuthorizationPluginView', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFAuthorizationView', b'authorizationRights', {'retval': {'type': sel32or64(b'^{_AuthorizationRights=L^{_AuthorizationItem=^cL^vL}}', b'^{_AuthorizationRights=I^{_AuthorizationItem=^cQ^vI}}')}}) - r(b'SFAuthorizationView', b'authorize:', {'retval': {'type': b'Z'}}) - r(b'SFAuthorizationView', b'deauthorize:', {'retval': {'type': b'Z'}}) - r(b'SFAuthorizationView', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'SFAuthorizationView', b'setAuthorizationRights:', {'arguments': {2: {'type': sel32or64(b'^{_AuthorizationRights=L^{_AuthorizationItem=^cL^vL}}', b'^{_AuthorizationRights=I^{_AuthorizationItem=^cQ^vI}}')}}}) - r(b'SFAuthorizationView', b'setAutoupdate:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFAuthorizationView', b'setAutoupdate:interval:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFAuthorizationView', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFAuthorizationView', b'updateStatus:', {'retval': {'type': b'Z'}}) - r(b'SFCertificatePanel', b'beginSheetForWindow:modalDelegate:didEndSelector:contextInfo:certificates:showGroup:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 7: {'type': b'Z'}}}) - r(b'SFCertificatePanel', b'beginSheetForWindow:modalDelegate:didEndSelector:contextInfo:trust:showGroup:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 7: {'type': b'Z'}}}) - r(b'SFCertificatePanel', b'runModalForCertificates:showGroup:', {'arguments': {3: {'type': b'Z'}}}) - r(b'SFCertificatePanel', b'runModalForTrust:showGroup:', {'arguments': {3: {'type': b'Z'}}}) - r(b'SFCertificatePanel', b'setShowsHelp:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFCertificatePanel', b'showsHelp', {'retval': {'type': b'Z'}}) - r(b'SFCertificateTrustPanel', b'beginSheetForWindow:modalDelegate:didEndSelector:contextInfo:trust:message:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) - r(b'SFCertificateView', b'detailsDisclosed', {'retval': {'type': b'Z'}}) - r(b'SFCertificateView', b'detailsDisplayed', {'retval': {'type': b'Z'}}) - r(b'SFCertificateView', b'isEditable', {'retval': {'type': b'Z'}}) - r(b'SFCertificateView', b'isTrustDisplayed', {'retval': {'type': b'Z'}}) - r(b'SFCertificateView', b'policiesDisclosed', {'retval': {'type': b'Z'}}) - r(b'SFCertificateView', b'setDetailsDisclosed:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFCertificateView', b'setDisplayDetails:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFCertificateView', b'setDisplayTrust:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFCertificateView', 
b'setEditableTrust:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFCertificateView', b'setPoliciesDisclosed:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFChooseIdentityPanel', b'beginSheetForWindow:modalDelegate:didEndSelector:contextInfo:identities:message:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) - r(b'SFChooseIdentityPanel', b'setShowsHelp:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SFChooseIdentityPanel', b'showsHelp', {'retval': {'type': b'Z'}}) - r(b'SFKeychainSavePanel', b'beginSheetForDirectory:file:modalForWindow:modalDelegate:didEndSelector:contextInfo:', {'arguments': {6: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) - r(b'SFKeychainSettingsPanel', b'beginSheetForWindow:modalDelegate:didEndSelector:contextInfo:settings:keychain:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}, 6: {'type': sel32or64(b'^{SecKeychainSettings=LZZL}', b'^{SecKeychainSettings=IZZI}')}}}) - r(b'SFKeychainSettingsPanel', b'runModalForSettings:keychain:', {'arguments': {2: {'type': sel32or64(b'^{SecKeychainSettings=LZZL}', b'^{SecKeychainSettings=IZZI}')}}}) -finally: - objc._updatingMetadata(False) -protocols={'SFChooseIdentityPanelDelegate': objc.informal_protocol('SFChooseIdentityPanelDelegate', [objc.selector(None, b'chooseIdentityPanelShowHelp:', b'Z@:@', isRequired=False)]), 'SFCertificatePanelDelegate': objc.informal_protocol('SFCertificatePanelDelegate', [objc.selector(None, b'certificatePanelShowHelp:', b'Z@:@', isRequired=False)]), 'SFAuthorizationViewDelegate': objc.informal_protocol('SFAuthorizationViewDelegate', [objc.selector(None, b'authorizationViewCreatedAuthorization:', b'v@:@', isRequired=False), objc.selector(None, b'authorizationViewDidHide:', b'v@:@', isRequired=False), objc.selector(None, b'authorizationViewDidAuthorize:', b'v@:@', isRequired=False), objc.selector(None, b'authorizationViewDidDeauthorize:', b'v@:@', isRequired=False), objc.selector(None, b'authorizationViewShouldDeauthorize:', b'Z@:@', isRequired=False), objc.selector(None, b'authorizationViewReleasedAuthorization:', b'v@:@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/SecurityInterface/_metadata.pyc b/env/lib/python2.7/site-packages/SecurityInterface/_metadata.pyc deleted file mode 100644 index 2f844e5e..00000000 Binary files a/env/lib/python2.7/site-packages/SecurityInterface/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ServiceManagement/__init__.py b/env/lib/python2.7/site-packages/ServiceManagement/__init__.py deleted file mode 100644 index 9b847d23..00000000 --- a/env/lib/python2.7/site-packages/ServiceManagement/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -''' -Python mapping for the ServiceManagement framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' -import sys -import objc -import CoreFoundation - -from ServiceManagement import _metadata - -sys.modules['ServiceManagement'] = mod = objc.ObjCLazyModule('ServiceManagement', - "com.apple.bsd.ServiceManagement", - objc.pathForFramework("/System/Library/Frameworks/ServiceManagement.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( CoreFoundation, )) - -import sys -del sys.modules['ServiceManagement._metadata'] diff --git a/env/lib/python2.7/site-packages/ServiceManagement/__init__.pyc b/env/lib/python2.7/site-packages/ServiceManagement/__init__.pyc deleted file mode 100644 index 8fcc1ccd..00000000 Binary files a/env/lib/python2.7/site-packages/ServiceManagement/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/ServiceManagement/_metadata.py b/env/lib/python2.7/site-packages/ServiceManagement/_metadata.py deleted file mode 100644 index d465c3db..00000000 --- a/env/lib/python2.7/site-packages/ServiceManagement/_metadata.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Dec 28 20:53:43 2014 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$kSMDomainSystemLaunchd@^{__CFString=}$kSMDomainUserLaunchd@^{__CFString=}$kSMErrorDomainFramework@^{__CFString=}$kSMErrorDomainIPC@^{__CFString=}$kSMErrorDomainLaunchd@^{__CFString=}$kSMInfoKeyAuthorizedClients@^{__CFString=}$kSMInfoKeyPrivilegedExecutables@^{__CFString=}$''' -enums = '''$kSMErrorAuthorizationFailure@4$kSMErrorInternalFailure@2$kSMErrorInvalidPlist@10$kSMErrorInvalidSignature@3$kSMErrorJobMustBeEnabled@9$kSMErrorJobNotFound@6$kSMErrorJobPlistNotFound@8$kSMErrorServiceUnavailable@7$kSMErrorToolNotValid@5$''' -misc.update({'kSMRightModifySystemDaemons': b'com.apple.ServiceManagement.daemons.modify', 'kSMRightBlessPrivilegedHelper': b'com.apple.ServiceManagement.blesshelper'}) -functions={'SMJobBless': (b'Z^{__CFString=}^{__CFString=}^{AuthorizationOpaqueRef=}^^{__CFError}', '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'SMJobRemove': (b'Z^{__CFString=}^{__CFString=}^{AuthorizationOpaqueRef=}Z^^{__CFError}', '', {'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'SMJobCopyDictionary': (b'^{__CFDictionary=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SMJobSubmit': (b'Z^{__CFString=}^{__CFDictionary=}^{AuthorizationOpaqueRef=}^^{__CFError}', '', {'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o', 'null_accepted': True}}}), 'SMLoginItemSetEnabled': (b'Z^{__CFString=}Z',), 'SMCopyAllJobDictionaries': (b'^{__CFArray=}^{__CFString=}', '', {'retval': {'already_cfretained': True}})} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/ServiceManagement/_metadata.pyc b/env/lib/python2.7/site-packages/ServiceManagement/_metadata.pyc deleted file mode 100644 index c86826c0..00000000 Binary files a/env/lib/python2.7/site-packages/ServiceManagement/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Social/__init__.py b/env/lib/python2.7/site-packages/Social/__init__.py deleted file mode 100644 index 4a559adb..00000000 --- 
a/env/lib/python2.7/site-packages/Social/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the Social framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from Social import _metadata - -sys.modules['Social'] = mod = objc.ObjCLazyModule( - "Social", - "com.apple.Social.framework", - objc.pathForFramework("/System/Library/Frameworks/Social.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['Social._metadata'] diff --git a/env/lib/python2.7/site-packages/Social/__init__.pyc b/env/lib/python2.7/site-packages/Social/__init__.pyc deleted file mode 100644 index 0588c448..00000000 Binary files a/env/lib/python2.7/site-packages/Social/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Social/_metadata.py b/env/lib/python2.7/site-packages/Social/_metadata.py deleted file mode 100644 index f16db606..00000000 --- a/env/lib/python2.7/site-packages/Social/_metadata.py +++ /dev/null @@ -1,30 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Dec 28 20:40:47 2014 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$SLServiceTypeFacebook$SLServiceTypeLinkedIn$SLServiceTypeSinaWeibo$SLServiceTypeTencentWeibo$SLServiceTypeTwitter$''' -enums = '''$SLRequestMethodDELETE@2$SLRequestMethodGET@0$SLRequestMethodPOST@1$SLRequestMethodPUT@3$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'SLComposeServiceViewController', b'isContentValid', {'retval': {'type': b'Z'}}) - r(b'SLRequest', b'performRequestWithHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'@'}}}}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Social/_metadata.pyc b/env/lib/python2.7/site-packages/Social/_metadata.pyc deleted file mode 100644 index 62f22c8e..00000000 Binary files a/env/lib/python2.7/site-packages/Social/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SpriteKit/_SpriteKit.so b/env/lib/python2.7/site-packages/SpriteKit/_SpriteKit.so deleted file mode 100755 index 9a6d0e04..00000000 Binary files a/env/lib/python2.7/site-packages/SpriteKit/_SpriteKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SpriteKit/__init__.py b/env/lib/python2.7/site-packages/SpriteKit/__init__.py deleted file mode 100644 index 57f77b40..00000000 --- a/env/lib/python2.7/site-packages/SpriteKit/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -''' -Python mapping for the SpriteKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Quartz -import Cocoa - -from SpriteKit import _metadata -from SpriteKit import _SpriteKit - -sys.modules['SpriteKit'] = mod = objc.ObjCLazyModule( - "SpriteKit", - "com.apple.SpriteKit", - objc.pathForFramework("/System/Library/Frameworks/SpriteKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Cocoa, Quartz)) - - -import sys, objc - -mod = sys.modules['SpriteKit'] -del sys.modules['SpriteKit._metadata'] diff --git a/env/lib/python2.7/site-packages/SpriteKit/__init__.pyc b/env/lib/python2.7/site-packages/SpriteKit/__init__.pyc deleted file mode 100644 index faba6edf..00000000 Binary files a/env/lib/python2.7/site-packages/SpriteKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SpriteKit/_metadata.py b/env/lib/python2.7/site-packages/SpriteKit/_metadata.py deleted file mode 100644 index 6095056c..00000000 --- a/env/lib/python2.7/site-packages/SpriteKit/_metadata.py +++ /dev/null @@ -1,168 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Jun 6 09:04:00 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$$''' -enums = '''$SKActionTimingEaseIn@1$SKActionTimingEaseInEaseOut@3$SKActionTimingEaseOut@2$SKActionTimingLinear@0$SKAttributeTypeFloat@1$SKAttributeTypeHalfFloat@5$SKAttributeTypeNone@0$SKAttributeTypeVectorFloat2@2$SKAttributeTypeVectorFloat3@3$SKAttributeTypeVectorFloat4@4$SKAttributeTypeVectorHalfFloat2@6$SKAttributeTypeVectorHalfFloat3@7$SKAttributeTypeVectorHalfFloat4@8$SKBlendModeAdd@1$SKBlendModeAlpha@0$SKBlendModeMultiply@3$SKBlendModeMultiplyX2@4$SKBlendModeReplace@6$SKBlendModeScreen@5$SKBlendModeSubtract@2$SKInterpolationModeLinear@1$SKInterpolationModeSpline@2$SKInterpolationModeStep@3$SKLabelHorizontalAlignmentModeCenter@0$SKLabelHorizontalAlignmentModeLeft@1$SKLabelHorizontalAlignmentModeRight@2$SKLabelVerticalAlignmentModeBaseline@0$SKLabelVerticalAlignmentModeBottom@3$SKLabelVerticalAlignmentModeCenter@1$SKLabelVerticalAlignmentModeTop@2$SKRepeatModeClamp@1$SKRepeatModeLoop@2$SKSceneScaleModeAspectFill@1$SKSceneScaleModeAspectFit@2$SKSceneScaleModeFill@0$SKSceneScaleModeResizeFill@3$SKTextureFilteringLinear@1$SKTextureFilteringNearest@0$SKTileAdjacencyAll@255$SKTileAdjacencyDown@16$SKTileAdjacencyLeft@64$SKTileAdjacencyLowerLeft@32$SKTileAdjacencyLowerRight@8$SKTileAdjacencyRight@4$SKTileAdjacencyUp@1$SKTileAdjacencyUpperLeft@128$SKTileAdjacencyUpperRight@2$SKTileDefinitionRotation0@0$SKTileDefinitionRotation180@2$SKTileDefinitionRotation270@3$SKTileDefinitionRotation90@1$SKTileHexFlatAdjacencyAll@63$SKTileHexFlatAdjacencyDown@8$SKTileHexFlatAdjacencyLowerLeft@16$SKTileHexFlatAdjacencyLowerRight@4$SKTileHexFlatAdjacencyUp@1$SKTileHexFlatAdjacencyUpperLeft@32$SKTileHexFlatAdjacencyUpperRight@2$SKTileHexPointyAdjacencyAdd@63$SKTileHexPointyAdjacencyLeft@32$SKTileHexPointyAdjacencyLowerLeft@16$SKTileHexPointyAdjacencyLowerRight@8$SKTileHexPointyAdjacencyRight@4$SKTileHexPointyAdjacencyUpperLeft@1$SKTileHexPointyAdjacencyUpperRight@2$SKTileSetTypeGrid@0$SKTileSetTypeHexagonalFlat@2$SKTileSetTypeHexagonalPointy@3$SKTileSetTypeIsometric@1$SKTransitionDirectionDown@1$SKTransitionDirectionLeft@3$SKTransitionDirectionRight@2$SKTransitionDirectionUp@0$SKUniformTy
peFloat@1$SKUniformTypeFloatMatrix2@5$SKUniformTypeFloatMatrix3@6$SKUniformTypeFloatMatrix4@7$SKUniformTypeFloatVector2@2$SKUniformTypeFloatVector3@3$SKUniformTypeFloatVector4@4$SKUniformTypeNone@0$SKUniformTypeTexture@8$''' -misc.update({}) -aliases = {'SK_AVAILABLE': '__OSX_AVAILABLE_STARTING', 'SKColor': 'NSColor'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'didApplyConstraintsForScene:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'didBeginContact:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'didEndContact:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'didEvaluateActionsForScene:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'didFinishUpdateForScene:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'didSimulatePhysicsForScene:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'update:forScene:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'view:shouldRenderAtTime:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'd'}}}) - r(b'SK3DNode', b'autoenablesDefaultLighting', {'retval': {'type': b'Z'}}) - r(b'SK3DNode', b'isPlaying', {'retval': {'type': b'Z'}}) - r(b'SK3DNode', b'loops', {'retval': {'type': b'Z'}}) - r(b'SK3DNode', b'projectPoint:', {'retval': {'type': b'%'}, 'arguments': {2: {'type': b'%'}}}) - r(b'SK3DNode', b'setAutoenablesDefaultLighting:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SK3DNode', b'setLoops:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SK3DNode', b'setPlaying:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SK3DNode', b'unprojectPoint:', {'retval': {'type': b'%'}, 'arguments': {2: {'type': b'%'}}}) - r(b'SKAction', b'animateWithTextures:timePerFrame:resize:restore:', {'arguments': {4: {'type': b'Z'}, 5: {'type': b'Z'}}}) - r(b'SKAction', b'customActionWithDuration:actionBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': sel32or64(b'f', b'd')}}}}}}) - r(b'SKAction', b'followPath:asOffset:orientToPath:duration:', {'arguments': {3: {'type': b'Z'}, 4: {'type': b'Z'}}}) - r(b'SKAction', b'followPath:asOffset:orientToPath:speed:', {'arguments': {3: {'type': b'Z'}, 4: {'type': b'Z'}}}) - r(b'SKAction', b'performSelector:onTarget:', {'arguments': {2: {'sel_of_type': b'v@:'}}}) - r(b'SKAction', b'playSoundFileNamed:waitForCompletion:', {'arguments': {3: {'type': b'Z'}}}) - r(b'SKAction', b'rotateToAngle:duration:shortestUnitArc:', {'arguments': {4: {'type': b'Z'}}}) - r(b'SKAction', b'setTexture:resize:', {'arguments': {3: {'type': b'Z'}}}) - r(b'SKAction', b'setTimingFunc:', {'arguments': {2: {'callable': {'retval': {'type': b'f'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'f'}}}}}}) - r(b'SKAction', b'setTimingFunction:', {'arguments': {2: {'callable': {'retval': {'type': b'f'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'f'}}}}}}) - r(b'SKAction', b'timingFunc', {'retval': {'callable': {'retval': {'type': b'f'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'f'}}}}}) - r(b'SKAction', b'timingFunction', {'retval': {'callable': {'retval': {'type': b'f'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'f'}}}}}) - r(b'SKConstraint', 
b'enabled', {'retval': {'type': b'Z'}}) - r(b'SKConstraint', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKEffectNode', b'setShouldCenterFilter:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKEffectNode', b'setShouldEnableEffects:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKEffectNode', b'setShouldRasterize:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKEffectNode', b'shouldCenterFilter', {'retval': {'type': b'Z'}}) - r(b'SKEffectNode', b'shouldEnableEffects', {'retval': {'type': b'Z'}}) - r(b'SKEffectNode', b'shouldRasterize', {'retval': {'type': b'Z'}}) - r(b'SKFieldNode', b'direction', {'retval': {'type': b'%'}}) - r(b'SKFieldNode', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'SKFieldNode', b'isExclusive', {'retval': {'type': b'Z'}}) - r(b'SKFieldNode', b'linearGravityFieldWithVector:', {'arguments': {2: {'type': b'%'}}}) - r(b'SKFieldNode', b'setDirection:', {'arguments': {2: {'type': b'%'}}}) - r(b'SKFieldNode', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKFieldNode', b'setExclusive:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKFieldNode', b'velocityFieldWithVector:', {'arguments': {2: {'type': b'%'}}}) - r(b'SKLightNode', b'isEnabled', {'retval': {'type': b'Z'}}) - r(b'SKLightNode', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKMutableTexture', b'modifyPixelDataWithBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v', 'type_modifier': 'N', 'c_array_length_in_arg': 2}, 2: {'type': b'Q'}}}}}}) - r(b'SKNode', b'containsPoint:', {'retval': {'type': b'Z'}}) - r(b'SKNode', b'enumerateChildNodesWithName:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'SKNode', b'hasActions', {'retval': {'type': b'Z'}}) - r(b'SKNode', b'inParentHierarchy:', {'retval': {'type': b'Z'}}) - r(b'SKNode', b'intersectsNode:', {'retval': {'type': b'Z'}}) - r(b'SKNode', b'isHidden', {'retval': {'type': b'Z'}}) - r(b'SKNode', b'isPaused', {'retval': {'type': b'Z'}}) - r(b'SKNode', b'isUserInteractionEnabled', {'retval': {'type': b'Z'}}) - r(b'SKNode', b'nodeWithFileNamed:securelyWithClasses:andError:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'SKNode', b'runAction:completion:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SKNode', b'setHidden:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKNode', b'setPaused:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKNode', b'setUserInteractionEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKPhysicsBody', b'affectedByGravity', {'retval': {'type': b'Z'}}) - r(b'SKPhysicsBody', b'allowsRotation', {'retval': {'type': b'Z'}}) - r(b'SKPhysicsBody', b'isDynamic', {'retval': {'type': b'Z'}}) - r(b'SKPhysicsBody', b'isResting', {'retval': {'type': b'Z'}}) - r(b'SKPhysicsBody', b'pinned', {'retval': {'type': b'Z'}}) - r(b'SKPhysicsBody', b'setAffectedByGravity:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKPhysicsBody', b'setAllowsRotation:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKPhysicsBody', b'setDynamic:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKPhysicsBody', b'setPinned:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKPhysicsBody', b'setResting:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKPhysicsBody', b'setUsesPreciseCollisionDetection:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKPhysicsBody', b'usesPreciseCollisionDetection', {'retval': {'type': b'Z'}}) - 
r(b'SKPhysicsJointPin', b'setShouldEnableLimits:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKPhysicsJointPin', b'shouldEnableLimits', {'retval': {'type': b'Z'}}) - r(b'SKPhysicsJointSliding', b'setShouldEnableLimits:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKPhysicsJointSliding', b'shouldEnableLimits', {'retval': {'type': b'Z'}}) - r(b'SKPhysicsWorld', b'enumerateBodiesAlongRayStart:end:usingBlock:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'{CGPoint=dd}'}, 3: {'type': b'{CGVector=dd}'}, 4: {'type': b'o^Z'}}}}}}) - r(b'SKPhysicsWorld', b'enumerateBodiesAtPoint:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'SKPhysicsWorld', b'enumerateBodiesInRect:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'^Z', 'type_modifier': 'o'}}}}}}) - r(b'SKPhysicsWorld', b'sampleFieldsAt:', {'retval': {'type': b'%'}, 'arguments': {2: {'type': b'%'}}}) - r(b'SKRegion', b'containsPoint:', {'retval': {'type': b'Z'}}) - r(b'SKRenderer', b'ignoresSiblingOrder', {'retval': {'type': 'Z'}}) - r(b'SKRenderer', b'setIgnoresSiblingOrder:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SKRenderer', b'setShouldCullNonVisibleNodes:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SKRenderer', b'setShowsDrawCount:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SKRenderer', b'setShowsFields:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SKRenderer', b'setShowsNodeCount:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SKRenderer', b'setShowsPhysics:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SKRenderer', b'setShowsQuadCount:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SKRenderer', b'shouldCullNonVisibleNodes', {'retval': {'type': 'Z'}}) - r(b'SKRenderer', b'showsDrawCount', {'retval': {'type': 'Z'}}) - r(b'SKRenderer', b'showsFields', {'retval': {'type': 'Z'}}) - r(b'SKRenderer', b'showsNodeCount', {'retval': {'type': 'Z'}}) - r(b'SKRenderer', b'showsPhysics', {'retval': {'type': 'Z'}}) - r(b'SKRenderer', b'showsQuadCount', {'retval': {'type': 'Z'}}) - r(b'SKShapeNode', b'isAntialiased', {'retval': {'type': b'Z'}}) - r(b'SKShapeNode', b'setAntialiased:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKShapeNode', b'shapeNodeWithPath:centered:', {'arguments': {3: {'type': b'Z'}}}) - r(b'SKShapeNode', b'shapeNodeWithPoints:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'SKShapeNode', b'shapeNodeWithSplinePoints:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}}) - r(b'SKSpriteNode', b'spriteNodeWithImageNamed:normalMapped:', {'arguments': {3: {'type': b'Z'}}}) - r(b'SKTexture', b'preloadTextures:withCompletionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SKTexture', b'preloadWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SKTexture', b'setUsesMipmaps:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKTexture', b'textureNoiseWithSmoothness:size:grayscale:', {'arguments': {4: {'type': b'Z'}}}) - r(b'SKTexture', b'textureWithData:size:flipped:', {'arguments': {4: {'type': b'Z'}}}) - r(b'SKTexture', b'usesMipmaps', {'retval': {'type': b'Z'}}) - r(b'SKTextureAtlas', b'preloadTextureAtlases:withCompletionHandler:', {'arguments': {3: {'callable': 
{'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SKTextureAtlas', b'preloadWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'SKTileDefinition', b'flipHorizontally', {'retval': {'type': 'Z'}}) - r(b'SKTileDefinition', b'flipVertically', {'retval': {'type': 'Z'}}) - r(b'SKTileDefinition', b'setFlipHorizontally:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SKTileDefinition', b'setFlipVertically:', {'arguments': {2: {'type': 'Z'}}}) - r(b'SKTransition', b'pausesIncomingScene', {'retval': {'type': b'Z'}}) - r(b'SKTransition', b'pausesOutgoingScene', {'retval': {'type': b'Z'}}) - r(b'SKTransition', b'setPausesIncomingScene:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKTransition', b'setPausesOutgoingScene:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'allowsTransparency', {'retval': {'type': b'Z'}}) - r(b'SKView', b'ignoresSiblingOrder', {'retval': {'type': b'Z'}}) - r(b'SKView', b'isAsynchronous', {'retval': {'type': b'Z'}}) - r(b'SKView', b'isPaused', {'retval': {'type': b'Z'}}) - r(b'SKView', b'setAllowsTransparency:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setAsynchronous:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setIgnoresSiblingOrder:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setPaused:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setShouldCullNonVisibleNodes:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setShowsDrawCount:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setShowsFPS:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setShowsFields:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setShowsNodeCount:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setShowsPhysics:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'setShowsQuadCount:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKView', b'shouldCullNonVisibleNodes', {'retval': {'type': b'Z'}}) - r(b'SKView', b'showsDrawCount', {'retval': {'type': b'Z'}}) - r(b'SKView', b'showsFPS', {'retval': {'type': b'Z'}}) - r(b'SKView', b'showsFields', {'retval': {'type': b'Z'}}) - r(b'SKView', b'showsNodeCount', {'retval': {'type': b'Z'}}) - r(b'SKView', b'showsPhysics', {'retval': {'type': b'Z'}}) - r(b'SKView', b'showsQuadCount', {'retval': {'type': b'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {'SKTileAdjacencyRightEdge': 'SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft | SKTileAdjacencyUp', 'SKTileAdjacencyUpperRightCorner': 'SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft', 'SKTileAdjacencyUpperRightEdge': 'SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft', 'SKTileAdjacencyLowerRightCorner': 'SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft', 'SKTileAdjacencyLowerRightEdge': 'SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft | SKTileAdjacencyUp', 'SKTileAdjacencyDownEdge': 'SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft', 'SKTileAdjacencyLeftEdge': 'SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown', 'SKTileAdjacencyUpEdge': 'SKTileAdjacencyRight | SKTileAdjacencyLowerRight | 
SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft', 'SKTileAdjacencyLowerLeftEdge': 'SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight', 'SKTileAdjacencyUpperLeftCorner': 'SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft', 'SKTileAdjacencyUpperLeftEdge': 'SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown', 'SKTileAdjacencyLowerLeftCorner': 'SKTileAdjacencyUp | SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft'} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/SpriteKit/_metadata.pyc b/env/lib/python2.7/site-packages/SpriteKit/_metadata.pyc deleted file mode 100644 index 8ddc6c21..00000000 Binary files a/env/lib/python2.7/site-packages/SpriteKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/StoreKit/_StoreKit.so b/env/lib/python2.7/site-packages/StoreKit/_StoreKit.so deleted file mode 100755 index 60d24303..00000000 Binary files a/env/lib/python2.7/site-packages/StoreKit/_StoreKit.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/StoreKit/__init__.py b/env/lib/python2.7/site-packages/StoreKit/__init__.py deleted file mode 100644 index 7bcb43a4..00000000 --- a/env/lib/python2.7/site-packages/StoreKit/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -''' -Python mapping for the StoreKit framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from StoreKit import _metadata -import StoreKit._StoreKit - -sys.modules['StoreKit'] = mod = objc.ObjCLazyModule( - "StoreKit", - "com.apple.StoreKit", - objc.pathForFramework("/System/Library/Frameworks/StoreKit.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['StoreKit._metadata'] diff --git a/env/lib/python2.7/site-packages/StoreKit/__init__.pyc b/env/lib/python2.7/site-packages/StoreKit/__init__.pyc deleted file mode 100644 index d1868bb8..00000000 Binary files a/env/lib/python2.7/site-packages/StoreKit/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/StoreKit/_metadata.py b/env/lib/python2.7/site-packages/StoreKit/_metadata.py deleted file mode 100644 index 15cd073c..00000000 --- a/env/lib/python2.7/site-packages/StoreKit/_metadata.py +++ /dev/null @@ -1,43 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Thu Feb 21 10:44:53 2019 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$SKDownloadTimeRemainingUnknown@d$SKErrorDomain$SKReceiptPropertyIsExpired$SKReceiptPropertyIsRevoked$SKReceiptPropertyIsVolumePurchase$''' -enums = 
'''$SKDownloadStateActive@1$SKDownloadStateCancelled@5$SKDownloadStateFailed@4$SKDownloadStateFinished@3$SKDownloadStatePaused@2$SKDownloadStateWaiting@0$SKErrorClientInvalid@1$SKErrorCloudServiceNetworkConnectionFailed@7$SKErrorCloudServicePermissionDenied@6$SKErrorCloudServiceRevoked@8$SKErrorInvalidOfferIdentifier@11$SKErrorInvalidOfferPrice@14$SKErrorInvalidSignature@12$SKErrorMissingOfferParams@13$SKErrorPaymentCancelled@2$SKErrorPaymentInvalid@3$SKErrorPaymentNotAllowed@4$SKErrorPrivacyAcknowledgementRequired@9$SKErrorStoreProductNotAvailable@5$SKErrorUnauthorizedRequestData@10$SKErrorUnknown@0$SKPaymentTransactionStateDeferred@4$SKPaymentTransactionStateFailed@2$SKPaymentTransactionStatePurchased@1$SKPaymentTransactionStatePurchasing@0$SKPaymentTransactionStateRestored@3$SKProductDiscountPaymentModeFreeTrial@2$SKProductDiscountPaymentModePayAsYouGo@0$SKProductDiscountPaymentModePayUpFront@1$SKProductDiscountTypeIntroductory@0$SKProductDiscountTypeSubscription@1$SKProductPeriodUnitDay@0$SKProductPeriodUnitMonth@2$SKProductPeriodUnitWeek@1$SKProductPeriodUnitYear@3$''' -misc.update({}) -functions={'SKTerminateForInvalidReceipt': (b'v',)} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'paymentQueue:removedTransactions:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'paymentQueue:restoreCompletedTransactionsFailedWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'paymentQueue:shouldAddStorePayment:forProduct:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'paymentQueue:updatedDownloads:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'paymentQueue:updatedTransactions:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'paymentQueueRestoreCompletedTransactionsFinished:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'productsRequest:didReceiveResponse:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'request:didFailWithError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'requestDidFinish:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'SKMutablePayment', b'setSimulatesAskToBuyInSandbox:', {'arguments': {2: {'type': b'Z'}}}) - r(b'SKMutablePayment', b'simulatesAskToBuyInSandbox', {'retval': {'type': b'Z'}}) - r(b'SKPayment', b'simulatesAskToBuyInSandbox', {'retval': {'type': b'Z'}}) - r(b'SKPaymentQueue', b'canMakePayments', {'retval': {'type': b'Z'}}) - r(b'SKProduct', b'downloadable', {'retval': {'type': b'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/StoreKit/_metadata.pyc b/env/lib/python2.7/site-packages/StoreKit/_metadata.pyc deleted file mode 100644 index 5c86bd87..00000000 Binary files a/env/lib/python2.7/site-packages/StoreKit/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SyncServices/_SyncServices.so b/env/lib/python2.7/site-packages/SyncServices/_SyncServices.so deleted file mode 100755 index 
e8eeeac9..00000000 Binary files a/env/lib/python2.7/site-packages/SyncServices/_SyncServices.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SyncServices/__init__.py b/env/lib/python2.7/site-packages/SyncServices/__init__.py deleted file mode 100644 index 3cd2dbcd..00000000 --- a/env/lib/python2.7/site-packages/SyncServices/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -''' -Python mapping for the SyncServices framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import CoreData - -from SyncServices import _metadata -from SyncServices._SyncServices import * - -sys.modules['SyncServices'] = mod = objc.ObjCLazyModule('SyncServices', - "com.apple.syncservices", - objc.pathForFramework("/System/Library/Frameworks/SyncServices.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( CoreData,)) - -import sys -del sys.modules['SyncServices._metadata'] diff --git a/env/lib/python2.7/site-packages/SyncServices/__init__.pyc b/env/lib/python2.7/site-packages/SyncServices/__init__.pyc deleted file mode 100644 index d86bb4dc..00000000 Binary files a/env/lib/python2.7/site-packages/SyncServices/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SyncServices/_metadata.py b/env/lib/python2.7/site-packages/SyncServices/_metadata.py deleted file mode 100644 index 2882b7d3..00000000 --- a/env/lib/python2.7/site-packages/SyncServices/_metadata.py +++ /dev/null @@ -1,148 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Aug 2 19:18:13 2015 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$ISyncAvailabilityChangedNotification$ISyncChangePropertyActionKey$ISyncChangePropertyClear$ISyncChangePropertyNameKey$ISyncChangePropertySet$ISyncChangePropertyValueIsDefaultKey$ISyncChangePropertyValueKey$ISyncClientTypeApplication$ISyncClientTypeDevice$ISyncClientTypePeer$ISyncClientTypeServer$ISyncErrorDomain$ISyncInvalidArgumentsException$ISyncInvalidEntityException$ISyncInvalidRecordException$ISyncInvalidRecordIdentifiersKey$ISyncInvalidRecordReasonsKey$ISyncInvalidRecordsKey$ISyncInvalidSchemaException$ISyncRecordEntityNameKey$ISyncServerUnavailableException$ISyncSessionCancelledException$ISyncSessionUnavailableException$ISyncUnsupportedEntityException$''' -enums = 
'''$ISyncChangeTypeAdd@1$ISyncChangeTypeDelete@3$ISyncChangeTypeModify@2$ISyncChangeTypeNone@0$ISyncServerDisabledReasonByPreference@1001$ISyncServerDisabledReasonNone@1000$ISyncServerDisabledReasonSharedNetworkHome@1002$ISyncServerDisabledReasonUnknown@1004$ISyncServerDisabledReasonUnresponsive@1003$ISyncSessionClientAlreadySyncingError@100$ISyncSessionDriverChangeAccepted@1$ISyncSessionDriverChangeError@3$ISyncSessionDriverChangeIgnored@2$ISyncSessionDriverChangeRefused@0$ISyncSessionDriverFatalError@300$ISyncSessionDriverModeFast@1$ISyncSessionDriverModeRefresh@3$ISyncSessionDriverModeSlow@2$ISyncSessionDriverPullFailureError@201$ISyncSessionDriverRegistrationError@200$ISyncSessionUserCanceledSessionError@101$ISyncStatusCancelled@5$ISyncStatusErrors@4$ISyncStatusFailed@6$ISyncStatusNever@7$ISyncStatusRunning@1$ISyncStatusSuccess@2$ISyncStatusWarnings@3$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'ISyncClient', b'canPullChangesForEntityName:', {'retval': {'type': 'Z'}}) - r(b'ISyncClient', b'canPushChangesForEntityName:', {'retval': {'type': 'Z'}}) - r(b'ISyncClient', b'formatsRelationships', {'retval': {'type': 'Z'}}) - r(b'ISyncClient', b'isEnabledForEntityName:', {'retval': {'type': 'Z'}}) - r(b'ISyncClient', b'setEnabled:forEntityNames:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ISyncClient', b'setFormatsRelationships:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ISyncClient', b'setShouldReplaceClientRecords:forEntityNames:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ISyncClient', b'setShouldSynchronize:withClientsOfType:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ISyncClient', b'setSyncAlertHandler:selector:', {'arguments': {3: {'sel_of_type': b'v@:@@'}}}) - r(b'ISyncClient', b'shouldReplaceClientRecordsForEntityName:', {'retval': {'type': 'Z'}}) - r(b'ISyncClient', b'shouldSynchronizeWithClientsOfType:', {'retval': {'type': 'Z'}}) - r(b'ISyncManager', b'clientWithIdentifier:needsSyncing:', {'arguments': {3: {'type': 'Z'}}}) - r(b'ISyncManager', b'isEnabled', {'retval': {'type': 'Z'}}) - r(b'ISyncManager', b'registerSchemaWithBundlePath:', {'retval': {'type': 'Z'}}) - r(b'ISyncRecordSnapshot', b'recordIdentifierForReference:isModified:', {'arguments': {3: {'type': '^Z', 'type_modifier': b'o'}}}) - r(b'ISyncSession', b'beginSessionInBackgroundWithClient:entityNames:target:selector:', {'arguments': {5: {'sel_of_type': b'v@:@@'}}}) - r(b'ISyncSession', b'beginSessionInBackgroundWithClient:entityNames:target:selector:lastAnchors:', {'arguments': {5: {'sel_of_type': b'v@:@@'}}}) - r(b'ISyncSession', b'clientLostRecordWithIdentifier:shouldReplaceOnNextSync:', {'arguments': {3: {'type': 'Z'}}}) - r(b'ISyncSession', b'isCancelled', {'retval': {'type': 'Z'}}) - r(b'ISyncSession', b'prepareToPullChangesForEntityNames:beforeDate:', {'retval': {'type': 'Z'}}) - r(b'ISyncSession', b'prepareToPullChangesInBackgroundForEntityNames:target:selector:', {'arguments': {4: {'sel_of_type': b'v@:@@'}}}) - r(b'ISyncSession', b'shouldPullChangesForEntityName:', {'retval': {'type': 'Z'}}) - r(b'ISyncSession', b'shouldPushAllRecordsForEntityName:', {'retval': {'type': 'Z'}}) - r(b'ISyncSession', b'shouldPushChangesForEntityName:', {'retval': {'type': 'Z'}}) - r(b'ISyncSession', b'shouldReplaceAllRecordsOnClientForEntityName:', {'retval': {'type': 'Z'}}) - r(b'ISyncSessionDriver', b'handlesSyncAlerts', {'retval': {'type': 'Z'}}) - r(b'ISyncSessionDriver', b'setHandlesSyncAlerts:', {'arguments': {2: {'type': 'Z'}}}) - r(b'ISyncSessionDriver', 
b'startAsynchronousSync:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type_modifier': b'o'}}}) - r(b'ISyncSessionDriver', b'sync', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'applyChange:forEntityName:remappedRecordIdentifier:formattedRecord:error:', {'retval': {'type': 'i'}, 'arguments': {4: {'type': '^@', 'type_modifier': b'o'}, 5: {'type': '^@', 'type_modifier': b'o'}, 6: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'changedRecordsForEntityName:moreComing:error:', {'arguments': {3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'changesForEntityName:moreComing:error:', {'arguments': {3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'deleteAllRecordsForEntityName:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'identifiersForRecordsToDeleteForEntityName:moreComing:error:', {'arguments': {3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'isEqual:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'isRelationship', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'isRequired', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'isToMany', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'persistentStoreCoordinator:willDeleteRecordWithIdentifier:inSyncSession:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'persistentStoreCoordinatorShouldStartSyncing:', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'preferredSyncModeForEntityName', {'retval': {'type': 'i'}}) - r(b'NSObject', b'recordsForEntityName:moreComing:error:', {'arguments': {3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:didNegotiateAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:didPullAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:didPushAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:didReceiveSyncAlertAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:didRegisterClientAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:willFinishSessionAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:willNegotiateAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:willPullAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:willPushAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'shouldApplyRecord:withRecordIdentifier:', {'retval': {'type': 'Z'}}) - r(b'NSPersistentStoreCoordinator', b'syncWithClient:inBackground:handler:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}, 5: {'type': '^@', 'type_modifier': b'o'}}}) -finally: - objc._updatingMetadata(False) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', 
b'applyChange:forEntityName:remappedRecordIdentifier:formattedRecord:error:', {'required': True, 'retval': {'type': 'i'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': '^@', 'type_modifier': b'o'}, 5: {'type': '^@', 'type_modifier': b'o'}, 6: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'changedRecordsForEntityName:moreComing:error:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'changesForEntityName:moreComing:error:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'clientDescriptionURL', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'clientIdentifier', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'deleteAllRecordsForEntityName:error:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'entityNamesToPull', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'entityNamesToSync', {'required': False, 'retval': {'type': b'@'}}) - r(b'NSObject', b'identifiersForRecordsToDeleteForEntityName:moreComing:error:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'isEqual:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'lastAnchorForEntityName:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'managedObjectContextsToMonitorWhenSyncingPersistentStoreCoordinator:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'managedObjectContextsToReloadAfterSyncingPersistentStoreCoordinator:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'nextAnchorForEntityName:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:didApplyChange:toManagedObject:inSyncSession:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:didCancelSyncSession:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:didCommitChanges:inSyncSession:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:didFinishSyncSession:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:didPullChangesInSyncSession:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:didPushChangesInSyncSession:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:willApplyChange:toManagedObject:inSyncSession:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': 
b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:willDeleteRecordWithIdentifier:inSyncSession:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:willPullChangesInSyncSession:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:willPushChangesInSyncSession:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinator:willPushRecord:forManagedObject:inSyncSession:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'persistentStoreCoordinatorShouldStartSyncing:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'preferredSyncModeForEntityName:', {'required': True, 'retval': {'type': b'i'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'recordsForEntityName:moreComing:error:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'schemaBundleURLs', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'sessionBeginTimeout', {'required': False, 'retval': {'type': b'd'}}) - r(b'NSObject', b'sessionPullChangesTimeout', {'required': False, 'retval': {'type': b'd'}}) - r(b'NSObject', b'shouldApplyRecord:withRecordIdentifier:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'supportedEntityNames', {'required': True, 'retval': {'type': b'@'}}) -finally: - objc._updatingMetadata(False) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'attributedStringForIdentityPropertiesWithNames:inRecord:comparisonRecords:firstLineAttributes:secondLineAttributes:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'attributedStringForPropertiesWithNames:inRecord:comparisonRecords:defaultAttributes:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'changedRecordsForEntityName:moreComing:error:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'changesForEntityName:moreComing:error:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'entityNamesToPull', {'retval': {'type': b'@'}}) - r(b'NSObject', b'entityNamesToSync', {'retval': {'type': b'@'}}) - r(b'NSObject', b'identifiersForRecordsToDeleteForEntityName:moreComing:error:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^Z', 'type_modifier': b'o'}, 4: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'lastAnchorForEntityName:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'nextAnchorForEntityName:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'sessionBeginTimeout', {'retval': 
{'type': b'd'}}) - r(b'NSObject', b'sessionDriver:didNegotiateAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:didPullAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:didPushAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:didReceiveSyncAlertAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:didRegisterClientAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:willFinishSessionAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:willNegotiateAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:willPullAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriver:willPushAndReturnError:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': '^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'sessionDriverDidCancelSession:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'sessionDriverDidFinishSession:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'sessionDriverWillCancelSession:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'sessionPullChangesTimeout', {'retval': {'type': b'd'}}) -finally: - objc._updatingMetadata(False) -protocols={'ISyncSessionDriverDelegate': objc.informal_protocol('ISyncSessionDriverDelegate', [objc.selector(None, b'sessionDriver:willPullAndReturnError:', b'Z@:@^@', isRequired=False), objc.selector(None, b'sessionDriver:willPushAndReturnError:', b'Z@:@^@', isRequired=False), objc.selector(None, b'sessionDriver:didRegisterClientAndReturnError:', b'Z@:@^@', isRequired=False), objc.selector(None, b'sessionDriver:willFinishSessionAndReturnError:', b'Z@:@^@', isRequired=False), objc.selector(None, b'sessionDriver:willNegotiateAndReturnError:', b'Z@:@^@', isRequired=False), objc.selector(None, b'sessionDriver:didPullAndReturnError:', b'Z@:@^@', isRequired=False), objc.selector(None, b'sessionDriverDidFinishSession:', b'v@:@', isRequired=False), objc.selector(None, b'sessionDriver:didNegotiateAndReturnError:', b'Z@:@^@', isRequired=False), objc.selector(None, b'sessionDriver:didPushAndReturnError:', b'Z@:@^@', isRequired=False), objc.selector(None, b'sessionDriver:didReceiveSyncAlertAndReturnError:', b'Z@:@^@', isRequired=False), objc.selector(None, b'sessionDriverWillCancelSession:', b'v@:@', isRequired=False), objc.selector(None, b'sessionDriverDidCancelSession:', b'v@:@', isRequired=False)]), 'ISyncSessionDriverDataSourceOptionalMethods': objc.informal_protocol('ISyncSessionDriverDataSourceOptionalMethods', [objc.selector(None, b'entityNamesToSync', b'@@:', isRequired=False), objc.selector(None, b'sessionPullChangesTimeout', b'd@:', isRequired=False), objc.selector(None, b'sessionBeginTimeout', b'd@:', isRequired=False), objc.selector(None, 
b'nextAnchorForEntityName:', b'@@:@', isRequired=False), objc.selector(None, b'changesForEntityName:moreComing:error:', b'@@:@^Z^@', isRequired=False), objc.selector(None, b'changedRecordsForEntityName:moreComing:error:', b'@@:@^Z^@', isRequired=False), objc.selector(None, b'identifiersForRecordsToDeleteForEntityName:moreComing:error:', b'@@:@^Z^@', isRequired=False), objc.selector(None, b'entityNamesToPull', b'@@:', isRequired=False), objc.selector(None, b'lastAnchorForEntityName:', b'@@:@', isRequired=False)]), 'SyncUIHelperInformalProtocol': objc.informal_protocol('SyncUIHelperInformalProtocol', [objc.selector(None, b'attributedStringForIdentityPropertiesWithNames:inRecord:comparisonRecords:firstLineAttributes:secondLineAttributes:', b'@@:@@@@@', isRequired=False), objc.selector(None, b'attributedStringForPropertiesWithNames:inRecord:comparisonRecords:defaultAttributes:', b'@@:@@@@', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/SyncServices/_metadata.pyc b/env/lib/python2.7/site-packages/SyncServices/_metadata.pyc deleted file mode 100644 index 0ba70aa3..00000000 Binary files a/env/lib/python2.7/site-packages/SyncServices/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SystemConfiguration/__init__.py b/env/lib/python2.7/site-packages/SystemConfiguration/__init__.py deleted file mode 100644 index 55d06065..00000000 --- a/env/lib/python2.7/site-packages/SystemConfiguration/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -''' -Python mapping for the SystemConfiguration framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' -import sys -import objc -import Foundation -import SystemConfiguration._manual - -from SystemConfiguration import _metadata - -sys.modules['SystemConfiguration'] = mod = objc.ObjCLazyModule('SystemConfiguration', - "com.apple.SystemConfiguration", - objc.pathForFramework("/System/Library/Frameworks/SystemConfiguration.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - 'objc': objc, - }, ( Foundation, SystemConfiguration._manual,)) - -import sys -del sys.modules['SystemConfiguration._metadata'] - -import SystemConfiguration._manual as m -for nm in dir(m): - setattr(mod, nm, getattr(m, nm)) - -mod.SCBondInterfaceRef = mod.SCNetworkInterfaceRef -mod.SCVLANInterfaceRef = mod.SCNetworkInterfaceRef diff --git a/env/lib/python2.7/site-packages/SystemConfiguration/__init__.pyc b/env/lib/python2.7/site-packages/SystemConfiguration/__init__.pyc deleted file mode 100644 index 20c67ab3..00000000 Binary files a/env/lib/python2.7/site-packages/SystemConfiguration/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SystemConfiguration/_manual.so b/env/lib/python2.7/site-packages/SystemConfiguration/_manual.so deleted file mode 100755 index dca9febf..00000000 Binary files a/env/lib/python2.7/site-packages/SystemConfiguration/_manual.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/SystemConfiguration/_metadata.py b/env/lib/python2.7/site-packages/SystemConfiguration/_metadata.py deleted file mode 100644 index 9441e75f..00000000 --- a/env/lib/python2.7/site-packages/SystemConfiguration/_metadata.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Fri Aug 7 15:21:46 2015 - -import objc, sys - -if sys.maxsize > 2 ** 
32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = '''$kCFErrorDomainSystemConfiguration@^{__CFString=}$kCNNetworkInfoKeyBSSID@^{__CFString=}$kCNNetworkInfoKeySSID@^{__CFString=}$kCNNetworkInfoKeySSIDData@^{__CFString=}$kSCBondStatusDeviceAggregationStatus@^{__CFString=}$kSCBondStatusDeviceCollecting@^{__CFString=}$kSCBondStatusDeviceDistributing@^{__CFString=}$kSCCompAnyRegex@^{__CFString=}$kSCCompGlobal@^{__CFString=}$kSCCompHostNames@^{__CFString=}$kSCCompInterface@^{__CFString=}$kSCCompNetwork@^{__CFString=}$kSCCompService@^{__CFString=}$kSCCompSystem@^{__CFString=}$kSCCompUsers@^{__CFString=}$kSCDynamicStoreDomainFile@^{__CFString=}$kSCDynamicStoreDomainPlugin@^{__CFString=}$kSCDynamicStoreDomainPrefs@^{__CFString=}$kSCDynamicStoreDomainSetup@^{__CFString=}$kSCDynamicStoreDomainState@^{__CFString=}$kSCDynamicStorePropNetInterfaces@^{__CFString=}$kSCDynamicStorePropNetPrimaryInterface@^{__CFString=}$kSCDynamicStorePropNetPrimaryService@^{__CFString=}$kSCDynamicStorePropNetServiceIDs@^{__CFString=}$kSCDynamicStorePropSetupCurrentSet@^{__CFString=}$kSCDynamicStorePropSetupLastUpdated@^{__CFString=}$kSCDynamicStoreUseSessionKeys@^{__CFString=}$kSCEntNet6to4@^{__CFString=}$kSCEntNetAirPort@^{__CFString=}$kSCEntNetAppleTalk@^{__CFString=}$kSCEntNetDHCP@^{__CFString=}$kSCEntNetDNS@^{__CFString=}$kSCEntNetEthernet@^{__CFString=}$kSCEntNetFireWire@^{__CFString=}$kSCEntNetIPSec@^{__CFString=}$kSCEntNetIPv4@^{__CFString=}$kSCEntNetIPv6@^{__CFString=}$kSCEntNetInterface@^{__CFString=}$kSCEntNetL2TP@^{__CFString=}$kSCEntNetLink@^{__CFString=}$kSCEntNetModem@^{__CFString=}$kSCEntNetNetInfo@^{__CFString=}$kSCEntNetPPP@^{__CFString=}$kSCEntNetPPPSerial@^{__CFString=}$kSCEntNetPPPoE@^{__CFString=}$kSCEntNetPPTP@^{__CFString=}$kSCEntNetProxies@^{__CFString=}$kSCEntNetSMB@^{__CFString=}$kSCEntUsersConsoleUser@^{__CFString=}$kSCNetworkInterfaceIPv4@=^{__SCNetworkInterface=}$kSCNetworkInterfaceType6to4@^{__CFString=}$kSCNetworkInterfaceTypeBluetooth@^{__CFString=}$kSCNetworkInterfaceTypeBond@^{__CFString=}$kSCNetworkInterfaceTypeEthernet@^{__CFString=}$kSCNetworkInterfaceTypeFireWire@^{__CFString=}$kSCNetworkInterfaceTypeIEEE80211@^{__CFString=}$kSCNetworkInterfaceTypeIPSec@^{__CFString=}$kSCNetworkInterfaceTypeIPv4@^{__CFString=}$kSCNetworkInterfaceTypeIrDA@^{__CFString=}$kSCNetworkInterfaceTypeL2TP@^{__CFString=}$kSCNetworkInterfaceTypeModem@^{__CFString=}$kSCNetworkInterfaceTypePPP@^{__CFString=}$kSCNetworkInterfaceTypePPTP@^{__CFString=}$kSCNetworkInterfaceTypeSerial@^{__CFString=}$kSCNetworkInterfaceTypeVLAN@^{__CFString=}$kSCNetworkInterfaceTypeWWAN@^{__CFString=}$kSCNetworkProtocolTypeAppleTalk@^{__CFString=}$kSCNetworkProtocolTypeDNS@^{__CFString=}$kSCNetworkProtocolTypeIPv4@^{__CFString=}$kSCNetworkProtocolTypeIPv6@^{__CFString=}$kSCNetworkProtocolTypeProxies@^{__CFString=}$kSCNetworkProtocolTypeSMB@^{__CFString=}$kSCPrefCurrentSet@^{__CFString=}$kSCPrefNetworkServices@^{__CFString=}$kSCPrefSets@^{__CFString=}$kSCPrefSystem@^{__CFString=}$kSCPropInterfaceName@^{__CFString=}$kSCPropMACAddress@^{__CFString=}$kSCPropNet6to4Relay@^{__CFString=}$kSCPropNetAirPortAllowNetCreation@^{__CFString=}$kSCPropNetAirPortAuthPassword@^{__CFString=}$kSCPropNetAirPortAuthPasswordEncryption@^{__CFString=}$kSCPropNetAirPortJoinMode@^{__CFString=}$kSCPropNetAirPortPowerEnabled@^{__CFString=}$kSCPropNetAirPortPreferredNetwork@^{__CFString=}$kSCPropNetAirPortSavePa
sswords@^{__CFString=}$kSCPropNetAppleTalkComputerName@^{__CFString=}$kSCPropNetAppleTalkComputerNameEncoding@^{__CFString=}$kSCPropNetAppleTalkConfigMethod@^{__CFString=}$kSCPropNetAppleTalkDefaultZone@^{__CFString=}$kSCPropNetAppleTalkNetworkID@^{__CFString=}$kSCPropNetAppleTalkNetworkRange@^{__CFString=}$kSCPropNetAppleTalkNodeID@^{__CFString=}$kSCPropNetAppleTalkSeedNetworkRange@^{__CFString=}$kSCPropNetAppleTalkSeedZones@^{__CFString=}$kSCPropNetDNSDomainName@^{__CFString=}$kSCPropNetDNSOptions@^{__CFString=}$kSCPropNetDNSSearchDomains@^{__CFString=}$kSCPropNetDNSSearchOrder@^{__CFString=}$kSCPropNetDNSServerAddresses@^{__CFString=}$kSCPropNetDNSServerPort@^{__CFString=}$kSCPropNetDNSServerTimeout@^{__CFString=}$kSCPropNetDNSSortList@^{__CFString=}$kSCPropNetDNSSupplementalMatchDomains@^{__CFString=}$kSCPropNetDNSSupplementalMatchOrders@^{__CFString=}$kSCPropNetEthernetMTU@^{__CFString=}$kSCPropNetEthernetMediaOptions@^{__CFString=}$kSCPropNetEthernetMediaSubType@^{__CFString=}$kSCPropNetIPSecAuthenticationMethod@^{__CFString=}$kSCPropNetIPSecConnectTime@^{__CFString=}$kSCPropNetIPSecLocalCertificate@^{__CFString=}$kSCPropNetIPSecLocalIdentifier@^{__CFString=}$kSCPropNetIPSecLocalIdentifierType@^{__CFString=}$kSCPropNetIPSecRemoteAddress@^{__CFString=}$kSCPropNetIPSecSharedSecret@^{__CFString=}$kSCPropNetIPSecSharedSecretEncryption@^{__CFString=}$kSCPropNetIPSecStatus@^{__CFString=}$kSCPropNetIPSecXAuthEnabled@^{__CFString=}$kSCPropNetIPSecXAuthName@^{__CFString=}$kSCPropNetIPSecXAuthPassword@^{__CFString=}$kSCPropNetIPSecXAuthPasswordEncryption@^{__CFString=}$kSCPropNetIPv4Addresses@^{__CFString=}$kSCPropNetIPv4BroadcastAddresses@^{__CFString=}$kSCPropNetIPv4ConfigMethod@^{__CFString=}$kSCPropNetIPv4DHCPClientID@^{__CFString=}$kSCPropNetIPv4DestAddresses@^{__CFString=}$kSCPropNetIPv4Router@^{__CFString=}$kSCPropNetIPv4SubnetMasks@^{__CFString=}$kSCPropNetIPv6Addresses@^{__CFString=}$kSCPropNetIPv6ConfigMethod@^{__CFString=}$kSCPropNetIPv6DestAddresses@^{__CFString=}$kSCPropNetIPv6Flags@^{__CFString=}$kSCPropNetIPv6PrefixLength@^{__CFString=}$kSCPropNetIPv6Router@^{__CFString=}$kSCPropNetInterfaceDeviceName@^{__CFString=}$kSCPropNetInterfaceHardware@^{__CFString=}$kSCPropNetInterfaceSubType@^{__CFString=}$kSCPropNetInterfaceSupportsModemOnHold@^{__CFString=}$kSCPropNetInterfaceType@^{__CFString=}$kSCPropNetInterfaces@^{__CFString=}$kSCPropNetL2TPIPSecSharedSecret@^{__CFString=}$kSCPropNetL2TPIPSecSharedSecretEncryption@^{__CFString=}$kSCPropNetL2TPTransport@^{__CFString=}$kSCPropNetLinkActive@^{__CFString=}$kSCPropNetLinkDetaching@^{__CFString=}$kSCPropNetLocalHostName@^{__CFString=}$kSCPropNetModemAccessPointName@^{__CFString=}$kSCPropNetModemConnectSpeed@^{__CFString=}$kSCPropNetModemConnectionPersonality@^{__CFString=}$kSCPropNetModemConnectionScript@^{__CFString=}$kSCPropNetModemDataCompression@^{__CFString=}$kSCPropNetModemDeviceContextID@^{__CFString=}$kSCPropNetModemDeviceModel@^{__CFString=}$kSCPropNetModemDeviceVendor@^{__CFString=}$kSCPropNetModemDialMode@^{__CFString=}$kSCPropNetModemErrorCorrection@^{__CFString=}$kSCPropNetModemHoldCallWaitingAudibleAlert@^{__CFString=}$kSCPropNetModemHoldDisconnectOnAnswer@^{__CFString=}$kSCPropNetModemHoldEnabled@^{__CFString=}$kSCPropNetModemHoldReminder@^{__CFString=}$kSCPropNetModemHoldReminderTime@^{__CFString=}$kSCPropNetModemNote@^{__CFString=}$kSCPropNetModemPulseDial@^{__CFString=}$kSCPropNetModemSpeaker@^{__CFString=}$kSCPropNetModemSpeed@^{__CFString=}$kSCPropNetNetInfoBindingMethods@^{__CFString=}$kSCPropNetNetInfoBroadcastServ
erTag@^{__CFString=}$kSCPropNetNetInfoServerAddresses@^{__CFString=}$kSCPropNetNetInfoServerTags@^{__CFString=}$kSCPropNetOverridePrimary@^{__CFString=}$kSCPropNetPPPACSPEnabled@^{__CFString=}$kSCPropNetPPPAuthEAPPlugins@^{__CFString=}$kSCPropNetPPPAuthName@^{__CFString=}$kSCPropNetPPPAuthPassword@^{__CFString=}$kSCPropNetPPPAuthPasswordEncryption@^{__CFString=}$kSCPropNetPPPAuthPrompt@^{__CFString=}$kSCPropNetPPPAuthProtocol@^{__CFString=}$kSCPropNetPPPCCPEnabled@^{__CFString=}$kSCPropNetPPPCCPMPPE128Enabled@^{__CFString=}$kSCPropNetPPPCCPMPPE40Enabled@^{__CFString=}$kSCPropNetPPPCommAlternateRemoteAddress@^{__CFString=}$kSCPropNetPPPCommConnectDelay@^{__CFString=}$kSCPropNetPPPCommDisplayTerminalWindow@^{__CFString=}$kSCPropNetPPPCommRedialCount@^{__CFString=}$kSCPropNetPPPCommRedialEnabled@^{__CFString=}$kSCPropNetPPPCommRedialInterval@^{__CFString=}$kSCPropNetPPPCommRemoteAddress@^{__CFString=}$kSCPropNetPPPCommTerminalScript@^{__CFString=}$kSCPropNetPPPCommUseTerminalScript@^{__CFString=}$kSCPropNetPPPConnectTime@^{__CFString=}$kSCPropNetPPPDeviceLastCause@^{__CFString=}$kSCPropNetPPPDialOnDemand@^{__CFString=}$kSCPropNetPPPDisconnectOnFastUserSwitch@^{__CFString=}$kSCPropNetPPPDisconnectOnIdle@^{__CFString=}$kSCPropNetPPPDisconnectOnIdleTimer@^{__CFString=}$kSCPropNetPPPDisconnectOnLogout@^{__CFString=}$kSCPropNetPPPDisconnectOnSleep@^{__CFString=}$kSCPropNetPPPDisconnectTime@^{__CFString=}$kSCPropNetPPPIPCPCompressionVJ@^{__CFString=}$kSCPropNetPPPIPCPUsePeerDNS@^{__CFString=}$kSCPropNetPPPIdleReminder@^{__CFString=}$kSCPropNetPPPIdleReminderTimer@^{__CFString=}$kSCPropNetPPPLCPCompressionACField@^{__CFString=}$kSCPropNetPPPLCPCompressionPField@^{__CFString=}$kSCPropNetPPPLCPEchoEnabled@^{__CFString=}$kSCPropNetPPPLCPEchoFailure@^{__CFString=}$kSCPropNetPPPLCPEchoInterval@^{__CFString=}$kSCPropNetPPPLCPMRU@^{__CFString=}$kSCPropNetPPPLCPMTU@^{__CFString=}$kSCPropNetPPPLCPReceiveACCM@^{__CFString=}$kSCPropNetPPPLCPTransmitACCM@^{__CFString=}$kSCPropNetPPPLastCause@^{__CFString=}$kSCPropNetPPPLogfile@^{__CFString=}$kSCPropNetPPPOverridePrimary@^{__CFString=}$kSCPropNetPPPPlugins@^{__CFString=}$kSCPropNetPPPRetryConnectTime@^{__CFString=}$kSCPropNetPPPSessionTimer@^{__CFString=}$kSCPropNetPPPStatus@^{__CFString=}$kSCPropNetPPPUseSessionTimer@^{__CFString=}$kSCPropNetPPPVerboseLogging@^{__CFString=}$kSCPropNetProxiesExceptionsList@^{__CFString=}$kSCPropNetProxiesExcludeSimpleHostnames@^{__CFString=}$kSCPropNetProxiesFTPEnable@^{__CFString=}$kSCPropNetProxiesFTPPassive@^{__CFString=}$kSCPropNetProxiesFTPPort@^{__CFString=}$kSCPropNetProxiesFTPProxy@^{__CFString=}$kSCPropNetProxiesGopherEnable@^{__CFString=}$kSCPropNetProxiesGopherPort@^{__CFString=}$kSCPropNetProxiesGopherProxy@^{__CFString=}$kSCPropNetProxiesHTTPEnable@^{__CFString=}$kSCPropNetProxiesHTTPPort@^{__CFString=}$kSCPropNetProxiesHTTPProxy@^{__CFString=}$kSCPropNetProxiesHTTPSEnable@^{__CFString=}$kSCPropNetProxiesHTTPSPort@^{__CFString=}$kSCPropNetProxiesHTTPSProxy@^{__CFString=}$kSCPropNetProxiesProxyAutoConfigEnable@^{__CFString=}$kSCPropNetProxiesProxyAutoConfigJavaScript@^{__CFString=}$kSCPropNetProxiesProxyAutoConfigURLString@^{__CFString=}$kSCPropNetProxiesProxyAutoDiscoveryEnable@^{__CFString=}$kSCPropNetProxiesRTSPEnable@^{__CFString=}$kSCPropNetProxiesRTSPPort@^{__CFString=}$kSCPropNetProxiesRTSPProxy@^{__CFString=}$kSCPropNetProxiesSOCKSEnable@^{__CFString=}$kSCPropNetProxiesSOCKSPort@^{__CFString=}$kSCPropNetProxiesSOCKSProxy@^{__CFString=}$kSCPropNetSMBNetBIOSName@^{__CFString=}$kSCPropNetSMBNetBIOSNodeType@^{__CF
String=}$kSCPropNetSMBNetBIOSScope@^{__CFString=}$kSCPropNetSMBWINSAddresses@^{__CFString=}$kSCPropNetSMBWorkgroup@^{__CFString=}$kSCPropNetServiceOrder@^{__CFString=}$kSCPropSystemComputerName@^{__CFString=}$kSCPropSystemComputerNameEncoding@^{__CFString=}$kSCPropUserDefinedName@^{__CFString=}$kSCPropUsersConsoleUserGID@^{__CFString=}$kSCPropUsersConsoleUserName@^{__CFString=}$kSCPropUsersConsoleUserUID@^{__CFString=}$kSCPropVersion@^{__CFString=}$kSCResvInactive@^{__CFString=}$kSCResvLink@^{__CFString=}$kSCValNetAirPortAuthPasswordEncryptionKeychain@^{__CFString=}$kSCValNetAirPortJoinModeAutomatic@^{__CFString=}$kSCValNetAirPortJoinModePreferred@^{__CFString=}$kSCValNetAirPortJoinModeRanked@^{__CFString=}$kSCValNetAirPortJoinModeRecent@^{__CFString=}$kSCValNetAirPortJoinModeStrongest@^{__CFString=}$kSCValNetAppleTalkConfigMethodNode@^{__CFString=}$kSCValNetAppleTalkConfigMethodRouter@^{__CFString=}$kSCValNetAppleTalkConfigMethodSeedRouter@^{__CFString=}$kSCValNetIPSecAuthenticationMethodCertificate@^{__CFString=}$kSCValNetIPSecAuthenticationMethodHybrid@^{__CFString=}$kSCValNetIPSecAuthenticationMethodSharedSecret@^{__CFString=}$kSCValNetIPSecLocalIdentifierTypeKeyID@^{__CFString=}$kSCValNetIPSecSharedSecretEncryptionKeychain@^{__CFString=}$kSCValNetIPSecXAuthPasswordEncryptionKeychain@^{__CFString=}$kSCValNetIPSecXAuthPasswordEncryptionPrompt@^{__CFString=}$kSCValNetIPv4ConfigMethodAutomatic@^{__CFString=}$kSCValNetIPv4ConfigMethodBOOTP@^{__CFString=}$kSCValNetIPv4ConfigMethodDHCP@^{__CFString=}$kSCValNetIPv4ConfigMethodINFORM@^{__CFString=}$kSCValNetIPv4ConfigMethodLinkLocal@^{__CFString=}$kSCValNetIPv4ConfigMethodManual@^{__CFString=}$kSCValNetIPv4ConfigMethodPPP@^{__CFString=}$kSCValNetIPv6ConfigMethod6to4@^{__CFString=}$kSCValNetIPv6ConfigMethodAutomatic@^{__CFString=}$kSCValNetIPv6ConfigMethodLinkLocal@^{__CFString=}$kSCValNetIPv6ConfigMethodManual@^{__CFString=}$kSCValNetIPv6ConfigMethodRouterAdvertisement@^{__CFString=}$kSCValNetInterfaceSubTypeL2TP@^{__CFString=}$kSCValNetInterfaceSubTypePPPSerial@^{__CFString=}$kSCValNetInterfaceSubTypePPPoE@^{__CFString=}$kSCValNetInterfaceSubTypePPTP@^{__CFString=}$kSCValNetInterfaceType6to4@^{__CFString=}$kSCValNetInterfaceTypeEthernet@^{__CFString=}$kSCValNetInterfaceTypeFireWire@^{__CFString=}$kSCValNetInterfaceTypeIPSec@^{__CFString=}$kSCValNetInterfaceTypePPP@^{__CFString=}$kSCValNetL2TPIPSecSharedSecretEncryptionKeychain@^{__CFString=}$kSCValNetL2TPTransportIP@^{__CFString=}$kSCValNetL2TPTransportIPSec@^{__CFString=}$kSCValNetModemDialModeIgnoreDialTone@^{__CFString=}$kSCValNetModemDialModeManual@^{__CFString=}$kSCValNetModemDialModeWaitForDialTone@^{__CFString=}$kSCValNetNetInfoBindingMethodsBroadcast@^{__CFString=}$kSCValNetNetInfoBindingMethodsDHCP@^{__CFString=}$kSCValNetNetInfoBindingMethodsManual@^{__CFString=}$kSCValNetNetInfoDefaultServerTag@^{__CFString=}$kSCValNetPPPAuthPasswordEncryptionKeychain@^{__CFString=}$kSCValNetPPPAuthPasswordEncryptionToken@^{__CFString=}$kSCValNetPPPAuthPromptAfter@^{__CFString=}$kSCValNetPPPAuthPromptBefore@^{__CFString=}$kSCValNetPPPAuthProtocolCHAP@^{__CFString=}$kSCValNetPPPAuthProtocolEAP@^{__CFString=}$kSCValNetPPPAuthProtocolMSCHAP1@^{__CFString=}$kSCValNetPPPAuthProtocolMSCHAP2@^{__CFString=}$kSCValNetPPPAuthProtocolPAP@^{__CFString=}$kSCValNetSMBNetBIOSNodeTypeBroadcast@^{__CFString=}$kSCValNetSMBNetBIOSNodeTypeHybrid@^{__CFString=}$kSCValNetSMBNetBIOSNodeTypeMixed@^{__CFString=}$kSCValNetSMBNetBIOSNodeTypePeer@^{__CFString=}$''' -enums = 
'''$kSCBondStatusLinkInvalid@1$kSCBondStatusNoPartner@2$kSCBondStatusNotInActiveGroup@3$kSCBondStatusOK@0$kSCBondStatusUnknown@999$kSCNetworkConnectionConnected@2$kSCNetworkConnectionConnecting@1$kSCNetworkConnectionDisconnected@0$kSCNetworkConnectionDisconnecting@3$kSCNetworkConnectionInvalid@-1$kSCNetworkConnectionPPPAuthenticating@5$kSCNetworkConnectionPPPConnected@8$kSCNetworkConnectionPPPConnectingLink@2$kSCNetworkConnectionPPPDialOnTraffic@3$kSCNetworkConnectionPPPDisconnected@0$kSCNetworkConnectionPPPDisconnectingLink@10$kSCNetworkConnectionPPPHoldingLinkOff@11$kSCNetworkConnectionPPPInitializing@1$kSCNetworkConnectionPPPNegotiatingLink@4$kSCNetworkConnectionPPPNegotiatingNetwork@7$kSCNetworkConnectionPPPSuspended@12$kSCNetworkConnectionPPPTerminating@9$kSCNetworkConnectionPPPWaitingForCallBack@6$kSCNetworkConnectionPPPWaitingForRedial@13$kSCNetworkFlagsConnectionAutomatic@8$kSCNetworkFlagsConnectionRequired@4$kSCNetworkFlagsInterventionRequired@16$kSCNetworkFlagsIsDirect@131072$kSCNetworkFlagsIsLocalAddress@65536$kSCNetworkFlagsReachable@2$kSCNetworkFlagsTransientConnection@1$kSCNetworkReachabilityFlagsConnectionAutomatic@8$kSCNetworkReachabilityFlagsConnectionOnDemand@32$kSCNetworkReachabilityFlagsConnectionOnTraffic@8$kSCNetworkReachabilityFlagsConnectionRequired@4$kSCNetworkReachabilityFlagsInterventionRequired@16$kSCNetworkReachabilityFlagsIsDirect@131072$kSCNetworkReachabilityFlagsIsLocalAddress@65536$kSCNetworkReachabilityFlagsReachable@2$kSCNetworkReachabilityFlagsTransientConnection@1$kSCPreferencesNotificationApply@2$kSCPreferencesNotificationCommit@1$kSCStatusAccessError@1003$kSCStatusConnectionIgnore@5002$kSCStatusConnectionNoService@5001$kSCStatusFailed@1001$kSCStatusInvalidArgument@1002$kSCStatusKeyExists@1005$kSCStatusLocked@1006$kSCStatusMaxLink@3006$kSCStatusNeedLock@1007$kSCStatusNoConfigFile@3003$kSCStatusNoKey@1004$kSCStatusNoLink@3004$kSCStatusNoPrefsSession@3001$kSCStatusNoStoreServer@2002$kSCStatusNoStoreSession@2001$kSCStatusNotifierActive@2003$kSCStatusOK@0$kSCStatusPrefsBusy@3002$kSCStatusReachabilityUnknown@4001$kSCStatusStale@3005$''' -misc.update({'kSCNetworkConnectionSelectionOptionOnDemandHostName': b'OnDemandHostName'.decode("utf-8"), 'kSCNetworkConnectionPacketsOut': b'PacketsOut'.decode("utf-8"), 'kSCNetworkConnectionBytesIn': b'BytesIn'.decode("utf-8"), 'kSCNetworkConnectionPacketsIn': b'PacketsIn'.decode("utf-8"), 'kSCNetworkConnectionErrorsIn': b'ErrorsIn'.decode("utf-8"), 'kSCNetworkConnectionBytesOut': b'BytesOut'.decode("utf-8"), 'kSCNetworkConnectionSelectionOptionOnDemandRetry': b'OnDemandRetry'.decode("utf-8"), 'kSCNetworkConnectionErrorsOut': b'ErrorsOut'.decode("utf-8")}) -functions={'SCBondInterfaceCopyAll': (b'^{__CFArray=}^{__SCPreferences=}', '', {'retval': {'already_cfretained': True}}), 'SCPreferencesGetTypeID': (sel32or64(b'L', b'Q'),), 'SCPreferencesSetValue': (b'Z^{__SCPreferences=}^{__CFString=}@',), 'SCNetworkServiceEstablishDefaultConfiguration': (b'Z^{__SCNetworkService=}',), 'SCNetworkServiceCreate': (b'^{__SCNetworkService=}^{__SCPreferences=}^{__SCNetworkInterface=}', '', {'retval': {'already_cfretained': True}}), 'CNMarkPortalOnline': (b'Z^{__CFString=}',), 'SCNetworkInterfaceCreateWithInterface': (b'^{__SCNetworkInterface=}^{__SCNetworkInterface=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCDynamicStoreCopyDHCPInfo': (b'^{__CFDictionary=}^{__SCDynamicStore=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkSetCopyServices': (b'^{__CFArray=}^{__SCNetworkSet=}', '', {'retval': 
{'already_cfretained': True}}), 'SCNetworkSetSetCurrent': (b'Z^{__SCNetworkSet=}',), 'SCDynamicStoreCopyComputerName': (b'^{__CFString=}^{__SCDynamicStore=}^I', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}}}), 'SCNetworkConnectionScheduleWithRunLoop': (b'Z^{__SCNetworkConnection=}^{__CFRunLoop=}^{__CFString=}',), 'SCDynamicStoreNotifyValue': (b'Z^{__SCDynamicStore=}^{__CFString=}',), 'SCDynamicStoreCopyLocalHostName': (b'^{__CFString=}^{__SCDynamicStore=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkInterfaceGetInterface': (b'^{__SCNetworkInterface=}^{__SCNetworkInterface=}',), 'SCNetworkSetRemoveService': (b'Z^{__SCNetworkSet=}^{__SCNetworkService=}',), 'SCVLANInterfaceCopyAll': (b'^{__CFArray=}^{__SCPreferences=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkProtocolGetConfiguration': (b'^{__CFDictionary=}^{__SCNetworkProtocol=}',), 'SCDynamicStoreCreateWithOptions': (sel32or64(b'^{__SCDynamicStore=}^{__CFAllocator=}^{__CFString=}^{__CFDictionary=}^?^{?=i^v^?^?^?}', b'^{__SCDynamicStore=}^{__CFAllocator=}^{__CFString=}^{__CFDictionary=}^?^{?=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__SCDynamicStore=}'}, 1: {'type': b'^{__CFArray=}'}, 2: {'type': b'^v'}}}}}}), 'SCVLANInterfaceCreate': (b'^{__SCNetworkInterface=}^{__SCPreferences=}^{__SCNetworkInterface=}^{__CFNumber=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkServiceGetInterface': (b'^{__SCNetworkInterface=}^{__SCNetworkService=}',), 'SCDynamicStoreKeyCreateProxies': (b'^{__CFString=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkReachabilityScheduleWithRunLoop': (b'Z^{__SCNetworkReachability=}^{__CFRunLoop=}^{__CFString=}',), 'SCDynamicStoreAddTemporaryValue': (b'Z^{__SCDynamicStore=}^{__CFString=}@',), 'SCNetworkConnectionStop': (b'Z^{__SCNetworkConnection=}Z',), 'SCBondInterfaceCopyAvailableMemberInterfaces': (b'^{__CFArray=}^{__SCPreferences=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkServiceAddProtocolType': (b'Z^{__SCNetworkService=}^{__CFString=}',), 'SCDynamicStoreCreate': (sel32or64(b'^{__SCDynamicStore=}^{__CFAllocator=}^{__CFString=}^?^{?=i^v^?^?^?}', b'^{__SCDynamicStore=}^{__CFAllocator=}^{__CFString=}^?^{?=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__SCDynamicStore=}'}, 1: {'type': b'^{__CFArray=}'}, 2: {'type': b'^v'}}}}}}), 'SCPreferencesPathCreateUniqueChild': (b'^{__CFString=}^{__SCPreferences=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkServiceGetServiceID': (b'^{__CFString=}^{__SCNetworkService=}',), 'SCBondInterfaceCreate': (b'^{__SCNetworkInterface=}^{__SCPreferences=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkConnectionCopyUserOptions': (b'^{__CFDictionary=}^{__SCNetworkConnection=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkReachabilitySetDispatchQueue': (b'Z^{__SCNetworkReachability=}^{dispatch_queue_s=}',), 'SCPreferencesAddValue': (b'Z^{__SCPreferences=}^{__CFString=}@',), 'SCDynamicStoreKeyCreateHostNames': (b'^{__CFString=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'CNMarkPortalOffline': (b'Z^{__CFString=}',), 'SCPreferencesCreate': (b'^{__SCPreferences=}^{__CFAllocator=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCVLANInterfaceSetOptions': 
(b'Z^{__SCNetworkInterface=}^{__CFDictionary=}',), 'SCDynamicStoreSetMultiple': (b'Z^{__SCDynamicStore=}^{__CFDictionary=}^{__CFArray=}^{__CFArray=}',), 'SCNetworkConnectionCopyUserPreferences': (b'Z^{__CFDictionary=}^^{__CFString}^^{__CFDictionary}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}, 2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'SCNetworkInterfaceGetBSDName': (b'^{__CFString=}^{__SCNetworkInterface=}',), 'SCNetworkInterfaceSetExtendedConfiguration': (b'Z^{__SCNetworkInterface=}^{__CFString=}^{__CFDictionary=}',), 'SCNetworkCheckReachabilityByName': (b'Z*^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'SCNetworkInterfaceCopyMediaSubTypes': (b'^{__CFArray=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'SCErrorString': (b'*i',), 'DHCPClientPreferencesCopyApplicationOptions': (b'^C^{__CFString=}^i', '', {'retval': {'free_result': True, 'already_cfretained': True, 'c_array_length_in_arg': 1}, 'arguments': {1: {'type_modifier': 'o'}}}), 'SCNetworkProtocolGetProtocolType': (b'^{__CFString=}^{__SCNetworkProtocol=}',), 'SCBondStatusGetTypeID': (sel32or64(b'L', b'Q'),), 'SCPreferencesCreateWithAuthorization': (b'^{__SCPreferences=}^{__CFAllocator=}^{__CFString=}^{__CFString=}^{AuthorizationOpaqueRef=}', '', {'retval': {'already_cfretained': True}}), 'SCBondInterfaceSetLocalizedDisplayName': (b'Z^{__SCNetworkInterface=}^{__CFString=}',), 'SCDynamicStoreKeyCreateConsoleUser': (b'^{__CFString=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkServiceSetName': (b'Z^{__SCNetworkService=}^{__CFString=}',), 'SCDynamicStoreSetNotificationKeys': (b'Z^{__SCDynamicStore=}^{__CFArray=}^{__CFArray=}',), 'SCDynamicStoreKeyCreateNetworkInterface': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkInterfaceSetMediaOptions': (b'Z^{__SCNetworkInterface=}^{__CFString=}^{__CFArray=}',), 'SCPreferencesGetValue': (b'@^{__SCPreferences=}^{__CFString=}',), 'SCNetworkInterfaceGetConfiguration': (b'^{__CFDictionary=}^{__SCNetworkInterface=}',), 'DHCPInfoGetLeaseStartTime': (b'^{__CFDate=}^{__CFDictionary=}',), 'SCNetworkInterfaceGetExtendedConfiguration': (b'^{__CFDictionary=}^{__SCNetworkInterface=}^{__CFString=}',), 'SCVLANInterfaceGetPhysicalInterface': (b'^{__SCNetworkInterface=}^{__SCNetworkInterface=}',), 'SCNetworkConnectionCopyServiceID': (b'^{__CFString=}^{__SCNetworkConnection=}', '', {'retval': {'already_cfretained': True}}), 'SCDynamicStoreGetTypeID': (sel32or64(b'L', b'Q'),), 'SCNetworkSetCopy': (b'^{__SCNetworkSet=}^{__SCPreferences=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCBondStatusGetMemberInterfaces': (b'^{__CFArray=}^{__SCBondStatus=}',), 'SCNetworkProtocolSetEnabled': (b'Z^{__SCNetworkProtocol=}Z',), 'SCNetworkConnectionUnscheduleFromRunLoop': (b'Z^{__SCNetworkConnection=}^{__CFRunLoop=}^{__CFString=}',), 'SCPreferencesSynchronize': (b'v^{__SCPreferences=}',), 'SCPreferencesSetComputerName': (sel32or64(b'Z^{__SCPreferences=}^{__CFString=}L', b'Z^{__SCPreferences=}^{__CFString=}I'),), 'SCVLANInterfaceRemove': (b'Z^{__SCNetworkInterface=}',), 'SCDynamicStoreKeyCreateNetworkInterfaceEntity': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCDynamicStoreCopyConsoleUser': (b'^{__CFString=}^{__SCDynamicStore=}^I^I', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}, 2: 
{'type_modifier': 'o'}}}), 'SCVLANInterfaceCopyAvailablePhysicalInterfaces': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkConnectionCreateWithServiceID': (sel32or64(b'^{__SCNetworkConnection=}^{__CFAllocator=}^{__CFString=}^?^{?=i^v^?^?^?}', b'^{__SCNetworkConnection=}^{__CFAllocator=}^{__CFString=}^?^{?=q^v^?^?^?}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__SCNetworkConnection=}'}, 1: {'type': b'i'}, 2: {'type': b'^v'}}}}}}), 'SCNetworkServiceGetTypeID': (sel32or64(b'L', b'Q'),), 'SCCopyLastError': (b'^{__CFError=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkServiceRemoveProtocolType': (b'Z^{__SCNetworkService=}^{__CFString=}',), 'SCDynamicStoreKeyCreateComputerName': (b'^{__CFString=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'SCPreferencesUnscheduleFromRunLoop': (b'Z^{__SCPreferences=}^{__CFRunLoop=}^{__CFString=}',), 'SCNetworkReachabilityCreateWithName': (b'^{__SCNetworkReachability=}^{__CFAllocator=}*', '', {'retval': {'already_cfretained': True}}), 'SCBondInterfaceRemove': (b'Z^{__SCNetworkInterface=}',), 'SCNetworkCheckReachabilityByAddress': (b'Z^{sockaddr=CC[14c]}I^I', '', {'arguments': {0: {'type_modifier': 'n'}, 2: {'type_modifier': 'o'}}}), 'SCNetworkSetContainsInterface': (b'Z^{__SCNetworkSet=}^{__SCNetworkInterface=}',), 'SCNetworkServiceRemove': (b'Z^{__SCNetworkService=}',), 'SCPreferencesApplyChanges': (b'Z^{__SCPreferences=}',), 'CNCopySupportedInterfaces': (b'^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'SCBondInterfaceGetMemberInterfaces': (b'^{__CFArray=}^{__SCNetworkInterface=}',), 'SCPreferencesPathGetLink': (b'^{__CFString=}^{__SCPreferences=}^{__CFString=}',), 'SCPreferencesPathSetLink': (b'Z^{__SCPreferences=}^{__CFString=}^{__CFString=}',), 'SCNetworkInterfaceGetHardwareAddressString': (b'^{__CFString=}^{__SCNetworkInterface=}',), 'SCNetworkSetGetTypeID': (sel32or64(b'L', b'Q'),), 'SCNetworkReachabilityUnscheduleFromRunLoop': (b'Z^{__SCNetworkReachability=}^{__CFRunLoop=}^{__CFString=}',), 'SCNetworkServiceSetEnabled': (b'Z^{__SCNetworkService=}Z',), 'SCPreferencesSetDispatchQueue': (b'Z^{__SCPreferences=}^{dispatch_queue_s=}',), 'SCNetworkReachabilityGetTypeID': (sel32or64(b'L', b'Q'),), 'SCDynamicStoreAddValue': (b'Z^{__SCDynamicStore=}^{__CFString=}@',), 'SCNetworkServiceGetEnabled': (b'Z^{__SCNetworkService=}',), 'SCNetworkSetAddService': (b'Z^{__SCNetworkSet=}^{__SCNetworkService=}',), 'SCNetworkConnectionGetTypeID': (sel32or64(b'L', b'Q'),), 'SCDynamicStoreCopyValue': (b'@^{__SCDynamicStore=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCBondInterfaceSetMemberInterfaces': (b'Z^{__SCNetworkInterface=}^{__CFArray=}',), 'DHCPInfoGetOptionData': (b'^{__CFData=}^{__CFDictionary=}C',), 'SCDynamicStoreCopyMultiple': (b'^{__CFDictionary=}^{__SCDynamicStore=}^{__CFArray=}^{__CFArray=}', '', {'retval': {'already_cfretained': True}}), 'SCBondStatusGetInterfaceStatus': (b'^{__CFDictionary=}^{__SCBondStatus=}^{__SCNetworkInterface=}',), 'SCNetworkConnectionCopyExtendedStatus': (b'^{__CFDictionary=}^{__SCNetworkConnection=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkProtocolGetTypeID': (sel32or64(b'L', b'Q'),), 'SCNetworkSetGetName': (b'^{__CFString=}^{__SCNetworkSet=}',), 'SCPreferencesSetCallback': (sel32or64(b'Z^{__SCPreferences=}^?^{?=i^v^?^?^?}', b'Z^{__SCPreferences=}^?^{?=q^v^?^?^?}'), '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 
'arguments': {0: {'type': b'^{__SCPreferences=}'}, 1: {'type': b'I'}, 2: {'type': b'^v'}}}}}}), 'SCNetworkInterfaceGetSupportedProtocolTypes': (b'^{__CFArray=}^{__SCNetworkInterface=}',), 'SCNetworkServiceCopyProtocol': (b'^{__SCNetworkProtocol=}^{__SCNetworkService=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCDynamicStoreRemoveValue': (b'Z^{__SCDynamicStore=}^{__CFString=}',), 'SCVLANInterfaceGetTag': (b'^{__CFNumber=}^{__SCNetworkInterface=}',), 'SCNetworkServiceCopy': (b'^{__SCNetworkService=}^{__SCPreferences=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCDynamicStoreCopyKeyList': (b'^{__CFArray=}^{__SCDynamicStore=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkInterfaceGetLocalizedDisplayName': (b'^{__CFString=}^{__SCNetworkInterface=}',), 'SCDynamicStoreCopyNotifiedKeys': (b'^{__CFArray=}^{__SCDynamicStore=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkInterfaceCopyMediaSubTypeOptions': (b'^{__CFArray=}^{__CFArray=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'DHCPInfoGetLeaseExpirationTime': (b'^{__CFDate=}^{__CFDictionary=}',), 'SCVLANInterfaceSetLocalizedDisplayName': (b'Z^{__SCNetworkInterface=}^{__CFString=}',), 'SCVLANInterfaceSetPhysicalInterfaceAndTag': (b'Z^{__SCNetworkInterface=}^{__SCNetworkInterface=}^{__CFNumber=}',), 'SCNetworkProtocolSetConfiguration': (b'Z^{__SCNetworkProtocol=}^{__CFDictionary=}',), 'SCBondInterfaceGetOptions': (b'^{__CFDictionary=}^{__SCNetworkInterface=}',), 'SCBondInterfaceCopyStatus': (b'^{__SCBondStatus=}^{__SCNetworkInterface=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkInterfaceGetTypeID': (sel32or64(b'L', b'Q'),), 'CNCopyCurrentNetworkInfo': (b'^{__CFDictionary=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'CNSetSupportedSSIDs': (b'Z^{__CFArray=}',), 'SCNetworkConnectionCopyStatistics': (b'^{__CFDictionary=}^{__SCNetworkConnection=}', '', {'retval': {'already_cfretained': True}}), 'SCDynamicStoreKeyCreate': (b'^{__CFString=}^{__CFAllocator=}@', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'printf_format': True}}, 'variadic': True}), 'SCPreferencesCommitChanges': (b'Z^{__SCPreferences=}',), 'SCNetworkSetSetServiceOrder': (b'Z^{__SCNetworkSet=}^{__CFArray=}',), 'SCPreferencesLock': (b'Z^{__SCPreferences=}Z',), 'SCNetworkSetCopyAll': (b'^{__CFArray=}^{__SCPreferences=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkInterfaceRefreshConfiguration': (b'Z^{__CFString=}',), 'SCNetworkSetSetName': (b'Z^{__SCNetworkSet=}^{__CFString=}',), 'SCPreferencesPathRemoveValue': (b'Z^{__SCPreferences=}^{__CFString=}',), 'SCDynamicStoreCopyProxies': (b'^{__CFDictionary=}^{__SCDynamicStore=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkServiceGetName': (b'^{__CFString=}^{__SCNetworkService=}',), 'SCNetworkServiceCopyAll': (b'^{__CFArray=}^{__SCPreferences=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkInterfaceSetMTU': (b'Z^{__SCNetworkInterface=}i',), 'DHCPClientPreferencesSetApplicationOptions': (sel32or64(b'Z^{__CFString=}^Cl', b'Z^{__CFString=}^Cq'), '', {'arguments': {1: {'c_array_length_in_arg': 2, 'type_modifier': 'n'}}}), 'SCDynamicStoreKeyCreateNetworkGlobalEntity': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkSetCopyCurrent': (b'^{__SCNetworkSet=}^{__SCPreferences=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkInterfaceCopyAll': (b'^{__CFArray=}', '', {'retval': 
{'already_cfretained': True}}), 'SCPreferencesRemoveValue': (b'Z^{__SCPreferences=}^{__CFString=}',), 'SCPreferencesPathGetValue': (b'^{__CFDictionary=}^{__SCPreferences=}^{__CFString=}',), 'SCDynamicStoreSetValue': (b'Z^{__SCDynamicStore=}^{__CFString=}@',), 'SCNetworkConnectionGetStatus': (b'i^{__SCNetworkConnection=}',), 'SCNetworkServiceCopyProtocols': (b'^{__CFArray=}^{__SCNetworkService=}', '', {'retval': {'already_cfretained': True}}), 'SCDynamicStoreCopyLocation': (b'^{__CFString=}^{__SCDynamicStore=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkReachabilityCreateWithAddress': (b'^{__SCNetworkReachability=}^{__CFAllocator=}^{sockaddr=CC[14c]}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}}}), 'SCNetworkSetRemove': (b'Z^{__SCNetworkSet=}',), 'SCNetworkInterfaceSetConfiguration': (b'Z^{__SCNetworkInterface=}^{__CFDictionary=}',), 'SCPreferencesSetLocalHostName': (b'Z^{__SCPreferences=}^{__CFString=}',), 'SCNetworkInterfaceGetInterfaceType': (b'^{__CFString=}^{__SCNetworkInterface=}',), 'SCPreferencesGetSignature': (b'^{__CFData=}^{__SCPreferences=}',), 'SCNetworkConnectionStart': (b'Z^{__SCNetworkConnection=}^{__CFDictionary=}Z',), 'SCNetworkInterfaceCopyMTU': (b'Z^{__SCNetworkInterface=}^i^i^i', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}, 2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'SCNetworkSetGetServiceOrder': (b'^{__CFArray=}^{__SCNetworkSet=}',), 'SCDynamicStoreKeyCreateLocation': (b'^{__CFString=}^{__CFAllocator=}', '', {'retval': {'already_cfretained': True}}), 'SCBondInterfaceSetOptions': (b'Z^{__SCNetworkInterface=}^{__CFDictionary=}',), 'SCDynamicStoreCreateRunLoopSource': (sel32or64(b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__SCDynamicStore=}l', b'^{__CFRunLoopSource=}^{__CFAllocator=}^{__SCDynamicStore=}q'), '', {'retval': {'already_cfretained': True}}), 'SCNetworkProtocolGetEnabled': (b'Z^{__SCNetworkProtocol=}',), 'SCNetworkSetCreate': (b'^{__SCNetworkSet=}^{__SCPreferences=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkConnectionSetDispatchQueue': (b'Z^{__SCNetworkConnection=}^{dispatch_queue_s=}',), 'SCDynamicStoreKeyCreateNetworkServiceEntity': (b'^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}^{__CFString=}', '', {'retval': {'already_cfretained': True}}), 'SCDynamicStoreSetDispatchQueue': (b'Z^{__SCDynamicStore=}^{dispatch_queue_s=}',), 'SCNetworkReachabilitySetCallback': (sel32or64(b'Z^{__SCNetworkReachability=}^?^{?=i^v^?^?^?}', b'Z^{__SCNetworkReachability=}^?^{?=q^v^?^?^?}'), '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^{__SCNetworkReachability=}'}, 1: {'type': b'I'}, 2: {'type': b'^v'}}}}}}), 'SCNetworkInterfaceGetSupportedInterfaceTypes': (b'^{__CFArray=}^{__SCNetworkInterface=}',), 'SCPreferencesCopyKeyList': (b'^{__CFArray=}^{__SCPreferences=}', '', {'retval': {'already_cfretained': True}}), 'SCNetworkReachabilityCreateWithAddressPair': (b'^{__SCNetworkReachability=}^{__CFAllocator=}^{sockaddr=CC[14c]}^{sockaddr=CC[14c]}', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'SCNetworkInterfaceForceConfigurationRefresh': (b'Z^{__SCNetworkInterface=}',), 'SCNetworkSetGetSetID': (b'^{__CFString=}^{__SCNetworkSet=}',), 'SCNetworkInterfaceCopyMediaOptions': (b'Z^{__SCNetworkInterface=}^^{__CFDictionary}^^{__CFDictionary}^^{__CFArray}Z', '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'type_modifier': 'o'}, 
2: {'type_modifier': 'o'}, 3: {'type_modifier': 'o'}}}), 'SCPreferencesScheduleWithRunLoop': (b'Z^{__SCPreferences=}^{__CFRunLoop=}^{__CFString=}',), 'SCVLANInterfaceGetOptions': (b'^{__CFDictionary=}^{__SCNetworkInterface=}',), 'SCNetworkReachabilityGetFlags': (b'Z^{__SCNetworkReachability=}^I', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'SCPreferencesPathSetValue': (b'Z^{__SCPreferences=}^{__CFString=}^{__CFDictionary=}',), 'SCPreferencesUnlock': (b'Z^{__SCPreferences=}',), 'SCError': (b'i',)} -aliases = {'kSCNetworkReachabilityFlagsConnectionAutomatic': 'kSCNetworkReachabilityFlagsConnectionOnTraffic', 'SCBondInterfaceRef': 'SCNetworkInterfaceRef', 'SCVLANInterfaceRef': 'SCNetworkInterfaceRef'} -cftypes=[('SCBondStatusRef', b'^{__SCBondStatus=}', 'SCBondStatusGetTypeID', None), ('SCDynamicStoreRef', b'^{__SCDynamicStore=}', 'SCDynamicStoreGetTypeID', None), ('SCNetworkConnectionRef', b'^{__SCNetworkConnection=}', 'SCNetworkConnectionGetTypeID', None), ('SCNetworkInterfaceRef', b'^{__SCNetworkInterface=}', 'SCNetworkInterfaceGetTypeID', None), ('SCNetworkProtocolRef', b'^{__SCNetworkProtocol=}', 'SCNetworkProtocolGetTypeID', None), ('SCNetworkReachabilityRef', b'^{__SCNetworkReachability=}', 'SCNetworkReachabilityGetTypeID', None), ('SCNetworkServiceRef', b'^{__SCNetworkService=}', 'SCNetworkServiceGetTypeID', None), ('SCNetworkSetRef', b'^{__SCNetworkSet=}', 'SCNetworkSetGetTypeID', None), ('SCPreferencesRef', b'^{__SCPreferences=}', 'SCPreferencesGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/SystemConfiguration/_metadata.pyc b/env/lib/python2.7/site-packages/SystemConfiguration/_metadata.pyc deleted file mode 100644 index 14fdd80d..00000000 Binary files a/env/lib/python2.7/site-packages/SystemConfiguration/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/VideoToolbox/_VideoToolbox.so b/env/lib/python2.7/site-packages/VideoToolbox/_VideoToolbox.so deleted file mode 100755 index e13fb98b..00000000 Binary files a/env/lib/python2.7/site-packages/VideoToolbox/_VideoToolbox.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/VideoToolbox/__init__.py b/env/lib/python2.7/site-packages/VideoToolbox/__init__.py deleted file mode 100644 index 55821bbf..00000000 --- a/env/lib/python2.7/site-packages/VideoToolbox/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -''' -Python mapping for the VideoToolbox framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. 
-''' - -import objc -import sys -import Foundation -import Quartz -import CoreMedia - -from VideoToolbox import _metadata -import VideoToolbox._VideoToolbox - -sys.modules['VideoToolbox'] = mod = objc.ObjCLazyModule( - "VideoToolbox", - "com.apple.VideoToolbox", - objc.pathForFramework("/System/Library/Frameworks/VideoToolbox.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (VideoToolbox._VideoToolbox, Quartz, CoreMedia, Foundation,)) - -import sys -del sys.modules['VideoToolbox._metadata'] diff --git a/env/lib/python2.7/site-packages/VideoToolbox/__init__.pyc b/env/lib/python2.7/site-packages/VideoToolbox/__init__.pyc deleted file mode 100644 index 5abdd74c..00000000 Binary files a/env/lib/python2.7/site-packages/VideoToolbox/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/VideoToolbox/_metadata.py b/env/lib/python2.7/site-packages/VideoToolbox/_metadata.py deleted file mode 100644 index 0f61b8c4..00000000 --- a/env/lib/python2.7/site-packages/VideoToolbox/_metadata.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Sun Sep 9 19:02:46 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'VTInt32Point': objc.createStructType('VTInt32Point', b'{VTInt32Point=ii}', ['x', 'y']), 'VTInt32Size': objc.createStructType('VTInt32Size', b'{VTInt32Size=ii}', ['width', 'height'])}) -constants = '''$kVTCompressionPropertyKey_AllowFrameReordering@^{__CFString=}$kVTCompressionPropertyKey_AllowOpenGOP@^{__CFString=}$kVTCompressionPropertyKey_AllowTemporalCompression@^{__CFString=}$kVTCompressionPropertyKey_AspectRatio16x9@^{__CFString=}$kVTCompressionPropertyKey_AverageBitRate@^{__CFString=}$kVTCompressionPropertyKey_BaseLayerFrameRate@^{__CFString=}$kVTCompressionPropertyKey_CleanAperture@^{__CFString=}$kVTCompressionPropertyKey_ColorPrimaries@^{__CFString=}$kVTCompressionPropertyKey_ContentLightLevelInfo@^{__CFString=}$kVTCompressionPropertyKey_DataRateLimits@^{__CFString=}$kVTCompressionPropertyKey_Depth@^{__CFString=}$kVTCompressionPropertyKey_EncoderID@^{__CFString=}$kVTCompressionPropertyKey_ExpectedDuration@^{__CFString=}$kVTCompressionPropertyKey_ExpectedFrameRate@^{__CFString=}$kVTCompressionPropertyKey_FieldCount@^{__CFString=}$kVTCompressionPropertyKey_FieldDetail@^{__CFString=}$kVTCompressionPropertyKey_H264EntropyMode@^{__CFString=}$kVTCompressionPropertyKey_ICCProfile@^{__CFString=}$kVTCompressionPropertyKey_MasteringDisplayColorVolume@^{__CFString=}$kVTCompressionPropertyKey_MaxFrameDelayCount@^{__CFString=}$kVTCompressionPropertyKey_MaxH264SliceBytes@^{__CFString=}$kVTCompressionPropertyKey_MaxKeyFrameInterval@^{__CFString=}$kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration@^{__CFString=}$kVTCompressionPropertyKey_MaximizePowerEfficiency@^{__CFString=}$kVTCompressionPropertyKey_MoreFramesAfterEnd@^{__CFString=}$kVTCompressionPropertyKey_MoreFramesBeforeStart@^{__CFString=}$kVTCompressionPropertyKey_MultiPassStorage@^{__CFString=}$kVTCompressionPropertyKey_NumberOfPendingFrames@^{__CFString=}$kVTCompressionPropertyKey_PixelAspectRatio@^{__CFString=}$kVTCompressionPropertyKey_PixelBufferPoolIsShared@^{__CFString=}$kVTCompressionPropertyKey_PixelTransferProperties@^{__CFString=}$kVTCompressionPrope
rtyKey_ProfileLevel@^{__CFString=}$kVTCompressionPropertyKey_ProgressiveScan@^{__CFString=}$kVTCompressionPropertyKey_Quality@^{__CFString=}$kVTCompressionPropertyKey_RealTime@^{__CFString=}$kVTCompressionPropertyKey_SourceFrameCount@^{__CFString=}$kVTCompressionPropertyKey_TransferFunction@^{__CFString=}$kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder@^{__CFString=}$kVTCompressionPropertyKey_VideoEncoderPixelBufferAttributes@^{__CFString=}$kVTCompressionPropertyKey_YCbCrMatrix@^{__CFString=}$kVTDecompressionPropertyKey_ContentHasInterframeDependencies@^{__CFString=}$kVTDecompressionPropertyKey_DeinterlaceMode@^{__CFString=}$kVTDecompressionPropertyKey_FieldMode@^{__CFString=}$kVTDecompressionPropertyKey_MaxOutputPresentationTimeStampOfFramesBeingDecoded@^{__CFString=}$kVTDecompressionPropertyKey_MaximizePowerEfficiency@^{__CFString=}$kVTDecompressionPropertyKey_MinOutputPresentationTimeStampOfFramesBeingDecoded@^{__CFString=}$kVTDecompressionPropertyKey_NumberOfFramesBeingDecoded@^{__CFString=}$kVTDecompressionPropertyKey_OnlyTheseFrames@^{__CFString=}$kVTDecompressionPropertyKey_OutputPoolRequestedMinimumBufferCount@^{__CFString=}$kVTDecompressionPropertyKey_PixelBufferPool@^{__CFString=}$kVTDecompressionPropertyKey_PixelBufferPoolIsShared@^{__CFString=}$kVTDecompressionPropertyKey_PixelFormatsWithReducedResolutionSupport@^{__CFString=}$kVTDecompressionPropertyKey_PixelTransferProperties@^{__CFString=}$kVTDecompressionPropertyKey_RealTime@^{__CFString=}$kVTDecompressionPropertyKey_ReducedCoefficientDecode@^{__CFString=}$kVTDecompressionPropertyKey_ReducedFrameDelivery@^{__CFString=}$kVTDecompressionPropertyKey_ReducedResolutionDecode@^{__CFString=}$kVTDecompressionPropertyKey_SuggestedQualityOfServiceTiers@^{__CFString=}$kVTDecompressionPropertyKey_SupportedPixelFormatsOrderedByPerformance@^{__CFString=}$kVTDecompressionPropertyKey_SupportedPixelFormatsOrderedByQuality@^{__CFString=}$kVTDecompressionPropertyKey_ThreadCount@^{__CFString=}$kVTDecompressionPropertyKey_UsingHardwareAcceleratedVideoDecoder@^{__CFString=}$kVTDecompressionProperty_DeinterlaceMode_Temporal@^{__CFString=}$kVTDecompressionProperty_DeinterlaceMode_VerticalFilter@^{__CFString=}$kVTDecompressionProperty_FieldMode_BothFields@^{__CFString=}$kVTDecompressionProperty_FieldMode_BottomFieldOnly@^{__CFString=}$kVTDecompressionProperty_FieldMode_DeinterlaceFields@^{__CFString=}$kVTDecompressionProperty_FieldMode_SingleField@^{__CFString=}$kVTDecompressionProperty_FieldMode_TopFieldOnly@^{__CFString=}$kVTDecompressionProperty_OnlyTheseFrames_AllFrames@^{__CFString=}$kVTDecompressionProperty_OnlyTheseFrames_IFrames@^{__CFString=}$kVTDecompressionProperty_OnlyTheseFrames_KeyFrames@^{__CFString=}$kVTDecompressionProperty_OnlyTheseFrames_NonDroppableFrames@^{__CFString=}$kVTDecompressionProperty_TemporalLevelLimit@^{__CFString=}$kVTDecompressionResolutionKey_Height@^{__CFString=}$kVTDecompressionResolutionKey_Width@^{__CFString=}$kVTDownsamplingMode_Average@^{__CFString=}$kVTDownsamplingMode_Decimate@^{__CFString=}$kVTEncodeFrameOptionKey_ForceKeyFrame@^{__CFString=}$kVTH264EntropyMode_CABAC@^{__CFString=}$kVTH264EntropyMode_CAVLC@^{__CFString=}$kVTMultiPassStorageCreationOption_DoNotDelete@^{__CFString=}$kVTPixelTransferPropertyKey_DestinationCleanAperture@^{__CFString=}$kVTPixelTransferPropertyKey_DestinationColorPrimaries@^{__CFString=}$kVTPixelTransferPropertyKey_DestinationICCProfile@^{__CFString=}$kVTPixelTransferPropertyKey_DestinationPixelAspectRatio@^{__CFString=}$kVTPixelTransferPropertyKey_DestinationTran
sferFunction@^{__CFString=}$kVTPixelTransferPropertyKey_DestinationYCbCrMatrix@^{__CFString=}$kVTPixelTransferPropertyKey_DownsamplingMode@^{__CFString=}$kVTPixelTransferPropertyKey_ScalingMode@^{__CFString=}$kVTProfileLevel_H263_Profile0_Level10@^{__CFString=}$kVTProfileLevel_H263_Profile0_Level45@^{__CFString=}$kVTProfileLevel_H263_Profile3_Level45@^{__CFString=}$kVTProfileLevel_H264_Baseline_1_3@^{__CFString=}$kVTProfileLevel_H264_Baseline_3_0@^{__CFString=}$kVTProfileLevel_H264_Baseline_3_1@^{__CFString=}$kVTProfileLevel_H264_Baseline_3_2@^{__CFString=}$kVTProfileLevel_H264_Baseline_4_0@^{__CFString=}$kVTProfileLevel_H264_Baseline_4_1@^{__CFString=}$kVTProfileLevel_H264_Baseline_4_2@^{__CFString=}$kVTProfileLevel_H264_Baseline_5_0@^{__CFString=}$kVTProfileLevel_H264_Baseline_5_1@^{__CFString=}$kVTProfileLevel_H264_Baseline_5_2@^{__CFString=}$kVTProfileLevel_H264_Baseline_AutoLevel@^{__CFString=}$kVTProfileLevel_H264_Extended_5_0@^{__CFString=}$kVTProfileLevel_H264_Extended_AutoLevel@^{__CFString=}$kVTProfileLevel_H264_High_3_0@^{__CFString=}$kVTProfileLevel_H264_High_3_1@^{__CFString=}$kVTProfileLevel_H264_High_3_2@^{__CFString=}$kVTProfileLevel_H264_High_4_0@^{__CFString=}$kVTProfileLevel_H264_High_4_1@^{__CFString=}$kVTProfileLevel_H264_High_4_2@^{__CFString=}$kVTProfileLevel_H264_High_5_0@^{__CFString=}$kVTProfileLevel_H264_High_5_1@^{__CFString=}$kVTProfileLevel_H264_High_5_2@^{__CFString=}$kVTProfileLevel_H264_High_AutoLevel@^{__CFString=}$kVTProfileLevel_H264_Main_3_0@^{__CFString=}$kVTProfileLevel_H264_Main_3_1@^{__CFString=}$kVTProfileLevel_H264_Main_3_2@^{__CFString=}$kVTProfileLevel_H264_Main_4_0@^{__CFString=}$kVTProfileLevel_H264_Main_4_1@^{__CFString=}$kVTProfileLevel_H264_Main_4_2@^{__CFString=}$kVTProfileLevel_H264_Main_5_0@^{__CFString=}$kVTProfileLevel_H264_Main_5_1@^{__CFString=}$kVTProfileLevel_H264_Main_5_2@^{__CFString=}$kVTProfileLevel_H264_Main_AutoLevel@^{__CFString=}$kVTProfileLevel_HEVC_Main10_AutoLevel@^{__CFString=}$kVTProfileLevel_HEVC_Main_AutoLevel@^{__CFString=}$kVTProfileLevel_MP4V_AdvancedSimple_L0@^{__CFString=}$kVTProfileLevel_MP4V_AdvancedSimple_L1@^{__CFString=}$kVTProfileLevel_MP4V_AdvancedSimple_L2@^{__CFString=}$kVTProfileLevel_MP4V_AdvancedSimple_L3@^{__CFString=}$kVTProfileLevel_MP4V_AdvancedSimple_L4@^{__CFString=}$kVTProfileLevel_MP4V_Main_L2@^{__CFString=}$kVTProfileLevel_MP4V_Main_L3@^{__CFString=}$kVTProfileLevel_MP4V_Main_L4@^{__CFString=}$kVTProfileLevel_MP4V_Simple_L0@^{__CFString=}$kVTProfileLevel_MP4V_Simple_L1@^{__CFString=}$kVTProfileLevel_MP4V_Simple_L2@^{__CFString=}$kVTProfileLevel_MP4V_Simple_L3@^{__CFString=}$kVTPropertyDocumentationKey@^{__CFString=}$kVTPropertyReadWriteStatusKey@^{__CFString=}$kVTPropertyReadWriteStatus_ReadOnly@^{__CFString=}$kVTPropertyReadWriteStatus_ReadWrite@^{__CFString=}$kVTPropertyShouldBeSerializedKey@^{__CFString=}$kVTPropertySupportedValueListKey@^{__CFString=}$kVTPropertySupportedValueMaximumKey@^{__CFString=}$kVTPropertySupportedValueMinimumKey@^{__CFString=}$kVTPropertyTypeKey@^{__CFString=}$kVTPropertyType_Boolean@^{__CFString=}$kVTPropertyType_Enumeration@^{__CFString=}$kVTPropertyType_Number@^{__CFString=}$kVTScalingMode_CropSourceToCleanAperture@^{__CFString=}$kVTScalingMode_Letterbox@^{__CFString=}$kVTScalingMode_Normal@^{__CFString=}$kVTScalingMode_Trim@^{__CFString=}$kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder@^{__CFString=}$kVTVideoDecoderSpecification_PreferredDecoderGPURegistryID@^{__CFString=}$kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder@^
{__CFString=}$kVTVideoDecoderSpecification_RequiredDecoderGPURegistryID@^{__CFString=}$kVTVideoEncoderList_CodecName@^{__CFString=}$kVTVideoEncoderList_CodecType@^{__CFString=}$kVTVideoEncoderList_DisplayName@^{__CFString=}$kVTVideoEncoderList_EncoderID@^{__CFString=}$kVTVideoEncoderList_EncoderName@^{__CFString=}$kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder@^{__CFString=}$kVTVideoEncoderSpecification_EncoderID@^{__CFString=}$kVTVideoEncoderSpecification_RequireHardwareAcceleratedVideoEncoder@^{__CFString=}$''' -enums = '''$kVTAllocationFailedErr@-12904$kVTColorCorrectionImageRotationFailedErr@-12219$kVTColorCorrectionPixelTransferFailedErr@-12212$kVTColorSyncTransformConvertFailedErr@-12919$kVTCompressionSessionBeginFinalPass@1$kVTCouldNotCreateColorCorrectionDataErr@-12918$kVTCouldNotCreateInstanceErr@-12907$kVTCouldNotFindTemporalFilterErr@-12217$kVTCouldNotFindVideoDecoderErr@-12906$kVTCouldNotFindVideoEncoderErr@-12908$kVTDecodeFrame_1xRealTimePlayback@4$kVTDecodeFrame_DoNotOutputFrame@2$kVTDecodeFrame_EnableAsynchronousDecompression@1$kVTDecodeFrame_EnableTemporalProcessing@8$kVTDecodeInfo_Asynchronous@1$kVTDecodeInfo_FrameDropped@2$kVTDecodeInfo_ImageBufferModifiable@4$kVTEncodeInfo_Asynchronous@1$kVTEncodeInfo_FrameDropped@2$kVTFormatDescriptionChangeNotSupportedErr@-12916$kVTFrameSiloInvalidTimeRangeErr@-12216$kVTFrameSiloInvalidTimeStampErr@-12215$kVTImageRotationNotSupportedErr@-12914$kVTInsufficientSourceColorDataErr@-12917$kVTInvalidSessionErr@-12903$kVTMultiPassStorageIdentifierMismatchErr@-12213$kVTMultiPassStorageInvalidErr@-12214$kVTParameterErr@-12902$kVTPixelTransferNotPermittedErr@-12218$kVTPixelTransferNotSupportedErr@-12905$kVTPropertyNotSupportedErr@-12900$kVTPropertyReadOnlyErr@-12901$kVTUnlimitedFrameDelayCount@-1$kVTVideoDecoderAuthorizationErr@-12210$kVTVideoDecoderBadDataErr@-12909$kVTVideoDecoderMalfunctionErr@-12911$kVTVideoDecoderNotAvailableNowErr@-12913$kVTVideoDecoderRemovedErr@-17690$kVTVideoDecoderUnsupportedDataFormatErr@-12910$kVTVideoEncoderAuthorizationErr@-12211$kVTVideoEncoderMalfunctionErr@-12912$kVTVideoEncoderNotAvailableNowErr@-12915$''' -misc.update({}) -functions={'VTPixelTransferSessionCreate': (sel32or64(b'l^{__CFAllocator=}^^{OpaqueVTPixelTransferSession=}', b'i^{__CFAllocator=}^^{OpaqueVTPixelTransferSession=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTCreateCGImageFromCVPixelBuffer': (sel32or64(b'l^{__CVBuffer=}^{__CFDictionary=}^^{CGImage=}', b'i^{__CVBuffer=}^{__CFDictionary=}^^{CGImage=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTMultiPassStorageGetTypeID': (sel32or64(b'L', b'Q'),), 'VTDecompressionSessionFinishDelayedFrames': (sel32or64(b'l^{OpaqueVTDecompressionSession=}', b'i^{OpaqueVTDecompressionSession=}'),), 'VTSessionCopySupportedPropertyDictionary': (sel32or64(b'l@^^{__CFDictionary=}', b'i@^^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTSessionSetProperty': (sel32or64(b'l@^{__CFString=}@', b'i@^{__CFString=}@'),), 'VTPixelTransferSessionTransferImage': (sel32or64(b'l^{OpaqueVTPixelTransferSession=}^{__CVBuffer=}^{__CVBuffer=}', b'i^{OpaqueVTPixelTransferSession=}^{__CVBuffer=}^{__CVBuffer=}'),), 'VTFrameSiloGetProgressOfCurrentPass': (sel32or64(b'l^{OpaqueVTFrameSilo=}^f', b'i^{OpaqueVTFrameSilo=}^f'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 
'VTMultiPassStorageClose': (sel32or64(b'l^{OpaqueVTMultiPassStorage=}', b'i^{OpaqueVTMultiPassStorage=}'),), 'VTIsHardwareDecodeSupported': (sel32or64(b'ZL', b'ZI'),), 'VTCompressionSessionPrepareToEncodeFrames': (sel32or64(b'l^{OpaqueVTCompressionSession=}', b'i^{OpaqueVTCompressionSession=}'),), 'VTCompressionSessionInvalidate': (b'v^{OpaqueVTCompressionSession=}',), 'VTCompressionSessionCompleteFrames': (sel32or64(b'l^{OpaqueVTCompressionSession=}{_CMTime=qiIq}', b'i^{OpaqueVTCompressionSession=}{_CMTime=qiIq}'),), 'VTFrameSiloCallBlockForEachSampleBuffer': (sel32or64(b'l^{OpaqueVTFrameSilo=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}@?', b'i^{OpaqueVTFrameSilo=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}@?'), '', {'arguments': {2: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '^{opaqueCMSampleBuffer=}'}}}, 'block': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^{opaqueCMSampleBuffer=}'}}}}}}), 'VTSessionCopyProperty': (sel32or64(b'l@^{__CFString=}^{__CFAllocator=}^@', b'i@^{__CFString=}^{__CFAllocator=}^@'), '', {'retval': {'already_cfretained': True}, 'arguments': {3: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTFrameSiloAddSampleBuffer': (sel32or64(b'l^{OpaqueVTFrameSilo=}^{opaqueCMSampleBuffer=}', b'i^{OpaqueVTFrameSilo=}^{opaqueCMSampleBuffer=}'),), 'VTDecompressionSessionDecodeFrame': (sel32or64(b'l^{OpaqueVTDecompressionSession=}^{opaqueCMSampleBuffer=}I^v^L', b'i^{OpaqueVTDecompressionSession=}^{opaqueCMSampleBuffer=}I^v^I'), '', {'arguments': {4: {'type_modifier': 'o'}}}), 'VTRegisterProfessionalVideoWorkflowVideoEncoders': (b'v',), 'VTCompressionSessionEncodeFrameWithOutputHandler': (sel32or64(b'l^{OpaqueVTCompressionSession=}^{__CVBuffer=}{_CMTime=qiIq}{_CMTime=qiIq}^{__CFDictionary=}^L@?', b'i^{OpaqueVTCompressionSession=}^{__CVBuffer=}{_CMTime=qiIq}{_CMTime=qiIq}^{__CFDictionary=}^I@?'), '', {'arguments': {5: {'type_modifier': 'o'}, 6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'i'}, 2: {'type': [u'I', u'Q']}, 3: {'type': '^{opaqueCMSampleBuffer=}'}}}}}}), 'VTPixelTransferSessionGetTypeID': (sel32or64(b'L', b'Q'),), 'VTCompressionSessionGetTypeID': (sel32or64(b'L', b'Q'),), 'VTCompressionSessionGetPixelBufferPool': (b'^{__CVPixelBufferPool=}^{OpaqueVTCompressionSession=}',), 'VTFrameSiloCallFunctionForEachSampleBuffer': (sel32or64(b'l^{OpaqueVTFrameSilo=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^v^?', b'i^{OpaqueVTFrameSilo=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^v^?'), '', {'arguments': {3: {'callable': {'retval': {'type': b'i'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^{opaqueCMSampleBuffer=}'}}}, 'callable_retained': False}}}), 'VTDecompressionSessionCopyBlackPixelBuffer': (sel32or64(b'l^{OpaqueVTDecompressionSession=}^^{__CVBuffer=}', b'i^{OpaqueVTDecompressionSession=}^^{__CVBuffer=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTDecompressionSessionCanAcceptFormatDescription': (b'Z^{OpaqueVTDecompressionSession=}^{opaqueCMFormatDescription=}',), 'VTCopyVideoEncoderList': (sel32or64(b'l^{__CFDictionary=}^^{__CFArray=}', b'i^{__CFDictionary=}^^{__CFArray=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {1: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTMultiPassStorageCreate': (sel32or64(b'l^{__CFAllocator=}^{__CFURL=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^{__CFDictionary=}^^{OpaqueVTMultiPassStorage=}', 
b'i^{__CFAllocator=}^{__CFURL=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^{__CFDictionary=}^^{OpaqueVTMultiPassStorage=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTDecompressionSessionGetTypeID': (sel32or64(b'L', b'Q'),), 'VTCompressionSessionBeginPass': (sel32or64(b'l^{OpaqueVTCompressionSession=}I^I', b'i^{OpaqueVTCompressionSession=}I^I'),), 'VTSessionSetProperties': (sel32or64(b'l@^{__CFDictionary=}', b'i@^{__CFDictionary=}'),), 'VTDecompressionSessionWaitForAsynchronousFrames': (sel32or64(b'l^{OpaqueVTDecompressionSession=}', b'i^{OpaqueVTDecompressionSession=}'),), 'VTFrameSiloSetTimeRangesForNextPass': (sel32or64(b'l^{OpaqueVTFrameSilo=}l^{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}', b'i^{OpaqueVTFrameSilo=}q^{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}'), '', {'arguments': {2: {'c_array_length_in_arg': 1, 'type_modifier': 'n'}}}), 'VTRegisterProfessionalVideoWorkflowVideoDecoders': (b'v',), 'VTPixelTransferSessionInvalidate': (b'v^{OpaqueVTPixelTransferSession=}',), 'VTDecompressionSessionDecodeFrameWithOutputHandler': (sel32or64(b'l^{OpaqueVTDecompressionSession=}^{opaqueCMSampleBuffer=}I^L@?', b'i^{OpaqueVTDecompressionSession=}^{opaqueCMSampleBuffer=}I^I@?'), '', {'arguments': {3: {'type_modifier': 'o'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'i'}, 2: {'type': 'I'}, 3: {'type': '^{__CVBuffer=}'}, 4: {'type': '{_CMTime=qiIq}'}, 5: {'type': '{_CMTime=qiIq}'}}}}}}), 'VTCompressionSessionEndPass': (sel32or64(b'l^{OpaqueVTCompressionSession=}^Z^I', b'i^{OpaqueVTCompressionSession=}^Z^I'), '', {'arguments': {1: {'type_modifier': 'o'}}}), 'VTCompressionSessionEncodeFrame': (sel32or64(b'l^{OpaqueVTCompressionSession=}^{__CVBuffer=}{_CMTime=qiIq}{_CMTime=qiIq}^{__CFDictionary=}^v^L', b'i^{OpaqueVTCompressionSession=}^{__CVBuffer=}{_CMTime=qiIq}{_CMTime=qiIq}^{__CFDictionary=}^v^I'), '', {'arguments': {6: {'type_modifier': 'o'}}}), 'VTFrameSiloCreate': (sel32or64(b'l^{__CFAllocator=}^{__CFURL=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^{__CFDictionary=}^^{OpaqueVTFrameSilo=}', b'i^{__CFAllocator=}^{__CFURL=}{_CMTimeRange={_CMTime=qiIq}{_CMTime=qiIq}}^{__CFDictionary=}^^{OpaqueVTFrameSilo=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTCompressionSessionCreate': (sel32or64(b'l^{__CFAllocator=}iiL^{__CFDictionary=}^{__CFDictionary=}^{__CFAllocator=}^?^v^^{OpaqueVTCompressionSession=}', b'i^{__CFAllocator=}iiI^{__CFDictionary=}^{__CFDictionary=}^{__CFAllocator=}^?^v^^{OpaqueVTCompressionSession=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {9: {'already_cfretained': True, 'type_modifier': 'o'}, 7: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'^v'}, 2: {'type': b'i'}, 3: {'type': b'I'}, 4: {'type': b'^{opaqueCMSampleBuffer=}'}}}}}}), 'VTCopySupportedPropertyDictionaryForEncoder': (sel32or64(b'liiL^{__CFDictionary=}^^{__CFString=}^^{__CFDictionary=}', b'iiiI^{__CFDictionary=}^^{__CFString=}^^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {4: {'already_cfretained': True, 'type_modifier': 'o'}, 5: {'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTFrameSiloGetTypeID': (sel32or64(b'L', b'Q'),), 'VTSessionCopySerializableProperties': (sel32or64(b'l@^{__CFAllocator=}^^{__CFDictionary=}', b'i@^{__CFAllocator=}^^{__CFDictionary=}'), '', {'retval': {'already_cfretained': True}, 'arguments': {2: 
{'already_cfretained': True, 'type_modifier': 'o'}}}), 'VTDecompressionSessionInvalidate': (b'v^{OpaqueVTDecompressionSession=}',)} -aliases = {'VT_SUPPORT_COLORSYNC_PIXEL_TRANSFER': 'COREMEDIA_TRUE'} -cftypes=[('VTPixelTransferSessionRef', b'^{OpaqueVTPixelTransferSession=}', ':VTPixelTransferSessionGetTypeID', None), ('VTDecompressionSessionRef', b'^{OpaqueVTDecompressionSession=}', ':VTDecompressionSessionGetTypeID', None), ('VTFrameSiloRef', b'^{OpaqueVTFrameSilo=}', ':VTFrameSiloGetTypeID', None), ('VTSessionRef', b'^{OpaqueVTSession=}', ':VTSessionGetTypeID', None), ('VTMultiPassStorageRef', b'^{OpaqueVTMultiPassStorage=}', ':VTMultiPassStorageGetTypeID', None)] -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/VideoToolbox/_metadata.pyc b/env/lib/python2.7/site-packages/VideoToolbox/_metadata.pyc deleted file mode 100644 index 5ef5dcd2..00000000 Binary files a/env/lib/python2.7/site-packages/VideoToolbox/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Vision/_Vision.so b/env/lib/python2.7/site-packages/Vision/_Vision.so deleted file mode 100755 index 88b0e8ea..00000000 Binary files a/env/lib/python2.7/site-packages/Vision/_Vision.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Vision/__init__.py b/env/lib/python2.7/site-packages/Vision/__init__.py deleted file mode 100644 index 03b6f69f..00000000 --- a/env/lib/python2.7/site-packages/Vision/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -''' -Python mapping for the Vision framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation -import Quartz -import CoreML - -from Vision import _metadata -from Vision._Vision import * - - -sys.modules['Vision'] = mod = objc.ObjCLazyModule( - "Vision", - "com.apple.Vision", - objc.pathForFramework("/System/Library/Frameworks/Vision.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation, Quartz, CoreML)) - -import sys -del sys.modules['Vision._metadata'] diff --git a/env/lib/python2.7/site-packages/Vision/__init__.pyc b/env/lib/python2.7/site-packages/Vision/__init__.pyc deleted file mode 100644 index 4f54cbdf..00000000 Binary files a/env/lib/python2.7/site-packages/Vision/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/Vision/_metadata.py b/env/lib/python2.7/site-packages/Vision/_metadata.py deleted file mode 100644 index 0e06121d..00000000 --- a/env/lib/python2.7/site-packages/Vision/_metadata.py +++ /dev/null @@ -1,77 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Jun 6 23:11:48 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$VNBarcodeSymbologyAztec$VNBarcodeSymbologyCode128$VNBarcodeSymbologyCode39$VNBarcodeSymbologyCode39Checksum$VNBarcodeSymbologyCode39FullASCII$VNBarcodeSymbologyCode39FullASCIIChecksum$VNBarcodeSymbologyCode93$VNBarcodeSymbologyCode93i$VNBarcodeSymbologyDataMatrix$VNBarcodeSymbologyEAN13$VNBarcodeSymbologyEAN8$VNBarcodeSymbologyI2of5$VNBarcodeSymbologyI2of5Checksum$VNBarcodeSymbologyITF14$VNBarcodeSymbologyPDF417$VNBarcodeSymbologyQR$VNBarcodeSymbologyUPCE$VNImageOptionCameraIntrinsics$VNImageOptionProperties$VNNormalizedIdentityRect@{CGRect={CGPoint=dd}{CGSize=dd}}$VNVisionVersionNumber@d$''' -enums = '''$VNCoreMLRequestRevision1@1$VNDetectBarcodesRequestRevision1@1$VNDetectFaceLandmarksRequestRevision1@1$VNDetectFaceLandmarksRequestRevision2@2$VNDetectFaceRectanglesRequestRevision1@1$VNDetectFaceRectanglesRequestRevision2@2$VNDetectHorizonRequestRevision1@1$VNDetectRectanglesRequestRevision1@1$VNDetectTextRectanglesRequestRevision1@1$VNErrorIOError@6$VNErrorInternalError@9$VNErrorInvalidArgument@14$VNErrorInvalidFormat@2$VNErrorInvalidImage@13$VNErrorInvalidModel@15$VNErrorInvalidOperation@12$VNErrorInvalidOption@5$VNErrorMissingOption@7$VNErrorNotImplemented@8$VNErrorOK@0$VNErrorOperationFailed@3$VNErrorOutOfBoundsError@4$VNErrorOutOfMemory@10$VNErrorRequestCancelled@1$VNErrorUnknownError@11$VNErrorUnsupportedRevision@16$VNHomographicImageRegistrationRequestRevision1@1$VNImageCropAndScaleOptionCenterCrop@0$VNImageCropAndScaleOptionScaleFill@2$VNImageCropAndScaleOptionScaleFit@1$VNRequestRevisionUnspecified@0$VNRequestTrackingLevelAccurate@0$VNRequestTrackingLevelFast@1$VNTrackObjectRequestRevision1@1$VNTrackRectangleRequestRevision1@1$VNTranslationalImageRegistrationRequestRevision1@1$''' -misc.update({}) -functions={'VNImageRectForNormalizedRect': (b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}LL',), 'VNNormalizedRectIsIdentityRect': (b'B{CGRect={CGPoint=dd}{CGSize=dd}}',), 'VNImagePointForNormalizedPoint': (b'{CGPoint=dd}{CGPoint=dd}LL',), 'VNNormalizedRectForImageRect': (b'{CGRect={CGPoint=dd}{CGSize=dd}}{CGRect={CGPoint=dd}{CGSize=dd}}LL',)} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'NSObject', b'requestRevision', {'retval': {'type': 'Q'}}) - r(b'VNCoreMLModel', b'modelForMLModel:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'VNCoreMLRequest', b'initWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNCoreMLRequest', b'initWithModel:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNDetectTextRectanglesRequest', b'reportCharacterBoxes', {'retval': {'type': 'Z'}}) - r(b'VNDetectTextRectanglesRequest', b'setReportCharacterBoxes:', {'arguments': {2: {'type': 'Z'}}}) - r(b'VNFaceLandmarkRegion2D', b'normalizedPoints', {'retval': {'c_array_of_variable_length': True}}) - r(b'VNFaceLandmarkRegion2D', b'pointsInImageOfSize:', {'retval': {'c_array_of_variable_length': True}}) - r(b'VNImageRequestHandler', b'performRequests:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'VNRequest', b'completionHandler', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}) - r(b'VNRequest', b'initWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': 
b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNRequest', b'preferBackgroundProcessing', {'retval': {'type': 'Z'}}) - r(b'VNRequest', b'setPreferBackgroundProcessing:', {'arguments': {2: {'type': 'Z'}}}) - r(b'VNRequest', b'setUsesCPUOnly:', {'arguments': {2: {'type': 'Z'}}}) - r(b'VNRequest', b'usesCPUOnly', {'retval': {'type': 'Z'}}) - r(b'VNSequenceRequestHandler', b'performRequests:onCGImage:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'VNSequenceRequestHandler', b'performRequests:onCGImage:orientation:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'VNSequenceRequestHandler', b'performRequests:onCIImage:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'VNSequenceRequestHandler', b'performRequests:onCIImage:orientation:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'VNSequenceRequestHandler', b'performRequests:onCVPixelBuffer:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'VNSequenceRequestHandler', b'performRequests:onCVPixelBuffer:orientation:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'VNSequenceRequestHandler', b'performRequests:onImageData:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'VNSequenceRequestHandler', b'performRequests:onImageData:orientation:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'VNSequenceRequestHandler', b'performRequests:onImageURL:error:', {'retval': {'type': 'Z'}, 'arguments': {4: {'type_modifier': b'o'}}}) - r(b'VNSequenceRequestHandler', b'performRequests:onImageURL:orientation:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}}) - r(b'VNTargetedImageRequest', b'initWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedCGImage:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedCGImage:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedCGImage:orientation:options:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedCIImage:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedCIImage:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedCIImage:orientation:options:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedCVPixelBuffer:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 
2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedCVPixelBuffer:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedCVPixelBuffer:orientation:options:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedImageData:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedImageData:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedImageData:orientation:options:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedImageURL:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedImageURL:options:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTargetedImageRequest', b'initWithTargetedImageURL:orientation:options:completionHandler:', {'arguments': {5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTrackObjectRequest', b'initWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTrackObjectRequest', b'initWithDetectedObjectObservation:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTrackRectangleRequest', b'initWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTrackRectangleRequest', b'initWithRectangleObservation:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTrackingRequest', b'initWithCompletionHandler:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'VNTrackingRequest', b'isLastFrame', {'retval': {'type': 'Z'}}) - r(b'VNTrackingRequest', b'setLastFrame:', {'arguments': {2: {'type': 'Z'}}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/Vision/_metadata.pyc b/env/lib/python2.7/site-packages/Vision/_metadata.pyc deleted file mode 100644 index 8de99cc6..00000000 Binary files a/env/lib/python2.7/site-packages/Vision/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/WebKit/_WebKit.so b/env/lib/python2.7/site-packages/WebKit/_WebKit.so deleted file mode 100755 index 8baa9e28..00000000 Binary files 
a/env/lib/python2.7/site-packages/WebKit/_WebKit.so and /dev/null differ
diff --git a/env/lib/python2.7/site-packages/WebKit/__init__.py b/env/lib/python2.7/site-packages/WebKit/__init__.py
deleted file mode 100644
index eccecd2b..00000000
--- a/env/lib/python2.7/site-packages/WebKit/__init__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-'''
-Python mapping for the WebKit framework.
-
-This module does not contain docstrings for the wrapped code, check Apple's
-documentation for details on how to use these functions and classes.
-'''
-import objc
-import sys
-import Foundation
-
-from WebKit import _metadata
-from WebKit._WebKit import *
-
-objc.addConvenienceForBasicSequence('WebScriptObject', True)
-
-sys.modules['WebKit'] = mod = objc.ObjCLazyModule(
-    "WebKit",
-    "com.apple.WebKit",
-    objc.pathForFramework("/System/Library/Frameworks/WebKit.framework"),
-    _metadata.__dict__, None, {
-        '__doc__': __doc__,
-        'objc': objc,
-        '__path__': __path__,
-        '__loader__': globals().get('__loader__', None),
-    }, (Foundation,))
-
-import sys
-del sys.modules['WebKit._metadata']
-
diff --git a/env/lib/python2.7/site-packages/WebKit/__init__.pyc b/env/lib/python2.7/site-packages/WebKit/__init__.pyc
deleted file mode 100644
index 83099606..00000000
Binary files a/env/lib/python2.7/site-packages/WebKit/__init__.pyc and /dev/null differ
diff --git a/env/lib/python2.7/site-packages/WebKit/_metadata.py b/env/lib/python2.7/site-packages/WebKit/_metadata.py
deleted file mode 100644
index 79dcd1d7..00000000
--- a/env/lib/python2.7/site-packages/WebKit/_metadata.py
+++ /dev/null
@@ -1,484 +0,0 @@
-# This file is generated by objective.metadata
-#
-# Last update: Wed Feb 20 11:33:49 2019
-
-import objc, sys
-
-if sys.maxsize > 2 ** 32:
-    def sel32or64(a, b): return b
-else:
-    def sel32or64(a, b): return a
-if sys.byteorder == 'little':
-    def littleOrBig(a, b): return a
-else:
-    def littleOrBig(a, b): return b
-
-misc = {
-}
-misc.update({'WebPreferencesPrivate': objc.createStructType('WebPreferencesPrivate', b'{WebPreferencesPrivate=}', []), 'DOMObjectInternal': objc.createStructType('DOMObjectInternal', b'{DOMObjectInternal=}', [])})
-constants = 
'''$DOMEventException$DOMException$DOMRangeException$DOMXPathException$WKErrorDomain$WKWebsiteDataTypeCookies$WKWebsiteDataTypeDiskCache$WKWebsiteDataTypeFetchCache$WKWebsiteDataTypeIndexedDBDatabases$WKWebsiteDataTypeLocalStorage$WKWebsiteDataTypeMemoryCache$WKWebsiteDataTypeOfflineWebApplicationCache$WKWebsiteDataTypeServiceWorkerRegistrations$WKWebsiteDataTypeSessionStorage$WKWebsiteDataTypeWebSQLDatabases$WebActionButtonKey$WebActionElementKey$WebActionModifierFlagsKey$WebActionNavigationTypeKey$WebActionOriginalURLKey$WebArchivePboardType$WebElementDOMNodeKey$WebElementFrameKey$WebElementImageAltStringKey$WebElementImageKey$WebElementImageRectKey$WebElementImageURLKey$WebElementIsSelectedKey$WebElementLinkLabelKey$WebElementLinkTargetFrameKey$WebElementLinkTitleKey$WebElementLinkURLKey$WebHistoryAllItemsRemovedNotification$WebHistoryItemChangedNotification$WebHistoryItemsAddedNotification$WebHistoryItemsKey$WebHistoryItemsRemovedNotification$WebHistoryLoadedNotification$WebHistorySavedNotification$WebKitErrorDomain$WebKitErrorMIMETypeKey$WebKitErrorPlugInNameKey$WebKitErrorPlugInPageURLStringKey$WebPlugInAttributesKey$WebPlugInBaseURLKey$WebPlugInContainerKey$WebPlugInContainingElementKey$WebPlugInShouldLoadMainResourceKey$WebPreferencesChangedNotification$WebViewDidBeginEditingNotification$WebViewDidChangeNotification$WebViewDidChangeSelectionNotification$WebViewDidChangeTypingStyleNotification$WebViewDidEndEditingNotification$WebViewProgressEstimateChangedNotification$WebViewProgressFinishedNotification$WebViewProgressStartedNotification$''' -enums = '''$DOM_ADDITION@2$DOM_ALLOW_KEYBOARD_INPUT@1$DOM_ANY_TYPE@0$DOM_ANY_UNORDERED_NODE_TYPE@8$DOM_ATTRIBUTE_NODE@2$DOM_AT_TARGET@2$DOM_BAD_BOUNDARYPOINTS_ERR@1$DOM_BOOLEAN_TYPE@3$DOM_BOTH@2$DOM_BUBBLING_PHASE@3$DOM_CAPTURING_PHASE@1$DOM_CDATA_SECTION_NODE@4$DOM_CHARSET_RULE@2$DOM_COMMENT_NODE@8$DOM_CSS_ATTR@22$DOM_CSS_CM@6$DOM_CSS_COUNTER@23$DOM_CSS_CUSTOM@3$DOM_CSS_DEG@11$DOM_CSS_DIMENSION@18$DOM_CSS_EMS@3$DOM_CSS_EXS@4$DOM_CSS_GRAD@13$DOM_CSS_HZ@16$DOM_CSS_IDENT@21$DOM_CSS_IN@8$DOM_CSS_INHERIT@0$DOM_CSS_KHZ@17$DOM_CSS_MM@7$DOM_CSS_MS@14$DOM_CSS_NUMBER@1$DOM_CSS_PC@10$DOM_CSS_PERCENTAGE@2$DOM_CSS_PRIMITIVE_VALUE@1$DOM_CSS_PT@9$DOM_CSS_PX@5$DOM_CSS_RAD@12$DOM_CSS_RECT@24$DOM_CSS_RGBCOLOR@25$DOM_CSS_S@15$DOM_CSS_STRING@19$DOM_CSS_UNKNOWN@0$DOM_CSS_URI@20$DOM_CSS_VALUE_LIST@2$DOM_CSS_VH@27$DOM_CSS_VMAX@29$DOM_CSS_VMIN@28$DOM_CSS_VW@26$DOM_DOCUMENT_FRAGMENT_NODE@11$DOM_DOCUMENT_NODE@9$DOM_DOCUMENT_POSITION_CONTAINED_BY@16$DOM_DOCUMENT_POSITION_CONTAINS@8$DOM_DOCUMENT_POSITION_DISCONNECTED@1$DOM_DOCUMENT_POSITION_FOLLOWING@4$DOM_DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC@32$DOM_DOCUMENT_POSITION_PRECEDING@2$DOM_DOCUMENT_TYPE_NODE@10$DOM_DOMSTRING_SIZE_ERR@2$DOM_DOM_DELTA_LINE@1$DOM_DOM_DELTA_PAGE@2$DOM_DOM_DELTA_PIXEL@0$DOM_ELEMENT_NODE@1$DOM_END_TO_END@2$DOM_END_TO_START@3$DOM_ENTITY_NODE@6$DOM_ENTITY_REFERENCE_NODE@5$DOM_FILTER_ACCEPT@1$DOM_FILTER_REJECT@2$DOM_FILTER_SKIP@3$DOM_FIRST_ORDERED_NODE_TYPE@9$DOM_FONT_FACE_RULE@5$DOM_HIERARCHY_REQUEST_ERR@3$DOM_HORIZONTAL@0$DOM_IMPORT_RULE@3$DOM_INDEX_SIZE_ERR@1$DOM_INUSE_ATTRIBUTE_ERR@10$DOM_INVALID_ACCESS_ERR@15$DOM_INVALID_CHARACTER_ERR@5$DOM_INVALID_EXPRESSION_ERR@51$DOM_INVALID_MODIFICATION_ERR@13$DOM_INVALID_NODE_TYPE_ERR@2$DOM_INVALID_STATE_ERR@11$DOM_KEYFRAMES_RULE@7$DOM_KEYFRAME_RULE@8$DOM_KEY_LOCATION_LEFT@1$DOM_KEY_LOCATION_NUMPAD@3$DOM_KEY_LOCATION_RIGHT@2$DOM_KEY_LOCATION_STANDARD@0$DOM_MEDIA_RULE@4$DOM_MODIFICATION@1$DOM_NAMESPACE_ERR@14$DOM_NAMESPACE_RULE@10$DOM_NODE_AFTER@1$DOM_NODE_
BEFORE@0$DOM_NODE_BEFORE_AND_AFTER@2$DOM_NODE_INSIDE@3$DOM_NONE@0$DOM_NOTATION_NODE@12$DOM_NOT_FOUND_ERR@8$DOM_NOT_SUPPORTED_ERR@9$DOM_NO_DATA_ALLOWED_ERR@6$DOM_NO_MODIFICATION_ALLOWED_ERR@7$DOM_NUMBER_TYPE@1$DOM_ORDERED_NODE_ITERATOR_TYPE@5$DOM_ORDERED_NODE_SNAPSHOT_TYPE@7$DOM_PAGE_RULE@6$DOM_PROCESSING_INSTRUCTION_NODE@7$DOM_REMOVAL@3$DOM_SHOW_ALL@4294967295$DOM_SHOW_ATTRIBUTE@2$DOM_SHOW_CDATA_SECTION@8$DOM_SHOW_COMMENT@128$DOM_SHOW_DOCUMENT@256$DOM_SHOW_DOCUMENT_FRAGMENT@1024$DOM_SHOW_DOCUMENT_TYPE@512$DOM_SHOW_ELEMENT@1$DOM_SHOW_ENTITY@32$DOM_SHOW_ENTITY_REFERENCE@16$DOM_SHOW_NOTATION@2048$DOM_SHOW_PROCESSING_INSTRUCTION@64$DOM_SHOW_TEXT@4$DOM_START_TO_END@1$DOM_START_TO_START@0$DOM_STRING_TYPE@2$DOM_STYLE_RULE@1$DOM_SUPPORTS_RULE@12$DOM_SYNTAX_ERR@12$DOM_TEXT_NODE@3$DOM_TYPE_ERR@52$DOM_UNKNOWN_RULE@0$DOM_UNORDERED_NODE_ITERATOR_TYPE@4$DOM_UNORDERED_NODE_SNAPSHOT_TYPE@6$DOM_UNSPECIFIED_EVENT_TYPE_ERR@0$DOM_VARIABLES_RULE@7$DOM_VERTICAL@1$DOM_WEBKIT_KEYFRAMES_RULE@7$DOM_WEBKIT_KEYFRAME_RULE@8$DOM_WEBKIT_REGION_RULE@16$DOM_WRONG_DOCUMENT_ERR@4$WKAudiovisualMediaTypeAll@18446744073709551615$WKAudiovisualMediaTypeAudio@1$WKAudiovisualMediaTypeNone@0$WKAudiovisualMediaTypeVideo@2$WKErrorContentRuleListStoreCompileFailed@6$WKErrorContentRuleListStoreLookUpFailed@7$WKErrorContentRuleListStoreRemoveFailed@8$WKErrorContentRuleListStoreVersionMismatch@9$WKErrorJavaScriptExceptionOccurred@4$WKErrorJavaScriptResultTypeIsUnsupported@5$WKErrorUnknown@1$WKErrorWebContentProcessTerminated@2$WKErrorWebViewInvalidated@3$WKNavigationActionPolicyAllow@1$WKNavigationActionPolicyCancel@0$WKNavigationResponsePolicyAllow@1$WKNavigationResponsePolicyCancel@0$WKNavigationTypeBackForward@2$WKNavigationTypeFormResubmitted@4$WKNavigationTypeFormSubmitted@1$WKNavigationTypeLinkActivated@0$WKNavigationTypeOther@-1$WKNavigationTypeReload@3$WKUserInterfaceDirectionPolicyContent@0$WKUserInterfaceDirectionPolicySystem@1$WKUserScriptInjectionTimeAtDocumentEnd@1$WKUserScriptInjectionTimeAtDocumentStart@0$WebCacheModelDocumentBrowser@1$WebCacheModelDocumentViewer@0$WebCacheModelPrimaryWebBrowser@2$WebDragDestinationActionAny@4294967295$WebDragDestinationActionDHTML@1$WebDragDestinationActionEdit@2$WebDragDestinationActionLoad@4$WebDragDestinationActionNone@0$WebDragSourceActionAny@4294967295$WebDragSourceActionDHTML@1$WebDragSourceActionImage@2$WebDragSourceActionLink@4$WebDragSourceActionNone@0$WebDragSourceActionSelection@8$WebJNIReturnTypeBoolean@3$WebJNIReturnTypeByte@4$WebJNIReturnTypeChar@5$WebJNIReturnTypeDouble@10$WebJNIReturnTypeFloat@9$WebJNIReturnTypeInt@7$WebJNIReturnTypeInvalid@0$WebJNIReturnTypeLong@8$WebJNIReturnTypeObject@2$WebJNIReturnTypeShort@6$WebJNIReturnTypeVoid@1$WebKitErrorBlockedPlugInVersion@203$WebKitErrorCannotFindPlugIn@200$WebKitErrorCannotLoadPlugIn@201$WebKitErrorCannotShowMIMEType@100$WebKitErrorCannotShowURL@101$WebKitErrorFrameLoadInterruptedByPolicyChange@102$WebKitErrorJavaUnavailable@202$WebMenuItemPDFActualSize@24$WebMenuItemPDFAutoSize@27$WebMenuItemPDFContinuous@30$WebMenuItemPDFFacingPages@29$WebMenuItemPDFNextPage@31$WebMenuItemPDFPreviousPage@32$WebMenuItemPDFSinglePage@28$WebMenuItemPDFZoomIn@25$WebMenuItemPDFZoomOut@26$WebMenuItemTagCopy@8$WebMenuItemTagCopyImageToClipboard@6$WebMenuItemTagCopyLinkToClipboard@3$WebMenuItemTagCut@13$WebMenuItemTagDownloadImageToDisk@5$WebMenuItemTagDownloadLinkToDisk@2$WebMenuItemTagGoBack@9$WebMenuItemTagGoForward@10$WebMenuItemTagIgnoreSpelling@17$WebMenuItemTagLearnSpelling@18$WebMenuItemTagLookUpInDictionary@22$WebMenuItemTagNoGuessesFound@16
$WebMenuItemTagOpenFrameInNewWindow@7$WebMenuItemTagOpenImageInNewWindow@4$WebMenuItemTagOpenLinkInNewWindow@1$WebMenuItemTagOpenWithDefaultApplication@23$WebMenuItemTagOther@19$WebMenuItemTagPaste@14$WebMenuItemTagReload@12$WebMenuItemTagSearchInSpotlight@20$WebMenuItemTagSearchWeb@21$WebMenuItemTagSpellingGuess@15$WebMenuItemTagStop@11$WebNavigationTypeBackForward@2$WebNavigationTypeFormResubmitted@4$WebNavigationTypeFormSubmitted@1$WebNavigationTypeLinkClicked@0$WebNavigationTypeOther@5$WebNavigationTypeReload@3$WebViewInsertActionDropped@2$WebViewInsertActionPasted@1$WebViewInsertActionTyped@0$''' -misc.update({'WK_API_ENABLED': sel32or64(0, 1)}) -misc.update({}) -functions={'WebInitForCarbon': (b'v',), 'WebConvertNSImageToCGImageRef': (b'^{CGImage=}@',)} -aliases = {'WebNSUInteger': 'NSUInteger', 'WKAudiovisualMediaTypeAll': 'NSUIntegerMax', 'WebNSInteger': 'NSInteger'} -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'DOMAttr', b'specified', {'retval': {'type': 'Z'}}) - r(b'DOMCSSStyleDeclaration', b'isPropertyImplicit:', {'retval': {'type': 'Z'}}) - r(b'DOMDocument', b'createNodeIterator::::', {'arguments': {5: {'type': 'Z'}}}) - r(b'DOMDocument', b'createNodeIterator:whatToShow:filter:expandEntityReferences:', {'arguments': {5: {'type': 'Z'}}}) - r(b'DOMDocument', b'createTreeWalker::::', {'arguments': {5: {'type': 'Z'}}}) - r(b'DOMDocument', b'createTreeWalker:whatToShow:filter:expandEntityReferences:', {'arguments': {5: {'type': 'Z'}}}) - r(b'DOMDocument', b'execCommand:', {'retval': {'type': 'Z'}}) - r(b'DOMDocument', b'execCommand:userInterface:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}}}) - r(b'DOMDocument', b'execCommand:userInterface:value:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}}}) - r(b'DOMDocument', b'getMatchedCSSRules:pseudoElement:authorOnly:', {'arguments': {4: {'type': 'Z'}}}) - r(b'DOMDocument', b'hasFocus', {'retval': {'type': b'Z'}}) - r(b'DOMDocument', b'importNode::', {'arguments': {3: {'type': 'Z'}}}) - r(b'DOMDocument', b'importNode:deep:', {'arguments': {3: {'type': 'Z'}}}) - r(b'DOMDocument', b'queryCommandEnabled:', {'retval': {'type': 'Z'}}) - r(b'DOMDocument', b'queryCommandIndeterm:', {'retval': {'type': 'Z'}}) - r(b'DOMDocument', b'queryCommandState:', {'retval': {'type': 'Z'}}) - r(b'DOMDocument', b'queryCommandSupported:', {'retval': {'type': 'Z'}}) - r(b'DOMDocument', b'setXmlStandalone:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMDocument', b'xmlStandalone', {'retval': {'type': 'Z'}}) - r(b'DOMElement', b'contains:', {'retval': {'type': 'Z'}}) - r(b'DOMElement', b'hasAttribute:', {'retval': {'type': 'Z'}}) - r(b'DOMElement', b'hasAttributeNS::', {'retval': {'type': 'Z'}}) - r(b'DOMElement', b'hasAttributeNS:localName:', {'retval': {'type': 'Z'}}) - r(b'DOMElement', b'scrollIntoView:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMElement', b'scrollIntoViewIfNeeded:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMEvent', b'bubbles', {'retval': {'type': 'Z'}}) - r(b'DOMEvent', b'cancelBubble', {'retval': {'type': 'Z'}}) - r(b'DOMEvent', b'cancelable', {'retval': {'type': 'Z'}}) - r(b'DOMEvent', b'initEvent:::', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}}}) - r(b'DOMEvent', b'initEvent:canBubbleArg:cancelableArg:', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}}}) - r(b'DOMEvent', b'returnValue', {'retval': {'type': 'Z'}}) - r(b'DOMEvent', b'setCancelBubble:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMEvent', b'setReturnValue:', {'arguments': {2: {'type': 'Z'}}}) - 
r(b'DOMHTMLAreaElement', b'noHref', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLAreaElement', b'setNoHref:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLButtonElement', b'autofocus', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLButtonElement', b'disabled', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLButtonElement', b'setAutofocus:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLButtonElement', b'setDisabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLButtonElement', b'setWillValidate:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLButtonElement', b'willValidate', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLDListElement', b'compact', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLDListElement', b'setCompact:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLDirectoryElement', b'compact', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLDirectoryElement', b'setCompact:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLDocument', b'hasFocus', {'retval': {'type': b'Z'}}) - r(b'DOMHTMLElement', b'isContentEditable', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLFrameElement', b'noResize', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLFrameElement', b'setNoResize:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLHRElement', b'noShade', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLHRElement', b'setNoShade:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLImageElement', b'complete', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLImageElement', b'isMap', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLImageElement', b'setComplete:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLImageElement', b'setIsMap:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLInputElement', b'autofocus', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLInputElement', b'checked', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLInputElement', b'defaultChecked', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLInputElement', b'disabled', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLInputElement', b'indeterminate', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLInputElement', b'multiple', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLInputElement', b'readOnly', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLInputElement', b'setAutofocus:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLInputElement', b'setChecked:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLInputElement', b'setDefaultChecked:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLInputElement', b'setDisabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLInputElement', b'setIndeterminate:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLInputElement', b'setMultiple:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLInputElement', b'setReadOnly:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLInputElement', b'willValidate', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLLinkElement', b'disabled', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLLinkElement', b'setDisabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLMenuElement', b'compact', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLMenuElement', b'setCompact:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLOListElement', b'compact', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLOListElement', b'setCompact:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLObjectElement', b'declare', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLObjectElement', b'setDeclare:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLOptGroupElement', b'disabled', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLOptGroupElement', b'setDisabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLOptionElement', b'defaultSelected', {'retval': {'type': 
'Z'}}) - r(b'DOMHTMLOptionElement', b'disabled', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLOptionElement', b'selected', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLOptionElement', b'setDefaultSelected:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLOptionElement', b'setDisabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLOptionElement', b'setSelected:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLPreElement', b'setWrap:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLPreElement', b'wrap', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLScriptElement', b'defer', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLScriptElement', b'setDefer:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLSelectElement', b'autofocus', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLSelectElement', b'disabled', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLSelectElement', b'multiple', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLSelectElement', b'setAutofocus:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLSelectElement', b'setDisabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLSelectElement', b'setMultiple:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLSelectElement', b'willValidate', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLStyleElement', b'disabled', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLStyleElement', b'setDisabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLTableCellElement', b'noWrap', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLTableCellElement', b'setNoWrap:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLTextAreaElement', b'autofocus', {'retval': {'type': b'Z'}}) - r(b'DOMHTMLTextAreaElement', b'disabled', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLTextAreaElement', b'readOnly', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLTextAreaElement', b'setAutofocus:', {'arguments': {2: {'type': b'Z'}}}) - r(b'DOMHTMLTextAreaElement', b'setDisabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLTextAreaElement', b'setReadOnly:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMHTMLTextAreaElement', b'willValidate', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLUListElement', b'compact', {'retval': {'type': 'Z'}}) - r(b'DOMHTMLUListElement', b'setCompact:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMImplementation', b'hasFeature::', {'retval': {'type': 'Z'}}) - r(b'DOMImplementation', b'hasFeature:version:', {'retval': {'type': 'Z'}}) - r(b'DOMKeyboardEvent', b'altGraphKey', {'retval': {'type': 'Z'}}) - r(b'DOMKeyboardEvent', b'altKey', {'retval': {'type': 'Z'}}) - r(b'DOMKeyboardEvent', b'ctrlKey', {'retval': {'type': 'Z'}}) - r(b'DOMKeyboardEvent', b'getModifierState:', {'retval': {'type': 'Z'}}) - r(b'DOMKeyboardEvent', b'initKeyboardEvent:canBubble:cancelable:view:keyIdentifier:keyLocation:ctrlKey:altKey:shiftKey:metaKey:', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}, 8: {'type': 'Z'}, 9: {'type': 'Z'}, 10: {'type': 'Z'}, 11: {'type': 'Z'}}}) - r(b'DOMKeyboardEvent', b'initKeyboardEvent:canBubble:cancelable:view:keyIdentifier:keyLocation:ctrlKey:altKey:shiftKey:metaKey:altGraphKey:', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}, 8: {'type': 'Z'}, 9: {'type': 'Z'}, 10: {'type': 'Z'}, 11: {'type': 'Z'}, 12: {'type': 'Z'}}}) - r(b'DOMKeyboardEvent', b'initKeyboardEvent:canBubble:cancelable:view:keyIdentifier:location:ctrlKey:altKey:shiftKey:metaKey:', {'arguments': {3: {'type': b'Z'}, 4: {'type': b'Z'}, 8: {'type': b'Z'}, 9: {'type': b'Z'}, 10: {'type': b'Z'}, 11: {'type': b'Z'}}}) - r(b'DOMKeyboardEvent', b'initKeyboardEvent:canBubble:cancelable:view:keyIdentifier:location:ctrlKey:altKey:shiftKey:metaKey:altGraphKey:', 
{'arguments': {3: {'type': b'Z'}, 4: {'type': b'Z'}, 8: {'type': b'Z'}, 9: {'type': b'Z'}, 10: {'type': b'Z'}, 11: {'type': b'Z'}, 12: {'type': b'Z'}}}) - r(b'DOMKeyboardEvent', b'metaKey', {'retval': {'type': 'Z'}}) - r(b'DOMKeyboardEvent', b'shiftKey', {'retval': {'type': 'Z'}}) - r(b'DOMMouseEvent', b'altKey', {'retval': {'type': 'Z'}}) - r(b'DOMMouseEvent', b'ctrlKey', {'retval': {'type': 'Z'}}) - r(b'DOMMouseEvent', b'initMouseEvent:::::::::::::::', {'arguments': {3: {'type': b'Z'}, 4: {'type': b'Z'}, 11: {'type': b'Z'}, 12: {'type': b'Z'}, 13: {'type': b'Z'}, 14: {'type': b'Z'}}}) - r(b'DOMMouseEvent', b'initMouseEvent:canBubble:cancelable:view:detail:screenX:screenY:clientX:clientY:ctrlKey:altKey:shiftKey:metaKey:button:relatedTarget:', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}, 11: {'type': 'Z'}, 12: {'type': 'Z'}, 13: {'type': 'Z'}, 14: {'type': 'Z'}}}) - r(b'DOMMouseEvent', b'metaKey', {'retval': {'type': 'Z'}}) - r(b'DOMMouseEvent', b'shiftKey', {'retval': {'type': 'Z'}}) - r(b'DOMMutationEvent', b'initMutationEvent::::::::', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}}}) - r(b'DOMMutationEvent', b'initMutationEvent:canBubble:cancelable:relatedNode:prevValue:newValue:attrName:attrChange:', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}}}) - r(b'DOMNode', b'cloneNode:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMNode', b'contains:', {'retval': {'type': 'Z'}}) - r(b'DOMNode', b'hasAttributes', {'retval': {'type': 'Z'}}) - r(b'DOMNode', b'hasChildNodes', {'retval': {'type': 'Z'}}) - r(b'DOMNode', b'isContentEditable', {'retval': {'type': 'Z'}}) - r(b'DOMNode', b'isDefaultNamespace:', {'retval': {'type': b'Z'}}) - r(b'DOMNode', b'isEqualNode:', {'retval': {'type': 'Z'}}) - r(b'DOMNode', b'isSameNode:', {'retval': {'type': 'Z'}}) - r(b'DOMNode', b'isSupported::', {'retval': {'type': 'Z'}}) - r(b'DOMNode', b'isSupported:version:', {'retval': {'type': 'Z'}}) - r(b'DOMNode', b'setIsContentEditable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMNodeIterator', b'expandEntityReferences', {'retval': {'type': 'Z'}}) - r(b'DOMNodeIterator', b'pointerBeforeReferenceNode', {'retval': {'type': 'Z'}}) - r(b'DOMOverflowEvent', b'horizontalOverflow', {'retval': {'type': 'Z'}}) - r(b'DOMOverflowEvent', b'initOverflowEvent:horizontalOverflow:verticalOverflow:', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}}}) - r(b'DOMOverflowEvent', b'verticalOverflow', {'retval': {'type': 'Z'}}) - r(b'DOMProgressEvent', b'lengthComputable', {'retval': {'type': b'Z'}}) - r(b'DOMRange', b'collapse:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMRange', b'collapsed', {'retval': {'type': 'Z'}}) - r(b'DOMRange', b'intersectsNode:', {'retval': {'type': 'Z'}}) - r(b'DOMRange', b'isPointInRange:offset:', {'retval': {'type': 'Z'}}) - r(b'DOMStyleSheet', b'disabled', {'retval': {'type': 'Z'}}) - r(b'DOMStyleSheet', b'setDisabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'DOMTreeWalker', b'expandEntityReferences', {'retval': {'type': 'Z'}}) - r(b'DOMUIEvent', b'initUIEvent:::::', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}}}) - r(b'DOMUIEvent', b'initUIEvent:canBubble:cancelable:view:detail:', {'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}}}) - r(b'DOMWheelEvent', b'altKey', {'retval': {'type': 'Z'}}) - r(b'DOMWheelEvent', b'ctrlKey', {'retval': {'type': 'Z'}}) - r(b'DOMWheelEvent', b'initWheelEvent:wheelDeltaY:view:screenX:screenY:clientX:clientY:ctrlKey:altKey:shiftKey:metaKey:', {'arguments': {9: {'type': b'Z'}, 10: {'type': b'Z'}, 11: {'type': b'Z'}, 12: {'type': b'Z'}}}) - 
r(b'DOMWheelEvent', b'isHorizontal', {'retval': {'type': 'Z'}}) - r(b'DOMWheelEvent', b'metaKey', {'retval': {'type': 'Z'}}) - r(b'DOMWheelEvent', b'shiftKey', {'retval': {'type': 'Z'}}) - r(b'DOMXPathResult', b'booleanValue', {'retval': {'type': 'Z'}}) - r(b'DOMXPathResult', b'invalidIteratorState', {'retval': {'type': 'Z'}}) - r(b'NSObject', b'acceptNode:', {'required': True, 'retval': {'type': 's'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'addEventListener:::', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'addEventListener:listener:useCapture:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'attributedString', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'canProvideDocumentSource', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'cancel', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'chooseFilename:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'chooseFilenames:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'cookiesDidChangeInCookieStore:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'dataSourceUpdated:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'deselectAll', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'didFailWithError:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'didFinish', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'didReceiveData:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'didReceiveResponse:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'dispatchEvent:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'documentSource', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'download', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'downloadWindowForAuthenticationSheet:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'finalizeForWebScript', {'retval': {'type': b'v'}}) - r(b'NSObject', b'finishedLoadingWithDataSource:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'handleEvent:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'ignore', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'invokeDefaultMethodWithArguments:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'invokeUndefinedMethodFromWebScript:withArguments:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'isKeyExcludedFromWebScript:', {'retval': {'type': 'Z'}, 'arguments': {2: {'c_array_delimited_by_null': True, 'type': 'n^t'}}}) - r(b'NSObject', b'isSelectorExcludedFromWebScript:', {'retval': {'type': 'Z'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'layout', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'lookupNamespaceURI:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - 
r(b'NSObject', b'objectForWebScript', {'retval': {'type': b'@'}}) - r(b'NSObject', b'plugInViewWithArguments:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'receivedData:withDataSource:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'receivedError:withDataSource:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'removeEventListener:::', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'removeEventListener:listener:useCapture:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'request', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'searchFor:direction:caseSensitive:wrap:', {'required': True, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 'Z'}, 4: {'type': 'Z'}, 5: {'type': 'Z'}}}) - r(b'NSObject', b'selectAll', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'selectedAttributedString', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'selectedString', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'setDataSource:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'setNeedsLayout:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': 'Z'}}}) - r(b'NSObject', b'string', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'supportsTextEncoding', {'required': True, 'retval': {'type': 'Z'}}) - r(b'NSObject', b'title', {'required': True, 'retval': {'type': b'@'}}) - r(b'NSObject', b'undoManagerForWebView:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'use', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'userContentController:didReceiveScriptMessage:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'viewDidMoveToHostWindow', {'required': True, 'retval': {'type': b'v'}}) - r(b'NSObject', b'viewWillMoveToHostWindow:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webFrame', {'retval': {'type': b'@'}}) - r(b'NSObject', b'webPlugInCallJava:isStatic:returnType:method:arguments:callingURL:exceptionDescription:', {'retval': {'type': sel32or64(b'(jvalue=CcSslqfd^{_jobject=})', b'(jvalue=CcSsiqfd^{_jobject=})')}, 'arguments': {2: {'type': '^{_jobject=}'}, 3: {'type': 'Z'}, 4: {'type': b'i'}, 5: {'type': '^{_jmethodID=}'}, 6: {'type': '^(jvalue=CcSsiqfd^{_jobject})'}, 7: {'type': b'@'}, 8: {'type': b'^@', 'type_modifier': b'o'}}}) - r(b'NSObject', b'webPlugInContainerLoadRequest:inFrame:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webPlugInContainerSelectionColor', {'retval': {'type': b'@'}}) - r(b'NSObject', b'webPlugInContainerShowStatus:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webPlugInDestroy', {'retval': {'type': b'v'}}) - r(b'NSObject', b'webPlugInGetApplet', {'retval': {'type': '^{_jobject=}'}}) - r(b'NSObject', b'webPlugInInitialize', {'retval': {'type': b'v'}}) - r(b'NSObject', b'webPlugInMainResourceDidFailWithError:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': 
b'@'}}}) - r(b'NSObject', b'webPlugInMainResourceDidFinishLoading', {'retval': {'type': b'v'}}) - r(b'NSObject', b'webPlugInMainResourceDidReceiveData:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webPlugInMainResourceDidReceiveResponse:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webPlugInSetIsSelected:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': 'Z'}}}) - r(b'NSObject', b'webPlugInStart', {'retval': {'type': b'v'}}) - r(b'NSObject', b'webPlugInStop', {'retval': {'type': b'v'}}) - r(b'NSObject', b'webScriptNameForKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'c_array_delimited_by_null': True, 'type': 'n^t'}}}) - r(b'NSObject', b'webScriptNameForSelector:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': ':'}}}) - r(b'NSObject', b'webView:contextMenuItemsForElement:defaultMenuItems:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:createWebViewModalDialogWithRequest:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:createWebViewWithConfiguration:forNavigationAction:windowFeatures:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'webView:createWebViewWithRequest:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:decidePolicyForMIMEType:request:frame:decisionListener:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'webView:decidePolicyForNavigationAction:decisionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}, 'type': '@?'}}}) - r(b'NSObject', b'webView:decidePolicyForNavigationAction:request:frame:decisionListener:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'webView:decidePolicyForNavigationResponse:decisionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}}}, 'type': '@?'}}}) - r(b'NSObject', b'webView:decidePolicyForNewWindowAction:request:newFrameName:decisionListener:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'webView:didCancelClientRedirectForFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:didChangeLocationWithinPageForFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:didClearWindowObject:forFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:didCommitLoadForFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': 
{2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:didCommitNavigation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:didCreateJavaScriptContext:forFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:didFailLoadWithError:forFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:didFailNavigation:withError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:didFailProvisionalLoadWithError:forFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:didFailProvisionalNavigation:withError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:didFinishLoadForFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:didFinishNavigation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:didReceiveAuthenticationChallenge:completionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': sel32or64(b'i', b'q')}, 2: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'webView:didReceiveIcon:forFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:didReceiveServerRedirectForProvisionalLoadForFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:didReceiveServerRedirectForProvisionalNavigation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:didReceiveTitle:forFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:didStartProvisionalLoadForFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:didStartProvisionalNavigation:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:doCommandBySelector:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': ':'}}}) - r(b'NSObject', b'webView:dragDestinationActionMaskForDraggingInfo:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:dragSourceActionMaskForPoint:', {'required': False, 'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}}}) - r(b'NSObject', b'webView:drawFooterInRect:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - 
r(b'NSObject', b'webView:drawHeaderInRect:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'webView:identifierForInitialRequest:fromDataSource:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:makeFirstResponder:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:mouseDidMoveOverElement:modifierFlags:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) - r(b'NSObject', b'webView:plugInFailedWithError:dataSource:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:printFrameView:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:resource:didCancelAuthenticationChallenge:fromDataSource:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'webView:resource:didFailLoadingWithError:fromDataSource:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'webView:resource:didFinishLoadingFromDataSource:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:resource:didReceiveAuthenticationChallenge:fromDataSource:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'webView:resource:didReceiveContentLength:fromDataSource:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'i', b'q')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'webView:resource:didReceiveResponse:fromDataSource:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'webView:resource:willSendRequest:redirectResponse:fromDataSource:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'webView:runBeforeUnloadConfirmPanelWithMessage:initiatedByFrame:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:runJavaScriptAlertPanelWithMessage:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:runJavaScriptAlertPanelWithMessage:initiatedByFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:runJavaScriptAlertPanelWithMessage:initiatedByFrame:completionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NSObject', 
b'webView:runJavaScriptConfirmPanelWithMessage:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:runJavaScriptConfirmPanelWithMessage:initiatedByFrame:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:runJavaScriptConfirmPanelWithMessage:initiatedByFrame:completionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NSObject', b'webView:runJavaScriptTextInputPanelWithPrompt:defaultText:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:runJavaScriptTextInputPanelWithPrompt:defaultText:initiatedByFrame:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) - r(b'NSObject', b'webView:runJavaScriptTextInputPanelWithPrompt:defaultText:initiatedByFrame:completionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}, 'type': '@?'}}}) - r(b'NSObject', b'webView:runOpenPanelForFileButtonWithResultListener:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:runOpenPanelForFileButtonWithResultListener:allowMultipleFiles:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'webView:runOpenPanelWithParameters:initiatedByFrame:completionHandler:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}, 'type': '@?'}}}) - r(b'NSObject', b'webView:setContentRect:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'webView:setFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}) - r(b'NSObject', b'webView:setResizable:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 'Z'}}}) - r(b'NSObject', b'webView:setStatusBarVisible:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 'Z'}}}) - r(b'NSObject', b'webView:setStatusText:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:setToolbarsVisible:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': 'Z'}}}) - r(b'NSObject', b'webView:shouldApplyStyle:toElementsInDOMRange:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:shouldBeginEditingInDOMRange:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', 
b'webView:shouldChangeSelectedDOMRange:toDOMRange:affinity:stillSelecting:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'I', b'Q')}, 6: {'type': 'Z'}}}) - r(b'NSObject', b'webView:shouldChangeTypingStyle:toStyle:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:shouldDeleteDOMRange:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:shouldEndEditingInDOMRange:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:shouldInsertNode:replacingDOMRange:givenAction:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'webView:shouldInsertText:replacingDOMRange:givenAction:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': sel32or64(b'i', b'q')}}}) - r(b'NSObject', b'webView:shouldPerformAction:fromSender:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': ':'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:startURLSchemeTask:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:stopURLSchemeTask:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:unableToImplementPolicyWithError:frame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:validateUserInterfaceItem:defaultValidation:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': 'Z'}}}) - r(b'NSObject', b'webView:willCloseFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webView:willPerformClientRedirectToURL:delay:fireDate:forFrame:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'd'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) - r(b'NSObject', b'webView:willPerformDragDestinationAction:forDraggingInfo:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': b'@'}}}) - r(b'NSObject', b'webView:willPerformDragSourceAction:fromPoint:withPasteboard:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': sel32or64(b'{_NSPoint=ff}', b'{CGPoint=dd}')}, 5: {'type': b'@'}}}) - r(b'NSObject', b'webView:windowScriptObjectAvailable:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) - r(b'NSObject', b'webViewAreToolbarsVisible:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewClose:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewContentRect:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', 
b'webViewDidBeginEditing:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewDidChangeSelection:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewDidChangeTypingStyle:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewDidClose:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewDidEndEditing:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewFirstResponder:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewFocus:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewFooterHeight:', {'required': False, 'retval': {'type': b'f'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewFrame:', {'required': False, 'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewHeaderHeight:', {'required': False, 'retval': {'type': b'f'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewIsResizable:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewIsStatusBarVisible:', {'required': False, 'retval': {'type': 'Z'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewRunModal:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewShow:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewStatusText:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewUnfocus:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'NSObject', b'webViewWebContentProcessDidTerminate:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) - r(b'WKContentRuleListStore', b'compileContentRuleListForIdentifier:encodedContentRuleList:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'WKContentRuleListStore', b'getAvailableContentRuleListIdentifiers:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'WKContentRuleListStore', b'lookUpContentRuleListForIdentifier:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'WKContentRuleListStore', b'removeContentRuleListForIdentifier:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'WKFrameInfo', b'isMainFrame', {'retval': {'type': b'Z'}}) - r(b'WKHTTPCookieStore', b'deleteCookie:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'WKHTTPCookieStore', b'getAllCookies:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - 
r(b'WKHTTPCookieStore', b'setCookie:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'WKNavigationResponse', b'canShowMIMEType', {'retval': {'type': b'Z'}}) - r(b'WKNavigationResponse', b'isForMainFrame', {'retval': {'type': b'Z'}}) - r(b'WKOpenPanelParameters', b'allowsDirectories', {'retval': {'type': 'Z'}}) - r(b'WKOpenPanelParameters', b'allowsMultipleSelection', {'retval': {'type': 'Z'}}) - r(b'WKPreferences', b'isSafeBrowsingEnabled', {'retval': {'type': b'Z'}}) - r(b'WKPreferences', b'javaEnabled', {'retval': {'type': b'Z'}}) - r(b'WKPreferences', b'javaScriptCanOpenWindowsAutomatically', {'retval': {'type': b'Z'}}) - r(b'WKPreferences', b'javaScriptEnabled', {'retval': {'type': b'Z'}}) - r(b'WKPreferences', b'plugInsEnabled', {'retval': {'type': b'Z'}}) - r(b'WKPreferences', b'setJavaEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'WKPreferences', b'setJavaScriptCanOpenWindowsAutomatically:', {'arguments': {2: {'type': b'Z'}}}) - r(b'WKPreferences', b'setJavaScriptEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'WKPreferences', b'setPlugInsEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'WKPreferences', b'setSafeBrowsingEnabled:', {'arguments': {2: {'type': b'Z'}}}) - r(b'WKPreferences', b'setTabFocusesLinks:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WKPreferences', b'tabFocusesLinks', {'retval': {'type': 'Z'}}) - r(b'WKUserScript', b'initWithSource:injectionTime:forMainFrameOnly:', {'arguments': {4: {'type': b'Z'}}}) - r(b'WKUserScript', b'isForMainFrameOnly', {'retval': {'type': b'Z'}}) - r(b'WKWebView', b'allowsBackForwardNavigationGestures', {'retval': {'type': b'Z'}}) - r(b'WKWebView', b'allowsLinkPreview', {'retval': {'type': 'Z'}}) - r(b'WKWebView', b'allowsMagnification', {'retval': {'type': b'Z'}}) - r(b'WKWebView', b'canGoBack', {'retval': {'type': b'Z'}}) - r(b'WKWebView', b'canGoForward', {'retval': {'type': b'Z'}}) - r(b'WKWebView', b'evaluateJavaScript:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'WKWebView', b'handlesURLScheme:', {'retval': {'type': 'Z'}}) - r(b'WKWebView', b'hasOnlySecureContent', {'retval': {'type': b'Z'}}) - r(b'WKWebView', b'isLoading', {'retval': {'type': b'Z'}}) - r(b'WKWebView', b'setAllowsBackForwardNavigationGestures:', {'arguments': {2: {'type': b'Z'}}}) - r(b'WKWebView', b'setAllowsLinkPreview:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WKWebView', b'setAllowsMagnification:', {'arguments': {2: {'type': b'Z'}}}) - r(b'WKWebView', b'takeSnapshotWithConfiguration:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}}}}}}) - r(b'WKWebViewConfiguration', b'allowsAirPlayForMediaPlayback', {'retval': {'type': 'Z'}}) - r(b'WKWebViewConfiguration', b'allowsPictureInPictureMediaPlayback', {'retval': {'type': 'Z'}}) - r(b'WKWebViewConfiguration', b'setAllowsAirPlayForMediaPlayback:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WKWebViewConfiguration', b'setAllowsPictureInPictureMediaPlayback:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WKWebViewConfiguration', b'setSuppressesIncrementalRendering:', {'arguments': {2: {'type': b'Z'}}}) - r(b'WKWebViewConfiguration', b'suppressesIncrementalRendering', {'retval': {'type': b'Z'}}) - r(b'WKWebsiteDataStore', b'fetchDataRecordsOfTypes:completionHandler:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 
'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}}}}}}) - r(b'WKWebsiteDataStore', b'isPersistent', {'retval': {'type': 'Z'}}) - r(b'WKWebsiteDataStore', b'removeDataOfTypes:forDataRecords:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'WKWebsiteDataStore', b'removeDataOfTypes:modifiedSince:completionHandler:', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}}) - r(b'WebBackForwardList', b'containsItem:', {'retval': {'type': 'Z'}}) - r(b'WebDataSource', b'isLoading', {'retval': {'type': 'Z'}}) - r(b'WebFrame', b'globalContext', {'retval': {'type': '^{OpaqueJSContext=}'}}) - r(b'WebFrameView', b'allowsScrolling', {'retval': {'type': 'Z'}}) - r(b'WebFrameView', b'canPrintHeadersAndFooters', {'retval': {'type': 'Z'}}) - r(b'WebFrameView', b'documentViewShouldHandlePrint', {'retval': {'type': 'Z'}}) - r(b'WebFrameView', b'setAllowsScrolling:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebHistory', b'loadFromURL:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'WebHistory', b'saveToURL:error:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type_modifier': b'o'}}}) - r(b'WebPreferences', b'allowsAirPlayForMediaPlayback', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'allowsAnimatedImageLooping', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'allowsAnimatedImages', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'arePlugInsEnabled', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'autosaves', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'isJavaEnabled', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'isJavaScriptEnabled', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'javaScriptCanOpenWindowsAutomatically', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'loadsImagesAutomatically', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'privateBrowsingEnabled', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'setAllowsAirPlayForMediaPlayback:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setAllowsAnimatedImageLooping:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setAllowsAnimatedImages:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setAutosaves:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setJavaEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setJavaScriptCanOpenWindowsAutomatically:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setJavaScriptEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setLoadsImagesAutomatically:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setPlugInsEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setPrivateBrowsingEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setShouldPrintBackgrounds:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setSuppressesIncrementalRendering:', {'arguments': {2: {'type': b'Z'}}}) - r(b'WebPreferences', b'setTabsToLinks:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setUserStyleSheetEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'setUsesPageCache:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebPreferences', b'shouldPrintBackgrounds', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'suppressesIncrementalRendering', {'retval': {'type': b'Z'}}) - r(b'WebPreferences', b'tabsToLinks', {'retval': {'type': 'Z'}}) - 
r(b'WebPreferences', b'userStyleSheetEnabled', {'retval': {'type': 'Z'}}) - r(b'WebPreferences', b'usesPageCache', {'retval': {'type': 'Z'}}) - r(b'WebScriptObject', b'JSObject', {'retval': {'type': '^{OpaqueJSValue=}'}}) - r(b'WebScriptObject', b'throwException:', {'retval': {'type': 'Z'}}) - r(b'WebView', b'canGoBack', {'retval': {'type': 'Z'}}) - r(b'WebView', b'canGoForward', {'retval': {'type': 'Z'}}) - r(b'WebView', b'canMakeTextLarger', {'retval': {'type': 'Z'}}) - r(b'WebView', b'canMakeTextSmaller', {'retval': {'type': 'Z'}}) - r(b'WebView', b'canMakeTextStandardSize', {'retval': {'type': 'Z'}}) - r(b'WebView', b'canShowMIMEType:', {'retval': {'type': 'Z'}}) - r(b'WebView', b'canShowMIMETypeAsHTML:', {'retval': {'type': 'Z'}}) - r(b'WebView', b'drawsBackground', {'retval': {'type': 'Z'}}) - r(b'WebView', b'goBack', {'retval': {'type': 'Z'}}) - r(b'WebView', b'goForward', {'retval': {'type': 'Z'}}) - r(b'WebView', b'goToBackForwardItem:', {'retval': {'type': 'Z'}}) - r(b'WebView', b'isContinuousSpellCheckingEnabled', {'retval': {'type': 'Z'}}) - r(b'WebView', b'isEditable', {'retval': {'type': 'Z'}}) - r(b'WebView', b'isLoading', {'retval': {'type': 'Z'}}) - r(b'WebView', b'maintainsInactiveSelection', {'retval': {'type': 'Z'}}) - r(b'WebView', b'searchFor:direction:caseSensitive:wrap:', {'retval': {'type': 'Z'}, 'arguments': {3: {'type': 'Z'}, 4: {'type': 'Z'}, 5: {'type': 'Z'}}}) - r(b'WebView', b'setContinuousSpellCheckingEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebView', b'setDrawsBackground:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebView', b'setEditable:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebView', b'setMaintainsBackForwardList:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebView', b'setShouldCloseWithWindow:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebView', b'setShouldUpdateWhileOffscreen:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebView', b'setSmartInsertDeleteEnabled:', {'arguments': {2: {'type': 'Z'}}}) - r(b'WebView', b'shouldCloseWithWindow', {'retval': {'type': 'Z'}}) - r(b'WebView', b'shouldUpdateWhileOffscreen', {'retval': {'type': 'Z'}}) - r(b'WebView', b'smartInsertDeleteEnabled', {'retval': {'type': 'Z'}}) - r(b'WebView', b'supportsTextEncoding', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -protocols={'WebUIDelegate': objc.informal_protocol('WebUIDelegate', [objc.selector(None, b'webView:runOpenPanelForFileButtonWithResultListener:', b'v@:@@', isRequired=False), objc.selector(None, b'webViewFirstResponder:', b'@@:@', isRequired=False), objc.selector(None, b'webView:runJavaScriptAlertPanelWithMessage:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:runJavaScriptConfirmPanelWithMessage:initiatedByFrame:', b'Z@:@@@', isRequired=False), objc.selector(None, b'webViewShow:', b'v@:@', isRequired=False), objc.selector(None, b'webView:runBeforeUnloadConfirmPanelWithMessage:initiatedByFrame:', b'Z@:@@@', isRequired=False), objc.selector(None, b'webView:drawHeaderInRect:', sel32or64(b'v@:@{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v@:@{CGRect={CGPoint=dd}{CGSize=dd}}'), isRequired=False), objc.selector(None, b'webViewRunModal:', b'v@:@', isRequired=False), objc.selector(None, b'webViewIsStatusBarVisible:', b'Z@:@', isRequired=False), objc.selector(None, b'webView:runOpenPanelForFileButtonWithResultListener:allowMultipleFiles:', b'v@:@@Z', isRequired=False), objc.selector(None, b'webView:validateUserInterfaceItem:defaultValidation:', b'Z@:@@Z', isRequired=False), objc.selector(None, b'webViewFooterHeight:', b'f@:@', 
isRequired=False), objc.selector(None, b'webView:runJavaScriptTextInputPanelWithPrompt:defaultText:initiatedByFrame:', b'@@:@@@@', isRequired=False), objc.selector(None, b'webViewIsResizable:', b'Z@:@', isRequired=False), objc.selector(None, b'webView:setToolbarsVisible:', b'v@:@Z', isRequired=False), objc.selector(None, b'webView:setContentRect:', sel32or64(b'v@:@{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v@:@{CGRect={CGPoint=dd}{CGSize=dd}}'), isRequired=False), objc.selector(None, b'webView:runJavaScriptAlertPanelWithMessage:initiatedByFrame:', b'v@:@@@', isRequired=False), objc.selector(None, b'webView:drawFooterInRect:', sel32or64(b'v@:@{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v@:@{CGRect={CGPoint=dd}{CGSize=dd}}'), isRequired=False), objc.selector(None, b'webView:runJavaScriptTextInputPanelWithPrompt:defaultText:', b'@@:@@@', isRequired=False), objc.selector(None, b'webView:setResizable:', b'v@:@Z', isRequired=False), objc.selector(None, b'webViewContentRect:', sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}@:@', b'{CGRect={CGPoint=dd}{CGSize=dd}}@:@'), isRequired=False), objc.selector(None, b'webViewClose:', b'v@:@', isRequired=False), objc.selector(None, b'webView:shouldPerformAction:fromSender:', b'Z@:@:@', isRequired=False), objc.selector(None, b'webView:dragSourceActionMaskForPoint:', sel32or64(b'I@:@{_NSPoint=ff}', b'Q@:@{CGPoint=dd}'), isRequired=False), objc.selector(None, b'webViewAreToolbarsVisible:', b'Z@:@', isRequired=False), objc.selector(None, b'webView:setFrame:', sel32or64(b'v@:@{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'v@:@{CGRect={CGPoint=dd}{CGSize=dd}}'), isRequired=False), objc.selector(None, b'webView:dragDestinationActionMaskForDraggingInfo:', sel32or64(b'I@:@@', b'Q@:@@'), isRequired=False), objc.selector(None, b'webView:mouseDidMoveOverElement:modifierFlags:', sel32or64(b'v@:@@I', b'v@:@@Q'), isRequired=False), objc.selector(None, b'webViewHeaderHeight:', b'f@:@', isRequired=False), objc.selector(None, b'webView:runJavaScriptConfirmPanelWithMessage:', b'Z@:@@', isRequired=False), objc.selector(None, b'webViewStatusText:', b'@@:@', isRequired=False), objc.selector(None, b'webView:createWebViewWithRequest:', b'@@:@@', isRequired=False), objc.selector(None, b'webView:willPerformDragDestinationAction:forDraggingInfo:', sel32or64(b'v@:@I@', b'v@:@Q@'), isRequired=False), objc.selector(None, b'webViewUnfocus:', b'v@:@', isRequired=False), objc.selector(None, b'webView:makeFirstResponder:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:setStatusText:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:willPerformDragSourceAction:fromPoint:withPasteboard:', sel32or64(b'v@:@I{_NSPoint=ff}@', b'v@:@Q{CGPoint=dd}@'), isRequired=False), objc.selector(None, b'webView:contextMenuItemsForElement:defaultMenuItems:', b'@@:@@@', isRequired=False), objc.selector(None, b'webViewFocus:', b'v@:@', isRequired=False), objc.selector(None, b'webView:printFrameView:', b'v@:@@', isRequired=False), objc.selector(None, b'webViewFrame:', sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}@:@', b'{CGRect={CGPoint=dd}{CGSize=dd}}@:@'), isRequired=False), objc.selector(None, b'webView:setStatusBarVisible:', b'v@:@Z', isRequired=False), objc.selector(None, b'webView:createWebViewModalDialogWithRequest:', b'@@:@@', isRequired=False)]), 'WebViewEditingDelegate': objc.informal_protocol('WebViewEditingDelegate', [objc.selector(None, b'webViewDidBeginEditing:', b'v@:@', isRequired=False), objc.selector(None, b'webViewDidChangeSelection:', b'v@:@', isRequired=False), objc.selector(None, 
b'webView:shouldDeleteDOMRange:', b'Z@:@@', isRequired=False), objc.selector(None, b'webView:shouldChangeTypingStyle:toStyle:', b'Z@:@@@', isRequired=False), objc.selector(None, b'webView:shouldChangeSelectedDOMRange:toDOMRange:affinity:stillSelecting:', sel32or64(b'Z@:@@@IZ', b'Z@:@@@QZ'), isRequired=False), objc.selector(None, b'webView:shouldApplyStyle:toElementsInDOMRange:', b'Z@:@@@', isRequired=False), objc.selector(None, b'webView:doCommandBySelector:', b'Z@:@:', isRequired=False), objc.selector(None, b'webViewDidChangeTypingStyle:', b'v@:@', isRequired=False), objc.selector(None, b'undoManagerForWebView:', b'@@:@', isRequired=False), objc.selector(None, b'webViewDidEndEditing:', b'v@:@', isRequired=False), objc.selector(None, b'webView:shouldInsertText:replacingDOMRange:givenAction:', sel32or64(b'Z@:@@@i', b'Z@:@@@q'), isRequired=False), objc.selector(None, b'webViewDidChange:', b'v@:@', isRequired=False), objc.selector(None, b'webView:shouldEndEditingInDOMRange:', b'Z@:@@', isRequired=False), objc.selector(None, b'webView:shouldBeginEditingInDOMRange:', b'Z@:@@', isRequired=False), objc.selector(None, b'webView:shouldInsertNode:replacingDOMRange:givenAction:', sel32or64(b'Z@:@@@i', b'Z@:@@@q'), isRequired=False)]), 'WebPolicyDelegate': objc.informal_protocol('WebPolicyDelegate', [objc.selector(None, b'webView:unableToImplementPolicyWithError:frame:', b'v@:@@@', isRequired=False), objc.selector(None, b'webView:decidePolicyForNavigationAction:request:frame:decisionListener:', b'v@:@@@@@', isRequired=False), objc.selector(None, b'webView:decidePolicyForNewWindowAction:request:newFrameName:decisionListener:', b'v@:@@@@@', isRequired=False), objc.selector(None, b'webView:decidePolicyForMIMEType:request:frame:decisionListener:', b'v@:@@@@@', isRequired=False)]), 'WebDownloadDelegate': objc.informal_protocol('WebDownloadDelegate', [objc.selector(None, b'downloadWindowForAuthenticationSheet:', b'@@:@', isRequired=False)]), 'WebPlugIn': objc.informal_protocol('WebPlugIn', [objc.selector(None, b'webPlugInMainResourceDidReceiveResponse:', b'v@:@', isRequired=False), objc.selector(None, b'objectForWebScript', b'@@:', isRequired=False), objc.selector(None, b'webPlugInMainResourceDidFinishLoading', b'v@:', isRequired=False), objc.selector(None, b'webPlugInMainResourceDidFailWithError:', b'v@:@', isRequired=False), objc.selector(None, b'webPlugInMainResourceDidReceiveData:', b'v@:@', isRequired=False), objc.selector(None, b'webPlugInDestroy', b'v@:', isRequired=False), objc.selector(None, b'webPlugInStop', b'v@:', isRequired=False), objc.selector(None, b'webPlugInSetIsSelected:', b'v@:Z', isRequired=False), objc.selector(None, b'webPlugInInitialize', b'v@:', isRequired=False), objc.selector(None, b'webPlugInStart', b'v@:', isRequired=False)]), 'WebJavaPlugIn': objc.informal_protocol('WebJavaPlugIn', [objc.selector(None, b'webPlugInCallJava:isStatic:returnType:method:arguments:callingURL:exceptionDescription:', sel32or64(b'(jvalue=CcSslqfd^{_jobject=})@:^{_jobject=}Zi^{_jmethodID=}^(jvalue=CcSslqfd^{_jobject=})@^@', b'(jvalue=CcSsiqfd^{_jobject=})@:^{_jobject=}Zi^{_jmethodID=}^(jvalue=CcSsiqfd^{_jobject=})@^@'), isRequired=False), objc.selector(None, b'webPlugInGetApplet', b'^{_jobject=}@:', isRequired=False)]), 'WebResourceLoadDelegate': objc.informal_protocol('WebResourceLoadDelegate', [objc.selector(None, b'webView:resource:didCancelAuthenticationChallenge:fromDataSource:', b'v@:@@@@', isRequired=False), objc.selector(None, b'webView:resource:didFinishLoadingFromDataSource:', b'v@:@@@', 
isRequired=False), objc.selector(None, b'webView:identifierForInitialRequest:fromDataSource:', b'@@:@@@', isRequired=False), objc.selector(None, b'webView:resource:willSendRequest:redirectResponse:fromDataSource:', b'@@:@@@@@', isRequired=False), objc.selector(None, b'webView:plugInFailedWithError:dataSource:', b'v@:@@@', isRequired=False), objc.selector(None, b'webView:resource:didReceiveResponse:fromDataSource:', b'v@:@@@@', isRequired=False), objc.selector(None, b'webView:resource:didReceiveContentLength:fromDataSource:', sel32or64(b'v@:@@i@', b'v@:@@q@'), isRequired=False), objc.selector(None, b'webView:resource:didFailLoadingWithError:fromDataSource:', b'v@:@@@@', isRequired=False), objc.selector(None, b'webView:resource:didReceiveAuthenticationChallenge:fromDataSource:', b'v@:@@@@', isRequired=False)]), 'WebFrameLoadDelegate': objc.informal_protocol('WebFrameLoadDelegate', [objc.selector(None, b'webView:didCancelClientRedirectForFrame:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:didClearWindowObject:forFrame:', b'v@:@@@', isRequired=False), objc.selector(None, b'webView:didReceiveTitle:forFrame:', b'v@:@@@', isRequired=False), objc.selector(None, b'webView:didStartProvisionalLoadForFrame:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:didCommitLoadForFrame:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:didFinishLoadForFrame:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:didFailProvisionalLoadWithError:forFrame:', b'v@:@@@', isRequired=False), objc.selector(None, b'webView:didFailLoadWithError:forFrame:', b'v@:@@@', isRequired=False), objc.selector(None, b'webView:didReceiveIcon:forFrame:', b'v@:@@@', isRequired=False), objc.selector(None, b'webView:didReceiveServerRedirectForProvisionalLoadForFrame:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:willPerformClientRedirectToURL:delay:fireDate:forFrame:', b'v@:@@d@@', isRequired=False), objc.selector(None, b'webView:windowScriptObjectAvailable:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:didChangeLocationWithinPageForFrame:', b'v@:@@', isRequired=False), objc.selector(None, b'webView:willCloseFrame:', b'v@:@@', isRequired=False)]), 'WebPlugInContainer': objc.informal_protocol('WebPlugInContainer', [objc.selector(None, b'webPlugInContainerShowStatus:', b'v@:@', isRequired=False), objc.selector(None, b'webPlugInContainerSelectionColor', b'@@:', isRequired=False), objc.selector(None, b'webFrame', b'@@:', isRequired=False), objc.selector(None, b'webPlugInContainerLoadRequest:inFrame:', b'v@:@@', isRequired=False)]), 'WebScripting': objc.informal_protocol('WebScripting', [objc.selector(None, b'finalizeForWebScript', b'v@:', isRequired=False), objc.selector(None, b'invokeUndefinedMethodFromWebScript:withArguments:', b'@@:@@', isRequired=False), objc.selector(None, b'webScriptNameForKey:', b'@@:^c', isRequired=False), objc.selector(None, b'webScriptNameForSelector:', b'@@::', isRequired=False), objc.selector(None, b'invokeDefaultMethodWithArguments:', b'@@:@', isRequired=False), objc.selector(None, b'isSelectorExcludedFromWebScript:', b'Z@::', isRequired=False), objc.selector(None, b'isKeyExcludedFromWebScript:', b'Z@:^c', isRequired=False)])} -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/WebKit/_metadata.pyc b/env/lib/python2.7/site-packages/WebKit/_metadata.pyc deleted file mode 100644 index e97abe5a..00000000 Binary files a/env/lib/python2.7/site-packages/WebKit/_metadata.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/easy_install.py b/env/lib/python2.7/site-packages/easy_install.py deleted file mode 100644 index d87e9840..00000000 --- a/env/lib/python2.7/site-packages/easy_install.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Run the EasyInstall command""" - -if __name__ == '__main__': - from setuptools.command.easy_install import main - main() diff --git a/env/lib/python2.7/site-packages/easy_install.pyc b/env/lib/python2.7/site-packages/easy_install.pyc deleted file mode 100644 index a726b76e..00000000 Binary files a/env/lib/python2.7/site-packages/easy_install.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/iTunesLibrary/__init__.py b/env/lib/python2.7/site-packages/iTunesLibrary/__init__.py deleted file mode 100644 index 2c63f1bc..00000000 --- a/env/lib/python2.7/site-packages/iTunesLibrary/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -''' -Python mapping for the iTunesLibrary framework. - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions and classes. -''' - -import objc -import sys -import Foundation - -from iTunesLibrary import _metadata - -sys.modules['iTunesLibrary'] = mod = objc.ObjCLazyModule( - "iTunesLibrary", - "com.apple.iTunesLibrary", - objc.pathForFramework("/Library/Frameworks/iTunesLibrary.framework"), - _metadata.__dict__, None, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }, (Foundation,)) - -import sys -del sys.modules['iTunesLibrary._metadata'] diff --git a/env/lib/python2.7/site-packages/iTunesLibrary/__init__.pyc b/env/lib/python2.7/site-packages/iTunesLibrary/__init__.pyc deleted file mode 100644 index a47ed542..00000000 Binary files a/env/lib/python2.7/site-packages/iTunesLibrary/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/iTunesLibrary/_metadata.py b/env/lib/python2.7/site-packages/iTunesLibrary/_metadata.py deleted file mode 100644 index 8c6a3944..00000000 --- a/env/lib/python2.7/site-packages/iTunesLibrary/_metadata.py +++ /dev/null @@ -1,50 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Thu Jul 5 22:35:45 2018 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -constants = 
'''$ITLibMediaEntityPropertyPersistentID$ITLibMediaItemPropertyAddedDate$ITLibMediaItemPropertyAlbumArtist$ITLibMediaItemPropertyAlbumDiscCount$ITLibMediaItemPropertyAlbumDiscNumber$ITLibMediaItemPropertyAlbumIsCompilation$ITLibMediaItemPropertyAlbumIsGapless$ITLibMediaItemPropertyAlbumRating$ITLibMediaItemPropertyAlbumRatingComputed$ITLibMediaItemPropertyAlbumTitle$ITLibMediaItemPropertyAlbumTrackCount$ITLibMediaItemPropertyArtistName$ITLibMediaItemPropertyArtwork$ITLibMediaItemPropertyBeatsPerMinute$ITLibMediaItemPropertyBitRate$ITLibMediaItemPropertyCategory$ITLibMediaItemPropertyComments$ITLibMediaItemPropertyComposer$ITLibMediaItemPropertyContentRating$ITLibMediaItemPropertyDescription$ITLibMediaItemPropertyFileSize$ITLibMediaItemPropertyFileType$ITLibMediaItemPropertyGenre$ITLibMediaItemPropertyGrouping$ITLibMediaItemPropertyHasArtwork$ITLibMediaItemPropertyIsDRMProtected$ITLibMediaItemPropertyIsPurchased$ITLibMediaItemPropertyIsUserDisabled$ITLibMediaItemPropertyIsVideo$ITLibMediaItemPropertyKind$ITLibMediaItemPropertyLastPlayDate$ITLibMediaItemPropertyLocation$ITLibMediaItemPropertyLocationType$ITLibMediaItemPropertyLyricsContentRating$ITLibMediaItemPropertyMediaKind$ITLibMediaItemPropertyModifiedDate$ITLibMediaItemPropertyMovementCount$ITLibMediaItemPropertyMovementName$ITLibMediaItemPropertyMovementNumber$ITLibMediaItemPropertyPlayCount$ITLibMediaItemPropertyPlayStatus$ITLibMediaItemPropertyRating$ITLibMediaItemPropertyRatingComputed$ITLibMediaItemPropertyReleaseDate$ITLibMediaItemPropertySampleRate$ITLibMediaItemPropertySize$ITLibMediaItemPropertySkipDate$ITLibMediaItemPropertySortAlbumArtist$ITLibMediaItemPropertySortAlbumTitle$ITLibMediaItemPropertySortArtistName$ITLibMediaItemPropertySortComposer$ITLibMediaItemPropertySortTitle$ITLibMediaItemPropertyStartTime$ITLibMediaItemPropertyStopTime$ITLibMediaItemPropertyTitle$ITLibMediaItemPropertyTotalTime$ITLibMediaItemPropertyTrackNumber$ITLibMediaItemPropertyUserSkipCount$ITLibMediaItemPropertyVideoEpisode$ITLibMediaItemPropertyVideoEpisodeOrder$ITLibMediaItemPropertyVideoHeight$ITLibMediaItemPropertyVideoIsHD$ITLibMediaItemPropertyVideoSeason$ITLibMediaItemPropertyVideoSeries$ITLibMediaItemPropertyVideoSortSeries$ITLibMediaItemPropertyVideoWidth$ITLibMediaItemPropertyVoiceOverLanguage$ITLibMediaItemPropertyVolumeAdjustment$ITLibMediaItemPropertyVolumeNormalizationEnergy$ITLibMediaItemPropertyWork$ITLibMediaItemPropertyYear$ITLibPlaylistPropertyAllItemsPlaylist$ITLibPlaylistPropertyDistinguisedKind$ITLibPlaylistPropertyItems$ITLibPlaylistPropertyKind$ITLibPlaylistPropertyMaster$ITLibPlaylistPropertyName$ITLibPlaylistPropertyParentPersistentID$ITLibPlaylistPropertyVisible$''' -enums = 
'''$ITLibArtworkFormatBMP@6$ITLibArtworkFormatBitmap@1$ITLibArtworkFormatGIF@4$ITLibArtworkFormatJPEG@2$ITLibArtworkFormatJPEG2000@3$ITLibArtworkFormatNone@0$ITLibArtworkFormatPICT@8$ITLibArtworkFormatPNG@5$ITLibArtworkFormatTIFF@7$ITLibDistinguishedPlaylistKind90sMusic@42$ITLibDistinguishedPlaylistKindApplications@51$ITLibDistinguishedPlaylistKindBooks@4$ITLibDistinguishedPlaylistKindClassicalMusic@48$ITLibDistinguishedPlaylistKindHomeVideos@50$ITLibDistinguishedPlaylistKindLibraryMusicVideos@49$ITLibDistinguishedPlaylistKindLovedSongs@52$ITLibDistinguishedPlaylistKindMovies@1$ITLibDistinguishedPlaylistKindMusic@3$ITLibDistinguishedPlaylistKindMusicShowsAndMovies@53$ITLibDistinguishedPlaylistKindMusicVideos@47$ITLibDistinguishedPlaylistKindMyTopRated@43$ITLibDistinguishedPlaylistKindNone@0$ITLibDistinguishedPlaylistKindPodcasts@7$ITLibDistinguishedPlaylistKindPurchases@16$ITLibDistinguishedPlaylistKindRecentlyAdded@46$ITLibDistinguishedPlaylistKindRecentlyPlayed@45$ITLibDistinguishedPlaylistKindRingtones@5$ITLibDistinguishedPlaylistKindTVShows@2$ITLibDistinguishedPlaylistKindTop25MostPlayed@44$ITLibDistinguishedPlaylistKindVoiceMemos@14$ITLibDistinguishedPlaylistKindiTunesU@26$ITLibExportFeatureNone@0$ITLibInitOptionLazyLoadData@1$ITLibInitOptionNone@0$ITLibMediaItemLocationTypeFile@1$ITLibMediaItemLocationTypeRemote@3$ITLibMediaItemLocationTypeURL@2$ITLibMediaItemLocationTypeUnknown@0$ITLibMediaItemLyricsContentRatingClean@2$ITLibMediaItemLyricsContentRatingExplicit@1$ITLibMediaItemLyricsContentRatingNone@0$ITLibMediaItemMediaKindAlertTone@21$ITLibMediaItemMediaKindAudiobook@5$ITLibMediaItemMediaKindBook@19$ITLibMediaItemMediaKindDigitalBooklet@15$ITLibMediaItemMediaKindHomeVideo@12$ITLibMediaItemMediaKindIOSApplication@16$ITLibMediaItemMediaKindInteractiveBooklet@9$ITLibMediaItemMediaKindMovie@3$ITLibMediaItemMediaKindMusicVideo@7$ITLibMediaItemMediaKindPDFBook@20$ITLibMediaItemMediaKindPDFBooklet@6$ITLibMediaItemMediaKindPodcast@4$ITLibMediaItemMediaKindRingtone@14$ITLibMediaItemMediaKindSong@2$ITLibMediaItemMediaKindTVShow@8$ITLibMediaItemMediaKindUnknown@1$ITLibMediaItemMediaKindVoiceMemo@17$ITLibMediaItemMediaKindiTunesU@18$ITLibMediaItemPlayStatusNone@0$ITLibMediaItemPlayStatusPartiallyPlayed@1$ITLibMediaItemPlayStatusUnplayed@2$ITLibPlaylistKindFolder@3$ITLibPlaylistKindGenius@2$ITLibPlaylistKindGeniusMix@4$ITLibPlaylistKindRegular@0$ITLibPlaylistKindSmart@1$''' -misc.update({}) -r = objc.registerMetaDataForSelector -objc._updatingMetadata(True) -try: - r(b'ITLibAlbum', b'isCompilation', {'retval': {'type': 'Z'}}) - r(b'ITLibAlbum', b'isGapless', {'retval': {'type': 'Z'}}) - r(b'ITLibAlbum', b'isRatingComputed', {'retval': {'type': 'Z'}}) - r(b'ITLibMediaEntity', b'enumerateValuesExceptForProperties:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}}}}}}) - r(b'ITLibMediaEntity', b'enumerateValuesForProperties:usingBlock:', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'@'}, 2: {'type': b'@'}, 3: {'type': b'o^Z'}}}}}}) - r(b'ITLibMediaItem', b'hasArtworkAvailable', {'retval': {'type': 'Z'}}) - r(b'ITLibMediaItem', b'isCloud', {'retval': {'type': 'Z'}}) - r(b'ITLibMediaItem', b'isDRMProtected', {'retval': {'type': 'Z'}}) - r(b'ITLibMediaItem', b'isPurchased', {'retval': {'type': 'Z'}}) - r(b'ITLibMediaItem', b'isRatingComputed', {'retval': {'type': 'Z'}}) - r(b'ITLibMediaItem', b'isUserDisabled', {'retval': 
{'type': 'Z'}}) - r(b'ITLibMediaItem', b'isVideo', {'retval': {'type': 'Z'}}) - r(b'ITLibMediaItemVideoInfo', b'isHD', {'retval': {'type': 'Z'}}) - r(b'ITLibPlaylist', b'isAllItemsPlaylist', {'retval': {'type': 'Z'}}) - r(b'ITLibPlaylist', b'isMaster', {'retval': {'type': 'Z'}}) - r(b'ITLibPlaylist', b'isVisible', {'retval': {'type': 'Z'}}) - r(b'ITLibrary', b'initWithAPIVersion:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ITLibrary', b'initWithAPIVersion:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ITLibrary', b'libraryWithAPIVersion:error:', {'arguments': {3: {'type_modifier': b'o'}}}) - r(b'ITLibrary', b'libraryWithAPIVersion:options:error:', {'arguments': {4: {'type_modifier': b'o'}}}) - r(b'ITLibrary', b'reloadData', {'retval': {'type': 'Z'}}) - r(b'ITLibrary', b'shouldShowContentRating', {'retval': {'type': 'Z'}}) -finally: - objc._updatingMetadata(False) -expressions = {} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/iTunesLibrary/_metadata.pyc b/env/lib/python2.7/site-packages/iTunesLibrary/_metadata.pyc deleted file mode 100644 index b1849203..00000000 Binary files a/env/lib/python2.7/site-packages/iTunesLibrary/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/libdispatch/__init__.py b/env/lib/python2.7/site-packages/libdispatch/__init__.py deleted file mode 100644 index ac7059c6..00000000 --- a/env/lib/python2.7/site-packages/libdispatch/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -''' -Python mapping for the dispatch library on macOS - -This module does not contain docstrings for the wrapped code, check Apple's -documentation for details on how to use these functions. -''' - -import objc -import sys - -from libdispatch import _metadata -from libdispatch._inlines import _inline_list_ - - -sys.modules['libdispatch'] = mod = objc.ObjCLazyModule( - "libdispatch", - None, - None, - _metadata.__dict__, _inline_list_, { - '__doc__': __doc__, - 'objc': objc, - '__path__': __path__, - '__loader__': globals().get('__loader__', None), - }) - -import libdispatch._libdispatch as _manual -for nm in dir(_manual): - if nm.startswith('__'): continue - setattr(mod, nm, getattr(_manual, nm)) - -import sys -del sys.modules['libdispatch._metadata'] diff --git a/env/lib/python2.7/site-packages/libdispatch/__init__.pyc b/env/lib/python2.7/site-packages/libdispatch/__init__.pyc deleted file mode 100644 index 7290a2cb..00000000 Binary files a/env/lib/python2.7/site-packages/libdispatch/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/libdispatch/_inlines.so b/env/lib/python2.7/site-packages/libdispatch/_inlines.so deleted file mode 100755 index 1b3047c2..00000000 Binary files a/env/lib/python2.7/site-packages/libdispatch/_inlines.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/libdispatch/_libdispatch.so b/env/lib/python2.7/site-packages/libdispatch/_libdispatch.so deleted file mode 100755 index 2c7d69f3..00000000 Binary files a/env/lib/python2.7/site-packages/libdispatch/_libdispatch.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/libdispatch/_metadata.py b/env/lib/python2.7/site-packages/libdispatch/_metadata.py deleted file mode 100644 index 154dccda..00000000 --- a/env/lib/python2.7/site-packages/libdispatch/_metadata.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is generated by objective.metadata -# -# Last update: Wed Nov 29 13:42:12 2017 - -import objc, sys - -if sys.maxsize > 2 ** 32: - def sel32or64(a, b): return b -else: - def sel32or64(a, 
b): return a -if sys.byteorder == 'little': - def littleOrBig(a, b): return a -else: - def littleOrBig(a, b): return b - -misc = { -} -misc.update({'timespec': objc.createStructType('timespec', b'{timespec=ll}', ['tv_sec', 'tv_nsec'])}) -constants = '''$$''' -enums = '''$DISPATCH_AUTORELEASE_FREQUENCY_INHERIT@0$DISPATCH_AUTORELEASE_FREQUENCY_NEVER@2$DISPATCH_AUTORELEASE_FREQUENCY_WORK_ITEM@1$DISPATCH_BLOCK_ASSIGN_CURRENT@4$DISPATCH_BLOCK_BARRIER@1$DISPATCH_BLOCK_DETACHED@2$DISPATCH_BLOCK_ENFORCE_QOS_CLASS@32$DISPATCH_BLOCK_INHERIT_QOS_CLASS@16$DISPATCH_BLOCK_NO_QOS_CLASS@8$DISPATCH_IO_RANDOM@1$DISPATCH_IO_STOP@1$DISPATCH_IO_STREAM@0$DISPATCH_IO_STRICT_INTERVAL@1$DISPATCH_MACH_SEND_DEAD@1$DISPATCH_MEMORYPRESSURE_CRITICAL@4$DISPATCH_MEMORYPRESSURE_NORMAL@1$DISPATCH_MEMORYPRESSURE_WARN@2$DISPATCH_PROC_EXEC@536870912$DISPATCH_PROC_EXIT@2147483648$DISPATCH_PROC_FORK@1073741824$DISPATCH_PROC_SIGNAL@134217728$DISPATCH_QUEUE_PRIORITY_BACKGROUND@-32768$DISPATCH_QUEUE_PRIORITY_DEFAULT@0$DISPATCH_QUEUE_PRIORITY_HIGH@2$DISPATCH_QUEUE_PRIORITY_LOW@-2$DISPATCH_TIMER_STRICT@1$DISPATCH_TIME_FOREVER@18446744073709551615$DISPATCH_TIME_NOW@0$DISPATCH_VNODE_ATTRIB@8$DISPATCH_VNODE_DELETE@1$DISPATCH_VNODE_EXTEND@4$DISPATCH_VNODE_FUNLOCK@256$DISPATCH_VNODE_LINK@16$DISPATCH_VNODE_RENAME@32$DISPATCH_VNODE_REVOKE@64$DISPATCH_VNODE_WRITE@2$NSEC_PER_MSEC@1000000$NSEC_PER_SEC@1000000000$NSEC_PER_USEC@1000$USEC_PER_SEC@1000000$''' -misc.update({}) -functions={'dispatch_io_create_with_io': (b'@L@@@?', '', {'retval': {'already_retained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'i'}}}}}}), 'dispatch_source_get_data': (b'L@',), 'dispatch_data_create_concat': (b'@@@', '', {'retval': {'already_retained': True}}), 'dispatch_semaphore_create': (b'@l', '', {'retval': {'already_retained': True}}), 'dispatch_activate': (b'v@',), 'dispatch_io_set_interval': (b'v@QL',), 'dispatch_assert_queue_not': (b'v@', '', {'comment': 'XXX: V2 API'}), 'dispatch_barrier_async': (b'v@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_after': (b'vQ@@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_io_get_descriptor': (b'i@',), 'dispatch_source_set_timer': (b'v@QQQ',), 'dispatch_get_current_queue': (b'@',), 'dispatch_io_close': (b'v@L',), 'dispatch_source_get_mask': (b'L@',), 'dispatch_queue_attr_make_initially_inactive': (b'@@',), 'dispatch_block_create': (b'@?L@?', '', {'retval': {'callable': {'retval': {'type': 'v'}, 'arguments': {0: {'type': '^v'}}}, 'already_retained': True}, 'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_block_perform': (b'vL@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_block_testcancel': (b'l@?', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_group_async': (b'v@@@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_read': (b'viL@@?', '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'comment': 'dispatch_data_t', 'type': '@'}, 2: {'type': 'i'}}}}}}), 'dispatch_assert_queue_barrier': (b'v@',), 'dispatch_once': (b'vN^l@?', '', {'arguments': {0: {'c_array_of_fixed_length': 1}, 1: {'callable': {'retval': {'type': b'v'}, 'arguments': 
{0: {'type': '^v'}}}}}}), 'dispatch_source_set_registration_handler': (b'v@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_once_f': (b'vN^l^v^?', '', {'arguments': {0: {'c_array_of_fixed_length': 1}, 2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': False}}}), 'dispatch_get_context': (b'^v@',), 'dispatch_set_target_queue': (b'v@@',), 'dispatch_data_copy_region': (b'@@Lo^L', '', {'retval': {'already_retained': True}}), 'dispatch_notify': (b'v@?@@?', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}, 2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_queue_set_specific': (b'v@^v^v^?', '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': True}}}), 'dispatch_io_write': (b'v@q@@@?', '', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'B'}, 2: {'type': '@'}, 3: {'type': 'i'}}}}}}), 'dispatch_barrier_async_f': (b'v@^v^?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': True}}}), 'dispatch_block_cancel': (b'v@?', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_group_async_f': (b'v@@^v^?', '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_apply_f': (b'vL@^v^?', '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'L'}}}}}}), 'dispatch_set_context': (b'v@^v',), 'dispatch_queue_attr_make_with_qos_class': (b'@@Ii',), 'dispatch_suspend': (b'v@',), 'dispatch_set_finalizer_f': (b'v@^?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': True}}}), 'dispatch_sync': (b'v@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_block_notify': (b'v@?@@?', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}, 2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_queue_create': (b'@^t@', '', {'arguments': {0: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}}), 'dispatch_group_wait': (b'l@Q',), 'dispatch_main': (b'v',), 'dispatch_barrier_sync': (b'v@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_source_set_cancel_handler_f': (b'v@^?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': True}}}), 'dispatch_group_create': (b'@', '', {'retval': {'already_retained': True}}), 'dispatch_source_set_event_handler_f': (b'v@^?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': True}}}), 'dispatch_queue_get_specific': (b'^v@^v',), 'dispatch_async_f': (b'v@^v^?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': True}}}), 'dispatch_source_set_event_handler': (b'v@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_time': (b'QQq',), 'dispatch_queue_get_label': (b'^t@', '', {'retval': {'c_array_delimited_by_null': 
True}}), 'dispatch_source_cancel': (b'v@',), 'dispatch_group_enter': (b'v@',), 'dispatch_io_create_with_path': (b'@L^tiS@@?', '', {'retval': {'already_retained': True}, 'arguments': {1: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}, 5: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'i'}}}}}}), 'dispatch_testcancel': (b'l@',), 'dispatch_io_read': (b'v@qL@@?', '', {'arguments': {4: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'B'}, 2: {'type': '@'}, 3: {'type': 'i'}}}}}}), 'dispatch_source_testcancel': (b'l@',), 'dispatch_source_set_registration_handler_f': (b'v@^?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': True}}}), 'dispatch_barrier_sync_f': (b'v@^v^?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': False}}}), 'dispatch_group_notify_f': (b'v@@^v^?', '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_data_create_subrange': (b'@@LL', '', {'retval': {'already_retained': True}}), 'dispatch_semaphore_signal': (b'l@',), 'dispatch_cancel': (b'v@',), 'dispatch_queue_get_qos_class': (b'I@^i', '', {'arguments': {1: {'type_modifier': 'o'}}}), 'dispatch_sync_f': (b'v@^v^?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': False}}}), 'dispatch_get_main_queue': (b'@',), 'dispatch_data_apply': (b'B@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'B'}, 'arguments': {0: {'type': '^v'}, 1: {'type': '@'}, 2: {'type': 'L'}, 3: {'type': 'n^v', 'c_array_length_in_arg': 4}, 4: {'type': 'L'}}}}}}), 'dispatch_write': (b'vi@@@?', '', {'arguments': {1: {'comment': 'dispatch_data_t'}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'comment': 'dispatch_data_t', 'type': '@'}, 2: {'type': 'i'}}}}}}), 'dispatch_io_barrier': (b'v@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_get_global_queue': (b'@lL',), 'dispatch_walltime': (b'Qn^{timespec=ll}q',), 'dispatch_assert_queue': (b'v@',), 'dispatch_get_specific': (b'^v^v',), 'dispatch_after_f': (b'vQ@^v^?', '', {'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}, 'callable_retained': True}}}), 'dispatch_source_merge_data': (b'v@L',), 'dispatch_resume': (b'v@',), 'dispatch_semaphore_wait': (b'l@Q',), 'dispatch_io_create': (b'@Li@@?', '', {'retval': {'already_retained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'i'}}}}}}), 'dispatch_source_create': (b'@^{dispatch_source_type_s=}LL@', '', {'retval': {'already_retained': True}}), 'dispatch_wait': (b'l@Q',), 'dispatch_io_set_high_water': (b'v@L',), 'dispatch_data_create_map': (b'@@o^^vo^L', '', {'retval': {'already_retained': True}}), 'dispatch_async': (b'v@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_group_notify': (b'v@@@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_block_create_with_qos_class': (b'@?LIi@?', '', {'retval': {'callable': {'retval': {'type': 'v'}, 'arguments': {0: {'type': '^v'}}}, 'already_retained': True}, 'arguments': {3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': 
'^v'}}}}}}), 'dispatch_io_set_low_water': (b'v@L',), 'dispatch_data_get_size': (b'L@',), 'dispatch_block_wait': (b'l@?Q', '', {'arguments': {0: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_group_leave': (b'v@',), 'dispatch_source_get_handle': (b'L@',), 'dispatch_apply': (b'vL@@?', '', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}, 1: {'type': 'L'}}}}}}), 'dispatch_data_create': (b'@^vL@@?', '', {'retval': {'already_retained': True}, 'arguments': {0: {'type_modifier': 'n', 'c_array_length_in_arg': 1}, 3: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_queue_attr_make_with_autorelease_frequency': (b'@@L',), 'dispatch_source_set_cancel_handler': (b'v@@?', '', {'arguments': {1: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': '^v'}}}}}}), 'dispatch_queue_create_with_target': (b'@^t@@', '', {'comment': 'XXX: V2 API', 'arguments': {0: {'c_array_delimited_by_null': True, 'type_modifier': 'n'}}})} -misc.update({'dispatch_source_t': objc.createOpaquePointerType('dispatch_source_t', b'^{dispatch_source_type_s=}')}) -expressions = {'DISPATCH_CURRENT_QUEUE_LABEL': 'None', 'DISPATCH_QUEUE_SERIAL': 'None', 'DISPATCH_QUEUE_SERIAL_WITH_AUTORELEASE_POOL': 'dispatch_queue_attr_make_with_autorelease_frequency(DISPATCH_QUEUE_SERIAL, DISPATCH_AUTORELEASE_FREQUENCY_WORK_ITEM)', 'DISPATCH_APPLY_AUTO': 'None', 'DISPATCH_QUEUE_SERIAL_INACTIVE': 'dispatch_queue_attr_make_initially_inactive(DISPATCH_QUEUE_SERIAL)', 'DISPATCH_DATA_DESTRUCTOR_DEFAULT': 'None', 'DISPATCH_QUEUE_CONCURRENT_INACTIVE': 'dispatch_queue_attr_make_initially_inactive(DISPATCH_QUEUE_CONCURRENT)', 'DISPATCH_QUEUE_CONCURRENT_WITH_AUTORELEASE_POOL': 'dispatch_queue_attr_make_with_autorelease_frequency(DISPATCH_QUEUE_CONCURRENT, DISPATCH_AUTORELEASE_FREQUENCY_WORK_ITEM)', 'DISPATCH_TARGET_QUEUE_DEFAULT': 'None'} - -# END OF FILE diff --git a/env/lib/python2.7/site-packages/libdispatch/_metadata.pyc b/env/lib/python2.7/site-packages/libdispatch/_metadata.pyc deleted file mode 100644 index bc34bcfe..00000000 Binary files a/env/lib/python2.7/site-packages/libdispatch/_metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/__init__.py b/env/lib/python2.7/site-packages/objc/__init__.py deleted file mode 100644 index 8d01d687..00000000 --- a/env/lib/python2.7/site-packages/objc/__init__.py +++ /dev/null @@ -1,82 +0,0 @@ -""" -Python <-> Objective-C bridge (PyObjC) - -This module defines the core interfaces of the Python<->Objective-C bridge. 
-""" -import sys - -# Aliases for some common Objective-C constants -nil = None -YES = True -NO = False - -# Import the namespace from the _objc extension -def _update(g=globals()): - import objc._objc as _objc - for k in _objc.__dict__: - g.setdefault(k, getattr(_objc, k)) -_update() -del _update - -from objc._convenience import * -from objc._convenience_nsobject import * -from objc._convenience_nsdecimal import * -from objc._convenience_nsdata import * -from objc._convenience_nsdictionary import * -from objc._convenience_nsset import * -from objc._convenience_nsarray import * -from objc._convenience_nsstring import * -from objc._convenience_mapping import * -from objc._convenience_sequence import * - -from objc._bridgesupport import * - -from objc._dyld import * -from objc._protocols import * -from objc._descriptors import * -from objc._category import * -from objc._bridges import * -from objc._pythonify import * -from objc._locking import * -from objc._context import * -from objc._properties import * -from objc._lazyimport import * -from objc._compat import * -import objc._callable_docstr - -import objc._pycoder as _pycoder - -# Helper function for new-style metadata modules -def _resolve_name(name): - if '.' not in name: - raise ValueError(name) - - module, name = name.rsplit('.', 1) - m = __import__(module) - for k in module.split('.')[1:]: - m = getattr(m, k) - - return getattr(m, name) - - - -_NSAutoreleasePool = None -class autorelease_pool(object): - """ - A context manager that implements the same feature as - @synchronized statements in Objective-C. Locking can also - be done manually using the ``lock`` and ``unlock`` methods. - - The mutex for object ``anObject`` is represented by - ``objc.object_lock(anObject)``. - """ - def __init__(self): - global _NSAutoreleasePool - if _NSAutoreleasePool is None: - _NSAutoreleasePool = objc.lookUpClass('NSAutoreleasePool') - - def __enter__(self): - self._pool = _NSAutoreleasePool.alloc().init() - - def __exit__(self, type, value, tp): - del self._pool diff --git a/env/lib/python2.7/site-packages/objc/__init__.pyc b/env/lib/python2.7/site-packages/objc/__init__.pyc deleted file mode 100644 index 6926b9e8..00000000 Binary files a/env/lib/python2.7/site-packages/objc/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_bridges.py b/env/lib/python2.7/site-packages/objc/_bridges.py deleted file mode 100644 index 2209cad7..00000000 --- a/env/lib/python2.7/site-packages/objc/_bridges.py +++ /dev/null @@ -1,67 +0,0 @@ -from objc._objc import * -from objc import _objc -import struct -import sys -import datetime - -if sys.version_info[0] == 2: - import collections as collections_abc -else: - import collections.abc as collections_abc - -__all__ = [ 'registerListType', 'registerMappingType', 'registerSetType', 'registerDateType' ] - -def registerListType(type): - """ - Register 'type' as a list-like type that will be proxied - as an NSMutableArray subclass. - """ - if options._sequence_types is None: - options._sequence_types = () - - options._sequence_types += (type,) - -def registerMappingType(type): - """ - Register 'type' as a dictionary-like type that will be proxied - as an NSMutableDictionary subclass. - """ - if options._mapping_types is None: - options._mapping_types = () - - options._mapping_types += (type,) - -def registerSetType(type): - """ - Register 'type' as a set-like type that will be proxied - as an NSMutableSet subclass. 
- """ - if options._set_types is None: - options._set_types = () - - options._set_types += (type,) - -def registerDateType(type): - """ - Register 'type' as a date-like type that will be proxied - as an NSDate subclass. - """ - if options._date_types is None: - options._date_types = () - - options._date_types += (type,) - - -registerListType(collections_abc.Sequence) -registerListType(xrange if sys.version_info[0] == 2 else range) -registerMappingType(collections_abc.Mapping) -registerMappingType(dict) -registerSetType(set) -registerSetType(frozenset) -registerSetType(collections_abc.Set) -registerDateType(datetime.date) -registerDateType(datetime.datetime) - -if sys.version_info[0] == 2: - import UserDict - registerMappingType(UserDict.UserDict) diff --git a/env/lib/python2.7/site-packages/objc/_bridges.pyc b/env/lib/python2.7/site-packages/objc/_bridges.pyc deleted file mode 100644 index 803c1ef6..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_bridges.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_bridgesupport.py b/env/lib/python2.7/site-packages/objc/_bridgesupport.py deleted file mode 100644 index 0d81bf62..00000000 --- a/env/lib/python2.7/site-packages/objc/_bridgesupport.py +++ /dev/null @@ -1,845 +0,0 @@ -""" -Backward compatibity with bridgesupport files -""" -__all__ = ('initFrameworkWrapper', 'parseBridgeSupport') - -import sys -import xml.etree.ElementTree as ET -import ctypes -import objc -import re -import warnings -import functools -import pkg_resources -import os - - -for method in (b'alloc', b'copy', b'copyWithZone:', b'mutableCopy', b'mutableCopyWithZone:'): - objc.registerMetaDataForSelector(b'NSObject', method, - { - 'retval': { 'already_retained': True }, - }) - - -# -# The rest of this file contains support for bridgesupport -# XML files. - -# NOTE: This search path only contains system locations to -# avoid accidently reiying on system-specific functionality. -BRIDGESUPPORT_DIRECTORIES = [ - '/System/Library/BridgeSupport', -] - -_SENTINEL=object() -_DEFAULT_SUGGESTION="don't use this method" -_BOOLEAN_ATTRIBUTES=[ - "already_retained", - "already_cfretained", - "c_array_length_in_result", - "c_array_delimited_by_null", - "c_array_of_variable_length", - "printf_format", - "free_result", -] - - -if sys.version_info[0] == 2: # pragma: no 3.x cover - def _as_bytes(value): - return value - - def _as_string(value): - return value - -else: # pragma: no 2.x cover - def _as_bytes(value): - if isinstance(value, bytes): - return value - return value.encode('ascii') - - def _as_string(value): - if isinstance(value, bytes): - return value.decode('ascii') - return value - -class _BridgeSupportParser (object): - """ - Parser for the bridge support file format. - - Instances of this class will not update the bridge state, - this makes it easier to test the class. 
- """ - TAG_MAP={} - - def __init__(self, xmldata, frameworkName): - self.frameworkName = frameworkName - - self.cftypes = [] - self.constants = [] - self.func_aliases = [] - self.functions = [] - self.informal_protocols = [] - self.meta = {} - self.opaque = [] - self.structs = [] - self.values = {} - - self.process_data(xmldata) - - def process_data(self, xmldata): - root = ET.fromstring(xmldata.strip()) - - if root.tag != 'signatures': - raise objc.error("invalid root node in bridgesupport file") - - for node in root: - method = getattr(self, 'do_%s'%(node.tag,), None) - if method is None: - continue - - method(node) - - def typestr2typestr(self, typestr): - typestr = _as_bytes(typestr) - - # As of macOS 10.13 metadata files may contain - # typestring that end with property specific data; - # first remove that junk. - if b',' in typestr: - typestr = typestr.split(b',', 1)[0] - - result = [] - for item in objc.splitSignature(typestr): - if item == objc._C_BOOL: - result.append(objc._C_NSBOOL) - - elif item == objc._C_NSBOOL: - result.append(objc._C_BOOL) - - elif item.startswith(objc._C_STRUCT_B) or item.startswith(objc._C_UNION_B): - # unions and structs have the same structure - start, stop = item[:1], item[-1:] - - name, fields = objc.splitStructSignature(objc._C_STRUCT_B + _as_bytes(item[1:-1]) + objc._C_STRUCT_E) - result.append(start) - if name is not None: - result.append(_as_bytes(name)) - result.append(b'=') - for nm, tp in fields: - if nm is not None: - result.append(b'"') - result.append(_as_bytes(nm)) - result.append(b'"') - - result.append(self.typestr2typestr(tp)) - result.append(stop) - - - elif item.startswith(objc._C_ARY_B): - m = re.match(br'^.(\d*)(.*).$', item) - result.append(objc._C_ARY_B) - result.append(m.group(1)) - result.append(self.typestr2typestr(m.group(2))) - result.append(objc._C_ARY_E) - - else: - result.append(item) - - return b''.join(result) - - - if sys.maxsize > 2**32: - def attribute_string(self, node, name, name64): - if name64 is not None: - value = node.get(name64) - if value is not None: - return value - - return node.get(name) - - else: - def attribute_string(self, node, name, name64): - return node.get(name) - - def attribute_bool(self, node, name, name64, dflt): - value = self.attribute_string(node, name, name64) - if value is None: - return dflt - - if value == "true": - return True - - return False - - def import_name(self, name): - module, field = name.rsplit('.', 1) - m = __import__(module) - try: - for nm in module.split('.')[1:]: - m = getattr(m, nm) - - return getattr(m, field) - - except AttributeError: - raise ImportError(name) - - - def xml_to_arg(self, node, is_method, is_arg): - argIdx = None - result = {} - - if is_arg and is_method: - argIdx = self.attribute_string(node, "index", None) - if argIdx is None: - return None, None - argIdx = int(argIdx) - - s = self.attribute_string(node, "type", "type64") - if s: - s = self.typestr2typestr(s) - result["type"] = s - - s = self.attribute_string(node, "type_modifier", None) - if s: - result["type_modifier"] = _as_bytes(s) - - s = self.attribute_string(node, "sel_of_type", "sel_of_type64") - if s: - result["sel_of_type"] = self.typestr2typestr(s) - - s = self.attribute_string(node, "c_array_of_fixed_length", None) - if s: - result["c_array_of_fixed_length"] = int(s) - - for attr in _BOOLEAN_ATTRIBUTES: - if attr == 'c_array_length_in_result' and not is_arg: - continue - s = self.attribute_bool(node, attr, None, False) - if s: - result[attr] = True - - s = self.attribute_bool(node, 
"null_accepted", None, True) - if not s: - result["null_accepted"] = False - - s = self.attribute_string(node, "c_array_length_in_arg", None) - if s: - if ',' in s: - start, stop = map(int, s.split(',')) - if is_method: - start += 2 - stop += 2 - - result["c_array_length_in_arg"] = (start, stop) - - else: - s = int(s) - if is_method: - s += 2 - - result["c_array_length_in_arg"] = s - - if self.attribute_bool(node, "function_pointer", None, False) \ - or self.attribute_bool(node, "block", None, False): - - v = self.attribute_bool(node, "function_pointer_retained", None, True) - result["callable_retained"] = v - - meta = result["callable"] = {} - arguments = meta["arguments"] = {} - idx = 0 - - if self.attribute_bool(node, "block", None, False): - # Blocks have an implicit first argument - arguments[idx] = { - "type": b"^v", - } - idx += 1 - - for al in node: - if al.tag == "arg": - _, d = self.xml_to_arg(al, False, False) - arguments[idx] = d - idx += 1 - - elif al.tag == "retval": - _, d = self.xml_to_arg(al, False, False) - meta["retval"] = d - - return argIdx, result - - def do_cftype(self, node): - name = self.attribute_string(node, "name", None) - typestr = self.attribute_string(node, "type", "type64") - funcname = self.attribute_string(node, "gettypeid_func", None) - tollfree = self.attribute_string(node, "tollfree", None) - - if not name or not typestr: - return - - typestr = self.typestr2typestr(typestr) - - if tollfree: - self.cftypes.append((name, typestr, None, tollfree)) - - else: - if funcname is None: - funcname = name[:-3] + 'GetTypeID' - - try: - dll = ctypes.CDLL(None) - gettypeid = getattr(dll, funcname) - gettypeid.restype = ctypes.c_long - except AttributeError: - self.cftypes.append((name, typestr, None, "NSCFType")) - return - - self.cftypes.append((name, typestr, gettypeid())) - - - def do_constant(self, node): - name = self.attribute_string(node, "name", None) - typestr = self.attribute_string(node, "type", "type64") - - if name is None or not typestr: - return - - typestr = self.typestr2typestr(typestr) - - if typestr.startswith(objc._C_STRUCT_B): - # Look for structs with embbeded function pointers - # and ignore those - - def has_embedded_function(typestr): - nm, fields = objc.splitStructSignature(_as_bytes(typestr)) - for nm, tp in fields: - if tp == b'?': - return True - elif tp == b'^?': - return True - elif tp.startswith(objc._C_STRUCT_B): - return has_embedded_function(tp) - - return False - - if has_embedded_function(typestr): - return - - magic = self.attribute_bool(node, "magic_cookie", None, False) - self.constants.append((name, typestr, magic)) - - - def do_class(self, node): - class_name = self.attribute_string(node, "name", None) - if not class_name: - return - - for method in node: - if method.tag != "method": - continue - - sel_name = self.attribute_string(method, "selector", None) - if sel_name is None: - continue - - sel_name = _as_bytes(sel_name) - variadic = self.attribute_bool( method, "variadic", None, False) - c_array = self.attribute_bool( method, "c_array_delimited_by_null", None, False) - c_length = self.attribute_string(method, "c_array_length_in_arg", None) - ignore = self.attribute_bool( method, "ignore", None, False) - - is_class = self.attribute_bool(method, "classmethod", None, _SENTINEL) - if is_class is _SENTINEL: - # Manpage says 'class_method', older PyObjC used 'classmethod' - is_class = self.attribute_bool(method, "class_method", None, False) - - - metadata = {} - if ignore: - suggestion = self.attribute_string(method, 
"suggestion", None) - if not suggestion: - suggestion = _DEFAULT_SUGGESTION - - metadata['suggestion'] = suggestion - - # Force minimal metadata for ignored methods - self.meta[(_as_bytes(class_name), _as_bytes(sel_name), is_class)] = metadata - continue - - if variadic: - metadata['variadic'] = True - if c_array: - metadata['c_array_delimited_by_null'] = c_array - - if c_length: - metadata['c_array_length_in_arg'] = int(c_length) + 2; - - arguments = metadata['arguments'] = {} - - for al in method: - if al.tag == "arg": - arg_idx, meta = self.xml_to_arg(al, True, True) - if arg_idx is not None and meta: - arguments[arg_idx+2] = meta - - elif al.tag == "retval": - _, meta = self.xml_to_arg(al, True, False) - if meta: - metadata['retval'] = meta - - if not arguments: - # No argument metadata after all. - del metadata['arguments'] - - if metadata: - self.meta[(_as_bytes(class_name), _as_bytes(sel_name), is_class)] = metadata - - - def do_enum(self, node): - name = self.attribute_string(node, "name", None) - value = self.attribute_string(node, "value", "value64") - - if value is None: - if sys.byteorder == 'little': - value = self.attribute_string(node, "le_value", None) - - else: - value = self.attribute_string(node, "be_value", None) - - if not name or not value: - return - - if value.lower() in ('+inf', '-inf', 'nan'): - value = float(value) - - elif '.' in value: - if value.endswith('f') or value.endswith('F'): - value = value[:-1] - if value.endswith('l') or value.endswith('L'): - value = value[:-1] - if value.startswith('0x') or value.startswith('0X'): - value = float.fromhex(value) - - else: - value = float(value) - - elif 'inf' in value: - value = float(value) - - else: - value = int(value, 10) - - self.values[name] = value - - - def do_function(self, node): - name = self.attribute_string(node, "name", None) - if not name: - return - - if self.attribute_bool(node, "ignore", None, False): - return - - meta = {} - siglist = [b"v"] - arguments = meta["arguments"] = {} - - variadic = self.attribute_bool(node, "variadic", None, False) - if variadic: - meta["variadic"] = True - v = self.attribute_bool(node, "c_array_delimited_by_null", None, False) - if v: - meta["c_array_delimited_by_null"] = True - - v = self.attribute_string(node, "c_array_length_in_arg", None) - if v: - meta["c_array_length_in_arg"] = int(v) - - for al in node: - if al.tag == "arg": - _, d = self.xml_to_arg(al, False, True) - if "type" not in d: - # Ignore functions without type info - return - siglist.append(d["type"]) - - arguments[len(siglist)-2] = d - - elif al.tag == "retval": - _, d = self.xml_to_arg(al, False, False) - if "type" not in d: - # Ignore functions without type info - return - siglist[0] = d["type"] - meta["retval"] = d - - if not meta['arguments']: - del meta['arguments'] - - self.functions.append((name, b"".join(siglist), "", meta)) - - - - def do_function_pointer(self, node): - name = self.attribute_string(node, "name", None) - original = self.attribute_string(node, "original", None) - if not name or not original: - return - - self.func_aliases.append((name, original)) - - def do_informal_protocol(self, node): - name = self.attribute_string(node, "name", None) - if not name: - return - - method_list = [] - for method in node: - sel_name = self.attribute_string(method, "selector", None) - typestr = self.attribute_string(method, "type", "type64") - is_class = self.attribute_bool(method, "classmethod", None, _SENTINEL) - if is_class is _SENTINEL: - # Manpage says 'class_method', older PyObjC used 
'classmethod' - is_class = self.attribute_bool(method, "class_method", None, False) - - if not sel_name or not typestr: - continue - - typestr = self.typestr2typestr(typestr) - sel = objc.selector(None, selector=_as_bytes(sel_name), - signature=_as_bytes(typestr), isClassMethod=is_class) - method_list.append(sel) - - if method_list: - self.informal_protocols.append((name, method_list)) - - def do_null_const(self, node): - name = self.attribute_string(node, "name", None) - if not name: - return - - self.values[name] = None - - def do_opaque(self, node): - name = self.attribute_string(node, "name", None) - typestr = self.attribute_string(node, "type", "type64") - - if name is None or not typestr: - return - - typestr = self.typestr2typestr(typestr) - - self.opaque.append((name, typestr)) - - - def do_struct(self, node): - name = self.attribute_string(node, "name", None) - typestr = self.attribute_string(node, "type", "type64") - alias = self.attribute_string(node, "alias", None) - - if not name or not typestr: - return - - # Apple's bridgesupport files contain nice encoding like this: - # {tag="field"a"NSImage"}, that is not only are field names encoded - # but also class names. This is obviously completely undocumented, - # and not backward compatible (and it is not easily possible to detect - # if class names are present. - typestr = re.sub(r'@"[^"]*"', '@', typestr) - - typestr = self.typestr2typestr(typestr) - - if alias: - try: - value = self.import_name(alias) - except ImportError: - # Fall through to regular handling - pass - - else: - self.structs.append((name, typestr, value)) - return - - self.structs.append((name, typestr, None)) - - - def do_string_constant(self, node): - name = self.attribute_string(node, "name", None) - value = self.attribute_string(node, "value", "value64") - nsstring = self.attribute_bool(node, "nsstring", None, False) - - if not name or not value: - return - - if sys.version_info[0] == 2: # pragma: no 3.x cover - if nsstring: - if not isinstance(value, unicode): - value = value.decode('utf-8') - else: - if not isinstance(value, bytes): - try: - value = value.encode('latin1') - except UnicodeError as e: - warnings.warn("Error parsing BridgeSupport data for constant %s: %s" % (name, e), RuntimeWarning) - return - else: # pragma: no 2.x cover - if not nsstring: - try: - value = value.encode('latin1') - except UnicodeError as e: - warnings.warn("Error parsing BridgeSupport data for constant %s: %s" % (name, e), RuntimeWarning) - return - - self.values[name] = value - - -_libraries = [] - -def parseBridgeSupport(xmldata, globals, frameworkName, dylib_path=None, inlineTab=None): - - if dylib_path: - lib = ctypes.cdll.LoadLibrary(dylib_path) - _libraries.append(lib) - - objc._updatingMetadata(True) - try: - prs = _BridgeSupportParser(xmldata, frameworkName) - - globals.update(prs.values) - for entry in prs.cftypes: - tp = objc.registerCFSignature(*entry) - - globals[entry[0]] = tp - - for name, typestr in prs.opaque: - globals[name] = objc.createOpaquePointerType(name, typestr) - - for name, typestr, alias in prs.structs: - if alias is not None: - globals[name] = alias - objc.createStructAlias(name, typestr, alias) - else: - globals[name] = value = objc.createStructType(name, typestr, None) - - - for name, typestr, magic in prs.constants: - try: - value = objc._loadConstant(name, _as_string(typestr), magic) - except AttributeError: - continue - - globals[name] = value - - for class_name, sel_name, is_class in prs.meta: - objc.registerMetaDataForSelector(class_name, 
sel_name, prs.meta[(class_name, sel_name, is_class)]) - - if prs.functions: - objc.loadBundleFunctions(None, globals, prs.functions) - - if inlineTab is not None: - objc.loadFunctionList(inlineTab, globals, prs.functions) - - for name, orig in prs.func_aliases: - try: - globals[name] = globals[orig] - except KeyError: - pass - - finally: - objc._updatingMetadata(False) - - - - - -def _parseBridgeSupport(data, globals, frameworkName, *args, **kwds): - try: - objc.parseBridgeSupport(data, globals, frameworkName, *args, **kwds) - except objc.internal_error as e: - import warnings - warnings.warn("Error parsing BridgeSupport data for %s: %s" % (frameworkName, e), RuntimeWarning) - -def safe_resource_exists(package, resource): - try: - return pkg_resources.resource_exists(package, resource) - except ImportError: - # resource_exists raises ImportError when it cannot find - # the first argument. - return False - -def initFrameworkWrapper(frameworkName, - frameworkPath, frameworkIdentifier, globals, inlineTab=None, - scan_classes=None, frameworkResourceName=None): - """ - Load the named framework, using the identifier if that has result otherwise - using the path. Also loads the information in the bridgesupport file ( - either one embedded in the framework or one in a BrigeSupport library - directory). - """ - if frameworkResourceName is None: - frameworkResourceName = frameworkName - - if frameworkIdentifier is None: - if scan_classes is None: - bundle = objc.loadBundle( - frameworkName, - globals, - bundle_path=frameworkPath) - else: - bundle = objc.loadBundle( - frameworkName, - globals, - bundle_path=frameworkPath, - scan_classes=scan_classes) - - else: - try: - if scan_classes is None: - bundle = objc.loadBundle( - frameworkName, - globals, - bundle_identifier=frameworkIdentifier) - - else: - bundle = objc.loadBundle( - frameworkName, - globals, - bundle_identifier=frameworkIdentifier, - scan_classes=scan_classes) - - except ImportError: - if scan_classes is None: - bundle = objc.loadBundle( - frameworkName, - globals, - bundle_path=frameworkPath) - else: - bundle = objc.loadBundle( - frameworkName, - globals, - bundle_path=frameworkPath, - scan_classes=scan_classes) - - - # Make the objc module available, because it contains a lot of useful - # functionality. - globals['objc'] = objc - - # Explicitly push objc.super into the globals dict, that way super - # calls will behave as expected in all cases. - globals['super'] = objc.super - - # Look for metadata in the Python wrapper and prefer that over the - # data in the framework or in system locations. - # Needed because the system bridgesupport files are buggy. 
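# A minimal usage sketch, assuming macOS with pyobjc-core installed: this is
# roughly how a framework wrapper module would invoke the
# initFrameworkWrapper() loader defined above. The identifier and path are
# the standard values for Apple's Foundation framework; `wrapper_globals`
# is a hypothetical dict standing in for the wrapper module's namespace.
import objc

wrapper_globals = {}
bundle = objc.initFrameworkWrapper(
    "Foundation",
    frameworkIdentifier="com.apple.Foundation",
    frameworkPath=objc.pathForFramework(
        "/System/Library/Frameworks/Foundation.framework"),
    globals=wrapper_globals,
)
# Classes scanned from the bundle and any bridgesupport constants are then
# available through wrapper_globals, e.g. wrapper_globals["NSString"].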
- if safe_resource_exists(frameworkResourceName, "PyObjC.bridgesupport"): - data = pkg_resources.resource_string(frameworkResourceName, - "PyObjC.bridgesupport") - _parseBridgeSupport(data, globals, frameworkName, inlineTab=inlineTab) - return bundle - - # Look for metadata in the framework bundle - path = bundle.pathForResource_ofType_inDirectory_(frameworkName, 'bridgesupport', 'BridgeSupport') - if path is not None: - dylib_path = bundle.pathForResource_ofType_inDirectory_(frameworkName, 'dylib', 'BridgeSupport') - with open(path, 'rb') as fp: - data = fp.read() - if dylib_path is not None: - _parseBridgeSupport(data, globals, frameworkName, dylib_path=dylib_path) - else: - _parseBridgeSupport(data, globals, frameworkName) - - # Check if we have additional metadata bundled with PyObjC - if safe_resource_exists(frameworkResourceName, "PyObjCOverrides.bridgesupport"): - data = pkg_resources.resource_string(frameworkResourceName, - "PyObjCOverrides.bridgesupport") - _parseBridgeSupport(data, globals, frameworkName, inlineTab=inlineTab) - - return bundle - - # If there is no metadata there look for metadata in the standard Library - # locations - fn = frameworkName + '.bridgesupport' - for dn in BRIDGESUPPORT_DIRECTORIES: - path = os.path.join(dn, fn) - if os.path.exists(path): - with open(path, 'rb') as fp: - data = fp.read() # pragma: no branch - - dylib_path = os.path.join(dn, frameworkName + '.dylib') - if os.path.exists(dylib_path): - _parseBridgeSupport(data, globals, frameworkName, dylib_path=dylib_path) - else: - _parseBridgeSupport(data, globals, frameworkName) - - # Check if we have additional metadata bundled with PyObjC - if safe_resource_exists(frameworkResourceName, "PyObjCOverrides.bridgesupport"): - data = pkg_resources.resource_string(frameworkResourceName, - "PyObjCOverrides.bridgesupport") - _parseBridgeSupport(data, globals, frameworkName, inlineTab=inlineTab) - - return bundle - - return bundle - -_ivar_dict = objc._objc._ivar_dict() -if hasattr(objc, '_ivar_dict'): - del objc._ivar_dict -def _structConvenience(structname, structencoding): - def makevar(self, name=None): - if name is None: - return objc.ivar(type=structencoding) - else: - return objc.ivar(name=name, type=structencoding) - makevar.__name__ = structname - makevar.__doc__ = "Create *ivar* for type encoding %r" % (structencoding,) - if hasattr(objc.ivar, '__qualname__'): - makevar.__qualname__ = objc.ivar.__qualname__ + "." 
+ structname - _ivar_dict[structname] = classmethod(makevar) - - -# Fake it for basic C types -_structConvenience("bool", objc._C_BOOL) -_structConvenience("char", objc._C_CHR) -_structConvenience("int", objc._C_INT) -_structConvenience("short", objc._C_SHT) -_structConvenience("long", objc._C_LNG) -_structConvenience("long_long", objc._C_LNG_LNG) -_structConvenience("unsigned_char", objc._C_UCHR) -_structConvenience("unsigned_int", objc._C_UINT) -_structConvenience("unsigned_short", objc._C_USHT) -_structConvenience("unsigned_long", objc._C_ULNG) -_structConvenience("unsigned_long_long", objc._C_ULNG_LNG) -_structConvenience("float", objc._C_FLT) -_structConvenience("double", objc._C_DBL) -_structConvenience("BOOL", objc._C_NSBOOL) -_structConvenience("UniChar", objc._C_UNICHAR) -_structConvenience("char_text", objc._C_CHAR_AS_TEXT) -_structConvenience("char_int", objc._C_CHAR_AS_INT) - -_orig_createStructType = objc.createStructType - -@functools.wraps(objc.createStructType) -def createStructType(name, typestr, fieldnames, doc=None, pack=-1): - result = _orig_createStructType(name, typestr, fieldnames, doc, pack) - _structConvenience(name, result.__typestr__) - return result - -objc.createStructType = createStructType - - -_orig_registerStructAlias = objc.registerStructAlias -@functools.wraps(objc.registerStructAlias) -def registerStructAlias(typestr, structType): - # XXX: Disable deprecation warnings, this function is used by - # the framework wrappers. - #warnings.warn("use createStructAlias instead", DeprecationWarning) - return _orig_registerStructAlias(typestr, structType) - -def createStructAlias(name, typestr, structType): - result = _orig_registerStructAlias(typestr, structType) - _structConvenience(name, result.__typestr__) - return result - -objc.createStructAlias = createStructAlias -objc.registerStructAlias = registerStructAlias diff --git a/env/lib/python2.7/site-packages/objc/_bridgesupport.pyc b/env/lib/python2.7/site-packages/objc/_bridgesupport.pyc deleted file mode 100644 index ceb1718c..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_bridgesupport.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_callable_docstr.py b/env/lib/python2.7/site-packages/objc/_callable_docstr.py deleted file mode 100644 index ed17fe1e..00000000 --- a/env/lib/python2.7/site-packages/objc/_callable_docstr.py +++ /dev/null @@ -1,256 +0,0 @@ -__all__ = () -import sys -import objc -from objc._objc import _nameForSignature - -basic_types = { - objc._C_VOID: "void", - objc._C_INT: "int", - objc._C_UINT: "unsigned int", - objc._C_LNG: "long", - objc._C_ULNG: "unsigned long", - objc._C_LNG_LNG: "long long", - objc._C_ULNG_LNG: "unsigned long long", - objc._C_FLT: "float", - objc._C_DBL: "double", - objc._C_SHT: "short", - objc._C_USHT: "unsigned short", - objc._C_CHR: "char", - objc._C_UCHR: "unsigned char", - objc._C_CHAR_AS_INT: "int8_t", - objc._C_CHAR_AS_TEXT: "char", - objc._C_UNICHAR: "UniChar", - objc._C_BOOL: "bool", - objc._C_NSBOOL: "BOOL", - objc._C_ID: "id", - objc._C_CLASS: "Class", - objc._C_SEL: "SEL", - objc._C_CHARPTR: "char*", -} - -prefixes = { - objc._C_IN: "in ", - objc._C_OUT: "out ", - objc._C_INOUT: "inout ", - objc._C_CONST: "const ", - objc._C_ONEWAY: "oneway ", -} - -def describe_type(typestr): - - nm = basic_types.get(typestr) - if nm is not None: - return nm - - if typestr == b"^?": - return "" - elif typestr == b"@?": - return "" - - if typestr.startswith(objc._C_PTR): - nm = _nameForSignature(typestr) - if nm is not None: - return 
nm - return describe_type(typestr[1:]) + '*' - - if typestr[:1] in prefixes: - return prefixes[typestr[:1]] + describe_type(typestr[1:]) - - if typestr.startswith(objc._C_STRUCT_B): - nm = _nameForSignature(typestr) - if nm is not None: - return nm - - typestr = typestr[1:] - idx = typestr.find(b'=') - if idx == -1: - return 'struct ' - - else: - nm = typestr[:idx] - if not nm: - nm = b'' - return 'struct %s'%(nm.decode('utf-8'),) - - - if typestr.startswith(objc._C_ARY_B): - typestr = typestr[1:] - d = b'' - while typestr[:1].isdigit(): - d += typestr[:1] - typestr = typestr[1:] - - return '%s[%s]' % (describe_type(typestr), d.decode('utf-8')) - - if typestr.startswith(objc._C_UNION_B): - typestr = typestr[1:] - idx = typestr.find(b'=') - if idx == -1: - return 'union ' - - else: - nm = typestr[:idx] - if not nm: - nm = b'' - return 'union %s'%(nm.decode('utf-8'),) - - return "" - -def describe_callable(callable): - name = callable.__name__ - try: - metadata = callable.__metadata__() - except objc.internal_error: - return None - - return describe_callable_metadata(name, metadata, ismethod=isinstance(callable, objc.selector)) - -def describe_callable_metadata(name, metadata, offset='', ismethod=False): - arg_info = [] - if ismethod: - arg_offset = 2 - - name_parts = name.split(':') - hdr_name = [] - if len(metadata['arguments']) > arg_offset: - for idx, (nm, info) in enumerate(zip(name_parts, metadata['arguments'][arg_offset:])): - if hdr_name: - hdr_name.append(' ') - hdr_name.append(nm) - hdr_name.append(':(') - hdr_name.append(describe_type(info['type'])) - hdr_name.append(')arg%d'%(idx,)) - - if info['type'][:1] in prefixes and info['type'][:1] not in (objc._C_ONEWAY, objc._C_CONST): - arg_info.append((idx, info)) - elif info.get('printf_format'): - arg_info.append((idx, info)) - elif info.get('callable'): - arg_info.append((idx, info)) - - else: - hdr_name.append(name) - - if metadata.get('variadic'): - hdr_name.append(", ...") - - header = "%s (%s)%s;"%( - "+" if metadata['classmethod'] else "-", - describe_type(metadata['retval']['type']), - ''.join(hdr_name)) - else: - hdr_name = [] - arg_offset = 0 - for idx, info in enumerate(metadata['arguments']): - if idx != 0: - hdr_name.append(', ') - hdr_name.append(describe_type(info['type'])) - hdr_name.append(' arg%d'%(idx,)) - if info['type'][:1] in prefixes and info['type'][:1] not in (objc._C_ONEWAY, objc._C_CONST): - arg_info.append((idx, info)) - elif info.get('printf_format'): - arg_info.append((idx, info)) - elif info.get('callable'): - arg_info.append((idx, info)) - if metadata.get('variadic'): - hdr_name.append(", ...") - - if not hdr_name: - hdr_name.append('void') - - header = "%s %s(%s);"%( - describe_type(metadata['retval']['type']), - name, - ''.join(hdr_name)) - - result = [header] - if metadata.get('suggestion'): - result.append('') - result.append('WARNING: %s'%(metadata['suggestion'],)) - - if arg_info: - result.append('') - for idx, info in arg_info: - if info.get('printf_format'): - result.append('arg%d: %%-style format string'%(idx,)) - continue - - elif info.get('callable'): - result.append('arg%d: %s'%(idx, describe_callable_metadata('callback', info['callable'], offset=' ' + offset))) - continue - - else: - arg = info.get('c_array_length_in_arg') - if arg is not None: - if isinstance(arg, tuple): - result.append('arg%d: array with length on input in arg%d, and output in arg%d'%(idx, arg[0] - arg_offset, arg[1] - arg_offset)) - else: - if info.get('c_array_length_in_result'): - result.append('arg%d: array with 
length on input in arg%d, and output in return value'%(idx, arg - arg_offset)) - else: - result.append('arg%d: array with length in arg%d'%(idx, arg - arg_offset)) - continue - - if info.get('c_array_length_in_result'): - result.append('arg%d: array with length in return value'%(idx,)) - continue - - if info.get('c_array_of_fixed_length'): - result.append('arg%d: array with length %d'%(idx, info.get('c_array_of_fixed_length'))) - continue - - if info.get('c_array_of_variable_length'): - result.append('arg%d: array with unknown length'%(idx,)) - continue - - if info.get('c_array_delimited_by_null'): - result.append('arg%d: array (will be NULL terminated in C)'%(idx,)) - continue - - result.append('arg%d: pass-by-reference %sargument'%(idx, prefixes.get(info['type'][:1]),)) - - if len(metadata['arguments']) > arg_offset: - if metadata.get('variadic') and metadata.get('c_array_delimited_by_null'): - if not arg_info: - result.append('') - - result.append('Variadic arguments form an array of C type %s'%(describe_type(metadata['arguments'][-1]['type']),)) - - return ('\n'+offset).join(result).replace('\n' + offset + '\n', '\n\n') - -objc.options._callable_doc = describe_callable - -if hasattr(objc.options, '_callable_signature'): # pragma: no branch; pragma: no 2.x cover - import inspect - - def callable_signature(callable): - # Create an inspect.Signature for an PyObjC callable - # both objc.function and objc.native_selector only support positional - # arguments, and not keyword arguments. - try: - metadata = callable.__metadata__() - except objc.internal_error: - # This can happen with some private methods with undocumented - # characters in type encodings - return None - - ismethod = isinstance(callable, objc.selector) - - if ismethod: - args = metadata['arguments'][2:] # Skip 'self' and 'selector' implicit arguments - else: - args = metadata['arguments'] - - parameters = [] - for idx, arg in enumerate(args): - p_name = 'arg%d'%(idx,) - parameters.append( - inspect.Parameter( - p_name, - inspect.Parameter.POSITIONAL_ONLY - ) - ) - - return inspect.Signature(parameters) - - objc.options._callable_signature = callable_signature diff --git a/env/lib/python2.7/site-packages/objc/_callable_docstr.pyc b/env/lib/python2.7/site-packages/objc/_callable_docstr.pyc deleted file mode 100644 index 24d6b125..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_callable_docstr.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_category.py b/env/lib/python2.7/site-packages/objc/_category.py deleted file mode 100644 index 1d120104..00000000 --- a/env/lib/python2.7/site-packages/objc/_category.py +++ /dev/null @@ -1,73 +0,0 @@ -__all__ = ['classAddMethod', 'Category'] - -from objc._objc import selector, classAddMethods, objc_class, ivar - -from types import FunctionType, MethodType - -def classAddMethod(cls, name, method): - """ - Add a single method to a class. 'name' is the ObjC selector - """ - if isinstance(method, selector): - sel = selector(method.callable, - selector=name, - signature=method.signature, - isClassMethod=method.isClassMethod) - else: - sel = selector(method, selector=name) - - return classAddMethods(cls, [sel]) - - -# -# Syntactic support for categories -# - -class _CategoryMeta(type): - """ - Meta class for categories. 
- """ - __slots__ = () - _IGNORENAMES = ('__module__', '__name__', '__doc__') - def _newSubclass(cls, name, bases, methods): - return type.__new__(cls, name, bases, methods) - _newSubclass = classmethod(_newSubclass) - - def __new__(cls, name, bases, methods): - if len(bases) != 1: - raise TypeError("Cannot have multiple inheritance with Categories") - - c = bases[0].real_class - - if c.__name__ != name: - raise TypeError("Category name must be same as class name") - - - m = [ x[1] for x in methods.items() if x[0] not in cls._IGNORENAMES and isinstance(x[1], (FunctionType, MethodType, selector, classmethod))] - vars = [ x for x in methods.items() if x[0] not in cls._IGNORENAMES and not isinstance(x[1], (FunctionType, MethodType, selector, classmethod))] - for k, v in vars: - if isinstance(v, ivar): - raise TypeError("Cannot add instance variables in a Category") - - classAddMethods(c, m) - for k, v in vars: - setattr(c, k, v) - return c - -def Category(cls): - """ - Create a category on ``cls``. - - Usage: - class SomeClass (Category(SomeClass)): - def method(self): - pass - - ``SomeClass`` is an existing class that will be rebound to the same - value. The side-effect of this class definition is that the methods - in the class definition will be added to the existing class. - """ - if not isinstance(cls, objc_class): - raise TypeError("Category can only be used on Objective-C classes") - retval = _CategoryMeta._newSubclass('Category', (), dict(real_class=cls)) - return retval diff --git a/env/lib/python2.7/site-packages/objc/_category.pyc b/env/lib/python2.7/site-packages/objc/_category.pyc deleted file mode 100644 index 3dcfc3c8..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_category.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_compat.py b/env/lib/python2.7/site-packages/objc/_compat.py deleted file mode 100644 index 8b6514a2..00000000 --- a/env/lib/python2.7/site-packages/objc/_compat.py +++ /dev/null @@ -1,35 +0,0 @@ -from objc import options as _options -import warnings - - -def setVerbose(value): - warnings.warn("Set objc.options.verbose instead", DeprecationWarning) - _options.verbose = bool(value) - - -def getVerbose(): - warnings.warn("Read objc.options.verbose instead", DeprecationWarning) - return _options.verbose - - -def setUseKVOForSetattr(value): - warnings.warn("Set objc.options.use_kvo instead", DeprecationWarning, 2) - _options.use_kvo = bool(value) - - -def getUseKVOForSetattr(): - warnings.warn("Read objc.options.use_kvo instead", DeprecationWarning) - return _options.use_kvo - - -if hasattr(_options, "strbridge_enabled"): # pragma: no 3.x cover; pragma: no branch - - def setStrBridgeEnabled(value): - warnings.warn("Set objc.options.strbridge_enabled instead", DeprecationWarning) - _options.strbridge_enabled = bool(value) - - - def getStrBridgeEnabled(): - warnings.warn("Read objc.options.strbridge_enabled instead", DeprecationWarning) - return _options.strbridge_enabled - diff --git a/env/lib/python2.7/site-packages/objc/_compat.pyc b/env/lib/python2.7/site-packages/objc/_compat.pyc deleted file mode 100644 index 9d8e054e..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_context.py b/env/lib/python2.7/site-packages/objc/_context.py deleted file mode 100644 index 7c1e68cb..00000000 --- a/env/lib/python2.7/site-packages/objc/_context.py +++ /dev/null @@ -1,49 +0,0 @@ -""" -A number of Cocoa API's have a 'context' argument that is a 
plain 'void*' -in ObjC, and an Integer value in Python. The 'context' object defined here -allows you to get a unique integer number that can be used as the context -argument for any Python object, and retrieve that object later on using the -context number. - -Usage:: - - ... - ctx = objc.context.register(myContext) - someObject.observeValueForKeyPath_ofObject_change_context_( - kp, obj, {}, ctx) - ... - -and in the callback:: - - def observeValueForKeyPath_ofObject_change_context_(self, - kp, obj, change, ctx): - - myContext = objc.context.get(ctx) - ... - -Use ``objc.context.unregister`` to remove the registration of ``myObject`` -when you're done. The argument to unregister is the same object as was -passed in during registration. -""" - -__all__ = ('context',) - -class ContextRegistry (object): - def __init__(self): - self._registry = {} - - def register(self, object): - uniq = id(object) - self._registry[uniq] = object - return uniq - - def unregister(self, object): - try: - del self._registry[id(object)] - except KeyError: - pass - - def get(self, uniq): - return self._registry[uniq] - -context = ContextRegistry() diff --git a/env/lib/python2.7/site-packages/objc/_context.pyc b/env/lib/python2.7/site-packages/objc/_context.pyc deleted file mode 100644 index 5d4a76aa..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_context.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_convenience.py b/env/lib/python2.7/site-packages/objc/_convenience.py deleted file mode 100644 index a7859e0f..00000000 --- a/env/lib/python2.7/site-packages/objc/_convenience.py +++ /dev/null @@ -1,139 +0,0 @@ -""" -This module implements a callback function that is used by the C code to -add Python special methods to Objective-C classes with a suitable interface. -""" -from objc._objc import selector, lookUpClass, currentBundle, repythonify, splitSignature, _block_call, options -from objc._objc import registerMetaDataForSelector, _updatingMetadata, _rescanClass -import sys -import warnings -import collections - -__all__ = ( 'addConvenienceForClass', 'registerABCForClass') - -CLASS_METHODS = {} -CLASS_ABC = {} - -def register(f): - options._class_extender = f - -# XXX: interface is too wide (super_class is not needed, can pass actual class) -@register -def add_convenience_methods(cls, type_dict): - """ - Add additional methods to the type-dict of subclass 'name' of - 'super_class'. - - CLASS_METHODS is a global variable containing a mapping from - class name to a list of Python method names and implementation. - - Matching entries from both mappings are added to the 'type_dict'. - """ - for nm, value in CLASS_METHODS.get(cls.__name__, ()): - type_dict[nm] = value - - try: - for cls in CLASS_ABC[cls.__name__]: - cls.register(cls) - del CLASS_ABC[cls.__name__] - except KeyError: - pass - -def register(f): - options._make_bundleForClass = f - -@register -def makeBundleForClass(): - cb = currentBundle() - def bundleForClass(cls): - return cb - return selector(bundleForClass, isClassMethod=True) - -def registerABCForClass(classname, *abc_class): - """ - Register *classname* with the *abc_class*-es when - the class becomes available. 
- """ - try: - CLASS_ABC += tuple(abc_class) - except KeyError: - CLASS_ABC = tuple(abc_class) - - options._mapping_count += 1 - _rescanClass(classname) - - -def addConvenienceForClass(classname, methods): - """ - Add the list with methods to the class with the specified name - """ - try: - CLASS_METHODS[classname] += tuple(methods) - except KeyError: - CLASS_METHODS[classname] = tuple(methods) - - options._mapping_count += 1 - _rescanClass(classname) - - -# -# Helper functions for converting data item to/from a representation -# that is usable inside Cocoa data structures. -# -# In particular: -# -# - Python "None" is stored as +[NSNull null] because Cocoa containers -# won't store NULL as a value (and this transformation is undone when -# retrieving data) -# -# - When a getter returns NULL in Cocoa the queried value is not present, -# that's converted to an exception in Python. -# - -_NULL = lookUpClass('NSNull').null() - -def container_wrap(v): - if v is None: - return _NULL - return v - -def container_unwrap(v, exc_type, *exc_args): - if v is None: - raise exc_type(*exc_args) - elif v is _NULL: - return None - return v - -# -# -# Misc. small helpers -# -# - -if sys.version_info[0] == 2: # pragma: no 3.x cover - addConvenienceForClass('NSNull', ( - ('__nonzero__', lambda self: False ), - )) - - addConvenienceForClass('NSEnumerator', ( - ('__iter__', lambda self: self), - ('next', lambda self: container_unwrap(self.nextObject(), StopIteration)), - )) - -else: # pragma: no 2.x cover - addConvenienceForClass('NSNull', ( - ('__bool__', lambda self: False ), - )) - - addConvenienceForClass('NSEnumerator', ( - ('__iter__', lambda self: self), - ('__next__', lambda self: container_unwrap(self.nextObject(), StopIteration)), - )) - - -def __call__(self, *args, **kwds): - return _block_call(self, self.__block_signature__, args, kwds) - - -addConvenienceForClass('NSBlock', ( - ('__call__', __call__), -)) diff --git a/env/lib/python2.7/site-packages/objc/_convenience.pyc b/env/lib/python2.7/site-packages/objc/_convenience.pyc deleted file mode 100644 index dfa374da..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_convenience_mapping.py b/env/lib/python2.7/site-packages/objc/_convenience_mapping.py deleted file mode 100644 index 50b59297..00000000 --- a/env/lib/python2.7/site-packages/objc/_convenience_mapping.py +++ /dev/null @@ -1,134 +0,0 @@ -""" -Convenience methods for Cocoa mapping types. 
-""" -__all__ = ('addConvenienceForBasicMapping',) - -from objc._objc import selector -from objc._convenience import addConvenienceForClass, CLASS_ABC -from objc._convenience import container_unwrap, container_wrap -import sys - -if sys.version_info[0] == 2: - import collections as collections_abc -else: - import collections.abc as collections_abc - -def __getitem__objectForKey_(self, key): - res = self.objectForKey_(container_wrap(key)) - return container_unwrap(res, KeyError, key) - - -def get_objectForKey_(self, key, dflt=None): - res = self.objectForKey_(container_wrap(key)) - if res is None: - res = dflt - return res - - -def contains_objectForKey_(self, key): - res = self.objectForKey_(container_wrap(key)) - return res is not None - - -_CONVENIENCES_MAPPING_RO = ( - ('__getitem__', __getitem__objectForKey_), - ('get', get_objectForKey_), - ('__contains__', contains_objectForKey_), -) - -if sys.version_info[0] == 2: # pragma: no 3.x cover; pragma: no branch - _CONVENIENCES_MAPPING_RO += ( - ('has_key', contains_objectForKey_), - ) - - -def __delitem__removeObjectForKey_(self, key): - self.removeObjectForKey_(container_wrap(key)) - -def update_setObject_forKey_(self, *args, **kwds): - # XXX - should this be more flexible? - if len(args) == 0: - pass - elif len(args) != 1: - raise TypeError("update expected at most 1 arguments, got {0}".format( - len(args))) - - else: - other = args[0] - if hasattr(other, 'keys'): - # This mirrors the implementation of dict.update, but seems - # wrong for Python3 (with collectons.abc.Dict) - for key in other.keys(): - self[key] = other[key] - - else: - for key, value in other: - self[key] = value - - for k in kwds: - self[k] = kwds[k] - - -def setdefault_setObject_forKey_(self, key, dflt=None): - try: - return self[key] - except KeyError: - self[key] = dflt - return dflt - - -def __setitem__setObject_forKey_(self, key, value): - self.setObject_forKey_(container_wrap(value), container_wrap(key)) - - -_pop_setObject_dflt=object() -def pop_setObject_forKey_(self, key, dflt=_pop_setObject_dflt): - try: - res = self[key] - except KeyError: - if dflt == _pop_setObject_dflt: - raise KeyError(key) - res = dflt - else: - del self[key] - return res - - -def popitem_setObject_forKey_(self): - try: - it = self.keyEnumerator() - k = container_unwrap(it.nextObject(), StopIteration) - except (StopIteration, IndexError): - raise KeyError("popitem on an empty %s" % (type(self).__name__,)) - else: - result = (k, container_unwrap(self.objectForKey_(k), KeyError)) - self.removeObjectForKey_(k) - return result - - -_CONVENIENCES_MAPPING_RW = _CONVENIENCES_MAPPING_RO + ( - ('__delitem__', __delitem__removeObjectForKey_), - ('__setitem__', __setitem__setObject_forKey_), - ('update', update_setObject_forKey_), - ('setdefault', setdefault_setObject_forKey_), - ('pop', pop_setObject_forKey_), - ('popitem', popitem_setObject_forKey_), -) - - -def addConvenienceForBasicMapping(classname, readonly=True): - """ - Add the convience methods for a Cocoa mapping type - - Used to add the basic collections.abc.Mapping or collections.abc.MutableMapping - APIs to a Cocoa class that has an API simular to NSDictionary. 
- """ - addConvenienceForClass(classname, - _CONVENIENCES_MAPPING_RO if readonly else _CONVENIENCES_MAPPING_RW) - - try: - lst = CLASS_ABC[classname] - except KeyError: - lst = CLASS_ABC[classname] = [] - - lst.append(collections_abc.Mapping if readonly else collections_abc.MutableMapping) diff --git a/env/lib/python2.7/site-packages/objc/_convenience_mapping.pyc b/env/lib/python2.7/site-packages/objc/_convenience_mapping.pyc deleted file mode 100644 index 5b012d40..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience_mapping.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsarray.py b/env/lib/python2.7/site-packages/objc/_convenience_nsarray.py deleted file mode 100644 index f416b35c..00000000 --- a/env/lib/python2.7/site-packages/objc/_convenience_nsarray.py +++ /dev/null @@ -1,444 +0,0 @@ -""" -Convenience interface for NSArray/NSMutableArray -""" -__all__ = () - -from objc._convenience import addConvenienceForClass, container_wrap, container_unwrap -from objc._objc import lookUpClass, registerMetaDataForSelector, _C_NSInteger, _C_ID -from objc._objc import _NSNotFound as NSNotFound - -import sys - -if sys.version_info[0] == 2: - import collections as collections_abc -else: - import collections.abc as collections_abc - -NSArray = lookUpClass('NSArray') -NSMutableArray = lookUpClass('NSMutableArray') - -collections_abc.Sequence.register(NSArray) -collections_abc.MutableSequence.register(NSMutableArray) - -if sys.version_info[0] == 2: # pragma: no 3.x cover - INT_TYPES = (int, long) - STR_TYPES = (str, unicode) - -else: # pragma: no 2.x cover - INT_TYPES = int - STR_TYPES = str - - -registerMetaDataForSelector( - b"NSObject", b"sortUsingFunction:context:", - dict( - arguments={ - 2: { - 'callable': { - 'retval': { 'type': _C_NSInteger }, - 'arguments': { - 0: { 'type': _C_ID }, - 1: { 'type': _C_ID }, - 2: { 'type': _C_ID }, - } - }, - 'callable_retained': False, - }, - 3: { 'type': _C_ID }, - }, - ) -) - - -def _ensure_array(anArray): - """ Return *anArray* as a list, tuple or NSArray """ - if not isinstance(anArray, (NSArray, list, tuple)): - anArray = list(anArray) - return anArray - - -def nsarray_reverse(self): - """ Reverse an array """ - begin = 0 - end = len(self) - 1 - while begin < end: - self.exchangeObjectAtIndex_withObjectAtIndex_(begin, end) - begin += 1 - end -= 1 - - -def nsarray_extend(self, anArray): - for item in anArray: - self.addObject_(container_wrap(item)) - - -_index_sentinel=object() -def nsarray_index(self, item, start=0, stop=_index_sentinel): - if start == 0 and stop is _index_sentinel: - res = self.indexOfObject_(container_wrap(item)) - if res == NSNotFound: - raise ValueError("%s.index(x): x not in list" % (type(self).__name__,)) - - else: - itemcount = self.count() - if start < 0: - start = itemcount + start - if start < 0: - start = 0 - - if stop is not _index_sentinel: - if stop < 0: - stop = itemcount + stop - if stop < 0: - stop = 0 - else: - stop = itemcount - - if itemcount == 0: - raise ValueError("%s.index(x): x not in list" % (type(self).__name__,)) - - if start >= itemcount: - raise ValueError("%s.index(x): x not in list" % (type(self).__name__,)) - - if stop >= itemcount: - stop = itemcount - 1 - - if stop <= start: - ln = 0 - - else: - ln = stop - start - - if ln == 0: - raise ValueError("%s.index(x): x not in list" % (type(self).__name__,)) - - if ln > sys.maxsize: # pragma: no cover - ln = sys.maxsize - - res = self.indexOfObject_inRange_(item, (start, ln)) - if res == NSNotFound: - 
raise ValueError("%s.index(x): x not in list" % (type(self).__name__,)) - - return res - - -def nsarray_insert(self, idx, item): - if idx < 0: - idx += self.count() - if idx < 0: - idx = 0 - self.insertObject_atIndex_(container_wrap(item), idx) - - -def nsarray__getitem__(self, idx): - if isinstance(idx, slice): - start, stop, step = idx.indices(len(self)) - return [self[i] for i in range(start, stop, step)] - - elif not isinstance(idx, INT_TYPES): - raise TypeError("index must be a number") - - if idx < 0: - idx += len(self) - if idx < 0: - raise IndexError("list index out of range") - - return container_unwrap(self.objectAtIndex_(idx), RuntimeError) - - -def nsarray__delitem__(self, idx): - if isinstance(idx, slice): - start, stop, step = idx.indices(self.count()) - if step == 1: - if start > stop: - # Nothing to remove - return - - return self.removeObjectsInRange_((start, stop-start)) - - r = reversed(range(start, stop, step)) - for i in r: - self.removeObjectAtIndex_(i) - return - - if idx < 0: - idx += self.count() - if idx < 0: - raise IndexError("list index out of range") - - self.removeObjectAtIndex_(idx) - - -def nsarray_pop(self, idx=-1): - length = self.count() - if length <= 0: - raise IndexError("pop from empty list") - - elif idx >= length or (idx + length) < 0: - raise IndexError("pop index out of range") - - elif idx < 0: - idx += len(self) - - rval = self.objectAtIndex_(idx) - self.removeObjectAtIndex_(idx) - return rval - - -def nsarray_remove(self, obj): - idx = self.indexOfObject_(obj) - if idx == NSNotFound: - raise ValueError("%s.remove(x): x not in list" % (type(self).__name__,)) - self.removeObjectAtIndex_(idx) - - -index_error_message = 'index is not an integer' -if sys.version_info[:2] >= (3, 5): - index_error_message='list indices must be integers or slices' - -def nsarray__setitem__(self, idx, anObject): - if isinstance(idx, slice): - start, stop, step = idx.indices(self.count()) - if step >=0: - if stop <= start: - # Empty slice: insert values - stop = start - - anObject = _ensure_array(anObject) - - if step == 1: - return self.replaceObjectsInRange_withObjectsFromArray_((start, stop - start), anObject) - - slice_len = len(range(start, stop, step)) - if slice_len != len(anObject): - raise ValueError("Replacing extended slice with %d elements by %d elements"%( - slice_len, len(anObject))) - - if step > 0: - # NOTE: 'anObject' cannot be 'self' because assigning to an extended - # slice cannot change the size of 'self' and slep 1 is handled earlier. 
- toAssign = anObject - - for inIdx, outIdx in enumerate(range(start, stop, step)): - self.replaceObjectAtIndex_withObject_(outIdx, toAssign[inIdx]) - - # slice.indexes already catches this: - #elif step == 0: - # raise ValueError("Step 0") - - else: - if anObject is self: - toAssign = list(anObject) - else: - toAssign = anObject - - for inIdx, outIdx in enumerate(range(start, stop, step)): - self.replaceObjectAtIndex_withObject_(outIdx, toAssign[inIdx]) - - elif not isinstance(idx, INT_TYPES): - raise TypeError(index_error_message) - - else: - if idx < 0: - idx += self.count() - if idx < 0: - raise IndexError("list index out of range") - - self.replaceObjectAtIndex_withObject_(idx, anObject) - - -def nsarray_add(self, other): - result = NSMutableArray(self) - result.addObjectsFromArray_(_ensure_array(other)) - return result - - -def nsarray_radd(self, other): - result = NSMutableArray(other) - result.addObjectsFromArray_(self) - return result - - -def nsarray_mul(self, other): - """ - This tries to implement anNSArray * N - somewhat efficently (and definitely more - efficient that repeated appending). - """ - result = NSMutableArray.array() - - if other <= 0: - return result - - n = 1 - tmp = self - while other: - if (other & n) != 0: - result.addObjectsFromArray_(tmp) - other -= n - - if other: - n <<= 1 - tmp = tmp.arrayByAddingObjectsFromArray_(tmp) - - return result - - -def nsarray_new(cls, sequence=None): - if not sequence: - return NSArray.array() - - elif isinstance(sequence, STR_TYPES): - return NSArray.arrayWithArray_(list(sequence)) - - else: - if not isinstance(sequence, (list, tuple)): - return NSArray.arrayWithArray_(list(sequence)) - - return NSArray.arrayWithArray_(sequence) - - -def nsmutablearray_new(cls, sequence=None): - if not sequence: - return NSMutableArray.array() - - elif isinstance(sequence, STR_TYPES): - return NSMutableArray.arrayWithArray_(list(sequence)) - - else: - if type(sequence) not in (list, tuple): - return NSMutableArray.arrayWithArray_(list(sequence)) - - # This is only valid when ``sequence`` is an built-in list or tuple, - # otherwise arrayWithArray might access the sequence differently - # then expected from a Python sequence initializer. - return NSMutableArray.arrayWithArray_(sequence) - - -def nsarray__contains__(self, elem): - return bool(self.containsObject_(container_wrap(elem))) - - -def nsarray_append(self, anObject): - self.addObject_(container_wrap(anObject)) - - -def nsarray_clear(self): - self.removeAllObjects() - - -if sys.version_info[0] == 2: # pragma: no 3.x cover - def nsarray_sort(self, cmp=cmp, key=None, reverse=False): - if key is None: - if reverse: - def sort_func(a, b, cmp): - return -cmp(a, b) - - else: - def sort_func(a, b, cmp): - return cmp(a, b) - else: - if reverse: - def sort_func(a, b, cmp): - return -cmp(key(a), key(b)) - else: - def sort_func(a, b, cmp): - return cmp(key(a), key(b)) - - self.sortUsingFunction_context_(sort_func, cmp) - -else: # pragma: no 2.x cover - def nsarray_sort(self, key=lambda x: x, reverse=False): - if reverse: - def sort_func(a, b, _): - a = key(a) - b = key(b) - if a < b: - return 1 - - elif b < a: - return -1 - - else: - return 0 - - else: - def sort_func(a, b, _): - a = key(a) - b = key(b) - - if a < b: - return -1 - elif b < a: - return 1 - else: - return 0 - - self.sortUsingFunction_context_(sort_func, None) - - -def nsarray__len__(self): - return self.count() - -# NOTE: 'no cover' because call of the system array -# classes are subclasses of NSMutableArray. 
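# Pure-Python sketch of the doubling strategy behind nsarray_mul above:
# instead of appending the sequence N times, it doubles a working block and
# appends it whenever the corresponding bit of N is set. Plain lists stand
# in for NSMutableArray / arrayByAddingObjectsFromArray_.
def repeat_by_doubling(seq, n):
    result = []
    if n <= 0:
        return result
    block = list(seq)
    bit = 1
    while n:
        if n & bit:
            result.extend(block)   # addObjectsFromArray_ in the original
            n -= bit
        if n:
            bit <<= 1
            block = block + block  # tmp.arrayByAddingObjectsFromArray_(tmp)
    return result

assert repeat_by_doubling([1, 2], 3) == [1, 2, 1, 2, 1, 2]
assert repeat_by_doubling([1, 2], 0) == []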
-def nsarray__copy__(self): # pragma: no cover - return self.copy() - -def nsarray__iter__(self): - return iter(self.objectEnumerator()) - -addConvenienceForClass('NSArray', ( - ('__new__', staticmethod(nsarray_new)), - ('__add__', nsarray_add), - ('__radd__', nsarray_radd), - ('__mul__', nsarray_mul), - ('__rmul__', nsarray_mul), - ('__len__', nsarray__len__), - ('__contains__', nsarray__contains__), - ('__getitem__', nsarray__getitem__), - ('__copy__', nsarray__copy__), - ('__iter__', nsarray__iter__), - ('index', nsarray_index), - ('remove', nsarray_remove), - ('pop', nsarray_pop), -)) - -def nsmutablearray__copy__(self): - return self.mutableCopy() - -addConvenienceForClass('NSMutableArray', ( - ('__new__', staticmethod(nsmutablearray_new)), - ('__copy__', nsmutablearray__copy__), - ('__setitem__', nsarray__setitem__), - ('__delitem__', nsarray__delitem__), - ('extend', nsarray_extend), - ('append', nsarray_append), - ('sort', nsarray_sort), - ('insert', nsarray_insert), - ('reverse', nsarray_reverse), - ('clear', nsarray_clear), -)) - - -if sys.version_info[0] == 2: # pragma: no 3.x cover; pragma: no branch - def nsarray__getslice__(self, i, j): - i = max(i, 0); j = max(j, 0) - return nsarray__getitem__(self, slice(i, j)) - - def nsarray__setslice__(self, i, j, seq): - i = max(i, 0) - j = max(j, 0) - nsarray__setitem__(self, slice(i, j), seq) - - def nsarray__delslice__(self, i, j): - nsarray__delitem__(self, slice(i, j)) - - addConvenienceForClass('NSArray', ( - ('__getslice__', nsarray__getslice__), - )) - - addConvenienceForClass('NSMutableArray', ( - ('__setslice__', nsarray__setslice__), - ('__delslice__', nsarray__delslice__), - )) diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsarray.pyc b/env/lib/python2.7/site-packages/objc/_convenience_nsarray.pyc deleted file mode 100644 index c9830aca..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience_nsarray.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsdata.py b/env/lib/python2.7/site-packages/objc/_convenience_nsdata.py deleted file mode 100644 index c05ac377..00000000 --- a/env/lib/python2.7/site-packages/objc/_convenience_nsdata.py +++ /dev/null @@ -1,85 +0,0 @@ -""" -Specific support for NSData. - -NSData needs to be handles specially for correctness reasons, -and is therefore in the core instead of the Foundation -framework wrappers. -""" -from objc._objc import registerMetaDataForSelector -from objc._convenience import addConvenienceForClass -import sys - -registerMetaDataForSelector( - b'NSData', b'dataWithBytes:length:', - { - 'arguments': { - 2: { 'type': b'^v', 'type_modifier': b'n', 'c_array_length_in_arg': 3 } - } - }) - - -def nsdata__new__(cls, value=None): - if value is None: - return cls.data() - - else: - return cls.dataWithBytes_length_(value, len(value)) - -if sys.version_info[0] == 2: # pragma: no 3.x cover - def nsdata__str__(self): - if len(self) == 0: - return str(b"") - return str(self.bytes().tobytes()) - -else: # pragma: no 2.x cover - def nsdata__str__(self): - if len(self) == 0: - return str(b"") - return str(self.bytes().tobytes()) - - def nsdata__bytes__(self): - return bytes(self.bytes()) - -# XXX: These NSData helpers should use Cocoa method calls, -# instead of creating a memoryview/buffer object. 
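# Hedged usage sketch of the NSData conveniences defined here (they take
# effect once the addConvenienceForClass registrations just below have run).
# Assumes macOS with pyobjc-core installed; NSData is provided by Foundation.
import objc

NSData = objc.lookUpClass("NSData")
d = NSData(b"hello")                # nsdata__new__ -> dataWithBytes:length:
assert len(d) == 5                  # __len__ -> -length
assert bytes(d[0:4]) == b"hell"     # __getitem__ slices the underlying buffer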
-def nsdata__getitem__(self, item): - buff = self.bytes() - try: - return buff[item] - except TypeError: - return buff[:][item] - -def nsmutabledata__setitem__(self, item, value): - self.mutableBytes()[item] = value - -addConvenienceForClass('NSData', ( - ('__new__', staticmethod(nsdata__new__)), - ('__len__', lambda self: self.length()), - ('__str__', nsdata__str__), - ('__getitem__', nsdata__getitem__), -)) -addConvenienceForClass('NSMutableData', ( - ('__setitem__', nsmutabledata__setitem__), -)) - -if sys.version_info[0] == 2: - def nsdata__getslice__(self, i, j): - return self.bytes()[i:j] - - def nsmutabledata__setslice__(self, i, j, sequence): - # XXX - could use replaceBytes:inRange:, etc. - self.mutableBytes()[i:j] = sequence - - addConvenienceForClass('NSData', ( - ('__getslice__', nsdata__getslice__), - )) - addConvenienceForClass('NSMutableData', ( - ('__setslice__', nsmutabledata__setslice__), - )) - - - -if sys.version_info[0] == 3: # pragma: no 2.x cover; pragma: no branch - addConvenienceForClass('NSData', ( - ('__bytes__', nsdata__bytes__), - )) diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsdata.pyc b/env/lib/python2.7/site-packages/objc/_convenience_nsdata.pyc deleted file mode 100644 index a58683c1..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience_nsdata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsdecimal.py b/env/lib/python2.7/site-packages/objc/_convenience_nsdecimal.py deleted file mode 100644 index c9e078e8..00000000 --- a/env/lib/python2.7/site-packages/objc/_convenience_nsdecimal.py +++ /dev/null @@ -1,74 +0,0 @@ -""" -Support for NSDecimalNumber. - -The actual class is defined in Foundation, but having the wrapper -here is much more convenient. 
-""" -__all__ = () -from objc._convenience import addConvenienceForClass -from objc._objc import lookUpClass, NSDecimal -import sys -import operator - -NSDecimalNumber = lookUpClass('NSDecimalNumber') - -def decimal_new(cls, value=None): - if value is None: - return cls.numberWithInt_(0) - - else: - if isinstance(value, NSDecimal): - return cls.decimalNumberWithDecimal_(value) - elif isinstance(value, NSDecimalNumber): - return cls.decimalNumberWithDecimal_(value.decimalValue()) - elif isinstance(value, float): - return cls.numberWithDouble_(value) - elif isinstance(value, str): - value = NSDecimal(value) - return cls.decimalNumberWithDecimal_(value) - else: - # The value is either an integer, or - # invalid (and numberWithLongLong_ wil raise - # TypeError) - try: - return cls.numberWithLongLong_(value) - - except ValueError: - raise TypeError("Value is not a number") - -addConvenienceForClass('NSDecimalNumber', ( - ('__new__', staticmethod(decimal_new)), - ('__add__', lambda self, other: NSDecimalNumber(operator.add(NSDecimal(self), other))), - ('__radd__', lambda self, other: NSDecimalNumber(operator.add(other, NSDecimal(self)))), - ('__sub__', lambda self, other: NSDecimalNumber(operator.sub(NSDecimal(self), other))), - ('__rsub__', lambda self, other: NSDecimalNumber(operator.sub(other, NSDecimal(self)))), - ('__mul__', lambda self, other: NSDecimalNumber(operator.mul(NSDecimal(self), other))), - ('__rmul__', lambda self, other: NSDecimalNumber(operator.mul(other, NSDecimal(self)))), - ('__truediv__', lambda self, other: NSDecimalNumber(operator.truediv(NSDecimal(self), other))), - ('__rtruediv__', lambda self, other: NSDecimalNumber(operator.truediv(other, NSDecimal(self)))), - ('__floordiv__', lambda self, other: NSDecimalNumber(operator.floordiv(NSDecimal(self), other))), - ('__rfloordiv__', lambda self, other: NSDecimalNumber(operator.floordiv(other, NSDecimal(self)))), - ('__mod__', lambda self, other: NSDecimalNumber(operator.mod(NSDecimal(self), other))), - ('__rmod__', lambda self, other: NSDecimalNumber(operator.mod(other, NSDecimal(self)))), - ('__neg__', lambda self: NSDecimalNumber(operator.neg(NSDecimal(self)))), - ('__pos__', lambda self: NSDecimalNumber(operator.pos(NSDecimal(self)))), - ('__abs__', lambda self: NSDecimalNumber(abs(NSDecimal(self)))), - ('__lt__', lambda self, other: (NSDecimal(self) < other)), - ('__gt__', lambda self, other: (NSDecimal(self) > other)), - ('__le__', lambda self, other: (NSDecimal(self) <= other)), - ('__ge__', lambda self, other: (NSDecimal(self) >= other)), - ('__eq__', lambda self, other: (NSDecimal(self) == other)), - ('__ne__', lambda self, other: (NSDecimal(self) != other)), -)) - -if sys.version_info[0] == 2: # pragma: no 3.x cover - addConvenienceForClass('NSDecimalNumber', ( - ('__div__', lambda self, other: NSDecimalNumber(operator.div(NSDecimal(self), other))), - ('__rdiv__', lambda self, other: NSDecimalNumber(operator.div(other, NSDecimal(self)))), - ('__cmp__', lambda self, other: cmp(NSDecimalNumber(NSDecimal(self), other))), - )) - -else: # pragma: no 2.x cover - addConvenienceForClass('NSDecimalNumber', ( - ('__round__', lambda self, n=0 : NSDecimalNumber(round(NSDecimal(self), n))), - )) diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsdecimal.pyc b/env/lib/python2.7/site-packages/objc/_convenience_nsdecimal.pyc deleted file mode 100644 index 8e422be2..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience_nsdecimal.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/objc/_convenience_nsdictionary.py b/env/lib/python2.7/site-packages/objc/_convenience_nsdictionary.py deleted file mode 100644 index 46a84aa2..00000000 --- a/env/lib/python2.7/site-packages/objc/_convenience_nsdictionary.py +++ /dev/null @@ -1,443 +0,0 @@ -""" -Convenience interface for NSDictionary/NSMutableDictionary -""" -__all__ = () - -from objc._convenience_mapping import addConvenienceForBasicMapping -from objc._convenience import container_wrap, container_unwrap, addConvenienceForClass -from objc._objc import lookUpClass - -import sys, os - -if sys.version_info[0] == 2: - import collections as collections_abc -else: - import collections.abc as collections_abc - -NSDictionary = lookUpClass('NSDictionary') -NSMutableDictionary = lookUpClass('NSMutableDictionary') - -addConvenienceForBasicMapping('NSDictionary', True) -addConvenienceForBasicMapping('NSMutableDictionary', False) - - -def _all_contained_in(inner, outer): - """ - Return True iff all items in ``inner`` are also in ``outer``. - """ - for v in inner: - if v not in outer: - return False - - return True - - -def nsdict__len__(self): - return self.count() - - -def nsdict__iter__(self): - return iter(self.keyEnumerator()) - - -class nsdict_view (collections_abc.Set): - __slots__ = () - - def __eq__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - - if len(self) == len(other): - return _all_contained_in(self, other) - - else: - return False - - def __ne__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - - if len(self) == len(other): - return not _all_contained_in(self, other) - - else: - return True - - def __lt__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - - if len(self) < len(other): - return _all_contained_in(self, other) - - else: - return False - - def __le__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - - if len(self) <= len(other): - return _all_contained_in(self, other) - - else: - return False - - def __gt__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - - if len(self) > len(other): - return _all_contained_in(other, self) - - else: - return False - - def __ge__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - - if len(self) >= len(other): - return _all_contained_in(other, self) - - else: - return False - - def __and__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - result = set(self) - result.intersection_update(other) - return result - - def __rand__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - result = set(self) - result.intersection_update(other) - return result - - def __or__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - result = set(self) - result.update(other) - return result - - def __ror__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - result = set(self) - result.update(other) - return result - - def __sub__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - result = set(self) - result.difference_update(other) - return result - - def __rsub__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - result = set(other) - result.difference_update(self) - return result - - def __xor__(self, other): - 
if not isinstance(other, collections_abc.Set): - return NotImplemented - result = set(self) - result.symmetric_difference_update(other) - return result - - def __rxor__(self, other): - if not isinstance(other, collections_abc.Set): - return NotImplemented - result = set(self) - result.symmetric_difference_update(other) - return result - - -class nsdict_keys(nsdict_view): - __slots__ = ('__value', ) - - def __init__(self, value): - self.__value = value - - def __repr__(self): - keys = list(self.__value) - - return "".format(keys) - - def __len__(self): - return len(self.__value) - - def __iter__(self): - return iter(self.__value) - - def __contains__(self, value): - return value in self.__value - - -class nsdict_values(nsdict_view): - __slots__ = ('__value',) - - def __init__(self, value): - self.__value = value - - def __repr__(self): - values = list(self) - values.sort() - - return "".format(values) - - def __len__(self): - return len(self.__value) - - def __iter__(self): - return iter(self.__value.objectEnumerator()) - - def __contains__(self, value): - for v in iter(self): - if value == v: - return True - return False - - -class nsdict_items(nsdict_view): - __slots__ = ('__value',) - - def __init__(self, value): - self.__value = value - - def __repr__(self): - values = list(self) - values.sort() - - return "".format(values) - - def __len__(self): - return len(self.__value) - - def __iter__(self): - for k in self.__value: - yield (k, self.__value[k]) - - def __contains__(self, value): - for v in iter(self): - if value == v: - return True - return False - - -collections_abc.KeysView.register(nsdict_keys) -collections_abc.ValuesView.register(nsdict_values) -collections_abc.ItemsView.register(nsdict_items) - -collections_abc.Mapping.register(NSDictionary) -collections_abc.MutableMapping.register(NSMutableDictionary) - - -if int(os.uname()[2].split('.')[0]) <= 10: - # Limited functionality on OSX 10.6 and earlier - - def nsdict_fromkeys(cls, keys, value=None): - keys = [container_wrap(k) for k in keys] - values = [container_wrap(value)]*len(keys) - - return NSDictionary.dictionaryWithObjects_forKeys_(values, keys) - - # XXX: 'nsdict_fromkeys' doesn't work on OSX 10.5 - def nsmutabledict_fromkeys(cls, keys, value=None): - value = container_wrap(value) - - result = NSMutableDictionary.alloc().init() - for k in keys: - result[container_wrap(k)] = value - return result - -else: - def nsdict_fromkeys(cls, keys, value=None): - keys = [container_wrap(k) for k in keys] - values = [container_wrap(value)]*len(keys) - - return cls.dictionaryWithObjects_forKeys_(values, keys) - - def nsmutabledict_fromkeys(cls, keys, value=None): - value = container_wrap(value) - - result = cls.alloc().init() - for k in keys: - result[container_wrap(k)] = value - return result - - -def nsdict_new(cls, *args, **kwds): - if len(args) == 0: - pass - - elif len(args) == 1: - d = dict() - if isinstance(args[0], collections_abc.Mapping): - items = args[0].items() - else: - items = args[0] - for k , v in items: - d[container_wrap(k)] = container_wrap(v) - - for k, v in kwds.items(): - d[container_wrap(k)] = container_wrap(v) - - return cls.dictionaryWithDictionary_(d) - - else: - raise TypeError( - "dict expected at most 1 arguments, got {0}".format( - len(args))) - - if kwds: - d = dict() - for k, v in kwds.items(): - d[container_wrap(k)] = container_wrap(v) - - return cls.dictionaryWithDictionary_(d) - - return cls.dictionary() - - -def nsdict__eq__(self, other): - if not isinstance(other, collections_abc.Mapping): - return 
False - - return self.isEqualToDictionary_(other) - - -def nsdict__ne__(self, other): - return not nsdict__eq__(self, other) - - -if sys.version_info[0] == 3: # pragma: no 2.x cover - def nsdict__lt__(self, other): - return NotImplemented - - def nsdict__le__(self, other): - return NotImplemented - - def nsdict__ge__(self, other): - return NotImplemented - - def nsdict__gt__(self, other): - return NotImplemented - - addConvenienceForClass('NSDictionary', ( - ('keys', lambda self: nsdict_keys(self)), - ('values', lambda self: nsdict_values(self)), - ('items', lambda self: nsdict_items(self)), - )) - -else: # pragma: no 3.x cover - def nsdict__cmp__(self, other): - if not isinstance(other, collections_abc.Mapping): - return NotImplemented - - if len(self) < len(other): - return -1 - - elif len(self) > len(other): - return 1 - - sentinel = object() - - for a_key in sorted(self): - try: - if self[a_key] != other[a_key]: - break - - except KeyError: - break - - else: - a_key = sentinel - - for b_key in sorted(self): - try: - if self[b_key] != other[b_key]: - break - - except KeyError: - break - else: - b_key = sentinel - - r = cmp(a_key, b_key) - if r == 0 and a_key is not sentinel: - r = cmp(self[a_key], other[a_key]) - - return r - - def nsdict__lt__(self, other): - return nsdict_cmp(self, other) < 0 - - def nsdict__le__(self, other): - return nsdict_cmp(self, other) <= 0 - - def nsdict__ge__(self, other): - return nsdict_cmp(self, other) >= 0 - - def nsdict__gt__(self, other): - return nsdict_cmp(self, other) > 0 - - - def nsdict_iterkeys(aDict): - return iter(aDict.keyEnumerator()) - - def nsdict_itervalues(aDict): - return iter(aDict.objectEnumerator()) - - def nsdict_iteritems(aDict): - for key in aDict: - yield (key, aDict[key]) - - def nsdict_old_items(aDict): - return [(key, aDict[key]) for key in aDict] - - addConvenienceForClass('NSDictionary', ( - ('__cmp__', nsdict__cmp__), - ('fromkeys', classmethod(nsdict_fromkeys)), - ('viewkeys', lambda self: nsdict_keys(self)), - ('viewvalues', lambda self: nsdict_values(self)), - ('viewitems', lambda self: nsdict_items(self)), - ('keys', lambda self: self.allKeys()), - ('items', nsdict_old_items), - ('values', lambda self: self.allValues()), - ('iterkeys', nsdict_iterkeys), - ('iteritems', nsdict_iteritems), - ('itervalues', nsdict_itervalues), - )) - - addConvenienceForClass('NSMutableDictionary', ( - ('fromkeys', classmethod(nsmutabledict_fromkeys)), - )) - - -addConvenienceForClass('NSDictionary', ( - ('__new__', staticmethod(nsdict_new)), - ('fromkeys', classmethod(nsdict_fromkeys)), - ('__eq__', nsdict__eq__), - ('__ne__', nsdict__ne__), - ('__lt__', nsdict__lt__), - ('__le__', nsdict__le__), - ('__gt__', nsdict__gt__), - ('__ge__', nsdict__ge__), - ('__len__', nsdict__len__), - ('__iter__', nsdict__iter__), -)) - - -addConvenienceForClass('NSMutableDictionary', ( - ('__new__', staticmethod(nsdict_new)), - ('fromkeys', classmethod(nsdict_fromkeys)), - ('clear', lambda self: self.removeAllObjects()), -)) diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsdictionary.pyc b/env/lib/python2.7/site-packages/objc/_convenience_nsdictionary.pyc deleted file mode 100644 index 1787234d..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience_nsdictionary.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsobject.py b/env/lib/python2.7/site-packages/objc/_convenience_nsobject.py deleted file mode 100644 index 239abe2b..00000000 --- 
a/env/lib/python2.7/site-packages/objc/_convenience_nsobject.py +++ /dev/null @@ -1,128 +0,0 @@ -""" -Generic conveniences for NSObject - -The type is defined in Foundation, but NSObject is important -to the behavior of the bridge and therefore these conveniences -are kept in the core. -""" - -__all__ = () -import sys -from objc._convenience import addConvenienceForClass - -if sys.version_info[0] == 2: # pragma: no 3.x cover - STR_TYPES=(str, unicode) -else: # pragma: no 2.x cover - STR_TYPES=str - - -def nsobject_hash(self, _max=sys.maxsize, _const=((sys.maxsize + 1) * 2)): - rval = self.hash() - if rval > _max: - rval -= _const - - # -1 is not a valid hash in Python and hash(x) will - # translate a hash of -1 to -2, so we might as well - # do it here so that it's not too surprising.. - if rval == -1: - rval = -2 - return int(rval) - -def nsobject__eq__(self, other): - if self.__is_magic(): - return self is other - return bool(self.isEqualTo_(other)) - -def nsobject__ne__(self, other): - if self.__is_magic(): - return self is not other - return bool(self.isNotEqualTo_(other)) - -def nsobject__gt__(self, other): - if self.__is_magic(): - return NotImplemented - return bool(self.isGreaterThan_(other)) - -def nsobject__ge__(self, other): - if self.__is_magic(): - return NotImplemented - return bool(self.isGreaterThanOrEqualTo_(other)) - -def nsobject__lt__(self, other): - if self.__is_magic(): - return NotImplemented - return bool(self.isLessThan_(other)) - -def nsobject__le__(self, other): - if self.__is_magic(): - return NotImplemented - return bool(self.isLessThanOrEqualTo_(other)) - -class kvc (object): - """ - Key-Value-Coding accessor for Cocoa objects. - - Both attribute access and dict-like indexing will attempt to - access the requested item through Key-Value-Coding. 
- """ - __slots__ = ('__object',) - def __init__(self, value): - self.__object = value - - def __repr__(self): - return ""%(self.__object,) - - def __getattr__(self, key): - try: - return self.__object.valueForKey_(key) - except KeyError as msg: - if (hasattr(msg, '_pyobjc_info_') - and msg._pyobjc_info_['name'] == 'NSUnknownKeyException'): - raise AttributeError(key) - - raise - - def __setattr__(self, key, value): - if not key.startswith('_'): - return self.__object.setValue_forKey_(value, key) - else: - super(kvc, self).__setattr__(key, value) - - def __getitem__(self, key): - if not isinstance(key, STR_TYPES): - raise TypeError("Key must be string") - - return self.__object.valueForKey_(key) - - def __setitem__(self, key, value): - if not isinstance(key, STR_TYPES): - raise TypeError("Key must be string") - - return self.__object.setValue_forKey_(value, key) - - -addConvenienceForClass("NSObject", ( - ('__hash__', nsobject_hash), - ('__eq__', nsobject__eq__), - ('__ne__', nsobject__ne__), - ('__gt__', nsobject__gt__), - ('__ge__', nsobject__ge__), - ('__lt__', nsobject__lt__), - ('__le__', nsobject__le__), - ('_', property(kvc)), -)) - -if sys.version_info[0] == 2: # pragma: no 3.x cover; pragma: no branch - def nsobject__cmp__(self, other): - try: - func = self.compare_ - - except AttributeError: - return NotImplemented - - else: - return func(other) - - addConvenienceForClass("NSObject", ( - ("__cmp__", nsobject__cmp__), - )) diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsobject.pyc b/env/lib/python2.7/site-packages/objc/_convenience_nsobject.pyc deleted file mode 100644 index a747e815..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience_nsobject.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsset.py b/env/lib/python2.7/site-packages/objc/_convenience_nsset.py deleted file mode 100644 index ba0ff6a6..00000000 --- a/env/lib/python2.7/site-packages/objc/_convenience_nsset.py +++ /dev/null @@ -1,330 +0,0 @@ -""" -Convenience interface for NSSet/NSMutableSet -""" -__all__ = () - -from objc._convenience import addConvenienceForClass, container_wrap, container_unwrap -from objc._objc import lookUpClass - -import sys - -if sys.version_info[0] == 2: - import collections as collections_abc -else: - import collections.abc as collections_abc - -NSSet = lookUpClass('NSSet') -NSMutableSet = lookUpClass('NSMutableSet') - -collections_abc.Set.register(NSSet) -collections_abc.MutableSet.register(NSMutableSet) - -def nsset_isdisjoint(self, other): - if not hasattr(other, '__contains__'): - other = list(other) - - for item in self: - if item in other: - return False - return True - -def nsset_union(self, *other): - result = NSMutableSet() - result.unionSet_(self) - for val in other: - if isinstance(val, collections_abc.Set): - result.unionSet_(val) - else: - result.unionSet_(set(val)) - return result - -def nsset_intersection(self, *others): - if len(others) == 0: - return self.mutableCopy() - - others = [o if hasattr(o, '__contains__') else list(o) for o in others] - - result = NSMutableSet() - for item in self: - for o in others: - if item not in o: - break - else: - result.add(item) - return result - -def nsset_difference(self, *others): - result = self.mutableCopy() - - for value in others: - if isinstance(value, collections_abc.Set): - result.minusSet_(value) - else: - result.minusSet_(set(value)) - - return result - -def nsset_symmetric_difference(self, other): - result = NSMutableSet() - if not hasattr(other, 
'__contains__'): - other = list(other) - for item in iter(self): - if item not in other: - result.add(item) - for item in other: - if item not in self: - result.add(item) - return result - -def nsset__contains__(self, value): - hash(value) # Force error for non-hashable values - return self.containsObject_(value) - -def nsset__or__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError("NSSet|value where value is not a set") - return nsset_union(self, other) - -def nsset__ror__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError("value|NSSet where value is not a set") - return nsset_union(other, self) - -def nsset__and__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError("NSSet&value where value is not a set") - return nsset_intersection(self, other) - -def nsset__rand__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError("value&NSSet where value is not a set") - return nsset_intersection(other, self) - -def nsset__sub__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError("NSSet-value where value is not a set") - return nsset_difference(self, other) - -def nsset__rsub__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError("NSSet-value where value is not a set") - return nsset_difference(NSMutableSet(other), self) - -def nsset__xor__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError("NSSet-value where value is not a set") - return nsset_symmetric_difference(other, self) - -def nsset_issubset(self, other): - if isinstance(other, collections_abc.Set): - return self.isSubsetOfSet_(other) - - else: - return self.isSubsetOfSet_(set(other)) - -def nsset__le__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError() - return nsset_issubset(self, other) - -def nsset__eq__(self, other): - if not isinstance(other, collections_abc.Set): - return False - - return self.isEqualToSet_(other) - -def nsset__ne__(self, other): - if not isinstance(other, collections_abc.Set): - return True - - return not self.isEqualToSet_(other) - -def nsset__lt__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError() - - return (self <= other) and (self != other) - -def nsset_issuperset(self, other): - if not isinstance(other, collections_abc.Set): - other = set(other) - - for item in other: - if item not in self: - return False - - return True - -def nsset__ge__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError() - return nsset_issuperset(self, other) - -def nsset__gt__(self, other): - if not isinstance(other, collections_abc.Set): - raise TypeError() - return (self >= other) and (self != other) - -if sys.version_info[0] == 2: # pragma: no 3.x cover; pragma: no branch - def nsset__cmp__(self, other): - raise TypeError("Cannot compare sets using cmp") - -def nsset_update(self, *others): - for other in others: - if isinstance(other, collections_abc.Set): - self.unionSet_(other) - else: - self.unionSet_(set(other)) - -def nsset_intersection_update(self, *others): - for other in others: - if isinstance(other, collections_abc.Set): - self.intersectSet_(other) - else: - self.intersectSet_(set(other)) - -def nsset_difference_update(self, *others): - for other in others: - if isinstance(other, collections_abc.Set): - self.minusSet_(other) - else: - self.minusSet_(set(other)) - -def nsset_symmetric_difference_update(self, other): - toadd = set() - 
toremove = set() - - if isinstance(other, collections_abc.Set): - totest = other - else: - totest = set(other) - - for value in self: - if value in totest: - toremove.add(value) - for value in totest: - if value not in self: - toadd.add(value) - - self.minusSet_(toremove) - self.unionSet_(toadd) - -def nsset_pop(self): - if len(self) == 0: - raise KeyError() - - v = self.anyObject() - self.removeObject_(v) - return container_unwrap(v, KeyError) - -def nsset_remove(self, value): - hash(value) - value = container_wrap(value) - if value not in self: - raise KeyError(value) - self.removeObject_(value) - -def nsset_discard(self, value): - hash(value) - self.removeObject_(container_wrap(value)) - -def nsset_add(self, value): - hash(value) - self.addObject_(container_wrap(value)) - -class nsset__iter__ (object): - def __init__(self, value): - self._size = len(value) - self._enum = value.objectEnumerator() - - def __length_hint__(self): - return self._size - - def __iter__(self): - return self - - def __next__(self): - self._size -= 1 - return container_unwrap(self._enum.nextObject(), StopIteration) - - next = __next__ - - - - -addConvenienceForClass('NSSet', ( - ('__len__', lambda self: self.count()), - ('__iter__', lambda self: nsset__iter__(self)), - ('__contains__', nsset__contains__), - ('isdisjoint', nsset_isdisjoint), - ('union', nsset_union), - ('intersection', nsset_intersection), - ('difference', nsset_difference), - ('symmetric_difference', nsset_symmetric_difference), - ('issubset', nsset_issubset), - ('__eq__', nsset__eq__), - ('__ne__', nsset__ne__), - ('__le__', nsset__le__), - ('__lt__', nsset__lt__), - ('issuperset', nsset_issuperset), - ('__ge__', nsset__ge__), - ('__gt__', nsset__gt__), - ('__or__', nsset__or__), - ('__ror__', nsset__ror__), - ('__and__', nsset__and__), - ('__rand__', nsset__rand__), - ('__xor__', nsset__xor__), - ('__rxor__', nsset__xor__), - ('__sub__', nsset__sub__), - ('__rsub__', nsset__rsub__), -)) - -if sys.version_info[0] == 2: # pragma: no 3.x cover; pragma: no branch - addConvenienceForClass('NSSet', ( - ('__cmp__', nsset__cmp__), - )) - -addConvenienceForClass('NSMutableSet', ( - ('add', nsset_add), - ('remove', nsset_remove), - ('discard', nsset_discard), - ('update', nsset_update), - ('intersection_update', nsset_intersection_update), - ('difference_update', nsset_difference_update), - ('symmetric_difference_update', nsset_symmetric_difference_update), - ('clear', lambda self: self.removeAllObjects()), - ('pop', nsset_pop), -)) - -def nsset_new(cls, sequence=None): - if not sequence: - return NSSet.set() - - if isinstance(sequence, (NSSet, set, frozenset)): - return NSSet.set().setByAddingObjectsFromSet_(sequence) - - else: - return NSSet.set().setByAddingObjectsFromSet_(set(sequence)) - -def nsmutableset_new(cls, sequence=None): - if not sequence: - value = NSMutableSet.set() - - elif isinstance(sequence, (NSSet, set, frozenset)): - value = NSMutableSet.set() - value.unionSet_(sequence) - - else: - value = NSMutableSet.set() - value.unionSet_(set(sequence)) - - return value - -addConvenienceForClass('NSSet', ( - ('__new__', staticmethod(nsset_new)), -)) - -addConvenienceForClass('NSMutableSet', ( - ('__new__', staticmethod(nsmutableset_new)), -)) - diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsset.pyc b/env/lib/python2.7/site-packages/objc/_convenience_nsset.pyc deleted file mode 100644 index 383d47e5..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience_nsset.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/objc/_convenience_nsstring.py b/env/lib/python2.7/site-packages/objc/_convenience_nsstring.py deleted file mode 100644 index ba1b86df..00000000 --- a/env/lib/python2.7/site-packages/objc/_convenience_nsstring.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -Convenience interface for NSString -""" -from objc._convenience import addConvenienceForClass - -__all__ = () - -_no_value = object() - -def nsstring_new(cls, value=_no_value): - if value is _no_value: - return cls.alloc().init() - else: - return cls.alloc().initWithString_(value) - -addConvenienceForClass('NSString', ( - ('__len__', lambda self: self.length() ), - ('endswith', lambda self, pfx: self.hasSuffix_(pfx)), - ('startswith', lambda self, pfx: self.hasPrefix_(pfx)), - ('__new__', staticmethod(nsstring_new)), -)) diff --git a/env/lib/python2.7/site-packages/objc/_convenience_nsstring.pyc b/env/lib/python2.7/site-packages/objc/_convenience_nsstring.pyc deleted file mode 100644 index 0f617345..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience_nsstring.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_convenience_sequence.py b/env/lib/python2.7/site-packages/objc/_convenience_sequence.py deleted file mode 100644 index 88bfd971..00000000 --- a/env/lib/python2.7/site-packages/objc/_convenience_sequence.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -This module implements a callback function that is used by the C code to -add Python special methods to Objective-C classes with a suitable interface. -""" -from objc._convenience import addConvenienceForClass - -__all__ = ('addConvenienceForBasicSequence',) - -def seq_iter(self): - for idx in range(len(self)): - yield self[idx] - -def seq_len(self): - return self.count() - -def seq_getitem(self, idx): - if isinstance(idx, slice): - raise ValueError("Slices not supported") - - if idx < 0: - orig_idx = idx - idx += seq_len(self) - - if idx < 0: - raise IndexError(orig_idx) - - return self.objectAtIndex_(idx) - -def seq_setitem(self, idx, value): - if isinstance(idx, slice): - raise ValueError("Slices not supported") - - if idx < 0: - orig_idx = idx - idx += seq_len(self) - - if idx < 0: - raise IndexError(orig_idx) - - return self.setObject_atIndex_(value, idx) - -def addConvenienceForBasicSequence(classname, readonly=True): - addConvenienceForClass(classname, ( - ('__len__', seq_len), - ('__getitem__', seq_getitem), - ('__iter__', seq_iter), - )) - - if not readonly: - addConvenienceForClass(classname, ( - ('__setitem__', seq_setitem), - )) diff --git a/env/lib/python2.7/site-packages/objc/_convenience_sequence.pyc b/env/lib/python2.7/site-packages/objc/_convenience_sequence.pyc deleted file mode 100644 index 95556001..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_convenience_sequence.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_descriptors.py b/env/lib/python2.7/site-packages/objc/_descriptors.py deleted file mode 100644 index a618f297..00000000 --- a/env/lib/python2.7/site-packages/objc/_descriptors.py +++ /dev/null @@ -1,328 +0,0 @@ -""" -Python <-> Objective-C bridge (PyObjC) - -This module defines the core interfaces of the Python<->Objective-C bridge. 
-""" - -__all__ = ['IBOutlet', 'IBAction', 'accessor', 'Accessor', 'typedAccessor', 'callbackFor', 'selectorFor', 'synthesize', 'namedselector', 'typedSelector', 'namedSelector', 'instancemethod', 'signature', 'IBInspectable', 'IB_DESIGNABLE', 'callbackPointer'] - -from objc._objc import ivar, selector, _makeClosure, selector, _C_SEL, _C_ID, _C_NSUInteger, _C_NSBOOL, _closurePointer -import sys, textwrap -import warnings - -try: - from inspect import getargspec -except ImportError: - getargspec = None - -try: - from inspect import getfullargspec -except ImportError: - getfullargspec = None - -_C_NSRange = [b"{_NSRange=II}", b"{_NSRange=QQ}"][sys.maxsize > 2**32] - -# -# Interface builder support. -# -def IB_DESIGNABLE(cls): - """ - Class decorator for annotating that a class can be used in Interface Builder. - - The decorator doesn't do anything. - """ - return cls - -def IBInspectable(prop): - """ - Decorator for an Objective-C property to tell IB that the updated in IB. - - The decorator doesn't do anything - """ - return prop - -def IBOutlet(name=None): - """ - Create an instance variable that can be used as an outlet in - Interface Builder. - """ - if name is None: - return ivar(isOutlet=1) - else: - return ivar(name, isOutlet=1) - -def IBAction(func): - """ - Return an Objective-C method object that can be used as an action - in Interface Builder. - """ - if func is None: - raise TypeError("IBAction argument must be a callable") - return selector(func, signature=b"v@:@") - -def instancemethod(func): - if func is None: - raise TypeError("instancemethod argument must be a callable") - return selector(func, isClassMethod=False) - -def accessor(func, typeSignature=b'@'): - """ - Return an Objective-C method object that is conformant with key-value coding - and key-value observing. 
- """ - if getfullargspec is not None: - args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations = getfullargspec(func) - else: - args, varargs, varkw, defaults = getargspec(func) - kwonlyargs = kwonlydefaults = annotations = None - funcName = func.__name__ - maxArgs = len(args) - minArgs = maxArgs - len(defaults or ()) - # implicit self - selArgs = 1 + funcName.count('_') - if varargs is not None or varkw is not None or kwonlyargs: - raise TypeError('%s can not be an accessor because it accepts varargs, varkw or kwonly' % (funcName,)) - - if not (minArgs <= selArgs <= maxArgs): - if minArgs == maxArgs: - raise TypeError('%s expected to take %d args, but must accept %d from Objective-C (implicit self plus count of underscores)' % (funcName, maxArgs, selArgs)) - else: - raise TypeError('%s expected to take between %d and %d args, but must accept %d from Objective-C (implicit self plus count of underscores)' % (funcName, minArgs, maxArgs, selArgs)) - - if selArgs == 3: - if funcName.startswith('validate') and funcName.endswith('_error_'): - return selector(func, signature=_C_NSBOOL + b'@:N^@o^@') - - if funcName.startswith('insertObject_in') and funcName.endswith('AtIndex_'): - return selector(func, signature=b'v@:' + typeSignature + _C_NSUInteger) - elif funcName.startswith('replaceObjectIn') and funcName.endswith('AtIndex_withObject_'): - return selector(func, signature=b'v@:' + _C_NSUInteger + typeSignature) - - elif funcName.startswith('get') and funcName.endswith('_range_'): - return selector(func, signature=b'v@:o^@' + _C_NSRange) - - elif funcName.startswith('insert') and funcName.endswith('_atIndexes_'): - return selector(func, signature=b'v@:@@') - - elif funcName.startswith('replace') and 'AtIndexes_with' in funcName: - return selector(func, signature=b'v@:@@') - - # pass through to "too many arguments" - - elif selArgs == 2: - if funcName.startswith('objectIn') and funcName.endswith('AtIndex_'): - return selector(func, signature=typeSignature + b'@:' + _C_NSUInteger) - elif funcName.startswith('removeObjectFrom') and funcName.endswith('AtIndex_'): - return selector(func, signature=b'v@:' + _C_NSUInteger) - elif funcName.startswith('remove') and funcName.endswith('AtIndexes_'): - return selector(func, signature=b"v@:@") - elif funcName.endswith('AtIndexes_'): - return selector(func, signature=b"@@:@") - elif funcName.startswith('memberOf'): - return selector(func, signature=_C_NSBOOL + b"@:" + typeSignature) - elif funcName.startswith('add') and funcName.endswith('Object_'): - return selector(func, signature=b"v@:" + typeSignature) - elif funcName.startswith('add'): - return selector(func, signature=b"v@:@") - elif funcName.startswith('intersect'): - return selector(func, signature=b"v@:@") - - return selector(func, signature=b"v@:" + typeSignature) - - elif selArgs == 1: - if funcName.startswith('countOf'): - typeSignature = _C_NSUInteger - elif funcName.startswith('enumerator'): - typeSignature = b"@" - - - return selector(func, signature=typeSignature + b"@:") - - raise TypeError("%s not recognized as an accessor" % (funcName,)) - - -def typedSelector(signature): - def _typedSelector(func): - if func is None: - raise TypeError("typedSelector() function argument must be a callable") - return selector(func, signature=signature) - return _typedSelector - -def namedSelector(name, signature=None): - """ - Decorator for overriding the Objective-C SEL for a method, usage: - - @namedSelector("foo:bar:") - def foobar(self, foo, bar): - return foo + bar - """ - if 
signature is not None: - def _namedselector(func): - if func is None: - raise TypeError("IBAction argument must be a callable") - return selector(func, selector=name, signature=signature) - else: - def _namedselector(func): - if func is None: - raise TypeError("IBAction argument must be a callable") - return selector(func, selector=name) - - return _namedselector - -def namedselector(name, signature=None): - warnings.warn("use objc.namedSelector instead of objc.namedselector", DeprecationWarning, stacklevel=2) - return namedSelector(name, signature) - -def typedAccessor(typeSignature): - """ - Decorator for creating a typed accessor, usage: - - @typedAccessor('i') - def someIntegerAccessor(self): - return self.someInteger - - @typedAccessor('i') - def setSomeIntegerAccessor_(self, anInteger): - self.someInteger = anInteger - """ - def _typedAccessor(func): - return accessor(func, typeSignature) - return _typedAccessor - -def Accessor(func): - warnings.warn( - "Use objc.accessor instead of objc.Accessor", DeprecationWarning) - return accessor(func) - -# -# Callback support -# -def callbackFor(callable, argIndex=-1): - """ - Decorator for converting a function into an object that can be used - as a callback function for (Objective-)C API's that take such a beast - as one of their arguments. - - Note that using this decorator for methods is unsupported and that this - decorator is optional when the callback isn't stored by the called function - - Usage:: - - @objc.callbackFor(NSArray.sortedArrayUsingFunction_context_) - def compare(left, right, context): - return 1 - """ - def addClosure(function): - closure, meta = _makeClosure(function, callable, argIndex) - function.pyobjc_closure = closure - function.__metadata__ = lambda: meta - return function - - return addClosure - -def callbackPointer(closure): - """ - Return a value for "closure" that can be passed to a function - expecting a "void *" argument. - """ - if not hasattr(closure, 'pyobjc_closure'): - raise ValueError("Object is not decorated with 'callbackFor'") - - return _closurePointer(closure.pyobjc_closure) - - -def selectorFor(callable, argIndex=-1): - """ - Decorator that makes sure that the method has the right signature to be - used as the selector argument to the specified method. - - Usage:: - - @objc.selectorFor(NSApplication.beginSheet_modalForWindow_modalDelegate_didEndSelector_contextInfo_) - def sheetDidEnd_returnCode_contextInfo_(self, sheet, returnCode, info): - pass - """ - if argIndex == -1: - for arg in callable.__metadata__()['arguments']: - if arg['type'] == _C_SEL and 'sel_of_type' in arg: - signature = arg['sel_of_type'] - break - else: - raise ValueError("No selector argument with type information") - - else: - try: - signature = callable.__metadata__()['arguments'][argIndex]['sel_of_type'] - except (IndexError, KeyError): - raise ValueError("Not a selector argument with type information") - - def addSignature(function): - return selector(function, signature=signature) - - return addSignature - - -def synthesize(name, copy=False, readwrite=True, type=_C_ID, ivarName=None): - """ - Use this in a class dictionary to syntheze simple setting/setter methods. 
- - Note: this is only necessary to get propper behaviour when Key-Value coding - is used and special features (like copying) are needed - - usage:: - - class MyClass (NSObject): - objc.synthesize('someTitle', copy=True) - - """ - if not name: - raise ValueError("Empty property name") - - if ivarName is None: - ivarName = '_' + name - - classDict = sys._getframe(1).f_locals - - setterName = 'set%s%s_'%(name[0].upper(), name[1:]) - - if copy: - setter = textwrap.dedent(''' - def %(name)s(self, value): - self.%(ivar)s = value.copy() - ''' % dict(name=setterName, ivar=ivarName)) - - else: - setter = textwrap.dedent(''' - def %(name)s(self, value): - self.%(ivar)s = value - ''' % dict(name=setterName, ivar=ivarName)) - - getter = textwrap.dedent(''' - def %(name)s(self): - return self.%(ivar)s - ''' % dict(name=name, ivar=ivarName)) - - if readwrite: - exec(setter, globals(), classDict) - - exec(getter, globals(), classDict) - - classDict[ivarName] = ivar(type=type) - - -def signature(signature, **kw): - """ - A Python method decorator that allows easy specification - of Objective-C selectors. - - Usage:: - - @objc.signature('i@:if') - def methodWithX_andY_(self, x, y): - return 0 - """ - warnings.warn("Usage objc.typedSelector instead of objc.signature", DeprecationWarning) - kw['signature'] = signature - def makeSignature(func): - return selector(func, **kw) - return makeSignature diff --git a/env/lib/python2.7/site-packages/objc/_descriptors.pyc b/env/lib/python2.7/site-packages/objc/_descriptors.pyc deleted file mode 100644 index dcde4893..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_descriptors.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_dyld.py b/env/lib/python2.7/site-packages/objc/_dyld.py deleted file mode 100644 index 00d608db..00000000 --- a/env/lib/python2.7/site-packages/objc/_dyld.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -dyld emulation -""" - -__all__ = [ - 'dyld_framework', 'dyld_library', 'dyld_find', 'pathForFramework', - 'infoForFramework', -] - -import os, sys -from objc._framework import infoForFramework - - -# These are the defaults as per man dyld(1) -# -DEFAULT_FRAMEWORK_FALLBACK = ':'.join([ - os.path.expanduser("~/Library/Frameworks"), - "/Library/Frameworks", - "/Network/Library/Frameworks", - "/System/Library/Frameworks", -]) - -DEFAULT_LIBRARY_FALLBACK = ':'.join([ - os.path.expanduser("~/lib"), - "/usr/local/lib", - "/lib", - "/usr/lib", -]) - -def ensure_unicode(s): - """Not all of PyObjC understands unicode paths very well yet""" - if isinstance(s, bytes): - return s.decode('utf-8') - return s - -def inject_suffixes(iterator): - suffix = ensure_unicode(os.environ.get('DYLD_IMAGE_SUFFIX', None)) - if suffix is None: - return iterator - def _inject(iterator=iterator,suffix=suffix): - for path in iterator: - if path.endswith('.dylib'): - yield path[:-6] + suffix + '.dylib' - else: - yield path + suffix - yield path - return _inject() - -def dyld_framework(filename, framework_name, version=None): - """Find a framework using dyld semantics""" - filename = ensure_unicode(filename) - framework_name = ensure_unicode(framework_name) - version = ensure_unicode(version) - - def _search(): - spath = ensure_unicode(os.environ.get('DYLD_FRAMEWORK_PATH', None)) - if spath is not None: - for path in spath.split(':'): - if version: - yield os.path.join( - path, framework_name + '.framework', - 'Versions', version, framework_name - ) - else: - yield os.path.join( - path, framework_name + '.framework', framework_name - ) - yield 
filename - spath = ensure_unicode(os.environ.get( - 'DYLD_FALLBACK_FRAMEWORK_PATH', DEFAULT_FRAMEWORK_FALLBACK - )) - for path in spath.split(':'): - if version: - yield os.path.join( - path, framework_name + '.framework', 'Versions', - version, framework_name - ) - else: - yield os.path.join( - path, framework_name + '.framework', framework_name - ) - - - for f in inject_suffixes(_search()): - if os.path.exists(f): - return f - # raise .. - raise ImportError("Framework %s could not be found" % (framework_name,)) - -def dyld_library(filename, libname): - """Find a dylib using dyld semantics""" - filename = ensure_unicode(filename) - libname = ensure_unicode(libname) - def _search(): - spath = ensure_unicode(os.environ.get('DYLD_LIBRARY_PATH', None)) - if spath is not None: - for path in spath.split(':'): - yield os.path.join(path, libname) - yield filename - spath = ensure_unicode(os.environ.get( - 'DYLD_FALLBACK_LIBRARY_PATH', DEFAULT_LIBRARY_FALLBACK - )) - for path in spath.split(':'): - yield os.path.join(path, libname) - for f in inject_suffixes(_search()): - if os.path.exists(f): - return f - raise ValueError("dylib %s could not be found" %(filename,)) - - -def dyld_find(filename): - """Generic way to locate a dyld framework or dyld""" - filename = os.path.realpath(filename) - res = infoForFramework(filename) - if res: - framework_loc, framework_name, version = res - return dyld_framework(filename, framework_name, version) - else: - return dyld_library(filename, os.path.basename(filename)) - -def pathForFramework(path): - fpath, name, version = infoForFramework(dyld_find(path)) - return os.path.join(fpath, name + '.framework') diff --git a/env/lib/python2.7/site-packages/objc/_dyld.pyc b/env/lib/python2.7/site-packages/objc/_dyld.pyc deleted file mode 100644 index b599bec7..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_dyld.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_framework.py b/env/lib/python2.7/site-packages/objc/_framework.py deleted file mode 100644 index 23234e4c..00000000 --- a/env/lib/python2.7/site-packages/objc/_framework.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Generic framework path manipulation -""" - -__all__ = ['infoForFramework'] - -# This regexp should find: -# \1 - framework location -# \2 - framework name -# \3 - framework version (optional) -# -FRAMEWORK_RE_STR = r"""(^.*)(?:^|/)(\w+).framework(?:/(?:Versions/([^/]+)/)?\2)?$""" -FRAMEWORK_RE = None - -def infoForFramework(filename): - """returns (location, name, version) or None""" - global FRAMEWORK_RE - if FRAMEWORK_RE is None: - import re - FRAMEWORK_RE = re.compile(FRAMEWORK_RE_STR) - is_framework = FRAMEWORK_RE.findall(filename) - if not is_framework: - return None - return is_framework[-1] diff --git a/env/lib/python2.7/site-packages/objc/_framework.pyc b/env/lib/python2.7/site-packages/objc/_framework.pyc deleted file mode 100644 index 6e37c691..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_framework.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_lazyimport.py b/env/lib/python2.7/site-packages/objc/_lazyimport.py deleted file mode 100644 index dd58ee52..00000000 --- a/env/lib/python2.7/site-packages/objc/_lazyimport.py +++ /dev/null @@ -1,466 +0,0 @@ -""" -Helper module that will enable lazy imports of Cocoa wrapper items. 
- -This should improve startup times and memory usage, at the cost -of not being able to use 'from Cocoa import *' -""" -__all__ = ('ObjCLazyModule',) - -import sys -import re -import struct - -from objc import lookUpClass, getClassList, nosuchclass_error, loadBundle -import objc -import warnings -ModuleType = type(sys) - -_name_re = re.compile('^[A-Za-z_][A-Za-z_0-9]*$') - -def _check_deprecated(name, deprecation_version): - if objc.options.deprecation_warnings and objc.options.deprecation_warnings >= deprecation_version: - warnings.warn("%r is deprecated in macOS %d.%d"%(name, deprecation_version / 100, deprecation_version % 100), - objc.ApiDeprecationWarning, stacklevel=2) - -def _loadBundle(frameworkName, frameworkIdentifier, frameworkPath): - if frameworkIdentifier is None: - bundle = loadBundle( - frameworkName, - {}, - bundle_path=frameworkPath, - scan_classes=False) - - else: - try: - bundle = loadBundle( - frameworkName, - {}, - bundle_identifier=frameworkIdentifier, - scan_classes=False) - - except ImportError: - bundle = loadBundle( - frameworkName, - {}, - bundle_path=frameworkPath, - scan_classes=False) - - return bundle - -class GetAttrMap (object): - __slots__ = ('_container',) - def __init__(self, container): - self._container = container - - def __getitem__(self, key): - if key == 'CFSTR': - return lambda v: v.decode("utf-8") - try: - return getattr(self._container, key) - except AttributeError: - raise KeyError(key) - -class ObjCLazyModule (ModuleType): - """ - A module type that loads PyObjC metadata lazily, that is constants, global - variables and functions are created from the metadata as needed. This - reduces the resource usage of PyObjC (both in time and memory), as most - symbols exported by frameworks are never used in programs. - - The loading code assumes that the metadata dictionary is valid, and invalid - metadata may cause exceptions other than AttributeError when accessing module - attributes. - """ - - # Define slots for all attributes, that way they don't end up it __dict__. - __slots__ = ( - '_ObjCLazyModule__bundle', '_ObjCLazyModule__enummap', '_ObjCLazyModule__funcmap', - '_ObjCLazyModule__parents', '_ObjCLazyModule__varmap', '_ObjCLazyModule__inlinelist', - '_ObjCLazyModule__aliases', '_ObjCLazyModule__informal_protocols', - ) - - def __init__(self, name, frameworkIdentifier, frameworkPath, metadict=None, inline_list=None, initialdict=None, parents=()): - super(ObjCLazyModule, self).__init__(name) - - if frameworkIdentifier is not None or frameworkPath is not None: - self.__bundle = self.__dict__['__bundle__'] = _loadBundle(name, frameworkIdentifier, frameworkPath) - else: - self.__bundle = None - - pfx = name + '.' - for nm in list(sys.modules.keys()): - # See issue #95: there can be objects that aren't strings in - # sys.modules. - if hasattr(nm, 'startswith') and nm.startswith(pfx): - rest = nm[len(pfx):] - if '.' 
in rest: continue - if sys.modules[nm] is not None: - self.__dict__[rest] = sys.modules[nm] - - if metadict is None: - metadict = {} - - if initialdict: - self.__dict__.update(initialdict) - self.__dict__.update(metadict.get('misc', {})) - self.__parents = parents - self.__varmap = metadict.get('constants') - self.__varmap_deprecated = metadict.get('deprecated_constants', {}) - self.__varmap_dct = metadict.get('constants_dict', {}) - self.__enummap = metadict.get('enums') - self.__enum_deprecated = metadict.get('deprecated_enums', {}) - self.__funcmap = metadict.get('functions') - self.__aliases = metadict.get('aliases') - self.__aliases_deprecated = metadict.get('deprecated_aliases', {}) - self.__inlinelist = inline_list - - # informal protocols are not exposed, but added here - # for completeness sake. - self.__informal_protocols = metadict.get('protocols') - - self.__expressions = metadict.get('expressions') - self.__expressions_mapping = GetAttrMap(self) - - self.__load_cftypes(metadict.get('cftypes')) - - - def __dir__(self): - return self.__all__ - - def __getattr__(self, name): - if name == "__all__": - # Load everything immediately - value = self.__calc_all() - self.__dict__[name] = value - return value - - # First try parent module, as if we had done - # 'from parents import *' - for p in self.__parents: - try: - value = getattr(p, name) - except AttributeError: - pass - - else: - self.__dict__[name] = value - if '__all__' in self.__dict__: - del self.__dict__['__all__'] - return value - - if not _name_re.match(name): - # Name is not a valid identifier and cannot - # match. - raise AttributeError(name) - - # Check if the name is a constant from - # the metadata files - try: - value = self.__get_constant(name) - except AttributeError: - pass - else: - self.__dict__[name] = value - if '__all__' in self.__dict__: - del self.__dict__['__all__'] - return value - - # Then check if the name is class - try: - value = lookUpClass(name) - except nosuchclass_error: - pass - - else: - self.__dict__[name] = value - if '__all__' in self.__dict__: - del self.__dict__['__all__'] - return value - - # Finally give up and raise AttributeError - raise AttributeError(name) - - def __calc_all(self): - - # Ensure that all dynamic entries get loaded - if self.__varmap_dct: - dct = {} - objc.loadBundleVariables(self.__bundle, dct, - [ (nm, self.__varmap_dct[nm].encode('ascii')) - for nm in self.__varmap_dct if not self.__varmap_dct[nm].startswith('=')]) - for nm in dct: - if nm not in self.__dict__: - self.__dict__[nm] = dct[nm] - - for nm, tp in self.__varmap_dct.items(): - if tp.startswith('=='): - try: - self.__dict__[nm] = objc._loadConstant(nm, tp[2:], 2) - except AttributeError: - raise - pass - elif tp.startswith('='): - try: - self.__dict__[nm] = objc._loadConstant(nm, tp[1:], 1) - except AttributeError: - pass - - - self.__varmap_dct = {} - - if self.__varmap: - varmap = [] - specials = [] - for nm, tp in re.findall(r"\$([A-Z0-9a-z_]*)(@[^$]*)?(?=\$)", self.__varmap): - if tp and tp.startswith('@='): - specials.append((nm, tp[2:])) - else: - varmap.append((nm, b'@' if not tp else tp[1:].encode('ascii'))) - - dct = {} - objc.loadBundleVariables(self.__bundle, dct, varmap) - - for nm in dct: - if nm not in self.__dict__: - self.__dict__[nm] = dct[nm] - - for nm, tp in specials: - try: - if tp.startswith('='): - self.__dict__[nm] = objc._loadConstant(nm, tp[1:], 2) - else: - self.__dict__[nm] = objc._loadConstant(nm, tp, 1) - except AttributeError: - pass - - self.__varmap = "" - - if 
self.__enummap: - for nm, val in re.findall(r"\$([A-Z0-9a-z_]*)@([^$]*)(?=\$)", self.__enummap): - if nm not in self.__dict__: - self.__dict__[nm] = self.__prs_enum(val) - - self.__enummap = "" - - if self.__funcmap: - func_list = [] - for nm in self.__funcmap: - if nm not in self.__dict__: - func_list.append((nm,) + self.__funcmap[nm]) - - dct = {} - objc.loadBundleFunctions(self.__bundle, dct, func_list) - for nm in dct: - if nm not in self.__dict__: - self.__dict__[nm] = dct[nm] - - if self.__inlinelist is not None: - dct = {} - objc.loadFunctionList( - self.__inlinelist, dct, func_list, skip_undefined=True) - for nm in dct: - if nm not in self.__dict__: - self.__dict__[nm] = dct[nm] - - self.__funcmap = {} - - if self.__expressions: - for nm in list(self.__expressions): - try: - getattr(self, nm) - except AttributeError: - pass - - if self.__aliases: - for nm in list(self.__aliases): - try: - getattr(self, nm) - except AttributeError: - pass - - all_names = set() - - # Add all names that are already in our __dict__ - all_names.update(self.__dict__) - - # Merge __all__of parents ('from parent import *') - for p in self.__parents: - try: - all_names.update(p.__all__) - except AttributeError: - all_names.update(dir(p)) - - # Add all class names - all_names.update(cls.__name__ for cls in getClassList()) - - return [ v for v in all_names if not v.startswith('_') ] - - def __prs_enum(self, val): - if val.startswith("'"): - if isinstance(val, bytes): # pragma: no 3.x cover - val, = struct.unpack('>l', val[1:-1]) - else: # pragma: no 2.x cover - val, = struct.unpack('>l', val[1:-1].encode('latin1')) - - elif '.' in val or 'e' in val: - val = float(val) - - else: - val = int(val) - - return val - - def __get_constant(self, name): - if self.__varmap_dct: - if name in self.__varmap_dct: - tp = self.__varmap_dct.pop(name) - if tp.startswith('=='): - tp = tp[2:] - magic = 2 - elif tp.startswith('='): - tp = tp[1:] - magic = 1 - else: - magic = 0 - result = objc._loadConstant(name, tp, magic) - if name in self.__varmap_deprecated: - _check_deprecated(name, self.__varmap_deprecated[name]) - - return result - - if self.__varmap: - m = re.search(r"\$%s(@[^$]*)?\$"%(name,), self.__varmap) - if m is not None: - tp = m.group(1) - if not tp: - tp = '@' - else: - tp = tp[1:] - - d = {} - if tp.startswith('=='): - magic = 2 - tp = tp[2:] - elif tp.startswith('='): - tp = tp[1:] - magic = 1 - else: - magic = 0 - - result = objc._loadConstant(name, tp, magic) - - if name in self.__varmap_deprecated: - _check_deprecated(name, self.__varmap_deprecated[name]) - - return result - - if self.__enummap: - m = re.search(r"\$%s@([^$]*)\$"%(name,), self.__enummap) - if m is not None: - result = self.__prs_enum(m.group(1)) - if name in self.__enum_deprecated: - _check_deprecated(name, self.__enum_deprecated[name]) - return result - - if self.__funcmap: - if name in self.__funcmap: - # NOTE: Remove 'name' from funcmap because - # it won't be needed anymore (either the - # function doesn't exist, or it is loaded) - # Should use slightly less memory. 
- info = self.__funcmap.pop(name) - - func_list = [ (name,) + info ] - - d = {} - objc.loadBundleFunctions(self.__bundle, d, func_list) - if name in d: - return d[name] - - if self.__inlinelist is not None: - objc.loadFunctionList( - self.__inlinelist, d, func_list, skip_undefined=True) - if name in d: - return d[name] - - if self.__expressions: - if name in self.__expressions: - # NOTE: 'name' is popped because it is no longer needed - # in the metadata and popping should slightly reduce - # memory usage. - info = self.__expressions.pop(name) - try: - return eval(info, {}, self.__expressions_mapping) - except: # Ignore all errors in evaluation the expression. - pass - - if self.__aliases: - if name in self.__aliases: - alias = self.__aliases.pop(name) - if alias == 'ULONG_MAX': - result = (sys.maxsize * 2) + 1 - elif alias == 'LONG_MAX': - result = sys.maxsize - elif alias == 'LONG_MIN': - result = -sys.maxsize-1 - elif alias == 'DBL_MAX': - result = sys.float_info.max - elif alias == 'DBL_MIN': - result = sys.float_info.min - elif alias == 'FLT_MAX': - result = objc._FLT_MAX - elif alias == 'FLT_MIN': - result = objc._FLT_MIN - elif alias == 'objc.NULL': - result = objc.NULL - else: - result = getattr(self, alias) - - if name in self.__aliases_deprecated: - _check_deprecated(name, self.__aliases_deprecated[name]) - return result - - raise AttributeError(name) - - def __load_cftypes(self, cftypes): - if not cftypes: return - - for name, type, gettypeid_func, tollfree in cftypes: - if tollfree: - for nm in tollfree.split(','): # pragma: no branch - try: - objc.lookUpClass(nm) - except objc.error: - pass - else: - tollfree = nm - break - try: - v = objc.registerCFSignature(name, type, None, tollfree) - self.__dict__[name] = v - continue - except objc.nosuchclass_error: - pass - - if gettypeid_func is None: - func = None - - else: - try: - func = getattr(self, gettypeid_func) - except AttributeError: - func = None - - if func is None: - # GetTypeID function not found, this is either - # a CFType that isn't present on the current - # platform, or a CFType without a public GetTypeID - # function. Proxy using the generic CFType - if tollfree is None: - v = objc.registerCFSignature(name, type, None, 'NSCFType') - self.__dict__[name] = v - - continue - - v = objc.registerCFSignature(name, type, func()) - self.__dict__[name] = v diff --git a/env/lib/python2.7/site-packages/objc/_lazyimport.pyc b/env/lib/python2.7/site-packages/objc/_lazyimport.pyc deleted file mode 100644 index da7f3b58..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_lazyimport.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_locking.py b/env/lib/python2.7/site-packages/objc/_locking.py deleted file mode 100644 index ea8c14a5..00000000 --- a/env/lib/python2.7/site-packages/objc/_locking.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -Support for @synchronized blocks - -The python class object_lock is a contextmanager for with statements that -can also be used manually. -""" -import objc as _objc - -class object_lock(object): - """ - A context manager that implements the same feature as - @synchronized statements in Objective-C. Locking can also - be done manually using the ``lock`` and ``unlock`` methods. - - The mutex for object ``anObject`` is represented by - ``objc.object_lock(anObject)``. 
- """ - def __init__(self, value): - self.__value = value - - def __enter__(self): - _objc._objc_sync_enter(self.__value) - - def __exit__(self, type, value, tp): - _objc._objc_sync_exit(self.__value) - - def lock(self): - _objc._objc_sync_enter(self.__value) - - def unlock(self): - _objc._objc_sync_exit(self.__value) diff --git a/env/lib/python2.7/site-packages/objc/_locking.pyc b/env/lib/python2.7/site-packages/objc/_locking.pyc deleted file mode 100644 index 7b5e53c0..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_locking.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_machsignals.so b/env/lib/python2.7/site-packages/objc/_machsignals.so deleted file mode 100755 index d662bbb6..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_machsignals.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_objc.so b/env/lib/python2.7/site-packages/objc/_objc.so deleted file mode 100755 index 84d1d3ec..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_objc.so and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_properties.py b/env/lib/python2.7/site-packages/objc/_properties.py deleted file mode 100644 index 5420f929..00000000 --- a/env/lib/python2.7/site-packages/objc/_properties.py +++ /dev/null @@ -1,1158 +0,0 @@ -__all__ = ('object_property', 'bool_property', - 'array_property', 'set_property', 'dict_property') - -from objc import ivar, selector, _C_ID, _C_NSBOOL, _C_BOOL, NULL, _C_NSUInteger -from objc import lookUpClass -from copy import copy as copy_func -import sys - -if sys.version_info[0] == 2: - import collections as collections_abc -else: - import collections.abc as collections_abc - -NSSet = lookUpClass('NSSet') -NSObject = lookUpClass('NSObject') - -if sys.version_info[0] == 2: # pragma: no 3.x cover - range = xrange - - def _str(value): - return value - -else: # pragma: no 2.x cover - long = int - - def _str(value): - return value.decode('ascii') - - - -def attrsetter(prop, name, copy): - if copy: - def func(self, value): - if isinstance(value, NSObject): - setattr(self, name, value.copy()) - else: - setattr(self, name, copy_func(value)) - else: - def func(self, value): - setattr(self, name, value) - return func - -def attrgetter(name): - def func(self): - return getattr(self, name) - return func - -def _return_value(value): - def func(self): - return value - - return func - -def _dynamic_getter(name): - def getter(object): - m = getattr(object.pyobjc_instanceMethods, name) - return m() - getter.__name__ = name - return getter - -def _dynamic_setter(name): - def setter(object, value): - m = getattr(object.pyobjc_instanceMethods, name.replace(':', '_')) - return m(value) - setter.__name__ = name - return setter - -class object_property (object): - def __init__(self, name=None, - read_only=False, copy=False, dynamic=False, - ivar=None, typestr=_C_ID, depends_on=None): - self.__created = False - self.__inherit = False - self._name = name - self._typestr = typestr - self._ro = read_only - self._copy = copy - self._dynamic = dynamic - self._ivar = ivar - self._getter = None - self._setter = None - self._validate = None - if depends_on is None: - self._depends_on = None - else: - self._depends_on = set(depends_on) - - self.__getprop = None - self.__setprop = None - self.__parent = None - - def _clone(self): - if self._depends_on is None: - depends = None - else: - depends = self._depends_on.copy() - - v = type(self)(name=self._name, - read_only=self._ro, copy=self._copy, 
dynamic=self._dynamic, - ivar=self._ivar, typestr=self._typestr, depends_on=depends) - v.__inherit = True - - v.__getprop = self.__getprop - v.__setprop = self.__setprop - v.__parent = self - - return v - - def __pyobjc_class_setup__(self, name, class_dict, instance_methods, class_methods): - self.__created = True - if self._name is None: - self._name = name - - if self._ivar is not NULL: - if self._ivar is None: - ivname = '_' + self._name - else: - ivname = self._ivar - - if self.__parent is None: - ivar_ref = ivar(name=ivname, type=self._typestr) - class_dict[ivname] = ivar_ref - - if self._ro: - self._setter = None - - else: - setterName = b'set' + name[0].upper().encode('latin1') + name[1:].encode('latin1') + b':' - signature = b'v@:' + self._typestr - if self._setter is None: - if self.__inherit: - pass - - elif self._dynamic: - dynSetterName = 'set' + name[0].upper() + name[1:] + '_' - self.__setprop = _dynamic_setter(dynSetterName) - instance_methods.add(setterName) - - else: - - if self._ivar is NULL: - raise ValueError( - "Cannot create default setter for property " - "without ivar") - - setprop = selector( - attrsetter(self._name, ivname, self._copy), - selector=setterName, - signature=signature - ) - setprop.isHidden = True - instance_methods.add(setprop) - - # Use dynamic setter to avoid problems when subclassing - self.__setprop = _dynamic_setter(_str(setterName)) - else: - setprop = selector( - self._setter, - selector=setterName, - signature=signature - ) - setprop.isHidden = True - instance_methods.add(setprop) - - # Use dynamic setter to avoid problems when subclassing - self.__setprop = _dynamic_setter(_str(setterName)) - - if self._typestr in (_C_NSBOOL, _C_BOOL): - getterName = b'is' + name[0].upper().encode('latin1') + name[1:].encode('latin1') - else: - getterName = self._name.encode('latin1') - - if self._getter is None: - if self.__inherit: - pass - - elif self._dynamic: - if self._typestr in (_C_NSBOOL, _C_BOOL): - dynGetterName = 'is' + name[0].upper() + name[1:] - else: - dynGetterName = self._name - - self.__getprop = _dynamic_getter(dynGetterName) - instance_methods.add(getterName) - - else: - if self._ivar is NULL: - raise ValueError( - "Cannot create default getter for property without ivar") - - self.__getprop = selector( - attrgetter(ivname), - selector=getterName, - signature=self._typestr + b'@:') - self.__getprop.isHidden=True - instance_methods.add(self.__getprop) - - else: - self.__getprop = getprop = selector( - self._getter, - selector=getterName, - signature=self._typestr + b'@:') - getprop.isHidden=True - instance_methods.add(getprop) - #self.__getprop = _dynamic_getter(getterName) - - if self._validate is not None: - selName = b'validate' + self._name[0].upper().encode('latin') + self._name[1:].encode('latin') + b':error:' - signature = _C_NSBOOL + b'@:N^@o^@' - validate = selector( - self._validate, - selector=selName, - signature=signature) - class_dict[validate.selector] = validate - instance_methods.add(validate) - - if self._depends_on: - if self.__parent is not None: - if self.__parent._depends_on: - self._depends_on.update(self.__parent._depends_on.copy()) - - self._depends_on = self._depends_on - - affecting = selector( - _return_value(NSSet.setWithArray_(list(self._depends_on))), - selector = b'keyPathsForValuesAffecting' + self._name[0].upper().encode('latin1') + self._name[1:].encode('latin1'), - signature = b'@@:', - isClassMethod=True) - class_dict[affecting.selector] = affecting - class_methods.add(affecting) - - def __get__(self, 
object, owner): - if object is None: - return self - return self.__getprop(object) - - def __set__(self, object, value): - if self.__setprop is None: - raise ValueError("setting read-only property " + self._name) - - return self.__setprop(object, value) - - def __delete__(self, object): - raise TypeError("cannot delete property " + self._name) - - def depends_on(self, keypath): - if self._depends_on is None: - self._depends_on = set() - self._depends_on.add(keypath) - - def getter(self, function): - if self.__created: - v = self._clone() - v._getter = function - return v - - self._getter = function - return self - - def setter(self, function): - - if self.__created: - v = self._clone() - v._ro = False - v._setter = function - return v - - if self._ro: - raise ValueError("Defining settter for read-only property") - - self._setter = function - return self - - def validate(self, function): - if self._ro: - raise ValueError("Defining validator for read-only property") - - if self.__created: - v = self._clone() - v._validate = function - return v - - self._validate = function - return self - -class bool_property (object_property): - def __init__(self, name=None, - read_only=False, copy=False, dynamic=False, - ivar=None, typestr=_C_NSBOOL): - super(bool_property, self).__init__( - name, read_only, copy, dynamic, ivar, typestr) - - - - -NSIndexSet = lookUpClass('NSIndexSet') -NSMutableIndexSet = lookUpClass('NSMutableIndexSet') -NSKeyValueChangeSetting = 1 -NSKeyValueChangeInsertion = 2 -NSKeyValueChangeRemoval = 3 -NSKeyValueChangeReplacement = 4 - - -# Helper function for (not) pickling array_proxy instances -# NOTE: Don't remove this function, it can be referenced from -# pickle files. -def _id(value): - return value - -# FIXME: split into two: array_proxy and mutable_array_proxy -class array_proxy (collections_abc.MutableSequence): - # XXX: The implemenation should be complete, but is currently not - # tested. - __slots__ = ('_name', '_parent', '__wrapped', '_ro') - - def __init__(self, name, parent, wrapped, read_only): - self._name = name - self._parent = parent - self._ro = read_only - self.__wrapped = wrapped - - @property - def _wrapped(self): - return self.__wrapped.__getvalue__(self._parent) - - def __indexSetForIndex(self, index): - if isinstance(index, slice): - result = NSMutableIndexSet.alloc().init() - start, stop, step = index.indices(len(self._wrapped)) - for i in range(start, stop, step): - result.addIndex_(i) - - return result - - elif isinstance(index, (int, long)): - if index < 0: - v = len(self) + index - if v < 0: - raise IndexError(index) - return NSIndexSet.alloc().initWithIndex_(v) - - else: - return NSIndexSet.alloc().initWithIndex_(index) - - else: - raise TypeError(index) - - - - def __repr__(self): - return '' - - def __reduce__(self): - # Ensure that the proxy itself doesn't get stored - # in pickles. 
- return _id, (self._wrapped,) - - def __getattr__(self, name): - # Default: just defer to wrapped list - return getattr(self._wrapped, name) - - def __len__(self): - return self._wrapped.__len__() - - def __getitem__(self, index): - return self._wrapped[index] - - def __setitem__(self, index, value): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - indexes = self.__indexSetForIndex(index) - self._parent.willChange_valuesAtIndexes_forKey_( - NSKeyValueChangeSetting, - indexes, self._name) - try: - self._wrapped[index] = value - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeReplacement, - indexes, self._name) - - def __delitem__(self, index): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - indexes = self.__indexSetForIndex(index) - self._parent.willChange_valuesAtIndexes_forKey_( - NSKeyValueChangeRemoval, - indexes, self._name) - try: - del self._wrapped[index] - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeRemoval, - indexes, self._name) - - def append(self, value): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - index = len(self) - indexes = NSIndexSet.alloc().initWithIndex_(index) - self._parent.willChange_valuesAtIndexes_forKey_( - NSKeyValueChangeInsertion, - indexes, self._name) - try: - self._wrapped.append(value) - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeInsertion, - indexes, self._name) - - def insert(self, index, value): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - if isinstance(index, slice): - raise TypeError("insert argument 1 is a slice") - - indexes = self.__indexSetForIndex(index) - self._parent.willChange_valuesAtIndexes_forKey_( - NSKeyValueChangeInsertion, - indexes, self._name) - try: - self._wrapped.insert(index, value) - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeInsertion, - indexes, self._name) - - def pop(self, index=-1): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - if isinstance(index, slice): - raise TypeError("insert argument 1 is a slice") - - indexes = self.__indexSetForIndex(index) - self._parent.willChange_valuesAtIndexes_forKey_( - NSKeyValueChangeRemoval, - indexes, self._name) - try: - return self._wrapped.pop(index) - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeRemoval, - indexes, self._name) - - def extend(self, values): - # XXX: This is suboptimal but correct - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - values = list(values) - - indexes = NSIndexSet.alloc().initWithIndexesInRange_((len(self), len(values))) - - self._parent.willChange_valuesAtIndexes_forKey_( - NSKeyValueChangeInsertion, - indexes, self._name) - try: - self._wrapped.extend(values) - - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeInsertion, - indexes, self._name) - - def __iadd__(self, values): - self.extend(values) - return self - - def __add__(self, values): - return self._wrapped + values - - def __mul__(self, count): - return self._wrapped * count - - def __imul__(self, count): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - if not isinstance(count, (int, long)): - raise TypeError(count) - - indexes = NSIndexSet.alloc().initWithIndexesInRange_((len(self), len(self)*(count-1))) - self._parent.willChange_valuesAtIndexes_forKey_( - 
NSKeyValueChangeInsertion, - indexes, self._name) - try: - self._wrapped.__imul__(count) - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeInsertion, - indexes, self._name) - - return self - - - def __eq__(self, other): - if isinstance(other, array_proxy): - return self._wrapped == other._wrapped - - else: - return self._wrapped == other - - def __ne__(self, other): - if isinstance(other, array_proxy): - return self._wrapped != other._wrapped - - else: - return self._wrapped != other - - def __lt__(self, other): - if isinstance(other, array_proxy): - return self._wrapped < other._wrapped - - else: - return self._wrapped < other - - def __le__(self, other): - if isinstance(other, array_proxy): - return self._wrapped <= other._wrapped - - else: - return self._wrapped <= other - - def __gt__(self, other): - if isinstance(other, array_proxy): - return self._wrapped > other._wrapped - - else: - return self._wrapped > other - - def __ge__(self, other): - if isinstance(other, array_proxy): - return self._wrapped >= other._wrapped - - else: - return self._wrapped >= other - - - if sys.version_info[0] == 2: # pragma: no 3.x cover - def __cmp__(self, other): - if isinstance(other, array_proxy): - return cmp(self._wrapped, other._wrapped) - - else: - return cmp(self._wrapped, other) - - def sort(self, cmp=None, key=None, reverse=False): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - indexes = NSIndexSet.alloc().initWithIndexesInRange_( - (0, len(self._wrapped))) - self._parent.willChange_valuesAtIndexes_forKey_( - NSKeyValueChangeReplacement, - indexes, self._name) - try: - self._wrapped.sort(cmp=cmp, key=key, reverse=reverse) - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeReplacement, - indexes, self._name) - - else: # pragma: no 2.x cover - def sort(self, key=None, reverse=False): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - indexes = NSIndexSet.alloc().initWithIndexesInRange_( - (0, len(self._wrapped))) - self._parent.willChange_valuesAtIndexes_forKey_( - NSKeyValueChangeReplacement, - indexes, self._name) - try: - self._wrapped.sort(key=key, reverse=reverse) - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeReplacement, - indexes, self._name) - - def reverse(self): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - indexes = NSIndexSet.alloc().initWithIndexesInRange_( - (0, len(self._wrapped))) - self._parent.willChange_valuesAtIndexes_forKey_( - NSKeyValueChangeReplacement, - indexes, self._name) - try: - self._wrapped.reverse() - finally: - self._parent.didChange_valuesAtIndexes_forKey_( - NSKeyValueChangeReplacement, - indexes, self._name) - -def makeArrayAccessors(name): - - def countOf(self): - return len(getattr(self, name)) - - def objectIn(self, idx): - return getattr(self, name)[idx] - - def insert(self, value, idx): - getattr(self, name).insert(idx, value) - - def replace(self, idx, value): - getattr(self, name)[idx] = value - - def remove(self, idx): - del getattr(self, name)[idx] - - return countOf, objectIn, insert, remove, replace - -class array_property (object_property): - def __init__(self, name=None, - read_only=False, copy=True, dynamic=False, - ivar=None, depends_on=None): - super(array_property, self).__init__(name, - read_only=read_only, - copy=copy, dynamic=dynamic, - ivar=ivar, depends_on=depends_on) - - def __pyobjc_class_setup__(self, name, class_dict, instance_methods, 
class_methods): - super(array_property, self).__pyobjc_class_setup__(name, class_dict, instance_methods, class_methods) - - - # Insert (Mutable) Indexed Accessors - # FIXME: should only do the mutable bits when we're actually a mutable property - - name = self._name - Name = name[0].upper() + name[1:] - - countOf, objectIn, insert, remove, replace = makeArrayAccessors(self._name) - - countOf = selector(countOf, - selector = ('countOf%s'%(Name,)).encode('latin1'), - signature = _C_NSUInteger + b'@:', - ) - countOf.isHidden = True - instance_methods.add(countOf) - - objectIn = selector(objectIn, - selector = ('objectIn%sAtIndex:'%(Name,)).encode('latin1'), - signature = b'@@:' + _C_NSUInteger, - ) - objectIn.isHidden = True - instance_methods.add(objectIn) - - insert = selector(insert, - selector = ('insertObject:in%sAtIndex:'%(Name,)).encode('latin1'), - signature = b'v@:@' + _C_NSUInteger, - ) - insert.isHidden = True - instance_methods.add(insert) - - remove = selector(remove, - selector = ('removeObjectFrom%sAtIndex:'%(Name,)).encode('latin1'), - signature = b'v@:' + _C_NSUInteger, - ) - remove.isHidden = True - instance_methods.add(remove) - - replace = selector(replace, - selector = ('replaceObjectIn%sAtIndex:withObject:'%(Name,)).encode('latin1'), - signature = b'v@:' + _C_NSUInteger + b'@', - ) - replace.isHidden = True - instance_methods.add(replace) - - - def __set__(self, object, value): - if isinstance(value, array_proxy): - if value._name == self._name and value._parent is object: - # attr.prop = attr.prop - return - - if isinstance(value, array_proxy): - value = list(value) - - super(array_property, self).__set__(object, value) - - def __get__(self, object, owner): - v = object_property.__get__(self, object, owner) - if v is None: - v = list() - object_property.__set__(self, object, v) - return array_proxy(self._name, object, self, self._ro) - - def __getvalue__(self, object): - v = object_property.__get__(self, object, None) - if v is None: - v = list() - object_property.__set__(self, object, v) - return v - - -NSKeyValueUnionSetMutation = 1 -NSKeyValueMinusSetMutation = 2 -NSKeyValueIntersectSetMutation = 3 -NSKeyValueSetSetMutation = 4 - - -class set_proxy (collections_abc.MutableSet): - __slots__ = ('_name', '__wrapped', '_parent', '_ro') - - def __init__(self, name, parent, wrapped, read_only): - self._name = name - self._parent = parent - self._ro = read_only - self.__wrapped = wrapped - - def __repr__(self): - return '' - - @property - def _wrapped(self): - return self.__wrapped.__getvalue__(self._parent) - - def __reduce__(self): - # Ensure that the proxy itself doesn't get stored - # in pickles. 
- return _id, (self._wrapped,) - - def __getattr__(self, attr): - return getattr(self._wrapped, attr) - - def __contains__(self, value): - return self._wrapped.__contains__(value) - - def __iter__(self): - return self._wrapped.__iter__() - - def __len__(self): - return self._wrapped.__len__() - - - def __eq__(self, other): - if isinstance(other, set_proxy): - return self._wrapped == other._wrapped - - else: - return self._wrapped == other - - def __ne__(self, other): - if isinstance(other, set_proxy): - return self._wrapped != other._wrapped - - else: - return self._wrapped != other - - def __lt__(self, other): - if isinstance(other, set_proxy): - return self._wrapped < other._wrapped - - else: - return self._wrapped < other - - def __le__(self, other): - if isinstance(other, set_proxy): - return self._wrapped <= other._wrapped - - else: - return self._wrapped <= other - - def __gt__(self, other): - if isinstance(other, set_proxy): - return self._wrapped > other._wrapped - - else: - return self._wrapped > other - - def __ge__(self, other): - if isinstance(other, set_proxy): - return self._wrapped >= other._wrapped - - else: - return self._wrapped >= other - - - if sys.version_info[0] == 2: # pragma: no 3.x cover; pragma: no branch - def __cmp__(self, other): - raise TypeError('cannot compare sets using cmp()') - - def add(self, item): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - self._parent.willChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueUnionSetMutation, - set([item]), - ) - try: - self._wrapped.add(item) - finally: - self._parent.didChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueUnionSetMutation, - set([item]), - ) - - def clear(self): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - object = set(self._wrapped) - self._parent.willChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - object - ) - try: - self._wrapped.clear() - finally: - self._parent.didChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - object - ) - - def difference_update(self, *others): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - s = set() - s.update(*others) - self._parent.willChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - s - ) - try: - self._wrapped.difference_update(s) - - finally: - self._parent.didChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - s - ) - - - def discard(self, item): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - self._parent.willChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - {item} - ) - try: - self._wrapped.discard(item) - - finally: - self._parent.didChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - {item} - ) - - def intersection_update(self, other): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - other = set(other) - - self._parent.willChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueIntersectSetMutation, - other - ) - try: - self._wrapped.intersection_update(other) - - finally: - self._parent.didChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueIntersectSetMutation, - other - ) - - def pop(self): - if self._ro: - raise ValueError("Property '%s' 
is read-only"%(self._name,)) - - try: - v = next(iter(self)) - except StopIteration: - raise KeyError("Empty set") - - self.remove(v) - return v - - def remove(self, item): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - self._parent.willChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - set([item]) - ) - try: - self._wrapped.remove(item) - - finally: - self._parent.didChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - set([item]) - ) - - def symmetric_difference_update(self, other): - # NOTE: This method does not call the corresponding method - # of the wrapped set to ensure that we generate the right - # notifications. - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - other = set(other) - - to_add = set() - to_remove = set() - for o in other: - if o in self: - to_remove.add(o) - else: - to_add.add(o) - - self._parent.willChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - to_remove - ) - try: - self._wrapped.difference_update(to_remove) - - finally: - self._parent.didChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueMinusSetMutation, - to_remove - ) - - self._parent.willChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueUnionSetMutation, - to_add - ) - try: - self._wrapped.update(to_add) - - finally: - self._parent.didChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueUnionSetMutation, - to_add - ) - - def update(self, *others): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - s = set() - s.update(*others) - - self._parent.willChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueUnionSetMutation, - s - ) - try: - self._wrapped.update(s) - - finally: - self._parent.didChangeValueForKey_withSetMutation_usingObjects_( - self._name, - NSKeyValueUnionSetMutation, - s - ) - - def __or__(self, other): - return self._wrapped | other - - def __and__(self, other): - return self._wrapped & other - - def __xor__(self, other): - return self._wrapped ^ other - - def __sub__(self, other): - return self._wrapped - other - - def __ior__(self, other): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - self.update(other) - return self - - def __isub__(self, other): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - self.difference_update(other) - return self - - def __ixor__(self, other): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - self.symmetric_difference_update(other) - return self - - def __iand__(self, other): - if self._ro: - raise ValueError("Property '%s' is read-only"%(self._name,)) - - self.intersection_update(other) - return self - - -def makeSetAccessors(name): - def countOf(self): - return len(getattr(self, name)) - - def enumeratorOf(self): - return iter(getattr(self, name)) - - def memberOf(self, value): - collection = getattr(self, name) - - for item in collection: - if item == value: - return item - - else: - return None - - def add(self, value): - getattr(self, name).add(value) - - def remove(self, value): - getattr(self, name).discard(value) - - return countOf, enumeratorOf, memberOf, add, remove - - -class set_property (object_property): - def __init__(self, name=None, - read_only=False, copy=True, dynamic=False, - ivar=None, depends_on=None): - super(set_property, 
self).__init__(name, - read_only=read_only, - copy=copy, dynamic=dynamic, - ivar=ivar, depends_on=depends_on) - - def __get__(self, object, owner): - v = object_property.__get__(self, object, owner) - if v is None: - v = set() - object_property.__set__(self, object, v) - return set_proxy(self._name, object, self, self._ro) - - def __set__(self, object, value): - if isinstance(value, set_proxy): - if value._name == self._name and value._parent is object: - # attr.prop = attr.prop - return - - if isinstance(value, set_proxy): - value = list(value) - - super(set_property, self).__set__(object, value) - - def __getvalue__(self, object): - v = object_property.__get__(self, object, None) - if v is None: - v = set() - object_property.__set__(self, object, v) - return v - - def __pyobjc_class_setup__(self, name, class_dict, instance_methods, class_methods): - super(set_property, self).__pyobjc_class_setup__(name, class_dict, instance_methods, class_methods) - - # (Mutable) Unordered Accessors - # FIXME: should only do the mutable bits when we're actually a mutable property - - name = self._name - Name = name[0].upper() + name[1:] - - countOf, enumeratorOf, memberOf, add, remove = makeSetAccessors(self._name) - - countOf = selector(countOf, - selector = ('countOf%s'%(Name,)).encode('latin1'), - signature = _C_NSUInteger + b'@:', - ) - countOf.isHidden = True - instance_methods.add(countOf) - - enumeratorOf = selector(enumeratorOf, - selector = ('enumeratorOf%s'%(Name,)).encode('latin1'), - signature = b'@@:', - ) - enumeratorOf.isHidden = True - instance_methods.add(enumeratorOf) - - memberOf = selector(memberOf, - selector = ('memberOf%s:'%(Name,)).encode('latin'), - signature = b'@@:@', - ) - memberOf.isHidden = True - instance_methods.add(memberOf) - - add1 = selector(add, - selector = ('add%s:'%(Name,)).encode('latin'), - signature = b'v@:@', - ) - add1.isHidden = True - instance_methods.add(add1) - - add2 = selector(add, - selector = ('add%sObject:'%(Name,)).encode('latin1'), - signature = b'v@:@', - ) - add2.isHidden = True - instance_methods.add(add2) - - remove1 = selector(remove, - selector = ('remove%s:'%(Name,)).encode('latin1'), - signature = b'v@:@', - ) - remove1.isHidden = True - instance_methods.add(remove1) - - remove2 = selector(remove, - selector = ('remove%sObject:'%(Name,)).encode('latin'), - signature = b'v@:@', - ) - remove2.isHidden = True - instance_methods.add(remove2) - - -NSMutableDictionary = lookUpClass('NSMutableDictionary') - -class dict_property (object_property): - def __get__(self, object, owner): - v = object_property.__get__(self, object, owner) - if v is None: - v = NSMutableDictionary.alloc().init() - object_property.__set__(self, object, v) - return object_property.__get__(self, object, owner) diff --git a/env/lib/python2.7/site-packages/objc/_properties.pyc b/env/lib/python2.7/site-packages/objc/_properties.pyc deleted file mode 100644 index a721de94..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_properties.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_protocols.py b/env/lib/python2.7/site-packages/objc/_protocols.py deleted file mode 100644 index 3515800f..00000000 --- a/env/lib/python2.7/site-packages/objc/_protocols.py +++ /dev/null @@ -1,34 +0,0 @@ -from objc import _objc -import sys - -__all__ = ['protocolNamed', 'ProtocolError'] - -class ProtocolError(_objc.error): - __module__ = 'objc' - -PROTOCOL_CACHE = {} -def protocolNamed(name): - """ - Returns a Protocol object for the named protocol. 
This is the - equivalent of @protocol(name) in Objective-C. - Raises objc.ProtocolError when the protocol does not exist. - """ - if sys.version_info[0] == 2: # pragma: no 3.x cover; pragma: no branch - name = unicode(name) - - try: - return PROTOCOL_CACHE[name] - except KeyError: - pass - for p in _objc.protocolsForProcess(): - pname = p.__name__ - PROTOCOL_CACHE.setdefault(pname, p) - if pname == name: - return p - for cls in _objc.getClassList(): - for p in _objc.protocolsForClass(cls): - pname = p.__name__ - PROTOCOL_CACHE.setdefault(pname, p) - if pname == name: - return p - raise ProtocolError("protocol %r does not exist" % (name,), name) diff --git a/env/lib/python2.7/site-packages/objc/_protocols.pyc b/env/lib/python2.7/site-packages/objc/_protocols.pyc deleted file mode 100644 index fed92bd9..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_protocols.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_pycoder.py b/env/lib/python2.7/site-packages/objc/_pycoder.py deleted file mode 100644 index 03cd1cc5..00000000 --- a/env/lib/python2.7/site-packages/objc/_pycoder.py +++ /dev/null @@ -1,642 +0,0 @@ -""" -Implementation of NSCoding for OC_PythonObject and friends - -A minor problem with NSCoding support is that NSCoding restores -graphs recusively while Pickle does so depth-first (more of less). -This can cause problems when the object state contains the -object itself, which is why we need a 'setValue' callback for the -load_* functions below. -""" -from __future__ import unicode_literals -import sys -import objc -from types import * - -if sys.version_info[0] == 2: - import copy_reg as copyreg -else: - import copyreg - -import copy - -from pickle import PicklingError, UnpicklingError - -if sys.version_info[0] == 2: - import __builtin__ as _builtins -else: - import builtins as _builtins - -# _getattribute and whichmodule are adapted from the -# same function's in Python 3.4's pickle module. 
The -# primary difference is that the functions below -# behave as if 'allow_qualname' is true) - -def _getattribute(obj, name): - dotted_path = name.split(".") - for subpath in dotted_path: - if subpath == '': - raise AttributeError("Can't get local attribute %r on %r" % ( - name, obj)) - try: - obj = getattr(obj, subpath) - except AttributeError: - raise AttributeError("Can't get attribute %r on %r" % ( - name, obj)) - return obj - -def whichmodule(obj, name): - module_name = getattr(obj, '__module__', None) - if module_name is not None: - return module_name - - for module_name, module in list(sys.modules.items()): - if module_name == "__main__" or module is None: - continue - - try: - if _getattribute(module, name) is obj: - return module_name - - except AttributeError as exc: - pass - - return '__main__' - - - -if sys.version_info[0] == 2: # pragma: no 3.x cover - bltin_intern = intern - - def intern(value): - if isinstance(value, objc.pyobjc_unicode): - return bltin_intern(value.encode('utf-8')) - elif isinstance(value, basestring): - return bltin_intern(value) - else: - return value - - def import_module(name): - if name == 'copyreg': - name = 'copy_reg' - __import__(name, level=0) - return sys.modules[name] - -else: # pragma: no 2.x cover - unicode = str - long = int - bltin_intern = sys.intern - - def intern(value): - if isinstance(value, objc.pyobjc_unicode): - return bltin_intern(str(value)) - elif isinstance(value, str): - return bltin_intern(value) - else: - return value - - def import_module(name): - if name == 'copy_reg': - name = 'copyreg' - __import__(name, level=0) - return sys.modules[name] - - -NSArray = objc.lookUpClass("NSArray") -NSMutableArray = objc.lookUpClass("NSMutableArray") -NSDictionary = objc.lookUpClass("NSDictionary") -NSString = objc.lookUpClass("NSString") -NSSet = objc.lookUpClass("NSSet") -NSMutableSet = objc.lookUpClass("NSMutableSet") - -kOP_REDUCE=0 -kOP_INST=1 -kOP_GLOBAL=2 -kOP_NONE=3 -kOP_BOOL=4 -kOP_INT=5 -kOP_LONG=6 -kOP_FLOAT=7 -kOP_UNICODE=8 -kOP_STRING=9 -kOP_TUPLE=10 -kOP_LIST=11 -kOP_DICT=12 -kOP_GLOBAL_EXT=13 -kOP_FLOAT_STR=14 - -kKIND = NSString.stringWithString_("kind") -kFUNC = NSString.stringWithString_("func") -kARGS = NSString.stringWithString_("args") -kLIST = NSString.stringWithString_("list") -kDICT = NSString.stringWithString_("dict") -kSTATE = NSString.stringWithString_("state") -kCLASS = NSString.stringWithString_("class") -kVALUE = NSString.stringWithString_("value") -kNAME = NSString.stringWithString_("name") -kMODULE = NSString.stringWithString_("module") -kCODE = NSString.stringWithString_("code") - -class _EmptyClass: - pass - -encode_dispatch = {} - -# Code below tries to mirror the implementation in pickle.py, with -# adaptations because we're not saving to a byte stream but to another -# serializer. 
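For orientation while reading this deleted hunk: the save_*/load_* functions that follow replay pickle's reduce protocol through an NSCoder instead of a byte stream. The sketch below is a standalone illustration of that round-trip, not part of the deleted module; the Point class and the capture/rebuild helper names are invented for the example.

# Standalone sketch of the reduce protocol that save_reduce/load_reduce mirror:
# capture an object as (callable, args, state, listitems, dictitems), then rebuild it.
class Point:
    def __init__(self, x=0, y=0):
        self.x, self.y = x, y

def capture(obj):
    # __reduce_ex__(2) returns a tuple of two to five elements; treat the optional ones as None.
    rv = obj.__reduce_ex__(2)
    func, args = rv[0], rv[1]
    state = rv[2] if len(rv) > 2 else None
    listitems = rv[3] if len(rv) > 3 else None
    dictitems = rv[4] if len(rv) > 4 else None
    return func, args, state, listitems, dictitems

def rebuild(func, args, state, listitems, dictitems):
    value = func(*args)                      # e.g. copyreg.__newobj__
    if state is not None:
        if hasattr(value, '__setstate__'):
            value.__setstate__(state)
        else:
            value.__dict__.update(state)     # the common case handled by load_reduce
    if listitems is not None:
        for item in listitems:
            value.append(item)
    if dictitems is not None:
        for k, v in dictitems:
            value[k] = v
    return value

p = rebuild(*capture(Point(3, 4)))
assert (p.x, p.y) == (3, 4)

The deleted module encodes the same pieces under the kFUNC/kARGS/kLIST/kDICT/kSTATE keys (or their non-keyed equivalents) and, when decoding, calls setValue(value) before restoring state so that self-referential object graphs resolve, as its docstring explains.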
- -def save_reduce(coder, func, args, - state=None, listitems=None, dictitems=None, obj=None): - - if not isinstance(args, tuple): - raise PicklingError("args from reduce() should be a tuple") - - if not callable(func): - raise PicklingError("func from reduce should be callable") - - if coder.allowsKeyedCoding(): - coder.encodeInt_forKey_(kOP_REDUCE, kKIND) - coder.encodeObject_forKey_(func, kFUNC) - coder.encodeObject_forKey_(args, kARGS) - if listitems is None: - coder.encodeObject_forKey_(None, kLIST) - else: - coder.encodeObject_forKey_(list(listitems), kLIST) - - if dictitems is None: - coder.encodeObject_forKey_(None, kDICT) - else: - coder.encodeObject_forKey_(dict(dictitems), kDICT) - coder.encodeObject_forKey_(state, kSTATE) - - else: - coder.encodeValueOfObjCType_at_(objc._C_INT, kOP_REDUCE) - coder.encodeObject_(func) - coder.encodeObject_(args) - if listitems is None: - coder.encodeObject_(None) - else: - coder.encodeObject_(list(listitems)) - - if dictitems is None: - coder.encodeObject_(None) - else: - coder.encodeObject_(dict(dictitems)) - coder.encodeObject_(state) - -if sys.version_info[0] == 2: # pragma: no 3.x cover - def save_inst(coder, obj): - if hasattr(obj, '__getinitargs__'): - args = obj.__getinitargs__() - len(args) # Assert it's a sequence - else: - args = () - - cls = obj.__class__ - - if coder.allowsKeyedCoding(): - coder.encodeInt32_forKey_(kOP_INST, kKIND) - coder.encodeObject_forKey_(cls, kCLASS) - coder.encodeObject_forKey_(args, kARGS) - - else: - coder.encodeValueOfObjCType_at_(objc._C_INT, kOP_INST) - coder.encodeObject_(cls) - coder.encodeObject_(args) - - try: - getstate = obj.__getstate__ - except AttributeError: - state = obj.__dict__ - - else: - state = getstate() - - if coder.allowsKeyedCoding(): - coder.encodeObject_forKey_(state, kSTATE) - - else: - coder.encodeObject_(state) - - encode_dispatch[InstanceType] = save_inst - -if sys.version_info[0] == 2: # pragma: no 3.x cover - def save_int(coder, obj): - if coder.allowsKeyedCoding(): - coder.encodeInt_forKey_(kOP_INT, kKIND) - coder.encodeInt64_forKey_(obj, kVALUE) - else: - coder.encodeValueOfObjCType_at_(objc._C_INT, kOP_INT) - coder.encodeValueOfObjCType_at_(objc._C_LNG_LNG, obj) - encode_dispatch[int] = save_int - - def save_long(coder, obj): - encoded = unicode(repr(obj)) - if encoded.endswith('L'): - encoded = encoded[:-1] - if coder.allowsKeyedCoding(): - coder.encodeInt_forKey_(kOP_LONG, kKIND) - coder.encodeObject_forKey_(encoded, kVALUE) - else: - coder.encodeValueOfObjCType_at_(objc._C_INT, kOP_LONG) - coder.encodeObject_(encoded) - - encode_dispatch[long] = save_long - -else: # pragma: no 2.x cover - def save_int(coder, obj): - if coder.allowsKeyedCoding(): - coder.encodeInt_forKey_(kOP_LONG, kKIND) - coder.encodeObject_forKey_(unicode(repr(obj)), kVALUE) - else: - coder.encodeValueOfObjCType_at_(objc._C_INT, kOP_LONG) - coder.encodeObject_(unicode(repr(obj))) - encode_dispatch[int] = save_int - -def save_float(coder, obj): # pragma: no cover - # NOTE: 'no cover' because floats are encoded as OC_PythonNumber - # and that doesn't call back to this code for basic C types. - # TODO: full review the code path and remove this function. - - # Encode floats as strings, this seems to be needed to get - # 100% reliable round-trips. 
- if coder.allowsKeyedCoding(): - coder.encodeInt_forKey_(kOP_FLOAT_STR, kKIND) - coder.encodeObject_forKey_(unicode(repr(obj)), kVALUE) - else: - coder.encodeValueOfObjCType_at_(objc._C_INT, kOP_FLOAT_STR) - coder.encodeObject_(unicode(repr(obj))) - -encode_dispatch[float] = save_float - -def save_global(coder, obj, name=None): - if name is None: - name = getattr(obj, '__qualname__', None) - if name is None: - name = obj.__name__ - - module_name = whichmodule(obj, name) - try: - module = import_module(module_name) - obj2 = _getattribute(module, name) - - except (ImportError, KeyError, AttributeError): - raise PicklingError( - "Can't pickle %r: it's not found as %s.%s" % - (obj, module_name, name)) - else: - if obj2 is not obj: - raise PicklingError( - "Can't pickle %r: it's not the same object as %s.%s" % - (obj, module_name, name)) - - code = copyreg._extension_registry.get((module_name, name)) - - if coder.allowsKeyedCoding(): - if code: - coder.encodeInt_forKey_(kOP_GLOBAL_EXT, kKIND) - coder.encodeInt_forKey_(code, kCODE) - - else: - coder.encodeInt_forKey_(kOP_GLOBAL, kKIND) - coder.encodeObject_forKey_(unicode(module_name), kMODULE) - coder.encodeObject_forKey_(unicode(name), kNAME) - - else: - if code: - coder.encodeValueOfObjCType_at_(objc._C_INT, kOP_GLOBAL_EXT) - coder.encodeValueOfObjCType_at_(objc._C_INT, code) - - else: - coder.encodeValueOfObjCType_at_(objc._C_INT, kOP_GLOBAL) - coder.encodeObject_(unicode(module_name)) - coder.encodeObject_(unicode(name)) - -if sys.version_info[0] == 2: # pragma: no 3.x cover - encode_dispatch[ClassType] = save_global -encode_dispatch[type(save_global)] = save_global -try: - dir.__reduce__() -except TypeError: - encode_dispatch[type(dir)] = save_global - -def save_type(coder, obj): - if obj is type(None): - return save_reduce(coder, type, (None,), obj=obj) - elif obj is type(NotImplemented): - return save_reduce(coder, type, (NotImplemented,), obj=obj) - elif obj is type(Ellipsis): - return save_reduce(coder, type, (Ellipsis,), obj=obj) - return save_global(coder, obj) -encode_dispatch[type] = save_type - - -def save_ellipsis(coder, obj): - save_global(coder, Ellipsis, 'Ellipsis') -encode_dispatch[type(Ellipsis)] = save_ellipsis - -def save_notimplemented(coder, obj): - save_global(coder, NotImplemented, 'NotImplemented') -encode_dispatch[type(NotImplemented)] = save_notimplemented - - - - -decode_dispatch = {} - -def load_int(coder, setValue): - if coder.allowsKeyedCoding(): - return int(coder.decodeInt64ForKey_(kVALUE)) - else: - return int(coder.decodeValueOfObjCType_at_(objc._C_LNG_LNG, None)) -decode_dispatch[kOP_INT] = load_int - -def load_long(coder, setValue): - if coder.allowsKeyedCoding(): - return long(coder.decodeObjectForKey_(kVALUE)) - else: - return long(coder.decodeObject()) -decode_dispatch[kOP_LONG] = load_long - -def load_float(coder, setValue): # pragma: no cover - # Only used with old versions of PyObjC (before 2.3), keep - # for backward compatibility. 
- if coder.allowsKeyedCoding(): - return coder.decodeFloatForKey_(kVALUE) - else: - raise RuntimeError("Unexpected encoding") -decode_dispatch[kOP_FLOAT] = load_float - -def load_float_str(coder, setValue): - if coder.allowsKeyedCoding(): - return float(coder.decodeObjectForKey_(kVALUE)) - else: - return float(coder.decodeObject()) -decode_dispatch[kOP_FLOAT_STR] = load_float_str - - -def load_global_ext(coder, setValue): - if coder.allowsKeyedCoding(): - code = coder.decodeIntForKey_(kCODE) - else: - code = coder.decodeValueOfObjCType_at_(objc._C_INT, None) - nil = [] - obj = copyreg._extension_cache.get(code, nil) - if obj is not nil: - return obj - key = copyreg._inverted_registry.get(code) - if not key: - raise ValueError("unregistered extension code %d" % code) - - module, name = key - mod = import_module(module) - klass = _getattribute(mod, name) - copyreg._extension_cache[code] = klass - return klass -decode_dispatch[kOP_GLOBAL_EXT] = load_global_ext - - -def load_global(coder, setValue): - if coder.allowsKeyedCoding(): - module_name = coder.decodeObjectForKey_(kMODULE) - name = coder.decodeObjectForKey_(kNAME) - else: - module_name = coder.decodeObject() - name = coder.decodeObject() - - mod = import_module(module_name) - return _getattribute(mod, name) - -decode_dispatch[kOP_GLOBAL] = load_global - - -def load_inst(coder, setValue): - if coder.allowsKeyedCoding(): - cls = coder.decodeObjectForKey_(kCLASS) - initargs = coder.decodeObjectForKey_(kARGS) - else: - cls = coder.decodeObject() - initargs = coder.decodeObject() - - - if (sys.version_info[0] == 2 and not initargs and - type(cls) is ClassType and - not hasattr(cls, "__getinitargs__")): # pragma: no 3.x cover - value = _EmptyClass() - value.__class__ = cls - - else: - try: - value = cls(*initargs) - except TypeError as err: - raise TypeError("in constructor for %s: %s" % ( - cls.__name__, str(err)), sys.exc_info()[2]) - - - # We now have the object, but haven't set the correct - # state yet. Tell the bridge about this value right - # away, that's needed because `value` might be part - # of the object state which we'll retrieve next. - setValue(value) - - if coder.allowsKeyedCoding(): - state = coder.decodeObjectForKey_(kSTATE) - else: - state = coder.decodeObject() - if isinstance(state, NSArray): - state = tuple(state) - - setstate = getattr(value, "__setstate__", None) - if setstate is not None: - setstate(state) - return value - - slotstate = None - if isinstance(state, tuple) and len(state) == 2: - state, slotstate = state - - if state: - # Note: pickle.py catches RuntimeError here, - # that's for supporting restricted mode and - # is not relevant for PyObjC. 
- inst_dict = value.__dict__ - for k in state: - v = state[k] - if type(k) == objc.pyobjc_unicode: - inst_dict[intern(str(k))] = v - elif type(k) == str: - inst_dict[intern(k)] = v - else: - inst_dict[k] = v - - - if slotstate: - for k, v in slotstate.items(): - if isinstance(k, objc.pyobjc_unicode): - k = k.encode('utf-8') - setattr(value, intern(k), v) - - return value -decode_dispatch[kOP_INST] = load_inst - - -def load_reduce(coder, setValue): - if coder.allowsKeyedCoding(): - func = coder.decodeObjectForKey_(kFUNC) - args = coder.decodeObjectForKey_(kARGS) - - else: - func = coder.decodeObject() - args = coder.decodeObject() - - - new_args = [] - for a in args: - if isinstance(a, NSDictionary): - new_args.append(dict(a)) - elif isinstance(a, NSMutableArray): - new_args.append(list(a)) - elif isinstance(a, NSArray): - new_args.append(tuple(a)) - elif isinstance(a, NSMutableSet): - new_args.append(set(a)) - elif isinstance(a, NSSet): - new_args.append(frozenset(a)) - else: - new_args.append(a) - args = new_args - del new_args - - if sys.version_info[0] == 2 and func == copyreg.__newobj__: # pragma: no 3.x cover - try: - value = func(*args) - except AttributeError: - # copyreg.__newobj__ failed, almost certainly because - # there is __new__ method. This happens when a class - # is serialized in Python 3 and read back in Python 2 - # as a classic class. - cls = args[0] - args = args[1:] - value = cls(*args) - - else: - value = func(*args) - - # We now have the object, but haven't set the correct - # state yet. Tell the bridge about this value right - # away, that's needed because `value` might be part - # of the object state which we'll retrieve next. - setValue(value) - - if coder.allowsKeyedCoding(): - listitems = coder.decodeObjectForKey_(kLIST) - dictitems = coder.decodeObjectForKey_(kDICT) - state = coder.decodeObjectForKey_(kSTATE) - else: - listitems = coder.decodeObject() - dictitems = coder.decodeObject() - state = coder.decodeObject() - if isinstance(state, NSArray): - state = tuple(state) - - setstate = getattr(value, "__setstate__", None) - if setstate: - setstate(state) - return value - - slotstate = None - if isinstance(state, tuple) and len(state) == 2: - state, slotstate = state - - if state: - # NOTE: picke.py catches RuntimeError here - # to support restricted execution, that is not - # relevant for PyObjC. - inst_dict = value.__dict__ - - for k in state: - v = state[k] - if type(k) == objc.pyobjc_unicode: - inst_dict[intern(k)] = v - - elif type(k) == str: - inst_dict[intern(k)] = v - - else: - inst_dict[k] = v - - - if slotstate: - for k, v in slotstate.items(): - setattr(value, intern(k), v) - - if listitems: - for a in listitems: - value.append(a) - - if dictitems: - for k, v in dictitems.items(): - value[k] = v - - return value -decode_dispatch[kOP_REDUCE] = load_reduce - - -def pyobjectEncode(self, coder): - t = type(self) - - # Find builtin support - f = encode_dispatch.get(t) - if f is not None: - f(coder, self) - return - - # Check for a class with a custom metaclass - # NOTE: pickle.py catches TypeError here, that's for - # compatibility with ancient versions of Boost - # (before Python 2.2) and is not needed here. 
- issc = issubclass(t, type) - - if issc: - save_global(coder, self) - return - - # Check copyreg.dispatch_table - reduce = copyreg.dispatch_table.get(t) - if reduce is not None: - rv = reduce(self) - - else: - reduce = getattr(self, "__reduce_ex__", None) - rv = reduce(2) - - if type(rv) is str: - save_global(coder, self, rv) - return - - if type(rv) is not tuple: - raise PicklingError("%s must return string or tuple" % reduce) - - l = len(rv) - if not (2 <= l <= 5): - raise PicklingError("Tuple returned by %s must have two to " - "five elements" % reduce) - - save_reduce(coder, *rv) - -def pyobjectDecode(coder, setValue): - if coder.allowsKeyedCoding(): - tp = coder.decodeIntForKey_(kKIND) - else: - tp = coder.decodeValueOfObjCType_at_(objc._C_INT, None) - f = decode_dispatch.get(tp) - if f is None: - raise UnpicklingError("Unknown object kind: %s"%(tp,)) - - return f(coder, setValue) - -# An finally register the coder/decoder -objc.options._nscoding_version = 1 -objc.options._nscoding_encoder = pyobjectEncode -objc.options._nscoding_decoder = pyobjectDecode -objc.options._copy = copy.copy diff --git a/env/lib/python2.7/site-packages/objc/_pycoder.pyc b/env/lib/python2.7/site-packages/objc/_pycoder.pyc deleted file mode 100644 index 1cd397d7..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_pycoder.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/objc/_pythonify.py b/env/lib/python2.7/site-packages/objc/_pythonify.py deleted file mode 100644 index de6323cc..00000000 --- a/env/lib/python2.7/site-packages/objc/_pythonify.py +++ /dev/null @@ -1,93 +0,0 @@ -from objc import _objc -import sys - -__all__ = [] - - -class OC_PythonFloat(float): - __slots__=('__pyobjc_object__',) - - - def __new__(cls, obj, value): - self = float.__new__(cls, value) - self.__pyobjc_object__ = obj - return self - - __class__ = property(lambda self: self.__pyobjc_object__.__class__) - - def __getattr__(self, attr): - return getattr(self.__pyobjc_object__, attr) - - def __reduce__(self): - return (float, (float(self),)) - -base_class = int if sys.version_info[0] >= 3 else long - -class OC_PythonLong(base_class): - - def __new__(cls, obj, value): - self = base_class.__new__(cls, value) - self.__pyobjc_object__ = obj - return self - - __class__ = property(lambda self: self.__pyobjc_object__.__class__) - - def __getattr__(self, attr): - return getattr(self.__pyobjc_object__, attr) - - # The long type doesn't support __slots__ on subclasses, fake - # one part of the effect of __slots__: don't allow setting of attributes. 
- def __setattr__(self, attr, value): - if attr != '__pyobjc_object__': - raise AttributeError("'%s' object has no attribute '%s')"%(self.__class__.__name__, attr)) - self.__dict__['__pyobjc_object__'] = value - - def __reduce__(self): - return (base_class, (base_class(self),)) - - -if sys.version_info[0] == 2: # pragma: no 3.x cover; pragma: no branch - class OC_PythonInt(int): - __slots__=('__pyobjc_object__',) - - def __new__(cls, obj, value): - self = int.__new__(cls, value) - self.__pyobjc_object__ = obj - return self - - __class__ = property(lambda self: self.__pyobjc_object__.__class__) - - def __getattr__(self, attr): - return getattr(self.__pyobjc_object__, attr) - - def __reduce__(self): - return (int, (int(self),)) - - -NSNumber = _objc.lookUpClass('NSNumber') -NSDecimalNumber = _objc.lookUpClass('NSDecimalNumber') - -def numberWrapper(obj): - if isinstance(obj, NSDecimalNumber): - return obj - - try: - tp = obj.objCType() - except AttributeError: - import warnings - warnings.warn("NSNumber instance doesn't implement objCType? %r" % (obj,), RuntimeWarning) - return obj - - if tp in b'qQLfd': - if tp == b'q': - return OC_PythonLong(obj, obj.longLongValue()) - elif tp in b'QL': - return OC_PythonLong(obj, obj.unsignedLongLongValue()) - else: - return OC_PythonFloat(obj, obj.doubleValue()) - elif sys.version_info[0] == 2: # pragma: no 3.x cover - return OC_PythonInt(obj, obj.longValue()) - else: # pragma: no 2.x cover - return OC_PythonLong(obj, obj.longValue()) - -_objc.options._nsnumber_wrapper = numberWrapper diff --git a/env/lib/python2.7/site-packages/objc/_pythonify.pyc b/env/lib/python2.7/site-packages/objc/_pythonify.pyc deleted file mode 100644 index 559602f8..00000000 Binary files a/env/lib/python2.7/site-packages/objc/_pythonify.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/LICENSE.txt deleted file mode 100644 index 737fec5c..00000000 --- a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file) - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
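Referring back to the _pythonify.py hunk deleted above (before the pip-19.2.2 metadata removals that follow): OC_PythonLong cannot declare __slots__ because CPython rejects a nonempty __slots__ on subclasses of variable-sized types such as int, so it blocks attribute creation by hand in __setattr__ instead. The snippet below is a standalone illustration of that trick, not code from the deleted file; WrappedInt is an invented name.

# Mimics the OC_PythonLong approach: only '__pyobjc_object__' may ever be set,
# everything else raises, which approximates the effect of __slots__.
class WrappedInt(int):
    def __new__(cls, obj, value):
        self = int.__new__(cls, value)
        self.__dict__['__pyobjc_object__'] = obj    # bypass the guard below
        return self

    def __setattr__(self, attr, value):
        if attr != '__pyobjc_object__':
            raise AttributeError("'%s' object has no attribute '%s'"
                                 % (type(self).__name__, attr))
        self.__dict__[attr] = value

w = WrappedInt(object(), 41)
assert w + 1 == 42
try:
    w.extra = 1          # rejected, as if __slots__ were in effect
except AttributeError:
    pass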
diff --git a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/METADATA deleted file mode 100644 index 02ac0aa4..00000000 --- a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/METADATA +++ /dev/null @@ -1,81 +0,0 @@ -Metadata-Version: 2.1 -Name: pip -Version: 19.2.2 -Summary: The PyPA recommended tool for installing Python packages. -Home-page: https://pip.pypa.io/ -Author: The pip developers -Author-email: pypa-dev@groups.google.com -License: MIT -Keywords: distutils easy_install egg setuptools wheel virtualenv -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Topic :: Software Development :: Build Tools -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.* - -pip - The Python Package Installer -================================== - -.. image:: https://img.shields.io/pypi/v/pip.svg - :target: https://pypi.org/project/pip/ - -.. image:: https://readthedocs.org/projects/pip/badge/?version=latest - :target: https://pip.pypa.io/en/latest - -pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes. - -Please take a look at our documentation for how to install and use pip: - -* `Installation`_ -* `Usage`_ - -Updates are released regularly, with a new version every 3 months. More details can be found in our documentation: - -* `Release notes`_ -* `Release process`_ - -If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms: - -* `Issue tracking`_ -* `Discourse channel`_ -* `User IRC`_ - -If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: - -* `GitHub page`_ -* `Dev documentation`_ -* `Dev mailing list`_ -* `Dev IRC`_ - -Code of Conduct ---------------- - -Everyone interacting in the pip project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. - -.. _package installer: https://packaging.python.org/en/latest/current/ -.. _Python Package Index: https://pypi.org -.. _Installation: https://pip.pypa.io/en/stable/installing.html -.. _Usage: https://pip.pypa.io/en/stable/ -.. _Release notes: https://pip.pypa.io/en/stable/news.html -.. _Release process: https://pip.pypa.io/en/latest/development/release-process/ -.. _GitHub page: https://github.com/pypa/pip -.. _Dev documentation: https://pip.pypa.io/en/latest/development -.. _Issue tracking: https://github.com/pypa/pip/issues -.. _Discourse channel: https://discuss.python.org/c/packaging -.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev -.. _User IRC: https://webchat.freenode.net/?channels=%23pypa -.. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev -.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ - - diff --git a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/RECORD deleted file mode 100644 index 2abeae62..00000000 --- a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/RECORD +++ /dev/null @@ -1,668 +0,0 @@ -../../../bin/pip,sha256=iasmuu4EQd9zEVCVFWvTiOH2wZdHXuCKLNhn-sOPnOs,269 -../../../bin/pip2,sha256=iasmuu4EQd9zEVCVFWvTiOH2wZdHXuCKLNhn-sOPnOs,269 -../../../bin/pip2.7,sha256=iasmuu4EQd9zEVCVFWvTiOH2wZdHXuCKLNhn-sOPnOs,269 -pip-19.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip-19.2.2.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090 -pip-19.2.2.dist-info/METADATA,sha256=vmOfSNXOVdT7qLUBu67pJnjDGvmomcFPcz9mht7r_b8,3195 -pip-19.2.2.dist-info/RECORD,, -pip-19.2.2.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 -pip-19.2.2.dist-info/entry_points.txt,sha256=S_zfxY25QtQDVY1BiLAmOKSkkI5llzCKPLiYOSEupsY,98 -pip-19.2.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip/__init__.py,sha256=07TXbBJQ7TdAg-x2x5m3B6bEHlCXtCOJ5popcn4b1gw,23 -pip/__init__.pyc,, -pip/__main__.py,sha256=L3IHqBeasELUHvwy5CT_izVEMhM12tve289qut49DvU,623 -pip/__main__.pyc,, -pip/_internal/__init__.py,sha256=uGzk4m-m6lYf1mnYIRjjsvO35Qf6iAFatbY4oa9ifOU,2797 -pip/_internal/__init__.pyc,, -pip/_internal/build_env.py,sha256=jZHTbyb4XqoaIoPaOJP2uOp1Hnmh0HfplXBgY0TBWyM,7405 -pip/_internal/build_env.pyc,, -pip/_internal/cache.py,sha256=MzHv-Z0h8_n6XfBMxIatHcoiyAmzvX1zKtDGoJBWHk0,7658 -pip/_internal/cache.pyc,, -pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 -pip/_internal/cli/__init__.pyc,, -pip/_internal/cli/autocompletion.py,sha256=ptvsMdGjq42pzoY4skABVF43u2xAtLJlXAulPi-A10Y,6083 -pip/_internal/cli/autocompletion.pyc,, -pip/_internal/cli/base_command.py,sha256=KF1S58E8yilcKkqPyeJKU-jqQBSxBI25b_sBoq3uEAo,13029 -pip/_internal/cli/base_command.pyc,, -pip/_internal/cli/cmdoptions.py,sha256=cILKSj3jrwpQY3Xl76erVPhxnWuAIOoeJUcVjxttyaE,27543 -pip/_internal/cli/cmdoptions.pyc,, -pip/_internal/cli/main_parser.py,sha256=J_gG7JnoAeUhSDy2PFGqMEZLNm9oNYnuZunjVz94Lyw,2817 -pip/_internal/cli/main_parser.pyc,, -pip/_internal/cli/parser.py,sha256=VZKUKJPbU6I2cHPLDOikin-aCx7OvLcZ3fzYp3xytd8,9378 -pip/_internal/cli/parser.pyc,, -pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 -pip/_internal/cli/status_codes.pyc,, -pip/_internal/commands/__init__.py,sha256=KF-mqzngZMtbOxkX9M6ayyGyroCNz5xdlZEc4lItUMI,2295 -pip/_internal/commands/__init__.pyc,, -pip/_internal/commands/check.py,sha256=liigNVif0iz2mBfhvsajrLZT5zM5KIvgmKvhAW91EzA,1430 -pip/_internal/commands/check.pyc,, -pip/_internal/commands/completion.py,sha256=hqvCvoxsIHjysiD7olHKTqK2lzE1_lS6LWn69kN5qyI,2929 -pip/_internal/commands/completion.pyc,, -pip/_internal/commands/configuration.py,sha256=c22362Rk7dAwvHFja9py4sSaV0Sryqo_PzuadI1mm0w,8156 -pip/_internal/commands/configuration.pyc,, -pip/_internal/commands/debug.py,sha256=0NJZT3Zz9vjqUqeKdPPbr_jUZubnHYp7Cmk--zlZiPs,3360 -pip/_internal/commands/debug.pyc,, -pip/_internal/commands/download.py,sha256=zAyNBo0zwHixos6O-S6Kd9SAH1L_74filOoR83_Fa7U,6375 -pip/_internal/commands/download.pyc,, -pip/_internal/commands/freeze.py,sha256=lDrob-AG-qT2DyZTNWlYa9F4BqJQTy_F9h9KakBMnG0,3441 -pip/_internal/commands/freeze.pyc,, -pip/_internal/commands/hash.py,sha256=K1JycsD-rpjqrRcL_ijacY9UKmI82pQcLYq4kCM4Pv0,1681 
-pip/_internal/commands/hash.pyc,, -pip/_internal/commands/help.py,sha256=MwBhPJpW1Dt3GfJV3V8V6kgAy_pXT0jGrZJB1wCTW-E,1090 -pip/_internal/commands/help.pyc,, -pip/_internal/commands/install.py,sha256=cGXbByOjrJWKIv5myy7ZflX4jYMyjT6-w85tGhnI-Nw,22646 -pip/_internal/commands/install.pyc,, -pip/_internal/commands/list.py,sha256=MMiJnQJCfMwA1Qf0lSru7Nzm19otm49MFmbx8y01rwA,10497 -pip/_internal/commands/list.pyc,, -pip/_internal/commands/search.py,sha256=R2N1-r3RaxZqX5YeNL9QaYWnILsUn4MtPKZ1ji1i1sU,4972 -pip/_internal/commands/search.pyc,, -pip/_internal/commands/show.py,sha256=bE-ucu8fAjTTENpRRKhwD3QSWR8Rss7YgKAbMJoxock,6273 -pip/_internal/commands/show.pyc,, -pip/_internal/commands/uninstall.py,sha256=h0gfPF5jylDESx_IHgF6bZME7QAEOHzQHdn65GP-jrE,2963 -pip/_internal/commands/uninstall.pyc,, -pip/_internal/commands/wheel.py,sha256=G2dOwQkDCH0-x6nlf9MvbMY2GUf-pqAG5epV4fjMGM0,6977 -pip/_internal/commands/wheel.pyc,, -pip/_internal/configuration.py,sha256=dKsnJZN9r4jVsl9IcoKTU0iI9s6XZQu3FzOsqTNElk0,14076 -pip/_internal/configuration.pyc,, -pip/_internal/distributions/__init__.py,sha256=ydMdQRMM1DV6BdomjeP1em-YKikg90LZ9Tg5sJRhNF4,861 -pip/_internal/distributions/__init__.pyc,, -pip/_internal/distributions/base.py,sha256=Js_vmU-MKOONF_u-k5vmu3vTJnrOk3cLD_rPRB8r7-w,1000 -pip/_internal/distributions/base.pyc,, -pip/_internal/distributions/installed.py,sha256=uwB2CPqseB8rPv0ICBCIB1LMs8yQnd8h-JZe9B9oOB0,434 -pip/_internal/distributions/installed.pyc,, -pip/_internal/distributions/source.py,sha256=L4SEZsTtqx6F3D39P7yJDgqqrnc4dGMZr3BTWgA05jg,3514 -pip/_internal/distributions/source.pyc,, -pip/_internal/distributions/wheel.py,sha256=lWaa9l-REefNSL9E3A0zf8h2bZRLBOlTSBqHhPTYE7M,508 -pip/_internal/distributions/wheel.pyc,, -pip/_internal/download.py,sha256=Zd5EtNjqJct5tOzZ5DfmiR9zaWV2UbE24omoZcNsLd4,43323 -pip/_internal/download.pyc,, -pip/_internal/exceptions.py,sha256=_mDPdvO9EFMxUX4VEjzw3qic0PRqPH8EPOx__-MBNb4,10168 -pip/_internal/exceptions.pyc,, -pip/_internal/index.py,sha256=RE8HCh8MjJPgO2EhW7hww4Jr0QWFaA3GiUgxhTPs59c,56017 -pip/_internal/index.pyc,, -pip/_internal/legacy_resolve.py,sha256=GDWmB6KtWAIcTX4gvwFrU8Xc2w4X0KBEkbW8fGU24Fk,17303 -pip/_internal/legacy_resolve.pyc,, -pip/_internal/locations.py,sha256=Tv1TotkC1brrTgqG8pvLhJGvwRfiDwAlXTOdzk7hYio,5045 -pip/_internal/locations.pyc,, -pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 -pip/_internal/models/__init__.pyc,, -pip/_internal/models/candidate.py,sha256=IV7B5Rj-FjQKh5Shbv8CenuNekxdpb_chrJMEID4ouU,1169 -pip/_internal/models/candidate.pyc,, -pip/_internal/models/format_control.py,sha256=ap8Swa26ocSXBxIuCvaDBRZjxdKUFuwC-bfqXQHWtKw,2250 -pip/_internal/models/format_control.pyc,, -pip/_internal/models/index.py,sha256=K59A8-hVhBM20Xkahr4dTwP7OjkJyEqXH11UwHFVgqM,1060 -pip/_internal/models/index.pyc,, -pip/_internal/models/link.py,sha256=fj3Hg4xrPo8ucOVyJvYrq1AgJjh56D2Z8F1liDoW-TM,6553 -pip/_internal/models/link.pyc,, -pip/_internal/models/search_scope.py,sha256=JxPlngW2ecVoYrF8dr2b0oYf8XrZ-yAQ1U19uEM8Lgo,3875 -pip/_internal/models/search_scope.pyc,, -pip/_internal/models/selection_prefs.py,sha256=rPeif2KKjhTPXeMoQYffjqh10oWpXhdkxRDaPT1HO8k,1908 -pip/_internal/models/selection_prefs.pyc,, -pip/_internal/models/target_python.py,sha256=d66ljdpZZtAAQsuOytiZ7yq6spCa8GOmz5Vf7uoVZT0,3820 -pip/_internal/models/target_python.pyc,, -pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/operations/__init__.pyc,, 
-pip/_internal/operations/check.py,sha256=EkjtpXpOCTvt_VG0gRnlSBBj5SGWsoVYzbAMpepI8JU,5224 -pip/_internal/operations/check.pyc,, -pip/_internal/operations/freeze.py,sha256=rKAeXdh1HbK92Z5YtmLyil8IYkcC076lahNJMyxqbVM,9680 -pip/_internal/operations/freeze.pyc,, -pip/_internal/operations/prepare.py,sha256=z27rAvMEtlpake5OI1-SIzp-EBjjwyf8PEikO0KmJ8w,11728 -pip/_internal/operations/prepare.pyc,, -pip/_internal/pep425tags.py,sha256=NYNc2kGkqPswrDX6V2zKujWpAsjQgEGT4TKgy7qlnXA,13299 -pip/_internal/pep425tags.pyc,, -pip/_internal/pyproject.py,sha256=OlCw7pSqST68hUF_eV-YVaqJ4I7z_ROJwvgra-2C_5I,6464 -pip/_internal/pyproject.pyc,, -pip/_internal/req/__init__.py,sha256=Y2SjAuMFsSt3dkiK8kkiQAfv8sHrjl0PAT63FKFT0tM,2364 -pip/_internal/req/__init__.pyc,, -pip/_internal/req/constructors.py,sha256=tC7fNxKrvF3gbxI2IcA6uQiXJ5sPFQvulHPQnM5Ldgg,11858 -pip/_internal/req/constructors.pyc,, -pip/_internal/req/req_file.py,sha256=VNC-G_JYy6JmGipezb9n5hAzZ470mvesSx3DBFtfIVM,14180 -pip/_internal/req/req_file.pyc,, -pip/_internal/req/req_install.py,sha256=i21e6wHfTko7mQGziFoXqPbdByZ9Bnrz_bC6ZIJOwl8,40296 -pip/_internal/req/req_install.pyc,, -pip/_internal/req/req_set.py,sha256=PaDc5EswLQhxBMFbuKbJ0frZbMNKocmA8OGqIWT-9EY,7860 -pip/_internal/req/req_set.pyc,, -pip/_internal/req/req_tracker.py,sha256=wBpDzSDSYwpUfW4K43NrEOCCp1r6stuubfLc65Y95EM,3129 -pip/_internal/req/req_tracker.pyc,, -pip/_internal/req/req_uninstall.py,sha256=rVOk8BRM_L9rsUUr9lmkV6Lm9N1Os7TEIDir6tT1Q7U,23105 -pip/_internal/req/req_uninstall.pyc,, -pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/utils/__init__.pyc,, -pip/_internal/utils/appdirs.py,sha256=r9i0BZLK9KcvrzI5tqlw8ehRTtSehWGERFLy7YppG3g,9398 -pip/_internal/utils/appdirs.pyc,, -pip/_internal/utils/compat.py,sha256=4mi-czTysz5Ocuq-5K6BvISCii6_agyNwkBPNtKgYfM,9596 -pip/_internal/utils/compat.pyc,, -pip/_internal/utils/deprecation.py,sha256=zcC388qvHnBLY1GalWEYnHyh3MXHQRe4-fOoyyZeQNQ,3209 -pip/_internal/utils/deprecation.pyc,, -pip/_internal/utils/encoding.py,sha256=tudXCoAPe9fZvNK4cmWQs2frREZ-QuGCwF_SlTyz6cI,1218 -pip/_internal/utils/encoding.pyc,, -pip/_internal/utils/filesystem.py,sha256=ojaIDvOFOtkpKme5se6X2N8ARmQxu8cxvaaI-NFqVtk,990 -pip/_internal/utils/filesystem.pyc,, -pip/_internal/utils/glibc.py,sha256=di3treHUThyeXCxqgRgp-72nTizWpC8skE7RLbewKv4,4295 -pip/_internal/utils/glibc.pyc,, -pip/_internal/utils/hashes.py,sha256=lF1VlTk2hOqnbmbiMN6GxJHTNQEDI9RzkBCUqBgSHok,3904 -pip/_internal/utils/hashes.pyc,, -pip/_internal/utils/logging.py,sha256=k-7sr-yFTLDCgcrmrErlwBp2dYMhq157vT3P-xzrB0U,12883 -pip/_internal/utils/logging.pyc,, -pip/_internal/utils/marker_files.py,sha256=B-xFm0JZnrDStnA1jbQgKfDaMdXn53PqpZhtOJ-FWCc,595 -pip/_internal/utils/marker_files.pyc,, -pip/_internal/utils/misc.py,sha256=3tmhB5Zojxswgg1zGdPgAdGvu2sYU6g0BLiAbc2vhZY,38796 -pip/_internal/utils/misc.pyc,, -pip/_internal/utils/models.py,sha256=b7vdfIZrobxERktz8xZ7BqYnFLxoJzkWSeuq0JO9JYI,1041 -pip/_internal/utils/models.pyc,, -pip/_internal/utils/outdated.py,sha256=C7TK-XuCmBQ5DUpHBzq2jL-1p7DQft84foQziUyX2Ms,6292 -pip/_internal/utils/outdated.pyc,, -pip/_internal/utils/packaging.py,sha256=VtiwcAAL7LBi7tGL2je7LeW4bE11KMHGCsJ1NZY5XtM,3035 -pip/_internal/utils/packaging.pyc,, -pip/_internal/utils/setuptools_build.py,sha256=Jjf0MRzSG60UvDnWwWixg1rWM5dEuQ5sE8kb-5KwYFI,1239 -pip/_internal/utils/setuptools_build.pyc,, -pip/_internal/utils/temp_dir.py,sha256=0Xq5ZlOd2OOeHwKM6hGy66gnMGAbyhio7DtjLHd7DFg,5339 -pip/_internal/utils/temp_dir.pyc,, 
-pip/_internal/utils/typing.py,sha256=bF73ImJzIaxLLEVwfEaSJzFGqV9LaxkQBvDULIyr1jI,1125 -pip/_internal/utils/typing.pyc,, -pip/_internal/utils/ui.py,sha256=I2F3wRhWE9aere-cpCE0g9VPvgJRRLL8OC3FxXdj6_k,13768 -pip/_internal/utils/ui.pyc,, -pip/_internal/utils/virtualenv.py,sha256=oSTrUMQUqmuXcDvQZGwV65w-hlvhBAqyQiWRxLf8fN0,891 -pip/_internal/utils/virtualenv.pyc,, -pip/_internal/vcs/__init__.py,sha256=9p9dzJZy7PR6TkHhqr-DnJTFIo6JopLgtHjHNrt85h4,597 -pip/_internal/vcs/__init__.pyc,, -pip/_internal/vcs/bazaar.py,sha256=wI5WdFt_Mmnqcm0c7zn5wM3R44s7s28DNx5Yg7CJlSw,3182 -pip/_internal/vcs/bazaar.pyc,, -pip/_internal/vcs/git.py,sha256=pgTaCyWNBBfz6d0AxVnsLhft2i4XRe_hSTI_Xs7nkZg,12814 -pip/_internal/vcs/git.pyc,, -pip/_internal/vcs/mercurial.py,sha256=YzJx76Q4Nveqf8s80g-AocnfpKwCoVeHy77c95aTBO4,3335 -pip/_internal/vcs/mercurial.pyc,, -pip/_internal/vcs/subversion.py,sha256=RuQJeToLicFp2itahUftlHKjyvDFWuCWuhHfdsP9oGs,11697 -pip/_internal/vcs/subversion.pyc,, -pip/_internal/vcs/versioncontrol.py,sha256=NifBlL90ovO8WNzlt4r6HGlGbPqxNI5fUMfwLC-gMkE,19010 -pip/_internal/vcs/versioncontrol.pyc,, -pip/_internal/wheel.py,sha256=H3bdufsutvlXcLV0t3prIOTvq9m_Uc0JkLDoISZelD8,42309 -pip/_internal/wheel.pyc,, -pip/_vendor/__init__.py,sha256=iip2nWwH_riYqnDnM0q4BJFrWE-XWjYfxCejJKct0WM,4654 -pip/_vendor/__init__.pyc,, -pip/_vendor/appdirs.py,sha256=BENKsvcA08IpccD9345-rMrg3aXWFA1q6BFEglnHg6I,24547 -pip/_vendor/appdirs.pyc,, -pip/_vendor/cachecontrol/__init__.py,sha256=6cRPchVqkAkeUtYTSW8qCetjSqJo-GxP-n4VMVDbvmc,302 -pip/_vendor/cachecontrol/__init__.pyc,, -pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295 -pip/_vendor/cachecontrol/_cmd.pyc,, -pip/_vendor/cachecontrol/adapter.py,sha256=eBGAtVNRZgtl_Kj5JV54miqL9YND-D0JZPahwY8kFtY,4863 -pip/_vendor/cachecontrol/adapter.pyc,, -pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805 -pip/_vendor/cachecontrol/cache.pyc,, -pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86 -pip/_vendor/cachecontrol/caches/__init__.pyc,, -pip/_vendor/cachecontrol/caches/file_cache.py,sha256=8vrSzzGcdfEfICago1uSFbkumNJMGLbCdEkXsmUIExw,4177 -pip/_vendor/cachecontrol/caches/file_cache.pyc,, -pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856 -pip/_vendor/cachecontrol/caches/redis_cache.pyc,, -pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695 -pip/_vendor/cachecontrol/compat.pyc,, -pip/_vendor/cachecontrol/controller.py,sha256=U7g-YwizQ2O5NRgK_MZreF1ntM4E49C3PuF3od-Vwz4,13698 -pip/_vendor/cachecontrol/controller.pyc,, -pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533 -pip/_vendor/cachecontrol/filewrapper.pyc,, -pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070 -pip/_vendor/cachecontrol/heuristics.pyc,, -pip/_vendor/cachecontrol/serialize.py,sha256=GebE34fgToyWwAsRPguh8hEPN6CqoG-5hRMXRsjVABQ,6954 -pip/_vendor/cachecontrol/serialize.pyc,, -pip/_vendor/cachecontrol/wrapper.py,sha256=sfr9YHWx-5TwNz1H5rT6QOo8ggII6v3vbEDjQFwR6wc,671 -pip/_vendor/cachecontrol/wrapper.pyc,, -pip/_vendor/certifi/__init__.py,sha256=phsMyKTQP7MMe1wAHfhXPbQVxL3wXixOomxzNh5Cwa4,52 -pip/_vendor/certifi/__init__.pyc,, -pip/_vendor/certifi/__main__.py,sha256=NaCn6WtWME-zzVWQ2j4zFyl8cY4knDa9CwtHNIeFPhM,53 -pip/_vendor/certifi/__main__.pyc,, -pip/_vendor/certifi/cacert.pem,sha256=DddOv7pQyMB8zNNgiXSSFrPVn7EN8qbe7P6h_IYyuek,282085 
-pip/_vendor/certifi/core.py,sha256=EuFc2BsToG5O1-qsx4BSjQ1r1-7WRtH87b1WflZOWhI,218 -pip/_vendor/certifi/core.pyc,, -pip/_vendor/chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 -pip/_vendor/chardet/__init__.pyc,, -pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 -pip/_vendor/chardet/big5freq.pyc,, -pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 -pip/_vendor/chardet/big5prober.pyc,, -pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 -pip/_vendor/chardet/chardistribution.pyc,, -pip/_vendor/chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 -pip/_vendor/chardet/charsetgroupprober.pyc,, -pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 -pip/_vendor/chardet/charsetprober.pyc,, -pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -pip/_vendor/chardet/cli/__init__.pyc,, -pip/_vendor/chardet/cli/chardetect.py,sha256=DI8dlV3FBD0c0XA_y3sQ78z754DUv1J8n34RtDjOXNw,2774 -pip/_vendor/chardet/cli/chardetect.pyc,, -pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 -pip/_vendor/chardet/codingstatemachine.pyc,, -pip/_vendor/chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 -pip/_vendor/chardet/compat.pyc,, -pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 -pip/_vendor/chardet/cp949prober.pyc,, -pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 -pip/_vendor/chardet/enums.pyc,, -pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 -pip/_vendor/chardet/escprober.pyc,, -pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 -pip/_vendor/chardet/escsm.pyc,, -pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 -pip/_vendor/chardet/eucjpprober.pyc,, -pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 -pip/_vendor/chardet/euckrfreq.pyc,, -pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 -pip/_vendor/chardet/euckrprober.pyc,, -pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 -pip/_vendor/chardet/euctwfreq.pyc,, -pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 -pip/_vendor/chardet/euctwprober.pyc,, -pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 -pip/_vendor/chardet/gb2312freq.pyc,, -pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 -pip/_vendor/chardet/gb2312prober.pyc,, -pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 -pip/_vendor/chardet/hebrewprober.pyc,, -pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 -pip/_vendor/chardet/jisfreq.pyc,, -pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 -pip/_vendor/chardet/jpcntx.pyc,, -pip/_vendor/chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 -pip/_vendor/chardet/langbulgarianmodel.pyc,, -pip/_vendor/chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 -pip/_vendor/chardet/langcyrillicmodel.pyc,, 
-pip/_vendor/chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 -pip/_vendor/chardet/langgreekmodel.pyc,, -pip/_vendor/chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 -pip/_vendor/chardet/langhebrewmodel.pyc,, -pip/_vendor/chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 -pip/_vendor/chardet/langhungarianmodel.pyc,, -pip/_vendor/chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 -pip/_vendor/chardet/langthaimodel.pyc,, -pip/_vendor/chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 -pip/_vendor/chardet/langturkishmodel.pyc,, -pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 -pip/_vendor/chardet/latin1prober.pyc,, -pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 -pip/_vendor/chardet/mbcharsetprober.pyc,, -pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 -pip/_vendor/chardet/mbcsgroupprober.pyc,, -pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 -pip/_vendor/chardet/mbcssm.pyc,, -pip/_vendor/chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 -pip/_vendor/chardet/sbcharsetprober.pyc,, -pip/_vendor/chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 -pip/_vendor/chardet/sbcsgroupprober.pyc,, -pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 -pip/_vendor/chardet/sjisprober.pyc,, -pip/_vendor/chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 -pip/_vendor/chardet/universaldetector.pyc,, -pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 -pip/_vendor/chardet/utf8prober.pyc,, -pip/_vendor/chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 -pip/_vendor/chardet/version.pyc,, -pip/_vendor/colorama/__init__.py,sha256=lJdY6COz9uM_pXwuk9oLr0fp8H8q2RrUqN16GKabvq4,239 -pip/_vendor/colorama/__init__.pyc,, -pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524 -pip/_vendor/colorama/ansi.pyc,, -pip/_vendor/colorama/ansitowin32.py,sha256=u8QaqdqS_xYSfNkPM1eRJLHz6JMWPodaJaP0mxgHCDc,10462 -pip/_vendor/colorama/ansitowin32.pyc,, -pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 -pip/_vendor/colorama/initialise.pyc,, -pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 -pip/_vendor/colorama/win32.pyc,, -pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 -pip/_vendor/colorama/winterm.pyc,, -pip/_vendor/distlib/__init__.py,sha256=SkHYPuEQNQF2a2Cr18rfZ-LQyDqwwizn8tJE4seXPgU,587 -pip/_vendor/distlib/__init__.pyc,, -pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 -pip/_vendor/distlib/_backport/__init__.pyc,, -pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 -pip/_vendor/distlib/_backport/misc.pyc,, -pip/_vendor/distlib/_backport/shutil.py,sha256=VW1t3uYqUjWZH7jV-6QiimLhnldoV5uIpH4EuiT1jfw,25647 -pip/_vendor/distlib/_backport/shutil.pyc,, -pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 -pip/_vendor/distlib/_backport/sysconfig.py,sha256=JdJ9ztRy4Hc-b5-VS74x3nUtdEIVr_OBvMsIb8O2sjc,26964 
-pip/_vendor/distlib/_backport/sysconfig.pyc,, -pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 -pip/_vendor/distlib/_backport/tarfile.pyc,, -pip/_vendor/distlib/compat.py,sha256=xdNZmqFN5HwF30HjRn5M415pcC2kgXRBXn767xS8v-M,41404 -pip/_vendor/distlib/compat.pyc,, -pip/_vendor/distlib/database.py,sha256=-KJH63AJ7hqjLtGCwOTrionhKr2Vsytdwkjyo8UdEco,51029 -pip/_vendor/distlib/database.pyc,, -pip/_vendor/distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066 -pip/_vendor/distlib/index.pyc,, -pip/_vendor/distlib/locators.py,sha256=bqzEWP3Ad8UE3D1rmzW1pgzVTKkY4rDUA_EWIVYli54,51807 -pip/_vendor/distlib/locators.pyc,, -pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 -pip/_vendor/distlib/manifest.pyc,, -pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387 -pip/_vendor/distlib/markers.pyc,, -pip/_vendor/distlib/metadata.py,sha256=OhbCKmf5lswE8unWBopI1hj7tRpHp4ZbFvU4d6aAEMM,40234 -pip/_vendor/distlib/metadata.pyc,, -pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766 -pip/_vendor/distlib/resources.pyc,, -pip/_vendor/distlib/scripts.py,sha256=W24OXnZUmgRX_XtDrVZdfc-Frf4X4_cybvhP87iR-QU,16290 -pip/_vendor/distlib/scripts.pyc,, -pip/_vendor/distlib/t32.exe,sha256=y8Yu3yao6zZrELYGIisxkhnQLOAOvpiXft8_Y9I8vyU,92672 -pip/_vendor/distlib/t64.exe,sha256=qt1MpKO2NLqU8t1lD1T0frfFm5zwHm3mz7pLvmJ2kMI,102912 -pip/_vendor/distlib/util.py,sha256=TvdqcwncBHaQbNw0jkXRvSZvt1fbdgE8HQW5wJwzvv4,59790 -pip/_vendor/distlib/util.pyc,, -pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391 -pip/_vendor/distlib/version.pyc,, -pip/_vendor/distlib/w32.exe,sha256=f98Etq_1giFgIQxrEh-sOAeO8qVtWqpDbGxdUucJ6pw,89088 -pip/_vendor/distlib/w64.exe,sha256=6Hs-Wn0vXBHA6Qd76IlalqYXqrN80DCPpdoeIQzPRms,99840 -pip/_vendor/distlib/wheel.py,sha256=2lviV6L4IvTP5DRkKE0HGpClvdoTJQHZJLfTQ6dfn2A,40437 -pip/_vendor/distlib/wheel.pyc,, -pip/_vendor/distro.py,sha256=X2So5kjrRKyMbQJ90Xgy93HU5eFtujCzKaYNeoy1k1c,43251 -pip/_vendor/distro.pyc,, -pip/_vendor/html5lib/__init__.py,sha256=Ztrn7UvF-wIFAgRBBa0ML-Gu5AffH3BPX_INJx4SaBI,1162 -pip/_vendor/html5lib/__init__.pyc,, -pip/_vendor/html5lib/_ihatexml.py,sha256=3LBtJMlzgwM8vpQiU1TvGmEEmNH72sV0yD8yS53y07A,16705 -pip/_vendor/html5lib/_ihatexml.pyc,, -pip/_vendor/html5lib/_inputstream.py,sha256=bPUWcAfJScK4xkjQQaG_HsI2BvEVbFvI0AsodDYPQj0,32552 -pip/_vendor/html5lib/_inputstream.pyc,, -pip/_vendor/html5lib/_tokenizer.py,sha256=YAaOEBD6qc5ISq9Xt9Nif1OFgcybTTfMdwqBkZhpAq4,76580 -pip/_vendor/html5lib/_tokenizer.pyc,, -pip/_vendor/html5lib/_trie/__init__.py,sha256=8VR1bcgD2OpeS2XExpu5yBhP_Q1K-lwKbBKICBPf1kU,289 -pip/_vendor/html5lib/_trie/__init__.pyc,, -pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013 -pip/_vendor/html5lib/_trie/_base.pyc,, -pip/_vendor/html5lib/_trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178 -pip/_vendor/html5lib/_trie/datrie.pyc,, -pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 -pip/_vendor/html5lib/_trie/py.pyc,, -pip/_vendor/html5lib/_utils.py,sha256=ismpASeqa2jqEPQjHUj8vReAf7yIoKnvLN5fuOw6nv0,4015 -pip/_vendor/html5lib/_utils.pyc,, -pip/_vendor/html5lib/constants.py,sha256=4lmZWLtEPRLnl8NzftOoYTJdo6jpeMtP6dqQC0g_bWQ,83518 -pip/_vendor/html5lib/constants.pyc,, -pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
-pip/_vendor/html5lib/filters/__init__.pyc,, -pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 -pip/_vendor/html5lib/filters/alphabeticalattributes.pyc,, -pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 -pip/_vendor/html5lib/filters/base.pyc,, -pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 -pip/_vendor/html5lib/filters/inject_meta_charset.pyc,, -pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 -pip/_vendor/html5lib/filters/lint.pyc,, -pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 -pip/_vendor/html5lib/filters/optionaltags.pyc,, -pip/_vendor/html5lib/filters/sanitizer.py,sha256=4ON02KNjuqda1lCw5_JCUZxb0BzWR5M7ON84dtJ7dm0,26248 -pip/_vendor/html5lib/filters/sanitizer.pyc,, -pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 -pip/_vendor/html5lib/filters/whitespace.pyc,, -pip/_vendor/html5lib/html5parser.py,sha256=g5g2ezkusHxhi7b23vK_-d6K6BfIJRbqIQmvQ9z4EgI,118963 -pip/_vendor/html5lib/html5parser.pyc,, -pip/_vendor/html5lib/serializer.py,sha256=yfcfBHse2wDs6ojxn-kieJjLT5s1ipilQJ0gL3-rJis,15758 -pip/_vendor/html5lib/serializer.pyc,, -pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 -pip/_vendor/html5lib/treeadapters/__init__.pyc,, -pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 -pip/_vendor/html5lib/treeadapters/genshi.pyc,, -pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 -pip/_vendor/html5lib/treeadapters/sax.pyc,, -pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 -pip/_vendor/html5lib/treebuilders/__init__.pyc,, -pip/_vendor/html5lib/treebuilders/base.py,sha256=wQGp5yy22TNG8tJ6aREe4UUeTR7A99dEz0BXVaedWb4,14579 -pip/_vendor/html5lib/treebuilders/base.pyc,, -pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925 -pip/_vendor/html5lib/treebuilders/dom.pyc,, -pip/_vendor/html5lib/treebuilders/etree.py,sha256=aqIBOGj_dFYqBURIcTegGNBhAIJOw5iFDHb4jrkYH-8,12764 -pip/_vendor/html5lib/treebuilders/etree.pyc,, -pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9V0dXxbJYYq-Skgb5-_OL2NkVYpjioEb4CHajo0e9yI,14122 -pip/_vendor/html5lib/treebuilders/etree_lxml.pyc,, -pip/_vendor/html5lib/treewalkers/__init__.py,sha256=yhXxHpjlSqfQyUag3v8-vWjMPriFBU8YRAPNpDgBTn8,5714 -pip/_vendor/html5lib/treewalkers/__init__.pyc,, -pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 -pip/_vendor/html5lib/treewalkers/base.pyc,, -pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 -pip/_vendor/html5lib/treewalkers/dom.pyc,, -pip/_vendor/html5lib/treewalkers/etree.py,sha256=sz1o6mmE93NQ53qJFDO7HKyDtuwgK-Ay3qSFZPC6u00,4550 -pip/_vendor/html5lib/treewalkers/etree.pyc,, -pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=sY6wfRshWTllu6n48TPWpKsQRPp-0CQrT0hj_AdzHSU,6309 -pip/_vendor/html5lib/treewalkers/etree_lxml.pyc,, -pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 -pip/_vendor/html5lib/treewalkers/genshi.pyc,, -pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 -pip/_vendor/idna/__init__.pyc,, 
-pip/_vendor/idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 -pip/_vendor/idna/codec.pyc,, -pip/_vendor/idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 -pip/_vendor/idna/compat.pyc,, -pip/_vendor/idna/core.py,sha256=JDCZZ_PLESqIgEbU8mPyoEufWwoOiIqygA17-QZIe3s,11733 -pip/_vendor/idna/core.pyc,, -pip/_vendor/idna/idnadata.py,sha256=HXaPFw6_YAJ0qppACPu0YLAULtRs3QovRM_CCZHGdY0,40899 -pip/_vendor/idna/idnadata.pyc,, -pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 -pip/_vendor/idna/intranges.pyc,, -pip/_vendor/idna/package_data.py,sha256=kIzeKKXEouXLR4srqwf9Q3zv-NffKSOz5aSDOJARPB0,21 -pip/_vendor/idna/package_data.pyc,, -pip/_vendor/idna/uts46data.py,sha256=oLyNZ1pBaiBlj9zFzLFRd_P7J8MkRcgDisjExZR_4MY,198292 -pip/_vendor/idna/uts46data.pyc,, -pip/_vendor/ipaddress.py,sha256=2OgbkeAD2rLkcXqbcvof3J5R7lRwjNLoBySyTkBtKnc,79852 -pip/_vendor/ipaddress.pyc,, -pip/_vendor/lockfile/__init__.py,sha256=Tqpz90DwKYfhPsfzVOJl84TL87pdFE5ePNHdXAxs4Tk,9371 -pip/_vendor/lockfile/__init__.pyc,, -pip/_vendor/lockfile/linklockfile.py,sha256=C7OH3H4GdK68u4FQgp8fkP2kO4fyUTSyj3X6blgfobc,2652 -pip/_vendor/lockfile/linklockfile.pyc,, -pip/_vendor/lockfile/mkdirlockfile.py,sha256=e3qgIL-etZMLsS-3ft19iW_8IQ360HNkGOqE3yBKsUw,3096 -pip/_vendor/lockfile/mkdirlockfile.pyc,, -pip/_vendor/lockfile/pidlockfile.py,sha256=ukH9uk6NFuxyVmG5QiWw4iKq3fT7MjqUguX95avYPIY,6090 -pip/_vendor/lockfile/pidlockfile.pyc,, -pip/_vendor/lockfile/sqlitelockfile.py,sha256=o2TMkMRY0iwn-iL1XMRRIFStMUkS4i3ajceeYNntKFg,5506 -pip/_vendor/lockfile/sqlitelockfile.pyc,, -pip/_vendor/lockfile/symlinklockfile.py,sha256=ABwXXmvTHvCl5viPblShL3PG-gGsLiT1roAMfDRwhi8,2616 -pip/_vendor/lockfile/symlinklockfile.pyc,, -pip/_vendor/msgpack/__init__.py,sha256=TF3o2_Ao3xbsvpOlmVZdthtsb7TkMW9seSJkXlW0dHE,1630 -pip/_vendor/msgpack/__init__.pyc,, -pip/_vendor/msgpack/_version.py,sha256=ldul7tIj_IHxvYxbEiEX1QhElrhQsA2ikYSM987iw1U,20 -pip/_vendor/msgpack/_version.pyc,, -pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081 -pip/_vendor/msgpack/exceptions.pyc,, -pip/_vendor/msgpack/fallback.py,sha256=-FKXOBCF4CUs9QEOqAlssI-IZA0jBRa27VieFCngMC4,37491 -pip/_vendor/msgpack/fallback.pyc,, -pip/_vendor/packaging/__about__.py,sha256=Wg0-hNgTU2_lBZcGBh5pm1R9yroQ3rv-X0rig8KjA6o,744 -pip/_vendor/packaging/__about__.pyc,, -pip/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 -pip/_vendor/packaging/__init__.pyc,, -pip/_vendor/packaging/_compat.py,sha256=Ugdm-qcneSchW25JrtMIKgUxfEEBcCAz6WrEeXeqz9o,865 -pip/_vendor/packaging/_compat.pyc,, -pip/_vendor/packaging/_structures.py,sha256=pVd90XcXRGwpZRB_qdFuVEibhCHpX_bL5zYr9-N0mc8,1416 -pip/_vendor/packaging/_structures.pyc,, -pip/_vendor/packaging/markers.py,sha256=-QjvJkhSJBxBogO9J_EpPQudHaaLV3rgVYsBDqn-ZLc,8234 -pip/_vendor/packaging/markers.pyc,, -pip/_vendor/packaging/requirements.py,sha256=grcnFU8x7KD230JaFLXtWl3VClLuOmsOy4c-m55tOWs,4700 -pip/_vendor/packaging/requirements.pyc,, -pip/_vendor/packaging/specifiers.py,sha256=0ZzQpcUnvrQ6LjR-mQRLzMr8G6hdRv-mY0VSf_amFtI,27778 -pip/_vendor/packaging/specifiers.pyc,, -pip/_vendor/packaging/utils.py,sha256=VaTC0Ei7zO2xl9ARiWmz2YFLFt89PuuhLbAlXMyAGms,1520 -pip/_vendor/packaging/utils.pyc,, -pip/_vendor/packaging/version.py,sha256=Npdwnb8OHedj_2L86yiUqscujb7w_i5gmSK1PhOAFzg,11978 -pip/_vendor/packaging/version.pyc,, -pip/_vendor/pep517/__init__.py,sha256=nOY747zTld3oTdEetBG6DWxEcZXTeOQk0aHvbR-sa5w,84 -pip/_vendor/pep517/__init__.pyc,, 
-pip/_vendor/pep517/_in_process.py,sha256=xMY2kLutkjCti5WqTmKOLRRL3o8Ds_k-fObFyuMv1tk,6061 -pip/_vendor/pep517/_in_process.pyc,, -pip/_vendor/pep517/build.py,sha256=-n8PT-ugS1TdqoTUY1vatDQjrLtx48K_-Quu2MuQBiA,2699 -pip/_vendor/pep517/build.pyc,, -pip/_vendor/pep517/check.py,sha256=Lu7nMdYu1JVV58fE3hv-d_avTy5h0yO9LsIzAt82Clk,5885 -pip/_vendor/pep517/check.pyc,, -pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098 -pip/_vendor/pep517/colorlog.pyc,, -pip/_vendor/pep517/compat.py,sha256=4SFG4QN-cNj8ebSa0wV0HUtEEQWwmbok2a0uk1gYEOM,631 -pip/_vendor/pep517/compat.pyc,, -pip/_vendor/pep517/envbuild.py,sha256=9-u4KffexPMEm52rTaIjEOxsCAd2DMByxzv5H566QLw,5763 -pip/_vendor/pep517/envbuild.pyc,, -pip/_vendor/pep517/wrappers.py,sha256=9dZn-q7F5KyQKUJMie2uKwur2FG0CLXz_kLZzkJOhZc,5912 -pip/_vendor/pep517/wrappers.pyc,, -pip/_vendor/pkg_resources/__init__.py,sha256=ZVHzk7ZiFIIgE2RTJj8F7wwjdMGrAngMWtQo-rGNsm4,107910 -pip/_vendor/pkg_resources/__init__.pyc,, -pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562 -pip/_vendor/pkg_resources/py31compat.pyc,, -pip/_vendor/progress/__init__.py,sha256=fcbQQXo5np2CoQyhSH5XprkicwLZNLePR3uIahznSO0,4857 -pip/_vendor/progress/__init__.pyc,, -pip/_vendor/progress/bar.py,sha256=QuDuVNcmXgpxtNtxO0Fq72xKigxABaVmxYGBw4J3Z_E,2854 -pip/_vendor/progress/bar.pyc,, -pip/_vendor/progress/counter.py,sha256=MznyBrvPWrOlGe4MZAlGUb9q3aODe6_aNYeAE_VNoYA,1372 -pip/_vendor/progress/counter.pyc,, -pip/_vendor/progress/spinner.py,sha256=k8JbDW94T0-WXuXfxZIFhdoNPYp3jfnpXqBnfRv5fGs,1380 -pip/_vendor/progress/spinner.pyc,, -pip/_vendor/pyparsing.py,sha256=sxGUe_YcWBB5ZoHec0m1iJtgcj4iKv_SGfdA_zVCYII,245385 -pip/_vendor/pyparsing.pyc,, -pip/_vendor/pytoml/__init__.py,sha256=W_SKx36Hsew-Fty36BOpreLm4uF4V_Tgkm_z9rIoOE8,127 -pip/_vendor/pytoml/__init__.pyc,, -pip/_vendor/pytoml/core.py,sha256=9CrLLTs1PdWjEwRnYzt_i4dhHcZvGxs_GsMlYAX3iY4,509 -pip/_vendor/pytoml/core.pyc,, -pip/_vendor/pytoml/parser.py,sha256=2tDXkldqPQJhyadXzL2rGhVbjUyBNeXXhaEfncHl2iQ,10326 -pip/_vendor/pytoml/parser.pyc,, -pip/_vendor/pytoml/test.py,sha256=2nQs4aX3XQEaaQCx6x_OJTS2Hb0_IiTZRqNOeDmLCzo,1021 -pip/_vendor/pytoml/test.pyc,, -pip/_vendor/pytoml/utils.py,sha256=JCLHx77Hu1R3F-bRgiROIiKyCzLwyebnp5P35cRJxWs,1665 -pip/_vendor/pytoml/utils.pyc,, -pip/_vendor/pytoml/writer.py,sha256=WbNNQg3sh_V-s3kt88LkNNbxEq6pPDdhRE-daJzArcI,3198 -pip/_vendor/pytoml/writer.pyc,, -pip/_vendor/requests/__init__.py,sha256=ONVsH6kJuPTV9nf-XVoubWsVX3qVtjCyju42kTW6Uug,4074 -pip/_vendor/requests/__init__.pyc,, -pip/_vendor/requests/__version__.py,sha256=Bm-GFstQaFezsFlnmEMrJDe8JNROz9n2XXYtODdvjjc,436 -pip/_vendor/requests/__version__.pyc,, -pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 -pip/_vendor/requests/_internal_utils.pyc,, -pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548 -pip/_vendor/requests/adapters.pyc,, -pip/_vendor/requests/api.py,sha256=fbUo11QoLOoNgWU6FfvNz8vMj9bE_cMmICXBa7TZHJs,6271 -pip/_vendor/requests/api.pyc,, -pip/_vendor/requests/auth.py,sha256=QB2-cSUj1jrvWZfPXttsZpyAacQgtKLVk14vQW9TpSE,10206 -pip/_vendor/requests/auth.pyc,, -pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 -pip/_vendor/requests/certs.pyc,, -pip/_vendor/requests/compat.py,sha256=FZX4Q_EMKiMnhZpZ3g_gOsT-j2ca9ij2gehDx1cwYeo,1941 -pip/_vendor/requests/compat.pyc,, -pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430 
-pip/_vendor/requests/cookies.pyc,, -pip/_vendor/requests/exceptions.py,sha256=-mLam3TAx80V09EaH3H-ZxR61eAVuLRZ8zgBBSLjK44,3197 -pip/_vendor/requests/exceptions.pyc,, -pip/_vendor/requests/help.py,sha256=SJPVcoXeo7KfK4AxJN5eFVQCjr0im87tU2n7ubLsksU,3578 -pip/_vendor/requests/help.pyc,, -pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757 -pip/_vendor/requests/hooks.pyc,, -pip/_vendor/requests/models.py,sha256=6s-37iAqXVptq8z7U_LoH_pbIPrCQUm_Z8QuIGE29Q0,34275 -pip/_vendor/requests/models.pyc,, -pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 -pip/_vendor/requests/packages.pyc,, -pip/_vendor/requests/sessions.py,sha256=DjbCotDW6xSAaBsjbW-L8l4N0UcwmrxVNgSrZgIjGWM,29332 -pip/_vendor/requests/sessions.pyc,, -pip/_vendor/requests/status_codes.py,sha256=XWlcpBjbCtq9sSqpH9_KKxgnLTf9Z__wCWolq21ySlg,4129 -pip/_vendor/requests/status_codes.pyc,, -pip/_vendor/requests/structures.py,sha256=zoP8qly2Jak5e89HwpqjN1z2diztI-_gaqts1raJJBc,2981 -pip/_vendor/requests/structures.pyc,, -pip/_vendor/requests/utils.py,sha256=LtPJ1db6mJff2TJSJWKi7rBpzjPS3mSOrjC9zRhoD3A,30049 -pip/_vendor/requests/utils.pyc,, -pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972 -pip/_vendor/retrying.pyc,, -pip/_vendor/six.py,sha256=h9jch2pS86y4R36pKRS3LOYUCVFNIJMRwjZ4fJDtJ44,32452 -pip/_vendor/six.pyc,, -pip/_vendor/urllib3/__init__.py,sha256=dW1kWCz7bYGr-1q7xbDvJ_0_GwfyJtWq4VaLIzMcviA,2721 -pip/_vendor/urllib3/__init__.pyc,, -pip/_vendor/urllib3/_collections.py,sha256=-CAKsDE-WdubAjlBSZLx7b0e7WKenaNGwWvGLDEF1TM,10746 -pip/_vendor/urllib3/_collections.pyc,, -pip/_vendor/urllib3/connection.py,sha256=hdUK2hwFNWlKxpm7JbY_YxGYJWbe6s0AYUSt9wguHk0,15001 -pip/_vendor/urllib3/connection.pyc,, -pip/_vendor/urllib3/connectionpool.py,sha256=jkmLBXUD8wB0exYjDoEsg_cXVZUv-iDbhC3vAUUH82Q,35307 -pip/_vendor/urllib3/connectionpool.pyc,, -pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/contrib/__init__.pyc,, -pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=lhYXvB5_oGKSeurX7za3XhcGyERvNjXRQ3eJp2GmQ3M,717 -pip/_vendor/urllib3/contrib/_appengine_environ.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/contrib/_securetransport/__init__.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=6ZRejHBpquHtJaXPDm0cBsEwOBe2l9bTnDvVzd0HwJw,17576 -pip/_vendor/urllib3/contrib/_securetransport/bindings.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=Umy5u-3Z957GirdapnicXVOpHaM4xdOZABJuJxfaeJA,12162 -pip/_vendor/urllib3/contrib/_securetransport/low_level.pyc,, -pip/_vendor/urllib3/contrib/appengine.py,sha256=VvDpkc5gf9dTXNxXmyG1mPdON_3DrYG_eW4uOqN98oQ,10938 -pip/_vendor/urllib3/contrib/appengine.pyc,, -pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=5ZpMF7N9B6NEjVU-r-xjDOV_-hkNvsDoNc84J2yqauI,4459 -pip/_vendor/urllib3/contrib/ntlmpool.pyc,, -pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=raR9jRVPK485CjBp9emmWfoZIyMA1b3vcYJ2-CLg03A,16468 -pip/_vendor/urllib3/contrib/pyopenssl.pyc,, -pip/_vendor/urllib3/contrib/securetransport.py,sha256=IfPZ2wA3x3NXxHjKr--Q7Xz4A37ZSyWHn_1WvGxvTKQ,32826 -pip/_vendor/urllib3/contrib/securetransport.pyc,, -pip/_vendor/urllib3/contrib/socks.py,sha256=ZJ7lEUlErvAgQkd4xo_xvfx-acym2tDtJqyE0It4VRI,7012 -pip/_vendor/urllib3/contrib/socks.pyc,, -pip/_vendor/urllib3/exceptions.py,sha256=rFeIfBNKC8KJ61ux-MtJyJlEC9G9ggkmCeF751JwVR4,6604 
-pip/_vendor/urllib3/exceptions.pyc,, -pip/_vendor/urllib3/fields.py,sha256=0EYvHsgnUflhb-UhMMVjAwiRp1InCe-uy1McDD6nhPU,8575 -pip/_vendor/urllib3/fields.pyc,, -pip/_vendor/urllib3/filepost.py,sha256=40CROlpRKVBpFUkD0R6wJf_PpvbcRQRFUu0OOQlFkKM,2436 -pip/_vendor/urllib3/filepost.pyc,, -pip/_vendor/urllib3/packages/__init__.py,sha256=nlChrGzkjCkmhCX9HrF_qHPUgosfsPQkVIJxiiLhk9g,109 -pip/_vendor/urllib3/packages/__init__.pyc,, -pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/packages/backports/__init__.pyc,, -pip/_vendor/urllib3/packages/backports/makefile.py,sha256=so2z9BiNM8kh38Ve5tomQP_mp2_ubEqzdlCpLZKzzCI,1456 -pip/_vendor/urllib3/packages/backports/makefile.pyc,, -pip/_vendor/urllib3/packages/rfc3986/__init__.py,sha256=Y2dGb08ZJuqIrAqfmrGMpNi2zjzKVTxfeZ8iF-Db338,1562 -pip/_vendor/urllib3/packages/rfc3986/__init__.pyc,, -pip/_vendor/urllib3/packages/rfc3986/_mixin.py,sha256=fnxYtuAQwo6RlGZzsawcICxUhLqQ_Tyob_Kamy-92QU,13214 -pip/_vendor/urllib3/packages/rfc3986/_mixin.pyc,, -pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.py,sha256=0cPq-UrpihByHkDsZd_7p6ruqYh2MuqCqIjc67PFHCs,9081 -pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.pyc,, -pip/_vendor/urllib3/packages/rfc3986/api.py,sha256=5wYJ3IvszICEY5cgeLTtLRWCyc9mJhgZ_4QZVtYoSKI,3887 -pip/_vendor/urllib3/packages/rfc3986/api.pyc,, -pip/_vendor/urllib3/packages/rfc3986/builder.py,sha256=BFbuFFZUcAnGJzvtQ0n7ffHqgG-CBf-Xi_6aU68n-OA,9577 -pip/_vendor/urllib3/packages/rfc3986/builder.pyc,, -pip/_vendor/urllib3/packages/rfc3986/compat.py,sha256=jnSGxU4M13w5vuLldgKmXmpxcZBxmUwg8dBzNQEWzYc,1513 -pip/_vendor/urllib3/packages/rfc3986/compat.pyc,, -pip/_vendor/urllib3/packages/rfc3986/exceptions.py,sha256=dadexlPfwsYbcbFdbR1lp5WyuF8PMsSmx2gA3nrger4,3775 -pip/_vendor/urllib3/packages/rfc3986/exceptions.pyc,, -pip/_vendor/urllib3/packages/rfc3986/iri.py,sha256=UWHdKI_aOiK_SC3oE_nTrxsgiS8shJQRkmKn_AAxyms,5483 -pip/_vendor/urllib3/packages/rfc3986/iri.pyc,, -pip/_vendor/urllib3/packages/rfc3986/misc.py,sha256=MbL7MgqbTef5VddkaaPKkLpe0hPRNCEx0-kKhJfRyk8,4094 -pip/_vendor/urllib3/packages/rfc3986/misc.pyc,, -pip/_vendor/urllib3/packages/rfc3986/normalizers.py,sha256=L6DOXDi7vZ_BDoXS8IUl9CW21E-siDJluK2mmImljtY,5259 -pip/_vendor/urllib3/packages/rfc3986/normalizers.pyc,, -pip/_vendor/urllib3/packages/rfc3986/parseresult.py,sha256=cdmsiBExo5o2A2jWI-TtGFeXTPenyXQbGf5Nmv7nh6M,14654 -pip/_vendor/urllib3/packages/rfc3986/parseresult.pyc,, -pip/_vendor/urllib3/packages/rfc3986/uri.py,sha256=r_KhSNmvWfoBGRPBf1dnlwWnCwuM-JHFtGa-6DH_jH4,5227 -pip/_vendor/urllib3/packages/rfc3986/uri.pyc,, -pip/_vendor/urllib3/packages/rfc3986/validators.py,sha256=jbJGdqUcoeSD2E_gmuFbrujLsVtEpjhJg7oxpiFeyY4,13854 -pip/_vendor/urllib3/packages/rfc3986/validators.pyc,, -pip/_vendor/urllib3/packages/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 -pip/_vendor/urllib3/packages/six.pyc,, -pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=WBVbxQBojNAxfZwNavkox3BgJiMA9BJmm-_fwd0jD_o,688 -pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.pyc,, -pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=E-9J-kAaUn76WMZ4PpzKUxM4C3yjY7mopOpbPIy3Dso,5700 -pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyc,, -pip/_vendor/urllib3/poolmanager.py,sha256=GrUSFRcQbhxPMRlePxOUbXvmsOgGTiNrxQpICmXd30I,17050 -pip/_vendor/urllib3/poolmanager.pyc,, -pip/_vendor/urllib3/request.py,sha256=OfelFYzPnxGlU3amEz9uBLjCBOriwgJh4QC_aW9SF3U,5991 
-pip/_vendor/urllib3/request.pyc,, -pip/_vendor/urllib3/response.py,sha256=GxiW6sI0NZgdlDL4hzPfKDZmH7OFTiGZosaXu2DMG7k,27171 -pip/_vendor/urllib3/response.pyc,, -pip/_vendor/urllib3/util/__init__.py,sha256=P-VlwgBFaga7i1BTWPNHGK4TN-SES0VoexbeIPIKs_0,1082 -pip/_vendor/urllib3/util/__init__.pyc,, -pip/_vendor/urllib3/util/connection.py,sha256=-AyqcRTuNUHuo5ndtsU0Og_nMyCGATC-kYqOUdBHwIQ,4639 -pip/_vendor/urllib3/util/connection.pyc,, -pip/_vendor/urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497 -pip/_vendor/urllib3/util/queue.pyc,, -pip/_vendor/urllib3/util/request.py,sha256=_pmOHJWpOHk7w8BDz6WZkmMunNmplEPBmh2-5bl3Do4,3832 -pip/_vendor/urllib3/util/request.pyc,, -pip/_vendor/urllib3/util/response.py,sha256=028PNXDZhwBtnm2uXvnAHi_l9_AAGrAMH2Igh2AbgWg,2586 -pip/_vendor/urllib3/util/response.pyc,, -pip/_vendor/urllib3/util/retry.py,sha256=1m-XI9_LORj1FLbwOzgWC6pTt2deycyGl4BWRI9r4Zc,15150 -pip/_vendor/urllib3/util/retry.pyc,, -pip/_vendor/urllib3/util/ssl_.py,sha256=bYkkcBpWIbtFL3WCiX2pgTRjS2Ukdpv0oTtHHK0g8Mw,13798 -pip/_vendor/urllib3/util/ssl_.pyc,, -pip/_vendor/urllib3/util/timeout.py,sha256=dTF-iEp8DZiPd-8g2X7CVucDoBWJBn221T8ghg-tjkQ,9768 -pip/_vendor/urllib3/util/timeout.pyc,, -pip/_vendor/urllib3/util/url.py,sha256=DyEkFjkLo4C82N1elJgpePfccgLb6IHMjBTnPgs9QmU,9827 -pip/_vendor/urllib3/util/url.pyc,, -pip/_vendor/urllib3/util/wait.py,sha256=p4BZo_Ukp5JF0Dn6jro7cUfqIjnU6WFtuoA6poaV5Jk,5403 -pip/_vendor/urllib3/util/wait.pyc,, -pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 -pip/_vendor/webencodings/__init__.pyc,, -pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 -pip/_vendor/webencodings/labels.pyc,, -pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 -pip/_vendor/webencodings/mklabels.pyc,, -pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563 -pip/_vendor/webencodings/tests.pyc,, -pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307 -pip/_vendor/webencodings/x_user_defined.pyc,, diff --git a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/WHEEL deleted file mode 100644 index 78e6f69d..00000000 --- a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.4) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/entry_points.txt b/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/entry_points.txt deleted file mode 100644 index f5809cb4..00000000 --- a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/entry_points.txt +++ /dev/null @@ -1,5 +0,0 @@ -[console_scripts] -pip = pip._internal:main -pip3 = pip._internal:main -pip3.7 = pip._internal:main - diff --git a/env/lib/python2.7/site-packages/pip/__init__.py b/env/lib/python2.7/site-packages/pip/__init__.py deleted file mode 100644 index 04304fd5..00000000 --- a/env/lib/python2.7/site-packages/pip/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "19.2.2" diff --git a/env/lib/python2.7/site-packages/pip/__init__.pyc b/env/lib/python2.7/site-packages/pip/__init__.pyc deleted file mode 100644 index 5f8fa2ff..00000000 Binary files a/env/lib/python2.7/site-packages/pip/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/__main__.py 
b/env/lib/python2.7/site-packages/pip/__main__.py deleted file mode 100644 index 0c223f8c..00000000 --- a/env/lib/python2.7/site-packages/pip/__main__.py +++ /dev/null @@ -1,19 +0,0 @@ -from __future__ import absolute_import - -import os -import sys - -# If we are running from a wheel, add the wheel to sys.path -# This allows the usage python pip-*.whl/pip install pip-*.whl -if __package__ == '': - # __file__ is pip-*.whl/pip/__main__.py - # first dirname call strips of '/__main__.py', second strips off '/pip' - # Resulting path is the name of the wheel itself - # Add that to sys.path so we can import pip - path = os.path.dirname(os.path.dirname(__file__)) - sys.path.insert(0, path) - -from pip._internal import main as _main # isort:skip # noqa - -if __name__ == '__main__': - sys.exit(_main()) diff --git a/env/lib/python2.7/site-packages/pip/__main__.pyc b/env/lib/python2.7/site-packages/pip/__main__.pyc deleted file mode 100644 index b7012319..00000000 Binary files a/env/lib/python2.7/site-packages/pip/__main__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/__init__.py b/env/lib/python2.7/site-packages/pip/_internal/__init__.py deleted file mode 100644 index fbadc28a..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/__init__.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python -from __future__ import absolute_import - -import locale -import logging -import os -import warnings - -import sys - -# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks, -# but if invoked (i.e. imported), it will issue a warning to stderr if socks -# isn't available. requests unconditionally imports urllib3's socks contrib -# module, triggering this warning. The warning breaks DEP-8 tests (because of -# the stderr output) and is just plain annoying in normal usage. I don't want -# to add socks as yet another dependency for pip, nor do I want to allow-stderr -# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to -# be done before the import of pip.vcs. 
-from pip._vendor.urllib3.exceptions import DependencyWarning -warnings.filterwarnings("ignore", category=DependencyWarning) # noqa - -# We want to inject the use of SecureTransport as early as possible so that any -# references or sessions or what have you are ensured to have it, however we -# only want to do this in the case that we're running on macOS and the linked -# OpenSSL is too old to handle TLSv1.2 -try: - import ssl -except ImportError: - pass -else: - # Checks for OpenSSL 1.0.1 on MacOS - if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f: - try: - from pip._vendor.urllib3.contrib import securetransport - except (ImportError, OSError): - pass - else: - securetransport.inject_into_urllib3() - -from pip._internal.cli.autocompletion import autocomplete -from pip._internal.cli.main_parser import parse_command -from pip._internal.commands import commands_dict -from pip._internal.exceptions import PipError -from pip._internal.utils import deprecation -from pip._vendor.urllib3.exceptions import InsecureRequestWarning - -logger = logging.getLogger(__name__) - -# Hide the InsecureRequestWarning from urllib3 -warnings.filterwarnings("ignore", category=InsecureRequestWarning) - - -def main(args=None): - if args is None: - args = sys.argv[1:] - - # Configure our deprecation warnings to be sent through loggers - deprecation.install_warning_logger() - - autocomplete() - - try: - cmd_name, cmd_args = parse_command(args) - except PipError as exc: - sys.stderr.write("ERROR: %s" % exc) - sys.stderr.write(os.linesep) - sys.exit(1) - - # Needed for locale.getpreferredencoding(False) to work - # in pip._internal.utils.encoding.auto_decode - try: - locale.setlocale(locale.LC_ALL, '') - except locale.Error as e: - # setlocale can apparently crash if locale are uninitialized - logger.debug("Ignoring error %s when setting locale", e) - command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args)) - return command.main(cmd_args) diff --git a/env/lib/python2.7/site-packages/pip/_internal/__init__.pyc b/env/lib/python2.7/site-packages/pip/_internal/__init__.pyc deleted file mode 100644 index af3b7bc5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/build_env.py b/env/lib/python2.7/site-packages/pip/_internal/build_env.py deleted file mode 100644 index a060ceea..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/build_env.py +++ /dev/null @@ -1,218 +0,0 @@ -"""Build Environment used for isolation during sdist building -""" - -import logging -import os -import sys -import textwrap -from collections import OrderedDict -from distutils.sysconfig import get_python_lib -from sysconfig import get_paths - -from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet - -from pip import __file__ as pip_location -from pip._internal.utils.misc import call_subprocess -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import open_spinner - -if MYPY_CHECK_RUNNING: - from typing import Tuple, Set, Iterable, Optional, List - from pip._internal.index import PackageFinder - -logger = logging.getLogger(__name__) - - -class _Prefix: - - def __init__(self, path): - # type: (str) -> None - self.path = path - self.setup = False - self.bin_dir = get_paths( - 'nt' if os.name == 'nt' else 'posix_prefix', - vars={'base': path, 'platbase': path} - )['scripts'] - # Note: 
prefer distutils' sysconfig to get the - # library paths so PyPy is correctly supported. - purelib = get_python_lib(plat_specific=False, prefix=path) - platlib = get_python_lib(plat_specific=True, prefix=path) - if purelib == platlib: - self.lib_dirs = [purelib] - else: - self.lib_dirs = [purelib, platlib] - - -class BuildEnvironment(object): - """Creates and manages an isolated environment to install build deps - """ - - def __init__(self): - # type: () -> None - self._temp_dir = TempDirectory(kind="build-env") - self._temp_dir.create() - - self._prefixes = OrderedDict(( - (name, _Prefix(os.path.join(self._temp_dir.path, name))) - for name in ('normal', 'overlay') - )) - - self._bin_dirs = [] # type: List[str] - self._lib_dirs = [] # type: List[str] - for prefix in reversed(list(self._prefixes.values())): - self._bin_dirs.append(prefix.bin_dir) - self._lib_dirs.extend(prefix.lib_dirs) - - # Customize site to: - # - ensure .pth files are honored - # - prevent access to system site packages - system_sites = { - os.path.normcase(site) for site in ( - get_python_lib(plat_specific=False), - get_python_lib(plat_specific=True), - ) - } - self._site_dir = os.path.join(self._temp_dir.path, 'site') - if not os.path.exists(self._site_dir): - os.mkdir(self._site_dir) - with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp: - fp.write(textwrap.dedent( - ''' - import os, site, sys - - # First, drop system-sites related paths. - original_sys_path = sys.path[:] - known_paths = set() - for path in {system_sites!r}: - site.addsitedir(path, known_paths=known_paths) - system_paths = set( - os.path.normcase(path) - for path in sys.path[len(original_sys_path):] - ) - original_sys_path = [ - path for path in original_sys_path - if os.path.normcase(path) not in system_paths - ] - sys.path = original_sys_path - - # Second, add lib directories. - # ensuring .pth file are processed. - for path in {lib_dirs!r}: - assert not path in sys.path - site.addsitedir(path) - ''' - ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)) - - def __enter__(self): - self._save_env = { - name: os.environ.get(name, None) - for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH') - } - - path = self._bin_dirs[:] - old_path = self._save_env['PATH'] - if old_path: - path.extend(old_path.split(os.pathsep)) - - pythonpath = [self._site_dir] - - os.environ.update({ - 'PATH': os.pathsep.join(path), - 'PYTHONNOUSERSITE': '1', - 'PYTHONPATH': os.pathsep.join(pythonpath), - }) - - def __exit__(self, exc_type, exc_val, exc_tb): - for varname, old_value in self._save_env.items(): - if old_value is None: - os.environ.pop(varname, None) - else: - os.environ[varname] = old_value - - def cleanup(self): - # type: () -> None - self._temp_dir.cleanup() - - def check_requirements(self, reqs): - # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]] - """Return 2 sets: - - conflicting requirements: set of (installed, wanted) reqs tuples - - missing requirements: set of reqs - """ - missing = set() - conflicting = set() - if reqs: - ws = WorkingSet(self._lib_dirs) - for req in reqs: - try: - if ws.find(Requirement.parse(req)) is None: - missing.add(req) - except VersionConflict as e: - conflicting.add((str(e.args[0].as_requirement()), - str(e.args[1]))) - return conflicting, missing - - def install_requirements( - self, - finder, # type: PackageFinder - requirements, # type: Iterable[str] - prefix_as_string, # type: str - message # type: Optional[str] - ): - # type: (...) 
-> None - prefix = self._prefixes[prefix_as_string] - assert not prefix.setup - prefix.setup = True - if not requirements: - return - args = [ - sys.executable, os.path.dirname(pip_location), 'install', - '--ignore-installed', '--no-user', '--prefix', prefix.path, - '--no-warn-script-location', - ] # type: List[str] - if logger.getEffectiveLevel() <= logging.DEBUG: - args.append('-v') - for format_control in ('no_binary', 'only_binary'): - formats = getattr(finder.format_control, format_control) - args.extend(('--' + format_control.replace('_', '-'), - ','.join(sorted(formats or {':none:'})))) - - index_urls = finder.index_urls - if index_urls: - args.extend(['-i', index_urls[0]]) - for extra_index in index_urls[1:]: - args.extend(['--extra-index-url', extra_index]) - else: - args.append('--no-index') - for link in finder.find_links: - args.extend(['--find-links', link]) - - for host in finder.trusted_hosts: - args.extend(['--trusted-host', host]) - if finder.allow_all_prereleases: - args.append('--pre') - args.append('--') - args.extend(requirements) - with open_spinner(message) as spinner: - call_subprocess(args, spinner=spinner) - - -class NoOpBuildEnvironment(BuildEnvironment): - """A no-op drop-in replacement for BuildEnvironment - """ - - def __init__(self): - pass - - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_val, exc_tb): - pass - - def cleanup(self): - pass - - def install_requirements(self, finder, requirements, prefix, message): - raise NotImplementedError() diff --git a/env/lib/python2.7/site-packages/pip/_internal/build_env.pyc b/env/lib/python2.7/site-packages/pip/_internal/build_env.pyc deleted file mode 100644 index 89c398aa..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/build_env.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/cache.py b/env/lib/python2.7/site-packages/pip/_internal/cache.py deleted file mode 100644 index 894624c1..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/cache.py +++ /dev/null @@ -1,224 +0,0 @@ -"""Cache Management -""" - -import errno -import hashlib -import logging -import os - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.models.link import Link -from pip._internal.utils.compat import expanduser -from pip._internal.utils.misc import path_to_url -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import InvalidWheelFilename, Wheel - -if MYPY_CHECK_RUNNING: - from typing import Optional, Set, List, Any - from pip._internal.index import FormatControl - -logger = logging.getLogger(__name__) - - -class Cache(object): - """An abstract class - provides cache directories for data from links - - - :param cache_dir: The root of the cache. - :param format_control: An object of FormatControl class to limit - binaries being read from the cache. - :param allowed_formats: which formats of files the cache should store. 
- ('binary' and 'source' are the only allowed values) - """ - - def __init__(self, cache_dir, format_control, allowed_formats): - # type: (str, FormatControl, Set[str]) -> None - super(Cache, self).__init__() - self.cache_dir = expanduser(cache_dir) if cache_dir else None - self.format_control = format_control - self.allowed_formats = allowed_formats - - _valid_formats = {"source", "binary"} - assert self.allowed_formats.union(_valid_formats) == _valid_formats - - def _get_cache_path_parts(self, link): - # type: (Link) -> List[str] - """Get parts of part that must be os.path.joined with cache_dir - """ - - # We want to generate an url to use as our cache key, we don't want to - # just re-use the URL because it might have other items in the fragment - # and we don't care about those. - key_parts = [link.url_without_fragment] - if link.hash_name is not None and link.hash is not None: - key_parts.append("=".join([link.hash_name, link.hash])) - key_url = "#".join(key_parts) - - # Encode our key url with sha224, we'll use this because it has similar - # security properties to sha256, but with a shorter total output (and - # thus less secure). However the differences don't make a lot of - # difference for our use case here. - hashed = hashlib.sha224(key_url.encode()).hexdigest() - - # We want to nest the directories some to prevent having a ton of top - # level directories where we might run out of sub directories on some - # FS. - parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] - - return parts - - def _get_candidates(self, link, package_name): - # type: (Link, Optional[str]) -> List[Any] - can_not_cache = ( - not self.cache_dir or - not package_name or - not link - ) - if can_not_cache: - return [] - - canonical_name = canonicalize_name(package_name) - formats = self.format_control.get_allowed_formats( - canonical_name - ) - if not self.allowed_formats.intersection(formats): - return [] - - root = self.get_path_for_link(link) - try: - return os.listdir(root) - except OSError as err: - if err.errno in {errno.ENOENT, errno.ENOTDIR}: - return [] - raise - - def get_path_for_link(self, link): - # type: (Link) -> str - """Return a directory to store cached items in for link. - """ - raise NotImplementedError() - - def get(self, link, package_name): - # type: (Link, Optional[str]) -> Link - """Returns a link to a cached item if it exists, otherwise returns the - passed link. - """ - raise NotImplementedError() - - def _link_for_candidate(self, link, candidate): - # type: (Link, str) -> Link - root = self.get_path_for_link(link) - path = os.path.join(root, candidate) - - return Link(path_to_url(path)) - - def cleanup(self): - # type: () -> None - pass - - -class SimpleWheelCache(Cache): - """A cache of wheels for future installs. - """ - - def __init__(self, cache_dir, format_control): - # type: (str, FormatControl) -> None - super(SimpleWheelCache, self).__init__( - cache_dir, format_control, {"binary"} - ) - - def get_path_for_link(self, link): - # type: (Link) -> str - """Return a directory to store cached wheels for link - - Because there are M wheels for any one sdist, we provide a directory - to cache them in, and then consult that directory when looking up - cache hits. - - We only insert things into the cache if they have plausible version - numbers, so that we don't contaminate the cache with things that were - not unique. E.g. 
./package might have dozens of installs done for it - and build a version of 0.0...and if we built and cached a wheel, we'd - end up using the same wheel even if the source has been edited. - - :param link: The link of the sdist for which this will cache wheels. - """ - parts = self._get_cache_path_parts(link) - - # Store wheels within the root cache_dir - return os.path.join(self.cache_dir, "wheels", *parts) - - def get(self, link, package_name): - # type: (Link, Optional[str]) -> Link - candidates = [] - - for wheel_name in self._get_candidates(link, package_name): - try: - wheel = Wheel(wheel_name) - except InvalidWheelFilename: - continue - if not wheel.supported(): - # Built for a different python/arch/etc - continue - candidates.append((wheel.support_index_min(), wheel_name)) - - if not candidates: - return link - - return self._link_for_candidate(link, min(candidates)[1]) - - -class EphemWheelCache(SimpleWheelCache): - """A SimpleWheelCache that creates it's own temporary cache directory - """ - - def __init__(self, format_control): - # type: (FormatControl) -> None - self._temp_dir = TempDirectory(kind="ephem-wheel-cache") - self._temp_dir.create() - - super(EphemWheelCache, self).__init__( - self._temp_dir.path, format_control - ) - - def cleanup(self): - # type: () -> None - self._temp_dir.cleanup() - - -class WheelCache(Cache): - """Wraps EphemWheelCache and SimpleWheelCache into a single Cache - - This Cache allows for gracefully degradation, using the ephem wheel cache - when a certain link is not found in the simple wheel cache first. - """ - - def __init__(self, cache_dir, format_control): - # type: (str, FormatControl) -> None - super(WheelCache, self).__init__( - cache_dir, format_control, {'binary'} - ) - self._wheel_cache = SimpleWheelCache(cache_dir, format_control) - self._ephem_cache = EphemWheelCache(format_control) - - def get_path_for_link(self, link): - # type: (Link) -> str - return self._wheel_cache.get_path_for_link(link) - - def get_ephem_path_for_link(self, link): - # type: (Link) -> str - return self._ephem_cache.get_path_for_link(link) - - def get(self, link, package_name): - # type: (Link, Optional[str]) -> Link - retval = self._wheel_cache.get(link, package_name) - if retval is link: - retval = self._ephem_cache.get(link, package_name) - return retval - - def cleanup(self): - # type: () -> None - self._wheel_cache.cleanup() - self._ephem_cache.cleanup() diff --git a/env/lib/python2.7/site-packages/pip/_internal/cache.pyc b/env/lib/python2.7/site-packages/pip/_internal/cache.pyc deleted file mode 100644 index b2dfc977..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/cache.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/__init__.pyc b/env/lib/python2.7/site-packages/pip/_internal/cli/__init__.pyc deleted file mode 100644 index 7908cdf7..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/cli/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/autocompletion.py b/env/lib/python2.7/site-packages/pip/_internal/cli/autocompletion.py deleted file mode 100644 index 0a04199e..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/cli/autocompletion.py +++ /dev/null @@ -1,152 +0,0 @@ -"""Logic that powers autocompletion installed by ``pip completion``. 
-""" - -import optparse -import os -import sys - -from pip._internal.cli.main_parser import create_main_parser -from pip._internal.commands import commands_dict, get_summaries -from pip._internal.utils.misc import get_installed_distributions - - -def autocomplete(): - """Entry Point for completion of main and subcommand options. - """ - # Don't complete if user hasn't sourced bash_completion file. - if 'PIP_AUTO_COMPLETE' not in os.environ: - return - cwords = os.environ['COMP_WORDS'].split()[1:] - cword = int(os.environ['COMP_CWORD']) - try: - current = cwords[cword - 1] - except IndexError: - current = '' - - subcommands = [cmd for cmd, summary in get_summaries()] - options = [] - # subcommand - try: - subcommand_name = [w for w in cwords if w in subcommands][0] - except IndexError: - subcommand_name = None - - parser = create_main_parser() - # subcommand options - if subcommand_name: - # special case: 'help' subcommand has no options - if subcommand_name == 'help': - sys.exit(1) - # special case: list locally installed dists for show and uninstall - should_list_installed = ( - subcommand_name in ['show', 'uninstall'] and - not current.startswith('-') - ) - if should_list_installed: - installed = [] - lc = current.lower() - for dist in get_installed_distributions(local_only=True): - if dist.key.startswith(lc) and dist.key not in cwords[1:]: - installed.append(dist.key) - # if there are no dists installed, fall back to option completion - if installed: - for dist in installed: - print(dist) - sys.exit(1) - - subcommand = commands_dict[subcommand_name]() - - for opt in subcommand.parser.option_list_all: - if opt.help != optparse.SUPPRESS_HELP: - for opt_str in opt._long_opts + opt._short_opts: - options.append((opt_str, opt.nargs)) - - # filter out previously specified options from available options - prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]] - options = [(x, v) for (x, v) in options if x not in prev_opts] - # filter options by current input - options = [(k, v) for k, v in options if k.startswith(current)] - # get completion type given cwords and available subcommand options - completion_type = get_path_completion_type( - cwords, cword, subcommand.parser.option_list_all, - ) - # get completion files and directories if ``completion_type`` is - # ````, ```` or ```` - if completion_type: - options = auto_complete_paths(current, completion_type) - options = ((opt, 0) for opt in options) - for option in options: - opt_label = option[0] - # append '=' to options which require args - if option[1] and option[0][:2] == "--": - opt_label += '=' - print(opt_label) - else: - # show main parser options only when necessary - - opts = [i.option_list for i in parser.option_groups] - opts.append(parser.option_list) - opts = (o for it in opts for o in it) - if current.startswith('-'): - for opt in opts: - if opt.help != optparse.SUPPRESS_HELP: - subcommands += opt._long_opts + opt._short_opts - else: - # get completion type given cwords and all available options - completion_type = get_path_completion_type(cwords, cword, opts) - if completion_type: - subcommands = auto_complete_paths(current, completion_type) - - print(' '.join([x for x in subcommands if x.startswith(current)])) - sys.exit(1) - - -def get_path_completion_type(cwords, cword, opts): - """Get the type of path completion (``file``, ``dir``, ``path`` or None) - - :param cwords: same as the environmental variable ``COMP_WORDS`` - :param cword: same as the environmental variable ``COMP_CWORD`` - :param opts: The available options 
to check - :return: path completion type (``file``, ``dir``, ``path`` or None) - """ - if cword < 2 or not cwords[cword - 2].startswith('-'): - return - for opt in opts: - if opt.help == optparse.SUPPRESS_HELP: - continue - for o in str(opt).split('/'): - if cwords[cword - 2].split('=')[0] == o: - if not opt.metavar or any( - x in ('path', 'file', 'dir') - for x in opt.metavar.split('/')): - return opt.metavar - - -def auto_complete_paths(current, completion_type): - """If ``completion_type`` is ``file`` or ``path``, list all regular files - and directories starting with ``current``; otherwise only list directories - starting with ``current``. - - :param current: The word to be completed - :param completion_type: path completion type(`file`, `path` or `dir`)i - :return: A generator of regular files and/or directories - """ - directory, filename = os.path.split(current) - current_path = os.path.abspath(directory) - # Don't complete paths if they can't be accessed - if not os.access(current_path, os.R_OK): - return - filename = os.path.normcase(filename) - # list all files that start with ``filename`` - file_list = (x for x in os.listdir(current_path) - if os.path.normcase(x).startswith(filename)) - for f in file_list: - opt = os.path.join(current_path, f) - comp_file = os.path.normcase(os.path.join(directory, f)) - # complete regular files when there is not ```` after option - # complete directories when there is ````, ```` or - # ````after option - if completion_type != 'dir' and os.path.isfile(opt): - yield comp_file - elif os.path.isdir(opt): - yield os.path.join(comp_file, '') diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/autocompletion.pyc b/env/lib/python2.7/site-packages/pip/_internal/cli/autocompletion.pyc deleted file mode 100644 index 0354c17d..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/cli/autocompletion.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/base_command.py b/env/lib/python2.7/site-packages/pip/_internal/cli/base_command.py deleted file mode 100644 index 90830be4..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/cli/base_command.py +++ /dev/null @@ -1,346 +0,0 @@ -"""Base Command class, and related routines""" -from __future__ import absolute_import, print_function - -import logging -import logging.config -import optparse -import os -import platform -import sys -import traceback - -from pip._internal.cli import cmdoptions -from pip._internal.cli.cmdoptions import make_search_scope -from pip._internal.cli.parser import ( - ConfigOptionParser, UpdatingDefaultsHelpFormatter, -) -from pip._internal.cli.status_codes import ( - ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR, - VIRTUALENV_NOT_FOUND, -) -from pip._internal.download import PipSession -from pip._internal.exceptions import ( - BadCommand, CommandError, InstallationError, PreviousBuildDirError, - UninstallationError, -) -from pip._internal.index import PackageFinder -from pip._internal.models.selection_prefs import SelectionPreferences -from pip._internal.models.target_python import TargetPython -from pip._internal.req.constructors import ( - install_req_from_editable, install_req_from_line, -) -from pip._internal.req.req_file import parse_requirements -from pip._internal.utils.deprecation import deprecated -from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging -from pip._internal.utils.misc import get_prog, normalize_path -from pip._internal.utils.outdated import pip_version_check 
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.virtualenv import running_under_virtualenv - -if MYPY_CHECK_RUNNING: - from typing import Optional, List, Tuple, Any - from optparse import Values - from pip._internal.cache import WheelCache - from pip._internal.req.req_set import RequirementSet - -__all__ = ['Command'] - -logger = logging.getLogger(__name__) - - -class Command(object): - name = None # type: Optional[str] - usage = None # type: Optional[str] - ignore_require_venv = False # type: bool - - def __init__(self, isolated=False): - # type: (bool) -> None - parser_kw = { - 'usage': self.usage, - 'prog': '%s %s' % (get_prog(), self.name), - 'formatter': UpdatingDefaultsHelpFormatter(), - 'add_help_option': False, - 'name': self.name, - 'description': self.__doc__, - 'isolated': isolated, - } - - self.parser = ConfigOptionParser(**parser_kw) - - # Commands should add options to this option group - optgroup_name = '%s Options' % self.name.capitalize() - self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) - - # Add the general options - gen_opts = cmdoptions.make_option_group( - cmdoptions.general_group, - self.parser, - ) - self.parser.add_option_group(gen_opts) - - def run(self, options, args): - # type: (Values, List[Any]) -> Any - raise NotImplementedError - - @classmethod - def _get_index_urls(cls, options): - """Return a list of index urls from user-provided options.""" - index_urls = [] - if not getattr(options, "no_index", False): - url = getattr(options, "index_url", None) - if url: - index_urls.append(url) - urls = getattr(options, "extra_index_urls", None) - if urls: - index_urls.extend(urls) - # Return None rather than an empty list - return index_urls or None - - def _build_session(self, options, retries=None, timeout=None): - # type: (Values, Optional[int], Optional[int]) -> PipSession - session = PipSession( - cache=( - normalize_path(os.path.join(options.cache_dir, "http")) - if options.cache_dir else None - ), - retries=retries if retries is not None else options.retries, - insecure_hosts=options.trusted_hosts, - index_urls=self._get_index_urls(options), - ) - - # Handle custom ca-bundles from the user - if options.cert: - session.verify = options.cert - - # Handle SSL client certificate - if options.client_cert: - session.cert = options.client_cert - - # Handle timeouts - if options.timeout or timeout: - session.timeout = ( - timeout if timeout is not None else options.timeout - ) - - # Handle configured proxies - if options.proxy: - session.proxies = { - "http": options.proxy, - "https": options.proxy, - } - - # Determine if we can prompt the user for authentication or not - session.auth.prompting = not options.no_input - - return session - - def parse_args(self, args): - # type: (List[str]) -> Tuple - # factored out for testability - return self.parser.parse_args(args) - - def main(self, args): - # type: (List[str]) -> int - options, args = self.parse_args(args) - - # Set verbosity so that it can be used elsewhere. - self.verbosity = options.verbose - options.quiet - - level_number = setup_logging( - verbosity=self.verbosity, - no_color=options.no_color, - user_log_file=options.log, - ) - - if sys.version_info[:2] == (2, 7): - message = ( - "A future version of pip will drop support for Python 2.7. 
" - "More details about Python 2 support in pip, can be found at " - "https://pip.pypa.io/en/latest/development/release-process/#python-2-support" # noqa - ) - if platform.python_implementation() == "CPython": - message = ( - "Python 2.7 will reach the end of its life on January " - "1st, 2020. Please upgrade your Python as Python 2.7 " - "won't be maintained after that date. " - ) + message - deprecated(message, replacement=None, gone_in=None) - - # TODO: Try to get these passing down from the command? - # without resorting to os.environ to hold these. - # This also affects isolated builds and it should. - - if options.no_input: - os.environ['PIP_NO_INPUT'] = '1' - - if options.exists_action: - os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) - - if options.require_venv and not self.ignore_require_venv: - # If a venv is required check if it can really be found - if not running_under_virtualenv(): - logger.critical( - 'Could not find an activated virtualenv (required).' - ) - sys.exit(VIRTUALENV_NOT_FOUND) - - try: - status = self.run(options, args) - # FIXME: all commands should return an exit status - # and when it is done, isinstance is not needed anymore - if isinstance(status, int): - return status - except PreviousBuildDirError as exc: - logger.critical(str(exc)) - logger.debug('Exception information:', exc_info=True) - - return PREVIOUS_BUILD_DIR_ERROR - except (InstallationError, UninstallationError, BadCommand) as exc: - logger.critical(str(exc)) - logger.debug('Exception information:', exc_info=True) - - return ERROR - except CommandError as exc: - logger.critical('%s', exc) - logger.debug('Exception information:', exc_info=True) - - return ERROR - except BrokenStdoutLoggingError: - # Bypass our logger and write any remaining messages to stderr - # because stdout no longer works. - print('ERROR: Pipe to stdout was broken', file=sys.stderr) - if level_number <= logging.DEBUG: - traceback.print_exc(file=sys.stderr) - - return ERROR - except KeyboardInterrupt: - logger.critical('Operation cancelled by user') - logger.debug('Exception information:', exc_info=True) - - return ERROR - except BaseException: - logger.critical('Exception:', exc_info=True) - - return UNKNOWN_ERROR - finally: - allow_version_check = ( - # Does this command have the index_group options? - hasattr(options, "no_index") and - # Is this command allowed to perform this check? - not (options.disable_pip_version_check or options.no_index) - ) - # Check if we're using the latest version of pip available - if allow_version_check: - session = self._build_session( - options, - retries=0, - timeout=min(5, options.timeout) - ) - with session: - pip_version_check(session, options) - - # Shutdown the logging module - logging.shutdown() - - return SUCCESS - - -class RequirementCommand(Command): - - @staticmethod - def populate_requirement_set(requirement_set, # type: RequirementSet - args, # type: List[str] - options, # type: Values - finder, # type: PackageFinder - session, # type: PipSession - name, # type: str - wheel_cache # type: Optional[WheelCache] - ): - # type: (...) -> None - """ - Marshal cmd line args into a requirement set. 
- """ - # NOTE: As a side-effect, options.require_hashes and - # requirement_set.require_hashes may be updated - - for filename in options.constraints: - for req_to_add in parse_requirements( - filename, - constraint=True, finder=finder, options=options, - session=session, wheel_cache=wheel_cache): - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - - for req in args: - req_to_add = install_req_from_line( - req, None, isolated=options.isolated_mode, - use_pep517=options.use_pep517, - wheel_cache=wheel_cache - ) - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - - for req in options.editables: - req_to_add = install_req_from_editable( - req, - isolated=options.isolated_mode, - use_pep517=options.use_pep517, - wheel_cache=wheel_cache - ) - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - - for filename in options.requirements: - for req_to_add in parse_requirements( - filename, - finder=finder, options=options, session=session, - wheel_cache=wheel_cache, - use_pep517=options.use_pep517): - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - # If --require-hashes was a line in a requirements file, tell - # RequirementSet about it: - requirement_set.require_hashes = options.require_hashes - - if not (args or options.editables or options.requirements): - opts = {'name': name} - if options.find_links: - raise CommandError( - 'You must give at least one requirement to %(name)s ' - '(maybe you meant "pip %(name)s %(links)s"?)' % - dict(opts, links=' '.join(options.find_links))) - else: - raise CommandError( - 'You must give at least one requirement to %(name)s ' - '(see "pip help %(name)s")' % opts) - - def _build_package_finder( - self, - options, # type: Values - session, # type: PipSession - target_python=None, # type: Optional[TargetPython] - ignore_requires_python=None, # type: Optional[bool] - ): - # type: (...) -> PackageFinder - """ - Create a package finder appropriate to this requirement command. - - :param ignore_requires_python: Whether to ignore incompatible - "Requires-Python" values in links. Defaults to False. - """ - search_scope = make_search_scope(options) - selection_prefs = SelectionPreferences( - allow_yanked=True, - format_control=options.format_control, - allow_all_prereleases=options.pre, - prefer_binary=options.prefer_binary, - ignore_requires_python=ignore_requires_python, - ) - - return PackageFinder.create( - search_scope=search_scope, - selection_prefs=selection_prefs, - trusted_hosts=options.trusted_hosts, - session=session, - target_python=target_python, - ) diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/base_command.pyc b/env/lib/python2.7/site-packages/pip/_internal/cli/base_command.pyc deleted file mode 100644 index c56b3748..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/cli/base_command.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/cmdoptions.py b/env/lib/python2.7/site-packages/pip/_internal/cli/cmdoptions.py deleted file mode 100644 index c5c6c22d..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/cli/cmdoptions.py +++ /dev/null @@ -1,931 +0,0 @@ -""" -shared options and groups - -The principle here is to define options once, but *not* instantiate them -globally. One reason being that options with action='append' can carry state -between parses. pip parses general options twice internally, and shouldn't -pass on state. 
To be consistent, all options will follow this design. - -""" -from __future__ import absolute_import - -import logging -import textwrap -import warnings -from distutils.util import strtobool -from functools import partial -from optparse import SUPPRESS_HELP, Option, OptionGroup -from textwrap import dedent - -from pip._internal.exceptions import CommandError -from pip._internal.locations import USER_CACHE_DIR, get_src_prefix -from pip._internal.models.format_control import FormatControl -from pip._internal.models.index import PyPI -from pip._internal.models.search_scope import SearchScope -from pip._internal.models.target_python import TargetPython -from pip._internal.utils.hashes import STRONG_HASHES -from pip._internal.utils.misc import redact_password_from_url -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import BAR_TYPES - -if MYPY_CHECK_RUNNING: - from typing import Any, Callable, Dict, Optional, Tuple - from optparse import OptionParser, Values - from pip._internal.cli.parser import ConfigOptionParser - -logger = logging.getLogger(__name__) - - -def raise_option_error(parser, option, msg): - """ - Raise an option parsing error using parser.error(). - - Args: - parser: an OptionParser instance. - option: an Option instance. - msg: the error text. - """ - msg = '{} error: {}'.format(option, msg) - msg = textwrap.fill(' '.join(msg.split())) - parser.error(msg) - - -def make_option_group(group, parser): - # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup - """ - Return an OptionGroup object - group -- assumed to be dict with 'name' and 'options' keys - parser -- an optparse Parser - """ - option_group = OptionGroup(parser, group['name']) - for option in group['options']: - option_group.add_option(option()) - return option_group - - -def check_install_build_global(options, check_options=None): - # type: (Values, Optional[Values]) -> None - """Disable wheels if per-setup.py call options are set. - - :param options: The OptionParser options to update. - :param check_options: The options to check, if not supplied defaults to - options. - """ - if check_options is None: - check_options = options - - def getname(n): - return getattr(check_options, n, None) - names = ["build_options", "global_options", "install_options"] - if any(map(getname, names)): - control = options.format_control - control.disallow_binaries() - warnings.warn( - 'Disabling all use of wheels due to the use of --build-options ' - '/ --global-options / --install-options.', stacklevel=2, - ) - - -def check_dist_restriction(options, check_target=False): - # type: (Values, bool) -> None - """Function for determining if custom platform options are allowed. - - :param options: The OptionParser options. - :param check_target: Whether or not to check if --target is being used. - """ - dist_restriction_set = any([ - options.python_version, - options.platform, - options.abi, - options.implementation, - ]) - - binary_only = FormatControl(set(), {':all:'}) - sdist_dependencies_allowed = ( - options.format_control != binary_only and - not options.ignore_dependencies - ) - - # Installations or downloads using dist restrictions must not combine - # source distributions and dist-specific wheels, as they are not - # guaranteed to be locally compatible. 
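The comment above states the rule that `check_dist_restriction` enforces: when a download or install is pinned to a foreign platform, ABI, implementation, or Python version, pip must not be allowed to fall back to building sdists for the current machine. Reduced to a predicate, the check looks roughly like this (a simplification; the real code also inspects `--no-binary` via the format control):

    def dist_restriction_ok(platform=None, python_version=None, abi=None,
                            implementation=None, only_binary_all=False,
                            no_deps=False):
        # Restricting the target is only safe when no local sdist builds can happen.
        restricted = any([platform, python_version, abi, implementation])
        return (not restricted) or only_binary_all or no_deps

    print(dist_restriction_ok(platform='manylinux1_x86_64'))                        # False
    print(dist_restriction_ok(platform='manylinux1_x86_64', only_binary_all=True))  # True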
- if dist_restriction_set and sdist_dependencies_allowed: - raise CommandError( - "When restricting platform and interpreter constraints using " - "--python-version, --platform, --abi, or --implementation, " - "either --no-deps must be set, or --only-binary=:all: must be " - "set and --no-binary must not be set (or must be set to " - ":none:)." - ) - - if check_target: - if dist_restriction_set and not options.target_dir: - raise CommandError( - "Can not use any platform or abi specific options unless " - "installing via '--target'" - ) - - -########### -# options # -########### - -help_ = partial( - Option, - '-h', '--help', - dest='help', - action='help', - help='Show help.', -) # type: Callable[..., Option] - -isolated_mode = partial( - Option, - "--isolated", - dest="isolated_mode", - action="store_true", - default=False, - help=( - "Run pip in an isolated mode, ignoring environment variables and user " - "configuration." - ), -) # type: Callable[..., Option] - -require_virtualenv = partial( - Option, - # Run only if inside a virtualenv, bail if not. - '--require-virtualenv', '--require-venv', - dest='require_venv', - action='store_true', - default=False, - help=SUPPRESS_HELP -) # type: Callable[..., Option] - -verbose = partial( - Option, - '-v', '--verbose', - dest='verbose', - action='count', - default=0, - help='Give more output. Option is additive, and can be used up to 3 times.' -) # type: Callable[..., Option] - -no_color = partial( - Option, - '--no-color', - dest='no_color', - action='store_true', - default=False, - help="Suppress colored output", -) # type: Callable[..., Option] - -version = partial( - Option, - '-V', '--version', - dest='version', - action='store_true', - help='Show version and exit.', -) # type: Callable[..., Option] - -quiet = partial( - Option, - '-q', '--quiet', - dest='quiet', - action='count', - default=0, - help=( - 'Give less output. Option is additive, and can be used up to 3' - ' times (corresponding to WARNING, ERROR, and CRITICAL logging' - ' levels).' - ), -) # type: Callable[..., Option] - -progress_bar = partial( - Option, - '--progress-bar', - dest='progress_bar', - type='choice', - choices=list(BAR_TYPES.keys()), - default='on', - help=( - 'Specify type of progress to be displayed [' + - '|'.join(BAR_TYPES.keys()) + '] (default: %default)' - ), -) # type: Callable[..., Option] - -log = partial( - Option, - "--log", "--log-file", "--local-log", - dest="log", - metavar="path", - help="Path to a verbose appending log." -) # type: Callable[..., Option] - -no_input = partial( - Option, - # Don't ask for input - '--no-input', - dest='no_input', - action='store_true', - default=False, - help=SUPPRESS_HELP -) # type: Callable[..., Option] - -proxy = partial( - Option, - '--proxy', - dest='proxy', - type='str', - default='', - help="Specify a proxy in the form [user:passwd@]proxy.server:port." 
-) # type: Callable[..., Option] - -retries = partial( - Option, - '--retries', - dest='retries', - type='int', - default=5, - help="Maximum number of retries each connection should attempt " - "(default %default times).", -) # type: Callable[..., Option] - -timeout = partial( - Option, - '--timeout', '--default-timeout', - metavar='sec', - dest='timeout', - type='float', - default=15, - help='Set the socket timeout (default %default seconds).', -) # type: Callable[..., Option] - -skip_requirements_regex = partial( - Option, - # A regex to be used to skip requirements - '--skip-requirements-regex', - dest='skip_requirements_regex', - type='str', - default='', - help=SUPPRESS_HELP, -) # type: Callable[..., Option] - - -def exists_action(): - # type: () -> Option - return Option( - # Option when path already exist - '--exists-action', - dest='exists_action', - type='choice', - choices=['s', 'i', 'w', 'b', 'a'], - default=[], - action='append', - metavar='action', - help="Default action when a path already exists: " - "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", - ) - - -cert = partial( - Option, - '--cert', - dest='cert', - type='str', - metavar='path', - help="Path to alternate CA bundle.", -) # type: Callable[..., Option] - -client_cert = partial( - Option, - '--client-cert', - dest='client_cert', - type='str', - default=None, - metavar='path', - help="Path to SSL client certificate, a single file containing the " - "private key and the certificate in PEM format.", -) # type: Callable[..., Option] - -index_url = partial( - Option, - '-i', '--index-url', '--pypi-url', - dest='index_url', - metavar='URL', - default=PyPI.simple_url, - help="Base URL of the Python Package Index (default %default). " - "This should point to a repository compliant with PEP 503 " - "(the simple repository API) or a local directory laid out " - "in the same format.", -) # type: Callable[..., Option] - - -def extra_index_url(): - return Option( - '--extra-index-url', - dest='extra_index_urls', - metavar='URL', - action='append', - default=[], - help="Extra URLs of package indexes to use in addition to " - "--index-url. Should follow the same rules as " - "--index-url.", - ) - - -no_index = partial( - Option, - '--no-index', - dest='no_index', - action='store_true', - default=False, - help='Ignore package index (only looking at --find-links URLs instead).', -) # type: Callable[..., Option] - - -def find_links(): - # type: () -> Option - return Option( - '-f', '--find-links', - dest='find_links', - action='append', - default=[], - metavar='url', - help="If a url or path to an html file, then parse for links to " - "archives. If a local path or file:// url that's a directory, " - "then look for archives in the directory listing.", - ) - - -def make_search_scope(options, suppress_no_index=False): - # type: (Values, bool) -> SearchScope - """ - :param suppress_no_index: Whether to ignore the --no-index option - when constructing the SearchScope object. - """ - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index and not suppress_no_index: - logger.debug( - 'Ignoring indexes: %s', - ','.join(redact_password_from_url(url) for url in index_urls), - ) - index_urls = [] - - # Make sure find_links is a list before passing to create(). 
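`make_search_scope`, whose body continues below, combines `--index-url` with any `--extra-index-url` values and drops all of them when `--no-index` is set, leaving only the `--find-links` locations in the search scope. The index-selection step in isolation (a sketch; `effective_index_urls` is not a pip function):

    def effective_index_urls(index_url, extra_index_urls=(), no_index=False):
        # --no-index wins: configured indexes are ignored entirely.
        if no_index:
            return []
        return [index_url] + list(extra_index_urls)

    print(effective_index_urls('https://pypi.org/simple',
                               ['https://mirror.example.org/simple']))
    print(effective_index_urls('https://pypi.org/simple', no_index=True))   # []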
- find_links = options.find_links or [] - - search_scope = SearchScope.create( - find_links=find_links, index_urls=index_urls, - ) - - return search_scope - - -def trusted_host(): - # type: () -> Option - return Option( - "--trusted-host", - dest="trusted_hosts", - action="append", - metavar="HOSTNAME", - default=[], - help="Mark this host as trusted, even though it does not have valid " - "or any HTTPS.", - ) - - -def constraints(): - # type: () -> Option - return Option( - '-c', '--constraint', - dest='constraints', - action='append', - default=[], - metavar='file', - help='Constrain versions using the given constraints file. ' - 'This option can be used multiple times.' - ) - - -def requirements(): - # type: () -> Option - return Option( - '-r', '--requirement', - dest='requirements', - action='append', - default=[], - metavar='file', - help='Install from the given requirements file. ' - 'This option can be used multiple times.' - ) - - -def editable(): - # type: () -> Option - return Option( - '-e', '--editable', - dest='editables', - action='append', - default=[], - metavar='path/url', - help=('Install a project in editable mode (i.e. setuptools ' - '"develop mode") from a local project path or a VCS url.'), - ) - - -src = partial( - Option, - '--src', '--source', '--source-dir', '--source-directory', - dest='src_dir', - metavar='dir', - default=get_src_prefix(), - help='Directory to check out editable projects into. ' - 'The default in a virtualenv is "/src". ' - 'The default for global installs is "/src".' -) # type: Callable[..., Option] - - -def _get_format_control(values, option): - # type: (Values, Option) -> Any - """Get a format_control object.""" - return getattr(values, option.dest) - - -def _handle_no_binary(option, opt_str, value, parser): - # type: (Option, str, str, OptionParser) -> None - existing = _get_format_control(parser.values, option) - FormatControl.handle_mutual_excludes( - value, existing.no_binary, existing.only_binary, - ) - - -def _handle_only_binary(option, opt_str, value, parser): - # type: (Option, str, str, OptionParser) -> None - existing = _get_format_control(parser.values, option) - FormatControl.handle_mutual_excludes( - value, existing.only_binary, existing.no_binary, - ) - - -def no_binary(): - # type: () -> Option - format_control = FormatControl(set(), set()) - return Option( - "--no-binary", dest="format_control", action="callback", - callback=_handle_no_binary, type="str", - default=format_control, - help="Do not use binary packages. Can be supplied multiple times, and " - "each time adds to the existing value. Accepts either :all: to " - "disable all binary packages, :none: to empty the set, or one or " - "more package names with commas between them. Note that some " - "packages are tricky to compile and may fail to install when " - "this option is used on them.", - ) - - -def only_binary(): - # type: () -> Option - format_control = FormatControl(set(), set()) - return Option( - "--only-binary", dest="format_control", action="callback", - callback=_handle_only_binary, type="str", - default=format_control, - help="Do not use source packages. Can be supplied multiple times, and " - "each time adds to the existing value. Accepts either :all: to " - "disable all source packages, :none: to empty the set, or one or " - "more package names with commas between them. 
Packages without " - "binary distributions will fail to install when this option is " - "used on them.", - ) - - -platform = partial( - Option, - '--platform', - dest='platform', - metavar='platform', - default=None, - help=("Only use wheels compatible with . " - "Defaults to the platform of the running system."), -) # type: Callable[..., Option] - - -# This was made a separate function for unit-testing purposes. -def _convert_python_version(value): - # type: (str) -> Tuple[Tuple[int, ...], Optional[str]] - """ - Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. - - :return: A 2-tuple (version_info, error_msg), where `error_msg` is - non-None if and only if there was a parsing error. - """ - if not value: - # The empty string is the same as not providing a value. - return (None, None) - - parts = value.split('.') - if len(parts) > 3: - return ((), 'at most three version parts are allowed') - - if len(parts) == 1: - # Then we are in the case of "3" or "37". - value = parts[0] - if len(value) > 1: - parts = [value[0], value[1:]] - - try: - version_info = tuple(int(part) for part in parts) - except ValueError: - return ((), 'each version part must be an integer') - - return (version_info, None) - - -def _handle_python_version(option, opt_str, value, parser): - # type: (Option, str, str, OptionParser) -> None - """ - Handle a provided --python-version value. - """ - version_info, error_msg = _convert_python_version(value) - if error_msg is not None: - msg = ( - 'invalid --python-version value: {!r}: {}'.format( - value, error_msg, - ) - ) - raise_option_error(parser, option=option, msg=msg) - - parser.values.python_version = version_info - - -python_version = partial( - Option, - '--python-version', - dest='python_version', - metavar='python_version', - action='callback', - callback=_handle_python_version, type='str', - default=None, - help=dedent("""\ - The Python interpreter version to use for wheel and "Requires-Python" - compatibility checks. Defaults to a version derived from the running - interpreter. The version can be specified using up to three dot-separated - integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor - version can also be given as a string without dots (e.g. "37" for 3.7.0). - """), -) # type: Callable[..., Option] - - -implementation = partial( - Option, - '--implementation', - dest='implementation', - metavar='implementation', - default=None, - help=("Only use wheels compatible with Python " - "implementation , e.g. 'pp', 'jy', 'cp', " - " or 'ip'. If not specified, then the current " - "interpreter implementation is used. Use 'py' to force " - "implementation-agnostic wheels."), -) # type: Callable[..., Option] - - -abi = partial( - Option, - '--abi', - dest='abi', - metavar='abi', - default=None, - help=("Only use wheels compatible with Python " - "abi , e.g. 'pypy_41'. If not specified, then the " - "current interpreter abi tag is used. 
Generally " - "you will need to specify --implementation, " - "--platform, and --python-version when using " - "this option."), -) # type: Callable[..., Option] - - -def add_target_python_options(cmd_opts): - # type: (OptionGroup) -> None - cmd_opts.add_option(platform()) - cmd_opts.add_option(python_version()) - cmd_opts.add_option(implementation()) - cmd_opts.add_option(abi()) - - -def make_target_python(options): - # type: (Values) -> TargetPython - target_python = TargetPython( - platform=options.platform, - py_version_info=options.python_version, - abi=options.abi, - implementation=options.implementation, - ) - - return target_python - - -def prefer_binary(): - # type: () -> Option - return Option( - "--prefer-binary", - dest="prefer_binary", - action="store_true", - default=False, - help="Prefer older binary packages over newer source packages." - ) - - -cache_dir = partial( - Option, - "--cache-dir", - dest="cache_dir", - default=USER_CACHE_DIR, - metavar="dir", - help="Store the cache data in ." -) # type: Callable[..., Option] - - -def _handle_no_cache_dir(option, opt, value, parser): - # type: (Option, str, str, OptionParser) -> None - """ - Process a value provided for the --no-cache-dir option. - - This is an optparse.Option callback for the --no-cache-dir option. - """ - # The value argument will be None if --no-cache-dir is passed via the - # command-line, since the option doesn't accept arguments. However, - # the value can be non-None if the option is triggered e.g. by an - # environment variable, like PIP_NO_CACHE_DIR=true. - if value is not None: - # Then parse the string value to get argument error-checking. - try: - strtobool(value) - except ValueError as exc: - raise_option_error(parser, option=option, msg=str(exc)) - - # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() - # converted to 0 (like "false" or "no") caused cache_dir to be disabled - # rather than enabled (logic would say the latter). Thus, we disable - # the cache directory not just on values that parse to True, but (for - # backwards compatibility reasons) also on values that parse to False. - # In other words, always set it to False if the option is provided in - # some (valid) form. - parser.values.cache_dir = False - - -no_cache = partial( - Option, - "--no-cache-dir", - dest="cache_dir", - action="callback", - callback=_handle_no_cache_dir, - help="Disable the cache.", -) # type: Callable[..., Option] - -no_deps = partial( - Option, - '--no-deps', '--no-dependencies', - dest='ignore_dependencies', - action='store_true', - default=False, - help="Don't install package dependencies.", -) # type: Callable[..., Option] - -build_dir = partial( - Option, - '-b', '--build', '--build-dir', '--build-directory', - dest='build_dir', - metavar='dir', - help='Directory to unpack packages into and build in. Note that ' - 'an initial build still takes place in a temporary directory. ' - 'The location of temporary directories can be controlled by setting ' - 'the TMPDIR environment variable (TEMP on Windows) appropriately. ' - 'When passed, build directories are not cleaned in case of failures.' -) # type: Callable[..., Option] - -ignore_requires_python = partial( - Option, - '--ignore-requires-python', - dest='ignore_requires_python', - action='store_true', - help='Ignore the Requires-Python information.' 
-) # type: Callable[..., Option] - -no_build_isolation = partial( - Option, - '--no-build-isolation', - dest='build_isolation', - action='store_false', - default=True, - help='Disable isolation when building a modern source distribution. ' - 'Build dependencies specified by PEP 518 must be already installed ' - 'if this option is used.' -) # type: Callable[..., Option] - - -def _handle_no_use_pep517(option, opt, value, parser): - # type: (Option, str, str, OptionParser) -> None - """ - Process a value provided for the --no-use-pep517 option. - - This is an optparse.Option callback for the no_use_pep517 option. - """ - # Since --no-use-pep517 doesn't accept arguments, the value argument - # will be None if --no-use-pep517 is passed via the command-line. - # However, the value can be non-None if the option is triggered e.g. - # by an environment variable, for example "PIP_NO_USE_PEP517=true". - if value is not None: - msg = """A value was passed for --no-use-pep517, - probably using either the PIP_NO_USE_PEP517 environment variable - or the "no-use-pep517" config file option. Use an appropriate value - of the PIP_USE_PEP517 environment variable or the "use-pep517" - config file option instead. - """ - raise_option_error(parser, option=option, msg=msg) - - # Otherwise, --no-use-pep517 was passed via the command-line. - parser.values.use_pep517 = False - - -use_pep517 = partial( - Option, - '--use-pep517', - dest='use_pep517', - action='store_true', - default=None, - help='Use PEP 517 for building source distributions ' - '(use --no-use-pep517 to force legacy behaviour).' -) # type: Any - -no_use_pep517 = partial( - Option, - '--no-use-pep517', - dest='use_pep517', - action='callback', - callback=_handle_no_use_pep517, - default=None, - help=SUPPRESS_HELP -) # type: Any - -install_options = partial( - Option, - '--install-option', - dest='install_options', - action='append', - metavar='options', - help="Extra arguments to be supplied to the setup.py install " - "command (use like --install-option=\"--install-scripts=/usr/local/" - "bin\"). Use multiple --install-option options to pass multiple " - "options to setup.py install. If you are using an option with a " - "directory path, be sure to use absolute path.", -) # type: Callable[..., Option] - -global_options = partial( - Option, - '--global-option', - dest='global_options', - action='append', - metavar='options', - help="Extra global options to be supplied to the setup.py " - "call before the install command.", -) # type: Callable[..., Option] - -no_clean = partial( - Option, - '--no-clean', - action='store_true', - default=False, - help="Don't clean up build directories." -) # type: Callable[..., Option] - -pre = partial( - Option, - '--pre', - action='store_true', - default=False, - help="Include pre-release and development versions. By default, " - "pip only finds stable versions.", -) # type: Callable[..., Option] - -disable_pip_version_check = partial( - Option, - "--disable-pip-version-check", - dest="disable_pip_version_check", - action="store_true", - default=False, - help="Don't periodically check PyPI to determine whether a new version " - "of pip is available for download. 
Implied with --no-index.", -) # type: Callable[..., Option] - - -# Deprecated, Remove later -always_unzip = partial( - Option, - '-Z', '--always-unzip', - dest='always_unzip', - action='store_true', - help=SUPPRESS_HELP, -) # type: Callable[..., Option] - - -def _handle_merge_hash(option, opt_str, value, parser): - # type: (Option, str, str, OptionParser) -> None - """Given a value spelled "algo:digest", append the digest to a list - pointed to in a dict by the algo name.""" - if not parser.values.hashes: - parser.values.hashes = {} - try: - algo, digest = value.split(':', 1) - except ValueError: - parser.error('Arguments to %s must be a hash name ' - 'followed by a value, like --hash=sha256:abcde...' % - opt_str) - if algo not in STRONG_HASHES: - parser.error('Allowed hash algorithms for %s are %s.' % - (opt_str, ', '.join(STRONG_HASHES))) - parser.values.hashes.setdefault(algo, []).append(digest) - - -hash = partial( - Option, - '--hash', - # Hash values eventually end up in InstallRequirement.hashes due to - # __dict__ copying in process_line(). - dest='hashes', - action='callback', - callback=_handle_merge_hash, - type='string', - help="Verify that the package's archive matches this " - 'hash before installing. Example: --hash=sha256:abcdef...', -) # type: Callable[..., Option] - - -require_hashes = partial( - Option, - '--require-hashes', - dest='require_hashes', - action='store_true', - default=False, - help='Require a hash to check each requirement against, for ' - 'repeatable installs. This option is implied when any package in a ' - 'requirements file has a --hash option.', -) # type: Callable[..., Option] - - -list_path = partial( - Option, - '--path', - dest='path', - action='append', - help='Restrict to the specified installation path for listing ' - 'packages (can be used multiple times).' 
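`_handle_merge_hash` above turns repeated `--hash=algo:digest` arguments into a dict keyed by algorithm, so one requirement can carry several acceptable digests. The same bookkeeping on its own (a sketch with simplified error handling; `merge_hash` is not a pip function):

    def merge_hash(hashes, value):
        # "sha256:abcdef" accumulates under the 'sha256' key; later values append.
        algo, sep, digest = value.partition(':')
        if not sep:
            raise ValueError('expected "algo:digest", got %r' % value)
        hashes.setdefault(algo, []).append(digest)
        return hashes

    acc = {}
    merge_hash(acc, 'sha256:abcdef')
    merge_hash(acc, 'sha256:123456')
    print(acc)   # {'sha256': ['abcdef', '123456']}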
-) # type: Callable[..., Option] - - -def check_list_path_option(options): - # type: (Values) -> None - if options.path and (options.user or options.local): - raise CommandError( - "Cannot combine '--path' with '--user' or '--local'" - ) - - -########## -# groups # -########## - -general_group = { - 'name': 'General Options', - 'options': [ - help_, - isolated_mode, - require_virtualenv, - verbose, - version, - quiet, - log, - no_input, - proxy, - retries, - timeout, - skip_requirements_regex, - exists_action, - trusted_host, - cert, - client_cert, - cache_dir, - no_cache, - disable_pip_version_check, - no_color, - ] -} # type: Dict[str, Any] - -index_group = { - 'name': 'Package Index Options', - 'options': [ - index_url, - extra_index_url, - no_index, - find_links, - ] -} # type: Dict[str, Any] diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/cmdoptions.pyc b/env/lib/python2.7/site-packages/pip/_internal/cli/cmdoptions.pyc deleted file mode 100644 index 4a336d60..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/cli/cmdoptions.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/main_parser.py b/env/lib/python2.7/site-packages/pip/_internal/cli/main_parser.py deleted file mode 100644 index 6d0b719a..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/cli/main_parser.py +++ /dev/null @@ -1,98 +0,0 @@ -"""A single place for constructing and exposing the main parser -""" - -import os -import sys - -from pip._internal.cli import cmdoptions -from pip._internal.cli.parser import ( - ConfigOptionParser, UpdatingDefaultsHelpFormatter, -) -from pip._internal.commands import ( - commands_dict, get_similar_commands, get_summaries, -) -from pip._internal.exceptions import CommandError -from pip._internal.utils.misc import get_pip_version, get_prog -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Tuple, List - - -__all__ = ["create_main_parser", "parse_command"] - - -def create_main_parser(): - # type: () -> ConfigOptionParser - """Creates and returns the main parser for pip's CLI - """ - - parser_kw = { - 'usage': '\n%prog [options]', - 'add_help_option': False, - 'formatter': UpdatingDefaultsHelpFormatter(), - 'name': 'global', - 'prog': get_prog(), - } - - parser = ConfigOptionParser(**parser_kw) - parser.disable_interspersed_args() - - parser.version = get_pip_version() - - # add the general options - gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) - parser.add_option_group(gen_opts) - - # so the help formatter knows - parser.main = True # type: ignore - - # create command listing for description - command_summaries = get_summaries() - description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries] - parser.description = '\n'.join(description) - - return parser - - -def parse_command(args): - # type: (List[str]) -> Tuple[str, List[str]] - parser = create_main_parser() - - # Note: parser calls disable_interspersed_args(), so the result of this - # call is to split the initial args into the general options before the - # subcommand and everything else. 
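As the comment just above notes, the main parser calls `disable_interspersed_args()`, so parsing stops at the first positional argument and everything from the subcommand onward is handed back untouched. A runnable illustration of that split with a plain `optparse` parser:

    import optparse

    parser = optparse.OptionParser()
    parser.disable_interspersed_args()          # stop at the first positional argument
    parser.add_option('--timeout', type='float', default=15)

    general, rest = parser.parse_args(['--timeout=5', 'install', '--user', 'INITools'])
    print(general.timeout)   # 5.0
    print(rest)              # ['install', '--user', 'INITools']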
- # For example: - # args: ['--timeout=5', 'install', '--user', 'INITools'] - # general_options: ['--timeout==5'] - # args_else: ['install', '--user', 'INITools'] - general_options, args_else = parser.parse_args(args) - - # --version - if general_options.version: - sys.stdout.write(parser.version) # type: ignore - sys.stdout.write(os.linesep) - sys.exit() - - # pip || pip help -> print_help() - if not args_else or (args_else[0] == 'help' and len(args_else) == 1): - parser.print_help() - sys.exit() - - # the subcommand name - cmd_name = args_else[0] - - if cmd_name not in commands_dict: - guess = get_similar_commands(cmd_name) - - msg = ['unknown command "%s"' % cmd_name] - if guess: - msg.append('maybe you meant "%s"' % guess) - - raise CommandError(' - '.join(msg)) - - # all the args without the subcommand - cmd_args = args[:] - cmd_args.remove(cmd_name) - - return cmd_name, cmd_args diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/main_parser.pyc b/env/lib/python2.7/site-packages/pip/_internal/cli/main_parser.pyc deleted file mode 100644 index bf875d7e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/cli/main_parser.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/parser.py b/env/lib/python2.7/site-packages/pip/_internal/cli/parser.py deleted file mode 100644 index e1eaac42..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/cli/parser.py +++ /dev/null @@ -1,261 +0,0 @@ -"""Base option parser setup""" -from __future__ import absolute_import - -import logging -import optparse -import sys -import textwrap -from distutils.util import strtobool - -from pip._vendor.six import string_types - -from pip._internal.cli.status_codes import UNKNOWN_ERROR -from pip._internal.configuration import Configuration, ConfigurationError -from pip._internal.utils.compat import get_terminal_size - -logger = logging.getLogger(__name__) - - -class PrettyHelpFormatter(optparse.IndentedHelpFormatter): - """A prettier/less verbose help formatter for optparse.""" - - def __init__(self, *args, **kwargs): - # help position must be aligned with __init__.parseopts.description - kwargs['max_help_position'] = 30 - kwargs['indent_increment'] = 1 - kwargs['width'] = get_terminal_size()[0] - 2 - optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) - - def format_option_strings(self, option): - return self._format_option_strings(option, ' <%s>', ', ') - - def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): - """ - Return a comma-separated list of option strings and metavars. - - :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') - :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar - :param optsep: separator - """ - opts = [] - - if option._short_opts: - opts.append(option._short_opts[0]) - if option._long_opts: - opts.append(option._long_opts[0]) - if len(opts) > 1: - opts.insert(1, optsep) - - if option.takes_value(): - metavar = option.metavar or option.dest.lower() - opts.append(mvarfmt % metavar.lower()) - - return ''.join(opts) - - def format_heading(self, heading): - if heading == 'Options': - return '' - return heading + ':\n' - - def format_usage(self, usage): - """ - Ensure there is only one newline between usage and the first heading - if there is no description. 
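The unknown-command branch above builds its "maybe you meant ..." hint from `get_similar_commands`, which (as shown later in `commands/__init__.py`) is a thin wrapper around `difflib.get_close_matches`. The idea in isolation (command list shortened for the example):

    from difflib import get_close_matches

    COMMANDS = ['install', 'download', 'uninstall', 'freeze', 'list', 'show', 'check']

    def suggest(name):
        # Closest known command name, or None when nothing is reasonably close.
        matches = get_close_matches(name.lower(), COMMANDS)
        return matches[0] if matches else None

    print(suggest('instal'))    # install
    print(suggest('xyzzy'))     # None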
- """ - msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") - return msg - - def format_description(self, description): - # leave full control over description to us - if description: - if hasattr(self.parser, 'main'): - label = 'Commands' - else: - label = 'Description' - # some doc strings have initial newlines, some don't - description = description.lstrip('\n') - # some doc strings have final newlines and spaces, some don't - description = description.rstrip() - # dedent, then reindent - description = self.indent_lines(textwrap.dedent(description), " ") - description = '%s:\n%s\n' % (label, description) - return description - else: - return '' - - def format_epilog(self, epilog): - # leave full control over epilog to us - if epilog: - return epilog - else: - return '' - - def indent_lines(self, text, indent): - new_lines = [indent + line for line in text.split('\n')] - return "\n".join(new_lines) - - -class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): - """Custom help formatter for use in ConfigOptionParser. - - This is updates the defaults before expanding them, allowing - them to show up correctly in the help listing. - """ - - def expand_default(self, option): - if self.parser is not None: - self.parser._update_defaults(self.parser.defaults) - return optparse.IndentedHelpFormatter.expand_default(self, option) - - -class CustomOptionParser(optparse.OptionParser): - - def insert_option_group(self, idx, *args, **kwargs): - """Insert an OptionGroup at a given position.""" - group = self.add_option_group(*args, **kwargs) - - self.option_groups.pop() - self.option_groups.insert(idx, group) - - return group - - @property - def option_list_all(self): - """Get a list of all options, including those in option groups.""" - res = self.option_list[:] - for i in self.option_groups: - res.extend(i.option_list) - - return res - - -class ConfigOptionParser(CustomOptionParser): - """Custom option parser which updates its defaults by checking the - configuration files and environmental variables""" - - def __init__(self, *args, **kwargs): - self.name = kwargs.pop('name') - - isolated = kwargs.pop("isolated", False) - self.config = Configuration(isolated) - - assert self.name - optparse.OptionParser.__init__(self, *args, **kwargs) - - def check_default(self, option, key, val): - try: - return option.check_value(key, val) - except optparse.OptionValueError as exc: - print("An error occurred during configuration: %s" % exc) - sys.exit(3) - - def _get_ordered_configuration_items(self): - # Configuration gives keys in an unordered manner. Order them. - override_order = ["global", self.name, ":env:"] - - # Pool the options into different groups - section_items = {name: [] for name in override_order} - for section_key, val in self.config.items(): - # ignore empty values - if not val: - logger.debug( - "Ignoring configuration key '%s' as it's value is empty.", - section_key - ) - continue - - section, key = section_key.split(".", 1) - if section in override_order: - section_items[section].append((key, val)) - - # Yield each group in their override order - for section in override_order: - for key, val in section_items[section]: - yield key, val - - def _update_defaults(self, defaults): - """Updates the given defaults with values from the config files and - the environ. Does a little special handling for certain types of - options (lists).""" - - # Accumulate complex default state. 
- self.values = optparse.Values(self.defaults) - late_eval = set() - # Then set the options with those values - for key, val in self._get_ordered_configuration_items(): - # '--' because configuration supports only long names - option = self.get_option('--' + key) - - # Ignore options not present in this parser. E.g. non-globals put - # in [global] by users that want them to apply to all applicable - # commands. - if option is None: - continue - - if option.action in ('store_true', 'store_false', 'count'): - try: - val = strtobool(val) - except ValueError: - error_msg = invalid_config_error_message( - option.action, key, val - ) - self.error(error_msg) - - elif option.action == 'append': - val = val.split() - val = [self.check_default(option, key, v) for v in val] - elif option.action == 'callback': - late_eval.add(option.dest) - opt_str = option.get_opt_string() - val = option.convert_value(opt_str, val) - # From take_action - args = option.callback_args or () - kwargs = option.callback_kwargs or {} - option.callback(option, opt_str, val, self, *args, **kwargs) - else: - val = self.check_default(option, key, val) - - defaults[option.dest] = val - - for key in late_eval: - defaults[key] = getattr(self.values, key) - self.values = None - return defaults - - def get_default_values(self): - """Overriding to make updating the defaults after instantiation of - the option parser possible, _update_defaults() does the dirty work.""" - if not self.process_default_values: - # Old, pre-Optik 1.5 behaviour. - return optparse.Values(self.defaults) - - # Load the configuration, or error out in case of an error - try: - self.config.load() - except ConfigurationError as err: - self.exit(UNKNOWN_ERROR, str(err)) - - defaults = self._update_defaults(self.defaults.copy()) # ours - for option in self._get_all_options(): - default = defaults.get(option.dest) - if isinstance(default, string_types): - opt_str = option.get_opt_string() - defaults[option.dest] = option.check_value(opt_str, default) - return optparse.Values(defaults) - - def error(self, msg): - self.print_usage(sys.stderr) - self.exit(UNKNOWN_ERROR, "%s\n" % msg) - - -def invalid_config_error_message(action, key, val): - """Returns a better error message when invalid configuration option - is provided.""" - if action in ('store_true', 'store_false'): - return ("{0} is not a valid value for {1} option, " - "please specify a boolean value like yes/no, " - "true/false or 1/0 instead.").format(val, key) - - return ("{0} is not a valid value for {1} option, " - "please specify a numerical value like 1/0 " - "instead.").format(val, key) diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/parser.pyc b/env/lib/python2.7/site-packages/pip/_internal/cli/parser.pyc deleted file mode 100644 index d8322832..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/cli/parser.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/status_codes.py b/env/lib/python2.7/site-packages/pip/_internal/cli/status_codes.py deleted file mode 100644 index 275360a3..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/cli/status_codes.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import - -SUCCESS = 0 -ERROR = 1 -UNKNOWN_ERROR = 2 -VIRTUALENV_NOT_FOUND = 3 -PREVIOUS_BUILD_DIR_ERROR = 4 -NO_MATCHES_FOUND = 23 diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/status_codes.pyc b/env/lib/python2.7/site-packages/pip/_internal/cli/status_codes.pyc deleted file mode 100644 index 
607f1295..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/cli/status_codes.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/__init__.py b/env/lib/python2.7/site-packages/pip/_internal/commands/__init__.py deleted file mode 100644 index 9e0ab86b..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/__init__.py +++ /dev/null @@ -1,81 +0,0 @@ -""" -Package containing all pip commands -""" -from __future__ import absolute_import - -from pip._internal.commands.completion import CompletionCommand -from pip._internal.commands.configuration import ConfigurationCommand -from pip._internal.commands.debug import DebugCommand -from pip._internal.commands.download import DownloadCommand -from pip._internal.commands.freeze import FreezeCommand -from pip._internal.commands.hash import HashCommand -from pip._internal.commands.help import HelpCommand -from pip._internal.commands.list import ListCommand -from pip._internal.commands.check import CheckCommand -from pip._internal.commands.search import SearchCommand -from pip._internal.commands.show import ShowCommand -from pip._internal.commands.install import InstallCommand -from pip._internal.commands.uninstall import UninstallCommand -from pip._internal.commands.wheel import WheelCommand - -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import List, Type - from pip._internal.cli.base_command import Command - -commands_order = [ - InstallCommand, - DownloadCommand, - UninstallCommand, - FreezeCommand, - ListCommand, - ShowCommand, - CheckCommand, - ConfigurationCommand, - SearchCommand, - WheelCommand, - HashCommand, - CompletionCommand, - DebugCommand, - HelpCommand, -] # type: List[Type[Command]] - -commands_dict = {c.name: c for c in commands_order} - - -def get_summaries(ordered=True): - """Yields sorted (command name, command summary) tuples.""" - - if ordered: - cmditems = _sort_commands(commands_dict, commands_order) - else: - cmditems = commands_dict.items() - - for name, command_class in cmditems: - yield (name, command_class.summary) - - -def get_similar_commands(name): - """Command name auto-correct.""" - from difflib import get_close_matches - - name = name.lower() - - close_commands = get_close_matches(name, commands_dict.keys()) - - if close_commands: - return close_commands[0] - else: - return False - - -def _sort_commands(cmddict, order): - def keyfn(key): - try: - return order.index(key[1]) - except ValueError: - # unordered items should come last - return 0xff - - return sorted(cmddict.items(), key=keyfn) diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/__init__.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/__init__.pyc deleted file mode 100644 index 06c04c43..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/check.py b/env/lib/python2.7/site-packages/pip/_internal/commands/check.py deleted file mode 100644 index 801cecc0..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/check.py +++ /dev/null @@ -1,41 +0,0 @@ -import logging - -from pip._internal.cli.base_command import Command -from pip._internal.operations.check import ( - check_package_set, create_package_set_from_installed, -) - -logger = logging.getLogger(__name__) - - -class CheckCommand(Command): - """Verify installed packages have compatible dependencies.""" - name = 
'check' - usage = """ - %prog [options]""" - summary = 'Verify installed packages have compatible dependencies.' - - def run(self, options, args): - package_set, parsing_probs = create_package_set_from_installed() - missing, conflicting = check_package_set(package_set) - - for project_name in missing: - version = package_set[project_name].version - for dependency in missing[project_name]: - logger.info( - "%s %s requires %s, which is not installed.", - project_name, version, dependency[0], - ) - - for project_name in conflicting: - version = package_set[project_name].version - for dep_name, dep_version, req in conflicting[project_name]: - logger.info( - "%s %s has requirement %s, but you have %s %s.", - project_name, version, req, dep_name, dep_version, - ) - - if missing or conflicting or parsing_probs: - return 1 - else: - logger.info("No broken requirements found.") diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/check.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/check.pyc deleted file mode 100644 index b74925da..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/check.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/completion.py b/env/lib/python2.7/site-packages/pip/_internal/commands/completion.py deleted file mode 100644 index 2fcdd393..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/completion.py +++ /dev/null @@ -1,94 +0,0 @@ -from __future__ import absolute_import - -import sys -import textwrap - -from pip._internal.cli.base_command import Command -from pip._internal.utils.misc import get_prog - -BASE_COMPLETION = """ -# pip %(shell)s completion start%(script)s# pip %(shell)s completion end -""" - -COMPLETION_SCRIPTS = { - 'bash': """ - _pip_completion() - { - COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ - COMP_CWORD=$COMP_CWORD \\ - PIP_AUTO_COMPLETE=1 $1 ) ) - } - complete -o default -F _pip_completion %(prog)s - """, - 'zsh': """ - function _pip_completion { - local words cword - read -Ac words - read -cn cword - reply=( $( COMP_WORDS="$words[*]" \\ - COMP_CWORD=$(( cword-1 )) \\ - PIP_AUTO_COMPLETE=1 $words[1] ) ) - } - compctl -K _pip_completion %(prog)s - """, - 'fish': """ - function __fish_complete_pip - set -lx COMP_WORDS (commandline -o) "" - set -lx COMP_CWORD ( \\ - math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ - ) - set -lx PIP_AUTO_COMPLETE 1 - string split \\ -- (eval $COMP_WORDS[1]) - end - complete -fa "(__fish_complete_pip)" -c %(prog)s - """, -} - - -class CompletionCommand(Command): - """A helper command to be used for command completion.""" - name = 'completion' - summary = 'A helper command used for command completion.' 
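`CompletionCommand` keeps one shell snippet per supported shell (bash, zsh, fish); its `run()` method only substitutes the program name into the chosen snippet and frames it with start/end markers before printing. Roughly (the bash snippet below is a trivial stand-in, not pip's real completion function):

    import textwrap

    BASE = '\n# pip %(shell)s completion start%(script)s# pip %(shell)s completion end\n'
    SNIPPETS = {
        'bash': '\n_dummy_completion() { :; }\n'
                'complete -o default -F _dummy_completion %(prog)s\n',
    }

    def render(shell, prog='pip'):
        # Fill in the program name, then wrap the snippet in the markers.
        script = textwrap.dedent(SNIPPETS[shell] % {'prog': prog})
        return BASE % {'script': script, 'shell': shell}

    print(render('bash'))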
- ignore_require_venv = True - - def __init__(self, *args, **kw): - super(CompletionCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option( - '--bash', '-b', - action='store_const', - const='bash', - dest='shell', - help='Emit completion code for bash') - cmd_opts.add_option( - '--zsh', '-z', - action='store_const', - const='zsh', - dest='shell', - help='Emit completion code for zsh') - cmd_opts.add_option( - '--fish', '-f', - action='store_const', - const='fish', - dest='shell', - help='Emit completion code for fish') - - self.parser.insert_option_group(0, cmd_opts) - - def run(self, options, args): - """Prints the completion code of the given shell""" - shells = COMPLETION_SCRIPTS.keys() - shell_options = ['--' + shell for shell in sorted(shells)] - if options.shell in shells: - script = textwrap.dedent( - COMPLETION_SCRIPTS.get(options.shell, '') % { - 'prog': get_prog(), - } - ) - print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) - else: - sys.stderr.write( - 'ERROR: You must pass %s\n' % ' or '.join(shell_options) - ) diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/completion.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/completion.pyc deleted file mode 100644 index 075a02fb..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/completion.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/configuration.py b/env/lib/python2.7/site-packages/pip/_internal/commands/configuration.py deleted file mode 100644 index 1ec77d2a..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/configuration.py +++ /dev/null @@ -1,258 +0,0 @@ -import logging -import os -import subprocess - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.configuration import ( - Configuration, get_configuration_files, kinds, -) -from pip._internal.exceptions import PipError -from pip._internal.utils.deprecation import deprecated -from pip._internal.utils.misc import get_prog -from pip._internal.utils.virtualenv import running_under_virtualenv - -logger = logging.getLogger(__name__) - - -class ConfigurationCommand(Command): - """Manage local and global configuration. - - Subcommands: - - list: List the active configuration (or from the file specified) - edit: Edit the configuration file in an editor - get: Get the value associated with name - set: Set the name=value - unset: Unset the value associated with name - - If none of --user, --global and --site are passed, a virtual - environment configuration file is used if one is active and the file - exists. Otherwise, all modifications happen on the to the user file by - default. - """ - - name = 'config' - usage = """ - %prog [] list - %prog [] [--editor ] edit - - %prog [] get name - %prog [] set name value - %prog [] unset name - """ - - summary = "Manage local and global configuration." - - def __init__(self, *args, **kwargs): - super(ConfigurationCommand, self).__init__(*args, **kwargs) - - self.configuration = None - - self.cmd_opts.add_option( - '--editor', - dest='editor', - action='store', - default=None, - help=( - 'Editor to use to edit the file. Uses VISUAL or EDITOR ' - 'environment variables if not provided.' 
- ) - ) - - self.cmd_opts.add_option( - '--global', - dest='global_file', - action='store_true', - default=False, - help='Use the system-wide configuration file only' - ) - - self.cmd_opts.add_option( - '--user', - dest='user_file', - action='store_true', - default=False, - help='Use the user configuration file only' - ) - - self.cmd_opts.add_option( - '--site', - dest='site_file', - action='store_true', - default=False, - help='Use the current environment configuration file only' - ) - - self.cmd_opts.add_option( - '--venv', - dest='venv_file', - action='store_true', - default=False, - help=( - '[Deprecated] Use the current environment configuration ' - 'file in a virtual environment only' - ) - ) - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options, args): - handlers = { - "list": self.list_values, - "edit": self.open_in_editor, - "get": self.get_name, - "set": self.set_name_value, - "unset": self.unset_name - } - - # Determine action - if not args or args[0] not in handlers: - logger.error("Need an action ({}) to perform.".format( - ", ".join(sorted(handlers))) - ) - return ERROR - - action = args[0] - - # Determine which configuration files are to be loaded - # Depends on whether the command is modifying. - try: - load_only = self._determine_file( - options, need_value=(action in ["get", "set", "unset", "edit"]) - ) - except PipError as e: - logger.error(e.args[0]) - return ERROR - - # Load a new configuration - self.configuration = Configuration( - isolated=options.isolated_mode, load_only=load_only - ) - self.configuration.load() - - # Error handling happens here, not in the action-handlers. - try: - handlers[action](options, args[1:]) - except PipError as e: - logger.error(e.args[0]) - return ERROR - - return SUCCESS - - def _determine_file(self, options, need_value): - # Convert legacy venv_file option to site_file or error - if options.venv_file and not options.site_file: - if running_under_virtualenv(): - options.site_file = True - deprecated( - "The --venv option has been deprecated.", - replacement="--site", - gone_in="19.3", - ) - else: - raise PipError( - "Legacy --venv option requires a virtual environment. " - "Use --site instead." - ) - - file_options = [key for key, value in ( - (kinds.USER, options.user_file), - (kinds.GLOBAL, options.global_file), - (kinds.SITE, options.site_file), - ) if value] - - if not file_options: - if not need_value: - return None - # Default to user, unless there's a site file. - elif any( - os.path.exists(site_config_file) - for site_config_file in get_configuration_files()[kinds.SITE] - ): - return kinds.SITE - else: - return kinds.USER - elif len(file_options) == 1: - return file_options[0] - - raise PipError( - "Need exactly one file to operate upon " - "(--user, --site, --global) to perform." 
- ) - - def list_values(self, options, args): - self._get_n_args(args, "list", n=0) - - for key, value in sorted(self.configuration.items()): - logger.info("%s=%r", key, value) - - def get_name(self, options, args): - key = self._get_n_args(args, "get [name]", n=1) - value = self.configuration.get_value(key) - - logger.info("%s", value) - - def set_name_value(self, options, args): - key, value = self._get_n_args(args, "set [name] [value]", n=2) - self.configuration.set_value(key, value) - - self._save_configuration() - - def unset_name(self, options, args): - key = self._get_n_args(args, "unset [name]", n=1) - self.configuration.unset_value(key) - - self._save_configuration() - - def open_in_editor(self, options, args): - editor = self._determine_editor(options) - - fname = self.configuration.get_file_to_edit() - if fname is None: - raise PipError("Could not determine appropriate file.") - - try: - subprocess.check_call([editor, fname]) - except subprocess.CalledProcessError as e: - raise PipError( - "Editor Subprocess exited with exit code {}" - .format(e.returncode) - ) - - def _get_n_args(self, args, example, n): - """Helper to make sure the command got the right number of arguments - """ - if len(args) != n: - msg = ( - 'Got unexpected number of arguments, expected {}. ' - '(example: "{} config {}")' - ).format(n, get_prog(), example) - raise PipError(msg) - - if n == 1: - return args[0] - else: - return args - - def _save_configuration(self): - # We successfully ran a modifying command. Need to save the - # configuration. - try: - self.configuration.save() - except Exception: - logger.error( - "Unable to save configuration. Please report this as a bug.", - exc_info=1 - ) - raise PipError("Internal Error.") - - def _determine_editor(self, options): - if options.editor is not None: - return options.editor - elif "VISUAL" in os.environ: - return os.environ["VISUAL"] - elif "EDITOR" in os.environ: - return os.environ["EDITOR"] - else: - raise PipError("Could not determine editor to use.") diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/configuration.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/configuration.pyc deleted file mode 100644 index 4d1064f9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/configuration.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/debug.py b/env/lib/python2.7/site-packages/pip/_internal/commands/debug.py deleted file mode 100644 index eb4f8c4e..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/debug.py +++ /dev/null @@ -1,114 +0,0 @@ -from __future__ import absolute_import - -import locale -import logging -import sys - -from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import Command -from pip._internal.cli.cmdoptions import make_target_python -from pip._internal.cli.status_codes import SUCCESS -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import get_pip_version -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import format_tag - -if MYPY_CHECK_RUNNING: - from typing import Any, List - from optparse import Values - -logger = logging.getLogger(__name__) - - -def show_value(name, value): - # type: (str, str) -> None - logger.info('{}: {}'.format(name, value)) - - -def show_sys_implementation(): - # type: () -> None - logger.info('sys.implementation:') - if hasattr(sys, 'implementation'): - implementation = sys.implementation # type: 
ignore - implementation_name = implementation.name - else: - implementation_name = '' - - with indent_log(): - show_value('name', implementation_name) - - -def show_tags(options): - # type: (Values) -> None - tag_limit = 10 - - target_python = make_target_python(options) - tags = target_python.get_tags() - - # Display the target options that were explicitly provided. - formatted_target = target_python.format_given() - suffix = '' - if formatted_target: - suffix = ' (target: {})'.format(formatted_target) - - msg = 'Compatible tags: {}{}'.format(len(tags), suffix) - logger.info(msg) - - if options.verbose < 1 and len(tags) > tag_limit: - tags_limited = True - tags = tags[:tag_limit] - else: - tags_limited = False - - with indent_log(): - for tag in tags: - logger.info(format_tag(tag)) - - if tags_limited: - msg = ( - '...\n' - '[First {tag_limit} tags shown. Pass --verbose to show all.]' - ).format(tag_limit=tag_limit) - logger.info(msg) - - -class DebugCommand(Command): - """ - Display debug information. - """ - - name = 'debug' - usage = """ - %prog """ - summary = 'Show information useful for debugging.' - ignore_require_venv = True - - def __init__(self, *args, **kw): - super(DebugCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - cmdoptions.add_target_python_options(cmd_opts) - self.parser.insert_option_group(0, cmd_opts) - - def run(self, options, args): - # type: (Values, List[Any]) -> int - logger.warning( - "This command is only meant for debugging. " - "Do not use this with automation for parsing and getting these " - "details, since the output and options of this command may " - "change without notice." - ) - show_value('pip version', get_pip_version()) - show_value('sys.version', sys.version) - show_value('sys.executable', sys.executable) - show_value('sys.getdefaultencoding', sys.getdefaultencoding()) - show_value('sys.getfilesystemencoding', sys.getfilesystemencoding()) - show_value( - 'locale.getpreferredencoding', locale.getpreferredencoding(), - ) - show_value('sys.platform', sys.platform) - show_sys_implementation() - - show_tags(options) - - return SUCCESS diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/debug.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/debug.pyc deleted file mode 100644 index e2a8dedf..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/debug.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/download.py b/env/lib/python2.7/site-packages/pip/_internal/commands/download.py deleted file mode 100644 index 5642b561..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/download.py +++ /dev/null @@ -1,168 +0,0 @@ -from __future__ import absolute_import - -import logging -import os - -from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import RequirementCommand -from pip._internal.cli.cmdoptions import make_target_python -from pip._internal.legacy_resolve import Resolver -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req import RequirementSet -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.misc import ensure_dir, normalize_path -from pip._internal.utils.temp_dir import TempDirectory - -logger = logging.getLogger(__name__) - - -class DownloadCommand(RequirementCommand): - """ - Download packages from: - - - PyPI (and other indexes) using requirement specifiers. 
- - VCS project urls. - - Local project directories. - - Local or remote source archives. - - pip also supports downloading from "requirements files", which provide - an easy way to specify a whole environment to be downloaded. - """ - name = 'download' - - usage = """ - %prog [options] [package-index-options] ... - %prog [options] -r [package-index-options] ... - %prog [options] ... - %prog [options] ... - %prog [options] ...""" - - summary = 'Download packages.' - - def __init__(self, *args, **kw): - super(DownloadCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.global_options()) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option(cmdoptions.pre()) - cmd_opts.add_option(cmdoptions.no_clean()) - cmd_opts.add_option(cmdoptions.require_hashes()) - cmd_opts.add_option(cmdoptions.progress_bar()) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) - - cmd_opts.add_option( - '-d', '--dest', '--destination-dir', '--destination-directory', - dest='download_dir', - metavar='dir', - default=os.curdir, - help=("Download packages into ."), - ) - - cmdoptions.add_target_python_options(cmd_opts) - - index_opts = cmdoptions.make_option_group( - cmdoptions.index_group, - self.parser, - ) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) - - def run(self, options, args): - options.ignore_installed = True - # editable doesn't really make sense for `pip download`, but the bowels - # of the RequirementSet code require that property. - options.editables = [] - - cmdoptions.check_dist_restriction(options) - - options.src_dir = os.path.abspath(options.src_dir) - options.download_dir = normalize_path(options.download_dir) - - ensure_dir(options.download_dir) - - with self._build_session(options) as session: - target_python = make_target_python(options) - finder = self._build_package_finder( - options=options, - session=session, - target_python=target_python, - ) - build_delete = (not (options.no_clean or options.build_dir)) - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. 
If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None - - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="download" - ) as directory: - - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - ) - self.populate_requirement_set( - requirement_set, - args, - options, - finder, - session, - self.name, - None - ) - - preparer = RequirementPreparer( - build_dir=directory.path, - src_dir=options.src_dir, - download_dir=options.download_dir, - wheel_download_dir=None, - progress_bar=options.progress_bar, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - ) - - resolver = Resolver( - preparer=preparer, - finder=finder, - session=session, - wheel_cache=None, - use_user_site=False, - upgrade_strategy="to-satisfy-only", - force_reinstall=False, - ignore_dependencies=options.ignore_dependencies, - py_version_info=options.python_version, - ignore_requires_python=False, - ignore_installed=True, - isolated=options.isolated_mode, - ) - resolver.resolve(requirement_set) - - downloaded = ' '.join([ - req.name for req in requirement_set.successfully_downloaded - ]) - if downloaded: - logger.info('Successfully downloaded %s', downloaded) - - # Clean up - if not options.no_clean: - requirement_set.cleanup_files() - - return requirement_set diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/download.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/download.pyc deleted file mode 100644 index 5a687f83..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/download.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/freeze.py b/env/lib/python2.7/site-packages/pip/_internal/commands/freeze.py deleted file mode 100644 index 9fc5b046..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/freeze.py +++ /dev/null @@ -1,101 +0,0 @@ -from __future__ import absolute_import - -import sys - -from pip._internal.cache import WheelCache -from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import Command -from pip._internal.models.format_control import FormatControl -from pip._internal.operations.freeze import freeze -from pip._internal.utils.compat import stdlib_pkgs - -DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} - - -class FreezeCommand(Command): - """ - Output installed packages in requirements format. - - packages are listed in a case-insensitive sorted order. - """ - name = 'freeze' - usage = """ - %prog [options]""" - summary = 'Output installed packages in requirements format.' - log_streams = ("ext://sys.stderr", "ext://sys.stderr") - - def __init__(self, *args, **kw): - super(FreezeCommand, self).__init__(*args, **kw) - - self.cmd_opts.add_option( - '-r', '--requirement', - dest='requirements', - action='append', - default=[], - metavar='file', - help="Use the order in the given requirements file and its " - "comments when generating output. 
This option can be " - "used multiple times.") - self.cmd_opts.add_option( - '-f', '--find-links', - dest='find_links', - action='append', - default=[], - metavar='URL', - help='URL for finding packages, which will be added to the ' - 'output.') - self.cmd_opts.add_option( - '-l', '--local', - dest='local', - action='store_true', - default=False, - help='If in a virtualenv that has global access, do not output ' - 'globally-installed packages.') - self.cmd_opts.add_option( - '--user', - dest='user', - action='store_true', - default=False, - help='Only output packages installed in user-site.') - self.cmd_opts.add_option(cmdoptions.list_path()) - self.cmd_opts.add_option( - '--all', - dest='freeze_all', - action='store_true', - help='Do not skip these packages in the output:' - ' %s' % ', '.join(DEV_PKGS)) - self.cmd_opts.add_option( - '--exclude-editable', - dest='exclude_editable', - action='store_true', - help='Exclude editable package from output.') - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options, args): - format_control = FormatControl(set(), set()) - wheel_cache = WheelCache(options.cache_dir, format_control) - skip = set(stdlib_pkgs) - if not options.freeze_all: - skip.update(DEV_PKGS) - - cmdoptions.check_list_path_option(options) - - freeze_kwargs = dict( - requirement=options.requirements, - find_links=options.find_links, - local_only=options.local, - user_only=options.user, - paths=options.path, - skip_regex=options.skip_requirements_regex, - isolated=options.isolated_mode, - wheel_cache=wheel_cache, - skip=skip, - exclude_editable=options.exclude_editable, - ) - - try: - for line in freeze(**freeze_kwargs): - sys.stdout.write(line + '\n') - finally: - wheel_cache.cleanup() diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/freeze.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/freeze.pyc deleted file mode 100644 index 70baea3a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/freeze.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/hash.py b/env/lib/python2.7/site-packages/pip/_internal/commands/hash.py deleted file mode 100644 index 423440e9..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/hash.py +++ /dev/null @@ -1,57 +0,0 @@ -from __future__ import absolute_import - -import hashlib -import logging -import sys - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR -from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES -from pip._internal.utils.misc import read_chunks - -logger = logging.getLogger(__name__) - - -class HashCommand(Command): - """ - Compute a hash of a local package archive. - - These can be used with --hash in a requirements file to do repeatable - installs. - - """ - name = 'hash' - usage = '%prog [options] ...' - summary = 'Compute hashes of package archives.' 
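The deleted `freeze.py` above shows the idea behind `pip freeze`: walk the installed distributions and emit `name==version` lines, skipping pip/setuptools/wheel/distribute unless `--all` is passed. A minimal standalone sketch of that idea on Python 3.8+ using only `importlib.metadata` (illustrative names, not pip's internals):

```python
# Freeze-style listing: one "name==version" line per installed distribution.
from importlib import metadata

DEV_PKGS = {"pip", "setuptools", "wheel", "distribute"}  # skipped by default, as in the deleted freeze.py

def freeze_lines(include_dev=False):
    for dist in metadata.distributions():
        name = dist.metadata["Name"]
        if not name:
            continue  # skip distributions with broken metadata
        if not include_dev and name.lower() in DEV_PKGS:
            continue
        yield "{}=={}".format(name, dist.version)

if __name__ == "__main__":
    for line in sorted(freeze_lines(), key=str.lower):
        print(line)
```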
- ignore_require_venv = True - - def __init__(self, *args, **kw): - super(HashCommand, self).__init__(*args, **kw) - self.cmd_opts.add_option( - '-a', '--algorithm', - dest='algorithm', - choices=STRONG_HASHES, - action='store', - default=FAVORITE_HASH, - help='The hash algorithm to use: one of %s' % - ', '.join(STRONG_HASHES)) - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options, args): - if not args: - self.parser.print_usage(sys.stderr) - return ERROR - - algorithm = options.algorithm - for path in args: - logger.info('%s:\n--hash=%s:%s', - path, algorithm, _hash_of_file(path, algorithm)) - - -def _hash_of_file(path, algorithm): - """Return the hash digest of a file.""" - with open(path, 'rb') as archive: - hash = hashlib.new(algorithm) - for chunk in read_chunks(archive): - hash.update(chunk) - return hash.hexdigest() diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/hash.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/hash.pyc deleted file mode 100644 index 9137fdf9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/hash.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/help.py b/env/lib/python2.7/site-packages/pip/_internal/commands/help.py deleted file mode 100644 index 49a81cbb..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/help.py +++ /dev/null @@ -1,37 +0,0 @@ -from __future__ import absolute_import - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import SUCCESS -from pip._internal.exceptions import CommandError - - -class HelpCommand(Command): - """Show help for commands""" - name = 'help' - usage = """ - %prog """ - summary = 'Show help for commands.' 
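The deleted `hash.py` above computes `--hash` values by feeding the archive to `hashlib` in fixed-size chunks, so large files are never read into memory at once. A standalone sketch of that chunked-digest pattern (the function name, chunk size, and archive name here are illustrative):

```python
import hashlib

def file_digest(path, algorithm="sha256", chunk_size=8192):
    """Return the hex digest of a file, read in fixed-size chunks."""
    h = hashlib.new(algorithm)
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# The resulting value is what a requirements file pins with
#   somepackage==1.0 --hash=sha256:<digest>
print(file_digest("somepackage-1.0.tar.gz"))  # hypothetical archive
```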
- ignore_require_venv = True - - def run(self, options, args): - from pip._internal.commands import commands_dict, get_similar_commands - - try: - # 'pip help' with no args is handled by pip.__init__.parseopt() - cmd_name = args[0] # the command we need help for - except IndexError: - return SUCCESS - - if cmd_name not in commands_dict: - guess = get_similar_commands(cmd_name) - - msg = ['unknown command "%s"' % cmd_name] - if guess: - msg.append('maybe you meant "%s"' % guess) - - raise CommandError(' - '.join(msg)) - - command = commands_dict[cmd_name]() - command.parser.print_help() - - return SUCCESS diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/help.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/help.pyc deleted file mode 100644 index 962b39cd..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/help.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/install.py b/env/lib/python2.7/site-packages/pip/_internal/commands/install.py deleted file mode 100644 index ebeceacf..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/install.py +++ /dev/null @@ -1,580 +0,0 @@ -from __future__ import absolute_import - -import errno -import logging -import operator -import os -import shutil -from optparse import SUPPRESS_HELP - -from pip._vendor import pkg_resources - -from pip._internal.cache import WheelCache -from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import RequirementCommand -from pip._internal.cli.cmdoptions import make_target_python -from pip._internal.cli.status_codes import ERROR -from pip._internal.exceptions import ( - CommandError, InstallationError, PreviousBuildDirError, -) -from pip._internal.legacy_resolve import Resolver -from pip._internal.locations import distutils_scheme -from pip._internal.operations.check import check_install_conflicts -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req import RequirementSet, install_given_reqs -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.misc import ( - ensure_dir, get_installed_version, - protect_pip_from_modification_on_windows, -) -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.virtualenv import virtualenv_no_global -from pip._internal.wheel import WheelBuilder - -logger = logging.getLogger(__name__) - - -def is_wheel_installed(): - """ - Return whether the wheel package is installed. - """ - try: - import wheel # noqa: F401 - except ImportError: - return False - - return True - - -def build_wheels(builder, pep517_requirements, legacy_requirements, session): - """ - Build wheels for requirements, depending on whether wheel is installed. - """ - # We don't build wheels for legacy requirements if wheel is not installed. - should_build_legacy = is_wheel_installed() - - # Always build PEP 517 requirements - build_failures = builder.build( - pep517_requirements, - session=session, autobuilding=True - ) - - if should_build_legacy: - # We don't care about failures building legacy - # requirements, as we'll fall through to a direct - # install for those. - builder.build( - legacy_requirements, - session=session, autobuilding=True - ) - - return build_failures - - -class InstallCommand(RequirementCommand): - """ - Install packages from: - - - PyPI (and other indexes) using requirement specifiers. - - VCS project urls. 
- - Local project directories. - - Local or remote source archives. - - pip also supports installing from "requirements files," which provide - an easy way to specify a whole environment to be installed. - """ - name = 'install' - - usage = """ - %prog [options] [package-index-options] ... - %prog [options] -r [package-index-options] ... - %prog [options] [-e] ... - %prog [options] [-e] ... - %prog [options] ...""" - - summary = 'Install packages.' - - def __init__(self, *args, **kw): - super(InstallCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.pre()) - - cmd_opts.add_option(cmdoptions.editable()) - cmd_opts.add_option( - '-t', '--target', - dest='target_dir', - metavar='dir', - default=None, - help='Install packages into . ' - 'By default this will not replace existing files/folders in ' - '. Use --upgrade to replace existing packages in ' - 'with new versions.' - ) - cmdoptions.add_target_python_options(cmd_opts) - - cmd_opts.add_option( - '--user', - dest='use_user_site', - action='store_true', - help="Install to the Python user install directory for your " - "platform. Typically ~/.local/, or %APPDATA%\\Python on " - "Windows. (See the Python documentation for site.USER_BASE " - "for full details.)") - cmd_opts.add_option( - '--no-user', - dest='use_user_site', - action='store_false', - help=SUPPRESS_HELP) - cmd_opts.add_option( - '--root', - dest='root_path', - metavar='dir', - default=None, - help="Install everything relative to this alternate root " - "directory.") - cmd_opts.add_option( - '--prefix', - dest='prefix_path', - metavar='dir', - default=None, - help="Installation prefix where lib, bin and other top-level " - "folders are placed") - - cmd_opts.add_option(cmdoptions.build_dir()) - - cmd_opts.add_option(cmdoptions.src()) - - cmd_opts.add_option( - '-U', '--upgrade', - dest='upgrade', - action='store_true', - help='Upgrade all specified packages to the newest available ' - 'version. The handling of dependencies depends on the ' - 'upgrade-strategy used.' - ) - - cmd_opts.add_option( - '--upgrade-strategy', - dest='upgrade_strategy', - default='only-if-needed', - choices=['only-if-needed', 'eager'], - help='Determines how dependency upgrading should be handled ' - '[default: %default]. ' - '"eager" - dependencies are upgraded regardless of ' - 'whether the currently installed version satisfies the ' - 'requirements of the upgraded package(s). ' - '"only-if-needed" - are upgraded only when they do not ' - 'satisfy the requirements of the upgraded package(s).' 
- ) - - cmd_opts.add_option( - '--force-reinstall', - dest='force_reinstall', - action='store_true', - help='Reinstall all packages even if they are already ' - 'up-to-date.') - - cmd_opts.add_option( - '-I', '--ignore-installed', - dest='ignore_installed', - action='store_true', - help='Ignore the installed packages (reinstalling instead).') - - cmd_opts.add_option(cmdoptions.ignore_requires_python()) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) - - cmd_opts.add_option(cmdoptions.install_options()) - cmd_opts.add_option(cmdoptions.global_options()) - - cmd_opts.add_option( - "--compile", - action="store_true", - dest="compile", - default=True, - help="Compile Python source files to bytecode", - ) - - cmd_opts.add_option( - "--no-compile", - action="store_false", - dest="compile", - help="Do not compile Python source files to bytecode", - ) - - cmd_opts.add_option( - "--no-warn-script-location", - action="store_false", - dest="warn_script_location", - default=True, - help="Do not warn when installing scripts outside PATH", - ) - cmd_opts.add_option( - "--no-warn-conflicts", - action="store_false", - dest="warn_about_conflicts", - default=True, - help="Do not warn about broken dependencies", - ) - - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option(cmdoptions.no_clean()) - cmd_opts.add_option(cmdoptions.require_hashes()) - cmd_opts.add_option(cmdoptions.progress_bar()) - - index_opts = cmdoptions.make_option_group( - cmdoptions.index_group, - self.parser, - ) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) - - def run(self, options, args): - cmdoptions.check_install_build_global(options) - upgrade_strategy = "to-satisfy-only" - if options.upgrade: - upgrade_strategy = options.upgrade_strategy - - if options.build_dir: - options.build_dir = os.path.abspath(options.build_dir) - - cmdoptions.check_dist_restriction(options, check_target=True) - - options.src_dir = os.path.abspath(options.src_dir) - install_options = options.install_options or [] - if options.use_user_site: - if options.prefix_path: - raise CommandError( - "Can not combine '--user' and '--prefix' as they imply " - "different installation locations" - ) - if virtualenv_no_global(): - raise InstallationError( - "Can not perform a '--user' install. User site-packages " - "are not visible in this virtualenv." - ) - install_options.append('--user') - install_options.append('--prefix=') - - target_temp_dir = TempDirectory(kind="target") - if options.target_dir: - options.ignore_installed = True - options.target_dir = os.path.abspath(options.target_dir) - if (os.path.exists(options.target_dir) and not - os.path.isdir(options.target_dir)): - raise CommandError( - "Target path exists but is not a directory, will not " - "continue." 
- ) - - # Create a target directory for using with the target option - target_temp_dir.create() - install_options.append('--home=' + target_temp_dir.path) - - global_options = options.global_options or [] - - with self._build_session(options) as session: - target_python = make_target_python(options) - finder = self._build_package_finder( - options=options, - session=session, - target_python=target_python, - ignore_requires_python=options.ignore_requires_python, - ) - build_delete = (not (options.no_clean or options.build_dir)) - wheel_cache = WheelCache(options.cache_dir, options.format_control) - - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None - - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="install" - ) as directory: - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - check_supported_wheels=not options.target_dir, - ) - - try: - self.populate_requirement_set( - requirement_set, args, options, finder, session, - self.name, wheel_cache - ) - preparer = RequirementPreparer( - build_dir=directory.path, - src_dir=options.src_dir, - download_dir=None, - wheel_download_dir=None, - progress_bar=options.progress_bar, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - ) - - resolver = Resolver( - preparer=preparer, - finder=finder, - session=session, - wheel_cache=wheel_cache, - use_user_site=options.use_user_site, - upgrade_strategy=upgrade_strategy, - force_reinstall=options.force_reinstall, - ignore_dependencies=options.ignore_dependencies, - ignore_requires_python=options.ignore_requires_python, - ignore_installed=options.ignore_installed, - isolated=options.isolated_mode, - use_pep517=options.use_pep517 - ) - resolver.resolve(requirement_set) - - protect_pip_from_modification_on_windows( - modifying_pip=requirement_set.has_requirement("pip") - ) - - # Consider legacy and PEP517-using requirements separately - legacy_requirements = [] - pep517_requirements = [] - for req in requirement_set.requirements.values(): - if req.use_pep517: - pep517_requirements.append(req) - else: - legacy_requirements.append(req) - - wheel_builder = WheelBuilder( - finder, preparer, wheel_cache, - build_options=[], global_options=[], - ) - - build_failures = build_wheels( - builder=wheel_builder, - pep517_requirements=pep517_requirements, - legacy_requirements=legacy_requirements, - session=session, - ) - - # If we're using PEP 517, we cannot do a direct install - # so we fail here. - if build_failures: - raise InstallationError( - "Could not build wheels for {} which use" - " PEP 517 and cannot be installed directly".format( - ", ".join(r.name for r in build_failures))) - - to_install = resolver.get_installation_order( - requirement_set - ) - - # Consistency Checking of the package set we're installing. 
- should_warn_about_conflicts = ( - not options.ignore_dependencies and - options.warn_about_conflicts - ) - if should_warn_about_conflicts: - self._warn_about_conflicts(to_install) - - # Don't warn about script install locations if - # --target has been specified - warn_script_location = options.warn_script_location - if options.target_dir: - warn_script_location = False - - installed = install_given_reqs( - to_install, - install_options, - global_options, - root=options.root_path, - home=target_temp_dir.path, - prefix=options.prefix_path, - pycompile=options.compile, - warn_script_location=warn_script_location, - use_user_site=options.use_user_site, - ) - - lib_locations = get_lib_location_guesses( - user=options.use_user_site, - home=target_temp_dir.path, - root=options.root_path, - prefix=options.prefix_path, - isolated=options.isolated_mode, - ) - working_set = pkg_resources.WorkingSet(lib_locations) - - reqs = sorted(installed, key=operator.attrgetter('name')) - items = [] - for req in reqs: - item = req.name - try: - installed_version = get_installed_version( - req.name, working_set=working_set - ) - if installed_version: - item += '-' + installed_version - except Exception: - pass - items.append(item) - installed = ' '.join(items) - if installed: - logger.info('Successfully installed %s', installed) - except EnvironmentError as error: - show_traceback = (self.verbosity >= 1) - - message = create_env_error_message( - error, show_traceback, options.use_user_site, - ) - logger.error(message, exc_info=show_traceback) - - return ERROR - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - # Clean up - if not options.no_clean: - requirement_set.cleanup_files() - wheel_cache.cleanup() - - if options.target_dir: - self._handle_target_dir( - options.target_dir, target_temp_dir, options.upgrade - ) - return requirement_set - - def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): - ensure_dir(target_dir) - - # Checking both purelib and platlib directories for installed - # packages to be moved to target directory - lib_dir_list = [] - - with target_temp_dir: - # Checking both purelib and platlib directories for installed - # packages to be moved to target directory - scheme = distutils_scheme('', home=target_temp_dir.path) - purelib_dir = scheme['purelib'] - platlib_dir = scheme['platlib'] - data_dir = scheme['data'] - - if os.path.exists(purelib_dir): - lib_dir_list.append(purelib_dir) - if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: - lib_dir_list.append(platlib_dir) - if os.path.exists(data_dir): - lib_dir_list.append(data_dir) - - for lib_dir in lib_dir_list: - for item in os.listdir(lib_dir): - if lib_dir == data_dir: - ddir = os.path.join(data_dir, item) - if any(s.startswith(ddir) for s in lib_dir_list[:-1]): - continue - target_item_dir = os.path.join(target_dir, item) - if os.path.exists(target_item_dir): - if not upgrade: - logger.warning( - 'Target directory %s already exists. Specify ' - '--upgrade to force replacement.', - target_item_dir - ) - continue - if os.path.islink(target_item_dir): - logger.warning( - 'Target directory %s already exists and is ' - 'a link. 
Pip will not automatically replace ' - 'links, please remove if replacement is ' - 'desired.', - target_item_dir - ) - continue - if os.path.isdir(target_item_dir): - shutil.rmtree(target_item_dir) - else: - os.remove(target_item_dir) - - shutil.move( - os.path.join(lib_dir, item), - target_item_dir - ) - - def _warn_about_conflicts(self, to_install): - try: - package_set, _dep_info = check_install_conflicts(to_install) - except Exception: - logger.error("Error checking for conflicts.", exc_info=True) - return - missing, conflicting = _dep_info - - # NOTE: There is some duplication here from pip check - for project_name in missing: - version = package_set[project_name][0] - for dependency in missing[project_name]: - logger.critical( - "%s %s requires %s, which is not installed.", - project_name, version, dependency[1], - ) - - for project_name in conflicting: - version = package_set[project_name][0] - for dep_name, dep_version, req in conflicting[project_name]: - logger.critical( - "%s %s has requirement %s, but you'll have %s %s which is " - "incompatible.", - project_name, version, req, dep_name, dep_version, - ) - - -def get_lib_location_guesses(*args, **kwargs): - scheme = distutils_scheme('', *args, **kwargs) - return [scheme['purelib'], scheme['platlib']] - - -def create_env_error_message(error, show_traceback, using_user_site): - """Format an error message for an EnvironmentError - - It may occur anytime during the execution of the install command. - """ - parts = [] - - # Mention the error if we are not going to show a traceback - parts.append("Could not install packages due to an EnvironmentError") - if not show_traceback: - parts.append(": ") - parts.append(str(error)) - else: - parts.append(".") - - # Spilt the error indication from a helper message (if any) - parts[-1] += "\n" - - # Suggest useful actions to the user: - # (1) using user site-packages or (2) verifying the permissions - if error.errno == errno.EACCES: - user_option_part = "Consider using the `--user` option" - permissions_part = "Check the permissions" - - if not using_user_site: - parts.extend([ - user_option_part, " or ", - permissions_part.lower(), - ]) - else: - parts.append(permissions_part) - parts.append(".\n") - - return "".join(parts).strip() + "\n" diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/install.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/install.pyc deleted file mode 100644 index 77b01b94..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/install.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/list.py b/env/lib/python2.7/site-packages/pip/_internal/commands/list.py deleted file mode 100644 index cf71b13e..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/list.py +++ /dev/null @@ -1,311 +0,0 @@ -from __future__ import absolute_import - -import json -import logging - -from pip._vendor import six -from pip._vendor.six.moves import zip_longest - -from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import Command -from pip._internal.cli.cmdoptions import make_search_scope -from pip._internal.exceptions import CommandError -from pip._internal.index import PackageFinder -from pip._internal.models.selection_prefs import SelectionPreferences -from pip._internal.utils.misc import ( - dist_is_editable, get_installed_distributions, -) -from pip._internal.utils.packaging import get_installer - -logger = logging.getLogger(__name__) - - -class 
ListCommand(Command): - """ - List installed packages, including editables. - - Packages are listed in a case-insensitive sorted order. - """ - name = 'list' - usage = """ - %prog [options]""" - summary = 'List installed packages.' - - def __init__(self, *args, **kw): - super(ListCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option( - '-o', '--outdated', - action='store_true', - default=False, - help='List outdated packages') - cmd_opts.add_option( - '-u', '--uptodate', - action='store_true', - default=False, - help='List uptodate packages') - cmd_opts.add_option( - '-e', '--editable', - action='store_true', - default=False, - help='List editable projects.') - cmd_opts.add_option( - '-l', '--local', - action='store_true', - default=False, - help=('If in a virtualenv that has global access, do not list ' - 'globally-installed packages.'), - ) - self.cmd_opts.add_option( - '--user', - dest='user', - action='store_true', - default=False, - help='Only output packages installed in user-site.') - cmd_opts.add_option(cmdoptions.list_path()) - cmd_opts.add_option( - '--pre', - action='store_true', - default=False, - help=("Include pre-release and development versions. By default, " - "pip only finds stable versions."), - ) - - cmd_opts.add_option( - '--format', - action='store', - dest='list_format', - default="columns", - choices=('columns', 'freeze', 'json'), - help="Select the output format among: columns (default), freeze, " - "or json", - ) - - cmd_opts.add_option( - '--not-required', - action='store_true', - dest='not_required', - help="List packages that are not dependencies of " - "installed packages.", - ) - - cmd_opts.add_option( - '--exclude-editable', - action='store_false', - dest='include_editable', - help='Exclude editable package from output.', - ) - cmd_opts.add_option( - '--include-editable', - action='store_true', - dest='include_editable', - help='Include editable package from output.', - default=True, - ) - index_opts = cmdoptions.make_option_group( - cmdoptions.index_group, self.parser - ) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) - - def _build_package_finder(self, options, session): - """ - Create a package finder appropriate to this list command. - """ - search_scope = make_search_scope(options) - - # Pass allow_yanked=False to ignore yanked versions. - selection_prefs = SelectionPreferences( - allow_yanked=False, - allow_all_prereleases=options.pre, - ) - - return PackageFinder.create( - search_scope=search_scope, - selection_prefs=selection_prefs, - trusted_hosts=options.trusted_hosts, - session=session, - ) - - def run(self, options, args): - if options.outdated and options.uptodate: - raise CommandError( - "Options --outdated and --uptodate cannot be combined.") - - cmdoptions.check_list_path_option(options) - - packages = get_installed_distributions( - local_only=options.local, - user_only=options.user, - editables_only=options.editable, - include_editables=options.include_editable, - paths=options.path, - ) - - # get_not_required must be called firstly in order to find and - # filter out all dependencies correctly. Otherwise a package - # can't be identified as requirement because some parent packages - # could be filtered out before. 
- if options.not_required: - packages = self.get_not_required(packages, options) - - if options.outdated: - packages = self.get_outdated(packages, options) - elif options.uptodate: - packages = self.get_uptodate(packages, options) - - self.output_package_listing(packages, options) - - def get_outdated(self, packages, options): - return [ - dist for dist in self.iter_packages_latest_infos(packages, options) - if dist.latest_version > dist.parsed_version - ] - - def get_uptodate(self, packages, options): - return [ - dist for dist in self.iter_packages_latest_infos(packages, options) - if dist.latest_version == dist.parsed_version - ] - - def get_not_required(self, packages, options): - dep_keys = set() - for dist in packages: - dep_keys.update(requirement.key for requirement in dist.requires()) - return {pkg for pkg in packages if pkg.key not in dep_keys} - - def iter_packages_latest_infos(self, packages, options): - with self._build_session(options) as session: - finder = self._build_package_finder(options, session) - - for dist in packages: - typ = 'unknown' - all_candidates = finder.find_all_candidates(dist.key) - if not options.pre: - # Remove prereleases - all_candidates = [candidate for candidate in all_candidates - if not candidate.version.is_prerelease] - - evaluator = finder.make_candidate_evaluator( - project_name=dist.project_name, - ) - best_candidate = evaluator.get_best_candidate(all_candidates) - if best_candidate is None: - continue - - remote_version = best_candidate.version - if best_candidate.link.is_wheel: - typ = 'wheel' - else: - typ = 'sdist' - # This is dirty but makes the rest of the code much cleaner - dist.latest_version = remote_version - dist.latest_filetype = typ - yield dist - - def output_package_listing(self, packages, options): - packages = sorted( - packages, - key=lambda dist: dist.project_name.lower(), - ) - if options.list_format == 'columns' and packages: - data, header = format_for_columns(packages, options) - self.output_package_listing_columns(data, header) - elif options.list_format == 'freeze': - for dist in packages: - if options.verbose >= 1: - logger.info("%s==%s (%s)", dist.project_name, - dist.version, dist.location) - else: - logger.info("%s==%s", dist.project_name, dist.version) - elif options.list_format == 'json': - logger.info(format_for_json(packages, options)) - - def output_package_listing_columns(self, data, header): - # insert the header first: we need to know the size of column names - if len(data) > 0: - data.insert(0, header) - - pkg_strings, sizes = tabulate(data) - - # Create and add a separator. - if len(data) > 0: - pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes))) - - for val in pkg_strings: - logger.info(val) - - -def tabulate(vals): - # From pfmoore on GitHub: - # https://github.com/pypa/pip/issues/3651#issuecomment-216932564 - assert len(vals) > 0 - - sizes = [0] * max(len(x) for x in vals) - for row in vals: - sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)] - - result = [] - for row in vals: - display = " ".join([str(c).ljust(s) if c is not None else '' - for s, c in zip_longest(sizes, row)]) - result.append(display) - - return result, sizes - - -def format_for_columns(pkgs, options): - """ - Convert the package data into something usable - by output_package_listing_columns. - """ - running_outdated = options.outdated - # Adjust the header for the `pip list --outdated` case. 
- if running_outdated: - header = ["Package", "Version", "Latest", "Type"] - else: - header = ["Package", "Version"] - - data = [] - if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs): - header.append("Location") - if options.verbose >= 1: - header.append("Installer") - - for proj in pkgs: - # if we're working on the 'outdated' list, separate out the - # latest_version and type - row = [proj.project_name, proj.version] - - if running_outdated: - row.append(proj.latest_version) - row.append(proj.latest_filetype) - - if options.verbose >= 1 or dist_is_editable(proj): - row.append(proj.location) - if options.verbose >= 1: - row.append(get_installer(proj)) - - data.append(row) - - return data, header - - -def format_for_json(packages, options): - data = [] - for dist in packages: - info = { - 'name': dist.project_name, - 'version': six.text_type(dist.version), - } - if options.verbose >= 1: - info['location'] = dist.location - info['installer'] = get_installer(dist) - if options.outdated: - info['latest_version'] = six.text_type(dist.latest_version) - info['latest_filetype'] = dist.latest_filetype - data.append(info) - return json.dumps(data) diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/list.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/list.pyc deleted file mode 100644 index 451f49fb..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/list.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/search.py b/env/lib/python2.7/site-packages/pip/_internal/commands/search.py deleted file mode 100644 index 58027112..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/search.py +++ /dev/null @@ -1,139 +0,0 @@ -from __future__ import absolute_import - -import logging -import sys -import textwrap -from collections import OrderedDict - -from pip._vendor import pkg_resources -from pip._vendor.packaging.version import parse as parse_version -# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is -# why we ignore the type on this import -from pip._vendor.six.moves import xmlrpc_client # type: ignore - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS -from pip._internal.download import PipXmlrpcTransport -from pip._internal.exceptions import CommandError -from pip._internal.models.index import PyPI -from pip._internal.utils.compat import get_terminal_size -from pip._internal.utils.logging import indent_log - -logger = logging.getLogger(__name__) - - -class SearchCommand(Command): - """Search for PyPI packages whose name or summary contains .""" - name = 'search' - usage = """ - %prog [options] """ - summary = 'Search PyPI for packages.' 
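The `columns` output of the deleted `list.py` is built by measuring the widest cell in each column and left-justifying every row to those widths (see `tabulate` and `format_for_columns` above). A small self-contained sketch of that layout idea, assuming equal-length rows and made-up sample data:

```python
def tabulate(rows):
    """Left-justify each column to the width of its widest cell."""
    widths = [0] * len(rows[0])
    for row in rows:
        widths = [max(w, len(str(c))) for w, c in zip(widths, row)]
    lines = [" ".join(str(c).ljust(w) for w, c in zip(widths, row)) for row in rows]
    return lines, widths

header = ["Package", "Version"]
data = [["requests", "2.22.0"], ["six", "1.12.0"]]  # sample rows, not real output
lines, widths = tabulate([header] + data)
lines.insert(1, " ".join("-" * w for w in widths))  # separator under the header, as in the deleted code
print("\n".join(lines))
```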
- ignore_require_venv = True - - def __init__(self, *args, **kw): - super(SearchCommand, self).__init__(*args, **kw) - self.cmd_opts.add_option( - '-i', '--index', - dest='index', - metavar='URL', - default=PyPI.pypi_url, - help='Base URL of Python Package Index (default %default)') - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options, args): - if not args: - raise CommandError('Missing required argument (search query).') - query = args - pypi_hits = self.search(query, options) - hits = transform_hits(pypi_hits) - - terminal_width = None - if sys.stdout.isatty(): - terminal_width = get_terminal_size()[0] - - print_results(hits, terminal_width=terminal_width) - if pypi_hits: - return SUCCESS - return NO_MATCHES_FOUND - - def search(self, query, options): - index_url = options.index - with self._build_session(options) as session: - transport = PipXmlrpcTransport(index_url, session) - pypi = xmlrpc_client.ServerProxy(index_url, transport) - hits = pypi.search({'name': query, 'summary': query}, 'or') - return hits - - -def transform_hits(hits): - """ - The list from pypi is really a list of versions. We want a list of - packages with the list of versions stored inline. This converts the - list from pypi into one we can use. - """ - packages = OrderedDict() - for hit in hits: - name = hit['name'] - summary = hit['summary'] - version = hit['version'] - - if name not in packages.keys(): - packages[name] = { - 'name': name, - 'summary': summary, - 'versions': [version], - } - else: - packages[name]['versions'].append(version) - - # if this is the highest version, replace summary and score - if version == highest_version(packages[name]['versions']): - packages[name]['summary'] = summary - - return list(packages.values()) - - -def print_results(hits, name_column_width=None, terminal_width=None): - if not hits: - return - if name_column_width is None: - name_column_width = max([ - len(hit['name']) + len(highest_version(hit.get('versions', ['-']))) - for hit in hits - ]) + 4 - - installed_packages = [p.project_name for p in pkg_resources.working_set] - for hit in hits: - name = hit['name'] - summary = hit['summary'] or '' - latest = highest_version(hit.get('versions', ['-'])) - if terminal_width is not None: - target_width = terminal_width - name_column_width - 5 - if target_width > 10: - # wrap and indent summary to fit terminal - summary = textwrap.wrap(summary, target_width) - summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) - - line = '%-*s - %s' % (name_column_width, - '%s (%s)' % (name, latest), summary) - try: - logger.info(line) - if name in installed_packages: - dist = pkg_resources.get_distribution(name) - with indent_log(): - if dist.version == latest: - logger.info('INSTALLED: %s (latest)', dist.version) - else: - logger.info('INSTALLED: %s', dist.version) - if parse_version(latest).pre: - logger.info('LATEST: %s (pre-release; install' - ' with "pip install --pre")', latest) - else: - logger.info('LATEST: %s', latest) - except UnicodeEncodeError: - pass - - -def highest_version(versions): - return max(versions, key=parse_version) diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/search.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/search.pyc deleted file mode 100644 index 1ac8f429..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/search.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/show.py 
b/env/lib/python2.7/site-packages/pip/_internal/commands/show.py deleted file mode 100644 index a18a9020..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/show.py +++ /dev/null @@ -1,168 +0,0 @@ -from __future__ import absolute_import - -import logging -import os -from email.parser import FeedParser - -from pip._vendor import pkg_resources -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR, SUCCESS - -logger = logging.getLogger(__name__) - - -class ShowCommand(Command): - """ - Show information about one or more installed packages. - - The output is in RFC-compliant mail header format. - """ - name = 'show' - usage = """ - %prog [options] ...""" - summary = 'Show information about installed packages.' - ignore_require_venv = True - - def __init__(self, *args, **kw): - super(ShowCommand, self).__init__(*args, **kw) - self.cmd_opts.add_option( - '-f', '--files', - dest='files', - action='store_true', - default=False, - help='Show the full list of installed files for each package.') - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options, args): - if not args: - logger.warning('ERROR: Please provide a package name or names.') - return ERROR - query = args - - results = search_packages_info(query) - if not print_results( - results, list_files=options.files, verbose=options.verbose): - return ERROR - return SUCCESS - - -def search_packages_info(query): - """ - Gather details from installed distributions. Print distribution name, - version, location, and installed files. Installed files requires a - pip generated 'installed-files.txt' in the distributions '.egg-info' - directory. - """ - installed = {} - for p in pkg_resources.working_set: - installed[canonicalize_name(p.project_name)] = p - - query_names = [canonicalize_name(name) for name in query] - - for dist in [installed[pkg] for pkg in query_names if pkg in installed]: - package = { - 'name': dist.project_name, - 'version': dist.version, - 'location': dist.location, - 'requires': [dep.project_name for dep in dist.requires()], - } - file_list = None - metadata = None - if isinstance(dist, pkg_resources.DistInfoDistribution): - # RECORDs should be part of .dist-info metadatas - if dist.has_metadata('RECORD'): - lines = dist.get_metadata_lines('RECORD') - paths = [l.split(',')[0] for l in lines] - paths = [os.path.join(dist.location, p) for p in paths] - file_list = [os.path.relpath(p, dist.location) for p in paths] - - if dist.has_metadata('METADATA'): - metadata = dist.get_metadata('METADATA') - else: - # Otherwise use pip's log for .egg-info's - if dist.has_metadata('installed-files.txt'): - paths = dist.get_metadata_lines('installed-files.txt') - paths = [os.path.join(dist.egg_info, p) for p in paths] - file_list = [os.path.relpath(p, dist.location) for p in paths] - - if dist.has_metadata('PKG-INFO'): - metadata = dist.get_metadata('PKG-INFO') - - if dist.has_metadata('entry_points.txt'): - entry_points = dist.get_metadata_lines('entry_points.txt') - package['entry_points'] = entry_points - - if dist.has_metadata('INSTALLER'): - for line in dist.get_metadata_lines('INSTALLER'): - if line.strip(): - package['installer'] = line.strip() - break - - # @todo: Should pkg_resources.Distribution have a - # `get_pkg_info` method? 
- feed_parser = FeedParser() - feed_parser.feed(metadata) - pkg_info_dict = feed_parser.close() - for key in ('metadata-version', 'summary', - 'home-page', 'author', 'author-email', 'license'): - package[key] = pkg_info_dict.get(key) - - # It looks like FeedParser cannot deal with repeated headers - classifiers = [] - for line in metadata.splitlines(): - if line.startswith('Classifier: '): - classifiers.append(line[len('Classifier: '):]) - package['classifiers'] = classifiers - - if file_list: - package['files'] = sorted(file_list) - yield package - - -def print_results(distributions, list_files=False, verbose=False): - """ - Print the informations from installed distributions found. - """ - results_printed = False - for i, dist in enumerate(distributions): - results_printed = True - if i > 0: - logger.info("---") - - name = dist.get('name', '') - required_by = [ - pkg.project_name for pkg in pkg_resources.working_set - if name in [required.name for required in pkg.requires()] - ] - - logger.info("Name: %s", name) - logger.info("Version: %s", dist.get('version', '')) - logger.info("Summary: %s", dist.get('summary', '')) - logger.info("Home-page: %s", dist.get('home-page', '')) - logger.info("Author: %s", dist.get('author', '')) - logger.info("Author-email: %s", dist.get('author-email', '')) - logger.info("License: %s", dist.get('license', '')) - logger.info("Location: %s", dist.get('location', '')) - logger.info("Requires: %s", ', '.join(dist.get('requires', []))) - logger.info("Required-by: %s", ', '.join(required_by)) - - if verbose: - logger.info("Metadata-Version: %s", - dist.get('metadata-version', '')) - logger.info("Installer: %s", dist.get('installer', '')) - logger.info("Classifiers:") - for classifier in dist.get('classifiers', []): - logger.info(" %s", classifier) - logger.info("Entry-points:") - for entry in dist.get('entry_points', []): - logger.info(" %s", entry.strip()) - if list_files: - logger.info("Files:") - for line in dist.get('files', []): - logger.info(" %s", line.strip()) - if "files" not in dist: - logger.info("Cannot locate installed-files.txt") - return results_printed diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/show.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/show.pyc deleted file mode 100644 index 95d69aaf..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/show.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/uninstall.py b/env/lib/python2.7/site-packages/pip/_internal/commands/uninstall.py deleted file mode 100644 index 0cd6f54b..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/uninstall.py +++ /dev/null @@ -1,78 +0,0 @@ -from __future__ import absolute_import - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.cli.base_command import Command -from pip._internal.exceptions import InstallationError -from pip._internal.req import parse_requirements -from pip._internal.req.constructors import install_req_from_line -from pip._internal.utils.misc import protect_pip_from_modification_on_windows - - -class UninstallCommand(Command): - """ - Uninstall packages. - - pip is able to uninstall most installed packages. Known exceptions are: - - - Pure distutils packages installed with ``python setup.py install``, which - leave behind no metadata to determine what files were installed. - - Script wrappers installed by ``python setup.py develop``. - """ - name = 'uninstall' - usage = """ - %prog [options] ... 
- %prog [options] -r ...""" - summary = 'Uninstall packages.' - - def __init__(self, *args, **kw): - super(UninstallCommand, self).__init__(*args, **kw) - self.cmd_opts.add_option( - '-r', '--requirement', - dest='requirements', - action='append', - default=[], - metavar='file', - help='Uninstall all the packages listed in the given requirements ' - 'file. This option can be used multiple times.', - ) - self.cmd_opts.add_option( - '-y', '--yes', - dest='yes', - action='store_true', - help="Don't ask for confirmation of uninstall deletions.") - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options, args): - with self._build_session(options) as session: - reqs_to_uninstall = {} - for name in args: - req = install_req_from_line( - name, isolated=options.isolated_mode, - ) - if req.name: - reqs_to_uninstall[canonicalize_name(req.name)] = req - for filename in options.requirements: - for req in parse_requirements( - filename, - options=options, - session=session): - if req.name: - reqs_to_uninstall[canonicalize_name(req.name)] = req - if not reqs_to_uninstall: - raise InstallationError( - 'You must give at least one requirement to %(name)s (see ' - '"pip help %(name)s")' % dict(name=self.name) - ) - - protect_pip_from_modification_on_windows( - modifying_pip="pip" in reqs_to_uninstall - ) - - for req in reqs_to_uninstall.values(): - uninstall_pathset = req.uninstall( - auto_confirm=options.yes, verbose=self.verbosity > 0, - ) - if uninstall_pathset: - uninstall_pathset.commit() diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/uninstall.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/uninstall.pyc deleted file mode 100644 index 5153bc43..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/uninstall.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/wheel.py b/env/lib/python2.7/site-packages/pip/_internal/commands/wheel.py deleted file mode 100644 index 97f3b148..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/commands/wheel.py +++ /dev/null @@ -1,181 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import - -import logging -import os - -from pip._internal.cache import WheelCache -from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import RequirementCommand -from pip._internal.exceptions import CommandError, PreviousBuildDirError -from pip._internal.legacy_resolve import Resolver -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req import RequirementSet -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.wheel import WheelBuilder - -logger = logging.getLogger(__name__) - - -class WheelCommand(RequirementCommand): - """ - Build Wheel archives for your requirements and dependencies. - - Wheel is a built-package format, and offers the advantage of not - recompiling your software during every install. For more details, see the - wheel docs: https://wheel.readthedocs.io/en/latest/ - - Requirements: setuptools>=0.8, and wheel. - - 'pip wheel' uses the bdist_wheel setuptools extension from the wheel - package to build individual wheels. - - """ - - name = 'wheel' - usage = """ - %prog [options] ... - %prog [options] -r ... - %prog [options] [-e] ... - %prog [options] [-e] ... - %prog [options] ...""" - - summary = 'Build wheels from your requirements.' 
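The deleted `uninstall.py` above collects requirements keyed by `canonicalize_name`, so differently spelled names ("Django", "django", "python_dateutil") collapse to a single entry. A sketch of that normalisation using the standalone `packaging` distribution (pip vendors its own copy; the package list below is made up):

```python
from packaging.utils import canonicalize_name

requested = ["Django", "django", "Pillow", "python_dateutil"]  # hypothetical input
to_uninstall = {}
for name in requested:
    to_uninstall[canonicalize_name(name)] = name  # later spellings overwrite earlier ones

print(sorted(to_uninstall))  # ['django', 'pillow', 'python-dateutil']
```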
- - def __init__(self, *args, **kw): - super(WheelCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option( - '-w', '--wheel-dir', - dest='wheel_dir', - metavar='dir', - default=os.curdir, - help=("Build wheels into , where the default is the " - "current working directory."), - ) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option( - '--build-option', - dest='build_options', - metavar='options', - action='append', - help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", - ) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.editable()) - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option(cmdoptions.ignore_requires_python()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.progress_bar()) - - cmd_opts.add_option( - '--global-option', - dest='global_options', - action='append', - metavar='options', - help="Extra global options to be supplied to the setup.py " - "call before the 'bdist_wheel' command.") - - cmd_opts.add_option( - '--pre', - action='store_true', - default=False, - help=("Include pre-release and development versions. By default, " - "pip only finds stable versions."), - ) - - cmd_opts.add_option(cmdoptions.no_clean()) - cmd_opts.add_option(cmdoptions.require_hashes()) - - index_opts = cmdoptions.make_option_group( - cmdoptions.index_group, - self.parser, - ) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) - - def run(self, options, args): - cmdoptions.check_install_build_global(options) - - if options.build_dir: - options.build_dir = os.path.abspath(options.build_dir) - - options.src_dir = os.path.abspath(options.src_dir) - - with self._build_session(options) as session: - finder = self._build_package_finder(options, session) - build_delete = (not (options.no_clean or options.build_dir)) - wheel_cache = WheelCache(options.cache_dir, options.format_control) - - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="wheel" - ) as directory: - - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - ) - - try: - self.populate_requirement_set( - requirement_set, args, options, finder, session, - self.name, wheel_cache - ) - - preparer = RequirementPreparer( - build_dir=directory.path, - src_dir=options.src_dir, - download_dir=None, - wheel_download_dir=options.wheel_dir, - progress_bar=options.progress_bar, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - ) - - resolver = Resolver( - preparer=preparer, - finder=finder, - session=session, - wheel_cache=wheel_cache, - use_user_site=False, - upgrade_strategy="to-satisfy-only", - force_reinstall=False, - ignore_dependencies=options.ignore_dependencies, - ignore_requires_python=options.ignore_requires_python, - ignore_installed=True, - isolated=options.isolated_mode, - use_pep517=options.use_pep517 - ) - resolver.resolve(requirement_set) - - # build wheels - wb = WheelBuilder( - finder, preparer, wheel_cache, - build_options=options.build_options or [], - global_options=options.global_options or [], - no_clean=options.no_clean, - 
) - build_failures = wb.build( - requirement_set.requirements.values(), session=session, - ) - if len(build_failures) != 0: - raise CommandError( - "Failed to build one or more wheels" - ) - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - if not options.no_clean: - requirement_set.cleanup_files() - wheel_cache.cleanup() diff --git a/env/lib/python2.7/site-packages/pip/_internal/commands/wheel.pyc b/env/lib/python2.7/site-packages/pip/_internal/commands/wheel.pyc deleted file mode 100644 index 6cca36d7..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/commands/wheel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/configuration.py b/env/lib/python2.7/site-packages/pip/_internal/configuration.py deleted file mode 100644 index 437e92ee..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/configuration.py +++ /dev/null @@ -1,417 +0,0 @@ -"""Configuration management setup - -Some terminology: -- name - As written in config files. -- value - Value associated with a name -- key - Name combined with it's section (section.name) -- variant - A single word describing where the configuration key-value pair came from -""" - -import locale -import logging -import os -import sys - -from pip._vendor.six.moves import configparser - -from pip._internal.exceptions import ( - ConfigurationError, ConfigurationFileCouldNotBeLoaded, -) -from pip._internal.utils import appdirs -from pip._internal.utils.compat import WINDOWS, expanduser -from pip._internal.utils.misc import ensure_dir, enum -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( - Any, Dict, Iterable, List, NewType, Optional, Tuple - ) - - RawConfigParser = configparser.RawConfigParser # Shorthand - Kind = NewType("Kind", str) - -logger = logging.getLogger(__name__) - - -# NOTE: Maybe use the optionx attribute to normalize keynames. -def _normalize_name(name): - # type: (str) -> str - """Make a name consistent regardless of source (environment or file) - """ - name = name.lower().replace('_', '-') - if name.startswith('--'): - name = name[2:] # only prefer long opts - return name - - -def _disassemble_key(name): - # type: (str) -> List[str] - if "." not in name: - error_message = ( - "Key does not contain dot separated section and key. " - "Perhaps you wanted to use 'global.{}' instead?" - ).format(name) - raise ConfigurationError(error_message) - return name.split(".", 1) - - -# The kinds of configurations there are. -kinds = enum( - USER="user", # User Specific - GLOBAL="global", # System Wide - SITE="site", # [Virtual] Environment Specific - ENV="env", # from PIP_CONFIG_FILE - ENV_VAR="env-var", # from Environment Variables -) - - -CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf' - - -def get_configuration_files(): - global_config_files = [ - os.path.join(path, CONFIG_BASENAME) - for path in appdirs.site_config_dirs('pip') - ] - - site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) - legacy_config_file = os.path.join( - expanduser('~'), - 'pip' if WINDOWS else '.pip', - CONFIG_BASENAME, - ) - new_config_file = os.path.join( - appdirs.user_config_dir("pip"), CONFIG_BASENAME - ) - return { - kinds.GLOBAL: global_config_files, - kinds.SITE: [site_config_file], - kinds.USER: [legacy_config_file, new_config_file], - } - - -class Configuration(object): - """Handles management of configuration. - - Provides an interface to accessing and managing configuration files. 
- - This class converts provides an API that takes "section.key-name" style - keys and stores the value associated with it as "key-name" under the - section "section". - - This allows for a clean interface wherein the both the section and the - key-name are preserved in an easy to manage form in the configuration files - and the data stored is also nice. - """ - - def __init__(self, isolated, load_only=None): - # type: (bool, Kind) -> None - super(Configuration, self).__init__() - - _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None] - if load_only not in _valid_load_only: - raise ConfigurationError( - "Got invalid value for load_only - should be one of {}".format( - ", ".join(map(repr, _valid_load_only[:-1])) - ) - ) - self.isolated = isolated # type: bool - self.load_only = load_only # type: Optional[Kind] - - # The order here determines the override order. - self._override_order = [ - kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR - ] - - self._ignore_env_names = ["version", "help"] - - # Because we keep track of where we got the data from - self._parsers = { - variant: [] for variant in self._override_order - } # type: Dict[Kind, List[Tuple[str, RawConfigParser]]] - self._config = { - variant: {} for variant in self._override_order - } # type: Dict[Kind, Dict[str, Any]] - self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]] - - def load(self): - # type: () -> None - """Loads configuration from configuration files and environment - """ - self._load_config_files() - if not self.isolated: - self._load_environment_vars() - - def get_file_to_edit(self): - # type: () -> Optional[str] - """Returns the file with highest priority in configuration - """ - assert self.load_only is not None, \ - "Need to be specified a file to be editing" - - try: - return self._get_parser_to_modify()[0] - except IndexError: - return None - - def items(self): - # type: () -> Iterable[Tuple[str, Any]] - """Returns key-value pairs like dict.items() representing the loaded - configuration - """ - return self._dictionary.items() - - def get_value(self, key): - # type: (str) -> Any - """Get a value from the configuration. - """ - try: - return self._dictionary[key] - except KeyError: - raise ConfigurationError("No such key - {}".format(key)) - - def set_value(self, key, value): - # type: (str, Any) -> None - """Modify a value in the configuration. - """ - self._ensure_have_load_only() - - fname, parser = self._get_parser_to_modify() - - if parser is not None: - section, name = _disassemble_key(key) - - # Modify the parser and the configuration - if not parser.has_section(section): - parser.add_section(section) - parser.set(section, name, value) - - self._config[self.load_only][key] = value - self._mark_as_modified(fname, parser) - - def unset_value(self, key): - # type: (str) -> None - """Unset a value in the configuration. 
- """ - self._ensure_have_load_only() - - if key not in self._config[self.load_only]: - raise ConfigurationError("No such key - {}".format(key)) - - fname, parser = self._get_parser_to_modify() - - if parser is not None: - section, name = _disassemble_key(key) - - # Remove the key in the parser - modified_something = False - if parser.has_section(section): - # Returns whether the option was removed or not - modified_something = parser.remove_option(section, name) - - if modified_something: - # name removed from parser, section may now be empty - section_iter = iter(parser.items(section)) - try: - val = next(section_iter) - except StopIteration: - val = None - - if val is None: - parser.remove_section(section) - - self._mark_as_modified(fname, parser) - else: - raise ConfigurationError( - "Fatal Internal error [id=1]. Please report as a bug." - ) - - del self._config[self.load_only][key] - - def save(self): - # type: () -> None - """Save the current in-memory state. - """ - self._ensure_have_load_only() - - for fname, parser in self._modified_parsers: - logger.info("Writing to %s", fname) - - # Ensure directory exists. - ensure_dir(os.path.dirname(fname)) - - with open(fname, "w") as f: - parser.write(f) - - # - # Private routines - # - - def _ensure_have_load_only(self): - # type: () -> None - if self.load_only is None: - raise ConfigurationError("Needed a specific file to be modifying.") - logger.debug("Will be working with %s variant only", self.load_only) - - @property - def _dictionary(self): - # type: () -> Dict[str, Any] - """A dictionary representing the loaded configuration. - """ - # NOTE: Dictionaries are not populated if not loaded. So, conditionals - # are not needed here. - retval = {} - - for variant in self._override_order: - retval.update(self._config[variant]) - - return retval - - def _load_config_files(self): - # type: () -> None - """Loads configuration from configuration files - """ - config_files = dict(self._iter_config_files()) - if config_files[kinds.ENV][0:1] == [os.devnull]: - logger.debug( - "Skipping loading configuration files due to " - "environment's PIP_CONFIG_FILE being os.devnull" - ) - return - - for variant, files in config_files.items(): - for fname in files: - # If there's specific variant set in `load_only`, load only - # that variant, not the others. - if self.load_only is not None and variant != self.load_only: - logger.debug( - "Skipping file '%s' (variant: %s)", fname, variant - ) - continue - - parser = self._load_file(variant, fname) - - # Keeping track of the parsers used - self._parsers[variant].append((fname, parser)) - - def _load_file(self, variant, fname): - # type: (Kind, str) -> RawConfigParser - logger.debug("For variant '%s', will try loading '%s'", variant, fname) - parser = self._construct_parser(fname) - - for section in parser.sections(): - items = parser.items(section) - self._config[variant].update(self._normalized_keys(section, items)) - - return parser - - def _construct_parser(self, fname): - # type: (str) -> RawConfigParser - parser = configparser.RawConfigParser() - # If there is no such file, don't bother reading it but create the - # parser anyway, to hold the data. - # Doing this is useful when modifying and saving files, where we don't - # need to construct a parser. 
- if os.path.exists(fname): - try: - parser.read(fname) - except UnicodeDecodeError: - # See https://github.com/pypa/pip/issues/4963 - raise ConfigurationFileCouldNotBeLoaded( - reason="contains invalid {} characters".format( - locale.getpreferredencoding(False) - ), - fname=fname, - ) - except configparser.Error as error: - # See https://github.com/pypa/pip/issues/4893 - raise ConfigurationFileCouldNotBeLoaded(error=error) - return parser - - def _load_environment_vars(self): - # type: () -> None - """Loads configuration from environment variables - """ - self._config[kinds.ENV_VAR].update( - self._normalized_keys(":env:", self._get_environ_vars()) - ) - - def _normalized_keys(self, section, items): - # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any] - """Normalizes items to construct a dictionary with normalized keys. - - This routine is where the names become keys and are made the same - regardless of source - configuration files or environment. - """ - normalized = {} - for name, val in items: - key = section + "." + _normalize_name(name) - normalized[key] = val - return normalized - - def _get_environ_vars(self): - # type: () -> Iterable[Tuple[str, str]] - """Returns a generator with all environmental vars with prefix PIP_""" - for key, val in os.environ.items(): - should_be_yielded = ( - key.startswith("PIP_") and - key[4:].lower() not in self._ignore_env_names - ) - if should_be_yielded: - yield key[4:].lower(), val - - # XXX: This is patched in the tests. - def _iter_config_files(self): - # type: () -> Iterable[Tuple[Kind, List[str]]] - """Yields variant and configuration files associated with it. - - This should be treated like items of a dictionary. - """ - # SMELL: Move the conditions out of this function - - # environment variables have the lowest priority - config_file = os.environ.get('PIP_CONFIG_FILE', None) - if config_file is not None: - yield kinds.ENV, [config_file] - else: - yield kinds.ENV, [] - - config_files = get_configuration_files() - - # at the base we have any global configuration - yield kinds.GLOBAL, config_files[kinds.GLOBAL] - - # per-user configuration next - should_load_user_config = not self.isolated and not ( - config_file and os.path.exists(config_file) - ) - if should_load_user_config: - # The legacy config file is overridden by the new config file - yield kinds.USER, config_files[kinds.USER] - - # finally virtualenv configuration first trumping others - yield kinds.SITE, config_files[kinds.SITE] - - def _get_parser_to_modify(self): - # type: () -> Tuple[str, RawConfigParser] - # Determine which parser to modify - parsers = self._parsers[self.load_only] - if not parsers: - # This should not happen if everything works correctly. - raise ConfigurationError( - "Fatal Internal error [id=2]. Please report as a bug." - ) - - # Use the highest priority parser. - return parsers[-1] - - # XXX: This is patched in the tests. 
- def _mark_as_modified(self, fname, parser): - # type: (str, RawConfigParser) -> None - file_parser_tuple = (fname, parser) - if file_parser_tuple not in self._modified_parsers: - self._modified_parsers.append(file_parser_tuple) diff --git a/env/lib/python2.7/site-packages/pip/_internal/configuration.pyc b/env/lib/python2.7/site-packages/pip/_internal/configuration.pyc deleted file mode 100644 index 9c3836ab..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/configuration.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/__init__.py b/env/lib/python2.7/site-packages/pip/_internal/distributions/__init__.py deleted file mode 100644 index fdf332a8..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/distributions/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -from pip._internal.distributions.source import SourceDistribution -from pip._internal.distributions.wheel import WheelDistribution - -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from pip._internal.distributions.base import AbstractDistribution - from pip._internal.req.req_install import InstallRequirement - - -def make_distribution_for_install_requirement(install_req): - # type: (InstallRequirement) -> AbstractDistribution - """Returns a Distribution for the given InstallRequirement - """ - # If it's not an editable, is a wheel, it's a WheelDistribution - if install_req.editable: - return SourceDistribution(install_req) - - if install_req.link and install_req.is_wheel: - return WheelDistribution(install_req) - - # Otherwise, a SourceDistribution - return SourceDistribution(install_req) diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/__init__.pyc b/env/lib/python2.7/site-packages/pip/_internal/distributions/__init__.pyc deleted file mode 100644 index 8815421b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/distributions/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/base.py b/env/lib/python2.7/site-packages/pip/_internal/distributions/base.py deleted file mode 100644 index b9af3f02..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/distributions/base.py +++ /dev/null @@ -1,33 +0,0 @@ -import abc - -from pip._vendor.six import add_metaclass - - -@add_metaclass(abc.ABCMeta) -class AbstractDistribution(object): - """A base class for handling installable artifacts. - - The requirements for anything installable are as follows: - - - we must be able to determine the requirement name - (or we can't correctly handle the non-upgrade case). - - - for packages with setup requirements, we must also be able - to determine their requirements without installing additional - packages (for the same reason as run-time dependencies) - - - we must be able to create a Distribution object exposing the - above metadata. 
- """ - - def __init__(self, req): - super(AbstractDistribution, self).__init__() - self.req = req - - @abc.abstractmethod - def get_pkg_resources_distribution(self): - raise NotImplementedError() - - @abc.abstractmethod - def prepare_distribution_metadata(self, finder, build_isolation): - raise NotImplementedError() diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/base.pyc b/env/lib/python2.7/site-packages/pip/_internal/distributions/base.pyc deleted file mode 100644 index 3ec58ed7..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/distributions/base.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/installed.py b/env/lib/python2.7/site-packages/pip/_internal/distributions/installed.py deleted file mode 100644 index c4a64e7c..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/distributions/installed.py +++ /dev/null @@ -1,15 +0,0 @@ -from pip._internal.distributions.base import AbstractDistribution - - -class InstalledDistribution(AbstractDistribution): - """Represents an installed package. - - This does not need any preparation as the required information has already - been computed. - """ - - def get_pkg_resources_distribution(self): - return self.req.satisfied_by - - def prepare_distribution_metadata(self, finder, build_isolation): - pass diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/installed.pyc b/env/lib/python2.7/site-packages/pip/_internal/distributions/installed.pyc deleted file mode 100644 index ae06b811..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/distributions/installed.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/source.py b/env/lib/python2.7/site-packages/pip/_internal/distributions/source.py deleted file mode 100644 index e5d9fd4b..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/distributions/source.py +++ /dev/null @@ -1,80 +0,0 @@ -import logging - -from pip._internal.build_env import BuildEnvironment -from pip._internal.distributions.base import AbstractDistribution -from pip._internal.exceptions import InstallationError - -logger = logging.getLogger(__name__) - - -class SourceDistribution(AbstractDistribution): - """Represents a source distribution. - - The preparation step for these needs metadata for the packages to be - generated, either using PEP 517 or using the legacy `setup.py egg_info`. - - NOTE from @pradyunsg (14 June 2019) - I expect SourceDistribution class will need to be split into - `legacy_source` (setup.py based) and `source` (PEP 517 based) when we start - bringing logic for preparation out of InstallRequirement into this class. - """ - - def get_pkg_resources_distribution(self): - return self.req.get_dist() - - def prepare_distribution_metadata(self, finder, build_isolation): - # Prepare for building. We need to: - # 1. Load pyproject.toml (if it exists) - # 2. Set up the build environment - - self.req.load_pyproject_toml() - should_isolate = self.req.use_pep517 and build_isolation - - def _raise_conflicts(conflicting_with, conflicting_reqs): - raise InstallationError( - "Some build dependencies for %s conflict with %s: %s." % ( - self.req, conflicting_with, ', '.join( - '%s is incompatible with %s' % (installed, wanted) - for installed, wanted in sorted(conflicting)))) - - if should_isolate: - # Isolate in a BuildEnvironment and install the build-time - # requirements. 
- self.req.build_env = BuildEnvironment() - self.req.build_env.install_requirements( - finder, self.req.pyproject_requires, 'overlay', - "Installing build dependencies" - ) - conflicting, missing = self.req.build_env.check_requirements( - self.req.requirements_to_check - ) - if conflicting: - _raise_conflicts("PEP 517/518 supported requirements", - conflicting) - if missing: - logger.warning( - "Missing build requirements in pyproject.toml for %s.", - self.req, - ) - logger.warning( - "The project does not specify a build backend, and " - "pip cannot fall back to setuptools without %s.", - " and ".join(map(repr, sorted(missing))) - ) - # Install any extra build dependencies that the backend requests. - # This must be done in a second pass, as the pyproject.toml - # dependencies must be installed before we can call the backend. - with self.req.build_env: - # We need to have the env active when calling the hook. - self.req.spin_message = "Getting requirements to build wheel" - reqs = self.req.pep517_backend.get_requires_for_build_wheel() - conflicting, missing = self.req.build_env.check_requirements(reqs) - if conflicting: - _raise_conflicts("the backend dependencies", conflicting) - self.req.build_env.install_requirements( - finder, missing, 'normal', - "Installing backend dependencies" - ) - - self.req.prepare_metadata() - self.req.assert_source_matches_version() diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/source.pyc b/env/lib/python2.7/site-packages/pip/_internal/distributions/source.pyc deleted file mode 100644 index f2409150..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/distributions/source.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/wheel.py b/env/lib/python2.7/site-packages/pip/_internal/distributions/wheel.py deleted file mode 100644 index de7be38e..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/distributions/wheel.py +++ /dev/null @@ -1,17 +0,0 @@ -from pip._vendor import pkg_resources - -from pip._internal.distributions.base import AbstractDistribution - - -class WheelDistribution(AbstractDistribution): - """Represents a wheel distribution. - - This does not need any preparation as wheels can be directly unpacked. 
- """ - - def get_pkg_resources_distribution(self): - return list(pkg_resources.find_distributions( - self.req.source_dir))[0] - - def prepare_distribution_metadata(self, finder, build_isolation): - pass diff --git a/env/lib/python2.7/site-packages/pip/_internal/distributions/wheel.pyc b/env/lib/python2.7/site-packages/pip/_internal/distributions/wheel.pyc deleted file mode 100644 index 514e61c5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/distributions/wheel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/download.py b/env/lib/python2.7/site-packages/pip/_internal/download.py deleted file mode 100644 index fc1f4ddd..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/download.py +++ /dev/null @@ -1,1177 +0,0 @@ -from __future__ import absolute_import - -import cgi -import email.utils -import json -import logging -import mimetypes -import os -import platform -import re -import shutil -import sys - -from pip._vendor import requests, urllib3 -from pip._vendor.cachecontrol import CacheControlAdapter -from pip._vendor.cachecontrol.caches import FileCache -from pip._vendor.lockfile import LockError -from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter -from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth -from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response -from pip._vendor.requests.structures import CaseInsensitiveDict -from pip._vendor.requests.utils import get_netrc_auth -# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is -# why we ignore the type on this import -from pip._vendor.six.moves import xmlrpc_client # type: ignore -from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._vendor.six.moves.urllib import request as urllib_request - -import pip -from pip._internal.exceptions import HashMismatch, InstallationError -from pip._internal.models.index import PyPI -# Import ssl from compat so the initial import occurs in only one place. 
-from pip._internal.utils.compat import HAS_TLS, ssl -from pip._internal.utils.encoding import auto_decode -from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.glibc import libc_ver -from pip._internal.utils.marker_files import write_delete_marker_file -from pip._internal.utils.misc import ( - ARCHIVE_EXTENSIONS, ask, ask_input, ask_password, ask_path_exists, - backup_dir, consume, display_path, format_size, get_installed_version, - path_to_url, remove_auth_from_url, rmtree, split_auth_netloc_from_url, - splitext, unpack_file, -) -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import DownloadProgressProvider -from pip._internal.vcs import vcs - -if MYPY_CHECK_RUNNING: - from typing import ( - Optional, Tuple, Dict, IO, Text, Union - ) - from optparse import Values - from pip._internal.models.link import Link - from pip._internal.utils.hashes import Hashes - from pip._internal.vcs.versioncontrol import AuthInfo, VersionControl - - Credentials = Tuple[str, str, str] - - -__all__ = ['get_file_content', - 'is_url', 'url_to_path', 'path_to_url', - 'is_archive_file', 'unpack_vcs_link', - 'unpack_file_url', 'is_vcs_url', 'is_file_url', - 'unpack_http_url', 'unpack_url', - 'parse_content_disposition', 'sanitize_content_filename'] - - -logger = logging.getLogger(__name__) - - -try: - import keyring # noqa -except ImportError: - keyring = None -except Exception as exc: - logger.warning("Keyring is skipped due to an exception: %s", - str(exc)) - keyring = None - -# These are environment variables present when running under various -# CI systems. For each variable, some CI systems that use the variable -# are indicated. The collection was chosen so that for each of a number -# of popular systems, at least one of the environment variables is used. -# This list is used to provide some indication of and lower bound for -# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. -# For more background, see: https://github.com/pypa/pip/issues/5499 -CI_ENVIRONMENT_VARIABLES = ( - # Azure Pipelines - 'BUILD_BUILDID', - # Jenkins - 'BUILD_ID', - # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI - 'CI', - # Explicit environment variable. - 'PIP_IS_CI', -) - - -def looks_like_ci(): - # type: () -> bool - """ - Return whether it looks like pip is running under CI. - """ - # We don't use the method of checking for a tty (e.g. using isatty()) - # because some CI systems mimic a tty (e.g. Travis CI). Thus that - # method doesn't provide definitive information in either direction. - return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) - - -def user_agent(): - """ - Return a string representing the user agent. 
- """ - data = { - "installer": {"name": "pip", "version": pip.__version__}, - "python": platform.python_version(), - "implementation": { - "name": platform.python_implementation(), - }, - } - - if data["implementation"]["name"] == 'CPython': - data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == 'PyPy': - if sys.pypy_version_info.releaselevel == 'final': - pypy_version_info = sys.pypy_version_info[:3] - else: - pypy_version_info = sys.pypy_version_info - data["implementation"]["version"] = ".".join( - [str(x) for x in pypy_version_info] - ) - elif data["implementation"]["name"] == 'Jython': - # Complete Guess - data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == 'IronPython': - # Complete Guess - data["implementation"]["version"] = platform.python_version() - - if sys.platform.startswith("linux"): - from pip._vendor import distro - distro_infos = dict(filter( - lambda x: x[1], - zip(["name", "version", "id"], distro.linux_distribution()), - )) - libc = dict(filter( - lambda x: x[1], - zip(["lib", "version"], libc_ver()), - )) - if libc: - distro_infos["libc"] = libc - if distro_infos: - data["distro"] = distro_infos - - if sys.platform.startswith("darwin") and platform.mac_ver()[0]: - data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} - - if platform.system(): - data.setdefault("system", {})["name"] = platform.system() - - if platform.release(): - data.setdefault("system", {})["release"] = platform.release() - - if platform.machine(): - data["cpu"] = platform.machine() - - if HAS_TLS: - data["openssl_version"] = ssl.OPENSSL_VERSION - - setuptools_version = get_installed_version("setuptools") - if setuptools_version is not None: - data["setuptools_version"] = setuptools_version - - # Use None rather than False so as not to give the impression that - # pip knows it is not being run under CI. Rather, it is a null or - # inconclusive result. Also, we include some value rather than no - # value to make it easier to know that the check has been run. - data["ci"] = True if looks_like_ci() else None - - user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") - if user_data is not None: - data["user_data"] = user_data - - return "{data[installer][name]}/{data[installer][version]} {json}".format( - data=data, - json=json.dumps(data, separators=(",", ":"), sort_keys=True), - ) - - -def _get_keyring_auth(url, username): - """Return the tuple auth for a given url from keyring.""" - if not url or not keyring: - return None - - try: - try: - get_credential = keyring.get_credential - except AttributeError: - pass - else: - logger.debug("Getting credentials from keyring for %s", url) - cred = get_credential(url, username) - if cred is not None: - return cred.username, cred.password - return None - - if username: - logger.debug("Getting password from keyring for %s", url) - password = keyring.get_password(url, username) - if password: - return username, password - - except Exception as exc: - logger.warning("Keyring is skipped due to an exception: %s", - str(exc)) - - -class MultiDomainBasicAuth(AuthBase): - - def __init__(self, prompting=True, index_urls=None): - # type: (bool, Optional[Values]) -> None - self.prompting = prompting - self.index_urls = index_urls - self.passwords = {} # type: Dict[str, AuthInfo] - # When the user is prompted to enter credentials and keyring is - # available, we will offer to save them. 
If the user accepts, - # this value is set to the credentials they entered. After the - # request authenticates, the caller should call - # ``save_credentials`` to save these. - self._credentials_to_save = None # type: Optional[Credentials] - - def _get_index_url(self, url): - """Return the original index URL matching the requested URL. - - Cached or dynamically generated credentials may work against - the original index URL rather than just the netloc. - - The provided url should have had its username and password - removed already. If the original index url had credentials then - they will be included in the return value. - - Returns None if no matching index was found, or if --no-index - was specified by the user. - """ - if not url or not self.index_urls: - return None - - for u in self.index_urls: - prefix = remove_auth_from_url(u).rstrip("/") + "/" - if url.startswith(prefix): - return u - - def _get_new_credentials(self, original_url, allow_netrc=True, - allow_keyring=True): - """Find and return credentials for the specified URL.""" - # Split the credentials and netloc from the url. - url, netloc, url_user_password = split_auth_netloc_from_url( - original_url) - - # Start with the credentials embedded in the url - username, password = url_user_password - if username is not None and password is not None: - logger.debug("Found credentials in url for %s", netloc) - return url_user_password - - # Find a matching index url for this request - index_url = self._get_index_url(url) - if index_url: - # Split the credentials from the url. - index_info = split_auth_netloc_from_url(index_url) - if index_info: - index_url, _, index_url_user_password = index_info - logger.debug("Found index url %s", index_url) - - # If an index URL was found, try its embedded credentials - if index_url and index_url_user_password[0] is not None: - username, password = index_url_user_password - if username is not None and password is not None: - logger.debug("Found credentials in index url for %s", netloc) - return index_url_user_password - - # Get creds from netrc if we still don't have them - if allow_netrc: - netrc_auth = get_netrc_auth(original_url) - if netrc_auth: - logger.debug("Found credentials in netrc for %s", netloc) - return netrc_auth - - # If we don't have a password and keyring is available, use it. - if allow_keyring: - # The index url is more specific than the netloc, so try it first - kr_auth = (_get_keyring_auth(index_url, username) or - _get_keyring_auth(netloc, username)) - if kr_auth: - logger.debug("Found credentials in keyring for %s", netloc) - return kr_auth - - return username, password - - def _get_url_and_credentials(self, original_url): - """Return the credentials to use for the provided URL. - - If allowed, netrc and keyring may be used to obtain the - correct credentials. - - Returns (url_without_credentials, username, password). Note - that even if the original URL contains credentials, this - function may return a different username and password. - """ - url, netloc, _ = split_auth_netloc_from_url(original_url) - - # Use any stored credentials that we have for this netloc - username, password = self.passwords.get(netloc, (None, None)) - - if username is None and password is None: - # No stored credentials. Acquire new credentials without prompting - # the user. (e.g. 
from netrc, keyring, or the URL itself) - username, password = self._get_new_credentials(original_url) - - if username is not None or password is not None: - # Convert the username and password if they're None, so that - # this netloc will show up as "cached" in the conditional above. - # Further, HTTPBasicAuth doesn't accept None, so it makes sense to - # cache the value that is going to be used. - username = username or "" - password = password or "" - - # Store any acquired credentials. - self.passwords[netloc] = (username, password) - - assert ( - # Credentials were found - (username is not None and password is not None) or - # Credentials were not found - (username is None and password is None) - ), "Could not load credentials from url: {}".format(original_url) - - return url, username, password - - def __call__(self, req): - # Get credentials for this request - url, username, password = self._get_url_and_credentials(req.url) - - # Set the url of the request to the url without any credentials - req.url = url - - if username is not None and password is not None: - # Send the basic auth with this request - req = HTTPBasicAuth(username, password)(req) - - # Attach a hook to handle 401 responses - req.register_hook("response", self.handle_401) - - return req - - # Factored out to allow for easy patching in tests - def _prompt_for_password(self, netloc): - username = ask_input("User for %s: " % netloc) - if not username: - return None, None - auth = _get_keyring_auth(netloc, username) - if auth: - return auth[0], auth[1], False - password = ask_password("Password: ") - return username, password, True - - # Factored out to allow for easy patching in tests - def _should_save_password_to_keyring(self): - if not keyring: - return False - return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" - - def handle_401(self, resp, **kwargs): - # We only care about 401 responses, anything else we want to just - # pass through the actual response - if resp.status_code != 401: - return resp - - # We are not able to prompt the user so simply return the response - if not self.prompting: - return resp - - parsed = urllib_parse.urlparse(resp.url) - - # Prompt the user for a new username and password - username, password, save = self._prompt_for_password(parsed.netloc) - - # Store the new username and password to use for future requests - self._credentials_to_save = None - if username is not None and password is not None: - self.passwords[parsed.netloc] = (username, password) - - # Prompt to save the password to keyring - if save and self._should_save_password_to_keyring(): - self._credentials_to_save = (parsed.netloc, username, password) - - # Consume content and release the original connection to allow our new - # request to reuse the same one. - resp.content - resp.raw.release_conn() - - # Add our new username and password to the request - req = HTTPBasicAuth(username or "", password or "")(resp.request) - req.register_hook("response", self.warn_on_401) - - # On successful request, save the credentials that were used to - # keyring. (Note that if the user responded "no" above, this member - # is not set and nothing will be saved.) 
- if self._credentials_to_save: - req.register_hook("response", self.save_credentials) - - # Send our new request - new_resp = resp.connection.send(req, **kwargs) - new_resp.history.append(resp) - - return new_resp - - def warn_on_401(self, resp, **kwargs): - """Response callback to warn about incorrect credentials.""" - if resp.status_code == 401: - logger.warning('401 Error, Credentials not correct for %s', - resp.request.url) - - def save_credentials(self, resp, **kwargs): - """Response callback to save credentials on success.""" - assert keyring is not None, "should never reach here without keyring" - if not keyring: - return - - creds = self._credentials_to_save - self._credentials_to_save = None - if creds and resp.status_code < 400: - try: - logger.info('Saving credentials to keyring') - keyring.set_password(*creds) - except Exception: - logger.exception('Failed to save credentials') - - -class LocalFSAdapter(BaseAdapter): - - def send(self, request, stream=None, timeout=None, verify=None, cert=None, - proxies=None): - pathname = url_to_path(request.url) - - resp = Response() - resp.status_code = 200 - resp.url = request.url - - try: - stats = os.stat(pathname) - except OSError as exc: - resp.status_code = 404 - resp.raw = exc - else: - modified = email.utils.formatdate(stats.st_mtime, usegmt=True) - content_type = mimetypes.guess_type(pathname)[0] or "text/plain" - resp.headers = CaseInsensitiveDict({ - "Content-Type": content_type, - "Content-Length": stats.st_size, - "Last-Modified": modified, - }) - - resp.raw = open(pathname, "rb") - resp.close = resp.raw.close - - return resp - - def close(self): - pass - - -class SafeFileCache(FileCache): - """ - A file based cache which is safe to use even when the target directory may - not be accessible or writable. - """ - - def __init__(self, *args, **kwargs): - super(SafeFileCache, self).__init__(*args, **kwargs) - - # Check to ensure that the directory containing our cache directory - # is owned by the user current executing pip. If it does not exist - # we will check the parent directory until we find one that does exist. - # If it is not owned by the user executing pip then we will disable - # the cache and log a warning. - if not check_path_owner(self.directory): - logger.warning( - "The directory '%s' or its parent directory is not owned by " - "the current user and the cache has been disabled. Please " - "check the permissions and owner of that directory. If " - "executing pip with sudo, you may want sudo's -H flag.", - self.directory, - ) - - # Set our directory to None to disable the Cache - self.directory = None - - def get(self, *args, **kwargs): - # If we don't have a directory, then the cache should be a no-op. - if self.directory is None: - return - - try: - return super(SafeFileCache, self).get(*args, **kwargs) - except (LockError, OSError, IOError): - # We intentionally silence this error, if we can't access the cache - # then we can just skip caching and process the request as if - # caching wasn't enabled. - pass - - def set(self, *args, **kwargs): - # If we don't have a directory, then the cache should be a no-op. - if self.directory is None: - return - - try: - return super(SafeFileCache, self).set(*args, **kwargs) - except (LockError, OSError, IOError): - # We intentionally silence this error, if we can't access the cache - # then we can just skip caching and process the request as if - # caching wasn't enabled. 
- pass - - def delete(self, *args, **kwargs): - # If we don't have a directory, then the cache should be a no-op. - if self.directory is None: - return - - try: - return super(SafeFileCache, self).delete(*args, **kwargs) - except (LockError, OSError, IOError): - # We intentionally silence this error, if we can't access the cache - # then we can just skip caching and process the request as if - # caching wasn't enabled. - pass - - -class InsecureHTTPAdapter(HTTPAdapter): - - def cert_verify(self, conn, url, verify, cert): - conn.cert_reqs = 'CERT_NONE' - conn.ca_certs = None - - -class PipSession(requests.Session): - - timeout = None # type: Optional[int] - - def __init__(self, *args, **kwargs): - retries = kwargs.pop("retries", 0) - cache = kwargs.pop("cache", None) - insecure_hosts = kwargs.pop("insecure_hosts", []) - index_urls = kwargs.pop("index_urls", None) - - super(PipSession, self).__init__(*args, **kwargs) - - # Attach our User Agent to the request - self.headers["User-Agent"] = user_agent() - - # Attach our Authentication handler to the session - self.auth = MultiDomainBasicAuth(index_urls=index_urls) - - # Create our urllib3.Retry instance which will allow us to customize - # how we handle retries. - retries = urllib3.Retry( - # Set the total number of retries that a particular request can - # have. - total=retries, - - # A 503 error from PyPI typically means that the Fastly -> Origin - # connection got interrupted in some way. A 503 error in general - # is typically considered a transient error so we'll go ahead and - # retry it. - # A 500 may indicate transient error in Amazon S3 - # A 520 or 527 - may indicate transient error in CloudFlare - status_forcelist=[500, 503, 520, 527], - - # Add a small amount of back off between failed requests in - # order to prevent hammering the service. - backoff_factor=0.25, - ) - - # We want to _only_ cache responses on securely fetched origins. We do - # this because we can't validate the response of an insecurely fetched - # origin, and we don't want someone to be able to poison the cache and - # require manual eviction from the cache to fix it. - if cache: - secure_adapter = CacheControlAdapter( - cache=SafeFileCache(cache, use_dir_lock=True), - max_retries=retries, - ) - else: - secure_adapter = HTTPAdapter(max_retries=retries) - - # Our Insecure HTTPAdapter disables HTTPS validation. It does not - # support caching (see above) so we'll use it for all http:// URLs as - # well as any https:// host that we've marked as ignoring TLS errors - # for. - insecure_adapter = InsecureHTTPAdapter(max_retries=retries) - # Save this for later use in add_insecure_host(). - self._insecure_adapter = insecure_adapter - - self.mount("https://", secure_adapter) - self.mount("http://", insecure_adapter) - - # Enable file:// urls - self.mount("file://", LocalFSAdapter()) - - # We want to use a non-validating adapter for any requests which are - # deemed insecure. 
- for host in insecure_hosts: - self.add_insecure_host(host) - - def add_insecure_host(self, host): - # type: (str) -> None - self.mount('https://{}/'.format(host), self._insecure_adapter) - - def request(self, method, url, *args, **kwargs): - # Allow setting a default timeout on a session - kwargs.setdefault("timeout", self.timeout) - - # Dispatch the actual request - return super(PipSession, self).request(method, url, *args, **kwargs) - - -def get_file_content(url, comes_from=None, session=None): - # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text] - """Gets the content of a file; it may be a filename, file: URL, or - http: URL. Returns (location, content). Content is unicode. - - :param url: File path or url. - :param comes_from: Origin description of requirements. - :param session: Instance of pip.download.PipSession. - """ - if session is None: - raise TypeError( - "get_file_content() missing 1 required keyword argument: 'session'" - ) - - match = _scheme_re.search(url) - if match: - scheme = match.group(1).lower() - if (scheme == 'file' and comes_from and - comes_from.startswith('http')): - raise InstallationError( - 'Requirements file %s references URL %s, which is local' - % (comes_from, url)) - if scheme == 'file': - path = url.split(':', 1)[1] - path = path.replace('\\', '/') - match = _url_slash_drive_re.match(path) - if match: - path = match.group(1) + ':' + path.split('|', 1)[1] - path = urllib_parse.unquote(path) - if path.startswith('/'): - path = '/' + path.lstrip('/') - url = path - else: - # FIXME: catch some errors - resp = session.get(url) - resp.raise_for_status() - return resp.url, resp.text - try: - with open(url, 'rb') as f: - content = auto_decode(f.read()) - except IOError as exc: - raise InstallationError( - 'Could not open requirements file: %s' % str(exc) - ) - return url, content - - -_scheme_re = re.compile(r'^(http|https|file):', re.I) -_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) - - -def is_url(name): - # type: (Union[str, Text]) -> bool - """Returns true if the name looks like a URL""" - if ':' not in name: - return False - scheme = name.split(':', 1)[0].lower() - return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes - - -def url_to_path(url): - # type: (str) -> str - """ - Convert a file: URL to a path. - """ - assert url.startswith('file:'), ( - "You can only turn file: urls into filenames (not %r)" % url) - - _, netloc, path, _, _ = urllib_parse.urlsplit(url) - - if not netloc or netloc == 'localhost': - # According to RFC 8089, same as empty authority. - netloc = '' - elif sys.platform == 'win32': - # If we have a UNC path, prepend UNC share notation. - netloc = '\\\\' + netloc - else: - raise ValueError( - 'non-local file URIs are not supported on this platform: %r' - % url - ) - - path = urllib_request.url2pathname(netloc + path) - return path - - -def is_archive_file(name): - # type: (str) -> bool - """Return True if `name` is a considered as an archive file.""" - ext = splitext(name)[1].lower() - if ext in ARCHIVE_EXTENSIONS: - return True - return False - - -def unpack_vcs_link(link, location): - vcs_backend = _get_used_vcs_backend(link) - vcs_backend.unpack(location, url=link.url) - - -def _get_used_vcs_backend(link): - # type: (Link) -> Optional[VersionControl] - """ - Return a VersionControl object or None. 
- """ - for vcs_backend in vcs.backends: - if link.scheme in vcs_backend.schemes: - return vcs_backend - return None - - -def is_vcs_url(link): - # type: (Link) -> bool - return bool(_get_used_vcs_backend(link)) - - -def is_file_url(link): - # type: (Link) -> bool - return link.url.lower().startswith('file:') - - -def is_dir_url(link): - # type: (Link) -> bool - """Return whether a file:// Link points to a directory. - - ``link`` must not have any other scheme but file://. Call is_file_url() - first. - - """ - link_path = url_to_path(link.url_without_fragment) - return os.path.isdir(link_path) - - -def _progress_indicator(iterable, *args, **kwargs): - return iterable - - -def _download_url( - resp, # type: Response - link, # type: Link - content_file, # type: IO - hashes, # type: Optional[Hashes] - progress_bar # type: str -): - # type: (...) -> None - try: - total_length = int(resp.headers['content-length']) - except (ValueError, KeyError, TypeError): - total_length = 0 - - cached_resp = getattr(resp, "from_cache", False) - if logger.getEffectiveLevel() > logging.INFO: - show_progress = False - elif cached_resp: - show_progress = False - elif total_length > (40 * 1000): - show_progress = True - elif not total_length: - show_progress = True - else: - show_progress = False - - show_url = link.show_url - - def resp_read(chunk_size): - try: - # Special case for urllib3. - for chunk in resp.raw.stream( - chunk_size, - # We use decode_content=False here because we don't - # want urllib3 to mess with the raw bytes we get - # from the server. If we decompress inside of - # urllib3 then we cannot verify the checksum - # because the checksum will be of the compressed - # file. This breakage will only occur if the - # server adds a Content-Encoding header, which - # depends on how the server was configured: - # - Some servers will notice that the file isn't a - # compressible file and will leave the file alone - # and with an empty Content-Encoding - # - Some servers will notice that the file is - # already compressed and will leave the file - # alone and will add a Content-Encoding: gzip - # header - # - Some servers won't notice anything at all and - # will take a file that's already been compressed - # and compress it again and set the - # Content-Encoding: gzip header - # - # By setting this not to decode automatically we - # hope to eliminate problems with the second case. - decode_content=False): - yield chunk - except AttributeError: - # Standard file-like object. 
- while True: - chunk = resp.raw.read(chunk_size) - if not chunk: - break - yield chunk - - def written_chunks(chunks): - for chunk in chunks: - content_file.write(chunk) - yield chunk - - progress_indicator = _progress_indicator - - if link.netloc == PyPI.netloc: - url = show_url - else: - url = link.url_without_fragment - - if show_progress: # We don't show progress on cached responses - progress_indicator = DownloadProgressProvider(progress_bar, - max=total_length) - if total_length: - logger.info("Downloading %s (%s)", url, format_size(total_length)) - else: - logger.info("Downloading %s", url) - elif cached_resp: - logger.info("Using cached %s", url) - else: - logger.info("Downloading %s", url) - - logger.debug('Downloading from URL %s', link) - - downloaded_chunks = written_chunks( - progress_indicator( - resp_read(CONTENT_CHUNK_SIZE), - CONTENT_CHUNK_SIZE - ) - ) - if hashes: - hashes.check_against_chunks(downloaded_chunks) - else: - consume(downloaded_chunks) - - -def _copy_file(filename, location, link): - copy = True - download_location = os.path.join(location, link.filename) - if os.path.exists(download_location): - response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' % - display_path(download_location), ('i', 'w', 'b', 'a')) - if response == 'i': - copy = False - elif response == 'w': - logger.warning('Deleting %s', display_path(download_location)) - os.remove(download_location) - elif response == 'b': - dest_file = backup_dir(download_location) - logger.warning( - 'Backing up %s to %s', - display_path(download_location), - display_path(dest_file), - ) - shutil.move(download_location, dest_file) - elif response == 'a': - sys.exit(-1) - if copy: - shutil.copy(filename, download_location) - logger.info('Saved %s', display_path(download_location)) - - -def unpack_http_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - session=None, # type: Optional[PipSession] - hashes=None, # type: Optional[Hashes] - progress_bar="on" # type: str -): - # type: (...) -> None - if session is None: - raise TypeError( - "unpack_http_url() missing 1 required keyword argument: 'session'" - ) - - with TempDirectory(kind="unpack") as temp_dir: - # If a download dir is specified, is the file already downloaded there? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - content_type = mimetypes.guess_type(from_path)[0] - else: - # let's download to a tmp dir - from_path, content_type = _download_http_url(link, - session, - temp_dir.path, - hashes, - progress_bar) - - # unpack the archive to the build dir location. even when only - # downloading archives, they have to be unpacked to parse dependencies - unpack_file(from_path, location, content_type, link) - - # a download dir is specified; let's copy the archive there - if download_dir and not already_downloaded_path: - _copy_file(from_path, download_dir, link) - - if not already_downloaded_path: - os.unlink(from_path) - - -def unpack_file_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - hashes=None # type: Optional[Hashes] -): - # type: (...) -> None - """Unpack link into location. - - If download_dir is provided and link points to a file, make a copy - of the link file inside download_dir. 
- """ - link_path = url_to_path(link.url_without_fragment) - - # If it's a url to a local directory - if is_dir_url(link): - if os.path.isdir(location): - rmtree(location) - shutil.copytree(link_path, location, symlinks=True) - if download_dir: - logger.info('Link is a directory, ignoring download_dir') - return - - # If --require-hashes is off, `hashes` is either empty, the - # link's embedded hash, or MissingHashes; it is required to - # match. If --require-hashes is on, we are satisfied by any - # hash in `hashes` matching: a URL-based or an option-based - # one; no internet-sourced hash will be in `hashes`. - if hashes: - hashes.check_against_path(link_path) - - # If a download dir is specified, is the file already there and valid? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - else: - from_path = link_path - - content_type = mimetypes.guess_type(from_path)[0] - - # unpack the archive to the build dir location. even when only downloading - # archives, they have to be unpacked to parse dependencies - unpack_file(from_path, location, content_type, link) - - # a download dir is specified and not already downloaded - if download_dir and not already_downloaded_path: - _copy_file(from_path, download_dir, link) - - -class PipXmlrpcTransport(xmlrpc_client.Transport): - """Provide a `xmlrpclib.Transport` implementation via a `PipSession` - object. - """ - - def __init__(self, index_url, session, use_datetime=False): - xmlrpc_client.Transport.__init__(self, use_datetime) - index_parts = urllib_parse.urlparse(index_url) - self._scheme = index_parts.scheme - self._session = session - - def request(self, host, handler, request_body, verbose=False): - parts = (self._scheme, host, handler, None, None, None) - url = urllib_parse.urlunparse(parts) - try: - headers = {'Content-Type': 'text/xml'} - response = self._session.post(url, data=request_body, - headers=headers, stream=True) - response.raise_for_status() - self.verbose = verbose - return self.parse_response(response.raw) - except requests.HTTPError as exc: - logger.critical( - "HTTP error %s while getting %s", - exc.response.status_code, url, - ) - raise - - -def unpack_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - only_download=False, # type: bool - session=None, # type: Optional[PipSession] - hashes=None, # type: Optional[Hashes] - progress_bar="on" # type: str -): - # type: (...) -> None - """Unpack link. - If link is a VCS link: - if only_download, export into download_dir and ignore location - else unpack into location - for other types of link: - - unpack into location - - if download_dir, copy the file into download_dir - - if only_download, mark location for deletion - - :param hashes: A Hashes object, one of whose embedded hashes must match, - or HashMismatch will be raised. If the Hashes is empty, no matches are - required, and unhashable types of requirements (like VCS ones, which - would ordinarily raise HashUnsupported) are allowed. 
- """ - # non-editable vcs urls - if is_vcs_url(link): - unpack_vcs_link(link, location) - - # file urls - elif is_file_url(link): - unpack_file_url(link, location, download_dir, hashes=hashes) - - # http urls - else: - if session is None: - session = PipSession() - - unpack_http_url( - link, - location, - download_dir, - session, - hashes=hashes, - progress_bar=progress_bar - ) - if only_download: - write_delete_marker_file(location) - - -def sanitize_content_filename(filename): - # type: (str) -> str - """ - Sanitize the "filename" value from a Content-Disposition header. - """ - return os.path.basename(filename) - - -def parse_content_disposition(content_disposition, default_filename): - # type: (str, str) -> str - """ - Parse the "filename" value from a Content-Disposition header, and - return the default filename if the result is empty. - """ - _type, params = cgi.parse_header(content_disposition) - filename = params.get('filename') - if filename: - # We need to sanitize the filename to prevent directory traversal - # in case the filename contains ".." path parts. - filename = sanitize_content_filename(filename) - return filename or default_filename - - -def _download_http_url( - link, # type: Link - session, # type: PipSession - temp_dir, # type: str - hashes, # type: Optional[Hashes] - progress_bar # type: str -): - # type: (...) -> Tuple[str, str] - """Download link url into temp_dir using provided session""" - target_url = link.url.split('#', 1)[0] - try: - resp = session.get( - target_url, - # We use Accept-Encoding: identity here because requests - # defaults to accepting compressed responses. This breaks in - # a variety of ways depending on how the server is configured. - # - Some servers will notice that the file isn't a compressible - # file and will leave the file alone and with an empty - # Content-Encoding - # - Some servers will notice that the file is already - # compressed and will leave the file alone and will add a - # Content-Encoding: gzip header - # - Some servers won't notice anything at all and will take - # a file that's already been compressed and compress it again - # and set the Content-Encoding: gzip header - # By setting this to request only the identity encoding We're - # hoping to eliminate the third case. Hopefully there does not - # exist a server which when given a file will notice it is - # already compressed and that you're not asking for a - # compressed file and will then decompress it before sending - # because if that's the case I don't think it'll ever be - # possible to make this work. 
- headers={"Accept-Encoding": "identity"}, - stream=True, - ) - resp.raise_for_status() - except requests.HTTPError as exc: - logger.critical( - "HTTP error %s while getting %s", exc.response.status_code, link, - ) - raise - - content_type = resp.headers.get('content-type', '') - filename = link.filename # fallback - # Have a look at the Content-Disposition header for a better guess - content_disposition = resp.headers.get('content-disposition') - if content_disposition: - filename = parse_content_disposition(content_disposition, filename) - ext = splitext(filename)[1] # type: Optional[str] - if not ext: - ext = mimetypes.guess_extension(content_type) - if ext: - filename += ext - if not ext and link.url != resp.url: - ext = os.path.splitext(resp.url)[1] - if ext: - filename += ext - file_path = os.path.join(temp_dir, filename) - with open(file_path, 'wb') as content_file: - _download_url(resp, link, content_file, hashes, progress_bar) - return file_path, content_type - - -def _check_download_dir(link, download_dir, hashes): - # type: (Link, str, Optional[Hashes]) -> Optional[str] - """ Check download_dir for previously downloaded file with correct hash - If a correct file is found return its path else None - """ - download_path = os.path.join(download_dir, link.filename) - if os.path.exists(download_path): - # If already downloaded, does its hash match? - logger.info('File was already downloaded %s', download_path) - if hashes: - try: - hashes.check_against_path(download_path) - except HashMismatch: - logger.warning( - 'Previously-downloaded file %s has bad hash. ' - 'Re-downloading.', - download_path - ) - os.unlink(download_path) - return None - return download_path - return None diff --git a/env/lib/python2.7/site-packages/pip/_internal/download.pyc b/env/lib/python2.7/site-packages/pip/_internal/download.pyc deleted file mode 100644 index e5df1d6a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/download.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/exceptions.py b/env/lib/python2.7/site-packages/pip/_internal/exceptions.py deleted file mode 100644 index 096adcd6..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/exceptions.py +++ /dev/null @@ -1,305 +0,0 @@ -"""Exceptions used throughout package""" -from __future__ import absolute_import - -from itertools import chain, groupby, repeat - -from pip._vendor.six import iteritems - -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Optional - from pip._vendor.pkg_resources import Distribution - from pip._internal.req.req_install import InstallRequirement - - -class PipError(Exception): - """Base pip exception""" - - -class ConfigurationError(PipError): - """General exception in configuration""" - - -class InstallationError(PipError): - """General exception during installation""" - - -class UninstallationError(PipError): - """General exception during uninstallation""" - - -class NoneMetadataError(PipError): - """ - Raised when accessing "METADATA" or "PKG-INFO" metadata for a - pip._vendor.pkg_resources.Distribution object and - `dist.has_metadata('METADATA')` returns True but - `dist.get_metadata('METADATA')` returns None (and similarly for - "PKG-INFO"). - """ - - def __init__(self, dist, metadata_name): - # type: (Distribution, str) -> None - """ - :param dist: A Distribution object. - :param metadata_name: The name of the metadata being accessed - (can be "METADATA" or "PKG-INFO"). 
- """ - self.dist = dist - self.metadata_name = metadata_name - - def __str__(self): - # type: () -> str - # Use `dist` in the error message because its stringification - # includes more information, like the version and location. - return ( - 'None {} metadata found for distribution: {}'.format( - self.metadata_name, self.dist, - ) - ) - - -class DistributionNotFound(InstallationError): - """Raised when a distribution cannot be found to satisfy a requirement""" - - -class RequirementsFileParseError(InstallationError): - """Raised when a general error occurs parsing a requirements file line.""" - - -class BestVersionAlreadyInstalled(PipError): - """Raised when the most up-to-date version of a package is already - installed.""" - - -class BadCommand(PipError): - """Raised when virtualenv or a command is not found""" - - -class CommandError(PipError): - """Raised when there is an error in command-line arguments""" - - -class PreviousBuildDirError(PipError): - """Raised when there's a previous conflicting build directory""" - - -class InvalidWheelFilename(InstallationError): - """Invalid wheel filename.""" - - -class UnsupportedWheel(InstallationError): - """Unsupported wheel.""" - - -class HashErrors(InstallationError): - """Multiple HashError instances rolled into one for reporting""" - - def __init__(self): - self.errors = [] - - def append(self, error): - self.errors.append(error) - - def __str__(self): - lines = [] - self.errors.sort(key=lambda e: e.order) - for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): - lines.append(cls.head) - lines.extend(e.body() for e in errors_of_cls) - if lines: - return '\n'.join(lines) - - def __nonzero__(self): - return bool(self.errors) - - def __bool__(self): - return self.__nonzero__() - - -class HashError(InstallationError): - """ - A failure to verify a package against known-good hashes - - :cvar order: An int sorting hash exception classes by difficulty of - recovery (lower being harder), so the user doesn't bother fretting - about unpinned packages when he has deeper issues, like VCS - dependencies, to deal with. Also keeps error reports in a - deterministic order. - :cvar head: A section heading for display above potentially many - exceptions of this kind - :ivar req: The InstallRequirement that triggered this error. This is - pasted on after the exception is instantiated, because it's not - typically available earlier. - - """ - req = None # type: Optional[InstallRequirement] - head = '' - - def body(self): - """Return a summary of me for display under the heading. - - This default implementation simply prints a description of the - triggering requirement. - - :param req: The InstallRequirement that provoked this error, with - populate_link() having already been called - - """ - return ' %s' % self._requirement_name() - - def __str__(self): - return '%s\n%s' % (self.head, self.body()) - - def _requirement_name(self): - """Return a description of the requirement that triggered me. 
- - This default implementation returns long description of the req, with - line numbers - - """ - return str(self.req) if self.req else 'unknown package' - - -class VcsHashUnsupported(HashError): - """A hash was provided for a version-control-system-based requirement, but - we don't have a method for hashing those.""" - - order = 0 - head = ("Can't verify hashes for these requirements because we don't " - "have a way to hash version control repositories:") - - -class DirectoryUrlHashUnsupported(HashError): - """A hash was provided for a version-control-system-based requirement, but - we don't have a method for hashing those.""" - - order = 1 - head = ("Can't verify hashes for these file:// requirements because they " - "point to directories:") - - -class HashMissing(HashError): - """A hash was needed for a requirement but is absent.""" - - order = 2 - head = ('Hashes are required in --require-hashes mode, but they are ' - 'missing from some requirements. Here is a list of those ' - 'requirements along with the hashes their downloaded archives ' - 'actually had. Add lines like these to your requirements files to ' - 'prevent tampering. (If you did not enable --require-hashes ' - 'manually, note that it turns on automatically when any package ' - 'has a hash.)') - - def __init__(self, gotten_hash): - """ - :param gotten_hash: The hash of the (possibly malicious) archive we - just downloaded - """ - self.gotten_hash = gotten_hash - - def body(self): - # Dodge circular import. - from pip._internal.utils.hashes import FAVORITE_HASH - - package = None - if self.req: - # In the case of URL-based requirements, display the original URL - # seen in the requirements file rather than the package name, - # so the output can be directly copied into the requirements file. - package = (self.req.original_link if self.req.original_link - # In case someone feeds something downright stupid - # to InstallRequirement's constructor. - else getattr(self.req, 'req', None)) - return ' %s --hash=%s:%s' % (package or 'unknown package', - FAVORITE_HASH, - self.gotten_hash) - - -class HashUnpinned(HashError): - """A requirement had a hash specified but was not pinned to a specific - version.""" - - order = 3 - head = ('In --require-hashes mode, all requirements must have their ' - 'versions pinned with ==. These do not:') - - -class HashMismatch(HashError): - """ - Distribution file hash values don't match. - - :ivar package_name: The name of the package that triggered the hash - mismatch. Feel free to write to this after the exception is raise to - improve its error message. - - """ - order = 4 - head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS ' - 'FILE. If you have updated the package versions, please update ' - 'the hashes. Otherwise, examine the package contents carefully; ' - 'someone may have tampered with them.') - - def __init__(self, allowed, gots): - """ - :param allowed: A dict of algorithm names pointing to lists of allowed - hex digests - :param gots: A dict of algorithm names pointing to hashes we - actually got from the files under suspicion - """ - self.allowed = allowed - self.gots = gots - - def body(self): - return ' %s:\n%s' % (self._requirement_name(), - self._hash_comparison()) - - def _hash_comparison(self): - """ - Return a comparison of actual and expected hash values. 
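# Illustrative sketch, separate from the deleted module: how the digest that
# HashMissing reports can be produced with the stdlib, formatted as the kind
# of line the message above suggests adding to a requirements file. The
# requirement string and archive path are placeholders.
import hashlib

def requirements_hash_line(requirement, archive_path):
    digest = hashlib.sha256()
    with open(archive_path, "rb") as f:
        for block in iter(lambda: f.read(8192), b""):
            digest.update(block)
    return "%s --hash=sha256:%s" % (requirement, digest.hexdigest())

# print(requirements_hash_line("foo==1.0", "foo-1.0.tar.gz"))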
- - Example:: - - Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde - or 123451234512345123451234512345123451234512345 - Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef - - """ - def hash_then_or(hash_name): - # For now, all the decent hashes have 6-char names, so we can get - # away with hard-coding space literals. - return chain([hash_name], repeat(' or')) - - lines = [] - for hash_name, expecteds in iteritems(self.allowed): - prefix = hash_then_or(hash_name) - lines.extend((' Expected %s %s' % (next(prefix), e)) - for e in expecteds) - lines.append(' Got %s\n' % - self.gots[hash_name].hexdigest()) - prefix = ' or' - return '\n'.join(lines) - - -class UnsupportedPythonVersion(InstallationError): - """Unsupported python version according to Requires-Python package - metadata.""" - - -class ConfigurationFileCouldNotBeLoaded(ConfigurationError): - """When there are errors while loading a configuration file - """ - - def __init__(self, reason="could not be loaded", fname=None, error=None): - super(ConfigurationFileCouldNotBeLoaded, self).__init__(error) - self.reason = reason - self.fname = fname - self.error = error - - def __str__(self): - if self.fname is not None: - message_part = " in {}.".format(self.fname) - else: - assert self.error is not None - message_part = ".\n{}\n".format(self.error.message) - return "Configuration file {}{}".format(self.reason, message_part) diff --git a/env/lib/python2.7/site-packages/pip/_internal/exceptions.pyc b/env/lib/python2.7/site-packages/pip/_internal/exceptions.pyc deleted file mode 100644 index 66abd423..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/exceptions.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/index.py b/env/lib/python2.7/site-packages/pip/_internal/index.py deleted file mode 100644 index a1aaad59..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/index.py +++ /dev/null @@ -1,1508 +0,0 @@ -"""Routines related to PyPI, indexes""" -from __future__ import absolute_import - -import cgi -import itertools -import logging -import mimetypes -import os -import re - -from pip._vendor import html5lib, requests, six -from pip._vendor.distlib.compat import unescape -from pip._vendor.packaging import specifiers -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.packaging.version import parse as parse_version -from pip._vendor.requests.exceptions import HTTPError, RetryError, SSLError -from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._vendor.six.moves.urllib import request as urllib_request - -from pip._internal.download import is_url, url_to_path -from pip._internal.exceptions import ( - BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename, - UnsupportedWheel, -) -from pip._internal.models.candidate import InstallationCandidate -from pip._internal.models.format_control import FormatControl -from pip._internal.models.link import Link -from pip._internal.models.selection_prefs import SelectionPreferences -from pip._internal.models.target_python import TargetPython -from pip._internal.utils.compat import ipaddress -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ( - ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, path_to_url, - redact_password_from_url, -) -from pip._internal.utils.packaging import check_requires_python -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import Wheel - -if MYPY_CHECK_RUNNING: - from 
logging import Logger - from typing import ( - Any, Callable, FrozenSet, Iterable, Iterator, List, MutableMapping, - Optional, Sequence, Set, Text, Tuple, Union, - ) - import xml.etree.ElementTree - from pip._vendor.packaging.version import _BaseVersion - from pip._vendor.requests import Response - from pip._internal.models.search_scope import SearchScope - from pip._internal.req import InstallRequirement - from pip._internal.download import PipSession - from pip._internal.pep425tags import Pep425Tag - from pip._internal.utils.hashes import Hashes - - BuildTag = Tuple[Any, ...] # either empty tuple or Tuple[int, str] - CandidateSortingKey = ( - Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]] - ) - HTMLElement = xml.etree.ElementTree.Element - SecureOrigin = Tuple[str, str, Optional[str]] - - -__all__ = ['FormatControl', 'FoundCandidates', 'PackageFinder'] - - -SECURE_ORIGINS = [ - # protocol, hostname, port - # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) - ("https", "*", "*"), - ("*", "localhost", "*"), - ("*", "127.0.0.0/8", "*"), - ("*", "::1/128", "*"), - ("file", "*", None), - # ssh is always secure. - ("ssh", "*", "*"), -] # type: List[SecureOrigin] - - -logger = logging.getLogger(__name__) - - -def _match_vcs_scheme(url): - # type: (str) -> Optional[str] - """Look for VCS schemes in the URL. - - Returns the matched VCS scheme, or None if there's no match. - """ - from pip._internal.vcs import vcs - for scheme in vcs.schemes: - if url.lower().startswith(scheme) and url[len(scheme)] in '+:': - return scheme - return None - - -def _is_url_like_archive(url): - # type: (str) -> bool - """Return whether the URL looks like an archive. - """ - filename = Link(url).filename - for bad_ext in ARCHIVE_EXTENSIONS: - if filename.endswith(bad_ext): - return True - return False - - -class _NotHTML(Exception): - def __init__(self, content_type, request_desc): - # type: (str, str) -> None - super(_NotHTML, self).__init__(content_type, request_desc) - self.content_type = content_type - self.request_desc = request_desc - - -def _ensure_html_header(response): - # type: (Response) -> None - """Check the Content-Type header to ensure the response contains HTML. - - Raises `_NotHTML` if the content type is not text/html. - """ - content_type = response.headers.get("Content-Type", "") - if not content_type.lower().startswith("text/html"): - raise _NotHTML(content_type, response.request.method) - - -class _NotHTTP(Exception): - pass - - -def _ensure_html_response(url, session): - # type: (str, PipSession) -> None - """Send a HEAD request to the URL, and ensure the response contains HTML. - - Raises `_NotHTTP` if the URL is not available for a HEAD request, or - `_NotHTML` if the content type is not text/html. - """ - scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) - if scheme not in {'http', 'https'}: - raise _NotHTTP() - - resp = session.head(url, allow_redirects=True) - resp.raise_for_status() - - _ensure_html_header(resp) - - -def _get_html_response(url, session): - # type: (str, PipSession) -> Response - """Access an HTML page with GET, and return the response. - - This consists of three parts: - - 1. If the URL looks suspiciously like an archive, send a HEAD first to - check the Content-Type is HTML, to avoid downloading a large file. - Raise `_NotHTTP` if the content type cannot be determined, or - `_NotHTML` if it is not HTML. - 2. Actually perform the request. Raise HTTP exceptions on network failures. - 3. 
Check the Content-Type header to make sure we got HTML, and raise - `_NotHTML` otherwise. - """ - if _is_url_like_archive(url): - _ensure_html_response(url, session=session) - - logger.debug('Getting page %s', redact_password_from_url(url)) - - resp = session.get( - url, - headers={ - "Accept": "text/html", - # We don't want to blindly returned cached data for - # /simple/, because authors generally expecting that - # twine upload && pip install will function, but if - # they've done a pip install in the last ~10 minutes - # it won't. Thus by setting this to zero we will not - # blindly use any cached data, however the benefit of - # using max-age=0 instead of no-cache, is that we will - # still support conditional requests, so we will still - # minimize traffic sent in cases where the page hasn't - # changed at all, we will just always incur the round - # trip for the conditional GET now instead of only - # once per 10 minutes. - # For more information, please see pypa/pip#5670. - "Cache-Control": "max-age=0", - }, - ) - resp.raise_for_status() - - # The check for archives above only works if the url ends with - # something that looks like an archive. However that is not a - # requirement of an url. Unless we issue a HEAD request on every - # url we cannot know ahead of time for sure if something is HTML - # or not. However we can check after we've downloaded it. - _ensure_html_header(resp) - - return resp - - -def _handle_get_page_fail( - link, # type: Link - reason, # type: Union[str, Exception] - meth=None # type: Optional[Callable[..., None]] -): - # type: (...) -> None - if meth is None: - meth = logger.debug - meth("Could not fetch URL %s: %s - skipping", link, reason) - - -def _get_html_page(link, session=None): - # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] - if session is None: - raise TypeError( - "_get_html_page() missing 1 required keyword argument: 'session'" - ) - - url = link.url.split('#', 1)[0] - - # Check for VCS schemes that do not support lookup as web pages. 
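# Illustrative sketch, separate from the deleted module: the scheme-prefix
# test that _match_vcs_scheme() performs. The scheme list here is an
# assumption for the example; pip takes it from its registered VCS backends.
EXAMPLE_VCS_SCHEMES = ["git", "git+https", "git+ssh", "hg", "svn", "bzr"]

def match_vcs_scheme(url):
    for scheme in EXAMPLE_VCS_SCHEMES:
        # a VCS URL looks like "git+https://..." or "git:...", so the scheme
        # must be followed by "+" or ":"
        if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
            return scheme
    return None

# match_vcs_scheme("git+https://github.com/pypa/pip.git") -> "git"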
- vcs_scheme = _match_vcs_scheme(url) - if vcs_scheme: - logger.debug('Cannot look at %s URL %s', vcs_scheme, link) - return None - - # Tack index.html onto file:// URLs that point to directories - scheme, _, path, _, _, _ = urllib_parse.urlparse(url) - if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))): - # add trailing slash if not present so urljoin doesn't trim - # final segment - if not url.endswith('/'): - url += '/' - url = urllib_parse.urljoin(url, 'index.html') - logger.debug(' file: URL is directory, getting %s', url) - - try: - resp = _get_html_response(url, session=session) - except _NotHTTP: - logger.debug( - 'Skipping page %s because it looks like an archive, and cannot ' - 'be checked by HEAD.', link, - ) - except _NotHTML as exc: - logger.debug( - 'Skipping page %s because the %s request got Content-Type: %s', - link, exc.request_desc, exc.content_type, - ) - except HTTPError as exc: - _handle_get_page_fail(link, exc) - except RetryError as exc: - _handle_get_page_fail(link, exc) - except SSLError as exc: - reason = "There was a problem confirming the ssl certificate: " - reason += str(exc) - _handle_get_page_fail(link, reason, meth=logger.info) - except requests.ConnectionError as exc: - _handle_get_page_fail(link, "connection error: %s" % exc) - except requests.Timeout: - _handle_get_page_fail(link, "timed out") - else: - return HTMLPage(resp.content, resp.url, resp.headers) - return None - - -def _check_link_requires_python( - link, # type: Link - version_info, # type: Tuple[int, int, int] - ignore_requires_python=False, # type: bool -): - # type: (...) -> bool - """ - Return whether the given Python version is compatible with a link's - "Requires-Python" value. - - :param version_info: A 3-tuple of ints representing the Python - major-minor-micro version to check. - :param ignore_requires_python: Whether to ignore the "Requires-Python" - value if the given Python version isn't compatible. - """ - try: - is_compatible = check_requires_python( - link.requires_python, version_info=version_info, - ) - except specifiers.InvalidSpecifier: - logger.debug( - "Ignoring invalid Requires-Python (%r) for link: %s", - link.requires_python, link, - ) - else: - if not is_compatible: - version = '.'.join(map(str, version_info)) - if not ignore_requires_python: - logger.debug( - 'Link requires a different Python (%s not in: %r): %s', - version, link.requires_python, link, - ) - return False - - logger.debug( - 'Ignoring failed Requires-Python check (%s not in: %r) ' - 'for link: %s', - version, link.requires_python, link, - ) - - return True - - -class LinkEvaluator(object): - - """ - Responsible for evaluating links for a particular project. - """ - - _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') - - # Don't include an allow_yanked default value to make sure each call - # site considers whether yanked releases are allowed. This also causes - # that decision to be made explicit in the calling code, which helps - # people when reading the code. - def __init__( - self, - project_name, # type: str - canonical_name, # type: str - formats, # type: FrozenSet - target_python, # type: TargetPython - allow_yanked, # type: bool - ignore_requires_python=None, # type: Optional[bool] - ): - # type: (...) -> None - """ - :param project_name: The user supplied package name. - :param canonical_name: The canonical package name. - :param formats: The formats allowed for this package. Should be a set - with 'binary' or 'source' or both in it. 
- :param target_python: The target Python interpreter to use when - evaluating link compatibility. This is used, for example, to - check wheel compatibility, as well as when checking the Python - version, e.g. the Python version embedded in a link filename - (or egg fragment) and against an HTML link's optional PEP 503 - "data-requires-python" attribute. - :param allow_yanked: Whether files marked as yanked (in the sense - of PEP 592) are permitted to be candidates for install. - :param ignore_requires_python: Whether to ignore incompatible - PEP 503 "data-requires-python" values in HTML links. Defaults - to False. - """ - if ignore_requires_python is None: - ignore_requires_python = False - - self._allow_yanked = allow_yanked - self._canonical_name = canonical_name - self._ignore_requires_python = ignore_requires_python - self._formats = formats - self._target_python = target_python - - self.project_name = project_name - - def evaluate_link(self, link): - # type: (Link) -> Tuple[bool, Optional[Text]] - """ - Determine whether a link is a candidate for installation. - - :return: A tuple (is_candidate, result), where `result` is (1) a - version string if `is_candidate` is True, and (2) if - `is_candidate` is False, an optional string to log the reason - the link fails to qualify. - """ - version = None - if link.is_yanked and not self._allow_yanked: - reason = link.yanked_reason or '' - # Mark this as a unicode string to prevent "UnicodeEncodeError: - # 'ascii' codec can't encode character" in Python 2 when - # the reason contains non-ascii characters. - return (False, u'yanked for reason: {}'.format(reason)) - - if link.egg_fragment: - egg_info = link.egg_fragment - ext = link.ext - else: - egg_info, ext = link.splitext() - if not ext: - return (False, 'not a file') - if ext not in SUPPORTED_EXTENSIONS: - return (False, 'unsupported archive format: %s' % ext) - if "binary" not in self._formats and ext == WHEEL_EXTENSION: - reason = 'No binaries permitted for %s' % self.project_name - return (False, reason) - if "macosx10" in link.path and ext == '.zip': - return (False, 'macosx10 one') - if ext == WHEEL_EXTENSION: - try: - wheel = Wheel(link.filename) - except InvalidWheelFilename: - return (False, 'invalid wheel filename') - if canonicalize_name(wheel.name) != self._canonical_name: - reason = 'wrong project name (not %s)' % self.project_name - return (False, reason) - - supported_tags = self._target_python.get_tags() - if not wheel.supported(supported_tags): - # Include the wheel's tags in the reason string to - # simplify troubleshooting compatibility issues. - file_tags = wheel.get_formatted_file_tags() - reason = ( - "none of the wheel's tags match: {}".format( - ', '.join(file_tags) - ) - ) - return (False, reason) - - version = wheel.version - - # This should be up by the self.ok_binary check, but see issue 2700. 
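# Illustrative sketch, separate from the deleted module: what the
# _py_version_re check below does with an egg-info version string that
# carries a "-pyX.Y" suffix. The version string is an example value.
import re

_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')

def split_py_version_suffix(version):
    match = _py_version_re.search(version)
    if not match:
        return version, None
    # "1.4-py2.7" -> ("1.4", "2.7")
    return version[:match.start()], match.group(1)

# split_py_version_suffix("1.4-py2.7") -> ("1.4", "2.7")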
- if "source" not in self._formats and ext != WHEEL_EXTENSION: - return (False, 'No sources permitted for %s' % self.project_name) - - if not version: - version = _extract_version_from_fragment( - egg_info, self._canonical_name, - ) - if not version: - return ( - False, 'Missing project version for %s' % self.project_name, - ) - - match = self._py_version_re.search(version) - if match: - version = version[:match.start()] - py_version = match.group(1) - if py_version != self._target_python.py_version: - return (False, 'Python version is incorrect') - - supports_python = _check_link_requires_python( - link, version_info=self._target_python.py_version_info, - ignore_requires_python=self._ignore_requires_python, - ) - if not supports_python: - # Return None for the reason text to suppress calling - # _log_skipped_link(). - return (False, None) - - logger.debug('Found link %s, version: %s', link, version) - - return (True, version) - - -def filter_unallowed_hashes( - candidates, # type: List[InstallationCandidate] - hashes, # type: Hashes - project_name, # type: str -): - # type: (...) -> List[InstallationCandidate] - """ - Filter out candidates whose hashes aren't allowed, and return a new - list of candidates. - - If at least one candidate has an allowed hash, then all candidates with - either an allowed hash or no hash specified are returned. Otherwise, - the given candidates are returned. - - Including the candidates with no hash specified when there is a match - allows a warning to be logged if there is a more preferred candidate - with no hash specified. Returning all candidates in the case of no - matches lets pip report the hash of the candidate that would otherwise - have been installed (e.g. permitting the user to more easily update - their requirements file with the desired hash). - """ - if not hashes: - logger.debug( - 'Given no hashes to check %s links for project %r: ' - 'discarding no candidates', - len(candidates), - project_name, - ) - # Make sure we're not returning back the given value. - return list(candidates) - - matches_or_no_digest = [] - # Collect the non-matches for logging purposes. - non_matches = [] - match_count = 0 - for candidate in candidates: - link = candidate.link - if not link.has_hash: - pass - elif link.is_hash_allowed(hashes=hashes): - match_count += 1 - else: - non_matches.append(candidate) - continue - - matches_or_no_digest.append(candidate) - - if match_count: - filtered = matches_or_no_digest - else: - # Make sure we're not returning back the given value. - filtered = list(candidates) - - if len(filtered) == len(candidates): - discard_message = 'discarding no candidates' - else: - discard_message = 'discarding {} non-matches:\n {}'.format( - len(non_matches), - '\n '.join(str(candidate.link) for candidate in non_matches) - ) - - logger.debug( - 'Checked %s links for project %r against %s hashes ' - '(%s matches, %s no digest): %s', - len(candidates), - project_name, - hashes.digest_count, - match_count, - len(matches_or_no_digest) - match_count, - discard_message - ) - - return filtered - - -class CandidatePreferences(object): - - """ - Encapsulates some of the preferences for filtering and sorting - InstallationCandidate objects. - """ - - def __init__( - self, - prefer_binary=False, # type: bool - allow_all_prereleases=False, # type: bool - ): - # type: (...) -> None - """ - :param allow_all_prereleases: Whether to allow all pre-releases. 
- """ - self.allow_all_prereleases = allow_all_prereleases - self.prefer_binary = prefer_binary - - -class CandidateEvaluator(object): - - """ - Responsible for filtering and sorting candidates for installation based - on what tags are valid. - """ - - @classmethod - def create( - cls, - project_name, # type: str - target_python=None, # type: Optional[TargetPython] - prefer_binary=False, # type: bool - allow_all_prereleases=False, # type: bool - specifier=None, # type: Optional[specifiers.BaseSpecifier] - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> CandidateEvaluator - """Create a CandidateEvaluator object. - - :param target_python: The target Python interpreter to use when - checking compatibility. If None (the default), a TargetPython - object will be constructed from the running Python. - :param hashes: An optional collection of allowed hashes. - """ - if target_python is None: - target_python = TargetPython() - if specifier is None: - specifier = specifiers.SpecifierSet() - - supported_tags = target_python.get_tags() - - return cls( - project_name=project_name, - supported_tags=supported_tags, - specifier=specifier, - prefer_binary=prefer_binary, - allow_all_prereleases=allow_all_prereleases, - hashes=hashes, - ) - - def __init__( - self, - project_name, # type: str - supported_tags, # type: List[Pep425Tag] - specifier, # type: specifiers.BaseSpecifier - prefer_binary=False, # type: bool - allow_all_prereleases=False, # type: bool - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> None - """ - :param supported_tags: The PEP 425 tags supported by the target - Python in order of preference (most preferred first). - """ - self._allow_all_prereleases = allow_all_prereleases - self._hashes = hashes - self._prefer_binary = prefer_binary - self._project_name = project_name - self._specifier = specifier - self._supported_tags = supported_tags - - def get_applicable_candidates( - self, - candidates, # type: List[InstallationCandidate] - ): - # type: (...) -> List[InstallationCandidate] - """ - Return the applicable candidates from a list of candidates. - """ - # Using None infers from the specifier instead. - allow_prereleases = self._allow_all_prereleases or None - specifier = self._specifier - versions = { - str(v) for v in specifier.filter( - # We turn the version object into a str here because otherwise - # when we're debundled but setuptools isn't, Python will see - # packaging.version.Version and - # pkg_resources._vendor.packaging.version.Version as different - # types. This way we'll use a str as a common data interchange - # format. If we stop using the pkg_resources provided specifier - # and start using our own, we can drop the cast to str(). - (str(c.version) for c in candidates), - prereleases=allow_prereleases, - ) - } - - # Again, converting version to str to deal with debundling. - applicable_candidates = [ - c for c in candidates if str(c.version) in versions - ] - - return filter_unallowed_hashes( - candidates=applicable_candidates, - hashes=self._hashes, - project_name=self._project_name, - ) - - def make_found_candidates( - self, - candidates, # type: List[InstallationCandidate] - ): - # type: (...) -> FoundCandidates - """ - Create and return a `FoundCandidates` instance. - - :param specifier: An optional object implementing `filter` - (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable - versions. 
- """ - applicable_candidates = self.get_applicable_candidates(candidates) - - return FoundCandidates( - candidates, - applicable_candidates=applicable_candidates, - evaluator=self, - ) - - def _sort_key(self, candidate): - # type: (InstallationCandidate) -> CandidateSortingKey - """ - Function to pass as the `key` argument to a call to sorted() to sort - InstallationCandidates by preference. - - Returns a tuple such that tuples sorting as greater using Python's - default comparison operator are more preferred. - - The preference is as follows: - - First and foremost, candidates with allowed (matching) hashes are - always preferred over candidates without matching hashes. This is - because e.g. if the only candidate with an allowed hash is yanked, - we still want to use that candidate. - - Second, excepting hash considerations, candidates that have been - yanked (in the sense of PEP 592) are always less preferred than - candidates that haven't been yanked. Then: - - If not finding wheels, they are sorted by version only. - If finding wheels, then the sort order is by version, then: - 1. existing installs - 2. wheels ordered via Wheel.support_index_min(self._supported_tags) - 3. source archives - If prefer_binary was set, then all wheels are sorted above sources. - - Note: it was considered to embed this logic into the Link - comparison operators, but then different sdist links - with the same version, would have to be considered equal - """ - valid_tags = self._supported_tags - support_num = len(valid_tags) - build_tag = tuple() # type: BuildTag - binary_preference = 0 - link = candidate.link - if link.is_wheel: - # can raise InvalidWheelFilename - wheel = Wheel(link.filename) - if not wheel.supported(valid_tags): - raise UnsupportedWheel( - "%s is not a supported wheel for this platform. It " - "can't be sorted." % wheel.filename - ) - if self._prefer_binary: - binary_preference = 1 - pri = -(wheel.support_index_min(valid_tags)) - if wheel.build_tag is not None: - match = re.match(r'^(\d+)(.*)$', wheel.build_tag) - build_tag_groups = match.groups() - build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) - else: # sdist - pri = -(support_num) - has_allowed_hash = int(link.is_hash_allowed(self._hashes)) - yank_value = -1 * int(link.is_yanked) # -1 for yanked. - return ( - has_allowed_hash, yank_value, binary_preference, candidate.version, - build_tag, pri, - ) - - def get_best_candidate( - self, - candidates, # type: List[InstallationCandidate] - ): - # type: (...) -> Optional[InstallationCandidate] - """ - Return the best candidate per the instance's sort order, or None if - no candidate is acceptable. - """ - if not candidates: - return None - - best_candidate = max(candidates, key=self._sort_key) - - # Log a warning per PEP 592 if necessary before returning. - link = best_candidate.link - if link.is_yanked: - reason = link.yanked_reason or '' - msg = ( - # Mark this as a unicode string to prevent - # "UnicodeEncodeError: 'ascii' codec can't encode character" - # in Python 2 when the reason contains non-ascii characters. - u'The candidate selected for download or install is a ' - 'yanked version: {candidate}\n' - 'Reason for being yanked: {reason}' - ).format(candidate=best_candidate, reason=reason) - logger.warning(msg) - - return best_candidate - - -class FoundCandidates(object): - """A collection of candidates, returned by `PackageFinder.find_candidates`. - - This class is only intended to be instantiated by CandidateEvaluator's - `make_found_candidates()` method. 
- """ - - def __init__( - self, - candidates, # type: List[InstallationCandidate] - applicable_candidates, # type: List[InstallationCandidate] - evaluator, # type: CandidateEvaluator - ): - # type: (...) -> None - """ - :param candidates: A sequence of all available candidates found. - :param applicable_candidates: The applicable candidates. - :param evaluator: A CandidateEvaluator object to sort applicable - candidates by order of preference. - """ - self._applicable_candidates = applicable_candidates - self._candidates = candidates - self._evaluator = evaluator - - def iter_all(self): - # type: () -> Iterable[InstallationCandidate] - """Iterate through all candidates. - """ - return iter(self._candidates) - - def iter_applicable(self): - # type: () -> Iterable[InstallationCandidate] - """Iterate through the applicable candidates. - """ - return iter(self._applicable_candidates) - - def get_best(self): - # type: () -> Optional[InstallationCandidate] - """Return the best candidate available, or None if no applicable - candidates are found. - """ - candidates = list(self.iter_applicable()) - return self._evaluator.get_best_candidate(candidates) - - -class PackageFinder(object): - """This finds packages. - - This is meant to match easy_install's technique for looking for - packages, by reading pages and looking for appropriate links. - """ - - def __init__( - self, - search_scope, # type: SearchScope - session, # type: PipSession - target_python, # type: TargetPython - allow_yanked, # type: bool - format_control=None, # type: Optional[FormatControl] - trusted_hosts=None, # type: Optional[List[str]] - candidate_prefs=None, # type: CandidatePreferences - ignore_requires_python=None, # type: Optional[bool] - ): - # type: (...) -> None - """ - This constructor is primarily meant to be used by the create() class - method and from tests. - - :param session: The Session to use to make requests. - :param format_control: A FormatControl object, used to control - the selection of source packages / binary packages when consulting - the index and links. - :param candidate_prefs: Options to use when creating a - CandidateEvaluator object. - """ - if trusted_hosts is None: - trusted_hosts = [] - if candidate_prefs is None: - candidate_prefs = CandidatePreferences() - - format_control = format_control or FormatControl(set(), set()) - - self._allow_yanked = allow_yanked - self._candidate_prefs = candidate_prefs - self._ignore_requires_python = ignore_requires_python - self._target_python = target_python - - self.search_scope = search_scope - self.session = session - self.format_control = format_control - self.trusted_hosts = trusted_hosts - - # These are boring links that have already been logged somehow. - self._logged_links = set() # type: Set[Link] - - # Don't include an allow_yanked default value to make sure each call - # site considers whether yanked releases are allowed. This also causes - # that decision to be made explicit in the calling code, which helps - # people when reading the code. - @classmethod - def create( - cls, - search_scope, # type: SearchScope - selection_prefs, # type: SelectionPreferences - trusted_hosts=None, # type: Optional[List[str]] - session=None, # type: Optional[PipSession] - target_python=None, # type: Optional[TargetPython] - ): - # type: (...) -> PackageFinder - """Create a PackageFinder. - - :param selection_prefs: The candidate selection preferences, as a - SelectionPreferences object. - :param trusted_hosts: Domains not to emit warnings for when not using - HTTPS. 
- :param session: The Session to use to make requests. - :param target_python: The target Python interpreter to use when - checking compatibility. If None (the default), a TargetPython - object will be constructed from the running Python. - """ - if session is None: - raise TypeError( - "PackageFinder.create() missing 1 required keyword argument: " - "'session'" - ) - if target_python is None: - target_python = TargetPython() - - candidate_prefs = CandidatePreferences( - prefer_binary=selection_prefs.prefer_binary, - allow_all_prereleases=selection_prefs.allow_all_prereleases, - ) - - return cls( - candidate_prefs=candidate_prefs, - search_scope=search_scope, - session=session, - target_python=target_python, - allow_yanked=selection_prefs.allow_yanked, - format_control=selection_prefs.format_control, - trusted_hosts=trusted_hosts, - ignore_requires_python=selection_prefs.ignore_requires_python, - ) - - @property - def find_links(self): - # type: () -> List[str] - return self.search_scope.find_links - - @property - def index_urls(self): - # type: () -> List[str] - return self.search_scope.index_urls - - @property - def allow_all_prereleases(self): - # type: () -> bool - return self._candidate_prefs.allow_all_prereleases - - def set_allow_all_prereleases(self): - # type: () -> None - self._candidate_prefs.allow_all_prereleases = True - - def add_trusted_host(self, host, source=None): - # type: (str, Optional[str]) -> None - """ - :param source: An optional source string, for logging where the host - string came from. - """ - # It is okay to add a previously added host because PipSession stores - # the resulting prefixes in a dict. - msg = 'adding trusted host: {!r}'.format(host) - if source is not None: - msg += ' (from {})'.format(source) - logger.info(msg) - self.session.add_insecure_host(host) - if host in self.trusted_hosts: - return - - self.trusted_hosts.append(host) - - def iter_secure_origins(self): - # type: () -> Iterator[SecureOrigin] - for secure_origin in SECURE_ORIGINS: - yield secure_origin - for host in self.trusted_hosts: - yield ('*', host, '*') - - @staticmethod - def _sort_locations(locations, expand_dir=False): - # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] - """ - Sort locations into "files" (archives) and "urls", and return - a pair of lists (files,urls) - """ - files = [] - urls = [] - - # puts the url for the given file path into the appropriate list - def sort_path(path): - url = path_to_url(path) - if mimetypes.guess_type(url, strict=False)[0] == 'text/html': - urls.append(url) - else: - files.append(url) - - for url in locations: - - is_local_path = os.path.exists(url) - is_file_url = url.startswith('file:') - - if is_local_path or is_file_url: - if is_local_path: - path = url - else: - path = url_to_path(url) - if os.path.isdir(path): - if expand_dir: - path = os.path.realpath(path) - for item in os.listdir(path): - sort_path(os.path.join(path, item)) - elif is_file_url: - urls.append(url) - else: - logger.warning( - "Path '{0}' is ignored: " - "it is a directory.".format(path), - ) - elif os.path.isfile(path): - sort_path(path) - else: - logger.warning( - "Url '%s' is ignored: it is neither a file " - "nor a directory.", url, - ) - elif is_url(url): - # Only add url with clear scheme - urls.append(url) - else: - logger.warning( - "Url '%s' is ignored. 
It is either a non-existing " - "path or lacks a specific scheme.", url, - ) - - return files, urls - - def _validate_secure_origin(self, logger, location): - # type: (Logger, Link) -> bool - # Determine if this url used a secure transport mechanism - parsed = urllib_parse.urlparse(str(location)) - origin = (parsed.scheme, parsed.hostname, parsed.port) - - # The protocol to use to see if the protocol matches. - # Don't count the repository type as part of the protocol: in - # cases such as "git+ssh", only use "ssh". (I.e., Only verify against - # the last scheme.) - protocol = origin[0].rsplit('+', 1)[-1] - - # Determine if our origin is a secure origin by looking through our - # hardcoded list of secure origins, as well as any additional ones - # configured on this PackageFinder instance. - for secure_origin in self.iter_secure_origins(): - if protocol != secure_origin[0] and secure_origin[0] != "*": - continue - - try: - # We need to do this decode dance to ensure that we have a - # unicode object, even on Python 2.x. - addr = ipaddress.ip_address( - origin[1] - if ( - isinstance(origin[1], six.text_type) or - origin[1] is None - ) - else origin[1].decode("utf8") - ) - network = ipaddress.ip_network( - secure_origin[1] - if isinstance(secure_origin[1], six.text_type) - # setting secure_origin[1] to proper Union[bytes, str] - # creates problems in other places - else secure_origin[1].decode("utf8") # type: ignore - ) - except ValueError: - # We don't have both a valid address or a valid network, so - # we'll check this origin against hostnames. - if (origin[1] and - origin[1].lower() != secure_origin[1].lower() and - secure_origin[1] != "*"): - continue - else: - # We have a valid address and network, so see if the address - # is contained within the network. - if addr not in network: - continue - - # Check to see if the port patches - if (origin[2] != secure_origin[2] and - secure_origin[2] != "*" and - secure_origin[2] is not None): - continue - - # If we've gotten here, then this origin matches the current - # secure origin and we should return True - return True - - # If we've gotten to this point, then the origin isn't secure and we - # will not accept it as a valid location to search. We will however - # log a warning that we are ignoring it. - logger.warning( - "The repository located at %s is not a trusted or secure host and " - "is being ignored. If this repository is available via HTTPS we " - "recommend you use HTTPS instead, otherwise you may silence " - "this warning and allow it anyway with '--trusted-host %s'.", - parsed.hostname, - parsed.hostname, - ) - - return False - - def make_link_evaluator(self, project_name): - # type: (str) -> LinkEvaluator - canonical_name = canonicalize_name(project_name) - formats = self.format_control.get_allowed_formats(canonical_name) - - return LinkEvaluator( - project_name=project_name, - canonical_name=canonical_name, - formats=formats, - target_python=self._target_python, - allow_yanked=self._allow_yanked, - ignore_requires_python=self._ignore_requires_python, - ) - - def find_all_candidates(self, project_name): - # type: (str) -> List[InstallationCandidate] - """Find all available InstallationCandidate for project_name - - This checks index_urls and find_links. - All versions found are returned as an InstallationCandidate list. - - See LinkEvaluator.evaluate_link() for details on which files - are accepted. 
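# Illustrative sketch, separate from the deleted module: the core of the
# secure-origin test in _validate_secure_origin() above -- decide whether a
# host falls inside a trusted network with the stdlib ipaddress module.
# The host and network values are examples.
import ipaddress

def host_in_network(host, network):
    try:
        return ipaddress.ip_address(host) in ipaddress.ip_network(network)
    except ValueError:
        # not an IP literal / network; fall back to a hostname comparison
        return host.lower() == network.lower()

# host_in_network("127.0.0.1", "127.0.0.0/8") -> True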
- """ - search_scope = self.search_scope - index_locations = search_scope.get_index_urls_locations(project_name) - index_file_loc, index_url_loc = self._sort_locations(index_locations) - fl_file_loc, fl_url_loc = self._sort_locations( - self.find_links, expand_dir=True, - ) - - file_locations = (Link(url) for url in itertools.chain( - index_file_loc, fl_file_loc, - )) - - # We trust every url that the user has given us whether it was given - # via --index-url or --find-links. - # We want to filter out any thing which does not have a secure origin. - url_locations = [ - link for link in itertools.chain( - (Link(url) for url in index_url_loc), - (Link(url) for url in fl_url_loc), - ) - if self._validate_secure_origin(logger, link) - ] - - logger.debug('%d location(s) to search for versions of %s:', - len(url_locations), project_name) - - for location in url_locations: - logger.debug('* %s', location) - - link_evaluator = self.make_link_evaluator(project_name) - find_links_versions = self._package_versions( - link_evaluator, - # We trust every directly linked archive in find_links - (Link(url, '-f') for url in self.find_links), - ) - - page_versions = [] - for page in self._get_pages(url_locations, project_name): - logger.debug('Analyzing links from page %s', page.url) - with indent_log(): - page_versions.extend( - self._package_versions(link_evaluator, page.iter_links()) - ) - - file_versions = self._package_versions(link_evaluator, file_locations) - if file_versions: - file_versions.sort(reverse=True) - logger.debug( - 'Local files found: %s', - ', '.join([ - url_to_path(candidate.link.url) - for candidate in file_versions - ]) - ) - - # This is an intentional priority ordering - return file_versions + find_links_versions + page_versions - - def make_candidate_evaluator( - self, - project_name, # type: str - specifier=None, # type: Optional[specifiers.BaseSpecifier] - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> CandidateEvaluator - """Create a CandidateEvaluator object to use. - """ - candidate_prefs = self._candidate_prefs - return CandidateEvaluator.create( - project_name=project_name, - target_python=self._target_python, - prefer_binary=candidate_prefs.prefer_binary, - allow_all_prereleases=candidate_prefs.allow_all_prereleases, - specifier=specifier, - hashes=hashes, - ) - - def find_candidates( - self, - project_name, # type: str - specifier=None, # type: Optional[specifiers.BaseSpecifier] - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> FoundCandidates - """Find matches for the given project and specifier. - - :param specifier: An optional object implementing `filter` - (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable - versions. - - :return: A `FoundCandidates` instance. 
- """ - candidates = self.find_all_candidates(project_name) - candidate_evaluator = self.make_candidate_evaluator( - project_name=project_name, - specifier=specifier, - hashes=hashes, - ) - return candidate_evaluator.make_found_candidates(candidates) - - def find_requirement(self, req, upgrade): - # type: (InstallRequirement, bool) -> Optional[Link] - """Try to find a Link matching req - - Expects req, an InstallRequirement and upgrade, a boolean - Returns a Link if found, - Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise - """ - hashes = req.hashes(trust_internet=False) - candidates = self.find_candidates( - req.name, specifier=req.specifier, hashes=hashes, - ) - best_candidate = candidates.get_best() - - installed_version = None # type: Optional[_BaseVersion] - if req.satisfied_by is not None: - installed_version = parse_version(req.satisfied_by.version) - - def _format_versions(cand_iter): - # This repeated parse_version and str() conversion is needed to - # handle different vendoring sources from pip and pkg_resources. - # If we stop using the pkg_resources provided specifier and start - # using our own, we can drop the cast to str(). - return ", ".join(sorted( - {str(c.version) for c in cand_iter}, - key=parse_version, - )) or "none" - - if installed_version is None and best_candidate is None: - logger.critical( - 'Could not find a version that satisfies the requirement %s ' - '(from versions: %s)', - req, - _format_versions(candidates.iter_all()), - ) - - raise DistributionNotFound( - 'No matching distribution found for %s' % req - ) - - best_installed = False - if installed_version and ( - best_candidate is None or - best_candidate.version <= installed_version): - best_installed = True - - if not upgrade and installed_version is not None: - if best_installed: - logger.debug( - 'Existing installed version (%s) is most up-to-date and ' - 'satisfies requirement', - installed_version, - ) - else: - logger.debug( - 'Existing installed version (%s) satisfies requirement ' - '(most up-to-date version is %s)', - installed_version, - best_candidate.version, - ) - return None - - if best_installed: - # We have an existing version, and its the best version - logger.debug( - 'Installed version (%s) is most up-to-date (past versions: ' - '%s)', - installed_version, - _format_versions(candidates.iter_applicable()), - ) - raise BestVersionAlreadyInstalled - - logger.debug( - 'Using version %s (newest of versions: %s)', - best_candidate.version, - _format_versions(candidates.iter_applicable()), - ) - return best_candidate.link - - def _get_pages(self, locations, project_name): - # type: (Iterable[Link], str) -> Iterable[HTMLPage] - """ - Yields (page, page_url) from the given locations, skipping - locations that have errors. 
- """ - seen = set() # type: Set[Link] - for location in locations: - if location in seen: - continue - seen.add(location) - - page = _get_html_page(location, session=self.session) - if page is None: - continue - - yield page - - def _sort_links(self, links): - # type: (Iterable[Link]) -> List[Link] - """ - Returns elements of links in order, non-egg links first, egg links - second, while eliminating duplicates - """ - eggs, no_eggs = [], [] - seen = set() # type: Set[Link] - for link in links: - if link not in seen: - seen.add(link) - if link.egg_fragment: - eggs.append(link) - else: - no_eggs.append(link) - return no_eggs + eggs - - def _log_skipped_link(self, link, reason): - # type: (Link, Text) -> None - if link not in self._logged_links: - # Mark this as a unicode string to prevent "UnicodeEncodeError: - # 'ascii' codec can't encode character" in Python 2 when - # the reason contains non-ascii characters. - # Also, put the link at the end so the reason is more visible - # and because the link string is usually very long. - logger.debug(u'Skipping link: %s: %s', reason, link) - self._logged_links.add(link) - - def get_install_candidate(self, link_evaluator, link): - # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate] - """ - If the link is a candidate for install, convert it to an - InstallationCandidate and return it. Otherwise, return None. - """ - is_candidate, result = link_evaluator.evaluate_link(link) - if not is_candidate: - if result: - self._log_skipped_link(link, reason=result) - return None - - return InstallationCandidate( - project=link_evaluator.project_name, - link=link, - # Convert the Text result to str since InstallationCandidate - # accepts str. - version=str(result), - ) - - def _package_versions(self, link_evaluator, links): - # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate] - result = [] - for link in self._sort_links(links): - candidate = self.get_install_candidate(link_evaluator, link) - if candidate is not None: - result.append(candidate) - return result - - -def _find_name_version_sep(fragment, canonical_name): - # type: (str, str) -> int - """Find the separator's index based on the package's canonical name. - - :param fragment: A + filename "fragment" (stem) or - egg fragment. - :param canonical_name: The package's canonical name. - - This function is needed since the canonicalized name does not necessarily - have the same length as the egg info's name part. An example:: - - >>> fragment = 'foo__bar-1.0' - >>> canonical_name = 'foo-bar' - >>> _find_name_version_sep(fragment, canonical_name) - 8 - """ - # Project name and version must be separated by one single dash. Find all - # occurrences of dashes; if the string in front of it matches the canonical - # name, this is the one separating the name and version parts. - for i, c in enumerate(fragment): - if c != "-": - continue - if canonicalize_name(fragment[:i]) == canonical_name: - return i - raise ValueError("{} does not match {}".format(fragment, canonical_name)) - - -def _extract_version_from_fragment(fragment, canonical_name): - # type: (str, str) -> Optional[str] - """Parse the version string from a + filename - "fragment" (stem) or egg fragment. - - :param fragment: The string to parse. E.g. foo-2.1 - :param canonical_name: The canonicalized name of the package this - belongs to. 
- """ - try: - version_start = _find_name_version_sep(fragment, canonical_name) + 1 - except ValueError: - return None - version = fragment[version_start:] - if not version: - return None - return version - - -def _determine_base_url(document, page_url): - """Determine the HTML document's base URL. - - This looks for a ```` tag in the HTML document. If present, its href - attribute denotes the base URL of anchor tags in the document. If there is - no such tag (or if it does not have a valid href attribute), the HTML - file's URL is used as the base URL. - - :param document: An HTML document representation. The current - implementation expects the result of ``html5lib.parse()``. - :param page_url: The URL of the HTML document. - """ - for base in document.findall(".//base"): - href = base.get("href") - if href is not None: - return href - return page_url - - -def _get_encoding_from_headers(headers): - """Determine if we have any encoding information in our headers. - """ - if headers and "Content-Type" in headers: - content_type, params = cgi.parse_header(headers["Content-Type"]) - if "charset" in params: - return params['charset'] - return None - - -def _clean_link(url): - # type: (str) -> str - """Makes sure a link is fully encoded. That is, if a ' ' shows up in - the link, it will be rewritten to %20 (while not over-quoting - % or other characters).""" - # Split the URL into parts according to the general structure - # `scheme://netloc/path;parameters?query#fragment`. Note that the - # `netloc` can be empty and the URI will then refer to a local - # filesystem path. - result = urllib_parse.urlparse(url) - # In both cases below we unquote prior to quoting to make sure - # nothing is double quoted. - if result.netloc == "": - # On Windows the path part might contain a drive letter which - # should not be quoted. On Linux where drive letters do not - # exist, the colon should be quoted. We rely on urllib.request - # to do the right thing here. - path = urllib_request.pathname2url( - urllib_request.url2pathname(result.path)) - else: - # In addition to the `/` character we protect `@` so that - # revision strings in VCS URLs are properly parsed. - path = urllib_parse.quote(urllib_parse.unquote(result.path), safe="/@") - return urllib_parse.urlunparse(result._replace(path=path)) - - -def _create_link_from_element( - anchor, # type: HTMLElement - page_url, # type: str - base_url, # type: str -): - # type: (...) -> Optional[Link] - """ - Convert an anchor element in a simple repository page to a Link. - """ - href = anchor.get("href") - if not href: - return None - - url = _clean_link(urllib_parse.urljoin(base_url, href)) - pyrequire = anchor.get('data-requires-python') - pyrequire = unescape(pyrequire) if pyrequire else None - - yanked_reason = anchor.get('data-yanked') - if yanked_reason: - # This is a unicode string in Python 2 (and 3). 
- yanked_reason = unescape(yanked_reason) - - link = Link( - url, - comes_from=page_url, - requires_python=pyrequire, - yanked_reason=yanked_reason, - ) - - return link - - -class HTMLPage(object): - """Represents one page, along with its URL""" - - def __init__(self, content, url, headers=None): - # type: (bytes, str, MutableMapping[str, str]) -> None - self.content = content - self.url = url - self.headers = headers - - def __str__(self): - return redact_password_from_url(self.url) - - def iter_links(self): - # type: () -> Iterable[Link] - """Yields all links in the page""" - document = html5lib.parse( - self.content, - transport_encoding=_get_encoding_from_headers(self.headers), - namespaceHTMLElements=False, - ) - base_url = _determine_base_url(document, self.url) - for anchor in document.findall(".//a"): - link = _create_link_from_element( - anchor, - page_url=self.url, - base_url=base_url, - ) - if link is None: - continue - yield link diff --git a/env/lib/python2.7/site-packages/pip/_internal/index.pyc b/env/lib/python2.7/site-packages/pip/_internal/index.pyc deleted file mode 100644 index 0ef3c5e9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/index.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/legacy_resolve.py b/env/lib/python2.7/site-packages/pip/_internal/legacy_resolve.py deleted file mode 100644 index 1d9229cb..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/legacy_resolve.py +++ /dev/null @@ -1,457 +0,0 @@ -"""Dependency Resolution - -The dependency resolution in pip is performed as follows: - -for top-level requirements: - a. only one spec allowed per project, regardless of conflicts or not. - otherwise a "double requirement" exception is raised - b. they override sub-dependency requirements. -for sub-dependencies - a. "first found, wins" (where the order is breadth first) -""" - -import logging -import sys -from collections import defaultdict -from itertools import chain - -from pip._vendor.packaging import specifiers - -from pip._internal.exceptions import ( - BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, - UnsupportedPythonVersion, -) -from pip._internal.req.constructors import install_req_from_req_string -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ( - dist_in_usersite, ensure_dir, normalize_version_info, -) -from pip._internal.utils.packaging import ( - check_requires_python, get_requires_python, -) -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import DefaultDict, List, Optional, Set, Tuple - from pip._vendor import pkg_resources - - from pip._internal.cache import WheelCache - from pip._internal.distributions import AbstractDistribution - from pip._internal.download import PipSession - from pip._internal.index import PackageFinder - from pip._internal.operations.prepare import RequirementPreparer - from pip._internal.req.req_install import InstallRequirement - from pip._internal.req.req_set import RequirementSet - -logger = logging.getLogger(__name__) - - -def _check_dist_requires_python( - dist, # type: pkg_resources.Distribution - version_info, # type: Tuple[int, int, int] - ignore_requires_python=False, # type: bool -): - # type: (...) -> None - """ - Check whether the given Python version is compatible with a distribution's - "Requires-Python" value. - - :param version_info: A 3-tuple of ints representing the Python - major-minor-micro version to check. 
- :param ignore_requires_python: Whether to ignore the "Requires-Python" - value if the given Python version isn't compatible. - - :raises UnsupportedPythonVersion: When the given Python version isn't - compatible. - """ - requires_python = get_requires_python(dist) - try: - is_compatible = check_requires_python( - requires_python, version_info=version_info, - ) - except specifiers.InvalidSpecifier as exc: - logger.warning( - "Package %r has an invalid Requires-Python: %s", - dist.project_name, exc, - ) - return - - if is_compatible: - return - - version = '.'.join(map(str, version_info)) - if ignore_requires_python: - logger.debug( - 'Ignoring failed Requires-Python check for package %r: ' - '%s not in %r', - dist.project_name, version, requires_python, - ) - return - - raise UnsupportedPythonVersion( - 'Package {!r} requires a different Python: {} not in {!r}'.format( - dist.project_name, version, requires_python, - )) - - -class Resolver(object): - """Resolves which packages need to be installed/uninstalled to perform \ - the requested operation without breaking the requirements of any package. - """ - - _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} - - def __init__( - self, - preparer, # type: RequirementPreparer - session, # type: PipSession - finder, # type: PackageFinder - wheel_cache, # type: Optional[WheelCache] - use_user_site, # type: bool - ignore_dependencies, # type: bool - ignore_installed, # type: bool - ignore_requires_python, # type: bool - force_reinstall, # type: bool - isolated, # type: bool - upgrade_strategy, # type: str - use_pep517=None, # type: Optional[bool] - py_version_info=None, # type: Optional[Tuple[int, ...]] - ): - # type: (...) -> None - super(Resolver, self).__init__() - assert upgrade_strategy in self._allowed_strategies - - if py_version_info is None: - py_version_info = sys.version_info[:3] - else: - py_version_info = normalize_version_info(py_version_info) - - self._py_version_info = py_version_info - - self.preparer = preparer - self.finder = finder - self.session = session - - # NOTE: This would eventually be replaced with a cache that can give - # information about both sdist and wheels transparently. - self.wheel_cache = wheel_cache - - # This is set in resolve - self.require_hashes = None # type: Optional[bool] - - self.upgrade_strategy = upgrade_strategy - self.force_reinstall = force_reinstall - self.isolated = isolated - self.ignore_dependencies = ignore_dependencies - self.ignore_installed = ignore_installed - self.ignore_requires_python = ignore_requires_python - self.use_user_site = use_user_site - self.use_pep517 = use_pep517 - - self._discovered_dependencies = \ - defaultdict(list) # type: DefaultDict[str, List] - - def resolve(self, requirement_set): - # type: (RequirementSet) -> None - """Resolve what operations need to be done - - As a side-effect of this method, the packages (and their dependencies) - are downloaded, unpacked and prepared for installation. This - preparation is done by ``pip.operations.prepare``. - - Once PyPI has static dependency metadata available, it would be - possible to move the preparation to become a step separated from - dependency resolution. - """ - # make the wheelhouse - if self.preparer.wheel_download_dir: - ensure_dir(self.preparer.wheel_download_dir) - - # If any top-level requirement has a hash specified, enter - # hash-checking mode, which requires hashes from all. 
- root_reqs = ( - requirement_set.unnamed_requirements + - list(requirement_set.requirements.values()) - ) - self.require_hashes = ( - requirement_set.require_hashes or - any(req.has_hash_options for req in root_reqs) - ) - - # Display where finder is looking for packages - search_scope = self.finder.search_scope - locations = search_scope.get_formatted_locations() - if locations: - logger.info(locations) - - # Actually prepare the files, and collect any exceptions. Most hash - # exceptions cannot be checked ahead of time, because - # req.populate_link() needs to be called before we can make decisions - # based on link type. - discovered_reqs = [] # type: List[InstallRequirement] - hash_errors = HashErrors() - for req in chain(root_reqs, discovered_reqs): - try: - discovered_reqs.extend( - self._resolve_one(requirement_set, req) - ) - except HashError as exc: - exc.req = req - hash_errors.append(exc) - - if hash_errors: - raise hash_errors - - def _is_upgrade_allowed(self, req): - # type: (InstallRequirement) -> bool - if self.upgrade_strategy == "to-satisfy-only": - return False - elif self.upgrade_strategy == "eager": - return True - else: - assert self.upgrade_strategy == "only-if-needed" - return req.is_direct - - def _set_req_to_reinstall(self, req): - # type: (InstallRequirement) -> None - """ - Set a requirement to be installed. - """ - # Don't uninstall the conflict if doing a user install and the - # conflict is not a user install. - if not self.use_user_site or dist_in_usersite(req.satisfied_by): - req.conflicts_with = req.satisfied_by - req.satisfied_by = None - - # XXX: Stop passing requirement_set for options - def _check_skip_installed(self, req_to_install): - # type: (InstallRequirement) -> Optional[str] - """Check if req_to_install should be skipped. - - This will check if the req is installed, and whether we should upgrade - or reinstall it, taking into account all the relevant user options. - - After calling this req_to_install will only have satisfied_by set to - None if the req_to_install is to be upgraded/reinstalled etc. Any - other value will be a dist recording the current thing installed that - satisfies the requirement. - - Note that for vcs urls and the like we can't assess skipping in this - routine - we simply identify that we need to pull the thing down, - then later on it is pulled down and introspected to assess upgrade/ - reinstalls etc. - - :return: A text reason for why it was skipped, or None. - """ - if self.ignore_installed: - return None - - req_to_install.check_if_exists(self.use_user_site) - if not req_to_install.satisfied_by: - return None - - if self.force_reinstall: - self._set_req_to_reinstall(req_to_install) - return None - - if not self._is_upgrade_allowed(req_to_install): - if self.upgrade_strategy == "only-if-needed": - return 'already satisfied, skipping upgrade' - return 'already satisfied' - - # Check for the possibility of an upgrade. For link-based - # requirements we have to pull the tree down and inspect to assess - # the version #, so it's handled way down. - if not req_to_install.link: - try: - self.finder.find_requirement(req_to_install, upgrade=True) - except BestVersionAlreadyInstalled: - # Then the best version is installed. - return 'already up-to-date' - except DistributionNotFound: - # No distribution found, so we squash the error. It will - # be raised later when we re-try later to do the install. - # Why don't we just raise here? 
- pass - - self._set_req_to_reinstall(req_to_install) - return None - - def _get_abstract_dist_for(self, req): - # type: (InstallRequirement) -> AbstractDistribution - """Takes a InstallRequirement and returns a single AbstractDist \ - representing a prepared variant of the same. - """ - assert self.require_hashes is not None, ( - "require_hashes should have been set in Resolver.resolve()" - ) - - if req.editable: - return self.preparer.prepare_editable_requirement( - req, self.require_hashes, self.use_user_site, self.finder, - ) - - # satisfied_by is only evaluated by calling _check_skip_installed, - # so it must be None here. - assert req.satisfied_by is None - skip_reason = self._check_skip_installed(req) - - if req.satisfied_by: - return self.preparer.prepare_installed_requirement( - req, self.require_hashes, skip_reason - ) - - upgrade_allowed = self._is_upgrade_allowed(req) - abstract_dist = self.preparer.prepare_linked_requirement( - req, self.session, self.finder, upgrade_allowed, - self.require_hashes - ) - - # NOTE - # The following portion is for determining if a certain package is - # going to be re-installed/upgraded or not and reporting to the user. - # This should probably get cleaned up in a future refactor. - - # req.req is only avail after unpack for URL - # pkgs repeat check_if_exists to uninstall-on-upgrade - # (#14) - if not self.ignore_installed: - req.check_if_exists(self.use_user_site) - - if req.satisfied_by: - should_modify = ( - self.upgrade_strategy != "to-satisfy-only" or - self.force_reinstall or - self.ignore_installed or - req.link.scheme == 'file' - ) - if should_modify: - self._set_req_to_reinstall(req) - else: - logger.info( - 'Requirement already satisfied (use --upgrade to upgrade):' - ' %s', req, - ) - - return abstract_dist - - def _resolve_one( - self, - requirement_set, # type: RequirementSet - req_to_install # type: InstallRequirement - ): - # type: (...) -> List[InstallRequirement] - """Prepare a single requirements file. - - :return: A list of additional InstallRequirements to also install. - """ - # Tell user what we are doing for this requirement: - # obtain (editable), skipping, processing (local url), collecting - # (remote url or package name) - if req_to_install.constraint or req_to_install.prepared: - return [] - - req_to_install.prepared = True - - # register tmp src for cleanup in case something goes wrong - requirement_set.reqs_to_cleanup.append(req_to_install) - - abstract_dist = self._get_abstract_dist_for(req_to_install) - - # Parse and return dependencies - dist = abstract_dist.get_pkg_resources_distribution() - # This will raise UnsupportedPythonVersion if the given Python - # version isn't compatible with the distribution's Requires-Python. 
- _check_dist_requires_python( - dist, version_info=self._py_version_info, - ignore_requires_python=self.ignore_requires_python, - ) - - more_reqs = [] # type: List[InstallRequirement] - - def add_req(subreq, extras_requested): - sub_install_req = install_req_from_req_string( - str(subreq), - req_to_install, - isolated=self.isolated, - wheel_cache=self.wheel_cache, - use_pep517=self.use_pep517 - ) - parent_req_name = req_to_install.name - to_scan_again, add_to_parent = requirement_set.add_requirement( - sub_install_req, - parent_req_name=parent_req_name, - extras_requested=extras_requested, - ) - if parent_req_name and add_to_parent: - self._discovered_dependencies[parent_req_name].append( - add_to_parent - ) - more_reqs.extend(to_scan_again) - - with indent_log(): - # We add req_to_install before its dependencies, so that we - # can refer to it when adding dependencies. - if not requirement_set.has_requirement(req_to_install.name): - # 'unnamed' requirements will get added here - req_to_install.is_direct = True - requirement_set.add_requirement( - req_to_install, parent_req_name=None, - ) - - if not self.ignore_dependencies: - if req_to_install.extras: - logger.debug( - "Installing extra requirements: %r", - ','.join(req_to_install.extras), - ) - missing_requested = sorted( - set(req_to_install.extras) - set(dist.extras) - ) - for missing in missing_requested: - logger.warning( - '%s does not provide the extra \'%s\'', - dist, missing - ) - - available_requested = sorted( - set(dist.extras) & set(req_to_install.extras) - ) - for subreq in dist.requires(available_requested): - add_req(subreq, extras_requested=available_requested) - - if not req_to_install.editable and not req_to_install.satisfied_by: - # XXX: --no-install leads this to report 'Successfully - # downloaded' for only non-editable reqs, even though we took - # action on them. - requirement_set.successfully_downloaded.append(req_to_install) - - return more_reqs - - def get_installation_order(self, req_set): - # type: (RequirementSet) -> List[InstallRequirement] - """Create the installation order. - - The installation order is topological - requirements are installed - before the requiring thing. We break cycles at an arbitrary point, - and make no other guarantees. - """ - # The current implementation, which we may change at any point - # installs the user specified things in the order given, except when - # dependencies must come earlier to achieve topological order. 
- order = [] - ordered_reqs = set() # type: Set[InstallRequirement] - - def schedule(req): - if req.satisfied_by or req in ordered_reqs: - return - if req.constraint: - return - ordered_reqs.add(req) - for dep in self._discovered_dependencies[req.name]: - schedule(dep) - order.append(req) - - for install_req in req_set.requirements.values(): - schedule(install_req) - return order diff --git a/env/lib/python2.7/site-packages/pip/_internal/legacy_resolve.pyc b/env/lib/python2.7/site-packages/pip/_internal/legacy_resolve.pyc deleted file mode 100644 index 0d3c2673..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/legacy_resolve.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/locations.py b/env/lib/python2.7/site-packages/pip/_internal/locations.py deleted file mode 100644 index 5f843d79..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/locations.py +++ /dev/null @@ -1,142 +0,0 @@ -"""Locations where we look for configs, install stuff, etc""" -from __future__ import absolute_import - -import os -import os.path -import platform -import site -import sys -import sysconfig -from distutils import sysconfig as distutils_sysconfig -from distutils.command.install import SCHEME_KEYS # type: ignore - -from pip._internal.utils import appdirs -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.virtualenv import running_under_virtualenv - -if MYPY_CHECK_RUNNING: - from typing import Any, Union, Dict, List, Optional - - -# Application Directories -USER_CACHE_DIR = appdirs.user_cache_dir("pip") - - -def get_src_prefix(): - if running_under_virtualenv(): - src_prefix = os.path.join(sys.prefix, 'src') - else: - # FIXME: keep src in cwd for now (it is not a temporary folder) - try: - src_prefix = os.path.join(os.getcwd(), 'src') - except OSError: - # In case the current working directory has been renamed or deleted - sys.exit( - "The folder you are executing pip from can no longer be found." - ) - - # under macOS + virtualenv sys.prefix is not properly resolved - # it is something like /path/to/python/bin/.. - return os.path.abspath(src_prefix) - - -# FIXME doesn't account for venv linked to global site-packages - -site_packages = sysconfig.get_path("purelib") # type: Optional[str] - -# This is because of a bug in PyPy's sysconfig module, see -# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths -# for more information. -if platform.python_implementation().lower() == "pypy": - site_packages = distutils_sysconfig.get_python_lib() -try: - # Use getusersitepackages if this is present, as it ensures that the - # value is initialised properly. - user_site = site.getusersitepackages() -except AttributeError: - user_site = site.USER_SITE - -if WINDOWS: - bin_py = os.path.join(sys.prefix, 'Scripts') - bin_user = os.path.join(user_site, 'Scripts') - # buildout uses 'bin' on Windows too? 
- if not os.path.exists(bin_py): - bin_py = os.path.join(sys.prefix, 'bin') - bin_user = os.path.join(user_site, 'bin') -else: - bin_py = os.path.join(sys.prefix, 'bin') - bin_user = os.path.join(user_site, 'bin') - - # Forcing to use /usr/local/bin for standard macOS framework installs - # Also log to ~/Library/Logs/ for use with the Console.app log viewer - if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': - bin_py = '/usr/local/bin' - - -def distutils_scheme(dist_name, user=False, home=None, root=None, - isolated=False, prefix=None): - # type:(str, bool, str, str, bool, str) -> dict - """ - Return a distutils install scheme - """ - from distutils.dist import Distribution - - scheme = {} - - if isolated: - extra_dist_args = {"script_args": ["--no-user-cfg"]} - else: - extra_dist_args = {} - dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]] - dist_args.update(extra_dist_args) - - d = Distribution(dist_args) - # Ignoring, typeshed issue reported python/typeshed/issues/2567 - d.parse_config_files() - # NOTE: Ignoring type since mypy can't find attributes on 'Command' - i = d.get_command_obj('install', create=True) # type: Any - assert i is not None - # NOTE: setting user or home has the side-effect of creating the home dir - # or user base for installations during finalize_options() - # ideally, we'd prefer a scheme class that has no side-effects. - assert not (user and prefix), "user={} prefix={}".format(user, prefix) - assert not (home and prefix), "home={} prefix={}".format(home, prefix) - i.user = user or i.user - if user or home: - i.prefix = "" - i.prefix = prefix or i.prefix - i.home = home or i.home - i.root = root or i.root - i.finalize_options() - for key in SCHEME_KEYS: - scheme[key] = getattr(i, 'install_' + key) - - # install_lib specified in setup.cfg should install *everything* - # into there (i.e. it takes precedence over both purelib and - # platlib). 
Note, i.install_lib is *always* set after - # finalize_options(); we only want to override here if the user - # has explicitly requested it hence going back to the config - - # Ignoring, typeshed issue reported python/typeshed/issues/2567 - if 'install_lib' in d.get_option_dict('install'): # type: ignore - scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) - - if running_under_virtualenv(): - scheme['headers'] = os.path.join( - sys.prefix, - 'include', - 'site', - 'python' + sys.version[:3], - dist_name, - ) - - if root is not None: - path_no_drive = os.path.splitdrive( - os.path.abspath(scheme["headers"]))[1] - scheme["headers"] = os.path.join( - root, - path_no_drive[1:], - ) - - return scheme diff --git a/env/lib/python2.7/site-packages/pip/_internal/locations.pyc b/env/lib/python2.7/site-packages/pip/_internal/locations.pyc deleted file mode 100644 index 132d11d1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/locations.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/__init__.pyc b/env/lib/python2.7/site-packages/pip/_internal/models/__init__.pyc deleted file mode 100644 index 2f82452c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/models/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/candidate.py b/env/lib/python2.7/site-packages/pip/_internal/models/candidate.py deleted file mode 100644 index 1b99690f..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/models/candidate.py +++ /dev/null @@ -1,36 +0,0 @@ -from pip._vendor.packaging.version import parse as parse_version - -from pip._internal.utils.models import KeyBasedCompareMixin -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from pip._vendor.packaging.version import _BaseVersion - from pip._internal.models.link import Link - from typing import Any - - -class InstallationCandidate(KeyBasedCompareMixin): - """Represents a potential "candidate" for installation. - """ - - def __init__(self, project, version, link): - # type: (Any, str, Link) -> None - self.project = project - self.version = parse_version(version) # type: _BaseVersion - self.link = link - - super(InstallationCandidate, self).__init__( - key=(self.project, self.version, self.link), - defining_class=InstallationCandidate - ) - - def __repr__(self): - # type: () -> str - return "".format( - self.project, self.version, self.link, - ) - - def __str__(self): - return '{!r} candidate (version {} at {})'.format( - self.project, self.version, self.link, - ) diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/candidate.pyc b/env/lib/python2.7/site-packages/pip/_internal/models/candidate.pyc deleted file mode 100644 index 41299b7a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/models/candidate.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/format_control.py b/env/lib/python2.7/site-packages/pip/_internal/models/format_control.py deleted file mode 100644 index 53138e48..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/models/format_control.py +++ /dev/null @@ -1,73 +0,0 @@ -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Optional, Set, FrozenSet - - -class FormatControl(object): - """Helper for managing formats from which a package can be installed. 
- """ - - def __init__(self, no_binary=None, only_binary=None): - # type: (Optional[Set], Optional[Set]) -> None - if no_binary is None: - no_binary = set() - if only_binary is None: - only_binary = set() - - self.no_binary = no_binary - self.only_binary = only_binary - - def __eq__(self, other): - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - return "{}({}, {})".format( - self.__class__.__name__, - self.no_binary, - self.only_binary - ) - - @staticmethod - def handle_mutual_excludes(value, target, other): - # type: (str, Optional[Set], Optional[Set]) -> None - new = value.split(',') - while ':all:' in new: - other.clear() - target.clear() - target.add(':all:') - del new[:new.index(':all:') + 1] - # Without a none, we want to discard everything as :all: covers it - if ':none:' not in new: - return - for name in new: - if name == ':none:': - target.clear() - continue - name = canonicalize_name(name) - other.discard(name) - target.add(name) - - def get_allowed_formats(self, canonical_name): - # type: (str) -> FrozenSet - result = {"binary", "source"} - if canonical_name in self.only_binary: - result.discard('source') - elif canonical_name in self.no_binary: - result.discard('binary') - elif ':all:' in self.only_binary: - result.discard('source') - elif ':all:' in self.no_binary: - result.discard('binary') - return frozenset(result) - - def disallow_binaries(self): - # type: () -> None - self.handle_mutual_excludes( - ':all:', self.no_binary, self.only_binary, - ) diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/format_control.pyc b/env/lib/python2.7/site-packages/pip/_internal/models/format_control.pyc deleted file mode 100644 index 06074044..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/models/format_control.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/index.py b/env/lib/python2.7/site-packages/pip/_internal/models/index.py deleted file mode 100644 index ead1efbd..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/models/index.py +++ /dev/null @@ -1,31 +0,0 @@ -from pip._vendor.six.moves.urllib import parse as urllib_parse - - -class PackageIndex(object): - """Represents a Package Index and provides easier access to endpoints - """ - - def __init__(self, url, file_storage_domain): - # type: (str, str) -> None - super(PackageIndex, self).__init__() - self.url = url - self.netloc = urllib_parse.urlsplit(url).netloc - self.simple_url = self._url_for_path('simple') - self.pypi_url = self._url_for_path('pypi') - - # This is part of a temporary hack used to block installs of PyPI - # packages which depend on external urls only necessary until PyPI can - # block such packages themselves - self.file_storage_domain = file_storage_domain - - def _url_for_path(self, path): - # type: (str) -> str - return urllib_parse.urljoin(self.url, path) - - -PyPI = PackageIndex( - 'https://pypi.org/', file_storage_domain='files.pythonhosted.org' -) -TestPyPI = PackageIndex( - 'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org' -) diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/index.pyc b/env/lib/python2.7/site-packages/pip/_internal/models/index.pyc deleted file mode 100644 index dcbcfafc..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/models/index.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/link.py 
b/env/lib/python2.7/site-packages/pip/_internal/models/link.py deleted file mode 100644 index d42be28c..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/models/link.py +++ /dev/null @@ -1,213 +0,0 @@ -import posixpath -import re - -from pip._vendor.six.moves.urllib import parse as urllib_parse - -from pip._internal.utils.misc import ( - WHEEL_EXTENSION, path_to_url, redact_password_from_url, - split_auth_from_netloc, splitext, -) -from pip._internal.utils.models import KeyBasedCompareMixin -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Optional, Text, Tuple, Union - from pip._internal.index import HTMLPage - from pip._internal.utils.hashes import Hashes - - -class Link(KeyBasedCompareMixin): - """Represents a parsed link from a Package Index's simple URL - """ - - def __init__( - self, - url, # type: str - comes_from=None, # type: Optional[Union[str, HTMLPage]] - requires_python=None, # type: Optional[str] - yanked_reason=None, # type: Optional[Text] - ): - # type: (...) -> None - """ - :param url: url of the resource pointed to (href of the link) - :param comes_from: instance of HTMLPage where the link was found, - or string. - :param requires_python: String containing the `Requires-Python` - metadata field, specified in PEP 345. This may be specified by - a data-requires-python attribute in the HTML link tag, as - described in PEP 503. - :param yanked_reason: the reason the file has been yanked, if the - file has been yanked, or None if the file hasn't been yanked. - This is the value of the "data-yanked" attribute, if present, in - a simple repository HTML link. If the file has been yanked but - no reason was provided, this should be the empty string. See - PEP 592 for more information and the specification. - """ - - # url can be a UNC windows share - if url.startswith('\\\\'): - url = path_to_url(url) - - self._parsed_url = urllib_parse.urlsplit(url) - # Store the url as a private attribute to prevent accidentally - # trying to set a new value. - self._url = url - - self.comes_from = comes_from - self.requires_python = requires_python if requires_python else None - self.yanked_reason = yanked_reason - - super(Link, self).__init__(key=url, defining_class=Link) - - def __str__(self): - if self.requires_python: - rp = ' (requires-python:%s)' % self.requires_python - else: - rp = '' - if self.comes_from: - return '%s (from %s)%s' % (redact_password_from_url(self._url), - self.comes_from, rp) - else: - return redact_password_from_url(str(self._url)) - - def __repr__(self): - return '' % self - - @property - def url(self): - # type: () -> str - return self._url - - @property - def filename(self): - # type: () -> str - path = self.path.rstrip('/') - name = posixpath.basename(path) - if not name: - # Make sure we don't leak auth information if the netloc - # includes a username and password. - netloc, user_pass = split_auth_from_netloc(self.netloc) - return netloc - - name = urllib_parse.unquote(name) - assert name, ('URL %r produced no filename' % self._url) - return name - - @property - def scheme(self): - # type: () -> str - return self._parsed_url.scheme - - @property - def netloc(self): - # type: () -> str - """ - This can contain auth information. 
- """ - return self._parsed_url.netloc - - @property - def path(self): - # type: () -> str - return urllib_parse.unquote(self._parsed_url.path) - - def splitext(self): - # type: () -> Tuple[str, str] - return splitext(posixpath.basename(self.path.rstrip('/'))) - - @property - def ext(self): - # type: () -> str - return self.splitext()[1] - - @property - def url_without_fragment(self): - # type: () -> str - scheme, netloc, path, query, fragment = self._parsed_url - return urllib_parse.urlunsplit((scheme, netloc, path, query, None)) - - _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)') - - @property - def egg_fragment(self): - # type: () -> Optional[str] - match = self._egg_fragment_re.search(self._url) - if not match: - return None - return match.group(1) - - _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)') - - @property - def subdirectory_fragment(self): - # type: () -> Optional[str] - match = self._subdirectory_fragment_re.search(self._url) - if not match: - return None - return match.group(1) - - _hash_re = re.compile( - r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)' - ) - - @property - def hash(self): - # type: () -> Optional[str] - match = self._hash_re.search(self._url) - if match: - return match.group(2) - return None - - @property - def hash_name(self): - # type: () -> Optional[str] - match = self._hash_re.search(self._url) - if match: - return match.group(1) - return None - - @property - def show_url(self): - # type: () -> Optional[str] - return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0]) - - @property - def is_wheel(self): - # type: () -> bool - return self.ext == WHEEL_EXTENSION - - @property - def is_artifact(self): - # type: () -> bool - """ - Determines if this points to an actual artifact (e.g. a tarball) or if - it points to an "abstract" thing like a path or a VCS location. - """ - from pip._internal.vcs import vcs - - if self.scheme in vcs.all_schemes: - return False - - return True - - @property - def is_yanked(self): - # type: () -> bool - return self.yanked_reason is not None - - @property - def has_hash(self): - return self.hash_name is not None - - def is_hash_allowed(self, hashes): - # type: (Optional[Hashes]) -> bool - """ - Return True if the link has a hash and it is allowed. - """ - if hashes is None or not self.has_hash: - return False - # Assert non-None so mypy knows self.hash_name and self.hash are str. 
- assert self.hash_name is not None - assert self.hash is not None - - return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash) diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/link.pyc b/env/lib/python2.7/site-packages/pip/_internal/models/link.pyc deleted file mode 100644 index cfc8ef9c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/models/link.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/search_scope.py b/env/lib/python2.7/site-packages/pip/_internal/models/search_scope.py deleted file mode 100644 index 62152449..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/models/search_scope.py +++ /dev/null @@ -1,113 +0,0 @@ -import itertools -import logging -import os -import posixpath - -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.six.moves.urllib import parse as urllib_parse - -from pip._internal.models.index import PyPI -from pip._internal.utils.compat import HAS_TLS -from pip._internal.utils.misc import normalize_path, redact_password_from_url -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import List - - -logger = logging.getLogger(__name__) - - -class SearchScope(object): - - """ - Encapsulates the locations that pip is configured to search. - """ - - @classmethod - def create( - cls, - find_links, # type: List[str] - index_urls, # type: List[str] - ): - # type: (...) -> SearchScope - """ - Create a SearchScope object after normalizing the `find_links`. - """ - # Build find_links. If an argument starts with ~, it may be - # a local file relative to a home directory. So try normalizing - # it and if it exists, use the normalized version. - # This is deliberately conservative - it might be fine just to - # blindly normalize anything starting with a ~... - built_find_links = [] # type: List[str] - for link in find_links: - if link.startswith('~'): - new_link = normalize_path(link) - if os.path.exists(new_link): - link = new_link - built_find_links.append(link) - - # If we don't have TLS enabled, then WARN if anyplace we're looking - # relies on TLS. - if not HAS_TLS: - for link in itertools.chain(index_urls, built_find_links): - parsed = urllib_parse.urlparse(link) - if parsed.scheme == 'https': - logger.warning( - 'pip is configured with locations that require ' - 'TLS/SSL, however the ssl module in Python is not ' - 'available.' - ) - break - - return cls( - find_links=built_find_links, - index_urls=index_urls, - ) - - def __init__( - self, - find_links, # type: List[str] - index_urls, # type: List[str] - ): - # type: (...) 
-> None - self.find_links = find_links - self.index_urls = index_urls - - def get_formatted_locations(self): - # type: () -> str - lines = [] - if self.index_urls and self.index_urls != [PyPI.simple_url]: - lines.append( - 'Looking in indexes: {}'.format(', '.join( - redact_password_from_url(url) for url in self.index_urls)) - ) - if self.find_links: - lines.append( - 'Looking in links: {}'.format(', '.join( - redact_password_from_url(url) for url in self.find_links)) - ) - return '\n'.join(lines) - - def get_index_urls_locations(self, project_name): - # type: (str) -> List[str] - """Returns the locations found via self.index_urls - - Checks the url_name on the main (first in the list) index and - use this url_name to produce all locations - """ - - def mkurl_pypi_url(url): - loc = posixpath.join( - url, - urllib_parse.quote(canonicalize_name(project_name))) - # For maximum compatibility with easy_install, ensure the path - # ends in a trailing slash. Although this isn't in the spec - # (and PyPI can handle it without the slash) some other index - # implementations might break if they relied on easy_install's - # behavior. - if not loc.endswith('/'): - loc = loc + '/' - return loc - - return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/search_scope.pyc b/env/lib/python2.7/site-packages/pip/_internal/models/search_scope.pyc deleted file mode 100644 index e93bfe06..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/models/search_scope.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/selection_prefs.pyc b/env/lib/python2.7/site-packages/pip/_internal/models/selection_prefs.pyc deleted file mode 100644 index 73d6fec1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/models/selection_prefs.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/target_python.py b/env/lib/python2.7/site-packages/pip/_internal/models/target_python.py deleted file mode 100644 index a23b79c4..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/models/target_python.py +++ /dev/null @@ -1,106 +0,0 @@ -import sys - -from pip._internal.pep425tags import get_supported, version_info_to_nodot -from pip._internal.utils.misc import normalize_version_info -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import List, Optional, Tuple - from pip._internal.pep425tags import Pep425Tag - - -class TargetPython(object): - - """ - Encapsulates the properties of a Python interpreter one is targeting - for a package install, download, etc. - """ - - def __init__( - self, - platform=None, # type: Optional[str] - py_version_info=None, # type: Optional[Tuple[int, ...]] - abi=None, # type: Optional[str] - implementation=None, # type: Optional[str] - ): - # type: (...) -> None - """ - :param platform: A string or None. If None, searches for packages - that are supported by the current system. Otherwise, will find - packages that can be built on the platform passed in. These - packages will only be downloaded for distribution: they will - not be built locally. - :param py_version_info: An optional tuple of ints representing the - Python version information to use (e.g. `sys.version_info[:3]`). - This can have length 1, 2, or 3 when provided. - :param abi: A string or None. This is passed to pep425tags.py's - get_supported() function as is. - :param implementation: A string or None. 
This is passed to - pep425tags.py's get_supported() function as is. - """ - # Store the given py_version_info for when we call get_supported(). - self._given_py_version_info = py_version_info - - if py_version_info is None: - py_version_info = sys.version_info[:3] - else: - py_version_info = normalize_version_info(py_version_info) - - py_version = '.'.join(map(str, py_version_info[:2])) - - self.abi = abi - self.implementation = implementation - self.platform = platform - self.py_version = py_version - self.py_version_info = py_version_info - - # This is used to cache the return value of get_tags(). - self._valid_tags = None # type: Optional[List[Pep425Tag]] - - def format_given(self): - # type: () -> str - """ - Format the given, non-None attributes for display. - """ - display_version = None - if self._given_py_version_info is not None: - display_version = '.'.join( - str(part) for part in self._given_py_version_info - ) - - key_values = [ - ('platform', self.platform), - ('version_info', display_version), - ('abi', self.abi), - ('implementation', self.implementation), - ] - return ' '.join( - '{}={!r}'.format(key, value) for key, value in key_values - if value is not None - ) - - def get_tags(self): - # type: () -> List[Pep425Tag] - """ - Return the supported PEP 425 tags to check wheel candidates against. - - The tags are returned in order of preference (most preferred first). - """ - if self._valid_tags is None: - # Pass versions=None if no py_version_info was given since - # versions=None uses special default logic. - py_version_info = self._given_py_version_info - if py_version_info is None: - versions = None - else: - versions = [version_info_to_nodot(py_version_info)] - - tags = get_supported( - versions=versions, - platform=self.platform, - abi=self.abi, - impl=self.implementation, - ) - self._valid_tags = tags - - return self._valid_tags diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/target_python.pyc b/env/lib/python2.7/site-packages/pip/_internal/models/target_python.pyc deleted file mode 100644 index f21f5d83..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/models/target_python.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/operations/__init__.pyc b/env/lib/python2.7/site-packages/pip/_internal/operations/__init__.pyc deleted file mode 100644 index 38059607..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/operations/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/operations/check.py b/env/lib/python2.7/site-packages/pip/_internal/operations/check.py deleted file mode 100644 index 7b8b369f..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/operations/check.py +++ /dev/null @@ -1,159 +0,0 @@ -"""Validation of dependencies of packages -""" - -import logging -from collections import namedtuple - -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.pkg_resources import RequirementParseError - -from pip._internal.distributions import ( - make_distribution_for_install_requirement, -) -from pip._internal.utils.misc import get_installed_distributions -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -logger = logging.getLogger(__name__) - -if MYPY_CHECK_RUNNING: - from pip._internal.req.req_install import InstallRequirement - from typing import ( - Any, Callable, Dict, Optional, Set, Tuple, List - ) - - # Shorthands - PackageSet = Dict[str, 'PackageDetails'] - Missing = Tuple[str, Any] - 
Conflicting = Tuple[str, str, Any] - - MissingDict = Dict[str, List[Missing]] - ConflictingDict = Dict[str, List[Conflicting]] - CheckResult = Tuple[MissingDict, ConflictingDict] - -PackageDetails = namedtuple('PackageDetails', ['version', 'requires']) - - -def create_package_set_from_installed(**kwargs): - # type: (**Any) -> Tuple[PackageSet, bool] - """Converts a list of distributions into a PackageSet. - """ - # Default to using all packages installed on the system - if kwargs == {}: - kwargs = {"local_only": False, "skip": ()} - - package_set = {} - problems = False - for dist in get_installed_distributions(**kwargs): - name = canonicalize_name(dist.project_name) - try: - package_set[name] = PackageDetails(dist.version, dist.requires()) - except RequirementParseError as e: - # Don't crash on broken metadata - logging.warning("Error parsing requirements for %s: %s", name, e) - problems = True - return package_set, problems - - -def check_package_set(package_set, should_ignore=None): - # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult - """Check if a package set is consistent - - If should_ignore is passed, it should be a callable that takes a - package name and returns a boolean. - """ - if should_ignore is None: - def should_ignore(name): - return False - - missing = dict() - conflicting = dict() - - for package_name in package_set: - # Info about dependencies of package_name - missing_deps = set() # type: Set[Missing] - conflicting_deps = set() # type: Set[Conflicting] - - if should_ignore(package_name): - continue - - for req in package_set[package_name].requires: - name = canonicalize_name(req.project_name) # type: str - - # Check if it's missing - if name not in package_set: - missed = True - if req.marker is not None: - missed = req.marker.evaluate() - if missed: - missing_deps.add((name, req)) - continue - - # Check if there's a conflict - version = package_set[name].version # type: str - if not req.specifier.contains(version, prereleases=True): - conflicting_deps.add((name, version, req)) - - if missing_deps: - missing[package_name] = sorted(missing_deps, key=str) - if conflicting_deps: - conflicting[package_name] = sorted(conflicting_deps, key=str) - - return missing, conflicting - - -def check_install_conflicts(to_install): - # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult] - """For checking if the dependency graph would be consistent after \ - installing given requirements - """ - # Start from the current state - package_set, _ = create_package_set_from_installed() - # Install packages - would_be_installed = _simulate_installation_of(to_install, package_set) - - # Only warn about directly-dependent packages; create a whitelist of them - whitelist = _create_whitelist(would_be_installed, package_set) - - return ( - package_set, - check_package_set( - package_set, should_ignore=lambda name: name not in whitelist - ) - ) - - -def _simulate_installation_of(to_install, package_set): - # type: (List[InstallRequirement], PackageSet) -> Set[str] - """Computes the version of packages after installing to_install. 
- """ - - # Keep track of packages that were installed - installed = set() - - # Modify it as installing requirement_set would (assuming no errors) - for inst_req in to_install: - abstract_dist = make_distribution_for_install_requirement(inst_req) - dist = abstract_dist.get_pkg_resources_distribution() - - name = canonicalize_name(dist.key) - package_set[name] = PackageDetails(dist.version, dist.requires()) - - installed.add(name) - - return installed - - -def _create_whitelist(would_be_installed, package_set): - # type: (Set[str], PackageSet) -> Set[str] - packages_affected = set(would_be_installed) - - for package_name in package_set: - if package_name in packages_affected: - continue - - for req in package_set[package_name].requires: - if canonicalize_name(req.name) in packages_affected: - packages_affected.add(package_name) - break - - return packages_affected diff --git a/env/lib/python2.7/site-packages/pip/_internal/operations/check.pyc b/env/lib/python2.7/site-packages/pip/_internal/operations/check.pyc deleted file mode 100644 index 43df7bf3..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/operations/check.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/operations/freeze.py b/env/lib/python2.7/site-packages/pip/_internal/operations/freeze.py deleted file mode 100644 index 6f5a3dd9..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/operations/freeze.py +++ /dev/null @@ -1,253 +0,0 @@ -from __future__ import absolute_import - -import collections -import logging -import os -import re - -from pip._vendor import six -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.pkg_resources import RequirementParseError - -from pip._internal.exceptions import BadCommand, InstallationError -from pip._internal.req.constructors import ( - install_req_from_editable, install_req_from_line, -) -from pip._internal.req.req_file import COMMENT_RE -from pip._internal.utils.misc import ( - dist_is_editable, get_installed_distributions, -) -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( - Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union - ) - from pip._internal.cache import WheelCache - from pip._vendor.pkg_resources import ( - Distribution, Requirement - ) - - RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]] - - -logger = logging.getLogger(__name__) - - -def freeze( - requirement=None, # type: Optional[List[str]] - find_links=None, # type: Optional[List[str]] - local_only=None, # type: Optional[bool] - user_only=None, # type: Optional[bool] - paths=None, # type: Optional[List[str]] - skip_regex=None, # type: Optional[str] - isolated=False, # type: bool - wheel_cache=None, # type: Optional[WheelCache] - exclude_editable=False, # type: bool - skip=() # type: Container[str] -): - # type: (...) -> Iterator[str] - find_links = find_links or [] - skip_match = None - - if skip_regex: - skip_match = re.compile(skip_regex).search - - for link in find_links: - yield '-f %s' % link - installations = {} # type: Dict[str, FrozenRequirement] - for dist in get_installed_distributions(local_only=local_only, - skip=(), - user_only=user_only, - paths=paths): - try: - req = FrozenRequirement.from_dist(dist) - except RequirementParseError as exc: - # We include dist rather than dist.project_name because the - # dist string includes more information, like the version and - # location. 
We also include the exception message to aid - # troubleshooting. - logger.warning( - 'Could not generate requirement for distribution %r: %s', - dist, exc - ) - continue - if exclude_editable and req.editable: - continue - installations[req.name] = req - - if requirement: - # the options that don't get turned into an InstallRequirement - # should only be emitted once, even if the same option is in multiple - # requirements files, so we need to keep track of what has been emitted - # so that we don't emit it again if it's seen again - emitted_options = set() # type: Set[str] - # keep track of which files a requirement is in so that we can - # give an accurate warning if a requirement appears multiple times. - req_files = collections.defaultdict(list) # type: Dict[str, List[str]] - for req_file_path in requirement: - with open(req_file_path) as req_file: - for line in req_file: - if (not line.strip() or - line.strip().startswith('#') or - (skip_match and skip_match(line)) or - line.startswith(( - '-r', '--requirement', - '-Z', '--always-unzip', - '-f', '--find-links', - '-i', '--index-url', - '--pre', - '--trusted-host', - '--process-dependency-links', - '--extra-index-url'))): - line = line.rstrip() - if line not in emitted_options: - emitted_options.add(line) - yield line - continue - - if line.startswith('-e') or line.startswith('--editable'): - if line.startswith('-e'): - line = line[2:].strip() - else: - line = line[len('--editable'):].strip().lstrip('=') - line_req = install_req_from_editable( - line, - isolated=isolated, - wheel_cache=wheel_cache, - ) - else: - line_req = install_req_from_line( - COMMENT_RE.sub('', line).strip(), - isolated=isolated, - wheel_cache=wheel_cache, - ) - - if not line_req.name: - logger.info( - "Skipping line in requirement file [%s] because " - "it's not clear what it would install: %s", - req_file_path, line.strip(), - ) - logger.info( - " (add #egg=PackageName to the URL to avoid" - " this warning)" - ) - elif line_req.name not in installations: - # either it's not installed, or it is installed - # but has been processed already - if not req_files[line_req.name]: - logger.warning( - "Requirement file [%s] contains %s, but " - "package %r is not installed", - req_file_path, - COMMENT_RE.sub('', line).strip(), line_req.name - ) - else: - req_files[line_req.name].append(req_file_path) - else: - yield str(installations[line_req.name]).rstrip() - del installations[line_req.name] - req_files[line_req.name].append(req_file_path) - - # Warn about requirements that were included multiple times (in a - # single requirements file or in different requirements files). - for name, files in six.iteritems(req_files): - if len(files) > 1: - logger.warning("Requirement %s included multiple times [%s]", - name, ', '.join(sorted(set(files)))) - - yield( - '## The following requirements were added by ' - 'pip freeze:' - ) - for installation in sorted( - installations.values(), key=lambda x: x.name.lower()): - if canonicalize_name(installation.name) not in skip: - yield str(installation).rstrip() - - -def get_requirement_info(dist): - # type: (Distribution) -> RequirementInfo - """ - Compute and return values (req, editable, comments) for use in - FrozenRequirement.from_dist(). 
- """ - if not dist_is_editable(dist): - return (None, False, []) - - location = os.path.normcase(os.path.abspath(dist.location)) - - from pip._internal.vcs import vcs, RemoteNotFoundError - vcs_backend = vcs.get_backend_for_dir(location) - - if vcs_backend is None: - req = dist.as_requirement() - logger.debug( - 'No VCS found for editable requirement "%s" in: %r', req, - location, - ) - comments = [ - '# Editable install with no version control ({})'.format(req) - ] - return (location, True, comments) - - try: - req = vcs_backend.get_src_requirement(location, dist.project_name) - except RemoteNotFoundError: - req = dist.as_requirement() - comments = [ - '# Editable {} install with no remote ({})'.format( - type(vcs_backend).__name__, req, - ) - ] - return (location, True, comments) - - except BadCommand: - logger.warning( - 'cannot determine version of editable source in %s ' - '(%s command not found in path)', - location, - vcs_backend.name, - ) - return (None, True, []) - - except InstallationError as exc: - logger.warning( - "Error when trying to get requirement for VCS system %s, " - "falling back to uneditable format", exc - ) - else: - if req is not None: - return (req, True, []) - - logger.warning( - 'Could not determine repository location of %s', location - ) - comments = ['## !! Could not determine repository location'] - - return (None, False, comments) - - -class FrozenRequirement(object): - def __init__(self, name, req, editable, comments=()): - # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None - self.name = name - self.req = req - self.editable = editable - self.comments = comments - - @classmethod - def from_dist(cls, dist): - # type: (Distribution) -> FrozenRequirement - req, editable, comments = get_requirement_info(dist) - if req is None: - req = dist.as_requirement() - - return cls(dist.project_name, req, editable, comments=comments) - - def __str__(self): - req = self.req - if self.editable: - req = '-e %s' % req - return '\n'.join(list(self.comments) + [str(req)]) + '\n' diff --git a/env/lib/python2.7/site-packages/pip/_internal/operations/freeze.pyc b/env/lib/python2.7/site-packages/pip/_internal/operations/freeze.pyc deleted file mode 100644 index 6d04011a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/operations/freeze.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/operations/prepare.py b/env/lib/python2.7/site-packages/pip/_internal/operations/prepare.py deleted file mode 100644 index 6cf5f0ed..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/operations/prepare.py +++ /dev/null @@ -1,287 +0,0 @@ -"""Prepares a distribution for installation -""" - -import logging -import os - -from pip._vendor import requests - -from pip._internal.distributions import ( - make_distribution_for_install_requirement, -) -from pip._internal.distributions.installed import InstalledDistribution -from pip._internal.download import ( - is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path, -) -from pip._internal.exceptions import ( - DirectoryUrlHashUnsupported, HashUnpinned, InstallationError, - PreviousBuildDirError, VcsHashUnsupported, -) -from pip._internal.utils.compat import expanduser -from pip._internal.utils.hashes import MissingHashes -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import display_path, normalize_path -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Optional - - from 
pip._internal.distributions import AbstractDistribution - from pip._internal.download import PipSession - from pip._internal.index import PackageFinder - from pip._internal.req.req_install import InstallRequirement - from pip._internal.req.req_tracker import RequirementTracker - -logger = logging.getLogger(__name__) - - -class RequirementPreparer(object): - """Prepares a Requirement - """ - - def __init__( - self, - build_dir, # type: str - download_dir, # type: Optional[str] - src_dir, # type: str - wheel_download_dir, # type: Optional[str] - progress_bar, # type: str - build_isolation, # type: bool - req_tracker # type: RequirementTracker - ): - # type: (...) -> None - super(RequirementPreparer, self).__init__() - - self.src_dir = src_dir - self.build_dir = build_dir - self.req_tracker = req_tracker - - # Where still packed archives should be written to. If None, they are - # not saved, and are deleted immediately after unpacking. - self.download_dir = download_dir - - # Where still-packed .whl files should be written to. If None, they are - # written to the download_dir parameter. Separate to download_dir to - # permit only keeping wheel archives for pip wheel. - if wheel_download_dir: - wheel_download_dir = normalize_path(wheel_download_dir) - self.wheel_download_dir = wheel_download_dir - - # NOTE - # download_dir and wheel_download_dir overlap semantically and may - # be combined if we're willing to have non-wheel archives present in - # the wheelhouse output by 'pip wheel'. - - self.progress_bar = progress_bar - - # Is build isolation allowed? - self.build_isolation = build_isolation - - @property - def _download_should_save(self): - # type: () -> bool - # TODO: Modify to reduce indentation needed - if self.download_dir: - self.download_dir = expanduser(self.download_dir) - if os.path.exists(self.download_dir): - return True - else: - logger.critical('Could not find download directory') - raise InstallationError( - "Could not find or access download directory '%s'" - % display_path(self.download_dir)) - return False - - def prepare_linked_requirement( - self, - req, # type: InstallRequirement - session, # type: PipSession - finder, # type: PackageFinder - upgrade_allowed, # type: bool - require_hashes # type: bool - ): - # type: (...) -> AbstractDistribution - """Prepare a requirement that would be obtained from req.link - """ - # TODO: Breakup into smaller functions - if req.link and req.link.scheme == 'file': - path = url_to_path(req.link.url) - logger.info('Processing %s', display_path(path)) - else: - logger.info('Collecting %s', req) - - with indent_log(): - # @@ if filesystem packages are not marked - # editable in a req, a non deterministic error - # occurs when the script attempts to unpack the - # build directory - req.ensure_has_source_dir(self.build_dir) - # If a checkout exists, it's unwise to keep going. version - # inconsistencies are logged later, but do not fail the - # installation. - # FIXME: this won't upgrade when there's an existing - # package unpacked in `req.source_dir` - # package unpacked in `req.source_dir` - if os.path.exists(os.path.join(req.source_dir, 'setup.py')): - raise PreviousBuildDirError( - "pip can't proceed with requirements '%s' due to a" - " pre-existing build directory (%s). This is " - "likely due to a previous installation that failed" - ". pip is being responsible and not assuming it " - "can delete this. Please delete it and try again." 
- % (req, req.source_dir) - ) - req.populate_link(finder, upgrade_allowed, require_hashes) - - # We can't hit this spot and have populate_link return None. - # req.satisfied_by is None here (because we're - # guarded) and upgrade has no impact except when satisfied_by - # is not None. - # Then inside find_requirement existing_applicable -> False - # If no new versions are found, DistributionNotFound is raised, - # otherwise a result is guaranteed. - assert req.link - link = req.link - - # Now that we have the real link, we can tell what kind of - # requirements we have and raise some more informative errors - # than otherwise. (For example, we can raise VcsHashUnsupported - # for a VCS URL rather than HashMissing.) - if require_hashes: - # We could check these first 2 conditions inside - # unpack_url and save repetition of conditions, but then - # we would report less-useful error messages for - # unhashable requirements, complaining that there's no - # hash provided. - if is_vcs_url(link): - raise VcsHashUnsupported() - elif is_file_url(link) and is_dir_url(link): - raise DirectoryUrlHashUnsupported() - if not req.original_link and not req.is_pinned: - # Unpinned packages are asking for trouble when a new - # version is uploaded. This isn't a security check, but - # it saves users a surprising hash mismatch in the - # future. - # - # file:/// URLs aren't pinnable, so don't complain - # about them not being pinned. - raise HashUnpinned() - - hashes = req.hashes(trust_internet=not require_hashes) - if require_hashes and not hashes: - # Known-good hashes are missing for this requirement, so - # shim it with a facade object that will provoke hash - # computation and then raise a HashMissing exception - # showing the user what the hash should be. - hashes = MissingHashes() - - try: - download_dir = self.download_dir - # We always delete unpacked sdists after pip ran. - autodelete_unpacked = True - if req.link.is_wheel and self.wheel_download_dir: - # when doing 'pip wheel` we download wheels to a - # dedicated dir. - download_dir = self.wheel_download_dir - if req.link.is_wheel: - if download_dir: - # When downloading, we only unpack wheels to get - # metadata. - autodelete_unpacked = True - else: - # When installing a wheel, we use the unpacked - # wheel. - autodelete_unpacked = False - unpack_url( - req.link, req.source_dir, - download_dir, autodelete_unpacked, - session=session, hashes=hashes, - progress_bar=self.progress_bar - ) - except requests.HTTPError as exc: - logger.critical( - 'Could not install requirement %s because of error %s', - req, - exc, - ) - raise InstallationError( - 'Could not install requirement %s because of HTTP ' - 'error %s for URL %s' % - (req, exc, req.link) - ) - abstract_dist = make_distribution_for_install_requirement(req) - with self.req_tracker.track(req): - abstract_dist.prepare_distribution_metadata( - finder, self.build_isolation, - ) - if self._download_should_save: - # Make a .zip of the source_dir we already created. - if not req.link.is_artifact: - req.archive(self.download_dir) - return abstract_dist - - def prepare_editable_requirement( - self, - req, # type: InstallRequirement - require_hashes, # type: bool - use_user_site, # type: bool - finder # type: PackageFinder - ): - # type: (...) 
-> AbstractDistribution - """Prepare an editable requirement - """ - assert req.editable, "cannot prepare a non-editable req as editable" - - logger.info('Obtaining %s', req) - - with indent_log(): - if require_hashes: - raise InstallationError( - 'The editable requirement %s cannot be installed when ' - 'requiring hashes, because there is no single file to ' - 'hash.' % req - ) - req.ensure_has_source_dir(self.src_dir) - req.update_editable(not self._download_should_save) - - abstract_dist = make_distribution_for_install_requirement(req) - with self.req_tracker.track(req): - abstract_dist.prepare_distribution_metadata( - finder, self.build_isolation, - ) - - if self._download_should_save: - req.archive(self.download_dir) - req.check_if_exists(use_user_site) - - return abstract_dist - - def prepare_installed_requirement( - self, - req, # type: InstallRequirement - require_hashes, # type: bool - skip_reason # type: str - ): - # type: (...) -> AbstractDistribution - """Prepare an already-installed requirement - """ - assert req.satisfied_by, "req should have been satisfied but isn't" - assert skip_reason is not None, ( - "did not get skip reason skipped but req.satisfied_by " - "is set to %r" % (req.satisfied_by,) - ) - logger.info( - 'Requirement %s: %s (%s)', - skip_reason, req, req.satisfied_by.version - ) - with indent_log(): - if require_hashes: - logger.debug( - 'Since it is already installed, we are trusting this ' - 'package without checking its hash. To ensure a ' - 'completely repeatable environment, install into an ' - 'empty virtualenv.' - ) - abstract_dist = InstalledDistribution(req) - - return abstract_dist diff --git a/env/lib/python2.7/site-packages/pip/_internal/operations/prepare.pyc b/env/lib/python2.7/site-packages/pip/_internal/operations/prepare.pyc deleted file mode 100644 index 43d7ee00..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/operations/prepare.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/pep425tags.py b/env/lib/python2.7/site-packages/pip/_internal/pep425tags.py deleted file mode 100644 index 07dc148e..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/pep425tags.py +++ /dev/null @@ -1,387 +0,0 @@ -"""Generate and work with PEP 425 Compatibility Tags.""" -from __future__ import absolute_import - -import distutils.util -import logging -import platform -import re -import sys -import sysconfig -import warnings -from collections import OrderedDict - -import pip._internal.utils.glibc -from pip._internal.utils.compat import get_extension_suffixes -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( - Tuple, Callable, List, Optional, Union, Dict - ) - - Pep425Tag = Tuple[str, str, str] - -logger = logging.getLogger(__name__) - -_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') - - -def get_config_var(var): - # type: (str) -> Optional[str] - try: - return sysconfig.get_config_var(var) - except IOError as e: # Issue #1074 - warnings.warn("{}".format(e), RuntimeWarning) - return None - - -def get_abbr_impl(): - # type: () -> str - """Return abbreviated implementation name.""" - if hasattr(sys, 'pypy_version_info'): - pyimpl = 'pp' - elif sys.platform.startswith('java'): - pyimpl = 'jy' - elif sys.platform == 'cli': - pyimpl = 'ip' - else: - pyimpl = 'cp' - return pyimpl - - -def version_info_to_nodot(version_info): - # type: (Tuple[int, ...]) -> str - # Only use up to the first two numbers. 
- return ''.join(map(str, version_info[:2])) - - -def get_impl_ver(): - # type: () -> str - """Return implementation version.""" - impl_ver = get_config_var("py_version_nodot") - if not impl_ver or get_abbr_impl() == 'pp': - impl_ver = ''.join(map(str, get_impl_version_info())) - return impl_ver - - -def get_impl_version_info(): - # type: () -> Tuple[int, ...] - """Return sys.version_info-like tuple for use in decrementing the minor - version.""" - if get_abbr_impl() == 'pp': - # as per https://github.com/pypa/pip/issues/2882 - # attrs exist only on pypy - return (sys.version_info[0], - sys.pypy_version_info.major, # type: ignore - sys.pypy_version_info.minor) # type: ignore - else: - return sys.version_info[0], sys.version_info[1] - - -def get_impl_tag(): - # type: () -> str - """ - Returns the Tag for this specific implementation. - """ - return "{}{}".format(get_abbr_impl(), get_impl_ver()) - - -def get_flag(var, fallback, expected=True, warn=True): - # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool - """Use a fallback method for determining SOABI flags if the needed config - var is unset or unavailable.""" - val = get_config_var(var) - if val is None: - if warn: - logger.debug("Config variable '%s' is unset, Python ABI tag may " - "be incorrect", var) - return fallback() - return val == expected - - -def get_abi_tag(): - # type: () -> Optional[str] - """Return the ABI tag based on SOABI (if available) or emulate SOABI - (CPython 2, PyPy).""" - soabi = get_config_var('SOABI') - impl = get_abbr_impl() - if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'): - d = '' - m = '' - u = '' - if get_flag('Py_DEBUG', - lambda: hasattr(sys, 'gettotalrefcount'), - warn=(impl == 'cp')): - d = 'd' - if get_flag('WITH_PYMALLOC', - lambda: impl == 'cp', - warn=(impl == 'cp')): - m = 'm' - if get_flag('Py_UNICODE_SIZE', - lambda: sys.maxunicode == 0x10ffff, - expected=4, - warn=(impl == 'cp' and - sys.version_info < (3, 3))) \ - and sys.version_info < (3, 3): - u = 'u' - abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) - elif soabi and soabi.startswith('cpython-'): - abi = 'cp' + soabi.split('-')[1] - elif soabi: - abi = soabi.replace('.', '_').replace('-', '_') - else: - abi = None - return abi - - -def _is_running_32bit(): - # type: () -> bool - return sys.maxsize == 2147483647 - - -def get_platform(): - # type: () -> str - """Return our platform name 'win32', 'linux_x86_64'""" - if sys.platform == 'darwin': - # distutils.util.get_platform() returns the release based on the value - # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may - # be significantly older than the user's current machine. - release, _, machine = platform.mac_ver() - split_ver = release.split('.') - - if machine == "x86_64" and _is_running_32bit(): - machine = "i386" - elif machine == "ppc64" and _is_running_32bit(): - machine = "ppc" - - return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine) - - # XXX remove distutils dependency - result = distutils.util.get_platform().replace('.', '_').replace('-', '_') - if result == "linux_x86_64" and _is_running_32bit(): - # 32 bit Python program (running on a 64 bit Linux): pip should only - # install and run 32 bit compiled extensions in that case. 
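The pep425tags helpers deleted above combine three pieces into a wheel compatibility tag: an interpreter tag (like `cp36`), an ABI tag (like `cp36m`), and a platform tag (like `win_amd64`). As a rough, CPython-only sketch of that triple using just the standard library (the names below are illustrative, not pip's):

```python
import sys
import sysconfig
import distutils.util

def rough_tag_triple():
    # Interpreter tag: 'cp' for CPython plus the major/minor digits, e.g. 'cp36'.
    impl = 'cp' + ''.join(map(str, sys.version_info[:2]))
    # ABI tag: derived from SOABI when available ('cpython-36m-...' -> 'cp36m'),
    # falling back to 'none' where SOABI is not exposed (e.g. some Windows builds).
    soabi = sysconfig.get_config_var('SOABI') or ''
    abi = 'cp' + soabi.split('-')[1] if soabi.startswith('cpython-') else 'none'
    # Platform tag: the normalised distutils platform, e.g. 'linux_x86_64' or 'win_amd64'.
    plat = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    return (impl, abi, plat)

print(rough_tag_triple())  # e.g. ('cp36', 'cp36m', 'win_amd64')
```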
- result = "linux_i686" - - return result - - -def is_manylinux1_compatible(): - # type: () -> bool - # Only Linux, and only x86-64 / i686 - if get_platform() not in {"linux_x86_64", "linux_i686"}: - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux1_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 5 uses glibc 2.5. - return pip._internal.utils.glibc.have_compatible_glibc(2, 5) - - -def is_manylinux2010_compatible(): - # type: () -> bool - # Only Linux, and only x86-64 / i686 - if get_platform() not in {"linux_x86_64", "linux_i686"}: - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux2010_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 6 uses glibc 2.12. - return pip._internal.utils.glibc.have_compatible_glibc(2, 12) - - -def get_darwin_arches(major, minor, machine): - # type: (int, int, str) -> List[str] - """Return a list of supported arches (including group arches) for - the given major, minor and machine architecture of an macOS machine. - """ - arches = [] - - def _supports_arch(major, minor, arch): - # type: (int, int, str) -> bool - # Looking at the application support for macOS versions in the chart - # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears - # our timeline looks roughly like: - # - # 10.0 - Introduces ppc support. - # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64 - # and x86_64 support is CLI only, and cannot be used for GUI - # applications. - # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications. - # 10.6 - Drops support for ppc64 - # 10.7 - Drops support for ppc - # - # Given that we do not know if we're installing a CLI or a GUI - # application, we must be conservative and assume it might be a GUI - # application and behave as if ppc64 and x86_64 support did not occur - # until 10.5. - # - # Note: The above information is taken from the "Application support" - # column in the chart not the "Processor support" since I believe - # that we care about what instruction sets an application can use - # not which processors the OS supports. - if arch == 'ppc': - return (major, minor) <= (10, 5) - if arch == 'ppc64': - return (major, minor) == (10, 5) - if arch == 'i386': - return (major, minor) >= (10, 4) - if arch == 'x86_64': - return (major, minor) >= (10, 5) - if arch in groups: - for garch in groups[arch]: - if _supports_arch(major, minor, garch): - return True - return False - - groups = OrderedDict([ - ("fat", ("i386", "ppc")), - ("intel", ("x86_64", "i386")), - ("fat64", ("x86_64", "ppc64")), - ("fat32", ("x86_64", "i386", "ppc")), - ]) # type: Dict[str, Tuple[str, ...]] - - if _supports_arch(major, minor, machine): - arches.append(machine) - - for garch in groups: - if machine in groups[garch] and _supports_arch(major, minor, garch): - arches.append(garch) - - arches.append('universal') - - return arches - - -def get_all_minor_versions_as_strings(version_info): - # type: (Tuple[int, ...]) -> List[str] - versions = [] - major = version_info[:-1] - # Support all previous minor Python versions. 
- for minor in range(version_info[-1], -1, -1): - versions.append(''.join(map(str, major + (minor,)))) - return versions - - -def get_supported( - versions=None, # type: Optional[List[str]] - noarch=False, # type: bool - platform=None, # type: Optional[str] - impl=None, # type: Optional[str] - abi=None # type: Optional[str] -): - # type: (...) -> List[Pep425Tag] - """Return a list of supported tags for each version specified in - `versions`. - - :param versions: a list of string versions, of the form ["33", "32"], - or None. The first version will be assumed to support our ABI. - :param platform: specify the exact platform you want valid - tags for, or None. If None, use the local system platform. - :param impl: specify the exact implementation you want valid - tags for, or None. If None, use the local interpreter impl. - :param abi: specify the exact abi you want valid - tags for, or None. If None, use the local interpreter abi. - """ - supported = [] - - # Versions must be given with respect to the preference - if versions is None: - version_info = get_impl_version_info() - versions = get_all_minor_versions_as_strings(version_info) - - impl = impl or get_abbr_impl() - - abis = [] # type: List[str] - - abi = abi or get_abi_tag() - if abi: - abis[0:0] = [abi] - - abi3s = set() - for suffix in get_extension_suffixes(): - if suffix.startswith('.abi'): - abi3s.add(suffix.split('.', 2)[1]) - - abis.extend(sorted(list(abi3s))) - - abis.append('none') - - if not noarch: - arch = platform or get_platform() - arch_prefix, arch_sep, arch_suffix = arch.partition('_') - if arch.startswith('macosx'): - # support macosx-10.6-intel on macosx-10.9-x86_64 - match = _osx_arch_pat.match(arch) - if match: - name, major, minor, actual_arch = match.groups() - tpl = '{}_{}_%i_%s'.format(name, major) - arches = [] - for m in reversed(range(int(minor) + 1)): - for a in get_darwin_arches(int(major), m, actual_arch): - arches.append(tpl % (m, a)) - else: - # arch pattern didn't match (?!) - arches = [arch] - elif arch_prefix == 'manylinux2010': - # manylinux1 wheels run on most manylinux2010 systems with the - # exception of wheels depending on ncurses. 
PEP 571 states - # manylinux1 wheels should be considered manylinux2010 wheels: - # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels - arches = [arch, 'manylinux1' + arch_sep + arch_suffix] - elif platform is None: - arches = [] - if is_manylinux2010_compatible(): - arches.append('manylinux2010' + arch_sep + arch_suffix) - if is_manylinux1_compatible(): - arches.append('manylinux1' + arch_sep + arch_suffix) - arches.append(arch) - else: - arches = [arch] - - # Current version, current API (built specifically for our Python): - for abi in abis: - for arch in arches: - supported.append(('%s%s' % (impl, versions[0]), abi, arch)) - - # abi3 modules compatible with older version of Python - for version in versions[1:]: - # abi3 was introduced in Python 3.2 - if version in {'31', '30'}: - break - for abi in abi3s: # empty set if not Python 3 - for arch in arches: - supported.append(("%s%s" % (impl, version), abi, arch)) - - # Has binaries, does not use the Python API: - for arch in arches: - supported.append(('py%s' % (versions[0][0]), 'none', arch)) - - # No abi / arch, but requires our implementation: - supported.append(('%s%s' % (impl, versions[0]), 'none', 'any')) - # Tagged specifically as being cross-version compatible - # (with just the major version specified) - supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) - - # No abi / arch, generic Python - for i, version in enumerate(versions): - supported.append(('py%s' % (version,), 'none', 'any')) - if i == 0: - supported.append(('py%s' % (version[0]), 'none', 'any')) - - return supported - - -implementation_tag = get_impl_tag() diff --git a/env/lib/python2.7/site-packages/pip/_internal/pep425tags.pyc b/env/lib/python2.7/site-packages/pip/_internal/pep425tags.pyc deleted file mode 100644 index c6347518..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/pep425tags.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/pyproject.pyc b/env/lib/python2.7/site-packages/pip/_internal/pyproject.pyc deleted file mode 100644 index 865b3863..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/pyproject.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/__init__.py b/env/lib/python2.7/site-packages/pip/_internal/req/__init__.py deleted file mode 100644 index c39f63fa..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/req/__init__.py +++ /dev/null @@ -1,78 +0,0 @@ -from __future__ import absolute_import - -import logging - -from .req_install import InstallRequirement -from .req_set import RequirementSet -from .req_file import parse_requirements -from pip._internal.utils.logging import indent_log -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Any, List, Sequence - -__all__ = [ - "RequirementSet", "InstallRequirement", - "parse_requirements", "install_given_reqs", -] - -logger = logging.getLogger(__name__) - - -def install_given_reqs( - to_install, # type: List[InstallRequirement] - install_options, # type: List[str] - global_options=(), # type: Sequence[str] - *args, # type: Any - **kwargs # type: Any -): - # type: (...) -> List[InstallRequirement] - """ - Install everything in the given list. 
- - (to be called after having downloaded and unpacked the packages) - """ - - if to_install: - logger.info( - 'Installing collected packages: %s', - ', '.join([req.name for req in to_install]), - ) - - with indent_log(): - for requirement in to_install: - if requirement.conflicts_with: - logger.info( - 'Found existing installation: %s', - requirement.conflicts_with, - ) - with indent_log(): - uninstalled_pathset = requirement.uninstall( - auto_confirm=True - ) - try: - requirement.install( - install_options, - global_options, - *args, - **kwargs - ) - except Exception: - should_rollback = ( - requirement.conflicts_with and - not requirement.install_succeeded - ) - # if install did not succeed, rollback previous uninstall - if should_rollback: - uninstalled_pathset.rollback() - raise - else: - should_commit = ( - requirement.conflicts_with and - requirement.install_succeeded - ) - if should_commit: - uninstalled_pathset.commit() - requirement.remove_temporary_source() - - return to_install diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/__init__.pyc b/env/lib/python2.7/site-packages/pip/_internal/req/__init__.pyc deleted file mode 100644 index ca10257b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/req/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/constructors.py b/env/lib/python2.7/site-packages/pip/_internal/req/constructors.py deleted file mode 100644 index cd0ab504..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/req/constructors.py +++ /dev/null @@ -1,349 +0,0 @@ -"""Backing implementation for InstallRequirement's various constructors - -The idea here is that these formed a major chunk of InstallRequirement's size -so, moving them and support code dedicated to them outside of that class -helps creates for better understandability for the rest of the code. - -These are meant to be used elsewhere within pip to create instances of -InstallRequirement. 
-""" - -import logging -import os -import re - -from pip._vendor.packaging.markers import Marker -from pip._vendor.packaging.requirements import InvalidRequirement, Requirement -from pip._vendor.packaging.specifiers import Specifier -from pip._vendor.pkg_resources import RequirementParseError, parse_requirements - -from pip._internal.download import is_archive_file, is_url, url_to_path -from pip._internal.exceptions import InstallationError -from pip._internal.models.index import PyPI, TestPyPI -from pip._internal.models.link import Link -from pip._internal.pyproject import make_pyproject_path -from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.misc import is_installable_dir, path_to_url -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.vcs import vcs -from pip._internal.wheel import Wheel - -if MYPY_CHECK_RUNNING: - from typing import ( - Any, Dict, Optional, Set, Tuple, Union, - ) - from pip._internal.cache import WheelCache - - -__all__ = [ - "install_req_from_editable", "install_req_from_line", - "parse_editable" -] - -logger = logging.getLogger(__name__) -operators = Specifier._operators.keys() - - -def _strip_extras(path): - # type: (str) -> Tuple[str, Optional[str]] - m = re.match(r'^(.+)(\[[^\]]+\])$', path) - extras = None - if m: - path_no_extras = m.group(1) - extras = m.group(2) - else: - path_no_extras = path - - return path_no_extras, extras - - -def parse_editable(editable_req): - # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]] - """Parses an editable requirement into: - - a requirement name - - an URL - - extras - - editable options - Accepted requirements: - svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir - .[some_extra] - """ - - url = editable_req - - # If a file path is specified with extras, strip off the extras. - url_no_extras, extras = _strip_extras(url) - - if os.path.isdir(url_no_extras): - if not os.path.exists(os.path.join(url_no_extras, 'setup.py')): - msg = ( - 'File "setup.py" not found. Directory cannot be installed ' - 'in editable mode: {}'.format(os.path.abspath(url_no_extras)) - ) - pyproject_path = make_pyproject_path(url_no_extras) - if os.path.isfile(pyproject_path): - msg += ( - '\n(A "pyproject.toml" file was found, but editable ' - 'mode currently requires a setup.py based build.)' - ) - raise InstallationError(msg) - - # Treating it as code that has already been checked out - url_no_extras = path_to_url(url_no_extras) - - if url_no_extras.lower().startswith('file:'): - package_name = Link(url_no_extras).egg_fragment - if extras: - return ( - package_name, - url_no_extras, - Requirement("placeholder" + extras.lower()).extras, - ) - else: - return package_name, url_no_extras, None - - for version_control in vcs: - if url.lower().startswith('%s:' % version_control): - url = '%s+%s' % (version_control, url) - break - - if '+' not in url: - raise InstallationError( - '{} is not a valid editable requirement. 
' - 'It should either be a path to a local project or a VCS URL ' - '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req) - ) - - vc_type = url.split('+', 1)[0].lower() - - if not vcs.get_backend(vc_type): - error_message = 'For --editable=%s only ' % editable_req + \ - ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \ - ' is currently supported' - raise InstallationError(error_message) - - package_name = Link(url).egg_fragment - if not package_name: - raise InstallationError( - "Could not detect requirement name for '%s', please specify one " - "with #egg=your_package_name" % editable_req - ) - return package_name, url, None - - -def deduce_helpful_msg(req): - # type: (str) -> str - """Returns helpful msg in case requirements file does not exist, - or cannot be parsed. - - :params req: Requirements file path - """ - msg = "" - if os.path.exists(req): - msg = " It does exist." - # Try to parse and check if it is a requirements file. - try: - with open(req, 'r') as fp: - # parse first line only - next(parse_requirements(fp.read())) - msg += " The argument you provided " + \ - "(%s) appears to be a" % (req) + \ - " requirements file. If that is the" + \ - " case, use the '-r' flag to install" + \ - " the packages specified within it." - except RequirementParseError: - logger.debug("Cannot parse '%s' as requirements \ - file" % (req), exc_info=True) - else: - msg += " File '%s' does not exist." % (req) - return msg - - -# ---- The actual constructors follow ---- - - -def install_req_from_editable( - editable_req, # type: str - comes_from=None, # type: Optional[str] - use_pep517=None, # type: Optional[bool] - isolated=False, # type: bool - options=None, # type: Optional[Dict[str, Any]] - wheel_cache=None, # type: Optional[WheelCache] - constraint=False # type: bool -): - # type: (...) -> InstallRequirement - name, url, extras_override = parse_editable(editable_req) - if url.startswith('file:'): - source_dir = url_to_path(url) - else: - source_dir = None - - if name is not None: - try: - req = Requirement(name) - except InvalidRequirement: - raise InstallationError("Invalid requirement: '%s'" % name) - else: - req = None - return InstallRequirement( - req, comes_from, source_dir=source_dir, - editable=True, - link=Link(url), - constraint=constraint, - use_pep517=use_pep517, - isolated=isolated, - options=options if options else {}, - wheel_cache=wheel_cache, - extras=extras_override or (), - ) - - -def install_req_from_line( - name, # type: str - comes_from=None, # type: Optional[Union[str, InstallRequirement]] - use_pep517=None, # type: Optional[bool] - isolated=False, # type: bool - options=None, # type: Optional[Dict[str, Any]] - wheel_cache=None, # type: Optional[WheelCache] - constraint=False, # type: bool - line_source=None, # type: Optional[str] -): - # type: (...) -> InstallRequirement - """Creates an InstallRequirement from a name, which might be a - requirement, directory containing 'setup.py', filename, or URL. - - :param line_source: An optional string describing where the line is from, - for logging purposes in case of an error. 
- """ - if is_url(name): - marker_sep = '; ' - else: - marker_sep = ';' - if marker_sep in name: - name, markers_as_string = name.split(marker_sep, 1) - markers_as_string = markers_as_string.strip() - if not markers_as_string: - markers = None - else: - markers = Marker(markers_as_string) - else: - markers = None - name = name.strip() - req_as_string = None - path = os.path.normpath(os.path.abspath(name)) - link = None - extras_as_string = None - - if is_url(name): - link = Link(name) - else: - p, extras_as_string = _strip_extras(path) - looks_like_dir = os.path.isdir(p) and ( - os.path.sep in name or - (os.path.altsep is not None and os.path.altsep in name) or - name.startswith('.') - ) - if looks_like_dir: - if not is_installable_dir(p): - raise InstallationError( - "Directory %r is not installable. Neither 'setup.py' " - "nor 'pyproject.toml' found." % name - ) - link = Link(path_to_url(p)) - elif is_archive_file(p): - if not os.path.isfile(p): - logger.warning( - 'Requirement %r looks like a filename, but the ' - 'file does not exist', - name - ) - link = Link(path_to_url(p)) - - # it's a local file, dir, or url - if link: - # Handle relative file URLs - if link.scheme == 'file' and re.search(r'\.\./', link.url): - link = Link( - path_to_url(os.path.normpath(os.path.abspath(link.path)))) - # wheel file - if link.is_wheel: - wheel = Wheel(link.filename) # can raise InvalidWheelFilename - req_as_string = "%s==%s" % (wheel.name, wheel.version) - else: - # set the req to the egg fragment. when it's not there, this - # will become an 'unnamed' requirement - req_as_string = link.egg_fragment - - # a requirement specifier - else: - req_as_string = name - - if extras_as_string: - extras = Requirement("placeholder" + extras_as_string.lower()).extras - else: - extras = () - if req_as_string is not None: - try: - req = Requirement(req_as_string) - except InvalidRequirement: - if os.path.sep in req_as_string: - add_msg = "It looks like a path." - add_msg += deduce_helpful_msg(req_as_string) - elif ('=' in req_as_string and - not any(op in req_as_string for op in operators)): - add_msg = "= is not a valid operator. Did you mean == ?" - else: - add_msg = '' - if line_source is None: - source = '' - else: - source = ' (from {})'.format(line_source) - msg = ( - 'Invalid requirement: {!r}{}'.format(req_as_string, source) - ) - if add_msg: - msg += '\nHint: {}'.format(add_msg) - raise InstallationError(msg) - else: - req = None - - return InstallRequirement( - req, comes_from, link=link, markers=markers, - use_pep517=use_pep517, isolated=isolated, - options=options if options else {}, - wheel_cache=wheel_cache, - constraint=constraint, - extras=extras, - ) - - -def install_req_from_req_string( - req_string, # type: str - comes_from=None, # type: Optional[InstallRequirement] - isolated=False, # type: bool - wheel_cache=None, # type: Optional[WheelCache] - use_pep517=None # type: Optional[bool] -): - # type: (...) 
-> InstallRequirement - try: - req = Requirement(req_string) - except InvalidRequirement: - raise InstallationError("Invalid requirement: '%s'" % req_string) - - domains_not_allowed = [ - PyPI.file_storage_domain, - TestPyPI.file_storage_domain, - ] - if (req.url and comes_from and comes_from.link and - comes_from.link.netloc in domains_not_allowed): - # Explicitly disallow pypi packages that depend on external urls - raise InstallationError( - "Packages installed from PyPI cannot depend on packages " - "which are not also hosted on PyPI.\n" - "%s depends on %s " % (comes_from.name, req) - ) - - return InstallRequirement( - req, comes_from, isolated=isolated, wheel_cache=wheel_cache, - use_pep517=use_pep517 - ) diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/constructors.pyc b/env/lib/python2.7/site-packages/pip/_internal/req/constructors.pyc deleted file mode 100644 index e6e2c2e9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/req/constructors.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_file.py b/env/lib/python2.7/site-packages/pip/_internal/req/req_file.py deleted file mode 100644 index 5a9920fe..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/req/req_file.py +++ /dev/null @@ -1,399 +0,0 @@ -""" -Requirements file parsing -""" - -from __future__ import absolute_import - -import optparse -import os -import re -import shlex -import sys - -from pip._vendor.six.moves import filterfalse -from pip._vendor.six.moves.urllib import parse as urllib_parse - -from pip._internal.cli import cmdoptions -from pip._internal.download import get_file_content -from pip._internal.exceptions import RequirementsFileParseError -from pip._internal.models.search_scope import SearchScope -from pip._internal.req.constructors import ( - install_req_from_editable, install_req_from_line, -) -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( - Any, Callable, Iterator, List, NoReturn, Optional, Text, Tuple, - ) - from pip._internal.req import InstallRequirement - from pip._internal.cache import WheelCache - from pip._internal.index import PackageFinder - from pip._internal.download import PipSession - - ReqFileLines = Iterator[Tuple[int, Text]] - -__all__ = ['parse_requirements'] - -SCHEME_RE = re.compile(r'^(http|https|file):', re.I) -COMMENT_RE = re.compile(r'(^|\s+)#.*$') - -# Matches environment variable-style values in '${MY_VARIABLE_1}' with the -# variable name consisting of only uppercase letters, digits or the '_' -# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1, -# 2013 Edition. 
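The comment above pins down the only substitution syntax the deleted requirements-file parser accepts: `${NAME}`, where the name is limited to uppercase letters, digits and underscores. A minimal, self-contained re-creation of the deleted `expand_env_variables` behaviour follows; the regex group names and the environment variable in the demo are my own, illustrative choices:

```python
import os
import re

# Matches '${MY_VARIABLE_1}'-style references; the variable name may only
# contain uppercase letters, digits and underscores, as described above.
env_var_re = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

def expand_env_variables(line):
    # Replace each reference with its value from os.environ; unset or empty
    # variables are left in place, mirroring the deleted code's behaviour.
    for var, name in env_var_re.findall(line):
        value = os.getenv(name)
        if value:
            line = line.replace(var, value)
    return line

os.environ['INDEX_HOST'] = 'pypi.example.org'  # hypothetical value for the demo
print(expand_env_variables('--extra-index-url https://${INDEX_HOST}/simple'))
# --extra-index-url https://pypi.example.org/simple
```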
-ENV_VAR_RE = re.compile(r'(?P\$\{(?P[A-Z0-9_]+)\})') - -SUPPORTED_OPTIONS = [ - cmdoptions.constraints, - cmdoptions.editable, - cmdoptions.requirements, - cmdoptions.no_index, - cmdoptions.index_url, - cmdoptions.find_links, - cmdoptions.extra_index_url, - cmdoptions.always_unzip, - cmdoptions.no_binary, - cmdoptions.only_binary, - cmdoptions.pre, - cmdoptions.trusted_host, - cmdoptions.require_hashes, -] # type: List[Callable[..., optparse.Option]] - -# options to be passed to requirements -SUPPORTED_OPTIONS_REQ = [ - cmdoptions.install_options, - cmdoptions.global_options, - cmdoptions.hash, -] # type: List[Callable[..., optparse.Option]] - -# the 'dest' string values -SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] - - -def parse_requirements( - filename, # type: str - finder=None, # type: Optional[PackageFinder] - comes_from=None, # type: Optional[str] - options=None, # type: Optional[optparse.Values] - session=None, # type: Optional[PipSession] - constraint=False, # type: bool - wheel_cache=None, # type: Optional[WheelCache] - use_pep517=None # type: Optional[bool] -): - # type: (...) -> Iterator[InstallRequirement] - """Parse a requirements file and yield InstallRequirement instances. - - :param filename: Path or url of requirements file. - :param finder: Instance of pip.index.PackageFinder. - :param comes_from: Origin description of requirements. - :param options: cli options. - :param session: Instance of pip.download.PipSession. - :param constraint: If true, parsing a constraint file rather than - requirements file. - :param wheel_cache: Instance of pip.wheel.WheelCache - :param use_pep517: Value of the --use-pep517 option. - """ - if session is None: - raise TypeError( - "parse_requirements() missing 1 required keyword argument: " - "'session'" - ) - - _, content = get_file_content( - filename, comes_from=comes_from, session=session - ) - - lines_enum = preprocess(content, options) - - for line_number, line in lines_enum: - req_iter = process_line(line, filename, line_number, finder, - comes_from, options, session, wheel_cache, - use_pep517=use_pep517, constraint=constraint) - for req in req_iter: - yield req - - -def preprocess(content, options): - # type: (Text, Optional[optparse.Values]) -> ReqFileLines - """Split, filter, and join lines, and return a line iterator - - :param content: the content of the requirements file - :param options: cli options - """ - lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines - lines_enum = join_lines(lines_enum) - lines_enum = ignore_comments(lines_enum) - lines_enum = skip_regex(lines_enum, options) - lines_enum = expand_env_variables(lines_enum) - return lines_enum - - -def process_line( - line, # type: Text - filename, # type: str - line_number, # type: int - finder=None, # type: Optional[PackageFinder] - comes_from=None, # type: Optional[str] - options=None, # type: Optional[optparse.Values] - session=None, # type: Optional[PipSession] - wheel_cache=None, # type: Optional[WheelCache] - use_pep517=None, # type: Optional[bool] - constraint=False, # type: bool -): - # type: (...) -> Iterator[InstallRequirement] - """Process a single requirements line; This can result in creating/yielding - requirements, or updating the finder. - - For lines that contain requirements, the only options that have an effect - are from SUPPORTED_OPTIONS_REQ, and they are scoped to the - requirement. Other options from SUPPORTED_OPTIONS may be present, but are - ignored. 
- - For lines that do not contain requirements, the only options that have an - effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may - be present, but are ignored. These lines may contain multiple options - (although our docs imply only one is supported), and all our parsed and - affect the finder. - - :param constraint: If True, parsing a constraints file. - :param options: OptionParser options that we may update - """ - parser = build_parser(line) - defaults = parser.get_default_values() - defaults.index_url = None - if finder: - defaults.format_control = finder.format_control - args_str, options_str = break_args_options(line) - # Prior to 2.7.3, shlex cannot deal with unicode entries - if sys.version_info < (2, 7, 3): - # https://github.com/python/mypy/issues/1174 - options_str = options_str.encode('utf8') # type: ignore - # https://github.com/python/mypy/issues/1174 - opts, _ = parser.parse_args( - shlex.split(options_str), defaults) # type: ignore - - # preserve for the nested code path - line_comes_from = '%s %s (line %s)' % ( - '-c' if constraint else '-r', filename, line_number, - ) - - # yield a line requirement - if args_str: - isolated = options.isolated_mode if options else False - if options: - cmdoptions.check_install_build_global(options, opts) - # get the options that apply to requirements - req_options = {} - for dest in SUPPORTED_OPTIONS_REQ_DEST: - if dest in opts.__dict__ and opts.__dict__[dest]: - req_options[dest] = opts.__dict__[dest] - line_source = 'line {} of {}'.format(line_number, filename) - yield install_req_from_line( - args_str, - comes_from=line_comes_from, - use_pep517=use_pep517, - isolated=isolated, - options=req_options, - wheel_cache=wheel_cache, - constraint=constraint, - line_source=line_source, - ) - - # yield an editable requirement - elif opts.editables: - isolated = options.isolated_mode if options else False - yield install_req_from_editable( - opts.editables[0], comes_from=line_comes_from, - use_pep517=use_pep517, - constraint=constraint, isolated=isolated, wheel_cache=wheel_cache - ) - - # parse a nested requirements file - elif opts.requirements or opts.constraints: - if opts.requirements: - req_path = opts.requirements[0] - nested_constraint = False - else: - req_path = opts.constraints[0] - nested_constraint = True - # original file is over http - if SCHEME_RE.search(filename): - # do a url join so relative paths work - req_path = urllib_parse.urljoin(filename, req_path) - # original file and nested file are paths - elif not SCHEME_RE.search(req_path): - # do a join so relative paths work - req_path = os.path.join(os.path.dirname(filename), req_path) - # TODO: Why not use `comes_from='-r {} (line {})'` here as well? - parsed_reqs = parse_requirements( - req_path, finder, comes_from, options, session, - constraint=nested_constraint, wheel_cache=wheel_cache - ) - for req in parsed_reqs: - yield req - - # percolate hash-checking option upward - elif opts.require_hashes: - options.require_hashes = opts.require_hashes - - # set finder options - elif finder: - find_links = finder.find_links - index_urls = finder.index_urls - if opts.index_url: - index_urls = [opts.index_url] - if opts.no_index is True: - index_urls = [] - if opts.extra_index_urls: - index_urls.extend(opts.extra_index_urls) - if opts.find_links: - # FIXME: it would be nice to keep track of the source - # of the find_links: support a find-links local path - # relative to a requirements file. 
- value = opts.find_links[0] - req_dir = os.path.dirname(os.path.abspath(filename)) - relative_to_reqs_file = os.path.join(req_dir, value) - if os.path.exists(relative_to_reqs_file): - value = relative_to_reqs_file - find_links.append(value) - - search_scope = SearchScope( - find_links=find_links, - index_urls=index_urls, - ) - finder.search_scope = search_scope - - if opts.pre: - finder.set_allow_all_prereleases() - for host in opts.trusted_hosts or []: - source = 'line {} of {}'.format(line_number, filename) - finder.add_trusted_host(host, source=source) - - -def break_args_options(line): - # type: (Text) -> Tuple[str, Text] - """Break up the line into an args and options string. We only want to shlex - (and then optparse) the options, not the args. args can contain markers - which are corrupted by shlex. - """ - tokens = line.split(' ') - args = [] - options = tokens[:] - for token in tokens: - if token.startswith('-') or token.startswith('--'): - break - else: - args.append(token) - options.pop(0) - return ' '.join(args), ' '.join(options) # type: ignore - - -def build_parser(line): - # type: (Text) -> optparse.OptionParser - """ - Return a parser for parsing requirement lines - """ - parser = optparse.OptionParser(add_help_option=False) - - option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ - for option_factory in option_factories: - option = option_factory() - parser.add_option(option) - - # By default optparse sys.exits on parsing errors. We want to wrap - # that in our own exception. - def parser_exit(self, msg): - # type: (Any, str) -> NoReturn - # add offending line - msg = 'Invalid requirement: %s\n%s' % (line, msg) - raise RequirementsFileParseError(msg) - # NOTE: mypy disallows assigning to a method - # https://github.com/python/mypy/issues/2427 - parser.exit = parser_exit # type: ignore - - return parser - - -def join_lines(lines_enum): - # type: (ReqFileLines) -> ReqFileLines - """Joins a line ending in '\' with the previous line (except when following - comments). The joined line takes on the index of the first line. - """ - primary_line_number = None - new_line = [] # type: List[Text] - for line_number, line in lines_enum: - if not line.endswith('\\') or COMMENT_RE.match(line): - if COMMENT_RE.match(line): - # this ensures comments are always matched later - line = ' ' + line - if new_line: - new_line.append(line) - yield primary_line_number, ''.join(new_line) - new_line = [] - else: - yield line_number, line - else: - if not new_line: - primary_line_number = line_number - new_line.append(line.strip('\\')) - - # last line contains \ - if new_line: - yield primary_line_number, ''.join(new_line) - - # TODO: handle space after '\'. - - -def ignore_comments(lines_enum): - # type: (ReqFileLines) -> ReqFileLines - """ - Strips comments and filter empty lines. 
- """ - for line_number, line in lines_enum: - line = COMMENT_RE.sub('', line) - line = line.strip() - if line: - yield line_number, line - - -def skip_regex(lines_enum, options): - # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines - """ - Skip lines that match '--skip-requirements-regex' pattern - - Note: the regex pattern is only built once - """ - skip_regex = options.skip_requirements_regex if options else None - if skip_regex: - pattern = re.compile(skip_regex) - lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum) - return lines_enum - - -def expand_env_variables(lines_enum): - # type: (ReqFileLines) -> ReqFileLines - """Replace all environment variables that can be retrieved via `os.getenv`. - - The only allowed format for environment variables defined in the - requirement file is `${MY_VARIABLE_1}` to ensure two things: - - 1. Strings that contain a `$` aren't accidentally (partially) expanded. - 2. Ensure consistency across platforms for requirement files. - - These points are the result of a discussion on the `github pull - request #3514 `_. - - Valid characters in variable names follow the `POSIX standard - `_ and are limited - to uppercase letter, digits and the `_` (underscore). - """ - for line_number, line in lines_enum: - for env_var, var_name in ENV_VAR_RE.findall(line): - value = os.getenv(var_name) - if not value: - continue - - line = line.replace(env_var, value) - - yield line_number, line diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_file.pyc b/env/lib/python2.7/site-packages/pip/_internal/req/req_file.pyc deleted file mode 100644 index 23a5db43..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/req/req_file.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_install.py b/env/lib/python2.7/site-packages/pip/_internal/req/req_install.py deleted file mode 100644 index f5c93504..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/req/req_install.py +++ /dev/null @@ -1,1035 +0,0 @@ -from __future__ import absolute_import - -import logging -import os -import shutil -import sys -import sysconfig -import zipfile -from distutils.util import change_root - -from pip._vendor import pkg_resources, six -from pip._vendor.packaging.requirements import Requirement -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.packaging.version import Version -from pip._vendor.packaging.version import parse as parse_version -from pip._vendor.pep517.wrappers import Pep517HookCaller - -from pip._internal import wheel -from pip._internal.build_env import NoOpBuildEnvironment -from pip._internal.exceptions import InstallationError -from pip._internal.models.link import Link -from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path -from pip._internal.req.req_uninstall import UninstallPathSet -from pip._internal.utils.compat import native_str -from pip._internal.utils.hashes import Hashes -from pip._internal.utils.logging import indent_log -from pip._internal.utils.marker_files import PIP_DELETE_MARKER_FILENAME -from pip._internal.utils.misc import ( - _make_build_dir, ask_path_exists, backup_dir, call_subprocess, - display_path, dist_in_site_packages, dist_in_usersite, ensure_dir, - get_installed_version, redact_password_from_url, rmtree, -) -from pip._internal.utils.packaging import get_metadata -from pip._internal.utils.setuptools_build import make_setuptools_shim_args -from pip._internal.utils.temp_dir import TempDirectory -from 
pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import open_spinner -from pip._internal.utils.virtualenv import running_under_virtualenv -from pip._internal.vcs import vcs - -if MYPY_CHECK_RUNNING: - from typing import ( - Any, Dict, Iterable, List, Mapping, Optional, Sequence, Union, - ) - from pip._internal.build_env import BuildEnvironment - from pip._internal.cache import WheelCache - from pip._internal.index import PackageFinder - from pip._vendor.pkg_resources import Distribution - from pip._vendor.packaging.specifiers import SpecifierSet - from pip._vendor.packaging.markers import Marker - - -logger = logging.getLogger(__name__) - - -class InstallRequirement(object): - """ - Represents something that may be installed later on, may have information - about where to fetch the relevant requirement and also contains logic for - installing the said requirement. - """ - - def __init__( - self, - req, # type: Optional[Requirement] - comes_from, # type: Optional[Union[str, InstallRequirement]] - source_dir=None, # type: Optional[str] - editable=False, # type: bool - link=None, # type: Optional[Link] - update=True, # type: bool - markers=None, # type: Optional[Marker] - use_pep517=None, # type: Optional[bool] - isolated=False, # type: bool - options=None, # type: Optional[Dict[str, Any]] - wheel_cache=None, # type: Optional[WheelCache] - constraint=False, # type: bool - extras=() # type: Iterable[str] - ): - # type: (...) -> None - assert req is None or isinstance(req, Requirement), req - self.req = req - self.comes_from = comes_from - self.constraint = constraint - if source_dir is None: - self.source_dir = None # type: Optional[str] - else: - self.source_dir = os.path.normpath(os.path.abspath(source_dir)) - self.editable = editable - - self._wheel_cache = wheel_cache - if link is None and req and req.url: - # PEP 508 URL requirement - link = Link(req.url) - self.link = self.original_link = link - - if extras: - self.extras = extras - elif req: - self.extras = { - pkg_resources.safe_extra(extra) for extra in req.extras - } - else: - self.extras = set() - if markers is None and req: - markers = req.marker - self.markers = markers - - self._egg_info_path = None # type: Optional[str] - # This holds the pkg_resources.Distribution object if this requirement - # is already available: - self.satisfied_by = None - # This hold the pkg_resources.Distribution object if this requirement - # conflicts with another installed distribution: - self.conflicts_with = None - # Temporary build location - self._temp_build_dir = TempDirectory(kind="req-build") - # Used to store the global directory where the _temp_build_dir should - # have been created. Cf _correct_build_location method. - self._ideal_build_dir = None # type: Optional[str] - # True if the editable should be updated: - self.update = update - # Set to True after successful installation - self.install_succeeded = None # type: Optional[bool] - # UninstallPathSet of uninstalled distribution (for possible rollback) - self.uninstalled_pathset = None - self.options = options if options else {} - # Set to True after successful preparation of this requirement - self.prepared = False - self.is_direct = False - - self.isolated = isolated - self.build_env = NoOpBuildEnvironment() # type: BuildEnvironment - - # For PEP 517, the directory where we request the project metadata - # gets stored. We need this to pass to build_wheel, so the backend - # can ensure that the wheel matches the metadata (see the PEP for - # details). 
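For orientation, the `req` argument handled in this constructor is a plain PEP 508 parse; the name, extras, version specifier, environment marker and optional direct URL are already separated out. A quick look at that shape using the standalone `packaging` distribution (pip vendors its own copy of the same library):

```python
from packaging.requirements import Requirement

req = Requirement('requests[security]>=2.8.1; python_version >= "3"')
print(req.name)       # requests
print(req.extras)     # {'security'}
print(req.specifier)  # >=2.8.1
print(req.marker)     # python_version >= "3"
print(req.url)        # None (only set for 'name @ https://...' requirements)
```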
- self.metadata_directory = None # type: Optional[str] - - # The static build requirements (from pyproject.toml) - self.pyproject_requires = None # type: Optional[List[str]] - - # Build requirements that we will check are available - self.requirements_to_check = [] # type: List[str] - - # The PEP 517 backend we should use to build the project - self.pep517_backend = None # type: Optional[Pep517HookCaller] - - # Are we using PEP 517 for this requirement? - # After pyproject.toml has been loaded, the only valid values are True - # and False. Before loading, None is valid (meaning "use the default"). - # Setting an explicit value before loading pyproject.toml is supported, - # but after loading this flag should be treated as read only. - self.use_pep517 = use_pep517 - - def __str__(self): - # type: () -> str - if self.req: - s = str(self.req) - if self.link: - s += ' from %s' % redact_password_from_url(self.link.url) - elif self.link: - s = redact_password_from_url(self.link.url) - else: - s = '' - if self.satisfied_by is not None: - s += ' in %s' % display_path(self.satisfied_by.location) - if self.comes_from: - if isinstance(self.comes_from, six.string_types): - comes_from = self.comes_from # type: Optional[str] - else: - comes_from = self.comes_from.from_path() - if comes_from: - s += ' (from %s)' % comes_from - return s - - def __repr__(self): - # type: () -> str - return '<%s object: %s editable=%r>' % ( - self.__class__.__name__, str(self), self.editable) - - def format_debug(self): - # type: () -> str - """An un-tested helper for getting state, for debugging. - """ - attributes = vars(self) - names = sorted(attributes) - - state = ( - "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names) - ) - return '<{name} object: {{{state}}}>'.format( - name=self.__class__.__name__, - state=", ".join(state), - ) - - def populate_link(self, finder, upgrade, require_hashes): - # type: (PackageFinder, bool, bool) -> None - """Ensure that if a link can be found for this, that it is found. - - Note that self.link may still be None - if Upgrade is False and the - requirement is already installed. - - If require_hashes is True, don't use the wheel cache, because cached - wheels, always built locally, have different hashes than the files - downloaded from the index server and thus throw false hash mismatches. - Furthermore, cached wheels at present have undeterministic contents due - to file modification times. - """ - if self.link is None: - self.link = finder.find_requirement(self, upgrade) - if self._wheel_cache is not None and not require_hashes: - old_link = self.link - self.link = self._wheel_cache.get(self.link, self.name) - if old_link != self.link: - logger.debug('Using cached wheel link: %s', self.link) - - # Things that are valid for all kinds of requirements? - @property - def name(self): - # type: () -> Optional[str] - if self.req is None: - return None - return native_str(pkg_resources.safe_name(self.req.name)) - - @property - def specifier(self): - # type: () -> SpecifierSet - return self.req.specifier - - @property - def is_pinned(self): - # type: () -> bool - """Return whether I am pinned to an exact version. - - For example, some-package==1.2 is pinned; some-package>1.2 is not. 
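A pin, in the sense used by `is_pinned` above, is a specifier set containing exactly one clause whose operator is `==` or `===`. A standalone check in the same spirit (again using the `packaging` distribution; the helper name is mine):

```python
from packaging.specifiers import SpecifierSet

def is_pinned(specifiers):
    # Exactly one clause, and that clause must use '==' or '==='.
    specs = list(specifiers)
    return len(specs) == 1 and specs[0].operator in ('==', '===')

print(is_pinned(SpecifierSet('==1.2')))       # True
print(is_pinned(SpecifierSet('>1.2')))        # False
print(is_pinned(SpecifierSet('==1.2,<2.0')))  # False
```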
- """ - specifiers = self.specifier - return (len(specifiers) == 1 and - next(iter(specifiers)).operator in {'==', '==='}) - - @property - def installed_version(self): - # type: () -> Optional[str] - return get_installed_version(self.name) - - def match_markers(self, extras_requested=None): - # type: (Optional[Iterable[str]]) -> bool - if not extras_requested: - # Provide an extra to safely evaluate the markers - # without matching any extra - extras_requested = ('',) - if self.markers is not None: - return any( - self.markers.evaluate({'extra': extra}) - for extra in extras_requested) - else: - return True - - @property - def has_hash_options(self): - # type: () -> bool - """Return whether any known-good hashes are specified as options. - - These activate --require-hashes mode; hashes specified as part of a - URL do not. - - """ - return bool(self.options.get('hashes', {})) - - def hashes(self, trust_internet=True): - # type: (bool) -> Hashes - """Return a hash-comparer that considers my option- and URL-based - hashes to be known-good. - - Hashes in URLs--ones embedded in the requirements file, not ones - downloaded from an index server--are almost peers with ones from - flags. They satisfy --require-hashes (whether it was implicitly or - explicitly activated) but do not activate it. md5 and sha224 are not - allowed in flags, which should nudge people toward good algos. We - always OR all hashes together, even ones from URLs. - - :param trust_internet: Whether to trust URL-based (#md5=...) hashes - downloaded from the internet, as by populate_link() - - """ - good_hashes = self.options.get('hashes', {}).copy() - link = self.link if trust_internet else self.original_link - if link and link.hash: - good_hashes.setdefault(link.hash_name, []).append(link.hash) - return Hashes(good_hashes) - - def from_path(self): - # type: () -> Optional[str] - """Format a nice indicator to show where this "comes from" - """ - if self.req is None: - return None - s = str(self.req) - if self.comes_from: - if isinstance(self.comes_from, six.string_types): - comes_from = self.comes_from - else: - comes_from = self.comes_from.from_path() - if comes_from: - s += '->' + comes_from - return s - - def build_location(self, build_dir): - # type: (str) -> str - assert build_dir is not None - if self._temp_build_dir.path is not None: - return self._temp_build_dir.path - if self.req is None: - # for requirement via a path to a directory: the name of the - # package is not available yet so we create a temp directory - # Once run_egg_info will have run, we'll be able - # to fix it via _correct_build_location - # Some systems have /tmp as a symlink which confuses custom - # builds (such as numpy). Thus, we ensure that the real path - # is returned. - self._temp_build_dir.create() - self._ideal_build_dir = build_dir - - return self._temp_build_dir.path - if self.editable: - name = self.name.lower() - else: - name = self.name - # FIXME: Is there a better place to create the build_dir? (hg and bzr - # need this) - if not os.path.exists(build_dir): - logger.debug('Creating directory %s', build_dir) - _make_build_dir(build_dir) - return os.path.join(build_dir, name) - - def _correct_build_location(self): - # type: () -> None - """Move self._temp_build_dir to self._ideal_build_dir/self.req.name - - For some requirements (e.g. a path to a directory), the name of the - package is not available until we run egg_info, so the build_location - will return a temporary directory and store the _ideal_build_dir. 
- - This is only called by self.run_egg_info to fix the temporary build - directory. - """ - if self.source_dir is not None: - return - assert self.req is not None - assert self._temp_build_dir.path - assert (self._ideal_build_dir is not None and - self._ideal_build_dir.path) # type: ignore - old_location = self._temp_build_dir.path - self._temp_build_dir.path = None - - new_location = self.build_location(self._ideal_build_dir) - if os.path.exists(new_location): - raise InstallationError( - 'A package already exists in %s; please remove it to continue' - % display_path(new_location)) - logger.debug( - 'Moving package %s from %s to new location %s', - self, display_path(old_location), display_path(new_location), - ) - shutil.move(old_location, new_location) - self._temp_build_dir.path = new_location - self._ideal_build_dir = None - self.source_dir = os.path.normpath(os.path.abspath(new_location)) - self._egg_info_path = None - - # Correct the metadata directory, if it exists - if self.metadata_directory: - old_meta = self.metadata_directory - rel = os.path.relpath(old_meta, start=old_location) - new_meta = os.path.join(new_location, rel) - new_meta = os.path.normpath(os.path.abspath(new_meta)) - self.metadata_directory = new_meta - - def remove_temporary_source(self): - # type: () -> None - """Remove the source files from this requirement, if they are marked - for deletion""" - if self.source_dir and os.path.exists( - os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): - logger.debug('Removing source in %s', self.source_dir) - rmtree(self.source_dir) - self.source_dir = None - self._temp_build_dir.cleanup() - self.build_env.cleanup() - - def check_if_exists(self, use_user_site): - # type: (bool) -> bool - """Find an installed distribution that satisfies or conflicts - with this requirement, and set self.satisfied_by or - self.conflicts_with appropriately. - """ - if self.req is None: - return False - try: - # get_distribution() will resolve the entire list of requirements - # anyway, and we've already determined that we need the requirement - # in question, so strip the marker so that we don't try to - # evaluate it. - no_marker = Requirement(str(self.req)) - no_marker.marker = None - self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) - if self.editable and self.satisfied_by: - self.conflicts_with = self.satisfied_by - # when installing editables, nothing pre-existing should ever - # satisfy - self.satisfied_by = None - return True - except pkg_resources.DistributionNotFound: - return False - except pkg_resources.VersionConflict: - existing_dist = pkg_resources.get_distribution( - self.req.name - ) - if use_user_site: - if dist_in_usersite(existing_dist): - self.conflicts_with = existing_dist - elif (running_under_virtualenv() and - dist_in_site_packages(existing_dist)): - raise InstallationError( - "Will not install to the user site because it will " - "lack sys.path precedence to %s in %s" % - (existing_dist.project_name, existing_dist.location) - ) - else: - self.conflicts_with = existing_dist - return True - - # Things valid for wheels - @property - def is_wheel(self): - # type: () -> bool - if not self.link: - return False - return self.link.is_wheel - - def move_wheel_files( - self, - wheeldir, # type: str - root=None, # type: Optional[str] - home=None, # type: Optional[str] - prefix=None, # type: Optional[str] - warn_script_location=True, # type: bool - use_user_site=False, # type: bool - pycompile=True # type: bool - ): - # type: (...) 
-> None - wheel.move_wheel_files( - self.name, self.req, wheeldir, - user=use_user_site, - home=home, - root=root, - prefix=prefix, - pycompile=pycompile, - isolated=self.isolated, - warn_script_location=warn_script_location, - ) - - # Things valid for sdists - @property - def setup_py_dir(self): - # type: () -> str - return os.path.join( - self.source_dir, - self.link and self.link.subdirectory_fragment or '') - - @property - def setup_py_path(self): - # type: () -> str - assert self.source_dir, "No source dir for %s" % self - - setup_py = os.path.join(self.setup_py_dir, 'setup.py') - - # Python2 __file__ should not be unicode - if six.PY2 and isinstance(setup_py, six.text_type): - setup_py = setup_py.encode(sys.getfilesystemencoding()) - - return setup_py - - @property - def pyproject_toml_path(self): - # type: () -> str - assert self.source_dir, "No source dir for %s" % self - - return make_pyproject_path(self.setup_py_dir) - - def load_pyproject_toml(self): - # type: () -> None - """Load the pyproject.toml file. - - After calling this routine, all of the attributes related to PEP 517 - processing for this requirement have been set. In particular, the - use_pep517 attribute can be used to determine whether we should - follow the PEP 517 or legacy (setup.py) code path. - """ - pyproject_toml_data = load_pyproject_toml( - self.use_pep517, - self.pyproject_toml_path, - self.setup_py_path, - str(self) - ) - - self.use_pep517 = (pyproject_toml_data is not None) - - if not self.use_pep517: - return - - requires, backend, check = pyproject_toml_data - self.requirements_to_check = check - self.pyproject_requires = requires - self.pep517_backend = Pep517HookCaller(self.setup_py_dir, backend) - - # Use a custom function to call subprocesses - self.spin_message = "" - - def runner( - cmd, # type: List[str] - cwd=None, # type: Optional[str] - extra_environ=None # type: Optional[Mapping[str, Any]] - ): - # type: (...) -> None - with open_spinner(self.spin_message) as spinner: - call_subprocess( - cmd, - cwd=cwd, - extra_environ=extra_environ, - spinner=spinner - ) - self.spin_message = "" - - self.pep517_backend._subprocess_runner = runner - - def prepare_metadata(self): - # type: () -> None - """Ensure that project metadata is available. - - Under PEP 517, call the backend hook to prepare the metadata. - Under legacy processing, call setup.py egg-info. - """ - assert self.source_dir - - with indent_log(): - if self.use_pep517: - self.prepare_pep517_metadata() - else: - self.run_egg_info() - - if not self.req: - if isinstance(parse_version(self.metadata["Version"]), Version): - op = "==" - else: - op = "===" - self.req = Requirement( - "".join([ - self.metadata["Name"], - op, - self.metadata["Version"], - ]) - ) - self._correct_build_location() - else: - metadata_name = canonicalize_name(self.metadata["Name"]) - if canonicalize_name(self.req.name) != metadata_name: - logger.warning( - 'Generating metadata for package %s ' - 'produced metadata for project name %s. Fix your ' - '#egg=%s fragments.', - self.name, metadata_name, self.name - ) - self.req = Requirement(metadata_name) - - def prepare_pep517_metadata(self): - # type: () -> None - assert self.pep517_backend is not None - - metadata_dir = os.path.join( - self.setup_py_dir, - 'pip-wheel-metadata' - ) - ensure_dir(metadata_dir) - - with self.build_env: - # Note that Pep517HookCaller implements a fallback for - # prepare_metadata_for_build_wheel, so we don't have to - # consider the possibility that this hook doesn't exist. 
- backend = self.pep517_backend - self.spin_message = "Preparing wheel metadata" - distinfo_dir = backend.prepare_metadata_for_build_wheel( - metadata_dir - ) - - self.metadata_directory = os.path.join(metadata_dir, distinfo_dir) - - def run_egg_info(self): - # type: () -> None - if self.name: - logger.debug( - 'Running setup.py (path:%s) egg_info for package %s', - self.setup_py_path, self.name, - ) - else: - logger.debug( - 'Running setup.py (path:%s) egg_info for package from %s', - self.setup_py_path, self.link, - ) - base_cmd = make_setuptools_shim_args(self.setup_py_path) - if self.isolated: - base_cmd += ["--no-user-cfg"] - egg_info_cmd = base_cmd + ['egg_info'] - # We can't put the .egg-info files at the root, because then the - # source code will be mistaken for an installed egg, causing - # problems - if self.editable: - egg_base_option = [] # type: List[str] - else: - egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info') - ensure_dir(egg_info_dir) - egg_base_option = ['--egg-base', 'pip-egg-info'] - with self.build_env: - call_subprocess( - egg_info_cmd + egg_base_option, - cwd=self.setup_py_dir, - command_desc='python setup.py egg_info') - - @property - def egg_info_path(self): - # type: () -> str - if self._egg_info_path is None: - if self.editable: - base = self.source_dir - else: - base = os.path.join(self.setup_py_dir, 'pip-egg-info') - filenames = os.listdir(base) - if self.editable: - filenames = [] - for root, dirs, files in os.walk(base): - for dir in vcs.dirnames: - if dir in dirs: - dirs.remove(dir) - # Iterate over a copy of ``dirs``, since mutating - # a list while iterating over it can cause trouble. - # (See https://github.com/pypa/pip/pull/462.) - for dir in list(dirs): - # Don't search in anything that looks like a virtualenv - # environment - if ( - os.path.lexists( - os.path.join(root, dir, 'bin', 'python') - ) or - os.path.exists( - os.path.join( - root, dir, 'Scripts', 'Python.exe' - ) - )): - dirs.remove(dir) - # Also don't search through tests - elif dir == 'test' or dir == 'tests': - dirs.remove(dir) - filenames.extend([os.path.join(root, dir) - for dir in dirs]) - filenames = [f for f in filenames if f.endswith('.egg-info')] - - if not filenames: - raise InstallationError( - "Files/directories not found in %s" % base - ) - # if we have more than one match, we pick the toplevel one. This - # can easily be the case if there is a dist folder which contains - # an extracted tarball for testing purposes. - if len(filenames) > 1: - filenames.sort( - key=lambda x: x.count(os.path.sep) + - (os.path.altsep and x.count(os.path.altsep) or 0) - ) - self._egg_info_path = os.path.join(base, filenames[0]) - return self._egg_info_path - - @property - def metadata(self): - # type: () -> Any - if not hasattr(self, '_metadata'): - self._metadata = get_metadata(self.get_dist()) - - return self._metadata - - def get_dist(self): - # type: () -> Distribution - """Return a pkg_resources.Distribution for this requirement""" - if self.metadata_directory: - dist_dir = self.metadata_directory - dist_cls = pkg_resources.DistInfoDistribution - else: - dist_dir = self.egg_info_path.rstrip(os.path.sep) - # https://github.com/python/mypy/issues/1174 - dist_cls = pkg_resources.Distribution # type: ignore - - # dist_dir_name can be of the form ".dist-info" or - # e.g. ".egg-info". 
- base_dir, dist_dir_name = os.path.split(dist_dir) - dist_name = os.path.splitext(dist_dir_name)[0] - metadata = pkg_resources.PathMetadata(base_dir, dist_dir) - - return dist_cls( - base_dir, - project_name=dist_name, - metadata=metadata, - ) - - def assert_source_matches_version(self): - # type: () -> None - assert self.source_dir - version = self.metadata['version'] - if self.req.specifier and version not in self.req.specifier: - logger.warning( - 'Requested %s, but installing version %s', - self, - version, - ) - else: - logger.debug( - 'Source in %s has version %s, which satisfies requirement %s', - display_path(self.source_dir), - version, - self, - ) - - # For both source distributions and editables - def ensure_has_source_dir(self, parent_dir): - # type: (str) -> str - """Ensure that a source_dir is set. - - This will create a temporary build dir if the name of the requirement - isn't known yet. - - :param parent_dir: The ideal pip parent_dir for the source_dir. - Generally src_dir for editables and build_dir for sdists. - :return: self.source_dir - """ - if self.source_dir is None: - self.source_dir = self.build_location(parent_dir) - return self.source_dir - - # For editable installations - def install_editable( - self, - install_options, # type: List[str] - global_options=(), # type: Sequence[str] - prefix=None # type: Optional[str] - ): - # type: (...) -> None - logger.info('Running setup.py develop for %s', self.name) - - if self.isolated: - global_options = list(global_options) + ["--no-user-cfg"] - - if prefix: - prefix_param = ['--prefix={}'.format(prefix)] - install_options = list(install_options) + prefix_param - - with indent_log(): - # FIXME: should we do --install-headers here too? - with self.build_env: - call_subprocess( - make_setuptools_shim_args(self.setup_py_path) + - list(global_options) + - ['develop', '--no-deps'] + - list(install_options), - - cwd=self.setup_py_dir, - ) - - self.install_succeeded = True - - def update_editable(self, obtain=True): - # type: (bool) -> None - if not self.link: - logger.debug( - "Cannot update repository at %s; repository location is " - "unknown", - self.source_dir, - ) - return - assert self.editable - assert self.source_dir - if self.link.scheme == 'file': - # Static paths don't get updated - return - assert '+' in self.link.url, "bad url: %r" % self.link.url - if not self.update: - return - vc_type, url = self.link.url.split('+', 1) - vcs_backend = vcs.get_backend(vc_type) - if vcs_backend: - url = self.link.url - if obtain: - vcs_backend.obtain(self.source_dir, url=url) - else: - vcs_backend.export(self.source_dir, url=url) - else: - assert 0, ( - 'Unexpected version control type (in %s): %s' - % (self.link, vc_type)) - - # Top-level Actions - def uninstall(self, auto_confirm=False, verbose=False, - use_user_site=False): - # type: (bool, bool, bool) -> Optional[UninstallPathSet] - """ - Uninstall the distribution currently satisfying this requirement. - - Prompts before removing or modifying files unless - ``auto_confirm`` is True. - - Refuses to delete or modify files outside of ``sys.prefix`` - - thus uninstallation within a virtual environment can only - modify that virtual environment, even if the virtualenv is - linked to global site-packages. 
- - """ - if not self.check_if_exists(use_user_site): - logger.warning("Skipping %s as it is not installed.", self.name) - return None - dist = self.satisfied_by or self.conflicts_with - - uninstalled_pathset = UninstallPathSet.from_dist(dist) - uninstalled_pathset.remove(auto_confirm, verbose) - return uninstalled_pathset - - def _clean_zip_name(self, name, prefix): # only used by archive. - # type: (str, str) -> str - assert name.startswith(prefix + os.path.sep), ( - "name %r doesn't start with prefix %r" % (name, prefix) - ) - name = name[len(prefix) + 1:] - name = name.replace(os.path.sep, '/') - return name - - def _get_archive_name(self, path, parentdir, rootdir): - # type: (str, str, str) -> str - path = os.path.join(parentdir, path) - name = self._clean_zip_name(path, rootdir) - return self.name + '/' + name - - # TODO: Investigate if this should be kept in InstallRequirement - # Seems to be used only when VCS + downloads - def archive(self, build_dir): - # type: (str) -> None - assert self.source_dir - create_archive = True - archive_name = '%s-%s.zip' % (self.name, self.metadata["version"]) - archive_path = os.path.join(build_dir, archive_name) - if os.path.exists(archive_path): - response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' % - display_path(archive_path), ('i', 'w', 'b', 'a')) - if response == 'i': - create_archive = False - elif response == 'w': - logger.warning('Deleting %s', display_path(archive_path)) - os.remove(archive_path) - elif response == 'b': - dest_file = backup_dir(archive_path) - logger.warning( - 'Backing up %s to %s', - display_path(archive_path), - display_path(dest_file), - ) - shutil.move(archive_path, dest_file) - elif response == 'a': - sys.exit(-1) - if create_archive: - zip = zipfile.ZipFile( - archive_path, 'w', zipfile.ZIP_DEFLATED, - allowZip64=True - ) - dir = os.path.normcase(os.path.abspath(self.setup_py_dir)) - for dirpath, dirnames, filenames in os.walk(dir): - if 'pip-egg-info' in dirnames: - dirnames.remove('pip-egg-info') - for dirname in dirnames: - dir_arcname = self._get_archive_name(dirname, - parentdir=dirpath, - rootdir=dir) - zipdir = zipfile.ZipInfo(dir_arcname + '/') - zipdir.external_attr = 0x1ED << 16 # 0o755 - zip.writestr(zipdir, '') - for filename in filenames: - if filename == PIP_DELETE_MARKER_FILENAME: - continue - file_arcname = self._get_archive_name(filename, - parentdir=dirpath, - rootdir=dir) - filename = os.path.join(dirpath, filename) - zip.write(filename, file_arcname) - zip.close() - logger.info('Saved %s', display_path(archive_path)) - - def install( - self, - install_options, # type: List[str] - global_options=None, # type: Optional[Sequence[str]] - root=None, # type: Optional[str] - home=None, # type: Optional[str] - prefix=None, # type: Optional[str] - warn_script_location=True, # type: bool - use_user_site=False, # type: bool - pycompile=True # type: bool - ): - # type: (...) 
-> None - global_options = global_options if global_options is not None else [] - if self.editable: - self.install_editable( - install_options, global_options, prefix=prefix, - ) - return - if self.is_wheel: - version = wheel.wheel_version(self.source_dir) - wheel.check_compatibility(version, self.name) - - self.move_wheel_files( - self.source_dir, root=root, prefix=prefix, home=home, - warn_script_location=warn_script_location, - use_user_site=use_user_site, pycompile=pycompile, - ) - self.install_succeeded = True - return - - # Extend the list of global and install options passed on to - # the setup.py call with the ones from the requirements file. - # Options specified in requirements file override those - # specified on the command line, since the last option given - # to setup.py is the one that is used. - global_options = list(global_options) + \ - self.options.get('global_options', []) - install_options = list(install_options) + \ - self.options.get('install_options', []) - - if self.isolated: - # https://github.com/python/mypy/issues/1174 - global_options = global_options + ["--no-user-cfg"] # type: ignore - - with TempDirectory(kind="record") as temp_dir: - record_filename = os.path.join(temp_dir.path, 'install-record.txt') - install_args = self.get_install_args( - global_options, record_filename, root, prefix, pycompile, - ) - msg = 'Running setup.py install for %s' % (self.name,) - with open_spinner(msg) as spinner: - with indent_log(): - with self.build_env: - call_subprocess( - install_args + install_options, - cwd=self.setup_py_dir, - spinner=spinner, - ) - - if not os.path.exists(record_filename): - logger.debug('Record file %s not found', record_filename) - return - self.install_succeeded = True - - def prepend_root(path): - # type: (str) -> str - if root is None or not os.path.isabs(path): - return path - else: - return change_root(root, path) - - with open(record_filename) as f: - for line in f: - directory = os.path.dirname(line) - if directory.endswith('.egg-info'): - egg_info_dir = prepend_root(directory) - break - else: - logger.warning( - 'Could not find .egg-info directory in install record' - ' for %s', - self, - ) - # FIXME: put the record somewhere - # FIXME: should this be an error? - return - new_lines = [] - with open(record_filename) as f: - for line in f: - filename = line.strip() - if os.path.isdir(filename): - filename += os.path.sep - new_lines.append( - os.path.relpath(prepend_root(filename), egg_info_dir) - ) - new_lines.sort() - ensure_dir(egg_info_dir) - inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') - with open(inst_files_path, 'w') as f: - f.write('\n'.join(new_lines) + '\n') - - def get_install_args( - self, - global_options, # type: Sequence[str] - record_filename, # type: str - root, # type: Optional[str] - prefix, # type: Optional[str] - pycompile # type: bool - ): - # type: (...) 
-> List[str] - install_args = make_setuptools_shim_args(self.setup_py_path, - unbuffered_output=True) - install_args += list(global_options) + \ - ['install', '--record', record_filename] - install_args += ['--single-version-externally-managed'] - - if root is not None: - install_args += ['--root', root] - if prefix is not None: - install_args += ['--prefix', prefix] - - if pycompile: - install_args += ["--compile"] - else: - install_args += ["--no-compile"] - - if running_under_virtualenv(): - py_ver_str = 'python' + sysconfig.get_python_version() - install_args += ['--install-headers', - os.path.join(sys.prefix, 'include', 'site', - py_ver_str, self.name)] - - return install_args diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_install.pyc b/env/lib/python2.7/site-packages/pip/_internal/req/req_install.pyc deleted file mode 100644 index 5d654b57..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/req/req_install.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_set.py b/env/lib/python2.7/site-packages/pip/_internal/req/req_set.py deleted file mode 100644 index d1966a4a..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/req/req_set.py +++ /dev/null @@ -1,193 +0,0 @@ -from __future__ import absolute_import - -import logging -from collections import OrderedDict - -from pip._internal.exceptions import InstallationError -from pip._internal.utils.logging import indent_log -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import Wheel - -if MYPY_CHECK_RUNNING: - from typing import Dict, Iterable, List, Optional, Tuple - from pip._internal.req.req_install import InstallRequirement - - -logger = logging.getLogger(__name__) - - -class RequirementSet(object): - - def __init__(self, require_hashes=False, check_supported_wheels=True): - # type: (bool, bool) -> None - """Create a RequirementSet. - """ - - self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] # noqa: E501 - self.require_hashes = require_hashes - self.check_supported_wheels = check_supported_wheels - - # Mapping of alias: real_name - self.requirement_aliases = {} # type: Dict[str, str] - self.unnamed_requirements = [] # type: List[InstallRequirement] - self.successfully_downloaded = [] # type: List[InstallRequirement] - self.reqs_to_cleanup = [] # type: List[InstallRequirement] - - def __str__(self): - # type: () -> str - reqs = [req for req in self.requirements.values() - if not req.comes_from] - reqs.sort(key=lambda req: req.name.lower()) - return ' '.join([str(req.req) for req in reqs]) - - def __repr__(self): - # type: () -> str - reqs = [req for req in self.requirements.values()] - reqs.sort(key=lambda req: req.name.lower()) - reqs_str = ', '.join([str(req.req) for req in reqs]) - return ('<%s object; %d requirement(s): %s>' - % (self.__class__.__name__, len(reqs), reqs_str)) - - def add_requirement( - self, - install_req, # type: InstallRequirement - parent_req_name=None, # type: Optional[str] - extras_requested=None # type: Optional[Iterable[str]] - ): - # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]] # noqa: E501 - """Add install_req as a requirement to install. - - :param parent_req_name: The name of the requirement that needed this - added. The name is used because when multiple unnamed requirements - resolve to the same name, we could otherwise end up with dependency - links that point outside the Requirements set. parent_req must - already be added. 
Note that None implies that this is a user - supplied requirement, vs an inferred one. - :param extras_requested: an iterable of extras used to evaluate the - environment markers. - :return: Additional requirements to scan. That is either [] if - the requirement is not applicable, or [install_req] if the - requirement is applicable and has just been added. - """ - name = install_req.name - - # If the markers do not match, ignore this requirement. - if not install_req.match_markers(extras_requested): - logger.info( - "Ignoring %s: markers '%s' don't match your environment", - name, install_req.markers, - ) - return [], None - - # If the wheel is not supported, raise an error. - # Should check this after filtering out based on environment markers to - # allow specifying different wheels based on the environment/OS, in a - # single requirements file. - if install_req.link and install_req.link.is_wheel: - wheel = Wheel(install_req.link.filename) - if self.check_supported_wheels and not wheel.supported(): - raise InstallationError( - "%s is not a supported wheel on this platform." % - wheel.filename - ) - - # This next bit is really a sanity check. - assert install_req.is_direct == (parent_req_name is None), ( - "a direct req shouldn't have a parent and also, " - "a non direct req should have a parent" - ) - - # Unnamed requirements are scanned again and the requirement won't be - # added as a dependency until after scanning. - if not name: - # url or path requirement w/o an egg fragment - self.unnamed_requirements.append(install_req) - return [install_req], None - - try: - existing_req = self.get_requirement(name) - except KeyError: - existing_req = None - - has_conflicting_requirement = ( - parent_req_name is None and - existing_req and - not existing_req.constraint and - existing_req.extras == install_req.extras and - existing_req.req.specifier != install_req.req.specifier - ) - if has_conflicting_requirement: - raise InstallationError( - "Double requirement given: %s (already in %s, name=%r)" - % (install_req, existing_req, name) - ) - - # When no existing requirement exists, add the requirement as a - # dependency and it will be scanned again after. - if not existing_req: - self.requirements[name] = install_req - # FIXME: what about other normalizations? E.g., _ vs. -? - if name.lower() != name: - self.requirement_aliases[name.lower()] = name - # We'd want to rescan this requirements later - return [install_req], install_req - - # Assume there's no need to scan, and that we've already - # encountered this for scanning. - if install_req.constraint or not existing_req.constraint: - return [], existing_req - - does_not_satisfy_constraint = ( - install_req.link and - not ( - existing_req.link and - install_req.link.path == existing_req.link.path - ) - ) - if does_not_satisfy_constraint: - self.reqs_to_cleanup.append(install_req) - raise InstallationError( - "Could not satisfy constraints for '%s': " - "installation from path or url cannot be " - "constrained to a version" % name, - ) - # If we're now installing a constraint, mark the existing - # object for real installation. - existing_req.constraint = False - existing_req.extras = tuple(sorted( - set(existing_req.extras) | set(install_req.extras) - )) - logger.debug( - "Setting %s extras to: %s", - existing_req, existing_req.extras, - ) - # Return the existing requirement for addition to the parent and - # scanning again. 
- return [existing_req], existing_req - - def has_requirement(self, project_name): - # type: (str) -> bool - name = project_name.lower() - if (name in self.requirements and - not self.requirements[name].constraint or - name in self.requirement_aliases and - not self.requirements[self.requirement_aliases[name]].constraint): - return True - return False - - def get_requirement(self, project_name): - # type: (str) -> InstallRequirement - for name in project_name, project_name.lower(): - if name in self.requirements: - return self.requirements[name] - if name in self.requirement_aliases: - return self.requirements[self.requirement_aliases[name]] - raise KeyError("No project with the name %r" % project_name) - - def cleanup_files(self): - # type: () -> None - """Clean up files, remove builds.""" - logger.debug('Cleaning up...') - with indent_log(): - for req in self.reqs_to_cleanup: - req.remove_temporary_source() diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_set.pyc b/env/lib/python2.7/site-packages/pip/_internal/req/req_set.pyc deleted file mode 100644 index 207e826e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/req/req_set.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_tracker.py b/env/lib/python2.7/site-packages/pip/_internal/req/req_tracker.py deleted file mode 100644 index e36a3f6b..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/req/req_tracker.py +++ /dev/null @@ -1,96 +0,0 @@ -from __future__ import absolute_import - -import contextlib -import errno -import hashlib -import logging -import os - -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from types import TracebackType - from typing import Iterator, Optional, Set, Type - from pip._internal.req.req_install import InstallRequirement - from pip._internal.models.link import Link - -logger = logging.getLogger(__name__) - - -class RequirementTracker(object): - - def __init__(self): - # type: () -> None - self._root = os.environ.get('PIP_REQ_TRACKER') - if self._root is None: - self._temp_dir = TempDirectory(delete=False, kind='req-tracker') - self._temp_dir.create() - self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path - logger.debug('Created requirements tracker %r', self._root) - else: - self._temp_dir = None - logger.debug('Re-using requirements tracker %r', self._root) - self._entries = set() # type: Set[InstallRequirement] - - def __enter__(self): - # type: () -> RequirementTracker - return self - - def __exit__( - self, - exc_type, # type: Optional[Type[BaseException]] - exc_val, # type: Optional[BaseException] - exc_tb # type: Optional[TracebackType] - ): - # type: (...) -> None - self.cleanup() - - def _entry_path(self, link): - # type: (Link) -> str - hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() - return os.path.join(self._root, hashed) - - def add(self, req): - # type: (InstallRequirement) -> None - link = req.link - info = str(req) - entry_path = self._entry_path(link) - try: - with open(entry_path) as fp: - # Error, these's already a build in progress. 
- raise LookupError('%s is already being built: %s' - % (link, fp.read())) - except IOError as e: - if e.errno != errno.ENOENT: - raise - assert req not in self._entries - with open(entry_path, 'w') as fp: - fp.write(info) - self._entries.add(req) - logger.debug('Added %s to build tracker %r', req, self._root) - - def remove(self, req): - # type: (InstallRequirement) -> None - link = req.link - self._entries.remove(req) - os.unlink(self._entry_path(link)) - logger.debug('Removed %s from build tracker %r', req, self._root) - - def cleanup(self): - # type: () -> None - for req in set(self._entries): - self.remove(req) - remove = self._temp_dir is not None - if remove: - self._temp_dir.cleanup() - logger.debug('%s build tracker %r', - 'Removed' if remove else 'Cleaned', - self._root) - - @contextlib.contextmanager - def track(self, req): - # type: (InstallRequirement) -> Iterator[None] - self.add(req) - yield - self.remove(req) diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_tracker.pyc b/env/lib/python2.7/site-packages/pip/_internal/req/req_tracker.pyc deleted file mode 100644 index 9c907ed7..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/req/req_tracker.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_uninstall.pyc b/env/lib/python2.7/site-packages/pip/_internal/req/req_uninstall.pyc deleted file mode 100644 index 9a470c11..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/req/req_uninstall.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/__init__.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/__init__.pyc deleted file mode 100644 index d945ceb5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/appdirs.py b/env/lib/python2.7/site-packages/pip/_internal/utils/appdirs.py deleted file mode 100644 index fb261110..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/appdirs.py +++ /dev/null @@ -1,268 +0,0 @@ -""" -This code was taken from https://github.com/ActiveState/appdirs and modified -to suit our purposes. -""" -from __future__ import absolute_import - -import os -import sys - -from pip._vendor.six import PY2, text_type - -from pip._internal.utils.compat import WINDOWS, expanduser -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import List - - -def user_cache_dir(appname): - # type: (str) -> str - r""" - Return full path to the user-specific cache dir for this application. - - "appname" is the name of application. - - Typical user cache directories are: - macOS: ~/Library/Caches/ - Unix: ~/.cache/ (XDG default) - Windows: C:\Users\\AppData\Local\\Cache - - On Windows the only suggestion in the MSDN docs is that local settings go - in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the - non-roaming app data dir (the default returned by `user_data_dir`). Apps - typically put cache data somewhere *under* the given dir here. Some - examples: - ...\Mozilla\Firefox\Profiles\\Cache - ...\Acme\SuperApp\Cache\1.0 - - OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. - """ - if WINDOWS: - # Get the base path - path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) - - # When using Python 2, return paths as bytes on Windows like we do on - # other operating systems. See helper function docs for more details. 
- if PY2 and isinstance(path, text_type): - path = _win_path_to_bytes(path) - - # Add our app name and Cache directory to it - path = os.path.join(path, appname, "Cache") - elif sys.platform == "darwin": - # Get the base path - path = expanduser("~/Library/Caches") - - # Add our app name to it - path = os.path.join(path, appname) - else: - # Get the base path - path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")) - - # Add our app name to it - path = os.path.join(path, appname) - - return path - - -def user_data_dir(appname, roaming=False): - # type: (str, bool) -> str - r""" - Return full path to the user-specific data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - macOS: ~/Library/Application Support/ - if it exists, else ~/.config/ - Unix: ~/.local/share/ # or in - $XDG_DATA_HOME, if defined - Win XP (not roaming): C:\Documents and Settings\\ ... - ...Application Data\ - Win XP (roaming): C:\Documents and Settings\\Local ... - ...Settings\Application Data\ - Win 7 (not roaming): C:\\Users\\AppData\Local\ - Win 7 (roaming): C:\\Users\\AppData\Roaming\ - - For Unix, we follow the XDG spec and support $XDG_DATA_HOME. - That means, by default "~/.local/share/". - """ - if WINDOWS: - const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" - path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) - elif sys.platform == "darwin": - path = os.path.join( - expanduser('~/Library/Application Support/'), - appname, - ) if os.path.isdir(os.path.join( - expanduser('~/Library/Application Support/'), - appname, - ) - ) else os.path.join( - expanduser('~/.config/'), - appname, - ) - else: - path = os.path.join( - os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")), - appname, - ) - - return path - - -def user_config_dir(appname, roaming=True): - # type: (str, bool) -> str - """Return full path to the user-specific config dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "roaming" (boolean, default True) can be set False to not use the - Windows roaming appdata directory. That means that for users on a - Windows network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - macOS: same as user_data_dir - Unix: ~/.config/ - Win *: same as user_data_dir - - For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. - That means, by default "~/.config/". - """ - if WINDOWS: - path = user_data_dir(appname, roaming=roaming) - elif sys.platform == "darwin": - path = user_data_dir(appname) - else: - path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config")) - path = os.path.join(path, appname) - - return path - - -# for the discussion regarding site_config_dirs locations -# see -def site_config_dirs(appname): - # type: (str) -> List[str] - r"""Return a list of potential user-shared config dirs for this application. - - "appname" is the name of application. 
- - Typical user config directories are: - macOS: /Library/Application Support// - Unix: /etc or $XDG_CONFIG_DIRS[i]// for each value in - $XDG_CONFIG_DIRS - Win XP: C:\Documents and Settings\All Users\Application ... - ...Data\\ - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory - on Vista.) - Win 7: Hidden, but writeable on Win 7: - C:\ProgramData\\ - """ - if WINDOWS: - path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) - pathlist = [os.path.join(path, appname)] - elif sys.platform == 'darwin': - pathlist = [os.path.join('/Library/Application Support', appname)] - else: - # try looking in $XDG_CONFIG_DIRS - xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - if xdg_config_dirs: - pathlist = [ - os.path.join(expanduser(x), appname) - for x in xdg_config_dirs.split(os.pathsep) - ] - else: - pathlist = [] - - # always look in /etc directly as well - pathlist.append('/etc') - - return pathlist - - -# -- Windows support functions -- - -def _get_win_folder_from_registry(csidl_name): - # type: (str) -> str - """ - This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. - """ - import _winreg - - shell_folder_name = { - "CSIDL_APPDATA": "AppData", - "CSIDL_COMMON_APPDATA": "Common AppData", - "CSIDL_LOCAL_APPDATA": "Local AppData", - }[csidl_name] - - key = _winreg.OpenKey( - _winreg.HKEY_CURRENT_USER, - r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" - ) - directory, _type = _winreg.QueryValueEx(key, shell_folder_name) - return directory - - -def _get_win_folder_with_ctypes(csidl_name): - # type: (str) -> str - csidl_const = { - "CSIDL_APPDATA": 26, - "CSIDL_COMMON_APPDATA": 35, - "CSIDL_LOCAL_APPDATA": 28, - }[csidl_name] - - buf = ctypes.create_unicode_buffer(1024) - ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - return buf.value - - -if WINDOWS: - try: - import ctypes - _get_win_folder = _get_win_folder_with_ctypes - except ImportError: - _get_win_folder = _get_win_folder_from_registry - - -def _win_path_to_bytes(path): - """Encode Windows paths to bytes. Only used on Python 2. - - Motivation is to be consistent with other operating systems where paths - are also returned as bytes. This avoids problems mixing bytes and Unicode - elsewhere in the codebase. For more details and discussion see - . - - If encoding using ASCII and MBCS fails, return the original Unicode path. 
- """ - for encoding in ('ASCII', 'MBCS'): - try: - return path.encode(encoding) - except (UnicodeEncodeError, LookupError): - pass - return path diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/appdirs.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/appdirs.pyc deleted file mode 100644 index 8eef6984..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/appdirs.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/compat.py b/env/lib/python2.7/site-packages/pip/_internal/utils/compat.py deleted file mode 100644 index ec3995c2..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/compat.py +++ /dev/null @@ -1,293 +0,0 @@ -"""Stuff that differs in different Python versions and platform -distributions.""" -from __future__ import absolute_import, division - -import codecs -import locale -import logging -import os -import shutil -import sys - -from pip._vendor.six import text_type -from pip._vendor.urllib3.util import IS_PYOPENSSL - -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Optional, Text, Tuple, Union - -try: - import _ssl # noqa -except ImportError: - ssl = None -else: - # This additional assignment was needed to prevent a mypy error. - ssl = _ssl - -try: - import ipaddress -except ImportError: - try: - from pip._vendor import ipaddress # type: ignore - except ImportError: - import ipaddr as ipaddress # type: ignore - ipaddress.ip_address = ipaddress.IPAddress # type: ignore - ipaddress.ip_network = ipaddress.IPNetwork # type: ignore - - -__all__ = [ - "ipaddress", "uses_pycache", "console_to_str", "native_str", - "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size", - "get_extension_suffixes", -] - - -logger = logging.getLogger(__name__) - -HAS_TLS = (ssl is not None) or IS_PYOPENSSL - -if sys.version_info >= (3, 4): - uses_pycache = True - from importlib.util import cache_from_source -else: - import imp - - try: - cache_from_source = imp.cache_from_source # type: ignore - except AttributeError: - # does not use __pycache__ - cache_from_source = None - - uses_pycache = cache_from_source is not None - - -if sys.version_info >= (3, 5): - backslashreplace_decode = "backslashreplace" -else: - # In version 3.4 and older, backslashreplace exists - # but does not support use for decoding. - # We implement our own replace handler for this - # situation, so that we can consistently use - # backslash replacement for all versions. - def backslashreplace_decode_fn(err): - raw_bytes = (err.object[i] for i in range(err.start, err.end)) - if sys.version_info[0] == 2: - # Python 2 gave us characters - convert to numeric bytes - raw_bytes = (ord(b) for b in raw_bytes) - return u"".join(u"\\x%x" % c for c in raw_bytes), err.end - codecs.register_error( - "backslashreplace_decode", - backslashreplace_decode_fn, - ) - backslashreplace_decode = "backslashreplace_decode" - - -def str_to_display(data, desc=None): - # type: (Union[bytes, Text], Optional[str]) -> Text - """ - For display or logging purposes, convert a bytes object (or text) to - text (e.g. unicode in Python 2) safe for output. - - :param desc: An optional phrase describing the input data, for use in - the log message if a warning is logged. Defaults to "Bytes object". - - This function should never error out and so can take a best effort - approach. It is okay to be lossy if needed since the return value is - just for display. 
- - We assume the data is in the locale preferred encoding. If it won't - decode properly, we warn the user but decode as best we can. - - We also ensure that the output can be safely written to standard output - without encoding errors. - """ - if isinstance(data, text_type): - return data - - # Otherwise, data is a bytes object (str in Python 2). - # First, get the encoding we assume. This is the preferred - # encoding for the locale, unless that is not found, or - # it is ASCII, in which case assume UTF-8 - encoding = locale.getpreferredencoding() - if (not encoding) or codecs.lookup(encoding).name == "ascii": - encoding = "utf-8" - - # Now try to decode the data - if we fail, warn the user and - # decode with replacement. - try: - decoded_data = data.decode(encoding) - except UnicodeDecodeError: - if desc is None: - desc = 'Bytes object' - msg_format = '{} does not appear to be encoded as %s'.format(desc) - logger.warning(msg_format, encoding) - decoded_data = data.decode(encoding, errors=backslashreplace_decode) - - # Make sure we can print the output, by encoding it to the output - # encoding with replacement of unencodable characters, and then - # decoding again. - # We use stderr's encoding because it's less likely to be - # redirected and if we don't find an encoding we skip this - # step (on the assumption that output is wrapped by something - # that won't fail). - # The double getattr is to deal with the possibility that we're - # being called in a situation where sys.__stderr__ doesn't exist, - # or doesn't have an encoding attribute. Neither of these cases - # should occur in normal pip use, but there's no harm in checking - # in case people use pip in (unsupported) unusual situations. - output_encoding = getattr(getattr(sys, "__stderr__", None), - "encoding", None) - - if output_encoding: - output_encoded = decoded_data.encode( - output_encoding, - errors="backslashreplace" - ) - decoded_data = output_encoded.decode(output_encoding) - - return decoded_data - - -def console_to_str(data): - # type: (bytes) -> Text - """Return a string, safe for output, of subprocess output. - """ - return str_to_display(data, desc='Subprocess output') - - -if sys.version_info >= (3,): - def native_str(s, replace=False): - # type: (str, bool) -> str - if isinstance(s, bytes): - return s.decode('utf-8', 'replace' if replace else 'strict') - return s - -else: - def native_str(s, replace=False): - # type: (str, bool) -> str - # Replace is ignored -- unicode to UTF-8 can't fail - if isinstance(s, text_type): - return s.encode('utf-8') - return s - - -def get_path_uid(path): - # type: (str) -> int - """ - Return path's uid. - - Does not follow symlinks: - https://github.com/pypa/pip/pull/935#discussion_r5307003 - - Placed this function in compat due to differences on AIX and - Jython, that should eventually go away. - - :raises OSError: When path is a symlink or can't be read. 
- """ - if hasattr(os, 'O_NOFOLLOW'): - fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) - file_uid = os.fstat(fd).st_uid - os.close(fd) - else: # AIX and Jython - # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW - if not os.path.islink(path): - # older versions of Jython don't have `os.fstat` - file_uid = os.stat(path).st_uid - else: - # raise OSError for parity with os.O_NOFOLLOW above - raise OSError( - "%s is a symlink; Will not return uid for symlinks" % path - ) - return file_uid - - -if sys.version_info >= (3, 4): - from importlib.machinery import EXTENSION_SUFFIXES - - def get_extension_suffixes(): - return EXTENSION_SUFFIXES -else: - from imp import get_suffixes - - def get_extension_suffixes(): - return [suffix[0] for suffix in get_suffixes()] - - -def expanduser(path): - # type: (str) -> str - """ - Expand ~ and ~user constructions. - - Includes a workaround for https://bugs.python.org/issue14768 - """ - expanded = os.path.expanduser(path) - if path.startswith('~/') and expanded.startswith('//'): - expanded = expanded[1:] - return expanded - - -# packages in the stdlib that may have installation metadata, but should not be -# considered 'installed'. this theoretically could be determined based on -# dist.location (py27:`sysconfig.get_paths()['stdlib']`, -# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may -# make this ineffective, so hard-coding -stdlib_pkgs = {"python", "wsgiref", "argparse"} - - -# windows detection, covers cpython and ironpython -WINDOWS = (sys.platform.startswith("win") or - (sys.platform == 'cli' and os.name == 'nt')) - - -def samefile(file1, file2): - # type: (str, str) -> bool - """Provide an alternative for os.path.samefile on Windows/Python2""" - if hasattr(os.path, 'samefile'): - return os.path.samefile(file1, file2) - else: - path1 = os.path.normcase(os.path.abspath(file1)) - path2 = os.path.normcase(os.path.abspath(file2)) - return path1 == path2 - - -if hasattr(shutil, 'get_terminal_size'): - def get_terminal_size(): - # type: () -> Tuple[int, int] - """ - Returns a tuple (x, y) representing the width(x) and the height(y) - in characters of the terminal window. - """ - return tuple(shutil.get_terminal_size()) # type: ignore -else: - def get_terminal_size(): - # type: () -> Tuple[int, int] - """ - Returns a tuple (x, y) representing the width(x) and the height(y) - in characters of the terminal window. 
- """ - def ioctl_GWINSZ(fd): - try: - import fcntl - import termios - import struct - cr = struct.unpack_from( - 'hh', - fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678') - ) - except Exception: - return None - if cr == (0, 0): - return None - return cr - cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) - if not cr: - try: - fd = os.open(os.ctermid(), os.O_RDONLY) - cr = ioctl_GWINSZ(fd) - os.close(fd) - except Exception: - pass - if not cr: - cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) - return int(cr[1]), int(cr[0]) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/compat.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/compat.pyc deleted file mode 100644 index d8e9d5c8..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/deprecation.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/deprecation.pyc deleted file mode 100644 index 3f23fc82..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/deprecation.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/encoding.py b/env/lib/python2.7/site-packages/pip/_internal/utils/encoding.py deleted file mode 100644 index 30139f2e..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/encoding.py +++ /dev/null @@ -1,39 +0,0 @@ -import codecs -import locale -import re -import sys - -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import List, Tuple, Text - -BOMS = [ - (codecs.BOM_UTF8, 'utf-8'), - (codecs.BOM_UTF16, 'utf-16'), - (codecs.BOM_UTF16_BE, 'utf-16-be'), - (codecs.BOM_UTF16_LE, 'utf-16-le'), - (codecs.BOM_UTF32, 'utf-32'), - (codecs.BOM_UTF32_BE, 'utf-32-be'), - (codecs.BOM_UTF32_LE, 'utf-32-le'), -] # type: List[Tuple[bytes, Text]] - -ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)') - - -def auto_decode(data): - # type: (bytes) -> Text - """Check a bytes string for a BOM to correctly detect the encoding - - Fallback to locale.getpreferredencoding(False) like open() on Python3""" - for bom, encoding in BOMS: - if data.startswith(bom): - return data[len(bom):].decode(encoding) - # Lets check the first two lines as in PEP263 - for line in data.split(b'\n')[:2]: - if line[0:1] == b'#' and ENCODING_RE.search(line): - encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') - return data.decode(encoding) - return data.decode( - locale.getpreferredencoding(False) or sys.getdefaultencoding(), - ) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/encoding.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/encoding.pyc deleted file mode 100644 index 7b9698df..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/encoding.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/filesystem.py b/env/lib/python2.7/site-packages/pip/_internal/utils/filesystem.py deleted file mode 100644 index 1e6b0338..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/filesystem.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -import os.path - -from pip._internal.utils.compat import get_path_uid - - -def check_path_owner(path): - # type: (str) -> bool - # If we don't have a way to check the effective uid of this process, then - # we'll just assume that we own the directory. 
- if not hasattr(os, "geteuid"): - return True - - previous = None - while path != previous: - if os.path.lexists(path): - # Check if path is writable by current user. - if os.geteuid() == 0: - # Special handling for root user in order to handle properly - # cases where users use sudo without -H flag. - try: - path_uid = get_path_uid(path) - except OSError: - return False - return path_uid == 0 - else: - return os.access(path, os.W_OK) - else: - previous, path = path, os.path.dirname(path) - return False # assume we don't own the path diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/filesystem.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/filesystem.pyc deleted file mode 100644 index 5c97e0a0..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/filesystem.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/glibc.py b/env/lib/python2.7/site-packages/pip/_internal/utils/glibc.py deleted file mode 100644 index aa77d9b6..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/glibc.py +++ /dev/null @@ -1,120 +0,0 @@ -from __future__ import absolute_import - -import os -import re -import warnings - -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Optional, Tuple - - -def glibc_version_string(): - # type: () -> Optional[str] - "Returns glibc version string, or None if not using glibc." - return glibc_version_string_confstr() or glibc_version_string_ctypes() - - -def glibc_version_string_confstr(): - # type: () -> Optional[str] - "Primary implementation of glibc_version_string using os.confstr." - # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely - # to be broken or missing. This strategy is used in the standard library - # platform module: - # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 - try: - # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": - _, version = os.confstr("CS_GNU_LIBC_VERSION").split() - except (AttributeError, OSError, ValueError): - # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... - return None - return version - - -def glibc_version_string_ctypes(): - # type: () -> Optional[str] - "Fallback implementation of glibc_version_string using ctypes." - - try: - import ctypes - except ImportError: - return None - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - process_namespace = ctypes.CDLL(None) - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -# Separated out from have_compatible_glibc for easier unit testing -def check_glibc_version(version_str, required_major, minimum_minor): - # type: (str, int, int) -> bool - # Parse string and check against requested version. 
- # - # We use a regexp instead of str.split because we want to discard any - # random junk that might come after the minor version -- this might happen - # in patched/forked versions of glibc (e.g. Linaro's version of glibc - # uses version strings like "2.20-2014.11"). See gh-3588. - m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) - if not m: - warnings.warn("Expected glibc version with 2 components major.minor," - " got: %s" % version_str, RuntimeWarning) - return False - return (int(m.group("major")) == required_major and - int(m.group("minor")) >= minimum_minor) - - -def have_compatible_glibc(required_major, minimum_minor): - # type: (int, int) -> bool - version_str = glibc_version_string() - if version_str is None: - return False - return check_glibc_version(version_str, required_major, minimum_minor) - - -# platform.libc_ver regularly returns completely nonsensical glibc -# versions. E.g. on my computer, platform says: -# -# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' -# ('glibc', '2.7') -# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' -# ('glibc', '2.9') -# -# But the truth is: -# -# ~$ ldd --version -# ldd (Debian GLIBC 2.22-11) 2.22 -# -# This is unfortunate, because it means that the linehaul data on libc -# versions that was generated by pip 8.1.2 and earlier is useless and -# misleading. Solution: instead of using platform, use our code that actually -# works. -def libc_ver(): - # type: () -> Tuple[str, str] - """Try to determine the glibc version - - Returns a tuple of strings (lib, version) which default to empty strings - in case the lookup fails. - """ - glibc_version = glibc_version_string() - if glibc_version is None: - return ("", "") - else: - return ("glibc", glibc_version) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/glibc.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/glibc.pyc deleted file mode 100644 index 89745b58..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/glibc.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/hashes.py b/env/lib/python2.7/site-packages/pip/_internal/utils/hashes.py deleted file mode 100644 index e8aabe1a..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/hashes.py +++ /dev/null @@ -1,128 +0,0 @@ -from __future__ import absolute_import - -import hashlib - -from pip._vendor.six import iteritems, iterkeys, itervalues - -from pip._internal.exceptions import ( - HashMismatch, HashMissing, InstallationError, -) -from pip._internal.utils.misc import read_chunks -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( - Dict, List, BinaryIO, NoReturn, Iterator - ) - from pip._vendor.six import PY3 - if PY3: - from hashlib import _Hash - else: - from hashlib import _hash as _Hash - - -# The recommended hash algo of the moment. Change this whenever the state of -# the art changes; it won't hurt backward compatibility. -FAVORITE_HASH = 'sha256' - - -# Names of hashlib algorithms allowed by the --hash option and ``pip hash`` -# Currently, those are the ones at least as collision-resistant as sha256. 
-STRONG_HASHES = ['sha256', 'sha384', 'sha512'] - - -class Hashes(object): - """A wrapper that builds multiple hashes at once and checks them against - known-good values - - """ - def __init__(self, hashes=None): - # type: (Dict[str, List[str]]) -> None - """ - :param hashes: A dict of algorithm names pointing to lists of allowed - hex digests - """ - self._allowed = {} if hashes is None else hashes - - @property - def digest_count(self): - # type: () -> int - return sum(len(digests) for digests in self._allowed.values()) - - def is_hash_allowed( - self, - hash_name, # type: str - hex_digest, # type: str - ): - """Return whether the given hex digest is allowed.""" - return hex_digest in self._allowed.get(hash_name, []) - - def check_against_chunks(self, chunks): - # type: (Iterator[bytes]) -> None - """Check good hashes against ones built from iterable of chunks of - data. - - Raise HashMismatch if none match. - - """ - gots = {} - for hash_name in iterkeys(self._allowed): - try: - gots[hash_name] = hashlib.new(hash_name) - except (ValueError, TypeError): - raise InstallationError('Unknown hash name: %s' % hash_name) - - for chunk in chunks: - for hash in itervalues(gots): - hash.update(chunk) - - for hash_name, got in iteritems(gots): - if got.hexdigest() in self._allowed[hash_name]: - return - self._raise(gots) - - def _raise(self, gots): - # type: (Dict[str, _Hash]) -> NoReturn - raise HashMismatch(self._allowed, gots) - - def check_against_file(self, file): - # type: (BinaryIO) -> None - """Check good hashes against a file-like object - - Raise HashMismatch if none match. - - """ - return self.check_against_chunks(read_chunks(file)) - - def check_against_path(self, path): - # type: (str) -> None - with open(path, 'rb') as file: - return self.check_against_file(file) - - def __nonzero__(self): - # type: () -> bool - """Return whether I know any known-good hashes.""" - return bool(self._allowed) - - def __bool__(self): - # type: () -> bool - return self.__nonzero__() - - -class MissingHashes(Hashes): - """A workalike for Hashes used when we're missing a hash for a requirement - - It computes the actual hash of the requirement and raises a HashMissing - exception showing it to the user. - - """ - def __init__(self): - # type: () -> None - """Don't offer the ``hashes`` kwarg.""" - # Pass our favorite hash in to generate a "gotten hash". With the - # empty list, it will never match, so an error will always raise. 
- super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []}) - - def _raise(self, gots): - # type: (Dict[str, _Hash]) -> NoReturn - raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/hashes.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/hashes.pyc deleted file mode 100644 index 70fa3b12..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/hashes.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/logging.py b/env/lib/python2.7/site-packages/pip/_internal/utils/logging.py deleted file mode 100644 index 3fbec712..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/logging.py +++ /dev/null @@ -1,394 +0,0 @@ -from __future__ import absolute_import - -import contextlib -import errno -import logging -import logging.handlers -import os -import sys -from logging import Filter - -from pip._vendor.six import PY2 - -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX -from pip._internal.utils.misc import ensure_dir, subprocess_logger - -try: - import threading -except ImportError: - import dummy_threading as threading # type: ignore - - -try: - # Use "import as" and set colorama in the else clause to avoid mypy - # errors and get the following correct revealed type for colorama: - # `Union[_importlib_modulespec.ModuleType, None]` - # Otherwise, we get an error like the following in the except block: - # > Incompatible types in assignment (expression has type "None", - # variable has type Module) - # TODO: eliminate the need to use "import as" once mypy addresses some - # of its issues with conditional imports. Here is an umbrella issue: - # https://github.com/python/mypy/issues/1297 - from pip._vendor import colorama as _colorama -# Lots of different errors can come from this, including SystemError and -# ImportError. -except Exception: - colorama = None -else: - # Import Fore explicitly rather than accessing below as colorama.Fore - # to avoid the following error running mypy: - # > Module has no attribute "Fore" - # TODO: eliminate the need to import Fore once mypy addresses some of its - # issues with conditional imports. This particular case could be an - # instance of the following issue (but also see the umbrella issue above): - # https://github.com/python/mypy/issues/3500 - from pip._vendor.colorama import Fore - - colorama = _colorama - - -_log_state = threading.local() -_log_state.indentation = 0 - - -class BrokenStdoutLoggingError(Exception): - """ - Raised if BrokenPipeError occurs for the stdout stream while logging. - """ - pass - - -# BrokenPipeError does not exist in Python 2 and, in addition, manifests -# differently in Windows and non-Windows. -if WINDOWS: - # In Windows, a broken pipe can show up as EINVAL rather than EPIPE: - # https://bugs.python.org/issue19612 - # https://bugs.python.org/issue30418 - if PY2: - def _is_broken_pipe_error(exc_class, exc): - """See the docstring for non-Windows Python 3 below.""" - return (exc_class is IOError and - exc.errno in (errno.EINVAL, errno.EPIPE)) - else: - # In Windows, a broken pipe IOError became OSError in Python 3. 
- def _is_broken_pipe_error(exc_class, exc): - """See the docstring for non-Windows Python 3 below.""" - return ((exc_class is BrokenPipeError) or # noqa: F821 - (exc_class is OSError and - exc.errno in (errno.EINVAL, errno.EPIPE))) -elif PY2: - def _is_broken_pipe_error(exc_class, exc): - """See the docstring for non-Windows Python 3 below.""" - return (exc_class is IOError and exc.errno == errno.EPIPE) -else: - # Then we are in the non-Windows Python 3 case. - def _is_broken_pipe_error(exc_class, exc): - """ - Return whether an exception is a broken pipe error. - - Args: - exc_class: an exception class. - exc: an exception instance. - """ - return (exc_class is BrokenPipeError) # noqa: F821 - - -@contextlib.contextmanager -def indent_log(num=2): - """ - A context manager which will cause the log output to be indented for any - log messages emitted inside it. - """ - _log_state.indentation += num - try: - yield - finally: - _log_state.indentation -= num - - -def get_indentation(): - return getattr(_log_state, 'indentation', 0) - - -class IndentingFormatter(logging.Formatter): - - def __init__(self, *args, **kwargs): - """ - A logging.Formatter that obeys the indent_log() context manager. - - :param add_timestamp: A bool indicating output lines should be prefixed - with their record's timestamp. - """ - self.add_timestamp = kwargs.pop("add_timestamp", False) - super(IndentingFormatter, self).__init__(*args, **kwargs) - - def get_message_start(self, formatted, levelno): - """ - Return the start of the formatted log message (not counting the - prefix to add to each line). - """ - if levelno < logging.WARNING: - return '' - if formatted.startswith(DEPRECATION_MSG_PREFIX): - # Then the message already has a prefix. We don't want it to - # look like "WARNING: DEPRECATION: ...." - return '' - if levelno < logging.ERROR: - return 'WARNING: ' - - return 'ERROR: ' - - def format(self, record): - """ - Calls the standard formatter, but will indent all of the log message - lines by our current indentation level. - """ - formatted = super(IndentingFormatter, self).format(record) - message_start = self.get_message_start(formatted, record.levelno) - formatted = message_start + formatted - - prefix = '' - if self.add_timestamp: - # TODO: Use Formatter.default_time_format after dropping PY2. - t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S") - prefix = '%s,%03d ' % (t, record.msecs) - prefix += " " * get_indentation() - formatted = "".join([ - prefix + line - for line in formatted.splitlines(True) - ]) - return formatted - - -def _color_wrap(*colors): - def wrapped(inp): - return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) - return wrapped - - -class ColorizedStreamHandler(logging.StreamHandler): - - # Don't build up a list of colors if we don't have colorama - if colorama: - COLORS = [ - # This needs to be in order from highest logging level to lowest. - (logging.ERROR, _color_wrap(Fore.RED)), - (logging.WARNING, _color_wrap(Fore.YELLOW)), - ] - else: - COLORS = [] - - def __init__(self, stream=None, no_color=None): - logging.StreamHandler.__init__(self, stream) - self._no_color = no_color - - if WINDOWS and colorama: - self.stream = colorama.AnsiToWin32(self.stream) - - def _using_stdout(self): - """ - Return whether the handler is using sys.stdout. - """ - if WINDOWS and colorama: - # Then self.stream is an AnsiToWin32 object. 
- return self.stream.wrapped is sys.stdout - - return self.stream is sys.stdout - - def should_color(self): - # Don't colorize things if we do not have colorama or if told not to - if not colorama or self._no_color: - return False - - real_stream = ( - self.stream if not isinstance(self.stream, colorama.AnsiToWin32) - else self.stream.wrapped - ) - - # If the stream is a tty we should color it - if hasattr(real_stream, "isatty") and real_stream.isatty(): - return True - - # If we have an ANSI term we should color it - if os.environ.get("TERM") == "ANSI": - return True - - # If anything else we should not color it - return False - - def format(self, record): - msg = logging.StreamHandler.format(self, record) - - if self.should_color(): - for level, color in self.COLORS: - if record.levelno >= level: - msg = color(msg) - break - - return msg - - # The logging module says handleError() can be customized. - def handleError(self, record): - exc_class, exc = sys.exc_info()[:2] - # If a broken pipe occurred while calling write() or flush() on the - # stdout stream in logging's Handler.emit(), then raise our special - # exception so we can handle it in main() instead of logging the - # broken pipe error and continuing. - if (exc_class and self._using_stdout() and - _is_broken_pipe_error(exc_class, exc)): - raise BrokenStdoutLoggingError() - - return super(ColorizedStreamHandler, self).handleError(record) - - -class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): - - def _open(self): - ensure_dir(os.path.dirname(self.baseFilename)) - return logging.handlers.RotatingFileHandler._open(self) - - -class MaxLevelFilter(Filter): - - def __init__(self, level): - self.level = level - - def filter(self, record): - return record.levelno < self.level - - -class ExcludeLoggerFilter(Filter): - - """ - A logging Filter that excludes records from a logger (or its children). - """ - - def filter(self, record): - # The base Filter class allows only records from a logger (or its - # children). - return not super(ExcludeLoggerFilter, self).filter(record) - - -def setup_logging(verbosity, no_color, user_log_file): - """Configures and sets up all of the logging - - Returns the requested logging level, as its integer value. - """ - - # Determine the level to be logging at. - if verbosity >= 1: - level = "DEBUG" - elif verbosity == -1: - level = "WARNING" - elif verbosity == -2: - level = "ERROR" - elif verbosity <= -3: - level = "CRITICAL" - else: - level = "INFO" - - level_number = getattr(logging, level) - - # The "root" logger should match the "console" level *unless* we also need - # to log to a user log file. - include_user_log = user_log_file is not None - if include_user_log: - additional_log_file = user_log_file - root_level = "DEBUG" - else: - additional_log_file = "/dev/null" - root_level = level - - # Disable any logging besides WARNING unless we have DEBUG level logging - # enabled for vendored libraries. 
- vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" - - # Shorthands for clarity - log_streams = { - "stdout": "ext://sys.stdout", - "stderr": "ext://sys.stderr", - } - handler_classes = { - "stream": "pip._internal.utils.logging.ColorizedStreamHandler", - "file": "pip._internal.utils.logging.BetterRotatingFileHandler", - } - handlers = ["console", "console_errors", "console_subprocess"] + ( - ["user_log"] if include_user_log else [] - ) - - logging.config.dictConfig({ - "version": 1, - "disable_existing_loggers": False, - "filters": { - "exclude_warnings": { - "()": "pip._internal.utils.logging.MaxLevelFilter", - "level": logging.WARNING, - }, - "restrict_to_subprocess": { - "()": "logging.Filter", - "name": subprocess_logger.name, - }, - "exclude_subprocess": { - "()": "pip._internal.utils.logging.ExcludeLoggerFilter", - "name": subprocess_logger.name, - }, - }, - "formatters": { - "indent": { - "()": IndentingFormatter, - "format": "%(message)s", - }, - "indent_with_timestamp": { - "()": IndentingFormatter, - "format": "%(message)s", - "add_timestamp": True, - }, - }, - "handlers": { - "console": { - "level": level, - "class": handler_classes["stream"], - "no_color": no_color, - "stream": log_streams["stdout"], - "filters": ["exclude_subprocess", "exclude_warnings"], - "formatter": "indent", - }, - "console_errors": { - "level": "WARNING", - "class": handler_classes["stream"], - "no_color": no_color, - "stream": log_streams["stderr"], - "filters": ["exclude_subprocess"], - "formatter": "indent", - }, - # A handler responsible for logging to the console messages - # from the "subprocessor" logger. - "console_subprocess": { - "level": level, - "class": handler_classes["stream"], - "no_color": no_color, - "stream": log_streams["stderr"], - "filters": ["restrict_to_subprocess"], - "formatter": "indent", - }, - "user_log": { - "level": "DEBUG", - "class": handler_classes["file"], - "filename": additional_log_file, - "delay": True, - "formatter": "indent_with_timestamp", - }, - }, - "root": { - "level": root_level, - "handlers": handlers, - }, - "loggers": { - "pip._vendor": { - "level": vendored_log_level - } - }, - }) - - return level_number diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/logging.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/logging.pyc deleted file mode 100644 index 8e1fa362..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/logging.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/marker_files.py b/env/lib/python2.7/site-packages/pip/_internal/utils/marker_files.py deleted file mode 100644 index cb0c8ebc..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/marker_files.py +++ /dev/null @@ -1,20 +0,0 @@ -import os.path - -DELETE_MARKER_MESSAGE = '''\ -This file is placed here by pip to indicate the source was put -here by pip. - -Once this package is successfully installed this source code will be -deleted (unless you remove this file). -''' -PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' - - -def write_delete_marker_file(directory): - # type: (str) -> None - """ - Write the pip delete marker file into this directory. 
- """ - filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) - with open(filepath, 'w') as marker_fp: - marker_fp.write(DELETE_MARKER_MESSAGE) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/marker_files.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/marker_files.pyc deleted file mode 100644 index d8db6435..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/marker_files.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/misc.py b/env/lib/python2.7/site-packages/pip/_internal/utils/misc.py deleted file mode 100644 index 61f74dc8..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/misc.py +++ /dev/null @@ -1,1201 +0,0 @@ -from __future__ import absolute_import - -import contextlib -import errno -import getpass -import io -# we have a submodule named 'logging' which would shadow this if we used the -# regular name: -import logging as std_logging -import os -import posixpath -import re -import shutil -import stat -import subprocess -import sys -import tarfile -import zipfile -from collections import deque - -from pip._vendor import pkg_resources -# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is -# why we ignore the type on this import. -from pip._vendor.retrying import retry # type: ignore -from pip._vendor.six import PY2, text_type -from pip._vendor.six.moves import input, shlex_quote -from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._vendor.six.moves.urllib import request as urllib_request -from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote - -from pip import __version__ -from pip._internal.exceptions import CommandError, InstallationError -from pip._internal.locations import site_packages, user_site -from pip._internal.utils.compat import ( - WINDOWS, console_to_str, expanduser, stdlib_pkgs, str_to_display, -) -from pip._internal.utils.marker_files import write_delete_marker_file -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.virtualenv import ( - running_under_virtualenv, virtualenv_no_global, -) - -if PY2: - from io import BytesIO as StringIO -else: - from io import StringIO - -if MYPY_CHECK_RUNNING: - from typing import ( - Any, AnyStr, Container, Iterable, List, Mapping, Match, Optional, Text, - Tuple, Union, cast, - ) - from pip._vendor.pkg_resources import Distribution - from pip._internal.models.link import Link - from pip._internal.utils.ui import SpinnerInterface - - VersionInfo = Tuple[int, int, int] -else: - # typing's cast() is needed at runtime, but we don't want to import typing. - # Thus, we use a dummy no-op version, which we tell mypy to ignore. 
- def cast(type_, value): # type: ignore - return value - - -__all__ = ['rmtree', 'display_path', 'backup_dir', - 'ask', 'splitext', - 'format_size', 'is_installable_dir', - 'is_svn_page', 'file_contents', - 'split_leading_dir', 'has_leading_dir', - 'normalize_path', - 'renames', 'get_prog', - 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess', - 'captured_stdout', 'ensure_dir', - 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', 'WHEEL_EXTENSION', - 'get_installed_version', 'remove_auth_from_url'] - - -logger = std_logging.getLogger(__name__) -subprocess_logger = std_logging.getLogger('pip.subprocessor') - -LOG_DIVIDER = '----------------------------------------' - -WHEEL_EXTENSION = '.whl' -BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') -XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma') -ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION) -TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') -ARCHIVE_EXTENSIONS = ( - ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS) -SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS - -try: - import bz2 # noqa - SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS -except ImportError: - logger.debug('bz2 module is not available') - -try: - # Only for Python 3.3+ - import lzma # noqa - SUPPORTED_EXTENSIONS += XZ_EXTENSIONS -except ImportError: - logger.debug('lzma module is not available') - - -def get_pip_version(): - # type: () -> str - pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") - pip_pkg_dir = os.path.abspath(pip_pkg_dir) - - return ( - 'pip {} from {} (python {})'.format( - __version__, pip_pkg_dir, sys.version[:3], - ) - ) - - -def normalize_version_info(py_version_info): - # type: (Tuple[int, ...]) -> Tuple[int, int, int] - """ - Convert a tuple of ints representing a Python version to one of length - three. - - :param py_version_info: a tuple of ints representing a Python version, - or None to specify no version. The tuple can have any length. - - :return: a tuple of length three if `py_version_info` is non-None. - Otherwise, return `py_version_info` unchanged (i.e. None). - """ - if len(py_version_info) < 3: - py_version_info += (3 - len(py_version_info)) * (0,) - elif len(py_version_info) > 3: - py_version_info = py_version_info[:3] - - return cast('VersionInfo', py_version_info) - - -def ensure_dir(path): - # type: (AnyStr) -> None - """os.path.makedirs without EEXIST.""" - try: - os.makedirs(path) - except OSError as e: - if e.errno != errno.EEXIST: - raise - - -def get_prog(): - # type: () -> str - try: - prog = os.path.basename(sys.argv[0]) - if prog in ('__main__.py', '-c'): - return "%s -m pip" % sys.executable - else: - return prog - except (AttributeError, TypeError, IndexError): - pass - return 'pip' - - -# Retry every half second for up to 3 seconds -@retry(stop_max_delay=3000, wait_fixed=500) -def rmtree(dir, ignore_errors=False): - # type: (str, bool) -> None - shutil.rmtree(dir, ignore_errors=ignore_errors, - onerror=rmtree_errorhandler) - - -def rmtree_errorhandler(func, path, exc_info): - """On Windows, the files in .svn are read-only, so when rmtree() tries to - remove them, an exception is thrown. 
We catch that here, remove the - read-only attribute, and hopefully continue without problems.""" - # if file type currently read only - if os.stat(path).st_mode & stat.S_IREAD: - # convert to read/write - os.chmod(path, stat.S_IWRITE) - # use the original function to repeat the operation - func(path) - return - else: - raise - - -def path_to_display(path): - # type: (Optional[Union[str, Text]]) -> Optional[Text] - """ - Convert a bytes (or text) path to text (unicode in Python 2) for display - and logging purposes. - - This function should never error out. Also, this function is mainly needed - for Python 2 since in Python 3 str paths are already text. - """ - if path is None: - return None - if isinstance(path, text_type): - return path - # Otherwise, path is a bytes object (str in Python 2). - try: - display_path = path.decode(sys.getfilesystemencoding(), 'strict') - except UnicodeDecodeError: - # Include the full bytes to make troubleshooting easier, even though - # it may not be very human readable. - if PY2: - # Convert the bytes to a readable str representation using - # repr(), and then convert the str to unicode. - # Also, we add the prefix "b" to the repr() return value both - # to make the Python 2 output look like the Python 3 output, and - # to signal to the user that this is a bytes representation. - display_path = str_to_display('b{!r}'.format(path)) - else: - # Silence the "F821 undefined name 'ascii'" flake8 error since - # in Python 3 ascii() is a built-in. - display_path = ascii(path) # noqa: F821 - - return display_path - - -def display_path(path): - # type: (Union[str, Text]) -> str - """Gives the display value for a given path, making it relative to cwd - if possible.""" - path = os.path.normcase(os.path.abspath(path)) - if sys.version_info[0] == 2: - path = path.decode(sys.getfilesystemencoding(), 'replace') - path = path.encode(sys.getdefaultencoding(), 'replace') - if path.startswith(os.getcwd() + os.path.sep): - path = '.' 
+ path[len(os.getcwd()):] - return path - - -def backup_dir(dir, ext='.bak'): - # type: (str, str) -> str - """Figure out the name of a directory to back up the given dir to - (adding .bak, .bak2, etc)""" - n = 1 - extension = ext - while os.path.exists(dir + extension): - n += 1 - extension = ext + str(n) - return dir + extension - - -def ask_path_exists(message, options): - # type: (str, Iterable[str]) -> str - for action in os.environ.get('PIP_EXISTS_ACTION', '').split(): - if action in options: - return action - return ask(message, options) - - -def _check_no_input(message): - # type: (str) -> None - """Raise an error if no input is allowed.""" - if os.environ.get('PIP_NO_INPUT'): - raise Exception( - 'No input was expected ($PIP_NO_INPUT set); question: %s' % - message - ) - - -def ask(message, options): - # type: (str, Iterable[str]) -> str - """Ask the message interactively, with the given possible responses""" - while 1: - _check_no_input(message) - response = input(message) - response = response.strip().lower() - if response not in options: - print( - 'Your response (%r) was not one of the expected responses: ' - '%s' % (response, ', '.join(options)) - ) - else: - return response - - -def ask_input(message): - # type: (str) -> str - """Ask for input interactively.""" - _check_no_input(message) - return input(message) - - -def ask_password(message): - # type: (str) -> str - """Ask for a password interactively.""" - _check_no_input(message) - return getpass.getpass(message) - - -def format_size(bytes): - # type: (float) -> str - if bytes > 1000 * 1000: - return '%.1fMB' % (bytes / 1000.0 / 1000) - elif bytes > 10 * 1000: - return '%ikB' % (bytes / 1000) - elif bytes > 1000: - return '%.1fkB' % (bytes / 1000.0) - else: - return '%ibytes' % bytes - - -def is_installable_dir(path): - # type: (str) -> bool - """Is path is a directory containing setup.py or pyproject.toml? 
- """ - if not os.path.isdir(path): - return False - setup_py = os.path.join(path, 'setup.py') - if os.path.isfile(setup_py): - return True - pyproject_toml = os.path.join(path, 'pyproject.toml') - if os.path.isfile(pyproject_toml): - return True - return False - - -def is_svn_page(html): - # type: (Union[str, Text]) -> Optional[Match[Union[str, Text]]] - """ - Returns true if the page appears to be the index page of an svn repository - """ - return (re.search(r'[^<]*Revision \d+:', html) and - re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)) - - -def file_contents(filename): - # type: (str) -> Text - with open(filename, 'rb') as fp: - return fp.read().decode('utf-8') - - -def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): - """Yield pieces of data from a file-like object until EOF.""" - while True: - chunk = file.read(size) - if not chunk: - break - yield chunk - - -def split_leading_dir(path): - # type: (Union[str, Text]) -> List[Union[str, Text]] - path = path.lstrip('/').lstrip('\\') - if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or - '\\' not in path): - return path.split('/', 1) - elif '\\' in path: - return path.split('\\', 1) - else: - return [path, ''] - - -def has_leading_dir(paths): - # type: (Iterable[Union[str, Text]]) -> bool - """Returns true if all the paths have the same leading path name - (i.e., everything is in one subdirectory in an archive)""" - common_prefix = None - for path in paths: - prefix, rest = split_leading_dir(path) - if not prefix: - return False - elif common_prefix is None: - common_prefix = prefix - elif prefix != common_prefix: - return False - return True - - -def normalize_path(path, resolve_symlinks=True): - # type: (str, bool) -> str - """ - Convert a path to its canonical, case-normalized, absolute version. - - """ - path = expanduser(path) - if resolve_symlinks: - path = os.path.realpath(path) - else: - path = os.path.abspath(path) - return os.path.normcase(path) - - -def splitext(path): - # type: (str) -> Tuple[str, str] - """Like os.path.splitext, but take off .tar too""" - base, ext = posixpath.splitext(path) - if base.lower().endswith('.tar'): - ext = base[-4:] + ext - base = base[:-4] - return base, ext - - -def renames(old, new): - # type: (str, str) -> None - """Like os.renames(), but handles renaming across devices.""" - # Implementation borrowed from os.renames(). - head, tail = os.path.split(new) - if head and tail and not os.path.exists(head): - os.makedirs(head) - - shutil.move(old, new) - - head, tail = os.path.split(old) - if head and tail: - try: - os.removedirs(head) - except OSError: - pass - - -def is_local(path): - # type: (str) -> bool - """ - Return True if path is within sys.prefix, if we're running in a virtualenv. - - If we're not in a virtualenv, all paths are considered "local." - - """ - if not running_under_virtualenv(): - return True - return normalize_path(path).startswith(normalize_path(sys.prefix)) - - -def dist_is_local(dist): - # type: (Distribution) -> bool - """ - Return True if given Distribution object is installed locally - (i.e. within current virtualenv). - - Always True if we're not in a virtualenv. - - """ - return is_local(dist_location(dist)) - - -def dist_in_usersite(dist): - # type: (Distribution) -> bool - """ - Return True if given Distribution is installed in user site. 
- """ - norm_path = normalize_path(dist_location(dist)) - return norm_path.startswith(normalize_path(user_site)) - - -def dist_in_site_packages(dist): - # type: (Distribution) -> bool - """ - Return True if given Distribution is installed in - sysconfig.get_python_lib(). - """ - return normalize_path( - dist_location(dist) - ).startswith(normalize_path(site_packages)) - - -def dist_is_editable(dist): - # type: (Distribution) -> bool - """ - Return True if given Distribution is an editable install. - """ - for path_item in sys.path: - egg_link = os.path.join(path_item, dist.project_name + '.egg-link') - if os.path.isfile(egg_link): - return True - return False - - -def get_installed_distributions( - local_only=True, # type: bool - skip=stdlib_pkgs, # type: Container[str] - include_editables=True, # type: bool - editables_only=False, # type: bool - user_only=False, # type: bool - paths=None # type: Optional[List[str]] -): - # type: (...) -> List[Distribution] - """ - Return a list of installed Distribution objects. - - If ``local_only`` is True (default), only return installations - local to the current virtualenv, if in a virtualenv. - - ``skip`` argument is an iterable of lower-case project names to - ignore; defaults to stdlib_pkgs - - If ``include_editables`` is False, don't report editables. - - If ``editables_only`` is True , only report editables. - - If ``user_only`` is True , only report installations in the user - site directory. - - If ``paths`` is set, only report the distributions present at the - specified list of locations. - """ - if paths: - working_set = pkg_resources.WorkingSet(paths) - else: - working_set = pkg_resources.working_set - - if local_only: - local_test = dist_is_local - else: - def local_test(d): - return True - - if include_editables: - def editable_test(d): - return True - else: - def editable_test(d): - return not dist_is_editable(d) - - if editables_only: - def editables_only_test(d): - return dist_is_editable(d) - else: - def editables_only_test(d): - return True - - if user_only: - user_test = dist_in_usersite - else: - def user_test(d): - return True - - # because of pkg_resources vendoring, mypy cannot find stub in typeshed - return [d for d in working_set # type: ignore - if local_test(d) and - d.key not in skip and - editable_test(d) and - editables_only_test(d) and - user_test(d) - ] - - -def egg_link_path(dist): - # type: (Distribution) -> Optional[str] - """ - Return the path for the .egg-link file if it exists, otherwise, None. - - There's 3 scenarios: - 1) not in a virtualenv - try to find in site.USER_SITE, then site_packages - 2) in a no-global virtualenv - try to find in site_packages - 3) in a yes-global virtualenv - try to find in site_packages, then site.USER_SITE - (don't look in global location) - - For #1 and #3, there could be odd cases, where there's an egg-link in 2 - locations. - - This method will just return the first one found. - """ - sites = [] - if running_under_virtualenv(): - if virtualenv_no_global(): - sites.append(site_packages) - else: - sites.append(site_packages) - if user_site: - sites.append(user_site) - else: - if user_site: - sites.append(user_site) - sites.append(site_packages) - - for site in sites: - egglink = os.path.join(site, dist.project_name) + '.egg-link' - if os.path.isfile(egglink): - return egglink - return None - - -def dist_location(dist): - # type: (Distribution) -> str - """ - Get the site-packages location of this distribution. 
Generally - this is dist.location, except in the case of develop-installed - packages, where dist.location is the source code location, and we - want to know where the egg-link file is. - - """ - egg_link = egg_link_path(dist) - if egg_link: - return egg_link - return dist.location - - -def current_umask(): - """Get the current umask which involves having to set it temporarily.""" - mask = os.umask(0) - os.umask(mask) - return mask - - -def unzip_file(filename, location, flatten=True): - # type: (str, str, bool) -> None - """ - Unzip the file (with path `filename`) to the destination `location`. All - files are written based on system defaults and umask (i.e. permissions are - not preserved), except that regular file members with any execute - permissions (user, group, or world) have "chmod +x" applied after being - written. Note that for windows, any execute changes using os.chmod are - no-ops per the python docs. - """ - ensure_dir(location) - zipfp = open(filename, 'rb') - try: - zip = zipfile.ZipFile(zipfp, allowZip64=True) - leading = has_leading_dir(zip.namelist()) and flatten - for info in zip.infolist(): - name = info.filename - fn = name - if leading: - fn = split_leading_dir(name)[1] - fn = os.path.join(location, fn) - dir = os.path.dirname(fn) - if fn.endswith('/') or fn.endswith('\\'): - # A directory - ensure_dir(fn) - else: - ensure_dir(dir) - # Don't use read() to avoid allocating an arbitrarily large - # chunk of memory for the file's content - fp = zip.open(name) - try: - with open(fn, 'wb') as destfp: - shutil.copyfileobj(fp, destfp) - finally: - fp.close() - mode = info.external_attr >> 16 - # if mode and regular file and any execute permissions for - # user/group/world? - if mode and stat.S_ISREG(mode) and mode & 0o111: - # make dest file have execute for user/group/world - # (chmod +x) no-op on windows per python docs - os.chmod(fn, (0o777 - current_umask() | 0o111)) - finally: - zipfp.close() - - -def untar_file(filename, location): - # type: (str, str) -> None - """ - Untar the file (with path `filename`) to the destination `location`. - All files are written based on system defaults and umask (i.e. permissions - are not preserved), except that regular file members with any execute - permissions (user, group, or world) have "chmod +x" applied after being - written. Note that for windows, any execute changes using os.chmod are - no-ops per the python docs. 
- """ - ensure_dir(location) - if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): - mode = 'r:gz' - elif filename.lower().endswith(BZ2_EXTENSIONS): - mode = 'r:bz2' - elif filename.lower().endswith(XZ_EXTENSIONS): - mode = 'r:xz' - elif filename.lower().endswith('.tar'): - mode = 'r' - else: - logger.warning( - 'Cannot determine compression type for file %s', filename, - ) - mode = 'r:*' - tar = tarfile.open(filename, mode) - try: - leading = has_leading_dir([ - member.name for member in tar.getmembers() - ]) - for member in tar.getmembers(): - fn = member.name - if leading: - # https://github.com/python/mypy/issues/1174 - fn = split_leading_dir(fn)[1] # type: ignore - path = os.path.join(location, fn) - if member.isdir(): - ensure_dir(path) - elif member.issym(): - try: - # https://github.com/python/typeshed/issues/2673 - tar._extract_member(member, path) # type: ignore - except Exception as exc: - # Some corrupt tar files seem to produce this - # (specifically bad symlinks) - logger.warning( - 'In the tar file %s the member %s is invalid: %s', - filename, member.name, exc, - ) - continue - else: - try: - fp = tar.extractfile(member) - except (KeyError, AttributeError) as exc: - # Some corrupt tar files seem to produce this - # (specifically bad symlinks) - logger.warning( - 'In the tar file %s the member %s is invalid: %s', - filename, member.name, exc, - ) - continue - ensure_dir(os.path.dirname(path)) - with open(path, 'wb') as destfp: - shutil.copyfileobj(fp, destfp) - fp.close() - # Update the timestamp (useful for cython compiled files) - # https://github.com/python/typeshed/issues/2673 - tar.utime(member, path) # type: ignore - # member have any execute permissions for user/group/world? - if member.mode & 0o111: - # make dest file have execute for user/group/world - # no-op on windows per python docs - os.chmod(path, (0o777 - current_umask() | 0o111)) - finally: - tar.close() - - -def unpack_file( - filename, # type: str - location, # type: str - content_type, # type: Optional[str] - link # type: Optional[Link] -): - # type: (...) -> None - filename = os.path.realpath(filename) - if (content_type == 'application/zip' or - filename.lower().endswith(ZIP_EXTENSIONS) or - zipfile.is_zipfile(filename)): - unzip_file( - filename, - location, - flatten=not filename.endswith('.whl') - ) - elif (content_type == 'application/x-gzip' or - tarfile.is_tarfile(filename) or - filename.lower().endswith( - TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)): - untar_file(filename, location) - elif (content_type and content_type.startswith('text/html') and - is_svn_page(file_contents(filename))): - # We don't really care about this - from pip._internal.vcs.subversion import Subversion - url = 'svn+' + link.url - Subversion().unpack(location, url=url) - else: - # FIXME: handle? - # FIXME: magic signatures? - logger.critical( - 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' - 'cannot detect archive format', - filename, location, content_type, - ) - raise InstallationError( - 'Cannot determine archive format of %s' % location - ) - - -def format_command_args(args): - # type: (List[str]) -> str - """ - Format command arguments for display. - """ - return ' '.join(shlex_quote(arg) for arg in args) - - -def make_subprocess_output_error( - cmd_args, # type: List[str] - cwd, # type: Optional[str] - lines, # type: List[Text] - exit_status, # type: int -): - # type: (...) 
-> Text - """ - Create and return the error message to use to log a subprocess error - with command output. - - :param lines: A list of lines, each ending with a newline. - """ - command = format_command_args(cmd_args) - # Convert `command` and `cwd` to text (unicode in Python 2) so we can use - # them as arguments in the unicode format string below. This avoids - # "UnicodeDecodeError: 'ascii' codec can't decode byte ..." in Python 2 - # if either contains a non-ascii character. - command_display = str_to_display(command, desc='command bytes') - cwd_display = path_to_display(cwd) - - # We know the joined output value ends in a newline. - output = ''.join(lines) - msg = ( - # Use a unicode string to avoid "UnicodeEncodeError: 'ascii' - # codec can't encode character ..." in Python 2 when a format - # argument (e.g. `output`) has a non-ascii character. - u'Command errored out with exit status {exit_status}:\n' - ' command: {command_display}\n' - ' cwd: {cwd_display}\n' - 'Complete output ({line_count} lines):\n{output}{divider}' - ).format( - exit_status=exit_status, - command_display=command_display, - cwd_display=cwd_display, - line_count=len(lines), - output=output, - divider=LOG_DIVIDER, - ) - return msg - - -def call_subprocess( - cmd, # type: List[str] - show_stdout=False, # type: bool - cwd=None, # type: Optional[str] - on_returncode='raise', # type: str - extra_ok_returncodes=None, # type: Optional[Iterable[int]] - command_desc=None, # type: Optional[str] - extra_environ=None, # type: Optional[Mapping[str, Any]] - unset_environ=None, # type: Optional[Iterable[str]] - spinner=None # type: Optional[SpinnerInterface] -): - # type: (...) -> Text - """ - Args: - show_stdout: if true, use INFO to log the subprocess's stderr and - stdout streams. Otherwise, use DEBUG. Defaults to False. - extra_ok_returncodes: an iterable of integer return codes that are - acceptable, in addition to 0. Defaults to None, which means []. - unset_environ: an iterable of environment variable names to unset - prior to calling subprocess.Popen(). - """ - if extra_ok_returncodes is None: - extra_ok_returncodes = [] - if unset_environ is None: - unset_environ = [] - # Most places in pip use show_stdout=False. What this means is-- - # - # - We connect the child's output (combined stderr and stdout) to a - # single pipe, which we read. - # - We log this output to stderr at DEBUG level as it is received. - # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't - # requested), then we show a spinner so the user can still see the - # subprocess is in progress. - # - If the subprocess exits with an error, we log the output to stderr - # at ERROR level if it hasn't already been displayed to the console - # (e.g. if --verbose logging wasn't enabled). This way we don't log - # the output to the console twice. - # - # If show_stdout=True, then the above is still done, but with DEBUG - # replaced by INFO. - if show_stdout: - # Then log the subprocess output at INFO level. - log_subprocess = subprocess_logger.info - used_level = std_logging.INFO - else: - # Then log the subprocess output using DEBUG. This also ensures - # it will be logged to the log file (aka user_log), if enabled. - log_subprocess = subprocess_logger.debug - used_level = std_logging.DEBUG - - # Whether the subprocess will be visible in the console. - showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level - - # Only use the spinner if we're not showing the subprocess output - # and we have a spinner. 
- use_spinner = not showing_subprocess and spinner is not None - - if command_desc is None: - command_desc = format_command_args(cmd) - - log_subprocess("Running command %s", command_desc) - env = os.environ.copy() - if extra_environ: - env.update(extra_environ) - for name in unset_environ: - env.pop(name, None) - try: - proc = subprocess.Popen( - cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, - stdout=subprocess.PIPE, cwd=cwd, env=env, - ) - proc.stdin.close() - except Exception as exc: - subprocess_logger.critical( - "Error %s while executing command %s", exc, command_desc, - ) - raise - all_output = [] - while True: - # The "line" value is a unicode string in Python 2. - line = console_to_str(proc.stdout.readline()) - if not line: - break - line = line.rstrip() - all_output.append(line + '\n') - - # Show the line immediately. - log_subprocess(line) - # Update the spinner. - if use_spinner: - spinner.spin() - try: - proc.wait() - finally: - if proc.stdout: - proc.stdout.close() - proc_had_error = ( - proc.returncode and proc.returncode not in extra_ok_returncodes - ) - if use_spinner: - if proc_had_error: - spinner.finish("error") - else: - spinner.finish("done") - if proc_had_error: - if on_returncode == 'raise': - if not showing_subprocess: - # Then the subprocess streams haven't been logged to the - # console yet. - msg = make_subprocess_output_error( - cmd_args=cmd, - cwd=cwd, - lines=all_output, - exit_status=proc.returncode, - ) - subprocess_logger.error(msg) - exc_msg = ( - 'Command errored out with exit status {}: {} ' - 'Check the logs for full command output.' - ).format(proc.returncode, command_desc) - raise InstallationError(exc_msg) - elif on_returncode == 'warn': - subprocess_logger.warning( - 'Command "%s" had error code %s in %s', - command_desc, proc.returncode, cwd, - ) - elif on_returncode == 'ignore': - pass - else: - raise ValueError('Invalid value: on_returncode=%s' % - repr(on_returncode)) - return ''.join(all_output) - - -def _make_build_dir(build_dir): - os.makedirs(build_dir) - write_delete_marker_file(build_dir) - - -class FakeFile(object): - """Wrap a list of lines in an object with readline() to make - ConfigParser happy.""" - def __init__(self, lines): - self._gen = (l for l in lines) - - def readline(self): - try: - try: - return next(self._gen) - except NameError: - return self._gen.next() - except StopIteration: - return '' - - def __iter__(self): - return self._gen - - -class StreamWrapper(StringIO): - - @classmethod - def from_stream(cls, orig_stream): - cls.orig_stream = orig_stream - return cls() - - # compileall.compile_dir() needs stdout.encoding to print to stdout - @property - def encoding(self): - return self.orig_stream.encoding - - -@contextlib.contextmanager -def captured_output(stream_name): - """Return a context manager used by captured_stdout/stdin/stderr - that temporarily replaces the sys stream *stream_name* with a StringIO. - - Taken from Lib/support/__init__.py in the CPython repo. - """ - orig_stdout = getattr(sys, stream_name) - setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) - try: - yield getattr(sys, stream_name) - finally: - setattr(sys, stream_name, orig_stdout) - - -def captured_stdout(): - """Capture the output of sys.stdout: - - with captured_stdout() as stdout: - print('hello') - self.assertEqual(stdout.getvalue(), 'hello\n') - - Taken from Lib/support/__init__.py in the CPython repo. - """ - return captured_output('stdout') - - -def captured_stderr(): - """ - See captured_stdout(). 
- """ - return captured_output('stderr') - - -class cached_property(object): - """A property that is only computed once per instance and then replaces - itself with an ordinary attribute. Deleting the attribute resets the - property. - - Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175 - """ - - def __init__(self, func): - self.__doc__ = getattr(func, '__doc__') - self.func = func - - def __get__(self, obj, cls): - if obj is None: - # We're being accessed from the class itself, not from an object - return self - value = obj.__dict__[self.func.__name__] = self.func(obj) - return value - - -def get_installed_version(dist_name, working_set=None): - """Get the installed version of dist_name avoiding pkg_resources cache""" - # Create a requirement that we'll look for inside of setuptools. - req = pkg_resources.Requirement.parse(dist_name) - - if working_set is None: - # We want to avoid having this cached, so we need to construct a new - # working set each time. - working_set = pkg_resources.WorkingSet() - - # Get the installed distribution from our working set - dist = working_set.find(req) - - # Check to see if we got an installed distribution or not, if we did - # we want to return it's version. - return dist.version if dist else None - - -def consume(iterator): - """Consume an iterable at C speed.""" - deque(iterator, maxlen=0) - - -# Simulates an enum -def enum(*sequential, **named): - enums = dict(zip(sequential, range(len(sequential))), **named) - reverse = {value: key for key, value in enums.items()} - enums['reverse_mapping'] = reverse - return type('Enum', (), enums) - - -def path_to_url(path): - # type: (Union[str, Text]) -> str - """ - Convert a path to a file: URL. The path will be made absolute and have - quoted path parts. - """ - path = os.path.normpath(os.path.abspath(path)) - url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) - return url - - -def split_auth_from_netloc(netloc): - """ - Parse out and remove the auth information from a netloc. - - Returns: (netloc, (username, password)). - """ - if '@' not in netloc: - return netloc, (None, None) - - # Split from the right because that's how urllib.parse.urlsplit() - # behaves if more than one @ is present (which can be checked using - # the password attribute of urlsplit()'s return value). - auth, netloc = netloc.rsplit('@', 1) - if ':' in auth: - # Split from the left because that's how urllib.parse.urlsplit() - # behaves if more than one : is present (which again can be checked - # using the password attribute of the return value) - user_pass = auth.split(':', 1) - else: - user_pass = auth, None - - user_pass = tuple( - None if x is None else urllib_unquote(x) for x in user_pass - ) - - return netloc, user_pass - - -def redact_netloc(netloc): - # type: (str) -> str - """ - Replace the password in a netloc with "****", if it exists. - - For example, "user:pass@example.com" returns "user:****@example.com". - """ - netloc, (user, password) = split_auth_from_netloc(netloc) - if user is None: - return netloc - password = '' if password is None else ':****' - return '{user}{password}@{netloc}'.format(user=urllib_parse.quote(user), - password=password, - netloc=netloc) - - -def _transform_url(url, transform_netloc): - """Transform and replace netloc in a url. - - transform_netloc is a function taking the netloc and returning a - tuple. The first element of this tuple is the new netloc. The - entire tuple is returned. 
- - Returns a tuple containing the transformed url as item 0 and the - original tuple returned by transform_netloc as item 1. - """ - purl = urllib_parse.urlsplit(url) - netloc_tuple = transform_netloc(purl.netloc) - # stripped url - url_pieces = ( - purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment - ) - surl = urllib_parse.urlunsplit(url_pieces) - return surl, netloc_tuple - - -def _get_netloc(netloc): - return split_auth_from_netloc(netloc) - - -def _redact_netloc(netloc): - return (redact_netloc(netloc),) - - -def split_auth_netloc_from_url(url): - # type: (str) -> Tuple[str, str, Tuple[str, str]] - """ - Parse a url into separate netloc, auth, and url with no auth. - - Returns: (url_without_auth, netloc, (username, password)) - """ - url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) - return url_without_auth, netloc, auth - - -def remove_auth_from_url(url): - # type: (str) -> str - """Return a copy of url with 'username:password@' removed.""" - # username/pass params are passed to subversion through flags - # and are not recognized in the url. - return _transform_url(url, _get_netloc)[0] - - -def redact_password_from_url(url): - # type: (str) -> str - """Replace the password in a given url with ****.""" - return _transform_url(url, _redact_netloc)[0] - - -def protect_pip_from_modification_on_windows(modifying_pip): - """Protection of pip.exe from modification on Windows - - On Windows, any operation modifying pip should be run as: - python -m pip ... - """ - pip_names = [ - "pip.exe", - "pip{}.exe".format(sys.version_info[0]), - "pip{}.{}.exe".format(*sys.version_info[:2]) - ] - - # See https://github.com/pypa/pip/issues/1299 for more discussion - should_show_use_python_msg = ( - modifying_pip and - WINDOWS and - os.path.basename(sys.argv[0]) in pip_names - ) - - if should_show_use_python_msg: - new_command = [ - sys.executable, "-m", "pip" - ] + sys.argv[1:] - raise CommandError( - 'To modify pip, please run the following command:\n{}' - .format(" ".join(new_command)) - ) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/misc.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/misc.pyc deleted file mode 100644 index d93085b1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/misc.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/models.py b/env/lib/python2.7/site-packages/pip/_internal/utils/models.py deleted file mode 100644 index fccaf5dd..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/models.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Utilities for defining models -""" - -import operator - - -class KeyBasedCompareMixin(object): - """Provides comparison capabilities that is based on a key - """ - - def __init__(self, key, defining_class): - self._compare_key = key - self._defining_class = defining_class - - def __hash__(self): - return hash(self._compare_key) - - def __lt__(self, other): - return self._compare(other, operator.__lt__) - - def __le__(self, other): - return self._compare(other, operator.__le__) - - def __gt__(self, other): - return self._compare(other, operator.__gt__) - - def __ge__(self, other): - return self._compare(other, operator.__ge__) - - def __eq__(self, other): - return self._compare(other, operator.__eq__) - - def __ne__(self, other): - return self._compare(other, operator.__ne__) - - def _compare(self, other, method): - if not isinstance(other, self._defining_class): - return NotImplemented - - return 
method(self._compare_key, other._compare_key) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/models.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/models.pyc deleted file mode 100644 index 7ef1c040..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/models.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/outdated.py b/env/lib/python2.7/site-packages/pip/_internal/utils/outdated.py deleted file mode 100644 index 2b10aeff..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/outdated.py +++ /dev/null @@ -1,178 +0,0 @@ -from __future__ import absolute_import - -import datetime -import json -import logging -import os.path -import sys - -from pip._vendor import lockfile, pkg_resources -from pip._vendor.packaging import version as packaging_version - -from pip._internal.cli.cmdoptions import make_search_scope -from pip._internal.index import PackageFinder -from pip._internal.models.selection_prefs import SelectionPreferences -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.misc import ensure_dir, get_installed_version -from pip._internal.utils.packaging import get_installer -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - import optparse - from typing import Any, Dict - from pip._internal.download import PipSession - - -SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" - - -logger = logging.getLogger(__name__) - - -class SelfCheckState(object): - def __init__(self, cache_dir): - # type: (str) -> None - self.state = {} # type: Dict[str, Any] - self.statefile_path = None - - # Try to load the existing state - if cache_dir: - self.statefile_path = os.path.join(cache_dir, "selfcheck.json") - try: - with open(self.statefile_path) as statefile: - self.state = json.load(statefile)[sys.prefix] - except (IOError, ValueError, KeyError): - # Explicitly suppressing exceptions, since we don't want to - # error out if the cache file is invalid. - pass - - def save(self, pypi_version, current_time): - # type: (str, datetime.datetime) -> None - # If we do not have a path to cache in, don't bother saving. - if not self.statefile_path: - return - - # Check to make sure that we own the directory - if not check_path_owner(os.path.dirname(self.statefile_path)): - return - - # Now that we've ensured the directory is owned by this user, we'll go - # ahead and make sure that all our directories are created. - ensure_dir(os.path.dirname(self.statefile_path)) - - # Attempt to write out our version check file - with lockfile.LockFile(self.statefile_path): - if os.path.exists(self.statefile_path): - with open(self.statefile_path) as statefile: - state = json.load(statefile) - else: - state = {} - - state[sys.prefix] = { - "last_check": current_time.strftime(SELFCHECK_DATE_FMT), - "pypi_version": pypi_version, - } - - with open(self.statefile_path, "w") as statefile: - json.dump(state, statefile, sort_keys=True, - separators=(",", ":")) - - -def was_installed_by_pip(pkg): - # type: (str) -> bool - """Checks whether pkg was installed by pip - - This is used not to display the upgrade message when pip is in fact - installed by system package manager, such as dnf on Fedora. 
- """ - try: - dist = pkg_resources.get_distribution(pkg) - return "pip" == get_installer(dist) - except pkg_resources.DistributionNotFound: - return False - - -def pip_version_check(session, options): - # type: (PipSession, optparse.Values) -> None - """Check for an update for pip. - - Limit the frequency of checks to once per week. State is stored either in - the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix - of the pip script path. - """ - installed_version = get_installed_version("pip") - if not installed_version: - return - - pip_version = packaging_version.parse(installed_version) - pypi_version = None - - try: - state = SelfCheckState(cache_dir=options.cache_dir) - - current_time = datetime.datetime.utcnow() - # Determine if we need to refresh the state - if "last_check" in state.state and "pypi_version" in state.state: - last_check = datetime.datetime.strptime( - state.state["last_check"], - SELFCHECK_DATE_FMT - ) - if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: - pypi_version = state.state["pypi_version"] - - # Refresh the version if we need to or just see if we need to warn - if pypi_version is None: - # Lets use PackageFinder to see what the latest pip version is - search_scope = make_search_scope(options, suppress_no_index=True) - - # Pass allow_yanked=False so we don't suggest upgrading to a - # yanked version. - selection_prefs = SelectionPreferences( - allow_yanked=False, - allow_all_prereleases=False, # Explicitly set to False - ) - - finder = PackageFinder.create( - search_scope=search_scope, - selection_prefs=selection_prefs, - trusted_hosts=options.trusted_hosts, - session=session, - ) - candidate = finder.find_candidates("pip").get_best() - if candidate is None: - return - pypi_version = str(candidate.version) - - # save that we've performed a check - state.save(pypi_version, current_time) - - remote_version = packaging_version.parse(pypi_version) - - local_version_is_older = ( - pip_version < remote_version and - pip_version.base_version != remote_version.base_version and - was_installed_by_pip('pip') - ) - - # Determine if our pypi_version is older - if not local_version_is_older: - return - - # Advise "python -m pip" on Windows to avoid issues - # with overwriting pip.exe. 
- if WINDOWS: - pip_cmd = "python -m pip" - else: - pip_cmd = "pip" - logger.warning( - "You are using pip version %s, however version %s is " - "available.\nYou should consider upgrading via the " - "'%s install --upgrade pip' command.", - pip_version, pypi_version, pip_cmd - ) - except Exception: - logger.debug( - "There was an error checking the latest version of pip", - exc_info=True, - ) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/outdated.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/outdated.pyc deleted file mode 100644 index 3d1b5561..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/outdated.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/packaging.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/packaging.pyc deleted file mode 100644 index 7970f22d..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/packaging.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/setuptools_build.py b/env/lib/python2.7/site-packages/pip/_internal/utils/setuptools_build.py deleted file mode 100644 index 58956072..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/setuptools_build.py +++ /dev/null @@ -1,36 +0,0 @@ -import sys - -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import List - -# Shim to wrap setup.py invocation with setuptools -# -# We set sys.argv[0] to the path to the underlying setup.py file so -# setuptools / distutils don't take the path to the setup.py to be "-c" when -# invoking via the shim. This avoids e.g. the following manifest_maker -# warning: "warning: manifest_maker: standard file '-c' not found". -_SETUPTOOLS_SHIM = ( - "import sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};" - "f=getattr(tokenize, 'open', open)(__file__);" - "code=f.read().replace('\\r\\n', '\\n');" - "f.close();" - "exec(compile(code, __file__, 'exec'))" -) - - -def make_setuptools_shim_args(setup_py_path, unbuffered_output=False): - # type: (str, bool) -> List[str] - """ - Get setuptools command arguments with shim wrapped setup file invocation. - - :param setup_py_path: The path to setup.py to be wrapped. - :param unbuffered_output: If True, adds the unbuffered switch to the - argument list. - """ - args = [sys.executable] - if unbuffered_output: - args.append('-u') - args.extend(['-c', _SETUPTOOLS_SHIM.format(setup_py_path)]) - return args diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/setuptools_build.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/setuptools_build.pyc deleted file mode 100644 index 3b8267e4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/setuptools_build.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/temp_dir.py b/env/lib/python2.7/site-packages/pip/_internal/utils/temp_dir.py deleted file mode 100644 index 2c81ad55..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/temp_dir.py +++ /dev/null @@ -1,155 +0,0 @@ -from __future__ import absolute_import - -import errno -import itertools -import logging -import os.path -import tempfile - -from pip._internal.utils.misc import rmtree - -logger = logging.getLogger(__name__) - - -class TempDirectory(object): - """Helper class that owns and cleans up a temporary directory. - - This class can be used as a context manager or as an OO representation of a - temporary directory. 
- - Attributes: - path - Location to the created temporary directory or None - delete - Whether the directory should be deleted when exiting - (when used as a contextmanager) - - Methods: - create() - Creates a temporary directory and stores its path in the path - attribute. - cleanup() - Deletes the temporary directory and sets path attribute to None - - When used as a context manager, a temporary directory is created on - entering the context and, if the delete attribute is True, on exiting the - context the created directory is deleted. - """ - - def __init__(self, path=None, delete=None, kind="temp"): - super(TempDirectory, self).__init__() - - if path is None and delete is None: - # If we were not given an explicit directory, and we were not given - # an explicit delete option, then we'll default to deleting. - delete = True - - self.path = path - self.delete = delete - self.kind = kind - - def __repr__(self): - return "<{} {!r}>".format(self.__class__.__name__, self.path) - - def __enter__(self): - self.create() - return self - - def __exit__(self, exc, value, tb): - if self.delete: - self.cleanup() - - def create(self): - """Create a temporary directory and store its path in self.path - """ - if self.path is not None: - logger.debug( - "Skipped creation of temporary directory: {}".format(self.path) - ) - return - # We realpath here because some systems have their default tmpdir - # symlinked to another directory. This tends to confuse build - # scripts, so we canonicalize the path by traversing potential - # symlinks here. - self.path = os.path.realpath( - tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) - ) - logger.debug("Created temporary directory: {}".format(self.path)) - - def cleanup(self): - """Remove the temporary directory created and reset state - """ - if self.path is not None and os.path.exists(self.path): - rmtree(self.path) - self.path = None - - -class AdjacentTempDirectory(TempDirectory): - """Helper class that creates a temporary directory adjacent to a real one. - - Attributes: - original - The original directory to create a temp directory for. - path - After calling create() or entering, contains the full - path to the temporary directory. - delete - Whether the directory should be deleted when exiting - (when used as a contextmanager) - - """ - # The characters that may be used to name the temp directory - # We always prepend a ~ and then rotate through these until - # a usable name is found. - # pkg_resources raises a different error for .dist-info folder - # with leading '-' and invalid metadata - LEADING_CHARS = "-~.=%0123456789" - - def __init__(self, original, delete=None): - super(AdjacentTempDirectory, self).__init__(delete=delete) - self.original = original.rstrip('/\\') - - @classmethod - def _generate_names(cls, name): - """Generates a series of temporary names. - - The algorithm replaces the leading characters in the name - with ones that are valid filesystem characters, but are not - valid package names (for both Python and pip definitions of - package). 
- """ - for i in range(1, len(name)): - for candidate in itertools.combinations_with_replacement( - cls.LEADING_CHARS, i - 1): - new_name = '~' + ''.join(candidate) + name[i:] - if new_name != name: - yield new_name - - # If we make it this far, we will have to make a longer name - for i in range(len(cls.LEADING_CHARS)): - for candidate in itertools.combinations_with_replacement( - cls.LEADING_CHARS, i): - new_name = '~' + ''.join(candidate) + name - if new_name != name: - yield new_name - - def create(self): - root, name = os.path.split(self.original) - for candidate in self._generate_names(name): - path = os.path.join(root, candidate) - try: - os.mkdir(path) - except OSError as ex: - # Continue if the name exists already - if ex.errno != errno.EEXIST: - raise - else: - self.path = os.path.realpath(path) - break - - if not self.path: - # Final fallback on the default behavior. - self.path = os.path.realpath( - tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) - ) - logger.debug("Created temporary directory: {}".format(self.path)) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/temp_dir.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/temp_dir.pyc deleted file mode 100644 index e6f5b9c0..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/temp_dir.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/typing.py b/env/lib/python2.7/site-packages/pip/_internal/utils/typing.py deleted file mode 100644 index 10170ce2..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/typing.py +++ /dev/null @@ -1,29 +0,0 @@ -"""For neatly implementing static typing in pip. - -`mypy` - the static type analysis tool we use - uses the `typing` module, which -provides core functionality fundamental to mypy's functioning. - -Generally, `typing` would be imported at runtime and used in that fashion - -it acts as a no-op at runtime and does not have any run-time overhead by -design. - -As it turns out, `typing` is not vendorable - it uses separate sources for -Python 2/Python 3. Thus, this codebase can not expect it to be present. -To work around this, mypy allows the typing import to be behind a False-y -optional to prevent it from running at runtime and type-comments can be used -to remove the need for the types to be accessible directly during runtime. - -This module provides the False-y guard in a nicely named fashion so that a -curious maintainer can reach here to read this. - -In pip, all static-typing related imports should be guarded as follows: - - from pip._internal.utils.typing import MYPY_CHECK_RUNNING - - if MYPY_CHECK_RUNNING: - from typing import ... 
- -Ref: https://github.com/python/mypy/issues/3216 -""" - -MYPY_CHECK_RUNNING = False diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/typing.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/typing.pyc deleted file mode 100644 index a5c26a99..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/typing.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/ui.py b/env/lib/python2.7/site-packages/pip/_internal/utils/ui.py deleted file mode 100644 index 46390f4a..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/ui.py +++ /dev/null @@ -1,424 +0,0 @@ -from __future__ import absolute_import, division - -import contextlib -import itertools -import logging -import sys -import time -from signal import SIGINT, default_int_handler, signal - -from pip._vendor import six -from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR -from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar -from pip._vendor.progress.spinner import Spinner - -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.logging import get_indentation -from pip._internal.utils.misc import format_size -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Any, Iterator, IO - -try: - from pip._vendor import colorama -# Lots of different errors can come from this, including SystemError and -# ImportError. -except Exception: - colorama = None - -logger = logging.getLogger(__name__) - - -def _select_progress_class(preferred, fallback): - encoding = getattr(preferred.file, "encoding", None) - - # If we don't know what encoding this file is in, then we'll just assume - # that it doesn't support unicode and use the ASCII bar. - if not encoding: - return fallback - - # Collect all of the possible characters we want to use with the preferred - # bar. - characters = [ - getattr(preferred, "empty_fill", six.text_type()), - getattr(preferred, "fill", six.text_type()), - ] - characters += list(getattr(preferred, "phases", [])) - - # Try to decode the characters we're using for the bar using the encoding - # of the given file, if this works then we'll assume that we can use the - # fancier bar and if not we'll fall back to the plaintext bar. - try: - six.text_type().join(characters).encode(encoding) - except UnicodeEncodeError: - return fallback - else: - return preferred - - -_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any - - -class InterruptibleMixin(object): - """ - Helper to ensure that self.finish() gets called on keyboard interrupt. - - This allows downloads to be interrupted without leaving temporary state - (like hidden cursors) behind. - - This class is similar to the progress library's existing SigIntMixin - helper, but as of version 1.2, that helper has the following problems: - - 1. It calls sys.exit(). - 2. It discards the existing SIGINT handler completely. - 3. It leaves its own handler in place even after an uninterrupted finish, - which will have unexpected delayed effects if the user triggers an - unrelated keyboard interrupt some time after a progress-displaying - download has already completed, for example. - """ - - def __init__(self, *args, **kwargs): - """ - Save the original SIGINT handler for later. 
- """ - super(InterruptibleMixin, self).__init__(*args, **kwargs) - - self.original_handler = signal(SIGINT, self.handle_sigint) - - # If signal() returns None, the previous handler was not installed from - # Python, and we cannot restore it. This probably should not happen, - # but if it does, we must restore something sensible instead, at least. - # The least bad option should be Python's default SIGINT handler, which - # just raises KeyboardInterrupt. - if self.original_handler is None: - self.original_handler = default_int_handler - - def finish(self): - """ - Restore the original SIGINT handler after finishing. - - This should happen regardless of whether the progress display finishes - normally, or gets interrupted. - """ - super(InterruptibleMixin, self).finish() - signal(SIGINT, self.original_handler) - - def handle_sigint(self, signum, frame): - """ - Call self.finish() before delegating to the original SIGINT handler. - - This handler should only be in place while the progress display is - active. - """ - self.finish() - self.original_handler(signum, frame) - - -class SilentBar(Bar): - - def update(self): - pass - - -class BlueEmojiBar(IncrementalBar): - - suffix = "%(percent)d%%" - bar_prefix = " " - bar_suffix = " " - phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any - - -class DownloadProgressMixin(object): - - def __init__(self, *args, **kwargs): - super(DownloadProgressMixin, self).__init__(*args, **kwargs) - self.message = (" " * (get_indentation() + 2)) + self.message - - @property - def downloaded(self): - return format_size(self.index) - - @property - def download_speed(self): - # Avoid zero division errors... - if self.avg == 0.0: - return "..." - return format_size(1 / self.avg) + "/s" - - @property - def pretty_eta(self): - if self.eta: - return "eta %s" % self.eta_td - return "" - - def iter(self, it, n=1): - for x in it: - yield x - self.next(n) - self.finish() - - -class WindowsMixin(object): - - def __init__(self, *args, **kwargs): - # The Windows terminal does not support the hide/show cursor ANSI codes - # even with colorama. So we'll ensure that hide_cursor is False on - # Windows. - # This call needs to go before the super() call, so that hide_cursor - # is set in time. The base progress bar class writes the "hide cursor" - # code to the terminal in its init, so if we don't set this soon - # enough, we get a "hide" with no corresponding "show"... - if WINDOWS and self.hide_cursor: - self.hide_cursor = False - - super(WindowsMixin, self).__init__(*args, **kwargs) - - # Check if we are running on Windows and we have the colorama module, - # if we do then wrap our file with it. - if WINDOWS and colorama: - self.file = colorama.AnsiToWin32(self.file) - # The progress code expects to be able to call self.file.isatty() - # but the colorama.AnsiToWin32() object doesn't have that, so we'll - # add it. - self.file.isatty = lambda: self.file.wrapped.isatty() - # The progress code expects to be able to call self.file.flush() - # but the colorama.AnsiToWin32() object doesn't have that, so we'll - # add it. 
- self.file.flush = lambda: self.file.wrapped.flush() - - -class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, - DownloadProgressMixin): - - file = sys.stdout - message = "%(percent)d%%" - suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" - -# NOTE: The "type: ignore" comments on the following classes are there to -# work around https://github.com/python/typing/issues/241 - - -class DefaultDownloadProgressBar(BaseDownloadProgressBar, - _BaseBar): - pass - - -class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore - pass - - -class DownloadBar(BaseDownloadProgressBar, # type: ignore - Bar): - pass - - -class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore - FillingCirclesBar): - pass - - -class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore - BlueEmojiBar): - pass - - -class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, - DownloadProgressMixin, Spinner): - - file = sys.stdout - suffix = "%(downloaded)s %(download_speed)s" - - def next_phase(self): - if not hasattr(self, "_phaser"): - self._phaser = itertools.cycle(self.phases) - return next(self._phaser) - - def update(self): - message = self.message % self - phase = self.next_phase() - suffix = self.suffix % self - line = ''.join([ - message, - " " if message else "", - phase, - " " if suffix else "", - suffix, - ]) - - self.writeln(line) - - -BAR_TYPES = { - "off": (DownloadSilentBar, DownloadSilentBar), - "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), - "ascii": (DownloadBar, DownloadProgressSpinner), - "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), - "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner) -} - - -def DownloadProgressProvider(progress_bar, max=None): - if max is None or max == 0: - return BAR_TYPES[progress_bar][1]().iter - else: - return BAR_TYPES[progress_bar][0](max=max).iter - - -################################################################ -# Generic "something is happening" spinners -# -# We don't even try using progress.spinner.Spinner here because it's actually -# simpler to reimplement from scratch than to coerce their code into doing -# what we need. -################################################################ - -@contextlib.contextmanager -def hidden_cursor(file): - # type: (IO) -> Iterator[None] - # The Windows terminal does not support the hide/show cursor ANSI codes, - # even via colorama. So don't even try. - if WINDOWS: - yield - # We don't want to clutter the output with control characters if we're - # writing to a file, or if the user is running with --quiet. 
- # See https://github.com/pypa/pip/issues/3418 - elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: - yield - else: - file.write(HIDE_CURSOR) - try: - yield - finally: - file.write(SHOW_CURSOR) - - -class RateLimiter(object): - def __init__(self, min_update_interval_seconds): - # type: (float) -> None - self._min_update_interval_seconds = min_update_interval_seconds - self._last_update = 0 # type: float - - def ready(self): - # type: () -> bool - now = time.time() - delta = now - self._last_update - return delta >= self._min_update_interval_seconds - - def reset(self): - # type: () -> None - self._last_update = time.time() - - -class SpinnerInterface(object): - def spin(self): - # type: () -> None - raise NotImplementedError() - - def finish(self, final_status): - # type: (str) -> None - raise NotImplementedError() - - -class InteractiveSpinner(SpinnerInterface): - def __init__(self, message, file=None, spin_chars="-\\|/", - # Empirically, 8 updates/second looks nice - min_update_interval_seconds=0.125): - self._message = message - if file is None: - file = sys.stdout - self._file = file - self._rate_limiter = RateLimiter(min_update_interval_seconds) - self._finished = False - - self._spin_cycle = itertools.cycle(spin_chars) - - self._file.write(" " * get_indentation() + self._message + " ... ") - self._width = 0 - - def _write(self, status): - assert not self._finished - # Erase what we wrote before by backspacing to the beginning, writing - # spaces to overwrite the old text, and then backspacing again - backup = "\b" * self._width - self._file.write(backup + " " * self._width + backup) - # Now we have a blank slate to add our status - self._file.write(status) - self._width = len(status) - self._file.flush() - self._rate_limiter.reset() - - def spin(self): - # type: () -> None - if self._finished: - return - if not self._rate_limiter.ready(): - return - self._write(next(self._spin_cycle)) - - def finish(self, final_status): - # type: (str) -> None - if self._finished: - return - self._write(final_status) - self._file.write("\n") - self._file.flush() - self._finished = True - - -# Used for dumb terminals, non-interactive installs (no tty), etc. -# We still print updates occasionally (once every 60 seconds by default) to -# act as a keep-alive for systems like Travis-CI that take lack-of-output as -# an indication that a task has frozen. -class NonInteractiveSpinner(SpinnerInterface): - def __init__(self, message, min_update_interval_seconds=60): - # type: (str, float) -> None - self._message = message - self._finished = False - self._rate_limiter = RateLimiter(min_update_interval_seconds) - self._update("started") - - def _update(self, status): - assert not self._finished - self._rate_limiter.reset() - logger.info("%s: %s", self._message, status) - - def spin(self): - # type: () -> None - if self._finished: - return - if not self._rate_limiter.ready(): - return - self._update("still running...") - - def finish(self, final_status): - # type: (str) -> None - if self._finished: - return - self._update("finished with status '%s'" % (final_status,)) - self._finished = True - - -@contextlib.contextmanager -def open_spinner(message): - # type: (str) -> Iterator[SpinnerInterface] - # Interactive spinner goes directly to sys.stdout rather than being routed - # through the logging system, but it acts like it has level INFO, - # i.e. it's only displayed if we're at level INFO or better. 
- # Non-interactive spinner goes through the logging system, so it is always - # in sync with logging configuration. - if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: - spinner = InteractiveSpinner(message) # type: SpinnerInterface - else: - spinner = NonInteractiveSpinner(message) - try: - with hidden_cursor(sys.stdout): - yield spinner - except KeyboardInterrupt: - spinner.finish("canceled") - raise - except Exception: - spinner.finish("error") - raise - else: - spinner.finish("done") diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/ui.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/ui.pyc deleted file mode 100644 index cf8c297e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/ui.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/virtualenv.py b/env/lib/python2.7/site-packages/pip/_internal/utils/virtualenv.py deleted file mode 100644 index 380db1c3..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/virtualenv.py +++ /dev/null @@ -1,34 +0,0 @@ -import os.path -import site -import sys - - -def running_under_virtualenv(): - # type: () -> bool - """ - Return True if we're running inside a virtualenv, False otherwise. - - """ - if hasattr(sys, 'real_prefix'): - # pypa/virtualenv case - return True - elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): - # PEP 405 venv - return True - - return False - - -def virtualenv_no_global(): - # type: () -> bool - """ - Return True if in a venv and no system site packages. - """ - # this mirrors the logic in virtualenv.py for locating the - # no-global-site-packages.txt file - site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) - no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') - if running_under_virtualenv() and os.path.isfile(no_global_file): - return True - else: - return False diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/virtualenv.pyc b/env/lib/python2.7/site-packages/pip/_internal/utils/virtualenv.pyc deleted file mode 100644 index 78f44140..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/utils/virtualenv.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/__init__.py b/env/lib/python2.7/site-packages/pip/_internal/vcs/__init__.py deleted file mode 100644 index cb573ab6..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/vcs/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# Expose a limited set of classes and functions so callers outside of -# the vcs package don't need to import deeper than `pip._internal.vcs`. -# (The test directory and imports protected by MYPY_CHECK_RUNNING may -# still need to import from a vcs sub-package.) -from pip._internal.vcs.versioncontrol import ( # noqa: F401 - RemoteNotFoundError, make_vcs_requirement_url, vcs, -) -# Import all vcs modules to register each VCS in the VcsSupport object. 
-import pip._internal.vcs.bazaar -import pip._internal.vcs.git -import pip._internal.vcs.mercurial -import pip._internal.vcs.subversion # noqa: F401 diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/__init__.pyc b/env/lib/python2.7/site-packages/pip/_internal/vcs/__init__.pyc deleted file mode 100644 index db944ca7..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/vcs/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/bazaar.py b/env/lib/python2.7/site-packages/pip/_internal/vcs/bazaar.py deleted file mode 100644 index 4f1e114b..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/vcs/bazaar.py +++ /dev/null @@ -1,101 +0,0 @@ -from __future__ import absolute_import - -import logging -import os - -from pip._vendor.six.moves.urllib import parse as urllib_parse - -from pip._internal.utils.misc import display_path, path_to_url, rmtree -from pip._internal.vcs.versioncontrol import VersionControl, vcs - -logger = logging.getLogger(__name__) - - -class Bazaar(VersionControl): - name = 'bzr' - dirname = '.bzr' - repo_name = 'branch' - schemes = ( - 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', - 'bzr+lp', - ) - - def __init__(self, *args, **kwargs): - super(Bazaar, self).__init__(*args, **kwargs) - # This is only needed for python <2.7.5 - # Register lp but do not expose as a scheme to support bzr+lp. - if getattr(urllib_parse, 'uses_fragment', None): - urllib_parse.uses_fragment.extend(['lp']) - - @staticmethod - def get_base_rev_args(rev): - return ['-r', rev] - - def export(self, location, url): - """ - Export the Bazaar repository at the url to the destination location - """ - # Remove the location to make sure Bazaar can export it correctly - if os.path.exists(location): - rmtree(location) - - url, rev_options = self.get_url_rev_options(url) - self.run_command( - ['export', location, url] + rev_options.to_args(), - show_stdout=False, - ) - - def fetch_new(self, dest, url, rev_options): - rev_display = rev_options.to_display() - logger.info( - 'Checking out %s%s to %s', - url, - rev_display, - display_path(dest), - ) - cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest] - self.run_command(cmd_args) - - def switch(self, dest, url, rev_options): - self.run_command(['switch', url], cwd=dest) - - def update(self, dest, url, rev_options): - cmd_args = ['pull', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) - - @classmethod - def get_url_rev_and_auth(cls, url): - # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it - url, rev, user_pass = super(Bazaar, cls).get_url_rev_and_auth(url) - if url.startswith('ssh://'): - url = 'bzr+' + url - return url, rev, user_pass - - @classmethod - def get_remote_url(cls, location): - urls = cls.run_command(['info'], show_stdout=False, cwd=location) - for line in urls.splitlines(): - line = line.strip() - for x in ('checkout of branch: ', - 'parent branch: '): - if line.startswith(x): - repo = line.split(x)[1] - if cls._is_local_repository(repo): - return path_to_url(repo) - return repo - return None - - @classmethod - def get_revision(cls, location): - revision = cls.run_command( - ['revno'], show_stdout=False, cwd=location, - ) - return revision.splitlines()[-1] - - @classmethod - def is_commit_id_equal(cls, dest, name): - """Always assume the versions don't match""" - return False - - -vcs.register(Bazaar) diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/bazaar.pyc 
b/env/lib/python2.7/site-packages/pip/_internal/vcs/bazaar.pyc deleted file mode 100644 index 3ecdcf46..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/vcs/bazaar.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/git.py b/env/lib/python2.7/site-packages/pip/_internal/vcs/git.py deleted file mode 100644 index 3445c1b3..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/vcs/git.py +++ /dev/null @@ -1,358 +0,0 @@ -from __future__ import absolute_import - -import logging -import os.path -import re - -from pip._vendor.packaging.version import parse as parse_version -from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._vendor.six.moves.urllib import request as urllib_request - -from pip._internal.exceptions import BadCommand -from pip._internal.utils.compat import samefile -from pip._internal.utils.misc import display_path, redact_password_from_url -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.vcs.versioncontrol import ( - RemoteNotFoundError, VersionControl, vcs, -) - -urlsplit = urllib_parse.urlsplit -urlunsplit = urllib_parse.urlunsplit - - -logger = logging.getLogger(__name__) - - -HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$') - - -def looks_like_hash(sha): - return bool(HASH_REGEX.match(sha)) - - -class Git(VersionControl): - name = 'git' - dirname = '.git' - repo_name = 'clone' - schemes = ( - 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file', - ) - # Prevent the user's environment variables from interfering with pip: - # https://github.com/pypa/pip/issues/1130 - unset_environ = ('GIT_DIR', 'GIT_WORK_TREE') - default_arg_rev = 'HEAD' - - @staticmethod - def get_base_rev_args(rev): - return [rev] - - def get_git_version(self): - VERSION_PFX = 'git version ' - version = self.run_command(['version'], show_stdout=False) - if version.startswith(VERSION_PFX): - version = version[len(VERSION_PFX):].split()[0] - else: - version = '' - # get first 3 positions of the git version because - # on windows it is x.y.z.windows.t, and this parses as - # LegacyVersion which always smaller than a Version. - version = '.'.join(version.split('.')[:3]) - return parse_version(version) - - @classmethod - def get_current_branch(cls, location): - """ - Return the current branch, or None if HEAD isn't at a branch - (e.g. detached HEAD). - """ - # git-symbolic-ref exits with empty stdout if "HEAD" is a detached - # HEAD rather than a symbolic ref. In addition, the -q causes the - # command to exit with status code 1 instead of 128 in this case - # and to suppress the message to stderr. - args = ['symbolic-ref', '-q', 'HEAD'] - output = cls.run_command( - args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, - ) - ref = output.strip() - - if ref.startswith('refs/heads/'): - return ref[len('refs/heads/'):] - - return None - - def export(self, location, url): - """Export the Git repository at the url to the destination location""" - if not location.endswith('/'): - location = location + '/' - - with TempDirectory(kind="export") as temp_dir: - self.unpack(temp_dir.path, url=url) - self.run_command( - ['checkout-index', '-a', '-f', '--prefix', location], - show_stdout=False, cwd=temp_dir.path - ) - - @classmethod - def get_revision_sha(cls, dest, rev): - """ - Return (sha_or_none, is_branch), where sha_or_none is a commit hash - if the revision names a remote branch or tag, otherwise None. - - Args: - dest: the repository directory. - rev: the revision name. 
- """ - # Pass rev to pre-filter the list. - output = cls.run_command(['show-ref', rev], cwd=dest, - show_stdout=False, on_returncode='ignore') - refs = {} - for line in output.strip().splitlines(): - try: - sha, ref = line.split() - except ValueError: - # Include the offending line to simplify troubleshooting if - # this error ever occurs. - raise ValueError('unexpected show-ref line: {!r}'.format(line)) - - refs[ref] = sha - - branch_ref = 'refs/remotes/origin/{}'.format(rev) - tag_ref = 'refs/tags/{}'.format(rev) - - sha = refs.get(branch_ref) - if sha is not None: - return (sha, True) - - sha = refs.get(tag_ref) - - return (sha, False) - - @classmethod - def resolve_revision(cls, dest, url, rev_options): - """ - Resolve a revision to a new RevOptions object with the SHA1 of the - branch, tag, or ref if found. - - Args: - rev_options: a RevOptions object. - """ - rev = rev_options.arg_rev - sha, is_branch = cls.get_revision_sha(dest, rev) - - if sha is not None: - rev_options = rev_options.make_new(sha) - rev_options.branch_name = rev if is_branch else None - - return rev_options - - # Do not show a warning for the common case of something that has - # the form of a Git commit hash. - if not looks_like_hash(rev): - logger.warning( - "Did not find branch or tag '%s', assuming revision or ref.", - rev, - ) - - if not rev.startswith('refs/'): - return rev_options - - # If it looks like a ref, we have to fetch it explicitly. - cls.run_command( - ['fetch', '-q', url] + rev_options.to_args(), - cwd=dest, - ) - # Change the revision to the SHA of the ref we fetched - sha = cls.get_revision(dest, rev='FETCH_HEAD') - rev_options = rev_options.make_new(sha) - - return rev_options - - @classmethod - def is_commit_id_equal(cls, dest, name): - """ - Return whether the current commit hash equals the given name. - - Args: - dest: the repository directory. - name: a string name. - """ - if not name: - # Then avoid an unnecessary subprocess call. - return False - - return cls.get_revision(dest) == name - - def fetch_new(self, dest, url, rev_options): - rev_display = rev_options.to_display() - logger.info( - 'Cloning %s%s to %s', redact_password_from_url(url), - rev_display, display_path(dest), - ) - self.run_command(['clone', '-q', url, dest]) - - if rev_options.rev: - # Then a specific revision was requested. - rev_options = self.resolve_revision(dest, url, rev_options) - branch_name = getattr(rev_options, 'branch_name', None) - if branch_name is None: - # Only do a checkout if the current commit id doesn't match - # the requested revision. - if not self.is_commit_id_equal(dest, rev_options.rev): - cmd_args = ['checkout', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) - elif self.get_current_branch(dest) != branch_name: - # Then a specific branch was requested, and that branch - # is not yet checked out. 
- track_branch = 'origin/{}'.format(branch_name) - cmd_args = [ - 'checkout', '-b', branch_name, '--track', track_branch, - ] - self.run_command(cmd_args, cwd=dest) - - #: repo may contain submodules - self.update_submodules(dest) - - def switch(self, dest, url, rev_options): - self.run_command(['config', 'remote.origin.url', url], cwd=dest) - cmd_args = ['checkout', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) - - self.update_submodules(dest) - - def update(self, dest, url, rev_options): - # First fetch changes from the default remote - if self.get_git_version() >= parse_version('1.9.0'): - # fetch tags in addition to everything else - self.run_command(['fetch', '-q', '--tags'], cwd=dest) - else: - self.run_command(['fetch', '-q'], cwd=dest) - # Then reset to wanted revision (maybe even origin/master) - rev_options = self.resolve_revision(dest, url, rev_options) - cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) - #: update submodules - self.update_submodules(dest) - - @classmethod - def get_remote_url(cls, location): - """ - Return URL of the first remote encountered. - - Raises RemoteNotFoundError if the repository does not have a remote - url configured. - """ - # We need to pass 1 for extra_ok_returncodes since the command - # exits with return code 1 if there are no matching lines. - stdout = cls.run_command( - ['config', '--get-regexp', r'remote\..*\.url'], - extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, - ) - remotes = stdout.splitlines() - try: - found_remote = remotes[0] - except IndexError: - raise RemoteNotFoundError - - for remote in remotes: - if remote.startswith('remote.origin.url '): - found_remote = remote - break - url = found_remote.split(' ')[1] - return url.strip() - - @classmethod - def get_revision(cls, location, rev=None): - if rev is None: - rev = 'HEAD' - current_rev = cls.run_command( - ['rev-parse', rev], show_stdout=False, cwd=location, - ) - return current_rev.strip() - - @classmethod - def get_subdirectory(cls, location): - # find the repo root - git_dir = cls.run_command(['rev-parse', '--git-dir'], - show_stdout=False, cwd=location).strip() - if not os.path.isabs(git_dir): - git_dir = os.path.join(location, git_dir) - root_dir = os.path.join(git_dir, '..') - # find setup.py - orig_location = location - while not os.path.exists(os.path.join(location, 'setup.py')): - last_location = location - location = os.path.dirname(location) - if location == last_location: - # We've traversed up to the root of the filesystem without - # finding setup.py - logger.warning( - "Could not find setup.py for directory %s (tried all " - "parent directories)", - orig_location, - ) - return None - # relative path of setup.py to repo root - if samefile(root_dir, location): - return None - return os.path.relpath(location, root_dir) - - @classmethod - def get_url_rev_and_auth(cls, url): - """ - Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. - That's required because although they use SSH they sometimes don't - work with a ssh:// scheme (e.g. GitHub). But we need a scheme for - parsing. Hence we remove it again afterwards and return it as a stub. 
- """ - # Works around an apparent Git bug - # (see https://article.gmane.org/gmane.comp.version-control.git/146500) - scheme, netloc, path, query, fragment = urlsplit(url) - if scheme.endswith('file'): - initial_slashes = path[:-len(path.lstrip('/'))] - newpath = ( - initial_slashes + - urllib_request.url2pathname(path) - .replace('\\', '/').lstrip('/') - ) - url = urlunsplit((scheme, netloc, newpath, query, fragment)) - after_plus = scheme.find('+') + 1 - url = scheme[:after_plus] + urlunsplit( - (scheme[after_plus:], netloc, newpath, query, fragment), - ) - - if '://' not in url: - assert 'file:' not in url - url = url.replace('git+', 'git+ssh://') - url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) - url = url.replace('ssh://', '') - else: - url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) - - return url, rev, user_pass - - @classmethod - def update_submodules(cls, location): - if not os.path.exists(os.path.join(location, '.gitmodules')): - return - cls.run_command( - ['submodule', 'update', '--init', '--recursive', '-q'], - cwd=location, - ) - - @classmethod - def controls_location(cls, location): - if super(Git, cls).controls_location(location): - return True - try: - r = cls.run_command(['rev-parse'], - cwd=location, - show_stdout=False, - on_returncode='ignore') - return not r - except BadCommand: - logger.debug("could not determine if %s is under git control " - "because git is not available", location) - return False - - -vcs.register(Git) diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/git.pyc b/env/lib/python2.7/site-packages/pip/_internal/vcs/git.pyc deleted file mode 100644 index 111ae1c3..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/vcs/git.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/mercurial.py b/env/lib/python2.7/site-packages/pip/_internal/vcs/mercurial.py deleted file mode 100644 index db42783d..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/vcs/mercurial.py +++ /dev/null @@ -1,103 +0,0 @@ -from __future__ import absolute_import - -import logging -import os - -from pip._vendor.six.moves import configparser - -from pip._internal.utils.misc import display_path, path_to_url -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.vcs.versioncontrol import VersionControl, vcs - -logger = logging.getLogger(__name__) - - -class Mercurial(VersionControl): - name = 'hg' - dirname = '.hg' - repo_name = 'clone' - schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') - - @staticmethod - def get_base_rev_args(rev): - return [rev] - - def export(self, location, url): - """Export the Hg repository at the url to the destination location""" - with TempDirectory(kind="export") as temp_dir: - self.unpack(temp_dir.path, url=url) - - self.run_command( - ['archive', location], show_stdout=False, cwd=temp_dir.path - ) - - def fetch_new(self, dest, url, rev_options): - rev_display = rev_options.to_display() - logger.info( - 'Cloning hg %s%s to %s', - url, - rev_display, - display_path(dest), - ) - self.run_command(['clone', '--noupdate', '-q', url, dest]) - cmd_args = ['update', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) - - def switch(self, dest, url, rev_options): - repo_config = os.path.join(dest, self.dirname, 'hgrc') - config = configparser.RawConfigParser() - try: - config.read(repo_config) - config.set('paths', 'default', url) - with open(repo_config, 'w') as config_file: - config.write(config_file) - 
except (OSError, configparser.NoSectionError) as exc: - logger.warning( - 'Could not switch Mercurial repository to %s: %s', url, exc, - ) - else: - cmd_args = ['update', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) - - def update(self, dest, url, rev_options): - self.run_command(['pull', '-q'], cwd=dest) - cmd_args = ['update', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) - - @classmethod - def get_remote_url(cls, location): - url = cls.run_command( - ['showconfig', 'paths.default'], - show_stdout=False, cwd=location).strip() - if cls._is_local_repository(url): - url = path_to_url(url) - return url.strip() - - @classmethod - def get_revision(cls, location): - """ - Return the repository-local changeset revision number, as an integer. - """ - current_revision = cls.run_command( - ['parents', '--template={rev}'], - show_stdout=False, cwd=location).strip() - return current_revision - - @classmethod - def get_requirement_revision(cls, location): - """ - Return the changeset identification hash, as a 40-character - hexadecimal string - """ - current_rev_hash = cls.run_command( - ['parents', '--template={node}'], - show_stdout=False, cwd=location).strip() - return current_rev_hash - - @classmethod - def is_commit_id_equal(cls, dest, name): - """Always assume the versions don't match""" - return False - - -vcs.register(Mercurial) diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/mercurial.pyc b/env/lib/python2.7/site-packages/pip/_internal/vcs/mercurial.pyc deleted file mode 100644 index 54a532ee..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/vcs/mercurial.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/subversion.pyc b/env/lib/python2.7/site-packages/pip/_internal/vcs/subversion.pyc deleted file mode 100644 index 23832a74..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/vcs/subversion.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/versioncontrol.py b/env/lib/python2.7/site-packages/pip/_internal/vcs/versioncontrol.py deleted file mode 100644 index 2d05fc13..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/vcs/versioncontrol.py +++ /dev/null @@ -1,600 +0,0 @@ -"""Handles all VCS (version control) support""" -from __future__ import absolute_import - -import errno -import logging -import os -import shutil -import sys - -from pip._vendor import pkg_resources -from pip._vendor.six.moves.urllib import parse as urllib_parse - -from pip._internal.exceptions import BadCommand -from pip._internal.utils.misc import ( - ask_path_exists, backup_dir, call_subprocess, display_path, rmtree, -) -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( - Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type - ) - from pip._internal.utils.ui import SpinnerInterface - - AuthInfo = Tuple[Optional[str], Optional[str]] - -__all__ = ['vcs'] - - -logger = logging.getLogger(__name__) - - -def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): - """ - Return the URL for a VCS requirement. - - Args: - repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). - project_name: the (unescaped) project name. 
- """ - egg_project_name = pkg_resources.to_filename(project_name) - req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name) - if subdir: - req += '&subdirectory={}'.format(subdir) - - return req - - -class RemoteNotFoundError(Exception): - pass - - -class RevOptions(object): - - """ - Encapsulates a VCS-specific revision to install, along with any VCS - install options. - - Instances of this class should be treated as if immutable. - """ - - def __init__( - self, - vc_class, # type: Type[VersionControl] - rev=None, # type: Optional[str] - extra_args=None, # type: Optional[List[str]] - ): - # type: (...) -> None - """ - Args: - vc_class: a VersionControl subclass. - rev: the name of the revision to install. - extra_args: a list of extra options. - """ - if extra_args is None: - extra_args = [] - - self.extra_args = extra_args - self.rev = rev - self.vc_class = vc_class - - def __repr__(self): - return '<RevOptions {}: rev={!r}>'.format(self.vc_class.name, self.rev) - - @property - def arg_rev(self): - # type: () -> Optional[str] - if self.rev is None: - return self.vc_class.default_arg_rev - - return self.rev - - def to_args(self): - # type: () -> List[str] - """ - Return the VCS-specific command arguments. - """ - args = [] # type: List[str] - rev = self.arg_rev - if rev is not None: - args += self.vc_class.get_base_rev_args(rev) - args += self.extra_args - - return args - - def to_display(self): - # type: () -> str - if not self.rev: - return '' - - return ' (to revision {})'.format(self.rev) - - def make_new(self, rev): - # type: (str) -> RevOptions - """ - Make a copy of the current instance, but with a new rev. - - Args: - rev: the name of the revision for the new object. - """ - return self.vc_class.make_rev_options(rev, extra_args=self.extra_args) - - -class VcsSupport(object): - _registry = {} # type: Dict[str, VersionControl] - schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] - - def __init__(self): - # type: () -> None - # Register more schemes with urlparse for various version control - # systems - urllib_parse.uses_netloc.extend(self.schemes) - # Python >= 2.7.4, 3.3 doesn't have uses_fragment - if getattr(urllib_parse, 'uses_fragment', None): - urllib_parse.uses_fragment.extend(self.schemes) - super(VcsSupport, self).__init__() - - def __iter__(self): - return self._registry.__iter__() - - @property - def backends(self): - # type: () -> List[VersionControl] - return list(self._registry.values()) - - @property - def dirnames(self): - # type: () -> List[str] - return [backend.dirname for backend in self.backends] - - @property - def all_schemes(self): - # type: () -> List[str] - schemes = [] # type: List[str] - for backend in self.backends: - schemes.extend(backend.schemes) - return schemes - - def register(self, cls): - # type: (Type[VersionControl]) -> None - if not hasattr(cls, 'name'): - logger.warning('Cannot register VCS %s', cls.__name__) - return - if cls.name not in self._registry: - self._registry[cls.name] = cls() - logger.debug('Registered VCS backend: %s', cls.name) - - def unregister(self, name): - # type: (str) -> None - if name in self._registry: - del self._registry[name] - - def get_backend_for_dir(self, location): - # type: (str) -> Optional[VersionControl] - """ - Return a VersionControl object if a repository of that type is found - at the given directory. 
- """ - for vcs_backend in self._registry.values(): - if vcs_backend.controls_location(location): - logger.debug('Determine that %s uses VCS: %s', - location, vcs_backend.name) - return vcs_backend - return None - - def get_backend(self, name): - # type: (str) -> Optional[VersionControl] - """ - Return a VersionControl object or None. - """ - name = name.lower() - return self._registry.get(name) - - -vcs = VcsSupport() - - -class VersionControl(object): - name = '' - dirname = '' - repo_name = '' - # List of supported schemes for this Version Control - schemes = () # type: Tuple[str, ...] - # Iterable of environment variable names to pass to call_subprocess(). - unset_environ = () # type: Tuple[str, ...] - default_arg_rev = None # type: Optional[str] - - @classmethod - def should_add_vcs_url_prefix(cls, remote_url): - """ - Return whether the vcs prefix (e.g. "git+") should be added to a - repository's remote url when used in a requirement. - """ - return not remote_url.lower().startswith('{}:'.format(cls.name)) - - @classmethod - def get_subdirectory(cls, repo_dir): - """ - Return the path to setup.py, relative to the repo root. - """ - return None - - @classmethod - def get_requirement_revision(cls, repo_dir): - """ - Return the revision string that should be used in a requirement. - """ - return cls.get_revision(repo_dir) - - @classmethod - def get_src_requirement(cls, repo_dir, project_name): - """ - Return the requirement string to use to redownload the files - currently at the given repository directory. - - Args: - project_name: the (unescaped) project name. - - The return value has a form similar to the following: - - {repository_url}@{revision}#egg={project_name} - """ - repo_url = cls.get_remote_url(repo_dir) - if repo_url is None: - return None - - if cls.should_add_vcs_url_prefix(repo_url): - repo_url = '{}+{}'.format(cls.name, repo_url) - - revision = cls.get_requirement_revision(repo_dir) - subdir = cls.get_subdirectory(repo_dir) - req = make_vcs_requirement_url(repo_url, revision, project_name, - subdir=subdir) - - return req - - @staticmethod - def get_base_rev_args(rev): - """ - Return the base revision arguments for a vcs command. - - Args: - rev: the name of a revision to install. Cannot be None. - """ - raise NotImplementedError - - @classmethod - def make_rev_options(cls, rev=None, extra_args=None): - # type: (Optional[str], Optional[List[str]]) -> RevOptions - """ - Return a RevOptions object. - - Args: - rev: the name of a revision to install. - extra_args: a list of extra options. - """ - return RevOptions(cls, rev, extra_args=extra_args) - - @classmethod - def _is_local_repository(cls, repo): - # type: (str) -> bool - """ - posix absolute paths start with os.path.sep, - win32 ones start with drive (like c:\\folder) - """ - drive, tail = os.path.splitdrive(repo) - return repo.startswith(os.path.sep) or bool(drive) - - def export(self, location, url): - """ - Export the repository at the url to the destination location - i.e. only download the files, without vcs informations - - :param url: the repository URL starting with a vcs prefix. - """ - raise NotImplementedError - - @classmethod - def get_netloc_and_auth(cls, netloc, scheme): - """ - Parse the repository URL's netloc, and return the new netloc to use - along with auth information. - - Args: - netloc: the original repository URL netloc. - scheme: the repository URL's scheme without the vcs prefix. 
- - This is mainly for the Subversion class to override, so that auth - information can be provided via the --username and --password options - instead of through the URL. For other subclasses like Git without - such an option, auth information must stay in the URL. - - Returns: (netloc, (username, password)). - """ - return netloc, (None, None) - - @classmethod - def get_url_rev_and_auth(cls, url): - # type: (str) -> Tuple[str, Optional[str], AuthInfo] - """ - Parse the repository URL to use, and return the URL, revision, - and auth info to use. - - Returns: (url, rev, (username, password)). - """ - scheme, netloc, path, query, frag = urllib_parse.urlsplit(url) - if '+' not in scheme: - raise ValueError( - "Sorry, {!r} is a malformed VCS url. " - "The format is <vcs>+<protocol>://<url>, " - "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) - ) - # Remove the vcs prefix. - scheme = scheme.split('+', 1)[1] - netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) - rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - url = urllib_parse.urlunsplit((scheme, netloc, path, query, '')) - return url, rev, user_pass - - @staticmethod - def make_rev_args(username, password): - """ - Return the RevOptions "extra arguments" to use in obtain(). - """ - return [] - - def get_url_rev_options(self, url): - # type: (str) -> Tuple[str, RevOptions] - """ - Return the URL and RevOptions object to use in obtain() and in - some cases export(), as a tuple (url, rev_options). - """ - url, rev, user_pass = self.get_url_rev_and_auth(url) - username, password = user_pass - extra_args = self.make_rev_args(username, password) - rev_options = self.make_rev_options(rev, extra_args=extra_args) - - return url, rev_options - - @staticmethod - def normalize_url(url): - # type: (str) -> str - """ - Normalize a URL for comparison by unquoting it and removing any - trailing slash. - """ - return urllib_parse.unquote(url).rstrip('/') - - @classmethod - def compare_urls(cls, url1, url2): - # type: (str, str) -> bool - """ - Compare two repo URLs for identity, ignoring incidental differences. - """ - return (cls.normalize_url(url1) == cls.normalize_url(url2)) - - def fetch_new(self, dest, url, rev_options): - """ - Fetch a revision from a repository, in the case that this is the - first fetch from the repository. - - Args: - dest: the directory to fetch the repository to. - rev_options: a RevOptions object. - """ - raise NotImplementedError - - def switch(self, dest, url, rev_options): - """ - Switch the repo at ``dest`` to point to ``URL``. - - Args: - rev_options: a RevOptions object. - """ - raise NotImplementedError - - def update(self, dest, url, rev_options): - """ - Update an already-existing repo to the given ``rev_options``. - - Args: - rev_options: a RevOptions object. - """ - raise NotImplementedError - - @classmethod - def is_commit_id_equal(cls, dest, name): - """ - Return whether the id of the current commit equals the given name. - - Args: - dest: the repository directory. - name: a string name. - """ - raise NotImplementedError - - def obtain(self, dest, url): - # type: (str, str) -> None - """ - Install or update in editable mode the package represented by this - VersionControl object. - - :param dest: the repository directory in which to install or update. - :param url: the repository URL starting with a vcs prefix. 
- """ - url, rev_options = self.get_url_rev_options(url) - - if not os.path.exists(dest): - self.fetch_new(dest, url, rev_options) - return - - rev_display = rev_options.to_display() - if self.is_repository_directory(dest): - existing_url = self.get_remote_url(dest) - if self.compare_urls(existing_url, url): - logger.debug( - '%s in %s exists, and has correct URL (%s)', - self.repo_name.title(), - display_path(dest), - url, - ) - if not self.is_commit_id_equal(dest, rev_options.rev): - logger.info( - 'Updating %s %s%s', - display_path(dest), - self.repo_name, - rev_display, - ) - self.update(dest, url, rev_options) - else: - logger.info('Skipping because already up-to-date.') - return - - logger.warning( - '%s %s in %s exists with URL %s', - self.name, - self.repo_name, - display_path(dest), - existing_url, - ) - prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', - ('s', 'i', 'w', 'b')) - else: - logger.warning( - 'Directory %s already exists, and is not a %s %s.', - dest, - self.name, - self.repo_name, - ) - # https://github.com/python/mypy/issues/1174 - prompt = ('(i)gnore, (w)ipe, (b)ackup ', # type: ignore - ('i', 'w', 'b')) - - logger.warning( - 'The plan is to install the %s repository %s', - self.name, - url, - ) - response = ask_path_exists('What to do? %s' % prompt[0], prompt[1]) - - if response == 'a': - sys.exit(-1) - - if response == 'w': - logger.warning('Deleting %s', display_path(dest)) - rmtree(dest) - self.fetch_new(dest, url, rev_options) - return - - if response == 'b': - dest_dir = backup_dir(dest) - logger.warning( - 'Backing up %s to %s', display_path(dest), dest_dir, - ) - shutil.move(dest, dest_dir) - self.fetch_new(dest, url, rev_options) - return - - # Do nothing if the response is "i". - if response == 's': - logger.info( - 'Switching %s %s to %s%s', - self.repo_name, - display_path(dest), - url, - rev_display, - ) - self.switch(dest, url, rev_options) - - def unpack(self, location, url): - # type: (str, str) -> None - """ - Clean up current location and download the url repository - (and vcs infos) into location - - :param url: the repository URL starting with a vcs prefix. - """ - if os.path.exists(location): - rmtree(location) - self.obtain(location, url=url) - - @classmethod - def get_remote_url(cls, location): - """ - Return the url used at location - - Raises RemoteNotFoundError if the repository does not have a remote - url configured. - """ - raise NotImplementedError - - @classmethod - def get_revision(cls, location): - """ - Return the current commit id of the files at the given location. - """ - raise NotImplementedError - - @classmethod - def run_command( - cls, - cmd, # type: List[str] - show_stdout=True, # type: bool - cwd=None, # type: Optional[str] - on_returncode='raise', # type: str - extra_ok_returncodes=None, # type: Optional[Iterable[int]] - command_desc=None, # type: Optional[str] - extra_environ=None, # type: Optional[Mapping[str, Any]] - spinner=None # type: Optional[SpinnerInterface] - ): - # type: (...) 
-> Text - """ - Run a VCS subcommand - This is simply a wrapper around call_subprocess that adds the VCS - command name, and checks that the VCS is available - """ - cmd = [cls.name] + cmd - try: - return call_subprocess(cmd, show_stdout, cwd, - on_returncode=on_returncode, - extra_ok_returncodes=extra_ok_returncodes, - command_desc=command_desc, - extra_environ=extra_environ, - unset_environ=cls.unset_environ, - spinner=spinner) - except OSError as e: - # errno.ENOENT = no such file or directory - # In other words, the VCS executable isn't available - if e.errno == errno.ENOENT: - raise BadCommand( - 'Cannot find command %r - do you have ' - '%r installed and in your ' - 'PATH?' % (cls.name, cls.name)) - else: - raise # re-raise exception if a different error occurred - - @classmethod - def is_repository_directory(cls, path): - # type: (str) -> bool - """ - Return whether a directory path is a repository directory. - """ - logger.debug('Checking in %s for %s (%s)...', - path, cls.dirname, cls.name) - return os.path.exists(os.path.join(path, cls.dirname)) - - @classmethod - def controls_location(cls, location): - # type: (str) -> bool - """ - Check if a location is controlled by the vcs. - It is meant to be overridden to implement smarter detection - mechanisms for specific vcs. - - This can do more than is_repository_directory() alone. For example, - the Git override checks that Git is actually available. - """ - return cls.is_repository_directory(location) diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/versioncontrol.pyc b/env/lib/python2.7/site-packages/pip/_internal/vcs/versioncontrol.pyc deleted file mode 100644 index 71d2fd52..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/vcs/versioncontrol.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_internal/wheel.py b/env/lib/python2.7/site-packages/pip/_internal/wheel.py deleted file mode 100644 index 6f034cd0..00000000 --- a/env/lib/python2.7/site-packages/pip/_internal/wheel.py +++ /dev/null @@ -1,1125 +0,0 @@ -""" -Support for installing and building the "wheel" binary package format. 
-""" -from __future__ import absolute_import - -import collections -import compileall -import csv -import hashlib -import logging -import os.path -import re -import shutil -import stat -import sys -import warnings -from base64 import urlsafe_b64encode -from email.parser import Parser - -from pip._vendor import pkg_resources -from pip._vendor.distlib.scripts import ScriptMaker -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.six import StringIO - -from pip._internal import pep425tags -from pip._internal.download import unpack_url -from pip._internal.exceptions import ( - InstallationError, InvalidWheelFilename, UnsupportedWheel, -) -from pip._internal.locations import distutils_scheme -from pip._internal.models.link import Link -from pip._internal.utils.logging import indent_log -from pip._internal.utils.marker_files import PIP_DELETE_MARKER_FILENAME -from pip._internal.utils.misc import ( - LOG_DIVIDER, call_subprocess, captured_stdout, ensure_dir, - format_command_args, path_to_url, read_chunks, -) -from pip._internal.utils.setuptools_build import make_setuptools_shim_args -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import open_spinner - -if MYPY_CHECK_RUNNING: - from typing import ( - Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any, Iterable - ) - from pip._vendor.packaging.requirements import Requirement - from pip._internal.req.req_install import InstallRequirement - from pip._internal.download import PipSession - from pip._internal.index import FormatControl, PackageFinder - from pip._internal.operations.prepare import ( - RequirementPreparer - ) - from pip._internal.cache import WheelCache - from pip._internal.pep425tags import Pep425Tag - - InstalledCSVRow = Tuple[str, ...] - - -VERSION_COMPATIBLE = (1, 0) - - -logger = logging.getLogger(__name__) - - -def normpath(src, p): - return os.path.relpath(src, p).replace(os.path.sep, '/') - - -def hash_file(path, blocksize=1 << 20): - # type: (str, int) -> Tuple[Any, int] - """Return (hash, length) for path using hashlib.sha256()""" - h = hashlib.sha256() - length = 0 - with open(path, 'rb') as f: - for block in read_chunks(f, size=blocksize): - length += len(block) - h.update(block) - return (h, length) # type: ignore - - -def rehash(path, blocksize=1 << 20): - # type: (str, int) -> Tuple[str, str] - """Return (encoded_digest, length) for path using hashlib.sha256()""" - h, length = hash_file(path, blocksize) - digest = 'sha256=' + urlsafe_b64encode( - h.digest() - ).decode('latin1').rstrip('=') - # unicode/str python2 issues - return (digest, str(length)) # type: ignore - - -def open_for_csv(name, mode): - # type: (str, Text) -> IO - if sys.version_info[0] < 3: - nl = {} # type: Dict[str, Any] - bin = 'b' - else: - nl = {'newline': ''} # type: Dict[str, Any] - bin = '' - return open(name, mode + bin, **nl) - - -def replace_python_tag(wheelname, new_tag): - # type: (str, str) -> str - """Replace the Python tag in a wheel file name with a new value. 
- """ - parts = wheelname.split('-') - parts[-3] = new_tag - return '-'.join(parts) - - -def fix_script(path): - # type: (str) -> Optional[bool] - """Replace #!python with #!/path/to/python - Return True if file was changed.""" - # XXX RECORD hashes will need to be updated - if os.path.isfile(path): - with open(path, 'rb') as script: - firstline = script.readline() - if not firstline.startswith(b'#!python'): - return False - exename = sys.executable.encode(sys.getfilesystemencoding()) - firstline = b'#!' + exename + os.linesep.encode("ascii") - rest = script.read() - with open(path, 'wb') as script: - script.write(firstline) - script.write(rest) - return True - return None - - -dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?) - \.dist-info$""", re.VERBOSE) - - -def root_is_purelib(name, wheeldir): - # type: (str, str) -> bool - """ - Return True if the extracted wheel in wheeldir should go into purelib. - """ - name_folded = name.replace("-", "_") - for item in os.listdir(wheeldir): - match = dist_info_re.match(item) - if match and match.group('name') == name_folded: - with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: - for line in wheel: - line = line.lower().rstrip() - if line == "root-is-purelib: true": - return True - return False - - -def get_entrypoints(filename): - # type: (str) -> Tuple[Dict[str, str], Dict[str, str]] - if not os.path.exists(filename): - return {}, {} - - # This is done because you can pass a string to entry_points wrappers which - # means that they may or may not be valid INI files. The attempt here is to - # strip leading and trailing whitespace in order to make them valid INI - # files. - with open(filename) as fp: - data = StringIO() - for line in fp: - data.write(line.strip()) - data.write("\n") - data.seek(0) - - # get the entry points and then the script names - entry_points = pkg_resources.EntryPoint.parse_map(data) - console = entry_points.get('console_scripts', {}) - gui = entry_points.get('gui_scripts', {}) - - def _split_ep(s): - """get the string representation of EntryPoint, remove space and split - on '='""" - return str(s).replace(" ", "").split("=") - - # convert the EntryPoint objects into strings with module:function - console = dict(_split_ep(v) for v in console.values()) - gui = dict(_split_ep(v) for v in gui.values()) - return console, gui - - -def message_about_scripts_not_on_PATH(scripts): - # type: (Sequence[str]) -> Optional[str] - """Determine if any scripts are not on PATH and format a warning. - - Returns a warning message if one or more scripts are not on PATH, - otherwise None. - """ - if not scripts: - return None - - # Group scripts by the path they were installed in - grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set] - for destfile in scripts: - parent_dir = os.path.dirname(destfile) - script_name = os.path.basename(destfile) - grouped_by_dir[parent_dir].add(script_name) - - # We don't want to warn for directories that are on PATH. - not_warn_dirs = [ - os.path.normcase(i).rstrip(os.sep) for i in - os.environ.get("PATH", "").split(os.pathsep) - ] - # If an executable sits with sys.executable, we don't warn for it. - # This covers the case of venv invocations without activating the venv. 
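A minimal, self-contained sketch of the tag-replacement logic in replace_python_tag() above; the wheel filename used here is illustrative only.

def _replace_python_tag_sketch(wheelname, new_tag):
    # The python tag is the third field from the end of a wheel filename.
    parts = wheelname.split('-')
    parts[-3] = new_tag
    return '-'.join(parts)

# 'pip-19.1.1-py2.py3-none-any.whl' -> 'pip-19.1.1-py3-none-any.whl'
print(_replace_python_tag_sketch('pip-19.1.1-py2.py3-none-any.whl', 'py3'))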
- not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) - warn_for = { - parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() - if os.path.normcase(parent_dir) not in not_warn_dirs - } - if not warn_for: - return None - - # Format a message - msg_lines = [] - for parent_dir, scripts in warn_for.items(): - sorted_scripts = sorted(scripts) # type: List[str] - if len(sorted_scripts) == 1: - start_text = "script {} is".format(sorted_scripts[0]) - else: - start_text = "scripts {} are".format( - ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] - ) - - msg_lines.append( - "The {} installed in '{}' which is not on PATH." - .format(start_text, parent_dir) - ) - - last_line_fmt = ( - "Consider adding {} to PATH or, if you prefer " - "to suppress this warning, use --no-warn-script-location." - ) - if len(msg_lines) == 1: - msg_lines.append(last_line_fmt.format("this directory")) - else: - msg_lines.append(last_line_fmt.format("these directories")) - - # Returns the formatted multiline message - return "\n".join(msg_lines) - - -def sorted_outrows(outrows): - # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow] - """ - Return the given rows of a RECORD file in sorted order. - - Each row is a 3-tuple (path, hash, size) and corresponds to a record of - a RECORD file (see PEP 376 and PEP 427 for details). For the rows - passed to this function, the size can be an integer as an int or string, - or the empty string. - """ - # Normally, there should only be one row per path, in which case the - # second and third elements don't come into play when sorting. - # However, in cases in the wild where a path might happen to occur twice, - # we don't want the sort operation to trigger an error (but still want - # determinism). Since the third element can be an int or string, we - # coerce each element to a string to avoid a TypeError in this case. - # For additional background, see-- - # https://github.com/pypa/pip/issues/5868 - return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) - - -def get_csv_rows_for_installed( - old_csv_rows, # type: Iterable[List[str]] - installed, # type: Dict[str, str] - changed, # type: set - generated, # type: List[str] - lib_dir, # type: str -): - # type: (...) -> List[InstalledCSVRow] - """ - :param installed: A map from archive RECORD path to installation RECORD - path. - """ - installed_rows = [] # type: List[InstalledCSVRow] - for row in old_csv_rows: - if len(row) > 3: - logger.warning( - 'RECORD line has more than three elements: {}'.format(row) - ) - # Make a copy because we are mutating the row. - row = list(row) - old_path = row[0] - new_path = installed.pop(old_path, old_path) - row[0] = new_path - if new_path in changed: - digest, length = rehash(new_path) - row[1] = digest - row[2] = length - installed_rows.append(tuple(row)) - for f in generated: - digest, length = rehash(f) - installed_rows.append((normpath(f, lib_dir), digest, str(length))) - for f in installed: - installed_rows.append((installed[f], '', '')) - return installed_rows - - -def move_wheel_files( - name, # type: str - req, # type: Requirement - wheeldir, # type: str - user=False, # type: bool - home=None, # type: Optional[str] - root=None, # type: Optional[str] - pycompile=True, # type: bool - scheme=None, # type: Optional[Mapping[str, str]] - isolated=False, # type: bool - prefix=None, # type: Optional[str] - warn_script_location=True # type: bool -): - # type: (...) -> None - """Install a wheel""" - # TODO: Investigate and break this up. 
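A short sketch of the deterministic RECORD-row sort described in sorted_outrows() above; the rows are made up, and coercing every field to str avoids a TypeError when the size column mixes ints and strings.

rows = [
    ('pkg/b.py', 'sha256=xyz', 120),     # size stored as an int
    ('pkg/a.py', 'sha256=abc', '95'),    # size stored as a string
    ('pkg/a.py', '', ''),                # duplicate path with empty hash/size
]
for row in sorted(rows, key=lambda row: tuple(str(x) for x in row)):
    print(row)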
- # TODO: Look into moving this into a dedicated class for representing an - # installation. - - if not scheme: - scheme = distutils_scheme( - name, user=user, home=home, root=root, isolated=isolated, - prefix=prefix, - ) - - if root_is_purelib(name, wheeldir): - lib_dir = scheme['purelib'] - else: - lib_dir = scheme['platlib'] - - info_dir = [] # type: List[str] - data_dirs = [] - source = wheeldir.rstrip(os.path.sep) + os.path.sep - - # Record details of the files moved - # installed = files copied from the wheel to the destination - # changed = files changed while installing (scripts #! line typically) - # generated = files newly generated during the install (script wrappers) - installed = {} # type: Dict[str, str] - changed = set() - generated = [] # type: List[str] - - # Compile all of the pyc files that we're going to be installing - if pycompile: - with captured_stdout() as stdout: - with warnings.catch_warnings(): - warnings.filterwarnings('ignore') - compileall.compile_dir(source, force=True, quiet=True) - logger.debug(stdout.getvalue()) - - def record_installed(srcfile, destfile, modified=False): - """Map archive RECORD paths to installation RECORD paths.""" - oldpath = normpath(srcfile, wheeldir) - newpath = normpath(destfile, lib_dir) - installed[oldpath] = newpath - if modified: - changed.add(destfile) - - def clobber(source, dest, is_base, fixer=None, filter=None): - ensure_dir(dest) # common for the 'include' path - - for dir, subdirs, files in os.walk(source): - basedir = dir[len(source):].lstrip(os.path.sep) - destdir = os.path.join(dest, basedir) - if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): - continue - for s in subdirs: - destsubdir = os.path.join(dest, basedir, s) - if is_base and basedir == '' and destsubdir.endswith('.data'): - data_dirs.append(s) - continue - elif (is_base and - s.endswith('.dist-info') and - canonicalize_name(s).startswith( - canonicalize_name(req.name))): - assert not info_dir, ('Multiple .dist-info directories: ' + - destsubdir + ', ' + - ', '.join(info_dir)) - info_dir.append(destsubdir) - for f in files: - # Skip unwanted files - if filter and filter(f): - continue - srcfile = os.path.join(dir, f) - destfile = os.path.join(dest, basedir, f) - # directory creation is lazy and after the file filtering above - # to ensure we don't install empty dirs; empty dirs can't be - # uninstalled. - ensure_dir(destdir) - - # copyfile (called below) truncates the destination if it - # exists and then writes the new contents. This is fine in most - # cases, but can cause a segfault if pip has loaded a shared - # object (e.g. from pyopenssl through its vendored urllib3) - # Since the shared object is mmap'd an attempt to call a - # symbol in it will then cause a segfault. Unlinking the file - # allows writing of new contents while allowing the process to - # continue to use the old copy. - if os.path.exists(destfile): - os.unlink(destfile) - - # We use copyfile (not move, copy, or copy2) to be extra sure - # that we are not moving directories over (copyfile fails for - # directories) as well as to ensure that we are not copying - # over any metadata because we want more control over what - # metadata we actually copy over. - shutil.copyfile(srcfile, destfile) - - # Copy over the metadata for the file, currently this only - # includes the atime and mtime. - st = os.stat(srcfile) - if hasattr(os, "utime"): - os.utime(destfile, (st.st_atime, st.st_mtime)) - - # If our file is executable, then make our destination file - # executable. 
- if os.access(srcfile, os.X_OK): - st = os.stat(srcfile) - permissions = ( - st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH - ) - os.chmod(destfile, permissions) - - changed = False - if fixer: - changed = fixer(destfile) - record_installed(srcfile, destfile, changed) - - clobber(source, lib_dir, True) - - assert info_dir, "%s .dist-info directory not found" % req - - # Get the defined entry points - ep_file = os.path.join(info_dir[0], 'entry_points.txt') - console, gui = get_entrypoints(ep_file) - - def is_entrypoint_wrapper(name): - # EP, EP.exe and EP-script.py are scripts generated for - # entry point EP by setuptools - if name.lower().endswith('.exe'): - matchname = name[:-4] - elif name.lower().endswith('-script.py'): - matchname = name[:-10] - elif name.lower().endswith(".pya"): - matchname = name[:-4] - else: - matchname = name - # Ignore setuptools-generated scripts - return (matchname in console or matchname in gui) - - for datadir in data_dirs: - fixer = None - filter = None - for subdir in os.listdir(os.path.join(wheeldir, datadir)): - fixer = None - if subdir == 'scripts': - fixer = fix_script - filter = is_entrypoint_wrapper - source = os.path.join(wheeldir, datadir, subdir) - dest = scheme[subdir] - clobber(source, dest, False, fixer=fixer, filter=filter) - - maker = ScriptMaker(None, scheme['scripts']) - - # Ensure old scripts are overwritten. - # See https://github.com/pypa/pip/issues/1800 - maker.clobber = True - - # Ensure we don't generate any variants for scripts because this is almost - # never what somebody wants. - # See https://bitbucket.org/pypa/distlib/issue/35/ - maker.variants = {''} - - # This is required because otherwise distlib creates scripts that are not - # executable. - # See https://bitbucket.org/pypa/distlib/issue/32/ - maker.set_mode = True - - # Simplify the script and fix the fact that the default script swallows - # every single stack trace. - # See https://bitbucket.org/pypa/distlib/issue/34/ - # See https://bitbucket.org/pypa/distlib/issue/33/ - def _get_script_text(entry): - if entry.suffix is None: - raise InstallationError( - "Invalid script entry point: %s for req: %s - A callable " - "suffix is required. Cf https://packaging.python.org/en/" - "latest/distributing.html#console-scripts for more " - "information." % (entry, req) - ) - return maker.script_template % { - "module": entry.prefix, - "import_name": entry.suffix.split(".")[0], - "func": entry.suffix, - } - # ignore type, because mypy disallows assigning to a method, - # see https://github.com/python/mypy/issues/2427 - maker._get_script_text = _get_script_text # type: ignore - maker.script_template = r"""# -*- coding: utf-8 -*- -import re -import sys - -from %(module)s import %(import_name)s - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(%(func)s()) -""" - - # Special case pip and setuptools to generate versioned wrappers - # - # The issue is that some projects (specifically, pip and setuptools) use - # code in setup.py to create "versioned" entry points - pip2.7 on Python - # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into - # the wheel metadata at build time, and so if the wheel is installed with - # a *different* version of Python the entry points will be wrong. The - # correct fix for this is to enhance the metadata to be able to describe - # such versioned entry points, but that won't happen till Metadata 2.0 is - # available. 
- # In the meantime, projects using versioned entry points will either have - # incorrect versioned entry points, or they will not be able to distribute - # "universal" wheels (i.e., they will need a wheel per Python version). - # - # Because setuptools and pip are bundled with _ensurepip and virtualenv, - # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we - # override the versioned entry points in the wheel and generate the - # correct ones. This code is purely a short-term measure until Metadata 2.0 - # is available. - # - # To add the level of hack in this section of code, in order to support - # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment - # variable which will control which version scripts get installed. - # - # ENSUREPIP_OPTIONS=altinstall - # - Only pipX.Y and easy_install-X.Y will be generated and installed - # ENSUREPIP_OPTIONS=install - # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note - # that this option is technically if ENSUREPIP_OPTIONS is set and is - # not altinstall - # DEFAULT - # - The default behavior is to install pip, pipX, pipX.Y, easy_install - # and easy_install-X.Y. - pip_script = console.pop('pip', None) - if pip_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - spec = 'pip = ' + pip_script - generated.extend(maker.make(spec)) - - if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": - spec = 'pip%s = %s' % (sys.version[:1], pip_script) - generated.extend(maker.make(spec)) - - spec = 'pip%s = %s' % (sys.version[:3], pip_script) - generated.extend(maker.make(spec)) - # Delete any other versioned pip entry points - pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] - for k in pip_ep: - del console[k] - easy_install_script = console.pop('easy_install', None) - if easy_install_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - spec = 'easy_install = ' + easy_install_script - generated.extend(maker.make(spec)) - - spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script) - generated.extend(maker.make(spec)) - # Delete any other versioned easy_install entry points - easy_install_ep = [ - k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) - ] - for k in easy_install_ep: - del console[k] - - # Generate the console and GUI entry points specified in the wheel - if len(console) > 0: - generated_console_scripts = maker.make_multiple( - ['%s = %s' % kv for kv in console.items()] - ) - generated.extend(generated_console_scripts) - - if warn_script_location: - msg = message_about_scripts_not_on_PATH(generated_console_scripts) - if msg is not None: - logger.warning(msg) - - if len(gui) > 0: - generated.extend( - maker.make_multiple( - ['%s = %s' % kv for kv in gui.items()], - {'gui': True} - ) - ) - - # Record pip as the installer - installer = os.path.join(info_dir[0], 'INSTALLER') - temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') - with open(temp_installer, 'wb') as installer_file: - installer_file.write(b'pip\n') - shutil.move(temp_installer, installer) - generated.append(installer) - - # Record details of all files installed - record = os.path.join(info_dir[0], 'RECORD') - temp_record = os.path.join(info_dir[0], 'RECORD.pip') - with open_for_csv(record, 'r') as record_in: - with open_for_csv(temp_record, 'w+') as record_out: - reader = csv.reader(record_in) - outrows = get_csv_rows_for_installed( - reader, installed=installed, changed=changed, - generated=generated, lib_dir=lib_dir, - ) - writer = csv.writer(record_out) - # Sort to 
simplify testing. - for row in sorted_outrows(outrows): - writer.writerow(row) - shutil.move(temp_record, record) - - -def wheel_version(source_dir): - # type: (Optional[str]) -> Optional[Tuple[int, ...]] - """ - Return the Wheel-Version of an extracted wheel, if possible. - - Otherwise, return None if we couldn't parse / extract it. - """ - try: - dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0] - - wheel_data = dist.get_metadata('WHEEL') - wheel_data = Parser().parsestr(wheel_data) - - version = wheel_data['Wheel-Version'].strip() - version = tuple(map(int, version.split('.'))) - return version - except Exception: - return None - - -def check_compatibility(version, name): - # type: (Optional[Tuple[int, ...]], str) -> None - """ - Raises errors or warns if called with an incompatible Wheel-Version. - - Pip should refuse to install a Wheel-Version that's a major series - ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when - installing a version only minor version ahead (e.g 1.2 > 1.1). - - version: a 2-tuple representing a Wheel-Version (Major, Minor) - name: name of wheel or package to raise exception about - - :raises UnsupportedWheel: when an incompatible Wheel-Version is given - """ - if not version: - raise UnsupportedWheel( - "%s is in an unsupported or invalid wheel" % name - ) - if version[0] > VERSION_COMPATIBLE[0]: - raise UnsupportedWheel( - "%s's Wheel-Version (%s) is not compatible with this version " - "of pip" % (name, '.'.join(map(str, version))) - ) - elif version > VERSION_COMPATIBLE: - logger.warning( - 'Installing from a newer Wheel-Version (%s)', - '.'.join(map(str, version)), - ) - - -def format_tag(file_tag): - # type: (Tuple[str, ...]) -> str - """ - Format three tags in the form "<python_tag>-<abi_tag>-<platform_tag>". - - :param file_tag: A 3-tuple of tags (python_tag, abi_tag, platform_tag). - """ - return '-'.join(file_tag) - - -class Wheel(object): - """A wheel file""" - - # TODO: Maybe move the class into the models sub-package - # TODO: Maybe move the install code into this class - - wheel_file_re = re.compile( - r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?)) - ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) - \.whl|\.dist-info)$""", - re.VERBOSE - ) - - def __init__(self, filename): - # type: (str) -> None - """ - :raises InvalidWheelFilename: when the filename is invalid for a wheel - """ - wheel_info = self.wheel_file_re.match(filename) - if not wheel_info: - raise InvalidWheelFilename( - "%s is not a valid wheel filename." % filename - ) - self.filename = filename - self.name = wheel_info.group('name').replace('_', '-') - # we'll assume "_" means "-" due to wheel naming scheme - # (https://github.com/pypa/pip/issues/1150) - self.version = wheel_info.group('ver').replace('_', '-') - self.build_tag = wheel_info.group('build') - self.pyversions = wheel_info.group('pyver').split('.') - self.abis = wheel_info.group('abi').split('.') - self.plats = wheel_info.group('plat').split('.') - - # All the tag combinations from this file - self.file_tags = { - (x, y, z) for x in self.pyversions - for y in self.abis for z in self.plats - } - - def get_formatted_file_tags(self): - # type: () -> List[str] - """ - Return the wheel's tags as a sorted list of strings. 
- """ - return sorted(format_tag(tag) for tag in self.file_tags) - - def support_index_min(self, tags=None): - # type: (Optional[List[Pep425Tag]]) -> Optional[int] - """ - Return the lowest index that one of the wheel's file_tag combinations - achieves in the supported_tags list e.g. if there are 8 supported tags, - and one of the file tags is first in the list, then return 0. Returns - None is the wheel is not supported. - """ - if tags is None: # for mock - tags = pep425tags.get_supported() - indexes = [tags.index(c) for c in self.file_tags if c in tags] - return min(indexes) if indexes else None - - def supported(self, tags=None): - # type: (Optional[List[Pep425Tag]]) -> bool - """Is this wheel supported on this system?""" - if tags is None: # for mock - tags = pep425tags.get_supported() - return bool(set(tags).intersection(self.file_tags)) - - -def _contains_egg_info( - s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): - """Determine whether the string looks like an egg_info. - - :param s: The string to parse. E.g. foo-2.1 - """ - return bool(_egg_info_re.search(s)) - - -def should_use_ephemeral_cache( - req, # type: InstallRequirement - format_control, # type: FormatControl - autobuilding, # type: bool - cache_available # type: bool -): - # type: (...) -> Optional[bool] - """ - Return whether to build an InstallRequirement object using the - ephemeral cache. - - :param cache_available: whether a cache directory is available for the - autobuilding=True case. - - :return: True or False to build the requirement with ephem_cache=True - or False, respectively; or None not to build the requirement. - """ - if req.constraint: - return None - if req.is_wheel: - if not autobuilding: - logger.info( - 'Skipping %s, due to already being wheel.', req.name, - ) - return None - if not autobuilding: - return False - - if req.editable or not req.source_dir: - return None - - if "binary" not in format_control.get_allowed_formats( - canonicalize_name(req.name)): - logger.info( - "Skipping bdist_wheel for %s, due to binaries " - "being disabled for it.", req.name, - ) - return None - - if req.link and not req.link.is_artifact: - # VCS checkout. Build wheel just for this run. - return True - - link = req.link - base, ext = link.splitext() - if cache_available and _contains_egg_info(base): - return False - - # Otherwise, build the wheel just for this run using the ephemeral - # cache since we are either in the case of e.g. a local directory, or - # no cache directory is available to use. - return True - - -def format_command_result( - command_args, # type: List[str] - command_output, # type: str -): - # type: (...) -> str - """ - Format command information for logging. - """ - command_desc = format_command_args(command_args) - text = 'Command arguments: {}\n'.format(command_desc) - - if not command_output: - text += 'Command output: None' - elif logger.getEffectiveLevel() > logging.DEBUG: - text += 'Command output: [use --verbose to show]' - else: - if not command_output.endswith('\n'): - command_output += '\n' - text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER) - - return text - - -def get_legacy_build_wheel_path( - names, # type: List[str] - temp_dir, # type: str - req, # type: InstallRequirement - command_args, # type: List[str] - command_output, # type: str -): - # type: (...) -> Optional[str] - """ - Return the path to the wheel in the temporary build directory. - """ - # Sort for determinism. 
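A small sketch of the tag matching behind Wheel.supported() and Wheel.support_index_min() above; the supported-tag list and file tags are illustrative, and a lower index means a more preferred match.

supported_tags = [
    ('cp37', 'cp37m', 'manylinux1_x86_64'),
    ('cp37', 'none', 'any'),
    ('py3', 'none', 'any'),
]
file_tags = {('py3', 'none', 'any'), ('py2', 'none', 'any')}

is_supported = bool(set(supported_tags).intersection(file_tags))
indexes = [supported_tags.index(tag) for tag in file_tags if tag in supported_tags]
best_index = min(indexes) if indexes else None
print(is_supported, best_index)  # True 2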
- names = sorted(names) - if not names: - msg = ( - 'Legacy build of wheel for {!r} created no files.\n' - ).format(req.name) - msg += format_command_result(command_args, command_output) - logger.warning(msg) - return None - - if len(names) > 1: - msg = ( - 'Legacy build of wheel for {!r} created more than one file.\n' - 'Filenames (choosing first): {}\n' - ).format(req.name, names) - msg += format_command_result(command_args, command_output) - logger.warning(msg) - - return os.path.join(temp_dir, names[0]) - - -class WheelBuilder(object): - """Build wheels from a RequirementSet.""" - - def __init__( - self, - finder, # type: PackageFinder - preparer, # type: RequirementPreparer - wheel_cache, # type: WheelCache - build_options=None, # type: Optional[List[str]] - global_options=None, # type: Optional[List[str]] - no_clean=False # type: bool - ): - # type: (...) -> None - self.finder = finder - self.preparer = preparer - self.wheel_cache = wheel_cache - - self._wheel_dir = preparer.wheel_download_dir - - self.build_options = build_options or [] - self.global_options = global_options or [] - self.no_clean = no_clean - - def _build_one(self, req, output_dir, python_tag=None): - """Build one wheel. - - :return: The filename of the built wheel, or None if the build failed. - """ - # Install build deps into temporary directory (PEP 518) - with req.build_env: - return self._build_one_inside_env(req, output_dir, - python_tag=python_tag) - - def _build_one_inside_env(self, req, output_dir, python_tag=None): - with TempDirectory(kind="wheel") as temp_dir: - if req.use_pep517: - builder = self._build_one_pep517 - else: - builder = self._build_one_legacy - wheel_path = builder(req, temp_dir.path, python_tag=python_tag) - if wheel_path is not None: - wheel_name = os.path.basename(wheel_path) - dest_path = os.path.join(output_dir, wheel_name) - try: - wheel_hash, length = hash_file(wheel_path) - shutil.move(wheel_path, dest_path) - logger.info('Created wheel for %s: ' - 'filename=%s size=%d sha256=%s', - req.name, wheel_name, length, - wheel_hash.hexdigest()) - logger.info('Stored in directory: %s', output_dir) - return dest_path - except Exception: - pass - # Ignore return, we can't do anything else useful. - self._clean_one(req) - return None - - def _base_setup_args(self, req): - # NOTE: Eventually, we'd want to also -S to the flags here, when we're - # isolating. Currently, it breaks Python in virtualenvs, because it - # relies on site.py to find parts of the standard library outside the - # virtualenv. - base_cmd = make_setuptools_shim_args(req.setup_py_path, - unbuffered_output=True) - return base_cmd + list(self.global_options) - - def _build_one_pep517(self, req, tempd, python_tag=None): - """Build one InstallRequirement using the PEP 517 build process. - - Returns path to wheel if successfully built. Otherwise, returns None. - """ - assert req.metadata_directory is not None - if self.build_options: - # PEP 517 does not support --build-options - logger.error('Cannot build wheel for %s using PEP 517 when ' - '--build-options is present' % (req.name,)) - return None - try: - req.spin_message = 'Building wheel for %s (PEP 517)' % (req.name,) - logger.debug('Destination directory: %s', tempd) - wheel_name = req.pep517_backend.build_wheel( - tempd, - metadata_directory=req.metadata_directory - ) - if python_tag: - # General PEP 517 backends don't necessarily support - # a "--python-tag" option, so we rename the wheel - # file directly. 
- new_name = replace_python_tag(wheel_name, python_tag) - os.rename( - os.path.join(tempd, wheel_name), - os.path.join(tempd, new_name) - ) - # Reassign to simplify the return at the end of function - wheel_name = new_name - except Exception: - logger.error('Failed building wheel for %s', req.name) - return None - return os.path.join(tempd, wheel_name) - - def _build_one_legacy(self, req, tempd, python_tag=None): - """Build one InstallRequirement using the "legacy" build process. - - Returns path to wheel if successfully built. Otherwise, returns None. - """ - base_args = self._base_setup_args(req) - - spin_message = 'Building wheel for %s (setup.py)' % (req.name,) - with open_spinner(spin_message) as spinner: - logger.debug('Destination directory: %s', tempd) - wheel_args = base_args + ['bdist_wheel', '-d', tempd] \ - + self.build_options - - if python_tag is not None: - wheel_args += ["--python-tag", python_tag] - - try: - output = call_subprocess(wheel_args, cwd=req.setup_py_dir, - spinner=spinner) - except Exception: - spinner.finish("error") - logger.error('Failed building wheel for %s', req.name) - return None - names = os.listdir(tempd) - wheel_path = get_legacy_build_wheel_path( - names=names, - temp_dir=tempd, - req=req, - command_args=wheel_args, - command_output=output, - ) - return wheel_path - - def _clean_one(self, req): - base_args = self._base_setup_args(req) - - logger.info('Running setup.py clean for %s', req.name) - clean_args = base_args + ['clean', '--all'] - try: - call_subprocess(clean_args, cwd=req.source_dir) - return True - except Exception: - logger.error('Failed cleaning build dir for %s', req.name) - return False - - def build( - self, - requirements, # type: Iterable[InstallRequirement] - session, # type: PipSession - autobuilding=False # type: bool - ): - # type: (...) -> List[InstallRequirement] - """Build wheels. - - :param unpack: If True, replace the sdist we built from with the - newly built wheel, in preparation for installation. - :return: True if all the wheels built correctly. - """ - buildset = [] - format_control = self.finder.format_control - # Whether a cache directory is available for autobuilding=True. - cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir) - - for req in requirements: - ephem_cache = should_use_ephemeral_cache( - req, format_control=format_control, autobuilding=autobuilding, - cache_available=cache_available, - ) - if ephem_cache is None: - continue - - buildset.append((req, ephem_cache)) - - if not buildset: - return [] - - # Is any wheel build not using the ephemeral cache? - if any(not ephem_cache for _, ephem_cache in buildset): - have_directory_for_build = self._wheel_dir or ( - autobuilding and self.wheel_cache.cache_dir - ) - assert have_directory_for_build - - # TODO by @pradyunsg - # Should break up this method into 2 separate methods. - - # Build the wheels. 
- logger.info( - 'Building wheels for collected packages: %s', - ', '.join([req.name for (req, _) in buildset]), - ) - _cache = self.wheel_cache # shorter name - with indent_log(): - build_success, build_failure = [], [] - for req, ephem in buildset: - python_tag = None - if autobuilding: - python_tag = pep425tags.implementation_tag - if ephem: - output_dir = _cache.get_ephem_path_for_link(req.link) - else: - output_dir = _cache.get_path_for_link(req.link) - try: - ensure_dir(output_dir) - except OSError as e: - logger.warning("Building wheel for %s failed: %s", - req.name, e) - build_failure.append(req) - continue - else: - output_dir = self._wheel_dir - wheel_file = self._build_one( - req, output_dir, - python_tag=python_tag, - ) - if wheel_file: - build_success.append(req) - if autobuilding: - # XXX: This is mildly duplicative with prepare_files, - # but not close enough to pull out to a single common - # method. - # The code below assumes temporary source dirs - - # prevent it doing bad things. - if req.source_dir and not os.path.exists(os.path.join( - req.source_dir, PIP_DELETE_MARKER_FILENAME)): - raise AssertionError( - "bad source dir - missing marker") - # Delete the source we built the wheel from - req.remove_temporary_source() - # set the build directory again - name is known from - # the work prepare_files did. - req.source_dir = req.build_location( - self.preparer.build_dir - ) - # Update the link for this. - req.link = Link(path_to_url(wheel_file)) - assert req.link.is_wheel - # extract the wheel into the dir - unpack_url( - req.link, req.source_dir, None, False, - session=session, - ) - else: - build_failure.append(req) - - # notify success/failure - if build_success: - logger.info( - 'Successfully built %s', - ' '.join([req.name for req in build_success]), - ) - if build_failure: - logger.info( - 'Failed to build %s', - ' '.join([req.name for req in build_failure]), - ) - # Return a list of requirements that failed to build - return build_failure diff --git a/env/lib/python2.7/site-packages/pip/_internal/wheel.pyc b/env/lib/python2.7/site-packages/pip/_internal/wheel.pyc deleted file mode 100644 index 02143d46..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_internal/wheel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/__init__.py deleted file mode 100644 index c1d9508d..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/__init__.py +++ /dev/null @@ -1,109 +0,0 @@ -""" -pip._vendor is for vendoring dependencies of pip to prevent needing pip to -depend on something external. - -Files inside of pip._vendor should be considered immutable and should only be -updated to versions from upstream. -""" -from __future__ import absolute_import - -import glob -import os.path -import sys - -# Downstream redistributors which have debundled our dependencies should also -# patch this value to be true. This will trigger the additional patching -# to cause things like "six" to be available as pip. -DEBUNDLED = False - -# By default, look in this directory for a bunch of .whl files which we will -# add to the beginning of sys.path before attempting to import anything. This -# is done to support downstream re-distributors like Debian and Fedora who -# wish to create their own Wheels for our dependencies to aid in debundling. 
-WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) - - -# Define a small helper function to alias our vendored modules to the real ones -# if the vendored ones do not exist. This idea of this was taken from -# https://github.com/kennethreitz/requests/pull/2567. -def vendored(modulename): - vendored_name = "{0}.{1}".format(__name__, modulename) - - try: - __import__(modulename, globals(), locals(), level=0) - except ImportError: - # We can just silently allow import failures to pass here. If we - # got to this point it means that ``import pip._vendor.whatever`` - # failed and so did ``import whatever``. Since we're importing this - # upfront in an attempt to alias imports, not erroring here will - # just mean we get a regular import error whenever pip *actually* - # tries to import one of these modules to use it, which actually - # gives us a better error message than we would have otherwise - # gotten. - pass - else: - sys.modules[vendored_name] = sys.modules[modulename] - base, head = vendored_name.rsplit(".", 1) - setattr(sys.modules[base], head, sys.modules[modulename]) - - -# If we're operating in a debundled setup, then we want to go ahead and trigger -# the aliasing of our vendored libraries as well as looking for wheels to add -# to our sys.path. This will cause all of this code to be a no-op typically -# however downstream redistributors can enable it in a consistent way across -# all platforms. -if DEBUNDLED: - # Actually look inside of WHEEL_DIR to find .whl files and add them to the - # front of our sys.path. - sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path - - # Actually alias all of our vendored dependencies. - vendored("cachecontrol") - vendored("colorama") - vendored("distlib") - vendored("distro") - vendored("html5lib") - vendored("lockfile") - vendored("six") - vendored("six.moves") - vendored("six.moves.urllib") - vendored("six.moves.urllib.parse") - vendored("packaging") - vendored("packaging.version") - vendored("packaging.specifiers") - vendored("pep517") - vendored("pkg_resources") - vendored("progress") - vendored("pytoml") - vendored("retrying") - vendored("requests") - vendored("requests.exceptions") - vendored("requests.packages") - vendored("requests.packages.urllib3") - vendored("requests.packages.urllib3._collections") - vendored("requests.packages.urllib3.connection") - vendored("requests.packages.urllib3.connectionpool") - vendored("requests.packages.urllib3.contrib") - vendored("requests.packages.urllib3.contrib.ntlmpool") - vendored("requests.packages.urllib3.contrib.pyopenssl") - vendored("requests.packages.urllib3.exceptions") - vendored("requests.packages.urllib3.fields") - vendored("requests.packages.urllib3.filepost") - vendored("requests.packages.urllib3.packages") - vendored("requests.packages.urllib3.packages.ordered_dict") - vendored("requests.packages.urllib3.packages.six") - vendored("requests.packages.urllib3.packages.ssl_match_hostname") - vendored("requests.packages.urllib3.packages.ssl_match_hostname." 
- "_implementation") - vendored("requests.packages.urllib3.poolmanager") - vendored("requests.packages.urllib3.request") - vendored("requests.packages.urllib3.response") - vendored("requests.packages.urllib3.util") - vendored("requests.packages.urllib3.util.connection") - vendored("requests.packages.urllib3.util.request") - vendored("requests.packages.urllib3.util.response") - vendored("requests.packages.urllib3.util.retry") - vendored("requests.packages.urllib3.util.ssl_") - vendored("requests.packages.urllib3.util.timeout") - vendored("requests.packages.urllib3.util.url") - vendored("urllib3") diff --git a/env/lib/python2.7/site-packages/pip/_vendor/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/__init__.pyc deleted file mode 100644 index e6e3e217..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/appdirs.py b/env/lib/python2.7/site-packages/pip/_vendor/appdirs.py deleted file mode 100644 index 2bd39110..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/appdirs.py +++ /dev/null @@ -1,604 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2005-2010 ActiveState Software Inc. -# Copyright (c) 2013 Eddy Petrișor - -"""Utilities for determining application-specific dirs. - -See <http://github.com/ActiveState/appdirs> for details and usage. -""" -# Dev Notes: -# - MSDN on where to store app data files: -# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 -# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html -# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html - -__version_info__ = (1, 4, 3) -__version__ = '.'.join(map(str, __version_info__)) - - -import sys -import os - -PY3 = sys.version_info[0] == 3 - -if PY3: - unicode = str - -if sys.platform.startswith('java'): - import platform - os_name = platform.java_ver()[3][0] - if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. - system = 'win32' - elif os_name.startswith('Mac'): # "Mac OS X", etc. - system = 'darwin' - else: # "Linux", "SunOS", "FreeBSD", etc. - # Setting this to "linux2" is not ideal, but only Windows or Mac - # are actually checked for and the rest of the module expects - # *sys.platform* style strings. - system = 'linux2' -else: - system = sys.platform - - - -def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be "<major>.<minor>". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> - for a discussion of issues. 
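A minimal sketch of the module-aliasing trick implemented by vendored() above for debundled installs: when the vendored copy is missing, the real top-level module is imported and registered under the pip._vendor name. The package and module names passed in below are illustrative.

import importlib
import sys

def alias_vendored(package, modulename):
    vendored_name = "{0}.{1}".format(package, modulename)
    try:
        module = importlib.import_module(modulename)
    except ImportError:
        # Let the eventual real import fail with a normal error message.
        return
    sys.modules[vendored_name] = module
    base, head = vendored_name.rsplit(".", 1)
    if base in sys.modules:
        setattr(sys.modules[base], head, module)

alias_vendored("pip._vendor", "six")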
- - Typical user data directories are: - Mac OS X: ~/Library/Application Support/<AppName> - Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined - Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> - Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> - Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName> - Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName> - - For Unix, we follow the XDG spec and support $XDG_DATA_HOME. - That means, by default "~/.local/share/<AppName>". - """ - if system == "win32": - if appauthor is None: - appauthor = appname - const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" - path = os.path.normpath(_get_win_folder(const)) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('~/Library/Application Support/') - if appname: - path = os.path.join(path, appname) - else: - path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): - r"""Return full path to the user-shared data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be "<major>.<minor>". - Only applied when appname is present. - "multipath" is an optional parameter only applicable to *nix - which indicates that the entire list of data dirs should be - returned. By default, the first item from XDG_DATA_DIRS is - returned, or '/usr/local/share/<AppName>', - if XDG_DATA_DIRS is not set - - Typical site data directories are: - Mac OS X: /Library/Application Support/<AppName> - Unix: /usr/local/share/<AppName> or /usr/share/<AppName> - Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) - Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. - - For Unix, this is using the $XDG_DATA_DIRS[0] default. - - WARNING: Do not use this on Windows. See the Vista-Fail note above for why. 
- """ - if system == "win32": - if appauthor is None: - appauthor = appname - path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('/Library/Application Support') - if appname: - path = os.path.join(path, appname) - else: - # XDG default for $XDG_DATA_DIRS - # only first, if multipath is False - path = os.getenv('XDG_DATA_DIRS', - os.pathsep.join(['/usr/local/share', '/usr/share'])) - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - return path - - if appname and version: - path = os.path.join(path, version) - return path - - -def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific config dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be "<major>.<minor>". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> - for a discussion of issues. - - Typical user config directories are: - Mac OS X: same as user_data_dir - Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined - Win *: same as user_data_dir - - For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. - That means, by default "~/.config/<AppName>". - """ - if system in ["win32", "darwin"]: - path = user_data_dir(appname, appauthor, None, roaming) - else: - path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): - r"""Return full path to the user-shared data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be "<major>.<minor>". - Only applied when appname is present. - "multipath" is an optional parameter only applicable to *nix - which indicates that the entire list of config dirs should be - returned. 
By default, the first item from XDG_CONFIG_DIRS is - returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set - - Typical site config directories are: - Mac OS X: same as site_data_dir - Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in - $XDG_CONFIG_DIRS - Win *: same as site_data_dir - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) - - For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False - - WARNING: Do not use this on Windows. See the Vista-Fail note above for why. - """ - if system in ["win32", "darwin"]: - path = site_data_dir(appname, appauthor) - if appname and version: - path = os.path.join(path, version) - else: - # XDG default for $XDG_CONFIG_DIRS - # only first, if multipath is False - path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - return path - - -def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): - r"""Return full path to the user-specific cache dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be "<major>.<minor>". - Only applied when appname is present. - "opinion" (boolean) can be False to disable the appending of - "Cache" to the base app data dir for Windows. See - discussion below. - - Typical user cache directories are: - Mac OS X: ~/Library/Caches/<AppName> - Unix: ~/.cache/<AppName> (XDG default) - Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache - Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache - - On Windows the only suggestion in the MSDN docs is that local settings go in - the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming - app data dir (the default returned by `user_data_dir` above). Apps typically - put cache data somewhere *under* the given dir here. Some examples: - ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache - ...\Acme\SuperApp\Cache\1.0 - OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. - This can be disabled with the `opinion=False` option. 
- """ - if system == "win32": - if appauthor is None: - appauthor = appname - path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - if opinion: - path = os.path.join(path, "Cache") - elif system == 'darwin': - path = os.path.expanduser('~/Library/Caches') - if appname: - path = os.path.join(path, appname) - else: - path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific state dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be "<major>.<minor>". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> - for a discussion of issues. - - Typical user state directories are: - Mac OS X: same as user_data_dir - Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined - Win *: same as user_data_dir - - For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state> - to extend the XDG spec and support $XDG_STATE_HOME. - - That means, by default "~/.local/state/<AppName>". - """ - if system in ["win32", "darwin"]: - path = user_data_dir(appname, appauthor, None, roaming) - else: - path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): - r"""Return full path to the user-specific log dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be "<major>.<minor>". - Only applied when appname is present. - "opinion" (boolean) can be False to disable the appending of - "Logs" to the base app data dir for Windows, and "log" to the - base cache dir for Unix. See discussion below. 
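A brief sketch of the XDG-style fallback that user_cache_dir() above applies on Linux: honour $XDG_CACHE_HOME when it is set, otherwise fall back to the spec default under the home directory. The application name is illustrative.

import os

appname = "AutoTimer"
cache_root = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
print(os.path.join(cache_root, appname))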
- - Typical user log directories are: - Mac OS X: ~/Library/Logs/<AppName> - Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined - Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs - Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs - - On Windows the only suggestion in the MSDN docs is that local settings - go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in - examples of what some windows apps use for a logs dir.) - - OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` - value for Windows and appends "log" to the user cache dir for Unix. - This can be disabled with the `opinion=False` option. - """ - if system == "darwin": - path = os.path.join( - os.path.expanduser('~/Library/Logs'), - appname) - elif system == "win32": - path = user_data_dir(appname, appauthor, version) - version = False - if opinion: - path = os.path.join(path, "Logs") - else: - path = user_cache_dir(appname, appauthor, version) - version = False - if opinion: - path = os.path.join(path, "log") - if appname and version: - path = os.path.join(path, version) - return path - - -class AppDirs(object): - """Convenience wrapper for getting application dirs.""" - def __init__(self, appname=None, appauthor=None, version=None, - roaming=False, multipath=False): - self.appname = appname - self.appauthor = appauthor - self.version = version - self.roaming = roaming - self.multipath = multipath - - @property - def user_data_dir(self): - return user_data_dir(self.appname, self.appauthor, - version=self.version, roaming=self.roaming) - - @property - def site_data_dir(self): - return site_data_dir(self.appname, self.appauthor, - version=self.version, multipath=self.multipath) - - @property - def user_config_dir(self): - return user_config_dir(self.appname, self.appauthor, - version=self.version, roaming=self.roaming) - - @property - def site_config_dir(self): - return site_config_dir(self.appname, self.appauthor, - version=self.version, multipath=self.multipath) - - @property - def user_cache_dir(self): - return user_cache_dir(self.appname, self.appauthor, - version=self.version) - - @property - def user_state_dir(self): - return user_state_dir(self.appname, self.appauthor, - version=self.version) - - @property - def user_log_dir(self): - return user_log_dir(self.appname, self.appauthor, - version=self.version) - - -#---- internal support stuff - -def _get_win_folder_from_registry(csidl_name): - """This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. - """ - if PY3: - import winreg as _winreg - else: - import _winreg - - shell_folder_name = { - "CSIDL_APPDATA": "AppData", - "CSIDL_COMMON_APPDATA": "Common AppData", - "CSIDL_LOCAL_APPDATA": "Local AppData", - }[csidl_name] - - key = _winreg.OpenKey( - _winreg.HKEY_CURRENT_USER, - r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" - ) - dir, type = _winreg.QueryValueEx(key, shell_folder_name) - return dir - - -def _get_win_folder_with_pywin32(csidl_name): - from win32com.shell import shellcon, shell - dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) - # Try to make this a unicode path because SHGetFolderPath does - # not return unicode strings when there is unicode data in the - # path. - try: - dir = unicode(dir) - - # Downgrade to short path name if have highbit chars. See - # <http://bugs.activestate.com/show_bug.cgi?id=85099>. 
- has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - try: - import win32api - dir = win32api.GetShortPathName(dir) - except ImportError: - pass - except UnicodeError: - pass - return dir - - -def _get_win_folder_with_ctypes(csidl_name): - import ctypes - - csidl_const = { - "CSIDL_APPDATA": 26, - "CSIDL_COMMON_APPDATA": 35, - "CSIDL_LOCAL_APPDATA": 28, - }[csidl_name] - - buf = ctypes.create_unicode_buffer(1024) - ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - # Downgrade to short path name if have highbit chars. See - # <http://bugs.activestate.com/show_bug.cgi?id=85099>. - has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - return buf.value - -def _get_win_folder_with_jna(csidl_name): - import array - from com.sun import jna - from com.sun.jna.platform import win32 - - buf_size = win32.WinDef.MAX_PATH * 2 - buf = array.zeros('c', buf_size) - shell = win32.Shell32.INSTANCE - shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) - dir = jna.Native.toString(buf.tostring()).rstrip("\0") - - # Downgrade to short path name if have highbit chars. See - # <http://bugs.activestate.com/show_bug.cgi?id=85099>. - has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf = array.zeros('c', buf_size) - kernel = win32.Kernel32.INSTANCE - if kernel.GetShortPathName(dir, buf, buf_size): - dir = jna.Native.toString(buf.tostring()).rstrip("\0") - - return dir - -if system == "win32": - try: - from ctypes import windll - _get_win_folder = _get_win_folder_with_ctypes - except ImportError: - try: - import com.sun.jna - _get_win_folder = _get_win_folder_with_jna - except ImportError: - _get_win_folder = _get_win_folder_from_registry - - -#---- self test code - -if __name__ == "__main__": - appname = "MyApp" - appauthor = "MyCompany" - - props = ("user_data_dir", - "user_config_dir", - "user_cache_dir", - "user_state_dir", - "user_log_dir", - "site_data_dir", - "site_config_dir") - - print("-- app dirs %s --" % __version__) - - print("-- app dirs (with optional 'version')") - dirs = AppDirs(appname, appauthor, version="1.0") - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (without optional 'version')") - dirs = AppDirs(appname, appauthor) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (without optional 'appauthor')") - dirs = AppDirs(appname) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (with disabled 'appauthor')") - dirs = AppDirs(appname, appauthor=False) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/appdirs.pyc b/env/lib/python2.7/site-packages/pip/_vendor/appdirs.pyc deleted file mode 100644 index 7945f8cf..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/appdirs.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.py deleted file mode 100644 index 8fdee66f..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ 
-"""CacheControl import Interface. - -Make it easy to import from cachecontrol without long namespaces. -""" -__author__ = "Eric Larson" -__email__ = "eric@ionrock.org" -__version__ = "0.12.5" - -from .wrapper import CacheControl -from .adapter import CacheControlAdapter -from .controller import CacheController diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.pyc deleted file mode 100644 index 00b979e9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pyc deleted file mode 100644 index bfdae5a0..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyc deleted file mode 100644 index d044aec1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyc deleted file mode 100644 index a265054e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.pyc deleted file mode 100644 index afafe73b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyc deleted file mode 100644 index 774107c1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyc deleted file mode 100644 index b580576c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.pyc deleted file mode 100644 index f2bcad69..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyc deleted file mode 100644 index 4ef89c8b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyc deleted file mode 100644 index e23816c9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pyc deleted file mode 100644 index e2b07462..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyc deleted file mode 100644 index 30a08928..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.pyc b/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.pyc deleted file mode 100644 index c87db752..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/certifi/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/certifi/__init__.py deleted file mode 100644 index 8ccb14e2..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/certifi/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .core import where - -__version__ = "2019.06.16" diff --git a/env/lib/python2.7/site-packages/pip/_vendor/certifi/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/certifi/__init__.pyc deleted file mode 100644 index b23d2d71..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/certifi/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/certifi/__main__.py b/env/lib/python2.7/site-packages/pip/_vendor/certifi/__main__.py deleted file mode 100644 index ae2aff5c..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/certifi/__main__.py +++ /dev/null @@ -1,2 +0,0 @@ -from pip._vendor.certifi import where -print(where()) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/certifi/__main__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/certifi/__main__.pyc deleted file mode 100644 index 53a723dd..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/certifi/__main__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/certifi/core.py b/env/lib/python2.7/site-packages/pip/_vendor/certifi/core.py deleted file mode 100644 index 7271acf4..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/certifi/core.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem. 
-""" -import os - - -def where(): - f = os.path.dirname(__file__) - - return os.path.join(f, 'cacert.pem') diff --git a/env/lib/python2.7/site-packages/pip/_vendor/certifi/core.pyc b/env/lib/python2.7/site-packages/pip/_vendor/certifi/core.pyc deleted file mode 100644 index 88b89dfd..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/certifi/core.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/__init__.py deleted file mode 100644 index 0f9f820e..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -from .compat import PY2, PY3 -from .universaldetector import UniversalDetector -from .version import __version__, VERSION - - -def detect(byte_str): - """ - Detect the encoding of the given byte string. - - :param byte_str: The byte sequence to examine. 
- :type byte_str: ``bytes`` or ``bytearray`` - """ - if not isinstance(byte_str, bytearray): - if not isinstance(byte_str, bytes): - raise TypeError('Expected object of type bytes or bytearray, got: ' - '{0}'.format(type(byte_str))) - else: - byte_str = bytearray(byte_str) - detector = UniversalDetector() - detector.feed(byte_str) - return detector.close() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/__init__.pyc deleted file mode 100644 index 8f8a9abc..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/big5freq.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/big5freq.pyc deleted file mode 100644 index 40b0519e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/big5freq.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.pyc deleted file mode 100644 index 8c4b3a8a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyc deleted file mode 100644 index fbd4521c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetgroupprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetgroupprober.pyc deleted file mode 100644 index 49faa9e3..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetgroupprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pyc deleted file mode 100644 index 8c903863..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/__init__.pyc deleted file mode 100644 index 51f2607e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/chardetect.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/chardetect.pyc deleted file mode 100644 index a8542063..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/chardetect.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/codingstatemachine.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/codingstatemachine.pyc deleted file mode 100644 index c6955310..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/codingstatemachine.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/compat.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/compat.py deleted file mode 100644 index ddd74687..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/compat.py +++ /dev/null @@ -1,34 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# Contributor(s): -# Dan Blanchard 
-# Ian Cordasco -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys - - -if sys.version_info < (3, 0): - PY2 = True - PY3 = False - base_str = (str, unicode) - text_type = unicode -else: - PY2 = False - PY3 = True - base_str = (bytes, str) - text_type = str diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/compat.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/compat.pyc deleted file mode 100644 index 06443729..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.pyc deleted file mode 100644 index 8a886954..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyc deleted file mode 100644 index df8f7836..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pyc deleted file mode 100644 index 1d1a9d26..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/escsm.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/escsm.pyc deleted file mode 100644 index b6e4f942..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/escsm.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/eucjpprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/eucjpprober.pyc deleted file mode 100644 index 8e0d7ea6..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/eucjpprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrfreq.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrfreq.pyc deleted file mode 100644 index 9c73a139..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrfreq.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pyc deleted file mode 100644 index 2b97c03c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwfreq.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwfreq.pyc 
deleted file mode 100644 index f968c81e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwfreq.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.pyc deleted file mode 100644 index cb85cc47..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312freq.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312freq.pyc deleted file mode 100644 index a1e5b32c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312freq.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.pyc deleted file mode 100644 index 09ab814c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/hebrewprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/hebrewprober.pyc deleted file mode 100644 index dc561cce..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/hebrewprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/jisfreq.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/jisfreq.pyc deleted file mode 100644 index eeaeca9a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/jisfreq.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyc deleted file mode 100644 index 6f132ec8..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py deleted file mode 100644 index 2aa4fb2e..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py +++ /dev/null @@ -1,228 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -# this table is modified base on win1251BulgarianCharToOrderMap, so -# only number <64 is sure valid - -Latin5_BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 -210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 - 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 - 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 -) - -win1251BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 -221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 - 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 - 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 96.9392% -# first 1024 sequences:3.0618% -# rest sequences: 0.2992% -# negative sequences: 0.0020% -BulgarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, -3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, -0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, -0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
-3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, -0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, -0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, -0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, -2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, -3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, -1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, -3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, -1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, -2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, -2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, -3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, -1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, -2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, -2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, -1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, -2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, -2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, -2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, -1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, -2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, -1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, -3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, -1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, -3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, -1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, -2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, -1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, -2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, -1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, -2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, -1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, -2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, -1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, -0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, -1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, -1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, -1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, -0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, -0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
-2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, -1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, -1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -) - -Latin5BulgarianModel = { - 'char_to_order_map': Latin5_BulgarianCharToOrderMap, - 'precedence_matrix': BulgarianLangModel, - 'typical_positive_ratio': 0.969392, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-5", - 'language': 'Bulgairan', -} - -Win1251BulgarianModel = { - 'char_to_order_map': win1251BulgarianCharToOrderMap, - 'precedence_matrix': BulgarianLangModel, - 'typical_positive_ratio': 0.969392, - 'keep_english_letter': False, - 'charset_name': "windows-1251", - 'language': 'Bulgarian', -} diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.pyc deleted file mode 100644 index dba1ed6d..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py deleted file mode 100644 index e5f9a1fd..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py +++ /dev/null @@ -1,333 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# KOI8-R language model -# Character Mapping Table: -KOI8R_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 -223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 -238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 - 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 - 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 - 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 - 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 -) - -win1251_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -) - -latin5_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -macCyrillic_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, -) - -IBM855_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, -206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, - 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, -220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, -230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, - 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, - 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, -250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, -) - -IBM866_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 97.6601% -# first 1024 sequences: 2.3389% -# rest sequences: 0.1237% -# negative sequences: 0.0009% -RussianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, -1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, -1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, -2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, -1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, -3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, -1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, -2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, -1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, -1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, -1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, -1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, -3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, -1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, -2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, -1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, -2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, -1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, -1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, -1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, -3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, -3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, -1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, -1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, -0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, -1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, -1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, -0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, -1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, -2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, -1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, -1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, -2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, -1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
-0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, -1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, -1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, -0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, -0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, -0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, -2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, -0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -) - -Koi8rModel = { - 'char_to_order_map': KOI8R_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "KOI8-R", - 'language': 'Russian', -} - -Win1251CyrillicModel = { - 'char_to_order_map': win1251_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "windows-1251", - 'language': 'Russian', -} - -Latin5CyrillicModel = { - 'char_to_order_map': latin5_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-5", - 'language': 'Russian', -} - -MacCyrillicModel = { - 'char_to_order_map': macCyrillic_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "MacCyrillic", - 'language': 'Russian', -} - -Ibm866Model = { - 'char_to_order_map': IBM866_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM866", - 'language': 'Russian', -} - -Ibm855Model = { - 'char_to_order_map': IBM855_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM855", - 'language': 'Russian', -} diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.pyc deleted file mode 100644 index ab7e7db0..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langgreekmodel.py 
b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langgreekmodel.py deleted file mode 100644 index 53322216..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langgreekmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin7_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 -) - -win1253_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 
61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.2851% -# first 1024 sequences:1.7001% -# rest sequences: 0.0359% -# negative sequences: 0.0148% -GreekLangModel = ( -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, -2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, -2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, -2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, -0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, -3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, -2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, -0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, -0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, -0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, -0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, -0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, -0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, -0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, -0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, -0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, -0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, -0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, -0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, -0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, 
-0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, -0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, -0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, -0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, -0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, -0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, -0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin7GreekModel = { - 'char_to_order_map': Latin7_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-7", - 'language': 'Greek', -} - -Win1253GreekModel = { - 'char_to_order_map': win1253_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "windows-1253", - 'language': 'Greek', -} diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langgreekmodel.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langgreekmodel.pyc deleted file mode 100644 index 564296ee..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langgreekmodel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py deleted file mode 100644 index 58f4c875..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py +++ /dev/null @@ -1,200 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Simon Montagu -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. 
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Shoshannah Forbes - original C code (?) -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Windows-1255 language model -# Character Mapping Table: -WIN1255_CHAR_TO_ORDER_MAP = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 - 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 -253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 - 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 -124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, -215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, - 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, -106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, - 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, -238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, - 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, - 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.4004% -# first 1024 sequences: 1.5981% -# rest sequences: 0.087% -# negative sequences: 0.0015% -HEBREW_LANG_MODEL = ( -0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, -3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, -1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, -1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, -1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, -1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, -0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, 
-3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, -0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, -0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, -0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, -0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, -0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, -0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, -0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, -0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, -0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, -0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, -0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, 
-0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, -0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, -1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, -1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, -2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, -0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, 
-1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, -0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, -) - -Win1255HebrewModel = { - 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, - 'precedence_matrix': HEBREW_LANG_MODEL, - 'typical_positive_ratio': 0.984004, - 'keep_english_letter': False, - 'charset_name': "windows-1255", - 'language': 'Hebrew', -} diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhebrewmodel.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhebrewmodel.pyc deleted file mode 100644 index 6fb270aa..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhebrewmodel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py deleted file mode 100644 index bb7c095e..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin2_HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, -175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, - 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, - 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, -245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -win1250HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, -177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, - 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, - 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, -245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 94.7368% -# first 1024 sequences:5.2623% -# rest sequences: 0.8894% -# negative sequences: 0.0009% -HungarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, -3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, 
-3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, -0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, -1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, -1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, -3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, -2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, -2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, -2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, -2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, -1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, -1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, -3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, -1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, -1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, -2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, -2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, -2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, -3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, -1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, -1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, -1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, -2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, -1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, -2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, -2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, -1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, -1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, -0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, -2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, -2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, -1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, -1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, -2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, -2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, -2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, -1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, 
-2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, -0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -) - -Latin2HungarianModel = { - 'char_to_order_map': Latin2_HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-2", - 'language': 'Hungarian', -} - -Win1250HungarianModel = { - 'char_to_order_map': win1250HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "windows-1250", - 'language': 'Hungarian', -} diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhungarianmodel.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhungarianmodel.pyc deleted file mode 100644 index 0383e0ab..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langhungarianmodel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langthaimodel.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langthaimodel.py deleted file mode 100644 index 15f94c2d..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langthaimodel.py +++ /dev/null @@ -1,199 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# The following result for thai was collected from a limited sample (1M). 
- -# Character Mapping Table: -TIS620CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 -188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 -253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 - 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 -209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, -223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, -236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, - 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, - 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, - 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, - 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, - 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 92.6386% -# first 1024 sequences:7.3177% -# rest sequences: 1.0230% -# negative sequences: 0.0436% -ThaiLangModel = ( -0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, -0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, -3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, -0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, -3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, -3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, -3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, -3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, -2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, -3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, -1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, -3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, -1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, -0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, -0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, -2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, -0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, -3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, -2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, -3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, 
-2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, -3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, -3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, -3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, -3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, -1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, -0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, -0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, -3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, -3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, -1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, -3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, -3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, -0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, -0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, -1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, -1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, -3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, -0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, -3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, -0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, -0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, -0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, -0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, -0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, -0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, -0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, -0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, -3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, -2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, -0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, -3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, -1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, -1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -TIS620ThaiModel = { - 'char_to_order_map': TIS620CharToOrderMap, - 'precedence_matrix': ThaiLangModel, - 'typical_positive_ratio': 0.926386, - 'keep_english_letter': False, - 'charset_name': "TIS-620", - 'language': 'Thai', -} diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langthaimodel.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langthaimodel.pyc deleted file mode 100644 index ca9184f3..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langthaimodel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langturkishmodel.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langturkishmodel.py deleted file mode 100644 index a427a457..00000000 --- 
a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langturkishmodel.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Özgür Baskın - Turkish Language Model -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin5_TurkishCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, - 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, -255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, - 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, -180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, -164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, -150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, - 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, -124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, - 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, - 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, - 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, -) - -TurkishLangModel = ( -3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, -3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, -3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, -3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, -3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, -3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, -2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, -3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, -1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, 
-3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, -3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, -2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, -2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, -3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, -0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, -3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, -3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, -0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, -1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, -3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, -1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, -3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, -0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, -3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, -1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, -1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, -2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, -2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, -3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, -1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, -0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, -3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, -0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, -3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, -1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, -2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, -0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, -3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, -0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, -0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, -3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, -0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, -0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, -3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, -0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, -3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, -0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, -0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, -3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, -0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, -3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, -0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, -0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, -0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, -0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, -0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, -0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, -1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, -0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, -0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, -3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, -0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, -2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, -2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, -0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, -0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin5TurkishModel = { - 'char_to_order_map': Latin5_TurkishCharToOrderMap, - 'precedence_matrix': TurkishLangModel, - 'typical_positive_ratio': 0.970290, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-9", - 'language': 'Turkish', -} diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langturkishmodel.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/langturkishmodel.pyc deleted file mode 100644 index f7d54185..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/langturkishmodel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pyc deleted file mode 100644 index e8bf5370..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcharsetprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcharsetprober.pyc deleted file mode 100644 index f0a1abe9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcharsetprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.pyc deleted file mode 100644 index d78d3f55..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcssm.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcssm.pyc deleted file mode 100644 index 17e559ad..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcssm.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.pyc deleted file mode 100644 index aac2ae0b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py deleted file mode 100644 index 98e95dc1..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py +++ /dev/null @@ -1,73 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. 
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .sbcharsetprober import SingleByteCharSetProber -from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, - Latin5CyrillicModel, MacCyrillicModel, - Ibm866Model, Ibm855Model) -from .langgreekmodel import Latin7GreekModel, Win1253GreekModel -from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel -# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel -from .langthaimodel import TIS620ThaiModel -from .langhebrewmodel import Win1255HebrewModel -from .hebrewprober import HebrewProber -from .langturkishmodel import Latin5TurkishModel - - -class SBCSGroupProber(CharSetGroupProber): - def __init__(self): - super(SBCSGroupProber, self).__init__() - self.probers = [ - SingleByteCharSetProber(Win1251CyrillicModel), - SingleByteCharSetProber(Koi8rModel), - SingleByteCharSetProber(Latin5CyrillicModel), - SingleByteCharSetProber(MacCyrillicModel), - SingleByteCharSetProber(Ibm866Model), - SingleByteCharSetProber(Ibm855Model), - SingleByteCharSetProber(Latin7GreekModel), - SingleByteCharSetProber(Win1253GreekModel), - SingleByteCharSetProber(Latin5BulgarianModel), - SingleByteCharSetProber(Win1251BulgarianModel), - # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) - # after we retrain model. 
- # SingleByteCharSetProber(Latin2HungarianModel), - # SingleByteCharSetProber(Win1250HungarianModel), - SingleByteCharSetProber(TIS620ThaiModel), - SingleByteCharSetProber(Latin5TurkishModel), - ] - hebrew_prober = HebrewProber() - logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, - False, hebrew_prober) - visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, - hebrew_prober) - hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) - self.probers.extend([hebrew_prober, logical_hebrew_prober, - visual_hebrew_prober]) - - self.reset() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.pyc deleted file mode 100644 index 2cd97722..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sjisprober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/sjisprober.pyc deleted file mode 100644 index 5aed9608..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sjisprober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/universaldetector.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/universaldetector.pyc deleted file mode 100644 index ad5f2bc4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/universaldetector.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pyc deleted file mode 100644 index 1beaac88..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/version.py b/env/lib/python2.7/site-packages/pip/_vendor/chardet/version.py deleted file mode 100644 index bb2a34a7..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/version.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -This module exists only to simplify retrieving the version number of chardet -from within setup.py and from chardet subpackages. - -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - -__version__ = "3.0.4" -VERSION = __version__.split('.') diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/version.pyc b/env/lib/python2.7/site-packages/pip/_vendor/chardet/version.pyc deleted file mode 100644 index ef0f3dce..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/chardet/version.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/colorama/__init__.py deleted file mode 100644 index 2a3bf471..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/colorama/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-from .initialise import init, deinit, reinit, colorama_text -from .ansi import Fore, Back, Style, Cursor -from .ansitowin32 import AnsiToWin32 - -__version__ = '0.4.1' diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/colorama/__init__.pyc deleted file mode 100644 index 4c70cdfc..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/colorama/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyc b/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyc deleted file mode 100644 index a140bd74..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyc b/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyc deleted file mode 100644 index b46e3546..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyc b/env/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyc deleted file mode 100644 index 9e36808e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyc b/env/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyc deleted file mode 100644 index 8866483e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pyc b/env/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pyc deleted file mode 100644 index bcbd7a2c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/distlib/__init__.py deleted file mode 100644 index a2d70d47..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012-2019 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. -# See LICENSE.txt and CONTRIBUTORS.txt. 
-# -import logging - -__version__ = '0.2.9.post0' - -class DistlibException(Exception): - pass - -try: - from logging import NullHandler -except ImportError: # pragma: no cover - class NullHandler(logging.Handler): - def handle(self, record): pass - def emit(self, record): pass - def createLock(self): self.lock = None - -logger = logging.getLogger(__name__) -logger.addHandler(NullHandler()) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/__init__.pyc deleted file mode 100644 index 6ace5708..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/__init__.pyc deleted file mode 100644 index f76395fa..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/misc.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/misc.pyc deleted file mode 100644 index 4671a1b8..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/misc.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.pyc deleted file mode 100644 index a2149d3f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py deleted file mode 100644 index 1df3aba1..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py +++ /dev/null @@ -1,788 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. 
-# -"""Access to Python's configuration information.""" - -import codecs -import os -import re -import sys -from os.path import pardir, realpath -try: - import configparser -except ImportError: - import ConfigParser as configparser - - -__all__ = [ - 'get_config_h_filename', - 'get_config_var', - 'get_config_vars', - 'get_makefile_filename', - 'get_path', - 'get_path_names', - 'get_paths', - 'get_platform', - 'get_python_version', - 'get_scheme_names', - 'parse_config_h', -] - - -def _safe_realpath(path): - try: - return realpath(path) - except OSError: - return path - - -if sys.executable: - _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) -else: - # sys.executable can be empty if argv[0] has been changed and Python is - # unable to retrieve the real program name - _PROJECT_BASE = _safe_realpath(os.getcwd()) - -if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): - _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) -# PC/VS7.1 -if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): - _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) -# PC/AMD64 -if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): - _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) - - -def is_python_build(): - for fn in ("Setup.dist", "Setup.local"): - if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): - return True - return False - -_PYTHON_BUILD = is_python_build() - -_cfg_read = False - -def _ensure_cfg_read(): - global _cfg_read - if not _cfg_read: - from ..resources import finder - backport_package = __name__.rsplit('.', 1)[0] - _finder = finder(backport_package) - _cfgfile = _finder.find('sysconfig.cfg') - assert _cfgfile, 'sysconfig.cfg exists' - with _cfgfile.as_stream() as s: - _SCHEMES.readfp(s) - if _PYTHON_BUILD: - for scheme in ('posix_prefix', 'posix_home'): - _SCHEMES.set(scheme, 'include', '{srcdir}/Include') - _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') - - _cfg_read = True - - -_SCHEMES = configparser.RawConfigParser() -_VAR_REPL = re.compile(r'\{([^{]*?)\}') - -def _expand_globals(config): - _ensure_cfg_read() - if config.has_section('globals'): - globals = config.items('globals') - else: - globals = tuple() - - sections = config.sections() - for section in sections: - if section == 'globals': - continue - for option, value in globals: - if config.has_option(section, option): - continue - config.set(section, option, value) - config.remove_section('globals') - - # now expanding local variables defined in the cfg file - # - for section in config.sections(): - variables = dict(config.items(section)) - - def _replacer(matchobj): - name = matchobj.group(1) - if name in variables: - return variables[name] - return matchobj.group(0) - - for option, value in config.items(section): - config.set(section, option, _VAR_REPL.sub(_replacer, value)) - -#_expand_globals(_SCHEMES) - - # FIXME don't rely on sys.version here, its format is an implementation detail - # of CPython, use sys.version_info or sys.hexversion -_PY_VERSION = sys.version.split()[0] -_PY_VERSION_SHORT = sys.version[:3] -_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2] -_PREFIX = os.path.normpath(sys.prefix) -_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) -_CONFIG_VARS = None -_USER_BASE = None - - -def _subst_vars(path, local_vars): - """In the string `path`, replace tokens like {some.thing} with the - corresponding value from the map `local_vars`. 
- - If there is no corresponding value, leave the token unchanged. - """ - def _replacer(matchobj): - name = matchobj.group(1) - if name in local_vars: - return local_vars[name] - elif name in os.environ: - return os.environ[name] - return matchobj.group(0) - return _VAR_REPL.sub(_replacer, path) - - -def _extend_dict(target_dict, other_dict): - target_keys = target_dict.keys() - for key, value in other_dict.items(): - if key in target_keys: - continue - target_dict[key] = value - - -def _expand_vars(scheme, vars): - res = {} - if vars is None: - vars = {} - _extend_dict(vars, get_config_vars()) - - for key, value in _SCHEMES.items(scheme): - if os.name in ('posix', 'nt'): - value = os.path.expanduser(value) - res[key] = os.path.normpath(_subst_vars(value, vars)) - return res - - -def format_value(value, vars): - def _replacer(matchobj): - name = matchobj.group(1) - if name in vars: - return vars[name] - return matchobj.group(0) - return _VAR_REPL.sub(_replacer, value) - - -def _get_default_scheme(): - if os.name == 'posix': - # the default scheme for posix is posix_prefix - return 'posix_prefix' - return os.name - - -def _getuserbase(): - env_base = os.environ.get("PYTHONUSERBASE", None) - - def joinuser(*args): - return os.path.expanduser(os.path.join(*args)) - - # what about 'os2emx', 'riscos' ? - if os.name == "nt": - base = os.environ.get("APPDATA") or "~" - if env_base: - return env_base - else: - return joinuser(base, "Python") - - if sys.platform == "darwin": - framework = get_config_var("PYTHONFRAMEWORK") - if framework: - if env_base: - return env_base - else: - return joinuser("~", "Library", framework, "%d.%d" % - sys.version_info[:2]) - - if env_base: - return env_base - else: - return joinuser("~", ".local") - - -def _parse_makefile(filename, vars=None): - """Parse a Makefile-style file. - - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. - """ - # Regexes needed for parsing Makefile (and similar syntaxes, - # like old-style Setup files). - _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") - _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") - _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") - - if vars is None: - vars = {} - done = {} - notdone = {} - - with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: - lines = f.readlines() - - for line in lines: - if line.startswith('#') or line.strip() == '': - continue - m = _variable_rx.match(line) - if m: - n, v = m.group(1, 2) - v = v.strip() - # `$$' is a literal `$' in make - tmpv = v.replace('$$', '') - - if "$" in tmpv: - notdone[n] = v - else: - try: - v = int(v) - except ValueError: - # insert literal `$' - done[n] = v.replace('$$', '$') - else: - done[n] = v - - # do variable interpolation here - variables = list(notdone.keys()) - - # Variables with a 'PY_' prefix in the makefile. These need to - # be made available without that prefix through sysconfig. - # Special care is needed to ensure that variable expansion works, even - # if the expansion uses the name without a prefix. 
- renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') - - while len(variables) > 0: - for name in tuple(variables): - value = notdone[name] - m = _findvar1_rx.search(value) or _findvar2_rx.search(value) - if m is not None: - n = m.group(1) - found = True - if n in done: - item = str(done[n]) - elif n in notdone: - # get it on a subsequent round - found = False - elif n in os.environ: - # do it like make: fall back to environment - item = os.environ[n] - - elif n in renamed_variables: - if (name.startswith('PY_') and - name[3:] in renamed_variables): - item = "" - - elif 'PY_' + n in notdone: - found = False - - else: - item = str(done['PY_' + n]) - - else: - done[n] = item = "" - - if found: - after = value[m.end():] - value = value[:m.start()] + item + after - if "$" in after: - notdone[name] = value - else: - try: - value = int(value) - except ValueError: - done[name] = value.strip() - else: - done[name] = value - variables.remove(name) - - if (name.startswith('PY_') and - name[3:] in renamed_variables): - - name = name[3:] - if name not in done: - done[name] = value - - else: - # bogus variable reference (e.g. "prefix=$/opt/python"); - # just drop it since we can't deal - done[name] = value - variables.remove(name) - - # strip spurious spaces - for k, v in done.items(): - if isinstance(v, str): - done[k] = v.strip() - - # save the results in the global dictionary - vars.update(done) - return vars - - -def get_makefile_filename(): - """Return the path of the Makefile.""" - if _PYTHON_BUILD: - return os.path.join(_PROJECT_BASE, "Makefile") - if hasattr(sys, 'abiflags'): - config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) - else: - config_dir_name = 'config' - return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') - - -def _init_posix(vars): - """Initialize the module as appropriate for POSIX systems.""" - # load the installed Makefile: - makefile = get_makefile_filename() - try: - _parse_makefile(makefile, vars) - except IOError as e: - msg = "invalid Python installation: unable to open %s" % makefile - if hasattr(e, "strerror"): - msg = msg + " (%s)" % e.strerror - raise IOError(msg) - # load the installed pyconfig.h: - config_h = get_config_h_filename() - try: - with open(config_h) as f: - parse_config_h(f, vars) - except IOError as e: - msg = "invalid Python installation: unable to open %s" % config_h - if hasattr(e, "strerror"): - msg = msg + " (%s)" % e.strerror - raise IOError(msg) - # On AIX, there are wrong paths to the linker scripts in the Makefile - # -- these paths are relative to the Python source, but when installed - # the scripts are in another directory. - if _PYTHON_BUILD: - vars['LDSHARED'] = vars['BLDSHARED'] - - -def _init_non_posix(vars): - """Initialize the module as appropriate for NT""" - # set basic install directories - vars['LIBDEST'] = get_path('stdlib') - vars['BINLIBDEST'] = get_path('platstdlib') - vars['INCLUDEPY'] = get_path('include') - vars['SO'] = '.pyd' - vars['EXE'] = '.exe' - vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT - vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) - -# -# public APIs -# - - -def parse_config_h(fp, vars=None): - """Parse a config.h-style file. - - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. 
- """ - if vars is None: - vars = {} - define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") - undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") - - while True: - line = fp.readline() - if not line: - break - m = define_rx.match(line) - if m: - n, v = m.group(1, 2) - try: - v = int(v) - except ValueError: - pass - vars[n] = v - else: - m = undef_rx.match(line) - if m: - vars[m.group(1)] = 0 - return vars - - -def get_config_h_filename(): - """Return the path of pyconfig.h.""" - if _PYTHON_BUILD: - if os.name == "nt": - inc_dir = os.path.join(_PROJECT_BASE, "PC") - else: - inc_dir = _PROJECT_BASE - else: - inc_dir = get_path('platinclude') - return os.path.join(inc_dir, 'pyconfig.h') - - -def get_scheme_names(): - """Return a tuple containing the schemes names.""" - return tuple(sorted(_SCHEMES.sections())) - - -def get_path_names(): - """Return a tuple containing the paths names.""" - # xxx see if we want a static list - return _SCHEMES.options('posix_prefix') - - -def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): - """Return a mapping containing an install scheme. - - ``scheme`` is the install scheme name. If not provided, it will - return the default scheme for the current platform. - """ - _ensure_cfg_read() - if expand: - return _expand_vars(scheme, vars) - else: - return dict(_SCHEMES.items(scheme)) - - -def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): - """Return a path corresponding to the scheme. - - ``scheme`` is the install scheme name. - """ - return get_paths(scheme, vars, expand)[name] - - -def get_config_vars(*args): - """With no arguments, return a dictionary of all configuration - variables relevant for the current platform. - - On Unix, this means every variable defined in Python's installed Makefile; - On Windows and Mac OS it's a much smaller set. - - With arguments, return a list of values that result from looking up - each argument in the configuration variable dictionary. - """ - global _CONFIG_VARS - if _CONFIG_VARS is None: - _CONFIG_VARS = {} - # Normalized versions of prefix and exec_prefix are handy to have; - # in fact, these are the standard versions used most places in the - # distutils2 module. - _CONFIG_VARS['prefix'] = _PREFIX - _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX - _CONFIG_VARS['py_version'] = _PY_VERSION - _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT - _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] - _CONFIG_VARS['base'] = _PREFIX - _CONFIG_VARS['platbase'] = _EXEC_PREFIX - _CONFIG_VARS['projectbase'] = _PROJECT_BASE - try: - _CONFIG_VARS['abiflags'] = sys.abiflags - except AttributeError: - # sys.abiflags may not be defined on all platforms. - _CONFIG_VARS['abiflags'] = '' - - if os.name in ('nt', 'os2'): - _init_non_posix(_CONFIG_VARS) - if os.name == 'posix': - _init_posix(_CONFIG_VARS) - # Setting 'userbase' is done below the call to the - # init function to enable using 'get_config_var' in - # the init-function. - if sys.version >= '2.6': - _CONFIG_VARS['userbase'] = _getuserbase() - - if 'srcdir' not in _CONFIG_VARS: - _CONFIG_VARS['srcdir'] = _PROJECT_BASE - else: - _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) - - # Convert srcdir into an absolute path if it appears necessary. - # Normally it is relative to the build directory. However, during - # testing, for example, we might be running a non-installed python - # from a different directory. 
- if _PYTHON_BUILD and os.name == "posix": - base = _PROJECT_BASE - try: - cwd = os.getcwd() - except OSError: - cwd = None - if (not os.path.isabs(_CONFIG_VARS['srcdir']) and - base != cwd): - # srcdir is relative and we are not in the same directory - # as the executable. Assume executable is in the build - # directory and make srcdir absolute. - srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) - _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) - - if sys.platform == 'darwin': - kernel_version = os.uname()[2] # Kernel version (8.4.3) - major_version = int(kernel_version.split('.')[0]) - - if major_version < 8: - # On Mac OS X before 10.4, check if -arch and -isysroot - # are in CFLAGS or LDFLAGS and remove them if they are. - # This is needed when building extensions on a 10.3 system - # using a universal build of python. - for key in ('LDFLAGS', 'BASECFLAGS', - # a number of derived variables. These need to be - # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): - flags = _CONFIG_VARS[key] - flags = re.sub(r'-arch\s+\w+\s', ' ', flags) - flags = re.sub('-isysroot [^ \t]*', ' ', flags) - _CONFIG_VARS[key] = flags - else: - # Allow the user to override the architecture flags using - # an environment variable. - # NOTE: This name was introduced by Apple in OSX 10.5 and - # is used by several scripting languages distributed with - # that OS release. - if 'ARCHFLAGS' in os.environ: - arch = os.environ['ARCHFLAGS'] - for key in ('LDFLAGS', 'BASECFLAGS', - # a number of derived variables. These need to be - # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): - - flags = _CONFIG_VARS[key] - flags = re.sub(r'-arch\s+\w+\s', ' ', flags) - flags = flags + ' ' + arch - _CONFIG_VARS[key] = flags - - # If we're on OSX 10.5 or later and the user tries to - # compiles an extension using an SDK that is not present - # on the current machine it is better to not use an SDK - # than to fail. - # - # The major usecase for this is users using a Python.org - # binary installer on OSX 10.6: that installer uses - # the 10.4u SDK, but that SDK is not installed by default - # when you install Xcode. - # - CFLAGS = _CONFIG_VARS.get('CFLAGS', '') - m = re.search(r'-isysroot\s+(\S+)', CFLAGS) - if m is not None: - sdk = m.group(1) - if not os.path.exists(sdk): - for key in ('LDFLAGS', 'BASECFLAGS', - # a number of derived variables. These need to be - # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): - - flags = _CONFIG_VARS[key] - flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) - _CONFIG_VARS[key] = flags - - if args: - vals = [] - for name in args: - vals.append(_CONFIG_VARS.get(name)) - return vals - else: - return _CONFIG_VARS - - -def get_config_var(name): - """Return the value of a single variable using the dictionary returned by - 'get_config_vars()'. - - Equivalent to get_config_vars().get(name) - """ - return get_config_vars().get(name) - - -def get_platform(): - """Return a string that identifies the current platform. - - This is used mainly to distinguish platform-specific build directories and - platform-specific built distributions. Typically includes the OS name - and version and the architecture (as supplied by 'os.uname()'), - although the exact information included depends on the OS; eg. for IRIX - the architecture isn't particularly important (IRIX only runs on SGI - hardware), but for Linux the kernel version isn't particularly - important. - - Examples of returned values: - linux-i586 - linux-alpha (?) 
- solaris-2.6-sun4u - irix-5.3 - irix64-6.2 - - Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win-ia64 (64bit Windows on Itanium) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. - """ - if os.name == 'nt': - # sniff sys.version for architecture. - prefix = " bit (" - i = sys.version.find(prefix) - if i == -1: - return sys.platform - j = sys.version.find(")", i) - look = sys.version[i+len(prefix):j].lower() - if look == 'amd64': - return 'win-amd64' - if look == 'itanium': - return 'win-ia64' - return sys.platform - - if os.name != "posix" or not hasattr(os, 'uname'): - # XXX what about the architecture? NT is Intel or Alpha, - # Mac OS is M68k or PPC, etc. - return sys.platform - - # Try to distinguish various flavours of Unix - osname, host, release, version, machine = os.uname() - - # Convert the OS name to lowercase, remove '/' characters - # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") - osname = osname.lower().replace('/', '') - machine = machine.replace(' ', '_') - machine = machine.replace('/', '-') - - if osname[:5] == "linux": - # At least on Linux/Intel, 'machine' is the processor -- - # i386, etc. - # XXX what about Alpha, SPARC, etc? - return "%s-%s" % (osname, machine) - elif osname[:5] == "sunos": - if release[0] >= "5": # SunOS 5 == Solaris 2 - osname = "solaris" - release = "%d.%s" % (int(release[0]) - 3, release[2:]) - # fall through to standard osname-release-machine representation - elif osname[:4] == "irix": # could be "irix64"! - return "%s-%s" % (osname, release) - elif osname[:3] == "aix": - return "%s-%s.%s" % (osname, version, release) - elif osname[:6] == "cygwin": - osname = "cygwin" - rel_re = re.compile(r'[\d.]+') - m = rel_re.match(release) - if m: - release = m.group() - elif osname[:6] == "darwin": - # - # For our purposes, we'll assume that the system version from - # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set - # to. This makes the compatibility story a bit more sane because the - # machine is going to compile and link as if it were - # MACOSX_DEPLOYMENT_TARGET. - cfgvars = get_config_vars() - macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') - - if True: - # Always calculate the release of the running machine, - # needed to determine if we can build fat binaries or not. - - macrelease = macver - # Get the system version. Reading this plist is a documented - # way to get the system version (see the documentation for - # the Gestalt Manager) - try: - f = open('/System/Library/CoreServices/SystemVersion.plist') - except IOError: - # We're on a plain darwin box, fall back to the default - # behaviour. - pass - else: - try: - m = re.search(r'<key>ProductUserVisibleVersion</key>\s*' - r'<string>(.*?)</string>', f.read()) - finally: - f.close() - if m is not None: - macrelease = '.'.join(m.group(1).split('.')[:2]) - # else: fall back to the default behaviour - - if not macver: - macver = macrelease - - if macver: - release = macver - osname = "macosx" - - if ((macrelease + '.') >= '10.4.' and - '-arch' in get_config_vars().get('CFLAGS', '').strip()): - # The universal build will build fat binaries, but not on - # systems before 10.4 - # - # Try to detect 4-way universal builds, those have machine-type - # 'universal' instead of 'fat'. 
- - machine = 'fat' - cflags = get_config_vars().get('CFLAGS') - - archs = re.findall(r'-arch\s+(\S+)', cflags) - archs = tuple(sorted(set(archs))) - - if len(archs) == 1: - machine = archs[0] - elif archs == ('i386', 'ppc'): - machine = 'fat' - elif archs == ('i386', 'x86_64'): - machine = 'intel' - elif archs == ('i386', 'ppc', 'x86_64'): - machine = 'fat3' - elif archs == ('ppc64', 'x86_64'): - machine = 'fat64' - elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): - machine = 'universal' - else: - raise ValueError( - "Don't know machine value for archs=%r" % (archs,)) - - elif machine == 'i386': - # On OSX the machine type returned by uname is always the - # 32-bit variant, even if the executable architecture is - # the 64-bit variant - if sys.maxsize >= 2**32: - machine = 'x86_64' - - elif machine in ('PowerPC', 'Power_Macintosh'): - # Pick a sane name for the PPC architecture. - # See 'i386' case - if sys.maxsize >= 2**32: - machine = 'ppc64' - else: - machine = 'ppc' - - return "%s-%s-%s" % (osname, release, machine) - - -def get_python_version(): - return _PY_VERSION_SHORT - - -def _print_dict(title, data): - for index, (key, value) in enumerate(sorted(data.items())): - if index == 0: - print('%s: ' % (title)) - print('\t%s = "%s"' % (key, value)) - - -def _main(): - """Display all information sysconfig detains.""" - print('Platform: "%s"' % get_platform()) - print('Python version: "%s"' % get_python_version()) - print('Current installation scheme: "%s"' % _get_default_scheme()) - print() - _print_dict('Paths', get_paths()) - print() - _print_dict('Variables', get_config_vars()) - - -if __name__ == '__main__': - _main() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.pyc deleted file mode 100644 index b53ff203..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyc deleted file mode 100644 index dad23479..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/compat.py b/env/lib/python2.7/site-packages/pip/_vendor/distlib/compat.py deleted file mode 100644 index ff328c8e..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/compat.py +++ /dev/null @@ -1,1120 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2013-2017 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. -# See LICENSE.txt and CONTRIBUTORS.txt. 
-# -from __future__ import absolute_import - -import os -import re -import sys - -try: - import ssl -except ImportError: # pragma: no cover - ssl = None - -if sys.version_info[0] < 3: # pragma: no cover - from StringIO import StringIO - string_types = basestring, - text_type = unicode - from types import FileType as file_type - import __builtin__ as builtins - import ConfigParser as configparser - from ._backport import shutil - from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit - from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, - pathname2url, ContentTooShortError, splittype) - - def quote(s): - if isinstance(s, unicode): - s = s.encode('utf-8') - return _quote(s) - - import urllib2 - from urllib2 import (Request, urlopen, URLError, HTTPError, - HTTPBasicAuthHandler, HTTPPasswordMgr, - HTTPHandler, HTTPRedirectHandler, - build_opener) - if ssl: - from urllib2 import HTTPSHandler - import httplib - import xmlrpclib - import Queue as queue - from HTMLParser import HTMLParser - import htmlentitydefs - raw_input = raw_input - from itertools import ifilter as filter - from itertools import ifilterfalse as filterfalse - - _userprog = None - def splituser(host): - """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" - global _userprog - if _userprog is None: - import re - _userprog = re.compile('^(.*)@(.*)$') - - match = _userprog.match(host) - if match: return match.group(1, 2) - return None, host - -else: # pragma: no cover - from io import StringIO - string_types = str, - text_type = str - from io import TextIOWrapper as file_type - import builtins - import configparser - import shutil - from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote, - unquote, urlsplit, urlunsplit, splittype) - from urllib.request import (urlopen, urlretrieve, Request, url2pathname, - pathname2url, - HTTPBasicAuthHandler, HTTPPasswordMgr, - HTTPHandler, HTTPRedirectHandler, - build_opener) - if ssl: - from urllib.request import HTTPSHandler - from urllib.error import HTTPError, URLError, ContentTooShortError - import http.client as httplib - import urllib.request as urllib2 - import xmlrpc.client as xmlrpclib - import queue - from html.parser import HTMLParser - import html.entities as htmlentitydefs - raw_input = input - from itertools import filterfalse - filter = filter - -try: - from ssl import match_hostname, CertificateError -except ImportError: # pragma: no cover - class CertificateError(ValueError): - pass - - - def _dnsname_match(dn, hostname, max_wildcards=1): - """Matching according to RFC 6125, section 6.4.3 - - http://tools.ietf.org/html/rfc6125#section-6.4.3 - """ - pats = [] - if not dn: - return False - - parts = dn.split('.') - leftmost, remainder = parts[0], parts[1:] - - wildcards = leftmost.count('*') - if wildcards > max_wildcards: - # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survey of established - # policy among SSL implementations showed it to be a - # reasonable choice. - raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) - - # speed up common case w/o wildcards - if not wildcards: - return dn.lower() == hostname.lower() - - # RFC 6125, section 6.4.3, subitem 1. - # The client SHOULD NOT attempt to match a presented identifier in which - # the wildcard character comprises a label other than the left-most label. - if leftmost == '*': - # When '*' is a fragment by itself, it matches a non-empty dotless - # fragment. 
- pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): - # RFC 6125, section 6.4.3, subitem 3. - # The client SHOULD NOT attempt to match a presented identifier - # where the wildcard character is embedded within an A-label or - # U-label of an internationalized domain name. - pats.append(re.escape(leftmost)) - else: - # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) - - # add the remaining fragments, ignore any wildcards - for frag in remainder: - pats.append(re.escape(frag)) - - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) - return pat.match(hostname) - - - def match_hostname(cert, hostname): - """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed, but IP addresses are not accepted for *hostname*. - - CertificateError is raised on failure. On success, the function - returns nothing. - """ - if not cert: - raise ValueError("empty or no certificate, match_hostname needs a " - "SSL socket or SSL context with either " - "CERT_OPTIONAL or CERT_REQUIRED") - dnsnames = [] - san = cert.get('subjectAltName', ()) - for key, value in san: - if key == 'DNS': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if not dnsnames: - # The subject is only checked when there is no dNSName entry - # in subjectAltName - for sub in cert.get('subject', ()): - for key, value in sub: - # XXX according to RFC 2818, the most specific Common Name - # must be used. - if key == 'commonName': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) - elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) - else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") - - -try: - from types import SimpleNamespace as Container -except ImportError: # pragma: no cover - class Container(object): - """ - A generic container for when multiple values need to be returned - """ - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - - -try: - from shutil import which -except ImportError: # pragma: no cover - # Implementation from Python 3.3 - def which(cmd, mode=os.F_OK | os.X_OK, path=None): - """Given a command, mode, and a PATH string, return the path which - conforms to the given mode on the PATH, or None if there is no such - file. - - `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result - of os.environ.get("PATH"), or can be overridden with a custom search - path. - - """ - # Check that a given file can be accessed with the correct mode. - # Additionally check that `file` is not a directory, as on Windows - # directories pass the os.access check. - def _access_check(fn, mode): - return (os.path.exists(fn) and os.access(fn, mode) - and not os.path.isdir(fn)) - - # If we're given a path with a directory part, look it up directly rather - # than referring to PATH directories. This includes checking relative to the - # current directory, e.g. 
./script - if os.path.dirname(cmd): - if _access_check(cmd, mode): - return cmd - return None - - if path is None: - path = os.environ.get("PATH", os.defpath) - if not path: - return None - path = path.split(os.pathsep) - - if sys.platform == "win32": - # The current directory takes precedence on Windows. - if not os.curdir in path: - path.insert(0, os.curdir) - - # PATHEXT is necessary to check on Windows. - pathext = os.environ.get("PATHEXT", "").split(os.pathsep) - # See if the given file matches any of the expected path extensions. - # This will allow us to short circuit when given "python.exe". - # If it does match, only test that one, otherwise we have to try - # others. - if any(cmd.lower().endswith(ext.lower()) for ext in pathext): - files = [cmd] - else: - files = [cmd + ext for ext in pathext] - else: - # On other platforms you don't have things like PATHEXT to tell you - # what file suffixes are executable, so just pass on cmd as-is. - files = [cmd] - - seen = set() - for dir in path: - normdir = os.path.normcase(dir) - if not normdir in seen: - seen.add(normdir) - for thefile in files: - name = os.path.join(dir, thefile) - if _access_check(name, mode): - return name - return None - - -# ZipFile is a context manager in 2.7, but not in 2.6 - -from zipfile import ZipFile as BaseZipFile - -if hasattr(BaseZipFile, '__enter__'): # pragma: no cover - ZipFile = BaseZipFile -else: # pragma: no cover - from zipfile import ZipExtFile as BaseZipExtFile - - class ZipExtFile(BaseZipExtFile): - def __init__(self, base): - self.__dict__.update(base.__dict__) - - def __enter__(self): - return self - - def __exit__(self, *exc_info): - self.close() - # return None, so if an exception occurred, it will propagate - - class ZipFile(BaseZipFile): - def __enter__(self): - return self - - def __exit__(self, *exc_info): - self.close() - # return None, so if an exception occurred, it will propagate - - def open(self, *args, **kwargs): - base = BaseZipFile.open(self, *args, **kwargs) - return ZipExtFile(base) - -try: - from platform import python_implementation -except ImportError: # pragma: no cover - def python_implementation(): - """Return a string identifying the Python implementation.""" - if 'PyPy' in sys.version: - return 'PyPy' - if os.name == 'java': - return 'Jython' - if sys.version.startswith('IronPython'): - return 'IronPython' - return 'CPython' - -try: - import sysconfig -except ImportError: # pragma: no cover - from ._backport import sysconfig - -try: - callable = callable -except NameError: # pragma: no cover - from collections import Callable - - def callable(obj): - return isinstance(obj, Callable) - - -try: - fsencode = os.fsencode - fsdecode = os.fsdecode -except AttributeError: # pragma: no cover - # Issue #99: on some systems (e.g. containerised), - # sys.getfilesystemencoding() returns None, and we need a real value, - # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and - # sys.getfilesystemencoding(): the return value is "the user’s preference - # according to the result of nl_langinfo(CODESET), or None if the - # nl_langinfo(CODESET) failed." 
- _fsencoding = sys.getfilesystemencoding() or 'utf-8' - if _fsencoding == 'mbcs': - _fserrors = 'strict' - else: - _fserrors = 'surrogateescape' - - def fsencode(filename): - if isinstance(filename, bytes): - return filename - elif isinstance(filename, text_type): - return filename.encode(_fsencoding, _fserrors) - else: - raise TypeError("expect bytes or str, not %s" % - type(filename).__name__) - - def fsdecode(filename): - if isinstance(filename, text_type): - return filename - elif isinstance(filename, bytes): - return filename.decode(_fsencoding, _fserrors) - else: - raise TypeError("expect bytes or str, not %s" % - type(filename).__name__) - -try: - from tokenize import detect_encoding -except ImportError: # pragma: no cover - from codecs import BOM_UTF8, lookup - import re - - cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") - - def _get_normal_name(orig_enc): - """Imitates get_normal_name in tokenizer.c.""" - # Only care about the first 12 characters. - enc = orig_enc[:12].lower().replace("_", "-") - if enc == "utf-8" or enc.startswith("utf-8-"): - return "utf-8" - if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ - enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): - return "iso-8859-1" - return orig_enc - - def detect_encoding(readline): - """ - The detect_encoding() function is used to detect the encoding that should - be used to decode a Python source file. It requires one argument, readline, - in the same way as the tokenize() generator. - - It will call readline a maximum of twice, and return the encoding used - (as a string) and a list of any lines (left as bytes) it has read in. - - It detects the encoding from the presence of a utf-8 bom or an encoding - cookie as specified in pep-0263. If both a bom and a cookie are present, - but disagree, a SyntaxError will be raised. If the encoding cookie is an - invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, - 'utf-8-sig' is returned. - - If no encoding is specified, then the default of 'utf-8' will be returned. - """ - try: - filename = readline.__self__.name - except AttributeError: - filename = None - bom_found = False - encoding = None - default = 'utf-8' - def read_or_stop(): - try: - return readline() - except StopIteration: - return b'' - - def find_cookie(line): - try: - # Decode as UTF-8. Either the line is an encoding declaration, - # in which case it should be pure ASCII, or it must be UTF-8 - # per default encoding. 
- line_string = line.decode('utf-8') - except UnicodeDecodeError: - msg = "invalid or missing encoding declaration" - if filename is not None: - msg = '{} for {!r}'.format(msg, filename) - raise SyntaxError(msg) - - matches = cookie_re.findall(line_string) - if not matches: - return None - encoding = _get_normal_name(matches[0]) - try: - codec = lookup(encoding) - except LookupError: - # This behaviour mimics the Python interpreter - if filename is None: - msg = "unknown encoding: " + encoding - else: - msg = "unknown encoding for {!r}: {}".format(filename, - encoding) - raise SyntaxError(msg) - - if bom_found: - if codec.name != 'utf-8': - # This behaviour mimics the Python interpreter - if filename is None: - msg = 'encoding problem: utf-8' - else: - msg = 'encoding problem for {!r}: utf-8'.format(filename) - raise SyntaxError(msg) - encoding += '-sig' - return encoding - - first = read_or_stop() - if first.startswith(BOM_UTF8): - bom_found = True - first = first[3:] - default = 'utf-8-sig' - if not first: - return default, [] - - encoding = find_cookie(first) - if encoding: - return encoding, [first] - - second = read_or_stop() - if not second: - return default, [first] - - encoding = find_cookie(second) - if encoding: - return encoding, [first, second] - - return default, [first, second] - -# For converting & <-> & etc. -try: - from html import escape -except ImportError: - from cgi import escape -if sys.version_info[:2] < (3, 4): - unescape = HTMLParser().unescape -else: - from html import unescape - -try: - from collections import ChainMap -except ImportError: # pragma: no cover - from collections import MutableMapping - - try: - from reprlib import recursive_repr as _recursive_repr - except ImportError: - def _recursive_repr(fillvalue='...'): - ''' - Decorator to make a repr function return fillvalue for a recursive - call - ''' - - def decorating_function(user_function): - repr_running = set() - - def wrapper(self): - key = id(self), get_ident() - if key in repr_running: - return fillvalue - repr_running.add(key) - try: - result = user_function(self) - finally: - repr_running.discard(key) - return result - - # Can't use functools.wraps() here because of bootstrap issues - wrapper.__module__ = getattr(user_function, '__module__') - wrapper.__doc__ = getattr(user_function, '__doc__') - wrapper.__name__ = getattr(user_function, '__name__') - wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) - return wrapper - - return decorating_function - - class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - accessed or updated using the *maps* attribute. There is no other state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - ''' - - def __init__(self, *maps): - '''Initialize a ChainMap by setting *maps* to the given mappings. - If no mappings are provided, a single empty dictionary is used. 
- - ''' - self.maps = list(maps) or [{}] # always at least one map - - def __missing__(self, key): - raise KeyError(key) - - def __getitem__(self, key): - for mapping in self.maps: - try: - return mapping[key] # can't use 'key in mapping' with defaultdict - except KeyError: - pass - return self.__missing__(key) # support subclasses that define __missing__ - - def get(self, key, default=None): - return self[key] if key in self else default - - def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible - - def __iter__(self): - return iter(set().union(*self.maps)) - - def __contains__(self, key): - return any(key in m for m in self.maps) - - def __bool__(self): - return any(self.maps) - - @_recursive_repr() - def __repr__(self): - return '{0.__class__.__name__}({1})'.format( - self, ', '.join(map(repr, self.maps))) - - @classmethod - def fromkeys(cls, iterable, *args): - 'Create a ChainMap with a single dict created from the iterable.' - return cls(dict.fromkeys(iterable, *args)) - - def copy(self): - 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' - return self.__class__(self.maps[0].copy(), *self.maps[1:]) - - __copy__ = copy - - def new_child(self): # like Django's Context.push() - 'New ChainMap with a new dict followed by all previous maps.' - return self.__class__({}, *self.maps) - - @property - def parents(self): # like Django's Context.pop() - 'New ChainMap from maps[1:].' - return self.__class__(*self.maps[1:]) - - def __setitem__(self, key, value): - self.maps[0][key] = value - - def __delitem__(self, key): - try: - del self.maps[0][key] - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def popitem(self): - 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' - try: - return self.maps[0].popitem() - except KeyError: - raise KeyError('No keys found in the first mapping.') - - def pop(self, key, *args): - 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' - try: - return self.maps[0].pop(key, *args) - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def clear(self): - 'Clear maps[0], leaving maps[1:] intact.' - self.maps[0].clear() - -try: - from importlib.util import cache_from_source # Python >= 3.4 -except ImportError: # pragma: no cover - try: - from imp import cache_from_source - except ImportError: # pragma: no cover - def cache_from_source(path, debug_override=None): - assert path.endswith('.py') - if debug_override is None: - debug_override = __debug__ - if debug_override: - suffix = 'c' - else: - suffix = 'o' - return path + suffix - -try: - from collections import OrderedDict -except ImportError: # pragma: no cover -## {{{ http://code.activestate.com/recipes/576693/ (r9) -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. - try: - from thread import get_ident as _get_ident - except ImportError: - from dummy_thread import get_ident as _get_ident - - try: - from _abcoll import KeysView, ValuesView, ItemsView - except ImportError: - pass - - - class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as for regular dictionaries. 
- - # The internal self.__map dictionary maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(self, *args, **kwds): - '''Initialize an ordered dictionary. Signature is the same as for - regular dictionaries, but keyword arguments are not recommended - because their insertion order is arbitrary. - - ''' - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__root = root = [] # sentinel node - root[:] = [root, root, None] - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link which goes at the end of the linked - # list, and the inherited dictionary is updated with the new key/value pair. - if key not in self: - root = self.__root - last = root[0] - last[1] = root[0] = self.__map[key] = [last, root, key] - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which is - # then removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link_prev, link_next, key = self.__map.pop(key) - link_prev[1] = link_next - link_next[0] = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - root = self.__root - curr = root[1] - while curr is not root: - yield curr[2] - curr = curr[1] - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - root = self.__root - curr = root[0] - while curr is not root: - yield curr[2] - curr = curr[0] - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - try: - for node in self.__map.itervalues(): - del node[:] - root = self.__root - root[:] = [root, root, None] - self.__map.clear() - except AttributeError: - pass - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root[0] - link_prev = link[0] - link_prev[1] = root - root[0] = link_prev - else: - link = root[1] - link_next = link[1] - root[1] = link_next - link_next[0] = root - key = link[2] - del self.__map[key] - value = dict.pop(self, key) - return key, value - - # -- the following methods do not depend on the internal structure -- - - def keys(self): - 'od.keys() -> list of keys in od' - return list(self) - - def values(self): - 'od.values() -> list of values in od' - return [self[key] for key in self] - - def items(self): - 'od.items() -> list of (key, value) pairs in od' - return [(key, self[key]) for key in self] - - def iterkeys(self): - 'od.iterkeys() -> an iterator over the keys in od' - return iter(self) - - def itervalues(self): - 'od.itervalues -> an iterator over the values in od' - for k in self: - yield self[k] - - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) items in od' - for k in self: - yield (k, self[k]) - - def update(*args, **kwds): - '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
- - If E is a dict instance, does: for k in E: od[k] = E[k] - If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] - Or if E is an iterable of items, does: for k, v in E: od[k] = v - In either case, this is followed by: for k, v in F.items(): od[k] = v - - ''' - if len(args) > 2: - raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) - elif not args: - raise TypeError('update() takes at least 1 argument (0 given)') - self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] - if isinstance(other, dict): - for key in other: - self[key] = other[key] - elif hasattr(other, 'keys'): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def __repr__(self, _repr_running=None): - 'od.__repr__() <==> repr(od)' - if not _repr_running: _repr_running = {} - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' - _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S - and values equal to v (which defaults to None). - - ''' - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. 
- - ''' - if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - # -- the following methods are only used in Python 2.7 -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) - -try: - from logging.config import BaseConfigurator, valid_ident -except ImportError: # pragma: no cover - IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) - - - def valid_ident(s): - m = IDENTIFIER.match(s) - if not m: - raise ValueError('Not a valid Python identifier: %r' % s) - return True - - - # The ConvertingXXX classes are wrappers around standard Python containers, - # and they serve to convert any suitable values in the container. The - # conversion converts base dicts, lists and tuples to their wrapped - # equivalents, whereas strings which match a conversion format are converted - # appropriately. - # - # Each wrapper should have a configurator attribute holding the actual - # configurator to use for conversion. - - class ConvertingDict(dict): - """A converting dictionary wrapper.""" - - def __getitem__(self, key): - value = dict.__getitem__(self, key) - result = self.configurator.convert(value) - #If the converted value is different, save for next time - if value is not result: - self[key] = result - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - def get(self, key, default=None): - value = dict.get(self, key, default) - result = self.configurator.convert(value) - #If the converted value is different, save for next time - if value is not result: - self[key] = result - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - def pop(self, key, default=None): - value = dict.pop(self, key, default) - result = self.configurator.convert(value) - if value is not result: - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - class ConvertingList(list): - """A converting list wrapper.""" - def __getitem__(self, key): - value = list.__getitem__(self, key) - result = self.configurator.convert(value) - #If the converted value is different, save for next time - if value is not result: - self[key] = result - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - def pop(self, idx=-1): - value = list.pop(self, idx) - result = self.configurator.convert(value) - if value is not result: - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - return result - - class ConvertingTuple(tuple): - """A converting tuple wrapper.""" - def __getitem__(self, key): - value = tuple.__getitem__(self, key) - result = self.configurator.convert(value) - if value is not result: - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - class BaseConfigurator(object): - """ - The configurator base class which defines some useful defaults. 
- """ - - CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$') - - WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') - DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') - INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') - DIGIT_PATTERN = re.compile(r'^\d+$') - - value_converters = { - 'ext' : 'ext_convert', - 'cfg' : 'cfg_convert', - } - - # We might want to use a different one, e.g. importlib - importer = staticmethod(__import__) - - def __init__(self, config): - self.config = ConvertingDict(config) - self.config.configurator = self - - def resolve(self, s): - """ - Resolve strings to objects using standard import and attribute - syntax. - """ - name = s.split('.') - used = name.pop(0) - try: - found = self.importer(used) - for frag in name: - used += '.' + frag - try: - found = getattr(found, frag) - except AttributeError: - self.importer(used) - found = getattr(found, frag) - return found - except ImportError: - e, tb = sys.exc_info()[1:] - v = ValueError('Cannot resolve %r: %s' % (s, e)) - v.__cause__, v.__traceback__ = e, tb - raise v - - def ext_convert(self, value): - """Default converter for the ext:// protocol.""" - return self.resolve(value) - - def cfg_convert(self, value): - """Default converter for the cfg:// protocol.""" - rest = value - m = self.WORD_PATTERN.match(rest) - if m is None: - raise ValueError("Unable to convert %r" % value) - else: - rest = rest[m.end():] - d = self.config[m.groups()[0]] - #print d, rest - while rest: - m = self.DOT_PATTERN.match(rest) - if m: - d = d[m.groups()[0]] - else: - m = self.INDEX_PATTERN.match(rest) - if m: - idx = m.groups()[0] - if not self.DIGIT_PATTERN.match(idx): - d = d[idx] - else: - try: - n = int(idx) # try as number first (most likely) - d = d[n] - except TypeError: - d = d[idx] - if m: - rest = rest[m.end():] - else: - raise ValueError('Unable to convert ' - '%r at %r' % (value, rest)) - #rest should be empty - return d - - def convert(self, value): - """ - Convert values to an appropriate type. dicts, lists and tuples are - replaced by their converting alternatives. Strings are checked to - see if they have a conversion format and are converted if they do. 
- """ - if not isinstance(value, ConvertingDict) and isinstance(value, dict): - value = ConvertingDict(value) - value.configurator = self - elif not isinstance(value, ConvertingList) and isinstance(value, list): - value = ConvertingList(value) - value.configurator = self - elif not isinstance(value, ConvertingTuple) and\ - isinstance(value, tuple): - value = ConvertingTuple(value) - value.configurator = self - elif isinstance(value, string_types): - m = self.CONVERT_PATTERN.match(value) - if m: - d = m.groupdict() - prefix = d['prefix'] - converter = self.value_converters.get(prefix, None) - if converter: - suffix = d['suffix'] - converter = getattr(self, converter) - value = converter(suffix) - return value - - def configure_custom(self, config): - """Configure an object with a user-supplied factory.""" - c = config.pop('()') - if not callable(c): - c = self.resolve(c) - props = config.pop('.', None) - # Check for valid identifiers - kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) - result = c(**kwargs) - if props: - for name, value in props.items(): - setattr(result, name, value) - return result - - def as_tuple(self, value): - """Utility function which converts lists to tuples.""" - if isinstance(value, list): - value = tuple(value) - return value diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyc deleted file mode 100644 index fcd866ab..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/database.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/database.pyc deleted file mode 100644 index e460ec8a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/database.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/index.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/index.pyc deleted file mode 100644 index 4f774acf..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/index.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/locators.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/locators.pyc deleted file mode 100644 index 75982670..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/locators.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/manifest.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/manifest.pyc deleted file mode 100644 index a2222068..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/manifest.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/markers.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/markers.pyc deleted file mode 100644 index 1c7859af..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/markers.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/metadata.py b/env/lib/python2.7/site-packages/pip/_vendor/distlib/metadata.py deleted file mode 100644 index 2d61378e..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/metadata.py +++ /dev/null @@ -1,1096 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -"""Implementation of the Metadata for Python packages PEPs. 
- -Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental). -""" -from __future__ import unicode_literals - -import codecs -from email import message_from_file -import json -import logging -import re - - -from . import DistlibException, __version__ -from .compat import StringIO, string_types, text_type -from .markers import interpret -from .util import extract_by_key, get_extras -from .version import get_scheme, PEP440_VERSION_RE - -logger = logging.getLogger(__name__) - - -class MetadataMissingError(DistlibException): - """A required metadata is missing""" - - -class MetadataConflictError(DistlibException): - """Attempt to read or write metadata fields that are conflictual.""" - - -class MetadataUnrecognizedVersionError(DistlibException): - """Unknown metadata version number.""" - - -class MetadataInvalidError(DistlibException): - """A metadata value is invalid""" - -# public API of this module -__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] - -# Encoding used for the PKG-INFO files -PKG_INFO_ENCODING = 'utf-8' - -# preferred version. Hopefully will be changed -# to 1.2 once PEP 345 is supported everywhere -PKG_INFO_PREFERRED_VERSION = '1.1' - -_LINE_PREFIX_1_2 = re.compile('\n \\|') -_LINE_PREFIX_PRE_1_2 = re.compile('\n ') -_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'License') - -_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'License', 'Classifier', 'Download-URL', 'Obsoletes', - 'Provides', 'Requires') - -_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', - 'Download-URL') - -_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'Maintainer', 'Maintainer-email', 'License', - 'Classifier', 'Download-URL', 'Obsoletes-Dist', - 'Project-URL', 'Provides-Dist', 'Requires-Dist', - 'Requires-Python', 'Requires-External') - -_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', - 'Obsoletes-Dist', 'Requires-External', 'Maintainer', - 'Maintainer-email', 'Project-URL') - -_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'Maintainer', 'Maintainer-email', 'License', - 'Classifier', 'Download-URL', 'Obsoletes-Dist', - 'Project-URL', 'Provides-Dist', 'Requires-Dist', - 'Requires-Python', 'Requires-External', 'Private-Version', - 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', - 'Provides-Extra') - -_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', - 'Setup-Requires-Dist', 'Extension') - -# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in -# the metadata. Include them in the tuple literal below to allow them -# (for now). 
-_566_FIELDS = _426_FIELDS + ('Description-Content-Type', - 'Requires', 'Provides') - -_566_MARKERS = ('Description-Content-Type',) - -_ALL_FIELDS = set() -_ALL_FIELDS.update(_241_FIELDS) -_ALL_FIELDS.update(_314_FIELDS) -_ALL_FIELDS.update(_345_FIELDS) -_ALL_FIELDS.update(_426_FIELDS) -_ALL_FIELDS.update(_566_FIELDS) - -EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') - - -def _version2fieldlist(version): - if version == '1.0': - return _241_FIELDS - elif version == '1.1': - return _314_FIELDS - elif version == '1.2': - return _345_FIELDS - elif version in ('1.3', '2.1'): - return _345_FIELDS + _566_FIELDS - elif version == '2.0': - return _426_FIELDS - raise MetadataUnrecognizedVersionError(version) - - -def _best_version(fields): - """Detect the best version depending on the fields used.""" - def _has_marker(keys, markers): - for marker in markers: - if marker in keys: - return True - return False - - keys = [] - for key, value in fields.items(): - if value in ([], 'UNKNOWN', None): - continue - keys.append(key) - - possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1'] - - # first let's try to see if a field is not part of one of the version - for key in keys: - if key not in _241_FIELDS and '1.0' in possible_versions: - possible_versions.remove('1.0') - logger.debug('Removed 1.0 due to %s', key) - if key not in _314_FIELDS and '1.1' in possible_versions: - possible_versions.remove('1.1') - logger.debug('Removed 1.1 due to %s', key) - if key not in _345_FIELDS and '1.2' in possible_versions: - possible_versions.remove('1.2') - logger.debug('Removed 1.2 due to %s', key) - if key not in _566_FIELDS and '1.3' in possible_versions: - possible_versions.remove('1.3') - logger.debug('Removed 1.3 due to %s', key) - if key not in _566_FIELDS and '2.1' in possible_versions: - if key != 'Description': # In 2.1, description allowed after headers - possible_versions.remove('2.1') - logger.debug('Removed 2.1 due to %s', key) - if key not in _426_FIELDS and '2.0' in possible_versions: - possible_versions.remove('2.0') - logger.debug('Removed 2.0 due to %s', key) - - # possible_version contains qualified versions - if len(possible_versions) == 1: - return possible_versions[0] # found ! 
- elif len(possible_versions) == 0: - logger.debug('Out of options - unknown metadata set: %s', fields) - raise MetadataConflictError('Unknown metadata set') - - # let's see if one unique marker is found - is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) - is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) - is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) - is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) - if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1: - raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields') - - # we have the choice, 1.0, or 1.2, or 2.0 - # - 1.0 has a broken Summary field but works with all tools - # - 1.1 is to avoid - # - 1.2 fixes Summary but has little adoption - # - 2.0 adds more features and is very new - if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: - # we couldn't find any specific marker - if PKG_INFO_PREFERRED_VERSION in possible_versions: - return PKG_INFO_PREFERRED_VERSION - if is_1_1: - return '1.1' - if is_1_2: - return '1.2' - if is_2_1: - return '2.1' - - return '2.0' - -_ATTR2FIELD = { - 'metadata_version': 'Metadata-Version', - 'name': 'Name', - 'version': 'Version', - 'platform': 'Platform', - 'supported_platform': 'Supported-Platform', - 'summary': 'Summary', - 'description': 'Description', - 'keywords': 'Keywords', - 'home_page': 'Home-page', - 'author': 'Author', - 'author_email': 'Author-email', - 'maintainer': 'Maintainer', - 'maintainer_email': 'Maintainer-email', - 'license': 'License', - 'classifier': 'Classifier', - 'download_url': 'Download-URL', - 'obsoletes_dist': 'Obsoletes-Dist', - 'provides_dist': 'Provides-Dist', - 'requires_dist': 'Requires-Dist', - 'setup_requires_dist': 'Setup-Requires-Dist', - 'requires_python': 'Requires-Python', - 'requires_external': 'Requires-External', - 'requires': 'Requires', - 'provides': 'Provides', - 'obsoletes': 'Obsoletes', - 'project_url': 'Project-URL', - 'private_version': 'Private-Version', - 'obsoleted_by': 'Obsoleted-By', - 'extension': 'Extension', - 'provides_extra': 'Provides-Extra', -} - -_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') -_VERSIONS_FIELDS = ('Requires-Python',) -_VERSION_FIELDS = ('Version',) -_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', - 'Requires', 'Provides', 'Obsoletes-Dist', - 'Provides-Dist', 'Requires-Dist', 'Requires-External', - 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', - 'Provides-Extra', 'Extension') -_LISTTUPLEFIELDS = ('Project-URL',) - -_ELEMENTSFIELD = ('Keywords',) - -_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') - -_MISSING = object() - -_FILESAFE = re.compile('[^A-Za-z0-9.]+') - - -def _get_name_and_version(name, version, for_filename=False): - """Return the distribution name with version. - - If for_filename is true, return a filename-escaped form.""" - if for_filename: - # For both name and version any runs of non-alphanumeric or '.' - # characters are replaced with a single '-'. Additionally any - # spaces in the version string become '.' - name = _FILESAFE.sub('-', name) - version = _FILESAFE.sub('-', version.replace(' ', '.')) - return '%s-%s' % (name, version) - - -class LegacyMetadata(object): - """The legacy metadata of a release. - - Supports versions 1.0, 1.1 and 1.2 (auto-detected). 
You can - instantiate the class with one of these arguments (or none): - - *path*, the path to a metadata file - - *fileobj* give a file-like object with metadata as content - - *mapping* is a dict-like object - - *scheme* is a version scheme name - """ - # TODO document the mapping API and UNKNOWN default key - - def __init__(self, path=None, fileobj=None, mapping=None, - scheme='default'): - if [path, fileobj, mapping].count(None) < 2: - raise TypeError('path, fileobj and mapping are exclusive') - self._fields = {} - self.requires_files = [] - self._dependencies = None - self.scheme = scheme - if path is not None: - self.read(path) - elif fileobj is not None: - self.read_file(fileobj) - elif mapping is not None: - self.update(mapping) - self.set_metadata_version() - - def set_metadata_version(self): - self._fields['Metadata-Version'] = _best_version(self._fields) - - def _write_field(self, fileobj, name, value): - fileobj.write('%s: %s\n' % (name, value)) - - def __getitem__(self, name): - return self.get(name) - - def __setitem__(self, name, value): - return self.set(name, value) - - def __delitem__(self, name): - field_name = self._convert_name(name) - try: - del self._fields[field_name] - except KeyError: - raise KeyError(name) - - def __contains__(self, name): - return (name in self._fields or - self._convert_name(name) in self._fields) - - def _convert_name(self, name): - if name in _ALL_FIELDS: - return name - name = name.replace('-', '_').lower() - return _ATTR2FIELD.get(name, name) - - def _default_value(self, name): - if name in _LISTFIELDS or name in _ELEMENTSFIELD: - return [] - return 'UNKNOWN' - - def _remove_line_prefix(self, value): - if self.metadata_version in ('1.0', '1.1'): - return _LINE_PREFIX_PRE_1_2.sub('\n', value) - else: - return _LINE_PREFIX_1_2.sub('\n', value) - - def __getattr__(self, name): - if name in _ATTR2FIELD: - return self[name] - raise AttributeError(name) - - # - # Public API - # - -# dependencies = property(_get_dependencies, _set_dependencies) - - def get_fullname(self, filesafe=False): - """Return the distribution name with version. 
- - If filesafe is true, return a filename-escaped form.""" - return _get_name_and_version(self['Name'], self['Version'], filesafe) - - def is_field(self, name): - """return True if name is a valid metadata key""" - name = self._convert_name(name) - return name in _ALL_FIELDS - - def is_multi_field(self, name): - name = self._convert_name(name) - return name in _LISTFIELDS - - def read(self, filepath): - """Read the metadata values from a file path.""" - fp = codecs.open(filepath, 'r', encoding='utf-8') - try: - self.read_file(fp) - finally: - fp.close() - - def read_file(self, fileob): - """Read the metadata values from a file object.""" - msg = message_from_file(fileob) - self._fields['Metadata-Version'] = msg['metadata-version'] - - # When reading, get all the fields we can - for field in _ALL_FIELDS: - if field not in msg: - continue - if field in _LISTFIELDS: - # we can have multiple lines - values = msg.get_all(field) - if field in _LISTTUPLEFIELDS and values is not None: - values = [tuple(value.split(',')) for value in values] - self.set(field, values) - else: - # single line - value = msg[field] - if value is not None and value != 'UNKNOWN': - self.set(field, value) - # logger.debug('Attempting to set metadata for %s', self) - # self.set_metadata_version() - - def write(self, filepath, skip_unknown=False): - """Write the metadata fields to filepath.""" - fp = codecs.open(filepath, 'w', encoding='utf-8') - try: - self.write_file(fp, skip_unknown) - finally: - fp.close() - - def write_file(self, fileobject, skip_unknown=False): - """Write the PKG-INFO format data to a file object.""" - self.set_metadata_version() - - for field in _version2fieldlist(self['Metadata-Version']): - values = self.get(field) - if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): - continue - if field in _ELEMENTSFIELD: - self._write_field(fileobject, field, ','.join(values)) - continue - if field not in _LISTFIELDS: - if field == 'Description': - if self.metadata_version in ('1.0', '1.1'): - values = values.replace('\n', '\n ') - else: - values = values.replace('\n', '\n |') - values = [values] - - if field in _LISTTUPLEFIELDS: - values = [','.join(value) for value in values] - - for value in values: - self._write_field(fileobject, field, value) - - def update(self, other=None, **kwargs): - """Set metadata values from the given iterable `other` and kwargs. - - Behavior is like `dict.update`: If `other` has a ``keys`` method, - they are looped over and ``self[key]`` is assigned ``other[key]``. - Else, ``other`` is an iterable of ``(key, value)`` iterables. - - Keys that don't match a metadata field or that have an empty value are - dropped. 
- """ - def _set(key, value): - if key in _ATTR2FIELD and value: - self.set(self._convert_name(key), value) - - if not other: - # other is None or empty container - pass - elif hasattr(other, 'keys'): - for k in other.keys(): - _set(k, other[k]) - else: - for k, v in other: - _set(k, v) - - if kwargs: - for k, v in kwargs.items(): - _set(k, v) - - def set(self, name, value): - """Control then set a metadata field.""" - name = self._convert_name(name) - - if ((name in _ELEMENTSFIELD or name == 'Platform') and - not isinstance(value, (list, tuple))): - if isinstance(value, string_types): - value = [v.strip() for v in value.split(',')] - else: - value = [] - elif (name in _LISTFIELDS and - not isinstance(value, (list, tuple))): - if isinstance(value, string_types): - value = [value] - else: - value = [] - - if logger.isEnabledFor(logging.WARNING): - project_name = self['Name'] - - scheme = get_scheme(self.scheme) - if name in _PREDICATE_FIELDS and value is not None: - for v in value: - # check that the values are valid - if not scheme.is_valid_matcher(v.split(';')[0]): - logger.warning( - "'%s': '%s' is not valid (field '%s')", - project_name, v, name) - # FIXME this rejects UNKNOWN, is that right? - elif name in _VERSIONS_FIELDS and value is not None: - if not scheme.is_valid_constraint_list(value): - logger.warning("'%s': '%s' is not a valid version (field '%s')", - project_name, value, name) - elif name in _VERSION_FIELDS and value is not None: - if not scheme.is_valid_version(value): - logger.warning("'%s': '%s' is not a valid version (field '%s')", - project_name, value, name) - - if name in _UNICODEFIELDS: - if name == 'Description': - value = self._remove_line_prefix(value) - - self._fields[name] = value - - def get(self, name, default=_MISSING): - """Get a metadata field.""" - name = self._convert_name(name) - if name not in self._fields: - if default is _MISSING: - default = self._default_value(name) - return default - if name in _UNICODEFIELDS: - value = self._fields[name] - return value - elif name in _LISTFIELDS: - value = self._fields[name] - if value is None: - return [] - res = [] - for val in value: - if name not in _LISTTUPLEFIELDS: - res.append(val) - else: - # That's for Project-URL - res.append((val[0], val[1])) - return res - - elif name in _ELEMENTSFIELD: - value = self._fields[name] - if isinstance(value, string_types): - return value.split(',') - return self._fields[name] - - def check(self, strict=False): - """Check if the metadata is compliant. 
If strict is True then raise if - no Name or Version are provided""" - self.set_metadata_version() - - # XXX should check the versions (if the file was loaded) - missing, warnings = [], [] - - for attr in ('Name', 'Version'): # required by PEP 345 - if attr not in self: - missing.append(attr) - - if strict and missing != []: - msg = 'missing required metadata: %s' % ', '.join(missing) - raise MetadataMissingError(msg) - - for attr in ('Home-page', 'Author'): - if attr not in self: - missing.append(attr) - - # checking metadata 1.2 (XXX needs to check 1.1, 1.0) - if self['Metadata-Version'] != '1.2': - return missing, warnings - - scheme = get_scheme(self.scheme) - - def are_valid_constraints(value): - for v in value: - if not scheme.is_valid_matcher(v.split(';')[0]): - return False - return True - - for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), - (_VERSIONS_FIELDS, - scheme.is_valid_constraint_list), - (_VERSION_FIELDS, - scheme.is_valid_version)): - for field in fields: - value = self.get(field, None) - if value is not None and not controller(value): - warnings.append("Wrong value for '%s': %s" % (field, value)) - - return missing, warnings - - def todict(self, skip_missing=False): - """Return fields as a dict. - - Field names will be converted to use the underscore-lowercase style - instead of hyphen-mixed case (i.e. home_page instead of Home-page). - """ - self.set_metadata_version() - - mapping_1_0 = ( - ('metadata_version', 'Metadata-Version'), - ('name', 'Name'), - ('version', 'Version'), - ('summary', 'Summary'), - ('home_page', 'Home-page'), - ('author', 'Author'), - ('author_email', 'Author-email'), - ('license', 'License'), - ('description', 'Description'), - ('keywords', 'Keywords'), - ('platform', 'Platform'), - ('classifiers', 'Classifier'), - ('download_url', 'Download-URL'), - ) - - data = {} - for key, field_name in mapping_1_0: - if not skip_missing or field_name in self._fields: - data[key] = self[field_name] - - if self['Metadata-Version'] == '1.2': - mapping_1_2 = ( - ('requires_dist', 'Requires-Dist'), - ('requires_python', 'Requires-Python'), - ('requires_external', 'Requires-External'), - ('provides_dist', 'Provides-Dist'), - ('obsoletes_dist', 'Obsoletes-Dist'), - ('project_url', 'Project-URL'), - ('maintainer', 'Maintainer'), - ('maintainer_email', 'Maintainer-email'), - ) - for key, field_name in mapping_1_2: - if not skip_missing or field_name in self._fields: - if key != 'project_url': - data[key] = self[field_name] - else: - data[key] = [','.join(u) for u in self[field_name]] - - elif self['Metadata-Version'] == '1.1': - mapping_1_1 = ( - ('provides', 'Provides'), - ('requires', 'Requires'), - ('obsoletes', 'Obsoletes'), - ) - for key, field_name in mapping_1_1: - if not skip_missing or field_name in self._fields: - data[key] = self[field_name] - - return data - - def add_requirements(self, requirements): - if self['Metadata-Version'] == '1.1': - # we can't have 1.1 metadata *and* Setuptools requires - for field in ('Obsoletes', 'Requires', 'Provides'): - if field in self: - del self[field] - self['Requires-Dist'] += requirements - - # Mapping API - # TODO could add iter* variants - - def keys(self): - return list(_version2fieldlist(self['Metadata-Version'])) - - def __iter__(self): - for key in self.keys(): - yield key - - def values(self): - return [self[key] for key in self.keys()] - - def items(self): - return [(key, self[key]) for key in self.keys()] - - def __repr__(self): - return '<%s %s %s>' % (self.__class__.__name__, 
self.name, - self.version) - - -METADATA_FILENAME = 'pydist.json' -WHEEL_METADATA_FILENAME = 'metadata.json' -LEGACY_METADATA_FILENAME = 'METADATA' - - -class Metadata(object): - """ - The metadata of a release. This implementation uses 2.0 (JSON) - metadata where possible. If not possible, it wraps a LegacyMetadata - instance which handles the key-value metadata format. - """ - - METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') - - NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) - - VERSION_MATCHER = PEP440_VERSION_RE - - SUMMARY_MATCHER = re.compile('.{1,2047}') - - METADATA_VERSION = '2.0' - - GENERATOR = 'distlib (%s)' % __version__ - - MANDATORY_KEYS = { - 'name': (), - 'version': (), - 'summary': ('legacy',), - } - - INDEX_KEYS = ('name version license summary description author ' - 'author_email keywords platform home_page classifiers ' - 'download_url') - - DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' - 'dev_requires provides meta_requires obsoleted_by ' - 'supports_environments') - - SYNTAX_VALIDATORS = { - 'metadata_version': (METADATA_VERSION_MATCHER, ()), - 'name': (NAME_MATCHER, ('legacy',)), - 'version': (VERSION_MATCHER, ('legacy',)), - 'summary': (SUMMARY_MATCHER, ('legacy',)), - } - - __slots__ = ('_legacy', '_data', 'scheme') - - def __init__(self, path=None, fileobj=None, mapping=None, - scheme='default'): - if [path, fileobj, mapping].count(None) < 2: - raise TypeError('path, fileobj and mapping are exclusive') - self._legacy = None - self._data = None - self.scheme = scheme - #import pdb; pdb.set_trace() - if mapping is not None: - try: - self._validate_mapping(mapping, scheme) - self._data = mapping - except MetadataUnrecognizedVersionError: - self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) - self.validate() - else: - data = None - if path: - with open(path, 'rb') as f: - data = f.read() - elif fileobj: - data = fileobj.read() - if data is None: - # Initialised with no args - to be added - self._data = { - 'metadata_version': self.METADATA_VERSION, - 'generator': self.GENERATOR, - } - else: - if not isinstance(data, text_type): - data = data.decode('utf-8') - try: - self._data = json.loads(data) - self._validate_mapping(self._data, scheme) - except ValueError: - # Note: MetadataUnrecognizedVersionError does not - # inherit from ValueError (it's a DistlibException, - # which should not inherit from ValueError). 
- # The ValueError comes from the json.load - if that - # succeeds and we get a validation error, we want - # that to propagate - self._legacy = LegacyMetadata(fileobj=StringIO(data), - scheme=scheme) - self.validate() - - common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) - - none_list = (None, list) - none_dict = (None, dict) - - mapped_keys = { - 'run_requires': ('Requires-Dist', list), - 'build_requires': ('Setup-Requires-Dist', list), - 'dev_requires': none_list, - 'test_requires': none_list, - 'meta_requires': none_list, - 'extras': ('Provides-Extra', list), - 'modules': none_list, - 'namespaces': none_list, - 'exports': none_dict, - 'commands': none_dict, - 'classifiers': ('Classifier', list), - 'source_url': ('Download-URL', None), - 'metadata_version': ('Metadata-Version', None), - } - - del none_list, none_dict - - def __getattribute__(self, key): - common = object.__getattribute__(self, 'common_keys') - mapped = object.__getattribute__(self, 'mapped_keys') - if key in mapped: - lk, maker = mapped[key] - if self._legacy: - if lk is None: - result = None if maker is None else maker() - else: - result = self._legacy.get(lk) - else: - value = None if maker is None else maker() - if key not in ('commands', 'exports', 'modules', 'namespaces', - 'classifiers'): - result = self._data.get(key, value) - else: - # special cases for PEP 459 - sentinel = object() - result = sentinel - d = self._data.get('extensions') - if d: - if key == 'commands': - result = d.get('python.commands', value) - elif key == 'classifiers': - d = d.get('python.details') - if d: - result = d.get(key, value) - else: - d = d.get('python.exports') - if not d: - d = self._data.get('python.exports') - if d: - result = d.get(key, value) - if result is sentinel: - result = value - elif key not in common: - result = object.__getattribute__(self, key) - elif self._legacy: - result = self._legacy.get(key) - else: - result = self._data.get(key) - return result - - def _validate_value(self, key, value, scheme=None): - if key in self.SYNTAX_VALIDATORS: - pattern, exclusions = self.SYNTAX_VALIDATORS[key] - if (scheme or self.scheme) not in exclusions: - m = pattern.match(value) - if not m: - raise MetadataInvalidError("'%s' is an invalid value for " - "the '%s' property" % (value, - key)) - - def __setattr__(self, key, value): - self._validate_value(key, value) - common = object.__getattribute__(self, 'common_keys') - mapped = object.__getattribute__(self, 'mapped_keys') - if key in mapped: - lk, _ = mapped[key] - if self._legacy: - if lk is None: - raise NotImplementedError - self._legacy[lk] = value - elif key not in ('commands', 'exports', 'modules', 'namespaces', - 'classifiers'): - self._data[key] = value - else: - # special cases for PEP 459 - d = self._data.setdefault('extensions', {}) - if key == 'commands': - d['python.commands'] = value - elif key == 'classifiers': - d = d.setdefault('python.details', {}) - d[key] = value - else: - d = d.setdefault('python.exports', {}) - d[key] = value - elif key not in common: - object.__setattr__(self, key, value) - else: - if key == 'keywords': - if isinstance(value, string_types): - value = value.strip() - if value: - value = value.split() - else: - value = [] - if self._legacy: - self._legacy[key] = value - else: - self._data[key] = value - - @property - def name_and_version(self): - return _get_name_and_version(self.name, self.version, True) - - @property - def provides(self): - if self._legacy: - result = self._legacy['Provides-Dist'] - else: - result = 
self._data.setdefault('provides', []) - s = '%s (%s)' % (self.name, self.version) - if s not in result: - result.append(s) - return result - - @provides.setter - def provides(self, value): - if self._legacy: - self._legacy['Provides-Dist'] = value - else: - self._data['provides'] = value - - def get_requirements(self, reqts, extras=None, env=None): - """ - Base method to get dependencies, given a set of extras - to satisfy and an optional environment context. - :param reqts: A list of sometimes-wanted dependencies, - perhaps dependent on extras and environment. - :param extras: A list of optional components being requested. - :param env: An optional environment for marker evaluation. - """ - if self._legacy: - result = reqts - else: - result = [] - extras = get_extras(extras or [], self.extras) - for d in reqts: - if 'extra' not in d and 'environment' not in d: - # unconditional - include = True - else: - if 'extra' not in d: - # Not extra-dependent - only environment-dependent - include = True - else: - include = d.get('extra') in extras - if include: - # Not excluded because of extras, check environment - marker = d.get('environment') - if marker: - include = interpret(marker, env) - if include: - result.extend(d['requires']) - for key in ('build', 'dev', 'test'): - e = ':%s:' % key - if e in extras: - extras.remove(e) - # A recursive call, but it should terminate since 'test' - # has been removed from the extras - reqts = self._data.get('%s_requires' % key, []) - result.extend(self.get_requirements(reqts, extras=extras, - env=env)) - return result - - @property - def dictionary(self): - if self._legacy: - return self._from_legacy() - return self._data - - @property - def dependencies(self): - if self._legacy: - raise NotImplementedError - else: - return extract_by_key(self._data, self.DEPENDENCY_KEYS) - - @dependencies.setter - def dependencies(self, value): - if self._legacy: - raise NotImplementedError - else: - self._data.update(value) - - def _validate_mapping(self, mapping, scheme): - if mapping.get('metadata_version') != self.METADATA_VERSION: - raise MetadataUnrecognizedVersionError() - missing = [] - for key, exclusions in self.MANDATORY_KEYS.items(): - if key not in mapping: - if scheme not in exclusions: - missing.append(key) - if missing: - msg = 'Missing metadata items: %s' % ', '.join(missing) - raise MetadataMissingError(msg) - for k, v in mapping.items(): - self._validate_value(k, v, scheme) - - def validate(self): - if self._legacy: - missing, warnings = self._legacy.check(True) - if missing or warnings: - logger.warning('Metadata: missing: %s, warnings: %s', - missing, warnings) - else: - self._validate_mapping(self._data, self.scheme) - - def todict(self): - if self._legacy: - return self._legacy.todict(True) - else: - result = extract_by_key(self._data, self.INDEX_KEYS) - return result - - def _from_legacy(self): - assert self._legacy and not self._data - result = { - 'metadata_version': self.METADATA_VERSION, - 'generator': self.GENERATOR, - } - lmd = self._legacy.todict(True) # skip missing ones - for k in ('name', 'version', 'license', 'summary', 'description', - 'classifier'): - if k in lmd: - if k == 'classifier': - nk = 'classifiers' - else: - nk = k - result[nk] = lmd[k] - kw = lmd.get('Keywords', []) - if kw == ['']: - kw = [] - result['keywords'] = kw - keys = (('requires_dist', 'run_requires'), - ('setup_requires_dist', 'build_requires')) - for ok, nk in keys: - if ok in lmd and lmd[ok]: - result[nk] = [{'requires': lmd[ok]}] - result['provides'] = 
self.provides - author = {} - maintainer = {} - return result - - LEGACY_MAPPING = { - 'name': 'Name', - 'version': 'Version', - 'license': 'License', - 'summary': 'Summary', - 'description': 'Description', - 'classifiers': 'Classifier', - } - - def _to_legacy(self): - def process_entries(entries): - reqts = set() - for e in entries: - extra = e.get('extra') - env = e.get('environment') - rlist = e['requires'] - for r in rlist: - if not env and not extra: - reqts.add(r) - else: - marker = '' - if extra: - marker = 'extra == "%s"' % extra - if env: - if marker: - marker = '(%s) and %s' % (env, marker) - else: - marker = env - reqts.add(';'.join((r, marker))) - return reqts - - assert self._data and not self._legacy - result = LegacyMetadata() - nmd = self._data - for nk, ok in self.LEGACY_MAPPING.items(): - if nk in nmd: - result[ok] = nmd[nk] - r1 = process_entries(self.run_requires + self.meta_requires) - r2 = process_entries(self.build_requires + self.dev_requires) - if self.extras: - result['Provides-Extra'] = sorted(self.extras) - result['Requires-Dist'] = sorted(r1) - result['Setup-Requires-Dist'] = sorted(r2) - # TODO: other fields such as contacts - return result - - def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): - if [path, fileobj].count(None) != 1: - raise ValueError('Exactly one of path and fileobj is needed') - self.validate() - if legacy: - if self._legacy: - legacy_md = self._legacy - else: - legacy_md = self._to_legacy() - if path: - legacy_md.write(path, skip_unknown=skip_unknown) - else: - legacy_md.write_file(fileobj, skip_unknown=skip_unknown) - else: - if self._legacy: - d = self._from_legacy() - else: - d = self._data - if fileobj: - json.dump(d, fileobj, ensure_ascii=True, indent=2, - sort_keys=True) - else: - with codecs.open(path, 'w', 'utf-8') as f: - json.dump(d, f, ensure_ascii=True, indent=2, - sort_keys=True) - - def add_requirements(self, requirements): - if self._legacy: - self._legacy.add_requirements(requirements) - else: - run_requires = self._data.setdefault('run_requires', []) - always = None - for entry in run_requires: - if 'environment' not in entry and 'extra' not in entry: - always = entry - break - if always is None: - always = { 'requires': requirements } - run_requires.insert(0, always) - else: - rset = set(always['requires']) | set(requirements) - always['requires'] = sorted(rset) - - def __repr__(self): - name = self.name or '(no name)' - version = self.version or 'no version' - return '<%s %s %s (%s)>' % (self.__class__.__name__, - self.metadata_version, name, version) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/metadata.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/metadata.pyc deleted file mode 100644 index 9ecedd75..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/resources.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/resources.pyc deleted file mode 100644 index 56ccc024..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/resources.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/scripts.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/scripts.pyc deleted file mode 100644 index bc926bfb..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/scripts.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/t32.exe 
b/env/lib/python2.7/site-packages/pip/_vendor/distlib/t32.exe deleted file mode 100644 index 5d5bce1f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/t32.exe and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/t64.exe b/env/lib/python2.7/site-packages/pip/_vendor/distlib/t64.exe deleted file mode 100644 index 039ce441..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/t64.exe and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/util.py b/env/lib/python2.7/site-packages/pip/_vendor/distlib/util.py deleted file mode 100644 index e851146c..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/util.py +++ /dev/null @@ -1,1760 +0,0 @@ -# -# Copyright (C) 2012-2017 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -import codecs -from collections import deque -import contextlib -import csv -from glob import iglob as std_iglob -import io -import json -import logging -import os -import py_compile -import re -import socket -try: - import ssl -except ImportError: # pragma: no cover - ssl = None -import subprocess -import sys -import tarfile -import tempfile -import textwrap - -try: - import threading -except ImportError: # pragma: no cover - import dummy_threading as threading -import time - -from . import DistlibException -from .compat import (string_types, text_type, shutil, raw_input, StringIO, - cache_from_source, urlopen, urljoin, httplib, xmlrpclib, - splittype, HTTPHandler, BaseConfigurator, valid_ident, - Container, configparser, URLError, ZipFile, fsdecode, - unquote, urlparse) - -logger = logging.getLogger(__name__) - -# -# Requirement parsing code as per PEP 508 -# - -IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') -VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') -COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') -MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') -OR = re.compile(r'^or\b\s*') -AND = re.compile(r'^and\b\s*') -NON_SPACE = re.compile(r'(\S+)\s*') -STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') - - -def parse_marker(marker_string): - """ - Parse a marker string and return a dictionary containing a marker expression. - - The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in - the expression grammar, or strings. A string contained in quotes is to be - interpreted as a literal string, and a string not contained in quotes is a - variable (such as os_name). 
- """ - def marker_var(remaining): - # either identifier, or literal string - m = IDENTIFIER.match(remaining) - if m: - result = m.groups()[0] - remaining = remaining[m.end():] - elif not remaining: - raise SyntaxError('unexpected end of input') - else: - q = remaining[0] - if q not in '\'"': - raise SyntaxError('invalid expression: %s' % remaining) - oq = '\'"'.replace(q, '') - remaining = remaining[1:] - parts = [q] - while remaining: - # either a string chunk, or oq, or q to terminate - if remaining[0] == q: - break - elif remaining[0] == oq: - parts.append(oq) - remaining = remaining[1:] - else: - m = STRING_CHUNK.match(remaining) - if not m: - raise SyntaxError('error in string literal: %s' % remaining) - parts.append(m.groups()[0]) - remaining = remaining[m.end():] - else: - s = ''.join(parts) - raise SyntaxError('unterminated string: %s' % s) - parts.append(q) - result = ''.join(parts) - remaining = remaining[1:].lstrip() # skip past closing quote - return result, remaining - - def marker_expr(remaining): - if remaining and remaining[0] == '(': - result, remaining = marker(remaining[1:].lstrip()) - if remaining[0] != ')': - raise SyntaxError('unterminated parenthesis: %s' % remaining) - remaining = remaining[1:].lstrip() - else: - lhs, remaining = marker_var(remaining) - while remaining: - m = MARKER_OP.match(remaining) - if not m: - break - op = m.groups()[0] - remaining = remaining[m.end():] - rhs, remaining = marker_var(remaining) - lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} - result = lhs - return result, remaining - - def marker_and(remaining): - lhs, remaining = marker_expr(remaining) - while remaining: - m = AND.match(remaining) - if not m: - break - remaining = remaining[m.end():] - rhs, remaining = marker_expr(remaining) - lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} - return lhs, remaining - - def marker(remaining): - lhs, remaining = marker_and(remaining) - while remaining: - m = OR.match(remaining) - if not m: - break - remaining = remaining[m.end():] - rhs, remaining = marker_and(remaining) - lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} - return lhs, remaining - - return marker(marker_string) - - -def parse_requirement(req): - """ - Parse a requirement passed in as a string. Return a Container - whose attributes contain the various parts of the requirement. - """ - remaining = req.strip() - if not remaining or remaining.startswith('#'): - return None - m = IDENTIFIER.match(remaining) - if not m: - raise SyntaxError('name expected: %s' % remaining) - distname = m.groups()[0] - remaining = remaining[m.end():] - extras = mark_expr = versions = uri = None - if remaining and remaining[0] == '[': - i = remaining.find(']', 1) - if i < 0: - raise SyntaxError('unterminated extra: %s' % remaining) - s = remaining[1:i] - remaining = remaining[i + 1:].lstrip() - extras = [] - while s: - m = IDENTIFIER.match(s) - if not m: - raise SyntaxError('malformed extra: %s' % s) - extras.append(m.groups()[0]) - s = s[m.end():] - if not s: - break - if s[0] != ',': - raise SyntaxError('comma expected in extras: %s' % s) - s = s[1:].lstrip() - if not extras: - extras = None - if remaining: - if remaining[0] == '@': - # it's a URI - remaining = remaining[1:].lstrip() - m = NON_SPACE.match(remaining) - if not m: - raise SyntaxError('invalid URI: %s' % remaining) - uri = m.groups()[0] - t = urlparse(uri) - # there are issues with Python and URL parsing, so this test - # is a bit crude. See bpo-20271, bpo-23505. 
Python doesn't - # always parse invalid URLs correctly - it should raise - # exceptions for malformed URLs - if not (t.scheme and t.netloc): - raise SyntaxError('Invalid URL: %s' % uri) - remaining = remaining[m.end():].lstrip() - else: - - def get_versions(ver_remaining): - """ - Return a list of operator, version tuples if any are - specified, else None. - """ - m = COMPARE_OP.match(ver_remaining) - versions = None - if m: - versions = [] - while True: - op = m.groups()[0] - ver_remaining = ver_remaining[m.end():] - m = VERSION_IDENTIFIER.match(ver_remaining) - if not m: - raise SyntaxError('invalid version: %s' % ver_remaining) - v = m.groups()[0] - versions.append((op, v)) - ver_remaining = ver_remaining[m.end():] - if not ver_remaining or ver_remaining[0] != ',': - break - ver_remaining = ver_remaining[1:].lstrip() - m = COMPARE_OP.match(ver_remaining) - if not m: - raise SyntaxError('invalid constraint: %s' % ver_remaining) - if not versions: - versions = None - return versions, ver_remaining - - if remaining[0] != '(': - versions, remaining = get_versions(remaining) - else: - i = remaining.find(')', 1) - if i < 0: - raise SyntaxError('unterminated parenthesis: %s' % remaining) - s = remaining[1:i] - remaining = remaining[i + 1:].lstrip() - # As a special diversion from PEP 508, allow a version number - # a.b.c in parentheses as a synonym for ~= a.b.c (because this - # is allowed in earlier PEPs) - if COMPARE_OP.match(s): - versions, _ = get_versions(s) - else: - m = VERSION_IDENTIFIER.match(s) - if not m: - raise SyntaxError('invalid constraint: %s' % s) - v = m.groups()[0] - s = s[m.end():].lstrip() - if s: - raise SyntaxError('invalid constraint: %s' % s) - versions = [('~=', v)] - - if remaining: - if remaining[0] != ';': - raise SyntaxError('invalid requirement: %s' % remaining) - remaining = remaining[1:].lstrip() - - mark_expr, remaining = parse_marker(remaining) - - if remaining and remaining[0] != '#': - raise SyntaxError('unexpected trailing data: %s' % remaining) - - if not versions: - rs = distname - else: - rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) - return Container(name=distname, extras=extras, constraints=versions, - marker=mark_expr, url=uri, requirement=rs) - - -def get_resources_dests(resources_root, rules): - """Find destinations for resources files""" - - def get_rel_path(root, path): - # normalizes and returns a lstripped-/-separated path - root = root.replace(os.path.sep, '/') - path = path.replace(os.path.sep, '/') - assert path.startswith(root) - return path[len(root):].lstrip('/') - - destinations = {} - for base, suffix, dest in rules: - prefix = os.path.join(resources_root, base) - for abs_base in iglob(prefix): - abs_glob = os.path.join(abs_base, suffix) - for abs_path in iglob(abs_glob): - resource_file = get_rel_path(resources_root, abs_path) - if dest is None: # remove the entry if it was here - destinations.pop(resource_file, None) - else: - rel_path = get_rel_path(abs_base, abs_path) - rel_dest = dest.replace(os.path.sep, '/').rstrip('/') - destinations[resource_file] = rel_dest + '/' + rel_path - return destinations - - -def in_venv(): - if hasattr(sys, 'real_prefix'): - # virtualenv venvs - result = True - else: - # PEP 405 venvs - result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) - return result - - -def get_executable(): -# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as -# changes to the stub launcher mean that sys.executable always points -# to the stub on OS X -# if sys.platform == 
'darwin' and ('__PYVENV_LAUNCHER__' -# in os.environ): -# result = os.environ['__PYVENV_LAUNCHER__'] -# else: -# result = sys.executable -# return result - result = os.path.normcase(sys.executable) - if not isinstance(result, text_type): - result = fsdecode(result) - return result - - -def proceed(prompt, allowed_chars, error_prompt=None, default=None): - p = prompt - while True: - s = raw_input(p) - p = prompt - if not s and default: - s = default - if s: - c = s[0].lower() - if c in allowed_chars: - break - if error_prompt: - p = '%c: %s\n%s' % (c, error_prompt, prompt) - return c - - -def extract_by_key(d, keys): - if isinstance(keys, string_types): - keys = keys.split() - result = {} - for key in keys: - if key in d: - result[key] = d[key] - return result - -def read_exports(stream): - if sys.version_info[0] >= 3: - # needs to be a text stream - stream = codecs.getreader('utf-8')(stream) - # Try to load as JSON, falling back on legacy format - data = stream.read() - stream = StringIO(data) - try: - jdata = json.load(stream) - result = jdata['extensions']['python.exports']['exports'] - for group, entries in result.items(): - for k, v in entries.items(): - s = '%s = %s' % (k, v) - entry = get_export_entry(s) - assert entry is not None - entries[k] = entry - return result - except Exception: - stream.seek(0, 0) - - def read_stream(cp, stream): - if hasattr(cp, 'read_file'): - cp.read_file(stream) - else: - cp.readfp(stream) - - cp = configparser.ConfigParser() - try: - read_stream(cp, stream) - except configparser.MissingSectionHeaderError: - stream.close() - data = textwrap.dedent(data) - stream = StringIO(data) - read_stream(cp, stream) - - result = {} - for key in cp.sections(): - result[key] = entries = {} - for name, value in cp.items(key): - s = '%s = %s' % (name, value) - entry = get_export_entry(s) - assert entry is not None - #entry.dist = self - entries[name] = entry - return result - - -def write_exports(exports, stream): - if sys.version_info[0] >= 3: - # needs to be a text stream - stream = codecs.getwriter('utf-8')(stream) - cp = configparser.ConfigParser() - for k, v in exports.items(): - # TODO check k, v for valid values - cp.add_section(k) - for entry in v.values(): - if entry.suffix is None: - s = entry.prefix - else: - s = '%s:%s' % (entry.prefix, entry.suffix) - if entry.flags: - s = '%s [%s]' % (s, ', '.join(entry.flags)) - cp.set(k, entry.name, s) - cp.write(stream) - - -@contextlib.contextmanager -def tempdir(): - td = tempfile.mkdtemp() - try: - yield td - finally: - shutil.rmtree(td) - -@contextlib.contextmanager -def chdir(d): - cwd = os.getcwd() - try: - os.chdir(d) - yield - finally: - os.chdir(cwd) - - -@contextlib.contextmanager -def socket_timeout(seconds=15): - cto = socket.getdefaulttimeout() - try: - socket.setdefaulttimeout(seconds) - yield - finally: - socket.setdefaulttimeout(cto) - - -class cached_property(object): - def __init__(self, func): - self.func = func - #for attr in ('__name__', '__module__', '__doc__'): - # setattr(self, attr, getattr(func, attr, None)) - - def __get__(self, obj, cls=None): - if obj is None: - return self - value = self.func(obj) - object.__setattr__(obj, self.func.__name__, value) - #obj.__dict__[self.func.__name__] = value = self.func(obj) - return value - -def convert_path(pathname): - """Return 'pathname' as a name that will work on the native filesystem. - - The path is split on '/' and put back together again using the current - directory separator. 
Needed because filenames in the setup script are - always supplied in Unix style, and have to be converted to the local - convention before we can actually use them in the filesystem. Raises - ValueError on non-Unix-ish systems if 'pathname' either starts or - ends with a slash. - """ - if os.sep == '/': - return pathname - if not pathname: - return pathname - if pathname[0] == '/': - raise ValueError("path '%s' cannot be absolute" % pathname) - if pathname[-1] == '/': - raise ValueError("path '%s' cannot end with '/'" % pathname) - - paths = pathname.split('/') - while os.curdir in paths: - paths.remove(os.curdir) - if not paths: - return os.curdir - return os.path.join(*paths) - - -class FileOperator(object): - def __init__(self, dry_run=False): - self.dry_run = dry_run - self.ensured = set() - self._init_record() - - def _init_record(self): - self.record = False - self.files_written = set() - self.dirs_created = set() - - def record_as_written(self, path): - if self.record: - self.files_written.add(path) - - def newer(self, source, target): - """Tell if the target is newer than the source. - - Returns true if 'source' exists and is more recently modified than - 'target', or if 'source' exists and 'target' doesn't. - - Returns false if both exist and 'target' is the same age or younger - than 'source'. Raise PackagingFileError if 'source' does not exist. - - Note that this test is not very accurate: files created in the same - second will have the same "age". - """ - if not os.path.exists(source): - raise DistlibException("file '%r' does not exist" % - os.path.abspath(source)) - if not os.path.exists(target): - return True - - return os.stat(source).st_mtime > os.stat(target).st_mtime - - def copy_file(self, infile, outfile, check=True): - """Copy a file respecting dry-run and force flags. - """ - self.ensure_dir(os.path.dirname(outfile)) - logger.info('Copying %s to %s', infile, outfile) - if not self.dry_run: - msg = None - if check: - if os.path.islink(outfile): - msg = '%s is a symlink' % outfile - elif os.path.exists(outfile) and not os.path.isfile(outfile): - msg = '%s is a non-regular file' % outfile - if msg: - raise ValueError(msg + ' which would be overwritten') - shutil.copyfile(infile, outfile) - self.record_as_written(outfile) - - def copy_stream(self, instream, outfile, encoding=None): - assert not os.path.isdir(outfile) - self.ensure_dir(os.path.dirname(outfile)) - logger.info('Copying stream %s to %s', instream, outfile) - if not self.dry_run: - if encoding is None: - outstream = open(outfile, 'wb') - else: - outstream = codecs.open(outfile, 'w', encoding=encoding) - try: - shutil.copyfileobj(instream, outstream) - finally: - outstream.close() - self.record_as_written(outfile) - - def write_binary_file(self, path, data): - self.ensure_dir(os.path.dirname(path)) - if not self.dry_run: - if os.path.exists(path): - os.remove(path) - with open(path, 'wb') as f: - f.write(data) - self.record_as_written(path) - - def write_text_file(self, path, data, encoding): - self.write_binary_file(path, data.encode(encoding)) - - def set_mode(self, bits, mask, files): - if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): - # Set the executable bits (owner, group, and world) on - # all the files specified. 
- for f in files: - if self.dry_run: - logger.info("changing mode of %s", f) - else: - mode = (os.stat(f).st_mode | bits) & mask - logger.info("changing mode of %s to %o", f, mode) - os.chmod(f, mode) - - set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) - - def ensure_dir(self, path): - path = os.path.abspath(path) - if path not in self.ensured and not os.path.exists(path): - self.ensured.add(path) - d, f = os.path.split(path) - self.ensure_dir(d) - logger.info('Creating %s' % path) - if not self.dry_run: - os.mkdir(path) - if self.record: - self.dirs_created.add(path) - - def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): - dpath = cache_from_source(path, not optimize) - logger.info('Byte-compiling %s to %s', path, dpath) - if not self.dry_run: - if force or self.newer(path, dpath): - if not prefix: - diagpath = None - else: - assert path.startswith(prefix) - diagpath = path[len(prefix):] - compile_kwargs = {} - if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): - compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH - py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error - self.record_as_written(dpath) - return dpath - - def ensure_removed(self, path): - if os.path.exists(path): - if os.path.isdir(path) and not os.path.islink(path): - logger.debug('Removing directory tree at %s', path) - if not self.dry_run: - shutil.rmtree(path) - if self.record: - if path in self.dirs_created: - self.dirs_created.remove(path) - else: - if os.path.islink(path): - s = 'link' - else: - s = 'file' - logger.debug('Removing %s %s', s, path) - if not self.dry_run: - os.remove(path) - if self.record: - if path in self.files_written: - self.files_written.remove(path) - - def is_writable(self, path): - result = False - while not result: - if os.path.exists(path): - result = os.access(path, os.W_OK) - break - parent = os.path.dirname(path) - if parent == path: - break - path = parent - return result - - def commit(self): - """ - Commit recorded changes, turn off recording, return - changes. 
- """ - assert self.record - result = self.files_written, self.dirs_created - self._init_record() - return result - - def rollback(self): - if not self.dry_run: - for f in list(self.files_written): - if os.path.exists(f): - os.remove(f) - # dirs should all be empty now, except perhaps for - # __pycache__ subdirs - # reverse so that subdirs appear before their parents - dirs = sorted(self.dirs_created, reverse=True) - for d in dirs: - flist = os.listdir(d) - if flist: - assert flist == ['__pycache__'] - sd = os.path.join(d, flist[0]) - os.rmdir(sd) - os.rmdir(d) # should fail if non-empty - self._init_record() - -def resolve(module_name, dotted_path): - if module_name in sys.modules: - mod = sys.modules[module_name] - else: - mod = __import__(module_name) - if dotted_path is None: - result = mod - else: - parts = dotted_path.split('.') - result = getattr(mod, parts.pop(0)) - for p in parts: - result = getattr(result, p) - return result - - -class ExportEntry(object): - def __init__(self, name, prefix, suffix, flags): - self.name = name - self.prefix = prefix - self.suffix = suffix - self.flags = flags - - @cached_property - def value(self): - return resolve(self.prefix, self.suffix) - - def __repr__(self): # pragma: no cover - return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, - self.suffix, self.flags) - - def __eq__(self, other): - if not isinstance(other, ExportEntry): - result = False - else: - result = (self.name == other.name and - self.prefix == other.prefix and - self.suffix == other.suffix and - self.flags == other.flags) - return result - - __hash__ = object.__hash__ - - -ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+) - \s*=\s*(?P<callable>(\w+)([:\.]\w+)*) - \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? - ''', re.VERBOSE) - -def get_export_entry(specification): - m = ENTRY_RE.search(specification) - if not m: - result = None - if '[' in specification or ']' in specification: - raise DistlibException("Invalid specification " - "'%s'" % specification) - else: - d = m.groupdict() - name = d['name'] - path = d['callable'] - colons = path.count(':') - if colons == 0: - prefix, suffix = path, None - else: - if colons != 1: - raise DistlibException("Invalid specification " - "'%s'" % specification) - prefix, suffix = path.split(':') - flags = d['flags'] - if flags is None: - if '[' in specification or ']' in specification: - raise DistlibException("Invalid specification " - "'%s'" % specification) - flags = [] - else: - flags = [f.strip() for f in flags.split(',')] - result = ExportEntry(name, prefix, suffix, flags) - return result - - -def get_cache_base(suffix=None): - """ - Return the default base location for distlib caches. If the directory does - not exist, it is created. Use the suffix provided for the base directory, - and default to '.distlib' if it isn't provided. - - On Windows, if LOCALAPPDATA is defined in the environment, then it is - assumed to be a directory, and will be the parent directory of the result. - On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home - directory - using os.expanduser('~') - will be the parent directory of - the result. - - The result is just the directory '.distlib' in the parent directory as - determined above, or with the name specified with ``suffix``. 
- """ - if suffix is None: - suffix = '.distlib' - if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: - result = os.path.expandvars('$localappdata') - else: - # Assume posix, or old Windows - result = os.path.expanduser('~') - # we use 'isdir' instead of 'exists', because we want to - # fail if there's a file with that name - if os.path.isdir(result): - usable = os.access(result, os.W_OK) - if not usable: - logger.warning('Directory exists but is not writable: %s', result) - else: - try: - os.makedirs(result) - usable = True - except OSError: - logger.warning('Unable to create %s', result, exc_info=True) - usable = False - if not usable: - result = tempfile.mkdtemp() - logger.warning('Default location unusable, using %s', result) - return os.path.join(result, suffix) - - -def path_to_cache_dir(path): - """ - Convert an absolute path to a directory name for use in a cache. - - The algorithm used is: - - #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. - #. Any occurrence of ``os.sep`` is replaced with ``'--'``. - #. ``'.cache'`` is appended. - """ - d, p = os.path.splitdrive(os.path.abspath(path)) - if d: - d = d.replace(':', '---') - p = p.replace(os.sep, '--') - return d + p + '.cache' - - -def ensure_slash(s): - if not s.endswith('/'): - return s + '/' - return s - - -def parse_credentials(netloc): - username = password = None - if '@' in netloc: - prefix, netloc = netloc.rsplit('@', 1) - if ':' not in prefix: - username = prefix - else: - username, password = prefix.split(':', 1) - if username: - username = unquote(username) - if password: - password = unquote(password) - return username, password, netloc - - -def get_process_umask(): - result = os.umask(0o22) - os.umask(result) - return result - -def is_string_sequence(seq): - result = True - i = None - for i, s in enumerate(seq): - if not isinstance(s, string_types): - result = False - break - assert i is not None - return result - -PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' - '([a-z0-9_.+-]+)', re.I) -PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') - - -def split_filename(filename, project_name=None): - """ - Extract name, version, python version from a filename (no extension) - - Return name, version, pyver or None - """ - result = None - pyver = None - filename = unquote(filename).replace(' ', '-') - m = PYTHON_VERSION.search(filename) - if m: - pyver = m.group(1) - filename = filename[:m.start()] - if project_name and len(filename) > len(project_name) + 1: - m = re.match(re.escape(project_name) + r'\b', filename) - if m: - n = m.end() - result = filename[:n], filename[n + 1:], pyver - if result is None: - m = PROJECT_NAME_AND_VERSION.match(filename) - if m: - result = m.group(1), m.group(3), pyver - return result - -# Allow spaces in name because of legacy dists like "Twisted Core" -NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*' - r'\(\s*(?P<ver>[^\s)]+)\)$') - -def parse_name_and_version(p): - """ - A utility method used to get name and version from a string. - - From e.g. a Provides-Dist value. - - :param p: A value in a form 'foo (1.0)' - :return: The name and version as a tuple. 
- """ - m = NAME_VERSION_RE.match(p) - if not m: - raise DistlibException('Ill-formed name/version string: \'%s\'' % p) - d = m.groupdict() - return d['name'].strip().lower(), d['ver'] - -def get_extras(requested, available): - result = set() - requested = set(requested or []) - available = set(available or []) - if '*' in requested: - requested.remove('*') - result |= available - for r in requested: - if r == '-': - result.add(r) - elif r.startswith('-'): - unwanted = r[1:] - if unwanted not in available: - logger.warning('undeclared extra: %s' % unwanted) - if unwanted in result: - result.remove(unwanted) - else: - if r not in available: - logger.warning('undeclared extra: %s' % r) - result.add(r) - return result -# -# Extended metadata functionality -# - -def _get_external_data(url): - result = {} - try: - # urlopen might fail if it runs into redirections, - # because of Python issue #13696. Fixed in locators - # using a custom redirect handler. - resp = urlopen(url) - headers = resp.info() - ct = headers.get('Content-Type') - if not ct.startswith('application/json'): - logger.debug('Unexpected response for JSON request: %s', ct) - else: - reader = codecs.getreader('utf-8')(resp) - #data = reader.read().decode('utf-8') - #result = json.loads(data) - result = json.load(reader) - except Exception as e: - logger.exception('Failed to get external data for %s: %s', url, e) - return result - -_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' - -def get_project_data(name): - url = '%s/%s/project.json' % (name[0].upper(), name) - url = urljoin(_external_data_base_url, url) - result = _get_external_data(url) - return result - -def get_package_data(name, version): - url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) - url = urljoin(_external_data_base_url, url) - return _get_external_data(url) - - -class Cache(object): - """ - A class implementing a cache for resources that need to live in the file system - e.g. shared libraries. This class was moved from resources to here because it - could be used by other modules, e.g. the wheel module. - """ - - def __init__(self, base): - """ - Initialise an instance. - - :param base: The base directory where the cache should be located. - """ - # we use 'isdir' instead of 'exists', because we want to - # fail if there's a file with that name - if not os.path.isdir(base): # pragma: no cover - os.makedirs(base) - if (os.stat(base).st_mode & 0o77) != 0: - logger.warning('Directory \'%s\' is not private', base) - self.base = os.path.abspath(os.path.normpath(base)) - - def prefix_to_dir(self, prefix): - """ - Converts a resource prefix to a directory name in the cache. - """ - return path_to_cache_dir(prefix) - - def clear(self): - """ - Clear the cache. - """ - not_removed = [] - for fn in os.listdir(self.base): - fn = os.path.join(self.base, fn) - try: - if os.path.islink(fn) or os.path.isfile(fn): - os.remove(fn) - elif os.path.isdir(fn): - shutil.rmtree(fn) - except Exception: - not_removed.append(fn) - return not_removed - - -class EventMixin(object): - """ - A very simple publish/subscribe system. - """ - def __init__(self): - self._subscribers = {} - - def add(self, event, subscriber, append=True): - """ - Add a subscriber for an event. - - :param event: The name of an event. - :param subscriber: The subscriber to be added (and called when the - event is published). - :param append: Whether to append or prepend the subscriber to an - existing subscriber list for the event. 
- """ - subs = self._subscribers - if event not in subs: - subs[event] = deque([subscriber]) - else: - sq = subs[event] - if append: - sq.append(subscriber) - else: - sq.appendleft(subscriber) - - def remove(self, event, subscriber): - """ - Remove a subscriber for an event. - - :param event: The name of an event. - :param subscriber: The subscriber to be removed. - """ - subs = self._subscribers - if event not in subs: - raise ValueError('No subscribers: %r' % event) - subs[event].remove(subscriber) - - def get_subscribers(self, event): - """ - Return an iterator for the subscribers for an event. - :param event: The event to return subscribers for. - """ - return iter(self._subscribers.get(event, ())) - - def publish(self, event, *args, **kwargs): - """ - Publish a event and return a list of values returned by its - subscribers. - - :param event: The event to publish. - :param args: The positional arguments to pass to the event's - subscribers. - :param kwargs: The keyword arguments to pass to the event's - subscribers. - """ - result = [] - for subscriber in self.get_subscribers(event): - try: - value = subscriber(event, *args, **kwargs) - except Exception: - logger.exception('Exception during event publication') - value = None - result.append(value) - logger.debug('publish %s: args = %s, kwargs = %s, result = %s', - event, args, kwargs, result) - return result - -# -# Simple sequencing -# -class Sequencer(object): - def __init__(self): - self._preds = {} - self._succs = {} - self._nodes = set() # nodes with no preds/succs - - def add_node(self, node): - self._nodes.add(node) - - def remove_node(self, node, edges=False): - if node in self._nodes: - self._nodes.remove(node) - if edges: - for p in set(self._preds.get(node, ())): - self.remove(p, node) - for s in set(self._succs.get(node, ())): - self.remove(node, s) - # Remove empties - for k, v in list(self._preds.items()): - if not v: - del self._preds[k] - for k, v in list(self._succs.items()): - if not v: - del self._succs[k] - - def add(self, pred, succ): - assert pred != succ - self._preds.setdefault(succ, set()).add(pred) - self._succs.setdefault(pred, set()).add(succ) - - def remove(self, pred, succ): - assert pred != succ - try: - preds = self._preds[succ] - succs = self._succs[pred] - except KeyError: # pragma: no cover - raise ValueError('%r not a successor of anything' % succ) - try: - preds.remove(pred) - succs.remove(succ) - except KeyError: # pragma: no cover - raise ValueError('%r not a successor of %r' % (succ, pred)) - - def is_step(self, step): - return (step in self._preds or step in self._succs or - step in self._nodes) - - def get_steps(self, final): - if not self.is_step(final): - raise ValueError('Unknown: %r' % final) - result = [] - todo = [] - seen = set() - todo.append(final) - while todo: - step = todo.pop(0) - if step in seen: - # if a step was already seen, - # move it to the end (so it will appear earlier - # when reversed on return) ... 
but not for the - # final step, as that would be confusing for - # users - if step != final: - result.remove(step) - result.append(step) - else: - seen.add(step) - result.append(step) - preds = self._preds.get(step, ()) - todo.extend(preds) - return reversed(result) - - @property - def strong_connections(self): - #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm - index_counter = [0] - stack = [] - lowlinks = {} - index = {} - result = [] - - graph = self._succs - - def strongconnect(node): - # set the depth index for this node to the smallest unused index - index[node] = index_counter[0] - lowlinks[node] = index_counter[0] - index_counter[0] += 1 - stack.append(node) - - # Consider successors - try: - successors = graph[node] - except Exception: - successors = [] - for successor in successors: - if successor not in lowlinks: - # Successor has not yet been visited - strongconnect(successor) - lowlinks[node] = min(lowlinks[node],lowlinks[successor]) - elif successor in stack: - # the successor is in the stack and hence in the current - # strongly connected component (SCC) - lowlinks[node] = min(lowlinks[node],index[successor]) - - # If `node` is a root node, pop the stack and generate an SCC - if lowlinks[node] == index[node]: - connected_component = [] - - while True: - successor = stack.pop() - connected_component.append(successor) - if successor == node: break - component = tuple(connected_component) - # storing the result - result.append(component) - - for node in graph: - if node not in lowlinks: - strongconnect(node) - - return result - - @property - def dot(self): - result = ['digraph G {'] - for succ in self._preds: - preds = self._preds[succ] - for pred in preds: - result.append(' %s -> %s;' % (pred, succ)) - for node in self._nodes: - result.append(' %s;' % node) - result.append('}') - return '\n'.join(result) - -# -# Unarchiving functionality for zip, tar, tgz, tbz, whl -# - -ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', - '.tgz', '.tbz', '.whl') - -def unarchive(archive_filename, dest_dir, format=None, check=True): - - def check_path(path): - if not isinstance(path, text_type): - path = path.decode('utf-8') - p = os.path.abspath(os.path.join(dest_dir, path)) - if not p.startswith(dest_dir) or p[plen] != os.sep: - raise ValueError('path outside destination: %r' % p) - - dest_dir = os.path.abspath(dest_dir) - plen = len(dest_dir) - archive = None - if format is None: - if archive_filename.endswith(('.zip', '.whl')): - format = 'zip' - elif archive_filename.endswith(('.tar.gz', '.tgz')): - format = 'tgz' - mode = 'r:gz' - elif archive_filename.endswith(('.tar.bz2', '.tbz')): - format = 'tbz' - mode = 'r:bz2' - elif archive_filename.endswith('.tar'): - format = 'tar' - mode = 'r' - else: # pragma: no cover - raise ValueError('Unknown format for %r' % archive_filename) - try: - if format == 'zip': - archive = ZipFile(archive_filename, 'r') - if check: - names = archive.namelist() - for name in names: - check_path(name) - else: - archive = tarfile.open(archive_filename, mode) - if check: - names = archive.getnames() - for name in names: - check_path(name) - if format != 'zip' and sys.version_info[0] < 3: - # See Python issue 17153. If the dest path contains Unicode, - # tarfile extraction fails on Python 2.x if a member path name - # contains non-ASCII characters - it leads to an implicit - # bytes -> unicode conversion using ASCII to decode. 
- for tarinfo in archive.getmembers(): - if not isinstance(tarinfo.name, text_type): - tarinfo.name = tarinfo.name.decode('utf-8') - archive.extractall(dest_dir) - - finally: - if archive: - archive.close() - - -def zip_dir(directory): - """zip a directory tree into a BytesIO object""" - result = io.BytesIO() - dlen = len(directory) - with ZipFile(result, "w") as zf: - for root, dirs, files in os.walk(directory): - for name in files: - full = os.path.join(root, name) - rel = root[dlen:] - dest = os.path.join(rel, name) - zf.write(full, dest) - return result - -# -# Simple progress bar -# - -UNITS = ('', 'K', 'M', 'G','T','P') - - -class Progress(object): - unknown = 'UNKNOWN' - - def __init__(self, minval=0, maxval=100): - assert maxval is None or maxval >= minval - self.min = self.cur = minval - self.max = maxval - self.started = None - self.elapsed = 0 - self.done = False - - def update(self, curval): - assert self.min <= curval - assert self.max is None or curval <= self.max - self.cur = curval - now = time.time() - if self.started is None: - self.started = now - else: - self.elapsed = now - self.started - - def increment(self, incr): - assert incr >= 0 - self.update(self.cur + incr) - - def start(self): - self.update(self.min) - return self - - def stop(self): - if self.max is not None: - self.update(self.max) - self.done = True - - @property - def maximum(self): - return self.unknown if self.max is None else self.max - - @property - def percentage(self): - if self.done: - result = '100 %' - elif self.max is None: - result = ' ?? %' - else: - v = 100.0 * (self.cur - self.min) / (self.max - self.min) - result = '%3d %%' % v - return result - - def format_duration(self, duration): - if (duration <= 0) and self.max is None or self.cur == self.min: - result = '??:??:??' 
- #elif duration < 1: - # result = '--:--:--' - else: - result = time.strftime('%H:%M:%S', time.gmtime(duration)) - return result - - @property - def ETA(self): - if self.done: - prefix = 'Done' - t = self.elapsed - #import pdb; pdb.set_trace() - else: - prefix = 'ETA ' - if self.max is None: - t = -1 - elif self.elapsed == 0 or (self.cur == self.min): - t = 0 - else: - #import pdb; pdb.set_trace() - t = float(self.max - self.min) - t /= self.cur - self.min - t = (t - 1) * self.elapsed - return '%s: %s' % (prefix, self.format_duration(t)) - - @property - def speed(self): - if self.elapsed == 0: - result = 0.0 - else: - result = (self.cur - self.min) / self.elapsed - for unit in UNITS: - if result < 1000: - break - result /= 1000.0 - return '%d %sB/s' % (result, unit) - -# -# Glob functionality -# - -RICH_GLOB = re.compile(r'\{([^}]*)\}') -_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') -_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') - - -def iglob(path_glob): - """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" - if _CHECK_RECURSIVE_GLOB.search(path_glob): - msg = """invalid glob %r: recursive glob "**" must be used alone""" - raise ValueError(msg % path_glob) - if _CHECK_MISMATCH_SET.search(path_glob): - msg = """invalid glob %r: mismatching set marker '{' or '}'""" - raise ValueError(msg % path_glob) - return _iglob(path_glob) - - -def _iglob(path_glob): - rich_path_glob = RICH_GLOB.split(path_glob, 1) - if len(rich_path_glob) > 1: - assert len(rich_path_glob) == 3, rich_path_glob - prefix, set, suffix = rich_path_glob - for item in set.split(','): - for path in _iglob(''.join((prefix, item, suffix))): - yield path - else: - if '**' not in path_glob: - for item in std_iglob(path_glob): - yield item - else: - prefix, radical = path_glob.split('**', 1) - if prefix == '': - prefix = '.' 
- if radical == '': - radical = '*' - else: - # we support both - radical = radical.lstrip('/') - radical = radical.lstrip('\\') - for path, dir, files in os.walk(prefix): - path = os.path.normpath(path) - for fn in _iglob(os.path.join(path, radical)): - yield fn - -if ssl: - from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, - CertificateError) - - -# -# HTTPSConnection which verifies certificates/matches domains -# - - class HTTPSConnection(httplib.HTTPSConnection): - ca_certs = None # set this to the path to the certs file (.pem) - check_domain = True # only used if ca_certs is not None - - # noinspection PyPropertyAccess - def connect(self): - sock = socket.create_connection((self.host, self.port), self.timeout) - if getattr(self, '_tunnel_host', False): - self.sock = sock - self._tunnel() - - if not hasattr(ssl, 'SSLContext'): - # For 2.x - if self.ca_certs: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE - self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, - cert_reqs=cert_reqs, - ssl_version=ssl.PROTOCOL_SSLv23, - ca_certs=self.ca_certs) - else: # pragma: no cover - context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - context.options |= ssl.OP_NO_SSLv2 - if self.cert_file: - context.load_cert_chain(self.cert_file, self.key_file) - kwargs = {} - if self.ca_certs: - context.verify_mode = ssl.CERT_REQUIRED - context.load_verify_locations(cafile=self.ca_certs) - if getattr(ssl, 'HAS_SNI', False): - kwargs['server_hostname'] = self.host - self.sock = context.wrap_socket(sock, **kwargs) - if self.ca_certs and self.check_domain: - try: - match_hostname(self.sock.getpeercert(), self.host) - logger.debug('Host verified: %s', self.host) - except CertificateError: # pragma: no cover - self.sock.shutdown(socket.SHUT_RDWR) - self.sock.close() - raise - - class HTTPSHandler(BaseHTTPSHandler): - def __init__(self, ca_certs, check_domain=True): - BaseHTTPSHandler.__init__(self) - self.ca_certs = ca_certs - self.check_domain = check_domain - - def _conn_maker(self, *args, **kwargs): - """ - This is called to create a connection instance. Normally you'd - pass a connection class to do_open, but it doesn't actually check for - a class, and just expects a callable. As long as we behave just as a - constructor would have, we should be OK. If it ever changes so that - we *must* pass a class, we'll create an UnsafeHTTPSConnection class - which just sets check_domain to False in the class definition, and - choose which one to pass to do_open. - """ - result = HTTPSConnection(*args, **kwargs) - if self.ca_certs: - result.ca_certs = self.ca_certs - result.check_domain = self.check_domain - return result - - def https_open(self, req): - try: - return self.do_open(self._conn_maker, req) - except URLError as e: - if 'certificate verify failed' in str(e.reason): - raise CertificateError('Unable to verify server certificate ' - 'for %s' % req.host) - else: - raise - - # - # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- - # Middle proxy using HTTP listens on port 443, or an index mistakenly serves - # HTML containing a http://xyz link when it should be https://xyz), - # you can use the following handler class, which does not allow HTTP traffic. - # - # It works by inheriting from HTTPHandler - so build_opener won't add a - # handler for HTTP itself. 
- # - class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): - def http_open(self, req): - raise URLError('Unexpected HTTP request on what should be a secure ' - 'connection: %s' % req) - -# -# XML-RPC with timeouts -# - -_ver_info = sys.version_info[:2] - -if _ver_info == (2, 6): - class HTTP(httplib.HTTP): - def __init__(self, host='', port=None, **kwargs): - if port == 0: # 0 means use port 0, not the default port - port = None - self._setup(self._connection_class(host, port, **kwargs)) - - - if ssl: - class HTTPS(httplib.HTTPS): - def __init__(self, host='', port=None, **kwargs): - if port == 0: # 0 means use port 0, not the default port - port = None - self._setup(self._connection_class(host, port, **kwargs)) - - -class Transport(xmlrpclib.Transport): - def __init__(self, timeout, use_datetime=0): - self.timeout = timeout - xmlrpclib.Transport.__init__(self, use_datetime) - - def make_connection(self, host): - h, eh, x509 = self.get_host_info(host) - if _ver_info == (2, 6): - result = HTTP(h, timeout=self.timeout) - else: - if not self._connection or host != self._connection[0]: - self._extra_headers = eh - self._connection = host, httplib.HTTPConnection(h) - result = self._connection[1] - return result - -if ssl: - class SafeTransport(xmlrpclib.SafeTransport): - def __init__(self, timeout, use_datetime=0): - self.timeout = timeout - xmlrpclib.SafeTransport.__init__(self, use_datetime) - - def make_connection(self, host): - h, eh, kwargs = self.get_host_info(host) - if not kwargs: - kwargs = {} - kwargs['timeout'] = self.timeout - if _ver_info == (2, 6): - result = HTTPS(host, None, **kwargs) - else: - if not self._connection or host != self._connection[0]: - self._extra_headers = eh - self._connection = host, httplib.HTTPSConnection(h, None, - **kwargs) - result = self._connection[1] - return result - - -class ServerProxy(xmlrpclib.ServerProxy): - def __init__(self, uri, **kwargs): - self.timeout = timeout = kwargs.pop('timeout', None) - # The above classes only come into play if a timeout - # is specified - if timeout is not None: - scheme, _ = splittype(uri) - use_datetime = kwargs.get('use_datetime', 0) - if scheme == 'https': - tcls = SafeTransport - else: - tcls = Transport - kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) - self.transport = t - xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) - -# -# CSV functionality. This is provided because on 2.x, the csv module can't -# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. -# - -def _csv_open(fn, mode, **kwargs): - if sys.version_info[0] < 3: - mode += 'b' - else: - kwargs['newline'] = '' - # Python 3 determines encoding from locale. 
Force 'utf-8' - # file encoding to match other forced utf-8 encoding - kwargs['encoding'] = 'utf-8' - return open(fn, mode, **kwargs) - - -class CSVBase(object): - defaults = { - 'delimiter': str(','), # The strs are used because we need native - 'quotechar': str('"'), # str in the csv API (2.x won't take - 'lineterminator': str('\n') # Unicode) - } - - def __enter__(self): - return self - - def __exit__(self, *exc_info): - self.stream.close() - - -class CSVReader(CSVBase): - def __init__(self, **kwargs): - if 'stream' in kwargs: - stream = kwargs['stream'] - if sys.version_info[0] >= 3: - # needs to be a text stream - stream = codecs.getreader('utf-8')(stream) - self.stream = stream - else: - self.stream = _csv_open(kwargs['path'], 'r') - self.reader = csv.reader(self.stream, **self.defaults) - - def __iter__(self): - return self - - def next(self): - result = next(self.reader) - if sys.version_info[0] < 3: - for i, item in enumerate(result): - if not isinstance(item, text_type): - result[i] = item.decode('utf-8') - return result - - __next__ = next - -class CSVWriter(CSVBase): - def __init__(self, fn, **kwargs): - self.stream = _csv_open(fn, 'w') - self.writer = csv.writer(self.stream, **self.defaults) - - def writerow(self, row): - if sys.version_info[0] < 3: - r = [] - for item in row: - if isinstance(item, text_type): - item = item.encode('utf-8') - r.append(item) - row = r - self.writer.writerow(row) - -# -# Configurator functionality -# - -class Configurator(BaseConfigurator): - - value_converters = dict(BaseConfigurator.value_converters) - value_converters['inc'] = 'inc_convert' - - def __init__(self, config, base=None): - super(Configurator, self).__init__(config) - self.base = base or os.getcwd() - - def configure_custom(self, config): - def convert(o): - if isinstance(o, (list, tuple)): - result = type(o)([convert(i) for i in o]) - elif isinstance(o, dict): - if '()' in o: - result = self.configure_custom(o) - else: - result = {} - for k in o: - result[k] = convert(o[k]) - else: - result = self.convert(o) - return result - - c = config.pop('()') - if not callable(c): - c = self.resolve(c) - props = config.pop('.', None) - # Check for valid identifiers - args = config.pop('[]', ()) - if args: - args = tuple([convert(o) for o in args]) - items = [(k, convert(config[k])) for k in config if valid_ident(k)] - kwargs = dict(items) - result = c(*args, **kwargs) - if props: - for n, v in props.items(): - setattr(result, n, convert(v)) - return result - - def __getitem__(self, key): - result = self.config[key] - if isinstance(result, dict) and '()' in result: - self.config[key] = result = self.configure_custom(result) - return result - - def inc_convert(self, value): - """Default converter for the inc:// protocol.""" - if not os.path.isabs(value): - value = os.path.join(self.base, value) - with codecs.open(value, 'r', encoding='utf-8') as f: - result = json.load(f) - return result - - -class SubprocessMixin(object): - """ - Mixin for running subprocesses and capturing their output - """ - def __init__(self, verbose=False, progress=None): - self.verbose = verbose - self.progress = progress - - def reader(self, stream, context): - """ - Read lines from a subprocess' output stream and either pass to a progress - callable (if specified) or write progress information to sys.stderr. 
- """ - progress = self.progress - verbose = self.verbose - while True: - s = stream.readline() - if not s: - break - if progress is not None: - progress(s, context) - else: - if not verbose: - sys.stderr.write('.') - else: - sys.stderr.write(s.decode('utf-8')) - sys.stderr.flush() - stream.close() - - def run_command(self, cmd, **kwargs): - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, **kwargs) - t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) - t1.start() - t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) - t2.start() - p.wait() - t1.join() - t2.join() - if self.progress is not None: - self.progress('done.', 'main') - elif self.verbose: - sys.stderr.write('done.\n') - return p - - -def normalize_name(name): - """Normalize a python package name a la PEP 503""" - # https://www.python.org/dev/peps/pep-0503/#normalized-names - return re.sub('[-_.]+', '-', name).lower() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/util.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/util.pyc deleted file mode 100644 index fa8a36c2..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/util.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyc deleted file mode 100644 index 085666df..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/w32.exe b/env/lib/python2.7/site-packages/pip/_vendor/distlib/w32.exe deleted file mode 100644 index 4df77001..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/w32.exe and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/w64.exe b/env/lib/python2.7/site-packages/pip/_vendor/distlib/w64.exe deleted file mode 100644 index 63ce483d..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/w64.exe and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.py b/env/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.py deleted file mode 100644 index 0c8efad9..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.py +++ /dev/null @@ -1,1004 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2013-2017 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -from __future__ import unicode_literals - -import base64 -import codecs -import datetime -import distutils.util -from email import message_from_file -import hashlib -import imp -import json -import logging -import os -import posixpath -import re -import shutil -import sys -import tempfile -import zipfile - -from . 
import __version__, DistlibException -from .compat import sysconfig, ZipFile, fsdecode, text_type, filter -from .database import InstalledDistribution -from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME -from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, - cached_property, get_cache_base, read_exports, tempdir) -from .version import NormalizedVersion, UnsupportedVersionError - -logger = logging.getLogger(__name__) - -cache = None # created when needed - -if hasattr(sys, 'pypy_version_info'): # pragma: no cover - IMP_PREFIX = 'pp' -elif sys.platform.startswith('java'): # pragma: no cover - IMP_PREFIX = 'jy' -elif sys.platform == 'cli': # pragma: no cover - IMP_PREFIX = 'ip' -else: - IMP_PREFIX = 'cp' - -VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') -if not VER_SUFFIX: # pragma: no cover - VER_SUFFIX = '%s%s' % sys.version_info[:2] -PYVER = 'py' + VER_SUFFIX -IMPVER = IMP_PREFIX + VER_SUFFIX - -ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_') - -ABI = sysconfig.get_config_var('SOABI') -if ABI and ABI.startswith('cpython-'): - ABI = ABI.replace('cpython-', 'cp') -else: - def _derive_abi(): - parts = ['cp', VER_SUFFIX] - if sysconfig.get_config_var('Py_DEBUG'): - parts.append('d') - if sysconfig.get_config_var('WITH_PYMALLOC'): - parts.append('m') - if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4: - parts.append('u') - return ''.join(parts) - ABI = _derive_abi() - del _derive_abi - -FILENAME_RE = re.compile(r''' -(?P<nm>[^-]+) --(?P<vn>\d+[^-]*) -(-(?P<bn>\d+[^-]*))? --(?P<py>\w+\d+(\.\w+\d+)*) --(?P<bi>\w+) --(?P<ar>\w+(\.\w+)*) -\.whl$ -''', re.IGNORECASE | re.VERBOSE) - -NAME_VERSION_RE = re.compile(r''' -(?P<nm>[^-]+) --(?P<vn>\d+[^-]*) -(-(?P<bn>\d+[^-]*))?$ -''', re.IGNORECASE | re.VERBOSE) - -SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') -SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') -SHEBANG_PYTHON = b'#!python' -SHEBANG_PYTHONW = b'#!pythonw' - -if os.sep == '/': - to_posix = lambda o: o -else: - to_posix = lambda o: o.replace(os.sep, '/') - - -class Mounter(object): - def __init__(self): - self.impure_wheels = {} - self.libs = {} - - def add(self, pathname, extensions): - self.impure_wheels[pathname] = extensions - self.libs.update(extensions) - - def remove(self, pathname): - extensions = self.impure_wheels.pop(pathname) - for k, v in extensions: - if k in self.libs: - del self.libs[k] - - def find_module(self, fullname, path=None): - if fullname in self.libs: - result = self - else: - result = None - return result - - def load_module(self, fullname): - if fullname in sys.modules: - result = sys.modules[fullname] - else: - if fullname not in self.libs: - raise ImportError('unable to find extension for %s' % fullname) - result = imp.load_dynamic(fullname, self.libs[fullname]) - result.__loader__ = self - parts = fullname.rsplit('.', 1) - if len(parts) > 1: - result.__package__ = parts[0] - return result - -_hook = Mounter() - - -class Wheel(object): - """ - Class to build and install from Wheel files (PEP 427). - """ - - wheel_version = (1, 1) - hash_kind = 'sha256' - - def __init__(self, filename=None, sign=False, verify=False): - """ - Initialise an instance using a (valid) filename. 
- """ - self.sign = sign - self.should_verify = verify - self.buildver = '' - self.pyver = [PYVER] - self.abi = ['none'] - self.arch = ['any'] - self.dirname = os.getcwd() - if filename is None: - self.name = 'dummy' - self.version = '0.1' - self._filename = self.filename - else: - m = NAME_VERSION_RE.match(filename) - if m: - info = m.groupdict('') - self.name = info['nm'] - # Reinstate the local version separator - self.version = info['vn'].replace('_', '-') - self.buildver = info['bn'] - self._filename = self.filename - else: - dirname, filename = os.path.split(filename) - m = FILENAME_RE.match(filename) - if not m: - raise DistlibException('Invalid name or ' - 'filename: %r' % filename) - if dirname: - self.dirname = os.path.abspath(dirname) - self._filename = filename - info = m.groupdict('') - self.name = info['nm'] - self.version = info['vn'] - self.buildver = info['bn'] - self.pyver = info['py'].split('.') - self.abi = info['bi'].split('.') - self.arch = info['ar'].split('.') - - @property - def filename(self): - """ - Build and return a filename from the various components. - """ - if self.buildver: - buildver = '-' + self.buildver - else: - buildver = '' - pyver = '.'.join(self.pyver) - abi = '.'.join(self.abi) - arch = '.'.join(self.arch) - # replace - with _ as a local version separator - version = self.version.replace('-', '_') - return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, - pyver, abi, arch) - - @property - def exists(self): - path = os.path.join(self.dirname, self.filename) - return os.path.isfile(path) - - @property - def tags(self): - for pyver in self.pyver: - for abi in self.abi: - for arch in self.arch: - yield pyver, abi, arch - - @cached_property - def metadata(self): - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - info_dir = '%s.dist-info' % name_ver - wrapper = codecs.getreader('utf-8') - with ZipFile(pathname, 'r') as zf: - wheel_metadata = self.get_wheel_metadata(zf) - wv = wheel_metadata['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) - if file_version < (1, 1): - fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA'] - else: - fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] - result = None - for fn in fns: - try: - metadata_filename = posixpath.join(info_dir, fn) - with zf.open(metadata_filename) as bf: - wf = wrapper(bf) - result = Metadata(fileobj=wf) - if result: - break - except KeyError: - pass - if not result: - raise ValueError('Invalid wheel, because metadata is ' - 'missing: looked in %s' % ', '.join(fns)) - return result - - def get_wheel_metadata(self, zf): - name_ver = '%s-%s' % (self.name, self.version) - info_dir = '%s.dist-info' % name_ver - metadata_filename = posixpath.join(info_dir, 'WHEEL') - with zf.open(metadata_filename) as bf: - wf = codecs.getreader('utf-8')(bf) - message = message_from_file(wf) - return dict(message) - - @cached_property - def info(self): - pathname = os.path.join(self.dirname, self.filename) - with ZipFile(pathname, 'r') as zf: - result = self.get_wheel_metadata(zf) - return result - - def process_shebang(self, data): - m = SHEBANG_RE.match(data) - if m: - end = m.end() - shebang, data_after_shebang = data[:end], data[end:] - # Preserve any arguments after the interpreter - if b'pythonw' in shebang.lower(): - shebang_python = SHEBANG_PYTHONW - else: - shebang_python = SHEBANG_PYTHON - m = SHEBANG_DETAIL_RE.match(shebang) - if m: - args = b' ' + m.groups()[-1] - else: - args = b'' - shebang = shebang_python 
+ args - data = shebang + data_after_shebang - else: - cr = data.find(b'\r') - lf = data.find(b'\n') - if cr < 0 or cr > lf: - term = b'\n' - else: - if data[cr:cr + 2] == b'\r\n': - term = b'\r\n' - else: - term = b'\r' - data = SHEBANG_PYTHON + term + data - return data - - def get_hash(self, data, hash_kind=None): - if hash_kind is None: - hash_kind = self.hash_kind - try: - hasher = getattr(hashlib, hash_kind) - except AttributeError: - raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) - result = hasher(data).digest() - result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') - return hash_kind, result - - def write_record(self, records, record_path, base): - records = list(records) # make a copy for sorting - p = to_posix(os.path.relpath(record_path, base)) - records.append((p, '', '')) - records.sort() - with CSVWriter(record_path) as writer: - for row in records: - writer.writerow(row) - - def write_records(self, info, libdir, archive_paths): - records = [] - distinfo, info_dir = info - hasher = getattr(hashlib, self.hash_kind) - for ap, p in archive_paths: - with open(p, 'rb') as f: - data = f.read() - digest = '%s=%s' % self.get_hash(data) - size = os.path.getsize(p) - records.append((ap, digest, size)) - - p = os.path.join(distinfo, 'RECORD') - self.write_record(records, p, libdir) - ap = to_posix(os.path.join(info_dir, 'RECORD')) - archive_paths.append((ap, p)) - - def build_zip(self, pathname, archive_paths): - with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: - for ap, p in archive_paths: - logger.debug('Wrote %s to %s in wheel', p, ap) - zf.write(p, ap) - - def build(self, paths, tags=None, wheel_version=None): - """ - Build a wheel from files in specified paths, and use any specified tags - when determining the name of the wheel. - """ - if tags is None: - tags = {} - - libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] - if libkey == 'platlib': - is_pure = 'false' - default_pyver = [IMPVER] - default_abi = [ABI] - default_arch = [ARCH] - else: - is_pure = 'true' - default_pyver = [PYVER] - default_abi = ['none'] - default_arch = ['any'] - - self.pyver = tags.get('pyver', default_pyver) - self.abi = tags.get('abi', default_abi) - self.arch = tags.get('arch', default_arch) - - libdir = paths[libkey] - - name_ver = '%s-%s' % (self.name, self.version) - data_dir = '%s.data' % name_ver - info_dir = '%s.dist-info' % name_ver - - archive_paths = [] - - # First, stuff which is not in site-packages - for key in ('data', 'headers', 'scripts'): - if key not in paths: - continue - path = paths[key] - if os.path.isdir(path): - for root, dirs, files in os.walk(path): - for fn in files: - p = fsdecode(os.path.join(root, fn)) - rp = os.path.relpath(p, path) - ap = to_posix(os.path.join(data_dir, key, rp)) - archive_paths.append((ap, p)) - if key == 'scripts' and not p.endswith('.exe'): - with open(p, 'rb') as f: - data = f.read() - data = self.process_shebang(data) - with open(p, 'wb') as f: - f.write(data) - - # Now, stuff which is in site-packages, other than the - # distinfo stuff. 
- path = libdir - distinfo = None - for root, dirs, files in os.walk(path): - if root == path: - # At the top level only, save distinfo for later - # and skip it for now - for i, dn in enumerate(dirs): - dn = fsdecode(dn) - if dn.endswith('.dist-info'): - distinfo = os.path.join(root, dn) - del dirs[i] - break - assert distinfo, '.dist-info directory expected, not found' - - for fn in files: - # comment out next suite to leave .pyc files in - if fsdecode(fn).endswith(('.pyc', '.pyo')): - continue - p = os.path.join(root, fn) - rp = to_posix(os.path.relpath(p, path)) - archive_paths.append((rp, p)) - - # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. - files = os.listdir(distinfo) - for fn in files: - if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): - p = fsdecode(os.path.join(distinfo, fn)) - ap = to_posix(os.path.join(info_dir, fn)) - archive_paths.append((ap, p)) - - wheel_metadata = [ - 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), - 'Generator: distlib %s' % __version__, - 'Root-Is-Purelib: %s' % is_pure, - ] - for pyver, abi, arch in self.tags: - wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) - p = os.path.join(distinfo, 'WHEEL') - with open(p, 'w') as f: - f.write('\n'.join(wheel_metadata)) - ap = to_posix(os.path.join(info_dir, 'WHEEL')) - archive_paths.append((ap, p)) - - # Now, at last, RECORD. - # Paths in here are archive paths - nothing else makes sense. - self.write_records((distinfo, info_dir), libdir, archive_paths) - # Now, ready to build the zip file - pathname = os.path.join(self.dirname, self.filename) - self.build_zip(pathname, archive_paths) - return pathname - - def skip_entry(self, arcname): - """ - Determine whether an archive entry should be skipped when verifying - or installing. - """ - # The signature file won't be in RECORD, - # and we don't currently don't do anything with it - # We also skip directories, as they won't be in RECORD - # either. See: - # - # https://github.com/pypa/wheel/issues/294 - # https://github.com/pypa/wheel/issues/287 - # https://github.com/pypa/wheel/pull/289 - # - return arcname.endswith(('/', '/RECORD.jws')) - - def install(self, paths, maker, **kwargs): - """ - Install a wheel to the specified paths. If kwarg ``warner`` is - specified, it should be a callable, which will be called with two - tuples indicating the wheel version of this software and the wheel - version in the file, if there is a discrepancy in the versions. - This can be used to issue any warnings to raise any exceptions. - If kwarg ``lib_only`` is True, only the purelib/platlib files are - installed, and the headers, scripts, data and dist-info metadata are - not written. If kwarg ``bytecode_hashed_invalidation`` is True, written - bytecode will try to use file-hash based invalidation (PEP-552) on - supported interpreter versions (CPython 2.7+). - - The return value is a :class:`InstalledDistribution` instance unless - ``options.lib_only`` is True, in which case the return value is ``None``. 
- """ - - dry_run = maker.dry_run - warner = kwargs.get('warner') - lib_only = kwargs.get('lib_only', False) - bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) - - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - data_dir = '%s.data' % name_ver - info_dir = '%s.dist-info' % name_ver - - metadata_name = posixpath.join(info_dir, METADATA_FILENAME) - wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') - record_name = posixpath.join(info_dir, 'RECORD') - - wrapper = codecs.getreader('utf-8') - - with ZipFile(pathname, 'r') as zf: - with zf.open(wheel_metadata_name) as bwf: - wf = wrapper(bwf) - message = message_from_file(wf) - wv = message['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) - if (file_version != self.wheel_version) and warner: - warner(self.wheel_version, file_version) - - if message['Root-Is-Purelib'] == 'true': - libdir = paths['purelib'] - else: - libdir = paths['platlib'] - - records = {} - with zf.open(record_name) as bf: - with CSVReader(stream=bf) as reader: - for row in reader: - p = row[0] - records[p] = row - - data_pfx = posixpath.join(data_dir, '') - info_pfx = posixpath.join(info_dir, '') - script_pfx = posixpath.join(data_dir, 'scripts', '') - - # make a new instance rather than a copy of maker's, - # as we mutate it - fileop = FileOperator(dry_run=dry_run) - fileop.record = True # so we can rollback if needed - - bc = not sys.dont_write_bytecode # Double negatives. Lovely! - - outfiles = [] # for RECORD writing - - # for script copying/shebang processing - workdir = tempfile.mkdtemp() - # set target dir later - # we default add_launchers to False, as the - # Python Launcher should be used instead - maker.source_dir = workdir - maker.target_dir = None - try: - for zinfo in zf.infolist(): - arcname = zinfo.filename - if isinstance(arcname, text_type): - u_arcname = arcname - else: - u_arcname = arcname.decode('utf-8') - if self.skip_entry(u_arcname): - continue - row = records[u_arcname] - if row[2] and str(zinfo.file_size) != row[2]: - raise DistlibException('size mismatch for ' - '%s' % u_arcname) - if row[1]: - kind, value = row[1].split('=', 1) - with zf.open(arcname) as bf: - data = bf.read() - _, digest = self.get_hash(data, kind) - if digest != value: - raise DistlibException('digest mismatch for ' - '%s' % arcname) - - if lib_only and u_arcname.startswith((info_pfx, data_pfx)): - logger.debug('lib_only: skipping %s', u_arcname) - continue - is_script = (u_arcname.startswith(script_pfx) - and not u_arcname.endswith('.exe')) - - if u_arcname.startswith(data_pfx): - _, where, rp = u_arcname.split('/', 2) - outfile = os.path.join(paths[where], convert_path(rp)) - else: - # meant for site-packages. 
- if u_arcname in (wheel_metadata_name, record_name): - continue - outfile = os.path.join(libdir, convert_path(u_arcname)) - if not is_script: - with zf.open(arcname) as bf: - fileop.copy_stream(bf, outfile) - outfiles.append(outfile) - # Double check the digest of the written file - if not dry_run and row[1]: - with open(outfile, 'rb') as bf: - data = bf.read() - _, newdigest = self.get_hash(data, kind) - if newdigest != digest: - raise DistlibException('digest mismatch ' - 'on write for ' - '%s' % outfile) - if bc and outfile.endswith('.py'): - try: - pyc = fileop.byte_compile(outfile, - hashed_invalidation=bc_hashed_invalidation) - outfiles.append(pyc) - except Exception: - # Don't give up if byte-compilation fails, - # but log it and perhaps warn the user - logger.warning('Byte-compilation failed', - exc_info=True) - else: - fn = os.path.basename(convert_path(arcname)) - workname = os.path.join(workdir, fn) - with zf.open(arcname) as bf: - fileop.copy_stream(bf, workname) - - dn, fn = os.path.split(outfile) - maker.target_dir = dn - filenames = maker.make(fn) - fileop.set_executable_mode(filenames) - outfiles.extend(filenames) - - if lib_only: - logger.debug('lib_only: returning None') - dist = None - else: - # Generate scripts - - # Try to get pydist.json so we can see if there are - # any commands to generate. If this fails (e.g. because - # of a legacy wheel), log a warning but don't give up. - commands = None - file_version = self.info['Wheel-Version'] - if file_version == '1.0': - # Use legacy info - ep = posixpath.join(info_dir, 'entry_points.txt') - try: - with zf.open(ep) as bwf: - epdata = read_exports(bwf) - commands = {} - for key in ('console', 'gui'): - k = '%s_scripts' % key - if k in epdata: - commands['wrap_%s' % key] = d = {} - for v in epdata[k].values(): - s = '%s:%s' % (v.prefix, v.suffix) - if v.flags: - s += ' %s' % v.flags - d[v.name] = s - except Exception: - logger.warning('Unable to read legacy script ' - 'metadata, so cannot generate ' - 'scripts') - else: - try: - with zf.open(metadata_name) as bwf: - wf = wrapper(bwf) - commands = json.load(wf).get('extensions') - if commands: - commands = commands.get('python.commands') - except Exception: - logger.warning('Unable to read JSON metadata, so ' - 'cannot generate scripts') - if commands: - console_scripts = commands.get('wrap_console', {}) - gui_scripts = commands.get('wrap_gui', {}) - if console_scripts or gui_scripts: - script_dir = paths.get('scripts', '') - if not os.path.isdir(script_dir): - raise ValueError('Valid script path not ' - 'specified') - maker.target_dir = script_dir - for k, v in console_scripts.items(): - script = '%s = %s' % (k, v) - filenames = maker.make(script) - fileop.set_executable_mode(filenames) - - if gui_scripts: - options = {'gui': True } - for k, v in gui_scripts.items(): - script = '%s = %s' % (k, v) - filenames = maker.make(script, options) - fileop.set_executable_mode(filenames) - - p = os.path.join(libdir, info_dir) - dist = InstalledDistribution(p) - - # Write SHARED - paths = dict(paths) # don't change passed in dict - del paths['purelib'] - del paths['platlib'] - paths['lib'] = libdir - p = dist.write_shared_locations(paths, dry_run) - if p: - outfiles.append(p) - - # Write RECORD - dist.write_installed_files(outfiles, paths['prefix'], - dry_run) - return dist - except Exception: # pragma: no cover - logger.exception('installation failed.') - fileop.rollback() - raise - finally: - shutil.rmtree(workdir) - - def _get_dylib_cache(self): - global cache - if cache is None: - 
# Use native string to avoid issues on 2.x: see Python #20140. - base = os.path.join(get_cache_base(), str('dylib-cache'), - sys.version[:3]) - cache = Cache(base) - return cache - - def _get_extensions(self): - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - info_dir = '%s.dist-info' % name_ver - arcname = posixpath.join(info_dir, 'EXTENSIONS') - wrapper = codecs.getreader('utf-8') - result = [] - with ZipFile(pathname, 'r') as zf: - try: - with zf.open(arcname) as bf: - wf = wrapper(bf) - extensions = json.load(wf) - cache = self._get_dylib_cache() - prefix = cache.prefix_to_dir(pathname) - cache_base = os.path.join(cache.base, prefix) - if not os.path.isdir(cache_base): - os.makedirs(cache_base) - for name, relpath in extensions.items(): - dest = os.path.join(cache_base, convert_path(relpath)) - if not os.path.exists(dest): - extract = True - else: - file_time = os.stat(dest).st_mtime - file_time = datetime.datetime.fromtimestamp(file_time) - info = zf.getinfo(relpath) - wheel_time = datetime.datetime(*info.date_time) - extract = wheel_time > file_time - if extract: - zf.extract(relpath, cache_base) - result.append((name, dest)) - except KeyError: - pass - return result - - def is_compatible(self): - """ - Determine if a wheel is compatible with the running system. - """ - return is_compatible(self) - - def is_mountable(self): - """ - Determine if a wheel is asserted as mountable by its metadata. - """ - return True # for now - metadata details TBD - - def mount(self, append=False): - pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) - if not self.is_compatible(): - msg = 'Wheel %s not compatible with this Python.' % pathname - raise DistlibException(msg) - if not self.is_mountable(): - msg = 'Wheel %s is marked as not mountable.' 
% pathname - raise DistlibException(msg) - if pathname in sys.path: - logger.debug('%s already in path', pathname) - else: - if append: - sys.path.append(pathname) - else: - sys.path.insert(0, pathname) - extensions = self._get_extensions() - if extensions: - if _hook not in sys.meta_path: - sys.meta_path.append(_hook) - _hook.add(pathname, extensions) - - def unmount(self): - pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) - if pathname not in sys.path: - logger.debug('%s not in path', pathname) - else: - sys.path.remove(pathname) - if pathname in _hook.impure_wheels: - _hook.remove(pathname) - if not _hook.impure_wheels: - if _hook in sys.meta_path: - sys.meta_path.remove(_hook) - - def verify(self): - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - data_dir = '%s.data' % name_ver - info_dir = '%s.dist-info' % name_ver - - metadata_name = posixpath.join(info_dir, METADATA_FILENAME) - wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') - record_name = posixpath.join(info_dir, 'RECORD') - - wrapper = codecs.getreader('utf-8') - - with ZipFile(pathname, 'r') as zf: - with zf.open(wheel_metadata_name) as bwf: - wf = wrapper(bwf) - message = message_from_file(wf) - wv = message['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) - # TODO version verification - - records = {} - with zf.open(record_name) as bf: - with CSVReader(stream=bf) as reader: - for row in reader: - p = row[0] - records[p] = row - - for zinfo in zf.infolist(): - arcname = zinfo.filename - if isinstance(arcname, text_type): - u_arcname = arcname - else: - u_arcname = arcname.decode('utf-8') - # See issue #115: some wheels have .. in their entries, but - # in the filename ... e.g. __main__..py ! So the check is - # updated to look for .. in the directory portions - p = u_arcname.split('/') - if '..' in p: - raise DistlibException('invalid entry in ' - 'wheel: %r' % u_arcname) - - if self.skip_entry(u_arcname): - continue - row = records[u_arcname] - if row[2] and str(zinfo.file_size) != row[2]: - raise DistlibException('size mismatch for ' - '%s' % u_arcname) - if row[1]: - kind, value = row[1].split('=', 1) - with zf.open(arcname) as bf: - data = bf.read() - _, digest = self.get_hash(data, kind) - if digest != value: - raise DistlibException('digest mismatch for ' - '%s' % arcname) - - def update(self, modifier, dest_dir=None, **kwargs): - """ - Update the contents of a wheel in a generic way. The modifier should - be a callable which expects a dictionary argument: its keys are - archive-entry paths, and its values are absolute filesystem paths - where the contents the corresponding archive entries can be found. The - modifier is free to change the contents of the files pointed to, add - new entries and remove entries, before returning. This method will - extract the entire contents of the wheel to a temporary location, call - the modifier, and then use the passed (and possibly updated) - dictionary to write a new wheel. If ``dest_dir`` is specified, the new - wheel is written there -- otherwise, the original wheel is overwritten. - - The modifier should return True if it updated the wheel, else False. - This method returns the same value the modifier returns. 
- """ - - def get_version(path_map, info_dir): - version = path = None - key = '%s/%s' % (info_dir, METADATA_FILENAME) - if key not in path_map: - key = '%s/PKG-INFO' % info_dir - if key in path_map: - path = path_map[key] - version = Metadata(path=path).version - return version, path - - def update_version(version, path): - updated = None - try: - v = NormalizedVersion(version) - i = version.find('-') - if i < 0: - updated = '%s+1' % version - else: - parts = [int(s) for s in version[i + 1:].split('.')] - parts[-1] += 1 - updated = '%s+%s' % (version[:i], - '.'.join(str(i) for i in parts)) - except UnsupportedVersionError: - logger.debug('Cannot update non-compliant (PEP-440) ' - 'version %r', version) - if updated: - md = Metadata(path=path) - md.version = updated - legacy = not path.endswith(METADATA_FILENAME) - md.write(path=path, legacy=legacy) - logger.debug('Version updated from %r to %r', version, - updated) - - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - info_dir = '%s.dist-info' % name_ver - record_name = posixpath.join(info_dir, 'RECORD') - with tempdir() as workdir: - with ZipFile(pathname, 'r') as zf: - path_map = {} - for zinfo in zf.infolist(): - arcname = zinfo.filename - if isinstance(arcname, text_type): - u_arcname = arcname - else: - u_arcname = arcname.decode('utf-8') - if u_arcname == record_name: - continue - if '..' in u_arcname: - raise DistlibException('invalid entry in ' - 'wheel: %r' % u_arcname) - zf.extract(zinfo, workdir) - path = os.path.join(workdir, convert_path(u_arcname)) - path_map[u_arcname] = path - - # Remember the version. - original_version, _ = get_version(path_map, info_dir) - # Files extracted. Call the modifier. - modified = modifier(path_map, **kwargs) - if modified: - # Something changed - need to build a new wheel. - current_version, path = get_version(path_map, info_dir) - if current_version and (current_version == original_version): - # Add or update local version to signify changes. - update_version(current_version, path) - # Decide where the new wheel goes. - if dest_dir is None: - fd, newpath = tempfile.mkstemp(suffix='.whl', - prefix='wheel-update-', - dir=workdir) - os.close(fd) - else: - if not os.path.isdir(dest_dir): - raise DistlibException('Not a directory: %r' % dest_dir) - newpath = os.path.join(dest_dir, self.filename) - archive_paths = list(path_map.items()) - distinfo = os.path.join(workdir, info_dir) - info = distinfo, info_dir - self.write_records(info, workdir, archive_paths) - self.build_zip(newpath, archive_paths) - if dest_dir is None: - shutil.copyfile(newpath, pathname) - return modified - -def compatible_tags(): - """ - Return (pyver, abi, arch) tuples compatible with this Python. 
- """ - versions = [VER_SUFFIX] - major = VER_SUFFIX[0] - for minor in range(sys.version_info[1] - 1, - 1, -1): - versions.append(''.join([major, str(minor)])) - - abis = [] - for suffix, _, _ in imp.get_suffixes(): - if suffix.startswith('.abi'): - abis.append(suffix.split('.', 2)[1]) - abis.sort() - if ABI != 'none': - abis.insert(0, ABI) - abis.append('none') - result = [] - - arches = [ARCH] - if sys.platform == 'darwin': - m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) - if m: - name, major, minor, arch = m.groups() - minor = int(minor) - matches = [arch] - if arch in ('i386', 'ppc'): - matches.append('fat') - if arch in ('i386', 'ppc', 'x86_64'): - matches.append('fat3') - if arch in ('ppc64', 'x86_64'): - matches.append('fat64') - if arch in ('i386', 'x86_64'): - matches.append('intel') - if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): - matches.append('universal') - while minor >= 0: - for match in matches: - s = '%s_%s_%s_%s' % (name, major, minor, match) - if s != ARCH: # already there - arches.append(s) - minor -= 1 - - # Most specific - our Python version, ABI and arch - for abi in abis: - for arch in arches: - result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) - - # where no ABI / arch dependency, but IMP_PREFIX dependency - for i, version in enumerate(versions): - result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) - if i == 0: - result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) - - # no IMP_PREFIX, ABI or arch dependency - for i, version in enumerate(versions): - result.append((''.join(('py', version)), 'none', 'any')) - if i == 0: - result.append((''.join(('py', version[0])), 'none', 'any')) - return set(result) - - -COMPATIBLE_TAGS = compatible_tags() - -del compatible_tags - - -def is_compatible(wheel, tags=None): - if not isinstance(wheel, Wheel): - wheel = Wheel(wheel) # assume it's a filename - result = False - if tags is None: - tags = COMPATIBLE_TAGS - for ver, abi, arch in tags: - if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: - result = True - break - return result diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyc deleted file mode 100644 index 3d365bf1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distro.pyc b/env/lib/python2.7/site-packages/pip/_vendor/distro.pyc deleted file mode 100644 index 079b53b2..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/distro.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/__init__.py deleted file mode 100644 index 04912349..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -HTML parsing library based on the `WHATWG HTML specification -<https://whatwg.org/html>`_. The parser is designed to be compatible with -existing HTML found in the wild and implements well-defined error recovery that -is largely compatible with modern desktop web browsers. 
- -Example usage:: - - from pip._vendor import html5lib - with open("my_document.html", "rb") as f: - tree = html5lib.parse(f) - -For convenience, this module re-exports the following names: - -* :func:`~.html5parser.parse` -* :func:`~.html5parser.parseFragment` -* :class:`~.html5parser.HTMLParser` -* :func:`~.treebuilders.getTreeBuilder` -* :func:`~.treewalkers.getTreeWalker` -* :func:`~.serializer.serialize` -""" - -from __future__ import absolute_import, division, unicode_literals - -from .html5parser import HTMLParser, parse, parseFragment -from .treebuilders import getTreeBuilder -from .treewalkers import getTreeWalker -from .serializer import serialize - -__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", - "getTreeWalker", "serialize"] - -# this has to be at the top level, see how setup.py parses this -#: Distribution version number. -__version__ = "1.0.1" diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/__init__.pyc deleted file mode 100644 index 5c5d2398..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_ihatexml.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_ihatexml.pyc deleted file mode 100644 index 5575dfdf..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_ihatexml.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyc deleted file mode 100644 index 6d56f2f4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_tokenizer.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_tokenizer.pyc deleted file mode 100644 index 90d4207d..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_tokenizer.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py deleted file mode 100644 index a5ba4bf1..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from .py import Trie as PyTrie - -Trie = PyTrie - -# pylint:disable=wrong-import-position -try: - from .datrie import Trie as DATrie -except ImportError: - pass -else: - Trie = DATrie -# pylint:enable=wrong-import-position diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/__init__.pyc deleted file mode 100644 index 8d25469a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/_base.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/_base.pyc deleted file mode 100644 index 9d7851a5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/_base.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py deleted file mode 100644 index e2e5f866..00000000 --- 
a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py +++ /dev/null @@ -1,44 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from datrie import Trie as DATrie -from pip._vendor.six import text_type - -from ._base import Trie as ABCTrie - - -class Trie(ABCTrie): - def __init__(self, data): - chars = set() - for key in data.keys(): - if not isinstance(key, text_type): - raise TypeError("All keys must be strings") - for char in key: - chars.add(char) - - self._data = DATrie("".join(chars)) - for key, value in data.items(): - self._data[key] = value - - def __contains__(self, key): - return key in self._data - - def __len__(self): - return len(self._data) - - def __iter__(self): - raise NotImplementedError() - - def __getitem__(self, key): - return self._data[key] - - def keys(self, prefix=None): - return self._data.keys(prefix) - - def has_keys_with_prefix(self, prefix): - return self._data.has_keys_with_prefix(prefix) - - def longest_prefix(self, prefix): - return self._data.longest_prefix(prefix) - - def longest_prefix_item(self, prefix): - return self._data.longest_prefix_item(prefix) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/datrie.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/datrie.pyc deleted file mode 100644 index 19165c23..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/datrie.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/py.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/py.pyc deleted file mode 100644 index fdd4b51f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/py.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.py deleted file mode 100644 index 0703afb3..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.py +++ /dev/null @@ -1,124 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from types import ModuleType - -from pip._vendor.six import text_type - -try: - import xml.etree.cElementTree as default_etree -except ImportError: - import xml.etree.ElementTree as default_etree - - -__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", - "surrogatePairToCodepoint", "moduleFactoryFactory", - "supports_lone_surrogates"] - - -# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be -# caught by the below test. In general this would be any platform -# using UTF-16 as its encoding of unicode strings, such as -# Jython. This is because UTF-16 itself is based on the use of such -# surrogates, and there is no mechanism to further escape such -# escapes. 
-try: - _x = eval('"\\uD800"') # pylint:disable=eval-used - if not isinstance(_x, text_type): - # We need this with u"" because of http://bugs.jython.org/issue2039 - _x = eval('u"\\uD800"') # pylint:disable=eval-used - assert isinstance(_x, text_type) -except: # pylint:disable=bare-except - supports_lone_surrogates = False -else: - supports_lone_surrogates = True - - -class MethodDispatcher(dict): - """Dict with 2 special properties: - - On initiation, keys that are lists, sets or tuples are converted to - multiple keys so accessing any one of the items in the original - list-like object returns the matching value - - md = MethodDispatcher({("foo", "bar"):"baz"}) - md["foo"] == "baz" - - A default value which can be set through the default attribute. - """ - - def __init__(self, items=()): - # Using _dictEntries instead of directly assigning to self is about - # twice as fast. Please do careful performance testing before changing - # anything here. - _dictEntries = [] - for name, value in items: - if isinstance(name, (list, tuple, frozenset, set)): - for item in name: - _dictEntries.append((item, value)) - else: - _dictEntries.append((name, value)) - dict.__init__(self, _dictEntries) - assert len(self) == len(_dictEntries) - self.default = None - - def __getitem__(self, key): - return dict.get(self, key, self.default) - - -# Some utility functions to deal with weirdness around UCS2 vs UCS4 -# python builds - -def isSurrogatePair(data): - return (len(data) == 2 and - ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and - ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF) - - -def surrogatePairToCodepoint(data): - char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 + - (ord(data[1]) - 0xDC00)) - return char_val - -# Module Factory Factory (no, this isn't Java, I know) -# Here to stop this being duplicated all over the place. 
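
The MethodDispatcher docstring above describes a dict whose list/tuple/set keys fan out to one entry per member at construction time, with lookups of unknown keys falling back to a default attribute. A minimal sketch of that pattern outside html5lib (the Dispatcher name is illustrative, not part of the library):

    class Dispatcher(dict):
        def __init__(self, items=()):
            entries = []
            for key, value in items:
                if isinstance(key, (list, tuple, frozenset, set)):
                    # one dict entry per member of a compound key
                    entries.extend((k, value) for k in key)
                else:
                    entries.append((key, value))
            super().__init__(entries)
            self.default = None

        def __getitem__(self, key):
            # unknown keys return the default instead of raising KeyError
            return dict.get(self, key, self.default)

    md = Dispatcher([(("foo", "bar"), "baz")])
    assert md["foo"] == md["bar"] == "baz"
    assert md["missing"] is None
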
- - -def moduleFactoryFactory(factory): - moduleCache = {} - - def moduleFactory(baseModule, *args, **kwargs): - if isinstance(ModuleType.__name__, type("")): - name = "_%s_factory" % baseModule.__name__ - else: - name = b"_%s_factory" % baseModule.__name__ - - kwargs_tuple = tuple(kwargs.items()) - - try: - return moduleCache[name][args][kwargs_tuple] - except KeyError: - mod = ModuleType(name) - objs = factory(baseModule, *args, **kwargs) - mod.__dict__.update(objs) - if "name" not in moduleCache: - moduleCache[name] = {} - if "args" not in moduleCache[name]: - moduleCache[name][args] = {} - if "kwargs" not in moduleCache[name][args]: - moduleCache[name][args][kwargs_tuple] = {} - moduleCache[name][args][kwargs_tuple] = mod - return mod - - return moduleFactory - - -def memoize(func): - cache = {} - - def wrapped(*args, **kwargs): - key = (tuple(args), tuple(kwargs.items())) - if key not in cache: - cache[key] = func(*args, **kwargs) - return cache[key] - - return wrapped diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.pyc deleted file mode 100644 index 1d56e91f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.pyc deleted file mode 100644 index 7c42c26b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/__init__.pyc deleted file mode 100644 index e4cc2215..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.pyc deleted file mode 100644 index c885d39d..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/base.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/base.pyc deleted file mode 100644 index 02f17d62..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/base.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.pyc deleted file mode 100644 index 7fb04781..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/lint.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/lint.pyc deleted file mode 100644 index 5dc336c3..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/lint.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.pyc deleted file mode 100644 index b7fe30fe..00000000 Binary files 
a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.pyc deleted file mode 100644 index a775809b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/whitespace.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/whitespace.pyc deleted file mode 100644 index feaa8da4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/whitespace.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyc deleted file mode 100644 index de72bb65..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/serializer.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/serializer.pyc deleted file mode 100644 index 25d13bf0..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/serializer.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.pyc deleted file mode 100644 index f6a25cf3..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.pyc deleted file mode 100644 index eb600064..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.pyc deleted file mode 100644 index 6b168e6a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.pyc deleted file mode 100644 index ad14cab5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py deleted file mode 100644 index 73973db5..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py +++ /dev/null @@ -1,417 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals -from pip._vendor.six import text_type - -from ..constants import scopingElements, tableInsertModeElements, namespaces - -# The scope markers are inserted when entering object elements, -# marquees, table cells, and table captions, and are used to prevent formatting -# from "leaking" into tables, object elements, and marquees. 
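
The comment above, together with the Marker and listElementsMap definitions that follow, describes the scope-marker trick: a sentinel pushed onto the list of active formatting elements so that later operations stop at the most recent marker instead of leaking formatting across table or object boundaries. A toy sketch of just that sentinel behaviour, independent of the real TreeBuilder:

    MARKER = object()

    def clear_to_last_marker(active):
        # pop entries until (and including) the most recent marker
        while active and active.pop() is not MARKER:
            pass

    active = ["b", "i", MARKER, "em"]
    clear_to_last_marker(active)
    print(active)   # ['b', 'i']; formatting opened inside the marked scope is discarded
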
-Marker = None - -listElementsMap = { - None: (frozenset(scopingElements), False), - "button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False), - "list": (frozenset(scopingElements | set([(namespaces["html"], "ol"), - (namespaces["html"], "ul")])), False), - "table": (frozenset([(namespaces["html"], "html"), - (namespaces["html"], "table")]), False), - "select": (frozenset([(namespaces["html"], "optgroup"), - (namespaces["html"], "option")]), True) -} - - -class Node(object): - """Represents an item in the tree""" - def __init__(self, name): - """Creates a Node - - :arg name: The tag name associated with the node - - """ - # The tag name assocaited with the node - self.name = name - # The parent of the current node (or None for the document node) - self.parent = None - # The value of the current node (applies to text nodes and comments) - self.value = None - # A dict holding name -> value pairs for attributes of the node - self.attributes = {} - # A list of child nodes of the current node. This must include all - # elements but not necessarily other node types. - self.childNodes = [] - # A list of miscellaneous flags that can be set on the node. - self._flags = [] - - def __str__(self): - attributesStr = " ".join(["%s=\"%s\"" % (name, value) - for name, value in - self.attributes.items()]) - if attributesStr: - return "<%s %s>" % (self.name, attributesStr) - else: - return "<%s>" % (self.name) - - def __repr__(self): - return "<%s>" % (self.name) - - def appendChild(self, node): - """Insert node as a child of the current node - - :arg node: the node to insert - - """ - raise NotImplementedError - - def insertText(self, data, insertBefore=None): - """Insert data as text in the current node, positioned before the - start of node insertBefore or to the end of the node's text. - - :arg data: the data to insert - - :arg insertBefore: True if you want to insert the text before the node - and False if you want to insert it after the node - - """ - raise NotImplementedError - - def insertBefore(self, node, refNode): - """Insert node as a child of the current node, before refNode in the - list of child nodes. Raises ValueError if refNode is not a child of - the current node - - :arg node: the node to insert - - :arg refNode: the child node to insert the node before - - """ - raise NotImplementedError - - def removeChild(self, node): - """Remove node from the children of the current node - - :arg node: the child node to remove - - """ - raise NotImplementedError - - def reparentChildren(self, newParent): - """Move all the children of the current node to newParent. - This is needed so that trees that don't store text as nodes move the - text in the correct way - - :arg newParent: the node to move all this node's children to - - """ - # XXX - should this method be made more general? - for child in self.childNodes: - newParent.appendChild(child) - self.childNodes = [] - - def cloneNode(self): - """Return a shallow copy of the current node i.e. 
a node with the same - name and attributes but with no parent or child nodes - """ - raise NotImplementedError - - def hasContent(self): - """Return true if the node has children or text, false otherwise - """ - raise NotImplementedError - - -class ActiveFormattingElements(list): - def append(self, node): - equalCount = 0 - if node != Marker: - for element in self[::-1]: - if element == Marker: - break - if self.nodesEqual(element, node): - equalCount += 1 - if equalCount == 3: - self.remove(element) - break - list.append(self, node) - - def nodesEqual(self, node1, node2): - if not node1.nameTuple == node2.nameTuple: - return False - - if not node1.attributes == node2.attributes: - return False - - return True - - -class TreeBuilder(object): - """Base treebuilder implementation - - * documentClass - the class to use for the bottommost node of a document - * elementClass - the class to use for HTML Elements - * commentClass - the class to use for comments - * doctypeClass - the class to use for doctypes - - """ - # pylint:disable=not-callable - - # Document class - documentClass = None - - # The class to use for creating a node - elementClass = None - - # The class to use for creating comments - commentClass = None - - # The class to use for creating doctypes - doctypeClass = None - - # Fragment class - fragmentClass = None - - def __init__(self, namespaceHTMLElements): - """Create a TreeBuilder - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - """ - if namespaceHTMLElements: - self.defaultNamespace = "http://www.w3.org/1999/xhtml" - else: - self.defaultNamespace = None - self.reset() - - def reset(self): - self.openElements = [] - self.activeFormattingElements = ActiveFormattingElements() - - # XXX - rename these to headElement, formElement - self.headPointer = None - self.formPointer = None - - self.insertFromTable = False - - self.document = self.documentClass() - - def elementInScope(self, target, variant=None): - - # If we pass a node in we match that. if we pass a string - # match any node with that name - exactNode = hasattr(target, "nameTuple") - if not exactNode: - if isinstance(target, text_type): - target = (namespaces["html"], target) - assert isinstance(target, tuple) - - listElements, invert = listElementsMap[variant] - - for node in reversed(self.openElements): - if exactNode and node == target: - return True - elif not exactNode and node.nameTuple == target: - return True - elif (invert ^ (node.nameTuple in listElements)): - return False - - assert False # We should never reach this point - - def reconstructActiveFormattingElements(self): - # Within this algorithm the order of steps described in the - # specification is not quite the same as the order of steps in the - # code. It should still do the same though. - - # Step 1: stop the algorithm when there's nothing to do. - if not self.activeFormattingElements: - return - - # Step 2 and step 3: we start with the last element. So i is -1. - i = len(self.activeFormattingElements) - 1 - entry = self.activeFormattingElements[i] - if entry == Marker or entry in self.openElements: - return - - # Step 6 - while entry != Marker and entry not in self.openElements: - if i == 0: - # This will be reset to 0 below - i = -1 - break - i -= 1 - # Step 5: let entry be one earlier in the list. 
- entry = self.activeFormattingElements[i] - - while True: - # Step 7 - i += 1 - - # Step 8 - entry = self.activeFormattingElements[i] - clone = entry.cloneNode() # Mainly to get a new copy of the attributes - - # Step 9 - element = self.insertElement({"type": "StartTag", - "name": clone.name, - "namespace": clone.namespace, - "data": clone.attributes}) - - # Step 10 - self.activeFormattingElements[i] = element - - # Step 11 - if element == self.activeFormattingElements[-1]: - break - - def clearActiveFormattingElements(self): - entry = self.activeFormattingElements.pop() - while self.activeFormattingElements and entry != Marker: - entry = self.activeFormattingElements.pop() - - def elementInActiveFormattingElements(self, name): - """Check if an element exists between the end of the active - formatting elements and the last marker. If it does, return it, else - return false""" - - for item in self.activeFormattingElements[::-1]: - # Check for Marker first because if it's a Marker it doesn't have a - # name attribute. - if item == Marker: - break - elif item.name == name: - return item - return False - - def insertRoot(self, token): - element = self.createElement(token) - self.openElements.append(element) - self.document.appendChild(element) - - def insertDoctype(self, token): - name = token["name"] - publicId = token["publicId"] - systemId = token["systemId"] - - doctype = self.doctypeClass(name, publicId, systemId) - self.document.appendChild(doctype) - - def insertComment(self, token, parent=None): - if parent is None: - parent = self.openElements[-1] - parent.appendChild(self.commentClass(token["data"])) - - def createElement(self, token): - """Create an element but don't insert it anywhere""" - name = token["name"] - namespace = token.get("namespace", self.defaultNamespace) - element = self.elementClass(name, namespace) - element.attributes = token["data"] - return element - - def _getInsertFromTable(self): - return self._insertFromTable - - def _setInsertFromTable(self, value): - """Switch the function used to insert an element from the - normal one to the misnested table one and back again""" - self._insertFromTable = value - if value: - self.insertElement = self.insertElementTable - else: - self.insertElement = self.insertElementNormal - - insertFromTable = property(_getInsertFromTable, _setInsertFromTable) - - def insertElementNormal(self, token): - name = token["name"] - assert isinstance(name, text_type), "Element %s not unicode" % name - namespace = token.get("namespace", self.defaultNamespace) - element = self.elementClass(name, namespace) - element.attributes = token["data"] - self.openElements[-1].appendChild(element) - self.openElements.append(element) - return element - - def insertElementTable(self, token): - """Create an element and insert it into the tree""" - element = self.createElement(token) - if self.openElements[-1].name not in tableInsertModeElements: - return self.insertElementNormal(token) - else: - # We should be in the InTable mode. 
This means we want to do - # special magic element rearranging - parent, insertBefore = self.getTableMisnestedNodePosition() - if insertBefore is None: - parent.appendChild(element) - else: - parent.insertBefore(element, insertBefore) - self.openElements.append(element) - return element - - def insertText(self, data, parent=None): - """Insert text data.""" - if parent is None: - parent = self.openElements[-1] - - if (not self.insertFromTable or (self.insertFromTable and - self.openElements[-1].name - not in tableInsertModeElements)): - parent.insertText(data) - else: - # We should be in the InTable mode. This means we want to do - # special magic element rearranging - parent, insertBefore = self.getTableMisnestedNodePosition() - parent.insertText(data, insertBefore) - - def getTableMisnestedNodePosition(self): - """Get the foster parent element, and sibling to insert before - (or None) when inserting a misnested table node""" - # The foster parent element is the one which comes before the most - # recently opened table element - # XXX - this is really inelegant - lastTable = None - fosterParent = None - insertBefore = None - for elm in self.openElements[::-1]: - if elm.name == "table": - lastTable = elm - break - if lastTable: - # XXX - we should really check that this parent is actually a - # node here - if lastTable.parent: - fosterParent = lastTable.parent - insertBefore = lastTable - else: - fosterParent = self.openElements[ - self.openElements.index(lastTable) - 1] - else: - fosterParent = self.openElements[0] - return fosterParent, insertBefore - - def generateImpliedEndTags(self, exclude=None): - name = self.openElements[-1].name - # XXX td, th and tr are not actually needed - if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and - name != exclude): - self.openElements.pop() - # XXX This is not entirely what the specification says. We should - # investigate it more closely. 
- self.generateImpliedEndTags(exclude) - - def getDocument(self): - """Return the final tree""" - return self.document - - def getFragment(self): - """Return the final fragment""" - # assert self.innerHTML - fragment = self.fragmentClass() - self.openElements[0].reparentChildren(fragment) - return fragment - - def testSerializer(self, node): - """Serialize the subtree of node in the format required by unit tests - - :arg node: the node from which to start serializing - - """ - raise NotImplementedError diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/base.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/base.pyc deleted file mode 100644 index 86efcd2d..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/base.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.pyc deleted file mode 100644 index 3bc3e2d4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py deleted file mode 100644 index 0dedf441..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py +++ /dev/null @@ -1,340 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals -# pylint:disable=protected-access - -from pip._vendor.six import text_type - -import re - -from . import base -from .. import _ihatexml -from .. import constants -from ..constants import namespaces -from .._utils import moduleFactoryFactory - -tag_regexp = re.compile("{([^}]*)}(.*)") - - -def getETreeBuilder(ElementTreeImplementation, fullTree=False): - ElementTree = ElementTreeImplementation - ElementTreeCommentType = ElementTree.Comment("asd").tag - - class Element(base.Node): - def __init__(self, name, namespace=None): - self._name = name - self._namespace = namespace - self._element = ElementTree.Element(self._getETreeTag(name, - namespace)) - if namespace is None: - self.nameTuple = namespaces["html"], self._name - else: - self.nameTuple = self._namespace, self._name - self.parent = None - self._childNodes = [] - self._flags = [] - - def _getETreeTag(self, name, namespace): - if namespace is None: - etree_tag = name - else: - etree_tag = "{%s}%s" % (namespace, name) - return etree_tag - - def _setName(self, name): - self._name = name - self._element.tag = self._getETreeTag(self._name, self._namespace) - - def _getName(self): - return self._name - - name = property(_getName, _setName) - - def _setNamespace(self, namespace): - self._namespace = namespace - self._element.tag = self._getETreeTag(self._name, self._namespace) - - def _getNamespace(self): - return self._namespace - - namespace = property(_getNamespace, _setNamespace) - - def _getAttributes(self): - return self._element.attrib - - def _setAttributes(self, attributes): - # Delete existing attributes first - # XXX - there may be a better way to do this... 
- for key in list(self._element.attrib.keys()): - del self._element.attrib[key] - for key, value in attributes.items(): - if isinstance(key, tuple): - name = "{%s}%s" % (key[2], key[1]) - else: - name = key - self._element.set(name, value) - - attributes = property(_getAttributes, _setAttributes) - - def _getChildNodes(self): - return self._childNodes - - def _setChildNodes(self, value): - del self._element[:] - self._childNodes = [] - for element in value: - self.insertChild(element) - - childNodes = property(_getChildNodes, _setChildNodes) - - def hasContent(self): - """Return true if the node has children or text""" - return bool(self._element.text or len(self._element)) - - def appendChild(self, node): - self._childNodes.append(node) - self._element.append(node._element) - node.parent = self - - def insertBefore(self, node, refNode): - index = list(self._element).index(refNode._element) - self._element.insert(index, node._element) - node.parent = self - - def removeChild(self, node): - self._childNodes.remove(node) - self._element.remove(node._element) - node.parent = None - - def insertText(self, data, insertBefore=None): - if not(len(self._element)): - if not self._element.text: - self._element.text = "" - self._element.text += data - elif insertBefore is None: - # Insert the text as the tail of the last child element - if not self._element[-1].tail: - self._element[-1].tail = "" - self._element[-1].tail += data - else: - # Insert the text before the specified node - children = list(self._element) - index = children.index(insertBefore._element) - if index > 0: - if not self._element[index - 1].tail: - self._element[index - 1].tail = "" - self._element[index - 1].tail += data - else: - if not self._element.text: - self._element.text = "" - self._element.text += data - - def cloneNode(self): - element = type(self)(self.name, self.namespace) - for name, value in self.attributes.items(): - element.attributes[name] = value - return element - - def reparentChildren(self, newParent): - if newParent.childNodes: - newParent.childNodes[-1]._element.tail += self._element.text - else: - if not newParent._element.text: - newParent._element.text = "" - if self._element.text is not None: - newParent._element.text += self._element.text - self._element.text = "" - base.Node.reparentChildren(self, newParent) - - class Comment(Element): - def __init__(self, data): - # Use the superclass constructor to set all properties on the - # wrapper element - self._element = ElementTree.Comment(data) - self.parent = None - self._childNodes = [] - self._flags = [] - - def _getData(self): - return self._element.text - - def _setData(self, value): - self._element.text = value - - data = property(_getData, _setData) - - class DocumentType(Element): - def __init__(self, name, publicId, systemId): - Element.__init__(self, "<!DOCTYPE>") - self._element.text = name - self.publicId = publicId - self.systemId = systemId - - def _getPublicId(self): - return self._element.get("publicId", "") - - def _setPublicId(self, value): - if value is not None: - self._element.set("publicId", value) - - publicId = property(_getPublicId, _setPublicId) - - def _getSystemId(self): - return self._element.get("systemId", "") - - def _setSystemId(self, value): - if value is not None: - self._element.set("systemId", value) - - systemId = property(_getSystemId, _setSystemId) - - class Document(Element): - def __init__(self): - Element.__init__(self, "DOCUMENT_ROOT") - - class DocumentFragment(Element): - def __init__(self): - 
Element.__init__(self, "DOCUMENT_FRAGMENT") - - def testSerializer(element): - rv = [] - - def serializeElement(element, indent=0): - if not(hasattr(element, "tag")): - element = element.getroot() - if element.tag == "<!DOCTYPE>": - if element.get("publicId") or element.get("systemId"): - publicId = element.get("publicId") or "" - systemId = element.get("systemId") or "" - rv.append("""<!DOCTYPE %s "%s" "%s">""" % - (element.text, publicId, systemId)) - else: - rv.append("<!DOCTYPE %s>" % (element.text,)) - elif element.tag == "DOCUMENT_ROOT": - rv.append("#document") - if element.text is not None: - rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) - if element.tail is not None: - raise TypeError("Document node cannot have tail") - if hasattr(element, "attrib") and len(element.attrib): - raise TypeError("Document node cannot have attributes") - elif element.tag == ElementTreeCommentType: - rv.append("|%s<!-- %s -->" % (' ' * indent, element.text)) - else: - assert isinstance(element.tag, text_type), \ - "Expected unicode, got %s, %s" % (type(element.tag), element.tag) - nsmatch = tag_regexp.match(element.tag) - - if nsmatch is None: - name = element.tag - else: - ns, name = nsmatch.groups() - prefix = constants.prefixes[ns] - name = "%s %s" % (prefix, name) - rv.append("|%s<%s>" % (' ' * indent, name)) - - if hasattr(element, "attrib"): - attributes = [] - for name, value in element.attrib.items(): - nsmatch = tag_regexp.match(name) - if nsmatch is not None: - ns, name = nsmatch.groups() - prefix = constants.prefixes[ns] - attr_string = "%s %s" % (prefix, name) - else: - attr_string = name - attributes.append((attr_string, value)) - - for name, value in sorted(attributes): - rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) - if element.text: - rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) - indent += 2 - for child in element: - serializeElement(child, indent) - if element.tail: - rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail)) - serializeElement(element, 0) - - return "\n".join(rv) - - def tostring(element): # pylint:disable=unused-variable - """Serialize an element and its child nodes to a string""" - rv = [] - filter = _ihatexml.InfosetFilter() - - def serializeElement(element): - if isinstance(element, ElementTree.ElementTree): - element = element.getroot() - - if element.tag == "<!DOCTYPE>": - if element.get("publicId") or element.get("systemId"): - publicId = element.get("publicId") or "" - systemId = element.get("systemId") or "" - rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" % - (element.text, publicId, systemId)) - else: - rv.append("<!DOCTYPE %s>" % (element.text,)) - elif element.tag == "DOCUMENT_ROOT": - if element.text is not None: - rv.append(element.text) - if element.tail is not None: - raise TypeError("Document node cannot have tail") - if hasattr(element, "attrib") and len(element.attrib): - raise TypeError("Document node cannot have attributes") - - for child in element: - serializeElement(child) - - elif element.tag == ElementTreeCommentType: - rv.append("<!--%s-->" % (element.text,)) - else: - # This is assumed to be an ordinary element - if not element.attrib: - rv.append("<%s>" % (filter.fromXmlName(element.tag),)) - else: - attr = " ".join(["%s=\"%s\"" % ( - filter.fromXmlName(name), value) - for name, value in element.attrib.items()]) - rv.append("<%s %s>" % (element.tag, attr)) - if element.text: - rv.append(element.text) - - for child in element: - serializeElement(child) - - rv.append("</%s>" % (element.tag,)) - - 
if element.tail: - rv.append(element.tail) - - serializeElement(element) - - return "".join(rv) - - class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable - documentClass = Document - doctypeClass = DocumentType - elementClass = Element - commentClass = Comment - fragmentClass = DocumentFragment - implementation = ElementTreeImplementation - - def testSerializer(self, element): - return testSerializer(element) - - def getDocument(self): - if fullTree: - return self.document._element - else: - if self.defaultNamespace is not None: - return self.document._element.find( - "{%s}html" % self.defaultNamespace) - else: - return self.document._element.find("html") - - def getFragment(self): - return base.TreeBuilder.getFragment(self)._element - - return locals() - - -getETreeModule = moduleFactoryFactory(getETreeBuilder) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.pyc deleted file mode 100644 index 92d61b73..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py deleted file mode 100644 index ca12a99c..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py +++ /dev/null @@ -1,366 +0,0 @@ -"""Module for supporting the lxml.etree library. The idea here is to use as much -of the native library as possible, without using fragile hacks like custom element -names that break between releases. The downside of this is that we cannot represent -all possible trees; specifically the following are known to cause problems: - -Text or comments as siblings of the root element -Docypes with no name - -When any of these things occur, we emit a DataLossWarning -""" - -from __future__ import absolute_import, division, unicode_literals -# pylint:disable=protected-access - -import warnings -import re -import sys - -from . import base -from ..constants import DataLossWarning -from .. import constants -from . import etree as etree_builders -from .. 
import _ihatexml - -import lxml.etree as etree - - -fullTree = True -tag_regexp = re.compile("{([^}]*)}(.*)") - -comment_type = etree.Comment("asd").tag - - -class DocumentType(object): - def __init__(self, name, publicId, systemId): - self.name = name - self.publicId = publicId - self.systemId = systemId - - -class Document(object): - def __init__(self): - self._elementTree = None - self._childNodes = [] - - def appendChild(self, element): - self._elementTree.getroot().addnext(element._element) - - def _getChildNodes(self): - return self._childNodes - - childNodes = property(_getChildNodes) - - -def testSerializer(element): - rv = [] - infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) - - def serializeElement(element, indent=0): - if not hasattr(element, "tag"): - if hasattr(element, "getroot"): - # Full tree case - rv.append("#document") - if element.docinfo.internalDTD: - if not (element.docinfo.public_id or - element.docinfo.system_url): - dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name - else: - dtd_str = """<!DOCTYPE %s "%s" "%s">""" % ( - element.docinfo.root_name, - element.docinfo.public_id, - element.docinfo.system_url) - rv.append("|%s%s" % (' ' * (indent + 2), dtd_str)) - next_element = element.getroot() - while next_element.getprevious() is not None: - next_element = next_element.getprevious() - while next_element is not None: - serializeElement(next_element, indent + 2) - next_element = next_element.getnext() - elif isinstance(element, str) or isinstance(element, bytes): - # Text in a fragment - assert isinstance(element, str) or sys.version_info[0] == 2 - rv.append("|%s\"%s\"" % (' ' * indent, element)) - else: - # Fragment case - rv.append("#document-fragment") - for next_element in element: - serializeElement(next_element, indent + 2) - elif element.tag == comment_type: - rv.append("|%s<!-- %s -->" % (' ' * indent, element.text)) - if hasattr(element, "tail") and element.tail: - rv.append("|%s\"%s\"" % (' ' * indent, element.tail)) - else: - assert isinstance(element, etree._Element) - nsmatch = etree_builders.tag_regexp.match(element.tag) - if nsmatch is not None: - ns = nsmatch.group(1) - tag = nsmatch.group(2) - prefix = constants.prefixes[ns] - rv.append("|%s<%s %s>" % (' ' * indent, prefix, - infosetFilter.fromXmlName(tag))) - else: - rv.append("|%s<%s>" % (' ' * indent, - infosetFilter.fromXmlName(element.tag))) - - if hasattr(element, "attrib"): - attributes = [] - for name, value in element.attrib.items(): - nsmatch = tag_regexp.match(name) - if nsmatch is not None: - ns, name = nsmatch.groups() - name = infosetFilter.fromXmlName(name) - prefix = constants.prefixes[ns] - attr_string = "%s %s" % (prefix, name) - else: - attr_string = infosetFilter.fromXmlName(name) - attributes.append((attr_string, value)) - - for name, value in sorted(attributes): - rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) - - if element.text: - rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) - indent += 2 - for child in element: - serializeElement(child, indent) - if hasattr(element, "tail") and element.tail: - rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail)) - serializeElement(element, 0) - - return "\n".join(rv) - - -def tostring(element): - """Serialize an element and its child nodes to a string""" - rv = [] - - def serializeElement(element): - if not hasattr(element, "tag"): - if element.docinfo.internalDTD: - if element.docinfo.doctype: - dtd_str = element.docinfo.doctype - else: - dtd_str = "<!DOCTYPE %s>" % 
element.docinfo.root_name - rv.append(dtd_str) - serializeElement(element.getroot()) - - elif element.tag == comment_type: - rv.append("<!--%s-->" % (element.text,)) - - else: - # This is assumed to be an ordinary element - if not element.attrib: - rv.append("<%s>" % (element.tag,)) - else: - attr = " ".join(["%s=\"%s\"" % (name, value) - for name, value in element.attrib.items()]) - rv.append("<%s %s>" % (element.tag, attr)) - if element.text: - rv.append(element.text) - - for child in element: - serializeElement(child) - - rv.append("</%s>" % (element.tag,)) - - if hasattr(element, "tail") and element.tail: - rv.append(element.tail) - - serializeElement(element) - - return "".join(rv) - - -class TreeBuilder(base.TreeBuilder): - documentClass = Document - doctypeClass = DocumentType - elementClass = None - commentClass = None - fragmentClass = Document - implementation = etree - - def __init__(self, namespaceHTMLElements, fullTree=False): - builder = etree_builders.getETreeModule(etree, fullTree=fullTree) - infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) - self.namespaceHTMLElements = namespaceHTMLElements - - class Attributes(dict): - def __init__(self, element, value=None): - if value is None: - value = {} - self._element = element - dict.__init__(self, value) # pylint:disable=non-parent-init-called - for key, value in self.items(): - if isinstance(key, tuple): - name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) - else: - name = infosetFilter.coerceAttribute(key) - self._element._element.attrib[name] = value - - def __setitem__(self, key, value): - dict.__setitem__(self, key, value) - if isinstance(key, tuple): - name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) - else: - name = infosetFilter.coerceAttribute(key) - self._element._element.attrib[name] = value - - class Element(builder.Element): - def __init__(self, name, namespace): - name = infosetFilter.coerceElement(name) - builder.Element.__init__(self, name, namespace=namespace) - self._attributes = Attributes(self) - - def _setName(self, name): - self._name = infosetFilter.coerceElement(name) - self._element.tag = self._getETreeTag( - self._name, self._namespace) - - def _getName(self): - return infosetFilter.fromXmlName(self._name) - - name = property(_getName, _setName) - - def _getAttributes(self): - return self._attributes - - def _setAttributes(self, attributes): - self._attributes = Attributes(self, attributes) - - attributes = property(_getAttributes, _setAttributes) - - def insertText(self, data, insertBefore=None): - data = infosetFilter.coerceCharacters(data) - builder.Element.insertText(self, data, insertBefore) - - def appendChild(self, child): - builder.Element.appendChild(self, child) - - class Comment(builder.Comment): - def __init__(self, data): - data = infosetFilter.coerceComment(data) - builder.Comment.__init__(self, data) - - def _setData(self, data): - data = infosetFilter.coerceComment(data) - self._element.text = data - - def _getData(self): - return self._element.text - - data = property(_getData, _setData) - - self.elementClass = Element - self.commentClass = Comment - # self.fragmentClass = builder.DocumentFragment - base.TreeBuilder.__init__(self, namespaceHTMLElements) - - def reset(self): - base.TreeBuilder.reset(self) - self.insertComment = self.insertCommentInitial - self.initial_comments = [] - self.doctype = None - - def testSerializer(self, element): - return testSerializer(element) - - def getDocument(self): - if 
fullTree: - return self.document._elementTree - else: - return self.document._elementTree.getroot() - - def getFragment(self): - fragment = [] - element = self.openElements[0]._element - if element.text: - fragment.append(element.text) - fragment.extend(list(element)) - if element.tail: - fragment.append(element.tail) - return fragment - - def insertDoctype(self, token): - name = token["name"] - publicId = token["publicId"] - systemId = token["systemId"] - - if not name: - warnings.warn("lxml cannot represent empty doctype", DataLossWarning) - self.doctype = None - else: - coercedName = self.infosetFilter.coerceElement(name) - if coercedName != name: - warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning) - - doctype = self.doctypeClass(coercedName, publicId, systemId) - self.doctype = doctype - - def insertCommentInitial(self, data, parent=None): - assert parent is None or parent is self.document - assert self.document._elementTree is None - self.initial_comments.append(data) - - def insertCommentMain(self, data, parent=None): - if (parent == self.document and - self.document._elementTree.getroot()[-1].tag == comment_type): - warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning) - super(TreeBuilder, self).insertComment(data, parent) - - def insertRoot(self, token): - # Because of the way libxml2 works, it doesn't seem to be possible to - # alter information like the doctype after the tree has been parsed. - # Therefore we need to use the built-in parser to create our initial - # tree, after which we can add elements like normal - docStr = "" - if self.doctype: - assert self.doctype.name - docStr += "<!DOCTYPE %s" % self.doctype.name - if (self.doctype.publicId is not None or - self.doctype.systemId is not None): - docStr += (' PUBLIC "%s" ' % - (self.infosetFilter.coercePubid(self.doctype.publicId or ""))) - if self.doctype.systemId: - sysid = self.doctype.systemId - if sysid.find("'") >= 0 and sysid.find('"') >= 0: - warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning) - sysid = sysid.replace("'", 'U00027') - if sysid.find("'") >= 0: - docStr += '"%s"' % sysid - else: - docStr += "'%s'" % sysid - else: - docStr += "''" - docStr += ">" - if self.doctype.name != token["name"]: - warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning) - docStr += "<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>" - root = etree.fromstring(docStr) - - # Append the initial comments: - for comment_token in self.initial_comments: - comment = self.commentClass(comment_token["data"]) - root.addprevious(comment._element) - - # Create the root document and add the ElementTree to it - self.document = self.documentClass() - self.document._elementTree = root.getroottree() - - # Give the root element the right name - name = token["name"] - namespace = token.get("namespace", self.defaultNamespace) - if namespace is None: - etree_tag = name - else: - etree_tag = "{%s}%s" % (namespace, name) - root.tag = etree_tag - - # Add the root element to the internal child/open data structures - root_element = self.elementClass(name, namespace) - root_element._element = root - self.document._childNodes.append(root_element) - self.openElements.append(root_element) - - # Reset to the default insert comment function - self.insertComment = self.insertCommentMain diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.pyc 
b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.pyc deleted file mode 100644 index c250eee9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py deleted file mode 100644 index 9bec2076..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py +++ /dev/null @@ -1,154 +0,0 @@ -"""A collection of modules for iterating through different kinds of -tree, generating tokens identical to those produced by the tokenizer -module. - -To create a tree walker for a new type of tree, you need to do -implement a tree walker object (called TreeWalker by convention) that -implements a 'serialize' method taking a tree as sole argument and -returning an iterator generating tokens. -""" - -from __future__ import absolute_import, division, unicode_literals - -from .. import constants -from .._utils import default_etree - -__all__ = ["getTreeWalker", "pprint"] - -treeWalkerCache = {} - - -def getTreeWalker(treeType, implementation=None, **kwargs): - """Get a TreeWalker class for various types of tree with built-in support - - :arg str treeType: the name of the tree type required (case-insensitive). - Supported values are: - - * "dom": The xml.dom.minidom DOM implementation - * "etree": A generic walker for tree implementations exposing an - elementtree-like interface (known to work with ElementTree, - cElementTree and lxml.etree). - * "lxml": Optimized walker for lxml.etree - * "genshi": a Genshi stream - - :arg implementation: A module implementing the tree type e.g. - xml.etree.ElementTree or cElementTree (Currently applies to the "etree" - tree type only). - - :arg kwargs: keyword arguments passed to the etree walker--for other - walkers, this has no effect - - :returns: a TreeWalker class - - """ - - treeType = treeType.lower() - if treeType not in treeWalkerCache: - if treeType == "dom": - from . import dom - treeWalkerCache[treeType] = dom.TreeWalker - elif treeType == "genshi": - from . import genshi - treeWalkerCache[treeType] = genshi.TreeWalker - elif treeType == "lxml": - from . import etree_lxml - treeWalkerCache[treeType] = etree_lxml.TreeWalker - elif treeType == "etree": - from . import etree - if implementation is None: - implementation = default_etree - # XXX: NEVER cache here, caching is done in the etree submodule - return etree.getETreeModule(implementation, **kwargs).TreeWalker - return treeWalkerCache.get(treeType) - - -def concatenateCharacterTokens(tokens): - pendingCharacters = [] - for token in tokens: - type = token["type"] - if type in ("Characters", "SpaceCharacters"): - pendingCharacters.append(token["data"]) - else: - if pendingCharacters: - yield {"type": "Characters", "data": "".join(pendingCharacters)} - pendingCharacters = [] - yield token - if pendingCharacters: - yield {"type": "Characters", "data": "".join(pendingCharacters)} - - -def pprint(walker): - """Pretty printer for tree walkers - - Takes a TreeWalker instance and pretty prints the output of walking the tree. 
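
The getTreeWalker() docstring above lists the supported tree types, and pprint() renders a walker's token stream for debugging. A short usage sketch against a standalone html5lib install (inside pip the import prefix would be pip._vendor.html5lib instead):

    import html5lib
    from html5lib.treewalkers import getTreeWalker, pprint

    fragment = html5lib.parseFragment("<p class='x'>Hi <b>there</b></p>")  # etree-backed by default
    walker = getTreeWalker("etree")       # walker class matching that tree type
    print(pprint(walker(fragment)))       # indented dump of the token stream
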
- - :arg walker: a TreeWalker instance - - """ - output = [] - indent = 0 - for token in concatenateCharacterTokens(walker): - type = token["type"] - if type in ("StartTag", "EmptyTag"): - # tag name - if token["namespace"] and token["namespace"] != constants.namespaces["html"]: - if token["namespace"] in constants.prefixes: - ns = constants.prefixes[token["namespace"]] - else: - ns = token["namespace"] - name = "%s %s" % (ns, token["name"]) - else: - name = token["name"] - output.append("%s<%s>" % (" " * indent, name)) - indent += 2 - # attributes (sorted for consistent ordering) - attrs = token["data"] - for (namespace, localname), value in sorted(attrs.items()): - if namespace: - if namespace in constants.prefixes: - ns = constants.prefixes[namespace] - else: - ns = namespace - name = "%s %s" % (ns, localname) - else: - name = localname - output.append("%s%s=\"%s\"" % (" " * indent, name, value)) - # self-closing - if type == "EmptyTag": - indent -= 2 - - elif type == "EndTag": - indent -= 2 - - elif type == "Comment": - output.append("%s<!-- %s -->" % (" " * indent, token["data"])) - - elif type == "Doctype": - if token["name"]: - if token["publicId"]: - output.append("""%s<!DOCTYPE %s "%s" "%s">""" % - (" " * indent, - token["name"], - token["publicId"], - token["systemId"] if token["systemId"] else "")) - elif token["systemId"]: - output.append("""%s<!DOCTYPE %s "" "%s">""" % - (" " * indent, - token["name"], - token["systemId"])) - else: - output.append("%s<!DOCTYPE %s>" % (" " * indent, - token["name"])) - else: - output.append("%s<!DOCTYPE >" % (" " * indent,)) - - elif type == "Characters": - output.append("%s\"%s\"" % (" " * indent, token["data"])) - - elif type == "SpaceCharacters": - assert False, "concatenateCharacterTokens should have got rid of all Space tokens" - - else: - raise ValueError("Unknown token type, %s" % type) - - return "\n".join(output) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.pyc deleted file mode 100644 index d013b688..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/base.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/base.pyc deleted file mode 100644 index 30c707a9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/base.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.pyc deleted file mode 100644 index a5593fa1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.py deleted file mode 100644 index 95fc0c17..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.py +++ /dev/null @@ -1,130 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from collections import OrderedDict -import re - -from pip._vendor.six import string_types - -from . 
import base -from .._utils import moduleFactoryFactory - -tag_regexp = re.compile("{([^}]*)}(.*)") - - -def getETreeBuilder(ElementTreeImplementation): - ElementTree = ElementTreeImplementation - ElementTreeCommentType = ElementTree.Comment("asd").tag - - class TreeWalker(base.NonRecursiveTreeWalker): # pylint:disable=unused-variable - """Given the particular ElementTree representation, this implementation, - to avoid using recursion, returns "nodes" as tuples with the following - content: - - 1. The current element - - 2. The index of the element relative to its parent - - 3. A stack of ancestor elements - - 4. A flag "text", "tail" or None to indicate if the current node is a - text node; either the text or tail of the current element (1) - """ - def getNodeDetails(self, node): - if isinstance(node, tuple): # It might be the root Element - elt, _, _, flag = node - if flag in ("text", "tail"): - return base.TEXT, getattr(elt, flag) - else: - node = elt - - if not(hasattr(node, "tag")): - node = node.getroot() - - if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"): - return (base.DOCUMENT,) - - elif node.tag == "<!DOCTYPE>": - return (base.DOCTYPE, node.text, - node.get("publicId"), node.get("systemId")) - - elif node.tag == ElementTreeCommentType: - return base.COMMENT, node.text - - else: - assert isinstance(node.tag, string_types), type(node.tag) - # This is assumed to be an ordinary element - match = tag_regexp.match(node.tag) - if match: - namespace, tag = match.groups() - else: - namespace = None - tag = node.tag - attrs = OrderedDict() - for name, value in list(node.attrib.items()): - match = tag_regexp.match(name) - if match: - attrs[(match.group(1), match.group(2))] = value - else: - attrs[(None, name)] = value - return (base.ELEMENT, namespace, tag, - attrs, len(node) or node.text) - - def getFirstChild(self, node): - if isinstance(node, tuple): - element, key, parents, flag = node - else: - element, key, parents, flag = node, None, [], None - - if flag in ("text", "tail"): - return None - else: - if element.text: - return element, key, parents, "text" - elif len(element): - parents.append(element) - return element[0], 0, parents, None - else: - return None - - def getNextSibling(self, node): - if isinstance(node, tuple): - element, key, parents, flag = node - else: - return None - - if flag == "text": - if len(element): - parents.append(element) - return element[0], 0, parents, None - else: - return None - else: - if element.tail and flag != "tail": - return element, key, parents, "tail" - elif key < len(parents[-1]) - 1: - return parents[-1][key + 1], key + 1, parents, None - else: - return None - - def getParentNode(self, node): - if isinstance(node, tuple): - element, key, parents, flag = node - else: - return None - - if flag == "text": - if not parents: - return element - else: - return element, key, parents, None - else: - parent = parents.pop() - if not parents: - return parent - else: - assert list(parents[-1]).count(parent) == 1 - return parent, list(parents[-1]).index(parent), parents, None - - return locals() - -getETreeModule = moduleFactoryFactory(getETreeBuilder) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.pyc deleted file mode 100644 index 2349dc59..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py deleted file mode 100644 index e81ddf33..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py +++ /dev/null @@ -1,213 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals -from pip._vendor.six import text_type - -from lxml import etree -from ..treebuilders.etree import tag_regexp - -from . import base - -from .. import _ihatexml - - -def ensure_str(s): - if s is None: - return None - elif isinstance(s, text_type): - return s - else: - return s.decode("ascii", "strict") - - -class Root(object): - def __init__(self, et): - self.elementtree = et - self.children = [] - - try: - if et.docinfo.internalDTD: - self.children.append(Doctype(self, - ensure_str(et.docinfo.root_name), - ensure_str(et.docinfo.public_id), - ensure_str(et.docinfo.system_url))) - except AttributeError: - pass - - try: - node = et.getroot() - except AttributeError: - node = et - - while node.getprevious() is not None: - node = node.getprevious() - while node is not None: - self.children.append(node) - node = node.getnext() - - self.text = None - self.tail = None - - def __getitem__(self, key): - return self.children[key] - - def getnext(self): - return None - - def __len__(self): - return 1 - - -class Doctype(object): - def __init__(self, root_node, name, public_id, system_id): - self.root_node = root_node - self.name = name - self.public_id = public_id - self.system_id = system_id - - self.text = None - self.tail = None - - def getnext(self): - return self.root_node.children[1] - - -class FragmentRoot(Root): - def __init__(self, children): - self.children = [FragmentWrapper(self, child) for child in children] - self.text = self.tail = None - - def getnext(self): - return None - - -class FragmentWrapper(object): - def __init__(self, fragment_root, obj): - self.root_node = fragment_root - self.obj = obj - if hasattr(self.obj, 'text'): - self.text = ensure_str(self.obj.text) - else: - self.text = None - if hasattr(self.obj, 'tail'): - self.tail = ensure_str(self.obj.tail) - else: - self.tail = None - - def __getattr__(self, name): - return getattr(self.obj, name) - - def getnext(self): - siblings = self.root_node.children - idx = siblings.index(self) - if idx < len(siblings) - 1: - return siblings[idx + 1] - else: - return None - - def __getitem__(self, key): - return self.obj[key] - - def __bool__(self): - return bool(self.obj) - - def getparent(self): - return None - - def __str__(self): - return str(self.obj) - - def __unicode__(self): - return str(self.obj) - - def __len__(self): - return len(self.obj) - - -class TreeWalker(base.NonRecursiveTreeWalker): - def __init__(self, tree): - # pylint:disable=redefined-variable-type - if isinstance(tree, list): - self.fragmentChildren = set(tree) - tree = FragmentRoot(tree) - else: - self.fragmentChildren = set() - tree = Root(tree) - base.NonRecursiveTreeWalker.__init__(self, tree) - self.filter = _ihatexml.InfosetFilter() - - def getNodeDetails(self, node): - if isinstance(node, tuple): # Text node - node, key = node - assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key - return base.TEXT, ensure_str(getattr(node, key)) - - elif isinstance(node, Root): - return (base.DOCUMENT,) - - elif isinstance(node, Doctype): - return base.DOCTYPE, node.name, node.public_id, node.system_id - - elif isinstance(node, FragmentWrapper) and not 
hasattr(node, "tag"): - return base.TEXT, ensure_str(node.obj) - - elif node.tag == etree.Comment: - return base.COMMENT, ensure_str(node.text) - - elif node.tag == etree.Entity: - return base.ENTITY, ensure_str(node.text)[1:-1] # strip &; - - else: - # This is assumed to be an ordinary element - match = tag_regexp.match(ensure_str(node.tag)) - if match: - namespace, tag = match.groups() - else: - namespace = None - tag = ensure_str(node.tag) - attrs = {} - for name, value in list(node.attrib.items()): - name = ensure_str(name) - value = ensure_str(value) - match = tag_regexp.match(name) - if match: - attrs[(match.group(1), match.group(2))] = value - else: - attrs[(None, name)] = value - return (base.ELEMENT, namespace, self.filter.fromXmlName(tag), - attrs, len(node) > 0 or node.text) - - def getFirstChild(self, node): - assert not isinstance(node, tuple), "Text nodes have no children" - - assert len(node) or node.text, "Node has no children" - if node.text: - return (node, "text") - else: - return node[0] - - def getNextSibling(self, node): - if isinstance(node, tuple): # Text node - node, key = node - assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key - if key == "text": - # XXX: we cannot use a "bool(node) and node[0] or None" construct here - # because node[0] might evaluate to False if it has no child element - if len(node): - return node[0] - else: - return None - else: # tail - return node.getnext() - - return (node, "tail") if node.tail else node.getnext() - - def getParentNode(self, node): - if isinstance(node, tuple): # Text node - node, key = node - assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key - if key == "text": - return node - # else: fallback to "normal" processing - elif node in self.fragmentChildren: - return None - - return node.getparent() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.pyc deleted file mode 100644 index cbf04ed6..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.pyc b/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.pyc deleted file mode 100644 index 4749a75e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/idna/__init__.pyc deleted file mode 100644 index 00c130f7..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/idna/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/codec.py b/env/lib/python2.7/site-packages/pip/_vendor/idna/codec.py deleted file mode 100644 index 98c65ead..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/idna/codec.py +++ /dev/null @@ -1,118 +0,0 @@ -from .core import encode, decode, alabel, ulabel, IDNAError -import codecs -import re - -_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') - -class Codec(codecs.Codec): - - def encode(self, data, errors='strict'): - - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return "", 0 - - return encode(data), len(data) - - def decode(self, data, errors='strict'): - - if errors != 'strict': - raise 
IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return u"", 0 - - return decode(data), len(data) - -class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data, errors, final): - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return ("", 0) - - labels = _unicode_dots_re.split(data) - trailing_dot = u'' - if labels: - if not labels[-1]: - trailing_dot = '.' - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = '.' - - result = [] - size = 0 - for label in labels: - result.append(alabel(label)) - if size: - size += 1 - size += len(label) - - # Join with U+002E - result = ".".join(result) + trailing_dot - size += len(trailing_dot) - return (result, size) - -class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data, errors, final): - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return (u"", 0) - - # IDNA allows decoding to operate on Unicode strings, too. - if isinstance(data, unicode): - labels = _unicode_dots_re.split(data) - else: - # Must be ASCII string - data = str(data) - unicode(data, "ascii") - labels = data.split(".") - - trailing_dot = u'' - if labels: - if not labels[-1]: - trailing_dot = u'.' - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = u'.' - - result = [] - size = 0 - for label in labels: - result.append(ulabel(label)) - if size: - size += 1 - size += len(label) - - result = u".".join(result) + trailing_dot - size += len(trailing_dot) - return (result, size) - - -class StreamWriter(Codec, codecs.StreamWriter): - pass - -class StreamReader(Codec, codecs.StreamReader): - pass - -def getregentry(): - return codecs.CodecInfo( - name='idna', - encode=Codec().encode, - decode=Codec().decode, - incrementalencoder=IncrementalEncoder, - incrementaldecoder=IncrementalDecoder, - streamwriter=StreamWriter, - streamreader=StreamReader, - ) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/codec.pyc b/env/lib/python2.7/site-packages/pip/_vendor/idna/codec.pyc deleted file mode 100644 index c6425709..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/idna/codec.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/compat.py b/env/lib/python2.7/site-packages/pip/_vendor/idna/compat.py deleted file mode 100644 index 4d47f336..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/idna/compat.py +++ /dev/null @@ -1,12 +0,0 @@ -from .core import * -from .codec import * - -def ToASCII(label): - return encode(label) - -def ToUnicode(label): - return decode(label) - -def nameprep(s): - raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") - diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/compat.pyc b/env/lib/python2.7/site-packages/pip/_vendor/idna/compat.pyc deleted file mode 100644 index d9be94a5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/idna/compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/core.py b/env/lib/python2.7/site-packages/pip/_vendor/idna/core.py deleted file mode 100644 index 104624ad..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/idna/core.py +++ /dev/null @@ -1,396 +0,0 @@ -from . 
import idnadata -import bisect -import unicodedata -import re -import sys -from .intranges import intranges_contain - -_virama_combining_class = 9 -_alabel_prefix = b'xn--' -_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') - -if sys.version_info[0] == 3: - unicode = str - unichr = chr - -class IDNAError(UnicodeError): - """ Base exception for all IDNA-encoding related problems """ - pass - - -class IDNABidiError(IDNAError): - """ Exception when bidirectional requirements are not satisfied """ - pass - - -class InvalidCodepoint(IDNAError): - """ Exception when a disallowed or unallocated codepoint is used """ - pass - - -class InvalidCodepointContext(IDNAError): - """ Exception when the codepoint is not valid in the context it is used """ - pass - - -def _combining_class(cp): - v = unicodedata.combining(unichr(cp)) - if v == 0: - if not unicodedata.name(unichr(cp)): - raise ValueError("Unknown character in unicodedata") - return v - -def _is_script(cp, script): - return intranges_contain(ord(cp), idnadata.scripts[script]) - -def _punycode(s): - return s.encode('punycode') - -def _unot(s): - return 'U+{0:04X}'.format(s) - - -def valid_label_length(label): - - if len(label) > 63: - return False - return True - - -def valid_string_length(label, trailing_dot): - - if len(label) > (254 if trailing_dot else 253): - return False - return True - - -def check_bidi(label, check_ltr=False): - - # Bidi rules should only be applied if string contains RTL characters - bidi_label = False - for (idx, cp) in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - if direction == '': - # String likely comes from a newer version of Unicode - raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) - if direction in ['R', 'AL', 'AN']: - bidi_label = True - if not bidi_label and not check_ltr: - return True - - # Bidi rule 1 - direction = unicodedata.bidirectional(label[0]) - if direction in ['R', 'AL']: - rtl = True - elif direction == 'L': - rtl = False - else: - raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) - - valid_ending = False - number_type = False - for (idx, cp) in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - - if rtl: - # Bidi rule 2 - if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) - # Bidi rule 3 - if direction in ['R', 'AL', 'EN', 'AN']: - valid_ending = True - elif direction != 'NSM': - valid_ending = False - # Bidi rule 4 - if direction in ['AN', 'EN']: - if not number_type: - number_type = direction - else: - if number_type != direction: - raise IDNABidiError('Can not mix numeral types in a right-to-left label') - else: - # Bidi rule 5 - if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) - # Bidi rule 6 - if direction in ['L', 'EN']: - valid_ending = True - elif direction != 'NSM': - valid_ending = False - - if not valid_ending: - raise IDNABidiError('Label ends with illegal codepoint directionality') - - return True - - -def check_initial_combiner(label): - - if unicodedata.category(label[0])[0] == 'M': - raise IDNAError('Label begins with an illegal combining character') - return True - - -def check_hyphen_ok(label): - - if label[2:4] == '--': - raise IDNAError('Label has 
disallowed hyphens in 3rd and 4th position') - if label[0] == '-' or label[-1] == '-': - raise IDNAError('Label must not start or end with a hyphen') - return True - - -def check_nfc(label): - - if unicodedata.normalize('NFC', label) != label: - raise IDNAError('Label must be in Normalization Form C') - - -def valid_contextj(label, pos): - - cp_value = ord(label[pos]) - - if cp_value == 0x200c: - - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - - ok = False - for i in range(pos-1, -1, -1): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord('T'): - continue - if joining_type in [ord('L'), ord('D')]: - ok = True - break - - if not ok: - return False - - ok = False - for i in range(pos+1, len(label)): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord('T'): - continue - if joining_type in [ord('R'), ord('D')]: - ok = True - break - return ok - - if cp_value == 0x200d: - - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - return False - - else: - - return False - - -def valid_contexto(label, pos, exception=False): - - cp_value = ord(label[pos]) - - if cp_value == 0x00b7: - if 0 < pos < len(label)-1: - if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: - return True - return False - - elif cp_value == 0x0375: - if pos < len(label)-1 and len(label) > 1: - return _is_script(label[pos + 1], 'Greek') - return False - - elif cp_value == 0x05f3 or cp_value == 0x05f4: - if pos > 0: - return _is_script(label[pos - 1], 'Hebrew') - return False - - elif cp_value == 0x30fb: - for cp in label: - if cp == u'\u30fb': - continue - if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): - return True - return False - - elif 0x660 <= cp_value <= 0x669: - for cp in label: - if 0x6f0 <= ord(cp) <= 0x06f9: - return False - return True - - elif 0x6f0 <= cp_value <= 0x6f9: - for cp in label: - if 0x660 <= ord(cp) <= 0x0669: - return False - return True - - -def check_label(label): - - if isinstance(label, (bytes, bytearray)): - label = label.decode('utf-8') - if len(label) == 0: - raise IDNAError('Empty Label') - - check_nfc(label) - check_hyphen_ok(label) - check_initial_combiner(label) - - for (pos, cp) in enumerate(label): - cp_value = ord(cp) - if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): - continue - elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): - try: - if not valid_contextj(label, pos): - raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format( - _unot(cp_value), pos+1, repr(label))) - except ValueError: - raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format( - _unot(cp_value), pos+1, repr(label))) - elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): - if not valid_contexto(label, pos): - raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) - else: - raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) - - check_bidi(label) - - -def alabel(label): - - try: - label = label.encode('ascii') - ulabel(label) - if not valid_label_length(label): - raise IDNAError('Label too long') - return label - except UnicodeEncodeError: - pass - - if not label: - raise IDNAError('No Input') - - label = unicode(label) - check_label(label) - 
label = _punycode(label) - label = _alabel_prefix + label - - if not valid_label_length(label): - raise IDNAError('Label too long') - - return label - - -def ulabel(label): - - if not isinstance(label, (bytes, bytearray)): - try: - label = label.encode('ascii') - except UnicodeEncodeError: - check_label(label) - return label - - label = label.lower() - if label.startswith(_alabel_prefix): - label = label[len(_alabel_prefix):] - else: - check_label(label) - return label.decode('ascii') - - label = label.decode('punycode') - check_label(label) - return label - - -def uts46_remap(domain, std3_rules=True, transitional=False): - """Re-map the characters in the string according to UTS46 processing.""" - from .uts46data import uts46data - output = u"" - try: - for pos, char in enumerate(domain): - code_point = ord(char) - uts46row = uts46data[code_point if code_point < 256 else - bisect.bisect_left(uts46data, (code_point, "Z")) - 1] - status = uts46row[1] - replacement = uts46row[2] if len(uts46row) == 3 else None - if (status == "V" or - (status == "D" and not transitional) or - (status == "3" and not std3_rules and replacement is None)): - output += char - elif replacement is not None and (status == "M" or - (status == "3" and not std3_rules) or - (status == "D" and transitional)): - output += replacement - elif status != "I": - raise IndexError() - return unicodedata.normalize("NFC", output) - except IndexError: - raise InvalidCodepoint( - "Codepoint {0} not allowed at position {1} in {2}".format( - _unot(code_point), pos + 1, repr(domain))) - - -def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): - - if isinstance(s, (bytes, bytearray)): - s = s.decode("ascii") - if uts46: - s = uts46_remap(s, std3_rules, transitional) - trailing_dot = False - result = [] - if strict: - labels = s.split('.') - else: - labels = _unicode_dots_re.split(s) - if not labels or labels == ['']: - raise IDNAError('Empty domain') - if labels[-1] == '': - del labels[-1] - trailing_dot = True - for label in labels: - s = alabel(label) - if s: - result.append(s) - else: - raise IDNAError('Empty label') - if trailing_dot: - result.append(b'') - s = b'.'.join(result) - if not valid_string_length(s, trailing_dot): - raise IDNAError('Domain too long') - return s - - -def decode(s, strict=False, uts46=False, std3_rules=False): - - if isinstance(s, (bytes, bytearray)): - s = s.decode("ascii") - if uts46: - s = uts46_remap(s, std3_rules, False) - trailing_dot = False - result = [] - if not strict: - labels = _unicode_dots_re.split(s) - else: - labels = s.split(u'.') - if not labels or labels == ['']: - raise IDNAError('Empty domain') - if not labels[-1]: - del labels[-1] - trailing_dot = True - for label in labels: - s = ulabel(label) - if s: - result.append(s) - else: - raise IDNAError('Empty label') - if trailing_dot: - result.append(u'') - return u'.'.join(result) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/core.pyc b/env/lib/python2.7/site-packages/pip/_vendor/idna/core.pyc deleted file mode 100644 index dbd91941..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/idna/core.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/idnadata.pyc b/env/lib/python2.7/site-packages/pip/_vendor/idna/idnadata.pyc deleted file mode 100644 index bafff33f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/idna/idnadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/intranges.pyc 
b/env/lib/python2.7/site-packages/pip/_vendor/idna/intranges.pyc deleted file mode 100644 index 76888bc1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/idna/intranges.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/package_data.py b/env/lib/python2.7/site-packages/pip/_vendor/idna/package_data.py deleted file mode 100644 index 257e8989..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/idna/package_data.py +++ /dev/null @@ -1,2 +0,0 @@ -__version__ = '2.8' - diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/package_data.pyc b/env/lib/python2.7/site-packages/pip/_vendor/idna/package_data.pyc deleted file mode 100644 index 9bea8753..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/idna/package_data.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/uts46data.py b/env/lib/python2.7/site-packages/pip/_vendor/idna/uts46data.py deleted file mode 100644 index a68ed4c0..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/idna/uts46data.py +++ /dev/null @@ -1,8205 +0,0 @@ -# This file is automatically generated by tools/idna-data -# vim: set fileencoding=utf-8 : - -"""IDNA Mapping Table from UTS46.""" - - -__version__ = "11.0.0" -def _seg_0(): - return [ - (0x0, '3'), - (0x1, '3'), - (0x2, '3'), - (0x3, '3'), - (0x4, '3'), - (0x5, '3'), - (0x6, '3'), - (0x7, '3'), - (0x8, '3'), - (0x9, '3'), - (0xA, '3'), - (0xB, '3'), - (0xC, '3'), - (0xD, '3'), - (0xE, '3'), - (0xF, '3'), - (0x10, '3'), - (0x11, '3'), - (0x12, '3'), - (0x13, '3'), - (0x14, '3'), - (0x15, '3'), - (0x16, '3'), - (0x17, '3'), - (0x18, '3'), - (0x19, '3'), - (0x1A, '3'), - (0x1B, '3'), - (0x1C, '3'), - (0x1D, '3'), - (0x1E, '3'), - (0x1F, '3'), - (0x20, '3'), - (0x21, '3'), - (0x22, '3'), - (0x23, '3'), - (0x24, '3'), - (0x25, '3'), - (0x26, '3'), - (0x27, '3'), - (0x28, '3'), - (0x29, '3'), - (0x2A, '3'), - (0x2B, '3'), - (0x2C, '3'), - (0x2D, 'V'), - (0x2E, 'V'), - (0x2F, '3'), - (0x30, 'V'), - (0x31, 'V'), - (0x32, 'V'), - (0x33, 'V'), - (0x34, 'V'), - (0x35, 'V'), - (0x36, 'V'), - (0x37, 'V'), - (0x38, 'V'), - (0x39, 'V'), - (0x3A, '3'), - (0x3B, '3'), - (0x3C, '3'), - (0x3D, '3'), - (0x3E, '3'), - (0x3F, '3'), - (0x40, '3'), - (0x41, 'M', u'a'), - (0x42, 'M', u'b'), - (0x43, 'M', u'c'), - (0x44, 'M', u'd'), - (0x45, 'M', u'e'), - (0x46, 'M', u'f'), - (0x47, 'M', u'g'), - (0x48, 'M', u'h'), - (0x49, 'M', u'i'), - (0x4A, 'M', u'j'), - (0x4B, 'M', u'k'), - (0x4C, 'M', u'l'), - (0x4D, 'M', u'm'), - (0x4E, 'M', u'n'), - (0x4F, 'M', u'o'), - (0x50, 'M', u'p'), - (0x51, 'M', u'q'), - (0x52, 'M', u'r'), - (0x53, 'M', u's'), - (0x54, 'M', u't'), - (0x55, 'M', u'u'), - (0x56, 'M', u'v'), - (0x57, 'M', u'w'), - (0x58, 'M', u'x'), - (0x59, 'M', u'y'), - (0x5A, 'M', u'z'), - (0x5B, '3'), - (0x5C, '3'), - (0x5D, '3'), - (0x5E, '3'), - (0x5F, '3'), - (0x60, '3'), - (0x61, 'V'), - (0x62, 'V'), - (0x63, 'V'), - ] - -def _seg_1(): - return [ - (0x64, 'V'), - (0x65, 'V'), - (0x66, 'V'), - (0x67, 'V'), - (0x68, 'V'), - (0x69, 'V'), - (0x6A, 'V'), - (0x6B, 'V'), - (0x6C, 'V'), - (0x6D, 'V'), - (0x6E, 'V'), - (0x6F, 'V'), - (0x70, 'V'), - (0x71, 'V'), - (0x72, 'V'), - (0x73, 'V'), - (0x74, 'V'), - (0x75, 'V'), - (0x76, 'V'), - (0x77, 'V'), - (0x78, 'V'), - (0x79, 'V'), - (0x7A, 'V'), - (0x7B, '3'), - (0x7C, '3'), - (0x7D, '3'), - (0x7E, '3'), - (0x7F, '3'), - (0x80, 'X'), - (0x81, 'X'), - (0x82, 'X'), - (0x83, 'X'), - (0x84, 'X'), - (0x85, 'X'), - (0x86, 'X'), - (0x87, 'X'), - (0x88, 'X'), - (0x89, 'X'), - 
(0x8A, 'X'), - (0x8B, 'X'), - (0x8C, 'X'), - (0x8D, 'X'), - (0x8E, 'X'), - (0x8F, 'X'), - (0x90, 'X'), - (0x91, 'X'), - (0x92, 'X'), - (0x93, 'X'), - (0x94, 'X'), - (0x95, 'X'), - (0x96, 'X'), - (0x97, 'X'), - (0x98, 'X'), - (0x99, 'X'), - (0x9A, 'X'), - (0x9B, 'X'), - (0x9C, 'X'), - (0x9D, 'X'), - (0x9E, 'X'), - (0x9F, 'X'), - (0xA0, '3', u' '), - (0xA1, 'V'), - (0xA2, 'V'), - (0xA3, 'V'), - (0xA4, 'V'), - (0xA5, 'V'), - (0xA6, 'V'), - (0xA7, 'V'), - (0xA8, '3', u' ̈'), - (0xA9, 'V'), - (0xAA, 'M', u'a'), - (0xAB, 'V'), - (0xAC, 'V'), - (0xAD, 'I'), - (0xAE, 'V'), - (0xAF, '3', u' ̄'), - (0xB0, 'V'), - (0xB1, 'V'), - (0xB2, 'M', u'2'), - (0xB3, 'M', u'3'), - (0xB4, '3', u' ́'), - (0xB5, 'M', u'μ'), - (0xB6, 'V'), - (0xB7, 'V'), - (0xB8, '3', u' ̧'), - (0xB9, 'M', u'1'), - (0xBA, 'M', u'o'), - (0xBB, 'V'), - (0xBC, 'M', u'1⁄4'), - (0xBD, 'M', u'1⁄2'), - (0xBE, 'M', u'3⁄4'), - (0xBF, 'V'), - (0xC0, 'M', u'à'), - (0xC1, 'M', u'á'), - (0xC2, 'M', u'â'), - (0xC3, 'M', u'ã'), - (0xC4, 'M', u'ä'), - (0xC5, 'M', u'å'), - (0xC6, 'M', u'æ'), - (0xC7, 'M', u'ç'), - ] - -def _seg_2(): - return [ - (0xC8, 'M', u'è'), - (0xC9, 'M', u'é'), - (0xCA, 'M', u'ê'), - (0xCB, 'M', u'ë'), - (0xCC, 'M', u'ì'), - (0xCD, 'M', u'í'), - (0xCE, 'M', u'î'), - (0xCF, 'M', u'ï'), - (0xD0, 'M', u'ð'), - (0xD1, 'M', u'ñ'), - (0xD2, 'M', u'ò'), - (0xD3, 'M', u'ó'), - (0xD4, 'M', u'ô'), - (0xD5, 'M', u'õ'), - (0xD6, 'M', u'ö'), - (0xD7, 'V'), - (0xD8, 'M', u'ø'), - (0xD9, 'M', u'ù'), - (0xDA, 'M', u'ú'), - (0xDB, 'M', u'û'), - (0xDC, 'M', u'ü'), - (0xDD, 'M', u'ý'), - (0xDE, 'M', u'þ'), - (0xDF, 'D', u'ss'), - (0xE0, 'V'), - (0xE1, 'V'), - (0xE2, 'V'), - (0xE3, 'V'), - (0xE4, 'V'), - (0xE5, 'V'), - (0xE6, 'V'), - (0xE7, 'V'), - (0xE8, 'V'), - (0xE9, 'V'), - (0xEA, 'V'), - (0xEB, 'V'), - (0xEC, 'V'), - (0xED, 'V'), - (0xEE, 'V'), - (0xEF, 'V'), - (0xF0, 'V'), - (0xF1, 'V'), - (0xF2, 'V'), - (0xF3, 'V'), - (0xF4, 'V'), - (0xF5, 'V'), - (0xF6, 'V'), - (0xF7, 'V'), - (0xF8, 'V'), - (0xF9, 'V'), - (0xFA, 'V'), - (0xFB, 'V'), - (0xFC, 'V'), - (0xFD, 'V'), - (0xFE, 'V'), - (0xFF, 'V'), - (0x100, 'M', u'ā'), - (0x101, 'V'), - (0x102, 'M', u'ă'), - (0x103, 'V'), - (0x104, 'M', u'ą'), - (0x105, 'V'), - (0x106, 'M', u'ć'), - (0x107, 'V'), - (0x108, 'M', u'ĉ'), - (0x109, 'V'), - (0x10A, 'M', u'ċ'), - (0x10B, 'V'), - (0x10C, 'M', u'č'), - (0x10D, 'V'), - (0x10E, 'M', u'ď'), - (0x10F, 'V'), - (0x110, 'M', u'đ'), - (0x111, 'V'), - (0x112, 'M', u'ē'), - (0x113, 'V'), - (0x114, 'M', u'ĕ'), - (0x115, 'V'), - (0x116, 'M', u'ė'), - (0x117, 'V'), - (0x118, 'M', u'ę'), - (0x119, 'V'), - (0x11A, 'M', u'ě'), - (0x11B, 'V'), - (0x11C, 'M', u'ĝ'), - (0x11D, 'V'), - (0x11E, 'M', u'ğ'), - (0x11F, 'V'), - (0x120, 'M', u'ġ'), - (0x121, 'V'), - (0x122, 'M', u'ģ'), - (0x123, 'V'), - (0x124, 'M', u'ĥ'), - (0x125, 'V'), - (0x126, 'M', u'ħ'), - (0x127, 'V'), - (0x128, 'M', u'ĩ'), - (0x129, 'V'), - (0x12A, 'M', u'ī'), - (0x12B, 'V'), - ] - -def _seg_3(): - return [ - (0x12C, 'M', u'ĭ'), - (0x12D, 'V'), - (0x12E, 'M', u'į'), - (0x12F, 'V'), - (0x130, 'M', u'i̇'), - (0x131, 'V'), - (0x132, 'M', u'ij'), - (0x134, 'M', u'ĵ'), - (0x135, 'V'), - (0x136, 'M', u'ķ'), - (0x137, 'V'), - (0x139, 'M', u'ĺ'), - (0x13A, 'V'), - (0x13B, 'M', u'ļ'), - (0x13C, 'V'), - (0x13D, 'M', u'ľ'), - (0x13E, 'V'), - (0x13F, 'M', u'l·'), - (0x141, 'M', u'ł'), - (0x142, 'V'), - (0x143, 'M', u'ń'), - (0x144, 'V'), - (0x145, 'M', u'ņ'), - (0x146, 'V'), - (0x147, 'M', u'ň'), - (0x148, 'V'), - (0x149, 'M', u'ʼn'), - (0x14A, 'M', u'ŋ'), - (0x14B, 'V'), - (0x14C, 'M', u'ō'), - (0x14D, 'V'), - 
(0x14E, 'M', u'ŏ'), - (0x14F, 'V'), - (0x150, 'M', u'ő'), - (0x151, 'V'), - (0x152, 'M', u'œ'), - (0x153, 'V'), - (0x154, 'M', u'ŕ'), - (0x155, 'V'), - (0x156, 'M', u'ŗ'), - (0x157, 'V'), - (0x158, 'M', u'ř'), - (0x159, 'V'), - (0x15A, 'M', u'ś'), - (0x15B, 'V'), - (0x15C, 'M', u'ŝ'), - (0x15D, 'V'), - (0x15E, 'M', u'ş'), - (0x15F, 'V'), - (0x160, 'M', u'š'), - (0x161, 'V'), - (0x162, 'M', u'ţ'), - (0x163, 'V'), - (0x164, 'M', u'ť'), - (0x165, 'V'), - (0x166, 'M', u'ŧ'), - (0x167, 'V'), - (0x168, 'M', u'ũ'), - (0x169, 'V'), - (0x16A, 'M', u'ū'), - (0x16B, 'V'), - (0x16C, 'M', u'ŭ'), - (0x16D, 'V'), - (0x16E, 'M', u'ů'), - (0x16F, 'V'), - (0x170, 'M', u'ű'), - (0x171, 'V'), - (0x172, 'M', u'ų'), - (0x173, 'V'), - (0x174, 'M', u'ŵ'), - (0x175, 'V'), - (0x176, 'M', u'ŷ'), - (0x177, 'V'), - (0x178, 'M', u'ÿ'), - (0x179, 'M', u'ź'), - (0x17A, 'V'), - (0x17B, 'M', u'ż'), - (0x17C, 'V'), - (0x17D, 'M', u'ž'), - (0x17E, 'V'), - (0x17F, 'M', u's'), - (0x180, 'V'), - (0x181, 'M', u'ɓ'), - (0x182, 'M', u'ƃ'), - (0x183, 'V'), - (0x184, 'M', u'ƅ'), - (0x185, 'V'), - (0x186, 'M', u'ɔ'), - (0x187, 'M', u'ƈ'), - (0x188, 'V'), - (0x189, 'M', u'ɖ'), - (0x18A, 'M', u'ɗ'), - (0x18B, 'M', u'ƌ'), - (0x18C, 'V'), - (0x18E, 'M', u'ǝ'), - (0x18F, 'M', u'ə'), - (0x190, 'M', u'ɛ'), - (0x191, 'M', u'ƒ'), - (0x192, 'V'), - (0x193, 'M', u'ɠ'), - ] - -def _seg_4(): - return [ - (0x194, 'M', u'ɣ'), - (0x195, 'V'), - (0x196, 'M', u'ɩ'), - (0x197, 'M', u'ɨ'), - (0x198, 'M', u'ƙ'), - (0x199, 'V'), - (0x19C, 'M', u'ɯ'), - (0x19D, 'M', u'ɲ'), - (0x19E, 'V'), - (0x19F, 'M', u'ɵ'), - (0x1A0, 'M', u'ơ'), - (0x1A1, 'V'), - (0x1A2, 'M', u'ƣ'), - (0x1A3, 'V'), - (0x1A4, 'M', u'ƥ'), - (0x1A5, 'V'), - (0x1A6, 'M', u'ʀ'), - (0x1A7, 'M', u'ƨ'), - (0x1A8, 'V'), - (0x1A9, 'M', u'ʃ'), - (0x1AA, 'V'), - (0x1AC, 'M', u'ƭ'), - (0x1AD, 'V'), - (0x1AE, 'M', u'ʈ'), - (0x1AF, 'M', u'ư'), - (0x1B0, 'V'), - (0x1B1, 'M', u'ʊ'), - (0x1B2, 'M', u'ʋ'), - (0x1B3, 'M', u'ƴ'), - (0x1B4, 'V'), - (0x1B5, 'M', u'ƶ'), - (0x1B6, 'V'), - (0x1B7, 'M', u'ʒ'), - (0x1B8, 'M', u'ƹ'), - (0x1B9, 'V'), - (0x1BC, 'M', u'ƽ'), - (0x1BD, 'V'), - (0x1C4, 'M', u'dž'), - (0x1C7, 'M', u'lj'), - (0x1CA, 'M', u'nj'), - (0x1CD, 'M', u'ǎ'), - (0x1CE, 'V'), - (0x1CF, 'M', u'ǐ'), - (0x1D0, 'V'), - (0x1D1, 'M', u'ǒ'), - (0x1D2, 'V'), - (0x1D3, 'M', u'ǔ'), - (0x1D4, 'V'), - (0x1D5, 'M', u'ǖ'), - (0x1D6, 'V'), - (0x1D7, 'M', u'ǘ'), - (0x1D8, 'V'), - (0x1D9, 'M', u'ǚ'), - (0x1DA, 'V'), - (0x1DB, 'M', u'ǜ'), - (0x1DC, 'V'), - (0x1DE, 'M', u'ǟ'), - (0x1DF, 'V'), - (0x1E0, 'M', u'ǡ'), - (0x1E1, 'V'), - (0x1E2, 'M', u'ǣ'), - (0x1E3, 'V'), - (0x1E4, 'M', u'ǥ'), - (0x1E5, 'V'), - (0x1E6, 'M', u'ǧ'), - (0x1E7, 'V'), - (0x1E8, 'M', u'ǩ'), - (0x1E9, 'V'), - (0x1EA, 'M', u'ǫ'), - (0x1EB, 'V'), - (0x1EC, 'M', u'ǭ'), - (0x1ED, 'V'), - (0x1EE, 'M', u'ǯ'), - (0x1EF, 'V'), - (0x1F1, 'M', u'dz'), - (0x1F4, 'M', u'ǵ'), - (0x1F5, 'V'), - (0x1F6, 'M', u'ƕ'), - (0x1F7, 'M', u'ƿ'), - (0x1F8, 'M', u'ǹ'), - (0x1F9, 'V'), - (0x1FA, 'M', u'ǻ'), - (0x1FB, 'V'), - (0x1FC, 'M', u'ǽ'), - (0x1FD, 'V'), - (0x1FE, 'M', u'ǿ'), - (0x1FF, 'V'), - (0x200, 'M', u'ȁ'), - (0x201, 'V'), - (0x202, 'M', u'ȃ'), - (0x203, 'V'), - (0x204, 'M', u'ȅ'), - (0x205, 'V'), - (0x206, 'M', u'ȇ'), - (0x207, 'V'), - (0x208, 'M', u'ȉ'), - (0x209, 'V'), - (0x20A, 'M', u'ȋ'), - (0x20B, 'V'), - (0x20C, 'M', u'ȍ'), - ] - -def _seg_5(): - return [ - (0x20D, 'V'), - (0x20E, 'M', u'ȏ'), - (0x20F, 'V'), - (0x210, 'M', u'ȑ'), - (0x211, 'V'), - (0x212, 'M', u'ȓ'), - (0x213, 'V'), - (0x214, 'M', u'ȕ'), - (0x215, 'V'), - (0x216, 'M', u'ȗ'), - (0x217, 
'V'), - (0x218, 'M', u'ș'), - (0x219, 'V'), - (0x21A, 'M', u'ț'), - (0x21B, 'V'), - (0x21C, 'M', u'ȝ'), - (0x21D, 'V'), - (0x21E, 'M', u'ȟ'), - (0x21F, 'V'), - (0x220, 'M', u'ƞ'), - (0x221, 'V'), - (0x222, 'M', u'ȣ'), - (0x223, 'V'), - (0x224, 'M', u'ȥ'), - (0x225, 'V'), - (0x226, 'M', u'ȧ'), - (0x227, 'V'), - (0x228, 'M', u'ȩ'), - (0x229, 'V'), - (0x22A, 'M', u'ȫ'), - (0x22B, 'V'), - (0x22C, 'M', u'ȭ'), - (0x22D, 'V'), - (0x22E, 'M', u'ȯ'), - (0x22F, 'V'), - (0x230, 'M', u'ȱ'), - (0x231, 'V'), - (0x232, 'M', u'ȳ'), - (0x233, 'V'), - (0x23A, 'M', u'ⱥ'), - (0x23B, 'M', u'ȼ'), - (0x23C, 'V'), - (0x23D, 'M', u'ƚ'), - (0x23E, 'M', u'ⱦ'), - (0x23F, 'V'), - (0x241, 'M', u'ɂ'), - (0x242, 'V'), - (0x243, 'M', u'ƀ'), - (0x244, 'M', u'ʉ'), - (0x245, 'M', u'ʌ'), - (0x246, 'M', u'ɇ'), - (0x247, 'V'), - (0x248, 'M', u'ɉ'), - (0x249, 'V'), - (0x24A, 'M', u'ɋ'), - (0x24B, 'V'), - (0x24C, 'M', u'ɍ'), - (0x24D, 'V'), - (0x24E, 'M', u'ɏ'), - (0x24F, 'V'), - (0x2B0, 'M', u'h'), - (0x2B1, 'M', u'ɦ'), - (0x2B2, 'M', u'j'), - (0x2B3, 'M', u'r'), - (0x2B4, 'M', u'ɹ'), - (0x2B5, 'M', u'ɻ'), - (0x2B6, 'M', u'ʁ'), - (0x2B7, 'M', u'w'), - (0x2B8, 'M', u'y'), - (0x2B9, 'V'), - (0x2D8, '3', u' ̆'), - (0x2D9, '3', u' ̇'), - (0x2DA, '3', u' ̊'), - (0x2DB, '3', u' ̨'), - (0x2DC, '3', u' ̃'), - (0x2DD, '3', u' ̋'), - (0x2DE, 'V'), - (0x2E0, 'M', u'ɣ'), - (0x2E1, 'M', u'l'), - (0x2E2, 'M', u's'), - (0x2E3, 'M', u'x'), - (0x2E4, 'M', u'ʕ'), - (0x2E5, 'V'), - (0x340, 'M', u'̀'), - (0x341, 'M', u'́'), - (0x342, 'V'), - (0x343, 'M', u'̓'), - (0x344, 'M', u'̈́'), - (0x345, 'M', u'ι'), - (0x346, 'V'), - (0x34F, 'I'), - (0x350, 'V'), - (0x370, 'M', u'ͱ'), - (0x371, 'V'), - (0x372, 'M', u'ͳ'), - (0x373, 'V'), - (0x374, 'M', u'ʹ'), - (0x375, 'V'), - (0x376, 'M', u'ͷ'), - (0x377, 'V'), - ] - -def _seg_6(): - return [ - (0x378, 'X'), - (0x37A, '3', u' ι'), - (0x37B, 'V'), - (0x37E, '3', u';'), - (0x37F, 'M', u'ϳ'), - (0x380, 'X'), - (0x384, '3', u' ́'), - (0x385, '3', u' ̈́'), - (0x386, 'M', u'ά'), - (0x387, 'M', u'·'), - (0x388, 'M', u'έ'), - (0x389, 'M', u'ή'), - (0x38A, 'M', u'ί'), - (0x38B, 'X'), - (0x38C, 'M', u'ό'), - (0x38D, 'X'), - (0x38E, 'M', u'ύ'), - (0x38F, 'M', u'ώ'), - (0x390, 'V'), - (0x391, 'M', u'α'), - (0x392, 'M', u'β'), - (0x393, 'M', u'γ'), - (0x394, 'M', u'δ'), - (0x395, 'M', u'ε'), - (0x396, 'M', u'ζ'), - (0x397, 'M', u'η'), - (0x398, 'M', u'θ'), - (0x399, 'M', u'ι'), - (0x39A, 'M', u'κ'), - (0x39B, 'M', u'λ'), - (0x39C, 'M', u'μ'), - (0x39D, 'M', u'ν'), - (0x39E, 'M', u'ξ'), - (0x39F, 'M', u'ο'), - (0x3A0, 'M', u'π'), - (0x3A1, 'M', u'ρ'), - (0x3A2, 'X'), - (0x3A3, 'M', u'σ'), - (0x3A4, 'M', u'τ'), - (0x3A5, 'M', u'υ'), - (0x3A6, 'M', u'φ'), - (0x3A7, 'M', u'χ'), - (0x3A8, 'M', u'ψ'), - (0x3A9, 'M', u'ω'), - (0x3AA, 'M', u'ϊ'), - (0x3AB, 'M', u'ϋ'), - (0x3AC, 'V'), - (0x3C2, 'D', u'σ'), - (0x3C3, 'V'), - (0x3CF, 'M', u'ϗ'), - (0x3D0, 'M', u'β'), - (0x3D1, 'M', u'θ'), - (0x3D2, 'M', u'υ'), - (0x3D3, 'M', u'ύ'), - (0x3D4, 'M', u'ϋ'), - (0x3D5, 'M', u'φ'), - (0x3D6, 'M', u'π'), - (0x3D7, 'V'), - (0x3D8, 'M', u'ϙ'), - (0x3D9, 'V'), - (0x3DA, 'M', u'ϛ'), - (0x3DB, 'V'), - (0x3DC, 'M', u'ϝ'), - (0x3DD, 'V'), - (0x3DE, 'M', u'ϟ'), - (0x3DF, 'V'), - (0x3E0, 'M', u'ϡ'), - (0x3E1, 'V'), - (0x3E2, 'M', u'ϣ'), - (0x3E3, 'V'), - (0x3E4, 'M', u'ϥ'), - (0x3E5, 'V'), - (0x3E6, 'M', u'ϧ'), - (0x3E7, 'V'), - (0x3E8, 'M', u'ϩ'), - (0x3E9, 'V'), - (0x3EA, 'M', u'ϫ'), - (0x3EB, 'V'), - (0x3EC, 'M', u'ϭ'), - (0x3ED, 'V'), - (0x3EE, 'M', u'ϯ'), - (0x3EF, 'V'), - (0x3F0, 'M', u'κ'), - (0x3F1, 'M', u'ρ'), - (0x3F2, 'M', u'σ'), - 
(0x3F3, 'V'), - (0x3F4, 'M', u'θ'), - (0x3F5, 'M', u'ε'), - (0x3F6, 'V'), - (0x3F7, 'M', u'ϸ'), - (0x3F8, 'V'), - (0x3F9, 'M', u'σ'), - (0x3FA, 'M', u'ϻ'), - (0x3FB, 'V'), - (0x3FD, 'M', u'ͻ'), - (0x3FE, 'M', u'ͼ'), - (0x3FF, 'M', u'ͽ'), - (0x400, 'M', u'ѐ'), - (0x401, 'M', u'ё'), - (0x402, 'M', u'ђ'), - ] - -def _seg_7(): - return [ - (0x403, 'M', u'ѓ'), - (0x404, 'M', u'є'), - (0x405, 'M', u'ѕ'), - (0x406, 'M', u'і'), - (0x407, 'M', u'ї'), - (0x408, 'M', u'ј'), - (0x409, 'M', u'љ'), - (0x40A, 'M', u'њ'), - (0x40B, 'M', u'ћ'), - (0x40C, 'M', u'ќ'), - (0x40D, 'M', u'ѝ'), - (0x40E, 'M', u'ў'), - (0x40F, 'M', u'џ'), - (0x410, 'M', u'а'), - (0x411, 'M', u'б'), - (0x412, 'M', u'в'), - (0x413, 'M', u'г'), - (0x414, 'M', u'д'), - (0x415, 'M', u'е'), - (0x416, 'M', u'ж'), - (0x417, 'M', u'з'), - (0x418, 'M', u'и'), - (0x419, 'M', u'й'), - (0x41A, 'M', u'к'), - (0x41B, 'M', u'л'), - (0x41C, 'M', u'м'), - (0x41D, 'M', u'н'), - (0x41E, 'M', u'о'), - (0x41F, 'M', u'п'), - (0x420, 'M', u'р'), - (0x421, 'M', u'с'), - (0x422, 'M', u'т'), - (0x423, 'M', u'у'), - (0x424, 'M', u'ф'), - (0x425, 'M', u'х'), - (0x426, 'M', u'ц'), - (0x427, 'M', u'ч'), - (0x428, 'M', u'ш'), - (0x429, 'M', u'щ'), - (0x42A, 'M', u'ъ'), - (0x42B, 'M', u'ы'), - (0x42C, 'M', u'ь'), - (0x42D, 'M', u'э'), - (0x42E, 'M', u'ю'), - (0x42F, 'M', u'я'), - (0x430, 'V'), - (0x460, 'M', u'ѡ'), - (0x461, 'V'), - (0x462, 'M', u'ѣ'), - (0x463, 'V'), - (0x464, 'M', u'ѥ'), - (0x465, 'V'), - (0x466, 'M', u'ѧ'), - (0x467, 'V'), - (0x468, 'M', u'ѩ'), - (0x469, 'V'), - (0x46A, 'M', u'ѫ'), - (0x46B, 'V'), - (0x46C, 'M', u'ѭ'), - (0x46D, 'V'), - (0x46E, 'M', u'ѯ'), - (0x46F, 'V'), - (0x470, 'M', u'ѱ'), - (0x471, 'V'), - (0x472, 'M', u'ѳ'), - (0x473, 'V'), - (0x474, 'M', u'ѵ'), - (0x475, 'V'), - (0x476, 'M', u'ѷ'), - (0x477, 'V'), - (0x478, 'M', u'ѹ'), - (0x479, 'V'), - (0x47A, 'M', u'ѻ'), - (0x47B, 'V'), - (0x47C, 'M', u'ѽ'), - (0x47D, 'V'), - (0x47E, 'M', u'ѿ'), - (0x47F, 'V'), - (0x480, 'M', u'ҁ'), - (0x481, 'V'), - (0x48A, 'M', u'ҋ'), - (0x48B, 'V'), - (0x48C, 'M', u'ҍ'), - (0x48D, 'V'), - (0x48E, 'M', u'ҏ'), - (0x48F, 'V'), - (0x490, 'M', u'ґ'), - (0x491, 'V'), - (0x492, 'M', u'ғ'), - (0x493, 'V'), - (0x494, 'M', u'ҕ'), - (0x495, 'V'), - (0x496, 'M', u'җ'), - (0x497, 'V'), - (0x498, 'M', u'ҙ'), - (0x499, 'V'), - (0x49A, 'M', u'қ'), - (0x49B, 'V'), - (0x49C, 'M', u'ҝ'), - (0x49D, 'V'), - ] - -def _seg_8(): - return [ - (0x49E, 'M', u'ҟ'), - (0x49F, 'V'), - (0x4A0, 'M', u'ҡ'), - (0x4A1, 'V'), - (0x4A2, 'M', u'ң'), - (0x4A3, 'V'), - (0x4A4, 'M', u'ҥ'), - (0x4A5, 'V'), - (0x4A6, 'M', u'ҧ'), - (0x4A7, 'V'), - (0x4A8, 'M', u'ҩ'), - (0x4A9, 'V'), - (0x4AA, 'M', u'ҫ'), - (0x4AB, 'V'), - (0x4AC, 'M', u'ҭ'), - (0x4AD, 'V'), - (0x4AE, 'M', u'ү'), - (0x4AF, 'V'), - (0x4B0, 'M', u'ұ'), - (0x4B1, 'V'), - (0x4B2, 'M', u'ҳ'), - (0x4B3, 'V'), - (0x4B4, 'M', u'ҵ'), - (0x4B5, 'V'), - (0x4B6, 'M', u'ҷ'), - (0x4B7, 'V'), - (0x4B8, 'M', u'ҹ'), - (0x4B9, 'V'), - (0x4BA, 'M', u'һ'), - (0x4BB, 'V'), - (0x4BC, 'M', u'ҽ'), - (0x4BD, 'V'), - (0x4BE, 'M', u'ҿ'), - (0x4BF, 'V'), - (0x4C0, 'X'), - (0x4C1, 'M', u'ӂ'), - (0x4C2, 'V'), - (0x4C3, 'M', u'ӄ'), - (0x4C4, 'V'), - (0x4C5, 'M', u'ӆ'), - (0x4C6, 'V'), - (0x4C7, 'M', u'ӈ'), - (0x4C8, 'V'), - (0x4C9, 'M', u'ӊ'), - (0x4CA, 'V'), - (0x4CB, 'M', u'ӌ'), - (0x4CC, 'V'), - (0x4CD, 'M', u'ӎ'), - (0x4CE, 'V'), - (0x4D0, 'M', u'ӑ'), - (0x4D1, 'V'), - (0x4D2, 'M', u'ӓ'), - (0x4D3, 'V'), - (0x4D4, 'M', u'ӕ'), - (0x4D5, 'V'), - (0x4D6, 'M', u'ӗ'), - (0x4D7, 'V'), - (0x4D8, 'M', u'ә'), - (0x4D9, 'V'), - (0x4DA, 'M', u'ӛ'), - (0x4DB, 
'V'), - (0x4DC, 'M', u'ӝ'), - (0x4DD, 'V'), - (0x4DE, 'M', u'ӟ'), - (0x4DF, 'V'), - (0x4E0, 'M', u'ӡ'), - (0x4E1, 'V'), - (0x4E2, 'M', u'ӣ'), - (0x4E3, 'V'), - (0x4E4, 'M', u'ӥ'), - (0x4E5, 'V'), - (0x4E6, 'M', u'ӧ'), - (0x4E7, 'V'), - (0x4E8, 'M', u'ө'), - (0x4E9, 'V'), - (0x4EA, 'M', u'ӫ'), - (0x4EB, 'V'), - (0x4EC, 'M', u'ӭ'), - (0x4ED, 'V'), - (0x4EE, 'M', u'ӯ'), - (0x4EF, 'V'), - (0x4F0, 'M', u'ӱ'), - (0x4F1, 'V'), - (0x4F2, 'M', u'ӳ'), - (0x4F3, 'V'), - (0x4F4, 'M', u'ӵ'), - (0x4F5, 'V'), - (0x4F6, 'M', u'ӷ'), - (0x4F7, 'V'), - (0x4F8, 'M', u'ӹ'), - (0x4F9, 'V'), - (0x4FA, 'M', u'ӻ'), - (0x4FB, 'V'), - (0x4FC, 'M', u'ӽ'), - (0x4FD, 'V'), - (0x4FE, 'M', u'ӿ'), - (0x4FF, 'V'), - (0x500, 'M', u'ԁ'), - (0x501, 'V'), - (0x502, 'M', u'ԃ'), - ] - -def _seg_9(): - return [ - (0x503, 'V'), - (0x504, 'M', u'ԅ'), - (0x505, 'V'), - (0x506, 'M', u'ԇ'), - (0x507, 'V'), - (0x508, 'M', u'ԉ'), - (0x509, 'V'), - (0x50A, 'M', u'ԋ'), - (0x50B, 'V'), - (0x50C, 'M', u'ԍ'), - (0x50D, 'V'), - (0x50E, 'M', u'ԏ'), - (0x50F, 'V'), - (0x510, 'M', u'ԑ'), - (0x511, 'V'), - (0x512, 'M', u'ԓ'), - (0x513, 'V'), - (0x514, 'M', u'ԕ'), - (0x515, 'V'), - (0x516, 'M', u'ԗ'), - (0x517, 'V'), - (0x518, 'M', u'ԙ'), - (0x519, 'V'), - (0x51A, 'M', u'ԛ'), - (0x51B, 'V'), - (0x51C, 'M', u'ԝ'), - (0x51D, 'V'), - (0x51E, 'M', u'ԟ'), - (0x51F, 'V'), - (0x520, 'M', u'ԡ'), - (0x521, 'V'), - (0x522, 'M', u'ԣ'), - (0x523, 'V'), - (0x524, 'M', u'ԥ'), - (0x525, 'V'), - (0x526, 'M', u'ԧ'), - (0x527, 'V'), - (0x528, 'M', u'ԩ'), - (0x529, 'V'), - (0x52A, 'M', u'ԫ'), - (0x52B, 'V'), - (0x52C, 'M', u'ԭ'), - (0x52D, 'V'), - (0x52E, 'M', u'ԯ'), - (0x52F, 'V'), - (0x530, 'X'), - (0x531, 'M', u'ա'), - (0x532, 'M', u'բ'), - (0x533, 'M', u'գ'), - (0x534, 'M', u'դ'), - (0x535, 'M', u'ե'), - (0x536, 'M', u'զ'), - (0x537, 'M', u'է'), - (0x538, 'M', u'ը'), - (0x539, 'M', u'թ'), - (0x53A, 'M', u'ժ'), - (0x53B, 'M', u'ի'), - (0x53C, 'M', u'լ'), - (0x53D, 'M', u'խ'), - (0x53E, 'M', u'ծ'), - (0x53F, 'M', u'կ'), - (0x540, 'M', u'հ'), - (0x541, 'M', u'ձ'), - (0x542, 'M', u'ղ'), - (0x543, 'M', u'ճ'), - (0x544, 'M', u'մ'), - (0x545, 'M', u'յ'), - (0x546, 'M', u'ն'), - (0x547, 'M', u'շ'), - (0x548, 'M', u'ո'), - (0x549, 'M', u'չ'), - (0x54A, 'M', u'պ'), - (0x54B, 'M', u'ջ'), - (0x54C, 'M', u'ռ'), - (0x54D, 'M', u'ս'), - (0x54E, 'M', u'վ'), - (0x54F, 'M', u'տ'), - (0x550, 'M', u'ր'), - (0x551, 'M', u'ց'), - (0x552, 'M', u'ւ'), - (0x553, 'M', u'փ'), - (0x554, 'M', u'ք'), - (0x555, 'M', u'օ'), - (0x556, 'M', u'ֆ'), - (0x557, 'X'), - (0x559, 'V'), - (0x587, 'M', u'եւ'), - (0x588, 'V'), - (0x58B, 'X'), - (0x58D, 'V'), - (0x590, 'X'), - (0x591, 'V'), - (0x5C8, 'X'), - (0x5D0, 'V'), - (0x5EB, 'X'), - (0x5EF, 'V'), - (0x5F5, 'X'), - (0x606, 'V'), - (0x61C, 'X'), - (0x61E, 'V'), - ] - -def _seg_10(): - return [ - (0x675, 'M', u'اٴ'), - (0x676, 'M', u'وٴ'), - (0x677, 'M', u'ۇٴ'), - (0x678, 'M', u'يٴ'), - (0x679, 'V'), - (0x6DD, 'X'), - (0x6DE, 'V'), - (0x70E, 'X'), - (0x710, 'V'), - (0x74B, 'X'), - (0x74D, 'V'), - (0x7B2, 'X'), - (0x7C0, 'V'), - (0x7FB, 'X'), - (0x7FD, 'V'), - (0x82E, 'X'), - (0x830, 'V'), - (0x83F, 'X'), - (0x840, 'V'), - (0x85C, 'X'), - (0x85E, 'V'), - (0x85F, 'X'), - (0x860, 'V'), - (0x86B, 'X'), - (0x8A0, 'V'), - (0x8B5, 'X'), - (0x8B6, 'V'), - (0x8BE, 'X'), - (0x8D3, 'V'), - (0x8E2, 'X'), - (0x8E3, 'V'), - (0x958, 'M', u'क़'), - (0x959, 'M', u'ख़'), - (0x95A, 'M', u'ग़'), - (0x95B, 'M', u'ज़'), - (0x95C, 'M', u'ड़'), - (0x95D, 'M', u'ढ़'), - (0x95E, 'M', u'फ़'), - (0x95F, 'M', u'य़'), - (0x960, 'V'), - (0x984, 'X'), - (0x985, 'V'), - (0x98D, 'X'), 
- (0x98F, 'V'), - (0x991, 'X'), - (0x993, 'V'), - (0x9A9, 'X'), - (0x9AA, 'V'), - (0x9B1, 'X'), - (0x9B2, 'V'), - (0x9B3, 'X'), - (0x9B6, 'V'), - (0x9BA, 'X'), - (0x9BC, 'V'), - (0x9C5, 'X'), - (0x9C7, 'V'), - (0x9C9, 'X'), - (0x9CB, 'V'), - (0x9CF, 'X'), - (0x9D7, 'V'), - (0x9D8, 'X'), - (0x9DC, 'M', u'ড়'), - (0x9DD, 'M', u'ঢ়'), - (0x9DE, 'X'), - (0x9DF, 'M', u'য়'), - (0x9E0, 'V'), - (0x9E4, 'X'), - (0x9E6, 'V'), - (0x9FF, 'X'), - (0xA01, 'V'), - (0xA04, 'X'), - (0xA05, 'V'), - (0xA0B, 'X'), - (0xA0F, 'V'), - (0xA11, 'X'), - (0xA13, 'V'), - (0xA29, 'X'), - (0xA2A, 'V'), - (0xA31, 'X'), - (0xA32, 'V'), - (0xA33, 'M', u'ਲ਼'), - (0xA34, 'X'), - (0xA35, 'V'), - (0xA36, 'M', u'ਸ਼'), - (0xA37, 'X'), - (0xA38, 'V'), - (0xA3A, 'X'), - (0xA3C, 'V'), - (0xA3D, 'X'), - (0xA3E, 'V'), - (0xA43, 'X'), - (0xA47, 'V'), - (0xA49, 'X'), - (0xA4B, 'V'), - (0xA4E, 'X'), - (0xA51, 'V'), - (0xA52, 'X'), - (0xA59, 'M', u'ਖ਼'), - (0xA5A, 'M', u'ਗ਼'), - (0xA5B, 'M', u'ਜ਼'), - ] - -def _seg_11(): - return [ - (0xA5C, 'V'), - (0xA5D, 'X'), - (0xA5E, 'M', u'ਫ਼'), - (0xA5F, 'X'), - (0xA66, 'V'), - (0xA77, 'X'), - (0xA81, 'V'), - (0xA84, 'X'), - (0xA85, 'V'), - (0xA8E, 'X'), - (0xA8F, 'V'), - (0xA92, 'X'), - (0xA93, 'V'), - (0xAA9, 'X'), - (0xAAA, 'V'), - (0xAB1, 'X'), - (0xAB2, 'V'), - (0xAB4, 'X'), - (0xAB5, 'V'), - (0xABA, 'X'), - (0xABC, 'V'), - (0xAC6, 'X'), - (0xAC7, 'V'), - (0xACA, 'X'), - (0xACB, 'V'), - (0xACE, 'X'), - (0xAD0, 'V'), - (0xAD1, 'X'), - (0xAE0, 'V'), - (0xAE4, 'X'), - (0xAE6, 'V'), - (0xAF2, 'X'), - (0xAF9, 'V'), - (0xB00, 'X'), - (0xB01, 'V'), - (0xB04, 'X'), - (0xB05, 'V'), - (0xB0D, 'X'), - (0xB0F, 'V'), - (0xB11, 'X'), - (0xB13, 'V'), - (0xB29, 'X'), - (0xB2A, 'V'), - (0xB31, 'X'), - (0xB32, 'V'), - (0xB34, 'X'), - (0xB35, 'V'), - (0xB3A, 'X'), - (0xB3C, 'V'), - (0xB45, 'X'), - (0xB47, 'V'), - (0xB49, 'X'), - (0xB4B, 'V'), - (0xB4E, 'X'), - (0xB56, 'V'), - (0xB58, 'X'), - (0xB5C, 'M', u'ଡ଼'), - (0xB5D, 'M', u'ଢ଼'), - (0xB5E, 'X'), - (0xB5F, 'V'), - (0xB64, 'X'), - (0xB66, 'V'), - (0xB78, 'X'), - (0xB82, 'V'), - (0xB84, 'X'), - (0xB85, 'V'), - (0xB8B, 'X'), - (0xB8E, 'V'), - (0xB91, 'X'), - (0xB92, 'V'), - (0xB96, 'X'), - (0xB99, 'V'), - (0xB9B, 'X'), - (0xB9C, 'V'), - (0xB9D, 'X'), - (0xB9E, 'V'), - (0xBA0, 'X'), - (0xBA3, 'V'), - (0xBA5, 'X'), - (0xBA8, 'V'), - (0xBAB, 'X'), - (0xBAE, 'V'), - (0xBBA, 'X'), - (0xBBE, 'V'), - (0xBC3, 'X'), - (0xBC6, 'V'), - (0xBC9, 'X'), - (0xBCA, 'V'), - (0xBCE, 'X'), - (0xBD0, 'V'), - (0xBD1, 'X'), - (0xBD7, 'V'), - (0xBD8, 'X'), - (0xBE6, 'V'), - (0xBFB, 'X'), - (0xC00, 'V'), - (0xC0D, 'X'), - (0xC0E, 'V'), - (0xC11, 'X'), - (0xC12, 'V'), - ] - -def _seg_12(): - return [ - (0xC29, 'X'), - (0xC2A, 'V'), - (0xC3A, 'X'), - (0xC3D, 'V'), - (0xC45, 'X'), - (0xC46, 'V'), - (0xC49, 'X'), - (0xC4A, 'V'), - (0xC4E, 'X'), - (0xC55, 'V'), - (0xC57, 'X'), - (0xC58, 'V'), - (0xC5B, 'X'), - (0xC60, 'V'), - (0xC64, 'X'), - (0xC66, 'V'), - (0xC70, 'X'), - (0xC78, 'V'), - (0xC8D, 'X'), - (0xC8E, 'V'), - (0xC91, 'X'), - (0xC92, 'V'), - (0xCA9, 'X'), - (0xCAA, 'V'), - (0xCB4, 'X'), - (0xCB5, 'V'), - (0xCBA, 'X'), - (0xCBC, 'V'), - (0xCC5, 'X'), - (0xCC6, 'V'), - (0xCC9, 'X'), - (0xCCA, 'V'), - (0xCCE, 'X'), - (0xCD5, 'V'), - (0xCD7, 'X'), - (0xCDE, 'V'), - (0xCDF, 'X'), - (0xCE0, 'V'), - (0xCE4, 'X'), - (0xCE6, 'V'), - (0xCF0, 'X'), - (0xCF1, 'V'), - (0xCF3, 'X'), - (0xD00, 'V'), - (0xD04, 'X'), - (0xD05, 'V'), - (0xD0D, 'X'), - (0xD0E, 'V'), - (0xD11, 'X'), - (0xD12, 'V'), - (0xD45, 'X'), - (0xD46, 'V'), - (0xD49, 'X'), - (0xD4A, 'V'), - (0xD50, 'X'), - (0xD54, 'V'), - 
(0xD64, 'X'), - (0xD66, 'V'), - (0xD80, 'X'), - (0xD82, 'V'), - (0xD84, 'X'), - (0xD85, 'V'), - (0xD97, 'X'), - (0xD9A, 'V'), - (0xDB2, 'X'), - (0xDB3, 'V'), - (0xDBC, 'X'), - (0xDBD, 'V'), - (0xDBE, 'X'), - (0xDC0, 'V'), - (0xDC7, 'X'), - (0xDCA, 'V'), - (0xDCB, 'X'), - (0xDCF, 'V'), - (0xDD5, 'X'), - (0xDD6, 'V'), - (0xDD7, 'X'), - (0xDD8, 'V'), - (0xDE0, 'X'), - (0xDE6, 'V'), - (0xDF0, 'X'), - (0xDF2, 'V'), - (0xDF5, 'X'), - (0xE01, 'V'), - (0xE33, 'M', u'ํา'), - (0xE34, 'V'), - (0xE3B, 'X'), - (0xE3F, 'V'), - (0xE5C, 'X'), - (0xE81, 'V'), - (0xE83, 'X'), - (0xE84, 'V'), - (0xE85, 'X'), - (0xE87, 'V'), - (0xE89, 'X'), - (0xE8A, 'V'), - (0xE8B, 'X'), - (0xE8D, 'V'), - (0xE8E, 'X'), - (0xE94, 'V'), - ] - -def _seg_13(): - return [ - (0xE98, 'X'), - (0xE99, 'V'), - (0xEA0, 'X'), - (0xEA1, 'V'), - (0xEA4, 'X'), - (0xEA5, 'V'), - (0xEA6, 'X'), - (0xEA7, 'V'), - (0xEA8, 'X'), - (0xEAA, 'V'), - (0xEAC, 'X'), - (0xEAD, 'V'), - (0xEB3, 'M', u'ໍາ'), - (0xEB4, 'V'), - (0xEBA, 'X'), - (0xEBB, 'V'), - (0xEBE, 'X'), - (0xEC0, 'V'), - (0xEC5, 'X'), - (0xEC6, 'V'), - (0xEC7, 'X'), - (0xEC8, 'V'), - (0xECE, 'X'), - (0xED0, 'V'), - (0xEDA, 'X'), - (0xEDC, 'M', u'ຫນ'), - (0xEDD, 'M', u'ຫມ'), - (0xEDE, 'V'), - (0xEE0, 'X'), - (0xF00, 'V'), - (0xF0C, 'M', u'་'), - (0xF0D, 'V'), - (0xF43, 'M', u'གྷ'), - (0xF44, 'V'), - (0xF48, 'X'), - (0xF49, 'V'), - (0xF4D, 'M', u'ཌྷ'), - (0xF4E, 'V'), - (0xF52, 'M', u'དྷ'), - (0xF53, 'V'), - (0xF57, 'M', u'བྷ'), - (0xF58, 'V'), - (0xF5C, 'M', u'ཛྷ'), - (0xF5D, 'V'), - (0xF69, 'M', u'ཀྵ'), - (0xF6A, 'V'), - (0xF6D, 'X'), - (0xF71, 'V'), - (0xF73, 'M', u'ཱི'), - (0xF74, 'V'), - (0xF75, 'M', u'ཱུ'), - (0xF76, 'M', u'ྲྀ'), - (0xF77, 'M', u'ྲཱྀ'), - (0xF78, 'M', u'ླྀ'), - (0xF79, 'M', u'ླཱྀ'), - (0xF7A, 'V'), - (0xF81, 'M', u'ཱྀ'), - (0xF82, 'V'), - (0xF93, 'M', u'ྒྷ'), - (0xF94, 'V'), - (0xF98, 'X'), - (0xF99, 'V'), - (0xF9D, 'M', u'ྜྷ'), - (0xF9E, 'V'), - (0xFA2, 'M', u'ྡྷ'), - (0xFA3, 'V'), - (0xFA7, 'M', u'ྦྷ'), - (0xFA8, 'V'), - (0xFAC, 'M', u'ྫྷ'), - (0xFAD, 'V'), - (0xFB9, 'M', u'ྐྵ'), - (0xFBA, 'V'), - (0xFBD, 'X'), - (0xFBE, 'V'), - (0xFCD, 'X'), - (0xFCE, 'V'), - (0xFDB, 'X'), - (0x1000, 'V'), - (0x10A0, 'X'), - (0x10C7, 'M', u'ⴧ'), - (0x10C8, 'X'), - (0x10CD, 'M', u'ⴭ'), - (0x10CE, 'X'), - (0x10D0, 'V'), - (0x10FC, 'M', u'ნ'), - (0x10FD, 'V'), - (0x115F, 'X'), - (0x1161, 'V'), - (0x1249, 'X'), - (0x124A, 'V'), - (0x124E, 'X'), - (0x1250, 'V'), - (0x1257, 'X'), - (0x1258, 'V'), - (0x1259, 'X'), - (0x125A, 'V'), - (0x125E, 'X'), - (0x1260, 'V'), - (0x1289, 'X'), - (0x128A, 'V'), - ] - -def _seg_14(): - return [ - (0x128E, 'X'), - (0x1290, 'V'), - (0x12B1, 'X'), - (0x12B2, 'V'), - (0x12B6, 'X'), - (0x12B8, 'V'), - (0x12BF, 'X'), - (0x12C0, 'V'), - (0x12C1, 'X'), - (0x12C2, 'V'), - (0x12C6, 'X'), - (0x12C8, 'V'), - (0x12D7, 'X'), - (0x12D8, 'V'), - (0x1311, 'X'), - (0x1312, 'V'), - (0x1316, 'X'), - (0x1318, 'V'), - (0x135B, 'X'), - (0x135D, 'V'), - (0x137D, 'X'), - (0x1380, 'V'), - (0x139A, 'X'), - (0x13A0, 'V'), - (0x13F6, 'X'), - (0x13F8, 'M', u'Ᏸ'), - (0x13F9, 'M', u'Ᏹ'), - (0x13FA, 'M', u'Ᏺ'), - (0x13FB, 'M', u'Ᏻ'), - (0x13FC, 'M', u'Ᏼ'), - (0x13FD, 'M', u'Ᏽ'), - (0x13FE, 'X'), - (0x1400, 'V'), - (0x1680, 'X'), - (0x1681, 'V'), - (0x169D, 'X'), - (0x16A0, 'V'), - (0x16F9, 'X'), - (0x1700, 'V'), - (0x170D, 'X'), - (0x170E, 'V'), - (0x1715, 'X'), - (0x1720, 'V'), - (0x1737, 'X'), - (0x1740, 'V'), - (0x1754, 'X'), - (0x1760, 'V'), - (0x176D, 'X'), - (0x176E, 'V'), - (0x1771, 'X'), - (0x1772, 'V'), - (0x1774, 'X'), - (0x1780, 'V'), - (0x17B4, 'X'), - (0x17B6, 'V'), - 
(0x17DE, 'X'), - (0x17E0, 'V'), - (0x17EA, 'X'), - (0x17F0, 'V'), - (0x17FA, 'X'), - (0x1800, 'V'), - (0x1806, 'X'), - (0x1807, 'V'), - (0x180B, 'I'), - (0x180E, 'X'), - (0x1810, 'V'), - (0x181A, 'X'), - (0x1820, 'V'), - (0x1879, 'X'), - (0x1880, 'V'), - (0x18AB, 'X'), - (0x18B0, 'V'), - (0x18F6, 'X'), - (0x1900, 'V'), - (0x191F, 'X'), - (0x1920, 'V'), - (0x192C, 'X'), - (0x1930, 'V'), - (0x193C, 'X'), - (0x1940, 'V'), - (0x1941, 'X'), - (0x1944, 'V'), - (0x196E, 'X'), - (0x1970, 'V'), - (0x1975, 'X'), - (0x1980, 'V'), - (0x19AC, 'X'), - (0x19B0, 'V'), - (0x19CA, 'X'), - (0x19D0, 'V'), - (0x19DB, 'X'), - (0x19DE, 'V'), - (0x1A1C, 'X'), - (0x1A1E, 'V'), - (0x1A5F, 'X'), - (0x1A60, 'V'), - (0x1A7D, 'X'), - (0x1A7F, 'V'), - (0x1A8A, 'X'), - (0x1A90, 'V'), - ] - -def _seg_15(): - return [ - (0x1A9A, 'X'), - (0x1AA0, 'V'), - (0x1AAE, 'X'), - (0x1AB0, 'V'), - (0x1ABF, 'X'), - (0x1B00, 'V'), - (0x1B4C, 'X'), - (0x1B50, 'V'), - (0x1B7D, 'X'), - (0x1B80, 'V'), - (0x1BF4, 'X'), - (0x1BFC, 'V'), - (0x1C38, 'X'), - (0x1C3B, 'V'), - (0x1C4A, 'X'), - (0x1C4D, 'V'), - (0x1C80, 'M', u'в'), - (0x1C81, 'M', u'д'), - (0x1C82, 'M', u'о'), - (0x1C83, 'M', u'с'), - (0x1C84, 'M', u'т'), - (0x1C86, 'M', u'ъ'), - (0x1C87, 'M', u'ѣ'), - (0x1C88, 'M', u'ꙋ'), - (0x1C89, 'X'), - (0x1CC0, 'V'), - (0x1CC8, 'X'), - (0x1CD0, 'V'), - (0x1CFA, 'X'), - (0x1D00, 'V'), - (0x1D2C, 'M', u'a'), - (0x1D2D, 'M', u'æ'), - (0x1D2E, 'M', u'b'), - (0x1D2F, 'V'), - (0x1D30, 'M', u'd'), - (0x1D31, 'M', u'e'), - (0x1D32, 'M', u'ǝ'), - (0x1D33, 'M', u'g'), - (0x1D34, 'M', u'h'), - (0x1D35, 'M', u'i'), - (0x1D36, 'M', u'j'), - (0x1D37, 'M', u'k'), - (0x1D38, 'M', u'l'), - (0x1D39, 'M', u'm'), - (0x1D3A, 'M', u'n'), - (0x1D3B, 'V'), - (0x1D3C, 'M', u'o'), - (0x1D3D, 'M', u'ȣ'), - (0x1D3E, 'M', u'p'), - (0x1D3F, 'M', u'r'), - (0x1D40, 'M', u't'), - (0x1D41, 'M', u'u'), - (0x1D42, 'M', u'w'), - (0x1D43, 'M', u'a'), - (0x1D44, 'M', u'ɐ'), - (0x1D45, 'M', u'ɑ'), - (0x1D46, 'M', u'ᴂ'), - (0x1D47, 'M', u'b'), - (0x1D48, 'M', u'd'), - (0x1D49, 'M', u'e'), - (0x1D4A, 'M', u'ə'), - (0x1D4B, 'M', u'ɛ'), - (0x1D4C, 'M', u'ɜ'), - (0x1D4D, 'M', u'g'), - (0x1D4E, 'V'), - (0x1D4F, 'M', u'k'), - (0x1D50, 'M', u'm'), - (0x1D51, 'M', u'ŋ'), - (0x1D52, 'M', u'o'), - (0x1D53, 'M', u'ɔ'), - (0x1D54, 'M', u'ᴖ'), - (0x1D55, 'M', u'ᴗ'), - (0x1D56, 'M', u'p'), - (0x1D57, 'M', u't'), - (0x1D58, 'M', u'u'), - (0x1D59, 'M', u'ᴝ'), - (0x1D5A, 'M', u'ɯ'), - (0x1D5B, 'M', u'v'), - (0x1D5C, 'M', u'ᴥ'), - (0x1D5D, 'M', u'β'), - (0x1D5E, 'M', u'γ'), - (0x1D5F, 'M', u'δ'), - (0x1D60, 'M', u'φ'), - (0x1D61, 'M', u'χ'), - (0x1D62, 'M', u'i'), - (0x1D63, 'M', u'r'), - (0x1D64, 'M', u'u'), - (0x1D65, 'M', u'v'), - (0x1D66, 'M', u'β'), - (0x1D67, 'M', u'γ'), - (0x1D68, 'M', u'ρ'), - (0x1D69, 'M', u'φ'), - (0x1D6A, 'M', u'χ'), - (0x1D6B, 'V'), - (0x1D78, 'M', u'н'), - (0x1D79, 'V'), - (0x1D9B, 'M', u'ɒ'), - (0x1D9C, 'M', u'c'), - (0x1D9D, 'M', u'ɕ'), - (0x1D9E, 'M', u'ð'), - ] - -def _seg_16(): - return [ - (0x1D9F, 'M', u'ɜ'), - (0x1DA0, 'M', u'f'), - (0x1DA1, 'M', u'ɟ'), - (0x1DA2, 'M', u'ɡ'), - (0x1DA3, 'M', u'ɥ'), - (0x1DA4, 'M', u'ɨ'), - (0x1DA5, 'M', u'ɩ'), - (0x1DA6, 'M', u'ɪ'), - (0x1DA7, 'M', u'ᵻ'), - (0x1DA8, 'M', u'ʝ'), - (0x1DA9, 'M', u'ɭ'), - (0x1DAA, 'M', u'ᶅ'), - (0x1DAB, 'M', u'ʟ'), - (0x1DAC, 'M', u'ɱ'), - (0x1DAD, 'M', u'ɰ'), - (0x1DAE, 'M', u'ɲ'), - (0x1DAF, 'M', u'ɳ'), - (0x1DB0, 'M', u'ɴ'), - (0x1DB1, 'M', u'ɵ'), - (0x1DB2, 'M', u'ɸ'), - (0x1DB3, 'M', u'ʂ'), - (0x1DB4, 'M', u'ʃ'), - (0x1DB5, 'M', u'ƫ'), - (0x1DB6, 'M', u'ʉ'), - (0x1DB7, 'M', u'ʊ'), - (0x1DB8, 
[deleted-file hunk continues: auto-generated IDNA UTS #46 mapping-table segments `_seg_17()` through `_seg_47()`, evidently the vendored `idna` package's `uts46data.py` removed along with the old `env` directory. Each row is a `(start_codepoint, status[, mapping])` tuple, where status 'V' means valid, 'M' mapped, 'D' deviation, 'I' ignored, 'X' disallowed, and '3' disallowed_STD3_mapped; the rows in this stretch cover code points from roughly U+1DB9 up through the Arabic presentation forms near U+FD26.]
u'شح'), - (0xFD27, 'M', u'شخ'), - (0xFD28, 'M', u'شم'), - (0xFD29, 'M', u'شر'), - (0xFD2A, 'M', u'سر'), - (0xFD2B, 'M', u'صر'), - (0xFD2C, 'M', u'ضر'), - (0xFD2D, 'M', u'شج'), - (0xFD2E, 'M', u'شح'), - (0xFD2F, 'M', u'شخ'), - (0xFD30, 'M', u'شم'), - (0xFD31, 'M', u'سه'), - (0xFD32, 'M', u'شه'), - (0xFD33, 'M', u'طم'), - (0xFD34, 'M', u'سج'), - (0xFD35, 'M', u'سح'), - (0xFD36, 'M', u'سخ'), - (0xFD37, 'M', u'شج'), - (0xFD38, 'M', u'شح'), - (0xFD39, 'M', u'شخ'), - (0xFD3A, 'M', u'طم'), - (0xFD3B, 'M', u'ظم'), - (0xFD3C, 'M', u'اً'), - (0xFD3E, 'V'), - (0xFD40, 'X'), - (0xFD50, 'M', u'تجم'), - (0xFD51, 'M', u'تحج'), - (0xFD53, 'M', u'تحم'), - (0xFD54, 'M', u'تخم'), - (0xFD55, 'M', u'تمج'), - (0xFD56, 'M', u'تمح'), - (0xFD57, 'M', u'تمخ'), - (0xFD58, 'M', u'جمح'), - (0xFD5A, 'M', u'حمي'), - (0xFD5B, 'M', u'حمى'), - (0xFD5C, 'M', u'سحج'), - (0xFD5D, 'M', u'سجح'), - (0xFD5E, 'M', u'سجى'), - (0xFD5F, 'M', u'سمح'), - (0xFD61, 'M', u'سمج'), - (0xFD62, 'M', u'سمم'), - (0xFD64, 'M', u'صحح'), - (0xFD66, 'M', u'صمم'), - (0xFD67, 'M', u'شحم'), - (0xFD69, 'M', u'شجي'), - ] - -def _seg_48(): - return [ - (0xFD6A, 'M', u'شمخ'), - (0xFD6C, 'M', u'شمم'), - (0xFD6E, 'M', u'ضحى'), - (0xFD6F, 'M', u'ضخم'), - (0xFD71, 'M', u'طمح'), - (0xFD73, 'M', u'طمم'), - (0xFD74, 'M', u'طمي'), - (0xFD75, 'M', u'عجم'), - (0xFD76, 'M', u'عمم'), - (0xFD78, 'M', u'عمى'), - (0xFD79, 'M', u'غمم'), - (0xFD7A, 'M', u'غمي'), - (0xFD7B, 'M', u'غمى'), - (0xFD7C, 'M', u'فخم'), - (0xFD7E, 'M', u'قمح'), - (0xFD7F, 'M', u'قمم'), - (0xFD80, 'M', u'لحم'), - (0xFD81, 'M', u'لحي'), - (0xFD82, 'M', u'لحى'), - (0xFD83, 'M', u'لجج'), - (0xFD85, 'M', u'لخم'), - (0xFD87, 'M', u'لمح'), - (0xFD89, 'M', u'محج'), - (0xFD8A, 'M', u'محم'), - (0xFD8B, 'M', u'محي'), - (0xFD8C, 'M', u'مجح'), - (0xFD8D, 'M', u'مجم'), - (0xFD8E, 'M', u'مخج'), - (0xFD8F, 'M', u'مخم'), - (0xFD90, 'X'), - (0xFD92, 'M', u'مجخ'), - (0xFD93, 'M', u'همج'), - (0xFD94, 'M', u'همم'), - (0xFD95, 'M', u'نحم'), - (0xFD96, 'M', u'نحى'), - (0xFD97, 'M', u'نجم'), - (0xFD99, 'M', u'نجى'), - (0xFD9A, 'M', u'نمي'), - (0xFD9B, 'M', u'نمى'), - (0xFD9C, 'M', u'يمم'), - (0xFD9E, 'M', u'بخي'), - (0xFD9F, 'M', u'تجي'), - (0xFDA0, 'M', u'تجى'), - (0xFDA1, 'M', u'تخي'), - (0xFDA2, 'M', u'تخى'), - (0xFDA3, 'M', u'تمي'), - (0xFDA4, 'M', u'تمى'), - (0xFDA5, 'M', u'جمي'), - (0xFDA6, 'M', u'جحى'), - (0xFDA7, 'M', u'جمى'), - (0xFDA8, 'M', u'سخى'), - (0xFDA9, 'M', u'صحي'), - (0xFDAA, 'M', u'شحي'), - (0xFDAB, 'M', u'ضحي'), - (0xFDAC, 'M', u'لجي'), - (0xFDAD, 'M', u'لمي'), - (0xFDAE, 'M', u'يحي'), - (0xFDAF, 'M', u'يجي'), - (0xFDB0, 'M', u'يمي'), - (0xFDB1, 'M', u'ممي'), - (0xFDB2, 'M', u'قمي'), - (0xFDB3, 'M', u'نحي'), - (0xFDB4, 'M', u'قمح'), - (0xFDB5, 'M', u'لحم'), - (0xFDB6, 'M', u'عمي'), - (0xFDB7, 'M', u'كمي'), - (0xFDB8, 'M', u'نجح'), - (0xFDB9, 'M', u'مخي'), - (0xFDBA, 'M', u'لجم'), - (0xFDBB, 'M', u'كمم'), - (0xFDBC, 'M', u'لجم'), - (0xFDBD, 'M', u'نجح'), - (0xFDBE, 'M', u'جحي'), - (0xFDBF, 'M', u'حجي'), - (0xFDC0, 'M', u'مجي'), - (0xFDC1, 'M', u'فمي'), - (0xFDC2, 'M', u'بحي'), - (0xFDC3, 'M', u'كمم'), - (0xFDC4, 'M', u'عجم'), - (0xFDC5, 'M', u'صمم'), - (0xFDC6, 'M', u'سخي'), - (0xFDC7, 'M', u'نجي'), - (0xFDC8, 'X'), - (0xFDF0, 'M', u'صلے'), - (0xFDF1, 'M', u'قلے'), - (0xFDF2, 'M', u'الله'), - (0xFDF3, 'M', u'اكبر'), - (0xFDF4, 'M', u'محمد'), - (0xFDF5, 'M', u'صلعم'), - (0xFDF6, 'M', u'رسول'), - (0xFDF7, 'M', u'عليه'), - (0xFDF8, 'M', u'وسلم'), - (0xFDF9, 'M', u'صلى'), - (0xFDFA, '3', u'صلى الله عليه وسلم'), - (0xFDFB, '3', u'جل جلاله'), - (0xFDFC, 'M', u'ریال'), - (0xFDFD, 'V'), - (0xFDFE, 'X'), - 
(0xFE00, 'I'), - (0xFE10, '3', u','), - ] - -def _seg_49(): - return [ - (0xFE11, 'M', u'、'), - (0xFE12, 'X'), - (0xFE13, '3', u':'), - (0xFE14, '3', u';'), - (0xFE15, '3', u'!'), - (0xFE16, '3', u'?'), - (0xFE17, 'M', u'〖'), - (0xFE18, 'M', u'〗'), - (0xFE19, 'X'), - (0xFE20, 'V'), - (0xFE30, 'X'), - (0xFE31, 'M', u'—'), - (0xFE32, 'M', u'–'), - (0xFE33, '3', u'_'), - (0xFE35, '3', u'('), - (0xFE36, '3', u')'), - (0xFE37, '3', u'{'), - (0xFE38, '3', u'}'), - (0xFE39, 'M', u'〔'), - (0xFE3A, 'M', u'〕'), - (0xFE3B, 'M', u'【'), - (0xFE3C, 'M', u'】'), - (0xFE3D, 'M', u'《'), - (0xFE3E, 'M', u'》'), - (0xFE3F, 'M', u'〈'), - (0xFE40, 'M', u'〉'), - (0xFE41, 'M', u'「'), - (0xFE42, 'M', u'」'), - (0xFE43, 'M', u'『'), - (0xFE44, 'M', u'』'), - (0xFE45, 'V'), - (0xFE47, '3', u'['), - (0xFE48, '3', u']'), - (0xFE49, '3', u' ̅'), - (0xFE4D, '3', u'_'), - (0xFE50, '3', u','), - (0xFE51, 'M', u'、'), - (0xFE52, 'X'), - (0xFE54, '3', u';'), - (0xFE55, '3', u':'), - (0xFE56, '3', u'?'), - (0xFE57, '3', u'!'), - (0xFE58, 'M', u'—'), - (0xFE59, '3', u'('), - (0xFE5A, '3', u')'), - (0xFE5B, '3', u'{'), - (0xFE5C, '3', u'}'), - (0xFE5D, 'M', u'〔'), - (0xFE5E, 'M', u'〕'), - (0xFE5F, '3', u'#'), - (0xFE60, '3', u'&'), - (0xFE61, '3', u'*'), - (0xFE62, '3', u'+'), - (0xFE63, 'M', u'-'), - (0xFE64, '3', u'<'), - (0xFE65, '3', u'>'), - (0xFE66, '3', u'='), - (0xFE67, 'X'), - (0xFE68, '3', u'\\'), - (0xFE69, '3', u'$'), - (0xFE6A, '3', u'%'), - (0xFE6B, '3', u'@'), - (0xFE6C, 'X'), - (0xFE70, '3', u' ً'), - (0xFE71, 'M', u'ـً'), - (0xFE72, '3', u' ٌ'), - (0xFE73, 'V'), - (0xFE74, '3', u' ٍ'), - (0xFE75, 'X'), - (0xFE76, '3', u' َ'), - (0xFE77, 'M', u'ـَ'), - (0xFE78, '3', u' ُ'), - (0xFE79, 'M', u'ـُ'), - (0xFE7A, '3', u' ِ'), - (0xFE7B, 'M', u'ـِ'), - (0xFE7C, '3', u' ّ'), - (0xFE7D, 'M', u'ـّ'), - (0xFE7E, '3', u' ْ'), - (0xFE7F, 'M', u'ـْ'), - (0xFE80, 'M', u'ء'), - (0xFE81, 'M', u'آ'), - (0xFE83, 'M', u'أ'), - (0xFE85, 'M', u'ؤ'), - (0xFE87, 'M', u'إ'), - (0xFE89, 'M', u'ئ'), - (0xFE8D, 'M', u'ا'), - (0xFE8F, 'M', u'ب'), - (0xFE93, 'M', u'ة'), - (0xFE95, 'M', u'ت'), - (0xFE99, 'M', u'ث'), - (0xFE9D, 'M', u'ج'), - (0xFEA1, 'M', u'ح'), - (0xFEA5, 'M', u'خ'), - (0xFEA9, 'M', u'د'), - (0xFEAB, 'M', u'ذ'), - (0xFEAD, 'M', u'ر'), - (0xFEAF, 'M', u'ز'), - (0xFEB1, 'M', u'س'), - (0xFEB5, 'M', u'ش'), - (0xFEB9, 'M', u'ص'), - ] - -def _seg_50(): - return [ - (0xFEBD, 'M', u'ض'), - (0xFEC1, 'M', u'ط'), - (0xFEC5, 'M', u'ظ'), - (0xFEC9, 'M', u'ع'), - (0xFECD, 'M', u'غ'), - (0xFED1, 'M', u'ف'), - (0xFED5, 'M', u'ق'), - (0xFED9, 'M', u'ك'), - (0xFEDD, 'M', u'ل'), - (0xFEE1, 'M', u'م'), - (0xFEE5, 'M', u'ن'), - (0xFEE9, 'M', u'ه'), - (0xFEED, 'M', u'و'), - (0xFEEF, 'M', u'ى'), - (0xFEF1, 'M', u'ي'), - (0xFEF5, 'M', u'لآ'), - (0xFEF7, 'M', u'لأ'), - (0xFEF9, 'M', u'لإ'), - (0xFEFB, 'M', u'لا'), - (0xFEFD, 'X'), - (0xFEFF, 'I'), - (0xFF00, 'X'), - (0xFF01, '3', u'!'), - (0xFF02, '3', u'"'), - (0xFF03, '3', u'#'), - (0xFF04, '3', u'$'), - (0xFF05, '3', u'%'), - (0xFF06, '3', u'&'), - (0xFF07, '3', u'\''), - (0xFF08, '3', u'('), - (0xFF09, '3', u')'), - (0xFF0A, '3', u'*'), - (0xFF0B, '3', u'+'), - (0xFF0C, '3', u','), - (0xFF0D, 'M', u'-'), - (0xFF0E, 'M', u'.'), - (0xFF0F, '3', u'/'), - (0xFF10, 'M', u'0'), - (0xFF11, 'M', u'1'), - (0xFF12, 'M', u'2'), - (0xFF13, 'M', u'3'), - (0xFF14, 'M', u'4'), - (0xFF15, 'M', u'5'), - (0xFF16, 'M', u'6'), - (0xFF17, 'M', u'7'), - (0xFF18, 'M', u'8'), - (0xFF19, 'M', u'9'), - (0xFF1A, '3', u':'), - (0xFF1B, '3', u';'), - (0xFF1C, '3', u'<'), - (0xFF1D, '3', u'='), - (0xFF1E, '3', u'>'), - (0xFF1F, 
'3', u'?'), - (0xFF20, '3', u'@'), - (0xFF21, 'M', u'a'), - (0xFF22, 'M', u'b'), - (0xFF23, 'M', u'c'), - (0xFF24, 'M', u'd'), - (0xFF25, 'M', u'e'), - (0xFF26, 'M', u'f'), - (0xFF27, 'M', u'g'), - (0xFF28, 'M', u'h'), - (0xFF29, 'M', u'i'), - (0xFF2A, 'M', u'j'), - (0xFF2B, 'M', u'k'), - (0xFF2C, 'M', u'l'), - (0xFF2D, 'M', u'm'), - (0xFF2E, 'M', u'n'), - (0xFF2F, 'M', u'o'), - (0xFF30, 'M', u'p'), - (0xFF31, 'M', u'q'), - (0xFF32, 'M', u'r'), - (0xFF33, 'M', u's'), - (0xFF34, 'M', u't'), - (0xFF35, 'M', u'u'), - (0xFF36, 'M', u'v'), - (0xFF37, 'M', u'w'), - (0xFF38, 'M', u'x'), - (0xFF39, 'M', u'y'), - (0xFF3A, 'M', u'z'), - (0xFF3B, '3', u'['), - (0xFF3C, '3', u'\\'), - (0xFF3D, '3', u']'), - (0xFF3E, '3', u'^'), - (0xFF3F, '3', u'_'), - (0xFF40, '3', u'`'), - (0xFF41, 'M', u'a'), - (0xFF42, 'M', u'b'), - (0xFF43, 'M', u'c'), - (0xFF44, 'M', u'd'), - (0xFF45, 'M', u'e'), - (0xFF46, 'M', u'f'), - (0xFF47, 'M', u'g'), - (0xFF48, 'M', u'h'), - (0xFF49, 'M', u'i'), - (0xFF4A, 'M', u'j'), - (0xFF4B, 'M', u'k'), - (0xFF4C, 'M', u'l'), - (0xFF4D, 'M', u'm'), - (0xFF4E, 'M', u'n'), - ] - -def _seg_51(): - return [ - (0xFF4F, 'M', u'o'), - (0xFF50, 'M', u'p'), - (0xFF51, 'M', u'q'), - (0xFF52, 'M', u'r'), - (0xFF53, 'M', u's'), - (0xFF54, 'M', u't'), - (0xFF55, 'M', u'u'), - (0xFF56, 'M', u'v'), - (0xFF57, 'M', u'w'), - (0xFF58, 'M', u'x'), - (0xFF59, 'M', u'y'), - (0xFF5A, 'M', u'z'), - (0xFF5B, '3', u'{'), - (0xFF5C, '3', u'|'), - (0xFF5D, '3', u'}'), - (0xFF5E, '3', u'~'), - (0xFF5F, 'M', u'⦅'), - (0xFF60, 'M', u'⦆'), - (0xFF61, 'M', u'.'), - (0xFF62, 'M', u'「'), - (0xFF63, 'M', u'」'), - (0xFF64, 'M', u'、'), - (0xFF65, 'M', u'・'), - (0xFF66, 'M', u'ヲ'), - (0xFF67, 'M', u'ァ'), - (0xFF68, 'M', u'ィ'), - (0xFF69, 'M', u'ゥ'), - (0xFF6A, 'M', u'ェ'), - (0xFF6B, 'M', u'ォ'), - (0xFF6C, 'M', u'ャ'), - (0xFF6D, 'M', u'ュ'), - (0xFF6E, 'M', u'ョ'), - (0xFF6F, 'M', u'ッ'), - (0xFF70, 'M', u'ー'), - (0xFF71, 'M', u'ア'), - (0xFF72, 'M', u'イ'), - (0xFF73, 'M', u'ウ'), - (0xFF74, 'M', u'エ'), - (0xFF75, 'M', u'オ'), - (0xFF76, 'M', u'カ'), - (0xFF77, 'M', u'キ'), - (0xFF78, 'M', u'ク'), - (0xFF79, 'M', u'ケ'), - (0xFF7A, 'M', u'コ'), - (0xFF7B, 'M', u'サ'), - (0xFF7C, 'M', u'シ'), - (0xFF7D, 'M', u'ス'), - (0xFF7E, 'M', u'セ'), - (0xFF7F, 'M', u'ソ'), - (0xFF80, 'M', u'タ'), - (0xFF81, 'M', u'チ'), - (0xFF82, 'M', u'ツ'), - (0xFF83, 'M', u'テ'), - (0xFF84, 'M', u'ト'), - (0xFF85, 'M', u'ナ'), - (0xFF86, 'M', u'ニ'), - (0xFF87, 'M', u'ヌ'), - (0xFF88, 'M', u'ネ'), - (0xFF89, 'M', u'ノ'), - (0xFF8A, 'M', u'ハ'), - (0xFF8B, 'M', u'ヒ'), - (0xFF8C, 'M', u'フ'), - (0xFF8D, 'M', u'ヘ'), - (0xFF8E, 'M', u'ホ'), - (0xFF8F, 'M', u'マ'), - (0xFF90, 'M', u'ミ'), - (0xFF91, 'M', u'ム'), - (0xFF92, 'M', u'メ'), - (0xFF93, 'M', u'モ'), - (0xFF94, 'M', u'ヤ'), - (0xFF95, 'M', u'ユ'), - (0xFF96, 'M', u'ヨ'), - (0xFF97, 'M', u'ラ'), - (0xFF98, 'M', u'リ'), - (0xFF99, 'M', u'ル'), - (0xFF9A, 'M', u'レ'), - (0xFF9B, 'M', u'ロ'), - (0xFF9C, 'M', u'ワ'), - (0xFF9D, 'M', u'ン'), - (0xFF9E, 'M', u'゙'), - (0xFF9F, 'M', u'゚'), - (0xFFA0, 'X'), - (0xFFA1, 'M', u'ᄀ'), - (0xFFA2, 'M', u'ᄁ'), - (0xFFA3, 'M', u'ᆪ'), - (0xFFA4, 'M', u'ᄂ'), - (0xFFA5, 'M', u'ᆬ'), - (0xFFA6, 'M', u'ᆭ'), - (0xFFA7, 'M', u'ᄃ'), - (0xFFA8, 'M', u'ᄄ'), - (0xFFA9, 'M', u'ᄅ'), - (0xFFAA, 'M', u'ᆰ'), - (0xFFAB, 'M', u'ᆱ'), - (0xFFAC, 'M', u'ᆲ'), - (0xFFAD, 'M', u'ᆳ'), - (0xFFAE, 'M', u'ᆴ'), - (0xFFAF, 'M', u'ᆵ'), - (0xFFB0, 'M', u'ᄚ'), - (0xFFB1, 'M', u'ᄆ'), - (0xFFB2, 'M', u'ᄇ'), - ] - -def _seg_52(): - return [ - (0xFFB3, 'M', u'ᄈ'), - (0xFFB4, 'M', u'ᄡ'), - (0xFFB5, 'M', u'ᄉ'), - (0xFFB6, 'M', u'ᄊ'), - 
(0xFFB7, 'M', u'ᄋ'), - (0xFFB8, 'M', u'ᄌ'), - (0xFFB9, 'M', u'ᄍ'), - (0xFFBA, 'M', u'ᄎ'), - (0xFFBB, 'M', u'ᄏ'), - (0xFFBC, 'M', u'ᄐ'), - (0xFFBD, 'M', u'ᄑ'), - (0xFFBE, 'M', u'ᄒ'), - (0xFFBF, 'X'), - (0xFFC2, 'M', u'ᅡ'), - (0xFFC3, 'M', u'ᅢ'), - (0xFFC4, 'M', u'ᅣ'), - (0xFFC5, 'M', u'ᅤ'), - (0xFFC6, 'M', u'ᅥ'), - (0xFFC7, 'M', u'ᅦ'), - (0xFFC8, 'X'), - (0xFFCA, 'M', u'ᅧ'), - (0xFFCB, 'M', u'ᅨ'), - (0xFFCC, 'M', u'ᅩ'), - (0xFFCD, 'M', u'ᅪ'), - (0xFFCE, 'M', u'ᅫ'), - (0xFFCF, 'M', u'ᅬ'), - (0xFFD0, 'X'), - (0xFFD2, 'M', u'ᅭ'), - (0xFFD3, 'M', u'ᅮ'), - (0xFFD4, 'M', u'ᅯ'), - (0xFFD5, 'M', u'ᅰ'), - (0xFFD6, 'M', u'ᅱ'), - (0xFFD7, 'M', u'ᅲ'), - (0xFFD8, 'X'), - (0xFFDA, 'M', u'ᅳ'), - (0xFFDB, 'M', u'ᅴ'), - (0xFFDC, 'M', u'ᅵ'), - (0xFFDD, 'X'), - (0xFFE0, 'M', u'¢'), - (0xFFE1, 'M', u'£'), - (0xFFE2, 'M', u'¬'), - (0xFFE3, '3', u' ̄'), - (0xFFE4, 'M', u'¦'), - (0xFFE5, 'M', u'¥'), - (0xFFE6, 'M', u'₩'), - (0xFFE7, 'X'), - (0xFFE8, 'M', u'│'), - (0xFFE9, 'M', u'←'), - (0xFFEA, 'M', u'↑'), - (0xFFEB, 'M', u'→'), - (0xFFEC, 'M', u'↓'), - (0xFFED, 'M', u'■'), - (0xFFEE, 'M', u'○'), - (0xFFEF, 'X'), - (0x10000, 'V'), - (0x1000C, 'X'), - (0x1000D, 'V'), - (0x10027, 'X'), - (0x10028, 'V'), - (0x1003B, 'X'), - (0x1003C, 'V'), - (0x1003E, 'X'), - (0x1003F, 'V'), - (0x1004E, 'X'), - (0x10050, 'V'), - (0x1005E, 'X'), - (0x10080, 'V'), - (0x100FB, 'X'), - (0x10100, 'V'), - (0x10103, 'X'), - (0x10107, 'V'), - (0x10134, 'X'), - (0x10137, 'V'), - (0x1018F, 'X'), - (0x10190, 'V'), - (0x1019C, 'X'), - (0x101A0, 'V'), - (0x101A1, 'X'), - (0x101D0, 'V'), - (0x101FE, 'X'), - (0x10280, 'V'), - (0x1029D, 'X'), - (0x102A0, 'V'), - (0x102D1, 'X'), - (0x102E0, 'V'), - (0x102FC, 'X'), - (0x10300, 'V'), - (0x10324, 'X'), - (0x1032D, 'V'), - (0x1034B, 'X'), - (0x10350, 'V'), - (0x1037B, 'X'), - (0x10380, 'V'), - (0x1039E, 'X'), - (0x1039F, 'V'), - (0x103C4, 'X'), - (0x103C8, 'V'), - (0x103D6, 'X'), - (0x10400, 'M', u'𐐨'), - (0x10401, 'M', u'𐐩'), - ] - -def _seg_53(): - return [ - (0x10402, 'M', u'𐐪'), - (0x10403, 'M', u'𐐫'), - (0x10404, 'M', u'𐐬'), - (0x10405, 'M', u'𐐭'), - (0x10406, 'M', u'𐐮'), - (0x10407, 'M', u'𐐯'), - (0x10408, 'M', u'𐐰'), - (0x10409, 'M', u'𐐱'), - (0x1040A, 'M', u'𐐲'), - (0x1040B, 'M', u'𐐳'), - (0x1040C, 'M', u'𐐴'), - (0x1040D, 'M', u'𐐵'), - (0x1040E, 'M', u'𐐶'), - (0x1040F, 'M', u'𐐷'), - (0x10410, 'M', u'𐐸'), - (0x10411, 'M', u'𐐹'), - (0x10412, 'M', u'𐐺'), - (0x10413, 'M', u'𐐻'), - (0x10414, 'M', u'𐐼'), - (0x10415, 'M', u'𐐽'), - (0x10416, 'M', u'𐐾'), - (0x10417, 'M', u'𐐿'), - (0x10418, 'M', u'𐑀'), - (0x10419, 'M', u'𐑁'), - (0x1041A, 'M', u'𐑂'), - (0x1041B, 'M', u'𐑃'), - (0x1041C, 'M', u'𐑄'), - (0x1041D, 'M', u'𐑅'), - (0x1041E, 'M', u'𐑆'), - (0x1041F, 'M', u'𐑇'), - (0x10420, 'M', u'𐑈'), - (0x10421, 'M', u'𐑉'), - (0x10422, 'M', u'𐑊'), - (0x10423, 'M', u'𐑋'), - (0x10424, 'M', u'𐑌'), - (0x10425, 'M', u'𐑍'), - (0x10426, 'M', u'𐑎'), - (0x10427, 'M', u'𐑏'), - (0x10428, 'V'), - (0x1049E, 'X'), - (0x104A0, 'V'), - (0x104AA, 'X'), - (0x104B0, 'M', u'𐓘'), - (0x104B1, 'M', u'𐓙'), - (0x104B2, 'M', u'𐓚'), - (0x104B3, 'M', u'𐓛'), - (0x104B4, 'M', u'𐓜'), - (0x104B5, 'M', u'𐓝'), - (0x104B6, 'M', u'𐓞'), - (0x104B7, 'M', u'𐓟'), - (0x104B8, 'M', u'𐓠'), - (0x104B9, 'M', u'𐓡'), - (0x104BA, 'M', u'𐓢'), - (0x104BB, 'M', u'𐓣'), - (0x104BC, 'M', u'𐓤'), - (0x104BD, 'M', u'𐓥'), - (0x104BE, 'M', u'𐓦'), - (0x104BF, 'M', u'𐓧'), - (0x104C0, 'M', u'𐓨'), - (0x104C1, 'M', u'𐓩'), - (0x104C2, 'M', u'𐓪'), - (0x104C3, 'M', u'𐓫'), - (0x104C4, 'M', u'𐓬'), - (0x104C5, 'M', u'𐓭'), - (0x104C6, 'M', u'𐓮'), - (0x104C7, 'M', u'𐓯'), - (0x104C8, 
'M', u'𐓰'), - (0x104C9, 'M', u'𐓱'), - (0x104CA, 'M', u'𐓲'), - (0x104CB, 'M', u'𐓳'), - (0x104CC, 'M', u'𐓴'), - (0x104CD, 'M', u'𐓵'), - (0x104CE, 'M', u'𐓶'), - (0x104CF, 'M', u'𐓷'), - (0x104D0, 'M', u'𐓸'), - (0x104D1, 'M', u'𐓹'), - (0x104D2, 'M', u'𐓺'), - (0x104D3, 'M', u'𐓻'), - (0x104D4, 'X'), - (0x104D8, 'V'), - (0x104FC, 'X'), - (0x10500, 'V'), - (0x10528, 'X'), - (0x10530, 'V'), - (0x10564, 'X'), - (0x1056F, 'V'), - (0x10570, 'X'), - (0x10600, 'V'), - (0x10737, 'X'), - (0x10740, 'V'), - (0x10756, 'X'), - (0x10760, 'V'), - (0x10768, 'X'), - (0x10800, 'V'), - (0x10806, 'X'), - (0x10808, 'V'), - (0x10809, 'X'), - (0x1080A, 'V'), - (0x10836, 'X'), - (0x10837, 'V'), - ] - -def _seg_54(): - return [ - (0x10839, 'X'), - (0x1083C, 'V'), - (0x1083D, 'X'), - (0x1083F, 'V'), - (0x10856, 'X'), - (0x10857, 'V'), - (0x1089F, 'X'), - (0x108A7, 'V'), - (0x108B0, 'X'), - (0x108E0, 'V'), - (0x108F3, 'X'), - (0x108F4, 'V'), - (0x108F6, 'X'), - (0x108FB, 'V'), - (0x1091C, 'X'), - (0x1091F, 'V'), - (0x1093A, 'X'), - (0x1093F, 'V'), - (0x10940, 'X'), - (0x10980, 'V'), - (0x109B8, 'X'), - (0x109BC, 'V'), - (0x109D0, 'X'), - (0x109D2, 'V'), - (0x10A04, 'X'), - (0x10A05, 'V'), - (0x10A07, 'X'), - (0x10A0C, 'V'), - (0x10A14, 'X'), - (0x10A15, 'V'), - (0x10A18, 'X'), - (0x10A19, 'V'), - (0x10A36, 'X'), - (0x10A38, 'V'), - (0x10A3B, 'X'), - (0x10A3F, 'V'), - (0x10A49, 'X'), - (0x10A50, 'V'), - (0x10A59, 'X'), - (0x10A60, 'V'), - (0x10AA0, 'X'), - (0x10AC0, 'V'), - (0x10AE7, 'X'), - (0x10AEB, 'V'), - (0x10AF7, 'X'), - (0x10B00, 'V'), - (0x10B36, 'X'), - (0x10B39, 'V'), - (0x10B56, 'X'), - (0x10B58, 'V'), - (0x10B73, 'X'), - (0x10B78, 'V'), - (0x10B92, 'X'), - (0x10B99, 'V'), - (0x10B9D, 'X'), - (0x10BA9, 'V'), - (0x10BB0, 'X'), - (0x10C00, 'V'), - (0x10C49, 'X'), - (0x10C80, 'M', u'𐳀'), - (0x10C81, 'M', u'𐳁'), - (0x10C82, 'M', u'𐳂'), - (0x10C83, 'M', u'𐳃'), - (0x10C84, 'M', u'𐳄'), - (0x10C85, 'M', u'𐳅'), - (0x10C86, 'M', u'𐳆'), - (0x10C87, 'M', u'𐳇'), - (0x10C88, 'M', u'𐳈'), - (0x10C89, 'M', u'𐳉'), - (0x10C8A, 'M', u'𐳊'), - (0x10C8B, 'M', u'𐳋'), - (0x10C8C, 'M', u'𐳌'), - (0x10C8D, 'M', u'𐳍'), - (0x10C8E, 'M', u'𐳎'), - (0x10C8F, 'M', u'𐳏'), - (0x10C90, 'M', u'𐳐'), - (0x10C91, 'M', u'𐳑'), - (0x10C92, 'M', u'𐳒'), - (0x10C93, 'M', u'𐳓'), - (0x10C94, 'M', u'𐳔'), - (0x10C95, 'M', u'𐳕'), - (0x10C96, 'M', u'𐳖'), - (0x10C97, 'M', u'𐳗'), - (0x10C98, 'M', u'𐳘'), - (0x10C99, 'M', u'𐳙'), - (0x10C9A, 'M', u'𐳚'), - (0x10C9B, 'M', u'𐳛'), - (0x10C9C, 'M', u'𐳜'), - (0x10C9D, 'M', u'𐳝'), - (0x10C9E, 'M', u'𐳞'), - (0x10C9F, 'M', u'𐳟'), - (0x10CA0, 'M', u'𐳠'), - (0x10CA1, 'M', u'𐳡'), - (0x10CA2, 'M', u'𐳢'), - (0x10CA3, 'M', u'𐳣'), - (0x10CA4, 'M', u'𐳤'), - (0x10CA5, 'M', u'𐳥'), - (0x10CA6, 'M', u'𐳦'), - (0x10CA7, 'M', u'𐳧'), - (0x10CA8, 'M', u'𐳨'), - ] - -def _seg_55(): - return [ - (0x10CA9, 'M', u'𐳩'), - (0x10CAA, 'M', u'𐳪'), - (0x10CAB, 'M', u'𐳫'), - (0x10CAC, 'M', u'𐳬'), - (0x10CAD, 'M', u'𐳭'), - (0x10CAE, 'M', u'𐳮'), - (0x10CAF, 'M', u'𐳯'), - (0x10CB0, 'M', u'𐳰'), - (0x10CB1, 'M', u'𐳱'), - (0x10CB2, 'M', u'𐳲'), - (0x10CB3, 'X'), - (0x10CC0, 'V'), - (0x10CF3, 'X'), - (0x10CFA, 'V'), - (0x10D28, 'X'), - (0x10D30, 'V'), - (0x10D3A, 'X'), - (0x10E60, 'V'), - (0x10E7F, 'X'), - (0x10F00, 'V'), - (0x10F28, 'X'), - (0x10F30, 'V'), - (0x10F5A, 'X'), - (0x11000, 'V'), - (0x1104E, 'X'), - (0x11052, 'V'), - (0x11070, 'X'), - (0x1107F, 'V'), - (0x110BD, 'X'), - (0x110BE, 'V'), - (0x110C2, 'X'), - (0x110D0, 'V'), - (0x110E9, 'X'), - (0x110F0, 'V'), - (0x110FA, 'X'), - (0x11100, 'V'), - (0x11135, 'X'), - (0x11136, 'V'), - (0x11147, 'X'), - 
(0x11150, 'V'), - (0x11177, 'X'), - (0x11180, 'V'), - (0x111CE, 'X'), - (0x111D0, 'V'), - (0x111E0, 'X'), - (0x111E1, 'V'), - (0x111F5, 'X'), - (0x11200, 'V'), - (0x11212, 'X'), - (0x11213, 'V'), - (0x1123F, 'X'), - (0x11280, 'V'), - (0x11287, 'X'), - (0x11288, 'V'), - (0x11289, 'X'), - (0x1128A, 'V'), - (0x1128E, 'X'), - (0x1128F, 'V'), - (0x1129E, 'X'), - (0x1129F, 'V'), - (0x112AA, 'X'), - (0x112B0, 'V'), - (0x112EB, 'X'), - (0x112F0, 'V'), - (0x112FA, 'X'), - (0x11300, 'V'), - (0x11304, 'X'), - (0x11305, 'V'), - (0x1130D, 'X'), - (0x1130F, 'V'), - (0x11311, 'X'), - (0x11313, 'V'), - (0x11329, 'X'), - (0x1132A, 'V'), - (0x11331, 'X'), - (0x11332, 'V'), - (0x11334, 'X'), - (0x11335, 'V'), - (0x1133A, 'X'), - (0x1133B, 'V'), - (0x11345, 'X'), - (0x11347, 'V'), - (0x11349, 'X'), - (0x1134B, 'V'), - (0x1134E, 'X'), - (0x11350, 'V'), - (0x11351, 'X'), - (0x11357, 'V'), - (0x11358, 'X'), - (0x1135D, 'V'), - (0x11364, 'X'), - (0x11366, 'V'), - (0x1136D, 'X'), - (0x11370, 'V'), - (0x11375, 'X'), - (0x11400, 'V'), - (0x1145A, 'X'), - (0x1145B, 'V'), - (0x1145C, 'X'), - (0x1145D, 'V'), - ] - -def _seg_56(): - return [ - (0x1145F, 'X'), - (0x11480, 'V'), - (0x114C8, 'X'), - (0x114D0, 'V'), - (0x114DA, 'X'), - (0x11580, 'V'), - (0x115B6, 'X'), - (0x115B8, 'V'), - (0x115DE, 'X'), - (0x11600, 'V'), - (0x11645, 'X'), - (0x11650, 'V'), - (0x1165A, 'X'), - (0x11660, 'V'), - (0x1166D, 'X'), - (0x11680, 'V'), - (0x116B8, 'X'), - (0x116C0, 'V'), - (0x116CA, 'X'), - (0x11700, 'V'), - (0x1171B, 'X'), - (0x1171D, 'V'), - (0x1172C, 'X'), - (0x11730, 'V'), - (0x11740, 'X'), - (0x11800, 'V'), - (0x1183C, 'X'), - (0x118A0, 'M', u'𑣀'), - (0x118A1, 'M', u'𑣁'), - (0x118A2, 'M', u'𑣂'), - (0x118A3, 'M', u'𑣃'), - (0x118A4, 'M', u'𑣄'), - (0x118A5, 'M', u'𑣅'), - (0x118A6, 'M', u'𑣆'), - (0x118A7, 'M', u'𑣇'), - (0x118A8, 'M', u'𑣈'), - (0x118A9, 'M', u'𑣉'), - (0x118AA, 'M', u'𑣊'), - (0x118AB, 'M', u'𑣋'), - (0x118AC, 'M', u'𑣌'), - (0x118AD, 'M', u'𑣍'), - (0x118AE, 'M', u'𑣎'), - (0x118AF, 'M', u'𑣏'), - (0x118B0, 'M', u'𑣐'), - (0x118B1, 'M', u'𑣑'), - (0x118B2, 'M', u'𑣒'), - (0x118B3, 'M', u'𑣓'), - (0x118B4, 'M', u'𑣔'), - (0x118B5, 'M', u'𑣕'), - (0x118B6, 'M', u'𑣖'), - (0x118B7, 'M', u'𑣗'), - (0x118B8, 'M', u'𑣘'), - (0x118B9, 'M', u'𑣙'), - (0x118BA, 'M', u'𑣚'), - (0x118BB, 'M', u'𑣛'), - (0x118BC, 'M', u'𑣜'), - (0x118BD, 'M', u'𑣝'), - (0x118BE, 'M', u'𑣞'), - (0x118BF, 'M', u'𑣟'), - (0x118C0, 'V'), - (0x118F3, 'X'), - (0x118FF, 'V'), - (0x11900, 'X'), - (0x11A00, 'V'), - (0x11A48, 'X'), - (0x11A50, 'V'), - (0x11A84, 'X'), - (0x11A86, 'V'), - (0x11AA3, 'X'), - (0x11AC0, 'V'), - (0x11AF9, 'X'), - (0x11C00, 'V'), - (0x11C09, 'X'), - (0x11C0A, 'V'), - (0x11C37, 'X'), - (0x11C38, 'V'), - (0x11C46, 'X'), - (0x11C50, 'V'), - (0x11C6D, 'X'), - (0x11C70, 'V'), - (0x11C90, 'X'), - (0x11C92, 'V'), - (0x11CA8, 'X'), - (0x11CA9, 'V'), - (0x11CB7, 'X'), - (0x11D00, 'V'), - (0x11D07, 'X'), - (0x11D08, 'V'), - (0x11D0A, 'X'), - (0x11D0B, 'V'), - (0x11D37, 'X'), - (0x11D3A, 'V'), - (0x11D3B, 'X'), - (0x11D3C, 'V'), - (0x11D3E, 'X'), - (0x11D3F, 'V'), - (0x11D48, 'X'), - (0x11D50, 'V'), - (0x11D5A, 'X'), - (0x11D60, 'V'), - ] - -def _seg_57(): - return [ - (0x11D66, 'X'), - (0x11D67, 'V'), - (0x11D69, 'X'), - (0x11D6A, 'V'), - (0x11D8F, 'X'), - (0x11D90, 'V'), - (0x11D92, 'X'), - (0x11D93, 'V'), - (0x11D99, 'X'), - (0x11DA0, 'V'), - (0x11DAA, 'X'), - (0x11EE0, 'V'), - (0x11EF9, 'X'), - (0x12000, 'V'), - (0x1239A, 'X'), - (0x12400, 'V'), - (0x1246F, 'X'), - (0x12470, 'V'), - (0x12475, 'X'), - (0x12480, 'V'), - (0x12544, 'X'), - (0x13000, 'V'), - 
(0x1342F, 'X'), - (0x14400, 'V'), - (0x14647, 'X'), - (0x16800, 'V'), - (0x16A39, 'X'), - (0x16A40, 'V'), - (0x16A5F, 'X'), - (0x16A60, 'V'), - (0x16A6A, 'X'), - (0x16A6E, 'V'), - (0x16A70, 'X'), - (0x16AD0, 'V'), - (0x16AEE, 'X'), - (0x16AF0, 'V'), - (0x16AF6, 'X'), - (0x16B00, 'V'), - (0x16B46, 'X'), - (0x16B50, 'V'), - (0x16B5A, 'X'), - (0x16B5B, 'V'), - (0x16B62, 'X'), - (0x16B63, 'V'), - (0x16B78, 'X'), - (0x16B7D, 'V'), - (0x16B90, 'X'), - (0x16E60, 'V'), - (0x16E9B, 'X'), - (0x16F00, 'V'), - (0x16F45, 'X'), - (0x16F50, 'V'), - (0x16F7F, 'X'), - (0x16F8F, 'V'), - (0x16FA0, 'X'), - (0x16FE0, 'V'), - (0x16FE2, 'X'), - (0x17000, 'V'), - (0x187F2, 'X'), - (0x18800, 'V'), - (0x18AF3, 'X'), - (0x1B000, 'V'), - (0x1B11F, 'X'), - (0x1B170, 'V'), - (0x1B2FC, 'X'), - (0x1BC00, 'V'), - (0x1BC6B, 'X'), - (0x1BC70, 'V'), - (0x1BC7D, 'X'), - (0x1BC80, 'V'), - (0x1BC89, 'X'), - (0x1BC90, 'V'), - (0x1BC9A, 'X'), - (0x1BC9C, 'V'), - (0x1BCA0, 'I'), - (0x1BCA4, 'X'), - (0x1D000, 'V'), - (0x1D0F6, 'X'), - (0x1D100, 'V'), - (0x1D127, 'X'), - (0x1D129, 'V'), - (0x1D15E, 'M', u'𝅗𝅥'), - (0x1D15F, 'M', u'𝅘𝅥'), - (0x1D160, 'M', u'𝅘𝅥𝅮'), - (0x1D161, 'M', u'𝅘𝅥𝅯'), - (0x1D162, 'M', u'𝅘𝅥𝅰'), - (0x1D163, 'M', u'𝅘𝅥𝅱'), - (0x1D164, 'M', u'𝅘𝅥𝅲'), - (0x1D165, 'V'), - (0x1D173, 'X'), - (0x1D17B, 'V'), - (0x1D1BB, 'M', u'𝆹𝅥'), - (0x1D1BC, 'M', u'𝆺𝅥'), - (0x1D1BD, 'M', u'𝆹𝅥𝅮'), - (0x1D1BE, 'M', u'𝆺𝅥𝅮'), - (0x1D1BF, 'M', u'𝆹𝅥𝅯'), - (0x1D1C0, 'M', u'𝆺𝅥𝅯'), - (0x1D1C1, 'V'), - (0x1D1E9, 'X'), - (0x1D200, 'V'), - ] - -def _seg_58(): - return [ - (0x1D246, 'X'), - (0x1D2E0, 'V'), - (0x1D2F4, 'X'), - (0x1D300, 'V'), - (0x1D357, 'X'), - (0x1D360, 'V'), - (0x1D379, 'X'), - (0x1D400, 'M', u'a'), - (0x1D401, 'M', u'b'), - (0x1D402, 'M', u'c'), - (0x1D403, 'M', u'd'), - (0x1D404, 'M', u'e'), - (0x1D405, 'M', u'f'), - (0x1D406, 'M', u'g'), - (0x1D407, 'M', u'h'), - (0x1D408, 'M', u'i'), - (0x1D409, 'M', u'j'), - (0x1D40A, 'M', u'k'), - (0x1D40B, 'M', u'l'), - (0x1D40C, 'M', u'm'), - (0x1D40D, 'M', u'n'), - (0x1D40E, 'M', u'o'), - (0x1D40F, 'M', u'p'), - (0x1D410, 'M', u'q'), - (0x1D411, 'M', u'r'), - (0x1D412, 'M', u's'), - (0x1D413, 'M', u't'), - (0x1D414, 'M', u'u'), - (0x1D415, 'M', u'v'), - (0x1D416, 'M', u'w'), - (0x1D417, 'M', u'x'), - (0x1D418, 'M', u'y'), - (0x1D419, 'M', u'z'), - (0x1D41A, 'M', u'a'), - (0x1D41B, 'M', u'b'), - (0x1D41C, 'M', u'c'), - (0x1D41D, 'M', u'd'), - (0x1D41E, 'M', u'e'), - (0x1D41F, 'M', u'f'), - (0x1D420, 'M', u'g'), - (0x1D421, 'M', u'h'), - (0x1D422, 'M', u'i'), - (0x1D423, 'M', u'j'), - (0x1D424, 'M', u'k'), - (0x1D425, 'M', u'l'), - (0x1D426, 'M', u'm'), - (0x1D427, 'M', u'n'), - (0x1D428, 'M', u'o'), - (0x1D429, 'M', u'p'), - (0x1D42A, 'M', u'q'), - (0x1D42B, 'M', u'r'), - (0x1D42C, 'M', u's'), - (0x1D42D, 'M', u't'), - (0x1D42E, 'M', u'u'), - (0x1D42F, 'M', u'v'), - (0x1D430, 'M', u'w'), - (0x1D431, 'M', u'x'), - (0x1D432, 'M', u'y'), - (0x1D433, 'M', u'z'), - (0x1D434, 'M', u'a'), - (0x1D435, 'M', u'b'), - (0x1D436, 'M', u'c'), - (0x1D437, 'M', u'd'), - (0x1D438, 'M', u'e'), - (0x1D439, 'M', u'f'), - (0x1D43A, 'M', u'g'), - (0x1D43B, 'M', u'h'), - (0x1D43C, 'M', u'i'), - (0x1D43D, 'M', u'j'), - (0x1D43E, 'M', u'k'), - (0x1D43F, 'M', u'l'), - (0x1D440, 'M', u'm'), - (0x1D441, 'M', u'n'), - (0x1D442, 'M', u'o'), - (0x1D443, 'M', u'p'), - (0x1D444, 'M', u'q'), - (0x1D445, 'M', u'r'), - (0x1D446, 'M', u's'), - (0x1D447, 'M', u't'), - (0x1D448, 'M', u'u'), - (0x1D449, 'M', u'v'), - (0x1D44A, 'M', u'w'), - (0x1D44B, 'M', u'x'), - (0x1D44C, 'M', u'y'), - (0x1D44D, 'M', u'z'), - (0x1D44E, 'M', 
u'a'), - (0x1D44F, 'M', u'b'), - (0x1D450, 'M', u'c'), - (0x1D451, 'M', u'd'), - (0x1D452, 'M', u'e'), - (0x1D453, 'M', u'f'), - (0x1D454, 'M', u'g'), - (0x1D455, 'X'), - (0x1D456, 'M', u'i'), - (0x1D457, 'M', u'j'), - (0x1D458, 'M', u'k'), - (0x1D459, 'M', u'l'), - (0x1D45A, 'M', u'm'), - (0x1D45B, 'M', u'n'), - (0x1D45C, 'M', u'o'), - ] - -def _seg_59(): - return [ - (0x1D45D, 'M', u'p'), - (0x1D45E, 'M', u'q'), - (0x1D45F, 'M', u'r'), - (0x1D460, 'M', u's'), - (0x1D461, 'M', u't'), - (0x1D462, 'M', u'u'), - (0x1D463, 'M', u'v'), - (0x1D464, 'M', u'w'), - (0x1D465, 'M', u'x'), - (0x1D466, 'M', u'y'), - (0x1D467, 'M', u'z'), - (0x1D468, 'M', u'a'), - (0x1D469, 'M', u'b'), - (0x1D46A, 'M', u'c'), - (0x1D46B, 'M', u'd'), - (0x1D46C, 'M', u'e'), - (0x1D46D, 'M', u'f'), - (0x1D46E, 'M', u'g'), - (0x1D46F, 'M', u'h'), - (0x1D470, 'M', u'i'), - (0x1D471, 'M', u'j'), - (0x1D472, 'M', u'k'), - (0x1D473, 'M', u'l'), - (0x1D474, 'M', u'm'), - (0x1D475, 'M', u'n'), - (0x1D476, 'M', u'o'), - (0x1D477, 'M', u'p'), - (0x1D478, 'M', u'q'), - (0x1D479, 'M', u'r'), - (0x1D47A, 'M', u's'), - (0x1D47B, 'M', u't'), - (0x1D47C, 'M', u'u'), - (0x1D47D, 'M', u'v'), - (0x1D47E, 'M', u'w'), - (0x1D47F, 'M', u'x'), - (0x1D480, 'M', u'y'), - (0x1D481, 'M', u'z'), - (0x1D482, 'M', u'a'), - (0x1D483, 'M', u'b'), - (0x1D484, 'M', u'c'), - (0x1D485, 'M', u'd'), - (0x1D486, 'M', u'e'), - (0x1D487, 'M', u'f'), - (0x1D488, 'M', u'g'), - (0x1D489, 'M', u'h'), - (0x1D48A, 'M', u'i'), - (0x1D48B, 'M', u'j'), - (0x1D48C, 'M', u'k'), - (0x1D48D, 'M', u'l'), - (0x1D48E, 'M', u'm'), - (0x1D48F, 'M', u'n'), - (0x1D490, 'M', u'o'), - (0x1D491, 'M', u'p'), - (0x1D492, 'M', u'q'), - (0x1D493, 'M', u'r'), - (0x1D494, 'M', u's'), - (0x1D495, 'M', u't'), - (0x1D496, 'M', u'u'), - (0x1D497, 'M', u'v'), - (0x1D498, 'M', u'w'), - (0x1D499, 'M', u'x'), - (0x1D49A, 'M', u'y'), - (0x1D49B, 'M', u'z'), - (0x1D49C, 'M', u'a'), - (0x1D49D, 'X'), - (0x1D49E, 'M', u'c'), - (0x1D49F, 'M', u'd'), - (0x1D4A0, 'X'), - (0x1D4A2, 'M', u'g'), - (0x1D4A3, 'X'), - (0x1D4A5, 'M', u'j'), - (0x1D4A6, 'M', u'k'), - (0x1D4A7, 'X'), - (0x1D4A9, 'M', u'n'), - (0x1D4AA, 'M', u'o'), - (0x1D4AB, 'M', u'p'), - (0x1D4AC, 'M', u'q'), - (0x1D4AD, 'X'), - (0x1D4AE, 'M', u's'), - (0x1D4AF, 'M', u't'), - (0x1D4B0, 'M', u'u'), - (0x1D4B1, 'M', u'v'), - (0x1D4B2, 'M', u'w'), - (0x1D4B3, 'M', u'x'), - (0x1D4B4, 'M', u'y'), - (0x1D4B5, 'M', u'z'), - (0x1D4B6, 'M', u'a'), - (0x1D4B7, 'M', u'b'), - (0x1D4B8, 'M', u'c'), - (0x1D4B9, 'M', u'd'), - (0x1D4BA, 'X'), - (0x1D4BB, 'M', u'f'), - (0x1D4BC, 'X'), - (0x1D4BD, 'M', u'h'), - (0x1D4BE, 'M', u'i'), - (0x1D4BF, 'M', u'j'), - (0x1D4C0, 'M', u'k'), - (0x1D4C1, 'M', u'l'), - (0x1D4C2, 'M', u'm'), - (0x1D4C3, 'M', u'n'), - ] - -def _seg_60(): - return [ - (0x1D4C4, 'X'), - (0x1D4C5, 'M', u'p'), - (0x1D4C6, 'M', u'q'), - (0x1D4C7, 'M', u'r'), - (0x1D4C8, 'M', u's'), - (0x1D4C9, 'M', u't'), - (0x1D4CA, 'M', u'u'), - (0x1D4CB, 'M', u'v'), - (0x1D4CC, 'M', u'w'), - (0x1D4CD, 'M', u'x'), - (0x1D4CE, 'M', u'y'), - (0x1D4CF, 'M', u'z'), - (0x1D4D0, 'M', u'a'), - (0x1D4D1, 'M', u'b'), - (0x1D4D2, 'M', u'c'), - (0x1D4D3, 'M', u'd'), - (0x1D4D4, 'M', u'e'), - (0x1D4D5, 'M', u'f'), - (0x1D4D6, 'M', u'g'), - (0x1D4D7, 'M', u'h'), - (0x1D4D8, 'M', u'i'), - (0x1D4D9, 'M', u'j'), - (0x1D4DA, 'M', u'k'), - (0x1D4DB, 'M', u'l'), - (0x1D4DC, 'M', u'm'), - (0x1D4DD, 'M', u'n'), - (0x1D4DE, 'M', u'o'), - (0x1D4DF, 'M', u'p'), - (0x1D4E0, 'M', u'q'), - (0x1D4E1, 'M', u'r'), - (0x1D4E2, 'M', u's'), - (0x1D4E3, 'M', u't'), - (0x1D4E4, 'M', u'u'), - 
(0x1D4E5, 'M', u'v'), - (0x1D4E6, 'M', u'w'), - (0x1D4E7, 'M', u'x'), - (0x1D4E8, 'M', u'y'), - (0x1D4E9, 'M', u'z'), - (0x1D4EA, 'M', u'a'), - (0x1D4EB, 'M', u'b'), - (0x1D4EC, 'M', u'c'), - (0x1D4ED, 'M', u'd'), - (0x1D4EE, 'M', u'e'), - (0x1D4EF, 'M', u'f'), - (0x1D4F0, 'M', u'g'), - (0x1D4F1, 'M', u'h'), - (0x1D4F2, 'M', u'i'), - (0x1D4F3, 'M', u'j'), - (0x1D4F4, 'M', u'k'), - (0x1D4F5, 'M', u'l'), - (0x1D4F6, 'M', u'm'), - (0x1D4F7, 'M', u'n'), - (0x1D4F8, 'M', u'o'), - (0x1D4F9, 'M', u'p'), - (0x1D4FA, 'M', u'q'), - (0x1D4FB, 'M', u'r'), - (0x1D4FC, 'M', u's'), - (0x1D4FD, 'M', u't'), - (0x1D4FE, 'M', u'u'), - (0x1D4FF, 'M', u'v'), - (0x1D500, 'M', u'w'), - (0x1D501, 'M', u'x'), - (0x1D502, 'M', u'y'), - (0x1D503, 'M', u'z'), - (0x1D504, 'M', u'a'), - (0x1D505, 'M', u'b'), - (0x1D506, 'X'), - (0x1D507, 'M', u'd'), - (0x1D508, 'M', u'e'), - (0x1D509, 'M', u'f'), - (0x1D50A, 'M', u'g'), - (0x1D50B, 'X'), - (0x1D50D, 'M', u'j'), - (0x1D50E, 'M', u'k'), - (0x1D50F, 'M', u'l'), - (0x1D510, 'M', u'm'), - (0x1D511, 'M', u'n'), - (0x1D512, 'M', u'o'), - (0x1D513, 'M', u'p'), - (0x1D514, 'M', u'q'), - (0x1D515, 'X'), - (0x1D516, 'M', u's'), - (0x1D517, 'M', u't'), - (0x1D518, 'M', u'u'), - (0x1D519, 'M', u'v'), - (0x1D51A, 'M', u'w'), - (0x1D51B, 'M', u'x'), - (0x1D51C, 'M', u'y'), - (0x1D51D, 'X'), - (0x1D51E, 'M', u'a'), - (0x1D51F, 'M', u'b'), - (0x1D520, 'M', u'c'), - (0x1D521, 'M', u'd'), - (0x1D522, 'M', u'e'), - (0x1D523, 'M', u'f'), - (0x1D524, 'M', u'g'), - (0x1D525, 'M', u'h'), - (0x1D526, 'M', u'i'), - (0x1D527, 'M', u'j'), - (0x1D528, 'M', u'k'), - ] - -def _seg_61(): - return [ - (0x1D529, 'M', u'l'), - (0x1D52A, 'M', u'm'), - (0x1D52B, 'M', u'n'), - (0x1D52C, 'M', u'o'), - (0x1D52D, 'M', u'p'), - (0x1D52E, 'M', u'q'), - (0x1D52F, 'M', u'r'), - (0x1D530, 'M', u's'), - (0x1D531, 'M', u't'), - (0x1D532, 'M', u'u'), - (0x1D533, 'M', u'v'), - (0x1D534, 'M', u'w'), - (0x1D535, 'M', u'x'), - (0x1D536, 'M', u'y'), - (0x1D537, 'M', u'z'), - (0x1D538, 'M', u'a'), - (0x1D539, 'M', u'b'), - (0x1D53A, 'X'), - (0x1D53B, 'M', u'd'), - (0x1D53C, 'M', u'e'), - (0x1D53D, 'M', u'f'), - (0x1D53E, 'M', u'g'), - (0x1D53F, 'X'), - (0x1D540, 'M', u'i'), - (0x1D541, 'M', u'j'), - (0x1D542, 'M', u'k'), - (0x1D543, 'M', u'l'), - (0x1D544, 'M', u'm'), - (0x1D545, 'X'), - (0x1D546, 'M', u'o'), - (0x1D547, 'X'), - (0x1D54A, 'M', u's'), - (0x1D54B, 'M', u't'), - (0x1D54C, 'M', u'u'), - (0x1D54D, 'M', u'v'), - (0x1D54E, 'M', u'w'), - (0x1D54F, 'M', u'x'), - (0x1D550, 'M', u'y'), - (0x1D551, 'X'), - (0x1D552, 'M', u'a'), - (0x1D553, 'M', u'b'), - (0x1D554, 'M', u'c'), - (0x1D555, 'M', u'd'), - (0x1D556, 'M', u'e'), - (0x1D557, 'M', u'f'), - (0x1D558, 'M', u'g'), - (0x1D559, 'M', u'h'), - (0x1D55A, 'M', u'i'), - (0x1D55B, 'M', u'j'), - (0x1D55C, 'M', u'k'), - (0x1D55D, 'M', u'l'), - (0x1D55E, 'M', u'm'), - (0x1D55F, 'M', u'n'), - (0x1D560, 'M', u'o'), - (0x1D561, 'M', u'p'), - (0x1D562, 'M', u'q'), - (0x1D563, 'M', u'r'), - (0x1D564, 'M', u's'), - (0x1D565, 'M', u't'), - (0x1D566, 'M', u'u'), - (0x1D567, 'M', u'v'), - (0x1D568, 'M', u'w'), - (0x1D569, 'M', u'x'), - (0x1D56A, 'M', u'y'), - (0x1D56B, 'M', u'z'), - (0x1D56C, 'M', u'a'), - (0x1D56D, 'M', u'b'), - (0x1D56E, 'M', u'c'), - (0x1D56F, 'M', u'd'), - (0x1D570, 'M', u'e'), - (0x1D571, 'M', u'f'), - (0x1D572, 'M', u'g'), - (0x1D573, 'M', u'h'), - (0x1D574, 'M', u'i'), - (0x1D575, 'M', u'j'), - (0x1D576, 'M', u'k'), - (0x1D577, 'M', u'l'), - (0x1D578, 'M', u'm'), - (0x1D579, 'M', u'n'), - (0x1D57A, 'M', u'o'), - (0x1D57B, 'M', u'p'), - (0x1D57C, 'M', u'q'), - 
(0x1D57D, 'M', u'r'), - (0x1D57E, 'M', u's'), - (0x1D57F, 'M', u't'), - (0x1D580, 'M', u'u'), - (0x1D581, 'M', u'v'), - (0x1D582, 'M', u'w'), - (0x1D583, 'M', u'x'), - (0x1D584, 'M', u'y'), - (0x1D585, 'M', u'z'), - (0x1D586, 'M', u'a'), - (0x1D587, 'M', u'b'), - (0x1D588, 'M', u'c'), - (0x1D589, 'M', u'd'), - (0x1D58A, 'M', u'e'), - (0x1D58B, 'M', u'f'), - (0x1D58C, 'M', u'g'), - (0x1D58D, 'M', u'h'), - (0x1D58E, 'M', u'i'), - ] - -def _seg_62(): - return [ - (0x1D58F, 'M', u'j'), - (0x1D590, 'M', u'k'), - (0x1D591, 'M', u'l'), - (0x1D592, 'M', u'm'), - (0x1D593, 'M', u'n'), - (0x1D594, 'M', u'o'), - (0x1D595, 'M', u'p'), - (0x1D596, 'M', u'q'), - (0x1D597, 'M', u'r'), - (0x1D598, 'M', u's'), - (0x1D599, 'M', u't'), - (0x1D59A, 'M', u'u'), - (0x1D59B, 'M', u'v'), - (0x1D59C, 'M', u'w'), - (0x1D59D, 'M', u'x'), - (0x1D59E, 'M', u'y'), - (0x1D59F, 'M', u'z'), - (0x1D5A0, 'M', u'a'), - (0x1D5A1, 'M', u'b'), - (0x1D5A2, 'M', u'c'), - (0x1D5A3, 'M', u'd'), - (0x1D5A4, 'M', u'e'), - (0x1D5A5, 'M', u'f'), - (0x1D5A6, 'M', u'g'), - (0x1D5A7, 'M', u'h'), - (0x1D5A8, 'M', u'i'), - (0x1D5A9, 'M', u'j'), - (0x1D5AA, 'M', u'k'), - (0x1D5AB, 'M', u'l'), - (0x1D5AC, 'M', u'm'), - (0x1D5AD, 'M', u'n'), - (0x1D5AE, 'M', u'o'), - (0x1D5AF, 'M', u'p'), - (0x1D5B0, 'M', u'q'), - (0x1D5B1, 'M', u'r'), - (0x1D5B2, 'M', u's'), - (0x1D5B3, 'M', u't'), - (0x1D5B4, 'M', u'u'), - (0x1D5B5, 'M', u'v'), - (0x1D5B6, 'M', u'w'), - (0x1D5B7, 'M', u'x'), - (0x1D5B8, 'M', u'y'), - (0x1D5B9, 'M', u'z'), - (0x1D5BA, 'M', u'a'), - (0x1D5BB, 'M', u'b'), - (0x1D5BC, 'M', u'c'), - (0x1D5BD, 'M', u'd'), - (0x1D5BE, 'M', u'e'), - (0x1D5BF, 'M', u'f'), - (0x1D5C0, 'M', u'g'), - (0x1D5C1, 'M', u'h'), - (0x1D5C2, 'M', u'i'), - (0x1D5C3, 'M', u'j'), - (0x1D5C4, 'M', u'k'), - (0x1D5C5, 'M', u'l'), - (0x1D5C6, 'M', u'm'), - (0x1D5C7, 'M', u'n'), - (0x1D5C8, 'M', u'o'), - (0x1D5C9, 'M', u'p'), - (0x1D5CA, 'M', u'q'), - (0x1D5CB, 'M', u'r'), - (0x1D5CC, 'M', u's'), - (0x1D5CD, 'M', u't'), - (0x1D5CE, 'M', u'u'), - (0x1D5CF, 'M', u'v'), - (0x1D5D0, 'M', u'w'), - (0x1D5D1, 'M', u'x'), - (0x1D5D2, 'M', u'y'), - (0x1D5D3, 'M', u'z'), - (0x1D5D4, 'M', u'a'), - (0x1D5D5, 'M', u'b'), - (0x1D5D6, 'M', u'c'), - (0x1D5D7, 'M', u'd'), - (0x1D5D8, 'M', u'e'), - (0x1D5D9, 'M', u'f'), - (0x1D5DA, 'M', u'g'), - (0x1D5DB, 'M', u'h'), - (0x1D5DC, 'M', u'i'), - (0x1D5DD, 'M', u'j'), - (0x1D5DE, 'M', u'k'), - (0x1D5DF, 'M', u'l'), - (0x1D5E0, 'M', u'm'), - (0x1D5E1, 'M', u'n'), - (0x1D5E2, 'M', u'o'), - (0x1D5E3, 'M', u'p'), - (0x1D5E4, 'M', u'q'), - (0x1D5E5, 'M', u'r'), - (0x1D5E6, 'M', u's'), - (0x1D5E7, 'M', u't'), - (0x1D5E8, 'M', u'u'), - (0x1D5E9, 'M', u'v'), - (0x1D5EA, 'M', u'w'), - (0x1D5EB, 'M', u'x'), - (0x1D5EC, 'M', u'y'), - (0x1D5ED, 'M', u'z'), - (0x1D5EE, 'M', u'a'), - (0x1D5EF, 'M', u'b'), - (0x1D5F0, 'M', u'c'), - (0x1D5F1, 'M', u'd'), - (0x1D5F2, 'M', u'e'), - ] - -def _seg_63(): - return [ - (0x1D5F3, 'M', u'f'), - (0x1D5F4, 'M', u'g'), - (0x1D5F5, 'M', u'h'), - (0x1D5F6, 'M', u'i'), - (0x1D5F7, 'M', u'j'), - (0x1D5F8, 'M', u'k'), - (0x1D5F9, 'M', u'l'), - (0x1D5FA, 'M', u'm'), - (0x1D5FB, 'M', u'n'), - (0x1D5FC, 'M', u'o'), - (0x1D5FD, 'M', u'p'), - (0x1D5FE, 'M', u'q'), - (0x1D5FF, 'M', u'r'), - (0x1D600, 'M', u's'), - (0x1D601, 'M', u't'), - (0x1D602, 'M', u'u'), - (0x1D603, 'M', u'v'), - (0x1D604, 'M', u'w'), - (0x1D605, 'M', u'x'), - (0x1D606, 'M', u'y'), - (0x1D607, 'M', u'z'), - (0x1D608, 'M', u'a'), - (0x1D609, 'M', u'b'), - (0x1D60A, 'M', u'c'), - (0x1D60B, 'M', u'd'), - (0x1D60C, 'M', u'e'), - (0x1D60D, 'M', u'f'), - 
(0x1D60E, 'M', u'g'), - (0x1D60F, 'M', u'h'), - (0x1D610, 'M', u'i'), - (0x1D611, 'M', u'j'), - (0x1D612, 'M', u'k'), - (0x1D613, 'M', u'l'), - (0x1D614, 'M', u'm'), - (0x1D615, 'M', u'n'), - (0x1D616, 'M', u'o'), - (0x1D617, 'M', u'p'), - (0x1D618, 'M', u'q'), - (0x1D619, 'M', u'r'), - (0x1D61A, 'M', u's'), - (0x1D61B, 'M', u't'), - (0x1D61C, 'M', u'u'), - (0x1D61D, 'M', u'v'), - (0x1D61E, 'M', u'w'), - (0x1D61F, 'M', u'x'), - (0x1D620, 'M', u'y'), - (0x1D621, 'M', u'z'), - (0x1D622, 'M', u'a'), - (0x1D623, 'M', u'b'), - (0x1D624, 'M', u'c'), - (0x1D625, 'M', u'd'), - (0x1D626, 'M', u'e'), - (0x1D627, 'M', u'f'), - (0x1D628, 'M', u'g'), - (0x1D629, 'M', u'h'), - (0x1D62A, 'M', u'i'), - (0x1D62B, 'M', u'j'), - (0x1D62C, 'M', u'k'), - (0x1D62D, 'M', u'l'), - (0x1D62E, 'M', u'm'), - (0x1D62F, 'M', u'n'), - (0x1D630, 'M', u'o'), - (0x1D631, 'M', u'p'), - (0x1D632, 'M', u'q'), - (0x1D633, 'M', u'r'), - (0x1D634, 'M', u's'), - (0x1D635, 'M', u't'), - (0x1D636, 'M', u'u'), - (0x1D637, 'M', u'v'), - (0x1D638, 'M', u'w'), - (0x1D639, 'M', u'x'), - (0x1D63A, 'M', u'y'), - (0x1D63B, 'M', u'z'), - (0x1D63C, 'M', u'a'), - (0x1D63D, 'M', u'b'), - (0x1D63E, 'M', u'c'), - (0x1D63F, 'M', u'd'), - (0x1D640, 'M', u'e'), - (0x1D641, 'M', u'f'), - (0x1D642, 'M', u'g'), - (0x1D643, 'M', u'h'), - (0x1D644, 'M', u'i'), - (0x1D645, 'M', u'j'), - (0x1D646, 'M', u'k'), - (0x1D647, 'M', u'l'), - (0x1D648, 'M', u'm'), - (0x1D649, 'M', u'n'), - (0x1D64A, 'M', u'o'), - (0x1D64B, 'M', u'p'), - (0x1D64C, 'M', u'q'), - (0x1D64D, 'M', u'r'), - (0x1D64E, 'M', u's'), - (0x1D64F, 'M', u't'), - (0x1D650, 'M', u'u'), - (0x1D651, 'M', u'v'), - (0x1D652, 'M', u'w'), - (0x1D653, 'M', u'x'), - (0x1D654, 'M', u'y'), - (0x1D655, 'M', u'z'), - (0x1D656, 'M', u'a'), - ] - -def _seg_64(): - return [ - (0x1D657, 'M', u'b'), - (0x1D658, 'M', u'c'), - (0x1D659, 'M', u'd'), - (0x1D65A, 'M', u'e'), - (0x1D65B, 'M', u'f'), - (0x1D65C, 'M', u'g'), - (0x1D65D, 'M', u'h'), - (0x1D65E, 'M', u'i'), - (0x1D65F, 'M', u'j'), - (0x1D660, 'M', u'k'), - (0x1D661, 'M', u'l'), - (0x1D662, 'M', u'm'), - (0x1D663, 'M', u'n'), - (0x1D664, 'M', u'o'), - (0x1D665, 'M', u'p'), - (0x1D666, 'M', u'q'), - (0x1D667, 'M', u'r'), - (0x1D668, 'M', u's'), - (0x1D669, 'M', u't'), - (0x1D66A, 'M', u'u'), - (0x1D66B, 'M', u'v'), - (0x1D66C, 'M', u'w'), - (0x1D66D, 'M', u'x'), - (0x1D66E, 'M', u'y'), - (0x1D66F, 'M', u'z'), - (0x1D670, 'M', u'a'), - (0x1D671, 'M', u'b'), - (0x1D672, 'M', u'c'), - (0x1D673, 'M', u'd'), - (0x1D674, 'M', u'e'), - (0x1D675, 'M', u'f'), - (0x1D676, 'M', u'g'), - (0x1D677, 'M', u'h'), - (0x1D678, 'M', u'i'), - (0x1D679, 'M', u'j'), - (0x1D67A, 'M', u'k'), - (0x1D67B, 'M', u'l'), - (0x1D67C, 'M', u'm'), - (0x1D67D, 'M', u'n'), - (0x1D67E, 'M', u'o'), - (0x1D67F, 'M', u'p'), - (0x1D680, 'M', u'q'), - (0x1D681, 'M', u'r'), - (0x1D682, 'M', u's'), - (0x1D683, 'M', u't'), - (0x1D684, 'M', u'u'), - (0x1D685, 'M', u'v'), - (0x1D686, 'M', u'w'), - (0x1D687, 'M', u'x'), - (0x1D688, 'M', u'y'), - (0x1D689, 'M', u'z'), - (0x1D68A, 'M', u'a'), - (0x1D68B, 'M', u'b'), - (0x1D68C, 'M', u'c'), - (0x1D68D, 'M', u'd'), - (0x1D68E, 'M', u'e'), - (0x1D68F, 'M', u'f'), - (0x1D690, 'M', u'g'), - (0x1D691, 'M', u'h'), - (0x1D692, 'M', u'i'), - (0x1D693, 'M', u'j'), - (0x1D694, 'M', u'k'), - (0x1D695, 'M', u'l'), - (0x1D696, 'M', u'm'), - (0x1D697, 'M', u'n'), - (0x1D698, 'M', u'o'), - (0x1D699, 'M', u'p'), - (0x1D69A, 'M', u'q'), - (0x1D69B, 'M', u'r'), - (0x1D69C, 'M', u's'), - (0x1D69D, 'M', u't'), - (0x1D69E, 'M', u'u'), - (0x1D69F, 'M', u'v'), - (0x1D6A0, 'M', 
u'w'), - (0x1D6A1, 'M', u'x'), - (0x1D6A2, 'M', u'y'), - (0x1D6A3, 'M', u'z'), - (0x1D6A4, 'M', u'ı'), - (0x1D6A5, 'M', u'ȷ'), - (0x1D6A6, 'X'), - (0x1D6A8, 'M', u'α'), - (0x1D6A9, 'M', u'β'), - (0x1D6AA, 'M', u'γ'), - (0x1D6AB, 'M', u'δ'), - (0x1D6AC, 'M', u'ε'), - (0x1D6AD, 'M', u'ζ'), - (0x1D6AE, 'M', u'η'), - (0x1D6AF, 'M', u'θ'), - (0x1D6B0, 'M', u'ι'), - (0x1D6B1, 'M', u'κ'), - (0x1D6B2, 'M', u'λ'), - (0x1D6B3, 'M', u'μ'), - (0x1D6B4, 'M', u'ν'), - (0x1D6B5, 'M', u'ξ'), - (0x1D6B6, 'M', u'ο'), - (0x1D6B7, 'M', u'π'), - (0x1D6B8, 'M', u'ρ'), - (0x1D6B9, 'M', u'θ'), - (0x1D6BA, 'M', u'σ'), - (0x1D6BB, 'M', u'τ'), - ] - -def _seg_65(): - return [ - (0x1D6BC, 'M', u'υ'), - (0x1D6BD, 'M', u'φ'), - (0x1D6BE, 'M', u'χ'), - (0x1D6BF, 'M', u'ψ'), - (0x1D6C0, 'M', u'ω'), - (0x1D6C1, 'M', u'∇'), - (0x1D6C2, 'M', u'α'), - (0x1D6C3, 'M', u'β'), - (0x1D6C4, 'M', u'γ'), - (0x1D6C5, 'M', u'δ'), - (0x1D6C6, 'M', u'ε'), - (0x1D6C7, 'M', u'ζ'), - (0x1D6C8, 'M', u'η'), - (0x1D6C9, 'M', u'θ'), - (0x1D6CA, 'M', u'ι'), - (0x1D6CB, 'M', u'κ'), - (0x1D6CC, 'M', u'λ'), - (0x1D6CD, 'M', u'μ'), - (0x1D6CE, 'M', u'ν'), - (0x1D6CF, 'M', u'ξ'), - (0x1D6D0, 'M', u'ο'), - (0x1D6D1, 'M', u'π'), - (0x1D6D2, 'M', u'ρ'), - (0x1D6D3, 'M', u'σ'), - (0x1D6D5, 'M', u'τ'), - (0x1D6D6, 'M', u'υ'), - (0x1D6D7, 'M', u'φ'), - (0x1D6D8, 'M', u'χ'), - (0x1D6D9, 'M', u'ψ'), - (0x1D6DA, 'M', u'ω'), - (0x1D6DB, 'M', u'∂'), - (0x1D6DC, 'M', u'ε'), - (0x1D6DD, 'M', u'θ'), - (0x1D6DE, 'M', u'κ'), - (0x1D6DF, 'M', u'φ'), - (0x1D6E0, 'M', u'ρ'), - (0x1D6E1, 'M', u'π'), - (0x1D6E2, 'M', u'α'), - (0x1D6E3, 'M', u'β'), - (0x1D6E4, 'M', u'γ'), - (0x1D6E5, 'M', u'δ'), - (0x1D6E6, 'M', u'ε'), - (0x1D6E7, 'M', u'ζ'), - (0x1D6E8, 'M', u'η'), - (0x1D6E9, 'M', u'θ'), - (0x1D6EA, 'M', u'ι'), - (0x1D6EB, 'M', u'κ'), - (0x1D6EC, 'M', u'λ'), - (0x1D6ED, 'M', u'μ'), - (0x1D6EE, 'M', u'ν'), - (0x1D6EF, 'M', u'ξ'), - (0x1D6F0, 'M', u'ο'), - (0x1D6F1, 'M', u'π'), - (0x1D6F2, 'M', u'ρ'), - (0x1D6F3, 'M', u'θ'), - (0x1D6F4, 'M', u'σ'), - (0x1D6F5, 'M', u'τ'), - (0x1D6F6, 'M', u'υ'), - (0x1D6F7, 'M', u'φ'), - (0x1D6F8, 'M', u'χ'), - (0x1D6F9, 'M', u'ψ'), - (0x1D6FA, 'M', u'ω'), - (0x1D6FB, 'M', u'∇'), - (0x1D6FC, 'M', u'α'), - (0x1D6FD, 'M', u'β'), - (0x1D6FE, 'M', u'γ'), - (0x1D6FF, 'M', u'δ'), - (0x1D700, 'M', u'ε'), - (0x1D701, 'M', u'ζ'), - (0x1D702, 'M', u'η'), - (0x1D703, 'M', u'θ'), - (0x1D704, 'M', u'ι'), - (0x1D705, 'M', u'κ'), - (0x1D706, 'M', u'λ'), - (0x1D707, 'M', u'μ'), - (0x1D708, 'M', u'ν'), - (0x1D709, 'M', u'ξ'), - (0x1D70A, 'M', u'ο'), - (0x1D70B, 'M', u'π'), - (0x1D70C, 'M', u'ρ'), - (0x1D70D, 'M', u'σ'), - (0x1D70F, 'M', u'τ'), - (0x1D710, 'M', u'υ'), - (0x1D711, 'M', u'φ'), - (0x1D712, 'M', u'χ'), - (0x1D713, 'M', u'ψ'), - (0x1D714, 'M', u'ω'), - (0x1D715, 'M', u'∂'), - (0x1D716, 'M', u'ε'), - (0x1D717, 'M', u'θ'), - (0x1D718, 'M', u'κ'), - (0x1D719, 'M', u'φ'), - (0x1D71A, 'M', u'ρ'), - (0x1D71B, 'M', u'π'), - (0x1D71C, 'M', u'α'), - (0x1D71D, 'M', u'β'), - (0x1D71E, 'M', u'γ'), - (0x1D71F, 'M', u'δ'), - (0x1D720, 'M', u'ε'), - (0x1D721, 'M', u'ζ'), - ] - -def _seg_66(): - return [ - (0x1D722, 'M', u'η'), - (0x1D723, 'M', u'θ'), - (0x1D724, 'M', u'ι'), - (0x1D725, 'M', u'κ'), - (0x1D726, 'M', u'λ'), - (0x1D727, 'M', u'μ'), - (0x1D728, 'M', u'ν'), - (0x1D729, 'M', u'ξ'), - (0x1D72A, 'M', u'ο'), - (0x1D72B, 'M', u'π'), - (0x1D72C, 'M', u'ρ'), - (0x1D72D, 'M', u'θ'), - (0x1D72E, 'M', u'σ'), - (0x1D72F, 'M', u'τ'), - (0x1D730, 'M', u'υ'), - (0x1D731, 'M', u'φ'), - (0x1D732, 'M', u'χ'), - (0x1D733, 'M', u'ψ'), - (0x1D734, 'M', u'ω'), - 
(0x1D735, 'M', u'∇'), - (0x1D736, 'M', u'α'), - (0x1D737, 'M', u'β'), - (0x1D738, 'M', u'γ'), - (0x1D739, 'M', u'δ'), - (0x1D73A, 'M', u'ε'), - (0x1D73B, 'M', u'ζ'), - (0x1D73C, 'M', u'η'), - (0x1D73D, 'M', u'θ'), - (0x1D73E, 'M', u'ι'), - (0x1D73F, 'M', u'κ'), - (0x1D740, 'M', u'λ'), - (0x1D741, 'M', u'μ'), - (0x1D742, 'M', u'ν'), - (0x1D743, 'M', u'ξ'), - (0x1D744, 'M', u'ο'), - (0x1D745, 'M', u'π'), - (0x1D746, 'M', u'ρ'), - (0x1D747, 'M', u'σ'), - (0x1D749, 'M', u'τ'), - (0x1D74A, 'M', u'υ'), - (0x1D74B, 'M', u'φ'), - (0x1D74C, 'M', u'χ'), - (0x1D74D, 'M', u'ψ'), - (0x1D74E, 'M', u'ω'), - (0x1D74F, 'M', u'∂'), - (0x1D750, 'M', u'ε'), - (0x1D751, 'M', u'θ'), - (0x1D752, 'M', u'κ'), - (0x1D753, 'M', u'φ'), - (0x1D754, 'M', u'ρ'), - (0x1D755, 'M', u'π'), - (0x1D756, 'M', u'α'), - (0x1D757, 'M', u'β'), - (0x1D758, 'M', u'γ'), - (0x1D759, 'M', u'δ'), - (0x1D75A, 'M', u'ε'), - (0x1D75B, 'M', u'ζ'), - (0x1D75C, 'M', u'η'), - (0x1D75D, 'M', u'θ'), - (0x1D75E, 'M', u'ι'), - (0x1D75F, 'M', u'κ'), - (0x1D760, 'M', u'λ'), - (0x1D761, 'M', u'μ'), - (0x1D762, 'M', u'ν'), - (0x1D763, 'M', u'ξ'), - (0x1D764, 'M', u'ο'), - (0x1D765, 'M', u'π'), - (0x1D766, 'M', u'ρ'), - (0x1D767, 'M', u'θ'), - (0x1D768, 'M', u'σ'), - (0x1D769, 'M', u'τ'), - (0x1D76A, 'M', u'υ'), - (0x1D76B, 'M', u'φ'), - (0x1D76C, 'M', u'χ'), - (0x1D76D, 'M', u'ψ'), - (0x1D76E, 'M', u'ω'), - (0x1D76F, 'M', u'∇'), - (0x1D770, 'M', u'α'), - (0x1D771, 'M', u'β'), - (0x1D772, 'M', u'γ'), - (0x1D773, 'M', u'δ'), - (0x1D774, 'M', u'ε'), - (0x1D775, 'M', u'ζ'), - (0x1D776, 'M', u'η'), - (0x1D777, 'M', u'θ'), - (0x1D778, 'M', u'ι'), - (0x1D779, 'M', u'κ'), - (0x1D77A, 'M', u'λ'), - (0x1D77B, 'M', u'μ'), - (0x1D77C, 'M', u'ν'), - (0x1D77D, 'M', u'ξ'), - (0x1D77E, 'M', u'ο'), - (0x1D77F, 'M', u'π'), - (0x1D780, 'M', u'ρ'), - (0x1D781, 'M', u'σ'), - (0x1D783, 'M', u'τ'), - (0x1D784, 'M', u'υ'), - (0x1D785, 'M', u'φ'), - (0x1D786, 'M', u'χ'), - (0x1D787, 'M', u'ψ'), - ] - -def _seg_67(): - return [ - (0x1D788, 'M', u'ω'), - (0x1D789, 'M', u'∂'), - (0x1D78A, 'M', u'ε'), - (0x1D78B, 'M', u'θ'), - (0x1D78C, 'M', u'κ'), - (0x1D78D, 'M', u'φ'), - (0x1D78E, 'M', u'ρ'), - (0x1D78F, 'M', u'π'), - (0x1D790, 'M', u'α'), - (0x1D791, 'M', u'β'), - (0x1D792, 'M', u'γ'), - (0x1D793, 'M', u'δ'), - (0x1D794, 'M', u'ε'), - (0x1D795, 'M', u'ζ'), - (0x1D796, 'M', u'η'), - (0x1D797, 'M', u'θ'), - (0x1D798, 'M', u'ι'), - (0x1D799, 'M', u'κ'), - (0x1D79A, 'M', u'λ'), - (0x1D79B, 'M', u'μ'), - (0x1D79C, 'M', u'ν'), - (0x1D79D, 'M', u'ξ'), - (0x1D79E, 'M', u'ο'), - (0x1D79F, 'M', u'π'), - (0x1D7A0, 'M', u'ρ'), - (0x1D7A1, 'M', u'θ'), - (0x1D7A2, 'M', u'σ'), - (0x1D7A3, 'M', u'τ'), - (0x1D7A4, 'M', u'υ'), - (0x1D7A5, 'M', u'φ'), - (0x1D7A6, 'M', u'χ'), - (0x1D7A7, 'M', u'ψ'), - (0x1D7A8, 'M', u'ω'), - (0x1D7A9, 'M', u'∇'), - (0x1D7AA, 'M', u'α'), - (0x1D7AB, 'M', u'β'), - (0x1D7AC, 'M', u'γ'), - (0x1D7AD, 'M', u'δ'), - (0x1D7AE, 'M', u'ε'), - (0x1D7AF, 'M', u'ζ'), - (0x1D7B0, 'M', u'η'), - (0x1D7B1, 'M', u'θ'), - (0x1D7B2, 'M', u'ι'), - (0x1D7B3, 'M', u'κ'), - (0x1D7B4, 'M', u'λ'), - (0x1D7B5, 'M', u'μ'), - (0x1D7B6, 'M', u'ν'), - (0x1D7B7, 'M', u'ξ'), - (0x1D7B8, 'M', u'ο'), - (0x1D7B9, 'M', u'π'), - (0x1D7BA, 'M', u'ρ'), - (0x1D7BB, 'M', u'σ'), - (0x1D7BD, 'M', u'τ'), - (0x1D7BE, 'M', u'υ'), - (0x1D7BF, 'M', u'φ'), - (0x1D7C0, 'M', u'χ'), - (0x1D7C1, 'M', u'ψ'), - (0x1D7C2, 'M', u'ω'), - (0x1D7C3, 'M', u'∂'), - (0x1D7C4, 'M', u'ε'), - (0x1D7C5, 'M', u'θ'), - (0x1D7C6, 'M', u'κ'), - (0x1D7C7, 'M', u'φ'), - (0x1D7C8, 'M', u'ρ'), - (0x1D7C9, 'M', u'π'), - (0x1D7CA, 'M', 
u'ϝ'), - (0x1D7CC, 'X'), - (0x1D7CE, 'M', u'0'), - (0x1D7CF, 'M', u'1'), - (0x1D7D0, 'M', u'2'), - (0x1D7D1, 'M', u'3'), - (0x1D7D2, 'M', u'4'), - (0x1D7D3, 'M', u'5'), - (0x1D7D4, 'M', u'6'), - (0x1D7D5, 'M', u'7'), - (0x1D7D6, 'M', u'8'), - (0x1D7D7, 'M', u'9'), - (0x1D7D8, 'M', u'0'), - (0x1D7D9, 'M', u'1'), - (0x1D7DA, 'M', u'2'), - (0x1D7DB, 'M', u'3'), - (0x1D7DC, 'M', u'4'), - (0x1D7DD, 'M', u'5'), - (0x1D7DE, 'M', u'6'), - (0x1D7DF, 'M', u'7'), - (0x1D7E0, 'M', u'8'), - (0x1D7E1, 'M', u'9'), - (0x1D7E2, 'M', u'0'), - (0x1D7E3, 'M', u'1'), - (0x1D7E4, 'M', u'2'), - (0x1D7E5, 'M', u'3'), - (0x1D7E6, 'M', u'4'), - (0x1D7E7, 'M', u'5'), - (0x1D7E8, 'M', u'6'), - (0x1D7E9, 'M', u'7'), - (0x1D7EA, 'M', u'8'), - (0x1D7EB, 'M', u'9'), - (0x1D7EC, 'M', u'0'), - (0x1D7ED, 'M', u'1'), - (0x1D7EE, 'M', u'2'), - ] - -def _seg_68(): - return [ - (0x1D7EF, 'M', u'3'), - (0x1D7F0, 'M', u'4'), - (0x1D7F1, 'M', u'5'), - (0x1D7F2, 'M', u'6'), - (0x1D7F3, 'M', u'7'), - (0x1D7F4, 'M', u'8'), - (0x1D7F5, 'M', u'9'), - (0x1D7F6, 'M', u'0'), - (0x1D7F7, 'M', u'1'), - (0x1D7F8, 'M', u'2'), - (0x1D7F9, 'M', u'3'), - (0x1D7FA, 'M', u'4'), - (0x1D7FB, 'M', u'5'), - (0x1D7FC, 'M', u'6'), - (0x1D7FD, 'M', u'7'), - (0x1D7FE, 'M', u'8'), - (0x1D7FF, 'M', u'9'), - (0x1D800, 'V'), - (0x1DA8C, 'X'), - (0x1DA9B, 'V'), - (0x1DAA0, 'X'), - (0x1DAA1, 'V'), - (0x1DAB0, 'X'), - (0x1E000, 'V'), - (0x1E007, 'X'), - (0x1E008, 'V'), - (0x1E019, 'X'), - (0x1E01B, 'V'), - (0x1E022, 'X'), - (0x1E023, 'V'), - (0x1E025, 'X'), - (0x1E026, 'V'), - (0x1E02B, 'X'), - (0x1E800, 'V'), - (0x1E8C5, 'X'), - (0x1E8C7, 'V'), - (0x1E8D7, 'X'), - (0x1E900, 'M', u'𞤢'), - (0x1E901, 'M', u'𞤣'), - (0x1E902, 'M', u'𞤤'), - (0x1E903, 'M', u'𞤥'), - (0x1E904, 'M', u'𞤦'), - (0x1E905, 'M', u'𞤧'), - (0x1E906, 'M', u'𞤨'), - (0x1E907, 'M', u'𞤩'), - (0x1E908, 'M', u'𞤪'), - (0x1E909, 'M', u'𞤫'), - (0x1E90A, 'M', u'𞤬'), - (0x1E90B, 'M', u'𞤭'), - (0x1E90C, 'M', u'𞤮'), - (0x1E90D, 'M', u'𞤯'), - (0x1E90E, 'M', u'𞤰'), - (0x1E90F, 'M', u'𞤱'), - (0x1E910, 'M', u'𞤲'), - (0x1E911, 'M', u'𞤳'), - (0x1E912, 'M', u'𞤴'), - (0x1E913, 'M', u'𞤵'), - (0x1E914, 'M', u'𞤶'), - (0x1E915, 'M', u'𞤷'), - (0x1E916, 'M', u'𞤸'), - (0x1E917, 'M', u'𞤹'), - (0x1E918, 'M', u'𞤺'), - (0x1E919, 'M', u'𞤻'), - (0x1E91A, 'M', u'𞤼'), - (0x1E91B, 'M', u'𞤽'), - (0x1E91C, 'M', u'𞤾'), - (0x1E91D, 'M', u'𞤿'), - (0x1E91E, 'M', u'𞥀'), - (0x1E91F, 'M', u'𞥁'), - (0x1E920, 'M', u'𞥂'), - (0x1E921, 'M', u'𞥃'), - (0x1E922, 'V'), - (0x1E94B, 'X'), - (0x1E950, 'V'), - (0x1E95A, 'X'), - (0x1E95E, 'V'), - (0x1E960, 'X'), - (0x1EC71, 'V'), - (0x1ECB5, 'X'), - (0x1EE00, 'M', u'ا'), - (0x1EE01, 'M', u'ب'), - (0x1EE02, 'M', u'ج'), - (0x1EE03, 'M', u'د'), - (0x1EE04, 'X'), - (0x1EE05, 'M', u'و'), - (0x1EE06, 'M', u'ز'), - (0x1EE07, 'M', u'ح'), - (0x1EE08, 'M', u'ط'), - (0x1EE09, 'M', u'ي'), - (0x1EE0A, 'M', u'ك'), - (0x1EE0B, 'M', u'ل'), - (0x1EE0C, 'M', u'م'), - (0x1EE0D, 'M', u'ن'), - (0x1EE0E, 'M', u'س'), - (0x1EE0F, 'M', u'ع'), - (0x1EE10, 'M', u'ف'), - (0x1EE11, 'M', u'ص'), - (0x1EE12, 'M', u'ق'), - (0x1EE13, 'M', u'ر'), - (0x1EE14, 'M', u'ش'), - ] - -def _seg_69(): - return [ - (0x1EE15, 'M', u'ت'), - (0x1EE16, 'M', u'ث'), - (0x1EE17, 'M', u'خ'), - (0x1EE18, 'M', u'ذ'), - (0x1EE19, 'M', u'ض'), - (0x1EE1A, 'M', u'ظ'), - (0x1EE1B, 'M', u'غ'), - (0x1EE1C, 'M', u'ٮ'), - (0x1EE1D, 'M', u'ں'), - (0x1EE1E, 'M', u'ڡ'), - (0x1EE1F, 'M', u'ٯ'), - (0x1EE20, 'X'), - (0x1EE21, 'M', u'ب'), - (0x1EE22, 'M', u'ج'), - (0x1EE23, 'X'), - (0x1EE24, 'M', u'ه'), - (0x1EE25, 'X'), - (0x1EE27, 'M', u'ح'), - (0x1EE28, 'X'), - (0x1EE29, 
'M', u'ي'), - (0x1EE2A, 'M', u'ك'), - (0x1EE2B, 'M', u'ل'), - (0x1EE2C, 'M', u'م'), - (0x1EE2D, 'M', u'ن'), - (0x1EE2E, 'M', u'س'), - (0x1EE2F, 'M', u'ع'), - (0x1EE30, 'M', u'ف'), - (0x1EE31, 'M', u'ص'), - (0x1EE32, 'M', u'ق'), - (0x1EE33, 'X'), - (0x1EE34, 'M', u'ش'), - (0x1EE35, 'M', u'ت'), - (0x1EE36, 'M', u'ث'), - (0x1EE37, 'M', u'خ'), - (0x1EE38, 'X'), - (0x1EE39, 'M', u'ض'), - (0x1EE3A, 'X'), - (0x1EE3B, 'M', u'غ'), - (0x1EE3C, 'X'), - (0x1EE42, 'M', u'ج'), - (0x1EE43, 'X'), - (0x1EE47, 'M', u'ح'), - (0x1EE48, 'X'), - (0x1EE49, 'M', u'ي'), - (0x1EE4A, 'X'), - (0x1EE4B, 'M', u'ل'), - (0x1EE4C, 'X'), - (0x1EE4D, 'M', u'ن'), - (0x1EE4E, 'M', u'س'), - (0x1EE4F, 'M', u'ع'), - (0x1EE50, 'X'), - (0x1EE51, 'M', u'ص'), - (0x1EE52, 'M', u'ق'), - (0x1EE53, 'X'), - (0x1EE54, 'M', u'ش'), - (0x1EE55, 'X'), - (0x1EE57, 'M', u'خ'), - (0x1EE58, 'X'), - (0x1EE59, 'M', u'ض'), - (0x1EE5A, 'X'), - (0x1EE5B, 'M', u'غ'), - (0x1EE5C, 'X'), - (0x1EE5D, 'M', u'ں'), - (0x1EE5E, 'X'), - (0x1EE5F, 'M', u'ٯ'), - (0x1EE60, 'X'), - (0x1EE61, 'M', u'ب'), - (0x1EE62, 'M', u'ج'), - (0x1EE63, 'X'), - (0x1EE64, 'M', u'ه'), - (0x1EE65, 'X'), - (0x1EE67, 'M', u'ح'), - (0x1EE68, 'M', u'ط'), - (0x1EE69, 'M', u'ي'), - (0x1EE6A, 'M', u'ك'), - (0x1EE6B, 'X'), - (0x1EE6C, 'M', u'م'), - (0x1EE6D, 'M', u'ن'), - (0x1EE6E, 'M', u'س'), - (0x1EE6F, 'M', u'ع'), - (0x1EE70, 'M', u'ف'), - (0x1EE71, 'M', u'ص'), - (0x1EE72, 'M', u'ق'), - (0x1EE73, 'X'), - (0x1EE74, 'M', u'ش'), - (0x1EE75, 'M', u'ت'), - (0x1EE76, 'M', u'ث'), - (0x1EE77, 'M', u'خ'), - (0x1EE78, 'X'), - (0x1EE79, 'M', u'ض'), - (0x1EE7A, 'M', u'ظ'), - (0x1EE7B, 'M', u'غ'), - (0x1EE7C, 'M', u'ٮ'), - (0x1EE7D, 'X'), - (0x1EE7E, 'M', u'ڡ'), - (0x1EE7F, 'X'), - (0x1EE80, 'M', u'ا'), - (0x1EE81, 'M', u'ب'), - (0x1EE82, 'M', u'ج'), - (0x1EE83, 'M', u'د'), - ] - -def _seg_70(): - return [ - (0x1EE84, 'M', u'ه'), - (0x1EE85, 'M', u'و'), - (0x1EE86, 'M', u'ز'), - (0x1EE87, 'M', u'ح'), - (0x1EE88, 'M', u'ط'), - (0x1EE89, 'M', u'ي'), - (0x1EE8A, 'X'), - (0x1EE8B, 'M', u'ل'), - (0x1EE8C, 'M', u'م'), - (0x1EE8D, 'M', u'ن'), - (0x1EE8E, 'M', u'س'), - (0x1EE8F, 'M', u'ع'), - (0x1EE90, 'M', u'ف'), - (0x1EE91, 'M', u'ص'), - (0x1EE92, 'M', u'ق'), - (0x1EE93, 'M', u'ر'), - (0x1EE94, 'M', u'ش'), - (0x1EE95, 'M', u'ت'), - (0x1EE96, 'M', u'ث'), - (0x1EE97, 'M', u'خ'), - (0x1EE98, 'M', u'ذ'), - (0x1EE99, 'M', u'ض'), - (0x1EE9A, 'M', u'ظ'), - (0x1EE9B, 'M', u'غ'), - (0x1EE9C, 'X'), - (0x1EEA1, 'M', u'ب'), - (0x1EEA2, 'M', u'ج'), - (0x1EEA3, 'M', u'د'), - (0x1EEA4, 'X'), - (0x1EEA5, 'M', u'و'), - (0x1EEA6, 'M', u'ز'), - (0x1EEA7, 'M', u'ح'), - (0x1EEA8, 'M', u'ط'), - (0x1EEA9, 'M', u'ي'), - (0x1EEAA, 'X'), - (0x1EEAB, 'M', u'ل'), - (0x1EEAC, 'M', u'م'), - (0x1EEAD, 'M', u'ن'), - (0x1EEAE, 'M', u'س'), - (0x1EEAF, 'M', u'ع'), - (0x1EEB0, 'M', u'ف'), - (0x1EEB1, 'M', u'ص'), - (0x1EEB2, 'M', u'ق'), - (0x1EEB3, 'M', u'ر'), - (0x1EEB4, 'M', u'ش'), - (0x1EEB5, 'M', u'ت'), - (0x1EEB6, 'M', u'ث'), - (0x1EEB7, 'M', u'خ'), - (0x1EEB8, 'M', u'ذ'), - (0x1EEB9, 'M', u'ض'), - (0x1EEBA, 'M', u'ظ'), - (0x1EEBB, 'M', u'غ'), - (0x1EEBC, 'X'), - (0x1EEF0, 'V'), - (0x1EEF2, 'X'), - (0x1F000, 'V'), - (0x1F02C, 'X'), - (0x1F030, 'V'), - (0x1F094, 'X'), - (0x1F0A0, 'V'), - (0x1F0AF, 'X'), - (0x1F0B1, 'V'), - (0x1F0C0, 'X'), - (0x1F0C1, 'V'), - (0x1F0D0, 'X'), - (0x1F0D1, 'V'), - (0x1F0F6, 'X'), - (0x1F101, '3', u'0,'), - (0x1F102, '3', u'1,'), - (0x1F103, '3', u'2,'), - (0x1F104, '3', u'3,'), - (0x1F105, '3', u'4,'), - (0x1F106, '3', u'5,'), - (0x1F107, '3', u'6,'), - (0x1F108, '3', u'7,'), - (0x1F109, '3', u'8,'), - 
(0x1F10A, '3', u'9,'), - (0x1F10B, 'V'), - (0x1F10D, 'X'), - (0x1F110, '3', u'(a)'), - (0x1F111, '3', u'(b)'), - (0x1F112, '3', u'(c)'), - (0x1F113, '3', u'(d)'), - (0x1F114, '3', u'(e)'), - (0x1F115, '3', u'(f)'), - (0x1F116, '3', u'(g)'), - (0x1F117, '3', u'(h)'), - (0x1F118, '3', u'(i)'), - (0x1F119, '3', u'(j)'), - (0x1F11A, '3', u'(k)'), - (0x1F11B, '3', u'(l)'), - (0x1F11C, '3', u'(m)'), - (0x1F11D, '3', u'(n)'), - (0x1F11E, '3', u'(o)'), - (0x1F11F, '3', u'(p)'), - (0x1F120, '3', u'(q)'), - (0x1F121, '3', u'(r)'), - (0x1F122, '3', u'(s)'), - (0x1F123, '3', u'(t)'), - (0x1F124, '3', u'(u)'), - ] - -def _seg_71(): - return [ - (0x1F125, '3', u'(v)'), - (0x1F126, '3', u'(w)'), - (0x1F127, '3', u'(x)'), - (0x1F128, '3', u'(y)'), - (0x1F129, '3', u'(z)'), - (0x1F12A, 'M', u'〔s〕'), - (0x1F12B, 'M', u'c'), - (0x1F12C, 'M', u'r'), - (0x1F12D, 'M', u'cd'), - (0x1F12E, 'M', u'wz'), - (0x1F12F, 'V'), - (0x1F130, 'M', u'a'), - (0x1F131, 'M', u'b'), - (0x1F132, 'M', u'c'), - (0x1F133, 'M', u'd'), - (0x1F134, 'M', u'e'), - (0x1F135, 'M', u'f'), - (0x1F136, 'M', u'g'), - (0x1F137, 'M', u'h'), - (0x1F138, 'M', u'i'), - (0x1F139, 'M', u'j'), - (0x1F13A, 'M', u'k'), - (0x1F13B, 'M', u'l'), - (0x1F13C, 'M', u'm'), - (0x1F13D, 'M', u'n'), - (0x1F13E, 'M', u'o'), - (0x1F13F, 'M', u'p'), - (0x1F140, 'M', u'q'), - (0x1F141, 'M', u'r'), - (0x1F142, 'M', u's'), - (0x1F143, 'M', u't'), - (0x1F144, 'M', u'u'), - (0x1F145, 'M', u'v'), - (0x1F146, 'M', u'w'), - (0x1F147, 'M', u'x'), - (0x1F148, 'M', u'y'), - (0x1F149, 'M', u'z'), - (0x1F14A, 'M', u'hv'), - (0x1F14B, 'M', u'mv'), - (0x1F14C, 'M', u'sd'), - (0x1F14D, 'M', u'ss'), - (0x1F14E, 'M', u'ppv'), - (0x1F14F, 'M', u'wc'), - (0x1F150, 'V'), - (0x1F16A, 'M', u'mc'), - (0x1F16B, 'M', u'md'), - (0x1F16C, 'X'), - (0x1F170, 'V'), - (0x1F190, 'M', u'dj'), - (0x1F191, 'V'), - (0x1F1AD, 'X'), - (0x1F1E6, 'V'), - (0x1F200, 'M', u'ほか'), - (0x1F201, 'M', u'ココ'), - (0x1F202, 'M', u'サ'), - (0x1F203, 'X'), - (0x1F210, 'M', u'手'), - (0x1F211, 'M', u'字'), - (0x1F212, 'M', u'双'), - (0x1F213, 'M', u'デ'), - (0x1F214, 'M', u'二'), - (0x1F215, 'M', u'多'), - (0x1F216, 'M', u'解'), - (0x1F217, 'M', u'天'), - (0x1F218, 'M', u'交'), - (0x1F219, 'M', u'映'), - (0x1F21A, 'M', u'無'), - (0x1F21B, 'M', u'料'), - (0x1F21C, 'M', u'前'), - (0x1F21D, 'M', u'後'), - (0x1F21E, 'M', u'再'), - (0x1F21F, 'M', u'新'), - (0x1F220, 'M', u'初'), - (0x1F221, 'M', u'終'), - (0x1F222, 'M', u'生'), - (0x1F223, 'M', u'販'), - (0x1F224, 'M', u'声'), - (0x1F225, 'M', u'吹'), - (0x1F226, 'M', u'演'), - (0x1F227, 'M', u'投'), - (0x1F228, 'M', u'捕'), - (0x1F229, 'M', u'一'), - (0x1F22A, 'M', u'三'), - (0x1F22B, 'M', u'遊'), - (0x1F22C, 'M', u'左'), - (0x1F22D, 'M', u'中'), - (0x1F22E, 'M', u'右'), - (0x1F22F, 'M', u'指'), - (0x1F230, 'M', u'走'), - (0x1F231, 'M', u'打'), - (0x1F232, 'M', u'禁'), - (0x1F233, 'M', u'空'), - (0x1F234, 'M', u'合'), - (0x1F235, 'M', u'満'), - (0x1F236, 'M', u'有'), - (0x1F237, 'M', u'月'), - (0x1F238, 'M', u'申'), - (0x1F239, 'M', u'割'), - (0x1F23A, 'M', u'営'), - (0x1F23B, 'M', u'配'), - ] - -def _seg_72(): - return [ - (0x1F23C, 'X'), - (0x1F240, 'M', u'〔本〕'), - (0x1F241, 'M', u'〔三〕'), - (0x1F242, 'M', u'〔二〕'), - (0x1F243, 'M', u'〔安〕'), - (0x1F244, 'M', u'〔点〕'), - (0x1F245, 'M', u'〔打〕'), - (0x1F246, 'M', u'〔盗〕'), - (0x1F247, 'M', u'〔勝〕'), - (0x1F248, 'M', u'〔敗〕'), - (0x1F249, 'X'), - (0x1F250, 'M', u'得'), - (0x1F251, 'M', u'可'), - (0x1F252, 'X'), - (0x1F260, 'V'), - (0x1F266, 'X'), - (0x1F300, 'V'), - (0x1F6D5, 'X'), - (0x1F6E0, 'V'), - (0x1F6ED, 'X'), - (0x1F6F0, 'V'), - (0x1F6FA, 'X'), - (0x1F700, 'V'), - 
(0x1F774, 'X'), - (0x1F780, 'V'), - (0x1F7D9, 'X'), - (0x1F800, 'V'), - (0x1F80C, 'X'), - (0x1F810, 'V'), - (0x1F848, 'X'), - (0x1F850, 'V'), - (0x1F85A, 'X'), - (0x1F860, 'V'), - (0x1F888, 'X'), - (0x1F890, 'V'), - (0x1F8AE, 'X'), - (0x1F900, 'V'), - (0x1F90C, 'X'), - (0x1F910, 'V'), - (0x1F93F, 'X'), - (0x1F940, 'V'), - (0x1F971, 'X'), - (0x1F973, 'V'), - (0x1F977, 'X'), - (0x1F97A, 'V'), - (0x1F97B, 'X'), - (0x1F97C, 'V'), - (0x1F9A3, 'X'), - (0x1F9B0, 'V'), - (0x1F9BA, 'X'), - (0x1F9C0, 'V'), - (0x1F9C3, 'X'), - (0x1F9D0, 'V'), - (0x1FA00, 'X'), - (0x1FA60, 'V'), - (0x1FA6E, 'X'), - (0x20000, 'V'), - (0x2A6D7, 'X'), - (0x2A700, 'V'), - (0x2B735, 'X'), - (0x2B740, 'V'), - (0x2B81E, 'X'), - (0x2B820, 'V'), - (0x2CEA2, 'X'), - (0x2CEB0, 'V'), - (0x2EBE1, 'X'), - (0x2F800, 'M', u'丽'), - (0x2F801, 'M', u'丸'), - (0x2F802, 'M', u'乁'), - (0x2F803, 'M', u'𠄢'), - (0x2F804, 'M', u'你'), - (0x2F805, 'M', u'侮'), - (0x2F806, 'M', u'侻'), - (0x2F807, 'M', u'倂'), - (0x2F808, 'M', u'偺'), - (0x2F809, 'M', u'備'), - (0x2F80A, 'M', u'僧'), - (0x2F80B, 'M', u'像'), - (0x2F80C, 'M', u'㒞'), - (0x2F80D, 'M', u'𠘺'), - (0x2F80E, 'M', u'免'), - (0x2F80F, 'M', u'兔'), - (0x2F810, 'M', u'兤'), - (0x2F811, 'M', u'具'), - (0x2F812, 'M', u'𠔜'), - (0x2F813, 'M', u'㒹'), - (0x2F814, 'M', u'內'), - (0x2F815, 'M', u'再'), - (0x2F816, 'M', u'𠕋'), - (0x2F817, 'M', u'冗'), - (0x2F818, 'M', u'冤'), - (0x2F819, 'M', u'仌'), - (0x2F81A, 'M', u'冬'), - (0x2F81B, 'M', u'况'), - (0x2F81C, 'M', u'𩇟'), - (0x2F81D, 'M', u'凵'), - (0x2F81E, 'M', u'刃'), - (0x2F81F, 'M', u'㓟'), - (0x2F820, 'M', u'刻'), - (0x2F821, 'M', u'剆'), - ] - -def _seg_73(): - return [ - (0x2F822, 'M', u'割'), - (0x2F823, 'M', u'剷'), - (0x2F824, 'M', u'㔕'), - (0x2F825, 'M', u'勇'), - (0x2F826, 'M', u'勉'), - (0x2F827, 'M', u'勤'), - (0x2F828, 'M', u'勺'), - (0x2F829, 'M', u'包'), - (0x2F82A, 'M', u'匆'), - (0x2F82B, 'M', u'北'), - (0x2F82C, 'M', u'卉'), - (0x2F82D, 'M', u'卑'), - (0x2F82E, 'M', u'博'), - (0x2F82F, 'M', u'即'), - (0x2F830, 'M', u'卽'), - (0x2F831, 'M', u'卿'), - (0x2F834, 'M', u'𠨬'), - (0x2F835, 'M', u'灰'), - (0x2F836, 'M', u'及'), - (0x2F837, 'M', u'叟'), - (0x2F838, 'M', u'𠭣'), - (0x2F839, 'M', u'叫'), - (0x2F83A, 'M', u'叱'), - (0x2F83B, 'M', u'吆'), - (0x2F83C, 'M', u'咞'), - (0x2F83D, 'M', u'吸'), - (0x2F83E, 'M', u'呈'), - (0x2F83F, 'M', u'周'), - (0x2F840, 'M', u'咢'), - (0x2F841, 'M', u'哶'), - (0x2F842, 'M', u'唐'), - (0x2F843, 'M', u'啓'), - (0x2F844, 'M', u'啣'), - (0x2F845, 'M', u'善'), - (0x2F847, 'M', u'喙'), - (0x2F848, 'M', u'喫'), - (0x2F849, 'M', u'喳'), - (0x2F84A, 'M', u'嗂'), - (0x2F84B, 'M', u'圖'), - (0x2F84C, 'M', u'嘆'), - (0x2F84D, 'M', u'圗'), - (0x2F84E, 'M', u'噑'), - (0x2F84F, 'M', u'噴'), - (0x2F850, 'M', u'切'), - (0x2F851, 'M', u'壮'), - (0x2F852, 'M', u'城'), - (0x2F853, 'M', u'埴'), - (0x2F854, 'M', u'堍'), - (0x2F855, 'M', u'型'), - (0x2F856, 'M', u'堲'), - (0x2F857, 'M', u'報'), - (0x2F858, 'M', u'墬'), - (0x2F859, 'M', u'𡓤'), - (0x2F85A, 'M', u'売'), - (0x2F85B, 'M', u'壷'), - (0x2F85C, 'M', u'夆'), - (0x2F85D, 'M', u'多'), - (0x2F85E, 'M', u'夢'), - (0x2F85F, 'M', u'奢'), - (0x2F860, 'M', u'𡚨'), - (0x2F861, 'M', u'𡛪'), - (0x2F862, 'M', u'姬'), - (0x2F863, 'M', u'娛'), - (0x2F864, 'M', u'娧'), - (0x2F865, 'M', u'姘'), - (0x2F866, 'M', u'婦'), - (0x2F867, 'M', u'㛮'), - (0x2F868, 'X'), - (0x2F869, 'M', u'嬈'), - (0x2F86A, 'M', u'嬾'), - (0x2F86C, 'M', u'𡧈'), - (0x2F86D, 'M', u'寃'), - (0x2F86E, 'M', u'寘'), - (0x2F86F, 'M', u'寧'), - (0x2F870, 'M', u'寳'), - (0x2F871, 'M', u'𡬘'), - (0x2F872, 'M', u'寿'), - (0x2F873, 'M', u'将'), - (0x2F874, 'X'), - (0x2F875, 'M', u'尢'), - (0x2F876, 'M', u'㞁'), - 
(0x2F877, 'M', u'屠'), - (0x2F878, 'M', u'屮'), - (0x2F879, 'M', u'峀'), - (0x2F87A, 'M', u'岍'), - (0x2F87B, 'M', u'𡷤'), - (0x2F87C, 'M', u'嵃'), - (0x2F87D, 'M', u'𡷦'), - (0x2F87E, 'M', u'嵮'), - (0x2F87F, 'M', u'嵫'), - (0x2F880, 'M', u'嵼'), - (0x2F881, 'M', u'巡'), - (0x2F882, 'M', u'巢'), - (0x2F883, 'M', u'㠯'), - (0x2F884, 'M', u'巽'), - (0x2F885, 'M', u'帨'), - (0x2F886, 'M', u'帽'), - (0x2F887, 'M', u'幩'), - (0x2F888, 'M', u'㡢'), - (0x2F889, 'M', u'𢆃'), - ] - -def _seg_74(): - return [ - (0x2F88A, 'M', u'㡼'), - (0x2F88B, 'M', u'庰'), - (0x2F88C, 'M', u'庳'), - (0x2F88D, 'M', u'庶'), - (0x2F88E, 'M', u'廊'), - (0x2F88F, 'M', u'𪎒'), - (0x2F890, 'M', u'廾'), - (0x2F891, 'M', u'𢌱'), - (0x2F893, 'M', u'舁'), - (0x2F894, 'M', u'弢'), - (0x2F896, 'M', u'㣇'), - (0x2F897, 'M', u'𣊸'), - (0x2F898, 'M', u'𦇚'), - (0x2F899, 'M', u'形'), - (0x2F89A, 'M', u'彫'), - (0x2F89B, 'M', u'㣣'), - (0x2F89C, 'M', u'徚'), - (0x2F89D, 'M', u'忍'), - (0x2F89E, 'M', u'志'), - (0x2F89F, 'M', u'忹'), - (0x2F8A0, 'M', u'悁'), - (0x2F8A1, 'M', u'㤺'), - (0x2F8A2, 'M', u'㤜'), - (0x2F8A3, 'M', u'悔'), - (0x2F8A4, 'M', u'𢛔'), - (0x2F8A5, 'M', u'惇'), - (0x2F8A6, 'M', u'慈'), - (0x2F8A7, 'M', u'慌'), - (0x2F8A8, 'M', u'慎'), - (0x2F8A9, 'M', u'慌'), - (0x2F8AA, 'M', u'慺'), - (0x2F8AB, 'M', u'憎'), - (0x2F8AC, 'M', u'憲'), - (0x2F8AD, 'M', u'憤'), - (0x2F8AE, 'M', u'憯'), - (0x2F8AF, 'M', u'懞'), - (0x2F8B0, 'M', u'懲'), - (0x2F8B1, 'M', u'懶'), - (0x2F8B2, 'M', u'成'), - (0x2F8B3, 'M', u'戛'), - (0x2F8B4, 'M', u'扝'), - (0x2F8B5, 'M', u'抱'), - (0x2F8B6, 'M', u'拔'), - (0x2F8B7, 'M', u'捐'), - (0x2F8B8, 'M', u'𢬌'), - (0x2F8B9, 'M', u'挽'), - (0x2F8BA, 'M', u'拼'), - (0x2F8BB, 'M', u'捨'), - (0x2F8BC, 'M', u'掃'), - (0x2F8BD, 'M', u'揤'), - (0x2F8BE, 'M', u'𢯱'), - (0x2F8BF, 'M', u'搢'), - (0x2F8C0, 'M', u'揅'), - (0x2F8C1, 'M', u'掩'), - (0x2F8C2, 'M', u'㨮'), - (0x2F8C3, 'M', u'摩'), - (0x2F8C4, 'M', u'摾'), - (0x2F8C5, 'M', u'撝'), - (0x2F8C6, 'M', u'摷'), - (0x2F8C7, 'M', u'㩬'), - (0x2F8C8, 'M', u'敏'), - (0x2F8C9, 'M', u'敬'), - (0x2F8CA, 'M', u'𣀊'), - (0x2F8CB, 'M', u'旣'), - (0x2F8CC, 'M', u'書'), - (0x2F8CD, 'M', u'晉'), - (0x2F8CE, 'M', u'㬙'), - (0x2F8CF, 'M', u'暑'), - (0x2F8D0, 'M', u'㬈'), - (0x2F8D1, 'M', u'㫤'), - (0x2F8D2, 'M', u'冒'), - (0x2F8D3, 'M', u'冕'), - (0x2F8D4, 'M', u'最'), - (0x2F8D5, 'M', u'暜'), - (0x2F8D6, 'M', u'肭'), - (0x2F8D7, 'M', u'䏙'), - (0x2F8D8, 'M', u'朗'), - (0x2F8D9, 'M', u'望'), - (0x2F8DA, 'M', u'朡'), - (0x2F8DB, 'M', u'杞'), - (0x2F8DC, 'M', u'杓'), - (0x2F8DD, 'M', u'𣏃'), - (0x2F8DE, 'M', u'㭉'), - (0x2F8DF, 'M', u'柺'), - (0x2F8E0, 'M', u'枅'), - (0x2F8E1, 'M', u'桒'), - (0x2F8E2, 'M', u'梅'), - (0x2F8E3, 'M', u'𣑭'), - (0x2F8E4, 'M', u'梎'), - (0x2F8E5, 'M', u'栟'), - (0x2F8E6, 'M', u'椔'), - (0x2F8E7, 'M', u'㮝'), - (0x2F8E8, 'M', u'楂'), - (0x2F8E9, 'M', u'榣'), - (0x2F8EA, 'M', u'槪'), - (0x2F8EB, 'M', u'檨'), - (0x2F8EC, 'M', u'𣚣'), - (0x2F8ED, 'M', u'櫛'), - (0x2F8EE, 'M', u'㰘'), - (0x2F8EF, 'M', u'次'), - ] - -def _seg_75(): - return [ - (0x2F8F0, 'M', u'𣢧'), - (0x2F8F1, 'M', u'歔'), - (0x2F8F2, 'M', u'㱎'), - (0x2F8F3, 'M', u'歲'), - (0x2F8F4, 'M', u'殟'), - (0x2F8F5, 'M', u'殺'), - (0x2F8F6, 'M', u'殻'), - (0x2F8F7, 'M', u'𣪍'), - (0x2F8F8, 'M', u'𡴋'), - (0x2F8F9, 'M', u'𣫺'), - (0x2F8FA, 'M', u'汎'), - (0x2F8FB, 'M', u'𣲼'), - (0x2F8FC, 'M', u'沿'), - (0x2F8FD, 'M', u'泍'), - (0x2F8FE, 'M', u'汧'), - (0x2F8FF, 'M', u'洖'), - (0x2F900, 'M', u'派'), - (0x2F901, 'M', u'海'), - (0x2F902, 'M', u'流'), - (0x2F903, 'M', u'浩'), - (0x2F904, 'M', u'浸'), - (0x2F905, 'M', u'涅'), - (0x2F906, 'M', u'𣴞'), - (0x2F907, 'M', u'洴'), - (0x2F908, 'M', u'港'), - (0x2F909, 'M', u'湮'), - 
(0x2F90A, 'M', u'㴳'), - (0x2F90B, 'M', u'滋'), - (0x2F90C, 'M', u'滇'), - (0x2F90D, 'M', u'𣻑'), - (0x2F90E, 'M', u'淹'), - (0x2F90F, 'M', u'潮'), - (0x2F910, 'M', u'𣽞'), - (0x2F911, 'M', u'𣾎'), - (0x2F912, 'M', u'濆'), - (0x2F913, 'M', u'瀹'), - (0x2F914, 'M', u'瀞'), - (0x2F915, 'M', u'瀛'), - (0x2F916, 'M', u'㶖'), - (0x2F917, 'M', u'灊'), - (0x2F918, 'M', u'災'), - (0x2F919, 'M', u'灷'), - (0x2F91A, 'M', u'炭'), - (0x2F91B, 'M', u'𠔥'), - (0x2F91C, 'M', u'煅'), - (0x2F91D, 'M', u'𤉣'), - (0x2F91E, 'M', u'熜'), - (0x2F91F, 'X'), - (0x2F920, 'M', u'爨'), - (0x2F921, 'M', u'爵'), - (0x2F922, 'M', u'牐'), - (0x2F923, 'M', u'𤘈'), - (0x2F924, 'M', u'犀'), - (0x2F925, 'M', u'犕'), - (0x2F926, 'M', u'𤜵'), - (0x2F927, 'M', u'𤠔'), - (0x2F928, 'M', u'獺'), - (0x2F929, 'M', u'王'), - (0x2F92A, 'M', u'㺬'), - (0x2F92B, 'M', u'玥'), - (0x2F92C, 'M', u'㺸'), - (0x2F92E, 'M', u'瑇'), - (0x2F92F, 'M', u'瑜'), - (0x2F930, 'M', u'瑱'), - (0x2F931, 'M', u'璅'), - (0x2F932, 'M', u'瓊'), - (0x2F933, 'M', u'㼛'), - (0x2F934, 'M', u'甤'), - (0x2F935, 'M', u'𤰶'), - (0x2F936, 'M', u'甾'), - (0x2F937, 'M', u'𤲒'), - (0x2F938, 'M', u'異'), - (0x2F939, 'M', u'𢆟'), - (0x2F93A, 'M', u'瘐'), - (0x2F93B, 'M', u'𤾡'), - (0x2F93C, 'M', u'𤾸'), - (0x2F93D, 'M', u'𥁄'), - (0x2F93E, 'M', u'㿼'), - (0x2F93F, 'M', u'䀈'), - (0x2F940, 'M', u'直'), - (0x2F941, 'M', u'𥃳'), - (0x2F942, 'M', u'𥃲'), - (0x2F943, 'M', u'𥄙'), - (0x2F944, 'M', u'𥄳'), - (0x2F945, 'M', u'眞'), - (0x2F946, 'M', u'真'), - (0x2F948, 'M', u'睊'), - (0x2F949, 'M', u'䀹'), - (0x2F94A, 'M', u'瞋'), - (0x2F94B, 'M', u'䁆'), - (0x2F94C, 'M', u'䂖'), - (0x2F94D, 'M', u'𥐝'), - (0x2F94E, 'M', u'硎'), - (0x2F94F, 'M', u'碌'), - (0x2F950, 'M', u'磌'), - (0x2F951, 'M', u'䃣'), - (0x2F952, 'M', u'𥘦'), - (0x2F953, 'M', u'祖'), - (0x2F954, 'M', u'𥚚'), - (0x2F955, 'M', u'𥛅'), - ] - -def _seg_76(): - return [ - (0x2F956, 'M', u'福'), - (0x2F957, 'M', u'秫'), - (0x2F958, 'M', u'䄯'), - (0x2F959, 'M', u'穀'), - (0x2F95A, 'M', u'穊'), - (0x2F95B, 'M', u'穏'), - (0x2F95C, 'M', u'𥥼'), - (0x2F95D, 'M', u'𥪧'), - (0x2F95F, 'X'), - (0x2F960, 'M', u'䈂'), - (0x2F961, 'M', u'𥮫'), - (0x2F962, 'M', u'篆'), - (0x2F963, 'M', u'築'), - (0x2F964, 'M', u'䈧'), - (0x2F965, 'M', u'𥲀'), - (0x2F966, 'M', u'糒'), - (0x2F967, 'M', u'䊠'), - (0x2F968, 'M', u'糨'), - (0x2F969, 'M', u'糣'), - (0x2F96A, 'M', u'紀'), - (0x2F96B, 'M', u'𥾆'), - (0x2F96C, 'M', u'絣'), - (0x2F96D, 'M', u'䌁'), - (0x2F96E, 'M', u'緇'), - (0x2F96F, 'M', u'縂'), - (0x2F970, 'M', u'繅'), - (0x2F971, 'M', u'䌴'), - (0x2F972, 'M', u'𦈨'), - (0x2F973, 'M', u'𦉇'), - (0x2F974, 'M', u'䍙'), - (0x2F975, 'M', u'𦋙'), - (0x2F976, 'M', u'罺'), - (0x2F977, 'M', u'𦌾'), - (0x2F978, 'M', u'羕'), - (0x2F979, 'M', u'翺'), - (0x2F97A, 'M', u'者'), - (0x2F97B, 'M', u'𦓚'), - (0x2F97C, 'M', u'𦔣'), - (0x2F97D, 'M', u'聠'), - (0x2F97E, 'M', u'𦖨'), - (0x2F97F, 'M', u'聰'), - (0x2F980, 'M', u'𣍟'), - (0x2F981, 'M', u'䏕'), - (0x2F982, 'M', u'育'), - (0x2F983, 'M', u'脃'), - (0x2F984, 'M', u'䐋'), - (0x2F985, 'M', u'脾'), - (0x2F986, 'M', u'媵'), - (0x2F987, 'M', u'𦞧'), - (0x2F988, 'M', u'𦞵'), - (0x2F989, 'M', u'𣎓'), - (0x2F98A, 'M', u'𣎜'), - (0x2F98B, 'M', u'舁'), - (0x2F98C, 'M', u'舄'), - (0x2F98D, 'M', u'辞'), - (0x2F98E, 'M', u'䑫'), - (0x2F98F, 'M', u'芑'), - (0x2F990, 'M', u'芋'), - (0x2F991, 'M', u'芝'), - (0x2F992, 'M', u'劳'), - (0x2F993, 'M', u'花'), - (0x2F994, 'M', u'芳'), - (0x2F995, 'M', u'芽'), - (0x2F996, 'M', u'苦'), - (0x2F997, 'M', u'𦬼'), - (0x2F998, 'M', u'若'), - (0x2F999, 'M', u'茝'), - (0x2F99A, 'M', u'荣'), - (0x2F99B, 'M', u'莭'), - (0x2F99C, 'M', u'茣'), - (0x2F99D, 'M', u'莽'), - (0x2F99E, 'M', u'菧'), - (0x2F99F, 'M', u'著'), - 
(0x2F9A0, 'M', u'荓'), - (0x2F9A1, 'M', u'菊'), - (0x2F9A2, 'M', u'菌'), - (0x2F9A3, 'M', u'菜'), - (0x2F9A4, 'M', u'𦰶'), - (0x2F9A5, 'M', u'𦵫'), - (0x2F9A6, 'M', u'𦳕'), - (0x2F9A7, 'M', u'䔫'), - (0x2F9A8, 'M', u'蓱'), - (0x2F9A9, 'M', u'蓳'), - (0x2F9AA, 'M', u'蔖'), - (0x2F9AB, 'M', u'𧏊'), - (0x2F9AC, 'M', u'蕤'), - (0x2F9AD, 'M', u'𦼬'), - (0x2F9AE, 'M', u'䕝'), - (0x2F9AF, 'M', u'䕡'), - (0x2F9B0, 'M', u'𦾱'), - (0x2F9B1, 'M', u'𧃒'), - (0x2F9B2, 'M', u'䕫'), - (0x2F9B3, 'M', u'虐'), - (0x2F9B4, 'M', u'虜'), - (0x2F9B5, 'M', u'虧'), - (0x2F9B6, 'M', u'虩'), - (0x2F9B7, 'M', u'蚩'), - (0x2F9B8, 'M', u'蚈'), - (0x2F9B9, 'M', u'蜎'), - (0x2F9BA, 'M', u'蛢'), - ] - -def _seg_77(): - return [ - (0x2F9BB, 'M', u'蝹'), - (0x2F9BC, 'M', u'蜨'), - (0x2F9BD, 'M', u'蝫'), - (0x2F9BE, 'M', u'螆'), - (0x2F9BF, 'X'), - (0x2F9C0, 'M', u'蟡'), - (0x2F9C1, 'M', u'蠁'), - (0x2F9C2, 'M', u'䗹'), - (0x2F9C3, 'M', u'衠'), - (0x2F9C4, 'M', u'衣'), - (0x2F9C5, 'M', u'𧙧'), - (0x2F9C6, 'M', u'裗'), - (0x2F9C7, 'M', u'裞'), - (0x2F9C8, 'M', u'䘵'), - (0x2F9C9, 'M', u'裺'), - (0x2F9CA, 'M', u'㒻'), - (0x2F9CB, 'M', u'𧢮'), - (0x2F9CC, 'M', u'𧥦'), - (0x2F9CD, 'M', u'䚾'), - (0x2F9CE, 'M', u'䛇'), - (0x2F9CF, 'M', u'誠'), - (0x2F9D0, 'M', u'諭'), - (0x2F9D1, 'M', u'變'), - (0x2F9D2, 'M', u'豕'), - (0x2F9D3, 'M', u'𧲨'), - (0x2F9D4, 'M', u'貫'), - (0x2F9D5, 'M', u'賁'), - (0x2F9D6, 'M', u'贛'), - (0x2F9D7, 'M', u'起'), - (0x2F9D8, 'M', u'𧼯'), - (0x2F9D9, 'M', u'𠠄'), - (0x2F9DA, 'M', u'跋'), - (0x2F9DB, 'M', u'趼'), - (0x2F9DC, 'M', u'跰'), - (0x2F9DD, 'M', u'𠣞'), - (0x2F9DE, 'M', u'軔'), - (0x2F9DF, 'M', u'輸'), - (0x2F9E0, 'M', u'𨗒'), - (0x2F9E1, 'M', u'𨗭'), - (0x2F9E2, 'M', u'邔'), - (0x2F9E3, 'M', u'郱'), - (0x2F9E4, 'M', u'鄑'), - (0x2F9E5, 'M', u'𨜮'), - (0x2F9E6, 'M', u'鄛'), - (0x2F9E7, 'M', u'鈸'), - (0x2F9E8, 'M', u'鋗'), - (0x2F9E9, 'M', u'鋘'), - (0x2F9EA, 'M', u'鉼'), - (0x2F9EB, 'M', u'鏹'), - (0x2F9EC, 'M', u'鐕'), - (0x2F9ED, 'M', u'𨯺'), - (0x2F9EE, 'M', u'開'), - (0x2F9EF, 'M', u'䦕'), - (0x2F9F0, 'M', u'閷'), - (0x2F9F1, 'M', u'𨵷'), - (0x2F9F2, 'M', u'䧦'), - (0x2F9F3, 'M', u'雃'), - (0x2F9F4, 'M', u'嶲'), - (0x2F9F5, 'M', u'霣'), - (0x2F9F6, 'M', u'𩅅'), - (0x2F9F7, 'M', u'𩈚'), - (0x2F9F8, 'M', u'䩮'), - (0x2F9F9, 'M', u'䩶'), - (0x2F9FA, 'M', u'韠'), - (0x2F9FB, 'M', u'𩐊'), - (0x2F9FC, 'M', u'䪲'), - (0x2F9FD, 'M', u'𩒖'), - (0x2F9FE, 'M', u'頋'), - (0x2FA00, 'M', u'頩'), - (0x2FA01, 'M', u'𩖶'), - (0x2FA02, 'M', u'飢'), - (0x2FA03, 'M', u'䬳'), - (0x2FA04, 'M', u'餩'), - (0x2FA05, 'M', u'馧'), - (0x2FA06, 'M', u'駂'), - (0x2FA07, 'M', u'駾'), - (0x2FA08, 'M', u'䯎'), - (0x2FA09, 'M', u'𩬰'), - (0x2FA0A, 'M', u'鬒'), - (0x2FA0B, 'M', u'鱀'), - (0x2FA0C, 'M', u'鳽'), - (0x2FA0D, 'M', u'䳎'), - (0x2FA0E, 'M', u'䳭'), - (0x2FA0F, 'M', u'鵧'), - (0x2FA10, 'M', u'𪃎'), - (0x2FA11, 'M', u'䳸'), - (0x2FA12, 'M', u'𪄅'), - (0x2FA13, 'M', u'𪈎'), - (0x2FA14, 'M', u'𪊑'), - (0x2FA15, 'M', u'麻'), - (0x2FA16, 'M', u'䵖'), - (0x2FA17, 'M', u'黹'), - (0x2FA18, 'M', u'黾'), - (0x2FA19, 'M', u'鼅'), - (0x2FA1A, 'M', u'鼏'), - (0x2FA1B, 'M', u'鼖'), - (0x2FA1C, 'M', u'鼻'), - (0x2FA1D, 'M', u'𪘀'), - (0x2FA1E, 'X'), - (0xE0100, 'I'), - ] - -def _seg_78(): - return [ - (0xE01F0, 'X'), - ] - -uts46data = tuple( - _seg_0() - + _seg_1() - + _seg_2() - + _seg_3() - + _seg_4() - + _seg_5() - + _seg_6() - + _seg_7() - + _seg_8() - + _seg_9() - + _seg_10() - + _seg_11() - + _seg_12() - + _seg_13() - + _seg_14() - + _seg_15() - + _seg_16() - + _seg_17() - + _seg_18() - + _seg_19() - + _seg_20() - + _seg_21() - + _seg_22() - + _seg_23() - + _seg_24() - + _seg_25() - + _seg_26() - + _seg_27() - + _seg_28() - + _seg_29() - + 
_seg_30() - + _seg_31() - + _seg_32() - + _seg_33() - + _seg_34() - + _seg_35() - + _seg_36() - + _seg_37() - + _seg_38() - + _seg_39() - + _seg_40() - + _seg_41() - + _seg_42() - + _seg_43() - + _seg_44() - + _seg_45() - + _seg_46() - + _seg_47() - + _seg_48() - + _seg_49() - + _seg_50() - + _seg_51() - + _seg_52() - + _seg_53() - + _seg_54() - + _seg_55() - + _seg_56() - + _seg_57() - + _seg_58() - + _seg_59() - + _seg_60() - + _seg_61() - + _seg_62() - + _seg_63() - + _seg_64() - + _seg_65() - + _seg_66() - + _seg_67() - + _seg_68() - + _seg_69() - + _seg_70() - + _seg_71() - + _seg_72() - + _seg_73() - + _seg_74() - + _seg_75() - + _seg_76() - + _seg_77() - + _seg_78() -) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/uts46data.pyc b/env/lib/python2.7/site-packages/pip/_vendor/idna/uts46data.pyc deleted file mode 100644 index 7fc61977..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/idna/uts46data.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/ipaddress.py b/env/lib/python2.7/site-packages/pip/_vendor/ipaddress.py deleted file mode 100644 index f2d07668..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/ipaddress.py +++ /dev/null @@ -1,2419 +0,0 @@ -# Copyright 2007 Google Inc. -# Licensed to PSF under a Contributor Agreement. - -"""A fast, lightweight IPv4/IPv6 manipulation library in Python. - -This library is used to create/poke/manipulate IPv4 and IPv6 addresses -and networks. - -""" - -from __future__ import unicode_literals - - -import itertools -import struct - -__version__ = '1.0.22' - -# Compatibility functions -_compat_int_types = (int,) -try: - _compat_int_types = (int, long) -except NameError: - pass -try: - _compat_str = unicode -except NameError: - _compat_str = str - assert bytes != str -if b'\0'[0] == 0: # Python 3 semantics - def _compat_bytes_to_byte_vals(byt): - return byt -else: - def _compat_bytes_to_byte_vals(byt): - return [struct.unpack(b'!B', b)[0] for b in byt] -try: - _compat_int_from_byte_vals = int.from_bytes -except AttributeError: - def _compat_int_from_byte_vals(bytvals, endianess): - assert endianess == 'big' - res = 0 - for bv in bytvals: - assert isinstance(bv, _compat_int_types) - res = (res << 8) + bv - return res - - -def _compat_to_bytes(intval, length, endianess): - assert isinstance(intval, _compat_int_types) - assert endianess == 'big' - if length == 4: - if intval < 0 or intval >= 2 ** 32: - raise struct.error("integer out of range for 'I' format code") - return struct.pack(b'!I', intval) - elif length == 16: - if intval < 0 or intval >= 2 ** 128: - raise struct.error("integer out of range for 'QQ' format code") - return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff) - else: - raise NotImplementedError() - - -if hasattr(int, 'bit_length'): - # Not int.bit_length , since that won't work in 2.7 where long exists - def _compat_bit_length(i): - return i.bit_length() -else: - def _compat_bit_length(i): - for res in itertools.count(): - if i >> res == 0: - return res - - -def _compat_range(start, end, step=1): - assert step > 0 - i = start - while i < end: - yield i - i += step - - -class _TotalOrderingMixin(object): - __slots__ = () - - # Helper that derives the other comparison operations from - # __lt__ and __eq__ - # We avoid functools.total_ordering because it doesn't handle - # NotImplemented correctly yet (http://bugs.python.org/issue10042) - def __eq__(self, other): - raise NotImplementedError - - def __ne__(self, other): - equal = 
self.__eq__(other) - if equal is NotImplemented: - return NotImplemented - return not equal - - def __lt__(self, other): - raise NotImplementedError - - def __le__(self, other): - less = self.__lt__(other) - if less is NotImplemented or not less: - return self.__eq__(other) - return less - - def __gt__(self, other): - less = self.__lt__(other) - if less is NotImplemented: - return NotImplemented - equal = self.__eq__(other) - if equal is NotImplemented: - return NotImplemented - return not (less or equal) - - def __ge__(self, other): - less = self.__lt__(other) - if less is NotImplemented: - return NotImplemented - return not less - - -IPV4LENGTH = 32 -IPV6LENGTH = 128 - - -class AddressValueError(ValueError): - """A Value Error related to the address.""" - - -class NetmaskValueError(ValueError): - """A Value Error related to the netmask.""" - - -def ip_address(address): - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - - Returns: - An IPv4Address or IPv6Address object. - - Raises: - ValueError: if the *address* passed isn't either a v4 or a v6 - address - - """ - try: - return IPv4Address(address) - except (AddressValueError, NetmaskValueError): - pass - - try: - return IPv6Address(address) - except (AddressValueError, NetmaskValueError): - pass - - if isinstance(address, bytes): - raise AddressValueError( - '%r does not appear to be an IPv4 or IPv6 address. ' - 'Did you pass in a bytes (str in Python 2) instead of' - ' a unicode object?' % address) - - raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % - address) - - -def ip_network(address, strict=True): - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP network. Either IPv4 or - IPv6 networks may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - - Returns: - An IPv4Network or IPv6Network object. - - Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. Or if the network has host bits set. - - """ - try: - return IPv4Network(address, strict) - except (AddressValueError, NetmaskValueError): - pass - - try: - return IPv6Network(address, strict) - except (AddressValueError, NetmaskValueError): - pass - - if isinstance(address, bytes): - raise AddressValueError( - '%r does not appear to be an IPv4 or IPv6 network. ' - 'Did you pass in a bytes (str in Python 2) instead of' - ' a unicode object?' % address) - - raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % - address) - - -def ip_interface(address): - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - - Returns: - An IPv4Interface or IPv6Interface object. - - Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. - - Notes: - The IPv?Interface classes describe an Address on a particular - Network, so they're basically a combination of both the Address - and Network classes. 
- - """ - try: - return IPv4Interface(address) - except (AddressValueError, NetmaskValueError): - pass - - try: - return IPv6Interface(address) - except (AddressValueError, NetmaskValueError): - pass - - raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' % - address) - - -def v4_int_to_packed(address): - """Represent an address as 4 packed bytes in network (big-endian) order. - - Args: - address: An integer representation of an IPv4 IP address. - - Returns: - The integer address packed as 4 bytes in network (big-endian) order. - - Raises: - ValueError: If the integer is negative or too large to be an - IPv4 IP address. - - """ - try: - return _compat_to_bytes(address, 4, 'big') - except (struct.error, OverflowError): - raise ValueError("Address negative or too large for IPv4") - - -def v6_int_to_packed(address): - """Represent an address as 16 packed bytes in network (big-endian) order. - - Args: - address: An integer representation of an IPv6 IP address. - - Returns: - The integer address packed as 16 bytes in network (big-endian) order. - - """ - try: - return _compat_to_bytes(address, 16, 'big') - except (struct.error, OverflowError): - raise ValueError("Address negative or too large for IPv6") - - -def _split_optional_netmask(address): - """Helper to split the netmask and raise AddressValueError if needed""" - addr = _compat_str(address).split('/') - if len(addr) > 2: - raise AddressValueError("Only one '/' permitted in %r" % address) - return addr - - -def _find_address_range(addresses): - """Find a sequence of sorted deduplicated IPv#Address. - - Args: - addresses: a list of IPv#Address objects. - - Yields: - A tuple containing the first and last IP addresses in the sequence. - - """ - it = iter(addresses) - first = last = next(it) - for ip in it: - if ip._ip != last._ip + 1: - yield first, last - first = ip - last = ip - yield first, last - - -def _count_righthand_zero_bits(number, bits): - """Count the number of zero bits on the right hand side. - - Args: - number: an integer. - bits: maximum number of bits to count. - - Returns: - The number of zero bits on the right hand side of the number. - - """ - if number == 0: - return bits - return min(bits, _compat_bit_length(~number & (number - 1))) - - -def summarize_address_range(first, last): - """Summarize a network range given the first and last IP addresses. - - Example: - >>> list(summarize_address_range(IPv4Address('192.0.2.0'), - ... IPv4Address('192.0.2.130'))) - ... #doctest: +NORMALIZE_WHITESPACE - [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), - IPv4Network('192.0.2.130/32')] - - Args: - first: the first IPv4Address or IPv6Address in the range. - last: the last IPv4Address or IPv6Address in the range. - - Returns: - An iterator of the summarized IPv(4|6) network objects. - - Raise: - TypeError: - If the first and last objects are not IP addresses. - If the first and last objects are not the same version. - ValueError: - If the last object is not greater than the first. - If the version of the first address is not 4 or 6. 
- - """ - if (not (isinstance(first, _BaseAddress) and - isinstance(last, _BaseAddress))): - raise TypeError('first and last must be IP addresses, not networks') - if first.version != last.version: - raise TypeError("%s and %s are not of the same version" % ( - first, last)) - if first > last: - raise ValueError('last IP address must be greater than first') - - if first.version == 4: - ip = IPv4Network - elif first.version == 6: - ip = IPv6Network - else: - raise ValueError('unknown IP version') - - ip_bits = first._max_prefixlen - first_int = first._ip - last_int = last._ip - while first_int <= last_int: - nbits = min(_count_righthand_zero_bits(first_int, ip_bits), - _compat_bit_length(last_int - first_int + 1) - 1) - net = ip((first_int, ip_bits - nbits)) - yield net - first_int += 1 << nbits - if first_int - 1 == ip._ALL_ONES: - break - - -def _collapse_addresses_internal(addresses): - """Loops through the addresses, collapsing concurrent netblocks. - - Example: - - ip1 = IPv4Network('192.0.2.0/26') - ip2 = IPv4Network('192.0.2.64/26') - ip3 = IPv4Network('192.0.2.128/26') - ip4 = IPv4Network('192.0.2.192/26') - - _collapse_addresses_internal([ip1, ip2, ip3, ip4]) -> - [IPv4Network('192.0.2.0/24')] - - This shouldn't be called directly; it is called via - collapse_addresses([]). - - Args: - addresses: A list of IPv4Network's or IPv6Network's - - Returns: - A list of IPv4Network's or IPv6Network's depending on what we were - passed. - - """ - # First merge - to_merge = list(addresses) - subnets = {} - while to_merge: - net = to_merge.pop() - supernet = net.supernet() - existing = subnets.get(supernet) - if existing is None: - subnets[supernet] = net - elif existing != net: - # Merge consecutive subnets - del subnets[supernet] - to_merge.append(supernet) - # Then iterate over resulting networks, skipping subsumed subnets - last = None - for net in sorted(subnets.values()): - if last is not None: - # Since they are sorted, - # last.network_address <= net.network_address is a given. - if last.broadcast_address >= net.broadcast_address: - continue - yield net - last = net - - -def collapse_addresses(addresses): - """Collapse a list of IP objects. - - Example: - collapse_addresses([IPv4Network('192.0.2.0/25'), - IPv4Network('192.0.2.128/25')]) -> - [IPv4Network('192.0.2.0/24')] - - Args: - addresses: An iterator of IPv4Network or IPv6Network objects. - - Returns: - An iterator of the collapsed IPv(4|6)Network objects. - - Raises: - TypeError: If passed a list of mixed version objects. 
- - """ - addrs = [] - ips = [] - nets = [] - - # split IP addresses and networks - for ip in addresses: - if isinstance(ip, _BaseAddress): - if ips and ips[-1]._version != ip._version: - raise TypeError("%s and %s are not of the same version" % ( - ip, ips[-1])) - ips.append(ip) - elif ip._prefixlen == ip._max_prefixlen: - if ips and ips[-1]._version != ip._version: - raise TypeError("%s and %s are not of the same version" % ( - ip, ips[-1])) - try: - ips.append(ip.ip) - except AttributeError: - ips.append(ip.network_address) - else: - if nets and nets[-1]._version != ip._version: - raise TypeError("%s and %s are not of the same version" % ( - ip, nets[-1])) - nets.append(ip) - - # sort and dedup - ips = sorted(set(ips)) - - # find consecutive address ranges in the sorted sequence and summarize them - if ips: - for first, last in _find_address_range(ips): - addrs.extend(summarize_address_range(first, last)) - - return _collapse_addresses_internal(addrs + nets) - - -def get_mixed_type_key(obj): - """Return a key suitable for sorting between networks and addresses. - - Address and Network objects are not sortable by default; they're - fundamentally different so the expression - - IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') - - doesn't make any sense. There are some times however, where you may wish - to have ipaddress sort these for you anyway. If you need to do this, you - can use this function as the key= argument to sorted(). - - Args: - obj: either a Network or Address object. - Returns: - appropriate key. - - """ - if isinstance(obj, _BaseNetwork): - return obj._get_networks_key() - elif isinstance(obj, _BaseAddress): - return obj._get_address_key() - return NotImplemented - - -class _IPAddressBase(_TotalOrderingMixin): - - """The mother class.""" - - __slots__ = () - - @property - def exploded(self): - """Return the longhand version of the IP address as a string.""" - return self._explode_shorthand_ip_string() - - @property - def compressed(self): - """Return the shorthand version of the IP address as a string.""" - return _compat_str(self) - - @property - def reverse_pointer(self): - """The name of the reverse DNS pointer for the IP address, e.g.: - >>> ipaddress.ip_address("127.0.0.1").reverse_pointer - '1.0.0.127.in-addr.arpa' - >>> ipaddress.ip_address("2001:db8::1").reverse_pointer - '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' - - """ - return self._reverse_pointer() - - @property - def version(self): - msg = '%200s has no version specified' % (type(self),) - raise NotImplementedError(msg) - - def _check_int_address(self, address): - if address < 0: - msg = "%d (< 0) is not permitted as an IPv%d address" - raise AddressValueError(msg % (address, self._version)) - if address > self._ALL_ONES: - msg = "%d (>= 2**%d) is not permitted as an IPv%d address" - raise AddressValueError(msg % (address, self._max_prefixlen, - self._version)) - - def _check_packed_address(self, address, expected_len): - address_len = len(address) - if address_len != expected_len: - msg = ( - '%r (len %d != %d) is not permitted as an IPv%d address. ' - 'Did you pass in a bytes (str in Python 2) instead of' - ' a unicode object?') - raise AddressValueError(msg % (address, address_len, - expected_len, self._version)) - - @classmethod - def _ip_int_from_prefix(cls, prefixlen): - """Turn the prefix length into a bitwise netmask - - Args: - prefixlen: An integer, the prefix length. - - Returns: - An integer. 
- - """ - return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen) - - @classmethod - def _prefix_from_ip_int(cls, ip_int): - """Return prefix length from the bitwise netmask. - - Args: - ip_int: An integer, the netmask in expanded bitwise format - - Returns: - An integer, the prefix length. - - Raises: - ValueError: If the input intermingles zeroes & ones - """ - trailing_zeroes = _count_righthand_zero_bits(ip_int, - cls._max_prefixlen) - prefixlen = cls._max_prefixlen - trailing_zeroes - leading_ones = ip_int >> trailing_zeroes - all_ones = (1 << prefixlen) - 1 - if leading_ones != all_ones: - byteslen = cls._max_prefixlen // 8 - details = _compat_to_bytes(ip_int, byteslen, 'big') - msg = 'Netmask pattern %r mixes zeroes & ones' - raise ValueError(msg % details) - return prefixlen - - @classmethod - def _report_invalid_netmask(cls, netmask_str): - msg = '%r is not a valid netmask' % netmask_str - raise NetmaskValueError(msg) - - @classmethod - def _prefix_from_prefix_string(cls, prefixlen_str): - """Return prefix length from a numeric string - - Args: - prefixlen_str: The string to be converted - - Returns: - An integer, the prefix length. - - Raises: - NetmaskValueError: If the input is not a valid netmask - """ - # int allows a leading +/- as well as surrounding whitespace, - # so we ensure that isn't the case - if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str): - cls._report_invalid_netmask(prefixlen_str) - try: - prefixlen = int(prefixlen_str) - except ValueError: - cls._report_invalid_netmask(prefixlen_str) - if not (0 <= prefixlen <= cls._max_prefixlen): - cls._report_invalid_netmask(prefixlen_str) - return prefixlen - - @classmethod - def _prefix_from_ip_string(cls, ip_str): - """Turn a netmask/hostmask string into a prefix length - - Args: - ip_str: The netmask/hostmask to be converted - - Returns: - An integer, the prefix length. - - Raises: - NetmaskValueError: If the input is not a valid netmask/hostmask - """ - # Parse the netmask/hostmask like an IP address. - try: - ip_int = cls._ip_int_from_string(ip_str) - except AddressValueError: - cls._report_invalid_netmask(ip_str) - - # Try matching a netmask (this would be /1*0*/ as a bitwise regexp). - # Note that the two ambiguous cases (all-ones and all-zeroes) are - # treated as netmasks. - try: - return cls._prefix_from_ip_int(ip_int) - except ValueError: - pass - - # Invert the bits, and try matching a /0+1+/ hostmask instead. - ip_int ^= cls._ALL_ONES - try: - return cls._prefix_from_ip_int(ip_int) - except ValueError: - cls._report_invalid_netmask(ip_str) - - def __reduce__(self): - return self.__class__, (_compat_str(self),) - - -class _BaseAddress(_IPAddressBase): - - """A generic IP object. - - This IP class contains the version independent methods which are - used by single IP addresses. - """ - - __slots__ = () - - def __int__(self): - return self._ip - - def __eq__(self, other): - try: - return (self._ip == other._ip and - self._version == other._version) - except AttributeError: - return NotImplemented - - def __lt__(self, other): - if not isinstance(other, _IPAddressBase): - return NotImplemented - if not isinstance(other, _BaseAddress): - raise TypeError('%s and %s are not of the same type' % ( - self, other)) - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - self, other)) - if self._ip != other._ip: - return self._ip < other._ip - return False - - # Shorthand for Integer addition and subtraction. 
This is not - # meant to ever support addition/subtraction of addresses. - def __add__(self, other): - if not isinstance(other, _compat_int_types): - return NotImplemented - return self.__class__(int(self) + other) - - def __sub__(self, other): - if not isinstance(other, _compat_int_types): - return NotImplemented - return self.__class__(int(self) - other) - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) - - def __str__(self): - return _compat_str(self._string_from_ip_int(self._ip)) - - def __hash__(self): - return hash(hex(int(self._ip))) - - def _get_address_key(self): - return (self._version, self) - - def __reduce__(self): - return self.__class__, (self._ip,) - - -class _BaseNetwork(_IPAddressBase): - - """A generic IP network object. - - This IP class contains the version independent methods which are - used by networks. - - """ - def __init__(self, address): - self._cache = {} - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) - - def __str__(self): - return '%s/%d' % (self.network_address, self.prefixlen) - - def hosts(self): - """Generate Iterator over usable hosts in a network. - - This is like __iter__ except it doesn't return the network - or broadcast addresses. - - """ - network = int(self.network_address) - broadcast = int(self.broadcast_address) - for x in _compat_range(network + 1, broadcast): - yield self._address_class(x) - - def __iter__(self): - network = int(self.network_address) - broadcast = int(self.broadcast_address) - for x in _compat_range(network, broadcast + 1): - yield self._address_class(x) - - def __getitem__(self, n): - network = int(self.network_address) - broadcast = int(self.broadcast_address) - if n >= 0: - if network + n > broadcast: - raise IndexError('address out of range') - return self._address_class(network + n) - else: - n += 1 - if broadcast + n < network: - raise IndexError('address out of range') - return self._address_class(broadcast + n) - - def __lt__(self, other): - if not isinstance(other, _IPAddressBase): - return NotImplemented - if not isinstance(other, _BaseNetwork): - raise TypeError('%s and %s are not of the same type' % ( - self, other)) - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - self, other)) - if self.network_address != other.network_address: - return self.network_address < other.network_address - if self.netmask != other.netmask: - return self.netmask < other.netmask - return False - - def __eq__(self, other): - try: - return (self._version == other._version and - self.network_address == other.network_address and - int(self.netmask) == int(other.netmask)) - except AttributeError: - return NotImplemented - - def __hash__(self): - return hash(int(self.network_address) ^ int(self.netmask)) - - def __contains__(self, other): - # always false if one is v4 and the other is v6. - if self._version != other._version: - return False - # dealing with another network. 
- if isinstance(other, _BaseNetwork): - return False - # dealing with another address - else: - # address - return (int(self.network_address) <= int(other._ip) <= - int(self.broadcast_address)) - - def overlaps(self, other): - """Tell if self is partly contained in other.""" - return self.network_address in other or ( - self.broadcast_address in other or ( - other.network_address in self or ( - other.broadcast_address in self))) - - @property - def broadcast_address(self): - x = self._cache.get('broadcast_address') - if x is None: - x = self._address_class(int(self.network_address) | - int(self.hostmask)) - self._cache['broadcast_address'] = x - return x - - @property - def hostmask(self): - x = self._cache.get('hostmask') - if x is None: - x = self._address_class(int(self.netmask) ^ self._ALL_ONES) - self._cache['hostmask'] = x - return x - - @property - def with_prefixlen(self): - return '%s/%d' % (self.network_address, self._prefixlen) - - @property - def with_netmask(self): - return '%s/%s' % (self.network_address, self.netmask) - - @property - def with_hostmask(self): - return '%s/%s' % (self.network_address, self.hostmask) - - @property - def num_addresses(self): - """Number of hosts in the current subnet.""" - return int(self.broadcast_address) - int(self.network_address) + 1 - - @property - def _address_class(self): - # Returning bare address objects (rather than interfaces) allows for - # more consistent behaviour across the network address, broadcast - # address and individual host addresses. - msg = '%200s has no associated address class' % (type(self),) - raise NotImplementedError(msg) - - @property - def prefixlen(self): - return self._prefixlen - - def address_exclude(self, other): - """Remove an address from a larger block. - - For example: - - addr1 = ip_network('192.0.2.0/28') - addr2 = ip_network('192.0.2.1/32') - list(addr1.address_exclude(addr2)) = - [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'), - IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')] - - or IPv6: - - addr1 = ip_network('2001:db8::1/32') - addr2 = ip_network('2001:db8::1/128') - list(addr1.address_exclude(addr2)) = - [ip_network('2001:db8::1/128'), - ip_network('2001:db8::2/127'), - ip_network('2001:db8::4/126'), - ip_network('2001:db8::8/125'), - ... - ip_network('2001:db8:8000::/33')] - - Args: - other: An IPv4Network or IPv6Network object of the same type. - - Returns: - An iterator of the IPv(4|6)Network objects which is self - minus other. - - Raises: - TypeError: If self and other are of differing address - versions, or if other is not a network object. - ValueError: If other is not completely contained by self. - - """ - if not self._version == other._version: - raise TypeError("%s and %s are not of the same version" % ( - self, other)) - - if not isinstance(other, _BaseNetwork): - raise TypeError("%s is not a network object" % other) - - if not other.subnet_of(self): - raise ValueError('%s not contained in %s' % (other, self)) - if other == self: - return - - # Make sure we're comparing the network of other. - other = other.__class__('%s/%s' % (other.network_address, - other.prefixlen)) - - s1, s2 = self.subnets() - while s1 != other and s2 != other: - if other.subnet_of(s1): - yield s2 - s1, s2 = s1.subnets() - elif other.subnet_of(s2): - yield s1 - s1, s2 = s2.subnets() - else: - # If we got here, there's a bug somewhere. 
- raise AssertionError('Error performing exclusion: ' - 's1: %s s2: %s other: %s' % - (s1, s2, other)) - if s1 == other: - yield s2 - elif s2 == other: - yield s1 - else: - # If we got here, there's a bug somewhere. - raise AssertionError('Error performing exclusion: ' - 's1: %s s2: %s other: %s' % - (s1, s2, other)) - - def compare_networks(self, other): - """Compare two IP objects. - - This is only concerned about the comparison of the integer - representation of the network addresses. This means that the - host bits aren't considered at all in this method. If you want - to compare host bits, you can easily enough do a - 'HostA._ip < HostB._ip' - - Args: - other: An IP object. - - Returns: - If the IP versions of self and other are the same, returns: - - -1 if self < other: - eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25') - IPv6Network('2001:db8::1000/124') < - IPv6Network('2001:db8::2000/124') - 0 if self == other - eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24') - IPv6Network('2001:db8::1000/124') == - IPv6Network('2001:db8::1000/124') - 1 if self > other - eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25') - IPv6Network('2001:db8::2000/124') > - IPv6Network('2001:db8::1000/124') - - Raises: - TypeError if the IP versions are different. - - """ - # does this need to raise a ValueError? - if self._version != other._version: - raise TypeError('%s and %s are not of the same type' % ( - self, other)) - # self._version == other._version below here: - if self.network_address < other.network_address: - return -1 - if self.network_address > other.network_address: - return 1 - # self.network_address == other.network_address below here: - if self.netmask < other.netmask: - return -1 - if self.netmask > other.netmask: - return 1 - return 0 - - def _get_networks_key(self): - """Network-only key function. - - Returns an object that identifies this address' network and - netmask. This function is a suitable "key" argument for sorted() - and list.sort(). - - """ - return (self._version, self.network_address, self.netmask) - - def subnets(self, prefixlen_diff=1, new_prefix=None): - """The subnets which join to make the current subnet. - - In the case that self contains only one IP - (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 - for IPv6), yield an iterator with just ourself. - - Args: - prefixlen_diff: An integer, the amount the prefix length - should be increased by. This should not be set if - new_prefix is also set. - new_prefix: The desired new prefix length. This must be a - larger number (smaller prefix) than the existing prefix. - This should not be set if prefixlen_diff is also set. - - Returns: - An iterator of IPv(4|6) objects. - - Raises: - ValueError: The prefixlen_diff is too small or too large. 
- OR - prefixlen_diff and new_prefix are both set or new_prefix - is a smaller number than the current prefix (smaller - number means a larger network) - - """ - if self._prefixlen == self._max_prefixlen: - yield self - return - - if new_prefix is not None: - if new_prefix < self._prefixlen: - raise ValueError('new prefix must be longer') - if prefixlen_diff != 1: - raise ValueError('cannot set prefixlen_diff and new_prefix') - prefixlen_diff = new_prefix - self._prefixlen - - if prefixlen_diff < 0: - raise ValueError('prefix length diff must be > 0') - new_prefixlen = self._prefixlen + prefixlen_diff - - if new_prefixlen > self._max_prefixlen: - raise ValueError( - 'prefix length diff %d is invalid for netblock %s' % ( - new_prefixlen, self)) - - start = int(self.network_address) - end = int(self.broadcast_address) + 1 - step = (int(self.hostmask) + 1) >> prefixlen_diff - for new_addr in _compat_range(start, end, step): - current = self.__class__((new_addr, new_prefixlen)) - yield current - - def supernet(self, prefixlen_diff=1, new_prefix=None): - """The supernet containing the current network. - - Args: - prefixlen_diff: An integer, the amount the prefix length of - the network should be decreased by. For example, given a - /24 network and a prefixlen_diff of 3, a supernet with a - /21 netmask is returned. - - Returns: - An IPv4 network object. - - Raises: - ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have - a negative prefix length. - OR - If prefixlen_diff and new_prefix are both set or new_prefix is a - larger number than the current prefix (larger number means a - smaller network) - - """ - if self._prefixlen == 0: - return self - - if new_prefix is not None: - if new_prefix > self._prefixlen: - raise ValueError('new prefix must be shorter') - if prefixlen_diff != 1: - raise ValueError('cannot set prefixlen_diff and new_prefix') - prefixlen_diff = self._prefixlen - new_prefix - - new_prefixlen = self.prefixlen - prefixlen_diff - if new_prefixlen < 0: - raise ValueError( - 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % - (self.prefixlen, prefixlen_diff)) - return self.__class__(( - int(self.network_address) & (int(self.netmask) << prefixlen_diff), - new_prefixlen)) - - @property - def is_multicast(self): - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is a multicast address. - See RFC 2373 2.7 for details. - - """ - return (self.network_address.is_multicast and - self.broadcast_address.is_multicast) - - @staticmethod - def _is_subnet_of(a, b): - try: - # Always false if one is v4 and the other is v6. - if a._version != b._version: - raise TypeError("%s and %s are not of the same version" (a, b)) - return (b.network_address <= a.network_address and - b.broadcast_address >= a.broadcast_address) - except AttributeError: - raise TypeError("Unable to test subnet containment " - "between %s and %s" % (a, b)) - - def subnet_of(self, other): - """Return True if this network is a subnet of other.""" - return self._is_subnet_of(self, other) - - def supernet_of(self, other): - """Return True if this network is a supernet of other.""" - return self._is_subnet_of(other, self) - - @property - def is_reserved(self): - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within one of the - reserved IPv6 Network ranges. 
- - """ - return (self.network_address.is_reserved and - self.broadcast_address.is_reserved) - - @property - def is_link_local(self): - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is reserved per RFC 4291. - - """ - return (self.network_address.is_link_local and - self.broadcast_address.is_link_local) - - @property - def is_private(self): - """Test if this address is allocated for private networks. - - Returns: - A boolean, True if the address is reserved per - iana-ipv4-special-registry or iana-ipv6-special-registry. - - """ - return (self.network_address.is_private and - self.broadcast_address.is_private) - - @property - def is_global(self): - """Test if this address is allocated for public networks. - - Returns: - A boolean, True if the address is not reserved per - iana-ipv4-special-registry or iana-ipv6-special-registry. - - """ - return not self.is_private - - @property - def is_unspecified(self): - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 2373 2.5.2. - - """ - return (self.network_address.is_unspecified and - self.broadcast_address.is_unspecified) - - @property - def is_loopback(self): - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback address as defined in - RFC 2373 2.5.3. - - """ - return (self.network_address.is_loopback and - self.broadcast_address.is_loopback) - - -class _BaseV4(object): - - """Base IPv4 object. - - The following methods are used by IPv4 objects in both single IP - addresses and networks. - - """ - - __slots__ = () - _version = 4 - # Equivalent to 255.255.255.255 or 32 bits of 1's. - _ALL_ONES = (2 ** IPV4LENGTH) - 1 - _DECIMAL_DIGITS = frozenset('0123456789') - - # the valid octets for host and netmasks. only useful for IPv4. - _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0]) - - _max_prefixlen = IPV4LENGTH - # There are only a handful of valid v4 netmasks, so we cache them all - # when constructed (see _make_netmask()). - _netmask_cache = {} - - def _explode_shorthand_ip_string(self): - return _compat_str(self) - - @classmethod - def _make_netmask(cls, arg): - """Make a (netmask, prefix_len) tuple from the given argument. - - Argument can be: - - an integer (the prefix length) - - a string representing the prefix length (e.g. "24") - - a string representing the prefix netmask (e.g. "255.255.255.0") - """ - if arg not in cls._netmask_cache: - if isinstance(arg, _compat_int_types): - prefixlen = arg - else: - try: - # Check for a netmask in prefix length form - prefixlen = cls._prefix_from_prefix_string(arg) - except NetmaskValueError: - # Check for a netmask or hostmask in dotted-quad form. - # This may raise NetmaskValueError. - prefixlen = cls._prefix_from_ip_string(arg) - netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen)) - cls._netmask_cache[arg] = netmask, prefixlen - return cls._netmask_cache[arg] - - @classmethod - def _ip_int_from_string(cls, ip_str): - """Turn the given IP string into an integer for comparison. - - Args: - ip_str: A string, the IP ip_str. - - Returns: - The IP ip_str as an integer. - - Raises: - AddressValueError: if ip_str isn't a valid IPv4 Address. 
- - """ - if not ip_str: - raise AddressValueError('Address cannot be empty') - - octets = ip_str.split('.') - if len(octets) != 4: - raise AddressValueError("Expected 4 octets in %r" % ip_str) - - try: - return _compat_int_from_byte_vals( - map(cls._parse_octet, octets), 'big') - except ValueError as exc: - raise AddressValueError("%s in %r" % (exc, ip_str)) - - @classmethod - def _parse_octet(cls, octet_str): - """Convert a decimal octet into an integer. - - Args: - octet_str: A string, the number to parse. - - Returns: - The octet as an integer. - - Raises: - ValueError: if the octet isn't strictly a decimal from [0..255]. - - """ - if not octet_str: - raise ValueError("Empty octet not permitted") - # Whitelist the characters, since int() allows a lot of bizarre stuff. - if not cls._DECIMAL_DIGITS.issuperset(octet_str): - msg = "Only decimal digits permitted in %r" - raise ValueError(msg % octet_str) - # We do the length check second, since the invalid character error - # is likely to be more informative for the user - if len(octet_str) > 3: - msg = "At most 3 characters permitted in %r" - raise ValueError(msg % octet_str) - # Convert to integer (we know digits are legal) - octet_int = int(octet_str, 10) - # Any octets that look like they *might* be written in octal, - # and which don't look exactly the same in both octal and - # decimal are rejected as ambiguous - if octet_int > 7 and octet_str[0] == '0': - msg = "Ambiguous (octal/decimal) value in %r not permitted" - raise ValueError(msg % octet_str) - if octet_int > 255: - raise ValueError("Octet %d (> 255) not permitted" % octet_int) - return octet_int - - @classmethod - def _string_from_ip_int(cls, ip_int): - """Turns a 32-bit integer into dotted decimal notation. - - Args: - ip_int: An integer, the IP address. - - Returns: - The IP address as a string in dotted decimal notation. - - """ - return '.'.join(_compat_str(struct.unpack(b'!B', b)[0] - if isinstance(b, bytes) - else b) - for b in _compat_to_bytes(ip_int, 4, 'big')) - - def _is_hostmask(self, ip_str): - """Test if the IP string is a hostmask (rather than a netmask). - - Args: - ip_str: A string, the potential hostmask. - - Returns: - A boolean, True if the IP string is a hostmask. - - """ - bits = ip_str.split('.') - try: - parts = [x for x in map(int, bits) if x in self._valid_mask_octets] - except ValueError: - return False - if len(parts) != len(bits): - return False - if parts[0] < parts[-1]: - return True - return False - - def _reverse_pointer(self): - """Return the reverse DNS pointer name for the IPv4 address. - - This implements the method described in RFC1035 3.5. - - """ - reverse_octets = _compat_str(self).split('.')[::-1] - return '.'.join(reverse_octets) + '.in-addr.arpa' - - @property - def max_prefixlen(self): - return self._max_prefixlen - - @property - def version(self): - return self._version - - -class IPv4Address(_BaseV4, _BaseAddress): - - """Represent and manipulate single IPv4 Addresses.""" - - __slots__ = ('_ip', '__weakref__') - - def __init__(self, address): - - """ - Args: - address: A string or integer representing the IP - - Additionally, an integer can be passed, so - IPv4Address('192.0.2.1') == IPv4Address(3221225985). - or, more generally - IPv4Address(int(IPv4Address('192.0.2.1'))) == - IPv4Address('192.0.2.1') - - Raises: - AddressValueError: If ipaddress isn't a valid IPv4 address. - - """ - # Efficient constructor from integer. 
- if isinstance(address, _compat_int_types): - self._check_int_address(address) - self._ip = address - return - - # Constructing from a packed address - if isinstance(address, bytes): - self._check_packed_address(address, 4) - bvs = _compat_bytes_to_byte_vals(address) - self._ip = _compat_int_from_byte_vals(bvs, 'big') - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP string. - addr_str = _compat_str(address) - if '/' in addr_str: - raise AddressValueError("Unexpected '/' in %r" % address) - self._ip = self._ip_int_from_string(addr_str) - - @property - def packed(self): - """The binary representation of this address.""" - return v4_int_to_packed(self._ip) - - @property - def is_reserved(self): - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within the - reserved IPv4 Network range. - - """ - return self in self._constants._reserved_network - - @property - def is_private(self): - """Test if this address is allocated for private networks. - - Returns: - A boolean, True if the address is reserved per - iana-ipv4-special-registry. - - """ - return any(self in net for net in self._constants._private_networks) - - @property - def is_global(self): - return ( - self not in self._constants._public_network and - not self.is_private) - - @property - def is_multicast(self): - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is multicast. - See RFC 3171 for details. - - """ - return self in self._constants._multicast_network - - @property - def is_unspecified(self): - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 5735 3. - - """ - return self == self._constants._unspecified_address - - @property - def is_loopback(self): - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback per RFC 3330. - - """ - return self in self._constants._loopback_network - - @property - def is_link_local(self): - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is link-local per RFC 3927. 
- - """ - return self in self._constants._linklocal_network - - -class IPv4Interface(IPv4Address): - - def __init__(self, address): - if isinstance(address, (bytes, _compat_int_types)): - IPv4Address.__init__(self, address) - self.network = IPv4Network(self._ip) - self._prefixlen = self._max_prefixlen - return - - if isinstance(address, tuple): - IPv4Address.__init__(self, address[0]) - if len(address) > 1: - self._prefixlen = int(address[1]) - else: - self._prefixlen = self._max_prefixlen - - self.network = IPv4Network(address, strict=False) - self.netmask = self.network.netmask - self.hostmask = self.network.hostmask - return - - addr = _split_optional_netmask(address) - IPv4Address.__init__(self, addr[0]) - - self.network = IPv4Network(address, strict=False) - self._prefixlen = self.network._prefixlen - - self.netmask = self.network.netmask - self.hostmask = self.network.hostmask - - def __str__(self): - return '%s/%d' % (self._string_from_ip_int(self._ip), - self.network.prefixlen) - - def __eq__(self, other): - address_equal = IPv4Address.__eq__(self, other) - if not address_equal or address_equal is NotImplemented: - return address_equal - try: - return self.network == other.network - except AttributeError: - # An interface with an associated network is NOT the - # same as an unassociated address. That's why the hash - # takes the extra info into account. - return False - - def __lt__(self, other): - address_less = IPv4Address.__lt__(self, other) - if address_less is NotImplemented: - return NotImplemented - try: - return (self.network < other.network or - self.network == other.network and address_less) - except AttributeError: - # We *do* allow addresses and interfaces to be sorted. The - # unassociated address is considered less than all interfaces. - return False - - def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) - - __reduce__ = _IPAddressBase.__reduce__ - - @property - def ip(self): - return IPv4Address(self._ip) - - @property - def with_prefixlen(self): - return '%s/%s' % (self._string_from_ip_int(self._ip), - self._prefixlen) - - @property - def with_netmask(self): - return '%s/%s' % (self._string_from_ip_int(self._ip), - self.netmask) - - @property - def with_hostmask(self): - return '%s/%s' % (self._string_from_ip_int(self._ip), - self.hostmask) - - -class IPv4Network(_BaseV4, _BaseNetwork): - - """This class represents and manipulates 32-bit IPv4 network + addresses.. - - Attributes: [examples for IPv4Network('192.0.2.0/27')] - .network_address: IPv4Address('192.0.2.0') - .hostmask: IPv4Address('0.0.0.31') - .broadcast_address: IPv4Address('192.0.2.32') - .netmask: IPv4Address('255.255.255.224') - .prefixlen: 27 - - """ - # Class to use when creating address objects - _address_class = IPv4Address - - def __init__(self, address, strict=True): - - """Instantiate a new IPv4 network object. - - Args: - address: A string or integer representing the IP [& network]. - '192.0.2.0/24' - '192.0.2.0/255.255.255.0' - '192.0.0.2/0.0.0.255' - are all functionally the same in IPv4. Similarly, - '192.0.2.1' - '192.0.2.1/255.255.255.255' - '192.0.2.1/32' - are also functionally equivalent. That is to say, failing to - provide a subnetmask will create an object with a mask of /32. - - If the mask (portion after the / in the argument) is given in - dotted quad form, it is treated as a netmask if it starts with a - non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it - starts with a zero field (e.g. 
0.255.255.255 == /8), with the - single exception of an all-zero mask which is treated as a - netmask == /0. If no mask is given, a default of /32 is used. - - Additionally, an integer can be passed, so - IPv4Network('192.0.2.1') == IPv4Network(3221225985) - or, more generally - IPv4Interface(int(IPv4Interface('192.0.2.1'))) == - IPv4Interface('192.0.2.1') - - Raises: - AddressValueError: If ipaddress isn't a valid IPv4 address. - NetmaskValueError: If the netmask isn't valid for - an IPv4 address. - ValueError: If strict is True and a network address is not - supplied. - - """ - _BaseNetwork.__init__(self, address) - - # Constructing from a packed address or integer - if isinstance(address, (_compat_int_types, bytes)): - self.network_address = IPv4Address(address) - self.netmask, self._prefixlen = self._make_netmask( - self._max_prefixlen) - # fixme: address/network test here. - return - - if isinstance(address, tuple): - if len(address) > 1: - arg = address[1] - else: - # We weren't given an address[1] - arg = self._max_prefixlen - self.network_address = IPv4Address(address[0]) - self.netmask, self._prefixlen = self._make_netmask(arg) - packed = int(self.network_address) - if packed & int(self.netmask) != packed: - if strict: - raise ValueError('%s has host bits set' % self) - else: - self.network_address = IPv4Address(packed & - int(self.netmask)) - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP prefix string. - addr = _split_optional_netmask(address) - self.network_address = IPv4Address(self._ip_int_from_string(addr[0])) - - if len(addr) == 2: - arg = addr[1] - else: - arg = self._max_prefixlen - self.netmask, self._prefixlen = self._make_netmask(arg) - - if strict: - if (IPv4Address(int(self.network_address) & int(self.netmask)) != - self.network_address): - raise ValueError('%s has host bits set' % self) - self.network_address = IPv4Address(int(self.network_address) & - int(self.netmask)) - - if self._prefixlen == (self._max_prefixlen - 1): - self.hosts = self.__iter__ - - @property - def is_global(self): - """Test if this address is allocated for public networks. - - Returns: - A boolean, True if the address is not reserved per - iana-ipv4-special-registry. - - """ - return (not (self.network_address in IPv4Network('100.64.0.0/10') and - self.broadcast_address in IPv4Network('100.64.0.0/10')) and - not self.is_private) - - -class _IPv4Constants(object): - - _linklocal_network = IPv4Network('169.254.0.0/16') - - _loopback_network = IPv4Network('127.0.0.0/8') - - _multicast_network = IPv4Network('224.0.0.0/4') - - _public_network = IPv4Network('100.64.0.0/10') - - _private_networks = [ - IPv4Network('0.0.0.0/8'), - IPv4Network('10.0.0.0/8'), - IPv4Network('127.0.0.0/8'), - IPv4Network('169.254.0.0/16'), - IPv4Network('172.16.0.0/12'), - IPv4Network('192.0.0.0/29'), - IPv4Network('192.0.0.170/31'), - IPv4Network('192.0.2.0/24'), - IPv4Network('192.168.0.0/16'), - IPv4Network('198.18.0.0/15'), - IPv4Network('198.51.100.0/24'), - IPv4Network('203.0.113.0/24'), - IPv4Network('240.0.0.0/4'), - IPv4Network('255.255.255.255/32'), - ] - - _reserved_network = IPv4Network('240.0.0.0/4') - - _unspecified_address = IPv4Address('0.0.0.0') - - -IPv4Address._constants = _IPv4Constants - - -class _BaseV6(object): - - """Base IPv6 object. - - The following methods are used by IPv6 objects in both single IP - addresses and networks. 
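The IPv4 classes deleted above are pip's vendored copy of the `ipaddress` backport; this project now runs on Python 3, where the standard-library `ipaddress` module provides the same behaviour. A minimal sketch of the constructor forms and classification properties documented above, using the stdlib module rather than the removed vendored one:

```python
import ipaddress

# String, integer and packed-bytes forms all construct the same address.
addr = ipaddress.IPv4Address("192.0.2.1")
assert addr == ipaddress.IPv4Address(int(addr)) == ipaddress.IPv4Address(addr.packed)

# strict=True (the default) rejects a network spec with host bits set ...
try:
    ipaddress.IPv4Network("192.0.2.1/24")
except ValueError as exc:
    print(exc)                                               # has host bits set

# ... while strict=False masks them away, as in the constructor above.
print(ipaddress.IPv4Network("192.0.2.1/24", strict=False))  # 192.0.2.0/24

# Classification properties mirror the registry constants listed above.
print(ipaddress.IPv4Address("10.0.0.1").is_private)         # True
print(ipaddress.IPv4Address("224.0.0.1").is_multicast)      # True
print(ipaddress.IPv4Address("8.8.8.8").is_global)           # True
```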
- - """ - - __slots__ = () - _version = 6 - _ALL_ONES = (2 ** IPV6LENGTH) - 1 - _HEXTET_COUNT = 8 - _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') - _max_prefixlen = IPV6LENGTH - - # There are only a bunch of valid v6 netmasks, so we cache them all - # when constructed (see _make_netmask()). - _netmask_cache = {} - - @classmethod - def _make_netmask(cls, arg): - """Make a (netmask, prefix_len) tuple from the given argument. - - Argument can be: - - an integer (the prefix length) - - a string representing the prefix length (e.g. "24") - - a string representing the prefix netmask (e.g. "255.255.255.0") - """ - if arg not in cls._netmask_cache: - if isinstance(arg, _compat_int_types): - prefixlen = arg - else: - prefixlen = cls._prefix_from_prefix_string(arg) - netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen)) - cls._netmask_cache[arg] = netmask, prefixlen - return cls._netmask_cache[arg] - - @classmethod - def _ip_int_from_string(cls, ip_str): - """Turn an IPv6 ip_str into an integer. - - Args: - ip_str: A string, the IPv6 ip_str. - - Returns: - An int, the IPv6 address - - Raises: - AddressValueError: if ip_str isn't a valid IPv6 Address. - - """ - if not ip_str: - raise AddressValueError('Address cannot be empty') - - parts = ip_str.split(':') - - # An IPv6 address needs at least 2 colons (3 parts). - _min_parts = 3 - if len(parts) < _min_parts: - msg = "At least %d parts expected in %r" % (_min_parts, ip_str) - raise AddressValueError(msg) - - # If the address has an IPv4-style suffix, convert it to hexadecimal. - if '.' in parts[-1]: - try: - ipv4_int = IPv4Address(parts.pop())._ip - except AddressValueError as exc: - raise AddressValueError("%s in %r" % (exc, ip_str)) - parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF)) - parts.append('%x' % (ipv4_int & 0xFFFF)) - - # An IPv6 address can't have more than 8 colons (9 parts). - # The extra colon comes from using the "::" notation for a single - # leading or trailing zero part. - _max_parts = cls._HEXTET_COUNT + 1 - if len(parts) > _max_parts: - msg = "At most %d colons permitted in %r" % ( - _max_parts - 1, ip_str) - raise AddressValueError(msg) - - # Disregarding the endpoints, find '::' with nothing in between. - # This indicates that a run of zeroes has been skipped. - skip_index = None - for i in _compat_range(1, len(parts) - 1): - if not parts[i]: - if skip_index is not None: - # Can't have more than one '::' - msg = "At most one '::' permitted in %r" % ip_str - raise AddressValueError(msg) - skip_index = i - - # parts_hi is the number of parts to copy from above/before the '::' - # parts_lo is the number of parts to copy from below/after the '::' - if skip_index is not None: - # If we found a '::', then check if it also covers the endpoints. - parts_hi = skip_index - parts_lo = len(parts) - skip_index - 1 - if not parts[0]: - parts_hi -= 1 - if parts_hi: - msg = "Leading ':' only permitted as part of '::' in %r" - raise AddressValueError(msg % ip_str) # ^: requires ^:: - if not parts[-1]: - parts_lo -= 1 - if parts_lo: - msg = "Trailing ':' only permitted as part of '::' in %r" - raise AddressValueError(msg % ip_str) # :$ requires ::$ - parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo) - if parts_skipped < 1: - msg = "Expected at most %d other parts with '::' in %r" - raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str)) - else: - # Otherwise, allocate the entire address to parts_hi. The - # endpoints could still be empty, but _parse_hextet() will check - # for that. 
- if len(parts) != cls._HEXTET_COUNT: - msg = "Exactly %d parts expected without '::' in %r" - raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str)) - if not parts[0]: - msg = "Leading ':' only permitted as part of '::' in %r" - raise AddressValueError(msg % ip_str) # ^: requires ^:: - if not parts[-1]: - msg = "Trailing ':' only permitted as part of '::' in %r" - raise AddressValueError(msg % ip_str) # :$ requires ::$ - parts_hi = len(parts) - parts_lo = 0 - parts_skipped = 0 - - try: - # Now, parse the hextets into a 128-bit integer. - ip_int = 0 - for i in range(parts_hi): - ip_int <<= 16 - ip_int |= cls._parse_hextet(parts[i]) - ip_int <<= 16 * parts_skipped - for i in range(-parts_lo, 0): - ip_int <<= 16 - ip_int |= cls._parse_hextet(parts[i]) - return ip_int - except ValueError as exc: - raise AddressValueError("%s in %r" % (exc, ip_str)) - - @classmethod - def _parse_hextet(cls, hextet_str): - """Convert an IPv6 hextet string into an integer. - - Args: - hextet_str: A string, the number to parse. - - Returns: - The hextet as an integer. - - Raises: - ValueError: if the input isn't strictly a hex number from - [0..FFFF]. - - """ - # Whitelist the characters, since int() allows a lot of bizarre stuff. - if not cls._HEX_DIGITS.issuperset(hextet_str): - raise ValueError("Only hex digits permitted in %r" % hextet_str) - # We do the length check second, since the invalid character error - # is likely to be more informative for the user - if len(hextet_str) > 4: - msg = "At most 4 characters permitted in %r" - raise ValueError(msg % hextet_str) - # Length check means we can skip checking the integer value - return int(hextet_str, 16) - - @classmethod - def _compress_hextets(cls, hextets): - """Compresses a list of hextets. - - Compresses a list of strings, replacing the longest continuous - sequence of "0" in the list with "" and adding empty strings at - the beginning or at the end of the string such that subsequently - calling ":".join(hextets) will produce the compressed version of - the IPv6 address. - - Args: - hextets: A list of strings, the hextets to compress. - - Returns: - A list of strings. - - """ - best_doublecolon_start = -1 - best_doublecolon_len = 0 - doublecolon_start = -1 - doublecolon_len = 0 - for index, hextet in enumerate(hextets): - if hextet == '0': - doublecolon_len += 1 - if doublecolon_start == -1: - # Start of a sequence of zeros. - doublecolon_start = index - if doublecolon_len > best_doublecolon_len: - # This is the longest sequence of zeros so far. - best_doublecolon_len = doublecolon_len - best_doublecolon_start = doublecolon_start - else: - doublecolon_len = 0 - doublecolon_start = -1 - - if best_doublecolon_len > 1: - best_doublecolon_end = (best_doublecolon_start + - best_doublecolon_len) - # For zeros at the end of the address. - if best_doublecolon_end == len(hextets): - hextets += [''] - hextets[best_doublecolon_start:best_doublecolon_end] = [''] - # For zeros at the beginning of the address. - if best_doublecolon_start == 0: - hextets = [''] + hextets - - return hextets - - @classmethod - def _string_from_ip_int(cls, ip_int=None): - """Turns a 128-bit integer into hexadecimal notation. - - Args: - ip_int: An integer, the IP address. - - Returns: - A string, the hexadecimal representation of the address. - - Raises: - ValueError: The address is bigger than 128 bits of all ones. 
- - """ - if ip_int is None: - ip_int = int(cls._ip) - - if ip_int > cls._ALL_ONES: - raise ValueError('IPv6 address is too large') - - hex_str = '%032x' % ip_int - hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)] - - hextets = cls._compress_hextets(hextets) - return ':'.join(hextets) - - def _explode_shorthand_ip_string(self): - """Expand a shortened IPv6 address. - - Args: - ip_str: A string, the IPv6 address. - - Returns: - A string, the expanded IPv6 address. - - """ - if isinstance(self, IPv6Network): - ip_str = _compat_str(self.network_address) - elif isinstance(self, IPv6Interface): - ip_str = _compat_str(self.ip) - else: - ip_str = _compat_str(self) - - ip_int = self._ip_int_from_string(ip_str) - hex_str = '%032x' % ip_int - parts = [hex_str[x:x + 4] for x in range(0, 32, 4)] - if isinstance(self, (_BaseNetwork, IPv6Interface)): - return '%s/%d' % (':'.join(parts), self._prefixlen) - return ':'.join(parts) - - def _reverse_pointer(self): - """Return the reverse DNS pointer name for the IPv6 address. - - This implements the method described in RFC3596 2.5. - - """ - reverse_chars = self.exploded[::-1].replace(':', '') - return '.'.join(reverse_chars) + '.ip6.arpa' - - @property - def max_prefixlen(self): - return self._max_prefixlen - - @property - def version(self): - return self._version - - -class IPv6Address(_BaseV6, _BaseAddress): - - """Represent and manipulate single IPv6 Addresses.""" - - __slots__ = ('_ip', '__weakref__') - - def __init__(self, address): - """Instantiate a new IPv6 address object. - - Args: - address: A string or integer representing the IP - - Additionally, an integer can be passed, so - IPv6Address('2001:db8::') == - IPv6Address(42540766411282592856903984951653826560) - or, more generally - IPv6Address(int(IPv6Address('2001:db8::'))) == - IPv6Address('2001:db8::') - - Raises: - AddressValueError: If address isn't a valid IPv6 address. - - """ - # Efficient constructor from integer. - if isinstance(address, _compat_int_types): - self._check_int_address(address) - self._ip = address - return - - # Constructing from a packed address - if isinstance(address, bytes): - self._check_packed_address(address, 16) - bvs = _compat_bytes_to_byte_vals(address) - self._ip = _compat_int_from_byte_vals(bvs, 'big') - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP string. - addr_str = _compat_str(address) - if '/' in addr_str: - raise AddressValueError("Unexpected '/' in %r" % address) - self._ip = self._ip_int_from_string(addr_str) - - @property - def packed(self): - """The binary representation of this address.""" - return v6_int_to_packed(self._ip) - - @property - def is_multicast(self): - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is a multicast address. - See RFC 2373 2.7 for details. - - """ - return self in self._constants._multicast_network - - @property - def is_reserved(self): - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within one of the - reserved IPv6 Network ranges. - - """ - return any(self in x for x in self._constants._reserved_networks) - - @property - def is_link_local(self): - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is reserved per RFC 4291. - - """ - return self in self._constants._linklocal_network - - @property - def is_site_local(self): - """Test if the address is reserved for site-local. 
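The parsing and formatting helpers above (`_ip_int_from_string`, `_compress_hextets`, `_string_from_ip_int`, `_reverse_pointer`) back the public `exploded`, `compressed` and `reverse_pointer` attributes. A short illustration with the standard-library `ipaddress` module, which exposes the same surface:

```python
import ipaddress

addr = ipaddress.IPv6Address("2001:db8::8a2e:370:7334")

# Zero-run compression / '::' expansion.
print(addr.compressed)   # 2001:db8::8a2e:370:7334
print(addr.exploded)     # 2001:0db8:0000:0000:0000:8a2e:0370:7334

# An IPv4-style suffix is folded into the last two hextets, as in
# _ip_int_from_string above.
mapped = ipaddress.IPv6Address("::ffff:192.0.2.1")
assert int(mapped) & 0xFFFFFFFF == int(ipaddress.IPv4Address("192.0.2.1"))

# Reverse DNS name per RFC 3596 section 2.5.
print(addr.reverse_pointer)   # ...ip6.arpa
```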
- - Note that the site-local address space has been deprecated by RFC 3879. - Use is_private to test if this address is in the space of unique local - addresses as defined by RFC 4193. - - Returns: - A boolean, True if the address is reserved per RFC 3513 2.5.6. - - """ - return self in self._constants._sitelocal_network - - @property - def is_private(self): - """Test if this address is allocated for private networks. - - Returns: - A boolean, True if the address is reserved per - iana-ipv6-special-registry. - - """ - return any(self in net for net in self._constants._private_networks) - - @property - def is_global(self): - """Test if this address is allocated for public networks. - - Returns: - A boolean, true if the address is not reserved per - iana-ipv6-special-registry. - - """ - return not self.is_private - - @property - def is_unspecified(self): - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 2373 2.5.2. - - """ - return self._ip == 0 - - @property - def is_loopback(self): - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback address as defined in - RFC 2373 2.5.3. - - """ - return self._ip == 1 - - @property - def ipv4_mapped(self): - """Return the IPv4 mapped address. - - Returns: - If the IPv6 address is a v4 mapped address, return the - IPv4 mapped address. Return None otherwise. - - """ - if (self._ip >> 32) != 0xFFFF: - return None - return IPv4Address(self._ip & 0xFFFFFFFF) - - @property - def teredo(self): - """Tuple of embedded teredo IPs. - - Returns: - Tuple of the (server, client) IPs or None if the address - doesn't appear to be a teredo address (doesn't start with - 2001::/32) - - """ - if (self._ip >> 96) != 0x20010000: - return None - return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), - IPv4Address(~self._ip & 0xFFFFFFFF)) - - @property - def sixtofour(self): - """Return the IPv4 6to4 embedded address. - - Returns: - The IPv4 6to4-embedded address if present or None if the - address doesn't appear to contain a 6to4 embedded address. - - """ - if (self._ip >> 112) != 0x2002: - return None - return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) - - -class IPv6Interface(IPv6Address): - - def __init__(self, address): - if isinstance(address, (bytes, _compat_int_types)): - IPv6Address.__init__(self, address) - self.network = IPv6Network(self._ip) - self._prefixlen = self._max_prefixlen - return - if isinstance(address, tuple): - IPv6Address.__init__(self, address[0]) - if len(address) > 1: - self._prefixlen = int(address[1]) - else: - self._prefixlen = self._max_prefixlen - self.network = IPv6Network(address, strict=False) - self.netmask = self.network.netmask - self.hostmask = self.network.hostmask - return - - addr = _split_optional_netmask(address) - IPv6Address.__init__(self, addr[0]) - self.network = IPv6Network(address, strict=False) - self.netmask = self.network.netmask - self._prefixlen = self.network._prefixlen - self.hostmask = self.network.hostmask - - def __str__(self): - return '%s/%d' % (self._string_from_ip_int(self._ip), - self.network.prefixlen) - - def __eq__(self, other): - address_equal = IPv6Address.__eq__(self, other) - if not address_equal or address_equal is NotImplemented: - return address_equal - try: - return self.network == other.network - except AttributeError: - # An interface with an associated network is NOT the - # same as an unassociated address. That's why the hash - # takes the extra info into account. 
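The embedded-address properties above (`ipv4_mapped`, `teredo`, `sixtofour`) simply mask out bit ranges of the 128-bit integer. A sketch with the standard-library `ipaddress` module; the sample addresses are illustrative only:

```python
import ipaddress

# IPv4-mapped: the low 32 bits behind the ::ffff:0:0/96 prefix.
print(ipaddress.IPv6Address("::ffff:192.0.2.1").ipv4_mapped)   # 192.0.2.1

# 6to4: the IPv4 address embedded right after the 2002::/16 prefix.
print(ipaddress.IPv6Address("2002:c000:204::").sixtofour)      # 192.0.2.4

# Teredo: a (server, client) pair, with the client bits stored inverted.
print(ipaddress.IPv6Address("2001:0:4136:e378:8000:63bf:3fff:fdd2").teredo)
```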
- return False - - def __lt__(self, other): - address_less = IPv6Address.__lt__(self, other) - if address_less is NotImplemented: - return NotImplemented - try: - return (self.network < other.network or - self.network == other.network and address_less) - except AttributeError: - # We *do* allow addresses and interfaces to be sorted. The - # unassociated address is considered less than all interfaces. - return False - - def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) - - __reduce__ = _IPAddressBase.__reduce__ - - @property - def ip(self): - return IPv6Address(self._ip) - - @property - def with_prefixlen(self): - return '%s/%s' % (self._string_from_ip_int(self._ip), - self._prefixlen) - - @property - def with_netmask(self): - return '%s/%s' % (self._string_from_ip_int(self._ip), - self.netmask) - - @property - def with_hostmask(self): - return '%s/%s' % (self._string_from_ip_int(self._ip), - self.hostmask) - - @property - def is_unspecified(self): - return self._ip == 0 and self.network.is_unspecified - - @property - def is_loopback(self): - return self._ip == 1 and self.network.is_loopback - - -class IPv6Network(_BaseV6, _BaseNetwork): - - """This class represents and manipulates 128-bit IPv6 networks. - - Attributes: [examples for IPv6('2001:db8::1000/124')] - .network_address: IPv6Address('2001:db8::1000') - .hostmask: IPv6Address('::f') - .broadcast_address: IPv6Address('2001:db8::100f') - .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0') - .prefixlen: 124 - - """ - - # Class to use when creating address objects - _address_class = IPv6Address - - def __init__(self, address, strict=True): - """Instantiate a new IPv6 Network object. - - Args: - address: A string or integer representing the IPv6 network or the - IP and prefix/netmask. - '2001:db8::/128' - '2001:db8:0000:0000:0000:0000:0000:0000/128' - '2001:db8::' - are all functionally the same in IPv6. That is to say, - failing to provide a subnetmask will create an object with - a mask of /128. - - Additionally, an integer can be passed, so - IPv6Network('2001:db8::') == - IPv6Network(42540766411282592856903984951653826560) - or, more generally - IPv6Network(int(IPv6Network('2001:db8::'))) == - IPv6Network('2001:db8::') - - strict: A boolean. If true, ensure that we have been passed - A true network address, eg, 2001:db8::1000/124 and not an - IP address on a network, eg, 2001:db8::1/124. - - Raises: - AddressValueError: If address isn't a valid IPv6 address. - NetmaskValueError: If the netmask isn't valid for - an IPv6 address. - ValueError: If strict was True and a network address was not - supplied. - - """ - _BaseNetwork.__init__(self, address) - - # Efficient constructor from integer or packed address - if isinstance(address, (bytes, _compat_int_types)): - self.network_address = IPv6Address(address) - self.netmask, self._prefixlen = self._make_netmask( - self._max_prefixlen) - return - - if isinstance(address, tuple): - if len(address) > 1: - arg = address[1] - else: - arg = self._max_prefixlen - self.netmask, self._prefixlen = self._make_netmask(arg) - self.network_address = IPv6Address(address[0]) - packed = int(self.network_address) - if packed & int(self.netmask) != packed: - if strict: - raise ValueError('%s has host bits set' % self) - else: - self.network_address = IPv6Address(packed & - int(self.netmask)) - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP prefix string. 
- addr = _split_optional_netmask(address) - - self.network_address = IPv6Address(self._ip_int_from_string(addr[0])) - - if len(addr) == 2: - arg = addr[1] - else: - arg = self._max_prefixlen - self.netmask, self._prefixlen = self._make_netmask(arg) - - if strict: - if (IPv6Address(int(self.network_address) & int(self.netmask)) != - self.network_address): - raise ValueError('%s has host bits set' % self) - self.network_address = IPv6Address(int(self.network_address) & - int(self.netmask)) - - if self._prefixlen == (self._max_prefixlen - 1): - self.hosts = self.__iter__ - - def hosts(self): - """Generate Iterator over usable hosts in a network. - - This is like __iter__ except it doesn't return the - Subnet-Router anycast address. - - """ - network = int(self.network_address) - broadcast = int(self.broadcast_address) - for x in _compat_range(network + 1, broadcast + 1): - yield self._address_class(x) - - @property - def is_site_local(self): - """Test if the address is reserved for site-local. - - Note that the site-local address space has been deprecated by RFC 3879. - Use is_private to test if this address is in the space of unique local - addresses as defined by RFC 4193. - - Returns: - A boolean, True if the address is reserved per RFC 3513 2.5.6. - - """ - return (self.network_address.is_site_local and - self.broadcast_address.is_site_local) - - -class _IPv6Constants(object): - - _linklocal_network = IPv6Network('fe80::/10') - - _multicast_network = IPv6Network('ff00::/8') - - _private_networks = [ - IPv6Network('::1/128'), - IPv6Network('::/128'), - IPv6Network('::ffff:0:0/96'), - IPv6Network('100::/64'), - IPv6Network('2001::/23'), - IPv6Network('2001:2::/48'), - IPv6Network('2001:db8::/32'), - IPv6Network('2001:10::/28'), - IPv6Network('fc00::/7'), - IPv6Network('fe80::/10'), - ] - - _reserved_networks = [ - IPv6Network('::/8'), IPv6Network('100::/8'), - IPv6Network('200::/7'), IPv6Network('400::/6'), - IPv6Network('800::/5'), IPv6Network('1000::/4'), - IPv6Network('4000::/3'), IPv6Network('6000::/3'), - IPv6Network('8000::/3'), IPv6Network('A000::/3'), - IPv6Network('C000::/3'), IPv6Network('E000::/4'), - IPv6Network('F000::/5'), IPv6Network('F800::/6'), - IPv6Network('FE00::/9'), - ] - - _sitelocal_network = IPv6Network('fec0::/10') - - -IPv6Address._constants = _IPv6Constants diff --git a/env/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyc b/env/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyc deleted file mode 100644 index 3c157467..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py deleted file mode 100644 index a6f44a55..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py +++ /dev/null @@ -1,347 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -lockfile.py - Platform-independent advisory file locks. - -Requires Python 2.5 unless you apply 2.4.diff -Locking is done on a per-thread basis instead of a per-process basis. - -Usage: - ->>> lock = LockFile('somefile') ->>> try: -... lock.acquire() -... except AlreadyLocked: -... print 'somefile', 'is locked already.' -... except LockFailed: -... print 'somefile', 'can\\'t be locked.' -... else: -... print 'got lock' -got lock ->>> print lock.is_locked() -True ->>> lock.release() - ->>> lock = LockFile('somefile') ->>> print lock.is_locked() -False ->>> with lock: -... 
print lock.is_locked() -True ->>> print lock.is_locked() -False - ->>> lock = LockFile('somefile') ->>> # It is okay to lock twice from the same thread... ->>> with lock: -... lock.acquire() -... ->>> # Though no counter is kept, so you can't unlock multiple times... ->>> print lock.is_locked() -False - -Exceptions: - - Error - base class for other exceptions - LockError - base class for all locking exceptions - AlreadyLocked - Another thread or process already holds the lock - LockFailed - Lock failed for some other reason - UnlockError - base class for all unlocking exceptions - AlreadyUnlocked - File was not locked. - NotMyLock - File was locked but not by the current thread/process -""" - -from __future__ import absolute_import - -import functools -import os -import socket -import threading -import warnings - -# Work with PEP8 and non-PEP8 versions of threading module. -if not hasattr(threading, "current_thread"): - threading.current_thread = threading.currentThread -if not hasattr(threading.Thread, "get_name"): - threading.Thread.get_name = threading.Thread.getName - -__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked', - 'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock', - 'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock', - 'LockBase', 'locked'] - - -class Error(Exception): - """ - Base class for other exceptions. - - >>> try: - ... raise Error - ... except Exception: - ... pass - """ - pass - - -class LockError(Error): - """ - Base class for error arising from attempts to acquire the lock. - - >>> try: - ... raise LockError - ... except Error: - ... pass - """ - pass - - -class LockTimeout(LockError): - """Raised when lock creation fails within a user-defined period of time. - - >>> try: - ... raise LockTimeout - ... except LockError: - ... pass - """ - pass - - -class AlreadyLocked(LockError): - """Some other thread/process is locking the file. - - >>> try: - ... raise AlreadyLocked - ... except LockError: - ... pass - """ - pass - - -class LockFailed(LockError): - """Lock file creation failed for some other reason. - - >>> try: - ... raise LockFailed - ... except LockError: - ... pass - """ - pass - - -class UnlockError(Error): - """ - Base class for errors arising from attempts to release the lock. - - >>> try: - ... raise UnlockError - ... except Error: - ... pass - """ - pass - - -class NotLocked(UnlockError): - """Raised when an attempt is made to unlock an unlocked file. - - >>> try: - ... raise NotLocked - ... except UnlockError: - ... pass - """ - pass - - -class NotMyLock(UnlockError): - """Raised when an attempt is made to unlock a file someone else locked. - - >>> try: - ... raise NotMyLock - ... except UnlockError: - ... pass - """ - pass - - -class _SharedBase(object): - def __init__(self, path): - self.path = path - - def acquire(self, timeout=None): - """ - Acquire the lock. - - * If timeout is omitted (or None), wait forever trying to lock the - file. - - * If timeout > 0, try to acquire the lock for that many seconds. If - the lock period expires and the file is still locked, raise - LockTimeout. - - * If timeout <= 0, raise AlreadyLocked immediately if the file is - already locked. - """ - raise NotImplemented("implement in subclass") - - def release(self): - """ - Release the lock. - - If the file is not locked, raise NotLocked. - """ - raise NotImplemented("implement in subclass") - - def __enter__(self): - """ - Context manager support. - """ - self.acquire() - return self - - def __exit__(self, *_exc): - """ - Context manager support. 
- """ - self.release() - - def __repr__(self): - return "<%s: %r>" % (self.__class__.__name__, self.path) - - -class LockBase(_SharedBase): - """Base class for platform-specific lock classes.""" - def __init__(self, path, threaded=True, timeout=None): - """ - >>> lock = LockBase('somefile') - >>> lock = LockBase('somefile', threaded=False) - """ - super(LockBase, self).__init__(path) - self.lock_file = os.path.abspath(path) + ".lock" - self.hostname = socket.gethostname() - self.pid = os.getpid() - if threaded: - t = threading.current_thread() - # Thread objects in Python 2.4 and earlier do not have ident - # attrs. Worm around that. - ident = getattr(t, "ident", hash(t)) - self.tname = "-%x" % (ident & 0xffffffff) - else: - self.tname = "" - dirname = os.path.dirname(self.lock_file) - - # unique name is mostly about the current process, but must - # also contain the path -- otherwise, two adjacent locked - # files conflict (one file gets locked, creating lock-file and - # unique file, the other one gets locked, creating lock-file - # and overwriting the already existing lock-file, then one - # gets unlocked, deleting both lock-file and unique file, - # finally the last lock errors out upon releasing. - self.unique_name = os.path.join(dirname, - "%s%s.%s%s" % (self.hostname, - self.tname, - self.pid, - hash(self.path))) - self.timeout = timeout - - def is_locked(self): - """ - Tell whether or not the file is locked. - """ - raise NotImplemented("implement in subclass") - - def i_am_locking(self): - """ - Return True if this object is locking the file. - """ - raise NotImplemented("implement in subclass") - - def break_lock(self): - """ - Remove a lock. Useful if a locking thread failed to unlock. - """ - raise NotImplemented("implement in subclass") - - def __repr__(self): - return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name, - self.path) - - -def _fl_helper(cls, mod, *args, **kwds): - warnings.warn("Import from %s module instead of lockfile package" % mod, - DeprecationWarning, stacklevel=2) - # This is a bit funky, but it's only for awhile. The way the unit tests - # are constructed this function winds up as an unbound method, so it - # actually takes three args, not two. We want to toss out self. - if not isinstance(args[0], str): - # We are testing, avoid the first arg - args = args[1:] - if len(args) == 1 and not kwds: - kwds["threaded"] = True - return cls(*args, **kwds) - - -def LinkFileLock(*args, **kwds): - """Factory function provided for backwards compatibility. - - Do not use in new code. Instead, import LinkLockFile from the - lockfile.linklockfile module. - """ - from . import linklockfile - return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile", - *args, **kwds) - - -def MkdirFileLock(*args, **kwds): - """Factory function provided for backwards compatibility. - - Do not use in new code. Instead, import MkdirLockFile from the - lockfile.mkdirlockfile module. - """ - from . import mkdirlockfile - return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile", - *args, **kwds) - - -def SQLiteFileLock(*args, **kwds): - """Factory function provided for backwards compatibility. - - Do not use in new code. Instead, import SQLiteLockFile from the - lockfile.mkdirlockfile module. - """ - from . import sqlitelockfile - return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile", - *args, **kwds) - - -def locked(path, timeout=None): - """Decorator which enables locks for decorated function. 
- - Arguments: - - path: path for lockfile. - - timeout (optional): Timeout for acquiring lock. - - Usage: - @locked('/var/run/myname', timeout=0) - def myname(...): - ... - """ - def decor(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - lock = FileLock(path, timeout=timeout) - lock.acquire() - try: - return func(*args, **kwargs) - finally: - lock.release() - return wrapper - return decor - - -if hasattr(os, "link"): - from . import linklockfile as _llf - LockFile = _llf.LinkLockFile -else: - from . import mkdirlockfile as _mlf - LockFile = _mlf.MkdirLockFile - -FileLock = LockFile diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.pyc deleted file mode 100644 index 183fc2c5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.py b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.py deleted file mode 100644 index 2ca9be04..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import absolute_import - -import time -import os - -from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, - AlreadyLocked) - - -class LinkLockFile(LockBase): - """Lock access to a file using atomic property of link(2). - - >>> lock = LinkLockFile('somefile') - >>> lock = LinkLockFile('somefile', threaded=False) - """ - - def acquire(self, timeout=None): - try: - open(self.unique_name, "wb").close() - except IOError: - raise LockFailed("failed to create %s" % self.unique_name) - - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - while True: - # Try and create a hard link to it. - try: - os.link(self.unique_name, self.lock_file) - except OSError: - # Link creation failed. Maybe we've double-locked? - nlinks = os.stat(self.unique_name).st_nlink - if nlinks == 2: - # The original link plus the one I created == 2. We're - # good to go. - return - else: - # Otherwise the lock creation failed. - if timeout is not None and time.time() > end_time: - os.unlink(self.unique_name) - if timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(timeout is not None and timeout / 10 or 0.1) - else: - # Link creation succeeded. We're good to go. 
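`LinkLockFile` above leans on two POSIX facts: `link(2)` is atomic, and a successfully taken lock leaves the per-caller marker file with a link count of 2. A stripped-down sketch of that idea, with hypothetical names and no timeout/retry handling:

```python
import os

def try_link_lock(unique_name, lock_file):
    """Return True if this caller ends up holding the lock."""
    open(unique_name, "wb").close()       # per-caller marker file
    try:
        os.link(unique_name, lock_file)   # atomic on POSIX filesystems
        return True
    except OSError:
        # A racing caller won -- unless our marker already has two links
        # (marker + lock_file), which means we were holding the lock already.
        if os.stat(unique_name).st_nlink == 2:
            return True
        os.unlink(unique_name)            # tidy up the unused marker
        return False

def release_link_lock(unique_name, lock_file):
    os.unlink(unique_name)
    os.unlink(lock_file)
```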
- return - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - elif not os.path.exists(self.unique_name): - raise NotMyLock("%s is locked, but not by me" % self.path) - os.unlink(self.unique_name) - os.unlink(self.lock_file) - - def is_locked(self): - return os.path.exists(self.lock_file) - - def i_am_locking(self): - return (self.is_locked() and - os.path.exists(self.unique_name) and - os.stat(self.unique_name).st_nlink == 2) - - def break_lock(self): - if os.path.exists(self.lock_file): - os.unlink(self.lock_file) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyc b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyc deleted file mode 100644 index 2621d47a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py deleted file mode 100644 index 05a8c96c..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py +++ /dev/null @@ -1,84 +0,0 @@ -from __future__ import absolute_import, division - -import time -import os -import sys -import errno - -from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, - AlreadyLocked) - - -class MkdirLockFile(LockBase): - """Lock file by creating a directory.""" - def __init__(self, path, threaded=True, timeout=None): - """ - >>> lock = MkdirLockFile('somefile') - >>> lock = MkdirLockFile('somefile', threaded=False) - """ - LockBase.__init__(self, path, threaded, timeout) - # Lock file itself is a directory. Place the unique file name into - # it. - self.unique_name = os.path.join(self.lock_file, - "%s.%s%s" % (self.hostname, - self.tname, - self.pid)) - - def acquire(self, timeout=None): - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - if timeout is None: - wait = 0.1 - else: - wait = max(0, timeout / 10) - - while True: - try: - os.mkdir(self.lock_file) - except OSError: - err = sys.exc_info()[1] - if err.errno == errno.EEXIST: - # Already locked. - if os.path.exists(self.unique_name): - # Already locked by me. - return - if timeout is not None and time.time() > end_time: - if timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - # Someone else has the lock. 
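`MkdirLockFile` relies instead on `mkdir(2)` being atomic: whichever process creates the directory owns the lock. A minimal sketch with hypothetical paths and no timeout loop:

```python
import errno
import os

def try_mkdir_lock(lock_dir):
    """Return True if this call created lock_dir and therefore owns the lock."""
    try:
        os.mkdir(lock_dir)        # atomic: exactly one caller can succeed
        return True
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            return False          # somebody else already holds the lock
        raise

def release_mkdir_lock(lock_dir):
    os.rmdir(lock_dir)
```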
- raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(wait) - else: - # Couldn't create the lock for some other reason - raise LockFailed("failed to create %s" % self.lock_file) - else: - open(self.unique_name, "wb").close() - return - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - elif not os.path.exists(self.unique_name): - raise NotMyLock("%s is locked, but not by me" % self.path) - os.unlink(self.unique_name) - os.rmdir(self.lock_file) - - def is_locked(self): - return os.path.exists(self.lock_file) - - def i_am_locking(self): - return (self.is_locked() and - os.path.exists(self.unique_name)) - - def break_lock(self): - if os.path.exists(self.lock_file): - for name in os.listdir(self.lock_file): - os.unlink(os.path.join(self.lock_file, name)) - os.rmdir(self.lock_file) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyc b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyc deleted file mode 100644 index 62fa39e5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.py b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.py deleted file mode 100644 index 069e85b1..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.py +++ /dev/null @@ -1,190 +0,0 @@ -# -*- coding: utf-8 -*- - -# pidlockfile.py -# -# Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au> -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Python Software Foundation License, version 2 or -# later as published by the Python Software Foundation. -# No warranty expressed or implied. See the file LICENSE.PSF-2 for details. - -""" Lockfile behaviour implemented via Unix PID files. - """ - -from __future__ import absolute_import - -import errno -import os -import time - -from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock, - LockTimeout) - - -class PIDLockFile(LockBase): - """ Lockfile implemented as a Unix PID file. - - The lock file is a normal file named by the attribute `path`. - A lock's PID file contains a single line of text, containing - the process ID (PID) of the process that acquired the lock. - - >>> lock = PIDLockFile('somefile') - >>> lock = PIDLockFile('somefile') - """ - - def __init__(self, path, threaded=False, timeout=None): - # pid lockfiles don't support threaded operation, so always force - # False as the threaded arg. - LockBase.__init__(self, path, False, timeout) - self.unique_name = self.path - - def read_pid(self): - """ Get the PID from the lock file. - """ - return read_pid_from_pidfile(self.path) - - def is_locked(self): - """ Test if the lock is currently held. - - The lock is held if the PID file for this lock exists. - - """ - return os.path.exists(self.path) - - def i_am_locking(self): - """ Test if the lock is held by the current process. - - Returns ``True`` if the current process ID matches the - number stored in the PID file. - """ - return self.is_locked() and os.getpid() == self.read_pid() - - def acquire(self, timeout=None): - """ Acquire the lock. - - Creates the PID file for this lock, or raises an error if - the lock could not be acquired. 
- """ - - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - while True: - try: - write_pid_to_pidfile(self.path) - except OSError as exc: - if exc.errno == errno.EEXIST: - # The lock creation failed. Maybe sleep a bit. - if time.time() > end_time: - if timeout is not None and timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(timeout is not None and timeout / 10 or 0.1) - else: - raise LockFailed("failed to create %s" % self.path) - else: - return - - def release(self): - """ Release the lock. - - Removes the PID file to release the lock, or raises an - error if the current process does not hold the lock. - - """ - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - if not self.i_am_locking(): - raise NotMyLock("%s is locked, but not by me" % self.path) - remove_existing_pidfile(self.path) - - def break_lock(self): - """ Break an existing lock. - - Removes the PID file if it already exists, otherwise does - nothing. - - """ - remove_existing_pidfile(self.path) - - -def read_pid_from_pidfile(pidfile_path): - """ Read the PID recorded in the named PID file. - - Read and return the numeric PID recorded as text in the named - PID file. If the PID file cannot be read, or if the content is - not a valid PID, return ``None``. - - """ - pid = None - try: - pidfile = open(pidfile_path, 'r') - except IOError: - pass - else: - # According to the FHS 2.3 section on PID files in /var/run: - # - # The file must consist of the process identifier in - # ASCII-encoded decimal, followed by a newline character. - # - # Programs that read PID files should be somewhat flexible - # in what they accept; i.e., they should ignore extra - # whitespace, leading zeroes, absence of the trailing - # newline, or additional lines in the PID file. - - line = pidfile.readline().strip() - try: - pid = int(line) - except ValueError: - pass - pidfile.close() - - return pid - - -def write_pid_to_pidfile(pidfile_path): - """ Write the PID in the named PID file. - - Get the numeric process ID (“PID”) of the current process - and write it to the named file as a line of text. - - """ - open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY) - open_mode = 0o644 - pidfile_fd = os.open(pidfile_path, open_flags, open_mode) - pidfile = os.fdopen(pidfile_fd, 'w') - - # According to the FHS 2.3 section on PID files in /var/run: - # - # The file must consist of the process identifier in - # ASCII-encoded decimal, followed by a newline character. For - # example, if crond was process number 25, /var/run/crond.pid - # would contain three characters: two, five, and newline. - - pid = os.getpid() - pidfile.write("%s\n" % pid) - pidfile.close() - - -def remove_existing_pidfile(pidfile_path): - """ Remove the named PID file if it exists. - - Removing a PID file that doesn't already exist puts us in the - desired state, so we ignore the condition if the file does not - exist. 
- - """ - try: - os.remove(pidfile_path) - except OSError as exc: - if exc.errno == errno.ENOENT: - pass - else: - raise diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyc b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyc deleted file mode 100644 index 847cc743..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py deleted file mode 100644 index f997e244..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py +++ /dev/null @@ -1,156 +0,0 @@ -from __future__ import absolute_import, division - -import time -import os - -try: - unicode -except NameError: - unicode = str - -from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked - - -class SQLiteLockFile(LockBase): - "Demonstrate SQL-based locking." - - testdb = None - - def __init__(self, path, threaded=True, timeout=None): - """ - >>> lock = SQLiteLockFile('somefile') - >>> lock = SQLiteLockFile('somefile', threaded=False) - """ - LockBase.__init__(self, path, threaded, timeout) - self.lock_file = unicode(self.lock_file) - self.unique_name = unicode(self.unique_name) - - if SQLiteLockFile.testdb is None: - import tempfile - _fd, testdb = tempfile.mkstemp() - os.close(_fd) - os.unlink(testdb) - del _fd, tempfile - SQLiteLockFile.testdb = testdb - - import sqlite3 - self.connection = sqlite3.connect(SQLiteLockFile.testdb) - - c = self.connection.cursor() - try: - c.execute("create table locks" - "(" - " lock_file varchar(32)," - " unique_name varchar(32)" - ")") - except sqlite3.OperationalError: - pass - else: - self.connection.commit() - import atexit - atexit.register(os.unlink, SQLiteLockFile.testdb) - - def acquire(self, timeout=None): - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - if timeout is None: - wait = 0.1 - elif timeout <= 0: - wait = 0 - else: - wait = timeout / 10 - - cursor = self.connection.cursor() - - while True: - if not self.is_locked(): - # Not locked. Try to lock it. - cursor.execute("insert into locks" - " (lock_file, unique_name)" - " values" - " (?, ?)", - (self.lock_file, self.unique_name)) - self.connection.commit() - - # Check to see if we are the only lock holder. - cursor.execute("select * from locks" - " where unique_name = ?", - (self.unique_name,)) - rows = cursor.fetchall() - if len(rows) > 1: - # Nope. Someone else got there. Remove our lock. - cursor.execute("delete from locks" - " where unique_name = ?", - (self.unique_name,)) - self.connection.commit() - else: - # Yup. We're done, so go home. - return - else: - # Check to see if we are the only lock holder. - cursor.execute("select * from locks" - " where unique_name = ?", - (self.unique_name,)) - rows = cursor.fetchall() - if len(rows) == 1: - # We're the locker, so go home. - return - - # Maybe we should wait a bit longer. - if timeout is not None and time.time() > end_time: - if timeout > 0: - # No more waiting. - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - # Someone else has the lock and we are impatient.. - raise AlreadyLocked("%s is already locked" % self.path) - - # Well, okay. We'll give it a bit longer. 
- time.sleep(wait) - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - if not self.i_am_locking(): - raise NotMyLock("%s is locked, but not by me (by %s)" % - (self.unique_name, self._who_is_locking())) - cursor = self.connection.cursor() - cursor.execute("delete from locks" - " where unique_name = ?", - (self.unique_name,)) - self.connection.commit() - - def _who_is_locking(self): - cursor = self.connection.cursor() - cursor.execute("select unique_name from locks" - " where lock_file = ?", - (self.lock_file,)) - return cursor.fetchone()[0] - - def is_locked(self): - cursor = self.connection.cursor() - cursor.execute("select * from locks" - " where lock_file = ?", - (self.lock_file,)) - rows = cursor.fetchall() - return not not rows - - def i_am_locking(self): - cursor = self.connection.cursor() - cursor.execute("select * from locks" - " where lock_file = ?" - " and unique_name = ?", - (self.lock_file, self.unique_name)) - return not not cursor.fetchall() - - def break_lock(self): - cursor = self.connection.cursor() - cursor.execute("delete from locks" - " where lock_file = ?", - (self.lock_file,)) - self.connection.commit() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyc b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyc deleted file mode 100644 index 72fa77d2..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py deleted file mode 100644 index 23b41f58..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py +++ /dev/null @@ -1,70 +0,0 @@ -from __future__ import absolute_import - -import os -import time - -from . import (LockBase, NotLocked, NotMyLock, LockTimeout, - AlreadyLocked) - - -class SymlinkLockFile(LockBase): - """Lock access to a file using symlink(2).""" - - def __init__(self, path, threaded=True, timeout=None): - # super(SymlinkLockFile).__init(...) - LockBase.__init__(self, path, threaded, timeout) - # split it back! - self.unique_name = os.path.split(self.unique_name)[1] - - def acquire(self, timeout=None): - # Hopefully unnecessary for symlink. - # try: - # open(self.unique_name, "wb").close() - # except IOError: - # raise LockFailed("failed to create %s" % self.unique_name) - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - while True: - # Try and create a symbolic link to it. - try: - os.symlink(self.unique_name, self.lock_file) - except OSError: - # Link creation failed. Maybe we've double-locked? - if self.i_am_locking(): - # Linked to out unique name. Proceed. - return - else: - # Otherwise the lock creation failed. - if timeout is not None and time.time() > end_time: - if timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(timeout / 10 if timeout is not None else 0.1) - else: - # Link creation succeeded. We're good to go. 
- return - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - elif not self.i_am_locking(): - raise NotMyLock("%s is locked, but not by me" % self.path) - os.unlink(self.lock_file) - - def is_locked(self): - return os.path.islink(self.lock_file) - - def i_am_locking(self): - return (os.path.islink(self.lock_file) - and os.readlink(self.lock_file) == self.unique_name) - - def break_lock(self): - if os.path.islink(self.lock_file): # exists && link - os.unlink(self.lock_file) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyc b/env/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyc deleted file mode 100644 index 093ea9f3..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/msgpack/__init__.py deleted file mode 100644 index b3265075..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/__init__.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding: utf-8 -from pip._vendor.msgpack._version import version -from pip._vendor.msgpack.exceptions import * - -from collections import namedtuple - - -class ExtType(namedtuple('ExtType', 'code data')): - """ExtType represents ext type in msgpack.""" - def __new__(cls, code, data): - if not isinstance(code, int): - raise TypeError("code must be int") - if not isinstance(data, bytes): - raise TypeError("data must be bytes") - if not 0 <= code <= 127: - raise ValueError("code must be 0~127") - return super(ExtType, cls).__new__(cls, code, data) - - -import os -if os.environ.get('MSGPACK_PUREPYTHON'): - from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker -else: - try: - from pip._vendor.msgpack._cmsgpack import Packer, unpackb, Unpacker - except ImportError: - from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker - - -def pack(o, stream, **kwargs): - """ - Pack object `o` and write it to `stream` - - See :class:`Packer` for options. - """ - packer = Packer(**kwargs) - stream.write(packer.pack(o)) - - -def packb(o, **kwargs): - """ - Pack object `o` and return packed bytes - - See :class:`Packer` for options. - """ - return Packer(**kwargs).pack(o) - - -def unpack(stream, **kwargs): - """ - Unpack an object from `stream`. - - Raises `ExtraData` when `stream` contains extra bytes. - See :class:`Unpacker` for options. - """ - data = stream.read() - return unpackb(data, **kwargs) - - -# alias for compatibility to simplejson/marshal/pickle. 
-load = unpack -loads = unpackb - -dump = pack -dumps = packb diff --git a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/msgpack/__init__.pyc deleted file mode 100644 index ec9d42d5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/_version.py b/env/lib/python2.7/site-packages/pip/_vendor/msgpack/_version.py deleted file mode 100644 index 926c5e7b..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/_version.py +++ /dev/null @@ -1 +0,0 @@ -version = (0, 6, 1) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/_version.pyc b/env/lib/python2.7/site-packages/pip/_vendor/msgpack/_version.pyc deleted file mode 100644 index 82f30672..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/_version.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/exceptions.pyc b/env/lib/python2.7/site-packages/pip/_vendor/msgpack/exceptions.pyc deleted file mode 100644 index 95a43eb4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/exceptions.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/fallback.py b/env/lib/python2.7/site-packages/pip/_vendor/msgpack/fallback.py deleted file mode 100644 index 5b731ddd..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/fallback.py +++ /dev/null @@ -1,1027 +0,0 @@ -"""Fallback pure Python implementation of msgpack""" - -import sys -import struct -import warnings - - -if sys.version_info[0] == 2: - PY2 = True - int_types = (int, long) - def dict_iteritems(d): - return d.iteritems() -else: - PY2 = False - int_types = int - unicode = str - xrange = range - def dict_iteritems(d): - return d.items() - -if sys.version_info < (3, 5): - # Ugly hack... - RecursionError = RuntimeError - - def _is_recursionerror(e): - return len(e.args) == 1 and isinstance(e.args[0], str) and \ - e.args[0].startswith('maximum recursion depth exceeded') -else: - def _is_recursionerror(e): - return True - -if hasattr(sys, 'pypy_version_info'): - # cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own - # StringBuilder is fastest. 
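The msgpack top-level helpers deleted above (`pack`/`packb`/`unpack`/`unpackb` plus the `load`/`loads`/`dump`/`dumps` aliases) have the same shape in the standalone `msgpack` package. A minimal round-trip sketch, assuming that package is installed:

```python
import msgpack

payload = {"name": "AutoTimer", "seconds": 10}

data = msgpack.packb(payload, use_bin_type=True)    # bytes, like packb() above
restored = msgpack.unpackb(data, raw=False)         # decode raw data back to str
assert restored == payload

# The simplejson/pickle-style aliases exist too: dumps/loads == packb/unpackb.
assert msgpack.loads(msgpack.dumps(payload, use_bin_type=True), raw=False) == payload
```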
- from __pypy__ import newlist_hint - try: - from __pypy__.builders import BytesBuilder as StringBuilder - except ImportError: - from __pypy__.builders import StringBuilder - USING_STRINGBUILDER = True - class StringIO(object): - def __init__(self, s=b''): - if s: - self.builder = StringBuilder(len(s)) - self.builder.append(s) - else: - self.builder = StringBuilder() - def write(self, s): - if isinstance(s, memoryview): - s = s.tobytes() - elif isinstance(s, bytearray): - s = bytes(s) - self.builder.append(s) - def getvalue(self): - return self.builder.build() -else: - USING_STRINGBUILDER = False - from io import BytesIO as StringIO - newlist_hint = lambda size: [] - - -from pip._vendor.msgpack.exceptions import ( - BufferFull, - OutOfData, - ExtraData, - FormatError, - StackError, -) - -from pip._vendor.msgpack import ExtType - - -EX_SKIP = 0 -EX_CONSTRUCT = 1 -EX_READ_ARRAY_HEADER = 2 -EX_READ_MAP_HEADER = 3 - -TYPE_IMMEDIATE = 0 -TYPE_ARRAY = 1 -TYPE_MAP = 2 -TYPE_RAW = 3 -TYPE_BIN = 4 -TYPE_EXT = 5 - -DEFAULT_RECURSE_LIMIT = 511 - - -def _check_type_strict(obj, t, type=type, tuple=tuple): - if type(t) is tuple: - return type(obj) in t - else: - return type(obj) is t - - -def _get_data_from_buffer(obj): - try: - view = memoryview(obj) - except TypeError: - # try to use legacy buffer protocol if 2.7, otherwise re-raise - if PY2: - view = memoryview(buffer(obj)) - warnings.warn("using old buffer interface to unpack %s; " - "this leads to unpacking errors if slicing is used and " - "will be removed in a future version" % type(obj), - RuntimeWarning, stacklevel=3) - else: - raise - if view.itemsize != 1: - raise ValueError("cannot unpack from multi-byte object") - return view - - -def unpack(stream, **kwargs): - warnings.warn( - "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", - DeprecationWarning, stacklevel=2) - data = stream.read() - return unpackb(data, **kwargs) - - -def unpackb(packed, **kwargs): - """ - Unpack an object from `packed`. - - Raises ``ExtraData`` when *packed* contains extra bytes. - Raises ``ValueError`` when *packed* is incomplete. - Raises ``FormatError`` when *packed* is not valid msgpack. - Raises ``StackError`` when *packed* contains too nested. - Other exceptions can be raised during unpacking. - - See :class:`Unpacker` for options. - """ - unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs) - unpacker.feed(packed) - try: - ret = unpacker._unpack() - except OutOfData: - raise ValueError("Unpack failed: incomplete input") - except RecursionError as e: - if _is_recursionerror(e): - raise StackError - raise - if unpacker._got_extradata(): - raise ExtraData(ret, unpacker._get_extradata()) - return ret - - -if sys.version_info < (2, 7, 6): - def _unpack_from(f, b, o=0): - """Explicit typcast for legacy struct.unpack_from""" - return struct.unpack_from(f, bytes(b), o) -else: - _unpack_from = struct.unpack_from - - -class Unpacker(object): - """Streaming unpacker. - - arguments: - - :param file_like: - File-like object having `.read(n)` method. - If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable. - - :param int read_size: - Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`) - - :param bool use_list: - If true, unpack msgpack array to Python list. - Otherwise, unpack to Python tuple. (default: True) - - :param bool raw: - If true, unpack msgpack raw to Python bytes (default). 
- Otherwise, unpack to Python str (or unicode on Python 2) by decoding - with UTF-8 encoding (recommended). - Currently, the default is true, but it will be changed to false in - near future. So you must specify it explicitly for keeping backward - compatibility. - - *encoding* option which is deprecated overrides this option. - - :param bool strict_map_key: - If true, only str or bytes are accepted for map (dict) keys. - It's False by default for backward-compatibility. - But it will be True from msgpack 1.0. - - :param callable object_hook: - When specified, it should be callable. - Unpacker calls it with a dict argument after unpacking msgpack map. - (See also simplejson) - - :param callable object_pairs_hook: - When specified, it should be callable. - Unpacker calls it with a list of key-value pairs after unpacking msgpack map. - (See also simplejson) - - :param str encoding: - Encoding used for decoding msgpack raw. - If it is None (default), msgpack raw is deserialized to Python bytes. - - :param str unicode_errors: - (deprecated) Used for decoding msgpack raw with *encoding*. - (default: `'strict'`) - - :param int max_buffer_size: - Limits size of data waiting unpacked. 0 means system's INT_MAX (default). - Raises `BufferFull` exception when it is insufficient. - You should set this parameter when unpacking data from untrusted source. - - :param int max_str_len: - Deprecated, use *max_buffer_size* instead. - Limits max length of str. (default: max_buffer_size or 1024*1024) - - :param int max_bin_len: - Deprecated, use *max_buffer_size* instead. - Limits max length of bin. (default: max_buffer_size or 1024*1024) - - :param int max_array_len: - Limits max length of array. - (default: max_buffer_size or 128*1024) - - :param int max_map_len: - Limits max length of map. - (default: max_buffer_size//2 or 32*1024) - - :param int max_ext_len: - Deprecated, use *max_buffer_size* instead. - Limits max size of ext type. (default: max_buffer_size or 1024*1024) - - Example of streaming deserialize from file-like object:: - - unpacker = Unpacker(file_like, raw=False, max_buffer_size=10*1024*1024) - for o in unpacker: - process(o) - - Example of streaming deserialize from socket:: - - unpacker = Unpacker(raw=False, max_buffer_size=10*1024*1024) - while True: - buf = sock.recv(1024**2) - if not buf: - break - unpacker.feed(buf) - for o in unpacker: - process(o) - - Raises ``ExtraData`` when *packed* contains extra bytes. - Raises ``OutOfData`` when *packed* is incomplete. - Raises ``FormatError`` when *packed* is not valid msgpack. - Raises ``StackError`` when *packed* contains too nested. - Other exceptions can be raised during unpacking. - """ - - def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, strict_map_key=False, - object_hook=None, object_pairs_hook=None, list_hook=None, - encoding=None, unicode_errors=None, max_buffer_size=0, - ext_hook=ExtType, - max_str_len=-1, - max_bin_len=-1, - max_array_len=-1, - max_map_len=-1, - max_ext_len=-1): - if encoding is not None: - warnings.warn( - "encoding is deprecated, Use raw=False instead.", - DeprecationWarning, stacklevel=2) - - if unicode_errors is None: - unicode_errors = 'strict' - - if file_like is None: - self._feeding = True - else: - if not callable(file_like.read): - raise TypeError("`file_like.read` must be callable") - self.file_like = file_like - self._feeding = False - - #: array of bytes fed. 
- self._buffer = bytearray() - #: Which position we currently reads - self._buff_i = 0 - - # When Unpacker is used as an iterable, between the calls to next(), - # the buffer is not "consumed" completely, for efficiency sake. - # Instead, it is done sloppily. To make sure we raise BufferFull at - # the correct moments, we have to keep track of how sloppy we were. - # Furthermore, when the buffer is incomplete (that is: in the case - # we raise an OutOfData) we need to rollback the buffer to the correct - # state, which _buf_checkpoint records. - self._buf_checkpoint = 0 - - if max_str_len == -1: - max_str_len = max_buffer_size or 1024*1024 - if max_bin_len == -1: - max_bin_len = max_buffer_size or 1024*1024 - if max_array_len == -1: - max_array_len = max_buffer_size or 128*1024 - if max_map_len == -1: - max_map_len = max_buffer_size//2 or 32*1024 - if max_ext_len == -1: - max_ext_len = max_buffer_size or 1024*1024 - - self._max_buffer_size = max_buffer_size or 2**31-1 - if read_size > self._max_buffer_size: - raise ValueError("read_size must be smaller than max_buffer_size") - self._read_size = read_size or min(self._max_buffer_size, 16*1024) - self._raw = bool(raw) - self._strict_map_key = bool(strict_map_key) - self._encoding = encoding - self._unicode_errors = unicode_errors - self._use_list = use_list - self._list_hook = list_hook - self._object_hook = object_hook - self._object_pairs_hook = object_pairs_hook - self._ext_hook = ext_hook - self._max_str_len = max_str_len - self._max_bin_len = max_bin_len - self._max_array_len = max_array_len - self._max_map_len = max_map_len - self._max_ext_len = max_ext_len - self._stream_offset = 0 - - if list_hook is not None and not callable(list_hook): - raise TypeError('`list_hook` is not callable') - if object_hook is not None and not callable(object_hook): - raise TypeError('`object_hook` is not callable') - if object_pairs_hook is not None and not callable(object_pairs_hook): - raise TypeError('`object_pairs_hook` is not callable') - if object_hook is not None and object_pairs_hook is not None: - raise TypeError("object_pairs_hook and object_hook are mutually " - "exclusive") - if not callable(ext_hook): - raise TypeError("`ext_hook` is not callable") - - def feed(self, next_bytes): - assert self._feeding - view = _get_data_from_buffer(next_bytes) - if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): - raise BufferFull - - # Strip buffer before checkpoint before reading file. - if self._buf_checkpoint > 0: - del self._buffer[:self._buf_checkpoint] - self._buff_i -= self._buf_checkpoint - self._buf_checkpoint = 0 - - # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython - self._buffer.extend(view) - - def _consume(self): - """ Gets rid of the used parts of the buffer. """ - self._stream_offset += self._buff_i - self._buf_checkpoint - self._buf_checkpoint = self._buff_i - - def _got_extradata(self): - return self._buff_i < len(self._buffer) - - def _get_extradata(self): - return self._buffer[self._buff_i:] - - def read_bytes(self, n): - return self._read(n) - - def _read(self, n): - # (int) -> bytearray - self._reserve(n) - i = self._buff_i - self._buff_i = i+n - return self._buffer[i:i+n] - - def _reserve(self, n): - remain_bytes = len(self._buffer) - self._buff_i - n - - # Fast path: buffer has n bytes already - if remain_bytes >= 0: - return - - if self._feeding: - self._buff_i = self._buf_checkpoint - raise OutOfData - - # Strip buffer before checkpoint before reading file. 
- if self._buf_checkpoint > 0: - del self._buffer[:self._buf_checkpoint] - self._buff_i -= self._buf_checkpoint - self._buf_checkpoint = 0 - - # Read from file - remain_bytes = -remain_bytes - while remain_bytes > 0: - to_read_bytes = max(self._read_size, remain_bytes) - read_data = self.file_like.read(to_read_bytes) - if not read_data: - break - assert isinstance(read_data, bytes) - self._buffer += read_data - remain_bytes -= len(read_data) - - if len(self._buffer) < n + self._buff_i: - self._buff_i = 0 # rollback - raise OutOfData - - def _read_header(self, execute=EX_CONSTRUCT): - typ = TYPE_IMMEDIATE - n = 0 - obj = None - self._reserve(1) - b = self._buffer[self._buff_i] - self._buff_i += 1 - if b & 0b10000000 == 0: - obj = b - elif b & 0b11100000 == 0b11100000: - obj = -1 - (b ^ 0xff) - elif b & 0b11100000 == 0b10100000: - n = b & 0b00011111 - typ = TYPE_RAW - if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) - obj = self._read(n) - elif b & 0b11110000 == 0b10010000: - n = b & 0b00001111 - typ = TYPE_ARRAY - if n > self._max_array_len: - raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b & 0b11110000 == 0b10000000: - n = b & 0b00001111 - typ = TYPE_MAP - if n > self._max_map_len: - raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) - elif b == 0xc0: - obj = None - elif b == 0xc2: - obj = False - elif b == 0xc3: - obj = True - elif b == 0xc4: - typ = TYPE_BIN - self._reserve(1) - n = self._buffer[self._buff_i] - self._buff_i += 1 - if n > self._max_bin_len: - raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) - obj = self._read(n) - elif b == 0xc5: - typ = TYPE_BIN - self._reserve(2) - n = _unpack_from(">H", self._buffer, self._buff_i)[0] - self._buff_i += 2 - if n > self._max_bin_len: - raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) - obj = self._read(n) - elif b == 0xc6: - typ = TYPE_BIN - self._reserve(4) - n = _unpack_from(">I", self._buffer, self._buff_i)[0] - self._buff_i += 4 - if n > self._max_bin_len: - raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) - obj = self._read(n) - elif b == 0xc7: # ext 8 - typ = TYPE_EXT - self._reserve(2) - L, n = _unpack_from('Bb', self._buffer, self._buff_i) - self._buff_i += 2 - if L > self._max_ext_len: - raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) - obj = self._read(L) - elif b == 0xc8: # ext 16 - typ = TYPE_EXT - self._reserve(3) - L, n = _unpack_from('>Hb', self._buffer, self._buff_i) - self._buff_i += 3 - if L > self._max_ext_len: - raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) - obj = self._read(L) - elif b == 0xc9: # ext 32 - typ = TYPE_EXT - self._reserve(5) - L, n = _unpack_from('>Ib', self._buffer, self._buff_i) - self._buff_i += 5 - if L > self._max_ext_len: - raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) - obj = self._read(L) - elif b == 0xca: - self._reserve(4) - obj = _unpack_from(">f", self._buffer, self._buff_i)[0] - self._buff_i += 4 - elif b == 0xcb: - self._reserve(8) - obj = _unpack_from(">d", self._buffer, self._buff_i)[0] - self._buff_i += 8 - elif b == 0xcc: - self._reserve(1) - obj = self._buffer[self._buff_i] - self._buff_i += 1 - elif b == 0xcd: - self._reserve(2) - obj = _unpack_from(">H", self._buffer, self._buff_i)[0] - self._buff_i += 2 - elif b == 0xce: - self._reserve(4) - obj = _unpack_from(">I", self._buffer, self._buff_i)[0] - self._buff_i += 4 - elif b == 0xcf: - 
self._reserve(8) - obj = _unpack_from(">Q", self._buffer, self._buff_i)[0] - self._buff_i += 8 - elif b == 0xd0: - self._reserve(1) - obj = _unpack_from("b", self._buffer, self._buff_i)[0] - self._buff_i += 1 - elif b == 0xd1: - self._reserve(2) - obj = _unpack_from(">h", self._buffer, self._buff_i)[0] - self._buff_i += 2 - elif b == 0xd2: - self._reserve(4) - obj = _unpack_from(">i", self._buffer, self._buff_i)[0] - self._buff_i += 4 - elif b == 0xd3: - self._reserve(8) - obj = _unpack_from(">q", self._buffer, self._buff_i)[0] - self._buff_i += 8 - elif b == 0xd4: # fixext 1 - typ = TYPE_EXT - if self._max_ext_len < 1: - raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) - self._reserve(2) - n, obj = _unpack_from("b1s", self._buffer, self._buff_i) - self._buff_i += 2 - elif b == 0xd5: # fixext 2 - typ = TYPE_EXT - if self._max_ext_len < 2: - raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) - self._reserve(3) - n, obj = _unpack_from("b2s", self._buffer, self._buff_i) - self._buff_i += 3 - elif b == 0xd6: # fixext 4 - typ = TYPE_EXT - if self._max_ext_len < 4: - raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) - self._reserve(5) - n, obj = _unpack_from("b4s", self._buffer, self._buff_i) - self._buff_i += 5 - elif b == 0xd7: # fixext 8 - typ = TYPE_EXT - if self._max_ext_len < 8: - raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) - self._reserve(9) - n, obj = _unpack_from("b8s", self._buffer, self._buff_i) - self._buff_i += 9 - elif b == 0xd8: # fixext 16 - typ = TYPE_EXT - if self._max_ext_len < 16: - raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) - self._reserve(17) - n, obj = _unpack_from("b16s", self._buffer, self._buff_i) - self._buff_i += 17 - elif b == 0xd9: - typ = TYPE_RAW - self._reserve(1) - n = self._buffer[self._buff_i] - self._buff_i += 1 - if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) - obj = self._read(n) - elif b == 0xda: - typ = TYPE_RAW - self._reserve(2) - n, = _unpack_from(">H", self._buffer, self._buff_i) - self._buff_i += 2 - if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) - obj = self._read(n) - elif b == 0xdb: - typ = TYPE_RAW - self._reserve(4) - n, = _unpack_from(">I", self._buffer, self._buff_i) - self._buff_i += 4 - if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) - obj = self._read(n) - elif b == 0xdc: - typ = TYPE_ARRAY - self._reserve(2) - n, = _unpack_from(">H", self._buffer, self._buff_i) - self._buff_i += 2 - if n > self._max_array_len: - raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b == 0xdd: - typ = TYPE_ARRAY - self._reserve(4) - n, = _unpack_from(">I", self._buffer, self._buff_i) - self._buff_i += 4 - if n > self._max_array_len: - raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b == 0xde: - self._reserve(2) - n, = _unpack_from(">H", self._buffer, self._buff_i) - self._buff_i += 2 - if n > self._max_map_len: - raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) - typ = TYPE_MAP - elif b == 0xdf: - self._reserve(4) - n, = _unpack_from(">I", self._buffer, self._buff_i) - self._buff_i += 4 - if n > self._max_map_len: - raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) - typ = TYPE_MAP - else: - raise FormatError("Unknown header: 0x%x" % b) - return typ, n, obj - - def _unpack(self, 
execute=EX_CONSTRUCT): - typ, n, obj = self._read_header(execute) - - if execute == EX_READ_ARRAY_HEADER: - if typ != TYPE_ARRAY: - raise ValueError("Expected array") - return n - if execute == EX_READ_MAP_HEADER: - if typ != TYPE_MAP: - raise ValueError("Expected map") - return n - # TODO should we eliminate the recursion? - if typ == TYPE_ARRAY: - if execute == EX_SKIP: - for i in xrange(n): - # TODO check whether we need to call `list_hook` - self._unpack(EX_SKIP) - return - ret = newlist_hint(n) - for i in xrange(n): - ret.append(self._unpack(EX_CONSTRUCT)) - if self._list_hook is not None: - ret = self._list_hook(ret) - # TODO is the interaction between `list_hook` and `use_list` ok? - return ret if self._use_list else tuple(ret) - if typ == TYPE_MAP: - if execute == EX_SKIP: - for i in xrange(n): - # TODO check whether we need to call hooks - self._unpack(EX_SKIP) - self._unpack(EX_SKIP) - return - if self._object_pairs_hook is not None: - ret = self._object_pairs_hook( - (self._unpack(EX_CONSTRUCT), - self._unpack(EX_CONSTRUCT)) - for _ in xrange(n)) - else: - ret = {} - for _ in xrange(n): - key = self._unpack(EX_CONSTRUCT) - if self._strict_map_key and type(key) not in (unicode, bytes): - raise ValueError("%s is not allowed for map key" % str(type(key))) - ret[key] = self._unpack(EX_CONSTRUCT) - if self._object_hook is not None: - ret = self._object_hook(ret) - return ret - if execute == EX_SKIP: - return - if typ == TYPE_RAW: - if self._encoding is not None: - obj = obj.decode(self._encoding, self._unicode_errors) - elif self._raw: - obj = bytes(obj) - else: - obj = obj.decode('utf_8') - return obj - if typ == TYPE_EXT: - return self._ext_hook(n, bytes(obj)) - if typ == TYPE_BIN: - return bytes(obj) - assert typ == TYPE_IMMEDIATE - return obj - - def __iter__(self): - return self - - def __next__(self): - try: - ret = self._unpack(EX_CONSTRUCT) - self._consume() - return ret - except OutOfData: - self._consume() - raise StopIteration - except RecursionError: - raise StackError - - next = __next__ - - def skip(self): - self._unpack(EX_SKIP) - self._consume() - - def unpack(self): - try: - ret = self._unpack(EX_CONSTRUCT) - except RecursionError: - raise StackError - self._consume() - return ret - - def read_array_header(self): - ret = self._unpack(EX_READ_ARRAY_HEADER) - self._consume() - return ret - - def read_map_header(self): - ret = self._unpack(EX_READ_MAP_HEADER) - self._consume() - return ret - - def tell(self): - return self._stream_offset - - -class Packer(object): - """ - MessagePack Packer - - usage: - - packer = Packer() - astream.write(packer.pack(a)) - astream.write(packer.pack(b)) - - Packer's constructor has some keyword arguments: - - :param callable default: - Convert user type to builtin type that Packer supports. - See also simplejson's document. - - :param bool use_single_float: - Use single precision float type for float. (default: False) - - :param bool autoreset: - Reset buffer after each pack and return its content as `bytes`. (default: True). - If set this to false, use `bytes()` to get content and `.reset()` to clear buffer. - - :param bool use_bin_type: - Use bin type introduced in msgpack spec 2.0 for bytes. - It also enables str8 type for unicode. - - :param bool strict_types: - If set to true, types will be checked to be exact. Derived classes - from serializeable types will not be serialized and will be - treated as unsupported type and forwarded to default. - Additionally tuples will not be serialized as lists. 
- This is useful when trying to implement accurate serialization - for python types. - - :param str encoding: - (deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8') - - :param str unicode_errors: - Error handler for encoding unicode. (default: 'strict') - """ - def __init__(self, default=None, encoding=None, unicode_errors=None, - use_single_float=False, autoreset=True, use_bin_type=False, - strict_types=False): - if encoding is None: - encoding = 'utf_8' - else: - warnings.warn( - "encoding is deprecated, Use raw=False instead.", - DeprecationWarning, stacklevel=2) - - if unicode_errors is None: - unicode_errors = 'strict' - - self._strict_types = strict_types - self._use_float = use_single_float - self._autoreset = autoreset - self._use_bin_type = use_bin_type - self._encoding = encoding - self._unicode_errors = unicode_errors - self._buffer = StringIO() - if default is not None: - if not callable(default): - raise TypeError("default must be callable") - self._default = default - - def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT, - check=isinstance, check_type_strict=_check_type_strict): - default_used = False - if self._strict_types: - check = check_type_strict - list_types = list - else: - list_types = (list, tuple) - while True: - if nest_limit < 0: - raise ValueError("recursion limit exceeded") - if obj is None: - return self._buffer.write(b"\xc0") - if check(obj, bool): - if obj: - return self._buffer.write(b"\xc3") - return self._buffer.write(b"\xc2") - if check(obj, int_types): - if 0 <= obj < 0x80: - return self._buffer.write(struct.pack("B", obj)) - if -0x20 <= obj < 0: - return self._buffer.write(struct.pack("b", obj)) - if 0x80 <= obj <= 0xff: - return self._buffer.write(struct.pack("BB", 0xcc, obj)) - if -0x80 <= obj < 0: - return self._buffer.write(struct.pack(">Bb", 0xd0, obj)) - if 0xff < obj <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xcd, obj)) - if -0x8000 <= obj < -0x80: - return self._buffer.write(struct.pack(">Bh", 0xd1, obj)) - if 0xffff < obj <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xce, obj)) - if -0x80000000 <= obj < -0x8000: - return self._buffer.write(struct.pack(">Bi", 0xd2, obj)) - if 0xffffffff < obj <= 0xffffffffffffffff: - return self._buffer.write(struct.pack(">BQ", 0xcf, obj)) - if -0x8000000000000000 <= obj < -0x80000000: - return self._buffer.write(struct.pack(">Bq", 0xd3, obj)) - if not default_used and self._default is not None: - obj = self._default(obj) - default_used = True - continue - raise OverflowError("Integer value out of range") - if check(obj, (bytes, bytearray)): - n = len(obj) - if n >= 2**32: - raise ValueError("%s is too large" % type(obj).__name__) - self._pack_bin_header(n) - return self._buffer.write(obj) - if check(obj, unicode): - if self._encoding is None: - raise TypeError( - "Can't encode unicode string: " - "no encoding is specified") - obj = obj.encode(self._encoding, self._unicode_errors) - n = len(obj) - if n >= 2**32: - raise ValueError("String is too large") - self._pack_raw_header(n) - return self._buffer.write(obj) - if check(obj, memoryview): - n = len(obj) * obj.itemsize - if n >= 2**32: - raise ValueError("Memoryview is too large") - self._pack_bin_header(n) - return self._buffer.write(obj) - if check(obj, float): - if self._use_float: - return self._buffer.write(struct.pack(">Bf", 0xca, obj)) - return self._buffer.write(struct.pack(">Bd", 0xcb, obj)) - if check(obj, ExtType): - code = obj.code - data = obj.data - assert isinstance(code, int) - assert 
isinstance(data, bytes) - L = len(data) - if L == 1: - self._buffer.write(b'\xd4') - elif L == 2: - self._buffer.write(b'\xd5') - elif L == 4: - self._buffer.write(b'\xd6') - elif L == 8: - self._buffer.write(b'\xd7') - elif L == 16: - self._buffer.write(b'\xd8') - elif L <= 0xff: - self._buffer.write(struct.pack(">BB", 0xc7, L)) - elif L <= 0xffff: - self._buffer.write(struct.pack(">BH", 0xc8, L)) - else: - self._buffer.write(struct.pack(">BI", 0xc9, L)) - self._buffer.write(struct.pack("b", code)) - self._buffer.write(data) - return - if check(obj, list_types): - n = len(obj) - self._pack_array_header(n) - for i in xrange(n): - self._pack(obj[i], nest_limit - 1) - return - if check(obj, dict): - return self._pack_map_pairs(len(obj), dict_iteritems(obj), - nest_limit - 1) - if not default_used and self._default is not None: - obj = self._default(obj) - default_used = 1 - continue - raise TypeError("Cannot serialize %r" % (obj, )) - - def pack(self, obj): - try: - self._pack(obj) - except: - self._buffer = StringIO() # force reset - raise - if self._autoreset: - ret = self._buffer.getvalue() - self._buffer = StringIO() - return ret - - def pack_map_pairs(self, pairs): - self._pack_map_pairs(len(pairs), pairs) - if self._autoreset: - ret = self._buffer.getvalue() - self._buffer = StringIO() - return ret - - def pack_array_header(self, n): - if n >= 2**32: - raise ValueError - self._pack_array_header(n) - if self._autoreset: - ret = self._buffer.getvalue() - self._buffer = StringIO() - return ret - - def pack_map_header(self, n): - if n >= 2**32: - raise ValueError - self._pack_map_header(n) - if self._autoreset: - ret = self._buffer.getvalue() - self._buffer = StringIO() - return ret - - def pack_ext_type(self, typecode, data): - if not isinstance(typecode, int): - raise TypeError("typecode must have int type.") - if not 0 <= typecode <= 127: - raise ValueError("typecode should be 0-127") - if not isinstance(data, bytes): - raise TypeError("data must have bytes type") - L = len(data) - if L > 0xffffffff: - raise ValueError("Too large data") - if L == 1: - self._buffer.write(b'\xd4') - elif L == 2: - self._buffer.write(b'\xd5') - elif L == 4: - self._buffer.write(b'\xd6') - elif L == 8: - self._buffer.write(b'\xd7') - elif L == 16: - self._buffer.write(b'\xd8') - elif L <= 0xff: - self._buffer.write(b'\xc7' + struct.pack('B', L)) - elif L <= 0xffff: - self._buffer.write(b'\xc8' + struct.pack('>H', L)) - else: - self._buffer.write(b'\xc9' + struct.pack('>I', L)) - self._buffer.write(struct.pack('B', typecode)) - self._buffer.write(data) - - def _pack_array_header(self, n): - if n <= 0x0f: - return self._buffer.write(struct.pack('B', 0x90 + n)) - if n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xdc, n)) - if n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xdd, n)) - raise ValueError("Array is too large") - - def _pack_map_header(self, n): - if n <= 0x0f: - return self._buffer.write(struct.pack('B', 0x80 + n)) - if n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xde, n)) - if n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xdf, n)) - raise ValueError("Dict is too large") - - def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): - self._pack_map_header(n) - for (k, v) in pairs: - self._pack(k, nest_limit - 1) - self._pack(v, nest_limit - 1) - - def _pack_raw_header(self, n): - if n <= 0x1f: - self._buffer.write(struct.pack('B', 0xa0 + n)) - elif self._use_bin_type and n <= 0xff: - self._buffer.write(struct.pack('>BB', 
0xd9, n)) - elif n <= 0xffff: - self._buffer.write(struct.pack(">BH", 0xda, n)) - elif n <= 0xffffffff: - self._buffer.write(struct.pack(">BI", 0xdb, n)) - else: - raise ValueError('Raw is too large') - - def _pack_bin_header(self, n): - if not self._use_bin_type: - return self._pack_raw_header(n) - elif n <= 0xff: - return self._buffer.write(struct.pack('>BB', 0xc4, n)) - elif n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xc5, n)) - elif n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xc6, n)) - else: - raise ValueError('Bin is too large') - - def bytes(self): - """Return internal buffer contents as bytes object""" - return self._buffer.getvalue() - - def reset(self): - """Reset internal buffer. - - This method is usaful only when autoreset=False. - """ - self._buffer = StringIO() - - def getbuffer(self): - """Return view of internal buffer.""" - if USING_STRINGBUILDER or PY2: - return memoryview(self.bytes()) - else: - return self._buffer.getbuffer() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/fallback.pyc b/env/lib/python2.7/site-packages/pip/_vendor/msgpack/fallback.pyc deleted file mode 100644 index 074b7c0e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/fallback.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.py b/env/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.py deleted file mode 100644 index 7481c9e2..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "19.0" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD or Apache License, Version 2.0" -__copyright__ = "Copyright 2014-2019 %s" % __author__ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.pyc deleted file mode 100644 index 55e4d623..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.pyc deleted file mode 100644 index 264201bf..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.py b/env/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.py deleted file mode 100644 index 25da473c..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.py +++ /dev/null @@ -1,31 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - -import sys - - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - -# flake8: noqa - -if PY3: - string_types = (str,) -else: - string_types = (basestring,) - - -def with_metaclass(meta, *bases): - """ - Create a base class with a metaclass. - """ - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - - return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.pyc b/env/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.pyc deleted file mode 100644 index a7de5e2a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.py b/env/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.py deleted file mode 100644 index 68dcca63..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.py +++ /dev/null @@ -1,68 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - - -class Infinity(object): - def __repr__(self): - return "Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return False - - def __le__(self, other): - return False - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return True - - def __ge__(self, other): - return True - - def __neg__(self): - return NegativeInfinity - - -Infinity = Infinity() - - -class NegativeInfinity(object): - def __repr__(self): - return "-Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return True - - def __le__(self, other): - return True - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return False - - def __ge__(self, other): - return False - - def __neg__(self): - return Infinity - - -NegativeInfinity = NegativeInfinity() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.pyc b/env/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.pyc deleted file mode 100644 index 07523103..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/markers.py b/env/lib/python2.7/site-packages/pip/_vendor/packaging/markers.py deleted file mode 100644 index 54824768..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/packaging/markers.py +++ /dev/null @@ -1,296 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - -import operator -import os -import platform -import sys - -from pip._vendor.pyparsing import ParseException, ParseResults, stringStart, stringEnd -from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString -from pip._vendor.pyparsing import Literal as L # noqa - -from ._compat import string_types -from .specifiers import Specifier, InvalidSpecifier - - -__all__ = [ - "InvalidMarker", - "UndefinedComparison", - "UndefinedEnvironmentName", - "Marker", - "default_environment", -] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. - """ - - -class Node(object): - def __init__(self, value): - self.value = value - - def __str__(self): - return str(self.value) - - def __repr__(self): - return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) - - def serialize(self): - raise NotImplementedError - - -class Variable(Node): - def serialize(self): - return str(self) - - -class Value(Node): - def serialize(self): - return '"{0}"'.format(self) - - -class Op(Node): - def serialize(self): - return str(self) - - -VARIABLE = ( - L("implementation_version") - | L("platform_python_implementation") - | L("implementation_name") - | L("python_full_version") - | L("platform_release") - | L("platform_version") - | L("platform_machine") - | L("platform_system") - | L("python_version") - | L("sys_platform") - | L("os_name") - | L("os.name") - | L("sys.platform") # PEP-345 - | L("platform.version") # PEP-345 - | L("platform.machine") # PEP-345 - | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # PEP-345 - | L("extra") # undocumented setuptools legacy -) -ALIASES = { - "os.name": "os_name", - "sys.platform": "sys_platform", - "platform.version": "platform_version", - "platform.machine": "platform_machine", - "platform.python_implementation": "platform_python_implementation", - "python_implementation": "platform_python_implementation", -} -VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) - -VERSION_CMP = ( - L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") -) - -MARKER_OP = VERSION_CMP | L("not in") | L("in") -MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) - -MARKER_VALUE = QuotedString("'") | QuotedString('"') -MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) - -BOOLOP = L("and") | L("or") - -MARKER_VAR = VARIABLE | MARKER_VALUE - -MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) -MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) - -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() - -MARKER_EXPR = Forward() -MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) -MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) - -MARKER = stringStart + MARKER_EXPR + stringEnd - - -def _coerce_parse_result(results): - if isinstance(results, ParseResults): - return [_coerce_parse_result(i) for i in results] - else: - return results - - -def _format_marker(marker, first=True): - assert isinstance(marker, (list, tuple, string_types)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself it's own list. 
In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. - if ( - isinstance(marker, list) - and len(marker) == 1 - and isinstance(marker[0], (list, tuple)) - ): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs, op, rhs): - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs) - - oper = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison( - "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) - ) - - return oper(lhs, rhs) - - -_undefined = object() - - -def _get_env(environment, name): - value = environment.get(name, _undefined) - - if value is _undefined: - raise UndefinedEnvironmentName( - "{0!r} does not exist in evaluation environment.".format(name) - ) - - return value - - -def _evaluate_markers(markers, environment): - groups = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, string_types)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - lhs_value = _get_env(environment, lhs.value) - rhs_value = rhs.value - else: - lhs_value = lhs.value - rhs_value = _get_env(environment, rhs.value) - - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info): - version = "{0.major}.{0.minor}.{0.micro}".format(info) - kind = info.releaselevel - if kind != "final": - version += kind[0] + str(info.serial) - return version - - -def default_environment(): - if hasattr(sys, "implementation"): - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - else: - iver = "0" - implementation_name = "" - - return { - "implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": platform.python_version()[:3], - "sys_platform": sys.platform, - } - - -class Marker(object): - def __init__(self, marker): - try: - self._markers = _coerce_parse_result(MARKER.parseString(marker)) - except ParseException as e: - err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( - marker, marker[e.loc : e.loc + 8] - ) - raise InvalidMarker(err_str) - - def __str__(self): - return _format_marker(self._markers) - - def __repr__(self): - return "<Marker({0!r})>".format(str(self)) - - def evaluate(self, environment=None): - """Evaluate a marker. - - Return the boolean from evaluating the given marker against the - environment. 
environment is an optional argument to override all or - part of the determined environment. - - The environment is determined from the current Python process. - """ - current_environment = default_environment() - if environment is not None: - current_environment.update(environment) - - return _evaluate_markers(self._markers, current_environment) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/markers.pyc b/env/lib/python2.7/site-packages/pip/_vendor/packaging/markers.pyc deleted file mode 100644 index b7277c3b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/packaging/markers.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.py b/env/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.py deleted file mode 100644 index dbc5f11d..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.py +++ /dev/null @@ -1,138 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import string -import re - -from pip._vendor.pyparsing import stringStart, stringEnd, originalTextFor, ParseException -from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine -from pip._vendor.pyparsing import Literal as L # noqa -from pip._vendor.six.moves.urllib import parse as urlparse - -from .markers import MARKER_EXPR, Marker -from .specifiers import LegacySpecifier, Specifier, SpecifierSet - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -ALPHANUM = Word(string.ascii_letters + string.digits) - -LBRACKET = L("[").suppress() -RBRACKET = L("]").suppress() -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() -COMMA = L(",").suppress() -SEMICOLON = L(";").suppress() -AT = L("@").suppress() - -PUNCTUATION = Word("-_.") -IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) -IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) - -NAME = IDENTIFIER("name") -EXTRA = IDENTIFIER - -URI = Regex(r"[^ ]+")("url") -URL = AT + URI - -EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) -EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") - -VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) -VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) - -VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine( - VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False -)("_raw_spec") -_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") - -VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") -VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) - -MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") -MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start : t._original_end]) -) -MARKER_SEPARATOR = SEMICOLON -MARKER = MARKER_SEPARATOR + MARKER_EXPR - -VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) -URL_AND_MARKER = URL + Optional(MARKER) - -NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) - -REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd -# pyparsing isn't thread safe during initialization, so we do it eagerly, see -# issue #104 
-REQUIREMENT.parseString("x[]") - - -class Requirement(object): - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? - - def __init__(self, requirement_string): - try: - req = REQUIREMENT.parseString(requirement_string) - except ParseException as e: - raise InvalidRequirement( - 'Parse error at "{0!r}": {1}'.format( - requirement_string[e.loc : e.loc + 8], e.msg - ) - ) - - self.name = req.name - if req.url: - parsed_url = urlparse.urlparse(req.url) - if parsed_url.scheme == "file": - if urlparse.urlunparse(parsed_url) != req.url: - raise InvalidRequirement("Invalid URL given") - elif not (parsed_url.scheme and parsed_url.netloc) or ( - not parsed_url.scheme and not parsed_url.netloc - ): - raise InvalidRequirement("Invalid URL: {0}".format(req.url)) - self.url = req.url - else: - self.url = None - self.extras = set(req.extras.asList() if req.extras else []) - self.specifier = SpecifierSet(req.specifier) - self.marker = req.marker if req.marker else None - - def __str__(self): - parts = [self.name] - - if self.extras: - parts.append("[{0}]".format(",".join(sorted(self.extras)))) - - if self.specifier: - parts.append(str(self.specifier)) - - if self.url: - parts.append("@ {0}".format(self.url)) - if self.marker: - parts.append(" ") - - if self.marker: - parts.append("; {0}".format(self.marker)) - - return "".join(parts) - - def __repr__(self): - return "<Requirement({0!r})>".format(str(self)) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.pyc b/env/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.pyc deleted file mode 100644 index cedc4a95..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py b/env/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py deleted file mode 100644 index 743576a0..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py +++ /dev/null @@ -1,749 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import abc -import functools -import itertools -import re - -from ._compat import string_types, with_metaclass -from .version import Version, LegacyVersion, parse - - -class InvalidSpecifier(ValueError): - """ - An invalid specifier was found, users should refer to PEP 440. - """ - - -class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): - @abc.abstractmethod - def __str__(self): - """ - Returns the str representation of this Specifier like object. This - should be representative of the Specifier itself. - """ - - @abc.abstractmethod - def __hash__(self): - """ - Returns a hash value for this Specifier like object. - """ - - @abc.abstractmethod - def __eq__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are equal. 
- """ - - @abc.abstractmethod - def __ne__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are not equal. - """ - - @abc.abstractproperty - def prereleases(self): - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @prereleases.setter - def prereleases(self, value): - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @abc.abstractmethod - def contains(self, item, prereleases=None): - """ - Determines if the given item is contained within this specifier. - """ - - @abc.abstractmethod - def filter(self, iterable, prereleases=None): - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. - """ - - -class _IndividualSpecifier(BaseSpecifier): - - _operators = {} - - def __init__(self, spec="", prereleases=None): - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - - self._spec = (match.group("operator").strip(), match.group("version").strip()) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) - - def __str__(self): - return "{0}{1}".format(*self._spec) - - def __hash__(self): - return hash(self._spec) - - def __eq__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec == other._spec - - def __ne__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec != other._spec - - def _get_operator(self, op): - return getattr(self, "_compare_{0}".format(self._operators[op])) - - def _coerce_version(self, version): - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version - - @property - def operator(self): - return self._spec[0] - - @property - def version(self): - return self._spec[1] - - @property - def prereleases(self): - return self._prereleases - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version or LegacyVersion, this allows us to have - # a shortcut for ``"2.0" in Specifier(">=2") - item = self._coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. 
- return self._get_operator(self.operator)(item, self.version) - - def filter(self, iterable, prereleases=None): - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. - for version in iterable: - parsed_version = self._coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later incase nothing - # else matches this specifier. - if parsed_version.is_prerelease and not ( - prereleases or self.prereleases - ): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -class LegacySpecifier(_IndividualSpecifier): - - _regex_str = r""" - (?P<operator>(==|!=|<=|>=|<|>)) - \s* - (?P<version> - [^,;\s)]* # Since this is a "legacy" specifier, and the version - # string can be just about anything, we match everything - # except for whitespace, a semi-colon for marker support, - # a closing paren since versions can be enclosed in - # them, and a comma since it's a version separator. - ) - """ - - _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - } - - def _coerce_version(self, version): - if not isinstance(version, LegacyVersion): - version = LegacyVersion(str(version)) - return version - - def _compare_equal(self, prospective, spec): - return prospective == self._coerce_version(spec) - - def _compare_not_equal(self, prospective, spec): - return prospective != self._coerce_version(spec) - - def _compare_less_than_equal(self, prospective, spec): - return prospective <= self._coerce_version(spec) - - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= self._coerce_version(spec) - - def _compare_less_than(self, prospective, spec): - return prospective < self._coerce_version(spec) - - def _compare_greater_than(self, prospective, spec): - return prospective > self._coerce_version(spec) - - -def _require_version_compare(fn): - @functools.wraps(fn) - def wrapped(self, prospective, spec): - if not isinstance(prospective, Version): - return False - return fn(self, prospective, spec) - - return wrapped - - -class Specifier(_IndividualSpecifier): - - _regex_str = r""" - (?P<operator>(~=|==|!=|<=|>=|<|>|===)) - (?P<version> - (?: - # The identity operators allow for an escape hatch that will - # do an exact string match of the version you wish to install. - # This will not be parsed by PEP 440 and we cannot determine - # any semantic meaning from it. This operator is discouraged - # but included entirely as an escape hatch. - (?<====) # Only match for the identity operator - \s* - [^\s]* # We just match everything, except for whitespace - # since we are only testing for strict identity. 
- ) - | - (?: - # The (non)equality operators allow for wild card and local - # versions to be specified so we have to define these two - # operators separately to enable that. - (?<===|!=) # Only match for equals and not equals - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - - # You cannot use a wild card and a dev or local version - # together so group them with a | and make them optional. - (?: - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - | - \.\* # Wild card syntax of .* - )? - ) - | - (?: - # The compatible operator requires at least two digits in the - # release segment. - (?<=~=) # Only match for the compatible operator - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - | - (?: - # All other operators only allow a sub set of what the - # (non)equality operators do. Specifically they do not allow - # local versions to be specified nor do they allow the prefix - # matching wild cards. - (?<!==|!=|~=) # We have special cases for these - # operators so we want to make sure they - # don't match here. - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - ) - """ - - _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "~=": "compatible", - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - "===": "arbitrary", - } - - @_require_version_compare - def _compare_compatible(self, prospective, spec): - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore post and dev releases and we want to treat the pre-release as - # it's own separate segment. - prefix = ".".join( - list( - itertools.takewhile( - lambda x: (not x.startswith("post") and not x.startswith("dev")), - _version_split(spec), - ) - )[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( - prospective, prefix - ) - - @_require_version_compare - def _compare_equal(self, prospective, spec): - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - prospective = Version(prospective.public) - # Split the spec out by dots, and pretend that there is an implicit - # dot in between a release segment and a pre-release segment. 
- spec = _version_split(spec[:-2]) # Remove the trailing .* - - # Split the prospective version out by dots, and pretend that there - # is an implicit dot in between a release segment and a pre-release - # segment. - prospective = _version_split(str(prospective)) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - prospective = prospective[: len(spec)] - - # Pad out our two sides with zeros so that they both equal the same - # length. - spec, prospective = _pad_version(spec, prospective) - else: - # Convert our spec string into a Version - spec = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. - if not spec.local: - prospective = Version(prospective.public) - - return prospective == spec - - @_require_version_compare - def _compare_not_equal(self, prospective, spec): - return not self._compare_equal(prospective, spec) - - @_require_version_compare - def _compare_less_than_equal(self, prospective, spec): - return prospective <= Version(spec) - - @_require_version_compare - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= Version(spec) - - @_require_version_compare - def _compare_less_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - @_require_version_compare - def _compare_greater_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is technically greater than, to match. 
- if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective, spec): - return str(prospective).lower() == str(spec).lower() - - @property - def prereleases(self): - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if parse(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version): - result = [] - for item in version.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _pad_version(left, right): - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]) :]) - right_split.append(right[len(right_split[0]) :]) - - # Insert our padding - left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) - right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) - - return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) - - -class SpecifierSet(BaseSpecifier): - def __init__(self, specifiers="", prereleases=None): - # Split on , to break each indidivual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. - specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Parsed each individual specifier, attempting first to make it a - # Specifier and falling back to a LegacySpecifier. - parsed = set() - for specifier in specifiers: - try: - parsed.add(Specifier(specifier)) - except InvalidSpecifier: - parsed.add(LegacySpecifier(specifier)) - - # Turn our parsed specifiers into a frozen set and save them for later. - self._specs = frozenset(parsed) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. 
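The prereleases property defined above answers whether a clause implicitly opts in to pre-releases (it does so only when the pinned version is itself a pre-release); contains() and filter() consult that answer later. A hedged illustration with the standalone `packaging` package:

    from packaging.specifiers import SpecifierSet

    assert not SpecifierSet(">=1.0").contains("1.1a1")               # pre-releases excluded by default
    assert SpecifierSet(">=1.0").contains("1.1a1", prereleases=True)
    assert SpecifierSet(">=1.1a1").prereleases                       # the clause itself names a pre-release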
- self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "<SpecifierSet({0!r}{1})>".format(str(self), pre) - - def __str__(self): - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self): - return hash(self._specs) - - def __and__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." - ) - - return specifier - - def __eq__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __ne__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs != other._specs - - def __len__(self): - return len(self._specs) - - def __iter__(self): - return iter(self._specs) - - @property - def prereleases(self): - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Ensure that our item is a Version or LegacyVersion instance. - if not isinstance(item, (LegacyVersion, Version)): - item = parse(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. - # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. 
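SpecifierSet.__and__ above merges two sets into one frozenset of clauses, and contains() then requires every clause to accept the candidate. A hedged example:

    from packaging.specifiers import SpecifierSet

    combined = SpecifierSet(">=2.6") & SpecifierSet("!=3.0.*, <4")
    assert "2.7.18" in combined      # __contains__ just delegates to contains()
    assert "3.0.1" not in combined   # rejected by the != prefix clause
    print(combined)                  # clauses are rendered sorted and comma-joined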
- # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all(s.contains(item, prereleases=prereleases) for s in self._specs) - - def filter(self, iterable, prereleases=None): - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iterable - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases, and which will filter out LegacyVersion in general. - else: - filtered = [] - found_prereleases = [] - - for item in iterable: - # Ensure that we some kind of Version class for this item. - if not isinstance(item, (LegacyVersion, Version)): - parsed_version = parse(item) - else: - parsed_version = item - - # Filter out any item which is parsed as a LegacyVersion - if isinstance(parsed_version, LegacyVersion): - continue - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return found_prereleases - - return filtered diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.pyc b/env/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.pyc deleted file mode 100644 index 9eaaeb71..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/utils.py b/env/lib/python2.7/site-packages/pip/_vendor/packaging/utils.py deleted file mode 100644 index 88418786..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/packaging/utils.py +++ /dev/null @@ -1,57 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import re - -from .version import InvalidVersion, Version - - -_canonicalize_regex = re.compile(r"[-_.]+") - - -def canonicalize_name(name): - # This is taken from PEP 503. - return _canonicalize_regex.sub("-", name).lower() - - -def canonicalize_version(version): - """ - This is very similar to Version.__str__, but has one subtle differences - with the way it handles the release segment. 
- """ - - try: - version = Version(version) - except InvalidVersion: - # Legacy versions cannot be normalized - return version - - parts = [] - - # Epoch - if version.epoch != 0: - parts.append("{0}!".format(version.epoch)) - - # Release segment - # NB: This strips trailing '.0's to normalize - parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release))) - - # Pre-release - if version.pre is not None: - parts.append("".join(str(x) for x in version.pre)) - - # Post-release - if version.post is not None: - parts.append(".post{0}".format(version.post)) - - # Development release - if version.dev is not None: - parts.append(".dev{0}".format(version.dev)) - - # Local version segment - if version.local is not None: - parts.append("+{0}".format(version.local)) - - return "".join(parts) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/utils.pyc b/env/lib/python2.7/site-packages/pip/_vendor/packaging/utils.pyc deleted file mode 100644 index de950173..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/packaging/utils.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/version.py b/env/lib/python2.7/site-packages/pip/_vendor/packaging/version.py deleted file mode 100644 index 95157a1f..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/packaging/version.py +++ /dev/null @@ -1,420 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import collections -import itertools -import re - -from ._structures import Infinity - - -__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] - - -_Version = collections.namedtuple( - "_Version", ["epoch", "release", "dev", "pre", "post", "local"] -) - - -def parse(version): - """ - Parse the given version string and return either a :class:`Version` object - or a :class:`LegacyVersion` object depending on if the given version is - a valid PEP 440 version or a legacy version. - """ - try: - return Version(version) - except InvalidVersion: - return LegacyVersion(version) - - -class InvalidVersion(ValueError): - """ - An invalid version was found, users should refer to PEP 440. 
- """ - - -class _BaseVersion(object): - def __hash__(self): - return hash(self._key) - - def __lt__(self, other): - return self._compare(other, lambda s, o: s < o) - - def __le__(self, other): - return self._compare(other, lambda s, o: s <= o) - - def __eq__(self, other): - return self._compare(other, lambda s, o: s == o) - - def __ge__(self, other): - return self._compare(other, lambda s, o: s >= o) - - def __gt__(self, other): - return self._compare(other, lambda s, o: s > o) - - def __ne__(self, other): - return self._compare(other, lambda s, o: s != o) - - def _compare(self, other, method): - if not isinstance(other, _BaseVersion): - return NotImplemented - - return method(self._key, other._key) - - -class LegacyVersion(_BaseVersion): - def __init__(self, version): - self._version = str(version) - self._key = _legacy_cmpkey(self._version) - - def __str__(self): - return self._version - - def __repr__(self): - return "<LegacyVersion({0})>".format(repr(str(self))) - - @property - def public(self): - return self._version - - @property - def base_version(self): - return self._version - - @property - def epoch(self): - return -1 - - @property - def release(self): - return None - - @property - def pre(self): - return None - - @property - def post(self): - return None - - @property - def dev(self): - return None - - @property - def local(self): - return None - - @property - def is_prerelease(self): - return False - - @property - def is_postrelease(self): - return False - - @property - def is_devrelease(self): - return False - - -_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) - -_legacy_version_replacement_map = { - "pre": "c", - "preview": "c", - "-": "final-", - "rc": "c", - "dev": "@", -} - - -def _parse_version_parts(s): - for part in _legacy_version_component_re.split(s): - part = _legacy_version_replacement_map.get(part, part) - - if not part or part == ".": - continue - - if part[:1] in "0123456789": - # pad for numeric comparison - yield part.zfill(8) - else: - yield "*" + part - - # ensure that alpha/beta/candidate are before final - yield "*final" - - -def _legacy_cmpkey(version): - # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch - # greater than or equal to 0. This will effectively put the LegacyVersion, - # which uses the defacto standard originally implemented by setuptools, - # as before all PEP 440 versions. - epoch = -1 - - # This scheme is taken from pkg_resources.parse_version setuptools prior to - # it's adoption of the packaging library. - parts = [] - for part in _parse_version_parts(version.lower()): - if part.startswith("*"): - # remove "-" before a prerelease tag - if part < "*final": - while parts and parts[-1] == "*final-": - parts.pop() - - # remove trailing zeros from each series of numeric parts - while parts and parts[-1] == "00000000": - parts.pop() - - parts.append(part) - parts = tuple(parts) - - return epoch, parts - - -# Deliberately not anchored to the start and end of the string, to make it -# easier for 3rd party code to reuse -VERSION_PATTERN = r""" - v? - (?: - (?:(?P<epoch>[0-9]+)!)? # epoch - (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment - (?P<pre> # pre-release - [-_\.]? - (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) - [-_\.]? - (?P<pre_n>[0-9]+)? - )? - (?P<post> # post release - (?:-(?P<post_n1>[0-9]+)) - | - (?: - [-_\.]? - (?P<post_l>post|rev|r) - [-_\.]? - (?P<post_n2>[0-9]+)? - ) - )? - (?P<dev> # dev release - [-_\.]? - (?P<dev_l>dev) - [-_\.]? - (?P<dev_n>[0-9]+)? - )? 
- ) - (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version -""" - - -class Version(_BaseVersion): - - _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE) - - def __init__(self, version): - # Validate the version and parse it into pieces - match = self._regex.search(version) - if not match: - raise InvalidVersion("Invalid version: '{0}'".format(version)) - - # Store the parsed out pieces of the version - self._version = _Version( - epoch=int(match.group("epoch")) if match.group("epoch") else 0, - release=tuple(int(i) for i in match.group("release").split(".")), - pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")), - post=_parse_letter_version( - match.group("post_l"), match.group("post_n1") or match.group("post_n2") - ), - dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")), - local=_parse_local_version(match.group("local")), - ) - - # Generate a key which will be used for sorting - self._key = _cmpkey( - self._version.epoch, - self._version.release, - self._version.pre, - self._version.post, - self._version.dev, - self._version.local, - ) - - def __repr__(self): - return "<Version({0})>".format(repr(str(self))) - - def __str__(self): - parts = [] - - # Epoch - if self.epoch != 0: - parts.append("{0}!".format(self.epoch)) - - # Release segment - parts.append(".".join(str(x) for x in self.release)) - - # Pre-release - if self.pre is not None: - parts.append("".join(str(x) for x in self.pre)) - - # Post-release - if self.post is not None: - parts.append(".post{0}".format(self.post)) - - # Development release - if self.dev is not None: - parts.append(".dev{0}".format(self.dev)) - - # Local version segment - if self.local is not None: - parts.append("+{0}".format(self.local)) - - return "".join(parts) - - @property - def epoch(self): - return self._version.epoch - - @property - def release(self): - return self._version.release - - @property - def pre(self): - return self._version.pre - - @property - def post(self): - return self._version.post[1] if self._version.post else None - - @property - def dev(self): - return self._version.dev[1] if self._version.dev else None - - @property - def local(self): - if self._version.local: - return ".".join(str(x) for x in self._version.local) - else: - return None - - @property - def public(self): - return str(self).split("+", 1)[0] - - @property - def base_version(self): - parts = [] - - # Epoch - if self.epoch != 0: - parts.append("{0}!".format(self.epoch)) - - # Release segment - parts.append(".".join(str(x) for x in self.release)) - - return "".join(parts) - - @property - def is_prerelease(self): - return self.dev is not None or self.pre is not None - - @property - def is_postrelease(self): - return self.post is not None - - @property - def is_devrelease(self): - return self.dev is not None - - -def _parse_letter_version(letter, number): - if letter: - # We consider there to be an implicit 0 in a pre-release if there is - # not a numeral associated with it. - if number is None: - number = 0 - - # We normalize any letters to their lower case form - letter = letter.lower() - - # We consider some words to be alternate spellings of other words and - # in those cases we want to normalize the spellings to our preferred - # spelling. 
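Version.__str__ above reassembles the parsed segments into the normalized spelling, and the properties expose each piece individually. A hedged illustration:

    from packaging.version import Version

    v = Version("1!2.0.post3.dev4+ubuntu.1")
    assert (v.epoch, v.release, v.post, v.dev, v.local) == (1, (2, 0), 3, 4, "ubuntu.1")
    assert v.public == "1!2.0.post3.dev4"
    assert v.base_version == "1!2.0"
    assert str(Version("1.0.0-ALPHA.2")) == "1.0.0a2"   # alternate spellings are normalized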
- if letter == "alpha": - letter = "a" - elif letter == "beta": - letter = "b" - elif letter in ["c", "pre", "preview"]: - letter = "rc" - elif letter in ["rev", "r"]: - letter = "post" - - return letter, int(number) - if not letter and number: - # We assume if we are given a number, but we are not given a letter - # then this is using the implicit post release syntax (e.g. 1.0-1) - letter = "post" - - return letter, int(number) - - -_local_version_separators = re.compile(r"[\._-]") - - -def _parse_local_version(local): - """ - Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). - """ - if local is not None: - return tuple( - part.lower() if not part.isdigit() else int(part) - for part in _local_version_separators.split(local) - ) - - -def _cmpkey(epoch, release, pre, post, dev, local): - # When we compare a release version, we want to compare it with all of the - # trailing zeros removed. So we'll use a reverse the list, drop all the now - # leading zeros until we come to something non zero, then take the rest - # re-reverse it back into the correct order and make it a tuple and use - # that for our sorting key. - release = tuple( - reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))) - ) - - # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. - # We'll do this by abusing the pre segment, but we _only_ want to do this - # if there is not a pre or a post segment. If we have one of those then - # the normal sorting rules will handle this case correctly. - if pre is None and post is None and dev is not None: - pre = -Infinity - # Versions without a pre-release (except as noted above) should sort after - # those with one. - elif pre is None: - pre = Infinity - - # Versions without a post segment should sort before those with one. - if post is None: - post = -Infinity - - # Versions without a development segment should sort after those with one. - if dev is None: - dev = Infinity - - if local is None: - # Versions without a local segment should sort before those with one. - local = -Infinity - else: - # Versions with a local segment need that segment parsed to implement - # the sorting rules in PEP440. 
- # - Alpha numeric segments sort before numeric segments - # - Alpha numeric segments sort lexicographically - # - Numeric segments sort numerically - # - Shorter versions sort before longer versions when the prefixes - # match exactly - local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local) - - return epoch, release, pre, post, dev, local diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/version.pyc b/env/lib/python2.7/site-packages/pip/_vendor/packaging/version.pyc deleted file mode 100644 index a7e06bc5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/packaging/version.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/pep517/__init__.py deleted file mode 100644 index 9c1a098f..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pep517/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -"""Wrappers to build Python packages using PEP 517 hooks -""" - -__version__ = '0.5.0' diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pep517/__init__.pyc deleted file mode 100644 index 7d100a88..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pep517/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/_in_process.py b/env/lib/python2.7/site-packages/pip/_vendor/pep517/_in_process.py deleted file mode 100644 index d6524b66..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pep517/_in_process.py +++ /dev/null @@ -1,207 +0,0 @@ -"""This is invoked in a subprocess to call the build backend hooks. - -It expects: -- Command line args: hook_name, control_dir -- Environment variable: PEP517_BUILD_BACKEND=entry.point:spec -- control_dir/input.json: - - {"kwargs": {...}} - -Results: -- control_dir/output.json - - {"return_val": ...} -""" -from glob import glob -from importlib import import_module -import os -from os.path import join as pjoin -import re -import shutil -import sys - -# This is run as a script, not a module, so it can't do a relative import -import compat - - -class BackendUnavailable(Exception): - """Raised if we cannot import the backend""" - - -def _build_backend(): - """Find and load the build backend""" - ep = os.environ['PEP517_BUILD_BACKEND'] - mod_path, _, obj_path = ep.partition(':') - try: - obj = import_module(mod_path) - except ImportError: - raise BackendUnavailable - if obj_path: - for path_part in obj_path.split('.'): - obj = getattr(obj, path_part) - return obj - - -def get_requires_for_build_wheel(config_settings): - """Invoke the optional get_requires_for_build_wheel hook - - Returns [] if the hook is not defined. - """ - backend = _build_backend() - try: - hook = backend.get_requires_for_build_wheel - except AttributeError: - return [] - else: - return hook(config_settings) - - -def prepare_metadata_for_build_wheel(metadata_directory, config_settings): - """Invoke optional prepare_metadata_for_build_wheel - - Implements a fallback by building a wheel if the hook isn't defined. 
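The _cmpkey gymnastics above (dev releases forced before pre-releases, missing segments replaced with ±Infinity, local parts mapped to comparable tuples) produce the PEP 440 total ordering. A hedged check:

    from packaging.version import Version

    ordered = ["1.0.dev0", "1.0a1", "1.0rc1", "1.0", "1.0+local.1", "1.0.post1", "1.1"]
    assert [str(v) for v in sorted(map(Version, ordered))] == ordered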
- """ - backend = _build_backend() - try: - hook = backend.prepare_metadata_for_build_wheel - except AttributeError: - return _get_wheel_metadata_from_wheel(backend, metadata_directory, - config_settings) - else: - return hook(metadata_directory, config_settings) - - -WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL' - - -def _dist_info_files(whl_zip): - """Identify the .dist-info folder inside a wheel ZipFile.""" - res = [] - for path in whl_zip.namelist(): - m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path) - if m: - res.append(path) - if res: - return res - raise Exception("No .dist-info folder found in wheel") - - -def _get_wheel_metadata_from_wheel( - backend, metadata_directory, config_settings): - """Build a wheel and extract the metadata from it. - - Fallback for when the build backend does not - define the 'get_wheel_metadata' hook. - """ - from zipfile import ZipFile - whl_basename = backend.build_wheel(metadata_directory, config_settings) - with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'): - pass # Touch marker file - - whl_file = os.path.join(metadata_directory, whl_basename) - with ZipFile(whl_file) as zipf: - dist_info = _dist_info_files(zipf) - zipf.extractall(path=metadata_directory, members=dist_info) - return dist_info[0].split('/')[0] - - -def _find_already_built_wheel(metadata_directory): - """Check for a wheel already built during the get_wheel_metadata hook. - """ - if not metadata_directory: - return None - metadata_parent = os.path.dirname(metadata_directory) - if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)): - return None - - whl_files = glob(os.path.join(metadata_parent, '*.whl')) - if not whl_files: - print('Found wheel built marker, but no .whl files') - return None - if len(whl_files) > 1: - print('Found multiple .whl files; unspecified behaviour. ' - 'Will call build_wheel.') - return None - - # Exactly one .whl file - return whl_files[0] - - -def build_wheel(wheel_directory, config_settings, metadata_directory=None): - """Invoke the mandatory build_wheel hook. - - If a wheel was already built in the - prepare_metadata_for_build_wheel fallback, this - will copy it rather than rebuilding the wheel. - """ - prebuilt_whl = _find_already_built_wheel(metadata_directory) - if prebuilt_whl: - shutil.copy2(prebuilt_whl, wheel_directory) - return os.path.basename(prebuilt_whl) - - return _build_backend().build_wheel(wheel_directory, config_settings, - metadata_directory) - - -def get_requires_for_build_sdist(config_settings): - """Invoke the optional get_requires_for_build_wheel hook - - Returns [] if the hook is not defined. 
- """ - backend = _build_backend() - try: - hook = backend.get_requires_for_build_sdist - except AttributeError: - return [] - else: - return hook(config_settings) - - -class _DummyException(Exception): - """Nothing should ever raise this exception""" - - -class GotUnsupportedOperation(Exception): - """For internal use when backend raises UnsupportedOperation""" - - -def build_sdist(sdist_directory, config_settings): - """Invoke the mandatory build_sdist hook.""" - backend = _build_backend() - try: - return backend.build_sdist(sdist_directory, config_settings) - except getattr(backend, 'UnsupportedOperation', _DummyException): - raise GotUnsupportedOperation - - -HOOK_NAMES = { - 'get_requires_for_build_wheel', - 'prepare_metadata_for_build_wheel', - 'build_wheel', - 'get_requires_for_build_sdist', - 'build_sdist', -} - - -def main(): - if len(sys.argv) < 3: - sys.exit("Needs args: hook_name, control_dir") - hook_name = sys.argv[1] - control_dir = sys.argv[2] - if hook_name not in HOOK_NAMES: - sys.exit("Unknown hook: %s" % hook_name) - hook = globals()[hook_name] - - hook_input = compat.read_json(pjoin(control_dir, 'input.json')) - - json_out = {'unsupported': False, 'return_val': None} - try: - json_out['return_val'] = hook(**hook_input['kwargs']) - except BackendUnavailable: - json_out['no_backend'] = True - except GotUnsupportedOperation: - json_out['unsupported'] = True - - compat.write_json(json_out, pjoin(control_dir, 'output.json'), indent=2) - - -if __name__ == '__main__': - main() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/_in_process.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pep517/_in_process.pyc deleted file mode 100644 index 96e50163..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pep517/_in_process.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/build.py b/env/lib/python2.7/site-packages/pip/_vendor/pep517/build.py deleted file mode 100644 index ac6c9495..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pep517/build.py +++ /dev/null @@ -1,108 +0,0 @@ -"""Build a project using PEP 517 hooks. -""" -import argparse -import logging -import os -import contextlib -from pip._vendor import pytoml -import shutil -import errno -import tempfile - -from .envbuild import BuildEnvironment -from .wrappers import Pep517HookCaller - -log = logging.getLogger(__name__) - - -@contextlib.contextmanager -def tempdir(): - td = tempfile.mkdtemp() - try: - yield td - finally: - shutil.rmtree(td) - - -def _do_build(hooks, env, dist, dest): - get_requires_name = 'get_requires_for_build_{dist}'.format(**locals()) - get_requires = getattr(hooks, get_requires_name) - reqs = get_requires({}) - log.info('Got build requires: %s', reqs) - - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - - with tempdir() as td: - log.info('Trying to build %s in %s', dist, td) - build_name = 'build_{dist}'.format(**locals()) - build = getattr(hooks, build_name) - filename = build(td, {}) - source = os.path.join(td, filename) - shutil.move(source, os.path.join(dest, os.path.basename(filename))) - - -def mkdir_p(*args, **kwargs): - """Like `mkdir`, but does not raise an exception if the - directory already exists. 
- """ - try: - return os.mkdir(*args, **kwargs) - except OSError as exc: - if exc.errno != errno.EEXIST: - raise - - -def build(source_dir, dist, dest=None): - pyproject = os.path.join(source_dir, 'pyproject.toml') - dest = os.path.join(source_dir, dest or 'dist') - mkdir_p(dest) - - with open(pyproject) as f: - pyproject_data = pytoml.load(f) - # Ensure the mandatory data can be loaded - buildsys = pyproject_data['build-system'] - requires = buildsys['requires'] - backend = buildsys['build-backend'] - - hooks = Pep517HookCaller(source_dir, backend) - - with BuildEnvironment() as env: - env.pip_install(requires) - _do_build(hooks, env, dist, dest) - - -parser = argparse.ArgumentParser() -parser.add_argument( - 'source_dir', - help="A directory containing pyproject.toml", -) -parser.add_argument( - '--binary', '-b', - action='store_true', - default=False, -) -parser.add_argument( - '--source', '-s', - action='store_true', - default=False, -) -parser.add_argument( - '--out-dir', '-o', - help="Destination in which to save the builds relative to source dir", -) - - -def main(args): - # determine which dists to build - dists = list(filter(None, ( - 'sdist' if args.source or not args.binary else None, - 'wheel' if args.binary or not args.source else None, - ))) - - for dist in dists: - build(args.source_dir, dist, args.out_dir) - - -if __name__ == '__main__': - main(parser.parse_args()) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/build.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pep517/build.pyc deleted file mode 100644 index 7778fb63..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pep517/build.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/check.py b/env/lib/python2.7/site-packages/pip/_vendor/pep517/check.py deleted file mode 100644 index f4cdc6be..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pep517/check.py +++ /dev/null @@ -1,202 +0,0 @@ -"""Check a project and backend by attempting to build using PEP 517 hooks. 
-""" -import argparse -import logging -import os -from os.path import isfile, join as pjoin -from pip._vendor.pytoml import TomlError, load as toml_load -import shutil -from subprocess import CalledProcessError -import sys -import tarfile -from tempfile import mkdtemp -import zipfile - -from .colorlog import enable_colourful_output -from .envbuild import BuildEnvironment -from .wrappers import Pep517HookCaller - -log = logging.getLogger(__name__) - - -def check_build_sdist(hooks, build_sys_requires): - with BuildEnvironment() as env: - try: - env.pip_install(build_sys_requires) - log.info('Installed static build dependencies') - except CalledProcessError: - log.error('Failed to install static build dependencies') - return False - - try: - reqs = hooks.get_requires_for_build_sdist({}) - log.info('Got build requires: %s', reqs) - except Exception: - log.error('Failure in get_requires_for_build_sdist', exc_info=True) - return False - - try: - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - except CalledProcessError: - log.error('Failed to install dynamic build dependencies') - return False - - td = mkdtemp() - log.info('Trying to build sdist in %s', td) - try: - try: - filename = hooks.build_sdist(td, {}) - log.info('build_sdist returned %r', filename) - except Exception: - log.info('Failure in build_sdist', exc_info=True) - return False - - if not filename.endswith('.tar.gz'): - log.error( - "Filename %s doesn't have .tar.gz extension", filename) - return False - - path = pjoin(td, filename) - if isfile(path): - log.info("Output file %s exists", path) - else: - log.error("Output file %s does not exist", path) - return False - - if tarfile.is_tarfile(path): - log.info("Output file is a tar file") - else: - log.error("Output file is not a tar file") - return False - - finally: - shutil.rmtree(td) - - return True - - -def check_build_wheel(hooks, build_sys_requires): - with BuildEnvironment() as env: - try: - env.pip_install(build_sys_requires) - log.info('Installed static build dependencies') - except CalledProcessError: - log.error('Failed to install static build dependencies') - return False - - try: - reqs = hooks.get_requires_for_build_wheel({}) - log.info('Got build requires: %s', reqs) - except Exception: - log.error('Failure in get_requires_for_build_sdist', exc_info=True) - return False - - try: - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - except CalledProcessError: - log.error('Failed to install dynamic build dependencies') - return False - - td = mkdtemp() - log.info('Trying to build wheel in %s', td) - try: - try: - filename = hooks.build_wheel(td, {}) - log.info('build_wheel returned %r', filename) - except Exception: - log.info('Failure in build_wheel', exc_info=True) - return False - - if not filename.endswith('.whl'): - log.error("Filename %s doesn't have .whl extension", filename) - return False - - path = pjoin(td, filename) - if isfile(path): - log.info("Output file %s exists", path) - else: - log.error("Output file %s does not exist", path) - return False - - if zipfile.is_zipfile(path): - log.info("Output file is a zip file") - else: - log.error("Output file is not a zip file") - return False - - finally: - shutil.rmtree(td) - - return True - - -def check(source_dir): - pyproject = pjoin(source_dir, 'pyproject.toml') - if isfile(pyproject): - log.info('Found pyproject.toml') - else: - log.error('Missing pyproject.toml') - return False - - try: - with open(pyproject) as f: - pyproject_data = toml_load(f) - # Ensure 
the mandatory data can be loaded - buildsys = pyproject_data['build-system'] - requires = buildsys['requires'] - backend = buildsys['build-backend'] - log.info('Loaded pyproject.toml') - except (TomlError, KeyError): - log.error("Invalid pyproject.toml", exc_info=True) - return False - - hooks = Pep517HookCaller(source_dir, backend) - - sdist_ok = check_build_sdist(hooks, requires) - wheel_ok = check_build_wheel(hooks, requires) - - if not sdist_ok: - log.warning('Sdist checks failed; scroll up to see') - if not wheel_ok: - log.warning('Wheel checks failed') - - return sdist_ok - - -def main(argv=None): - ap = argparse.ArgumentParser() - ap.add_argument( - 'source_dir', - help="A directory containing pyproject.toml") - args = ap.parse_args(argv) - - enable_colourful_output() - - ok = check(args.source_dir) - - if ok: - print(ansi('Checks passed', 'green')) - else: - print(ansi('Checks failed', 'red')) - sys.exit(1) - - -ansi_codes = { - 'reset': '\x1b[0m', - 'bold': '\x1b[1m', - 'red': '\x1b[31m', - 'green': '\x1b[32m', -} - - -def ansi(s, attr): - if os.name != 'nt' and sys.stdout.isatty(): - return ansi_codes[attr] + str(s) + ansi_codes['reset'] - else: - return str(s) - - -if __name__ == '__main__': - main() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/check.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pep517/check.pyc deleted file mode 100644 index 8500ee5d..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pep517/check.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/colorlog.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pep517/colorlog.pyc deleted file mode 100644 index 1d26d0dc..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pep517/colorlog.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/compat.py b/env/lib/python2.7/site-packages/pip/_vendor/pep517/compat.py deleted file mode 100644 index 01c66fc7..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pep517/compat.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Handle reading and writing JSON in UTF-8, on Python 3 and 2.""" -import json -import sys - -if sys.version_info[0] >= 3: - # Python 3 - def write_json(obj, path, **kwargs): - with open(path, 'w', encoding='utf-8') as f: - json.dump(obj, f, **kwargs) - - def read_json(path): - with open(path, 'r', encoding='utf-8') as f: - return json.load(f) - -else: - # Python 2 - def write_json(obj, path, **kwargs): - with open(path, 'wb') as f: - json.dump(obj, f, encoding='utf-8', **kwargs) - - def read_json(path): - with open(path, 'rb') as f: - return json.load(f) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/compat.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pep517/compat.pyc deleted file mode 100644 index d49abfa9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pep517/compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/envbuild.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pep517/envbuild.pyc deleted file mode 100644 index c2f5bb15..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pep517/envbuild.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/wrappers.py b/env/lib/python2.7/site-packages/pip/_vendor/pep517/wrappers.py deleted file mode 100644 index b14b8991..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pep517/wrappers.py +++ /dev/null @@ -1,163 +0,0 @@ -from contextlib import 
contextmanager -import os -from os.path import dirname, abspath, join as pjoin -import shutil -from subprocess import check_call -import sys -from tempfile import mkdtemp - -from . import compat - -_in_proc_script = pjoin(dirname(abspath(__file__)), '_in_process.py') - - -@contextmanager -def tempdir(): - td = mkdtemp() - try: - yield td - finally: - shutil.rmtree(td) - - -class BackendUnavailable(Exception): - """Will be raised if the backend cannot be imported in the hook process.""" - - -class UnsupportedOperation(Exception): - """May be raised by build_sdist if the backend indicates that it can't.""" - - -def default_subprocess_runner(cmd, cwd=None, extra_environ=None): - """The default method of calling the wrapper subprocess.""" - env = os.environ.copy() - if extra_environ: - env.update(extra_environ) - - check_call(cmd, cwd=cwd, env=env) - - -class Pep517HookCaller(object): - """A wrapper around a source directory to be built with a PEP 517 backend. - - source_dir : The path to the source directory, containing pyproject.toml. - backend : The build backend spec, as per PEP 517, from pyproject.toml. - """ - def __init__(self, source_dir, build_backend): - self.source_dir = abspath(source_dir) - self.build_backend = build_backend - self._subprocess_runner = default_subprocess_runner - - # TODO: Is this over-engineered? Maybe frontends only need to - # set this when creating the wrapper, not on every call. - @contextmanager - def subprocess_runner(self, runner): - prev = self._subprocess_runner - self._subprocess_runner = runner - yield - self._subprocess_runner = prev - - def get_requires_for_build_wheel(self, config_settings=None): - """Identify packages required for building a wheel - - Returns a list of dependency specifications, e.g.: - ["wheel >= 0.25", "setuptools"] - - This does not include requirements specified in pyproject.toml. - It returns the result of calling the equivalently named hook in a - subprocess. - """ - return self._call_hook('get_requires_for_build_wheel', { - 'config_settings': config_settings - }) - - def prepare_metadata_for_build_wheel( - self, metadata_directory, config_settings=None): - """Prepare a *.dist-info folder with metadata for this project. - - Returns the name of the newly created folder. - - If the build backend defines a hook with this name, it will be called - in a subprocess. If not, the backend will be asked to build a wheel, - and the dist-info extracted from that. - """ - return self._call_hook('prepare_metadata_for_build_wheel', { - 'metadata_directory': abspath(metadata_directory), - 'config_settings': config_settings, - }) - - def build_wheel( - self, wheel_directory, config_settings=None, - metadata_directory=None): - """Build a wheel from this project. - - Returns the name of the newly created file. - - In general, this will call the 'build_wheel' hook in the backend. - However, if that was previously called by - 'prepare_metadata_for_build_wheel', and the same metadata_directory is - used, the previously built wheel will be copied to wheel_directory. 
- """ - if metadata_directory is not None: - metadata_directory = abspath(metadata_directory) - return self._call_hook('build_wheel', { - 'wheel_directory': abspath(wheel_directory), - 'config_settings': config_settings, - 'metadata_directory': metadata_directory, - }) - - def get_requires_for_build_sdist(self, config_settings=None): - """Identify packages required for building a wheel - - Returns a list of dependency specifications, e.g.: - ["setuptools >= 26"] - - This does not include requirements specified in pyproject.toml. - It returns the result of calling the equivalently named hook in a - subprocess. - """ - return self._call_hook('get_requires_for_build_sdist', { - 'config_settings': config_settings - }) - - def build_sdist(self, sdist_directory, config_settings=None): - """Build an sdist from this project. - - Returns the name of the newly created file. - - This calls the 'build_sdist' backend hook in a subprocess. - """ - return self._call_hook('build_sdist', { - 'sdist_directory': abspath(sdist_directory), - 'config_settings': config_settings, - }) - - def _call_hook(self, hook_name, kwargs): - # On Python 2, pytoml returns Unicode values (which is correct) but the - # environment passed to check_call needs to contain string values. We - # convert here by encoding using ASCII (the backend can only contain - # letters, digits and _, . and : characters, and will be used as a - # Python identifier, so non-ASCII content is wrong on Python 2 in - # any case). - if sys.version_info[0] == 2: - build_backend = self.build_backend.encode('ASCII') - else: - build_backend = self.build_backend - - with tempdir() as td: - compat.write_json({'kwargs': kwargs}, pjoin(td, 'input.json'), - indent=2) - - # Run the hook in a subprocess - self._subprocess_runner( - [sys.executable, _in_proc_script, hook_name, td], - cwd=self.source_dir, - extra_environ={'PEP517_BUILD_BACKEND': build_backend} - ) - - data = compat.read_json(pjoin(td, 'output.json')) - if data.get('unsupported'): - raise UnsupportedOperation - if data.get('no_backend'): - raise BackendUnavailable - return data['return_val'] diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/wrappers.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pep517/wrappers.pyc deleted file mode 100644 index 0fcb3725..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pep517/wrappers.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.py deleted file mode 100644 index fdd40de4..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.py +++ /dev/null @@ -1,3286 +0,0 @@ -# coding: utf-8 -""" -Package resource API --------------------- - -A resource is a logical file contained within a package, or a logical -subdirectory thereof. The package resource API expects resource names -to have their path parts separated with ``/``, *not* whatever the local -path separator is. Do not use os.path operations to manipulate resource -names being passed into the API. - -The package resource API is designed to work with normal filesystem packages, -.egg files, and unpacked .egg files. It can also work in a limited way with -.zip files and with custom PEP 302 loaders that support the ``get_data()`` -method. 
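Pep517HookCaller above is the parent-process side of the same protocol: each public method serializes its kwargs and runs the matching hook through _in_process.py in a subprocess. A hedged usage sketch against this vendored API (source path and backend are placeholders):

    from pip._vendor.pep517.wrappers import Pep517HookCaller

    hooks = Pep517HookCaller("/path/to/project", "setuptools.build_meta")

    print(hooks.get_requires_for_build_wheel())       # e.g. ["wheel"]
    whl_name = hooks.build_wheel("/tmp/wheelhouse")   # returns the created file's name
    print(whl_name)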
-""" - -from __future__ import absolute_import - -import sys -import os -import io -import time -import re -import types -import zipfile -import zipimport -import warnings -import stat -import functools -import pkgutil -import operator -import platform -import collections -import plistlib -import email.parser -import errno -import tempfile -import textwrap -import itertools -import inspect -import ntpath -import posixpath -from pkgutil import get_importer - -try: - import _imp -except ImportError: - # Python 3.2 compatibility - import imp as _imp - -try: - FileExistsError -except NameError: - FileExistsError = OSError - -from pip._vendor import six -from pip._vendor.six.moves import urllib, map, filter - -# capture these to bypass sandboxing -from os import utime -try: - from os import mkdir, rename, unlink - WRITE_SUPPORT = True -except ImportError: - # no write support, probably under GAE - WRITE_SUPPORT = False - -from os import open as os_open -from os.path import isdir, split - -try: - import importlib.machinery as importlib_machinery - # access attribute to force import under delayed import mechanisms. - importlib_machinery.__name__ -except ImportError: - importlib_machinery = None - -from . import py31compat -from pip._vendor import appdirs -from pip._vendor import packaging -__import__('pip._vendor.packaging.version') -__import__('pip._vendor.packaging.specifiers') -__import__('pip._vendor.packaging.requirements') -__import__('pip._vendor.packaging.markers') - - -__metaclass__ = type - - -if (3, 0) < sys.version_info < (3, 4): - raise RuntimeError("Python 3.4 or later is required") - -if six.PY2: - # Those builtin exceptions are only defined in Python 3 - PermissionError = None - NotADirectoryError = None - -# declare some globals that will be defined later to -# satisfy the linters. -require = None -working_set = None -add_activation_listener = None -resources_stream = None -cleanup_resources = None -resource_dir = None -resource_stream = None -set_extraction_path = None -resource_isdir = None -resource_string = None -iter_entry_points = None -resource_listdir = None -resource_filename = None -resource_exists = None -_distribution_finders = None -_namespace_handlers = None -_namespace_packages = None - - -class PEP440Warning(RuntimeWarning): - """ - Used when there is an issue with a version or specifier not complying with - PEP 440. - """ - - -def parse_version(v): - try: - return packaging.version.Version(v) - except packaging.version.InvalidVersion: - return packaging.version.LegacyVersion(v) - - -_state_vars = {} - - -def _declare_state(vartype, **kw): - globals().update(kw) - _state_vars.update(dict.fromkeys(kw, vartype)) - - -def __getstate__(): - state = {} - g = globals() - for k, v in _state_vars.items(): - state[k] = g['_sget_' + v](g[k]) - return state - - -def __setstate__(state): - g = globals() - for k, v in state.items(): - g['_sset_' + _state_vars[k]](k, g[k], v) - return state - - -def _sget_dict(val): - return val.copy() - - -def _sset_dict(key, ob, state): - ob.clear() - ob.update(state) - - -def _sget_object(val): - return val.__getstate__() - - -def _sset_object(key, ob, state): - ob.__setstate__(state) - - -_sget_none = _sset_none = lambda *args: None - - -def get_supported_platform(): - """Return this platform's maximum compatible version. - - distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by - distutils. 
But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the - current version of the OS. - - If this condition occurs for any other platform with a version in its - platform strings, this function should be extended accordingly. - """ - plat = get_build_platform() - m = macosVersionString.match(plat) - if m is not None and sys.platform == "darwin": - try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) - except ValueError: - # not Mac OS X - pass - return plat - - -__all__ = [ - # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', - 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - - # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', - 'get_default_cache', - - # Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - - # Exceptions - 'ResolutionError', 'VersionConflict', 'DistributionNotFound', - 'UnknownExtra', 'ExtractionError', - - # Warnings - 'PEP440Warning', - - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', - - # filesystem utilities - 'ensure_directory', 'normalize_path', - - # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - - # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - - # Warnings - 'PkgResourcesDeprecationWarning', - - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', -] - - -class ResolutionError(Exception): - """Abstract base for dependency resolution errors""" - - def __repr__(self): - return self.__class__.__name__ + repr(self.args) - - -class VersionConflict(ResolutionError): - """ - An already-installed version conflicts with the requested version. - - Should be initialized with the installed Distribution and the requested - Requirement. - """ - - _template = "{self.dist} is installed but {self.req} is required" - - @property - def dist(self): - return self.args[0] - - @property - def req(self): - return self.args[1] - - def report(self): - return self._template.format(**locals()) - - def with_context(self, required_by): - """ - If required_by is non-empty, return a version of self that is a - ContextualVersionConflict. - """ - if not required_by: - return self - args = self.args + (required_by,) - return ContextualVersionConflict(*args) - - -class ContextualVersionConflict(VersionConflict): - """ - A VersionConflict that accepts a third parameter, the set of the - requirements that required the installed Distribution. 
- """ - - _template = VersionConflict._template + ' by {self.required_by}' - - @property - def required_by(self): - return self.args[2] - - -class DistributionNotFound(ResolutionError): - """A requested distribution was not found""" - - _template = ("The '{self.req}' distribution was not found " - "and is required by {self.requirers_str}") - - @property - def req(self): - return self.args[0] - - @property - def requirers(self): - return self.args[1] - - @property - def requirers_str(self): - if not self.requirers: - return 'the application' - return ', '.join(self.requirers) - - def report(self): - return self._template.format(**locals()) - - def __str__(self): - return self.report() - - -class UnknownExtra(ResolutionError): - """Distribution doesn't have an "extra feature" of the given name""" - - -_provider_factories = {} - -PY_MAJOR = sys.version[:3] -EGG_DIST = 3 -BINARY_DIST = 2 -SOURCE_DIST = 1 -CHECKOUT_DIST = 0 -DEVELOP_DIST = -1 - - -def register_loader_type(loader_type, provider_factory): - """Register `provider_factory` to make providers for `loader_type` - - `loader_type` is the type or class of a PEP 302 ``module.__loader__``, - and `provider_factory` is a function that, passed a *module* object, - returns an ``IResourceProvider`` for that module. - """ - _provider_factories[loader_type] = provider_factory - - -def get_provider(moduleOrReq): - """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq, Requirement): - return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] - try: - module = sys.modules[moduleOrReq] - except KeyError: - __import__(moduleOrReq) - module = sys.modules[moduleOrReq] - loader = getattr(module, '__loader__', None) - return _find_adapter(_provider_factories, loader)(module) - - -def _macosx_vers(_cache=[]): - if not _cache: - version = platform.mac_ver()[0] - # fallback for MacPorts - if version == '': - plist = '/System/Library/CoreServices/SystemVersion.plist' - if os.path.exists(plist): - if hasattr(plistlib, 'readPlist'): - plist_content = plistlib.readPlist(plist) - if 'ProductVersion' in plist_content: - version = plist_content['ProductVersion'] - - _cache.append(version.split('.')) - return _cache[0] - - -def _macosx_arch(machine): - return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) - - -def get_build_platform(): - """Return this platform's string for platform-specific distributions - - XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. - """ - from sysconfig import get_platform - - plat = get_platform() - if sys.platform == "darwin" and not plat.startswith('macosx-'): - try: - version = _macosx_vers() - machine = os.uname()[4].replace(" ", "_") - return "macosx-%d.%d-%s" % ( - int(version[0]), int(version[1]), - _macosx_arch(machine), - ) - except ValueError: - # if someone is running a non-Mac darwin system, this will fall - # through to the default implementation - pass - return plat - - -macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") -darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -# XXX backward compat -get_platform = get_build_platform - - -def compatible_platforms(provided, required): - """Can code for the `provided` platform run on the `required` platform? - - Returns true if either platform is ``None``, or the platforms are equal. - - XXX Needs compatibility checks for Linux and other unixy OSes. 
- """ - if provided is None or required is None or provided == required: - # easy case - return True - - # Mac OS X special cases - reqMac = macosVersionString.match(required) - if reqMac: - provMac = macosVersionString.match(provided) - - # is this a Mac package? - if not provMac: - # this is backwards compatibility for packages built before - # setuptools 0.6. All packages built after this point will - # use the new macosx designation. - provDarwin = darwinVersionString.match(provided) - if provDarwin: - dversion = int(provDarwin.group(1)) - macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": - return True - # egg isn't macosx or legacy darwin - return False - - # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): - return False - - # is the required OS major update >= the provided one? - if int(provMac.group(2)) > int(reqMac.group(2)): - return False - - return True - - # XXX Linux and other platforms' special cases should go here - return False - - -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - - -# backward compatibility -run_main = run_script - - -def get_distribution(dist): - """Return a current distribution object for a Requirement or string""" - if isinstance(dist, six.string_types): - dist = Requirement.parse(dist) - if isinstance(dist, Requirement): - dist = get_provider(dist) - if not isinstance(dist, Distribution): - raise TypeError("Expected string, Requirement, or Distribution", dist) - return dist - - -def load_entry_point(dist, group, name): - """Return `name` entry point of `group` for `dist` or raise ImportError""" - return get_distribution(dist).load_entry_point(group, name) - - -def get_entry_map(dist, group=None): - """Return the entry point map for `group`, or the full entry map""" - return get_distribution(dist).get_entry_map(group) - - -def get_entry_info(dist, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return get_distribution(dist).get_entry_info(group, name) - - -class IMetadataProvider: - def has_metadata(name): - """Does the package's distribution contain the named metadata?""" - - def get_metadata(name): - """The named metadata resource as a string""" - - def get_metadata_lines(name): - """Yield named metadata resource as list of non-blank non-comment lines - - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" - - def metadata_isdir(name): - """Is the named metadata a directory? 
(like ``os.path.isdir()``)""" - - def metadata_listdir(name): - """List of metadata names in the directory (like ``os.listdir()``)""" - - def run_script(script_name, namespace): - """Execute the named script in the supplied namespace dictionary""" - - -class IResourceProvider(IMetadataProvider): - """An object that provides access to package resources""" - - def get_resource_filename(manager, resource_name): - """Return a true filesystem path for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_stream(manager, resource_name): - """Return a readable file-like object for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_string(manager, resource_name): - """Return a string containing the contents of `resource_name` - - `manager` must be an ``IResourceManager``""" - - def has_resource(resource_name): - """Does the package contain the named resource?""" - - def resource_isdir(resource_name): - """Is the named resource a directory? (like ``os.path.isdir()``)""" - - def resource_listdir(resource_name): - """List of resource names in the directory (like ``os.listdir()``)""" - - -class WorkingSet: - """A collection of active distributions on sys.path (or a similar list)""" - - def __init__(self, entries=None): - """Create working set from list of path entries (default=sys.path)""" - self.entries = [] - self.entry_keys = {} - self.by_key = {} - self.callbacks = [] - - if entries is None: - entries = sys.path - - for entry in entries: - self.add_entry(entry) - - @classmethod - def _build_master(cls): - """ - Prepare the master working set. - """ - ws = cls() - try: - from __main__ import __requires__ - except ImportError: - # The main program does not list any requirements - return ws - - # ensure the requirements are met - try: - ws.require(__requires__) - except VersionConflict: - return cls._build_from_requirements(__requires__) - - return ws - - @classmethod - def _build_from_requirements(cls, req_spec): - """ - Build a working set from a requirement spec. Rewrites sys.path. - """ - # try it without defaults already on sys.path - # by starting with an empty path - ws = cls([]) - reqs = parse_requirements(req_spec) - dists = ws.resolve(reqs, Environment()) - for dist in dists: - ws.add(dist) - - # add any missing entries from sys.path - for entry in sys.path: - if entry not in ws.entries: - ws.add_entry(entry) - - # then copy back to sys.path - sys.path[:] = ws.entries - return ws - - def add_entry(self, entry): - """Add a path item to ``.entries``, finding any distributions on it - - ``find_distributions(entry, True)`` is used to find distributions - corresponding to the path entry, and they are added. `entry` is - always appended to ``.entries``, even if it is already present. - (This is because ``sys.path`` can contain the same value more than - once, and the ``.entries`` of the ``sys.path`` WorkingSet should always - equal ``sys.path``.) - """ - self.entry_keys.setdefault(entry, []) - self.entries.append(entry) - for dist in find_distributions(entry, True): - self.add(dist, entry, False) - - def __contains__(self, dist): - """True if `dist` is the active distribution for its project""" - return self.by_key.get(dist.key) == dist - - def find(self, req): - """Find a distribution matching requirement `req` - - If there is an active distribution for the requested project, this - returns it as long as it meets the version requirement specified by - `req`. 
But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. - """ - dist = self.by_key.get(req.key) - if dist is not None and dist not in req: - # XXX add more info - raise VersionConflict(dist, req) - return dist - - def iter_entry_points(self, group, name=None): - """Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching - both `group` and `name` are yielded (in distribution order). - """ - return ( - entry - for dist in self - for entry in dist.get_entry_map(group).values() - if name is None or name == entry.name - ) - - def run_script(self, requires, script_name): - """Locate distribution for `requires` and run `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - self.require(requires)[0].run_script(script_name, ns) - - def __iter__(self): - """Yield distributions for non-duplicate projects in the working set - - The yield order is the order in which the items' path entries were - added to the working set. - """ - seen = {} - for item in self.entries: - if item not in self.entry_keys: - # workaround a cache issue - continue - - for key in self.entry_keys[item]: - if key not in seen: - seen[key] = 1 - yield self.by_key[key] - - def add(self, dist, entry=None, insert=True, replace=False): - """Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to the ``.location`` of `dist`. - On exit from this routine, `entry` is added to the end of the working - set's ``.entries`` (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution in the set, unless `replace=True`. - If it's added, any callbacks registered with the ``subscribe()`` method - will be called. - """ - if insert: - dist.insert_on(self.entries, entry, replace=replace) - - if entry is None: - entry = dist.location - keys = self.entry_keys.setdefault(entry, []) - keys2 = self.entry_keys.setdefault(dist.location, []) - if not replace and dist.key in self.by_key: - # ignore hidden distros - return - - self.by_key[dist.key] = dist - if dist.key not in keys: - keys.append(dist.key) - if dist.key not in keys2: - keys2.append(dist.key) - self._added_new(dist) - - def resolve(self, requirements, env=None, installer=None, - replace_conflicting=False, extras=None): - """List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. `env`, - if supplied, should be an ``Environment`` instance. If - not supplied, it defaults to all distributions available within any - entry or distribution in the working set. `installer`, if supplied, - will be invoked with each requirement that cannot be met by an - already-installed distribution; it should return a ``Distribution`` or - ``None``. - - Unless `replace_conflicting=True`, raises a VersionConflict exception - if - any requirements are found on the path that have the correct name but - the wrong version. Otherwise, if an `installer` is supplied it will be - invoked to obtain the correct version of the requirement and activate - it. - - `extras` is a list of the extras to be used with these requirements. 
- This is important because extra requirements may look like `my_req; - extra = "my_extra"`, which would otherwise be interpreted as a purely - optional requirement. Instead, we want to be able to assert that these - requirements are truly required. - """ - - # set up the stack - requirements = list(requirements)[::-1] - # set of processed requirements - processed = {} - # key -> dist - best = {} - to_activate = [] - - req_extras = _ReqExtras() - - # Mapping of requirement to set of distributions that required it; - # useful for reporting info about conflicts. - required_by = collections.defaultdict(set) - - while requirements: - # process dependencies breadth-first - req = requirements.pop(0) - if req in processed: - # Ignore cyclic or redundant dependencies - continue - - if not req_extras.markers_pass(req, extras): - continue - - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None or (dist not in req and replace_conflicting): - ws = self - if env is None: - if dist is None: - env = Environment(self.entries) - else: - # Use an empty environment and workingset to avoid - # any further conflicts with the conflicting - # distribution - env = Environment([]) - ws = WorkingSet([]) - dist = best[req.key] = env.best_match( - req, ws, installer, - replace_conflicting=replace_conflicting - ) - if dist is None: - requirers = required_by.get(req, None) - raise DistributionNotFound(req, requirers) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - dependent_req = required_by[req] - raise VersionConflict(dist, req).with_context(dependent_req) - - # push the new requirements onto the stack - new_requirements = dist.requires(req.extras)[::-1] - requirements.extend(new_requirements) - - # Register the new requirements needed by req - for new_requirement in new_requirements: - required_by[new_requirement].add(req.project_name) - req_extras[new_requirement] = req.extras - - processed[req] = True - - # return list of distros to activate - return to_activate - - def find_plugins( - self, plugin_env, full_env=None, installer=None, fallback=True): - """Find all activatable distributions in `plugin_env` - - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - # add plugins+libs to sys.path - map(working_set.add, distributions) - # display errors - print('Could not load', errors) - - The `plugin_env` should be an ``Environment`` instance that contains - only distributions that are in the project's "plugin directory" or - directories. The `full_env`, if supplied, should be an ``Environment`` - contains all currently-available distributions. If `full_env` is not - supplied, one is created automatically from the ``WorkingSet`` this - method is called on, which will typically mean that every directory on - ``sys.path`` will be scanned for distributions. - - `installer` is a standard installer callback as used by the - ``resolve()`` method. The `fallback` flag indicates whether we should - attempt to resolve older versions of a plugin if the newest version - cannot be resolved. - - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` - that were loadable, along with any other distributions that are needed - to resolve their dependencies. 
`error_info` is a dictionary mapping - unloadable plugin distributions to an exception instance describing the - error that occurred. Usually this will be a ``DistributionNotFound`` or - ``VersionConflict`` instance. - """ - - plugin_projects = list(plugin_env) - # scan project names in alphabetic order - plugin_projects.sort() - - error_info = {} - distributions = {} - - if full_env is None: - env = Environment(self.entries) - env += plugin_env - else: - env = full_env + plugin_env - - shadow_set = self.__class__([]) - # put all our entries in shadow_set - list(map(shadow_set.add, self)) - - for project_name in plugin_projects: - - for dist in plugin_env[project_name]: - - req = [dist.as_requirement()] - - try: - resolvees = shadow_set.resolve(req, env, installer) - - except ResolutionError as v: - # save error info - error_info[dist] = v - if fallback: - # try the next older version of project - continue - else: - # give up on this project, keep going - break - - else: - list(map(shadow_set.add, resolvees)) - distributions.update(dict.fromkeys(resolvees)) - - # success, no need to try any more versions of this project - break - - distributions = list(distributions) - distributions.sort() - - return distributions, error_info - - def require(self, *requirements): - """Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. - """ - needed = self.resolve(parse_requirements(requirements)) - - for dist in needed: - self.add(dist) - - return needed - - def subscribe(self, callback, existing=True): - """Invoke `callback` for all distributions - - If `existing=True` (default), - call on all existing ones, as well. - """ - if callback in self.callbacks: - return - self.callbacks.append(callback) - if not existing: - return - for dist in self: - callback(dist) - - def _added_new(self, dist): - for callback in self.callbacks: - callback(dist) - - def __getstate__(self): - return ( - self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.callbacks[:] - ) - - def __setstate__(self, e_k_b_c): - entries, keys, by_key, callbacks = e_k_b_c - self.entries = entries[:] - self.entry_keys = keys.copy() - self.by_key = by_key.copy() - self.callbacks = callbacks[:] - - -class _ReqExtras(dict): - """ - Map each requirement to the extras that demanded it. - """ - - def markers_pass(self, req, extras=None): - """ - Evaluate markers for req against each extra that - demanded it. - - Return False if the req has a marker and fails - evaluation. Otherwise, return True. - """ - extra_evals = ( - req.marker.evaluate({'extra': extra}) - for extra in self.get(req, ()) + (extras or (None,)) - ) - return not req.marker or any(extra_evals) - - -class Environment: - """Searchable snapshot of distributions on a search path""" - - def __init__( - self, search_path=None, platform=get_supported_platform(), - python=PY_MAJOR): - """Snapshot distributions available on a search path - - Any distributions found on `search_path` are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. 
- - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'3.6'``); - it defaults to the current version. - - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to map *all* distributions, not just those compatible with the - running platform or Python version. - """ - self._distmap = {} - self.platform = platform - self.python = python - self.scan(search_path) - - def can_add(self, dist): - """Is distribution `dist` acceptable for this environment? - - The distribution must match the platform and python version - requirements specified when this environment was created, or False - is returned. - """ - py_compat = ( - self.python is None - or dist.py_version is None - or dist.py_version == self.python - ) - return py_compat and compatible_platforms(dist.platform, self.platform) - - def remove(self, dist): - """Remove `dist` from the environment""" - self._distmap[dist.key].remove(dist) - - def scan(self, search_path=None): - """Scan `search_path` for distributions usable in this environment - - Any distributions found are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. - """ - if search_path is None: - search_path = sys.path - - for item in search_path: - for dist in find_distributions(item): - self.add(dist) - - def __getitem__(self, project_name): - """Return a newest-to-oldest list of distributions for `project_name` - - Uses case-insensitive `project_name` comparison, assuming all the - project's distributions use their project's name converted to all - lowercase as their key. - - """ - distribution_key = project_name.lower() - return self._distmap.get(distribution_key, []) - - def add(self, dist): - """Add `dist` if we ``can_add()`` it and it has not already been added - """ - if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key, []) - if dist not in dists: - dists.append(dist) - dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) - - def best_match( - self, req, working_set, installer=None, replace_conflicting=False): - """Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. (This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) If a suitable distribution - isn't active, this method returns the newest distribution in the - environment that meets the ``Requirement`` in `req`. If no suitable - distribution is found, and `installer` is supplied, then the result of - calling the environment's ``obtain(req, installer)`` method will be - returned. - """ - try: - dist = working_set.find(req) - except VersionConflict: - if not replace_conflicting: - raise - dist = None - if dist is not None: - return dist - for dist in self[req.key]: - if dist in req: - return dist - # try to download/install - return self.obtain(req, installer) - - def obtain(self, requirement, installer=None): - """Obtain a distribution matching `requirement` (e.g. via download) - - Obtain a distro that matches requirement (e.g. via download). 
In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument.""" - if installer is not None: - return installer(requirement) - - def __iter__(self): - """Yield the unique project names of the available distributions""" - for key in self._distmap.keys(): - if self[key]: - yield key - - def __iadd__(self, other): - """In-place addition of a distribution or environment""" - if isinstance(other, Distribution): - self.add(other) - elif isinstance(other, Environment): - for project in other: - for dist in other[project]: - self.add(dist) - else: - raise TypeError("Can't add %r to environment" % (other,)) - return self - - def __add__(self, other): - """Add an environment or distribution to an environment""" - new = self.__class__([], platform=None, python=None) - for env in self, other: - new += env - return new - - -# XXX backward compatibility -AvailableDistributions = Environment - - -class ExtractionError(RuntimeError): - """An error occurred extracting a resource - - The following attributes are available from instances of this exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - """ - - -class ResourceManager: - """Manage resource extraction and packages""" - extraction_path = None - - def __init__(self): - self.cached_files = {} - - def resource_exists(self, package_or_requirement, resource_name): - """Does the named resource exist?""" - return get_provider(package_or_requirement).has_resource(resource_name) - - def resource_isdir(self, package_or_requirement, resource_name): - """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) - - def resource_filename(self, package_or_requirement, resource_name): - """Return a true filesystem path for specified resource""" - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name - ) - - def resource_stream(self, package_or_requirement, resource_name): - """Return a readable file-like object for specified resource""" - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name - ) - - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" - return get_provider(package_or_requirement).get_resource_string( - self, resource_name - ) - - def resource_listdir(self, package_or_requirement, resource_name): - """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) - - def extraction_error(self): - """Give an error message for problems extracting file(s)""" - - old_exc = sys.exc_info()[1] - cache_path = self.extraction_path or get_default_cache() - - tmpl = textwrap.dedent(""" - Can't extract file(s) to egg cache - - The following error occurred while trying to extract file(s) - to the Python egg cache: - - {old_exc} - - The Python egg cache directory is currently set to: - - {cache_path} - - Perhaps your account does not have write access to this directory? - You can change the cache directory by setting the PYTHON_EGG_CACHE - environment variable to point to an accessible directory. 
- """).lstrip() - err = ExtractionError(tmpl.format(**locals())) - err.manager = self - err.cache_path = cache_path - err.original_error = old_exc - raise err - - def get_cache_path(self, archive_name, names=()): - """Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - """ - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name + '-tmp', *names) - try: - _bypass_ensure_directory(target_path) - except Exception: - self.extraction_error() - - self._warn_unsafe_extraction_path(extract_path) - - self.cached_files[target_path] = 1 - return target_path - - @staticmethod - def _warn_unsafe_extraction_path(path): - """ - If the default extraction path is overridden and set to an insecure - location, such as /tmp, it opens up an opportunity for an attacker to - replace an extracted file with an unauthorized payload. Warn the user - if a known insecure location is used. - - See Distribute #375 for more details. - """ - if os.name == 'nt' and not path.startswith(os.environ['windir']): - # On Windows, permissions are generally restrictive by default - # and temp directories are not writable by other users, so - # bypass the warning. - return - mode = os.stat(path).st_mode - if mode & stat.S_IWOTH or mode & stat.S_IWGRP: - msg = ( - "%s is writable by group/others and vulnerable to attack " - "when " - "used with get_resource_filename. Consider a more secure " - "location (set with .set_extraction_path or the " - "PYTHON_EGG_CACHE environment variable)." % path - ) - warnings.warn(msg, UserWarning) - - def postprocess(self, tempname, filename): - """Perform any platform-specific postprocessing of `tempname` - - This is where Mac header rewrites should be done; other platforms don't - have anything special they should do. - - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. - - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. - """ - - if os.name == 'posix': - # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 - os.chmod(tempname, mode) - - def set_extraction_path(self, path): - """Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which - is based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the ``IResourceProvider``. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. 
There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. - - (Note: you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``.) - """ - if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) - - self.extraction_path = path - - def cleanup_resources(self, force=False): - """ - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. This method is not - automatically called; you must call it explicitly or register it as an - ``atexit`` function if you wish to ensure cleanup of a temporary - directory used for extractions. - """ - # XXX - - -def get_default_cache(): - """ - Return the ``PYTHON_EGG_CACHE`` environment variable - or a platform-relevant user cache dir for an app - named "Python-Eggs". - """ - return ( - os.environ.get('PYTHON_EGG_CACHE') - or appdirs.user_cache_dir(appname='Python-Eggs') - ) - - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """ - Convert an arbitrary string to a standard version string - """ - try: - # normalize the version - return str(packaging.version.Version(version)) - except packaging.version.InvalidVersion: - version = version.replace(' ', '.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def safe_extra(extra): - """Convert an arbitrary string to a standard 'extra' name - - Any runs of non-alphanumeric characters are replaced with a single '_', - and the result is always lowercased. - """ - return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. - """ - return name.replace('-', '_') - - -def invalid_marker(text): - """ - Validate text as a PEP 508 environment marker; return an exception - if invalid or False otherwise. - """ - try: - evaluate_marker(text) - except SyntaxError as e: - e.filename = None - e.lineno = None - return e - return False - - -def evaluate_marker(text, extra=None): - """ - Evaluate a PEP 508 environment marker. - Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. - - This implementation uses the 'pyparsing' module. 
- """ - try: - marker = packaging.markers.Marker(text) - return marker.evaluate() - except packaging.markers.InvalidMarker as e: - raise SyntaxError(e) - - -class NullProvider: - """Try to implement resources and metadata for arbitrary PEP 302 loaders""" - - egg_name = None - egg_info = None - loader = None - - def __init__(self, module): - self.loader = getattr(module, '__loader__', None) - self.module_path = os.path.dirname(getattr(module, '__file__', '')) - - def get_resource_filename(self, manager, resource_name): - return self._fn(self.module_path, resource_name) - - def get_resource_stream(self, manager, resource_name): - return io.BytesIO(self.get_resource_string(manager, resource_name)) - - def get_resource_string(self, manager, resource_name): - return self._get(self._fn(self.module_path, resource_name)) - - def has_resource(self, resource_name): - return self._has(self._fn(self.module_path, resource_name)) - - def _get_metadata_path(self, name): - return self._fn(self.egg_info, name) - - def has_metadata(self, name): - if not self.egg_info: - return self.egg_info - - path = self._get_metadata_path(name) - return self._has(path) - - def get_metadata(self, name): - if not self.egg_info: - return "" - value = self._get(self._fn(self.egg_info, name)) - return value.decode('utf-8') if six.PY3 else value - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - def resource_isdir(self, resource_name): - return self._isdir(self._fn(self.module_path, resource_name)) - - def metadata_isdir(self, name): - return self.egg_info and self._isdir(self._fn(self.egg_info, name)) - - def resource_listdir(self, resource_name): - return self._listdir(self._fn(self.module_path, resource_name)) - - def metadata_listdir(self, name): - if self.egg_info: - return self._listdir(self._fn(self.egg_info, name)) - return [] - - def run_script(self, script_name, namespace): - script = 'scripts/' + script_name - if not self.has_metadata(script): - raise ResolutionError( - "Script {script!r} not found in metadata at {self.egg_info!r}" - .format(**locals()), - ) - script_text = self.get_metadata(script).replace('\r\n', '\n') - script_text = script_text.replace('\r', '\n') - script_filename = self._fn(self.egg_info, script) - namespace['__file__'] = script_filename - if os.path.exists(script_filename): - source = open(script_filename).read() - code = compile(source, script_filename, 'exec') - exec(code, namespace, namespace) - else: - from linecache import cache - cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename - ) - script_code = compile(script_text, script_filename, 'exec') - exec(script_code, namespace, namespace) - - def _has(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _isdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _listdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _fn(self, base, resource_name): - self._validate_resource_path(resource_name) - if resource_name: - return os.path.join(base, *resource_name.split('/')) - return base - - @staticmethod - def _validate_resource_path(path): - """ - Validate the resource paths according to the docs. 
- https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access - - >>> warned = getfixture('recwarn') - >>> warnings.simplefilter('always') - >>> vrp = NullProvider._validate_resource_path - >>> vrp('foo/bar.txt') - >>> bool(warned) - False - >>> vrp('../foo/bar.txt') - >>> bool(warned) - True - >>> warned.clear() - >>> vrp('/foo/bar.txt') - >>> bool(warned) - True - >>> vrp('foo/../../bar.txt') - >>> bool(warned) - True - >>> warned.clear() - >>> vrp('foo/f../bar.txt') - >>> bool(warned) - False - - Windows path separators are straight-up disallowed. - >>> vrp(r'\\foo/bar.txt') - Traceback (most recent call last): - ... - ValueError: Use of .. or absolute path in a resource path \ -is not allowed. - - >>> vrp(r'C:\\foo/bar.txt') - Traceback (most recent call last): - ... - ValueError: Use of .. or absolute path in a resource path \ -is not allowed. - - Blank values are allowed - - >>> vrp('') - >>> bool(warned) - False - - Non-string values are not. - - >>> vrp(None) - Traceback (most recent call last): - ... - AttributeError: ... - """ - invalid = ( - os.path.pardir in path.split(posixpath.sep) or - posixpath.isabs(path) or - ntpath.isabs(path) - ) - if not invalid: - return - - msg = "Use of .. or absolute path in a resource path is not allowed." - - # Aggressively disallow Windows absolute paths - if ntpath.isabs(path) and not posixpath.isabs(path): - raise ValueError(msg) - - # for compatibility, warn; in future - # raise ValueError(msg) - warnings.warn( - msg[:-1] + " and will raise exceptions in a future release.", - DeprecationWarning, - stacklevel=4, - ) - - def _get(self, path): - if hasattr(self.loader, 'get_data'): - return self.loader.get_data(path) - raise NotImplementedError( - "Can't perform this operation for loaders without 'get_data()'" - ) - - -register_loader_type(object, NullProvider) - - -class EggProvider(NullProvider): - """Provider based on a virtual filesystem""" - - def __init__(self, module): - NullProvider.__init__(self, module) - self._setup_prefix() - - def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path = self.module_path - old = None - while path != old: - if _is_egg_path(path): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 'EGG-INFO') - self.egg_root = path - break - old = path - path, base = os.path.split(path) - - -class DefaultProvider(EggProvider): - """Provides access to package resources in the filesystem""" - - def _has(self, path): - return os.path.exists(path) - - def _isdir(self, path): - return os.path.isdir(path) - - def _listdir(self, path): - return os.listdir(path) - - def get_resource_stream(self, manager, resource_name): - return open(self._fn(self.module_path, resource_name), 'rb') - - def _get(self, path): - with open(path, 'rb') as stream: - return stream.read() - - @classmethod - def _register(cls): - loader_names = 'SourceFileLoader', 'SourcelessFileLoader', - for name in loader_names: - loader_cls = getattr(importlib_machinery, name, type(None)) - register_loader_type(loader_cls, cls) - - -DefaultProvider._register() - - -class EmptyProvider(NullProvider): - """Provider that returns nothing for all requests""" - - module_path = None - - _isdir = _has = lambda self, path: False - - def _get(self, path): - return '' - - def _listdir(self, path): - return [] - - def __init__(self): - pass - - -empty_provider = EmptyProvider() - - -class ZipManifests(dict): - """ 
- zip manifest builder - """ - - @classmethod - def build(cls, path): - """ - Build a dictionary similar to the zipimport directory - caches, except instead of tuples, store ZipInfo objects. - - Use a platform-specific path separator (os.sep) for the path keys - for compatibility with pypy on Windows. - """ - with zipfile.ZipFile(path) as zfile: - items = ( - ( - name.replace('/', os.sep), - zfile.getinfo(name), - ) - for name in zfile.namelist() - ) - return dict(items) - - load = build - - -class MemoizedZipManifests(ZipManifests): - """ - Memoized zipfile manifests. - """ - manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') - - def load(self, path): - """ - Load a manifest at path or return a suitable manifest already loaded. - """ - path = os.path.normpath(path) - mtime = os.stat(path).st_mtime - - if path not in self or self[path].mtime != mtime: - manifest = self.build(path) - self[path] = self.manifest_mod(manifest, mtime) - - return self[path].manifest - - -class ZipProvider(EggProvider): - """Resource support for zips and eggs""" - - eagers = None - _zip_manifests = MemoizedZipManifests() - - def __init__(self, module): - EggProvider.__init__(self, module) - self.zip_pre = self.loader.archive + os.sep - - def _zipinfo_name(self, fspath): - # Convert a virtual filename (full path to file) into a zipfile subpath - # usable with the zipimport directory cache for our target archive - fspath = fspath.rstrip(os.sep) - if fspath == self.loader.archive: - return '' - if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.zip_pre) - ) - - def _parts(self, zip_path): - # Convert a zipfile subpath into an egg-relative path part list. - # pseudo-fs path - fspath = self.zip_pre + zip_path - if fspath.startswith(self.egg_root + os.sep): - return fspath[len(self.egg_root) + 1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.egg_root) - ) - - @property - def zipinfo(self): - return self._zip_manifests.load(self.loader.archive) - - def get_resource_filename(self, manager, resource_name): - if not self.egg_name: - raise NotImplementedError( - "resource_filename() only supported for .egg, not .zip" - ) - # no need to lock for extraction, since we use temp names - zip_path = self._resource_to_zip(resource_name) - eagers = self._get_eager_resources() - if '/'.join(self._parts(zip_path)) in eagers: - for name in eagers: - self._extract_resource(manager, self._eager_to_zip(name)) - return self._extract_resource(manager, zip_path) - - @staticmethod - def _get_date_and_size(zip_stat): - size = zip_stat.file_size - # ymdhms+wday, yday, dst - date_time = zip_stat.date_time + (0, 0, -1) - # 1980 offset already done - timestamp = time.mktime(date_time) - return timestamp, size - - def _extract_resource(self, manager, zip_path): - - if zip_path in self._index(): - for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) - # return the extracted directory name - return os.path.dirname(last) - - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - - if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - try: - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) - ) - - if self._is_current(real_path, zip_path): - return real_path - - outf, tmpnam = _mkstemp( - ".$extract", - dir=os.path.dirname(real_path), - ) - 
os.write(outf, self.loader.get_data(zip_path)) - os.close(outf) - utime(tmpnam, (timestamp, timestamp)) - manager.postprocess(tmpnam, real_path) - - try: - rename(tmpnam, real_path) - - except os.error: - if os.path.isfile(real_path): - if self._is_current(real_path, zip_path): - # the file became current since it was checked above, - # so proceed. - return real_path - # Windows, del old file and retry - elif os.name == 'nt': - unlink(real_path) - rename(tmpnam, real_path) - return real_path - raise - - except os.error: - # report a user-friendly error - manager.extraction_error() - - return real_path - - def _is_current(self, file_path, zip_path): - """ - Return True if the file_path is current for this zip_path - """ - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - if not os.path.isfile(file_path): - return False - stat = os.stat(file_path) - if stat.st_size != size or stat.st_mtime != timestamp: - return False - # check that the contents match - zip_contents = self.loader.get_data(zip_path) - with open(file_path, 'rb') as f: - file_contents = f.read() - return zip_contents == file_contents - - def _get_eager_resources(self): - if self.eagers is None: - eagers = [] - for name in ('native_libs.txt', 'eager_resources.txt'): - if self.has_metadata(name): - eagers.extend(self.get_metadata_lines(name)) - self.eagers = eagers - return self.eagers - - def _index(self): - try: - return self._dirindex - except AttributeError: - ind = {} - for path in self.zipinfo: - parts = path.split(os.sep) - while parts: - parent = os.sep.join(parts[:-1]) - if parent in ind: - ind[parent].append(parts[-1]) - break - else: - ind[parent] = [parts.pop()] - self._dirindex = ind - return ind - - def _has(self, fspath): - zip_path = self._zipinfo_name(fspath) - return zip_path in self.zipinfo or zip_path in self._index() - - def _isdir(self, fspath): - return self._zipinfo_name(fspath) in self._index() - - def _listdir(self, fspath): - return list(self._index().get(self._zipinfo_name(fspath), ())) - - def _eager_to_zip(self, resource_name): - return self._zipinfo_name(self._fn(self.egg_root, resource_name)) - - def _resource_to_zip(self, resource_name): - return self._zipinfo_name(self._fn(self.module_path, resource_name)) - - -register_loader_type(zipimport.zipimporter, ZipProvider) - - -class FileMetadata(EmptyProvider): - """Metadata handler for standalone PKG-INFO files - - Usage:: - - metadata = FileMetadata("/path/to/PKG-INFO") - - This provider rejects all data and metadata requests except for PKG-INFO, - which is treated as existing, and will be the contents of the file at - the provided location. 
- """ - - def __init__(self, path): - self.path = path - - def _get_metadata_path(self, name): - return self.path - - def has_metadata(self, name): - return name == 'PKG-INFO' and os.path.isfile(self.path) - - def get_metadata(self, name): - if name != 'PKG-INFO': - raise KeyError("No metadata except PKG-INFO is available") - - with io.open(self.path, encoding='utf-8', errors="replace") as f: - metadata = f.read() - self._warn_on_replacement(metadata) - return metadata - - def _warn_on_replacement(self, metadata): - # Python 2.7 compat for: replacement_char = '�' - replacement_char = b'\xef\xbf\xbd'.decode('utf-8') - if replacement_char in metadata: - tmpl = "{self.path} could not be properly decoded in UTF-8" - msg = tmpl.format(**locals()) - warnings.warn(msg) - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - -class PathMetadata(DefaultProvider): - """Metadata provider for egg directories - - Usage:: - - # Development eggs: - - egg_info = "/path/to/PackageName.egg-info" - base_dir = os.path.dirname(egg_info) - metadata = PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir, project_name=dist_name, metadata=metadata) - - # Unpacked egg directories: - - egg_path = "/path/to/PackageName-ver-pyver-etc.egg" - metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - """ - - def __init__(self, path, egg_info): - self.module_path = path - self.egg_info = egg_info - - -class EggMetadata(ZipProvider): - """Metadata provider for .egg files""" - - def __init__(self, importer): - """Create a metadata provider from a zipimporter""" - - self.zip_pre = importer.archive + os.sep - self.loader = importer - if importer.prefix: - self.module_path = os.path.join(importer.archive, importer.prefix) - else: - self.module_path = importer.archive - self._setup_prefix() - - -_declare_state('dict', _distribution_finders={}) - - -def register_finder(importer_type, distribution_finder): - """Register `distribution_finder` to find distributions in sys.path items - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `distribution_finder` is a callable that, passed a path - item and the importer instance, yields ``Distribution`` instances found on - that path item. See ``pkg_resources.find_on_path`` for an example.""" - _distribution_finders[importer_type] = distribution_finder - - -def find_distributions(path_item, only=False): - """Yield distributions accessible via `path_item`""" - importer = get_importer(path_item) - finder = _find_adapter(_distribution_finders, importer) - return finder(importer, path_item, only) - - -def find_eggs_in_zip(importer, path_item, only=False): - """ - Find eggs in zip files; possibly multiple nested eggs. 
- """ - if importer.archive.endswith('.whl'): - # wheels are not supported with this finder - # they don't have PKG-INFO metadata, and won't ever contain eggs - return - metadata = EggMetadata(importer) - if metadata.has_metadata('PKG-INFO'): - yield Distribution.from_filename(path_item, metadata=metadata) - if only: - # don't yield nested distros - return - for subitem in metadata.resource_listdir(''): - if _is_egg_path(subitem): - subpath = os.path.join(path_item, subitem) - dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) - for dist in dists: - yield dist - elif subitem.lower().endswith('.dist-info'): - subpath = os.path.join(path_item, subitem) - submeta = EggMetadata(zipimport.zipimporter(subpath)) - submeta.egg_info = subpath - yield Distribution.from_location(path_item, subitem, submeta) - - -register_finder(zipimport.zipimporter, find_eggs_in_zip) - - -def find_nothing(importer, path_item, only=False): - return () - - -register_finder(object, find_nothing) - - -def _by_version_descending(names): - """ - Given a list of filenames, return them in descending order - by version number. - - >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' - >>> _by_version_descending(names) - ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] - """ - def _by_version(name): - """ - Parse each component of the filename - """ - name, ext = os.path.splitext(name) - parts = itertools.chain(name.split('-'), [ext]) - return [packaging.version.parse(part) for part in parts] - - return sorted(names, key=_by_version, reverse=True) - - -def find_on_path(importer, path_item, only=False): - """Yield distributions accessible on a sys.path directory""" - path_item = _normalize_cached(path_item) - - if _is_unpacked_egg(path_item): - yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item, 'EGG-INFO') - ) - ) - return - - entries = safe_listdir(path_item) - - # for performance, before sorting by version, - # screen entries for only those that will yield - # distributions - filtered = ( - entry - for entry in entries - if dist_factory(path_item, entry, only) - ) - - # scan for .egg and .egg-info in directory - path_item_entries = _by_version_descending(filtered) - for entry in path_item_entries: - fullpath = os.path.join(path_item, entry) - factory = dist_factory(path_item, entry, only) - for dist in factory(fullpath): - yield dist - - -def dist_factory(path_item, entry, only): - """ - Return a dist_factory for a path_item and entry - """ - lower = entry.lower() - is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info'))) - return ( - distributions_from_metadata - if is_meta else - find_distributions - if not only and _is_egg_path(entry) else - resolve_egg_link - if not only and lower.endswith('.egg-link') else - NoDists() - ) - - -class NoDists: - """ - >>> bool(NoDists()) - False - - >>> list(NoDists()('anything')) - [] - """ - def __bool__(self): - return False - if six.PY2: - __nonzero__ = __bool__ - - def __call__(self, fullpath): - return iter(()) - - -def safe_listdir(path): - """ - Attempt to list contents of path, but suppress some exceptions. 
- """ - try: - return os.listdir(path) - except (PermissionError, NotADirectoryError): - pass - except OSError as e: - # Ignore the directory if does not exist, not a directory or - # permission denied - ignorable = ( - e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT) - # Python 2 on Windows needs to be handled this way :( - or getattr(e, "winerror", None) == 267 - ) - if not ignorable: - raise - return () - - -def distributions_from_metadata(path): - root = os.path.dirname(path) - if os.path.isdir(path): - if len(os.listdir(path)) == 0: - # empty metadata dir; skip - return - metadata = PathMetadata(root, path) - else: - metadata = FileMetadata(path) - entry = os.path.basename(path) - yield Distribution.from_location( - root, entry, metadata, precedence=DEVELOP_DIST, - ) - - -def non_empty_lines(path): - """ - Yield non-empty lines from file at path - """ - with open(path) as f: - for line in f: - line = line.strip() - if line: - yield line - - -def resolve_egg_link(path): - """ - Given a path to an .egg-link, resolve distributions - present in the referenced path. - """ - referenced_paths = non_empty_lines(path) - resolved_paths = ( - os.path.join(os.path.dirname(path), ref) - for ref in referenced_paths - ) - dist_groups = map(find_distributions, resolved_paths) - return next(dist_groups, ()) - - -register_finder(pkgutil.ImpImporter, find_on_path) - -if hasattr(importlib_machinery, 'FileFinder'): - register_finder(importlib_machinery.FileFinder, find_on_path) - -_declare_state('dict', _namespace_handlers={}) -_declare_state('dict', _namespace_packages={}) - - -def register_namespace_handler(importer_type, namespace_handler): - """Register `namespace_handler` to declare namespace packages - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `namespace_handler` is a callable like this:: - - def namespace_handler(importer, path_entry, moduleName, module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the importer object has already - agreed that it can handle the relevant path item, and they should only - return a subpath if the module __path__ does not already contain an - equivalent subpath. For an example namespace handler, see - ``pkg_resources.file_ns_handler``. 
- """ - _namespace_handlers[importer_type] = namespace_handler - - -def _handle_ns(packageName, path_item): - """Ensure that named package includes a subpath of path_item (if needed)""" - - importer = get_importer(path_item) - if importer is None: - return None - - # capture warnings due to #1111 - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - loader = importer.find_module(packageName) - - if loader is None: - return None - module = sys.modules.get(packageName) - if module is None: - module = sys.modules[packageName] = types.ModuleType(packageName) - module.__path__ = [] - _set_parent_ns(packageName) - elif not hasattr(module, '__path__'): - raise TypeError("Not a package:", packageName) - handler = _find_adapter(_namespace_handlers, importer) - subpath = handler(importer, path_item, packageName, module) - if subpath is not None: - path = module.__path__ - path.append(subpath) - loader.load_module(packageName) - _rebuild_mod_path(path, packageName, module) - return subpath - - -def _rebuild_mod_path(orig_path, package_name, module): - """ - Rebuild module.__path__ ensuring that all entries are ordered - corresponding to their sys.path order - """ - sys_path = [_normalize_cached(p) for p in sys.path] - - def safe_sys_path_index(entry): - """ - Workaround for #520 and #513. - """ - try: - return sys_path.index(entry) - except ValueError: - return float('inf') - - def position_in_sys_path(path): - """ - Return the ordinal of the path based on its position in sys.path - """ - path_parts = path.split(os.sep) - module_parts = package_name.count('.') + 1 - parts = path_parts[:-module_parts] - return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) - - new_path = sorted(orig_path, key=position_in_sys_path) - new_path = [_normalize_cached(p) for p in new_path] - - if isinstance(module.__path__, list): - module.__path__[:] = new_path - else: - module.__path__ = new_path - - -def declare_namespace(packageName): - """Declare that package 'packageName' is a namespace package""" - - _imp.acquire_lock() - try: - if packageName in _namespace_packages: - return - - path = sys.path - parent, _, _ = packageName.rpartition('.') - - if parent: - declare_namespace(parent) - if parent not in _namespace_packages: - __import__(parent) - try: - path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) - - # Track what packages are namespaces, so when new path items are added, - # they can be updated - _namespace_packages.setdefault(parent or None, []).append(packageName) - _namespace_packages.setdefault(packageName, []) - - for path_item in path: - # Ensure all the parent's path items are reflected in the child, - # if they apply - _handle_ns(packageName, path_item) - - finally: - _imp.release_lock() - - -def fixup_namespace_packages(path_item, parent=None): - """Ensure that previously-declared namespace packages include path_item""" - _imp.acquire_lock() - try: - for package in _namespace_packages.get(parent, ()): - subpath = _handle_ns(package, path_item) - if subpath: - fixup_namespace_packages(subpath, package) - finally: - _imp.release_lock() - - -def file_ns_handler(importer, path_item, packageName, module): - """Compute an ns-package subpath for a filesystem or zipfile importer""" - - subpath = os.path.join(path_item, packageName.split('.')[-1]) - normalized = _normalize_cached(subpath) - for item in module.__path__: - if _normalize_cached(item) == normalized: - break - else: - # Only return the path if it's not already there - 
return subpath - - -register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) -register_namespace_handler(zipimport.zipimporter, file_ns_handler) - -if hasattr(importlib_machinery, 'FileFinder'): - register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) - - -def null_ns_handler(importer, path_item, packageName, module): - return None - - -register_namespace_handler(object, null_ns_handler) - - -def normalize_path(filename): - """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename)))) - - -def _cygwin_patch(filename): # pragma: nocover - """ - Contrary to POSIX 2008, on Cygwin, getcwd (3) contains - symlink components. Using - os.path.abspath() works around this limitation. A fix in os.getcwd() - would probably better, in Cygwin even more so, except - that this seems to be by design... - """ - return os.path.abspath(filename) if sys.platform == 'cygwin' else filename - - -def _normalize_cached(filename, _cache={}): - try: - return _cache[filename] - except KeyError: - _cache[filename] = result = normalize_path(filename) - return result - - -def _is_egg_path(path): - """ - Determine if given path appears to be an egg. - """ - return path.lower().endswith('.egg') - - -def _is_unpacked_egg(path): - """ - Determine if given path appears to be an unpacked egg. - """ - return ( - _is_egg_path(path) and - os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) - ) - - -def _set_parent_ns(packageName): - parts = packageName.split('.') - name = parts.pop() - if parts: - parent = '.'.join(parts) - setattr(sys.modules[parent], name, sys.modules[packageName]) - - -def yield_lines(strs): - """Yield non-empty/non-comment lines of a string or sequence""" - if isinstance(strs, six.string_types): - for s in strs.splitlines(): - s = s.strip() - # skip blank lines/comments - if s and not s.startswith('#'): - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - - -MODULE = re.compile(r"\w+(\.\w+)*$").match -EGG_NAME = re.compile( - r""" - (?P<name>[^-]+) ( - -(?P<ver>[^-]+) ( - -py(?P<pyver>[^-]+) ( - -(?P<plat>.+) - )? - )? - )? - """, - re.VERBOSE | re.IGNORECASE, -).match - - -class EntryPoint: - """Object representing an advertised importable object""" - - def __init__(self, name, module_name, attrs=(), extras=(), dist=None): - if not MODULE(module_name): - raise ValueError("Invalid module name", module_name) - self.name = name - self.module_name = module_name - self.attrs = tuple(attrs) - self.extras = tuple(extras) - self.dist = dist - - def __str__(self): - s = "%s = %s" % (self.name, self.module_name) - if self.attrs: - s += ':' + '.'.join(self.attrs) - if self.extras: - s += ' [%s]' % ','.join(self.extras) - return s - - def __repr__(self): - return "EntryPoint.parse(%r)" % str(self) - - def load(self, require=True, *args, **kwargs): - """ - Require packages for this EntryPoint, then resolve it. - """ - if not require or args or kwargs: - warnings.warn( - "Parameters to load are deprecated. Call .resolve and " - ".require separately.", - PkgResourcesDeprecationWarning, - stacklevel=2, - ) - if require: - self.require(*args, **kwargs) - return self.resolve() - - def resolve(self): - """ - Resolve the entry point from its module and attrs. 
- """ - module = __import__(self.module_name, fromlist=['__name__'], level=0) - try: - return functools.reduce(getattr, self.attrs, module) - except AttributeError as exc: - raise ImportError(str(exc)) - - def require(self, env=None, installer=None): - if self.extras and not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) - - # Get the requirements for this entry point with all its extras and - # then resolve them. We have to pass `extras` along when resolving so - # that the working set knows what extras we want. Otherwise, for - # dist-info distributions, the working set will assume that the - # requirements for that extra are purely optional and skip over them. - reqs = self.dist.requires(self.extras) - items = working_set.resolve(reqs, env, installer, extras=self.extras) - list(map(working_set.add, items)) - - pattern = re.compile( - r'\s*' - r'(?P<name>.+?)\s*' - r'=\s*' - r'(?P<module>[\w.]+)\s*' - r'(:\s*(?P<attr>[\w.]+))?\s*' - r'(?P<extras>\[.*\])?\s*$' - ) - - @classmethod - def parse(cls, src, dist=None): - """Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1, extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional - """ - m = cls.pattern.match(src) - if not m: - msg = "EntryPoint must be in 'name=module:attrs [extras]' format" - raise ValueError(msg, src) - res = m.groupdict() - extras = cls._parse_extras(res['extras']) - attrs = res['attr'].split('.') if res['attr'] else () - return cls(res['name'], res['module'], attrs, extras, dist) - - @classmethod - def _parse_extras(cls, extras_spec): - if not extras_spec: - return () - req = Requirement.parse('x' + extras_spec) - if req.specs: - raise ValueError() - return req.extras - - @classmethod - def parse_group(cls, group, lines, dist=None): - """Parse an entry point group""" - if not MODULE(group): - raise ValueError("Invalid group name", group) - this = {} - for line in yield_lines(lines): - ep = cls.parse(line, dist) - if ep.name in this: - raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name] = ep - return this - - @classmethod - def parse_map(cls, data, dist=None): - """Parse a map of entry point groups""" - if isinstance(data, dict): - data = data.items() - else: - data = split_sections(data) - maps = {} - for group, lines in data: - if group is None: - if not lines: - continue - raise ValueError("Entry points must be listed in groups") - group = group.strip() - if group in maps: - raise ValueError("Duplicate group name", group) - maps[group] = cls.parse_group(group, lines, dist) - return maps - - -def _remove_md5_fragment(location): - if not location: - return '' - parsed = urllib.parse.urlparse(location) - if parsed[-1].startswith('md5='): - return urllib.parse.urlunparse(parsed[:-1] + ('',)) - return location - - -def _version_from_file(lines): - """ - Given an iterable of lines from a Metadata file, return - the value of the Version field, if present, or None otherwise. 
- """ - def is_version_line(line): - return line.lower().startswith('version:') - version_lines = filter(is_version_line, lines) - line = next(iter(version_lines), '') - _, _, value = line.partition(':') - return safe_version(value.strip()) or None - - -class Distribution: - """Wrap an actual or potential sys.path entry w/metadata""" - PKG_INFO = 'PKG-INFO' - - def __init__( - self, location=None, metadata=None, project_name=None, - version=None, py_version=PY_MAJOR, platform=None, - precedence=EGG_DIST): - self.project_name = safe_name(project_name or 'Unknown') - if version is not None: - self._version = safe_version(version) - self.py_version = py_version - self.platform = platform - self.location = location - self.precedence = precedence - self._provider = metadata or empty_provider - - @classmethod - def from_location(cls, location, basename, metadata=None, **kw): - project_name, version, py_version, platform = [None] * 4 - basename, ext = os.path.splitext(basename) - if ext.lower() in _distributionImpl: - cls = _distributionImpl[ext.lower()] - - match = EGG_NAME(basename) - if match: - project_name, version, py_version, platform = match.group( - 'name', 'ver', 'pyver', 'plat' - ) - return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw - )._reload_version() - - def _reload_version(self): - return self - - @property - def hashcmp(self): - return ( - self.parsed_version, - self.precedence, - self.key, - _remove_md5_fragment(self.location), - self.py_version or '', - self.platform or '', - ) - - def __hash__(self): - return hash(self.hashcmp) - - def __lt__(self, other): - return self.hashcmp < other.hashcmp - - def __le__(self, other): - return self.hashcmp <= other.hashcmp - - def __gt__(self, other): - return self.hashcmp > other.hashcmp - - def __ge__(self, other): - return self.hashcmp >= other.hashcmp - - def __eq__(self, other): - if not isinstance(other, self.__class__): - # It's not a Distribution, so they are not equal - return False - return self.hashcmp == other.hashcmp - - def __ne__(self, other): - return not self == other - - # These properties have to be lazy so that we don't have to load any - # metadata until/unless it's actually needed. (i.e., some distributions - # may not know their name or version without loading PKG-INFO) - - @property - def key(self): - try: - return self._key - except AttributeError: - self._key = key = self.project_name.lower() - return key - - @property - def parsed_version(self): - if not hasattr(self, "_parsed_version"): - self._parsed_version = parse_version(self.version) - - return self._parsed_version - - def _warn_legacy_version(self): - LV = packaging.version.LegacyVersion - is_legacy = isinstance(self._parsed_version, LV) - if not is_legacy: - return - - # While an empty version is technically a legacy version and - # is not a valid PEP 440 version, it's also unlikely to - # actually come from someone and instead it is more likely that - # it comes from setuptools attempting to parse a filename and - # including it in the list. So for that we'll gate this warning - # on if the version is anything at all or not. - if not self.version: - return - - tmpl = textwrap.dedent(""" - '{project_name} ({version})' is being parsed as a legacy, - non PEP 440, - version. You may find odd behavior and sort order. - In particular it will be sorted as less than 0.0. It - is recommended to migrate to PEP 440 compatible - versions. 
- """).strip().replace('\n', ' ') - - warnings.warn(tmpl.format(**vars(self)), PEP440Warning) - - @property - def version(self): - try: - return self._version - except AttributeError: - version = self._get_version() - if version is None: - path = self._get_metadata_path_for_display(self.PKG_INFO) - msg = ( - "Missing 'Version:' header and/or {} file at path: {}" - ).format(self.PKG_INFO, path) - raise ValueError(msg, self) - - return version - - @property - def _dep_map(self): - """ - A map of extra to its list of (direct) requirements - for this distribution, including the null extra. - """ - try: - return self.__dep_map - except AttributeError: - self.__dep_map = self._filter_extras(self._build_dep_map()) - return self.__dep_map - - @staticmethod - def _filter_extras(dm): - """ - Given a mapping of extras to dependencies, strip off - environment markers and filter out any dependencies - not matching the markers. - """ - for extra in list(filter(None, dm)): - new_extra = extra - reqs = dm.pop(extra) - new_extra, _, marker = extra.partition(':') - fails_marker = marker and ( - invalid_marker(marker) - or not evaluate_marker(marker) - ) - if fails_marker: - reqs = [] - new_extra = safe_extra(new_extra) or None - - dm.setdefault(new_extra, []).extend(reqs) - return dm - - def _build_dep_map(self): - dm = {} - for name in 'requires.txt', 'depends.txt': - for extra, reqs in split_sections(self._get_metadata(name)): - dm.setdefault(extra, []).extend(parse_requirements(reqs)) - return dm - - def requires(self, extras=()): - """List of Requirements needed for this distro if `extras` are used""" - dm = self._dep_map - deps = [] - deps.extend(dm.get(None, ())) - for ext in extras: - try: - deps.extend(dm[safe_extra(ext)]) - except KeyError: - raise UnknownExtra( - "%s has no such extra feature %r" % (self, ext) - ) - return deps - - def _get_metadata_path_for_display(self, name): - """ - Return the path to the given metadata file, if available. - """ - try: - # We need to access _get_metadata_path() on the provider object - # directly rather than through this class's __getattr__() - # since _get_metadata_path() is marked private. - path = self._provider._get_metadata_path(name) - - # Handle exceptions e.g. in case the distribution's metadata - # provider doesn't support _get_metadata_path(). 
- except Exception: - return '[could not detect]' - - return path - - def _get_metadata(self, name): - if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line - - def _get_version(self): - lines = self._get_metadata(self.PKG_INFO) - version = _version_from_file(lines) - - return version - - def activate(self, path=None, replace=False): - """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: - path = sys.path - self.insert_on(path, replace=replace) - if path is sys.path: - fixup_namespace_packages(self.location) - for pkg in self._get_metadata('namespace_packages.txt'): - if pkg in sys.modules: - declare_namespace(pkg) - - def egg_name(self): - """Return what this distribution's standard .egg filename should be""" - filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR - ) - - if self.platform: - filename += '-' + self.platform - return filename - - def __repr__(self): - if self.location: - return "%s (%s)" % (self, self.location) - else: - return str(self) - - def __str__(self): - try: - version = getattr(self, 'version', None) - except ValueError: - version = None - version = version or "[unknown version]" - return "%s %s" % (self.project_name, version) - - def __getattr__(self, attr): - """Delegate all unrecognized public attributes to .metadata provider""" - if attr.startswith('_'): - raise AttributeError(attr) - return getattr(self._provider, attr) - - def __dir__(self): - return list( - set(super(Distribution, self).__dir__()) - | set( - attr for attr in self._provider.__dir__() - if not attr.startswith('_') - ) - ) - - if not hasattr(object, '__dir__'): - # python 2.7 not supported - del __dir__ - - @classmethod - def from_filename(cls, filename, metadata=None, **kw): - return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw - ) - - def as_requirement(self): - """Return a ``Requirement`` that matches this distribution exactly""" - if isinstance(self.parsed_version, packaging.version.Version): - spec = "%s==%s" % (self.project_name, self.parsed_version) - else: - spec = "%s===%s" % (self.project_name, self.parsed_version) - - return Requirement.parse(spec) - - def load_entry_point(self, group, name): - """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group, name) - if ep is None: - raise ImportError("Entry point %r not found" % ((group, name),)) - return ep.load() - - def get_entry_map(self, group=None): - """Return the entry point map for `group`, or the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = self._ep_map = EntryPoint.parse_map( - self._get_metadata('entry_points.txt'), self - ) - if group is not None: - return ep_map.get(group, {}) - return ep_map - - def get_entry_info(self, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return self.get_entry_map(group).get(name) - - def insert_on(self, path, loc=None, replace=False): - """Ensure self.location is on path - - If replace=False (default): - - If location is already in path anywhere, do nothing. - - Else: - - If it's an egg and its parent directory is on path, - insert just ahead of the parent. - - Else: add to the end of path. - If replace=True: - - If location is already on path anywhere (not eggs) - or higher priority than its parent (eggs) - do nothing. 
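The Distribution accessors above (version, requires(), as_requirement(), load_entry_point()) are easiest to see against a package that is actually installed; a minimal sketch, assuming setuptools is present in the environment:

    import pkg_resources

    dist = pkg_resources.get_distribution("setuptools")
    print(dist.project_name, dist.version)   # e.g. setuptools 40.8.0
    print(dist.as_requirement())             # e.g. setuptools==40.8.0
    for req in dist.requires():              # direct dependencies (often empty)
        print(req)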
- - Else: - - If it's an egg and its parent directory is on path, - insert just ahead of the parent, - removing any lower-priority entries. - - Else: add it to the front of path. - """ - - loc = loc or self.location - if not loc: - return - - nloc = _normalize_cached(loc) - bdir = os.path.dirname(nloc) - npath = [(p and _normalize_cached(p) or p) for p in path] - - for p, item in enumerate(npath): - if item == nloc: - if replace: - break - else: - # don't modify path (even removing duplicates) if - # found and not replace - return - elif item == bdir and self.precedence == EGG_DIST: - # if it's an .egg, give it precedence over its directory - # UNLESS it's already been added to sys.path and replace=False - if (not replace) and nloc in npath[p:]: - return - if path is sys.path: - self.check_version_conflict() - path.insert(p, loc) - npath.insert(p, nloc) - break - else: - if path is sys.path: - self.check_version_conflict() - if replace: - path.insert(0, loc) - else: - path.append(loc) - return - - # p is the spot where we found or inserted loc; now remove duplicates - while True: - try: - np = npath.index(nloc, p + 1) - except ValueError: - break - else: - del npath[np], path[np] - # ha! - p = np - - return - - def check_version_conflict(self): - if self.key == 'setuptools': - # ignore the inevitable setuptools self-conflicts :( - return - - nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) - loc = normalize_path(self.location) - for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages): - continue - if modname in ('pkg_resources', 'setuptools', 'site'): - continue - fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): - continue - issue_warning( - "Module %s was already imported from %s, but %s is being added" - " to sys.path" % (modname, fn, self.location), - ) - - def has_version(self): - try: - self.version - except ValueError: - issue_warning("Unbuilt egg for " + repr(self)) - return False - return True - - def clone(self, **kw): - """Copy this distribution, substituting in any changed keyword args""" - names = 'project_name version py_version platform location precedence' - for attr in names.split(): - kw.setdefault(attr, getattr(self, attr, None)) - kw.setdefault('metadata', self._provider) - return self.__class__(**kw) - - @property - def extras(self): - return [dep for dep in self._dep_map if dep] - - -class EggInfoDistribution(Distribution): - def _reload_version(self): - """ - Packages installed by distutils (e.g. numpy or scipy), - which uses an old safe_version, and so - their version numbers can get mangled when - converted to filenames (e.g., 1.11.0.dev0+2329eae to - 1.11.0.dev0_2329eae). These distributions will not be - parsed properly - downstream by Distribution and safe_version, so - take an extra step and try to get the version number from - the metadata file itself instead of the filename. - """ - md_version = self._get_version() - if md_version: - self._version = md_version - return self - - -class DistInfoDistribution(Distribution): - """ - Wrap an actual or potential sys.path entry - w/metadata, .dist-info style. 
- """ - PKG_INFO = 'METADATA' - EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") - - @property - def _parsed_pkg_info(self): - """Parse and cache metadata""" - try: - return self._pkg_info - except AttributeError: - metadata = self.get_metadata(self.PKG_INFO) - self._pkg_info = email.parser.Parser().parsestr(metadata) - return self._pkg_info - - @property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - self.__dep_map = self._compute_dependencies() - return self.__dep_map - - def _compute_dependencies(self): - """Recompute this distribution's dependencies.""" - dm = self.__dep_map = {None: []} - - reqs = [] - # Including any condition expressions - for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: - reqs.extend(parse_requirements(req)) - - def reqs_for_extra(extra): - for req in reqs: - if not req.marker or req.marker.evaluate({'extra': extra}): - yield req - - common = frozenset(reqs_for_extra(None)) - dm[None].extend(common) - - for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: - s_extra = safe_extra(extra.strip()) - dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) - - return dm - - -_distributionImpl = { - '.egg': Distribution, - '.egg-info': EggInfoDistribution, - '.dist-info': DistInfoDistribution, -} - - -def issue_warning(*args, **kw): - level = 1 - g = globals() - try: - # find the first stack frame that is *not* code in - # the pkg_resources module, to use for the warning - while sys._getframe(level).f_globals is g: - level += 1 - except ValueError: - pass - warnings.warn(stacklevel=level + 1, *args, **kw) - - -class RequirementParseError(ValueError): - def __str__(self): - return ' '.join(self.args) - - -def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` - - `strs` must be a string, or a (possibly-nested) iterable thereof. - """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - for line in lines: - # Drop comments -- a hash without a space may be in a URL. - if ' #' in line: - line = line[:line.find(' #')] - # If there is a line continuation, drop it, and append the next line. - if line.endswith('\\'): - line = line[:-2].strip() - try: - line += next(lines) - except StopIteration: - return - yield Requirement(line) - - -class Requirement(packaging.requirements.Requirement): - def __init__(self, requirement_string): - """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - try: - super(Requirement, self).__init__(requirement_string) - except packaging.requirements.InvalidRequirement as e: - raise RequirementParseError(str(e)) - self.unsafe_name = self.name - project_name = safe_name(self.name) - self.project_name, self.key = project_name, project_name.lower() - self.specs = [ - (spec.operator, spec.version) for spec in self.specifier] - self.extras = tuple(map(safe_extra, self.extras)) - self.hashCmp = ( - self.key, - self.specifier, - frozenset(self.extras), - str(self.marker) if self.marker else None, - ) - self.__hash = hash(self.hashCmp) - - def __eq__(self, other): - return ( - isinstance(other, Requirement) and - self.hashCmp == other.hashCmp - ) - - def __ne__(self, other): - return not self == other - - def __contains__(self, item): - if isinstance(item, Distribution): - if item.key != self.key: - return False - - item = item.version - - # Allow prereleases always in order to match the previous behavior of - # this method. 
In the future this should be smarter and follow PEP 440 - # more accurately. - return self.specifier.contains(item, prereleases=True) - - def __hash__(self): - return self.__hash - - def __repr__(self): - return "Requirement.parse(%r)" % str(self) - - @staticmethod - def parse(s): - req, = parse_requirements(s) - return req - - -def _always_object(classes): - """ - Ensure object appears in the mro even - for old-style classes. - """ - if object not in classes: - return classes + (object,) - return classes - - -def _find_adapter(registry, ob): - """Return an adapter factory for `ob` from `registry`""" - types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob)))) - for t in types: - if t in registry: - return registry[t] - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - py31compat.makedirs(dirname, exist_ok=True) - - -def _bypass_ensure_directory(path): - """Sandbox-bypassing version of ensure_directory()""" - if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') - dirname, filename = split(path) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - try: - mkdir(dirname, 0o755) - except FileExistsError: - pass - - -def split_sections(s): - """Split a string or iterable thereof into (section, content) pairs - - Each ``section`` is a stripped version of the section header ("[section]") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any such lines before the first section - header, they're returned in a first ``section`` of ``None``. - """ - section = None - content = [] - for line in yield_lines(s): - if line.startswith("["): - if line.endswith("]"): - if section or content: - yield section, content - section = line[1:-1].strip() - content = [] - else: - raise ValueError("Invalid section heading", line) - else: - content.append(line) - - # wrap up last segment - yield section, content - - -def _mkstemp(*args, **kw): - old_open = os.open - try: - # temporarily bypass sandboxing - os.open = os_open - return tempfile.mkstemp(*args, **kw) - finally: - # and then put it back - os.open = old_open - - -# Silence the PEP440Warning by default, so that end users don't get hit by it -# randomly just because they use pkg_resources. We want to append the rule -# because we want earlier uses of filterwarnings to take precedence over this -# one. -warnings.filterwarnings("ignore", category=PEP440Warning, append=True) - - -# from jaraco.functools 1.3 -def _call_aside(f, *args, **kwargs): - f(*args, **kwargs) - return f - - -@_call_aside -def _initialize(g=globals()): - "Set up global resource manager (deliberately not state-saved)" - manager = ResourceManager() - g['_manager'] = manager - g.update( - (name, getattr(manager, name)) - for name in dir(manager) - if not name.startswith('_') - ) - - -@_call_aside -def _initialize_master_working_set(): - """ - Prepare the master working set and make the ``require()`` - API available. - - This function has explicit effects on the global state - of pkg_resources. It is intended to be invoked once at - the initialization of this module. - - Invocation by other packages is unsupported and done - at their own risk. 
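parse_requirements() and the Requirement wrapper shown above can be checked in isolation; a small sketch (the requests name and version bounds are arbitrary examples):

    from pkg_resources import Requirement

    req = Requirement.parse("requests>=2.0,<3.0")
    print(req.project_name, req.key)   # requests requests
    print(sorted(req.specs))           # [('<', '3.0'), ('>=', '2.0')]
    print("2.5.1" in req)              # True  -- version satisfies the specifier set
    print("1.0" in req)                # False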
- """ - working_set = WorkingSet._build_master() - _declare_state('object', working_set=working_set) - - require = working_set.require - iter_entry_points = working_set.iter_entry_points - add_activation_listener = working_set.subscribe - run_script = working_set.run_script - # backward compatibility - run_main = run_script - # Activate all distributions already on sys.path with replace=False and - # ensure that all distributions added to the working set in the future - # (e.g. by calling ``require()``) will get activated as well, - # with higher priority (replace=True). - tuple( - dist.activate(replace=False) - for dist in working_set - ) - add_activation_listener( - lambda dist: dist.activate(replace=True), - existing=False, - ) - working_set.entries = [] - # match order - list(map(working_set.add_entry, sys.path)) - globals().update(locals()) - -class PkgResourcesDeprecationWarning(Warning): - """ - Base class for warning about deprecations in ``pkg_resources`` - - This class is not derived from ``DeprecationWarning``, and as such is - visible by default. - """ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyc deleted file mode 100644 index e4f9b676..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/py31compat.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/py31compat.pyc deleted file mode 100644 index 9f78be7c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/py31compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyc deleted file mode 100644 index a9297103..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyc b/env/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyc deleted file mode 100644 index 2d266fd1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyc b/env/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyc deleted file mode 100644 index 59f513be..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyc b/env/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyc deleted file mode 100644 index 414c5318..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pyparsing.py b/env/lib/python2.7/site-packages/pip/_vendor/pyparsing.py deleted file mode 100644 index 9d6a01d5..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pyparsing.py +++ /dev/null @@ -1,6493 +0,0 @@ -#-*- coding: utf-8 -*- -# module pyparsing.py -# -# Copyright (c) 2003-2019 Paul T. 
McGuire -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__doc__ = \ -""" -pyparsing module - Classes and methods to define and execute parsing grammars -============================================================================= - -The pyparsing module is an alternative approach to creating and -executing simple grammars, vs. the traditional lex/yacc approach, or the -use of regular expressions. With pyparsing, you don't need to learn -a new syntax for defining grammars or matching expressions - the parsing -module provides a library of classes that you use to construct the -grammar directly in Python. - -Here is a program to parse "Hello, World!" (or any greeting of the form -``"<salutation>, <addressee>!"``), built up using :class:`Word`, -:class:`Literal`, and :class:`And` elements -(the :class:`'+'<ParserElement.__add__>` operators create :class:`And` expressions, -and the strings are auto-converted to :class:`Literal` expressions):: - - from pip._vendor.pyparsing import Word, alphas - - # define grammar of a greeting - greet = Word(alphas) + "," + Word(alphas) + "!" - - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - -The program outputs the following:: - - Hello, World! -> ['Hello', ',', 'World', '!'] - -The Python representation of the grammar is quite readable, owing to the -self-explanatory class names, and the use of '+', '|' and '^' operators. - -The :class:`ParseResults` object returned from -:class:`ParserElement.parseString` can be -accessed as a nested list, a dictionary, or an object with named -attributes. - -The pyparsing module handles some of the problems that are typically -vexing when writing text parsers: - - - extra or missing whitespace (the above program will also handle - "Hello,World!", "Hello , World !", etc.) - - quoted strings - - embedded comments - - -Getting Started - ------------------ -Visit the classes :class:`ParserElement` and :class:`ParseResults` to -see the base classes that most other pyparsing -classes inherit from. 
Use the docstrings for examples of how to: - - - construct literal match expressions from :class:`Literal` and - :class:`CaselessLiteral` classes - - construct character word-group expressions using the :class:`Word` - class - - see how to create repetitive expressions using :class:`ZeroOrMore` - and :class:`OneOrMore` classes - - use :class:`'+'<And>`, :class:`'|'<MatchFirst>`, :class:`'^'<Or>`, - and :class:`'&'<Each>` operators to combine simple expressions into - more complex ones - - associate names with your parsed results using - :class:`ParserElement.setResultsName` - - find some helpful expression short-cuts like :class:`delimitedList` - and :class:`oneOf` - - find more useful common expressions in the :class:`pyparsing_common` - namespace class -""" - -__version__ = "2.4.0" -__versionTime__ = "07 Apr 2019 18:28 UTC" -__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" - -import string -from weakref import ref as wkref -import copy -import sys -import warnings -import re -import sre_constants -import collections -import pprint -import traceback -import types -from datetime import datetime - -try: - # Python 3 - from itertools import filterfalse -except ImportError: - from itertools import ifilterfalse as filterfalse - -try: - from _thread import RLock -except ImportError: - from threading import RLock - -try: - # Python 3 - from collections.abc import Iterable - from collections.abc import MutableMapping -except ImportError: - # Python 2.7 - from collections import Iterable - from collections import MutableMapping - -try: - from collections import OrderedDict as _OrderedDict -except ImportError: - try: - from ordereddict import OrderedDict as _OrderedDict - except ImportError: - _OrderedDict = None - -try: - from types import SimpleNamespace -except ImportError: - class SimpleNamespace: pass - -# version compatibility configuration -__compat__ = SimpleNamespace() -__compat__.__doc__ = """ - A cross-version compatibility configuration for pyparsing features that will be - released in a future version. By setting values in this configuration to True, - those features can be enabled in prior versions for compatibility development - and testing. 
- - - collect_all_And_tokens - flag to enable fix for Issue #63 that fixes erroneous grouping - of results names when an And expression is nested within an Or or MatchFirst; set to - True to enable bugfix to be released in pyparsing 2.4 -""" -__compat__.collect_all_And_tokens = True - - -#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) - -__all__ = [ '__version__', '__versionTime__', '__author__', '__compat__', -'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', -'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', -'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', -'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', -'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', -'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', -'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char', -'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', -'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', -'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', -'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', -'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', -'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', -'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', -'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', -'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', -'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', -'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set', -] - -system_version = tuple(sys.version_info)[:3] -PY_3 = system_version[0] == 3 -if PY_3: - _MAX_INT = sys.maxsize - basestring = str - unichr = chr - unicode = str - _ustr = str - - # build list of single arg builtins, that can be used as parse actions - singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] - -else: - _MAX_INT = sys.maxint - range = xrange - - def _ustr(obj): - """Drop-in replacement for str(obj) that tries to be Unicode - friendly. It first tries str(obj). If that fails with - a UnicodeEncodeError, then it tries unicode(obj). It then - < returns the unicode object | encodes it with the default - encoding | ... >. - """ - if isinstance(obj,unicode): - return obj - - try: - # If this works, then _ustr(obj) has the same behaviour as str(obj), so - # it won't break any existing code. 
- return str(obj) - - except UnicodeEncodeError: - # Else encode it - ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') - xmlcharref = Regex(r'&#\d+;') - xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) - return xmlcharref.transformString(ret) - - # build list of single arg builtins, tolerant of Python version, that can be used as parse actions - singleArgBuiltins = [] - import __builtin__ - for fname in "sum len sorted reversed list tuple set any all min max".split(): - try: - singleArgBuiltins.append(getattr(__builtin__,fname)) - except AttributeError: - continue - -_generatorType = type((y for y in range(1))) - -def _xml_escape(data): - """Escape &, <, >, ", ', etc. in a string of data.""" - - # ampersand must be replaced first - from_symbols = '&><"\'' - to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) - for from_,to_ in zip(from_symbols, to_symbols): - data = data.replace(from_, to_) - return data - -alphas = string.ascii_uppercase + string.ascii_lowercase -nums = "0123456789" -hexnums = nums + "ABCDEFabcdef" -alphanums = alphas + nums -_bslash = chr(92) -printables = "".join(c for c in string.printable if c not in string.whitespace) - -class ParseBaseException(Exception): - """base exception class for all parsing runtime exceptions""" - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, pstr, loc=0, msg=None, elem=None ): - self.loc = loc - if msg is None: - self.msg = pstr - self.pstr = "" - else: - self.msg = msg - self.pstr = pstr - self.parserElement = elem - self.args = (pstr, loc, msg) - - @classmethod - def _from_exception(cls, pe): - """ - internal factory method to simplify creating one type of ParseException - from another - avoids having __init__ signature conflicts among subclasses - """ - return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) - - def __getattr__( self, aname ): - """supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - """ - if( aname == "lineno" ): - return lineno( self.loc, self.pstr ) - elif( aname in ("col", "column") ): - return col( self.loc, self.pstr ) - elif( aname == "line" ): - return line( self.loc, self.pstr ) - else: - raise AttributeError(aname) - - def __str__( self ): - return "%s (at char %d), (line:%d, col:%d)" % \ - ( self.msg, self.loc, self.lineno, self.column ) - def __repr__( self ): - return _ustr(self) - def markInputline( self, markerString = ">!<" ): - """Extracts the exception line from the input string, and marks - the location of the exception with a special symbol. 
- """ - line_str = self.line - line_column = self.column - 1 - if markerString: - line_str = "".join((line_str[:line_column], - markerString, line_str[line_column:])) - return line_str.strip() - def __dir__(self): - return "lineno col line".split() + dir(type(self)) - -class ParseException(ParseBaseException): - """ - Exception thrown when parse expressions don't match class; - supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - - Example:: - - try: - Word(nums).setName("integer").parseString("ABC") - except ParseException as pe: - print(pe) - print("column: {}".format(pe.col)) - - prints:: - - Expected integer (at char 0), (line:1, col:1) - column: 1 - - """ - - @staticmethod - def explain(exc, depth=16): - """ - Method to take an exception and translate the Python internal traceback into a list - of the pyparsing expressions that caused the exception to be raised. - - Parameters: - - - exc - exception raised during parsing (need not be a ParseException, in support - of Python exceptions that might be raised in a parse action) - - depth (default=16) - number of levels back in the stack trace to list expression - and function names; if None, the full stack trace names will be listed; if 0, only - the failing input line, marker, and exception string will be shown - - Returns a multi-line string listing the ParserElements and/or function names in the - exception's stack trace. - - Note: the diagnostic output will include string representations of the expressions - that failed to parse. These representations will be more helpful if you use `setName` to - give identifiable names to your expressions. Otherwise they will use the default string - forms, which may be cryptic to read. - - explain() is only supported under Python 3. - """ - import inspect - - if depth is None: - depth = sys.getrecursionlimit() - ret = [] - if isinstance(exc, ParseBaseException): - ret.append(exc.line) - ret.append(' ' * (exc.col - 1) + '^') - ret.append("{0}: {1}".format(type(exc).__name__, exc)) - - if depth > 0: - callers = inspect.getinnerframes(exc.__traceback__, context=depth) - seen = set() - for i, ff in enumerate(callers[-depth:]): - frm = ff[0] - - f_self = frm.f_locals.get('self', None) - if isinstance(f_self, ParserElement): - if frm.f_code.co_name not in ('parseImpl', '_parseNoCache'): - continue - if f_self in seen: - continue - seen.add(f_self) - - self_type = type(f_self) - ret.append("{0}.{1} - {2}".format(self_type.__module__, - self_type.__name__, - f_self)) - elif f_self is not None: - self_type = type(f_self) - ret.append("{0}.{1}".format(self_type.__module__, - self_type.__name__)) - else: - code = frm.f_code - if code.co_name in ('wrapper', '<module>'): - continue - - ret.append("{0}".format(code.co_name)) - - depth -= 1 - if not depth: - break - - return '\n'.join(ret) - - -class ParseFatalException(ParseBaseException): - """user-throwable exception thrown when inconsistent parse content - is found; stops all parsing immediately""" - pass - -class ParseSyntaxException(ParseFatalException): - """just like :class:`ParseFatalException`, but thrown internally - when an :class:`ErrorStop<And._ErrorStop>` ('-' operator) indicates - that parsing is to stop immediately because an unbacktrackable - syntax error has been found. 
- """ - pass - -#~ class ReparseException(ParseBaseException): - #~ """Experimental class - parse actions can raise this exception to cause - #~ pyparsing to reparse the input string: - #~ - with a modified input string, and/or - #~ - with a modified start location - #~ Set the values of the ReparseException in the constructor, and raise the - #~ exception in a parse action to cause pyparsing to use the new string/location. - #~ Setting the values as None causes no change to be made. - #~ """ - #~ def __init_( self, newstring, restartLoc ): - #~ self.newParseText = newstring - #~ self.reparseLoc = restartLoc - -class RecursiveGrammarException(Exception): - """exception thrown by :class:`ParserElement.validate` if the - grammar could be improperly recursive - """ - def __init__( self, parseElementList ): - self.parseElementTrace = parseElementList - - def __str__( self ): - return "RecursiveGrammarException: %s" % self.parseElementTrace - -class _ParseResultsWithOffset(object): - def __init__(self,p1,p2): - self.tup = (p1,p2) - def __getitem__(self,i): - return self.tup[i] - def __repr__(self): - return repr(self.tup[0]) - def setOffset(self,i): - self.tup = (self.tup[0],i) - -class ParseResults(object): - """Structured parse results, to provide multiple means of access to - the parsed data: - - - as a list (``len(results)``) - - by list index (``results[0], results[1]``, etc.) - - by attribute (``results.<resultsName>`` - see :class:`ParserElement.setResultsName`) - - Example:: - - integer = Word(nums) - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - # equivalent form: - # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - # parseString returns a ParseResults object - result = date_str.parseString("1999/12/31") - - def test(s, fn=repr): - print("%s -> %s" % (s, fn(eval(s)))) - test("list(result)") - test("result[0]") - test("result['month']") - test("result.day") - test("'month' in result") - test("'minutes' in result") - test("result.dump()", str) - - prints:: - - list(result) -> ['1999', '/', '12', '/', '31'] - result[0] -> '1999' - result['month'] -> '12' - result.day -> '31' - 'month' in result -> True - 'minutes' in result -> False - result.dump() -> ['1999', '/', '12', '/', '31'] - - day: 31 - - month: 12 - - year: 1999 - """ - def __new__(cls, toklist=None, name=None, asList=True, modal=True ): - if isinstance(toklist, cls): - return toklist - retobj = object.__new__(cls) - retobj.__doinit = True - return retobj - - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): - if self.__doinit: - self.__doinit = False - self.__name = None - self.__parent = None - self.__accumNames = {} - self.__asList = asList - self.__modal = modal - if toklist is None: - toklist = [] - if isinstance(toklist, list): - self.__toklist = toklist[:] - elif isinstance(toklist, _generatorType): - self.__toklist = list(toklist) - else: - self.__toklist = [toklist] - self.__tokdict = dict() - - if name is not None and name: - if not modal: - self.__accumNames[name] = 0 - if isinstance(name,int): - name = _ustr(name) # will always return a str, but use _ustr for consistency - self.__name = name - if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): - if isinstance(toklist,basestring): - toklist = [ toklist ] - if 
asList: - if isinstance(toklist,ParseResults): - self[name] = _ParseResultsWithOffset(ParseResults(toklist.__toklist), 0) - else: - self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) - self[name].__name = name - else: - try: - self[name] = toklist[0] - except (KeyError,TypeError,IndexError): - self[name] = toklist - - def __getitem__( self, i ): - if isinstance( i, (int,slice) ): - return self.__toklist[i] - else: - if i not in self.__accumNames: - return self.__tokdict[i][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[i] ]) - - def __setitem__( self, k, v, isinstance=isinstance ): - if isinstance(v,_ParseResultsWithOffset): - self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] - sub = v[0] - elif isinstance(k,(int,slice)): - self.__toklist[k] = v - sub = v - else: - self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] - sub = v - if isinstance(sub,ParseResults): - sub.__parent = wkref(self) - - def __delitem__( self, i ): - if isinstance(i,(int,slice)): - mylen = len( self.__toklist ) - del self.__toklist[i] - - # convert int to slice - if isinstance(i, int): - if i < 0: - i += mylen - i = slice(i, i+1) - # get removed indices - removed = list(range(*i.indices(mylen))) - removed.reverse() - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for j in removed: - for k, (value, position) in enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) - else: - del self.__tokdict[i] - - def __contains__( self, k ): - return k in self.__tokdict - - def __len__( self ): return len( self.__toklist ) - def __bool__(self): return ( not not self.__toklist ) - __nonzero__ = __bool__ - def __iter__( self ): return iter( self.__toklist ) - def __reversed__( self ): return iter( self.__toklist[::-1] ) - def _iterkeys( self ): - if hasattr(self.__tokdict, "iterkeys"): - return self.__tokdict.iterkeys() - else: - return iter(self.__tokdict) - - def _itervalues( self ): - return (self[k] for k in self._iterkeys()) - - def _iteritems( self ): - return ((k, self[k]) for k in self._iterkeys()) - - if PY_3: - keys = _iterkeys - """Returns an iterator of all named result keys.""" - - values = _itervalues - """Returns an iterator of all named result values.""" - - items = _iteritems - """Returns an iterator of all named result key-value tuples.""" - - else: - iterkeys = _iterkeys - """Returns an iterator of all named result keys (Python 2.x only).""" - - itervalues = _itervalues - """Returns an iterator of all named result values (Python 2.x only).""" - - iteritems = _iteritems - """Returns an iterator of all named result key-value tuples (Python 2.x only).""" - - def keys( self ): - """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iterkeys()) - - def values( self ): - """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.itervalues()) - - def items( self ): - """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iteritems()) - - def haskeys( self ): - """Since keys() returns an iterator, this method is helpful in bypassing - code that looks for the existence of any defined results names.""" - return bool(self.__tokdict) - - def pop( self, *args, **kwargs): - """ - Removes and returns item at specified index (default= ``last``). 
- Supports both ``list`` and ``dict`` semantics for ``pop()``. If - passed no argument or an integer argument, it will use ``list`` - semantics and pop tokens from the list of parsed tokens. If passed - a non-integer argument (most likely a string), it will use ``dict`` - semantics and pop the corresponding value from any defined results - names. A second default return value argument is supported, just as in - ``dict.pop()``. - - Example:: - - def remove_first(tokens): - tokens.pop(0) - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] - - label = Word(alphas) - patt = label("LABEL") + OneOrMore(Word(nums)) - print(patt.parseString("AAB 123 321").dump()) - - # Use pop() in a parse action to remove named result (note that corresponding value is not - # removed from list form of results) - def remove_LABEL(tokens): - tokens.pop("LABEL") - return tokens - patt.addParseAction(remove_LABEL) - print(patt.parseString("AAB 123 321").dump()) - - prints:: - - ['AAB', '123', '321'] - - LABEL: AAB - - ['AAB', '123', '321'] - """ - if not args: - args = [-1] - for k,v in kwargs.items(): - if k == 'default': - args = (args[0], v) - else: - raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) or - len(args) == 1 or - args[0] in self): - index = args[0] - ret = self[index] - del self[index] - return ret - else: - defaultvalue = args[1] - return defaultvalue - - def get(self, key, defaultValue=None): - """ - Returns named result matching the given key, or if there is no - such name, then returns the given ``defaultValue`` or ``None`` if no - ``defaultValue`` is specified. - - Similar to ``dict.get()``. - - Example:: - - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString("1999/12/31") - print(result.get("year")) # -> '1999' - print(result.get("hour", "not specified")) # -> 'not specified' - print(result.get("hour")) # -> None - """ - if key in self: - return self[key] - else: - return defaultValue - - def insert( self, index, insStr ): - """ - Inserts new element at location index in the list of parsed tokens. - - Similar to ``list.insert()``. - - Example:: - - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to insert the parse location in the front of the parsed results - def insert_locn(locn, tokens): - tokens.insert(0, locn) - print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] - """ - self.__toklist.insert(index, insStr) - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for k, (value, position) in enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) - - def append( self, item ): - """ - Add single element to end of ParseResults list of elements. - - Example:: - - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to compute the sum of the parsed integers, and add it to the end - def append_sum(tokens): - tokens.append(sum(map(int, tokens))) - print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] - """ - self.__toklist.append(item) - - def extend( self, itemseq ): - """ - Add sequence of elements to end of ParseResults list of elements. 
- - Example:: - - patt = OneOrMore(Word(alphas)) - - # use a parse action to append the reverse of the matched strings, to make a palindrome - def make_palindrome(tokens): - tokens.extend(reversed([t[::-1] for t in tokens])) - return ''.join(tokens) - print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' - """ - if isinstance(itemseq, ParseResults): - self.__iadd__(itemseq) - else: - self.__toklist.extend(itemseq) - - def clear( self ): - """ - Clear all elements and results names. - """ - del self.__toklist[:] - self.__tokdict.clear() - - def __getattr__( self, name ): - try: - return self[name] - except KeyError: - return "" - - if name in self.__tokdict: - if name not in self.__accumNames: - return self.__tokdict[name][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[name] ]) - else: - return "" - - def __add__( self, other ): - ret = self.copy() - ret += other - return ret - - def __iadd__( self, other ): - if other.__tokdict: - offset = len(self.__toklist) - addoffset = lambda a: offset if a<0 else a+offset - otheritems = other.__tokdict.items() - otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) - for (k,vlist) in otheritems for v in vlist] - for k,v in otherdictitems: - self[k] = v - if isinstance(v[0],ParseResults): - v[0].__parent = wkref(self) - - self.__toklist += other.__toklist - self.__accumNames.update( other.__accumNames ) - return self - - def __radd__(self, other): - if isinstance(other,int) and other == 0: - # useful for merging many ParseResults using sum() builtin - return self.copy() - else: - # this may raise a TypeError - so be it - return other + self - - def __repr__( self ): - return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) - - def __str__( self ): - return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' - - def _asStringList( self, sep='' ): - out = [] - for item in self.__toklist: - if out and sep: - out.append(sep) - if isinstance( item, ParseResults ): - out += item._asStringList() - else: - out.append( _ustr(item) ) - return out - - def asList( self ): - """ - Returns the parse results as a nested list of matching tokens, all converted to strings. - - Example:: - - patt = OneOrMore(Word(alphas)) - result = patt.parseString("sldkj lsdkj sldkj") - # even though the result prints in string-like form, it is actually a pyparsing ParseResults - print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj'] - - # Use asList() to create an actual list - result_list = result.asList() - print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj'] - """ - return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] - - def asDict( self ): - """ - Returns the named parse results as a nested dictionary. 
- - Example:: - - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString('12/31/1999') - print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) - - result_dict = result.asDict() - print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'} - - # even though a ParseResults supports dict-like access, sometime you just need to have a dict - import json - print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable - print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} - """ - if PY_3: - item_fn = self.items - else: - item_fn = self.iteritems - - def toItem(obj): - if isinstance(obj, ParseResults): - if obj.haskeys(): - return obj.asDict() - else: - return [toItem(v) for v in obj] - else: - return obj - - return dict((k,toItem(v)) for k,v in item_fn()) - - def copy( self ): - """ - Returns a new copy of a :class:`ParseResults` object. - """ - ret = ParseResults( self.__toklist ) - ret.__tokdict = dict(self.__tokdict.items()) - ret.__parent = self.__parent - ret.__accumNames.update( self.__accumNames ) - ret.__name = self.__name - return ret - - def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): - """ - (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. - """ - nl = "\n" - out = [] - namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() - for v in vlist) - nextLevelIndent = indent + " " - - # collapse out indents if formatting is not desired - if not formatted: - indent = "" - nextLevelIndent = "" - nl = "" - - selfTag = None - if doctag is not None: - selfTag = doctag - else: - if self.__name: - selfTag = self.__name - - if not selfTag: - if namedItemsOnly: - return "" - else: - selfTag = "ITEM" - - out += [ nl, indent, "<", selfTag, ">" ] - - for i,res in enumerate(self.__toklist): - if isinstance(res,ParseResults): - if i in namedItems: - out += [ res.asXML(namedItems[i], - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] - else: - out += [ res.asXML(None, - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] - else: - # individual token, see if there is a name for it - resTag = None - if i in namedItems: - resTag = namedItems[i] - if not resTag: - if namedItemsOnly: - continue - else: - resTag = "ITEM" - xmlBodyText = _xml_escape(_ustr(res)) - out += [ nl, nextLevelIndent, "<", resTag, ">", - xmlBodyText, - "</", resTag, ">" ] - - out += [ nl, indent, "</", selfTag, ">" ] - return "".join(out) - - def __lookup(self,sub): - for k,vlist in self.__tokdict.items(): - for v,loc in vlist: - if sub is v: - return k - return None - - def getName(self): - r""" - Returns the results name for this token expression. Useful when several - different expressions might match at a particular location. 
- - Example:: - - integer = Word(nums) - ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") - house_number_expr = Suppress('#') + Word(nums, alphanums) - user_data = (Group(house_number_expr)("house_number") - | Group(ssn_expr)("ssn") - | Group(integer)("age")) - user_info = OneOrMore(user_data) - - result = user_info.parseString("22 111-22-3333 #221B") - for item in result: - print(item.getName(), ':', item[0]) - - prints:: - - age : 22 - ssn : 111-22-3333 - house_number : 221B - """ - if self.__name: - return self.__name - elif self.__parent: - par = self.__parent() - if par: - return par.__lookup(self) - else: - return None - elif (len(self) == 1 and - len(self.__tokdict) == 1 and - next(iter(self.__tokdict.values()))[0][1] in (0,-1)): - return next(iter(self.__tokdict.keys())) - else: - return None - - def dump(self, indent='', depth=0, full=True): - """ - Diagnostic method for listing out the contents of - a :class:`ParseResults`. Accepts an optional ``indent`` argument so - that this string can be embedded in a nested display of other data. - - Example:: - - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString('12/31/1999') - print(result.dump()) - - prints:: - - ['12', '/', '31', '/', '1999'] - - day: 1999 - - month: 31 - - year: 12 - """ - out = [] - NL = '\n' - out.append( indent+_ustr(self.asList()) ) - if full: - if self.haskeys(): - items = sorted((str(k), v) for k,v in self.items()) - for k,v in items: - if out: - out.append(NL) - out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) - if isinstance(v,ParseResults): - if v: - out.append( v.dump(indent,depth+1) ) - else: - out.append(_ustr(v)) - else: - out.append(repr(v)) - elif any(isinstance(vv,ParseResults) for vv in self): - v = self - for i,vv in enumerate(v): - if isinstance(vv,ParseResults): - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) - else: - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) - - return "".join(out) - - def pprint(self, *args, **kwargs): - """ - Pretty-printer for parsed results as a list, using the - `pprint <https://docs.python.org/3/library/pprint.html>`_ module. - Accepts additional positional or keyword args as defined for - `pprint.pprint <https://docs.python.org/3/library/pprint.html#pprint.pprint>`_ . 
- - Example:: - - ident = Word(alphas, alphanums) - num = Word(nums) - func = Forward() - term = ident | num | Group('(' + func + ')') - func <<= ident + Group(Optional(delimitedList(term))) - result = func.parseString("fna a,b,(fnb c,d,200),100") - result.pprint(width=40) - - prints:: - - ['fna', - ['a', - 'b', - ['(', 'fnb', ['c', 'd', '200'], ')'], - '100']] - """ - pprint.pprint(self.asList(), *args, **kwargs) - - # add support for pickle protocol - def __getstate__(self): - return ( self.__toklist, - ( self.__tokdict.copy(), - self.__parent is not None and self.__parent() or None, - self.__accumNames, - self.__name ) ) - - def __setstate__(self,state): - self.__toklist = state[0] - (self.__tokdict, - par, - inAccumNames, - self.__name) = state[1] - self.__accumNames = {} - self.__accumNames.update(inAccumNames) - if par is not None: - self.__parent = wkref(par) - else: - self.__parent = None - - def __getnewargs__(self): - return self.__toklist, self.__name, self.__asList, self.__modal - - def __dir__(self): - return (dir(type(self)) + list(self.keys())) - -MutableMapping.register(ParseResults) - -def col (loc,strg): - """Returns current column within a string, counting newlines as line separators. - The first column is number 1. - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See - :class:`ParserElement.parseString` for more - information on parsing strings containing ``<TAB>`` s, and suggested - methods to maintain a consistent view of the parsed string, the parse - location, and line and column positions within the parsed string. - """ - s = strg - return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) - -def lineno(loc,strg): - """Returns current line number within a string, counting newlines as line separators. - The first line is number 1. - - Note - the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See :class:`ParserElement.parseString` - for more information on parsing strings containing ``<TAB>`` s, and - suggested methods to maintain a consistent view of the parsed string, the - parse location, and line and column positions within the parsed string. - """ - return strg.count("\n",0,loc) + 1 - -def line( loc, strg ): - """Returns the line of text containing loc within a string, counting newlines as line separators. 
- """ - lastCR = strg.rfind("\n", 0, loc) - nextCR = strg.find("\n", loc) - if nextCR >= 0: - return strg[lastCR+1:nextCR] - else: - return strg[lastCR+1:] - -def _defaultStartDebugAction( instring, loc, expr ): - print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) - -def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): - print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) - -def _defaultExceptionDebugAction( instring, loc, expr, exc ): - print ("Exception raised:" + _ustr(exc)) - -def nullDebugAction(*args): - """'Do-nothing' debug action, to suppress debugging output during parsing.""" - pass - -# Only works on Python 3.x - nonlocal is toxic to Python 2 installs -#~ 'decorator to trim function calls to match the arity of the target' -#~ def _trim_arity(func, maxargs=3): - #~ if func in singleArgBuiltins: - #~ return lambda s,l,t: func(t) - #~ limit = 0 - #~ foundArity = False - #~ def wrapper(*args): - #~ nonlocal limit,foundArity - #~ while 1: - #~ try: - #~ ret = func(*args[limit:]) - #~ foundArity = True - #~ return ret - #~ except TypeError: - #~ if limit == maxargs or foundArity: - #~ raise - #~ limit += 1 - #~ continue - #~ return wrapper - -# this version is Python 2.x-3.x cross-compatible -'decorator to trim function calls to match the arity of the target' -def _trim_arity(func, maxargs=2): - if func in singleArgBuiltins: - return lambda s,l,t: func(t) - limit = [0] - foundArity = [False] - - # traceback return data structure changed in Py3.5 - normalize back to plain tuples - if system_version[:2] >= (3,5): - def extract_stack(limit=0): - # special handling for Python 3.5.0 - extra deep call stack by 1 - offset = -3 if system_version == (3,5,0) else -2 - frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] - return [frame_summary[:2]] - def extract_tb(tb, limit=0): - frames = traceback.extract_tb(tb, limit=limit) - frame_summary = frames[-1] - return [frame_summary[:2]] - else: - extract_stack = traceback.extract_stack - extract_tb = traceback.extract_tb - - # synthesize what would be returned by traceback.extract_stack at the call to - # user's parse action 'func', so that we don't incur call penalty at parse time - - LINE_DIFF = 6 - # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND - # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
- this_line = extract_stack(limit=2)[-1] - pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) - - def wrapper(*args): - while 1: - try: - ret = func(*args[limit[0]:]) - foundArity[0] = True - return ret - except TypeError: - # re-raise TypeErrors if they did not come from our arity testing - if foundArity[0]: - raise - else: - try: - tb = sys.exc_info()[-1] - if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: - raise - finally: - del tb - - if limit[0] <= maxargs: - limit[0] += 1 - continue - raise - - # copy func name to wrapper for sensible debug output - func_name = "<parse action>" - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - wrapper.__name__ = func_name - - return wrapper - -class ParserElement(object): - """Abstract base level parser element class.""" - DEFAULT_WHITE_CHARS = " \n\t\r" - verbose_stacktrace = False - - @staticmethod - def setDefaultWhitespaceChars( chars ): - r""" - Overrides the default whitespace chars - - Example:: - - # default whitespace chars are space, <TAB> and newline - OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] - - # change to just treat newline as significant - ParserElement.setDefaultWhitespaceChars(" \t") - OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] - """ - ParserElement.DEFAULT_WHITE_CHARS = chars - - @staticmethod - def inlineLiteralsUsing(cls): - """ - Set class to be used for inclusion of string literals into a parser. - - Example:: - - # default literal class used is Literal - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] - - - # change to Suppress - ParserElement.inlineLiteralsUsing(Suppress) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] - """ - ParserElement._literalStringClass = cls - - def __init__( self, savelist=False ): - self.parseAction = list() - self.failAction = None - #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall - self.strRepr = None - self.resultsName = None - self.saveAsList = savelist - self.skipWhitespace = True - self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS) - self.copyDefaultWhiteChars = True - self.mayReturnEmpty = False # used when checking for left-recursion - self.keepTabs = False - self.ignoreExprs = list() - self.debug = False - self.streamlined = False - self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index - self.errmsg = "" - self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) - self.debugActions = ( None, None, None ) #custom debug actions - self.re = None - self.callPreparse = True # used to avoid redundant calls to preParse - self.callDuringTry = False - - def copy( self ): - """ - Make a copy of this :class:`ParserElement`. Useful for defining - different parse actions for the same parsing pattern, using copies of - the original parse element. 
- - Example:: - - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") - integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - - print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) - - prints:: - - [5120, 100, 655360, 268435456] - - Equivalent form of ``expr.copy()`` is just ``expr()``:: - - integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - """ - cpy = copy.copy( self ) - cpy.parseAction = self.parseAction[:] - cpy.ignoreExprs = self.ignoreExprs[:] - if self.copyDefaultWhiteChars: - cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS - return cpy - - def setName( self, name ): - """ - Define name for this expression, makes debugging and exception messages clearer. - - Example:: - - Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) - Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) - """ - self.name = name - self.errmsg = "Expected " + self.name - if hasattr(self,"exception"): - self.exception.msg = self.errmsg - return self - - def setResultsName( self, name, listAllMatches=False ): - """ - Define name for referencing matching tokens as a nested attribute - of the returned parse results. - NOTE: this returns a *copy* of the original :class:`ParserElement` object; - this is so that the client can define a basic element, such as an - integer, and reference it in multiple places with different names. - - You can also set results names using the abbreviated syntax, - ``expr("name")`` in place of ``expr.setResultsName("name")`` - - see :class:`__call__`. - - Example:: - - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - - # equivalent form: - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - """ - newself = self.copy() - if name.endswith("*"): - name = name[:-1] - listAllMatches=True - newself.resultsName = name - newself.modalResults = not listAllMatches - return newself - - def setBreak(self,breakFlag = True): - """Method to invoke the Python pdb debugger when this element is - about to be parsed. Set ``breakFlag`` to True to enable, False to - disable. - """ - if breakFlag: - _parseMethod = self._parse - def breaker(instring, loc, doActions=True, callPreParse=True): - import pdb - pdb.set_trace() - return _parseMethod( instring, loc, doActions, callPreParse ) - breaker._originalParseMethod = _parseMethod - self._parse = breaker - else: - if hasattr(self._parse,"_originalParseMethod"): - self._parse = self._parse._originalParseMethod - return self - - def setParseAction( self, *fns, **kwargs ): - """ - Define one or more actions to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as ``fn(s,loc,toks)`` , - ``fn(loc,toks)`` , ``fn(toks)`` , or just ``fn()`` , where: - - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object - - If the functions in fns modify the tokens, they can return them as the return - value from fn, and the modified list of tokens will replace the original. - Otherwise, fn does not need to return any value. 
- - Optional keyword arguments: - - callDuringTry = (default= ``False`` ) indicate if parse action should be run during lookaheads and alternate testing - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See :class:`parseString for more - information on parsing strings containing ``<TAB>`` s, and suggested - methods to maintain a consistent view of the parsed string, the parse - location, and line and column positions within the parsed string. - - Example:: - - integer = Word(nums) - date_str = integer + '/' + integer + '/' + integer - - date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] - - # use parse action to convert to ints at parse time - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - date_str = integer + '/' + integer + '/' + integer - - # note that integer fields are now ints, not strings - date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] - """ - self.parseAction = list(map(_trim_arity, list(fns))) - self.callDuringTry = kwargs.get("callDuringTry", False) - return self - - def addParseAction( self, *fns, **kwargs ): - """ - Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`. - - See examples in :class:`copy`. - """ - self.parseAction += list(map(_trim_arity, list(fns))) - self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) - return self - - def addCondition(self, *fns, **kwargs): - """Add a boolean predicate function to expression's list of parse actions. See - :class:`setParseAction` for function call signatures. Unlike ``setParseAction``, - functions passed to ``addCondition`` need to return boolean success/fail of the condition. - - Optional keyword arguments: - - message = define a custom message to be used in the raised exception - - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException - - Example:: - - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - year_int = integer.copy() - year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") - date_str = year_int + '/' + integer + '/' + integer - - result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) - """ - msg = kwargs.get("message", "failed user-defined condition") - exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException - for fn in fns: - fn = _trim_arity(fn) - def pa(s,l,t): - if not bool(fn(s,l,t)): - raise exc_type(s,l,msg) - self.parseAction.append(pa) - self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) - return self - - def setFailAction( self, fn ): - """Define action to perform if parsing fails at this expression. - Fail acton fn is a callable function that takes the arguments - ``fn(s,loc,expr,err)`` where: - - s = string being parsed - - loc = location where expression match was attempted and failed - - expr = the parse expression that failed - - err = the exception thrown - The function returns no value. 
It may throw :class:`ParseFatalException` - if it is desired to stop parsing immediately.""" - self.failAction = fn - return self - - def _skipIgnorables( self, instring, loc ): - exprsFound = True - while exprsFound: - exprsFound = False - for e in self.ignoreExprs: - try: - while 1: - loc,dummy = e._parse( instring, loc ) - exprsFound = True - except ParseException: - pass - return loc - - def preParse( self, instring, loc ): - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - - if self.skipWhitespace: - wt = self.whiteChars - instrlen = len(instring) - while loc < instrlen and instring[loc] in wt: - loc += 1 - - return loc - - def parseImpl( self, instring, loc, doActions=True ): - return loc, [] - - def postParse( self, instring, loc, tokenlist ): - return tokenlist - - #~ @profile - def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): - debugging = ( self.debug ) #and doActions ) - - if debugging or self.failAction: - #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) - if (self.debugActions[0] ): - self.debugActions[0]( instring, loc, self ) - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - try: - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - except ParseBaseException as err: - #~ print ("Exception raised:", err) - if self.debugActions[2]: - self.debugActions[2]( instring, tokensStart, self, err ) - if self.failAction: - self.failAction( instring, tokensStart, self, err ) - raise - else: - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - if self.mayIndexError or preloc >= len(instring): - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - else: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - - tokens = self.postParse( instring, loc, tokens ) - - retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) - if self.parseAction and (doActions or self.callDuringTry): - if debugging: - try: - for fn in self.parseAction: - try: - tokens = fn( instring, tokensStart, retTokens ) - except IndexError as parse_action_exc: - exc = ParseException("exception raised in parse action") - exc.__cause__ = parse_action_exc - raise exc - - if tokens is not None and tokens is not retTokens: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - except ParseBaseException as err: - #~ print "Exception raised in user parse action:", err - if (self.debugActions[2] ): - self.debugActions[2]( instring, tokensStart, self, err ) - raise - else: - for fn in self.parseAction: - try: - tokens = fn( instring, tokensStart, retTokens ) - except IndexError as parse_action_exc: - exc = ParseException("exception raised in parse action") - exc.__cause__ = parse_action_exc - raise exc - - if tokens is not None and tokens is not retTokens: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - if debugging: - #~ print ("Matched",self,"->",retTokens.asList()) - if (self.debugActions[1] ): - self.debugActions[1]( 
instring, tokensStart, loc, self, retTokens ) - - return loc, retTokens - - def tryParse( self, instring, loc ): - try: - return self._parse( instring, loc, doActions=False )[0] - except ParseFatalException: - raise ParseException( instring, loc, self.errmsg, self) - - def canParseNext(self, instring, loc): - try: - self.tryParse(instring, loc) - except (ParseException, IndexError): - return False - else: - return True - - class _UnboundedCache(object): - def __init__(self): - cache = {} - self.not_in_cache = not_in_cache = object() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - if _OrderedDict is not None: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = _OrderedDict() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(cache) > size: - try: - cache.popitem(False) - except KeyError: - pass - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - else: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = {} - key_fifo = collections.deque([], size) - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(key_fifo) > size: - cache.pop(key_fifo.popleft(), None) - key_fifo.append(key) - - def clear(self): - cache.clear() - key_fifo.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - # argument cache for optimizing repeated calls when backtracking through recursive expressions - packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail - packrat_cache_lock = RLock() - packrat_cache_stats = [0, 0] - - # this method gets repeatedly called during backtracking with the same arguments - - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression - def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): - HIT, MISS = 0, 1 - lookup = (self, instring, loc, callPreParse, doActions) - with ParserElement.packrat_cache_lock: - cache = ParserElement.packrat_cache - value = cache.get(lookup) - if value is cache.not_in_cache: - ParserElement.packrat_cache_stats[MISS] += 1 - try: - value = self._parseNoCache(instring, loc, doActions, callPreParse) - except ParseBaseException as pe: - # cache a copy of the exception, without the traceback - cache.set(lookup, pe.__class__(*pe.args)) - raise - else: - cache.set(lookup, (value[0], value[1].copy())) - return value - else: - ParserElement.packrat_cache_stats[HIT] += 1 - if isinstance(value, Exception): - raise value - return (value[0], value[1].copy()) - - _parse = _parseNoCache - - @staticmethod - def resetCache(): - ParserElement.packrat_cache.clear() - 
ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) - - _packratEnabled = False - @staticmethod - def enablePackrat(cache_size_limit=128): - """Enables "packrat" parsing, which adds memoizing to the parsing logic. - Repeated parse attempts at the same string location (which happens - often in many complex grammars) can immediately return a cached value, - instead of re-executing parsing/validating code. Memoizing is done of - both valid results and parsing exceptions. - - Parameters: - - - cache_size_limit - (default= ``128``) - if an integer value is provided - will limit the size of the packrat cache; if None is passed, then - the cache size will be unbounded; if 0 is passed, the cache will - be effectively disabled. - - This speedup may break existing programs that use parse actions that - have side-effects. For this reason, packrat parsing is disabled when - you first import pyparsing. To activate the packrat feature, your - program must call the class method :class:`ParserElement.enablePackrat`. - For best results, call ``enablePackrat()`` immediately after - importing pyparsing. - - Example:: - - from pip._vendor import pyparsing - pyparsing.ParserElement.enablePackrat() - """ - if not ParserElement._packratEnabled: - ParserElement._packratEnabled = True - if cache_size_limit is None: - ParserElement.packrat_cache = ParserElement._UnboundedCache() - else: - ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) - ParserElement._parse = ParserElement._parseCache - - def parseString( self, instring, parseAll=False ): - """ - Execute the parse expression with the given string. - This is the main interface to the client code, once the complete - expression has been built. - - If you want the grammar to require that the entire input string be - successfully parsed, then set ``parseAll`` to True (equivalent to ending - the grammar with ``StringEnd()``). - - Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string, - in order to report proper column numbers in parse actions. - If the input string contains tabs and - the grammar uses parse actions that use the ``loc`` argument to index into the - string being parsed, you can ensure you have a consistent view of the input - string by: - - - calling ``parseWithTabs`` on your grammar before calling ``parseString`` - (see :class:`parseWithTabs`) - - define your parse action using the full ``(s,loc,toks)`` signature, and - reference the input string using the parse action's ``s`` argument - - explictly expand the tabs in your input string before calling - ``parseString`` - - Example:: - - Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] - Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text - """ - ParserElement.resetCache() - if not self.streamlined: - self.streamline() - #~ self.saveAsList = True - for e in self.ignoreExprs: - e.streamline() - if not self.keepTabs: - instring = instring.expandtabs() - try: - loc, tokens = self._parse( instring, 0 ) - if parseAll: - loc = self.preParse( instring, loc ) - se = Empty() + StringEnd() - se._parse( instring, loc ) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - else: - return tokens - - def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): - """ - Scan the input string for expression matches. 
Each match will return the - matching tokens, start location, and end location. May be called with optional - ``maxMatches`` argument, to clip scanning after 'n' matches are found. If - ``overlap`` is specified, then overlapping matches will be reported. - - Note that the start and end locations are reported relative to the string - being parsed. See :class:`parseString` for more information on parsing - strings with embedded tabs. - - Example:: - - source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" - print(source) - for tokens,start,end in Word(alphas).scanString(source): - print(' '*start + '^'*(end-start)) - print(' '*start + tokens[0]) - - prints:: - - sldjf123lsdjjkf345sldkjf879lkjsfd987 - ^^^^^ - sldjf - ^^^^^^^ - lsdjjkf - ^^^^^^ - sldkjf - ^^^^^^ - lkjsfd - """ - if not self.streamlined: - self.streamline() - for e in self.ignoreExprs: - e.streamline() - - if not self.keepTabs: - instring = _ustr(instring).expandtabs() - instrlen = len(instring) - loc = 0 - preparseFn = self.preParse - parseFn = self._parse - ParserElement.resetCache() - matches = 0 - try: - while loc <= instrlen and matches < maxMatches: - try: - preloc = preparseFn( instring, loc ) - nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) - except ParseException: - loc = preloc+1 - else: - if nextLoc > loc: - matches += 1 - yield tokens, preloc, nextLoc - if overlap: - nextloc = preparseFn( instring, loc ) - if nextloc > loc: - loc = nextLoc - else: - loc += 1 - else: - loc = nextLoc - else: - loc = preloc+1 - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def transformString( self, instring ): - """ - Extension to :class:`scanString`, to modify matching text with modified tokens that may - be returned from a parse action. To use ``transformString``, define a grammar and - attach a parse action to it that modifies the returned token list. - Invoking ``transformString()`` on a target string will then scan for matches, - and replace the matched text patterns according to the logic in the parse - action. ``transformString()`` returns the resulting transformed string. - - Example:: - - wd = Word(alphas) - wd.setParseAction(lambda toks: toks[0].title()) - - print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) - - prints:: - - Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. - """ - out = [] - lastE = 0 - # force preservation of <TAB>s, to minimize unwanted transformation of string, and to - # keep string locs straight between transformString and scanString - self.keepTabs = True - try: - for t,s,e in self.scanString( instring ): - out.append( instring[lastE:s] ) - if t: - if isinstance(t,ParseResults): - out += t.asList() - elif isinstance(t,list): - out += t - else: - out.append(t) - lastE = e - out.append(instring[lastE:]) - out = [o for o in out if o] - return "".join(map(_ustr,_flatten(out))) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def searchString( self, instring, maxMatches=_MAX_INT ): - """ - Another extension to :class:`scanString`, simplifying the access to the tokens found - to match the given parse expression. May be called with optional - ``maxMatches`` argument, to clip searching after 'n' matches are found. 
- - Example:: - - # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters - cap_word = Word(alphas.upper(), alphas.lower()) - - print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) - - # the sum() builtin can be used to merge results into a single ParseResults object - print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) - - prints:: - - [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] - ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] - """ - try: - return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): - """ - Generator method to split a string using the given expression as a separator. - May be called with optional ``maxsplit`` argument, to limit the number of splits; - and the optional ``includeSeparators`` argument (default= ``False``), if the separating - matching text should be included in the split results. - - Example:: - - punc = oneOf(list(".,;:/-!?")) - print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) - - prints:: - - ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] - """ - splits = 0 - last = 0 - for t,s,e in self.scanString(instring, maxMatches=maxsplit): - yield instring[last:s] - if includeSeparators: - yield t[0] - last = e - yield instring[last:] - - def __add__(self, other ): - """ - Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement - converts them to :class:`Literal`s by default. - - Example:: - - greet = Word(alphas) + "," + Word(alphas) + "!" - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - - prints:: - - Hello, World! 
-> ['Hello', ',', 'World', '!'] - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return And( [ self, other ] ) - - def __radd__(self, other ): - """ - Implementation of + operator when left operand is not a :class:`ParserElement` - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other + self - - def __sub__(self, other): - """ - Implementation of - operator, returns :class:`And` with error stop - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return self + And._ErrorStop() + other - - def __rsub__(self, other ): - """ - Implementation of - operator when left operand is not a :class:`ParserElement` - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other - self - - def __mul__(self,other): - """ - Implementation of * operator, allows use of ``expr * 3`` in place of - ``expr + expr + expr``. Expressions may also me multiplied by a 2-integer - tuple, similar to ``{min,max}`` multipliers in regular expressions. Tuples - may also include ``None`` as in: - - ``expr*(n,None)`` or ``expr*(n,)`` is equivalent - to ``expr*n + ZeroOrMore(expr)`` - (read as "at least n instances of ``expr``") - - ``expr*(None,n)`` is equivalent to ``expr*(0,n)`` - (read as "0 to n instances of ``expr``") - - ``expr*(None,None)`` is equivalent to ``ZeroOrMore(expr)`` - - ``expr*(1,None)`` is equivalent to ``OneOrMore(expr)`` - - Note that ``expr*(None,n)`` does not raise an exception if - more than n exprs exist in the input stream; that is, - ``expr*(None,n)`` does not enforce a maximum number of expr - occurrences. 
If this behavior is desired, then write - ``expr*(None,n) + ~expr`` - """ - if isinstance(other,int): - minElements, optElements = other,0 - elif isinstance(other,tuple): - other = (other + (None, None))[:2] - if other[0] is None: - other = (0, other[1]) - if isinstance(other[0],int) and other[1] is None: - if other[0] == 0: - return ZeroOrMore(self) - if other[0] == 1: - return OneOrMore(self) - else: - return self*other[0] + ZeroOrMore(self) - elif isinstance(other[0],int) and isinstance(other[1],int): - minElements, optElements = other - optElements -= minElements - else: - raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) - else: - raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) - - if minElements < 0: - raise ValueError("cannot multiply ParserElement by negative value") - if optElements < 0: - raise ValueError("second tuple value must be greater or equal to first tuple value") - if minElements == optElements == 0: - raise ValueError("cannot multiply ParserElement by 0 or (0,0)") - - if (optElements): - def makeOptionalList(n): - if n>1: - return Optional(self + makeOptionalList(n-1)) - else: - return Optional(self) - if minElements: - if minElements == 1: - ret = self + makeOptionalList(optElements) - else: - ret = And([self]*minElements) + makeOptionalList(optElements) - else: - ret = makeOptionalList(optElements) - else: - if minElements == 1: - ret = self - else: - ret = And([self]*minElements) - return ret - - def __rmul__(self, other): - return self.__mul__(other) - - def __or__(self, other ): - """ - Implementation of | operator - returns :class:`MatchFirst` - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return MatchFirst( [ self, other ] ) - - def __ror__(self, other ): - """ - Implementation of | operator when left operand is not a :class:`ParserElement` - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other | self - - def __xor__(self, other ): - """ - Implementation of ^ operator - returns :class:`Or` - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return Or( [ self, other ] ) - - def __rxor__(self, other ): - """ - Implementation of ^ operator when left operand is not a :class:`ParserElement` - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other ^ self - - def __and__(self, other ): - """ - Implementation of & operator - returns :class:`Each` - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, 
stacklevel=2) - return None - return Each( [ self, other ] ) - - def __rand__(self, other ): - """ - Implementation of & operator when left operand is not a :class:`ParserElement` - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other & self - - def __invert__( self ): - """ - Implementation of ~ operator - returns :class:`NotAny` - """ - return NotAny( self ) - - def __call__(self, name=None): - """ - Shortcut for :class:`setResultsName`, with ``listAllMatches=False``. - - If ``name`` is given with a trailing ``'*'`` character, then ``listAllMatches`` will be - passed as ``True``. - - If ``name` is omitted, same as calling :class:`copy`. - - Example:: - - # these are equivalent - userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") - """ - if name is not None: - return self.setResultsName(name) - else: - return self.copy() - - def suppress( self ): - """ - Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from - cluttering up returned output. - """ - return Suppress( self ) - - def leaveWhitespace( self ): - """ - Disables the skipping of whitespace before matching the characters in the - :class:`ParserElement`'s defined pattern. This is normally only used internally by - the pyparsing module, but may be needed in some whitespace-sensitive grammars. - """ - self.skipWhitespace = False - return self - - def setWhitespaceChars( self, chars ): - """ - Overrides the default whitespace chars - """ - self.skipWhitespace = True - self.whiteChars = chars - self.copyDefaultWhiteChars = False - return self - - def parseWithTabs( self ): - """ - Overrides default behavior to expand ``<TAB>``s to spaces before parsing the input string. - Must be called before ``parseString`` when the input grammar contains elements that - match ``<TAB>`` characters. - """ - self.keepTabs = True - return self - - def ignore( self, other ): - """ - Define expression to be ignored (e.g., comments) while doing pattern - matching; may be called repeatedly, to define multiple comment or other - ignorable patterns. - - Example:: - - patt = OneOrMore(Word(alphas)) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] - - patt.ignore(cStyleComment) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] - """ - if isinstance(other, basestring): - other = Suppress(other) - - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - self.ignoreExprs.append(other) - else: - self.ignoreExprs.append( Suppress( other.copy() ) ) - return self - - def setDebugActions( self, startAction, successAction, exceptionAction ): - """ - Enable display of debugging messages while doing pattern matching. - """ - self.debugActions = (startAction or _defaultStartDebugAction, - successAction or _defaultSuccessDebugAction, - exceptionAction or _defaultExceptionDebugAction) - self.debug = True - return self - - def setDebug( self, flag=True ): - """ - Enable display of debugging messages while doing pattern matching. - Set ``flag`` to True to enable, False to disable. 
- - Example:: - - wd = Word(alphas).setName("alphaword") - integer = Word(nums).setName("numword") - term = wd | integer - - # turn on debugging for wd - wd.setDebug() - - OneOrMore(term).parseString("abc 123 xyz 890") - - prints:: - - Match alphaword at loc 0(1,1) - Matched alphaword -> ['abc'] - Match alphaword at loc 3(1,4) - Exception raised:Expected alphaword (at char 4), (line:1, col:5) - Match alphaword at loc 7(1,8) - Matched alphaword -> ['xyz'] - Match alphaword at loc 11(1,12) - Exception raised:Expected alphaword (at char 12), (line:1, col:13) - Match alphaword at loc 15(1,16) - Exception raised:Expected alphaword (at char 15), (line:1, col:16) - - The output shown is that produced by the default debug actions - custom debug actions can be - specified using :class:`setDebugActions`. Prior to attempting - to match the ``wd`` expression, the debugging message ``"Match <exprname> at loc <n>(<line>,<col>)"`` - is shown. Then if the parse succeeds, a ``"Matched"`` message is shown, or an ``"Exception raised"`` - message is shown. Also note the use of :class:`setName` to assign a human-readable name to the expression, - which makes debugging and exception messages easier to understand - for instance, the default - name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``. - """ - if flag: - self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) - else: - self.debug = False - return self - - def __str__( self ): - return self.name - - def __repr__( self ): - return _ustr(self) - - def streamline( self ): - self.streamlined = True - self.strRepr = None - return self - - def checkRecursion( self, parseElementList ): - pass - - def validate( self, validateTrace=[] ): - """ - Check defined expressions for valid structure, check for infinite recursive definitions. - """ - self.checkRecursion( [] ) - - def parseFile( self, file_or_filename, parseAll=False ): - """ - Execute the parse expression on the given file or filename. - If a filename is specified (instead of a file object), - the entire file is opened, read, and closed before parsing. - """ - try: - file_contents = file_or_filename.read() - except AttributeError: - with open(file_or_filename, "r") as f: - file_contents = f.read() - try: - return self.parseString(file_contents, parseAll) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def __eq__(self,other): - if isinstance(other, ParserElement): - return self is other or vars(self) == vars(other) - elif isinstance(other, basestring): - return self.matches(other) - else: - return super(ParserElement,self)==other - - def __ne__(self,other): - return not (self == other) - - def __hash__(self): - return hash(id(self)) - - def __req__(self,other): - return self == other - - def __rne__(self,other): - return not (self == other) - - def matches(self, testString, parseAll=True): - """ - Method for quick testing of a parser against a test string. Good for simple - inline microtests of sub expressions while building up larger parser. 
- - Parameters: - - testString - to test against this expression for a match - - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests - - Example:: - - expr = Word(nums) - assert expr.matches("100") - """ - try: - self.parseString(_ustr(testString), parseAll=parseAll) - return True - except ParseBaseException: - return False - - def runTests(self, tests, parseAll=True, comment='#', - fullDump=True, printResults=True, failureTests=False, postParse=None): - """ - Execute the parse expression on a series of test strings, showing each - test, the parsed results or where the parse failed. Quick and easy way to - run a parse expression against a list of sample strings. - - Parameters: - - tests - a list of separate test strings, or a multiline string of test strings - - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests - - comment - (default= ``'#'``) - expression for indicating embedded comments in the test - string; pass None to disable comment filtering - - fullDump - (default= ``True``) - dump results as list followed by results names in nested outline; - if False, only dump nested list - - printResults - (default= ``True``) prints test output to stdout - - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing - - postParse - (default= ``None``) optional callback for successful parse results; called as - `fn(test_string, parse_results)` and returns a string to be added to the test output - - Returns: a (success, results) tuple, where success indicates that all tests succeeded - (or failed if ``failureTests`` is True), and the results contain a list of lines of each - test's output - - Example:: - - number_expr = pyparsing_common.number.copy() - - result = number_expr.runTests(''' - # unsigned integer - 100 - # negative integer - -100 - # float with scientific notation - 6.02e23 - # integer with scientific notation - 1e-12 - ''') - print("Success" if result[0] else "Failed!") - - result = number_expr.runTests(''' - # stray character - 100Z - # missing leading digit before '.' - -.100 - # too many '.' - 3.14.159 - ''', failureTests=True) - print("Success" if result[0] else "Failed!") - - prints:: - - # unsigned integer - 100 - [100] - - # negative integer - -100 - [-100] - - # float with scientific notation - 6.02e23 - [6.02e+23] - - # integer with scientific notation - 1e-12 - [1e-12] - - Success - - # stray character - 100Z - ^ - FAIL: Expected end of text (at char 3), (line:1, col:4) - - # missing leading digit before '.' - -.100 - ^ - FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) - - # too many '.' - 3.14.159 - ^ - FAIL: Expected end of text (at char 4), (line:1, col:5) - - Success - - Each test string must be on a single line. If you want to test a string that spans multiple - lines, create a test like this:: - - expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") - - (Note that this is a raw string literal, you must include the leading 'r'.) 
- """ - if isinstance(tests, basestring): - tests = list(map(str.strip, tests.rstrip().splitlines())) - if isinstance(comment, basestring): - comment = Literal(comment) - allResults = [] - comments = [] - success = True - for t in tests: - if comment is not None and comment.matches(t, False) or comments and not t: - comments.append(t) - continue - if not t: - continue - out = ['\n'.join(comments), t] - comments = [] - try: - # convert newline marks to actual newlines, and strip leading BOM if present - NL = Literal(r'\n').addParseAction(replaceWith('\n')).ignore(quotedString) - BOM = '\ufeff' - t = NL.transformString(t.lstrip(BOM)) - result = self.parseString(t, parseAll=parseAll) - out.append(result.dump(full=fullDump)) - success = success and not failureTests - if postParse is not None: - try: - pp_value = postParse(t, result) - if pp_value is not None: - out.append(str(pp_value)) - except Exception as e: - out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e)) - except ParseBaseException as pe: - fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" - if '\n' in t: - out.append(line(pe.loc, t)) - out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) - else: - out.append(' '*pe.loc + '^' + fatal) - out.append("FAIL: " + str(pe)) - success = success and failureTests - result = pe - except Exception as exc: - out.append("FAIL-EXCEPTION: " + str(exc)) - success = success and failureTests - result = exc - - if printResults: - if fullDump: - out.append('') - print('\n'.join(out)) - - allResults.append((t, result)) - - return success, allResults - - -class Token(ParserElement): - """Abstract :class:`ParserElement` subclass, for defining atomic - matching patterns. - """ - def __init__( self ): - super(Token,self).__init__( savelist=False ) - - -class Empty(Token): - """An empty token, will always match. - """ - def __init__( self ): - super(Empty,self).__init__() - self.name = "Empty" - self.mayReturnEmpty = True - self.mayIndexError = False - - -class NoMatch(Token): - """A token that will never match. - """ - def __init__( self ): - super(NoMatch,self).__init__() - self.name = "NoMatch" - self.mayReturnEmpty = True - self.mayIndexError = False - self.errmsg = "Unmatchable token" - - def parseImpl( self, instring, loc, doActions=True ): - raise ParseException(instring, loc, self.errmsg, self) - - -class Literal(Token): - """Token to exactly match a specified string. - - Example:: - - Literal('blah').parseString('blah') # -> ['blah'] - Literal('blah').parseString('blahfooblah') # -> ['blah'] - Literal('blah').parseString('bla') # -> Exception: Expected "blah" - - For case-insensitive matching, use :class:`CaselessLiteral`. - - For keyword matching (force word break before and after the matched string), - use :class:`Keyword` or :class:`CaselessKeyword`. 
- """ - def __init__( self, matchString ): - super(Literal,self).__init__() - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Literal; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.__class__ = Empty - self.name = '"%s"' % _ustr(self.match) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - - # Performance tuning: this routine gets called a *lot* - # if this is a single character match string and the first character matches, - # short-circuit as quickly as possible, and avoid calling startswith - #~ @profile - def parseImpl( self, instring, loc, doActions=True ): - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) -_L = Literal -ParserElement._literalStringClass = Literal - -class Keyword(Token): - """Token to exactly match a specified string as a keyword, that is, - it must be immediately followed by a non-keyword character. Compare - with :class:`Literal`: - - - ``Literal("if")`` will match the leading ``'if'`` in - ``'ifAndOnlyIf'``. - - ``Keyword("if")`` will not; it will only match the leading - ``'if'`` in ``'if x=1'``, or ``'if(y==2)'`` - - Accepts two optional constructor arguments in addition to the - keyword string: - - - ``identChars`` is a string of characters that would be valid - identifier characters, defaulting to all alphanumerics + "_" and - "$" - - ``caseless`` allows case-insensitive matching, default is ``False``. - - Example:: - - Keyword("start").parseString("start") # -> ['start'] - Keyword("start").parseString("starting") # -> Exception - - For case-insensitive matching, use :class:`CaselessKeyword`. 
- """ - DEFAULT_KEYWORD_CHARS = alphanums+"_$" - - def __init__( self, matchString, identChars=None, caseless=False ): - super(Keyword,self).__init__() - if identChars is None: - identChars = Keyword.DEFAULT_KEYWORD_CHARS - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Keyword; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.name = '"%s"' % self.match - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - self.caseless = caseless - if caseless: - self.caselessmatch = matchString.upper() - identChars = identChars.upper() - self.identChars = set(identChars) - - def parseImpl( self, instring, loc, doActions=True ): - if self.caseless: - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and - (loc == 0 or instring[loc-1].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - else: - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and - (loc == 0 or instring[loc-1] not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - - def copy(self): - c = super(Keyword,self).copy() - c.identChars = Keyword.DEFAULT_KEYWORD_CHARS - return c - - @staticmethod - def setDefaultKeywordChars( chars ): - """Overrides the default Keyword chars - """ - Keyword.DEFAULT_KEYWORD_CHARS = chars - -class CaselessLiteral(Literal): - """Token to match a specified string, ignoring case of letters. - Note: the matched results will always be in the case of the given - match string, NOT the case of the input text. - - Example:: - - OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] - - (Contrast with example for :class:`CaselessKeyword`.) - """ - def __init__( self, matchString ): - super(CaselessLiteral,self).__init__( matchString.upper() ) - # Preserve the defining literal. - self.returnString = matchString - self.name = "'%s'" % self.returnString - self.errmsg = "Expected " + self.name - - def parseImpl( self, instring, loc, doActions=True ): - if instring[ loc:loc+self.matchLen ].upper() == self.match: - return loc+self.matchLen, self.returnString - raise ParseException(instring, loc, self.errmsg, self) - -class CaselessKeyword(Keyword): - """ - Caseless version of :class:`Keyword`. - - Example:: - - OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] - - (Contrast with example for :class:`CaselessLiteral`.) - """ - def __init__( self, matchString, identChars=None ): - super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) - -class CloseMatch(Token): - """A variation on :class:`Literal` which matches "close" matches, - that is, strings with at most 'n' mismatching characters. 
- :class:`CloseMatch` takes parameters: - - - ``match_string`` - string to be matched - - ``maxMismatches`` - (``default=1``) maximum number of - mismatches allowed to count as a match - - The results from a successful parse will contain the matched text - from the input string and the following named results: - - - ``mismatches`` - a list of the positions within the - match_string where mismatches were found - - ``original`` - the original match_string used to compare - against the input string - - If ``mismatches`` is an empty list, then the match was an exact - match. - - Example:: - - patt = CloseMatch("ATCATCGAATGGA") - patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) - patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) - - # exact match - patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) - - # close match allowing up to 2 mismatches - patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) - patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) - """ - def __init__(self, match_string, maxMismatches=1): - super(CloseMatch,self).__init__() - self.name = match_string - self.match_string = match_string - self.maxMismatches = maxMismatches - self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) - self.mayIndexError = False - self.mayReturnEmpty = False - - def parseImpl( self, instring, loc, doActions=True ): - start = loc - instrlen = len(instring) - maxloc = start + len(self.match_string) - - if maxloc <= instrlen: - match_string = self.match_string - match_stringloc = 0 - mismatches = [] - maxMismatches = self.maxMismatches - - for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): - src,mat = s_m - if src != mat: - mismatches.append(match_stringloc) - if len(mismatches) > maxMismatches: - break - else: - loc = match_stringloc + 1 - results = ParseResults([instring[start:loc]]) - results['original'] = self.match_string - results['mismatches'] = mismatches - return loc, results - - raise ParseException(instring, loc, self.errmsg, self) - - -class Word(Token): - """Token for matching words composed of allowed character sets. - Defined with string containing all allowed initial characters, an - optional string containing allowed body characters (if omitted, - defaults to the initial character set), and an optional minimum, - maximum, and/or exact length. The default value for ``min`` is - 1 (a minimum value < 1 is not valid); the default values for - ``max`` and ``exact`` are 0, meaning no maximum or exact - length restriction. An optional ``excludeChars`` parameter can - list characters that might be found in the input ``bodyChars`` - string; useful to define a word of all printables except for one or - two characters, for instance. - - :class:`srange` is useful for defining custom character set strings - for defining ``Word`` expressions, using range notation from - regular expression character sets. - - A common mistake is to use :class:`Word` to match a specific literal - string, as in ``Word("Address")``. Remember that :class:`Word` - uses the string argument to define *sets* of matchable characters. - This expression would match "Add", "AAA", "dAred", or any other word - made up of the characters 'A', 'd', 'r', 'e', and 's'. 
To match an - exact literal string, use :class:`Literal` or :class:`Keyword`. - - pyparsing includes helper strings for building Words: - - - :class:`alphas` - - :class:`nums` - - :class:`alphanums` - - :class:`hexnums` - - :class:`alphas8bit` (alphabetic characters in ASCII range 128-255 - - accented, tilded, umlauted, etc.) - - :class:`punc8bit` (non-alphabetic characters in ASCII range - 128-255 - currency, symbols, superscripts, diacriticals, etc.) - - :class:`printables` (any non-whitespace character) - - Example:: - - # a word composed of digits - integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) - - # a word with a leading capital, and zero or more lowercase - capital_word = Word(alphas.upper(), alphas.lower()) - - # hostnames are alphanumeric, with leading alpha, and '-' - hostname = Word(alphas, alphanums+'-') - - # roman numeral (not a strict parser, accepts invalid mix of characters) - roman = Word("IVXLCDM") - - # any string of non-whitespace characters, except for ',' - csv_value = Word(printables, excludeChars=",") - """ - def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): - super(Word,self).__init__() - if excludeChars: - excludeChars = set(excludeChars) - initChars = ''.join(c for c in initChars if c not in excludeChars) - if bodyChars: - bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) - self.initCharsOrig = initChars - self.initChars = set(initChars) - if bodyChars : - self.bodyCharsOrig = bodyChars - self.bodyChars = set(bodyChars) - else: - self.bodyCharsOrig = initChars - self.bodyChars = set(initChars) - - self.maxSpecified = max > 0 - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.asKeyword = asKeyword - - if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): - if self.bodyCharsOrig == self.initCharsOrig: - self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) - elif len(self.initCharsOrig) == 1: - self.reString = "%s[%s]*" % \ - (re.escape(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - else: - self.reString = "[%s][%s]*" % \ - (_escapeRegexRangeChars(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - if self.asKeyword: - self.reString = r"\b"+self.reString+r"\b" - try: - self.re = re.compile( self.reString ) - except Exception: - self.re = None - - def parseImpl( self, instring, loc, doActions=True ): - if self.re: - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - return loc, result.group() - - if instring[loc] not in self.initChars: - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - instrlen = len(instring) - bodychars = self.bodyChars - maxloc = start + self.maxLen - maxloc = min( maxloc, instrlen ) - while loc < maxloc and instring[loc] in bodychars: - loc += 1 - - throwException = False - if loc - start < self.minLen: - throwException = True - elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars: - throwException = True - elif self.asKeyword: - if (start>0 and instring[start-1] in bodychars) or 
(loc<instrlen and instring[loc] in bodychars): - throwException = True - - if throwException: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - def __str__( self ): - try: - return super(Word,self).__str__() - except Exception: - pass - - - if self.strRepr is None: - - def charsAsStr(s): - if len(s)>4: - return s[:4]+"..." - else: - return s - - if ( self.initCharsOrig != self.bodyCharsOrig ): - self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) - else: - self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) - - return self.strRepr - - -class Char(Word): - """A short-cut class for defining ``Word(characters, exact=1)``, - when defining a match of any single character in a string of - characters. - """ - def __init__(self, charset, asKeyword=False, excludeChars=None): - super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars) - self.reString = "[%s]" % _escapeRegexRangeChars(self.initCharsOrig) - self.re = re.compile( self.reString ) - - -class Regex(Token): - r"""Token for matching strings that match a given regular - expression. Defined with string specifying the regular expression in - a form recognized by the stdlib Python `re module <https://docs.python.org/3/library/re.html>`_. - If the given regex contains named groups (defined using ``(?P<name>...)``), - these will be preserved as named parse results. - - Example:: - - realnum = Regex(r"[+-]?\d+\.\d*") - date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') - # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression - roman = Regex(r"M{0,4}(CM|CD|D?{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") - """ - compiledREtype = type(re.compile("[A-Z]")) - def __init__( self, pattern, flags=0, asGroupList=False, asMatch=False): - """The parameters ``pattern`` and ``flags`` are passed - to the ``re.compile()`` function as-is. See the Python - `re module <https://docs.python.org/3/library/re.html>`_ module for an - explanation of the acceptable patterns and flags. 
- """ - super(Regex,self).__init__() - - if isinstance(pattern, basestring): - if not pattern: - warnings.warn("null string passed to Regex; use Empty() instead", - SyntaxWarning, stacklevel=2) - - self.pattern = pattern - self.flags = flags - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % pattern, - SyntaxWarning, stacklevel=2) - raise - - elif isinstance(pattern, Regex.compiledREtype): - self.re = pattern - self.pattern = \ - self.reString = str(pattern) - self.flags = flags - - else: - raise ValueError("Regex may only be constructed with a string or a compiled RE object") - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - self.asGroupList = asGroupList - self.asMatch = asMatch - if self.asGroupList: - self.parseImpl = self.parseImplAsGroupList - if self.asMatch: - self.parseImpl = self.parseImplAsMatch - - def parseImpl(self, instring, loc, doActions=True): - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = ParseResults(result.group()) - d = result.groupdict() - if d: - for k, v in d.items(): - ret[k] = v - return loc, ret - - def parseImplAsGroupList(self, instring, loc, doActions=True): - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = result.groups() - return loc, ret - - def parseImplAsMatch(self, instring, loc, doActions=True): - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = result - return loc, ret - - def __str__( self ): - try: - return super(Regex,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "Re:(%s)" % repr(self.pattern) - - return self.strRepr - - def sub(self, repl): - r""" - Return Regex with an attached parse action to transform the parsed - result as if called using `re.sub(expr, repl, string) <https://docs.python.org/3/library/re.html#re.sub>`_. - - Example:: - - make_html = Regex(r"(\w+):(.*?):").sub(r"<\1>\2</\1>") - print(make_html.transformString("h1:main title:")) - # prints "<h1>main title</h1>" - """ - if self.asGroupList: - warnings.warn("cannot use sub() with Regex(asGroupList=True)", - SyntaxWarning, stacklevel=2) - raise SyntaxError() - - if self.asMatch and callable(repl): - warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", - SyntaxWarning, stacklevel=2) - raise SyntaxError() - - if self.asMatch: - def pa(tokens): - return tokens[0].expand(repl) - else: - def pa(tokens): - return self.re.sub(repl, tokens[0]) - return self.addParseAction(pa) - -class QuotedString(Token): - r""" - Token for matching strings that are delimited by quoting characters. 
- - Defined with the following parameters: - - - quoteChar - string of one or more characters defining the - quote delimiting string - - escChar - character to escape quotes, typically backslash - (default= ``None`` ) - - escQuote - special quote sequence to escape an embedded quote - string (such as SQL's ``""`` to escape an embedded ``"``) - (default= ``None`` ) - - multiline - boolean indicating whether quotes can span - multiple lines (default= ``False`` ) - - unquoteResults - boolean indicating whether the matched text - should be unquoted (default= ``True`` ) - - endQuoteChar - string of one or more characters defining the - end of the quote delimited string (default= ``None`` => same as - quoteChar) - - convertWhitespaceEscapes - convert escaped whitespace - (``'\t'``, ``'\n'``, etc.) to actual whitespace - (default= ``True`` ) - - Example:: - - qs = QuotedString('"') - print(qs.searchString('lsjdf "This is the quote" sldjf')) - complex_qs = QuotedString('{{', endQuoteChar='}}') - print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) - sql_qs = QuotedString('"', escQuote='""') - print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) - - prints:: - - [['This is the quote']] - [['This is the "quote"']] - [['This is the quote with "embedded" quotes']] - """ - def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): - super(QuotedString,self).__init__() - - # remove white space from quote chars - wont work anyway - quoteChar = quoteChar.strip() - if not quoteChar: - warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - if endQuoteChar is None: - endQuoteChar = quoteChar - else: - endQuoteChar = endQuoteChar.strip() - if not endQuoteChar: - warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - self.quoteChar = quoteChar - self.quoteCharLen = len(quoteChar) - self.firstQuoteChar = quoteChar[0] - self.endQuoteChar = endQuoteChar - self.endQuoteCharLen = len(endQuoteChar) - self.escChar = escChar - self.escQuote = escQuote - self.unquoteResults = unquoteResults - self.convertWhitespaceEscapes = convertWhitespaceEscapes - - if multiline: - self.flags = re.MULTILINE | re.DOTALL - self.pattern = r'%s(?:[^%s%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - else: - self.flags = 0 - self.pattern = r'%s(?:[^%s\n\r%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - if len(self.endQuoteChar) > 1: - self.pattern += ( - '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), - _escapeRegexRangeChars(self.endQuoteChar[i])) - for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' - ) - if escQuote: - self.pattern += (r'|(?:%s)' % re.escape(escQuote)) - if escChar: - self.pattern += (r'|(?:%s.)' % re.escape(escChar)) - self.escCharReplacePattern = re.escape(self.escChar)+"(.)" - self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, - SyntaxWarning, stacklevel=2) - raise - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - 
self.mayIndexError = False - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = result.group() - - if self.unquoteResults: - - # strip off quotes - ret = ret[self.quoteCharLen:-self.endQuoteCharLen] - - if isinstance(ret,basestring): - # replace escaped whitespace - if '\\' in ret and self.convertWhitespaceEscapes: - ws_map = { - r'\t' : '\t', - r'\n' : '\n', - r'\f' : '\f', - r'\r' : '\r', - } - for wslit,wschar in ws_map.items(): - ret = ret.replace(wslit, wschar) - - # replace escaped characters - if self.escChar: - ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) - - # replace escaped quotes - if self.escQuote: - ret = ret.replace(self.escQuote, self.endQuoteChar) - - return loc, ret - - def __str__( self ): - try: - return super(QuotedString,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) - - return self.strRepr - - -class CharsNotIn(Token): - """Token for matching words composed of characters *not* in a given - set (will include whitespace in matched characters if not listed in - the provided exclusion set - see example). Defined with string - containing all disallowed characters, and an optional minimum, - maximum, and/or exact length. The default value for ``min`` is - 1 (a minimum value < 1 is not valid); the default values for - ``max`` and ``exact`` are 0, meaning no maximum or exact - length restriction. - - Example:: - - # define a comma-separated-value as anything that is not a ',' - csv_value = CharsNotIn(',') - print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) - - prints:: - - ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] - """ - def __init__( self, notChars, min=1, max=0, exact=0 ): - super(CharsNotIn,self).__init__() - self.skipWhitespace = False - self.notChars = notChars - - if min < 1: - raise ValueError( - "cannot specify a minimum length < 1; use " + - "Optional(CharsNotIn()) if zero-length char group is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = ( self.minLen == 0 ) - self.mayIndexError = False - - def parseImpl( self, instring, loc, doActions=True ): - if instring[loc] in self.notChars: - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - notchars = self.notChars - maxlen = min( start+self.maxLen, len(instring) ) - while loc < maxlen and \ - (instring[loc] not in notchars): - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - def __str__( self ): - try: - return super(CharsNotIn, self).__str__() - except Exception: - pass - - if self.strRepr is None: - if len(self.notChars) > 4: - self.strRepr = "!W:(%s...)" % self.notChars[:4] - else: - self.strRepr = "!W:(%s)" % self.notChars - - return self.strRepr - -class White(Token): - """Special matching class for matching whitespace. Normally, - whitespace is ignored by pyparsing grammars. This class is included - when some whitespace structures are significant. 
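To make the ``CharsNotIn``/``Word`` distinction concrete, a short illustrative sketch (hypothetical field names; installed pyparsing assumed)::

    from pyparsing import CharsNotIn, Word, printables

    # CharsNotIn keeps embedded spaces; Word(printables, ...) stops at whitespace
    text_field = CharsNotIn(";")
    word_field = Word(printables, excludeChars=";")
    print(text_field.parseString("hello world; next"))   # ['hello world']
    print(word_field.parseString("hello world; next"))   # ['hello']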
Define with - a string containing the whitespace characters to be matched; default - is ``" \\t\\r\\n"``. Also takes optional ``min``, - ``max``, and ``exact`` arguments, as defined for the - :class:`Word` class. - """ - whiteStrs = { - ' ' : '<SP>', - '\t': '<TAB>', - '\n': '<LF>', - '\r': '<CR>', - '\f': '<FF>', - 'u\00A0': '<NBSP>', - 'u\1680': '<OGHAM_SPACE_MARK>', - 'u\180E': '<MONGOLIAN_VOWEL_SEPARATOR>', - 'u\2000': '<EN_QUAD>', - 'u\2001': '<EM_QUAD>', - 'u\2002': '<EN_SPACE>', - 'u\2003': '<EM_SPACE>', - 'u\2004': '<THREE-PER-EM_SPACE>', - 'u\2005': '<FOUR-PER-EM_SPACE>', - 'u\2006': '<SIX-PER-EM_SPACE>', - 'u\2007': '<FIGURE_SPACE>', - 'u\2008': '<PUNCTUATION_SPACE>', - 'u\2009': '<THIN_SPACE>', - 'u\200A': '<HAIR_SPACE>', - 'u\200B': '<ZERO_WIDTH_SPACE>', - 'u\202F': '<NNBSP>', - 'u\205F': '<MMSP>', - 'u\3000': '<IDEOGRAPHIC_SPACE>', - } - def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): - super(White,self).__init__() - self.matchWhite = ws - self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) - #~ self.leaveWhitespace() - self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) - self.mayReturnEmpty = True - self.errmsg = "Expected " + self.name - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - def parseImpl( self, instring, loc, doActions=True ): - if instring[loc] not in self.matchWhite: - raise ParseException(instring, loc, self.errmsg, self) - start = loc - loc += 1 - maxloc = start + self.maxLen - maxloc = min( maxloc, len(instring) ) - while loc < maxloc and instring[loc] in self.matchWhite: - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - -class _PositionToken(Token): - def __init__( self ): - super(_PositionToken,self).__init__() - self.name=self.__class__.__name__ - self.mayReturnEmpty = True - self.mayIndexError = False - -class GoToColumn(_PositionToken): - """Token to advance to a specific column of input text; useful for - tabular report scraping. 
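A minimal sketch of the ``White`` token making leading indentation significant (illustrative names; installed pyparsing assumed)::

    from pyparsing import White, Word, alphas

    # require at least four literal spaces before the word
    indented = White(" ", min=4) + Word(alphas)
    print(indented.parseString("    body"))   # ['    ', 'body']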
- """ - def __init__( self, colno ): - super(GoToColumn,self).__init__() - self.col = colno - - def preParse( self, instring, loc ): - if col(loc,instring) != self.col: - instrlen = len(instring) - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : - loc += 1 - return loc - - def parseImpl( self, instring, loc, doActions=True ): - thiscol = col( loc, instring ) - if thiscol > self.col: - raise ParseException( instring, loc, "Text not in expected column", self ) - newloc = loc + self.col - thiscol - ret = instring[ loc: newloc ] - return newloc, ret - - -class LineStart(_PositionToken): - r"""Matches if current position is at the beginning of a line within - the parse string - - Example:: - - test = '''\ - AAA this line - AAA and this line - AAA but not this one - B AAA and definitely not this one - ''' - - for t in (LineStart() + 'AAA' + restOfLine).searchString(test): - print(t) - - prints:: - - ['AAA', ' this line'] - ['AAA', ' and this line'] - - """ - def __init__( self ): - super(LineStart,self).__init__() - self.errmsg = "Expected start of line" - - def parseImpl( self, instring, loc, doActions=True ): - if col(loc, instring) == 1: - return loc, [] - raise ParseException(instring, loc, self.errmsg, self) - -class LineEnd(_PositionToken): - """Matches if current position is at the end of a line within the - parse string - """ - def __init__( self ): - super(LineEnd,self).__init__() - self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) - self.errmsg = "Expected end of line" - - def parseImpl( self, instring, loc, doActions=True ): - if loc<len(instring): - if instring[loc] == "\n": - return loc+1, "\n" - else: - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc+1, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class StringStart(_PositionToken): - """Matches if current position is at the beginning of the parse - string - """ - def __init__( self ): - super(StringStart,self).__init__() - self.errmsg = "Expected start of text" - - def parseImpl( self, instring, loc, doActions=True ): - if loc != 0: - # see if entire string up to here is just whitespace and ignoreables - if loc != self.preParse( instring, 0 ): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class StringEnd(_PositionToken): - """Matches if current position is at the end of the parse string - """ - def __init__( self ): - super(StringEnd,self).__init__() - self.errmsg = "Expected end of text" - - def parseImpl( self, instring, loc, doActions=True ): - if loc < len(instring): - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc+1, [] - elif loc > len(instring): - return loc, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class WordStart(_PositionToken): - """Matches if the current position is at the beginning of a Word, - and is not preceded by any character in a given set of - ``wordChars`` (default= ``printables``). To emulate the - ``\b`` behavior of regular expressions, use - ``WordStart(alphanums)``. ``WordStart`` will also match at - the beginning of the string being parsed, or at the beginning of - a line. 
- """ - def __init__(self, wordChars = printables): - super(WordStart,self).__init__() - self.wordChars = set(wordChars) - self.errmsg = "Not at the start of a word" - - def parseImpl(self, instring, loc, doActions=True ): - if loc != 0: - if (instring[loc-1] in self.wordChars or - instring[loc] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class WordEnd(_PositionToken): - """Matches if the current position is at the end of a Word, and is - not followed by any character in a given set of ``wordChars`` - (default= ``printables``). To emulate the ``\b`` behavior of - regular expressions, use ``WordEnd(alphanums)``. ``WordEnd`` - will also match at the end of the string being parsed, or at the end - of a line. - """ - def __init__(self, wordChars = printables): - super(WordEnd,self).__init__() - self.wordChars = set(wordChars) - self.skipWhitespace = False - self.errmsg = "Not at the end of a word" - - def parseImpl(self, instring, loc, doActions=True ): - instrlen = len(instring) - if instrlen>0 and loc<instrlen: - if (instring[loc] in self.wordChars or - instring[loc-1] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - -class ParseExpression(ParserElement): - """Abstract subclass of ParserElement, for combining and - post-processing parsed tokens. - """ - def __init__( self, exprs, savelist = False ): - super(ParseExpression,self).__init__(savelist) - if isinstance( exprs, _generatorType ): - exprs = list(exprs) - - if isinstance( exprs, basestring ): - self.exprs = [ ParserElement._literalStringClass( exprs ) ] - elif isinstance( exprs, Iterable ): - exprs = list(exprs) - # if sequence of strings provided, wrap with Literal - if all(isinstance(expr, basestring) for expr in exprs): - exprs = map(ParserElement._literalStringClass, exprs) - self.exprs = list(exprs) - else: - try: - self.exprs = list( exprs ) - except TypeError: - self.exprs = [ exprs ] - self.callPreparse = False - - def __getitem__( self, i ): - return self.exprs[i] - - def append( self, other ): - self.exprs.append( other ) - self.strRepr = None - return self - - def leaveWhitespace( self ): - """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on - all contained expressions.""" - self.skipWhitespace = False - self.exprs = [ e.copy() for e in self.exprs ] - for e in self.exprs: - e.leaveWhitespace() - return self - - def ignore( self, other ): - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - super( ParseExpression, self).ignore( other ) - for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) - else: - super( ParseExpression, self).ignore( other ) - for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) - return self - - def __str__( self ): - try: - return super(ParseExpression,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) - return self.strRepr - - def streamline( self ): - super(ParseExpression,self).streamline() - - for e in self.exprs: - e.streamline() - - # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) - # but only if there are no parse actions or resultsNames on the nested And's - # (likewise for Or's and MatchFirst's) - if ( len(self.exprs) == 2 ): - other = self.exprs[0] - if ( isinstance( other, self.__class__ ) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): - self.exprs = 
other.exprs[:] + [ self.exprs[1] ] - self.strRepr = None - self.mayReturnEmpty |= other.mayReturnEmpty - self.mayIndexError |= other.mayIndexError - - other = self.exprs[-1] - if ( isinstance( other, self.__class__ ) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): - self.exprs = self.exprs[:-1] + other.exprs[:] - self.strRepr = None - self.mayReturnEmpty |= other.mayReturnEmpty - self.mayIndexError |= other.mayIndexError - - self.errmsg = "Expected " + _ustr(self) - - return self - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] - for e in self.exprs: - e.validate(tmp) - self.checkRecursion( [] ) - - def copy(self): - ret = super(ParseExpression,self).copy() - ret.exprs = [e.copy() for e in self.exprs] - return ret - -class And(ParseExpression): - """ - Requires all given :class:`ParseExpression` s to be found in the given order. - Expressions may be separated by whitespace. - May be constructed using the ``'+'`` operator. - May also be constructed using the ``'-'`` operator, which will - suppress backtracking. - - Example:: - - integer = Word(nums) - name_expr = OneOrMore(Word(alphas)) - - expr = And([integer("id"),name_expr("name"),integer("age")]) - # more easily written as: - expr = integer("id") + name_expr("name") + integer("age") - """ - - class _ErrorStop(Empty): - def __init__(self, *args, **kwargs): - super(And._ErrorStop,self).__init__(*args, **kwargs) - self.name = '-' - self.leaveWhitespace() - - def __init__( self, exprs, savelist = True ): - super(And,self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.setWhitespaceChars( self.exprs[0].whiteChars ) - self.skipWhitespace = self.exprs[0].skipWhitespace - self.callPreparse = True - - def streamline(self): - super(And, self).streamline() - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - return self - - def parseImpl( self, instring, loc, doActions=True ): - # pass False as last arg to _parse for first element, since we already - # pre-parsed the string as part of our And pre-parsing - loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) - errorStop = False - for e in self.exprs[1:]: - if isinstance(e, And._ErrorStop): - errorStop = True - continue - if errorStop: - try: - loc, exprtokens = e._parse( instring, loc, doActions ) - except ParseSyntaxException: - raise - except ParseBaseException as pe: - pe.__traceback__ = None - raise ParseSyntaxException._from_exception(pe) - except IndexError: - raise ParseSyntaxException(instring, len(instring), self.errmsg, self) - else: - loc, exprtokens = e._parse( instring, loc, doActions ) - if exprtokens or exprtokens.haskeys(): - resultlist += exprtokens - return loc, resultlist - - def __iadd__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #And( [ self, other ] ) - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - if not e.mayReturnEmpty: - break - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - -class Or(ParseExpression): - """Requires that at least one :class:`ParseExpression` is found. If - two expressions match, the expression that matches the longest - string will be used. 
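A brief sketch of ``And`` built with ``'+'`` and the non-backtracking ``'-'`` operator mentioned above (illustrative names; installed pyparsing assumed)::

    from pyparsing import Word, alphas, nums, Suppress

    ident = Word(alphas)
    value = Word(nums)

    # after '=' is seen, '-' stops backtracking, so a bad value is reported
    # as a syntax error at the value rather than as "no match"
    assignment = ident("name") + Suppress("=") - value("value")
    print(assignment.parseString("answer = 42").dump())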
May be constructed using the ``'^'`` - operator. - - Example:: - - # construct Or using '^' operator - - number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums)) - print(number.searchString("123 3.1416 789")) - - prints:: - - [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(Or,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def streamline(self): - super(Or, self).streamline() - if __compat__.collect_all_And_tokens: - self.saveAsList = any(e.saveAsList for e in self.exprs) - return self - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - matches = [] - for e in self.exprs: - try: - loc2 = e.tryParse( instring, loc ) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - else: - # save match among all matches, to retry longest to shortest - matches.append((loc2, e)) - - if matches: - matches.sort(key=lambda x: -x[0]) - for _,e in matches: - try: - return e._parse( instring, loc, doActions ) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - - def __ixor__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #Or( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class MatchFirst(ParseExpression): - """Requires that at least one :class:`ParseExpression` is found. If - two expressions match, the first one listed is the one that will - match. May be constructed using the ``'|'`` operator. - - Example:: - - # construct MatchFirst using '|' operator - - # watch the order of expressions to match - number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) - print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] - - # put more selective expression first - number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) - print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(MatchFirst,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def streamline(self): - super(MatchFirst, self).streamline() - if __compat__.collect_all_And_tokens: - self.saveAsList = any(e.saveAsList for e in self.exprs) - return self - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - for e in self.exprs: - try: - ret = e._parse( instring, loc, doActions ) - return ret - except ParseException as err: - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - - # only got here if no expression matched, raise exception for match that made it the furthest - else: - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - def __ior__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #MatchFirst( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class Each(ParseExpression): - """Requires all given :class:`ParseExpression` s to be found, but in - any order. Expressions may be separated by whitespace. - - May be constructed using the ``'&'`` operator. 
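To contrast ``Or`` and ``MatchFirst`` directly, a small sketch (illustrative; installed pyparsing assumed)::

    from pyparsing import Word, nums, Combine

    integer = Word(nums)
    real = Combine(Word(nums) + "." + Word(nums))

    # '|' (MatchFirst) takes the first alternative that matches at all;
    # '^' (Or) takes the longest match, so ordering only matters for '|'
    print((integer | real).parseString("3.1416"))   # ['3']
    print((integer ^ real).parseString("3.1416"))   # ['3.1416']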
- - Example:: - - color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") - shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") - integer = Word(nums) - shape_attr = "shape:" + shape_type("shape") - posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") - color_attr = "color:" + color("color") - size_attr = "size:" + integer("size") - - # use Each (using operator '&') to accept attributes in any order - # (shape and posn are required, color and size are optional) - shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) - - shape_spec.runTests(''' - shape: SQUARE color: BLACK posn: 100, 120 - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - color:GREEN size:20 shape:TRIANGLE posn:20,40 - ''' - ) - - prints:: - - shape: SQUARE color: BLACK posn: 100, 120 - ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - - color: BLACK - - posn: ['100', ',', '120'] - - x: 100 - - y: 120 - - shape: SQUARE - - - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] - - color: BLUE - - posn: ['50', ',', '80'] - - x: 50 - - y: 80 - - shape: CIRCLE - - size: 50 - - - color: GREEN size: 20 shape: TRIANGLE posn: 20,40 - ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] - - color: GREEN - - posn: ['20', ',', '40'] - - x: 20 - - y: 40 - - shape: TRIANGLE - - size: 20 - """ - def __init__( self, exprs, savelist = True ): - super(Each,self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.skipWhitespace = True - self.initExprGroups = True - self.saveAsList = True - - def streamline(self): - super(Each, self).streamline() - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - return self - - def parseImpl( self, instring, loc, doActions=True ): - if self.initExprGroups: - self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) - opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] - opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] - self.optionals = opt1 + opt2 - self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] - self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] - self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] - self.required += self.multirequired - self.initExprGroups = False - tmpLoc = loc - tmpReqd = self.required[:] - tmpOpt = self.optionals[:] - matchOrder = [] - - keepMatching = True - while keepMatching: - tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired - failed = [] - for e in tmpExprs: - try: - tmpLoc = e.tryParse( instring, tmpLoc ) - except ParseException: - failed.append(e) - else: - matchOrder.append(self.opt1map.get(id(e),e)) - if e in tmpReqd: - tmpReqd.remove(e) - elif e in tmpOpt: - tmpOpt.remove(e) - if len(failed) == len(tmpExprs): - keepMatching = False - - if tmpReqd: - missing = ", ".join(_ustr(e) for e in tmpReqd) - raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) - - # add any unmatched Optionals, in case they have default values defined - matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] - - resultlist = [] - for e in matchOrder: - loc,results = e._parse(instring,loc,doActions) - resultlist.append(results) - - finalResults = sum(resultlist, 
ParseResults([])) - return loc, finalResults - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class ParseElementEnhance(ParserElement): - """Abstract subclass of :class:`ParserElement`, for combining and - post-processing parsed tokens. - """ - def __init__( self, expr, savelist=False ): - super(ParseElementEnhance,self).__init__(savelist) - if isinstance( expr, basestring ): - if issubclass(ParserElement._literalStringClass, Token): - expr = ParserElement._literalStringClass(expr) - else: - expr = ParserElement._literalStringClass(Literal(expr)) - self.expr = expr - self.strRepr = None - if expr is not None: - self.mayIndexError = expr.mayIndexError - self.mayReturnEmpty = expr.mayReturnEmpty - self.setWhitespaceChars( expr.whiteChars ) - self.skipWhitespace = expr.skipWhitespace - self.saveAsList = expr.saveAsList - self.callPreparse = expr.callPreparse - self.ignoreExprs.extend(expr.ignoreExprs) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr is not None: - return self.expr._parse( instring, loc, doActions, callPreParse=False ) - else: - raise ParseException("",loc,self.errmsg,self) - - def leaveWhitespace( self ): - self.skipWhitespace = False - self.expr = self.expr.copy() - if self.expr is not None: - self.expr.leaveWhitespace() - return self - - def ignore( self, other ): - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - else: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - return self - - def streamline( self ): - super(ParseElementEnhance,self).streamline() - if self.expr is not None: - self.expr.streamline() - return self - - def checkRecursion( self, parseElementList ): - if self in parseElementList: - raise RecursiveGrammarException( parseElementList+[self] ) - subRecCheckList = parseElementList[:] + [ self ] - if self.expr is not None: - self.expr.checkRecursion( subRecCheckList ) - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion( [] ) - - def __str__( self ): - try: - return super(ParseElementEnhance,self).__str__() - except Exception: - pass - - if self.strRepr is None and self.expr is not None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) - return self.strRepr - - -class FollowedBy(ParseElementEnhance): - """Lookahead matching of the given parse expression. - ``FollowedBy`` does *not* advance the parsing position within - the input string, it only verifies that the specified parse - expression matches at the current position. ``FollowedBy`` - always returns a null token list. If any results names are defined - in the lookahead expression, those *will* be returned for access by - name. 
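A compact sketch of ``Each`` accepting keyword-style fields in any order (hypothetical field names; installed pyparsing assumed)::

    from pyparsing import Word, alphas, nums, Optional

    width = "w=" + Word(nums)("w")
    height = "h=" + Word(nums)("h")
    label = Optional("label=" + Word(alphas)("label"))

    # '&' builds an Each: required and optional pieces may appear in any order
    size_spec = width & height & label
    print(size_spec.parseString("h=20 label=box w=10").dump())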
- - Example:: - - # use FollowedBy to match a label only if it is followed by a ':' - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() - - prints:: - - [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] - """ - def __init__( self, expr ): - super(FollowedBy,self).__init__(expr) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - _, ret = self.expr._parse(instring, loc, doActions=doActions) - del ret[:] - return loc, ret - - -class PrecededBy(ParseElementEnhance): - """Lookbehind matching of the given parse expression. - ``PrecededBy`` does not advance the parsing position within the - input string, it only verifies that the specified parse expression - matches prior to the current position. ``PrecededBy`` always - returns a null token list, but if a results name is defined on the - given expression, it is returned. - - Parameters: - - - expr - expression that must match prior to the current parse - location - - retreat - (default= ``None``) - (int) maximum number of characters - to lookbehind prior to the current parse location - - If the lookbehind expression is a string, Literal, Keyword, or - a Word or CharsNotIn with a specified exact or maximum length, then - the retreat parameter is not required. Otherwise, retreat must be - specified to give a maximum number of characters to look back from - the current parse position for a lookbehind match. - - Example:: - - # VB-style variable names with type prefixes - int_var = PrecededBy("#") + pyparsing_common.identifier - str_var = PrecededBy("$") + pyparsing_common.identifier - - """ - def __init__(self, expr, retreat=None): - super(PrecededBy, self).__init__(expr) - self.expr = self.expr().leaveWhitespace() - self.mayReturnEmpty = True - self.mayIndexError = False - self.exact = False - if isinstance(expr, str): - retreat = len(expr) - self.exact = True - elif isinstance(expr, (Literal, Keyword)): - retreat = expr.matchLen - self.exact = True - elif isinstance(expr, (Word, CharsNotIn)) and expr.maxLen != _MAX_INT: - retreat = expr.maxLen - self.exact = True - elif isinstance(expr, _PositionToken): - retreat = 0 - self.exact = True - self.retreat = retreat - self.errmsg = "not preceded by " + str(expr) - self.skipWhitespace = False - - def parseImpl(self, instring, loc=0, doActions=True): - if self.exact: - if loc < self.retreat: - raise ParseException(instring, loc, self.errmsg) - start = loc - self.retreat - _, ret = self.expr._parse(instring, start) - else: - # retreat specified a maximum lookbehind window, iterate - test_expr = self.expr + StringEnd() - instring_slice = instring[:loc] - last_expr = ParseException(instring, loc, self.errmsg) - for offset in range(1, min(loc, self.retreat+1)): - try: - _, ret = test_expr._parse(instring_slice, loc-offset) - except ParseBaseException as pbe: - last_expr = pbe - else: - break - else: - raise last_expr - # return empty list of tokens, but preserve any defined results names - del ret[:] - return loc, ret - - -class NotAny(ParseElementEnhance): - """Lookahead to disallow matching with the given parse expression. - ``NotAny`` does *not* advance the parsing position within the - input string, it only verifies that the specified parse expression - does *not* match at the current position. 
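A hedged sketch of the two lookaround elements above, lookbehind with ``PrecededBy`` and lookahead with ``FollowedBy`` (illustrative; installed pyparsing assumed)::

    from pyparsing import PrecededBy, FollowedBy, Word, alphas

    # lookbehind: an identifier counts only when written as "$name"
    dollar_var = PrecededBy("$") + Word(alphas)
    print(dollar_var.searchString("$rate plus tax"))    # [['rate']]

    # lookahead: a word is a label only when a ':' follows it
    label = Word(alphas) + FollowedBy(":")
    print(label.searchString("color: blue size: 10"))   # [['color'], ['size']]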
Also, ``NotAny`` does - *not* skip over leading whitespace. ``NotAny`` always returns - a null token list. May be constructed using the '~' operator. - - Example:: - - AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split()) - - # take care not to mistake keywords for identifiers - ident = ~(AND | OR | NOT) + Word(alphas) - boolean_term = Optional(NOT) + ident - - # very crude boolean expression - to support parenthesis groups and - # operation hierarchy, use infixNotation - boolean_expr = boolean_term + ZeroOrMore((AND | OR) + boolean_term) - - # integers that are followed by "." are actually floats - integer = Word(nums) + ~Char(".") - """ - def __init__( self, expr ): - super(NotAny,self).__init__(expr) - #~ self.leaveWhitespace() - self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs - self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr.canParseNext(instring, loc): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "~{" + _ustr(self.expr) + "}" - - return self.strRepr - -class _MultipleMatch(ParseElementEnhance): - def __init__( self, expr, stopOn=None): - super(_MultipleMatch, self).__init__(expr) - self.saveAsList = True - ender = stopOn - if isinstance(ender, basestring): - ender = ParserElement._literalStringClass(ender) - self.not_ender = ~ender if ender is not None else None - - def parseImpl( self, instring, loc, doActions=True ): - self_expr_parse = self.expr._parse - self_skip_ignorables = self._skipIgnorables - check_ender = self.not_ender is not None - if check_ender: - try_not_ender = self.not_ender.tryParse - - # must be at least one (but first see if we are the stopOn sentinel; - # if so, fail) - if check_ender: - try_not_ender(instring, loc) - loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) - try: - hasIgnoreExprs = (not not self.ignoreExprs) - while 1: - if check_ender: - try_not_ender(instring, loc) - if hasIgnoreExprs: - preloc = self_skip_ignorables( instring, loc ) - else: - preloc = loc - loc, tmptokens = self_expr_parse( instring, preloc, doActions ) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens - except (ParseException,IndexError): - pass - - return loc, tokens - -class OneOrMore(_MultipleMatch): - """Repetition of one or more of the given expression. - - Parameters: - - expr - expression that must match one or more times - - stopOn - (default= ``None``) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example:: - - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: BLACK" - OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] - - # use stopOn attribute for OneOrMore to avoid reading label string as part of the data - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] - - # could also be written as - (attr_expr * (1,)).parseString(text).pprint() - """ - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + _ustr(self.expr) + "}..." - - return self.strRepr - -class ZeroOrMore(_MultipleMatch): - """Optional repetition of zero or more of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - stopOn - (default= ``None``) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example: similar to :class:`OneOrMore` - """ - def __init__( self, expr, stopOn=None): - super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) - except (ParseException,IndexError): - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]..." - - return self.strRepr - -class _NullToken(object): - def __bool__(self): - return False - __nonzero__ = __bool__ - def __str__(self): - return "" - -_optionalNotMatched = _NullToken() -class Optional(ParseElementEnhance): - """Optional matching of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - default (optional) - value to be returned if the optional expression is not found. - - Example:: - - # US postal code can be a 5-digit zip, plus optional 4-digit qualifier - zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) - zip.runTests(''' - # traditional ZIP code - 12345 - - # ZIP+4 form - 12101-0001 - - # invalid ZIP - 98765- - ''') - - prints:: - - # traditional ZIP code - 12345 - ['12345'] - - # ZIP+4 form - 12101-0001 - ['12101-0001'] - - # invalid ZIP - 98765- - ^ - FAIL: Expected end of text (at char 5), (line:1, col:6) - """ - def __init__( self, expr, default=_optionalNotMatched ): - super(Optional,self).__init__( expr, savelist=False ) - self.saveAsList = self.expr.saveAsList - self.defaultValue = default - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - except (ParseException,IndexError): - if self.defaultValue is not _optionalNotMatched: - if self.expr.resultsName: - tokens = ParseResults([ self.defaultValue ]) - tokens[self.expr.resultsName] = self.defaultValue - else: - tokens = [ self.defaultValue ] - else: - tokens = [] - return loc, tokens - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]" - - return self.strRepr - -class SkipTo(ParseElementEnhance): - """Token for skipping over all undefined text until the matched - expression is found. 
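A short sketch of ``Optional`` with a default value, together with ``OneOrMore``/``Group`` (illustrative; installed pyparsing assumed)::

    from pyparsing import Word, nums, Optional, OneOrMore, Group

    # an optional 4-digit suffix, with a default filled in when it is absent
    zip_code = Word(nums, exact=5)("zip") + Optional(Word(nums, exact=4), default="0000")("plus4")
    print(zip_code.parseString("12345"))            # ['12345', '0000']

    # Group keeps each repetition of the pair together
    pair = Group(Word(nums) + Word(nums))
    print(OneOrMore(pair).parseString("1 2 3 4"))   # [['1', '2'], ['3', '4']]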
- - Parameters: - - expr - target expression marking the end of the data to be skipped - - include - (default= ``False``) if True, the target expression is also parsed - (the skipped text and target expression are returned as a 2-element list). - - ignore - (default= ``None``) used to define grammars (typically quoted strings and - comments) that might contain false matches to the target expression - - failOn - (default= ``None``) define expressions that are not allowed to be - included in the skipped test; if found before the target expression is found, - the SkipTo is not a match - - Example:: - - report = ''' - Outstanding Issues Report - 1 Jan 2000 - - # | Severity | Description | Days Open - -----+----------+-------------------------------------------+----------- - 101 | Critical | Intermittent system crash | 6 - 94 | Cosmetic | Spelling error on Login ('log|n') | 14 - 79 | Minor | System slow when running too many reports | 47 - ''' - integer = Word(nums) - SEP = Suppress('|') - # use SkipTo to simply match everything up until the next SEP - # - ignore quoted strings, so that a '|' character inside a quoted string does not match - # - parse action will call token.strip() for each matched token, i.e., the description body - string_data = SkipTo(SEP, ignore=quotedString) - string_data.setParseAction(tokenMap(str.strip)) - ticket_expr = (integer("issue_num") + SEP - + string_data("sev") + SEP - + string_data("desc") + SEP - + integer("days_open")) - - for tkt in ticket_expr.searchString(report): - print tkt.dump() - - prints:: - - ['101', 'Critical', 'Intermittent system crash', '6'] - - days_open: 6 - - desc: Intermittent system crash - - issue_num: 101 - - sev: Critical - ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] - - days_open: 14 - - desc: Spelling error on Login ('log|n') - - issue_num: 94 - - sev: Cosmetic - ['79', 'Minor', 'System slow when running too many reports', '47'] - - days_open: 47 - - desc: System slow when running too many reports - - issue_num: 79 - - sev: Minor - """ - def __init__( self, other, include=False, ignore=None, failOn=None ): - super( SkipTo, self ).__init__( other ) - self.ignoreExpr = ignore - self.mayReturnEmpty = True - self.mayIndexError = False - self.includeMatch = include - self.saveAsList = False - if isinstance(failOn, basestring): - self.failOn = ParserElement._literalStringClass(failOn) - else: - self.failOn = failOn - self.errmsg = "No match found for "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - startloc = loc - instrlen = len(instring) - expr = self.expr - expr_parse = self.expr._parse - self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None - self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - - tmploc = loc - while tmploc <= instrlen: - if self_failOn_canParseNext is not None: - # break if failOn expression matches - if self_failOn_canParseNext(instring, tmploc): - break - - if self_ignoreExpr_tryParse is not None: - # advance past ignore expressions - while 1: - try: - tmploc = self_ignoreExpr_tryParse(instring, tmploc) - except ParseBaseException: - break - - try: - expr_parse(instring, tmploc, doActions=False, callPreParse=False) - except (ParseException, IndexError): - # no match, advance loc in string - tmploc += 1 - else: - # matched skipto expr, done - break - - else: - # ran off the end of the input string without matching skipto expr, fail - raise ParseException(instring, loc, self.errmsg, self) - - # 
build up return values - loc = tmploc - skiptext = instring[startloc:loc] - skipresult = ParseResults(skiptext) - - if self.includeMatch: - loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) - skipresult += mat - - return loc, skipresult - -class Forward(ParseElementEnhance): - """Forward declaration of an expression to be defined later - - used for recursive grammars, such as algebraic infix notation. - When the expression is known, it is assigned to the ``Forward`` - variable using the '<<' operator. - - Note: take care when assigning to ``Forward`` not to overlook - precedence of operators. - - Specifically, '|' has a lower precedence than '<<', so that:: - - fwdExpr << a | b | c - - will actually be evaluated as:: - - (fwdExpr << a) | b | c - - thereby leaving b and c out as parseable alternatives. It is recommended that you - explicitly group the values inserted into the ``Forward``:: - - fwdExpr << (a | b | c) - - Converting to use the '<<=' operator instead will avoid this problem. - - See :class:`ParseResults.pprint` for an example of a recursive - parser created using ``Forward``. - """ - def __init__( self, other=None ): - super(Forward,self).__init__( other, savelist=False ) - - def __lshift__( self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass(other) - self.expr = other - self.strRepr = None - self.mayIndexError = self.expr.mayIndexError - self.mayReturnEmpty = self.expr.mayReturnEmpty - self.setWhitespaceChars( self.expr.whiteChars ) - self.skipWhitespace = self.expr.skipWhitespace - self.saveAsList = self.expr.saveAsList - self.ignoreExprs.extend(self.expr.ignoreExprs) - return self - - def __ilshift__(self, other): - return self << other - - def leaveWhitespace( self ): - self.skipWhitespace = False - return self - - def streamline( self ): - if not self.streamlined: - self.streamlined = True - if self.expr is not None: - self.expr.streamline() - return self - - def validate( self, validateTrace=[] ): - if self not in validateTrace: - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion([]) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - # Avoid infinite recursion by setting a temporary name - self.name = self.__class__.__name__ + ": ..." - - # Use the string representation of main expression. - try: - if self.expr is not None: - retString = _ustr(self.expr) - else: - retString = "None" - finally: - del self.name - return self.__class__.__name__ + ": " + retString - - def copy(self): - if self.expr is not None: - return super(Forward,self).copy() - else: - ret = Forward() - ret <<= self - return ret - -class TokenConverter(ParseElementEnhance): - """ - Abstract subclass of :class:`ParseExpression`, for converting parsed results. - """ - def __init__( self, expr, savelist=False ): - super(TokenConverter,self).__init__( expr )#, savelist ) - self.saveAsList = False - -class Combine(TokenConverter): - """Converter to concatenate all matching tokens to a single string. - By default, the matching patterns must also be contiguous in the - input string; this can be disabled by specifying - ``'adjacent=False'`` in the constructor. - - Example:: - - real = Word(nums) + '.' + Word(nums) - print(real.parseString('3.1416')) # -> ['3', '.', '1416'] - # will also erroneously match the following - print(real.parseString('3. 1416')) # -> ['3', '.', '1416'] - - real = Combine(Word(nums) + '.' 
+ Word(nums)) - print(real.parseString('3.1416')) # -> ['3.1416'] - # no match when there are internal spaces - print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) - """ - def __init__( self, expr, joinString="", adjacent=True ): - super(Combine,self).__init__( expr ) - # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself - if adjacent: - self.leaveWhitespace() - self.adjacent = adjacent - self.skipWhitespace = True - self.joinString = joinString - self.callPreparse = True - - def ignore( self, other ): - if self.adjacent: - ParserElement.ignore(self, other) - else: - super( Combine, self).ignore( other ) - return self - - def postParse( self, instring, loc, tokenlist ): - retToks = tokenlist.copy() - del retToks[:] - retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) - - if self.resultsName and retToks.haskeys(): - return [ retToks ] - else: - return retToks - -class Group(TokenConverter): - """Converter to return the matched tokens as a list - useful for - returning tokens of :class:`ZeroOrMore` and :class:`OneOrMore` expressions. - - Example:: - - ident = Word(alphas) - num = Word(nums) - term = ident | num - func = ident + Optional(delimitedList(term)) - print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] - - func = ident + Group(Optional(delimitedList(term))) - print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] - """ - def __init__( self, expr ): - super(Group,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - return [ tokenlist ] - -class Dict(TokenConverter): - """Converter to return a repetitive expression as a list, but also - as a dictionary. Each element can also be referenced using the first - token in the expression as its key. Useful for tabular report - scraping when the first column can be used as a item key. - - Example:: - - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - # print attributes as plain groups - print(OneOrMore(attr_expr).parseString(text).dump()) - - # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names - result = Dict(OneOrMore(Group(attr_expr))).parseString(text) - print(result.dump()) - - # access named fields as dict entries, or output as dict - print(result['shape']) - print(result.asDict()) - - prints:: - - ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} - - See more examples at :class:`ParseResults` of accessing fields by results name. 
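The ``Forward`` class from the previous hunk carries no code example in its docstring; here is a minimal recursive-grammar sketch using it with ``Group`` (illustrative; installed pyparsing assumed)::

    from pyparsing import Forward, Word, nums, Suppress, Group, ZeroOrMore

    # an expression is a number or a parenthesized list of expressions
    expr = Forward()
    atom = Word(nums)
    expr <<= atom | Group(Suppress("(") + ZeroOrMore(expr) + Suppress(")"))

    print(expr.parseString("(1 (2 3) 4)"))   # [['1', ['2', '3'], '4']]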
- """ - def __init__( self, expr ): - super(Dict,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - for i,tok in enumerate(tokenlist): - if len(tok) == 0: - continue - ikey = tok[0] - if isinstance(ikey,int): - ikey = _ustr(tok[0]).strip() - if len(tok)==1: - tokenlist[ikey] = _ParseResultsWithOffset("",i) - elif len(tok)==2 and not isinstance(tok[1],ParseResults): - tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) - else: - dictvalue = tok.copy() #ParseResults(i) - del dictvalue[0] - if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) - else: - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) - - if self.resultsName: - return [ tokenlist ] - else: - return tokenlist - - -class Suppress(TokenConverter): - """Converter for ignoring the results of a parsed expression. - - Example:: - - source = "a, b, c,d" - wd = Word(alphas) - wd_list1 = wd + ZeroOrMore(',' + wd) - print(wd_list1.parseString(source)) - - # often, delimiters that are useful during parsing are just in the - # way afterward - use Suppress to keep them out of the parsed output - wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) - print(wd_list2.parseString(source)) - - prints:: - - ['a', ',', 'b', ',', 'c', ',', 'd'] - ['a', 'b', 'c', 'd'] - - (See also :class:`delimitedList`.) - """ - def postParse( self, instring, loc, tokenlist ): - return [] - - def suppress( self ): - return self - - -class OnlyOnce(object): - """Wrapper for parse actions, to ensure they are only called once. - """ - def __init__(self, methodCall): - self.callable = _trim_arity(methodCall) - self.called = False - def __call__(self,s,l,t): - if not self.called: - results = self.callable(s,l,t) - self.called = True - return results - raise ParseException(s,l,"") - def reset(self): - self.called = False - -def traceParseAction(f): - """Decorator for debugging parse actions. - - When the parse action is called, this decorator will print - ``">> entering method-name(line:<current_source_line>, <parse_location>, <matched_tokens>)"``. - When the parse action completes, the decorator will print - ``"<<"`` followed by the returned value, or any exception that the parse action raised. - - Example:: - - wd = Word(alphas) - - @traceParseAction - def remove_duplicate_chars(tokens): - return ''.join(sorted(set(''.join(tokens)))) - - wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) - print(wds.parseString("slkdjs sld sldd sdlf sdljf")) - - prints:: - - >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) - <<leaving remove_duplicate_chars (ret: 'dfjkls') - ['dfjkls'] - """ - f = _trim_arity(f) - def z(*paArgs): - thisFunc = f.__name__ - s,l,t = paArgs[-3:] - if len(paArgs)>3: - thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) - try: - ret = f(*paArgs) - except Exception as exc: - sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) - raise - sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) - return ret - try: - z.__name__ = f.__name__ - except AttributeError: - pass - return z - -# -# global helpers -# -def delimitedList( expr, delim=",", combine=False ): - """Helper to define a delimited list of expressions - the delimiter - defaults to ','. 
By default, the list elements and delimiters can - have intervening whitespace, and comments, but this can be - overridden by passing ``combine=True`` in the constructor. If - ``combine`` is set to ``True``, the matching tokens are - returned as a single token string, with the delimiters included; - otherwise, the matching tokens are returned as a list of tokens, - with the delimiters suppressed. - - Example:: - - delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] - delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] - """ - dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." - if combine: - return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) - else: - return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) - -def countedArray( expr, intExpr=None ): - """Helper to define a counted list of expressions. - - This helper defines a pattern of the form:: - - integer expr expr expr... - - where the leading integer tells how many expr expressions follow. - The matched tokens returns the array of expr tokens as a list - the - leading count token is suppressed. - - If ``intExpr`` is specified, it should be a pyparsing expression - that produces an integer value. - - Example:: - - countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] - - # in this parser, the leading integer value is given in binary, - # '10' indicating that 2 values are in the array - binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) - countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] - """ - arrayExpr = Forward() - def countFieldParseAction(s,l,t): - n = t[0] - arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) - return [] - if intExpr is None: - intExpr = Word(nums).setParseAction(lambda t:int(t[0])) - else: - intExpr = intExpr.copy() - intExpr.setName("arrayLen") - intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') - -def _flatten(L): - ret = [] - for i in L: - if isinstance(i,list): - ret.extend(_flatten(i)) - else: - ret.append(i) - return ret - -def matchPreviousLiteral(expr): - """Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks for - a 'repeat' of a previous expression. For example:: - - first = Word(nums) - second = matchPreviousLiteral(first) - matchExpr = first + ":" + second - - will match ``"1:1"``, but not ``"1:2"``. Because this - matches a previous literal, will also match the leading - ``"1:1"`` in ``"1:10"``. If this is not desired, use - :class:`matchPreviousExpr`. Do *not* use with packrat parsing - enabled. - """ - rep = Forward() - def copyTokenToRepeater(s,l,t): - if t: - if len(t) == 1: - rep << t[0] - else: - # flatten t tokens - tflat = _flatten(t.asList()) - rep << And(Literal(tt) for tt in tflat) - else: - rep << Empty() - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def matchPreviousExpr(expr): - """Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks for - a 'repeat' of a previous expression. For example:: - - first = Word(nums) - second = matchPreviousExpr(first) - matchExpr = first + ":" + second - - will match ``"1:1"``, but not ``"1:2"``. 
Because this - matches by expressions, will *not* match the leading ``"1:1"`` - in ``"1:10"``; the expressions are evaluated first, and then - compared, so ``"1"`` is compared with ``"10"``. Do *not* use - with packrat parsing enabled. - """ - rep = Forward() - e2 = expr.copy() - rep <<= e2 - def copyTokenToRepeater(s,l,t): - matchTokens = _flatten(t.asList()) - def mustMatchTheseTokens(s,l,t): - theseTokens = _flatten(t.asList()) - if theseTokens != matchTokens: - raise ParseException("",0,"") - rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def _escapeRegexRangeChars(s): - #~ escape these chars: ^-] - for c in r"\^-]": - s = s.replace(c,_bslash+c) - s = s.replace("\n",r"\n") - s = s.replace("\t",r"\t") - return _ustr(s) - -def oneOf( strs, caseless=False, useRegex=True ): - """Helper to quickly define a set of alternative Literals, and makes - sure to do longest-first testing when there is a conflict, - regardless of the input order, but returns - a :class:`MatchFirst` for best performance. - - Parameters: - - - strs - a string of space-delimited literals, or a collection of - string literals - - caseless - (default= ``False``) - treat all literals as - caseless - - useRegex - (default= ``True``) - as an optimization, will - generate a Regex object; otherwise, will generate - a :class:`MatchFirst` object (if ``caseless=True``, or if - creating a :class:`Regex` raises an exception) - - Example:: - - comp_oper = oneOf("< = > <= >= !=") - var = Word(alphas) - number = Word(nums) - term = var | number - comparison_expr = term + comp_oper + term - print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) - - prints:: - - [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] - """ - if caseless: - isequal = ( lambda a,b: a.upper() == b.upper() ) - masks = ( lambda a,b: b.upper().startswith(a.upper()) ) - parseElementClass = CaselessLiteral - else: - isequal = ( lambda a,b: a == b ) - masks = ( lambda a,b: b.startswith(a) ) - parseElementClass = Literal - - symbols = [] - if isinstance(strs,basestring): - symbols = strs.split() - elif isinstance(strs, Iterable): - symbols = list(strs) - else: - warnings.warn("Invalid argument to oneOf, expected string or iterable", - SyntaxWarning, stacklevel=2) - if not symbols: - return NoMatch() - - i = 0 - while i < len(symbols)-1: - cur = symbols[i] - for j,other in enumerate(symbols[i+1:]): - if ( isequal(other, cur) ): - del symbols[i+j+1] - break - elif ( masks(cur, other) ): - del symbols[i+j+1] - symbols.insert(i,other) - cur = other - break - else: - i += 1 - - if not caseless and useRegex: - #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) - try: - if len(symbols)==len("".join(symbols)): - return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) - else: - return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) - except Exception: - warnings.warn("Exception creating Regex for oneOf, building MatchFirst", - SyntaxWarning, stacklevel=2) - - - # last resort, just use MatchFirst - return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) - -def dictOf( key, value ): - """Helper to easily and clearly define a dictionary by specifying - the respective patterns for the key and value. 
Takes care of - defining the :class:`Dict`, :class:`ZeroOrMore`, and - :class:`Group` tokens in the proper order. The key pattern - can include delimiting markers or punctuation, as long as they are - suppressed, thereby leaving the significant key text. The value - pattern can include named results, so that the :class:`Dict` results - can include named token fields. - - Example:: - - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - print(OneOrMore(attr_expr).parseString(text).dump()) - - attr_label = label - attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) - - # similar to Dict, but simpler call format - result = dictOf(attr_label, attr_value).parseString(text) - print(result.dump()) - print(result['shape']) - print(result.shape) # object attribute access works too - print(result.asDict()) - - prints:: - - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - SQUARE - {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} - """ - return Dict(OneOrMore(Group(key + value))) - -def originalTextFor(expr, asString=True): - """Helper to return the original, untokenized text for a given - expression. Useful to restore the parsed fields of an HTML start - tag into the raw tag text itself, or to revert separate tokens with - intervening whitespace back to the original matching input text. By - default, returns astring containing the original parsed text. - - If the optional ``asString`` argument is passed as - ``False``, then the return value is - a :class:`ParseResults` containing any results names that - were originally matched, and a single token containing the original - matched text from the input string. So if the expression passed to - :class:`originalTextFor` contains expressions with defined - results names, you must set ``asString`` to ``False`` if you - want to preserve those results name values. - - Example:: - - src = "this is test <b> bold <i>text</i> </b> normal text " - for tag in ("b","i"): - opener,closer = makeHTMLTags(tag) - patt = originalTextFor(opener + SkipTo(closer) + closer) - print(patt.searchString(src)[0]) - - prints:: - - ['<b> bold <i>text</i> </b>'] - ['<i>text</i>'] - """ - locMarker = Empty().setParseAction(lambda s,loc,t: loc) - endlocMarker = locMarker.copy() - endlocMarker.callPreparse = False - matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") - if asString: - extractText = lambda s,l,t: s[t._original_start:t._original_end] - else: - def extractText(s,l,t): - t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] - matchExpr.setParseAction(extractText) - matchExpr.ignoreExprs = expr.ignoreExprs - return matchExpr - -def ungroup(expr): - """Helper to undo pyparsing's default grouping of And expressions, - even if all but one are non-empty. - """ - return TokenConverter(expr).addParseAction(lambda t:t[0]) - -def locatedExpr(expr): - """Helper to decorate a returned token with its starting and ending - locations in the input string. 
- - This helper adds the following results names: - - - locn_start = location where matched expression begins - - locn_end = location where matched expression ends - - value = the actual parsed results - - Be careful if the input text contains ``<TAB>`` characters, you - may want to call :class:`ParserElement.parseWithTabs` - - Example:: - - wd = Word(alphas) - for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): - print(match) - - prints:: - - [[0, 'ljsdf', 5]] - [[8, 'lksdjjf', 15]] - [[18, 'lkkjj', 23]] - """ - locator = Empty().setParseAction(lambda s,l,t: l) - return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) - - -# convenience constants for positional expressions -empty = Empty().setName("empty") -lineStart = LineStart().setName("lineStart") -lineEnd = LineEnd().setName("lineEnd") -stringStart = StringStart().setName("stringStart") -stringEnd = StringEnd().setName("stringEnd") - -_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) -_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) -_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) -_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) -_charRange = Group(_singleChar + Suppress("-") + _singleChar) -_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" - -def srange(s): - r"""Helper to easily define string ranges for use in Word - construction. Borrows syntax from regexp '[]' string range - definitions:: - - srange("[0-9]") -> "0123456789" - srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" - srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" - - The input string must be enclosed in []'s, and the returned string - is the expanded character set joined into a single string. The - values enclosed in the []'s may be: - - - a single character - - an escaped character with a leading backslash (such as ``\-`` - or ``\]``) - - an escaped hex character with a leading ``'\x'`` - (``\x21``, which is a ``'!'`` character) (``\0x##`` - is also supported for backwards compatibility) - - an escaped octal character with a leading ``'\0'`` - (``\041``, which is a ``'!'`` character) - - a range of any of the above, separated by a dash (``'a-z'``, - etc.) - - any combination of the above (``'aeiouy'``, - ``'a-zA-Z0-9_$'``, etc.) - """ - _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) - try: - return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) - except Exception: - return "" - -def matchOnlyAtCol(n): - """Helper method for defining parse actions that require matching at - a specific column in the input text. - """ - def verifyCol(strg,locn,toks): - if col(locn,strg) != n: - raise ParseException(strg,locn,"matched token not at column %d" % n) - return verifyCol - -def replaceWith(replStr): - """Helper method for common parse actions that simply return - a literal value. Especially useful when used with - :class:`transformString<ParserElement.transformString>` (). 
- - Example:: - - num = Word(nums).setParseAction(lambda toks: int(toks[0])) - na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) - term = na | num - - OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] - """ - return lambda s,l,t: [replStr] - -def removeQuotes(s,l,t): - """Helper parse action for removing quotation marks from parsed - quoted strings. - - Example:: - - # by default, quotation marks are included in parsed results - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] - - # use removeQuotes to strip quotation marks from parsed results - quotedString.setParseAction(removeQuotes) - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] - """ - return t[0][1:-1] - -def tokenMap(func, *args): - """Helper to define a parse action by mapping a function to all - elements of a ParseResults list. If any additional args are passed, - they are forwarded to the given function as additional arguments - after the token, as in - ``hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))``, - which will convert the parsed data to an integer using base 16. - - Example (compare the last to example in :class:`ParserElement.transformString`:: - - hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) - hex_ints.runTests(''' - 00 11 22 aa FF 0a 0d 1a - ''') - - upperword = Word(alphas).setParseAction(tokenMap(str.upper)) - OneOrMore(upperword).runTests(''' - my kingdom for a horse - ''') - - wd = Word(alphas).setParseAction(tokenMap(str.title)) - OneOrMore(wd).setParseAction(' '.join).runTests(''' - now is the winter of our discontent made glorious summer by this sun of york - ''') - - prints:: - - 00 11 22 aa FF 0a 0d 1a - [0, 17, 34, 170, 255, 10, 13, 26] - - my kingdom for a horse - ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] - - now is the winter of our discontent made glorious summer by this sun of york - ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] - """ - def pa(s,l,t): - return [func(tokn, *args) for tokn in t] - - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - pa.__name__ = func_name - - return pa - -upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) -"""(Deprecated) Helper parse action to convert tokens to upper case. -Deprecated in favor of :class:`pyparsing_common.upcaseTokens`""" - -downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) -"""(Deprecated) Helper parse action to convert tokens to lower case. 
-Deprecated in favor of :class:`pyparsing_common.downcaseTokens`""" - -def _makeTags(tagStr, xml, - suppress_LT=Suppress("<"), - suppress_GT=Suppress(">")): - """Internal helper to construct opening and closing tag expressions, given a tag name""" - if isinstance(tagStr,basestring): - resname = tagStr - tagStr = Keyword(tagStr, caseless=not xml) - else: - resname = tagStr.name - - tagAttrName = Word(alphas,alphanums+"_-:") - if (xml): - tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) - openTag = (suppress_LT - + tagStr("tag") - + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue ))) - + Optional("/", default=[False])("empty").setParseAction(lambda s,l,t:t[0]=='/') - + suppress_GT) - else: - tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printables, excludeChars=">") - openTag = (suppress_LT - + tagStr("tag") - + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens) - + Optional(Suppress("=") + tagAttrValue)))) - + Optional("/",default=[False])("empty").setParseAction(lambda s,l,t:t[0]=='/') - + suppress_GT) - closeTag = Combine(_L("</") + tagStr + ">", adjacent=False) - - openTag.setName("<%s>" % resname) - # add start<tagname> results name in parse action now that ungrouped names are not reported at two levels - openTag.addParseAction(lambda t: t.__setitem__("start"+"".join(resname.replace(":"," ").title().split()), t.copy())) - closeTag = closeTag("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) - openTag.tag = resname - closeTag.tag = resname - openTag.tag_body = SkipTo(closeTag()) - return openTag, closeTag - -def makeHTMLTags(tagStr): - """Helper to construct opening and closing tag expressions for HTML, - given a tag name. Matches tags in either upper or lower case, - attributes with namespaces and with quoted or unquoted values. - - Example:: - - text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' - # makeHTMLTags returns pyparsing expressions for the opening and - # closing tags as a 2-tuple - a,a_end = makeHTMLTags("A") - link_expr = a + SkipTo(a_end)("link_text") + a_end - - for link in link_expr.searchString(text): - # attributes in the <A> tag (like "href" shown here) are - # also accessible as named results - print(link.link_text, '->', link.href) - - prints:: - - pyparsing -> https://github.com/pyparsing/pyparsing/wiki - """ - return _makeTags( tagStr, False ) - -def makeXMLTags(tagStr): - """Helper to construct opening and closing tag expressions for XML, - given a tag name. Matches tags only in the given upper/lower case. - - Example: similar to :class:`makeHTMLTags` - """ - return _makeTags( tagStr, True ) - -def withAttribute(*args,**attrDict): - """Helper to create a validating parse action to be used with start - tags created with :class:`makeXMLTags` or - :class:`makeHTMLTags`. Use ``withAttribute`` to qualify - a starting tag with a required attribute value, to avoid false - matches on common tags such as ``<TD>`` or ``<DIV>``. - - Call ``withAttribute`` with a series of attribute names and - values. 
Specify the list of filter attributes names and values as: - - - keyword arguments, as in ``(align="right")``, or - - as an explicit dict with ``**`` operator, when an attribute - name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}`` - - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align","right"))`` - - For attribute names with a namespace prefix, you must use the second - form. Attribute names are matched insensitive to upper/lower case. - - If just testing for ``class`` (with or without a namespace), use - :class:`withClass`. - - To verify that the attribute exists, but without specifying a value, - pass ``withAttribute.ANY_VALUE`` as the value. - - Example:: - - html = ''' - <div> - Some text - <div type="grid">1 4 0 1 0</div> - <div type="graph">1,3 2,3 1,1</div> - <div>this has no type</div> - </div> - - ''' - div,div_end = makeHTMLTags("div") - - # only match div tag having a type attribute with value "grid" - div_grid = div().setParseAction(withAttribute(type="grid")) - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - # construct a match with any div tag having a type attribute, regardless of the value - div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - - prints:: - - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - if args: - attrs = args[:] - else: - attrs = attrDict.items() - attrs = [(k,v) for k,v in attrs] - def pa(s,l,tokens): - for attrName,attrValue in attrs: - if attrName not in tokens: - raise ParseException(s,l,"no matching attribute " + attrName) - if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: - raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % - (attrName, tokens[attrName], attrValue)) - return pa -withAttribute.ANY_VALUE = object() - -def withClass(classname, namespace=''): - """Simplified version of :class:`withAttribute` when - matching on a div class - made difficult because ``class`` is - a reserved word in Python. - - Example:: - - html = ''' - <div> - Some text - <div class="grid">1 4 0 1 0</div> - <div class="graph">1,3 2,3 1,1</div> - <div>this <div> has no class</div> - </div> - - ''' - div,div_end = makeHTMLTags("div") - div_grid = div().setParseAction(withClass("grid")) - - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - - prints:: - - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - classattr = "%s:class" % namespace if namespace else "class" - return withAttribute(**{classattr : classname}) - -opAssoc = SimpleNamespace() -opAssoc.LEFT = object() -opAssoc.RIGHT = object() - -def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): - """Helper method for constructing grammars of expressions made up of - operators working in a precedence hierarchy. Operators may be unary - or binary, left- or right-associative. Parse actions can also be - attached to operator expressions. The generated parser will also - recognize the use of parentheses to override operator precedences - (see example below). 
- - Note: if you define a deep operator list, you may see performance - issues when using infixNotation. See - :class:`ParserElement.enablePackrat` for a mechanism to potentially - improve your parser performance. - - Parameters: - - baseExpr - expression representing the most basic element for the - nested - - opList - list of tuples, one for each operator precedence level - in the expression grammar; each tuple is of the form ``(opExpr, - numTerms, rightLeftAssoc, parseAction)``, where: - - - opExpr is the pyparsing expression for the operator; may also - be a string, which will be converted to a Literal; if numTerms - is 3, opExpr is a tuple of two expressions, for the two - operators separating the 3 terms - - numTerms is the number of terms for this operator (must be 1, - 2, or 3) - - rightLeftAssoc is the indicator whether the operator is right - or left associative, using the pyparsing-defined constants - ``opAssoc.RIGHT`` and ``opAssoc.LEFT``. - - parseAction is the parse action to be associated with - expressions matching this operator expression (the parse action - tuple member may be omitted); if the parse action is passed - a tuple or list of functions, this is equivalent to calling - ``setParseAction(*fn)`` - (:class:`ParserElement.setParseAction`) - - lpar - expression for matching left-parentheses - (default= ``Suppress('(')``) - - rpar - expression for matching right-parentheses - (default= ``Suppress(')')``) - - Example:: - - # simple example of four-function arithmetic with ints and - # variable names - integer = pyparsing_common.signed_integer - varname = pyparsing_common.identifier - - arith_expr = infixNotation(integer | varname, - [ - ('-', 1, opAssoc.RIGHT), - (oneOf('* /'), 2, opAssoc.LEFT), - (oneOf('+ -'), 2, opAssoc.LEFT), - ]) - - arith_expr.runTests(''' - 5+3*6 - (5+3)*6 - -2--11 - ''', fullDump=False) - - prints:: - - 5+3*6 - [[5, '+', [3, '*', 6]]] - - (5+3)*6 - [[[5, '+', 3], '*', 6]] - - -2--11 - [[['-', 2], '-', ['-', 11]]] - """ - # captive version of FollowedBy that does not do parse actions or capture results names - class _FB(FollowedBy): - def parseImpl(self, instring, loc, doActions=True): - self.expr.tryParse(instring, loc) - return loc, [] - - ret = Forward() - lastExpr = baseExpr | ( lpar + ret + rpar ) - for i,operDef in enumerate(opList): - opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] - termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr - if arity == 3: - if opExpr is None or len(opExpr) != 2: - raise ValueError( - "if numterms=3, opExpr must be a tuple or list of two expressions") - opExpr1, opExpr2 = opExpr - thisExpr = Forward().setName(termName) - if rightLeftAssoc == opAssoc.LEFT: - if arity == 1: - matchExpr = _FB(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) - elif arity == 2: - if opExpr is not None: - matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) - else: - matchExpr = _FB(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) - elif arity == 3: - matchExpr = _FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ - Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - elif rightLeftAssoc == opAssoc.RIGHT: - if arity == 1: - # try to avoid LR with this extra test - if not isinstance(opExpr, Optional): - opExpr = Optional(opExpr) - matchExpr = _FB(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) - elif arity == 2: - if opExpr is not 
None: - matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) - else: - matchExpr = _FB(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) - elif arity == 3: - matchExpr = _FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ - Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - else: - raise ValueError("operator must indicate right or left associativity") - if pa: - if isinstance(pa, (tuple, list)): - matchExpr.setParseAction(*pa) - else: - matchExpr.setParseAction(pa) - thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) - lastExpr = thisExpr - ret <<= lastExpr - return ret - -operatorPrecedence = infixNotation -"""(Deprecated) Former name of :class:`infixNotation`, will be -dropped in a future release.""" - -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") -quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| - Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") -unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") - -def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """Helper method for defining nested lists enclosed in opening and - closing delimiters ("(" and ")" are the default). - - Parameters: - - opener - opening character for a nested list - (default= ``"("``); can also be a pyparsing expression - - closer - closing character for a nested list - (default= ``")"``); can also be a pyparsing expression - - content - expression for items within the nested lists - (default= ``None``) - - ignoreExpr - expression for ignoring opening and closing - delimiters (default= :class:`quotedString`) - - If an expression is not provided for the content argument, the - nested expression will capture all whitespace-delimited content - between delimiters as a list of separate values. - - Use the ``ignoreExpr`` argument to define expressions that may - contain opening or closing characters that should not be treated as - opening or closing characters for nesting, such as quotedString or - a comment expression. Specify multiple expressions using an - :class:`Or` or :class:`MatchFirst`. The default is - :class:`quotedString`, but if no expressions are to be ignored, then - pass ``None`` for this argument. 
- - Example:: - - data_type = oneOf("void int short long char float double") - decl_data_type = Combine(data_type + Optional(Word('*'))) - ident = Word(alphas+'_', alphanums+'_') - number = pyparsing_common.number - arg = Group(decl_data_type + ident) - LPAR,RPAR = map(Suppress, "()") - - code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) - - c_function = (decl_data_type("type") - + ident("name") - + LPAR + Optional(delimitedList(arg), [])("args") + RPAR - + code_body("body")) - c_function.ignore(cStyleComment) - - source_code = ''' - int is_odd(int x) { - return (x%2); - } - - int dec_to_hex(char hchar) { - if (hchar >= '0' && hchar <= '9') { - return (ord(hchar)-ord('0')); - } else { - return (10+ord(hchar)-ord('A')); - } - } - ''' - for func in c_function.searchString(source_code): - print("%(name)s (%(type)s) args: %(args)s" % func) - - - prints:: - - is_odd (int) args: [['int', 'x']] - dec_to_hex (int) args: [['char', 'hchar']] - """ - if opener == closer: - raise ValueError("opening and closing strings cannot be the same") - if content is None: - if isinstance(opener,basestring) and isinstance(closer,basestring): - if len(opener) == 1 and len(closer)==1: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t:t[0].strip())) - else: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - ~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - raise ValueError("opening and closing arguments must be strings if no content expression is given") - ret = Forward() - if ignoreExpr is not None: - ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) - else: - ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) - ret.setName('nested %s%s expression' % (opener,closer)) - return ret - -def indentedBlock(blockStatementExpr, indentStack, indent=True): - """Helper method for defining space-delimited indentation blocks, - such as those used to define block statements in Python source code. - - Parameters: - - - blockStatementExpr - expression defining syntax of statement that - is repeated within the indented block - - indentStack - list created by caller to manage indentation stack - (multiple statementWithIndentedBlock expressions within a single - grammar should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond - the the current level; set to False for block of left-most - statements (default= ``True``) - - A valid block must contain at least one ``blockStatement``. 
- - Example:: - - data = ''' - def A(z): - A1 - B = 100 - G = A2 - A2 - A3 - B - def BB(a,b,c): - BB1 - def BBA(): - bba1 - bba2 - bba3 - C - D - def spam(x,y): - def eggs(z): - pass - ''' - - - indentStack = [1] - stmt = Forward() - - identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") - func_body = indentedBlock(stmt, indentStack) - funcDef = Group( funcDecl + func_body ) - - rvalue = Forward() - funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") - rvalue << (funcCall | identifier | Word(nums)) - assignment = Group(identifier + "=" + rvalue) - stmt << ( funcDef | assignment | identifier ) - - module_body = OneOrMore(stmt) - - parseTree = module_body.parseString(data) - parseTree.pprint() - - prints:: - - [['def', - 'A', - ['(', 'z', ')'], - ':', - [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], - 'B', - ['def', - 'BB', - ['(', 'a', 'b', 'c', ')'], - ':', - [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], - 'C', - 'D', - ['def', - 'spam', - ['(', 'x', 'y', ')'], - ':', - [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] - """ - backup_stack = indentStack[:] - - def reset_stack(): - indentStack[:] = backup_stack - - def checkPeerIndent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if curCol != indentStack[-1]: - if curCol > indentStack[-1]: - raise ParseException(s,l,"illegal nesting") - raise ParseException(s,l,"not a peer entry") - - def checkSubIndent(s,l,t): - curCol = col(l,s) - if curCol > indentStack[-1]: - indentStack.append( curCol ) - else: - raise ParseException(s,l,"not a subentry") - - def checkUnindent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): - raise ParseException(s,l,"not an unindent") - indentStack.pop() - - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) - INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') - PEER = Empty().setParseAction(checkPeerIndent).setName('') - UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') - if indent: - smExpr = Group( Optional(NL) + - #~ FollowedBy(blockStatementExpr) + - INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) - else: - smExpr = Group( Optional(NL) + - (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) - smExpr.setFailAction(lambda a, b, c, d: reset_stack()) - blockStatementExpr.ignore(_bslash + LineEnd()) - return smExpr.setName('indented block') - -alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") -punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") - -anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) -_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) -commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") -def replaceHTMLEntity(t): - """Helper parser action to replace common HTML entities with their special characters""" - return _htmlEntityMap.get(t.entity) - -# it's easy to get these comment structures wrong - they're very common, so may as well make them available -cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") -"Comment of the form ``/* ... */``" - -htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment") -"Comment of the form ``<!-- ... 
-->``" - -restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") -dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") -"Comment of the form ``// ... (to end of line)``" - -cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") -"Comment of either form :class:`cStyleComment` or :class:`dblSlashComment`" - -javaStyleComment = cppStyleComment -"Same as :class:`cppStyleComment`" - -pythonStyleComment = Regex(r"#.*").setName("Python style comment") -"Comment of the form ``# ... (to end of line)``" - -_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + - Optional( Word(" \t") + - ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") -commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") -"""(Deprecated) Predefined expression of 1 or more printable words or -quoted strings, separated by commas. - -This expression is deprecated in favor of :class:`pyparsing_common.comma_separated_list`. -""" - -# some other useful expressions - using lower-case class name since we are really using this as a namespace -class pyparsing_common: - """Here are some common low-level expressions that may be useful in - jump-starting parser development: - - - numeric forms (:class:`integers<integer>`, :class:`reals<real>`, - :class:`scientific notation<sci_real>`) - - common :class:`programming identifiers<identifier>` - - network addresses (:class:`MAC<mac_address>`, - :class:`IPv4<ipv4_address>`, :class:`IPv6<ipv6_address>`) - - ISO8601 :class:`dates<iso8601_date>` and - :class:`datetime<iso8601_datetime>` - - :class:`UUID<uuid>` - - :class:`comma-separated list<comma_separated_list>` - - Parse actions: - - - :class:`convertToInteger` - - :class:`convertToFloat` - - :class:`convertToDate` - - :class:`convertToDatetime` - - :class:`stripHTMLTags` - - :class:`upcaseTokens` - - :class:`downcaseTokens` - - Example:: - - pyparsing_common.number.runTests(''' - # any int or real number, returned as the appropriate type - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.fnumber.runTests(''' - # any int or real number, returned as float - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.hex_integer.runTests(''' - # hex numbers - 100 - FF - ''') - - pyparsing_common.fraction.runTests(''' - # fractions - 1/2 - -3/4 - ''') - - pyparsing_common.mixed_integer.runTests(''' - # mixed fractions - 1 - 1/2 - -3/4 - 1-3/4 - ''') - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(''' - # uuid - 12345678-1234-5678-1234-567812345678 - ''') - - prints:: - - # any int or real number, returned as the appropriate type - 100 - [100] - - -100 - [-100] - - +100 - [100] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # any int or real number, returned as float - 100 - [100.0] - - -100 - [-100.0] - - +100 - [100.0] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # hex numbers - 100 - [256] - - FF - [255] - - # fractions - 1/2 - [0.5] - - -3/4 - [-0.75] - - # mixed fractions - 1 - [1] - - 1/2 - [0.5] - - -3/4 - [-0.75] - - 1-3/4 - [1.75] - - # uuid - 12345678-1234-5678-1234-567812345678 - [UUID('12345678-1234-5678-1234-567812345678')] - """ - - convertToInteger = tokenMap(int) - """ - Parse action for converting parsed integers to Python int - """ - - convertToFloat = tokenMap(float) - """ - Parse action for 
converting parsed numbers to Python float - """ - - integer = Word(nums).setName("integer").setParseAction(convertToInteger) - """expression that parses an unsigned integer, returns an int""" - - hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) - """expression that parses a hexadecimal integer, returns an int""" - - signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) - """expression that parses an integer with optional leading sign, returns an int""" - - fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") - """fractional expression of an integer divided by an integer, returns a float""" - fraction.addParseAction(lambda t: t[0]/t[-1]) - - mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") - """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" - mixed_integer.addParseAction(sum) - - real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) - """expression that parses a floating point number and returns a float""" - - sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) - """expression that parses a floating point number with optional - scientific notation and returns a float""" - - # streamlining this expression makes the docs nicer-looking - number = (sci_real | real | signed_integer).streamline() - """any numeric expression, returns the corresponding Python type""" - - fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) - """any int or real number, returned as float""" - - identifier = Word(alphas+'_', alphanums+'_').setName("identifier") - """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" - - ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") - "IPv4 address (``0.0.0.0 - 255.255.255.255``)" - - _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") - _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) - _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") - ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") - "IPv6 address (long, short, or mixed form)" - - mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") - "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" - - @staticmethod - def convertToDate(fmt="%Y-%m-%d"): - """ - Helper to create a parse action for converting parsed date string to Python datetime.date - - Params - - - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``) - - Example:: - - date_expr = pyparsing_common.iso8601_date.copy() - date_expr.setParseAction(pyparsing_common.convertToDate()) - print(date_expr.parseString("1999-12-31")) - - prints:: - - [datetime.date(1999, 12, 31)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt).date() - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - @staticmethod - def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): - """Helper to create a parse action for converting parsed - datetime string to Python datetime.datetime - - Params - - - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``) - - Example:: - - dt_expr = pyparsing_common.iso8601_datetime.copy() - dt_expr.setParseAction(pyparsing_common.convertToDatetime()) - print(dt_expr.parseString("1999-12-31T23:59:59.999")) - - prints:: - - [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt) - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date") - "ISO8601 date (``yyyy-mm-dd``)" - - iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") - "ISO8601 datetime (``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``" - - uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") - "UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)" - - _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() - @staticmethod - def stripHTMLTags(s, l, tokens): - """Parse action to remove HTML tags from web page HTML source - - Example:: - - # strip HTML links from normal text - text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' - td,td_end = makeHTMLTags("TD") - table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - print(table_text.parseString(text).body) - - Prints:: - - More info at the pyparsing wiki page - """ - return pyparsing_common._html_stripper.transformString(tokens[0]) - - _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') - + Optional( White(" \t") ) ) ).streamline().setName("commaItem") - comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") - """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" - - upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) - """Parse action to convert tokens to upper case.""" - - downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) - """Parse action to convert tokens to lower case.""" - - -class _lazyclassproperty(object): - def __init__(self, fn): - self.fn = fn - self.__doc__ = fn.__doc__ - self.__name__ = fn.__name__ - - def __get__(self, obj, cls): - if cls is None: - cls = type(obj) - if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, 
'_intern', []) for superclass in cls.__mro__[1:]): - cls._intern = {} - attrname = self.fn.__name__ - if attrname not in cls._intern: - cls._intern[attrname] = self.fn(cls) - return cls._intern[attrname] - - -class unicode_set(object): - """ - A set of Unicode characters, for language-specific strings for - ``alphas``, ``nums``, ``alphanums``, and ``printables``. - A unicode_set is defined by a list of ranges in the Unicode character - set, in a class attribute ``_ranges``, such as:: - - _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] - - A unicode set can also be defined using multiple inheritance of other unicode sets:: - - class CJK(Chinese, Japanese, Korean): - pass - """ - _ranges = [] - - @classmethod - def _get_chars_for_ranges(cls): - ret = [] - for cc in cls.__mro__: - if cc is unicode_set: - break - for rr in cc._ranges: - ret.extend(range(rr[0], rr[-1]+1)) - return [unichr(c) for c in sorted(set(ret))] - - @_lazyclassproperty - def printables(cls): - "all non-whitespace characters in this range" - return u''.join(filterfalse(unicode.isspace, cls._get_chars_for_ranges())) - - @_lazyclassproperty - def alphas(cls): - "all alphabetic characters in this range" - return u''.join(filter(unicode.isalpha, cls._get_chars_for_ranges())) - - @_lazyclassproperty - def nums(cls): - "all numeric digit characters in this range" - return u''.join(filter(unicode.isdigit, cls._get_chars_for_ranges())) - - @_lazyclassproperty - def alphanums(cls): - "all alphanumeric characters in this range" - return cls.alphas + cls.nums - - -class pyparsing_unicode(unicode_set): - """ - A namespace class for defining common language unicode_sets. - """ - _ranges = [(32, sys.maxunicode)] - - class Latin1(unicode_set): - "Unicode set for Latin-1 Unicode Character Range" - _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] - - class LatinA(unicode_set): - "Unicode set for Latin-A Unicode Character Range" - _ranges = [(0x0100, 0x017f),] - - class LatinB(unicode_set): - "Unicode set for Latin-B Unicode Character Range" - _ranges = [(0x0180, 0x024f),] - - class Greek(unicode_set): - "Unicode set for Greek Unicode Character Ranges" - _ranges = [ - (0x0370, 0x03ff), (0x1f00, 0x1f15), (0x1f18, 0x1f1d), (0x1f20, 0x1f45), (0x1f48, 0x1f4d), - (0x1f50, 0x1f57), (0x1f59,), (0x1f5b,), (0x1f5d,), (0x1f5f, 0x1f7d), (0x1f80, 0x1fb4), (0x1fb6, 0x1fc4), - (0x1fc6, 0x1fd3), (0x1fd6, 0x1fdb), (0x1fdd, 0x1fef), (0x1ff2, 0x1ff4), (0x1ff6, 0x1ffe), - ] - - class Cyrillic(unicode_set): - "Unicode set for Cyrillic Unicode Character Range" - _ranges = [(0x0400, 0x04ff)] - - class Chinese(unicode_set): - "Unicode set for Chinese Unicode Character Range" - _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f), ] - - class Japanese(unicode_set): - "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" - _ranges = [ ] - - class Kanji(unicode_set): - "Unicode set for Kanji Unicode Character Range" - _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f), ] - - class Hiragana(unicode_set): - "Unicode set for Hiragana Unicode Character Range" - _ranges = [(0x3040, 0x309f), ] - - class Katakana(unicode_set): - "Unicode set for Katakana Unicode Character Range" - _ranges = [(0x30a0, 0x30ff), ] - - class Korean(unicode_set): - "Unicode set for Korean Unicode Character Range" - _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f), ] - - class CJK(Chinese, Japanese, Korean): - "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range" 
- pass - - class Thai(unicode_set): - "Unicode set for Thai Unicode Character Range" - _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b), ] - - class Arabic(unicode_set): - "Unicode set for Arabic Unicode Character Range" - _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f), ] - - class Hebrew(unicode_set): - "Unicode set for Hebrew Unicode Character Range" - _ranges = [(0x0590, 0x05ff), ] - - class Devanagari(unicode_set): - "Unicode set for Devanagari Unicode Character Range" - _ranges = [(0x0900, 0x097f), (0xa8e0, 0xa8ff)] - -pyparsing_unicode.Japanese._ranges = (pyparsing_unicode.Japanese.Kanji._ranges - + pyparsing_unicode.Japanese.Hiragana._ranges - + pyparsing_unicode.Japanese.Katakana._ranges) - -# define ranges in language character sets -if PY_3: - setattr(pyparsing_unicode, "العربية", pyparsing_unicode.Arabic) - setattr(pyparsing_unicode, "中文", pyparsing_unicode.Chinese) - setattr(pyparsing_unicode, "кириллица", pyparsing_unicode.Cyrillic) - setattr(pyparsing_unicode, "Ελληνικά", pyparsing_unicode.Greek) - setattr(pyparsing_unicode, "עִברִית", pyparsing_unicode.Hebrew) - setattr(pyparsing_unicode, "日本語", pyparsing_unicode.Japanese) - setattr(pyparsing_unicode.Japanese, "漢字", pyparsing_unicode.Japanese.Kanji) - setattr(pyparsing_unicode.Japanese, "カタカナ", pyparsing_unicode.Japanese.Katakana) - setattr(pyparsing_unicode.Japanese, "ひらがな", pyparsing_unicode.Japanese.Hiragana) - setattr(pyparsing_unicode, "한국어", pyparsing_unicode.Korean) - setattr(pyparsing_unicode, "ไทย", pyparsing_unicode.Thai) - setattr(pyparsing_unicode, "देवनागरी", pyparsing_unicode.Devanagari) - - -if __name__ == "__main__": - - selectToken = CaselessLiteral("select") - fromToken = CaselessLiteral("from") - - ident = Word(alphas, alphanums + "_$") - - columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - columnNameList = Group(delimitedList(columnName)).setName("columns") - columnSpec = ('*' | columnNameList) - - tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - tableNameList = Group(delimitedList(tableName)).setName("tables") - - simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") - - # demo runTests method, including embedded comments in test string - simpleSQL.runTests(""" - # '*' as column list and dotted table name - select * from SYS.XYZZY - - # caseless match on "SELECT", and casts back to "select" - SELECT * from XYZZY, ABC - - # list of column names, and mixed case SELECT keyword - Select AA,BB,CC from Sys.dual - - # multiple tables - Select A, B, C from Sys.dual, Table2 - - # invalid SELECT keyword - should fail - Xelect A, B, C from Sys.dual - - # incomplete command - should fail - Select - - # invalid column name - should fail - Select ^^^ frox Sys.dual - - """) - - pyparsing_common.number.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - # any int or real number, returned as float - pyparsing_common.fnumber.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - pyparsing_common.hex_integer.runTests(""" - 100 - FF - """) - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(""" - 12345678-1234-5678-1234-567812345678 - """) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyc deleted file mode 100644 index 20cf50e4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyc and /dev/null differ 
diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/__init__.py deleted file mode 100644 index 8ed060ff..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .core import TomlError -from .parser import load, loads -from .test import translate_to_test -from .writer import dump, dumps \ No newline at end of file diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/__init__.pyc deleted file mode 100644 index e8337840..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/core.py b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/core.py deleted file mode 100644 index c182734e..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/core.py +++ /dev/null @@ -1,13 +0,0 @@ -class TomlError(RuntimeError): - def __init__(self, message, line, col, filename): - RuntimeError.__init__(self, message, line, col, filename) - self.message = message - self.line = line - self.col = col - self.filename = filename - - def __str__(self): - return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message) - - def __repr__(self): - return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/core.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/core.pyc deleted file mode 100644 index f04a8aa4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/core.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/parser.py b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/parser.py deleted file mode 100644 index 3493aa64..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/parser.py +++ /dev/null @@ -1,341 +0,0 @@ -import string, re, sys, datetime -from .core import TomlError -from .utils import rfc3339_re, parse_rfc3339_re - -if sys.version_info[0] == 2: - _chr = unichr -else: - _chr = chr - -def load(fin, translate=lambda t, x, v: v, object_pairs_hook=dict): - return loads(fin.read(), translate=translate, object_pairs_hook=object_pairs_hook, filename=getattr(fin, 'name', repr(fin))) - -def loads(s, filename='<string>', translate=lambda t, x, v: v, object_pairs_hook=dict): - if isinstance(s, bytes): - s = s.decode('utf-8') - - s = s.replace('\r\n', '\n') - - root = object_pairs_hook() - tables = object_pairs_hook() - scope = root - - src = _Source(s, filename=filename) - ast = _p_toml(src, object_pairs_hook=object_pairs_hook) - - def error(msg): - raise TomlError(msg, pos[0], pos[1], filename) - - def process_value(v, object_pairs_hook): - kind, text, value, pos = v - if kind == 'str' and value.startswith('\n'): - value = value[1:] - if kind == 'array': - if value and any(k != value[0][0] for k, t, v, p in value[1:]): - error('array-type-mismatch') - value = [process_value(item, object_pairs_hook=object_pairs_hook) for item in value] - elif kind == 'table': - value = object_pairs_hook([(k, process_value(value[k], object_pairs_hook=object_pairs_hook)) for k in value]) - return translate(kind, text, value) - - for kind, value, pos in ast: - if kind == 'kv': - k, v = value - if k in scope: - error('duplicate_keys. 
Key "{0}" was used more than once.'.format(k)) - scope[k] = process_value(v, object_pairs_hook=object_pairs_hook) - else: - is_table_array = (kind == 'table_array') - cur = tables - for name in value[:-1]: - if isinstance(cur.get(name), list): - d, cur = cur[name][-1] - else: - d, cur = cur.setdefault(name, (None, object_pairs_hook())) - - scope = object_pairs_hook() - name = value[-1] - if name not in cur: - if is_table_array: - cur[name] = [(scope, object_pairs_hook())] - else: - cur[name] = (scope, object_pairs_hook()) - elif isinstance(cur[name], list): - if not is_table_array: - error('table_type_mismatch') - cur[name].append((scope, object_pairs_hook())) - else: - if is_table_array: - error('table_type_mismatch') - old_scope, next_table = cur[name] - if old_scope is not None: - error('duplicate_tables') - cur[name] = (scope, next_table) - - def merge_tables(scope, tables): - if scope is None: - scope = object_pairs_hook() - for k in tables: - if k in scope: - error('key_table_conflict') - v = tables[k] - if isinstance(v, list): - scope[k] = [merge_tables(sc, tbl) for sc, tbl in v] - else: - scope[k] = merge_tables(v[0], v[1]) - return scope - - return merge_tables(root, tables) - -class _Source: - def __init__(self, s, filename=None): - self.s = s - self._pos = (1, 1) - self._last = None - self._filename = filename - self.backtrack_stack = [] - - def last(self): - return self._last - - def pos(self): - return self._pos - - def fail(self): - return self._expect(None) - - def consume_dot(self): - if self.s: - self._last = self.s[0] - self.s = self[1:] - self._advance(self._last) - return self._last - return None - - def expect_dot(self): - return self._expect(self.consume_dot()) - - def consume_eof(self): - if not self.s: - self._last = '' - return True - return False - - def expect_eof(self): - return self._expect(self.consume_eof()) - - def consume(self, s): - if self.s.startswith(s): - self.s = self.s[len(s):] - self._last = s - self._advance(s) - return True - return False - - def expect(self, s): - return self._expect(self.consume(s)) - - def consume_re(self, re): - m = re.match(self.s) - if m: - self.s = self.s[len(m.group(0)):] - self._last = m - self._advance(m.group(0)) - return m - return None - - def expect_re(self, re): - return self._expect(self.consume_re(re)) - - def __enter__(self): - self.backtrack_stack.append((self.s, self._pos)) - - def __exit__(self, type, value, traceback): - if type is None: - self.backtrack_stack.pop() - else: - self.s, self._pos = self.backtrack_stack.pop() - return type == TomlError - - def commit(self): - self.backtrack_stack[-1] = (self.s, self._pos) - - def _expect(self, r): - if not r: - raise TomlError('msg', self._pos[0], self._pos[1], self._filename) - return r - - def _advance(self, s): - suffix_pos = s.rfind('\n') - if suffix_pos == -1: - self._pos = (self._pos[0], self._pos[1] + len(s)) - else: - self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos) - -_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*') -def _p_ews(s): - s.expect_re(_ews_re) - -_ws_re = re.compile(r'[ \t]*') -def _p_ws(s): - s.expect_re(_ws_re) - -_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', - '\\': '\\', 'f': '\f' } - -_basicstr_re = re.compile(r'[^"\\\000-\037]*') -_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})') -_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})') -_escapes_re = re.compile(r'[btnfr\"\\]') -_newline_esc_re = re.compile('\n[ \t\n]*') -def _p_basicstr_content(s, content=_basicstr_re): - res = [] - while True: - 
res.append(s.expect_re(content).group(0)) - if not s.consume('\\'): - break - if s.consume_re(_newline_esc_re): - pass - elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re): - v = int(s.last().group(1), 16) - if 0xd800 <= v < 0xe000: - s.fail() - res.append(_chr(v)) - else: - s.expect_re(_escapes_re) - res.append(_escapes[s.last().group(0)]) - return ''.join(res) - -_key_re = re.compile(r'[0-9a-zA-Z-_]+') -def _p_key(s): - with s: - s.expect('"') - r = _p_basicstr_content(s, _basicstr_re) - s.expect('"') - return r - if s.consume('\''): - if s.consume('\'\''): - r = s.expect_re(_litstr_ml_re).group(0) - s.expect('\'\'\'') - else: - r = s.expect_re(_litstr_re).group(0) - s.expect('\'') - return r - return s.expect_re(_key_re).group(0) - -_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?') - -_basicstr_ml_re = re.compile(r'(?:""?(?!")|[^"\\\000-\011\013-\037])*') -_litstr_re = re.compile(r"[^'\000\010\012-\037]*") -_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\010\013-\037]))*") -def _p_value(s, object_pairs_hook): - pos = s.pos() - - if s.consume('true'): - return 'bool', s.last(), True, pos - if s.consume('false'): - return 'bool', s.last(), False, pos - - if s.consume('"'): - if s.consume('""'): - r = _p_basicstr_content(s, _basicstr_ml_re) - s.expect('"""') - else: - r = _p_basicstr_content(s, _basicstr_re) - s.expect('"') - return 'str', r, r, pos - - if s.consume('\''): - if s.consume('\'\''): - r = s.expect_re(_litstr_ml_re).group(0) - s.expect('\'\'\'') - else: - r = s.expect_re(_litstr_re).group(0) - s.expect('\'') - return 'str', r, r, pos - - if s.consume_re(rfc3339_re): - m = s.last() - return 'datetime', m.group(0), parse_rfc3339_re(m), pos - - if s.consume_re(_float_re): - m = s.last().group(0) - r = m.replace('_','') - if '.' 
in m or 'e' in m or 'E' in m: - return 'float', m, float(r), pos - else: - return 'int', m, int(r, 10), pos - - if s.consume('['): - items = [] - with s: - while True: - _p_ews(s) - items.append(_p_value(s, object_pairs_hook=object_pairs_hook)) - s.commit() - _p_ews(s) - s.expect(',') - s.commit() - _p_ews(s) - s.expect(']') - return 'array', None, items, pos - - if s.consume('{'): - _p_ws(s) - items = object_pairs_hook() - if not s.consume('}'): - k = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) - _p_ws(s) - while s.consume(','): - _p_ws(s) - k = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) - _p_ws(s) - s.expect('}') - return 'table', None, items, pos - - s.fail() - -def _p_stmt(s, object_pairs_hook): - pos = s.pos() - if s.consume( '['): - is_array = s.consume('[') - _p_ws(s) - keys = [_p_key(s)] - _p_ws(s) - while s.consume('.'): - _p_ws(s) - keys.append(_p_key(s)) - _p_ws(s) - s.expect(']') - if is_array: - s.expect(']') - return 'table_array' if is_array else 'table', keys, pos - - key = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - value = _p_value(s, object_pairs_hook=object_pairs_hook) - return 'kv', (key, value), pos - -_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*') -def _p_toml(s, object_pairs_hook): - stmts = [] - _p_ews(s) - with s: - stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) - while True: - s.commit() - s.expect_re(_stmtsep_re) - stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) - _p_ews(s) - s.expect_eof() - return stmts diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/parser.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/parser.pyc deleted file mode 100644 index 682ebcfd..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/parser.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/test.py b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/test.py deleted file mode 100644 index ec8abfc6..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/test.py +++ /dev/null @@ -1,30 +0,0 @@ -import datetime -from .utils import format_rfc3339 - -try: - _string_types = (str, unicode) - _int_types = (int, long) -except NameError: - _string_types = str - _int_types = int - -def translate_to_test(v): - if isinstance(v, dict): - return { k: translate_to_test(v) for k, v in v.items() } - if isinstance(v, list): - a = [translate_to_test(x) for x in v] - if v and isinstance(v[0], dict): - return a - else: - return {'type': 'array', 'value': a} - if isinstance(v, datetime.datetime): - return {'type': 'datetime', 'value': format_rfc3339(v)} - if isinstance(v, bool): - return {'type': 'bool', 'value': 'true' if v else 'false'} - if isinstance(v, _int_types): - return {'type': 'integer', 'value': str(v)} - if isinstance(v, float): - return {'type': 'float', 'value': '{:.17}'.format(v)} - if isinstance(v, _string_types): - return {'type': 'string', 'value': v} - raise RuntimeError('unexpected value: {!r}'.format(v)) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/test.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/test.pyc deleted file mode 100644 index 1afb942f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/test.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/utils.py b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/utils.py 
deleted file mode 100644 index 636a680b..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/utils.py +++ /dev/null @@ -1,67 +0,0 @@ -import datetime -import re - -rfc3339_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') - -def parse_rfc3339(v): - m = rfc3339_re.match(v) - if not m or m.group(0) != v: - return None - return parse_rfc3339_re(m) - -def parse_rfc3339_re(m): - r = map(int, m.groups()[:6]) - if m.group(7): - micro = float(m.group(7)) - else: - micro = 0 - - if m.group(8): - g = int(m.group(8), 10) * 60 + int(m.group(9), 10) - tz = _TimeZone(datetime.timedelta(0, g * 60)) - else: - tz = _TimeZone(datetime.timedelta(0, 0)) - - y, m, d, H, M, S = r - return datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) - - -def format_rfc3339(v): - offs = v.utcoffset() - offs = int(offs.total_seconds()) // 60 if offs is not None else 0 - - if offs == 0: - suffix = 'Z' - else: - if offs > 0: - suffix = '+' - else: - suffix = '-' - offs = -offs - suffix = '{0}{1:02}:{2:02}'.format(suffix, offs // 60, offs % 60) - - if v.microsecond: - return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix - else: - return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix - -class _TimeZone(datetime.tzinfo): - def __init__(self, offset): - self._offset = offset - - def utcoffset(self, dt): - return self._offset - - def dst(self, dt): - return None - - def tzname(self, dt): - m = self._offset.total_seconds() // 60 - if m < 0: - res = '-' - m = -m - else: - res = '+' - h = m // 60 - m = m - h * 60 - return '{}{:.02}{:.02}'.format(res, h, m) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/utils.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/utils.pyc deleted file mode 100644 index 1542edb4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/utils.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/writer.py b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/writer.py deleted file mode 100644 index 73b5089c..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/writer.py +++ /dev/null @@ -1,106 +0,0 @@ -from __future__ import unicode_literals -import io, datetime, math, string, sys - -from .utils import format_rfc3339 - -if sys.version_info[0] == 3: - long = int - unicode = str - - -def dumps(obj, sort_keys=False): - fout = io.StringIO() - dump(obj, fout, sort_keys=sort_keys) - return fout.getvalue() - - -_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'} - - -def _escape_string(s): - res = [] - start = 0 - - def flush(): - if start != i: - res.append(s[start:i]) - return i + 1 - - i = 0 - while i < len(s): - c = s[i] - if c in '"\\\n\r\t\b\f': - start = flush() - res.append('\\' + _escapes[c]) - elif ord(c) < 0x20: - start = flush() - res.append('\\u%04x' % ord(c)) - i += 1 - - flush() - return '"' + ''.join(res) + '"' - - -_key_chars = string.digits + string.ascii_letters + '-_' -def _escape_id(s): - if any(c not in _key_chars for c in s): - return _escape_string(s) - return s - - -def _format_value(v): - if isinstance(v, bool): - return 'true' if v else 'false' - if isinstance(v, int) or isinstance(v, long): - return unicode(v) - if isinstance(v, float): - if math.isnan(v) or math.isinf(v): - raise ValueError("{0} is not a valid TOML value".format(v)) - else: - return repr(v) - elif isinstance(v, unicode) or isinstance(v, bytes): - return _escape_string(v) - elif isinstance(v, datetime.datetime): - return 
format_rfc3339(v) - elif isinstance(v, list): - return '[{0}]'.format(', '.join(_format_value(obj) for obj in v)) - elif isinstance(v, dict): - return '{{{0}}}'.format(', '.join('{} = {}'.format(_escape_id(k), _format_value(obj)) for k, obj in v.items())) - else: - raise RuntimeError(v) - - -def dump(obj, fout, sort_keys=False): - tables = [((), obj, False)] - - while tables: - name, table, is_array = tables.pop() - if name: - section_name = '.'.join(_escape_id(c) for c in name) - if is_array: - fout.write('[[{0}]]\n'.format(section_name)) - else: - fout.write('[{0}]\n'.format(section_name)) - - table_keys = sorted(table.keys()) if sort_keys else table.keys() - new_tables = [] - has_kv = False - for k in table_keys: - v = table[k] - if isinstance(v, dict): - new_tables.append((name + (k,), v, False)) - elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v): - new_tables.extend((name + (k,), d, True) for d in v) - elif v is None: - # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344 - fout.write( - '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k))) - has_kv = True - else: - fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v))) - has_kv = True - - tables.extend(reversed(new_tables)) - - if (name or has_kv) and tables: - fout.write('\n') diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/writer.pyc b/env/lib/python2.7/site-packages/pip/_vendor/pytoml/writer.pyc deleted file mode 100644 index bbacd6bb..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/pytoml/writer.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/requests/__init__.py deleted file mode 100644 index 1d30e3e0..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/__init__.py +++ /dev/null @@ -1,133 +0,0 @@ -# -*- coding: utf-8 -*- - -# __ -# /__) _ _ _ _ _/ _ -# / ( (- (/ (/ (- _) / _) -# / - -""" -Requests HTTP Library -~~~~~~~~~~~~~~~~~~~~~ - -Requests is an HTTP library, written in Python, for human beings. Basic GET -usage: - - >>> import requests - >>> r = requests.get('https://www.python.org') - >>> r.status_code - 200 - >>> 'Python is a programming language' in r.content - True - -... or POST: - - >>> payload = dict(key1='value1', key2='value2') - >>> r = requests.post('https://httpbin.org/post', data=payload) - >>> print(r.text) - { - ... - "form": { - "key2": "value2", - "key1": "value1" - }, - ... - } - -The other HTTP methods are supported - see `requests.api`. Full documentation -is at <http://python-requests.org>. - -:copyright: (c) 2017 by Kenneth Reitz. -:license: Apache 2.0, see LICENSE for more details. -""" - -from pip._vendor import urllib3 -from pip._vendor import chardet -import warnings -from .exceptions import RequestsDependencyWarning - - -def check_compatibility(urllib3_version, chardet_version): - urllib3_version = urllib3_version.split('.') - assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. - - # Sometimes, urllib3 only reports its version as 16.1. - if len(urllib3_version) == 2: - urllib3_version.append('0') - - # Check urllib3 for compatibility. - major, minor, patch = urllib3_version # noqa: F811 - major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.25 - assert major == 1 - assert minor >= 21 - assert minor <= 25 - - # Check chardet for compatibility. 
- major, minor, patch = chardet_version.split('.')[:3] - major, minor, patch = int(major), int(minor), int(patch) - # chardet >= 3.0.2, < 3.1.0 - assert major == 3 - assert minor < 1 - assert patch >= 2 - - -def _check_cryptography(cryptography_version): - # cryptography < 1.3.4 - try: - cryptography_version = list(map(int, cryptography_version.split('.'))) - except ValueError: - return - - if cryptography_version < [1, 3, 4]: - warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) - warnings.warn(warning, RequestsDependencyWarning) - -# Check imported dependencies for compatibility. -try: - check_compatibility(urllib3.__version__, chardet.__version__) -except (AssertionError, ValueError): - warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported " - "version!".format(urllib3.__version__, chardet.__version__), - RequestsDependencyWarning) - -# Attempt to enable urllib3's SNI support, if possible -from pip._internal.utils.compat import WINDOWS -if not WINDOWS: - try: - from pip._vendor.urllib3.contrib import pyopenssl - pyopenssl.inject_into_urllib3() - - # Check cryptography version - from cryptography import __version__ as cryptography_version - _check_cryptography(cryptography_version) - except ImportError: - pass - -# urllib3's DependencyWarnings should be silenced. -from pip._vendor.urllib3.exceptions import DependencyWarning -warnings.simplefilter('ignore', DependencyWarning) - -from .__version__ import __title__, __description__, __url__, __version__ -from .__version__ import __build__, __author__, __author_email__, __license__ -from .__version__ import __copyright__, __cake__ - -from . import utils -from . import packages -from .models import Request, Response, PreparedRequest -from .api import request, get, head, post, patch, put, delete, options -from .sessions import session, Session -from .status_codes import codes -from .exceptions import ( - RequestException, Timeout, URLRequired, - TooManyRedirects, HTTPError, ConnectionError, - FileModeWarning, ConnectTimeout, ReadTimeout -) - -# Set default logging handler to avoid "No handler found" warnings. -import logging -from logging import NullHandler - -logging.getLogger(__name__).addHandler(NullHandler()) - -# FileModeWarnings go off per the default. -warnings.simplefilter('default', FileModeWarning, append=True) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/__init__.pyc deleted file mode 100644 index 9c71a410..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/__version__.py b/env/lib/python2.7/site-packages/pip/_vendor/requests/__version__.py deleted file mode 100644 index 9844f740..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/__version__.py +++ /dev/null @@ -1,14 +0,0 @@ -# .-. .-. .-. . . .-. .-. .-. .-. -# |( |- |.| | | |- `-. | `-. -# ' ' `-' `-`.`-' `-' `-' ' `-' - -__title__ = 'requests' -__description__ = 'Python HTTP for Humans.' 
-__url__ = 'http://python-requests.org' -__version__ = '2.22.0' -__build__ = 0x022200 -__author__ = 'Kenneth Reitz' -__author_email__ = 'me@kennethreitz.org' -__license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2019 Kenneth Reitz' -__cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/__version__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/__version__.pyc deleted file mode 100644 index 70270bd4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/__version__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/_internal_utils.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/_internal_utils.pyc deleted file mode 100644 index c7527931..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/_internal_utils.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/adapters.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/adapters.pyc deleted file mode 100644 index fe85d115..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/adapters.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/api.py b/env/lib/python2.7/site-packages/pip/_vendor/requests/api.py deleted file mode 100644 index ef71d075..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/api.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.api -~~~~~~~~~~~~ - -This module implements the Requests API. - -:copyright: (c) 2012 by Kenneth Reitz. -:license: Apache2, see LICENSE for more details. -""" - -from . import sessions - - -def request(method, url, **kwargs): - """Constructs and sends a :class:`Request <Request>`. - - :param method: method for the new :class:`Request` object. - :param url: URL for the new :class:`Request` object. - :param params: (optional) Dictionary, list of tuples or bytes to send - in the query string for the :class:`Request`. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. - :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. - :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. - ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` - or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string - defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers - to add for the file. - :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) How many seconds to wait for the server to send data - before giving up, as a float, or a :ref:`(connect timeout, read - timeout) <timeouts>` tuple. - :type timeout: float or tuple - :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. - :type allow_redirects: bool - :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. 
- :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use. Defaults to ``True``. - :param stream: (optional) if ``False``, the response content will be immediately downloaded. - :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. - :return: :class:`Response <Response>` object - :rtype: requests.Response - - Usage:: - - >>> import requests - >>> req = requests.request('GET', 'https://httpbin.org/get') - <Response [200]> - """ - - # By using the 'with' statement we are sure the session is closed, thus we - # avoid leaving sockets open which can trigger a ResourceWarning in some - # cases, and look like a memory leak in others. - with sessions.Session() as session: - return session.request(method=method, url=url, **kwargs) - - -def get(url, params=None, **kwargs): - r"""Sends a GET request. - - :param url: URL for the new :class:`Request` object. - :param params: (optional) Dictionary, list of tuples or bytes to send - in the query string for the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', True) - return request('get', url, params=params, **kwargs) - - -def options(url, **kwargs): - r"""Sends an OPTIONS request. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', True) - return request('options', url, **kwargs) - - -def head(url, **kwargs): - r"""Sends a HEAD request. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', False) - return request('head', url, **kwargs) - - -def post(url, data=None, json=None, **kwargs): - r"""Sends a POST request. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response - """ - - return request('post', url, data=data, json=json, **kwargs) - - -def put(url, data=None, **kwargs): - r"""Sends a PUT request. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response - """ - - return request('put', url, data=data, **kwargs) - - -def patch(url, data=None, **kwargs): - r"""Sends a PATCH request. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. 
- :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response - """ - - return request('patch', url, data=data, **kwargs) - - -def delete(url, **kwargs): - r"""Sends a DELETE request. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response - """ - - return request('delete', url, **kwargs) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/api.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/api.pyc deleted file mode 100644 index 2f11a1f5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/api.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/auth.py b/env/lib/python2.7/site-packages/pip/_vendor/requests/auth.py deleted file mode 100644 index bdde51c7..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/auth.py +++ /dev/null @@ -1,305 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.auth -~~~~~~~~~~~~~ - -This module contains the authentication handlers for Requests. -""" - -import os -import re -import time -import hashlib -import threading -import warnings - -from base64 import b64encode - -from .compat import urlparse, str, basestring -from .cookies import extract_cookies_to_jar -from ._internal_utils import to_native_string -from .utils import parse_dict_header - -CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' -CONTENT_TYPE_MULTI_PART = 'multipart/form-data' - - -def _basic_auth_str(username, password): - """Returns a Basic Auth string.""" - - # "I want us to put a big-ol' comment on top of it that - # says that this behaviour is dumb but we need to preserve - # it because people are relying on it." - # - Lukasa - # - # These are here solely to maintain backwards compatibility - # for things like ints. This will be removed in 3.0.0. - if not isinstance(username, basestring): - warnings.warn( - "Non-string usernames will no longer be supported in Requests " - "3.0.0. Please convert the object you've passed in ({!r}) to " - "a string or bytes object in the near future to avoid " - "problems.".format(username), - category=DeprecationWarning, - ) - username = str(username) - - if not isinstance(password, basestring): - warnings.warn( - "Non-string passwords will no longer be supported in Requests " - "3.0.0. 
Please convert the object you've passed in ({!r}) to " - "a string or bytes object in the near future to avoid " - "problems.".format(password), - category=DeprecationWarning, - ) - password = str(password) - # -- End Removal -- - - if isinstance(username, str): - username = username.encode('latin1') - - if isinstance(password, str): - password = password.encode('latin1') - - authstr = 'Basic ' + to_native_string( - b64encode(b':'.join((username, password))).strip() - ) - - return authstr - - -class AuthBase(object): - """Base class that all auth implementations derive from""" - - def __call__(self, r): - raise NotImplementedError('Auth hooks must be callable.') - - -class HTTPBasicAuth(AuthBase): - """Attaches HTTP Basic Authentication to the given Request object.""" - - def __init__(self, username, password): - self.username = username - self.password = password - - def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) - - def __ne__(self, other): - return not self == other - - def __call__(self, r): - r.headers['Authorization'] = _basic_auth_str(self.username, self.password) - return r - - -class HTTPProxyAuth(HTTPBasicAuth): - """Attaches HTTP Proxy Authentication to a given Request object.""" - - def __call__(self, r): - r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) - return r - - -class HTTPDigestAuth(AuthBase): - """Attaches HTTP Digest Authentication to the given Request object.""" - - def __init__(self, username, password): - self.username = username - self.password = password - # Keep state in per-thread local storage - self._thread_local = threading.local() - - def init_per_thread_state(self): - # Ensure state is initialized just once per-thread - if not hasattr(self._thread_local, 'init'): - self._thread_local.init = True - self._thread_local.last_nonce = '' - self._thread_local.nonce_count = 0 - self._thread_local.chal = {} - self._thread_local.pos = None - self._thread_local.num_401_calls = None - - def build_digest_header(self, method, url): - """ - :rtype: str - """ - - realm = self._thread_local.chal['realm'] - nonce = self._thread_local.chal['nonce'] - qop = self._thread_local.chal.get('qop') - algorithm = self._thread_local.chal.get('algorithm') - opaque = self._thread_local.chal.get('opaque') - hash_utf8 = None - - if algorithm is None: - _algorithm = 'MD5' - else: - _algorithm = algorithm.upper() - # lambdas assume digest modules are imported at the top level - if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': - def md5_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.md5(x).hexdigest() - hash_utf8 = md5_utf8 - elif _algorithm == 'SHA': - def sha_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.sha1(x).hexdigest() - hash_utf8 = sha_utf8 - elif _algorithm == 'SHA-256': - def sha256_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.sha256(x).hexdigest() - hash_utf8 = sha256_utf8 - elif _algorithm == 'SHA-512': - def sha512_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.sha512(x).hexdigest() - hash_utf8 = sha512_utf8 - - KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) - - if hash_utf8 is None: - return None - - # XXX not implemented yet - entdig = None - p_parsed = urlparse(url) - #: path is request-uri defined in RFC 2616 which should not be empty - path = p_parsed.path or "/" - if p_parsed.query: - path += '?' 
+ p_parsed.query - - A1 = '%s:%s:%s' % (self.username, realm, self.password) - A2 = '%s:%s' % (method, path) - - HA1 = hash_utf8(A1) - HA2 = hash_utf8(A2) - - if nonce == self._thread_local.last_nonce: - self._thread_local.nonce_count += 1 - else: - self._thread_local.nonce_count = 1 - ncvalue = '%08x' % self._thread_local.nonce_count - s = str(self._thread_local.nonce_count).encode('utf-8') - s += nonce.encode('utf-8') - s += time.ctime().encode('utf-8') - s += os.urandom(8) - - cnonce = (hashlib.sha1(s).hexdigest()[:16]) - if _algorithm == 'MD5-SESS': - HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) - - if not qop: - respdig = KD(HA1, "%s:%s" % (nonce, HA2)) - elif qop == 'auth' or 'auth' in qop.split(','): - noncebit = "%s:%s:%s:%s:%s" % ( - nonce, ncvalue, cnonce, 'auth', HA2 - ) - respdig = KD(HA1, noncebit) - else: - # XXX handle auth-int. - return None - - self._thread_local.last_nonce = nonce - - # XXX should the partial digests be encoded too? - base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ - 'response="%s"' % (self.username, realm, nonce, path, respdig) - if opaque: - base += ', opaque="%s"' % opaque - if algorithm: - base += ', algorithm="%s"' % algorithm - if entdig: - base += ', digest="%s"' % entdig - if qop: - base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) - - return 'Digest %s' % (base) - - def handle_redirect(self, r, **kwargs): - """Reset num_401_calls counter on redirects.""" - if r.is_redirect: - self._thread_local.num_401_calls = 1 - - def handle_401(self, r, **kwargs): - """ - Takes the given response and tries digest-auth, if needed. - - :rtype: requests.Response - """ - - # If response is not 4xx, do not auth - # See https://github.com/requests/requests/issues/3772 - if not 400 <= r.status_code < 500: - self._thread_local.num_401_calls = 1 - return r - - if self._thread_local.pos is not None: - # Rewind the file position indicator of the body to where - # it was to resend the request. - r.request.body.seek(self._thread_local.pos) - s_auth = r.headers.get('www-authenticate', '') - - if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: - - self._thread_local.num_401_calls += 1 - pat = re.compile(r'digest ', flags=re.IGNORECASE) - self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) - - # Consume content and release the original connection - # to allow our new request to reuse the same one. - r.content - r.close() - prep = r.request.copy() - extract_cookies_to_jar(prep._cookies, r.request, r.raw) - prep.prepare_cookies(prep._cookies) - - prep.headers['Authorization'] = self.build_digest_header( - prep.method, prep.url) - _r = r.connection.send(prep, **kwargs) - _r.history.append(r) - _r.request = prep - - return _r - - self._thread_local.num_401_calls = 1 - return r - - def __call__(self, r): - # Initialize per-thread state, if needed - self.init_per_thread_state() - # If we have a saved nonce, skip the 401 - if self._thread_local.last_nonce: - r.headers['Authorization'] = self.build_digest_header(r.method, r.url) - try: - self._thread_local.pos = r.body.tell() - except AttributeError: - # In the case of HTTPDigestAuth being reused and the body of - # the previous request was a file-like object, pos has the - # file position of the previous body. Ensure it's set to - # None. 
- self._thread_local.pos = None - r.register_hook('response', self.handle_401) - r.register_hook('response', self.handle_redirect) - self._thread_local.num_401_calls = 1 - - return r - - def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) - - def __ne__(self, other): - return not self == other diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/auth.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/auth.pyc deleted file mode 100644 index 421aef7e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/auth.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/certs.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/certs.pyc deleted file mode 100644 index 793127d6..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/certs.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/compat.py b/env/lib/python2.7/site-packages/pip/_vendor/requests/compat.py deleted file mode 100644 index 6a86893d..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/compat.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.compat -~~~~~~~~~~~~~~~ - -This module handles import compatibility issues between Python 2 and -Python 3. -""" - -from pip._vendor import chardet - -import sys - -# ------- -# Pythons -# ------- - -# Syntax sugar. -_ver = sys.version_info - -#: Python 2.x? -is_py2 = (_ver[0] == 2) - -#: Python 3.x? -is_py3 = (_ver[0] == 3) - -# Note: We've patched out simplejson support in pip because it prevents -# upgrading simplejson on Windows. -# try: -# import simplejson as json -# except (ImportError, SyntaxError): -# # simplejson does not support Python 3.2, it throws a SyntaxError -# # because of u'...' Unicode literals. 
-import json - -# --------- -# Specifics -# --------- - -if is_py2: - from urllib import ( - quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, - proxy_bypass, proxy_bypass_environment, getproxies_environment) - from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag - from urllib2 import parse_http_list - import cookielib - from Cookie import Morsel - from StringIO import StringIO - from collections import Callable, Mapping, MutableMapping, OrderedDict - - - builtin_str = str - bytes = str - str = unicode - basestring = basestring - numeric_types = (int, long, float) - integer_types = (int, long) - -elif is_py3: - from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag - from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment - from http import cookiejar as cookielib - from http.cookies import Morsel - from io import StringIO - from collections import OrderedDict - from collections.abc import Callable, Mapping, MutableMapping - - builtin_str = str - str = str - bytes = bytes - basestring = (str, bytes) - numeric_types = (int, float) - integer_types = (int,) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/compat.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/compat.pyc deleted file mode 100644 index 92faf4f4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/cookies.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/cookies.pyc deleted file mode 100644 index 3ca60509..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/cookies.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/exceptions.py b/env/lib/python2.7/site-packages/pip/_vendor/requests/exceptions.py deleted file mode 100644 index a91e1fd1..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/exceptions.py +++ /dev/null @@ -1,126 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.exceptions -~~~~~~~~~~~~~~~~~~~ - -This module contains the set of Requests' exceptions. -""" -from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError - - -class RequestException(IOError): - """There was an ambiguous exception that occurred while handling your - request. - """ - - def __init__(self, *args, **kwargs): - """Initialize RequestException with `request` and `response` objects.""" - response = kwargs.pop('response', None) - self.response = response - self.request = kwargs.pop('request', None) - if (response is not None and not self.request and - hasattr(response, 'request')): - self.request = self.response.request - super(RequestException, self).__init__(*args, **kwargs) - - -class HTTPError(RequestException): - """An HTTP error occurred.""" - - -class ConnectionError(RequestException): - """A Connection error occurred.""" - - -class ProxyError(ConnectionError): - """A proxy error occurred.""" - - -class SSLError(ConnectionError): - """An SSL error occurred.""" - - -class Timeout(RequestException): - """The request timed out. - - Catching this error will catch both - :exc:`~requests.exceptions.ConnectTimeout` and - :exc:`~requests.exceptions.ReadTimeout` errors. - """ - - -class ConnectTimeout(ConnectionError, Timeout): - """The request timed out while trying to connect to the remote server. 
- - Requests that produced this error are safe to retry. - """ - - -class ReadTimeout(Timeout): - """The server did not send any data in the allotted amount of time.""" - - -class URLRequired(RequestException): - """A valid URL is required to make a request.""" - - -class TooManyRedirects(RequestException): - """Too many redirects.""" - - -class MissingSchema(RequestException, ValueError): - """The URL schema (e.g. http or https) is missing.""" - - -class InvalidSchema(RequestException, ValueError): - """See defaults.py for valid schemas.""" - - -class InvalidURL(RequestException, ValueError): - """The URL provided was somehow invalid.""" - - -class InvalidHeader(RequestException, ValueError): - """The header value provided was somehow invalid.""" - - -class InvalidProxyURL(InvalidURL): - """The proxy URL provided is invalid.""" - - -class ChunkedEncodingError(RequestException): - """The server declared chunked encoding but sent an invalid chunk.""" - - -class ContentDecodingError(RequestException, BaseHTTPError): - """Failed to decode response content""" - - -class StreamConsumedError(RequestException, TypeError): - """The content for this response was already consumed""" - - -class RetryError(RequestException): - """Custom retries logic failed""" - - -class UnrewindableBodyError(RequestException): - """Requests encountered an error when trying to rewind a body""" - -# Warnings - - -class RequestsWarning(Warning): - """Base warning for Requests.""" - pass - - -class FileModeWarning(RequestsWarning, DeprecationWarning): - """A file was opened in text mode, but Requests determined its binary length.""" - pass - - -class RequestsDependencyWarning(RequestsWarning): - """An imported dependency doesn't match the expected version range.""" - pass diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/exceptions.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/exceptions.pyc deleted file mode 100644 index 55fb8b7f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/exceptions.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/help.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/help.pyc deleted file mode 100644 index 2da78f4e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/help.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/hooks.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/hooks.pyc deleted file mode 100644 index 75deb7e8..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/hooks.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/models.py b/env/lib/python2.7/site-packages/pip/_vendor/requests/models.py deleted file mode 100644 index 08399574..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/models.py +++ /dev/null @@ -1,953 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.models -~~~~~~~~~~~~~~~ - -This module contains the primary objects that power Requests. -""" - -import datetime -import sys - -# Import encoding now, to avoid implicit import later. -# Implicit import within threads may cause LookupError when standard library is in a ZIP, -# such as in Embedded Python. See https://github.com/requests/requests/issues/3578. 
-import encodings.idna - -from pip._vendor.urllib3.fields import RequestField -from pip._vendor.urllib3.filepost import encode_multipart_formdata -from pip._vendor.urllib3.util import parse_url -from pip._vendor.urllib3.exceptions import ( - DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) - -from io import UnsupportedOperation -from .hooks import default_hooks -from .structures import CaseInsensitiveDict - -from .auth import HTTPBasicAuth -from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar -from .exceptions import ( - HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, - ContentDecodingError, ConnectionError, StreamConsumedError) -from ._internal_utils import to_native_string, unicode_is_ascii -from .utils import ( - guess_filename, get_auth_from_url, requote_uri, - stream_decode_response_unicode, to_key_val_list, parse_header_links, - iter_slices, guess_json_utf, super_len, check_header_validity) -from .compat import ( - Callable, Mapping, - cookielib, urlunparse, urlsplit, urlencode, str, bytes, - is_py2, chardet, builtin_str, basestring) -from .compat import json as complexjson -from .status_codes import codes - -#: The set of HTTP status codes that indicate an automatically -#: processable redirect. -REDIRECT_STATI = ( - codes.moved, # 301 - codes.found, # 302 - codes.other, # 303 - codes.temporary_redirect, # 307 - codes.permanent_redirect, # 308 -) - -DEFAULT_REDIRECT_LIMIT = 30 -CONTENT_CHUNK_SIZE = 10 * 1024 -ITER_CHUNK_SIZE = 512 - - -class RequestEncodingMixin(object): - @property - def path_url(self): - """Build the path URL to use.""" - - url = [] - - p = urlsplit(self.url) - - path = p.path - if not path: - path = '/' - - url.append(path) - - query = p.query - if query: - url.append('?') - url.append(query) - - return ''.join(url) - - @staticmethod - def _encode_params(data): - """Encode parameters in a piece of data. - - Will successfully encode parameters when passed as a dict or a list of - 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary - if parameters are supplied as a dict. - """ - - if isinstance(data, (str, bytes)): - return data - elif hasattr(data, 'read'): - return data - elif hasattr(data, '__iter__'): - result = [] - for k, vs in to_key_val_list(data): - if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): - vs = [vs] - for v in vs: - if v is not None: - result.append( - (k.encode('utf-8') if isinstance(k, str) else k, - v.encode('utf-8') if isinstance(v, str) else v)) - return urlencode(result, doseq=True) - else: - return data - - @staticmethod - def _encode_files(files, data): - """Build the body for a multipart/form-data request. - - Will successfully encode files when passed as a dict or a list of - tuples. Order is retained if data is a list of tuples but arbitrary - if parameters are supplied as a dict. - The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) - or 4-tuples (filename, fileobj, contentype, custom_headers). - """ - if (not files): - raise ValueError("Files must be provided.") - elif isinstance(data, basestring): - raise ValueError("Data must not be a string.") - - new_fields = [] - fields = to_key_val_list(data or {}) - files = to_key_val_list(files or {}) - - for field, val in fields: - if isinstance(val, basestring) or not hasattr(val, '__iter__'): - val = [val] - for v in val: - if v is not None: - # Don't call str() on bytestrings: in Py3 it all goes wrong. 
- if not isinstance(v, bytes): - v = str(v) - - new_fields.append( - (field.decode('utf-8') if isinstance(field, bytes) else field, - v.encode('utf-8') if isinstance(v, str) else v)) - - for (k, v) in files: - # support for explicit filename - ft = None - fh = None - if isinstance(v, (tuple, list)): - if len(v) == 2: - fn, fp = v - elif len(v) == 3: - fn, fp, ft = v - else: - fn, fp, ft, fh = v - else: - fn = guess_filename(v) or k - fp = v - - if isinstance(fp, (str, bytes, bytearray)): - fdata = fp - elif hasattr(fp, 'read'): - fdata = fp.read() - elif fp is None: - continue - else: - fdata = fp - - rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) - rf.make_multipart(content_type=ft) - new_fields.append(rf) - - body, content_type = encode_multipart_formdata(new_fields) - - return body, content_type - - -class RequestHooksMixin(object): - def register_hook(self, event, hook): - """Properly register a hook.""" - - if event not in self.hooks: - raise ValueError('Unsupported event specified, with event name "%s"' % (event)) - - if isinstance(hook, Callable): - self.hooks[event].append(hook) - elif hasattr(hook, '__iter__'): - self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) - - def deregister_hook(self, event, hook): - """Deregister a previously registered hook. - Returns True if the hook existed, False if not. - """ - - try: - self.hooks[event].remove(hook) - return True - except ValueError: - return False - - -class Request(RequestHooksMixin): - """A user-created :class:`Request <Request>` object. - - Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server. - - :param method: HTTP method to use. - :param url: URL to send. - :param headers: dictionary of headers to send. - :param files: dictionary of {filename: fileobject} files to multipart upload. - :param data: the body to attach to the request. If a dictionary or - list of tuples ``[(key, value)]`` is provided, form-encoding will - take place. - :param json: json for the body to attach to the request (if files or data is not specified). - :param params: URL parameters to append to the URL. If a dictionary or - list of tuples ``[(key, value)]`` is provided, form-encoding will - take place. - :param auth: Auth handler or (user, pass) tuple. - :param cookies: dictionary or CookieJar of cookies to attach to this request. - :param hooks: dictionary of callback hooks, for internal usage. - - Usage:: - - >>> import requests - >>> req = requests.Request('GET', 'https://httpbin.org/get') - >>> req.prepare() - <PreparedRequest [GET]> - """ - - def __init__(self, - method=None, url=None, headers=None, files=None, data=None, - params=None, auth=None, cookies=None, hooks=None, json=None): - - # Default empty dicts for dict params. 
- data = [] if data is None else data - files = [] if files is None else files - headers = {} if headers is None else headers - params = {} if params is None else params - hooks = {} if hooks is None else hooks - - self.hooks = default_hooks() - for (k, v) in list(hooks.items()): - self.register_hook(event=k, hook=v) - - self.method = method - self.url = url - self.headers = headers - self.files = files - self.data = data - self.json = json - self.params = params - self.auth = auth - self.cookies = cookies - - def __repr__(self): - return '<Request [%s]>' % (self.method) - - def prepare(self): - """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.""" - p = PreparedRequest() - p.prepare( - method=self.method, - url=self.url, - headers=self.headers, - files=self.files, - data=self.data, - json=self.json, - params=self.params, - auth=self.auth, - cookies=self.cookies, - hooks=self.hooks, - ) - return p - - -class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): - """The fully mutable :class:`PreparedRequest <PreparedRequest>` object, - containing the exact bytes that will be sent to the server. - - Generated from either a :class:`Request <Request>` object or manually. - - Usage:: - - >>> import requests - >>> req = requests.Request('GET', 'https://httpbin.org/get') - >>> r = req.prepare() - <PreparedRequest [GET]> - - >>> s = requests.Session() - >>> s.send(r) - <Response [200]> - """ - - def __init__(self): - #: HTTP verb to send to the server. - self.method = None - #: HTTP URL to send the request to. - self.url = None - #: dictionary of HTTP headers. - self.headers = None - # The `CookieJar` used to create the Cookie header will be stored here - # after prepare_cookies is called - self._cookies = None - #: request body to send to the server. - self.body = None - #: dictionary of callback hooks, for internal usage. - self.hooks = default_hooks() - #: integer denoting starting position of a readable file-like body. - self._body_position = None - - def prepare(self, - method=None, url=None, headers=None, files=None, data=None, - params=None, auth=None, cookies=None, hooks=None, json=None): - """Prepares the entire request with the given parameters.""" - - self.prepare_method(method) - self.prepare_url(url, params) - self.prepare_headers(headers) - self.prepare_cookies(cookies) - self.prepare_body(data, files, json) - self.prepare_auth(auth, url) - - # Note that prepare_auth must be last to enable authentication schemes - # such as OAuth to work on a fully prepared request. - - # This MUST go after prepare_auth. 
Authenticators could add a hook - self.prepare_hooks(hooks) - - def __repr__(self): - return '<PreparedRequest [%s]>' % (self.method) - - def copy(self): - p = PreparedRequest() - p.method = self.method - p.url = self.url - p.headers = self.headers.copy() if self.headers is not None else None - p._cookies = _copy_cookie_jar(self._cookies) - p.body = self.body - p.hooks = self.hooks - p._body_position = self._body_position - return p - - def prepare_method(self, method): - """Prepares the given HTTP method.""" - self.method = method - if self.method is not None: - self.method = to_native_string(self.method.upper()) - - @staticmethod - def _get_idna_encoded_host(host): - from pip._vendor import idna - - try: - host = idna.encode(host, uts46=True).decode('utf-8') - except idna.IDNAError: - raise UnicodeError - return host - - def prepare_url(self, url, params): - """Prepares the given HTTP URL.""" - #: Accept objects that have string representations. - #: We're unable to blindly call unicode/str functions - #: as this will include the bytestring indicator (b'') - #: on python 3.x. - #: https://github.com/requests/requests/pull/2238 - if isinstance(url, bytes): - url = url.decode('utf8') - else: - url = unicode(url) if is_py2 else str(url) - - # Remove leading whitespaces from url - url = url.lstrip() - - # Don't do any URL preparation for non-HTTP schemes like `mailto`, - # `data` etc to work around exceptions from `url_parse`, which - # handles RFC 3986 only. - if ':' in url and not url.lower().startswith('http'): - self.url = url - return - - # Support for unicode domain names and paths. - try: - scheme, auth, host, port, path, query, fragment = parse_url(url) - except LocationParseError as e: - raise InvalidURL(*e.args) - - if not scheme: - error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") - error = error.format(to_native_string(url, 'utf8')) - - raise MissingSchema(error) - - if not host: - raise InvalidURL("Invalid URL %r: No host supplied" % url) - - # In general, we want to try IDNA encoding the hostname if the string contains - # non-ASCII characters. This allows users to automatically get the correct IDNA - # behaviour. For strings containing only ASCII characters, we need to also verify - # it doesn't start with a wildcard (*), before allowing the unencoded hostname. - if not unicode_is_ascii(host): - try: - host = self._get_idna_encoded_host(host) - except UnicodeError: - raise InvalidURL('URL has an invalid label.') - elif host.startswith(u'*'): - raise InvalidURL('URL has an invalid label.') - - # Carefully reconstruct the network location - netloc = auth or '' - if netloc: - netloc += '@' - netloc += host - if port: - netloc += ':' + str(port) - - # Bare domains aren't valid URLs. 
- if not path: - path = '/' - - if is_py2: - if isinstance(scheme, str): - scheme = scheme.encode('utf-8') - if isinstance(netloc, str): - netloc = netloc.encode('utf-8') - if isinstance(path, str): - path = path.encode('utf-8') - if isinstance(query, str): - query = query.encode('utf-8') - if isinstance(fragment, str): - fragment = fragment.encode('utf-8') - - if isinstance(params, (str, bytes)): - params = to_native_string(params) - - enc_params = self._encode_params(params) - if enc_params: - if query: - query = '%s&%s' % (query, enc_params) - else: - query = enc_params - - url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) - self.url = url - - def prepare_headers(self, headers): - """Prepares the given HTTP headers.""" - - self.headers = CaseInsensitiveDict() - if headers: - for header in headers.items(): - # Raise exception on invalid header value. - check_header_validity(header) - name, value = header - self.headers[to_native_string(name)] = value - - def prepare_body(self, data, files, json=None): - """Prepares the given HTTP body data.""" - - # Check if file, fo, generator, iterator. - # If not, run through normal process. - - # Nottin' on you. - body = None - content_type = None - - if not data and json is not None: - # urllib3 requires a bytes-like body. Python 2's json.dumps - # provides this natively, but Python 3 gives a Unicode string. - content_type = 'application/json' - body = complexjson.dumps(json) - if not isinstance(body, bytes): - body = body.encode('utf-8') - - is_stream = all([ - hasattr(data, '__iter__'), - not isinstance(data, (basestring, list, tuple, Mapping)) - ]) - - try: - length = super_len(data) - except (TypeError, AttributeError, UnsupportedOperation): - length = None - - if is_stream: - body = data - - if getattr(body, 'tell', None) is not None: - # Record the current file position before reading. - # This will allow us to rewind a file in the event - # of a redirect. - try: - self._body_position = body.tell() - except (IOError, OSError): - # This differentiates from None, allowing us to catch - # a failed `tell()` later when trying to rewind the body - self._body_position = object() - - if files: - raise NotImplementedError('Streamed bodies and files are mutually exclusive.') - - if length: - self.headers['Content-Length'] = builtin_str(length) - else: - self.headers['Transfer-Encoding'] = 'chunked' - else: - # Multi-part file uploads. - if files: - (body, content_type) = self._encode_files(files, data) - else: - if data: - body = self._encode_params(data) - if isinstance(data, basestring) or hasattr(data, 'read'): - content_type = None - else: - content_type = 'application/x-www-form-urlencoded' - - self.prepare_content_length(body) - - # Add content-type if it wasn't explicitly provided. - if content_type and ('content-type' not in self.headers): - self.headers['Content-Type'] = content_type - - self.body = body - - def prepare_content_length(self, body): - """Prepare Content-Length header based on request method and body""" - if body is not None: - length = super_len(body) - if length: - # If length exists, set it. Otherwise, we fallback - # to Transfer-Encoding: chunked. - self.headers['Content-Length'] = builtin_str(length) - elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: - # Set Content-Length to 0 for methods that can have a body - # but don't provide one. (i.e. 
not GET or HEAD) - self.headers['Content-Length'] = '0' - - def prepare_auth(self, auth, url=''): - """Prepares the given HTTP auth data.""" - - # If no Auth is explicitly provided, extract it from the URL first. - if auth is None: - url_auth = get_auth_from_url(self.url) - auth = url_auth if any(url_auth) else None - - if auth: - if isinstance(auth, tuple) and len(auth) == 2: - # special-case basic HTTP auth - auth = HTTPBasicAuth(*auth) - - # Allow auth to make its changes. - r = auth(self) - - # Update self to reflect the auth changes. - self.__dict__.update(r.__dict__) - - # Recompute Content-Length - self.prepare_content_length(self.body) - - def prepare_cookies(self, cookies): - """Prepares the given HTTP cookie data. - - This function eventually generates a ``Cookie`` header from the - given cookies using cookielib. Due to cookielib's design, the header - will not be regenerated if it already exists, meaning this function - can only be called once for the life of the - :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls - to ``prepare_cookies`` will have no actual effect, unless the "Cookie" - header is removed beforehand. - """ - if isinstance(cookies, cookielib.CookieJar): - self._cookies = cookies - else: - self._cookies = cookiejar_from_dict(cookies) - - cookie_header = get_cookie_header(self._cookies, self) - if cookie_header is not None: - self.headers['Cookie'] = cookie_header - - def prepare_hooks(self, hooks): - """Prepares the given hooks.""" - # hooks can be passed as None to the prepare method and to this - # method. To prevent iterating over None, simply use an empty list - # if hooks is False-y - hooks = hooks or [] - for event in hooks: - self.register_hook(event, hooks[event]) - - -class Response(object): - """The :class:`Response <Response>` object, which contains a - server's response to an HTTP request. - """ - - __attrs__ = [ - '_content', 'status_code', 'headers', 'url', 'history', - 'encoding', 'reason', 'cookies', 'elapsed', 'request' - ] - - def __init__(self): - self._content = False - self._content_consumed = False - self._next = None - - #: Integer Code of responded HTTP Status, e.g. 404 or 200. - self.status_code = None - - #: Case-insensitive Dictionary of Response Headers. - #: For example, ``headers['content-encoding']`` will return the - #: value of a ``'Content-Encoding'`` response header. - self.headers = CaseInsensitiveDict() - - #: File-like object representation of response (for advanced usage). - #: Use of ``raw`` requires that ``stream=True`` be set on the request. - # This requirement does not apply for use internally to Requests. - self.raw = None - - #: Final URL location of Response. - self.url = None - - #: Encoding to decode with when accessing r.text. - self.encoding = None - - #: A list of :class:`Response <Response>` objects from - #: the history of the Request. Any redirect responses will end - #: up here. The list is sorted from the oldest to the most recent request. - self.history = [] - - #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". - self.reason = None - - #: A CookieJar of Cookies the server sent back. - self.cookies = cookiejar_from_dict({}) - - #: The amount of time elapsed between sending the request - #: and the arrival of the response (as a timedelta). - #: This property specifically measures the time taken between sending - #: the first byte of the request and finishing parsing the headers. 
It - #: is therefore unaffected by consuming the response content or the - #: value of the ``stream`` keyword argument. - self.elapsed = datetime.timedelta(0) - - #: The :class:`PreparedRequest <PreparedRequest>` object to which this - #: is a response. - self.request = None - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - def __getstate__(self): - # Consume everything; accessing the content attribute makes - # sure the content has been fully read. - if not self._content_consumed: - self.content - - return {attr: getattr(self, attr, None) for attr in self.__attrs__} - - def __setstate__(self, state): - for name, value in state.items(): - setattr(self, name, value) - - # pickled objects do not have .raw - setattr(self, '_content_consumed', True) - setattr(self, 'raw', None) - - def __repr__(self): - return '<Response [%s]>' % (self.status_code) - - def __bool__(self): - """Returns True if :attr:`status_code` is less than 400. - - This attribute checks if the status code of the response is between - 400 and 600 to see if there was a client error or a server error. If - the status code, is between 200 and 400, this will return True. This - is **not** a check to see if the response code is ``200 OK``. - """ - return self.ok - - def __nonzero__(self): - """Returns True if :attr:`status_code` is less than 400. - - This attribute checks if the status code of the response is between - 400 and 600 to see if there was a client error or a server error. If - the status code, is between 200 and 400, this will return True. This - is **not** a check to see if the response code is ``200 OK``. - """ - return self.ok - - def __iter__(self): - """Allows you to use a response as an iterator.""" - return self.iter_content(128) - - @property - def ok(self): - """Returns True if :attr:`status_code` is less than 400, False if not. - - This attribute checks if the status code of the response is between - 400 and 600 to see if there was a client error or a server error. If - the status code is between 200 and 400, this will return True. This - is **not** a check to see if the response code is ``200 OK``. - """ - try: - self.raise_for_status() - except HTTPError: - return False - return True - - @property - def is_redirect(self): - """True if this Response is a well-formed HTTP redirect that could have - been processed automatically (by :meth:`Session.resolve_redirects`). - """ - return ('location' in self.headers and self.status_code in REDIRECT_STATI) - - @property - def is_permanent_redirect(self): - """True if this Response one of the permanent versions of redirect.""" - return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) - - @property - def next(self): - """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" - return self._next - - @property - def apparent_encoding(self): - """The apparent encoding, provided by the chardet library.""" - return chardet.detect(self.content)['encoding'] - - def iter_content(self, chunk_size=1, decode_unicode=False): - """Iterates over the response data. When stream=True is set on the - request, this avoids reading the content at once into memory for - large responses. The chunk size is the number of bytes it should - read into memory. This is not necessarily the length of each item - returned as decoding can take place. - - chunk_size must be of type int or None. A value of None will - function differently depending on the value of `stream`. 
- stream=True will read data as it arrives in whatever size the - chunks are received. If stream=False, data is returned as - a single chunk. - - If decode_unicode is True, content will be decoded using the best - available encoding based on the response. - """ - - def generate(): - # Special case for urllib3. - if hasattr(self.raw, 'stream'): - try: - for chunk in self.raw.stream(chunk_size, decode_content=True): - yield chunk - except ProtocolError as e: - raise ChunkedEncodingError(e) - except DecodeError as e: - raise ContentDecodingError(e) - except ReadTimeoutError as e: - raise ConnectionError(e) - else: - # Standard file-like object. - while True: - chunk = self.raw.read(chunk_size) - if not chunk: - break - yield chunk - - self._content_consumed = True - - if self._content_consumed and isinstance(self._content, bool): - raise StreamConsumedError() - elif chunk_size is not None and not isinstance(chunk_size, int): - raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) - # simulate reading small chunks of the content - reused_chunks = iter_slices(self._content, chunk_size) - - stream_chunks = generate() - - chunks = reused_chunks if self._content_consumed else stream_chunks - - if decode_unicode: - chunks = stream_decode_response_unicode(chunks, self) - - return chunks - - def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None): - """Iterates over the response data, one line at a time. When - stream=True is set on the request, this avoids reading the - content at once into memory for large responses. - - .. note:: This method is not reentrant safe. - """ - - pending = None - - for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): - - if pending is not None: - chunk = pending + chunk - - if delimiter: - lines = chunk.split(delimiter) - else: - lines = chunk.splitlines() - - if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: - pending = lines.pop() - else: - pending = None - - for line in lines: - yield line - - if pending is not None: - yield pending - - @property - def content(self): - """Content of the response, in bytes.""" - - if self._content is False: - # Read the contents. - if self._content_consumed: - raise RuntimeError( - 'The content for this response was already consumed') - - if self.status_code == 0 or self.raw is None: - self._content = None - else: - self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' - - self._content_consumed = True - # don't need to release the connection; that's been handled by urllib3 - # since we exhausted the data. - return self._content - - @property - def text(self): - """Content of the response, in unicode. - - If Response.encoding is None, encoding will be guessed using - ``chardet``. - - The encoding of the response content is determined based solely on HTTP - headers, following RFC 2616 to the letter. If you can take advantage of - non-HTTP knowledge to make a better guess at the encoding, you should - set ``r.encoding`` appropriately before accessing this property. - """ - - # Try charset from content-type - content = None - encoding = self.encoding - - if not self.content: - return str('') - - # Fallback to auto-detected encoding. - if self.encoding is None: - encoding = self.apparent_encoding - - # Decode unicode from given encoding. 
- try: - content = str(self.content, encoding, errors='replace') - except (LookupError, TypeError): - # A LookupError is raised if the encoding was not found which could - # indicate a misspelling or similar mistake. - # - # A TypeError can be raised if encoding is None - # - # So we try blindly encoding. - content = str(self.content, errors='replace') - - return content - - def json(self, **kwargs): - r"""Returns the json-encoded content of a response, if any. - - :param \*\*kwargs: Optional arguments that ``json.loads`` takes. - :raises ValueError: If the response body does not contain valid json. - """ - - if not self.encoding and self.content and len(self.content) > 3: - # No encoding set. JSON RFC 4627 section 3 states we should expect - # UTF-8, -16 or -32. Detect which one to use; If the detection or - # decoding fails, fall back to `self.text` (using chardet to make - # a best guess). - encoding = guess_json_utf(self.content) - if encoding is not None: - try: - return complexjson.loads( - self.content.decode(encoding), **kwargs - ) - except UnicodeDecodeError: - # Wrong UTF codec detected; usually because it's not UTF-8 - # but some other 8-bit codec. This is an RFC violation, - # and the server didn't bother to tell us what codec *was* - # used. - pass - return complexjson.loads(self.text, **kwargs) - - @property - def links(self): - """Returns the parsed header links of the response, if any.""" - - header = self.headers.get('link') - - # l = MultiDict() - l = {} - - if header: - links = parse_header_links(header) - - for link in links: - key = link.get('rel') or link.get('url') - l[key] = link - - return l - - def raise_for_status(self): - """Raises stored :class:`HTTPError`, if one occurred.""" - - http_error_msg = '' - if isinstance(self.reason, bytes): - # We attempt to decode utf-8 first because some servers - # choose to localize their reason strings. If the string - # isn't utf-8, we fall back to iso-8859-1 for all other - # encodings. (See PR #3538) - try: - reason = self.reason.decode('utf-8') - except UnicodeDecodeError: - reason = self.reason.decode('iso-8859-1') - else: - reason = self.reason - - if 400 <= self.status_code < 500: - http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) - - elif 500 <= self.status_code < 600: - http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) - - if http_error_msg: - raise HTTPError(http_error_msg, response=self) - - def close(self): - """Releases the connection back to the pool. Once this method has been - called the underlying ``raw`` object must not be accessed again. 
- - *Note: Should not normally need to be called explicitly.* - """ - if not self._content_consumed: - self.raw.close() - - release_conn = getattr(self.raw, 'release_conn', None) - if release_conn is not None: - release_conn() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/models.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/models.pyc deleted file mode 100644 index aa3936bb..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/models.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/packages.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/packages.pyc deleted file mode 100644 index faec016b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/packages.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyc deleted file mode 100644 index b980fff3..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/status_codes.py b/env/lib/python2.7/site-packages/pip/_vendor/requests/status_codes.py deleted file mode 100644 index 813e8c4e..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/status_codes.py +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- - -r""" -The ``codes`` object defines a mapping from common names for HTTP statuses -to their numerical codes, accessible either as attributes or as dictionary -items. - ->>> requests.codes['temporary_redirect'] -307 ->>> requests.codes.teapot -418 ->>> requests.codes['\o/'] -200 - -Some codes have multiple names, and both upper- and lower-case versions of -the names are allowed. For example, ``codes.ok``, ``codes.OK``, and -``codes.okay`` all correspond to the HTTP status code 200. -""" - -from .structures import LookupDict - -_codes = { - - # Informational. - 100: ('continue',), - 101: ('switching_protocols',), - 102: ('processing',), - 103: ('checkpoint',), - 122: ('uri_too_long', 'request_uri_too_long'), - 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), - 201: ('created',), - 202: ('accepted',), - 203: ('non_authoritative_info', 'non_authoritative_information'), - 204: ('no_content',), - 205: ('reset_content', 'reset'), - 206: ('partial_content', 'partial'), - 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), - 208: ('already_reported',), - 226: ('im_used',), - - # Redirection. - 300: ('multiple_choices',), - 301: ('moved_permanently', 'moved', '\\o-'), - 302: ('found',), - 303: ('see_other', 'other'), - 304: ('not_modified',), - 305: ('use_proxy',), - 306: ('switch_proxy',), - 307: ('temporary_redirect', 'temporary_moved', 'temporary'), - 308: ('permanent_redirect', - 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 - - # Client Error. 
- 400: ('bad_request', 'bad'), - 401: ('unauthorized',), - 402: ('payment_required', 'payment'), - 403: ('forbidden',), - 404: ('not_found', '-o-'), - 405: ('method_not_allowed', 'not_allowed'), - 406: ('not_acceptable',), - 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), - 408: ('request_timeout', 'timeout'), - 409: ('conflict',), - 410: ('gone',), - 411: ('length_required',), - 412: ('precondition_failed', 'precondition'), - 413: ('request_entity_too_large',), - 414: ('request_uri_too_large',), - 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), - 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), - 417: ('expectation_failed',), - 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), - 421: ('misdirected_request',), - 422: ('unprocessable_entity', 'unprocessable'), - 423: ('locked',), - 424: ('failed_dependency', 'dependency'), - 425: ('unordered_collection', 'unordered'), - 426: ('upgrade_required', 'upgrade'), - 428: ('precondition_required', 'precondition'), - 429: ('too_many_requests', 'too_many'), - 431: ('header_fields_too_large', 'fields_too_large'), - 444: ('no_response', 'none'), - 449: ('retry_with', 'retry'), - 450: ('blocked_by_windows_parental_controls', 'parental_controls'), - 451: ('unavailable_for_legal_reasons', 'legal_reasons'), - 499: ('client_closed_request',), - - # Server Error. - 500: ('internal_server_error', 'server_error', '/o\\', '✗'), - 501: ('not_implemented',), - 502: ('bad_gateway',), - 503: ('service_unavailable', 'unavailable'), - 504: ('gateway_timeout',), - 505: ('http_version_not_supported', 'http_version'), - 506: ('variant_also_negotiates',), - 507: ('insufficient_storage',), - 509: ('bandwidth_limit_exceeded', 'bandwidth'), - 510: ('not_extended',), - 511: ('network_authentication_required', 'network_auth', 'network_authentication'), -} - -codes = LookupDict(name='status_codes') - -def _init(): - for code, titles in _codes.items(): - for title in titles: - setattr(codes, title, code) - if not title.startswith(('\\', '/')): - setattr(codes, title.upper(), code) - - def doc(code): - names = ', '.join('``%s``' % n for n in _codes[code]) - return '* %d: %s' % (code, names) - - global __doc__ - __doc__ = (__doc__ + '\n' + - '\n'.join(doc(code) for code in sorted(_codes)) - if __doc__ is not None else None) - -_init() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/status_codes.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/status_codes.pyc deleted file mode 100644 index 58e70dc5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/status_codes.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/structures.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/structures.pyc deleted file mode 100644 index bfee134b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/structures.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/utils.py b/env/lib/python2.7/site-packages/pip/_vendor/requests/utils.py deleted file mode 100644 index 8170a8d2..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/utils.py +++ /dev/null @@ -1,977 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.utils -~~~~~~~~~~~~~~ - -This module provides utility functions that are used within Requests -that are also useful for external consumption. 
-""" - -import codecs -import contextlib -import io -import os -import re -import socket -import struct -import sys -import tempfile -import warnings -import zipfile - -from .__version__ import __version__ -from . import certs -# to_native_string is unused here, but imported here for backwards compatibility -from ._internal_utils import to_native_string -from .compat import parse_http_list as _parse_list_header -from .compat import ( - quote, urlparse, bytes, str, OrderedDict, unquote, getproxies, - proxy_bypass, urlunparse, basestring, integer_types, is_py3, - proxy_bypass_environment, getproxies_environment, Mapping) -from .cookies import cookiejar_from_dict -from .structures import CaseInsensitiveDict -from .exceptions import ( - InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) - -NETRC_FILES = ('.netrc', '_netrc') - -DEFAULT_CA_BUNDLE_PATH = certs.where() - -DEFAULT_PORTS = {'http': 80, 'https': 443} - - -if sys.platform == 'win32': - # provide a proxy_bypass version on Windows without DNS lookups - - def proxy_bypass_registry(host): - try: - if is_py3: - import winreg - else: - import _winreg as winreg - except ImportError: - return False - - try: - internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, - r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') - # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it - proxyEnable = int(winreg.QueryValueEx(internetSettings, - 'ProxyEnable')[0]) - # ProxyOverride is almost always a string - proxyOverride = winreg.QueryValueEx(internetSettings, - 'ProxyOverride')[0] - except OSError: - return False - if not proxyEnable or not proxyOverride: - return False - - # make a check value list from the registry entry: replace the - # '<local>' string by the localhost entry and the corresponding - # canonical entry. - proxyOverride = proxyOverride.split(';') - # now check if we match one of the registry values. - for test in proxyOverride: - if test == '<local>': - if '.' not in host: - return True - test = test.replace(".", r"\.") # mask dots - test = test.replace("*", r".*") # change glob sequence - test = test.replace("?", r".") # change glob char - if re.match(test, host, re.I): - return True - return False - - def proxy_bypass(host): # noqa - """Return True, if the host should be bypassed. - - Checks proxy settings gathered from the environment, if specified, - or the registry. - """ - if getproxies_environment(): - return proxy_bypass_environment(host) - else: - return proxy_bypass_registry(host) - - -def dict_to_sequence(d): - """Returns an internal sequence dictionary update.""" - - if hasattr(d, 'items'): - d = d.items() - - return d - - -def super_len(o): - total_length = None - current_position = 0 - - if hasattr(o, '__len__'): - total_length = len(o) - - elif hasattr(o, 'len'): - total_length = o.len - - elif hasattr(o, 'fileno'): - try: - fileno = o.fileno() - except io.UnsupportedOperation: - pass - else: - total_length = os.fstat(fileno).st_size - - # Having used fstat to determine the file length, we need to - # confirm that this file was opened up in binary mode. - if 'b' not in o.mode: - warnings.warn(( - "Requests has determined the content-length for this " - "request using the binary size of the file: however, the " - "file has been opened in text mode (i.e. without the 'b' " - "flag in the mode). This may lead to an incorrect " - "content-length. 
In Requests 3.0, support will be removed " - "for files in text mode."), - FileModeWarning - ) - - if hasattr(o, 'tell'): - try: - current_position = o.tell() - except (OSError, IOError): - # This can happen in some weird situations, such as when the file - # is actually a special file descriptor like stdin. In this - # instance, we don't know what the length is, so set it to zero and - # let requests chunk it instead. - if total_length is not None: - current_position = total_length - else: - if hasattr(o, 'seek') and total_length is None: - # StringIO and BytesIO have seek but no useable fileno - try: - # seek to end of file - o.seek(0, 2) - total_length = o.tell() - - # seek back to current position to support - # partially read file-like objects - o.seek(current_position or 0) - except (OSError, IOError): - total_length = 0 - - if total_length is None: - total_length = 0 - - return max(0, total_length - current_position) - - -def get_netrc_auth(url, raise_errors=False): - """Returns the Requests tuple auth for a given url from netrc.""" - - try: - from netrc import netrc, NetrcParseError - - netrc_path = None - - for f in NETRC_FILES: - try: - loc = os.path.expanduser('~/{}'.format(f)) - except KeyError: - # os.path.expanduser can fail when $HOME is undefined and - # getpwuid fails. See https://bugs.python.org/issue20164 & - # https://github.com/requests/requests/issues/1846 - return - - if os.path.exists(loc): - netrc_path = loc - break - - # Abort early if there isn't one. - if netrc_path is None: - return - - ri = urlparse(url) - - # Strip port numbers from netloc. This weird `if...encode`` dance is - # used for Python 3.2, which doesn't support unicode literals. - splitstr = b':' - if isinstance(url, str): - splitstr = splitstr.decode('ascii') - host = ri.netloc.split(splitstr)[0] - - try: - _netrc = netrc(netrc_path).authenticators(host) - if _netrc: - # Return with login / password - login_i = (0 if _netrc[0] else 1) - return (_netrc[login_i], _netrc[2]) - except (NetrcParseError, IOError): - # If there was a parsing error or a permissions issue reading the file, - # we'll just skip netrc auth unless explicitly asked to raise errors. - if raise_errors: - raise - - # AppEngine hackiness. - except (ImportError, AttributeError): - pass - - -def guess_filename(obj): - """Tries to guess the filename of the given object.""" - name = getattr(obj, 'name', None) - if (name and isinstance(name, basestring) and name[0] != '<' and - name[-1] != '>'): - return os.path.basename(name) - - -def extract_zipped_paths(path): - """Replace nonexistent paths that look like they refer to a member of a zip - archive with the location of an extracted copy of the target, or else - just return the provided path unchanged. 
- """ - if os.path.exists(path): - # this is already a valid path, no need to do anything further - return path - - # find the first valid part of the provided path and treat that as a zip archive - # assume the rest of the path is the name of a member in the archive - archive, member = os.path.split(path) - while archive and not os.path.exists(archive): - archive, prefix = os.path.split(archive) - member = '/'.join([prefix, member]) - - if not zipfile.is_zipfile(archive): - return path - - zip_file = zipfile.ZipFile(archive) - if member not in zip_file.namelist(): - return path - - # we have a valid zip archive and a valid member of that archive - tmp = tempfile.gettempdir() - extracted_path = os.path.join(tmp, *member.split('/')) - if not os.path.exists(extracted_path): - extracted_path = zip_file.extract(member, path=tmp) - - return extracted_path - - -def from_key_val_list(value): - """Take an object and test to see if it can be represented as a - dictionary. Unless it can not be represented as such, return an - OrderedDict, e.g., - - :: - - >>> from_key_val_list([('key', 'val')]) - OrderedDict([('key', 'val')]) - >>> from_key_val_list('string') - ValueError: cannot encode objects that are not 2-tuples - >>> from_key_val_list({'key': 'val'}) - OrderedDict([('key', 'val')]) - - :rtype: OrderedDict - """ - if value is None: - return None - - if isinstance(value, (str, bytes, bool, int)): - raise ValueError('cannot encode objects that are not 2-tuples') - - return OrderedDict(value) - - -def to_key_val_list(value): - """Take an object and test to see if it can be represented as a - dictionary. If it can be, return a list of tuples, e.g., - - :: - - >>> to_key_val_list([('key', 'val')]) - [('key', 'val')] - >>> to_key_val_list({'key': 'val'}) - [('key', 'val')] - >>> to_key_val_list('string') - ValueError: cannot encode objects that are not 2-tuples. - - :rtype: list - """ - if value is None: - return None - - if isinstance(value, (str, bytes, bool, int)): - raise ValueError('cannot encode objects that are not 2-tuples') - - if isinstance(value, Mapping): - value = value.items() - - return list(value) - - -# From mitsuhiko/werkzeug (used with permission). -def parse_list_header(value): - """Parse lists as described by RFC 2068 Section 2. - - In particular, parse comma-separated lists where the elements of - the list may include quoted-strings. A quoted-string could - contain a comma. A non-quoted string could have quotes in the - middle. Quotes are removed automatically after parsing. - - It basically works like :func:`parse_set_header` just that items - may appear multiple times and case sensitivity is preserved. - - The return value is a standard :class:`list`: - - >>> parse_list_header('token, "quoted value"') - ['token', 'quoted value'] - - To create a header from the :class:`list` again, use the - :func:`dump_header` function. - - :param value: a string with a list header. - :return: :class:`list` - :rtype: list - """ - result = [] - for item in _parse_list_header(value): - if item[:1] == item[-1:] == '"': - item = unquote_header_value(item[1:-1]) - result.append(item) - return result - - -# From mitsuhiko/werkzeug (used with permission). 
-def parse_dict_header(value): - """Parse lists of key, value pairs as described by RFC 2068 Section 2 and - convert them into a python dict: - - >>> d = parse_dict_header('foo="is a fish", bar="as well"') - >>> type(d) is dict - True - >>> sorted(d.items()) - [('bar', 'as well'), ('foo', 'is a fish')] - - If there is no value for a key it will be `None`: - - >>> parse_dict_header('key_without_value') - {'key_without_value': None} - - To create a header from the :class:`dict` again, use the - :func:`dump_header` function. - - :param value: a string with a dict header. - :return: :class:`dict` - :rtype: dict - """ - result = {} - for item in _parse_list_header(value): - if '=' not in item: - result[item] = None - continue - name, value = item.split('=', 1) - if value[:1] == value[-1:] == '"': - value = unquote_header_value(value[1:-1]) - result[name] = value - return result - - -# From mitsuhiko/werkzeug (used with permission). -def unquote_header_value(value, is_filename=False): - r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). - This does not use the real unquoting but what browsers are actually - using for quoting. - - :param value: the header value to unquote. - :rtype: str - """ - if value and value[0] == value[-1] == '"': - # this is not the real unquoting, but fixing this so that the - # RFC is met will result in bugs with internet explorer and - # probably some other browsers as well. IE for example is - # uploading files with "C:\foo\bar.txt" as filename - value = value[1:-1] - - # if this is a filename and the starting characters look like - # a UNC path, then just return the value without quotes. Using the - # replace sequence below on a UNC path has the effect of turning - # the leading double slash into a single slash and then - # _fix_ie_filename() doesn't work correctly. See #458. - if not is_filename or value[:2] != '\\\\': - return value.replace('\\\\', '\\').replace('\\"', '"') - return value - - -def dict_from_cookiejar(cj): - """Returns a key/value dictionary from a CookieJar. - - :param cj: CookieJar object to extract cookies from. - :rtype: dict - """ - - cookie_dict = {} - - for cookie in cj: - cookie_dict[cookie.name] = cookie.value - - return cookie_dict - - -def add_dict_to_cookiejar(cj, cookie_dict): - """Returns a CookieJar from a key/value dictionary. - - :param cj: CookieJar to insert cookies into. - :param cookie_dict: Dict of key/values to insert into CookieJar. - :rtype: CookieJar - """ - - return cookiejar_from_dict(cookie_dict, cj) - - -def get_encodings_from_content(content): - """Returns encodings from given content string. - - :param content: bytestring to extract encodings from. - """ - warnings.warn(( - 'In requests 3.0, get_encodings_from_content will be removed. For ' - 'more information, please see the discussion on issue #2266. 
(This' - ' warning should only appear once.)'), - DeprecationWarning) - - charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I) - pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I) - xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') - - return (charset_re.findall(content) + - pragma_re.findall(content) + - xml_re.findall(content)) - - -def _parse_content_type_header(header): - """Returns content type and parameters from given header - - :param header: string - :return: tuple containing content type and dictionary of - parameters - """ - - tokens = header.split(';') - content_type, params = tokens[0].strip(), tokens[1:] - params_dict = {} - items_to_strip = "\"' " - - for param in params: - param = param.strip() - if param: - key, value = param, True - index_of_equals = param.find("=") - if index_of_equals != -1: - key = param[:index_of_equals].strip(items_to_strip) - value = param[index_of_equals + 1:].strip(items_to_strip) - params_dict[key.lower()] = value - return content_type, params_dict - - -def get_encoding_from_headers(headers): - """Returns encodings from given HTTP Header Dict. - - :param headers: dictionary to extract encoding from. - :rtype: str - """ - - content_type = headers.get('content-type') - - if not content_type: - return None - - content_type, params = _parse_content_type_header(content_type) - - if 'charset' in params: - return params['charset'].strip("'\"") - - if 'text' in content_type: - return 'ISO-8859-1' - - -def stream_decode_response_unicode(iterator, r): - """Stream decodes a iterator.""" - - if r.encoding is None: - for item in iterator: - yield item - return - - decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') - for chunk in iterator: - rv = decoder.decode(chunk) - if rv: - yield rv - rv = decoder.decode(b'', final=True) - if rv: - yield rv - - -def iter_slices(string, slice_length): - """Iterate over slices of a string.""" - pos = 0 - if slice_length is None or slice_length <= 0: - slice_length = len(string) - while pos < len(string): - yield string[pos:pos + slice_length] - pos += slice_length - - -def get_unicode_from_response(r): - """Returns the requested content back in unicode. - - :param r: Response object to get unicode content from. - - Tried: - - 1. charset from content-type - 2. fall back and replace all unicode characters - - :rtype: str - """ - warnings.warn(( - 'In requests 3.0, get_unicode_from_response will be removed. For ' - 'more information, please see the discussion on issue #2266. (This' - ' warning should only appear once.)'), - DeprecationWarning) - - tried_encodings = [] - - # Try charset from content-type - encoding = get_encoding_from_headers(r.headers) - - if encoding: - try: - return str(r.content, encoding) - except UnicodeError: - tried_encodings.append(encoding) - - # Fall back: - try: - return str(r.content, encoding, errors='replace') - except TypeError: - return r.content - - -# The unreserved URI characters (RFC 3986) -UNRESERVED_SET = frozenset( - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") - - -def unquote_unreserved(uri): - """Un-escape any percent-escape sequences in a URI that are unreserved - characters. This leaves all reserved, illegal and non-ASCII bytes encoded. 
- - :rtype: str - """ - parts = uri.split('%') - for i in range(1, len(parts)): - h = parts[i][0:2] - if len(h) == 2 and h.isalnum(): - try: - c = chr(int(h, 16)) - except ValueError: - raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) - - if c in UNRESERVED_SET: - parts[i] = c + parts[i][2:] - else: - parts[i] = '%' + parts[i] - else: - parts[i] = '%' + parts[i] - return ''.join(parts) - - -def requote_uri(uri): - """Re-quote the given URI. - - This function passes the given URI through an unquote/quote cycle to - ensure that it is fully and consistently quoted. - - :rtype: str - """ - safe_with_percent = "!#$%&'()*+,/:;=?@[]~" - safe_without_percent = "!#$&'()*+,/:;=?@[]~" - try: - # Unquote only the unreserved characters - # Then quote only illegal characters (do not quote reserved, - # unreserved, or '%') - return quote(unquote_unreserved(uri), safe=safe_with_percent) - except InvalidURL: - # We couldn't unquote the given URI, so let's try quoting it, but - # there may be unquoted '%'s in the URI. We need to make sure they're - # properly quoted so they do not cause issues elsewhere. - return quote(uri, safe=safe_without_percent) - - -def address_in_network(ip, net): - """This function allows you to check if an IP belongs to a network subnet - - Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 - returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 - - :rtype: bool - """ - ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] - netaddr, bits = net.split('/') - netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] - network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask - return (ipaddr & netmask) == (network & netmask) - - -def dotted_netmask(mask): - """Converts mask from /xx format to xxx.xxx.xxx.xxx - - Example: if mask is 24 function returns 255.255.255.0 - - :rtype: str - """ - bits = 0xffffffff ^ (1 << 32 - mask) - 1 - return socket.inet_ntoa(struct.pack('>I', bits)) - - -def is_ipv4_address(string_ip): - """ - :rtype: bool - """ - try: - socket.inet_aton(string_ip) - except socket.error: - return False - return True - - -def is_valid_cidr(string_network): - """ - Very simple check of the cidr format in no_proxy variable. - - :rtype: bool - """ - if string_network.count('/') == 1: - try: - mask = int(string_network.split('/')[1]) - except ValueError: - return False - - if mask < 1 or mask > 32: - return False - - try: - socket.inet_aton(string_network.split('/')[0]) - except socket.error: - return False - else: - return False - return True - - -@contextlib.contextmanager -def set_environ(env_name, value): - """Set the environment variable 'env_name' to 'value' - - Save previous value, yield, and then restore the previous value stored in - the environment variable 'env_name'. - - If 'value' is None, do nothing""" - value_changed = value is not None - if value_changed: - old_value = os.environ.get(env_name) - os.environ[env_name] = value - try: - yield - finally: - if value_changed: - if old_value is None: - del os.environ[env_name] - else: - os.environ[env_name] = old_value - - -def should_bypass_proxies(url, no_proxy): - """ - Returns whether we should bypass proxies or not. - - :rtype: bool - """ - # Prioritize lowercase environment variables over uppercase - # to keep a consistent behaviour with other http projects (curl, wget). - get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) - - # First check whether no_proxy is defined. 
If it is, check that the URL - # we're getting isn't in the no_proxy list. - no_proxy_arg = no_proxy - if no_proxy is None: - no_proxy = get_proxy('no_proxy') - parsed = urlparse(url) - - if parsed.hostname is None: - # URLs don't always have hostnames, e.g. file:/// urls. - return True - - if no_proxy: - # We need to check whether we match here. We need to see if we match - # the end of the hostname, both with and without the port. - no_proxy = ( - host for host in no_proxy.replace(' ', '').split(',') if host - ) - - if is_ipv4_address(parsed.hostname): - for proxy_ip in no_proxy: - if is_valid_cidr(proxy_ip): - if address_in_network(parsed.hostname, proxy_ip): - return True - elif parsed.hostname == proxy_ip: - # If no_proxy ip was defined in plain IP notation instead of cidr notation & - # matches the IP of the index - return True - else: - host_with_port = parsed.hostname - if parsed.port: - host_with_port += ':{}'.format(parsed.port) - - for host in no_proxy: - if parsed.hostname.endswith(host) or host_with_port.endswith(host): - # The URL does match something in no_proxy, so we don't want - # to apply the proxies on this URL. - return True - - with set_environ('no_proxy', no_proxy_arg): - # parsed.hostname can be `None` in cases such as a file URI. - try: - bypass = proxy_bypass(parsed.hostname) - except (TypeError, socket.gaierror): - bypass = False - - if bypass: - return True - - return False - - -def get_environ_proxies(url, no_proxy=None): - """ - Return a dict of environment proxies. - - :rtype: dict - """ - if should_bypass_proxies(url, no_proxy=no_proxy): - return {} - else: - return getproxies() - - -def select_proxy(url, proxies): - """Select a proxy for the url, if applicable. - - :param url: The url being for the request - :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs - """ - proxies = proxies or {} - urlparts = urlparse(url) - if urlparts.hostname is None: - return proxies.get(urlparts.scheme, proxies.get('all')) - - proxy_keys = [ - urlparts.scheme + '://' + urlparts.hostname, - urlparts.scheme, - 'all://' + urlparts.hostname, - 'all', - ] - proxy = None - for proxy_key in proxy_keys: - if proxy_key in proxies: - proxy = proxies[proxy_key] - break - - return proxy - - -def default_user_agent(name="python-requests"): - """ - Return a string representing the default user agent. - - :rtype: str - """ - return '%s/%s' % (name, __version__) - - -def default_headers(): - """ - :rtype: requests.structures.CaseInsensitiveDict - """ - return CaseInsensitiveDict({ - 'User-Agent': default_user_agent(), - 'Accept-Encoding': ', '.join(('gzip', 'deflate')), - 'Accept': '*/*', - 'Connection': 'keep-alive', - }) - - -def parse_header_links(value): - """Return a list of parsed link headers proxies. - - i.e. 
Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg" - - :rtype: list - """ - - links = [] - - replace_chars = ' \'"' - - value = value.strip(replace_chars) - if not value: - return links - - for val in re.split(', *<', value): - try: - url, params = val.split(';', 1) - except ValueError: - url, params = val, '' - - link = {'url': url.strip('<> \'"')} - - for param in params.split(';'): - try: - key, value = param.split('=') - except ValueError: - break - - link[key.strip(replace_chars)] = value.strip(replace_chars) - - links.append(link) - - return links - - -# Null bytes; no need to recreate these on each call to guess_json_utf -_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 -_null2 = _null * 2 -_null3 = _null * 3 - - -def guess_json_utf(data): - """ - :rtype: str - """ - # JSON always starts with two ASCII characters, so detection is as - # easy as counting the nulls and from their location and count - # determine the encoding. Also detect a BOM, if present. - sample = data[:4] - if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): - return 'utf-32' # BOM included - if sample[:3] == codecs.BOM_UTF8: - return 'utf-8-sig' # BOM included, MS style (discouraged) - if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): - return 'utf-16' # BOM included - nullcount = sample.count(_null) - if nullcount == 0: - return 'utf-8' - if nullcount == 2: - if sample[::2] == _null2: # 1st and 3rd are null - return 'utf-16-be' - if sample[1::2] == _null2: # 2nd and 4th are null - return 'utf-16-le' - # Did not detect 2 valid UTF-16 ascii-range characters - if nullcount == 3: - if sample[:3] == _null3: - return 'utf-32-be' - if sample[1:] == _null3: - return 'utf-32-le' - # Did not detect a valid UTF-32 ascii-range character - return None - - -def prepend_scheme_if_needed(url, new_scheme): - """Given a URL that may or may not have a scheme, prepend the given scheme. - Does not replace a present scheme with the one provided as an argument. - - :rtype: str - """ - scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) - - # urlparse is a finicky beast, and sometimes decides that there isn't a - # netloc present. Assume that it's being over-cautious, and switch netloc - # and path if urlparse decided there was no netloc. - if not netloc: - netloc, path = path, netloc - - return urlunparse((scheme, netloc, path, params, query, fragment)) - - -def get_auth_from_url(url): - """Given a url with authentication components, extract them into a tuple of - username,password. - - :rtype: (str,str) - """ - parsed = urlparse(url) - - try: - auth = (unquote(parsed.username), unquote(parsed.password)) - except (AttributeError, TypeError): - auth = ('', '') - - return auth - - -# Moved outside of function to avoid recompile every call -_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') -_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') - - -def check_header_validity(header): - """Verifies that header value is a string which doesn't contain - leading whitespace or return characters. This prevents unintended - header injection. - - :param header: tuple, in the format (name, value). 
- """ - name, value = header - - if isinstance(value, bytes): - pat = _CLEAN_HEADER_REGEX_BYTE - else: - pat = _CLEAN_HEADER_REGEX_STR - try: - if not pat.match(value): - raise InvalidHeader("Invalid return character or leading space in header: %s" % name) - except TypeError: - raise InvalidHeader("Value for header {%s: %s} must be of type str or " - "bytes, not %s" % (name, value, type(value))) - - -def urldefragauth(url): - """ - Given a url remove the fragment and the authentication part. - - :rtype: str - """ - scheme, netloc, path, params, query, fragment = urlparse(url) - - # see func:`prepend_scheme_if_needed` - if not netloc: - netloc, path = path, netloc - - netloc = netloc.rsplit('@', 1)[-1] - - return urlunparse((scheme, netloc, path, params, query, '')) - - -def rewind_body(prepared_request): - """Move file pointer back to its recorded starting position - so it can be read again on redirect. - """ - body_seek = getattr(prepared_request.body, 'seek', None) - if body_seek is not None and isinstance(prepared_request._body_position, integer_types): - try: - body_seek(prepared_request._body_position) - except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect.") - else: - raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyc b/env/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyc deleted file mode 100644 index 555b4dc6..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/retrying.py b/env/lib/python2.7/site-packages/pip/_vendor/retrying.py deleted file mode 100644 index 6d1e627a..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/retrying.py +++ /dev/null @@ -1,267 +0,0 @@ -## Copyright 2013-2014 Ray Holder -## -## Licensed under the Apache License, Version 2.0 (the "License"); -## you may not use this file except in compliance with the License. -## You may obtain a copy of the License at -## -## http://www.apache.org/licenses/LICENSE-2.0 -## -## Unless required by applicable law or agreed to in writing, software -## distributed under the License is distributed on an "AS IS" BASIS, -## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -## See the License for the specific language governing permissions and -## limitations under the License. - -import random -from pip._vendor import six -import sys -import time -import traceback - - -# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint... 
-MAX_WAIT = 1073741823 - - -def retry(*dargs, **dkw): - """ - Decorator function that instantiates the Retrying object - @param *dargs: positional arguments passed to Retrying object - @param **dkw: keyword arguments passed to the Retrying object - """ - # support both @retry and @retry() as valid syntax - if len(dargs) == 1 and callable(dargs[0]): - def wrap_simple(f): - - @six.wraps(f) - def wrapped_f(*args, **kw): - return Retrying().call(f, *args, **kw) - - return wrapped_f - - return wrap_simple(dargs[0]) - - else: - def wrap(f): - - @six.wraps(f) - def wrapped_f(*args, **kw): - return Retrying(*dargs, **dkw).call(f, *args, **kw) - - return wrapped_f - - return wrap - - -class Retrying(object): - - def __init__(self, - stop=None, wait=None, - stop_max_attempt_number=None, - stop_max_delay=None, - wait_fixed=None, - wait_random_min=None, wait_random_max=None, - wait_incrementing_start=None, wait_incrementing_increment=None, - wait_exponential_multiplier=None, wait_exponential_max=None, - retry_on_exception=None, - retry_on_result=None, - wrap_exception=False, - stop_func=None, - wait_func=None, - wait_jitter_max=None): - - self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number - self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay - self._wait_fixed = 1000 if wait_fixed is None else wait_fixed - self._wait_random_min = 0 if wait_random_min is None else wait_random_min - self._wait_random_max = 1000 if wait_random_max is None else wait_random_max - self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start - self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment - self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier - self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max - self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max - - # TODO add chaining of stop behaviors - # stop behavior - stop_funcs = [] - if stop_max_attempt_number is not None: - stop_funcs.append(self.stop_after_attempt) - - if stop_max_delay is not None: - stop_funcs.append(self.stop_after_delay) - - if stop_func is not None: - self.stop = stop_func - - elif stop is None: - self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs) - - else: - self.stop = getattr(self, stop) - - # TODO add chaining of wait behaviors - # wait behavior - wait_funcs = [lambda *args, **kwargs: 0] - if wait_fixed is not None: - wait_funcs.append(self.fixed_sleep) - - if wait_random_min is not None or wait_random_max is not None: - wait_funcs.append(self.random_sleep) - - if wait_incrementing_start is not None or wait_incrementing_increment is not None: - wait_funcs.append(self.incrementing_sleep) - - if wait_exponential_multiplier is not None or wait_exponential_max is not None: - wait_funcs.append(self.exponential_sleep) - - if wait_func is not None: - self.wait = wait_func - - elif wait is None: - self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs) - - else: - self.wait = getattr(self, wait) - - # retry on exception filter - if retry_on_exception is None: - self._retry_on_exception = self.always_reject - else: - self._retry_on_exception = retry_on_exception - - # TODO simplify retrying by Exception types - # retry on result filter - if retry_on_result is None: - self._retry_on_result = self.never_reject - else: - 
self._retry_on_result = retry_on_result - - self._wrap_exception = wrap_exception - - def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms): - """Stop after the previous attempt >= stop_max_attempt_number.""" - return previous_attempt_number >= self._stop_max_attempt_number - - def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms): - """Stop after the time from the first attempt >= stop_max_delay.""" - return delay_since_first_attempt_ms >= self._stop_max_delay - - def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): - """Don't sleep at all before retrying.""" - return 0 - - def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): - """Sleep a fixed amount of time between each retry.""" - return self._wait_fixed - - def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): - """Sleep a random amount of time between wait_random_min and wait_random_max""" - return random.randint(self._wait_random_min, self._wait_random_max) - - def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): - """ - Sleep an incremental amount of time after each attempt, starting at - wait_incrementing_start and incrementing by wait_incrementing_increment - """ - result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1)) - if result < 0: - result = 0 - return result - - def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): - exp = 2 ** previous_attempt_number - result = self._wait_exponential_multiplier * exp - if result > self._wait_exponential_max: - result = self._wait_exponential_max - if result < 0: - result = 0 - return result - - def never_reject(self, result): - return False - - def always_reject(self, result): - return True - - def should_reject(self, attempt): - reject = False - if attempt.has_exception: - reject |= self._retry_on_exception(attempt.value[1]) - else: - reject |= self._retry_on_result(attempt.value) - - return reject - - def call(self, fn, *args, **kwargs): - start_time = int(round(time.time() * 1000)) - attempt_number = 1 - while True: - try: - attempt = Attempt(fn(*args, **kwargs), attempt_number, False) - except: - tb = sys.exc_info() - attempt = Attempt(tb, attempt_number, True) - - if not self.should_reject(attempt): - return attempt.get(self._wrap_exception) - - delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time - if self.stop(attempt_number, delay_since_first_attempt_ms): - if not self._wrap_exception and attempt.has_exception: - # get() on an attempt with an exception should cause it to be raised, but raise just in case - raise attempt.get() - else: - raise RetryError(attempt) - else: - sleep = self.wait(attempt_number, delay_since_first_attempt_ms) - if self._wait_jitter_max: - jitter = random.random() * self._wait_jitter_max - sleep = sleep + max(0, jitter) - time.sleep(sleep / 1000.0) - - attempt_number += 1 - - -class Attempt(object): - """ - An Attempt encapsulates a call to a target function that may end as a - normal return value from the function or an Exception depending on what - occurred during the execution. - """ - - def __init__(self, value, attempt_number, has_exception): - self.value = value - self.attempt_number = attempt_number - self.has_exception = has_exception - - def get(self, wrap_exception=False): - """ - Return the return value of this Attempt instance or raise an Exception. 
- If wrap_exception is true, this Attempt is wrapped inside of a - RetryError before being raised. - """ - if self.has_exception: - if wrap_exception: - raise RetryError(self) - else: - six.reraise(self.value[0], self.value[1], self.value[2]) - else: - return self.value - - def __repr__(self): - if self.has_exception: - return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, "".join(traceback.format_tb(self.value[2]))) - else: - return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value) - - -class RetryError(Exception): - """ - A RetryError encapsulates the last Attempt instance right before giving up. - """ - - def __init__(self, last_attempt): - self.last_attempt = last_attempt - - def __str__(self): - return "RetryError[{0}]".format(self.last_attempt) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/retrying.pyc b/env/lib/python2.7/site-packages/pip/_vendor/retrying.pyc deleted file mode 100644 index 915007fc..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/retrying.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/six.py b/env/lib/python2.7/site-packages/pip/_vendor/six.py deleted file mode 100644 index 89b2188f..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/six.py +++ /dev/null @@ -1,952 +0,0 @@ -# Copyright (c) 2010-2018 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Utilities for writing code that runs on Python 2 and 3""" - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson <benjamin@python.org>" -__version__ = "1.12.0" - - -# Useful for very coarse version differentiation. -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
- class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. - delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." + fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. 
- - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("getoutput", "commands", "subprocess"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - 
MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." + attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("splitvalue", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - 
"moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), - MovedAttribute("parse_http_list", "urllib2", "urllib.request"), - MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - 
-class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - 
callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, 
_locs_""") - - exec_("""def reraise(tp, value, tb=None): - try: - raise tp, value, tb - finally: - tb = None -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - try: - if from_value is None: - raise value - raise value from from_value - finally: - value = None -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - try: - raise value from from_value - finally: - value = None -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. - if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. 
- class metaclass(type): - - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - if hasattr(cls, '__qualname__'): - orig_vars['__qualname__'] = cls.__qualname__ - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def ensure_binary(s, encoding='utf-8', errors='strict'): - """Coerce **s** to six.binary_type. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> encoded to `bytes` - - `bytes` -> `bytes` - """ - if isinstance(s, text_type): - return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def ensure_str(s, encoding='utf-8', errors='strict'): - """Coerce *s* to `str`. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) - if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) - elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) - return s - - -def ensure_text(s, encoding='utf-8', errors='strict'): - """Coerce *s* to six.text_type. - - For Python 2: - - `unicode` -> `unicode` - - `str` -> `unicode` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if isinstance(s, binary_type): - return s.decode(encoding, errors) - elif isinstance(s, text_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. 
Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/six.pyc b/env/lib/python2.7/site-packages/pip/_vendor/six.pyc deleted file mode 100644 index 756ab6f2..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/six.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.py deleted file mode 100644 index c4c0dde5..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.py +++ /dev/null @@ -1,91 +0,0 @@ -""" -urllib3 - Thread-safe connection pooling and re-using. -""" -from __future__ import absolute_import -import warnings - -from .connectionpool import ( - HTTPConnectionPool, - HTTPSConnectionPool, - connection_from_url -) - -from . import exceptions -from .filepost import encode_multipart_formdata -from .poolmanager import PoolManager, ProxyManager, proxy_from_url -from .response import HTTPResponse -from .util.request import make_headers -from .util.url import get_host -from .util.timeout import Timeout -from .util.retry import Retry - - -# Set default logging handler to avoid "No handler found" warnings. -import logging -from logging import NullHandler - -__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' -__license__ = 'MIT' -__version__ = '1.25.3' - -__all__ = ( - 'HTTPConnectionPool', - 'HTTPSConnectionPool', - 'PoolManager', - 'ProxyManager', - 'HTTPResponse', - 'Retry', - 'Timeout', - 'add_stderr_logger', - 'connection_from_url', - 'disable_warnings', - 'encode_multipart_formdata', - 'get_host', - 'make_headers', - 'proxy_from_url', -) - -logging.getLogger(__name__).addHandler(NullHandler()) - - -def add_stderr_logger(level=logging.DEBUG): - """ - Helper for quickly adding a StreamHandler to the logger. Useful for - debugging. - - Returns the handler after adding it. - """ - # This method needs to be in this __init__.py to get the __name__ correct - # even if urllib3 is vendored within another package. - logger = logging.getLogger(__name__) - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) - logger.addHandler(handler) - logger.setLevel(level) - logger.debug('Added a stderr logging handler to logger: %s', __name__) - return handler - - -# ... Clean up. -del NullHandler - - -# All warning filters *must* be appended unless you're really certain that they -# shouldn't be: otherwise, it's very hard for users to use most Python -# mechanisms to silence them. -# SecurityWarning's always go off by default. -warnings.simplefilter('always', exceptions.SecurityWarning, append=True) -# SubjectAltNameWarning's should go off once per host -warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True) -# InsecurePlatformWarning's don't vary between requests, so we keep it default. -warnings.simplefilter('default', exceptions.InsecurePlatformWarning, - append=True) -# SNIMissingWarnings should go off only once. 
-warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True) - - -def disable_warnings(category=exceptions.HTTPWarning): - """ - Helper for quickly disabling all urllib3 warnings. - """ - warnings.simplefilter('ignore', category) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pyc deleted file mode 100644 index 89162b97..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyc deleted file mode 100644 index d78fe27f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connection.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connection.py deleted file mode 100644 index 57c58fed..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connection.py +++ /dev/null @@ -1,417 +0,0 @@ -from __future__ import absolute_import -import datetime -import logging -import os -import socket -from socket import error as SocketError, timeout as SocketTimeout -import warnings -from .packages import six -from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection -from .packages.six.moves.http_client import HTTPException # noqa: F401 - -try: # Compiled with SSL? - import ssl - BaseSSLError = ssl.SSLError -except (ImportError, AttributeError): # Platform-specific: No SSL. - ssl = None - - class BaseSSLError(BaseException): - pass - - -try: - # Python 3: not a no-op, we're adding this to the namespace so it can be imported. - ConnectionError = ConnectionError -except NameError: - # Python 2 - class ConnectionError(Exception): - pass - - -from .exceptions import ( - NewConnectionError, - ConnectTimeoutError, - SubjectAltNameWarning, - SystemTimeWarning, -) -from .packages.ssl_match_hostname import match_hostname, CertificateError - -from .util.ssl_ import ( - resolve_cert_reqs, - resolve_ssl_version, - assert_fingerprint, - create_urllib3_context, - ssl_wrap_socket -) - - -from .util import connection - -from ._collections import HTTPHeaderDict - -log = logging.getLogger(__name__) - -port_by_scheme = { - 'http': 80, - 'https': 443, -} - -# When updating RECENT_DATE, move it to within two years of the current date, -# and not less than 6 months ago. -# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or -# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months) -RECENT_DATE = datetime.date(2017, 6, 30) - - -class DummyConnection(object): - """Used to detect a failed ConnectionCls import.""" - pass - - -class HTTPConnection(_HTTPConnection, object): - """ - Based on httplib.HTTPConnection but provides an extra constructor - backwards-compatibility layer between older and newer Pythons. - - Additional keyword parameters are used to configure attributes of the connection. - Accepted parameters include: - - - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - - ``source_address``: Set the source address for the current connection. - - ``socket_options``: Set specific options on the underlying socket. 
If not specified, then - defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling - Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. - - For example, if you wish to enable TCP Keep Alive in addition to the defaults, - you might pass:: - - HTTPConnection.default_socket_options + [ - (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), - ] - - Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). - """ - - default_port = port_by_scheme['http'] - - #: Disable Nagle's algorithm by default. - #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` - default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] - - #: Whether this connection verifies the host's certificate. - is_verified = False - - def __init__(self, *args, **kw): - if six.PY3: - kw.pop('strict', None) - - # Pre-set source_address. - self.source_address = kw.get('source_address') - - #: The socket options provided by the user. If no options are - #: provided, we use the default options. - self.socket_options = kw.pop('socket_options', self.default_socket_options) - - _HTTPConnection.__init__(self, *args, **kw) - - @property - def host(self): - """ - Getter method to remove any trailing dots that indicate the hostname is an FQDN. - - In general, SSL certificates don't include the trailing dot indicating a - fully-qualified domain name, and thus, they don't validate properly when - checked against a domain name that includes the dot. In addition, some - servers may not expect to receive the trailing dot when provided. - - However, the hostname with trailing dot is critical to DNS resolution; doing a - lookup with the trailing dot will properly only resolve the appropriate FQDN, - whereas a lookup without a trailing dot will search the system's search domain - list. Thus, it's important to keep the original host around for use only in - those cases where it's appropriate (i.e., when doing DNS lookup to establish the - actual TCP connection across which we're going to send HTTP requests). - """ - return self._dns_host.rstrip('.') - - @host.setter - def host(self, value): - """ - Setter for the `host` property. - - We assume that only urllib3 uses the _dns_host attribute; httplib itself - only uses `host`, and it seems reasonable that other libraries follow suit. - """ - self._dns_host = value - - def _new_conn(self): - """ Establish a socket connection and set nodelay settings on it. - - :return: New socket connection. - """ - extra_kw = {} - if self.source_address: - extra_kw['source_address'] = self.source_address - - if self.socket_options: - extra_kw['socket_options'] = self.socket_options - - try: - conn = connection.create_connection( - (self._dns_host, self.port), self.timeout, **extra_kw) - - except SocketTimeout: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) - - except SocketError as e: - raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) - - return conn - - def _prepare_conn(self, conn): - self.sock = conn - # Google App Engine's httplib does not define _tunnel_host - if getattr(self, '_tunnel_host', None): - # TODO: Fix tunnel so it doesn't depend on self.sock state. 
- self._tunnel() - # Mark this connection as not reusable - self.auto_open = 0 - - def connect(self): - conn = self._new_conn() - self._prepare_conn(conn) - - def request_chunked(self, method, url, body=None, headers=None): - """ - Alternative to the common request method, which sends the - body with chunked encoding and not as one block - """ - headers = HTTPHeaderDict(headers if headers is not None else {}) - skip_accept_encoding = 'accept-encoding' in headers - skip_host = 'host' in headers - self.putrequest( - method, - url, - skip_accept_encoding=skip_accept_encoding, - skip_host=skip_host - ) - for header, value in headers.items(): - self.putheader(header, value) - if 'transfer-encoding' not in headers: - self.putheader('Transfer-Encoding', 'chunked') - self.endheaders() - - if body is not None: - stringish_types = six.string_types + (bytes,) - if isinstance(body, stringish_types): - body = (body,) - for chunk in body: - if not chunk: - continue - if not isinstance(chunk, bytes): - chunk = chunk.encode('utf8') - len_str = hex(len(chunk))[2:] - self.send(len_str.encode('utf-8')) - self.send(b'\r\n') - self.send(chunk) - self.send(b'\r\n') - - # After the if clause, to always have a closed body - self.send(b'0\r\n\r\n') - - -class HTTPSConnection(HTTPConnection): - default_port = port_by_scheme['https'] - - ssl_version = None - - def __init__(self, host, port=None, key_file=None, cert_file=None, - key_password=None, strict=None, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - ssl_context=None, server_hostname=None, **kw): - - HTTPConnection.__init__(self, host, port, strict=strict, - timeout=timeout, **kw) - - self.key_file = key_file - self.cert_file = cert_file - self.key_password = key_password - self.ssl_context = ssl_context - self.server_hostname = server_hostname - - # Required property for Google AppEngine 1.9.0 which otherwise causes - # HTTPS requests to go out as HTTP. (See Issue #356) - self._protocol = 'https' - - def connect(self): - conn = self._new_conn() - self._prepare_conn(conn) - - # Wrap socket using verification with the root certs in - # trusted_root_certs - default_ssl_context = False - if self.ssl_context is None: - default_ssl_context = True - self.ssl_context = create_urllib3_context( - ssl_version=resolve_ssl_version(self.ssl_version), - cert_reqs=resolve_cert_reqs(self.cert_reqs), - ) - - # Try to load OS default certs if none are given. - # Works well on Windows (requires Python3.4+) - context = self.ssl_context - if (not self.ca_certs and not self.ca_cert_dir and default_ssl_context - and hasattr(context, 'load_default_certs')): - context.load_default_certs() - - self.sock = ssl_wrap_socket( - sock=conn, - keyfile=self.key_file, - certfile=self.cert_file, - key_password=self.key_password, - ssl_context=self.ssl_context, - server_hostname=self.server_hostname - ) - - -class VerifiedHTTPSConnection(HTTPSConnection): - """ - Based on httplib.HTTPSConnection but wraps the socket with - SSL certification. - """ - cert_reqs = None - ca_certs = None - ca_cert_dir = None - ssl_version = None - assert_fingerprint = None - - def set_cert(self, key_file=None, cert_file=None, - cert_reqs=None, key_password=None, ca_certs=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None): - """ - This method should only be called once, before the connection is used. - """ - # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also - # have an SSLContext object in which case we'll use its verify_mode. 
- if cert_reqs is None: - if self.ssl_context is not None: - cert_reqs = self.ssl_context.verify_mode - else: - cert_reqs = resolve_cert_reqs(None) - - self.key_file = key_file - self.cert_file = cert_file - self.cert_reqs = cert_reqs - self.key_password = key_password - self.assert_hostname = assert_hostname - self.assert_fingerprint = assert_fingerprint - self.ca_certs = ca_certs and os.path.expanduser(ca_certs) - self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) - - def connect(self): - # Add certificate verification - conn = self._new_conn() - hostname = self.host - - # Google App Engine's httplib does not define _tunnel_host - if getattr(self, '_tunnel_host', None): - self.sock = conn - # Calls self._set_hostport(), so self.host is - # self._tunnel_host below. - self._tunnel() - # Mark this connection as not reusable - self.auto_open = 0 - - # Override the host with the one we're requesting data from. - hostname = self._tunnel_host - - server_hostname = hostname - if self.server_hostname is not None: - server_hostname = self.server_hostname - - is_time_off = datetime.date.today() < RECENT_DATE - if is_time_off: - warnings.warn(( - 'System time is way off (before {0}). This will probably ' - 'lead to SSL verification errors').format(RECENT_DATE), - SystemTimeWarning - ) - - # Wrap socket using verification with the root certs in - # trusted_root_certs - default_ssl_context = False - if self.ssl_context is None: - default_ssl_context = True - self.ssl_context = create_urllib3_context( - ssl_version=resolve_ssl_version(self.ssl_version), - cert_reqs=resolve_cert_reqs(self.cert_reqs), - ) - - context = self.ssl_context - context.verify_mode = resolve_cert_reqs(self.cert_reqs) - - # Try to load OS default certs if none are given. - # Works well on Windows (requires Python3.4+) - if (not self.ca_certs and not self.ca_cert_dir and default_ssl_context - and hasattr(context, 'load_default_certs')): - context.load_default_certs() - - self.sock = ssl_wrap_socket( - sock=conn, - keyfile=self.key_file, - certfile=self.cert_file, - key_password=self.key_password, - ca_certs=self.ca_certs, - ca_cert_dir=self.ca_cert_dir, - server_hostname=server_hostname, - ssl_context=context) - - if self.assert_fingerprint: - assert_fingerprint(self.sock.getpeercert(binary_form=True), - self.assert_fingerprint) - elif context.verify_mode != ssl.CERT_NONE \ - and not getattr(context, 'check_hostname', False) \ - and self.assert_hostname is not False: - # While urllib3 attempts to always turn off hostname matching from - # the TLS library, this cannot always be done. So we check whether - # the TLS Library still thinks it's matching hostnames. - cert = self.sock.getpeercert() - if not cert.get('subjectAltName', ()): - warnings.warn(( - 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' - '`commonName` for now. This feature is being removed by major browsers and ' - 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 ' - 'for details.)'.format(hostname)), - SubjectAltNameWarning - ) - _match_hostname(cert, self.assert_hostname or server_hostname) - - self.is_verified = ( - context.verify_mode == ssl.CERT_REQUIRED or - self.assert_fingerprint is not None - ) - - -def _match_hostname(cert, asserted_hostname): - try: - match_hostname(cert, asserted_hostname) - except CertificateError as e: - log.error( - 'Certificate did not match expected hostname: %s. 
' - 'Certificate: %s', asserted_hostname, cert - ) - # Add cert to exception and reraise so client code can inspect - # the cert when catching the exception, if they want to - e._peer_cert = cert - raise - - -if ssl: - # Make a copy for testing. - UnverifiedHTTPSConnection = HTTPSConnection - HTTPSConnection = VerifiedHTTPSConnection -else: - HTTPSConnection = DummyConnection diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connection.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connection.pyc deleted file mode 100644 index 813667dc..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connection.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.py deleted file mode 100644 index 157568a3..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.py +++ /dev/null @@ -1,897 +0,0 @@ -from __future__ import absolute_import -import errno -import logging -import sys -import warnings - -from socket import error as SocketError, timeout as SocketTimeout -import socket - - -from .exceptions import ( - ClosedPoolError, - ProtocolError, - EmptyPoolError, - HeaderParsingError, - HostChangedError, - LocationValueError, - MaxRetryError, - ProxyError, - ReadTimeoutError, - SSLError, - TimeoutError, - InsecureRequestWarning, - NewConnectionError, -) -from .packages.ssl_match_hostname import CertificateError -from .packages import six -from .packages.six.moves import queue -from .packages.rfc3986.normalizers import normalize_host -from .connection import ( - port_by_scheme, - DummyConnection, - HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, - HTTPException, BaseSSLError, -) -from .request import RequestMethods -from .response import HTTPResponse - -from .util.connection import is_connection_dropped -from .util.request import set_file_position -from .util.response import assert_header_parsing -from .util.retry import Retry -from .util.timeout import Timeout -from .util.url import get_host, Url, NORMALIZABLE_SCHEMES -from .util.queue import LifoQueue - - -xrange = six.moves.xrange - -log = logging.getLogger(__name__) - -_Default = object() - - -# Pool objects -class ConnectionPool(object): - """ - Base class for all connection pools, such as - :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. - """ - - scheme = None - QueueCls = LifoQueue - - def __init__(self, host, port=None): - if not host: - raise LocationValueError("No host specified.") - - self.host = _normalize_host(host, scheme=self.scheme) - self._proxy_host = host.lower() - self.port = port - - def __str__(self): - return '%s(host=%r, port=%r)' % (type(self).__name__, - self.host, self.port) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - # Return False to re-raise any potential exceptions - return False - - def close(self): - """ - Close all pooled connections and disable the pool. - """ - pass - - -# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 -_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} - - -class HTTPConnectionPool(ConnectionPool, RequestMethods): - """ - Thread-safe connection pool for one host. - - :param host: - Host used for this HTTP Connection (e.g. "localhost"), passed into - :class:`httplib.HTTPConnection`. 
- - :param port: - Port used for this HTTP Connection (None is equivalent to 80), passed - into :class:`httplib.HTTPConnection`. - - :param strict: - Causes BadStatusLine to be raised if the status line can't be parsed - as a valid HTTP/1.0 or 1.1 status line, passed into - :class:`httplib.HTTPConnection`. - - .. note:: - Only works in Python 2. This parameter is ignored in Python 3. - - :param timeout: - Socket timeout in seconds for each individual connection. This can - be a float or integer, which sets the timeout for the HTTP request, - or an instance of :class:`urllib3.util.Timeout` which gives you more - fine-grained control over request timeouts. After the constructor has - been parsed, this is always a `urllib3.util.Timeout` object. - - :param maxsize: - Number of connections to save that can be reused. More than 1 is useful - in multithreaded situations. If ``block`` is set to False, more - connections will be created but they will not be saved once they've - been used. - - :param block: - If set to True, no more than ``maxsize`` connections will be used at - a time. When no free connections are available, the call will block - until a connection has been released. This is a useful side effect for - particular multithreaded situations where one does not want to use more - than maxsize connections per host to prevent flooding. - - :param headers: - Headers to include with all requests, unless other headers are given - explicitly. - - :param retries: - Retry configuration to use by default with requests in this pool. - - :param _proxy: - Parsed proxy URL, should not be used directly, instead, see - :class:`urllib3.connectionpool.ProxyManager`" - - :param _proxy_headers: - A dictionary with proxy headers, should not be used directly, - instead, see :class:`urllib3.connectionpool.ProxyManager`" - - :param \\**conn_kw: - Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, - :class:`urllib3.connection.HTTPSConnection` instances. - """ - - scheme = 'http' - ConnectionCls = HTTPConnection - ResponseCls = HTTPResponse - - def __init__(self, host, port=None, strict=False, - timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, - headers=None, retries=None, - _proxy=None, _proxy_headers=None, - **conn_kw): - ConnectionPool.__init__(self, host, port) - RequestMethods.__init__(self, headers) - - self.strict = strict - - if not isinstance(timeout, Timeout): - timeout = Timeout.from_float(timeout) - - if retries is None: - retries = Retry.DEFAULT - - self.timeout = timeout - self.retries = retries - - self.pool = self.QueueCls(maxsize) - self.block = block - - self.proxy = _proxy - self.proxy_headers = _proxy_headers or {} - - # Fill the queue up so that doing get() on it will block properly - for _ in xrange(maxsize): - self.pool.put(None) - - # These are mostly for testing and debugging purposes. - self.num_connections = 0 - self.num_requests = 0 - self.conn_kw = conn_kw - - if self.proxy: - # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. - # We cannot know if the user has added default socket options, so we cannot replace the - # list. - self.conn_kw.setdefault('socket_options', []) - - def _new_conn(self): - """ - Return a fresh :class:`HTTPConnection`. 
- """ - self.num_connections += 1 - log.debug("Starting new HTTP connection (%d): %s:%s", - self.num_connections, self.host, self.port or "80") - - conn = self.ConnectionCls(host=self.host, port=self.port, - timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) - return conn - - def _get_conn(self, timeout=None): - """ - Get a connection. Will return a pooled connection if one is available. - - If no connections are available and :prop:`.block` is ``False``, then a - fresh connection is returned. - - :param timeout: - Seconds to wait before giving up and raising - :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and - :prop:`.block` is ``True``. - """ - conn = None - try: - conn = self.pool.get(block=self.block, timeout=timeout) - - except AttributeError: # self.pool is None - raise ClosedPoolError(self, "Pool is closed.") - - except queue.Empty: - if self.block: - raise EmptyPoolError(self, - "Pool reached maximum size and no more " - "connections are allowed.") - pass # Oh well, we'll create a new connection then - - # If this is a persistent connection, check if it got disconnected - if conn and is_connection_dropped(conn): - log.debug("Resetting dropped connection: %s", self.host) - conn.close() - if getattr(conn, 'auto_open', 1) == 0: - # This is a proxied connection that has been mutated by - # httplib._tunnel() and cannot be reused (since it would - # attempt to bypass the proxy) - conn = None - - return conn or self._new_conn() - - def _put_conn(self, conn): - """ - Put a connection back into the pool. - - :param conn: - Connection object for the current host and port as returned by - :meth:`._new_conn` or :meth:`._get_conn`. - - If the pool is already full, the connection is closed and discarded - because we exceeded maxsize. If connections are discarded frequently, - then maxsize should be increased. - - If the pool is closed, then the connection will be closed and discarded. - """ - try: - self.pool.put(conn, block=False) - return # Everything is dandy, done. - except AttributeError: - # self.pool is None. - pass - except queue.Full: - # This should never happen if self.block == True - log.warning( - "Connection pool is full, discarding connection: %s", - self.host) - - # Connection never got put back into the pool, close it. - if conn: - conn.close() - - def _validate_conn(self, conn): - """ - Called right before a request is made, after the socket is created. - """ - pass - - def _prepare_proxy(self, conn): - # Nothing to do for HTTP connections. - pass - - def _get_timeout(self, timeout): - """ Helper that always returns a :class:`urllib3.util.Timeout` """ - if timeout is _Default: - return self.timeout.clone() - - if isinstance(timeout, Timeout): - return timeout.clone() - else: - # User passed us an int/float. This is for backwards compatibility, - # can be removed later - return Timeout.from_float(timeout) - - def _raise_timeout(self, err, url, timeout_value): - """Is the error actually a timeout? Will raise a ReadTimeout or pass""" - - if isinstance(err, SocketTimeout): - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - # See the above comment about EAGAIN in Python 3. In Python 2 we have - # to specifically catch it and throw the timeout error - if hasattr(err, 'errno') and err.errno in _blocking_errnos: - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - # Catch possible read timeouts thrown as SSL errors. If not the - # case, rethrow the original. 
We need to do this because of: - # http://bugs.python.org/issue10272 - if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python < 2.7.4 - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - def _make_request(self, conn, method, url, timeout=_Default, chunked=False, - **httplib_request_kw): - """ - Perform a request on a given urllib connection object taken from our - pool. - - :param conn: - a connection from one of our connection pools - - :param timeout: - Socket timeout in seconds for the request. This can be a - float or integer, which will set the same timeout value for - the socket connect and the socket read, or an instance of - :class:`urllib3.util.Timeout`, which gives you more fine-grained - control over your timeouts. - """ - self.num_requests += 1 - - timeout_obj = self._get_timeout(timeout) - timeout_obj.start_connect() - conn.timeout = timeout_obj.connect_timeout - - # Trigger any extra validation we need to do. - try: - self._validate_conn(conn) - except (SocketTimeout, BaseSSLError) as e: - # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. - self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) - raise - - # conn.request() calls httplib.*.request, not the method in - # urllib3.request. It also calls makefile (recv) on the socket. - if chunked: - conn.request_chunked(method, url, **httplib_request_kw) - else: - conn.request(method, url, **httplib_request_kw) - - # Reset the timeout for the recv() on the socket - read_timeout = timeout_obj.read_timeout - - # App Engine doesn't have a sock attr - if getattr(conn, 'sock', None): - # In Python 3 socket.py will catch EAGAIN and return None when you - # try and read into the file pointer created by http.client, which - # instead raises a BadStatusLine exception. Instead of catching - # the exception and assuming all BadStatusLine exceptions are read - # timeouts, check for a zero timeout before making the request. - if read_timeout == 0: - raise ReadTimeoutError( - self, url, "Read timed out. (read timeout=%s)" % read_timeout) - if read_timeout is Timeout.DEFAULT_TIMEOUT: - conn.sock.settimeout(socket.getdefaulttimeout()) - else: # None or a value - conn.sock.settimeout(read_timeout) - - # Receive the response from the server - try: - try: - # Python 2.7, use buffering of HTTP responses - httplib_response = conn.getresponse(buffering=True) - except TypeError: - # Python 3 - try: - httplib_response = conn.getresponse() - except Exception as e: - # Remove the TypeError from the exception chain in Python 3; - # otherwise it looks like a programming error was the cause. - six.raise_from(e, None) - except (SocketTimeout, BaseSSLError, SocketError) as e: - self._raise_timeout(err=e, url=url, timeout_value=read_timeout) - raise - - # AppEngine doesn't have a version attr. - http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') - log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port, - method, url, http_version, httplib_response.status, - httplib_response.length) - - try: - assert_header_parsing(httplib_response.msg) - except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 - log.warning( - 'Failed to parse headers (url=%s): %s', - self._absolute_url(url), hpe, exc_info=True) - - return httplib_response - - def _absolute_url(self, path): - return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url - - def close(self): - """ - Close all pooled connections and disable the pool. 
- """ - if self.pool is None: - return - # Disable access to the pool - old_pool, self.pool = self.pool, None - - try: - while True: - conn = old_pool.get(block=False) - if conn: - conn.close() - - except queue.Empty: - pass # Done. - - def is_same_host(self, url): - """ - Check if the given ``url`` is a member of the same host as this - connection pool. - """ - if url.startswith('/'): - return True - - # TODO: Add optional support for socket.gethostbyname checking. - scheme, host, port = get_host(url) - if host is not None: - host = _normalize_host(host, scheme=scheme) - - # Use explicit default port for comparison when none is given - if self.port and not port: - port = port_by_scheme.get(scheme) - elif not self.port and port == port_by_scheme.get(scheme): - port = None - - return (scheme, host, port) == (self.scheme, self.host, self.port) - - def urlopen(self, method, url, body=None, headers=None, retries=None, - redirect=True, assert_same_host=True, timeout=_Default, - pool_timeout=None, release_conn=None, chunked=False, - body_pos=None, **response_kw): - """ - Get a connection from the pool and perform an HTTP request. This is the - lowest level call for making a request, so you'll need to specify all - the raw details. - - .. note:: - - More commonly, it's appropriate to use a convenience method provided - by :class:`.RequestMethods`, such as :meth:`request`. - - .. note:: - - `release_conn` will only behave as expected if - `preload_content=False` because we want to make - `preload_content=False` the default behaviour someday soon without - breaking backwards compatibility. - - :param method: - HTTP request method (such as GET, POST, PUT, etc.) - - :param body: - Data to send in the request body (useful for creating - POST requests, see HTTPConnectionPool.post_url for - more convenience). - - :param headers: - Dictionary of custom headers to send, such as User-Agent, - If-None-Match, etc. If None, pool headers are used. If provided, - these headers completely replace any pool-specific headers. - - :param retries: - Configure the number of retries to allow before raising a - :class:`~urllib3.exceptions.MaxRetryError` exception. - - Pass ``None`` to retry until you receive a response. Pass a - :class:`~urllib3.util.retry.Retry` object for fine-grained control - over different types of retries. - Pass an integer number to retry connection errors that many times, - but no other types of errors. Pass zero to never retry. - - If ``False``, then retries are disabled and any exception is raised - immediately. Also, instead of raising a MaxRetryError on redirects, - the redirect response will be returned. - - :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. - - :param redirect: - If True, automatically handle redirects (status codes 301, 302, - 303, 307, 308). Each redirect counts as a retry. Disabling retries - will disable redirect, too. - - :param assert_same_host: - If ``True``, will make sure that the host of the pool requests is - consistent else will raise HostChangedError. When False, you can - use the pool on an HTTP proxy and request foreign hosts. - - :param timeout: - If specified, overrides the default timeout for this one - request. It may be a float (in seconds) or an instance of - :class:`urllib3.util.Timeout`. - - :param pool_timeout: - If set and the pool is set to block=True, then this method will - block for ``pool_timeout`` seconds and raise EmptyPoolError if no - connection is available within the time period. 
- - :param release_conn: - If False, then the urlopen call will not release the connection - back into the pool once a response is received (but will release if - you read the entire contents of the response such as when - `preload_content=True`). This is useful if you're not preloading - the response's content immediately. You will need to call - ``r.release_conn()`` on the response ``r`` to return the connection - back into the pool. If None, it takes the value of - ``response_kw.get('preload_content', True)``. - - :param chunked: - If True, urllib3 will send the body using chunked transfer - encoding. Otherwise, urllib3 will send the body using the standard - content-length form. Defaults to False. - - :param int body_pos: - Position to seek to in file-like body in the event of a retry or - redirect. Typically this won't need to be set because urllib3 will - auto-populate the value when needed. - - :param \\**response_kw: - Additional parameters are passed to - :meth:`urllib3.response.HTTPResponse.from_httplib` - """ - if headers is None: - headers = self.headers - - if not isinstance(retries, Retry): - retries = Retry.from_int(retries, redirect=redirect, default=self.retries) - - if release_conn is None: - release_conn = response_kw.get('preload_content', True) - - # Check host - if assert_same_host and not self.is_same_host(url): - raise HostChangedError(self, url, retries) - - conn = None - - # Track whether `conn` needs to be released before - # returning/raising/recursing. Update this variable if necessary, and - # leave `release_conn` constant throughout the function. That way, if - # the function recurses, the original value of `release_conn` will be - # passed down into the recursive call, and its value will be respected. - # - # See issue #651 [1] for details. - # - # [1] <https://github.com/shazow/urllib3/issues/651> - release_this_conn = release_conn - - # Merge the proxy headers. Only do this in HTTP. We have to copy the - # headers dict so we can safely change it without those changes being - # reflected in anyone else's copy. - if self.scheme == 'http': - headers = headers.copy() - headers.update(self.proxy_headers) - - # Must keep the exception bound to a separate variable or else Python 3 - # complains about UnboundLocalError. - err = None - - # Keep track of whether we cleanly exited the except block. This - # ensures we do proper cleanup in finally. - clean_exit = False - - # Rewind body position, if needed. Record current position - # for future rewinds in the event of a redirect/retry. - body_pos = set_file_position(body, body_pos) - - try: - # Request a connection from the queue. - timeout_obj = self._get_timeout(timeout) - conn = self._get_conn(timeout=pool_timeout) - - conn.timeout = timeout_obj.connect_timeout - - is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None) - if is_new_proxy_conn: - self._prepare_proxy(conn) - - # Make the request on the httplib connection object. - httplib_response = self._make_request(conn, method, url, - timeout=timeout_obj, - body=body, headers=headers, - chunked=chunked) - - # If we're going to release the connection in ``finally:``, then - # the response doesn't need to know about the connection. Otherwise - # it will also try to release it and we'll have a double-release - # mess. 
- response_conn = conn if not release_conn else None - - # Pass method to Response for length checking - response_kw['request_method'] = method - - # Import httplib's response into our own wrapper object - response = self.ResponseCls.from_httplib(httplib_response, - pool=self, - connection=response_conn, - retries=retries, - **response_kw) - - # Everything went great! - clean_exit = True - - except queue.Empty: - # Timed out by queue. - raise EmptyPoolError(self, "No pool connections are available.") - - except (TimeoutError, HTTPException, SocketError, ProtocolError, - BaseSSLError, SSLError, CertificateError) as e: - # Discard the connection for these exceptions. It will be - # replaced during the next _get_conn() call. - clean_exit = False - if isinstance(e, (BaseSSLError, CertificateError)): - e = SSLError(e) - elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: - e = ProxyError('Cannot connect to proxy.', e) - elif isinstance(e, (SocketError, HTTPException)): - e = ProtocolError('Connection aborted.', e) - - retries = retries.increment(method, url, error=e, _pool=self, - _stacktrace=sys.exc_info()[2]) - retries.sleep() - - # Keep track of the error for the retry warning. - err = e - - finally: - if not clean_exit: - # We hit some kind of exception, handled or otherwise. We need - # to throw the connection away unless explicitly told not to. - # Close the connection, set the variable to None, and make sure - # we put the None back in the pool to avoid leaking it. - conn = conn and conn.close() - release_this_conn = True - - if release_this_conn: - # Put the connection back to be reused. If the connection is - # expired then it will be None, which will get replaced with a - # fresh connection during _get_conn. - self._put_conn(conn) - - if not conn: - # Try again - log.warning("Retrying (%r) after connection " - "broken by '%r': %s", retries, err, url) - return self.urlopen(method, url, body, headers, retries, - redirect, assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) - - def drain_and_release_conn(response): - try: - # discard any remaining response body, the connection will be - # released back to the pool once the entire response is read - response.read() - except (TimeoutError, HTTPException, SocketError, ProtocolError, - BaseSSLError, SSLError): - pass - - # Handle redirect? - redirect_location = redirect and response.get_redirect_location() - if redirect_location: - if response.status == 303: - method = 'GET' - - try: - retries = retries.increment(method, url, response=response, _pool=self) - except MaxRetryError: - if retries.raise_on_redirect: - # Drain and release the connection for this response, since - # we're not returning it to be released manually. - drain_and_release_conn(response) - raise - return response - - # drain and return the connection to the pool before recursing - drain_and_release_conn(response) - - retries.sleep_for_retry(response) - log.debug("Redirecting %s -> %s", url, redirect_location) - return self.urlopen( - method, redirect_location, body, headers, - retries=retries, redirect=redirect, - assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) - - # Check if we should retry the HTTP response. 
- has_retry_after = bool(response.getheader('Retry-After')) - if retries.is_retry(method, response.status, has_retry_after): - try: - retries = retries.increment(method, url, response=response, _pool=self) - except MaxRetryError: - if retries.raise_on_status: - # Drain and release the connection for this response, since - # we're not returning it to be released manually. - drain_and_release_conn(response) - raise - return response - - # drain and return the connection to the pool before recursing - drain_and_release_conn(response) - - retries.sleep(response) - log.debug("Retry: %s", url) - return self.urlopen( - method, url, body, headers, - retries=retries, redirect=redirect, - assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, - body_pos=body_pos, **response_kw) - - return response - - -class HTTPSConnectionPool(HTTPConnectionPool): - """ - Same as :class:`.HTTPConnectionPool`, but HTTPS. - - When Python is compiled with the :mod:`ssl` module, then - :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, - instead of :class:`.HTTPSConnection`. - - :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, - ``assert_hostname`` and ``host`` in this order to verify connections. - If ``assert_hostname`` is False, no verification is done. - - The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, - ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` - is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade - the connection socket into an SSL socket. - """ - - scheme = 'https' - ConnectionCls = HTTPSConnection - - def __init__(self, host, port=None, - strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, - block=False, headers=None, retries=None, - _proxy=None, _proxy_headers=None, - key_file=None, cert_file=None, cert_reqs=None, - key_password=None, ca_certs=None, ssl_version=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None, **conn_kw): - - HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, - block, headers, retries, _proxy, _proxy_headers, - **conn_kw) - - self.key_file = key_file - self.cert_file = cert_file - self.cert_reqs = cert_reqs - self.key_password = key_password - self.ca_certs = ca_certs - self.ca_cert_dir = ca_cert_dir - self.ssl_version = ssl_version - self.assert_hostname = assert_hostname - self.assert_fingerprint = assert_fingerprint - - def _prepare_conn(self, conn): - """ - Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` - and establish the tunnel if proxy is used. - """ - - if isinstance(conn, VerifiedHTTPSConnection): - conn.set_cert(key_file=self.key_file, - key_password=self.key_password, - cert_file=self.cert_file, - cert_reqs=self.cert_reqs, - ca_certs=self.ca_certs, - ca_cert_dir=self.ca_cert_dir, - assert_hostname=self.assert_hostname, - assert_fingerprint=self.assert_fingerprint) - conn.ssl_version = self.ssl_version - return conn - - def _prepare_proxy(self, conn): - """ - Establish tunnel connection early, because otherwise httplib - would improperly set Host: header to proxy's IP:port. - """ - conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) - conn.connect() - - def _new_conn(self): - """ - Return a fresh :class:`httplib.HTTPSConnection`. 
- """ - self.num_connections += 1 - log.debug("Starting new HTTPS connection (%d): %s:%s", - self.num_connections, self.host, self.port or "443") - - if not self.ConnectionCls or self.ConnectionCls is DummyConnection: - raise SSLError("Can't connect to HTTPS URL because the SSL " - "module is not available.") - - actual_host = self.host - actual_port = self.port - if self.proxy is not None: - actual_host = self.proxy.host - actual_port = self.proxy.port - - conn = self.ConnectionCls(host=actual_host, port=actual_port, - timeout=self.timeout.connect_timeout, - strict=self.strict, cert_file=self.cert_file, - key_file=self.key_file, key_password=self.key_password, - **self.conn_kw) - - return self._prepare_conn(conn) - - def _validate_conn(self, conn): - """ - Called right before a request is made, after the socket is created. - """ - super(HTTPSConnectionPool, self)._validate_conn(conn) - - # Force connect early to allow us to validate the connection. - if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` - conn.connect() - - if not conn.is_verified: - warnings.warn(( - 'Unverified HTTPS request is being made. ' - 'Adding certificate verification is strongly advised. See: ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings'), - InsecureRequestWarning) - - -def connection_from_url(url, **kw): - """ - Given a url, return an :class:`.ConnectionPool` instance of its host. - - This is a shortcut for not having to parse out the scheme, host, and port - of the url before creating an :class:`.ConnectionPool` instance. - - :param url: - Absolute URL string that must include the scheme. Port is optional. - - :param \\**kw: - Passes additional parameters to the constructor of the appropriate - :class:`.ConnectionPool`. Useful for specifying things like - timeout, maxsize, headers, etc. - - Example:: - - >>> conn = connection_from_url('http://google.com/') - >>> r = conn.request('GET', '/') - """ - scheme, host, port = get_host(url) - port = port or port_by_scheme.get(scheme, 80) - if scheme == 'https': - return HTTPSConnectionPool(host, port=port, **kw) - else: - return HTTPConnectionPool(host, port=port, **kw) - - -def _normalize_host(host, scheme): - """ - Normalize hosts for comparisons and use with sockets. - """ - - # httplib doesn't like it when we include brackets in IPv6 addresses - # Specifically, if we include brackets but also pass the port then - # httplib crazily doubles up the square brackets on the Host header. - # Instead, we need to make sure we never pass ``None`` as the port. - # However, for backward compatibility reasons we can't actually - # *assert* that. 
See http://bugs.python.org/issue28539 - if host.startswith('[') and host.endswith(']'): - host = host.strip('[]') - if scheme in NORMALIZABLE_SCHEMES: - host = normalize_host(host) - return host diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyc deleted file mode 100644 index 9f9ca695..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/__init__.pyc deleted file mode 100644 index db4ea0ec..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py deleted file mode 100644 index f3e00942..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py +++ /dev/null @@ -1,30 +0,0 @@ -""" -This module provides means to detect the App Engine environment. -""" - -import os - - -def is_appengine(): - return (is_local_appengine() or - is_prod_appengine() or - is_prod_appengine_mvms()) - - -def is_appengine_sandbox(): - return is_appengine() and not is_prod_appengine_mvms() - - -def is_local_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Development/' in os.environ['SERVER_SOFTWARE']) - - -def is_prod_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and - not is_prod_appengine_mvms()) - - -def is_prod_appengine_mvms(): - return os.environ.get('GAE_VM', False) == 'true' diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.pyc deleted file mode 100644 index 179c4365..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.pyc deleted file mode 100644 index 5df7234c..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py deleted file mode 100644 index be342153..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py +++ /dev/null @@ -1,593 +0,0 @@ -""" -This module uses ctypes to bind a whole bunch of functions and constants from -SecureTransport. The goal here is to provide the low-level API to -SecureTransport. These are essentially the C-level functions and constants, and -they're pretty gross to work with. - -This code is a bastardised version of the code found in Will Bond's oscrypto -library. An enormous debt is owed to him for blazing this trail for us. 
For -that reason, this code should be considered to be covered both by urllib3's -license and by oscrypto's: - - Copyright (c) 2015-2016 Will Bond <will@wbond.net> - - Permission is hereby granted, free of charge, to any person obtaining a - copy of this software and associated documentation files (the "Software"), - to deal in the Software without restriction, including without limitation - the rights to use, copy, modify, merge, publish, distribute, sublicense, - and/or sell copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. -""" -from __future__ import absolute_import - -import platform -from ctypes.util import find_library -from ctypes import ( - c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long, - c_bool -) -from ctypes import CDLL, POINTER, CFUNCTYPE - - -security_path = find_library('Security') -if not security_path: - raise ImportError('The library Security could not be found') - - -core_foundation_path = find_library('CoreFoundation') -if not core_foundation_path: - raise ImportError('The library CoreFoundation could not be found') - - -version = platform.mac_ver()[0] -version_info = tuple(map(int, version.split('.'))) -if version_info < (10, 8): - raise OSError( - 'Only OS X 10.8 and newer are supported, not %s.%s' % ( - version_info[0], version_info[1] - ) - ) - -Security = CDLL(security_path, use_errno=True) -CoreFoundation = CDLL(core_foundation_path, use_errno=True) - -Boolean = c_bool -CFIndex = c_long -CFStringEncoding = c_uint32 -CFData = c_void_p -CFString = c_void_p -CFArray = c_void_p -CFMutableArray = c_void_p -CFDictionary = c_void_p -CFError = c_void_p -CFType = c_void_p -CFTypeID = c_ulong - -CFTypeRef = POINTER(CFType) -CFAllocatorRef = c_void_p - -OSStatus = c_int32 - -CFDataRef = POINTER(CFData) -CFStringRef = POINTER(CFString) -CFArrayRef = POINTER(CFArray) -CFMutableArrayRef = POINTER(CFMutableArray) -CFDictionaryRef = POINTER(CFDictionary) -CFArrayCallBacks = c_void_p -CFDictionaryKeyCallBacks = c_void_p -CFDictionaryValueCallBacks = c_void_p - -SecCertificateRef = POINTER(c_void_p) -SecExternalFormat = c_uint32 -SecExternalItemType = c_uint32 -SecIdentityRef = POINTER(c_void_p) -SecItemImportExportFlags = c_uint32 -SecItemImportExportKeyParameters = c_void_p -SecKeychainRef = POINTER(c_void_p) -SSLProtocol = c_uint32 -SSLCipherSuite = c_uint32 -SSLContextRef = POINTER(c_void_p) -SecTrustRef = POINTER(c_void_p) -SSLConnectionRef = c_uint32 -SecTrustResultType = c_uint32 -SecTrustOptionFlags = c_uint32 -SSLProtocolSide = c_uint32 -SSLConnectionType = c_uint32 -SSLSessionOption = c_uint32 - - -try: - Security.SecItemImport.argtypes = [ - CFDataRef, - CFStringRef, - POINTER(SecExternalFormat), - POINTER(SecExternalItemType), - SecItemImportExportFlags, - POINTER(SecItemImportExportKeyParameters), - SecKeychainRef, - POINTER(CFArrayRef), - ] - 
Security.SecItemImport.restype = OSStatus - - Security.SecCertificateGetTypeID.argtypes = [] - Security.SecCertificateGetTypeID.restype = CFTypeID - - Security.SecIdentityGetTypeID.argtypes = [] - Security.SecIdentityGetTypeID.restype = CFTypeID - - Security.SecKeyGetTypeID.argtypes = [] - Security.SecKeyGetTypeID.restype = CFTypeID - - Security.SecCertificateCreateWithData.argtypes = [ - CFAllocatorRef, - CFDataRef - ] - Security.SecCertificateCreateWithData.restype = SecCertificateRef - - Security.SecCertificateCopyData.argtypes = [ - SecCertificateRef - ] - Security.SecCertificateCopyData.restype = CFDataRef - - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] - Security.SecCopyErrorMessageString.restype = CFStringRef - - Security.SecIdentityCreateWithCertificate.argtypes = [ - CFTypeRef, - SecCertificateRef, - POINTER(SecIdentityRef) - ] - Security.SecIdentityCreateWithCertificate.restype = OSStatus - - Security.SecKeychainCreate.argtypes = [ - c_char_p, - c_uint32, - c_void_p, - Boolean, - c_void_p, - POINTER(SecKeychainRef) - ] - Security.SecKeychainCreate.restype = OSStatus - - Security.SecKeychainDelete.argtypes = [ - SecKeychainRef - ] - Security.SecKeychainDelete.restype = OSStatus - - Security.SecPKCS12Import.argtypes = [ - CFDataRef, - CFDictionaryRef, - POINTER(CFArrayRef) - ] - Security.SecPKCS12Import.restype = OSStatus - - SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) - SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)) - - Security.SSLSetIOFuncs.argtypes = [ - SSLContextRef, - SSLReadFunc, - SSLWriteFunc - ] - Security.SSLSetIOFuncs.restype = OSStatus - - Security.SSLSetPeerID.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] - Security.SSLSetPeerID.restype = OSStatus - - Security.SSLSetCertificate.argtypes = [ - SSLContextRef, - CFArrayRef - ] - Security.SSLSetCertificate.restype = OSStatus - - Security.SSLSetCertificateAuthorities.argtypes = [ - SSLContextRef, - CFTypeRef, - Boolean - ] - Security.SSLSetCertificateAuthorities.restype = OSStatus - - Security.SSLSetConnection.argtypes = [ - SSLContextRef, - SSLConnectionRef - ] - Security.SSLSetConnection.restype = OSStatus - - Security.SSLSetPeerDomainName.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] - Security.SSLSetPeerDomainName.restype = OSStatus - - Security.SSLHandshake.argtypes = [ - SSLContextRef - ] - Security.SSLHandshake.restype = OSStatus - - Security.SSLRead.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t, - POINTER(c_size_t) - ] - Security.SSLRead.restype = OSStatus - - Security.SSLWrite.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t, - POINTER(c_size_t) - ] - Security.SSLWrite.restype = OSStatus - - Security.SSLClose.argtypes = [ - SSLContextRef - ] - Security.SSLClose.restype = OSStatus - - Security.SSLGetNumberSupportedCiphers.argtypes = [ - SSLContextRef, - POINTER(c_size_t) - ] - Security.SSLGetNumberSupportedCiphers.restype = OSStatus - - Security.SSLGetSupportedCiphers.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite), - POINTER(c_size_t) - ] - Security.SSLGetSupportedCiphers.restype = OSStatus - - Security.SSLSetEnabledCiphers.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite), - c_size_t - ] - Security.SSLSetEnabledCiphers.restype = OSStatus - - Security.SSLGetNumberEnabledCiphers.argtype = [ - SSLContextRef, - POINTER(c_size_t) - ] - Security.SSLGetNumberEnabledCiphers.restype = OSStatus - - Security.SSLGetEnabledCiphers.argtypes = [ - SSLContextRef, - 
POINTER(SSLCipherSuite), - POINTER(c_size_t) - ] - Security.SSLGetEnabledCiphers.restype = OSStatus - - Security.SSLGetNegotiatedCipher.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite) - ] - Security.SSLGetNegotiatedCipher.restype = OSStatus - - Security.SSLGetNegotiatedProtocolVersion.argtypes = [ - SSLContextRef, - POINTER(SSLProtocol) - ] - Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus - - Security.SSLCopyPeerTrust.argtypes = [ - SSLContextRef, - POINTER(SecTrustRef) - ] - Security.SSLCopyPeerTrust.restype = OSStatus - - Security.SecTrustSetAnchorCertificates.argtypes = [ - SecTrustRef, - CFArrayRef - ] - Security.SecTrustSetAnchorCertificates.restype = OSStatus - - Security.SecTrustSetAnchorCertificatesOnly.argstypes = [ - SecTrustRef, - Boolean - ] - Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus - - Security.SecTrustEvaluate.argtypes = [ - SecTrustRef, - POINTER(SecTrustResultType) - ] - Security.SecTrustEvaluate.restype = OSStatus - - Security.SecTrustGetCertificateCount.argtypes = [ - SecTrustRef - ] - Security.SecTrustGetCertificateCount.restype = CFIndex - - Security.SecTrustGetCertificateAtIndex.argtypes = [ - SecTrustRef, - CFIndex - ] - Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef - - Security.SSLCreateContext.argtypes = [ - CFAllocatorRef, - SSLProtocolSide, - SSLConnectionType - ] - Security.SSLCreateContext.restype = SSLContextRef - - Security.SSLSetSessionOption.argtypes = [ - SSLContextRef, - SSLSessionOption, - Boolean - ] - Security.SSLSetSessionOption.restype = OSStatus - - Security.SSLSetProtocolVersionMin.argtypes = [ - SSLContextRef, - SSLProtocol - ] - Security.SSLSetProtocolVersionMin.restype = OSStatus - - Security.SSLSetProtocolVersionMax.argtypes = [ - SSLContextRef, - SSLProtocol - ] - Security.SSLSetProtocolVersionMax.restype = OSStatus - - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] - Security.SecCopyErrorMessageString.restype = CFStringRef - - Security.SSLReadFunc = SSLReadFunc - Security.SSLWriteFunc = SSLWriteFunc - Security.SSLContextRef = SSLContextRef - Security.SSLProtocol = SSLProtocol - Security.SSLCipherSuite = SSLCipherSuite - Security.SecIdentityRef = SecIdentityRef - Security.SecKeychainRef = SecKeychainRef - Security.SecTrustRef = SecTrustRef - Security.SecTrustResultType = SecTrustResultType - Security.SecExternalFormat = SecExternalFormat - Security.OSStatus = OSStatus - - Security.kSecImportExportPassphrase = CFStringRef.in_dll( - Security, 'kSecImportExportPassphrase' - ) - Security.kSecImportItemIdentity = CFStringRef.in_dll( - Security, 'kSecImportItemIdentity' - ) - - # CoreFoundation time! 
- CoreFoundation.CFRetain.argtypes = [ - CFTypeRef - ] - CoreFoundation.CFRetain.restype = CFTypeRef - - CoreFoundation.CFRelease.argtypes = [ - CFTypeRef - ] - CoreFoundation.CFRelease.restype = None - - CoreFoundation.CFGetTypeID.argtypes = [ - CFTypeRef - ] - CoreFoundation.CFGetTypeID.restype = CFTypeID - - CoreFoundation.CFStringCreateWithCString.argtypes = [ - CFAllocatorRef, - c_char_p, - CFStringEncoding - ] - CoreFoundation.CFStringCreateWithCString.restype = CFStringRef - - CoreFoundation.CFStringGetCStringPtr.argtypes = [ - CFStringRef, - CFStringEncoding - ] - CoreFoundation.CFStringGetCStringPtr.restype = c_char_p - - CoreFoundation.CFStringGetCString.argtypes = [ - CFStringRef, - c_char_p, - CFIndex, - CFStringEncoding - ] - CoreFoundation.CFStringGetCString.restype = c_bool - - CoreFoundation.CFDataCreate.argtypes = [ - CFAllocatorRef, - c_char_p, - CFIndex - ] - CoreFoundation.CFDataCreate.restype = CFDataRef - - CoreFoundation.CFDataGetLength.argtypes = [ - CFDataRef - ] - CoreFoundation.CFDataGetLength.restype = CFIndex - - CoreFoundation.CFDataGetBytePtr.argtypes = [ - CFDataRef - ] - CoreFoundation.CFDataGetBytePtr.restype = c_void_p - - CoreFoundation.CFDictionaryCreate.argtypes = [ - CFAllocatorRef, - POINTER(CFTypeRef), - POINTER(CFTypeRef), - CFIndex, - CFDictionaryKeyCallBacks, - CFDictionaryValueCallBacks - ] - CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef - - CoreFoundation.CFDictionaryGetValue.argtypes = [ - CFDictionaryRef, - CFTypeRef - ] - CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef - - CoreFoundation.CFArrayCreate.argtypes = [ - CFAllocatorRef, - POINTER(CFTypeRef), - CFIndex, - CFArrayCallBacks, - ] - CoreFoundation.CFArrayCreate.restype = CFArrayRef - - CoreFoundation.CFArrayCreateMutable.argtypes = [ - CFAllocatorRef, - CFIndex, - CFArrayCallBacks - ] - CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef - - CoreFoundation.CFArrayAppendValue.argtypes = [ - CFMutableArrayRef, - c_void_p - ] - CoreFoundation.CFArrayAppendValue.restype = None - - CoreFoundation.CFArrayGetCount.argtypes = [ - CFArrayRef - ] - CoreFoundation.CFArrayGetCount.restype = CFIndex - - CoreFoundation.CFArrayGetValueAtIndex.argtypes = [ - CFArrayRef, - CFIndex - ] - CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p - - CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( - CoreFoundation, 'kCFAllocatorDefault' - ) - CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks') - CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryKeyCallBacks' - ) - CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryValueCallBacks' - ) - - CoreFoundation.CFTypeRef = CFTypeRef - CoreFoundation.CFArrayRef = CFArrayRef - CoreFoundation.CFStringRef = CFStringRef - CoreFoundation.CFDictionaryRef = CFDictionaryRef - -except (AttributeError): - raise ImportError('Error initializing ctypes') - - -class CFConst(object): - """ - A class object that acts as essentially a namespace for CoreFoundation - constants. - """ - kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) - - -class SecurityConst(object): - """ - A class object that acts as essentially a namespace for Security constants. 
- """ - kSSLSessionOptionBreakOnServerAuth = 0 - - kSSLProtocol2 = 1 - kSSLProtocol3 = 2 - kTLSProtocol1 = 4 - kTLSProtocol11 = 7 - kTLSProtocol12 = 8 - kTLSProtocol13 = 10 - kTLSProtocolMaxSupported = 999 - - kSSLClientSide = 1 - kSSLStreamType = 0 - - kSecFormatPEMSequence = 10 - - kSecTrustResultInvalid = 0 - kSecTrustResultProceed = 1 - # This gap is present on purpose: this was kSecTrustResultConfirm, which - # is deprecated. - kSecTrustResultDeny = 3 - kSecTrustResultUnspecified = 4 - kSecTrustResultRecoverableTrustFailure = 5 - kSecTrustResultFatalTrustFailure = 6 - kSecTrustResultOtherError = 7 - - errSSLProtocol = -9800 - errSSLWouldBlock = -9803 - errSSLClosedGraceful = -9805 - errSSLClosedNoNotify = -9816 - errSSLClosedAbort = -9806 - - errSSLXCertChainInvalid = -9807 - errSSLCrypto = -9809 - errSSLInternal = -9810 - errSSLCertExpired = -9814 - errSSLCertNotYetValid = -9815 - errSSLUnknownRootCert = -9812 - errSSLNoRootCert = -9813 - errSSLHostNameMismatch = -9843 - errSSLPeerHandshakeFail = -9824 - errSSLPeerUserCancelled = -9839 - errSSLWeakPeerEphemeralDHKey = -9850 - errSSLServerAuthCompleted = -9841 - errSSLRecordOverflow = -9847 - - errSecVerifyFailed = -67808 - errSecNoTrustSettings = -25263 - errSecItemNotFound = -25300 - errSecInvalidTrustSettings = -25262 - - # Cipher suites. We only pick the ones our default cipher string allows. - # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values - TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C - TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 - TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B - TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F - TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9 - TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8 - TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F - TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 - TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B - TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 - TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 - TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 - TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D - TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C - TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D - TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C - TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 - TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F - TLS_AES_128_GCM_SHA256 = 0x1301 - TLS_AES_256_GCM_SHA384 = 0x1302 - TLS_AES_128_CCM_8_SHA256 = 0x1305 - TLS_AES_128_CCM_SHA256 = 0x1304 diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyc deleted file mode 100644 index 98ba1752..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyc deleted file mode 100644 index ef03693d..00000000 Binary files 
a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.pyc deleted file mode 100644 index 63f5d4ca..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py deleted file mode 100644 index 8ea127c5..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py +++ /dev/null @@ -1,111 +0,0 @@ -""" -NTLM authenticating pool, contributed by erikcederstran - -Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 -""" -from __future__ import absolute_import - -from logging import getLogger -from ntlm import ntlm - -from .. import HTTPSConnectionPool -from ..packages.six.moves.http_client import HTTPSConnection - - -log = getLogger(__name__) - - -class NTLMConnectionPool(HTTPSConnectionPool): - """ - Implements an NTLM authentication version of an urllib3 connection pool - """ - - scheme = 'https' - - def __init__(self, user, pw, authurl, *args, **kwargs): - """ - authurl is a random URL on the server that is protected by NTLM. - user is the Windows user, probably in the DOMAIN\\username format. - pw is the password for the user. - """ - super(NTLMConnectionPool, self).__init__(*args, **kwargs) - self.authurl = authurl - self.rawuser = user - user_parts = user.split('\\', 1) - self.domain = user_parts[0].upper() - self.user = user_parts[1] - self.pw = pw - - def _new_conn(self): - # Performs the NTLM handshake that secures the connection. The socket - # must be kept open while requests are performed. - self.num_connections += 1 - log.debug('Starting NTLM HTTPS connection no. 
%d: https://%s%s', - self.num_connections, self.host, self.authurl) - - headers = {'Connection': 'Keep-Alive'} - req_header = 'Authorization' - resp_header = 'www-authenticate' - - conn = HTTPSConnection(host=self.host, port=self.port) - - # Send negotiation message - headers[req_header] = ( - 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) - res = conn.getresponse() - reshdr = dict(res.getheaders()) - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', reshdr) - log.debug('Response data: %s [...]', res.read(100)) - - # Remove the reference to the socket, so that it can not be closed by - # the response object (we want to keep the socket open) - res.fp = None - - # Server should respond with a challenge message - auth_header_values = reshdr[resp_header].split(', ') - auth_header_value = None - for s in auth_header_values: - if s[:5] == 'NTLM ': - auth_header_value = s[5:] - if auth_header_value is None: - raise Exception('Unexpected %s response header: %s' % - (resp_header, reshdr[resp_header])) - - # Send authentication message - ServerChallenge, NegotiateFlags = \ - ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) - auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, - self.user, - self.domain, - self.pw, - NegotiateFlags) - headers[req_header] = 'NTLM %s' % auth_msg - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) - res = conn.getresponse() - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', dict(res.getheaders())) - log.debug('Response data: %s [...]', res.read()[:100]) - if res.status != 200: - if res.status == 401: - raise Exception('Server rejected request: wrong ' - 'username or password') - raise Exception('Wrong server response: %s %s' % - (res.status, res.reason)) - - res.fp = None - log.debug('Connection established') - return conn - - def urlopen(self, method, url, body=None, headers=None, retries=3, - redirect=True, assert_same_host=True): - if headers is None: - headers = {} - headers['Connection'] = 'Keep-Alive' - return super(NTLMConnectionPool, self).urlopen(method, url, body, - headers, retries, - redirect, - assert_same_host) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.pyc deleted file mode 100644 index fad7b4dd..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.pyc deleted file mode 100644 index afba7030..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyc deleted file mode 100644 index 22602acc..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/socks.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/socks.py deleted file mode 100644 index 636d261f..00000000 --- 
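For context, the NTLM contrib module deleted above is driven entirely through its pool class; based on its own docstrings, usage looked roughly like the sketch below. The hostname, account, password, and path are placeholders, and the import path assumes the standalone urllib3 package plus its separate ntlm dependency rather than the vendored pip copy.

from urllib3.contrib.ntlmpool import NTLMConnectionPool

# user is the Windows account in DOMAIN\username form; authurl is any
# NTLM-protected URL on the server (all values here are hypothetical).
pool = NTLMConnectionPool(
    user='EXAMPLE\\jdoe',
    pw='secret',
    authurl='/protected/',
    host='intranet.example.com',
)
response = pool.urlopen('GET', '/protected/')
print(response.status)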
a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/socks.py +++ /dev/null @@ -1,205 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module contains provisional support for SOCKS proxies from within -urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and -SOCKS5. To enable its functionality, either install PySocks or install this -module with the ``socks`` extra. - -The SOCKS implementation supports the full range of urllib3 features. It also -supports the following SOCKS features: - -- SOCKS4A (``proxy_url='socks4a://...``) -- SOCKS4 (``proxy_url='socks4://...``) -- SOCKS5 with remote DNS (``proxy_url='socks5h://...``) -- SOCKS5 with local DNS (``proxy_url='socks5://...``) -- Usernames and passwords for the SOCKS proxy - - .. note:: - It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in - your ``proxy_url`` to ensure that DNS resolution is done from the remote - server instead of client-side when connecting to a domain name. - -SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5 -supports IPv4, IPv6, and domain names. - -When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` -will be sent as the ``userid`` section of the SOCKS request:: - - proxy_url="socks4a://<userid>@proxy-host" - -When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion -of the ``proxy_url`` will be sent as the username/password to authenticate -with the proxy:: - - proxy_url="socks5h://<username>:<password>@proxy-host" - -""" -from __future__ import absolute_import - -try: - import socks -except ImportError: - import warnings - from ..exceptions import DependencyWarning - - warnings.warn(( - 'SOCKS support in urllib3 requires the installation of optional ' - 'dependencies: specifically, PySocks. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies' - ), - DependencyWarning - ) - raise - -from socket import error as SocketError, timeout as SocketTimeout - -from ..connection import ( - HTTPConnection, HTTPSConnection -) -from ..connectionpool import ( - HTTPConnectionPool, HTTPSConnectionPool -) -from ..exceptions import ConnectTimeoutError, NewConnectionError -from ..poolmanager import PoolManager -from ..util.url import parse_url - -try: - import ssl -except ImportError: - ssl = None - - -class SOCKSConnection(HTTPConnection): - """ - A plain-text HTTP connection that connects via a SOCKS proxy. - """ - def __init__(self, *args, **kwargs): - self._socks_options = kwargs.pop('_socks_options') - super(SOCKSConnection, self).__init__(*args, **kwargs) - - def _new_conn(self): - """ - Establish a new connection via the SOCKS proxy. - """ - extra_kw = {} - if self.source_address: - extra_kw['source_address'] = self.source_address - - if self.socket_options: - extra_kw['socket_options'] = self.socket_options - - try: - conn = socks.create_connection( - (self.host, self.port), - proxy_type=self._socks_options['socks_version'], - proxy_addr=self._socks_options['proxy_host'], - proxy_port=self._socks_options['proxy_port'], - proxy_username=self._socks_options['username'], - proxy_password=self._socks_options['password'], - proxy_rdns=self._socks_options['rdns'], - timeout=self.timeout, - **extra_kw - ) - - except SocketTimeout: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) - - except socks.ProxyError as e: - # This is fragile as hell, but it seems to be the only way to raise - # useful errors here. 
- if e.socket_err: - error = e.socket_err - if isinstance(error, SocketTimeout): - raise ConnectTimeoutError( - self, - "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout) - ) - else: - raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % error - ) - else: - raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % e - ) - - except SocketError as e: # Defensive: PySocks should catch all these. - raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) - - return conn - - -# We don't need to duplicate the Verified/Unverified distinction from -# urllib3/connection.py here because the HTTPSConnection will already have been -# correctly set to either the Verified or Unverified form by that module. This -# means the SOCKSHTTPSConnection will automatically be the correct type. -class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): - pass - - -class SOCKSHTTPConnectionPool(HTTPConnectionPool): - ConnectionCls = SOCKSConnection - - -class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): - ConnectionCls = SOCKSHTTPSConnection - - -class SOCKSProxyManager(PoolManager): - """ - A version of the urllib3 ProxyManager that routes connections via the - defined SOCKS proxy. - """ - pool_classes_by_scheme = { - 'http': SOCKSHTTPConnectionPool, - 'https': SOCKSHTTPSConnectionPool, - } - - def __init__(self, proxy_url, username=None, password=None, - num_pools=10, headers=None, **connection_pool_kw): - parsed = parse_url(proxy_url) - - if username is None and password is None and parsed.auth is not None: - split = parsed.auth.split(':') - if len(split) == 2: - username, password = split - if parsed.scheme == 'socks5': - socks_version = socks.PROXY_TYPE_SOCKS5 - rdns = False - elif parsed.scheme == 'socks5h': - socks_version = socks.PROXY_TYPE_SOCKS5 - rdns = True - elif parsed.scheme == 'socks4': - socks_version = socks.PROXY_TYPE_SOCKS4 - rdns = False - elif parsed.scheme == 'socks4a': - socks_version = socks.PROXY_TYPE_SOCKS4 - rdns = True - else: - raise ValueError( - "Unable to determine SOCKS version from %s" % proxy_url - ) - - self.proxy_url = proxy_url - - socks_options = { - 'socks_version': socks_version, - 'proxy_host': parsed.host, - 'proxy_port': parsed.port, - 'username': username, - 'password': password, - 'rdns': rdns - } - connection_pool_kw['_socks_options'] = socks_options - - super(SOCKSProxyManager, self).__init__( - num_pools, headers, **connection_pool_kw - ) - - self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/socks.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/socks.pyc deleted file mode 100644 index 7a2d2531..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/socks.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.py deleted file mode 100644 index 7bbaa987..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.py +++ /dev/null @@ -1,246 +0,0 @@ -from __future__ import absolute_import -from .packages.six.moves.http_client import ( - IncompleteRead as httplib_IncompleteRead -) -# Base Exceptions - - -class HTTPError(Exception): - "Base exception used by this module." - pass - - -class HTTPWarning(Warning): - "Base warning used by this module." 
- pass - - -class PoolError(HTTPError): - "Base exception for errors caused within a pool." - def __init__(self, pool, message): - self.pool = pool - HTTPError.__init__(self, "%s: %s" % (pool, message)) - - def __reduce__(self): - # For pickling purposes. - return self.__class__, (None, None) - - -class RequestError(PoolError): - "Base exception for PoolErrors that have associated URLs." - def __init__(self, pool, url, message): - self.url = url - PoolError.__init__(self, pool, message) - - def __reduce__(self): - # For pickling purposes. - return self.__class__, (None, self.url, None) - - -class SSLError(HTTPError): - "Raised when SSL certificate fails in an HTTPS connection." - pass - - -class ProxyError(HTTPError): - "Raised when the connection to a proxy fails." - pass - - -class DecodeError(HTTPError): - "Raised when automatic decoding based on Content-Type fails." - pass - - -class ProtocolError(HTTPError): - "Raised when something unexpected happens mid-request/response." - pass - - -#: Renamed to ProtocolError but aliased for backwards compatibility. -ConnectionError = ProtocolError - - -# Leaf Exceptions - -class MaxRetryError(RequestError): - """Raised when the maximum number of retries is exceeded. - - :param pool: The connection pool - :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` - :param string url: The requested Url - :param exceptions.Exception reason: The underlying error - - """ - - def __init__(self, pool, url, reason=None): - self.reason = reason - - message = "Max retries exceeded with url: %s (Caused by %r)" % ( - url, reason) - - RequestError.__init__(self, pool, url, message) - - -class HostChangedError(RequestError): - "Raised when an existing pool gets a request for a foreign host." - - def __init__(self, pool, url, retries=3): - message = "Tried to open a foreign host with url: %s" % url - RequestError.__init__(self, pool, url, message) - self.retries = retries - - -class TimeoutStateError(HTTPError): - """ Raised when passing an invalid state to a timeout """ - pass - - -class TimeoutError(HTTPError): - """ Raised when a socket timeout error occurs. - - Catching this error will catch both :exc:`ReadTimeoutErrors - <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`. - """ - pass - - -class ReadTimeoutError(TimeoutError, RequestError): - "Raised when a socket timeout occurs while receiving data from a server" - pass - - -# This timeout error does not have a URL attached and needs to inherit from the -# base HTTPError -class ConnectTimeoutError(TimeoutError): - "Raised when a socket timeout occurs while connecting to a server" - pass - - -class NewConnectionError(ConnectTimeoutError, PoolError): - "Raised when we fail to establish a new connection. Usually ECONNREFUSED." - pass - - -class EmptyPoolError(PoolError): - "Raised when a pool runs out of connections and no more are allowed." - pass - - -class ClosedPoolError(PoolError): - "Raised when a request enters a pool after the pool has been closed." - pass - - -class LocationValueError(ValueError, HTTPError): - "Raised when there is something wrong with a given URL input." - pass - - -class LocationParseError(LocationValueError): - "Raised when get_host or similar fails to parse the URL input." - - def __init__(self, location): - message = "Failed to parse: %s" % location - HTTPError.__init__(self, message) - - self.location = location - - -class ResponseError(HTTPError): - "Used as a container for an error reason supplied in a MaxRetryError." 
- GENERIC_ERROR = 'too many error responses' - SPECIFIC_ERROR = 'too many {status_code} error responses' - - -class SecurityWarning(HTTPWarning): - "Warned when performing security reducing actions" - pass - - -class SubjectAltNameWarning(SecurityWarning): - "Warned when connecting to a host with a certificate missing a SAN." - pass - - -class InsecureRequestWarning(SecurityWarning): - "Warned when making an unverified HTTPS request." - pass - - -class SystemTimeWarning(SecurityWarning): - "Warned when system time is suspected to be wrong" - pass - - -class InsecurePlatformWarning(SecurityWarning): - "Warned when certain SSL configuration is not available on a platform." - pass - - -class SNIMissingWarning(HTTPWarning): - "Warned when making a HTTPS request without SNI available." - pass - - -class DependencyWarning(HTTPWarning): - """ - Warned when an attempt is made to import a module with missing optional - dependencies. - """ - pass - - -class ResponseNotChunked(ProtocolError, ValueError): - "Response needs to be chunked in order to read it as chunks." - pass - - -class BodyNotHttplibCompatible(HTTPError): - """ - Body should be httplib.HTTPResponse like (have an fp attribute which - returns raw chunks) for read_chunked(). - """ - pass - - -class IncompleteRead(HTTPError, httplib_IncompleteRead): - """ - Response length doesn't match expected Content-Length - - Subclass of http_client.IncompleteRead to allow int value - for `partial` to avoid creating large objects on streamed - reads. - """ - def __init__(self, partial, expected): - super(IncompleteRead, self).__init__(partial, expected) - - def __repr__(self): - return ('IncompleteRead(%i bytes read, ' - '%i more expected)' % (self.partial, self.expected)) - - -class InvalidHeader(HTTPError): - "The header provided was somehow invalid." - pass - - -class ProxySchemeUnknown(AssertionError, ValueError): - "ProxyManager does not support the supplied scheme" - # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. - - def __init__(self, scheme): - message = "Not supported proxy scheme %s" % scheme - super(ProxySchemeUnknown, self).__init__(message) - - -class HeaderParsingError(HTTPError): - "Raised by assert_header_parsing, but we convert it to a log.warning statement." 
- def __init__(self, defects, unparsed_data): - message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) - super(HeaderParsingError, self).__init__(message) - - -class UnrewindableBodyError(HTTPError): - "urllib3 encountered an error when trying to rewind a body" - pass diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyc deleted file mode 100644 index 9a36f000..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyc deleted file mode 100644 index 4cad3733..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pyc deleted file mode 100644 index e90758d9..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/__init__.py deleted file mode 100644 index 170e974c..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from __future__ import absolute_import - -from . import ssl_match_hostname - -__all__ = ('ssl_match_hostname', ) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/__init__.pyc deleted file mode 100644 index ac445e9a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.pyc deleted file mode 100644 index c3d7cd5e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.pyc deleted file mode 100644 index 592c5f72..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.py deleted file mode 100644 index 371c6dd5..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -""" -An implementation of semantics and validations described in RFC 3986. - -See http://rfc3986.readthedocs.io/ for detailed documentation. - -:copyright: (c) 2014 Rackspace -:license: Apache v2.0, see LICENSE for details -""" - -from .api import iri_reference -from .api import IRIReference -from .api import is_valid_uri -from .api import normalize_uri -from .api import uri_reference -from .api import URIReference -from .api import urlparse -from .parseresult import ParseResult - -__title__ = 'rfc3986' -__author__ = 'Ian Stapleton Cordasco' -__author_email__ = 'graffatcolmingov@gmail.com' -__license__ = 'Apache v2.0' -__copyright__ = 'Copyright 2014 Rackspace' -__version__ = '1.3.2' - -__all__ = ( - 'ParseResult', - 'URIReference', - 'IRIReference', - 'is_valid_uri', - 'normalize_uri', - 'uri_reference', - 'iri_reference', - 'urlparse', - '__title__', - '__author__', - '__author_email__', - '__license__', - '__copyright__', - '__version__', -) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.pyc deleted file mode 100644 index 2393bc7f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.py deleted file mode 100644 index 543925cd..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.py +++ /dev/null @@ -1,353 +0,0 @@ -"""Module containing the implementation of the URIMixin class.""" -import warnings - -from . import exceptions as exc -from . import misc -from . import normalizers -from . import validators - - -class URIMixin(object): - """Mixin with all shared methods for URIs and IRIs.""" - - __hash__ = tuple.__hash__ - - def authority_info(self): - """Return a dictionary with the ``userinfo``, ``host``, and ``port``. - - If the authority is not valid, it will raise a - :class:`~rfc3986.exceptions.InvalidAuthority` Exception. - - :returns: - ``{'userinfo': 'username:password', 'host': 'www.example.com', - 'port': '80'}`` - :rtype: dict - :raises rfc3986.exceptions.InvalidAuthority: - If the authority is not ``None`` and can not be parsed. - """ - if not self.authority: - return {'userinfo': None, 'host': None, 'port': None} - - match = self._match_subauthority() - - if match is None: - # In this case, we have an authority that was parsed from the URI - # Reference, but it cannot be further parsed by our - # misc.SUBAUTHORITY_MATCHER. In this case it must not be a valid - # authority. - raise exc.InvalidAuthority(self.authority.encode(self.encoding)) - - # We had a match, now let's ensure that it is actually a valid host - # address if it is IPv4 - matches = match.groupdict() - host = matches.get('host') - - if (host and misc.IPv4_MATCHER.match(host) and not - validators.valid_ipv4_host_address(host)): - # If we have a host, it appears to be IPv4 and it does not have - # valid bytes, it is an InvalidAuthority. 
- raise exc.InvalidAuthority(self.authority.encode(self.encoding)) - - return matches - - def _match_subauthority(self): - return misc.SUBAUTHORITY_MATCHER.match(self.authority) - - @property - def host(self): - """If present, a string representing the host.""" - try: - authority = self.authority_info() - except exc.InvalidAuthority: - return None - return authority['host'] - - @property - def port(self): - """If present, the port extracted from the authority.""" - try: - authority = self.authority_info() - except exc.InvalidAuthority: - return None - return authority['port'] - - @property - def userinfo(self): - """If present, the userinfo extracted from the authority.""" - try: - authority = self.authority_info() - except exc.InvalidAuthority: - return None - return authority['userinfo'] - - def is_absolute(self): - """Determine if this URI Reference is an absolute URI. - - See http://tools.ietf.org/html/rfc3986#section-4.3 for explanation. - - :returns: ``True`` if it is an absolute URI, ``False`` otherwise. - :rtype: bool - """ - return bool(misc.ABSOLUTE_URI_MATCHER.match(self.unsplit())) - - def is_valid(self, **kwargs): - """Determine if the URI is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. - - :param bool require_scheme: Set to ``True`` if you wish to require the - presence of the scheme component. - :param bool require_authority: Set to ``True`` if you wish to require - the presence of the authority component. - :param bool require_path: Set to ``True`` if you wish to require the - presence of the path component. - :param bool require_query: Set to ``True`` if you wish to require the - presence of the query component. - :param bool require_fragment: Set to ``True`` if you wish to require - the presence of the fragment component. - :returns: ``True`` if the URI is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn("Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning) - validators = [ - (self.scheme_is_valid, kwargs.get('require_scheme', False)), - (self.authority_is_valid, kwargs.get('require_authority', False)), - (self.path_is_valid, kwargs.get('require_path', False)), - (self.query_is_valid, kwargs.get('require_query', False)), - (self.fragment_is_valid, kwargs.get('require_fragment', False)), - ] - return all(v(r) for v, r in validators) - - def authority_is_valid(self, require=False): - """Determine if the authority component is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. - - :param bool require: - Set to ``True`` to require the presence of this component. - :returns: - ``True`` if the authority is valid. ``False`` otherwise. - :rtype: - bool - """ - warnings.warn("Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning) - try: - self.authority_info() - except exc.InvalidAuthority: - return False - - return validators.authority_is_valid( - self.authority, - host=self.host, - require=require, - ) - - def scheme_is_valid(self, require=False): - """Determine if the scheme component is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the scheme is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn("Please use rfc3986.validators.Validator instead. 
" - "This method will be eventually removed.", - DeprecationWarning) - return validators.scheme_is_valid(self.scheme, require) - - def path_is_valid(self, require=False): - """Determine if the path component is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the path is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn("Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning) - return validators.path_is_valid(self.path, require) - - def query_is_valid(self, require=False): - """Determine if the query component is valid. - - .. deprecated:: 1.1.0 - - Use the :class:`~rfc3986.validators.Validator` object instead. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the query is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn("Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning) - return validators.query_is_valid(self.query, require) - - def fragment_is_valid(self, require=False): - """Determine if the fragment component is valid. - - .. deprecated:: 1.1.0 - - Use the Validator object instead. - - :param str require: Set to ``True`` to require the presence of this - component. - :returns: ``True`` if the fragment is valid. ``False`` otherwise. - :rtype: bool - """ - warnings.warn("Please use rfc3986.validators.Validator instead. " - "This method will be eventually removed.", - DeprecationWarning) - return validators.fragment_is_valid(self.fragment, require) - - def normalized_equality(self, other_ref): - """Compare this URIReference to another URIReference. - - :param URIReference other_ref: (required), The reference with which - we're comparing. - :returns: ``True`` if the references are equal, ``False`` otherwise. - :rtype: bool - """ - return tuple(self.normalize()) == tuple(other_ref.normalize()) - - def resolve_with(self, base_uri, strict=False): - """Use an absolute URI Reference to resolve this relative reference. - - Assuming this is a relative reference that you would like to resolve, - use the provided base URI to resolve it. - - See http://tools.ietf.org/html/rfc3986#section-5 for more information. - - :param base_uri: Either a string or URIReference. It must be an - absolute URI or it will raise an exception. - :returns: A new URIReference which is the result of resolving this - reference using ``base_uri``. - :rtype: :class:`URIReference` - :raises rfc3986.exceptions.ResolutionError: - If the ``base_uri`` is not an absolute URI. 
- """ - if not isinstance(base_uri, URIMixin): - base_uri = type(self).from_string(base_uri) - - if not base_uri.is_absolute(): - raise exc.ResolutionError(base_uri) - - # This is optional per - # http://tools.ietf.org/html/rfc3986#section-5.2.1 - base_uri = base_uri.normalize() - - # The reference we're resolving - resolving = self - - if not strict and resolving.scheme == base_uri.scheme: - resolving = resolving.copy_with(scheme=None) - - # http://tools.ietf.org/html/rfc3986#page-32 - if resolving.scheme is not None: - target = resolving.copy_with( - path=normalizers.normalize_path(resolving.path) - ) - else: - if resolving.authority is not None: - target = resolving.copy_with( - scheme=base_uri.scheme, - path=normalizers.normalize_path(resolving.path) - ) - else: - if resolving.path is None: - if resolving.query is not None: - query = resolving.query - else: - query = base_uri.query - target = resolving.copy_with( - scheme=base_uri.scheme, - authority=base_uri.authority, - path=base_uri.path, - query=query - ) - else: - if resolving.path.startswith('/'): - path = normalizers.normalize_path(resolving.path) - else: - path = normalizers.normalize_path( - misc.merge_paths(base_uri, resolving.path) - ) - target = resolving.copy_with( - scheme=base_uri.scheme, - authority=base_uri.authority, - path=path, - query=resolving.query - ) - return target - - def unsplit(self): - """Create a URI string from the components. - - :returns: The URI Reference reconstituted as a string. - :rtype: str - """ - # See http://tools.ietf.org/html/rfc3986#section-5.3 - result_list = [] - if self.scheme: - result_list.extend([self.scheme, ':']) - if self.authority: - result_list.extend(['//', self.authority]) - if self.path: - result_list.append(self.path) - if self.query is not None: - result_list.extend(['?', self.query]) - if self.fragment is not None: - result_list.extend(['#', self.fragment]) - return ''.join(result_list) - - def copy_with(self, scheme=misc.UseExisting, authority=misc.UseExisting, - path=misc.UseExisting, query=misc.UseExisting, - fragment=misc.UseExisting): - """Create a copy of this reference with the new components. - - :param str scheme: - (optional) The scheme to use for the new reference. - :param str authority: - (optional) The authority to use for the new reference. - :param str path: - (optional) The path to use for the new reference. - :param str query: - (optional) The query to use for the new reference. - :param str fragment: - (optional) The fragment to use for the new reference. - :returns: - New URIReference with provided components. 
- :rtype: - URIReference - """ - attributes = { - 'scheme': scheme, - 'authority': authority, - 'path': path, - 'query': query, - 'fragment': fragment, - } - for key, value in list(attributes.items()): - if value is misc.UseExisting: - del attributes[key] - uri = self._replace(**attributes) - uri.encoding = self.encoding - return uri diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.pyc deleted file mode 100644 index 561bd866..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.py deleted file mode 100644 index 24c9c3d0..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.py +++ /dev/null @@ -1,267 +0,0 @@ -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module for the regular expressions crafted from ABNF.""" - -import sys - -# https://tools.ietf.org/html/rfc3986#page-13 -GEN_DELIMS = GENERIC_DELIMITERS = ":/?#[]@" -GENERIC_DELIMITERS_SET = set(GENERIC_DELIMITERS) -# https://tools.ietf.org/html/rfc3986#page-13 -SUB_DELIMS = SUB_DELIMITERS = "!$&'()*+,;=" -SUB_DELIMITERS_SET = set(SUB_DELIMITERS) -# Escape the '*' for use in regular expressions -SUB_DELIMITERS_RE = r"!$&'()\*+,;=" -RESERVED_CHARS_SET = GENERIC_DELIMITERS_SET.union(SUB_DELIMITERS_SET) -ALPHA = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' -DIGIT = '0123456789' -# https://tools.ietf.org/html/rfc3986#section-2.3 -UNRESERVED = UNRESERVED_CHARS = ALPHA + DIGIT + r'._!-' -UNRESERVED_CHARS_SET = set(UNRESERVED_CHARS) -NON_PCT_ENCODED_SET = RESERVED_CHARS_SET.union(UNRESERVED_CHARS_SET) -# We need to escape the '-' in this case: -UNRESERVED_RE = r'A-Za-z0-9._~\-' - -# Percent encoded character values -PERCENT_ENCODED = PCT_ENCODED = '%[A-Fa-f0-9]{2}' -PCHAR = '([' + UNRESERVED_RE + SUB_DELIMITERS_RE + ':@]|%s)' % PCT_ENCODED - -# NOTE(sigmavirus24): We're going to use more strict regular expressions -# than appear in Appendix B for scheme. This will prevent over-eager -# consuming of items that aren't schemes. -SCHEME_RE = '[a-zA-Z][a-zA-Z0-9+.-]*' -_AUTHORITY_RE = '[^/?#]*' -_PATH_RE = '[^?#]*' -_QUERY_RE = '[^#]*' -_FRAGMENT_RE = '.*' - -# Extracted from http://tools.ietf.org/html/rfc3986#appendix-B -COMPONENT_PATTERN_DICT = { - 'scheme': SCHEME_RE, - 'authority': _AUTHORITY_RE, - 'path': _PATH_RE, - 'query': _QUERY_RE, - 'fragment': _FRAGMENT_RE, -} - -# See http://tools.ietf.org/html/rfc3986#appendix-B -# In this case, we name each of the important matches so we can use -# SRE_Match#groupdict to parse the values out if we so choose. This is also -# modified to ignore other matches that are not important to the parsing of -# the reference so we can also simply use SRE_Match#groups. 
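The comment above describes how the Appendix B grammar is turned into one named-group regex so that a match's groupdict() hands back the five URI components; the strict pattern itself is assembled just below from COMPONENT_PATTERN_DICT. A minimal sketch of the same idea, using the permissive Appendix B expression from RFC 3986 directly rather than the stricter pattern that follows (standard library only; the example URI is made up for illustration):

import re

# Non-validating splitter from RFC 3986, Appendix B, with named groups so that
# match.groupdict() returns the five components, mirroring URL_PARSING_RE below.
APPENDIX_B_RE = re.compile(
    r'^(?:(?P<scheme>[^:/?#]+):)?'
    r'(?://(?P<authority>[^/?#]*))?'
    r'(?P<path>[^?#]*)'
    r'(?:\?(?P<query>[^#]*))?'
    r'(?:#(?P<fragment>.*))?$'
)

parts = APPENDIX_B_RE.match(
    'foo://user@example.com:8042/over/there?name=ferret#nose'
).groupdict()
# {'scheme': 'foo', 'authority': 'user@example.com:8042',
#  'path': '/over/there', 'query': 'name=ferret', 'fragment': 'nose'}
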
-URL_PARSING_RE = ( - r'(?:(?P<scheme>{scheme}):)?(?://(?P<authority>{authority}))?' - r'(?P<path>{path})(?:\?(?P<query>{query}))?' - r'(?:#(?P<fragment>{fragment}))?' -).format(**COMPONENT_PATTERN_DICT) - - -# ######################### -# Authority Matcher Section -# ######################### - -# Host patterns, see: http://tools.ietf.org/html/rfc3986#section-3.2.2 -# The pattern for a regular name, e.g., www.google.com, api.github.com -REGULAR_NAME_RE = REG_NAME = '((?:{0}|[{1}])*)'.format( - '%[0-9A-Fa-f]{2}', SUB_DELIMITERS_RE + UNRESERVED_RE -) -# The pattern for an IPv4 address, e.g., 192.168.255.255, 127.0.0.1, -IPv4_RE = r'([0-9]{1,3}\.){3}[0-9]{1,3}' -# Hexadecimal characters used in each piece of an IPv6 address -HEXDIG_RE = '[0-9A-Fa-f]{1,4}' -# Least-significant 32 bits of an IPv6 address -LS32_RE = '({hex}:{hex}|{ipv4})'.format(hex=HEXDIG_RE, ipv4=IPv4_RE) -# Substitutions into the following patterns for IPv6 patterns defined -# http://tools.ietf.org/html/rfc3986#page-20 -_subs = {'hex': HEXDIG_RE, 'ls32': LS32_RE} - -# Below: h16 = hexdig, see: https://tools.ietf.org/html/rfc5234 for details -# about ABNF (Augmented Backus-Naur Form) use in the comments -variations = [ - # 6( h16 ":" ) ls32 - '(%(hex)s:){6}%(ls32)s' % _subs, - # "::" 5( h16 ":" ) ls32 - '::(%(hex)s:){5}%(ls32)s' % _subs, - # [ h16 ] "::" 4( h16 ":" ) ls32 - '(%(hex)s)?::(%(hex)s:){4}%(ls32)s' % _subs, - # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 - '((%(hex)s:)?%(hex)s)?::(%(hex)s:){3}%(ls32)s' % _subs, - # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 - '((%(hex)s:){0,2}%(hex)s)?::(%(hex)s:){2}%(ls32)s' % _subs, - # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 - '((%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s' % _subs, - # [ *4( h16 ":" ) h16 ] "::" ls32 - '((%(hex)s:){0,4}%(hex)s)?::%(ls32)s' % _subs, - # [ *5( h16 ":" ) h16 ] "::" h16 - '((%(hex)s:){0,5}%(hex)s)?::%(hex)s' % _subs, - # [ *6( h16 ":" ) h16 ] "::" - '((%(hex)s:){0,6}%(hex)s)?::' % _subs, -] - -IPv6_RE = '(({0})|({1})|({2})|({3})|({4})|({5})|({6})|({7})|({8}))'.format( - *variations -) - -IPv_FUTURE_RE = r'v[0-9A-Fa-f]+\.[%s]+' % ( - UNRESERVED_RE + SUB_DELIMITERS_RE + ':' -) - -# RFC 6874 Zone ID ABNF -ZONE_ID = '(?:[' + UNRESERVED_RE + ']|' + PCT_ENCODED + ')+' - -IPv6_ADDRZ_RFC4007_RE = IPv6_RE + '(?:(?:%25|%)' + ZONE_ID + ')?' -IPv6_ADDRZ_RE = IPv6_RE + '(?:%25' + ZONE_ID + ')?' - -IP_LITERAL_RE = r'\[({0}|{1})\]'.format( - IPv6_ADDRZ_RFC4007_RE, - IPv_FUTURE_RE, -) - -# Pattern for matching the host piece of the authority -HOST_RE = HOST_PATTERN = '({0}|{1}|{2})'.format( - REG_NAME, - IPv4_RE, - IP_LITERAL_RE, -) -USERINFO_RE = '^([' + UNRESERVED_RE + SUB_DELIMITERS_RE + ':]|%s)+' % ( - PCT_ENCODED -) -PORT_RE = '[0-9]{1,5}' - -# #################### -# Path Matcher Section -# #################### - -# See http://tools.ietf.org/html/rfc3986#section-3.3 for more information -# about the path patterns defined below. -segments = { - 'segment': PCHAR + '*', - # Non-zero length segment - 'segment-nz': PCHAR + '+', - # Non-zero length segment without ":" - 'segment-nz-nc': PCHAR.replace(':', '') + '+' -} - -# Path types taken from Section 3.3 (linked above) -PATH_EMPTY = '^$' -PATH_ROOTLESS = '%(segment-nz)s(/%(segment)s)*' % segments -PATH_NOSCHEME = '%(segment-nz-nc)s(/%(segment)s)*' % segments -PATH_ABSOLUTE = '/(%s)?' 
% PATH_ROOTLESS -PATH_ABEMPTY = '(/%(segment)s)*' % segments -PATH_RE = '^(%s|%s|%s|%s|%s)$' % ( - PATH_ABEMPTY, PATH_ABSOLUTE, PATH_NOSCHEME, PATH_ROOTLESS, PATH_EMPTY -) - -FRAGMENT_RE = QUERY_RE = ( - '^([/?:@' + UNRESERVED_RE + SUB_DELIMITERS_RE + ']|%s)*$' % PCT_ENCODED -) - -# ########################## -# Relative reference matcher -# ########################## - -# See http://tools.ietf.org/html/rfc3986#section-4.2 for details -RELATIVE_PART_RE = '(//%s%s|%s|%s|%s)' % ( - COMPONENT_PATTERN_DICT['authority'], - PATH_ABEMPTY, - PATH_ABSOLUTE, - PATH_NOSCHEME, - PATH_EMPTY, -) - -# See http://tools.ietf.org/html/rfc3986#section-3 for definition -HIER_PART_RE = '(//%s%s|%s|%s|%s)' % ( - COMPONENT_PATTERN_DICT['authority'], - PATH_ABEMPTY, - PATH_ABSOLUTE, - PATH_ROOTLESS, - PATH_EMPTY, -) - -# ############### -# IRIs / RFC 3987 -# ############### - -# Only wide-unicode gets the high-ranges of UCSCHAR -if sys.maxunicode > 0xFFFF: # pragma: no cover - IPRIVATE = u'\uE000-\uF8FF\U000F0000-\U000FFFFD\U00100000-\U0010FFFD' - UCSCHAR_RE = ( - u'\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF' - u'\U00010000-\U0001FFFD\U00020000-\U0002FFFD' - u'\U00030000-\U0003FFFD\U00040000-\U0004FFFD' - u'\U00050000-\U0005FFFD\U00060000-\U0006FFFD' - u'\U00070000-\U0007FFFD\U00080000-\U0008FFFD' - u'\U00090000-\U0009FFFD\U000A0000-\U000AFFFD' - u'\U000B0000-\U000BFFFD\U000C0000-\U000CFFFD' - u'\U000D0000-\U000DFFFD\U000E1000-\U000EFFFD' - ) -else: # pragma: no cover - IPRIVATE = u'\uE000-\uF8FF' - UCSCHAR_RE = ( - u'\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF' - ) - -IUNRESERVED_RE = u'A-Za-z0-9\\._~\\-' + UCSCHAR_RE -IPCHAR = u'([' + IUNRESERVED_RE + SUB_DELIMITERS_RE + u':@]|%s)' % PCT_ENCODED - -isegments = { - 'isegment': IPCHAR + u'*', - # Non-zero length segment - 'isegment-nz': IPCHAR + u'+', - # Non-zero length segment without ":" - 'isegment-nz-nc': IPCHAR.replace(':', '') + u'+' -} - -IPATH_ROOTLESS = u'%(isegment-nz)s(/%(isegment)s)*' % isegments -IPATH_NOSCHEME = u'%(isegment-nz-nc)s(/%(isegment)s)*' % isegments -IPATH_ABSOLUTE = u'/(?:%s)?' 
% IPATH_ROOTLESS -IPATH_ABEMPTY = u'(?:/%(isegment)s)*' % isegments -IPATH_RE = u'^(?:%s|%s|%s|%s|%s)$' % ( - IPATH_ABEMPTY, IPATH_ABSOLUTE, IPATH_NOSCHEME, IPATH_ROOTLESS, PATH_EMPTY -) - -IREGULAR_NAME_RE = IREG_NAME = u'(?:{0}|[{1}])*'.format( - u'%[0-9A-Fa-f]{2}', SUB_DELIMITERS_RE + IUNRESERVED_RE -) - -IHOST_RE = IHOST_PATTERN = u'({0}|{1}|{2})'.format( - IREG_NAME, - IPv4_RE, - IP_LITERAL_RE, -) - -IUSERINFO_RE = u'^(?:[' + IUNRESERVED_RE + SUB_DELIMITERS_RE + u':]|%s)+' % ( - PCT_ENCODED -) - -IFRAGMENT_RE = (u'^(?:[/?:@' + IUNRESERVED_RE + SUB_DELIMITERS_RE - + u']|%s)*$' % PCT_ENCODED) -IQUERY_RE = (u'^(?:[/?:@' + IUNRESERVED_RE + SUB_DELIMITERS_RE - + IPRIVATE + u']|%s)*$' % PCT_ENCODED) - -IRELATIVE_PART_RE = u'(//%s%s|%s|%s|%s)' % ( - COMPONENT_PATTERN_DICT['authority'], - IPATH_ABEMPTY, - IPATH_ABSOLUTE, - IPATH_NOSCHEME, - PATH_EMPTY, -) - -IHIER_PART_RE = u'(//%s%s|%s|%s|%s)' % ( - COMPONENT_PATTERN_DICT['authority'], - IPATH_ABEMPTY, - IPATH_ABSOLUTE, - IPATH_ROOTLESS, - PATH_EMPTY, -) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.pyc deleted file mode 100644 index 49168e93..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.py deleted file mode 100644 index ddc4a1cd..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.py +++ /dev/null @@ -1,106 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Module containing the simple and functional API for rfc3986. - -This module defines functions and provides access to the public attributes -and classes of rfc3986. -""" - -from .iri import IRIReference -from .parseresult import ParseResult -from .uri import URIReference - - -def uri_reference(uri, encoding='utf-8'): - """Parse a URI string into a URIReference. - - This is a convenience function. You could achieve the same end by using - ``URIReference.from_string(uri)``. - - :param str uri: The URI which needs to be parsed into a reference. - :param str encoding: The encoding of the string provided - :returns: A parsed URI - :rtype: :class:`URIReference` - """ - return URIReference.from_string(uri, encoding) - - -def iri_reference(iri, encoding='utf-8'): - """Parse a IRI string into an IRIReference. - - This is a convenience function. You could achieve the same end by using - ``IRIReference.from_string(iri)``. - - :param str iri: The IRI which needs to be parsed into a reference. 
- :param str encoding: The encoding of the string provided - :returns: A parsed IRI - :rtype: :class:`IRIReference` - """ - return IRIReference.from_string(iri, encoding) - - -def is_valid_uri(uri, encoding='utf-8', **kwargs): - """Determine if the URI given is valid. - - This is a convenience function. You could use either - ``uri_reference(uri).is_valid()`` or - ``URIReference.from_string(uri).is_valid()`` to achieve the same result. - - :param str uri: The URI to be validated. - :param str encoding: The encoding of the string provided - :param bool require_scheme: Set to ``True`` if you wish to require the - presence of the scheme component. - :param bool require_authority: Set to ``True`` if you wish to require the - presence of the authority component. - :param bool require_path: Set to ``True`` if you wish to require the - presence of the path component. - :param bool require_query: Set to ``True`` if you wish to require the - presence of the query component. - :param bool require_fragment: Set to ``True`` if you wish to require the - presence of the fragment component. - :returns: ``True`` if the URI is valid, ``False`` otherwise. - :rtype: bool - """ - return URIReference.from_string(uri, encoding).is_valid(**kwargs) - - -def normalize_uri(uri, encoding='utf-8'): - """Normalize the given URI. - - This is a convenience function. You could use either - ``uri_reference(uri).normalize().unsplit()`` or - ``URIReference.from_string(uri).normalize().unsplit()`` instead. - - :param str uri: The URI to be normalized. - :param str encoding: The encoding of the string provided - :returns: The normalized URI. - :rtype: str - """ - normalized_reference = URIReference.from_string(uri, encoding).normalize() - return normalized_reference.unsplit() - - -def urlparse(uri, encoding='utf-8'): - """Parse a given URI and return a ParseResult. - - This is a partial replacement of the standard library's urlparse function. - - :param str uri: The URI to be parsed. - :param str encoding: The encoding of the string provided. - :returns: A parsed URI - :rtype: :class:`~rfc3986.parseresult.ParseResult` - """ - return ParseResult.from_string(uri, encoding, strict=False) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.pyc deleted file mode 100644 index 64cd17db..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.py deleted file mode 100644 index 79342799..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.py +++ /dev/null @@ -1,298 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2017 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
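Taken together, the api module deleted above is a thin convenience layer over URIReference, and the builder module that begins here wraps the same machinery in a fluent interface. A rough usage sketch, assuming the standalone rfc3986 distribution (the same code vendored here) is installed; the URIs are made up for illustration:

from rfc3986 import uri_reference, normalize_uri, urlparse
from rfc3986.builder import URIBuilder

# uri_reference() is shorthand for URIReference.from_string(); components are
# exposed as attributes and are not normalized until normalize() is called.
ref = uri_reference('HTTP://Example.COM/a/./b/../c?x=1#top')
print(ref.scheme, ref.authority, ref.path)   # HTTP Example.COM /a/./b/../c

# normalize_uri() lower-cases the scheme and host and removes dot segments.
print(normalize_uri('HTTP://Example.COM/a/./b/../c?x=1#top'))
# http://example.com/a/c?x=1#top

# urlparse() is the partial replacement for the standard library function.
print(urlparse('https://example.com:8080/path').hostname)   # example.com

# URIBuilder (defined below) assembles a reference piece by piece.
print(URIBuilder().add_scheme('https').add_host('example.com')
      .add_path('a/b').add_query_from({'q': 'x y'}).finalize().unsplit())
# https://example.com/a/b?q=x+y
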
-"""Module containing the logic for the URIBuilder object.""" -from . import compat -from . import normalizers -from . import uri - - -class URIBuilder(object): - """Object to aid in building up a URI Reference from parts. - - .. note:: - - This object should be instantiated by the user, but it's recommended - that it is not provided with arguments. Instead, use the available - method to populate the fields. - - """ - - def __init__(self, scheme=None, userinfo=None, host=None, port=None, - path=None, query=None, fragment=None): - """Initialize our URI builder. - - :param str scheme: - (optional) - :param str userinfo: - (optional) - :param str host: - (optional) - :param int port: - (optional) - :param str path: - (optional) - :param str query: - (optional) - :param str fragment: - (optional) - """ - self.scheme = scheme - self.userinfo = userinfo - self.host = host - self.port = port - self.path = path - self.query = query - self.fragment = fragment - - def __repr__(self): - """Provide a convenient view of our builder object.""" - formatstr = ('URIBuilder(scheme={b.scheme}, userinfo={b.userinfo}, ' - 'host={b.host}, port={b.port}, path={b.path}, ' - 'query={b.query}, fragment={b.fragment})') - return formatstr.format(b=self) - - def add_scheme(self, scheme): - """Add a scheme to our builder object. - - After normalizing, this will generate a new URIBuilder instance with - the specified scheme and all other attributes the same. - - .. code-block:: python - - >>> URIBuilder().add_scheme('HTTPS') - URIBuilder(scheme='https', userinfo=None, host=None, port=None, - path=None, query=None, fragment=None) - - """ - scheme = normalizers.normalize_scheme(scheme) - return URIBuilder( - scheme=scheme, - userinfo=self.userinfo, - host=self.host, - port=self.port, - path=self.path, - query=self.query, - fragment=self.fragment, - ) - - def add_credentials(self, username, password): - """Add credentials as the userinfo portion of the URI. - - .. code-block:: python - - >>> URIBuilder().add_credentials('root', 's3crete') - URIBuilder(scheme=None, userinfo='root:s3crete', host=None, - port=None, path=None, query=None, fragment=None) - - >>> URIBuilder().add_credentials('root', None) - URIBuilder(scheme=None, userinfo='root', host=None, - port=None, path=None, query=None, fragment=None) - """ - if username is None: - raise ValueError('Username cannot be None') - userinfo = normalizers.normalize_username(username) - - if password is not None: - userinfo = '{}:{}'.format( - userinfo, - normalizers.normalize_password(password), - ) - - return URIBuilder( - scheme=self.scheme, - userinfo=userinfo, - host=self.host, - port=self.port, - path=self.path, - query=self.query, - fragment=self.fragment, - ) - - def add_host(self, host): - """Add hostname to the URI. - - .. code-block:: python - - >>> URIBuilder().add_host('google.com') - URIBuilder(scheme=None, userinfo=None, host='google.com', - port=None, path=None, query=None, fragment=None) - - """ - return URIBuilder( - scheme=self.scheme, - userinfo=self.userinfo, - host=normalizers.normalize_host(host), - port=self.port, - path=self.path, - query=self.query, - fragment=self.fragment, - ) - - def add_port(self, port): - """Add port to the URI. - - .. 
code-block:: python - - >>> URIBuilder().add_port(80) - URIBuilder(scheme=None, userinfo=None, host=None, port='80', - path=None, query=None, fragment=None) - - >>> URIBuilder().add_port(443) - URIBuilder(scheme=None, userinfo=None, host=None, port='443', - path=None, query=None, fragment=None) - - """ - port_int = int(port) - if port_int < 0: - raise ValueError( - 'ports are not allowed to be negative. You provided {}'.format( - port_int, - ) - ) - if port_int > 65535: - raise ValueError( - 'ports are not allowed to be larger than 65535. ' - 'You provided {}'.format( - port_int, - ) - ) - - return URIBuilder( - scheme=self.scheme, - userinfo=self.userinfo, - host=self.host, - port='{}'.format(port_int), - path=self.path, - query=self.query, - fragment=self.fragment, - ) - - def add_path(self, path): - """Add a path to the URI. - - .. code-block:: python - - >>> URIBuilder().add_path('sigmavirus24/rfc3985') - URIBuilder(scheme=None, userinfo=None, host=None, port=None, - path='/sigmavirus24/rfc3986', query=None, fragment=None) - - >>> URIBuilder().add_path('/checkout.php') - URIBuilder(scheme=None, userinfo=None, host=None, port=None, - path='/checkout.php', query=None, fragment=None) - - """ - if not path.startswith('/'): - path = '/{}'.format(path) - - return URIBuilder( - scheme=self.scheme, - userinfo=self.userinfo, - host=self.host, - port=self.port, - path=normalizers.normalize_path(path), - query=self.query, - fragment=self.fragment, - ) - - def add_query_from(self, query_items): - """Generate and add a query a dictionary or list of tuples. - - .. code-block:: python - - >>> URIBuilder().add_query_from({'a': 'b c'}) - URIBuilder(scheme=None, userinfo=None, host=None, port=None, - path=None, query='a=b+c', fragment=None) - - >>> URIBuilder().add_query_from([('a', 'b c')]) - URIBuilder(scheme=None, userinfo=None, host=None, port=None, - path=None, query='a=b+c', fragment=None) - - """ - query = normalizers.normalize_query(compat.urlencode(query_items)) - - return URIBuilder( - scheme=self.scheme, - userinfo=self.userinfo, - host=self.host, - port=self.port, - path=self.path, - query=query, - fragment=self.fragment, - ) - - def add_query(self, query): - """Add a pre-formated query string to the URI. - - .. code-block:: python - - >>> URIBuilder().add_query('a=b&c=d') - URIBuilder(scheme=None, userinfo=None, host=None, port=None, - path=None, query='a=b&c=d', fragment=None) - - """ - return URIBuilder( - scheme=self.scheme, - userinfo=self.userinfo, - host=self.host, - port=self.port, - path=self.path, - query=normalizers.normalize_query(query), - fragment=self.fragment, - ) - - def add_fragment(self, fragment): - """Add a fragment to the URI. - - .. code-block:: python - - >>> URIBuilder().add_fragment('section-2.6.1') - URIBuilder(scheme=None, userinfo=None, host=None, port=None, - path=None, query=None, fragment='section-2.6.1') - - """ - return URIBuilder( - scheme=self.scheme, - userinfo=self.userinfo, - host=self.host, - port=self.port, - path=self.path, - query=self.query, - fragment=normalizers.normalize_fragment(fragment), - ) - - def finalize(self): - """Create a URIReference from our builder. - - .. code-block:: python - - >>> URIBuilder().add_scheme('https').add_host('github.com' - ... ).add_path('sigmavirus24/rfc3986').finalize().unsplit() - 'https://github.com/sigmavirus24/rfc3986' - - >>> URIBuilder().add_scheme('https').add_host('github.com' - ... ).add_path('sigmavirus24/rfc3986').add_credentials( - ... 
'sigmavirus24', 'not-re@l').finalize().unsplit() - 'https://sigmavirus24:not-re%40l@github.com/sigmavirus24/rfc3986' - - """ - return uri.URIReference( - self.scheme, - normalizers.normalize_authority( - (self.userinfo, self.host, self.port) - ), - self.path, - self.query, - self.fragment, - ) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.pyc deleted file mode 100644 index a9f428be..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.py deleted file mode 100644 index 8968c384..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Compatibility module for Python 2 and 3 support.""" -import sys - -try: - from urllib.parse import quote as urlquote -except ImportError: # Python 2.x - from urllib import quote as urlquote - -try: - from urllib.parse import urlencode -except ImportError: # Python 2.x - from urllib import urlencode - -__all__ = ( - 'to_bytes', - 'to_str', - 'urlquote', - 'urlencode', -) - -PY3 = (3, 0) <= sys.version_info < (4, 0) -PY2 = (2, 6) <= sys.version_info < (2, 8) - - -if PY3: - unicode = str # Python 3.x - - -def to_str(b, encoding='utf-8'): - """Ensure that b is text in the specified encoding.""" - if hasattr(b, 'decode') and not isinstance(b, unicode): - b = b.decode(encoding) - return b - - -def to_bytes(s, encoding='utf-8'): - """Ensure that s is converted to bytes from the encoding.""" - if hasattr(s, 'encode') and not isinstance(s, bytes): - s = s.encode(encoding) - return s diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.pyc deleted file mode 100644 index 5b800b3f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.py deleted file mode 100644 index da8ca7cb..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -"""Exceptions module for rfc3986.""" - -from . 
import compat - - -class RFC3986Exception(Exception): - """Base class for all rfc3986 exception classes.""" - - pass - - -class InvalidAuthority(RFC3986Exception): - """Exception when the authority string is invalid.""" - - def __init__(self, authority): - """Initialize the exception with the invalid authority.""" - super(InvalidAuthority, self).__init__( - u"The authority ({0}) is not valid.".format( - compat.to_str(authority))) - - -class InvalidPort(RFC3986Exception): - """Exception when the port is invalid.""" - - def __init__(self, port): - """Initialize the exception with the invalid port.""" - super(InvalidPort, self).__init__( - 'The port ("{0}") is not valid.'.format(port)) - - -class ResolutionError(RFC3986Exception): - """Exception to indicate a failure to resolve a URI.""" - - def __init__(self, uri): - """Initialize the error with the failed URI.""" - super(ResolutionError, self).__init__( - "{0} is not an absolute URI.".format(uri.unsplit())) - - -class ValidationError(RFC3986Exception): - """Exception raised during Validation of a URI.""" - - pass - - -class MissingComponentError(ValidationError): - """Exception raised when a required component is missing.""" - - def __init__(self, uri, *component_names): - """Initialize the error with the missing component name.""" - verb = 'was' - if len(component_names) > 1: - verb = 'were' - - self.uri = uri - self.components = sorted(component_names) - components = ', '.join(self.components) - super(MissingComponentError, self).__init__( - "{} {} required but missing".format(components, verb), - uri, - self.components, - ) - - -class UnpermittedComponentError(ValidationError): - """Exception raised when a component has an unpermitted value.""" - - def __init__(self, component_name, component_value, allowed_values): - """Initialize the error with the unpermitted component.""" - super(UnpermittedComponentError, self).__init__( - "{} was required to be one of {!r} but was {!r}".format( - component_name, list(sorted(allowed_values)), component_value, - ), - component_name, - component_value, - allowed_values, - ) - self.component_name = component_name - self.component_value = component_value - self.allowed_values = allowed_values - - -class PasswordForbidden(ValidationError): - """Exception raised when a URL has a password in the userinfo section.""" - - def __init__(self, uri): - """Initialize the error with the URI that failed validation.""" - unsplit = getattr(uri, 'unsplit', lambda: uri) - super(PasswordForbidden, self).__init__( - '"{}" contained a password when validation forbade it'.format( - unsplit() - ) - ) - self.uri = uri - - -class InvalidComponentsError(ValidationError): - """Exception raised when one or more components are invalid.""" - - def __init__(self, uri, *component_names): - """Initialize the error with the invalid component name(s).""" - verb = 'was' - if len(component_names) > 1: - verb = 'were' - - self.uri = uri - self.components = sorted(component_names) - components = ', '.join(self.components) - super(InvalidComponentsError, self).__init__( - "{} {} found to be invalid".format(components, verb), - uri, - self.components, - ) - - -class MissingDependencyError(RFC3986Exception): - """Exception raised when an IRI is encoded without the 'idna' module.""" diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.pyc deleted file mode 100644 index c5bdcf48..00000000 Binary files 
a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.py deleted file mode 100644 index 416cae4a..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Module containing the implementation of the IRIReference class.""" -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Copyright (c) 2015 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from collections import namedtuple - -from . import compat -from . import exceptions -from . import misc -from . import normalizers -from . import uri - - -try: - from pip._vendor import idna -except ImportError: # pragma: no cover - idna = None - - -class IRIReference(namedtuple('IRIReference', misc.URI_COMPONENTS), - uri.URIMixin): - """Immutable object representing a parsed IRI Reference. - - Can be encoded into an URIReference object via the procedure - specified in RFC 3987 Section 3.1 - - .. note:: - The IRI submodule is a new interface and may possibly change in - the future. Check for changes to the interface when upgrading. - """ - - slots = () - - def __new__(cls, scheme, authority, path, query, fragment, - encoding='utf-8'): - """Create a new IRIReference.""" - ref = super(IRIReference, cls).__new__( - cls, - scheme or None, - authority or None, - path or None, - query, - fragment) - ref.encoding = encoding - return ref - - def __eq__(self, other): - """Compare this reference to another.""" - other_ref = other - if isinstance(other, tuple): - other_ref = self.__class__(*other) - elif not isinstance(other, IRIReference): - try: - other_ref = self.__class__.from_string(other) - except TypeError: - raise TypeError( - 'Unable to compare {0}() to {1}()'.format( - type(self).__name__, type(other).__name__)) - - # See http://tools.ietf.org/html/rfc3986#section-6.2 - return tuple(self) == tuple(other_ref) - - def _match_subauthority(self): - return misc.ISUBAUTHORITY_MATCHER.match(self.authority) - - @classmethod - def from_string(cls, iri_string, encoding='utf-8'): - """Parse a IRI reference from the given unicode IRI string. - - :param str iri_string: Unicode IRI to be parsed into a reference. - :param str encoding: The encoding of the string provided - :returns: :class:`IRIReference` or subclass thereof - """ - iri_string = compat.to_str(iri_string, encoding) - - split_iri = misc.IRI_MATCHER.match(iri_string).groupdict() - return cls( - split_iri['scheme'], split_iri['authority'], - normalizers.encode_component(split_iri['path'], encoding), - normalizers.encode_component(split_iri['query'], encoding), - normalizers.encode_component(split_iri['fragment'], encoding), - encoding, - ) - - def encode(self, idna_encoder=None): # noqa: C901 - """Encode an IRIReference into a URIReference instance. 
- - If the ``idna`` module is installed or the ``rfc3986[idna]`` - extra is used then unicode characters in the IRI host - component will be encoded with IDNA2008. - - :param idna_encoder: - Function that encodes each part of the host component - If not given will raise an exception if the IRI - contains a host component. - :rtype: uri.URIReference - :returns: A URI reference - """ - authority = self.authority - if authority: - if idna_encoder is None: - if idna is None: # pragma: no cover - raise exceptions.MissingDependencyError( - "Could not import the 'idna' module " - "and the IRI hostname requires encoding" - ) - - def idna_encoder(name): - if any(ord(c) > 128 for c in name): - try: - return idna.encode(name.lower(), - strict=True, - std3_rules=True) - except idna.IDNAError: - raise exceptions.InvalidAuthority(self.authority) - return name - - authority = "" - if self.host: - authority = ".".join([compat.to_str(idna_encoder(part)) - for part in self.host.split(".")]) - - if self.userinfo is not None: - authority = (normalizers.encode_component( - self.userinfo, self.encoding) + '@' + authority) - - if self.port is not None: - authority += ":" + str(self.port) - - return uri.URIReference(self.scheme, - authority, - path=self.path, - query=self.query, - fragment=self.fragment, - encoding=self.encoding) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.pyc deleted file mode 100644 index db08ce2e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.py deleted file mode 100644 index b735e044..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Module containing compiled regular expressions and constants. - -This module contains important constants, patterns, and compiled regular -expressions for parsing and validating URIs and their components. -""" - -import re - -from . 
import abnf_regexp - -# These are enumerated for the named tuple used as a superclass of -# URIReference -URI_COMPONENTS = ['scheme', 'authority', 'path', 'query', 'fragment'] - -important_characters = { - 'generic_delimiters': abnf_regexp.GENERIC_DELIMITERS, - 'sub_delimiters': abnf_regexp.SUB_DELIMITERS, - # We need to escape the '*' in this case - 're_sub_delimiters': abnf_regexp.SUB_DELIMITERS_RE, - 'unreserved_chars': abnf_regexp.UNRESERVED_CHARS, - # We need to escape the '-' in this case: - 're_unreserved': abnf_regexp.UNRESERVED_RE, -} - -# For details about delimiters and reserved characters, see: -# http://tools.ietf.org/html/rfc3986#section-2.2 -GENERIC_DELIMITERS = abnf_regexp.GENERIC_DELIMITERS_SET -SUB_DELIMITERS = abnf_regexp.SUB_DELIMITERS_SET -RESERVED_CHARS = abnf_regexp.RESERVED_CHARS_SET -# For details about unreserved characters, see: -# http://tools.ietf.org/html/rfc3986#section-2.3 -UNRESERVED_CHARS = abnf_regexp.UNRESERVED_CHARS_SET -NON_PCT_ENCODED = abnf_regexp.NON_PCT_ENCODED_SET - -URI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE) - -SUBAUTHORITY_MATCHER = re.compile(( - '^(?:(?P<userinfo>{0})@)?' # userinfo - '(?P<host>{1})' # host - ':?(?P<port>{2})?$' # port - ).format(abnf_regexp.USERINFO_RE, - abnf_regexp.HOST_PATTERN, - abnf_regexp.PORT_RE)) - - -HOST_MATCHER = re.compile('^' + abnf_regexp.HOST_RE + '$') -IPv4_MATCHER = re.compile('^' + abnf_regexp.IPv4_RE + '$') -IPv6_MATCHER = re.compile(r'^\[' + abnf_regexp.IPv6_ADDRZ_RFC4007_RE + r'\]$') - -# Used by host validator -IPv6_NO_RFC4007_MATCHER = re.compile(r'^\[%s\]$' % ( - abnf_regexp.IPv6_ADDRZ_RE -)) - -# Matcher used to validate path components -PATH_MATCHER = re.compile(abnf_regexp.PATH_RE) - - -# ################################## -# Query and Fragment Matcher Section -# ################################## - -QUERY_MATCHER = re.compile(abnf_regexp.QUERY_RE) - -FRAGMENT_MATCHER = QUERY_MATCHER - -# Scheme validation, see: http://tools.ietf.org/html/rfc3986#section-3.1 -SCHEME_MATCHER = re.compile('^{0}$'.format(abnf_regexp.SCHEME_RE)) - -RELATIVE_REF_MATCHER = re.compile(r'^%s(\?%s)?(#%s)?$' % ( - abnf_regexp.RELATIVE_PART_RE, - abnf_regexp.QUERY_RE, - abnf_regexp.FRAGMENT_RE, -)) - -# See http://tools.ietf.org/html/rfc3986#section-4.3 -ABSOLUTE_URI_MATCHER = re.compile(r'^%s:%s(\?%s)?$' % ( - abnf_regexp.COMPONENT_PATTERN_DICT['scheme'], - abnf_regexp.HIER_PART_RE, - abnf_regexp.QUERY_RE[1:-1], -)) - -# ############### -# IRIs / RFC 3987 -# ############### - -IRI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE, re.UNICODE) - -ISUBAUTHORITY_MATCHER = re.compile(( - u'^(?:(?P<userinfo>{0})@)?' 
# iuserinfo - u'(?P<host>{1})' # ihost - u':?(?P<port>{2})?$' # port - ).format(abnf_regexp.IUSERINFO_RE, - abnf_regexp.IHOST_RE, - abnf_regexp.PORT_RE), re.UNICODE) - - -# Path merger as defined in http://tools.ietf.org/html/rfc3986#section-5.2.3 -def merge_paths(base_uri, relative_path): - """Merge a base URI's path with a relative URI's path.""" - if base_uri.path is None and base_uri.authority is not None: - return '/' + relative_path - else: - path = base_uri.path or '' - index = path.rfind('/') - return path[:index] + '/' + relative_path - - -UseExisting = object() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.pyc deleted file mode 100644 index 1b3162bb..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.py deleted file mode 100644 index 2eb1bb36..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module with functions to normalize components.""" -import re - -from . import compat -from . import misc - - -def normalize_scheme(scheme): - """Normalize the scheme component.""" - return scheme.lower() - - -def normalize_authority(authority): - """Normalize an authority tuple to a string.""" - userinfo, host, port = authority - result = '' - if userinfo: - result += normalize_percent_characters(userinfo) + '@' - if host: - result += normalize_host(host) - if port: - result += ':' + port - return result - - -def normalize_username(username): - """Normalize a username to make it safe to include in userinfo.""" - return compat.urlquote(username) - - -def normalize_password(password): - """Normalize a password to make safe for userinfo.""" - return compat.urlquote(password) - - -def normalize_host(host): - """Normalize a host string.""" - if misc.IPv6_MATCHER.match(host): - percent = host.find('%') - if percent != -1: - percent_25 = host.find('%25') - - # Replace RFC 4007 IPv6 Zone ID delimiter '%' with '%25' - # from RFC 6874. 
If the host is '[<IPv6 addr>%25]' then we - # assume RFC 4007 and normalize to '[<IPV6 addr>%2525]' - if percent_25 == -1 or percent < percent_25 or \ - (percent == percent_25 and percent_25 == len(host) - 4): - host = host.replace('%', '%25', 1) - - # Don't normalize the casing of the Zone ID - return host[:percent].lower() + host[percent:] - - return host.lower() - - -def normalize_path(path): - """Normalize the path string.""" - if not path: - return path - - path = normalize_percent_characters(path) - return remove_dot_segments(path) - - -def normalize_query(query): - """Normalize the query string.""" - if not query: - return query - return normalize_percent_characters(query) - - -def normalize_fragment(fragment): - """Normalize the fragment string.""" - if not fragment: - return fragment - return normalize_percent_characters(fragment) - - -PERCENT_MATCHER = re.compile('%[A-Fa-f0-9]{2}') - - -def normalize_percent_characters(s): - """All percent characters should be upper-cased. - - For example, ``"%3afoo%DF%ab"`` should be turned into ``"%3Afoo%DF%AB"``. - """ - matches = set(PERCENT_MATCHER.findall(s)) - for m in matches: - if not m.isupper(): - s = s.replace(m, m.upper()) - return s - - -def remove_dot_segments(s): - """Remove dot segments from the string. - - See also Section 5.2.4 of :rfc:`3986`. - """ - # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code - segments = s.split('/') # Turn the path into a list of segments - output = [] # Initialize the variable to use to store output - - for segment in segments: - # '.' is the current directory, so ignore it, it is superfluous - if segment == '.': - continue - # Anything other than '..', should be appended to the output - elif segment != '..': - output.append(segment) - # In this case segment == '..', if we can, we should pop the last - # element - elif output: - output.pop() - - # If the path starts with '/' and the output is empty or the first string - # is non-empty - if s.startswith('/') and (not output or output[0]): - output.insert(0, '') - - # If the path starts with '/.' or '/..' ensure we add one more empty - # string to add a trailing '/' - if s.endswith(('/.', '/..')): - output.append('') - - return '/'.join(output) - - -def encode_component(uri_component, encoding): - """Encode the specific component in the provided encoding.""" - if uri_component is None: - return uri_component - - # Try to see if the component we're encoding is already percent-encoded - # so we can skip all '%' characters but still encode all others. 
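The two helpers just above, normalize_percent_characters() and remove_dot_segments(), carry most of the normalization work and can be exercised on their own; the body of encode_component() continues right after this sketch. Again assuming the standalone rfc3986 package is installed; the inputs are the examples from the docstring above and from RFC 3986, Section 5.2.4:

from rfc3986 import normalizers

# Percent-escapes keep their value but their hex digits are upper-cased.
print(normalizers.normalize_percent_characters('%3afoo%DF%ab'))   # %3Afoo%DF%AB

# Dot segments are resolved as described in RFC 3986, Section 5.2.4.
print(normalizers.remove_dot_segments('/a/b/c/./../../g'))    # /a/g
print(normalizers.remove_dot_segments('mid/content=5/../6'))  # mid/6
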
- percent_encodings = len(PERCENT_MATCHER.findall( - compat.to_str(uri_component, encoding))) - - uri_bytes = compat.to_bytes(uri_component, encoding) - is_percent_encoded = percent_encodings == uri_bytes.count(b'%') - - encoded_uri = bytearray() - - for i in range(0, len(uri_bytes)): - # Will return a single character bytestring on both Python 2 & 3 - byte = uri_bytes[i:i+1] - byte_ord = ord(byte) - if ((is_percent_encoded and byte == b'%') - or (byte_ord < 128 and byte.decode() in misc.NON_PCT_ENCODED)): - encoded_uri.extend(byte) - continue - encoded_uri.extend('%{0:02x}'.format(byte_ord).encode().upper()) - - return encoded_uri.decode(encoding) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.pyc deleted file mode 100644 index 389f1e0a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.py deleted file mode 100644 index 0a734566..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2015 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module containing the urlparse compatibility logic.""" -from collections import namedtuple - -from . import compat -from . import exceptions -from . import misc -from . import normalizers -from . import uri - -__all__ = ('ParseResult', 'ParseResultBytes') - -PARSED_COMPONENTS = ('scheme', 'userinfo', 'host', 'port', 'path', 'query', - 'fragment') - - -class ParseResultMixin(object): - def _generate_authority(self, attributes): - # I swear I did not align the comparisons below. That's just how they - # happened to align based on pep8 and attribute lengths. - userinfo, host, port = (attributes[p] - for p in ('userinfo', 'host', 'port')) - if (self.userinfo != userinfo or - self.host != host or - self.port != port): - if port: - port = '{0}'.format(port) - return normalizers.normalize_authority( - (compat.to_str(userinfo, self.encoding), - compat.to_str(host, self.encoding), - port) - ) - return self.authority - - def geturl(self): - """Shim to match the standard library method.""" - return self.unsplit() - - @property - def hostname(self): - """Shim to match the standard library.""" - return self.host - - @property - def netloc(self): - """Shim to match the standard library.""" - return self.authority - - @property - def params(self): - """Shim to match the standard library.""" - return self.query - - -class ParseResult(namedtuple('ParseResult', PARSED_COMPONENTS), - ParseResultMixin): - """Implementation of urlparse compatibility class. 
- - This uses the URIReference logic to handle compatibility with the - urlparse.ParseResult class. - """ - - slots = () - - def __new__(cls, scheme, userinfo, host, port, path, query, fragment, - uri_ref, encoding='utf-8'): - """Create a new ParseResult.""" - parse_result = super(ParseResult, cls).__new__( - cls, - scheme or None, - userinfo or None, - host, - port or None, - path or None, - query, - fragment) - parse_result.encoding = encoding - parse_result.reference = uri_ref - return parse_result - - @classmethod - def from_parts(cls, scheme=None, userinfo=None, host=None, port=None, - path=None, query=None, fragment=None, encoding='utf-8'): - """Create a ParseResult instance from its parts.""" - authority = '' - if userinfo is not None: - authority += userinfo + '@' - if host is not None: - authority += host - if port is not None: - authority += ':{0}'.format(port) - uri_ref = uri.URIReference(scheme=scheme, - authority=authority, - path=path, - query=query, - fragment=fragment, - encoding=encoding).normalize() - userinfo, host, port = authority_from(uri_ref, strict=True) - return cls(scheme=uri_ref.scheme, - userinfo=userinfo, - host=host, - port=port, - path=uri_ref.path, - query=uri_ref.query, - fragment=uri_ref.fragment, - uri_ref=uri_ref, - encoding=encoding) - - @classmethod - def from_string(cls, uri_string, encoding='utf-8', strict=True, - lazy_normalize=True): - """Parse a URI from the given unicode URI string. - - :param str uri_string: Unicode URI to be parsed into a reference. - :param str encoding: The encoding of the string provided - :param bool strict: Parse strictly according to :rfc:`3986` if True. - If False, parse similarly to the standard library's urlparse - function. - :returns: :class:`ParseResult` or subclass thereof - """ - reference = uri.URIReference.from_string(uri_string, encoding) - if not lazy_normalize: - reference = reference.normalize() - userinfo, host, port = authority_from(reference, strict) - - return cls(scheme=reference.scheme, - userinfo=userinfo, - host=host, - port=port, - path=reference.path, - query=reference.query, - fragment=reference.fragment, - uri_ref=reference, - encoding=encoding) - - @property - def authority(self): - """Return the normalized authority.""" - return self.reference.authority - - def copy_with(self, scheme=misc.UseExisting, userinfo=misc.UseExisting, - host=misc.UseExisting, port=misc.UseExisting, - path=misc.UseExisting, query=misc.UseExisting, - fragment=misc.UseExisting): - """Create a copy of this instance replacing with specified parts.""" - attributes = zip(PARSED_COMPONENTS, - (scheme, userinfo, host, port, path, query, fragment)) - attrs_dict = {} - for name, value in attributes: - if value is misc.UseExisting: - value = getattr(self, name) - attrs_dict[name] = value - authority = self._generate_authority(attrs_dict) - ref = self.reference.copy_with(scheme=attrs_dict['scheme'], - authority=authority, - path=attrs_dict['path'], - query=attrs_dict['query'], - fragment=attrs_dict['fragment']) - return ParseResult(uri_ref=ref, encoding=self.encoding, **attrs_dict) - - def encode(self, encoding=None): - """Convert to an instance of ParseResultBytes.""" - encoding = encoding or self.encoding - attrs = dict( - zip(PARSED_COMPONENTS, - (attr.encode(encoding) if hasattr(attr, 'encode') else attr - for attr in self))) - return ParseResultBytes( - uri_ref=self.reference, - encoding=encoding, - **attrs - ) - - def unsplit(self, use_idna=False): - """Create a URI string from the components. 
- - :returns: The parsed URI reconstituted as a string. - :rtype: str - """ - parse_result = self - if use_idna and self.host: - hostbytes = self.host.encode('idna') - host = hostbytes.decode(self.encoding) - parse_result = self.copy_with(host=host) - return parse_result.reference.unsplit() - - -class ParseResultBytes(namedtuple('ParseResultBytes', PARSED_COMPONENTS), - ParseResultMixin): - """Compatibility shim for the urlparse.ParseResultBytes object.""" - - def __new__(cls, scheme, userinfo, host, port, path, query, fragment, - uri_ref, encoding='utf-8', lazy_normalize=True): - """Create a new ParseResultBytes instance.""" - parse_result = super(ParseResultBytes, cls).__new__( - cls, - scheme or None, - userinfo or None, - host, - port or None, - path or None, - query or None, - fragment or None) - parse_result.encoding = encoding - parse_result.reference = uri_ref - parse_result.lazy_normalize = lazy_normalize - return parse_result - - @classmethod - def from_parts(cls, scheme=None, userinfo=None, host=None, port=None, - path=None, query=None, fragment=None, encoding='utf-8', - lazy_normalize=True): - """Create a ParseResult instance from its parts.""" - authority = '' - if userinfo is not None: - authority += userinfo + '@' - if host is not None: - authority += host - if port is not None: - authority += ':{0}'.format(int(port)) - uri_ref = uri.URIReference(scheme=scheme, - authority=authority, - path=path, - query=query, - fragment=fragment, - encoding=encoding) - if not lazy_normalize: - uri_ref = uri_ref.normalize() - to_bytes = compat.to_bytes - userinfo, host, port = authority_from(uri_ref, strict=True) - return cls(scheme=to_bytes(scheme, encoding), - userinfo=to_bytes(userinfo, encoding), - host=to_bytes(host, encoding), - port=port, - path=to_bytes(path, encoding), - query=to_bytes(query, encoding), - fragment=to_bytes(fragment, encoding), - uri_ref=uri_ref, - encoding=encoding, - lazy_normalize=lazy_normalize) - - @classmethod - def from_string(cls, uri_string, encoding='utf-8', strict=True, - lazy_normalize=True): - """Parse a URI from the given unicode URI string. - - :param str uri_string: Unicode URI to be parsed into a reference. - :param str encoding: The encoding of the string provided - :param bool strict: Parse strictly according to :rfc:`3986` if True. - If False, parse similarly to the standard library's urlparse - function. 
- :returns: :class:`ParseResultBytes` or subclass thereof - """ - reference = uri.URIReference.from_string(uri_string, encoding) - if not lazy_normalize: - reference = reference.normalize() - userinfo, host, port = authority_from(reference, strict) - - to_bytes = compat.to_bytes - return cls(scheme=to_bytes(reference.scheme, encoding), - userinfo=to_bytes(userinfo, encoding), - host=to_bytes(host, encoding), - port=port, - path=to_bytes(reference.path, encoding), - query=to_bytes(reference.query, encoding), - fragment=to_bytes(reference.fragment, encoding), - uri_ref=reference, - encoding=encoding, - lazy_normalize=lazy_normalize) - - @property - def authority(self): - """Return the normalized authority.""" - return self.reference.authority.encode(self.encoding) - - def copy_with(self, scheme=misc.UseExisting, userinfo=misc.UseExisting, - host=misc.UseExisting, port=misc.UseExisting, - path=misc.UseExisting, query=misc.UseExisting, - fragment=misc.UseExisting, lazy_normalize=True): - """Create a copy of this instance replacing with specified parts.""" - attributes = zip(PARSED_COMPONENTS, - (scheme, userinfo, host, port, path, query, fragment)) - attrs_dict = {} - for name, value in attributes: - if value is misc.UseExisting: - value = getattr(self, name) - if not isinstance(value, bytes) and hasattr(value, 'encode'): - value = value.encode(self.encoding) - attrs_dict[name] = value - authority = self._generate_authority(attrs_dict) - to_str = compat.to_str - ref = self.reference.copy_with( - scheme=to_str(attrs_dict['scheme'], self.encoding), - authority=to_str(authority, self.encoding), - path=to_str(attrs_dict['path'], self.encoding), - query=to_str(attrs_dict['query'], self.encoding), - fragment=to_str(attrs_dict['fragment'], self.encoding) - ) - if not lazy_normalize: - ref = ref.normalize() - return ParseResultBytes( - uri_ref=ref, - encoding=self.encoding, - lazy_normalize=lazy_normalize, - **attrs_dict - ) - - def unsplit(self, use_idna=False): - """Create a URI bytes object from the components. - - :returns: The parsed URI reconstituted as a string. 
- :rtype: bytes - """ - parse_result = self - if use_idna and self.host: - # self.host is bytes, to encode to idna, we need to decode it - # first - host = self.host.decode(self.encoding) - hostbytes = host.encode('idna') - parse_result = self.copy_with(host=hostbytes) - if self.lazy_normalize: - parse_result = parse_result.copy_with(lazy_normalize=False) - uri = parse_result.reference.unsplit() - return uri.encode(self.encoding) - - -def split_authority(authority): - # Initialize our expected return values - userinfo = host = port = None - # Initialize an extra var we may need to use - extra_host = None - # Set-up rest in case there is no userinfo portion - rest = authority - - if '@' in authority: - userinfo, rest = authority.rsplit('@', 1) - - # Handle IPv6 host addresses - if rest.startswith('['): - host, rest = rest.split(']', 1) - host += ']' - - if ':' in rest: - extra_host, port = rest.split(':', 1) - elif not host and rest: - host = rest - - if extra_host and not host: - host = extra_host - - return userinfo, host, port - - -def authority_from(reference, strict): - try: - subauthority = reference.authority_info() - except exceptions.InvalidAuthority: - if strict: - raise - userinfo, host, port = split_authority(reference.authority) - else: - # Thanks to Richard Barrell for this idea: - # https://twitter.com/0x2ba22e11/status/617338811975139328 - userinfo, host, port = (subauthority.get(p) - for p in ('userinfo', 'host', 'port')) - - if port: - try: - port = int(port) - except ValueError: - raise exceptions.InvalidPort(port) - return userinfo, host, port diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.pyc deleted file mode 100644 index 273e2baf..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.py deleted file mode 100644 index d1d71505..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.py +++ /dev/null @@ -1,153 +0,0 @@ -"""Module containing the implementation of the URIReference class.""" -# -*- coding: utf-8 -*- -# Copyright (c) 2014 Rackspace -# Copyright (c) 2015 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from collections import namedtuple - -from . import compat -from . import misc -from . import normalizers -from ._mixin import URIMixin - - -class URIReference(namedtuple('URIReference', misc.URI_COMPONENTS), URIMixin): - """Immutable object representing a parsed URI Reference. - - .. note:: - - This class is not intended to be directly instantiated by the user. - - This object exposes attributes for the following components of a - URI: - - - scheme - - authority - - path - - query - - fragment - - .. 
attribute:: scheme - - The scheme that was parsed for the URI Reference. For example, - ``http``, ``https``, ``smtp``, ``imap``, etc. - - .. attribute:: authority - - Component of the URI that contains the user information, host, - and port sub-components. For example, - ``google.com``, ``127.0.0.1:5000``, ``username@[::1]``, - ``username:password@example.com:443``, etc. - - .. attribute:: path - - The path that was parsed for the given URI Reference. For example, - ``/``, ``/index.php``, etc. - - .. attribute:: query - - The query component for a given URI Reference. For example, ``a=b``, - ``a=b%20c``, ``a=b+c``, ``a=b,c=d,e=%20f``, etc. - - .. attribute:: fragment - - The fragment component of a URI. For example, ``section-3.1``. - - This class also provides extra attributes for easier access to information - like the subcomponents of the authority component. - - .. attribute:: userinfo - - The user information parsed from the authority. - - .. attribute:: host - - The hostname, IPv4, or IPv6 adddres parsed from the authority. - - .. attribute:: port - - The port parsed from the authority. - """ - - slots = () - - def __new__(cls, scheme, authority, path, query, fragment, - encoding='utf-8'): - """Create a new URIReference.""" - ref = super(URIReference, cls).__new__( - cls, - scheme or None, - authority or None, - path or None, - query, - fragment) - ref.encoding = encoding - return ref - - __hash__ = tuple.__hash__ - - def __eq__(self, other): - """Compare this reference to another.""" - other_ref = other - if isinstance(other, tuple): - other_ref = URIReference(*other) - elif not isinstance(other, URIReference): - try: - other_ref = URIReference.from_string(other) - except TypeError: - raise TypeError( - 'Unable to compare URIReference() to {0}()'.format( - type(other).__name__)) - - # See http://tools.ietf.org/html/rfc3986#section-6.2 - naive_equality = tuple(self) == tuple(other_ref) - return naive_equality or self.normalized_equality(other_ref) - - def normalize(self): - """Normalize this reference as described in Section 6.2.2. - - This is not an in-place normalization. Instead this creates a new - URIReference. - - :returns: A new reference object with normalized components. - :rtype: URIReference - """ - # See http://tools.ietf.org/html/rfc3986#section-6.2.2 for logic in - # this method. - return URIReference(normalizers.normalize_scheme(self.scheme or ''), - normalizers.normalize_authority( - (self.userinfo, self.host, self.port)), - normalizers.normalize_path(self.path or ''), - normalizers.normalize_query(self.query), - normalizers.normalize_fragment(self.fragment), - self.encoding) - - @classmethod - def from_string(cls, uri_string, encoding='utf-8'): - """Parse a URI reference from the given unicode URI string. - - :param str uri_string: Unicode URI to be parsed into a reference. 
- :param str encoding: The encoding of the string provided - :returns: :class:`URIReference` or subclass thereof - """ - uri_string = compat.to_str(uri_string, encoding) - - split_uri = misc.URI_MATCHER.match(uri_string).groupdict() - return cls( - split_uri['scheme'], split_uri['authority'], - normalizers.encode_component(split_uri['path'], encoding), - normalizers.encode_component(split_uri['query'], encoding), - normalizers.encode_component(split_uri['fragment'], encoding), - encoding, - ) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.pyc deleted file mode 100644 index 2c7ab1e4..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.py deleted file mode 100644 index 7fc97215..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.py +++ /dev/null @@ -1,450 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2017 Ian Stapleton Cordasco -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module containing the validation logic for rfc3986.""" -from . import exceptions -from . import misc -from . import normalizers - - -class Validator(object): - """Object used to configure validation of all objects in rfc3986. - - .. versionadded:: 1.0 - - Example usage:: - - >>> from rfc3986 import api, validators - >>> uri = api.uri_reference('https://github.com/') - >>> validator = validators.Validator().require_presence_of( - ... 'scheme', 'host', 'path', - ... ).allow_schemes( - ... 'http', 'https', - ... ).allow_hosts( - ... '127.0.0.1', 'github.com', - ... ) - >>> validator.validate(uri) - >>> invalid_uri = rfc3986.uri_reference('imap://mail.google.com') - >>> validator.validate(invalid_uri) - Traceback (most recent call last): - ... - rfc3986.exceptions.MissingComponentError: ('path was required but - missing', URIReference(scheme=u'imap', authority=u'mail.google.com', - path=None, query=None, fragment=None), ['path']) - - """ - - COMPONENT_NAMES = frozenset([ - 'scheme', - 'userinfo', - 'host', - 'port', - 'path', - 'query', - 'fragment', - ]) - - def __init__(self): - """Initialize our default validations.""" - self.allowed_schemes = set() - self.allowed_hosts = set() - self.allowed_ports = set() - self.allow_password = True - self.required_components = { - 'scheme': False, - 'userinfo': False, - 'host': False, - 'port': False, - 'path': False, - 'query': False, - 'fragment': False, - } - self.validated_components = self.required_components.copy() - - def allow_schemes(self, *schemes): - """Require the scheme to be one of the provided schemes. - - .. versionadded:: 1.0 - - :param schemes: - Schemes, without ``://`` that are allowed. - :returns: - The validator instance. 
- :rtype: - Validator - """ - for scheme in schemes: - self.allowed_schemes.add(normalizers.normalize_scheme(scheme)) - return self - - def allow_hosts(self, *hosts): - """Require the host to be one of the provided hosts. - - .. versionadded:: 1.0 - - :param hosts: - Hosts that are allowed. - :returns: - The validator instance. - :rtype: - Validator - """ - for host in hosts: - self.allowed_hosts.add(normalizers.normalize_host(host)) - return self - - def allow_ports(self, *ports): - """Require the port to be one of the provided ports. - - .. versionadded:: 1.0 - - :param ports: - Ports that are allowed. - :returns: - The validator instance. - :rtype: - Validator - """ - for port in ports: - port_int = int(port, base=10) - if 0 <= port_int <= 65535: - self.allowed_ports.add(port) - return self - - def allow_use_of_password(self): - """Allow passwords to be present in the URI. - - .. versionadded:: 1.0 - - :returns: - The validator instance. - :rtype: - Validator - """ - self.allow_password = True - return self - - def forbid_use_of_password(self): - """Prevent passwords from being included in the URI. - - .. versionadded:: 1.0 - - :returns: - The validator instance. - :rtype: - Validator - """ - self.allow_password = False - return self - - def check_validity_of(self, *components): - """Check the validity of the components provided. - - This can be specified repeatedly. - - .. versionadded:: 1.1 - - :param components: - Names of components from :attr:`Validator.COMPONENT_NAMES`. - :returns: - The validator instance. - :rtype: - Validator - """ - components = [c.lower() for c in components] - for component in components: - if component not in self.COMPONENT_NAMES: - raise ValueError( - '"{}" is not a valid component'.format(component) - ) - self.validated_components.update({ - component: True for component in components - }) - return self - - def require_presence_of(self, *components): - """Require the components provided. - - This can be specified repeatedly. - - .. versionadded:: 1.0 - - :param components: - Names of components from :attr:`Validator.COMPONENT_NAMES`. - :returns: - The validator instance. - :rtype: - Validator - """ - components = [c.lower() for c in components] - for component in components: - if component not in self.COMPONENT_NAMES: - raise ValueError( - '"{}" is not a valid component'.format(component) - ) - self.required_components.update({ - component: True for component in components - }) - return self - - def validate(self, uri): - """Check a URI for conditions specified on this validator. - - .. versionadded:: 1.0 - - :param uri: - Parsed URI to validate. - :type uri: - rfc3986.uri.URIReference - :raises MissingComponentError: - When a required component is missing. - :raises UnpermittedComponentError: - When a component is not one of those allowed. - :raises PasswordForbidden: - When a password is present in the userinfo component but is - not permitted by configuration. - :raises InvalidComponentsError: - When a component was found to be invalid. 
- """ - if not self.allow_password: - check_password(uri) - - required_components = [ - component - for component, required in self.required_components.items() - if required - ] - validated_components = [ - component - for component, required in self.validated_components.items() - if required - ] - if required_components: - ensure_required_components_exist(uri, required_components) - if validated_components: - ensure_components_are_valid(uri, validated_components) - - ensure_one_of(self.allowed_schemes, uri, 'scheme') - ensure_one_of(self.allowed_hosts, uri, 'host') - ensure_one_of(self.allowed_ports, uri, 'port') - - -def check_password(uri): - """Assert that there is no password present in the uri.""" - userinfo = uri.userinfo - if not userinfo: - return - credentials = userinfo.split(':', 1) - if len(credentials) <= 1: - return - raise exceptions.PasswordForbidden(uri) - - -def ensure_one_of(allowed_values, uri, attribute): - """Assert that the uri's attribute is one of the allowed values.""" - value = getattr(uri, attribute) - if value is not None and allowed_values and value not in allowed_values: - raise exceptions.UnpermittedComponentError( - attribute, value, allowed_values, - ) - - -def ensure_required_components_exist(uri, required_components): - """Assert that all required components are present in the URI.""" - missing_components = sorted([ - component - for component in required_components - if getattr(uri, component) is None - ]) - if missing_components: - raise exceptions.MissingComponentError(uri, *missing_components) - - -def is_valid(value, matcher, require): - """Determine if a value is valid based on the provided matcher. - - :param str value: - Value to validate. - :param matcher: - Compiled regular expression to use to validate the value. - :param require: - Whether or not the value is required. - """ - if require: - return (value is not None - and matcher.match(value)) - - # require is False and value is not None - return value is None or matcher.match(value) - - -def authority_is_valid(authority, host=None, require=False): - """Determine if the authority string is valid. - - :param str authority: - The authority to validate. - :param str host: - (optional) The host portion of the authority to validate. - :param bool require: - (optional) Specify if authority must not be None. - :returns: - ``True`` if valid, ``False`` otherwise - :rtype: - bool - """ - validated = is_valid(authority, misc.SUBAUTHORITY_MATCHER, require) - if validated and host is not None: - return host_is_valid(host, require) - return validated - - -def host_is_valid(host, require=False): - """Determine if the host string is valid. - - :param str host: - The host to validate. - :param bool require: - (optional) Specify if host must not be None. - :returns: - ``True`` if valid, ``False`` otherwise - :rtype: - bool - """ - validated = is_valid(host, misc.HOST_MATCHER, require) - if validated and host is not None and misc.IPv4_MATCHER.match(host): - return valid_ipv4_host_address(host) - elif validated and host is not None and misc.IPv6_MATCHER.match(host): - return misc.IPv6_NO_RFC4007_MATCHER.match(host) is not None - return validated - - -def scheme_is_valid(scheme, require=False): - """Determine if the scheme is valid. - - :param str scheme: - The scheme string to validate. - :param bool require: - (optional) Set to ``True`` to require the presence of a scheme. - :returns: - ``True`` if the scheme is valid. ``False`` otherwise. 
- :rtype: - bool - """ - return is_valid(scheme, misc.SCHEME_MATCHER, require) - - -def path_is_valid(path, require=False): - """Determine if the path component is valid. - - :param str path: - The path string to validate. - :param bool require: - (optional) Set to ``True`` to require the presence of a path. - :returns: - ``True`` if the path is valid. ``False`` otherwise. - :rtype: - bool - """ - return is_valid(path, misc.PATH_MATCHER, require) - - -def query_is_valid(query, require=False): - """Determine if the query component is valid. - - :param str query: - The query string to validate. - :param bool require: - (optional) Set to ``True`` to require the presence of a query. - :returns: - ``True`` if the query is valid. ``False`` otherwise. - :rtype: - bool - """ - return is_valid(query, misc.QUERY_MATCHER, require) - - -def fragment_is_valid(fragment, require=False): - """Determine if the fragment component is valid. - - :param str fragment: - The fragment string to validate. - :param bool require: - (optional) Set to ``True`` to require the presence of a fragment. - :returns: - ``True`` if the fragment is valid. ``False`` otherwise. - :rtype: - bool - """ - return is_valid(fragment, misc.FRAGMENT_MATCHER, require) - - -def valid_ipv4_host_address(host): - """Determine if the given host is a valid IPv4 address.""" - # If the host exists, and it might be IPv4, check each byte in the - # address. - return all([0 <= int(byte, base=10) <= 255 for byte in host.split('.')]) - - -_COMPONENT_VALIDATORS = { - 'scheme': scheme_is_valid, - 'path': path_is_valid, - 'query': query_is_valid, - 'fragment': fragment_is_valid, -} - -_SUBAUTHORITY_VALIDATORS = set(['userinfo', 'host', 'port']) - - -def subauthority_component_is_valid(uri, component): - """Determine if the userinfo, host, and port are valid.""" - try: - subauthority_dict = uri.authority_info() - except exceptions.InvalidAuthority: - return False - - # If we can parse the authority into sub-components and we're not - # validating the port, we can assume it's valid. - if component == 'host': - return host_is_valid(subauthority_dict['host']) - elif component != 'port': - return True - - try: - port = int(subauthority_dict['port']) - except TypeError: - # If the port wasn't provided it'll be None and int(None) raises a - # TypeError - return True - - return (0 <= port <= 65535) - - -def ensure_components_are_valid(uri, validated_components): - """Assert that all components are valid in the URI.""" - invalid_components = set([]) - for component in validated_components: - if component in _SUBAUTHORITY_VALIDATORS: - if not subauthority_component_is_valid(uri, component): - invalid_components.add(component) - # Python's peephole optimizer means that while this continue *is* - # actually executed, coverage.py cannot detect that. 
See also, - # https://bitbucket.org/ned/coveragepy/issues/198/continue-marked-as-not-covered - continue # nocov: Python 2.7, 3.3, 3.4 - - validator = _COMPONENT_VALIDATORS[component] - if not validator(getattr(uri, component)): - invalid_components.add(component) - - if invalid_components: - raise exceptions.InvalidComponentsError(uri, *invalid_components) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.pyc deleted file mode 100644 index e42a92d6..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.py deleted file mode 100644 index 190c0239..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.py +++ /dev/null @@ -1,868 +0,0 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson <benjamin@python.org>" -__version__ = "1.10.0" - - -# Useful for very coarse version differentiation. -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
- class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. - delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." + fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. 
- - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - 
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." + attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - 
MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", 
"urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = 
operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - raise value from from_value -""") -else: - def raise_from(value, from_value): - 
raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. - if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. 
-# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyc deleted file mode 100644 index 487709c1..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py deleted file mode 100644 index d6594eb2..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys - -try: - # Our match_hostname function is the same as 3.5's, so we only want to - # import the match_hostname function if it's at least that good. - if sys.version_info < (3, 5): - raise ImportError("Fallback to vendored code") - - from ssl import CertificateError, match_hostname -except ImportError: - try: - # Backport of the function from a pypi module - from backports.ssl_match_hostname import CertificateError, match_hostname - except ImportError: - # Our vendored copy - from ._implementation import CertificateError, match_hostname - -# Not needed, but documenting what we provide. 
-__all__ = ('CertificateError', 'match_hostname') diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.pyc deleted file mode 100644 index 6bea5bfc..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyc deleted file mode 100644 index b89bd415..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.py deleted file mode 100644 index a6ade6e9..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.py +++ /dev/null @@ -1,455 +0,0 @@ -from __future__ import absolute_import -import collections -import functools -import logging - -from ._collections import RecentlyUsedContainer -from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool -from .connectionpool import port_by_scheme -from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown -from .packages import six -from .packages.six.moves.urllib.parse import urljoin -from .request import RequestMethods -from .util.url import parse_url -from .util.retry import Retry - - -__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] - - -log = logging.getLogger(__name__) - -SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', - 'ssl_version', 'ca_cert_dir', 'ssl_context', - 'key_password') - -# All known keyword arguments that could be provided to the pool manager, its -# pools, or the underlying connections. This is used to construct a pool key. -_key_fields = ( - 'key_scheme', # str - 'key_host', # str - 'key_port', # int - 'key_timeout', # int or float or Timeout - 'key_retries', # int or Retry - 'key_strict', # bool - 'key_block', # bool - 'key_source_address', # str - 'key_key_file', # str - 'key_key_password', # str - 'key_cert_file', # str - 'key_cert_reqs', # str - 'key_ca_certs', # str - 'key_ssl_version', # str - 'key_ca_cert_dir', # str - 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext - 'key_maxsize', # int - 'key_headers', # dict - 'key__proxy', # parsed proxy url - 'key__proxy_headers', # dict - 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples - 'key__socks_options', # dict - 'key_assert_hostname', # bool or string - 'key_assert_fingerprint', # str - 'key_server_hostname', # str -) - -#: The namedtuple class used to construct keys for the connection pool. -#: All custom key schemes should include the fields in this key at a minimum. -PoolKey = collections.namedtuple('PoolKey', _key_fields) - - -def _default_key_normalizer(key_class, request_context): - """ - Create a pool key out of a request context dictionary. - - According to RFC 3986, both the scheme and host are case-insensitive. - Therefore, this function normalizes both before constructing the pool - key for an HTTPS request. If you wish to change this behaviour, provide - alternate callables to ``key_fn_by_scheme``. 
- - :param key_class: - The class to use when constructing the key. This should be a namedtuple - with the ``scheme`` and ``host`` keys at a minimum. - :type key_class: namedtuple - :param request_context: - A dictionary-like object that contain the context for a request. - :type request_context: dict - - :return: A namedtuple that can be used as a connection pool key. - :rtype: PoolKey - """ - # Since we mutate the dictionary, make a copy first - context = request_context.copy() - context['scheme'] = context['scheme'].lower() - context['host'] = context['host'].lower() - - # These are both dictionaries and need to be transformed into frozensets - for key in ('headers', '_proxy_headers', '_socks_options'): - if key in context and context[key] is not None: - context[key] = frozenset(context[key].items()) - - # The socket_options key may be a list and needs to be transformed into a - # tuple. - socket_opts = context.get('socket_options') - if socket_opts is not None: - context['socket_options'] = tuple(socket_opts) - - # Map the kwargs to the names in the namedtuple - this is necessary since - # namedtuples can't have fields starting with '_'. - for key in list(context.keys()): - context['key_' + key] = context.pop(key) - - # Default to ``None`` for keys missing from the context - for field in key_class._fields: - if field not in context: - context[field] = None - - return key_class(**context) - - -#: A dictionary that maps a scheme to a callable that creates a pool key. -#: This can be used to alter the way pool keys are constructed, if desired. -#: Each PoolManager makes a copy of this dictionary so they can be configured -#: globally here, or individually on the instance. -key_fn_by_scheme = { - 'http': functools.partial(_default_key_normalizer, PoolKey), - 'https': functools.partial(_default_key_normalizer, PoolKey), -} - -pool_classes_by_scheme = { - 'http': HTTPConnectionPool, - 'https': HTTPSConnectionPool, -} - - -class PoolManager(RequestMethods): - """ - Allows for arbitrary requests while transparently keeping track of - necessary connection pools for you. - - :param num_pools: - Number of connection pools to cache before discarding the least - recently used pool. - - :param headers: - Headers to include with all requests, unless other headers are given - explicitly. - - :param \\**connection_pool_kw: - Additional parameters are used to create fresh - :class:`urllib3.connectionpool.ConnectionPool` instances. - - Example:: - - >>> manager = PoolManager(num_pools=2) - >>> r = manager.request('GET', 'http://google.com/') - >>> r = manager.request('GET', 'http://google.com/mail') - >>> r = manager.request('GET', 'http://yahoo.com/') - >>> len(manager.pools) - 2 - - """ - - proxy = None - - def __init__(self, num_pools=10, headers=None, **connection_pool_kw): - RequestMethods.__init__(self, headers) - self.connection_pool_kw = connection_pool_kw - self.pools = RecentlyUsedContainer(num_pools, - dispose_func=lambda p: p.close()) - - # Locally set the pool classes and keys so other PoolManagers can - # override them. - self.pool_classes_by_scheme = pool_classes_by_scheme - self.key_fn_by_scheme = key_fn_by_scheme.copy() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.clear() - # Return False to re-raise any potential exceptions - return False - - def _new_pool(self, scheme, host, port, request_context=None): - """ - Create a new :class:`ConnectionPool` based on host, port, scheme, and - any additional pool keyword arguments. 
- - If ``request_context`` is provided, it is provided as keyword arguments - to the pool class used. This method is used to actually create the - connection pools handed out by :meth:`connection_from_url` and - companion methods. It is intended to be overridden for customization. - """ - pool_cls = self.pool_classes_by_scheme[scheme] - if request_context is None: - request_context = self.connection_pool_kw.copy() - - # Although the context has everything necessary to create the pool, - # this function has historically only used the scheme, host, and port - # in the positional args. When an API change is acceptable these can - # be removed. - for key in ('scheme', 'host', 'port'): - request_context.pop(key, None) - - if scheme == 'http': - for kw in SSL_KEYWORDS: - request_context.pop(kw, None) - - return pool_cls(host, port, **request_context) - - def clear(self): - """ - Empty our store of pools and direct them all to close. - - This will not affect in-flight connections, but they will not be - re-used after completion. - """ - self.pools.clear() - - def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): - """ - Get a :class:`ConnectionPool` based on the host, port, and scheme. - - If ``port`` isn't given, it will be derived from the ``scheme`` using - ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is - provided, it is merged with the instance's ``connection_pool_kw`` - variable and used to create the new connection pool, if one is - needed. - """ - - if not host: - raise LocationValueError("No host specified.") - - request_context = self._merge_pool_kwargs(pool_kwargs) - request_context['scheme'] = scheme or 'http' - if not port: - port = port_by_scheme.get(request_context['scheme'].lower(), 80) - request_context['port'] = port - request_context['host'] = host - - return self.connection_from_context(request_context) - - def connection_from_context(self, request_context): - """ - Get a :class:`ConnectionPool` based on the request context. - - ``request_context`` must at least contain the ``scheme`` key and its - value must be a key in ``key_fn_by_scheme`` instance variable. - """ - scheme = request_context['scheme'].lower() - pool_key_constructor = self.key_fn_by_scheme[scheme] - pool_key = pool_key_constructor(request_context) - - return self.connection_from_pool_key(pool_key, request_context=request_context) - - def connection_from_pool_key(self, pool_key, request_context=None): - """ - Get a :class:`ConnectionPool` based on the provided pool key. - - ``pool_key`` should be a namedtuple that only contains immutable - objects. At a minimum it must have the ``scheme``, ``host``, and - ``port`` fields. - """ - with self.pools.lock: - # If the scheme, host, or port doesn't match existing open - # connections, open a new ConnectionPool. - pool = self.pools.get(pool_key) - if pool: - return pool - - # Make a fresh ConnectionPool of the desired type - scheme = request_context['scheme'] - host = request_context['host'] - port = request_context['port'] - pool = self._new_pool(scheme, host, port, request_context=request_context) - self.pools[pool_key] = pool - - return pool - - def connection_from_url(self, url, pool_kwargs=None): - """ - Similar to :func:`urllib3.connectionpool.connection_from_url`. - - If ``pool_kwargs`` is not provided and a new pool needs to be - constructed, ``self.connection_pool_kw`` is used to initialize - the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` - is provided, it is used instead. 
Note that if a new pool does not - need to be created for the request, the provided ``pool_kwargs`` are - not used. - """ - u = parse_url(url) - return self.connection_from_host(u.host, port=u.port, scheme=u.scheme, - pool_kwargs=pool_kwargs) - - def _merge_pool_kwargs(self, override): - """ - Merge a dictionary of override values for self.connection_pool_kw. - - This does not modify self.connection_pool_kw and returns a new dict. - Any keys in the override dictionary with a value of ``None`` are - removed from the merged dictionary. - """ - base_pool_kwargs = self.connection_pool_kw.copy() - if override: - for key, value in override.items(): - if value is None: - try: - del base_pool_kwargs[key] - except KeyError: - pass - else: - base_pool_kwargs[key] = value - return base_pool_kwargs - - def urlopen(self, method, url, redirect=True, **kw): - """ - Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` - with custom cross-host redirect logic and only sends the request-uri - portion of the ``url``. - - The given ``url`` parameter must be absolute, such that an appropriate - :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. - """ - u = parse_url(url) - conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) - - kw['assert_same_host'] = False - kw['redirect'] = False - - if 'headers' not in kw: - kw['headers'] = self.headers.copy() - - if self.proxy is not None and u.scheme == "http": - response = conn.urlopen(method, url, **kw) - else: - response = conn.urlopen(method, u.request_uri, **kw) - - redirect_location = redirect and response.get_redirect_location() - if not redirect_location: - return response - - # Support relative URLs for redirecting. - redirect_location = urljoin(url, redirect_location) - - # RFC 7231, Section 6.4.4 - if response.status == 303: - method = 'GET' - - retries = kw.get('retries') - if not isinstance(retries, Retry): - retries = Retry.from_int(retries, redirect=redirect) - - # Strip headers marked as unsafe to forward to the redirected location. - # Check remove_headers_on_redirect to avoid a potential network call within - # conn.is_same_host() which may use socket.gethostbyname() in the future. - if (retries.remove_headers_on_redirect - and not conn.is_same_host(redirect_location)): - headers = list(six.iterkeys(kw['headers'])) - for header in headers: - if header.lower() in retries.remove_headers_on_redirect: - kw['headers'].pop(header, None) - - try: - retries = retries.increment(method, url, response=response, _pool=conn) - except MaxRetryError: - if retries.raise_on_redirect: - raise - return response - - kw['retries'] = retries - kw['redirect'] = redirect - - log.info("Redirecting %s -> %s", url, redirect_location) - return self.urlopen(method, redirect_location, **kw) - - -class ProxyManager(PoolManager): - """ - Behaves just like :class:`PoolManager`, but sends all requests through - the defined proxy, using the CONNECT method for HTTPS URLs. - - :param proxy_url: - The URL of the proxy to be used. - - :param proxy_headers: - A dictionary containing headers that will be sent to the proxy. In case - of HTTP they are being sent with each request, while in the - HTTPS/CONNECT case they are sent only once. Could be used for proxy - authentication. 
- - Example: - >>> proxy = urllib3.ProxyManager('http://localhost:3128/') - >>> r1 = proxy.request('GET', 'http://google.com/') - >>> r2 = proxy.request('GET', 'http://httpbin.org/') - >>> len(proxy.pools) - 1 - >>> r3 = proxy.request('GET', 'https://httpbin.org/') - >>> r4 = proxy.request('GET', 'https://twitter.com/') - >>> len(proxy.pools) - 3 - - """ - - def __init__(self, proxy_url, num_pools=10, headers=None, - proxy_headers=None, **connection_pool_kw): - - if isinstance(proxy_url, HTTPConnectionPool): - proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, - proxy_url.port) - proxy = parse_url(proxy_url) - if not proxy.port: - port = port_by_scheme.get(proxy.scheme, 80) - proxy = proxy._replace(port=port) - - if proxy.scheme not in ("http", "https"): - raise ProxySchemeUnknown(proxy.scheme) - - self.proxy = proxy - self.proxy_headers = proxy_headers or {} - - connection_pool_kw['_proxy'] = self.proxy - connection_pool_kw['_proxy_headers'] = self.proxy_headers - - super(ProxyManager, self).__init__( - num_pools, headers, **connection_pool_kw) - - def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): - if scheme == "https": - return super(ProxyManager, self).connection_from_host( - host, port, scheme, pool_kwargs=pool_kwargs) - - return super(ProxyManager, self).connection_from_host( - self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs) - - def _set_proxy_headers(self, url, headers=None): - """ - Sets headers needed by proxies: specifically, the Accept and Host - headers. Only sets headers not provided by the user. - """ - headers_ = {'Accept': '*/*'} - - netloc = parse_url(url).netloc - if netloc: - headers_['Host'] = netloc - - if headers: - headers_.update(headers) - return headers_ - - def urlopen(self, method, url, redirect=True, **kw): - "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." - u = parse_url(url) - - if u.scheme == "http": - # For proxied HTTPS requests, httplib sets the necessary headers - # on the CONNECT to the proxy. For HTTP, we'll definitely - # need to set 'Host' at the very least. - headers = kw.get('headers', self.headers) - kw['headers'] = self._set_proxy_headers(url, headers) - - return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) - - -def proxy_from_url(url, **kw): - return ProxyManager(proxy_url=url, **kw) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.pyc deleted file mode 100644 index 57ca2a51..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/request.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/request.py deleted file mode 100644 index 8f2f44bb..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/request.py +++ /dev/null @@ -1,150 +0,0 @@ -from __future__ import absolute_import - -from .filepost import encode_multipart_formdata -from .packages.six.moves.urllib.parse import urlencode - - -__all__ = ['RequestMethods'] - - -class RequestMethods(object): - """ - Convenience mixin for classes who implement a :meth:`urlopen` method, such - as :class:`~urllib3.connectionpool.HTTPConnectionPool` and - :class:`~urllib3.poolmanager.PoolManager`. - - Provides behavior for making common types of HTTP request methods and - decides which type of request field encoding to use. 
- - Specifically, - - :meth:`.request_encode_url` is for sending requests whose fields are - encoded in the URL (such as GET, HEAD, DELETE). - - :meth:`.request_encode_body` is for sending requests whose fields are - encoded in the *body* of the request using multipart or www-form-urlencoded - (such as for POST, PUT, PATCH). - - :meth:`.request` is for making any kind of request, it will look up the - appropriate encoding format and use one of the above two methods to make - the request. - - Initializer parameters: - - :param headers: - Headers to include with all requests, unless other headers are given - explicitly. - """ - - _encode_url_methods = {'DELETE', 'GET', 'HEAD', 'OPTIONS'} - - def __init__(self, headers=None): - self.headers = headers or {} - - def urlopen(self, method, url, body=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **kw): # Abstract - raise NotImplementedError("Classes extending RequestMethods must implement " - "their own ``urlopen`` method.") - - def request(self, method, url, fields=None, headers=None, **urlopen_kw): - """ - Make a request using :meth:`urlopen` with the appropriate encoding of - ``fields`` based on the ``method`` used. - - This is a convenience method that requires the least amount of manual - effort. It can be used in most situations, while still having the - option to drop down to more specific methods when necessary, such as - :meth:`request_encode_url`, :meth:`request_encode_body`, - or even the lowest level :meth:`urlopen`. - """ - method = method.upper() - - urlopen_kw['request_url'] = url - - if method in self._encode_url_methods: - return self.request_encode_url(method, url, fields=fields, - headers=headers, - **urlopen_kw) - else: - return self.request_encode_body(method, url, fields=fields, - headers=headers, - **urlopen_kw) - - def request_encode_url(self, method, url, fields=None, headers=None, - **urlopen_kw): - """ - Make a request using :meth:`urlopen` with the ``fields`` encoded in - the url. This is useful for request methods like GET, HEAD, DELETE, etc. - """ - if headers is None: - headers = self.headers - - extra_kw = {'headers': headers} - extra_kw.update(urlopen_kw) - - if fields: - url += '?' + urlencode(fields) - - return self.urlopen(method, url, **extra_kw) - - def request_encode_body(self, method, url, fields=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **urlopen_kw): - """ - Make a request using :meth:`urlopen` with the ``fields`` encoded in - the body. This is useful for request methods like POST, PUT, PATCH, etc. - - When ``encode_multipart=True`` (default), then - :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode - the payload with the appropriate content type. Otherwise - :meth:`urllib.urlencode` is used with the - 'application/x-www-form-urlencoded' content type. - - Multipart encoding must be used when posting files, and it's reasonably - safe to use it in other times too. However, it may break request - signing, such as with OAuth. - - Supports an optional ``fields`` parameter of key/value strings AND - key/filetuple. A filetuple is a (filename, data, MIME type) tuple where - the MIME type is optional. 
For example:: - - fields = { - 'foo': 'bar', - 'fakefile': ('foofile.txt', 'contents of foofile'), - 'realfile': ('barfile.txt', open('realfile').read()), - 'typedfile': ('bazfile.bin', open('bazfile').read(), - 'image/jpeg'), - 'nonamefile': 'contents of nonamefile field', - } - - When uploading a file, providing a filename (the first parameter of the - tuple) is optional but recommended to best mimic behavior of browsers. - - Note that if ``headers`` are supplied, the 'Content-Type' header will - be overwritten because it depends on the dynamic random boundary string - which is used to compose the body of the request. The random boundary - string can be explicitly set with the ``multipart_boundary`` parameter. - """ - if headers is None: - headers = self.headers - - extra_kw = {'headers': {}} - - if fields: - if 'body' in urlopen_kw: - raise TypeError( - "request got values for both 'fields' and 'body', can only specify one.") - - if encode_multipart: - body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) - else: - body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' - - extra_kw['body'] = body - extra_kw['headers'] = {'Content-Type': content_type} - - extra_kw['headers'].update(headers) - extra_kw.update(urlopen_kw) - - return self.urlopen(method, url, **extra_kw) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pyc deleted file mode 100644 index 247c437f..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/response.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/response.py deleted file mode 100644 index 4f857932..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/response.py +++ /dev/null @@ -1,760 +0,0 @@ -from __future__ import absolute_import -from contextlib import contextmanager -import zlib -import io -import logging -from socket import timeout as SocketTimeout -from socket import error as SocketError - -try: - import brotli -except ImportError: - brotli = None - -from ._collections import HTTPHeaderDict -from .exceptions import ( - BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, - ResponseNotChunked, IncompleteRead, InvalidHeader -) -from .packages.six import string_types as basestring, PY3 -from .packages.six.moves import http_client as httplib -from .connection import HTTPException, BaseSSLError -from .util.response import is_fp_closed, is_response_to_head - -log = logging.getLogger(__name__) - - -class DeflateDecoder(object): - - def __init__(self): - self._first_try = True - self._data = b'' - self._obj = zlib.decompressobj() - - def __getattr__(self, name): - return getattr(self._obj, name) - - def decompress(self, data): - if not data: - return data - - if not self._first_try: - return self._obj.decompress(data) - - self._data += data - try: - decompressed = self._obj.decompress(data) - if decompressed: - self._first_try = False - self._data = None - return decompressed - except zlib.error: - self._first_try = False - self._obj = zlib.decompressobj(-zlib.MAX_WBITS) - try: - return self.decompress(self._data) - finally: - self._data = None - - -class GzipDecoderState(object): - - FIRST_MEMBER = 0 - OTHER_MEMBERS = 1 - SWALLOW_DATA = 2 - - -class GzipDecoder(object): - - def __init__(self): - self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) - self._state = 
GzipDecoderState.FIRST_MEMBER - - def __getattr__(self, name): - return getattr(self._obj, name) - - def decompress(self, data): - ret = bytearray() - if self._state == GzipDecoderState.SWALLOW_DATA or not data: - return bytes(ret) - while True: - try: - ret += self._obj.decompress(data) - except zlib.error: - previous_state = self._state - # Ignore data after the first error - self._state = GzipDecoderState.SWALLOW_DATA - if previous_state == GzipDecoderState.OTHER_MEMBERS: - # Allow trailing garbage acceptable in other gzip clients - return bytes(ret) - raise - data = self._obj.unused_data - if not data: - return bytes(ret) - self._state = GzipDecoderState.OTHER_MEMBERS - self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) - - -if brotli is not None: - class BrotliDecoder(object): - # Supports both 'brotlipy' and 'Brotli' packages - # since they share an import name. The top branches - # are for 'brotlipy' and bottom branches for 'Brotli' - def __init__(self): - self._obj = brotli.Decompressor() - - def decompress(self, data): - if hasattr(self._obj, 'decompress'): - return self._obj.decompress(data) - return self._obj.process(data) - - def flush(self): - if hasattr(self._obj, 'flush'): - return self._obj.flush() - return b'' - - -class MultiDecoder(object): - """ - From RFC7231: - If one or more encodings have been applied to a representation, the - sender that applied the encodings MUST generate a Content-Encoding - header field that lists the content codings in the order in which - they were applied. - """ - - def __init__(self, modes): - self._decoders = [_get_decoder(m.strip()) for m in modes.split(',')] - - def flush(self): - return self._decoders[0].flush() - - def decompress(self, data): - for d in reversed(self._decoders): - data = d.decompress(data) - return data - - -def _get_decoder(mode): - if ',' in mode: - return MultiDecoder(mode) - - if mode == 'gzip': - return GzipDecoder() - - if brotli is not None and mode == 'br': - return BrotliDecoder() - - return DeflateDecoder() - - -class HTTPResponse(io.IOBase): - """ - HTTP Response container. - - Backwards-compatible to httplib's HTTPResponse but the response ``body`` is - loaded and decoded on-demand when the ``data`` property is accessed. This - class is also compatible with the Python standard library's :mod:`io` - module, and can hence be treated as a readable object in the context of that - framework. - - Extra parameters for behaviour not present in httplib.HTTPResponse: - - :param preload_content: - If True, the response's body will be preloaded during construction. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - - :param original_response: - When this HTTPResponse wrapper is generated from an httplib.HTTPResponse - object, it's convenient to include the original for debug purposes. It's - otherwise unused. - - :param retries: - The retries contains the last :class:`~urllib3.util.retry.Retry` that - was used during the request. - - :param enforce_content_length: - Enforce content length checking. Body returned by server must match - value of Content-Length header, if present. Otherwise, raise error. 
- """ - - CONTENT_DECODERS = ['gzip', 'deflate'] - if brotli is not None: - CONTENT_DECODERS += ['br'] - REDIRECT_STATUSES = [301, 302, 303, 307, 308] - - def __init__(self, body='', headers=None, status=0, version=0, reason=None, - strict=0, preload_content=True, decode_content=True, - original_response=None, pool=None, connection=None, msg=None, - retries=None, enforce_content_length=False, - request_method=None, request_url=None): - - if isinstance(headers, HTTPHeaderDict): - self.headers = headers - else: - self.headers = HTTPHeaderDict(headers) - self.status = status - self.version = version - self.reason = reason - self.strict = strict - self.decode_content = decode_content - self.retries = retries - self.enforce_content_length = enforce_content_length - - self._decoder = None - self._body = None - self._fp = None - self._original_response = original_response - self._fp_bytes_read = 0 - self.msg = msg - self._request_url = request_url - - if body and isinstance(body, (basestring, bytes)): - self._body = body - - self._pool = pool - self._connection = connection - - if hasattr(body, 'read'): - self._fp = body - - # Are we using the chunked-style of transfer encoding? - self.chunked = False - self.chunk_left = None - tr_enc = self.headers.get('transfer-encoding', '').lower() - # Don't incur the penalty of creating a list and then discarding it - encodings = (enc.strip() for enc in tr_enc.split(",")) - if "chunked" in encodings: - self.chunked = True - - # Determine length of response - self.length_remaining = self._init_length(request_method) - - # If requested, preload the body. - if preload_content and not self._body: - self._body = self.read(decode_content=decode_content) - - def get_redirect_location(self): - """ - Should we redirect and where to? - - :returns: Truthy redirect location string if we got a redirect status - code and valid location. ``None`` if redirect status and no - location. ``False`` if not a redirect status code. - """ - if self.status in self.REDIRECT_STATUSES: - return self.headers.get('location') - - return False - - def release_conn(self): - if not self._pool or not self._connection: - return - - self._pool._put_conn(self._connection) - self._connection = None - - @property - def data(self): - # For backwords-compat with earlier urllib3 0.4 and earlier. - if self._body: - return self._body - - if self._fp: - return self.read(cache_content=True) - - @property - def connection(self): - return self._connection - - def isclosed(self): - return is_fp_closed(self._fp) - - def tell(self): - """ - Obtain the number of bytes pulled over the wire so far. May differ from - the amount of content returned by :meth:``HTTPResponse.read`` if bytes - are encoded on the wire (e.g, compressed). - """ - return self._fp_bytes_read - - def _init_length(self, request_method): - """ - Set initial length value for Response content if available. - """ - length = self.headers.get('content-length') - - if length is not None: - if self.chunked: - # This Response will fail with an IncompleteRead if it can't be - # received as chunked. This method falls back to attempt reading - # the response before raising an exception. - log.warning("Received response with both Content-Length and " - "Transfer-Encoding set. This is expressly forbidden " - "by RFC 7230 sec 3.3.2. 
Ignoring Content-Length and " - "attempting to process response as Transfer-Encoding: " - "chunked.") - return None - - try: - # RFC 7230 section 3.3.2 specifies multiple content lengths can - # be sent in a single Content-Length header - # (e.g. Content-Length: 42, 42). This line ensures the values - # are all valid ints and that as long as the `set` length is 1, - # all values are the same. Otherwise, the header is invalid. - lengths = set([int(val) for val in length.split(',')]) - if len(lengths) > 1: - raise InvalidHeader("Content-Length contained multiple " - "unmatching values (%s)" % length) - length = lengths.pop() - except ValueError: - length = None - else: - if length < 0: - length = None - - # Convert status to int for comparison - # In some cases, httplib returns a status of "_UNKNOWN" - try: - status = int(self.status) - except ValueError: - status = 0 - - # Check for responses that shouldn't include a body - if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': - length = 0 - - return length - - def _init_decoder(self): - """ - Set-up the _decoder attribute if necessary. - """ - # Note: content-encoding value should be case-insensitive, per RFC 7230 - # Section 3.2 - content_encoding = self.headers.get('content-encoding', '').lower() - if self._decoder is None: - if content_encoding in self.CONTENT_DECODERS: - self._decoder = _get_decoder(content_encoding) - elif ',' in content_encoding: - encodings = [ - e.strip() for e in content_encoding.split(',') - if e.strip() in self.CONTENT_DECODERS] - if len(encodings): - self._decoder = _get_decoder(content_encoding) - - DECODER_ERROR_CLASSES = (IOError, zlib.error) - if brotli is not None: - DECODER_ERROR_CLASSES += (brotli.error,) - - def _decode(self, data, decode_content, flush_decoder): - """ - Decode the data passed in and potentially flush the decoder. - """ - if not decode_content: - return data - - try: - if self._decoder: - data = self._decoder.decompress(data) - except self.DECODER_ERROR_CLASSES as e: - content_encoding = self.headers.get('content-encoding', '').lower() - raise DecodeError( - "Received response with content-encoding: %s, but " - "failed to decode it." % content_encoding, e) - if flush_decoder: - data += self._flush_decoder() - - return data - - def _flush_decoder(self): - """ - Flushes the decoder. Should only be called if the decoder is actually - being used. - """ - if self._decoder: - buf = self._decoder.decompress(b'') - return buf + self._decoder.flush() - - return b'' - - @contextmanager - def _error_catcher(self): - """ - Catch low-level python exceptions, instead re-raising urllib3 - variants, so that low-level exceptions are not leaked in the - high-level api. - - On exit, release the connection back to the pool. - """ - clean_exit = False - - try: - try: - yield - - except SocketTimeout: - # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but - # there is yet no clean way to get at it from this context. - raise ReadTimeoutError(self._pool, None, 'Read timed out.') - - except BaseSSLError as e: - # FIXME: Is there a better way to differentiate between SSLErrors? - if 'read operation timed out' not in str(e): # Defensive: - # This shouldn't happen but just in case we're missing an edge - # case, let's avoid swallowing SSL errors. - raise - - raise ReadTimeoutError(self._pool, None, 'Read timed out.') - - except (HTTPException, SocketError) as e: - # This includes IncompleteRead. 
- raise ProtocolError('Connection broken: %r' % e, e) - - # If no exception is thrown, we should avoid cleaning up - # unnecessarily. - clean_exit = True - finally: - # If we didn't terminate cleanly, we need to throw away our - # connection. - if not clean_exit: - # The response may not be closed but we're not going to use it - # anymore so close it now to ensure that the connection is - # released back to the pool. - if self._original_response: - self._original_response.close() - - # Closing the response may not actually be sufficient to close - # everything, so if we have a hold of the connection close that - # too. - if self._connection: - self._connection.close() - - # If we hold the original response but it's closed now, we should - # return the connection back to the pool. - if self._original_response and self._original_response.isclosed(): - self.release_conn() - - def read(self, amt=None, decode_content=None, cache_content=False): - """ - Similar to :meth:`httplib.HTTPResponse.read`, but with two additional - parameters: ``decode_content`` and ``cache_content``. - - :param amt: - How much of the content to read. If specified, caching is skipped - because it doesn't make sense to cache partial content as the full - response. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - - :param cache_content: - If True, will save the returned data such that the same result is - returned despite of the state of the underlying file object. This - is useful if you want the ``.data`` property to continue working - after having ``.read()`` the file object. (Overridden if ``amt`` is - set.) - """ - self._init_decoder() - if decode_content is None: - decode_content = self.decode_content - - if self._fp is None: - return - - flush_decoder = False - data = None - - with self._error_catcher(): - if amt is None: - # cStringIO doesn't like amt=None - data = self._fp.read() - flush_decoder = True - else: - cache_content = False - data = self._fp.read(amt) - if amt != 0 and not data: # Platform-specific: Buggy versions of Python. - # Close the connection when no data is returned - # - # This is redundant to what httplib/http.client _should_ - # already do. However, versions of python released before - # December 15, 2012 (http://bugs.python.org/issue16298) do - # not properly close the connection in all cases. There is - # no harm in redundantly calling close. - self._fp.close() - flush_decoder = True - if self.enforce_content_length and self.length_remaining not in (0, None): - # This is an edge case that httplib failed to cover due - # to concerns of backward compatibility. We're - # addressing it here to make sure IncompleteRead is - # raised during streaming, so all calls with incorrect - # Content-Length are caught. - raise IncompleteRead(self._fp_bytes_read, self.length_remaining) - - if data: - self._fp_bytes_read += len(data) - if self.length_remaining is not None: - self.length_remaining -= len(data) - - data = self._decode(data, decode_content, flush_decoder) - - if cache_content: - self._body = data - - return data - - def stream(self, amt=2**16, decode_content=None): - """ - A generator wrapper for the read() method. A call will block until - ``amt`` bytes have been read from the connection or until the - connection is closed. - - :param amt: - How much of the content to read. The generator will return up to - much data per iteration, but may return less. This is particularly - likely when using compressed data. 
However, the empty string will - never be returned. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - """ - if self.chunked and self.supports_chunked_reads(): - for line in self.read_chunked(amt, decode_content=decode_content): - yield line - else: - while not is_fp_closed(self._fp): - data = self.read(amt=amt, decode_content=decode_content) - - if data: - yield data - - @classmethod - def from_httplib(ResponseCls, r, **response_kw): - """ - Given an :class:`httplib.HTTPResponse` instance ``r``, return a - corresponding :class:`urllib3.response.HTTPResponse` object. - - Remaining parameters are passed to the HTTPResponse constructor, along - with ``original_response=r``. - """ - headers = r.msg - - if not isinstance(headers, HTTPHeaderDict): - if PY3: - headers = HTTPHeaderDict(headers.items()) - else: - # Python 2.7 - headers = HTTPHeaderDict.from_httplib(headers) - - # HTTPResponse objects in Python 3 don't have a .strict attribute - strict = getattr(r, 'strict', 0) - resp = ResponseCls(body=r, - headers=headers, - status=r.status, - version=r.version, - reason=r.reason, - strict=strict, - original_response=r, - **response_kw) - return resp - - # Backwards-compatibility methods for httplib.HTTPResponse - def getheaders(self): - return self.headers - - def getheader(self, name, default=None): - return self.headers.get(name, default) - - # Backwards compatibility for http.cookiejar - def info(self): - return self.headers - - # Overrides from io.IOBase - def close(self): - if not self.closed: - self._fp.close() - - if self._connection: - self._connection.close() - - @property - def closed(self): - if self._fp is None: - return True - elif hasattr(self._fp, 'isclosed'): - return self._fp.isclosed() - elif hasattr(self._fp, 'closed'): - return self._fp.closed - else: - return True - - def fileno(self): - if self._fp is None: - raise IOError("HTTPResponse has no file to get a fileno from") - elif hasattr(self._fp, "fileno"): - return self._fp.fileno() - else: - raise IOError("The file-like object this HTTPResponse is wrapped " - "around has no file descriptor") - - def flush(self): - if self._fp is not None and hasattr(self._fp, 'flush'): - return self._fp.flush() - - def readable(self): - # This method is required for `io` module compatibility. - return True - - def readinto(self, b): - # This method is required for `io` module compatibility. - temp = self.read(len(b)) - if len(temp) == 0: - return 0 - else: - b[:len(temp)] = temp - return len(temp) - - def supports_chunked_reads(self): - """ - Checks if the underlying file-like object looks like a - httplib.HTTPResponse object. We do this by testing for the fp - attribute. If it is present we assume it returns raw chunks as - processed by read_chunked(). - """ - return hasattr(self._fp, 'fp') - - def _update_chunk_length(self): - # First, we'll figure out length of a chunk and then - # we'll try to read it from socket. - if self.chunk_left is not None: - return - line = self._fp.fp.readline() - line = line.split(b';', 1)[0] - try: - self.chunk_left = int(line, 16) - except ValueError: - # Invalid chunked protocol response, abort. - self.close() - raise httplib.IncompleteRead(line) - - def _handle_chunk(self, amt): - returned_chunk = None - if amt is None: - chunk = self._fp._safe_read(self.chunk_left) - returned_chunk = chunk - self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. 
- self.chunk_left = None - elif amt < self.chunk_left: - value = self._fp._safe_read(amt) - self.chunk_left = self.chunk_left - amt - returned_chunk = value - elif amt == self.chunk_left: - value = self._fp._safe_read(amt) - self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. - self.chunk_left = None - returned_chunk = value - else: # amt > self.chunk_left - returned_chunk = self._fp._safe_read(self.chunk_left) - self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. - self.chunk_left = None - return returned_chunk - - def read_chunked(self, amt=None, decode_content=None): - """ - Similar to :meth:`HTTPResponse.read`, but with an additional - parameter: ``decode_content``. - - :param amt: - How much of the content to read. If specified, caching is skipped - because it doesn't make sense to cache partial content as the full - response. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - """ - self._init_decoder() - # FIXME: Rewrite this method and make it a class with a better structured logic. - if not self.chunked: - raise ResponseNotChunked( - "Response is not chunked. " - "Header 'transfer-encoding: chunked' is missing.") - if not self.supports_chunked_reads(): - raise BodyNotHttplibCompatible( - "Body should be httplib.HTTPResponse like. " - "It should have have an fp attribute which returns raw chunks.") - - with self._error_catcher(): - # Don't bother reading the body of a HEAD request. - if self._original_response and is_response_to_head(self._original_response): - self._original_response.close() - return - - # If a response is already read and closed - # then return immediately. - if self._fp.fp is None: - return - - while True: - self._update_chunk_length() - if self.chunk_left == 0: - break - chunk = self._handle_chunk(amt) - decoded = self._decode(chunk, decode_content=decode_content, - flush_decoder=False) - if decoded: - yield decoded - - if decode_content: - # On CPython and PyPy, we should never need to flush the - # decoder. However, on Jython we *might* need to, so - # lets defensively do it anyway. - decoded = self._flush_decoder() - if decoded: # Platform-specific: Jython. - yield decoded - - # Chunk content ends with \r\n: discard it. - while True: - line = self._fp.fp.readline() - if not line: - # Some sites may not end with '\r\n'. - break - if line == b'\r\n': - break - - # We read everything; close the "file". - if self._original_response: - self._original_response.close() - - def geturl(self): - """ - Returns the URL that was the source of this response. - If the request that generated this response redirected, this method - will return the final redirect location. 
- """ - if self.retries is not None and len(self.retries.history): - return self.retries.history[-1].redirect_location - else: - return self._request_url - - def __iter__(self): - buffer = [b""] - for chunk in self.stream(decode_content=True): - if b"\n" in chunk: - chunk = chunk.split(b"\n") - yield b"".join(buffer) + chunk[0] + b"\n" - for x in chunk[1:-1]: - yield x + b"\n" - if chunk[-1]: - buffer = [chunk[-1]] - else: - buffer = [] - else: - buffer.append(chunk) - if buffer: - yield b"".join(buffer) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/response.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/response.pyc deleted file mode 100644 index f69a9d37..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/response.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/__init__.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/__init__.py deleted file mode 100644 index 2914bb46..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -from __future__ import absolute_import -# For backwards compatibility, provide imports that used to be here. -from .connection import is_connection_dropped -from .request import make_headers -from .response import is_fp_closed -from .ssl_ import ( - SSLContext, - HAS_SNI, - IS_PYOPENSSL, - IS_SECURETRANSPORT, - assert_fingerprint, - resolve_cert_reqs, - resolve_ssl_version, - ssl_wrap_socket, - PROTOCOL_TLS, -) -from .timeout import ( - current_time, - Timeout, -) - -from .retry import Retry -from .url import ( - get_host, - parse_url, - split_first, - Url, -) -from .wait import ( - wait_for_read, - wait_for_write -) - -__all__ = ( - 'HAS_SNI', - 'IS_PYOPENSSL', - 'IS_SECURETRANSPORT', - 'SSLContext', - 'PROTOCOL_TLS', - 'Retry', - 'Timeout', - 'Url', - 'assert_fingerprint', - 'current_time', - 'is_connection_dropped', - 'is_fp_closed', - 'get_host', - 'parse_url', - 'make_headers', - 'resolve_cert_reqs', - 'resolve_ssl_version', - 'split_first', - 'ssl_wrap_socket', - 'wait_for_read', - 'wait_for_write' -) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/__init__.pyc deleted file mode 100644 index 3bde4540..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.py deleted file mode 100644 index 5ad70b2f..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.py +++ /dev/null @@ -1,134 +0,0 @@ -from __future__ import absolute_import -import socket -from .wait import NoWayToWaitForSocketError, wait_for_read -from ..contrib import _appengine_environ - - -def is_connection_dropped(conn): # Platform-specific - """ - Returns True if the connection is dropped and should be closed. - - :param conn: - :class:`httplib.HTTPConnection` object. - - Note: For platforms like AppEngine, this will always return ``False`` to - let the platform handle connection recycling transparently for us. - """ - sock = getattr(conn, 'sock', False) - if sock is False: # Platform-specific: AppEngine - return False - if sock is None: # Connection already closed (such as by httplib). 
- return True - try: - # Returns True if readable, which here means it's been dropped - return wait_for_read(sock, timeout=0.0) - except NoWayToWaitForSocketError: # Platform-specific: AppEngine - return False - - -# This function is copied from socket.py in the Python 2.7 standard -# library test suite. Added to its signature is only `socket_options`. -# One additional modification is that we avoid binding to IPv6 servers -# discovered in DNS if the system doesn't have IPv6 functionality. -def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, socket_options=None): - """Connect to *address* and return the socket object. - - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. If *source_address* is set it must be a tuple of (host, port) - for the socket to bind as a source address before making the connection. - An host of '' or port 0 tells the OS to use the default. - """ - - host, port = address - if host.startswith('['): - host = host.strip('[]') - err = None - - # Using the value from allowed_gai_family() in the context of getaddrinfo lets - # us select whether to work with IPv4 DNS records, IPv6 records, or both. - # The original create_connection function always returns all records. - family = allowed_gai_family() - - for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): - af, socktype, proto, canonname, sa = res - sock = None - try: - sock = socket.socket(af, socktype, proto) - - # If provided, set socket level options before connecting. - _set_socket_options(sock, socket_options) - - if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: - sock.settimeout(timeout) - if source_address: - sock.bind(source_address) - sock.connect(sa) - return sock - - except socket.error as e: - err = e - if sock is not None: - sock.close() - sock = None - - if err is not None: - raise err - - raise socket.error("getaddrinfo returns an empty list") - - -def _set_socket_options(sock, options): - if options is None: - return - - for opt in options: - sock.setsockopt(*opt) - - -def allowed_gai_family(): - """This function is designed to work in the context of - getaddrinfo, where family=socket.AF_UNSPEC is the default and - will perform a DNS search for both IPv6 and IPv4 records.""" - - family = socket.AF_INET - if HAS_IPV6: - family = socket.AF_UNSPEC - return family - - -def _has_ipv6(host): - """ Returns True if the system can bind an IPv6 address. """ - sock = None - has_ipv6 = False - - # App Engine doesn't support IPV6 sockets and actually has a quota on the - # number of sockets that can be used, so just early out here instead of - # creating a socket needlessly. - # See https://github.com/urllib3/urllib3/issues/1446 - if _appengine_environ.is_appengine_sandbox(): - return False - - if socket.has_ipv6: - # has_ipv6 returns true if cPython was compiled with IPv6 support. - # It does not tell us if the system has IPv6 support enabled. To - # determine that we must bind to an IPv6 address. 
- # https://github.com/shazow/urllib3/pull/611 - # https://bugs.python.org/issue658327 - try: - sock = socket.socket(socket.AF_INET6) - sock.bind((host, 0)) - has_ipv6 = True - except Exception: - pass - - if sock: - sock.close() - return has_ipv6 - - -HAS_IPV6 = _has_ipv6('::1') diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyc deleted file mode 100644 index cd5d14d8..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/queue.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/queue.pyc deleted file mode 100644 index e015d239..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/queue.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.py deleted file mode 100644 index 280b8530..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.py +++ /dev/null @@ -1,125 +0,0 @@ -from __future__ import absolute_import -from base64 import b64encode - -from ..packages.six import b, integer_types -from ..exceptions import UnrewindableBodyError - -ACCEPT_ENCODING = 'gzip,deflate' -try: - import brotli as _unused_module_brotli # noqa: F401 -except ImportError: - pass -else: - ACCEPT_ENCODING += ',br' - -_FAILEDTELL = object() - - -def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, - basic_auth=None, proxy_basic_auth=None, disable_cache=None): - """ - Shortcuts for generating request headers. - - :param keep_alive: - If ``True``, adds 'connection: keep-alive' header. - - :param accept_encoding: - Can be a boolean, list, or string. - ``True`` translates to 'gzip,deflate'. - List will get joined by comma. - String will be used as provided. - - :param user_agent: - String representing the user-agent you want, such as - "python-urllib3/0.6" - - :param basic_auth: - Colon-separated username:password string for 'authorization: basic ...' - auth header. - - :param proxy_basic_auth: - Colon-separated username:password string for 'proxy-authorization: basic ...' - auth header. - - :param disable_cache: - If ``True``, adds 'cache-control: no-cache' header. - - Example:: - - >>> make_headers(keep_alive=True, user_agent="Batman/1.0") - {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} - >>> make_headers(accept_encoding=True) - {'accept-encoding': 'gzip,deflate'} - """ - headers = {} - if accept_encoding: - if isinstance(accept_encoding, str): - pass - elif isinstance(accept_encoding, list): - accept_encoding = ','.join(accept_encoding) - else: - accept_encoding = ACCEPT_ENCODING - headers['accept-encoding'] = accept_encoding - - if user_agent: - headers['user-agent'] = user_agent - - if keep_alive: - headers['connection'] = 'keep-alive' - - if basic_auth: - headers['authorization'] = 'Basic ' + \ - b64encode(b(basic_auth)).decode('utf-8') - - if proxy_basic_auth: - headers['proxy-authorization'] = 'Basic ' + \ - b64encode(b(proxy_basic_auth)).decode('utf-8') - - if disable_cache: - headers['cache-control'] = 'no-cache' - - return headers - - -def set_file_position(body, pos): - """ - If a position is provided, move file to that point. - Otherwise, we'll attempt to record a position for future use. 
- """ - if pos is not None: - rewind_body(body, pos) - elif getattr(body, 'tell', None) is not None: - try: - pos = body.tell() - except (IOError, OSError): - # This differentiates from None, allowing us to catch - # a failed `tell()` later when trying to rewind the body. - pos = _FAILEDTELL - - return pos - - -def rewind_body(body, body_pos): - """ - Attempt to rewind body to a certain position. - Primarily used for request redirects and retries. - - :param body: - File-like object that supports seek. - - :param int pos: - Position to seek to in file. - """ - body_seek = getattr(body, 'seek', None) - if body_seek is not None and isinstance(body_pos, integer_types): - try: - body_seek(body_pos) - except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect/retry.") - elif body_pos is _FAILEDTELL: - raise UnrewindableBodyError("Unable to record file position for rewinding " - "request body during a redirect/retry.") - else: - raise ValueError("body_pos must be of type integer, " - "instead it was %s." % type(body_pos)) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pyc deleted file mode 100644 index 6576574e..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.py deleted file mode 100644 index 3d548648..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.py +++ /dev/null @@ -1,87 +0,0 @@ -from __future__ import absolute_import -from ..packages.six.moves import http_client as httplib - -from ..exceptions import HeaderParsingError - - -def is_fp_closed(obj): - """ - Checks whether a given file-like object is closed. - - :param obj: - The file-like object to check. - """ - - try: - # Check `isclosed()` first, in case Python3 doesn't set `closed`. - # GH Issue #928 - return obj.isclosed() - except AttributeError: - pass - - try: - # Check via the official file-like-object way. - return obj.closed - except AttributeError: - pass - - try: - # Check if the object is a container for another file-like object that - # gets released on exhaustion (e.g. HTTPResponse). - return obj.fp is None - except AttributeError: - pass - - raise ValueError("Unable to determine whether fp is closed.") - - -def assert_header_parsing(headers): - """ - Asserts whether all headers have been successfully parsed. - Extracts encountered errors from the result of parsing headers. - - Only works on Python 3. - - :param headers: Headers to verify. - :type headers: `httplib.HTTPMessage`. - - :raises urllib3.exceptions.HeaderParsingError: - If parsing errors are found. - """ - - # This will fail silently if we pass in the wrong kind of parameter. - # To make debugging easier add an explicit check. 
- if not isinstance(headers, httplib.HTTPMessage): - raise TypeError('expected httplib.Message, got {0}.'.format( - type(headers))) - - defects = getattr(headers, 'defects', None) - get_payload = getattr(headers, 'get_payload', None) - - unparsed_data = None - if get_payload: - # get_payload is actually email.message.Message.get_payload; - # we're only interested in the result if it's not a multipart message - if not headers.is_multipart(): - payload = get_payload() - - if isinstance(payload, (bytes, str)): - unparsed_data = payload - - if defects or unparsed_data: - raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) - - -def is_response_to_head(response): - """ - Checks whether the request of a response has been a HEAD-request. - Handles the quirks of AppEngine. - - :param conn: - :type conn: :class:`httplib.HTTPResponse` - """ - # FIXME: Can we do this somehow without accessing private httplib _method? - method = response._method - if isinstance(method, int): # Platform-specific: Appengine - return method == 3 - return method.upper() == 'HEAD' diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pyc deleted file mode 100644 index b2778fb5..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.py deleted file mode 100644 index 02429ee8..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.py +++ /dev/null @@ -1,412 +0,0 @@ -from __future__ import absolute_import -import time -import logging -from collections import namedtuple -from itertools import takewhile -import email -import re - -from ..exceptions import ( - ConnectTimeoutError, - MaxRetryError, - ProtocolError, - ReadTimeoutError, - ResponseError, - InvalidHeader, -) -from ..packages import six - - -log = logging.getLogger(__name__) - - -# Data structure for representing the metadata of requests that result in a retry. -RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", - "status", "redirect_location"]) - - -class Retry(object): - """ Retry configuration. - - Each retry attempt will create a new Retry object with updated values, so - they can be safely reused. - - Retries can be defined as a default for a pool:: - - retries = Retry(connect=5, read=2, redirect=5) - http = PoolManager(retries=retries) - response = http.request('GET', 'http://example.com/') - - Or per-request (which overrides the default for the pool):: - - response = http.request('GET', 'http://example.com/', retries=Retry(10)) - - Retries can be disabled by passing ``False``:: - - response = http.request('GET', 'http://example.com/', retries=False) - - Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless - retries are disabled, in which case the causing exception will be raised. - - :param int total: - Total number of retries to allow. Takes precedence over other counts. - - Set to ``None`` to remove this constraint and fall back on other - counts. It's a good idea to set this to some sensibly-high value to - account for unexpected edge cases and avoid infinite retry loops. - - Set to ``0`` to fail on the first retry. - - Set to ``False`` to disable and imply ``raise_on_redirect=False``. - - :param int connect: - How many connection-related errors to retry on. 
- - These are errors raised before the request is sent to the remote server, - which we assume has not triggered the server to process the request. - - Set to ``0`` to fail on the first retry of this type. - - :param int read: - How many times to retry on read errors. - - These errors are raised after the request was sent to the server, so the - request may have side-effects. - - Set to ``0`` to fail on the first retry of this type. - - :param int redirect: - How many redirects to perform. Limit this to avoid infinite redirect - loops. - - A redirect is a HTTP response with a status code 301, 302, 303, 307 or - 308. - - Set to ``0`` to fail on the first retry of this type. - - Set to ``False`` to disable and imply ``raise_on_redirect=False``. - - :param int status: - How many times to retry on bad status codes. - - These are retries made on responses, where status code matches - ``status_forcelist``. - - Set to ``0`` to fail on the first retry of this type. - - :param iterable method_whitelist: - Set of uppercased HTTP method verbs that we should retry on. - - By default, we only retry on methods which are considered to be - idempotent (multiple requests with the same parameters end with the - same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. - - Set to a ``False`` value to retry on any verb. - - :param iterable status_forcelist: - A set of integer HTTP status codes that we should force a retry on. - A retry is initiated if the request method is in ``method_whitelist`` - and the response status code is in ``status_forcelist``. - - By default, this is disabled with ``None``. - - :param float backoff_factor: - A backoff factor to apply between attempts after the second try - (most errors are resolved immediately by a second try without a - delay). urllib3 will sleep for:: - - {backoff factor} * (2 ** ({number of total retries} - 1)) - - seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep - for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer - than :attr:`Retry.BACKOFF_MAX`. - - By default, backoff is disabled (set to 0). - - :param bool raise_on_redirect: Whether, if the number of redirects is - exhausted, to raise a MaxRetryError, or to return a response with a - response code in the 3xx range. - - :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: - whether we should raise an exception, or return a response, - if status falls in ``status_forcelist`` range and retries have - been exhausted. - - :param tuple history: The history of the request encountered during - each call to :meth:`~Retry.increment`. The list is in the order - the requests occurred. Each list item is of class :class:`RequestHistory`. - - :param bool respect_retry_after_header: - Whether to respect Retry-After header on status codes defined as - :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. - - :param iterable remove_headers_on_redirect: - Sequence of headers to remove from the request when a response - indicating a redirect is returned before firing off the redirected - request. - """ - - DEFAULT_METHOD_WHITELIST = frozenset([ - 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) - - RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) - - DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization']) - - #: Maximum backoff time. 
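A minimal sketch (not taken from the deleted file) of the backoff schedule described in the Retry docstring above, computed straight from the documented formula; the cap mirrors the BACKOFF_MAX constant defined just below.

BACKOFF_MAX = 120

def backoff_time(consecutive_errors, backoff_factor=0.1):
    # {backoff factor} * (2 ** ({number of total retries} - 1)), capped at BACKOFF_MAX
    if consecutive_errors <= 1:
        return 0
    return min(BACKOFF_MAX, backoff_factor * (2 ** (consecutive_errors - 1)))

print([backoff_time(n) for n in range(1, 6)])  # [0, 0.2, 0.4, 0.8, 1.6]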
- BACKOFF_MAX = 120 - - def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, - method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, - backoff_factor=0, raise_on_redirect=True, raise_on_status=True, - history=None, respect_retry_after_header=True, - remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST): - - self.total = total - self.connect = connect - self.read = read - self.status = status - - if redirect is False or total is False: - redirect = 0 - raise_on_redirect = False - - self.redirect = redirect - self.status_forcelist = status_forcelist or set() - self.method_whitelist = method_whitelist - self.backoff_factor = backoff_factor - self.raise_on_redirect = raise_on_redirect - self.raise_on_status = raise_on_status - self.history = history or tuple() - self.respect_retry_after_header = respect_retry_after_header - self.remove_headers_on_redirect = frozenset([ - h.lower() for h in remove_headers_on_redirect]) - - def new(self, **kw): - params = dict( - total=self.total, - connect=self.connect, read=self.read, redirect=self.redirect, status=self.status, - method_whitelist=self.method_whitelist, - status_forcelist=self.status_forcelist, - backoff_factor=self.backoff_factor, - raise_on_redirect=self.raise_on_redirect, - raise_on_status=self.raise_on_status, - history=self.history, - remove_headers_on_redirect=self.remove_headers_on_redirect - ) - params.update(kw) - return type(self)(**params) - - @classmethod - def from_int(cls, retries, redirect=True, default=None): - """ Backwards-compatibility for the old retries format.""" - if retries is None: - retries = default if default is not None else cls.DEFAULT - - if isinstance(retries, Retry): - return retries - - redirect = bool(redirect) and None - new_retries = cls(retries, redirect=redirect) - log.debug("Converted retries value: %r -> %r", retries, new_retries) - return new_retries - - def get_backoff_time(self): - """ Formula for computing the current backoff - - :rtype: float - """ - # We want to consider only the last consecutive errors sequence (Ignore redirects). - consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None, - reversed(self.history)))) - if consecutive_errors_len <= 1: - return 0 - - backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) - return min(self.BACKOFF_MAX, backoff_value) - - def parse_retry_after(self, retry_after): - # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 - if re.match(r"^\s*[0-9]+\s*$", retry_after): - seconds = int(retry_after) - else: - retry_date_tuple = email.utils.parsedate(retry_after) - if retry_date_tuple is None: - raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) - retry_date = time.mktime(retry_date_tuple) - seconds = retry_date - time.time() - - if seconds < 0: - seconds = 0 - - return seconds - - def get_retry_after(self, response): - """ Get the value of Retry-After in seconds. """ - - retry_after = response.getheader("Retry-After") - - if retry_after is None: - return None - - return self.parse_retry_after(retry_after) - - def sleep_for_retry(self, response=None): - retry_after = self.get_retry_after(response) - if retry_after: - time.sleep(retry_after) - return True - - return False - - def _sleep_backoff(self): - backoff = self.get_backoff_time() - if backoff <= 0: - return - time.sleep(backoff) - - def sleep(self, response=None): - """ Sleep between retry attempts. 
- - This method will respect a server's ``Retry-After`` response header - and sleep the duration of the time requested. If that is not present, it - will use an exponential backoff. By default, the backoff factor is 0 and - this method will return immediately. - """ - - if response: - slept = self.sleep_for_retry(response) - if slept: - return - - self._sleep_backoff() - - def _is_connection_error(self, err): - """ Errors when we're fairly sure that the server did not receive the - request, so it should be safe to retry. - """ - return isinstance(err, ConnectTimeoutError) - - def _is_read_error(self, err): - """ Errors that occur after the request has been started, so we should - assume that the server began processing it. - """ - return isinstance(err, (ReadTimeoutError, ProtocolError)) - - def _is_method_retryable(self, method): - """ Checks if a given HTTP method should be retried upon, depending if - it is included on the method whitelist. - """ - if self.method_whitelist and method.upper() not in self.method_whitelist: - return False - - return True - - def is_retry(self, method, status_code, has_retry_after=False): - """ Is this method/status code retryable? (Based on whitelists and control - variables such as the number of total retries to allow, whether to - respect the Retry-After header, whether this header is present, and - whether the returned status code is on the list of status codes to - be retried upon on the presence of the aforementioned header) - """ - if not self._is_method_retryable(method): - return False - - if self.status_forcelist and status_code in self.status_forcelist: - return True - - return (self.total and self.respect_retry_after_header and - has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES)) - - def is_exhausted(self): - """ Are we out of retries? """ - retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) - retry_counts = list(filter(None, retry_counts)) - if not retry_counts: - return False - - return min(retry_counts) < 0 - - def increment(self, method=None, url=None, response=None, error=None, - _pool=None, _stacktrace=None): - """ Return a new Retry object with incremented retry counters. - - :param response: A response object, or None, if the server did not - return a response. - :type response: :class:`~urllib3.response.HTTPResponse` - :param Exception error: An error encountered during the request, or - None if the response was received successfully. - - :return: A new ``Retry`` object. - """ - if self.total is False and error: - # Disabled, indicate to re-raise the error. - raise six.reraise(type(error), error, _stacktrace) - - total = self.total - if total is not None: - total -= 1 - - connect = self.connect - read = self.read - redirect = self.redirect - status_count = self.status - cause = 'unknown' - status = None - redirect_location = None - - if error and self._is_connection_error(error): - # Connect retry? - if connect is False: - raise six.reraise(type(error), error, _stacktrace) - elif connect is not None: - connect -= 1 - - elif error and self._is_read_error(error): - # Read retry? - if read is False or not self._is_method_retryable(method): - raise six.reraise(type(error), error, _stacktrace) - elif read is not None: - read -= 1 - - elif response and response.get_redirect_location(): - # Redirect retry? 
- if redirect is not None: - redirect -= 1 - cause = 'too many redirects' - redirect_location = response.get_redirect_location() - status = response.status - - else: - # Incrementing because of a server error like a 500 in - # status_forcelist and a the given method is in the whitelist - cause = ResponseError.GENERIC_ERROR - if response and response.status: - if status_count is not None: - status_count -= 1 - cause = ResponseError.SPECIFIC_ERROR.format( - status_code=response.status) - status = response.status - - history = self.history + (RequestHistory(method, url, error, status, redirect_location),) - - new_retry = self.new( - total=total, - connect=connect, read=read, redirect=redirect, status=status_count, - history=history) - - if new_retry.is_exhausted(): - raise MaxRetryError(_pool, url, error or ResponseError(cause)) - - log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) - - return new_retry - - def __repr__(self): - return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' - 'read={self.read}, redirect={self.redirect}, status={self.status})').format( - cls=type(self), self=self) - - -# For backwards compatibility (equivalent to pre-v1.9): -Retry.DEFAULT = Retry(3) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyc deleted file mode 100644 index f3d4602d..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.py deleted file mode 100644 index fbdef65d..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.py +++ /dev/null @@ -1,392 +0,0 @@ -from __future__ import absolute_import -import errno -import warnings -import hmac -import re - -from binascii import hexlify, unhexlify -from hashlib import md5, sha1, sha256 - -from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning -from ..packages import six -from ..packages.rfc3986 import abnf_regexp - - -SSLContext = None -HAS_SNI = False -IS_PYOPENSSL = False -IS_SECURETRANSPORT = False - -# Maps the length of a digest to a possible hash function producing this digest -HASHFUNC_MAP = { - 32: md5, - 40: sha1, - 64: sha256, -} - - -def _const_compare_digest_backport(a, b): - """ - Compare two digests of equal length in constant time. - - The digests must be of type str/bytes. - Returns True if the digests match, and False otherwise. - """ - result = abs(len(a) - len(b)) - for l, r in zip(bytearray(a), bytearray(b)): - result |= l ^ r - return result == 0 - - -_const_compare_digest = getattr(hmac, 'compare_digest', - _const_compare_digest_backport) - -# Borrow rfc3986's regular expressions for IPv4 -# and IPv6 addresses for use in is_ipaddress() -_IP_ADDRESS_REGEX = re.compile( - r'^(?:%s|%s|%s)$' % ( - abnf_regexp.IPv4_RE, - abnf_regexp.IPv6_RE, - abnf_regexp.IPv6_ADDRZ_RFC4007_RE - ) -) - -try: # Test for SSL features - import ssl - from ssl import wrap_socket, CERT_REQUIRED - from ssl import HAS_SNI # Has SNI? 
-except ImportError: - pass - -try: # Platform-specific: Python 3.6 - from ssl import PROTOCOL_TLS - PROTOCOL_SSLv23 = PROTOCOL_TLS -except ImportError: - try: - from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS - PROTOCOL_SSLv23 = PROTOCOL_TLS - except ImportError: - PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 - - -try: - from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION -except ImportError: - OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 - OP_NO_COMPRESSION = 0x20000 - - -# A secure default. -# Sources for more information on TLS ciphers: -# -# - https://wiki.mozilla.org/Security/Server_Side_TLS -# - https://www.ssllabs.com/projects/best-practices/index.html -# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ -# -# The general intent is: -# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), -# - prefer ECDHE over DHE for better performance, -# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and -# security, -# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, -# - disable NULL authentication, MD5 MACs, DSS, and other -# insecure ciphers for security reasons. -# - NOTE: TLS 1.3 cipher suites are managed through a different interface -# not exposed by CPython (yet!) and are enabled by default if they're available. -DEFAULT_CIPHERS = ':'.join([ - 'ECDHE+AESGCM', - 'ECDHE+CHACHA20', - 'DHE+AESGCM', - 'DHE+CHACHA20', - 'ECDH+AESGCM', - 'DH+AESGCM', - 'ECDH+AES', - 'DH+AES', - 'RSA+AESGCM', - 'RSA+AES', - '!aNULL', - '!eNULL', - '!MD5', - '!DSS', -]) - -try: - from ssl import SSLContext # Modern SSL? -except ImportError: - class SSLContext(object): # Platform-specific: Python 2 - def __init__(self, protocol_version): - self.protocol = protocol_version - # Use default values from a real SSLContext - self.check_hostname = False - self.verify_mode = ssl.CERT_NONE - self.ca_certs = None - self.options = 0 - self.certfile = None - self.keyfile = None - self.ciphers = None - - def load_cert_chain(self, certfile, keyfile): - self.certfile = certfile - self.keyfile = keyfile - - def load_verify_locations(self, cafile=None, capath=None): - self.ca_certs = cafile - - if capath is not None: - raise SSLError("CA directories not supported in older Pythons") - - def set_ciphers(self, cipher_suite): - self.ciphers = cipher_suite - - def wrap_socket(self, socket, server_hostname=None, server_side=False): - warnings.warn( - 'A true SSLContext object is not available. This prevents ' - 'urllib3 from configuring SSL appropriately and may cause ' - 'certain SSL connections to fail. You can upgrade to a newer ' - 'version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - InsecurePlatformWarning - ) - kwargs = { - 'keyfile': self.keyfile, - 'certfile': self.certfile, - 'ca_certs': self.ca_certs, - 'cert_reqs': self.verify_mode, - 'ssl_version': self.protocol, - 'server_side': server_side, - } - return wrap_socket(socket, ciphers=self.ciphers, **kwargs) - - -def assert_fingerprint(cert, fingerprint): - """ - Checks if given fingerprint matches the supplied certificate. - - :param cert: - Certificate as bytes object. - :param fingerprint: - Fingerprint as string of hexdigits, can be interspersed by colons. 
- """ - - fingerprint = fingerprint.replace(':', '').lower() - digest_length = len(fingerprint) - hashfunc = HASHFUNC_MAP.get(digest_length) - if not hashfunc: - raise SSLError( - 'Fingerprint of invalid length: {0}'.format(fingerprint)) - - # We need encode() here for py32; works on py2 and p33. - fingerprint_bytes = unhexlify(fingerprint.encode()) - - cert_digest = hashfunc(cert).digest() - - if not _const_compare_digest(cert_digest, fingerprint_bytes): - raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' - .format(fingerprint, hexlify(cert_digest))) - - -def resolve_cert_reqs(candidate): - """ - Resolves the argument to a numeric constant, which can be passed to - the wrap_socket function/method from the ssl module. - Defaults to :data:`ssl.CERT_NONE`. - If given a string it is assumed to be the name of the constant in the - :mod:`ssl` module or its abbreviation. - (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. - If it's neither `None` nor a string we assume it is already the numeric - constant which can directly be passed to wrap_socket. - """ - if candidate is None: - return CERT_REQUIRED - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'CERT_' + candidate) - return res - - return candidate - - -def resolve_ssl_version(candidate): - """ - like resolve_cert_reqs - """ - if candidate is None: - return PROTOCOL_TLS - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'PROTOCOL_' + candidate) - return res - - return candidate - - -def create_urllib3_context(ssl_version=None, cert_reqs=None, - options=None, ciphers=None): - """All arguments have the same meaning as ``ssl_wrap_socket``. - - By default, this function does a lot of the same work that - ``ssl.create_default_context`` does on Python 3.4+. It: - - - Disables SSLv2, SSLv3, and compression - - Sets a restricted set of server ciphers - - If you wish to enable SSLv3, you can do:: - - from pip._vendor.urllib3.util import ssl_ - context = ssl_.create_urllib3_context() - context.options &= ~ssl_.OP_NO_SSLv3 - - You can do the same to enable compression (substituting ``COMPRESSION`` - for ``SSLv3`` in the last line above). - - :param ssl_version: - The desired protocol version to use. This will default to - PROTOCOL_SSLv23 which will negotiate the highest protocol that both - the server and your installation of OpenSSL support. - :param cert_reqs: - Whether to require the certificate verification. This defaults to - ``ssl.CERT_REQUIRED``. - :param options: - Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, - ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. - :param ciphers: - Which cipher suites to allow the server to select. 
- :returns: - Constructed SSLContext object with specified options - :rtype: SSLContext - """ - context = SSLContext(ssl_version or PROTOCOL_TLS) - - context.set_ciphers(ciphers or DEFAULT_CIPHERS) - - # Setting the default here, as we may have no ssl module on import - cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs - - if options is None: - options = 0 - # SSLv2 is easily broken and is considered harmful and dangerous - options |= OP_NO_SSLv2 - # SSLv3 has several problems and is now dangerous - options |= OP_NO_SSLv3 - # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ - # (issue #309) - options |= OP_NO_COMPRESSION - - context.options |= options - - context.verify_mode = cert_reqs - if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 - # We do our own verification, including fingerprints and alternative - # hostnames. So disable it here - context.check_hostname = False - return context - - -def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None, ciphers=None, ssl_context=None, - ca_cert_dir=None, key_password=None): - """ - All arguments except for server_hostname, ssl_context, and ca_cert_dir have - the same meaning as they do when using :func:`ssl.wrap_socket`. - - :param server_hostname: - When SNI is supported, the expected hostname of the certificate - :param ssl_context: - A pre-made :class:`SSLContext` object. If none is provided, one will - be created using :func:`create_urllib3_context`. - :param ciphers: - A string of ciphers we wish the client to support. - :param ca_cert_dir: - A directory containing CA certificates in multiple separate files, as - supported by OpenSSL's -CApath flag or the capath argument to - SSLContext.load_verify_locations(). - :param key_password: - Optional password if the keyfile is encrypted. - """ - context = ssl_context - if context is None: - # Note: This branch of code and all the variables in it are no longer - # used by urllib3 itself. We should consider deprecating and removing - # this code. - context = create_urllib3_context(ssl_version, cert_reqs, - ciphers=ciphers) - - if ca_certs or ca_cert_dir: - try: - context.load_verify_locations(ca_certs, ca_cert_dir) - except IOError as e: # Platform-specific: Python 2.7 - raise SSLError(e) - # Py33 raises FileNotFoundError which subclasses OSError - # These are not equivalent unless we check the errno attribute - except OSError as e: # Platform-specific: Python 3.3 and beyond - if e.errno == errno.ENOENT: - raise SSLError(e) - raise - - elif ssl_context is None and hasattr(context, 'load_default_certs'): - # try to load OS default certs; works well on Windows (require Python3.4+) - context.load_default_certs() - - # Attempt to detect if we get the goofy behavior of the - # keyfile being encrypted and OpenSSL asking for the - # passphrase via the terminal and instead error out. - if keyfile and key_password is None and _is_key_file_encrypted(keyfile): - raise SSLError("Client private key is encrypted, password is required") - - if certfile: - if key_password is None: - context.load_cert_chain(certfile, keyfile) - else: - context.load_cert_chain(certfile, keyfile, key_password) - - # If we detect server_hostname is an IP address then the SNI - # extension should not be used according to RFC3546 Section 3.1 - # We shouldn't warn the user if SNI isn't available but we would - # not be using SNI anyways due to IP address for server_hostname. 
- if ((server_hostname is not None and not is_ipaddress(server_hostname)) - or IS_SECURETRANSPORT): - if HAS_SNI and server_hostname is not None: - return context.wrap_socket(sock, server_hostname=server_hostname) - - warnings.warn( - 'An HTTPS request has been made, but the SNI (Server Name ' - 'Indication) extension to TLS is not available on this platform. ' - 'This may cause the server to present an incorrect TLS ' - 'certificate, which can cause validation failures. You can upgrade to ' - 'a newer version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - SNIMissingWarning - ) - - return context.wrap_socket(sock) - - -def is_ipaddress(hostname): - """Detects whether the hostname given is an IPv4 or IPv6 address. - Also detects IPv6 addresses with Zone IDs. - - :param str hostname: Hostname to examine. - :return: True if the hostname is an IP address, False otherwise. - """ - if six.PY3 and isinstance(hostname, bytes): - # IDN A-label bytes are ASCII compatible. - hostname = hostname.decode('ascii') - return _IP_ADDRESS_REGEX.match(hostname) is not None - - -def _is_key_file_encrypted(key_file): - """Detects if a key file is encrypted or not.""" - with open(key_file, 'r') as f: - for line in f: - # Look for Proc-Type: 4,ENCRYPTED - if 'ENCRYPTED' in line: - return True - - return False diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyc deleted file mode 100644 index e82df63b..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.py deleted file mode 100644 index a4d004a8..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.py +++ /dev/null @@ -1,243 +0,0 @@ -from __future__ import absolute_import -# The default socket timeout, used by httplib to indicate that no timeout was -# specified by the user -from socket import _GLOBAL_DEFAULT_TIMEOUT -import time - -from ..exceptions import TimeoutStateError - -# A sentinel value to indicate that no timeout was specified by the user in -# urllib3 -_Default = object() - - -# Use time.monotonic if available. -current_time = getattr(time, "monotonic", time.time) - - -class Timeout(object): - """ Timeout configuration. - - Timeouts can be defined as a default for a pool:: - - timeout = Timeout(connect=2.0, read=7.0) - http = PoolManager(timeout=timeout) - response = http.request('GET', 'http://example.com/') - - Or per-request (which overrides the default for the pool):: - - response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) - - Timeouts can be disabled by setting all the parameters to ``None``:: - - no_timeout = Timeout(connect=None, read=None) - response = http.request('GET', 'http://example.com/, timeout=no_timeout) - - - :param total: - This combines the connect and read timeouts into one; the read timeout - will be set to the time leftover from the connect attempt. In the - event that both a connect timeout and a total are specified, or a read - timeout and a total are specified, the shorter timeout will be applied. - - Defaults to None. - - :type total: integer, float, or None - - :param connect: - The maximum amount of time to wait for a connection attempt to a server - to succeed. 
Omitting the parameter will default the connect timeout to - the system default, probably `the global default timeout in socket.py - <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. - None will set an infinite timeout for connection attempts. - - :type connect: integer, float, or None - - :param read: - The maximum amount of time to wait between consecutive - read operations for a response from the server. Omitting - the parameter will default the read timeout to the system - default, probably `the global default timeout in socket.py - <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. - None will set an infinite timeout. - - :type read: integer, float, or None - - .. note:: - - Many factors can affect the total amount of time for urllib3 to return - an HTTP response. - - For example, Python's DNS resolver does not obey the timeout specified - on the socket. Other factors that can affect total request time include - high CPU load, high swap, the program running at a low priority level, - or other behaviors. - - In addition, the read and total timeouts only measure the time between - read operations on the socket connecting the client and the server, - not the total amount of time for the request to return a complete - response. For most requests, the timeout is raised because the server - has not sent the first byte in the specified time. This is not always - the case; if a server streams one byte every fifteen seconds, a timeout - of 20 seconds will not trigger, even though the request will take - several minutes to complete. - - If your goal is to cut off any request after a set amount of wall clock - time, consider having a second "watcher" thread to cut off a slow - request. - """ - - #: A sentinel object representing the default timeout value - DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT - - def __init__(self, total=None, connect=_Default, read=_Default): - self._connect = self._validate_timeout(connect, 'connect') - self._read = self._validate_timeout(read, 'read') - self.total = self._validate_timeout(total, 'total') - self._start_connect = None - - def __str__(self): - return '%s(connect=%r, read=%r, total=%r)' % ( - type(self).__name__, self._connect, self._read, self.total) - - @classmethod - def _validate_timeout(cls, value, name): - """ Check that a timeout attribute is valid. - - :param value: The timeout value to validate - :param name: The name of the timeout attribute to validate. This is - used to specify in error messages. - :return: The validated and casted version of the given value. - :raises ValueError: If it is a numeric value less than or equal to - zero, or the type is not an integer, float, or None. - """ - if value is _Default: - return cls.DEFAULT_TIMEOUT - - if value is None or value is cls.DEFAULT_TIMEOUT: - return value - - if isinstance(value, bool): - raise ValueError("Timeout cannot be a boolean value. It must " - "be an int, float or None.") - try: - float(value) - except (TypeError, ValueError): - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None." % (name, value)) - - try: - if value <= 0: - raise ValueError("Attempted to set %s timeout to %s, but the " - "timeout cannot be set to a value less " - "than or equal to 0." % (name, value)) - except TypeError: - # Python 3 - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None." 
% (name, value)) - - return value - - @classmethod - def from_float(cls, timeout): - """ Create a new Timeout from a legacy timeout value. - - The timeout value used by httplib.py sets the same timeout on the - connect(), and recv() socket requests. This creates a :class:`Timeout` - object that sets the individual timeouts to the ``timeout`` value - passed to this function. - - :param timeout: The legacy timeout value. - :type timeout: integer, float, sentinel default object, or None - :return: Timeout object - :rtype: :class:`Timeout` - """ - return Timeout(read=timeout, connect=timeout) - - def clone(self): - """ Create a copy of the timeout object - - Timeout properties are stored per-pool but each request needs a fresh - Timeout object to ensure each one has its own start/stop configured. - - :return: a copy of the timeout object - :rtype: :class:`Timeout` - """ - # We can't use copy.deepcopy because that will also create a new object - # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to - # detect the user default. - return Timeout(connect=self._connect, read=self._read, - total=self.total) - - def start_connect(self): - """ Start the timeout clock, used during a connect() attempt - - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to start a timer that has been started already. - """ - if self._start_connect is not None: - raise TimeoutStateError("Timeout timer has already been started.") - self._start_connect = current_time() - return self._start_connect - - def get_connect_duration(self): - """ Gets the time elapsed since the call to :meth:`start_connect`. - - :return: Elapsed time. - :rtype: float - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to get duration for a timer that hasn't been started. - """ - if self._start_connect is None: - raise TimeoutStateError("Can't get connect duration for timer " - "that has not started.") - return current_time() - self._start_connect - - @property - def connect_timeout(self): - """ Get the value to use when setting a connection timeout. - - This will be a positive float or integer, the value None - (never timeout), or the default system timeout. - - :return: Connect timeout. - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - """ - if self.total is None: - return self._connect - - if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: - return self.total - - return min(self._connect, self.total) - - @property - def read_timeout(self): - """ Get the value for the read timeout. - - This assumes some time has elapsed in the connection timeout and - computes the read timeout appropriately. - - If self.total is set, the read timeout is dependent on the amount of - time taken by the connect timeout. If the connection time has not been - established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be - raised. - - :return: Value to use for the read timeout. - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` - has not yet been called on this object. - """ - if (self.total is not None and - self.total is not self.DEFAULT_TIMEOUT and - self._read is not None and - self._read is not self.DEFAULT_TIMEOUT): - # In case the connect timeout has not yet been established. 
- if self._start_connect is None: - return self._read - return max(0, min(self.total - self.get_connect_duration(), - self._read)) - elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: - return max(0, self.total - self.get_connect_duration()) - else: - return self._read diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyc deleted file mode 100644 index 4c12a318..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.py deleted file mode 100644 index aefa119b..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.py +++ /dev/null @@ -1,289 +0,0 @@ -from __future__ import absolute_import -import re -from collections import namedtuple - -from ..exceptions import LocationParseError -from ..packages import six, rfc3986 -from ..packages.rfc3986.exceptions import RFC3986Exception, ValidationError -from ..packages.rfc3986.validators import Validator -from ..packages.rfc3986 import abnf_regexp, normalizers, compat, misc - - -url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] - -# We only want to normalize urls with an HTTP(S) scheme. -# urllib3 infers URLs without a scheme (None) to be http. -NORMALIZABLE_SCHEMES = ('http', 'https', None) - -# Regex for detecting URLs with schemes. RFC 3986 Section 3.1 -SCHEME_REGEX = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+\-]*:|/)") - -PATH_CHARS = abnf_regexp.UNRESERVED_CHARS_SET | abnf_regexp.SUB_DELIMITERS_SET | {':', '@', '/'} -QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {'?'} - - -class Url(namedtuple('Url', url_attrs)): - """ - Data structure for representing an HTTP URL. Used as a return value for - :func:`parse_url`. Both the scheme and host are normalized as they are - both case-insensitive according to RFC 3986. - """ - __slots__ = () - - def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, - query=None, fragment=None): - if path and not path.startswith('/'): - path = '/' + path - if scheme is not None: - scheme = scheme.lower() - return super(Url, cls).__new__(cls, scheme, auth, host, port, path, - query, fragment) - - @property - def hostname(self): - """For backwards-compatibility with urlparse. We're nice like that.""" - return self.host - - @property - def request_uri(self): - """Absolute path including the query string.""" - uri = self.path or '/' - - if self.query is not None: - uri += '?' + self.query - - return uri - - @property - def netloc(self): - """Network location including host and port""" - if self.port: - return '%s:%d' % (self.host, self.port) - return self.host - - @property - def url(self): - """ - Convert self into a url - - This function should more or less round-trip with :func:`.parse_url`. The - returned url may not be exactly the same as the url inputted to - :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls - with a blank port will have : removed). - - Example: :: - - >>> U = parse_url('http://google.com/mail/') - >>> U.url - 'http://google.com/mail/' - >>> Url('http', 'username:password', 'host.com', 80, - ... 
'/path', 'query', 'fragment').url - 'http://username:password@host.com:80/path?query#fragment' - """ - scheme, auth, host, port, path, query, fragment = self - url = u'' - - # We use "is not None" we want things to happen with empty strings (or 0 port) - if scheme is not None: - url += scheme + u'://' - if auth is not None: - url += auth + u'@' - if host is not None: - url += host - if port is not None: - url += u':' + str(port) - if path is not None: - url += path - if query is not None: - url += u'?' + query - if fragment is not None: - url += u'#' + fragment - - return url - - def __str__(self): - return self.url - - -def split_first(s, delims): - """ - .. deprecated:: 1.25 - - Given a string and an iterable of delimiters, split on the first found - delimiter. Return two split parts and the matched delimiter. - - If not found, then the first part is the full input string. - - Example:: - - >>> split_first('foo/bar?baz', '?/=') - ('foo', 'bar?baz', '/') - >>> split_first('foo/bar?baz', '123') - ('foo/bar?baz', '', None) - - Scales linearly with number of delims. Not ideal for large number of delims. - """ - min_idx = None - min_delim = None - for d in delims: - idx = s.find(d) - if idx < 0: - continue - - if min_idx is None or idx < min_idx: - min_idx = idx - min_delim = d - - if min_idx is None or min_idx < 0: - return s, '', None - - return s[:min_idx], s[min_idx + 1:], min_delim - - -def _encode_invalid_chars(component, allowed_chars, encoding='utf-8'): - """Percent-encodes a URI component without reapplying - onto an already percent-encoded component. Based on - rfc3986.normalizers.encode_component() - """ - if component is None: - return component - - # Try to see if the component we're encoding is already percent-encoded - # so we can skip all '%' characters but still encode all others. - percent_encodings = len(normalizers.PERCENT_MATCHER.findall( - compat.to_str(component, encoding))) - - uri_bytes = component.encode('utf-8', 'surrogatepass') - is_percent_encoded = percent_encodings == uri_bytes.count(b'%') - - encoded_component = bytearray() - - for i in range(0, len(uri_bytes)): - # Will return a single character bytestring on both Python 2 & 3 - byte = uri_bytes[i:i+1] - byte_ord = ord(byte) - if ((is_percent_encoded and byte == b'%') - or (byte_ord < 128 and byte.decode() in allowed_chars)): - encoded_component.extend(byte) - continue - encoded_component.extend('%{0:02x}'.format(byte_ord).encode().upper()) - - return encoded_component.decode(encoding) - - -def parse_url(url): - """ - Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is - performed to parse incomplete urls. Fields not provided will be None. - This parser is RFC 3986 compliant. - - :param str url: URL to parse into a :class:`.Url` namedtuple. - - Partly backwards-compatible with :mod:`urlparse`. - - Example:: - - >>> parse_url('http://google.com/mail/') - Url(scheme='http', host='google.com', port=None, path='/mail/', ...) - >>> parse_url('google.com:80') - Url(scheme=None, host='google.com', port=80, path=None, ...) - >>> parse_url('/foo?bar') - Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) - """ - if not url: - # Empty - return Url() - - is_string = not isinstance(url, six.binary_type) - - # RFC 3986 doesn't like URLs that have a host but don't start - # with a scheme and we support URLs like that so we need to - # detect that problem and add an empty scheme indication. 
- # We don't get hurt on path-only URLs here as it's stripped - # off and given an empty scheme anyways. - if not SCHEME_REGEX.search(url): - url = "//" + url - - def idna_encode(name): - if name and any([ord(x) > 128 for x in name]): - try: - from pip._vendor import idna - except ImportError: - raise LocationParseError("Unable to parse URL without the 'idna' module") - try: - return idna.encode(name.lower(), strict=True, std3_rules=True) - except idna.IDNAError: - raise LocationParseError(u"Name '%s' is not a valid IDNA label" % name) - return name - - try: - split_iri = misc.IRI_MATCHER.match(compat.to_str(url)).groupdict() - iri_ref = rfc3986.IRIReference( - split_iri['scheme'], split_iri['authority'], - _encode_invalid_chars(split_iri['path'], PATH_CHARS), - _encode_invalid_chars(split_iri['query'], QUERY_CHARS), - _encode_invalid_chars(split_iri['fragment'], FRAGMENT_CHARS) - ) - has_authority = iri_ref.authority is not None - uri_ref = iri_ref.encode(idna_encoder=idna_encode) - except (ValueError, RFC3986Exception): - return six.raise_from(LocationParseError(url), None) - - # rfc3986 strips the authority if it's invalid - if has_authority and uri_ref.authority is None: - raise LocationParseError(url) - - # Only normalize schemes we understand to not break http+unix - # or other schemes that don't follow RFC 3986. - if uri_ref.scheme is None or uri_ref.scheme.lower() in NORMALIZABLE_SCHEMES: - uri_ref = uri_ref.normalize() - - # Validate all URIReference components and ensure that all - # components that were set before are still set after - # normalization has completed. - validator = Validator() - try: - validator.check_validity_of( - *validator.COMPONENT_NAMES - ).validate(uri_ref) - except ValidationError: - return six.raise_from(LocationParseError(url), None) - - # For the sake of backwards compatibility we put empty - # string values for path if there are any defined values - # beyond the path in the URL. - # TODO: Remove this when we break backwards compatibility. - path = uri_ref.path - if not path: - if (uri_ref.query is not None - or uri_ref.fragment is not None): - path = "" - else: - path = None - - # Ensure that each part of the URL is a `str` for - # backwards compatibility. - def to_input_type(x): - if x is None: - return None - elif not is_string and not isinstance(x, six.binary_type): - return x.encode('utf-8') - return x - - return Url( - scheme=to_input_type(uri_ref.scheme), - auth=to_input_type(uri_ref.userinfo), - host=to_input_type(uri_ref.host), - port=int(uri_ref.port) if uri_ref.port is not None else None, - path=to_input_type(path), - query=to_input_type(uri_ref.query), - fragment=to_input_type(uri_ref.fragment) - ) - - -def get_host(url): - """ - Deprecated. Use :func:`parse_url` instead. 
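A minimal sketch (not taken from the deleted file) of parse_url and the Url namedtuple described here, assuming a standalone urllib3 installation rather than the vendored copy being removed.

from urllib3.util import parse_url

u = parse_url("https://user:pw@example.com:8443/search?q=timer#top")
print(u.scheme, u.host, u.port)  # https example.com 8443
print(u.request_uri)             # /search?q=timer
print(u.url)                     # reassembles the parsed pieces into a URL string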
- """ - p = parse_url(url) - return p.scheme or 'http', p.hostname, p.port diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyc deleted file mode 100644 index acadce74..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.py b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.py deleted file mode 100644 index 4db71baf..00000000 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.py +++ /dev/null @@ -1,150 +0,0 @@ -import errno -from functools import partial -import select -import sys -try: - from time import monotonic -except ImportError: - from time import time as monotonic - -__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] - - -class NoWayToWaitForSocketError(Exception): - pass - - -# How should we wait on sockets? -# -# There are two types of APIs you can use for waiting on sockets: the fancy -# modern stateful APIs like epoll/kqueue, and the older stateless APIs like -# select/poll. The stateful APIs are more efficient when you have a lots of -# sockets to keep track of, because you can set them up once and then use them -# lots of times. But we only ever want to wait on a single socket at a time -# and don't want to keep track of state, so the stateless APIs are actually -# more efficient. So we want to use select() or poll(). -# -# Now, how do we choose between select() and poll()? On traditional Unixes, -# select() has a strange calling convention that makes it slow, or fail -# altogether, for high-numbered file descriptors. The point of poll() is to fix -# that, so on Unixes, we prefer poll(). -# -# On Windows, there is no poll() (or at least Python doesn't provide a wrapper -# for it), but that's OK, because on Windows, select() doesn't have this -# strange calling convention; plain select() works fine. -# -# So: on Windows we use select(), and everywhere else we use poll(). We also -# fall back to select() in case poll() is somehow broken or missing. - -if sys.version_info >= (3, 5): - # Modern Python, that retries syscalls by default - def _retry_on_intr(fn, timeout): - return fn(timeout) -else: - # Old and broken Pythons. - def _retry_on_intr(fn, timeout): - if timeout is None: - deadline = float("inf") - else: - deadline = monotonic() + timeout - - while True: - try: - return fn(timeout) - # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7 - except (OSError, select.error) as e: - # 'e.args[0]' incantation works for both OSError and select.error - if e.args[0] != errno.EINTR: - raise - else: - timeout = deadline - monotonic() - if timeout < 0: - timeout = 0 - if timeout == float("inf"): - timeout = None - continue - - -def select_wait_for_socket(sock, read=False, write=False, timeout=None): - if not read and not write: - raise RuntimeError("must specify at least one of read=True, write=True") - rcheck = [] - wcheck = [] - if read: - rcheck.append(sock) - if write: - wcheck.append(sock) - # When doing a non-blocking connect, most systems signal success by - # marking the socket writable. Windows, though, signals success by marked - # it as "exceptional". We paper over the difference by checking the write - # sockets for both conditions. (The stdlib selectors module does the same - # thing.) 
- fn = partial(select.select, rcheck, wcheck, wcheck) - rready, wready, xready = _retry_on_intr(fn, timeout) - return bool(rready or wready or xready) - - -def poll_wait_for_socket(sock, read=False, write=False, timeout=None): - if not read and not write: - raise RuntimeError("must specify at least one of read=True, write=True") - mask = 0 - if read: - mask |= select.POLLIN - if write: - mask |= select.POLLOUT - poll_obj = select.poll() - poll_obj.register(sock, mask) - - # For some reason, poll() takes timeout in milliseconds - def do_poll(t): - if t is not None: - t *= 1000 - return poll_obj.poll(t) - - return bool(_retry_on_intr(do_poll, timeout)) - - -def null_wait_for_socket(*args, **kwargs): - raise NoWayToWaitForSocketError("no select-equivalent available") - - -def _have_working_poll(): - # Apparently some systems have a select.poll that fails as soon as you try - # to use it, either due to strange configuration or broken monkeypatching - # from libraries like eventlet/greenlet. - try: - poll_obj = select.poll() - _retry_on_intr(poll_obj.poll, 0) - except (AttributeError, OSError): - return False - else: - return True - - -def wait_for_socket(*args, **kwargs): - # We delay choosing which implementation to use until the first time we're - # called. We could do it at import time, but then we might make the wrong - # decision if someone goes wild with monkeypatching select.poll after - # we're imported. - global wait_for_socket - if _have_working_poll(): - wait_for_socket = poll_wait_for_socket - elif hasattr(select, "select"): - wait_for_socket = select_wait_for_socket - else: # Platform-specific: Appengine. - wait_for_socket = null_wait_for_socket - return wait_for_socket(*args, **kwargs) - - -def wait_for_read(sock, timeout=None): - """ Waits for reading to be available on a given socket. - Returns True if the socket is readable, or False if the timeout expired. - """ - return wait_for_socket(sock, read=True, timeout=timeout) - - -def wait_for_write(sock, timeout=None): - """ Waits for writing to be available on a given socket. - Returns True if the socket is readable, or False if the timeout expired. 
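A minimal sketch (not taken from the deleted file) of the readiness check wait_for_read() provides, written directly against select() on a local socket pair so it runs without urllib3.

import select
import socket

a, b = socket.socketpair()
readable, _, _ = select.select([a], [], [], 0.1)
print(bool(readable))   # False: nothing has been written yet

b.sendall(b"ping")
readable, _, _ = select.select([a], [], [], 0.1)
print(bool(readable))   # True: data is waiting on the other end
a.close()
b.close()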
- """ - return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pyc b/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pyc deleted file mode 100644 index 181a44f8..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/__init__.pyc b/env/lib/python2.7/site-packages/pip/_vendor/webencodings/__init__.pyc deleted file mode 100644 index 617f4d83..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/labels.pyc b/env/lib/python2.7/site-packages/pip/_vendor/webencodings/labels.pyc deleted file mode 100644 index c014ee60..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/labels.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/mklabels.pyc b/env/lib/python2.7/site-packages/pip/_vendor/webencodings/mklabels.pyc deleted file mode 100644 index 113ad365..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/mklabels.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/tests.pyc b/env/lib/python2.7/site-packages/pip/_vendor/webencodings/tests.pyc deleted file mode 100644 index 73454a72..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/tests.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyc b/env/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyc deleted file mode 100644 index cd37552a..00000000 Binary files a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/__init__.py b/env/lib/python2.7/site-packages/pkg_resources/__init__.py deleted file mode 100644 index 1f170cfd..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/__init__.py +++ /dev/null @@ -1,3295 +0,0 @@ -# coding: utf-8 -""" -Package resource API --------------------- - -A resource is a logical file contained within a package, or a logical -subdirectory thereof. The package resource API expects resource names -to have their path parts separated with ``/``, *not* whatever the local -path separator is. Do not use os.path operations to manipulate resource -names being passed into the API. - -The package resource API is designed to work with normal filesystem packages, -.egg files, and unpacked .egg files. It can also work in a limited way with -.zip files and with custom PEP 302 loaders that support the ``get_data()`` -method. 
-""" - -from __future__ import absolute_import - -import sys -import os -import io -import time -import re -import types -import zipfile -import zipimport -import warnings -import stat -import functools -import pkgutil -import operator -import platform -import collections -import plistlib -import email.parser -import errno -import tempfile -import textwrap -import itertools -import inspect -import ntpath -import posixpath -from pkgutil import get_importer - -try: - import _imp -except ImportError: - # Python 3.2 compatibility - import imp as _imp - -try: - FileExistsError -except NameError: - FileExistsError = OSError - -from pkg_resources.extern import six -from pkg_resources.extern.six.moves import urllib, map, filter - -# capture these to bypass sandboxing -from os import utime -try: - from os import mkdir, rename, unlink - WRITE_SUPPORT = True -except ImportError: - # no write support, probably under GAE - WRITE_SUPPORT = False - -from os import open as os_open -from os.path import isdir, split - -try: - import importlib.machinery as importlib_machinery - # access attribute to force import under delayed import mechanisms. - importlib_machinery.__name__ -except ImportError: - importlib_machinery = None - -from . import py31compat -from pkg_resources.extern import appdirs -from pkg_resources.extern import packaging -__import__('pkg_resources.extern.packaging.version') -__import__('pkg_resources.extern.packaging.specifiers') -__import__('pkg_resources.extern.packaging.requirements') -__import__('pkg_resources.extern.packaging.markers') - - -__metaclass__ = type - - -if (3, 0) < sys.version_info < (3, 4): - raise RuntimeError("Python 3.4 or later is required") - -if six.PY2: - # Those builtin exceptions are only defined in Python 3 - PermissionError = None - NotADirectoryError = None - -# declare some globals that will be defined later to -# satisfy the linters. -require = None -working_set = None -add_activation_listener = None -resources_stream = None -cleanup_resources = None -resource_dir = None -resource_stream = None -set_extraction_path = None -resource_isdir = None -resource_string = None -iter_entry_points = None -resource_listdir = None -resource_filename = None -resource_exists = None -_distribution_finders = None -_namespace_handlers = None -_namespace_packages = None - - -class PEP440Warning(RuntimeWarning): - """ - Used when there is an issue with a version or specifier not complying with - PEP 440. - """ - - -def parse_version(v): - try: - return packaging.version.Version(v) - except packaging.version.InvalidVersion: - return packaging.version.LegacyVersion(v) - - -_state_vars = {} - - -def _declare_state(vartype, **kw): - globals().update(kw) - _state_vars.update(dict.fromkeys(kw, vartype)) - - -def __getstate__(): - state = {} - g = globals() - for k, v in _state_vars.items(): - state[k] = g['_sget_' + v](g[k]) - return state - - -def __setstate__(state): - g = globals() - for k, v in state.items(): - g['_sset_' + _state_vars[k]](k, g[k], v) - return state - - -def _sget_dict(val): - return val.copy() - - -def _sset_dict(key, ob, state): - ob.clear() - ob.update(state) - - -def _sget_object(val): - return val.__getstate__() - - -def _sset_object(key, ob, state): - ob.__setstate__(state) - - -_sget_none = _sset_none = lambda *args: None - - -def get_supported_platform(): - """Return this platform's maximum compatible version. 
- - distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by - distutils. But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the - current version of the OS. - - If this condition occurs for any other platform with a version in its - platform strings, this function should be extended accordingly. - """ - plat = get_build_platform() - m = macosVersionString.match(plat) - if m is not None and sys.platform == "darwin": - try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) - except ValueError: - # not Mac OS X - pass - return plat - - -__all__ = [ - # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', - 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - - # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', - 'get_default_cache', - - # Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - - # Exceptions - 'ResolutionError', 'VersionConflict', 'DistributionNotFound', - 'UnknownExtra', 'ExtractionError', - - # Warnings - 'PEP440Warning', - - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', - - # filesystem utilities - 'ensure_directory', 'normalize_path', - - # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - - # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - - # Warnings - 'PkgResourcesDeprecationWarning', - - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', -] - - -class ResolutionError(Exception): - """Abstract base for dependency resolution errors""" - - def __repr__(self): - return self.__class__.__name__ + repr(self.args) - - -class VersionConflict(ResolutionError): - """ - An already-installed version conflicts with the requested version. - - Should be initialized with the installed Distribution and the requested - Requirement. - """ - - _template = "{self.dist} is installed but {self.req} is required" - - @property - def dist(self): - return self.args[0] - - @property - def req(self): - return self.args[1] - - def report(self): - return self._template.format(**locals()) - - def with_context(self, required_by): - """ - If required_by is non-empty, return a version of self that is a - ContextualVersionConflict. 
- """ - if not required_by: - return self - args = self.args + (required_by,) - return ContextualVersionConflict(*args) - - -class ContextualVersionConflict(VersionConflict): - """ - A VersionConflict that accepts a third parameter, the set of the - requirements that required the installed Distribution. - """ - - _template = VersionConflict._template + ' by {self.required_by}' - - @property - def required_by(self): - return self.args[2] - - -class DistributionNotFound(ResolutionError): - """A requested distribution was not found""" - - _template = ("The '{self.req}' distribution was not found " - "and is required by {self.requirers_str}") - - @property - def req(self): - return self.args[0] - - @property - def requirers(self): - return self.args[1] - - @property - def requirers_str(self): - if not self.requirers: - return 'the application' - return ', '.join(self.requirers) - - def report(self): - return self._template.format(**locals()) - - def __str__(self): - return self.report() - - -class UnknownExtra(ResolutionError): - """Distribution doesn't have an "extra feature" of the given name""" - - -_provider_factories = {} - -PY_MAJOR = sys.version[:3] -EGG_DIST = 3 -BINARY_DIST = 2 -SOURCE_DIST = 1 -CHECKOUT_DIST = 0 -DEVELOP_DIST = -1 - - -def register_loader_type(loader_type, provider_factory): - """Register `provider_factory` to make providers for `loader_type` - - `loader_type` is the type or class of a PEP 302 ``module.__loader__``, - and `provider_factory` is a function that, passed a *module* object, - returns an ``IResourceProvider`` for that module. - """ - _provider_factories[loader_type] = provider_factory - - -def get_provider(moduleOrReq): - """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq, Requirement): - return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] - try: - module = sys.modules[moduleOrReq] - except KeyError: - __import__(moduleOrReq) - module = sys.modules[moduleOrReq] - loader = getattr(module, '__loader__', None) - return _find_adapter(_provider_factories, loader)(module) - - -def _macosx_vers(_cache=[]): - if not _cache: - version = platform.mac_ver()[0] - # fallback for MacPorts - if version == '': - plist = '/System/Library/CoreServices/SystemVersion.plist' - if os.path.exists(plist): - if hasattr(plistlib, 'readPlist'): - plist_content = plistlib.readPlist(plist) - if 'ProductVersion' in plist_content: - version = plist_content['ProductVersion'] - - _cache.append(version.split('.')) - return _cache[0] - - -def _macosx_arch(machine): - return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) - - -def get_build_platform(): - """Return this platform's string for platform-specific distributions - - XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. 
- """ - from sysconfig import get_platform - - plat = get_platform() - if sys.platform == "darwin" and not plat.startswith('macosx-'): - try: - version = _macosx_vers() - machine = os.uname()[4].replace(" ", "_") - return "macosx-%d.%d-%s" % ( - int(version[0]), int(version[1]), - _macosx_arch(machine), - ) - except ValueError: - # if someone is running a non-Mac darwin system, this will fall - # through to the default implementation - pass - return plat - - -macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") -darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -# XXX backward compat -get_platform = get_build_platform - - -def compatible_platforms(provided, required): - """Can code for the `provided` platform run on the `required` platform? - - Returns true if either platform is ``None``, or the platforms are equal. - - XXX Needs compatibility checks for Linux and other unixy OSes. - """ - if provided is None or required is None or provided == required: - # easy case - return True - - # Mac OS X special cases - reqMac = macosVersionString.match(required) - if reqMac: - provMac = macosVersionString.match(provided) - - # is this a Mac package? - if not provMac: - # this is backwards compatibility for packages built before - # setuptools 0.6. All packages built after this point will - # use the new macosx designation. - provDarwin = darwinVersionString.match(provided) - if provDarwin: - dversion = int(provDarwin.group(1)) - macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": - return True - # egg isn't macosx or legacy darwin - return False - - # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): - return False - - # is the required OS major update >= the provided one? 
- if int(provMac.group(2)) > int(reqMac.group(2)): - return False - - return True - - # XXX Linux and other platforms' special cases should go here - return False - - -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - - -# backward compatibility -run_main = run_script - - -def get_distribution(dist): - """Return a current distribution object for a Requirement or string""" - if isinstance(dist, six.string_types): - dist = Requirement.parse(dist) - if isinstance(dist, Requirement): - dist = get_provider(dist) - if not isinstance(dist, Distribution): - raise TypeError("Expected string, Requirement, or Distribution", dist) - return dist - - -def load_entry_point(dist, group, name): - """Return `name` entry point of `group` for `dist` or raise ImportError""" - return get_distribution(dist).load_entry_point(group, name) - - -def get_entry_map(dist, group=None): - """Return the entry point map for `group`, or the full entry map""" - return get_distribution(dist).get_entry_map(group) - - -def get_entry_info(dist, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return get_distribution(dist).get_entry_info(group, name) - - -class IMetadataProvider: - def has_metadata(name): - """Does the package's distribution contain the named metadata?""" - - def get_metadata(name): - """The named metadata resource as a string""" - - def get_metadata_lines(name): - """Yield named metadata resource as list of non-blank non-comment lines - - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" - - def metadata_isdir(name): - """Is the named metadata a directory? (like ``os.path.isdir()``)""" - - def metadata_listdir(name): - """List of metadata names in the directory (like ``os.listdir()``)""" - - def run_script(script_name, namespace): - """Execute the named script in the supplied namespace dictionary""" - - -class IResourceProvider(IMetadataProvider): - """An object that provides access to package resources""" - - def get_resource_filename(manager, resource_name): - """Return a true filesystem path for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_stream(manager, resource_name): - """Return a readable file-like object for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_string(manager, resource_name): - """Return a string containing the contents of `resource_name` - - `manager` must be an ``IResourceManager``""" - - def has_resource(resource_name): - """Does the package contain the named resource?""" - - def resource_isdir(resource_name): - """Is the named resource a directory? (like ``os.path.isdir()``)""" - - def resource_listdir(resource_name): - """List of resource names in the directory (like ``os.listdir()``)""" - - -class WorkingSet: - """A collection of active distributions on sys.path (or a similar list)""" - - def __init__(self, entries=None): - """Create working set from list of path entries (default=sys.path)""" - self.entries = [] - self.entry_keys = {} - self.by_key = {} - self.callbacks = [] - - if entries is None: - entries = sys.path - - for entry in entries: - self.add_entry(entry) - - @classmethod - def _build_master(cls): - """ - Prepare the master working set. 
- """ - ws = cls() - try: - from __main__ import __requires__ - except ImportError: - # The main program does not list any requirements - return ws - - # ensure the requirements are met - try: - ws.require(__requires__) - except VersionConflict: - return cls._build_from_requirements(__requires__) - - return ws - - @classmethod - def _build_from_requirements(cls, req_spec): - """ - Build a working set from a requirement spec. Rewrites sys.path. - """ - # try it without defaults already on sys.path - # by starting with an empty path - ws = cls([]) - reqs = parse_requirements(req_spec) - dists = ws.resolve(reqs, Environment()) - for dist in dists: - ws.add(dist) - - # add any missing entries from sys.path - for entry in sys.path: - if entry not in ws.entries: - ws.add_entry(entry) - - # then copy back to sys.path - sys.path[:] = ws.entries - return ws - - def add_entry(self, entry): - """Add a path item to ``.entries``, finding any distributions on it - - ``find_distributions(entry, True)`` is used to find distributions - corresponding to the path entry, and they are added. `entry` is - always appended to ``.entries``, even if it is already present. - (This is because ``sys.path`` can contain the same value more than - once, and the ``.entries`` of the ``sys.path`` WorkingSet should always - equal ``sys.path``.) - """ - self.entry_keys.setdefault(entry, []) - self.entries.append(entry) - for dist in find_distributions(entry, True): - self.add(dist, entry, False) - - def __contains__(self, dist): - """True if `dist` is the active distribution for its project""" - return self.by_key.get(dist.key) == dist - - def find(self, req): - """Find a distribution matching requirement `req` - - If there is an active distribution for the requested project, this - returns it as long as it meets the version requirement specified by - `req`. But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. - """ - dist = self.by_key.get(req.key) - if dist is not None and dist not in req: - # XXX add more info - raise VersionConflict(dist, req) - return dist - - def iter_entry_points(self, group, name=None): - """Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching - both `group` and `name` are yielded (in distribution order). - """ - return ( - entry - for dist in self - for entry in dist.get_entry_map(group).values() - if name is None or name == entry.name - ) - - def run_script(self, requires, script_name): - """Locate distribution for `requires` and run `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - self.require(requires)[0].run_script(script_name, ns) - - def __iter__(self): - """Yield distributions for non-duplicate projects in the working set - - The yield order is the order in which the items' path entries were - added to the working set. 
- """ - seen = {} - for item in self.entries: - if item not in self.entry_keys: - # workaround a cache issue - continue - - for key in self.entry_keys[item]: - if key not in seen: - seen[key] = 1 - yield self.by_key[key] - - def add(self, dist, entry=None, insert=True, replace=False): - """Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to the ``.location`` of `dist`. - On exit from this routine, `entry` is added to the end of the working - set's ``.entries`` (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution in the set, unless `replace=True`. - If it's added, any callbacks registered with the ``subscribe()`` method - will be called. - """ - if insert: - dist.insert_on(self.entries, entry, replace=replace) - - if entry is None: - entry = dist.location - keys = self.entry_keys.setdefault(entry, []) - keys2 = self.entry_keys.setdefault(dist.location, []) - if not replace and dist.key in self.by_key: - # ignore hidden distros - return - - self.by_key[dist.key] = dist - if dist.key not in keys: - keys.append(dist.key) - if dist.key not in keys2: - keys2.append(dist.key) - self._added_new(dist) - - def resolve(self, requirements, env=None, installer=None, - replace_conflicting=False, extras=None): - """List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. `env`, - if supplied, should be an ``Environment`` instance. If - not supplied, it defaults to all distributions available within any - entry or distribution in the working set. `installer`, if supplied, - will be invoked with each requirement that cannot be met by an - already-installed distribution; it should return a ``Distribution`` or - ``None``. - - Unless `replace_conflicting=True`, raises a VersionConflict exception - if - any requirements are found on the path that have the correct name but - the wrong version. Otherwise, if an `installer` is supplied it will be - invoked to obtain the correct version of the requirement and activate - it. - - `extras` is a list of the extras to be used with these requirements. - This is important because extra requirements may look like `my_req; - extra = "my_extra"`, which would otherwise be interpreted as a purely - optional requirement. Instead, we want to be able to assert that these - requirements are truly required. - """ - - # set up the stack - requirements = list(requirements)[::-1] - # set of processed requirements - processed = {} - # key -> dist - best = {} - to_activate = [] - - req_extras = _ReqExtras() - - # Mapping of requirement to set of distributions that required it; - # useful for reporting info about conflicts. 
- required_by = collections.defaultdict(set) - - while requirements: - # process dependencies breadth-first - req = requirements.pop(0) - if req in processed: - # Ignore cyclic or redundant dependencies - continue - - if not req_extras.markers_pass(req, extras): - continue - - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None or (dist not in req and replace_conflicting): - ws = self - if env is None: - if dist is None: - env = Environment(self.entries) - else: - # Use an empty environment and workingset to avoid - # any further conflicts with the conflicting - # distribution - env = Environment([]) - ws = WorkingSet([]) - dist = best[req.key] = env.best_match( - req, ws, installer, - replace_conflicting=replace_conflicting - ) - if dist is None: - requirers = required_by.get(req, None) - raise DistributionNotFound(req, requirers) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - dependent_req = required_by[req] - raise VersionConflict(dist, req).with_context(dependent_req) - - # push the new requirements onto the stack - new_requirements = dist.requires(req.extras)[::-1] - requirements.extend(new_requirements) - - # Register the new requirements needed by req - for new_requirement in new_requirements: - required_by[new_requirement].add(req.project_name) - req_extras[new_requirement] = req.extras - - processed[req] = True - - # return list of distros to activate - return to_activate - - def find_plugins( - self, plugin_env, full_env=None, installer=None, fallback=True): - """Find all activatable distributions in `plugin_env` - - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - # add plugins+libs to sys.path - map(working_set.add, distributions) - # display errors - print('Could not load', errors) - - The `plugin_env` should be an ``Environment`` instance that contains - only distributions that are in the project's "plugin directory" or - directories. The `full_env`, if supplied, should be an ``Environment`` - contains all currently-available distributions. If `full_env` is not - supplied, one is created automatically from the ``WorkingSet`` this - method is called on, which will typically mean that every directory on - ``sys.path`` will be scanned for distributions. - - `installer` is a standard installer callback as used by the - ``resolve()`` method. The `fallback` flag indicates whether we should - attempt to resolve older versions of a plugin if the newest version - cannot be resolved. - - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` - that were loadable, along with any other distributions that are needed - to resolve their dependencies. `error_info` is a dictionary mapping - unloadable plugin distributions to an exception instance describing the - error that occurred. Usually this will be a ``DistributionNotFound`` or - ``VersionConflict`` instance. 
- """ - - plugin_projects = list(plugin_env) - # scan project names in alphabetic order - plugin_projects.sort() - - error_info = {} - distributions = {} - - if full_env is None: - env = Environment(self.entries) - env += plugin_env - else: - env = full_env + plugin_env - - shadow_set = self.__class__([]) - # put all our entries in shadow_set - list(map(shadow_set.add, self)) - - for project_name in plugin_projects: - - for dist in plugin_env[project_name]: - - req = [dist.as_requirement()] - - try: - resolvees = shadow_set.resolve(req, env, installer) - - except ResolutionError as v: - # save error info - error_info[dist] = v - if fallback: - # try the next older version of project - continue - else: - # give up on this project, keep going - break - - else: - list(map(shadow_set.add, resolvees)) - distributions.update(dict.fromkeys(resolvees)) - - # success, no need to try any more versions of this project - break - - distributions = list(distributions) - distributions.sort() - - return distributions, error_info - - def require(self, *requirements): - """Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. - """ - needed = self.resolve(parse_requirements(requirements)) - - for dist in needed: - self.add(dist) - - return needed - - def subscribe(self, callback, existing=True): - """Invoke `callback` for all distributions - - If `existing=True` (default), - call on all existing ones, as well. - """ - if callback in self.callbacks: - return - self.callbacks.append(callback) - if not existing: - return - for dist in self: - callback(dist) - - def _added_new(self, dist): - for callback in self.callbacks: - callback(dist) - - def __getstate__(self): - return ( - self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.callbacks[:] - ) - - def __setstate__(self, e_k_b_c): - entries, keys, by_key, callbacks = e_k_b_c - self.entries = entries[:] - self.entry_keys = keys.copy() - self.by_key = by_key.copy() - self.callbacks = callbacks[:] - - -class _ReqExtras(dict): - """ - Map each requirement to the extras that demanded it. - """ - - def markers_pass(self, req, extras=None): - """ - Evaluate markers for req against each extra that - demanded it. - - Return False if the req has a marker and fails - evaluation. Otherwise, return True. - """ - extra_evals = ( - req.marker.evaluate({'extra': extra}) - for extra in self.get(req, ()) + (extras or (None,)) - ) - return not req.marker or any(extra_evals) - - -class Environment: - """Searchable snapshot of distributions on a search path""" - - def __init__( - self, search_path=None, platform=get_supported_platform(), - python=PY_MAJOR): - """Snapshot distributions available on a search path - - Any distributions found on `search_path` are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. - - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'3.6'``); - it defaults to the current version. 
- - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to map *all* distributions, not just those compatible with the - running platform or Python version. - """ - self._distmap = {} - self.platform = platform - self.python = python - self.scan(search_path) - - def can_add(self, dist): - """Is distribution `dist` acceptable for this environment? - - The distribution must match the platform and python version - requirements specified when this environment was created, or False - is returned. - """ - py_compat = ( - self.python is None - or dist.py_version is None - or dist.py_version == self.python - ) - return py_compat and compatible_platforms(dist.platform, self.platform) - - def remove(self, dist): - """Remove `dist` from the environment""" - self._distmap[dist.key].remove(dist) - - def scan(self, search_path=None): - """Scan `search_path` for distributions usable in this environment - - Any distributions found are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. - """ - if search_path is None: - search_path = sys.path - - for item in search_path: - for dist in find_distributions(item): - self.add(dist) - - def __getitem__(self, project_name): - """Return a newest-to-oldest list of distributions for `project_name` - - Uses case-insensitive `project_name` comparison, assuming all the - project's distributions use their project's name converted to all - lowercase as their key. - - """ - distribution_key = project_name.lower() - return self._distmap.get(distribution_key, []) - - def add(self, dist): - """Add `dist` if we ``can_add()`` it and it has not already been added - """ - if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key, []) - if dist not in dists: - dists.append(dist) - dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) - - def best_match( - self, req, working_set, installer=None, replace_conflicting=False): - """Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. (This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) If a suitable distribution - isn't active, this method returns the newest distribution in the - environment that meets the ``Requirement`` in `req`. If no suitable - distribution is found, and `installer` is supplied, then the result of - calling the environment's ``obtain(req, installer)`` method will be - returned. - """ - try: - dist = working_set.find(req) - except VersionConflict: - if not replace_conflicting: - raise - dist = None - if dist is not None: - return dist - for dist in self[req.key]: - if dist in req: - return dist - # try to download/install - return self.obtain(req, installer) - - def obtain(self, requirement, installer=None): - """Obtain a distribution matching `requirement` (e.g. via download) - - Obtain a distro that matches requirement (e.g. via download). In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. 
This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument.""" - if installer is not None: - return installer(requirement) - - def __iter__(self): - """Yield the unique project names of the available distributions""" - for key in self._distmap.keys(): - if self[key]: - yield key - - def __iadd__(self, other): - """In-place addition of a distribution or environment""" - if isinstance(other, Distribution): - self.add(other) - elif isinstance(other, Environment): - for project in other: - for dist in other[project]: - self.add(dist) - else: - raise TypeError("Can't add %r to environment" % (other,)) - return self - - def __add__(self, other): - """Add an environment or distribution to an environment""" - new = self.__class__([], platform=None, python=None) - for env in self, other: - new += env - return new - - -# XXX backward compatibility -AvailableDistributions = Environment - - -class ExtractionError(RuntimeError): - """An error occurred extracting a resource - - The following attributes are available from instances of this exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - """ - - -class ResourceManager: - """Manage resource extraction and packages""" - extraction_path = None - - def __init__(self): - self.cached_files = {} - - def resource_exists(self, package_or_requirement, resource_name): - """Does the named resource exist?""" - return get_provider(package_or_requirement).has_resource(resource_name) - - def resource_isdir(self, package_or_requirement, resource_name): - """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) - - def resource_filename(self, package_or_requirement, resource_name): - """Return a true filesystem path for specified resource""" - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name - ) - - def resource_stream(self, package_or_requirement, resource_name): - """Return a readable file-like object for specified resource""" - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name - ) - - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" - return get_provider(package_or_requirement).get_resource_string( - self, resource_name - ) - - def resource_listdir(self, package_or_requirement, resource_name): - """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) - - def extraction_error(self): - """Give an error message for problems extracting file(s)""" - - old_exc = sys.exc_info()[1] - cache_path = self.extraction_path or get_default_cache() - - tmpl = textwrap.dedent(""" - Can't extract file(s) to egg cache - - The following error occurred while trying to extract file(s) - to the Python egg cache: - - {old_exc} - - The Python egg cache directory is currently set to: - - {cache_path} - - Perhaps your account does not have write access to this directory? - You can change the cache directory by setting the PYTHON_EGG_CACHE - environment variable to point to an accessible directory. 
- """).lstrip() - err = ExtractionError(tmpl.format(**locals())) - err.manager = self - err.cache_path = cache_path - err.original_error = old_exc - raise err - - def get_cache_path(self, archive_name, names=()): - """Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - """ - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name + '-tmp', *names) - try: - _bypass_ensure_directory(target_path) - except Exception: - self.extraction_error() - - self._warn_unsafe_extraction_path(extract_path) - - self.cached_files[target_path] = 1 - return target_path - - @staticmethod - def _warn_unsafe_extraction_path(path): - """ - If the default extraction path is overridden and set to an insecure - location, such as /tmp, it opens up an opportunity for an attacker to - replace an extracted file with an unauthorized payload. Warn the user - if a known insecure location is used. - - See Distribute #375 for more details. - """ - if os.name == 'nt' and not path.startswith(os.environ['windir']): - # On Windows, permissions are generally restrictive by default - # and temp directories are not writable by other users, so - # bypass the warning. - return - mode = os.stat(path).st_mode - if mode & stat.S_IWOTH or mode & stat.S_IWGRP: - msg = ( - "%s is writable by group/others and vulnerable to attack " - "when " - "used with get_resource_filename. Consider a more secure " - "location (set with .set_extraction_path or the " - "PYTHON_EGG_CACHE environment variable)." % path - ) - warnings.warn(msg, UserWarning) - - def postprocess(self, tempname, filename): - """Perform any platform-specific postprocessing of `tempname` - - This is where Mac header rewrites should be done; other platforms don't - have anything special they should do. - - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. - - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. - """ - - if os.name == 'posix': - # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 - os.chmod(tempname, mode) - - def set_extraction_path(self, path): - """Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which - is based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the ``IResourceProvider``. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. 
There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. - - (Note: you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``.) - """ - if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) - - self.extraction_path = path - - def cleanup_resources(self, force=False): - """ - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. This method is not - automatically called; you must call it explicitly or register it as an - ``atexit`` function if you wish to ensure cleanup of a temporary - directory used for extractions. - """ - # XXX - - -def get_default_cache(): - """ - Return the ``PYTHON_EGG_CACHE`` environment variable - or a platform-relevant user cache dir for an app - named "Python-Eggs". - """ - return ( - os.environ.get('PYTHON_EGG_CACHE') - or appdirs.user_cache_dir(appname='Python-Eggs') - ) - - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """ - Convert an arbitrary string to a standard version string - """ - try: - # normalize the version - return str(packaging.version.Version(version)) - except packaging.version.InvalidVersion: - version = version.replace(' ', '.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def safe_extra(extra): - """Convert an arbitrary string to a standard 'extra' name - - Any runs of non-alphanumeric characters are replaced with a single '_', - and the result is always lowercased. - """ - return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. - """ - return name.replace('-', '_') - - -def invalid_marker(text): - """ - Validate text as a PEP 508 environment marker; return an exception - if invalid or False otherwise. - """ - try: - evaluate_marker(text) - except SyntaxError as e: - e.filename = None - e.lineno = None - return e - return False - - -def evaluate_marker(text, extra=None): - """ - Evaluate a PEP 508 environment marker. - Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. - - This implementation uses the 'pyparsing' module. 
- """ - try: - marker = packaging.markers.Marker(text) - return marker.evaluate() - except packaging.markers.InvalidMarker as e: - raise SyntaxError(e) - - -class NullProvider: - """Try to implement resources and metadata for arbitrary PEP 302 loaders""" - - egg_name = None - egg_info = None - loader = None - - def __init__(self, module): - self.loader = getattr(module, '__loader__', None) - self.module_path = os.path.dirname(getattr(module, '__file__', '')) - - def get_resource_filename(self, manager, resource_name): - return self._fn(self.module_path, resource_name) - - def get_resource_stream(self, manager, resource_name): - return io.BytesIO(self.get_resource_string(manager, resource_name)) - - def get_resource_string(self, manager, resource_name): - return self._get(self._fn(self.module_path, resource_name)) - - def has_resource(self, resource_name): - return self._has(self._fn(self.module_path, resource_name)) - - def _get_metadata_path(self, name): - return self._fn(self.egg_info, name) - - def has_metadata(self, name): - if not self.egg_info: - return self.egg_info - - path = self._get_metadata_path(name) - return self._has(path) - - def get_metadata(self, name): - if not self.egg_info: - return "" - path = self._get_metadata_path(name) - value = self._get(path) - if six.PY2: - return value - try: - return value.decode('utf-8') - except UnicodeDecodeError as exc: - # Include the path in the error message to simplify - # troubleshooting, and without changing the exception type. - exc.reason += ' in {} file at path: {}'.format(name, path) - raise - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - def resource_isdir(self, resource_name): - return self._isdir(self._fn(self.module_path, resource_name)) - - def metadata_isdir(self, name): - return self.egg_info and self._isdir(self._fn(self.egg_info, name)) - - def resource_listdir(self, resource_name): - return self._listdir(self._fn(self.module_path, resource_name)) - - def metadata_listdir(self, name): - if self.egg_info: - return self._listdir(self._fn(self.egg_info, name)) - return [] - - def run_script(self, script_name, namespace): - script = 'scripts/' + script_name - if not self.has_metadata(script): - raise ResolutionError( - "Script {script!r} not found in metadata at {self.egg_info!r}" - .format(**locals()), - ) - script_text = self.get_metadata(script).replace('\r\n', '\n') - script_text = script_text.replace('\r', '\n') - script_filename = self._fn(self.egg_info, script) - namespace['__file__'] = script_filename - if os.path.exists(script_filename): - source = open(script_filename).read() - code = compile(source, script_filename, 'exec') - exec(code, namespace, namespace) - else: - from linecache import cache - cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename - ) - script_code = compile(script_text, script_filename, 'exec') - exec(script_code, namespace, namespace) - - def _has(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _isdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _listdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _fn(self, base, resource_name): - self._validate_resource_path(resource_name) - if resource_name: - return os.path.join(base, *resource_name.split('/')) - return base - - @staticmethod - def 
_validate_resource_path(path): - """ - Validate the resource paths according to the docs. - https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access - - >>> warned = getfixture('recwarn') - >>> warnings.simplefilter('always') - >>> vrp = NullProvider._validate_resource_path - >>> vrp('foo/bar.txt') - >>> bool(warned) - False - >>> vrp('../foo/bar.txt') - >>> bool(warned) - True - >>> warned.clear() - >>> vrp('/foo/bar.txt') - >>> bool(warned) - True - >>> vrp('foo/../../bar.txt') - >>> bool(warned) - True - >>> warned.clear() - >>> vrp('foo/f../bar.txt') - >>> bool(warned) - False - - Windows path separators are straight-up disallowed. - >>> vrp(r'\\foo/bar.txt') - Traceback (most recent call last): - ... - ValueError: Use of .. or absolute path in a resource path \ -is not allowed. - - >>> vrp(r'C:\\foo/bar.txt') - Traceback (most recent call last): - ... - ValueError: Use of .. or absolute path in a resource path \ -is not allowed. - - Blank values are allowed - - >>> vrp('') - >>> bool(warned) - False - - Non-string values are not. - - >>> vrp(None) - Traceback (most recent call last): - ... - AttributeError: ... - """ - invalid = ( - os.path.pardir in path.split(posixpath.sep) or - posixpath.isabs(path) or - ntpath.isabs(path) - ) - if not invalid: - return - - msg = "Use of .. or absolute path in a resource path is not allowed." - - # Aggressively disallow Windows absolute paths - if ntpath.isabs(path) and not posixpath.isabs(path): - raise ValueError(msg) - - # for compatibility, warn; in future - # raise ValueError(msg) - warnings.warn( - msg[:-1] + " and will raise exceptions in a future release.", - DeprecationWarning, - stacklevel=4, - ) - - def _get(self, path): - if hasattr(self.loader, 'get_data'): - return self.loader.get_data(path) - raise NotImplementedError( - "Can't perform this operation for loaders without 'get_data()'" - ) - - -register_loader_type(object, NullProvider) - - -class EggProvider(NullProvider): - """Provider based on a virtual filesystem""" - - def __init__(self, module): - NullProvider.__init__(self, module) - self._setup_prefix() - - def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path = self.module_path - old = None - while path != old: - if _is_egg_path(path): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 'EGG-INFO') - self.egg_root = path - break - old = path - path, base = os.path.split(path) - - -class DefaultProvider(EggProvider): - """Provides access to package resources in the filesystem""" - - def _has(self, path): - return os.path.exists(path) - - def _isdir(self, path): - return os.path.isdir(path) - - def _listdir(self, path): - return os.listdir(path) - - def get_resource_stream(self, manager, resource_name): - return open(self._fn(self.module_path, resource_name), 'rb') - - def _get(self, path): - with open(path, 'rb') as stream: - return stream.read() - - @classmethod - def _register(cls): - loader_names = 'SourceFileLoader', 'SourcelessFileLoader', - for name in loader_names: - loader_cls = getattr(importlib_machinery, name, type(None)) - register_loader_type(loader_cls, cls) - - -DefaultProvider._register() - - -class EmptyProvider(NullProvider): - """Provider that returns nothing for all requests""" - - module_path = None - - _isdir = _has = lambda self, path: False - - def _get(self, path): - return '' - - def _listdir(self, path): - return [] - - def 
__init__(self): - pass - - -empty_provider = EmptyProvider() - - -class ZipManifests(dict): - """ - zip manifest builder - """ - - @classmethod - def build(cls, path): - """ - Build a dictionary similar to the zipimport directory - caches, except instead of tuples, store ZipInfo objects. - - Use a platform-specific path separator (os.sep) for the path keys - for compatibility with pypy on Windows. - """ - with zipfile.ZipFile(path) as zfile: - items = ( - ( - name.replace('/', os.sep), - zfile.getinfo(name), - ) - for name in zfile.namelist() - ) - return dict(items) - - load = build - - -class MemoizedZipManifests(ZipManifests): - """ - Memoized zipfile manifests. - """ - manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') - - def load(self, path): - """ - Load a manifest at path or return a suitable manifest already loaded. - """ - path = os.path.normpath(path) - mtime = os.stat(path).st_mtime - - if path not in self or self[path].mtime != mtime: - manifest = self.build(path) - self[path] = self.manifest_mod(manifest, mtime) - - return self[path].manifest - - -class ZipProvider(EggProvider): - """Resource support for zips and eggs""" - - eagers = None - _zip_manifests = MemoizedZipManifests() - - def __init__(self, module): - EggProvider.__init__(self, module) - self.zip_pre = self.loader.archive + os.sep - - def _zipinfo_name(self, fspath): - # Convert a virtual filename (full path to file) into a zipfile subpath - # usable with the zipimport directory cache for our target archive - fspath = fspath.rstrip(os.sep) - if fspath == self.loader.archive: - return '' - if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.zip_pre) - ) - - def _parts(self, zip_path): - # Convert a zipfile subpath into an egg-relative path part list. 
- # pseudo-fs path - fspath = self.zip_pre + zip_path - if fspath.startswith(self.egg_root + os.sep): - return fspath[len(self.egg_root) + 1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.egg_root) - ) - - @property - def zipinfo(self): - return self._zip_manifests.load(self.loader.archive) - - def get_resource_filename(self, manager, resource_name): - if not self.egg_name: - raise NotImplementedError( - "resource_filename() only supported for .egg, not .zip" - ) - # no need to lock for extraction, since we use temp names - zip_path = self._resource_to_zip(resource_name) - eagers = self._get_eager_resources() - if '/'.join(self._parts(zip_path)) in eagers: - for name in eagers: - self._extract_resource(manager, self._eager_to_zip(name)) - return self._extract_resource(manager, zip_path) - - @staticmethod - def _get_date_and_size(zip_stat): - size = zip_stat.file_size - # ymdhms+wday, yday, dst - date_time = zip_stat.date_time + (0, 0, -1) - # 1980 offset already done - timestamp = time.mktime(date_time) - return timestamp, size - - def _extract_resource(self, manager, zip_path): - - if zip_path in self._index(): - for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) - # return the extracted directory name - return os.path.dirname(last) - - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - - if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - try: - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) - ) - - if self._is_current(real_path, zip_path): - return real_path - - outf, tmpnam = _mkstemp( - ".$extract", - dir=os.path.dirname(real_path), - ) - os.write(outf, self.loader.get_data(zip_path)) - os.close(outf) - utime(tmpnam, (timestamp, timestamp)) - manager.postprocess(tmpnam, real_path) - - try: - rename(tmpnam, real_path) - - except os.error: - if os.path.isfile(real_path): - if self._is_current(real_path, zip_path): - # the file became current since it was checked above, - # so proceed. 
- return real_path - # Windows, del old file and retry - elif os.name == 'nt': - unlink(real_path) - rename(tmpnam, real_path) - return real_path - raise - - except os.error: - # report a user-friendly error - manager.extraction_error() - - return real_path - - def _is_current(self, file_path, zip_path): - """ - Return True if the file_path is current for this zip_path - """ - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - if not os.path.isfile(file_path): - return False - stat = os.stat(file_path) - if stat.st_size != size or stat.st_mtime != timestamp: - return False - # check that the contents match - zip_contents = self.loader.get_data(zip_path) - with open(file_path, 'rb') as f: - file_contents = f.read() - return zip_contents == file_contents - - def _get_eager_resources(self): - if self.eagers is None: - eagers = [] - for name in ('native_libs.txt', 'eager_resources.txt'): - if self.has_metadata(name): - eagers.extend(self.get_metadata_lines(name)) - self.eagers = eagers - return self.eagers - - def _index(self): - try: - return self._dirindex - except AttributeError: - ind = {} - for path in self.zipinfo: - parts = path.split(os.sep) - while parts: - parent = os.sep.join(parts[:-1]) - if parent in ind: - ind[parent].append(parts[-1]) - break - else: - ind[parent] = [parts.pop()] - self._dirindex = ind - return ind - - def _has(self, fspath): - zip_path = self._zipinfo_name(fspath) - return zip_path in self.zipinfo or zip_path in self._index() - - def _isdir(self, fspath): - return self._zipinfo_name(fspath) in self._index() - - def _listdir(self, fspath): - return list(self._index().get(self._zipinfo_name(fspath), ())) - - def _eager_to_zip(self, resource_name): - return self._zipinfo_name(self._fn(self.egg_root, resource_name)) - - def _resource_to_zip(self, resource_name): - return self._zipinfo_name(self._fn(self.module_path, resource_name)) - - -register_loader_type(zipimport.zipimporter, ZipProvider) - - -class FileMetadata(EmptyProvider): - """Metadata handler for standalone PKG-INFO files - - Usage:: - - metadata = FileMetadata("/path/to/PKG-INFO") - - This provider rejects all data and metadata requests except for PKG-INFO, - which is treated as existing, and will be the contents of the file at - the provided location. 
- """ - - def __init__(self, path): - self.path = path - - def _get_metadata_path(self, name): - return self.path - - def has_metadata(self, name): - return name == 'PKG-INFO' and os.path.isfile(self.path) - - def get_metadata(self, name): - if name != 'PKG-INFO': - raise KeyError("No metadata except PKG-INFO is available") - - with io.open(self.path, encoding='utf-8', errors="replace") as f: - metadata = f.read() - self._warn_on_replacement(metadata) - return metadata - - def _warn_on_replacement(self, metadata): - # Python 2.7 compat for: replacement_char = '�' - replacement_char = b'\xef\xbf\xbd'.decode('utf-8') - if replacement_char in metadata: - tmpl = "{self.path} could not be properly decoded in UTF-8" - msg = tmpl.format(**locals()) - warnings.warn(msg) - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - -class PathMetadata(DefaultProvider): - """Metadata provider for egg directories - - Usage:: - - # Development eggs: - - egg_info = "/path/to/PackageName.egg-info" - base_dir = os.path.dirname(egg_info) - metadata = PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir, project_name=dist_name, metadata=metadata) - - # Unpacked egg directories: - - egg_path = "/path/to/PackageName-ver-pyver-etc.egg" - metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - """ - - def __init__(self, path, egg_info): - self.module_path = path - self.egg_info = egg_info - - -class EggMetadata(ZipProvider): - """Metadata provider for .egg files""" - - def __init__(self, importer): - """Create a metadata provider from a zipimporter""" - - self.zip_pre = importer.archive + os.sep - self.loader = importer - if importer.prefix: - self.module_path = os.path.join(importer.archive, importer.prefix) - else: - self.module_path = importer.archive - self._setup_prefix() - - -_declare_state('dict', _distribution_finders={}) - - -def register_finder(importer_type, distribution_finder): - """Register `distribution_finder` to find distributions in sys.path items - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `distribution_finder` is a callable that, passed a path - item and the importer instance, yields ``Distribution`` instances found on - that path item. See ``pkg_resources.find_on_path`` for an example.""" - _distribution_finders[importer_type] = distribution_finder - - -def find_distributions(path_item, only=False): - """Yield distributions accessible via `path_item`""" - importer = get_importer(path_item) - finder = _find_adapter(_distribution_finders, importer) - return finder(importer, path_item, only) - - -def find_eggs_in_zip(importer, path_item, only=False): - """ - Find eggs in zip files; possibly multiple nested eggs. 
- """ - if importer.archive.endswith('.whl'): - # wheels are not supported with this finder - # they don't have PKG-INFO metadata, and won't ever contain eggs - return - metadata = EggMetadata(importer) - if metadata.has_metadata('PKG-INFO'): - yield Distribution.from_filename(path_item, metadata=metadata) - if only: - # don't yield nested distros - return - for subitem in metadata.resource_listdir(''): - if _is_egg_path(subitem): - subpath = os.path.join(path_item, subitem) - dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) - for dist in dists: - yield dist - elif subitem.lower().endswith('.dist-info'): - subpath = os.path.join(path_item, subitem) - submeta = EggMetadata(zipimport.zipimporter(subpath)) - submeta.egg_info = subpath - yield Distribution.from_location(path_item, subitem, submeta) - - -register_finder(zipimport.zipimporter, find_eggs_in_zip) - - -def find_nothing(importer, path_item, only=False): - return () - - -register_finder(object, find_nothing) - - -def _by_version_descending(names): - """ - Given a list of filenames, return them in descending order - by version number. - - >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' - >>> _by_version_descending(names) - ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] - """ - def _by_version(name): - """ - Parse each component of the filename - """ - name, ext = os.path.splitext(name) - parts = itertools.chain(name.split('-'), [ext]) - return [packaging.version.parse(part) for part in parts] - - return sorted(names, key=_by_version, reverse=True) - - -def find_on_path(importer, path_item, only=False): - """Yield distributions accessible on a sys.path directory""" - path_item = _normalize_cached(path_item) - - if _is_unpacked_egg(path_item): - yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item, 'EGG-INFO') - ) - ) - return - - entries = safe_listdir(path_item) - - # for performance, before sorting by version, - # screen entries for only those that will yield - # distributions - filtered = ( - entry - for entry in entries - if dist_factory(path_item, entry, only) - ) - - # scan for .egg and .egg-info in directory - path_item_entries = _by_version_descending(filtered) - for entry in path_item_entries: - fullpath = os.path.join(path_item, entry) - factory = dist_factory(path_item, entry, only) - for dist in factory(fullpath): - yield dist - - -def dist_factory(path_item, entry, only): - """ - Return a dist_factory for a path_item and entry - """ - lower = entry.lower() - is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info'))) - return ( - distributions_from_metadata - if is_meta else - find_distributions - if not only and _is_egg_path(entry) else - resolve_egg_link - if not only and lower.endswith('.egg-link') else - NoDists() - ) - - -class NoDists: - """ - >>> bool(NoDists()) - False - - >>> list(NoDists()('anything')) - [] - """ - def __bool__(self): - return False - if six.PY2: - __nonzero__ = __bool__ - - def __call__(self, fullpath): - return iter(()) - - -def safe_listdir(path): - """ - Attempt to list contents of path, but suppress some exceptions. 
- """ - try: - return os.listdir(path) - except (PermissionError, NotADirectoryError): - pass - except OSError as e: - # Ignore the directory if does not exist, not a directory or - # permission denied - ignorable = ( - e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT) - # Python 2 on Windows needs to be handled this way :( - or getattr(e, "winerror", None) == 267 - ) - if not ignorable: - raise - return () - - -def distributions_from_metadata(path): - root = os.path.dirname(path) - if os.path.isdir(path): - if len(os.listdir(path)) == 0: - # empty metadata dir; skip - return - metadata = PathMetadata(root, path) - else: - metadata = FileMetadata(path) - entry = os.path.basename(path) - yield Distribution.from_location( - root, entry, metadata, precedence=DEVELOP_DIST, - ) - - -def non_empty_lines(path): - """ - Yield non-empty lines from file at path - """ - with open(path) as f: - for line in f: - line = line.strip() - if line: - yield line - - -def resolve_egg_link(path): - """ - Given a path to an .egg-link, resolve distributions - present in the referenced path. - """ - referenced_paths = non_empty_lines(path) - resolved_paths = ( - os.path.join(os.path.dirname(path), ref) - for ref in referenced_paths - ) - dist_groups = map(find_distributions, resolved_paths) - return next(dist_groups, ()) - - -register_finder(pkgutil.ImpImporter, find_on_path) - -if hasattr(importlib_machinery, 'FileFinder'): - register_finder(importlib_machinery.FileFinder, find_on_path) - -_declare_state('dict', _namespace_handlers={}) -_declare_state('dict', _namespace_packages={}) - - -def register_namespace_handler(importer_type, namespace_handler): - """Register `namespace_handler` to declare namespace packages - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `namespace_handler` is a callable like this:: - - def namespace_handler(importer, path_entry, moduleName, module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the importer object has already - agreed that it can handle the relevant path item, and they should only - return a subpath if the module __path__ does not already contain an - equivalent subpath. For an example namespace handler, see - ``pkg_resources.file_ns_handler``. 
- """ - _namespace_handlers[importer_type] = namespace_handler - - -def _handle_ns(packageName, path_item): - """Ensure that named package includes a subpath of path_item (if needed)""" - - importer = get_importer(path_item) - if importer is None: - return None - - # capture warnings due to #1111 - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - loader = importer.find_module(packageName) - - if loader is None: - return None - module = sys.modules.get(packageName) - if module is None: - module = sys.modules[packageName] = types.ModuleType(packageName) - module.__path__ = [] - _set_parent_ns(packageName) - elif not hasattr(module, '__path__'): - raise TypeError("Not a package:", packageName) - handler = _find_adapter(_namespace_handlers, importer) - subpath = handler(importer, path_item, packageName, module) - if subpath is not None: - path = module.__path__ - path.append(subpath) - loader.load_module(packageName) - _rebuild_mod_path(path, packageName, module) - return subpath - - -def _rebuild_mod_path(orig_path, package_name, module): - """ - Rebuild module.__path__ ensuring that all entries are ordered - corresponding to their sys.path order - """ - sys_path = [_normalize_cached(p) for p in sys.path] - - def safe_sys_path_index(entry): - """ - Workaround for #520 and #513. - """ - try: - return sys_path.index(entry) - except ValueError: - return float('inf') - - def position_in_sys_path(path): - """ - Return the ordinal of the path based on its position in sys.path - """ - path_parts = path.split(os.sep) - module_parts = package_name.count('.') + 1 - parts = path_parts[:-module_parts] - return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) - - new_path = sorted(orig_path, key=position_in_sys_path) - new_path = [_normalize_cached(p) for p in new_path] - - if isinstance(module.__path__, list): - module.__path__[:] = new_path - else: - module.__path__ = new_path - - -def declare_namespace(packageName): - """Declare that package 'packageName' is a namespace package""" - - _imp.acquire_lock() - try: - if packageName in _namespace_packages: - return - - path = sys.path - parent, _, _ = packageName.rpartition('.') - - if parent: - declare_namespace(parent) - if parent not in _namespace_packages: - __import__(parent) - try: - path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) - - # Track what packages are namespaces, so when new path items are added, - # they can be updated - _namespace_packages.setdefault(parent or None, []).append(packageName) - _namespace_packages.setdefault(packageName, []) - - for path_item in path: - # Ensure all the parent's path items are reflected in the child, - # if they apply - _handle_ns(packageName, path_item) - - finally: - _imp.release_lock() - - -def fixup_namespace_packages(path_item, parent=None): - """Ensure that previously-declared namespace packages include path_item""" - _imp.acquire_lock() - try: - for package in _namespace_packages.get(parent, ()): - subpath = _handle_ns(package, path_item) - if subpath: - fixup_namespace_packages(subpath, package) - finally: - _imp.release_lock() - - -def file_ns_handler(importer, path_item, packageName, module): - """Compute an ns-package subpath for a filesystem or zipfile importer""" - - subpath = os.path.join(path_item, packageName.split('.')[-1]) - normalized = _normalize_cached(subpath) - for item in module.__path__: - if _normalize_cached(item) == normalized: - break - else: - # Only return the path if it's not already there - 
return subpath - - -register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) -register_namespace_handler(zipimport.zipimporter, file_ns_handler) - -if hasattr(importlib_machinery, 'FileFinder'): - register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) - - -def null_ns_handler(importer, path_item, packageName, module): - return None - - -register_namespace_handler(object, null_ns_handler) - - -def normalize_path(filename): - """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename)))) - - -def _cygwin_patch(filename): # pragma: nocover - """ - Contrary to POSIX 2008, on Cygwin, getcwd (3) contains - symlink components. Using - os.path.abspath() works around this limitation. A fix in os.getcwd() - would probably better, in Cygwin even more so, except - that this seems to be by design... - """ - return os.path.abspath(filename) if sys.platform == 'cygwin' else filename - - -def _normalize_cached(filename, _cache={}): - try: - return _cache[filename] - except KeyError: - _cache[filename] = result = normalize_path(filename) - return result - - -def _is_egg_path(path): - """ - Determine if given path appears to be an egg. - """ - return path.lower().endswith('.egg') - - -def _is_unpacked_egg(path): - """ - Determine if given path appears to be an unpacked egg. - """ - return ( - _is_egg_path(path) and - os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) - ) - - -def _set_parent_ns(packageName): - parts = packageName.split('.') - name = parts.pop() - if parts: - parent = '.'.join(parts) - setattr(sys.modules[parent], name, sys.modules[packageName]) - - -def yield_lines(strs): - """Yield non-empty/non-comment lines of a string or sequence""" - if isinstance(strs, six.string_types): - for s in strs.splitlines(): - s = s.strip() - # skip blank lines/comments - if s and not s.startswith('#'): - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - - -MODULE = re.compile(r"\w+(\.\w+)*$").match -EGG_NAME = re.compile( - r""" - (?P<name>[^-]+) ( - -(?P<ver>[^-]+) ( - -py(?P<pyver>[^-]+) ( - -(?P<plat>.+) - )? - )? - )? - """, - re.VERBOSE | re.IGNORECASE, -).match - - -class EntryPoint: - """Object representing an advertised importable object""" - - def __init__(self, name, module_name, attrs=(), extras=(), dist=None): - if not MODULE(module_name): - raise ValueError("Invalid module name", module_name) - self.name = name - self.module_name = module_name - self.attrs = tuple(attrs) - self.extras = tuple(extras) - self.dist = dist - - def __str__(self): - s = "%s = %s" % (self.name, self.module_name) - if self.attrs: - s += ':' + '.'.join(self.attrs) - if self.extras: - s += ' [%s]' % ','.join(self.extras) - return s - - def __repr__(self): - return "EntryPoint.parse(%r)" % str(self) - - def load(self, require=True, *args, **kwargs): - """ - Require packages for this EntryPoint, then resolve it. - """ - if not require or args or kwargs: - warnings.warn( - "Parameters to load are deprecated. Call .resolve and " - ".require separately.", - PkgResourcesDeprecationWarning, - stacklevel=2, - ) - if require: - self.require(*args, **kwargs) - return self.resolve() - - def resolve(self): - """ - Resolve the entry point from its module and attrs. 
- """ - module = __import__(self.module_name, fromlist=['__name__'], level=0) - try: - return functools.reduce(getattr, self.attrs, module) - except AttributeError as exc: - raise ImportError(str(exc)) - - def require(self, env=None, installer=None): - if self.extras and not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) - - # Get the requirements for this entry point with all its extras and - # then resolve them. We have to pass `extras` along when resolving so - # that the working set knows what extras we want. Otherwise, for - # dist-info distributions, the working set will assume that the - # requirements for that extra are purely optional and skip over them. - reqs = self.dist.requires(self.extras) - items = working_set.resolve(reqs, env, installer, extras=self.extras) - list(map(working_set.add, items)) - - pattern = re.compile( - r'\s*' - r'(?P<name>.+?)\s*' - r'=\s*' - r'(?P<module>[\w.]+)\s*' - r'(:\s*(?P<attr>[\w.]+))?\s*' - r'(?P<extras>\[.*\])?\s*$' - ) - - @classmethod - def parse(cls, src, dist=None): - """Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1, extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional - """ - m = cls.pattern.match(src) - if not m: - msg = "EntryPoint must be in 'name=module:attrs [extras]' format" - raise ValueError(msg, src) - res = m.groupdict() - extras = cls._parse_extras(res['extras']) - attrs = res['attr'].split('.') if res['attr'] else () - return cls(res['name'], res['module'], attrs, extras, dist) - - @classmethod - def _parse_extras(cls, extras_spec): - if not extras_spec: - return () - req = Requirement.parse('x' + extras_spec) - if req.specs: - raise ValueError() - return req.extras - - @classmethod - def parse_group(cls, group, lines, dist=None): - """Parse an entry point group""" - if not MODULE(group): - raise ValueError("Invalid group name", group) - this = {} - for line in yield_lines(lines): - ep = cls.parse(line, dist) - if ep.name in this: - raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name] = ep - return this - - @classmethod - def parse_map(cls, data, dist=None): - """Parse a map of entry point groups""" - if isinstance(data, dict): - data = data.items() - else: - data = split_sections(data) - maps = {} - for group, lines in data: - if group is None: - if not lines: - continue - raise ValueError("Entry points must be listed in groups") - group = group.strip() - if group in maps: - raise ValueError("Duplicate group name", group) - maps[group] = cls.parse_group(group, lines, dist) - return maps - - -def _remove_md5_fragment(location): - if not location: - return '' - parsed = urllib.parse.urlparse(location) - if parsed[-1].startswith('md5='): - return urllib.parse.urlunparse(parsed[:-1] + ('',)) - return location - - -def _version_from_file(lines): - """ - Given an iterable of lines from a Metadata file, return - the value of the Version field, if present, or None otherwise. 
- """ - def is_version_line(line): - return line.lower().startswith('version:') - version_lines = filter(is_version_line, lines) - line = next(iter(version_lines), '') - _, _, value = line.partition(':') - return safe_version(value.strip()) or None - - -class Distribution: - """Wrap an actual or potential sys.path entry w/metadata""" - PKG_INFO = 'PKG-INFO' - - def __init__( - self, location=None, metadata=None, project_name=None, - version=None, py_version=PY_MAJOR, platform=None, - precedence=EGG_DIST): - self.project_name = safe_name(project_name or 'Unknown') - if version is not None: - self._version = safe_version(version) - self.py_version = py_version - self.platform = platform - self.location = location - self.precedence = precedence - self._provider = metadata or empty_provider - - @classmethod - def from_location(cls, location, basename, metadata=None, **kw): - project_name, version, py_version, platform = [None] * 4 - basename, ext = os.path.splitext(basename) - if ext.lower() in _distributionImpl: - cls = _distributionImpl[ext.lower()] - - match = EGG_NAME(basename) - if match: - project_name, version, py_version, platform = match.group( - 'name', 'ver', 'pyver', 'plat' - ) - return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw - )._reload_version() - - def _reload_version(self): - return self - - @property - def hashcmp(self): - return ( - self.parsed_version, - self.precedence, - self.key, - _remove_md5_fragment(self.location), - self.py_version or '', - self.platform or '', - ) - - def __hash__(self): - return hash(self.hashcmp) - - def __lt__(self, other): - return self.hashcmp < other.hashcmp - - def __le__(self, other): - return self.hashcmp <= other.hashcmp - - def __gt__(self, other): - return self.hashcmp > other.hashcmp - - def __ge__(self, other): - return self.hashcmp >= other.hashcmp - - def __eq__(self, other): - if not isinstance(other, self.__class__): - # It's not a Distribution, so they are not equal - return False - return self.hashcmp == other.hashcmp - - def __ne__(self, other): - return not self == other - - # These properties have to be lazy so that we don't have to load any - # metadata until/unless it's actually needed. (i.e., some distributions - # may not know their name or version without loading PKG-INFO) - - @property - def key(self): - try: - return self._key - except AttributeError: - self._key = key = self.project_name.lower() - return key - - @property - def parsed_version(self): - if not hasattr(self, "_parsed_version"): - self._parsed_version = parse_version(self.version) - - return self._parsed_version - - def _warn_legacy_version(self): - LV = packaging.version.LegacyVersion - is_legacy = isinstance(self._parsed_version, LV) - if not is_legacy: - return - - # While an empty version is technically a legacy version and - # is not a valid PEP 440 version, it's also unlikely to - # actually come from someone and instead it is more likely that - # it comes from setuptools attempting to parse a filename and - # including it in the list. So for that we'll gate this warning - # on if the version is anything at all or not. - if not self.version: - return - - tmpl = textwrap.dedent(""" - '{project_name} ({version})' is being parsed as a legacy, - non PEP 440, - version. You may find odd behavior and sort order. - In particular it will be sorted as less than 0.0. It - is recommended to migrate to PEP 440 compatible - versions. 
- """).strip().replace('\n', ' ') - - warnings.warn(tmpl.format(**vars(self)), PEP440Warning) - - @property - def version(self): - try: - return self._version - except AttributeError: - version = self._get_version() - if version is None: - path = self._get_metadata_path_for_display(self.PKG_INFO) - msg = ( - "Missing 'Version:' header and/or {} file at path: {}" - ).format(self.PKG_INFO, path) - raise ValueError(msg, self) - - return version - - @property - def _dep_map(self): - """ - A map of extra to its list of (direct) requirements - for this distribution, including the null extra. - """ - try: - return self.__dep_map - except AttributeError: - self.__dep_map = self._filter_extras(self._build_dep_map()) - return self.__dep_map - - @staticmethod - def _filter_extras(dm): - """ - Given a mapping of extras to dependencies, strip off - environment markers and filter out any dependencies - not matching the markers. - """ - for extra in list(filter(None, dm)): - new_extra = extra - reqs = dm.pop(extra) - new_extra, _, marker = extra.partition(':') - fails_marker = marker and ( - invalid_marker(marker) - or not evaluate_marker(marker) - ) - if fails_marker: - reqs = [] - new_extra = safe_extra(new_extra) or None - - dm.setdefault(new_extra, []).extend(reqs) - return dm - - def _build_dep_map(self): - dm = {} - for name in 'requires.txt', 'depends.txt': - for extra, reqs in split_sections(self._get_metadata(name)): - dm.setdefault(extra, []).extend(parse_requirements(reqs)) - return dm - - def requires(self, extras=()): - """List of Requirements needed for this distro if `extras` are used""" - dm = self._dep_map - deps = [] - deps.extend(dm.get(None, ())) - for ext in extras: - try: - deps.extend(dm[safe_extra(ext)]) - except KeyError: - raise UnknownExtra( - "%s has no such extra feature %r" % (self, ext) - ) - return deps - - def _get_metadata_path_for_display(self, name): - """ - Return the path to the given metadata file, if available. - """ - try: - # We need to access _get_metadata_path() on the provider object - # directly rather than through this class's __getattr__() - # since _get_metadata_path() is marked private. - path = self._provider._get_metadata_path(name) - - # Handle exceptions e.g. in case the distribution's metadata - # provider doesn't support _get_metadata_path(). 
- except Exception: - return '[could not detect]' - - return path - - def _get_metadata(self, name): - if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line - - def _get_version(self): - lines = self._get_metadata(self.PKG_INFO) - version = _version_from_file(lines) - - return version - - def activate(self, path=None, replace=False): - """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: - path = sys.path - self.insert_on(path, replace=replace) - if path is sys.path: - fixup_namespace_packages(self.location) - for pkg in self._get_metadata('namespace_packages.txt'): - if pkg in sys.modules: - declare_namespace(pkg) - - def egg_name(self): - """Return what this distribution's standard .egg filename should be""" - filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR - ) - - if self.platform: - filename += '-' + self.platform - return filename - - def __repr__(self): - if self.location: - return "%s (%s)" % (self, self.location) - else: - return str(self) - - def __str__(self): - try: - version = getattr(self, 'version', None) - except ValueError: - version = None - version = version or "[unknown version]" - return "%s %s" % (self.project_name, version) - - def __getattr__(self, attr): - """Delegate all unrecognized public attributes to .metadata provider""" - if attr.startswith('_'): - raise AttributeError(attr) - return getattr(self._provider, attr) - - def __dir__(self): - return list( - set(super(Distribution, self).__dir__()) - | set( - attr for attr in self._provider.__dir__() - if not attr.startswith('_') - ) - ) - - if not hasattr(object, '__dir__'): - # python 2.7 not supported - del __dir__ - - @classmethod - def from_filename(cls, filename, metadata=None, **kw): - return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw - ) - - def as_requirement(self): - """Return a ``Requirement`` that matches this distribution exactly""" - if isinstance(self.parsed_version, packaging.version.Version): - spec = "%s==%s" % (self.project_name, self.parsed_version) - else: - spec = "%s===%s" % (self.project_name, self.parsed_version) - - return Requirement.parse(spec) - - def load_entry_point(self, group, name): - """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group, name) - if ep is None: - raise ImportError("Entry point %r not found" % ((group, name),)) - return ep.load() - - def get_entry_map(self, group=None): - """Return the entry point map for `group`, or the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = self._ep_map = EntryPoint.parse_map( - self._get_metadata('entry_points.txt'), self - ) - if group is not None: - return ep_map.get(group, {}) - return ep_map - - def get_entry_info(self, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return self.get_entry_map(group).get(name) - - def insert_on(self, path, loc=None, replace=False): - """Ensure self.location is on path - - If replace=False (default): - - If location is already in path anywhere, do nothing. - - Else: - - If it's an egg and its parent directory is on path, - insert just ahead of the parent. - - Else: add to the end of path. - If replace=True: - - If location is already on path anywhere (not eggs) - or higher priority than its parent (eggs) - do nothing. 
- - Else: - - If it's an egg and its parent directory is on path, - insert just ahead of the parent, - removing any lower-priority entries. - - Else: add it to the front of path. - """ - - loc = loc or self.location - if not loc: - return - - nloc = _normalize_cached(loc) - bdir = os.path.dirname(nloc) - npath = [(p and _normalize_cached(p) or p) for p in path] - - for p, item in enumerate(npath): - if item == nloc: - if replace: - break - else: - # don't modify path (even removing duplicates) if - # found and not replace - return - elif item == bdir and self.precedence == EGG_DIST: - # if it's an .egg, give it precedence over its directory - # UNLESS it's already been added to sys.path and replace=False - if (not replace) and nloc in npath[p:]: - return - if path is sys.path: - self.check_version_conflict() - path.insert(p, loc) - npath.insert(p, nloc) - break - else: - if path is sys.path: - self.check_version_conflict() - if replace: - path.insert(0, loc) - else: - path.append(loc) - return - - # p is the spot where we found or inserted loc; now remove duplicates - while True: - try: - np = npath.index(nloc, p + 1) - except ValueError: - break - else: - del npath[np], path[np] - # ha! - p = np - - return - - def check_version_conflict(self): - if self.key == 'setuptools': - # ignore the inevitable setuptools self-conflicts :( - return - - nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) - loc = normalize_path(self.location) - for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages): - continue - if modname in ('pkg_resources', 'setuptools', 'site'): - continue - fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): - continue - issue_warning( - "Module %s was already imported from %s, but %s is being added" - " to sys.path" % (modname, fn, self.location), - ) - - def has_version(self): - try: - self.version - except ValueError: - issue_warning("Unbuilt egg for " + repr(self)) - return False - return True - - def clone(self, **kw): - """Copy this distribution, substituting in any changed keyword args""" - names = 'project_name version py_version platform location precedence' - for attr in names.split(): - kw.setdefault(attr, getattr(self, attr, None)) - kw.setdefault('metadata', self._provider) - return self.__class__(**kw) - - @property - def extras(self): - return [dep for dep in self._dep_map if dep] - - -class EggInfoDistribution(Distribution): - def _reload_version(self): - """ - Packages installed by distutils (e.g. numpy or scipy), - which uses an old safe_version, and so - their version numbers can get mangled when - converted to filenames (e.g., 1.11.0.dev0+2329eae to - 1.11.0.dev0_2329eae). These distributions will not be - parsed properly - downstream by Distribution and safe_version, so - take an extra step and try to get the version number from - the metadata file itself instead of the filename. - """ - md_version = self._get_version() - if md_version: - self._version = md_version - return self - - -class DistInfoDistribution(Distribution): - """ - Wrap an actual or potential sys.path entry - w/metadata, .dist-info style. 
- """ - PKG_INFO = 'METADATA' - EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") - - @property - def _parsed_pkg_info(self): - """Parse and cache metadata""" - try: - return self._pkg_info - except AttributeError: - metadata = self.get_metadata(self.PKG_INFO) - self._pkg_info = email.parser.Parser().parsestr(metadata) - return self._pkg_info - - @property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - self.__dep_map = self._compute_dependencies() - return self.__dep_map - - def _compute_dependencies(self): - """Recompute this distribution's dependencies.""" - dm = self.__dep_map = {None: []} - - reqs = [] - # Including any condition expressions - for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: - reqs.extend(parse_requirements(req)) - - def reqs_for_extra(extra): - for req in reqs: - if not req.marker or req.marker.evaluate({'extra': extra}): - yield req - - common = frozenset(reqs_for_extra(None)) - dm[None].extend(common) - - for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: - s_extra = safe_extra(extra.strip()) - dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) - - return dm - - -_distributionImpl = { - '.egg': Distribution, - '.egg-info': EggInfoDistribution, - '.dist-info': DistInfoDistribution, -} - - -def issue_warning(*args, **kw): - level = 1 - g = globals() - try: - # find the first stack frame that is *not* code in - # the pkg_resources module, to use for the warning - while sys._getframe(level).f_globals is g: - level += 1 - except ValueError: - pass - warnings.warn(stacklevel=level + 1, *args, **kw) - - -class RequirementParseError(ValueError): - def __str__(self): - return ' '.join(self.args) - - -def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` - - `strs` must be a string, or a (possibly-nested) iterable thereof. - """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - for line in lines: - # Drop comments -- a hash without a space may be in a URL. - if ' #' in line: - line = line[:line.find(' #')] - # If there is a line continuation, drop it, and append the next line. - if line.endswith('\\'): - line = line[:-2].strip() - try: - line += next(lines) - except StopIteration: - return - yield Requirement(line) - - -class Requirement(packaging.requirements.Requirement): - def __init__(self, requirement_string): - """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - try: - super(Requirement, self).__init__(requirement_string) - except packaging.requirements.InvalidRequirement as e: - raise RequirementParseError(str(e)) - self.unsafe_name = self.name - project_name = safe_name(self.name) - self.project_name, self.key = project_name, project_name.lower() - self.specs = [ - (spec.operator, spec.version) for spec in self.specifier] - self.extras = tuple(map(safe_extra, self.extras)) - self.hashCmp = ( - self.key, - self.specifier, - frozenset(self.extras), - str(self.marker) if self.marker else None, - ) - self.__hash = hash(self.hashCmp) - - def __eq__(self, other): - return ( - isinstance(other, Requirement) and - self.hashCmp == other.hashCmp - ) - - def __ne__(self, other): - return not self == other - - def __contains__(self, item): - if isinstance(item, Distribution): - if item.key != self.key: - return False - - item = item.version - - # Allow prereleases always in order to match the previous behavior of - # this method. 
In the future this should be smarter and follow PEP 440 - # more accurately. - return self.specifier.contains(item, prereleases=True) - - def __hash__(self): - return self.__hash - - def __repr__(self): - return "Requirement.parse(%r)" % str(self) - - @staticmethod - def parse(s): - req, = parse_requirements(s) - return req - - -def _always_object(classes): - """ - Ensure object appears in the mro even - for old-style classes. - """ - if object not in classes: - return classes + (object,) - return classes - - -def _find_adapter(registry, ob): - """Return an adapter factory for `ob` from `registry`""" - types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob)))) - for t in types: - if t in registry: - return registry[t] - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - py31compat.makedirs(dirname, exist_ok=True) - - -def _bypass_ensure_directory(path): - """Sandbox-bypassing version of ensure_directory()""" - if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') - dirname, filename = split(path) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - try: - mkdir(dirname, 0o755) - except FileExistsError: - pass - - -def split_sections(s): - """Split a string or iterable thereof into (section, content) pairs - - Each ``section`` is a stripped version of the section header ("[section]") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any such lines before the first section - header, they're returned in a first ``section`` of ``None``. - """ - section = None - content = [] - for line in yield_lines(s): - if line.startswith("["): - if line.endswith("]"): - if section or content: - yield section, content - section = line[1:-1].strip() - content = [] - else: - raise ValueError("Invalid section heading", line) - else: - content.append(line) - - # wrap up last segment - yield section, content - - -def _mkstemp(*args, **kw): - old_open = os.open - try: - # temporarily bypass sandboxing - os.open = os_open - return tempfile.mkstemp(*args, **kw) - finally: - # and then put it back - os.open = old_open - - -# Silence the PEP440Warning by default, so that end users don't get hit by it -# randomly just because they use pkg_resources. We want to append the rule -# because we want earlier uses of filterwarnings to take precedence over this -# one. -warnings.filterwarnings("ignore", category=PEP440Warning, append=True) - - -# from jaraco.functools 1.3 -def _call_aside(f, *args, **kwargs): - f(*args, **kwargs) - return f - - -@_call_aside -def _initialize(g=globals()): - "Set up global resource manager (deliberately not state-saved)" - manager = ResourceManager() - g['_manager'] = manager - g.update( - (name, getattr(manager, name)) - for name in dir(manager) - if not name.startswith('_') - ) - - -@_call_aside -def _initialize_master_working_set(): - """ - Prepare the master working set and make the ``require()`` - API available. - - This function has explicit effects on the global state - of pkg_resources. It is intended to be invoked once at - the initialization of this module. - - Invocation by other packages is unsupported and done - at their own risk. 
- """ - working_set = WorkingSet._build_master() - _declare_state('object', working_set=working_set) - - require = working_set.require - iter_entry_points = working_set.iter_entry_points - add_activation_listener = working_set.subscribe - run_script = working_set.run_script - # backward compatibility - run_main = run_script - # Activate all distributions already on sys.path with replace=False and - # ensure that all distributions added to the working set in the future - # (e.g. by calling ``require()``) will get activated as well, - # with higher priority (replace=True). - tuple( - dist.activate(replace=False) - for dist in working_set - ) - add_activation_listener( - lambda dist: dist.activate(replace=True), - existing=False, - ) - working_set.entries = [] - # match order - list(map(working_set.add_entry, sys.path)) - globals().update(locals()) - -class PkgResourcesDeprecationWarning(Warning): - """ - Base class for warning about deprecations in ``pkg_resources`` - - This class is not derived from ``DeprecationWarning``, and as such is - visible by default. - """ diff --git a/env/lib/python2.7/site-packages/pkg_resources/__init__.pyc b/env/lib/python2.7/site-packages/pkg_resources/__init__.pyc deleted file mode 100644 index 3bb302d4..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/__init__.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/__init__.pyc deleted file mode 100644 index 157ba6ae..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyc deleted file mode 100644 index 08020c20..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__about__.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__about__.py deleted file mode 100644 index 95d330ef..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__about__.py +++ /dev/null @@ -1,21 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - -__all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "16.8" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD or Apache License, Version 2.0" -__copyright__ = "Copyright 2014-2016 %s" % __author__ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__about__.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__about__.pyc deleted file mode 100644 index b578ecc3..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__about__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__init__.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__init__.py deleted file mode 100644 index 5ee62202..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -from .__about__ import ( - __author__, __copyright__, __email__, __license__, __summary__, __title__, - __uri__, __version__ -) - -__all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", -] diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__init__.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__init__.pyc deleted file mode 100644 index 13bca42b..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.py deleted file mode 100644 index 210bb80b..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.py +++ /dev/null @@ -1,30 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import sys - - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - -# flake8: noqa - -if PY3: - string_types = str, -else: - string_types = basestring, - - -def with_metaclass(meta, *bases): - """ - Create a base class with a metaclass. - """ - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. 
- class metaclass(meta): - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pyc deleted file mode 100644 index e55ea0e7..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.py deleted file mode 100644 index ccc27861..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.py +++ /dev/null @@ -1,68 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - - -class Infinity(object): - - def __repr__(self): - return "Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return False - - def __le__(self, other): - return False - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return True - - def __ge__(self, other): - return True - - def __neg__(self): - return NegativeInfinity - -Infinity = Infinity() - - -class NegativeInfinity(object): - - def __repr__(self): - return "-Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return True - - def __le__(self, other): - return True - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return False - - def __ge__(self, other): - return False - - def __neg__(self): - return Infinity - -NegativeInfinity = NegativeInfinity() diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyc deleted file mode 100644 index 0f24e9cf..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.py deleted file mode 100644 index 892e578e..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.py +++ /dev/null @@ -1,301 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - -import operator -import os -import platform -import sys - -from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd -from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString -from pkg_resources.extern.pyparsing import Literal as L # noqa - -from ._compat import string_types -from .specifiers import Specifier, InvalidSpecifier - - -__all__ = [ - "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", - "Marker", "default_environment", -] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. - """ - - -class Node(object): - - def __init__(self, value): - self.value = value - - def __str__(self): - return str(self.value) - - def __repr__(self): - return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) - - def serialize(self): - raise NotImplementedError - - -class Variable(Node): - - def serialize(self): - return str(self) - - -class Value(Node): - - def serialize(self): - return '"{0}"'.format(self) - - -class Op(Node): - - def serialize(self): - return str(self) - - -VARIABLE = ( - L("implementation_version") | - L("platform_python_implementation") | - L("implementation_name") | - L("python_full_version") | - L("platform_release") | - L("platform_version") | - L("platform_machine") | - L("platform_system") | - L("python_version") | - L("sys_platform") | - L("os_name") | - L("os.name") | # PEP-345 - L("sys.platform") | # PEP-345 - L("platform.version") | # PEP-345 - L("platform.machine") | # PEP-345 - L("platform.python_implementation") | # PEP-345 - L("python_implementation") | # undocumented setuptools legacy - L("extra") -) -ALIASES = { - 'os.name': 'os_name', - 'sys.platform': 'sys_platform', - 'platform.version': 'platform_version', - 'platform.machine': 'platform_machine', - 'platform.python_implementation': 'platform_python_implementation', - 'python_implementation': 'platform_python_implementation' -} -VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) - -VERSION_CMP = ( - L("===") | - L("==") | - L(">=") | - L("<=") | - L("!=") | - L("~=") | - L(">") | - L("<") -) - -MARKER_OP = VERSION_CMP | L("not in") | L("in") -MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) - -MARKER_VALUE = QuotedString("'") | QuotedString('"') -MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) - -BOOLOP = L("and") | L("or") - -MARKER_VAR = VARIABLE | MARKER_VALUE - -MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) -MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) - -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() - -MARKER_EXPR = Forward() -MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) -MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) - -MARKER = stringStart + MARKER_EXPR + stringEnd - - -def _coerce_parse_result(results): - if isinstance(results, ParseResults): - return [_coerce_parse_result(i) for i in results] - else: - return results - - -def _format_marker(marker, first=True): - assert isinstance(marker, (list, tuple, string_types)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself 
it's own list. In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. - if (isinstance(marker, list) and len(marker) == 1 and - isinstance(marker[0], (list, tuple))): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs, op, rhs): - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs) - - oper = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison( - "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) - ) - - return oper(lhs, rhs) - - -_undefined = object() - - -def _get_env(environment, name): - value = environment.get(name, _undefined) - - if value is _undefined: - raise UndefinedEnvironmentName( - "{0!r} does not exist in evaluation environment.".format(name) - ) - - return value - - -def _evaluate_markers(markers, environment): - groups = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, string_types)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - lhs_value = _get_env(environment, lhs.value) - rhs_value = rhs.value - else: - lhs_value = lhs.value - rhs_value = _get_env(environment, rhs.value) - - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info): - version = '{0.major}.{0.minor}.{0.micro}'.format(info) - kind = info.releaselevel - if kind != 'final': - version += kind[0] + str(info.serial) - return version - - -def default_environment(): - if hasattr(sys, 'implementation'): - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - else: - iver = '0' - implementation_name = '' - - return { - "implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": platform.python_version()[:3], - "sys_platform": sys.platform, - } - - -class Marker(object): - - def __init__(self, marker): - try: - self._markers = _coerce_parse_result(MARKER.parseString(marker)) - except ParseException as e: - err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( - marker, marker[e.loc:e.loc + 8]) - raise InvalidMarker(err_str) - - def __str__(self): - return _format_marker(self._markers) - - def __repr__(self): - return "<Marker({0!r})>".format(str(self)) - - def evaluate(self, environment=None): - """Evaluate a marker. - - Return the boolean from evaluating the given marker against the - environment. 
environment is an optional argument to override all or - part of the determined environment. - - The environment is determined from the current Python process. - """ - current_environment = default_environment() - if environment is not None: - current_environment.update(environment) - - return _evaluate_markers(self._markers, current_environment) diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyc deleted file mode 100644 index 97e532a6..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.py deleted file mode 100644 index 0c8c4a38..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.py +++ /dev/null @@ -1,127 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import string -import re - -from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException -from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine -from pkg_resources.extern.pyparsing import Literal as L # noqa -from pkg_resources.extern.six.moves.urllib import parse as urlparse - -from .markers import MARKER_EXPR, Marker -from .specifiers import LegacySpecifier, Specifier, SpecifierSet - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. 
- """ - - -ALPHANUM = Word(string.ascii_letters + string.digits) - -LBRACKET = L("[").suppress() -RBRACKET = L("]").suppress() -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() -COMMA = L(",").suppress() -SEMICOLON = L(";").suppress() -AT = L("@").suppress() - -PUNCTUATION = Word("-_.") -IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) -IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) - -NAME = IDENTIFIER("name") -EXTRA = IDENTIFIER - -URI = Regex(r'[^ ]+')("url") -URL = (AT + URI) - -EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) -EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") - -VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) -VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) - -VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), - joinString=",", adjacent=False)("_raw_spec") -_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') - -VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") -VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) - -MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") -MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start:t._original_end]) -) -MARKER_SEPERATOR = SEMICOLON -MARKER = MARKER_SEPERATOR + MARKER_EXPR - -VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) -URL_AND_MARKER = URL + Optional(MARKER) - -NAMED_REQUIREMENT = \ - NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) - -REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd - - -class Requirement(object): - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? 
- - def __init__(self, requirement_string): - try: - req = REQUIREMENT.parseString(requirement_string) - except ParseException as e: - raise InvalidRequirement( - "Invalid requirement, parse error at \"{0!r}\"".format( - requirement_string[e.loc:e.loc + 8])) - - self.name = req.name - if req.url: - parsed_url = urlparse.urlparse(req.url) - if not (parsed_url.scheme and parsed_url.netloc) or ( - not parsed_url.scheme and not parsed_url.netloc): - raise InvalidRequirement("Invalid URL given") - self.url = req.url - else: - self.url = None - self.extras = set(req.extras.asList() if req.extras else []) - self.specifier = SpecifierSet(req.specifier) - self.marker = req.marker if req.marker else None - - def __str__(self): - parts = [self.name] - - if self.extras: - parts.append("[{0}]".format(",".join(sorted(self.extras)))) - - if self.specifier: - parts.append(str(self.specifier)) - - if self.url: - parts.append("@ {0}".format(self.url)) - - if self.marker: - parts.append("; {0}".format(self.marker)) - - return "".join(parts) - - def __repr__(self): - return "<Requirement({0!r})>".format(str(self)) diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyc deleted file mode 100644 index a4096699..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.py deleted file mode 100644 index 7f5a76cf..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.py +++ /dev/null @@ -1,774 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import abc -import functools -import itertools -import re - -from ._compat import string_types, with_metaclass -from .version import Version, LegacyVersion, parse - - -class InvalidSpecifier(ValueError): - """ - An invalid specifier was found, users should refer to PEP 440. - """ - - -class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): - - @abc.abstractmethod - def __str__(self): - """ - Returns the str representation of this Specifier like object. This - should be representative of the Specifier itself. - """ - - @abc.abstractmethod - def __hash__(self): - """ - Returns a hash value for this Specifier like object. - """ - - @abc.abstractmethod - def __eq__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are equal. - """ - - @abc.abstractmethod - def __ne__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are not equal. - """ - - @abc.abstractproperty - def prereleases(self): - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @prereleases.setter - def prereleases(self, value): - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @abc.abstractmethod - def contains(self, item, prereleases=None): - """ - Determines if the given item is contained within this specifier. 
- """ - - @abc.abstractmethod - def filter(self, iterable, prereleases=None): - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. - """ - - -class _IndividualSpecifier(BaseSpecifier): - - _operators = {} - - def __init__(self, spec="", prereleases=None): - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - - self._spec = ( - match.group("operator").strip(), - match.group("version").strip(), - ) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "<{0}({1!r}{2})>".format( - self.__class__.__name__, - str(self), - pre, - ) - - def __str__(self): - return "{0}{1}".format(*self._spec) - - def __hash__(self): - return hash(self._spec) - - def __eq__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec == other._spec - - def __ne__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec != other._spec - - def _get_operator(self, op): - return getattr(self, "_compare_{0}".format(self._operators[op])) - - def _coerce_version(self, version): - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version - - @property - def operator(self): - return self._spec[0] - - @property - def version(self): - return self._spec[1] - - @property - def prereleases(self): - return self._prereleases - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version or LegacyVersion, this allows us to have - # a shortcut for ``"2.0" in Specifier(">=2") - item = self._coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - return self._get_operator(self.operator)(item, self.version) - - def filter(self, iterable, prereleases=None): - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. - for version in iterable: - parsed_version = self._coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later incase nothing - # else matches this specifier. 
- if (parsed_version.is_prerelease and not - (prereleases or self.prereleases)): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the begining. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -class LegacySpecifier(_IndividualSpecifier): - - _regex_str = ( - r""" - (?P<operator>(==|!=|<=|>=|<|>)) - \s* - (?P<version> - [^,;\s)]* # Since this is a "legacy" specifier, and the version - # string can be just about anything, we match everything - # except for whitespace, a semi-colon for marker support, - # a closing paren since versions can be enclosed in - # them, and a comma since it's a version separator. - ) - """ - ) - - _regex = re.compile( - r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - } - - def _coerce_version(self, version): - if not isinstance(version, LegacyVersion): - version = LegacyVersion(str(version)) - return version - - def _compare_equal(self, prospective, spec): - return prospective == self._coerce_version(spec) - - def _compare_not_equal(self, prospective, spec): - return prospective != self._coerce_version(spec) - - def _compare_less_than_equal(self, prospective, spec): - return prospective <= self._coerce_version(spec) - - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= self._coerce_version(spec) - - def _compare_less_than(self, prospective, spec): - return prospective < self._coerce_version(spec) - - def _compare_greater_than(self, prospective, spec): - return prospective > self._coerce_version(spec) - - -def _require_version_compare(fn): - @functools.wraps(fn) - def wrapped(self, prospective, spec): - if not isinstance(prospective, Version): - return False - return fn(self, prospective, spec) - return wrapped - - -class Specifier(_IndividualSpecifier): - - _regex_str = ( - r""" - (?P<operator>(~=|==|!=|<=|>=|<|>|===)) - (?P<version> - (?: - # The identity operators allow for an escape hatch that will - # do an exact string match of the version you wish to install. - # This will not be parsed by PEP 440 and we cannot determine - # any semantic meaning from it. This operator is discouraged - # but included entirely as an escape hatch. - (?<====) # Only match for the identity operator - \s* - [^\s]* # We just match everything, except for whitespace - # since we are only testing for strict identity. - ) - | - (?: - # The (non)equality operators allow for wild card and local - # versions to be specified so we have to define these two - # operators separately to enable that. - (?<===|!=) # Only match for equals and not equals - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - - # You cannot use a wild card and a dev or local version - # together so group them with a | and make them optional. - (?: - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - | - \.\* # Wild card syntax of .* - )? 
- ) - | - (?: - # The compatible operator requires at least two digits in the - # release segment. - (?<=~=) # Only match for the compatible operator - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - | - (?: - # All other operators only allow a sub set of what the - # (non)equality operators do. Specifically they do not allow - # local versions to be specified nor do they allow the prefix - # matching wild cards. - (?<!==|!=|~=) # We have special cases for these - # operators so we want to make sure they - # don't match here. - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - ) - """ - ) - - _regex = re.compile( - r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "~=": "compatible", - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - "===": "arbitrary", - } - - @_require_version_compare - def _compare_compatible(self, prospective, spec): - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore post and dev releases and we want to treat the pre-release as - # it's own separate segment. - prefix = ".".join( - list( - itertools.takewhile( - lambda x: (not x.startswith("post") and not - x.startswith("dev")), - _version_split(spec), - ) - )[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return (self._get_operator(">=")(prospective, spec) and - self._get_operator("==")(prospective, prefix)) - - @_require_version_compare - def _compare_equal(self, prospective, spec): - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - prospective = Version(prospective.public) - # Split the spec out by dots, and pretend that there is an implicit - # dot in between a release segment and a pre-release segment. - spec = _version_split(spec[:-2]) # Remove the trailing .* - - # Split the prospective version out by dots, and pretend that there - # is an implicit dot in between a release segment and a pre-release - # segment. - prospective = _version_split(str(prospective)) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - prospective = prospective[:len(spec)] - - # Pad out our two sides with zeros so that they both equal the same - # length. - spec, prospective = _pad_version(spec, prospective) - else: - # Convert our spec string into a Version - spec = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. 
- if not spec.local: - prospective = Version(prospective.public) - - return prospective == spec - - @_require_version_compare - def _compare_not_equal(self, prospective, spec): - return not self._compare_equal(prospective, spec) - - @_require_version_compare - def _compare_less_than_equal(self, prospective, spec): - return prospective <= Version(spec) - - @_require_version_compare - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= Version(spec) - - @_require_version_compare - def _compare_less_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - @_require_version_compare - def _compare_greater_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is techincally greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective, spec): - return str(prospective).lower() == str(spec).lower() - - @property - def prereleases(self): - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. 
- if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if parse(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version): - result = [] - for item in version.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _pad_version(left, right): - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]):]) - right_split.append(right[len(right_split[0]):]) - - # Insert our padding - left_split.insert( - 1, - ["0"] * max(0, len(right_split[0]) - len(left_split[0])), - ) - right_split.insert( - 1, - ["0"] * max(0, len(left_split[0]) - len(right_split[0])), - ) - - return ( - list(itertools.chain(*left_split)), - list(itertools.chain(*right_split)), - ) - - -class SpecifierSet(BaseSpecifier): - - def __init__(self, specifiers="", prereleases=None): - # Split on , to break each indidivual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. - specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Parsed each individual specifier, attempting first to make it a - # Specifier and falling back to a LegacySpecifier. - parsed = set() - for specifier in specifiers: - try: - parsed.add(Specifier(specifier)) - except InvalidSpecifier: - parsed.add(LegacySpecifier(specifier)) - - # Turn our parsed specifiers into a frozen set and save them for later. - self._specs = frozenset(parsed) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "<SpecifierSet({0!r}{1})>".format(str(self), pre) - - def __str__(self): - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self): - return hash(self._specs) - - def __and__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." 
- ) - - return specifier - - def __eq__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __ne__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs != other._specs - - def __len__(self): - return len(self._specs) - - def __iter__(self): - return iter(self._specs) - - @property - def prereleases(self): - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Ensure that our item is a Version or LegacyVersion instance. - if not isinstance(item, (LegacyVersion, Version)): - item = parse(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. - # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. - # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all( - s.contains(item, prereleases=prereleases) - for s in self._specs - ) - - def filter(self, iterable, prereleases=None): - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iterable - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases, and which will filter out LegacyVersion in general. 
- else: - filtered = [] - found_prereleases = [] - - for item in iterable: - # Ensure that we some kind of Version class for this item. - if not isinstance(item, (LegacyVersion, Version)): - parsed_version = parse(item) - else: - parsed_version = item - - # Filter out any item which is parsed as a LegacyVersion - if isinstance(parsed_version, LegacyVersion): - continue - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return found_prereleases - - return filtered diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyc deleted file mode 100644 index b838861d..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.py deleted file mode 100644 index 942387ce..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import re - - -_canonicalize_regex = re.compile(r"[-_.]+") - - -def canonicalize_name(name): - # This is taken from PEP 503. - return _canonicalize_regex.sub("-", name).lower() diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.pyc deleted file mode 100644 index 9f55ae27..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.py deleted file mode 100644 index 83b5ee8c..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.py +++ /dev/null @@ -1,393 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import collections -import itertools -import re - -from ._structures import Infinity - - -__all__ = [ - "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" -] - - -_Version = collections.namedtuple( - "_Version", - ["epoch", "release", "dev", "pre", "post", "local"], -) - - -def parse(version): - """ - Parse the given version string and return either a :class:`Version` object - or a :class:`LegacyVersion` object depending on if the given version is - a valid PEP 440 version or a legacy version. 
- """ - try: - return Version(version) - except InvalidVersion: - return LegacyVersion(version) - - -class InvalidVersion(ValueError): - """ - An invalid version was found, users should refer to PEP 440. - """ - - -class _BaseVersion(object): - - def __hash__(self): - return hash(self._key) - - def __lt__(self, other): - return self._compare(other, lambda s, o: s < o) - - def __le__(self, other): - return self._compare(other, lambda s, o: s <= o) - - def __eq__(self, other): - return self._compare(other, lambda s, o: s == o) - - def __ge__(self, other): - return self._compare(other, lambda s, o: s >= o) - - def __gt__(self, other): - return self._compare(other, lambda s, o: s > o) - - def __ne__(self, other): - return self._compare(other, lambda s, o: s != o) - - def _compare(self, other, method): - if not isinstance(other, _BaseVersion): - return NotImplemented - - return method(self._key, other._key) - - -class LegacyVersion(_BaseVersion): - - def __init__(self, version): - self._version = str(version) - self._key = _legacy_cmpkey(self._version) - - def __str__(self): - return self._version - - def __repr__(self): - return "<LegacyVersion({0})>".format(repr(str(self))) - - @property - def public(self): - return self._version - - @property - def base_version(self): - return self._version - - @property - def local(self): - return None - - @property - def is_prerelease(self): - return False - - @property - def is_postrelease(self): - return False - - -_legacy_version_component_re = re.compile( - r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, -) - -_legacy_version_replacement_map = { - "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", -} - - -def _parse_version_parts(s): - for part in _legacy_version_component_re.split(s): - part = _legacy_version_replacement_map.get(part, part) - - if not part or part == ".": - continue - - if part[:1] in "0123456789": - # pad for numeric comparison - yield part.zfill(8) - else: - yield "*" + part - - # ensure that alpha/beta/candidate are before final - yield "*final" - - -def _legacy_cmpkey(version): - # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch - # greater than or equal to 0. This will effectively put the LegacyVersion, - # which uses the defacto standard originally implemented by setuptools, - # as before all PEP 440 versions. - epoch = -1 - - # This scheme is taken from pkg_resources.parse_version setuptools prior to - # it's adoption of the packaging library. - parts = [] - for part in _parse_version_parts(version.lower()): - if part.startswith("*"): - # remove "-" before a prerelease tag - if part < "*final": - while parts and parts[-1] == "*final-": - parts.pop() - - # remove trailing zeros from each series of numeric parts - while parts and parts[-1] == "00000000": - parts.pop() - - parts.append(part) - parts = tuple(parts) - - return epoch, parts - -# Deliberately not anchored to the start and end of the string, to make it -# easier for 3rd party code to reuse -VERSION_PATTERN = r""" - v? - (?: - (?:(?P<epoch>[0-9]+)!)? # epoch - (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment - (?P<pre> # pre-release - [-_\.]? - (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) - [-_\.]? - (?P<pre_n>[0-9]+)? - )? - (?P<post> # post release - (?:-(?P<post_n1>[0-9]+)) - | - (?: - [-_\.]? - (?P<post_l>post|rev|r) - [-_\.]? - (?P<post_n2>[0-9]+)? - ) - )? - (?P<dev> # dev release - [-_\.]? - (?P<dev_l>dev) - [-_\.]? - (?P<dev_n>[0-9]+)? - )? - ) - (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? 
# local version -""" - - -class Version(_BaseVersion): - - _regex = re.compile( - r"^\s*" + VERSION_PATTERN + r"\s*$", - re.VERBOSE | re.IGNORECASE, - ) - - def __init__(self, version): - # Validate the version and parse it into pieces - match = self._regex.search(version) - if not match: - raise InvalidVersion("Invalid version: '{0}'".format(version)) - - # Store the parsed out pieces of the version - self._version = _Version( - epoch=int(match.group("epoch")) if match.group("epoch") else 0, - release=tuple(int(i) for i in match.group("release").split(".")), - pre=_parse_letter_version( - match.group("pre_l"), - match.group("pre_n"), - ), - post=_parse_letter_version( - match.group("post_l"), - match.group("post_n1") or match.group("post_n2"), - ), - dev=_parse_letter_version( - match.group("dev_l"), - match.group("dev_n"), - ), - local=_parse_local_version(match.group("local")), - ) - - # Generate a key which will be used for sorting - self._key = _cmpkey( - self._version.epoch, - self._version.release, - self._version.pre, - self._version.post, - self._version.dev, - self._version.local, - ) - - def __repr__(self): - return "<Version({0})>".format(repr(str(self))) - - def __str__(self): - parts = [] - - # Epoch - if self._version.epoch != 0: - parts.append("{0}!".format(self._version.epoch)) - - # Release segment - parts.append(".".join(str(x) for x in self._version.release)) - - # Pre-release - if self._version.pre is not None: - parts.append("".join(str(x) for x in self._version.pre)) - - # Post-release - if self._version.post is not None: - parts.append(".post{0}".format(self._version.post[1])) - - # Development release - if self._version.dev is not None: - parts.append(".dev{0}".format(self._version.dev[1])) - - # Local version segment - if self._version.local is not None: - parts.append( - "+{0}".format(".".join(str(x) for x in self._version.local)) - ) - - return "".join(parts) - - @property - def public(self): - return str(self).split("+", 1)[0] - - @property - def base_version(self): - parts = [] - - # Epoch - if self._version.epoch != 0: - parts.append("{0}!".format(self._version.epoch)) - - # Release segment - parts.append(".".join(str(x) for x in self._version.release)) - - return "".join(parts) - - @property - def local(self): - version_string = str(self) - if "+" in version_string: - return version_string.split("+", 1)[1] - - @property - def is_prerelease(self): - return bool(self._version.dev or self._version.pre) - - @property - def is_postrelease(self): - return bool(self._version.post) - - -def _parse_letter_version(letter, number): - if letter: - # We consider there to be an implicit 0 in a pre-release if there is - # not a numeral associated with it. - if number is None: - number = 0 - - # We normalize any letters to their lower case form - letter = letter.lower() - - # We consider some words to be alternate spellings of other words and - # in those cases we want to normalize the spellings to our preferred - # spelling. - if letter == "alpha": - letter = "a" - elif letter == "beta": - letter = "b" - elif letter in ["c", "pre", "preview"]: - letter = "rc" - elif letter in ["rev", "r"]: - letter = "post" - - return letter, int(number) - if not letter and number: - # We assume if we are given a number, but we are not given a letter - # then this is using the implicit post release syntax (e.g. 
1.0-1) - letter = "post" - - return letter, int(number) - - -_local_version_seperators = re.compile(r"[\._-]") - - -def _parse_local_version(local): - """ - Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). - """ - if local is not None: - return tuple( - part.lower() if not part.isdigit() else int(part) - for part in _local_version_seperators.split(local) - ) - - -def _cmpkey(epoch, release, pre, post, dev, local): - # When we compare a release version, we want to compare it with all of the - # trailing zeros removed. So we'll use a reverse the list, drop all the now - # leading zeros until we come to something non zero, then take the rest - # re-reverse it back into the correct order and make it a tuple and use - # that for our sorting key. - release = tuple( - reversed(list( - itertools.dropwhile( - lambda x: x == 0, - reversed(release), - ) - )) - ) - - # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. - # We'll do this by abusing the pre segment, but we _only_ want to do this - # if there is not a pre or a post segment. If we have one of those then - # the normal sorting rules will handle this case correctly. - if pre is None and post is None and dev is not None: - pre = -Infinity - # Versions without a pre-release (except as noted above) should sort after - # those with one. - elif pre is None: - pre = Infinity - - # Versions without a post segment should sort before those with one. - if post is None: - post = -Infinity - - # Versions without a development segment should sort after those with one. - if dev is None: - dev = Infinity - - if local is None: - # Versions without a local segment should sort before those with one. - local = -Infinity - else: - # Versions with a local segment need that segment parsed to implement - # the sorting rules in PEP440. - # - Alpha numeric segments sort before numeric segments - # - Alpha numeric segments sort lexicographically - # - Numeric segments sort numerically - # - Shorter versions sort before longer versions when the prefixes - # match exactly - local = tuple( - (i, "") if isinstance(i, int) else (-Infinity, i) - for i in local - ) - - return epoch, release, pre, post, dev, local diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyc deleted file mode 100644 index d755b424..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.py deleted file mode 100644 index cf75e1e5..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.py +++ /dev/null @@ -1,5742 +0,0 @@ -# module pyparsing.py -# -# Copyright (c) 2003-2018 Paul T. McGuire -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__doc__ = \ -""" -pyparsing module - Classes and methods to define and execute parsing grammars -============================================================================= - -The pyparsing module is an alternative approach to creating and executing simple grammars, -vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you -don't need to learn a new syntax for defining grammars or matching expressions - the parsing module -provides a library of classes that you use to construct the grammar directly in Python. - -Here is a program to parse "Hello, World!" (or any greeting of the form -C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements -(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to -L{Literal} expressions):: - - from pyparsing import Word, alphas - - # define grammar of a greeting - greet = Word(alphas) + "," + Word(alphas) + "!" - - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - -The program outputs the following:: - - Hello, World! -> ['Hello', ',', 'World', '!'] - -The Python representation of the grammar is quite readable, owing to the self-explanatory -class names, and the use of '+', '|' and '^' operators. - -The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an -object with named attributes. - -The pyparsing module handles some of the problems that are typically vexing when writing text parsers: - - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) - - quoted strings - - embedded comments - - -Getting Started - ------------------ -Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing -classes inherit from. 
Use the docstrings for examples of how to: - - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes - - construct character word-group expressions using the L{Word} class - - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes - - use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones - - associate names with your parsed results using L{ParserElement.setResultsName} - - find some helpful expression short-cuts like L{delimitedList} and L{oneOf} - - find more useful common expressions in the L{pyparsing_common} namespace class -""" - -__version__ = "2.2.1" -__versionTime__ = "18 Sep 2018 00:49 UTC" -__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" - -import string -from weakref import ref as wkref -import copy -import sys -import warnings -import re -import sre_constants -import collections -import pprint -import traceback -import types -from datetime import datetime - -try: - from _thread import RLock -except ImportError: - from threading import RLock - -try: - # Python 3 - from collections.abc import Iterable - from collections.abc import MutableMapping -except ImportError: - # Python 2.7 - from collections import Iterable - from collections import MutableMapping - -try: - from collections import OrderedDict as _OrderedDict -except ImportError: - try: - from ordereddict import OrderedDict as _OrderedDict - except ImportError: - _OrderedDict = None - -#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) - -__all__ = [ -'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', -'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', -'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', -'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', -'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', -'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', -'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', -'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', -'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', -'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', -'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', -'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', -'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', -'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', -'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', -'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', -'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', -'CloseMatch', 'tokenMap', 'pyparsing_common', -] - -system_version = tuple(sys.version_info)[:3] -PY_3 = system_version[0] == 3 -if PY_3: - _MAX_INT = sys.maxsize - basestring = str - unichr = chr - _ustr = str - - # build list of single arg builtins, that can be used as parse actions - singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] - -else: - _MAX_INT = sys.maxint - 
range = xrange - - def _ustr(obj): - """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries - str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It - then < returns the unicode object | encodes it with the default encoding | ... >. - """ - if isinstance(obj,unicode): - return obj - - try: - # If this works, then _ustr(obj) has the same behaviour as str(obj), so - # it won't break any existing code. - return str(obj) - - except UnicodeEncodeError: - # Else encode it - ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') - xmlcharref = Regex(r'&#\d+;') - xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) - return xmlcharref.transformString(ret) - - # build list of single arg builtins, tolerant of Python version, that can be used as parse actions - singleArgBuiltins = [] - import __builtin__ - for fname in "sum len sorted reversed list tuple set any all min max".split(): - try: - singleArgBuiltins.append(getattr(__builtin__,fname)) - except AttributeError: - continue - -_generatorType = type((y for y in range(1))) - -def _xml_escape(data): - """Escape &, <, >, ", ', etc. in a string of data.""" - - # ampersand must be replaced first - from_symbols = '&><"\'' - to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) - for from_,to_ in zip(from_symbols, to_symbols): - data = data.replace(from_, to_) - return data - -class _Constants(object): - pass - -alphas = string.ascii_uppercase + string.ascii_lowercase -nums = "0123456789" -hexnums = nums + "ABCDEFabcdef" -alphanums = alphas + nums -_bslash = chr(92) -printables = "".join(c for c in string.printable if c not in string.whitespace) - -class ParseBaseException(Exception): - """base exception class for all parsing runtime exceptions""" - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, pstr, loc=0, msg=None, elem=None ): - self.loc = loc - if msg is None: - self.msg = pstr - self.pstr = "" - else: - self.msg = msg - self.pstr = pstr - self.parserElement = elem - self.args = (pstr, loc, msg) - - @classmethod - def _from_exception(cls, pe): - """ - internal factory method to simplify creating one type of ParseException - from another - avoids having __init__ signature conflicts among subclasses - """ - return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) - - def __getattr__( self, aname ): - """supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - """ - if( aname == "lineno" ): - return lineno( self.loc, self.pstr ) - elif( aname in ("col", "column") ): - return col( self.loc, self.pstr ) - elif( aname == "line" ): - return line( self.loc, self.pstr ) - else: - raise AttributeError(aname) - - def __str__( self ): - return "%s (at char %d), (line:%d, col:%d)" % \ - ( self.msg, self.loc, self.lineno, self.column ) - def __repr__( self ): - return _ustr(self) - def markInputline( self, markerString = ">!<" ): - """Extracts the exception line from the input string, and marks - the location of the exception with a special symbol. 
- """ - line_str = self.line - line_column = self.column - 1 - if markerString: - line_str = "".join((line_str[:line_column], - markerString, line_str[line_column:])) - return line_str.strip() - def __dir__(self): - return "lineno col line".split() + dir(type(self)) - -class ParseException(ParseBaseException): - """ - Exception thrown when parse expressions don't match class; - supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - - Example:: - try: - Word(nums).setName("integer").parseString("ABC") - except ParseException as pe: - print(pe) - print("column: {}".format(pe.col)) - - prints:: - Expected integer (at char 0), (line:1, col:1) - column: 1 - """ - pass - -class ParseFatalException(ParseBaseException): - """user-throwable exception thrown when inconsistent parse content - is found; stops all parsing immediately""" - pass - -class ParseSyntaxException(ParseFatalException): - """just like L{ParseFatalException}, but thrown internally when an - L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop - immediately because an unbacktrackable syntax error has been found""" - pass - -#~ class ReparseException(ParseBaseException): - #~ """Experimental class - parse actions can raise this exception to cause - #~ pyparsing to reparse the input string: - #~ - with a modified input string, and/or - #~ - with a modified start location - #~ Set the values of the ReparseException in the constructor, and raise the - #~ exception in a parse action to cause pyparsing to use the new string/location. - #~ Setting the values as None causes no change to be made. - #~ """ - #~ def __init_( self, newstring, restartLoc ): - #~ self.newParseText = newstring - #~ self.reparseLoc = restartLoc - -class RecursiveGrammarException(Exception): - """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" - def __init__( self, parseElementList ): - self.parseElementTrace = parseElementList - - def __str__( self ): - return "RecursiveGrammarException: %s" % self.parseElementTrace - -class _ParseResultsWithOffset(object): - def __init__(self,p1,p2): - self.tup = (p1,p2) - def __getitem__(self,i): - return self.tup[i] - def __repr__(self): - return repr(self.tup[0]) - def setOffset(self,i): - self.tup = (self.tup[0],i) - -class ParseResults(object): - """ - Structured parse results, to provide multiple means of access to the parsed data: - - as a list (C{len(results)}) - - by list index (C{results[0], results[1]}, etc.) 
- - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName}) - - Example:: - integer = Word(nums) - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - # equivalent form: - # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - # parseString returns a ParseResults object - result = date_str.parseString("1999/12/31") - - def test(s, fn=repr): - print("%s -> %s" % (s, fn(eval(s)))) - test("list(result)") - test("result[0]") - test("result['month']") - test("result.day") - test("'month' in result") - test("'minutes' in result") - test("result.dump()", str) - prints:: - list(result) -> ['1999', '/', '12', '/', '31'] - result[0] -> '1999' - result['month'] -> '12' - result.day -> '31' - 'month' in result -> True - 'minutes' in result -> False - result.dump() -> ['1999', '/', '12', '/', '31'] - - day: 31 - - month: 12 - - year: 1999 - """ - def __new__(cls, toklist=None, name=None, asList=True, modal=True ): - if isinstance(toklist, cls): - return toklist - retobj = object.__new__(cls) - retobj.__doinit = True - return retobj - - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): - if self.__doinit: - self.__doinit = False - self.__name = None - self.__parent = None - self.__accumNames = {} - self.__asList = asList - self.__modal = modal - if toklist is None: - toklist = [] - if isinstance(toklist, list): - self.__toklist = toklist[:] - elif isinstance(toklist, _generatorType): - self.__toklist = list(toklist) - else: - self.__toklist = [toklist] - self.__tokdict = dict() - - if name is not None and name: - if not modal: - self.__accumNames[name] = 0 - if isinstance(name,int): - name = _ustr(name) # will always return a str, but use _ustr for consistency - self.__name = name - if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): - if isinstance(toklist,basestring): - toklist = [ toklist ] - if asList: - if isinstance(toklist,ParseResults): - self[name] = _ParseResultsWithOffset(toklist.copy(),0) - else: - self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) - self[name].__name = name - else: - try: - self[name] = toklist[0] - except (KeyError,TypeError,IndexError): - self[name] = toklist - - def __getitem__( self, i ): - if isinstance( i, (int,slice) ): - return self.__toklist[i] - else: - if i not in self.__accumNames: - return self.__tokdict[i][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[i] ]) - - def __setitem__( self, k, v, isinstance=isinstance ): - if isinstance(v,_ParseResultsWithOffset): - self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] - sub = v[0] - elif isinstance(k,(int,slice)): - self.__toklist[k] = v - sub = v - else: - self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] - sub = v - if isinstance(sub,ParseResults): - sub.__parent = wkref(self) - - def __delitem__( self, i ): - if isinstance(i,(int,slice)): - mylen = len( self.__toklist ) - del self.__toklist[i] - - # convert int to slice - if isinstance(i, int): - if i < 0: - i += mylen - i = slice(i, i+1) - # get removed indices - removed = list(range(*i.indices(mylen))) - removed.reverse() - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for j in removed: - for k, (value, position) in 
enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) - else: - del self.__tokdict[i] - - def __contains__( self, k ): - return k in self.__tokdict - - def __len__( self ): return len( self.__toklist ) - def __bool__(self): return ( not not self.__toklist ) - __nonzero__ = __bool__ - def __iter__( self ): return iter( self.__toklist ) - def __reversed__( self ): return iter( self.__toklist[::-1] ) - def _iterkeys( self ): - if hasattr(self.__tokdict, "iterkeys"): - return self.__tokdict.iterkeys() - else: - return iter(self.__tokdict) - - def _itervalues( self ): - return (self[k] for k in self._iterkeys()) - - def _iteritems( self ): - return ((k, self[k]) for k in self._iterkeys()) - - if PY_3: - keys = _iterkeys - """Returns an iterator of all named result keys (Python 3.x only).""" - - values = _itervalues - """Returns an iterator of all named result values (Python 3.x only).""" - - items = _iteritems - """Returns an iterator of all named result key-value tuples (Python 3.x only).""" - - else: - iterkeys = _iterkeys - """Returns an iterator of all named result keys (Python 2.x only).""" - - itervalues = _itervalues - """Returns an iterator of all named result values (Python 2.x only).""" - - iteritems = _iteritems - """Returns an iterator of all named result key-value tuples (Python 2.x only).""" - - def keys( self ): - """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iterkeys()) - - def values( self ): - """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.itervalues()) - - def items( self ): - """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iteritems()) - - def haskeys( self ): - """Since keys() returns an iterator, this method is helpful in bypassing - code that looks for the existence of any defined results names.""" - return bool(self.__tokdict) - - def pop( self, *args, **kwargs): - """ - Removes and returns item at specified index (default=C{last}). - Supports both C{list} and C{dict} semantics for C{pop()}. If passed no - argument or an integer argument, it will use C{list} semantics - and pop tokens from the list of parsed tokens. If passed a - non-integer argument (most likely a string), it will use C{dict} - semantics and pop the corresponding value from any defined - results names. A second default return value argument is - supported, just as in C{dict.pop()}. 
- - Example:: - def remove_first(tokens): - tokens.pop(0) - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] - - label = Word(alphas) - patt = label("LABEL") + OneOrMore(Word(nums)) - print(patt.parseString("AAB 123 321").dump()) - - # Use pop() in a parse action to remove named result (note that corresponding value is not - # removed from list form of results) - def remove_LABEL(tokens): - tokens.pop("LABEL") - return tokens - patt.addParseAction(remove_LABEL) - print(patt.parseString("AAB 123 321").dump()) - prints:: - ['AAB', '123', '321'] - - LABEL: AAB - - ['AAB', '123', '321'] - """ - if not args: - args = [-1] - for k,v in kwargs.items(): - if k == 'default': - args = (args[0], v) - else: - raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) or - len(args) == 1 or - args[0] in self): - index = args[0] - ret = self[index] - del self[index] - return ret - else: - defaultvalue = args[1] - return defaultvalue - - def get(self, key, defaultValue=None): - """ - Returns named result matching the given key, or if there is no - such name, then returns the given C{defaultValue} or C{None} if no - C{defaultValue} is specified. - - Similar to C{dict.get()}. - - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString("1999/12/31") - print(result.get("year")) # -> '1999' - print(result.get("hour", "not specified")) # -> 'not specified' - print(result.get("hour")) # -> None - """ - if key in self: - return self[key] - else: - return defaultValue - - def insert( self, index, insStr ): - """ - Inserts new element at location index in the list of parsed tokens. - - Similar to C{list.insert()}. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to insert the parse location in the front of the parsed results - def insert_locn(locn, tokens): - tokens.insert(0, locn) - print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] - """ - self.__toklist.insert(index, insStr) - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for k, (value, position) in enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) - - def append( self, item ): - """ - Add single element to end of ParseResults list of elements. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to compute the sum of the parsed integers, and add it to the end - def append_sum(tokens): - tokens.append(sum(map(int, tokens))) - print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] - """ - self.__toklist.append(item) - - def extend( self, itemseq ): - """ - Add sequence of elements to end of ParseResults list of elements. 
- - Example:: - patt = OneOrMore(Word(alphas)) - - # use a parse action to append the reverse of the matched strings, to make a palindrome - def make_palindrome(tokens): - tokens.extend(reversed([t[::-1] for t in tokens])) - return ''.join(tokens) - print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' - """ - if isinstance(itemseq, ParseResults): - self += itemseq - else: - self.__toklist.extend(itemseq) - - def clear( self ): - """ - Clear all elements and results names. - """ - del self.__toklist[:] - self.__tokdict.clear() - - def __getattr__( self, name ): - try: - return self[name] - except KeyError: - return "" - - if name in self.__tokdict: - if name not in self.__accumNames: - return self.__tokdict[name][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[name] ]) - else: - return "" - - def __add__( self, other ): - ret = self.copy() - ret += other - return ret - - def __iadd__( self, other ): - if other.__tokdict: - offset = len(self.__toklist) - addoffset = lambda a: offset if a<0 else a+offset - otheritems = other.__tokdict.items() - otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) - for (k,vlist) in otheritems for v in vlist] - for k,v in otherdictitems: - self[k] = v - if isinstance(v[0],ParseResults): - v[0].__parent = wkref(self) - - self.__toklist += other.__toklist - self.__accumNames.update( other.__accumNames ) - return self - - def __radd__(self, other): - if isinstance(other,int) and other == 0: - # useful for merging many ParseResults using sum() builtin - return self.copy() - else: - # this may raise a TypeError - so be it - return other + self - - def __repr__( self ): - return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) - - def __str__( self ): - return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' - - def _asStringList( self, sep='' ): - out = [] - for item in self.__toklist: - if out and sep: - out.append(sep) - if isinstance( item, ParseResults ): - out += item._asStringList() - else: - out.append( _ustr(item) ) - return out - - def asList( self ): - """ - Returns the parse results as a nested list of matching tokens, all converted to strings. - - Example:: - patt = OneOrMore(Word(alphas)) - result = patt.parseString("sldkj lsdkj sldkj") - # even though the result prints in string-like form, it is actually a pyparsing ParseResults - print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj'] - - # Use asList() to create an actual list - result_list = result.asList() - print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj'] - """ - return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] - - def asDict( self ): - """ - Returns the named parse results as a nested dictionary. 
- - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString('12/31/1999') - print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) - - result_dict = result.asDict() - print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'} - - # even though a ParseResults supports dict-like access, sometime you just need to have a dict - import json - print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable - print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} - """ - if PY_3: - item_fn = self.items - else: - item_fn = self.iteritems - - def toItem(obj): - if isinstance(obj, ParseResults): - if obj.haskeys(): - return obj.asDict() - else: - return [toItem(v) for v in obj] - else: - return obj - - return dict((k,toItem(v)) for k,v in item_fn()) - - def copy( self ): - """ - Returns a new copy of a C{ParseResults} object. - """ - ret = ParseResults( self.__toklist ) - ret.__tokdict = self.__tokdict.copy() - ret.__parent = self.__parent - ret.__accumNames.update( self.__accumNames ) - ret.__name = self.__name - return ret - - def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): - """ - (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. - """ - nl = "\n" - out = [] - namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() - for v in vlist) - nextLevelIndent = indent + " " - - # collapse out indents if formatting is not desired - if not formatted: - indent = "" - nextLevelIndent = "" - nl = "" - - selfTag = None - if doctag is not None: - selfTag = doctag - else: - if self.__name: - selfTag = self.__name - - if not selfTag: - if namedItemsOnly: - return "" - else: - selfTag = "ITEM" - - out += [ nl, indent, "<", selfTag, ">" ] - - for i,res in enumerate(self.__toklist): - if isinstance(res,ParseResults): - if i in namedItems: - out += [ res.asXML(namedItems[i], - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] - else: - out += [ res.asXML(None, - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] - else: - # individual token, see if there is a name for it - resTag = None - if i in namedItems: - resTag = namedItems[i] - if not resTag: - if namedItemsOnly: - continue - else: - resTag = "ITEM" - xmlBodyText = _xml_escape(_ustr(res)) - out += [ nl, nextLevelIndent, "<", resTag, ">", - xmlBodyText, - "</", resTag, ">" ] - - out += [ nl, indent, "</", selfTag, ">" ] - return "".join(out) - - def __lookup(self,sub): - for k,vlist in self.__tokdict.items(): - for v,loc in vlist: - if sub is v: - return k - return None - - def getName(self): - r""" - Returns the results name for this token expression. Useful when several - different expressions might match at a particular location. 
- - Example:: - integer = Word(nums) - ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") - house_number_expr = Suppress('#') + Word(nums, alphanums) - user_data = (Group(house_number_expr)("house_number") - | Group(ssn_expr)("ssn") - | Group(integer)("age")) - user_info = OneOrMore(user_data) - - result = user_info.parseString("22 111-22-3333 #221B") - for item in result: - print(item.getName(), ':', item[0]) - prints:: - age : 22 - ssn : 111-22-3333 - house_number : 221B - """ - if self.__name: - return self.__name - elif self.__parent: - par = self.__parent() - if par: - return par.__lookup(self) - else: - return None - elif (len(self) == 1 and - len(self.__tokdict) == 1 and - next(iter(self.__tokdict.values()))[0][1] in (0,-1)): - return next(iter(self.__tokdict.keys())) - else: - return None - - def dump(self, indent='', depth=0, full=True): - """ - Diagnostic method for listing out the contents of a C{ParseResults}. - Accepts an optional C{indent} argument so that this string can be embedded - in a nested display of other data. - - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString('12/31/1999') - print(result.dump()) - prints:: - ['12', '/', '31', '/', '1999'] - - day: 1999 - - month: 31 - - year: 12 - """ - out = [] - NL = '\n' - out.append( indent+_ustr(self.asList()) ) - if full: - if self.haskeys(): - items = sorted((str(k), v) for k,v in self.items()) - for k,v in items: - if out: - out.append(NL) - out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) - if isinstance(v,ParseResults): - if v: - out.append( v.dump(indent,depth+1) ) - else: - out.append(_ustr(v)) - else: - out.append(repr(v)) - elif any(isinstance(vv,ParseResults) for vv in self): - v = self - for i,vv in enumerate(v): - if isinstance(vv,ParseResults): - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) - else: - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) - - return "".join(out) - - def pprint(self, *args, **kwargs): - """ - Pretty-printer for parsed results as a list, using the C{pprint} module. - Accepts additional positional or keyword args as defined for the - C{pprint.pprint} method. 
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) - - Example:: - ident = Word(alphas, alphanums) - num = Word(nums) - func = Forward() - term = ident | num | Group('(' + func + ')') - func <<= ident + Group(Optional(delimitedList(term))) - result = func.parseString("fna a,b,(fnb c,d,200),100") - result.pprint(width=40) - prints:: - ['fna', - ['a', - 'b', - ['(', 'fnb', ['c', 'd', '200'], ')'], - '100']] - """ - pprint.pprint(self.asList(), *args, **kwargs) - - # add support for pickle protocol - def __getstate__(self): - return ( self.__toklist, - ( self.__tokdict.copy(), - self.__parent is not None and self.__parent() or None, - self.__accumNames, - self.__name ) ) - - def __setstate__(self,state): - self.__toklist = state[0] - (self.__tokdict, - par, - inAccumNames, - self.__name) = state[1] - self.__accumNames = {} - self.__accumNames.update(inAccumNames) - if par is not None: - self.__parent = wkref(par) - else: - self.__parent = None - - def __getnewargs__(self): - return self.__toklist, self.__name, self.__asList, self.__modal - - def __dir__(self): - return (dir(type(self)) + list(self.keys())) - -MutableMapping.register(ParseResults) - -def col (loc,strg): - """Returns current column within a string, counting newlines as line separators. - The first column is number 1. - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information - on parsing strings containing C{<TAB>}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - """ - s = strg - return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) - -def lineno(loc,strg): - """Returns current line number within a string, counting newlines as line separators. - The first line is number 1. - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information - on parsing strings containing C{<TAB>}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - """ - return strg.count("\n",0,loc) + 1 - -def line( loc, strg ): - """Returns the line of text containing loc within a string, counting newlines as line separators. 
- """ - lastCR = strg.rfind("\n", 0, loc) - nextCR = strg.find("\n", loc) - if nextCR >= 0: - return strg[lastCR+1:nextCR] - else: - return strg[lastCR+1:] - -def _defaultStartDebugAction( instring, loc, expr ): - print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) - -def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): - print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) - -def _defaultExceptionDebugAction( instring, loc, expr, exc ): - print ("Exception raised:" + _ustr(exc)) - -def nullDebugAction(*args): - """'Do-nothing' debug action, to suppress debugging output during parsing.""" - pass - -# Only works on Python 3.x - nonlocal is toxic to Python 2 installs -#~ 'decorator to trim function calls to match the arity of the target' -#~ def _trim_arity(func, maxargs=3): - #~ if func in singleArgBuiltins: - #~ return lambda s,l,t: func(t) - #~ limit = 0 - #~ foundArity = False - #~ def wrapper(*args): - #~ nonlocal limit,foundArity - #~ while 1: - #~ try: - #~ ret = func(*args[limit:]) - #~ foundArity = True - #~ return ret - #~ except TypeError: - #~ if limit == maxargs or foundArity: - #~ raise - #~ limit += 1 - #~ continue - #~ return wrapper - -# this version is Python 2.x-3.x cross-compatible -'decorator to trim function calls to match the arity of the target' -def _trim_arity(func, maxargs=2): - if func in singleArgBuiltins: - return lambda s,l,t: func(t) - limit = [0] - foundArity = [False] - - # traceback return data structure changed in Py3.5 - normalize back to plain tuples - if system_version[:2] >= (3,5): - def extract_stack(limit=0): - # special handling for Python 3.5.0 - extra deep call stack by 1 - offset = -3 if system_version == (3,5,0) else -2 - frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] - return [frame_summary[:2]] - def extract_tb(tb, limit=0): - frames = traceback.extract_tb(tb, limit=limit) - frame_summary = frames[-1] - return [frame_summary[:2]] - else: - extract_stack = traceback.extract_stack - extract_tb = traceback.extract_tb - - # synthesize what would be returned by traceback.extract_stack at the call to - # user's parse action 'func', so that we don't incur call penalty at parse time - - LINE_DIFF = 6 - # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND - # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
- this_line = extract_stack(limit=2)[-1] - pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) - - def wrapper(*args): - while 1: - try: - ret = func(*args[limit[0]:]) - foundArity[0] = True - return ret - except TypeError: - # re-raise TypeErrors if they did not come from our arity testing - if foundArity[0]: - raise - else: - try: - tb = sys.exc_info()[-1] - if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: - raise - finally: - del tb - - if limit[0] <= maxargs: - limit[0] += 1 - continue - raise - - # copy func name to wrapper for sensible debug output - func_name = "<parse action>" - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - wrapper.__name__ = func_name - - return wrapper - -class ParserElement(object): - """Abstract base level parser element class.""" - DEFAULT_WHITE_CHARS = " \n\t\r" - verbose_stacktrace = False - - @staticmethod - def setDefaultWhitespaceChars( chars ): - r""" - Overrides the default whitespace chars - - Example:: - # default whitespace chars are space, <TAB> and newline - OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] - - # change to just treat newline as significant - ParserElement.setDefaultWhitespaceChars(" \t") - OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] - """ - ParserElement.DEFAULT_WHITE_CHARS = chars - - @staticmethod - def inlineLiteralsUsing(cls): - """ - Set class to be used for inclusion of string literals into a parser. - - Example:: - # default literal class used is Literal - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] - - - # change to Suppress - ParserElement.inlineLiteralsUsing(Suppress) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] - """ - ParserElement._literalStringClass = cls - - def __init__( self, savelist=False ): - self.parseAction = list() - self.failAction = None - #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall - self.strRepr = None - self.resultsName = None - self.saveAsList = savelist - self.skipWhitespace = True - self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS - self.copyDefaultWhiteChars = True - self.mayReturnEmpty = False # used when checking for left-recursion - self.keepTabs = False - self.ignoreExprs = list() - self.debug = False - self.streamlined = False - self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index - self.errmsg = "" - self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) - self.debugActions = ( None, None, None ) #custom debug actions - self.re = None - self.callPreparse = True # used to avoid redundant calls to preParse - self.callDuringTry = False - - def copy( self ): - """ - Make a copy of this C{ParserElement}. Useful for defining different parse actions - for the same parsing pattern, using copies of the original parse element. 
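# Editor's note: illustrative sketch, not part of the removed pyparsing source. It
# restates the setDefaultWhitespaceChars example from the docstring above; assumes
# the pyparsing package is importable.
from pyparsing import OneOrMore, ParserElement, Word, alphas

ParserElement.setDefaultWhitespaceChars(" \t")   # newline is now significant
print(OneOrMore(Word(alphas)).parseString("abc def\nghi jkl"))   # -> ['abc', 'def']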
- - Example:: - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") - integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - - print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) - prints:: - [5120, 100, 655360, 268435456] - Equivalent form of C{expr.copy()} is just C{expr()}:: - integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - """ - cpy = copy.copy( self ) - cpy.parseAction = self.parseAction[:] - cpy.ignoreExprs = self.ignoreExprs[:] - if self.copyDefaultWhiteChars: - cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS - return cpy - - def setName( self, name ): - """ - Define name for this expression, makes debugging and exception messages clearer. - - Example:: - Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) - Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) - """ - self.name = name - self.errmsg = "Expected " + self.name - if hasattr(self,"exception"): - self.exception.msg = self.errmsg - return self - - def setResultsName( self, name, listAllMatches=False ): - """ - Define name for referencing matching tokens as a nested attribute - of the returned parse results. - NOTE: this returns a *copy* of the original C{ParserElement} object; - this is so that the client can define a basic element, such as an - integer, and reference it in multiple places with different names. - - You can also set results names using the abbreviated syntax, - C{expr("name")} in place of C{expr.setResultsName("name")} - - see L{I{__call__}<__call__>}. - - Example:: - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - - # equivalent form: - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - """ - newself = self.copy() - if name.endswith("*"): - name = name[:-1] - listAllMatches=True - newself.resultsName = name - newself.modalResults = not listAllMatches - return newself - - def setBreak(self,breakFlag = True): - """Method to invoke the Python pdb debugger when this element is - about to be parsed. Set C{breakFlag} to True to enable, False to - disable. - """ - if breakFlag: - _parseMethod = self._parse - def breaker(instring, loc, doActions=True, callPreParse=True): - import pdb - pdb.set_trace() - return _parseMethod( instring, loc, doActions, callPreParse ) - breaker._originalParseMethod = _parseMethod - self._parse = breaker - else: - if hasattr(self._parse,"_originalParseMethod"): - self._parse = self._parse._originalParseMethod - return self - - def setParseAction( self, *fns, **kwargs ): - """ - Define one or more actions to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, - C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object - If the functions in fns modify the tokens, they can return them as the return - value from fn, and the modified list of tokens will replace the original. - Otherwise, fn does not need to return any value. 
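# Editor's note: illustrative sketch, not part of the removed pyparsing source. It shows
# the expr("name") shorthand for setResultsName described above; field names are made up.
from pyparsing import Word, nums

integer = Word(nums)
date_str = integer("year") + "/" + integer("month") + "/" + integer("day")

result = date_str.parseString("1999/12/31")
print(result["year"], result["month"], result["day"])   # -> 1999 12 31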
- - Optional keyword arguments: - - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{parseString}<parseString>} for more information - on parsing strings containing C{<TAB>}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - - Example:: - integer = Word(nums) - date_str = integer + '/' + integer + '/' + integer - - date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] - - # use parse action to convert to ints at parse time - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - date_str = integer + '/' + integer + '/' + integer - - # note that integer fields are now ints, not strings - date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] - """ - self.parseAction = list(map(_trim_arity, list(fns))) - self.callDuringTry = kwargs.get("callDuringTry", False) - return self - - def addParseAction( self, *fns, **kwargs ): - """ - Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}. - - See examples in L{I{copy}<copy>}. - """ - self.parseAction += list(map(_trim_arity, list(fns))) - self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) - return self - - def addCondition(self, *fns, **kwargs): - """Add a boolean predicate function to expression's list of parse actions. See - L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction}, - functions passed to C{addCondition} need to return boolean success/fail of the condition. - - Optional keyword arguments: - - message = define a custom message to be used in the raised exception - - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException - - Example:: - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - year_int = integer.copy() - year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") - date_str = year_int + '/' + integer + '/' + integer - - result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) - """ - msg = kwargs.get("message", "failed user-defined condition") - exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException - for fn in fns: - def pa(s,l,t): - if not bool(_trim_arity(fn)(s,l,t)): - raise exc_type(s,l,msg) - self.parseAction.append(pa) - self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) - return self - - def setFailAction( self, fn ): - """Define action to perform if parsing fails at this expression. - Fail acton fn is a callable function that takes the arguments - C{fn(s,loc,expr,err)} where: - - s = string being parsed - - loc = location where expression match was attempted and failed - - expr = the parse expression that failed - - err = the exception thrown - The function returns no value. 
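# Editor's note: illustrative sketch, not part of the removed pyparsing source. It
# combines setParseAction and addCondition as described above; the year cutoff and
# message come from the docstring example.
from pyparsing import ParseException, Word, nums

integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
year = integer.copy().addCondition(lambda toks: toks[0] >= 2000,
                                   message="Only support years 2000 and later")
try:
    year.parseString("1999")
except ParseException as pe:
    print(pe)   # -> Only support years 2000 and later (at char 0), (line:1, col:1)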
It may throw C{L{ParseFatalException}} - if it is desired to stop parsing immediately.""" - self.failAction = fn - return self - - def _skipIgnorables( self, instring, loc ): - exprsFound = True - while exprsFound: - exprsFound = False - for e in self.ignoreExprs: - try: - while 1: - loc,dummy = e._parse( instring, loc ) - exprsFound = True - except ParseException: - pass - return loc - - def preParse( self, instring, loc ): - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - - if self.skipWhitespace: - wt = self.whiteChars - instrlen = len(instring) - while loc < instrlen and instring[loc] in wt: - loc += 1 - - return loc - - def parseImpl( self, instring, loc, doActions=True ): - return loc, [] - - def postParse( self, instring, loc, tokenlist ): - return tokenlist - - #~ @profile - def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): - debugging = ( self.debug ) #and doActions ) - - if debugging or self.failAction: - #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) - if (self.debugActions[0] ): - self.debugActions[0]( instring, loc, self ) - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - try: - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - except ParseBaseException as err: - #~ print ("Exception raised:", err) - if self.debugActions[2]: - self.debugActions[2]( instring, tokensStart, self, err ) - if self.failAction: - self.failAction( instring, tokensStart, self, err ) - raise - else: - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - if self.mayIndexError or preloc >= len(instring): - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - else: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - - tokens = self.postParse( instring, loc, tokens ) - - retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) - if self.parseAction and (doActions or self.callDuringTry): - if debugging: - try: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - except ParseBaseException as err: - #~ print "Exception raised in user parse action:", err - if (self.debugActions[2] ): - self.debugActions[2]( instring, tokensStart, self, err ) - raise - else: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - if debugging: - #~ print ("Matched",self,"->",retTokens.asList()) - if (self.debugActions[1] ): - self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) - - return loc, retTokens - - def tryParse( self, instring, loc ): - try: - return self._parse( instring, loc, doActions=False )[0] - except ParseFatalException: - raise ParseException( instring, loc, self.errmsg, self) - - def canParseNext(self, instring, loc): - try: - self.tryParse(instring, loc) - except (ParseException, 
IndexError): - return False - else: - return True - - class _UnboundedCache(object): - def __init__(self): - cache = {} - self.not_in_cache = not_in_cache = object() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - if _OrderedDict is not None: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = _OrderedDict() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(cache) > size: - try: - cache.popitem(False) - except KeyError: - pass - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - else: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = {} - key_fifo = collections.deque([], size) - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(key_fifo) > size: - cache.pop(key_fifo.popleft(), None) - key_fifo.append(key) - - def clear(self): - cache.clear() - key_fifo.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - # argument cache for optimizing repeated calls when backtracking through recursive expressions - packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail - packrat_cache_lock = RLock() - packrat_cache_stats = [0, 0] - - # this method gets repeatedly called during backtracking with the same arguments - - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression - def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): - HIT, MISS = 0, 1 - lookup = (self, instring, loc, callPreParse, doActions) - with ParserElement.packrat_cache_lock: - cache = ParserElement.packrat_cache - value = cache.get(lookup) - if value is cache.not_in_cache: - ParserElement.packrat_cache_stats[MISS] += 1 - try: - value = self._parseNoCache(instring, loc, doActions, callPreParse) - except ParseBaseException as pe: - # cache a copy of the exception, without the traceback - cache.set(lookup, pe.__class__(*pe.args)) - raise - else: - cache.set(lookup, (value[0], value[1].copy())) - return value - else: - ParserElement.packrat_cache_stats[HIT] += 1 - if isinstance(value, Exception): - raise value - return (value[0], value[1].copy()) - - _parse = _parseNoCache - - @staticmethod - def resetCache(): - ParserElement.packrat_cache.clear() - ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) - - _packratEnabled = False - @staticmethod - def enablePackrat(cache_size_limit=128): - """Enables "packrat" parsing, which adds memoizing to the parsing logic. 
- Repeated parse attempts at the same string location (which happens - often in many complex grammars) can immediately return a cached value, - instead of re-executing parsing/validating code. Memoizing is done of - both valid results and parsing exceptions. - - Parameters: - - cache_size_limit - (default=C{128}) - if an integer value is provided - will limit the size of the packrat cache; if None is passed, then - the cache size will be unbounded; if 0 is passed, the cache will - be effectively disabled. - - This speedup may break existing programs that use parse actions that - have side-effects. For this reason, packrat parsing is disabled when - you first import pyparsing. To activate the packrat feature, your - program must call the class method C{ParserElement.enablePackrat()}. If - your program uses C{psyco} to "compile as you go", you must call - C{enablePackrat} before calling C{psyco.full()}. If you do not do this, - Python will crash. For best results, call C{enablePackrat()} immediately - after importing pyparsing. - - Example:: - import pyparsing - pyparsing.ParserElement.enablePackrat() - """ - if not ParserElement._packratEnabled: - ParserElement._packratEnabled = True - if cache_size_limit is None: - ParserElement.packrat_cache = ParserElement._UnboundedCache() - else: - ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) - ParserElement._parse = ParserElement._parseCache - - def parseString( self, instring, parseAll=False ): - """ - Execute the parse expression with the given string. - This is the main interface to the client code, once the complete - expression has been built. - - If you want the grammar to require that the entire input string be - successfully parsed, then set C{parseAll} to True (equivalent to ending - the grammar with C{L{StringEnd()}}). - - Note: C{parseString} implicitly calls C{expandtabs()} on the input string, - in order to report proper column numbers in parse actions. - If the input string contains tabs and - the grammar uses parse actions that use the C{loc} argument to index into the - string being parsed, you can ensure you have a consistent view of the input - string by: - - calling C{parseWithTabs} on your grammar before calling C{parseString} - (see L{I{parseWithTabs}<parseWithTabs>}) - - define your parse action using the full C{(s,loc,toks)} signature, and - reference the input string using the parse action's C{s} argument - - explictly expand the tabs in your input string before calling - C{parseString} - - Example:: - Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] - Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text - """ - ParserElement.resetCache() - if not self.streamlined: - self.streamline() - #~ self.saveAsList = True - for e in self.ignoreExprs: - e.streamline() - if not self.keepTabs: - instring = instring.expandtabs() - try: - loc, tokens = self._parse( instring, 0 ) - if parseAll: - loc = self.preParse( instring, loc ) - se = Empty() + StringEnd() - se._parse( instring, loc ) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - else: - return tokens - - def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): - """ - Scan the input string for expression matches. Each match will return the - matching tokens, start location, and end location. 
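# Editor's note: illustrative sketch, not part of the removed pyparsing source. It shows
# enablePackrat() plus parseString() with and without parseAll, per the docstrings above.
import pyparsing
from pyparsing import Word, alphas

pyparsing.ParserElement.enablePackrat()        # memoize repeated parse attempts

word = Word(alphas)
print(word.parseString("hello world"))         # -> ['hello']
# word.parseString("hello world", parseAll=True)   # would raise: input not fully consumed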
May be called with optional - C{maxMatches} argument, to clip scanning after 'n' matches are found. If - C{overlap} is specified, then overlapping matches will be reported. - - Note that the start and end locations are reported relative to the string - being parsed. See L{I{parseString}<parseString>} for more information on parsing - strings with embedded tabs. - - Example:: - source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" - print(source) - for tokens,start,end in Word(alphas).scanString(source): - print(' '*start + '^'*(end-start)) - print(' '*start + tokens[0]) - - prints:: - - sldjf123lsdjjkf345sldkjf879lkjsfd987 - ^^^^^ - sldjf - ^^^^^^^ - lsdjjkf - ^^^^^^ - sldkjf - ^^^^^^ - lkjsfd - """ - if not self.streamlined: - self.streamline() - for e in self.ignoreExprs: - e.streamline() - - if not self.keepTabs: - instring = _ustr(instring).expandtabs() - instrlen = len(instring) - loc = 0 - preparseFn = self.preParse - parseFn = self._parse - ParserElement.resetCache() - matches = 0 - try: - while loc <= instrlen and matches < maxMatches: - try: - preloc = preparseFn( instring, loc ) - nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) - except ParseException: - loc = preloc+1 - else: - if nextLoc > loc: - matches += 1 - yield tokens, preloc, nextLoc - if overlap: - nextloc = preparseFn( instring, loc ) - if nextloc > loc: - loc = nextLoc - else: - loc += 1 - else: - loc = nextLoc - else: - loc = preloc+1 - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def transformString( self, instring ): - """ - Extension to C{L{scanString}}, to modify matching text with modified tokens that may - be returned from a parse action. To use C{transformString}, define a grammar and - attach a parse action to it that modifies the returned token list. - Invoking C{transformString()} on a target string will then scan for matches, - and replace the matched text patterns according to the logic in the parse - action. C{transformString()} returns the resulting transformed string. - - Example:: - wd = Word(alphas) - wd.setParseAction(lambda toks: toks[0].title()) - - print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) - Prints:: - Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. - """ - out = [] - lastE = 0 - # force preservation of <TAB>s, to minimize unwanted transformation of string, and to - # keep string locs straight between transformString and scanString - self.keepTabs = True - try: - for t,s,e in self.scanString( instring ): - out.append( instring[lastE:s] ) - if t: - if isinstance(t,ParseResults): - out += t.asList() - elif isinstance(t,list): - out += t - else: - out.append(t) - lastE = e - out.append(instring[lastE:]) - out = [o for o in out if o] - return "".join(map(_ustr,_flatten(out))) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def searchString( self, instring, maxMatches=_MAX_INT ): - """ - Another extension to C{L{scanString}}, simplifying the access to the tokens found - to match the given parse expression. May be called with optional - C{maxMatches} argument, to clip searching after 'n' matches are found. 
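# Editor's note: illustrative sketch, not part of the removed pyparsing source. It
# restates the transformString() example from the docstring above.
from pyparsing import Word, alphas

wd = Word(alphas).setParseAction(lambda toks: toks[0].title())
print(wd.transformString("now is the winter of our discontent"))
# -> Now Is The Winter Of Our Discontent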
- - Example:: - # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters - cap_word = Word(alphas.upper(), alphas.lower()) - - print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) - - # the sum() builtin can be used to merge results into a single ParseResults object - print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) - prints:: - [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] - ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] - """ - try: - return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): - """ - Generator method to split a string using the given expression as a separator. - May be called with optional C{maxsplit} argument, to limit the number of splits; - and the optional C{includeSeparators} argument (default=C{False}), if the separating - matching text should be included in the split results. - - Example:: - punc = oneOf(list(".,;:/-!?")) - print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) - prints:: - ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] - """ - splits = 0 - last = 0 - for t,s,e in self.scanString(instring, maxMatches=maxsplit): - yield instring[last:s] - if includeSeparators: - yield t[0] - last = e - yield instring[last:] - - def __add__(self, other ): - """ - Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement - converts them to L{Literal}s by default. - - Example:: - greet = Word(alphas) + "," + Word(alphas) + "!" - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - Prints:: - Hello, World! 
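# Editor's note: illustrative sketch, not part of the removed pyparsing source. It
# restates the split() generator example from the docstring above.
from pyparsing import oneOf

punc = oneOf(list(".,;:/-!?"))
print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
# -> ['This', ' this', '', ' this sentence', ' is badly punctuated', '']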
-> ['Hello', ',', 'World', '!'] - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return And( [ self, other ] ) - - def __radd__(self, other ): - """ - Implementation of + operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other + self - - def __sub__(self, other): - """ - Implementation of - operator, returns C{L{And}} with error stop - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return self + And._ErrorStop() + other - - def __rsub__(self, other ): - """ - Implementation of - operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other - self - - def __mul__(self,other): - """ - Implementation of * operator, allows use of C{expr * 3} in place of - C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer - tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples - may also include C{None} as in: - - C{expr*(n,None)} or C{expr*(n,)} is equivalent - to C{expr*n + L{ZeroOrMore}(expr)} - (read as "at least n instances of C{expr}") - - C{expr*(None,n)} is equivalent to C{expr*(0,n)} - (read as "0 to n instances of C{expr}") - - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} - - Note that C{expr*(None,n)} does not raise an exception if - more than n exprs exist in the input stream; that is, - C{expr*(None,n)} does not enforce a maximum number of expr - occurrences. 
If this behavior is desired, then write - C{expr*(None,n) + ~expr} - """ - if isinstance(other,int): - minElements, optElements = other,0 - elif isinstance(other,tuple): - other = (other + (None, None))[:2] - if other[0] is None: - other = (0, other[1]) - if isinstance(other[0],int) and other[1] is None: - if other[0] == 0: - return ZeroOrMore(self) - if other[0] == 1: - return OneOrMore(self) - else: - return self*other[0] + ZeroOrMore(self) - elif isinstance(other[0],int) and isinstance(other[1],int): - minElements, optElements = other - optElements -= minElements - else: - raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) - else: - raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) - - if minElements < 0: - raise ValueError("cannot multiply ParserElement by negative value") - if optElements < 0: - raise ValueError("second tuple value must be greater or equal to first tuple value") - if minElements == optElements == 0: - raise ValueError("cannot multiply ParserElement by 0 or (0,0)") - - if (optElements): - def makeOptionalList(n): - if n>1: - return Optional(self + makeOptionalList(n-1)) - else: - return Optional(self) - if minElements: - if minElements == 1: - ret = self + makeOptionalList(optElements) - else: - ret = And([self]*minElements) + makeOptionalList(optElements) - else: - ret = makeOptionalList(optElements) - else: - if minElements == 1: - ret = self - else: - ret = And([self]*minElements) - return ret - - def __rmul__(self, other): - return self.__mul__(other) - - def __or__(self, other ): - """ - Implementation of | operator - returns C{L{MatchFirst}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return MatchFirst( [ self, other ] ) - - def __ror__(self, other ): - """ - Implementation of | operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other | self - - def __xor__(self, other ): - """ - Implementation of ^ operator - returns C{L{Or}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return Or( [ self, other ] ) - - def __rxor__(self, other ): - """ - Implementation of ^ operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other ^ self - - def __and__(self, other ): - """ - Implementation of & operator - returns C{L{Each}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - 
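# Editor's note: illustrative sketch, not part of the removed pyparsing source. It shows
# the tuple form of the * repetition operator described above; the sample input is made up.
from pyparsing import Word, alphas

word = Word(alphas)
two_to_three = word * (2, 3)                      # between 2 and 3 occurrences
print(two_to_three.parseString("ab cd ef gh"))    # -> ['ab', 'cd', 'ef']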
return None - return Each( [ self, other ] ) - - def __rand__(self, other ): - """ - Implementation of & operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other & self - - def __invert__( self ): - """ - Implementation of ~ operator - returns C{L{NotAny}} - """ - return NotAny( self ) - - def __call__(self, name=None): - """ - Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. - - If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be - passed as C{True}. - - If C{name} is omitted, same as calling C{L{copy}}. - - Example:: - # these are equivalent - userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") - """ - if name is not None: - return self.setResultsName(name) - else: - return self.copy() - - def suppress( self ): - """ - Suppresses the output of this C{ParserElement}; useful to keep punctuation from - cluttering up returned output. - """ - return Suppress( self ) - - def leaveWhitespace( self ): - """ - Disables the skipping of whitespace before matching the characters in the - C{ParserElement}'s defined pattern. This is normally only used internally by - the pyparsing module, but may be needed in some whitespace-sensitive grammars. - """ - self.skipWhitespace = False - return self - - def setWhitespaceChars( self, chars ): - """ - Overrides the default whitespace chars - """ - self.skipWhitespace = True - self.whiteChars = chars - self.copyDefaultWhiteChars = False - return self - - def parseWithTabs( self ): - """ - Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string. - Must be called before C{parseString} when the input grammar contains elements that - match C{<TAB>} characters. - """ - self.keepTabs = True - return self - - def ignore( self, other ): - """ - Define expression to be ignored (e.g., comments) while doing pattern - matching; may be called repeatedly, to define multiple comment or other - ignorable patterns. - - Example:: - patt = OneOrMore(Word(alphas)) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] - - patt.ignore(cStyleComment) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] - """ - if isinstance(other, basestring): - other = Suppress(other) - - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - self.ignoreExprs.append(other) - else: - self.ignoreExprs.append( Suppress( other.copy() ) ) - return self - - def setDebugActions( self, startAction, successAction, exceptionAction ): - """ - Enable display of debugging messages while doing pattern matching. - """ - self.debugActions = (startAction or _defaultStartDebugAction, - successAction or _defaultSuccessDebugAction, - exceptionAction or _defaultExceptionDebugAction) - self.debug = True - return self - - def setDebug( self, flag=True ): - """ - Enable display of debugging messages while doing pattern matching. - Set C{flag} to True to enable, False to disable. 
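# Editor's note: illustrative sketch, not part of the removed pyparsing source. It
# restates the ignore() example from the docstring above for skipping comments.
from pyparsing import OneOrMore, Word, alphas, cStyleComment

patt = OneOrMore(Word(alphas))
patt.ignore(cStyleComment)
print(patt.parseString('ablaj /* comment */ lskjd'))   # -> ['ablaj', 'lskjd']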
- - Example:: - wd = Word(alphas).setName("alphaword") - integer = Word(nums).setName("numword") - term = wd | integer - - # turn on debugging for wd - wd.setDebug() - - OneOrMore(term).parseString("abc 123 xyz 890") - - prints:: - Match alphaword at loc 0(1,1) - Matched alphaword -> ['abc'] - Match alphaword at loc 3(1,4) - Exception raised:Expected alphaword (at char 4), (line:1, col:5) - Match alphaword at loc 7(1,8) - Matched alphaword -> ['xyz'] - Match alphaword at loc 11(1,12) - Exception raised:Expected alphaword (at char 12), (line:1, col:13) - Match alphaword at loc 15(1,16) - Exception raised:Expected alphaword (at char 15), (line:1, col:16) - - The output shown is that produced by the default debug actions - custom debug actions can be - specified using L{setDebugActions}. Prior to attempting - to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"} - is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} - message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, - which makes debugging and exception messages easier to understand - for instance, the default - name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. - """ - if flag: - self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) - else: - self.debug = False - return self - - def __str__( self ): - return self.name - - def __repr__( self ): - return _ustr(self) - - def streamline( self ): - self.streamlined = True - self.strRepr = None - return self - - def checkRecursion( self, parseElementList ): - pass - - def validate( self, validateTrace=[] ): - """ - Check defined expressions for valid structure, check for infinite recursive definitions. - """ - self.checkRecursion( [] ) - - def parseFile( self, file_or_filename, parseAll=False ): - """ - Execute the parse expression on the given file or filename. - If a filename is specified (instead of a file object), - the entire file is opened, read, and closed before parsing. - """ - try: - file_contents = file_or_filename.read() - except AttributeError: - with open(file_or_filename, "r") as f: - file_contents = f.read() - try: - return self.parseString(file_contents, parseAll) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def __eq__(self,other): - if isinstance(other, ParserElement): - return self is other or vars(self) == vars(other) - elif isinstance(other, basestring): - return self.matches(other) - else: - return super(ParserElement,self)==other - - def __ne__(self,other): - return not (self == other) - - def __hash__(self): - return hash(id(self)) - - def __req__(self,other): - return self == other - - def __rne__(self,other): - return not (self == other) - - def matches(self, testString, parseAll=True): - """ - Method for quick testing of a parser against a test string. Good for simple - inline microtests of sub expressions while building up larger parser. 
- - Parameters: - - testString - to test against this expression for a match - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - Example:: - expr = Word(nums) - assert expr.matches("100") - """ - try: - self.parseString(_ustr(testString), parseAll=parseAll) - return True - except ParseBaseException: - return False - - def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): - """ - Execute the parse expression on a series of test strings, showing each - test, the parsed results or where the parse failed. Quick and easy way to - run a parse expression against a list of sample strings. - - Parameters: - - tests - a list of separate test strings, or a multiline string of test strings - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - comment - (default=C{'#'}) - expression for indicating embedded comments in the test - string; pass None to disable comment filtering - - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; - if False, only dump nested list - - printResults - (default=C{True}) prints test output to stdout - - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing - - Returns: a (success, results) tuple, where success indicates that all tests succeeded - (or failed if C{failureTests} is True), and the results contain a list of lines of each - test's output - - Example:: - number_expr = pyparsing_common.number.copy() - - result = number_expr.runTests(''' - # unsigned integer - 100 - # negative integer - -100 - # float with scientific notation - 6.02e23 - # integer with scientific notation - 1e-12 - ''') - print("Success" if result[0] else "Failed!") - - result = number_expr.runTests(''' - # stray character - 100Z - # missing leading digit before '.' - -.100 - # too many '.' - 3.14.159 - ''', failureTests=True) - print("Success" if result[0] else "Failed!") - prints:: - # unsigned integer - 100 - [100] - - # negative integer - -100 - [-100] - - # float with scientific notation - 6.02e23 - [6.02e+23] - - # integer with scientific notation - 1e-12 - [1e-12] - - Success - - # stray character - 100Z - ^ - FAIL: Expected end of text (at char 3), (line:1, col:4) - - # missing leading digit before '.' - -.100 - ^ - FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) - - # too many '.' - 3.14.159 - ^ - FAIL: Expected end of text (at char 4), (line:1, col:5) - - Success - - Each test string must be on a single line. If you want to test a string that spans multiple - lines, create a test like this:: - - expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") - - (Note that this is a raw string literal, you must include the leading 'r'.) 
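# Editor's note: illustrative sketch, not part of the removed pyparsing source. It shows
# matches() for quick inline checks, as documented above.
from pyparsing import Word, nums

expr = Word(nums)
print(expr.matches("100"))   # -> True
print(expr.matches("abc"))   # -> False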
- """ - if isinstance(tests, basestring): - tests = list(map(str.strip, tests.rstrip().splitlines())) - if isinstance(comment, basestring): - comment = Literal(comment) - allResults = [] - comments = [] - success = True - for t in tests: - if comment is not None and comment.matches(t, False) or comments and not t: - comments.append(t) - continue - if not t: - continue - out = ['\n'.join(comments), t] - comments = [] - try: - t = t.replace(r'\n','\n') - result = self.parseString(t, parseAll=parseAll) - out.append(result.dump(full=fullDump)) - success = success and not failureTests - except ParseBaseException as pe: - fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" - if '\n' in t: - out.append(line(pe.loc, t)) - out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) - else: - out.append(' '*pe.loc + '^' + fatal) - out.append("FAIL: " + str(pe)) - success = success and failureTests - result = pe - except Exception as exc: - out.append("FAIL-EXCEPTION: " + str(exc)) - success = success and failureTests - result = exc - - if printResults: - if fullDump: - out.append('') - print('\n'.join(out)) - - allResults.append((t, result)) - - return success, allResults - - -class Token(ParserElement): - """ - Abstract C{ParserElement} subclass, for defining atomic matching patterns. - """ - def __init__( self ): - super(Token,self).__init__( savelist=False ) - - -class Empty(Token): - """ - An empty token, will always match. - """ - def __init__( self ): - super(Empty,self).__init__() - self.name = "Empty" - self.mayReturnEmpty = True - self.mayIndexError = False - - -class NoMatch(Token): - """ - A token that will never match. - """ - def __init__( self ): - super(NoMatch,self).__init__() - self.name = "NoMatch" - self.mayReturnEmpty = True - self.mayIndexError = False - self.errmsg = "Unmatchable token" - - def parseImpl( self, instring, loc, doActions=True ): - raise ParseException(instring, loc, self.errmsg, self) - - -class Literal(Token): - """ - Token to exactly match a specified string. - - Example:: - Literal('blah').parseString('blah') # -> ['blah'] - Literal('blah').parseString('blahfooblah') # -> ['blah'] - Literal('blah').parseString('bla') # -> Exception: Expected "blah" - - For case-insensitive matching, use L{CaselessLiteral}. - - For keyword matching (force word break before and after the matched string), - use L{Keyword} or L{CaselessKeyword}. 
- """ - def __init__( self, matchString ): - super(Literal,self).__init__() - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Literal; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.__class__ = Empty - self.name = '"%s"' % _ustr(self.match) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - - # Performance tuning: this routine gets called a *lot* - # if this is a single character match string and the first character matches, - # short-circuit as quickly as possible, and avoid calling startswith - #~ @profile - def parseImpl( self, instring, loc, doActions=True ): - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) -_L = Literal -ParserElement._literalStringClass = Literal - -class Keyword(Token): - """ - Token to exactly match a specified string as a keyword, that is, it must be - immediately followed by a non-keyword character. Compare with C{L{Literal}}: - - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. - - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} - Accepts two optional constructor arguments in addition to the keyword string: - - C{identChars} is a string of characters that would be valid identifier characters, - defaulting to all alphanumerics + "_" and "$" - - C{caseless} allows case-insensitive matching, default is C{False}. - - Example:: - Keyword("start").parseString("start") # -> ['start'] - Keyword("start").parseString("starting") # -> Exception - - For case-insensitive matching, use L{CaselessKeyword}. 
- """ - DEFAULT_KEYWORD_CHARS = alphanums+"_$" - - def __init__( self, matchString, identChars=None, caseless=False ): - super(Keyword,self).__init__() - if identChars is None: - identChars = Keyword.DEFAULT_KEYWORD_CHARS - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Keyword; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.name = '"%s"' % self.match - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - self.caseless = caseless - if caseless: - self.caselessmatch = matchString.upper() - identChars = identChars.upper() - self.identChars = set(identChars) - - def parseImpl( self, instring, loc, doActions=True ): - if self.caseless: - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and - (loc == 0 or instring[loc-1].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - else: - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and - (loc == 0 or instring[loc-1] not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - - def copy(self): - c = super(Keyword,self).copy() - c.identChars = Keyword.DEFAULT_KEYWORD_CHARS - return c - - @staticmethod - def setDefaultKeywordChars( chars ): - """Overrides the default Keyword chars - """ - Keyword.DEFAULT_KEYWORD_CHARS = chars - -class CaselessLiteral(Literal): - """ - Token to match a specified string, ignoring case of letters. - Note: the matched results will always be in the case of the given - match string, NOT the case of the input text. - - Example:: - OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] - - (Contrast with example for L{CaselessKeyword}.) - """ - def __init__( self, matchString ): - super(CaselessLiteral,self).__init__( matchString.upper() ) - # Preserve the defining literal. - self.returnString = matchString - self.name = "'%s'" % self.returnString - self.errmsg = "Expected " + self.name - - def parseImpl( self, instring, loc, doActions=True ): - if instring[ loc:loc+self.matchLen ].upper() == self.match: - return loc+self.matchLen, self.returnString - raise ParseException(instring, loc, self.errmsg, self) - -class CaselessKeyword(Keyword): - """ - Caseless version of L{Keyword}. - - Example:: - OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] - - (Contrast with example for L{CaselessLiteral}.) - """ - def __init__( self, matchString, identChars=None ): - super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) - - def parseImpl( self, instring, loc, doActions=True ): - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - -class CloseMatch(Token): - """ - A variation on L{Literal} which matches "close" matches, that is, - strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: - - C{match_string} - string to be matched - - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match - - The results from a successful parse will contain the matched text from the input string and the following named results: - - C{mismatches} - a list of the positions within the match_string where mismatches were found - - C{original} - the original match_string used to compare against the input string - - If C{mismatches} is an empty list, then the match was an exact match. - - Example:: - patt = CloseMatch("ATCATCGAATGGA") - patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) - patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) - - # exact match - patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) - - # close match allowing up to 2 mismatches - patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) - patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) - """ - def __init__(self, match_string, maxMismatches=1): - super(CloseMatch,self).__init__() - self.name = match_string - self.match_string = match_string - self.maxMismatches = maxMismatches - self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) - self.mayIndexError = False - self.mayReturnEmpty = False - - def parseImpl( self, instring, loc, doActions=True ): - start = loc - instrlen = len(instring) - maxloc = start + len(self.match_string) - - if maxloc <= instrlen: - match_string = self.match_string - match_stringloc = 0 - mismatches = [] - maxMismatches = self.maxMismatches - - for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): - src,mat = s_m - if src != mat: - mismatches.append(match_stringloc) - if len(mismatches) > maxMismatches: - break - else: - loc = match_stringloc + 1 - results = ParseResults([instring[start:loc]]) - results['original'] = self.match_string - results['mismatches'] = mismatches - return loc, results - - raise ParseException(instring, loc, self.errmsg, self) - - -class Word(Token): - """ - Token for matching words composed of allowed character sets. - Defined with string containing all allowed initial characters, - an optional string containing allowed body characters (if omitted, - defaults to the initial character set), and an optional minimum, - maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. An optional - C{excludeChars} parameter can list characters that might be found in - the input C{bodyChars} string; useful to define a word of all printables - except for one or two characters, for instance. - - L{srange} is useful for defining custom character set strings for defining - C{Word} expressions, using range notation from regular expression character sets. - - A common mistake is to use C{Word} to match a specific literal string, as in - C{Word("Address")}. Remember that C{Word} uses the string argument to define - I{sets} of matchable characters. This expression would match "Add", "AAA", - "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. - To match an exact literal string, use L{Literal} or L{Keyword}. 
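# Editor's note: illustrative sketch, not part of the removed pyparsing source. It uses
# the CloseMatch example data from the docstring above; the printed form of the named
# results is my reading of the implementation, so treat it as approximate.
from pyparsing import CloseMatch

patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
result = patt.parseString("ATCAXCGAAXGGA")
print(result[0], result["mismatches"])   # -> ATCAXCGAAXGGA [4, 9]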
- - pyparsing includes helper strings for building Words: - - L{alphas} - - L{nums} - - L{alphanums} - - L{hexnums} - - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) - - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) - - L{printables} (any non-whitespace character) - - Example:: - # a word composed of digits - integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) - - # a word with a leading capital, and zero or more lowercase - capital_word = Word(alphas.upper(), alphas.lower()) - - # hostnames are alphanumeric, with leading alpha, and '-' - hostname = Word(alphas, alphanums+'-') - - # roman numeral (not a strict parser, accepts invalid mix of characters) - roman = Word("IVXLCDM") - - # any string of non-whitespace characters, except for ',' - csv_value = Word(printables, excludeChars=",") - """ - def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): - super(Word,self).__init__() - if excludeChars: - initChars = ''.join(c for c in initChars if c not in excludeChars) - if bodyChars: - bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) - self.initCharsOrig = initChars - self.initChars = set(initChars) - if bodyChars : - self.bodyCharsOrig = bodyChars - self.bodyChars = set(bodyChars) - else: - self.bodyCharsOrig = initChars - self.bodyChars = set(initChars) - - self.maxSpecified = max > 0 - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.asKeyword = asKeyword - - if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): - if self.bodyCharsOrig == self.initCharsOrig: - self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) - elif len(self.initCharsOrig) == 1: - self.reString = "%s[%s]*" % \ - (re.escape(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - else: - self.reString = "[%s][%s]*" % \ - (_escapeRegexRangeChars(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - if self.asKeyword: - self.reString = r"\b"+self.reString+r"\b" - try: - self.re = re.compile( self.reString ) - except Exception: - self.re = None - - def parseImpl( self, instring, loc, doActions=True ): - if self.re: - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - return loc, result.group() - - if not(instring[ loc ] in self.initChars): - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - instrlen = len(instring) - bodychars = self.bodyChars - maxloc = start + self.maxLen - maxloc = min( maxloc, instrlen ) - while loc < maxloc and instring[loc] in bodychars: - loc += 1 - - throwException = False - if loc - start < self.minLen: - throwException = True - if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: - throwException = True - if self.asKeyword: - if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): - throwException = True - - if throwException: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, 
instring[start:loc] - - def __str__( self ): - try: - return super(Word,self).__str__() - except Exception: - pass - - - if self.strRepr is None: - - def charsAsStr(s): - if len(s)>4: - return s[:4]+"..." - else: - return s - - if ( self.initCharsOrig != self.bodyCharsOrig ): - self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) - else: - self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) - - return self.strRepr - - -class Regex(Token): - r""" - Token for matching strings that match a given regular expression. - Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. - If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as - named parse results. - - Example:: - realnum = Regex(r"[+-]?\d+\.\d*") - date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') - # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression - roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") - """ - compiledREtype = type(re.compile("[A-Z]")) - def __init__( self, pattern, flags=0): - """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" - super(Regex,self).__init__() - - if isinstance(pattern, basestring): - if not pattern: - warnings.warn("null string passed to Regex; use Empty() instead", - SyntaxWarning, stacklevel=2) - - self.pattern = pattern - self.flags = flags - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % pattern, - SyntaxWarning, stacklevel=2) - raise - - elif isinstance(pattern, Regex.compiledREtype): - self.re = pattern - self.pattern = \ - self.reString = str(pattern) - self.flags = flags - - else: - raise ValueError("Regex may only be constructed with a string or a compiled RE object") - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - d = result.groupdict() - ret = ParseResults(result.group()) - if d: - for k in d: - ret[k] = d[k] - return loc,ret - - def __str__( self ): - try: - return super(Regex,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "Re:(%s)" % repr(self.pattern) - - return self.strRepr - - -class QuotedString(Token): - r""" - Token for matching strings that are delimited by quoting characters. 
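# Editor's note: illustrative sketch, not part of the removed pyparsing source. It shows
# Regex named groups becoming named parse results, per the docstring above; the sample
# date is made up.
from pyparsing import Regex

date = Regex(r"(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)")
result = date.parseString("2021-06-20")
print(result["year"], result["month"], result["day"])   # -> 2021 06 20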
- - Defined with the following parameters: - - quoteChar - string of one or more characters defining the quote delimiting string - - escChar - character to escape quotes, typically backslash (default=C{None}) - - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) - - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) - - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) - - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) - - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) - - Example:: - qs = QuotedString('"') - print(qs.searchString('lsjdf "This is the quote" sldjf')) - complex_qs = QuotedString('{{', endQuoteChar='}}') - print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) - sql_qs = QuotedString('"', escQuote='""') - print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) - prints:: - [['This is the quote']] - [['This is the "quote"']] - [['This is the quote with "embedded" quotes']] - """ - def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): - super(QuotedString,self).__init__() - - # remove white space from quote chars - wont work anyway - quoteChar = quoteChar.strip() - if not quoteChar: - warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - if endQuoteChar is None: - endQuoteChar = quoteChar - else: - endQuoteChar = endQuoteChar.strip() - if not endQuoteChar: - warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - self.quoteChar = quoteChar - self.quoteCharLen = len(quoteChar) - self.firstQuoteChar = quoteChar[0] - self.endQuoteChar = endQuoteChar - self.endQuoteCharLen = len(endQuoteChar) - self.escChar = escChar - self.escQuote = escQuote - self.unquoteResults = unquoteResults - self.convertWhitespaceEscapes = convertWhitespaceEscapes - - if multiline: - self.flags = re.MULTILINE | re.DOTALL - self.pattern = r'%s(?:[^%s%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - else: - self.flags = 0 - self.pattern = r'%s(?:[^%s\n\r%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - if len(self.endQuoteChar) > 1: - self.pattern += ( - '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), - _escapeRegexRangeChars(self.endQuoteChar[i])) - for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' - ) - if escQuote: - self.pattern += (r'|(?:%s)' % re.escape(escQuote)) - if escChar: - self.pattern += (r'|(?:%s.)' % re.escape(escChar)) - self.escCharReplacePattern = re.escape(self.escChar)+"(.)" - self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, - SyntaxWarning, stacklevel=2) - raise - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - - def 
parseImpl( self, instring, loc, doActions=True ): - result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = result.group() - - if self.unquoteResults: - - # strip off quotes - ret = ret[self.quoteCharLen:-self.endQuoteCharLen] - - if isinstance(ret,basestring): - # replace escaped whitespace - if '\\' in ret and self.convertWhitespaceEscapes: - ws_map = { - r'\t' : '\t', - r'\n' : '\n', - r'\f' : '\f', - r'\r' : '\r', - } - for wslit,wschar in ws_map.items(): - ret = ret.replace(wslit, wschar) - - # replace escaped characters - if self.escChar: - ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) - - # replace escaped quotes - if self.escQuote: - ret = ret.replace(self.escQuote, self.endQuoteChar) - - return loc, ret - - def __str__( self ): - try: - return super(QuotedString,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) - - return self.strRepr - - -class CharsNotIn(Token): - """ - Token for matching words composed of characters I{not} in a given set (will - include whitespace in matched characters if not listed in the provided exclusion set - see example). - Defined with string containing all disallowed characters, and an optional - minimum, maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. - - Example:: - # define a comma-separated-value as anything that is not a ',' - csv_value = CharsNotIn(',') - print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) - prints:: - ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] - """ - def __init__( self, notChars, min=1, max=0, exact=0 ): - super(CharsNotIn,self).__init__() - self.skipWhitespace = False - self.notChars = notChars - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = ( self.minLen == 0 ) - self.mayIndexError = False - - def parseImpl( self, instring, loc, doActions=True ): - if instring[loc] in self.notChars: - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - notchars = self.notChars - maxlen = min( start+self.maxLen, len(instring) ) - while loc < maxlen and \ - (instring[loc] not in notchars): - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - def __str__( self ): - try: - return super(CharsNotIn, self).__str__() - except Exception: - pass - - if self.strRepr is None: - if len(self.notChars) > 4: - self.strRepr = "!W:(%s...)" % self.notChars[:4] - else: - self.strRepr = "!W:(%s)" % self.notChars - - return self.strRepr - -class White(Token): - """ - Special matching class for matching whitespace. Normally, whitespace is ignored - by pyparsing grammars. This class is included when some whitespace structures - are significant. Define with a string containing the whitespace characters to be - matched; default is C{" \\t\\r\\n"}. 
Also takes optional C{min}, C{max}, and C{exact} arguments, - as defined for the C{L{Word}} class. - """ - whiteStrs = { - " " : "<SPC>", - "\t": "<TAB>", - "\n": "<LF>", - "\r": "<CR>", - "\f": "<FF>", - } - def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): - super(White,self).__init__() - self.matchWhite = ws - self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) - #~ self.leaveWhitespace() - self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) - self.mayReturnEmpty = True - self.errmsg = "Expected " + self.name - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - def parseImpl( self, instring, loc, doActions=True ): - if not(instring[ loc ] in self.matchWhite): - raise ParseException(instring, loc, self.errmsg, self) - start = loc - loc += 1 - maxloc = start + self.maxLen - maxloc = min( maxloc, len(instring) ) - while loc < maxloc and instring[loc] in self.matchWhite: - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - -class _PositionToken(Token): - def __init__( self ): - super(_PositionToken,self).__init__() - self.name=self.__class__.__name__ - self.mayReturnEmpty = True - self.mayIndexError = False - -class GoToColumn(_PositionToken): - """ - Token to advance to a specific column of input text; useful for tabular report scraping. - """ - def __init__( self, colno ): - super(GoToColumn,self).__init__() - self.col = colno - - def preParse( self, instring, loc ): - if col(loc,instring) != self.col: - instrlen = len(instring) - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : - loc += 1 - return loc - - def parseImpl( self, instring, loc, doActions=True ): - thiscol = col( loc, instring ) - if thiscol > self.col: - raise ParseException( instring, loc, "Text not in expected column", self ) - newloc = loc + self.col - thiscol - ret = instring[ loc: newloc ] - return newloc, ret - - -class LineStart(_PositionToken): - """ - Matches if current position is at the beginning of a line within the parse string - - Example:: - - test = '''\ - AAA this line - AAA and this line - AAA but not this one - B AAA and definitely not this one - ''' - - for t in (LineStart() + 'AAA' + restOfLine).searchString(test): - print(t) - - Prints:: - ['AAA', ' this line'] - ['AAA', ' and this line'] - - """ - def __init__( self ): - super(LineStart,self).__init__() - self.errmsg = "Expected start of line" - - def parseImpl( self, instring, loc, doActions=True ): - if col(loc, instring) == 1: - return loc, [] - raise ParseException(instring, loc, self.errmsg, self) - -class LineEnd(_PositionToken): - """ - Matches if current position is at the end of a line within the parse string - """ - def __init__( self ): - super(LineEnd,self).__init__() - self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) - self.errmsg = "Expected end of line" - - def parseImpl( self, instring, loc, doActions=True ): - if loc<len(instring): - if instring[loc] == "\n": - return loc+1, "\n" - else: - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc+1, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class StringStart(_PositionToken): - """ - Matches if current position is at the beginning of 
the parse string - """ - def __init__( self ): - super(StringStart,self).__init__() - self.errmsg = "Expected start of text" - - def parseImpl( self, instring, loc, doActions=True ): - if loc != 0: - # see if entire string up to here is just whitespace and ignoreables - if loc != self.preParse( instring, 0 ): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class StringEnd(_PositionToken): - """ - Matches if current position is at the end of the parse string - """ - def __init__( self ): - super(StringEnd,self).__init__() - self.errmsg = "Expected end of text" - - def parseImpl( self, instring, loc, doActions=True ): - if loc < len(instring): - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc+1, [] - elif loc > len(instring): - return loc, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class WordStart(_PositionToken): - """ - Matches if the current position is at the beginning of a Word, and - is not preceded by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of - the string being parsed, or at the beginning of a line. - """ - def __init__(self, wordChars = printables): - super(WordStart,self).__init__() - self.wordChars = set(wordChars) - self.errmsg = "Not at the start of a word" - - def parseImpl(self, instring, loc, doActions=True ): - if loc != 0: - if (instring[loc-1] in self.wordChars or - instring[loc] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class WordEnd(_PositionToken): - """ - Matches if the current position is at the end of a Word, and - is not followed by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of - the string being parsed, or at the end of a line. - """ - def __init__(self, wordChars = printables): - super(WordEnd,self).__init__() - self.wordChars = set(wordChars) - self.skipWhitespace = False - self.errmsg = "Not at the end of a word" - - def parseImpl(self, instring, loc, doActions=True ): - instrlen = len(instring) - if instrlen>0 and loc<instrlen: - if (instring[loc] in self.wordChars or - instring[loc-1] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - -class ParseExpression(ParserElement): - """ - Abstract subclass of ParserElement, for combining and post-processing parsed tokens. 
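For reference, a minimal sketch of the basic token classes deleted in the hunk above (Word, Regex, QuotedString), assuming the pyparsing 2.x API that the removed docstrings describe; the sample strings and variable names are illustrative only and are not taken from this repository.

    from pyparsing import Word, nums, QuotedString, Regex

    integer  = Word(nums)                          # one or more digit characters
    quoted   = QuotedString('"', escQuote='""')    # SQL-style "" escapes an embedded quote
    iso_date = Regex(r"(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)")

    print(integer.parseString("42"))                       # ['42']
    print(quoted.searchString('say "a ""quoted"" word"'))  # [['a "quoted" word']]
    print(iso_date.parseString("2021-06-20")["year"])      # 2021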
- """ - def __init__( self, exprs, savelist = False ): - super(ParseExpression,self).__init__(savelist) - if isinstance( exprs, _generatorType ): - exprs = list(exprs) - - if isinstance( exprs, basestring ): - self.exprs = [ ParserElement._literalStringClass( exprs ) ] - elif isinstance( exprs, Iterable ): - exprs = list(exprs) - # if sequence of strings provided, wrap with Literal - if all(isinstance(expr, basestring) for expr in exprs): - exprs = map(ParserElement._literalStringClass, exprs) - self.exprs = list(exprs) - else: - try: - self.exprs = list( exprs ) - except TypeError: - self.exprs = [ exprs ] - self.callPreparse = False - - def __getitem__( self, i ): - return self.exprs[i] - - def append( self, other ): - self.exprs.append( other ) - self.strRepr = None - return self - - def leaveWhitespace( self ): - """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on - all contained expressions.""" - self.skipWhitespace = False - self.exprs = [ e.copy() for e in self.exprs ] - for e in self.exprs: - e.leaveWhitespace() - return self - - def ignore( self, other ): - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - super( ParseExpression, self).ignore( other ) - for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) - else: - super( ParseExpression, self).ignore( other ) - for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) - return self - - def __str__( self ): - try: - return super(ParseExpression,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) - return self.strRepr - - def streamline( self ): - super(ParseExpression,self).streamline() - - for e in self.exprs: - e.streamline() - - # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) - # but only if there are no parse actions or resultsNames on the nested And's - # (likewise for Or's and MatchFirst's) - if ( len(self.exprs) == 2 ): - other = self.exprs[0] - if ( isinstance( other, self.__class__ ) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): - self.exprs = other.exprs[:] + [ self.exprs[1] ] - self.strRepr = None - self.mayReturnEmpty |= other.mayReturnEmpty - self.mayIndexError |= other.mayIndexError - - other = self.exprs[-1] - if ( isinstance( other, self.__class__ ) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): - self.exprs = self.exprs[:-1] + other.exprs[:] - self.strRepr = None - self.mayReturnEmpty |= other.mayReturnEmpty - self.mayIndexError |= other.mayIndexError - - self.errmsg = "Expected " + _ustr(self) - - return self - - def setResultsName( self, name, listAllMatches=False ): - ret = super(ParseExpression,self).setResultsName(name,listAllMatches) - return ret - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] - for e in self.exprs: - e.validate(tmp) - self.checkRecursion( [] ) - - def copy(self): - ret = super(ParseExpression,self).copy() - ret.exprs = [e.copy() for e in self.exprs] - return ret - -class And(ParseExpression): - """ - Requires all given C{ParseExpression}s to be found in the given order. - Expressions may be separated by whitespace. - May be constructed using the C{'+'} operator. - May also be constructed using the C{'-'} operator, which will suppress backtracking. 
- - Example:: - integer = Word(nums) - name_expr = OneOrMore(Word(alphas)) - - expr = And([integer("id"),name_expr("name"),integer("age")]) - # more easily written as: - expr = integer("id") + name_expr("name") + integer("age") - """ - - class _ErrorStop(Empty): - def __init__(self, *args, **kwargs): - super(And._ErrorStop,self).__init__(*args, **kwargs) - self.name = '-' - self.leaveWhitespace() - - def __init__( self, exprs, savelist = True ): - super(And,self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.setWhitespaceChars( self.exprs[0].whiteChars ) - self.skipWhitespace = self.exprs[0].skipWhitespace - self.callPreparse = True - - def parseImpl( self, instring, loc, doActions=True ): - # pass False as last arg to _parse for first element, since we already - # pre-parsed the string as part of our And pre-parsing - loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) - errorStop = False - for e in self.exprs[1:]: - if isinstance(e, And._ErrorStop): - errorStop = True - continue - if errorStop: - try: - loc, exprtokens = e._parse( instring, loc, doActions ) - except ParseSyntaxException: - raise - except ParseBaseException as pe: - pe.__traceback__ = None - raise ParseSyntaxException._from_exception(pe) - except IndexError: - raise ParseSyntaxException(instring, len(instring), self.errmsg, self) - else: - loc, exprtokens = e._parse( instring, loc, doActions ) - if exprtokens or exprtokens.haskeys(): - resultlist += exprtokens - return loc, resultlist - - def __iadd__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #And( [ self, other ] ) - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - if not e.mayReturnEmpty: - break - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - -class Or(ParseExpression): - """ - Requires that at least one C{ParseExpression} is found. - If two expressions match, the expression that matches the longest string will be used. - May be constructed using the C{'^'} operator. - - Example:: - # construct Or using '^' operator - - number = Word(nums) ^ Combine(Word(nums) + '.' 
+ Word(nums)) - print(number.searchString("123 3.1416 789")) - prints:: - [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(Or,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - matches = [] - for e in self.exprs: - try: - loc2 = e.tryParse( instring, loc ) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - else: - # save match among all matches, to retry longest to shortest - matches.append((loc2, e)) - - if matches: - matches.sort(key=lambda x: -x[0]) - for _,e in matches: - try: - return e._parse( instring, loc, doActions ) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - - def __ixor__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #Or( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class MatchFirst(ParseExpression): - """ - Requires that at least one C{ParseExpression} is found. - If two expressions match, the first one listed is the one that will match. - May be constructed using the C{'|'} operator. - - Example:: - # construct MatchFirst using '|' operator - - # watch the order of expressions to match - number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) - print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] - - # put more selective expression first - number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) - print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(MatchFirst,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - for e in self.exprs: - try: - ret = e._parse( instring, loc, doActions ) - return ret - except ParseException as err: - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - - # only got here if no expression matched, raise exception for match that made it the furthest - else: - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - def __ior__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #MatchFirst( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class Each(ParseExpression): - """ - Requires all given C{ParseExpression}s to be found, but in any order. - Expressions may be separated by whitespace. - May be constructed using the C{'&'} operator. 
- - Example:: - color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") - shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") - integer = Word(nums) - shape_attr = "shape:" + shape_type("shape") - posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") - color_attr = "color:" + color("color") - size_attr = "size:" + integer("size") - - # use Each (using operator '&') to accept attributes in any order - # (shape and posn are required, color and size are optional) - shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) - - shape_spec.runTests(''' - shape: SQUARE color: BLACK posn: 100, 120 - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - color:GREEN size:20 shape:TRIANGLE posn:20,40 - ''' - ) - prints:: - shape: SQUARE color: BLACK posn: 100, 120 - ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - - color: BLACK - - posn: ['100', ',', '120'] - - x: 100 - - y: 120 - - shape: SQUARE - - - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] - - color: BLUE - - posn: ['50', ',', '80'] - - x: 50 - - y: 80 - - shape: CIRCLE - - size: 50 - - - color: GREEN size: 20 shape: TRIANGLE posn: 20,40 - ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] - - color: GREEN - - posn: ['20', ',', '40'] - - x: 20 - - y: 40 - - shape: TRIANGLE - - size: 20 - """ - def __init__( self, exprs, savelist = True ): - super(Each,self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.skipWhitespace = True - self.initExprGroups = True - - def parseImpl( self, instring, loc, doActions=True ): - if self.initExprGroups: - self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) - opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] - opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] - self.optionals = opt1 + opt2 - self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] - self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] - self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] - self.required += self.multirequired - self.initExprGroups = False - tmpLoc = loc - tmpReqd = self.required[:] - tmpOpt = self.optionals[:] - matchOrder = [] - - keepMatching = True - while keepMatching: - tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired - failed = [] - for e in tmpExprs: - try: - tmpLoc = e.tryParse( instring, tmpLoc ) - except ParseException: - failed.append(e) - else: - matchOrder.append(self.opt1map.get(id(e),e)) - if e in tmpReqd: - tmpReqd.remove(e) - elif e in tmpOpt: - tmpOpt.remove(e) - if len(failed) == len(tmpExprs): - keepMatching = False - - if tmpReqd: - missing = ", ".join(_ustr(e) for e in tmpReqd) - raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) - - # add any unmatched Optionals, in case they have default values defined - matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] - - resultlist = [] - for e in matchOrder: - loc,results = e._parse(instring,loc,doActions) - resultlist.append(results) - - finalResults = sum(resultlist, ParseResults([])) - return loc, finalResults - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " & 
".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class ParseElementEnhance(ParserElement): - """ - Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. - """ - def __init__( self, expr, savelist=False ): - super(ParseElementEnhance,self).__init__(savelist) - if isinstance( expr, basestring ): - if issubclass(ParserElement._literalStringClass, Token): - expr = ParserElement._literalStringClass(expr) - else: - expr = ParserElement._literalStringClass(Literal(expr)) - self.expr = expr - self.strRepr = None - if expr is not None: - self.mayIndexError = expr.mayIndexError - self.mayReturnEmpty = expr.mayReturnEmpty - self.setWhitespaceChars( expr.whiteChars ) - self.skipWhitespace = expr.skipWhitespace - self.saveAsList = expr.saveAsList - self.callPreparse = expr.callPreparse - self.ignoreExprs.extend(expr.ignoreExprs) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr is not None: - return self.expr._parse( instring, loc, doActions, callPreParse=False ) - else: - raise ParseException("",loc,self.errmsg,self) - - def leaveWhitespace( self ): - self.skipWhitespace = False - self.expr = self.expr.copy() - if self.expr is not None: - self.expr.leaveWhitespace() - return self - - def ignore( self, other ): - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - else: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - return self - - def streamline( self ): - super(ParseElementEnhance,self).streamline() - if self.expr is not None: - self.expr.streamline() - return self - - def checkRecursion( self, parseElementList ): - if self in parseElementList: - raise RecursiveGrammarException( parseElementList+[self] ) - subRecCheckList = parseElementList[:] + [ self ] - if self.expr is not None: - self.expr.checkRecursion( subRecCheckList ) - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion( [] ) - - def __str__( self ): - try: - return super(ParseElementEnhance,self).__str__() - except Exception: - pass - - if self.strRepr is None and self.expr is not None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) - return self.strRepr - - -class FollowedBy(ParseElementEnhance): - """ - Lookahead matching of the given parse expression. C{FollowedBy} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression matches at the current - position. C{FollowedBy} always returns a null token list. 
- - Example:: - # use FollowedBy to match a label only if it is followed by a ':' - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() - prints:: - [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] - """ - def __init__( self, expr ): - super(FollowedBy,self).__init__(expr) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - self.expr.tryParse( instring, loc ) - return loc, [] - - -class NotAny(ParseElementEnhance): - """ - Lookahead to disallow matching with the given parse expression. C{NotAny} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression does I{not} match at the current - position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} - always returns a null token list. May be constructed using the '~' operator. - - Example:: - - """ - def __init__( self, expr ): - super(NotAny,self).__init__(expr) - #~ self.leaveWhitespace() - self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs - self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr.canParseNext(instring, loc): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "~{" + _ustr(self.expr) + "}" - - return self.strRepr - -class _MultipleMatch(ParseElementEnhance): - def __init__( self, expr, stopOn=None): - super(_MultipleMatch, self).__init__(expr) - self.saveAsList = True - ender = stopOn - if isinstance(ender, basestring): - ender = ParserElement._literalStringClass(ender) - self.not_ender = ~ender if ender is not None else None - - def parseImpl( self, instring, loc, doActions=True ): - self_expr_parse = self.expr._parse - self_skip_ignorables = self._skipIgnorables - check_ender = self.not_ender is not None - if check_ender: - try_not_ender = self.not_ender.tryParse - - # must be at least one (but first see if we are the stopOn sentinel; - # if so, fail) - if check_ender: - try_not_ender(instring, loc) - loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) - try: - hasIgnoreExprs = (not not self.ignoreExprs) - while 1: - if check_ender: - try_not_ender(instring, loc) - if hasIgnoreExprs: - preloc = self_skip_ignorables( instring, loc ) - else: - preloc = loc - loc, tmptokens = self_expr_parse( instring, preloc, doActions ) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens - except (ParseException,IndexError): - pass - - return loc, tokens - -class OneOrMore(_MultipleMatch): - """ - Repetition of one or more of the given expression. - - Parameters: - - expr - expression that must match one or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: BLACK" - OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] - - # use stopOn attribute for OneOrMore to avoid reading label string as part of the data - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] - - # could also be written as - (attr_expr * (1,)).parseString(text).pprint() - """ - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + _ustr(self.expr) + "}..." - - return self.strRepr - -class ZeroOrMore(_MultipleMatch): - """ - Optional repetition of zero or more of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example: similar to L{OneOrMore} - """ - def __init__( self, expr, stopOn=None): - super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) - except (ParseException,IndexError): - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]..." - - return self.strRepr - -class _NullToken(object): - def __bool__(self): - return False - __nonzero__ = __bool__ - def __str__(self): - return "" - -_optionalNotMatched = _NullToken() -class Optional(ParseElementEnhance): - """ - Optional matching of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - default (optional) - value to be returned if the optional expression is not found. - - Example:: - # US postal code can be a 5-digit zip, plus optional 4-digit qualifier - zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) - zip.runTests(''' - # traditional ZIP code - 12345 - - # ZIP+4 form - 12101-0001 - - # invalid ZIP - 98765- - ''') - prints:: - # traditional ZIP code - 12345 - ['12345'] - - # ZIP+4 form - 12101-0001 - ['12101-0001'] - - # invalid ZIP - 98765- - ^ - FAIL: Expected end of text (at char 5), (line:1, col:6) - """ - def __init__( self, expr, default=_optionalNotMatched ): - super(Optional,self).__init__( expr, savelist=False ) - self.saveAsList = self.expr.saveAsList - self.defaultValue = default - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - except (ParseException,IndexError): - if self.defaultValue is not _optionalNotMatched: - if self.expr.resultsName: - tokens = ParseResults([ self.defaultValue ]) - tokens[self.expr.resultsName] = self.defaultValue - else: - tokens = [ self.defaultValue ] - else: - tokens = [] - return loc, tokens - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]" - - return self.strRepr - -class SkipTo(ParseElementEnhance): - """ - Token for skipping over all undefined text until the matched expression is found. 
- - Parameters: - - expr - target expression marking the end of the data to be skipped - - include - (default=C{False}) if True, the target expression is also parsed - (the skipped text and target expression are returned as a 2-element list). - - ignore - (default=C{None}) used to define grammars (typically quoted strings and - comments) that might contain false matches to the target expression - - failOn - (default=C{None}) define expressions that are not allowed to be - included in the skipped test; if found before the target expression is found, - the SkipTo is not a match - - Example:: - report = ''' - Outstanding Issues Report - 1 Jan 2000 - - # | Severity | Description | Days Open - -----+----------+-------------------------------------------+----------- - 101 | Critical | Intermittent system crash | 6 - 94 | Cosmetic | Spelling error on Login ('log|n') | 14 - 79 | Minor | System slow when running too many reports | 47 - ''' - integer = Word(nums) - SEP = Suppress('|') - # use SkipTo to simply match everything up until the next SEP - # - ignore quoted strings, so that a '|' character inside a quoted string does not match - # - parse action will call token.strip() for each matched token, i.e., the description body - string_data = SkipTo(SEP, ignore=quotedString) - string_data.setParseAction(tokenMap(str.strip)) - ticket_expr = (integer("issue_num") + SEP - + string_data("sev") + SEP - + string_data("desc") + SEP - + integer("days_open")) - - for tkt in ticket_expr.searchString(report): - print tkt.dump() - prints:: - ['101', 'Critical', 'Intermittent system crash', '6'] - - days_open: 6 - - desc: Intermittent system crash - - issue_num: 101 - - sev: Critical - ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] - - days_open: 14 - - desc: Spelling error on Login ('log|n') - - issue_num: 94 - - sev: Cosmetic - ['79', 'Minor', 'System slow when running too many reports', '47'] - - days_open: 47 - - desc: System slow when running too many reports - - issue_num: 79 - - sev: Minor - """ - def __init__( self, other, include=False, ignore=None, failOn=None ): - super( SkipTo, self ).__init__( other ) - self.ignoreExpr = ignore - self.mayReturnEmpty = True - self.mayIndexError = False - self.includeMatch = include - self.asList = False - if isinstance(failOn, basestring): - self.failOn = ParserElement._literalStringClass(failOn) - else: - self.failOn = failOn - self.errmsg = "No match found for "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - startloc = loc - instrlen = len(instring) - expr = self.expr - expr_parse = self.expr._parse - self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None - self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - - tmploc = loc - while tmploc <= instrlen: - if self_failOn_canParseNext is not None: - # break if failOn expression matches - if self_failOn_canParseNext(instring, tmploc): - break - - if self_ignoreExpr_tryParse is not None: - # advance past ignore expressions - while 1: - try: - tmploc = self_ignoreExpr_tryParse(instring, tmploc) - except ParseBaseException: - break - - try: - expr_parse(instring, tmploc, doActions=False, callPreParse=False) - except (ParseException, IndexError): - # no match, advance loc in string - tmploc += 1 - else: - # matched skipto expr, done - break - - else: - # ran off the end of the input string without matching skipto expr, fail - raise ParseException(instring, loc, self.errmsg, self) - - # build up return 
values - loc = tmploc - skiptext = instring[startloc:loc] - skipresult = ParseResults(skiptext) - - if self.includeMatch: - loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) - skipresult += mat - - return loc, skipresult - -class Forward(ParseElementEnhance): - """ - Forward declaration of an expression to be defined later - - used for recursive grammars, such as algebraic infix notation. - When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. - - Note: take care when assigning to C{Forward} not to overlook precedence of operators. - Specifically, '|' has a lower precedence than '<<', so that:: - fwdExpr << a | b | c - will actually be evaluated as:: - (fwdExpr << a) | b | c - thereby leaving b and c out as parseable alternatives. It is recommended that you - explicitly group the values inserted into the C{Forward}:: - fwdExpr << (a | b | c) - Converting to use the '<<=' operator instead will avoid this problem. - - See L{ParseResults.pprint} for an example of a recursive parser created using - C{Forward}. - """ - def __init__( self, other=None ): - super(Forward,self).__init__( other, savelist=False ) - - def __lshift__( self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass(other) - self.expr = other - self.strRepr = None - self.mayIndexError = self.expr.mayIndexError - self.mayReturnEmpty = self.expr.mayReturnEmpty - self.setWhitespaceChars( self.expr.whiteChars ) - self.skipWhitespace = self.expr.skipWhitespace - self.saveAsList = self.expr.saveAsList - self.ignoreExprs.extend(self.expr.ignoreExprs) - return self - - def __ilshift__(self, other): - return self << other - - def leaveWhitespace( self ): - self.skipWhitespace = False - return self - - def streamline( self ): - if not self.streamlined: - self.streamlined = True - if self.expr is not None: - self.expr.streamline() - return self - - def validate( self, validateTrace=[] ): - if self not in validateTrace: - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion([]) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - return self.__class__.__name__ + ": ..." - - # stubbed out for now - creates awful memory and perf issues - self._revertClass = self.__class__ - self.__class__ = _ForwardNoRecurse - try: - if self.expr is not None: - retString = _ustr(self.expr) - else: - retString = "None" - finally: - self.__class__ = self._revertClass - return self.__class__.__name__ + ": " + retString - - def copy(self): - if self.expr is not None: - return super(Forward,self).copy() - else: - ret = Forward() - ret <<= self - return ret - -class _ForwardNoRecurse(Forward): - def __str__( self ): - return "..." - -class TokenConverter(ParseElementEnhance): - """ - Abstract subclass of C{ParseExpression}, for converting parsed results. - """ - def __init__( self, expr, savelist=False ): - super(TokenConverter,self).__init__( expr )#, savelist ) - self.saveAsList = False - -class Combine(TokenConverter): - """ - Converter to concatenate all matching tokens to a single string. - By default, the matching patterns must also be contiguous in the input string; - this can be disabled by specifying C{'adjacent=False'} in the constructor. - - Example:: - real = Word(nums) + '.' + Word(nums) - print(real.parseString('3.1416')) # -> ['3', '.', '1416'] - # will also erroneously match the following - print(real.parseString('3. 
1416')) # -> ['3', '.', '1416'] - - real = Combine(Word(nums) + '.' + Word(nums)) - print(real.parseString('3.1416')) # -> ['3.1416'] - # no match when there are internal spaces - print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) - """ - def __init__( self, expr, joinString="", adjacent=True ): - super(Combine,self).__init__( expr ) - # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself - if adjacent: - self.leaveWhitespace() - self.adjacent = adjacent - self.skipWhitespace = True - self.joinString = joinString - self.callPreparse = True - - def ignore( self, other ): - if self.adjacent: - ParserElement.ignore(self, other) - else: - super( Combine, self).ignore( other ) - return self - - def postParse( self, instring, loc, tokenlist ): - retToks = tokenlist.copy() - del retToks[:] - retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) - - if self.resultsName and retToks.haskeys(): - return [ retToks ] - else: - return retToks - -class Group(TokenConverter): - """ - Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. - - Example:: - ident = Word(alphas) - num = Word(nums) - term = ident | num - func = ident + Optional(delimitedList(term)) - print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] - - func = ident + Group(Optional(delimitedList(term))) - print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] - """ - def __init__( self, expr ): - super(Group,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - return [ tokenlist ] - -class Dict(TokenConverter): - """ - Converter to return a repetitive expression as a list, but also as a dictionary. - Each element can also be referenced using the first token in the expression as its key. - Useful for tabular report scraping when the first column can be used as a item key. - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - # print attributes as plain groups - print(OneOrMore(attr_expr).parseString(text).dump()) - - # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names - result = Dict(OneOrMore(Group(attr_expr))).parseString(text) - print(result.dump()) - - # access named fields as dict entries, or output as dict - print(result['shape']) - print(result.asDict()) - prints:: - ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] - - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} - See more examples at L{ParseResults} of accessing fields by results name. 
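For reference, a short sketch of the Forward and Group classes removed above, used together to express a recursive grammar, assuming pyparsing 2.x; the nested-list grammar here is invented for illustration.

    from pyparsing import Forward, Group, Suppress, Word, ZeroOrMore, alphas

    LPAR, RPAR = map(Suppress, "()")
    item = Word(alphas)
    nested = Forward()                                  # declared first, defined below via '<<='
    nested <<= Group(LPAR + ZeroOrMore(item | nested) + RPAR)

    print(nested.parseString("(a (b c) d)").asList())   # [['a', ['b', 'c'], 'd']]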
- """ - def __init__( self, expr ): - super(Dict,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - for i,tok in enumerate(tokenlist): - if len(tok) == 0: - continue - ikey = tok[0] - if isinstance(ikey,int): - ikey = _ustr(tok[0]).strip() - if len(tok)==1: - tokenlist[ikey] = _ParseResultsWithOffset("",i) - elif len(tok)==2 and not isinstance(tok[1],ParseResults): - tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) - else: - dictvalue = tok.copy() #ParseResults(i) - del dictvalue[0] - if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) - else: - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) - - if self.resultsName: - return [ tokenlist ] - else: - return tokenlist - - -class Suppress(TokenConverter): - """ - Converter for ignoring the results of a parsed expression. - - Example:: - source = "a, b, c,d" - wd = Word(alphas) - wd_list1 = wd + ZeroOrMore(',' + wd) - print(wd_list1.parseString(source)) - - # often, delimiters that are useful during parsing are just in the - # way afterward - use Suppress to keep them out of the parsed output - wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) - print(wd_list2.parseString(source)) - prints:: - ['a', ',', 'b', ',', 'c', ',', 'd'] - ['a', 'b', 'c', 'd'] - (See also L{delimitedList}.) - """ - def postParse( self, instring, loc, tokenlist ): - return [] - - def suppress( self ): - return self - - -class OnlyOnce(object): - """ - Wrapper for parse actions, to ensure they are only called once. - """ - def __init__(self, methodCall): - self.callable = _trim_arity(methodCall) - self.called = False - def __call__(self,s,l,t): - if not self.called: - results = self.callable(s,l,t) - self.called = True - return results - raise ParseException(s,l,"") - def reset(self): - self.called = False - -def traceParseAction(f): - """ - Decorator for debugging parse actions. - - When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} - When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. - - Example:: - wd = Word(alphas) - - @traceParseAction - def remove_duplicate_chars(tokens): - return ''.join(sorted(set(''.join(tokens)))) - - wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) - print(wds.parseString("slkdjs sld sldd sdlf sdljf")) - prints:: - >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) - <<leaving remove_duplicate_chars (ret: 'dfjkls') - ['dfjkls'] - """ - f = _trim_arity(f) - def z(*paArgs): - thisFunc = f.__name__ - s,l,t = paArgs[-3:] - if len(paArgs)>3: - thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) - try: - ret = f(*paArgs) - except Exception as exc: - sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) - raise - sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) - return ret - try: - z.__name__ = f.__name__ - except AttributeError: - pass - return z - -# -# global helpers -# -def delimitedList( expr, delim=",", combine=False ): - """ - Helper to define a delimited list of expressions - the delimiter defaults to ','. 
- By default, the list elements and delimiters can have intervening whitespace, and - comments, but this can be overridden by passing C{combine=True} in the constructor. - If C{combine} is set to C{True}, the matching tokens are returned as a single token - string, with the delimiters included; otherwise, the matching tokens are returned - as a list of tokens, with the delimiters suppressed. - - Example:: - delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] - delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] - """ - dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." - if combine: - return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) - else: - return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) - -def countedArray( expr, intExpr=None ): - """ - Helper to define a counted list of expressions. - This helper defines a pattern of the form:: - integer expr expr expr... - where the leading integer tells how many expr expressions follow. - The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. - - If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. - - Example:: - countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] - - # in this parser, the leading integer value is given in binary, - # '10' indicating that 2 values are in the array - binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) - countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] - """ - arrayExpr = Forward() - def countFieldParseAction(s,l,t): - n = t[0] - arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) - return [] - if intExpr is None: - intExpr = Word(nums).setParseAction(lambda t:int(t[0])) - else: - intExpr = intExpr.copy() - intExpr.setName("arrayLen") - intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') - -def _flatten(L): - ret = [] - for i in L: - if isinstance(i,list): - ret.extend(_flatten(i)) - else: - ret.append(i) - return ret - -def matchPreviousLiteral(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousLiteral(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches a - previous literal, will also match the leading C{"1:1"} in C{"1:10"}. - If this is not desired, use C{matchPreviousExpr}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - def copyTokenToRepeater(s,l,t): - if t: - if len(t) == 1: - rep << t[0] - else: - # flatten t tokens - tflat = _flatten(t.asList()) - rep << And(Literal(tt) for tt in tflat) - else: - rep << Empty() - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def matchPreviousExpr(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousExpr(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. 
Because this matches by - expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; - the expressions are evaluated first, and then compared, so - C{"1"} is compared with C{"10"}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - e2 = expr.copy() - rep <<= e2 - def copyTokenToRepeater(s,l,t): - matchTokens = _flatten(t.asList()) - def mustMatchTheseTokens(s,l,t): - theseTokens = _flatten(t.asList()) - if theseTokens != matchTokens: - raise ParseException("",0,"") - rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def _escapeRegexRangeChars(s): - #~ escape these chars: ^-] - for c in r"\^-]": - s = s.replace(c,_bslash+c) - s = s.replace("\n",r"\n") - s = s.replace("\t",r"\t") - return _ustr(s) - -def oneOf( strs, caseless=False, useRegex=True ): - """ - Helper to quickly define a set of alternative Literals, and makes sure to do - longest-first testing when there is a conflict, regardless of the input order, - but returns a C{L{MatchFirst}} for best performance. - - Parameters: - - strs - a string of space-delimited literals, or a collection of string literals - - caseless - (default=C{False}) - treat all literals as caseless - - useRegex - (default=C{True}) - as an optimization, will generate a Regex - object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or - if creating a C{Regex} raises an exception) - - Example:: - comp_oper = oneOf("< = > <= >= !=") - var = Word(alphas) - number = Word(nums) - term = var | number - comparison_expr = term + comp_oper + term - print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) - prints:: - [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] - """ - if caseless: - isequal = ( lambda a,b: a.upper() == b.upper() ) - masks = ( lambda a,b: b.upper().startswith(a.upper()) ) - parseElementClass = CaselessLiteral - else: - isequal = ( lambda a,b: a == b ) - masks = ( lambda a,b: b.startswith(a) ) - parseElementClass = Literal - - symbols = [] - if isinstance(strs,basestring): - symbols = strs.split() - elif isinstance(strs, Iterable): - symbols = list(strs) - else: - warnings.warn("Invalid argument to oneOf, expected string or iterable", - SyntaxWarning, stacklevel=2) - if not symbols: - return NoMatch() - - i = 0 - while i < len(symbols)-1: - cur = symbols[i] - for j,other in enumerate(symbols[i+1:]): - if ( isequal(other, cur) ): - del symbols[i+j+1] - break - elif ( masks(cur, other) ): - del symbols[i+j+1] - symbols.insert(i,other) - cur = other - break - else: - i += 1 - - if not caseless and useRegex: - #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) - try: - if len(symbols)==len("".join(symbols)): - return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) - else: - return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) - except Exception: - warnings.warn("Exception creating Regex for oneOf, building MatchFirst", - SyntaxWarning, stacklevel=2) - - - # last resort, just use MatchFirst - return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) - -def dictOf( key, value ): - """ - Helper to easily and clearly define a dictionary by specifying the respective patterns - for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens - in the proper order. 
The key pattern can include delimiting markers or punctuation, - as long as they are suppressed, thereby leaving the significant key text. The value - pattern can include named results, so that the C{Dict} results can include named token - fields. - - Example:: - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - print(OneOrMore(attr_expr).parseString(text).dump()) - - attr_label = label - attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) - - # similar to Dict, but simpler call format - result = dictOf(attr_label, attr_value).parseString(text) - print(result.dump()) - print(result['shape']) - print(result.shape) # object attribute access works too - print(result.asDict()) - prints:: - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - SQUARE - {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} - """ - return Dict( ZeroOrMore( Group ( key + value ) ) ) - -def originalTextFor(expr, asString=True): - """ - Helper to return the original, untokenized text for a given expression. Useful to - restore the parsed fields of an HTML start tag into the raw tag text itself, or to - revert separate tokens with intervening whitespace back to the original matching - input text. By default, returns astring containing the original parsed text. - - If the optional C{asString} argument is passed as C{False}, then the return value is a - C{L{ParseResults}} containing any results names that were originally matched, and a - single token containing the original matched text from the input string. So if - the expression passed to C{L{originalTextFor}} contains expressions with defined - results names, you must set C{asString} to C{False} if you want to preserve those - results name values. - - Example:: - src = "this is test <b> bold <i>text</i> </b> normal text " - for tag in ("b","i"): - opener,closer = makeHTMLTags(tag) - patt = originalTextFor(opener + SkipTo(closer) + closer) - print(patt.searchString(src)[0]) - prints:: - ['<b> bold <i>text</i> </b>'] - ['<i>text</i>'] - """ - locMarker = Empty().setParseAction(lambda s,loc,t: loc) - endlocMarker = locMarker.copy() - endlocMarker.callPreparse = False - matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") - if asString: - extractText = lambda s,l,t: s[t._original_start:t._original_end] - else: - def extractText(s,l,t): - t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] - matchExpr.setParseAction(extractText) - matchExpr.ignoreExprs = expr.ignoreExprs - return matchExpr - -def ungroup(expr): - """ - Helper to undo pyparsing's default grouping of And expressions, even - if all but one are non-empty. - """ - return TokenConverter(expr).setParseAction(lambda t:t[0]) - -def locatedExpr(expr): - """ - Helper to decorate a returned token with its starting and ending locations in the input string. 
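The ungroup helper defined above has no example of its own; a minimal sketch, assuming the pyparsing 2.x API of this vendored copy (not part of the original file)::

    from pyparsing import Group, Word, alphas, nums, ungroup

    # illustrative sketch, not part of the vendored file
    grouped = Group(Word(alphas) + Word(nums))
    print(grouped.parseString("abc 123"))           # -> [['abc', '123']]
    print(ungroup(grouped).parseString("abc 123"))  # -> ['abc', '123']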
- This helper adds the following results names: - - locn_start = location where matched expression begins - - locn_end = location where matched expression ends - - value = the actual parsed results - - Be careful if the input text contains C{<TAB>} characters, you may want to call - C{L{ParserElement.parseWithTabs}} - - Example:: - wd = Word(alphas) - for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): - print(match) - prints:: - [[0, 'ljsdf', 5]] - [[8, 'lksdjjf', 15]] - [[18, 'lkkjj', 23]] - """ - locator = Empty().setParseAction(lambda s,l,t: l) - return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) - - -# convenience constants for positional expressions -empty = Empty().setName("empty") -lineStart = LineStart().setName("lineStart") -lineEnd = LineEnd().setName("lineEnd") -stringStart = StringStart().setName("stringStart") -stringEnd = StringEnd().setName("stringEnd") - -_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) -_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) -_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) -_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) -_charRange = Group(_singleChar + Suppress("-") + _singleChar) -_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" - -def srange(s): - r""" - Helper to easily define string ranges for use in Word construction. Borrows - syntax from regexp '[]' string range definitions:: - srange("[0-9]") -> "0123456789" - srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" - srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" - The input string must be enclosed in []'s, and the returned string is the expanded - character set joined into a single string. - The values enclosed in the []'s may be: - - a single character - - an escaped character with a leading backslash (such as C{\-} or C{\]}) - - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) - (C{\0x##} is also supported for backwards compatibility) - - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) - - a range of any of the above, separated by a dash (C{'a-z'}, etc.) - - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) - """ - _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) - try: - return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) - except Exception: - return "" - -def matchOnlyAtCol(n): - """ - Helper method for defining parse actions that require matching at a specific - column in the input text. - """ - def verifyCol(strg,locn,toks): - if col(locn,strg) != n: - raise ParseException(strg,locn,"matched token not at column %d" % n) - return verifyCol - -def replaceWith(replStr): - """ - Helper method for common parse actions that simply return a literal value. Especially - useful when used with C{L{transformString<ParserElement.transformString>}()}. 
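A minimal sketch of the matchOnlyAtCol helper defined above, assuming the pyparsing 2.x API of this vendored copy (not part of the original file); pyparsing columns are 1-based::

    from pyparsing import Word, alphas, matchOnlyAtCol

    # illustrative sketch, not part of the vendored file
    wd = Word(alphas).setParseAction(matchOnlyAtCol(1))
    print(wd.parseString("hello"))    # starts at column 1 -> ['hello']
    # wd.parseString("   hello") raises ParseException: matched token not at column 1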
- - Example:: - num = Word(nums).setParseAction(lambda toks: int(toks[0])) - na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) - term = na | num - - OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] - """ - return lambda s,l,t: [replStr] - -def removeQuotes(s,l,t): - """ - Helper parse action for removing quotation marks from parsed quoted strings. - - Example:: - # by default, quotation marks are included in parsed results - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] - - # use removeQuotes to strip quotation marks from parsed results - quotedString.setParseAction(removeQuotes) - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] - """ - return t[0][1:-1] - -def tokenMap(func, *args): - """ - Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional - args are passed, they are forwarded to the given function as additional arguments after - the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the - parsed data to an integer using base 16. - - Example (compare the last to example in L{ParserElement.transformString}:: - hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) - hex_ints.runTests(''' - 00 11 22 aa FF 0a 0d 1a - ''') - - upperword = Word(alphas).setParseAction(tokenMap(str.upper)) - OneOrMore(upperword).runTests(''' - my kingdom for a horse - ''') - - wd = Word(alphas).setParseAction(tokenMap(str.title)) - OneOrMore(wd).setParseAction(' '.join).runTests(''' - now is the winter of our discontent made glorious summer by this sun of york - ''') - prints:: - 00 11 22 aa FF 0a 0d 1a - [0, 17, 34, 170, 255, 10, 13, 26] - - my kingdom for a horse - ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] - - now is the winter of our discontent made glorious summer by this sun of york - ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] - """ - def pa(s,l,t): - return [func(tokn, *args) for tokn in t] - - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - pa.__name__ = func_name - - return pa - -upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) -"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" - -downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) -"""(Deprecated) Helper parse action to convert tokens to lower case. 
Deprecated in favor of L{pyparsing_common.downcaseTokens}""" - -def _makeTags(tagStr, xml): - """Internal helper to construct opening and closing tag expressions, given a tag name""" - if isinstance(tagStr,basestring): - resname = tagStr - tagStr = Keyword(tagStr, caseless=not xml) - else: - resname = tagStr.name - - tagAttrName = Word(alphas,alphanums+"_-:") - if (xml): - tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") - else: - printablesLessRAbrack = "".join(c for c in printables if c not in ">") - tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ - Optional( Suppress("=") + tagAttrValue ) ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") - closeTag = Combine(_L("</") + tagStr + ">") - - openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) - closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) - openTag.tag = resname - closeTag.tag = resname - return openTag, closeTag - -def makeHTMLTags(tagStr): - """ - Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches - tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. - - Example:: - text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' - # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple - a,a_end = makeHTMLTags("A") - link_expr = a + SkipTo(a_end)("link_text") + a_end - - for link in link_expr.searchString(text): - # attributes in the <A> tag (like "href" shown here) are also accessible as named results - print(link.link_text, '->', link.href) - prints:: - pyparsing -> http://pyparsing.wikispaces.com - """ - return _makeTags( tagStr, False ) - -def makeXMLTags(tagStr): - """ - Helper to construct opening and closing tag expressions for XML, given a tag name. Matches - tags only in the given upper/lower case. - - Example: similar to L{makeHTMLTags} - """ - return _makeTags( tagStr, True ) - -def withAttribute(*args,**attrDict): - """ - Helper to create a validating parse action to be used with start tags created - with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag - with a required attribute value, to avoid false matches on common tags such as - C{<TD>} or C{<DIV>}. - - Call C{withAttribute} with a series of attribute names and values. Specify the list - of filter attributes names and values as: - - keyword arguments, as in C{(align="right")}, or - - as an explicit dict with C{**} operator, when an attribute name is also a Python - reserved word, as in C{**{"class":"Customer", "align":"right"}} - - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) - For attribute names with a namespace prefix, you must use the second form. Attribute - names are matched insensitive to upper/lower case. - - If just testing for C{class} (with or without a namespace), use C{L{withClass}}. 
- - To verify that the attribute exists, but without specifying a value, pass - C{withAttribute.ANY_VALUE} as the value. - - Example:: - html = ''' - <div> - Some text - <div type="grid">1 4 0 1 0</div> - <div type="graph">1,3 2,3 1,1</div> - <div>this has no type</div> - </div> - - ''' - div,div_end = makeHTMLTags("div") - - # only match div tag having a type attribute with value "grid" - div_grid = div().setParseAction(withAttribute(type="grid")) - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - # construct a match with any div tag having a type attribute, regardless of the value - div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - prints:: - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - if args: - attrs = args[:] - else: - attrs = attrDict.items() - attrs = [(k,v) for k,v in attrs] - def pa(s,l,tokens): - for attrName,attrValue in attrs: - if attrName not in tokens: - raise ParseException(s,l,"no matching attribute " + attrName) - if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: - raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % - (attrName, tokens[attrName], attrValue)) - return pa -withAttribute.ANY_VALUE = object() - -def withClass(classname, namespace=''): - """ - Simplified version of C{L{withAttribute}} when matching on a div class - made - difficult because C{class} is a reserved word in Python. - - Example:: - html = ''' - <div> - Some text - <div class="grid">1 4 0 1 0</div> - <div class="graph">1,3 2,3 1,1</div> - <div>this <div> has no class</div> - </div> - - ''' - div,div_end = makeHTMLTags("div") - div_grid = div().setParseAction(withClass("grid")) - - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - prints:: - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - classattr = "%s:class" % namespace if namespace else "class" - return withAttribute(**{classattr : classname}) - -opAssoc = _Constants() -opAssoc.LEFT = object() -opAssoc.RIGHT = object() - -def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): - """ - Helper method for constructing grammars of expressions made up of - operators working in a precedence hierarchy. Operators may be unary or - binary, left- or right-associative. Parse actions can also be attached - to operator expressions. The generated parser will also recognize the use - of parentheses to override operator precedences (see example below). - - Note: if you define a deep operator list, you may see performance issues - when using infixNotation. See L{ParserElement.enablePackrat} for a - mechanism to potentially improve your parser performance. 
- - Parameters: - - baseExpr - expression representing the most basic element for the nested - - opList - list of tuples, one for each operator precedence level in the - expression grammar; each tuple is of the form - (opExpr, numTerms, rightLeftAssoc, parseAction), where: - - opExpr is the pyparsing expression for the operator; - may also be a string, which will be converted to a Literal; - if numTerms is 3, opExpr is a tuple of two expressions, for the - two operators separating the 3 terms - - numTerms is the number of terms for this operator (must - be 1, 2, or 3) - - rightLeftAssoc is the indicator whether the operator is - right or left associative, using the pyparsing-defined - constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. - - parseAction is the parse action to be associated with - expressions matching this operator expression (the - parse action tuple member may be omitted); if the parse action - is passed a tuple or list of functions, this is equivalent to - calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) - - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) - - Example:: - # simple example of four-function arithmetic with ints and variable names - integer = pyparsing_common.signed_integer - varname = pyparsing_common.identifier - - arith_expr = infixNotation(integer | varname, - [ - ('-', 1, opAssoc.RIGHT), - (oneOf('* /'), 2, opAssoc.LEFT), - (oneOf('+ -'), 2, opAssoc.LEFT), - ]) - - arith_expr.runTests(''' - 5+3*6 - (5+3)*6 - -2--11 - ''', fullDump=False) - prints:: - 5+3*6 - [[5, '+', [3, '*', 6]]] - - (5+3)*6 - [[[5, '+', 3], '*', 6]] - - -2--11 - [[['-', 2], '-', ['-', 11]]] - """ - ret = Forward() - lastExpr = baseExpr | ( lpar + ret + rpar ) - for i,operDef in enumerate(opList): - opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] - termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr - if arity == 3: - if opExpr is None or len(opExpr) != 2: - raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") - opExpr1, opExpr2 = opExpr - thisExpr = Forward().setName(termName) - if rightLeftAssoc == opAssoc.LEFT: - if arity == 1: - matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) - else: - matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ - Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - elif rightLeftAssoc == opAssoc.RIGHT: - if arity == 1: - # try to avoid LR with this extra test - if not isinstance(opExpr, Optional): - opExpr = Optional(opExpr) - matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) - else: - matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ - Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or 
ternary (3)") - else: - raise ValueError("operator must indicate right or left associativity") - if pa: - if isinstance(pa, (tuple, list)): - matchExpr.setParseAction(*pa) - else: - matchExpr.setParseAction(pa) - thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) - lastExpr = thisExpr - ret <<= lastExpr - return ret - -operatorPrecedence = infixNotation -"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" - -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") -quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| - Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") -unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") - -def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """ - Helper method for defining nested lists enclosed in opening and closing - delimiters ("(" and ")" are the default). - - Parameters: - - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - - content - expression for items within the nested lists (default=C{None}) - - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) - - If an expression is not provided for the content argument, the nested - expression will capture all whitespace-delimited content between delimiters - as a list of separate values. - - Use the C{ignoreExpr} argument to define expressions that may contain - opening or closing characters that should not be treated as opening - or closing characters for nesting, such as quotedString or a comment - expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. - The default is L{quotedString}, but if no expressions are to be ignored, - then pass C{None} for this argument. 
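The predefined quoted-string expressions defined above accept either quote style; a minimal sketch of pairing them with removeQuotes, assuming the pyparsing 2.x API of this vendored copy (not part of the original file)::

    from pyparsing import quotedString, removeQuotes

    # illustrative sketch, not part of the vendored file
    qs = quotedString.copy().setParseAction(removeQuotes)
    print(qs.parseString('"double quoted"'))   # -> ['double quoted']
    print(qs.parseString("'single quoted'"))   # -> ['single quoted']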
- - Example:: - data_type = oneOf("void int short long char float double") - decl_data_type = Combine(data_type + Optional(Word('*'))) - ident = Word(alphas+'_', alphanums+'_') - number = pyparsing_common.number - arg = Group(decl_data_type + ident) - LPAR,RPAR = map(Suppress, "()") - - code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) - - c_function = (decl_data_type("type") - + ident("name") - + LPAR + Optional(delimitedList(arg), [])("args") + RPAR - + code_body("body")) - c_function.ignore(cStyleComment) - - source_code = ''' - int is_odd(int x) { - return (x%2); - } - - int dec_to_hex(char hchar) { - if (hchar >= '0' && hchar <= '9') { - return (ord(hchar)-ord('0')); - } else { - return (10+ord(hchar)-ord('A')); - } - } - ''' - for func in c_function.searchString(source_code): - print("%(name)s (%(type)s) args: %(args)s" % func) - - prints:: - is_odd (int) args: [['int', 'x']] - dec_to_hex (int) args: [['char', 'hchar']] - """ - if opener == closer: - raise ValueError("opening and closing strings cannot be the same") - if content is None: - if isinstance(opener,basestring) and isinstance(closer,basestring): - if len(opener) == 1 and len(closer)==1: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t:t[0].strip())) - else: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - ~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - raise ValueError("opening and closing arguments must be strings if no content expression is given") - ret = Forward() - if ignoreExpr is not None: - ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) - else: - ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) - ret.setName('nested %s%s expression' % (opener,closer)) - return ret - -def indentedBlock(blockStatementExpr, indentStack, indent=True): - """ - Helper method for defining space-delimited indentation blocks, such as - those used to define block statements in Python source code. - - Parameters: - - blockStatementExpr - expression defining syntax of statement that - is repeated within the indented block - - indentStack - list created by caller to manage indentation stack - (multiple statementWithIndentedBlock expressions within a single grammar - should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond the - the current level; set to False for block of left-most statements - (default=C{True}) - - A valid block must contain at least one C{blockStatement}. 
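In its default form the nestedExpr helper above turns parenthesized text directly into nested lists; a one-line sketch, assuming the pyparsing 2.x API of this vendored copy (not part of the original file)::

    from pyparsing import nestedExpr

    # illustrative sketch, not part of the vendored file
    print(nestedExpr().parseString("(a (b c) d)"))   # -> [['a', ['b', 'c'], 'd']]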
- - Example:: - data = ''' - def A(z): - A1 - B = 100 - G = A2 - A2 - A3 - B - def BB(a,b,c): - BB1 - def BBA(): - bba1 - bba2 - bba3 - C - D - def spam(x,y): - def eggs(z): - pass - ''' - - - indentStack = [1] - stmt = Forward() - - identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") - func_body = indentedBlock(stmt, indentStack) - funcDef = Group( funcDecl + func_body ) - - rvalue = Forward() - funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") - rvalue << (funcCall | identifier | Word(nums)) - assignment = Group(identifier + "=" + rvalue) - stmt << ( funcDef | assignment | identifier ) - - module_body = OneOrMore(stmt) - - parseTree = module_body.parseString(data) - parseTree.pprint() - prints:: - [['def', - 'A', - ['(', 'z', ')'], - ':', - [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], - 'B', - ['def', - 'BB', - ['(', 'a', 'b', 'c', ')'], - ':', - [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], - 'C', - 'D', - ['def', - 'spam', - ['(', 'x', 'y', ')'], - ':', - [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] - """ - def checkPeerIndent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if curCol != indentStack[-1]: - if curCol > indentStack[-1]: - raise ParseFatalException(s,l,"illegal nesting") - raise ParseException(s,l,"not a peer entry") - - def checkSubIndent(s,l,t): - curCol = col(l,s) - if curCol > indentStack[-1]: - indentStack.append( curCol ) - else: - raise ParseException(s,l,"not a subentry") - - def checkUnindent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): - raise ParseException(s,l,"not an unindent") - indentStack.pop() - - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) - INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') - PEER = Empty().setParseAction(checkPeerIndent).setName('') - UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') - if indent: - smExpr = Group( Optional(NL) + - #~ FollowedBy(blockStatementExpr) + - INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) - else: - smExpr = Group( Optional(NL) + - (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) - blockStatementExpr.ignore(_bslash + LineEnd()) - return smExpr.setName('indented block') - -alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") -punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") - -anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) -_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) -commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") -def replaceHTMLEntity(t): - """Helper parser action to replace common HTML entities with their special characters""" - return _htmlEntityMap.get(t.entity) - -# it's easy to get these comment structures wrong - they're very common, so may as well make them available -cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") -"Comment of the form C{/* ... */}" - -htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment") -"Comment of the form C{<!-- ... -->}" - -restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") -dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") -"Comment of the form C{// ... 
(to end of line)}" - -cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") -"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" - -javaStyleComment = cppStyleComment -"Same as C{L{cppStyleComment}}" - -pythonStyleComment = Regex(r"#.*").setName("Python style comment") -"Comment of the form C{# ... (to end of line)}" - -_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + - Optional( Word(" \t") + - ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") -commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") -"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. - This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" - -# some other useful expressions - using lower-case class name since we are really using this as a namespace -class pyparsing_common: - """ - Here are some common low-level expressions that may be useful in jump-starting parser development: - - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>}) - - common L{programming identifiers<identifier>} - - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>}) - - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>} - - L{UUID<uuid>} - - L{comma-separated list<comma_separated_list>} - Parse actions: - - C{L{convertToInteger}} - - C{L{convertToFloat}} - - C{L{convertToDate}} - - C{L{convertToDatetime}} - - C{L{stripHTMLTags}} - - C{L{upcaseTokens}} - - C{L{downcaseTokens}} - - Example:: - pyparsing_common.number.runTests(''' - # any int or real number, returned as the appropriate type - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.fnumber.runTests(''' - # any int or real number, returned as float - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.hex_integer.runTests(''' - # hex numbers - 100 - FF - ''') - - pyparsing_common.fraction.runTests(''' - # fractions - 1/2 - -3/4 - ''') - - pyparsing_common.mixed_integer.runTests(''' - # mixed fractions - 1 - 1/2 - -3/4 - 1-3/4 - ''') - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(''' - # uuid - 12345678-1234-5678-1234-567812345678 - ''') - prints:: - # any int or real number, returned as the appropriate type - 100 - [100] - - -100 - [-100] - - +100 - [100] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # any int or real number, returned as float - 100 - [100.0] - - -100 - [-100.0] - - +100 - [100.0] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # hex numbers - 100 - [256] - - FF - [255] - - # fractions - 1/2 - [0.5] - - -3/4 - [-0.75] - - # mixed fractions - 1 - [1] - - 1/2 - [0.5] - - -3/4 - [-0.75] - - 1-3/4 - [1.75] - - # uuid - 12345678-1234-5678-1234-567812345678 - [UUID('12345678-1234-5678-1234-567812345678')] - """ - - convertToInteger = tokenMap(int) - """ - Parse action for converting parsed integers to Python int - """ - - convertToFloat = tokenMap(float) - """ - Parse action for converting parsed numbers to Python float - """ - - integer = Word(nums).setName("integer").setParseAction(convertToInteger) - """expression that parses an unsigned integer, returns an int""" - - hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) - """expression that 
parses a hexadecimal integer, returns an int""" - - signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) - """expression that parses an integer with optional leading sign, returns an int""" - - fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") - """fractional expression of an integer divided by an integer, returns a float""" - fraction.addParseAction(lambda t: t[0]/t[-1]) - - mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") - """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" - mixed_integer.addParseAction(sum) - - real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) - """expression that parses a floating point number and returns a float""" - - sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) - """expression that parses a floating point number with optional scientific notation and returns a float""" - - # streamlining this expression makes the docs nicer-looking - number = (sci_real | real | signed_integer).streamline() - """any numeric expression, returns the corresponding Python type""" - - fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) - """any int or real number, returned as float""" - - identifier = Word(alphas+'_', alphanums+'_').setName("identifier") - """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" - - ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") - "IPv4 address (C{0.0.0.0 - 255.255.255.255})" - - _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") - _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) - _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") - ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") - "IPv6 address (long, short, or mixed form)" - - mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") - "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" - - @staticmethod - def convertToDate(fmt="%Y-%m-%d"): - """ - Helper to create a parse action for converting parsed date string to Python datetime.date - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) - - Example:: - date_expr = pyparsing_common.iso8601_date.copy() - date_expr.setParseAction(pyparsing_common.convertToDate()) - print(date_expr.parseString("1999-12-31")) - prints:: - [datetime.date(1999, 12, 31)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt).date() - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - @staticmethod - def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): - """ - Helper to create a parse action for converting parsed datetime string to Python datetime.datetime - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) - - Example:: - dt_expr = pyparsing_common.iso8601_datetime.copy() - dt_expr.setParseAction(pyparsing_common.convertToDatetime()) - print(dt_expr.parseString("1999-12-31T23:59:59.999")) - prints:: - [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt) - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date") - "ISO8601 date (C{yyyy-mm-dd})" - - iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") - "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" - - uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") - "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" - - _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() - @staticmethod - def stripHTMLTags(s, l, tokens): - """ - Parse action to remove HTML tags from web page HTML source - - Example:: - # strip HTML links from normal text - text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' - td,td_end = makeHTMLTags("TD") - table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - - print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' - """ - return pyparsing_common._html_stripper.transformString(tokens[0]) - - _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') - + Optional( White(" \t") ) ) ).streamline().setName("commaItem") - comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") - """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" - - upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) - """Parse action to convert tokens to upper case.""" - - downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) - """Parse action to convert tokens to lower case.""" - - -if __name__ == "__main__": - - selectToken = CaselessLiteral("select") - fromToken = CaselessLiteral("from") - - ident = Word(alphas, alphanums + "_$") - - columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - columnNameList = Group(delimitedList(columnName)).setName("columns") - columnSpec 
= ('*' | columnNameList) - - tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - tableNameList = Group(delimitedList(tableName)).setName("tables") - - simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") - - # demo runTests method, including embedded comments in test string - simpleSQL.runTests(""" - # '*' as column list and dotted table name - select * from SYS.XYZZY - - # caseless match on "SELECT", and casts back to "select" - SELECT * from XYZZY, ABC - - # list of column names, and mixed case SELECT keyword - Select AA,BB,CC from Sys.dual - - # multiple tables - Select A, B, C from Sys.dual, Table2 - - # invalid SELECT keyword - should fail - Xelect A, B, C from Sys.dual - - # incomplete command - should fail - Select - - # invalid column name - should fail - Select ^^^ frox Sys.dual - - """) - - pyparsing_common.number.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - # any int or real number, returned as float - pyparsing_common.fnumber.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - pyparsing_common.hex_integer.runTests(""" - 100 - FF - """) - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(""" - 12345678-1234-5678-1234-567812345678 - """) diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyc deleted file mode 100644 index af59b232..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/six.py b/env/lib/python2.7/site-packages/pkg_resources/_vendor/six.py deleted file mode 100644 index 190c0239..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/_vendor/six.py +++ /dev/null @@ -1,868 +0,0 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson <benjamin@python.org>" -__version__ = "1.10.0" - - -# Useful for very coarse version differentiation. 
-PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. - delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." 
+ fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", 
"http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = 
_importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack 
- del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - raise value from from_value -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. 
-__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/six.pyc b/env/lib/python2.7/site-packages/pkg_resources/_vendor/six.pyc deleted file mode 100644 index 9b310de1..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/_vendor/six.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/extern/__init__.py b/env/lib/python2.7/site-packages/pkg_resources/extern/__init__.py deleted file mode 100644 index c1eb9e99..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/extern/__init__.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys - - -class VendorImporter: - """ - A PEP 302 meta path importer for finding optionally-vendored - or otherwise naturally-installed packages from root_name. - """ - - def __init__(self, root_name, vendored_names=(), vendor_pkg=None): - self.root_name = root_name - self.vendored_names = set(vendored_names) - self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') - - @property - def search_path(self): - """ - Search first the vendor package then as a natural package. - """ - yield self.vendor_pkg + '.' - yield '' - - def find_module(self, fullname, path=None): - """ - Return self when fullname starts with root_name and the - target module is one vendored through this importer. - """ - root, base, target = fullname.partition(self.root_name + '.') - if root: - return - if not any(map(target.startswith, self.vendored_names)): - return - return self - - def load_module(self, fullname): - """ - Iterate over the search path to locate and load fullname. - """ - root, base, target = fullname.partition(self.root_name + '.') - for prefix in self.search_path: - try: - extant = prefix + target - __import__(extant) - mod = sys.modules[extant] - sys.modules[fullname] = mod - # mysterious hack: - # Remove the reference to the extant package/module - # on later Python versions to cause relative imports - # in the vendor package to resolve the same modules - # as those going through this importer. - if prefix and sys.version_info > (3, 3): - del sys.modules[extant] - return mod - except ImportError: - pass - else: - raise ImportError( - "The '{target}' package is required; " - "normally this is bundled with this package so if you get " - "this warning, consult the packager of your " - "distribution.".format(**locals()) - ) - - def install(self): - """ - Install this importer into sys.meta_path if not already present. 
- """ - if self not in sys.meta_path: - sys.meta_path.append(self) - - -names = 'packaging', 'pyparsing', 'six', 'appdirs' -VendorImporter(__name__, names).install() diff --git a/env/lib/python2.7/site-packages/pkg_resources/extern/__init__.pyc b/env/lib/python2.7/site-packages/pkg_resources/extern/__init__.pyc deleted file mode 100644 index 7423dbaa..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/extern/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/py31compat.py b/env/lib/python2.7/site-packages/pkg_resources/py31compat.py deleted file mode 100644 index a381c424..00000000 --- a/env/lib/python2.7/site-packages/pkg_resources/py31compat.py +++ /dev/null @@ -1,23 +0,0 @@ -import os -import errno -import sys - -from .extern import six - - -def _makedirs_31(path, exist_ok=False): - try: - os.makedirs(path) - except OSError as exc: - if not exist_ok or exc.errno != errno.EEXIST: - raise - - -# rely on compatibility behavior until mode considerations -# and exists_ok considerations are disentangled. -# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663 -needs_makedirs = ( - six.PY2 or - (3, 4) <= sys.version_info < (3, 4, 1) -) -makedirs = _makedirs_31 if needs_makedirs else os.makedirs diff --git a/env/lib/python2.7/site-packages/pkg_resources/py31compat.pyc b/env/lib/python2.7/site-packages/pkg_resources/py31compat.pyc deleted file mode 100644 index fdcb6474..00000000 Binary files a/env/lib/python2.7/site-packages/pkg_resources/py31compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/METADATA deleted file mode 100644 index 0fdb8f4f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/METADATA +++ /dev/null @@ -1,150 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc -Version: 5.2 -Summary: Python<->ObjC Interoperability Module -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: Objective-C,bridge,Cocoa -Platform: macOS -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (==5.2) -Requires-Dist: pyobjc-framework-AddressBook (==5.2) -Requires-Dist: pyobjc-framework-AppleScriptKit (==5.2) -Requires-Dist: pyobjc-framework-ApplicationServices (==5.2) -Requires-Dist: pyobjc-framework-Automator 
(==5.2) -Requires-Dist: pyobjc-framework-CFNetwork (==5.2) -Requires-Dist: pyobjc-framework-Cocoa (==5.2) -Requires-Dist: pyobjc-framework-CoreAudio (==5.2) -Requires-Dist: pyobjc-framework-CoreAudioKit (==5.2) -Requires-Dist: pyobjc-framework-CoreData (==5.2) -Requires-Dist: pyobjc-framework-CoreServices (==5.2) -Requires-Dist: pyobjc-framework-CoreText (==5.2) -Requires-Dist: pyobjc-framework-DiscRecording (==5.2) -Requires-Dist: pyobjc-framework-DiscRecordingUI (==5.2) -Requires-Dist: pyobjc-framework-DiskArbitration (==5.2) -Requires-Dist: pyobjc-framework-DVDPlayback (==5.2) -Requires-Dist: pyobjc-framework-ExceptionHandling (==5.2) -Requires-Dist: pyobjc-framework-InstallerPlugins (==5.2) -Requires-Dist: pyobjc-framework-LatentSemanticMapping (==5.2) -Requires-Dist: pyobjc-framework-LaunchServices (==5.2) -Requires-Dist: pyobjc-framework-OSAKit (==5.2) -Requires-Dist: pyobjc-framework-PreferencePanes (==5.2) -Requires-Dist: pyobjc-framework-Quartz (==5.2) -Requires-Dist: pyobjc-framework-ScreenSaver (==5.2) -Requires-Dist: pyobjc-framework-Security (==5.2) -Requires-Dist: pyobjc-framework-SecurityFoundation (==5.2) -Requires-Dist: pyobjc-framework-SecurityInterface (==5.2) -Requires-Dist: pyobjc-framework-SearchKit (==5.2) -Requires-Dist: pyobjc-framework-SyncServices (==5.2) -Requires-Dist: pyobjc-framework-SystemConfiguration (==5.2) -Requires-Dist: pyobjc-framework-WebKit (==5.2) -Requires-Dist: pyobjc-framework-XgridFoundation (==5.2) ; platform_release < "12.0" -Requires-Dist: pyobjc-framework-Message (==5.2) ; platform_release < "13.0" -Requires-Dist: pyobjc-framework-AppleScriptObjC (==5.2) ; platform_release >= "10.0" -Requires-Dist: pyobjc-framework-CoreLocation (==5.2) ; platform_release >= "10.0" -Requires-Dist: pyobjc-framework-CoreWLAN (==5.2) ; platform_release >= "10.0" -Requires-Dist: pyobjc-framework-ImageCaptureCore (==5.2) ; platform_release >= "10.0" -Requires-Dist: pyobjc-framework-IOSurface (==5.2) ; platform_release >= "10.0" -Requires-Dist: pyobjc-framework-NetFS (==5.2) ; platform_release >= "10.0" -Requires-Dist: pyobjc-framework-OpenDirectory (==5.2) ; platform_release >= "10.0" -Requires-Dist: pyobjc-framework-ServiceManagement (==5.2) ; platform_release >= "10.0" -Requires-Dist: pyobjc-framework-iTunesLibrary (==5.2) ; platform_release >= "10.0" -Requires-Dist: pyobjc-framework-ServerNotification (==5.2) ; platform_release >= "10.0" and platform_release < "13.0" -Requires-Dist: pyobjc-framework-AVFoundation (==5.2) ; platform_release >= "11.0" -Requires-Dist: pyobjc-framework-CoreMedia (==5.2) ; platform_release >= "11.0" -Requires-Dist: pyobjc-framework-CoreMediaIO (==5.2) ; platform_release >= "11.0" -Requires-Dist: pyobjc-framework-IMServicePlugIn (==5.2) ; platform_release >= "11.0" -Requires-Dist: pyobjc-framework-StoreKit (==5.2) ; platform_release >= "11.0" -Requires-Dist: pyobjc-framework-SceneKit (==5.2) ; platform_release >= "11.0" -Requires-Dist: pyobjc-framework-libdispatch (==5.2) ; platform_release >= "12.0" -Requires-Dist: pyobjc-framework-Accounts (==5.2) ; platform_release >= "12.0" -Requires-Dist: pyobjc-framework-EventKit (==5.2) ; platform_release >= "12.0" -Requires-Dist: pyobjc-framework-GameCenter (==5.2) ; platform_release >= "12.0" -Requires-Dist: pyobjc-framework-Social (==5.2) ; platform_release >= "12.0" -Requires-Dist: pyobjc-framework-GameKit (==5.2) ; platform_release >= "12.0" -Requires-Dist: pyobjc-framework-VideoToolbox (==5.2) ; platform_release >= "12.0" -Requires-Dist: pyobjc-framework-AVKit (==5.2) ; platform_release 
>= "13.0" -Requires-Dist: pyobjc-framework-GameController (==5.2) ; platform_release >= "13.0" -Requires-Dist: pyobjc-framework-MapKit (==5.2) ; platform_release >= "13.0" -Requires-Dist: pyobjc-framework-MediaAccessibility (==5.2) ; platform_release >= "13.0" -Requires-Dist: pyobjc-framework-MediaLibrary (==5.2) ; platform_release >= "13.0" -Requires-Dist: pyobjc-framework-MediaToolbox (==5.2) ; platform_release >= "13.0" -Requires-Dist: pyobjc-framework-SpriteKit (==5.2) ; platform_release >= "13.0" -Requires-Dist: pyobjc-framework-CloudKit (==5.2) ; platform_release >= "14.0" -Requires-Dist: pyobjc-framework-CoreBluetooth (==5.2) ; platform_release >= "14.0" -Requires-Dist: pyobjc-framework-CryptoTokenKit (==5.2) ; platform_release >= "14.0" -Requires-Dist: pyobjc-framework-FinderSync (==5.2) ; platform_release >= "14.0" -Requires-Dist: pyobjc-framework-LocalAuthentication (==5.2) ; platform_release >= "14.0" -Requires-Dist: pyobjc-framework-MultipeerConnectivity (==5.2) ; platform_release >= "14.0" -Requires-Dist: pyobjc-framework-NotificationCenter (==5.2) ; platform_release >= "14.0" -Requires-Dist: pyobjc-framework-Contacts (==5.2) ; platform_release >= "15.0" -Requires-Dist: pyobjc-framework-ContactsUI (==5.2) ; platform_release >= "15.0" -Requires-Dist: pyobjc-framework-ModelIO (==5.2) ; platform_release >= "15.0" -Requires-Dist: pyobjc-framework-NetworkExtension (==5.2) ; platform_release >= "15.0" -Requires-Dist: pyobjc-framework-Photos (==5.2) ; platform_release >= "15.0" -Requires-Dist: pyobjc-framework-PhotosUI (==5.2) ; platform_release >= "15.0" -Requires-Dist: pyobjc-framework-SafariServices (==5.2) ; platform_release >= "15.0" -Requires-Dist: pyobjc-framework-GameplayKit (==5.2) ; platform_release >= "15.0" -Requires-Dist: pyobjc-framework-Intents (==5.2) ; platform_release >= "16.0" -Requires-Dist: pyobjc-framework-MediaPlayer (==5.2) ; platform_release >= "16.0" -Requires-Dist: pyobjc-framework-ColorSync (==5.2) ; platform_release >= "17.0" -Requires-Dist: pyobjc-framework-CoreML (==5.2) ; platform_release >= "17.0" -Requires-Dist: pyobjc-framework-CoreSpotlight (==5.2) ; platform_release >= "17.0" -Requires-Dist: pyobjc-framework-ExternalAccessory (==5.2) ; platform_release >= "17.0" -Requires-Dist: pyobjc-framework-Vision (==5.2) ; platform_release >= "17.0" -Requires-Dist: pyobjc-framework-AdSupport (==5.2) ; platform_release >= "18.0" -Requires-Dist: pyobjc-framework-BusinessChat (==5.2) ; platform_release >= "18.0" -Requires-Dist: pyobjc-framework-NaturalLanguage (==5.2) ; platform_release >= "18.0" -Requires-Dist: pyobjc-framework-Network (==5.2) ; platform_release >= "18.0" -Requires-Dist: pyobjc-framework-UserNotifications (==5.2) ; platform_release >= "18.0" -Requires-Dist: pyobjc-framework-VideoSubscriberAccount (==5.2) ; platform_release >= "18.0" -Requires-Dist: pyobjc-framework-CalendarStore (==5.2) ; platform_release >= "9.0" -Requires-Dist: pyobjc-framework-Collaboration (==5.2) ; platform_release >= "9.0" -Requires-Dist: pyobjc-framework-DictionaryServices (==5.2) ; platform_release >= "9.0" -Requires-Dist: pyobjc-framework-FSEvents (==5.2) ; platform_release >= "9.0" -Requires-Dist: pyobjc-framework-InputMethodKit (==5.2) ; platform_release >= "9.0" -Requires-Dist: pyobjc-framework-InstantMessage (==5.2) ; platform_release >= "9.0" -Requires-Dist: pyobjc-framework-PubSub (==5.2) ; platform_release >= "9.0" -Requires-Dist: pyobjc-framework-QTKit (==5.2) ; platform_release >= "9.0" -Requires-Dist: pyobjc-framework-ScriptingBridge (==5.2) ; 
platform_release >= "9.0" -Requires-Dist: pyobjc-framework-InterfaceBuilderKit (==5.2) ; platform_release >= "9.0" and platform_release < "11.0" - -PyObjC is a bridge between Python and Objective-C. It allows full -featured Cocoa applications to be written in pure Python. It is also -easy to use other frameworks containing Objective-C class libraries -from Python and to mix in Objective-C, C and C++ source. - -This package is a pseudo-package that will install all pyobjc related -packages (that is, pyobjc-core as well as wrappers for frameworks on -macOS) - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - diff --git a/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/RECORD deleted file mode 100644 index 2e48e26a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/RECORD +++ /dev/null @@ -1,6 +0,0 @@ -pyobjc-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc-5.2.dist-info/METADATA,sha256=J8V4daANUSLblwcXaGoxQXjKwEmnotJkJVuQIhrENVI,9454 -pyobjc-5.2.dist-info/RECORD,, -pyobjc-5.2.dist-info/WHEEL,sha256=pqI-DBMA-Z6OTNov1nVxs7mwm6Yj2kHZGNp_6krVn1E,92 -pyobjc-5.2.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -pyobjc-5.2.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 diff --git a/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/WHEEL deleted file mode 100644 index 32b6cef6..00000000 --- a/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_core-5.2-py2.7-nspkg.pth b/env/lib/python2.7/site-packages/pyobjc_core-5.2-py2.7-nspkg.pth deleted file mode 100644 index f97ec6ad..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_core-5.2-py2.7-nspkg.pth +++ /dev/null @@ -1 +0,0 @@ -import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('PyObjCTools',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('PyObjCTools', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('PyObjCTools', [os.path.dirname(p)])));m = m or sys.modules.setdefault('PyObjCTools', types.ModuleType('PyObjCTools'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/METADATA deleted file mode 100644 index 1563a1d0..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/METADATA +++ /dev/null @@ -1,57 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-core -Version: 5.2 -Summary: Python<->ObjC Interoperability Module -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren, bbum, SteveM, LeleG, many others stretching back through the reaches of time... 
-Author-email: RonaldOussoren@mac.com -Maintainer: Ronald Oussoren -Maintainer-email: RonaldOussoren@mac.com -License: MIT License -Keywords: Objective-C,Cocoa -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open - -PyObjC is a bridge between Python and Objective-C. It allows full -featured Cocoa applications to be written in pure Python. It is also -easy to use other frameworks containing Objective-C class libraries -from Python and to mix in Objective-C, C and C++ source. - -Python is a highly dynamic programming language with a shallow learning -curve. It combines remarkable power with very clear syntax. - -PyObjC also supports full introspection of Objective-C classes and -direct invocation of Objective-C APIs from the interactive interpreter. 
- -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/RECORD deleted file mode 100644 index cfdb022b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/RECORD +++ /dev/null @@ -1,71 +0,0 @@ -PyObjCTools/KeyValueCoding.py,sha256=Lq125lyIYCdiGnkll1xukdTFcS0iuoQTeWDfLxKezZs,10741 -PyObjCTools/KeyValueCoding.pyc,, -PyObjCTools/MachSignals.py,sha256=58e2ysrJ19B8G1nrKTA2cvaOh09KpMPAmgHlTLACY3k,1057 -PyObjCTools/MachSignals.pyc,, -PyObjCTools/Signals.py,sha256=wk2n-gtoJFwlYjW3rtSK_674jpYMnFNWquMzWSfNUmg,2339 -PyObjCTools/Signals.pyc,, -PyObjCTools/TestSupport.py,sha256=WiHj2tHOMGJJp-8TfHfPp19DcyK2XyJV90AV4gbV9WM,34383 -PyObjCTools/TestSupport.pyc,, -objc/__init__.py,sha256=aNp4ZqVs7pFRYgHc-1Q8CXimz5vJzEibcXYJNufrwME,2191 -objc/__init__.pyc,, -objc/_bridges.py,sha256=pLvK5VBNeVd0dioS75FNto9gzzl4Aauk94QRVQgFmHk,1739 -objc/_bridges.pyc,, -objc/_bridgesupport.py,sha256=afl5CVz4Yi2MyxJR9bBbAhjbRv0dPIZm0e3fqZTe-Kc,27992 -objc/_bridgesupport.pyc,, -objc/_callable_docstr.py,sha256=1ou7XbiCjkCKE2pEi7LUozpJ1BStpnU5MV-Hsh7JYl0,8687 -objc/_callable_docstr.pyc,, -objc/_category.py,sha256=zy96hbOwV-xQFUyXg9ILTf4nn6TMW-VJnrJDXW6_gHk,2368 -objc/_category.pyc,, -objc/_compat.py,sha256=0Re8eF5yKXcOadZ1JN0yMbSi4zcgqOpwR3M1nCavfVw,1015 -objc/_compat.pyc,, -objc/_context.py,sha256=XnuGJ7ong0RkBC1elTa7Vn7jS-dMPq2KPlQWjCth_TU,1283 -objc/_context.pyc,, -objc/_convenience.py,sha256=Q8ehtQP3LSrwVxbLdzms_zJBfsMmOqoak1gV_j3oy2M,3627 -objc/_convenience.pyc,, -objc/_convenience_mapping.py,sha256=TEe22-XMsg4rVTA2IIrj--TJnxjhhs4rhW9xRMHT52M,3781 -objc/_convenience_mapping.pyc,, -objc/_convenience_nsarray.py,sha256=ykq0-y5CTm2luEYCrwhKw1yFmZ8bM6jFeXZIJLVpXmU,12203 -objc/_convenience_nsarray.pyc,, -objc/_convenience_nsdata.py,sha256=iV_Wj_YwT_Sdy65SwEpA2wbTSJ_DhzkHIVHG2CHPICw,2315 -objc/_convenience_nsdata.pyc,, -objc/_convenience_nsdecimal.py,sha256=AmCbiXEJzTO1twXTFLkdjPAhy_6Z0hetX8Gmr8q-QC8,3754 -objc/_convenience_nsdecimal.pyc,, -objc/_convenience_nsdictionary.py,sha256=Y7HO4Bi3eySjO6KZRAoH4OlBJJJzGpx3Lb23ptr68Qc,11360 -objc/_convenience_nsdictionary.pyc,, -objc/_convenience_nsobject.py,sha256=v7FURPmuSrpl0zKJTHBaxBwYAPTDEmPZBjFLIQBMC4I,3493 -objc/_convenience_nsobject.pyc,, -objc/_convenience_nsset.py,sha256=wSElB1_cZM_r2j6T91vRyVZLEHq3avy9JCNgsOn2oZQ,9010 -objc/_convenience_nsset.pyc,, -objc/_convenience_nsstring.py,sha256=wwtke7XWET-FTfOc6Z2Vf264HVnKSV4SijEhGHtMJVU,556 -objc/_convenience_nsstring.pyc,, -objc/_convenience_sequence.py,sha256=TWqOmG-7I_vllaIiPURd2qr25jxWNwC7z8l9xjDFKyk,1278 -objc/_convenience_sequence.pyc,, -objc/_descriptors.py,sha256=jRjbqwZkbB0zoTFHO3dXQVoI6OIJEj0UYMWy2j6951w,11076 -objc/_descriptors.pyc,, -objc/_dyld.py,sha256=LIzMfaq9DVuG-fOLx0E8qehTWgjvIWXP9lEjDSZmkHY,3905 -objc/_dyld.pyc,, -objc/_framework.py,sha256=1dgGcoq_9MNpa7hV_pXux6F09N5A7G8hTzopWRJsYPE,632 -objc/_framework.pyc,, -objc/_lazyimport.py,sha256=U8Rborns_j16ro-EnxDkdNlsqECCEIzrahXkR5d_b5c,16151 -objc/_lazyimport.pyc,, -objc/_locking.py,sha256=02klnYCfZv52kQ-8-5Q6CvYu7KdHXqYgoB6nL9IkN14,835 -objc/_locking.pyc,, -objc/_machsignals.so,sha256=ubWQmsXdAxzeg2eS0eHc6NcV5qPsPnxxu0sIWXGx2-Q,11580 -objc/_objc.so,sha256=BQVvBeqotfscUopLV3n75KcPlzb-Cyy42OAuBjg-tvI,672072 
-objc/_properties.py,sha256=3yDmanAfiEGWv1mLrgBrasfnPkueywNR7LNWswQr3p8,35598 -objc/_properties.pyc,, -objc/_protocols.py,sha256=MKL5CWH9Xbvp1D4CxL06O5Urq5REcHbGAhlbWru-DnM,1014 -objc/_protocols.pyc,, -objc/_pycoder.py,sha256=bo8w98Jh_B0FfhRFvIP3PpPHnj33ZCGuQtQmgqFcJW4,19125 -objc/_pycoder.pyc,, -objc/_pythonify.py,sha256=ExW4ameTDLWmaMVdzGootq41a8mrOYOJi5WAVC4GR7w,2755 -objc/_pythonify.pyc,, -pyobjc_core-5.2-py2.7-nspkg.pth,sha256=EWk3VWxBM0YtSF753JAPIh3HBrySs8L9P44-sx0lzZE,564 -pyobjc_core-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_core-5.2.dist-info/METADATA,sha256=rFOldl91mnUSnChfLA5_CMshh-qLCdXowb6F5-sx_7s,2410 -pyobjc_core-5.2.dist-info/RECORD,, -pyobjc_core-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_core-5.2.dist-info/include/pyobjc-api.h,sha256=kGLZFJSY2v62v3YcQM28rqRkJNv2llMFb6IHoJ8AZWU,7503 -pyobjc_core-5.2.dist-info/include/pyobjc-compat.h,sha256=wydzbdSsZYnWsjk5y7fxvg6mSpWVwBg9rgSH29_nRBo,17004 -pyobjc_core-5.2.dist-info/namespace_packages.txt,sha256=NRlruBeqb3C4r0mY4rn-7TQyXoY1yVomhiiauw-3Q_0,12 -pyobjc_core-5.2.dist-info/top_level.txt,sha256=WvGRTfxcLxJwiDngEugYJMKcWmgeAcvltGBnm99YGfc,28 diff --git a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/include/pyobjc-api.h b/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/include/pyobjc-api.h deleted file mode 100644 index 9271668d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/include/pyobjc-api.h +++ /dev/null @@ -1,215 +0,0 @@ -#ifndef PyObjC_API_H -#define PyObjC_API_H - -/* - * Use this in helper modules for the objc package, and in wrappers - * for functions that deal with objective-C objects/classes - * - * This header defines some utility wrappers for importing and using - * the core bridge. - * - * This is the *only* header file that should be used to access - * functionality in the core bridge. - * - * WARNING: this file is not part of the public interface of PyObjC and - * might change or be removed without warning or regard for backward - * compatibility. - */ - -#include <objc/objc.h> - -#import <Foundation/Foundation.h> - -#include "pyobjc-compat.h" - -#include <objc/objc-runtime.h> - -/* Current API version, increase whenever: - * - Semantics of current functions change - * - Functions are removed - * - * Do not increase when adding a new function, the struct_len field - * can be used for detecting if a function has been added. - */ -#define PYOBJC_API_VERSION 20 - -#define PYOBJC_API_NAME "__C_API__" - -/* - * Only add items to the end of this list! 
- */ -typedef int (RegisterMethodMappingFunctionType)( - Class, - SEL, - PyObject *(*)(PyObject*, PyObject*, PyObject*), - void (*)(void*, void*, void**, void*)); - -struct pyobjc_api { - int api_version; /* API version */ - size_t struct_len; /* Length of this struct */ - RegisterMethodMappingFunctionType *register_method_mapping; - id (*obj_get_object)(PyObject*); - Class (*cls_get_class)(PyObject*); - id (*python_to_id)(PyObject*); - PyObject* (*id_to_python)(id); - void (*err_objc_to_python)(NSException*); - int (*py_to_objc)(const char*, PyObject*, void*); - PyObject* (*objc_to_py)(const char*, void*); - Py_ssize_t (*sizeof_type)(const char*); - Class (*sel_get_class)(PyObject* sel); - SEL (*sel_get_sel)(PyObject* sel); - void (*fill_super)(struct objc_super*, Class, id); - void (*fill_super_cls)(struct objc_super*, Class, Class); - int (*register_pointer_wrapper)( - const char*, const char*, PyObject* (*pythonify)(void*), - int (*depythonify)(PyObject*, void*) - ); - void (*unsupported_method_imp)(void*, void*, void**, void*); - PyObject* (*unsupported_method_caller)(PyObject*, PyObject*, PyObject*); - void (*err_python_to_objc_gil)(PyGILState_STATE* state); - int (*simplify_sig)(const char* signature, char* buf, size_t buflen); - void (*free_c_array)(int,void*); - int (*py_to_c_array)(BOOL, BOOL, const char*, PyObject*, void**, Py_ssize_t*, PyObject**); - PyObject* (*c_array_to_py)(const char*, void*, Py_ssize_t); - PyTypeObject* imp_type; - IMP (*imp_get_imp)(PyObject*); - SEL (*imp_get_sel)(PyObject*); - PyObject* (*newtransient)(id objc_object, int* cookie); - void (*releasetransient)(PyObject* proxy, int cookie); - PyObject** pyobjc_null; - int (*dep_c_array_count)(const char* type, Py_ssize_t count, BOOL strict, PyObject* value, void* datum); - PyObject* (*varlistnew)(const char* tp, void* array); - int (*pyobjcobject_convert)(PyObject*,void*); - int (*register_id_alias)(const char*, const char*); -}; - -#ifndef PYOBJC_BUILD - -#ifndef PYOBJC_METHOD_STUB_IMPL -static struct pyobjc_api* PyObjC_API; -#endif /* PYOBJC_METHOD_STUB_IMPL */ - -#define PyObjCIMP_Check(obj) PyObject_TypeCheck(obj, PyObjC_API->imp_type) -#define PyObjCObject_GetObject (PyObjC_API->obj_get_object) -#define PyObjCClass_GetClass (PyObjC_API->cls_get_class) -#define PyObjCSelector_GetClass (PyObjC_API->sel_get_class) -#define PyObjCSelector_GetSelector (PyObjC_API->sel_get_sel) -#define PyObjC_PythonToId (PyObjC_API->python_to_id) -#define PyObjC_IdToPython (PyObjC_API->id_to_python) -#define PyObjCErr_FromObjC (PyObjC_API->err_objc_to_python) -#define PyObjCErr_ToObjCWithGILState (PyObjC_API->err_python_to_objc_gil) -#define PyObjC_PythonToObjC (PyObjC_API->py_to_objc) -#define PyObjC_ObjCToPython (PyObjC_API->objc_to_py) -#define PyObjC_RegisterMethodMapping (PyObjC_API->register_method_mapping) -#define PyObjC_InitSuper (PyObjC_API->fill_super) -#define PyObjC_InitSuperCls (PyObjC_API->fill_super_cls) -#define PyObjCPointerWrapper_Register (PyObjC_API->register_pointer_wrapper) -#define PyObjCUnsupportedMethod_IMP (PyObjC_API->unsupported_method_imp) -#define PyObjCUnsupportedMethod_Caller (PyObjC_API->unsupported_method_caller) -#define PyObjCRT_SizeOfType (PyObjC_API->sizeof_type) -#define PyObjC_FreeCArray (PyObjC_API->free_c_array) -#define PyObjC_PythonToCArray (PyObjC_API->py_to_c_array) -#define PyObjC_CArrayToPython (PyObjC_API->c_array_to_py) -#define PyObjCIMP_GetIMP (PyObjC_API->imp_get_imp) -#define PyObjCIMP_GetSelector (PyObjC_API->imp_get_sel) -#define PyObjCObject_NewTransient 
(PyObjC_API->newtransient) -#define PyObjCObject_ReleaseTransient (PyObjC_API->releasetransient) -#define PyObjC_NULL (*(PyObjC_API->pyobjc_null)) -#define PyObjC_DepythonifyCArray (PyObjC_API->dep_c_array_count) -#define PyObjC_VarList_New (PyObjC_API->varlistnew) -#define PyObjCObject_Convert (PyObjC_API->pyobjcobject_convert) -#define PyObjCPointerWrapper_RegisterID (PyObjC_API->register_id_alias) - -typedef void (*PyObjC_Function_Pointer)(void); -typedef struct PyObjC_function_map { - const char* name; - PyObjC_Function_Pointer function; -} PyObjC_function_map; - - - -#ifndef PYOBJC_METHOD_STUB_IMPL - -static inline PyObject* -PyObjC_CreateInlineTab(PyObjC_function_map* map) -{ -#if PY_MAJOR_VERSION == 2 && PY_MINOR_VERSION < 7 - return PyCObject_FromVoidPtr(map, NULL); -#else - return PyCapsule_New(map, "objc.__inline__", NULL); -#endif -} - -static inline int -PyObjC_ImportAPI(PyObject* calling_module) -{ - PyObject* m; - PyObject* d; - PyObject* api_obj; -#if PY_MAJOR_VERSION == 2 - PyObject* name = PyString_FromString("objc"); -#else - PyObject* name = PyUnicode_FromString("objc"); -#endif - - m = PyImport_Import(name); - Py_DECREF(name); - if (m == NULL) { - return -1; - } - - d = PyModule_GetDict(m); - if (d == NULL) { - PyErr_SetString(PyExc_RuntimeError, - "No dict in objc module"); - return -1; - } - - api_obj = PyDict_GetItemString(d, PYOBJC_API_NAME); - if (api_obj == NULL) { - PyErr_SetString(PyExc_RuntimeError, - "No C_API in objc module"); - return -1; - } -#if PY_MAJOR_VERSION == 2 && PY_VERSION_MAJOR < 7 - PyObjC_API = (struct pyobjc_api *)PyCObject_AsVoidPtr(api_obj); -#else - PyObjC_API = (struct pyobjc_api *)PyCapsule_GetPointer(api_obj, "objc." PYOBJC_API_NAME); -#endif - if (PyObjC_API == NULL) { - return 0; - } - if (PyObjC_API->api_version != PYOBJC_API_VERSION) { - PyErr_Format(PyExc_RuntimeError, - "Wrong version of PyObjC C API (got %d, expected %d)", - (int)PyObjC_API->api_version, (int)PYOBJC_API_VERSION); - return -1; - } - - if (PyObjC_API->struct_len < sizeof(struct pyobjc_api)) { - PyErr_Format(PyExc_RuntimeError, - "Wrong struct-size of PyObjC C API (got %d, expected %d)", - (int)PyObjC_API->struct_len, (int)sizeof(struct pyobjc_api)); - return -1; - } - - Py_INCREF(api_obj); - - /* Current pyobjc implementation doesn't allow deregistering - * information, avoid unloading of users of the C-API. 
- * (Yes this is ugle, patches to fix this situation are apriciated) - */ - Py_INCREF(calling_module); - - return 0; -} -#endif /* PYOBJC_METHOD_STUB_IMPL */ - -#else /* PyObjC_BUILD */ - -extern struct pyobjc_api objc_api; -extern int PyObjCAPI_Register(PyObject* module); - -#endif /* !PYOBJC_BUILD */ - -#endif /* PyObjC_API_H */ diff --git a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/include/pyobjc-compat.h b/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/include/pyobjc-compat.h deleted file mode 100644 index 155d713e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/include/pyobjc-compat.h +++ /dev/null @@ -1,647 +0,0 @@ -#ifndef PyObjC_COMPAT_H -#define PyObjC_COMPAT_H - -/* - * - * Start of compiler definitions - * - */ -#ifndef __has_feature -# define __has_feature(x) 0 -#endif -#ifndef __has_extension -# define __has_extension(x) __has_feature(x) -#endif - -#if __has_extension(c_static_assert) -# define STATIC_ASSERT(test, message) _Static_assert(test, message) -#else -# define STATIC_ASSERT(test, message) switch(0){ case 0: case test:;} -#endif - -#if !__has_feature(objc_instancetype) -# define instancetype id -#endif - -/* - * - * Start of Cocoa definitions - * - * - * Ensure that CGFloat and NSInteger are always - * available, even when compiling with ancient SDKs. - * - * Also ensure that MAC_OS_X_VERSION_... macros are available - * for all existing OSX versions. - */ -#ifndef CGFLOAT_DEFINED - -#ifdef __LP64__ -# error "Huh? 64-bit but no CFFloat available???" -#endif - -typedef float CGFloat; -#define CGFLOAT_MIN FLT_MIN -#define CGFLOAT_MAX FLT_MAX -#define CGFLOAT_IS_DOUBLE 0 -#define CGFLOAT_DEFINED - -#endif /* CGFLOAT_DEFINED */ - -#ifndef NSINTEGER_DEFINED - -#ifdef __LP64__ -# error "Huh? 64-bit but no NSINTEGER available???" -#endif - -typedef int NSInteger; -typedef unsigned int NSUInteger; - -#define NSIntegerMax LONG_MAX -#define NSIntegerMin LONG_MIN -#define NSUIntegerMax ULONG_MAX - -#define NSINTEGER_DEFINED - -#endif - -/* On 10.1 there are no defines for the OS version. */ -#ifndef MAC_OS_X_VERSION_10_1 -#define MAC_OS_X_VERSION_10_1 1010 -#define MAC_OS_X_VERSION_MAX_ALLOWED MAC_OS_X_VERSION_10_1 - -#error "MAC_OS_X_VERSION_10_1 not defined. You aren't running 10.1 are you?" 
- -#endif - - -#ifndef MAC_OS_X_VERSION_10_2 -#define MAC_OS_X_VERSION_10_2 1020 -#endif - -#ifndef MAC_OS_X_VERSION_10_3 -#define MAC_OS_X_VERSION_10_3 1030 -#endif - -#ifndef MAC_OS_X_VERSION_10_4 -#define MAC_OS_X_VERSION_10_4 1040 -#endif - -#ifndef MAC_OS_X_VERSION_10_5 -#define MAC_OS_X_VERSION_10_5 1050 -#endif - -#ifndef MAC_OS_X_VERSION_10_6 -#define MAC_OS_X_VERSION_10_6 1060 -#endif - -#ifndef MAC_OS_X_VERSION_10_7 -#define MAC_OS_X_VERSION_10_7 1070 -#endif - -#ifndef MAC_OS_X_VERSION_10_8 -#define MAC_OS_X_VERSION_10_8 1080 -#endif - -#ifndef MAC_OS_X_VERSION_10_9 -#define MAC_OS_X_VERSION_10_9 1090 -#endif - -#ifndef MAC_OS_X_VERSION_10_10 -#define MAC_OS_X_VERSION_10_10 101000 -#endif - -#ifndef MAC_OS_X_VERSION_10_10_2 -#define MAC_OS_X_VERSION_10_10_2 101002 -#endif - -#ifndef MAC_OS_X_VERSION_10_10_3 -#define MAC_OS_X_VERSION_10_10_3 101003 -#endif - -#ifndef MAC_OS_X_VERSION_10_11 -#define MAC_OS_X_VERSION_10_11 101100 -#endif - -#ifndef MAC_OS_X_VERSION_10_11_1 -#define MAC_OS_X_VERSION_10_11_1 101101 -#endif - -#ifndef MAC_OS_X_VERSION_10_11_2 -#define MAC_OS_X_VERSION_10_11_2 101102 -#endif - -#ifndef MAC_OS_X_VERSION_10_11_3 -#define MAC_OS_X_VERSION_10_11_3 101103 -#endif - -#ifndef MAC_OS_X_VERSION_10_11_4 -#define MAC_OS_X_VERSION_10_11_4 101104 -#endif - -#ifndef MAC_OS_X_VERSION_10_12 -#define MAC_OS_X_VERSION_10_12 101200 -#endif - -#ifndef MAC_OS_X_VERSION_10_12_1 -#define MAC_OS_X_VERSION_10_12_1 101201 -#endif - -#ifndef MAC_OS_X_VERSION_10_12_2 -#define MAC_OS_X_VERSION_10_12_2 101202 -#endif - -#ifndef MAC_OS_X_VERSION_10_12_4 -#define MAC_OS_X_VERSION_10_12_4 101204 -#endif - -#ifndef MAC_OS_X_VERSION_10_13 -#define MAC_OS_X_VERSION_10_13 101300 -#endif - -#ifndef MAC_OS_X_VERSION_10_13_1 -#define MAC_OS_X_VERSION_10_13_1 101301 -#endif - -#ifndef MAC_OS_X_VERSION_10_13_2 -#define MAC_OS_X_VERSION_10_13_2 101302 -#endif - -#ifndef MAC_OS_X_VERSION_10_13_3 -#define MAC_OS_X_VERSION_10_13_3 101303 -#endif - -#ifndef MAC_OS_X_VERSION_10_13_4 -#define MAC_OS_X_VERSION_10_13_4 101304 -#endif - -#ifndef MAC_OS_X_VERSION_10_13_5 -#define MAC_OS_X_VERSION_10_13_5 101305 -#endif - -#ifndef MAC_OS_X_VERSION_10_13_6 -#define MAC_OS_X_VERSION_10_13_6 101306 -#endif - -#ifndef MAC_OS_X_VERSION_10_14 -#define MAC_OS_X_VERSION_10_14 101400 -#endif - -#ifndef MAC_OS_X_VERSION_10_14_1 -#define MAC_OS_X_VERSION_10_14_1 101401 -#endif - -#ifndef MAC_OS_X_VERSION_10_14_2 -#define MAC_OS_X_VERSION_10_14_2 101402 -#endif - -#ifndef MAC_OS_X_VERSION_10_14_3 -#define MAC_OS_X_VERSION_10_14_3 101403 -#endif - -#ifndef MAC_OS_X_VERSION_10_14_4 -#define MAC_OS_X_VERSION_10_14_4 101404 -#endif - -#if PyObjC_BUILD_RELEASE <= 1005 - -/* On MacOS X, +signatureWithObjCTypes: is a method of NSMethodSignature, - * but that method is not present in the header files until Mac OS X 10.5. - * - * Add a definition of the method when compiling on ancient OSX releases - * to ensure that the code gets compiled without warnings. - */ -@interface NSMethodSignature (WarningKiller) - +(instancetype)signatureWithObjCTypes:(const char*)types; - @end /* interface NSMethodSignature */ - -#endif /* PyObjC_BUILD_RELEASE <= 1005 */ - -/* - * Explicit support for weak-linking functions - * - * For some reason implicit weak-linking using '#pragma weak' and - * '__attribute__((__weak__))' doesn't work (at least of some functions) - * when building on 10.8 and deploying to * 10.5) - * - * The code below introduces infrastructure that makes it fairly - * painless to do weak-linking anyway. 
- * - * Usage for function CFArrayCreate: - * * Use 'WEAK_LINKED_NAME(CFArrayCreate)' at the start of a wrapper module - * * Use 'USE(CFArrayCreate)' to actually call the function, don't use the - * actual function. - * * Use 'CHECK_WEAK_LINK(module, CFArrayCreate)' in the module init function, - * this will remove "CFArrayCreate" from the module dictionary when the function - * cannot by found by dlsym. - * * All access to function should be done through weak-refs like this. - * - * NOTE: When the version that introduced the function is known, that version number - * can be appended to the macros and the function will be hard-linked when - * the minimal deployment target is high enough. - */ -#include <dlfcn.h> - -#define WEAK_LINKED_NAME(NAME) static __typeof__(&NAME) ptr_ ## NAME; -#define USE(NAME) ptr_ ## NAME -#define CHECK_WEAK_LINK(module, NAME) \ - do { \ - void* dl = dlopen(NULL, RTLD_GLOBAL); \ - ptr_ ## NAME = dlsym(dl, PyObjC_STR(NAME)); \ - dlclose(dl); \ - if (ptr_ ## NAME == NULL) { \ - if (PyDict_DelItemString(PyModule_GetDict(module), PyObjC_STR(NAME)) < 0) { \ - PyObjC_INITERROR(); \ - } \ - } \ - } while(0) - -#if MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 -#define WEAK_LINKED_NAME_10_5(NAME) -#define USE_10_5(NAME) NAME -#define CHECK_WEAK_LINK_10_5(module, NAME) do {} while(0) -#else -#define WEAK_LINKED_NAME_10_5(NAME) WEAK_LINKED_NAME(NAME) -#define USE_10_5(NAME) USE(NAME) -#define CHECK_WEAK_LINK_10_5(module, NAME) CHECK_WEAK_LINK(module, NAME) -#endif - -#if MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_6 -#define WEAK_LINKED_NAME_10_6(NAME) -#define USE_10_6(NAME) NAME -#define CHECK_WEAK_LINK_10_6(module, NAME) do {} while(0) -#else -#define WEAK_LINKED_NAME_10_6(NAME) WEAK_LINKED_NAME(NAME) -#define USE_10_6(NAME) USE(NAME) -#define CHECK_WEAK_LINK_10_6(module, NAME) CHECK_WEAK_LINK(module, NAME) -#endif - -#if MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_7 -#define WEAK_LINKED_NAME_10_7(NAME) -#define USE_10_7(NAME) NAME -#define CHECK_WEAK_LINK_10_7(module, NAME) do {} while(0) -#else -#define WEAK_LINKED_NAME_10_7(NAME) WEAK_LINKED_NAME(NAME) -#define USE_10_7(NAME) USE(NAME) -#define CHECK_WEAK_LINK_10_7(module, NAME) CHECK_WEAK_LINK(module, NAME) -#endif - -#if MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_8 -#define WEAK_LINKED_NAME_10_8(NAME) -#define USE_10_8(NAME) NAME -#define CHECK_WEAK_LINK_10_8(module, NAME) do {} while(0) -#else -#define WEAK_LINKED_NAME_10_8(NAME) WEAK_LINKED_NAME(NAME) -#define USE_10_8(NAME) USE(NAME) -#define CHECK_WEAK_LINK_10_8(module, NAME) CHECK_WEAK_LINK(module, NAME) -#endif - -#if MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_9 -#define WEAK_LINKED_NAME_10_9(NAME) -#define USE_10_9(NAME) NAME -#define CHECK_WEAK_LINK_10_9(module, NAME) do {} while(0) -#else -#define WEAK_LINKED_NAME_10_9(NAME) WEAK_LINKED_NAME(NAME) -#define USE_10_9(NAME) USE(NAME) -#define CHECK_WEAK_LINK_10_9(module, NAME) CHECK_WEAK_LINK(module, NAME) -#endif - -#if MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_10 -#define WEAK_LINKED_NAME_10_10(NAME) -#define USE_10_10(NAME) NAME -#define CHECK_WEAK_LINK_10_10(module, NAME) do {} while(0) -#else -#define WEAK_LINKED_NAME_10_10(NAME) WEAK_LINKED_NAME(NAME) -#define USE_10_10(NAME) USE(NAME) -#define CHECK_WEAK_LINK_10_10(module, NAME) CHECK_WEAK_LINK(module, NAME) -#endif - -/* - * - * End of Cocoa definitions - * - */ - - -/* - * - * Start of compiler support helpers - * - */ - -#ifdef __GNUC__ -#define unlikely(x) __builtin_expect (!!(x), 0) 
-#define likely(x) __builtin_expect (!!(x), 1) -#else -#define likely(x) x -#define likely(x) x -#endif - - - - -/* On some versions of GCC <limits.h> defines LONG_LONG_MAX but not LLONG_MAX, compensate. */ -#ifndef LLONG_MIN -#ifdef LONG_LONG_MIN -#define LLONG_MIN LONG_LONG_MIN -#define LLONG_MAX LONG_LONG_MAX -#define ULLONG_MAX ULONG_LONG_MAX -#endif -#endif - -/* - * - * End of compiler support helpers - * - */ - - -#if __LP64__ -#define Py_ARG_NSInteger "l" -#define Py_ARG_NSUInteger "k" -#else -#define Py_ARG_NSInteger "i" -#define Py_ARG_NSUInteger "I" -#endif - - -/* - * - * Python version compatibility - * - */ - - -#define PyObjC__STR(x) #x -#define PyObjC_STR(x) PyObjC__STR(x) - - -/* Use CLINIC_SEP between the prototype and - * description in doc strings, to get clean - * docstrings. - */ -#if PY_VERSION_HEX >= 0x03040000 - -# define CLINIC_SEP "--\n" - -#else - -# define CLINIC_SEP "" - -#endif - -/* Define PyObjC_UNICODE_FAST_PATH when - * 1) We're before Python 3.3, and - * 2) Py_UNICODE has the same size as unichar - * - * Python 3.3 has an optimized representation that - * makes it impossible (and unnecessary) to use the - * "fast path" - */ -#if PY_VERSION_HEX >= 0x03030000 - -#undef PyObjC_UNICODE_FAST_PATH - -#elif Py_UNICODE_SIZE == 2 - -#define PyObjC_UNICODE_FAST_PATH - -#endif - -#if PY_MAJOR_VERSION == 2 - - typedef long Py_hash_t; - -# ifndef Py_ARG_BYTES -# define Py_ARG_BYTES "z" -# endif - - /* Cast a PyObject* to the type expected by the 2.x C API. - * This is a macro because the cast is not necessary for the 3.x C API) - */ -# define UNICODE_CAST(item) ((PyUnicodeObject*)(item)) -# define SLICE_CAST(item) ((PySliceObject*)(item)) - -# define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt) -# define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) -# define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) - - - /* Source-level backward compatibility: use PyCapsule API in sources, fall back to - * PyCObject when needed. 
- */ -# if PY_MINOR_VERSION < 7 -# define PyCapsule_New(pointer, name, destructor) PyCObject_FromVoidPtr(pointer, destructor) -# define PyCapsule_GetPointer(object, name) PyCObject_AsVoidPtr(object) -# define PyCapsule_CheckExact(object) PyCObject_Check(object) -# endif /* Python < 2.7 */ - -#ifdef OBJC_VERSION - -# ifdef PyErr_Format -# undef PyErr_Format -# endif - -# define PyErr_Format PyObjCErr_Format -#endif - - extern PyObject* PyObjCErr_Format(PyObject* exception, const char* format, ...); - - -# define PyText_Check PyString_Check -# define PyText_FromFormat PyString_FromFormat -# define PyText_FromString PyString_FromString -# define PyText_FromStringAndSize PyString_FromStringAndSize -# define PyText_InternFromString PyString_InternFromString -# define PyText_InternInPlace PyString_InternInPlace -# define PyText_Append PyString_ConcatAndDel -# define PyText_AsString PyString_AsString - -# ifndef PyBytes_FromString -# define PyBytes_AsString PyString_AsString -# define PyBytes_Size PyString_Size -# define PyBytes_FromString PyString_FromString -# define PyBytes_FromStringAndSize PyString_FromStringAndSize -# define PyBytes_AS_STRING PyString_AS_STRING -# define PyBytes_GET_SIZE PyString_GET_SIZE -# endif /* !PyBytes_FromString */ - -# define PyBytes_InternFromString PyString_InternFromString -# define PyBytes_InternFromStringAndSize PyObjCString_InternFromStringAndSize - - extern PyObject* PyObjCString_InternFromStringAndSize(const char* v, Py_ssize_t l); - -# define PyObjC_INITERROR() return -# define PyObjC_INITDONE() return - -# define PyObjC_MODULE_INIT(name) \ - void init##name(void); \ - void __attribute__ ((__visibility__ ("default"))) init##name(void) - -# define PyObjC_MODULE_CREATE(name) \ - Py_InitModule4(PyObjC_STR(name), mod_methods, \ - NULL, NULL, PYTHON_API_VERSION); - - -# else /* Py_MAJOR_VERSION == 3 */ - -# ifndef Py_ARG_BYTES -# define Py_ARG_BYTES "y" -# endif - -# define UNICODE_CAST(item) (item) -# define SLICE_CAST(item) (item) - - -# define PyText_Check PyUnicode_Check -# define PyText_FromFormat PyUnicode_FromFormat -# define PyText_FromString PyUnicode_FromString -# define PyText_FromStringAndSize PyUnicode_FromStringAndSize -# define PyText_InternFromString PyUnicode_InternFromString -# define PyText_InternInPlace PyUnicode_InternInPlace -# define PyText_Append PyUnicode_Append -# define PyText_AsString _PyUnicode_AsString - -# define PyInt_FromLong PyLong_FromLong -# define PyInt_FromString PyLong_FromString - - extern int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result); - extern PyObject* PyBytes_InternFromString(const char* v); - extern PyObject* PyBytes_InternFromStringAndSize(const char* v, Py_ssize_t l); - -# define PyObjC_INITERROR() return NULL -# define PyObjC_INITDONE() return m - -# define PyObjC_MODULE_INIT(name) \ - static struct PyModuleDef mod_module = { \ - PyModuleDef_HEAD_INIT, \ - PyObjC_STR(name), \ - NULL, \ - 0, \ - mod_methods, \ - NULL, \ - NULL, \ - NULL, \ - NULL \ - }; \ - \ - PyObject* PyInit_##name(void); \ - PyObject* __attribute__ ((__visibility__ ("default"))) PyInit_##name(void) - -#define PyObjC_MODULE_CREATE(name) \ - PyModule_Create(&mod_module); - -# if PY_MINOR_VERSION >= 3 - - /* - * A micro optimization: when using Python 3.3 or later it - * is possible to access a 'char*' with an ASCII representation - * of a unicode object without first converting it to a bytes - * string (if the string can be encoded as ASCII in the first - * place. 
- * - * This slightly reduces the object allocation rate during - * attribute access. - */ - -# define PyObjC_FAST_UNICODE_ASCII 1 - - extern const char* PyObjC_Unicode_Fast_Bytes(PyObject* object); - -# endif /* Python >= 3.3 */ - -#endif /* PY_MAJOR_VERSION == 3 */ - - - - -#ifdef __clang__ - -/* This is a crude hack to disable a otherwise useful warning in the context of - * PyTuple_SET_ITEM, without disabling it everywhere - */ -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Warray-bounds" -static inline void _PyObjCTuple_SetItem(PyObject* tuple, Py_ssize_t idx, PyObject* value) -{ - PyTuple_SET_ITEM(tuple, idx, value); -} -#undef PyTuple_SET_ITEM -#define PyTuple_SET_ITEM(a, b, c) _PyObjCTuple_SetItem(a, b, c) - -static inline PyObject* _PyObjCTuple_GetItem(PyObject* tuple, Py_ssize_t idx) -{ - return PyTuple_GET_ITEM(tuple, idx); -} -#undef PyTuple_GET_ITEM -#define PyTuple_GET_ITEM(a, b) _PyObjCTuple_GetItem(a, b) - -#pragma clang diagnostic pop - -#endif /* __clang__ */ - - -/* - * - * Helper macros for Cocoa exceptions and the Python GIL - * - */ - -#ifdef NO_OBJC2_RUNTIME - -#define PyObjC_DURING \ - Py_BEGIN_ALLOW_THREADS \ - NS_DURING - -#define PyObjC_HANDLER NS_HANDLER - -#define PyObjC_ENDHANDLER \ - NS_ENDHANDLER \ - Py_END_ALLOW_THREADS - -#else /* !NO_OBJC2_RUNTIME */ - -#define PyObjC_DURING \ - Py_BEGIN_ALLOW_THREADS \ - @try { - -#define PyObjC_HANDLER } @catch(NSObject* _localException) { \ - NSException* localException __attribute__((__unused__))= (NSException*)_localException; - -#define PyObjC_ENDHANDLER \ - } \ - Py_END_ALLOW_THREADS - -#endif /* !NO_OBJC2_RUNTIME */ - -#define PyObjC_BEGIN_WITH_GIL \ - { \ - PyGILState_STATE _GILState; \ - _GILState = PyGILState_Ensure(); - -#define PyObjC_GIL_FORWARD_EXC() \ - do { \ - PyObjCErr_ToObjCWithGILState(&_GILState); \ - } while (0) - - -#define PyObjC_GIL_RETURN(val) \ - do { \ - PyGILState_Release(_GILState); \ - return (val); \ - } while (0) - -#define PyObjC_GIL_RETURNVOID \ - do { \ - PyGILState_Release(_GILState); \ - return; \ - } while (0) - - -#define PyObjC_END_WITH_GIL \ - PyGILState_Release(_GILState); \ - } - - - -#endif /* PyObjC_COMPAT_H */ diff --git a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/namespace_packages.txt b/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/namespace_packages.txt deleted file mode 100644 index a79bb16a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/namespace_packages.txt +++ /dev/null @@ -1 +0,0 @@ -PyObjCTools diff --git a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/top_level.txt deleted file mode 100644 index 8350ecbf..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/top_level.txt +++ /dev/null @@ -1,3 +0,0 @@ -PyObjCTest -PyObjCTools -objc diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/METADATA deleted file mode 100644 index a9eda8fe..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-AVFoundation -Version: 5.2 -Summary: Wrappers for the framework AVFoundation on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: 
PyObjC,AVFoundation -Platform: MacOS X (>=10.7) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - -Wrappers for the "AVFoundation" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/RECORD deleted file mode 100644 index f52a6577..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -AVFoundation/_AVFoundation.so,sha256=NQgJGgQS4h6nCUBFLnp6B_cx1dP_-sj4-birPM9EIxE,24348 -AVFoundation/__init__.py,sha256=wiQEp4vo9Ng1wPWCTlNv0N43CGf46efPCKXa5MRmK9c,811 -AVFoundation/__init__.pyc,, -AVFoundation/_inlines.so,sha256=q1hT4GI4mGCvJCGlBQWspfylZbeF1405csVRAmZtIC4,10184 -AVFoundation/_metadata.py,sha256=knUOMxnPHhuoXBnv6kKjSWoz-VlwFl9Jf3KSJkN50es,137998 -AVFoundation/_metadata.pyc,, -pyobjc_framework_AVFoundation-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_AVFoundation-5.2.dist-info/METADATA,sha256=BkxPIHEY54lNe0nP_ei2YQP19qiWtiyN9paayqCoOhQ,2203 -pyobjc_framework_AVFoundation-5.2.dist-info/RECORD,, -pyobjc_framework_AVFoundation-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_AVFoundation-5.2.dist-info/top_level.txt,sha256=YbV2KFCPDIX3Xp23u1Vp0XUErBiGdM0sxUVZNrQDaKU,13 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ 
-Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/top_level.txt deleted file mode 100644 index 39258a5a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -AVFoundation diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/METADATA deleted file mode 100644 index c77db302..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-AVKit -Version: 5.2 -Summary: Wrappers for the framework AVKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,AVKit -Platform: MacOS X (>=10.9) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - -Wrappers for the "AVKit" framework on macOS introduced in macOS 10.9. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/RECORD deleted file mode 100644 index 0498e9c0..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -AVKit/_AVKit.so,sha256=fJ9fttyXNo4DFbWvodICHgecvsRAbLXSqorT1U1oFeA,9660 -AVKit/__init__.py,sha256=Y_PO3tKET4AaDAkmCB9N1cdg7A4Q7fuVuALe_5KUR6w,720 -AVKit/__init__.pyc,, -AVKit/_metadata.py,sha256=AaxkkO0C9dY1nae7LRaY9csMdsZprND3UHF-GlPF_ZA,2741 -AVKit/_metadata.pyc,, -pyobjc_framework_AVKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_AVKit-5.2.dist-info/METADATA,sha256=-snXCmSD0Qo_xwW5ofcJwf2xxBbatTpPmNHD4JcYkAg,2200 -pyobjc_framework_AVKit-5.2.dist-info/RECORD,, -pyobjc_framework_AVKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_AVKit-5.2.dist-info/top_level.txt,sha256=Ibd3kHY5SbEGLEyGZvzYljutRe3juSkcB4yN0VH4gHY,6 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 5f73d801..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -AVKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
- - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/METADATA deleted file mode 100644 index bd52ba7c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Accounts -Version: 5.2 -Summary: Wrappers for the framework Accounts on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Accounts -Platform: MacOS X (>=10.8) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "Accounts" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/RECORD deleted file mode 100644 index 32ecebf0..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Accounts/__init__.py,sha256=c4bNOVy1cAEQ80l0CmuYgM57-mFGKpkuz-g34cI7Qb4,684 -Accounts/__init__.pyc,, -Accounts/_metadata.py,sha256=zgJ5C4BkddWxJkB1CTtjacB8WlokPILogt41aU8Cals,3065 -Accounts/_metadata.pyc,, -pyobjc_framework_Accounts-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Accounts-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_Accounts-5.2.dist-info/METADATA,sha256=HlnSrTZPCnC0BP4S6yp4H28jVSdLMkvltgAVdyQkJ04,2141 -pyobjc_framework_Accounts-5.2.dist-info/RECORD,, -pyobjc_framework_Accounts-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_Accounts-5.2.dist-info/top_level.txt,sha256=JoXS0owKhVzVc7ED3tQ6xOI52PjlVXbT7CdsMDR5kWg,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/top_level.txt deleted file mode 100644 index 29b85526..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Accounts diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/METADATA deleted file mode 100644 index 44c82b9b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-AddressBook -Version: 5.2 -Summary: Wrappers for the framework AddressBook on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,AddressBook -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "AddressBook" framework on macOS. The Address Book is -a centralized database for contact and other information for people. Appliations -that make use of the AddressBook framework all use the same database. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/RECORD deleted file mode 100644 index e2eecf8e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -AddressBook/_AddressBook.so,sha256=Fq3mhApi44LqhCRGI7BsyDj0rfvqKt-PIUQzneYrbik,9732 -AddressBook/__init__.py,sha256=Repf1GJR0xBzOWNRQVOWShZrcYTIhxafgPaC-1ByATI,802 -AddressBook/__init__.pyc,, -AddressBook/_metadata.py,sha256=4zTNDchAmANUrUCF8OcKFmFSRHwN3TBeYlk-4g7-iAM,21006 -AddressBook/_metadata.pyc,, -pyobjc_framework_AddressBook-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_AddressBook-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_AddressBook-5.2.dist-info/METADATA,sha256=aE-Pt4KTiUuyIuvVHKLx27Ckl6SExcZZTp7BGLIMwn0,2314 -pyobjc_framework_AddressBook-5.2.dist-info/RECORD,, -pyobjc_framework_AddressBook-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_AddressBook-5.2.dist-info/top_level.txt,sha256=lKPtQGbVrqGoHrn2OAoR4LoF6G-B4hYfjJ3HPL-5nS4,12 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/top_level.txt deleted file mode 100644 index 5562672e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AddressBook-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -AddressBook diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/METADATA deleted file mode 100644 index 1b76ba4b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-AppleScriptKit -Version: 5.2 -Summary: Wrappers for the framework AppleScriptKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,AppleScriptKit -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for framework 'AppleScriptKit'. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/RECORD deleted file mode 100644 index 8f1ad9f3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -AppleScriptKit/__init__.py,sha256=Wmz5lj-JSTKGOBM4vUmHpvt5SJfXIYt5DBxHVsT6iDw,713 -AppleScriptKit/__init__.pyc,, -AppleScriptKit/_metadata.py,sha256=vI3iZ7Pufw2rTTtBp3Rie0hA4MR-Mz2jtJA92c3nEws,414 -AppleScriptKit/_metadata.pyc,, -pyobjc_framework_AppleScriptKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_AppleScriptKit-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_AppleScriptKit-5.2.dist-info/METADATA,sha256=yh62NLY-s4VsKbe3mtUBga8AwFYsKyhqAX4ABCBfydE,2143 -pyobjc_framework_AppleScriptKit-5.2.dist-info/RECORD,, -pyobjc_framework_AppleScriptKit-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_AppleScriptKit-5.2.dist-info/top_level.txt,sha256=D1VhV4R6PEoKmnYN20Yh1diAYK0QjjeWx7uS3nmmunE,15 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 0baa2324..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -AppleScriptKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/METADATA deleted file mode 100644 index fbccfba1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/METADATA +++ /dev/null @@ -1,57 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-AppleScriptObjC -Version: 5.2 -Summary: Wrappers for the framework AppleScriptObjC on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,AppleScriptObjC -Platform: MacOS X (>=10.6) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for framework 'AppleScriptObjC' on macOS 10.6. This framework is -not useful for most users, it provides additional functionality for AppleScript -based application bundles. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/RECORD deleted file mode 100644 index b652ae1c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -AppleScriptObjC/__init__.py,sha256=O4ZE0Y06FdX6OTx4nEoEkkU6q2GxMnIcaauccxOAn3s,729 -AppleScriptObjC/__init__.pyc,, -AppleScriptObjC/_metadata.py,sha256=EbqFPD8R_mVl-8B1r999pcYG4jrTjDeKAxHPG1Wd1Ig,414 -AppleScriptObjC/_metadata.pyc,, -pyobjc_framework_AppleScriptObjC-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_AppleScriptObjC-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_AppleScriptObjC-5.2.dist-info/METADATA,sha256=FD-SBherJ7XABW1CCP1yVP2JZxxCbSv2A87Ya6m-LoI,2295 -pyobjc_framework_AppleScriptObjC-5.2.dist-info/RECORD,, -pyobjc_framework_AppleScriptObjC-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_AppleScriptObjC-5.2.dist-info/top_level.txt,sha256=wf4dg--somhwnWZbAuwM1Q-V3fMAYvCo9nYFz4FZ3Ro,16 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/top_level.txt deleted file mode 100644 index 42a29a60..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_AppleScriptObjC-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -AppleScriptObjC diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/METADATA deleted file mode 100644 index 8953a2c8..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/METADATA +++ /dev/null @@ -1,58 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ApplicationServices -Version: 5.2 -Summary: Wrappers for the framework ApplicationServices on macOS -Home-page: 
https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,ApplicationServices,HIServices,PrintCore -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - - -Wrappers for the "ApplicationServices" framework on macOS 10.5 or later. Core Text is an -advanced, low-level technology for laying out text and handling fonts. It is -designed for high performance and ease of use. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/RECORD deleted file mode 100644 index 08a84a8d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/RECORD +++ /dev/null @@ -1,15 +0,0 @@ -ApplicationServices/__init__.py,sha256=a58fQYAO5Maqvl_9WFHHKfsncmnMfojYU32V7tmjJzE,848 -ApplicationServices/__init__.pyc,, -HIServices/__init__.py,sha256=0Mg39vodYAfxiGq8xOrXk9uAurC3hN8VVEKp6XdTqMs,698 -HIServices/__init__.pyc,, -HIServices/_metadata.py,sha256=kmpVYJ_lE81cESyCXCnorINRfVzRbtUv-JaHRlRZsVU,52601 -HIServices/_metadata.pyc,, -PrintCore/__init__.py,sha256=hjoI1cTLKwMz7rNrNfGrrgBPz45ax6mjUJvVARRkqUc,1196 -PrintCore/__init__.pyc,, -PrintCore/_metadata.py,sha256=Gtu4cMGkuRAsjirMLbpu_bIkHFrdPgfhap6IpH4Oo3s,33055 -PrintCore/_metadata.pyc,, -pyobjc_framework_ApplicationServices-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ApplicationServices-5.2.dist-info/METADATA,sha256=_W4s2hk1S7F1dc9YR6ymQX0jsM0dOPryXUbLrTnVIoo,2398 -pyobjc_framework_ApplicationServices-5.2.dist-info/RECORD,, 
-pyobjc_framework_ApplicationServices-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_ApplicationServices-5.2.dist-info/top_level.txt,sha256=AIGBXHJ-VOAo8OSY17mbrfHVoOdFZhs6F_vmGENYrNI,41 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/top_level.txt deleted file mode 100644 index 6c492e7a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ApplicationServices-5.2.dist-info/top_level.txt +++ /dev/null @@ -1,3 +0,0 @@ -ApplicationServices -HIServices -PrintCore diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
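Most of what this stretch of the diff removes is standard wheel metadata from the old `env/` virtualenv. Each `RECORD` file lists one installed file per row in the form `path,sha256=<urlsafe-base64 digest>,size`, with the hash and size left empty for `RECORD` itself and for `.pyc` files. The sketch below is illustrative only and is not part of this repository: the first sample row is copied from the AVKit `RECORD` deleted above, and the parsing logic simply shows how such a row can be read and its digest decoded.

```python
import base64
import csv
import io

# Two rows in the same shape as the dist-info RECORD entries removed above.
# The first is copied from the AVKit RECORD; the second shows an unhashed entry.
sample = io.StringIO(
    "AVKit/__init__.py,sha256=Y_PO3tKET4AaDAkmCB9N1cdg7A4Q7fuVuALe_5KUR6w,720\n"
    "pyobjc_framework_AVKit-5.2.dist-info/RECORD,,\n"
)

for path, digest, size in csv.reader(sample):
    if not digest:  # RECORD itself and *.pyc entries carry no hash or size
        print(f"{path}: unhashed entry")
        continue
    algo, _, b64 = digest.partition("=")
    # The digest is urlsafe base64 without padding; re-pad before decoding.
    raw = base64.urlsafe_b64decode(b64 + "=" * (-len(b64) % 4))
    print(f"{path}: {algo} digest {raw.hex()}, {size} bytes")
```

Running the sketch prints the decoded digest and size for the hashed row and flags the unhashed one, which is all an installer needs to verify or uninstall the files listed in these deleted packages.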
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/METADATA deleted file mode 100644 index 2fc3ccae..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/METADATA +++ /dev/null @@ -1,58 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Automator -Version: 5.2 -Summary: Wrappers for the framework Automator on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Automator -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "Automator" framework on macOS. The Automator framework -supports the development of actions for the Automator application, as well -as the ability to run a workflow in developer applications. An action is -a bundle that, when loaded and run, performs a specific task. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/RECORD deleted file mode 100644 index 55b7b462..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Automator/__init__.py,sha256=Ry4qZVn0bYcIOLpXOnN59auyoyPxmXghaYZhlVPz1JM,704 -Automator/__init__.pyc,, -Automator/_metadata.py,sha256=PGasBleE5j_JQs1IE-VfsOwqogyZAX4o8x_d-8QUH44,5051 -Automator/_metadata.pyc,, -pyobjc_framework_Automator-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Automator-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_Automator-5.2.dist-info/METADATA,sha256=hUCJ8oFkZ-Yu8LbXeImaKvAPo9xYSRbLQamkiq1oUHk,2370 -pyobjc_framework_Automator-5.2.dist-info/RECORD,, -pyobjc_framework_Automator-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_Automator-5.2.dist-info/top_level.txt,sha256=Z8EqY_F6EOikiS0WygbdcnxaNGo-anA0qPt29FzSpTE,10 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/top_level.txt deleted file mode 100644 index 4934f70e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Automator-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Automator diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/METADATA deleted file mode 100644 index 04b9d0cb..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CFNetwork -Version: 5.2 -Summary: Wrappers for the framework CFNetwork on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CFNetwork -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -PyObjC wrappers for the framework "CFNetwork", part of "CoreServices" on -macOS. - -The CFNetwork framework provides a library of abstractions for networking -protocols. The most interesting bits for Python programmers are the -API's for working with proxy autoconfiguration and the API's for networking -diagnotics. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/RECORD deleted file mode 100644 index e83cccf1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -CFNetwork/__init__.py,sha256=b2rx2GeqcXWjiDYcQyIHQC5unPxwUgbPL792rslUz00,1301 -CFNetwork/__init__.pyc,, -CFNetwork/_manual.so,sha256=rVtg8tfiZz1_aiCOe0WGU1G5mtf56LJzdHpqRBfzzS4,16520 -CFNetwork/_metadata.py,sha256=pBXx6nv7992aNYMGWVYifoWAyJVpXHmTULujkffURPw,26100 -CFNetwork/_metadata.pyc,, -pyobjc_framework_CFNetwork-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CFNetwork-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_CFNetwork-5.2.dist-info/METADATA,sha256=Nm5G3BGBpYBRTPj1a1Ta58JeHEfhtTvBhbNMngrGAvw,2397 -pyobjc_framework_CFNetwork-5.2.dist-info/RECORD,, -pyobjc_framework_CFNetwork-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CFNetwork-5.2.dist-info/top_level.txt,sha256=q2Z9JhHHv2Q8zzn9XMN_mb4IrMeGTqi4Ic62XPVuKGA,10 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/top_level.txt deleted file mode 100644 index 77cedb86..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CFNetwork-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CFNetwork diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/METADATA deleted file mode 100644 index d703783a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/METADATA +++ /dev/null @@ -1,58 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CalendarStore -Version: 5.2 -Summary: Wrappers for the framework CalendarStore on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CalendarStore -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "CalendarStore" on macOS 10.5 and later. The CalendarStore -frameworks provides access to the iCal data. It's possible to fetch iCal -records, such as calendars and tasks, as well as modify them and get -notifications when records change in iCal. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/RECORD deleted file mode 100644 index aaedb860..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -CalendarStore/__init__.py,sha256=H53zBlkBjHyTrpO5deN7bc7RSoGP-cQDpTKmsm99QUs,709 -CalendarStore/__init__.pyc,, -CalendarStore/_metadata.py,sha256=FGxyENTwW5zNam7KpQ74rNzOQWoBy4_xONoCHiwu9U0,2878 -CalendarStore/_metadata.pyc,, -pyobjc_framework_CalendarStore-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CalendarStore-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_CalendarStore-5.2.dist-info/METADATA,sha256=etI-RjNwwqBKNCAAXaYH05vVjOQnmS3Exhf_90hKAzk,2369 -pyobjc_framework_CalendarStore-5.2.dist-info/RECORD,, -pyobjc_framework_CalendarStore-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_CalendarStore-5.2.dist-info/top_level.txt,sha256=dX90QOKdXRGwyUcu7ngoERoRnxgytUfiI3JQKydJZmQ,14 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/top_level.txt deleted file mode 100644 index 5e1c4db4..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CalendarStore-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CalendarStore diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/METADATA deleted file mode 100644 index a408b183..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,58 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CloudKit -Version: 5.2 -Summary: Wrappers for the framework CloudKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CloudKit -Platform: MacOS X (>=10.10) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-CoreLocation (>=5.2) -Requires-Dist: pyobjc-framework-CoreData (>=5.2) -Requires-Dist: pyobjc-framework-Accounts (>=5.2) - - -Wrappers for the "CloudKit" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/RECORD deleted file mode 100644 index e429a28f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -CloudKit/__init__.py,sha256=Vv5iH1YrHEaHrf3ulrJ-5hRDlNxSgRJwYTt14UuQqV4,769 -CloudKit/__init__.pyc,, -CloudKit/_metadata.py,sha256=U52-RelfAKn5X0dHLHkWWbuaaORtBOjWo2h1H25UwFE,30435 -CloudKit/_metadata.pyc,, -pyobjc_framework_CloudKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CloudKit-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_CloudKit-5.2.dist-info/METADATA,sha256=wgfnlYRzDPqfnDo7LRPcF0XIV_RIwLygkz0hRICeEL0,2293 -pyobjc_framework_CloudKit-5.2.dist-info/RECORD,, -pyobjc_framework_CloudKit-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_CloudKit-5.2.dist-info/top_level.txt,sha256=z7XD0iXYs36v7_IFOgKCe9zhkmRayocyyxmErwD6ZwQ,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/top_level.txt deleted file mode 100644 index c3dbbed3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CloudKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CloudKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2-py2.7-nspkg.pth b/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2-py2.7-nspkg.pth deleted file mode 100644 index f97ec6ad..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2-py2.7-nspkg.pth +++ /dev/null @@ -1 +0,0 @@ -import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('PyObjCTools',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('PyObjCTools', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('PyObjCTools', [os.path.dirname(p)])));m = m or sys.modules.setdefault('PyObjCTools', types.ModuleType('PyObjCTools'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/INSTALLER deleted file mode 100644 
index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/METADATA deleted file mode 100644 index e1bde445..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Cocoa -Version: 5.2 -Summary: Wrappers for the Cocoa frameworks on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Cocoa,CoreFoundation,Foundation,AppKit -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue 
tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) - -Wrappers for the core Cocoa frameworks: CoreFoundation, Foundation and -AppKit. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use these frameworks and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/RECORD deleted file mode 100644 index ee0e1d49..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/RECORD +++ /dev/null @@ -1,48 +0,0 @@ -AppKit/_AppKit.so,sha256=9LYjiypQbqGF8BGbBRYKUy86Z5TCnv-0eg2Cb2lXGfc,159816 -AppKit/__init__.py,sha256=cTNfe8baf_ZcgW_YqKPcau3_kdM7eb9xV7ixOh4TqdY,4665 -AppKit/__init__.pyc,, -AppKit/_inlines.so,sha256=bVJAAInvz11WqwTQcIrVe0C4r9z-Pqc01g97kCHOjhs,9968 -AppKit/_metadata.py,sha256=ymI7eKNIO_40nvubXzyhQFgpewecIGeYddR8ufYklD0,563341 -AppKit/_metadata.pyc,, -AppKit/_nsapp.py,sha256=VJc587FOttSoTud0cABjENbngjSFFFzvbM-a8mrkzQE,659 -AppKit/_nsapp.pyc,, -Cocoa/__init__.py,sha256=apfl4YruzRG1c2V8E_8Fxu-VJNqHuTn_KmaPYLnNzIQ,502 -Cocoa/__init__.pyc,, -CoreFoundation/_CoreFoundation.so,sha256=Kn4QXqF5IEPKsufpd45vAfinRc4boAd7armGXmmvkdg,60268 -CoreFoundation/__init__.py,sha256=mGZaiL3wrhVbtvCrJblnGh5-yFbkyOeO70e7-oIq7wI,1007 -CoreFoundation/__init__.pyc,, -CoreFoundation/_inlines.so,sha256=0VXVW4GSmALMXwKRWaEZtKwhbWjgehsZV2uhU8PzMbA,16192 -CoreFoundation/_metadata.py,sha256=D2YLzfcHO5epxFdUE4AQ6-rJwuY8YIOfojRnnmQCz9g,139497 -CoreFoundation/_metadata.pyc,, -CoreFoundation/_static.py,sha256=TeFpS_6r1exHDSzxv1yyoYGj4Ik_yxNZK3LhlfqtVok,2839 -CoreFoundation/_static.pyc,, -Foundation/_Foundation.so,sha256=0yhGotusn9ULL2cbv1DJmn7Be2szzFLRRfrj7AEfvgA,52480 -Foundation/__init__.py,sha256=LHqv8xB9tBTtdgj-izSOmwO2PaYTWQqIQfFz6IdhMlA,6450 -Foundation/__init__.pyc,, -Foundation/_context.py,sha256=uylUF5MgCE6qgXi4MepEqnITJ0abwC0mAtYraomnyWk,729 -Foundation/_context.pyc,, -Foundation/_functiondefines.py,sha256=ctvz4YZ2HyJh1uFilMugM6uEzQW_mktlH5KeIME_tpU,773 -Foundation/_functiondefines.pyc,, -Foundation/_inlines.so,sha256=asqShb9HLPQsWmUSrTJ05q6jonuvSyR3XhsKToeTQtM,23044 -Foundation/_metadata.py,sha256=5RLLZKOt8c9fWjL3H8AKurU_6GAwOU1VXYtBDY-tHKA,296737 -Foundation/_metadata.pyc,, -Foundation/_nsindexset.py,sha256=jgNN7StsdP8zuhHQjxbbb1EnP7TTthzvBjx9JN5QE8U,396 -Foundation/_nsindexset.pyc,, -Foundation/_nsobject.py,sha256=IUOciW4vyHv9Fdz9FqaZQwcAhvV0nmiZDkIt_z66fYc,8370 -Foundation/_nsobject.pyc,, -PyObjCTools/AppCategories.py,sha256=zhod6dTH43ciegtnQZCcro5eLque7IWQ-sdrr20V3P0,848 -PyObjCTools/AppCategories.pyc,, -PyObjCTools/AppHelper.py,sha256=DeYBLKohGAZeqeL-2SgXhtmFMyS6jM7ab8zFVi2vqrA,9907 -PyObjCTools/AppHelper.pyc,, -PyObjCTools/Conversion.py,sha256=C-5TaxZr4pSAgRhHi0RaoD55k2fJ2pRQ6GUyRbpflQo,7528 -PyObjCTools/Conversion.pyc,, -PyObjCTools/FndCategories.py,sha256=yM1bazKsfECevYezgQd13tgQKikkwYPAOC-guZg1c3Y,1028 -PyObjCTools/FndCategories.pyc,, -pyobjc_framework_Cocoa-5.2-py2.7-nspkg.pth,sha256=EWk3VWxBM0YtSF753JAPIh3HBrySs8L9P44-sx0lzZE,564 
-pyobjc_framework_Cocoa-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Cocoa-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_Cocoa-5.2.dist-info/METADATA,sha256=7uAgK0W-tr7mriw-ioMA23kKC4gR_c71YINxZjh_Wrs,2143 -pyobjc_framework_Cocoa-5.2.dist-info/RECORD,, -pyobjc_framework_Cocoa-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_Cocoa-5.2.dist-info/namespace_packages.txt,sha256=NRlruBeqb3C4r0mY4rn-7TQyXoY1yVomhiiauw-3Q_0,12 -pyobjc_framework_Cocoa-5.2.dist-info/top_level.txt,sha256=1QsnXKsqfT9IZo77EJz5mxcnXKL_5uurtMmFxy_zu8k,62 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/namespace_packages.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/namespace_packages.txt deleted file mode 100644 index a79bb16a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/namespace_packages.txt +++ /dev/null @@ -1 +0,0 @@ -PyObjCTools diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/top_level.txt deleted file mode 100644 index c2fb7459..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Cocoa-5.2.dist-info/top_level.txt +++ /dev/null @@ -1,6 +0,0 @@ -AppKit -Cocoa -CoreFoundation -Foundation -PyObjCTest -PyObjCTools diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
- - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/METADATA deleted file mode 100644 index 35c4bc08..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/METADATA +++ /dev/null @@ -1,57 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Collaboration -Version: 5.2 -Summary: Wrappers for the framework Collaboration on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Collaboration -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "Collaboration" framework in macOS 10.5 or later. The -Collaboration framework provides access to identities, and manages -user interface elements for selecting identities. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/RECORD deleted file mode 100644 index fad421f8..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Collaboration/__init__.py,sha256=fYeLuD-ev8BmXa4cIIbb2peti9PiaEv5PL9V_ekmdX4,709 -Collaboration/__init__.pyc,, -Collaboration/_metadata.py,sha256=KJnT3GCZEWMRiBWWUFQo5a2Brc2mbqgssyM738m7FVA,2073 -Collaboration/_metadata.pyc,, -pyobjc_framework_Collaboration-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Collaboration-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_Collaboration-5.2.dist-info/METADATA,sha256=tyq4zwmpUnUH2B5yTtiOljDgxli1hyIA9fo1UIpnpVY,2296 -pyobjc_framework_Collaboration-5.2.dist-info/RECORD,, -pyobjc_framework_Collaboration-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_Collaboration-5.2.dist-info/top_level.txt,sha256=3l1JMAAyZ9thzrlKjaFYOL9Ut4mIfao-uxDyMme7GwI,14 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/top_level.txt deleted file mode 100644 index 5df6119b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Collaboration-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Collaboration diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/METADATA deleted file mode 100644 index a79745e2..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ColorSync -Version: 5.2 -Summary: Wrappers for the framework ColorSync on Mac OS X -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License 
-Keywords: PyObjC,ColorSync -Platform: MacOS X (>=10.13) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "ColorSync" framework on MacOSX 10.13 or later. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/RECORD deleted file mode 100644 index a426fa87..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -ColorSync/__init__.py,sha256=ntPX7hUwSDBBZeYVK3xL65chZRwWHxCq2gF55mZ4zfQ,690 -ColorSync/__init__.pyc,, -ColorSync/_metadata.py,sha256=4SyZCGR6tX9fMGCnA0qs3CPWarOStXs6-KhRFxNxHjk,10712 -ColorSync/_metadata.pyc,, -pyobjc_framework_ColorSync-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ColorSync-5.2.dist-info/METADATA,sha256=j3JUonEmNyr_GYzdvY6UHFoeaf3izqmKt6DiViwJr-U,2165 -pyobjc_framework_ColorSync-5.2.dist-info/RECORD,, -pyobjc_framework_ColorSync-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_ColorSync-5.2.dist-info/top_level.txt,sha256=jU9JK3MlBktfnWqqso1cFOAQOBU7NqRPKe4tZqTX8B0,10 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/top_level.txt 
b/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/top_level.txt deleted file mode 100644 index 352d72f4..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ColorSync-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ColorSync diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/METADATA deleted file mode 100644 index edee75e1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Contacts -Version: 5.2 -Summary: Wrappers for the framework Contacts on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Contacts -Platform: MacOS X (>=10.11) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "Contacts" framework on macOS 10.11. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/RECORD deleted file mode 100644 index 5b173b32..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Contacts/_Contacts.so,sha256=DnHyVSCvaPbvlrQYA5ck78zdpZdtWJ4f4e-MNT-wtsQ,9628 -Contacts/__init__.py,sha256=XsE8GiQRuDrQpH7SOAb_7gIFwUNqdptQO1OmZE-WPX0,775 -Contacts/__init__.pyc,, -Contacts/_metadata.py,sha256=RhmVoAxHYvjayZJ8Z760COCX2IektD1FVA3cTb1UN9Q,6823 -Contacts/_metadata.pyc,, -pyobjc_framework_Contacts-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Contacts-5.2.dist-info/METADATA,sha256=ZEPeE9BdttYWUooBaMxayv1CC_yVgFih7MkwoWzwAPg,2147 -pyobjc_framework_Contacts-5.2.dist-info/RECORD,, -pyobjc_framework_Contacts-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_Contacts-5.2.dist-info/top_level.txt,sha256=GLXFyi9WUlQUQzl8PtV9PjzkTTgsLAt_TdmW_CmOA3M,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/top_level.txt deleted file mode 100644 index 0c0adc67..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Contacts-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Contacts diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/METADATA deleted file mode 100644 index 41e4319f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ContactsUI -Version: 5.2 -Summary: Wrappers for the framework ContactsUI on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,ContactsUI -Platform: MacOS X (>=10.11) -Classifier: Development Status :: 5 - Production/Stable -Classifier: 
Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Contacts (>=5.2) - -Wrappers for the "ContactsUI" framework on macOS 10.11. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/RECORD deleted file mode 100644 index 78dd8c9c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -ContactsUI/_ContactsUI.so,sha256=HJ0Lrnga5hWH1TmoIWoepX0PS0q4mzi_YLlLt7z1ap0,9716 -ContactsUI/__init__.py,sha256=cQTaSucNXjOzViObbdfR3DHBEmJuzBiyp0LdsGH5uYQ,810 -ContactsUI/__init__.pyc,, -ContactsUI/_metadata.py,sha256=_L_WaLwYsD6tg63XzINtMRa4fGj1XimDAn3SI-I-0bA,414 -ContactsUI/_metadata.pyc,, -pyobjc_framework_ContactsUI-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ContactsUI-5.2.dist-info/METADATA,sha256=x6F0Ocxg3XJhvKVpRpDeIYWFE3ld9XrsKrkoWaxO83U,2204 -pyobjc_framework_ContactsUI-5.2.dist-info/RECORD,, -pyobjc_framework_ContactsUI-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_ContactsUI-5.2.dist-info/top_level.txt,sha256=flGw8P6mT3VkC_U6W3Kp-pzdx7g0Y5u28YRccgR_Kz8,11 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/top_level.txt 
b/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/top_level.txt deleted file mode 100644 index 2ed39c69..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ContactsUI-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ContactsUI diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/METADATA deleted file mode 100644 index dcf28f11..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreAudio -Version: 5.2 -Summary: Wrappers for the framework CoreAudio on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreAudio -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "CoreAudio" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/RECORD deleted file mode 100644 index e6207c3c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -CoreAudio/_CoreAudio.so,sha256=cYnhqT7632AVWi5238UH63lXbyVZKjSqPTS-GfmTH88,31736 -CoreAudio/__init__.py,sha256=-CO50HmuOb74pX2GwSjsN2an8oX-OGPyG-hkbs7TNYE,867 -CoreAudio/__init__.pyc,, -CoreAudio/_inlines.so,sha256=P9MUp6_cQ9bFo2evbhyVjRcC9ZuOyc3zZBCi1Q3RjSg,10640 -CoreAudio/_metadata.py,sha256=e9jGHlJ-Jl0SSYPMKflN-ckbNZudmDRIjamtMPriYo8,52183 -CoreAudio/_metadata.pyc,, -pyobjc_framework_CoreAudio-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreAudio-5.2.dist-info/METADATA,sha256=0F7H30w1m3V6dHoyv00SjDi72QLDZaBEHywatg3xsRA,2135 -pyobjc_framework_CoreAudio-5.2.dist-info/RECORD,, -pyobjc_framework_CoreAudio-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreAudio-5.2.dist-info/top_level.txt,sha256=vRI2jCFLgVfpt1iHtd3SG9jjM0YmlXsEiTICu94BHLo,10 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/top_level.txt deleted file mode 100644 index 11da01ce..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudio-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreAudio diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/METADATA deleted file mode 100644 index cee0365e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreAudioKit -Version: 5.2 -Summary: Wrappers for the framework CoreAudioKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: 
PyObjC,CoreAudioKit -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-CoreAudio (>=5.2) - -Wrappers for the "CoreAudioKit" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/RECORD deleted file mode 100644 index a206bd33..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -CoreAudioKit/_CoreAudioKit.so,sha256=sS0m--8f3I2fzll9w10cDmWPf7RzkVkvveAUI4kxZD0,9684 -CoreAudioKit/__init__.py,sha256=-E0nF79uRHLVp9qe1ql312XEFIdDn7FhNN3IEr4CuPA,737 -CoreAudioKit/__init__.pyc,, -CoreAudioKit/_metadata.py,sha256=1NFyUvYN7yNpUsEveUGb1YJxzCEeQMpHKMksgEFdowM,1490 -CoreAudioKit/_metadata.pyc,, -pyobjc_framework_CoreAudioKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreAudioKit-5.2.dist-info/METADATA,sha256=iWQ_b6oszJIZpU6DlHHLjyFASvWZ-OeGqw2i63JFWis,2197 -pyobjc_framework_CoreAudioKit-5.2.dist-info/RECORD,, -pyobjc_framework_CoreAudioKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreAudioKit-5.2.dist-info/top_level.txt,sha256=YzPCROFcH7dWfQOaRPhgKuLu43wvFPHsLexDde99EpU,13 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: 
cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 1f998509..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreAudioKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreAudioKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
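The hunks above and below remove the vendored pyobjc `.dist-info` metadata for the Cocoa-family wrappers (AppKit, Foundation, CoreFoundation, CoreBluetooth, and related frameworks). These packages are macOS-only; their purpose is to expose Objective-C calls such as `NSWorkspace.frontmostApplication()`, which is the kind of call used to read the active application on macOS. The snippet below is a hedged illustration of that usage, assuming `pyobjc-framework-Cocoa` is installed on a macOS machine; it is not code taken from this repository.

```python
# Hedged illustration only: shows the kind of macOS call these pyobjc wrappers
# provide. Assumes pyobjc-framework-Cocoa is installed and runs on macOS;
# this is not code from this repository.
from AppKit import NSWorkspace  # provided by pyobjc-framework-Cocoa


def frontmost_app_name():
    """Return the localized name of the frontmost macOS application, or None."""
    app = NSWorkspace.sharedWorkspace().frontmostApplication()
    return app.localizedName() if app is not None else None


if __name__ == "__main__":
    print(frontmost_app_name())
```

Because `frontmostApplication()` can return `None` when no application is active, the helper guards for that case rather than assuming a result.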
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/METADATA deleted file mode 100644 index 15fedc9e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreBluetooth -Version: 5.2 -Summary: Wrappers for the framework CoreBluetooth on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreBluetooth -Platform: MacOS X (>=10.10) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "CoreBluetooth" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/RECORD deleted file mode 100644 index eb3cc312..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -CoreBluetooth/_CoreBluetooth.so,sha256=MlAsyXAw7o4rlPAgQO4LHRNkrEZQwejh_X6Lhxp2Vt8,13916 -CoreBluetooth/__init__.py,sha256=mzwpJZnG4I4utNe9nT9ilQ0fYyI-ttYVJaGqrOpzzCs,778 -CoreBluetooth/__init__.pyc,, -CoreBluetooth/_metadata.py,sha256=R5lma02sXgN-31x-p_hzKH2RcaX3lZnoPOG6QQt1Teg,11513 -CoreBluetooth/_metadata.pyc,, -pyobjc_framework_CoreBluetooth-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreBluetooth-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_CoreBluetooth-5.2.dist-info/METADATA,sha256=Re-5izuqMXAK-xsK9Mt6-umwSQAmI_ttlLhpQCkhkpk,2161 -pyobjc_framework_CoreBluetooth-5.2.dist-info/RECORD,, -pyobjc_framework_CoreBluetooth-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreBluetooth-5.2.dist-info/top_level.txt,sha256=mrd_Sh1JLPW4g_20hfpKNneNXo-XHeaIG4n0zAvaIP0,14 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/top_level.txt deleted file mode 100644 index 421b6ee0..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreBluetooth-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreBluetooth diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/METADATA deleted file mode 100644 index 91460418..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreData -Version: 5.2 -Summary: Wrappers for the framework CoreData on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreData -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "CoreData" framework on macOS. The Core Data framework -provides generalized and automated solutions to common tasks associated -with object life-cycle and object graph management, including persistence. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/RECORD deleted file mode 100644 index ad4a4b78..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/RECORD +++ /dev/null @@ -1,13 +0,0 @@ -CoreData/_CoreData.so,sha256=w4qxY5IEjEKiIdUsv8j1Uro9ouSpjFMdvkGjEcwFv-A,13928 -CoreData/__init__.py,sha256=X2bX04QVdv6dmtatGHlMNiPww0GxkYcsyVaq4zdwl2Y,735 -CoreData/__init__.pyc,, -CoreData/_convenience.py,sha256=rC9uPFNtIU17h8IASBQ8C0gfZ-aHT0R_-fFQOXWDLcQ,1750 -CoreData/_convenience.pyc,, -CoreData/_metadata.py,sha256=t54YPgsocnK3T3b3vqNf-O5CNSdNMz6E5SByplgvzHc,24075 -CoreData/_metadata.pyc,, -pyobjc_framework_CoreData-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreData-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_CoreData-5.2.dist-info/METADATA,sha256=-oAvlcdUv9oXffrnKPWpMWTNmNdSr_6w0lYMwZL59qs,2302 -pyobjc_framework_CoreData-5.2.dist-info/RECORD,, -pyobjc_framework_CoreData-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreData-5.2.dist-info/top_level.txt,sha256=6cRCSiJ5-kDbx3eEep6NvyBVrvLxuOB3XRQKYwkdSfo,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/top_level.txt deleted file mode 100644 index 10c6070a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreData-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreData diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, 
Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/METADATA deleted file mode 100644 index 2f1749ec..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreLocation -Version: 5.2 -Summary: Wrappers for the framework CoreLocation on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreLocation -Platform: MacOS X (>=10.6) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for framework 'CoreLocation' on macOS 10.6. This framework provides -an interface for dealing with the physical location of a machine, which allows -for geo-aware applications. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/RECORD deleted file mode 100644 index 09e2f901..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -CoreLocation/_CoreLocation.so,sha256=XcSoXQP7wi6mX85Ya8QZ_g_XXYApziFuTaAppTZYPiE,13860 -CoreLocation/__init__.py,sha256=vzNznCdKlv--SBI9BAdg9y9e6Dfkb0pZBchwolO3kOc,1119 -CoreLocation/__init__.pyc,, -CoreLocation/_metadata.py,sha256=5qfFcYfi6pMrZyvaZk19G7j7WTDPc1EdwMP7lgF4qyY,8980 -CoreLocation/_metadata.pyc,, -pyobjc_framework_CoreLocation-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreLocation-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_CoreLocation-5.2.dist-info/METADATA,sha256=iAwxZHHE_T5HSaCb3df_B_rCGm1vsMoYi3bj6_RgUGA,2288 -pyobjc_framework_CoreLocation-5.2.dist-info/RECORD,, -pyobjc_framework_CoreLocation-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreLocation-5.2.dist-info/top_level.txt,sha256=ofqEl-EuGtkMOcxPfHjFCX_xKt9OWt1c5464Sc5egCE,13 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/top_level.txt deleted file mode 100644 index cdba9984..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreLocation-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreLocation diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/METADATA deleted file mode 100644 index 0abe9a19..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreML -Version: 5.2 -Summary: Wrappers for the framework CoreML on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: 
pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreML -Platform: MacOS X (>=10.13) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "CoreML" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/RECORD deleted file mode 100644 index c2d00d6f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -CoreML/_CoreML.so,sha256=ytZDnU59gJtPBbg2FIYmcKyp9YLRvAv2Jb6AhCSCmEQ,9676 -CoreML/__init__.py,sha256=enpsahH1k3u_eFVF09AqNpwYgT6hXzOPoDJ8NaZxpn4,700 -CoreML/__init__.pyc,, -CoreML/_metadata.py,sha256=vAJ4avLZaYBsS1d5407xhv_BzoTgMjuzQ7JTAuWlIrY,5441 -CoreML/_metadata.pyc,, -pyobjc_framework_CoreML-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreML-5.2.dist-info/METADATA,sha256=g5cbx3gB7bCJz-kWuPapb7S_sDF4euxYZQ1n8YdXZlo,2133 -pyobjc_framework_CoreML-5.2.dist-info/RECORD,, -pyobjc_framework_CoreML-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreML-5.2.dist-info/top_level.txt,sha256=Uu3gkPlQDO5j-W2w8qzMWN9i2fGd__ZWct1flwsvhIc,7 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git 
a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/top_level.txt deleted file mode 100644 index 3419ebd9..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreML-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreML diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/METADATA deleted file mode 100644 index de419c41..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreMedia -Version: 5.2 -Summary: Wrappers for the framework CoreMedia on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreMedia -Platform: MacOS X (>=10.7) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "CoreMedia" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/RECORD deleted file mode 100644 index f71b1832..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/RECORD +++ /dev/null @@ -1,12 +0,0 @@ -CoreMedia/_CoreMedia.so,sha256=E08rBLlKly-1Fk8FrMZ1DkEBOnPZCDvJkE5U3VMG25w,15844 -CoreMedia/__init__.py,sha256=bz2f5fYp1AxZngUhylx0lOldtUUjUACb7jogoCIxoEQ,775 -CoreMedia/__init__.pyc,, -CoreMedia/_macros.py,sha256=jTxuuE5vaXXekt2eqlHwTzIOjaoXrjDP-Etc7eEzhY0,1875 -CoreMedia/_macros.pyc,, -CoreMedia/_metadata.py,sha256=nYcWEHmrBZzYltWzsoKaIQk1NMfMayWgWyKjimV9dCQ,86401 -CoreMedia/_metadata.pyc,, -pyobjc_framework_CoreMedia-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreMedia-5.2.dist-info/METADATA,sha256=UXU32pQZPjqOTdLuEyxCQoGo1FT3w_oP65-8Vx3kq3M,2144 -pyobjc_framework_CoreMedia-5.2.dist-info/RECORD,, -pyobjc_framework_CoreMedia-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreMedia-5.2.dist-info/top_level.txt,sha256=gAXk_qAr7wKvm969FKXllkgEHTo__yzo3li3x9fPI4Q,10 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/top_level.txt deleted file mode 100644 index e271e9c7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMedia-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreMedia diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/METADATA deleted file mode 100644 index 2b549b51..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreMediaIO -Version: 5.2 -Summary: Wrappers for the framework CoreMediaIO on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT 
License -Keywords: PyObjC,CoreMediaIO -Platform: MacOS X (>=10.7) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "CoreMediaIO" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/RECORD deleted file mode 100644 index bb5b136b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -CoreMediaIO/_CoreMediaIO.so,sha256=1hJLDT3OJc38_0mo3B7XoJvebOZiuH2plGvjEieoURs,10452 -CoreMediaIO/__init__.py,sha256=E_0u4Nw1UZ2RPT2_bmZcKa15SLjCx9_efChFIChxQG4,1235 -CoreMediaIO/__init__.pyc,, -CoreMediaIO/_metadata.py,sha256=f9nbZ6Eck8EhXW1J8Zgj66gdnEHJ6lVSjUAm58R0ujA,23956 -CoreMediaIO/_metadata.pyc,, -pyobjc_framework_CoreMediaIO-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreMediaIO-5.2.dist-info/METADATA,sha256=f-sdixpK1QE6UgsiJr0NO7c_jl4zUgtavbxsmy55iOs,2152 -pyobjc_framework_CoreMediaIO-5.2.dist-info/RECORD,, -pyobjc_framework_CoreMediaIO-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreMediaIO-5.2.dist-info/top_level.txt,sha256=wX1XVgkBWb7BCtTrqZTIpy9KrkSknFMhmtuo8V43ls8,12 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git 
a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/top_level.txt deleted file mode 100644 index 0d60b9c3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreMediaIO-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreMediaIO diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/METADATA deleted file mode 100644 index 01f191d9..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreServices -Version: 5.2 -Summary: Wrappers for the framework CoreServices on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreServices,CoreServices.CarbonCore,CoreServices.LaunchServices,CoreServices.DictionaryServices,CoreServices.SearchKit,CoreServices.Metadata -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-FSEvents (>=5.2) - -Wrappers for the "CoreServices" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/RECORD deleted file mode 100644 index ce5a5c36..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/RECORD +++ /dev/null @@ -1,28 +0,0 @@ -CoreServices/CarbonCore/__init__.py,sha256=Tgsmn76OwGZGRc1M9Ku9G0YIARcCcX9-RglTAgFzVBY,871 -CoreServices/CarbonCore/__init__.pyc,, -CoreServices/CarbonCore/_metadata.py,sha256=jAO68T2DB9dVnRORRjwYOfuNVPXntJAU61pSbh1-25U,25067 -CoreServices/CarbonCore/_metadata.pyc,, -CoreServices/DictionaryServices/__init__.py,sha256=79BhGOYsJ1fnpKbn4qEtwY2nWYsgs7Ibp2p8xfEVO-M,772 -CoreServices/DictionaryServices/__init__.pyc,, -CoreServices/DictionaryServices/_metadata.py,sha256=VoJwYcA-hNFKt7VIuKFv9pRJeMuJnsR6vpHitA0oUw0,988 -CoreServices/DictionaryServices/_metadata.pyc,, -CoreServices/LaunchServices/__init__.py,sha256=BAsQOgnyNbHf8eM-HRXx0hKB8oWQm3qiZ9V8OMKDUs8,762 -CoreServices/LaunchServices/__init__.pyc,, -CoreServices/LaunchServices/_metadata.py,sha256=IUuPNA6t9QmKrOn3Rbxp9J5VODpGAzNfMEBj1anGXPk,39596 -CoreServices/LaunchServices/_metadata.pyc,, -CoreServices/Metadata/__init__.py,sha256=KAlBTPYM_dqSFAdNWaYvGV56kDqEfq4ODClnc8ZWfBg,806 -CoreServices/Metadata/__init__.pyc,, -CoreServices/Metadata/_metadata.py,sha256=5icspzOpA7J7Fpp4l66svy2U7ePxPzyWcdbXh-53oc8,15161 -CoreServices/Metadata/_metadata.pyc,, -CoreServices/SearchKit/__init__.py,sha256=VwQeFfa8bIlmjtPIZWi6laRBftESoQZMfjvAWyo8R1s,4013 -CoreServices/SearchKit/__init__.pyc,, -CoreServices/SearchKit/_metadata.py,sha256=V4QMKojvPi4NJtthm-1JFNh6OJ_bGRrW53SrWXPD4Vk,9875 -CoreServices/SearchKit/_metadata.pyc,, -CoreServices/__init__.py,sha256=v3eDoawUctVOO2BnYPCUmHOHqEZyC1riefCEkzSjXYU,939 -CoreServices/__init__.pyc,, -CoreServices/_inlines.so,sha256=B8-I04u0jUNN_c2Bo3sP_LEQt6dMZPiuHUUw2tx00aw,9744 -pyobjc_framework_CoreServices-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreServices-5.2.dist-info/METADATA,sha256=CJxKiNcUGt0K-VsnNpl9PY0_ioAwHnewMPmSAwX9DjU,2279 -pyobjc_framework_CoreServices-5.2.dist-info/RECORD,, -pyobjc_framework_CoreServices-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreServices-5.2.dist-info/top_level.txt,sha256=Ey6Ylpurgiktt-ECuhcVfDXaAgzWuhNmez0avWtzcVE,13 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/top_level.txt 
b/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/top_level.txt deleted file mode 100644 index 244620cc..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreServices-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreServices diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/METADATA deleted file mode 100644 index 6d6c28d9..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreSpotlight -Version: 5.2 -Summary: Wrappers for the framework CoreSpotlight on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreSpotlight -Platform: MacOS X (>=10.13) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "CoreSpotlight" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/RECORD deleted file mode 100644 index b821bc34..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -CoreSpotlight/_CoreSpotlight.so,sha256=xjRYYISFt2_lxgxgDlVAuL7bLGmTzgbZgwB934GRmZ8,9748 -CoreSpotlight/__init__.py,sha256=-Zl_Fhjs88SKt5zMquen9aD7wdgrgLplFmASUC2u-dg,763 -CoreSpotlight/__init__.pyc,, -CoreSpotlight/_metadata.py,sha256=08HuXVUeggeghLc4aemwi1uZ92arfAvw_768wFhsQjM,4834 -CoreSpotlight/_metadata.pyc,, -pyobjc_framework_CoreSpotlight-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreSpotlight-5.2.dist-info/METADATA,sha256=5pv9LD7hhDZ5-mgDexQpOpo2rdgf2uMugLaXNY0ZvJQ,2161 -pyobjc_framework_CoreSpotlight-5.2.dist-info/RECORD,, -pyobjc_framework_CoreSpotlight-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreSpotlight-5.2.dist-info/top_level.txt,sha256=3YzoRpq-hoGPMlIjEcfrJ1OWOe0t8xzz5xFco8n0vAw,14 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/top_level.txt deleted file mode 100644 index c422c3b1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreSpotlight-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreSpotlight diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/METADATA deleted file mode 100644 index cb9ab748..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/METADATA +++ /dev/null @@ -1,57 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreText -Version: 5.2 -Summary: Wrappers for the framework CoreText on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreText -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - -Wrappers for the "CoreText" framework on macOS 10.5 or later. Core Text is an -advanced, low-level technology for laying out text and handling fonts. It is -designed for high performance and ease of use. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/RECORD deleted file mode 100644 index 6edfeb6b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -CoreText/__init__.py,sha256=I2NeKgZ_KB7DKV9esUXwh9XHnzrHP9qGC34WpPssheU,873 -CoreText/__init__.pyc,, -CoreText/_manual.so,sha256=LHvzmNPDhD_s8YSVWlM67yIh7qZDybwTYQX25-YnvMw,17796 -CoreText/_metadata.py,sha256=7XgS9lzwFi70sqUzKG8cEjSy6Lmo7rctfpg-YVd13f8,92754 -CoreText/_metadata.pyc,, -pyobjc_framework_CoreText-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreText-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_CoreText-5.2.dist-info/METADATA,sha256=mHqKtjCJo444tSJZODQo7kwUe9354dJgxdPKX70BCf0,2341 -pyobjc_framework_CoreText-5.2.dist-info/RECORD,, -pyobjc_framework_CoreText-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreText-5.2.dist-info/top_level.txt,sha256=el7Md_M_JEGtUwR5FVoJ75sdEa89FGQ8-h7eR2u8pAo,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/top_level.txt deleted file mode 100644 index e533398d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreText-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreText diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/METADATA deleted file mode 100644 index 926d3321..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CoreWLAN -Version: 5.2 -Summary: Wrappers for the framework CoreWLAN on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CoreWLAN -Platform: 
MacOS X (>=10.6) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "CoreWLAN" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/RECORD deleted file mode 100644 index 3f3edde9..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -CoreWLAN/_CoreWLAN.so,sha256=mgc2LzAq4YEgIgHJqgO81r_t6RuMXjUiz_SwziJ3ae8,9652 -CoreWLAN/__init__.py,sha256=nZjwr5mbwdc-vA8d-48i4W2B1UXZ865nRiDTHCew_kQ,2933 -CoreWLAN/__init__.pyc,, -CoreWLAN/_metadata.py,sha256=Xwbub2D_lwBzbMSq9RTO_mSxN--pkhbOpCX7mAoEXRI,15261 -CoreWLAN/_metadata.pyc,, -pyobjc_framework_CoreWLAN-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CoreWLAN-5.2.dist-info/METADATA,sha256=GylfMVVyX2jDjsBnLXE5ufEslfrMKNJLwugOPxNqfXo,2140 -pyobjc_framework_CoreWLAN-5.2.dist-info/RECORD,, -pyobjc_framework_CoreWLAN-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CoreWLAN-5.2.dist-info/top_level.txt,sha256=mcscvq82uk-8ot3s0quSJkHdRpmVUOcsz9wLk7Evzh4,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/top_level.txt 
b/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/top_level.txt deleted file mode 100644 index fb6c266c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CoreWLAN-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CoreWLAN diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/METADATA deleted file mode 100644 index e25dd4f1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-CryptoTokenKit -Version: 5.2 -Summary: Wrappers for the framework CryptoTokenKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,CryptoTokenKit -Platform: MacOS X (>=10.10) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "CryptoTokenKit" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/RECORD deleted file mode 100644 index c853f04a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -CryptoTokenKit/_CryptoTokenKit.so,sha256=1nHYhg5VL3JUWd_KUiEIsjpFLlNLA4HEiyj2EY7AxzM,13900 -CryptoTokenKit/__init__.py,sha256=a42P5KzNUifxDJlrQEy5qUvnPBF2NMt2xYHw8_Nu6e8,771 -CryptoTokenKit/__init__.pyc,, -CryptoTokenKit/_metadata.py,sha256=S4U-EQ1xH40-ydVIVd4zTP_7Pzpoy9YShOuVeyya-ZM,6713 -CryptoTokenKit/_metadata.pyc,, -pyobjc_framework_CryptoTokenKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_CryptoTokenKit-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_CryptoTokenKit-5.2.dist-info/METADATA,sha256=P8F_oi5SgL13qP6A0rp3kklP4F5AceZsNr591xBAkzY,2165 -pyobjc_framework_CryptoTokenKit-5.2.dist-info/RECORD,, -pyobjc_framework_CryptoTokenKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_CryptoTokenKit-5.2.dist-info/top_level.txt,sha256=nShCqd0TomW8L4eyDiVLMNBqN3qbmG6-nUlzPcpnSEo,15 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/top_level.txt deleted file mode 100644 index d762049e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_CryptoTokenKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -CryptoTokenKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/METADATA deleted file mode 100644 index 310f13fd..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-DVDPlayback -Version: 5.2 -Summary: Wrappers for the framework DVDPlayback on macOS -Home-page: 
https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,DVDPlayback -Platform: MacOS X (>=10.7) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "DVDPlayback" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/RECORD deleted file mode 100644 index ac158855..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -DVDPlayback/__init__.py,sha256=HziEehgcA0sKQr2FEv1EuS4fih377gGnEmbfejC7Z_I,705 -DVDPlayback/__init__.pyc,, -DVDPlayback/_metadata.py,sha256=7jvlzg0SGBXgIl6olZ8TWTtkVtQY8OZla2NRNf9YrS8,18804 -DVDPlayback/_metadata.pyc,, -pyobjc_framework_DVDPlayback-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_DVDPlayback-5.2.dist-info/METADATA,sha256=7oRyFghrHu72iLWNxi3vYTW-rqmgpEU6IpHW3ThAkM4,2153 -pyobjc_framework_DVDPlayback-5.2.dist-info/RECORD,, -pyobjc_framework_DVDPlayback-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_DVDPlayback-5.2.dist-info/top_level.txt,sha256=un6Qq_5hDVrr8oEs4mbxIEfV-XtVRNsBaIYsNA4xfgw,12 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: 
py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/top_level.txt deleted file mode 100644 index ac88ebfc..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DVDPlayback-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -DVDPlayback diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/METADATA deleted file mode 100644 index ad34f817..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/METADATA +++ /dev/null @@ -1,52 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-DictionaryServices -Version: 5.2 -Summary: Wrappers for the framework DictionaryServices on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,DictionaryServices -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-CoreServices (>=5.2) - - -Deprecated wrappers for the "DictionaryServices" framework on macOS 10.5 or later. - -Use package "CoreServices" instead. 
- - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/RECORD deleted file mode 100644 index c608340e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/RECORD +++ /dev/null @@ -1,8 +0,0 @@ -DictionaryServices/__init__.py,sha256=2ca8I9sbBgDld2T2FvjTpYJfUynSMHeQVNJLtxfnkLo,760 -DictionaryServices/__init__.pyc,, -pyobjc_framework_DictionaryServices-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_DictionaryServices-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_DictionaryServices-5.2.dist-info/METADATA,sha256=wQVtMWYjCKiMBpwVbwzgtPCefHIpDWdMVF7gC7tJYk0,2002 -pyobjc_framework_DictionaryServices-5.2.dist-info/RECORD,, -pyobjc_framework_DictionaryServices-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_DictionaryServices-5.2.dist-info/top_level.txt,sha256=vUD5rn2985tcGYmPEeklVGYKVK8CUJIwsnSDA9O0iz4,19 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/top_level.txt deleted file mode 100644 index 0b4402c5..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DictionaryServices-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -DictionaryServices diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/METADATA deleted file mode 100644 index 14db479f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-DiscRecording -Version: 5.2 -Summary: Wrappers for the framework DiscRecording on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,DiscRecording -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License 
:: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "DiscRecording" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/RECORD deleted file mode 100644 index f306a884..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -DiscRecording/_DiscRecording.so,sha256=564BMz8MBQP6EFg5zJHVxaYLdt5-UBJILP6t_11OdmE,9716 -DiscRecording/__init__.py,sha256=OUV8rt8_CNFhqQePzPbc0j27sTnfJ4mVJzVL5ttQaUU,1359 -DiscRecording/__init__.pyc,, -DiscRecording/_metadata.py,sha256=XM20LkjAJ0JG-RZ5mai4fpMQTMZRUlSd-W04QDeJ3Xc,40311 -DiscRecording/_metadata.pyc,, -pyobjc_framework_DiscRecording-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_DiscRecording-5.2.dist-info/METADATA,sha256=ogz6IowrtQCMlSgpkh9Z2Dt6oNCqT3aq_Ryx7Zu_WUE,2151 -pyobjc_framework_DiscRecording-5.2.dist-info/RECORD,, -pyobjc_framework_DiscRecording-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_DiscRecording-5.2.dist-info/top_level.txt,sha256=p19AVg3GcRKZZZBwcYyxpLAWk-7EOr6Y90i0cnNu41c,14 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/top_level.txt deleted file mode 100644 index f790f7ed..00000000 --- 
a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecording-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -DiscRecording diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/METADATA deleted file mode 100644 index a84fc164..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-DiscRecordingUI -Version: 5.2 -Summary: Wrappers for the framework DiscRecordingUI on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,DiscRecordingUI -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-DiscRecording (>=5.2) - - -Wrappers for the "DiscRecordingUI" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/RECORD deleted file mode 100644 index caf49f3d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -DiscRecordingUI/__init__.py,sha256=kjlzhh_0bR5Vy_-gwN8E62YfgXlDfb1K8wfllA3qy3Q,770 -DiscRecordingUI/__init__.pyc,, -DiscRecordingUI/_metadata.py,sha256=JGlDlutZHT1wTrU5TDJn7aYh9Y8e4qMvKJNwACH9BP4,8011 -DiscRecordingUI/_metadata.pyc,, -pyobjc_framework_DiscRecordingUI-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_DiscRecordingUI-5.2.dist-info/METADATA,sha256=OTCRNG_0YfVNbygwlBWN8eZrAbMkpyB_iG_5R2IbveQ,2214 -pyobjc_framework_DiscRecordingUI-5.2.dist-info/RECORD,, -pyobjc_framework_DiscRecordingUI-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_DiscRecordingUI-5.2.dist-info/top_level.txt,sha256=rG8RtuHxOcc_ZDA7OuYYVQZIDhhjX8tOSIkXYezX3_c,16 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/top_level.txt deleted file mode 100644 index b33bdde8..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiscRecordingUI-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -DiscRecordingUI diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/METADATA deleted file mode 100644 index 146e620a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-DiskArbitration -Version: 5.2 -Summary: Wrappers for the framework DiskArbitration on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,DiskArbitration 
-Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "DiskArbitration" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/RECORD deleted file mode 100644 index fd5fc1f4..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -DiskArbitration/__init__.py,sha256=qVSR4o7BRDJphff5WiAtIMpRFzUF5lwZ72mNq2gV8lo,741 -DiskArbitration/__init__.pyc,, -DiskArbitration/_metadata.py,sha256=zdmnXs6uVFEpnU4Ufn4Qo9dxgFQRGD4RVV3lwxTunSM,9216 -DiskArbitration/_metadata.pyc,, -pyobjc_framework_DiskArbitration-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_DiskArbitration-5.2.dist-info/METADATA,sha256=zFTv99y0KLub5Fef7qk3E78IbbrCezTu3b4AWyIEGIE,2160 -pyobjc_framework_DiskArbitration-5.2.dist-info/RECORD,, -pyobjc_framework_DiskArbitration-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_DiskArbitration-5.2.dist-info/top_level.txt,sha256=WabqnK_h25Ngv3gE7sNICbqYziD_Tfg0WPDigZReZTY,16 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git 
a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/top_level.txt deleted file mode 100644 index 00600a9e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_DiskArbitration-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -DiskArbitration diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/METADATA deleted file mode 100644 index 308c16da..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-EventKit -Version: 5.2 -Summary: Wrappers for the framework Accounts on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,EventKit -Platform: MacOS X (>=10.8) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "EventKit" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/RECORD deleted file mode 100644 index 405de8b2..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -EventKit/__init__.py,sha256=EEHApJLReLNbnNYR5RDYzlyoUmA8A7v82uzOsOEVn14,689 -EventKit/__init__.pyc,, -EventKit/_metadata.py,sha256=HGvv1gSH01dLYT-M6qr8gbNtmSXoJ9hYDdSItK_70OM,6941 -EventKit/_metadata.pyc,, -pyobjc_framework_EventKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_EventKit-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_EventKit-5.2.dist-info/METADATA,sha256=zd8vGwrcD76ijr6Py4RgT9OMD2TCwCSmD_vRHosSNNY,2141 -pyobjc_framework_EventKit-5.2.dist-info/RECORD,, -pyobjc_framework_EventKit-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_EventKit-5.2.dist-info/top_level.txt,sha256=KzPPjnfngQY3qhxRq-8Alsk84j6xXEgbIyix_Kir8EE,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 651e08ee..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_EventKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -EventKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2-py3.7-nspkg.pth b/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2-py3.7-nspkg.pth deleted file mode 100644 index f97ec6ad..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2-py3.7-nspkg.pth +++ /dev/null @@ -1 +0,0 @@ -import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('PyObjCTools',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('PyObjCTools', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('PyObjCTools', [os.path.dirname(p)])));m = m or sys.modules.setdefault('PyObjCTools', types.ModuleType('PyObjCTools'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/INSTALLER 
b/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/METADATA deleted file mode 100644 index 0b0ed25b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ExceptionHandling -Version: 5.2 -Summary: Wrappers for the framework ExceptionHandling on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,ExceptionHandling -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "ExceptionHandling" framework on macOS. The ExceptionHandling -framework provides facilities for monitoring and debugging exceptional -conditions in Cocoa programs. - -PyObjC also provides low-level debugging utilities beyond the core -ExceptionHandling framework in the module ``PyObjCTools.Debugging``. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/RECORD deleted file mode 100644 index 2add7b65..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -ExceptionHandling/__init__.py,sha256=tNoHXHHYGyd-i3EigJOWSIahFIZy3OH5GvIl6LI3Tbs,738 -ExceptionHandling/__init__.pyc,, -ExceptionHandling/_metadata.py,sha256=gNXhjjnCmVowo57QCKWYlazvTPDTTrI7ojvtwGCmcvI,2679 -ExceptionHandling/_metadata.pyc,, -PyObjCTools/Debugging.py,sha256=B6DVj5g4C7M8rn0td7_nlYy7u9uQbpld-1mDPOdmZ6I,6044 -PyObjCTools/Debugging.pyc,, -pyobjc_framework_ExceptionHandling-5.2-py3.7-nspkg.pth,sha256=EWk3VWxBM0YtSF753JAPIh3HBrySs8L9P44-sx0lzZE,564 -pyobjc_framework_ExceptionHandling-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ExceptionHandling-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_ExceptionHandling-5.2.dist-info/METADATA,sha256=UF1q524Uf8qtrH9VFxwPNuMhCPkpFHYPiCIj-8dlCGE,2428 -pyobjc_framework_ExceptionHandling-5.2.dist-info/RECORD,, -pyobjc_framework_ExceptionHandling-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_ExceptionHandling-5.2.dist-info/namespace_packages.txt,sha256=NRlruBeqb3C4r0mY4rn-7TQyXoY1yVomhiiauw-3Q_0,12 -pyobjc_framework_ExceptionHandling-5.2.dist-info/top_level.txt,sha256=9dZWP3ne-rbt6Zo7CxEHR5dISz4ZAn4PknLp-VV8bXs,30 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/namespace_packages.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/namespace_packages.txt deleted file mode 100644 index a79bb16a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/namespace_packages.txt +++ /dev/null @@ -1 +0,0 @@ -PyObjCTools diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/top_level.txt deleted file mode 100644 index 758d363c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExceptionHandling-5.2.dist-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -ExceptionHandling -PyObjCTools diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/INSTALLER 
b/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/METADATA deleted file mode 100644 index 7d30b4b7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ExternalAccessory -Version: 5.2 -Summary: Wrappers for the framework ExternalAccessory on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,ExternalAccessory -Platform: MacOS X (>=10.13) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "ExternalAccessory" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/RECORD deleted file mode 100644 index a44cba6d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -ExternalAccessory/_ExternalAccessory.so,sha256=gPiueyCoyKgPAZo460Dg5XLvgAKzFL9L7GzVux5PaIU,9780 -ExternalAccessory/__init__.py,sha256=i6HTlHFsaPf2jgkZYsVSt9SX77natMDGu7CN_Pln_f0,798 -ExternalAccessory/__init__.pyc,, -ExternalAccessory/_metadata.py,sha256=Ls2bfCO7Lsl9UDl7sTyMrzk08lKp0u0DeDGnbJpwMZo,1767 -ExternalAccessory/_metadata.pyc,, -pyobjc_framework_ExternalAccessory-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ExternalAccessory-5.2.dist-info/METADATA,sha256=mJxz5bABwPj-7SpZ4i-iGevi28n5RSxv90O1E8v_CNw,2177 -pyobjc_framework_ExternalAccessory-5.2.dist-info/RECORD,, -pyobjc_framework_ExternalAccessory-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_ExternalAccessory-5.2.dist-info/top_level.txt,sha256=o7hNIbvUIsHsxEbFwvrBVTZRVTJrI3ngWFMDcIPlwGs,18 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/top_level.txt deleted file mode 100644 index 7ce9c9f4..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ExternalAccessory-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ExternalAccessory diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/METADATA deleted file mode 100644 index b04e4e87..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-FSEvents -Version: 5.2 -Summary: Wrappers for the framework FSEvents on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,FSEvents -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "FSEvents" API in macOS. The functions in this framework -allow you to reliably observe changes to the filesystem, even when your -program is not running al the time. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/RECORD deleted file mode 100644 index ad4a8d12..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -FSEvents/__init__.py,sha256=YpXoYa-n5D2l7h_A2Xg-FrRWT333ol4VAxyCDJ07Ocw,730 -FSEvents/__init__.pyc,, -FSEvents/_callbacks.so,sha256=EkyMA9_fuK986HMdSuhOZ-wZn6y--AQ1EhfH9e5Ha7I,16328 -FSEvents/_metadata.py,sha256=bqBMZ7QctiOwC0r7A0Oyeg0dwoI9i2MJUJZk30yQ1vY,3498 -FSEvents/_metadata.pyc,, -pyobjc_framework_FSEvents-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_FSEvents-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_FSEvents-5.2.dist-info/METADATA,sha256=MQIAeY6gEpp_9ydi9nbe2G2ABcNTt10Cbl1ydzus-g8,2274 -pyobjc_framework_FSEvents-5.2.dist-info/RECORD,, -pyobjc_framework_FSEvents-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_FSEvents-5.2.dist-info/top_level.txt,sha256=9HjffDftElYLXQHzGKD5UKkw2oCM9sDY-7kOBHI0SzM,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/top_level.txt deleted file mode 100644 index 7cf3be41..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FSEvents-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -FSEvents diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/METADATA deleted file mode 100644 index 67c5e8c3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-FinderSync -Version: 5.2 -Summary: Wrappers for the framework FinderSync on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,FinderSync -Platform: MacOS X (>=10.10) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "FinderSync" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/RECORD deleted file mode 100644 index f3fb0f7a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -FinderSync/__init__.py,sha256=wXpIFmspykYjHwfGur4WowgPGHNjWkqG-EAHH1R6U68,698 -FinderSync/__init__.pyc,, -FinderSync/_metadata.py,sha256=fFmPfYwa2GQpSp03v6Ee5IqEZRhbJ_ccLWFDtdHGqpI,2279 -FinderSync/_metadata.pyc,, -pyobjc_framework_FinderSync-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_FinderSync-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_FinderSync-5.2.dist-info/METADATA,sha256=ku3oBL41gu_HrOabYQkiRXhOMBKIMRZl3dH51ZbEOHc,2150 -pyobjc_framework_FinderSync-5.2.dist-info/RECORD,, -pyobjc_framework_FinderSync-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_FinderSync-5.2.dist-info/top_level.txt,sha256=Lez6PZ3YRpKSl1-v2Gk4UpKBHWFh5D74opAWXSLrbRc,11 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/top_level.txt deleted file mode 100644 index 2f485396..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_FinderSync-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -FinderSync diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/METADATA deleted file mode 100644 index fbfa7d3d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-GameCenter -Version: 5.2 -Summary: Wrappers for the framework GameCenter on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,GameCenter -Platform: MacOS X (>=10.8) 
-Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "GameCenter" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/RECORD deleted file mode 100644 index 9c031056..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -GameCenter/_GameCenter.so,sha256=i-EcfCgv1__WGyp53VrzaJxv_z9yTx3lyFxCjsmnVOw,22260 -GameCenter/__init__.py,sha256=gB7XoTS7JH317CBpNIBx4iK330NAZfSad7tVMXdJxGY,743 -GameCenter/__init__.pyc,, -GameCenter/_metadata.py,sha256=adRKInI7KR9oVIvK4GPzcTFXi4kHIyZdpqlT7R5bsWQ,28265 -GameCenter/_metadata.pyc,, -pyobjc_framework_GameCenter-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_GameCenter-5.2.dist-info/METADATA,sha256=usGVTuZ_XPmhQOTDn0Q66luRSxbnML0oAYb_-MCF87A,2148 -pyobjc_framework_GameCenter-5.2.dist-info/RECORD,, -pyobjc_framework_GameCenter-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_GameCenter-5.2.dist-info/top_level.txt,sha256=G4Z7CDXIvIcqr2_zBtSlqP3HMftZyWcMfnLLs28TsM8,11 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/top_level.txt 
b/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/top_level.txt deleted file mode 100644 index 20ab4295..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameCenter-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -GameCenter diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/METADATA deleted file mode 100644 index 1e3a304e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-GameController -Version: 5.2 -Summary: Wrappers for the framework GameController on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,GameController -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "GameController" framework on macOS introduced in macOS 10.9. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/RECORD deleted file mode 100644 index fe2b9541..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -GameController/__init__.py,sha256=x2QD0m79xk90MtZDEAjd9mLH2SDVAIA9zGyoqLAqJro,716 -GameController/__init__.pyc,, -GameController/_metadata.py,sha256=_IEI7bG-SC993ZSyL90MHDm5oPG8tWGmvfBLtga64Bw,9149 -GameController/_metadata.pyc,, -pyobjc_framework_GameController-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_GameController-5.2.dist-info/METADATA,sha256=c1N7gB29xbYt_0VA5LaqtmVaMebdtTEKor151bS8p4E,2181 -pyobjc_framework_GameController-5.2.dist-info/RECORD,, -pyobjc_framework_GameController-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_GameController-5.2.dist-info/top_level.txt,sha256=0W1d05ceM324odN0qMnaq1n2veYu8my-T3ZNGgHzg48,15 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/top_level.txt deleted file mode 100644 index 431694f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameController-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -GameController diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/METADATA deleted file mode 100644 index 0678dc53..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-GameKit -Version: 5.2 -Summary: Wrappers for the framework GameKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,GameKit -Platform: MacOS X (>=10.8) -Classifier: Development Status :: 5 - Production/Stable 
-Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - -Wrappers for the "GameKit" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/RECORD deleted file mode 100644 index c3d7d005..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -GameKit/_GameKit.so,sha256=LGKhpH4SKzUHJFJqNuIYxKHuwQjUbC1H6_XPkRMiKZc,22244 -GameKit/__init__.py,sha256=uslYaH_IskygPTonXMEvrHRTWSI-cNaA-vwG86fzq8A,699 -GameKit/__init__.pyc,, -GameKit/_metadata.py,sha256=Pyfgwse5UKki6WWVVUbnGsOEAiPD1nl_U5-f8Kl9uwA,42445 -GameKit/_metadata.pyc,, -pyobjc_framework_GameKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_GameKit-5.2.dist-info/METADATA,sha256=Kswzrq32gjsVxBtOUCxHTYsMZPLIAumI27BJMLrUp4Y,2183 -pyobjc_framework_GameKit-5.2.dist-info/RECORD,, -pyobjc_framework_GameKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_GameKit-5.2.dist-info/top_level.txt,sha256=8s5XoW_mc6Z0YEit8nIWQpDfBFa9IG3TRGBN0omeHpc,8 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/top_level.txt 
b/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 92cb33a8..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -GameKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/METADATA deleted file mode 100644 index 3fba00aa..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-GameplayKit -Version: 5.2 -Summary: Wrappers for the framework GameplayKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,GameplayKit -Platform: MacOS X (>=10.11) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-SpriteKit (>=5.2) - -Wrappers for the "GameplayKit" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/RECORD deleted file mode 100644 index 78855f6f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -GameplayKit/_GameplayKit.so,sha256=cCEWMBqgD0q33ynKgu7D_YhpsSbnXFc4Z65fAft_I7o,13924 -GameplayKit/__init__.py,sha256=uHAm_3ShceGva3e20Sz0SqxF_eVPUay7dl1NUfbU-aU,762 -GameplayKit/__init__.pyc,, -GameplayKit/_metadata.py,sha256=te86UEl83XbrQJG3tVXHKYHGb1Th9CI-if5MI_uq3Wk,7675 -GameplayKit/_metadata.pyc,, -pyobjc_framework_GameplayKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_GameplayKit-5.2.dist-info/METADATA,sha256=-QMBmrqSfuPO1L2bbKmZWwNU458aNrWC3yBS-BD5k_s,2203 -pyobjc_framework_GameplayKit-5.2.dist-info/RECORD,, -pyobjc_framework_GameplayKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_GameplayKit-5.2.dist-info/top_level.txt,sha256=qBCrsR3zejVEwxb8K1Q6bIc4AuseeX7uDozGMZvo1Ik,12 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/top_level.txt deleted file mode 100644 index a28931c3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_GameplayKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -GameplayKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/METADATA deleted file mode 100644 index 34b72d7b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-IMServicePlugIn -Version: 5.2 -Summary: Wrappers for the framework IMServicePlugIn on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: 
PyObjC,IMServicePlugIn -Platform: MacOS X (>=10.7) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "IMServicePlugIn" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/RECORD deleted file mode 100644 index 965418e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -IMServicePlugIn/_IMServicePlugIn.so,sha256=vJ-ycLHajrglot0Fdk1YR30e8M-F55_lc5nUCD7axws,18156 -IMServicePlugIn/__init__.py,sha256=QsZOPVJ2Z0hoU_2XH66EGnIq4gTlRUqfSwWoFWMfgik,825 -IMServicePlugIn/__init__.pyc,, -IMServicePlugIn/_metadata.py,sha256=meBy6AnX0K4B-UBwEx3gOdxTD_-B58tJfVEneIDyCNc,2179 -IMServicePlugIn/_metadata.pyc,, -pyobjc_framework_IMServicePlugIn-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_IMServicePlugIn-5.2.dist-info/METADATA,sha256=YFvclQ_kQvXUsSSk1h2k4w75MnOsJoAlOJt7DJxPJfY,2168 -pyobjc_framework_IMServicePlugIn-5.2.dist-info/RECORD,, -pyobjc_framework_IMServicePlugIn-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_IMServicePlugIn-5.2.dist-info/top_level.txt,sha256=SjtvCo0agRvoQWc4veEypMAanDjWyOeVrvDrDilmTQg,16 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: 
cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/top_level.txt deleted file mode 100644 index 710c69b2..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IMServicePlugIn-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -IMServicePlugIn diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/METADATA deleted file mode 100644 index b46d2ed7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-IOSurface -Version: 5.2 -Summary: Wrappers for the framework IOSurface on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,IOSurface -Platform: MacOS X (>=10.6) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "IOSurface" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/RECORD deleted file mode 100644 index 41053e1e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -IOSurface/__init__.py,sha256=-Ljz-P2Zl8lFwGZYuMfK0SiMzC7ppKImTMIQHD3BDLE,691 -IOSurface/__init__.pyc,, -IOSurface/_metadata.py,sha256=xoOjNqHjC6XPXurWUR8kn4rtg1a_P1EgnmTVWEbMquk,8148 -IOSurface/_metadata.pyc,, -pyobjc_framework_IOSurface-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_IOSurface-5.2.dist-info/METADATA,sha256=lf7AcgqLsKgYQtlS21DkicEqgpl6ytVCQQ3n6XdHTGA,2145 -pyobjc_framework_IOSurface-5.2.dist-info/RECORD,, -pyobjc_framework_IOSurface-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_IOSurface-5.2.dist-info/top_level.txt,sha256=lEU3WSYCMN0qfpwvdH0uiFRXCDH2J0TOqhJM3Vz-ozI,10 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/top_level.txt deleted file mode 100644 index 71bddb52..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_IOSurface-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -IOSurface diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/METADATA deleted file mode 100644 index 19ec9a95..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ImageCaptureCore -Version: 5.2 -Summary: Wrappers for the framework ImageCaptureCore on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,ImageCaptureCore -Platform: MacOS X (>=10.6) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "ImageCaptureCore" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/RECORD deleted file mode 100644 index aea23f57..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -ImageCaptureCore/_ImageCaptureCore.so,sha256=9TWNCmTFJJCFelkG-FUFPGCP4sqihX2YAlsXJD96BQU,13972 -ImageCaptureCore/__init__.py,sha256=gmBSHtE4Xthg1n9xmsz_jl3dModJzr0vHK-t0KCNNdg,759 -ImageCaptureCore/__init__.pyc,, -ImageCaptureCore/_metadata.py,sha256=EuK4ublwZn2GXql_-ko5l0WzPOB0sDpcaRVoNm9aUfk,16411 -ImageCaptureCore/_metadata.pyc,, -pyobjc_framework_ImageCaptureCore-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ImageCaptureCore-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_ImageCaptureCore-5.2.dist-info/METADATA,sha256=yUGJBHfZfUP8O-wt255eXTaEkt4NSK3bN3Ir4NgjmuI,2172 -pyobjc_framework_ImageCaptureCore-5.2.dist-info/RECORD,, -pyobjc_framework_ImageCaptureCore-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_ImageCaptureCore-5.2.dist-info/top_level.txt,sha256=mNAM-5vFMkOTow7KVbmGAu7hVw1fzS3AUvdTGmmNb9g,17 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/top_level.txt deleted file mode 100644 index 9f852f3c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ImageCaptureCore-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ImageCaptureCore diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) 
- -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/METADATA deleted file mode 100644 index c7cf1e09..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-InputMethodKit -Version: 5.2 -Summary: Wrappers for the framework InputMethodKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,InputMethodKit -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "InputMethodKit" framework on macOS 10.5 or later. The -interfaces in this framework allow you to develop input methods. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/RECORD deleted file mode 100644 index 7c5fdf17..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -InputMethodKit/_InputMethodKit.so,sha256=t_4i-c3Thptg_r-5PvdnQL91Z7nKxaIuzhISI2zBzxs,9716 -InputMethodKit/__init__.py,sha256=FduAcWsHsNt_mZ7xudczXmMHc5EcCcuNKF3V_h5cBQc,762 -InputMethodKit/__init__.pyc,, -InputMethodKit/_metadata.py,sha256=6sJI86PnCYum6iXfavbolIDBWqGNqdGo994mraG25vY,6710 -InputMethodKit/_metadata.pyc,, -pyobjc_framework_InputMethodKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_InputMethodKit-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_InputMethodKit-5.2.dist-info/METADATA,sha256=_UUWCMRL1pQIxVQ9sfjpBj98EzuleYwtnQjZhAXA1ng,2247 -pyobjc_framework_InputMethodKit-5.2.dist-info/RECORD,, -pyobjc_framework_InputMethodKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_InputMethodKit-5.2.dist-info/top_level.txt,sha256=LkaeGvpngcB9_obiAC3qJ-Sfhl6NqMAQt2bugA3EOQI,15 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 48c3f01d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InputMethodKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -InputMethodKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill 
Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/METADATA deleted file mode 100644 index a65f6ee8..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/METADATA +++ /dev/null @@ -1,58 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-InstallerPlugins -Version: 5.2 -Summary: Wrappers for the framework InstallerPlugins on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,InstallerPlugins -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "InstallerPlugins" framework on macOS. This framework -allows you to develop plugin's for the "Installer.app" application, and those -make it possible to add new functionality to ".pkg" and ".mpkg" installers -on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/RECORD deleted file mode 100644 index 5c2579fb..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -InstallerPlugins/__init__.py,sha256=mE31UtcqoXS6s9sErtCTccaxnYimJng88RN6gId9kDw,723 -InstallerPlugins/__init__.pyc,, -InstallerPlugins/_metadata.py,sha256=hy_s0ViynCUmwLd3uxOWqwdRtq1fJPgvviqt0igsris,1921 -InstallerPlugins/_metadata.pyc,, -pyobjc_framework_InstallerPlugins-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_InstallerPlugins-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_InstallerPlugins-5.2.dist-info/METADATA,sha256=x3znFl5xzK-BTofOSV8eyDKRlr-5covawl4kKMEZ8AY,2342 -pyobjc_framework_InstallerPlugins-5.2.dist-info/RECORD,, -pyobjc_framework_InstallerPlugins-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_InstallerPlugins-5.2.dist-info/top_level.txt,sha256=h4Y6zdCofQ7V9Y0VKMSH7Oa9Ac_4Tk8MAA4UEj-CCwc,17 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/top_level.txt deleted file mode 100644 index dc603d63..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstallerPlugins-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -InstallerPlugins diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/METADATA deleted file mode 100644 index bd29b763..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/METADATA +++ /dev/null @@ -1,61 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-InstantMessage -Version: 5.2 -Summary: Wrappers for the framework InstantMessage on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,InstantMessage -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - - -Wrappers for "InstantMessage" framework on macOS 10.5 or later. This framework -allows you to access iChat information, as well as a way to provide an -auxilliary video source to iChat Theater. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - -Note that this framework is deprecated in OSX 10.9, use the Social framework -instead if you target that OSX release. - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/RECORD deleted file mode 100644 index 3277d366..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -InstantMessage/__init__.py,sha256=7WUSrER24yjmmdXe7qjy7WJEFSqhVWGfN1tn2nr6B4k,736 -InstantMessage/__init__.pyc,, -InstantMessage/_metadata.py,sha256=BNpu2eZ1NeGYUtrqWnHYjs6IX9r-UdZSUhw9kn8smNQ,4570 -InstantMessage/_metadata.pyc,, -pyobjc_framework_InstantMessage-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_InstantMessage-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_InstantMessage-5.2.dist-info/METADATA,sha256=Y94bg454L7LVP5xL5fhBSl65hr2irE3V5TFt0nuChMw,2468 -pyobjc_framework_InstantMessage-5.2.dist-info/RECORD,, -pyobjc_framework_InstantMessage-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_InstantMessage-5.2.dist-info/top_level.txt,sha256=ZFFCOIggeoURVRjDTOl89tHkH3nLUaPSpdbr9ZE_R_c,15 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/top_level.txt deleted file mode 100644 index 019565d5..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_InstantMessage-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -InstantMessage diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/METADATA deleted file mode 100644 index 596fecb2..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Intents -Version: 5.2 -Summary: Wrappers for the framework Intents on macOS -Home-page: 
https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Intents -Platform: MacOS X (>=10.12) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "Intents" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/RECORD deleted file mode 100644 index 3deb5e9d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Intents/_Intents.so,sha256=Uy6Bh--cYMkbLNYYdvia-vfbDyy6pS-HDXCLX9AFndM,13892 -Intents/__init__.py,sha256=pqjw1evdCf46rbvtEW1KX1cgOBR2uJU9Hoqjiyo3Snk,709 -Intents/__init__.pyc,, -Intents/_metadata.py,sha256=wozN6RBjNPG3cHPQm6vJPDoA0NMfi5LQK4MKYea7qjY,83948 -Intents/_metadata.pyc,, -pyobjc_framework_Intents-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Intents-5.2.dist-info/METADATA,sha256=71gSihojy-0yR334kROMbwe2j0YMNn4rG_r57wyLTBA,2137 -pyobjc_framework_Intents-5.2.dist-info/RECORD,, -pyobjc_framework_Intents-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_Intents-5.2.dist-info/top_level.txt,sha256=seYBy1nfsxcRkTn_78L5cpjqlQaslr-LUUqcmPP1uhI,8 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: 
cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/top_level.txt deleted file mode 100644 index 7e7d0c24..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Intents-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Intents diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/METADATA deleted file mode 100644 index 131f775c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-LatentSemanticMapping -Version: 5.2 -Summary: Wrappers for the framework LatentSemanticMapping on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,LatentSemanticMapping -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the framework "LatentSemanticMapping" on macOS 10.5 or later. - -The Latent Semantic Mapping framework supports the classification of text and other token-based content into developer-defined categories. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks. - -NOTE: Apple's documentation for this framework is very minimal at the moment, -making it very hard to actually use the framework. 
- - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/RECORD deleted file mode 100644 index c317bf32..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -LatentSemanticMapping/__init__.py,sha256=U9o8IY_tr7SgxFB7r8wMpNGoVWStxHNyRMqjZVzOabw,782 -LatentSemanticMapping/__init__.pyc,, -LatentSemanticMapping/_metadata.py,sha256=SfiTo7LIXiyT-GX-Hig5q5Se4TEucOg31hQuzDibNX4,4796 -LatentSemanticMapping/_metadata.pyc,, -pyobjc_framework_LatentSemanticMapping-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_LatentSemanticMapping-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_LatentSemanticMapping-5.2.dist-info/METADATA,sha256=zRYZxaC6Za9gL3jZQfCr3YPSG3gK8g9I7wthQLzpVpI,2478 -pyobjc_framework_LatentSemanticMapping-5.2.dist-info/RECORD,, -pyobjc_framework_LatentSemanticMapping-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_LatentSemanticMapping-5.2.dist-info/top_level.txt,sha256=JKK1SZxv6urpfYJ53IulBQEm-SSF8KDoILwqvEv9SEY,22 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/top_level.txt deleted file mode 100644 index 9d295de1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LatentSemanticMapping-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -LatentSemanticMapping diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/METADATA deleted file mode 100644 index ee8d35f5..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/METADATA +++ /dev/null @@ -1,52 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-LaunchServices -Version: 5.2 -Summary: Wrappers for the framework LaunchServices on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,LaunchServices -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-CoreServices (>=5.2) - - -Deprecated wrappers for the "LaunchServices" framework on macOS. - -Use the "CoreServices" bindings instead. 
- - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/RECORD deleted file mode 100644 index 98b1e8cd..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/RECORD +++ /dev/null @@ -1,8 +0,0 @@ -LaunchServices/__init__.py,sha256=OTqkfXKdbOjd2PXtzKh0U6TV6nCxIKoPNgKX4ybj23g,751 -LaunchServices/__init__.pyc,, -pyobjc_framework_LaunchServices-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_LaunchServices-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_LaunchServices-5.2.dist-info/METADATA,sha256=myVInDgX2xBK4Pk8CgaP1sVoh8iP5H5dylejj-MqDKc,1968 -pyobjc_framework_LaunchServices-5.2.dist-info/RECORD,, -pyobjc_framework_LaunchServices-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_LaunchServices-5.2.dist-info/top_level.txt,sha256=SacVhswCjuTJqnWUxgCImC7Gm94ztLgvlqfVrdFIKJc,15 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/top_level.txt deleted file mode 100644 index 18e8da0f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LaunchServices-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -LaunchServices diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/METADATA deleted file mode 100644 index cfdb7a43..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-LocalAuthentication -Version: 5.2 -Summary: Wrappers for the framework LocalAuthentication on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,LocalAuthentication -Platform: MacOS X (>=10.10) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "LocalAuthentication" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/RECORD deleted file mode 100644 index e7341929..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -LocalAuthentication/__init__.py,sha256=hyr_x42Ghy0TUkaFVqsIlqNt5JCXMP1nakRASCJYL6U,750 -LocalAuthentication/__init__.pyc,, -LocalAuthentication/_metadata.py,sha256=tbBQ3bCBqKDf9AknZO3rIKMdTyCW78GFP4ReBg2_H_U,3416 -LocalAuthentication/_metadata.pyc,, -pyobjc_framework_LocalAuthentication-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_LocalAuthentication-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_LocalAuthentication-5.2.dist-info/METADATA,sha256=SRXt2xNkd6RAoVfr6Ragt3rw8qCs9rJT-ZkQVK7wgs0,2186 -pyobjc_framework_LocalAuthentication-5.2.dist-info/RECORD,, -pyobjc_framework_LocalAuthentication-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_LocalAuthentication-5.2.dist-info/top_level.txt,sha256=BY5oON8s8gX9F1ZNcQuI1CmMLytMMOVDb0FDDuU5QBY,20 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/top_level.txt deleted file mode 100644 index 12f6f5a8..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_LocalAuthentication-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -LocalAuthentication diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele 
Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/METADATA deleted file mode 100644 index fe5efece..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-MapKit -Version: 5.2 -Summary: Wrappers for the framework MapKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,MapKit -Platform: MacOS X (>=10.9) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-CoreLocation (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - -Wrappers for the "MapKit" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/RECORD deleted file mode 100644 index f58c3a2b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,12 +0,0 @@ -MapKit/_MapKit.so,sha256=LTjYI2NNNcwaeSVewW8PGqrbkinOOJH-30rhV-Vvhe4,13860 -MapKit/__init__.py,sha256=yG9EzrvRF6qopd4vEK9_ZzEaxPIMWJPbppfuAVFmRO4,809 -MapKit/__init__.pyc,, -MapKit/_inlines.so,sha256=dCM-ALugG-WjwqU-Ro4GGWQhYDsS4Lbd_zRbH8kkE2o,12056 -MapKit/_metadata.py,sha256=iIieqew4Y9HDq9-2vzLOcT5H4ZNGjFzWaiSzH9gWMN8,24461 -MapKit/_metadata.pyc,, -pyobjc_framework_MapKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_MapKit-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_MapKit-5.2.dist-info/METADATA,sha256=C1gvZX1g5Im0ThJudKThnvgUCb_kCJMMl_Ggp8m_AWQ,2232 -pyobjc_framework_MapKit-5.2.dist-info/RECORD,, -pyobjc_framework_MapKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_MapKit-5.2.dist-info/top_level.txt,sha256=4NuNAfnKaphl_fUSXNL6v4sX4lPRUkCgtZ9VMDlsUAk,7 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/top_level.txt deleted file mode 100644 index b5e022f4..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MapKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -MapKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/METADATA deleted file mode 100644 index 89b87b58..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-MediaAccessibility -Version: 5.2 -Summary: Wrappers for the framework MediaAccessibility on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald 
Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,MediaAccessibility -Platform: MacOS X (>=10.9) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "AVKit" framework on macOS introduced in macOS 10.9. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/RECORD deleted file mode 100644 index 44e4af0f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -MediaAccessibility/__init__.py,sha256=h1cAkjgKGHZ_ooRwqY-L3t_9Wb8KaNs8cLESRJL-9A4,737 -MediaAccessibility/__init__.pyc,, -MediaAccessibility/_metadata.py,sha256=km0w54ZTeSDUt2yiPYjv0-3OXCpPyvm4tj97TmJtmOU,4138 -MediaAccessibility/_metadata.pyc,, -pyobjc_framework_MediaAccessibility-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_MediaAccessibility-5.2.dist-info/METADATA,sha256=P-lnuwNG-P2em9Fwq7bgPhALfhQnF1ffjQuzPswRyhY,2193 -pyobjc_framework_MediaAccessibility-5.2.dist-info/RECORD,, -pyobjc_framework_MediaAccessibility-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_MediaAccessibility-5.2.dist-info/top_level.txt,sha256=0nBlaW0j2IZ7T0OA5tImPiJOgPCeH3llhApJjuSFjV8,19 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 
-Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/top_level.txt deleted file mode 100644 index 0e0cd81a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaAccessibility-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -MediaAccessibility diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/METADATA deleted file mode 100644 index 44bd2a57..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-MediaLibrary -Version: 5.2 -Summary: Wrappers for the framework MediaLibrary on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,MediaLibrary -Platform: MacOS X (>=10.9) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - - -Wrappers for the "MediaLibrary" framework on macOS introduced in macOS 10.9. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/RECORD deleted file mode 100644 index 877594a5..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -MediaLibrary/__init__.py,sha256=24W94PnjXQWD5Isyg_hsDMOtWkjLTeDbF7gZj1W0l4A,723 -MediaLibrary/__init__.pyc,, -MediaLibrary/_metadata.py,sha256=wYkt7a8nMuYA3EX7-TZQATWYrRxNl_lOTo7CA3hKs-0,5692 -MediaLibrary/_metadata.pyc,, -pyobjc_framework_MediaLibrary-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_MediaLibrary-5.2.dist-info/METADATA,sha256=18oSwAjOiCSlbWEApY0VwqlG5OgPxY33DL1OoI_vklY,2229 -pyobjc_framework_MediaLibrary-5.2.dist-info/RECORD,, -pyobjc_framework_MediaLibrary-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_MediaLibrary-5.2.dist-info/top_level.txt,sha256=8l460_uhfCC5qFaULCPHLkYUub_SR8O05tmsEdRSRp8,13 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/top_level.txt deleted file mode 100644 index ece784a7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaLibrary-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -MediaLibrary diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/METADATA deleted file mode 100644 index 2290834d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-MediaPlayer -Version: 5.2 -Summary: Wrappers for the framework MediaPlayer on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,MediaPlayer -Platform: MacOS X (>=10.12) -Classifier: Development Status :: 5 - Production/Stable 
-Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-AVFoundation (>=5.2) - - -Wrappers for the "MediaPlayer" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/RECORD deleted file mode 100644 index 38f798f1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -MediaPlayer/__init__.py,sha256=aBPAXah_4cXMyBsWmkPm8u0XUv9_cvhNGqYA0sM6XL0,709 -MediaPlayer/__init__.pyc,, -MediaPlayer/_metadata.py,sha256=t2mVI3649ihy2qo6mQDl93vyc4WzX6DLib6v3tkp-PA,8655 -MediaPlayer/_metadata.pyc,, -pyobjc_framework_MediaPlayer-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_MediaPlayer-5.2.dist-info/METADATA,sha256=TckoBCIrvciZXS2LRnVrMD8ukLMHw6U3GXhY83EgjHA,2161 -pyobjc_framework_MediaPlayer-5.2.dist-info/RECORD,, -pyobjc_framework_MediaPlayer-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_MediaPlayer-5.2.dist-info/top_level.txt,sha256=kEOHDG2hguZyvA-USYYHJyIPYsuoatNLROcd-PlWkXc,12 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/top_level.txt deleted file mode 100644 index 
79cbe25f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaPlayer-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -MediaPlayer diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/METADATA deleted file mode 100644 index 566656af..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-MediaToolbox -Version: 5.2 -Summary: Wrappers for the framework MediaToolbox on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,MediaToolbox -Platform: MacOS X (>=10.9) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "MediaToolbox" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/RECORD deleted file mode 100644 index 3fd46c9a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -MediaToolbox/_MediaToolbox.so,sha256=B0RGcc5MrTj3FIya8c5BePac3wHCLKCktLrViG2sIwQ,16760 -MediaToolbox/__init__.py,sha256=ZsDo28PaHDCsIGQt2Ecsiw5I_p5ockmRHMsZKfvBzyY,852 -MediaToolbox/__init__.pyc,, -MediaToolbox/_metadata.py,sha256=pkn__1VnoiXjJHRlYUMJ_2CW9LiGu4gH9VY2_oVK6H0,1557 -MediaToolbox/_metadata.pyc,, -pyobjc_framework_MediaToolbox-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_MediaToolbox-5.2.dist-info/METADATA,sha256=KD2JrX7-lhO3woOrQ9zTW45gD_MZH8dBXnWHwa3q2qc,2156 -pyobjc_framework_MediaToolbox-5.2.dist-info/RECORD,, -pyobjc_framework_MediaToolbox-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_MediaToolbox-5.2.dist-info/top_level.txt,sha256=Ow20bKQvO5s3fcebIZgQuwR4CalAeMBM8Z_lKdSKZ_s,13 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/top_level.txt deleted file mode 100644 index 32c93f7a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MediaToolbox-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -MediaToolbox diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/METADATA deleted file mode 100644 index b148fd6b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ModelIO -Version: 5.2 -Summary: Wrappers for the framework ModelIO on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,ModelIO -Platform: MacOS X (>=10.11) -Classifier: 
Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - -Wrappers for the "ModelIO" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/RECORD deleted file mode 100644 index 4d5b62c9..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -ModelIO/_ModelIO.so,sha256=xfu2IskCW7BpN6k38RiTDNt6ARPU1-GYIQNgMIAqgi4,18060 -ModelIO/__init__.py,sha256=uZJZptZea_VcOo9llgyyRO4ZKEmULGznsuNRPYq8eUg,718 -ModelIO/__init__.pyc,, -ModelIO/_metadata.py,sha256=lxKzLixprgRD6Xhoqk_D_It9Tp_XkkbIDwmlIQMW3q8,28655 -ModelIO/_metadata.pyc,, -pyobjc_framework_ModelIO-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ModelIO-5.2.dist-info/METADATA,sha256=1m45_A4ooLzqLkimXKu6hH2oc3b1WcsvSwBmFGpctA8,2184 -pyobjc_framework_ModelIO-5.2.dist-info/RECORD,, -pyobjc_framework_ModelIO-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_ModelIO-5.2.dist-info/top_level.txt,sha256=tlLqg26IgOSRxeNWwXKxJjKC7_NMPPXcKGazzayLCwY,8 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/top_level.txt 
b/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/top_level.txt deleted file mode 100644 index 1fb54bd7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ModelIO-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ModelIO diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/METADATA deleted file mode 100644 index 798516b0..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-MultipeerConnectivity -Version: 5.2 -Summary: Wrappers for the framework MultipeerConnectivity on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,MultipeerConnectivity -Platform: MacOS X (>=10.10) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "MultipeerConnectivity" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/RECORD deleted file mode 100644 index a1c23313..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -MultipeerConnectivity/_MultipeerConnectivity.so,sha256=s4vXN6n7W5kKJo9Kmlnz7xnzkHOvXHNzgCj4Bvtq-hI,13972 -MultipeerConnectivity/__init__.py,sha256=VxH--58GADY5FBycRBnUjxsXKKT3i3XHQbdmGjUdaNI,891 -MultipeerConnectivity/__init__.pyc,, -MultipeerConnectivity/_metadata.py,sha256=ggc9ZGv5eQeW9oU49dSQRBtRX0wLvKJ-9VQZnJGppcA,5338 -MultipeerConnectivity/_metadata.pyc,, -pyobjc_framework_MultipeerConnectivity-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_MultipeerConnectivity-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_MultipeerConnectivity-5.2.dist-info/METADATA,sha256=q0XOzogbWOLyH9VMPYss-QNOWjBFOS30uFlxk6EPGhc,2193 -pyobjc_framework_MultipeerConnectivity-5.2.dist-info/RECORD,, -pyobjc_framework_MultipeerConnectivity-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_MultipeerConnectivity-5.2.dist-info/top_level.txt,sha256=IUU82dmFTBaV44OXEYmfdzoCP0LOj-pHYNkxJq8PCkI,22 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/top_level.txt deleted file mode 100644 index bc525183..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_MultipeerConnectivity-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -MultipeerConnectivity diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/METADATA deleted file mode 100644 index 5c2cd1bb..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: 
pyobjc-framework-NetFS -Version: 5.2 -Summary: Wrappers for the framework NetFS on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,NetFS -Platform: MacOS X (>=10.6) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "NetFS" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/RECORD deleted file mode 100644 index c9b1aec7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -NetFS/__init__.py,sha256=oZzkhGeq37ylp0H1sMFKZEti8ezhl_g33vZY3Z5fZqA,711 -NetFS/__init__.pyc,, -NetFS/_metadata.py,sha256=rpz8vVb_qwFsex9HM7cGpTTqEVVIdC-O3qsitj1laHM,4341 -NetFS/_metadata.pyc,, -pyobjc_framework_NetFS-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_NetFS-5.2.dist-info/METADATA,sha256=wtDkeb_FCN63bsX3R-oRWiImJBAc_hjIWyV3uMheYy0,2129 -pyobjc_framework_NetFS-5.2.dist-info/RECORD,, -pyobjc_framework_NetFS-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_NetFS-5.2.dist-info/top_level.txt,sha256=ryo-Z06fREvgUdu4V9c_xVpRtulUQaQJaZgD8ak8HRs,6 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any 
-Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/top_level.txt deleted file mode 100644 index 8e760b1b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetFS-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -NetFS diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/METADATA deleted file mode 100644 index 3c371874..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-NetworkExtension -Version: 5.2 -Summary: Wrappers for the framework NetworkExtension on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,NetworkExtension -Platform: MacOS X (>=10.11) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "NetworkExtension" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/RECORD deleted file mode 100644 index 123e17a7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -NetworkExtension/_NetworkExtension.so,sha256=yYva0woulJLksj_ZNVJ7216WVgV_zGhCJgeM2zG9Dmw,9764 -NetworkExtension/__init__.py,sha256=9kp3QP9GpMf2yx3nk9KJMfCSKQ_vT5PqNzmaMco7pHE,759 -NetworkExtension/__init__.pyc,, -NetworkExtension/_metadata.py,sha256=4OdWJU4Cyqe9I8ZUhh3SCoAxh5bLp_00uBgEdnsrd9s,16459 -NetworkExtension/_metadata.pyc,, -pyobjc_framework_NetworkExtension-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_NetworkExtension-5.2.dist-info/METADATA,sha256=VGD6dS8sub02jnsD-30hfjSPfJfmh0A_rvOkRqwoW4c,2173 -pyobjc_framework_NetworkExtension-5.2.dist-info/RECORD,, -pyobjc_framework_NetworkExtension-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_NetworkExtension-5.2.dist-info/top_level.txt,sha256=DcB-KatFPyELRB_Yl4EuLhbMMUUXaMG7BJ-7JqI9bCE,17 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/top_level.txt deleted file mode 100644 index d348efaf..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NetworkExtension-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -NetworkExtension diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/METADATA deleted file mode 100644 index ce7f9e23..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-NotificationCenter -Version: 5.2 -Summary: Wrappers for the framework NotificationCenter on macOS -Home-page: 
https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,NotificationCenter -Platform: MacOS X (>=10.10) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "NotificationCenter" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/RECORD deleted file mode 100644 index 62f1688a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -NotificationCenter/_NotificationCenter.so,sha256=KIUDWUIxlA4MGE9SDRzRBXX-H2ZzvkfpmmlKDDGfUXE,13932 -NotificationCenter/__init__.py,sha256=-uEuJMgAZA8awtNpZuZkUSgIns_zFAoDlU7f1bcCpgI,824 -NotificationCenter/__init__.pyc,, -NotificationCenter/_metadata.py,sha256=Al34UWq7YYAagkRV84Vmz58o1cJ-9OEtP7m9VYZCwsU,2510 -NotificationCenter/_metadata.pyc,, -pyobjc_framework_NotificationCenter-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_NotificationCenter-5.2.dist-info/METADATA,sha256=5V2UOAT1A0cj4YL1VrdIEyRNQOZN4OKrEqQoxXt71Jc,2181 -pyobjc_framework_NotificationCenter-5.2.dist-info/RECORD,, -pyobjc_framework_NotificationCenter-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_NotificationCenter-5.2.dist-info/top_level.txt,sha256=M6iTl367RaR6LGWvutHA4iunnEmJHlMQ7CsTjvIOwic,19 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/WHEEL deleted file mode 100644 index 
c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/top_level.txt deleted file mode 100644 index 95e666f1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_NotificationCenter-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -NotificationCenter diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/METADATA deleted file mode 100644 index 1a3a41a7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-OSAKit -Version: 5.2 -Summary: Wrappers for the framework OSAKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,OSAKit -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "OSAKit" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/RECORD deleted file mode 100644 index 29d7ca48..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -OSAKit/__init__.py,sha256=eQOm0ylhIshB1Isu6Gdi0b_RMF2FBZHu4e_r41ZwXQk,661 -OSAKit/__init__.pyc,, -OSAKit/_metadata.py,sha256=4kJvSIAgMrvJf-vXIDDCNYwTbMoqMRBN58xR-0V-ZfQ,3911 -OSAKit/_metadata.pyc,, -pyobjc_framework_OSAKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_OSAKit-5.2.dist-info/METADATA,sha256=g6MTTNnPxMlAAZRr8pxaoDrkjO5V5Hc32jgheIlkxtg,2124 -pyobjc_framework_OSAKit-5.2.dist-info/RECORD,, -pyobjc_framework_OSAKit-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_OSAKit-5.2.dist-info/top_level.txt,sha256=cRIdFhSbpdysemPGmLtHk6Y0eN02ry7RcIj22TUgSiM,7 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 454d82ea..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OSAKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -OSAKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/METADATA deleted file mode 100644 index 3ecead73..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-OpenDirectory -Version: 5.2 -Summary: Wrappers for the framework OpenDirectory on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,OpenDirectory,CFOpenDirectory -Platform: MacOS X (>=10.6) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the 'OpenDirectory' and 'CFOpenDirectory' frameworks on -macOS 10.6 and later. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/RECORD deleted file mode 100644 index a40212c2..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -CFOpenDirectory/__init__.py,sha256=Kh93bcq8K7v1-DZ1U7legCDxd94_PFUx99g-o1pYwYg,858 -CFOpenDirectory/__init__.pyc,, -CFOpenDirectory/_metadata.py,sha256=bDO0EZzKLc4nanhJsTVea6YxVInskFmpwhvwe2kMhm4,28802 -CFOpenDirectory/_metadata.pyc,, -OpenDirectory/__init__.py,sha256=OAml46saJ0HK0rO_ehaBDUCaZq5mvfyBTHVvSDMGGb8,601 -OpenDirectory/__init__.pyc,, -OpenDirectory/_metadata.py,sha256=8kBo_6db6p_h8RTSy9ZQiJT8DY-BULuSNifjW0ccor4,17225 -OpenDirectory/_metadata.pyc,, -pyobjc_framework_OpenDirectory-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_OpenDirectory-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_OpenDirectory-5.2.dist-info/METADATA,sha256=Ud2Y5KDIP0xBTv-vDB94_JHkiOQNMwKlLEercBQxc1M,2215 -pyobjc_framework_OpenDirectory-5.2.dist-info/RECORD,, -pyobjc_framework_OpenDirectory-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_OpenDirectory-5.2.dist-info/top_level.txt,sha256=Ugggy4DVDoWF1rBM-b-UFoygVyT8hiUwt95ZvYNIDoU,30 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/top_level.txt deleted file mode 100644 index c04fb06e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_OpenDirectory-5.2.dist-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -CFOpenDirectory -OpenDirectory diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/METADATA deleted file mode 100644 index 23c84362..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: 
pyobjc-framework-Photos -Version: 5.2 -Summary: Wrappers for the framework Photos on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Photos -Platform: MacOS X (>=10.11) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "Photos" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/RECORD deleted file mode 100644 index f24f4b96..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Photos/_Photos.so,sha256=os1UfD3_nrTo1wxhmuQj1kx6-DNRivLlFhj6MM1xh2s,9684 -Photos/__init__.py,sha256=Ar1Q_7dQYInDoD-ul5gbTBoDMD7Oudig3a09q-nz5zw,692 -Photos/__init__.pyc,, -Photos/_metadata.py,sha256=C4IKZLZ1iy3xJZhBd7pcrdsXhNw1nmoGDSSoQfawfUY,9130 -Photos/_metadata.pyc,, -pyobjc_framework_Photos-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Photos-5.2.dist-info/METADATA,sha256=8A7POHyYG8PqhOJFYOenJPNBcaCrl8pm4jbH85MwIcU,2133 -pyobjc_framework_Photos-5.2.dist-info/RECORD,, -pyobjc_framework_Photos-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_Photos-5.2.dist-info/top_level.txt,sha256=nVT86vRpn6L-r1mLwHBz7Artzq2KixZUkP5SSk-Z8z0,7 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ 
-Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/top_level.txt deleted file mode 100644 index 1fb62fd1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Photos-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Photos diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/METADATA deleted file mode 100644 index 1f06107d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-PhotosUI -Version: 5.2 -Summary: Wrappers for the framework PhotosUI on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,PhotosUI -Platform: MacOS X (>=10.11) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "PhotosUI" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/RECORD deleted file mode 100644 index c93056de..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -PhotosUI/_PhotosUI.so,sha256=RURraKHjI9H71DBwwu5A2qXmJwwjObPTv5SlYbdrI84,13860 -PhotosUI/__init__.py,sha256=xU2zj51qb47bCnkAie-qTy63y9HgUlNzdGg2zpKX_eY,708 -PhotosUI/__init__.pyc,, -PhotosUI/_metadata.py,sha256=qkzGFd82FeCCr5ELc3WBU8qvwmzDojIvlkecAkQedU8,5651 -PhotosUI/_metadata.pyc,, -pyobjc_framework_PhotosUI-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_PhotosUI-5.2.dist-info/METADATA,sha256=T_vu0iYly0AeHt7sUE8CUsiyyau5YWxkqsaa8BOG3bM,2141 -pyobjc_framework_PhotosUI-5.2.dist-info/RECORD,, -pyobjc_framework_PhotosUI-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_PhotosUI-5.2.dist-info/top_level.txt,sha256=LLD4je82vwErLoYKsc_QNBxEecxanTBVkBq_sgUd-cw,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/top_level.txt deleted file mode 100644 index a0b43590..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PhotosUI-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -PhotosUI diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/METADATA deleted file mode 100644 index 2c8708c5..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-PreferencePanes -Version: 5.2 -Summary: Wrappers for the framework PreferencePanes on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,PreferencePanes -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "PreferencePanes" framework on macOS. This framework allows -you to write Preference Panes for the "System Preferences" application. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/RECORD deleted file mode 100644 index 4e3a2a8e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -PreferencePanes/__init__.py,sha256=Nxt2YMM-avApJ4KP36xstdTqtXnXbBzyInt1T_NAgtk,727 -PreferencePanes/__init__.pyc,, -PreferencePanes/_metadata.py,sha256=4W8oBRtXGYtzQNk9TdOtlUmzmbEK4KdGEuhdokHXKvw,1301 -PreferencePanes/_metadata.pyc,, -pyobjc_framework_PreferencePanes-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_PreferencePanes-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_PreferencePanes-5.2.dist-info/METADATA,sha256=msGdStXZBQ7hEY37NNs53v2GLBIBoUfkfdF4mZwhZ1w,2254 -pyobjc_framework_PreferencePanes-5.2.dist-info/RECORD,, -pyobjc_framework_PreferencePanes-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_PreferencePanes-5.2.dist-info/top_level.txt,sha256=y9Bw1sBWLp3GhL2V_lZ9kZA4blOgoRWiUSu5PPMAhew,16 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/top_level.txt deleted file mode 100644 index 33a6367e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PreferencePanes-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -PreferencePanes diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/METADATA deleted file mode 100644 index 344b553c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/METADATA +++ /dev/null @@ -1,59 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-PubSub -Version: 5.2 -Summary: Wrappers for the framework PubSub on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,PubSub -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "PubSub" framework on macOS 10.5 or later. This framework -offers developers a way to subscribe to web feeds (RSS, Atom) from their -applications. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - -Note that this framework is deprecated in OSX 10.9 - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/RECORD deleted file mode 100644 index 73f2d2b4..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -PubSub/__init__.py,sha256=ka1rP_13J_HgENHBpmlJepi1TgazIqKwCyq1pu4psno,661 -PubSub/__init__.pyc,, -PubSub/_metadata.py,sha256=Ma56af8SmqVvE_DXbtphLZz8WOtyhMyzrvO_x5PyDG8,4210 -PubSub/_metadata.pyc,, -pyobjc_framework_PubSub-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_PubSub-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_PubSub-5.2.dist-info/METADATA,sha256=Jrvh0EmF9iHrLSCalpVkFUeVG5IEKq0ou1Bx8_68iNk,2302 -pyobjc_framework_PubSub-5.2.dist-info/RECORD,, -pyobjc_framework_PubSub-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_PubSub-5.2.dist-info/top_level.txt,sha256=65LvGWykaMu5dl5U03p_i63i6VFn2KkLLx_O0dVICyQ,7 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/top_level.txt deleted file mode 100644 index 95b96aa3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_PubSub-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -PubSub diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/METADATA deleted file mode 100644 index c5af47cc..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-QTKit -Version: 5.2 -Summary: Wrappers for the framework QTKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,QTKit -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - -Wrappers for the "QTKit" framework on macOS. QTKit is an modern, -object-oriented framework for working with QuickTime media in Cocoa -applications, and is a replacement for the older Carbon-based Quicktime -framework. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - -Note that this framework is deprecated in OSX 10.9 - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/RECORD deleted file mode 100644 index d2839204..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -QTKit/_QTKit.so,sha256=s-AJuc_IqHltLQv3v2ff_jsdxNMIroaBIGoZSB6JCg0,9620 -QTKit/__init__.py,sha256=XMcEU9JpCNVclxuwGGTBeEQl7U2bv9MXNdbsIBu5wSc,706 -QTKit/__init__.pyc,, -QTKit/_metadata.py,sha256=dOyRaE0xT9vN_5v_8EkFM1DS0rR8wGLbbb3PzBaLtEQ,53882 -QTKit/_metadata.pyc,, -pyobjc_framework_QTKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_QTKit-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_QTKit-5.2.dist-info/METADATA,sha256=AIWZwZ7iDBZ5zdjQbH6jKWJozZWev7Upo7vsUxyssTw,2390 -pyobjc_framework_QTKit-5.2.dist-info/RECORD,, -pyobjc_framework_QTKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_QTKit-5.2.dist-info/top_level.txt,sha256=ZuGgB3KAqsY7UEhvqmS4q6Hg8Lus8nZTZMgTlYE9Qto,6 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 4cc542f0..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_QTKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -QTKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/METADATA deleted file mode 100644 index 354e333c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/METADATA +++ /dev/null @@ -1,90 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Quartz -Version: 5.2 -Summary: Wrappers for the Quartz frameworks on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Quartz,Quartz.QuickLookUI,Quartz.CoreGraphics,Quartz.PDFKit,Quartz.CoreVideo,Quartz.ImageIO,Quartz.QuartzFilters,Quartz.QuartzCore,Quartz.QuartzComposer,Quartz.ImageKit -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "Quartz" related frameworks on macOS. These frameworks -provide a number of graphics related API's. 
- -The frameworks wrapped by this package are: - - * CoreGraphics - 2D Graphics, based on the PDF model - - * ImageIO - Reading and writing images - - * QuartzComposer - Working with "Quartz Composer" compositions - - * QuartzCore - Image processing and video image manipulation - - * QuarzFilters - Image effects - - * ImageKit - iPhoto-like views - - * PDFKit - Working with PDF files - - * CoreVideo - Managing digital video - -All frameworks can be accessed by importing the 'Quartz' module. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - -NOTE: The actual wrappers are subpackages of ``Quartz``, they are not toplevel -packages to avoid name clashes with Apple provided wrappers for CoreGraphics. - -WARNING: Running the unittests will change your display settings during the -testrun, which will probably mess up your window layout. - -NEWS -==== - -2.4 ---- - -* Add wrapper for ``CGBitmapContextCreateWithData`` - - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/RECORD deleted file mode 100644 index c54945cf..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/RECORD +++ /dev/null @@ -1,57 +0,0 @@ -Quartz/CoreGraphics/__init__.py,sha256=dgN0LRf8EB9NMof02tS2kzGNkXIRvEFb2ZJnQ-7r-_M,4709 -Quartz/CoreGraphics/__init__.pyc,, -Quartz/CoreGraphics/_callbacks.so,sha256=5ZEo9_x_vENAfJ0R0vbPmRNKy8sDqVZLNFbEfJ9W7nk,38968 -Quartz/CoreGraphics/_contextmanager.py,sha256=TYSEw-qone3v9WpBzXxPsFDOk435WwMf7ysUqh0o3BA,2478 -Quartz/CoreGraphics/_contextmanager.pyc,, -Quartz/CoreGraphics/_coregraphics.so,sha256=jsr8T1khX5AVLba-mqjKJA56R1rsXv3hVXKQDiRkRi0,22024 -Quartz/CoreGraphics/_doubleindirect.so,sha256=UHK8VEMvmv3VEZF2S-o8eLpej5ZCEeij1wGSvActtO0,11484 -Quartz/CoreGraphics/_inlines.so,sha256=5Zee13WUj4-ojTHqcEEZo68cjYGPNNZkgi9vkOegcPY,10304 -Quartz/CoreGraphics/_metadata.py,sha256=LXW4e6CBc0ONo81dfvi3sJ7WwYmIqXPZyVu9Fir7o8w,99848 -Quartz/CoreGraphics/_metadata.pyc,, -Quartz/CoreGraphics/_sortandmap.so,sha256=eyyUtQbLO0ahAJqlzPvp7IUYK_2k1Lknju3KdAodC5w,16076 -Quartz/CoreVideo/_CVPixelBuffer.so,sha256=giQKJGXJEm1cQAUAcmMIMRqZ8PkAXrwouhiXfYNOtgo,11396 -Quartz/CoreVideo/__init__.py,sha256=yk1SNFv_fGyTagt1Blgr2U3S8VjFWaqKYKUGHS6GBr4,943 -Quartz/CoreVideo/__init__.pyc,, -Quartz/CoreVideo/_metadata.py,sha256=CKjaKZlUUNkpSXCxNWEJ_RISbRnSkyAVmyLf9qxkBNI,26425 -Quartz/CoreVideo/_metadata.pyc,, -Quartz/ImageIO/PyObjCOverrides.bridgesupport,sha256=L0tbueArSY_7zXm80-FvybkZyFSV1SKW1ScAPw8BWto,25143 -Quartz/ImageIO/__init__.py,sha256=GkR1A385c0tDxf6VA4eFswlbXAtvqm2KtHXf_eNOG44,765 -Quartz/ImageIO/__init__.pyc,, -Quartz/ImageIO/_metadata.py,sha256=6iAnXnYsvzOofHRp-uR0ikZG2Z6FHstCcHZXwTjElZM,41917 -Quartz/ImageIO/_metadata.pyc,, -Quartz/ImageKit/__init__.py,sha256=1HzIMOBPpsiHS11uUIrKQ_H8QiegtQ2cJqqaGEuqB-c,998 -Quartz/ImageKit/__init__.pyc,, -Quartz/ImageKit/_imagekit.so,sha256=x5Ob6ibKgpRp2_UierED3B4Iu3llbf1XZPSlyOwNF6w,13168 -Quartz/ImageKit/_metadata.py,sha256=GR1oNkEiVaLSdwc-xPRlrH3RfUAZz7FVfpdMesZd8oA,20750 
-Quartz/ImageKit/_metadata.pyc,, -Quartz/PDFKit/_PDFKit.so,sha256=PWOiyF1YhS14pzMF859qryqyJTOTKLkXsnERTZ29Ajk,9056 -Quartz/PDFKit/__init__.py,sha256=eWHws1qmvwS04UfBm0oZzGEgBXVuVn6S7OTPQdkaA14,743 -Quartz/PDFKit/__init__.pyc,, -Quartz/PDFKit/_metadata.py,sha256=zy-0MWaxoQus9_R1Z5mPhovylevr20aFPkeW7EUIQfM,19602 -Quartz/PDFKit/_metadata.pyc,, -Quartz/QuartzComposer/__init__.py,sha256=cz_sMpbGqceYOz0J3cNspUIi9xr9FZyw4MF33HxujJI,821 -Quartz/QuartzComposer/__init__.pyc,, -Quartz/QuartzComposer/_metadata.py,sha256=xu9Y1UNYMi2Sz9DclHLzQ_Il_BvLD4VixKkknAyN0Gk,12987 -Quartz/QuartzComposer/_metadata.pyc,, -Quartz/QuartzCore/__init__.py,sha256=N56JGOw6Q5F0hFPqWXPBnySL00OXmPXQdU3-ZXEhQLs,1740 -Quartz/QuartzCore/__init__.pyc,, -Quartz/QuartzCore/_metadata.py,sha256=7u6XP0iWgYnpO-l4_qm8KjagQ2NRHw80e-j7j0gnY0g,29322 -Quartz/QuartzCore/_metadata.pyc,, -Quartz/QuartzCore/_quartzcore.so,sha256=SZ7OeDWpZsQ5B5zDgtzFueqhZtxAMlK0cmoMHSMfIqY,9160 -Quartz/QuartzFilters/__init__.py,sha256=CPWDB-bomnn2foDHoq_uu3YKgU4SpOa4di9-dJbbWpY,763 -Quartz/QuartzFilters/__init__.pyc,, -Quartz/QuartzFilters/_metadata.py,sha256=GeKvRJO1cYyt79DTTrwlhTJRL1jmd3WrNOIyj7p3kGA,1987 -Quartz/QuartzFilters/_metadata.pyc,, -Quartz/QuickLookUI/_QuickLookUI.so,sha256=oNO4yAw3a11hyThptj7gEb-StZU6vCbQBRW0clvQ2YU,9064 -Quartz/QuickLookUI/__init__.py,sha256=WsQweYd-cU4xgrR9OAOVLp3rljXnmRcAe9QHDRmVZjk,790 -Quartz/QuickLookUI/__init__.pyc,, -Quartz/QuickLookUI/_metadata.py,sha256=SBg-0A9Y40db5x7DlOuPtqK_mgAghzj4anFVUWIkbus,3633 -Quartz/QuickLookUI/_metadata.pyc,, -Quartz/__init__.py,sha256=r0wIec2WxmwDffogKV_wPZopCFhyAm3HTirL1rXmePQ,1727 -Quartz/__init__.pyc,, -pyobjc_framework_Quartz-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Quartz-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_Quartz-5.2.dist-info/METADATA,sha256=zotvT6xJ39goCEci1E8LcHZuGRNjDWhkKDb5LRdxtkI,3215 -pyobjc_framework_Quartz-5.2.dist-info/RECORD,, -pyobjc_framework_Quartz-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_Quartz-5.2.dist-info/top_level.txt,sha256=Srn1VNxu_Yy83DSGkX2Fppq7ox39xmrFcCVqND3nHgc,7 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/top_level.txt deleted file mode 100644 index 7f9d98f2..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Quartz-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Quartz diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/METADATA 
b/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/METADATA deleted file mode 100644 index ed6ec2ae..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-SafariServices -Version: 5.2 -Summary: Wrappers for the framework SafariServices on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,SafariServices -Platform: MacOS X (>=10.11) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "SafariServices" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/RECORD deleted file mode 100644 index 0edd148f..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -SafariServices/_SafariServices.so,sha256=xBsuug7D8Yak-d3HgeR06P3Ag4ucIDGbLekgQndnMa4,9684 -SafariServices/__init__.py,sha256=7gUK_HmLAMKeN4l7uRv8UXO498kCPQnrEWsKT8Iwv00,772 -SafariServices/__init__.pyc,, -SafariServices/_metadata.py,sha256=7kQwHC-x8ZbGYbz3JWom4W7iSqyjKCAFVAj27rBIFHQ,7292 -SafariServices/_metadata.pyc,, -pyobjc_framework_SafariServices-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_SafariServices-5.2.dist-info/METADATA,sha256=SjAguEtcnT_aLKwbeIHGW6vW5ptkDaCdl7LRZxLEWKs,2165 -pyobjc_framework_SafariServices-5.2.dist-info/RECORD,, -pyobjc_framework_SafariServices-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_SafariServices-5.2.dist-info/top_level.txt,sha256=hfMdupWFSBCAYh1rmjy0KwfjZA2fQFIL4l_8FMxuh44,15 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/top_level.txt deleted file mode 100644 index e832ca46..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SafariServices-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -SafariServices diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/METADATA deleted file mode 100644 index 0d3e57a1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-SceneKit -Version: 5.2 -Summary: Wrappers for the framework SceneKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: 
PyObjC,SceneKit -Platform: MacOS X (>=10.7) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - -Wrappers for the "SceneKit" framework on macOS introduced in macOS 10.8. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/RECORD deleted file mode 100644 index 70b729ca..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -SceneKit/_SceneKit.so,sha256=fAdUXJT-wa9l2KR7cLTwDGVafuuGdeNBLiO1LigRkE0,22340 -SceneKit/__init__.py,sha256=ZbkPAktVU1l0VVxPFspB0-cH3JfUbUmx0Vb-gGX76s8,1239 -SceneKit/__init__.pyc,, -SceneKit/_inlines.so,sha256=9FmFj3LY_JQJyR-I210hja4U53BEWeqQ6VbajGG4gNo,26092 -SceneKit/_metadata.py,sha256=8iOMGHY8UDAryCnXY5oxuFtGA1h1Ai4WyJG4tyWnws8,45780 -SceneKit/_metadata.pyc,, -pyobjc_framework_SceneKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_SceneKit-5.2.dist-info/METADATA,sha256=6yLez2e64udWW_EG1wROMukQfMn-tZlgr855CuRq3K0,2212 -pyobjc_framework_SceneKit-5.2.dist-info/RECORD,, -pyobjc_framework_SceneKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_SceneKit-5.2.dist-info/top_level.txt,sha256=_7EB7slh_d5YNPX_L7q0WgOt3hOc59D_9z9f1uvQC6I,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) 
-Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 09c11240..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SceneKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -SceneKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/METADATA deleted file mode 100644 index 848b3f37..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ScreenSaver -Version: 5.2 -Summary: Wrappers for the framework ScreenSaver on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,ScreenSaver -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "ScreenSaver" framework on macOS. This frameworks allows -you to write custom screensaver modules. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/RECORD deleted file mode 100644 index 1a59c1d3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -ScreenSaver/__init__.py,sha256=DFb7715Ku8Bifnihs5LyMAX2AR2dQ85jzgfalys8pBs,744 -ScreenSaver/__init__.pyc,, -ScreenSaver/_inlines.so,sha256=LbOEyUG411Ti0w8h2N8gsM6g15vkg6n1-_TMR2hiBsc,9888 -ScreenSaver/_metadata.py,sha256=QECeO5F8gk_rav5E9fjXOjuRyKBtCtbGBglaSYqNFXA,1423 -ScreenSaver/_metadata.pyc,, -pyobjc_framework_ScreenSaver-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ScreenSaver-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_ScreenSaver-5.2.dist-info/METADATA,sha256=VYJgWGOUhp0HL-TufQdIAmX7E9rxcJbv41SgVpEtB-M,2207 -pyobjc_framework_ScreenSaver-5.2.dist-info/RECORD,, -pyobjc_framework_ScreenSaver-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_ScreenSaver-5.2.dist-info/top_level.txt,sha256=Y5Vby1Q1vDkNln9V0BeT-f_-2gcPWeq62czo4wLLzaY,12 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/top_level.txt deleted file mode 100644 index 1e4b913b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScreenSaver-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ScreenSaver diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/METADATA deleted file mode 100644 index 1c6cb06c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ScriptingBridge -Version: 5.2 -Summary: Wrappers for the framework ScriptingBridge on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,ScriptingBridge -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "ScriptingBrige" framework on macOS 10.5 or later. This -framework provides an easy way to use the scripting functionality of -applications ("AppleScript") from Cocoa applications. - -The functionality of this framework is comparable to that off "appscript", -although the latter is better tuned for use in Python applications and is -available on macOS 10.4 as well. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/RECORD deleted file mode 100644 index 15363731..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -ScriptingBridge/_ScriptingBridge.so,sha256=RRInT2iIJSjUB5CnAoBu6wm4dGeeMIxz_d3UkAHlryI,9700 -ScriptingBridge/__init__.py,sha256=u1Tn3VivHiLMVweVSu0OLVLqSSXcuTFrSaOCs-Yorx4,1010 -ScriptingBridge/__init__.pyc,, -ScriptingBridge/_metadata.py,sha256=w-y-9zBB00qwI24th-3rR-h_7IYvx313-QX6uccXgDM,1288 -ScriptingBridge/_metadata.pyc,, -pyobjc_framework_ScriptingBridge-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ScriptingBridge-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_ScriptingBridge-5.2.dist-info/METADATA,sha256=6XDPkuCdTJzK41M6mB-rsiE-Tpumic7tq3T16l9K-0o,2492 -pyobjc_framework_ScriptingBridge-5.2.dist-info/RECORD,, -pyobjc_framework_ScriptingBridge-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_ScriptingBridge-5.2.dist-info/top_level.txt,sha256=KRPJAiSamAl7qf45z7eKfcvQhkNXm0teMSN2a66UGQA,16 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/top_level.txt deleted file mode 100644 index bdbb4676..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ScriptingBridge-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ScriptingBridge diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald 
Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/METADATA deleted file mode 100644 index 093edcc5..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,52 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-SearchKit -Version: 5.2 -Summary: Wrappers for the framework SearchKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,SearchKit -Platform: MacOS X (>=10.5) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-CoreServices (>=5.2) - - -Deprecated wrappers for the "SearchKit" framework on macOS. - -Use the CoreServices package instead. 
- - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/RECORD deleted file mode 100644 index 0a56f9a9..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,8 +0,0 @@ -SearchKit/__init__.py,sha256=ozk0QVHmjQ4kQ3SsT3dPAf1xV0GIAG4BTq-HwQhPQiY,753 -SearchKit/__init__.pyc,, -pyobjc_framework_SearchKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_SearchKit-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_SearchKit-5.2.dist-info/METADATA,sha256=WZYpZjoyejxPs4Pu9naJa7I1nyMOSoLmyDPuUCAo_wg,1954 -pyobjc_framework_SearchKit-5.2.dist-info/RECORD,, -pyobjc_framework_SearchKit-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_SearchKit-5.2.dist-info/top_level.txt,sha256=oQvNUqt3j047HWGV5gWgpFx8dHXi6OyyKljYxVN_w4k,10 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/top_level.txt deleted file mode 100644 index d7ffcb52..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SearchKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -SearchKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/METADATA deleted file mode 100644 index 06e89d7e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Security -Version: 5.2 -Summary: Wrappers for the framework Security on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Security -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: 
Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "Security" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/RECORD deleted file mode 100644 index bc4cb769..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Security/_Security.so,sha256=oHT5dCZki2s2-g2XNiWC74VyenEM0vTaTS-KCqwDFkY,22380 -Security/__init__.py,sha256=x3jYXrBlZ2nWWew_NB9t6UPgRZ3nRZMzqjg14eqS4-M,726 -Security/__init__.pyc,, -Security/_metadata.py,sha256=bBd-aNQU8j9CYyUnlYBgvDU0KY86ssQRTjeUlA_6mus,85740 -Security/_metadata.pyc,, -pyobjc_framework_Security-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Security-5.2.dist-info/METADATA,sha256=O9Uk6loGPyIqVk_zGXZ-uCyTA3HCZT-GEouJw2qP2cI,2131 -pyobjc_framework_Security-5.2.dist-info/RECORD,, -pyobjc_framework_Security-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_Security-5.2.dist-info/top_level.txt,sha256=E8juSm7v17tdILuONAr4xMx8lWQgP1-bukRwRWImiVw,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/top_level.txt deleted file mode 100644 index 4b487b7d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Security-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Security diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/INSTALLER 
b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/METADATA deleted file mode 100644 index 19edaf4b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-SecurityFoundation -Version: 5.2 -Summary: Wrappers for the framework SecurityFoundation on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,SecurityFoundation -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Security (>=5.2) - - -Wrappers for the "SecurityFoundation" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/RECORD deleted file mode 100644 index 1d488e3a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -SecurityFoundation/__init__.py,sha256=GezuJEHuOEc47xYIpOrIvPqMzuDVttT0xikQKyIGGfw,778 -SecurityFoundation/__init__.pyc,, -SecurityFoundation/_metadata.py,sha256=zVJXms8gzkfVbSHl0JCOV2H-cit_wPdepq44suiHKKc,844 -SecurityFoundation/_metadata.pyc,, -pyobjc_framework_SecurityFoundation-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_SecurityFoundation-5.2.dist-info/METADATA,sha256=E1lWYeGaGn6wPPWqT5CRa44VilnlYhnC-cVaVIWtQ6s,2221 -pyobjc_framework_SecurityFoundation-5.2.dist-info/RECORD,, -pyobjc_framework_SecurityFoundation-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_SecurityFoundation-5.2.dist-info/top_level.txt,sha256=hDzD3xAuSxuJXj4LnEmOT9Fm1Vkd95Cm8kx6CMckVI8,19 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/top_level.txt deleted file mode 100644 index 7c8f4249..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityFoundation-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -SecurityFoundation diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/METADATA deleted file mode 100644 index 91e7b729..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-SecurityInterface -Version: 5.2 -Summary: Wrappers for the framework SecurityInterface on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: 
pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,SecurityInterface -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Security (>=5.2) - -Wrappers for the "SecurityInterface" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/RECORD deleted file mode 100644 index a5145c48..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -SecurityInterface/_SecurityInterface.so,sha256=VmAdEDrLcz1eiCediX9vV-2EKOiSO35RifQPLQU_Ugs,15504 -SecurityInterface/__init__.py,sha256=xXIadVCayD8jEe06wi-eG1EPGU8sHS7FygRlWArqf68,809 -SecurityInterface/__init__.pyc,, -SecurityInterface/_metadata.py,sha256=P_f-9NeCB7LEzxLwW-J6OZ4JBU3793pkCF0iKw9Q6fU,6916 -SecurityInterface/_metadata.pyc,, -pyobjc_framework_SecurityInterface-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_SecurityInterface-5.2.dist-info/METADATA,sha256=bbtt1SNcJE8qo1Z_M6FYgx-y1fz8Oo8sJQ1JynC05Ws,2216 -pyobjc_framework_SecurityInterface-5.2.dist-info/RECORD,, -pyobjc_framework_SecurityInterface-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_SecurityInterface-5.2.dist-info/top_level.txt,sha256=bMSZX_XGj6KTsSLm5OOOvDNpl2Ogur0OToOJvb7h8RQ,18 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- 
a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/top_level.txt deleted file mode 100644 index 2c8411dc..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SecurityInterface-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -SecurityInterface diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/METADATA deleted file mode 100644 index 416b4ea9..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/METADATA +++ /dev/null @@ -1,57 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-ServiceManagement -Version: 5.2 -Summary: Wrappers for the framework ServiceManagement on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,ServiceManagement -Platform: MacOS X (>=10.6) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for framework 'ServiceManagement' on macOS 10.6. This framework -provides an interface to the system services subsystem, which basicly means -a this provides a secure and object-oriented interface from launchd. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/RECORD deleted file mode 100644 index cbe59d7c..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -ServiceManagement/__init__.py,sha256=YoLmLEeQw0ep0ky3WnxyCfVMyHvzn9iauFxACrt8-L0,751 -ServiceManagement/__init__.pyc,, -ServiceManagement/_metadata.py,sha256=QSn8Ej2HjMJ7-pFUg1miwyLZsZFPgare1dTaCT2mi9E,1935 -ServiceManagement/_metadata.pyc,, -pyobjc_framework_ServiceManagement-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_ServiceManagement-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_ServiceManagement-5.2.dist-info/METADATA,sha256=JCZTCRrQwAIs8CXqZzcmE3wiaQuIS0PJH1Iv3QUUAoA,2338 -pyobjc_framework_ServiceManagement-5.2.dist-info/RECORD,, -pyobjc_framework_ServiceManagement-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_ServiceManagement-5.2.dist-info/top_level.txt,sha256=1PUYbDTBUGuOV7N8dmUnGSdRDw2dkJNHcC-J6p2obeg,18 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/top_level.txt deleted file mode 100644 index e5f512a4..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_ServiceManagement-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ServiceManagement diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/METADATA deleted file mode 100644 index 69e7a080..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/METADATA +++ /dev/null @@ -1,52 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Social -Version: 5.2 -Summary: Wrappers for the framework Social on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Social -Platform: MacOS X (>=10.8) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "Social" framework on macOS 10.8 or later. - -Note that this framework is only available for 64-bit code. 
- - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/RECORD deleted file mode 100644 index 0137e490..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Social/__init__.py,sha256=QbSrBXUznLv1QXdlzkEzi0Dzlm54Qb-ZfzaVX0T0NEM,680 -Social/__init__.pyc,, -Social/_metadata.py,sha256=opH3OnehRJnHNbl3Fa35gxuh5KQr6RqdOWfA6Q0oAxA,1021 -Social/_metadata.pyc,, -pyobjc_framework_Social-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Social-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_Social-5.2.dist-info/METADATA,sha256=sBR5UR3H8Qh58nDfeEoitlpfy2zDWKiHEjyYVwuHPL4,1960 -pyobjc_framework_Social-5.2.dist-info/RECORD,, -pyobjc_framework_Social-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_Social-5.2.dist-info/top_level.txt,sha256=5p9kSH0qqAgq5R61p0BQPlhe2goaH8GcBRB9FNaLZ4I,7 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/top_level.txt deleted file mode 100644 index f0a8e7e5..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Social-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Social diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/METADATA deleted file mode 100644 index 24c03561..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-SpriteKit -Version: 5.2 -Summary: Wrappers for the framework SpriteKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,SpriteKit -Platform: MacOS X (>=10.9) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: 
Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) - -Wrappers for the "SpriteKit" framework on macOS introduced in macOS 10.9. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/RECORD deleted file mode 100644 index 34c27888..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -SpriteKit/_SpriteKit.so,sha256=PzGIRCbVAamvNHs_5aShG2y15VY7qKlDKYV65WqR4LQ,15696 -SpriteKit/__init__.py,sha256=mruq-I44VTf1ERYvT_K0P-ppJ4dp4XQ5h-bQZTiA0dI,774 -SpriteKit/__init__.pyc,, -SpriteKit/_metadata.py,sha256=W9y2fEtyrs4cpgxgfDG6zO6W1oYcQ2M5gHRnL-RaASY,17618 -SpriteKit/_metadata.pyc,, -pyobjc_framework_SpriteKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_SpriteKit-5.2.dist-info/METADATA,sha256=1HkAiro90SARJemXCUlkaL20lYb2GOYWg9vOMV4_fOQ,2216 -pyobjc_framework_SpriteKit-5.2.dist-info/RECORD,, -pyobjc_framework_SpriteKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_SpriteKit-5.2.dist-info/top_level.txt,sha256=TWXUfhgRD6o9FTdo1yfVvykimt6EqE8jkY17iPcv6SU,10 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 9d188eb2..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SpriteKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -SpriteKit diff --git 
a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/METADATA deleted file mode 100644 index e5b5f7a9..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-StoreKit -Version: 5.2 -Summary: Wrappers for the framework StoreKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,StoreKit -Platform: MacOS X (>=10.7) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "StoreKit" framework on macOS 10.7 or later. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/RECORD deleted file mode 100644 index b110ce70..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -StoreKit/_StoreKit.so,sha256=PU2FCHSQU-kdZrnKE3mmucIwW4acC9McREOqbpcJm-E,13820 -StoreKit/__init__.py,sha256=1bkPkrO_bb45e0NeevFnfcVuCUZRI-4Y8mqmPhE4mYk,710 -StoreKit/__init__.pyc,, -StoreKit/_metadata.py,sha256=sk5BZydi7kL_SkYDGLxL_u7AgUWb1Jtaw_89J64pYdc,3617 -StoreKit/_metadata.pyc,, -pyobjc_framework_StoreKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_StoreKit-5.2.dist-info/METADATA,sha256=f4Bn_juAQyYAyZKTc1KyezVGGyqNE44L3niA7n3HnIM,2154 -pyobjc_framework_StoreKit-5.2.dist-info/RECORD,, -pyobjc_framework_StoreKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_StoreKit-5.2.dist-info/top_level.txt,sha256=4smnkO902dwBWt35yDEWhiG0tVsZ-Eot9sm1iF4znlI,9 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 516acb01..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_StoreKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -StoreKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/METADATA deleted file mode 100644 index b3ba68aa..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/METADATA +++ /dev/null @@ -1,61 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-SyncServices -Version: 5.2 -Summary: Wrappers for the framework SyncServices on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,SyncServices -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-CoreData (>=5.2) - -Wrappers for the "SyncServices" framework on macOS. - -Sync Services is a framework containing all the components you need -to sync your applications and devices. If your application uses -Sync Services, user data can be synced with other applications and -devices on the same computer, or other computers over the network via -MobileMe. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/RECORD deleted file mode 100644 index 983d4586..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -SyncServices/_SyncServices.so,sha256=aWMtnmy60mRjYZ_HA36iKFgmAkRVch6XCXJX7YBClJM,13940 -SyncServices/__init__.py,sha256=v02a0LAN1JazLvh4oLK-zaGpD8g8fkAosyhgsRE9TV0,740 -SyncServices/__init__.pyc,, -SyncServices/_metadata.py,sha256=NavKJMElm8HWEfxbngakY2je_brUFi8GWf15hu3-f9k,19860 -SyncServices/_metadata.pyc,, -pyobjc_framework_SyncServices-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_SyncServices-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_SyncServices-5.2.dist-info/METADATA,sha256=LTt0DfwO1PLgngvoQHyROGJp-xxOOti-_HINnNQAYKM,2476 -pyobjc_framework_SyncServices-5.2.dist-info/RECORD,, -pyobjc_framework_SyncServices-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_SyncServices-5.2.dist-info/top_level.txt,sha256=OWWKyy6xR4iQps-xA51Q27RW0bzcqSHsJLAufQoFJw0,13 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/top_level.txt deleted file mode 100644 index 7a4fadaa..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SyncServices-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -SyncServices diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, 
Steve Majewski, Lele Gaifax, et.al. -Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/METADATA deleted file mode 100644 index e2a8eb5b..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/METADATA +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-SystemConfiguration -Version: 5.2 -Summary: Wrappers for the framework SystemConfiguration on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,SystemConfiguration -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for framework 'SystemConfiguration'. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/RECORD deleted file mode 100644 index 36375dfd..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -SystemConfiguration/__init__.py,sha256=8WjLDoRY4-vNtaMExXwgZ0t6RAJTkZ5knbVewmKn7yU,1015 -SystemConfiguration/__init__.pyc,, -SystemConfiguration/_manual.so,sha256=rzOrNOixM4AjnxrsI1jRDiS704T0NqVj1QyUp5wukMU,17664 -SystemConfiguration/_metadata.py,sha256=xtfyInvYcfDc-AxF2oU8lbd3QIN7h9Ezmu4RQAoXOds,37116 -SystemConfiguration/_metadata.pyc,, -pyobjc_framework_SystemConfiguration-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_SystemConfiguration-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_SystemConfiguration-5.2.dist-info/METADATA,sha256=DJU7CohpjGg6nFRWfYD8UN_yjIBTaOMVWPKkrJBUrv4,2162 -pyobjc_framework_SystemConfiguration-5.2.dist-info/RECORD,, -pyobjc_framework_SystemConfiguration-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_SystemConfiguration-5.2.dist-info/top_level.txt,sha256=FDYGJ1K5xdSa3HCUwWN_ZDJg48iG_8ywPWKhbwU6FU8,20 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/top_level.txt deleted file mode 100644 index 7dc148e1..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_SystemConfiguration-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -SystemConfiguration diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/METADATA deleted file mode 100644 index 3a41957e..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-VideoToolbox 
-Version: 5.2 -Summary: Wrappers for the framework VideoToolbox on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,VideoToolbox -Platform: MacOS X (>=10.8) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) -Requires-Dist: pyobjc-framework-CoreMedia (>=5.2) - -Wrappers for the "VideoToolbox" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/RECORD deleted file mode 100644 index ead1b41d..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -VideoToolbox/_VideoToolbox.so,sha256=cNRjGTmuSV6uNUR7CNbnGv_MZNyNRNdy_imEON9tkQw,10712 -VideoToolbox/__init__.py,sha256=VsSXHmcpcbUfnQ9-bKbt3NC3ruhy2iPTjQCiQNkxoTQ,824 -VideoToolbox/__init__.pyc,, -VideoToolbox/_metadata.py,sha256=kXf7dw8RxAJDSBz7Ef3kqt_pFZRxhkBu8ETz3Q26XFk,20810 -VideoToolbox/_metadata.pyc,, -pyobjc_framework_VideoToolbox-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_VideoToolbox-5.2.dist-info/METADATA,sha256=40_iUAk5INPKYEegMn3UCQFRT-8CXf8whpN8sbPf8Ws,2253 -pyobjc_framework_VideoToolbox-5.2.dist-info/RECORD,, -pyobjc_framework_VideoToolbox-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_VideoToolbox-5.2.dist-info/top_level.txt,sha256=PuJ8lJztROUl0IhvkiHWyfW6KPR9kYarm8aABOijzj0,13 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/WHEEL 
b/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/top_level.txt deleted file mode 100644 index 68e51bd8..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_VideoToolbox-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -VideoToolbox diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/METADATA deleted file mode 100644 index cd49898a..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-Vision -Version: 5.2 -Summary: Wrappers for the framework Vision on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,Vision -Platform: MacOS X (>=10.13) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) -Requires-Dist: pyobjc-framework-Quartz (>=5.2) -Requires-Dist: pyobjc-framework-CoreML (>=5.2) - -Wrappers for the "Vision" framework on macOS. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/RECORD deleted file mode 100644 index a142c5e6..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Vision/_Vision.so,sha256=7nCTS09bxXck4No_jXz_Ah0HBgBhAjWRcYRIjqS0ym0,9684 -Vision/__init__.py,sha256=fQCG0_-D_bADJYUqRwZXY3dC_2aPlrUKI9kpVKIQ2gI,743 -Vision/__init__.pyc,, -Vision/_metadata.py,sha256=8i8kF_YvJyNUq1kVv5vUoStHaRNM202_50cW4S14WPY,10617 -Vision/_metadata.pyc,, -pyobjc_framework_Vision-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_Vision-5.2.dist-info/METADATA,sha256=LAfwfkWg1aHYZwcFhK2KkcTirQ3zQ5rDJWvO2RgxJfg,2227 -pyobjc_framework_Vision-5.2.dist-info/RECORD,, -pyobjc_framework_Vision-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_Vision-5.2.dist-info/top_level.txt,sha256=BD3CRhsRDMFf_bOjNClvMQWv9oYadAV0TOQmAbaOljs,7 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/top_level.txt deleted file mode 100644 index b2332cc6..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_Vision-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -Vision diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/LICENSE.txt deleted file mode 100644 index dfab07f7..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,10 +0,0 @@ -(This is the MIT license, note that libffi-src is a separate product with its own license) - -Copyright 2002, 2003 - Bill Bumgarner, Ronald Oussoren, Steve Majewski, Lele Gaifax, et.al. 
-Copyright 2003-2019 - Ronald Oussoren - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/METADATA deleted file mode 100644 index a8c4db72..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/METADATA +++ /dev/null @@ -1,56 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-WebKit -Version: 5.2 -Summary: Wrappers for the framework WebKit on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,WebKit,JavaScriptCore -Platform: MacOS X -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - -Wrappers for the "WebKit" and "JavaScriptCore" frameworks on macOS. The -WebKit framework contains the views and support classes for creating a -browser. The JavaScriptCore framework implements a JavaScript interpreter. 
- -These wrappers don't include documentation, please check Apple's documention -for information on how to use these frameworks and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/RECORD deleted file mode 100644 index a088a5cd..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/RECORD +++ /dev/null @@ -1,17 +0,0 @@ -JavaScriptCore/__init__.py,sha256=yxi8VCCnfk898CwBqJ7za_FeolqRFCNs30rzFhFmek4,817 -JavaScriptCore/__init__.pyc,, -JavaScriptCore/_metadata.py,sha256=zVY8kidWnQ0dKfaYGORJ1-eXd0adUtQX67NISlXSbBY,16837 -JavaScriptCore/_metadata.pyc,, -JavaScriptCore/_util.py,sha256=OjQBkilE34_gUNldWJyh5Xi5AB9MmF2Iug_tmxuJ8XA,737 -JavaScriptCore/_util.pyc,, -WebKit/_WebKit.so,sha256=2zzpASaPOxo_BD14yz3O-AQ_mTyksAxfsTpo8QIyobI,30692 -WebKit/__init__.py,sha256=YaCslp_37NJdetmfmw_bwQaQQ9aQSkiKLBEl9Mj6fns,761 -WebKit/__init__.pyc,, -WebKit/_metadata.py,sha256=yNa5R_5i86QhdTOp_5BjoEFe5S8KRlY8RfQ6HORuMzU,72150 -WebKit/_metadata.pyc,, -pyobjc_framework_WebKit-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_WebKit-5.2.dist-info/LICENSE.txt,sha256=3A9zqUv4jNpJGEdtjrUAV68DyC5HP7AbOCrrvPI4QX8,1249 -pyobjc_framework_WebKit-5.2.dist-info/METADATA,sha256=vUrfT2LeZ9fco7OYnsCAXjzPVKzyfPdssVN_k0caDEM,2312 -pyobjc_framework_WebKit-5.2.dist-info/RECORD,, -pyobjc_framework_WebKit-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_WebKit-5.2.dist-info/top_level.txt,sha256=rpU7x31arX71utmEFWwLKLiKAUMPqy6Wiktahg_HAW0,22 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/top_level.txt deleted file mode 100644 index 3ab0f502..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_WebKit-5.2.dist-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -JavaScriptCore -WebKit diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/METADATA deleted file mode 100644 index 844fb8e4..00000000 --- 
a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/METADATA +++ /dev/null @@ -1,57 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-iTunesLibrary -Version: 5.2 -Summary: Wrappers for the framework iTunesLibrary on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,iTunesLibrary -Platform: MacOS X (>=10.6) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) -Requires-Dist: pyobjc-framework-Cocoa (>=5.2) - - -Wrappers for the "iTunesLibrary" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - -Note that using the library requires a signed application bundle. 
- - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/RECORD deleted file mode 100644 index 4626f9ee..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -iTunesLibrary/__init__.py,sha256=MeuNTKQFZrIGkkiWIh99W9eVPDiw1h_6cZ8mwDUyIGk,712 -iTunesLibrary/__init__.pyc,, -iTunesLibrary/_metadata.py,sha256=txtpX_Rs-hu-vQVYPmnjW6420JVSyAQvQT6vegxt0bA,7564 -iTunesLibrary/_metadata.pyc,, -pyobjc_framework_iTunesLibrary-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_iTunesLibrary-5.2.dist-info/METADATA,sha256=FeT9O8mvDfaCTQShobD3u9ZZitHs2G2wD2nrMPEORoo,2228 -pyobjc_framework_iTunesLibrary-5.2.dist-info/RECORD,, -pyobjc_framework_iTunesLibrary-5.2.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pyobjc_framework_iTunesLibrary-5.2.dist-info/top_level.txt,sha256=FGkIMMsbHW7FGymvZfN_ML_gbVRh0HLezKN4mNMTH8w,14 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/WHEEL deleted file mode 100644 index c8240f03..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/top_level.txt deleted file mode 100644 index 5ae31af3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_iTunesLibrary-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -iTunesLibrary diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/INSTALLER b/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/METADATA b/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/METADATA deleted file mode 100644 index 70a04aaa..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/METADATA +++ /dev/null @@ -1,53 +0,0 @@ -Metadata-Version: 2.1 -Name: pyobjc-framework-libdispatch -Version: 5.2 -Summary: Wrappers for libdispatch on macOS -Home-page: https://bitbucket.org/ronaldoussoren/pyobjc -Author: Ronald Oussoren -Author-email: pyobjc-dev@lists.sourceforge.net -License: MIT License -Keywords: PyObjC,libdispatch -Platform: MacOS X (>=10.8) -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Environment :: MacOS X :: Cocoa -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: 
MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Objective C -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: User Interfaces -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: Documentation, https://pyobjc.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open -Requires-Dist: pyobjc-core (>=5.2) - -Wrappers for the "AVFoundation" framework on macOS. - -These wrappers don't include documentation, please check Apple's documention -for information on how to use this framework and PyObjC's documentation -for general tips and tricks regarding the translation between Python -and (Objective-)C frameworks - - -Project links -------------- - -* `Documentation <https://pyobjc.readthedocs.io/en/latest/>`_ - -* `Issue Tracker <https://bitbucket.org/ronaldoussoren/pyobjc/issues?status=new&status=open>`_ - -* `Repository <https://bitbucket.org/ronaldoussoren/pyobjc/>`_ - - - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/RECORD b/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/RECORD deleted file mode 100644 index 1b6fd5b0..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -libdispatch/__init__.py,sha256=3pFWTlgbze2ov8oBmkXFBmKOeJRkBR9UN4xdotQlHBQ,780 -libdispatch/__init__.pyc,, -libdispatch/_inlines.so,sha256=t0ed9WYX20gItccMmaFCJb3noKz9VFrqLXvOTuLmKzo,9436 -libdispatch/_libdispatch.so,sha256=mWfseuqll8hKloAgixMvZ7I3X-qHe6vKiDrJIjF8Y44,17008 -libdispatch/_metadata.py,sha256=VQzRx6QFig_LcdaD5A8BW3rukBKI7U16-uH9qco8Dh0,12777 -libdispatch/_metadata.pyc,, -pyobjc_framework_libdispatch-5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyobjc_framework_libdispatch-5.2.dist-info/METADATA,sha256=3q4QUgHuhpAFVUES2OVDbINKXpJHRSHhaACXsCDqzv4,2093 -pyobjc_framework_libdispatch-5.2.dist-info/RECORD,, -pyobjc_framework_libdispatch-5.2.dist-info/WHEEL,sha256=JUL6lgMeyaRhgLIiSvWA_gemMrRszq2iZsO7hI-zkCM,110 -pyobjc_framework_libdispatch-5.2.dist-info/top_level.txt,sha256=acyvxw3zEzXzfk9gVKXenaPAJF1MlJlQYiQEwKqvlM4,12 diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/WHEEL b/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/WHEEL deleted file mode 100644 index c40e1346..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp27-cp27m-macosx_10_9_x86_64 - diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/top_level.txt b/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/top_level.txt deleted file mode 100644 index a2e8e5e5..00000000 --- a/env/lib/python2.7/site-packages/pyobjc_framework_libdispatch-5.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ 
-libdispatch diff --git a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/INSTALLER b/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/LICENSE b/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/LICENSE deleted file mode 100644 index 6e0693b4..00000000 --- a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2016 Jason R Coombs <jaraco@jaraco.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/METADATA b/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/METADATA deleted file mode 100644 index 298c1e3d..00000000 --- a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/METADATA +++ /dev/null @@ -1,77 +0,0 @@ -Metadata-Version: 2.1 -Name: setuptools -Version: 41.1.0 -Summary: Easily download, build, install, upgrade, and uninstall Python packages -Home-page: https://github.com/pypa/setuptools -Author: Python Packaging Authority -Author-email: distutils-sig@python.org -License: UNKNOWN -Project-URL: Documentation, https://setuptools.readthedocs.io/ -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities -Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* -Description-Content-Type: text/x-rst; charset=UTF-8 -Provides-Extra: certs -Requires-Dist: certifi (==2016.9.26) ; extra == 'certs' -Provides-Extra: ssl -Requires-Dist: wincertstore (==0.2) ; (sys_platform == "win32") and extra == 'ssl' - -.. 
image:: https://img.shields.io/pypi/v/setuptools.svg - :target: https://pypi.org/project/setuptools - -.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg - :target: https://setuptools.readthedocs.io - -.. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20CI&logo=travis&logoColor=white - :target: https://travis-ci.org/pypa/setuptools - -.. image:: https://img.shields.io/appveyor/ci/pypa/setuptools/master.svg?label=Windows%20CI&logo=appveyor&logoColor=white - :target: https://ci.appveyor.com/project/pypa/setuptools/branch/master - -.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white - :target: https://codecov.io/gh/pypa/setuptools - -.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat - :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme - -.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg - -See the `Installation Instructions -<https://packaging.python.org/installing/>`_ in the Python Packaging -User's Guide for instructions on installing, upgrading, and uninstalling -Setuptools. - -Questions and comments should be directed to the `distutils-sig -mailing list <http://mail.python.org/pipermail/distutils-sig/>`_. -Bug reports and especially tested patches may be -submitted directly to the `bug tracker -<https://github.com/pypa/setuptools/issues>`_. - -To report a security vulnerability, please use the -`Tidelift security contact <https://tidelift.com/security>`_. -Tidelift will coordinate the fix and disclosure. - - -Code of Conduct ---------------- - -Everyone interacting in the setuptools project's codebases, issue trackers, -chat rooms, and mailing lists is expected to follow the -`PyPA Code of Conduct <https://www.pypa.io/en/latest/code-of-conduct/>`_. 
- - diff --git a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/RECORD b/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/RECORD deleted file mode 100644 index c0dd5b34..00000000 --- a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/RECORD +++ /dev/null @@ -1,186 +0,0 @@ -../../../bin/easy_install,sha256=0c85ef8QwUzqpb93W6hBEDbkKSiZFZMqbaddnAS1OsE,287 -../../../bin/easy_install-2.7,sha256=0c85ef8QwUzqpb93W6hBEDbkKSiZFZMqbaddnAS1OsE,287 -easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 -easy_install.pyc,, -pkg_resources/__init__.py,sha256=6Kq6B-JSGEFSg_2FAnl6Lmoa5by2RyjDxPBiWuAh-dw,108309 -pkg_resources/__init__.pyc,, -pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pkg_resources/_vendor/__init__.pyc,, -pkg_resources/_vendor/appdirs.py,sha256=MievUEuv3l_mQISH5SF0shDk_BNhHHzYiAPrT3ITN4I,24701 -pkg_resources/_vendor/appdirs.pyc,, -pkg_resources/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720 -pkg_resources/_vendor/packaging/__about__.pyc,, -pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 -pkg_resources/_vendor/packaging/__init__.pyc,, -pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 -pkg_resources/_vendor/packaging/_compat.pyc,, -pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 -pkg_resources/_vendor/packaging/_structures.pyc,, -pkg_resources/_vendor/packaging/markers.py,sha256=uEcBBtGvzqltgnArqb9c4RrcInXezDLos14zbBHhWJo,8248 -pkg_resources/_vendor/packaging/markers.pyc,, -pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355 -pkg_resources/_vendor/packaging/requirements.pyc,, -pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 -pkg_resources/_vendor/packaging/specifiers.pyc,, -pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 -pkg_resources/_vendor/packaging/utils.pyc,, -pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 -pkg_resources/_vendor/packaging/version.pyc,, -pkg_resources/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055 -pkg_resources/_vendor/pyparsing.pyc,, -pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 -pkg_resources/_vendor/six.pyc,, -pkg_resources/extern/__init__.py,sha256=cHiEfHuLmm6rs5Ve_ztBfMI7Lr31vss-D4wkqF5xzlI,2498 -pkg_resources/extern/__init__.pyc,, -pkg_resources/py31compat.py,sha256=-WQ0e4c3RG_acdhwC3gLiXhP_lg4G5q7XYkZkQg0gxU,558 -pkg_resources/py31compat.pyc,, -setuptools-41.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -setuptools-41.1.0.dist-info/LICENSE,sha256=wyo6w5WvYyHv0ovnPQagDw22q4h9HCHU_sRhKNIFbVo,1078 -setuptools-41.1.0.dist-info/METADATA,sha256=ieGRM1vBOzrDbP_s1TEH7NeSOMbGOqjAs9OHcmaKOR8,3305 -setuptools-41.1.0.dist-info/RECORD,, -setuptools-41.1.0.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 -setuptools-41.1.0.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239 -setuptools-41.1.0.dist-info/entry_points.txt,sha256=jBqCYDlVjl__sjYFGXo1JQGIMAYFJE-prYWUtnMZEew,2990 -setuptools-41.1.0.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38 
-setuptools-41.1.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -setuptools/__init__.py,sha256=WBpCcn2lvdckotabeae1TTYonPOcgCIF3raD2zRWzBc,7283 -setuptools/__init__.pyc,, -setuptools/_deprecation_warning.py,sha256=jU9-dtfv6cKmtQJOXN8nP1mm7gONw5kKEtiPtbwnZyI,218 -setuptools/_deprecation_warning.pyc,, -setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -setuptools/_vendor/__init__.pyc,, -setuptools/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720 -setuptools/_vendor/packaging/__about__.pyc,, -setuptools/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 -setuptools/_vendor/packaging/__init__.pyc,, -setuptools/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 -setuptools/_vendor/packaging/_compat.pyc,, -setuptools/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 -setuptools/_vendor/packaging/_structures.pyc,, -setuptools/_vendor/packaging/markers.py,sha256=Gvpk9EY20yKaMTiKgQZ8yFEEpodqVgVYtfekoic1Yts,8239 -setuptools/_vendor/packaging/markers.pyc,, -setuptools/_vendor/packaging/requirements.py,sha256=t44M2HVWtr8phIz2OhnILzuGT3rTATaovctV1dpnVIg,4343 -setuptools/_vendor/packaging/requirements.pyc,, -setuptools/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 -setuptools/_vendor/packaging/specifiers.pyc,, -setuptools/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 -setuptools/_vendor/packaging/utils.pyc,, -setuptools/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 -setuptools/_vendor/packaging/version.pyc,, -setuptools/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055 -setuptools/_vendor/pyparsing.pyc,, -setuptools/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 -setuptools/_vendor/six.pyc,, -setuptools/archive_util.py,sha256=kw8Ib_lKjCcnPKNbS7h8HztRVK0d5RacU3r_KRdVnmM,6592 -setuptools/archive_util.pyc,, -setuptools/build_meta.py,sha256=-9Nmj9YdbW4zX3TssPJZhsENrTa4fw3k86Jm1cdKMik,9597 -setuptools/build_meta.pyc,, -setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 -setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752 -setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 -setuptools/command/__init__.py,sha256=NWzJ0A1BEengZpVeqUyWLNm2bk4P3F4iL5QUErHy7kA,594 -setuptools/command/__init__.pyc,, -setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426 -setuptools/command/alias.pyc,, -setuptools/command/bdist_egg.py,sha256=be-IBpr1zhS9i6GjKANJgzkbH3ChImdWY7S-j0r2BK8,18167 -setuptools/command/bdist_egg.pyc,, -setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 -setuptools/command/bdist_rpm.pyc,, -setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 -setuptools/command/bdist_wininst.pyc,, -setuptools/command/build_clib.py,sha256=bQ9aBr-5ZSO-9fGsGsDLz0mnnFteHUZnftVLkhvHDq0,4484 -setuptools/command/build_clib.pyc,, -setuptools/command/build_ext.py,sha256=81CTgsqjBjNl_HOgCJ1lQ5vv1NIM3RBpcoVGpqT4N1M,12897 -setuptools/command/build_ext.pyc,, -setuptools/command/build_py.py,sha256=yWyYaaS9F3o9JbIczn064A5g1C5_UiKRDxGaTqYbtLE,9596 -setuptools/command/build_py.pyc,, -setuptools/command/develop.py,sha256=MQlnGS6uP19erK2JCNOyQYoYyquk3PADrqrrinqqLtA,8184 
-setuptools/command/develop.pyc,, -setuptools/command/dist_info.py,sha256=5t6kOfrdgALT-P3ogss6PF9k-Leyesueycuk3dUyZnI,960 -setuptools/command/dist_info.pyc,, -setuptools/command/easy_install.py,sha256=telww7CuPsoTtvlpY-ktnZGT85cZ6xGCGZa0vHvFJ-Q,87273 -setuptools/command/easy_install.pyc,, -setuptools/command/egg_info.py,sha256=w73EdxYSOk2gsaAiHGL2dZrCldoPiuRr2eTfqcFvCds,25570 -setuptools/command/egg_info.pyc,, -setuptools/command/install.py,sha256=a0EZpL_A866KEdhicTGbuyD_TYl1sykfzdrri-zazT4,4683 -setuptools/command/install.pyc,, -setuptools/command/install_egg_info.py,sha256=bMgeIeRiXzQ4DAGPV1328kcjwQjHjOWU4FngAWLV78Q,2203 -setuptools/command/install_egg_info.pyc,, -setuptools/command/install_lib.py,sha256=11mxf0Ch12NsuYwS8PHwXBRvyh671QAM4cTRh7epzG0,3840 -setuptools/command/install_lib.pyc,, -setuptools/command/install_scripts.py,sha256=UD0rEZ6861mTYhIdzcsqKnUl8PozocXWl9VBQ1VTWnc,2439 -setuptools/command/install_scripts.pyc,, -setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628 -setuptools/command/py36compat.py,sha256=SzjZcOxF7zdFUT47Zv2n7AM3H8koDys_0OpS-n9gIfc,4986 -setuptools/command/py36compat.pyc,, -setuptools/command/register.py,sha256=LO3MvYKPE8dN1m-KkrBRHC68ZFoPvA_vI8Xgp7vv6zI,534 -setuptools/command/register.pyc,, -setuptools/command/rotate.py,sha256=co5C1EkI7P0GGT6Tqz-T2SIj2LBJTZXYELpmao6d4KQ,2164 -setuptools/command/rotate.pyc,, -setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 -setuptools/command/saveopts.pyc,, -setuptools/command/sdist.py,sha256=gr5hFrDzUtGfp_0tu0sllzIyr3jMQegIkFmlDauQJxw,7388 -setuptools/command/sdist.pyc,, -setuptools/command/setopt.py,sha256=NTWDyx-gjDF-txf4dO577s7LOzHVoKR0Mq33rFxaRr8,5085 -setuptools/command/setopt.pyc,, -setuptools/command/test.py,sha256=oePJ49u17ENKtrM-rOrrLlRhtNnrzcSr0IW-gE9XVq0,9285 -setuptools/command/test.pyc,, -setuptools/command/upload.py,sha256=GxtNkIl7SA0r8mACkbDcSCN1m2_WPppK9gZXJmQSiow,6811 -setuptools/command/upload.pyc,, -setuptools/command/upload_docs.py,sha256=oXiGplM_cUKLwE4CWWw98RzCufAu8tBhMC97GegFcms,7311 -setuptools/command/upload_docs.pyc,, -setuptools/config.py,sha256=lz19l1AtoHctpp1_tbYZv176nrEj4Gpf7ykNIYTIkAQ,20425 -setuptools/config.pyc,, -setuptools/dep_util.py,sha256=fgixvC1R7sH3r13ktyf7N0FALoqEXL1cBarmNpSEoWg,935 -setuptools/dep_util.pyc,, -setuptools/depends.py,sha256=hC8QIDcM3VDpRXvRVA6OfL9AaQfxvhxHcN_w6sAyNq8,5837 -setuptools/depends.pyc,, -setuptools/dist.py,sha256=MRrBrgBFEwzUvrJrIgW79IepDuAeRxetGuSPky-MawQ,50248 -setuptools/dist.pyc,, -setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729 -setuptools/extension.pyc,, -setuptools/extern/__init__.py,sha256=TxeNKFMSfBMzBpBDiHx8Dh3RzsdVmvWaXhtZ03DZMs0,2499 -setuptools/extern/__init__.pyc,, -setuptools/glibc.py,sha256=X64VvGPL2AbURKwYRsWJOXXGAYOiF_v2qixeTkAULuU,3146 -setuptools/glibc.pyc,, -setuptools/glob.py,sha256=o75cHrOxYsvn854thSxE0x9k8JrKDuhP_rRXlVB00Q4,5084 -setuptools/glob.pyc,, -setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 -setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264 -setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 -setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787 -setuptools/launch.pyc,, -setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013 -setuptools/lib2to3_ex.pyc,, -setuptools/monkey.py,sha256=FGc9fffh7gAxMLFmJs2DW_OYWpBjkdbNS2n14UAK4NA,5264 -setuptools/monkey.pyc,, 
-setuptools/msvc.py,sha256=uuRFaZzjJt5Fv3ZmyKUUuLtjx12_8G9RILigGec4irI,40838 -setuptools/msvc.pyc,, -setuptools/namespaces.py,sha256=F0Nrbv8KCT2OrO7rwa03om4N4GZKAlnce-rr-cgDQa8,3199 -setuptools/namespaces.pyc,, -setuptools/package_index.py,sha256=F9LBC-hQ5fkjeEVflxif0mo_DzRMrepahdFTPenOtGM,40587 -setuptools/package_index.pyc,, -setuptools/pep425tags.py,sha256=o_D_WVeWcXZiI2xjPSg7pouGOvaWRgGRxEDK9DzAXIA,10861 -setuptools/pep425tags.pyc,, -setuptools/py27compat.py,sha256=3mwxRMDk5Q5O1rSXOERbQDXhFqwDJhhUitfMW_qpUCo,536 -setuptools/py27compat.pyc,, -setuptools/py31compat.py,sha256=h2rtZghOfwoGYd8sQ0-auaKiF3TcL3qX0bX3VessqcE,838 -setuptools/py31compat.pyc,, -setuptools/py33compat.py,sha256=SMF9Z8wnGicTOkU1uRNwZ_kz5Z_bj29PUBbqdqeeNsc,1330 -setuptools/py33compat.pyc,, -setuptools/sandbox.py,sha256=9UbwfEL5QY436oMI1LtFWohhoZ-UzwHvGyZjUH_qhkw,14276 -setuptools/sandbox.pyc,, -setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218 -setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 -setuptools/site-patch.py,sha256=OumkIHMuoSenRSW1382kKWI1VAwxNE86E5W8iDd34FY,2302 -setuptools/site-patch.pyc,, -setuptools/ssl_support.py,sha256=nLjPUBBw7RTTx6O4RJZ5eAMGgjJG8beiDbkFXDZpLuM,8493 -setuptools/ssl_support.pyc,, -setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,996 -setuptools/unicode_utils.pyc,, -setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144 -setuptools/version.pyc,, -setuptools/wheel.py,sha256=94uqXsOaKt91d9hW5z6ZppZmNSs_nO66R4uiwhcr4V0,8094 -setuptools/wheel.pyc,, -setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 -setuptools/windows_support.pyc,, diff --git a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/WHEEL b/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/WHEEL deleted file mode 100644 index 78e6f69d..00000000 --- a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.4) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/entry_points.txt b/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/entry_points.txt deleted file mode 100644 index 4159fd0a..00000000 --- a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/entry_points.txt +++ /dev/null @@ -1,65 +0,0 @@ -[console_scripts] -easy_install = setuptools.command.easy_install:main -easy_install-3.6 = setuptools.command.easy_install:main - -[distutils.commands] -alias = setuptools.command.alias:alias -bdist_egg = setuptools.command.bdist_egg:bdist_egg -bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm -bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst -build_clib = setuptools.command.build_clib:build_clib -build_ext = setuptools.command.build_ext:build_ext -build_py = setuptools.command.build_py:build_py -develop = setuptools.command.develop:develop -dist_info = setuptools.command.dist_info:dist_info -easy_install = setuptools.command.easy_install:easy_install -egg_info = setuptools.command.egg_info:egg_info -install = setuptools.command.install:install -install_egg_info = setuptools.command.install_egg_info:install_egg_info -install_lib = setuptools.command.install_lib:install_lib -install_scripts = setuptools.command.install_scripts:install_scripts -register = setuptools.command.register:register -rotate = setuptools.command.rotate:rotate -saveopts = 
setuptools.command.saveopts:saveopts -sdist = setuptools.command.sdist:sdist -setopt = setuptools.command.setopt:setopt -test = setuptools.command.test:test -upload = setuptools.command.upload:upload -upload_docs = setuptools.command.upload_docs:upload_docs - -[distutils.setup_keywords] -convert_2to3_doctests = setuptools.dist:assert_string_list -dependency_links = setuptools.dist:assert_string_list -eager_resources = setuptools.dist:assert_string_list -entry_points = setuptools.dist:check_entry_points -exclude_package_data = setuptools.dist:check_package_data -extras_require = setuptools.dist:check_extras -include_package_data = setuptools.dist:assert_bool -install_requires = setuptools.dist:check_requirements -namespace_packages = setuptools.dist:check_nsp -package_data = setuptools.dist:check_package_data -packages = setuptools.dist:check_packages -python_requires = setuptools.dist:check_specifier -setup_requires = setuptools.dist:check_requirements -test_loader = setuptools.dist:check_importable -test_runner = setuptools.dist:check_importable -test_suite = setuptools.dist:check_test_suite -tests_require = setuptools.dist:check_requirements -use_2to3 = setuptools.dist:assert_bool -use_2to3_exclude_fixers = setuptools.dist:assert_string_list -use_2to3_fixers = setuptools.dist:assert_string_list -zip_safe = setuptools.dist:assert_bool - -[egg_info.writers] -PKG-INFO = setuptools.command.egg_info:write_pkg_info -dependency_links.txt = setuptools.command.egg_info:overwrite_arg -depends.txt = setuptools.command.egg_info:warn_depends_obsolete -eager_resources.txt = setuptools.command.egg_info:overwrite_arg -entry_points.txt = setuptools.command.egg_info:write_entries -namespace_packages.txt = setuptools.command.egg_info:overwrite_arg -requires.txt = setuptools.command.egg_info:write_requirements -top_level.txt = setuptools.command.egg_info:write_toplevel_names - -[setuptools.installation] -eggsecutable = setuptools.command.easy_install:bootstrap - diff --git a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/top_level.txt b/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/top_level.txt deleted file mode 100644 index 4577c6a7..00000000 --- a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/top_level.txt +++ /dev/null @@ -1,3 +0,0 @@ -easy_install -pkg_resources -setuptools diff --git a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/zip-safe b/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/zip-safe deleted file mode 100644 index 8b137891..00000000 --- a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/env/lib/python2.7/site-packages/setuptools/__init__.py b/env/lib/python2.7/site-packages/setuptools/__init__.py deleted file mode 100644 index a71b2bbd..00000000 --- a/env/lib/python2.7/site-packages/setuptools/__init__.py +++ /dev/null @@ -1,228 +0,0 @@ -"""Extensions to the 'distutils' for large or complex distributions""" - -import os -import sys -import functools -import distutils.core -import distutils.filelist -import re -from distutils.errors import DistutilsOptionError -from distutils.util import convert_path -from fnmatch import fnmatchcase - -from ._deprecation_warning import SetuptoolsDeprecationWarning - -from setuptools.extern.six import PY3, string_types -from setuptools.extern.six.moves import filter, map - -import setuptools.version -from setuptools.extension import Extension -from setuptools.dist import Distribution, Feature -from setuptools.depends import 
Require -from . import monkey - -__metaclass__ = type - - -__all__ = [ - 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'SetuptoolsDeprecationWarning', - 'find_packages' -] - -if PY3: - __all__.append('find_namespace_packages') - -__version__ = setuptools.version.__version__ - -bootstrap_install_from = None - -# If we run 2to3 on .py files, should we also convert docstrings? -# Default: yes; assume that we can detect doctests reliably -run_2to3_on_doctests = True -# Standard package names for fixer packages -lib2to3_fixer_packages = ['lib2to3.fixes'] - - -class PackageFinder: - """ - Generate a list of all Python packages found within a directory - """ - - @classmethod - def find(cls, where='.', exclude=(), include=('*',)): - """Return a list all Python packages found within directory 'where' - - 'where' is the root directory which will be searched for packages. It - should be supplied as a "cross-platform" (i.e. URL-style) path; it will - be converted to the appropriate local path syntax. - - 'exclude' is a sequence of package names to exclude; '*' can be used - as a wildcard in the names, such that 'foo.*' will exclude all - subpackages of 'foo' (but not 'foo' itself). - - 'include' is a sequence of package names to include. If it's - specified, only the named packages will be included. If it's not - specified, all found packages will be included. 'include' can contain - shell style wildcard patterns just like 'exclude'. - """ - - return list(cls._find_packages_iter( - convert_path(where), - cls._build_filter('ez_setup', '*__pycache__', *exclude), - cls._build_filter(*include))) - - @classmethod - def _find_packages_iter(cls, where, exclude, include): - """ - All the packages found in 'where' that pass the 'include' filter, but - not the 'exclude' filter. - """ - for root, dirs, files in os.walk(where, followlinks=True): - # Copy dirs to iterate over it, then empty dirs. - all_dirs = dirs[:] - dirs[:] = [] - - for dir in all_dirs: - full_path = os.path.join(root, dir) - rel_path = os.path.relpath(full_path, where) - package = rel_path.replace(os.path.sep, '.') - - # Skip directory trees that are not valid packages - if ('.' in dir or not cls._looks_like_package(full_path)): - continue - - # Should this package be included? - if include(package) and not exclude(package): - yield package - - # Keep searching subdirectories, as there may be more packages - # down there, even if the parent was excluded. - dirs.append(dir) - - @staticmethod - def _looks_like_package(path): - """Does a directory look like a package?""" - return os.path.isfile(os.path.join(path, '__init__.py')) - - @staticmethod - def _build_filter(*patterns): - """ - Given a list of patterns, return a callable that will be true only if - the input matches at least one of the patterns. - """ - return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) - - -class PEP420PackageFinder(PackageFinder): - @staticmethod - def _looks_like_package(path): - return True - - -find_packages = PackageFinder.find - -if PY3: - find_namespace_packages = PEP420PackageFinder.find - - -def _install_setup_requires(attrs): - # Note: do not use `setuptools.Distribution` directly, as - # our PEP 517 backend patch `distutils.core.Distribution`. - dist = distutils.core.Distribution(dict( - (k, v) for k, v in attrs.items() - if k in ('dependency_links', 'setup_requires') - )) - # Honor setup.cfg's options. 
- dist.parse_config_files(ignore_option_errors=True) - if dist.setup_requires: - dist.fetch_build_eggs(dist.setup_requires) - - -def setup(**attrs): - # Make sure we have any requirements needed to interpret 'attrs'. - _install_setup_requires(attrs) - return distutils.core.setup(**attrs) - -setup.__doc__ = distutils.core.setup.__doc__ - - -_Command = monkey.get_unpatched(distutils.core.Command) - - -class Command(_Command): - __doc__ = _Command.__doc__ - - command_consumes_arguments = False - - def __init__(self, dist, **kw): - """ - Construct the command for dist, updating - vars(self) with any keyword parameters. - """ - _Command.__init__(self, dist) - vars(self).update(kw) - - def _ensure_stringlike(self, option, what, default=None): - val = getattr(self, option) - if val is None: - setattr(self, option, default) - return default - elif not isinstance(val, string_types): - raise DistutilsOptionError("'%s' must be a %s (got `%s`)" - % (option, what, val)) - return val - - def ensure_string_list(self, option): - r"""Ensure that 'option' is a list of strings. If 'option' is - currently a string, we split it either on /,\s*/ or /\s+/, so - "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become - ["foo", "bar", "baz"]. - """ - val = getattr(self, option) - if val is None: - return - elif isinstance(val, string_types): - setattr(self, option, re.split(r',\s*|\s+', val)) - else: - if isinstance(val, list): - ok = all(isinstance(v, string_types) for v in val) - else: - ok = False - if not ok: - raise DistutilsOptionError( - "'%s' must be a list of strings (got %r)" - % (option, val)) - - def reinitialize_command(self, command, reinit_subcommands=0, **kw): - cmd = _Command.reinitialize_command(self, command, reinit_subcommands) - vars(cmd).update(kw) - return cmd - - -def _find_all_simple(path): - """ - Find all files under 'path' - """ - results = ( - os.path.join(base, file) - for base, dirs, files in os.walk(path, followlinks=True) - for file in files - ) - return filter(os.path.isfile, results) - - -def findall(dir=os.curdir): - """ - Find all files under 'dir' and return the list of full filenames. - Unless dir is '.', return full filenames with dir prepended. 
- """ - files = _find_all_simple(dir) - if dir == os.curdir: - make_rel = functools.partial(os.path.relpath, start=dir) - files = map(make_rel, files) - return list(files) - - -# Apply monkey patches -monkey.patch_all() diff --git a/env/lib/python2.7/site-packages/setuptools/__init__.pyc b/env/lib/python2.7/site-packages/setuptools/__init__.pyc deleted file mode 100644 index 4fbe550c..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_deprecation_warning.pyc b/env/lib/python2.7/site-packages/setuptools/_deprecation_warning.pyc deleted file mode 100644 index b0366dca..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_deprecation_warning.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/__init__.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/__init__.pyc deleted file mode 100644 index 7f30671e..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__about__.py b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__about__.py deleted file mode 100644 index 95d330ef..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__about__.py +++ /dev/null @@ -1,21 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -__all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "16.8" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD or Apache License, Version 2.0" -__copyright__ = "Copyright 2014-2016 %s" % __author__ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__about__.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__about__.pyc deleted file mode 100644 index 5c7593ec..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__about__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__init__.py b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__init__.py deleted file mode 100644 index 5ee62202..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - -from .__about__ import ( - __author__, __copyright__, __email__, __license__, __summary__, __title__, - __uri__, __version__ -) - -__all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", -] diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__init__.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__init__.pyc deleted file mode 100644 index a68e2b44..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_compat.py b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_compat.py deleted file mode 100644 index 210bb80b..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_compat.py +++ /dev/null @@ -1,30 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import sys - - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - -# flake8: noqa - -if PY3: - string_types = str, -else: - string_types = basestring, - - -def with_metaclass(meta, *bases): - """ - Create a base class with a metaclass. - """ - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_compat.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_compat.pyc deleted file mode 100644 index 9b28c446..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_structures.py b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_structures.py deleted file mode 100644 index ccc27861..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_structures.py +++ /dev/null @@ -1,68 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - - -class Infinity(object): - - def __repr__(self): - return "Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return False - - def __le__(self, other): - return False - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return True - - def __ge__(self, other): - return True - - def __neg__(self): - return NegativeInfinity - -Infinity = Infinity() - - -class NegativeInfinity(object): - - def __repr__(self): - return "-Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return True - - def __le__(self, other): - return True - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return False - - def __ge__(self, other): - return False - - def __neg__(self): - return Infinity - -NegativeInfinity = NegativeInfinity() diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_structures.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_structures.pyc deleted file mode 100644 index 6c29aceb..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/_structures.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/markers.py b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/markers.py deleted file mode 100644 index 031332a3..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/markers.py +++ /dev/null @@ -1,301 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import operator -import os -import platform -import sys - -from setuptools.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd -from setuptools.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString -from setuptools.extern.pyparsing import Literal as L # noqa - -from ._compat import string_types -from .specifiers import Specifier, InvalidSpecifier - - -__all__ = [ - "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", - "Marker", "default_environment", -] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. 
- """ - - -class Node(object): - - def __init__(self, value): - self.value = value - - def __str__(self): - return str(self.value) - - def __repr__(self): - return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) - - def serialize(self): - raise NotImplementedError - - -class Variable(Node): - - def serialize(self): - return str(self) - - -class Value(Node): - - def serialize(self): - return '"{0}"'.format(self) - - -class Op(Node): - - def serialize(self): - return str(self) - - -VARIABLE = ( - L("implementation_version") | - L("platform_python_implementation") | - L("implementation_name") | - L("python_full_version") | - L("platform_release") | - L("platform_version") | - L("platform_machine") | - L("platform_system") | - L("python_version") | - L("sys_platform") | - L("os_name") | - L("os.name") | # PEP-345 - L("sys.platform") | # PEP-345 - L("platform.version") | # PEP-345 - L("platform.machine") | # PEP-345 - L("platform.python_implementation") | # PEP-345 - L("python_implementation") | # undocumented setuptools legacy - L("extra") -) -ALIASES = { - 'os.name': 'os_name', - 'sys.platform': 'sys_platform', - 'platform.version': 'platform_version', - 'platform.machine': 'platform_machine', - 'platform.python_implementation': 'platform_python_implementation', - 'python_implementation': 'platform_python_implementation' -} -VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) - -VERSION_CMP = ( - L("===") | - L("==") | - L(">=") | - L("<=") | - L("!=") | - L("~=") | - L(">") | - L("<") -) - -MARKER_OP = VERSION_CMP | L("not in") | L("in") -MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) - -MARKER_VALUE = QuotedString("'") | QuotedString('"') -MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) - -BOOLOP = L("and") | L("or") - -MARKER_VAR = VARIABLE | MARKER_VALUE - -MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) -MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) - -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() - -MARKER_EXPR = Forward() -MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) -MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) - -MARKER = stringStart + MARKER_EXPR + stringEnd - - -def _coerce_parse_result(results): - if isinstance(results, ParseResults): - return [_coerce_parse_result(i) for i in results] - else: - return results - - -def _format_marker(marker, first=True): - assert isinstance(marker, (list, tuple, string_types)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself it's own list. In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. 
- if (isinstance(marker, list) and len(marker) == 1 and - isinstance(marker[0], (list, tuple))): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs, op, rhs): - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs) - - oper = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison( - "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) - ) - - return oper(lhs, rhs) - - -_undefined = object() - - -def _get_env(environment, name): - value = environment.get(name, _undefined) - - if value is _undefined: - raise UndefinedEnvironmentName( - "{0!r} does not exist in evaluation environment.".format(name) - ) - - return value - - -def _evaluate_markers(markers, environment): - groups = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, string_types)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - lhs_value = _get_env(environment, lhs.value) - rhs_value = rhs.value - else: - lhs_value = lhs.value - rhs_value = _get_env(environment, rhs.value) - - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info): - version = '{0.major}.{0.minor}.{0.micro}'.format(info) - kind = info.releaselevel - if kind != 'final': - version += kind[0] + str(info.serial) - return version - - -def default_environment(): - if hasattr(sys, 'implementation'): - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - else: - iver = '0' - implementation_name = '' - - return { - "implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": platform.python_version()[:3], - "sys_platform": sys.platform, - } - - -class Marker(object): - - def __init__(self, marker): - try: - self._markers = _coerce_parse_result(MARKER.parseString(marker)) - except ParseException as e: - err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( - marker, marker[e.loc:e.loc + 8]) - raise InvalidMarker(err_str) - - def __str__(self): - return _format_marker(self._markers) - - def __repr__(self): - return "<Marker({0!r})>".format(str(self)) - - def evaluate(self, environment=None): - """Evaluate a marker. - - Return the boolean from evaluating the given marker against the - environment. environment is an optional argument to override all or - part of the determined environment. 
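The grammar and `_evaluate_markers` logic in the removed vendored `markers.py` above implement PEP 508 environment markers, and `Marker.evaluate` accepts an optional environment override, as its docstring notes. A small sketch, assuming the standalone `packaging` distribution (the upstream of this vendored copy) is available:

```python
from packaging.markers import Marker

marker = Marker('python_version >= "3.6" and sys_platform == "win32"')

# Evaluate against the running interpreter's environment...
print(marker.evaluate())

# ...or override part of the environment explicitly, as the docstring allows.
print(marker.evaluate({"sys_platform": "win32", "python_version": "3.6"}))  # True
```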
- - The environment is determined from the current Python process. - """ - current_environment = default_environment() - if environment is not None: - current_environment.update(environment) - - return _evaluate_markers(self._markers, current_environment) diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/markers.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/markers.pyc deleted file mode 100644 index 0ebbdd6c..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/markers.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/requirements.py b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/requirements.py deleted file mode 100644 index 5b493416..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/requirements.py +++ /dev/null @@ -1,127 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import string -import re - -from setuptools.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException -from setuptools.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine -from setuptools.extern.pyparsing import Literal as L # noqa -from setuptools.extern.six.moves.urllib import parse as urlparse - -from .markers import MARKER_EXPR, Marker -from .specifiers import LegacySpecifier, Specifier, SpecifierSet - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -ALPHANUM = Word(string.ascii_letters + string.digits) - -LBRACKET = L("[").suppress() -RBRACKET = L("]").suppress() -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() -COMMA = L(",").suppress() -SEMICOLON = L(";").suppress() -AT = L("@").suppress() - -PUNCTUATION = Word("-_.") -IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) -IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) - -NAME = IDENTIFIER("name") -EXTRA = IDENTIFIER - -URI = Regex(r'[^ ]+')("url") -URL = (AT + URI) - -EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) -EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") - -VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) -VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) - -VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), - joinString=",", adjacent=False)("_raw_spec") -_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') - -VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") -VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) - -MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") -MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start:t._original_end]) -) -MARKER_SEPERATOR = SEMICOLON -MARKER = MARKER_SEPERATOR + MARKER_EXPR - -VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) -URL_AND_MARKER = URL + Optional(MARKER) - -NAMED_REQUIREMENT = \ - NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) - -REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd - - -class Requirement(object): - """Parse a requirement. 
- - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? - - def __init__(self, requirement_string): - try: - req = REQUIREMENT.parseString(requirement_string) - except ParseException as e: - raise InvalidRequirement( - "Invalid requirement, parse error at \"{0!r}\"".format( - requirement_string[e.loc:e.loc + 8])) - - self.name = req.name - if req.url: - parsed_url = urlparse.urlparse(req.url) - if not (parsed_url.scheme and parsed_url.netloc) or ( - not parsed_url.scheme and not parsed_url.netloc): - raise InvalidRequirement("Invalid URL given") - self.url = req.url - else: - self.url = None - self.extras = set(req.extras.asList() if req.extras else []) - self.specifier = SpecifierSet(req.specifier) - self.marker = req.marker if req.marker else None - - def __str__(self): - parts = [self.name] - - if self.extras: - parts.append("[{0}]".format(",".join(sorted(self.extras)))) - - if self.specifier: - parts.append(str(self.specifier)) - - if self.url: - parts.append("@ {0}".format(self.url)) - - if self.marker: - parts.append("; {0}".format(self.marker)) - - return "".join(parts) - - def __repr__(self): - return "<Requirement({0!r})>".format(str(self)) diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/requirements.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/requirements.pyc deleted file mode 100644 index 1b256fb1..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/requirements.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/specifiers.py b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/specifiers.py deleted file mode 100644 index 7f5a76cf..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/specifiers.py +++ /dev/null @@ -1,774 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import abc -import functools -import itertools -import re - -from ._compat import string_types, with_metaclass -from .version import Version, LegacyVersion, parse - - -class InvalidSpecifier(ValueError): - """ - An invalid specifier was found, users should refer to PEP 440. - """ - - -class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): - - @abc.abstractmethod - def __str__(self): - """ - Returns the str representation of this Specifier like object. This - should be representative of the Specifier itself. - """ - - @abc.abstractmethod - def __hash__(self): - """ - Returns a hash value for this Specifier like object. - """ - - @abc.abstractmethod - def __eq__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are equal. - """ - - @abc.abstractmethod - def __ne__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are not equal. - """ - - @abc.abstractproperty - def prereleases(self): - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. 
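The removed vendored `requirements.py` above parses a PEP 508 requirement string into its parts (name, extras, version specifier, URL, marker). A short sketch, again assuming the standalone `packaging` distribution; the requirement string itself is made up:

```python
from packaging.requirements import Requirement

req = Requirement('example-pkg[gui]>=1.0,<2.0; python_version >= "3.6"')
print(req.name)       # example-pkg
print(req.extras)     # {'gui'}
print(req.specifier)  # both version clauses, as a SpecifierSet
print(req.marker)     # python_version >= "3.6"
```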
- """ - - @prereleases.setter - def prereleases(self, value): - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @abc.abstractmethod - def contains(self, item, prereleases=None): - """ - Determines if the given item is contained within this specifier. - """ - - @abc.abstractmethod - def filter(self, iterable, prereleases=None): - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. - """ - - -class _IndividualSpecifier(BaseSpecifier): - - _operators = {} - - def __init__(self, spec="", prereleases=None): - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - - self._spec = ( - match.group("operator").strip(), - match.group("version").strip(), - ) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "<{0}({1!r}{2})>".format( - self.__class__.__name__, - str(self), - pre, - ) - - def __str__(self): - return "{0}{1}".format(*self._spec) - - def __hash__(self): - return hash(self._spec) - - def __eq__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec == other._spec - - def __ne__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec != other._spec - - def _get_operator(self, op): - return getattr(self, "_compare_{0}".format(self._operators[op])) - - def _coerce_version(self, version): - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version - - @property - def operator(self): - return self._spec[0] - - @property - def version(self): - return self._spec[1] - - @property - def prereleases(self): - return self._prereleases - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version or LegacyVersion, this allows us to have - # a shortcut for ``"2.0" in Specifier(">=2") - item = self._coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - return self._get_operator(self.operator)(item, self.version) - - def filter(self, iterable, prereleases=None): - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. 
- for version in iterable: - parsed_version = self._coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later incase nothing - # else matches this specifier. - if (parsed_version.is_prerelease and not - (prereleases or self.prereleases)): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the begining. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -class LegacySpecifier(_IndividualSpecifier): - - _regex_str = ( - r""" - (?P<operator>(==|!=|<=|>=|<|>)) - \s* - (?P<version> - [^,;\s)]* # Since this is a "legacy" specifier, and the version - # string can be just about anything, we match everything - # except for whitespace, a semi-colon for marker support, - # a closing paren since versions can be enclosed in - # them, and a comma since it's a version separator. - ) - """ - ) - - _regex = re.compile( - r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - } - - def _coerce_version(self, version): - if not isinstance(version, LegacyVersion): - version = LegacyVersion(str(version)) - return version - - def _compare_equal(self, prospective, spec): - return prospective == self._coerce_version(spec) - - def _compare_not_equal(self, prospective, spec): - return prospective != self._coerce_version(spec) - - def _compare_less_than_equal(self, prospective, spec): - return prospective <= self._coerce_version(spec) - - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= self._coerce_version(spec) - - def _compare_less_than(self, prospective, spec): - return prospective < self._coerce_version(spec) - - def _compare_greater_than(self, prospective, spec): - return prospective > self._coerce_version(spec) - - -def _require_version_compare(fn): - @functools.wraps(fn) - def wrapped(self, prospective, spec): - if not isinstance(prospective, Version): - return False - return fn(self, prospective, spec) - return wrapped - - -class Specifier(_IndividualSpecifier): - - _regex_str = ( - r""" - (?P<operator>(~=|==|!=|<=|>=|<|>|===)) - (?P<version> - (?: - # The identity operators allow for an escape hatch that will - # do an exact string match of the version you wish to install. - # This will not be parsed by PEP 440 and we cannot determine - # any semantic meaning from it. This operator is discouraged - # but included entirely as an escape hatch. - (?<====) # Only match for the identity operator - \s* - [^\s]* # We just match everything, except for whitespace - # since we are only testing for strict identity. - ) - | - (?: - # The (non)equality operators allow for wild card and local - # versions to be specified so we have to define these two - # operators separately to enable that. - (?<===|!=) # Only match for equals and not equals - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? 
- (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - - # You cannot use a wild card and a dev or local version - # together so group them with a | and make them optional. - (?: - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - | - \.\* # Wild card syntax of .* - )? - ) - | - (?: - # The compatible operator requires at least two digits in the - # release segment. - (?<=~=) # Only match for the compatible operator - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - | - (?: - # All other operators only allow a sub set of what the - # (non)equality operators do. Specifically they do not allow - # local versions to be specified nor do they allow the prefix - # matching wild cards. - (?<!==|!=|~=) # We have special cases for these - # operators so we want to make sure they - # don't match here. - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - ) - """ - ) - - _regex = re.compile( - r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "~=": "compatible", - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - "===": "arbitrary", - } - - @_require_version_compare - def _compare_compatible(self, prospective, spec): - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore post and dev releases and we want to treat the pre-release as - # it's own separate segment. - prefix = ".".join( - list( - itertools.takewhile( - lambda x: (not x.startswith("post") and not - x.startswith("dev")), - _version_split(spec), - ) - )[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return (self._get_operator(">=")(prospective, spec) and - self._get_operator("==")(prospective, prefix)) - - @_require_version_compare - def _compare_equal(self, prospective, spec): - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - prospective = Version(prospective.public) - # Split the spec out by dots, and pretend that there is an implicit - # dot in between a release segment and a pre-release segment. - spec = _version_split(spec[:-2]) # Remove the trailing .* - - # Split the prospective version out by dots, and pretend that there - # is an implicit dot in between a release segment and a pre-release - # segment. - prospective = _version_split(str(prospective)) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. 
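The `_compare_compatible` comments above note that a compatible-release clause is just a `>=` clause plus a prefix-match `==` clause (so `~=2.2` behaves like `>=2.2,==2.*`). A small check of that equivalence, assuming the standalone `packaging` distribution:

```python
from packaging.specifiers import SpecifierSet

compatible = SpecifierSet("~=2.2")
expanded = SpecifierSet(">=2.2,==2.*")

for candidate in ("2.2", "2.3", "2.10", "3.0"):
    # Both specifier sets accept and reject exactly the same versions.
    assert (candidate in compatible) == (candidate in expanded)
# 2.2, 2.3 and 2.10 are accepted; 3.0 is rejected by both.
```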
- prospective = prospective[:len(spec)] - - # Pad out our two sides with zeros so that they both equal the same - # length. - spec, prospective = _pad_version(spec, prospective) - else: - # Convert our spec string into a Version - spec = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. - if not spec.local: - prospective = Version(prospective.public) - - return prospective == spec - - @_require_version_compare - def _compare_not_equal(self, prospective, spec): - return not self._compare_equal(prospective, spec) - - @_require_version_compare - def _compare_less_than_equal(self, prospective, spec): - return prospective <= Version(spec) - - @_require_version_compare - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= Version(spec) - - @_require_version_compare - def _compare_less_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - @_require_version_compare - def _compare_greater_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is techincally greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective, spec): - return str(prospective).lower() == str(spec).lower() - - @property - def prereleases(self): - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. 
- if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if parse(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version): - result = [] - for item in version.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _pad_version(left, right): - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]):]) - right_split.append(right[len(right_split[0]):]) - - # Insert our padding - left_split.insert( - 1, - ["0"] * max(0, len(right_split[0]) - len(left_split[0])), - ) - right_split.insert( - 1, - ["0"] * max(0, len(left_split[0]) - len(right_split[0])), - ) - - return ( - list(itertools.chain(*left_split)), - list(itertools.chain(*right_split)), - ) - - -class SpecifierSet(BaseSpecifier): - - def __init__(self, specifiers="", prereleases=None): - # Split on , to break each indidivual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. - specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Parsed each individual specifier, attempting first to make it a - # Specifier and falling back to a LegacySpecifier. - parsed = set() - for specifier in specifiers: - try: - parsed.add(Specifier(specifier)) - except InvalidSpecifier: - parsed.add(LegacySpecifier(specifier)) - - # Turn our parsed specifiers into a frozen set and save them for later. - self._specs = frozenset(parsed) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "<SpecifierSet({0!r}{1})>".format(str(self), pre) - - def __str__(self): - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self): - return hash(self._specs) - - def __and__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." 
- ) - - return specifier - - def __eq__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __ne__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs != other._specs - - def __len__(self): - return len(self._specs) - - def __iter__(self): - return iter(self._specs) - - @property - def prereleases(self): - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Ensure that our item is a Version or LegacyVersion instance. - if not isinstance(item, (LegacyVersion, Version)): - item = parse(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. - # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. - # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all( - s.contains(item, prereleases=prereleases) - for s in self._specs - ) - - def filter(self, iterable, prereleases=None): - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iterable - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases, and which will filter out LegacyVersion in general. 
- else: - filtered = [] - found_prereleases = [] - - for item in iterable: - # Ensure that we some kind of Version class for this item. - if not isinstance(item, (LegacyVersion, Version)): - parsed_version = parse(item) - else: - parsed_version = item - - # Filter out any item which is parsed as a LegacyVersion - if isinstance(parsed_version, LegacyVersion): - continue - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return found_prereleases - - return filtered diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/specifiers.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/specifiers.pyc deleted file mode 100644 index 77d554f2..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/specifiers.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/utils.py b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/utils.py deleted file mode 100644 index 942387ce..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/utils.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import re - - -_canonicalize_regex = re.compile(r"[-_.]+") - - -def canonicalize_name(name): - # This is taken from PEP 503. - return _canonicalize_regex.sub("-", name).lower() diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/utils.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/utils.pyc deleted file mode 100644 index a60eb637..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/utils.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/version.py b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/version.py deleted file mode 100644 index 83b5ee8c..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/version.py +++ /dev/null @@ -1,393 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import collections -import itertools -import re - -from ._structures import Infinity - - -__all__ = [ - "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" -] - - -_Version = collections.namedtuple( - "_Version", - ["epoch", "release", "dev", "pre", "post", "local"], -) - - -def parse(version): - """ - Parse the given version string and return either a :class:`Version` object - or a :class:`LegacyVersion` object depending on if the given version is - a valid PEP 440 version or a legacy version. - """ - try: - return Version(version) - except InvalidVersion: - return LegacyVersion(version) - - -class InvalidVersion(ValueError): - """ - An invalid version was found, users should refer to PEP 440. 
- """ - - -class _BaseVersion(object): - - def __hash__(self): - return hash(self._key) - - def __lt__(self, other): - return self._compare(other, lambda s, o: s < o) - - def __le__(self, other): - return self._compare(other, lambda s, o: s <= o) - - def __eq__(self, other): - return self._compare(other, lambda s, o: s == o) - - def __ge__(self, other): - return self._compare(other, lambda s, o: s >= o) - - def __gt__(self, other): - return self._compare(other, lambda s, o: s > o) - - def __ne__(self, other): - return self._compare(other, lambda s, o: s != o) - - def _compare(self, other, method): - if not isinstance(other, _BaseVersion): - return NotImplemented - - return method(self._key, other._key) - - -class LegacyVersion(_BaseVersion): - - def __init__(self, version): - self._version = str(version) - self._key = _legacy_cmpkey(self._version) - - def __str__(self): - return self._version - - def __repr__(self): - return "<LegacyVersion({0})>".format(repr(str(self))) - - @property - def public(self): - return self._version - - @property - def base_version(self): - return self._version - - @property - def local(self): - return None - - @property - def is_prerelease(self): - return False - - @property - def is_postrelease(self): - return False - - -_legacy_version_component_re = re.compile( - r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, -) - -_legacy_version_replacement_map = { - "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", -} - - -def _parse_version_parts(s): - for part in _legacy_version_component_re.split(s): - part = _legacy_version_replacement_map.get(part, part) - - if not part or part == ".": - continue - - if part[:1] in "0123456789": - # pad for numeric comparison - yield part.zfill(8) - else: - yield "*" + part - - # ensure that alpha/beta/candidate are before final - yield "*final" - - -def _legacy_cmpkey(version): - # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch - # greater than or equal to 0. This will effectively put the LegacyVersion, - # which uses the defacto standard originally implemented by setuptools, - # as before all PEP 440 versions. - epoch = -1 - - # This scheme is taken from pkg_resources.parse_version setuptools prior to - # it's adoption of the packaging library. - parts = [] - for part in _parse_version_parts(version.lower()): - if part.startswith("*"): - # remove "-" before a prerelease tag - if part < "*final": - while parts and parts[-1] == "*final-": - parts.pop() - - # remove trailing zeros from each series of numeric parts - while parts and parts[-1] == "00000000": - parts.pop() - - parts.append(part) - parts = tuple(parts) - - return epoch, parts - -# Deliberately not anchored to the start and end of the string, to make it -# easier for 3rd party code to reuse -VERSION_PATTERN = r""" - v? - (?: - (?:(?P<epoch>[0-9]+)!)? # epoch - (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment - (?P<pre> # pre-release - [-_\.]? - (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) - [-_\.]? - (?P<pre_n>[0-9]+)? - )? - (?P<post> # post release - (?:-(?P<post_n1>[0-9]+)) - | - (?: - [-_\.]? - (?P<post_l>post|rev|r) - [-_\.]? - (?P<post_n2>[0-9]+)? - ) - )? - (?P<dev> # dev release - [-_\.]? - (?P<dev_l>dev) - [-_\.]? - (?P<dev_n>[0-9]+)? - )? - ) - (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? 
# local version -""" - - -class Version(_BaseVersion): - - _regex = re.compile( - r"^\s*" + VERSION_PATTERN + r"\s*$", - re.VERBOSE | re.IGNORECASE, - ) - - def __init__(self, version): - # Validate the version and parse it into pieces - match = self._regex.search(version) - if not match: - raise InvalidVersion("Invalid version: '{0}'".format(version)) - - # Store the parsed out pieces of the version - self._version = _Version( - epoch=int(match.group("epoch")) if match.group("epoch") else 0, - release=tuple(int(i) for i in match.group("release").split(".")), - pre=_parse_letter_version( - match.group("pre_l"), - match.group("pre_n"), - ), - post=_parse_letter_version( - match.group("post_l"), - match.group("post_n1") or match.group("post_n2"), - ), - dev=_parse_letter_version( - match.group("dev_l"), - match.group("dev_n"), - ), - local=_parse_local_version(match.group("local")), - ) - - # Generate a key which will be used for sorting - self._key = _cmpkey( - self._version.epoch, - self._version.release, - self._version.pre, - self._version.post, - self._version.dev, - self._version.local, - ) - - def __repr__(self): - return "<Version({0})>".format(repr(str(self))) - - def __str__(self): - parts = [] - - # Epoch - if self._version.epoch != 0: - parts.append("{0}!".format(self._version.epoch)) - - # Release segment - parts.append(".".join(str(x) for x in self._version.release)) - - # Pre-release - if self._version.pre is not None: - parts.append("".join(str(x) for x in self._version.pre)) - - # Post-release - if self._version.post is not None: - parts.append(".post{0}".format(self._version.post[1])) - - # Development release - if self._version.dev is not None: - parts.append(".dev{0}".format(self._version.dev[1])) - - # Local version segment - if self._version.local is not None: - parts.append( - "+{0}".format(".".join(str(x) for x in self._version.local)) - ) - - return "".join(parts) - - @property - def public(self): - return str(self).split("+", 1)[0] - - @property - def base_version(self): - parts = [] - - # Epoch - if self._version.epoch != 0: - parts.append("{0}!".format(self._version.epoch)) - - # Release segment - parts.append(".".join(str(x) for x in self._version.release)) - - return "".join(parts) - - @property - def local(self): - version_string = str(self) - if "+" in version_string: - return version_string.split("+", 1)[1] - - @property - def is_prerelease(self): - return bool(self._version.dev or self._version.pre) - - @property - def is_postrelease(self): - return bool(self._version.post) - - -def _parse_letter_version(letter, number): - if letter: - # We consider there to be an implicit 0 in a pre-release if there is - # not a numeral associated with it. - if number is None: - number = 0 - - # We normalize any letters to their lower case form - letter = letter.lower() - - # We consider some words to be alternate spellings of other words and - # in those cases we want to normalize the spellings to our preferred - # spelling. - if letter == "alpha": - letter = "a" - elif letter == "beta": - letter = "b" - elif letter in ["c", "pre", "preview"]: - letter = "rc" - elif letter in ["rev", "r"]: - letter = "post" - - return letter, int(number) - if not letter and number: - # We assume if we are given a number, but we are not given a letter - # then this is using the implicit post release syntax (e.g. 
1.0-1) - letter = "post" - - return letter, int(number) - - -_local_version_seperators = re.compile(r"[\._-]") - - -def _parse_local_version(local): - """ - Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). - """ - if local is not None: - return tuple( - part.lower() if not part.isdigit() else int(part) - for part in _local_version_seperators.split(local) - ) - - -def _cmpkey(epoch, release, pre, post, dev, local): - # When we compare a release version, we want to compare it with all of the - # trailing zeros removed. So we'll use a reverse the list, drop all the now - # leading zeros until we come to something non zero, then take the rest - # re-reverse it back into the correct order and make it a tuple and use - # that for our sorting key. - release = tuple( - reversed(list( - itertools.dropwhile( - lambda x: x == 0, - reversed(release), - ) - )) - ) - - # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. - # We'll do this by abusing the pre segment, but we _only_ want to do this - # if there is not a pre or a post segment. If we have one of those then - # the normal sorting rules will handle this case correctly. - if pre is None and post is None and dev is not None: - pre = -Infinity - # Versions without a pre-release (except as noted above) should sort after - # those with one. - elif pre is None: - pre = Infinity - - # Versions without a post segment should sort before those with one. - if post is None: - post = -Infinity - - # Versions without a development segment should sort after those with one. - if dev is None: - dev = Infinity - - if local is None: - # Versions without a local segment should sort before those with one. - local = -Infinity - else: - # Versions with a local segment need that segment parsed to implement - # the sorting rules in PEP440. - # - Alpha numeric segments sort before numeric segments - # - Alpha numeric segments sort lexicographically - # - Numeric segments sort numerically - # - Shorter versions sort before longer versions when the prefixes - # match exactly - local = tuple( - (i, "") if isinstance(i, int) else (-Infinity, i) - for i in local - ) - - return epoch, release, pre, post, dev, local diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/version.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/version.pyc deleted file mode 100644 index 51705328..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/packaging/version.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/pyparsing.py b/env/lib/python2.7/site-packages/setuptools/_vendor/pyparsing.py deleted file mode 100644 index cf75e1e5..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/pyparsing.py +++ /dev/null @@ -1,5742 +0,0 @@ -# module pyparsing.py -# -# Copyright (c) 2003-2018 Paul T. McGuire -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__doc__ = \ -""" -pyparsing module - Classes and methods to define and execute parsing grammars -============================================================================= - -The pyparsing module is an alternative approach to creating and executing simple grammars, -vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you -don't need to learn a new syntax for defining grammars or matching expressions - the parsing module -provides a library of classes that you use to construct the grammar directly in Python. - -Here is a program to parse "Hello, World!" (or any greeting of the form -C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements -(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to -L{Literal} expressions):: - - from pyparsing import Word, alphas - - # define grammar of a greeting - greet = Word(alphas) + "," + Word(alphas) + "!" - - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - -The program outputs the following:: - - Hello, World! -> ['Hello', ',', 'World', '!'] - -The Python representation of the grammar is quite readable, owing to the self-explanatory -class names, and the use of '+', '|' and '^' operators. - -The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an -object with named attributes. - -The pyparsing module handles some of the problems that are typically vexing when writing text parsers: - - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) - - quoted strings - - embedded comments - - -Getting Started - ------------------ -Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing -classes inherit from. 
Use the docstrings for examples of how to: - - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes - - construct character word-group expressions using the L{Word} class - - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes - - use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones - - associate names with your parsed results using L{ParserElement.setResultsName} - - find some helpful expression short-cuts like L{delimitedList} and L{oneOf} - - find more useful common expressions in the L{pyparsing_common} namespace class -""" - -__version__ = "2.2.1" -__versionTime__ = "18 Sep 2018 00:49 UTC" -__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" - -import string -from weakref import ref as wkref -import copy -import sys -import warnings -import re -import sre_constants -import collections -import pprint -import traceback -import types -from datetime import datetime - -try: - from _thread import RLock -except ImportError: - from threading import RLock - -try: - # Python 3 - from collections.abc import Iterable - from collections.abc import MutableMapping -except ImportError: - # Python 2.7 - from collections import Iterable - from collections import MutableMapping - -try: - from collections import OrderedDict as _OrderedDict -except ImportError: - try: - from ordereddict import OrderedDict as _OrderedDict - except ImportError: - _OrderedDict = None - -#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) - -__all__ = [ -'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', -'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', -'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', -'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', -'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', -'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', -'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', -'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', -'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', -'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', -'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', -'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', -'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', -'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', -'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', -'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', -'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', -'CloseMatch', 'tokenMap', 'pyparsing_common', -] - -system_version = tuple(sys.version_info)[:3] -PY_3 = system_version[0] == 3 -if PY_3: - _MAX_INT = sys.maxsize - basestring = str - unichr = chr - _ustr = str - - # build list of single arg builtins, that can be used as parse actions - singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] - -else: - _MAX_INT = sys.maxint - 
range = xrange - - def _ustr(obj): - """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries - str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It - then < returns the unicode object | encodes it with the default encoding | ... >. - """ - if isinstance(obj,unicode): - return obj - - try: - # If this works, then _ustr(obj) has the same behaviour as str(obj), so - # it won't break any existing code. - return str(obj) - - except UnicodeEncodeError: - # Else encode it - ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') - xmlcharref = Regex(r'&#\d+;') - xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) - return xmlcharref.transformString(ret) - - # build list of single arg builtins, tolerant of Python version, that can be used as parse actions - singleArgBuiltins = [] - import __builtin__ - for fname in "sum len sorted reversed list tuple set any all min max".split(): - try: - singleArgBuiltins.append(getattr(__builtin__,fname)) - except AttributeError: - continue - -_generatorType = type((y for y in range(1))) - -def _xml_escape(data): - """Escape &, <, >, ", ', etc. in a string of data.""" - - # ampersand must be replaced first - from_symbols = '&><"\'' - to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) - for from_,to_ in zip(from_symbols, to_symbols): - data = data.replace(from_, to_) - return data - -class _Constants(object): - pass - -alphas = string.ascii_uppercase + string.ascii_lowercase -nums = "0123456789" -hexnums = nums + "ABCDEFabcdef" -alphanums = alphas + nums -_bslash = chr(92) -printables = "".join(c for c in string.printable if c not in string.whitespace) - -class ParseBaseException(Exception): - """base exception class for all parsing runtime exceptions""" - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, pstr, loc=0, msg=None, elem=None ): - self.loc = loc - if msg is None: - self.msg = pstr - self.pstr = "" - else: - self.msg = msg - self.pstr = pstr - self.parserElement = elem - self.args = (pstr, loc, msg) - - @classmethod - def _from_exception(cls, pe): - """ - internal factory method to simplify creating one type of ParseException - from another - avoids having __init__ signature conflicts among subclasses - """ - return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) - - def __getattr__( self, aname ): - """supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - """ - if( aname == "lineno" ): - return lineno( self.loc, self.pstr ) - elif( aname in ("col", "column") ): - return col( self.loc, self.pstr ) - elif( aname == "line" ): - return line( self.loc, self.pstr ) - else: - raise AttributeError(aname) - - def __str__( self ): - return "%s (at char %d), (line:%d, col:%d)" % \ - ( self.msg, self.loc, self.lineno, self.column ) - def __repr__( self ): - return _ustr(self) - def markInputline( self, markerString = ">!<" ): - """Extracts the exception line from the input string, and marks - the location of the exception with a special symbol. 
- """ - line_str = self.line - line_column = self.column - 1 - if markerString: - line_str = "".join((line_str[:line_column], - markerString, line_str[line_column:])) - return line_str.strip() - def __dir__(self): - return "lineno col line".split() + dir(type(self)) - -class ParseException(ParseBaseException): - """ - Exception thrown when parse expressions don't match class; - supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - - Example:: - try: - Word(nums).setName("integer").parseString("ABC") - except ParseException as pe: - print(pe) - print("column: {}".format(pe.col)) - - prints:: - Expected integer (at char 0), (line:1, col:1) - column: 1 - """ - pass - -class ParseFatalException(ParseBaseException): - """user-throwable exception thrown when inconsistent parse content - is found; stops all parsing immediately""" - pass - -class ParseSyntaxException(ParseFatalException): - """just like L{ParseFatalException}, but thrown internally when an - L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop - immediately because an unbacktrackable syntax error has been found""" - pass - -#~ class ReparseException(ParseBaseException): - #~ """Experimental class - parse actions can raise this exception to cause - #~ pyparsing to reparse the input string: - #~ - with a modified input string, and/or - #~ - with a modified start location - #~ Set the values of the ReparseException in the constructor, and raise the - #~ exception in a parse action to cause pyparsing to use the new string/location. - #~ Setting the values as None causes no change to be made. - #~ """ - #~ def __init_( self, newstring, restartLoc ): - #~ self.newParseText = newstring - #~ self.reparseLoc = restartLoc - -class RecursiveGrammarException(Exception): - """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" - def __init__( self, parseElementList ): - self.parseElementTrace = parseElementList - - def __str__( self ): - return "RecursiveGrammarException: %s" % self.parseElementTrace - -class _ParseResultsWithOffset(object): - def __init__(self,p1,p2): - self.tup = (p1,p2) - def __getitem__(self,i): - return self.tup[i] - def __repr__(self): - return repr(self.tup[0]) - def setOffset(self,i): - self.tup = (self.tup[0],i) - -class ParseResults(object): - """ - Structured parse results, to provide multiple means of access to the parsed data: - - as a list (C{len(results)}) - - by list index (C{results[0], results[1]}, etc.) 
- - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName}) - - Example:: - integer = Word(nums) - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - # equivalent form: - # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - # parseString returns a ParseResults object - result = date_str.parseString("1999/12/31") - - def test(s, fn=repr): - print("%s -> %s" % (s, fn(eval(s)))) - test("list(result)") - test("result[0]") - test("result['month']") - test("result.day") - test("'month' in result") - test("'minutes' in result") - test("result.dump()", str) - prints:: - list(result) -> ['1999', '/', '12', '/', '31'] - result[0] -> '1999' - result['month'] -> '12' - result.day -> '31' - 'month' in result -> True - 'minutes' in result -> False - result.dump() -> ['1999', '/', '12', '/', '31'] - - day: 31 - - month: 12 - - year: 1999 - """ - def __new__(cls, toklist=None, name=None, asList=True, modal=True ): - if isinstance(toklist, cls): - return toklist - retobj = object.__new__(cls) - retobj.__doinit = True - return retobj - - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): - if self.__doinit: - self.__doinit = False - self.__name = None - self.__parent = None - self.__accumNames = {} - self.__asList = asList - self.__modal = modal - if toklist is None: - toklist = [] - if isinstance(toklist, list): - self.__toklist = toklist[:] - elif isinstance(toklist, _generatorType): - self.__toklist = list(toklist) - else: - self.__toklist = [toklist] - self.__tokdict = dict() - - if name is not None and name: - if not modal: - self.__accumNames[name] = 0 - if isinstance(name,int): - name = _ustr(name) # will always return a str, but use _ustr for consistency - self.__name = name - if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): - if isinstance(toklist,basestring): - toklist = [ toklist ] - if asList: - if isinstance(toklist,ParseResults): - self[name] = _ParseResultsWithOffset(toklist.copy(),0) - else: - self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) - self[name].__name = name - else: - try: - self[name] = toklist[0] - except (KeyError,TypeError,IndexError): - self[name] = toklist - - def __getitem__( self, i ): - if isinstance( i, (int,slice) ): - return self.__toklist[i] - else: - if i not in self.__accumNames: - return self.__tokdict[i][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[i] ]) - - def __setitem__( self, k, v, isinstance=isinstance ): - if isinstance(v,_ParseResultsWithOffset): - self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] - sub = v[0] - elif isinstance(k,(int,slice)): - self.__toklist[k] = v - sub = v - else: - self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] - sub = v - if isinstance(sub,ParseResults): - sub.__parent = wkref(self) - - def __delitem__( self, i ): - if isinstance(i,(int,slice)): - mylen = len( self.__toklist ) - del self.__toklist[i] - - # convert int to slice - if isinstance(i, int): - if i < 0: - i += mylen - i = slice(i, i+1) - # get removed indices - removed = list(range(*i.indices(mylen))) - removed.reverse() - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for j in removed: - for k, (value, position) in 
enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) - else: - del self.__tokdict[i] - - def __contains__( self, k ): - return k in self.__tokdict - - def __len__( self ): return len( self.__toklist ) - def __bool__(self): return ( not not self.__toklist ) - __nonzero__ = __bool__ - def __iter__( self ): return iter( self.__toklist ) - def __reversed__( self ): return iter( self.__toklist[::-1] ) - def _iterkeys( self ): - if hasattr(self.__tokdict, "iterkeys"): - return self.__tokdict.iterkeys() - else: - return iter(self.__tokdict) - - def _itervalues( self ): - return (self[k] for k in self._iterkeys()) - - def _iteritems( self ): - return ((k, self[k]) for k in self._iterkeys()) - - if PY_3: - keys = _iterkeys - """Returns an iterator of all named result keys (Python 3.x only).""" - - values = _itervalues - """Returns an iterator of all named result values (Python 3.x only).""" - - items = _iteritems - """Returns an iterator of all named result key-value tuples (Python 3.x only).""" - - else: - iterkeys = _iterkeys - """Returns an iterator of all named result keys (Python 2.x only).""" - - itervalues = _itervalues - """Returns an iterator of all named result values (Python 2.x only).""" - - iteritems = _iteritems - """Returns an iterator of all named result key-value tuples (Python 2.x only).""" - - def keys( self ): - """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iterkeys()) - - def values( self ): - """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.itervalues()) - - def items( self ): - """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iteritems()) - - def haskeys( self ): - """Since keys() returns an iterator, this method is helpful in bypassing - code that looks for the existence of any defined results names.""" - return bool(self.__tokdict) - - def pop( self, *args, **kwargs): - """ - Removes and returns item at specified index (default=C{last}). - Supports both C{list} and C{dict} semantics for C{pop()}. If passed no - argument or an integer argument, it will use C{list} semantics - and pop tokens from the list of parsed tokens. If passed a - non-integer argument (most likely a string), it will use C{dict} - semantics and pop the corresponding value from any defined - results names. A second default return value argument is - supported, just as in C{dict.pop()}. 
- - Example:: - def remove_first(tokens): - tokens.pop(0) - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] - - label = Word(alphas) - patt = label("LABEL") + OneOrMore(Word(nums)) - print(patt.parseString("AAB 123 321").dump()) - - # Use pop() in a parse action to remove named result (note that corresponding value is not - # removed from list form of results) - def remove_LABEL(tokens): - tokens.pop("LABEL") - return tokens - patt.addParseAction(remove_LABEL) - print(patt.parseString("AAB 123 321").dump()) - prints:: - ['AAB', '123', '321'] - - LABEL: AAB - - ['AAB', '123', '321'] - """ - if not args: - args = [-1] - for k,v in kwargs.items(): - if k == 'default': - args = (args[0], v) - else: - raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) or - len(args) == 1 or - args[0] in self): - index = args[0] - ret = self[index] - del self[index] - return ret - else: - defaultvalue = args[1] - return defaultvalue - - def get(self, key, defaultValue=None): - """ - Returns named result matching the given key, or if there is no - such name, then returns the given C{defaultValue} or C{None} if no - C{defaultValue} is specified. - - Similar to C{dict.get()}. - - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString("1999/12/31") - print(result.get("year")) # -> '1999' - print(result.get("hour", "not specified")) # -> 'not specified' - print(result.get("hour")) # -> None - """ - if key in self: - return self[key] - else: - return defaultValue - - def insert( self, index, insStr ): - """ - Inserts new element at location index in the list of parsed tokens. - - Similar to C{list.insert()}. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to insert the parse location in the front of the parsed results - def insert_locn(locn, tokens): - tokens.insert(0, locn) - print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] - """ - self.__toklist.insert(index, insStr) - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for k, (value, position) in enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) - - def append( self, item ): - """ - Add single element to end of ParseResults list of elements. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to compute the sum of the parsed integers, and add it to the end - def append_sum(tokens): - tokens.append(sum(map(int, tokens))) - print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] - """ - self.__toklist.append(item) - - def extend( self, itemseq ): - """ - Add sequence of elements to end of ParseResults list of elements. 
- - Example:: - patt = OneOrMore(Word(alphas)) - - # use a parse action to append the reverse of the matched strings, to make a palindrome - def make_palindrome(tokens): - tokens.extend(reversed([t[::-1] for t in tokens])) - return ''.join(tokens) - print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' - """ - if isinstance(itemseq, ParseResults): - self += itemseq - else: - self.__toklist.extend(itemseq) - - def clear( self ): - """ - Clear all elements and results names. - """ - del self.__toklist[:] - self.__tokdict.clear() - - def __getattr__( self, name ): - try: - return self[name] - except KeyError: - return "" - - if name in self.__tokdict: - if name not in self.__accumNames: - return self.__tokdict[name][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[name] ]) - else: - return "" - - def __add__( self, other ): - ret = self.copy() - ret += other - return ret - - def __iadd__( self, other ): - if other.__tokdict: - offset = len(self.__toklist) - addoffset = lambda a: offset if a<0 else a+offset - otheritems = other.__tokdict.items() - otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) - for (k,vlist) in otheritems for v in vlist] - for k,v in otherdictitems: - self[k] = v - if isinstance(v[0],ParseResults): - v[0].__parent = wkref(self) - - self.__toklist += other.__toklist - self.__accumNames.update( other.__accumNames ) - return self - - def __radd__(self, other): - if isinstance(other,int) and other == 0: - # useful for merging many ParseResults using sum() builtin - return self.copy() - else: - # this may raise a TypeError - so be it - return other + self - - def __repr__( self ): - return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) - - def __str__( self ): - return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' - - def _asStringList( self, sep='' ): - out = [] - for item in self.__toklist: - if out and sep: - out.append(sep) - if isinstance( item, ParseResults ): - out += item._asStringList() - else: - out.append( _ustr(item) ) - return out - - def asList( self ): - """ - Returns the parse results as a nested list of matching tokens, all converted to strings. - - Example:: - patt = OneOrMore(Word(alphas)) - result = patt.parseString("sldkj lsdkj sldkj") - # even though the result prints in string-like form, it is actually a pyparsing ParseResults - print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj'] - - # Use asList() to create an actual list - result_list = result.asList() - print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj'] - """ - return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] - - def asDict( self ): - """ - Returns the named parse results as a nested dictionary. 
- - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString('12/31/1999') - print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) - - result_dict = result.asDict() - print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'} - - # even though a ParseResults supports dict-like access, sometime you just need to have a dict - import json - print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable - print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} - """ - if PY_3: - item_fn = self.items - else: - item_fn = self.iteritems - - def toItem(obj): - if isinstance(obj, ParseResults): - if obj.haskeys(): - return obj.asDict() - else: - return [toItem(v) for v in obj] - else: - return obj - - return dict((k,toItem(v)) for k,v in item_fn()) - - def copy( self ): - """ - Returns a new copy of a C{ParseResults} object. - """ - ret = ParseResults( self.__toklist ) - ret.__tokdict = self.__tokdict.copy() - ret.__parent = self.__parent - ret.__accumNames.update( self.__accumNames ) - ret.__name = self.__name - return ret - - def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): - """ - (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. - """ - nl = "\n" - out = [] - namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() - for v in vlist) - nextLevelIndent = indent + " " - - # collapse out indents if formatting is not desired - if not formatted: - indent = "" - nextLevelIndent = "" - nl = "" - - selfTag = None - if doctag is not None: - selfTag = doctag - else: - if self.__name: - selfTag = self.__name - - if not selfTag: - if namedItemsOnly: - return "" - else: - selfTag = "ITEM" - - out += [ nl, indent, "<", selfTag, ">" ] - - for i,res in enumerate(self.__toklist): - if isinstance(res,ParseResults): - if i in namedItems: - out += [ res.asXML(namedItems[i], - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] - else: - out += [ res.asXML(None, - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] - else: - # individual token, see if there is a name for it - resTag = None - if i in namedItems: - resTag = namedItems[i] - if not resTag: - if namedItemsOnly: - continue - else: - resTag = "ITEM" - xmlBodyText = _xml_escape(_ustr(res)) - out += [ nl, nextLevelIndent, "<", resTag, ">", - xmlBodyText, - "</", resTag, ">" ] - - out += [ nl, indent, "</", selfTag, ">" ] - return "".join(out) - - def __lookup(self,sub): - for k,vlist in self.__tokdict.items(): - for v,loc in vlist: - if sub is v: - return k - return None - - def getName(self): - r""" - Returns the results name for this token expression. Useful when several - different expressions might match at a particular location. 
- - Example:: - integer = Word(nums) - ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") - house_number_expr = Suppress('#') + Word(nums, alphanums) - user_data = (Group(house_number_expr)("house_number") - | Group(ssn_expr)("ssn") - | Group(integer)("age")) - user_info = OneOrMore(user_data) - - result = user_info.parseString("22 111-22-3333 #221B") - for item in result: - print(item.getName(), ':', item[0]) - prints:: - age : 22 - ssn : 111-22-3333 - house_number : 221B - """ - if self.__name: - return self.__name - elif self.__parent: - par = self.__parent() - if par: - return par.__lookup(self) - else: - return None - elif (len(self) == 1 and - len(self.__tokdict) == 1 and - next(iter(self.__tokdict.values()))[0][1] in (0,-1)): - return next(iter(self.__tokdict.keys())) - else: - return None - - def dump(self, indent='', depth=0, full=True): - """ - Diagnostic method for listing out the contents of a C{ParseResults}. - Accepts an optional C{indent} argument so that this string can be embedded - in a nested display of other data. - - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString('12/31/1999') - print(result.dump()) - prints:: - ['12', '/', '31', '/', '1999'] - - day: 1999 - - month: 31 - - year: 12 - """ - out = [] - NL = '\n' - out.append( indent+_ustr(self.asList()) ) - if full: - if self.haskeys(): - items = sorted((str(k), v) for k,v in self.items()) - for k,v in items: - if out: - out.append(NL) - out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) - if isinstance(v,ParseResults): - if v: - out.append( v.dump(indent,depth+1) ) - else: - out.append(_ustr(v)) - else: - out.append(repr(v)) - elif any(isinstance(vv,ParseResults) for vv in self): - v = self - for i,vv in enumerate(v): - if isinstance(vv,ParseResults): - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) - else: - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) - - return "".join(out) - - def pprint(self, *args, **kwargs): - """ - Pretty-printer for parsed results as a list, using the C{pprint} module. - Accepts additional positional or keyword args as defined for the - C{pprint.pprint} method. 
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) - - Example:: - ident = Word(alphas, alphanums) - num = Word(nums) - func = Forward() - term = ident | num | Group('(' + func + ')') - func <<= ident + Group(Optional(delimitedList(term))) - result = func.parseString("fna a,b,(fnb c,d,200),100") - result.pprint(width=40) - prints:: - ['fna', - ['a', - 'b', - ['(', 'fnb', ['c', 'd', '200'], ')'], - '100']] - """ - pprint.pprint(self.asList(), *args, **kwargs) - - # add support for pickle protocol - def __getstate__(self): - return ( self.__toklist, - ( self.__tokdict.copy(), - self.__parent is not None and self.__parent() or None, - self.__accumNames, - self.__name ) ) - - def __setstate__(self,state): - self.__toklist = state[0] - (self.__tokdict, - par, - inAccumNames, - self.__name) = state[1] - self.__accumNames = {} - self.__accumNames.update(inAccumNames) - if par is not None: - self.__parent = wkref(par) - else: - self.__parent = None - - def __getnewargs__(self): - return self.__toklist, self.__name, self.__asList, self.__modal - - def __dir__(self): - return (dir(type(self)) + list(self.keys())) - -MutableMapping.register(ParseResults) - -def col (loc,strg): - """Returns current column within a string, counting newlines as line separators. - The first column is number 1. - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information - on parsing strings containing C{<TAB>}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - """ - s = strg - return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) - -def lineno(loc,strg): - """Returns current line number within a string, counting newlines as line separators. - The first line is number 1. - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information - on parsing strings containing C{<TAB>}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - """ - return strg.count("\n",0,loc) + 1 - -def line( loc, strg ): - """Returns the line of text containing loc within a string, counting newlines as line separators. 
- """ - lastCR = strg.rfind("\n", 0, loc) - nextCR = strg.find("\n", loc) - if nextCR >= 0: - return strg[lastCR+1:nextCR] - else: - return strg[lastCR+1:] - -def _defaultStartDebugAction( instring, loc, expr ): - print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) - -def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): - print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) - -def _defaultExceptionDebugAction( instring, loc, expr, exc ): - print ("Exception raised:" + _ustr(exc)) - -def nullDebugAction(*args): - """'Do-nothing' debug action, to suppress debugging output during parsing.""" - pass - -# Only works on Python 3.x - nonlocal is toxic to Python 2 installs -#~ 'decorator to trim function calls to match the arity of the target' -#~ def _trim_arity(func, maxargs=3): - #~ if func in singleArgBuiltins: - #~ return lambda s,l,t: func(t) - #~ limit = 0 - #~ foundArity = False - #~ def wrapper(*args): - #~ nonlocal limit,foundArity - #~ while 1: - #~ try: - #~ ret = func(*args[limit:]) - #~ foundArity = True - #~ return ret - #~ except TypeError: - #~ if limit == maxargs or foundArity: - #~ raise - #~ limit += 1 - #~ continue - #~ return wrapper - -# this version is Python 2.x-3.x cross-compatible -'decorator to trim function calls to match the arity of the target' -def _trim_arity(func, maxargs=2): - if func in singleArgBuiltins: - return lambda s,l,t: func(t) - limit = [0] - foundArity = [False] - - # traceback return data structure changed in Py3.5 - normalize back to plain tuples - if system_version[:2] >= (3,5): - def extract_stack(limit=0): - # special handling for Python 3.5.0 - extra deep call stack by 1 - offset = -3 if system_version == (3,5,0) else -2 - frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] - return [frame_summary[:2]] - def extract_tb(tb, limit=0): - frames = traceback.extract_tb(tb, limit=limit) - frame_summary = frames[-1] - return [frame_summary[:2]] - else: - extract_stack = traceback.extract_stack - extract_tb = traceback.extract_tb - - # synthesize what would be returned by traceback.extract_stack at the call to - # user's parse action 'func', so that we don't incur call penalty at parse time - - LINE_DIFF = 6 - # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND - # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
- this_line = extract_stack(limit=2)[-1] - pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) - - def wrapper(*args): - while 1: - try: - ret = func(*args[limit[0]:]) - foundArity[0] = True - return ret - except TypeError: - # re-raise TypeErrors if they did not come from our arity testing - if foundArity[0]: - raise - else: - try: - tb = sys.exc_info()[-1] - if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: - raise - finally: - del tb - - if limit[0] <= maxargs: - limit[0] += 1 - continue - raise - - # copy func name to wrapper for sensible debug output - func_name = "<parse action>" - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - wrapper.__name__ = func_name - - return wrapper - -class ParserElement(object): - """Abstract base level parser element class.""" - DEFAULT_WHITE_CHARS = " \n\t\r" - verbose_stacktrace = False - - @staticmethod - def setDefaultWhitespaceChars( chars ): - r""" - Overrides the default whitespace chars - - Example:: - # default whitespace chars are space, <TAB> and newline - OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] - - # change to just treat newline as significant - ParserElement.setDefaultWhitespaceChars(" \t") - OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] - """ - ParserElement.DEFAULT_WHITE_CHARS = chars - - @staticmethod - def inlineLiteralsUsing(cls): - """ - Set class to be used for inclusion of string literals into a parser. - - Example:: - # default literal class used is Literal - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] - - - # change to Suppress - ParserElement.inlineLiteralsUsing(Suppress) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] - """ - ParserElement._literalStringClass = cls - - def __init__( self, savelist=False ): - self.parseAction = list() - self.failAction = None - #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall - self.strRepr = None - self.resultsName = None - self.saveAsList = savelist - self.skipWhitespace = True - self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS - self.copyDefaultWhiteChars = True - self.mayReturnEmpty = False # used when checking for left-recursion - self.keepTabs = False - self.ignoreExprs = list() - self.debug = False - self.streamlined = False - self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index - self.errmsg = "" - self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) - self.debugActions = ( None, None, None ) #custom debug actions - self.re = None - self.callPreparse = True # used to avoid redundant calls to preParse - self.callDuringTry = False - - def copy( self ): - """ - Make a copy of this C{ParserElement}. Useful for defining different parse actions - for the same parsing pattern, using copies of the original parse element. 
- - Example:: - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") - integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - - print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) - prints:: - [5120, 100, 655360, 268435456] - Equivalent form of C{expr.copy()} is just C{expr()}:: - integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - """ - cpy = copy.copy( self ) - cpy.parseAction = self.parseAction[:] - cpy.ignoreExprs = self.ignoreExprs[:] - if self.copyDefaultWhiteChars: - cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS - return cpy - - def setName( self, name ): - """ - Define name for this expression, makes debugging and exception messages clearer. - - Example:: - Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) - Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) - """ - self.name = name - self.errmsg = "Expected " + self.name - if hasattr(self,"exception"): - self.exception.msg = self.errmsg - return self - - def setResultsName( self, name, listAllMatches=False ): - """ - Define name for referencing matching tokens as a nested attribute - of the returned parse results. - NOTE: this returns a *copy* of the original C{ParserElement} object; - this is so that the client can define a basic element, such as an - integer, and reference it in multiple places with different names. - - You can also set results names using the abbreviated syntax, - C{expr("name")} in place of C{expr.setResultsName("name")} - - see L{I{__call__}<__call__>}. - - Example:: - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - - # equivalent form: - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - """ - newself = self.copy() - if name.endswith("*"): - name = name[:-1] - listAllMatches=True - newself.resultsName = name - newself.modalResults = not listAllMatches - return newself - - def setBreak(self,breakFlag = True): - """Method to invoke the Python pdb debugger when this element is - about to be parsed. Set C{breakFlag} to True to enable, False to - disable. - """ - if breakFlag: - _parseMethod = self._parse - def breaker(instring, loc, doActions=True, callPreParse=True): - import pdb - pdb.set_trace() - return _parseMethod( instring, loc, doActions, callPreParse ) - breaker._originalParseMethod = _parseMethod - self._parse = breaker - else: - if hasattr(self._parse,"_originalParseMethod"): - self._parse = self._parse._originalParseMethod - return self - - def setParseAction( self, *fns, **kwargs ): - """ - Define one or more actions to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, - C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object - If the functions in fns modify the tokens, they can return them as the return - value from fn, and the modified list of tokens will replace the original. - Otherwise, fn does not need to return any value. 
- - Optional keyword arguments: - - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{parseString}<parseString>} for more information - on parsing strings containing C{<TAB>}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - - Example:: - integer = Word(nums) - date_str = integer + '/' + integer + '/' + integer - - date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] - - # use parse action to convert to ints at parse time - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - date_str = integer + '/' + integer + '/' + integer - - # note that integer fields are now ints, not strings - date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] - """ - self.parseAction = list(map(_trim_arity, list(fns))) - self.callDuringTry = kwargs.get("callDuringTry", False) - return self - - def addParseAction( self, *fns, **kwargs ): - """ - Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}. - - See examples in L{I{copy}<copy>}. - """ - self.parseAction += list(map(_trim_arity, list(fns))) - self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) - return self - - def addCondition(self, *fns, **kwargs): - """Add a boolean predicate function to expression's list of parse actions. See - L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction}, - functions passed to C{addCondition} need to return boolean success/fail of the condition. - - Optional keyword arguments: - - message = define a custom message to be used in the raised exception - - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException - - Example:: - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - year_int = integer.copy() - year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") - date_str = year_int + '/' + integer + '/' + integer - - result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) - """ - msg = kwargs.get("message", "failed user-defined condition") - exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException - for fn in fns: - def pa(s,l,t): - if not bool(_trim_arity(fn)(s,l,t)): - raise exc_type(s,l,msg) - self.parseAction.append(pa) - self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) - return self - - def setFailAction( self, fn ): - """Define action to perform if parsing fails at this expression. - Fail acton fn is a callable function that takes the arguments - C{fn(s,loc,expr,err)} where: - - s = string being parsed - - loc = location where expression match was attempted and failed - - expr = the parse expression that failed - - err = the exception thrown - The function returns no value. 
It may throw C{L{ParseFatalException}} - if it is desired to stop parsing immediately.""" - self.failAction = fn - return self - - def _skipIgnorables( self, instring, loc ): - exprsFound = True - while exprsFound: - exprsFound = False - for e in self.ignoreExprs: - try: - while 1: - loc,dummy = e._parse( instring, loc ) - exprsFound = True - except ParseException: - pass - return loc - - def preParse( self, instring, loc ): - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - - if self.skipWhitespace: - wt = self.whiteChars - instrlen = len(instring) - while loc < instrlen and instring[loc] in wt: - loc += 1 - - return loc - - def parseImpl( self, instring, loc, doActions=True ): - return loc, [] - - def postParse( self, instring, loc, tokenlist ): - return tokenlist - - #~ @profile - def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): - debugging = ( self.debug ) #and doActions ) - - if debugging or self.failAction: - #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) - if (self.debugActions[0] ): - self.debugActions[0]( instring, loc, self ) - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - try: - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - except ParseBaseException as err: - #~ print ("Exception raised:", err) - if self.debugActions[2]: - self.debugActions[2]( instring, tokensStart, self, err ) - if self.failAction: - self.failAction( instring, tokensStart, self, err ) - raise - else: - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - if self.mayIndexError or preloc >= len(instring): - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - else: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - - tokens = self.postParse( instring, loc, tokens ) - - retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) - if self.parseAction and (doActions or self.callDuringTry): - if debugging: - try: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - except ParseBaseException as err: - #~ print "Exception raised in user parse action:", err - if (self.debugActions[2] ): - self.debugActions[2]( instring, tokensStart, self, err ) - raise - else: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - if debugging: - #~ print ("Matched",self,"->",retTokens.asList()) - if (self.debugActions[1] ): - self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) - - return loc, retTokens - - def tryParse( self, instring, loc ): - try: - return self._parse( instring, loc, doActions=False )[0] - except ParseFatalException: - raise ParseException( instring, loc, self.errmsg, self) - - def canParseNext(self, instring, loc): - try: - self.tryParse(instring, loc) - except (ParseException, 
IndexError): - return False - else: - return True - - class _UnboundedCache(object): - def __init__(self): - cache = {} - self.not_in_cache = not_in_cache = object() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - if _OrderedDict is not None: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = _OrderedDict() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(cache) > size: - try: - cache.popitem(False) - except KeyError: - pass - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - else: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = {} - key_fifo = collections.deque([], size) - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(key_fifo) > size: - cache.pop(key_fifo.popleft(), None) - key_fifo.append(key) - - def clear(self): - cache.clear() - key_fifo.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - # argument cache for optimizing repeated calls when backtracking through recursive expressions - packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail - packrat_cache_lock = RLock() - packrat_cache_stats = [0, 0] - - # this method gets repeatedly called during backtracking with the same arguments - - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression - def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): - HIT, MISS = 0, 1 - lookup = (self, instring, loc, callPreParse, doActions) - with ParserElement.packrat_cache_lock: - cache = ParserElement.packrat_cache - value = cache.get(lookup) - if value is cache.not_in_cache: - ParserElement.packrat_cache_stats[MISS] += 1 - try: - value = self._parseNoCache(instring, loc, doActions, callPreParse) - except ParseBaseException as pe: - # cache a copy of the exception, without the traceback - cache.set(lookup, pe.__class__(*pe.args)) - raise - else: - cache.set(lookup, (value[0], value[1].copy())) - return value - else: - ParserElement.packrat_cache_stats[HIT] += 1 - if isinstance(value, Exception): - raise value - return (value[0], value[1].copy()) - - _parse = _parseNoCache - - @staticmethod - def resetCache(): - ParserElement.packrat_cache.clear() - ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) - - _packratEnabled = False - @staticmethod - def enablePackrat(cache_size_limit=128): - """Enables "packrat" parsing, which adds memoizing to the parsing logic. 
- Repeated parse attempts at the same string location (which happens - often in many complex grammars) can immediately return a cached value, - instead of re-executing parsing/validating code. Memoizing is done of - both valid results and parsing exceptions. - - Parameters: - - cache_size_limit - (default=C{128}) - if an integer value is provided - will limit the size of the packrat cache; if None is passed, then - the cache size will be unbounded; if 0 is passed, the cache will - be effectively disabled. - - This speedup may break existing programs that use parse actions that - have side-effects. For this reason, packrat parsing is disabled when - you first import pyparsing. To activate the packrat feature, your - program must call the class method C{ParserElement.enablePackrat()}. If - your program uses C{psyco} to "compile as you go", you must call - C{enablePackrat} before calling C{psyco.full()}. If you do not do this, - Python will crash. For best results, call C{enablePackrat()} immediately - after importing pyparsing. - - Example:: - import pyparsing - pyparsing.ParserElement.enablePackrat() - """ - if not ParserElement._packratEnabled: - ParserElement._packratEnabled = True - if cache_size_limit is None: - ParserElement.packrat_cache = ParserElement._UnboundedCache() - else: - ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) - ParserElement._parse = ParserElement._parseCache - - def parseString( self, instring, parseAll=False ): - """ - Execute the parse expression with the given string. - This is the main interface to the client code, once the complete - expression has been built. - - If you want the grammar to require that the entire input string be - successfully parsed, then set C{parseAll} to True (equivalent to ending - the grammar with C{L{StringEnd()}}). - - Note: C{parseString} implicitly calls C{expandtabs()} on the input string, - in order to report proper column numbers in parse actions. - If the input string contains tabs and - the grammar uses parse actions that use the C{loc} argument to index into the - string being parsed, you can ensure you have a consistent view of the input - string by: - - calling C{parseWithTabs} on your grammar before calling C{parseString} - (see L{I{parseWithTabs}<parseWithTabs>}) - - define your parse action using the full C{(s,loc,toks)} signature, and - reference the input string using the parse action's C{s} argument - - explictly expand the tabs in your input string before calling - C{parseString} - - Example:: - Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] - Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text - """ - ParserElement.resetCache() - if not self.streamlined: - self.streamline() - #~ self.saveAsList = True - for e in self.ignoreExprs: - e.streamline() - if not self.keepTabs: - instring = instring.expandtabs() - try: - loc, tokens = self._parse( instring, 0 ) - if parseAll: - loc = self.preParse( instring, loc ) - se = Empty() + StringEnd() - se._parse( instring, loc ) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - else: - return tokens - - def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): - """ - Scan the input string for expression matches. Each match will return the - matching tokens, start location, and end location. 
May be called with optional - C{maxMatches} argument, to clip scanning after 'n' matches are found. If - C{overlap} is specified, then overlapping matches will be reported. - - Note that the start and end locations are reported relative to the string - being parsed. See L{I{parseString}<parseString>} for more information on parsing - strings with embedded tabs. - - Example:: - source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" - print(source) - for tokens,start,end in Word(alphas).scanString(source): - print(' '*start + '^'*(end-start)) - print(' '*start + tokens[0]) - - prints:: - - sldjf123lsdjjkf345sldkjf879lkjsfd987 - ^^^^^ - sldjf - ^^^^^^^ - lsdjjkf - ^^^^^^ - sldkjf - ^^^^^^ - lkjsfd - """ - if not self.streamlined: - self.streamline() - for e in self.ignoreExprs: - e.streamline() - - if not self.keepTabs: - instring = _ustr(instring).expandtabs() - instrlen = len(instring) - loc = 0 - preparseFn = self.preParse - parseFn = self._parse - ParserElement.resetCache() - matches = 0 - try: - while loc <= instrlen and matches < maxMatches: - try: - preloc = preparseFn( instring, loc ) - nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) - except ParseException: - loc = preloc+1 - else: - if nextLoc > loc: - matches += 1 - yield tokens, preloc, nextLoc - if overlap: - nextloc = preparseFn( instring, loc ) - if nextloc > loc: - loc = nextLoc - else: - loc += 1 - else: - loc = nextLoc - else: - loc = preloc+1 - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def transformString( self, instring ): - """ - Extension to C{L{scanString}}, to modify matching text with modified tokens that may - be returned from a parse action. To use C{transformString}, define a grammar and - attach a parse action to it that modifies the returned token list. - Invoking C{transformString()} on a target string will then scan for matches, - and replace the matched text patterns according to the logic in the parse - action. C{transformString()} returns the resulting transformed string. - - Example:: - wd = Word(alphas) - wd.setParseAction(lambda toks: toks[0].title()) - - print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) - Prints:: - Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. - """ - out = [] - lastE = 0 - # force preservation of <TAB>s, to minimize unwanted transformation of string, and to - # keep string locs straight between transformString and scanString - self.keepTabs = True - try: - for t,s,e in self.scanString( instring ): - out.append( instring[lastE:s] ) - if t: - if isinstance(t,ParseResults): - out += t.asList() - elif isinstance(t,list): - out += t - else: - out.append(t) - lastE = e - out.append(instring[lastE:]) - out = [o for o in out if o] - return "".join(map(_ustr,_flatten(out))) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def searchString( self, instring, maxMatches=_MAX_INT ): - """ - Another extension to C{L{scanString}}, simplifying the access to the tokens found - to match the given parse expression. May be called with optional - C{maxMatches} argument, to clip searching after 'n' matches are found. 
- - Example:: - # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters - cap_word = Word(alphas.upper(), alphas.lower()) - - print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) - - # the sum() builtin can be used to merge results into a single ParseResults object - print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) - prints:: - [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] - ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] - """ - try: - return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): - """ - Generator method to split a string using the given expression as a separator. - May be called with optional C{maxsplit} argument, to limit the number of splits; - and the optional C{includeSeparators} argument (default=C{False}), if the separating - matching text should be included in the split results. - - Example:: - punc = oneOf(list(".,;:/-!?")) - print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) - prints:: - ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] - """ - splits = 0 - last = 0 - for t,s,e in self.scanString(instring, maxMatches=maxsplit): - yield instring[last:s] - if includeSeparators: - yield t[0] - last = e - yield instring[last:] - - def __add__(self, other ): - """ - Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement - converts them to L{Literal}s by default. - - Example:: - greet = Word(alphas) + "," + Word(alphas) + "!" - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - Prints:: - Hello, World! 
-> ['Hello', ',', 'World', '!'] - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return And( [ self, other ] ) - - def __radd__(self, other ): - """ - Implementation of + operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other + self - - def __sub__(self, other): - """ - Implementation of - operator, returns C{L{And}} with error stop - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return self + And._ErrorStop() + other - - def __rsub__(self, other ): - """ - Implementation of - operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other - self - - def __mul__(self,other): - """ - Implementation of * operator, allows use of C{expr * 3} in place of - C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer - tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples - may also include C{None} as in: - - C{expr*(n,None)} or C{expr*(n,)} is equivalent - to C{expr*n + L{ZeroOrMore}(expr)} - (read as "at least n instances of C{expr}") - - C{expr*(None,n)} is equivalent to C{expr*(0,n)} - (read as "0 to n instances of C{expr}") - - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} - - Note that C{expr*(None,n)} does not raise an exception if - more than n exprs exist in the input stream; that is, - C{expr*(None,n)} does not enforce a maximum number of expr - occurrences. 
If this behavior is desired, then write - C{expr*(None,n) + ~expr} - """ - if isinstance(other,int): - minElements, optElements = other,0 - elif isinstance(other,tuple): - other = (other + (None, None))[:2] - if other[0] is None: - other = (0, other[1]) - if isinstance(other[0],int) and other[1] is None: - if other[0] == 0: - return ZeroOrMore(self) - if other[0] == 1: - return OneOrMore(self) - else: - return self*other[0] + ZeroOrMore(self) - elif isinstance(other[0],int) and isinstance(other[1],int): - minElements, optElements = other - optElements -= minElements - else: - raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) - else: - raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) - - if minElements < 0: - raise ValueError("cannot multiply ParserElement by negative value") - if optElements < 0: - raise ValueError("second tuple value must be greater or equal to first tuple value") - if minElements == optElements == 0: - raise ValueError("cannot multiply ParserElement by 0 or (0,0)") - - if (optElements): - def makeOptionalList(n): - if n>1: - return Optional(self + makeOptionalList(n-1)) - else: - return Optional(self) - if minElements: - if minElements == 1: - ret = self + makeOptionalList(optElements) - else: - ret = And([self]*minElements) + makeOptionalList(optElements) - else: - ret = makeOptionalList(optElements) - else: - if minElements == 1: - ret = self - else: - ret = And([self]*minElements) - return ret - - def __rmul__(self, other): - return self.__mul__(other) - - def __or__(self, other ): - """ - Implementation of | operator - returns C{L{MatchFirst}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return MatchFirst( [ self, other ] ) - - def __ror__(self, other ): - """ - Implementation of | operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other | self - - def __xor__(self, other ): - """ - Implementation of ^ operator - returns C{L{Or}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return Or( [ self, other ] ) - - def __rxor__(self, other ): - """ - Implementation of ^ operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other ^ self - - def __and__(self, other ): - """ - Implementation of & operator - returns C{L{Each}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - 
return None - return Each( [ self, other ] ) - - def __rand__(self, other ): - """ - Implementation of & operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other & self - - def __invert__( self ): - """ - Implementation of ~ operator - returns C{L{NotAny}} - """ - return NotAny( self ) - - def __call__(self, name=None): - """ - Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. - - If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be - passed as C{True}. - - If C{name} is omitted, same as calling C{L{copy}}. - - Example:: - # these are equivalent - userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") - """ - if name is not None: - return self.setResultsName(name) - else: - return self.copy() - - def suppress( self ): - """ - Suppresses the output of this C{ParserElement}; useful to keep punctuation from - cluttering up returned output. - """ - return Suppress( self ) - - def leaveWhitespace( self ): - """ - Disables the skipping of whitespace before matching the characters in the - C{ParserElement}'s defined pattern. This is normally only used internally by - the pyparsing module, but may be needed in some whitespace-sensitive grammars. - """ - self.skipWhitespace = False - return self - - def setWhitespaceChars( self, chars ): - """ - Overrides the default whitespace chars - """ - self.skipWhitespace = True - self.whiteChars = chars - self.copyDefaultWhiteChars = False - return self - - def parseWithTabs( self ): - """ - Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string. - Must be called before C{parseString} when the input grammar contains elements that - match C{<TAB>} characters. - """ - self.keepTabs = True - return self - - def ignore( self, other ): - """ - Define expression to be ignored (e.g., comments) while doing pattern - matching; may be called repeatedly, to define multiple comment or other - ignorable patterns. - - Example:: - patt = OneOrMore(Word(alphas)) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] - - patt.ignore(cStyleComment) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] - """ - if isinstance(other, basestring): - other = Suppress(other) - - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - self.ignoreExprs.append(other) - else: - self.ignoreExprs.append( Suppress( other.copy() ) ) - return self - - def setDebugActions( self, startAction, successAction, exceptionAction ): - """ - Enable display of debugging messages while doing pattern matching. - """ - self.debugActions = (startAction or _defaultStartDebugAction, - successAction or _defaultSuccessDebugAction, - exceptionAction or _defaultExceptionDebugAction) - self.debug = True - return self - - def setDebug( self, flag=True ): - """ - Enable display of debugging messages while doing pattern matching. - Set C{flag} to True to enable, False to disable. 
- - Example:: - wd = Word(alphas).setName("alphaword") - integer = Word(nums).setName("numword") - term = wd | integer - - # turn on debugging for wd - wd.setDebug() - - OneOrMore(term).parseString("abc 123 xyz 890") - - prints:: - Match alphaword at loc 0(1,1) - Matched alphaword -> ['abc'] - Match alphaword at loc 3(1,4) - Exception raised:Expected alphaword (at char 4), (line:1, col:5) - Match alphaword at loc 7(1,8) - Matched alphaword -> ['xyz'] - Match alphaword at loc 11(1,12) - Exception raised:Expected alphaword (at char 12), (line:1, col:13) - Match alphaword at loc 15(1,16) - Exception raised:Expected alphaword (at char 15), (line:1, col:16) - - The output shown is that produced by the default debug actions - custom debug actions can be - specified using L{setDebugActions}. Prior to attempting - to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"} - is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} - message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, - which makes debugging and exception messages easier to understand - for instance, the default - name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. - """ - if flag: - self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) - else: - self.debug = False - return self - - def __str__( self ): - return self.name - - def __repr__( self ): - return _ustr(self) - - def streamline( self ): - self.streamlined = True - self.strRepr = None - return self - - def checkRecursion( self, parseElementList ): - pass - - def validate( self, validateTrace=[] ): - """ - Check defined expressions for valid structure, check for infinite recursive definitions. - """ - self.checkRecursion( [] ) - - def parseFile( self, file_or_filename, parseAll=False ): - """ - Execute the parse expression on the given file or filename. - If a filename is specified (instead of a file object), - the entire file is opened, read, and closed before parsing. - """ - try: - file_contents = file_or_filename.read() - except AttributeError: - with open(file_or_filename, "r") as f: - file_contents = f.read() - try: - return self.parseString(file_contents, parseAll) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def __eq__(self,other): - if isinstance(other, ParserElement): - return self is other or vars(self) == vars(other) - elif isinstance(other, basestring): - return self.matches(other) - else: - return super(ParserElement,self)==other - - def __ne__(self,other): - return not (self == other) - - def __hash__(self): - return hash(id(self)) - - def __req__(self,other): - return self == other - - def __rne__(self,other): - return not (self == other) - - def matches(self, testString, parseAll=True): - """ - Method for quick testing of a parser against a test string. Good for simple - inline microtests of sub expressions while building up larger parser. 
- - Parameters: - - testString - to test against this expression for a match - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - Example:: - expr = Word(nums) - assert expr.matches("100") - """ - try: - self.parseString(_ustr(testString), parseAll=parseAll) - return True - except ParseBaseException: - return False - - def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): - """ - Execute the parse expression on a series of test strings, showing each - test, the parsed results or where the parse failed. Quick and easy way to - run a parse expression against a list of sample strings. - - Parameters: - - tests - a list of separate test strings, or a multiline string of test strings - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - comment - (default=C{'#'}) - expression for indicating embedded comments in the test - string; pass None to disable comment filtering - - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; - if False, only dump nested list - - printResults - (default=C{True}) prints test output to stdout - - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing - - Returns: a (success, results) tuple, where success indicates that all tests succeeded - (or failed if C{failureTests} is True), and the results contain a list of lines of each - test's output - - Example:: - number_expr = pyparsing_common.number.copy() - - result = number_expr.runTests(''' - # unsigned integer - 100 - # negative integer - -100 - # float with scientific notation - 6.02e23 - # integer with scientific notation - 1e-12 - ''') - print("Success" if result[0] else "Failed!") - - result = number_expr.runTests(''' - # stray character - 100Z - # missing leading digit before '.' - -.100 - # too many '.' - 3.14.159 - ''', failureTests=True) - print("Success" if result[0] else "Failed!") - prints:: - # unsigned integer - 100 - [100] - - # negative integer - -100 - [-100] - - # float with scientific notation - 6.02e23 - [6.02e+23] - - # integer with scientific notation - 1e-12 - [1e-12] - - Success - - # stray character - 100Z - ^ - FAIL: Expected end of text (at char 3), (line:1, col:4) - - # missing leading digit before '.' - -.100 - ^ - FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) - - # too many '.' - 3.14.159 - ^ - FAIL: Expected end of text (at char 4), (line:1, col:5) - - Success - - Each test string must be on a single line. If you want to test a string that spans multiple - lines, create a test like this:: - - expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") - - (Note that this is a raw string literal, you must include the leading 'r'.) 
- """ - if isinstance(tests, basestring): - tests = list(map(str.strip, tests.rstrip().splitlines())) - if isinstance(comment, basestring): - comment = Literal(comment) - allResults = [] - comments = [] - success = True - for t in tests: - if comment is not None and comment.matches(t, False) or comments and not t: - comments.append(t) - continue - if not t: - continue - out = ['\n'.join(comments), t] - comments = [] - try: - t = t.replace(r'\n','\n') - result = self.parseString(t, parseAll=parseAll) - out.append(result.dump(full=fullDump)) - success = success and not failureTests - except ParseBaseException as pe: - fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" - if '\n' in t: - out.append(line(pe.loc, t)) - out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) - else: - out.append(' '*pe.loc + '^' + fatal) - out.append("FAIL: " + str(pe)) - success = success and failureTests - result = pe - except Exception as exc: - out.append("FAIL-EXCEPTION: " + str(exc)) - success = success and failureTests - result = exc - - if printResults: - if fullDump: - out.append('') - print('\n'.join(out)) - - allResults.append((t, result)) - - return success, allResults - - -class Token(ParserElement): - """ - Abstract C{ParserElement} subclass, for defining atomic matching patterns. - """ - def __init__( self ): - super(Token,self).__init__( savelist=False ) - - -class Empty(Token): - """ - An empty token, will always match. - """ - def __init__( self ): - super(Empty,self).__init__() - self.name = "Empty" - self.mayReturnEmpty = True - self.mayIndexError = False - - -class NoMatch(Token): - """ - A token that will never match. - """ - def __init__( self ): - super(NoMatch,self).__init__() - self.name = "NoMatch" - self.mayReturnEmpty = True - self.mayIndexError = False - self.errmsg = "Unmatchable token" - - def parseImpl( self, instring, loc, doActions=True ): - raise ParseException(instring, loc, self.errmsg, self) - - -class Literal(Token): - """ - Token to exactly match a specified string. - - Example:: - Literal('blah').parseString('blah') # -> ['blah'] - Literal('blah').parseString('blahfooblah') # -> ['blah'] - Literal('blah').parseString('bla') # -> Exception: Expected "blah" - - For case-insensitive matching, use L{CaselessLiteral}. - - For keyword matching (force word break before and after the matched string), - use L{Keyword} or L{CaselessKeyword}. 
- """ - def __init__( self, matchString ): - super(Literal,self).__init__() - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Literal; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.__class__ = Empty - self.name = '"%s"' % _ustr(self.match) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - - # Performance tuning: this routine gets called a *lot* - # if this is a single character match string and the first character matches, - # short-circuit as quickly as possible, and avoid calling startswith - #~ @profile - def parseImpl( self, instring, loc, doActions=True ): - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) -_L = Literal -ParserElement._literalStringClass = Literal - -class Keyword(Token): - """ - Token to exactly match a specified string as a keyword, that is, it must be - immediately followed by a non-keyword character. Compare with C{L{Literal}}: - - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. - - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} - Accepts two optional constructor arguments in addition to the keyword string: - - C{identChars} is a string of characters that would be valid identifier characters, - defaulting to all alphanumerics + "_" and "$" - - C{caseless} allows case-insensitive matching, default is C{False}. - - Example:: - Keyword("start").parseString("start") # -> ['start'] - Keyword("start").parseString("starting") # -> Exception - - For case-insensitive matching, use L{CaselessKeyword}. 
- """ - DEFAULT_KEYWORD_CHARS = alphanums+"_$" - - def __init__( self, matchString, identChars=None, caseless=False ): - super(Keyword,self).__init__() - if identChars is None: - identChars = Keyword.DEFAULT_KEYWORD_CHARS - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Keyword; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.name = '"%s"' % self.match - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - self.caseless = caseless - if caseless: - self.caselessmatch = matchString.upper() - identChars = identChars.upper() - self.identChars = set(identChars) - - def parseImpl( self, instring, loc, doActions=True ): - if self.caseless: - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and - (loc == 0 or instring[loc-1].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - else: - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and - (loc == 0 or instring[loc-1] not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - - def copy(self): - c = super(Keyword,self).copy() - c.identChars = Keyword.DEFAULT_KEYWORD_CHARS - return c - - @staticmethod - def setDefaultKeywordChars( chars ): - """Overrides the default Keyword chars - """ - Keyword.DEFAULT_KEYWORD_CHARS = chars - -class CaselessLiteral(Literal): - """ - Token to match a specified string, ignoring case of letters. - Note: the matched results will always be in the case of the given - match string, NOT the case of the input text. - - Example:: - OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] - - (Contrast with example for L{CaselessKeyword}.) - """ - def __init__( self, matchString ): - super(CaselessLiteral,self).__init__( matchString.upper() ) - # Preserve the defining literal. - self.returnString = matchString - self.name = "'%s'" % self.returnString - self.errmsg = "Expected " + self.name - - def parseImpl( self, instring, loc, doActions=True ): - if instring[ loc:loc+self.matchLen ].upper() == self.match: - return loc+self.matchLen, self.returnString - raise ParseException(instring, loc, self.errmsg, self) - -class CaselessKeyword(Keyword): - """ - Caseless version of L{Keyword}. - - Example:: - OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] - - (Contrast with example for L{CaselessLiteral}.) - """ - def __init__( self, matchString, identChars=None ): - super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) - - def parseImpl( self, instring, loc, doActions=True ): - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - -class CloseMatch(Token): - """ - A variation on L{Literal} which matches "close" matches, that is, - strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: - - C{match_string} - string to be matched - - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match - - The results from a successful parse will contain the matched text from the input string and the following named results: - - C{mismatches} - a list of the positions within the match_string where mismatches were found - - C{original} - the original match_string used to compare against the input string - - If C{mismatches} is an empty list, then the match was an exact match. - - Example:: - patt = CloseMatch("ATCATCGAATGGA") - patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) - patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) - - # exact match - patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) - - # close match allowing up to 2 mismatches - patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) - patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) - """ - def __init__(self, match_string, maxMismatches=1): - super(CloseMatch,self).__init__() - self.name = match_string - self.match_string = match_string - self.maxMismatches = maxMismatches - self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) - self.mayIndexError = False - self.mayReturnEmpty = False - - def parseImpl( self, instring, loc, doActions=True ): - start = loc - instrlen = len(instring) - maxloc = start + len(self.match_string) - - if maxloc <= instrlen: - match_string = self.match_string - match_stringloc = 0 - mismatches = [] - maxMismatches = self.maxMismatches - - for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): - src,mat = s_m - if src != mat: - mismatches.append(match_stringloc) - if len(mismatches) > maxMismatches: - break - else: - loc = match_stringloc + 1 - results = ParseResults([instring[start:loc]]) - results['original'] = self.match_string - results['mismatches'] = mismatches - return loc, results - - raise ParseException(instring, loc, self.errmsg, self) - - -class Word(Token): - """ - Token for matching words composed of allowed character sets. - Defined with string containing all allowed initial characters, - an optional string containing allowed body characters (if omitted, - defaults to the initial character set), and an optional minimum, - maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. An optional - C{excludeChars} parameter can list characters that might be found in - the input C{bodyChars} string; useful to define a word of all printables - except for one or two characters, for instance. - - L{srange} is useful for defining custom character set strings for defining - C{Word} expressions, using range notation from regular expression character sets. - - A common mistake is to use C{Word} to match a specific literal string, as in - C{Word("Address")}. Remember that C{Word} uses the string argument to define - I{sets} of matchable characters. This expression would match "Add", "AAA", - "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. - To match an exact literal string, use L{Literal} or L{Keyword}. 
- - pyparsing includes helper strings for building Words: - - L{alphas} - - L{nums} - - L{alphanums} - - L{hexnums} - - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) - - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) - - L{printables} (any non-whitespace character) - - Example:: - # a word composed of digits - integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) - - # a word with a leading capital, and zero or more lowercase - capital_word = Word(alphas.upper(), alphas.lower()) - - # hostnames are alphanumeric, with leading alpha, and '-' - hostname = Word(alphas, alphanums+'-') - - # roman numeral (not a strict parser, accepts invalid mix of characters) - roman = Word("IVXLCDM") - - # any string of non-whitespace characters, except for ',' - csv_value = Word(printables, excludeChars=",") - """ - def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): - super(Word,self).__init__() - if excludeChars: - initChars = ''.join(c for c in initChars if c not in excludeChars) - if bodyChars: - bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) - self.initCharsOrig = initChars - self.initChars = set(initChars) - if bodyChars : - self.bodyCharsOrig = bodyChars - self.bodyChars = set(bodyChars) - else: - self.bodyCharsOrig = initChars - self.bodyChars = set(initChars) - - self.maxSpecified = max > 0 - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.asKeyword = asKeyword - - if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): - if self.bodyCharsOrig == self.initCharsOrig: - self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) - elif len(self.initCharsOrig) == 1: - self.reString = "%s[%s]*" % \ - (re.escape(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - else: - self.reString = "[%s][%s]*" % \ - (_escapeRegexRangeChars(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - if self.asKeyword: - self.reString = r"\b"+self.reString+r"\b" - try: - self.re = re.compile( self.reString ) - except Exception: - self.re = None - - def parseImpl( self, instring, loc, doActions=True ): - if self.re: - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - return loc, result.group() - - if not(instring[ loc ] in self.initChars): - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - instrlen = len(instring) - bodychars = self.bodyChars - maxloc = start + self.maxLen - maxloc = min( maxloc, instrlen ) - while loc < maxloc and instring[loc] in bodychars: - loc += 1 - - throwException = False - if loc - start < self.minLen: - throwException = True - if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: - throwException = True - if self.asKeyword: - if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): - throwException = True - - if throwException: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, 
instring[start:loc] - - def __str__( self ): - try: - return super(Word,self).__str__() - except Exception: - pass - - - if self.strRepr is None: - - def charsAsStr(s): - if len(s)>4: - return s[:4]+"..." - else: - return s - - if ( self.initCharsOrig != self.bodyCharsOrig ): - self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) - else: - self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) - - return self.strRepr - - -class Regex(Token): - r""" - Token for matching strings that match a given regular expression. - Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. - If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as - named parse results. - - Example:: - realnum = Regex(r"[+-]?\d+\.\d*") - date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') - # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression - roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") - """ - compiledREtype = type(re.compile("[A-Z]")) - def __init__( self, pattern, flags=0): - """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" - super(Regex,self).__init__() - - if isinstance(pattern, basestring): - if not pattern: - warnings.warn("null string passed to Regex; use Empty() instead", - SyntaxWarning, stacklevel=2) - - self.pattern = pattern - self.flags = flags - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % pattern, - SyntaxWarning, stacklevel=2) - raise - - elif isinstance(pattern, Regex.compiledREtype): - self.re = pattern - self.pattern = \ - self.reString = str(pattern) - self.flags = flags - - else: - raise ValueError("Regex may only be constructed with a string or a compiled RE object") - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - d = result.groupdict() - ret = ParseResults(result.group()) - if d: - for k in d: - ret[k] = d[k] - return loc,ret - - def __str__( self ): - try: - return super(Regex,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "Re:(%s)" % repr(self.pattern) - - return self.strRepr - - -class QuotedString(Token): - r""" - Token for matching strings that are delimited by quoting characters. 
- - Defined with the following parameters: - - quoteChar - string of one or more characters defining the quote delimiting string - - escChar - character to escape quotes, typically backslash (default=C{None}) - - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) - - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) - - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) - - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) - - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) - - Example:: - qs = QuotedString('"') - print(qs.searchString('lsjdf "This is the quote" sldjf')) - complex_qs = QuotedString('{{', endQuoteChar='}}') - print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) - sql_qs = QuotedString('"', escQuote='""') - print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) - prints:: - [['This is the quote']] - [['This is the "quote"']] - [['This is the quote with "embedded" quotes']] - """ - def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): - super(QuotedString,self).__init__() - - # remove white space from quote chars - wont work anyway - quoteChar = quoteChar.strip() - if not quoteChar: - warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - if endQuoteChar is None: - endQuoteChar = quoteChar - else: - endQuoteChar = endQuoteChar.strip() - if not endQuoteChar: - warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - self.quoteChar = quoteChar - self.quoteCharLen = len(quoteChar) - self.firstQuoteChar = quoteChar[0] - self.endQuoteChar = endQuoteChar - self.endQuoteCharLen = len(endQuoteChar) - self.escChar = escChar - self.escQuote = escQuote - self.unquoteResults = unquoteResults - self.convertWhitespaceEscapes = convertWhitespaceEscapes - - if multiline: - self.flags = re.MULTILINE | re.DOTALL - self.pattern = r'%s(?:[^%s%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - else: - self.flags = 0 - self.pattern = r'%s(?:[^%s\n\r%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - if len(self.endQuoteChar) > 1: - self.pattern += ( - '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), - _escapeRegexRangeChars(self.endQuoteChar[i])) - for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' - ) - if escQuote: - self.pattern += (r'|(?:%s)' % re.escape(escQuote)) - if escChar: - self.pattern += (r'|(?:%s.)' % re.escape(escChar)) - self.escCharReplacePattern = re.escape(self.escChar)+"(.)" - self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, - SyntaxWarning, stacklevel=2) - raise - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - - def 
parseImpl( self, instring, loc, doActions=True ): - result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = result.group() - - if self.unquoteResults: - - # strip off quotes - ret = ret[self.quoteCharLen:-self.endQuoteCharLen] - - if isinstance(ret,basestring): - # replace escaped whitespace - if '\\' in ret and self.convertWhitespaceEscapes: - ws_map = { - r'\t' : '\t', - r'\n' : '\n', - r'\f' : '\f', - r'\r' : '\r', - } - for wslit,wschar in ws_map.items(): - ret = ret.replace(wslit, wschar) - - # replace escaped characters - if self.escChar: - ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) - - # replace escaped quotes - if self.escQuote: - ret = ret.replace(self.escQuote, self.endQuoteChar) - - return loc, ret - - def __str__( self ): - try: - return super(QuotedString,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) - - return self.strRepr - - -class CharsNotIn(Token): - """ - Token for matching words composed of characters I{not} in a given set (will - include whitespace in matched characters if not listed in the provided exclusion set - see example). - Defined with string containing all disallowed characters, and an optional - minimum, maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. - - Example:: - # define a comma-separated-value as anything that is not a ',' - csv_value = CharsNotIn(',') - print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) - prints:: - ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] - """ - def __init__( self, notChars, min=1, max=0, exact=0 ): - super(CharsNotIn,self).__init__() - self.skipWhitespace = False - self.notChars = notChars - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = ( self.minLen == 0 ) - self.mayIndexError = False - - def parseImpl( self, instring, loc, doActions=True ): - if instring[loc] in self.notChars: - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - notchars = self.notChars - maxlen = min( start+self.maxLen, len(instring) ) - while loc < maxlen and \ - (instring[loc] not in notchars): - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - def __str__( self ): - try: - return super(CharsNotIn, self).__str__() - except Exception: - pass - - if self.strRepr is None: - if len(self.notChars) > 4: - self.strRepr = "!W:(%s...)" % self.notChars[:4] - else: - self.strRepr = "!W:(%s)" % self.notChars - - return self.strRepr - -class White(Token): - """ - Special matching class for matching whitespace. Normally, whitespace is ignored - by pyparsing grammars. This class is included when some whitespace structures - are significant. Define with a string containing the whitespace characters to be - matched; default is C{" \\t\\r\\n"}. 
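# Illustrative usage sketch of QuotedString as documented above: with unquoteResults left
# at its default of True, the delimiters are stripped and escChar escapes are resolved
# (assumes pyparsing 2.x as vendored here).
from pyparsing import QuotedString

qs = QuotedString('"', escChar='\\')
print(qs.parseString(r'"a \"quoted\" word"'))   # -> ['a "quoted" word']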
Also takes optional C{min}, C{max}, and C{exact} arguments, - as defined for the C{L{Word}} class. - """ - whiteStrs = { - " " : "<SPC>", - "\t": "<TAB>", - "\n": "<LF>", - "\r": "<CR>", - "\f": "<FF>", - } - def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): - super(White,self).__init__() - self.matchWhite = ws - self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) - #~ self.leaveWhitespace() - self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) - self.mayReturnEmpty = True - self.errmsg = "Expected " + self.name - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - def parseImpl( self, instring, loc, doActions=True ): - if not(instring[ loc ] in self.matchWhite): - raise ParseException(instring, loc, self.errmsg, self) - start = loc - loc += 1 - maxloc = start + self.maxLen - maxloc = min( maxloc, len(instring) ) - while loc < maxloc and instring[loc] in self.matchWhite: - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - -class _PositionToken(Token): - def __init__( self ): - super(_PositionToken,self).__init__() - self.name=self.__class__.__name__ - self.mayReturnEmpty = True - self.mayIndexError = False - -class GoToColumn(_PositionToken): - """ - Token to advance to a specific column of input text; useful for tabular report scraping. - """ - def __init__( self, colno ): - super(GoToColumn,self).__init__() - self.col = colno - - def preParse( self, instring, loc ): - if col(loc,instring) != self.col: - instrlen = len(instring) - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : - loc += 1 - return loc - - def parseImpl( self, instring, loc, doActions=True ): - thiscol = col( loc, instring ) - if thiscol > self.col: - raise ParseException( instring, loc, "Text not in expected column", self ) - newloc = loc + self.col - thiscol - ret = instring[ loc: newloc ] - return newloc, ret - - -class LineStart(_PositionToken): - """ - Matches if current position is at the beginning of a line within the parse string - - Example:: - - test = '''\ - AAA this line - AAA and this line - AAA but not this one - B AAA and definitely not this one - ''' - - for t in (LineStart() + 'AAA' + restOfLine).searchString(test): - print(t) - - Prints:: - ['AAA', ' this line'] - ['AAA', ' and this line'] - - """ - def __init__( self ): - super(LineStart,self).__init__() - self.errmsg = "Expected start of line" - - def parseImpl( self, instring, loc, doActions=True ): - if col(loc, instring) == 1: - return loc, [] - raise ParseException(instring, loc, self.errmsg, self) - -class LineEnd(_PositionToken): - """ - Matches if current position is at the end of a line within the parse string - """ - def __init__( self ): - super(LineEnd,self).__init__() - self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) - self.errmsg = "Expected end of line" - - def parseImpl( self, instring, loc, doActions=True ): - if loc<len(instring): - if instring[loc] == "\n": - return loc+1, "\n" - else: - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc+1, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class StringStart(_PositionToken): - """ - Matches if current position is at the beginning of 
the parse string - """ - def __init__( self ): - super(StringStart,self).__init__() - self.errmsg = "Expected start of text" - - def parseImpl( self, instring, loc, doActions=True ): - if loc != 0: - # see if entire string up to here is just whitespace and ignoreables - if loc != self.preParse( instring, 0 ): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class StringEnd(_PositionToken): - """ - Matches if current position is at the end of the parse string - """ - def __init__( self ): - super(StringEnd,self).__init__() - self.errmsg = "Expected end of text" - - def parseImpl( self, instring, loc, doActions=True ): - if loc < len(instring): - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc+1, [] - elif loc > len(instring): - return loc, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class WordStart(_PositionToken): - """ - Matches if the current position is at the beginning of a Word, and - is not preceded by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of - the string being parsed, or at the beginning of a line. - """ - def __init__(self, wordChars = printables): - super(WordStart,self).__init__() - self.wordChars = set(wordChars) - self.errmsg = "Not at the start of a word" - - def parseImpl(self, instring, loc, doActions=True ): - if loc != 0: - if (instring[loc-1] in self.wordChars or - instring[loc] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class WordEnd(_PositionToken): - """ - Matches if the current position is at the end of a Word, and - is not followed by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of - the string being parsed, or at the end of a line. - """ - def __init__(self, wordChars = printables): - super(WordEnd,self).__init__() - self.wordChars = set(wordChars) - self.skipWhitespace = False - self.errmsg = "Not at the end of a word" - - def parseImpl(self, instring, loc, doActions=True ): - instrlen = len(instring) - if instrlen>0 and loc<instrlen: - if (instring[loc] in self.wordChars or - instring[loc-1] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - -class ParseExpression(ParserElement): - """ - Abstract subclass of ParserElement, for combining and post-processing parsed tokens. 
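# Illustrative usage sketch of WordEnd as documented above: it anchors a match at a word
# boundary, much like a regex \b (assumes pyparsing 2.x as vendored here).
from pyparsing import Word, WordEnd, alphanums, nums

count = Word(nums) + WordEnd(alphanums)
print(count.searchString("12 apples, 7up"))   # -> [['12']]  ('7' is rejected because 'up' follows)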
- """ - def __init__( self, exprs, savelist = False ): - super(ParseExpression,self).__init__(savelist) - if isinstance( exprs, _generatorType ): - exprs = list(exprs) - - if isinstance( exprs, basestring ): - self.exprs = [ ParserElement._literalStringClass( exprs ) ] - elif isinstance( exprs, Iterable ): - exprs = list(exprs) - # if sequence of strings provided, wrap with Literal - if all(isinstance(expr, basestring) for expr in exprs): - exprs = map(ParserElement._literalStringClass, exprs) - self.exprs = list(exprs) - else: - try: - self.exprs = list( exprs ) - except TypeError: - self.exprs = [ exprs ] - self.callPreparse = False - - def __getitem__( self, i ): - return self.exprs[i] - - def append( self, other ): - self.exprs.append( other ) - self.strRepr = None - return self - - def leaveWhitespace( self ): - """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on - all contained expressions.""" - self.skipWhitespace = False - self.exprs = [ e.copy() for e in self.exprs ] - for e in self.exprs: - e.leaveWhitespace() - return self - - def ignore( self, other ): - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - super( ParseExpression, self).ignore( other ) - for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) - else: - super( ParseExpression, self).ignore( other ) - for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) - return self - - def __str__( self ): - try: - return super(ParseExpression,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) - return self.strRepr - - def streamline( self ): - super(ParseExpression,self).streamline() - - for e in self.exprs: - e.streamline() - - # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) - # but only if there are no parse actions or resultsNames on the nested And's - # (likewise for Or's and MatchFirst's) - if ( len(self.exprs) == 2 ): - other = self.exprs[0] - if ( isinstance( other, self.__class__ ) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): - self.exprs = other.exprs[:] + [ self.exprs[1] ] - self.strRepr = None - self.mayReturnEmpty |= other.mayReturnEmpty - self.mayIndexError |= other.mayIndexError - - other = self.exprs[-1] - if ( isinstance( other, self.__class__ ) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): - self.exprs = self.exprs[:-1] + other.exprs[:] - self.strRepr = None - self.mayReturnEmpty |= other.mayReturnEmpty - self.mayIndexError |= other.mayIndexError - - self.errmsg = "Expected " + _ustr(self) - - return self - - def setResultsName( self, name, listAllMatches=False ): - ret = super(ParseExpression,self).setResultsName(name,listAllMatches) - return ret - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] - for e in self.exprs: - e.validate(tmp) - self.checkRecursion( [] ) - - def copy(self): - ret = super(ParseExpression,self).copy() - ret.exprs = [e.copy() for e in self.exprs] - return ret - -class And(ParseExpression): - """ - Requires all given C{ParseExpression}s to be found in the given order. - Expressions may be separated by whitespace. - May be constructed using the C{'+'} operator. - May also be constructed using the C{'-'} operator, which will suppress backtracking. 
- - Example:: - integer = Word(nums) - name_expr = OneOrMore(Word(alphas)) - - expr = And([integer("id"),name_expr("name"),integer("age")]) - # more easily written as: - expr = integer("id") + name_expr("name") + integer("age") - """ - - class _ErrorStop(Empty): - def __init__(self, *args, **kwargs): - super(And._ErrorStop,self).__init__(*args, **kwargs) - self.name = '-' - self.leaveWhitespace() - - def __init__( self, exprs, savelist = True ): - super(And,self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.setWhitespaceChars( self.exprs[0].whiteChars ) - self.skipWhitespace = self.exprs[0].skipWhitespace - self.callPreparse = True - - def parseImpl( self, instring, loc, doActions=True ): - # pass False as last arg to _parse for first element, since we already - # pre-parsed the string as part of our And pre-parsing - loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) - errorStop = False - for e in self.exprs[1:]: - if isinstance(e, And._ErrorStop): - errorStop = True - continue - if errorStop: - try: - loc, exprtokens = e._parse( instring, loc, doActions ) - except ParseSyntaxException: - raise - except ParseBaseException as pe: - pe.__traceback__ = None - raise ParseSyntaxException._from_exception(pe) - except IndexError: - raise ParseSyntaxException(instring, len(instring), self.errmsg, self) - else: - loc, exprtokens = e._parse( instring, loc, doActions ) - if exprtokens or exprtokens.haskeys(): - resultlist += exprtokens - return loc, resultlist - - def __iadd__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #And( [ self, other ] ) - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - if not e.mayReturnEmpty: - break - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - -class Or(ParseExpression): - """ - Requires that at least one C{ParseExpression} is found. - If two expressions match, the expression that matches the longest string will be used. - May be constructed using the C{'^'} operator. - - Example:: - # construct Or using '^' operator - - number = Word(nums) ^ Combine(Word(nums) + '.' 
+ Word(nums)) - print(number.searchString("123 3.1416 789")) - prints:: - [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(Or,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - matches = [] - for e in self.exprs: - try: - loc2 = e.tryParse( instring, loc ) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - else: - # save match among all matches, to retry longest to shortest - matches.append((loc2, e)) - - if matches: - matches.sort(key=lambda x: -x[0]) - for _,e in matches: - try: - return e._parse( instring, loc, doActions ) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - - def __ixor__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #Or( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class MatchFirst(ParseExpression): - """ - Requires that at least one C{ParseExpression} is found. - If two expressions match, the first one listed is the one that will match. - May be constructed using the C{'|'} operator. - - Example:: - # construct MatchFirst using '|' operator - - # watch the order of expressions to match - number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) - print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] - - # put more selective expression first - number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) - print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(MatchFirst,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - for e in self.exprs: - try: - ret = e._parse( instring, loc, doActions ) - return ret - except ParseException as err: - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - - # only got here if no expression matched, raise exception for match that made it the furthest - else: - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - def __ior__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #MatchFirst( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class Each(ParseExpression): - """ - Requires all given C{ParseExpression}s to be found, but in any order. - Expressions may be separated by whitespace. - May be constructed using the C{'&'} operator. 
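# Illustrative sketch contrasting Or and MatchFirst as documented above: '^' retries the
# longest matching alternative, '|' takes the first alternative that matches (assumes
# pyparsing 2.x as vendored here).
from pyparsing import Combine, Word, nums

real = Combine(Word(nums) + '.' + Word(nums))
print((Word(nums) ^ real).parseString("3.1416"))   # -> ['3.1416']  (longest match wins)
print((Word(nums) | real).parseString("3.1416"))   # -> ['3']       (first listed wins)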
- - Example:: - color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") - shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") - integer = Word(nums) - shape_attr = "shape:" + shape_type("shape") - posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") - color_attr = "color:" + color("color") - size_attr = "size:" + integer("size") - - # use Each (using operator '&') to accept attributes in any order - # (shape and posn are required, color and size are optional) - shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) - - shape_spec.runTests(''' - shape: SQUARE color: BLACK posn: 100, 120 - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - color:GREEN size:20 shape:TRIANGLE posn:20,40 - ''' - ) - prints:: - shape: SQUARE color: BLACK posn: 100, 120 - ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - - color: BLACK - - posn: ['100', ',', '120'] - - x: 100 - - y: 120 - - shape: SQUARE - - - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] - - color: BLUE - - posn: ['50', ',', '80'] - - x: 50 - - y: 80 - - shape: CIRCLE - - size: 50 - - - color: GREEN size: 20 shape: TRIANGLE posn: 20,40 - ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] - - color: GREEN - - posn: ['20', ',', '40'] - - x: 20 - - y: 40 - - shape: TRIANGLE - - size: 20 - """ - def __init__( self, exprs, savelist = True ): - super(Each,self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.skipWhitespace = True - self.initExprGroups = True - - def parseImpl( self, instring, loc, doActions=True ): - if self.initExprGroups: - self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) - opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] - opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] - self.optionals = opt1 + opt2 - self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] - self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] - self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] - self.required += self.multirequired - self.initExprGroups = False - tmpLoc = loc - tmpReqd = self.required[:] - tmpOpt = self.optionals[:] - matchOrder = [] - - keepMatching = True - while keepMatching: - tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired - failed = [] - for e in tmpExprs: - try: - tmpLoc = e.tryParse( instring, tmpLoc ) - except ParseException: - failed.append(e) - else: - matchOrder.append(self.opt1map.get(id(e),e)) - if e in tmpReqd: - tmpReqd.remove(e) - elif e in tmpOpt: - tmpOpt.remove(e) - if len(failed) == len(tmpExprs): - keepMatching = False - - if tmpReqd: - missing = ", ".join(_ustr(e) for e in tmpReqd) - raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) - - # add any unmatched Optionals, in case they have default values defined - matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] - - resultlist = [] - for e in matchOrder: - loc,results = e._parse(instring,loc,doActions) - resultlist.append(results) - - finalResults = sum(resultlist, ParseResults([])) - return loc, finalResults - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " & 
".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class ParseElementEnhance(ParserElement): - """ - Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. - """ - def __init__( self, expr, savelist=False ): - super(ParseElementEnhance,self).__init__(savelist) - if isinstance( expr, basestring ): - if issubclass(ParserElement._literalStringClass, Token): - expr = ParserElement._literalStringClass(expr) - else: - expr = ParserElement._literalStringClass(Literal(expr)) - self.expr = expr - self.strRepr = None - if expr is not None: - self.mayIndexError = expr.mayIndexError - self.mayReturnEmpty = expr.mayReturnEmpty - self.setWhitespaceChars( expr.whiteChars ) - self.skipWhitespace = expr.skipWhitespace - self.saveAsList = expr.saveAsList - self.callPreparse = expr.callPreparse - self.ignoreExprs.extend(expr.ignoreExprs) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr is not None: - return self.expr._parse( instring, loc, doActions, callPreParse=False ) - else: - raise ParseException("",loc,self.errmsg,self) - - def leaveWhitespace( self ): - self.skipWhitespace = False - self.expr = self.expr.copy() - if self.expr is not None: - self.expr.leaveWhitespace() - return self - - def ignore( self, other ): - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - else: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - return self - - def streamline( self ): - super(ParseElementEnhance,self).streamline() - if self.expr is not None: - self.expr.streamline() - return self - - def checkRecursion( self, parseElementList ): - if self in parseElementList: - raise RecursiveGrammarException( parseElementList+[self] ) - subRecCheckList = parseElementList[:] + [ self ] - if self.expr is not None: - self.expr.checkRecursion( subRecCheckList ) - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion( [] ) - - def __str__( self ): - try: - return super(ParseElementEnhance,self).__str__() - except Exception: - pass - - if self.strRepr is None and self.expr is not None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) - return self.strRepr - - -class FollowedBy(ParseElementEnhance): - """ - Lookahead matching of the given parse expression. C{FollowedBy} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression matches at the current - position. C{FollowedBy} always returns a null token list. 
- - Example:: - # use FollowedBy to match a label only if it is followed by a ':' - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() - prints:: - [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] - """ - def __init__( self, expr ): - super(FollowedBy,self).__init__(expr) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - self.expr.tryParse( instring, loc ) - return loc, [] - - -class NotAny(ParseElementEnhance): - """ - Lookahead to disallow matching with the given parse expression. C{NotAny} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression does I{not} match at the current - position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} - always returns a null token list. May be constructed using the '~' operator. - - Example:: - - """ - def __init__( self, expr ): - super(NotAny,self).__init__(expr) - #~ self.leaveWhitespace() - self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs - self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr.canParseNext(instring, loc): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "~{" + _ustr(self.expr) + "}" - - return self.strRepr - -class _MultipleMatch(ParseElementEnhance): - def __init__( self, expr, stopOn=None): - super(_MultipleMatch, self).__init__(expr) - self.saveAsList = True - ender = stopOn - if isinstance(ender, basestring): - ender = ParserElement._literalStringClass(ender) - self.not_ender = ~ender if ender is not None else None - - def parseImpl( self, instring, loc, doActions=True ): - self_expr_parse = self.expr._parse - self_skip_ignorables = self._skipIgnorables - check_ender = self.not_ender is not None - if check_ender: - try_not_ender = self.not_ender.tryParse - - # must be at least one (but first see if we are the stopOn sentinel; - # if so, fail) - if check_ender: - try_not_ender(instring, loc) - loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) - try: - hasIgnoreExprs = (not not self.ignoreExprs) - while 1: - if check_ender: - try_not_ender(instring, loc) - if hasIgnoreExprs: - preloc = self_skip_ignorables( instring, loc ) - else: - preloc = loc - loc, tmptokens = self_expr_parse( instring, preloc, doActions ) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens - except (ParseException,IndexError): - pass - - return loc, tokens - -class OneOrMore(_MultipleMatch): - """ - Repetition of one or more of the given expression. - - Parameters: - - expr - expression that must match one or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: BLACK" - OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] - - # use stopOn attribute for OneOrMore to avoid reading label string as part of the data - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] - - # could also be written as - (attr_expr * (1,)).parseString(text).pprint() - """ - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + _ustr(self.expr) + "}..." - - return self.strRepr - -class ZeroOrMore(_MultipleMatch): - """ - Optional repetition of zero or more of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example: similar to L{OneOrMore} - """ - def __init__( self, expr, stopOn=None): - super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) - except (ParseException,IndexError): - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]..." - - return self.strRepr - -class _NullToken(object): - def __bool__(self): - return False - __nonzero__ = __bool__ - def __str__(self): - return "" - -_optionalNotMatched = _NullToken() -class Optional(ParseElementEnhance): - """ - Optional matching of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - default (optional) - value to be returned if the optional expression is not found. - - Example:: - # US postal code can be a 5-digit zip, plus optional 4-digit qualifier - zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) - zip.runTests(''' - # traditional ZIP code - 12345 - - # ZIP+4 form - 12101-0001 - - # invalid ZIP - 98765- - ''') - prints:: - # traditional ZIP code - 12345 - ['12345'] - - # ZIP+4 form - 12101-0001 - ['12101-0001'] - - # invalid ZIP - 98765- - ^ - FAIL: Expected end of text (at char 5), (line:1, col:6) - """ - def __init__( self, expr, default=_optionalNotMatched ): - super(Optional,self).__init__( expr, savelist=False ) - self.saveAsList = self.expr.saveAsList - self.defaultValue = default - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - except (ParseException,IndexError): - if self.defaultValue is not _optionalNotMatched: - if self.expr.resultsName: - tokens = ParseResults([ self.defaultValue ]) - tokens[self.expr.resultsName] = self.defaultValue - else: - tokens = [ self.defaultValue ] - else: - tokens = [] - return loc, tokens - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]" - - return self.strRepr - -class SkipTo(ParseElementEnhance): - """ - Token for skipping over all undefined text until the matched expression is found. 
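# Illustrative usage sketch of Optional as documented above: when a default is supplied it
# is returned in place of a missing match (assumes pyparsing 2.x; hypothetical ZIP grammar).
from pyparsing import Optional, Word, nums

zip_code = Word(nums, exact=5) + Optional(Word(nums, exact=4), default="0000")
print(zip_code.parseString("12345"))        # -> ['12345', '0000']
print(zip_code.parseString("12345 6789"))   # -> ['12345', '6789']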
- - Parameters: - - expr - target expression marking the end of the data to be skipped - - include - (default=C{False}) if True, the target expression is also parsed - (the skipped text and target expression are returned as a 2-element list). - - ignore - (default=C{None}) used to define grammars (typically quoted strings and - comments) that might contain false matches to the target expression - - failOn - (default=C{None}) define expressions that are not allowed to be - included in the skipped test; if found before the target expression is found, - the SkipTo is not a match - - Example:: - report = ''' - Outstanding Issues Report - 1 Jan 2000 - - # | Severity | Description | Days Open - -----+----------+-------------------------------------------+----------- - 101 | Critical | Intermittent system crash | 6 - 94 | Cosmetic | Spelling error on Login ('log|n') | 14 - 79 | Minor | System slow when running too many reports | 47 - ''' - integer = Word(nums) - SEP = Suppress('|') - # use SkipTo to simply match everything up until the next SEP - # - ignore quoted strings, so that a '|' character inside a quoted string does not match - # - parse action will call token.strip() for each matched token, i.e., the description body - string_data = SkipTo(SEP, ignore=quotedString) - string_data.setParseAction(tokenMap(str.strip)) - ticket_expr = (integer("issue_num") + SEP - + string_data("sev") + SEP - + string_data("desc") + SEP - + integer("days_open")) - - for tkt in ticket_expr.searchString(report): - print tkt.dump() - prints:: - ['101', 'Critical', 'Intermittent system crash', '6'] - - days_open: 6 - - desc: Intermittent system crash - - issue_num: 101 - - sev: Critical - ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] - - days_open: 14 - - desc: Spelling error on Login ('log|n') - - issue_num: 94 - - sev: Cosmetic - ['79', 'Minor', 'System slow when running too many reports', '47'] - - days_open: 47 - - desc: System slow when running too many reports - - issue_num: 79 - - sev: Minor - """ - def __init__( self, other, include=False, ignore=None, failOn=None ): - super( SkipTo, self ).__init__( other ) - self.ignoreExpr = ignore - self.mayReturnEmpty = True - self.mayIndexError = False - self.includeMatch = include - self.asList = False - if isinstance(failOn, basestring): - self.failOn = ParserElement._literalStringClass(failOn) - else: - self.failOn = failOn - self.errmsg = "No match found for "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - startloc = loc - instrlen = len(instring) - expr = self.expr - expr_parse = self.expr._parse - self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None - self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - - tmploc = loc - while tmploc <= instrlen: - if self_failOn_canParseNext is not None: - # break if failOn expression matches - if self_failOn_canParseNext(instring, tmploc): - break - - if self_ignoreExpr_tryParse is not None: - # advance past ignore expressions - while 1: - try: - tmploc = self_ignoreExpr_tryParse(instring, tmploc) - except ParseBaseException: - break - - try: - expr_parse(instring, tmploc, doActions=False, callPreParse=False) - except (ParseException, IndexError): - # no match, advance loc in string - tmploc += 1 - else: - # matched skipto expr, done - break - - else: - # ran off the end of the input string without matching skipto expr, fail - raise ParseException(instring, loc, self.errmsg, self) - - # build up return 
values - loc = tmploc - skiptext = instring[startloc:loc] - skipresult = ParseResults(skiptext) - - if self.includeMatch: - loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) - skipresult += mat - - return loc, skipresult - -class Forward(ParseElementEnhance): - """ - Forward declaration of an expression to be defined later - - used for recursive grammars, such as algebraic infix notation. - When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. - - Note: take care when assigning to C{Forward} not to overlook precedence of operators. - Specifically, '|' has a lower precedence than '<<', so that:: - fwdExpr << a | b | c - will actually be evaluated as:: - (fwdExpr << a) | b | c - thereby leaving b and c out as parseable alternatives. It is recommended that you - explicitly group the values inserted into the C{Forward}:: - fwdExpr << (a | b | c) - Converting to use the '<<=' operator instead will avoid this problem. - - See L{ParseResults.pprint} for an example of a recursive parser created using - C{Forward}. - """ - def __init__( self, other=None ): - super(Forward,self).__init__( other, savelist=False ) - - def __lshift__( self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass(other) - self.expr = other - self.strRepr = None - self.mayIndexError = self.expr.mayIndexError - self.mayReturnEmpty = self.expr.mayReturnEmpty - self.setWhitespaceChars( self.expr.whiteChars ) - self.skipWhitespace = self.expr.skipWhitespace - self.saveAsList = self.expr.saveAsList - self.ignoreExprs.extend(self.expr.ignoreExprs) - return self - - def __ilshift__(self, other): - return self << other - - def leaveWhitespace( self ): - self.skipWhitespace = False - return self - - def streamline( self ): - if not self.streamlined: - self.streamlined = True - if self.expr is not None: - self.expr.streamline() - return self - - def validate( self, validateTrace=[] ): - if self not in validateTrace: - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion([]) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - return self.__class__.__name__ + ": ..." - - # stubbed out for now - creates awful memory and perf issues - self._revertClass = self.__class__ - self.__class__ = _ForwardNoRecurse - try: - if self.expr is not None: - retString = _ustr(self.expr) - else: - retString = "None" - finally: - self.__class__ = self._revertClass - return self.__class__.__name__ + ": " + retString - - def copy(self): - if self.expr is not None: - return super(Forward,self).copy() - else: - ret = Forward() - ret <<= self - return ret - -class _ForwardNoRecurse(Forward): - def __str__( self ): - return "..." - -class TokenConverter(ParseElementEnhance): - """ - Abstract subclass of C{ParseExpression}, for converting parsed results. - """ - def __init__( self, expr, savelist=False ): - super(TokenConverter,self).__init__( expr )#, savelist ) - self.saveAsList = False - -class Combine(TokenConverter): - """ - Converter to concatenate all matching tokens to a single string. - By default, the matching patterns must also be contiguous in the input string; - this can be disabled by specifying C{'adjacent=False'} in the constructor. - - Example:: - real = Word(nums) + '.' + Word(nums) - print(real.parseString('3.1416')) # -> ['3', '.', '1416'] - # will also erroneously match the following - print(real.parseString('3. 
1416')) # -> ['3', '.', '1416'] - - real = Combine(Word(nums) + '.' + Word(nums)) - print(real.parseString('3.1416')) # -> ['3.1416'] - # no match when there are internal spaces - print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) - """ - def __init__( self, expr, joinString="", adjacent=True ): - super(Combine,self).__init__( expr ) - # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself - if adjacent: - self.leaveWhitespace() - self.adjacent = adjacent - self.skipWhitespace = True - self.joinString = joinString - self.callPreparse = True - - def ignore( self, other ): - if self.adjacent: - ParserElement.ignore(self, other) - else: - super( Combine, self).ignore( other ) - return self - - def postParse( self, instring, loc, tokenlist ): - retToks = tokenlist.copy() - del retToks[:] - retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) - - if self.resultsName and retToks.haskeys(): - return [ retToks ] - else: - return retToks - -class Group(TokenConverter): - """ - Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. - - Example:: - ident = Word(alphas) - num = Word(nums) - term = ident | num - func = ident + Optional(delimitedList(term)) - print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] - - func = ident + Group(Optional(delimitedList(term))) - print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] - """ - def __init__( self, expr ): - super(Group,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - return [ tokenlist ] - -class Dict(TokenConverter): - """ - Converter to return a repetitive expression as a list, but also as a dictionary. - Each element can also be referenced using the first token in the expression as its key. - Useful for tabular report scraping when the first column can be used as a item key. - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - # print attributes as plain groups - print(OneOrMore(attr_expr).parseString(text).dump()) - - # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names - result = Dict(OneOrMore(Group(attr_expr))).parseString(text) - print(result.dump()) - - # access named fields as dict entries, or output as dict - print(result['shape']) - print(result.asDict()) - prints:: - ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] - - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} - See more examples at L{ParseResults} of accessing fields by results name. 
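# Illustrative usage sketch of Forward as documented above: declare the expression first,
# then fill it in with '<<=' so the grammar can refer to itself (assumes pyparsing 2.x as
# vendored here).
from pyparsing import Forward, Group, Suppress, Word, ZeroOrMore, nums

nested = Forward()
nested <<= Word(nums) | Group(Suppress('(') + ZeroOrMore(nested) + Suppress(')'))
print(nested.parseString("(1 (2 3) 4)"))   # -> [['1', ['2', '3'], '4']]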
- """ - def __init__( self, expr ): - super(Dict,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - for i,tok in enumerate(tokenlist): - if len(tok) == 0: - continue - ikey = tok[0] - if isinstance(ikey,int): - ikey = _ustr(tok[0]).strip() - if len(tok)==1: - tokenlist[ikey] = _ParseResultsWithOffset("",i) - elif len(tok)==2 and not isinstance(tok[1],ParseResults): - tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) - else: - dictvalue = tok.copy() #ParseResults(i) - del dictvalue[0] - if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) - else: - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) - - if self.resultsName: - return [ tokenlist ] - else: - return tokenlist - - -class Suppress(TokenConverter): - """ - Converter for ignoring the results of a parsed expression. - - Example:: - source = "a, b, c,d" - wd = Word(alphas) - wd_list1 = wd + ZeroOrMore(',' + wd) - print(wd_list1.parseString(source)) - - # often, delimiters that are useful during parsing are just in the - # way afterward - use Suppress to keep them out of the parsed output - wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) - print(wd_list2.parseString(source)) - prints:: - ['a', ',', 'b', ',', 'c', ',', 'd'] - ['a', 'b', 'c', 'd'] - (See also L{delimitedList}.) - """ - def postParse( self, instring, loc, tokenlist ): - return [] - - def suppress( self ): - return self - - -class OnlyOnce(object): - """ - Wrapper for parse actions, to ensure they are only called once. - """ - def __init__(self, methodCall): - self.callable = _trim_arity(methodCall) - self.called = False - def __call__(self,s,l,t): - if not self.called: - results = self.callable(s,l,t) - self.called = True - return results - raise ParseException(s,l,"") - def reset(self): - self.called = False - -def traceParseAction(f): - """ - Decorator for debugging parse actions. - - When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} - When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. - - Example:: - wd = Word(alphas) - - @traceParseAction - def remove_duplicate_chars(tokens): - return ''.join(sorted(set(''.join(tokens)))) - - wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) - print(wds.parseString("slkdjs sld sldd sdlf sdljf")) - prints:: - >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) - <<leaving remove_duplicate_chars (ret: 'dfjkls') - ['dfjkls'] - """ - f = _trim_arity(f) - def z(*paArgs): - thisFunc = f.__name__ - s,l,t = paArgs[-3:] - if len(paArgs)>3: - thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) - try: - ret = f(*paArgs) - except Exception as exc: - sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) - raise - sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) - return ret - try: - z.__name__ = f.__name__ - except AttributeError: - pass - return z - -# -# global helpers -# -def delimitedList( expr, delim=",", combine=False ): - """ - Helper to define a delimited list of expressions - the delimiter defaults to ','. 
- By default, the list elements and delimiters can have intervening whitespace, and - comments, but this can be overridden by passing C{combine=True} in the constructor. - If C{combine} is set to C{True}, the matching tokens are returned as a single token - string, with the delimiters included; otherwise, the matching tokens are returned - as a list of tokens, with the delimiters suppressed. - - Example:: - delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] - delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] - """ - dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." - if combine: - return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) - else: - return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) - -def countedArray( expr, intExpr=None ): - """ - Helper to define a counted list of expressions. - This helper defines a pattern of the form:: - integer expr expr expr... - where the leading integer tells how many expr expressions follow. - The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. - - If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. - - Example:: - countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] - - # in this parser, the leading integer value is given in binary, - # '10' indicating that 2 values are in the array - binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) - countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] - """ - arrayExpr = Forward() - def countFieldParseAction(s,l,t): - n = t[0] - arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) - return [] - if intExpr is None: - intExpr = Word(nums).setParseAction(lambda t:int(t[0])) - else: - intExpr = intExpr.copy() - intExpr.setName("arrayLen") - intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') - -def _flatten(L): - ret = [] - for i in L: - if isinstance(i,list): - ret.extend(_flatten(i)) - else: - ret.append(i) - return ret - -def matchPreviousLiteral(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousLiteral(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches a - previous literal, will also match the leading C{"1:1"} in C{"1:10"}. - If this is not desired, use C{matchPreviousExpr}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - def copyTokenToRepeater(s,l,t): - if t: - if len(t) == 1: - rep << t[0] - else: - # flatten t tokens - tflat = _flatten(t.asList()) - rep << And(Literal(tt) for tt in tflat) - else: - rep << Empty() - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def matchPreviousExpr(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousExpr(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. 
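# Illustrative usage sketch of the OnlyOnce wrapper defined earlier above: it lets a parse
# action run a single time and raises on re-use until reset() is called (assumes
# pyparsing 2.x; announce() is a hypothetical parse action).
from pyparsing import OnlyOnce, Word, alphas

def announce(tokens):
    print("saw: " + tokens[0])

greeting = Word(alphas).setParseAction(OnlyOnce(announce))
greeting.parseString("hello")   # prints "saw: hello"
# a second greeting.parseString(...) raises ParseException until the wrapper's reset() is called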
Because this matches by - expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; - the expressions are evaluated first, and then compared, so - C{"1"} is compared with C{"10"}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - e2 = expr.copy() - rep <<= e2 - def copyTokenToRepeater(s,l,t): - matchTokens = _flatten(t.asList()) - def mustMatchTheseTokens(s,l,t): - theseTokens = _flatten(t.asList()) - if theseTokens != matchTokens: - raise ParseException("",0,"") - rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def _escapeRegexRangeChars(s): - #~ escape these chars: ^-] - for c in r"\^-]": - s = s.replace(c,_bslash+c) - s = s.replace("\n",r"\n") - s = s.replace("\t",r"\t") - return _ustr(s) - -def oneOf( strs, caseless=False, useRegex=True ): - """ - Helper to quickly define a set of alternative Literals, and makes sure to do - longest-first testing when there is a conflict, regardless of the input order, - but returns a C{L{MatchFirst}} for best performance. - - Parameters: - - strs - a string of space-delimited literals, or a collection of string literals - - caseless - (default=C{False}) - treat all literals as caseless - - useRegex - (default=C{True}) - as an optimization, will generate a Regex - object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or - if creating a C{Regex} raises an exception) - - Example:: - comp_oper = oneOf("< = > <= >= !=") - var = Word(alphas) - number = Word(nums) - term = var | number - comparison_expr = term + comp_oper + term - print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) - prints:: - [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] - """ - if caseless: - isequal = ( lambda a,b: a.upper() == b.upper() ) - masks = ( lambda a,b: b.upper().startswith(a.upper()) ) - parseElementClass = CaselessLiteral - else: - isequal = ( lambda a,b: a == b ) - masks = ( lambda a,b: b.startswith(a) ) - parseElementClass = Literal - - symbols = [] - if isinstance(strs,basestring): - symbols = strs.split() - elif isinstance(strs, Iterable): - symbols = list(strs) - else: - warnings.warn("Invalid argument to oneOf, expected string or iterable", - SyntaxWarning, stacklevel=2) - if not symbols: - return NoMatch() - - i = 0 - while i < len(symbols)-1: - cur = symbols[i] - for j,other in enumerate(symbols[i+1:]): - if ( isequal(other, cur) ): - del symbols[i+j+1] - break - elif ( masks(cur, other) ): - del symbols[i+j+1] - symbols.insert(i,other) - cur = other - break - else: - i += 1 - - if not caseless and useRegex: - #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) - try: - if len(symbols)==len("".join(symbols)): - return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) - else: - return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) - except Exception: - warnings.warn("Exception creating Regex for oneOf, building MatchFirst", - SyntaxWarning, stacklevel=2) - - - # last resort, just use MatchFirst - return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) - -def dictOf( key, value ): - """ - Helper to easily and clearly define a dictionary by specifying the respective patterns - for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens - in the proper order. 
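# Illustrative usage sketch of matchPreviousLiteral as documented above: the second field
# must repeat the literal text matched by the first (assumes pyparsing 2.x as vendored here).
from pyparsing import Word, matchPreviousLiteral, nums

first = Word(nums)
pair = first + ":" + matchPreviousLiteral(first)
print(pair.parseString("12:12"))   # -> ['12', ':', '12']
# pair.parseString("12:34") raises ParseException, since '34' does not repeat '12'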
The key pattern can include delimiting markers or punctuation, - as long as they are suppressed, thereby leaving the significant key text. The value - pattern can include named results, so that the C{Dict} results can include named token - fields. - - Example:: - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - print(OneOrMore(attr_expr).parseString(text).dump()) - - attr_label = label - attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) - - # similar to Dict, but simpler call format - result = dictOf(attr_label, attr_value).parseString(text) - print(result.dump()) - print(result['shape']) - print(result.shape) # object attribute access works too - print(result.asDict()) - prints:: - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - SQUARE - {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} - """ - return Dict( ZeroOrMore( Group ( key + value ) ) ) - -def originalTextFor(expr, asString=True): - """ - Helper to return the original, untokenized text for a given expression. Useful to - restore the parsed fields of an HTML start tag into the raw tag text itself, or to - revert separate tokens with intervening whitespace back to the original matching - input text. By default, returns astring containing the original parsed text. - - If the optional C{asString} argument is passed as C{False}, then the return value is a - C{L{ParseResults}} containing any results names that were originally matched, and a - single token containing the original matched text from the input string. So if - the expression passed to C{L{originalTextFor}} contains expressions with defined - results names, you must set C{asString} to C{False} if you want to preserve those - results name values. - - Example:: - src = "this is test <b> bold <i>text</i> </b> normal text " - for tag in ("b","i"): - opener,closer = makeHTMLTags(tag) - patt = originalTextFor(opener + SkipTo(closer) + closer) - print(patt.searchString(src)[0]) - prints:: - ['<b> bold <i>text</i> </b>'] - ['<i>text</i>'] - """ - locMarker = Empty().setParseAction(lambda s,loc,t: loc) - endlocMarker = locMarker.copy() - endlocMarker.callPreparse = False - matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") - if asString: - extractText = lambda s,l,t: s[t._original_start:t._original_end] - else: - def extractText(s,l,t): - t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] - matchExpr.setParseAction(extractText) - matchExpr.ignoreExprs = expr.ignoreExprs - return matchExpr - -def ungroup(expr): - """ - Helper to undo pyparsing's default grouping of And expressions, even - if all but one are non-empty. - """ - return TokenConverter(expr).setParseAction(lambda t:t[0]) - -def locatedExpr(expr): - """ - Helper to decorate a returned token with its starting and ending locations in the input string. 
- This helper adds the following results names: - - locn_start = location where matched expression begins - - locn_end = location where matched expression ends - - value = the actual parsed results - - Be careful if the input text contains C{<TAB>} characters, you may want to call - C{L{ParserElement.parseWithTabs}} - - Example:: - wd = Word(alphas) - for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): - print(match) - prints:: - [[0, 'ljsdf', 5]] - [[8, 'lksdjjf', 15]] - [[18, 'lkkjj', 23]] - """ - locator = Empty().setParseAction(lambda s,l,t: l) - return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) - - -# convenience constants for positional expressions -empty = Empty().setName("empty") -lineStart = LineStart().setName("lineStart") -lineEnd = LineEnd().setName("lineEnd") -stringStart = StringStart().setName("stringStart") -stringEnd = StringEnd().setName("stringEnd") - -_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) -_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) -_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) -_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) -_charRange = Group(_singleChar + Suppress("-") + _singleChar) -_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" - -def srange(s): - r""" - Helper to easily define string ranges for use in Word construction. Borrows - syntax from regexp '[]' string range definitions:: - srange("[0-9]") -> "0123456789" - srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" - srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" - The input string must be enclosed in []'s, and the returned string is the expanded - character set joined into a single string. - The values enclosed in the []'s may be: - - a single character - - an escaped character with a leading backslash (such as C{\-} or C{\]}) - - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) - (C{\0x##} is also supported for backwards compatibility) - - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) - - a range of any of the above, separated by a dash (C{'a-z'}, etc.) - - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) - """ - _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) - try: - return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) - except Exception: - return "" - -def matchOnlyAtCol(n): - """ - Helper method for defining parse actions that require matching at a specific - column in the input text. - """ - def verifyCol(strg,locn,toks): - if col(locn,strg) != n: - raise ParseException(strg,locn,"matched token not at column %d" % n) - return verifyCol - -def replaceWith(replStr): - """ - Helper method for common parse actions that simply return a literal value. Especially - useful when used with C{L{transformString<ParserElement.transformString>}()}. 
- - Example:: - num = Word(nums).setParseAction(lambda toks: int(toks[0])) - na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) - term = na | num - - OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] - """ - return lambda s,l,t: [replStr] - -def removeQuotes(s,l,t): - """ - Helper parse action for removing quotation marks from parsed quoted strings. - - Example:: - # by default, quotation marks are included in parsed results - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] - - # use removeQuotes to strip quotation marks from parsed results - quotedString.setParseAction(removeQuotes) - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] - """ - return t[0][1:-1] - -def tokenMap(func, *args): - """ - Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional - args are passed, they are forwarded to the given function as additional arguments after - the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the - parsed data to an integer using base 16. - - Example (compare the last to example in L{ParserElement.transformString}:: - hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) - hex_ints.runTests(''' - 00 11 22 aa FF 0a 0d 1a - ''') - - upperword = Word(alphas).setParseAction(tokenMap(str.upper)) - OneOrMore(upperword).runTests(''' - my kingdom for a horse - ''') - - wd = Word(alphas).setParseAction(tokenMap(str.title)) - OneOrMore(wd).setParseAction(' '.join).runTests(''' - now is the winter of our discontent made glorious summer by this sun of york - ''') - prints:: - 00 11 22 aa FF 0a 0d 1a - [0, 17, 34, 170, 255, 10, 13, 26] - - my kingdom for a horse - ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] - - now is the winter of our discontent made glorious summer by this sun of york - ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] - """ - def pa(s,l,t): - return [func(tokn, *args) for tokn in t] - - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - pa.__name__ = func_name - - return pa - -upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) -"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" - -downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) -"""(Deprecated) Helper parse action to convert tokens to lower case. 
Deprecated in favor of L{pyparsing_common.downcaseTokens}""" - -def _makeTags(tagStr, xml): - """Internal helper to construct opening and closing tag expressions, given a tag name""" - if isinstance(tagStr,basestring): - resname = tagStr - tagStr = Keyword(tagStr, caseless=not xml) - else: - resname = tagStr.name - - tagAttrName = Word(alphas,alphanums+"_-:") - if (xml): - tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") - else: - printablesLessRAbrack = "".join(c for c in printables if c not in ">") - tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ - Optional( Suppress("=") + tagAttrValue ) ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") - closeTag = Combine(_L("</") + tagStr + ">") - - openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) - closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) - openTag.tag = resname - closeTag.tag = resname - return openTag, closeTag - -def makeHTMLTags(tagStr): - """ - Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches - tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. - - Example:: - text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' - # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple - a,a_end = makeHTMLTags("A") - link_expr = a + SkipTo(a_end)("link_text") + a_end - - for link in link_expr.searchString(text): - # attributes in the <A> tag (like "href" shown here) are also accessible as named results - print(link.link_text, '->', link.href) - prints:: - pyparsing -> http://pyparsing.wikispaces.com - """ - return _makeTags( tagStr, False ) - -def makeXMLTags(tagStr): - """ - Helper to construct opening and closing tag expressions for XML, given a tag name. Matches - tags only in the given upper/lower case. - - Example: similar to L{makeHTMLTags} - """ - return _makeTags( tagStr, True ) - -def withAttribute(*args,**attrDict): - """ - Helper to create a validating parse action to be used with start tags created - with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag - with a required attribute value, to avoid false matches on common tags such as - C{<TD>} or C{<DIV>}. - - Call C{withAttribute} with a series of attribute names and values. Specify the list - of filter attributes names and values as: - - keyword arguments, as in C{(align="right")}, or - - as an explicit dict with C{**} operator, when an attribute name is also a Python - reserved word, as in C{**{"class":"Customer", "align":"right"}} - - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) - For attribute names with a namespace prefix, you must use the second form. Attribute - names are matched insensitive to upper/lower case. - - If just testing for C{class} (with or without a namespace), use C{L{withClass}}. 
- - To verify that the attribute exists, but without specifying a value, pass - C{withAttribute.ANY_VALUE} as the value. - - Example:: - html = ''' - <div> - Some text - <div type="grid">1 4 0 1 0</div> - <div type="graph">1,3 2,3 1,1</div> - <div>this has no type</div> - </div> - - ''' - div,div_end = makeHTMLTags("div") - - # only match div tag having a type attribute with value "grid" - div_grid = div().setParseAction(withAttribute(type="grid")) - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - # construct a match with any div tag having a type attribute, regardless of the value - div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - prints:: - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - if args: - attrs = args[:] - else: - attrs = attrDict.items() - attrs = [(k,v) for k,v in attrs] - def pa(s,l,tokens): - for attrName,attrValue in attrs: - if attrName not in tokens: - raise ParseException(s,l,"no matching attribute " + attrName) - if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: - raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % - (attrName, tokens[attrName], attrValue)) - return pa -withAttribute.ANY_VALUE = object() - -def withClass(classname, namespace=''): - """ - Simplified version of C{L{withAttribute}} when matching on a div class - made - difficult because C{class} is a reserved word in Python. - - Example:: - html = ''' - <div> - Some text - <div class="grid">1 4 0 1 0</div> - <div class="graph">1,3 2,3 1,1</div> - <div>this <div> has no class</div> - </div> - - ''' - div,div_end = makeHTMLTags("div") - div_grid = div().setParseAction(withClass("grid")) - - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - prints:: - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - classattr = "%s:class" % namespace if namespace else "class" - return withAttribute(**{classattr : classname}) - -opAssoc = _Constants() -opAssoc.LEFT = object() -opAssoc.RIGHT = object() - -def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): - """ - Helper method for constructing grammars of expressions made up of - operators working in a precedence hierarchy. Operators may be unary or - binary, left- or right-associative. Parse actions can also be attached - to operator expressions. The generated parser will also recognize the use - of parentheses to override operator precedences (see example below). - - Note: if you define a deep operator list, you may see performance issues - when using infixNotation. See L{ParserElement.enablePackrat} for a - mechanism to potentially improve your parser performance. 
- - Parameters: - - baseExpr - expression representing the most basic element for the nested - - opList - list of tuples, one for each operator precedence level in the - expression grammar; each tuple is of the form - (opExpr, numTerms, rightLeftAssoc, parseAction), where: - - opExpr is the pyparsing expression for the operator; - may also be a string, which will be converted to a Literal; - if numTerms is 3, opExpr is a tuple of two expressions, for the - two operators separating the 3 terms - - numTerms is the number of terms for this operator (must - be 1, 2, or 3) - - rightLeftAssoc is the indicator whether the operator is - right or left associative, using the pyparsing-defined - constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. - - parseAction is the parse action to be associated with - expressions matching this operator expression (the - parse action tuple member may be omitted); if the parse action - is passed a tuple or list of functions, this is equivalent to - calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) - - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) - - Example:: - # simple example of four-function arithmetic with ints and variable names - integer = pyparsing_common.signed_integer - varname = pyparsing_common.identifier - - arith_expr = infixNotation(integer | varname, - [ - ('-', 1, opAssoc.RIGHT), - (oneOf('* /'), 2, opAssoc.LEFT), - (oneOf('+ -'), 2, opAssoc.LEFT), - ]) - - arith_expr.runTests(''' - 5+3*6 - (5+3)*6 - -2--11 - ''', fullDump=False) - prints:: - 5+3*6 - [[5, '+', [3, '*', 6]]] - - (5+3)*6 - [[[5, '+', 3], '*', 6]] - - -2--11 - [[['-', 2], '-', ['-', 11]]] - """ - ret = Forward() - lastExpr = baseExpr | ( lpar + ret + rpar ) - for i,operDef in enumerate(opList): - opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] - termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr - if arity == 3: - if opExpr is None or len(opExpr) != 2: - raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") - opExpr1, opExpr2 = opExpr - thisExpr = Forward().setName(termName) - if rightLeftAssoc == opAssoc.LEFT: - if arity == 1: - matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) - else: - matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ - Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - elif rightLeftAssoc == opAssoc.RIGHT: - if arity == 1: - # try to avoid LR with this extra test - if not isinstance(opExpr, Optional): - opExpr = Optional(opExpr) - matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) - else: - matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ - Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or 
ternary (3)") - else: - raise ValueError("operator must indicate right or left associativity") - if pa: - if isinstance(pa, (tuple, list)): - matchExpr.setParseAction(*pa) - else: - matchExpr.setParseAction(pa) - thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) - lastExpr = thisExpr - ret <<= lastExpr - return ret - -operatorPrecedence = infixNotation -"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" - -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") -quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| - Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") -unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") - -def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """ - Helper method for defining nested lists enclosed in opening and closing - delimiters ("(" and ")" are the default). - - Parameters: - - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - - content - expression for items within the nested lists (default=C{None}) - - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) - - If an expression is not provided for the content argument, the nested - expression will capture all whitespace-delimited content between delimiters - as a list of separate values. - - Use the C{ignoreExpr} argument to define expressions that may contain - opening or closing characters that should not be treated as opening - or closing characters for nesting, such as quotedString or a comment - expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. - The default is L{quotedString}, but if no expressions are to be ignored, - then pass C{None} for this argument. 
- - Example:: - data_type = oneOf("void int short long char float double") - decl_data_type = Combine(data_type + Optional(Word('*'))) - ident = Word(alphas+'_', alphanums+'_') - number = pyparsing_common.number - arg = Group(decl_data_type + ident) - LPAR,RPAR = map(Suppress, "()") - - code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) - - c_function = (decl_data_type("type") - + ident("name") - + LPAR + Optional(delimitedList(arg), [])("args") + RPAR - + code_body("body")) - c_function.ignore(cStyleComment) - - source_code = ''' - int is_odd(int x) { - return (x%2); - } - - int dec_to_hex(char hchar) { - if (hchar >= '0' && hchar <= '9') { - return (ord(hchar)-ord('0')); - } else { - return (10+ord(hchar)-ord('A')); - } - } - ''' - for func in c_function.searchString(source_code): - print("%(name)s (%(type)s) args: %(args)s" % func) - - prints:: - is_odd (int) args: [['int', 'x']] - dec_to_hex (int) args: [['char', 'hchar']] - """ - if opener == closer: - raise ValueError("opening and closing strings cannot be the same") - if content is None: - if isinstance(opener,basestring) and isinstance(closer,basestring): - if len(opener) == 1 and len(closer)==1: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t:t[0].strip())) - else: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - ~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - raise ValueError("opening and closing arguments must be strings if no content expression is given") - ret = Forward() - if ignoreExpr is not None: - ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) - else: - ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) - ret.setName('nested %s%s expression' % (opener,closer)) - return ret - -def indentedBlock(blockStatementExpr, indentStack, indent=True): - """ - Helper method for defining space-delimited indentation blocks, such as - those used to define block statements in Python source code. - - Parameters: - - blockStatementExpr - expression defining syntax of statement that - is repeated within the indented block - - indentStack - list created by caller to manage indentation stack - (multiple statementWithIndentedBlock expressions within a single grammar - should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond the - the current level; set to False for block of left-most statements - (default=C{True}) - - A valid block must contain at least one C{blockStatement}. 
- - Example:: - data = ''' - def A(z): - A1 - B = 100 - G = A2 - A2 - A3 - B - def BB(a,b,c): - BB1 - def BBA(): - bba1 - bba2 - bba3 - C - D - def spam(x,y): - def eggs(z): - pass - ''' - - - indentStack = [1] - stmt = Forward() - - identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") - func_body = indentedBlock(stmt, indentStack) - funcDef = Group( funcDecl + func_body ) - - rvalue = Forward() - funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") - rvalue << (funcCall | identifier | Word(nums)) - assignment = Group(identifier + "=" + rvalue) - stmt << ( funcDef | assignment | identifier ) - - module_body = OneOrMore(stmt) - - parseTree = module_body.parseString(data) - parseTree.pprint() - prints:: - [['def', - 'A', - ['(', 'z', ')'], - ':', - [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], - 'B', - ['def', - 'BB', - ['(', 'a', 'b', 'c', ')'], - ':', - [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], - 'C', - 'D', - ['def', - 'spam', - ['(', 'x', 'y', ')'], - ':', - [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] - """ - def checkPeerIndent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if curCol != indentStack[-1]: - if curCol > indentStack[-1]: - raise ParseFatalException(s,l,"illegal nesting") - raise ParseException(s,l,"not a peer entry") - - def checkSubIndent(s,l,t): - curCol = col(l,s) - if curCol > indentStack[-1]: - indentStack.append( curCol ) - else: - raise ParseException(s,l,"not a subentry") - - def checkUnindent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): - raise ParseException(s,l,"not an unindent") - indentStack.pop() - - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) - INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') - PEER = Empty().setParseAction(checkPeerIndent).setName('') - UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') - if indent: - smExpr = Group( Optional(NL) + - #~ FollowedBy(blockStatementExpr) + - INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) - else: - smExpr = Group( Optional(NL) + - (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) - blockStatementExpr.ignore(_bslash + LineEnd()) - return smExpr.setName('indented block') - -alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") -punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") - -anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) -_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) -commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") -def replaceHTMLEntity(t): - """Helper parser action to replace common HTML entities with their special characters""" - return _htmlEntityMap.get(t.entity) - -# it's easy to get these comment structures wrong - they're very common, so may as well make them available -cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") -"Comment of the form C{/* ... */}" - -htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment") -"Comment of the form C{<!-- ... -->}" - -restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") -dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") -"Comment of the form C{// ... 
(to end of line)}" - -cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") -"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" - -javaStyleComment = cppStyleComment -"Same as C{L{cppStyleComment}}" - -pythonStyleComment = Regex(r"#.*").setName("Python style comment") -"Comment of the form C{# ... (to end of line)}" - -_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + - Optional( Word(" \t") + - ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") -commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") -"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. - This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" - -# some other useful expressions - using lower-case class name since we are really using this as a namespace -class pyparsing_common: - """ - Here are some common low-level expressions that may be useful in jump-starting parser development: - - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>}) - - common L{programming identifiers<identifier>} - - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>}) - - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>} - - L{UUID<uuid>} - - L{comma-separated list<comma_separated_list>} - Parse actions: - - C{L{convertToInteger}} - - C{L{convertToFloat}} - - C{L{convertToDate}} - - C{L{convertToDatetime}} - - C{L{stripHTMLTags}} - - C{L{upcaseTokens}} - - C{L{downcaseTokens}} - - Example:: - pyparsing_common.number.runTests(''' - # any int or real number, returned as the appropriate type - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.fnumber.runTests(''' - # any int or real number, returned as float - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.hex_integer.runTests(''' - # hex numbers - 100 - FF - ''') - - pyparsing_common.fraction.runTests(''' - # fractions - 1/2 - -3/4 - ''') - - pyparsing_common.mixed_integer.runTests(''' - # mixed fractions - 1 - 1/2 - -3/4 - 1-3/4 - ''') - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(''' - # uuid - 12345678-1234-5678-1234-567812345678 - ''') - prints:: - # any int or real number, returned as the appropriate type - 100 - [100] - - -100 - [-100] - - +100 - [100] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # any int or real number, returned as float - 100 - [100.0] - - -100 - [-100.0] - - +100 - [100.0] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # hex numbers - 100 - [256] - - FF - [255] - - # fractions - 1/2 - [0.5] - - -3/4 - [-0.75] - - # mixed fractions - 1 - [1] - - 1/2 - [0.5] - - -3/4 - [-0.75] - - 1-3/4 - [1.75] - - # uuid - 12345678-1234-5678-1234-567812345678 - [UUID('12345678-1234-5678-1234-567812345678')] - """ - - convertToInteger = tokenMap(int) - """ - Parse action for converting parsed integers to Python int - """ - - convertToFloat = tokenMap(float) - """ - Parse action for converting parsed numbers to Python float - """ - - integer = Word(nums).setName("integer").setParseAction(convertToInteger) - """expression that parses an unsigned integer, returns an int""" - - hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) - """expression that 
parses a hexadecimal integer, returns an int""" - - signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) - """expression that parses an integer with optional leading sign, returns an int""" - - fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") - """fractional expression of an integer divided by an integer, returns a float""" - fraction.addParseAction(lambda t: t[0]/t[-1]) - - mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") - """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" - mixed_integer.addParseAction(sum) - - real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) - """expression that parses a floating point number and returns a float""" - - sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) - """expression that parses a floating point number with optional scientific notation and returns a float""" - - # streamlining this expression makes the docs nicer-looking - number = (sci_real | real | signed_integer).streamline() - """any numeric expression, returns the corresponding Python type""" - - fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) - """any int or real number, returned as float""" - - identifier = Word(alphas+'_', alphanums+'_').setName("identifier") - """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" - - ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") - "IPv4 address (C{0.0.0.0 - 255.255.255.255})" - - _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") - _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) - _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") - ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") - "IPv6 address (long, short, or mixed form)" - - mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") - "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" - - @staticmethod - def convertToDate(fmt="%Y-%m-%d"): - """ - Helper to create a parse action for converting parsed date string to Python datetime.date - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) - - Example:: - date_expr = pyparsing_common.iso8601_date.copy() - date_expr.setParseAction(pyparsing_common.convertToDate()) - print(date_expr.parseString("1999-12-31")) - prints:: - [datetime.date(1999, 12, 31)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt).date() - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - @staticmethod - def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): - """ - Helper to create a parse action for converting parsed datetime string to Python datetime.datetime - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) - - Example:: - dt_expr = pyparsing_common.iso8601_datetime.copy() - dt_expr.setParseAction(pyparsing_common.convertToDatetime()) - print(dt_expr.parseString("1999-12-31T23:59:59.999")) - prints:: - [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt) - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date") - "ISO8601 date (C{yyyy-mm-dd})" - - iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") - "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" - - uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") - "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" - - _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() - @staticmethod - def stripHTMLTags(s, l, tokens): - """ - Parse action to remove HTML tags from web page HTML source - - Example:: - # strip HTML links from normal text - text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' - td,td_end = makeHTMLTags("TD") - table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - - print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' - """ - return pyparsing_common._html_stripper.transformString(tokens[0]) - - _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') - + Optional( White(" \t") ) ) ).streamline().setName("commaItem") - comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") - """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" - - upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) - """Parse action to convert tokens to upper case.""" - - downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) - """Parse action to convert tokens to lower case.""" - - -if __name__ == "__main__": - - selectToken = CaselessLiteral("select") - fromToken = CaselessLiteral("from") - - ident = Word(alphas, alphanums + "_$") - - columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - columnNameList = Group(delimitedList(columnName)).setName("columns") - columnSpec 
= ('*' | columnNameList) - - tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - tableNameList = Group(delimitedList(tableName)).setName("tables") - - simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") - - # demo runTests method, including embedded comments in test string - simpleSQL.runTests(""" - # '*' as column list and dotted table name - select * from SYS.XYZZY - - # caseless match on "SELECT", and casts back to "select" - SELECT * from XYZZY, ABC - - # list of column names, and mixed case SELECT keyword - Select AA,BB,CC from Sys.dual - - # multiple tables - Select A, B, C from Sys.dual, Table2 - - # invalid SELECT keyword - should fail - Xelect A, B, C from Sys.dual - - # incomplete command - should fail - Select - - # invalid column name - should fail - Select ^^^ frox Sys.dual - - """) - - pyparsing_common.number.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - # any int or real number, returned as float - pyparsing_common.fnumber.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - pyparsing_common.hex_integer.runTests(""" - 100 - FF - """) - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(""" - 12345678-1234-5678-1234-567812345678 - """) diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/pyparsing.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/pyparsing.pyc deleted file mode 100644 index b13e74d2..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/pyparsing.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/six.py b/env/lib/python2.7/site-packages/setuptools/_vendor/six.py deleted file mode 100644 index 190c0239..00000000 --- a/env/lib/python2.7/site-packages/setuptools/_vendor/six.py +++ /dev/null @@ -1,868 +0,0 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson <benjamin@python.org>" -__version__ = "1.10.0" - - -# Useful for very coarse version differentiation. 
-PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. - delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." 
+ fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", 
"http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = 
_importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack 
- del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - raise value from from_value -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. 
-__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/six.pyc b/env/lib/python2.7/site-packages/setuptools/_vendor/six.pyc deleted file mode 100644 index c332ce66..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/_vendor/six.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/archive_util.py b/env/lib/python2.7/site-packages/setuptools/archive_util.py deleted file mode 100644 index 81436044..00000000 --- a/env/lib/python2.7/site-packages/setuptools/archive_util.py +++ /dev/null @@ -1,173 +0,0 @@ -"""Utilities for extracting common archive formats""" - -import zipfile -import tarfile -import os -import shutil -import posixpath -import contextlib -from distutils.errors import DistutilsError - -from pkg_resources import ensure_directory - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - - -class UnrecognizedFormat(DistutilsError): - """Couldn't recognize the archive type""" - - -def default_filter(src, dst): - """The default progress/filter callback; returns True for all files""" - return dst - - -def unpack_archive(filename, extract_dir, progress_filter=default_filter, - drivers=None): - """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` - - `progress_filter` is a function taking two arguments: a source path - internal to the archive ('/'-separated), and a filesystem path where it - will be extracted. The callback must return the desired extract path - (which may be the same as the one passed in), or else ``None`` to skip - that file or directory. The callback can thus be used to report on the - progress of the extraction, as well as to filter the items extracted or - alter their extraction paths. - - `drivers`, if supplied, must be a non-empty sequence of functions with the - same signature as this function (minus the `drivers` argument), that raise - ``UnrecognizedFormat`` if they do not support extracting the designated - archive type. The `drivers` are tried in sequence until one is found that - does not raise an error, or until all are exhausted (in which case - ``UnrecognizedFormat`` is raised). If you do not supply a sequence of - drivers, the module's ``extraction_drivers`` constant will be used, which - means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that - order. 
- """ - for driver in drivers or extraction_drivers: - try: - driver(filename, extract_dir, progress_filter) - except UnrecognizedFormat: - continue - else: - return - else: - raise UnrecognizedFormat( - "Not a recognized archive type: %s" % filename - ) - - -def unpack_directory(filename, extract_dir, progress_filter=default_filter): - """"Unpack" a directory, using the same interface as for archives - - Raises ``UnrecognizedFormat`` if `filename` is not a directory - """ - if not os.path.isdir(filename): - raise UnrecognizedFormat("%s is not a directory" % filename) - - paths = { - filename: ('', extract_dir), - } - for base, dirs, files in os.walk(filename): - src, dst = paths[base] - for d in dirs: - paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) - for f in files: - target = os.path.join(dst, f) - target = progress_filter(src + f, target) - if not target: - # skip non-files - continue - ensure_directory(target) - f = os.path.join(base, f) - shutil.copyfile(f, target) - shutil.copystat(f, target) - - -def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): - """Unpack zip `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined - by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - - if not zipfile.is_zipfile(filename): - raise UnrecognizedFormat("%s is not a zip file" % (filename,)) - - with zipfile.ZipFile(filename) as z: - for info in z.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' in name.split('/'): - continue - - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - with open(target, 'wb') as f: - f.write(data) - unix_attributes = info.external_attr >> 16 - if unix_attributes: - os.chmod(target, unix_attributes) - - -def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined - by ``tarfile.open()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - try: - tarobj = tarfile.open(filename) - except tarfile.TarError: - raise UnrecognizedFormat( - "%s is not a compressed or uncompressed tar file" % (filename,) - ) - with contextlib.closing(tarobj): - # don't do any chowning! - tarobj.chown = lambda *args: None - for member in tarobj: - name = member.name - # don't extract absolute paths or ones with .. in them - if not name.startswith('/') and '..' 
not in name.split('/'): - prelim_dst = os.path.join(extract_dir, *name.split('/')) - - # resolve any links and to extract the link targets as normal - # files - while member is not None and (member.islnk() or member.issym()): - linkpath = member.linkname - if member.issym(): - base = posixpath.dirname(member.name) - linkpath = posixpath.join(base, linkpath) - linkpath = posixpath.normpath(linkpath) - member = tarobj._getmember(linkpath) - - if member is not None and (member.isfile() or member.isdir()): - final_dst = progress_filter(name, prelim_dst) - if final_dst: - if final_dst.endswith(os.sep): - final_dst = final_dst[:-1] - try: - # XXX Ugh - tarobj._extract_member(member, final_dst) - except tarfile.ExtractError: - # chown/chmod/mkfifo/mknode/makedev failed - pass - return True - - -extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/env/lib/python2.7/site-packages/setuptools/archive_util.pyc b/env/lib/python2.7/site-packages/setuptools/archive_util.pyc deleted file mode 100644 index e86695f2..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/archive_util.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/build_meta.pyc b/env/lib/python2.7/site-packages/setuptools/build_meta.pyc deleted file mode 100644 index 37a28202..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/build_meta.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/__init__.py b/env/lib/python2.7/site-packages/setuptools/command/__init__.py deleted file mode 100644 index fe619e2e..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -__all__ = [ - 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', - 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', - 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts', - 'register', 'bdist_wininst', 'upload_docs', 'upload', 'build_clib', - 'dist_info', -] - -from distutils.command.bdist import bdist -import sys - -from setuptools.command import install_scripts - -if 'egg' not in bdist.format_commands: - bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") - bdist.format_commands.append('egg') - -del bdist, sys diff --git a/env/lib/python2.7/site-packages/setuptools/command/__init__.pyc b/env/lib/python2.7/site-packages/setuptools/command/__init__.pyc deleted file mode 100644 index 3bc6936d..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/alias.pyc b/env/lib/python2.7/site-packages/setuptools/command/alias.pyc deleted file mode 100644 index 6039f5f2..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/alias.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/bdist_egg.pyc b/env/lib/python2.7/site-packages/setuptools/command/bdist_egg.pyc deleted file mode 100644 index 88a41aed..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/bdist_egg.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/bdist_rpm.py b/env/lib/python2.7/site-packages/setuptools/command/bdist_rpm.py deleted file mode 100644 index 70730927..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/bdist_rpm.py +++ /dev/null @@ -1,43 +0,0 @@ -import distutils.command.bdist_rpm as orig - - -class 
bdist_rpm(orig.bdist_rpm): - """ - Override the default bdist_rpm behavior to do the following: - - 1. Run egg_info to ensure the name and version are properly calculated. - 2. Always run 'install' using --single-version-externally-managed to - disable eggs in RPM distributions. - 3. Replace dash with underscore in the version numbers for better RPM - compatibility. - """ - - def run(self): - # ensure distro name is up-to-date - self.run_command('egg_info') - - orig.bdist_rpm.run(self) - - def _make_spec_file(self): - version = self.distribution.get_version() - rpmversion = version.replace('-', '_') - spec = orig.bdist_rpm._make_spec_file(self) - line23 = '%define version ' + version - line24 = '%define version ' + rpmversion - spec = [ - line.replace( - "Source0: %{name}-%{version}.tar", - "Source0: %{name}-%{unmangled_version}.tar" - ).replace( - "setup.py install ", - "setup.py install --single-version-externally-managed " - ).replace( - "%setup", - "%setup -n %{name}-%{unmangled_version}" - ).replace(line23, line24) - for line in spec - ] - insert_loc = spec.index(line24) + 1 - unmangled_version = "%define unmangled_version " + version - spec.insert(insert_loc, unmangled_version) - return spec diff --git a/env/lib/python2.7/site-packages/setuptools/command/bdist_rpm.pyc b/env/lib/python2.7/site-packages/setuptools/command/bdist_rpm.pyc deleted file mode 100644 index 70924683..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/bdist_rpm.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/bdist_wininst.py b/env/lib/python2.7/site-packages/setuptools/command/bdist_wininst.py deleted file mode 100644 index 073de97b..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/bdist_wininst.py +++ /dev/null @@ -1,21 +0,0 @@ -import distutils.command.bdist_wininst as orig - - -class bdist_wininst(orig.bdist_wininst): - def reinitialize_command(self, command, reinit_subcommands=0): - """ - Supplement reinitialize_command to work around - http://bugs.python.org/issue20819 - """ - cmd = self.distribution.reinitialize_command( - command, reinit_subcommands) - if command in ('install', 'install_lib'): - cmd.install_lib = None - return cmd - - def run(self): - self._is_running = True - try: - orig.bdist_wininst.run(self) - finally: - self._is_running = False diff --git a/env/lib/python2.7/site-packages/setuptools/command/bdist_wininst.pyc b/env/lib/python2.7/site-packages/setuptools/command/bdist_wininst.pyc deleted file mode 100644 index 869f492a..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/bdist_wininst.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/build_clib.py b/env/lib/python2.7/site-packages/setuptools/command/build_clib.py deleted file mode 100644 index 09caff6f..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/build_clib.py +++ /dev/null @@ -1,98 +0,0 @@ -import distutils.command.build_clib as orig -from distutils.errors import DistutilsSetupError -from distutils import log -from setuptools.dep_util import newer_pairwise_group - - -class build_clib(orig.build_clib): - """ - Override the default build_clib behaviour to do the following: - - 1. Implement a rudimentary timestamp-based dependency system - so 'compile()' doesn't run every time. - 2. Add more keys to the 'build_info' dictionary: - * obj_deps - specify dependencies for each object compiled. 
- this should be a dictionary mapping a key - with the source filename to a list of - dependencies. Use an empty string for global - dependencies. - * cflags - specify a list of additional flags to pass to - the compiler. - """ - - def build_libraries(self, libraries): - for (lib_name, build_info) in libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) - sources = list(sources) - - log.info("building '%s' library", lib_name) - - # Make sure everything is the correct type. - # obj_deps should be a dictionary of keys as sources - # and a list/tuple of files that are its dependencies. - obj_deps = build_info.get('obj_deps', dict()) - if not isinstance(obj_deps, dict): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'obj_deps' must be a dictionary of " - "type 'source: list'" % lib_name) - dependencies = [] - - # Get the global dependencies that are specified by the '' key. - # These will go into every source's dependency list. - global_deps = obj_deps.get('', list()) - if not isinstance(global_deps, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'obj_deps' must be a dictionary of " - "type 'source: list'" % lib_name) - - # Build the list to be used by newer_pairwise_group - # each source will be auto-added to its dependencies. - for source in sources: - src_deps = [source] - src_deps.extend(global_deps) - extra_deps = obj_deps.get(source, list()) - if not isinstance(extra_deps, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'obj_deps' must be a dictionary of " - "type 'source: list'" % lib_name) - src_deps.extend(extra_deps) - dependencies.append(src_deps) - - expected_objects = self.compiler.object_filenames( - sources, - output_dir=self.build_temp - ) - - if newer_pairwise_group(dependencies, expected_objects) != ([], []): - # First, compile the source code to object files in the library - # directory. (This should probably change to putting object - # files in a temporary build directory.) - macros = build_info.get('macros') - include_dirs = build_info.get('include_dirs') - cflags = build_info.get('cflags') - objects = self.compiler.compile( - sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=include_dirs, - extra_postargs=cflags, - debug=self.debug - ) - - # Now "link" the object files together into a static library. - # (On Unix at least, this isn't really linking -- it just - # builds an archive. Whatever.) 
- self.compiler.create_static_lib( - expected_objects, - lib_name, - output_dir=self.build_clib, - debug=self.debug - ) diff --git a/env/lib/python2.7/site-packages/setuptools/command/build_clib.pyc b/env/lib/python2.7/site-packages/setuptools/command/build_clib.pyc deleted file mode 100644 index 562a6048..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/build_clib.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/build_ext.py b/env/lib/python2.7/site-packages/setuptools/command/build_ext.py deleted file mode 100644 index 60a8a32f..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/build_ext.py +++ /dev/null @@ -1,321 +0,0 @@ -import os -import sys -import itertools -import imp -from distutils.command.build_ext import build_ext as _du_build_ext -from distutils.file_util import copy_file -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler, get_config_var -from distutils.errors import DistutilsError -from distutils import log - -from setuptools.extension import Library -from setuptools.extern import six - -try: - # Attempt to use Cython for building extensions, if available - from Cython.Distutils.build_ext import build_ext as _build_ext - # Additionally, assert that the compiler module will load - # also. Ref #1229. - __import__('Cython.Compiler.Main') -except ImportError: - _build_ext = _du_build_ext - -# make sure _config_vars is initialized -get_config_var("LDSHARED") -from distutils.sysconfig import _config_vars as _CONFIG_VARS - - -def _customize_compiler_for_shlib(compiler): - if sys.platform == "darwin": - # building .dylib requires additional compiler flags on OSX; here we - # temporarily substitute the pyconfig.h variables so that distutils' - # 'customize_compiler' uses them before we build the shared libraries. - tmp = _CONFIG_VARS.copy() - try: - # XXX Help! I don't have any idea whether these are right... 
- _CONFIG_VARS['LDSHARED'] = ( - "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") - _CONFIG_VARS['CCSHARED'] = " -dynamiclib" - _CONFIG_VARS['SO'] = ".dylib" - customize_compiler(compiler) - finally: - _CONFIG_VARS.clear() - _CONFIG_VARS.update(tmp) - else: - customize_compiler(compiler) - - -have_rtld = False -use_stubs = False -libtype = 'shared' - -if sys.platform == "darwin": - use_stubs = True -elif os.name != 'nt': - try: - import dl - use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') - except ImportError: - pass - -if_dl = lambda s: s if have_rtld else '' - - -def get_abi3_suffix(): - """Return the file extension for an abi3-compliant Extension()""" - for suffix, _, _ in (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION): - if '.abi3' in suffix: # Unix - return suffix - elif suffix == '.pyd': # Windows - return suffix - - -class build_ext(_build_ext): - def run(self): - """Build extensions in build directory, then copy if --inplace""" - old_inplace, self.inplace = self.inplace, 0 - _build_ext.run(self) - self.inplace = old_inplace - if old_inplace: - self.copy_extensions_to_source() - - def copy_extensions_to_source(self): - build_py = self.get_finalized_command('build_py') - for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = self.get_ext_filename(fullname) - modpath = fullname.split('.') - package = '.'.join(modpath[:-1]) - package_dir = build_py.get_package_dir(package) - dest_filename = os.path.join(package_dir, - os.path.basename(filename)) - src_filename = os.path.join(self.build_lib, filename) - - # Always copy, even if source is older than destination, to ensure - # that the right extensions for the current Python/platform are - # used. - copy_file( - src_filename, dest_filename, verbose=self.verbose, - dry_run=self.dry_run - ) - if ext._needs_stub: - self.write_stub(package_dir or os.curdir, ext, True) - - def get_ext_filename(self, fullname): - filename = _build_ext.get_ext_filename(self, fullname) - if fullname in self.ext_map: - ext = self.ext_map[fullname] - use_abi3 = ( - six.PY3 - and getattr(ext, 'py_limited_api') - and get_abi3_suffix() - ) - if use_abi3: - so_ext = get_config_var('EXT_SUFFIX') - filename = filename[:-len(so_ext)] - filename = filename + get_abi3_suffix() - if isinstance(ext, Library): - fn, ext = os.path.splitext(filename) - return self.shlib_compiler.library_filename(fn, libtype) - elif use_stubs and ext._links_to_dynamic: - d, fn = os.path.split(filename) - return os.path.join(d, 'dl-' + fn) - return filename - - def initialize_options(self): - _build_ext.initialize_options(self) - self.shlib_compiler = None - self.shlibs = [] - self.ext_map = {} - - def finalize_options(self): - _build_ext.finalize_options(self) - self.extensions = self.extensions or [] - self.check_extensions_list(self.extensions) - self.shlibs = [ext for ext in self.extensions - if isinstance(ext, Library)] - if self.shlibs: - self.setup_shlib_compiler() - for ext in self.extensions: - ext._full_name = self.get_ext_fullname(ext.name) - for ext in self.extensions: - fullname = ext._full_name - self.ext_map[fullname] = ext - - # distutils 3.1 will also ask for module names - # XXX what to do with conflicts? 
- self.ext_map[fullname.split('.')[-1]] = ext - - ltd = self.shlibs and self.links_to_dynamic(ext) or False - ns = ltd and use_stubs and not isinstance(ext, Library) - ext._links_to_dynamic = ltd - ext._needs_stub = ns - filename = ext._file_name = self.get_ext_filename(fullname) - libdir = os.path.dirname(os.path.join(self.build_lib, filename)) - if ltd and libdir not in ext.library_dirs: - ext.library_dirs.append(libdir) - if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: - ext.runtime_library_dirs.append(os.curdir) - - def setup_shlib_compiler(self): - compiler = self.shlib_compiler = new_compiler( - compiler=self.compiler, dry_run=self.dry_run, force=self.force - ) - _customize_compiler_for_shlib(compiler) - - if self.include_dirs is not None: - compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name, value) in self.define: - compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - compiler.undefine_macro(macro) - if self.libraries is not None: - compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - compiler.set_link_objects(self.link_objects) - - # hack so distutils' build_extension() builds a library instead - compiler.link_shared_object = link_shared_object.__get__(compiler) - - def get_export_symbols(self, ext): - if isinstance(ext, Library): - return ext.export_symbols - return _build_ext.get_export_symbols(self, ext) - - def build_extension(self, ext): - ext._convert_pyx_sources_to_lang() - _compiler = self.compiler - try: - if isinstance(ext, Library): - self.compiler = self.shlib_compiler - _build_ext.build_extension(self, ext) - if ext._needs_stub: - cmd = self.get_finalized_command('build_py').build_lib - self.write_stub(cmd, ext) - finally: - self.compiler = _compiler - - def links_to_dynamic(self, ext): - """Return true if 'ext' links to a dynamic lib in the same package""" - # XXX this should check to ensure the lib is actually being built - # XXX as dynamic, and not just using a locally-found version or a - # XXX static-compiled version - libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) - pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) - return any(pkg + libname in libnames for libname in ext.libraries) - - def get_outputs(self): - return _build_ext.get_outputs(self) + self.__get_stubs_outputs() - - def __get_stubs_outputs(self): - # assemble the base name for each extension that needs a stub - ns_ext_bases = ( - os.path.join(self.build_lib, *ext._full_name.split('.')) - for ext in self.extensions - if ext._needs_stub - ) - # pair each base with the extension - pairs = itertools.product(ns_ext_bases, self.__get_output_extensions()) - return list(base + fnext for base, fnext in pairs) - - def __get_output_extensions(self): - yield '.py' - yield '.pyc' - if self.get_finalized_command('build_py').optimize: - yield '.pyo' - - def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s", ext._full_name, - output_dir) - stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + - '.py') - if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file + " already exists! 
Please delete.") - if not self.dry_run: - f = open(stub_file, 'w') - f.write( - '\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, imp" + if_dl(", dl"), - " __file__ = pkg_resources.resource_filename" - "(__name__,%r)" - % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " imp.load_dynamic(__name__,__file__)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "" # terminal \n - ]) - ) - f.close() - if compile: - from distutils.util import byte_compile - - byte_compile([stub_file], optimize=0, - force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize - if optimize > 0: - byte_compile([stub_file], optimize=optimize, - force=True, dry_run=self.dry_run) - if os.path.exists(stub_file) and not self.dry_run: - os.unlink(stub_file) - - -if use_stubs or os.name == 'nt': - # Build shared libraries - # - def link_shared_object( - self, objects, output_libname, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, export_symbols=None, - debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, - target_lang=None): - self.link( - self.SHARED_LIBRARY, objects, output_libname, - output_dir, libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, extra_preargs, extra_postargs, - build_temp, target_lang - ) -else: - # Build static libraries everywhere else - libtype = 'static' - - def link_shared_object( - self, objects, output_libname, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, export_symbols=None, - debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, - target_lang=None): - # XXX we need to either disallow these attrs on Library instances, - # or warn/abort here if set, or something... 
- # libraries=None, library_dirs=None, runtime_library_dirs=None, - # export_symbols=None, extra_preargs=None, extra_postargs=None, - # build_temp=None - - assert output_dir is None # distutils build_ext doesn't pass this - output_dir, filename = os.path.split(output_libname) - basename, ext = os.path.splitext(filename) - if self.library_filename("x").startswith('lib'): - # strip 'lib' prefix; this is kludgy if some platform uses - # a different prefix - basename = basename[3:] - - self.create_static_lib( - objects, basename, output_dir, debug, target_lang - ) diff --git a/env/lib/python2.7/site-packages/setuptools/command/build_ext.pyc b/env/lib/python2.7/site-packages/setuptools/command/build_ext.pyc deleted file mode 100644 index 4a4c099e..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/build_ext.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/build_py.py b/env/lib/python2.7/site-packages/setuptools/command/build_py.py deleted file mode 100644 index b0314fd4..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/build_py.py +++ /dev/null @@ -1,270 +0,0 @@ -from glob import glob -from distutils.util import convert_path -import distutils.command.build_py as orig -import os -import fnmatch -import textwrap -import io -import distutils.errors -import itertools - -from setuptools.extern import six -from setuptools.extern.six.moves import map, filter, filterfalse - -try: - from setuptools.lib2to3_ex import Mixin2to3 -except ImportError: - - class Mixin2to3: - def run_2to3(self, files, doctests=True): - "do nothing" - - -class build_py(orig.build_py, Mixin2to3): - """Enhanced 'build_py' command that includes data files with packages - - The data files are specified via a 'package_data' argument to 'setup()'. - See 'setuptools.dist.Distribution' for more details. - - Also, this version of the 'build_py' command allows you to specify both - 'py_modules' and 'packages' in the same setup operation. - """ - - def finalize_options(self): - orig.build_py.finalize_options(self) - self.package_data = self.distribution.package_data - self.exclude_package_data = (self.distribution.exclude_package_data or - {}) - if 'data_files' in self.__dict__: - del self.__dict__['data_files'] - self.__updated_files = [] - self.__doctests_2to3 = [] - - def run(self): - """Build modules, packages, and copy data files to build directory""" - if not self.py_modules and not self.packages: - return - - if self.py_modules: - self.build_modules() - - if self.packages: - self.build_packages() - self.build_package_data() - - self.run_2to3(self.__updated_files, False) - self.run_2to3(self.__updated_files, True) - self.run_2to3(self.__doctests_2to3, True) - - # Only compile actual .py files, using our base class' idea of what our - # output files are. 
- self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) - - def __getattr__(self, attr): - "lazily compute data files" - if attr == 'data_files': - self.data_files = self._get_data_files() - return self.data_files - return orig.build_py.__getattr__(self, attr) - - def build_module(self, module, module_file, package): - if six.PY2 and isinstance(package, six.string_types): - # avoid errors on Python 2 when unicode is passed (#190) - package = package.split('.') - outfile, copied = orig.build_py.build_module(self, module, module_file, - package) - if copied: - self.__updated_files.append(outfile) - return outfile, copied - - def _get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - self.analyze_manifest() - return list(map(self._get_pkg_data_files, self.packages or ())) - - def _get_pkg_data_files(self, package): - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Strip directory from globbed filenames - filenames = [ - os.path.relpath(file, src_dir) - for file in self.find_data_files(package, src_dir) - ] - return package, src_dir, build_dir, filenames - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - patterns = self._get_platform_patterns( - self.package_data, - package, - src_dir, - ) - globs_expanded = map(glob, patterns) - # flatten the expanded globs into an iterable of matches - globs_matches = itertools.chain.from_iterable(globs_expanded) - glob_files = filter(os.path.isfile, globs_matches) - files = itertools.chain( - self.manifest_files.get(package, []), - glob_files, - ) - return self.exclude_data_files(package, src_dir, files) - - def build_package_data(self): - """Copy data files into build directory""" - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - srcfile = os.path.join(src_dir, filename) - outf, copied = self.copy_file(srcfile, target) - srcfile = os.path.abspath(srcfile) - if (copied and - srcfile in self.distribution.convert_2to3_doctests): - self.__doctests_2to3.append(outf) - - def analyze_manifest(self): - self.manifest_files = mf = {} - if not self.distribution.include_package_data: - return - src_dirs = {} - for package in self.packages or (): - # Locate package source directory - src_dirs[assert_relative(self.get_package_dir(package))] = package - - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - for path in ei_cmd.filelist.files: - d, f = os.path.split(assert_relative(path)) - prev = None - oldf = f - while d and d != prev and d not in src_dirs: - prev = d - d, df = os.path.split(d) - f = os.path.join(df, f) - if d in src_dirs: - if path.endswith('.py') and f == oldf: - continue # it's a module, not data - mf.setdefault(src_dirs[d], []).append(path) - - def get_data_files(self): - pass # Lazily compute data files in _get_data_files() function. 
- - def check_package(self, package, package_dir): - """Check namespace packages' __init__ for declare_namespace""" - try: - return self.packages_checked[package] - except KeyError: - pass - - init_py = orig.build_py.check_package(self, package, package_dir) - self.packages_checked[package] = init_py - - if not init_py or not self.distribution.namespace_packages: - return init_py - - for pkg in self.distribution.namespace_packages: - if pkg == package or pkg.startswith(package + '.'): - break - else: - return init_py - - with io.open(init_py, 'rb') as f: - contents = f.read() - if b'declare_namespace' not in contents: - raise distutils.errors.DistutilsError( - "Namespace package problem: %s is a namespace package, but " - "its\n__init__.py does not call declare_namespace()! Please " - 'fix it.\n(See the setuptools manual under ' - '"Namespace Packages" for details.)\n"' % (package,) - ) - return init_py - - def initialize_options(self): - self.packages_checked = {} - orig.build_py.initialize_options(self) - - def get_package_dir(self, package): - res = orig.build_py.get_package_dir(self, package) - if self.distribution.src_root is not None: - return os.path.join(self.distribution.src_root, res) - return res - - def exclude_data_files(self, package, src_dir, files): - """Filter filenames for package's data files in 'src_dir'""" - files = list(files) - patterns = self._get_platform_patterns( - self.exclude_package_data, - package, - src_dir, - ) - match_groups = ( - fnmatch.filter(files, pattern) - for pattern in patterns - ) - # flatten the groups of matches into an iterable of matches - matches = itertools.chain.from_iterable(match_groups) - bad = set(matches) - keepers = ( - fn - for fn in files - if fn not in bad - ) - # ditch dupes - return list(_unique_everseen(keepers)) - - @staticmethod - def _get_platform_patterns(spec, package, src_dir): - """ - yield platform-specific path patterns (suitable for glob - or fn_match) from a glob-based spec (such as - self.package_data or self.exclude_package_data) - matching package in src_dir. - """ - raw_patterns = itertools.chain( - spec.get('', []), - spec.get(package, []), - ) - return ( - # Each pattern has to be converted to a platform-specific path - os.path.join(src_dir, convert_path(pattern)) - for pattern in raw_patterns - ) - - -# from Python docs -def _unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element - - -def assert_relative(path): - if not os.path.isabs(path): - return path - from distutils.errors import DistutilsSetupError - - msg = textwrap.dedent(""" - Error: setup script specifies an absolute path: - - %s - - setup() arguments must *always* be /-separated paths relative to the - setup.py directory, *never* absolute paths. 
- """).lstrip() % path - raise DistutilsSetupError(msg) diff --git a/env/lib/python2.7/site-packages/setuptools/command/build_py.pyc b/env/lib/python2.7/site-packages/setuptools/command/build_py.pyc deleted file mode 100644 index c690ddd4..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/build_py.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/develop.pyc b/env/lib/python2.7/site-packages/setuptools/command/develop.pyc deleted file mode 100644 index 0fc20a66..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/develop.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/dist_info.pyc b/env/lib/python2.7/site-packages/setuptools/command/dist_info.pyc deleted file mode 100644 index 29faf4af..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/dist_info.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/easy_install.py b/env/lib/python2.7/site-packages/setuptools/command/easy_install.py deleted file mode 100644 index 06c98271..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/easy_install.py +++ /dev/null @@ -1,2342 +0,0 @@ -#!/usr/bin/env python -""" -Easy Install ------------- - -A tool for doing automatic download/extract/build of distutils-based Python -packages. For detailed documentation, see the accompanying EasyInstall.txt -file, or visit the `EasyInstall home page`__. - -__ https://setuptools.readthedocs.io/en/latest/easy_install.html - -""" - -from glob import glob -from distutils.util import get_platform -from distutils.util import convert_path, subst_vars -from distutils.errors import ( - DistutilsArgError, DistutilsOptionError, - DistutilsError, DistutilsPlatformError, -) -from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS -from distutils import log, dir_util -from distutils.command.build_scripts import first_line_re -from distutils.spawn import find_executable -import sys -import os -import zipimport -import shutil -import tempfile -import zipfile -import re -import stat -import random -import textwrap -import warnings -import site -import struct -import contextlib -import subprocess -import shlex -import io - - -from sysconfig import get_config_vars, get_path - -from setuptools import SetuptoolsDeprecationWarning - -from setuptools.extern import six -from setuptools.extern.six.moves import configparser, map - -from setuptools import Command -from setuptools.sandbox import run_setup -from setuptools.py27compat import rmtree_safe -from setuptools.command import setopt -from setuptools.archive_util import unpack_archive -from setuptools.package_index import ( - PackageIndex, parse_requirement_arg, URL_SCHEME, -) -from setuptools.command import bdist_egg, egg_info -from setuptools.wheel import Wheel -from pkg_resources import ( - yield_lines, normalize_path, resource_string, ensure_directory, - get_distribution, find_distributions, Environment, Requirement, - Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, - VersionConflict, DEVELOP_DIST, -) -import pkg_resources.py31compat - -__metaclass__ = type - -# Turn on PEP440Warnings -warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) - -__all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', - 'main', 'get_exe_prefixes', -] - - -def is_64bit(): - return struct.calcsize("P") == 8 - - -def samefile(p1, p2): - """ - Determine if two paths reference 
the same file. - - Augments os.path.samefile to work on Windows and - suppresses errors if the path doesn't exist. - """ - both_exist = os.path.exists(p1) and os.path.exists(p2) - use_samefile = hasattr(os.path, 'samefile') and both_exist - if use_samefile: - return os.path.samefile(p1, p2) - norm_p1 = os.path.normpath(os.path.normcase(p1)) - norm_p2 = os.path.normpath(os.path.normcase(p2)) - return norm_p1 == norm_p2 - - -if six.PY2: - - def _to_bytes(s): - return s - - def isascii(s): - try: - six.text_type(s, 'ascii') - return True - except UnicodeError: - return False -else: - - def _to_bytes(s): - return s.encode('utf8') - - def isascii(s): - try: - s.encode('ascii') - return True - except UnicodeError: - return False - - -_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ') - - -class easy_install(Command): - """Manage a download/build/install process""" - description = "Find/get/install Python packages" - command_consumes_arguments = True - - user_options = [ - ('prefix=', None, "installation prefix"), - ("zip-ok", "z", "install package as a zipfile"), - ("multi-version", "m", "make apps have to require() a version"), - ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), - ("install-dir=", "d", "install package to DIR"), - ("script-dir=", "s", "install scripts to DIR"), - ("exclude-scripts", "x", "Don't install scripts"), - ("always-copy", "a", "Copy all needed packages to install dir"), - ("index-url=", "i", "base URL of Python Package Index"), - ("find-links=", "f", "additional URL(s) to search for packages"), - ("build-directory=", "b", - "download/extract/build in DIR; keep the results"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('record=', None, - "filename in which to record list of installed files"), - ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), - ('site-dirs=', 'S', "list of directories where .pth files work"), - ('editable', 'e', "Install specified packages in editable form"), - ('no-deps', 'N', "don't install dependencies"), - ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), - ('local-snapshots-ok', 'l', - "allow building eggs from local checkouts"), - ('version', None, "print version information and exit"), - ('no-find-links', None, - "Don't load find-links defined in packages being installed") - ] - boolean_options = [ - 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', - 'editable', - 'no-deps', 'local-snapshots-ok', 'version' - ] - - if site.ENABLE_USER_SITE: - help_msg = "install in user site-package '%s'" % site.USER_SITE - user_options.append(('user', None, help_msg)) - boolean_options.append('user') - - negative_opt = {'always-unzip': 'zip-ok'} - create_index = PackageIndex - - def initialize_options(self): - # the --user option seems to be an opt-in one, - # so the default should be False. 
- self.user = 0 - self.zip_ok = self.local_snapshots_ok = None - self.install_dir = self.script_dir = self.exclude_scripts = None - self.index_url = None - self.find_links = None - self.build_directory = None - self.args = None - self.optimize = self.record = None - self.upgrade = self.always_copy = self.multi_version = None - self.editable = self.no_deps = self.allow_hosts = None - self.root = self.prefix = self.no_report = None - self.version = None - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - self.install_base = None - self.install_platbase = None - if site.ENABLE_USER_SITE: - self.install_userbase = site.USER_BASE - self.install_usersite = site.USER_SITE - else: - self.install_userbase = None - self.install_usersite = None - self.no_find_links = None - - # Options not specifiable via command line - self.package_index = None - self.pth_file = self.always_copy_from = None - self.site_dirs = None - self.installed_projects = {} - self.sitepy_installed = False - # Always read easy_install options, even if we are subclassed, or have - # an independent instance created. This ensures that defaults will - # always come from the standard configuration file(s)' "easy_install" - # section, even if this is a "develop" or "install" command, or some - # other embedding. - self._dry_run = None - self.verbose = self.distribution.verbose - self.distribution._set_command_options( - self, self.distribution.get_option_dict('easy_install') - ) - - def delete_blockers(self, blockers): - extant_blockers = ( - filename for filename in blockers - if os.path.exists(filename) or os.path.islink(filename) - ) - list(map(self._delete_path, extant_blockers)) - - def _delete_path(self, path): - log.info("Deleting %s", path) - if self.dry_run: - return - - is_tree = os.path.isdir(path) and not os.path.islink(path) - remover = rmtree if is_tree else os.unlink - remover(path) - - @staticmethod - def _render_version(): - """ - Render the Setuptools version and installation details, then exit. - """ - ver = sys.version[:3] - dist = get_distribution('setuptools') - tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})' - print(tmpl.format(**locals())) - raise SystemExit() - - def finalize_options(self): - self.version and self._render_version() - - py_version = sys.version.split()[0] - prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') - - self.config_vars = { - 'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': py_version[0:3], - 'py_version_nodot': py_version[0] + py_version[2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - # Only python 3.2+ has abiflags - 'abiflags': getattr(sys, 'abiflags', ''), - } - - if site.ENABLE_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite - - self._fix_install_dir_for_user_site() - - self.expand_basedirs() - self.expand_dirs() - - self._expand( - 'install_dir', 'script_dir', 'build_directory', - 'site_dirs', - ) - # If a non-default installation directory was specified, default the - # script directory to match it. 
- if self.script_dir is None: - self.script_dir = self.install_dir - - if self.no_find_links is None: - self.no_find_links = False - - # Let install_dir get set by install_lib command, which in turn - # gets its info from the install command, and takes into account - # --prefix and --home and all that other crud. - self.set_undefined_options( - 'install_lib', ('install_dir', 'install_dir') - ) - # Likewise, set default script_dir from 'install_scripts.install_dir' - self.set_undefined_options( - 'install_scripts', ('install_dir', 'script_dir') - ) - - if self.user and self.install_purelib: - self.install_dir = self.install_purelib - self.script_dir = self.install_scripts - # default --record from the install command - self.set_undefined_options('install', ('record', 'record')) - # Should this be moved to the if statement below? It's not used - # elsewhere - normpath = map(normalize_path, sys.path) - self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in - self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d + " (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) - if not self.editable: - self.check_site_dir() - self.index_url = self.index_url or "https://pypi.org/simple/" - self.shadow_path = self.all_site_dirs[:] - for path_item in self.install_dir, normalize_path(self.script_dir): - if path_item not in self.shadow_path: - self.shadow_path.insert(0, path_item) - - if self.allow_hosts is not None: - hosts = [s.strip() for s in self.allow_hosts.split(',')] - else: - hosts = ['*'] - if self.package_index is None: - self.package_index = self.create_index( - self.index_url, search_path=self.shadow_path, hosts=hosts, - ) - self.local_index = Environment(self.shadow_path + sys.path) - - if self.find_links is not None: - if isinstance(self.find_links, six.string_types): - self.find_links = self.find_links.split() - else: - self.find_links = [] - if self.local_snapshots_ok: - self.package_index.scan_egg_links(self.shadow_path + sys.path) - if not self.no_find_links: - self.package_index.add_find_links(self.find_links) - self.set_undefined_options('install_lib', ('optimize', 'optimize')) - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): - raise ValueError - except ValueError: - raise DistutilsOptionError("--optimize must be 0, 1, or 2") - - if self.editable and not self.build_directory: - raise DistutilsArgError( - "Must specify a build directory (-b) when using --editable" - ) - if not self.args: - raise DistutilsArgError( - "No urls, filenames, or requirements specified (see --help)") - - self.outputs = [] - - def _fix_install_dir_for_user_site(self): - """ - Fix the install_dir if "--user" was used. 
- """ - if not self.user or not site.ENABLE_USER_SITE: - return - - self.create_home_path() - if self.install_userbase is None: - msg = "User base directory is not specified" - raise DistutilsPlatformError(msg) - self.install_base = self.install_platbase = self.install_userbase - scheme_name = os.name.replace('posix', 'unix') + '_user' - self.select_scheme(scheme_name) - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or os.name == 'nt': - val = os.path.expanduser(val) - val = subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Calls `os.path.expanduser` on install_base, install_platbase and - root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Calls `os.path.expanduser` on install dirs.""" - dirs = [ - 'install_purelib', - 'install_platlib', - 'install_lib', - 'install_headers', - 'install_scripts', - 'install_data', - ] - self._expand_attrs(dirs) - - def run(self): - if self.verbose != self.distribution.verbose: - log.set_verbosity(self.verbose) - try: - for spec in self.args: - self.easy_install(spec, not self.no_deps) - if self.record: - outputs = self.outputs - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in range(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - from distutils import file_util - - self.execute( - file_util.write_file, (self.record, outputs), - "writing list of installed files to '%s'" % - self.record - ) - self.warn_deprecated_options() - finally: - log.set_verbosity(self.distribution.verbose) - - def pseudo_tempname(self): - """Return a pseudo-tempname base in the install directory. - This code is intentionally naive; if a malicious party can write to - the target directory you're already in deep doodoo. - """ - try: - pid = os.getpid() - except Exception: - pid = random.randint(0, sys.maxsize) - return os.path.join(self.install_dir, "test-easy-install-%s" % pid) - - def warn_deprecated_options(self): - pass - - def check_site_dir(self): - """Verify that self.install_dir is .pth-capable dir, if needed""" - - instdir = normalize_path(self.install_dir) - pth_file = os.path.join(instdir, 'easy-install.pth') - - # Is it a configured, PYTHONPATH, implicit, or explicit site dir? - is_site_dir = instdir in self.all_site_dirs - - if not is_site_dir and not self.multi_version: - # No? 
Then directly test whether it does .pth file processing - is_site_dir = self.check_pth_processing() - else: - # make sure we can write to target dir - testfile = self.pseudo_tempname() + '.write-test' - test_exists = os.path.exists(testfile) - try: - if test_exists: - os.unlink(testfile) - open(testfile, 'w').close() - os.unlink(testfile) - except (OSError, IOError): - self.cant_write_to_target() - - if not is_site_dir and not self.multi_version: - # Can't install non-multi to non-site dir - raise DistutilsError(self.no_default_version_msg()) - - if is_site_dir: - if self.pth_file is None: - self.pth_file = PthDistributions(pth_file, self.all_site_dirs) - else: - self.pth_file = None - - if instdir not in map(normalize_path, _pythonpath()): - # only PYTHONPATH dirs need a site.py, so pretend it's there - self.sitepy_installed = True - elif self.multi_version and not os.path.exists(pth_file): - self.sitepy_installed = True # don't need site.py in this case - self.pth_file = None # and don't create a .pth file - self.install_dir = instdir - - __cant_write_msg = textwrap.dedent(""" - can't create or remove files in install directory - - The following error occurred while trying to add or remove files in the - installation directory: - - %s - - The installation directory you specified (via --install-dir, --prefix, or - the distutils default setting) was: - - %s - """).lstrip() - - __not_exists_id = textwrap.dedent(""" - This directory does not currently exist. Please create it and try again, or - choose a different installation directory (using the -d or --install-dir - option). - """).lstrip() - - __access_msg = textwrap.dedent(""" - Perhaps your account does not have write access to this directory? If the - installation directory is a system-owned directory, you may need to sign in - as the administrator or "root" account. If you do not have administrative - access to this machine, you may wish to choose a different installation - directory, preferably one that is listed in your PYTHONPATH environment - variable. - - For information on other options, you may wish to consult the - documentation at: - - https://setuptools.readthedocs.io/en/latest/easy_install.html - - Please make the appropriate changes for your system and try again. - """).lstrip() - - def cant_write_to_target(self): - msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,) - - if not os.path.exists(self.install_dir): - msg += '\n' + self.__not_exists_id - else: - msg += '\n' + self.__access_msg - raise DistutilsError(msg) - - def check_pth_processing(self): - """Empirically verify whether .pth files are supported in inst. 
dir""" - instdir = self.install_dir - log.info("Checking .pth file support in %s", instdir) - pth_file = self.pseudo_tempname() + ".pth" - ok_file = pth_file + '.ok' - ok_exists = os.path.exists(ok_file) - tmpl = _one_liner(""" - import os - f = open({ok_file!r}, 'w') - f.write('OK') - f.close() - """) + '\n' - try: - if ok_exists: - os.unlink(ok_file) - dirname = os.path.dirname(ok_file) - pkg_resources.py31compat.makedirs(dirname, exist_ok=True) - f = open(pth_file, 'w') - except (OSError, IOError): - self.cant_write_to_target() - else: - try: - f.write(tmpl.format(**locals())) - f.close() - f = None - executable = sys.executable - if os.name == 'nt': - dirname, basename = os.path.split(executable) - alt = os.path.join(dirname, 'pythonw.exe') - use_alt = ( - basename.lower() == 'python.exe' and - os.path.exists(alt) - ) - if use_alt: - # use pythonw.exe to avoid opening a console window - executable = alt - - from distutils.spawn import spawn - - spawn([executable, '-E', '-c', 'pass'], 0) - - if os.path.exists(ok_file): - log.info( - "TEST PASSED: %s appears to support .pth files", - instdir - ) - return True - finally: - if f: - f.close() - if os.path.exists(ok_file): - os.unlink(ok_file) - if os.path.exists(pth_file): - os.unlink(pth_file) - if not self.multi_version: - log.warn("TEST FAILED: %s does NOT support .pth files", instdir) - return False - - def install_egg_scripts(self, dist): - """Write all the scripts for `dist`, unless scripts are excluded""" - if not self.exclude_scripts and dist.metadata_isdir('scripts'): - for script_name in dist.metadata_listdir('scripts'): - if dist.metadata_isdir('scripts/' + script_name): - # The "script" is a directory, likely a Python 3 - # __pycache__ directory, so skip it. - continue - self.install_script( - dist, script_name, - dist.get_metadata('scripts/' + script_name) - ) - self.install_wrapper_scripts(dist) - - def add_output(self, path): - if os.path.isdir(path): - for base, dirs, files in os.walk(path): - for filename in files: - self.outputs.append(os.path.join(base, filename)) - else: - self.outputs.append(path) - - def not_editable(self, spec): - if self.editable: - raise DistutilsArgError( - "Invalid argument %r: you can't use filenames or URLs " - "with --editable (except via the --find-links option)." 
- % (spec,) - ) - - def check_editable(self, spec): - if not self.editable: - return - - if os.path.exists(os.path.join(self.build_directory, spec.key)): - raise DistutilsArgError( - "%r already exists in %s; can't do a checkout there" % - (spec.key, self.build_directory) - ) - - @contextlib.contextmanager - def _tmpdir(self): - tmpdir = tempfile.mkdtemp(prefix=u"easy_install-") - try: - # cast to str as workaround for #709 and #710 and #712 - yield str(tmpdir) - finally: - os.path.exists(tmpdir) and rmtree(rmtree_safe(tmpdir)) - - def easy_install(self, spec, deps=False): - if not self.editable: - self.install_site_py() - - with self._tmpdir() as tmpdir: - if not isinstance(spec, Requirement): - if URL_SCHEME(spec): - # It's a url, download it to tmpdir and process - self.not_editable(spec) - dl = self.package_index.download(spec, tmpdir) - return self.install_item(None, dl, tmpdir, deps, True) - - elif os.path.exists(spec): - # Existing file or directory, just process it directly - self.not_editable(spec) - return self.install_item(None, spec, tmpdir, deps, True) - else: - spec = parse_requirement_arg(spec) - - self.check_editable(spec) - dist = self.package_index.fetch_distribution( - spec, tmpdir, self.upgrade, self.editable, - not self.always_copy, self.local_index - ) - if dist is None: - msg = "Could not find suitable distribution for %r" % spec - if self.always_copy: - msg += " (--always-copy skips system and development eggs)" - raise DistutilsError(msg) - elif dist.precedence == DEVELOP_DIST: - # .egg-info dists don't need installing, just process deps - self.process_distribution(spec, dist, deps, "Using") - return dist - else: - return self.install_item(spec, dist.location, tmpdir, deps) - - def install_item(self, spec, download, tmpdir, deps, install_needed=False): - - # Installation is also needed if file in tmpdir or is not an egg - install_needed = install_needed or self.always_copy - install_needed = install_needed or os.path.dirname(download) == tmpdir - install_needed = install_needed or not download.endswith('.egg') - install_needed = install_needed or ( - self.always_copy_from is not None and - os.path.dirname(normalize_path(download)) == - normalize_path(self.always_copy_from) - ) - - if spec and not install_needed: - # at this point, we know it's a local .egg, we just don't know if - # it's already installed. - for dist in self.local_index[spec.project_name]: - if dist.location == download: - break - else: - install_needed = True # it's not in the local index - - log.info("Processing %s", os.path.basename(download)) - - if install_needed: - dists = self.install_eggs(spec, download, tmpdir) - for dist in dists: - self.process_distribution(spec, dist, deps) - else: - dists = [self.egg_distribution(download)] - self.process_distribution(spec, dists[0], deps, "Using") - - if spec is not None: - for dist in dists: - if dist in spec: - return dist - - def select_scheme(self, name): - """Sets the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! 
- scheme = INSTALL_SCHEMES[name] - for key in SCHEME_KEYS: - attrname = 'install_' + key - if getattr(self, attrname) is None: - setattr(self, attrname, scheme[key]) - - def process_distribution(self, requirement, dist, deps=True, *info): - self.update_pth(dist) - self.package_index.add(dist) - if dist in self.local_index[dist.key]: - self.local_index.remove(dist) - self.local_index.add(dist) - self.install_egg_scripts(dist) - self.installed_projects[dist.key] = dist - log.info(self.installation_report(requirement, dist, *info)) - if (dist.has_metadata('dependency_links.txt') and - not self.no_find_links): - self.package_index.add_find_links( - dist.get_metadata_lines('dependency_links.txt') - ) - if not deps and not self.always_copy: - return - elif requirement is not None and dist.key != requirement.key: - log.warn("Skipping dependencies for %s", dist) - return # XXX this is not the distribution we were looking for - elif requirement is None or dist not in requirement: - # if we wound up with a different version, resolve what we've got - distreq = dist.as_requirement() - requirement = Requirement(str(distreq)) - log.info("Processing dependencies for %s", requirement) - try: - distros = WorkingSet([]).resolve( - [requirement], self.local_index, self.easy_install - ) - except DistributionNotFound as e: - raise DistutilsError(str(e)) - except VersionConflict as e: - raise DistutilsError(e.report()) - if self.always_copy or self.always_copy_from: - # Force all the relevant distros to be copied or activated - for dist in distros: - if dist.key not in self.installed_projects: - self.easy_install(dist.as_requirement()) - log.info("Finished processing dependencies for %s", requirement) - - def should_unzip(self, dist): - if self.zip_ok is not None: - return not self.zip_ok - if dist.has_metadata('not-zip-safe'): - return True - if not dist.has_metadata('zip-safe'): - return True - return False - - def maybe_move(self, spec, dist_filename, setup_base): - dst = os.path.join(self.build_directory, spec.key) - if os.path.exists(dst): - msg = ( - "%r already exists in %s; build directory %s will not be kept" - ) - log.warn(msg, spec.key, self.build_directory, setup_base) - return setup_base - if os.path.isdir(dist_filename): - setup_base = dist_filename - else: - if os.path.dirname(dist_filename) == setup_base: - os.unlink(dist_filename) # get it out of the tmp dir - contents = os.listdir(setup_base) - if len(contents) == 1: - dist_filename = os.path.join(setup_base, contents[0]) - if os.path.isdir(dist_filename): - # if the only thing there is a directory, move it instead - setup_base = dist_filename - ensure_directory(dst) - shutil.move(setup_base, dst) - return dst - - def install_wrapper_scripts(self, dist): - if self.exclude_scripts: - return - for args in ScriptWriter.best().get_args(dist): - self.write_script(*args) - - def install_script(self, dist, script_name, script_text, dev_path=None): - """Generate a legacy script wrapper and install it""" - spec = str(dist.as_requirement()) - is_script = is_python_script(script_text, script_name) - - if is_script: - body = self._load_template(dev_path) % locals() - script_text = ScriptWriter.get_header(script_text) + body - self.write_script(script_name, _to_bytes(script_text), 'b') - - @staticmethod - def _load_template(dev_path): - """ - There are a couple of template scripts in the package. This - function loads one of them and prepares it for use. 
- """ - # See https://github.com/pypa/setuptools/issues/134 for info - # on script file naming and downstream issues with SVR4 - name = 'script.tmpl' - if dev_path: - name = name.replace('.tmpl', ' (dev).tmpl') - - raw_bytes = resource_string('setuptools', name) - return raw_bytes.decode('utf-8') - - def write_script(self, script_name, contents, mode="t", blockers=()): - """Write an executable file to the scripts directory""" - self.delete_blockers( # clean up old .py/.pyw w/o a script - [os.path.join(self.script_dir, x) for x in blockers] - ) - log.info("Installing %s script to %s", script_name, self.script_dir) - target = os.path.join(self.script_dir, script_name) - self.add_output(target) - - if self.dry_run: - return - - mask = current_umask() - ensure_directory(target) - if os.path.exists(target): - os.unlink(target) - with open(target, "w" + mode) as f: - f.write(contents) - chmod(target, 0o777 - mask) - - def install_eggs(self, spec, dist_filename, tmpdir): - # .egg dirs or files are already built, so just return them - if dist_filename.lower().endswith('.egg'): - return [self.install_egg(dist_filename, tmpdir)] - elif dist_filename.lower().endswith('.exe'): - return [self.install_exe(dist_filename, tmpdir)] - elif dist_filename.lower().endswith('.whl'): - return [self.install_wheel(dist_filename, tmpdir)] - - # Anything else, try to extract and build - setup_base = tmpdir - if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): - unpack_archive(dist_filename, tmpdir, self.unpack_progress) - elif os.path.isdir(dist_filename): - setup_base = os.path.abspath(dist_filename) - - if (setup_base.startswith(tmpdir) # something we downloaded - and self.build_directory and spec is not None): - setup_base = self.maybe_move(spec, dist_filename, setup_base) - - # Find the setup.py file - setup_script = os.path.join(setup_base, 'setup.py') - - if not os.path.exists(setup_script): - setups = glob(os.path.join(setup_base, '*', 'setup.py')) - if not setups: - raise DistutilsError( - "Couldn't find a setup script in %s" % - os.path.abspath(dist_filename) - ) - if len(setups) > 1: - raise DistutilsError( - "Multiple setup scripts in %s" % - os.path.abspath(dist_filename) - ) - setup_script = setups[0] - - # Now run it, and return the result - if self.editable: - log.info(self.report_editable(spec, setup_script)) - return [] - else: - return self.build_and_install(setup_script, setup_base) - - def egg_distribution(self, egg_path): - if os.path.isdir(egg_path): - metadata = PathMetadata(egg_path, os.path.join(egg_path, - 'EGG-INFO')) - else: - metadata = EggMetadata(zipimport.zipimporter(egg_path)) - return Distribution.from_filename(egg_path, metadata=metadata) - - def install_egg(self, egg_path, tmpdir): - destination = os.path.join( - self.install_dir, - os.path.basename(egg_path), - ) - destination = os.path.abspath(destination) - if not self.dry_run: - ensure_directory(destination) - - dist = self.egg_distribution(egg_path) - if not samefile(egg_path, destination): - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute( - os.unlink, - (destination,), - "Removing " + destination, - ) - try: - new_dist_is_zipped = False - if os.path.isdir(egg_path): - if egg_path.startswith(tmpdir): - f, m = shutil.move, "Moving" - else: - f, m = shutil.copytree, "Copying" - elif self.should_unzip(dist): - self.mkpath(destination) - f, m = self.unpack_and_compile, "Extracting" - 
else: - new_dist_is_zipped = True - if egg_path.startswith(tmpdir): - f, m = shutil.move, "Moving" - else: - f, m = shutil.copy2, "Copying" - self.execute( - f, - (egg_path, destination), - (m + " %s to %s") % ( - os.path.basename(egg_path), - os.path.dirname(destination) - ), - ) - update_dist_caches( - destination, - fix_zipimporter_caches=new_dist_is_zipped, - ) - except Exception: - update_dist_caches(destination, fix_zipimporter_caches=False) - raise - - self.add_output(destination) - return self.egg_distribution(destination) - - def install_exe(self, dist_filename, tmpdir): - # See if it's valid, get data - cfg = extract_wininst_cfg(dist_filename) - if cfg is None: - raise DistutilsError( - "%s is not a valid distutils Windows .exe" % dist_filename - ) - # Create a dummy distribution object until we build the real distro - dist = Distribution( - None, - project_name=cfg.get('metadata', 'name'), - version=cfg.get('metadata', 'version'), platform=get_platform(), - ) - - # Convert the .exe to an unpacked egg - egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg') - dist.location = egg_path - egg_tmp = egg_path + '.tmp' - _egg_info = os.path.join(egg_tmp, 'EGG-INFO') - pkg_inf = os.path.join(_egg_info, 'PKG-INFO') - ensure_directory(pkg_inf) # make sure EGG-INFO dir exists - dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX - self.exe_to_egg(dist_filename, egg_tmp) - - # Write EGG-INFO/PKG-INFO - if not os.path.exists(pkg_inf): - f = open(pkg_inf, 'w') - f.write('Metadata-Version: 1.0\n') - for k, v in cfg.items('metadata'): - if k != 'target_version': - f.write('%s: %s\n' % (k.replace('_', '-').title(), v)) - f.close() - script_dir = os.path.join(_egg_info, 'scripts') - # delete entry-point scripts to avoid duping - self.delete_blockers([ - os.path.join(script_dir, args[0]) - for args in ScriptWriter.get_args(dist) - ]) - # Build .egg file from tmpdir - bdist_egg.make_zipfile( - egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run, - ) - # install the .egg - return self.install_egg(egg_path, tmpdir) - - def exe_to_egg(self, dist_filename, egg_tmp): - """Extract a bdist_wininst to the directories an egg would use""" - # Check for .pth file and set up prefix translations - prefixes = get_exe_prefixes(dist_filename) - to_compile = [] - native_libs = [] - top_level = {} - - def process(src, dst): - s = src.lower() - for old, new in prefixes: - if s.startswith(old): - src = new + src[len(old):] - parts = src.split('/') - dst = os.path.join(egg_tmp, *parts) - dl = dst.lower() - if dl.endswith('.pyd') or dl.endswith('.dll'): - parts[-1] = bdist_egg.strip_module(parts[-1]) - top_level[os.path.splitext(parts[0])[0]] = 1 - native_libs.append(src) - elif dl.endswith('.py') and old != 'SCRIPTS/': - top_level[os.path.splitext(parts[0])[0]] = 1 - to_compile.append(dst) - return dst - if not src.endswith('.pth'): - log.warn("WARNING: can't process %s", src) - return None - - # extract, tracking .pyd/.dll->native_libs and .py -> to_compile - unpack_archive(dist_filename, egg_tmp, process) - stubs = [] - for res in native_libs: - if res.lower().endswith('.pyd'): # create stubs for .pyd's - parts = res.split('/') - resource = parts[-1] - parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py' - pyfile = os.path.join(egg_tmp, *parts) - to_compile.append(pyfile) - stubs.append(pyfile) - bdist_egg.write_stub(resource, pyfile) - self.byte_compile(to_compile) # compile .py's - bdist_egg.write_safety_flag( - os.path.join(egg_tmp, 'EGG-INFO'), - bdist_egg.analyze_egg(egg_tmp, stubs)) # write 
zip-safety flag - - for name in 'top_level', 'native_libs': - if locals()[name]: - txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') - if not os.path.exists(txt): - f = open(txt, 'w') - f.write('\n'.join(locals()[name]) + '\n') - f.close() - - def install_wheel(self, wheel_path, tmpdir): - wheel = Wheel(wheel_path) - assert wheel.is_compatible() - destination = os.path.join(self.install_dir, wheel.egg_name()) - destination = os.path.abspath(destination) - if not self.dry_run: - ensure_directory(destination) - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute( - os.unlink, - (destination,), - "Removing " + destination, - ) - try: - self.execute( - wheel.install_as_egg, - (destination,), - ("Installing %s to %s") % ( - os.path.basename(wheel_path), - os.path.dirname(destination) - ), - ) - finally: - update_dist_caches(destination, fix_zipimporter_caches=False) - self.add_output(destination) - return self.egg_distribution(destination) - - __mv_warning = textwrap.dedent(""" - Because this distribution was installed --multi-version, before you can - import modules from this package in an application, you will need to - 'import pkg_resources' and then use a 'require()' call similar to one of - these examples, in order to select the desired version: - - pkg_resources.require("%(name)s") # latest installed version - pkg_resources.require("%(name)s==%(version)s") # this exact version - pkg_resources.require("%(name)s>=%(version)s") # this version or higher - """).lstrip() - - __id_warning = textwrap.dedent(""" - Note also that the installation directory must be on sys.path at runtime for - this to work. (e.g. by being the application's script directory, by being on - PYTHONPATH, or by being added to sys.path by your code.) - """) - - def installation_report(self, req, dist, what="Installed"): - """Helpful installation message for display to package users""" - msg = "\n%(what)s %(eggloc)s%(extras)s" - if self.multi_version and not self.no_report: - msg += '\n' + self.__mv_warning - if self.install_dir not in map(normalize_path, sys.path): - msg += '\n' + self.__id_warning - - eggloc = dist.location - name = dist.project_name - version = dist.version - extras = '' # TODO: self.report_extras(req, dist) - return msg % locals() - - __editable_msg = textwrap.dedent(""" - Extracted editable version of %(spec)s to %(dirname)s - - If it uses setuptools in its setup script, you can activate it in - "development" mode by going to that directory and running:: - - %(python)s setup.py develop - - See the setuptools documentation for the "develop" command for more info. 
- """).lstrip() - - def report_editable(self, spec, setup_script): - dirname = os.path.dirname(setup_script) - python = sys.executable - return '\n' + self.__editable_msg % locals() - - def run_setup(self, setup_script, setup_base, args): - sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) - sys.modules.setdefault('distutils.command.egg_info', egg_info) - - args = list(args) - if self.verbose > 2: - v = 'v' * (self.verbose - 1) - args.insert(0, '-' + v) - elif self.verbose < 2: - args.insert(0, '-q') - if self.dry_run: - args.insert(0, '-n') - log.info( - "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) - ) - try: - run_setup(setup_script, args) - except SystemExit as v: - raise DistutilsError("Setup script exited with %s" % (v.args[0],)) - - def build_and_install(self, setup_script, setup_base): - args = ['bdist_egg', '--dist-dir'] - - dist_dir = tempfile.mkdtemp( - prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) - ) - try: - self._set_fetcher_options(os.path.dirname(setup_script)) - args.append(dist_dir) - - self.run_setup(setup_script, setup_base, args) - all_eggs = Environment([dist_dir]) - eggs = [] - for key in all_eggs: - for dist in all_eggs[key]: - eggs.append(self.install_egg(dist.location, setup_base)) - if not eggs and not self.dry_run: - log.warn("No eggs found in %s (setup script problem?)", - dist_dir) - return eggs - finally: - rmtree(dist_dir) - log.set_verbosity(self.verbose) # restore our log verbosity - - def _set_fetcher_options(self, base): - """ - When easy_install is about to run bdist_egg on a source dist, that - source dist might have 'setup_requires' directives, requiring - additional fetching. Ensure the fetcher options given to easy_install - are available to that command as well. - """ - # find the fetch options from easy_install and write them out - # to the setup.cfg file. - ei_opts = self.distribution.get_option_dict('easy_install').copy() - fetch_directives = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts', - ) - fetch_options = {} - for key, val in ei_opts.items(): - if key not in fetch_directives: - continue - fetch_options[key.replace('_', '-')] = val[1] - # create a settings dictionary suitable for `edit_config` - settings = dict(easy_install=fetch_options) - cfg_filename = os.path.join(base, 'setup.cfg') - setopt.edit_config(cfg_filename, settings) - - def update_pth(self, dist): - if self.pth_file is None: - return - - for d in self.pth_file[dist.key]: # drop old entries - if self.multi_version or d.location != dist.location: - log.info("Removing %s from easy-install.pth file", d) - self.pth_file.remove(d) - if d.location in self.shadow_path: - self.shadow_path.remove(d.location) - - if not self.multi_version: - if dist.location in self.pth_file.paths: - log.info( - "%s is already the active version in easy-install.pth", - dist, - ) - else: - log.info("Adding %s to easy-install.pth file", dist) - self.pth_file.add(dist) # add new entry - if dist.location not in self.shadow_path: - self.shadow_path.append(dist.location) - - if not self.dry_run: - - self.pth_file.save() - - if dist.key == 'setuptools': - # Ensure that setuptools itself never becomes unavailable! - # XXX should this check for latest version? 
- filename = os.path.join(self.install_dir, 'setuptools.pth') - if os.path.islink(filename): - os.unlink(filename) - f = open(filename, 'wt') - f.write(self.pth_file.make_relative(dist.location) + '\n') - f.close() - - def unpack_progress(self, src, dst): - # Progress filter for unpacking - log.debug("Unpacking %s to %s", src, dst) - return dst # only unpack-and-compile skips files for dry run - - def unpack_and_compile(self, egg_path, destination): - to_compile = [] - to_chmod = [] - - def pf(src, dst): - if dst.endswith('.py') and not src.startswith('EGG-INFO/'): - to_compile.append(dst) - elif dst.endswith('.dll') or dst.endswith('.so'): - to_chmod.append(dst) - self.unpack_progress(src, dst) - return not self.dry_run and dst or None - - unpack_archive(egg_path, destination, pf) - self.byte_compile(to_compile) - if not self.dry_run: - for f in to_chmod: - mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755 - chmod(f, mode) - - def byte_compile(self, to_compile): - if sys.dont_write_bytecode: - return - - from distutils.util import byte_compile - - try: - # try to make the byte compile messages quieter - log.set_verbosity(self.verbose - 1) - - byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) - if self.optimize: - byte_compile( - to_compile, optimize=self.optimize, force=1, - dry_run=self.dry_run, - ) - finally: - log.set_verbosity(self.verbose) # restore original verbosity - - __no_default_msg = textwrap.dedent(""" - bad install directory or PYTHONPATH - - You are attempting to install a package to a directory that is not - on PYTHONPATH and which Python does not read ".pth" files from. The - installation directory you specified (via --install-dir, --prefix, or - the distutils default setting) was: - - %s - - and your PYTHONPATH environment variable currently contains: - - %r - - Here are some of your options for correcting the problem: - - * You can choose a different installation directory, i.e., one that is - on PYTHONPATH or supports .pth files - - * You can add the installation directory to the PYTHONPATH environment - variable. (It must then also be on PYTHONPATH whenever you run - Python and want to use the package(s) you are installing.) - - * You can set up the installation directory to support ".pth" files by - using one of the approaches described here: - - https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations - - - Please make the appropriate changes for your system and try again.""").lstrip() - - def no_default_version_msg(self): - template = self.__no_default_msg - return template % (self.install_dir, os.environ.get('PYTHONPATH', '')) - - def install_site_py(self): - """Make sure there's a site.py in the target dir, if needed""" - - if self.sitepy_installed: - return # already did it, or don't need to - - sitepy = os.path.join(self.install_dir, "site.py") - source = resource_string("setuptools", "site-patch.py") - source = source.decode('utf-8') - current = "" - - if os.path.exists(sitepy): - log.debug("Checking existing site.py in %s", self.install_dir) - with io.open(sitepy) as strm: - current = strm.read() - - if not current.startswith('def __boot():'): - raise DistutilsError( - "%s is not a setuptools-generated site.py; please" - " remove it." 
% sitepy - ) - - if current != source: - log.info("Creating %s", sitepy) - if not self.dry_run: - ensure_directory(sitepy) - with io.open(sitepy, 'w', encoding='utf-8') as strm: - strm.write(source) - self.byte_compile([sitepy]) - - self.sitepy_installed = True - - def create_home_path(self): - """Create directories under ~.""" - if not self.user: - return - home = convert_path(os.path.expanduser("~")) - for name, path in six.iteritems(self.config_vars): - if path.startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0o700)" % path) - os.makedirs(path, 0o700) - - INSTALL_SCHEMES = dict( - posix=dict( - install_dir='$base/lib/python$py_version_short/site-packages', - script_dir='$base/bin', - ), - ) - - DEFAULT_SCHEME = dict( - install_dir='$base/Lib/site-packages', - script_dir='$base/Scripts', - ) - - def _expand(self, *attrs): - config_vars = self.get_finalized_command('install').config_vars - - if self.prefix: - # Set default install_dir/scripts from --prefix - config_vars = config_vars.copy() - config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME) - for attr, val in scheme.items(): - if getattr(self, attr, None) is None: - setattr(self, attr, val) - - from distutils.util import subst_vars - - for attr in attrs: - val = getattr(self, attr) - if val is not None: - val = subst_vars(val, config_vars) - if os.name == 'posix': - val = os.path.expanduser(val) - setattr(self, attr, val) - - -def _pythonpath(): - items = os.environ.get('PYTHONPATH', '').split(os.pathsep) - return filter(None, items) - - -def get_site_dirs(): - """ - Return a list of 'site' dirs - """ - - sitedirs = [] - - # start with PYTHONPATH - sitedirs.extend(_pythonpath()) - - prefixes = [sys.prefix] - if sys.exec_prefix != sys.prefix: - prefixes.append(sys.exec_prefix) - for prefix in prefixes: - if prefix: - if sys.platform in ('os2emx', 'riscos'): - sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - sitedirs.extend([ - os.path.join( - prefix, - "lib", - "python" + sys.version[:3], - "site-packages", - ), - os.path.join(prefix, "lib", "site-python"), - ]) - else: - sitedirs.extend([ - prefix, - os.path.join(prefix, "lib", "site-packages"), - ]) - if sys.platform == 'darwin': - # for framework builds *only* we add the standard Apple - # locations. 
Currently only per-user, but /Library and - # /Network/Library could be added too - if 'Python.framework' in prefix: - home = os.environ.get('HOME') - if home: - home_sp = os.path.join( - home, - 'Library', - 'Python', - sys.version[:3], - 'site-packages', - ) - sitedirs.append(home_sp) - lib_paths = get_path('purelib'), get_path('platlib') - for site_lib in lib_paths: - if site_lib not in sitedirs: - sitedirs.append(site_lib) - - if site.ENABLE_USER_SITE: - sitedirs.append(site.USER_SITE) - - try: - sitedirs.extend(site.getsitepackages()) - except AttributeError: - pass - - sitedirs = list(map(normalize_path, sitedirs)) - - return sitedirs - - -def expand_paths(inputs): - """Yield sys.path directories that might contain "old-style" packages""" - - seen = {} - - for dirname in inputs: - dirname = normalize_path(dirname) - if dirname in seen: - continue - - seen[dirname] = 1 - if not os.path.isdir(dirname): - continue - - files = os.listdir(dirname) - yield dirname, files - - for name in files: - if not name.endswith('.pth'): - # We only care about the .pth files - continue - if name in ('easy-install.pth', 'setuptools.pth'): - # Ignore .pth files that we control - continue - - # Read the .pth file - f = open(os.path.join(dirname, name)) - lines = list(yield_lines(f)) - f.close() - - # Yield existing non-dupe, non-import directory lines from it - for line in lines: - if not line.startswith("import"): - line = normalize_path(line.rstrip()) - if line not in seen: - seen[line] = 1 - if not os.path.isdir(line): - continue - yield line, os.listdir(line) - - -def extract_wininst_cfg(dist_filename): - """Extract configuration data from a bdist_wininst .exe - - Returns a configparser.RawConfigParser, or None - """ - f = open(dist_filename, 'rb') - try: - endrec = zipfile._EndRecData(f) - if endrec is None: - return None - - prepended = (endrec[9] - endrec[5]) - endrec[6] - if prepended < 12: # no wininst data here - return None - f.seek(prepended - 12) - - tag, cfglen, bmlen = struct.unpack("<iii", f.read(12)) - if tag not in (0x1234567A, 0x1234567B): - return None # not a valid tag - - f.seek(prepended - (12 + cfglen)) - init = {'version': '', 'target_version': ''} - cfg = configparser.RawConfigParser(init) - try: - part = f.read(cfglen) - # Read up to the first null byte. - config = part.split(b'\0', 1)[0] - # Now the config is in bytes, but for RawConfigParser, it should - # be text, so decode it. 
- config = config.decode(sys.getfilesystemencoding()) - cfg.readfp(six.StringIO(config)) - except configparser.Error: - return None - if not cfg.has_section('metadata') or not cfg.has_section('Setup'): - return None - return cfg - - finally: - f.close() - - -def get_exe_prefixes(exe_filename): - """Get exe->egg path translations for a given .exe file""" - - prefixes = [ - ('PURELIB/', ''), - ('PLATLIB/pywin32_system32', ''), - ('PLATLIB/', ''), - ('SCRIPTS/', 'EGG-INFO/scripts/'), - ('DATA/lib/site-packages', ''), - ] - z = zipfile.ZipFile(exe_filename) - try: - for info in z.infolist(): - name = info.filename - parts = name.split('/') - if len(parts) == 3 and parts[2] == 'PKG-INFO': - if parts[1].endswith('.egg-info'): - prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/')) - break - if len(parts) != 2 or not name.endswith('.pth'): - continue - if name.endswith('-nspkg.pth'): - continue - if parts[0].upper() in ('PURELIB', 'PLATLIB'): - contents = z.read(name) - if six.PY3: - contents = contents.decode() - for pth in yield_lines(contents): - pth = pth.strip().replace('\\', '/') - if not pth.startswith('import'): - prefixes.append((('%s/%s/' % (parts[0], pth)), '')) - finally: - z.close() - prefixes = [(x.lower(), y) for x, y in prefixes] - prefixes.sort() - prefixes.reverse() - return prefixes - - -class PthDistributions(Environment): - """A .pth file with Distribution paths in it""" - - dirty = False - - def __init__(self, filename, sitedirs=()): - self.filename = filename - self.sitedirs = list(map(normalize_path, sitedirs)) - self.basedir = normalize_path(os.path.dirname(self.filename)) - self._load() - Environment.__init__(self, [], None, None) - for path in yield_lines(self.paths): - list(map(self.add, find_distributions(path, True))) - - def _load(self): - self.paths = [] - saw_import = False - seen = dict.fromkeys(self.sitedirs) - if os.path.isfile(self.filename): - f = open(self.filename, 'rt') - for line in f: - if line.startswith('import'): - saw_import = True - continue - path = line.rstrip() - self.paths.append(path) - if not path.strip() or path.strip().startswith('#'): - continue - # skip non-existent paths, in case somebody deleted a package - # manually, and duplicate paths as well - path = self.paths[-1] = normalize_path( - os.path.join(self.basedir, path) - ) - if not os.path.exists(path) or path in seen: - self.paths.pop() # skip it - self.dirty = True # we cleaned up, so we're dirty now :) - continue - seen[path] = 1 - f.close() - - if self.paths and not saw_import: - self.dirty = True # ensure anything we touch has import wrappers - while self.paths and not self.paths[-1].strip(): - self.paths.pop() - - def save(self): - """Write changed .pth file back to disk""" - if not self.dirty: - return - - rel_paths = list(map(self.make_relative, self.paths)) - if rel_paths: - log.debug("Saving %s", self.filename) - lines = self._wrap_lines(rel_paths) - data = '\n'.join(lines) + '\n' - - if os.path.islink(self.filename): - os.unlink(self.filename) - with open(self.filename, 'wt') as f: - f.write(data) - - elif os.path.exists(self.filename): - log.debug("Deleting empty %s", self.filename) - os.unlink(self.filename) - - self.dirty = False - - @staticmethod - def _wrap_lines(lines): - return lines - - def add(self, dist): - """Add `dist` to the distribution map""" - new_path = ( - dist.location not in self.paths and ( - dist.location not in self.sitedirs or - # account for '.' 
being in PYTHONPATH - dist.location == os.getcwd() - ) - ) - if new_path: - self.paths.append(dist.location) - self.dirty = True - Environment.add(self, dist) - - def remove(self, dist): - """Remove `dist` from the distribution map""" - while dist.location in self.paths: - self.paths.remove(dist.location) - self.dirty = True - Environment.remove(self, dist) - - def make_relative(self, path): - npath, last = os.path.split(normalize_path(path)) - baselen = len(self.basedir) - parts = [last] - sep = os.altsep == '/' and '/' or os.sep - while len(npath) >= baselen: - if npath == self.basedir: - parts.append(os.curdir) - parts.reverse() - return sep.join(parts) - npath, last = os.path.split(npath) - parts.append(last) - else: - return path - - -class RewritePthDistributions(PthDistributions): - @classmethod - def _wrap_lines(cls, lines): - yield cls.prelude - for line in lines: - yield line - yield cls.postlude - - prelude = _one_liner(""" - import sys - sys.__plen = len(sys.path) - """) - postlude = _one_liner(""" - import sys - new = sys.path[sys.__plen:] - del sys.path[sys.__plen:] - p = getattr(sys, '__egginsert', 0) - sys.path[p:p] = new - sys.__egginsert = p + len(new) - """) - - -if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite': - PthDistributions = RewritePthDistributions - - -def _first_line_re(): - """ - Return a regular expression based on first_line_re suitable for matching - strings. - """ - if isinstance(first_line_re.pattern, str): - return first_line_re - - # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. - return re.compile(first_line_re.pattern.decode()) - - -def auto_chmod(func, arg, exc): - if func in [os.unlink, os.remove] and os.name == 'nt': - chmod(arg, stat.S_IWRITE) - return func(arg) - et, ev, _ = sys.exc_info() - six.reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg)))) - - -def update_dist_caches(dist_path, fix_zipimporter_caches): - """ - Fix any globally cached `dist_path` related data - - `dist_path` should be a path of a newly installed egg distribution (zipped - or unzipped). - - sys.path_importer_cache contains finder objects that have been cached when - importing data from the original distribution. Any such finders need to be - cleared since the replacement distribution might be packaged differently, - e.g. a zipped egg distribution might get replaced with an unzipped egg - folder or vice versa. Having the old finders cached may then cause Python - to attempt loading modules from the replacement distribution using an - incorrect loader. - - zipimport.zipimporter objects are Python loaders charged with importing - data packaged inside zip archives. If stale loaders referencing the - original distribution, are left behind, they can fail to load modules from - the replacement distribution. E.g. if an old zipimport.zipimporter instance - is used to load data from a new zipped egg archive, it may cause the - operation to attempt to locate the requested data in the wrong location - - one indicated by the original distribution's zip archive directory - information. Such an operation may then fail outright, e.g. report having - read a 'bad local file header', or even worse, it may fail silently & - return invalid data. - - zipimport._zip_directory_cache contains cached zip archive directory - information for all existing zipimport.zipimporter instances and all such - instances connected to the same archive share the same cached directory - information. 
- - If asked, and the underlying Python implementation allows it, we can fix - all existing zipimport.zipimporter instances instead of having to track - them down and remove them one by one, by updating their shared cached zip - archive directory information. This, of course, assumes that the - replacement distribution is packaged as a zipped egg. - - If not asked to fix existing zipimport.zipimporter instances, we still do - our best to clear any remaining zipimport.zipimporter related cached data - that might somehow later get used when attempting to load data from the new - distribution and thus cause such load operations to fail. Note that when - tracking down such remaining stale data, we can not catch every conceivable - usage from here, and we clear only those that we know of and have found to - cause problems if left alive. Any remaining caches should be updated by - whomever is in charge of maintaining them, i.e. they should be ready to - handle us replacing their zip archives with new distributions at runtime. - - """ - # There are several other known sources of stale zipimport.zipimporter - # instances that we do not clear here, but might if ever given a reason to - # do so: - # * Global setuptools pkg_resources.working_set (a.k.a. 'master working - # set') may contain distributions which may in turn contain their - # zipimport.zipimporter loaders. - # * Several zipimport.zipimporter loaders held by local variables further - # up the function call stack when running the setuptools installation. - # * Already loaded modules may have their __loader__ attribute set to the - # exact loader instance used when importing them. Python 3.4 docs state - # that this information is intended mostly for introspection and so is - # not expected to cause us problems. - normalized_path = normalize_path(dist_path) - _uncache(normalized_path, sys.path_importer_cache) - if fix_zipimporter_caches: - _replace_zip_directory_cache_data(normalized_path) - else: - # Here, even though we do not want to fix existing and now stale - # zipimporter cache information, we still want to remove it. Related to - # Python's zip archive directory information cache, we clear each of - # its stale entries in two phases: - # 1. Clear the entry so attempting to access zip archive information - # via any existing stale zipimport.zipimporter instances fails. - # 2. Remove the entry from the cache so any newly constructed - # zipimport.zipimporter instances do not end up using old stale - # zip archive directory information. - # This whole stale data removal step does not seem strictly necessary, - # but has been left in because it was done before we started replacing - # the zip archive directory information cache content if possible, and - # there are no relevant unit tests that we can depend on to tell us if - # this is really needed. - _remove_and_clear_zip_directory_cache_data(normalized_path) - - -def _collect_zipimporter_cache_entries(normalized_path, cache): - """ - Return zipimporter cache entry keys related to a given normalized path. - - Alternative path spellings (e.g. those using different character case or - those using alternative path separators) related to the same path are - included. Any sub-path entries are included as well, i.e. those - corresponding to zip archives embedded in other zip archives. 
- - """ - result = [] - prefix_len = len(normalized_path) - for p in cache: - np = normalize_path(p) - if (np.startswith(normalized_path) and - np[prefix_len:prefix_len + 1] in (os.sep, '')): - result.append(p) - return result - - -def _update_zipimporter_cache(normalized_path, cache, updater=None): - """ - Update zipimporter cache data for a given normalized path. - - Any sub-path entries are processed as well, i.e. those corresponding to zip - archives embedded in other zip archives. - - Given updater is a callable taking a cache entry key and the original entry - (after already removing the entry from the cache), and expected to update - the entry and possibly return a new one to be inserted in its place. - Returning None indicates that the entry should not be replaced with a new - one. If no updater is given, the cache entries are simply removed without - any additional processing, the same as if the updater simply returned None. - - """ - for p in _collect_zipimporter_cache_entries(normalized_path, cache): - # N.B. pypy's custom zipimport._zip_directory_cache implementation does - # not support the complete dict interface: - # * Does not support item assignment, thus not allowing this function - # to be used only for removing existing cache entries. - # * Does not support the dict.pop() method, forcing us to use the - # get/del patterns instead. For more detailed information see the - # following links: - # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420 - # http://bit.ly/2h9itJX - old_entry = cache[p] - del cache[p] - new_entry = updater and updater(p, old_entry) - if new_entry is not None: - cache[p] = new_entry - - -def _uncache(normalized_path, cache): - _update_zipimporter_cache(normalized_path, cache) - - -def _remove_and_clear_zip_directory_cache_data(normalized_path): - def clear_and_remove_cached_zip_archive_directory_data(path, old_entry): - old_entry.clear() - - _update_zipimporter_cache( - normalized_path, zipimport._zip_directory_cache, - updater=clear_and_remove_cached_zip_archive_directory_data) - - -# PyPy Python implementation does not allow directly writing to the -# zipimport._zip_directory_cache and so prevents us from attempting to correct -# its content. The best we can do there is clear the problematic cache content -# and have PyPy repopulate it as needed. The downside is that if there are any -# stale zipimport.zipimporter instances laying around, attempting to use them -# will fail due to not having its zip archive directory information available -# instead of being automatically corrected to use the new correct zip archive -# directory information. -if '__pypy__' in sys.builtin_module_names: - _replace_zip_directory_cache_data = \ - _remove_and_clear_zip_directory_cache_data -else: - - def _replace_zip_directory_cache_data(normalized_path): - def replace_cached_zip_archive_directory_data(path, old_entry): - # N.B. In theory, we could load the zip directory information just - # once for all updated path spellings, and then copy it locally and - # update its contained path strings to contain the correct - # spelling, but that seems like a way too invasive move (this cache - # structure is not officially documented anywhere and could in - # theory change with new Python releases) for no significant - # benefit. 
- old_entry.clear() - zipimport.zipimporter(path) - old_entry.update(zipimport._zip_directory_cache[path]) - return old_entry - - _update_zipimporter_cache( - normalized_path, zipimport._zip_directory_cache, - updater=replace_cached_zip_archive_directory_data) - - -def is_python(text, filename='<string>'): - "Is this string a valid Python script?" - try: - compile(text, filename, 'exec') - except (SyntaxError, TypeError): - return False - else: - return True - - -def is_sh(executable): - """Determine if the specified executable is a .sh (contains a #! line)""" - try: - with io.open(executable, encoding='latin-1') as fp: - magic = fp.read(2) - except (OSError, IOError): - return executable - return magic == '#!' - - -def nt_quote_arg(arg): - """Quote a command line argument according to Windows parsing rules""" - return subprocess.list2cmdline([arg]) - - -def is_python_script(script_text, filename): - """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. - """ - if filename.endswith('.py') or filename.endswith('.pyw'): - return True # extension says it's Python - if is_python(script_text, filename): - return True # it's syntactically valid Python - if script_text.startswith('#!'): - # It begins with a '#!' line, so check if 'python' is in it somewhere - return 'python' in script_text.splitlines()[0].lower() - - return False # Not any Python I can recognize - - -try: - from os import chmod as _chmod -except ImportError: - # Jython compatibility - def _chmod(*args): - pass - - -def chmod(path, mode): - log.debug("changing mode of %s to %o", path, mode) - try: - _chmod(path, mode) - except os.error as e: - log.debug("chmod failed: %s", e) - - -class CommandSpec(list): - """ - A command spec for a #! header, specified as a list of arguments akin to - those passed to Popen. - """ - - options = [] - split_args = dict() - - @classmethod - def best(cls): - """ - Choose the best CommandSpec class based on environmental conditions. - """ - return cls - - @classmethod - def _sys_executable(cls): - _default = os.path.normpath(sys.executable) - return os.environ.get('__PYVENV_LAUNCHER__', _default) - - @classmethod - def from_param(cls, param): - """ - Construct a CommandSpec from a parameter to build_scripts, which may - be None. - """ - if isinstance(param, cls): - return param - if isinstance(param, list): - return cls(param) - if param is None: - return cls.from_environment() - # otherwise, assume it's a string. - return cls.from_string(param) - - @classmethod - def from_environment(cls): - return cls([cls._sys_executable()]) - - @classmethod - def from_string(cls, string): - """ - Construct a command spec from a simple string representing a command - line parseable by shlex.split. - """ - items = shlex.split(string, **cls.split_args) - return cls(items) - - def install_options(self, script_text): - self.options = shlex.split(self._extract_options(script_text)) - cmdline = subprocess.list2cmdline(self) - if not isascii(cmdline): - self.options[:0] = ['-x'] - - @staticmethod - def _extract_options(orig_script): - """ - Extract any options from the first line of the script. 
- """ - first = (orig_script + '\n').splitlines()[0] - match = _first_line_re().match(first) - options = match.group(1) or '' if match else '' - return options.strip() - - def as_header(self): - return self._render(self + list(self.options)) - - @staticmethod - def _strip_quotes(item): - _QUOTES = '"\'' - for q in _QUOTES: - if item.startswith(q) and item.endswith(q): - return item[1:-1] - return item - - @staticmethod - def _render(items): - cmdline = subprocess.list2cmdline( - CommandSpec._strip_quotes(item.strip()) for item in items) - return '#!' + cmdline + '\n' - - -# For pbr compat; will be removed in a future version. -sys_executable = CommandSpec._sys_executable() - - -class WindowsCommandSpec(CommandSpec): - split_args = dict(posix=False) - - -class ScriptWriter: - """ - Encapsulates behavior around writing entry point scripts for console and - gui apps. - """ - - template = textwrap.dedent(r""" - # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r - __requires__ = %(spec)r - import re - import sys - from pkg_resources import load_entry_point - - if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit( - load_entry_point(%(spec)r, %(group)r, %(name)r)() - ) - """).lstrip() - - command_spec_class = CommandSpec - - @classmethod - def get_script_args(cls, dist, executable=None, wininst=False): - # for backward compatibility - warnings.warn("Use get_args", EasyInstallDeprecationWarning) - writer = (WindowsScriptWriter if wininst else ScriptWriter).best() - header = cls.get_script_header("", executable, wininst) - return writer.get_args(dist, header) - - @classmethod - def get_script_header(cls, script_text, executable=None, wininst=False): - # for backward compatibility - warnings.warn("Use get_header", EasyInstallDeprecationWarning, stacklevel=2) - if wininst: - executable = "python.exe" - return cls.get_header(script_text, executable) - - @classmethod - def get_args(cls, dist, header=None): - """ - Yield write_script() argument tuples for a distribution's - console_scripts and gui_scripts entry points. - """ - if header is None: - header = cls.get_header() - spec = str(dist.as_requirement()) - for type_ in 'console', 'gui': - group = type_ + '_scripts' - for name, ep in dist.get_entry_map(group).items(): - cls._ensure_safe_name(name) - script_text = cls.template % locals() - args = cls._get_script_args(type_, name, header, script_text) - for res in args: - yield res - - @staticmethod - def _ensure_safe_name(name): - """ - Prevent paths in *_scripts entry point names. - """ - has_path_sep = re.search(r'[\\/]', name) - if has_path_sep: - raise ValueError("Path separators not allowed in script names") - - @classmethod - def get_writer(cls, force_windows): - # for backward compatibility - warnings.warn("Use best", EasyInstallDeprecationWarning) - return WindowsScriptWriter.best() if force_windows else cls.best() - - @classmethod - def best(cls): - """ - Select the best ScriptWriter for this environment. - """ - if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'): - return WindowsScriptWriter.best() - else: - return cls - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - # Simply write the stub with no extension. - yield (name, header + script_text) - - @classmethod - def get_header(cls, script_text="", executable=None): - """Create a #! 
line, getting options (if any) from script_text""" - cmd = cls.command_spec_class.best().from_param(executable) - cmd.install_options(script_text) - return cmd.as_header() - - -class WindowsScriptWriter(ScriptWriter): - command_spec_class = WindowsCommandSpec - - @classmethod - def get_writer(cls): - # for backward compatibility - warnings.warn("Use best", EasyInstallDeprecationWarning) - return cls.best() - - @classmethod - def best(cls): - """ - Select the best ScriptWriter suitable for Windows - """ - writer_lookup = dict( - executable=WindowsExecutableLauncherWriter, - natural=cls, - ) - # for compatibility, use the executable launcher by default - launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable') - return writer_lookup[launcher] - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - "For Windows, add a .py extension" - ext = dict(console='.pya', gui='.pyw')[type_] - if ext not in os.environ['PATHEXT'].lower().split(';'): - msg = ( - "{ext} not listed in PATHEXT; scripts will not be " - "recognized as executables." - ).format(**locals()) - warnings.warn(msg, UserWarning) - old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] - old.remove(ext) - header = cls._adjust_header(type_, header) - blockers = [name + x for x in old] - yield name + ext, header + script_text, 't', blockers - - @classmethod - def _adjust_header(cls, type_, orig_header): - """ - Make sure 'pythonw' is used for gui and and 'python' is used for - console (regardless of what sys.executable is). - """ - pattern = 'pythonw.exe' - repl = 'python.exe' - if type_ == 'gui': - pattern, repl = repl, pattern - pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE) - new_header = pattern_ob.sub(string=orig_header, repl=repl) - return new_header if cls._use_header(new_header) else orig_header - - @staticmethod - def _use_header(new_header): - """ - Should _adjust_header use the replaced header? - - On non-windows systems, always use. On - Windows systems, only use the replaced header if it resolves - to an executable on the system. - """ - clean_header = new_header[2:-1].strip('"') - return sys.platform != 'win32' or find_executable(clean_header) - - -class WindowsExecutableLauncherWriter(WindowsScriptWriter): - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - """ - For Windows, add a .py extension and an .exe launcher - """ - if type_ == 'gui': - launcher_type = 'gui' - ext = '-script.pyw' - old = ['.pyw'] - else: - launcher_type = 'cli' - ext = '-script.py' - old = ['.py', '.pyc', '.pyo'] - hdr = cls._adjust_header(type_, header) - blockers = [name + x for x in old] - yield (name + ext, hdr + script_text, 't', blockers) - yield ( - name + '.exe', get_win_launcher(launcher_type), - 'b' # write in binary mode - ) - if not is_64bit(): - # install a manifest for the launcher to prevent Windows - # from detecting it as an installer (which it will for - # launchers like easy_install.exe). Consider only - # adding a manifest for launchers detected as installers. - # See Distribute #143 for details. - m_name = name + '.exe.manifest' - yield (m_name, load_launcher_manifest(name), 't') - - -# for backward-compatibility -get_script_args = ScriptWriter.get_script_args -get_script_header = ScriptWriter.get_script_header - - -def get_win_launcher(type): - """ - Load the Windows launcher (executable) suitable for launching a script. - - `type` should be either 'cli' or 'gui' - - Returns the executable as a byte string. 
- """ - launcher_fn = '%s.exe' % type - if is_64bit(): - launcher_fn = launcher_fn.replace(".", "-64.") - else: - launcher_fn = launcher_fn.replace(".", "-32.") - return resource_string('setuptools', launcher_fn) - - -def load_launcher_manifest(name): - manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') - if six.PY2: - return manifest % vars() - else: - return manifest.decode('utf-8') % vars() - - -def rmtree(path, ignore_errors=False, onerror=auto_chmod): - return shutil.rmtree(path, ignore_errors, onerror) - - -def current_umask(): - tmp = os.umask(0o022) - os.umask(tmp) - return tmp - - -def bootstrap(): - # This function is called when setuptools*.egg is run using /bin/sh - import setuptools - - argv0 = os.path.dirname(setuptools.__path__[0]) - sys.argv[0] = argv0 - sys.argv.append(argv0) - main() - - -def main(argv=None, **kw): - from setuptools import setup - from setuptools.dist import Distribution - - class DistributionWithoutHelpCommands(Distribution): - common_usage = "" - - def _show_help(self, *args, **kw): - with _patch_usage(): - Distribution._show_help(self, *args, **kw) - - if argv is None: - argv = sys.argv[1:] - - with _patch_usage(): - setup( - script_args=['-q', 'easy_install', '-v'] + argv, - script_name=sys.argv[0] or 'easy_install', - distclass=DistributionWithoutHelpCommands, - **kw - ) - - -@contextlib.contextmanager -def _patch_usage(): - import distutils.core - USAGE = textwrap.dedent(""" - usage: %(script)s [options] requirement_or_url ... - or: %(script)s --help - """).lstrip() - - def gen_usage(script_name): - return USAGE % dict( - script=os.path.basename(script_name), - ) - - saved = distutils.core.gen_usage - distutils.core.gen_usage = gen_usage - try: - yield - finally: - distutils.core.gen_usage = saved - -class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): - """Class for warning about deprecations in EasyInstall in SetupTools. Not ignored by default, unlike DeprecationWarning.""" - diff --git a/env/lib/python2.7/site-packages/setuptools/command/easy_install.pyc b/env/lib/python2.7/site-packages/setuptools/command/easy_install.pyc deleted file mode 100644 index 739fe662..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/easy_install.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/egg_info.pyc b/env/lib/python2.7/site-packages/setuptools/command/egg_info.pyc deleted file mode 100644 index b41f53b8..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/egg_info.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/install.py b/env/lib/python2.7/site-packages/setuptools/command/install.py deleted file mode 100644 index 31a5ddb5..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/install.py +++ /dev/null @@ -1,125 +0,0 @@ -from distutils.errors import DistutilsArgError -import inspect -import glob -import warnings -import platform -import distutils.command.install as orig - -import setuptools - -# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for -# now. 
See https://github.com/pypa/setuptools/issues/199/ -_install = orig.install - - -class install(orig.install): - """Use easy_install to install the package, w/dependencies""" - - user_options = orig.install.user_options + [ - ('old-and-unmanageable', None, "Try not to use this!"), - ('single-version-externally-managed', None, - "used by system package builders to create 'flat' eggs"), - ] - boolean_options = orig.install.boolean_options + [ - 'old-and-unmanageable', 'single-version-externally-managed', - ] - new_commands = [ - ('install_egg_info', lambda self: True), - ('install_scripts', lambda self: True), - ] - _nc = dict(new_commands) - - def initialize_options(self): - orig.install.initialize_options(self) - self.old_and_unmanageable = None - self.single_version_externally_managed = None - - def finalize_options(self): - orig.install.finalize_options(self) - if self.root: - self.single_version_externally_managed = True - elif self.single_version_externally_managed: - if not self.root and not self.record: - raise DistutilsArgError( - "You must specify --record or --root when building system" - " packages" - ) - - def handle_extra_path(self): - if self.root or self.single_version_externally_managed: - # explicit backward-compatibility mode, allow extra_path to work - return orig.install.handle_extra_path(self) - - # Ignore extra_path when installing an egg (or being run by another - # command without --root or --single-version-externally-managed - self.path_file = None - self.extra_dirs = '' - - def run(self): - # Explicit request for old-style install? Just do it - if self.old_and_unmanageable or self.single_version_externally_managed: - return orig.install.run(self) - - if not self._called_from_setup(inspect.currentframe()): - # Run in backward-compatibility mode to support bdist_* commands. - orig.install.run(self) - else: - self.do_egg_install() - - @staticmethod - def _called_from_setup(run_frame): - """ - Attempt to detect whether run() was called from setup() or by another - command. If called by setup(), the parent caller will be the - 'run_command' method in 'distutils.dist', and *its* caller will be - the 'run_commands' method. If called any other way, the - immediate caller *might* be 'run_command', but it won't have been - called by 'run_commands'. Return True in that case or if a call stack - is unavailable. Return False otherwise. - """ - if run_frame is None: - msg = "Call stack not available. bdist_* commands may fail." - warnings.warn(msg) - if platform.python_implementation() == 'IronPython': - msg = "For best results, pass -X:Frames to enable call stack." - warnings.warn(msg) - return True - res = inspect.getouterframes(run_frame)[2] - caller, = res[:1] - info = inspect.getframeinfo(caller) - caller_module = caller.f_globals.get('__name__', '') - return ( - caller_module == 'distutils.dist' - and info.function == 'run_commands' - ) - - def do_egg_install(self): - - easy_install = self.distribution.get_command_class('easy_install') - - cmd = easy_install( - self.distribution, args="x", root=self.root, record=self.record, - ) - cmd.ensure_finalized() # finalize before bdist_egg munges install cmd - cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed - - # pick up setup-dir .egg files only: no .egg-info - cmd.package_index.scan(glob.glob('*.egg')) - - self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] - - if setuptools.bootstrap_install_from: - # Bootstrap self-installation of setuptools - args.insert(0, setuptools.bootstrap_install_from) - - cmd.args = args - cmd.run() - setuptools.bootstrap_install_from = None - - -# XXX Python 3.1 doesn't see _nc if this is inside the class -install.sub_commands = ( - [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] + - install.new_commands -) diff --git a/env/lib/python2.7/site-packages/setuptools/command/install.pyc b/env/lib/python2.7/site-packages/setuptools/command/install.pyc deleted file mode 100644 index 7749eaea..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/install.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/install_egg_info.pyc b/env/lib/python2.7/site-packages/setuptools/command/install_egg_info.pyc deleted file mode 100644 index 3603c2dd..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/install_egg_info.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/install_lib.py b/env/lib/python2.7/site-packages/setuptools/command/install_lib.py deleted file mode 100644 index 2b31c3e3..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/install_lib.py +++ /dev/null @@ -1,121 +0,0 @@ -import os -import imp -from itertools import product, starmap -import distutils.command.install_lib as orig - - -class install_lib(orig.install_lib): - """Don't add compiled flags to filenames of non-Python files""" - - def run(self): - self.build() - outfiles = self.install() - if outfiles is not None: - # always compile, in case we have any extension stubs to deal with - self.byte_compile(outfiles) - - def get_exclusions(self): - """ - Return a collections.Sized collections.Container of paths to be - excluded for single_version_externally_managed installations. - """ - all_packages = ( - pkg - for ns_pkg in self._get_SVEM_NSPs() - for pkg in self._all_packages(ns_pkg) - ) - - excl_specs = product(all_packages, self._gen_exclusion_paths()) - return set(starmap(self._exclude_pkg_path, excl_specs)) - - def _exclude_pkg_path(self, pkg, exclusion_path): - """ - Given a package name and exclusion path within that package, - compute the full exclusion path. - """ - parts = pkg.split('.') + [exclusion_path] - return os.path.join(self.install_dir, *parts) - - @staticmethod - def _all_packages(pkg_name): - """ - >>> list(install_lib._all_packages('foo.bar.baz')) - ['foo.bar.baz', 'foo.bar', 'foo'] - """ - while pkg_name: - yield pkg_name - pkg_name, sep, child = pkg_name.rpartition('.') - - def _get_SVEM_NSPs(self): - """ - Get namespace packages (list) but only for - single_version_externally_managed installations and empty otherwise. - """ - # TODO: is it necessary to short-circuit here? i.e. what's the cost - # if get_finalized_command is called even when namespace_packages is - # False? - if not self.distribution.namespace_packages: - return [] - - install_cmd = self.get_finalized_command('install') - svem = install_cmd.single_version_externally_managed - - return self.distribution.namespace_packages if svem else [] - - @staticmethod - def _gen_exclusion_paths(): - """ - Generate file paths to be excluded for namespace packages (bytecode - cache files). 
- """ - # always exclude the package module itself - yield '__init__.py' - - yield '__init__.pyc' - yield '__init__.pyo' - - if not hasattr(imp, 'get_tag'): - return - - base = os.path.join('__pycache__', '__init__.' + imp.get_tag()) - yield base + '.pyc' - yield base + '.pyo' - yield base + '.opt-1.pyc' - yield base + '.opt-2.pyc' - - def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 - ): - assert preserve_mode and preserve_times and not preserve_symlinks - exclude = self.get_exclusions() - - if not exclude: - return orig.install_lib.copy_tree(self, infile, outfile) - - # Exclude namespace package __init__.py* files from the output - - from setuptools.archive_util import unpack_directory - from distutils import log - - outfiles = [] - - def pf(src, dst): - if dst in exclude: - log.warn("Skipping installation of %s (namespace package)", - dst) - return False - - log.info("copying %s -> %s", src, os.path.dirname(dst)) - outfiles.append(dst) - return dst - - unpack_directory(infile, outfile, pf) - return outfiles - - def get_outputs(self): - outputs = orig.install_lib.get_outputs(self) - exclude = self.get_exclusions() - if exclude: - return [f for f in outputs if f not in exclude] - return outputs diff --git a/env/lib/python2.7/site-packages/setuptools/command/install_lib.pyc b/env/lib/python2.7/site-packages/setuptools/command/install_lib.pyc deleted file mode 100644 index 35779790..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/install_lib.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/install_scripts.py b/env/lib/python2.7/site-packages/setuptools/command/install_scripts.py deleted file mode 100644 index 16234273..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/install_scripts.py +++ /dev/null @@ -1,65 +0,0 @@ -from distutils import log -import distutils.command.install_scripts as orig -import os -import sys - -from pkg_resources import Distribution, PathMetadata, ensure_directory - - -class install_scripts(orig.install_scripts): - """Do normal script install, plus any egg_info wrapper scripts""" - - def initialize_options(self): - orig.install_scripts.initialize_options(self) - self.no_ep = False - - def run(self): - import setuptools.command.easy_install as ei - - self.run_command("egg_info") - if self.distribution.scripts: - orig.install_scripts.run(self) # run first to set up self.outfiles - else: - self.outfiles = [] - if self.no_ep: - # don't install entry point scripts into .egg file! - return - - ei_cmd = self.get_finalized_command("egg_info") - dist = Distribution( - ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), - ei_cmd.egg_name, ei_cmd.egg_version, - ) - bs_cmd = self.get_finalized_command('build_scripts') - exec_param = getattr(bs_cmd, 'executable', None) - bw_cmd = self.get_finalized_command("bdist_wininst") - is_wininst = getattr(bw_cmd, '_is_running', False) - writer = ei.ScriptWriter - if is_wininst: - exec_param = "python.exe" - writer = ei.WindowsScriptWriter - if exec_param == sys.executable: - # In case the path to the Python executable contains a space, wrap - # it so it's not split up. 
- exec_param = [exec_param] - # resolve the writer to the environment - writer = writer.best() - cmd = writer.command_spec_class.best().from_param(exec_param) - for args in writer.get_args(dist, cmd.as_header()): - self.write_script(*args) - - def write_script(self, script_name, contents, mode="t", *ignored): - """Write an executable file to the scripts directory""" - from setuptools.command.easy_install import chmod, current_umask - - log.info("Installing %s script to %s", script_name, self.install_dir) - target = os.path.join(self.install_dir, script_name) - self.outfiles.append(target) - - mask = current_umask() - if not self.dry_run: - ensure_directory(target) - f = open(target, "w" + mode) - f.write(contents) - f.close() - chmod(target, 0o777 - mask) diff --git a/env/lib/python2.7/site-packages/setuptools/command/install_scripts.pyc b/env/lib/python2.7/site-packages/setuptools/command/install_scripts.pyc deleted file mode 100644 index 83831b25..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/install_scripts.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/py36compat.pyc b/env/lib/python2.7/site-packages/setuptools/command/py36compat.pyc deleted file mode 100644 index a51a95aa..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/py36compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/register.py b/env/lib/python2.7/site-packages/setuptools/command/register.py deleted file mode 100644 index 98bc0156..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/register.py +++ /dev/null @@ -1,18 +0,0 @@ -from distutils import log -import distutils.command.register as orig - - -class register(orig.register): - __doc__ = orig.register.__doc__ - - def run(self): - try: - # Make sure that we are using valid current name/version info - self.run_command('egg_info') - orig.register.run(self) - finally: - self.announce( - "WARNING: Registering is deprecated, use twine to " - "upload instead (https://pypi.org/p/twine/)", - log.WARN - ) diff --git a/env/lib/python2.7/site-packages/setuptools/command/register.pyc b/env/lib/python2.7/site-packages/setuptools/command/register.pyc deleted file mode 100644 index 0b6b0d31..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/register.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/rotate.pyc b/env/lib/python2.7/site-packages/setuptools/command/rotate.pyc deleted file mode 100644 index fe3819d3..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/rotate.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/saveopts.pyc b/env/lib/python2.7/site-packages/setuptools/command/saveopts.pyc deleted file mode 100644 index a090f72d..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/saveopts.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/sdist.py b/env/lib/python2.7/site-packages/setuptools/command/sdist.py deleted file mode 100644 index dc253981..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/sdist.py +++ /dev/null @@ -1,221 +0,0 @@ -from distutils import log -import distutils.command.sdist as orig -import os -import sys -import io -import contextlib - -from setuptools.extern import six - -from .py36compat import sdist_add_defaults - -import pkg_resources - -_default_revctrl = list - - -def walk_revctrl(dirname=''): - 
"""Find all files under revision control""" - for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): - for item in ep.load()(dirname): - yield item - - -class sdist(sdist_add_defaults, orig.sdist): - """Smart sdist that finds anything supported by revision control""" - - user_options = [ - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ] - - negative_opt = {} - - README_EXTENSIONS = ['', '.rst', '.txt', '.md'] - READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS) - - def run(self): - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - self.filelist = ei_cmd.filelist - self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt')) - self.check_readme() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - self.make_distribution() - - dist_files = getattr(self.distribution, 'dist_files', []) - for file in self.archive_files: - data = ('sdist', '', file) - if data not in dist_files: - dist_files.append(data) - - def initialize_options(self): - orig.sdist.initialize_options(self) - - self._default_to_gztar() - - def _default_to_gztar(self): - # only needed on Python prior to 3.6. - if sys.version_info >= (3, 6, 0, 'beta', 1): - return - self.formats = ['gztar'] - - def make_distribution(self): - """ - Workaround for #516 - """ - with self._remove_os_link(): - orig.sdist.make_distribution(self) - - @staticmethod - @contextlib.contextmanager - def _remove_os_link(): - """ - In a context, remove and restore os.link if it exists - """ - - class NoValue: - pass - - orig_val = getattr(os, 'link', NoValue) - try: - del os.link - except Exception: - pass - try: - yield - finally: - if orig_val is not NoValue: - setattr(os, 'link', orig_val) - - def __read_template_hack(self): - # This grody hack closes the template file (MANIFEST.in) if an - # exception occurs during read_template. - # Doing so prevents an error when easy_install attempts to delete the - # file. - try: - orig.sdist.read_template(self) - except Exception: - _, _, tb = sys.exc_info() - tb.tb_next.tb_frame.f_locals['template'].close() - raise - - # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle - # has been fixed, so only override the method if we're using an earlier - # Python. - has_leaky_handle = ( - sys.version_info < (2, 7, 2) - or (3, 0) <= sys.version_info < (3, 1, 4) - or (3, 2) <= sys.version_info < (3, 2, 1) - ) - if has_leaky_handle: - read_template = __read_template_hack - - def _add_defaults_python(self): - """getting python files""" - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - self.filelist.extend(build_py.get_source_files()) - # This functionality is incompatible with include_package_data, and - # will in fact create an infinite recursion if include_package_data - # is True. 
Use of include_package_data will imply that - # distutils-style automatic handling of package_data is disabled - if not self.distribution.include_package_data: - for _, src_dir, _, filenames in build_py.data_files: - self.filelist.extend([os.path.join(src_dir, filename) - for filename in filenames]) - - def _add_defaults_data_files(self): - try: - if six.PY2: - sdist_add_defaults._add_defaults_data_files(self) - else: - super()._add_defaults_data_files() - except TypeError: - log.warn("data_files contains unexpected objects") - - def check_readme(self): - for f in self.READMES: - if os.path.exists(f): - return - else: - self.warn( - "standard file not found: should have one of " + - ', '.join(self.READMES) - ) - - def make_release_tree(self, base_dir, files): - orig.sdist.make_release_tree(self, base_dir, files) - - # Save any egg_info command line options used to create this sdist - dest = os.path.join(base_dir, 'setup.cfg') - if hasattr(os, 'link') and os.path.exists(dest): - # unlink and re-copy, since it might be hard-linked, and - # we don't want to change the source version - os.unlink(dest) - self.copy_file('setup.cfg', dest) - - self.get_finalized_command('egg_info').save_version_info(dest) - - def _manifest_is_not_generated(self): - # check for special comment used in 2.7.1 and higher - if not os.path.isfile(self.manifest): - return False - - with io.open(self.manifest, 'rb') as fp: - first_line = fp.readline() - return (first_line != - '# file GENERATED by distutils, do NOT edit\n'.encode()) - - def read_manifest(self): - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. - """ - log.info("reading manifest file '%s'", self.manifest) - manifest = open(self.manifest, 'rb') - for line in manifest: - # The manifest must contain UTF-8. See #303. - if six.PY3: - try: - line = line.decode('UTF-8') - except UnicodeDecodeError: - log.warn("%r not UTF-8 decodable -- skipping" % line) - continue - # ignore comments and blank lines - line = line.strip() - if line.startswith('#') or not line: - continue - self.filelist.append(line) - manifest.close() - - def check_license(self): - """Checks if license_file' is configured and adds it to - 'self.filelist' if the value contains a valid path. 
- """ - - opts = self.distribution.get_option_dict('metadata') - - # ignore the source of the value - _, license_file = opts.get('license_file', (None, None)) - - if license_file is None: - log.debug("'license_file' option was not specified") - return - - if not os.path.exists(license_file): - log.warn("warning: Failed to find the configured license file '%s'", - license_file) - return - - self.filelist.append(license_file) diff --git a/env/lib/python2.7/site-packages/setuptools/command/sdist.pyc b/env/lib/python2.7/site-packages/setuptools/command/sdist.pyc deleted file mode 100644 index 7042d175..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/sdist.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/setopt.pyc b/env/lib/python2.7/site-packages/setuptools/command/setopt.pyc deleted file mode 100644 index 5ab40ac9..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/setopt.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/test.py b/env/lib/python2.7/site-packages/setuptools/command/test.py deleted file mode 100644 index 973e4eb2..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/test.py +++ /dev/null @@ -1,271 +0,0 @@ -import os -import operator -import sys -import contextlib -import itertools -import unittest -from distutils.errors import DistutilsError, DistutilsOptionError -from distutils import log -from unittest import TestLoader - -from setuptools.extern import six -from setuptools.extern.six.moves import map, filter - -from pkg_resources import (resource_listdir, resource_exists, normalize_path, - working_set, _namespace_packages, evaluate_marker, - add_activation_listener, require, EntryPoint) -from setuptools import Command -from .build_py import _unique_everseen - -__metaclass__ = type - - -class ScanningLoader(TestLoader): - - def __init__(self): - TestLoader.__init__(self) - self._visited = set() - - def loadTestsFromModule(self, module, pattern=None): - """Return a suite of all tests cases contained in the given module - - If the module is a package, load tests from all the modules in it. - If the module has an ``additional_tests`` function, call it and add - the return value to the tests. - """ - if module in self._visited: - return None - self._visited.add(module) - - tests = [] - tests.append(TestLoader.loadTestsFromModule(self, module)) - - if hasattr(module, "additional_tests"): - tests.append(module.additional_tests()) - - if hasattr(module, '__path__'): - for file in resource_listdir(module.__name__, ''): - if file.endswith('.py') and file != '__init__.py': - submodule = module.__name__ + '.' + file[:-3] - else: - if resource_exists(module.__name__, file + '/__init__.py'): - submodule = module.__name__ + '.' + file - else: - continue - tests.append(self.loadTestsFromName(submodule)) - - if len(tests) != 1: - return self.suiteClass(tests) - else: - return tests[0] # don't create a nested suite for only one return - - -# adapted from jaraco.classes.properties:NonDataProperty -class NonDataProperty: - def __init__(self, fget): - self.fget = fget - - def __get__(self, obj, objtype=None): - if obj is None: - return self - return self.fget(obj) - - -class test(Command): - """Command to run unit tests after in-place build""" - - description = "run unit tests after in-place build" - - user_options = [ - ('test-module=', 'm', "Run 'test_suite' in specified module"), - ('test-suite=', 's', - "Run single test, case or suite (e.g. 
'module.test_suite')"), - ('test-runner=', 'r', "Test runner to use"), - ] - - def initialize_options(self): - self.test_suite = None - self.test_module = None - self.test_loader = None - self.test_runner = None - - def finalize_options(self): - - if self.test_suite and self.test_module: - msg = "You may specify a module or a suite, but not both" - raise DistutilsOptionError(msg) - - if self.test_suite is None: - if self.test_module is None: - self.test_suite = self.distribution.test_suite - else: - self.test_suite = self.test_module + ".test_suite" - - if self.test_loader is None: - self.test_loader = getattr(self.distribution, 'test_loader', None) - if self.test_loader is None: - self.test_loader = "setuptools.command.test:ScanningLoader" - if self.test_runner is None: - self.test_runner = getattr(self.distribution, 'test_runner', None) - - @NonDataProperty - def test_args(self): - return list(self._test_args()) - - def _test_args(self): - if not self.test_suite and sys.version_info >= (2, 7): - yield 'discover' - if self.verbose: - yield '--verbose' - if self.test_suite: - yield self.test_suite - - def with_project_on_sys_path(self, func): - """ - Backward compatibility for project_on_sys_path context. - """ - with self.project_on_sys_path(): - func() - - @contextlib.contextmanager - def project_on_sys_path(self, include_dists=[]): - with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False) - - if with_2to3: - # If we run 2to3 we can not do this inplace: - - # Ensure metadata is up-to-date - self.reinitialize_command('build_py', inplace=0) - self.run_command('build_py') - bpy_cmd = self.get_finalized_command("build_py") - build_path = normalize_path(bpy_cmd.build_lib) - - # Build extensions - self.reinitialize_command('egg_info', egg_base=build_path) - self.run_command('egg_info') - - self.reinitialize_command('build_ext', inplace=0) - self.run_command('build_ext') - else: - # Without 2to3 inplace works fine: - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - ei_cmd = self.get_finalized_command("egg_info") - - old_path = sys.path[:] - old_modules = sys.modules.copy() - - try: - project_path = normalize_path(ei_cmd.egg_base) - sys.path.insert(0, project_path) - working_set.__init__() - add_activation_listener(lambda dist: dist.activate()) - require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - with self.paths_on_pythonpath([project_path]): - yield - finally: - sys.path[:] = old_path - sys.modules.clear() - sys.modules.update(old_modules) - working_set.__init__() - - @staticmethod - @contextlib.contextmanager - def paths_on_pythonpath(paths): - """ - Add the indicated paths to the head of the PYTHONPATH environment - variable so that subprocesses will also see the packages at - these paths. - - Do this in a context that restores the value on exit. 
- """ - nothing = object() - orig_pythonpath = os.environ.get('PYTHONPATH', nothing) - current_pythonpath = os.environ.get('PYTHONPATH', '') - try: - prefix = os.pathsep.join(_unique_everseen(paths)) - to_join = filter(None, [prefix, current_pythonpath]) - new_path = os.pathsep.join(to_join) - if new_path: - os.environ['PYTHONPATH'] = new_path - yield - finally: - if orig_pythonpath is nothing: - os.environ.pop('PYTHONPATH', None) - else: - os.environ['PYTHONPATH'] = orig_pythonpath - - @staticmethod - def install_dists(dist): - """ - Install the requirements indicated by self.distribution and - return an iterable of the dists that were built. - """ - ir_d = dist.fetch_build_eggs(dist.install_requires) - tr_d = dist.fetch_build_eggs(dist.tests_require or []) - er_d = dist.fetch_build_eggs( - v for k, v in dist.extras_require.items() - if k.startswith(':') and evaluate_marker(k[1:]) - ) - return itertools.chain(ir_d, tr_d, er_d) - - def run(self): - installed_dists = self.install_dists(self.distribution) - - cmd = ' '.join(self._argv) - if self.dry_run: - self.announce('skipping "%s" (dry run)' % cmd) - return - - self.announce('running "%s"' % cmd) - - paths = map(operator.attrgetter('location'), installed_dists) - with self.paths_on_pythonpath(paths): - with self.project_on_sys_path(): - self.run_tests() - - def run_tests(self): - # Purge modules under test from sys.modules. The test loader will - # re-import them from the build location. Required when 2to3 is used - # with namespace packages. - if six.PY3 and getattr(self.distribution, 'use_2to3', False): - module = self.test_suite.split('.')[0] - if module in _namespace_packages: - del_modules = [] - if module in sys.modules: - del_modules.append(module) - module += '.' - for name in sys.modules: - if name.startswith(module): - del_modules.append(name) - list(map(sys.modules.__delitem__, del_modules)) - - test = unittest.main( - None, None, self._argv, - testLoader=self._resolve_as_ep(self.test_loader), - testRunner=self._resolve_as_ep(self.test_runner), - exit=False, - ) - if not test.result.wasSuccessful(): - msg = 'Test failed: %s' % test.result - self.announce(msg, log.ERROR) - raise DistutilsError(msg) - - @property - def _argv(self): - return ['unittest'] + self.test_args - - @staticmethod - def _resolve_as_ep(val): - """ - Load the indicated attribute value, called, as a as if it were - specified as an entry point. 
- """ - if val is None: - return - parsed = EntryPoint.parse("x=" + val) - return parsed.resolve()() diff --git a/env/lib/python2.7/site-packages/setuptools/command/test.pyc b/env/lib/python2.7/site-packages/setuptools/command/test.pyc deleted file mode 100644 index 0f064d82..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/test.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/upload.py b/env/lib/python2.7/site-packages/setuptools/command/upload.py deleted file mode 100644 index 6db8888b..00000000 --- a/env/lib/python2.7/site-packages/setuptools/command/upload.py +++ /dev/null @@ -1,196 +0,0 @@ -import io -import os -import hashlib -import getpass - -from base64 import standard_b64encode - -from distutils import log -from distutils.command import upload as orig -from distutils.spawn import spawn - -from distutils.errors import DistutilsError - -from setuptools.extern.six.moves.urllib.request import urlopen, Request -from setuptools.extern.six.moves.urllib.error import HTTPError -from setuptools.extern.six.moves.urllib.parse import urlparse - - -class upload(orig.upload): - """ - Override default upload behavior to obtain password - in a variety of different ways. - """ - def run(self): - try: - orig.upload.run(self) - finally: - self.announce( - "WARNING: Uploading via this command is deprecated, use twine " - "to upload instead (https://pypi.org/p/twine/)", - log.WARN - ) - - def finalize_options(self): - orig.upload.finalize_options(self) - self.username = ( - self.username or - getpass.getuser() - ) - # Attempt to obtain password. Short circuit evaluation at the first - # sign of success. - self.password = ( - self.password or - self._load_password_from_keyring() or - self._prompt_for_password() - ) - - def upload_file(self, command, pyversion, filename): - # Makes sure the repository URL is compliant - schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) - if params or query or fragments: - raise AssertionError("Incompatible url %s" % self.repository) - - if schema not in ('http', 'https'): - raise AssertionError("unsupported schema " + schema) - - # Sign if requested - if self.sign: - gpg_args = ["gpg", "--detach-sign", "-a", filename] - if self.identity: - gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) - - # Fill in the data - send all the meta-data in case we need to - # register a new release - with open(filename, 'rb') as f: - content = f.read() - - meta = self.distribution.metadata - - data = { - # action - ':action': 'file_upload', - 'protocol_version': '1', - - # identify release - 'name': meta.get_name(), - 'version': meta.get_version(), - - # file content - 'content': (os.path.basename(filename), content), - 'filetype': command, - 'pyversion': pyversion, - 'md5_digest': hashlib.md5(content).hexdigest(), - - # additional meta-data - 'metadata_version': str(meta.get_metadata_version()), - 'summary': meta.get_description(), - 'home_page': meta.get_url(), - 'author': meta.get_contact(), - 'author_email': meta.get_contact_email(), - 'license': meta.get_licence(), - 'description': meta.get_long_description(), - 'keywords': meta.get_keywords(), - 'platform': meta.get_platforms(), - 'classifiers': meta.get_classifiers(), - 'download_url': meta.get_download_url(), - # PEP 314 - 'provides': meta.get_provides(), - 'requires': meta.get_requires(), - 'obsoletes': meta.get_obsoletes(), - } - - data['comment'] = '' - - if self.sign: - data['gpg_signature'] = 
(os.path.basename(filename) + ".asc", - open(filename+".asc", "rb").read()) - - # set up the authentication - user_pass = (self.username + ":" + self.password).encode('ascii') - # The exact encoding of the authentication string is debated. - # Anyway PyPI only accepts ascii for both username or password. - auth = "Basic " + standard_b64encode(user_pass).decode('ascii') - - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b'\r\n--' + boundary.encode('ascii') - end_boundary = sep_boundary + b'--\r\n' - body = io.BytesIO() - for key, value in data.items(): - title = '\r\nContent-Disposition: form-data; name="%s"' % key - # handle multiple entries for the same name - if not isinstance(value, list): - value = [value] - for value in value: - if type(value) is tuple: - title += '; filename="%s"' % value[0] - value = value[1] - else: - value = str(value).encode('utf-8') - body.write(sep_boundary) - body.write(title.encode('utf-8')) - body.write(b"\r\n\r\n") - body.write(value) - body.write(end_boundary) - body = body.getvalue() - - msg = "Submitting %s to %s" % (filename, self.repository) - self.announce(msg, log.INFO) - - # build the Request - headers = { - 'Content-type': 'multipart/form-data; boundary=%s' % boundary, - 'Content-length': str(len(body)), - 'Authorization': auth, - } - - request = Request(self.repository, data=body, - headers=headers) - # send the data - try: - result = urlopen(request) - status = result.getcode() - reason = result.msg - except HTTPError as e: - status = e.code - reason = e.msg - except OSError as e: - self.announce(str(e), log.ERROR) - raise - - if status == 200: - self.announce('Server response (%s): %s' % (status, reason), - log.INFO) - if self.show_response: - text = getattr(self, '_read_pypi_response', - lambda x: None)(result) - if text is not None: - msg = '\n'.join(('-' * 75, text, '-' * 75)) - self.announce(msg, log.INFO) - else: - msg = 'Upload failed (%s): %s' % (status, reason) - self.announce(msg, log.ERROR) - raise DistutilsError(msg) - - def _load_password_from_keyring(self): - """ - Attempt to load password from keyring. Suppress Exceptions. - """ - try: - keyring = __import__('keyring') - return keyring.get_password(self.repository, self.username) - except Exception: - pass - - def _prompt_for_password(self): - """ - Prompt for a password on the tty. Suppress Exceptions. 
- """ - try: - return getpass.getpass() - except (Exception, KeyboardInterrupt): - pass diff --git a/env/lib/python2.7/site-packages/setuptools/command/upload.pyc b/env/lib/python2.7/site-packages/setuptools/command/upload.pyc deleted file mode 100644 index 742411a9..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/upload.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/upload_docs.pyc b/env/lib/python2.7/site-packages/setuptools/command/upload_docs.pyc deleted file mode 100644 index 3f7b66a9..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/command/upload_docs.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/config.py b/env/lib/python2.7/site-packages/setuptools/config.py deleted file mode 100644 index b6626043..00000000 --- a/env/lib/python2.7/site-packages/setuptools/config.py +++ /dev/null @@ -1,656 +0,0 @@ -from __future__ import absolute_import, unicode_literals -import io -import os -import sys - -import warnings -import functools -from collections import defaultdict -from functools import partial -from functools import wraps -from importlib import import_module - -from distutils.errors import DistutilsOptionError, DistutilsFileError -from setuptools.extern.packaging.version import LegacyVersion, parse -from setuptools.extern.six import string_types, PY3 - - -__metaclass__ = type - - -def read_configuration( - filepath, find_others=False, ignore_option_errors=False): - """Read given configuration file and returns options from it as a dict. - - :param str|unicode filepath: Path to configuration file - to get options from. - - :param bool find_others: Whether to search for other configuration files - which could be on in various places. - - :param bool ignore_option_errors: Whether to silently ignore - options, values of which could not be resolved (e.g. due to exceptions - in directives such as file:, attr:, etc.). - If False exceptions are propagated as expected. - - :rtype: dict - """ - from setuptools.dist import Distribution, _Distribution - - filepath = os.path.abspath(filepath) - - if not os.path.isfile(filepath): - raise DistutilsFileError( - 'Configuration file %s does not exist.' % filepath) - - current_directory = os.getcwd() - os.chdir(os.path.dirname(filepath)) - - try: - dist = Distribution() - - filenames = dist.find_config_files() if find_others else [] - if filepath not in filenames: - filenames.append(filepath) - - _Distribution.parse_config_files(dist, filenames=filenames) - - handlers = parse_configuration( - dist, dist.command_options, - ignore_option_errors=ignore_option_errors) - - finally: - os.chdir(current_directory) - - return configuration_to_dict(handlers) - - -def _get_option(target_obj, key): - """ - Given a target object and option key, get that option from - the target object, either through a get_{key} method or - from an attribute directly. - """ - getter_name = 'get_{key}'.format(**locals()) - by_attribute = functools.partial(getattr, target_obj, key) - getter = getattr(target_obj, getter_name, by_attribute) - return getter() - - -def configuration_to_dict(handlers): - """Returns configuration data gathered by given handlers as a dict. 
- - :param list[ConfigHandler] handlers: Handlers list, - usually from parse_configuration() - - :rtype: dict - """ - config_dict = defaultdict(dict) - - for handler in handlers: - for option in handler.set_options: - value = _get_option(handler.target_obj, option) - config_dict[handler.section_prefix][option] = value - - return config_dict - - -def parse_configuration( - distribution, command_options, ignore_option_errors=False): - """Performs additional parsing of configuration options - for a distribution. - - Returns a list of used option handlers. - - :param Distribution distribution: - :param dict command_options: - :param bool ignore_option_errors: Whether to silently ignore - options, values of which could not be resolved (e.g. due to exceptions - in directives such as file:, attr:, etc.). - If False exceptions are propagated as expected. - :rtype: list - """ - options = ConfigOptionsHandler( - distribution, command_options, ignore_option_errors) - options.parse() - - meta = ConfigMetadataHandler( - distribution.metadata, command_options, ignore_option_errors, - distribution.package_dir) - meta.parse() - - return meta, options - - -class ConfigHandler: - """Handles metadata supplied in configuration files.""" - - section_prefix = None - """Prefix for config sections handled by this handler. - Must be provided by class heirs. - - """ - - aliases = {} - """Options aliases. - For compatibility with various packages. E.g.: d2to1 and pbr. - Note: `-` in keys is replaced with `_` by config parser. - - """ - - def __init__(self, target_obj, options, ignore_option_errors=False): - sections = {} - - section_prefix = self.section_prefix - for section_name, section_options in options.items(): - if not section_name.startswith(section_prefix): - continue - - section_name = section_name.replace(section_prefix, '').strip('.') - sections[section_name] = section_options - - self.ignore_option_errors = ignore_option_errors - self.target_obj = target_obj - self.sections = sections - self.set_options = [] - - @property - def parsers(self): - """Metadata item name to parser function mapping.""" - raise NotImplementedError( - '%s must provide .parsers property' % self.__class__.__name__) - - def __setitem__(self, option_name, value): - unknown = tuple() - target_obj = self.target_obj - - # Translate alias into real name. - option_name = self.aliases.get(option_name, option_name) - - current_value = getattr(target_obj, option_name, unknown) - - if current_value is unknown: - raise KeyError(option_name) - - if current_value: - # Already inhabited. Skipping. - return - - skip_option = False - parser = self.parsers.get(option_name) - if parser: - try: - value = parser(value) - - except Exception: - skip_option = True - if not self.ignore_option_errors: - raise - - if skip_option: - return - - setter = getattr(target_obj, 'set_%s' % option_name, None) - if setter is None: - setattr(target_obj, option_name, value) - else: - setter(value) - - self.set_options.append(option_name) - - @classmethod - def _parse_list(cls, value, separator=','): - """Represents value as a list. - - Value is split either by separator (defaults to comma) or by lines. - - :param value: - :param separator: List items separator character. 
- :rtype: list - """ - if isinstance(value, list): # _get_parser_compound case - return value - - if '\n' in value: - value = value.splitlines() - else: - value = value.split(separator) - - return [chunk.strip() for chunk in value if chunk.strip()] - - @classmethod - def _parse_dict(cls, value): - """Represents value as a dict. - - :param value: - :rtype: dict - """ - separator = '=' - result = {} - for line in cls._parse_list(value): - key, sep, val = line.partition(separator) - if sep != separator: - raise DistutilsOptionError( - 'Unable to parse option value to dict: %s' % value) - result[key.strip()] = val.strip() - - return result - - @classmethod - def _parse_bool(cls, value): - """Represents value as boolean. - - :param value: - :rtype: bool - """ - value = value.lower() - return value in ('1', 'true', 'yes') - - @classmethod - def _exclude_files_parser(cls, key): - """Returns a parser function to make sure field inputs - are not files. - - Parses a value after getting the key so error messages are - more informative. - - :param key: - :rtype: callable - """ - def parser(value): - exclude_directive = 'file:' - if value.startswith(exclude_directive): - raise ValueError( - 'Only strings are accepted for the {0} field, ' - 'files are not accepted'.format(key)) - return value - return parser - - @classmethod - def _parse_file(cls, value): - """Represents value as a string, allowing including text - from nearest files using `file:` directive. - - Directive is sandboxed and won't reach anything outside - directory with setup.py. - - Examples: - file: README.rst, CHANGELOG.md, src/file.txt - - :param str value: - :rtype: str - """ - include_directive = 'file:' - - if not isinstance(value, string_types): - return value - - if not value.startswith(include_directive): - return value - - spec = value[len(include_directive):] - filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) - return '\n'.join( - cls._read_file(path) - for path in filepaths - if (cls._assert_local(path) or True) - and os.path.isfile(path) - ) - - @staticmethod - def _assert_local(filepath): - if not filepath.startswith(os.getcwd()): - raise DistutilsOptionError( - '`file:` directive can not access %s' % filepath) - - @staticmethod - def _read_file(filepath): - with io.open(filepath, encoding='utf-8') as f: - return f.read() - - @classmethod - def _parse_attr(cls, value, package_dir=None): - """Represents value as a module attribute. 
- - Examples: - attr: package.attr - attr: package.module.attr - - :param str value: - :rtype: str - """ - attr_directive = 'attr:' - if not value.startswith(attr_directive): - return value - - attrs_path = value.replace(attr_directive, '').strip().split('.') - attr_name = attrs_path.pop() - - module_name = '.'.join(attrs_path) - module_name = module_name or '__init__' - - parent_path = os.getcwd() - if package_dir: - if attrs_path[0] in package_dir: - # A custom path was specified for the module we want to import - custom_path = package_dir[attrs_path[0]] - parts = custom_path.rsplit('/', 1) - if len(parts) > 1: - parent_path = os.path.join(os.getcwd(), parts[0]) - module_name = parts[1] - else: - module_name = custom_path - elif '' in package_dir: - # A custom parent directory was specified for all root modules - parent_path = os.path.join(os.getcwd(), package_dir['']) - sys.path.insert(0, parent_path) - try: - module = import_module(module_name) - value = getattr(module, attr_name) - - finally: - sys.path = sys.path[1:] - - return value - - @classmethod - def _get_parser_compound(cls, *parse_methods): - """Returns parser function to represents value as a list. - - Parses a value applying given methods one after another. - - :param parse_methods: - :rtype: callable - """ - def parse(value): - parsed = value - - for method in parse_methods: - parsed = method(parsed) - - return parsed - - return parse - - @classmethod - def _parse_section_to_dict(cls, section_options, values_parser=None): - """Parses section options into a dictionary. - - Optionally applies a given parser to values. - - :param dict section_options: - :param callable values_parser: - :rtype: dict - """ - value = {} - values_parser = values_parser or (lambda val: val) - for key, (_, val) in section_options.items(): - value[key] = values_parser(val) - return value - - def parse_section(self, section_options): - """Parses configuration file section. - - :param dict section_options: - """ - for (name, (_, value)) in section_options.items(): - try: - self[name] = value - - except KeyError: - pass # Keep silent for a new option may appear anytime. - - def parse(self): - """Parses configuration file items from one - or more related sections. - - """ - for section_name, section_options in self.sections.items(): - - method_postfix = '' - if section_name: # [section.option] variant - method_postfix = '_%s' % section_name - - section_parser_method = getattr( - self, - # Dots in section names are translated into dunderscores. 
- ('parse_section%s' % method_postfix).replace('.', '__'), - None) - - if section_parser_method is None: - raise DistutilsOptionError( - 'Unsupported distribution option section: [%s.%s]' % ( - self.section_prefix, section_name)) - - section_parser_method(section_options) - - def _deprecated_config_handler(self, func, msg, warning_class): - """ this function will wrap around parameters that are deprecated - - :param msg: deprecation message - :param warning_class: class of warning exception to be raised - :param func: function to be wrapped around - """ - @wraps(func) - def config_handler(*args, **kwargs): - warnings.warn(msg, warning_class) - return func(*args, **kwargs) - - return config_handler - - -class ConfigMetadataHandler(ConfigHandler): - - section_prefix = 'metadata' - - aliases = { - 'home_page': 'url', - 'summary': 'description', - 'classifier': 'classifiers', - 'platform': 'platforms', - } - - strict_mode = False - """We need to keep it loose, to be partially compatible with - `pbr` and `d2to1` packages which also uses `metadata` section. - - """ - - def __init__(self, target_obj, options, ignore_option_errors=False, - package_dir=None): - super(ConfigMetadataHandler, self).__init__(target_obj, options, - ignore_option_errors) - self.package_dir = package_dir - - @property - def parsers(self): - """Metadata item name to parser function mapping.""" - parse_list = self._parse_list - parse_file = self._parse_file - parse_dict = self._parse_dict - exclude_files_parser = self._exclude_files_parser - - return { - 'platforms': parse_list, - 'keywords': parse_list, - 'provides': parse_list, - 'requires': self._deprecated_config_handler( - parse_list, - "The requires parameter is deprecated, please use " - "install_requires for runtime dependencies.", - DeprecationWarning), - 'obsoletes': parse_list, - 'classifiers': self._get_parser_compound(parse_file, parse_list), - 'license': exclude_files_parser('license'), - 'description': parse_file, - 'long_description': parse_file, - 'version': self._parse_version, - 'project_urls': parse_dict, - } - - def _parse_version(self, value): - """Parses `version` option value. 
- - :param value: - :rtype: str - - """ - version = self._parse_file(value) - - if version != value: - version = version.strip() - # Be strict about versions loaded from file because it's easy to - # accidentally include newlines and other unintended content - if isinstance(parse(version), LegacyVersion): - tmpl = ( - 'Version loaded from {value} does not ' - 'comply with PEP 440: {version}' - ) - raise DistutilsOptionError(tmpl.format(**locals())) - - return version - - version = self._parse_attr(value, self.package_dir) - - if callable(version): - version = version() - - if not isinstance(version, string_types): - if hasattr(version, '__iter__'): - version = '.'.join(map(str, version)) - else: - version = '%s' % version - - return version - - -class ConfigOptionsHandler(ConfigHandler): - - section_prefix = 'options' - - @property - def parsers(self): - """Metadata item name to parser function mapping.""" - parse_list = self._parse_list - parse_list_semicolon = partial(self._parse_list, separator=';') - parse_bool = self._parse_bool - parse_dict = self._parse_dict - - return { - 'zip_safe': parse_bool, - 'use_2to3': parse_bool, - 'include_package_data': parse_bool, - 'package_dir': parse_dict, - 'use_2to3_fixers': parse_list, - 'use_2to3_exclude_fixers': parse_list, - 'convert_2to3_doctests': parse_list, - 'scripts': parse_list, - 'eager_resources': parse_list, - 'dependency_links': parse_list, - 'namespace_packages': parse_list, - 'install_requires': parse_list_semicolon, - 'setup_requires': parse_list_semicolon, - 'tests_require': parse_list_semicolon, - 'packages': self._parse_packages, - 'entry_points': self._parse_file, - 'py_modules': parse_list, - } - - def _parse_packages(self, value): - """Parses `packages` option value. - - :param value: - :rtype: list - """ - find_directives = ['find:', 'find_namespace:'] - trimmed_value = value.strip() - - if trimmed_value not in find_directives: - return self._parse_list(value) - - findns = trimmed_value == find_directives[1] - if findns and not PY3: - raise DistutilsOptionError( - 'find_namespace: directive is unsupported on Python < 3.3') - - # Read function arguments from a dedicated section. - find_kwargs = self.parse_section_packages__find( - self.sections.get('packages.find', {})) - - if findns: - from setuptools import find_namespace_packages as find_packages - else: - from setuptools import find_packages - - return find_packages(**find_kwargs) - - def parse_section_packages__find(self, section_options): - """Parses `packages.find` configuration file section. - - To be used in conjunction with _parse_packages(). - - :param dict section_options: - """ - section_data = self._parse_section_to_dict( - section_options, self._parse_list) - - valid_keys = ['where', 'include', 'exclude'] - - find_kwargs = dict( - [(k, v) for k, v in section_data.items() if k in valid_keys and v]) - - where = find_kwargs.get('where') - if where is not None: - find_kwargs['where'] = where[0] # cast list to single val - - return find_kwargs - - def parse_section_entry_points(self, section_options): - """Parses `entry_points` configuration file section. 
- - :param dict section_options: - """ - parsed = self._parse_section_to_dict(section_options, self._parse_list) - self['entry_points'] = parsed - - def _parse_package_data(self, section_options): - parsed = self._parse_section_to_dict(section_options, self._parse_list) - - root = parsed.get('*') - if root: - parsed[''] = root - del parsed['*'] - - return parsed - - def parse_section_package_data(self, section_options): - """Parses `package_data` configuration file section. - - :param dict section_options: - """ - self['package_data'] = self._parse_package_data(section_options) - - def parse_section_exclude_package_data(self, section_options): - """Parses `exclude_package_data` configuration file section. - - :param dict section_options: - """ - self['exclude_package_data'] = self._parse_package_data( - section_options) - - def parse_section_extras_require(self, section_options): - """Parses `extras_require` configuration file section. - - :param dict section_options: - """ - parse_list = partial(self._parse_list, separator=';') - self['extras_require'] = self._parse_section_to_dict( - section_options, parse_list) - - def parse_section_data_files(self, section_options): - """Parses `data_files` configuration file section. - - :param dict section_options: - """ - parsed = self._parse_section_to_dict(section_options, self._parse_list) - self['data_files'] = [(k, v) for k, v in parsed.items()] diff --git a/env/lib/python2.7/site-packages/setuptools/config.pyc b/env/lib/python2.7/site-packages/setuptools/config.pyc deleted file mode 100644 index c66233b8..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/config.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/dep_util.py b/env/lib/python2.7/site-packages/setuptools/dep_util.py deleted file mode 100644 index 2931c13e..00000000 --- a/env/lib/python2.7/site-packages/setuptools/dep_util.py +++ /dev/null @@ -1,23 +0,0 @@ -from distutils.dep_util import newer_group - -# yes, this is was almost entirely copy-pasted from -# 'newer_pairwise()', this is just another convenience -# function. -def newer_pairwise_group(sources_groups, targets): - """Walk both arguments in parallel, testing if each source group is newer - than its corresponding target. Returns a pair of lists (sources_groups, - targets) where sources is newer than target, according to the semantics - of 'newer_group()'. 
- """ - if len(sources_groups) != len(targets): - raise ValueError("'sources_group' and 'targets' must be the same length") - - # build a pair of lists (sources_groups, targets) where source is newer - n_sources = [] - n_targets = [] - for i in range(len(sources_groups)): - if newer_group(sources_groups[i], targets[i]): - n_sources.append(sources_groups[i]) - n_targets.append(targets[i]) - - return n_sources, n_targets diff --git a/env/lib/python2.7/site-packages/setuptools/dep_util.pyc b/env/lib/python2.7/site-packages/setuptools/dep_util.pyc deleted file mode 100644 index 45647354..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/dep_util.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/depends.pyc b/env/lib/python2.7/site-packages/setuptools/depends.pyc deleted file mode 100644 index 13dc8a7e..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/depends.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/dist.py b/env/lib/python2.7/site-packages/setuptools/dist.py deleted file mode 100644 index f0f030b5..00000000 --- a/env/lib/python2.7/site-packages/setuptools/dist.py +++ /dev/null @@ -1,1280 +0,0 @@ -# -*- coding: utf-8 -*- -__all__ = ['Distribution'] - -import io -import sys -import re -import os -import warnings -import numbers -import distutils.log -import distutils.core -import distutils.cmd -import distutils.dist -from distutils.util import strtobool -from distutils.debug import DEBUG -from distutils.fancy_getopt import translate_longopt -import itertools - -from collections import defaultdict -from email import message_from_file - -from distutils.errors import ( - DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError, -) -from distutils.util import rfc822_escape -from distutils.version import StrictVersion - -from setuptools.extern import six -from setuptools.extern import packaging -from setuptools.extern.six.moves import map, filter, filterfalse - -from . 
import SetuptoolsDeprecationWarning - -from setuptools.depends import Require -from setuptools import windows_support -from setuptools.monkey import get_unpatched -from setuptools.config import parse_configuration -import pkg_resources - -__import__('setuptools.extern.packaging.specifiers') -__import__('setuptools.extern.packaging.version') - - -def _get_unpatched(cls): - warnings.warn("Do not call this function", DistDeprecationWarning) - return get_unpatched(cls) - - -def get_metadata_version(self): - mv = getattr(self, 'metadata_version', None) - - if mv is None: - if self.long_description_content_type or self.provides_extras: - mv = StrictVersion('2.1') - elif (self.maintainer is not None or - self.maintainer_email is not None or - getattr(self, 'python_requires', None) is not None or - self.project_urls): - mv = StrictVersion('1.2') - elif (self.provides or self.requires or self.obsoletes or - self.classifiers or self.download_url): - mv = StrictVersion('1.1') - else: - mv = StrictVersion('1.0') - - self.metadata_version = mv - - return mv - - -def read_pkg_file(self, file): - """Reads the metadata values from a file object.""" - msg = message_from_file(file) - - def _read_field(name): - value = msg[name] - if value == 'UNKNOWN': - return None - return value - - def _read_list(name): - values = msg.get_all(name, None) - if values == []: - return None - return values - - self.metadata_version = StrictVersion(msg['metadata-version']) - self.name = _read_field('name') - self.version = _read_field('version') - self.description = _read_field('summary') - # we are filling author only. - self.author = _read_field('author') - self.maintainer = None - self.author_email = _read_field('author-email') - self.maintainer_email = None - self.url = _read_field('home-page') - self.license = _read_field('license') - - if 'download-url' in msg: - self.download_url = _read_field('download-url') - else: - self.download_url = None - - self.long_description = _read_field('description') - self.description = _read_field('summary') - - if 'keywords' in msg: - self.keywords = _read_field('keywords').split(',') - - self.platforms = _read_list('platform') - self.classifiers = _read_list('classifier') - - # PEP 314 - these fields only exist in 1.1 - if self.metadata_version == StrictVersion('1.1'): - self.requires = _read_list('requires') - self.provides = _read_list('provides') - self.obsoletes = _read_list('obsoletes') - else: - self.requires = None - self.provides = None - self.obsoletes = None - - -# Based on Python 3.5 version -def write_pkg_file(self, file): - """Write the PKG-INFO format data to a file object. 
- """ - version = self.get_metadata_version() - - if six.PY2: - def write_field(key, value): - file.write("%s: %s\n" % (key, self._encode_field(value))) - else: - def write_field(key, value): - file.write("%s: %s\n" % (key, value)) - - write_field('Metadata-Version', str(version)) - write_field('Name', self.get_name()) - write_field('Version', self.get_version()) - write_field('Summary', self.get_description()) - write_field('Home-page', self.get_url()) - - if version < StrictVersion('1.2'): - write_field('Author', self.get_contact()) - write_field('Author-email', self.get_contact_email()) - else: - optional_fields = ( - ('Author', 'author'), - ('Author-email', 'author_email'), - ('Maintainer', 'maintainer'), - ('Maintainer-email', 'maintainer_email'), - ) - - for field, attr in optional_fields: - attr_val = getattr(self, attr) - - if attr_val is not None: - write_field(field, attr_val) - - write_field('License', self.get_license()) - if self.download_url: - write_field('Download-URL', self.download_url) - for project_url in self.project_urls.items(): - write_field('Project-URL', '%s, %s' % project_url) - - long_desc = rfc822_escape(self.get_long_description()) - write_field('Description', long_desc) - - keywords = ','.join(self.get_keywords()) - if keywords: - write_field('Keywords', keywords) - - if version >= StrictVersion('1.2'): - for platform in self.get_platforms(): - write_field('Platform', platform) - else: - self._write_list(file, 'Platform', self.get_platforms()) - - self._write_list(file, 'Classifier', self.get_classifiers()) - - # PEP 314 - self._write_list(file, 'Requires', self.get_requires()) - self._write_list(file, 'Provides', self.get_provides()) - self._write_list(file, 'Obsoletes', self.get_obsoletes()) - - # Setuptools specific for PEP 345 - if hasattr(self, 'python_requires'): - write_field('Requires-Python', self.python_requires) - - # PEP 566 - if self.long_description_content_type: - write_field( - 'Description-Content-Type', - self.long_description_content_type - ) - if self.provides_extras: - for extra in self.provides_extras: - write_field('Provides-Extra', extra) - - -sequence = tuple, list - - -def check_importable(dist, attr, value): - try: - ep = pkg_resources.EntryPoint.parse('x=' + value) - assert not ep.extras - except (TypeError, ValueError, AttributeError, AssertionError): - raise DistutilsSetupError( - "%r must be importable 'module:attrs' string (got %r)" - % (attr, value) - ) - - -def assert_string_list(dist, attr, value): - """Verify that value is a string list""" - try: - # verify that value is a list or tuple to exclude unordered - # or single-use iterables - assert isinstance(value, (list, tuple)) - # verify that elements of value are strings - assert ''.join(value) != value - except (TypeError, ValueError, AttributeError, AssertionError): - raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr, value) - ) - - -def check_nsp(dist, attr, value): - """Verify that namespace packages are valid""" - ns_packages = value - assert_string_list(dist, attr, ns_packages) - for nsp in ns_packages: - if not dist.has_contents_for(nsp): - raise DistutilsSetupError( - "Distribution contains no modules or packages for " + - "namespace package %r" % nsp - ) - parent, sep, child = nsp.rpartition('.') - if parent and parent not in ns_packages: - distutils.log.warn( - "WARNING: %r is declared as a package namespace, but %r" - " is not: please correct this in setup.py", nsp, parent - ) - - -def check_extras(dist, attr, value): - """Verify that 
extras_require mapping is valid""" - try: - list(itertools.starmap(_check_extra, value.items())) - except (TypeError, ValueError, AttributeError): - raise DistutilsSetupError( - "'extras_require' must be a dictionary whose values are " - "strings or lists of strings containing valid project/version " - "requirement specifiers." - ) - - -def _check_extra(extra, reqs): - name, sep, marker = extra.partition(':') - if marker and pkg_resources.invalid_marker(marker): - raise DistutilsSetupError("Invalid environment marker: " + marker) - list(pkg_resources.parse_requirements(reqs)) - - -def assert_bool(dist, attr, value): - """Verify that value is True, False, 0, or 1""" - if bool(value) != value: - tmpl = "{attr!r} must be a boolean value (got {value!r})" - raise DistutilsSetupError(tmpl.format(attr=attr, value=value)) - - -def check_requirements(dist, attr, value): - """Verify that install_requires is a valid requirements list""" - try: - list(pkg_resources.parse_requirements(value)) - if isinstance(value, (dict, set)): - raise TypeError("Unordered types are not allowed") - except (TypeError, ValueError) as error: - tmpl = ( - "{attr!r} must be a string or list of strings " - "containing valid project/version requirement specifiers; {error}" - ) - raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) - - -def check_specifier(dist, attr, value): - """Verify that value is a valid version specifier""" - try: - packaging.specifiers.SpecifierSet(value) - except packaging.specifiers.InvalidSpecifier as error: - tmpl = ( - "{attr!r} must be a string " - "containing valid version specifiers; {error}" - ) - raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) - - -def check_entry_points(dist, attr, value): - """Verify that entry_points map is parseable""" - try: - pkg_resources.EntryPoint.parse_map(value) - except ValueError as e: - raise DistutilsSetupError(e) - - -def check_test_suite(dist, attr, value): - if not isinstance(value, six.string_types): - raise DistutilsSetupError("test_suite must be a string") - - -def check_package_data(dist, attr, value): - """Verify that value is a dictionary of package names to glob lists""" - if not isinstance(value, dict): - raise DistutilsSetupError( - "{!r} must be a dictionary mapping package names to lists of " - "string wildcard patterns".format(attr)) - for k, v in value.items(): - if not isinstance(k, six.string_types): - raise DistutilsSetupError( - "keys of {!r} dict must be strings (got {!r})" - .format(attr, k) - ) - assert_string_list(dist, 'values of {!r} dict'.format(attr), v) - - -def check_packages(dist, attr, value): - for pkgname in value: - if not re.match(r'\w+(\.\w+)*', pkgname): - distutils.log.warn( - "WARNING: %r not a valid package name; please use only " - ".-separated package names in setup.py", pkgname - ) - - -_Distribution = get_unpatched(distutils.core.Distribution) - - -class Distribution(_Distribution): - """Distribution with support for features, tests, and package data - - This is an enhanced version of 'distutils.dist.Distribution' that - effectively adds the following new optional keyword arguments to 'setup()': - - 'install_requires' -- a string or sequence of strings specifying project - versions that the distribution requires when installed, in the format - used by 'pkg_resources.require()'. They will be installed - automatically when the package is installed. 
If you wish to use - packages that are not available in PyPI, or want to give your users an - alternate download location, you can add a 'find_links' option to the - '[easy_install]' section of your project's 'setup.cfg' file, and then - setuptools will scan the listed web pages for links that satisfy the - requirements. - - 'extras_require' -- a dictionary mapping names of optional "extras" to the - additional requirement(s) that using those extras incurs. For example, - this:: - - extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) - - indicates that the distribution can optionally provide an extra - capability called "reST", but it can only be used if docutils and - reSTedit are installed. If the user installs your package using - EasyInstall and requests one of your extras, the corresponding - additional requirements will be installed if needed. - - 'features' **deprecated** -- a dictionary mapping option names to - 'setuptools.Feature' - objects. Features are a portion of the distribution that can be - included or excluded based on user options, inter-feature dependencies, - and availability on the current system. Excluded features are omitted - from all setup commands, including source and binary distributions, so - you can create multiple distributions from the same source tree. - Feature names should be valid Python identifiers, except that they may - contain the '-' (minus) sign. Features can be included or excluded - via the command line options '--with-X' and '--without-X', where 'X' is - the name of the feature. Whether a feature is included by default, and - whether you are allowed to control this from the command line, is - determined by the Feature object. See the 'Feature' class for more - information. - - 'test_suite' -- the name of a test suite to run for the 'test' command. - If the user runs 'python setup.py test', the package will be installed, - and the named test suite will be run. The format is the same as - would be used on a 'unittest.py' command line. That is, it is the - dotted name of an object to import and call to generate a test suite. - - 'package_data' -- a dictionary mapping package names to lists of filenames - or globs to use to find data files contained in the named packages. - If the dictionary has filenames or globs listed under '""' (the empty - string), those names will be searched for in every package, in addition - to any names for the specific package. Data files found using these - names/globs will be installed along with the package, in the same - location as the package. Note that globs are allowed to reference - the contents of non-package subdirectories, as long as you use '/' as - a path separator. (Globs are automatically converted to - platform-specific paths at runtime.) - - In addition to these new keywords, this class also has several new methods - for manipulating the distribution's contents. For example, the 'include()' - and 'exclude()' methods can be thought of as in-place add and subtract - commands that add or remove packages, modules, extensions, and so on from - the distribution. They are used by the feature subsystem to configure the - distribution for the included and excluded features. 
- """ - - _DISTUTILS_UNSUPPORTED_METADATA = { - 'long_description_content_type': None, - 'project_urls': dict, - 'provides_extras': set, - } - - _patched_dist = None - - def patch_missing_pkg_info(self, attrs): - # Fake up a replacement for the data that would normally come from - # PKG-INFO, but which might not yet be built if this is a fresh - # checkout. - # - if not attrs or 'name' not in attrs or 'version' not in attrs: - return - key = pkg_resources.safe_name(str(attrs['name'])).lower() - dist = pkg_resources.working_set.by_key.get(key) - if dist is not None and not dist.has_metadata('PKG-INFO'): - dist._version = pkg_resources.safe_version(str(attrs['version'])) - self._patched_dist = dist - - def __init__(self, attrs=None): - have_package_data = hasattr(self, "package_data") - if not have_package_data: - self.package_data = {} - attrs = attrs or {} - if 'features' in attrs or 'require_features' in attrs: - Feature.warn_deprecated() - self.require_features = [] - self.features = {} - self.dist_files = [] - # Filter-out setuptools' specific options. - self.src_root = attrs.pop("src_root", None) - self.patch_missing_pkg_info(attrs) - self.dependency_links = attrs.pop('dependency_links', []) - self.setup_requires = attrs.pop('setup_requires', []) - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - vars(self).setdefault(ep.name, None) - _Distribution.__init__(self, { - k: v for k, v in attrs.items() - if k not in self._DISTUTILS_UNSUPPORTED_METADATA - }) - - # Fill-in missing metadata fields not supported by distutils. - # Note some fields may have been set by other tools (e.g. pbr) - # above; they are taken preferrentially to setup() arguments - for option, default in self._DISTUTILS_UNSUPPORTED_METADATA.items(): - for source in self.metadata.__dict__, attrs: - if option in source: - value = source[option] - break - else: - value = default() if default else None - setattr(self.metadata, option, value) - - if isinstance(self.metadata.version, numbers.Number): - # Some people apparently take "version number" too literally :) - self.metadata.version = str(self.metadata.version) - - if self.metadata.version is not None: - try: - ver = packaging.version.Version(self.metadata.version) - normalized_version = str(ver) - if self.metadata.version != normalized_version: - warnings.warn( - "Normalizing '%s' to '%s'" % ( - self.metadata.version, - normalized_version, - ) - ) - self.metadata.version = normalized_version - except (packaging.version.InvalidVersion, TypeError): - warnings.warn( - "The version specified (%r) is an invalid version, this " - "may not work as expected with newer versions of " - "setuptools, pip, and PyPI. Please see PEP 440 for more " - "details." % self.metadata.version - ) - self._finalize_requires() - - def _finalize_requires(self): - """ - Set `metadata.python_requires` and fix environment markers - in `install_requires` and `extras_require`. - """ - if getattr(self, 'python_requires', None): - self.metadata.python_requires = self.python_requires - - if getattr(self, 'extras_require', None): - for extra in self.extras_require.keys(): - # Since this gets called multiple times at points where the - # keys have become 'converted' extras, ensure that we are only - # truly adding extras we haven't seen before here. 
- extra = extra.split(':')[0] - if extra: - self.metadata.provides_extras.add(extra) - - self._convert_extras_requirements() - self._move_install_requirements_markers() - - def _convert_extras_requirements(self): - """ - Convert requirements in `extras_require` of the form - `"extra": ["barbazquux; {marker}"]` to - `"extra:{marker}": ["barbazquux"]`. - """ - spec_ext_reqs = getattr(self, 'extras_require', None) or {} - self._tmp_extras_require = defaultdict(list) - for section, v in spec_ext_reqs.items(): - # Do not strip empty sections. - self._tmp_extras_require[section] - for r in pkg_resources.parse_requirements(v): - suffix = self._suffix_for(r) - self._tmp_extras_require[section + suffix].append(r) - - @staticmethod - def _suffix_for(req): - """ - For a requirement, return the 'extras_require' suffix for - that requirement. - """ - return ':' + str(req.marker) if req.marker else '' - - def _move_install_requirements_markers(self): - """ - Move requirements in `install_requires` that are using environment - markers `extras_require`. - """ - - # divide the install_requires into two sets, simple ones still - # handled by install_requires and more complex ones handled - # by extras_require. - - def is_simple_req(req): - return not req.marker - - spec_inst_reqs = getattr(self, 'install_requires', None) or () - inst_reqs = list(pkg_resources.parse_requirements(spec_inst_reqs)) - simple_reqs = filter(is_simple_req, inst_reqs) - complex_reqs = filterfalse(is_simple_req, inst_reqs) - self.install_requires = list(map(str, simple_reqs)) - - for r in complex_reqs: - self._tmp_extras_require[':' + str(r.marker)].append(r) - self.extras_require = dict( - (k, [str(r) for r in map(self._clean_req, v)]) - for k, v in self._tmp_extras_require.items() - ) - - def _clean_req(self, req): - """ - Given a Requirement, remove environment markers and return it. - """ - req.marker = None - return req - - def _parse_config_files(self, filenames=None): - """ - Adapted from distutils.dist.Distribution.parse_config_files, - this method provides the same functionality in subtly-improved - ways. - """ - from setuptools.extern.six.moves.configparser import ConfigParser - - # Ignore install directory options if we have a venv - if six.PY3 and sys.prefix != sys.base_prefix: - ignore_options = [ - 'install-base', 'install-platbase', 'install-lib', - 'install-platlib', 'install-purelib', 'install-headers', - 'install-scripts', 'install-data', 'prefix', 'exec-prefix', - 'home', 'user', 'root'] - else: - ignore_options = [] - - ignore_options = frozenset(ignore_options) - - if filenames is None: - filenames = self.find_config_files() - - if DEBUG: - self.announce("Distribution.parse_config_files():") - - parser = ConfigParser() - for filename in filenames: - with io.open(filename, encoding='utf-8') as reader: - if DEBUG: - self.announce(" reading {filename}".format(**locals())) - (parser.read_file if six.PY3 else parser.readfp)(reader) - for section in parser.sections(): - options = parser.options(section) - opt_dict = self.get_option_dict(section) - - for opt in options: - if opt != '__name__' and opt not in ignore_options: - val = self._try_str(parser.get(section, opt)) - opt = opt.replace('-', '_') - opt_dict[opt] = (filename, val) - - # Make the ConfigParser forget everything (so we retain - # the original filenames that options come from) - parser.__init__() - - # If there was a "global" section in the config file, use it - # to set Distribution options. 
- - if 'global' in self.command_options: - for (opt, (src, val)) in self.command_options['global'].items(): - alias = self.negative_opt.get(opt) - try: - if alias: - setattr(self, alias, not strtobool(val)) - elif opt in ('verbose', 'dry_run'): # ugh! - setattr(self, opt, strtobool(val)) - else: - setattr(self, opt, val) - except ValueError as msg: - raise DistutilsOptionError(msg) - - @staticmethod - def _try_str(val): - """ - On Python 2, much of distutils relies on string values being of - type 'str' (bytes) and not unicode text. If the value can be safely - encoded to bytes using the default encoding, prefer that. - - Why the default encoding? Because that value can be implicitly - decoded back to text if needed. - - Ref #1653 - """ - if six.PY3: - return val - try: - return val.encode() - except UnicodeEncodeError: - pass - return val - - def _set_command_options(self, command_obj, option_dict=None): - """ - Set the options for 'command_obj' from 'option_dict'. Basically - this means copying elements of a dictionary ('option_dict') to - attributes of an instance ('command'). - - 'command_obj' must be a Command instance. If 'option_dict' is not - supplied, uses the standard option dictionary for this command - (from 'self.command_options'). - - (Adopted from distutils.dist.Distribution._set_command_options) - """ - command_name = command_obj.get_command_name() - if option_dict is None: - option_dict = self.get_option_dict(command_name) - - if DEBUG: - self.announce(" setting options for '%s' command:" % command_name) - for (option, (source, value)) in option_dict.items(): - if DEBUG: - self.announce(" %s = %s (from %s)" % (option, value, - source)) - try: - bool_opts = [translate_longopt(o) - for o in command_obj.boolean_options] - except AttributeError: - bool_opts = [] - try: - neg_opt = command_obj.negative_opt - except AttributeError: - neg_opt = {} - - try: - is_string = isinstance(value, six.string_types) - if option in neg_opt and is_string: - setattr(command_obj, neg_opt[option], not strtobool(value)) - elif option in bool_opts and is_string: - setattr(command_obj, option, strtobool(value)) - elif hasattr(command_obj, option): - setattr(command_obj, option, value) - else: - raise DistutilsOptionError( - "error in %s: command '%s' has no such option '%s'" - % (source, command_name, option)) - except ValueError as msg: - raise DistutilsOptionError(msg) - - def parse_config_files(self, filenames=None, ignore_option_errors=False): - """Parses configuration files from various levels - and loads configuration. 
- - """ - self._parse_config_files(filenames=filenames) - - parse_configuration(self, self.command_options, - ignore_option_errors=ignore_option_errors) - self._finalize_requires() - - def parse_command_line(self): - """Process features after parsing command line options""" - result = _Distribution.parse_command_line(self) - if self.features: - self._finalize_features() - return result - - def _feature_attrname(self, name): - """Convert feature name to corresponding option attribute name""" - return 'with_' + name.replace('-', '_') - - def fetch_build_eggs(self, requires): - """Resolve pre-setup requirements""" - resolved_dists = pkg_resources.working_set.resolve( - pkg_resources.parse_requirements(requires), - installer=self.fetch_build_egg, - replace_conflicting=True, - ) - for dist in resolved_dists: - pkg_resources.working_set.add(dist, replace=True) - return resolved_dists - - def finalize_options(self): - _Distribution.finalize_options(self) - if self.features: - self._set_global_opts_from_features() - - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - value = getattr(self, ep.name, None) - if value is not None: - ep.require(installer=self.fetch_build_egg) - ep.load()(self, ep.name, value) - if getattr(self, 'convert_2to3_doctests', None): - # XXX may convert to set here when we can rely on set being builtin - self.convert_2to3_doctests = [ - os.path.abspath(p) - for p in self.convert_2to3_doctests - ] - else: - self.convert_2to3_doctests = [] - - def get_egg_cache_dir(self): - egg_cache_dir = os.path.join(os.curdir, '.eggs') - if not os.path.exists(egg_cache_dir): - os.mkdir(egg_cache_dir) - windows_support.hide_file(egg_cache_dir) - readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt') - with open(readme_txt_filename, 'w') as f: - f.write('This directory contains eggs that were downloaded ' - 'by setuptools to build, test, and run plug-ins.\n\n') - f.write('This directory caches those eggs to prevent ' - 'repeated downloads.\n\n') - f.write('However, it is safe to delete this directory.\n\n') - - return egg_cache_dir - - def fetch_build_egg(self, req): - """Fetch an egg needed for building""" - from setuptools.command.easy_install import easy_install - dist = self.__class__({'script_args': ['easy_install']}) - opts = dist.get_option_dict('easy_install') - opts.clear() - opts.update( - (k, v) - for k, v in self.get_option_dict('easy_install').items() - if k in ( - # don't use any other settings - 'find_links', 'site_dirs', 'index_url', - 'optimize', 'site_dirs', 'allow_hosts', - )) - if self.dependency_links: - links = self.dependency_links[:] - if 'find_links' in opts: - links = opts['find_links'][1] + links - opts['find_links'] = ('setup', links) - install_dir = self.get_egg_cache_dir() - cmd = easy_install( - dist, args=["x"], install_dir=install_dir, - exclude_scripts=True, - always_copy=False, build_directory=None, editable=False, - upgrade=False, multi_version=True, no_report=True, user=False - ) - cmd.ensure_finalized() - return cmd.easy_install(req) - - def _set_global_opts_from_features(self): - """Add --with-X/--without-X options based on optional features""" - - go = [] - no = self.negative_opt.copy() - - for name, feature in self.features.items(): - self._set_feature(name, None) - feature.validate(self) - - if feature.optional: - descr = feature.description - incdef = ' (default)' - excdef = '' - if not feature.include_by_default(): - excdef, incdef = incdef, excdef - - new = ( - ('with-' + name, None, 'include ' + descr + incdef), - 
('without-' + name, None, 'exclude ' + descr + excdef), - ) - go.extend(new) - no['without-' + name] = 'with-' + name - - self.global_options = self.feature_options = go + self.global_options - self.negative_opt = self.feature_negopt = no - - def _finalize_features(self): - """Add/remove features and resolve dependencies between them""" - - # First, flag all the enabled items (and thus their dependencies) - for name, feature in self.features.items(): - enabled = self.feature_is_included(name) - if enabled or (enabled is None and feature.include_by_default()): - feature.include_in(self) - self._set_feature(name, 1) - - # Then disable the rest, so that off-by-default features don't - # get flagged as errors when they're required by an enabled feature - for name, feature in self.features.items(): - if not self.feature_is_included(name): - feature.exclude_from(self) - self._set_feature(name, 0) - - def get_command_class(self, command): - """Pluggable version of get_command_class()""" - if command in self.cmdclass: - return self.cmdclass[command] - - eps = pkg_resources.iter_entry_points('distutils.commands', command) - for ep in eps: - ep.require(installer=self.fetch_build_egg) - self.cmdclass[command] = cmdclass = ep.load() - return cmdclass - else: - return _Distribution.get_command_class(self, command) - - def print_commands(self): - for ep in pkg_resources.iter_entry_points('distutils.commands'): - if ep.name not in self.cmdclass: - # don't require extras as the commands won't be invoked - cmdclass = ep.resolve() - self.cmdclass[ep.name] = cmdclass - return _Distribution.print_commands(self) - - def get_command_list(self): - for ep in pkg_resources.iter_entry_points('distutils.commands'): - if ep.name not in self.cmdclass: - # don't require extras as the commands won't be invoked - cmdclass = ep.resolve() - self.cmdclass[ep.name] = cmdclass - return _Distribution.get_command_list(self) - - def _set_feature(self, name, status): - """Set feature's inclusion status""" - setattr(self, self._feature_attrname(name), status) - - def feature_is_included(self, name): - """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" - return getattr(self, self._feature_attrname(name)) - - def include_feature(self, name): - """Request inclusion of feature named 'name'""" - - if self.feature_is_included(name) == 0: - descr = self.features[name].description - raise DistutilsOptionError( - descr + " is required, but was excluded or is not available" - ) - self.features[name].include_in(self) - self._set_feature(name, 1) - - def include(self, **attrs): - """Add items to distribution that are named in keyword arguments - - For example, 'dist.include(py_modules=["x"])' would add 'x' to - the distribution's 'py_modules' attribute, if it was not already - there. - - Currently, this method only supports inclusion for attributes that are - lists or tuples. If you need to add support for adding to other - attributes in this or a subclass, you can add an '_include_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' - will try to call 'dist._include_foo({"bar":"baz"})', which can then - handle whatever special inclusion logic is needed. 
- """ - for k, v in attrs.items(): - include = getattr(self, '_include_' + k, None) - if include: - include(v) - else: - self._include_misc(k, v) - - def exclude_package(self, package): - """Remove packages, modules, and extensions in named package""" - - pfx = package + '.' - if self.packages: - self.packages = [ - p for p in self.packages - if p != package and not p.startswith(pfx) - ] - - if self.py_modules: - self.py_modules = [ - p for p in self.py_modules - if p != package and not p.startswith(pfx) - ] - - if self.ext_modules: - self.ext_modules = [ - p for p in self.ext_modules - if p.name != package and not p.name.startswith(pfx) - ] - - def has_contents_for(self, package): - """Return true if 'exclude_package(package)' would do something""" - - pfx = package + '.' - - for p in self.iter_distribution_names(): - if p == package or p.startswith(pfx): - return True - - def _exclude_misc(self, name, value): - """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value, sequence): - raise DistutilsSetupError( - "%s: setting must be a list or tuple (%r)" % (name, value) - ) - try: - old = getattr(self, name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is not None and not isinstance(old, sequence): - raise DistutilsSetupError( - name + ": this setting cannot be changed via include/exclude" - ) - elif old: - setattr(self, name, [item for item in old if item not in value]) - - def _include_misc(self, name, value): - """Handle 'include()' for list/tuple attrs without a special handler""" - - if not isinstance(value, sequence): - raise DistutilsSetupError( - "%s: setting must be a list (%r)" % (name, value) - ) - try: - old = getattr(self, name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is None: - setattr(self, name, value) - elif not isinstance(old, sequence): - raise DistutilsSetupError( - name + ": this setting cannot be changed via include/exclude" - ) - else: - new = [item for item in value if item not in old] - setattr(self, name, old + new) - - def exclude(self, **attrs): - """Remove items from distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from - the distribution's 'py_modules' attribute. Excluding packages uses - the 'exclude_package()' method, so all of the package's contained - packages, modules, and extensions are also excluded. - - Currently, this method only supports exclusion from attributes that are - lists or tuples. If you need to add support for excluding from other - attributes in this or a subclass, you can add an '_exclude_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' - will try to call 'dist._exclude_foo({"bar":"baz"})', which can then - handle whatever special exclusion logic is needed. 
- """ - for k, v in attrs.items(): - exclude = getattr(self, '_exclude_' + k, None) - if exclude: - exclude(v) - else: - self._exclude_misc(k, v) - - def _exclude_packages(self, packages): - if not isinstance(packages, sequence): - raise DistutilsSetupError( - "packages: setting must be a list or tuple (%r)" % (packages,) - ) - list(map(self.exclude_package, packages)) - - def _parse_command_opts(self, parser, args): - # Remove --with-X/--without-X options when processing command args - self.global_options = self.__class__.global_options - self.negative_opt = self.__class__.negative_opt - - # First, expand any aliases - command = args[0] - aliases = self.get_option_dict('aliases') - while command in aliases: - src, alias = aliases[command] - del aliases[command] # ensure each alias can expand only once! - import shlex - args[:1] = shlex.split(alias, True) - command = args[0] - - nargs = _Distribution._parse_command_opts(self, parser, args) - - # Handle commands that want to consume all remaining arguments - cmd_class = self.get_command_class(command) - if getattr(cmd_class, 'command_consumes_arguments', None): - self.get_option_dict(command)['args'] = ("command line", nargs) - if nargs is not None: - return [] - - return nargs - - def get_cmdline_options(self): - """Return a '{cmd: {opt:val}}' map of all command-line options - - Option names are all long, but do not include the leading '--', and - contain dashes rather than underscores. If the option doesn't take - an argument (e.g. '--quiet'), the 'val' is 'None'. - - Note that options provided by config files are intentionally excluded. - """ - - d = {} - - for cmd, opts in self.command_options.items(): - - for opt, (src, val) in opts.items(): - - if src != "command line": - continue - - opt = opt.replace('_', '-') - - if val == 0: - cmdobj = self.get_command_obj(cmd) - neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj, 'negative_opt', {})) - for neg, pos in neg_opt.items(): - if pos == opt: - opt = neg - val = None - break - else: - raise AssertionError("Shouldn't be able to get here") - - elif val == 1: - val = None - - d.setdefault(cmd, {})[opt] = val - - return d - - def iter_distribution_names(self): - """Yield all packages, modules, and extension names in distribution""" - - for pkg in self.packages or (): - yield pkg - - for module in self.py_modules or (): - yield module - - for ext in self.ext_modules or (): - if isinstance(ext, tuple): - name, buildinfo = ext - else: - name = ext.name - if name.endswith('module'): - name = name[:-6] - yield name - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - import sys - - if six.PY2 or self.help_commands: - return _Distribution.handle_display_options(self, option_order) - - # Stdout may be StringIO (e.g. in tests) - if not isinstance(sys.stdout, io.TextIOWrapper): - return _Distribution.handle_display_options(self, option_order) - - # Don't wrap stdout if utf-8 is already the encoding. Provides - # workaround for #334. 
- if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): - return _Distribution.handle_display_options(self, option_order) - - # Print metadata in UTF-8 no matter the platform - encoding = sys.stdout.encoding - errors = sys.stdout.errors - newline = sys.platform != 'win32' and '\n' or None - line_buffering = sys.stdout.line_buffering - - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), 'utf-8', errors, newline, line_buffering) - try: - return _Distribution.handle_display_options(self, option_order) - finally: - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), encoding, errors, newline, line_buffering) - - -class Feature: - """ - **deprecated** -- The `Feature` facility was never completely implemented - or supported, `has reported issues - <https://github.com/pypa/setuptools/issues/58>`_ and will be removed in - a future version. - - A subset of the distribution that can be excluded if unneeded/wanted - - Features are created using these keyword arguments: - - 'description' -- a short, human readable description of the feature, to - be used in error messages, and option help messages. - - 'standard' -- if true, the feature is included by default if it is - available on the current system. Otherwise, the feature is only - included if requested via a command line '--with-X' option, or if - another included feature requires it. The default setting is 'False'. - - 'available' -- if true, the feature is available for installation on the - current system. The default setting is 'True'. - - 'optional' -- if true, the feature's inclusion can be controlled from the - command line, using the '--with-X' or '--without-X' options. If - false, the feature's inclusion status is determined automatically, - based on 'availabile', 'standard', and whether any other feature - requires it. The default setting is 'True'. - - 'require_features' -- a string or sequence of strings naming features - that should also be included if this feature is included. Defaults to - empty list. May also contain 'Require' objects that should be - added/removed from the distribution. - - 'remove' -- a string or list of strings naming packages to be removed - from the distribution if this feature is *not* included. If the - feature *is* included, this argument is ignored. This argument exists - to support removing features that "crosscut" a distribution, such as - defining a 'tests' feature that removes all the 'tests' subpackages - provided by other features. The default for this argument is an empty - list. (Note: the named package(s) or modules must exist in the base - distribution when the 'setup()' function is initially called.) - - other keywords -- any other keyword arguments are saved, and passed to - the distribution's 'include()' and 'exclude()' methods when the - feature is included or excluded, respectively. So, for example, you - could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be - added or removed from the distribution as appropriate. - - A feature must include at least one 'requires', 'remove', or other - keyword argument. Otherwise, it can't affect the distribution in any way. - Note also that you can subclass 'Feature' to create your own specialized - feature types that modify the distribution in other ways when included or - excluded. See the docstrings for the various methods here for more detail. - Aside from the methods, the only feature attributes that distributions look - at are 'description' and 'optional'. 
- """ - - @staticmethod - def warn_deprecated(): - msg = ( - "Features are deprecated and will be removed in a future " - "version. See https://github.com/pypa/setuptools/issues/65." - ) - warnings.warn(msg, DistDeprecationWarning, stacklevel=3) - - def __init__( - self, description, standard=False, available=True, - optional=True, require_features=(), remove=(), **extras): - self.warn_deprecated() - - self.description = description - self.standard = standard - self.available = available - self.optional = optional - if isinstance(require_features, (str, Require)): - require_features = require_features, - - self.require_features = [ - r for r in require_features if isinstance(r, str) - ] - er = [r for r in require_features if not isinstance(r, str)] - if er: - extras['require_features'] = er - - if isinstance(remove, str): - remove = remove, - self.remove = remove - self.extras = extras - - if not remove and not require_features and not extras: - raise DistutilsSetupError( - "Feature %s: must define 'require_features', 'remove', or " - "at least one of 'packages', 'py_modules', etc." - ) - - def include_by_default(self): - """Should this feature be included by default?""" - return self.available and self.standard - - def include_in(self, dist): - """Ensure feature and its requirements are included in distribution - - You may override this in a subclass to perform additional operations on - the distribution. Note that this method may be called more than once - per feature, and so should be idempotent. - - """ - - if not self.available: - raise DistutilsPlatformError( - self.description + " is required, " - "but is not available on this platform" - ) - - dist.include(**self.extras) - - for f in self.require_features: - dist.include_feature(f) - - def exclude_from(self, dist): - """Ensure feature is excluded from distribution - - You may override this in a subclass to perform additional operations on - the distribution. This method will be called at most once per - feature, and only after all included features have been asked to - include themselves. - """ - - dist.exclude(**self.extras) - - if self.remove: - for item in self.remove: - dist.exclude_package(item) - - def validate(self, dist): - """Verify that feature makes sense in context of distribution - - This method is called by the distribution just before it parses its - command line. It checks to ensure that the 'remove' attribute, if any, - contains only valid package/module names that are present in the base - distribution when 'setup()' is called. You may override it in a - subclass to perform any other required validation of the feature - against a target distribution. - """ - - for item in self.remove: - if not dist.has_contents_for(item): - raise DistutilsSetupError( - "%s wants to be able to remove %s, but the distribution" - " doesn't contain any packages or modules under %s" - % (self.description, item, item) - ) - - -class DistDeprecationWarning(SetuptoolsDeprecationWarning): - """Class for warning about deprecations in dist in - setuptools. 
Not ignored by default, unlike DeprecationWarning.""" diff --git a/env/lib/python2.7/site-packages/setuptools/dist.pyc b/env/lib/python2.7/site-packages/setuptools/dist.pyc deleted file mode 100644 index 209eb5b3..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/dist.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/extension.py b/env/lib/python2.7/site-packages/setuptools/extension.py deleted file mode 100644 index 29468894..00000000 --- a/env/lib/python2.7/site-packages/setuptools/extension.py +++ /dev/null @@ -1,57 +0,0 @@ -import re -import functools -import distutils.core -import distutils.errors -import distutils.extension - -from setuptools.extern.six.moves import map - -from .monkey import get_unpatched - - -def _have_cython(): - """ - Return True if Cython can be imported. - """ - cython_impl = 'Cython.Distutils.build_ext' - try: - # from (cython_impl) import build_ext - __import__(cython_impl, fromlist=['build_ext']).build_ext - return True - except Exception: - pass - return False - - -# for compatibility -have_pyrex = _have_cython - -_Extension = get_unpatched(distutils.core.Extension) - - -class Extension(_Extension): - """Extension that uses '.c' files in place of '.pyx' files""" - - def __init__(self, name, sources, *args, **kw): - # The *args is needed for compatibility as calls may use positional - # arguments. py_limited_api may be set only via keyword. - self.py_limited_api = kw.pop("py_limited_api", False) - _Extension.__init__(self, name, sources, *args, **kw) - - def _convert_pyx_sources_to_lang(self): - """ - Replace sources with .pyx extensions to sources with the target - language extension. This mechanism allows language authors to supply - pre-converted sources but to prefer the .pyx sources. - """ - if _have_cython(): - # the build has Cython, so allow it to compile the .pyx files - return - lang = self.language or '' - target_ext = '.cpp' if lang.lower() == 'c++' else '.c' - sub = functools.partial(re.sub, '.pyx$', target_ext) - self.sources = list(map(sub, self.sources)) - - -class Library(Extension): - """Just like a regular Extension, but built as a library instead""" diff --git a/env/lib/python2.7/site-packages/setuptools/extension.pyc b/env/lib/python2.7/site-packages/setuptools/extension.pyc deleted file mode 100644 index b1f3982f..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/extension.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/extern/__init__.py b/env/lib/python2.7/site-packages/setuptools/extern/__init__.py deleted file mode 100644 index cb2fa329..00000000 --- a/env/lib/python2.7/site-packages/setuptools/extern/__init__.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys - - -class VendorImporter: - """ - A PEP 302 meta path importer for finding optionally-vendored - or otherwise naturally-installed packages from root_name. - """ - - def __init__(self, root_name, vendored_names=(), vendor_pkg=None): - self.root_name = root_name - self.vendored_names = set(vendored_names) - self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') - - @property - def search_path(self): - """ - Search first the vendor package then as a natural package. - """ - yield self.vendor_pkg + '.' - yield '' - - def find_module(self, fullname, path=None): - """ - Return self when fullname starts with root_name and the - target module is one vendored through this importer. 
- """ - root, base, target = fullname.partition(self.root_name + '.') - if root: - return - if not any(map(target.startswith, self.vendored_names)): - return - return self - - def load_module(self, fullname): - """ - Iterate over the search path to locate and load fullname. - """ - root, base, target = fullname.partition(self.root_name + '.') - for prefix in self.search_path: - try: - extant = prefix + target - __import__(extant) - mod = sys.modules[extant] - sys.modules[fullname] = mod - # mysterious hack: - # Remove the reference to the extant package/module - # on later Python versions to cause relative imports - # in the vendor package to resolve the same modules - # as those going through this importer. - if sys.version_info >= (3, ): - del sys.modules[extant] - return mod - except ImportError: - pass - else: - raise ImportError( - "The '{target}' package is required; " - "normally this is bundled with this package so if you get " - "this warning, consult the packager of your " - "distribution.".format(**locals()) - ) - - def install(self): - """ - Install this importer into sys.meta_path if not already present. - """ - if self not in sys.meta_path: - sys.meta_path.append(self) - - -names = 'six', 'packaging', 'pyparsing', -VendorImporter(__name__, names, 'setuptools._vendor').install() diff --git a/env/lib/python2.7/site-packages/setuptools/extern/__init__.pyc b/env/lib/python2.7/site-packages/setuptools/extern/__init__.pyc deleted file mode 100644 index 76b199c6..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/extern/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/glibc.py b/env/lib/python2.7/site-packages/setuptools/glibc.py deleted file mode 100644 index a134591c..00000000 --- a/env/lib/python2.7/site-packages/setuptools/glibc.py +++ /dev/null @@ -1,86 +0,0 @@ -# This file originally from pip: -# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/utils/glibc.py -from __future__ import absolute_import - -import ctypes -import re -import warnings - - -def glibc_version_string(): - "Returns glibc version string, or None if not using glibc." - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - process_namespace = ctypes.CDLL(None) - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -# Separated out from have_compatible_glibc for easier unit testing -def check_glibc_version(version_str, required_major, minimum_minor): - # Parse string and check against requested version. - # - # We use a regexp instead of str.split because we want to discard any - # random junk that might come after the minor version -- this might happen - # in patched/forked versions of glibc (e.g. Linaro's version of glibc - # uses version strings like "2.20-2014.11"). See gh-3588. 
- m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) - if not m: - warnings.warn("Expected glibc version with 2 components major.minor," - " got: %s" % version_str, RuntimeWarning) - return False - return (int(m.group("major")) == required_major and - int(m.group("minor")) >= minimum_minor) - - -def have_compatible_glibc(required_major, minimum_minor): - version_str = glibc_version_string() - if version_str is None: - return False - return check_glibc_version(version_str, required_major, minimum_minor) - - -# platform.libc_ver regularly returns completely nonsensical glibc -# versions. E.g. on my computer, platform says: -# -# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' -# ('glibc', '2.7') -# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' -# ('glibc', '2.9') -# -# But the truth is: -# -# ~$ ldd --version -# ldd (Debian GLIBC 2.22-11) 2.22 -# -# This is unfortunate, because it means that the linehaul data on libc -# versions that was generated by pip 8.1.2 and earlier is useless and -# misleading. Solution: instead of using platform, use our code that actually -# works. -def libc_ver(): - """Try to determine the glibc version - - Returns a tuple of strings (lib, version) which default to empty strings - in case the lookup fails. - """ - glibc_version = glibc_version_string() - if glibc_version is None: - return ("", "") - else: - return ("glibc", glibc_version) diff --git a/env/lib/python2.7/site-packages/setuptools/glibc.pyc b/env/lib/python2.7/site-packages/setuptools/glibc.pyc deleted file mode 100644 index 1ff09ada..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/glibc.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/glob.pyc b/env/lib/python2.7/site-packages/setuptools/glob.pyc deleted file mode 100644 index 3487086d..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/glob.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/launch.pyc b/env/lib/python2.7/site-packages/setuptools/launch.pyc deleted file mode 100644 index eb7f202d..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/launch.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/lib2to3_ex.pyc b/env/lib/python2.7/site-packages/setuptools/lib2to3_ex.pyc deleted file mode 100644 index 9690868a..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/lib2to3_ex.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/monkey.pyc b/env/lib/python2.7/site-packages/setuptools/monkey.pyc deleted file mode 100644 index abd803f7..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/monkey.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/msvc.py b/env/lib/python2.7/site-packages/setuptools/msvc.py deleted file mode 100644 index b9c472f1..00000000 --- a/env/lib/python2.7/site-packages/setuptools/msvc.py +++ /dev/null @@ -1,1301 +0,0 @@ -""" -Improved support for Microsoft Visual C++ compilers. 
- -Known supported compilers: --------------------------- -Microsoft Visual C++ 9.0: - Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) - Microsoft Windows SDK 6.1 (x86, x64, ia64) - Microsoft Windows SDK 7.0 (x86, x64, ia64) - -Microsoft Visual C++ 10.0: - Microsoft Windows SDK 7.1 (x86, x64, ia64) - -Microsoft Visual C++ 14.0: - Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) - Microsoft Visual Studio 2017 (x86, x64, arm, arm64) - Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) -""" - -import os -import sys -import platform -import itertools -import distutils.errors -from setuptools.extern.packaging.version import LegacyVersion - -from setuptools.extern.six.moves import filterfalse - -from .monkey import get_unpatched - -if platform.system() == 'Windows': - from setuptools.extern.six.moves import winreg - safe_env = os.environ -else: - """ - Mock winreg and environ so the module can be imported - on this platform. - """ - - class winreg: - HKEY_USERS = None - HKEY_CURRENT_USER = None - HKEY_LOCAL_MACHINE = None - HKEY_CLASSES_ROOT = None - - safe_env = dict() - -_msvc9_suppress_errors = ( - # msvc9compiler isn't available on some platforms - ImportError, - - # msvc9compiler raises DistutilsPlatformError in some - # environments. See #1118. - distutils.errors.DistutilsPlatformError, -) - -try: - from distutils.msvc9compiler import Reg -except _msvc9_suppress_errors: - pass - - -def msvc9_find_vcvarsall(version): - """ - Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone - compiler build for Python (VCForPython). Fall back to original behavior - when the standalone compiler is not available. - - Redirect the path of "vcvarsall.bat". - - Known supported compilers - ------------------------- - Microsoft Visual C++ 9.0: - Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) - - Parameters - ---------- - version: float - Required Microsoft Visual C++ version. - - Return - ------ - vcvarsall.bat path: str - """ - VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' - key = VC_BASE % ('', version) - try: - # Per-user installs register the compiler path here - productdir = Reg.get_value(key, "installdir") - except KeyError: - try: - # All-user installs on a 64-bit system register here - key = VC_BASE % ('Wow6432Node\\', version) - productdir = Reg.get_value(key, "installdir") - except KeyError: - productdir = None - - if productdir: - vcvarsall = os.path.os.path.join(productdir, "vcvarsall.bat") - if os.path.isfile(vcvarsall): - return vcvarsall - - return get_unpatched(msvc9_find_vcvarsall)(version) - - -def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): - """ - Patched "distutils.msvc9compiler.query_vcvarsall" for support extra - compilers. - - Set environment without use of "vcvarsall.bat". - - Known supported compilers - ------------------------- - Microsoft Visual C++ 9.0: - Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) - Microsoft Windows SDK 6.1 (x86, x64, ia64) - Microsoft Windows SDK 7.0 (x86, x64, ia64) - - Microsoft Visual C++ 10.0: - Microsoft Windows SDK 7.1 (x86, x64, ia64) - - Parameters - ---------- - ver: float - Required Microsoft Visual C++ version. - arch: str - Target architecture. 
- - Return - ------ - environment: dict - """ - # Try to get environement from vcvarsall.bat (Classical way) - try: - orig = get_unpatched(msvc9_query_vcvarsall) - return orig(ver, arch, *args, **kwargs) - except distutils.errors.DistutilsPlatformError: - # Pass error if Vcvarsall.bat is missing - pass - except ValueError: - # Pass error if environment not set after executing vcvarsall.bat - pass - - # If error, try to set environment directly - try: - return EnvironmentInfo(arch, ver).return_env() - except distutils.errors.DistutilsPlatformError as exc: - _augment_exception(exc, ver, arch) - raise - - -def msvc14_get_vc_env(plat_spec): - """ - Patched "distutils._msvccompiler._get_vc_env" for support extra - compilers. - - Set environment without use of "vcvarsall.bat". - - Known supported compilers - ------------------------- - Microsoft Visual C++ 14.0: - Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) - Microsoft Visual Studio 2017 (x86, x64, arm, arm64) - Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) - - Parameters - ---------- - plat_spec: str - Target architecture. - - Return - ------ - environment: dict - """ - # Try to get environment from vcvarsall.bat (Classical way) - try: - return get_unpatched(msvc14_get_vc_env)(plat_spec) - except distutils.errors.DistutilsPlatformError: - # Pass error Vcvarsall.bat is missing - pass - - # If error, try to set environment directly - try: - return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env() - except distutils.errors.DistutilsPlatformError as exc: - _augment_exception(exc, 14.0) - raise - - -def msvc14_gen_lib_options(*args, **kwargs): - """ - Patched "distutils._msvccompiler.gen_lib_options" for fix - compatibility between "numpy.distutils" and "distutils._msvccompiler" - (for Numpy < 1.11.2) - """ - if "numpy.distutils" in sys.modules: - import numpy as np - if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'): - return np.distutils.ccompiler.gen_lib_options(*args, **kwargs) - return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs) - - -def _augment_exception(exc, version, arch=''): - """ - Add details to the exception message to help guide the user - as to what action will resolve it. - """ - # Error if MSVC++ directory not found or environment not set - message = exc.args[0] - - if "vcvarsall" in message.lower() or "visual c" in message.lower(): - # Special error message if MSVC++ not installed - tmpl = 'Microsoft Visual C++ {version:0.1f} is required.' - message = tmpl.format(**locals()) - msdownload = 'www.microsoft.com/download/details.aspx?id=%d' - if version == 9.0: - if arch.lower().find('ia64') > -1: - # For VC++ 9.0, if IA64 support is needed, redirect user - # to Windows SDK 7.0 - message += ' Get it with "Microsoft Windows SDK 7.0": ' - message += msdownload % 3138 - else: - # For VC++ 9.0 redirect user to Vc++ for Python 2.7 : - # This redirection link is maintained by Microsoft. - # Contact vspython@microsoft.com if it needs updating. - message += ' Get it from http://aka.ms/vcpython27' - elif version == 10.0: - # For VC++ 10.0 Redirect user to Windows SDK 7.1 - message += ' Get it with "Microsoft Windows SDK 7.1": ' - message += msdownload % 8279 - elif version >= 14.0: - # For VC++ 14.0 Redirect user to Visual C++ Build Tools - message += (' Get it with "Microsoft Visual C++ Build Tools": ' - r'https://visualstudio.microsoft.com/downloads/') - - exc.args = (message, ) - - -class PlatformInfo: - """ - Current and Target Architectures informations. 
- - Parameters - ---------- - arch: str - Target architecture. - """ - current_cpu = safe_env.get('processor_architecture', '').lower() - - def __init__(self, arch): - self.arch = arch.lower().replace('x64', 'amd64') - - @property - def target_cpu(self): - return self.arch[self.arch.find('_') + 1:] - - def target_is_x86(self): - return self.target_cpu == 'x86' - - def current_is_x86(self): - return self.current_cpu == 'x86' - - def current_dir(self, hidex86=False, x64=False): - """ - Current platform specific subfolder. - - Parameters - ---------- - hidex86: bool - return '' and not '\x86' if architecture is x86. - x64: bool - return '\x64' and not '\amd64' if architecture is amd64. - - Return - ------ - subfolder: str - '\target', or '' (see hidex86 parameter) - """ - return ( - '' if (self.current_cpu == 'x86' and hidex86) else - r'\x64' if (self.current_cpu == 'amd64' and x64) else - r'\%s' % self.current_cpu - ) - - def target_dir(self, hidex86=False, x64=False): - r""" - Target platform specific subfolder. - - Parameters - ---------- - hidex86: bool - return '' and not '\x86' if architecture is x86. - x64: bool - return '\x64' and not '\amd64' if architecture is amd64. - - Return - ------ - subfolder: str - '\current', or '' (see hidex86 parameter) - """ - return ( - '' if (self.target_cpu == 'x86' and hidex86) else - r'\x64' if (self.target_cpu == 'amd64' and x64) else - r'\%s' % self.target_cpu - ) - - def cross_dir(self, forcex86=False): - r""" - Cross platform specific subfolder. - - Parameters - ---------- - forcex86: bool - Use 'x86' as current architecture even if current acritecture is - not x86. - - Return - ------ - subfolder: str - '' if target architecture is current architecture, - '\current_target' if not. - """ - current = 'x86' if forcex86 else self.current_cpu - return ( - '' if self.target_cpu == current else - self.target_dir().replace('\\', '\\%s_' % current) - ) - - -class RegistryInfo: - """ - Microsoft Visual Studio related registry informations. - - Parameters - ---------- - platform_info: PlatformInfo - "PlatformInfo" instance. - """ - HKEYS = (winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT) - - def __init__(self, platform_info): - self.pi = platform_info - - @property - def visualstudio(self): - """ - Microsoft Visual Studio root registry key. - """ - return 'VisualStudio' - - @property - def sxs(self): - """ - Microsoft Visual Studio SxS registry key. - """ - return os.path.join(self.visualstudio, 'SxS') - - @property - def vc(self): - """ - Microsoft Visual C++ VC7 registry key. - """ - return os.path.join(self.sxs, 'VC7') - - @property - def vs(self): - """ - Microsoft Visual Studio VS7 registry key. - """ - return os.path.join(self.sxs, 'VS7') - - @property - def vc_for_python(self): - """ - Microsoft Visual C++ for Python registry key. - """ - return r'DevDiv\VCForPython' - - @property - def microsoft_sdk(self): - """ - Microsoft SDK registry key. - """ - return 'Microsoft SDKs' - - @property - def windows_sdk(self): - """ - Microsoft Windows/Platform SDK registry key. - """ - return os.path.join(self.microsoft_sdk, 'Windows') - - @property - def netfx_sdk(self): - """ - Microsoft .NET Framework SDK registry key. - """ - return os.path.join(self.microsoft_sdk, 'NETFXSDK') - - @property - def windows_kits_roots(self): - """ - Microsoft Windows Kits Roots registry key. 
- """ - return r'Windows Kits\Installed Roots' - - def microsoft(self, key, x86=False): - """ - Return key in Microsoft software registry. - - Parameters - ---------- - key: str - Registry key path where look. - x86: str - Force x86 software registry. - - Return - ------ - str: value - """ - node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node' - return os.path.join('Software', node64, 'Microsoft', key) - - def lookup(self, key, name): - """ - Look for values in registry in Microsoft software registry. - - Parameters - ---------- - key: str - Registry key path where look. - name: str - Value name to find. - - Return - ------ - str: value - """ - KEY_READ = winreg.KEY_READ - openkey = winreg.OpenKey - ms = self.microsoft - for hkey in self.HKEYS: - try: - bkey = openkey(hkey, ms(key), 0, KEY_READ) - except (OSError, IOError): - if not self.pi.current_is_x86(): - try: - bkey = openkey(hkey, ms(key, True), 0, KEY_READ) - except (OSError, IOError): - continue - else: - continue - try: - return winreg.QueryValueEx(bkey, name)[0] - except (OSError, IOError): - pass - - -class SystemInfo: - """ - Microsoft Windows and Visual Studio related system inormations. - - Parameters - ---------- - registry_info: RegistryInfo - "RegistryInfo" instance. - vc_ver: float - Required Microsoft Visual C++ version. - """ - - # Variables and properties in this class use originals CamelCase variables - # names from Microsoft source files for more easy comparaison. - WinDir = safe_env.get('WinDir', '') - ProgramFiles = safe_env.get('ProgramFiles', '') - ProgramFilesx86 = safe_env.get('ProgramFiles(x86)', ProgramFiles) - - def __init__(self, registry_info, vc_ver=None): - self.ri = registry_info - self.pi = self.ri.pi - self.vc_ver = vc_ver or self._find_latest_available_vc_ver() - - def _find_latest_available_vc_ver(self): - try: - return self.find_available_vc_vers()[-1] - except IndexError: - err = 'No Microsoft Visual C++ version found' - raise distutils.errors.DistutilsPlatformError(err) - - def find_available_vc_vers(self): - """ - Find all available Microsoft Visual C++ versions. - """ - ms = self.ri.microsoft - vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs) - vc_vers = [] - for hkey in self.ri.HKEYS: - for key in vckeys: - try: - bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ) - except (OSError, IOError): - continue - subkeys, values, _ = winreg.QueryInfoKey(bkey) - for i in range(values): - try: - ver = float(winreg.EnumValue(bkey, i)[0]) - if ver not in vc_vers: - vc_vers.append(ver) - except ValueError: - pass - for i in range(subkeys): - try: - ver = float(winreg.EnumKey(bkey, i)) - if ver not in vc_vers: - vc_vers.append(ver) - except ValueError: - pass - return sorted(vc_vers) - - @property - def VSInstallDir(self): - """ - Microsoft Visual Studio directory. - """ - # Default path - name = 'Microsoft Visual Studio %0.1f' % self.vc_ver - default = os.path.join(self.ProgramFilesx86, name) - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vs, '%0.1f' % self.vc_ver) or default - - @property - def VCInstallDir(self): - """ - Microsoft Visual C++ directory. 
- """ - self.VSInstallDir - - guess_vc = self._guess_vc() or self._guess_vc_legacy() - - # Try to get "VC++ for Python" path from registry as default path - reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) - python_vc = self.ri.lookup(reg_path, 'installdir') - default_vc = os.path.join(python_vc, 'VC') if python_vc else guess_vc - - # Try to get path from registry, if fail use default path - path = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or default_vc - - if not os.path.isdir(path): - msg = 'Microsoft Visual C++ directory not found' - raise distutils.errors.DistutilsPlatformError(msg) - - return path - - def _guess_vc(self): - """ - Locate Visual C for 2017 - """ - if self.vc_ver <= 14.0: - return - - default = r'VC\Tools\MSVC' - guess_vc = os.path.join(self.VSInstallDir, default) - # Subdir with VC exact version as name - try: - vc_exact_ver = os.listdir(guess_vc)[-1] - return os.path.join(guess_vc, vc_exact_ver) - except (OSError, IOError, IndexError): - pass - - def _guess_vc_legacy(self): - """ - Locate Visual C for versions prior to 2017 - """ - default = r'Microsoft Visual Studio %0.1f\VC' % self.vc_ver - return os.path.join(self.ProgramFilesx86, default) - - @property - def WindowsSdkVersion(self): - """ - Microsoft Windows SDK versions for specified MSVC++ version. - """ - if self.vc_ver <= 9.0: - return ('7.0', '6.1', '6.0a') - elif self.vc_ver == 10.0: - return ('7.1', '7.0a') - elif self.vc_ver == 11.0: - return ('8.0', '8.0a') - elif self.vc_ver == 12.0: - return ('8.1', '8.1a') - elif self.vc_ver >= 14.0: - return ('10.0', '8.1') - - @property - def WindowsSdkLastVersion(self): - """ - Microsoft Windows SDK last version - """ - return self._use_last_dir_name(os.path.join( - self.WindowsSdkDir, 'lib')) - - @property - def WindowsSdkDir(self): - """ - Microsoft Windows SDK directory. - """ - sdkdir = '' - for ver in self.WindowsSdkVersion: - # Try to get it from registry - loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver) - sdkdir = self.ri.lookup(loc, 'installationfolder') - if sdkdir: - break - if not sdkdir or not os.path.isdir(sdkdir): - # Try to get "VC++ for Python" version from registry - path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) - install_base = self.ri.lookup(path, 'installdir') - if install_base: - sdkdir = os.path.join(install_base, 'WinSDK') - if not sdkdir or not os.path.isdir(sdkdir): - # If fail, use default new path - for ver in self.WindowsSdkVersion: - intver = ver[:ver.rfind('.')] - path = r'Microsoft SDKs\Windows Kits\%s' % (intver) - d = os.path.join(self.ProgramFiles, path) - if os.path.isdir(d): - sdkdir = d - if not sdkdir or not os.path.isdir(sdkdir): - # If fail, use default old path - for ver in self.WindowsSdkVersion: - path = r'Microsoft SDKs\Windows\v%s' % ver - d = os.path.join(self.ProgramFiles, path) - if os.path.isdir(d): - sdkdir = d - if not sdkdir: - # If fail, use Platform SDK - sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK') - return sdkdir - - @property - def WindowsSDKExecutablePath(self): - """ - Microsoft Windows SDK executable directory. 
- """ - # Find WinSDK NetFx Tools registry dir name - if self.vc_ver <= 11.0: - netfxver = 35 - arch = '' - else: - netfxver = 40 - hidex86 = True if self.vc_ver <= 12.0 else False - arch = self.pi.current_dir(x64=True, hidex86=hidex86) - fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-')) - - # liste all possibles registry paths - regpaths = [] - if self.vc_ver >= 14.0: - for ver in self.NetFxSdkVersion: - regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)] - - for ver in self.WindowsSdkVersion: - regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)] - - # Return installation folder from the more recent path - for path in regpaths: - execpath = self.ri.lookup(path, 'installationfolder') - if execpath: - break - return execpath - - @property - def FSharpInstallDir(self): - """ - Microsoft Visual F# directory. - """ - path = r'%0.1f\Setup\F#' % self.vc_ver - path = os.path.join(self.ri.visualstudio, path) - return self.ri.lookup(path, 'productdir') or '' - - @property - def UniversalCRTSdkDir(self): - """ - Microsoft Universal CRT SDK directory. - """ - # Set Kit Roots versions for specified MSVC++ version - if self.vc_ver >= 14.0: - vers = ('10', '81') - else: - vers = () - - # Find path of the more recent Kit - for ver in vers: - sdkdir = self.ri.lookup(self.ri.windows_kits_roots, - 'kitsroot%s' % ver) - if sdkdir: - break - return sdkdir or '' - - @property - def UniversalCRTSdkLastVersion(self): - """ - Microsoft Universal C Runtime SDK last version - """ - return self._use_last_dir_name(os.path.join( - self.UniversalCRTSdkDir, 'lib')) - - @property - def NetFxSdkVersion(self): - """ - Microsoft .NET Framework SDK versions. - """ - # Set FxSdk versions for specified MSVC++ version - if self.vc_ver >= 14.0: - return ('4.6.1', '4.6') - else: - return () - - @property - def NetFxSdkDir(self): - """ - Microsoft .NET Framework SDK directory. - """ - for ver in self.NetFxSdkVersion: - loc = os.path.join(self.ri.netfx_sdk, ver) - sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') - if sdkdir: - break - return sdkdir or '' - - @property - def FrameworkDir32(self): - """ - Microsoft .NET Framework 32bit directory. - """ - # Default path - guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework') - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw - - @property - def FrameworkDir64(self): - """ - Microsoft .NET Framework 64bit directory. - """ - # Default path - guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64') - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw - - @property - def FrameworkVersion32(self): - """ - Microsoft .NET Framework 32bit versions. - """ - return self._find_dot_net_versions(32) - - @property - def FrameworkVersion64(self): - """ - Microsoft .NET Framework 64bit versions. - """ - return self._find_dot_net_versions(64) - - def _find_dot_net_versions(self, bits): - """ - Find Microsoft .NET Framework versions. - - Parameters - ---------- - bits: int - Platform number of bits: 32 or 64. 
- """ - # Find actual .NET version in registry - reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits) - dot_net_dir = getattr(self, 'FrameworkDir%d' % bits) - ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or '' - - # Set .NET versions for specified MSVC++ version - if self.vc_ver >= 12.0: - frameworkver = (ver, 'v4.0') - elif self.vc_ver >= 10.0: - frameworkver = ('v4.0.30319' if ver.lower()[:2] != 'v4' else ver, - 'v3.5') - elif self.vc_ver == 9.0: - frameworkver = ('v3.5', 'v2.0.50727') - if self.vc_ver == 8.0: - frameworkver = ('v3.0', 'v2.0.50727') - return frameworkver - - def _use_last_dir_name(self, path, prefix=''): - """ - Return name of the last dir in path or '' if no dir found. - - Parameters - ---------- - path: str - Use dirs in this path - prefix: str - Use only dirs startings by this prefix - """ - matching_dirs = ( - dir_name - for dir_name in reversed(os.listdir(path)) - if os.path.isdir(os.path.join(path, dir_name)) and - dir_name.startswith(prefix) - ) - return next(matching_dirs, None) or '' - - -class EnvironmentInfo: - """ - Return environment variables for specified Microsoft Visual C++ version - and platform : Lib, Include, Path and libpath. - - This function is compatible with Microsoft Visual C++ 9.0 to 14.0. - - Script created by analysing Microsoft environment configuration files like - "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ... - - Parameters - ---------- - arch: str - Target architecture. - vc_ver: float - Required Microsoft Visual C++ version. If not set, autodetect the last - version. - vc_min_ver: float - Minimum Microsoft Visual C++ version. - """ - - # Variables and properties in this class use originals CamelCase variables - # names from Microsoft source files for more easy comparaison. - - def __init__(self, arch, vc_ver=None, vc_min_ver=0): - self.pi = PlatformInfo(arch) - self.ri = RegistryInfo(self.pi) - self.si = SystemInfo(self.ri, vc_ver) - - if self.vc_ver < vc_min_ver: - err = 'No suitable Microsoft Visual C++ version found' - raise distutils.errors.DistutilsPlatformError(err) - - @property - def vc_ver(self): - """ - Microsoft Visual C++ version. 
- """ - return self.si.vc_ver - - @property - def VSTools(self): - """ - Microsoft Visual Studio Tools - """ - paths = [r'Common7\IDE', r'Common7\Tools'] - - if self.vc_ver >= 14.0: - arch_subdir = self.pi.current_dir(hidex86=True, x64=True) - paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow'] - paths += [r'Team Tools\Performance Tools'] - paths += [r'Team Tools\Performance Tools%s' % arch_subdir] - - return [os.path.join(self.si.VSInstallDir, path) for path in paths] - - @property - def VCIncludes(self): - """ - Microsoft Visual C++ & Microsoft Foundation Class Includes - """ - return [os.path.join(self.si.VCInstallDir, 'Include'), - os.path.join(self.si.VCInstallDir, r'ATLMFC\Include')] - - @property - def VCLibraries(self): - """ - Microsoft Visual C++ & Microsoft Foundation Class Libraries - """ - if self.vc_ver >= 15.0: - arch_subdir = self.pi.target_dir(x64=True) - else: - arch_subdir = self.pi.target_dir(hidex86=True) - paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir] - - if self.vc_ver >= 14.0: - paths += [r'Lib\store%s' % arch_subdir] - - return [os.path.join(self.si.VCInstallDir, path) for path in paths] - - @property - def VCStoreRefs(self): - """ - Microsoft Visual C++ store references Libraries - """ - if self.vc_ver < 14.0: - return [] - return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')] - - @property - def VCTools(self): - """ - Microsoft Visual C++ Tools - """ - si = self.si - tools = [os.path.join(si.VCInstallDir, 'VCPackages')] - - forcex86 = True if self.vc_ver <= 10.0 else False - arch_subdir = self.pi.cross_dir(forcex86) - if arch_subdir: - tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)] - - if self.vc_ver == 14.0: - path = 'Bin%s' % self.pi.current_dir(hidex86=True) - tools += [os.path.join(si.VCInstallDir, path)] - - elif self.vc_ver >= 15.0: - host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else - r'bin\HostX64%s') - tools += [os.path.join( - si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))] - - if self.pi.current_cpu != self.pi.target_cpu: - tools += [os.path.join( - si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))] - - else: - tools += [os.path.join(si.VCInstallDir, 'Bin')] - - return tools - - @property - def OSLibraries(self): - """ - Microsoft Windows SDK Libraries - """ - if self.vc_ver <= 10.0: - arch_subdir = self.pi.target_dir(hidex86=True, x64=True) - return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] - - else: - arch_subdir = self.pi.target_dir(x64=True) - lib = os.path.join(self.si.WindowsSdkDir, 'lib') - libver = self._sdk_subdir - return [os.path.join(lib, '%sum%s' % (libver , arch_subdir))] - - @property - def OSIncludes(self): - """ - Microsoft Windows SDK Include - """ - include = os.path.join(self.si.WindowsSdkDir, 'include') - - if self.vc_ver <= 10.0: - return [include, os.path.join(include, 'gl')] - - else: - if self.vc_ver >= 14.0: - sdkver = self._sdk_subdir - else: - sdkver = '' - return [os.path.join(include, '%sshared' % sdkver), - os.path.join(include, '%sum' % sdkver), - os.path.join(include, '%swinrt' % sdkver)] - - @property - def OSLibpath(self): - """ - Microsoft Windows SDK Libraries Paths - """ - ref = os.path.join(self.si.WindowsSdkDir, 'References') - libpath = [] - - if self.vc_ver <= 9.0: - libpath += self.OSLibraries - - if self.vc_ver >= 11.0: - libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')] - - if self.vc_ver >= 14.0: - libpath += [ - ref, - os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'), - 
os.path.join( - ref, - 'Windows.Foundation.UniversalApiContract', - '1.0.0.0', - ), - os.path.join( - ref, - 'Windows.Foundation.FoundationContract', - '1.0.0.0', - ), - os.path.join( - ref, - 'Windows.Networking.Connectivity.WwanContract', - '1.0.0.0', - ), - os.path.join( - self.si.WindowsSdkDir, - 'ExtensionSDKs', - 'Microsoft.VCLibs', - '%0.1f' % self.vc_ver, - 'References', - 'CommonConfiguration', - 'neutral', - ), - ] - return libpath - - @property - def SdkTools(self): - """ - Microsoft Windows SDK Tools - """ - return list(self._sdk_tools()) - - def _sdk_tools(self): - """ - Microsoft Windows SDK Tools paths generator - """ - if self.vc_ver < 15.0: - bin_dir = 'Bin' if self.vc_ver <= 11.0 else r'Bin\x86' - yield os.path.join(self.si.WindowsSdkDir, bin_dir) - - if not self.pi.current_is_x86(): - arch_subdir = self.pi.current_dir(x64=True) - path = 'Bin%s' % arch_subdir - yield os.path.join(self.si.WindowsSdkDir, path) - - if self.vc_ver == 10.0 or self.vc_ver == 11.0: - if self.pi.target_is_x86(): - arch_subdir = '' - else: - arch_subdir = self.pi.current_dir(hidex86=True, x64=True) - path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir - yield os.path.join(self.si.WindowsSdkDir, path) - - elif self.vc_ver >= 15.0: - path = os.path.join(self.si.WindowsSdkDir, 'Bin') - arch_subdir = self.pi.current_dir(x64=True) - sdkver = self.si.WindowsSdkLastVersion - yield os.path.join(path, '%s%s' % (sdkver, arch_subdir)) - - if self.si.WindowsSDKExecutablePath: - yield self.si.WindowsSDKExecutablePath - - @property - def _sdk_subdir(self): - """ - Microsoft Windows SDK version subdir - """ - ucrtver = self.si.WindowsSdkLastVersion - return ('%s\\' % ucrtver) if ucrtver else '' - - @property - def SdkSetup(self): - """ - Microsoft Windows SDK Setup - """ - if self.vc_ver > 9.0: - return [] - - return [os.path.join(self.si.WindowsSdkDir, 'Setup')] - - @property - def FxTools(self): - """ - Microsoft .NET Framework Tools - """ - pi = self.pi - si = self.si - - if self.vc_ver <= 10.0: - include32 = True - include64 = not pi.target_is_x86() and not pi.current_is_x86() - else: - include32 = pi.target_is_x86() or pi.current_is_x86() - include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64' - - tools = [] - if include32: - tools += [os.path.join(si.FrameworkDir32, ver) - for ver in si.FrameworkVersion32] - if include64: - tools += [os.path.join(si.FrameworkDir64, ver) - for ver in si.FrameworkVersion64] - return tools - - @property - def NetFxSDKLibraries(self): - """ - Microsoft .Net Framework SDK Libraries - """ - if self.vc_ver < 14.0 or not self.si.NetFxSdkDir: - return [] - - arch_subdir = self.pi.target_dir(x64=True) - return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)] - - @property - def NetFxSDKIncludes(self): - """ - Microsoft .Net Framework SDK Includes - """ - if self.vc_ver < 14.0 or not self.si.NetFxSdkDir: - return [] - - return [os.path.join(self.si.NetFxSdkDir, r'include\um')] - - @property - def VsTDb(self): - """ - Microsoft Visual Studio Team System Database - """ - return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')] - - @property - def MSBuild(self): - """ - Microsoft Build Engine - """ - if self.vc_ver < 12.0: - return [] - elif self.vc_ver < 15.0: - base_path = self.si.ProgramFilesx86 - arch_subdir = self.pi.current_dir(hidex86=True) - else: - base_path = self.si.VSInstallDir - arch_subdir = '' - - path = r'MSBuild\%0.1f\bin%s' % (self.vc_ver, arch_subdir) - build = [os.path.join(base_path, path)] - - if self.vc_ver >= 15.0: - # Add Roslyn C# & 
Visual Basic Compiler - build += [os.path.join(base_path, path, 'Roslyn')] - - return build - - @property - def HTMLHelpWorkshop(self): - """ - Microsoft HTML Help Workshop - """ - if self.vc_ver < 11.0: - return [] - - return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')] - - @property - def UCRTLibraries(self): - """ - Microsoft Universal C Runtime SDK Libraries - """ - if self.vc_ver < 14.0: - return [] - - arch_subdir = self.pi.target_dir(x64=True) - lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib') - ucrtver = self._ucrt_subdir - return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))] - - @property - def UCRTIncludes(self): - """ - Microsoft Universal C Runtime SDK Include - """ - if self.vc_ver < 14.0: - return [] - - include = os.path.join(self.si.UniversalCRTSdkDir, 'include') - return [os.path.join(include, '%sucrt' % self._ucrt_subdir)] - - @property - def _ucrt_subdir(self): - """ - Microsoft Universal C Runtime SDK version subdir - """ - ucrtver = self.si.UniversalCRTSdkLastVersion - return ('%s\\' % ucrtver) if ucrtver else '' - - @property - def FSharp(self): - """ - Microsoft Visual F# - """ - if self.vc_ver < 11.0 and self.vc_ver > 12.0: - return [] - - return self.si.FSharpInstallDir - - @property - def VCRuntimeRedist(self): - """ - Microsoft Visual C++ runtime redistribuable dll - """ - arch_subdir = self.pi.target_dir(x64=True) - if self.vc_ver < 15: - redist_path = self.si.VCInstallDir - vcruntime = 'redist%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll' - else: - redist_path = self.si.VCInstallDir.replace('\\Tools', '\\Redist') - vcruntime = 'onecore%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll' - - # Visual Studio 2017 is still Visual C++ 14.0 - dll_ver = 14.0 if self.vc_ver == 15 else self.vc_ver - - vcruntime = vcruntime % (arch_subdir, self.vc_ver, dll_ver) - return os.path.join(redist_path, vcruntime) - - def return_env(self, exists=True): - """ - Return environment dict. - - Parameters - ---------- - exists: bool - It True, only return existing paths. - """ - env = dict( - include=self._build_paths('include', - [self.VCIncludes, - self.OSIncludes, - self.UCRTIncludes, - self.NetFxSDKIncludes], - exists), - lib=self._build_paths('lib', - [self.VCLibraries, - self.OSLibraries, - self.FxTools, - self.UCRTLibraries, - self.NetFxSDKLibraries], - exists), - libpath=self._build_paths('libpath', - [self.VCLibraries, - self.FxTools, - self.VCStoreRefs, - self.OSLibpath], - exists), - path=self._build_paths('path', - [self.VCTools, - self.VSTools, - self.VsTDb, - self.SdkTools, - self.SdkSetup, - self.FxTools, - self.MSBuild, - self.HTMLHelpWorkshop, - self.FSharp], - exists), - ) - if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist): - env['py_vcruntime_redist'] = self.VCRuntimeRedist - return env - - def _build_paths(self, name, spec_path_lists, exists): - """ - Given an environment variable name and specified paths, - return a pathsep-separated string of paths containing - unique, extant, directories from those paths and from - the environment variable. Raise an error if no paths - are resolved. 
- """ - # flatten spec_path_lists - spec_paths = itertools.chain.from_iterable(spec_path_lists) - env_paths = safe_env.get(name, '').split(os.pathsep) - paths = itertools.chain(spec_paths, env_paths) - extant_paths = list(filter(os.path.isdir, paths)) if exists else paths - if not extant_paths: - msg = "%s environment variable is empty" % name.upper() - raise distutils.errors.DistutilsPlatformError(msg) - unique_paths = self._unique_everseen(extant_paths) - return os.pathsep.join(unique_paths) - - # from Python docs - def _unique_everseen(self, iterable, key=None): - """ - List unique elements, preserving order. - Remember all elements ever seen. - - _unique_everseen('AAAABBBCCDAABBB') --> A B C D - - _unique_everseen('ABBCcAD', str.lower) --> A B C D - """ - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element diff --git a/env/lib/python2.7/site-packages/setuptools/msvc.pyc b/env/lib/python2.7/site-packages/setuptools/msvc.pyc deleted file mode 100644 index e57ab7eb..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/msvc.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/namespaces.pyc b/env/lib/python2.7/site-packages/setuptools/namespaces.pyc deleted file mode 100644 index 4666eb49..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/namespaces.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/package_index.pyc b/env/lib/python2.7/site-packages/setuptools/package_index.pyc deleted file mode 100644 index dd3ee92b..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/package_index.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/pep425tags.py b/env/lib/python2.7/site-packages/setuptools/pep425tags.py deleted file mode 100644 index 48745a29..00000000 --- a/env/lib/python2.7/site-packages/setuptools/pep425tags.py +++ /dev/null @@ -1,319 +0,0 @@ -# This file originally from pip: -# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/pep425tags.py -"""Generate and work with PEP 425 Compatibility Tags.""" -from __future__ import absolute_import - -import distutils.util -from distutils import log -import platform -import re -import sys -import sysconfig -import warnings -from collections import OrderedDict - -from .extern import six - -from . 
import glibc - -_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') - - -def get_config_var(var): - try: - return sysconfig.get_config_var(var) - except IOError as e: # Issue #1074 - warnings.warn("{}".format(e), RuntimeWarning) - return None - - -def get_abbr_impl(): - """Return abbreviated implementation name.""" - if hasattr(sys, 'pypy_version_info'): - pyimpl = 'pp' - elif sys.platform.startswith('java'): - pyimpl = 'jy' - elif sys.platform == 'cli': - pyimpl = 'ip' - else: - pyimpl = 'cp' - return pyimpl - - -def get_impl_ver(): - """Return implementation version.""" - impl_ver = get_config_var("py_version_nodot") - if not impl_ver or get_abbr_impl() == 'pp': - impl_ver = ''.join(map(str, get_impl_version_info())) - return impl_ver - - -def get_impl_version_info(): - """Return sys.version_info-like tuple for use in decrementing the minor - version.""" - if get_abbr_impl() == 'pp': - # as per https://github.com/pypa/pip/issues/2882 - return (sys.version_info[0], sys.pypy_version_info.major, - sys.pypy_version_info.minor) - else: - return sys.version_info[0], sys.version_info[1] - - -def get_impl_tag(): - """ - Returns the Tag for this specific implementation. - """ - return "{}{}".format(get_abbr_impl(), get_impl_ver()) - - -def get_flag(var, fallback, expected=True, warn=True): - """Use a fallback method for determining SOABI flags if the needed config - var is unset or unavailable.""" - val = get_config_var(var) - if val is None: - if warn: - log.debug("Config variable '%s' is unset, Python ABI tag may " - "be incorrect", var) - return fallback() - return val == expected - - -def get_abi_tag(): - """Return the ABI tag based on SOABI (if available) or emulate SOABI - (CPython 2, PyPy).""" - soabi = get_config_var('SOABI') - impl = get_abbr_impl() - if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'): - d = '' - m = '' - u = '' - if get_flag('Py_DEBUG', - lambda: hasattr(sys, 'gettotalrefcount'), - warn=(impl == 'cp')): - d = 'd' - if get_flag('WITH_PYMALLOC', - lambda: impl == 'cp', - warn=(impl == 'cp')): - m = 'm' - if get_flag('Py_UNICODE_SIZE', - lambda: sys.maxunicode == 0x10ffff, - expected=4, - warn=(impl == 'cp' and - six.PY2)) \ - and six.PY2: - u = 'u' - abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) - elif soabi and soabi.startswith('cpython-'): - abi = 'cp' + soabi.split('-')[1] - elif soabi: - abi = soabi.replace('.', '_').replace('-', '_') - else: - abi = None - return abi - - -def _is_running_32bit(): - return sys.maxsize == 2147483647 - - -def get_platform(): - """Return our platform name 'win32', 'linux_x86_64'""" - if sys.platform == 'darwin': - # distutils.util.get_platform() returns the release based on the value - # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may - # be significantly older than the user's current machine. - release, _, machine = platform.mac_ver() - split_ver = release.split('.') - - if machine == "x86_64" and _is_running_32bit(): - machine = "i386" - elif machine == "ppc64" and _is_running_32bit(): - machine = "ppc" - - return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine) - - # XXX remove distutils dependency - result = distutils.util.get_platform().replace('.', '_').replace('-', '_') - if result == "linux_x86_64" and _is_running_32bit(): - # 32 bit Python program (running on a 64 bit Linux): pip should only - # install and run 32 bit compiled extensions in that case. 
- result = "linux_i686" - - return result - - -def is_manylinux1_compatible(): - # Only Linux, and only x86-64 / i686 - if get_platform() not in {"linux_x86_64", "linux_i686"}: - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux1_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 5 uses glibc 2.5. - return glibc.have_compatible_glibc(2, 5) - - -def get_darwin_arches(major, minor, machine): - """Return a list of supported arches (including group arches) for - the given major, minor and machine architecture of a macOS machine. - """ - arches = [] - - def _supports_arch(major, minor, arch): - # Looking at the application support for macOS versions in the chart - # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears - # our timeline looks roughly like: - # - # 10.0 - Introduces ppc support. - # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64 - # and x86_64 support is CLI only, and cannot be used for GUI - # applications. - # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications. - # 10.6 - Drops support for ppc64 - # 10.7 - Drops support for ppc - # - # Given that we do not know if we're installing a CLI or a GUI - # application, we must be conservative and assume it might be a GUI - # application and behave as if ppc64 and x86_64 support did not occur - # until 10.5. - # - # Note: The above information is taken from the "Application support" - # column in the chart not the "Processor support" since I believe - # that we care about what instruction sets an application can use - # not which processors the OS supports. - if arch == 'ppc': - return (major, minor) <= (10, 5) - if arch == 'ppc64': - return (major, minor) == (10, 5) - if arch == 'i386': - return (major, minor) >= (10, 4) - if arch == 'x86_64': - return (major, minor) >= (10, 5) - if arch in groups: - for garch in groups[arch]: - if _supports_arch(major, minor, garch): - return True - return False - - groups = OrderedDict([ - ("fat", ("i386", "ppc")), - ("intel", ("x86_64", "i386")), - ("fat64", ("x86_64", "ppc64")), - ("fat32", ("x86_64", "i386", "ppc")), - ]) - - if _supports_arch(major, minor, machine): - arches.append(machine) - - for garch in groups: - if machine in groups[garch] and _supports_arch(major, minor, garch): - arches.append(garch) - - arches.append('universal') - - return arches - - -def get_supported(versions=None, noarch=False, platform=None, - impl=None, abi=None): - """Return a list of supported tags for each version specified in - `versions`. - - :param versions: a list of string versions, of the form ["33", "32"], - or None. The first version will be assumed to support our ABI. - :param platform: specify the exact platform you want valid - tags for, or None. If None, use the local system platform. - :param impl: specify the exact implementation you want valid - tags for, or None. If None, use the local interpreter impl. - :param abi: specify the exact abi you want valid - tags for, or None. If None, use the local interpreter abi. - """ - supported = [] - - # Versions must be given with respect to the preference - if versions is None: - versions = [] - version_info = get_impl_version_info() - major = version_info[:-1] - # Support all previous minor Python versions. 
- for minor in range(version_info[-1], -1, -1): - versions.append(''.join(map(str, major + (minor,)))) - - impl = impl or get_abbr_impl() - - abis = [] - - abi = abi or get_abi_tag() - if abi: - abis[0:0] = [abi] - - abi3s = set() - import imp - for suffix in imp.get_suffixes(): - if suffix[0].startswith('.abi'): - abi3s.add(suffix[0].split('.', 2)[1]) - - abis.extend(sorted(list(abi3s))) - - abis.append('none') - - if not noarch: - arch = platform or get_platform() - if arch.startswith('macosx'): - # support macosx-10.6-intel on macosx-10.9-x86_64 - match = _osx_arch_pat.match(arch) - if match: - name, major, minor, actual_arch = match.groups() - tpl = '{}_{}_%i_%s'.format(name, major) - arches = [] - for m in reversed(range(int(minor) + 1)): - for a in get_darwin_arches(int(major), m, actual_arch): - arches.append(tpl % (m, a)) - else: - # arch pattern didn't match (?!) - arches = [arch] - elif platform is None and is_manylinux1_compatible(): - arches = [arch.replace('linux', 'manylinux1'), arch] - else: - arches = [arch] - - # Current version, current API (built specifically for our Python): - for abi in abis: - for arch in arches: - supported.append(('%s%s' % (impl, versions[0]), abi, arch)) - - # abi3 modules compatible with older version of Python - for version in versions[1:]: - # abi3 was introduced in Python 3.2 - if version in {'31', '30'}: - break - for abi in abi3s: # empty set if not Python 3 - for arch in arches: - supported.append(("%s%s" % (impl, version), abi, arch)) - - # Has binaries, does not use the Python API: - for arch in arches: - supported.append(('py%s' % (versions[0][0]), 'none', arch)) - - # No abi / arch, but requires our implementation: - supported.append(('%s%s' % (impl, versions[0]), 'none', 'any')) - # Tagged specifically as being cross-version compatible - # (with just the major version specified) - supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) - - # No abi / arch, generic Python - for i, version in enumerate(versions): - supported.append(('py%s' % (version,), 'none', 'any')) - if i == 0: - supported.append(('py%s' % (version[0]), 'none', 'any')) - - return supported - - -implementation_tag = get_impl_tag() diff --git a/env/lib/python2.7/site-packages/setuptools/pep425tags.pyc b/env/lib/python2.7/site-packages/setuptools/pep425tags.pyc deleted file mode 100644 index 6431ceb0..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/pep425tags.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/py27compat.py b/env/lib/python2.7/site-packages/setuptools/py27compat.py deleted file mode 100644 index 2985011b..00000000 --- a/env/lib/python2.7/site-packages/setuptools/py27compat.py +++ /dev/null @@ -1,28 +0,0 @@ -""" -Compatibility Support for Python 2.7 and earlier -""" - -import platform - -from setuptools.extern import six - - -def get_all_headers(message, key): - """ - Given an HTTPMessage, return all headers matching a given key. 
- """ - return message.get_all(key) - - -if six.PY2: - def get_all_headers(message, key): - return message.getheaders(key) - - -linux_py2_ascii = ( - platform.system() == 'Linux' and - six.PY2 -) - -rmtree_safe = str if linux_py2_ascii else lambda x: x -"""Workaround for http://bugs.python.org/issue24672""" diff --git a/env/lib/python2.7/site-packages/setuptools/py27compat.pyc b/env/lib/python2.7/site-packages/setuptools/py27compat.pyc deleted file mode 100644 index a547c25a..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/py27compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/py31compat.py b/env/lib/python2.7/site-packages/setuptools/py31compat.py deleted file mode 100644 index e1da7ee2..00000000 --- a/env/lib/python2.7/site-packages/setuptools/py31compat.py +++ /dev/null @@ -1,32 +0,0 @@ -__all__ = [] - -__metaclass__ = type - - -try: - # Python >=3.2 - from tempfile import TemporaryDirectory -except ImportError: - import shutil - import tempfile - - class TemporaryDirectory: - """ - Very simple temporary directory context manager. - Will try to delete afterward, but will also ignore OS and similar - errors on deletion. - """ - - def __init__(self, **kwargs): - self.name = None # Handle mkdtemp raising an exception - self.name = tempfile.mkdtemp(**kwargs) - - def __enter__(self): - return self.name - - def __exit__(self, exctype, excvalue, exctrace): - try: - shutil.rmtree(self.name, True) - except OSError: # removal errors are not the only possible - pass - self.name = None diff --git a/env/lib/python2.7/site-packages/setuptools/py31compat.pyc b/env/lib/python2.7/site-packages/setuptools/py31compat.pyc deleted file mode 100644 index a8830379..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/py31compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/py33compat.py b/env/lib/python2.7/site-packages/setuptools/py33compat.py deleted file mode 100644 index cb694436..00000000 --- a/env/lib/python2.7/site-packages/setuptools/py33compat.py +++ /dev/null @@ -1,59 +0,0 @@ -import dis -import array -import collections - -try: - import html -except ImportError: - html = None - -from setuptools.extern import six -from setuptools.extern.six.moves import html_parser - -__metaclass__ = type - -OpArg = collections.namedtuple('OpArg', 'opcode arg') - - -class Bytecode_compat: - def __init__(self, code): - self.code = code - - def __iter__(self): - """Yield '(op,arg)' pair for each operation in code object 'code'""" - - bytes = array.array('b', self.code.co_code) - eof = len(self.code.co_code) - - ptr = 0 - extended_arg = 0 - - while ptr < eof: - - op = bytes[ptr] - - if op >= dis.HAVE_ARGUMENT: - - arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg - ptr += 3 - - if op == dis.EXTENDED_ARG: - long_type = six.integer_types[-1] - extended_arg = arg * long_type(65536) - continue - - else: - arg = None - ptr += 1 - - yield OpArg(op, arg) - - -Bytecode = getattr(dis, 'Bytecode', Bytecode_compat) - - -unescape = getattr(html, 'unescape', None) -if unescape is None: - # HTMLParser.unescape is deprecated since Python 3.4, and will be removed - # from 3.9. 
- unescape = html_parser.HTMLParser().unescape diff --git a/env/lib/python2.7/site-packages/setuptools/py33compat.pyc b/env/lib/python2.7/site-packages/setuptools/py33compat.pyc deleted file mode 100644 index e1df5627..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/py33compat.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/sandbox.pyc b/env/lib/python2.7/site-packages/setuptools/sandbox.pyc deleted file mode 100644 index a4b99696..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/sandbox.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/site-patch.py b/env/lib/python2.7/site-packages/setuptools/site-patch.py deleted file mode 100644 index 40b00de0..00000000 --- a/env/lib/python2.7/site-packages/setuptools/site-patch.py +++ /dev/null @@ -1,74 +0,0 @@ -def __boot(): - import sys - import os - PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH): - PYTHONPATH = [] - else: - PYTHONPATH = PYTHONPATH.split(os.pathsep) - - pic = getattr(sys, 'path_importer_cache', {}) - stdpath = sys.path[len(PYTHONPATH):] - mydir = os.path.dirname(__file__) - - for item in stdpath: - if item == mydir or not item: - continue # skip if current dir. on Windows, or my own directory - importer = pic.get(item) - if importer is not None: - loader = importer.find_module('site') - if loader is not None: - # This should actually reload the current module - loader.load_module('site') - break - else: - try: - import imp # Avoid import loop in Python 3 - stream, path, descr = imp.find_module('site', [item]) - except ImportError: - continue - if stream is None: - continue - try: - # This should actually reload the current module - imp.load_module('site', stream, path, descr) - finally: - stream.close() - break - else: - raise ImportError("Couldn't find the real 'site' module") - - known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys, '__egginsert', 0) # save old insertion position - sys.__egginsert = 0 # and reset the current one - - for item in PYTHONPATH: - addsitedir(item) - - sys.__egginsert += oldpos # restore effective old position - - d, nd = makepath(stdpath[0]) - insert_at = None - new_path = [] - - for item in sys.path: - p, np = makepath(item) - - if np == nd and insert_at is None: - # We've hit the first 'system' path entry, so added entries go here - insert_at = len(new_path) - - if np in known_paths or insert_at is None: - new_path.append(item) - else: - # new path after the insert point, back-insert it - new_path.insert(insert_at, item) - insert_at += 1 - - sys.path[:] = new_path - - -if __name__ == 'site': - __boot() - del __boot diff --git a/env/lib/python2.7/site-packages/setuptools/site-patch.pyc b/env/lib/python2.7/site-packages/setuptools/site-patch.pyc deleted file mode 100644 index ab6e55d6..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/site-patch.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/ssl_support.pyc b/env/lib/python2.7/site-packages/setuptools/ssl_support.pyc deleted file mode 100644 index 347aafe2..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/ssl_support.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/unicode_utils.pyc b/env/lib/python2.7/site-packages/setuptools/unicode_utils.pyc deleted file mode 100644 index bf54d718..00000000 Binary files 
a/env/lib/python2.7/site-packages/setuptools/unicode_utils.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/version.pyc b/env/lib/python2.7/site-packages/setuptools/version.pyc deleted file mode 100644 index a094b932..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/version.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/wheel.py b/env/lib/python2.7/site-packages/setuptools/wheel.py deleted file mode 100644 index e11f0a1d..00000000 --- a/env/lib/python2.7/site-packages/setuptools/wheel.py +++ /dev/null @@ -1,211 +0,0 @@ -"""Wheels support.""" - -from distutils.util import get_platform -import email -import itertools -import os -import posixpath -import re -import zipfile - -import pkg_resources -import setuptools -from pkg_resources import parse_version -from setuptools.extern.packaging.utils import canonicalize_name -from setuptools.extern.six import PY3 -from setuptools import pep425tags -from setuptools.command.egg_info import write_requirements - - -__metaclass__ = type - - -WHEEL_NAME = re.compile( - r"""^(?P<project_name>.+?)-(?P<version>\d.*?) - ((-(?P<build>\d.*?))?-(?P<py_version>.+?)-(?P<abi>.+?)-(?P<platform>.+?) - )\.whl$""", - re.VERBOSE).match - -NAMESPACE_PACKAGE_INIT = '''\ -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - __path__ = __import__('pkgutil').extend_path(__path__, __name__) -''' - - -def unpack(src_dir, dst_dir): - '''Move everything under `src_dir` to `dst_dir`, and delete the former.''' - for dirpath, dirnames, filenames in os.walk(src_dir): - subdir = os.path.relpath(dirpath, src_dir) - for f in filenames: - src = os.path.join(dirpath, f) - dst = os.path.join(dst_dir, subdir, f) - os.renames(src, dst) - for n, d in reversed(list(enumerate(dirnames))): - src = os.path.join(dirpath, d) - dst = os.path.join(dst_dir, subdir, d) - if not os.path.exists(dst): - # Directory does not exist in destination, - # rename it and prune it from os.walk list. - os.renames(src, dst) - del dirnames[n] - # Cleanup. - for dirpath, dirnames, filenames in os.walk(src_dir, topdown=True): - assert not filenames - os.rmdir(dirpath) - - -class Wheel: - - def __init__(self, filename): - match = WHEEL_NAME(os.path.basename(filename)) - if match is None: - raise ValueError('invalid wheel name: %r' % filename) - self.filename = filename - for k, v in match.groupdict().items(): - setattr(self, k, v) - - def tags(self): - '''List tags (py_version, abi, platform) supported by this wheel.''' - return itertools.product( - self.py_version.split('.'), - self.abi.split('.'), - self.platform.split('.'), - ) - - def is_compatible(self): - '''Is the wheel is compatible with the current platform?''' - supported_tags = pep425tags.get_supported() - return next((True for t in self.tags() if t in supported_tags), False) - - def egg_name(self): - return pkg_resources.Distribution( - project_name=self.project_name, version=self.version, - platform=(None if self.platform == 'any' else get_platform()), - ).egg_name() + '.egg' - - def get_dist_info(self, zf): - # find the correct name of the .dist-info dir in the wheel file - for member in zf.namelist(): - dirname = posixpath.dirname(member) - if (dirname.endswith('.dist-info') and - canonicalize_name(dirname).startswith( - canonicalize_name(self.project_name))): - return dirname - raise ValueError("unsupported wheel format. 
.dist-info not found") - - def install_as_egg(self, destination_eggdir): - '''Install wheel as an egg directory.''' - with zipfile.ZipFile(self.filename) as zf: - self._install_as_egg(destination_eggdir, zf) - - def _install_as_egg(self, destination_eggdir, zf): - dist_basename = '%s-%s' % (self.project_name, self.version) - dist_info = self.get_dist_info(zf) - dist_data = '%s.data' % dist_basename - egg_info = os.path.join(destination_eggdir, 'EGG-INFO') - - self._convert_metadata(zf, destination_eggdir, dist_info, egg_info) - self._move_data_entries(destination_eggdir, dist_data) - self._fix_namespace_packages(egg_info, destination_eggdir) - - @staticmethod - def _convert_metadata(zf, destination_eggdir, dist_info, egg_info): - def get_metadata(name): - with zf.open(posixpath.join(dist_info, name)) as fp: - value = fp.read().decode('utf-8') if PY3 else fp.read() - return email.parser.Parser().parsestr(value) - - wheel_metadata = get_metadata('WHEEL') - # Check wheel format version is supported. - wheel_version = parse_version(wheel_metadata.get('Wheel-Version')) - wheel_v1 = ( - parse_version('1.0') <= wheel_version < parse_version('2.0dev0') - ) - if not wheel_v1: - raise ValueError( - 'unsupported wheel format version: %s' % wheel_version) - # Extract to target directory. - os.mkdir(destination_eggdir) - zf.extractall(destination_eggdir) - # Convert metadata. - dist_info = os.path.join(destination_eggdir, dist_info) - dist = pkg_resources.Distribution.from_location( - destination_eggdir, dist_info, - metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info), - ) - - # Note: Evaluate and strip markers now, - # as it's difficult to convert back from the syntax: - # foobar; "linux" in sys_platform and extra == 'test' - def raw_req(req): - req.marker = None - return str(req) - install_requires = list(sorted(map(raw_req, dist.requires()))) - extras_require = { - extra: sorted( - req - for req in map(raw_req, dist.requires((extra,))) - if req not in install_requires - ) - for extra in dist.extras - } - os.rename(dist_info, egg_info) - os.rename( - os.path.join(egg_info, 'METADATA'), - os.path.join(egg_info, 'PKG-INFO'), - ) - setup_dist = setuptools.Distribution( - attrs=dict( - install_requires=install_requires, - extras_require=extras_require, - ), - ) - write_requirements( - setup_dist.get_command_obj('egg_info'), - None, - os.path.join(egg_info, 'requires.txt'), - ) - - @staticmethod - def _move_data_entries(destination_eggdir, dist_data): - """Move data entries to their correct location.""" - dist_data = os.path.join(destination_eggdir, dist_data) - dist_data_scripts = os.path.join(dist_data, 'scripts') - if os.path.exists(dist_data_scripts): - egg_info_scripts = os.path.join( - destination_eggdir, 'EGG-INFO', 'scripts') - os.mkdir(egg_info_scripts) - for entry in os.listdir(dist_data_scripts): - # Remove bytecode, as it's not properly handled - # during easy_install scripts install phase. 
- if entry.endswith('.pyc'): - os.unlink(os.path.join(dist_data_scripts, entry)) - else: - os.rename( - os.path.join(dist_data_scripts, entry), - os.path.join(egg_info_scripts, entry), - ) - os.rmdir(dist_data_scripts) - for subdir in filter(os.path.exists, ( - os.path.join(dist_data, d) - for d in ('data', 'headers', 'purelib', 'platlib') - )): - unpack(subdir, destination_eggdir) - if os.path.exists(dist_data): - os.rmdir(dist_data) - - @staticmethod - def _fix_namespace_packages(egg_info, destination_eggdir): - namespace_packages = os.path.join( - egg_info, 'namespace_packages.txt') - if os.path.exists(namespace_packages): - with open(namespace_packages) as fp: - namespace_packages = fp.read().split() - for mod in namespace_packages: - mod_dir = os.path.join(destination_eggdir, *mod.split('.')) - mod_init = os.path.join(mod_dir, '__init__.py') - if os.path.exists(mod_dir) and not os.path.exists(mod_init): - with open(mod_init, 'w') as fp: - fp.write(NAMESPACE_PACKAGE_INIT) diff --git a/env/lib/python2.7/site-packages/setuptools/wheel.pyc b/env/lib/python2.7/site-packages/setuptools/wheel.pyc deleted file mode 100644 index 41c218ff..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/wheel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/setuptools/windows_support.pyc b/env/lib/python2.7/site-packages/setuptools/windows_support.pyc deleted file mode 100644 index 4547dea5..00000000 Binary files a/env/lib/python2.7/site-packages/setuptools/windows_support.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/INSTALLER b/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/LICENSE.txt b/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/LICENSE.txt deleted file mode 100644 index c3441e6c..00000000 --- a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -"wheel" copyright (c) 2012-2014 Daniel Holth <dholth@fastmail.fm> and -contributors. - -The MIT License - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR -OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
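The wheel.py and pep425tags.py modules removed above implement wheel filename parsing and PEP 425 compatibility-tag matching: a wheel filename encodes dotted python/abi/platform tag fields, and the wheel is installable when any expanded (py, abi, plat) triple appears in the interpreter's supported-tag list. As a rough illustration only, not code taken from those deleted modules, the sketch below uses a simplified form of the WHEEL_NAME pattern shown earlier; the helper names and the hard-coded supported set are made up for the example (the real list came from pep425tags.get_supported()).

    # Minimal sketch of the tag-matching idea from the deleted wheel.py / pep425tags.py.
    import itertools
    import re

    # Simplified variant of the WHEEL_NAME regex shown in the deleted wheel.py.
    WHEEL_RE = re.compile(
        r"^(?P<name>.+?)-(?P<version>\d.*?)"
        r"(-(?P<build>\d.*?))?-(?P<py>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl$"
    )

    def wheel_tags(filename):
        """Expand the dotted tag fields of a wheel filename into (py, abi, plat) triples."""
        match = WHEEL_RE.match(filename)
        if match is None:
            raise ValueError("invalid wheel name: %r" % filename)
        return set(itertools.product(
            match.group("py").split("."),
            match.group("abi").split("."),
            match.group("plat").split("."),
        ))

    def is_compatible(filename, supported):
        """A wheel is compatible if it advertises at least one supported tag triple."""
        return not wheel_tags(filename).isdisjoint(supported)

    # Hypothetical supported-tag set; in the deleted code it was computed by get_supported().
    supported = {("py2", "none", "any"), ("cp27", "none", "any")}
    print(is_compatible("selenium-3.141.0-py2.py3-none-any.whl", supported))  # True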
diff --git a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/METADATA b/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/METADATA deleted file mode 100644 index a178cf92..00000000 --- a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/METADATA +++ /dev/null @@ -1,61 +0,0 @@ -Metadata-Version: 2.1 -Name: wheel -Version: 0.33.6 -Summary: A built-package format for Python. -Home-page: https://github.com/pypa/wheel -Author: Daniel Holth -Author-email: dholth@fastmail.fm -Maintainer: Alex Grönholm -Maintainer-email: alex.gronholm@nextday.fi -License: MIT -Project-URL: Documentation, https://wheel.readthedocs.io/ -Project-URL: Changelog, https://wheel.readthedocs.io/en/stable/news.html -Project-URL: Issue Tracker, https://github.com/pypa/wheel/issues -Keywords: wheel,packaging -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* -Provides-Extra: test -Requires-Dist: pytest (>=3.0.0) ; extra == 'test' -Requires-Dist: pytest-cov ; extra == 'test' - -wheel -===== - -This library is the reference implementation of the Python wheel packaging -standard, as defined in `PEP 427`_. - -It has two different roles: - -#. A setuptools_ extension for building wheels that provides the - ``bdist_wheel`` setuptools command -#. A command line tool for working with wheel files - -It should be noted that wheel is **not** intended to be used as a library, and -as such there is no stable, public API. - -.. _PEP 427: https://www.python.org/dev/peps/pep-0427/ -.. _setuptools: https://pypi.org/project/setuptools/ - - -Code of Conduct ---------------- - -Everyone interacting in the wheel project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. - -.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ - - diff --git a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/RECORD b/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/RECORD deleted file mode 100644 index 9c405f37..00000000 --- a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/RECORD +++ /dev/null @@ -1,32 +0,0 @@ -../../../bin/wheel,sha256=Gjj2bI6h9Vvg0JdZZGrLpJUXwBfWKaI7Fa6iGYyk0Cc,265 -wheel-0.33.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -wheel-0.33.6.dist-info/LICENSE.txt,sha256=zKniDGrx_Pv2lAjzd3aShsvuvN7TNhAMm0o_NfvmNeQ,1125 -wheel-0.33.6.dist-info/METADATA,sha256=KO7TWvGpQ5MYnuiYromCLu30km3ky2B5cd9FSJrcSnM,2132 -wheel-0.33.6.dist-info/RECORD,, -wheel-0.33.6.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 -wheel-0.33.6.dist-info/entry_points.txt,sha256=N8HbYFST3yrNQYeB2wXWBEPUhFsEtKNRPaCFGJPyqyc,108 -wheel-0.33.6.dist-info/top_level.txt,sha256=HxSBIbgEstMPe4eFawhA66Mq-QYHMopXVoAncfjb_1c,6 -wheel/__init__.py,sha256=RWEKk9qMOFpl4Allx7f4DFZuTFptgoqdhMpTvtpOgVI,96 -wheel/__init__.pyc,, -wheel/__main__.py,sha256=lF-YLO4hdQmoWuh4eWZd8YL1U95RSdm76sNLBXa0vjE,417 -wheel/__main__.pyc,, -wheel/bdist_wheel.py,sha256=U3ljAOGgc8UGMKqxOf5YEKE1W-nt99X6RemMi-poBB8,14757 -wheel/bdist_wheel.pyc,, -wheel/cli/__init__.py,sha256=GWSoGUpRabTf8bk3FsNTPrc5Fsr8YOv2dX55iY2W7eY,2572 -wheel/cli/__init__.pyc,, -wheel/cli/convert.py,sha256=7F4vj23A2OghDDWn9gX2V-_TeXMza1a5nIejmFGEUJM,9498 -wheel/cli/convert.pyc,, -wheel/cli/pack.py,sha256=vkvZc4-rRZyWiwc6sHjpqIjzwDRMEF5u3JUNU9NY_jA,2263 -wheel/cli/pack.pyc,, -wheel/cli/unpack.py,sha256=0VWzT7U_xyenTPwEVavxqvdee93GPvAFHnR3Uu91aRc,673 -wheel/cli/unpack.pyc,, -wheel/metadata.py,sha256=uBv2aOz4U2sERF834C8DeNo235drcsp3ypTzT7MTWEA,4699 -wheel/metadata.pyc,, -wheel/pep425tags.py,sha256=AJ2m8gzMl4DzShFjd2TrHZU_DqxndgSj8U3fzskq720,6087 -wheel/pep425tags.pyc,, -wheel/pkginfo.py,sha256=GR76kupQzn1x9sKDaXuE6B6FsZ4OkfRtG7pndlXPvQ4,1257 -wheel/pkginfo.pyc,, -wheel/util.py,sha256=zwVIk-9qWVQLRMgkgQTMp4TRE4HY03-tCUxLrtCpsfU,924 -wheel/util.pyc,, -wheel/wheelfile.py,sha256=Mtt6mUaea-wABqtN5SW3Rn6bVvw-bdcSKaR2SjWZLG4,7222 -wheel/wheelfile.pyc,, diff --git a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/entry_points.txt b/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/entry_points.txt deleted file mode 100644 index b27acadd..00000000 --- a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/entry_points.txt +++ /dev/null @@ -1,6 +0,0 @@ -[console_scripts] -wheel = wheel.cli:main - -[distutils.commands] -bdist_wheel = wheel.bdist_wheel:bdist_wheel - diff --git a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/top_level.txt b/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/top_level.txt deleted file mode 100644 index 2309722a..00000000 --- a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -wheel diff --git a/env/lib/python2.7/site-packages/wheel/__init__.py b/env/lib/python2.7/site-packages/wheel/__init__.py deleted file mode 100644 index 5df0e1b1..00000000 --- a/env/lib/python2.7/site-packages/wheel/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# __variables__ with double-quoted values will be available in setup.py: -__version__ = "0.33.6" diff --git a/env/lib/python2.7/site-packages/wheel/__init__.pyc b/env/lib/python2.7/site-packages/wheel/__init__.pyc deleted file mode 100644 index 4861b209..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/__init__.pyc and /dev/null differ diff --git 
a/env/lib/python2.7/site-packages/wheel/__main__.py b/env/lib/python2.7/site-packages/wheel/__main__.py deleted file mode 100644 index b3773a20..00000000 --- a/env/lib/python2.7/site-packages/wheel/__main__.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Wheel command line tool (enable python -m wheel syntax) -""" - -import sys - - -def main(): # needed for console script - if __package__ == '': - # To be able to run 'python wheel-0.9.whl/wheel': - import os.path - path = os.path.dirname(os.path.dirname(__file__)) - sys.path[0:0] = [path] - import wheel.cli - sys.exit(wheel.cli.main()) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/env/lib/python2.7/site-packages/wheel/__main__.pyc b/env/lib/python2.7/site-packages/wheel/__main__.pyc deleted file mode 100644 index aec844c1..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/__main__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/bdist_wheel.py b/env/lib/python2.7/site-packages/wheel/bdist_wheel.py deleted file mode 100644 index c79307b5..00000000 --- a/env/lib/python2.7/site-packages/wheel/bdist_wheel.py +++ /dev/null @@ -1,372 +0,0 @@ -""" -Create a wheel (.whl) distribution. - -A wheel is a built archive format. -""" - -import os -import shutil -import sys -import re -from email.generator import Generator -from distutils.core import Command -from distutils.sysconfig import get_python_version -from distutils import log as logger -from glob import iglob -from shutil import rmtree -from warnings import warn - -import pkg_resources - -from .pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag, get_platform -from .pkginfo import write_pkg_info -from .metadata import pkginfo_to_metadata -from .wheelfile import WheelFile -from . import pep425tags -from . import __version__ as wheel_version - - -safe_name = pkg_resources.safe_name -safe_version = pkg_resources.safe_version - -PY_LIMITED_API_PATTERN = r'cp3\d' - - -def safer_name(name): - return safe_name(name).replace('-', '_') - - -def safer_version(version): - return safe_version(version).replace('-', '_') - - -class bdist_wheel(Command): - - description = 'create a wheel distribution' - - user_options = [('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('relative', None, - "build the archive using relative paths " - "(default: false)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ('universal', None, - "make a universal wheel" - " (default: false)"), - ('python-tag=', None, - "Python implementation compatibility tag" - " (default: py%s)" % get_impl_ver()[0]), - ('build-number=', None, - "Build number for this particular version. " - "As specified in PEP-0427, this must start with a digit. 
" - "[default: None]"), - ('py-limited-api=', None, - "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" - " (default: false)"), - ] - - boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal'] - - def initialize_options(self): - self.bdist_dir = None - self.data_dir = None - self.plat_name = None - self.plat_tag = None - self.format = 'zip' - self.keep_temp = False - self.dist_dir = None - self.egginfo_dir = None - self.root_is_pure = None - self.skip_build = None - self.relative = False - self.owner = None - self.group = None - self.universal = False - self.python_tag = 'py' + get_impl_ver()[0] - self.build_number = None - self.py_limited_api = False - self.plat_name_supplied = False - - def finalize_options(self): - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'wheel') - - self.data_dir = self.wheel_dist_name + '.data' - self.plat_name_supplied = self.plat_name is not None - - need_options = ('dist_dir', 'plat_name', 'skip_build') - - self.set_undefined_options('bdist', - *zip(need_options, need_options)) - - self.root_is_pure = not (self.distribution.has_ext_modules() - or self.distribution.has_c_libraries()) - - if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api): - raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN) - - # Support legacy [wheel] section for setting universal - wheel = self.distribution.get_option_dict('wheel') - if 'universal' in wheel: - # please don't define this in your global configs - logger.warn('The [wheel] section is deprecated. Use [bdist_wheel] instead.') - val = wheel['universal'][1].strip() - if val.lower() in ('1', 'true', 'yes'): - self.universal = True - - if self.build_number is not None and not self.build_number[:1].isdigit(): - raise ValueError("Build tag (build-number) must start with a digit.") - - @property - def wheel_dist_name(self): - """Return distribution full name with - replaced with _""" - components = (safer_name(self.distribution.get_name()), - safer_version(self.distribution.get_version())) - if self.build_number: - components += (self.build_number,) - return '-'.join(components) - - def get_tag(self): - # bdist sets self.plat_name if unset, we should only use it for purepy - # wheels if the user supplied it. - if self.plat_name_supplied: - plat_name = self.plat_name - elif self.root_is_pure: - plat_name = 'any' - else: - plat_name = self.plat_name or get_platform() - if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647: - plat_name = 'linux_i686' - plat_name = plat_name.replace('-', '_').replace('.', '_') - - if self.root_is_pure: - if self.universal: - impl = 'py2.py3' - else: - impl = self.python_tag - tag = (impl, 'none', plat_name) - else: - impl_name = get_abbr_impl() - impl_ver = get_impl_ver() - impl = impl_name + impl_ver - # We don't work on CPython 3.1, 3.0. 
- if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'): - impl = self.py_limited_api - abi_tag = 'abi3' - else: - abi_tag = str(get_abi_tag()).lower() - tag = (impl, abi_tag, plat_name) - supported_tags = pep425tags.get_supported( - supplied_platform=plat_name if self.plat_name_supplied else None) - # XXX switch to this alternate implementation for non-pure: - if not self.py_limited_api: - assert tag == supported_tags[0], "%s != %s" % (tag, supported_tags[0]) - assert tag in supported_tags, "would build wheel with unsupported tag {}".format(tag) - return tag - - def run(self): - build_scripts = self.reinitialize_command('build_scripts') - build_scripts.executable = 'python' - build_scripts.force = True - - build_ext = self.reinitialize_command('build_ext') - build_ext.inplace = False - - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', - reinit_subcommands=True) - install.root = self.bdist_dir - install.compile = False - install.skip_build = self.skip_build - install.warn_dir = False - - # A wheel without setuptools scripts is more cross-platform. - # Use the (undocumented) `no_ep` option to setuptools' - # install_scripts command to avoid creating entry point scripts. - install_scripts = self.reinitialize_command('install_scripts') - install_scripts.no_ep = True - - # Use a custom scheme for the archive, because we have to decide - # at installation time which scheme to use. - for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'): - setattr(install, - 'install_' + key, - os.path.join(self.data_dir, key)) - - basedir_observed = '' - - if os.name == 'nt': - # win32 barfs if any of these are ''; could be '.'? - # (distutils.command.install:change_roots bug) - basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..')) - self.install_libbase = self.install_lib = basedir_observed - - setattr(install, - 'install_purelib' if self.root_is_pure else 'install_platlib', - basedir_observed) - - logger.info("installing to %s", self.bdist_dir) - - self.run_command('install') - - impl_tag, abi_tag, plat_tag = self.get_tag() - archive_basename = "{}-{}-{}-{}".format(self.wheel_dist_name, impl_tag, abi_tag, plat_tag) - if not self.relative: - archive_root = self.bdist_dir - else: - archive_root = os.path.join( - self.bdist_dir, - self._ensure_relative(install.install_base)) - - self.set_undefined_options('install_egg_info', ('target', 'egginfo_dir')) - distinfo_dirname = '{}-{}.dist-info'.format( - safer_name(self.distribution.get_name()), - safer_version(self.distribution.get_version())) - distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname) - self.egg2dist(self.egginfo_dir, distinfo_dir) - - self.write_wheelfile(distinfo_dir) - - # Make the archive - if not os.path.exists(self.dist_dir): - os.makedirs(self.dist_dir) - - wheel_path = os.path.join(self.dist_dir, archive_basename + '.whl') - with WheelFile(wheel_path, 'w') as wf: - wf.write_files(archive_root) - - # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution, 'dist_files', []).append( - ('bdist_wheel', get_python_version(), wheel_path)) - - if not self.keep_temp: - logger.info('removing %s', self.bdist_dir) - if not self.dry_run: - rmtree(self.bdist_dir) - - def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'): - from email.message import Message - msg = Message() - msg['Wheel-Version'] = '1.0' # of the spec - msg['Generator'] = generator - msg['Root-Is-Purelib'] = 
str(self.root_is_pure).lower() - if self.build_number is not None: - msg['Build'] = self.build_number - - # Doesn't work for bdist_wininst - impl_tag, abi_tag, plat_tag = self.get_tag() - for impl in impl_tag.split('.'): - for abi in abi_tag.split('.'): - for plat in plat_tag.split('.'): - msg['Tag'] = '-'.join((impl, abi, plat)) - - wheelfile_path = os.path.join(wheelfile_base, 'WHEEL') - logger.info('creating %s', wheelfile_path) - with open(wheelfile_path, 'w') as f: - Generator(f, maxheaderlen=0).flatten(msg) - - def _ensure_relative(self, path): - # copied from dir_util, deleted - drive, path = os.path.splitdrive(path) - if path[0:1] == os.sep: - path = drive + path[1:] - return path - - @property - def license_paths(self): - metadata = self.distribution.get_option_dict('metadata') - files = set() - patterns = sorted({ - option for option in metadata.get('license_files', ('', ''))[1].split() - }) - - if 'license_file' in metadata: - warn('The "license_file" option is deprecated. Use "license_files" instead.', - DeprecationWarning) - files.add(metadata['license_file'][1]) - - if 'license_file' not in metadata and 'license_files' not in metadata: - patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*') - - for pattern in patterns: - for path in iglob(pattern): - if path not in files and os.path.isfile(path): - logger.info('adding license file "%s" (matched pattern "%s")', path, pattern) - files.add(path) - - return files - - def egg2dist(self, egginfo_path, distinfo_path): - """Convert an .egg-info directory into a .dist-info directory""" - def adios(p): - """Appropriately delete directory, file or link.""" - if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p): - shutil.rmtree(p) - elif os.path.exists(p): - os.unlink(p) - - adios(distinfo_path) - - if not os.path.exists(egginfo_path): - # There is no egg-info. This is probably because the egg-info - # file/directory is not named matching the distribution name used - # to name the archive file. Check for this case and report - # accordingly. 
- import glob - pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info') - possible = glob.glob(pat) - err = "Egg metadata expected at %s but not found" % (egginfo_path,) - if possible: - alt = os.path.basename(possible[0]) - err += " (%s found - possible misnamed archive file?)" % (alt,) - - raise ValueError(err) - - if os.path.isfile(egginfo_path): - # .egg-info is a single file - pkginfo_path = egginfo_path - pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path) - os.mkdir(distinfo_path) - else: - # .egg-info is a directory - pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO') - pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path) - - # ignore common egg metadata that is useless to wheel - shutil.copytree(egginfo_path, distinfo_path, - ignore=lambda x, y: {'PKG-INFO', 'requires.txt', 'SOURCES.txt', - 'not-zip-safe'} - ) - - # delete dependency_links if it is only whitespace - dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt') - with open(dependency_links_path, 'r') as dependency_links_file: - dependency_links = dependency_links_file.read().strip() - if not dependency_links: - adios(dependency_links_path) - - write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info) - - for license_path in self.license_paths: - filename = os.path.basename(license_path) - shutil.copy(license_path, os.path.join(distinfo_path, filename)) - - adios(egginfo_path) diff --git a/env/lib/python2.7/site-packages/wheel/bdist_wheel.pyc b/env/lib/python2.7/site-packages/wheel/bdist_wheel.pyc deleted file mode 100644 index 6e9e0fab..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/bdist_wheel.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/cli/__init__.py b/env/lib/python2.7/site-packages/wheel/cli/__init__.py deleted file mode 100644 index 95740bfb..00000000 --- a/env/lib/python2.7/site-packages/wheel/cli/__init__.py +++ /dev/null @@ -1,88 +0,0 @@ -""" -Wheel command-line utility. -""" - -from __future__ import print_function - -import argparse -import os -import sys - - -def require_pkgresources(name): - try: - import pkg_resources # noqa: F401 - except ImportError: - raise RuntimeError("'{0}' needs pkg_resources (part of setuptools).".format(name)) - - -class WheelError(Exception): - pass - - -def unpack_f(args): - from .unpack import unpack - unpack(args.wheelfile, args.dest) - - -def pack_f(args): - from .pack import pack - pack(args.directory, args.dest_dir, args.build_number) - - -def convert_f(args): - from .convert import convert - convert(args.files, args.dest_dir, args.verbose) - - -def version_f(args): - from .. 
import __version__ - print("wheel %s" % __version__) - - -def parser(): - p = argparse.ArgumentParser() - s = p.add_subparsers(help="commands") - - unpack_parser = s.add_parser('unpack', help='Unpack wheel') - unpack_parser.add_argument('--dest', '-d', help='Destination directory', - default='.') - unpack_parser.add_argument('wheelfile', help='Wheel file') - unpack_parser.set_defaults(func=unpack_f) - - repack_parser = s.add_parser('pack', help='Repack wheel') - repack_parser.add_argument('directory', help='Root directory of the unpacked wheel') - repack_parser.add_argument('--dest-dir', '-d', default=os.path.curdir, - help="Directory to store the wheel (default %(default)s)") - repack_parser.add_argument('--build-number', help="Build tag to use in the wheel name") - repack_parser.set_defaults(func=pack_f) - - convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel') - convert_parser.add_argument('files', nargs='*', help='Files to convert') - convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir, - help="Directory to store wheels (default %(default)s)") - convert_parser.add_argument('--verbose', '-v', action='store_true') - convert_parser.set_defaults(func=convert_f) - - version_parser = s.add_parser('version', help='Print version and exit') - version_parser.set_defaults(func=version_f) - - help_parser = s.add_parser('help', help='Show this help') - help_parser.set_defaults(func=lambda args: p.print_help()) - - return p - - -def main(): - p = parser() - args = p.parse_args() - if not hasattr(args, 'func'): - p.print_help() - else: - try: - args.func(args) - return 0 - except WheelError as e: - print(e, file=sys.stderr) - - return 1 diff --git a/env/lib/python2.7/site-packages/wheel/cli/__init__.pyc b/env/lib/python2.7/site-packages/wheel/cli/__init__.pyc deleted file mode 100644 index eeacd92b..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/cli/__init__.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/cli/convert.py b/env/lib/python2.7/site-packages/wheel/cli/convert.py deleted file mode 100644 index 154f1b1e..00000000 --- a/env/lib/python2.7/site-packages/wheel/cli/convert.py +++ /dev/null @@ -1,269 +0,0 @@ -import os.path -import re -import shutil -import sys -import tempfile -import zipfile -from distutils import dist -from glob import iglob - -from ..bdist_wheel import bdist_wheel -from ..wheelfile import WheelFile -from . import WheelError, require_pkgresources - -egg_info_re = re.compile(r''' - (?P<name>.+?)-(?P<ver>.+?) - (-(?P<pyver>py\d\.\d+) - (-(?P<arch>.+?))? - )?.egg$''', re.VERBOSE) - - -class _bdist_wheel_tag(bdist_wheel): - # allow the client to override the default generated wheel tag - # The default bdist_wheel implementation uses python and abi tags - # of the running python process. This is not suitable for - # generating/repackaging prebuild binaries. 
- - full_tag_supplied = False - full_tag = None # None or a (pytag, soabitag, plattag) triple - - def get_tag(self): - if self.full_tag_supplied and self.full_tag is not None: - return self.full_tag - else: - return bdist_wheel.get_tag(self) - - -def egg2wheel(egg_path, dest_dir): - filename = os.path.basename(egg_path) - match = egg_info_re.match(filename) - if not match: - raise WheelError('Invalid egg file name: {}'.format(filename)) - - egg_info = match.groupdict() - dir = tempfile.mkdtemp(suffix="_e2w") - if os.path.isfile(egg_path): - # assume we have a bdist_egg otherwise - with zipfile.ZipFile(egg_path) as egg: - egg.extractall(dir) - else: - # support buildout-style installed eggs directories - for pth in os.listdir(egg_path): - src = os.path.join(egg_path, pth) - if os.path.isfile(src): - shutil.copy2(src, dir) - else: - shutil.copytree(src, os.path.join(dir, pth)) - - pyver = egg_info['pyver'] - if pyver: - pyver = egg_info['pyver'] = pyver.replace('.', '') - - arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_') - - # assume all binary eggs are for CPython - abi = 'cp' + pyver[2:] if arch != 'any' else 'none' - - root_is_purelib = egg_info['arch'] is None - if root_is_purelib: - bw = bdist_wheel(dist.Distribution()) - else: - bw = _bdist_wheel_tag(dist.Distribution()) - - bw.root_is_pure = root_is_purelib - bw.python_tag = pyver - bw.plat_name_supplied = True - bw.plat_name = egg_info['arch'] or 'any' - if not root_is_purelib: - bw.full_tag_supplied = True - bw.full_tag = (pyver, abi, arch) - - dist_info_dir = os.path.join(dir, '{name}-{ver}.dist-info'.format(**egg_info)) - bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir) - bw.write_wheelfile(dist_info_dir, generator='egg2wheel') - wheel_name = '{name}-{ver}-{pyver}-{}-{}.whl'.format(abi, arch, **egg_info) - with WheelFile(os.path.join(dest_dir, wheel_name), 'w') as wf: - wf.write_files(dir) - - shutil.rmtree(dir) - - -def parse_wininst_info(wininfo_name, egginfo_name): - """Extract metadata from filenames. - - Extracts the 4 metadataitems needed (name, version, pyversion, arch) from - the installer filename and the name of the egg-info directory embedded in - the zipfile (if any). - - The egginfo filename has the format:: - - name-ver(-pyver)(-arch).egg-info - - The installer filename has the format:: - - name-ver.arch(-pyver).exe - - Some things to note: - - 1. The installer filename is not definitive. An installer can be renamed - and work perfectly well as an installer. So more reliable data should - be used whenever possible. - 2. The egg-info data should be preferred for the name and version, because - these come straight from the distutils metadata, and are mandatory. - 3. The pyver from the egg-info data should be ignored, as it is - constructed from the version of Python used to build the installer, - which is irrelevant - the installer filename is correct here (even to - the point that when it's not there, any version is implied). - 4. The architecture must be taken from the installer filename, as it is - not included in the egg-info data. - 5. Architecture-neutral installers still have an architecture because the - installer format itself (being executable) is architecture-specific. We - should therefore ignore the architecture if the content is pure-python. - """ - - egginfo = None - if egginfo_name: - egginfo = egg_info_re.search(egginfo_name) - if not egginfo: - raise ValueError("Egg info filename %s is not valid" % (egginfo_name,)) - - # Parse the wininst filename - # 1. 
Distribution name (up to the first '-') - w_name, sep, rest = wininfo_name.partition('-') - if not sep: - raise ValueError("Installer filename %s is not valid" % (wininfo_name,)) - - # Strip '.exe' - rest = rest[:-4] - # 2. Python version (from the last '-', must start with 'py') - rest2, sep, w_pyver = rest.rpartition('-') - if sep and w_pyver.startswith('py'): - rest = rest2 - w_pyver = w_pyver.replace('.', '') - else: - # Not version specific - use py2.py3. While it is possible that - # pure-Python code is not compatible with both Python 2 and 3, there - # is no way of knowing from the wininst format, so we assume the best - # here (the user can always manually rename the wheel to be more - # restrictive if needed). - w_pyver = 'py2.py3' - # 3. Version and architecture - w_ver, sep, w_arch = rest.rpartition('.') - if not sep: - raise ValueError("Installer filename %s is not valid" % (wininfo_name,)) - - if egginfo: - w_name = egginfo.group('name') - w_ver = egginfo.group('ver') - - return {'name': w_name, 'ver': w_ver, 'arch': w_arch, 'pyver': w_pyver} - - -def wininst2wheel(path, dest_dir): - with zipfile.ZipFile(path) as bdw: - # Search for egg-info in the archive - egginfo_name = None - for filename in bdw.namelist(): - if '.egg-info' in filename: - egginfo_name = filename - break - - info = parse_wininst_info(os.path.basename(path), egginfo_name) - - root_is_purelib = True - for zipinfo in bdw.infolist(): - if zipinfo.filename.startswith('PLATLIB'): - root_is_purelib = False - break - if root_is_purelib: - paths = {'purelib': ''} - else: - paths = {'platlib': ''} - - dist_info = "%(name)s-%(ver)s" % info - datadir = "%s.data/" % dist_info - - # rewrite paths to trick ZipFile into extracting an egg - # XXX grab wininst .ini - between .exe, padding, and first zip file. - members = [] - egginfo_name = '' - for zipinfo in bdw.infolist(): - key, basename = zipinfo.filename.split('/', 1) - key = key.lower() - basepath = paths.get(key, None) - if basepath is None: - basepath = datadir + key.lower() + '/' - oldname = zipinfo.filename - newname = basepath + basename - zipinfo.filename = newname - del bdw.NameToInfo[oldname] - bdw.NameToInfo[newname] = zipinfo - # Collect member names, but omit '' (from an entry like "PLATLIB/" - if newname: - members.append(newname) - # Remember egg-info name for the egg2dist call below - if not egginfo_name: - if newname.endswith('.egg-info'): - egginfo_name = newname - elif '.egg-info/' in newname: - egginfo_name, sep, _ = newname.rpartition('/') - dir = tempfile.mkdtemp(suffix="_b2w") - bdw.extractall(dir, members) - - # egg2wheel - abi = 'none' - pyver = info['pyver'] - arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_') - # Wininst installers always have arch even if they are not - # architecture-specific (because the format itself is). - # So, assume the content is architecture-neutral if root is purelib. - if root_is_purelib: - arch = 'any' - # If the installer is architecture-specific, it's almost certainly also - # CPython-specific. 
- if arch != 'any': - pyver = pyver.replace('py', 'cp') - wheel_name = '-'.join((dist_info, pyver, abi, arch)) - if root_is_purelib: - bw = bdist_wheel(dist.Distribution()) - else: - bw = _bdist_wheel_tag(dist.Distribution()) - - bw.root_is_pure = root_is_purelib - bw.python_tag = pyver - bw.plat_name_supplied = True - bw.plat_name = info['arch'] or 'any' - - if not root_is_purelib: - bw.full_tag_supplied = True - bw.full_tag = (pyver, abi, arch) - - dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info) - bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir) - bw.write_wheelfile(dist_info_dir, generator='wininst2wheel') - - wheel_path = os.path.join(dest_dir, wheel_name) - with WheelFile(wheel_path, 'w') as wf: - wf.write_files(dir) - - shutil.rmtree(dir) - - -def convert(files, dest_dir, verbose): - # Only support wheel convert if pkg_resources is present - require_pkgresources('wheel convert') - - for pat in files: - for installer in iglob(pat): - if os.path.splitext(installer)[1] == '.egg': - conv = egg2wheel - else: - conv = wininst2wheel - - if verbose: - print("{}... ".format(installer)) - sys.stdout.flush() - - conv(installer, dest_dir) - if verbose: - print("OK") diff --git a/env/lib/python2.7/site-packages/wheel/cli/convert.pyc b/env/lib/python2.7/site-packages/wheel/cli/convert.pyc deleted file mode 100644 index 26d5f2f1..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/cli/convert.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/cli/pack.py b/env/lib/python2.7/site-packages/wheel/cli/pack.py deleted file mode 100644 index af6e81c4..00000000 --- a/env/lib/python2.7/site-packages/wheel/cli/pack.py +++ /dev/null @@ -1,58 +0,0 @@ -from __future__ import print_function - -import os.path -import re -import sys - -from wheel.cli import WheelError -from wheel.wheelfile import WheelFile - -DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$") - - -def pack(directory, dest_dir, build_number): - """Repack a previously unpacked wheel directory into a new wheel file. - - The .dist-info/WHEEL file must contain one or more tags so that the target - wheel file name can be determined. 
- - :param directory: The unpacked wheel directory - :param dest_dir: Destination directory (defaults to the current directory) - """ - # Find the .dist-info directory - dist_info_dirs = [fn for fn in os.listdir(directory) - if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)] - if len(dist_info_dirs) > 1: - raise WheelError('Multiple .dist-info directories found in {}'.format(directory)) - elif not dist_info_dirs: - raise WheelError('No .dist-info directories found in {}'.format(directory)) - - # Determine the target wheel filename - dist_info_dir = dist_info_dirs[0] - name_version = DIST_INFO_RE.match(dist_info_dir).group('namever') - - # Add the build number if specific - if build_number: - name_version += '-' + build_number - - # Read the tags from .dist-info/WHEEL - with open(os.path.join(directory, dist_info_dir, 'WHEEL')) as f: - tags = [line.split(' ')[1].rstrip() for line in f if line.startswith('Tag: ')] - if not tags: - raise WheelError('No tags present in {}/WHEEL; cannot determine target wheel filename' - .format(dist_info_dir)) - - # Reassemble the tags for the wheel file - impls = sorted({tag.split('-')[0] for tag in tags}) - abivers = sorted({tag.split('-')[1] for tag in tags}) - platforms = sorted({tag.split('-')[2] for tag in tags}) - tagline = '-'.join(['.'.join(impls), '.'.join(abivers), '.'.join(platforms)]) - - # Repack the wheel - wheel_path = os.path.join(dest_dir, '{}-{}.whl'.format(name_version, tagline)) - with WheelFile(wheel_path, 'w') as wf: - print("Repacking wheel as {}...".format(wheel_path), end='') - sys.stdout.flush() - wf.write_files(directory) - - print('OK') diff --git a/env/lib/python2.7/site-packages/wheel/cli/pack.pyc b/env/lib/python2.7/site-packages/wheel/cli/pack.pyc deleted file mode 100644 index aa9df730..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/cli/pack.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/cli/unpack.py b/env/lib/python2.7/site-packages/wheel/cli/unpack.py deleted file mode 100644 index 2e9857a3..00000000 --- a/env/lib/python2.7/site-packages/wheel/cli/unpack.py +++ /dev/null @@ -1,25 +0,0 @@ -from __future__ import print_function - -import os.path -import sys - -from ..wheelfile import WheelFile - - -def unpack(path, dest='.'): - """Unpack a wheel. - - Wheel content will be unpacked to {dest}/{name}-{ver}, where {name} - is the package name and {ver} its version. - - :param path: The path to the wheel. - :param dest: Destination directory (default to current directory). - """ - with WheelFile(path) as wf: - namever = wf.parsed_filename.group('namever') - destination = os.path.join(dest, namever) - print("Unpacking to: {}...".format(destination), end='') - sys.stdout.flush() - wf.extractall(destination) - - print('OK') diff --git a/env/lib/python2.7/site-packages/wheel/cli/unpack.pyc b/env/lib/python2.7/site-packages/wheel/cli/unpack.pyc deleted file mode 100644 index 5d64afc2..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/cli/unpack.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/metadata.py b/env/lib/python2.7/site-packages/wheel/metadata.py deleted file mode 100644 index ab0c07e5..00000000 --- a/env/lib/python2.7/site-packages/wheel/metadata.py +++ /dev/null @@ -1,141 +0,0 @@ -""" -Tools for converting old- to new-style metadata. 
-""" - -import os.path -import re -import textwrap - -import pkg_resources - -from .pkginfo import read_pkg_info - -# Wheel itself is probably the only program that uses non-extras markers -# in METADATA/PKG-INFO. Support its syntax with the extra at the end only. -EXTRA_RE = re.compile( - r"""^(?P<package>.*?)(;\s*(?P<condition>.*?)(extra == '(?P<extra>.*?)')?)$""") - - -def requires_to_requires_dist(requirement): - """Return the version specifier for a requirement in PEP 345/566 fashion.""" - if getattr(requirement, 'url', None): - return " @ " + requirement.url - - requires_dist = [] - for op, ver in requirement.specs: - requires_dist.append(op + ver) - if not requires_dist: - return '' - return " (%s)" % ','.join(sorted(requires_dist)) - - -def convert_requirements(requirements): - """Yield Requires-Dist: strings for parsed requirements strings.""" - for req in requirements: - parsed_requirement = pkg_resources.Requirement.parse(req) - spec = requires_to_requires_dist(parsed_requirement) - extras = ",".join(sorted(parsed_requirement.extras)) - if extras: - extras = "[%s]" % extras - yield (parsed_requirement.project_name + extras + spec) - - -def generate_requirements(extras_require): - """ - Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement') - and ('Provides-Extra', 'extra') tuples. - - extras_require is a dictionary of {extra: [requirements]} as passed to setup(), - using the empty extra {'': [requirements]} to hold install_requires. - """ - for extra, depends in extras_require.items(): - condition = '' - extra = extra or '' - if ':' in extra: # setuptools extra:condition syntax - extra, condition = extra.split(':', 1) - - extra = pkg_resources.safe_extra(extra) - if extra: - yield 'Provides-Extra', extra - if condition: - condition = "(" + condition + ") and " - condition += "extra == '%s'" % extra - - if condition: - condition = ' ; ' + condition - - for new_req in convert_requirements(depends): - yield 'Requires-Dist', new_req + condition - - -def pkginfo_to_metadata(egg_info_path, pkginfo_path): - """ - Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format - """ - pkg_info = read_pkg_info(pkginfo_path) - pkg_info.replace_header('Metadata-Version', '2.1') - # Those will be regenerated from `requires.txt`. - del pkg_info['Provides-Extra'] - del pkg_info['Requires-Dist'] - requires_path = os.path.join(egg_info_path, 'requires.txt') - if os.path.exists(requires_path): - with open(requires_path) as requires_file: - requires = requires_file.read() - - parsed_requirements = sorted(pkg_resources.split_sections(requires), - key=lambda x: x[0] or '') - for extra, reqs in parsed_requirements: - for key, value in generate_requirements({extra: reqs}): - if (key, value) not in pkg_info.items(): - pkg_info[key] = value - - description = pkg_info['Description'] - if description: - pkg_info.set_payload(dedent_description(pkg_info)) - del pkg_info['Description'] - - return pkg_info - - -def pkginfo_unicode(pkg_info, field): - """Hack to coax Unicode out of an email Message() - Python 3.3+""" - text = pkg_info[field] - field = field.lower() - if not isinstance(text, str): - if not hasattr(pkg_info, 'raw_items'): # Python 3.2 - return str(text) - for item in pkg_info.raw_items(): - if item[0].lower() == field: - text = item[1].encode('ascii', 'surrogateescape') \ - .decode('utf-8') - break - - return text - - -def dedent_description(pkg_info): - """ - Dedent and convert pkg_info['Description'] to Unicode. 
- """ - description = pkg_info['Description'] - - # Python 3 Unicode handling, sorta. - surrogates = False - if not isinstance(description, str): - surrogates = True - description = pkginfo_unicode(pkg_info, 'Description') - - description_lines = description.splitlines() - description_dedent = '\n'.join( - # if the first line of long_description is blank, - # the first line here will be indented. - (description_lines[0].lstrip(), - textwrap.dedent('\n'.join(description_lines[1:])), - '\n')) - - if surrogates: - description_dedent = description_dedent \ - .encode("utf8") \ - .decode("ascii", "surrogateescape") - - return description_dedent diff --git a/env/lib/python2.7/site-packages/wheel/metadata.pyc b/env/lib/python2.7/site-packages/wheel/metadata.pyc deleted file mode 100644 index bd60aa13..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/metadata.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/pep425tags.py b/env/lib/python2.7/site-packages/wheel/pep425tags.py deleted file mode 100644 index b9242efa..00000000 --- a/env/lib/python2.7/site-packages/wheel/pep425tags.py +++ /dev/null @@ -1,190 +0,0 @@ -"""Generate and work with PEP 425 Compatibility Tags.""" - -import distutils.util -import platform -import sys -import sysconfig -import warnings - -try: - from importlib.machinery import all_suffixes as get_all_suffixes -except ImportError: - from imp import get_suffixes - - def get_all_suffixes(): - return [suffix[0] for suffix in get_suffixes()] - - -def get_config_var(var): - try: - return sysconfig.get_config_var(var) - except IOError as e: # pip Issue #1074 - warnings.warn("{0}".format(e), RuntimeWarning) - return None - - -def get_abbr_impl(): - """Return abbreviated implementation name.""" - impl = platform.python_implementation() - if impl == 'PyPy': - return 'pp' - elif impl == 'Jython': - return 'jy' - elif impl == 'IronPython': - return 'ip' - elif impl == 'CPython': - return 'cp' - - raise LookupError('Unknown Python implementation: ' + impl) - - -def get_impl_ver(): - """Return implementation version.""" - impl_ver = get_config_var("py_version_nodot") - if not impl_ver or get_abbr_impl() == 'pp': - impl_ver = ''.join(map(str, get_impl_version_info())) - return impl_ver - - -def get_impl_version_info(): - """Return sys.version_info-like tuple for use in decrementing the minor - version.""" - if get_abbr_impl() == 'pp': - # as per https://github.com/pypa/pip/issues/2882 - return (sys.version_info[0], sys.pypy_version_info.major, - sys.pypy_version_info.minor) - else: - return sys.version_info[0], sys.version_info[1] - - -def get_flag(var, fallback, expected=True, warn=True): - """Use a fallback method for determining SOABI flags if the needed config - var is unset or unavailable.""" - val = get_config_var(var) - if val is None: - if warn: - warnings.warn("Config variable '{0}' is unset, Python ABI tag may " - "be incorrect".format(var), RuntimeWarning, 2) - return fallback() - return val == expected - - -def get_abi_tag(): - """Return the ABI tag based on SOABI (if available) or emulate SOABI - (CPython 2, PyPy).""" - soabi = get_config_var('SOABI') - impl = get_abbr_impl() - if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'): - d = '' - m = '' - u = '' - if get_flag('Py_DEBUG', - lambda: hasattr(sys, 'gettotalrefcount'), - warn=(impl == 'cp')): - d = 'd' - if get_flag('WITH_PYMALLOC', - lambda: impl == 'cp', - warn=(impl == 'cp' and - sys.version_info < (3, 8))) \ - and sys.version_info < (3, 8): - m = 'm' - if 
get_flag('Py_UNICODE_SIZE', - lambda: sys.maxunicode == 0x10ffff, - expected=4, - warn=(impl == 'cp' and - sys.version_info < (3, 3))) \ - and sys.version_info < (3, 3): - u = 'u' - abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) - elif soabi and soabi.startswith('cpython-'): - abi = 'cp' + soabi.split('-')[1] - elif soabi: - abi = soabi.replace('.', '_').replace('-', '_') - else: - abi = None - return abi - - -def get_platform(): - """Return our platform name 'win32', 'linux_x86_64'""" - # XXX remove distutils dependency - result = distutils.util.get_platform().replace('.', '_').replace('-', '_') - if result == "linux_x86_64" and sys.maxsize == 2147483647: - # pip pull request #3497 - result = "linux_i686" - return result - - -def get_supported(versions=None, supplied_platform=None): - """Return a list of supported tags for each version specified in - `versions`. - - :param versions: a list of string versions, of the form ["33", "32"], - or None. The first version will be assumed to support our ABI. - """ - supported = [] - - # Versions must be given with respect to the preference - if versions is None: - versions = [] - version_info = get_impl_version_info() - major = version_info[:-1] - # Support all previous minor Python versions. - for minor in range(version_info[-1], -1, -1): - versions.append(''.join(map(str, major + (minor,)))) - - impl = get_abbr_impl() - - abis = [] - - abi = get_abi_tag() - if abi: - abis[0:0] = [abi] - - abi3s = set() - for suffix in get_all_suffixes(): - if suffix.startswith('.abi'): - abi3s.add(suffix.split('.', 2)[1]) - - abis.extend(sorted(list(abi3s))) - - abis.append('none') - - platforms = [] - if supplied_platform: - platforms.append(supplied_platform) - platforms.append(get_platform()) - - # Current version, current API (built specifically for our Python): - for abi in abis: - for arch in platforms: - supported.append(('%s%s' % (impl, versions[0]), abi, arch)) - - # abi3 modules compatible with older version of Python - for version in versions[1:]: - # abi3 was introduced in Python 3.2 - if version in ('31', '30'): - break - for abi in abi3s: # empty set if not Python 3 - for arch in platforms: - supported.append(("%s%s" % (impl, version), abi, arch)) - - # No abi / arch, but requires our implementation: - for i, version in enumerate(versions): - supported.append(('%s%s' % (impl, version), 'none', 'any')) - if i == 0: - # Tagged specifically as being cross-version compatible - # (with just the major version specified) - supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) - - # Major Python version + platform; e.g. 
binaries not using the Python API - for arch in platforms: - supported.append(('py%s' % (versions[0][0]), 'none', arch)) - - # No abi / arch, generic Python - for i, version in enumerate(versions): - supported.append(('py%s' % (version,), 'none', 'any')) - if i == 0: - supported.append(('py%s' % (version[0]), 'none', 'any')) - - return supported diff --git a/env/lib/python2.7/site-packages/wheel/pep425tags.pyc b/env/lib/python2.7/site-packages/wheel/pep425tags.pyc deleted file mode 100644 index cdafa960..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/pep425tags.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/pkginfo.py b/env/lib/python2.7/site-packages/wheel/pkginfo.py deleted file mode 100644 index 115be45b..00000000 --- a/env/lib/python2.7/site-packages/wheel/pkginfo.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Tools for reading and writing PKG-INFO / METADATA without caring -about the encoding.""" - -from email.parser import Parser - -try: - unicode - _PY3 = False -except NameError: - _PY3 = True - -if not _PY3: - from email.generator import Generator - - def read_pkg_info_bytes(bytestr): - return Parser().parsestr(bytestr) - - def read_pkg_info(path): - with open(path, "r") as headers: - message = Parser().parse(headers) - return message - - def write_pkg_info(path, message): - with open(path, 'w') as metadata: - Generator(metadata, mangle_from_=False, maxheaderlen=0).flatten(message) -else: - from email.generator import BytesGenerator - - def read_pkg_info_bytes(bytestr): - headers = bytestr.decode(encoding="ascii", errors="surrogateescape") - message = Parser().parsestr(headers) - return message - - def read_pkg_info(path): - with open(path, "r", - encoding="ascii", - errors="surrogateescape") as headers: - message = Parser().parse(headers) - return message - - def write_pkg_info(path, message): - with open(path, "wb") as out: - BytesGenerator(out, mangle_from_=False, maxheaderlen=0).flatten(message) diff --git a/env/lib/python2.7/site-packages/wheel/pkginfo.pyc b/env/lib/python2.7/site-packages/wheel/pkginfo.pyc deleted file mode 100644 index d32c9c70..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/pkginfo.pyc and /dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/util.py b/env/lib/python2.7/site-packages/wheel/util.py deleted file mode 100644 index 0afb54a4..00000000 --- a/env/lib/python2.7/site-packages/wheel/util.py +++ /dev/null @@ -1,46 +0,0 @@ -import base64 -import io -import sys - - -if sys.version_info[0] < 3: - text_type = unicode # noqa: F821 - - StringIO = io.BytesIO - - def native(s, encoding='utf-8'): - if isinstance(s, unicode): - return s.encode(encoding) - return s -else: - text_type = str - - StringIO = io.StringIO - - def native(s, encoding='utf-8'): - if isinstance(s, bytes): - return s.decode(encoding) - return s - - -def urlsafe_b64encode(data): - """urlsafe_b64encode without padding""" - return base64.urlsafe_b64encode(data).rstrip(b'=') - - -def urlsafe_b64decode(data): - """urlsafe_b64decode without padding""" - pad = b'=' * (4 - (len(data) & 3)) - return base64.urlsafe_b64decode(data + pad) - - -def as_unicode(s): - if isinstance(s, bytes): - return s.decode('utf-8') - return s - - -def as_bytes(s): - if isinstance(s, text_type): - return s.encode('utf-8') - return s diff --git a/env/lib/python2.7/site-packages/wheel/util.pyc b/env/lib/python2.7/site-packages/wheel/util.pyc deleted file mode 100644 index 9ac58883..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/util.pyc and 
/dev/null differ diff --git a/env/lib/python2.7/site-packages/wheel/wheelfile.py b/env/lib/python2.7/site-packages/wheel/wheelfile.py deleted file mode 100644 index ddf8509d..00000000 --- a/env/lib/python2.7/site-packages/wheel/wheelfile.py +++ /dev/null @@ -1,169 +0,0 @@ -from __future__ import print_function - -import csv -import hashlib -import os.path -import re -import stat -import time -from collections import OrderedDict -from distutils import log as logger -from zipfile import ZIP_DEFLATED, ZipInfo, ZipFile - -from wheel.cli import WheelError -from wheel.util import urlsafe_b64decode, as_unicode, native, urlsafe_b64encode, as_bytes, StringIO - -# Non-greedy matching of an optional build number may be too clever (more -# invalid wheel filenames will match). Separate regex for .dist-info? -WHEEL_INFO_RE = re.compile( - r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.+?))(-(?P<build>\d[^-]*))? - -(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl$""", - re.VERBOSE) - - -def get_zipinfo_datetime(timestamp=None): - # Some applications need reproducible .whl files, but they can't do this without forcing - # the timestamp of the individual ZipInfo objects. See issue #143. - timestamp = int(os.environ.get('SOURCE_DATE_EPOCH', timestamp or time.time())) - return time.gmtime(timestamp)[0:6] - - -class WheelFile(ZipFile): - """A ZipFile derivative class that also reads SHA-256 hashes from - .dist-info/RECORD and checks any read files against those. - """ - - _default_algorithm = hashlib.sha256 - - def __init__(self, file, mode='r'): - basename = os.path.basename(file) - self.parsed_filename = WHEEL_INFO_RE.match(basename) - if not basename.endswith('.whl') or self.parsed_filename is None: - raise WheelError("Bad wheel filename {!r}".format(basename)) - - ZipFile.__init__(self, file, mode, compression=ZIP_DEFLATED, allowZip64=True) - - self.dist_info_path = '{}.dist-info'.format(self.parsed_filename.group('namever')) - self.record_path = self.dist_info_path + '/RECORD' - self._file_hashes = OrderedDict() - self._file_sizes = {} - if mode == 'r': - # Ignore RECORD and any embedded wheel signatures - self._file_hashes[self.record_path] = None, None - self._file_hashes[self.record_path + '.jws'] = None, None - self._file_hashes[self.record_path + '.p7s'] = None, None - - # Fill in the expected hashes by reading them from RECORD - try: - record = self.open(self.record_path) - except KeyError: - raise WheelError('Missing {} file'.format(self.record_path)) - - with record: - for line in record: - line = line.decode('utf-8') - path, hash_sum, size = line.rsplit(u',', 2) - if hash_sum: - algorithm, hash_sum = hash_sum.split(u'=') - try: - hashlib.new(algorithm) - except ValueError: - raise WheelError('Unsupported hash algorithm: {}'.format(algorithm)) - - if algorithm.lower() in {'md5', 'sha1'}: - raise WheelError( - 'Weak hash algorithm ({}) is not permitted by PEP 427' - .format(algorithm)) - - self._file_hashes[path] = ( - algorithm, urlsafe_b64decode(hash_sum.encode('ascii'))) - - def open(self, name_or_info, mode="r", pwd=None): - def _update_crc(newdata, eof=None): - if eof is None: - eof = ef._eof - update_crc_orig(newdata) - else: # Python 2 - update_crc_orig(newdata, eof) - - running_hash.update(newdata) - if eof and running_hash.digest() != expected_hash: - raise WheelError("Hash mismatch for file '{}'".format(native(ef_name))) - - ef = ZipFile.open(self, name_or_info, mode, pwd) - ef_name = as_unicode(name_or_info.filename if isinstance(name_or_info, ZipInfo) - else name_or_info) - if mode == 'r' and 
not ef_name.endswith('/'): - if ef_name not in self._file_hashes: - raise WheelError("No hash found for file '{}'".format(native(ef_name))) - - algorithm, expected_hash = self._file_hashes[ef_name] - if expected_hash is not None: - # Monkey patch the _update_crc method to also check for the hash from RECORD - running_hash = hashlib.new(algorithm) - update_crc_orig, ef._update_crc = ef._update_crc, _update_crc - - return ef - - def write_files(self, base_dir): - logger.info("creating '%s' and adding '%s' to it", self.filename, base_dir) - deferred = [] - for root, dirnames, filenames in os.walk(base_dir): - # Sort the directory names so that `os.walk` will walk them in a - # defined order on the next iteration. - dirnames.sort() - for name in sorted(filenames): - path = os.path.normpath(os.path.join(root, name)) - if os.path.isfile(path): - arcname = os.path.relpath(path, base_dir) - if arcname == self.record_path: - pass - elif root.endswith('.dist-info'): - deferred.append((path, arcname)) - else: - self.write(path, arcname) - - deferred.sort() - for path, arcname in deferred: - self.write(path, arcname) - - def write(self, filename, arcname=None, compress_type=None): - with open(filename, 'rb') as f: - st = os.fstat(f.fileno()) - data = f.read() - - zinfo = ZipInfo(arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)) - zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16 - zinfo.compress_type = ZIP_DEFLATED - self.writestr(zinfo, data, compress_type) - - def writestr(self, zinfo_or_arcname, bytes, compress_type=None): - ZipFile.writestr(self, zinfo_or_arcname, bytes, compress_type) - fname = (zinfo_or_arcname.filename if isinstance(zinfo_or_arcname, ZipInfo) - else zinfo_or_arcname) - logger.info("adding '%s'", fname) - if fname != self.record_path: - hash_ = self._default_algorithm(bytes) - self._file_hashes[fname] = hash_.name, native(urlsafe_b64encode(hash_.digest())) - self._file_sizes[fname] = len(bytes) - - def close(self): - # Write RECORD - if self.fp is not None and self.mode == 'w' and self._file_hashes: - data = StringIO() - writer = csv.writer(data, delimiter=',', quotechar='"', lineterminator='\n') - writer.writerows(( - ( - fname, - algorithm + "=" + hash_, - self._file_sizes[fname] - ) - for fname, (algorithm, hash_) in self._file_hashes.items() - )) - writer.writerow((format(self.record_path), "", "")) - zinfo = ZipInfo(native(self.record_path), date_time=get_zipinfo_datetime()) - zinfo.compress_type = ZIP_DEFLATED - zinfo.external_attr = 0o664 << 16 - self.writestr(zinfo, as_bytes(data.getvalue())) - - ZipFile.close(self) diff --git a/env/lib/python2.7/site-packages/wheel/wheelfile.pyc b/env/lib/python2.7/site-packages/wheel/wheelfile.pyc deleted file mode 100644 index a8832a29..00000000 Binary files a/env/lib/python2.7/site-packages/wheel/wheelfile.pyc and /dev/null differ diff --git a/env/lib/python2.7/site.py b/env/lib/python2.7/site.py deleted file mode 100644 index ede3ca30..00000000 --- a/env/lib/python2.7/site.py +++ /dev/null @@ -1,851 +0,0 @@ -"""Append module search paths for third-party packages to sys.path. - -**************************************************************** -* This module is automatically imported during initialization. * -**************************************************************** - -In earlier versions of Python (up to 1.5a3), scripts or modules that -needed to use site-specific modules would place ``import site'' -somewhere near the top of their code. 
Because of the automatic -import, this is no longer necessary (but code that does it still -works). - -This will append site-specific paths to the module search path. On -Unix, it starts with sys.prefix and sys.exec_prefix (if different) and -appends lib/python<version>/site-packages as well as lib/site-python. -It also supports the Debian convention of -lib/python<version>/dist-packages. On other platforms (mainly Mac and -Windows), it uses just sys.prefix (and sys.exec_prefix, if different, -but this is unlikely). The resulting directories, if they exist, are -appended to sys.path, and also inspected for path configuration files. - -FOR DEBIAN, this sys.path is augmented with directories in /usr/local. -Local addons go into /usr/local/lib/python<version>/site-packages -(resp. /usr/local/lib/site-python), Debian addons install into -/usr/{lib,share}/python<version>/dist-packages. - -A path configuration file is a file whose name has the form -<package>.pth; its contents are additional directories (one per line) -to be added to sys.path. Non-existing directories (or -non-directories) are never added to sys.path; no directory is added to -sys.path more than once. Blank lines and lines beginning with -'#' are skipped. Lines starting with 'import' are executed. - -For example, suppose sys.prefix and sys.exec_prefix are set to -/usr/local and there is a directory /usr/local/lib/python2.X/site-packages -with three subdirectories, foo, bar and spam, and two path -configuration files, foo.pth and bar.pth. Assume foo.pth contains the -following: - - # foo package configuration - foo - bar - bletch - -and bar.pth contains: - - # bar package configuration - bar - -Then the following directories are added to sys.path, in this order: - - /usr/local/lib/python2.X/site-packages/bar - /usr/local/lib/python2.X/site-packages/foo - -Note that bletch is omitted because it doesn't exist; bar precedes foo -because bar.pth comes alphabetically before foo.pth; and spam is -omitted because it is not mentioned in either path configuration file. - -After these path manipulations, an attempt is made to import a module -named sitecustomize, which can perform arbitrary additional -site-specific customizations. If this import fails with an -ImportError exception, it is silently ignored. - -""" - -import os -import sys - -try: - import __builtin__ as builtins -except ImportError: - import builtins -try: - set -except NameError: - from sets import Set as set - -# Prefixes for site-packages; add additional prefixes like /usr/local here -PREFIXES = [sys.prefix, sys.exec_prefix] -# Enable per user site-packages directory -# set it to False to disable the feature or True to force the feature -ENABLE_USER_SITE = None -# for distutils.commands.install -USER_SITE = None -USER_BASE = None - -_is_64bit = (getattr(sys, "maxsize", None) or getattr(sys, "maxint")) > 2 ** 32 -_is_pypy = hasattr(sys, "pypy_version_info") -_is_jython = sys.platform[:4] == "java" -if _is_jython: - ModuleType = type(os) - - -def makepath(*paths): - dir = os.path.join(*paths) - if _is_jython and (dir == "__classpath__" or dir.startswith("__pyclasspath__")): - return dir, dir - dir = os.path.abspath(dir) - return dir, os.path.normcase(dir) - - -def abs__file__(): - """Set all module' __file__ attribute to an absolute path""" - for m in sys.modules.values(): - if (_is_jython and not isinstance(m, ModuleType)) or hasattr(m, "__loader__"): - # only modules need the abspath in Jython. 
and don't mess - # with a PEP 302-supplied __file__ - continue - f = getattr(m, "__file__", None) - if f is None: - continue - m.__file__ = os.path.abspath(f) - - -def removeduppaths(): - """ Remove duplicate entries from sys.path along with making them - absolute""" - # This ensures that the initial path provided by the interpreter contains - # only absolute pathnames, even if we're running from the build directory. - L = [] - known_paths = set() - for dir in sys.path: - # Filter out duplicate paths (on case-insensitive file systems also - # if they only differ in case); turn relative paths into absolute - # paths. - dir, dircase = makepath(dir) - if not dircase in known_paths: - L.append(dir) - known_paths.add(dircase) - sys.path[:] = L - return known_paths - - -# XXX This should not be part of site.py, since it is needed even when -# using the -S option for Python. See http://www.python.org/sf/586680 -def addbuilddir(): - """Append ./build/lib.<platform> in case we're running in the build dir - (especially for Guido :-)""" - from distutils.util import get_platform - - s = "build/lib.{}-{:.3}".format(get_platform(), sys.version) - if hasattr(sys, "gettotalrefcount"): - s += "-pydebug" - s = os.path.join(os.path.dirname(sys.path[-1]), s) - sys.path.append(s) - - -def _init_pathinfo(): - """Return a set containing all existing directory entries from sys.path""" - d = set() - for dir in sys.path: - try: - if os.path.isdir(dir): - dir, dircase = makepath(dir) - d.add(dircase) - except TypeError: - continue - return d - - -def addpackage(sitedir, name, known_paths): - """Add a new path to known_paths by combining sitedir and 'name' or execute - sitedir if it starts with 'import'""" - if known_paths is None: - _init_pathinfo() - reset = 1 - else: - reset = 0 - fullname = os.path.join(sitedir, name) - try: - f = open(fullname, "r") - except IOError: - return - try: - for line in f: - if line.startswith("#"): - continue - if line.startswith("import"): - exec(line) - continue - line = line.rstrip() - dir, dircase = makepath(sitedir, line) - if not dircase in known_paths and os.path.exists(dir): - sys.path.append(dir) - known_paths.add(dircase) - finally: - f.close() - if reset: - known_paths = None - return known_paths - - -def addsitedir(sitedir, known_paths=None): - """Add 'sitedir' argument to sys.path if missing and handle .pth files in - 'sitedir'""" - if known_paths is None: - known_paths = _init_pathinfo() - reset = 1 - else: - reset = 0 - sitedir, sitedircase = makepath(sitedir) - if not sitedircase in known_paths: - sys.path.append(sitedir) # Add path component - try: - names = os.listdir(sitedir) - except os.error: - return - names.sort() - for name in names: - if name.endswith(os.extsep + "pth"): - addpackage(sitedir, name, known_paths) - if reset: - known_paths = None - return known_paths - - -def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix): - """Add site-packages (and possibly site-python) to sys.path""" - prefixes = [os.path.join(sys_prefix, "local"), sys_prefix] - if exec_prefix != sys_prefix: - prefixes.append(os.path.join(exec_prefix, "local")) - - for prefix in prefixes: - if prefix: - if sys.platform in ("os2emx", "riscos") or _is_jython: - sitedirs = [os.path.join(prefix, "Lib", "site-packages")] - elif _is_pypy: - sitedirs = [os.path.join(prefix, "site-packages")] - elif sys.platform == "darwin" and prefix == sys_prefix: - - if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python - - sitedirs = [ - 
os.path.join("/Library/Python", sys.version[:3], "site-packages"), - os.path.join(prefix, "Extras", "lib", "python"), - ] - - else: # any other Python distros on OSX work this way - sitedirs = [os.path.join(prefix, "lib", "python" + sys.version[:3], "site-packages")] - - elif os.sep == "/": - sitedirs = [ - os.path.join(prefix, "lib", "python" + sys.version[:3], "site-packages"), - os.path.join(prefix, "lib", "site-python"), - os.path.join(prefix, "python" + sys.version[:3], "lib-dynload"), - ] - lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages") - if os.path.exists(lib64_dir) and os.path.realpath(lib64_dir) not in [ - os.path.realpath(p) for p in sitedirs - ]: - if _is_64bit: - sitedirs.insert(0, lib64_dir) - else: - sitedirs.append(lib64_dir) - try: - # sys.getobjects only available in --with-pydebug build - sys.getobjects - sitedirs.insert(0, os.path.join(sitedirs[0], "debug")) - except AttributeError: - pass - # Debian-specific dist-packages directories: - sitedirs.append(os.path.join(prefix, "local/lib", "python" + sys.version[:3], "dist-packages")) - if sys.version[0] == "2": - sitedirs.append(os.path.join(prefix, "lib", "python" + sys.version[:3], "dist-packages")) - else: - sitedirs.append(os.path.join(prefix, "lib", "python" + sys.version[0], "dist-packages")) - sitedirs.append(os.path.join(prefix, "lib", "dist-python")) - else: - sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")] - if sys.platform == "darwin": - # for framework builds *only* we add the standard Apple - # locations. Currently only per-user, but /Library and - # /Network/Library could be added too - if "Python.framework" in prefix: - home = os.environ.get("HOME") - if home: - sitedirs.append(os.path.join(home, "Library", "Python", sys.version[:3], "site-packages")) - for sitedir in sitedirs: - if os.path.isdir(sitedir): - addsitedir(sitedir, known_paths) - return None - - -def check_enableusersite(): - """Check if user site directory is safe for inclusion - - The function tests for the command line flag (including environment var), - process uid/gid equal to effective uid/gid. - - None: Disabled for security reasons - False: Disabled by user (command line option) - True: Safe and enabled - """ - if hasattr(sys, "flags") and getattr(sys.flags, "no_user_site", False): - return False - - if hasattr(os, "getuid") and hasattr(os, "geteuid"): - # check process uid == effective uid - if os.geteuid() != os.getuid(): - return None - if hasattr(os, "getgid") and hasattr(os, "getegid"): - # check process gid == effective gid - if os.getegid() != os.getgid(): - return None - - return True - - -def addusersitepackages(known_paths): - """Add a per user site-package to sys.path - - Each user has its own python directory with site-packages in the - home directory. - - USER_BASE is the root directory for all Python versions - - USER_SITE is the user specific site-packages directory - - USER_SITE/.. can be used for data. 
- """ - global USER_BASE, USER_SITE, ENABLE_USER_SITE - env_base = os.environ.get("PYTHONUSERBASE", None) - - def joinuser(*args): - return os.path.expanduser(os.path.join(*args)) - - # if sys.platform in ('os2emx', 'riscos'): - # # Don't know what to put here - # USER_BASE = '' - # USER_SITE = '' - if os.name == "nt": - base = os.environ.get("APPDATA") or "~" - if env_base: - USER_BASE = env_base - else: - USER_BASE = joinuser(base, "Python") - USER_SITE = os.path.join(USER_BASE, "Python" + sys.version[0] + sys.version[2], "site-packages") - else: - if env_base: - USER_BASE = env_base - else: - USER_BASE = joinuser("~", ".local") - USER_SITE = os.path.join(USER_BASE, "lib", "python" + sys.version[:3], "site-packages") - - if ENABLE_USER_SITE and os.path.isdir(USER_SITE): - addsitedir(USER_SITE, known_paths) - if ENABLE_USER_SITE: - for dist_libdir in ("lib", "local/lib"): - user_site = os.path.join(USER_BASE, dist_libdir, "python" + sys.version[:3], "dist-packages") - if os.path.isdir(user_site): - addsitedir(user_site, known_paths) - return known_paths - - -def setBEGINLIBPATH(): - """The OS/2 EMX port has optional extension modules that do double duty - as DLLs (and must use the .DLL file extension) for other extensions. - The library search path needs to be amended so these will be found - during module import. Use BEGINLIBPATH so that these are at the start - of the library search path. - - """ - dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") - libpath = os.environ["BEGINLIBPATH"].split(";") - if libpath[-1]: - libpath.append(dllpath) - else: - libpath[-1] = dllpath - os.environ["BEGINLIBPATH"] = ";".join(libpath) - - -def setquit(): - """Define new built-ins 'quit' and 'exit'. - These are simply strings that display a hint on how to exit. - - """ - if os.sep == ":": - eof = "Cmd-Q" - elif os.sep == "\\": - eof = "Ctrl-Z plus Return" - else: - eof = "Ctrl-D (i.e. EOF)" - - class Quitter(object): - def __init__(self, name): - self.name = name - - def __repr__(self): - return "Use {}() or {} to exit".format(self.name, eof) - - def __call__(self, code=None): - # Shells like IDLE catch the SystemExit, but listen when their - # stdin wrapper is closed. 
- try: - sys.stdin.close() - except: - pass - raise SystemExit(code) - - builtins.quit = Quitter("quit") - builtins.exit = Quitter("exit") - - -class _Printer(object): - """interactive prompt objects for printing the license text, a list of - contributors and the copyright notice.""" - - MAXLINES = 23 - - def __init__(self, name, data, files=(), dirs=()): - self.__name = name - self.__data = data - self.__files = files - self.__dirs = dirs - self.__lines = None - - def __setup(self): - if self.__lines: - return - data = None - for dir in self.__dirs: - for filename in self.__files: - filename = os.path.join(dir, filename) - try: - fp = open(filename, "r") - data = fp.read() - fp.close() - break - except IOError: - pass - if data: - break - if not data: - data = self.__data - self.__lines = data.split("\n") - self.__linecnt = len(self.__lines) - - def __repr__(self): - self.__setup() - if len(self.__lines) <= self.MAXLINES: - return "\n".join(self.__lines) - else: - return "Type %s() to see the full %s text" % ((self.__name,) * 2) - - def __call__(self): - self.__setup() - prompt = "Hit Return for more, or q (and Return) to quit: " - lineno = 0 - while 1: - try: - for i in range(lineno, lineno + self.MAXLINES): - print(self.__lines[i]) - except IndexError: - break - else: - lineno += self.MAXLINES - key = None - while key is None: - try: - key = raw_input(prompt) - except NameError: - key = input(prompt) - if key not in ("", "q"): - key = None - if key == "q": - break - - -def setcopyright(): - """Set 'copyright' and 'credits' in __builtin__""" - builtins.copyright = _Printer("copyright", sys.copyright) - if _is_jython: - builtins.credits = _Printer("credits", "Jython is maintained by the Jython developers (www.jython.org).") - elif _is_pypy: - builtins.credits = _Printer("credits", "PyPy is maintained by the PyPy developers: http://pypy.org/") - else: - builtins.credits = _Printer( - "credits", - """\ - Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands - for supporting Python development. See www.python.org for more information.""", - ) - here = os.path.dirname(os.__file__) - builtins.license = _Printer( - "license", - "See https://www.python.org/psf/license/", - ["LICENSE.txt", "LICENSE"], - [sys.prefix, os.path.join(here, os.pardir), here, os.curdir], - ) - - -class _Helper(object): - """Define the built-in 'help'. - This is a wrapper around pydoc.help (with a twist). - - """ - - def __repr__(self): - return "Type help() for interactive help, " "or help(object) for help about object." - - def __call__(self, *args, **kwds): - import pydoc - - return pydoc.help(*args, **kwds) - - -def sethelper(): - builtins.help = _Helper() - - -def aliasmbcs(): - """On Windows, some default encodings are not provided by Python, - while they are always available as "mbcs" in each locale. Make - them usable by aliasing to "mbcs" in such a case.""" - if sys.platform == "win32": - import locale, codecs - - enc = locale.getdefaultlocale()[1] - if enc.startswith("cp"): # "cp***" ? - try: - codecs.lookup(enc) - except LookupError: - import encodings - - encodings._cache[enc] = encodings._unknown - encodings.aliases.aliases[enc] = "mbcs" - - -def setencoding(): - """Set the string encoding used by the Unicode implementation. The - default is 'ascii', but if you're willing to experiment, you can - change this.""" - encoding = "ascii" # Default value set by _PyUnicode_Init() - if 0: - # Enable to support locale aware default string encodings. 
- import locale - - loc = locale.getdefaultlocale() - if loc[1]: - encoding = loc[1] - if 0: - # Enable to switch off string to Unicode coercion and implicit - # Unicode to string conversion. - encoding = "undefined" - if encoding != "ascii": - # On Non-Unicode builds this will raise an AttributeError... - sys.setdefaultencoding(encoding) # Needs Python Unicode build ! - - -def execsitecustomize(): - """Run custom site specific code, if available.""" - try: - import sitecustomize - except ImportError: - pass - - -def virtual_install_main_packages(): - f = open(os.path.join(os.path.dirname(__file__), "orig-prefix.txt")) - sys.real_prefix = f.read().strip() - f.close() - pos = 2 - hardcoded_relative_dirs = [] - if sys.path[0] == "": - pos += 1 - if _is_jython: - paths = [os.path.join(sys.real_prefix, "Lib")] - elif _is_pypy: - if sys.version_info > (3, 2): - cpyver = "%d" % sys.version_info[0] - elif sys.pypy_version_info >= (1, 5): - cpyver = "%d.%d" % sys.version_info[:2] - else: - cpyver = "%d.%d.%d" % sys.version_info[:3] - paths = [os.path.join(sys.real_prefix, "lib_pypy"), os.path.join(sys.real_prefix, "lib-python", cpyver)] - if sys.pypy_version_info < (1, 9): - paths.insert(1, os.path.join(sys.real_prefix, "lib-python", "modified-%s" % cpyver)) - hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below - # - # This is hardcoded in the Python executable, but relative to sys.prefix: - for path in paths[:]: - plat_path = os.path.join(path, "plat-%s" % sys.platform) - if os.path.exists(plat_path): - paths.append(plat_path) - elif sys.platform == "win32": - paths = [os.path.join(sys.real_prefix, "Lib"), os.path.join(sys.real_prefix, "DLLs")] - else: - paths = [os.path.join(sys.real_prefix, "lib", "python" + sys.version[:3])] - hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below - lib64_path = os.path.join(sys.real_prefix, "lib64", "python" + sys.version[:3]) - if os.path.exists(lib64_path): - if _is_64bit: - paths.insert(0, lib64_path) - else: - paths.append(lib64_path) - # This is hardcoded in the Python executable, but relative to - # sys.prefix. Debian change: we need to add the multiarch triplet - # here, which is where the real stuff lives. As per PEP 421, in - # Python 3.3+, this lives in sys.implementation, while in Python 2.7 - # it lives in sys. - try: - arch = getattr(sys, "implementation", sys)._multiarch - except AttributeError: - # This is a non-multiarch aware Python. Fallback to the old way. - arch = sys.platform - plat_path = os.path.join(sys.real_prefix, "lib", "python" + sys.version[:3], "plat-%s" % arch) - if os.path.exists(plat_path): - paths.append(plat_path) - # This is hardcoded in the Python executable, but - # relative to sys.prefix, so we have to fix up: - for path in list(paths): - tk_dir = os.path.join(path, "lib-tk") - if os.path.exists(tk_dir): - paths.append(tk_dir) - - # These are hardcoded in the Apple's Python executable, - # but relative to sys.prefix, so we have to fix them up: - if sys.platform == "darwin": - hardcoded_paths = [ - os.path.join(relative_dir, module) - for relative_dir in hardcoded_relative_dirs - for module in ("plat-darwin", "plat-mac", "plat-mac/lib-scriptpackages") - ] - - for path in hardcoded_paths: - if os.path.exists(path): - paths.append(path) - - sys.path.extend(paths) - - -def force_global_eggs_after_local_site_packages(): - """ - Force easy_installed eggs in the global environment to get placed - in sys.path after all packages inside the virtualenv. 
This - maintains the "least surprise" result that packages in the - virtualenv always mask global packages, never the other way - around. - - """ - egginsert = getattr(sys, "__egginsert", 0) - for i, path in enumerate(sys.path): - if i > egginsert and path.startswith(sys.prefix): - egginsert = i - sys.__egginsert = egginsert + 1 - - -def virtual_addsitepackages(known_paths): - force_global_eggs_after_local_site_packages() - return addsitepackages(known_paths, sys_prefix=sys.real_prefix) - - -def fixclasspath(): - """Adjust the special classpath sys.path entries for Jython. These - entries should follow the base virtualenv lib directories. - """ - paths = [] - classpaths = [] - for path in sys.path: - if path == "__classpath__" or path.startswith("__pyclasspath__"): - classpaths.append(path) - else: - paths.append(path) - sys.path = paths - sys.path.extend(classpaths) - - -def execusercustomize(): - """Run custom user specific code, if available.""" - try: - import usercustomize - except ImportError: - pass - - -def enablerlcompleter(): - """Enable default readline configuration on interactive prompts, by - registering a sys.__interactivehook__. - If the readline module can be imported, the hook will set the Tab key - as completion key and register ~/.python_history as history file. - This can be overridden in the sitecustomize or usercustomize module, - or in a PYTHONSTARTUP file. - """ - - def register_readline(): - import atexit - - try: - import readline - import rlcompleter - except ImportError: - return - - # Reading the initialization (config) file may not be enough to set a - # completion key, so we set one first and then read the file. - readline_doc = getattr(readline, "__doc__", "") - if readline_doc is not None and "libedit" in readline_doc: - readline.parse_and_bind("bind ^I rl_complete") - else: - readline.parse_and_bind("tab: complete") - - try: - readline.read_init_file() - except OSError: - # An OSError here could have many causes, but the most likely one - # is that there's no .inputrc file (or .editrc file in the case of - # Mac OS X + libedit) in the expected location. In that case, we - # want to ignore the exception. - pass - - if readline.get_current_history_length() == 0: - # If no history was loaded, default to .python_history. - # The guard is necessary to avoid doubling history size at - # each interpreter exit when readline was already configured - # through a PYTHONSTARTUP hook, see: - # http://bugs.python.org/issue5845#msg198636 - history = os.path.join(os.path.expanduser("~"), ".python_history") - try: - readline.read_history_file(history) - except OSError: - pass - - def write_history(): - try: - readline.write_history_file(history) - except (FileNotFoundError, PermissionError): - # home directory does not exist or is not writable - # https://bugs.python.org/issue19891 - pass - - atexit.register(write_history) - - sys.__interactivehook__ = register_readline - - -if _is_pypy: - - def import_builtin_stuff(): - """PyPy specific: some built-in modules should be pre-imported because - some programs expect them to be in sys.modules on startup. This is ported - from PyPy's site.py. 
- """ - import encodings - - if "exceptions" in sys.builtin_module_names: - import exceptions - - if "zipimport" in sys.builtin_module_names: - import zipimport - - -def main(): - global ENABLE_USER_SITE - virtual_install_main_packages() - if _is_pypy: - import_builtin_stuff() - abs__file__() - paths_in_sys = removeduppaths() - if os.name == "posix" and sys.path and os.path.basename(sys.path[-1]) == "Modules": - addbuilddir() - if _is_jython: - fixclasspath() - GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), "no-global-site-packages.txt")) - if not GLOBAL_SITE_PACKAGES: - ENABLE_USER_SITE = False - if ENABLE_USER_SITE is None: - ENABLE_USER_SITE = check_enableusersite() - paths_in_sys = addsitepackages(paths_in_sys) - paths_in_sys = addusersitepackages(paths_in_sys) - if GLOBAL_SITE_PACKAGES: - paths_in_sys = virtual_addsitepackages(paths_in_sys) - if sys.platform == "os2emx": - setBEGINLIBPATH() - setquit() - setcopyright() - sethelper() - if sys.version_info[0] == 3: - enablerlcompleter() - aliasmbcs() - setencoding() - execsitecustomize() - if ENABLE_USER_SITE: - execusercustomize() - # Remove sys.setdefaultencoding() so that users cannot change the - # encoding after initialization. The test for presence is needed when - # this module is run as a script, because this code is executed twice. - if hasattr(sys, "setdefaultencoding"): - del sys.setdefaultencoding - - -main() - - -def _script(): - help = """\ - %s [--user-base] [--user-site] - - Without arguments print some useful information - With arguments print the value of USER_BASE and/or USER_SITE separated - by '%s'. - - Exit codes with --user-base or --user-site: - 0 - user site directory is enabled - 1 - user site directory is disabled by user - 2 - uses site directory is disabled by super user - or for security reasons - >2 - unknown error - """ - args = sys.argv[1:] - if not args: - print("sys.path = [") - for dir in sys.path: - print(" {!r},".format(dir)) - print("]") - - def exists(path): - if os.path.isdir(path): - return "exists" - else: - return "doesn't exist" - - print("USER_BASE: {!r} ({})".format(USER_BASE, exists(USER_BASE))) - print("USER_SITE: {!r} ({})".format(USER_SITE, exists(USER_SITE))) - print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE) - sys.exit(0) - - buffer = [] - if "--user-base" in args: - buffer.append(USER_BASE) - if "--user-site" in args: - buffer.append(USER_SITE) - - if buffer: - print(os.pathsep.join(buffer)) - if ENABLE_USER_SITE: - sys.exit(0) - elif ENABLE_USER_SITE is False: - sys.exit(1) - elif ENABLE_USER_SITE is None: - sys.exit(2) - else: - sys.exit(3) - else: - import textwrap - - print(textwrap.dedent(help % (sys.argv[0], os.pathsep))) - sys.exit(10) - - -if __name__ == "__main__": - _script() diff --git a/env/lib/python2.7/site.pyc b/env/lib/python2.7/site.pyc deleted file mode 100644 index d26dee81..00000000 Binary files a/env/lib/python2.7/site.pyc and /dev/null differ diff --git a/env/lib/python2.7/sre.py b/env/lib/python2.7/sre.py deleted file mode 120000 index f070cc95..00000000 --- a/env/lib/python2.7/sre.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/sre.py \ No newline at end of file diff --git a/env/lib/python2.7/sre_compile.py b/env/lib/python2.7/sre_compile.py deleted file mode 120000 index 04c25992..00000000 --- a/env/lib/python2.7/sre_compile.py +++ /dev/null @@ -1 +0,0 @@ 
-/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/sre_compile.py \ No newline at end of file diff --git a/env/lib/python2.7/sre_compile.pyc b/env/lib/python2.7/sre_compile.pyc deleted file mode 100644 index a2ea7f42..00000000 Binary files a/env/lib/python2.7/sre_compile.pyc and /dev/null differ diff --git a/env/lib/python2.7/sre_constants.py b/env/lib/python2.7/sre_constants.py deleted file mode 120000 index 685da600..00000000 --- a/env/lib/python2.7/sre_constants.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/sre_constants.py \ No newline at end of file diff --git a/env/lib/python2.7/sre_constants.pyc b/env/lib/python2.7/sre_constants.pyc deleted file mode 100644 index 67e289ec..00000000 Binary files a/env/lib/python2.7/sre_constants.pyc and /dev/null differ diff --git a/env/lib/python2.7/sre_parse.py b/env/lib/python2.7/sre_parse.py deleted file mode 120000 index 94040763..00000000 --- a/env/lib/python2.7/sre_parse.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/sre_parse.py \ No newline at end of file diff --git a/env/lib/python2.7/sre_parse.pyc b/env/lib/python2.7/sre_parse.pyc deleted file mode 100644 index 7903039a..00000000 Binary files a/env/lib/python2.7/sre_parse.pyc and /dev/null differ diff --git a/env/lib/python2.7/stat.py b/env/lib/python2.7/stat.py deleted file mode 120000 index 8af2f4dd..00000000 --- a/env/lib/python2.7/stat.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/stat.py \ No newline at end of file diff --git a/env/lib/python2.7/stat.pyc b/env/lib/python2.7/stat.pyc deleted file mode 100644 index 265b0fa4..00000000 Binary files a/env/lib/python2.7/stat.pyc and /dev/null differ diff --git a/env/lib/python2.7/types.py b/env/lib/python2.7/types.py deleted file mode 120000 index ca35d1be..00000000 --- a/env/lib/python2.7/types.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/types.py \ No newline at end of file diff --git a/env/lib/python2.7/types.pyc b/env/lib/python2.7/types.pyc deleted file mode 100644 index 002ff73f..00000000 Binary files a/env/lib/python2.7/types.pyc and /dev/null differ diff --git a/env/lib/python2.7/warnings.py b/env/lib/python2.7/warnings.py deleted file mode 120000 index fd699878..00000000 --- a/env/lib/python2.7/warnings.py +++ /dev/null @@ -1 +0,0 @@ -/usr/local/Cellar/python@2/2.7.15_3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/warnings.py \ No newline at end of file diff --git a/env/lib/python2.7/warnings.pyc b/env/lib/python2.7/warnings.pyc deleted file mode 100644 index 8ee852d7..00000000 Binary files a/env/lib/python2.7/warnings.pyc and /dev/null differ diff --git a/main.py b/main.py new file mode 100644 index 00000000..3d570328 --- /dev/null +++ b/main.py @@ -0,0 +1,14 @@ +# This is a sample Python script. + +# Press Shift+F10 to execute it or replace it with your code. +# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings. + +# Press the green button in the gutter to run the script. +import com.main.autotimer as autotimer + +if __name__ == '__main__': + # Edit test in here. 
+ window_name = autotimer.get_active_window() + print(window_name) + +# See PyCharm help at https://www.jetbrains.com/help/pycharm/ diff --git a/venv/Lib/site-packages/PyWin32.chm b/venv/Lib/site-packages/PyWin32.chm new file mode 100644 index 00000000..94e6abbf Binary files /dev/null and b/venv/Lib/site-packages/PyWin32.chm differ diff --git a/venv/Lib/site-packages/__pycache__/pythoncom.cpython-36.pyc b/venv/Lib/site-packages/__pycache__/pythoncom.cpython-36.pyc new file mode 100644 index 00000000..4bf67d2c Binary files /dev/null and b/venv/Lib/site-packages/__pycache__/pythoncom.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/__pycache__/six.cpython-36.pyc b/venv/Lib/site-packages/__pycache__/six.cpython-36.pyc new file mode 100644 index 00000000..c261305e Binary files /dev/null and b/venv/Lib/site-packages/__pycache__/six.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/_distutils_hack/__init__.py b/venv/Lib/site-packages/_distutils_hack/__init__.py new file mode 100644 index 00000000..5f40996a --- /dev/null +++ b/venv/Lib/site-packages/_distutils_hack/__init__.py @@ -0,0 +1,128 @@ +import sys +import os +import re +import importlib +import warnings + + +is_pypy = '__pypy__' in sys.builtin_module_names + + +warnings.filterwarnings('ignore', + r'.+ distutils\b.+ deprecated', + DeprecationWarning) + + +def warn_distutils_present(): + if 'distutils' not in sys.modules: + return + if is_pypy and sys.version_info < (3, 7): + # PyPy for 3.6 unconditionally imports distutils, so bypass the warning + # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 + return + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + "also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. not an editable install), and/or make sure " + "that setuptools is always imported before distutils.") + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + warnings.warn("Setuptools is replacing distutils.") + mods = [name for name in sys.modules if re.match(r'distutils\b', name)] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') + return which == 'local' + + +def ensure_local_distutils(): + clear_distutils() + distutils = importlib.import_module('setuptools._distutils') + distutils.__name__ = 'distutils' + sys.modules['distutils'] = distutils + + # sanity check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. + + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. 
+ """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + if path is not None: + return + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + import importlib.abc + import importlib.util + + class DistutilsLoader(importlib.abc.Loader): + + def create_module(self, spec): + return importlib.import_module('setuptools._distutils') + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader('distutils', DistutilsLoader()) + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. + """ + if self.pip_imported_during_build(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @staticmethod + def pip_imported_during_build(): + """ + Detect if pip is being imported in a build script. Ref #2355. + """ + import traceback + return any( + frame.f_globals['__file__'].endswith('setup.py') + for frame, line in traceback.walk_stack(None) + ) + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass diff --git a/venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..1378ea94 Binary files /dev/null and b/venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-36.pyc b/venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-36.pyc new file mode 100644 index 00000000..d449a553 Binary files /dev/null and b/venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/_distutils_hack/override.py b/venv/Lib/site-packages/_distutils_hack/override.py new file mode 100644 index 00000000..2cc433a4 --- /dev/null +++ b/venv/Lib/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/venv/Lib/site-packages/adodbapi/__init__.py b/venv/Lib/site-packages/adodbapi/__init__.py new file mode 100644 index 00000000..30be4798 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/__init__.py @@ -0,0 +1,48 @@ +"""adodbapi - A python DB API 2.0 (PEP 249) interface to Microsoft ADO + +Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole +* http://sourceforge.net/projects/adodbapi +""" +import sys +import time + +from .apibase import apilevel, threadsafety, paramstyle +from .apibase import Warning, Error, InterfaceError, DatabaseError, DataError, OperationalError, IntegrityError +from .apibase import InternalError, ProgrammingError, NotSupportedError, FetchFailedError +from .apibase import NUMBER, STRING, BINARY, DATETIME, ROWID + +from .adodbapi import connect, Connection, __version__, dateconverter, Cursor + +def Binary(aString): + """This function constructs an object capable of holding a binary (long) string value. """ + return bytes(aString) + +def Date(year,month,day): + "This function constructs an object holding a date value. " + return dateconverter.Date(year,month,day) + +def Time(hour,minute,second): + "This function constructs an object holding a time value. 
" + return dateconverter.Time(hour,minute,second) + +def Timestamp(year,month,day,hour,minute,second): + "This function constructs an object holding a time stamp value. " + return dateconverter.Timestamp(year,month,day,hour,minute,second) + +def DateFromTicks(ticks): + """This function constructs an object holding a date value from the given ticks value + (number of seconds since the epoch; see the documentation of the standard Python time module for details). """ + return Date(*time.gmtime(ticks)[:3]) + +def TimeFromTicks(ticks): + """This function constructs an object holding a time value from the given ticks value + (number of seconds since the epoch; see the documentation of the standard Python time module for details). """ + return Time(*time.gmtime(ticks)[3:6]) + +def TimestampFromTicks(ticks): + """This function constructs an object holding a time stamp value from the given + ticks value (number of seconds since the epoch; + see the documentation of the standard Python time module for details). """ + return Timestamp(*time.gmtime(ticks)[:6]) + +version = 'adodbapi v' + __version__ diff --git a/venv/Lib/site-packages/adodbapi/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..fe04bf94 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/__pycache__/ado_consts.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/__pycache__/ado_consts.cpython-36.pyc new file mode 100644 index 00000000..c8199ccf Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/__pycache__/ado_consts.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/__pycache__/adodbapi.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/__pycache__/adodbapi.cpython-36.pyc new file mode 100644 index 00000000..461eedc1 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/__pycache__/adodbapi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/__pycache__/apibase.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/__pycache__/apibase.cpython-36.pyc new file mode 100644 index 00000000..75408649 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/__pycache__/apibase.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/__pycache__/is64bit.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/__pycache__/is64bit.cpython-36.pyc new file mode 100644 index 00000000..2f20b067 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/__pycache__/is64bit.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/__pycache__/process_connect_string.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/__pycache__/process_connect_string.cpython-36.pyc new file mode 100644 index 00000000..dc7a0176 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/__pycache__/process_connect_string.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/__pycache__/remote.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/__pycache__/remote.cpython-36.pyc new file mode 100644 index 00000000..d954baf8 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/__pycache__/remote.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/__pycache__/schema_table.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/__pycache__/schema_table.cpython-36.pyc new file mode 100644 index 00000000..6d43b537 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/__pycache__/schema_table.cpython-36.pyc 
differ diff --git a/venv/Lib/site-packages/adodbapi/__pycache__/setup.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/__pycache__/setup.cpython-36.pyc new file mode 100644 index 00000000..8c98fdbd Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/__pycache__/setup.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/ado_consts.py b/venv/Lib/site-packages/adodbapi/ado_consts.py new file mode 100644 index 00000000..5ecf7356 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/ado_consts.py @@ -0,0 +1,276 @@ +# ADO enumerated constants documented on MSDN: +# http://msdn.microsoft.com/en-us/library/ms678353(VS.85).aspx + +# IsolationLevelEnum +adXactUnspecified = -1 +adXactBrowse = 0x100 +adXactChaos = 0x10 +adXactCursorStability = 0x1000 +adXactIsolated = 0x100000 +adXactReadCommitted = 0x1000 +adXactReadUncommitted = 0x100 +adXactRepeatableRead = 0x10000 +adXactSerializable = 0x100000 + +# CursorLocationEnum +adUseClient = 3 +adUseServer = 2 + +# CursorTypeEnum +adOpenDynamic = 2 +adOpenForwardOnly = 0 +adOpenKeyset = 1 +adOpenStatic = 3 +adOpenUnspecified = -1 + +# CommandTypeEnum +adCmdText = 1 +adCmdStoredProc = 4 +adSchemaTables = 20 + +# ParameterDirectionEnum +adParamInput = 1 +adParamInputOutput = 3 +adParamOutput = 2 +adParamReturnValue = 4 +adParamUnknown = 0 +directions = { + 0: 'Unknown', + 1: 'Input', + 2: 'Output', + 3: 'InputOutput', + 4: 'Return', + } +def ado_direction_name(ado_dir): + try: + return 'adParam' + directions[ado_dir] + except: + return 'unknown direction ('+str(ado_dir)+')' + +# ObjectStateEnum +adStateClosed = 0 +adStateOpen = 1 +adStateConnecting = 2 +adStateExecuting = 4 +adStateFetching = 8 + +# FieldAttributeEnum +adFldMayBeNull = 0x40 + +# ConnectModeEnum +adModeUnknown = 0 +adModeRead = 1 +adModeWrite = 2 +adModeReadWrite = 3 +adModeShareDenyRead = 4 +adModeShareDenyWrite = 8 +adModeShareExclusive = 12 +adModeShareDenyNone = 16 +adModeRecursive = 0x400000 + +# XactAttributeEnum +adXactCommitRetaining = 131072 +adXactAbortRetaining = 262144 + +ado_error_TIMEOUT = -2147217871 + +# DataTypeEnum - ADO Data types documented at: +# http://msdn2.microsoft.com/en-us/library/ms675318.aspx +adArray = 0x2000 +adEmpty = 0x0 +adBSTR = 0x8 +adBigInt = 0x14 +adBinary = 0x80 +adBoolean = 0xb +adChapter = 0x88 +adChar = 0x81 +adCurrency = 0x6 +adDBDate = 0x85 +adDBTime = 0x86 +adDBTimeStamp = 0x87 +adDate = 0x7 +adDecimal = 0xe +adDouble = 0x5 +adError = 0xa +adFileTime = 0x40 +adGUID = 0x48 +adIDispatch = 0x9 +adIUnknown = 0xd +adInteger = 0x3 +adLongVarBinary = 0xcd +adLongVarChar = 0xc9 +adLongVarWChar = 0xcb +adNumeric = 0x83 +adPropVariant = 0x8a +adSingle = 0x4 +adSmallInt = 0x2 +adTinyInt = 0x10 +adUnsignedBigInt = 0x15 +adUnsignedInt = 0x13 +adUnsignedSmallInt = 0x12 +adUnsignedTinyInt = 0x11 +adUserDefined = 0x84 +adVarBinary = 0xCC +adVarChar = 0xC8 +adVarNumeric = 0x8B +adVarWChar = 0xCA +adVariant = 0xC +adWChar = 0x82 +# Additional constants used by introspection but not ADO itself +AUTO_FIELD_MARKER = -1000 + +adTypeNames = { + adBSTR: 'adBSTR', + adBigInt: 'adBigInt', + adBinary: 'adBinary', + adBoolean: 'adBoolean', + adChapter: 'adChapter', + adChar: 'adChar', + adCurrency: 'adCurrency', + adDBDate: 'adDBDate', + adDBTime: 'adDBTime', + adDBTimeStamp: 'adDBTimeStamp', + adDate: 'adDate', + adDecimal: 'adDecimal', + adDouble: 'adDouble', + adEmpty: 'adEmpty', + adError: 'adError', + adFileTime: 'adFileTime', + adGUID: 'adGUID', + adIDispatch: 'adIDispatch', + adIUnknown: 'adIUnknown', + adInteger: 'adInteger', + adLongVarBinary: 
'adLongVarBinary', + adLongVarChar: 'adLongVarChar', + adLongVarWChar: 'adLongVarWChar', + adNumeric: 'adNumeric', + adPropVariant: 'adPropVariant', + adSingle: 'adSingle', + adSmallInt: 'adSmallInt', + adTinyInt: 'adTinyInt', + adUnsignedBigInt: 'adUnsignedBigInt', + adUnsignedInt: 'adUnsignedInt', + adUnsignedSmallInt: 'adUnsignedSmallInt', + adUnsignedTinyInt: 'adUnsignedTinyInt', + adUserDefined: 'adUserDefined', + adVarBinary: 'adVarBinary', + adVarChar: 'adVarChar', + adVarNumeric: 'adVarNumeric', + adVarWChar: 'adVarWChar', + adVariant: 'adVariant', + adWChar: 'adWChar', + } + +def ado_type_name(ado_type): + return adTypeNames.get(ado_type, 'unknown type ('+str(ado_type)+')') + +# here in decimal, sorted by value +#adEmpty 0 Specifies no value (DBTYPE_EMPTY). +#adSmallInt 2 Indicates a two-byte signed integer (DBTYPE_I2). +#adInteger 3 Indicates a four-byte signed integer (DBTYPE_I4). +#adSingle 4 Indicates a single-precision floating-point value (DBTYPE_R4). +#adDouble 5 Indicates a double-precision floating-point value (DBTYPE_R8). +#adCurrency 6 Indicates a currency value (DBTYPE_CY). Currency is a fixed-point number +# with four digits to the right of the decimal point. It is stored in an eight-byte signed integer scaled by 10,000. +#adDate 7 Indicates a date value (DBTYPE_DATE). A date is stored as a double, the whole part of which is +# the number of days since December 30, 1899, and the fractional part of which is the fraction of a day. +#adBSTR 8 Indicates a null-terminated character string (Unicode) (DBTYPE_BSTR). +#adIDispatch 9 Indicates a pointer to an IDispatch interface on a COM object (DBTYPE_IDISPATCH). +#adError 10 Indicates a 32-bit error code (DBTYPE_ERROR). +#adBoolean 11 Indicates a boolean value (DBTYPE_BOOL). +#adVariant 12 Indicates an Automation Variant (DBTYPE_VARIANT). +#adIUnknown 13 Indicates a pointer to an IUnknown interface on a COM object (DBTYPE_IUNKNOWN). +#adDecimal 14 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_DECIMAL). +#adTinyInt 16 Indicates a one-byte signed integer (DBTYPE_I1). +#adUnsignedTinyInt 17 Indicates a one-byte unsigned integer (DBTYPE_UI1). +#adUnsignedSmallInt 18 Indicates a two-byte unsigned integer (DBTYPE_UI2). +#adUnsignedInt 19 Indicates a four-byte unsigned integer (DBTYPE_UI4). +#adBigInt 20 Indicates an eight-byte signed integer (DBTYPE_I8). +#adUnsignedBigInt 21 Indicates an eight-byte unsigned integer (DBTYPE_UI8). +#adFileTime 64 Indicates a 64-bit value representing the number of 100-nanosecond intervals since +# January 1, 1601 (DBTYPE_FILETIME). +#adGUID 72 Indicates a globally unique identifier (GUID) (DBTYPE_GUID). +#adBinary 128 Indicates a binary value (DBTYPE_BYTES). +#adChar 129 Indicates a string value (DBTYPE_STR). +#adWChar 130 Indicates a null-terminated Unicode character string (DBTYPE_WSTR). +#adNumeric 131 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_NUMERIC). +# adUserDefined 132 Indicates a user-defined variable (DBTYPE_UDT). +#adUserDefined 132 Indicates a user-defined variable (DBTYPE_UDT). +#adDBDate 133 Indicates a date value (yyyymmdd) (DBTYPE_DBDATE). +#adDBTime 134 Indicates a time value (hhmmss) (DBTYPE_DBTIME). +#adDBTimeStamp 135 Indicates a date/time stamp (yyyymmddhhmmss plus a fraction in billionths) (DBTYPE_DBTIMESTAMP). +#adChapter 136 Indicates a four-byte chapter value that identifies rows in a child rowset (DBTYPE_HCHAPTER). +#adPropVariant 138 Indicates an Automation PROPVARIANT (DBTYPE_PROP_VARIANT). 
+#adVarNumeric 139 Indicates a numeric value (Parameter object only). +#adVarChar 200 Indicates a string value (Parameter object only). +#adLongVarChar 201 Indicates a long string value (Parameter object only). +#adVarWChar 202 Indicates a null-terminated Unicode character string (Parameter object only). +#adLongVarWChar 203 Indicates a long null-terminated Unicode string value (Parameter object only). +#adVarBinary 204 Indicates a binary value (Parameter object only). +#adLongVarBinary 205 Indicates a long binary value (Parameter object only). +#adArray (Does not apply to ADOX.) 0x2000 A flag value, always combined with another data type constant, +# that indicates an array of that other data type. + +# Error codes to names +adoErrors= { + 0xe7b :'adErrBoundToCommand', + 0xe94 :'adErrCannotComplete', + 0xea4 :'adErrCantChangeConnection', + 0xc94 :'adErrCantChangeProvider', + 0xe8c :'adErrCantConvertvalue', + 0xe8d :'adErrCantCreate', + 0xea3 :'adErrCatalogNotSet', + 0xe8e :'adErrColumnNotOnThisRow', + 0xd5d :'adErrDataConversion', + 0xe89 :'adErrDataOverflow', + 0xe9a :'adErrDelResOutOfScope', + 0xea6 :'adErrDenyNotSupported', + 0xea7 :'adErrDenyTypeNotSupported', + 0xcb3 :'adErrFeatureNotAvailable', + 0xea5 :'adErrFieldsUpdateFailed', + 0xc93 :'adErrIllegalOperation', + 0xcae :'adErrInTransaction', + 0xe87 :'adErrIntegrityViolation', + 0xbb9 :'adErrInvalidArgument', + 0xe7d :'adErrInvalidConnection', + 0xe7c :'adErrInvalidParamInfo', + 0xe82 :'adErrInvalidTransaction', + 0xe91 :'adErrInvalidURL', + 0xcc1 :'adErrItemNotFound', + 0xbcd :'adErrNoCurrentRecord', + 0xe83 :'adErrNotExecuting', + 0xe7e :'adErrNotReentrant', + 0xe78 :'adErrObjectClosed', + 0xd27 :'adErrObjectInCollection', + 0xd5c :'adErrObjectNotSet', + 0xe79 :'adErrObjectOpen', + 0xbba :'adErrOpeningFile', + 0xe80 :'adErrOperationCancelled', + 0xe96 :'adErrOutOfSpace', + 0xe88 :'adErrPermissionDenied', + 0xe9e :'adErrPropConflicting', + 0xe9b :'adErrPropInvalidColumn', + 0xe9c :'adErrPropInvalidOption', + 0xe9d :'adErrPropInvalidValue', + 0xe9f :'adErrPropNotAllSettable', + 0xea0 :'adErrPropNotSet', + 0xea1 :'adErrPropNotSettable', + 0xea2 :'adErrPropNotSupported', + 0xbb8 :'adErrProviderFailed', + 0xe7a :'adErrProviderNotFound', + 0xbbb :'adErrReadFile', + 0xe93 :'adErrResourceExists', + 0xe92 :'adErrResourceLocked', + 0xe97 :'adErrResourceOutOfScope', + 0xe8a :'adErrSchemaViolation', + 0xe8b :'adErrSignMismatch', + 0xe81 :'adErrStillConnecting', + 0xe7f :'adErrStillExecuting', + 0xe90 :'adErrTreePermissionDenied', + 0xe8f :'adErrURLDoesNotExist', + 0xe99 :'adErrURLNamedRowDoesNotExist', + 0xe98 :'adErrUnavailable', + 0xe84 :'adErrUnsafeOperation', + 0xe95 :'adErrVolumeNotFound', + 0xbbc :'adErrWriteFile' + } diff --git a/venv/Lib/site-packages/adodbapi/adodbapi.py b/venv/Lib/site-packages/adodbapi/adodbapi.py new file mode 100644 index 00000000..428663fa --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/adodbapi.py @@ -0,0 +1,1008 @@ +"""adodbapi - A python DB API 2.0 (PEP 249) interface to Microsoft ADO + +Copyright (C) 2002 Henrik Ekelund, versions 2.1 and later by Vernon Cole +* http://sourceforge.net/projects/pywin32 +* https://github.com/mhammond/pywin32 +* http://sourceforge.net/projects/adodbapi + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. 
+ + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + + django adaptations and refactoring by Adam Vandenberg + +DB-API 2.0 specification: http://www.python.org/dev/peps/pep-0249/ + +This module source should run correctly in CPython versions 2.7 and later, +or IronPython version 2.7 and later, +or, after running through 2to3.py, CPython 3.4 or later. +""" + +__version__ = '2.6.2.0' +version = 'adodbapi v' + __version__ + +import sys +import copy +import decimal +import os +import weakref + +from . import process_connect_string +from . import ado_consts as adc +from . import apibase as api + +try: + verbose = int(os.environ['ADODBAPI_VERBOSE']) +except: + verbose = False +if verbose: + print(version) + +# --- define objects to smooth out IronPython <-> CPython differences +onWin32 = False # assume the worst +if api.onIronPython: + from System import Activator, Type, DBNull, DateTime, Array, Byte + from System import Decimal as SystemDecimal + from clr import Reference + def Dispatch(dispatch): + type = Type.GetTypeFromProgID(dispatch) + return Activator.CreateInstance(type) + def getIndexedValue(obj,index): + return obj.Item[index] +else: # try pywin32 + try: + import win32com.client + import pythoncom + import pywintypes + onWin32 = True + def Dispatch(dispatch): + return win32com.client.Dispatch(dispatch) + except ImportError: + import warnings + warnings.warn("pywin32 package (or IronPython) required for adodbapi.",ImportWarning) + def getIndexedValue(obj,index): + return obj(index) + +from collections.abc import Mapping + +# --- define objects to smooth out Python3000 <-> Python 2.x differences +unicodeType = str +longType = int +StringTypes = str +maxint = sys.maxsize + +# ----------------- The .connect method ----------------- +def make_COM_connecter(): + try: + if onWin32: + pythoncom.CoInitialize() #v2.1 Paj + c = Dispatch('ADODB.Connection') #connect _after_ CoIninialize v2.1.1 adamvan + except: + raise api.InterfaceError ("Windows COM Error: Dispatch('ADODB.Connection') failed.") + return c + +def connect(*args, **kwargs): # --> a db-api connection object + """Connect to a database. + + call using: + :connection_string -- An ADODB formatted connection string, see: + * http://www.connectionstrings.com + * http://www.asp101.com/articles/john/connstring/default.asp + :timeout -- A command timeout value, in seconds (default 30 seconds) + """ + co = Connection() # make an empty connection object + + kwargs = process_connect_string.process(args, kwargs, True) + + try: # connect to the database, using the connection information in kwargs + co.connect(kwargs) + return co + except (Exception) as e: + message = 'Error opening connection to "%s"' % co.connection_string + raise api.OperationalError(e, message) + +# so you could use something like: +# myConnection.paramstyle = 'named' +# The programmer may also change the default. 
+# For example, if I were using django, I would say: +# import adodbapi as Database +# Database.adodbapi.paramstyle = 'format' + +# ------- other module level defaults -------- +defaultIsolationLevel = adc.adXactReadCommitted +# Set defaultIsolationLevel on module level before creating the connection. +# For example: +# import adodbapi, ado_consts +# adodbapi.adodbapi.defaultIsolationLevel=ado_consts.adXactBrowse" +# +# Set defaultCursorLocation on module level before creating the connection. +# It may be one of the "adUse..." consts. +defaultCursorLocation = adc.adUseClient # changed from adUseServer as of v 2.3.0 + +dateconverter = api.pythonDateTimeConverter() # default + +def format_parameters(ADOparameters, show_value=False): + """Format a collection of ADO Command Parameters. + + Used by error reporting in _execute_command. + """ + try: + if show_value: + desc = [ + "Name: %s, Dir.: %s, Type: %s, Size: %s, Value: \"%s\", Precision: %s, NumericScale: %s" %\ + (p.Name, adc.directions[p.Direction], adc.adTypeNames.get(p.Type, str(p.Type)+' (unknown type)'), p.Size, p.Value, p.Precision, p.NumericScale) + for p in ADOparameters ] + else: + desc = [ + "Name: %s, Dir.: %s, Type: %s, Size: %s, Precision: %s, NumericScale: %s" %\ + (p.Name, adc.directions[p.Direction], adc.adTypeNames.get(p.Type, str(p.Type)+' (unknown type)'), p.Size, p.Precision, p.NumericScale) + for p in ADOparameters ] + return '[' + '\n'.join(desc) + ']' + except: + return '[]' + +def _configure_parameter(p, value, adotype, settings_known): + """Configure the given ADO Parameter 'p' with the Python 'value'.""" + + if adotype in api.adoBinaryTypes: + p.Size = len(value) + p.AppendChunk(value) + + elif isinstance(value,StringTypes): #v2.1 Jevon + L = len(value) + if adotype in api.adoStringTypes: #v2.2.1 Cole + if settings_known: L = min(L,p.Size) #v2.1 Cole limit data to defined size + p.Value = value[:L] #v2.1 Jevon & v2.1 Cole + else: + p.Value = value # dont limit if db column is numeric + if L>0: #v2.1 Cole something does not like p.Size as Zero + p.Size = L #v2.1 Jevon + + elif isinstance(value, decimal.Decimal): + if api.onIronPython: + s = str(value) + p.Value = s + p.Size = len(s) + else: + p.Value = value + exponent = value.as_tuple()[2] + digit_count = len(value.as_tuple()[1]) + p.Precision = digit_count + if exponent == 0: + p.NumericScale = 0 + elif exponent < 0: + p.NumericScale = -exponent + if p.Precision < p.NumericScale: + p.Precision = p.NumericScale + else: # exponent > 0: + p.NumericScale = 0 + p.Precision = digit_count + exponent + + elif type(value) in dateconverter.types: + if settings_known and adotype in api.adoDateTimeTypes: + p.Value = dateconverter.COMDate(value) + else: #probably a string + # provide the date as a string in the format 'YYYY-MM-dd' + s = dateconverter.DateObjectToIsoFormatString(value) + p.Value = s + p.Size = len(s) + + elif api.onIronPython and isinstance(value, longType): # Iron Python Long + s = str(value) # feature workaround for IPy 2.0 + p.Value = s + + elif adotype == adc.adEmpty: # ADO will not let you specify a null column + p.Type = adc.adInteger # so we will fake it to be an integer (just to have something) + p.Value = None # and pass in a Null *value* + + # For any other type, set the value and let pythoncom do the right thing. + else: + p.Value = value + + +# # # # # ----- the Class that defines a connection ----- # # # # # +class Connection(object): + # include connection attributes as class attributes required by api definition. 
+ Warning = api.Warning + Error = api.Error + InterfaceError = api.InterfaceError + DataError = api.DataError + DatabaseError = api.DatabaseError + OperationalError = api.OperationalError + IntegrityError = api.IntegrityError + InternalError = api.InternalError + NotSupportedError = api.NotSupportedError + ProgrammingError = api.ProgrammingError + FetchFailedError = api.FetchFailedError # (special for django) + # ...class attributes... (can be overridden by instance attributes) + verbose = api.verbose + + @property + def dbapi(self): # a proposed db-api version 3 extension. + "Return a reference to the DBAPI module for this Connection." + return api + + def __init__(self): # now define the instance attributes + self.connector = None + self.paramstyle = api.paramstyle + self.supportsTransactions = False + self.connection_string = '' + self.cursors = weakref.WeakValueDictionary() + self.dbms_name = '' + self.dbms_version = '' + self.errorhandler = None # use the standard error handler for this instance + self.transaction_level = 0 # 0 == Not in a transaction, at the top level + self._autocommit = False + + def connect(self, kwargs, connection_maker=make_COM_connecter): + if verbose > 9: + print('kwargs=', repr(kwargs)) + try: + self.connection_string = kwargs['connection_string'] % kwargs # insert keyword arguments + except (Exception) as e: + self._raiseConnectionError(KeyError,'Python string format error in connection string->') + self.timeout = kwargs.get('timeout', 30) + self.mode = kwargs.get("mode", adc.adModeUnknown) + self.kwargs = kwargs + if verbose: + print('%s attempting: "%s"' % (version, self.connection_string)) + self.connector = connection_maker() + self.connector.ConnectionTimeout = self.timeout + self.connector.ConnectionString = self.connection_string + self.connector.Mode = self.mode + + try: + self.connector.Open() # Open the ADO connection + except api.Error: + self._raiseConnectionError(api.DatabaseError, 'ADO error trying to Open=%s' % self.connection_string) + + try: # Stefan Fuchs; support WINCCOLEDBProvider + if getIndexedValue(self.connector.Properties,'Transaction DDL').Value != 0: + self.supportsTransactions=True + except pywintypes.com_error: + pass # Stefan Fuchs + self.dbms_name = getIndexedValue(self.connector.Properties,'DBMS Name').Value + try: # Stefan Fuchs + self.dbms_version = getIndexedValue(self.connector.Properties,'DBMS Version').Value + except pywintypes.com_error: + pass # Stefan Fuchs + self.connector.CursorLocation = defaultCursorLocation #v2.1 Rose + if self.supportsTransactions: + self.connector.IsolationLevel=defaultIsolationLevel + self._autocommit = bool(kwargs.get('autocommit', False)) + if not self._autocommit: + self.transaction_level = self.connector.BeginTrans() #Disables autocommit & inits transaction_level + else: + self._autocommit = True + if 'paramstyle' in kwargs: + self.paramstyle = kwargs['paramstyle'] # let setattr do the error checking + self.messages=[] + if verbose: + print('adodbapi New connection at %X' % id(self)) + + def _raiseConnectionError(self, errorclass, errorvalue): + eh = self.errorhandler + if eh is None: + eh = api.standardErrorHandler + eh(self, None, errorclass, errorvalue) + + def _closeAdoConnection(self): #all v2.1 Rose + """close the underlying ADO Connection object, + rolling it back first if it supports transactions.""" + if self.connector is None: + return + if not self._autocommit: + if self.transaction_level: + try: self.connector.RollbackTrans() + except: pass + self.connector.Close() + if 
verbose: + print('adodbapi Closed connection at %X' % id(self)) + + def close(self): + """Close the connection now (rather than whenever __del__ is called). + + The connection will be unusable from this point forward; + an Error (or subclass) exception will be raised if any operation is attempted with the connection. + The same applies to all cursor objects trying to use the connection. + """ + for crsr in list(self.cursors.values())[:]: # copy the list, then close each one + crsr.close(dont_tell_me=True) # close without back-link clearing + self.messages = [] + try: + self._closeAdoConnection() #v2.1 Rose + except (Exception) as e: + self._raiseConnectionError(sys.exc_info()[0], sys.exc_info()[1]) + + self.connector = None #v2.4.2.2 fix subtle timeout bug + # per M.Hammond: "I expect the benefits of uninitializing are probably fairly small, + # so never uninitializing will probably not cause any problems." + + def commit(self): + """Commit any pending transaction to the database. + + Note that if the database supports an auto-commit feature, + this must be initially off. An interface method may be provided to turn it back on. + Database modules that do not support transactions should implement this method with void functionality. + """ + self.messages = [] + if not self.supportsTransactions: + return + + try: + self.transaction_level = self.connector.CommitTrans() + if verbose > 1: + print('commit done on connection at %X' % id(self)) + if not (self._autocommit or (self.connector.Attributes & adc.adXactAbortRetaining)): + #If attributes has adXactCommitRetaining it performs retaining commits that is, + #calling CommitTrans automatically starts a new transaction. Not all providers support this. + #If not, we will have to start a new transaction by this command: + self.transaction_level = self.connector.BeginTrans() + except Exception as e: + self._raiseConnectionError(api.ProgrammingError, e) + + def _rollback(self): + """In case a database does provide transactions this method causes the the database to roll back to + the start of any pending transaction. Closing a connection without committing the changes first will + cause an implicit rollback to be performed. + + If the database does not support the functionality required by the method, the interface should + throw an exception in case the method is used. + The preferred approach is to not implement the method and thus have Python generate + an AttributeError in case the method is requested. This allows the programmer to check for database + capabilities using the standard hasattr() function. + + For some dynamically configured interfaces it may not be appropriate to require dynamically making + the method available. These interfaces should then raise a NotSupportedError to indicate the + non-ability to perform the roll back when the method is invoked. + """ + self.messages=[] + if self.transaction_level: # trying to roll back with no open transaction causes an error + try: + self.transaction_level = self.connector.RollbackTrans() + if verbose > 1: + print('rollback done on connection at %X' % id(self)) + if not self._autocommit and not(self.connector.Attributes & adc.adXactAbortRetaining): + #If attributes has adXactAbortRetaining it performs retaining aborts that is, + #calling RollbackTrans automatically starts a new transaction. Not all providers support this. 
+ #If not, we will have to start a new transaction by this command: + if not self.transaction_level: # if self.transaction_level == 0 or self.transaction_level is None: + self.transaction_level = self.connector.BeginTrans() + except Exception as e: + self._raiseConnectionError(api.ProgrammingError, e) + + def __setattr__(self, name, value): + if name == 'autocommit': # extension: allow user to turn autocommit on or off + if self.supportsTransactions: + object.__setattr__(self, '_autocommit', bool(value)) + try: self._rollback() # must clear any outstanding transactions + except: pass + return + elif name == 'paramstyle': + if value not in api.accepted_paramstyles: + self._raiseConnectionError(api.NotSupportedError, + 'paramstyle="%s" not in:%s' % (value, repr(api.accepted_paramstyles))) + elif name == 'variantConversions': + value = copy.copy(value) # make a new copy -- no changes in the default, please + object.__setattr__(self, name, value) + + def __getattr__(self, item): + if item == 'rollback': # the rollback method only appears if the database supports transactions + if self.supportsTransactions: + return self._rollback # return the rollback method so the caller can execute it. + else: + raise AttributeError ('this data provider does not support Rollback') + elif item == 'autocommit': + return self._autocommit + else: + raise AttributeError('no such attribute in ADO connection object as="%s"' % item) + + def cursor(self): + "Return a new Cursor Object using the connection." + self.messages = [] + c = Cursor(self) + return c + + def _i_am_here(self, crsr): + "message from a new cursor proclaiming its existence" + oid = id(crsr) + self.cursors[oid] = crsr + + def _i_am_closing(self,crsr): + "message from a cursor giving connection a chance to clean up" + try: + del self.cursors[id(crsr)] + except: + pass + + def printADOerrors(self): + j=self.connector.Errors.Count + if j: + print('ADO Errors:(%i)' % j) + for e in self.connector.Errors: + print('Description: %s' % e.Description) + print('Error: %s %s ' % (e.Number, adc.adoErrors.get(e.Number, "unknown"))) + if e.Number == adc.ado_error_TIMEOUT: + print('Timeout Error: Try using adodbpi.connect(constr,timeout=Nseconds)') + print('Source: %s' % e.Source) + print('NativeError: %s' % e.NativeError) + print('SQL State: %s' % e.SQLState) + + def _suggest_error_class(self): + """Introspect the current ADO Errors and determine an appropriate error class. + + Error.SQLState is a SQL-defined error condition, per the SQL specification: + http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt + + The 23000 class of errors are integrity errors. + Error 40002 is a transactional integrity error. 
+ """ + if self.connector is not None: + for e in self.connector.Errors: + state = str(e.SQLState) + if state.startswith('23') or state=='40002': + return api.IntegrityError + return api.DatabaseError + + def __del__(self): + try: + self._closeAdoConnection() #v2.1 Rose + except: + pass + self.connector = None + + def __enter__(self): # Connections are context managers + return(self) + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type: + self._rollback() #automatic rollback on errors + else: + self.commit() + + def get_table_names(self): + schema = self.connector.OpenSchema(20) # constant = adSchemaTables + + tables = [] + while not schema.EOF: + name = getIndexedValue(schema.Fields,'TABLE_NAME').Value + tables.append(name) + schema.MoveNext() + del schema + return tables + +# # # # # ----- the Class that defines a cursor ----- # # # # # +class Cursor(object): +## ** api required attributes: +## description... +## This read-only attribute is a sequence of 7-item sequences. +## Each of these sequences contains information describing one result column: +## (name, type_code, display_size, internal_size, precision, scale, null_ok). +## This attribute will be None for operations that do not return rows or if the +## cursor has not had an operation invoked via the executeXXX() method yet. +## The type_code can be interpreted by comparing it to the Type Objects specified in the section below. +## rowcount... +## This read-only attribute specifies the number of rows that the last executeXXX() produced +## (for DQL statements like select) or affected (for DML statements like update or insert). +## The attribute is -1 in case no executeXXX() has been performed on the cursor or +## the rowcount of the last operation is not determinable by the interface.[7] +## arraysize... +## This read/write attribute specifies the number of rows to fetch at a time with fetchmany(). +## It defaults to 1 meaning to fetch a single row at a time. +## Implementations must observe this value with respect to the fetchmany() method, +## but are free to interact with the database a single row at a time. +## It may also be used in the implementation of executemany(). +## ** extension attributes: +## paramstyle... +## allows the programmer to override the connection's default paramstyle +## errorhandler... +## allows the programmer to override the connection's default error handler + + def __init__(self,connection): + self.command = None + self._ado_prepared = False + self.messages=[] + self.connection = connection + self.paramstyle = connection.paramstyle # used for overriding the paramstyle + self._parameter_names = [] + self.recordset_is_remote = False + self.rs = None # the ADO recordset for this cursor + self.converters = [] # conversion function for each column + self.columnNames = {} # names of columns {lowercase name : number,...} + self.numberOfColumns = 0 + self._description = None + self.rowcount = -1 + self.errorhandler = connection.errorhandler + self.arraysize = 1 + connection._i_am_here(self) + if verbose: + print('%s New cursor at %X on conn %X' % (version, id(self), id(self.connection))) + + def __iter__(self): # [2.1 Zamarev] + return iter(self.fetchone, None) # [2.1 Zamarev] + + def prepare(self, operation): + self.command = operation + self._description = None + self._ado_prepared = 'setup' + + def __next__(self): + r = self.fetchone() + if r: + return r + raise StopIteration + + def __enter__(self): + "Allow database cursors to be used with context managers." 
+ return self + + def __exit__(self, exc_type, exc_val, exc_tb): + "Allow database cursors to be used with context managers." + self.close() + + def _raiseCursorError(self, errorclass, errorvalue): + eh = self.errorhandler + if eh is None: + eh = api.standardErrorHandler + eh(self.connection, self, errorclass, errorvalue) + + def build_column_info(self, recordset): + self.converters = [] # convertion function for each column + self.columnNames = {} # names of columns {lowercase name : number,...} + self._description = None + + # if EOF and BOF are true at the same time, there are no records in the recordset + if (recordset is None) or (recordset.State == adc.adStateClosed): + self.rs = None + self.numberOfColumns = 0 + return + self.rs = recordset #v2.1.1 bkline + self.recordset_format = api.RS_ARRAY if api.onIronPython else api.RS_WIN_32 + self.numberOfColumns = recordset.Fields.Count + try: + varCon = self.connection.variantConversions + except AttributeError: + varCon = api.variantConversions + for i in range(self.numberOfColumns): + f = getIndexedValue(self.rs.Fields, i) + try: + self.converters.append(varCon[f.Type]) # conversion function for this column + except KeyError: + self._raiseCursorError(api.InternalError, 'Data column of Unknown ADO type=%s' % f.Type) + self.columnNames[f.Name.lower()] = i # columnNames lookup + + def _makeDescriptionFromRS(self): + # Abort if closed or no recordset. + if self.rs is None: + self._description = None + return + desc = [] + for i in range(self.numberOfColumns): + f = getIndexedValue(self.rs.Fields, i) + if self.rs.EOF or self.rs.BOF: + display_size=None + else: + display_size=f.ActualSize #TODO: Is this the correct defintion according to the DB API 2 Spec ? + null_ok= bool(f.Attributes & adc.adFldMayBeNull) #v2.1 Cole + desc.append((f.Name, f.Type, display_size, f.DefinedSize, f.Precision, f.NumericScale, null_ok)) + self._description = desc + + def get_description(self): + if not self._description: + self._makeDescriptionFromRS() + return self._description + + def __getattr__(self, item): + if item == 'description': + return self.get_description() + object.__getattribute__(self, item) # may get here on Remote attribute calls for existing attributes + + def format_description(self,d): + """Format db_api description tuple for printing.""" + if self.description is None: + self._makeDescriptionFromRS() + if isinstance(d,int): + d = self.description[d] + desc = "Name= %s, Type= %s, DispSize= %s, IntSize= %s, Precision= %s, Scale= %s NullOK=%s" % \ + (d[0], adc.adTypeNames.get(d[1], str(d[1])+' (unknown type)'), + d[2], d[3], d[4], d[5], d[6]) + return desc + + def close(self, dont_tell_me=False): + """Close the cursor now (rather than whenever __del__ is called). + The cursor will be unusable from this point forward; an Error (or subclass) + exception will be raised if any operation is attempted with the cursor. 
+ """ + if self.connection is None: + return + self.messages = [] + if self.rs and self.rs.State != adc.adStateClosed: # rs exists and is open #v2.1 Rose + self.rs.Close() #v2.1 Rose + self.rs = None # let go of the recordset so ADO will let it be disposed #v2.1 Rose + if not dont_tell_me: + self.connection._i_am_closing(self) # take me off the connection's cursors list + self.connection = None #this will make all future method calls on me throw an exception + if verbose: + print('adodbapi Closed cursor at %X' % id(self)) + + def __del__(self): + try: + self.close() + except: + pass + + def _new_command(self, command_type=adc.adCmdText): + self.cmd = None + self.messages = [] + + if self.connection is None: + self._raiseCursorError(api.InterfaceError, None) + return + try: + self.cmd = Dispatch("ADODB.Command") + self.cmd.ActiveConnection = self.connection.connector + self.cmd.CommandTimeout = self.connection.timeout + self.cmd.CommandType = command_type + self.cmd.CommandText = self.commandText + self.cmd.Prepared = bool(self._ado_prepared) + except: + self._raiseCursorError(api.DatabaseError, + 'Error creating new ADODB.Command object for "%s"' % repr(self.commandText)) + + def _execute_command(self): + # Stored procedures may have an integer return value + self.return_value = None + recordset = None + count = -1 #default value + if verbose: + print('Executing command="%s"'%self.commandText) + try: + # ----- the actual SQL is executed here --- + if api.onIronPython: + ra = Reference[int]() + recordset = self.cmd.Execute(ra) + count = ra.Value + else: #pywin32 + recordset, count = self.cmd.Execute() + # ----- ------------------------------- --- + except (Exception) as e: + _message = "" + if hasattr(e, 'args'): _message += str(e.args)+"\n" + _message += "Command:\n%s\nParameters:\n%s" % (self.commandText, + format_parameters(self.cmd.Parameters, True)) + klass = self.connection._suggest_error_class() + self._raiseCursorError(klass, _message) + try: + self.rowcount = recordset.RecordCount + except: + self.rowcount = count + self.build_column_info(recordset) + + # The ADO documentation hints that obtaining the recordcount may be timeconsuming + # "If the Recordset object does not support approximate positioning, this property + # may be a significant drain on resources # [ekelund] + # Therefore, COM will not return rowcount for server-side cursors. [Cole] + # Client-side cursors (the default since v2.8) will force a static + # cursor, and rowcount will then be set accurately [Cole] + def get_rowcount(self): + return self.rowcount + + def get_returned_parameters(self): + """with some providers, returned parameters and the .return_value are not available until + after the last recordset has been read. In that case, you must coll nextset() until it + returns None, then call this method to get your returned information.""" + + retLst=[] # store procedures may return altered parameters, including an added "return value" item + for p in tuple(self.cmd.Parameters): + if verbose > 2: + print('Returned=Name: %s, Dir.: %s, Type: %s, Size: %s, Value: "%s",' \ + " Precision: %s, NumericScale: %s" % \ + (p.Name, adc.directions[p.Direction], + adc.adTypeNames.get(p.Type, str(p.Type)+' (unknown type)'), + p.Size, p.Value, p.Precision, p.NumericScale)) + pyObject = api.convert_to_python(p.Value, api.variantConversions[p.Type]) + if p.Direction == adc.adParamReturnValue: + self.returnValue = pyObject # also load the undocumented attribute (Vernon's Error!) 
+ self.return_value = pyObject + else: + retLst.append(pyObject) + return retLst # return the parameter list to the caller + + def callproc(self, procname, parameters=None): + """Call a stored database procedure with the given name. + The sequence of parameters must contain one entry for each + argument that the sproc expects. The result of the + call is returned as modified copy of the input + sequence. Input parameters are left untouched, output and + input/output parameters replaced with possibly new values. + + The sproc may also provide a result set as output, + which is available through the standard .fetch*() methods. + Extension: A "return_value" property may be set on the + cursor if the sproc defines an integer return value. + """ + self._parameter_names = [] + self.commandText = procname + self._new_command(command_type=adc.adCmdStoredProc) + self._buildADOparameterList(parameters, sproc=True) + if verbose > 2: + print('Calling Stored Proc with Params=', format_parameters(self.cmd.Parameters, True)) + self._execute_command() + return self.get_returned_parameters() + + def _reformat_operation(self, operation, parameters): + if self.paramstyle in ('format', 'pyformat'): # convert %s to ? + operation, self._parameter_names = api.changeFormatToQmark(operation) + elif self.paramstyle == 'named' or (self.paramstyle == 'dynamic' and isinstance(parameters, Mapping)): + operation, self._parameter_names = api.changeNamedToQmark(operation) # convert :name to ? + return operation + + def _buildADOparameterList(self, parameters, sproc=False): + self.parameters = parameters + if parameters is None: + parameters = [] + + # Note: ADO does not preserve the parameter list, even if "Prepared" is True, so we must build every time. + parameters_known = False + if sproc: # needed only if we are calling a stored procedure + try: # attempt to use ADO's parameter list + self.cmd.Parameters.Refresh() + if verbose > 2: + print('ADO detected Params=', format_parameters(self.cmd.Parameters, True)) + print('Program Parameters=', repr(parameters)) + parameters_known = True + except api.Error: + if verbose: + print('ADO Parameter Refresh failed') + pass + else: + if len(parameters) != self.cmd.Parameters.Count - 1: + raise api.ProgrammingError('You must supply %d parameters for this stored procedure' % \ + (self.cmd.Parameters.Count - 1)) + if sproc or parameters != []: + i = 0 + if parameters_known: # use ado parameter list + if self._parameter_names: # named parameters + for i, pm_name in enumerate(self._parameter_names): + p = getIndexedValue(self.cmd.Parameters, i) + try: + _configure_parameter(p, parameters[pm_name], p.Type, parameters_known) + except (Exception) as e: + _message = 'Error Converting Parameter %s: %s, %s <- %s\n' % \ + (p.Name, adc.ado_type_name(p.Type), p.Value, repr(parameters[pm_name])) + self._raiseCursorError(api.DataError, _message+'->'+repr(e.args)) + else: # regular sequence of parameters + for value in parameters: + p = getIndexedValue(self.cmd.Parameters,i) + if p.Direction == adc.adParamReturnValue: # this is an extra parameter added by ADO + i += 1 # skip the extra + p=getIndexedValue(self.cmd.Parameters,i) + try: + _configure_parameter(p, value, p.Type, parameters_known) + except Exception as e: + _message = 'Error Converting Parameter %s: %s, %s <- %s\n' % \ + (p.Name, adc.ado_type_name(p.Type), p.Value, repr(value)) + self._raiseCursorError(api.DataError, _message+'->'+repr(e.args)) + i += 1 + else: #-- build own parameter list + if self._parameter_names: # we expect a 
dictionary of parameters, this is the list of expected names + for parm_name in self._parameter_names: + elem = parameters[parm_name] + adotype = api.pyTypeToADOType(elem) + p = self.cmd.CreateParameter(parm_name, adotype, adc.adParamInput) + _configure_parameter(p, elem, adotype, parameters_known) + try: + self.cmd.Parameters.Append(p) + except Exception as e: + _message = 'Error Building Parameter %s: %s, %s <- %s\n' % \ + (p.Name, adc.ado_type_name(p.Type), p.Value, repr(elem)) + self._raiseCursorError(api.DataError, _message+'->'+repr(e.args)) + else : # expecting the usual sequence of parameters + if sproc: + p = self.cmd.CreateParameter('@RETURN_VALUE', adc.adInteger, adc.adParamReturnValue) + self.cmd.Parameters.Append(p) + + for elem in parameters: + name='p%i' % i + adotype = api.pyTypeToADOType(elem) + p=self.cmd.CreateParameter(name, adotype, adc.adParamInput) # Name, Type, Direction, Size, Value + _configure_parameter(p, elem, adotype, parameters_known) + try: + self.cmd.Parameters.Append(p) + except Exception as e: + _message = 'Error Building Parameter %s: %s, %s <- %s\n' % \ + (p.Name, adc.ado_type_name(p.Type), p.Value, repr(elem)) + self._raiseCursorError(api.DataError, _message+'->'+repr(e.args)) + i += 1 + if self._ado_prepared == 'setup': + self._ado_prepared = True # parameters will be "known" by ADO next loop + + def execute(self, operation, parameters=None): + """Prepare and execute a database operation (query or command). + + Parameters may be provided as sequence or mapping and will be bound to variables in the operation. + Variables are specified in a database-specific notation + (see the module's paramstyle attribute for details). [5] + A reference to the operation will be retained by the cursor. + If the same operation object is passed in again, then the cursor + can optimize its behavior. This is most effective for algorithms + where the same operation is used, but different parameters are bound to it (many times). + + For maximum efficiency when reusing an operation, it is best to use + the setinputsizes() method to specify the parameter types and sizes ahead of time. + It is legal for a parameter to not match the predefined information; + the implementation should compensate, possibly with a loss of efficiency. + + The parameters may also be specified as list of tuples to e.g. insert multiple rows in + a single operation, but this kind of usage is depreciated: executemany() should be used instead. + + Return value is not defined. + + [5] The module will use the __getitem__ method of the parameters object to map either positions + (integers) or names (strings) to parameter values. This allows for both sequences and mappings + to be used as input. + The term "bound" refers to the process of binding an input value to a database execution buffer. + In practical terms, this means that the input value is directly used as a value in the operation. + The client should not be required to "escape" the value so that it can be used -- the value + should be equal to the actual database value. 
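+        Editor's note (illustrative example, not part of the upstream docstring):
+            crsr.execute("select * from Products where ID = ?", [42])            # default 'qmark' style
+            crsr.paramstyle = 'named'                                            # extension attribute
+            crsr.execute("select * from Products where ID = :pid", {'pid': 42})  # rewritten to ? internally
+        The Products table and ID column are placeholders; 'named' and 'format' operations
+        are converted to qmark form by _reformat_operation before the ADO command is built.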
""" + if self.command is not operation or self._ado_prepared == 'setup' or not hasattr(self, 'commandText'): + if self.command is not operation: + self._ado_prepared = False + self.command = operation + self._parameter_names = [] + self.commandText = operation if (self.paramstyle == 'qmark' or not parameters) \ + else self._reformat_operation(operation, parameters) + self._new_command() + self._buildADOparameterList(parameters) + if verbose > 3: + print('Params=', format_parameters(self.cmd.Parameters, True)) + self._execute_command() + + def executemany(self, operation, seq_of_parameters): + """Prepare a database operation (query or command) + and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters. + + Return values are not defined. + """ + self.messages = list() + total_recordcount = 0 + + self.prepare(operation) + for params in seq_of_parameters: + self.execute(self.command, params) + if self.rowcount == -1: + total_recordcount = -1 + if total_recordcount != -1: + total_recordcount += self.rowcount + self.rowcount = total_recordcount + + def _fetch(self, limit=None): + """Fetch rows from the current recordset. + + limit -- Number of rows to fetch, or None (default) to fetch all rows. + """ + if self.connection is None or self.rs is None: + self._raiseCursorError(api.FetchFailedError, 'fetch() on closed connection or empty query set') + return + + if self.rs.State == adc.adStateClosed or self.rs.BOF or self.rs.EOF: + return list() + if limit: # limit number of rows retrieved + ado_results = self.rs.GetRows(limit) + else: # get all rows + ado_results = self.rs.GetRows() + if self.recordset_format == api.RS_ARRAY: # result of GetRows is a two-dimension array + length = len(ado_results) // self.numberOfColumns # length of first dimension + else: #pywin32 + length = len(ado_results[0]) #result of GetRows is tuples in a tuple + fetchObject = api.SQLrows(ado_results, length, self) # new object to hold the results of the fetch + return fetchObject + + def fetchone(self): + """ Fetch the next row of a query result set, returning a single sequence, + or None when no more data is available. + + An Error (or subclass) exception is raised if the previous call to executeXXX() + did not produce any result set or no call was issued yet. + """ + self.messages = [] + result = self._fetch(1) + if result: # return record (not list of records) + return result[0] + return None + + def fetchmany(self, size=None): + """Fetch the next set of rows of a query result, returning a list of tuples. An empty sequence is returned when no more rows are available. + + The number of rows to fetch per call is specified by the parameter. + If it is not given, the cursor's arraysize determines the number of rows to be fetched. + The method should try to fetch as many rows as indicated by the size parameter. + If this is not possible due to the specified number of rows not being available, + fewer rows may be returned. + + An Error (or subclass) exception is raised if the previous call to executeXXX() + did not produce any result set or no call was issued yet. + + Note there are performance considerations involved with the size parameter. + For optimal performance, it is usually best to use the arraysize attribute. + If the size parameter is used, then it is best for it to retain the same value from + one fetchmany() call to the next. 
+ """ + self.messages=[] + if size is None: + size = self.arraysize + return self._fetch(size) + + def fetchall(self): + """Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). + + Note that the cursor's arraysize attribute + can affect the performance of this operation. + An Error (or subclass) exception is raised if the previous call to executeXXX() + did not produce any result set or no call was issued yet. + """ + self.messages=[] + return self._fetch() + + def nextset(self): + """Skip to the next available recordset, discarding any remaining rows from the current recordset. + + If there are no more sets, the method returns None. Otherwise, it returns a true + value and subsequent calls to the fetch methods will return rows from the next result set. + + An Error (or subclass) exception is raised if the previous call to executeXXX() + did not produce any result set or no call was issued yet. + """ + self.messages=[] + if self.connection is None or self.rs is None: + self._raiseCursorError(api.OperationalError, ('nextset() on closed connection or empty query set')) + return None + + if api.onIronPython: + try: + recordset = self.rs.NextRecordset() + except TypeError: + recordset = None + except api.Error as exc: + self._raiseCursorError(api.NotSupportedError, exc.args) + else: #pywin32 + try: #[begin 2.1 ekelund] + rsTuple=self.rs.NextRecordset() # + except pywintypes.com_error as exc: # return appropriate error + self._raiseCursorError(api.NotSupportedError, exc.args)#[end 2.1 ekelund] + recordset = rsTuple[0] + if recordset is None: + return None + self.build_column_info(recordset) + return True + + def setinputsizes(self,sizes): + pass + + def setoutputsize(self, size, column=None): + pass + + def _last_query(self): # let the programmer see what query we actually used + try: + if self.parameters == None: + ret = self.commandText + else: + ret = "%s,parameters=%s" % (self.commandText,repr(self.parameters)) + except: + ret = None + return ret + query = property(_last_query, None, None, + "returns the last query executed") + +if __name__ == '__main__': + raise api.ProgrammingError(version + ' cannot be run as a main program.') diff --git a/venv/Lib/site-packages/adodbapi/apibase.py b/venv/Lib/site-packages/adodbapi/apibase.py new file mode 100644 index 00000000..daf8e27b --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/apibase.py @@ -0,0 +1,640 @@ +"""adodbapi.apibase - A python DB API 2.0 (PEP 249) interface to Microsoft ADO + +Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole +* http://sourceforge.net/projects/pywin32 +* http://sourceforge.net/projects/adodbapi +""" + +import sys +import time +import datetime +import decimal +import numbers +# noinspection PyUnresolvedReferences +from . 
import ado_consts as adc + +verbose = False # debugging flag + +onIronPython = sys.platform == 'cli' +if onIronPython: # we need type definitions for odd data we may need to convert + # noinspection PyUnresolvedReferences + from System import DBNull, DateTime + NullTypes = (type(None), DBNull) +else: + DateTime = type(NotImplemented) # should never be seen on win32 + NullTypes = type(None) + +# --- define objects to smooth out Python3 <-> Python 2.x differences +unicodeType = str +longType = int +StringTypes = str +makeByteBuffer = bytes +memoryViewType = memoryview +_BaseException = Exception + +try: #jdhardy -- handle bytes under IronPython & Py3 + bytes +except NameError: + bytes = str # define it for old Pythons + +# ------- Error handlers ------ +def standardErrorHandler(connection, cursor, errorclass, errorvalue): + err = (errorclass, errorvalue) + try: + connection.messages.append(err) + except: pass + if cursor is not None: + try: + cursor.messages.append(err) + except: pass + raise errorclass(errorvalue) + +# Note: _BaseException is defined differently between Python 2.x and 3.x +class Error(_BaseException): + pass #Exception that is the base class of all other error + #exceptions. You can use this to catch all errors with one + #single 'except' statement. Warnings are not considered + #errors and thus should not use this class as base. It must + #be a subclass of the Python StandardError (defined in the + #module exceptions). +class Warning(_BaseException): + pass +class InterfaceError(Error): + pass +class DatabaseError(Error): + pass +class InternalError(DatabaseError): + pass +class OperationalError(DatabaseError): + pass +class ProgrammingError(DatabaseError): + pass +class IntegrityError(DatabaseError): + pass +class DataError(DatabaseError): + pass +class NotSupportedError(DatabaseError): + pass + +class FetchFailedError(OperationalError): + """ + Error is used by RawStoredProcedureQuerySet to determine when a fetch + failed due to a connection being closed or there is no record set + returned. (Non-standard, added especially for django) + """ + pass +# # # # # ----- Type Objects and Constructors ----- # # # # # +#Many databases need to have the input in a particular format for binding to an operation's input parameters. +#For example, if an input is destined for a DATE column, then it must be bound to the database in a particular +#string format. Similar problems exist for "Row ID" columns or large binary items (e.g. blobs or RAW columns). +#This presents problems for Python since the parameters to the executeXXX() method are untyped. +#When the database module sees a Python string object, it doesn't know if it should be bound as a simple CHAR +#column, as a raw BINARY item, or as a DATE. +# +#To overcome this problem, a module must provide the constructors defined below to create objects that can +#hold special values. When passed to the cursor methods, the module can then detect the proper type of +#the input parameter and bind it accordingly. + +#A Cursor Object's description attribute returns information about each of the result columns of a query. +#The type_code must compare equal to one of Type Objects defined below. Type Objects may be equal to more than +#one type code (e.g. DATETIME could be equal to the type codes for date, time and timestamp columns; +#see the Implementation Hints below for details). + +#SQL NULL values are represented by the Python None singleton on input and output. 
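+
+# Editor's note (illustrative, not upstream code): the type objects defined further below
+# (STRING, BINARY, NUMBER, DATETIME, ROWID) compare equal to the ADO type_code found in
+# cursor.description, so column kinds can be tested like this:
+#     for name, type_code, display_size, internal_size, precision, scale, null_ok in crsr.description:
+#         if type_code == NUMBER:
+#             print(name, 'is numeric')
+#         elif type_code == DATETIME:
+#             print(name, 'is a date/time column')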
+ +#Note: Usage of Unix ticks for database interfacing can cause troubles because of the limited date range they cover. + +# def Date(year,month,day): +# "This function constructs an object holding a date value. " +# return dateconverter.date(year,month,day) #dateconverter.Date(year,month,day) +# +# def Time(hour,minute,second): +# "This function constructs an object holding a time value. " +# return dateconverter.time(hour, minute, second) # dateconverter.Time(hour,minute,second) +# +# def Timestamp(year,month,day,hour,minute,second): +# "This function constructs an object holding a time stamp value. " +# return dateconverter.datetime(year,month,day,hour,minute,second) +# +# def DateFromTicks(ticks): +# """This function constructs an object holding a date value from the given ticks value +# (number of seconds since the epoch; see the documentation of the standard Python time module for details). """ +# return Date(*time.gmtime(ticks)[:3]) +# +# def TimeFromTicks(ticks): +# """This function constructs an object holding a time value from the given ticks value +# (number of seconds since the epoch; see the documentation of the standard Python time module for details). """ +# return Time(*time.gmtime(ticks)[3:6]) +# +# def TimestampFromTicks(ticks): +# """This function constructs an object holding a time stamp value from the given +# ticks value (number of seconds since the epoch; +# see the documentation of the standard Python time module for details). """ +# return Timestamp(*time.gmtime(ticks)[:6]) +# +# def Binary(aString): +# """This function constructs an object capable of holding a binary (long) string value. """ +# b = makeByteBuffer(aString) +# return b +# ----- Time converters ---------------------------------------------- +class TimeConverter(object): # this is a generic time converter skeleton + def __init__(self): # the details will be filled in by instances + self._ordinal_1899_12_31=datetime.date(1899,12,31).toordinal()-1 + # Use cls.types to compare if an input parameter is a datetime + self.types = {type(self.Date(2000,1,1)), + type(self.Time(12,1,1)), + type(self.Timestamp(2000,1,1,12,1,1)), + datetime.datetime, + datetime.time, + datetime.date} + def COMDate(self,obj): + '''Returns a ComDate from a date-time''' + try: # most likely a datetime + tt=obj.timetuple() + + try: + ms=obj.microsecond + except: + ms=0 + return self.ComDateFromTuple(tt, ms) + except: # might be a tuple + try: + return self.ComDateFromTuple(obj) + except: # try an mxdate + try: + return obj.COMDate() + except: + raise ValueError('Cannot convert "%s" to COMdate.' % repr(obj)) + def ComDateFromTuple(self, t, microseconds=0): + d = datetime.date(t[0],t[1],t[2]) + integerPart = d.toordinal() - self._ordinal_1899_12_31 + ms = (t[3]*3600 + t[4]*60 + t[5]) * 1000000 + microseconds + fractPart = float(ms) / 86400000000.0 + return integerPart + fractPart + def DateObjectFromCOMDate(self,comDate): + 'Returns an object of the wanted type from a ComDate' + raise NotImplementedError #"Abstract class" + def Date(self,year,month,day): + "This function constructs an object holding a date value. " + raise NotImplementedError #"Abstract class" + def Time(self,hour,minute,second): + "This function constructs an object holding a time value. " + raise NotImplementedError #"Abstract class" + def Timestamp(self,year,month,day,hour,minute,second): + "This function constructs an object holding a time stamp value. 
" + raise NotImplementedError #"Abstract class" + # all purpose date to ISO format converter + def DateObjectToIsoFormatString(self, obj): + "This function should return a string in the format 'YYYY-MM-dd HH:MM:SS:ms' (ms optional) " + try: # most likely, a datetime.datetime + s = obj.isoformat(' ') + except (TypeError, AttributeError): + if isinstance(obj, datetime.date): + s = obj.isoformat() + ' 00:00:00' # return exact midnight + else: + try: # maybe it has a strftime method, like mx + s = obj.strftime('%Y-%m-%d %H:%M:%S') + except AttributeError: + try: #but may be time.struct_time + s = time.strftime('%Y-%m-%d %H:%M:%S', obj) + except: + raise ValueError('Cannot convert "%s" to isoformat' % repr(obj)) + return s + +# -- Optional: if mx extensions are installed you may use mxDateTime ---- +try: + import mx.DateTime + mxDateTime = True +except: + mxDateTime = False +if mxDateTime: + class mxDateTimeConverter(TimeConverter): # used optionally if installed + def __init__(self): + TimeConverter.__init__(self) + self.types.add(type(mx.DateTime)) + def DateObjectFromCOMDate(self,comDate): + return mx.DateTime.DateTimeFromCOMDate(comDate) + def Date(self,year,month,day): + return mx.DateTime.Date(year,month,day) + def Time(self,hour,minute,second): + return mx.DateTime.Time(hour,minute,second) + def Timestamp(self,year,month,day,hour,minute,second): + return mx.DateTime.Timestamp(year,month,day,hour,minute,second) +else: + class mxDateTimeConverter(TimeConverter): + pass # if no mx is installed + +class pythonDateTimeConverter(TimeConverter): # standard since Python 2.3 + def __init__(self): + TimeConverter.__init__(self) + def DateObjectFromCOMDate(self, comDate): + if isinstance(comDate, datetime.datetime): + odn = comDate.toordinal() + tim = comDate.time() + new = datetime.datetime.combine(datetime.datetime.fromordinal(odn), tim) + return new + # return comDate.replace(tzinfo=None) # make non aware + elif isinstance(comDate, DateTime): + fComDate = comDate.ToOADate() # ironPython clr Date/Time + else: + fComDate=float(comDate) #ComDate is number of days since 1899-12-31 + integerPart = int(fComDate) + floatpart=fComDate-integerPart + ##if floatpart == 0.0: + ## return datetime.date.fromordinal(integerPart + self._ordinal_1899_12_31) + dte=datetime.datetime.fromordinal(integerPart + self._ordinal_1899_12_31) \ + + datetime.timedelta(milliseconds=floatpart*86400000) + # millisecondsperday=86400000 # 24*60*60*1000 + return dte + def Date(self,year,month,day): + return datetime.date(year,month,day) + def Time(self,hour,minute,second): + return datetime.time(hour,minute,second) + def Timestamp(self,year,month,day,hour,minute,second): + return datetime.datetime(year,month,day,hour,minute,second) + +class pythonTimeConverter(TimeConverter): # the old, ?nix type date and time + def __init__(self): #caution: this Class gets confised by timezones and DST + TimeConverter.__init__(self) + self.types.add(time.struct_time) + def DateObjectFromCOMDate(self,comDate): + 'Returns ticks since 1970' + if isinstance(comDate,datetime.datetime): + return comDate.timetuple() + elif isinstance(comDate, DateTime): # ironPython clr date/time + fcomDate = comDate.ToOADate() + else: + fcomDate = float(comDate) + secondsperday=86400 # 24*60*60 + #ComDate is number of days since 1899-12-31, gmtime epoch is 1970-1-1 = 25569 days + t=time.gmtime(secondsperday*(fcomDate-25569.0)) + return t #year,month,day,hour,minute,second,weekday,julianday,daylightsaving=t + def Date(self,year,month,day): + return 
self.Timestamp(year,month,day,0,0,0) + def Time(self,hour,minute,second): + return time.gmtime((hour*60+minute)*60 + second) + def Timestamp(self,year,month,day,hour,minute,second): + return time.localtime(time.mktime((year,month,day,hour,minute,second,0,0,-1))) + +base_dateconverter = pythonDateTimeConverter() + +# ------ DB API required module attributes --------------------- +threadsafety=1 # TODO -- find out whether this module is actually BETTER than 1. + +apilevel='2.0' #String constant stating the supported DB API level. + +paramstyle='qmark' # the default parameter style + +# ------ control for an extension which may become part of DB API 3.0 --- +accepted_paramstyles = ('qmark', 'named', 'format', 'pyformat', 'dynamic') + +#------------------------------------------------------------------------------------------ +# define similar types for generic conversion routines +adoIntegerTypes=(adc.adInteger,adc.adSmallInt,adc.adTinyInt,adc.adUnsignedInt, + adc.adUnsignedSmallInt,adc.adUnsignedTinyInt, + adc.adBoolean,adc.adError) #max 32 bits +adoRowIdTypes=(adc.adChapter,) #v2.1 Rose +adoLongTypes=(adc.adBigInt,adc.adFileTime,adc.adUnsignedBigInt) +adoExactNumericTypes=(adc.adDecimal,adc.adNumeric,adc.adVarNumeric,adc.adCurrency) #v2.3 Cole +adoApproximateNumericTypes=(adc.adDouble,adc.adSingle) #v2.1 Cole +adoStringTypes=(adc.adBSTR,adc.adChar,adc.adLongVarChar,adc.adLongVarWChar, + adc.adVarChar,adc.adVarWChar,adc.adWChar) +adoBinaryTypes=(adc.adBinary,adc.adLongVarBinary,adc.adVarBinary) +adoDateTimeTypes=(adc.adDBTime, adc.adDBTimeStamp, adc.adDate, adc.adDBDate) +adoRemainingTypes=(adc.adEmpty,adc.adIDispatch,adc.adIUnknown, + adc.adPropVariant,adc.adArray,adc.adUserDefined, + adc.adVariant,adc.adGUID) + +# this class is a trick to determine whether a type is a member of a related group of types. see PEP notes +class DBAPITypeObject(object): + def __init__(self,valuesTuple): + self.values = frozenset(valuesTuple) + + def __eq__(self,other): + return other in self.values + + def __ne__(self, other): + return other not in self.values + +"""This type object is used to describe columns in a database that are string-based (e.g. CHAR). """ +STRING = DBAPITypeObject(adoStringTypes) + +"""This type object is used to describe (long) binary columns in a database (e.g. LONG, RAW, BLOBs). """ +BINARY = DBAPITypeObject(adoBinaryTypes) + +"""This type object is used to describe numeric columns in a database. """ +NUMBER = DBAPITypeObject(adoIntegerTypes + adoLongTypes + \ + adoExactNumericTypes + adoApproximateNumericTypes) + +"""This type object is used to describe date/time columns in a database. """ + +DATETIME = DBAPITypeObject(adoDateTimeTypes) +"""This type object is used to describe the "Row ID" column in a database. """ +ROWID = DBAPITypeObject(adoRowIdTypes) + +OTHER = DBAPITypeObject(adoRemainingTypes) + +# ------- utilities for translating python data types to ADO data types --------------------------------- +typeMap = { memoryViewType : adc.adVarBinary, + float : adc.adDouble, + type(None) : adc.adEmpty, + str : adc.adBSTR, + bool :adc.adBoolean, #v2.1 Cole + decimal.Decimal : adc.adDecimal, + int: adc.adBigInt, + bytes: adc.adVarBinary } + +def pyTypeToADOType(d): + tp=type(d) + try: + return typeMap[tp] + except KeyError: # The type was not defined in the pre-computed Type table + from . 
import dateconverter + if tp in dateconverter.types: # maybe it is one of our supported Date/Time types + return adc.adDate + # otherwise, attempt to discern the type by probing the data object itself -- to handle duck typing + if isinstance(d, StringTypes): + return adc.adBSTR + if isinstance(d, numbers.Integral): + return adc.adBigInt + if isinstance(d, numbers.Real): + return adc.adDouble + raise DataError('cannot convert "%s" (type=%s) to ADO'%(repr(d),tp)) + +# # # # # # # # # # # # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +# functions to convert database values to Python objects +#------------------------------------------------------------------------ +# variant type : function converting variant to Python value +def variantConvertDate(v): + from . import dateconverter # this function only called when adodbapi is running + return dateconverter.DateObjectFromCOMDate(v) + +def cvtString(variant): # use to get old action of adodbapi v1 if desired + if onIronPython: + try: + return variant.ToString() + except: + pass + return str(variant) + +def cvtDecimal(variant): #better name + return _convertNumberWithCulture(variant, decimal.Decimal) + +def cvtNumeric(variant): #older name - don't break old code + return cvtDecimal(variant) + +def cvtFloat(variant): + return _convertNumberWithCulture(variant, float) + +def _convertNumberWithCulture(variant, f): + try: + return f(variant) + except (ValueError,TypeError,decimal.InvalidOperation): + try: + europeVsUS = str(variant).replace(",",".") + return f(europeVsUS) + except (ValueError,TypeError,decimal.InvalidOperation): pass + +def cvtInt(variant): + return int(variant) + +def cvtLong(variant): # only important in old versions where long and int differ + return int(variant) + +def cvtBuffer(variant): + return bytes(variant) + +def cvtUnicode(variant): + return str(variant) + +def identity(x): return x + +def cvtUnusual(variant): + if verbose > 1: + sys.stderr.write('Conversion called for Unusual data=%s\n' % repr(variant)) + if isinstance(variant, DateTime): # COMdate or System.Date + from .adodbapi import dateconverter # this will only be called when adodbapi is in use, and very rarely + return dateconverter.DateObjectFromCOMDate(variant) + return variant # cannot find conversion function -- just give the data to the user + +def convert_to_python(variant, func): # convert DB value into Python value + if isinstance(variant, NullTypes): # IronPython Null or None + return None + return func(variant) # call the appropriate conversion function + +class MultiMap(dict): #builds a dictionary from {(sequence,of,keys) : function} + """A dictionary of ado.type : function -- but you can set multiple items by passing a sequence of keys""" + #useful for defining conversion functions for groups of similar data types. + def __init__(self, aDict): + for k, v in list(aDict.items()): + self[k] = v # we must call __setitem__ + def __setitem__(self, adoType, cvtFn): + "set a single item, or a whole sequence of items" + try: # user passed us a sequence, set them individually + for type in adoType: + dict.__setitem__(self, type, cvtFn) + except TypeError: # a single value fails attempt to iterate + dict.__setitem__(self, adoType, cvtFn) + +#initialize variantConversions dictionary used to convert SQL to Python +# this is the dictionary of default conversion functions, built by the class above. 
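+
+# Editor's note (illustrative, not upstream code): Connection.__setattr__ stores a private
+# copy when variantConversions is assigned, so a converter can be overridden for a single
+# connection without touching the module-wide table defined just below, e.g.
+#     conn.variantConversions = variantConversions           # assignment triggers the copy
+#     conn.variantConversions[adoStringTypes] = cvtString    # one key sets the whole family of string types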
+# this becomes a class attribute for the Connection, and that attribute is used +# to build the list of column conversion functions for the Cursor +variantConversions = MultiMap( { + adoDateTimeTypes : variantConvertDate, + adoApproximateNumericTypes: cvtFloat, + adoExactNumericTypes: cvtDecimal, # use to force decimal rather than unicode + adoLongTypes : cvtLong, + adoIntegerTypes: cvtInt, + adoRowIdTypes: cvtInt, + adoStringTypes: identity, + adoBinaryTypes: cvtBuffer, + adoRemainingTypes: cvtUnusual }) + +# # # # # classes to emulate the result of cursor.fetchxxx() as a sequence of sequences # # # # # + # "an ENUM of how my low level records are laid out" +RS_WIN_32, RS_ARRAY, RS_REMOTE = list(range(1,4)) + +class SQLrow(object): # a single database row + # class to emulate a sequence, so that a column may be retrieved by either number or name + def __init__(self, rows, index): # "rows" is an _SQLrows object, index is which row + self.rows = rows # parent 'fetch' container object + self.index = index # my row number within parent + def __getattr__(self, name): # used for row.columnName type of value access + try: + return self._getValue(self.rows.columnNames[name.lower()]) + except KeyError: + raise AttributeError('Unknown column name "{}"'.format(name)) + def _getValue(self,key): # key must be an integer + if self.rows.recordset_format == RS_ARRAY: # retrieve from two-dimensional array + v = self.rows.ado_results[key,self.index] + elif self.rows.recordset_format == RS_REMOTE: + v = self.rows.ado_results[self.index][key] + else:# pywin32 - retrieve from tuple of tuples + v = self.rows.ado_results[key][self.index] + if self.rows.converters is NotImplemented: + return v + return convert_to_python(v, self.rows.converters[key]) + + def __len__(self): + return self.rows.numberOfColumns + + def __getitem__(self,key): # used for row[key] type of value access + if isinstance(key,int): # normal row[1] designation + try: + return self._getValue(key) + except IndexError: + raise + if isinstance(key, slice): + indices = key.indices(self.rows.numberOfColumns) + vl = [self._getValue(i) for i in range(*indices)] + return tuple(vl) + try: + return self._getValue(self.rows.columnNames[key.lower()]) # extension row[columnName] designation + except (KeyError, TypeError): + er, st, tr = sys.exc_info() + raise er('No such key as "%s" in %s'%(repr(key),self.__repr__())).with_traceback(tr) + def __iter__(self): + return iter(self.__next__()) + def __next__(self): + for n in range(self.rows.numberOfColumns): + yield self._getValue(n) + def __repr__(self): # create a human readable representation + taglist = sorted(list(self.rows.columnNames.items()), key=lambda x: x[1]) + s = "<SQLrow={" + for name, i in taglist: + s += name + ':' + repr(self._getValue(i)) + ', ' + return s[:-2] + '}>' + def __str__(self): # create a pretty human readable representation + return str(tuple(str(self._getValue(i)) for i in range(self.rows.numberOfColumns))) + + # TO-DO implement pickling an SQLrow directly + #def __getstate__(self): return self.__dict__ + #def __setstate__(self, d): self.__dict__.update(d) + # which basically tell pickle to treat your class just like a normal one, + # taking self.__dict__ as representing the whole of the instance state, + # despite the existence of the __getattr__. 
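+
+# Editor's note (illustrative, not upstream code): a fetched SQLrow supports several access
+# styles; assuming a hypothetical UnitPrice column:
+#     row = crsr.fetchone()
+#     row[0]              # by position
+#     row['unitprice']    # by name (names are looked up lower-cased)
+#     row.UnitPrice       # as an attribute (also lower-cased internally)
+#     tuple(row)          # the whole row as a plain tuple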
+ # # # # +class SQLrows(object): + # class to emulate a sequence for multiple rows using a container object + def __init__(self, ado_results, numberOfRows, cursor): + self.ado_results = ado_results # raw result of SQL get + try: + self.recordset_format = cursor.recordset_format + self.numberOfColumns = cursor.numberOfColumns + self.converters = cursor.converters + self.columnNames = cursor.columnNames + except AttributeError: + self.recordset_format = RS_ARRAY + self.numberOfColumns = 0 + self.converters = [] + self.columnNames = {} + self.numberOfRows = numberOfRows + + def __len__(self): + return self.numberOfRows + + def __getitem__(self, item): # used for row or row,column access + if not self.ado_results: + return [] + if isinstance(item, slice): # will return a list of row objects + indices = item.indices(self.numberOfRows) + return [SQLrow(self, k) for k in range(*indices)] + elif isinstance(item, tuple) and len(item)==2: + # d = some_rowsObject[i,j] will return a datum from a two-dimension address + i, j = item + if not isinstance(j, int): + try: + j = self.columnNames[j.lower()] # convert named column to numeric + except KeyError: + raise KeyError('adodbapi: no such column name as "%s"'%repr(j)) + if self.recordset_format == RS_ARRAY: # retrieve from two-dimensional array + v = self.ado_results[j,i] + elif self.recordset_format == RS_REMOTE: + v = self.ado_results[i][j] + else: # pywin32 - retrieve from tuple of tuples + v = self.ado_results[j][i] + if self.converters is NotImplemented: + return v + return convert_to_python(v, self.converters[j]) + else: + row = SQLrow(self, item) # new row descriptor + return row + def __iter__(self): + return iter(self.__next__()) + + def __next__(self): + for n in range(self.numberOfRows): + row = SQLrow(self, n) + yield row + # # # # # + + + # # # # # functions to re-format SQL requests to other paramstyle requirements # # # # # # # # # # +def changeNamedToQmark(op): #convert from 'named' paramstyle to ADO required '?'mark parameters + outOp = '' + outparms=[] + chunks = op.split("'") #quote all literals -- odd numbered list results are literals. + inQuotes = False + for chunk in chunks: + if inQuotes: # this is inside a quote + if chunk == '': # double apostrophe to quote one apostrophe + outOp = outOp[:-1] # so take one away + else: + outOp += "'"+chunk+"'" # else pass the quoted string as is. + else: # is SQL code -- look for a :namedParameter + while chunk: # some SQL string remains + sp = chunk.split(':',1) + outOp += sp[0] # concat the part up to the : + s = '' + try: + chunk = sp[1] + except IndexError: + chunk = None + if chunk: # there was a parameter - parse it out + i = 0 + c = chunk[0] + while c.isalnum() or c == '_': + i += 1 + try: + c = chunk[i] + except IndexError: + break + s = chunk[:i] + chunk = chunk[i:] + if s: + outparms.append(s) # list the parameters in order + outOp += '?' # put in the Qmark + inQuotes = not inQuotes + return outOp, outparms + +def changeFormatToQmark(op): #convert from 'format' paramstyle to ADO required '?'mark parameters + outOp = '' + outparams = [] + chunks = op.split("'") #quote all literals -- odd numbered list results are literals. + inQuotes = False + for chunk in chunks: + if inQuotes: + if outOp != '' and chunk=='': # he used a double apostrophe to quote one apostrophe + outOp = outOp[:-1] # so take one away + else: + outOp += "'"+chunk+"'" # else pass the quoted string as is. + else: # is SQL code -- look for a %s parameter + if '%(' in chunk: # ugh! pyformat! 
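+                # Editor's note (illustrative): a pyformat operation such as
+                #     "update Products set UnitPrice = %(price)s where ID = %(pid)s"
+                # leaves this branch as
+                #     "update Products set UnitPrice = ? where ID = ?"
+                # with 'price' and 'pid' appended to outparams in the order the ?-marks appear.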
+ while chunk: # some SQL string remains + sp = chunk.split('%(', 1) + outOp += sp[0] # concat the part up to the % + if len(sp) > 1: + try: + s, chunk = sp[1].split(')s', 1) # find the ')s' + except ValueError: + raise ProgrammingError('Pyformat SQL has incorrect format near "%s"' % chunk) + outparams.append(s) + outOp += '?' # put in the Qmark + else: + chunk = None + else: # proper '%s' format + sp = chunk.split('%s') # make each %s + outOp += "?".join(sp) # into ? + inQuotes = not inQuotes # every other chunk is a quoted string + return outOp, outparams diff --git a/venv/Lib/site-packages/adodbapi/examples/__pycache__/db_print.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/examples/__pycache__/db_print.cpython-36.pyc new file mode 100644 index 00000000..0362a918 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/examples/__pycache__/db_print.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/examples/__pycache__/db_table_names.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/examples/__pycache__/db_table_names.cpython-36.pyc new file mode 100644 index 00000000..0cb0614e Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/examples/__pycache__/db_table_names.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/examples/__pycache__/xls_read.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/examples/__pycache__/xls_read.cpython-36.pyc new file mode 100644 index 00000000..bd250c86 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/examples/__pycache__/xls_read.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/examples/__pycache__/xls_write.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/examples/__pycache__/xls_write.cpython-36.pyc new file mode 100644 index 00000000..5d3c931d Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/examples/__pycache__/xls_write.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/examples/db_print.py b/venv/Lib/site-packages/adodbapi/examples/db_print.py new file mode 100644 index 00000000..2ee21748 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/examples/db_print.py @@ -0,0 +1,66 @@ +""" db_print.py -- a simple demo for ADO database reads.""" + +import sys +import adodbapi.ado_consts as adc + +cmd_args = ('filename', 'table_name') +if 'help' in sys.argv: + print('possible settings keywords are:',cmd_args) + sys.exit() + +kw_args = {} # pick up filename and proxy address from command line (optionally) +for arg in sys.argv: + s = arg.split("=") + if len(s) > 1: + if s[0] in cmd_args: + kw_args[s[0]] = s[1] + +kw_args.setdefault('filename', "test.mdb") # assumes server is running from examples folder +kw_args.setdefault('table_name', 'Products') # the name of the demo table + +# the server needs to select the provider based on his Python installation +provider_switch = ['provider', 'Microsoft.ACE.OLEDB.12.0', "Microsoft.Jet.OLEDB.4.0"] + +# ------------------------ START HERE ------------------------------------- +#create the connection +constr = "Provider=%(provider)s;Data Source=%(filename)s" +import adodbapi as db +con = db.connect(constr, kw_args, macro_is64bit=provider_switch) + +if kw_args['table_name'] == '?': + print('The tables in your database are:') + for name in con.get_table_names(): + print(name) +else: +#make a cursor on the connection + with con.cursor() as c: + + #run an SQL statement on the cursor + sql = 'select * from %s' % kw_args['table_name'] + print('performing query="%s"' % sql) + c.execute(sql) + + #check the results + print('result 
rowcount shows as= %d. (Note: -1 means "not known")' \ + % (c.rowcount,)) + print('') + print('result data description is:') + print(' NAME Type DispSize IntrnlSz Prec Scale Null?') + for d in c.description: + print(('%16s %-12s %8s %8d %4d %5d %s') % \ + (d[0], adc.adTypeNames[d[1]], d[2], d[3], d[4],d[5], bool(d[6]))) + print('') + print('str() of first five records are...') + + #get the results + db = c.fetchmany(5) + + #print them + for rec in db: + print(rec) + + print('') + print('repr() of next row is...') + print(repr(c.fetchone())) + print('') +con.close() diff --git a/venv/Lib/site-packages/adodbapi/examples/db_table_names.py b/venv/Lib/site-packages/adodbapi/examples/db_table_names.py new file mode 100644 index 00000000..5706077f --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/examples/db_table_names.py @@ -0,0 +1,19 @@ +""" db_table_names.py -- a simple demo for ADO database table listing.""" +import sys +import adodbapi + +try: + databasename = sys.argv[1] +except IndexError: + databasename = "test.mdb" + +provider = ['prv', "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"] +constr = "Provider=%(prv)s;Data Source=%(db)s" + +#create the connection +con = adodbapi.connect(constr, db=databasename, macro_is64bit=provider) + +print('Table names in= %s' % databasename) + +for table in con.get_table_names(): + print(table) diff --git a/venv/Lib/site-packages/adodbapi/examples/xls_read.py b/venv/Lib/site-packages/adodbapi/examples/xls_read.py new file mode 100644 index 00000000..55d95db4 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/examples/xls_read.py @@ -0,0 +1,38 @@ +import sys +import adodbapi +try: + import adodbapi.is64bit as is64bit + is64 = is64bit.Python() +except ImportError: + is64 = False + +if is64: + driver = "Microsoft.ACE.OLEDB.12.0" +else: + driver = "Microsoft.Jet.OLEDB.4.0" +extended = 'Extended Properties="Excel 8.0;HDR=Yes;IMEX=1;"' + +try: # first command line argument will be xls file name -- default to the one written by xls_write.py + filename = sys.argv[1] +except IndexError: + filename = 'xx.xls' + +constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended) + +conn = adodbapi.connect(constr) + +try: # second command line argument will be worksheet name -- default to first worksheet + sheet = sys.argv[2] +except IndexError: + # use ADO feature to get the name of the first worksheet + sheet = conn.get_table_names()[0] + +print('Shreadsheet=%s Worksheet=%s' % (filename, sheet)) +print('------------------------------------------------------------') +crsr = conn.cursor() +sql = "SELECT * from [%s]" % sheet +crsr.execute(sql) +for row in crsr.fetchmany(10): + print(repr(row)) +crsr.close() +conn.close() diff --git a/venv/Lib/site-packages/adodbapi/examples/xls_write.py b/venv/Lib/site-packages/adodbapi/examples/xls_write.py new file mode 100644 index 00000000..b5edbdeb --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/examples/xls_write.py @@ -0,0 +1,32 @@ +import adodbapi +import datetime +try: + import adodbapi.is64bit as is64bit + is64 = is64bit.Python() +except ImportError: + is64 = False # in case the user has an old version of adodbapi +if is64: + driver = "Microsoft.ACE.OLEDB.12.0" +else: + driver = "Microsoft.Jet.OLEDB.4.0" +filename = 'xx.xls' # file will be created if it does not exist +extended = 'Extended Properties="Excel 8.0;Readonly=False;"' + +constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended) + +conn = adodbapi.connect(constr) +with conn: # will auto commit if no errors + with conn.cursor() as 
crsr: + try: crsr.execute('drop table SheetOne') + except: pass # just is case there is one already there + + # create the sheet and the header row and set the types for the columns + crsr.execute('create table SheetOne (Name varchar, Rank varchar, SrvcNum integer, Weight float, Birth date)') + + sql = "INSERT INTO SheetOne (name, rank , srvcnum, weight, birth) values (?,?,?,?,?)" + + data = ('Mike Murphy', 'SSG', 123456789, 167.8, datetime.date(1922,12,27)) + crsr.execute(sql, data) # write the first row of data + crsr.execute(sql, ['John Jones', 'Pvt', 987654321, 140.0, datetime.date(1921,7,4)]) # another row of data +conn.close() +print('Created spreadsheet=%s worksheet=%s' % (filename, 'SheetOne')) diff --git a/venv/Lib/site-packages/adodbapi/is64bit.py b/venv/Lib/site-packages/adodbapi/is64bit.py new file mode 100644 index 00000000..6284254a --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/is64bit.py @@ -0,0 +1,33 @@ +"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version""" +import sys + +def Python(): + if sys.platform == 'cli': #IronPython + import System + return System.IntPtr.Size == 8 + else: + try: + return sys.maxsize > 2147483647 + except AttributeError: + return sys.maxint > 2147483647 + +def os(): + import platform + pm = platform.machine() + if pm != '..' and pm.endswith('64'): # recent Python (not Iron) + return True + else: + import os + if 'PROCESSOR_ARCHITEW6432' in os.environ: + return True # 32 bit program running on 64 bit Windows + try: + return os.environ['PROCESSOR_ARCHITECTURE'].endswith('64') # 64 bit Windows 64 bit program + except (IndexError, KeyError): + pass # not Windows + try: + return '64' in platform.architecture()[0] # this often works in Linux + except: + return False # is an older version of Python, assume also an older os (best we can guess) + +if __name__ == "__main__": + print("is64bit.Python() =", Python(), "is64bit.os() =", os()) diff --git a/venv/Lib/site-packages/adodbapi/license.txt b/venv/Lib/site-packages/adodbapi/license.txt new file mode 100644 index 00000000..c255f4aa --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/license.txt @@ -0,0 +1,506 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. 
Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. 
To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. 
+ You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. 
Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. 
Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. 
In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. 
+ + <one line to give the library's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + <signature of Ty Coon>, 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! + diff --git a/venv/Lib/site-packages/adodbapi/process_connect_string.py b/venv/Lib/site-packages/adodbapi/process_connect_string.py new file mode 100644 index 00000000..a8dab5a1 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/process_connect_string.py @@ -0,0 +1,118 @@ +""" a clumsy attempt at a macro language to let the programmer execute code on the server (ex: determine 64bit)""" +from . import is64bit as is64bit + +def macro_call(macro_name, args, kwargs): + """ allow the programmer to perform limited processing on the server by passing macro names and args + + :new_key - the key name the macro will create + :args[0] - macro name + :args[1:] - any arguments + :code - the value of the keyword item + :kwargs - the connection keyword dictionary. 
??key has been removed + --> the value to put in for kwargs['name'] = value + """ + if isinstance(args, (str, str)): + args = [args] # the user forgot to pass a sequence, so make a string into args[0] + new_key = args[0] + try: + if macro_name == "is64bit": + if is64bit.Python(): # if on 64 bit Python + return new_key, args[1] # return first argument + else: + try: + return new_key, args[2] # else return second argument (if defined) + except IndexError: + return new_key, '' # else return blank + + elif macro_name == "getuser": # get the name of the user the server is logged in under + if not new_key in kwargs: + import getpass + return new_key, getpass.getuser() + + elif macro_name == "getnode": # get the name of the computer running the server + import platform + try: + return new_key, args[1] % platform.node() + except IndexError: + return new_key, platform.node() + + elif macro_name == "getenv": # expand the server's environment variable args[1] + try: + dflt = args[2] # if not found, default from args[2] + except IndexError: # or blank + dflt = '' + return new_key, os.environ.get(args[1], dflt) + + elif macro_name == "auto_security": + if not 'user' in kwargs or not kwargs['user']: # missing, blank, or Null username + return new_key, 'Integrated Security=SSPI' + return new_key, 'User ID=%(user)s; Password=%(password)s' % kwargs + + elif macro_name == "find_temp_test_path": # helper function for testing ado operation -- undocumented + import tempfile, os + return new_key, os.path.join(tempfile.gettempdir(), 'adodbapi_test', args[1]) + + raise ValueError ('Unknown connect string macro=%s' % macro_name) + except: + raise ValueError ('Error in macro processing %s %s' % (macro_name, repr(args))) + +def process(args, kwargs, expand_macros=False): # --> connection string with keyword arguments processed. 
+ """ attempts to inject arguments into a connection string using Python "%" operator for strings + + co: adodbapi connection object + args: positional parameters from the .connect() call + kvargs: keyword arguments from the .connect() call + """ + try: + dsn = args[0] + except IndexError: + dsn = None + if isinstance(dsn, dict): # as a convenience the first argument may be django settings + kwargs.update(dsn) + elif dsn: # the connection string is passed to the connection as part of the keyword dictionary + kwargs['connection_string'] = dsn + try: + a1 = args[1] + except IndexError: + a1 = None + # historically, the second positional argument might be a timeout value + if isinstance(a1, int): + kwargs['timeout'] = a1 + # if the second positional argument is a string, then it is user + elif isinstance(a1, str): + kwargs['user'] = a1 + # if the second positional argument is a dictionary, use it as keyword arguments, too + elif isinstance(a1, dict): + kwargs.update(a1) + try: + kwargs['password'] = args[2] # the third positional argument is password + kwargs['host'] = args[3] # the fourth positional argument is host name + kwargs['database'] = args[4] # the fifth positional argument is database name + except IndexError: + pass + + # make sure connection string is defined somehow + if not 'connection_string' in kwargs: + try: # perhaps 'dsn' was defined + kwargs['connection_string'] = kwargs['dsn'] + except KeyError: + try: # as a last effort, use the "host" keyword + kwargs['connection_string'] = kwargs['host'] + except KeyError: + raise TypeError ("Must define 'connection_string' for ado connections") + if expand_macros: + for kwarg in list(kwargs.keys()): + if kwarg.startswith('macro_'): # If a key defines a macro + macro_name = kwarg[6:] # name without the "macro_" + macro_code = kwargs.pop(kwarg) # we remove the macro_key and get the code to execute + new_key, rslt = macro_call(macro_name, macro_code, kwargs) # run the code in the local context + kwargs[new_key] = rslt # put the result back in the keywords dict + # special processing for PyRO IPv6 host address + try: + s = kwargs['proxy_host'] + if ':' in s: # it is an IPv6 address + if s[0] != '[': # is not surrounded by brackets + kwargs['proxy_host'] = s.join(('[',']')) # put it in brackets + except KeyError: + pass + return kwargs diff --git a/venv/Lib/site-packages/adodbapi/readme.txt b/venv/Lib/site-packages/adodbapi/readme.txt new file mode 100644 index 00000000..cf591905 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/readme.txt @@ -0,0 +1,92 @@ +Project +------- +adodbapi + +A Python DB-API 2.0 (PEP-249) module that makes it easy to use Microsoft ADO +for connecting with databases and other data sources +using either CPython or IronPython. + +Home page: <http://sourceforge.net/projects/adodbapi> + +Features: +* 100% DB-API 2.0 (PEP-249) compliant (including most extensions and recommendations). +* Includes pyunit testcases that describe how to use the module. +* Fully implemented in Python. -- runs in Python 2.5+ Python 3.0+ and IronPython 2.6+ +* Licensed under the LGPL license, which means that it can be used freely even in commercial programs subject to certain restrictions. 
+* The user can choose between paramstyles: 'qmark' 'named' 'format' 'pyformat' 'dynamic' +* Supports data retrieval by column name e.g.: + for row in myCurser.execute("select name,age from students"): + print("Student", row.name, "is", row.age, "years old.") +* Supports user-definable system-to-Python data conversion functions (selected by ADO data type, or by column) + +Prerequisites: +* C Python 2.7 or 3.5 or higher + and pywin32 (Mark Hammond's python for windows extensions.) +or + Iron Python 2.7 or higher. (works in IPy2.0 for all data types except BUFFER) + +Installation: +* (C-Python on Windows): Install pywin32 ("pip install pywin32") which includes adodbapi. +* (IronPython on Windows): Download adodbapi from http://sf.net/projects/adodbapi. Unpack the zip. + Open a command window as an administrator. CD to the folder containing the unzipped files. + Run "setup.py install" using the IronPython of your choice. + +NOTE: ........... +If you do not like the new default operation of returning Numeric columns as decimal.Decimal, +you can select other options by the user defined conversion feature. +Try: + adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtString +or: + adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtFloat +or: + adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = write_your_own_convertion_function + ............ +notes for 2.6.2: + The definitive source has been moved to https://github.com/mhammond/pywin32/tree/master/adodbapi. + Remote has proven too hard to configure and test with Pyro4. I am moving it to unsupported status + until I can change to a different connection method. +whats new in version 2.6 + A cursor.prepare() method and support for prepared SQL statements. + Lots of refactoring, especially of the Remote and Server modules (still to be treated as Beta code). + The quick start document 'quick_reference.odt' will export as a nice-looking pdf. + Added paramstyles 'pyformat' and 'dynamic'. If your 'paramstyle' is 'named' you _must_ pass a dictionary of + parameters to your .execute() method. If your 'paramstyle' is 'format' 'pyformat' or 'dynamic', you _may_ + pass a dictionary of parameters -- provided your SQL operation string is formatted correctly. + +whats new in version 2.5 + Remote module: (works on Linux!) allows a Windows computer to serve ADO databases via PyRO + Server module: PyRO server for ADO. Run using a command like= C:>python -m adodbapi.server + (server has simple connection string macros: is64bit, getuser, sql_provider, auto_security) + Brief documentation included. See adodbapi/examples folder adodbapi.rtf + New connection method conn.get_table_names() --> list of names of tables in database + + Vastly refactored. Data conversion things have been moved to the new adodbapi.apibase module. + Many former module-level attributes are now class attributes. (Should be more thread-safe) + Connection objects are now context managers for transactions and will commit or rollback. + Cursor objects are context managers and will automatically close themselves. + Autocommit can be switched on and off. + Keyword and positional arguments on the connect() method work as documented in PEP 249. + Keyword arguments from the connect call can be formatted into the connection string. + New keyword arguments defined, such as: autocommit, paramstyle, remote_proxy, remote_port. 
+ *** Breaking change: variantConversion lookups are simplified: the following will raise KeyError: + oldconverter=adodbapi.variantConversions[adodbapi.adoStringTypes] + Refactor as: oldconverter=adodbapi.variantConversions[adodbapi.adoStringTypes[0]] + +License +------- +LGPL, see http://www.opensource.org/licenses/lgpl-license.php + +Documentation +------------- + +Look at adodbapi/quick_reference.md +http://www.python.org/topics/database/DatabaseAPI-2.0.html +read the examples in adodbapi/examples +and look at the test cases in adodbapi/test directory. + +Mailing lists +------------- +The adodbapi mailing lists have been deactivated. Submit comments to the +pywin32 or IronPython mailing lists. + -- the bug tracker on sourceforge.net/projects/adodbapi may be checked, (infrequently). + -- please use: https://github.com/mhammond/pywin32/issues diff --git a/venv/Lib/site-packages/adodbapi/remote.py b/venv/Lib/site-packages/adodbapi/remote.py new file mode 100644 index 00000000..aae25c66 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/remote.py @@ -0,0 +1,527 @@ +"""adodbapi.remote - A python DB API 2.0 (PEP 249) interface to Microsoft ADO + +Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole +* http://sourceforge.net/projects/pywin32 +* http://sourceforge.net/projects/adodbapi + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + + django adaptations and refactoring thanks to Adam Vandenberg + +DB-API 2.0 specification: http://www.python.org/dev/peps/pep-0249/ + +This module source should run correctly in CPython versions 2.5 and later, +or IronPython version 2.7 and later, +or, after running through 2to3.py, CPython 3.0 or later. +""" + +__version__ = '2.6.0.4' +version = 'adodbapi.remote v' + __version__ + +import os +import sys +import array +import time +import datetime + +# Pyro4 is required for server and remote operation --> https://pypi.python.org/pypi/Pyro4/ +try: + import Pyro4 +except ImportError: + print('* * * Sorry, server operation requires Pyro4. 
Please "pip import" it.') + exit(11) + +import adodbapi +import adodbapi.apibase as api +import adodbapi.process_connect_string +from adodbapi.apibase import ProgrammingError + +_BaseException = api._BaseException + +sys.excepthook = Pyro4.util.excepthook +Pyro4.config.PREFER_IP_VERSION = 0 # allow system to prefer IPv6 +Pyro4.config.COMMTIMEOUT = 40.0 # a bit longer than the default SQL server Gtimeout +Pyro4.config.SERIALIZER = 'pickle' + +try: + verbose = int(os.environ['ADODBAPI_VERBOSE']) +except: + verbose = False +if verbose: + print(version) + +# --- define objects to smooth out Python3 <-> Python 2.x differences +unicodeType = str #this line will be altered by 2to3.py to '= str' +longType = int #this line will be altered by 2to3.py to '= int' +StringTypes = str +makeByteBuffer = bytes +memoryViewType = memoryview + +# ----------------------------------------------------------- +# conversion functions mandated by PEP 249 +Binary = makeByteBuffer # override the function from apibase.py +def Date(year,month,day): + return datetime.date(year,month,day) #dateconverter.Date(year,month,day) +def Time(hour,minute,second): + return datetime.time(hour, minute, second) # dateconverter.Time(hour,minute,second) +def Timestamp(year,month,day,hour,minute,second): + return datetime.datetime(year,month,day,hour,minute,second) +def DateFromTicks(ticks): + return Date(*time.gmtime(ticks)[:3]) +def TimeFromTicks(ticks): + return Time(*time.gmtime(ticks)[3:6]) +def TimestampFromTicks(ticks): + return Timestamp(*time.gmtime(ticks)[:6]) + +def connect(*args, **kwargs): # --> a remote db-api connection object + """Create and open a remote db-api database connection object""" + # process the argument list the programmer gave us + kwargs = adodbapi.process_connect_string.process(args, kwargs) + # the "proxy_xxx" keys tell us where to find the PyRO proxy server + kwargs.setdefault('pyro_connection', 'PYRO:ado.connection@%(proxy_host)s:%(proxy_port)s') + if not 'proxy_port' in kwargs: + try: + pport = os.environ['PROXY_PORT'] + except KeyError: + pport = 9099 + kwargs['proxy_port'] = pport + if not 'proxy_host' in kwargs or not kwargs['proxy_host']: + try: + phost = os.environ['PROXY_HOST'] + except KeyError: + phost = '[::1]' # '127.0.0.1' + kwargs['proxy_host'] = phost + ado_uri = kwargs['pyro_connection'] % kwargs + # ask PyRO make us a remote connection object + auto_retry = 3 + while auto_retry: + try: + dispatcher = Pyro4.Proxy(ado_uri) + if 'comm_timeout' in kwargs: + dispatcher._pyroTimeout = float(kwargs['comm_timeout']) + uri = dispatcher.make_connection() + break + except Pyro4.core.errors.PyroError: + auto_retry -= 1 + if auto_retry: + time.sleep(1) + else: + raise api.DatabaseError ('Cannot create connection to=%s' % ado_uri) + + conn_uri = fix_uri(uri, kwargs) # get a host connection from the proxy server + while auto_retry: + try: + host_conn = Pyro4.Proxy(conn_uri) # bring up an exclusive Pyro connection for my ADO connection + break + except Pyro4.core.errors.PyroError: + auto_retry -= 1 + if auto_retry: + time.sleep(1) + else: + raise api.DatabaseError ('Cannot create ADO connection object using=%s' % conn_uri) + if 'comm_timeout' in kwargs: + host_conn._pyroTimeout = float(kwargs['comm_timeout']) + # make a local clone + myConn = Connection() + while auto_retry: + try: + myConn.connect(kwargs, host_conn) # call my connect method -- hand him the host connection + break + except Pyro4.core.errors.PyroError: + auto_retry -= 1 + if auto_retry: + time.sleep(1) + else: + raise api.DatabaseError 
('Pyro error creating connection to/thru=%s' % repr(kwargs)) + except _BaseException as e: + raise api.DatabaseError('Error creating remote connection to=%s, e=%s, %s' % (repr(kwargs), repr(e),sys.exc_info()[2])) + return myConn + +def fix_uri(uri, kwargs): + """convert a generic pyro uri with '0.0.0.0' into the address we actually called""" + u = uri.asString() + s = u.split('[::0]') # IPv6 generic address + if len(s) == 1: # did not find one + s = u.split('0.0.0.0') # IPv4 generic address + if len(s) > 1: # found a generic + return kwargs['proxy_host'].join(s) # fill in our address for the host + return uri + +# # # # # ----- the Class that defines a connection ----- # # # # # +class Connection(object): + # include connection attributes required by api definition. + Warning = api.Warning + Error = api.Error + InterfaceError = api.InterfaceError + DataError = api.DataError + DatabaseError = api.DatabaseError + OperationalError = api.OperationalError + IntegrityError = api.IntegrityError + InternalError = api.InternalError + NotSupportedError = api.NotSupportedError + ProgrammingError = api.ProgrammingError + # set up some class attributes + paramstyle = api.paramstyle + + @property + def dbapi(self): # a proposed db-api version 3 extension. + "Return a reference to the DBAPI module for this Connection." + return api + + def __init__(self): + self.proxy = None + self.kwargs = {} + self.errorhandler = None + self.supportsTransactions = False + self.paramstyle = api.paramstyle + self.timeout = 30 + self.cursors = {} + + def connect(self, kwargs, connection_maker): + self.kwargs = kwargs + if verbose: + print('%s attempting: "%s"' % (version, repr(kwargs))) + self.proxy = connection_maker + ##try: + ret = self.proxy.connect(kwargs) # ask the server to hook us up + ##except ImportError, e: # Pyro is trying to import pywinTypes.comerrer + ## self._raiseConnectionError(api.DatabaseError, 'Proxy cannot connect using=%s' % repr(kwargs)) + if ret is not True: + self._raiseConnectionError(api.OperationalError, 'Proxy returns error message=%s'%repr(ret)) + + self.supportsTransactions = self.getIndexedValue('supportsTransactions') + self.paramstyle = self.getIndexedValue('paramstyle') + self.timeout = self.getIndexedValue('timeout') + if verbose: + print('adodbapi.remote New connection at %X' % id(self)) + + def _raiseConnectionError(self, errorclass, errorvalue): + eh = self.errorhandler + if eh is None: + eh = api.standardErrorHandler + eh(self, None, errorclass, errorvalue) + + + def close(self): + """Close the connection now (rather than whenever __del__ is called). + + The connection will be unusable from this point forward; + an Error (or subclass) exception will be raised if any operation is attempted with the connection. + The same applies to all cursor objects trying to use the connection. + """ + for crsr in list(self.cursors.values())[:]: # copy the list, then close each one + crsr.close() + try: + """close the underlying remote Connection object""" + self.proxy.close() + if verbose: + print('adodbapi.remote Closed connection at %X' % id(self)) + object.__delattr__(self, 'proxy') # future attempts to use closed cursor will be caught by __getattr__ + except Exception: + pass + + def __del__(self): + try: + self.proxy.close() + except: + pass + + def commit(self): + """Commit any pending transaction to the database. + + Note that if the database supports an auto-commit feature, + this must be initially off. An interface method may be provided to turn it back on. 
+ Database modules that do not support transactions should implement this method with void functionality. + """ + if not self.supportsTransactions: + return + result = self.proxy.commit() + if result: + self._raiseConnectionError(api.OperationalError, 'Error during commit: %s' % result) + + def _rollback(self): + """In case a database does provide transactions this method causes the the database to roll back to + the start of any pending transaction. Closing a connection without committing the changes first will + cause an implicit rollback to be performed. + """ + result = self.proxy.rollback() + if result: + self._raiseConnectionError(api.OperationalError, 'Error during rollback: %s' % result) + + def __setattr__(self, name, value): + if name in ('paramstyle', 'timeout', 'autocommit'): + if self.proxy: + self.proxy.send_attribute_to_host(name, value) + object.__setattr__(self, name, value) # store attribute locally (too) + + def __getattr__(self, item): + if item == 'rollback': # the rollback method only appears if the database supports transactions + if self.supportsTransactions: + return self._rollback # return the rollback method so the caller can execute it. + else: + raise self.ProgrammingError ('this data provider does not support Rollback') + elif item in ('dbms_name', 'dbms_version', 'connection_string', 'autocommit'): # 'messages' ): + return self.getIndexedValue(item) + elif item == 'proxy': + raise self.ProgrammingError('Attempting to use closed connection') + else: + raise self.ProgrammingError('No remote access for attribute="%s"' % item) + + def getIndexedValue(self, index): + r = self.proxy.get_attribute_for_remote(index) + return r + + def cursor(self): + "Return a new Cursor Object using the connection." + myCursor = Cursor(self) + return myCursor + + def _i_am_here(self, crsr): + "message from a new cursor proclaiming its existence" + self.cursors[crsr.id] = crsr + + def _i_am_closing(self, crsr): + "message from a cursor giving connection a chance to clean up" + try: + del self.cursors[crsr.id] + except: + pass + + def __enter__(self): # Connections are context managers + return(self) + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type: + self._rollback() #automatic rollback on errors + else: + self.commit() + + def get_table_names(self): + return self.proxy.get_table_names() + +def fixpickle(x): + """pickle barfs on buffer(x) so we pass as array.array(x) then restore to original form for .execute()""" + if x is None: + return None + if isinstance(x, dict): + # for 'named' paramstyle user will pass a mapping + newargs = {} + for arg,val in list(x.items()): + if isinstance(val, memoryViewType): + newval = array.array('B') + newval.fromstring(val) + newargs[arg] = newval + else: + newargs[arg] = val + return newargs + # if not a mapping, then a sequence + newargs = [] + for arg in x: + if isinstance(arg, memoryViewType): + newarg = array.array('B') + newarg.fromstring(arg) + newargs.append(newarg) + else: + newargs.append(arg) + return newargs + +class Cursor(object): + def __init__(self, connection): + self.command = None + self.errorhandler = None ## was: connection.errorhandler + self.connection = connection + self.proxy = self.connection.proxy + self.rs = None # the fetchable data for this cursor + self.converters = NotImplemented + self.id = connection.proxy.build_cursor() + connection._i_am_here(self) + self.recordset_format = api.RS_REMOTE + if verbose: + print('%s New cursor at %X on conn %X' % (version, id(self), id(self.connection))) + + def 
prepare(self, operation): + self.command = operation + try: del self.description + except AttributeError: pass + self.proxy.crsr_prepare(self.id, operation) + + def __iter__(self): # [2.1 Zamarev] + return iter(self.fetchone, None) # [2.1 Zamarev] + + def __next__(self): + r = self.fetchone() + if r: + return r + raise StopIteration + + def __enter__(self): + "Allow database cursors to be used with context managers." + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + "Allow database cursors to be used with context managers." + self.close() + + def __getattr__(self, key): + if key == 'numberOfColumns': + try: + return len(self.rs[0]) + except: + return 0 + if key == 'description': + try: + self.description = self.proxy.crsr_get_description(self.id)[:] + return self.description + except TypeError: + return None + if key == 'columnNames': + try: + r = dict(self.proxy.crsr_get_columnNames(self.id)) # copy the remote columns + + except TypeError: + r = {} + self.columnNames = r + return r + + if key == 'remote_cursor': + raise api.OperationalError + try: + return self.proxy.crsr_get_attribute_for_remote(self.id, key) + except AttributeError: + raise api.InternalError ('Failure getting attribute "%s" from proxy cursor.' % key) + + def __setattr__(self, key, value): + if key == 'arraysize': + self.proxy.crsr_set_arraysize(self.id, value) + if key == 'paramstyle': + if value in api.accepted_paramstyles: + self.proxy.crsr_set_paramstyle(self.id, value) + else: + self._raiseCursorError(api.ProgrammingError, 'invalid paramstyle ="%s"' % value) + object.__setattr__(self, key, value) + + def _raiseCursorError(self, errorclass, errorvalue): + eh = self.errorhandler + if eh is None: + eh = api.standardErrorHandler + eh(self.connection, self, errorclass, errorvalue) + + def execute(self, operation, parameters=None): + if self.connection is None: + self._raiseCursorError(ProgrammingError, 'Attempted operation on closed cursor') + self.command = operation + try: del self.description + except AttributeError: pass + try: del self.columnNames + except AttributeError: pass + fp = fixpickle(parameters) + if verbose > 2: + print(('%s executing "%s" with params=%s' % (version, operation, repr(parameters)))) + result = self.proxy.crsr_execute(self.id, operation, fp) + if result: # an exception was triggered + self._raiseCursorError(result[0], result[1]) + + def executemany(self, operation, seq_of_parameters): + if self.connection is None: + self._raiseCursorError(ProgrammingError, 'Attempted operation on closed cursor') + self.command = operation + try: del self.description + except AttributeError: pass + try: del self.columnNames + except AttributeError: pass + sq = [fixpickle(x) for x in seq_of_parameters] + if verbose > 2: + print(('%s executemany "%s" with params=%s' % (version, operation, repr(seq_of_parameters)))) + self.proxy.crsr_executemany(self.id, operation, sq) + + def nextset(self): + try: del self.description + except AttributeError: pass + try: del self.columnNames + except AttributeError: pass + if verbose > 2: + print(('%s nextset' % version)) + return self.proxy.crsr_nextset(self.id) + + def callproc(self, procname, parameters=None): + if self.connection is None: + self._raiseCursorError(ProgrammingError, 'Attempted operation on closed cursor') + self.command = procname + try: del self.description + except AttributeError: pass + try: del self.columnNames + except AttributeError: pass + fp = fixpickle(parameters) + if verbose > 2: + print(('%s callproc "%s" with params=%s' % 
(version, procname, repr(parameters)))) + return self.proxy.crsr_callproc(self.id, procname, fp) + + def fetchone(self): + try: + f1 = self.proxy.crsr_fetchone(self.id) + except _BaseException as e: + self._raiseCursorError(api.DatabaseError, e) + else: + if f1 is None: + return None + self.rs = [f1] + return api.SQLrows(self.rs, 1, self)[0] # new object to hold the results of the fetch + + def fetchmany(self, size=None): + try: + self.rs = self.proxy.crsr_fetchmany(self.id, size) + if not self.rs: + return [] + r = api.SQLrows(self.rs, len(self.rs), self) + return r + except Exception as e: + self._raiseCursorError(api.DatabaseError, e) + + def fetchall(self): + try: + self.rs = self.proxy.crsr_fetchall(self.id) + if not self.rs: + return [] + return api.SQLrows(self.rs, len(self.rs), self) + except Exception as e: + self._raiseCursorError(api.DatabaseError, e) + + def close(self): + if self.connection is None: + return + self.connection._i_am_closing(self) # take me off the connection's cursors list + try: + self.proxy.crsr_close(self.id) + except: pass + try: del self.description + except: pass + try: del self.rs # let go of the recordset + except: pass + self.connection = None #this will make all future method calls on me throw an exception + self.proxy = None + if verbose: + print('adodbapi.remote Closed cursor at %X' % id(self)) + + def __del__(self): + try: + self.close() + except: + pass + + def setinputsizes(self,sizes): + pass + + def setoutputsize(self, size, column=None): + pass diff --git a/venv/Lib/site-packages/adodbapi/schema_table.py b/venv/Lib/site-packages/adodbapi/schema_table.py new file mode 100644 index 00000000..1b113abd --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/schema_table.py @@ -0,0 +1,14 @@ +"""call using an open ADO connection --> list of table names""" +from . import adodbapi + +def names(connection_object): + ado = connection_object.adoConn + schema = ado.OpenSchema(20) # constant = adSchemaTables + + tables = [] + while not schema.EOF: + name = adodbapi.getIndexedValue(schema.Fields,'TABLE_NAME').Value + tables.append(name) + schema.MoveNext() + del schema + return tables diff --git a/venv/Lib/site-packages/adodbapi/setup.py b/venv/Lib/site-packages/adodbapi/setup.py new file mode 100644 index 00000000..00560ef7 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/setup.py @@ -0,0 +1,84 @@ +"""adodbapi -- a pure Python PEP 249 DB-API package using Microsoft ADO + +Adodbapi can be run on CPython version 2.7, +or IronPython version 2.6 and later, +or Python 3.5 and later (after filtering through 2to3.py) +""" +CLASSIFIERS = """\ +Development Status :: 5 - Production/Stable +Intended Audience :: Developers +License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Operating System :: Microsoft :: Windows +Operating System :: POSIX :: Linux +Programming Language :: Python +Programming Language :: Python :: 3 +Programming Language :: SQL +Topic :: Software Development +Topic :: Software Development :: Libraries :: Python Modules +Topic :: Database +""" + +NAME = 'adodbapi' +MAINTAINER = "Vernon Cole" +MAINTAINER_EMAIL = "vernondcole@gmail.com" +DESCRIPTION = """A pure Python package implementing PEP 249 DB-API using Microsoft ADO.""" +URL = "http://sourceforge.net/projects/adodbapi" +LICENSE = 'LGPL' +CLASSIFIERS = filter(None, CLASSIFIERS.split('\n')) +AUTHOR = "Henrik Ekelund, Vernon Cole, et.al." 
+AUTHOR_EMAIL = "vernondcole@gmail.com" +PLATFORMS = ["Windows","Linux"] + +VERSION = None # in case searching for version fails +a = open('adodbapi.py') # find the version string in the source code +for line in a: + if '__version__' in line: + VERSION = line.split("'")[1] + print('adodbapi version="%s"' % VERSION) + break +a.close() + +##DOWNLOAD_URL = "http://sourceforge.net/projects/adodbapi/files/adodbapi/" + VERSION.rsplit('.', 1)[0] + '/adodbapi-' + VERSION + '.zip' + +import sys +def setup_package(): + + from distutils.core import setup + + if sys.version_info >= (3, 0): + + try: + from distutils.command.build_py import build_py_2to3 as build_py +## # exclude fixers that break already compatible code +## from lib2to3.refactor import get_fixers_from_package +## fixers = get_fixers_from_package('lib2to3.fixes') +## for skip_fixer in ['import']: +## fixers.remove('lib2to3.fixes.fix_' + skip_fixer) +## build_py.fixer_names = fixers + except ImportError: + raise ImportError("build_py_2to3 not found in distutils - it is required for Python 3.x") + else: + from distutils.command.build_py import build_py + + setup( + cmdclass = {'build_py': build_py}, + name=NAME, + maintainer=MAINTAINER, + maintainer_email=MAINTAINER_EMAIL, + description=DESCRIPTION, + url=URL, + keywords='database ado odbc dbapi db-api Microsoft SQL', +## download_url=DOWNLOAD_URL, + long_description=open('README.txt').read(), + license=LICENSE, + classifiers=CLASSIFIERS, + author=AUTHOR, + author_email=AUTHOR_EMAIL, + platforms=PLATFORMS, + version=VERSION, + package_dir = {'adodbapi':''}, + packages=['adodbapi'] ) + return + +if __name__ == '__main__': + setup_package() diff --git a/venv/Lib/site-packages/adodbapi/test/__pycache__/adodbapitest.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/test/__pycache__/adodbapitest.cpython-36.pyc new file mode 100644 index 00000000..25351505 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/test/__pycache__/adodbapitest.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/test/__pycache__/adodbapitestconfig.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/test/__pycache__/adodbapitestconfig.cpython-36.pyc new file mode 100644 index 00000000..facbd2cc Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/test/__pycache__/adodbapitestconfig.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/test/__pycache__/dbapi20.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/test/__pycache__/dbapi20.cpython-36.pyc new file mode 100644 index 00000000..8ed7886f Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/test/__pycache__/dbapi20.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/test/__pycache__/is64bit.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/test/__pycache__/is64bit.cpython-36.pyc new file mode 100644 index 00000000..1194a821 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/test/__pycache__/is64bit.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/test/__pycache__/setuptestframework.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/test/__pycache__/setuptestframework.cpython-36.pyc new file mode 100644 index 00000000..004d0d21 Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/test/__pycache__/setuptestframework.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/test/__pycache__/test_adodbapi_dbapi20.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/test/__pycache__/test_adodbapi_dbapi20.cpython-36.pyc new file mode 100644 index 00000000..a1e74799 Binary 
files /dev/null and b/venv/Lib/site-packages/adodbapi/test/__pycache__/test_adodbapi_dbapi20.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/test/__pycache__/tryconnection.cpython-36.pyc b/venv/Lib/site-packages/adodbapi/test/__pycache__/tryconnection.cpython-36.pyc new file mode 100644 index 00000000..7bf6556a Binary files /dev/null and b/venv/Lib/site-packages/adodbapi/test/__pycache__/tryconnection.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/adodbapi/test/adodbapitest.py b/venv/Lib/site-packages/adodbapi/test/adodbapitest.py new file mode 100644 index 00000000..00076075 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/test/adodbapitest.py @@ -0,0 +1,1375 @@ +""" Unit tests version 2.6.1.0 for adodbapi""" +""" + adodbapi - A python DB API 2.0 interface to Microsoft ADO + + Copyright (C) 2002 Henrik Ekelund + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + + Updates by Vernon Cole +""" + +import unittest +import sys +import datetime +import decimal +import copy +import random +import string + +try: + import win32com.client + win32 = True +except ImportError: + win32 = False + +# run the configuration module. 
+import adodbapitestconfig as config # will set sys.path to find correct version of adodbapi +# in our code below, all our switches are from config.whatever +import tryconnection + +import adodbapi +import adodbapi.apibase as api + + +try: + import adodbapi.ado_consts as ado_consts +except ImportError: #we are doing a shortcut import as a module -- so + try: + import ado_consts + except ImportError: + from adodbapi import ado_consts + +def str2bytes(sval): + return sval.encode("latin1") +long = int + +def randomstring(length): + return ''.join([random.choice(string.ascii_letters) for n in range(32)]) +class CommonDBTests(unittest.TestCase): + "Self contained super-simple tests in easy syntax, should work on everything between mySQL and Oracle" + + def setUp(self): + self.engine = 'unknown' + + def getEngine(self): + return self.engine + + def getConnection(self): + raise NotImplementedError #"This method must be overriden by a subclass" + + def getCursor(self): + return self.getConnection().cursor() + + def testConnection(self): + crsr=self.getCursor() + assert crsr.__class__.__name__ == 'Cursor' + + def testErrorHandlerInherits(self): + if not self.remote: + conn=self.getConnection() + mycallable=lambda connection,cursor,errorclass,errorvalue: 1 + conn.errorhandler=mycallable + crsr=conn.cursor() + assert crsr.errorhandler==mycallable,"Error handler on crsr should be same as on connection" + + def testDefaultErrorHandlerConnection(self): + if not self.remote: + conn=self.getConnection() + del conn.messages[:] + try: + conn.close() + conn.commit() #Should not be able to use connection after it is closed + except: + assert len(conn.messages)==1 + assert len(conn.messages[0])==2 + assert conn.messages[0][0]==api.ProgrammingError + + def testOwnErrorHandlerConnection(self): + if self.remote: # ToDo: use "skip" + return + mycallable=lambda connection,cursor,errorclass,errorvalue: 1 #does not raise anything + conn=self.getConnection() + conn.errorhandler=mycallable + conn.close() + conn.commit() #Should not be able to use connection after it is closed + assert len(conn.messages)==0 + + conn.errorhandler=None #This should bring back the standard error handler + try: + conn.close() + conn.commit() #Should not be able to use connection after it is closed + except: + pass + #The Standard errorhandler appends error to messages attribute + assert len(conn.messages)>0,"Setting errorhandler to none should bring back the standard error handler" + + + def testDefaultErrorHandlerCursor(self): + crsr=self.getConnection().cursor() + if not self.remote: + del crsr.messages[:] + try: + crsr.execute("SELECT abbtytddrf FROM dasdasd") + except: + assert len(crsr.messages)==1 + assert len(crsr.messages[0])==2 + assert crsr.messages[0][0]==api.DatabaseError + + def testOwnErrorHandlerCursor(self): + if self.remote: # ToDo: should be a "skip" + return + mycallable=lambda connection,cursor,errorclass,errorvalue: 1 #does not raise anything + crsr=self.getConnection().cursor() + crsr.errorhandler=mycallable + crsr.execute("SELECT abbtytddrf FROM dasdasd") + assert len(crsr.messages)==0 + + crsr.errorhandler=None #This should bring back the standard error handler + try: + crsr.execute("SELECT abbtytddrf FROM dasdasd") + except: + pass + #The Standard errorhandler appends error to messages attribute + assert len(crsr.messages)>0,"Setting errorhandler to none should bring back the standard error handler" + + + def testUserDefinedConversions(self): + if self.remote: ## Todo: should be a "skip" + return + try: + 
duplicatingConverter=lambda aStringField: aStringField*2 + assert duplicatingConverter('gabba') == 'gabbagabba' + + self.helpForceDropOnTblTemp() + conn=self.getConnection() + # the variantConversions attribute should not exist on a normal connection object + self.assertRaises(AttributeError, lambda x:conn.variantConversions[x],[2]) + if not self.remote: + # create a variantConversions attribute on the connection + conn.variantConversions = copy.copy(api.variantConversions) + crsr=conn.cursor() + tabdef = "CREATE TABLE xx_%s (fldData VARCHAR(100) NOT NULL, fld2 VARCHAR(20))" % config.tmp + crsr.execute(tabdef) + crsr.execute("INSERT INTO xx_%s(fldData,fld2) VALUES('gabba','booga')" % config.tmp) + crsr.execute("INSERT INTO xx_%s(fldData,fld2) VALUES('hey','yo')" % config.tmp) + # change converter for ALL adoStringTypes columns + conn.variantConversions[api.adoStringTypes]=duplicatingConverter + crsr.execute("SELECT fldData,fld2 FROM xx_%s ORDER BY fldData" % config.tmp) + + rows=crsr.fetchall() + row = rows[0] + self.assertEqual(row[0],'gabbagabba') + row = rows[1] + self.assertEqual(row[0],'heyhey') + self.assertEqual(row[1],'yoyo') + + upcaseConverter=lambda aStringField: aStringField.upper() + assert upcaseConverter('upThis') == 'UPTHIS' + + # now use a single column converter + rows.converters[1] = upcaseConverter # convert second column + self.assertEqual(row[0],'heyhey') # first will be unchanged + self.assertEqual(row[1],'YO') # second will convert to upper case + + finally: + try: + del conn.variantConversions #Restore the default + except: pass + self.helpRollbackTblTemp() + + def testUserDefinedConversionForExactNumericTypes(self): + # variantConversions is a dictionary of conversion functions + # held internally in adodbapi.apibase + # + # !!! this test intentionally alters the value of what should be constant in the module + # !!! no new code should use this example, to is only a test to see that the + # !!! deprecated way of doing this still works. (use connection.variantConversions) + # + if not self.remote and sys.version_info < (3,0): ### Py3 need different test + oldconverter = adodbapi.variantConversions[ado_consts.adNumeric] #keep old function to restore later + # By default decimal and "numbers" are returned as decimals. 
+ # Instead, make numbers return as floats + try: + adodbapi.variantConversions[ado_consts.adNumeric] = adodbapi.cvtFloat + self.helpTestDataType("decimal(18,2)",'NUMBER',3.45,compareAlmostEqual=1) + self.helpTestDataType("numeric(18,2)",'NUMBER',3.45,compareAlmostEqual=1) + # now return strings + adodbapi.variantConversions[ado_consts.adNumeric] = adodbapi.cvtString + self.helpTestDataType("numeric(18,2)",'NUMBER','3.45') + # now a completly weird user defined convertion + adodbapi.variantConversions[ado_consts.adNumeric] = lambda x: '!!This function returns a funny unicode string %s!!'%x + self.helpTestDataType("numeric(18,2)",'NUMBER','3.45', + allowedReturnValues=['!!This function returns a funny unicode string 3.45!!']) + finally: + # now reset the converter to its original function + adodbapi.variantConversions[ado_consts.adNumeric]=oldconverter #Restore the original convertion function + + def helpTestDataType(self,sqlDataTypeString, + DBAPIDataTypeString, + pyData, + pyDataInputAlternatives=None, + compareAlmostEqual=None, + allowedReturnValues=None): + self.helpForceDropOnTblTemp() + conn=self.getConnection() + crsr=conn.cursor() + tabdef= """ + CREATE TABLE xx_%s ( + fldId integer NOT NULL, + fldData """ % config.tmp + sqlDataTypeString + ")\n" + + crsr.execute(tabdef) + + #Test Null values mapped to None + crsr.execute("INSERT INTO xx_%s (fldId) VALUES (1)" % config.tmp) + + crsr.execute("SELECT fldId,fldData FROM xx_%s" % config.tmp) + rs=crsr.fetchone() + self.assertEqual(rs[1],None) #Null should be mapped to None + assert rs[0]==1 + + #Test description related + descTuple=crsr.description[1] + assert descTuple[0] in ['fldData','flddata'], 'was "%s" expected "%s"'%(descTuple[0],'fldData') + + if DBAPIDataTypeString=='STRING': + assert descTuple[1] == api.STRING, 'was "%s" expected "%s"'%(descTuple[1],api.STRING.values) + elif DBAPIDataTypeString == 'NUMBER': + assert descTuple[1] == api.NUMBER, 'was "%s" expected "%s"'%(descTuple[1],api.NUMBER.values) + elif DBAPIDataTypeString == 'BINARY': + assert descTuple[1] == api.BINARY, 'was "%s" expected "%s"'%(descTuple[1],api.BINARY.values) + elif DBAPIDataTypeString == 'DATETIME': + assert descTuple[1] == api.DATETIME, 'was "%s" expected "%s"'%(descTuple[1],api.DATETIME.values) + elif DBAPIDataTypeString == 'ROWID': + assert descTuple[1] == api.ROWID, 'was "%s" expected "%s"'%(descTuple[1],api.ROWID.values) + elif DBAPIDataTypeString == 'UUID': + assert descTuple[1] == api.OTHER, 'was "%s" expected "%s"'%(descTuple[1],api.OTHER.values) + else: + raise NotImplementedError #"DBAPIDataTypeString not provided" + + #Test data binding + inputs=[pyData] + if pyDataInputAlternatives: + inputs.extend(pyDataInputAlternatives) + inputs = set(inputs) # removes redundant string==unicode tests + fldId=1 + for inParam in inputs: + fldId+=1 + try: + crsr.execute("INSERT INTO xx_%s (fldId,fldData) VALUES (?,?)" % config.tmp, (fldId, inParam)) + except: + if self.remote: + for message in crsr.messages: + print(message) + else: + conn.printADOerrors() + raise + crsr.execute("SELECT fldData FROM xx_%s WHERE ?=fldID" % config.tmp, [fldId]) + rs=crsr.fetchone() + if allowedReturnValues: + allowedTypes = tuple([type(aRV) for aRV in allowedReturnValues]) + assert isinstance(rs[0],allowedTypes), \ + 'result type "%s" must be one of %s'%(type(rs[0]),allowedTypes) + else: + assert isinstance(rs[0] ,type(pyData)), \ + 'result type "%s" must be instance of %s'%(type(rs[0]),type(pyData)) + + if compareAlmostEqual and DBAPIDataTypeString == 'DATETIME': + 
iso1=adodbapi.dateconverter.DateObjectToIsoFormatString(rs[0]) + iso2=adodbapi.dateconverter.DateObjectToIsoFormatString(pyData) + self.assertEqual(iso1, iso2) + elif compareAlmostEqual: + s = float(pyData) + v = float(rs[0]) + assert abs(v-s)/s < 0.00001, \ + "Values not almost equal recvd=%s, expected=%f" %(rs[0],s) + else: + if allowedReturnValues: + ok=False + self.assertTrue(rs[0] in allowedReturnValues, + 'Value "%s" not in %s' % (repr(rs[0]), allowedReturnValues)) + else: + self.assertEqual(rs[0], pyData, + 'Values are not equal recvd="%s", expected="%s"' %(rs[0],pyData)) + + def testDataTypeFloat(self): + self.helpTestDataType("real",'NUMBER',3.45,compareAlmostEqual=True) + self.helpTestDataType("float",'NUMBER',1.79e37,compareAlmostEqual=True) + + def testDataTypeDecmal(self): + self.helpTestDataType("decimal(18,2)",'NUMBER',3.45, + allowedReturnValues=['3.45','3,45',decimal.Decimal('3.45')]) + self.helpTestDataType("numeric(18,2)",'NUMBER',3.45, + allowedReturnValues=['3.45','3,45',decimal.Decimal('3.45')]) + self.helpTestDataType("decimal(20,2)",'NUMBER',444444444444444444, + allowedReturnValues=['444444444444444444.00', '444444444444444444,00', + decimal.Decimal('444444444444444444')]) + if self.getEngine() == 'MSSQL': + self.helpTestDataType("uniqueidentifier",'UUID','{71A4F49E-39F3-42B1-A41E-48FF154996E6}', + allowedReturnValues=['{71A4F49E-39F3-42B1-A41E-48FF154996E6}']) + + def testDataTypeMoney(self): #v2.1 Cole -- use decimal for money + if self.getEngine() == 'MySQL': + self.helpTestDataType("DECIMAL(20,4)",'NUMBER',decimal.Decimal('-922337203685477.5808')) + elif self.getEngine() == 'PostgreSQL': + self.helpTestDataType("money",'NUMBER',decimal.Decimal('-922337203685477.5808'), + compareAlmostEqual=True, + allowedReturnValues=[-922337203685477.5808, + decimal.Decimal('-922337203685477.5808')]) + else: + self.helpTestDataType("smallmoney",'NUMBER',decimal.Decimal('214748.02')) + self.helpTestDataType("money",'NUMBER',decimal.Decimal('-922337203685477.5808')) + + def testDataTypeInt(self): + if self.getEngine() != 'PostgreSQL': + self.helpTestDataType("tinyint",'NUMBER',115) + self.helpTestDataType("smallint",'NUMBER',-32768) + if self.getEngine() not in ['ACCESS','PostgreSQL']: + self.helpTestDataType("bit",'NUMBER',1) #Does not work correctly with access + if self.getEngine() in ['MSSQL','PostgreSQL']: + self.helpTestDataType("bigint",'NUMBER',3000000000, + allowedReturnValues=[3000000000, int(3000000000)]) + self.helpTestDataType("int",'NUMBER',2147483647) + + def testDataTypeChar(self): + for sqlDataType in ("char(6)","nchar(6)"): + self.helpTestDataType(sqlDataType,'STRING','spam ',allowedReturnValues=['spam','spam','spam ','spam ']) + + def testDataTypeVarChar(self): + if self.getEngine() == 'MySQL': + stringKinds = ["varchar(10)","text"] + elif self.getEngine() == 'PostgreSQL': + stringKinds = ["varchar(10)","text","character varying"] + else: + stringKinds = ["varchar(10)","nvarchar(10)","text","ntext"] #,"varchar(max)"] + + for sqlDataType in stringKinds: + self.helpTestDataType(sqlDataType,'STRING','spam',['spam']) + + def testDataTypeDate(self): + if self.getEngine() == 'PostgreSQL': + dt = "timestamp" + else: + dt = "datetime" + self.helpTestDataType(dt,'DATETIME',adodbapi.Date(2002,10,28), + compareAlmostEqual=True) + if self.getEngine() not in ['MySQL','PostgreSQL']: + self.helpTestDataType("smalldatetime",'DATETIME',adodbapi.Date(2002,10,28), + compareAlmostEqual=True) + if tag != 'pythontime' and self.getEngine() not in ['MySQL','PostgreSQL']: # fails when 
using pythonTime + self.helpTestDataType(dt,'DATETIME', adodbapi.Timestamp(2002,10,28,12,15,1), + compareAlmostEqual=True) + + def testDataTypeBinary(self): + binfld = str2bytes('\x07\x00\xE2\x40*') + arv = [binfld, adodbapi.Binary(binfld), bytes(binfld)] + if self.getEngine() == 'PostgreSQL': + self.helpTestDataType("bytea",'BINARY',adodbapi.Binary(binfld), + allowedReturnValues=arv) + else: + self.helpTestDataType("binary(5)",'BINARY',adodbapi.Binary(binfld), + allowedReturnValues=arv) + self.helpTestDataType("varbinary(100)",'BINARY',adodbapi.Binary(binfld), + allowedReturnValues=arv) + if self.getEngine() != 'MySQL': + self.helpTestDataType("image",'BINARY',adodbapi.Binary(binfld), + allowedReturnValues=arv) + + def helpRollbackTblTemp(self): + self.helpForceDropOnTblTemp() + + def helpForceDropOnTblTemp(self): + conn=self.getConnection() + with conn.cursor() as crsr: + try: + crsr.execute("DROP TABLE xx_%s" % config.tmp) + if not conn.autocommit: + conn.commit() + except: + pass + + def helpCreateAndPopulateTableTemp(self,crsr): + tabdef= """ + CREATE TABLE xx_%s ( + fldData INTEGER + ) + """ % config.tmp + try: #EAFP + crsr.execute(tabdef) + except api.DatabaseError: # was not dropped before + self.helpForceDropOnTblTemp() # so drop it now + crsr.execute(tabdef) + for i in range(9): # note: this poor SQL code, but a valid test + crsr.execute("INSERT INTO xx_%s (fldData) VALUES (%i)" % (config.tmp, i)) + # NOTE: building the test table without using parameter substitution + + def testFetchAll(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) + rs=crsr.fetchall() + assert len(rs)==9 + #test slice of rows + i = 3 + for row in rs[3:-2]: #should have rowid 3..6 + assert row[0]==i + i+=1 + self.helpRollbackTblTemp() + + def testPreparedStatement(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.prepare("SELECT fldData FROM xx_%s" % config.tmp) + crsr.execute(crsr.command) # remembes the one that was prepared + rs=crsr.fetchall() + assert len(rs)==9 + assert rs[2][0]==2 + self.helpRollbackTblTemp() + + def testWrongPreparedStatement(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.prepare("SELECT * FROM nowhere") + crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) # should execute this one, not the prepared one + rs=crsr.fetchall() + assert len(rs)==9 + assert rs[2][0]==2 + self.helpRollbackTblTemp() + + def testIterator(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) + for i,row in enumerate(crsr): # using cursor as an iterator, rather than fetchxxx + assert row[0]==i + self.helpRollbackTblTemp() + + def testExecuteMany(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + seq_of_values = [ (111,) , (222,) ] + crsr.executemany("INSERT INTO xx_%s (fldData) VALUES (?)" % config.tmp, seq_of_values) + if crsr.rowcount==-1: + print(self.getEngine()+" Provider does not support rowcount (on .executemany())") + else: + self.assertEqual( crsr.rowcount,2) + crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) + rs=crsr.fetchall() + assert len(rs)==11 + self.helpRollbackTblTemp() + + + def testRowCount(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) + if crsr.rowcount == -1: + #print("provider does not support rowcount on select") + pass + else: + 
self.assertEqual( crsr.rowcount,9) + self.helpRollbackTblTemp() + + def testRowCountNoRecordset(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.execute("DELETE FROM xx_%s WHERE fldData >= 5" % config.tmp) + if crsr.rowcount==-1: + print(self.getEngine()+" Provider does not support rowcount (on DELETE)") + else: + self.assertEqual( crsr.rowcount,4) + self.helpRollbackTblTemp() + + def testFetchMany(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) + rs=crsr.fetchmany(3) + assert len(rs)==3 + rs=crsr.fetchmany(5) + assert len(rs)==5 + rs=crsr.fetchmany(5) + assert len(rs)==1 #Asked for five, but there is only one left + self.helpRollbackTblTemp() + + def testFetchManyWithArraySize(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) + rs=crsr.fetchmany() + assert len(rs)==1 #arraysize Defaults to one + crsr.arraysize=4 + rs=crsr.fetchmany() + assert len(rs)==4 + rs=crsr.fetchmany() + assert len(rs)==4 + rs=crsr.fetchmany() + assert len(rs)==0 + self.helpRollbackTblTemp() + + def testErrorConnect(self): + conn = self.getConnection() + kw = {} + if 'proxy_host' in conn.kwargs: + kw['proxy_host'] = conn.kwargs['proxy_host'] + conn.close() + self.assertRaises(api.DatabaseError, self.db, 'not a valid connect string', kw) + + def testRowIterator(self): + self.helpForceDropOnTblTemp() + conn=self.getConnection() + crsr=conn.cursor() + tabdef= """ + CREATE TABLE xx_%s ( + fldId integer NOT NULL, + fldTwo integer, + fldThree integer, + fldFour integer) + """ % config.tmp + crsr.execute(tabdef) + + inputs = [(2,3,4),(102,103,104)] + fldId=1 + for inParam in inputs: + fldId+=1 + try: + crsr.execute("INSERT INTO xx_%s (fldId,fldTwo,fldThree,fldFour) VALUES (?,?,?,?)" % config.tmp, + (fldId,inParam[0],inParam[1],inParam[2])) + except: + if self.remote: + for message in crsr.messages: + print(message) + else: + conn.printADOerrors() + raise + crsr.execute("SELECT fldTwo,fldThree,fldFour FROM xx_%s WHERE ?=fldID" % config.tmp, [fldId]) + rec = crsr.fetchone() + # check that stepping through an emulated row works + for j in range(len(inParam)): + assert rec[j] == inParam[j], 'returned value:"%s" != test value:"%s"'%(rec[j],inParam[j]) + # check that we can get a complete tuple from a row + assert tuple(rec) == inParam, 'returned value:"%s" != test value:"%s"'%(repr(rec),repr(inParam)) + # test that slices of rows work + slice1 = tuple(rec[:-1]) + slice2 = tuple(inParam[0:2]) + assert slice1 == slice2, 'returned value:"%s" != test value:"%s"'%(repr(slice1),repr(slice2)) + # now test named column retrieval + assert rec['fldTwo'] == inParam[0] + assert rec.fldThree == inParam[1] + assert rec.fldFour == inParam[2] + # test array operation + # note that the fields vv vv vv are out of order + crsr.execute("select fldThree,fldFour,fldTwo from xx_%s" % config.tmp) + recs = crsr.fetchall() + assert recs[1][0] == 103 + assert recs[0][1] == 4 + assert recs[1]['fldFour'] == 104 + assert recs[0,0] == 3 + assert recs[0,'fldTwo'] == 2 + assert recs[1,2] == 102 + for i in range(1): + for j in range(2): + assert recs[i][j] == recs[i,j] + + def testFormatParamstyle(self): + self.helpForceDropOnTblTemp() + conn=self.getConnection() + conn.paramstyle = 'format' #test nonstandard use of paramstyle + crsr=conn.cursor() + tabdef= """ + CREATE TABLE xx_%s ( + fldId integer NOT NULL, + fldData varchar(10), + fldConst varchar(30)) + """ % 
config.tmp + crsr.execute(tabdef) + + inputs = ['one','two','three'] + fldId=2 + for inParam in inputs: + fldId+=1 + sql = "INSERT INTO xx_" + \ + config.tmp + \ + " (fldId,fldConst,fldData) VALUES (%s,'thi%s :may cause? trouble', %s)" + try: + crsr.execute(sql, (fldId,inParam)) + except: + if self.remote: + for message in crsr.messages: + print(message) + else: + conn.printADOerrors() + raise + crsr.execute("SELECT fldData, fldConst FROM xx_" + config.tmp + " WHERE %s=fldID", [fldId]) + rec = crsr.fetchone() + self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"' % (rec[0],inParam)) + self.assertEqual(rec[1], "thi%s :may cause? trouble") + + # now try an operation with a "%s" as part of a literal + sel = "insert into xx_" + config.tmp + " (fldId,fldData) VALUES (%s,'four%sfive')" + params = (20,) + crsr.execute(sel,params) + + #test the .query implementation + assert '(?,' in crsr.query, 'expected:"%s" in "%s"'%('(?,',crsr.query) + #test the .command attribute + assert crsr.command == sel, 'expected:"%s" but found "%s"' % (sel, crsr.command) + + #test the .parameters attribute + if not self.remote: # parameter list will be altered in transit + self.assertEqual(crsr.parameters, params) + #now make sure the data made it + crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=20" % config.tmp) + rec = crsr.fetchone() + self.assertEqual(rec[0], 'four%sfive') + + def testNamedParamstyle(self): + self.helpForceDropOnTblTemp() + conn=self.getConnection() + crsr=conn.cursor() + crsr.paramstyle = 'named' #test nonstandard use of paramstyle + tabdef= """ + CREATE TABLE xx_%s ( + fldId integer NOT NULL, + fldData varchar(10)) + """ % config.tmp + crsr.execute(tabdef) + + inputs = ['four','five','six'] + fldId=10 + for inParam in inputs: + fldId+=1 + try: + crsr.execute("INSERT INTO xx_%s (fldId,fldData) VALUES (:Id,:f_Val)" % config.tmp, + {"f_Val":inParam,'Id':fldId}) + except: + if self.remote: + for message in crsr.messages: + print(message) + else: + conn.printADOerrors() + raise + crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=:Id" % config.tmp, {'Id':fldId}) + rec = crsr.fetchone() + self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"'%(rec[0],inParam)) + # now a test with a ":" as part of a literal + crsr.execute("insert into xx_%s (fldId,fldData) VALUES (:xyz,'six:five')" % config.tmp,{'xyz':30}) + crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp) + rec = crsr.fetchone() + self.assertEqual(rec[0], 'six:five') + + + def testPyformatParamstyle(self): + self.helpForceDropOnTblTemp() + conn=self.getConnection() + crsr=conn.cursor() + crsr.paramstyle = 'pyformat' #test nonstandard use of paramstyle + tabdef= """ + CREATE TABLE xx_%s ( + fldId integer NOT NULL, + fldData varchar(10)) + """ % config.tmp + crsr.execute(tabdef) + + inputs = ['four', 'five', 'six'] + fldId=10 + for inParam in inputs: + fldId+=1 + try: + crsr.execute("INSERT INTO xx_%s (fldId,fldData) VALUES (%%(Id)s,%%(f_Val)s)" % config.tmp, + {"f_Val": inParam, 'Id': fldId}) + except: + if self.remote: + for message in crsr.messages: + print(message) + else: + conn.printADOerrors() + raise + crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=%%(Id)s" % config.tmp, {'Id':fldId}) + rec = crsr.fetchone() + self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"'%(rec[0],inParam)) + # now a test with a "%" as part of a literal + crsr.execute("insert into xx_%s (fldId,fldData) VALUES (%%(xyz)s,'six%%five')" % config.tmp,{'xyz': 30}) + 
crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp) + rec = crsr.fetchone() + self.assertEqual(rec[0], 'six%five') + + def testAutomaticParamstyle(self): + self.helpForceDropOnTblTemp() + conn=self.getConnection() + conn.paramstyle = 'dynamic' #test nonstandard use of paramstyle + crsr=conn.cursor() + tabdef= """ + CREATE TABLE xx_%s ( + fldId integer NOT NULL, + fldData varchar(10), + fldConst varchar(30)) + """ % config.tmp + crsr.execute(tabdef) + inputs = ['one', 'two', 'three'] + fldId=2 + for inParam in inputs: + fldId+=1 + try: + crsr.execute("INSERT INTO xx_" + config.tmp + \ + " (fldId,fldConst,fldData) VALUES (?,'thi%s :may cause? troub:1e', ?)", (fldId,inParam)) + except: + if self.remote: + for message in crsr.messages: + print(message) + else: + conn.printADOerrors() + raise + trouble = 'thi%s :may cause? troub:1e' + crsr.execute("SELECT fldData, fldConst FROM xx_" + config.tmp + " WHERE ?=fldID", [fldId]) + rec = crsr.fetchone() + self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"'%(rec[0],inParam)) + self.assertEqual(rec[1], trouble) + # inputs = [u'four',u'five',u'six'] + fldId=10 + for inParam in inputs: + fldId+=1 + try: + crsr.execute("INSERT INTO xx_%s (fldId,fldData) VALUES (:Id,:f_Val)" % config.tmp, + {"f_Val":inParam,'Id':fldId}) + except: + if self.remote: + for message in crsr.messages: + print(message) + else: + conn.printADOerrors() + raise + crsr.execute("SELECT fldData FROM xx_%s WHERE :Id=fldID" % config.tmp, {'Id':fldId}) + rec = crsr.fetchone() + self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"'%(rec[0],inParam)) + # now a test with a ":" as part of a literal -- and use a prepared query + ppdcmd = "insert into xx_%s (fldId,fldData) VALUES (:xyz,'six:five')" % config.tmp + crsr.prepare(ppdcmd) + crsr.execute(ppdcmd, {'xyz':30}) + crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp) + rec = crsr.fetchone() + self.assertEqual(rec[0], 'six:five') + + def testRollBack(self): + conn = self.getConnection() + crsr = conn.cursor() + assert not crsr.connection.autocommit, 'Unexpected beginning condition' + self.helpCreateAndPopulateTableTemp(crsr) + crsr.connection.commit() # commit the first bunch + + crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) + + selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp + crsr.execute(selectSql) + rs = crsr.fetchall() + assert len(rs) == 1 + self.conn.rollback() + crsr.execute(selectSql) + assert crsr.fetchone() == None, 'cursor.fetchone should return None if a query retrieves no rows' + crsr.execute('SELECT fldData from xx_%s' % config.tmp) + rs = crsr.fetchall() + assert len(rs) == 9, 'the original records should still be present' + self.helpRollbackTblTemp() + + + def testCommit(self): + try: + con2 = self.getAnotherConnection() + except NotImplementedError: + return # should be "SKIP" for ACCESS + assert not con2.autocommit, 'default should be manual commit' + crsr = con2.cursor() + self.helpCreateAndPopulateTableTemp(crsr) + + crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) + con2.commit() + + selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp + crsr.execute(selectSql) + rs = crsr.fetchall() + assert len(rs) == 1 + crsr.close() + con2.close() + conn = self.getConnection() + crsr = self.getCursor() + with conn.cursor() as crsr: + crsr.execute(selectSql) + rs = crsr.fetchall() + assert len(rs) == 1 + assert rs[0][0] == 100 + self.helpRollbackTblTemp() + + + def 
testAutoRollback(self): + try: + con2 = self.getAnotherConnection() + except NotImplementedError: + return # should be "SKIP" for ACCESS + assert not con2.autocommit, 'unexpected beginning condition' + crsr = con2.cursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) + selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp + crsr.execute(selectSql) + rs = crsr.fetchall() + assert len(rs) == 1 + crsr.close() + con2.close() + crsr = self.getCursor() + try: + crsr.execute(selectSql) # closing the connection should have forced rollback + row = crsr.fetchone() + except api.DatabaseError: + row = None # if the entire table disappeared the rollback was perfect and the test passed + assert row == None, 'cursor.fetchone should return None if a query retrieves no rows. Got %s' % repr(row) + self.helpRollbackTblTemp() + + def testAutoCommit(self): + try: + ac_conn = self.getAnotherConnection({'autocommit': True}) + except NotImplementedError: + return # should be "SKIP" for ACCESS + crsr = ac_conn.cursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) + crsr.close() + with self.getCursor() as crsr: + selectSql = 'SELECT fldData from xx_%s' % config.tmp + crsr.execute(selectSql) # closing the connection should _not_ have forced rollback + rs = crsr.fetchall() + assert len(rs) == 10, 'all records should still be present' + ac_conn.close() + self.helpRollbackTblTemp() + + def testSwitchedAutoCommit(self): + try: + ac_conn = self.getAnotherConnection() + except NotImplementedError: + return # should be "SKIP" for ACCESS + ac_conn.autocommit = True + crsr = ac_conn.cursor() + self.helpCreateAndPopulateTableTemp(crsr) + crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) + crsr.close() + conn = self.getConnection() + ac_conn.close() + with self.getCursor() as crsr: + selectSql = 'SELECT fldData from xx_%s' % config.tmp + crsr.execute(selectSql) # closing the connection should _not_ have forced rollback + rs = crsr.fetchall() + assert len(rs) == 10, 'all records should still be present' + self.helpRollbackTblTemp() + + + def testExtendedTypeHandling(self): + class XtendString(str): + pass + class XtendInt(int): + pass + class XtendFloat(float): + pass + xs = XtendString(randomstring(30)) + xi = XtendInt(random.randint(-100, 500)) + xf = XtendFloat(random.random()) + self.helpForceDropOnTblTemp() + conn = self.getConnection() + crsr = conn.cursor() + tabdef = """ + CREATE TABLE xx_%s ( + s VARCHAR(40) NOT NULL, + i INTEGER NOT NULL, + f REAL NOT NULL)""" % config.tmp + crsr.execute(tabdef) + crsr.execute("INSERT INTO xx_%s (s, i, f) VALUES (?, ?, ?)" % config.tmp, (xs, xi, xf)) + crsr.close() + conn = self.getConnection() + with self.getCursor() as crsr: + selectSql = 'SELECT s, i, f from xx_%s' % config.tmp + crsr.execute(selectSql) # closing the connection should _not_ have forced rollback + row = crsr.fetchone() + self.assertEqual(row.s, xs) + self.assertEqual(row.i, xi) + self.assertAlmostEqual(row.f, xf) + self.helpRollbackTblTemp() + + +class TestADOwithSQLServer(CommonDBTests): + def setUp(self): + self.conn = config.dbSqlServerconnect(*config.connStrSQLServer[0], **config.connStrSQLServer[1]) + self.conn.timeout = 30 # turn timeout back up + self.engine = 'MSSQL' + self.db = config.dbSqlServerconnect + self.remote = config.connStrSQLServer[2] + + def tearDown(self): + try: + self.conn.rollback() + except: + pass + try: + 
self.conn.close() + except: + pass + self.conn=None + + def getConnection(self): + return self.conn + + def getAnotherConnection(self, addkeys=None): + keys = dict(config.connStrSQLServer[1]) + if addkeys: + keys.update(addkeys) + return config.dbSqlServerconnect(*config.connStrSQLServer[0], **keys) + + def testVariableReturningStoredProcedure(self): + crsr=self.conn.cursor() + spdef= """ + CREATE PROCEDURE sp_DeleteMeOnlyForTesting + @theInput varchar(50), + @theOtherInput varchar(50), + @theOutput varchar(100) OUTPUT + AS + SET @theOutput=@theInput+@theOtherInput + """ + try: + crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting") + self.conn.commit() + except: #Make sure it is empty + pass + crsr.execute(spdef) + + retvalues=crsr.callproc('sp_DeleteMeOnlyForTesting',('Dodsworth','Anne',' ')) + assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0]) + assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1]) + assert retvalues[2]=='DodsworthAnne','%s is not "DodsworthAnne"'%repr(retvalues[2]) + self.conn.rollback() + + def testMultipleSetReturn(self): + crsr=self.getCursor() + self.helpCreateAndPopulateTableTemp(crsr) + + spdef= """ + CREATE PROCEDURE sp_DeleteMe_OnlyForTesting + AS + SELECT fldData FROM xx_%s ORDER BY fldData ASC + SELECT fldData From xx_%s where fldData = -9999 + SELECT fldData FROM xx_%s ORDER BY fldData DESC + """ % (config.tmp, config.tmp, config.tmp) + try: + crsr.execute("DROP PROCEDURE sp_DeleteMe_OnlyForTesting") + self.conn.commit() + except: #Make sure it is empty + pass + crsr.execute(spdef) + + retvalues=crsr.callproc('sp_DeleteMe_OnlyForTesting') + row=crsr.fetchone() + self.assertEqual(row[0], 0) + assert crsr.nextset() == True, 'Operation should succeed' + assert not crsr.fetchall(), 'Should be an empty second set' + assert crsr.nextset() == True, 'third set should be present' + rowdesc=crsr.fetchall() + self.assertEqual(rowdesc[0][0],8) + assert crsr.nextset() == None,'No more return sets, should return None' + + self.helpRollbackTblTemp() + + + def testDatetimeProcedureParameter(self): + crsr=self.conn.cursor() + spdef= """ + CREATE PROCEDURE sp_DeleteMeOnlyForTesting + @theInput DATETIME, + @theOtherInput varchar(50), + @theOutput varchar(100) OUTPUT + AS + SET @theOutput = CONVERT(CHARACTER(20), @theInput, 0) + @theOtherInput + """ + try: + crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting") + self.conn.commit() + except: #Make sure it is empty + pass + crsr.execute(spdef) + + result = crsr.callproc('sp_DeleteMeOnlyForTesting', [adodbapi.Timestamp(2014,12,25,0,1,0), 'Beep', ' ' * 30]) + + assert result[2] == 'Dec 25 2014 12:01AM Beep', 'value was="%s"' % result[2] + self.conn.rollback() + + def testIncorrectStoredProcedureParameter(self): + crsr=self.conn.cursor() + spdef= """ + CREATE PROCEDURE sp_DeleteMeOnlyForTesting + @theInput DATETIME, + @theOtherInput varchar(50), + @theOutput varchar(100) OUTPUT + AS + SET @theOutput = CONVERT(CHARACTER(20), @theInput) + @theOtherInput + """ + try: + crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting") + self.conn.commit() + except: #Make sure it is empty + pass + crsr.execute(spdef) + + # calling the sproc with a string for the first parameter where a DateTime is expected + result = tryconnection.try_operation_with_expected_exception( + (api.DataError,api.DatabaseError), + crsr.callproc, + ['sp_DeleteMeOnlyForTesting'], + {'parameters': ['this is wrong', 'Anne', 'not Alice']} + ) + if result[0]: # the expected exception was raised + assert '@theInput' in 
str(result[1]) or 'DatabaseError' in str(result), \ + 'Identifies the wrong erroneous parameter' + else: + assert result[0], result[1] # incorrect or no exception + self.conn.rollback() + +class TestADOwithAccessDB(CommonDBTests): + def setUp(self): + self.conn = config.dbAccessconnect(*config.connStrAccess[0], **config.connStrAccess[1]) + self.conn.timeout = 30 # turn timeout back up + self.engine = 'ACCESS' + self.db = config.dbAccessconnect + self.remote = config.connStrAccess[2] + + def tearDown(self): + try: + self.conn.rollback() + except: + pass + try: + self.conn.close() + except: + pass + self.conn=None + + def getConnection(self): + return self.conn + + def getAnotherConnection(self, addkeys=None): + raise NotImplementedError('Jet cannot use a second connection to the database') + + def testOkConnect(self): + c = self.db(*config.connStrAccess[0], **config.connStrAccess[1]) + assert c != None + c.close() + +class TestADOwithMySql(CommonDBTests): + def setUp(self): + self.conn = config.dbMySqlconnect(*config.connStrMySql[0], **config.connStrMySql[1]) + self.conn.timeout = 30 # turn timeout back up + self.engine = 'MySQL' + self.db = config.dbMySqlconnect + self.remote = config.connStrMySql[2] + + def tearDown(self): + try: + self.conn.rollback() + except: + pass + try: + self.conn.close() + except: + pass + self.conn=None + + def getConnection(self): + return self.conn + + def getAnotherConnection(self, addkeys=None): + keys = dict(config.connStrMySql[1]) + if addkeys: + keys.update(addkeys) + return config.dbMySqlconnect(*config.connStrMySql[0], **keys) + + def testOkConnect(self): + c = self.db(*config.connStrMySql[0], **config.connStrMySql[1]) + assert c != None + + # def testStoredProcedure(self): + # crsr=self.conn.cursor() + # try: + # crsr.execute("DROP PROCEDURE DeleteMeOnlyForTesting") + # self.conn.commit() + # except: #Make sure it is empty + # pass + # spdef= """ + # DELIMITER $$ + # CREATE PROCEDURE DeleteMeOnlyForTesting (onein CHAR(10), twoin CHAR(10), OUT theout CHAR(20)) + # DETERMINISTIC + # BEGIN + # SET theout = onein //|| twoin; + # /* (SELECT 'a small string' as result; */ + # END $$ + # """ + # + # crsr.execute(spdef) + # + # retvalues=crsr.callproc('DeleteMeOnlyForTesting',('Dodsworth','Anne',' ')) + # print 'return value (mysql)=',repr(crsr.returnValue) ### + # assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0]) + # assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1]) + # assert retvalues[2]=='DodsworthAnne','%s is not "DodsworthAnne"'%repr(retvalues[2]) + # + # try: + # crsr.execute("DROP PROCEDURE, DeleteMeOnlyForTesting") + # self.conn.commit() + # except: #Make sure it is empty + # pass + +class TestADOwithPostgres(CommonDBTests): + def setUp(self): + self.conn = config.dbPostgresConnect(*config.connStrPostgres[0], **config.connStrPostgres[1]) + self.conn.timeout = 30 # turn timeout back up + self.engine = 'PostgreSQL' + self.db = config.dbPostgresConnect + self.remote = config.connStrPostgres[2] + + def tearDown(self): + try: + self.conn.rollback() + except: + pass + try: + self.conn.close() + except: + pass + self.conn=None + + def getConnection(self): + return self.conn + + def getAnotherConnection(self, addkeys=None): + keys = dict(config.connStrPostgres[1]) + if addkeys: + keys.update(addkeys) + return config.dbPostgresConnect(*config.connStrPostgres[0], **keys) + + def testOkConnect(self): + c = self.db(*config.connStrPostgres[0], **config.connStrPostgres[1]) + assert c != None + + # def 
testStoredProcedure(self): + # crsr=self.conn.cursor() + # spdef= """ + # CREATE OR REPLACE FUNCTION DeleteMeOnlyForTesting (text, text) + # RETURNS text AS $funk$ + # BEGIN + # RETURN $1 || $2; + # END; + # $funk$ + # LANGUAGE SQL; + # """ + # + # crsr.execute(spdef) + # retvalues = crsr.callproc('DeleteMeOnlyForTesting',('Dodsworth','Anne',' ')) + # ### print 'return value (pg)=',repr(crsr.returnValue) ### + # assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0]) + # assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1]) + # assert retvalues[2]=='Dodsworth Anne','%s is not "Dodsworth Anne"'%repr(retvalues[2]) + # self.conn.rollback() + # try: + # crsr.execute("DROP PROCEDURE, DeleteMeOnlyForTesting") + # self.conn.commit() + # except: #Make sure it is empty + # pass + +class TimeConverterInterfaceTest(unittest.TestCase): + def testIDate(self): + assert self.tc.Date(1990,2,2) + + def testITime(self): + assert self.tc.Time(13,2,2) + + def testITimestamp(self): + assert self.tc.Timestamp(1990,2,2,13,2,1) + + def testIDateObjectFromCOMDate(self): + assert self.tc.DateObjectFromCOMDate(37435.7604282) + + def testICOMDate(self): + assert hasattr(self.tc,'COMDate') + + def testExactDate(self): + d=self.tc.Date(1994,11,15) + comDate=self.tc.COMDate(d) + correct=34653.0 + assert comDate == correct,comDate + + def testExactTimestamp(self): + d=self.tc.Timestamp(1994,11,15,12,0,0) + comDate=self.tc.COMDate(d) + correct=34653.5 + self.assertEqual( comDate ,correct) + + d=self.tc.Timestamp(2003,5,6,14,15,17) + comDate=self.tc.COMDate(d) + correct=37747.593946759262 + self.assertEqual( comDate ,correct) + + def testIsoFormat(self): + d=self.tc.Timestamp(1994,11,15,12,3,10) + iso=self.tc.DateObjectToIsoFormatString(d) + self.assertEqual(str(iso[:19]) , '1994-11-15 12:03:10') + + dt=self.tc.Date(2003,5,2) + iso=self.tc.DateObjectToIsoFormatString(dt) + self.assertEqual(str(iso[:10]), '2003-05-02') + +if config.doMxDateTimeTest: + import mx.DateTime +class TestMXDateTimeConverter(TimeConverterInterfaceTest): + def setUp(self): + self.tc = api.mxDateTimeConverter() + + def testCOMDate(self): + t=mx.DateTime.DateTime(2002,6,28,18,15,2) + cmd=self.tc.COMDate(t) + assert cmd == t.COMDate() + + def testDateObjectFromCOMDate(self): + cmd=self.tc.DateObjectFromCOMDate(37435.7604282) + t=mx.DateTime.DateTime(2002,6,28,18,15,0) + t2=mx.DateTime.DateTime(2002,6,28,18,15,2) + assert t2>cmd>t + + def testDate(self): + assert mx.DateTime.Date(1980,11,4)==self.tc.Date(1980,11,4) + + def testTime(self): + assert mx.DateTime.Time(13,11,4)==self.tc.Time(13,11,4) + + def testTimestamp(self): + t=mx.DateTime.DateTime(2002,6,28,18,15,1) + obj=self.tc.Timestamp(2002,6,28,18,15,1) + assert t == obj + +import time +class TestPythonTimeConverter(TimeConverterInterfaceTest): + def setUp(self): + self.tc=api.pythonTimeConverter() + + def testCOMDate(self): + mk = time.mktime((2002,6,28,18,15,1, 4,31+28+31+30+31+28,-1)) + t=time.localtime(mk) + # Fri, 28 Jun 2002 18:15:01 +0000 + cmd=self.tc.COMDate(t) + assert abs(cmd - 37435.7604282) < 1.0/24,"%f more than an hour wrong" % cmd + + def testDateObjectFromCOMDate(self): + cmd=self.tc.DateObjectFromCOMDate(37435.7604282) + t1=time.gmtime(time.mktime((2002,6,28,0,14,1, 4,31+28+31+30+31+28,-1))) + #there are errors in the implementation of gmtime which we ignore + t2=time.gmtime(time.mktime((2002,6,29,12,14,2, 4,31+28+31+30+31+28,-1))) + assert t1<cmd<t2, '"%s" should be about 2002-6-28 12:15:01'%repr(cmd) + + def testDate(self): + 
t1=time.mktime((2002,6,28,18,15,1, 4,31+28+31+30+31+30,0)) + t2=time.mktime((2002,6,30,18,15,1, 4,31+28+31+30+31+28,0)) + obj=self.tc.Date(2002,6,29) + assert t1< time.mktime(obj)<t2,obj + + def testTime(self): + self.assertEqual( self.tc.Time(18,15,2),time.gmtime(18*60*60+15*60+2)) + + def testTimestamp(self): + t1=time.localtime(time.mktime((2002,6,28,18,14,1, 4,31+28+31+30+31+28,-1))) + t2=time.localtime(time.mktime((2002,6,28,18,16,1, 4,31+28+31+30+31+28,-1))) + obj=self.tc.Timestamp(2002,6,28,18,15,2) + assert t1< obj <t2,obj + +class TestPythonDateTimeConverter(TimeConverterInterfaceTest): + def setUp(self): + self.tc = api.pythonDateTimeConverter() + + def testCOMDate(self): + t=datetime.datetime( 2002,6,28,18,15,1) + # Fri, 28 Jun 2002 18:15:01 +0000 + cmd=self.tc.COMDate(t) + assert abs(cmd - 37435.7604282) < 1.0/24,"more than an hour wrong" + + def testDateObjectFromCOMDate(self): + cmd = self.tc.DateObjectFromCOMDate(37435.7604282) + t1 = datetime.datetime(2002,6,28,18,14,1) + t2 = datetime.datetime(2002,6,28,18,16,1) + assert t1 < cmd < t2, cmd + + tx = datetime.datetime(2002,6,28,18,14,1,900000) # testing that microseconds don't become milliseconds + c1 = self.tc.DateObjectFromCOMDate(self.tc.COMDate(tx)) + assert t1 < c1 < t2, c1 + + def testDate(self): + t1=datetime.date(2002,6,28) + t2=datetime.date(2002,6,30) + obj=self.tc.Date(2002,6,29) + assert t1< obj <t2,obj + + def testTime(self): + self.assertEqual( self.tc.Time(18,15,2).isoformat()[:8],'18:15:02') + + def testTimestamp(self): + t1=datetime.datetime(2002,6,28,18,14,1) + t2=datetime.datetime(2002,6,28,18,16,1) + obj=self.tc.Timestamp(2002,6,28,18,15,2) + assert t1< obj <t2,obj + +suites=[] +suites.append( unittest.makeSuite(TestPythonDateTimeConverter,'test')) +if config.doMxDateTimeTest: + suites.append( unittest.makeSuite(TestMXDateTimeConverter,'test')) +if config.doTimeTest: + suites.append( unittest.makeSuite(TestPythonTimeConverter,'test')) + +if config.doAccessTest: + suites.append( unittest.makeSuite(TestADOwithAccessDB,'test')) +if config.doSqlServerTest: + suites.append( unittest.makeSuite(TestADOwithSQLServer,'test')) +if config.doMySqlTest: + suites.append( unittest.makeSuite(TestADOwithMySql,'test')) +if config.doPostgresTest: + suites.append( unittest.makeSuite(TestADOwithPostgres,'test')) + +class cleanup_manager(object): + def __enter__(self): + pass + def __exit__(self, exc_type, exc_val, exc_tb): + config.cleanup(config.testfolder, config.mdb_name) + +suite=unittest.TestSuite(suites) +if __name__ == '__main__': + mysuite = copy.deepcopy(suite) + with cleanup_manager(): + defaultDateConverter = adodbapi.dateconverter + print(__doc__) + print("Default Date Converter is %s" %(defaultDateConverter,)) + dateconverter = defaultDateConverter + tag = 'datetime' + unittest.TextTestRunner().run(mysuite) + + if config.iterateOverTimeTests: + for test, dateconverter, tag in ( + (config.doTimeTest,api.pythonTimeConverter, 'pythontime'), + (config.doMxDateTimeTest, api.mxDateTimeConverter, 'mx')): + if test: + mysuite = copy.deepcopy(suite) # work around a side effect of unittest.TextTestRunner + adodbapi.adodbapi.dateconverter = dateconverter() + print("Changed dateconverter to ") + print(adodbapi.adodbapi.dateconverter) + unittest.TextTestRunner().run(mysuite) diff --git a/venv/Lib/site-packages/adodbapi/test/adodbapitestconfig.py b/venv/Lib/site-packages/adodbapi/test/adodbapitestconfig.py new file mode 100644 index 00000000..1287e0b4 --- /dev/null +++ 
b/venv/Lib/site-packages/adodbapi/test/adodbapitestconfig.py @@ -0,0 +1,176 @@ +# Configure this to _YOUR_ environment in order to run the testcases. +"testADOdbapiConfig.py v 2.6.2.B00" + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# # +# # TESTERS: +# # +# # You will need to make numerous modifications to this file +# # to adapt it to your own testing environment. +# # +# # Skip down to the next "# #" line -- +# # -- the things you need to change are below it. +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +import platform +import sys +import random + +import is64bit +import setuptestframework +import tryconnection + +print("\nPython", sys.version) +node = platform.node() +try: print('node=%s, is64bit.os()= %s, is64bit.Python()= %s' % (node, is64bit.os(), is64bit.Python())) +except: pass + +if '--help' in sys.argv: + print("""Valid command-line switches are: + --package - create a temporary test package, run 2to3 if needed. + --all - run all possible tests + --time - loop over time format tests (including mxdatetime if present) + --nojet - do not test against an ACCESS database file + --mssql - test against Microsoft SQL server + --pg - test against PostgreSQL + --mysql - test against MariaDB + --remote= - test unsing remote server at= (experimental) + """) + exit() +try: + onWindows = bool(sys.getwindowsversion()) # seems to work on all versions of Python +except: + onWindows = False + +# create a random name for temporary table names +_alphabet = "PYFGCRLAOEUIDHTNSQJKXBMWVZ" # why, yes, I do happen to use a dvorak keyboard +tmp = ''.join([random.choice(_alphabet) for x in range(9)]) +mdb_name = 'xx_' + tmp + '.mdb' # generate a non-colliding name for the temporary .mdb +testfolder = setuptestframework.maketemp() + +if '--package' in sys.argv: + # create a new adodbapi module -- running 2to3 if needed. + pth = setuptestframework.makeadopackage(testfolder) +else: + # use the adodbapi module in which this file appears + pth = setuptestframework.find_ado_path() +if pth not in sys.path: + # look here _first_ to find modules + sys.path.insert(1,pth) + +proxy_host = None +for arg in sys.argv: + if arg.startswith('--remote='): + proxy_host = arg.split("=")[1] + import adodbapi.remote as remote + break + + +# function to clean up the temporary folder -- calling program must run this function before exit. +cleanup = setuptestframework.getcleanupfunction() +try: + import adodbapi # will (hopefully) be imported using the "pth" discovered above +except SyntaxError: + print('\n* * * Are you trying to run Python2 code using Python3? 
Re-run this test using the "--package" switch.') + sys.exit(11) +try: + print(adodbapi.version) # show version +except: + print('"adodbapi.version" not present or not working.') +print(__doc__) + +verbose = False +for a in sys.argv: + if a.startswith('--verbose'): + arg = True + try: arg = int(a.split("=")[1]) + except IndexError: pass + adodbapi.adodbapi.verbose = arg + verbose = arg + +doAllTests = '--all' in sys.argv +doAccessTest = not ('--nojet' in sys.argv) +doSqlServerTest = '--mssql' in sys.argv or doAllTests +doMySqlTest = '--mysql' in sys.argv or doAllTests +doPostgresTest = '--pg' in sys.argv or doAllTests +iterateOverTimeTests = ('--time' in sys.argv or doAllTests) and onWindows + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# # start your environment setup here v v v +SQL_HOST_NODE = 'testsql.2txt.us,1430' + +try: #If mx extensions are installed, use mxDateTime + import mx.DateTime + doMxDateTimeTest=True +except: + doMxDateTimeTest=False #Requires eGenixMXExtensions + +doTimeTest = True # obsolete python time format + +if doAccessTest: + if proxy_host: # determine the (probably remote) database file folder + c = {'macro_find_temp_test_path': ['mdb', mdb_name], + 'proxy_host': proxy_host } + else: + c = {'mdb': setuptestframework.makemdb(testfolder, mdb_name)} + + # macro definition for keyword "provider" using macro "is64bit" -- see documentation + # is64bit will return true for 64 bit versions of Python, so the macro will select the ACE provider + # (If running a remote ADO service, this will test the 64-bitedness of the ADO server.) + c['macro_is64bit'] = ['provider', + "Microsoft.ACE.OLEDB.12.0", # 64 bit provider + "Microsoft.Jet.OLEDB.4.0"] # 32 bit provider + connStrAccess = "Provider=%(provider)s;Data Source=%(mdb)s" # ;Mode=ReadWrite;Persist Security Info=False;Jet OLEDB:Bypass UserInfo Validation=True" + print(' ...Testing ACCESS connection to {} file...'.format(c.get('mdb', 'remote .mdb'))) + doAccessTest, connStrAccess, dbAccessconnect = tryconnection.try_connection(verbose, connStrAccess, 10, **c) + +if doSqlServerTest: + c = {'host': SQL_HOST_NODE, # name of computer with SQL Server + 'database': "adotest", + 'user' : 'adotestuser', # None implies Windows security + 'password' : "Sq1234567", + # macro definition for keyword "security" using macro "auto_security" + 'macro_auto_security' : 'security', + 'provider' : 'MSOLEDBSQL; MARS Connection=True' + } + if proxy_host: + c['proxy_host'] = proxy_host + connStr = "Provider=%(provider)s; Initial Catalog=%(database)s; Data Source=%(host)s; %(security)s;" + print(' ...Testing MS-SQL login to {}...'.format(c['host'])) + doSqlServerTest, connStrSQLServer, dbSqlServerconnect = tryconnection.try_connection(verbose, connStr, 30, **c) + +if doMySqlTest: + c = {'host' : "testmysql.2txt.us", + 'database' : 'adodbapitest', + 'user' : 'adotest', + 'password' : '12345678', + 'port' : '3330', # note the nonstandard port for obfuscation + 'driver' : "MySQL ODBC 5.1 Driver"} # or _driver="MySQL ODBC 3.51 Driver + if proxy_host: + c['proxy_host'] = proxy_host + c['macro_is64bit'] = ['provider', 'Provider=MSDASQL;'] # turn on the 64 bit ODBC adapter only if needed + cs = '%(provider)sDriver={%(driver)s};Server=%(host)s;Port=3330;' + \ + 'Database=%(database)s;user=%(user)s;password=%(password)s;Option=3;' + print(' ...Testing MySql login to {}...'.format(c['host'])) + doMySqlTest, connStrMySql, dbMySqlconnect = tryconnection.try_connection(verbose, cs, 5, **c) + + + +if 
doPostgresTest: + _computername = "testpg.2txt.us" + _databasename='adotest' + _username = 'adotestuser' + _password = '12345678' + kws = {'timeout': 4} + kws['macro_is64bit'] = ['prov_drv', + 'Provider=MSDASQL;Driver={PostgreSQL Unicode(x64)}', + 'Driver=PostgreSQL Unicode'] + # get driver from http://www.postgresql.org/ftp/odbc/versions/ + # test using positional and keyword arguments (bad example for real code) + if proxy_host: + kws['proxy_host'] = proxy_host + print(' ...Testing PostgreSQL login to {}...'.format(_computername)) + doPostgresTest, connStrPostgres, dbPostgresConnect = tryconnection.try_connection(verbose, + '%(prov_drv)s;Server=%(host)s;Database=%(database)s;uid=%(user)s;pwd=%(password)s;port=5430;', # note nonstandard port + _username, _password, _computername, _databasename, **kws) + +assert doAccessTest or doSqlServerTest or doMySqlTest or doPostgresTest, 'No database engine found for testing' diff --git a/venv/Lib/site-packages/adodbapi/test/dbapi20.py b/venv/Lib/site-packages/adodbapi/test/dbapi20.py new file mode 100644 index 00000000..2ec70290 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/test/dbapi20.py @@ -0,0 +1,904 @@ +#!/usr/bin/env python +''' Python DB API 2.0 driver compliance unit test suite. + + This software is Public Domain and may be used without restrictions. + + "Now we have booze and barflies entering the discussion, plus rumours of + DBAs on drugs... and I won't tell you what flashes through my mind each + time I read the subject line with 'Anal Compliance' in it. All around + this is turning out to be a thoroughly unwholesome unit test." + + -- Ian Bicking +''' + +__version__ = '$Revision: 1.15.0 $'[11:-2] +__author__ = 'Stuart Bishop <stuart@stuartbishop.net>' + +import unittest +import time +import sys + +if sys.version[0] >= '3': #python 3.x + _BaseException = Exception + def _failUnless(self, expr, msg=None): + self.assertTrue(expr, msg) +else: #python 2.x + from exceptions import Exception as _BaseException + def _failUnless(self, expr, msg=None): + self.failUnless(expr, msg) ## deprecated since Python 2.6 + +# set this to "True" to follow API 2.0 to the letter +TEST_FOR_NON_IDEMPOTENT_CLOSE = False + +# Revision 1.15 2019/11/22 00:50:00 kf7xm +# Make Turn off IDEMPOTENT_CLOSE a proper skipTest + +# Revision 1.14 2013/05/20 11:02:05 kf7xm +# Add a literal string to the format insertion test to catch trivial re-format algorithms + +# Revision 1.13 2013/05/08 14:31:50 kf7xm +# Quick switch to Turn off IDEMPOTENT_CLOSE test. Also: Silence teardown failure + +# Revision 1.12 2009/02/06 03:35:11 kf7xm +# Tested okay with Python 3.0, includes last minute patches from Mark H. +# +# Revision 1.1.1.1.2.1 2008/09/20 19:54:59 rupole +# Include latest changes from main branch +# Updates for py3k +# +# Revision 1.11 2005/01/02 02:41:01 zenzen +# Update author email address +# +# Revision 1.10 2003/10/09 03:14:14 zenzen +# Add test for DB API 2.0 optional extension, where database exceptions +# are exposed as attributes on the Connection object. +# +# Revision 1.9 2003/08/13 01:16:36 zenzen +# Minor tweak from Stefan Fleiter +# +# Revision 1.8 2003/04/10 00:13:25 zenzen +# Changes, as per suggestions by M.-A. 
Lemburg +# - Add a table prefix, to ensure namespace collisions can always be avoided +# +# Revision 1.7 2003/02/26 23:33:37 zenzen +# Break out DDL into helper functions, as per request by David Rushby +# +# Revision 1.6 2003/02/21 03:04:33 zenzen +# Stuff from Henrik Ekelund: +# added test_None +# added test_nextset & hooks +# +# Revision 1.5 2003/02/17 22:08:43 zenzen +# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize +# defaults to 1 & generic cursor.callproc test added +# +# Revision 1.4 2003/02/15 00:16:33 zenzen +# Changes, as per suggestions and bug reports by M.-A. Lemburg, +# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar +# - Class renamed +# - Now a subclass of TestCase, to avoid requiring the driver stub +# to use multiple inheritance +# - Reversed the polarity of buggy test in test_description +# - Test exception heirarchy correctly +# - self.populate is now self._populate(), so if a driver stub +# overrides self.ddl1 this change propogates +# - VARCHAR columns now have a width, which will hopefully make the +# DDL even more portible (this will be reversed if it causes more problems) +# - cursor.rowcount being checked after various execute and fetchXXX methods +# - Check for fetchall and fetchmany returning empty lists after results +# are exhausted (already checking for empty lists if select retrieved +# nothing +# - Fix bugs in test_setoutputsize_basic and test_setinputsizes +# +def str2bytes(sval): + if sys.version_info < (3,0) and isinstance(sval, str): + sval = sval.decode("latin1") + return sval.encode("latin1") #python 3 make unicode into bytes + +class DatabaseAPI20Test(unittest.TestCase): + ''' Test a database self.driver for DB API 2.0 compatibility. + This implementation tests Gadfly, but the TestCase + is structured so that other self.drivers can subclass this + test case to ensure compiliance with the DB-API. It is + expected that this TestCase may be expanded in the future + if ambiguities or edge conditions are discovered. + + The 'Optional Extensions' are not yet being tested. + + self.drivers should subclass this test, overriding setUp, tearDown, + self.driver, connect_args and connect_kw_args. Class specification + should be as follows: + + import dbapi20 + class mytest(dbapi20.DatabaseAPI20Test): + [...] + + Don't 'import DatabaseAPI20Test from dbapi20', or you will + confuse the unit tester - just 'import dbapi20'. + ''' + + # The self.driver module. This should be the module where the 'connect' + # method is to be found + driver = None + connect_args = () # List of arguments to pass to connect + connect_kw_args = {} # Keyword arguments for connect + table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables + + ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix + ddl2 = 'create table %sbarflys (name varchar(20), drink varchar(30))' % table_prefix + xddl1 = 'drop table %sbooze' % table_prefix + xddl2 = 'drop table %sbarflys' % table_prefix + + lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase + + # Some drivers may need to override these helpers, for example adding + # a 'commit' after the execute. + def executeDDL1(self,cursor): + cursor.execute(self.ddl1) + + def executeDDL2(self,cursor): + cursor.execute(self.ddl2) + + def setUp(self): + ''' self.drivers should override this method to perform required setup + if any is necessary, such as creating the database. 
+ ''' + pass + + def tearDown(self): + ''' self.drivers should override this method to perform required cleanup + if any is necessary, such as deleting the test database. + The default drops the tables that may be created. + ''' + try: + con = self._connect() + try: + cur = con.cursor() + for ddl in (self.xddl1,self.xddl2): + try: + cur.execute(ddl) + con.commit() + except self.driver.Error: + # Assume table didn't exist. Other tests will check if + # execute is busted. + pass + finally: + con.close() + except _BaseException: + pass + + def _connect(self): + try: + r = self.driver.connect( + *self.connect_args,**self.connect_kw_args + ) + except AttributeError: + self.fail("No connect method found in self.driver module") + return r + + def test_connect(self): + con = self._connect() + con.close() + + def test_apilevel(self): + try: + # Must exist + apilevel = self.driver.apilevel + # Must equal 2.0 + self.assertEqual(apilevel,'2.0') + except AttributeError: + self.fail("Driver doesn't define apilevel") + + def test_threadsafety(self): + try: + # Must exist + threadsafety = self.driver.threadsafety + # Must be a valid value + _failUnless(self, threadsafety in (0,1,2,3)) + except AttributeError: + self.fail("Driver doesn't define threadsafety") + + def test_paramstyle(self): + try: + # Must exist + paramstyle = self.driver.paramstyle + # Must be a valid value + _failUnless(self, paramstyle in ( + 'qmark','numeric','named','format','pyformat' + )) + except AttributeError: + self.fail("Driver doesn't define paramstyle") + + def test_Exceptions(self): + # Make sure required exceptions exist, and are in the + # defined heirarchy. + if sys.version[0] == '3': #under Python 3 StardardError no longer exists + self.assertTrue(issubclass(self.driver.Warning,Exception)) + self.assertTrue(issubclass(self.driver.Error,Exception)) + else: + self.failUnless(issubclass(self.driver.Warning,Exception)) + self.failUnless(issubclass(self.driver.Error,Exception)) + + _failUnless(self, + issubclass(self.driver.InterfaceError,self.driver.Error) + ) + _failUnless(self, + issubclass(self.driver.DatabaseError,self.driver.Error) + ) + _failUnless(self, + issubclass(self.driver.OperationalError,self.driver.Error) + ) + _failUnless(self, + issubclass(self.driver.IntegrityError,self.driver.Error) + ) + _failUnless(self, + issubclass(self.driver.InternalError,self.driver.Error) + ) + _failUnless(self, + issubclass(self.driver.ProgrammingError,self.driver.Error) + ) + _failUnless(self, + issubclass(self.driver.NotSupportedError,self.driver.Error) + ) + + def test_ExceptionsAsConnectionAttributes(self): + # OPTIONAL EXTENSION + # Test for the optional DB API 2.0 extension, where the exceptions + # are exposed as attributes on the Connection object + # I figure this optional extension will be implemented by any + # driver author who is using this test suite, so it is enabled + # by default. 
+ con = self._connect() + drv = self.driver + _failUnless(self,con.Warning is drv.Warning) + _failUnless(self,con.Error is drv.Error) + _failUnless(self,con.InterfaceError is drv.InterfaceError) + _failUnless(self,con.DatabaseError is drv.DatabaseError) + _failUnless(self,con.OperationalError is drv.OperationalError) + _failUnless(self,con.IntegrityError is drv.IntegrityError) + _failUnless(self,con.InternalError is drv.InternalError) + _failUnless(self,con.ProgrammingError is drv.ProgrammingError) + _failUnless(self,con.NotSupportedError is drv.NotSupportedError) + + + def test_commit(self): + con = self._connect() + try: + # Commit must work, even if it doesn't do anything + con.commit() + finally: + con.close() + + def test_rollback(self): + con = self._connect() + # If rollback is defined, it should either work or throw + # the documented exception + if hasattr(con,'rollback'): + try: + con.rollback() + except self.driver.NotSupportedError: + pass + + def test_cursor(self): + con = self._connect() + try: + cur = con.cursor() + finally: + con.close() + + def test_cursor_isolation(self): + con = self._connect() + try: + # Make sure cursors created from the same connection have + # the documented transaction isolation level + cur1 = con.cursor() + cur2 = con.cursor() + self.executeDDL1(cur1) + cur1.execute("insert into %sbooze values ('Victoria Bitter')" % ( + self.table_prefix + )) + cur2.execute("select name from %sbooze" % self.table_prefix) + booze = cur2.fetchall() + self.assertEqual(len(booze),1) + self.assertEqual(len(booze[0]),1) + self.assertEqual(booze[0][0],'Victoria Bitter') + finally: + con.close() + + def test_description(self): + con = self._connect() + try: + cur = con.cursor() + self.executeDDL1(cur) + self.assertEqual(cur.description,None, + 'cursor.description should be none after executing a ' + 'statement that can return no rows (such as DDL)' + ) + cur.execute('select name from %sbooze' % self.table_prefix) + self.assertEqual(len(cur.description),1, + 'cursor.description describes too many columns' + ) + self.assertEqual(len(cur.description[0]),7, + 'cursor.description[x] tuples must have 7 elements' + ) + self.assertEqual(cur.description[0][0].lower(),'name', + 'cursor.description[x][0] must return column name' + ) + self.assertEqual(cur.description[0][1],self.driver.STRING, + 'cursor.description[x][1] must return column type. Got %r' + % cur.description[0][1] + ) + + # Make sure self.description gets reset + self.executeDDL2(cur) + self.assertEqual(cur.description,None, + 'cursor.description not being set to None when executing ' + 'no-result statements (eg. 
DDL)' + ) + finally: + con.close() + + def test_rowcount(self): + con = self._connect() + try: + cur = con.cursor() + self.executeDDL1(cur) + _failUnless(self,cur.rowcount in (-1,0), # Bug #543885 + 'cursor.rowcount should be -1 or 0 after executing no-result ' + 'statements' + ) + cur.execute("insert into %sbooze values ('Victoria Bitter')" % ( + self.table_prefix + )) + _failUnless(self,cur.rowcount in (-1,1), + 'cursor.rowcount should == number or rows inserted, or ' + 'set to -1 after executing an insert statement' + ) + cur.execute("select name from %sbooze" % self.table_prefix) + _failUnless(self,cur.rowcount in (-1,1), + 'cursor.rowcount should == number of rows returned, or ' + 'set to -1 after executing a select statement' + ) + self.executeDDL2(cur) + self.assertEqual(cur.rowcount,-1, + 'cursor.rowcount not being reset to -1 after executing ' + 'no-result statements' + ) + finally: + con.close() + + lower_func = 'lower' + def test_callproc(self): + con = self._connect() + try: + cur = con.cursor() + if self.lower_func and hasattr(cur,'callproc'): + r = cur.callproc(self.lower_func,('FOO',)) + self.assertEqual(len(r),1) + self.assertEqual(r[0],'FOO') + r = cur.fetchall() + self.assertEqual(len(r),1,'callproc produced no result set') + self.assertEqual(len(r[0]),1, + 'callproc produced invalid result set' + ) + self.assertEqual(r[0][0],'foo', + 'callproc produced invalid results' + ) + finally: + con.close() + + def test_close(self): + con = self._connect() + try: + cur = con.cursor() + finally: + con.close() + + # cursor.execute should raise an Error if called after connection + # closed + self.assertRaises(self.driver.Error,self.executeDDL1,cur) + + # connection.commit should raise an Error if called after connection' + # closed.' + self.assertRaises(self.driver.Error,con.commit) + + # connection.close should raise an Error if called more than once + #!!! reasonable persons differ about the usefulness of this test and this feature !!! + if TEST_FOR_NON_IDEMPOTENT_CLOSE: + self.assertRaises(self.driver.Error, con.close) + else: + self.skipTest("Non-idempotent close is considered a bad thing by some people.") + + def test_execute(self): + con = self._connect() + try: + cur = con.cursor() + self._paraminsert(cur) + finally: + con.close() + + def _paraminsert(self,cur): + self.executeDDL2(cur) + cur.execute("insert into %sbarflys values ('Victoria Bitter', 'thi%%s :may ca%%(u)se? troub:1e')" % ( + self.table_prefix + )) + _failUnless(self,cur.rowcount in (-1,1)) + + if self.driver.paramstyle == 'qmark': + cur.execute( + "insert into %sbarflys values (?, 'thi%%s :may ca%%(u)se? troub:1e')" % self.table_prefix, + ("Cooper's",) + ) + elif self.driver.paramstyle == 'numeric': + cur.execute( + "insert into %sbarflys values (:1, 'thi%%s :may ca%%(u)se? troub:1e')" % self.table_prefix, + ("Cooper's",) + ) + elif self.driver.paramstyle == 'named': + cur.execute( + "insert into %sbarflys values (:beer, 'thi%%s :may ca%%(u)se? troub:1e')" % self.table_prefix, + {'beer':"Cooper's"} + ) + elif self.driver.paramstyle == 'format': + cur.execute( + "insert into %sbarflys values (%%s, 'thi%%s :may ca%%(u)se? troub:1e')" % self.table_prefix, + ("Cooper's",) + ) + elif self.driver.paramstyle == 'pyformat': + cur.execute( + "insert into %sbarflys values (%%(beer)s, 'thi%%s :may ca%%(u)se? 
troub:1e')" % self.table_prefix, + {'beer':"Cooper's"} + ) + else: + self.fail('Invalid paramstyle') + _failUnless(self,cur.rowcount in (-1,1)) + + cur.execute('select name, drink from %sbarflys' % self.table_prefix) + res = cur.fetchall() + self.assertEqual(len(res),2,'cursor.fetchall returned too few rows') + beers = [res[0][0],res[1][0]] + beers.sort() + self.assertEqual(beers[0],"Cooper's", + 'cursor.fetchall retrieved incorrect data, or data inserted ' + 'incorrectly' + ) + self.assertEqual(beers[1],"Victoria Bitter", + 'cursor.fetchall retrieved incorrect data, or data inserted ' + 'incorrectly' + ) + trouble = "thi%s :may ca%(u)se? troub:1e" + self.assertEqual(res[0][1], trouble, + 'cursor.fetchall retrieved incorrect data, or data inserted ' + 'incorrectly. Got=%s, Expected=%s' % (repr(res[0][1]), repr(trouble))) + self.assertEqual(res[1][1], trouble, + 'cursor.fetchall retrieved incorrect data, or data inserted ' + 'incorrectly. Got=%s, Expected=%s' % (repr(res[1][1]), repr(trouble) + )) + + def test_executemany(self): + con = self._connect() + try: + cur = con.cursor() + self.executeDDL1(cur) + largs = [ ("Cooper's",) , ("Boag's",) ] + margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ] + if self.driver.paramstyle == 'qmark': + cur.executemany( + 'insert into %sbooze values (?)' % self.table_prefix, + largs + ) + elif self.driver.paramstyle == 'numeric': + cur.executemany( + 'insert into %sbooze values (:1)' % self.table_prefix, + largs + ) + elif self.driver.paramstyle == 'named': + cur.executemany( + 'insert into %sbooze values (:beer)' % self.table_prefix, + margs + ) + elif self.driver.paramstyle == 'format': + cur.executemany( + 'insert into %sbooze values (%%s)' % self.table_prefix, + largs + ) + elif self.driver.paramstyle == 'pyformat': + cur.executemany( + 'insert into %sbooze values (%%(beer)s)' % ( + self.table_prefix + ), + margs + ) + else: + self.fail('Unknown paramstyle') + _failUnless(self,cur.rowcount in (-1,2), + 'insert using cursor.executemany set cursor.rowcount to ' + 'incorrect value %r' % cur.rowcount + ) + cur.execute('select name from %sbooze' % self.table_prefix) + res = cur.fetchall() + self.assertEqual(len(res),2, + 'cursor.fetchall retrieved incorrect number of rows' + ) + beers = [res[0][0],res[1][0]] + beers.sort() + self.assertEqual(beers[0],"Boag's",'incorrect data "%s" retrieved' % beers[0]) + self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved') + finally: + con.close() + + def test_fetchone(self): + con = self._connect() + try: + cur = con.cursor() + + # cursor.fetchone should raise an Error if called before + # executing a select-type query + self.assertRaises(self.driver.Error,cur.fetchone) + + # cursor.fetchone should raise an Error if called after + # executing a query that cannnot return rows + self.executeDDL1(cur) + self.assertRaises(self.driver.Error,cur.fetchone) + + cur.execute('select name from %sbooze' % self.table_prefix) + self.assertEqual(cur.fetchone(),None, + 'cursor.fetchone should return None if a query retrieves ' + 'no rows' + ) + _failUnless(self,cur.rowcount in (-1,0)) + + # cursor.fetchone should raise an Error if called after + # executing a query that cannnot return rows + cur.execute("insert into %sbooze values ('Victoria Bitter')" % ( + self.table_prefix + )) + self.assertRaises(self.driver.Error,cur.fetchone) + + cur.execute('select name from %sbooze' % self.table_prefix) + r = cur.fetchone() + self.assertEqual(len(r),1, + 'cursor.fetchone should have retrieved a single row' + ) + 
self.assertEqual(r[0],'Victoria Bitter', + 'cursor.fetchone retrieved incorrect data' + ) + self.assertEqual(cur.fetchone(),None, + 'cursor.fetchone should return None if no more rows available' + ) + _failUnless(self,cur.rowcount in (-1,1)) + finally: + con.close() + + samples = [ + 'Carlton Cold', + 'Carlton Draft', + 'Mountain Goat', + 'Redback', + 'Victoria Bitter', + 'XXXX' + ] + + def _populate(self): + ''' Return a list of sql commands to setup the DB for the fetch + tests. + ''' + populate = [ + "insert into %sbooze values ('%s')" % (self.table_prefix,s) + for s in self.samples + ] + return populate + + def test_fetchmany(self): + con = self._connect() + try: + cur = con.cursor() + + # cursor.fetchmany should raise an Error if called without + #issuing a query + self.assertRaises(self.driver.Error,cur.fetchmany,4) + + self.executeDDL1(cur) + for sql in self._populate(): + cur.execute(sql) + + cur.execute('select name from %sbooze' % self.table_prefix) + r = cur.fetchmany() + self.assertEqual(len(r),1, + 'cursor.fetchmany retrieved incorrect number of rows, ' + 'default of arraysize is one.' + ) + cur.arraysize=10 + r = cur.fetchmany(3) # Should get 3 rows + self.assertEqual(len(r),3, + 'cursor.fetchmany retrieved incorrect number of rows' + ) + r = cur.fetchmany(4) # Should get 2 more + self.assertEqual(len(r),2, + 'cursor.fetchmany retrieved incorrect number of rows' + ) + r = cur.fetchmany(4) # Should be an empty sequence + self.assertEqual(len(r),0, + 'cursor.fetchmany should return an empty sequence after ' + 'results are exhausted' + ) + _failUnless(self,cur.rowcount in (-1,6)) + + # Same as above, using cursor.arraysize + cur.arraysize=4 + cur.execute('select name from %sbooze' % self.table_prefix) + r = cur.fetchmany() # Should get 4 rows + self.assertEqual(len(r),4, + 'cursor.arraysize not being honoured by fetchmany' + ) + r = cur.fetchmany() # Should get 2 more + self.assertEqual(len(r),2) + r = cur.fetchmany() # Should be an empty sequence + self.assertEqual(len(r),0) + _failUnless(self,cur.rowcount in (-1,6)) + + cur.arraysize=6 + cur.execute('select name from %sbooze' % self.table_prefix) + rows = cur.fetchmany() # Should get all rows + _failUnless(self,cur.rowcount in (-1,6)) + self.assertEqual(len(rows),6) + self.assertEqual(len(rows),6) + rows = [r[0] for r in rows] + rows.sort() + + # Make sure we get the right data back out + for i in range(0,6): + self.assertEqual(rows[i],self.samples[i], + 'incorrect data retrieved by cursor.fetchmany' + ) + + rows = cur.fetchmany() # Should return an empty list + self.assertEqual(len(rows),0, + 'cursor.fetchmany should return an empty sequence if ' + 'called after the whole result set has been fetched' + ) + _failUnless(self,cur.rowcount in (-1,6)) + + self.executeDDL2(cur) + cur.execute('select name from %sbarflys' % self.table_prefix) + r = cur.fetchmany() # Should get empty sequence + self.assertEqual(len(r),0, + 'cursor.fetchmany should return an empty sequence if ' + 'query retrieved no rows' + ) + _failUnless(self,cur.rowcount in (-1,0)) + + finally: + con.close() + + def test_fetchall(self): + con = self._connect() + try: + cur = con.cursor() + # cursor.fetchall should raise an Error if called + # without executing a query that may return rows (such + # as a select) + self.assertRaises(self.driver.Error, cur.fetchall) + + self.executeDDL1(cur) + for sql in self._populate(): + cur.execute(sql) + + # cursor.fetchall should raise an Error if called + # after executing a a statement that cannot return rows + 
self.assertRaises(self.driver.Error,cur.fetchall) + + cur.execute('select name from %sbooze' % self.table_prefix) + rows = cur.fetchall() + _failUnless(self,cur.rowcount in (-1,len(self.samples))) + self.assertEqual(len(rows),len(self.samples), + 'cursor.fetchall did not retrieve all rows' + ) + rows = [r[0] for r in rows] + rows.sort() + for i in range(0,len(self.samples)): + self.assertEqual(rows[i],self.samples[i], + 'cursor.fetchall retrieved incorrect rows' + ) + rows = cur.fetchall() + self.assertEqual( + len(rows),0, + 'cursor.fetchall should return an empty list if called ' + 'after the whole result set has been fetched' + ) + _failUnless(self,cur.rowcount in (-1,len(self.samples))) + + self.executeDDL2(cur) + cur.execute('select name from %sbarflys' % self.table_prefix) + rows = cur.fetchall() + _failUnless(self,cur.rowcount in (-1,0)) + self.assertEqual(len(rows),0, + 'cursor.fetchall should return an empty list if ' + 'a select query returns no rows' + ) + + finally: + con.close() + + def test_mixedfetch(self): + con = self._connect() + try: + cur = con.cursor() + self.executeDDL1(cur) + for sql in self._populate(): + cur.execute(sql) + + cur.execute('select name from %sbooze' % self.table_prefix) + rows1 = cur.fetchone() + rows23 = cur.fetchmany(2) + rows4 = cur.fetchone() + rows56 = cur.fetchall() + _failUnless(self,cur.rowcount in (-1,6)) + self.assertEqual(len(rows23),2, + 'fetchmany returned incorrect number of rows' + ) + self.assertEqual(len(rows56),2, + 'fetchall returned incorrect number of rows' + ) + + rows = [rows1[0]] + rows.extend([rows23[0][0],rows23[1][0]]) + rows.append(rows4[0]) + rows.extend([rows56[0][0],rows56[1][0]]) + rows.sort() + for i in range(0,len(self.samples)): + self.assertEqual(rows[i],self.samples[i], + 'incorrect data retrieved or inserted' + ) + finally: + con.close() + + def help_nextset_setUp(self,cur): + ''' Should create a procedure called deleteme + that returns two result sets, first the + number of rows in booze then "name from booze" + ''' + raise NotImplementedError('Helper not implemented') + #sql=""" + # create procedure deleteme as + # begin + # select count(*) from booze + # select name from booze + # end + #""" + #cur.execute(sql) + + def help_nextset_tearDown(self,cur): + 'If cleaning up is needed after nextSetTest' + raise NotImplementedError('Helper not implemented') + #cur.execute("drop procedure deleteme") + + def test_nextset(self): + con = self._connect() + try: + cur = con.cursor() + if not hasattr(cur,'nextset'): + return + + try: + self.executeDDL1(cur) + sql=self._populate() + for sql in self._populate(): + cur.execute(sql) + + self.help_nextset_setUp(cur) + + cur.callproc('deleteme') + numberofrows=cur.fetchone() + assert numberofrows[0]== len(self.samples) + assert cur.nextset() + names=cur.fetchall() + assert len(names) == len(self.samples) + s=cur.nextset() + assert s == None,'No more return sets, should return None' + finally: + self.help_nextset_tearDown(cur) + + finally: + con.close() + + def test_nextset(self): + raise NotImplementedError('Drivers need to override this test') + + def test_arraysize(self): + # Not much here - rest of the tests for this are in test_fetchmany + con = self._connect() + try: + cur = con.cursor() + _failUnless(self,hasattr(cur,'arraysize'), + 'cursor.arraysize must be defined' + ) + finally: + con.close() + + def test_setinputsizes(self): + con = self._connect() + try: + cur = con.cursor() + cur.setinputsizes( (25,) ) + self._paraminsert(cur) # Make sure cursor still works + finally: 
+ con.close() + + def test_setoutputsize_basic(self): + # Basic test is to make sure setoutputsize doesn't blow up + con = self._connect() + try: + cur = con.cursor() + cur.setoutputsize(1000) + cur.setoutputsize(2000,0) + self._paraminsert(cur) # Make sure the cursor still works + finally: + con.close() + + def test_setoutputsize(self): + # Real test for setoutputsize is driver dependant + raise NotImplementedError('Driver needed to override this test') + + def test_None(self): + con = self._connect() + try: + cur = con.cursor() + self.executeDDL1(cur) + cur.execute('insert into %sbooze values (NULL)' % self.table_prefix) + cur.execute('select name from %sbooze' % self.table_prefix) + r = cur.fetchall() + self.assertEqual(len(r),1) + self.assertEqual(len(r[0]),1) + self.assertEqual(r[0][0],None,'NULL value not returned as None') + finally: + con.close() + + def test_Date(self): + d1 = self.driver.Date(2002,12,25) + d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0))) + # Can we assume this? API doesn't specify, but it seems implied + # self.assertEqual(str(d1),str(d2)) + + def test_Time(self): + t1 = self.driver.Time(13,45,30) + t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0))) + # Can we assume this? API doesn't specify, but it seems implied + # self.assertEqual(str(t1),str(t2)) + + def test_Timestamp(self): + t1 = self.driver.Timestamp(2002,12,25,13,45,30) + t2 = self.driver.TimestampFromTicks( + time.mktime((2002,12,25,13,45,30,0,0,0)) + ) + # Can we assume this? API doesn't specify, but it seems implied + # self.assertEqual(str(t1),str(t2)) + + def test_Binary(self): + b = self.driver.Binary(str2bytes('Something')) + b = self.driver.Binary(str2bytes('')) + + def test_STRING(self): + _failUnless(self, hasattr(self.driver,'STRING'), + 'module.STRING must be defined' + ) + + def test_BINARY(self): + _failUnless(self, hasattr(self.driver,'BINARY'), + 'module.BINARY must be defined.' + ) + + def test_NUMBER(self): + _failUnless(self, hasattr(self.driver,'NUMBER'), + 'module.NUMBER must be defined.' + ) + + def test_DATETIME(self): + _failUnless(self, hasattr(self.driver,'DATETIME'), + 'module.DATETIME must be defined.' + ) + + def test_ROWID(self): + _failUnless(self, hasattr(self.driver,'ROWID'), + 'module.ROWID must be defined.' + ) diff --git a/venv/Lib/site-packages/adodbapi/test/is64bit.py b/venv/Lib/site-packages/adodbapi/test/is64bit.py new file mode 100644 index 00000000..51d5c28b --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/test/is64bit.py @@ -0,0 +1,33 @@ +"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version""" +import sys + +def Python(): + if sys.platform == 'cli': #IronPython + import System + return System.IntPtr.Size == 8 + else: + try: + return sys.maxsize > 2147483647 + except AttributeError: + return sys.maxint > 2147483647 + +def os(): + import platform + pm = platform.machine() + if pm != '..' 
and pm.endswith('64'): # recent Python (not Iron) + return True + else: + import os + if 'PROCESSOR_ARCHITEW6432' in os.environ: + return True # 32 bit program running on 64 bit Windows + try: + return os.environ['PROCESSOR_ARCHITECTURE'].endswith('64') # 64 bit Windows 64 bit program + except IndexError: + pass # not Windows + try: + return '64' in platform.architecture()[0] # this often works in Linux + except: + return False # is an older version of Python, assume also an older os (best we can guess) + +if __name__ == "__main__": + print("is64bit.Python() =", Python(), "is64bit.os() =", os()) diff --git a/venv/Lib/site-packages/adodbapi/test/setuptestframework.py b/venv/Lib/site-packages/adodbapi/test/setuptestframework.py new file mode 100644 index 00000000..3bdfa603 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/test/setuptestframework.py @@ -0,0 +1,113 @@ +#!/usr/bin/python2 +# Configure this in order to run the testcases. +"setuptestframework.py v 2.6.0.8" +import os +import sys +import tempfile +import shutil + +try: + OSErrors = (WindowsError, OSError) +except NameError: # not running on Windows + OSErrors = OSError + +def maketemp(): + temphome = tempfile.gettempdir() + tempdir = os.path.join(temphome, 'adodbapi_test') + try: os.mkdir(tempdir) + except: pass + return tempdir + +def _cleanup_function(testfolder, mdb_name): + try: os.unlink(os.path.join(testfolder, mdb_name)) + except: pass # mdb database not present + try: + shutil.rmtree(testfolder) + print(' cleaned up folder', testfolder) + except: pass # test package not present + +def getcleanupfunction(): + return _cleanup_function + +def find_ado_path(): + adoName = os.path.normpath(os.getcwd() + '/../../adodbapi.py') + adoPackage = os.path.dirname(adoName) + return adoPackage + +# make a new package directory for the test copy of ado +def makeadopackage(testfolder): + adoName = os.path.normpath(os.getcwd() + '/../adodbapi.py') + adoPath = os.path.dirname(adoName) + if os.path.exists(adoName): + newpackage = os.path.join(testfolder,'adodbapi') + try: + os.mkdir(newpackage) + except OSErrors: + print('*Note: temporary adodbapi package already exists: may be two versions running?') + for f in os.listdir(adoPath): + if f.endswith('.py'): + shutil.copy(os.path.join(adoPath, f), newpackage) + if sys.version_info >= (3,0): # only when running Py3.n + save = sys.stdout + sys.stdout = None + from lib2to3.main import main # use 2to3 to make test package + main("lib2to3.fixes",args=['-n','-w', newpackage]) + sys.stdout = save + return testfolder + else: + raise EnvironmentError('Connot find source of adodbapi to test.') + +def makemdb(testfolder, mdb_name): + # following setup code borrowed from pywin32 odbc test suite + # kindly contributed by Frank Millman. + import os + + _accessdatasource = os.path.join(testfolder, mdb_name) + if os.path.isfile(_accessdatasource): + print('using JET database=', _accessdatasource) + else: + try: + from win32com.client.gencache import EnsureDispatch + from win32com.client import constants + win32 = True + except ImportError: #perhaps we are running IronPython + win32 = False #iron Python + try: + from System import Activator, Type + except: + pass + + # Create a brand-new database - what is the story with these? 
+ dbe = None + for suffix in (".36", ".35", ".30"): + try: + if win32: + dbe = EnsureDispatch("DAO.DBEngine" + suffix) + else: + type= Type.GetTypeFromProgID("DAO.DBEngine" + suffix) + dbe = Activator.CreateInstance(type) + break + except: + pass + if dbe: + print(' ...Creating ACCESS db at '+_accessdatasource) + if win32: + workspace = dbe.Workspaces(0) + newdb = workspace.CreateDatabase(_accessdatasource, + constants.dbLangGeneral, + constants.dbVersion40) + else: + newdb = dbe.CreateDatabase(_accessdatasource,';LANGID=0x0409;CP=1252;COUNTRY=0') + newdb.Close() + else: + print(' ...copying test ACCESS db to '+_accessdatasource) + mdbName = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'examples', 'test.mdb')) + import shutil + shutil.copy(mdbName, _accessdatasource) + + return _accessdatasource + +if __name__ == "__main__": + print('Setting up a Jet database for server to use for remote testing...') + temp = maketemp() + makemdb(temp, 'server_test.mdb') diff --git a/venv/Lib/site-packages/adodbapi/test/test_adodbapi_dbapi20.py b/venv/Lib/site-packages/adodbapi/test/test_adodbapi_dbapi20.py new file mode 100644 index 00000000..0c780551 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/test/test_adodbapi_dbapi20.py @@ -0,0 +1,186 @@ +print("This module depends on the dbapi20 compliance tests created by Stuart Bishop") +print("(see db-sig mailing list history for info)") +import platform +import unittest +import sys + +import dbapi20 +import setuptestframework + +testfolder = setuptestframework.maketemp() +if '--package' in sys.argv: + pth = setuptestframework.makeadopackage(testfolder) + sys.argv.remove('--package') +else: + pth = setuptestframework.find_ado_path() +if pth not in sys.path: + sys.path.insert(1,pth) +# function to clean up the temporary folder -- calling program must run this function before exit. +cleanup = setuptestframework.getcleanupfunction() + +import adodbapi +import adodbapi.is64bit as is64bit +db = adodbapi + +if '--verbose' in sys.argv: + db.adodbapi.verbose = 3 + +print(adodbapi.version) +print("Tested with dbapi20 %s" % dbapi20.__version__) + +try: + onWindows = bool(sys.getwindowsversion()) # seems to work on all versions of Python +except: + onWindows = False + +node = platform.node() + +conn_kws = {} +host = 'testsql.2txt.us,1430' # if None, will use macro to fill in node name +instance = r'%s\SQLEXPRESS' +conn_kws['name'] = 'adotest' + +conn_kws['user'] = 'adotestuser' # None implies Windows security +conn_kws['password'] = "Sq1234567" +# macro definition for keyword "security" using macro "auto_security" +conn_kws['macro_auto_security'] = 'security' + +if host is None: + conn_kws['macro_getnode'] = ['host', instance] +else: + conn_kws['host'] = host + +conn_kws['provider'] = 'Provider=MSOLEDBSQL;DataTypeCompatibility=80;MARS Connection=True;' +connStr = "%(provider)s; %(security)s; Initial Catalog=%(name)s;Data Source=%(host)s" + +if onWindows and node != "z-PC": + pass # default should make a local SQL Server connection +elif node == "xxx": # try Postgres database + _computername = "25.223.161.222" + _databasename='adotest' + _username = 'adotestuser' + _password = '12345678' + _driver="PostgreSQL Unicode" + _provider = '' + connStr = '%sDriver={%s};Server=%s;Database=%s;uid=%s;pwd=%s;' % \ + (_provider,_driver,_computername,_databasename,_username,_password) +elif node == "yyy": # ACCESS data base is known to fail some tests. 
+ if is64bit.Python(): + driver = "Microsoft.ACE.OLEDB.12.0" + else: + driver = "Microsoft.Jet.OLEDB.4.0" + testmdb = setuptestframework.makemdb(testfolder) + connStr = r"Provider=%s;Data Source=%s" % (driver, testmdb) +else: # try a remote connection to an SQL server + conn_kws['proxy_host'] = '25.44.77.176' + import adodbapi.remote + db = adodbapi.remote + +print('Using Connection String like=%s' % connStr) +print('Keywords=%s' % repr(conn_kws)) + +class test_adodbapi(dbapi20.DatabaseAPI20Test): + driver = db + connect_args = (connStr,) + connect_kw_args = conn_kws + + def __init__(self,arg): + dbapi20.DatabaseAPI20Test.__init__(self,arg) + + def testMethodName(self): + return self.id().split('.')[-1] + + def setUp(self): + # Call superclass setUp In case this does something in the + # future + dbapi20.DatabaseAPI20Test.setUp(self) + if self.testMethodName()=='test_callproc': + con = self._connect() + engine = con.dbms_name + ## print('Using database Engine=%s' % engine) ## + if engine != 'MS Jet': + sql=""" + create procedure templower + @theData varchar(50) + as + select lower(@theData) + """ + else: # Jet + sql=""" + create procedure templower + (theData varchar(50)) + as + select lower(theData); + """ + cur = con.cursor() + try: + cur.execute(sql) + con.commit() + except: + pass + cur.close() + con.close() + self.lower_func='templower' + + + def tearDown(self): + if self.testMethodName()=='test_callproc': + con = self._connect() + cur = con.cursor() + try: + cur.execute("drop procedure templower") + except: + pass + con.commit() + dbapi20.DatabaseAPI20Test.tearDown(self) + + + def help_nextset_setUp(self,cur): + 'Should create a procedure called deleteme ' + 'that returns two result sets, first the number of rows in booze then "name from booze"' + sql=""" + create procedure deleteme as + begin + select count(*) from %sbooze + select name from %sbooze + end + """ %(self.table_prefix,self.table_prefix) + cur.execute(sql) + + def help_nextset_tearDown(self,cur): + 'If cleaning up is needed after nextSetTest' + try: + cur.execute("drop procedure deleteme") + except: + pass + + def test_nextset(self): + con = self._connect() + try: + cur = con.cursor() + + stmts=[self.ddl1] + self._populate() + for sql in stmts: + cur.execute(sql) + + self.help_nextset_setUp(cur) + + cur.callproc('deleteme') + numberofrows=cur.fetchone() + assert numberofrows[0]== 6 + assert cur.nextset() + names=cur.fetchall() + assert len(names) == len(self.samples) + s=cur.nextset() + assert s == None,'No more return sets, should return None' + finally: + try: + self.help_nextset_tearDown(cur) + finally: + con.close() + + def test_setoutputsize(self): pass + +if __name__ == '__main__': + unittest.main() + cleanup(testfolder, None) diff --git a/venv/Lib/site-packages/adodbapi/test/tryconnection.py b/venv/Lib/site-packages/adodbapi/test/tryconnection.py new file mode 100644 index 00000000..9e5cd282 --- /dev/null +++ b/venv/Lib/site-packages/adodbapi/test/tryconnection.py @@ -0,0 +1,30 @@ +remote = False # automatic testing of remote access has been removed here + +def try_connection(verbose, *args, **kwargs): + import adodbapi + + dbconnect = adodbapi.connect + try: + s = dbconnect(*args, **kwargs) # connect to server + if verbose: + print('Connected to:', s.connection_string) + print('which has tables:', s.get_table_names()) + s.close() # thanks, it worked, goodbye + except adodbapi.DatabaseError as inst: + print(inst.args[0]) # should be the error message + print('***Failed getting connection 
using=',repr(args),repr(kwargs)) + return False, (args, kwargs), None + + print(" (successful)") + + return True, (args, kwargs, remote), dbconnect + + +def try_operation_with_expected_exception(expected_exception_list, some_function, *args, **kwargs): + try: + some_function(*args, **kwargs) + except expected_exception_list as e: + return True, e + except: + raise # an exception other than the expected occurred + return False, 'The expected exception did not occur' diff --git a/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/PKG-INFO b/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/PKG-INFO new file mode 100644 index 00000000..b23f4cce --- /dev/null +++ b/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/PKG-INFO @@ -0,0 +1,54 @@ +Metadata-Version: 2.1 +Name: comtypes +Version: 1.1.10 +Summary: Pure Python COM package +Home-page: https://github.com/enthought/comtypes +Author: Thomas Heller +Author-email: theller@python.net +License: MIT License +Download-URL: https://github.com/enthought/comtypes/releases +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +License-File: LICENSE.txt + +comtypes +======== + +**comtypes** is a lightweight Python COM package, based on the ctypes_ +FFI library, in less than 10000 lines of code (not counting the +tests). + +**comtypes** allows to define, call, and implement custom and +dispatch-based COM interfaces in pure Python. It works on Windows and +64-bit Windows. Windows CE support was dropped since comtypes==1.1.8. + +Documentation: + + https://pythonhosted.org/comtypes + + Contribute using the `source repository and issue tracker + <https://github.com/enthought/comtypes/>`_ on GitHub. + +Mailing list: + + http://gmane.org/info.php?group=gmane.comp.python.comtypes.user + + https://lists.sourceforge.net/lists/listinfo/comtypes-users/ + +Download: + + Releases can be downloaded in the PyPI page: + + https://pypi.python.org/pypi/comtypes + +.. 
_ctypes: http://docs.python.org/lib/module-ctypes.html + + diff --git a/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/SOURCES.txt b/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/SOURCES.txt new file mode 100644 index 00000000..89d47202 --- /dev/null +++ b/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,96 @@ +CHANGES.txt +LICENSE.txt +MANIFEST.in +README +clear_comtypes_cache.py +setup.cfg +setup.py +comtypes/GUID.py +comtypes/__init__.py +comtypes/_comobject.py +comtypes/_meta.py +comtypes/_safearray.py +comtypes/automation.py +comtypes/connectionpoints.py +comtypes/errorinfo.py +comtypes/git.py +comtypes/hresult.py +comtypes/logutil.py +comtypes/messageloop.py +comtypes/npsupport.py +comtypes/patcher.py +comtypes/persist.py +comtypes/safearray.py +comtypes/shelllink.py +comtypes/typeinfo.py +comtypes/util.py +comtypes/viewobject.py +comtypes.egg-info/PKG-INFO +comtypes.egg-info/SOURCES.txt +comtypes.egg-info/dependency_links.txt +comtypes.egg-info/top_level.txt +comtypes/client/__init__.py +comtypes/client/_code_cache.py +comtypes/client/_events.py +comtypes/client/_generate.py +comtypes/client/dynamic.py +comtypes/client/lazybind.py +comtypes/server/__init__.py +comtypes/server/automation.py +comtypes/server/connectionpoints.py +comtypes/server/inprocserver.py +comtypes/server/localserver.py +comtypes/server/register.py +comtypes/server/w_getopt.py +comtypes/test/TestComServer.idl +comtypes/test/TestComServer.py +comtypes/test/TestComServer.tlb +comtypes/test/TestDispServer.idl +comtypes/test/TestDispServer.py +comtypes/test/TestDispServer.tlb +comtypes/test/__init__.py +comtypes/test/find_memleak.py +comtypes/test/mylib.idl +comtypes/test/mytypelib.idl +comtypes/test/runtests.py +comtypes/test/setup.py +comtypes/test/test_BSTR.py +comtypes/test/test_DISPPARAMS.py +comtypes/test/test_GUID.py +comtypes/test/test_QueryService.py +comtypes/test/test_agilent.py +comtypes/test/test_avmc.py +comtypes/test/test_basic.py +comtypes/test/test_casesensitivity.py +comtypes/test/test_client.py +comtypes/test/test_collections.py +comtypes/test/test_comserver.py +comtypes/test/test_createwrappers.py +comtypes/test/test_dict.py +comtypes/test/test_dispinterface.py +comtypes/test/test_dyndispatch.py +comtypes/test/test_excel.py +comtypes/test/test_findgendir.py +comtypes/test/test_getactiveobj.py +comtypes/test/test_ie.py +comtypes/test/test_jscript.js +comtypes/test/test_msscript.py +comtypes/test/test_outparam.py +comtypes/test/test_propputref.py +comtypes/test/test_pump_events.py +comtypes/test/test_safearray.py +comtypes/test/test_sapi.py +comtypes/test/test_server.py +comtypes/test/test_showevents.py +comtypes/test/test_subinterface.py +comtypes/test/test_typeinfo.py +comtypes/test/test_urlhistory.py +comtypes/test/test_variant.py +comtypes/test/test_win32com_interop.py +comtypes/test/test_wmi.py +comtypes/test/test_word.py +comtypes/tools/__init__.py +comtypes/tools/codegenerator.py +comtypes/tools/tlbparser.py +comtypes/tools/typedesc.py +comtypes/tools/typedesc_base.py \ No newline at end of file diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/__init__.py b/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/dependency_links.txt similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/__init__.py rename to venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/dependency_links.txt diff --git a/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/installed-files.txt 
b/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/installed-files.txt new file mode 100644 index 00000000..846dc4f5 --- /dev/null +++ b/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/installed-files.txt @@ -0,0 +1,168 @@ +..\..\..\Scripts\clear_comtypes_cache.py +..\comtypes\GUID.py +..\comtypes\__init__.py +..\comtypes\__pycache__\GUID.cpython-36.pyc +..\comtypes\__pycache__\__init__.cpython-36.pyc +..\comtypes\__pycache__\_comobject.cpython-36.pyc +..\comtypes\__pycache__\_meta.cpython-36.pyc +..\comtypes\__pycache__\_safearray.cpython-36.pyc +..\comtypes\__pycache__\automation.cpython-36.pyc +..\comtypes\__pycache__\connectionpoints.cpython-36.pyc +..\comtypes\__pycache__\errorinfo.cpython-36.pyc +..\comtypes\__pycache__\git.cpython-36.pyc +..\comtypes\__pycache__\hresult.cpython-36.pyc +..\comtypes\__pycache__\logutil.cpython-36.pyc +..\comtypes\__pycache__\messageloop.cpython-36.pyc +..\comtypes\__pycache__\npsupport.cpython-36.pyc +..\comtypes\__pycache__\patcher.cpython-36.pyc +..\comtypes\__pycache__\persist.cpython-36.pyc +..\comtypes\__pycache__\safearray.cpython-36.pyc +..\comtypes\__pycache__\shelllink.cpython-36.pyc +..\comtypes\__pycache__\typeinfo.cpython-36.pyc +..\comtypes\__pycache__\util.cpython-36.pyc +..\comtypes\__pycache__\viewobject.cpython-36.pyc +..\comtypes\_comobject.py +..\comtypes\_meta.py +..\comtypes\_safearray.py +..\comtypes\automation.py +..\comtypes\client\__init__.py +..\comtypes\client\__pycache__\__init__.cpython-36.pyc +..\comtypes\client\__pycache__\_code_cache.cpython-36.pyc +..\comtypes\client\__pycache__\_events.cpython-36.pyc +..\comtypes\client\__pycache__\_generate.cpython-36.pyc +..\comtypes\client\__pycache__\dynamic.cpython-36.pyc +..\comtypes\client\__pycache__\lazybind.cpython-36.pyc +..\comtypes\client\_code_cache.py +..\comtypes\client\_events.py +..\comtypes\client\_generate.py +..\comtypes\client\dynamic.py +..\comtypes\client\lazybind.py +..\comtypes\connectionpoints.py +..\comtypes\errorinfo.py +..\comtypes\git.py +..\comtypes\hresult.py +..\comtypes\logutil.py +..\comtypes\messageloop.py +..\comtypes\npsupport.py +..\comtypes\patcher.py +..\comtypes\persist.py +..\comtypes\safearray.py +..\comtypes\server\__init__.py +..\comtypes\server\__pycache__\__init__.cpython-36.pyc +..\comtypes\server\__pycache__\automation.cpython-36.pyc +..\comtypes\server\__pycache__\connectionpoints.cpython-36.pyc +..\comtypes\server\__pycache__\inprocserver.cpython-36.pyc +..\comtypes\server\__pycache__\localserver.cpython-36.pyc +..\comtypes\server\__pycache__\register.cpython-36.pyc +..\comtypes\server\__pycache__\w_getopt.cpython-36.pyc +..\comtypes\server\automation.py +..\comtypes\server\connectionpoints.py +..\comtypes\server\inprocserver.py +..\comtypes\server\localserver.py +..\comtypes\server\register.py +..\comtypes\server\w_getopt.py +..\comtypes\shelllink.py +..\comtypes\test\TestComServer.idl +..\comtypes\test\TestComServer.py +..\comtypes\test\TestComServer.tlb +..\comtypes\test\TestDispServer.idl +..\comtypes\test\TestDispServer.py +..\comtypes\test\TestDispServer.tlb +..\comtypes\test\__init__.py +..\comtypes\test\__pycache__\TestComServer.cpython-36.pyc +..\comtypes\test\__pycache__\TestDispServer.cpython-36.pyc +..\comtypes\test\__pycache__\__init__.cpython-36.pyc +..\comtypes\test\__pycache__\find_memleak.cpython-36.pyc +..\comtypes\test\__pycache__\runtests.cpython-36.pyc +..\comtypes\test\__pycache__\setup.cpython-36.pyc +..\comtypes\test\__pycache__\test_BSTR.cpython-36.pyc 
+..\comtypes\test\__pycache__\test_DISPPARAMS.cpython-36.pyc +..\comtypes\test\__pycache__\test_GUID.cpython-36.pyc +..\comtypes\test\__pycache__\test_QueryService.cpython-36.pyc +..\comtypes\test\__pycache__\test_agilent.cpython-36.pyc +..\comtypes\test\__pycache__\test_avmc.cpython-36.pyc +..\comtypes\test\__pycache__\test_basic.cpython-36.pyc +..\comtypes\test\__pycache__\test_casesensitivity.cpython-36.pyc +..\comtypes\test\__pycache__\test_client.cpython-36.pyc +..\comtypes\test\__pycache__\test_collections.cpython-36.pyc +..\comtypes\test\__pycache__\test_comserver.cpython-36.pyc +..\comtypes\test\__pycache__\test_createwrappers.cpython-36.pyc +..\comtypes\test\__pycache__\test_dict.cpython-36.pyc +..\comtypes\test\__pycache__\test_dispinterface.cpython-36.pyc +..\comtypes\test\__pycache__\test_dyndispatch.cpython-36.pyc +..\comtypes\test\__pycache__\test_excel.cpython-36.pyc +..\comtypes\test\__pycache__\test_findgendir.cpython-36.pyc +..\comtypes\test\__pycache__\test_getactiveobj.cpython-36.pyc +..\comtypes\test\__pycache__\test_ie.cpython-36.pyc +..\comtypes\test\__pycache__\test_msscript.cpython-36.pyc +..\comtypes\test\__pycache__\test_outparam.cpython-36.pyc +..\comtypes\test\__pycache__\test_propputref.cpython-36.pyc +..\comtypes\test\__pycache__\test_pump_events.cpython-36.pyc +..\comtypes\test\__pycache__\test_safearray.cpython-36.pyc +..\comtypes\test\__pycache__\test_sapi.cpython-36.pyc +..\comtypes\test\__pycache__\test_server.cpython-36.pyc +..\comtypes\test\__pycache__\test_showevents.cpython-36.pyc +..\comtypes\test\__pycache__\test_subinterface.cpython-36.pyc +..\comtypes\test\__pycache__\test_typeinfo.cpython-36.pyc +..\comtypes\test\__pycache__\test_urlhistory.cpython-36.pyc +..\comtypes\test\__pycache__\test_variant.cpython-36.pyc +..\comtypes\test\__pycache__\test_win32com_interop.cpython-36.pyc +..\comtypes\test\__pycache__\test_wmi.cpython-36.pyc +..\comtypes\test\__pycache__\test_word.cpython-36.pyc +..\comtypes\test\find_memleak.py +..\comtypes\test\mylib.idl +..\comtypes\test\mytypelib.idl +..\comtypes\test\runtests.py +..\comtypes\test\setup.py +..\comtypes\test\test_BSTR.py +..\comtypes\test\test_DISPPARAMS.py +..\comtypes\test\test_GUID.py +..\comtypes\test\test_QueryService.py +..\comtypes\test\test_agilent.py +..\comtypes\test\test_avmc.py +..\comtypes\test\test_basic.py +..\comtypes\test\test_casesensitivity.py +..\comtypes\test\test_client.py +..\comtypes\test\test_collections.py +..\comtypes\test\test_comserver.py +..\comtypes\test\test_createwrappers.py +..\comtypes\test\test_dict.py +..\comtypes\test\test_dispinterface.py +..\comtypes\test\test_dyndispatch.py +..\comtypes\test\test_excel.py +..\comtypes\test\test_findgendir.py +..\comtypes\test\test_getactiveobj.py +..\comtypes\test\test_ie.py +..\comtypes\test\test_jscript.js +..\comtypes\test\test_msscript.py +..\comtypes\test\test_outparam.py +..\comtypes\test\test_propputref.py +..\comtypes\test\test_pump_events.py +..\comtypes\test\test_safearray.py +..\comtypes\test\test_sapi.py +..\comtypes\test\test_server.py +..\comtypes\test\test_showevents.py +..\comtypes\test\test_subinterface.py +..\comtypes\test\test_typeinfo.py +..\comtypes\test\test_urlhistory.py +..\comtypes\test\test_variant.py +..\comtypes\test\test_win32com_interop.py +..\comtypes\test\test_wmi.py +..\comtypes\test\test_word.py +..\comtypes\tools\__init__.py +..\comtypes\tools\__pycache__\__init__.cpython-36.pyc +..\comtypes\tools\__pycache__\codegenerator.cpython-36.pyc +..\comtypes\tools\__pycache__\tlbparser.cpython-36.pyc 
+..\comtypes\tools\__pycache__\typedesc.cpython-36.pyc +..\comtypes\tools\__pycache__\typedesc_base.cpython-36.pyc +..\comtypes\tools\codegenerator.py +..\comtypes\tools\tlbparser.py +..\comtypes\tools\typedesc.py +..\comtypes\tools\typedesc_base.py +..\comtypes\typeinfo.py +..\comtypes\util.py +..\comtypes\viewobject.py +PKG-INFO +SOURCES.txt +dependency_links.txt +top_level.txt diff --git a/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/top_level.txt b/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/top_level.txt new file mode 100644 index 00000000..fa08eadd --- /dev/null +++ b/venv/Lib/site-packages/comtypes-1.1.10-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +comtypes diff --git a/venv/Lib/site-packages/comtypes/GUID.py b/venv/Lib/site-packages/comtypes/GUID.py new file mode 100644 index 00000000..04f31215 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/GUID.py @@ -0,0 +1,101 @@ +from ctypes import * +import sys + +if sys.version_info >= (2, 6): + def binary(obj): + return bytes(obj) +else: + def binary(obj): + return buffer(obj) + +BYTE = c_byte +WORD = c_ushort +DWORD = c_ulong + +_ole32 = oledll.ole32 + +_StringFromCLSID = _ole32.StringFromCLSID +_CoTaskMemFree = windll.ole32.CoTaskMemFree +_ProgIDFromCLSID = _ole32.ProgIDFromCLSID +_CLSIDFromString = _ole32.CLSIDFromString +_CLSIDFromProgID = _ole32.CLSIDFromProgID +_CoCreateGuid = _ole32.CoCreateGuid + +# Note: Comparing GUID instances by comparing their buffers +# is slightly faster than using ole32.IsEqualGUID. + +class GUID(Structure): + _fields_ = [("Data1", DWORD), + ("Data2", WORD), + ("Data3", WORD), + ("Data4", BYTE * 8)] + + def __init__(self, name=None): + if name is not None: + _CLSIDFromString(str(name), byref(self)) + + def __repr__(self): + return 'GUID("%s")' % str(self) + + def __unicode__(self): + p = c_wchar_p() + _StringFromCLSID(byref(self), byref(p)) + result = p.value + _CoTaskMemFree(p) + return result + __str__ = __unicode__ + + def __cmp__(self, other): + if isinstance(other, GUID): + return cmp(binary(self), binary(other)) + return -1 + + def __bool__(self): + return self != GUID_null + + def __eq__(self, other): + return isinstance(other, GUID) and \ + binary(self) == binary(other) + + def __hash__(self): + # We make GUID instances hashable, although they are mutable. + return hash(binary(self)) + + def copy(self): + return GUID(str(self)) + + def from_progid(cls, progid): + """Get guid from progid, ... 
+ """ + if hasattr(progid, "_reg_clsid_"): + progid = progid._reg_clsid_ + if isinstance(progid, cls): + return progid + elif isinstance(progid, str): + if progid.startswith("{"): + return cls(progid) + inst = cls() + _CLSIDFromProgID(str(progid), byref(inst)) + return inst + else: + raise TypeError("Cannot construct guid from %r" % progid) + from_progid = classmethod(from_progid) + + def as_progid(self): + "Convert a GUID into a progid" + progid = c_wchar_p() + _ProgIDFromCLSID(byref(self), byref(progid)) + result = progid.value + _CoTaskMemFree(progid) + return result + + def create_new(cls): + "Create a brand new guid" + guid = cls() + _CoCreateGuid(byref(guid)) + return guid + create_new = classmethod(create_new) + +GUID_null = GUID() + +__all__ = ["GUID"] diff --git a/venv/Lib/site-packages/comtypes/__init__.py b/venv/Lib/site-packages/comtypes/__init__.py new file mode 100644 index 00000000..4eff9c31 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/__init__.py @@ -0,0 +1,1384 @@ +import types +import sys +import os + +# comtypes version numbers follow semver (http://semver.org/) and PEP 440 +__version__ = "1.1.10" + +import logging +class NullHandler(logging.Handler): + """A Handler that does nothing.""" + def emit(self, record): + pass + +logger = logging.getLogger(__name__) + +# Add a NULL handler to the comtypes logger. This prevents getting a +# message like this: +# No handlers could be found for logger "comtypes" +# when logging is not configured and logger.error() is called. +logger.addHandler(NullHandler()) + +from ctypes import * +from _ctypes import COMError +from comtypes import patcher + +def _check_version(actual, tlib_cached_mtime=None): + from comtypes.tools.codegenerator import version as required + if actual != required: + raise ImportError("Wrong version") + if not hasattr(sys, "frozen"): + g = sys._getframe(1).f_globals + tlb_path = g.get("typelib_path") + try: + tlib_curr_mtime = os.stat(tlb_path).st_mtime + except (OSError, TypeError): + return + if not tlib_cached_mtime or abs(tlib_curr_mtime - tlib_cached_mtime) >= 1: + raise ImportError("Typelib different than module") + +try: + COMError() +except TypeError: + pass +else: + # Python 2.5 and 2.5.1 have a bug in the COMError implementation: + # The type has no __init__ method, and no hresult, text, and + # details instance vars. Work around this bug by monkeypatching + # COMError. + def monkeypatch_COMError(): + def __init__(self, hresult, text, details): + self.hresult = hresult + self.text = text + self.details = details + super(COMError, self).__init__(hresult, text, details) + COMError.__init__ = __init__ + monkeypatch_COMError() + del monkeypatch_COMError + +if sys.version_info >= (3, 0): + pythonapi.PyInstanceMethod_New.argtypes = [py_object] + pythonapi.PyInstanceMethod_New.restype = py_object + PyInstanceMethod_Type = type(pythonapi.PyInstanceMethod_New(id)) + + def instancemethod(func, inst, cls): + mth = PyInstanceMethod_Type(func) + if inst is None: + return mth + return mth.__get__(inst) +else: + def instancemethod(func, inst, cls): + return types.MethodType(func, inst, cls) + +class ReturnHRESULT(Exception): + """ReturnHRESULT(hresult, text) + + Return a hresult code from a COM method implementation + without logging an error. 
+ """ + +##class IDLWarning(UserWarning): +## "Warn about questionable type information" + +from comtypes.GUID import GUID +_GUID = GUID +IID = GUID +DWORD = c_ulong + +wireHWND = c_ulong + +################################################################ +# About COM apartments: +# http://blogs.msdn.com/larryosterman/archive/2004/04/28/122240.aspx +################################################################ + +################################################################ +# constants for object creation +CLSCTX_INPROC_SERVER = 1 +CLSCTX_INPROC_HANDLER = 2 +CLSCTX_LOCAL_SERVER = 4 + +CLSCTX_INPROC = 3 +CLSCTX_SERVER = 5 +CLSCTX_ALL = 7 + +CLSCTX_INPROC_SERVER16 = 8 +CLSCTX_REMOTE_SERVER = 16 +CLSCTX_INPROC_HANDLER16 = 32 +CLSCTX_RESERVED1 = 64 +CLSCTX_RESERVED2 = 128 +CLSCTX_RESERVED3 = 256 +CLSCTX_RESERVED4 = 512 +CLSCTX_NO_CODE_DOWNLOAD = 1024 +CLSCTX_RESERVED5 = 2048 +CLSCTX_NO_CUSTOM_MARSHAL = 4096 +CLSCTX_ENABLE_CODE_DOWNLOAD = 8192 +CLSCTX_NO_FAILURE_LOG = 16384 +CLSCTX_DISABLE_AAA = 32768 +CLSCTX_ENABLE_AAA = 65536 +CLSCTX_FROM_DEFAULT_CONTEXT = 131072 + +tagCLSCTX = c_int # enum +CLSCTX = tagCLSCTX + +# Constants for security setups +SEC_WINNT_AUTH_IDENTITY_UNICODE = 0x2 +RPC_C_AUTHN_WINNT = 10 +RPC_C_AUTHZ_NONE = 0 +RPC_C_AUTHN_LEVEL_CONNECT = 2 +RPC_C_IMP_LEVEL_IMPERSONATE = 3 +EOAC_NONE = 0 + + + +################################################################ +# Initialization and shutdown +_ole32 = oledll.ole32 +_ole32_nohresult = windll.ole32 # use this for functions that don't return a HRESULT + +COINIT_MULTITHREADED = 0x0 +COINIT_APARTMENTTHREADED = 0x2 +COINIT_DISABLE_OLE1DDE = 0x4 +COINIT_SPEED_OVER_MEMORY = 0x8 + +def CoInitialize(): + return CoInitializeEx(COINIT_APARTMENTTHREADED) + +def CoInitializeEx(flags=None): + if flags is None: + flags = getattr(sys, "coinit_flags", COINIT_APARTMENTTHREADED) + logger.debug("CoInitializeEx(None, %s)", flags) + _ole32.CoInitializeEx(None, flags) + +# COM is initialized automatically for the thread that imports this +# module for the first time. sys.coinit_flags is passed as parameter +# to CoInitializeEx, if defined, otherwise COINIT_APARTMENTTHREADED +# (COINIT_MULTITHREADED on Windows CE) is used. +# +# A shutdown function is registered with atexit, so that +# CoUninitialize is called when Python is shut down. +CoInitializeEx() + +# We need to have CoUninitialize for multithreaded model where we have +# to initialize and uninitialize COM for every new thread (except main) +# in which we are using COM +def CoUninitialize(): + logger.debug("CoUninitialize()") + _ole32_nohresult.CoUninitialize() + + +def _shutdown(func=_ole32_nohresult.CoUninitialize, + _debug=logger.debug, + _exc_clear=getattr(sys, "exc_clear", lambda: None)): + # Make sure no COM pointers stay in exception frames. + _exc_clear() + # Sometimes, CoUnititialize, running at Python shutdown, + # raises an exception. We suppress this when __debug__ is + # False. + _debug("Calling CoUnititialize()") + if __debug__: + func() + else: + try: func() + except WindowsError: pass + # Set the flag which means that calling obj.Release() is no longer + # needed. + if _cominterface_meta is not None: + _cominterface_meta._com_shutting_down = True + _debug("CoUnititialize() done.") + +import atexit +atexit.register(_shutdown) + +################################################################ +# global registries. 
+ +# allows to find interface classes by guid strings (iid) +com_interface_registry = {} + +# allows to find coclasses by guid strings (clsid) +com_coclass_registry = {} + +def _is_object(obj): + """This function determines if the argument is a COM object. It + is used in several places to determine whether propputref or + propput setters have to be used.""" + from comtypes.automation import VARIANT + # A COM pointer is an 'Object' + if isinstance(obj, POINTER(IUnknown)): + return True + # A COM pointer in a VARIANT is an 'Object', too + elif isinstance(obj, VARIANT) and isinstance(obj.value, POINTER(IUnknown)): + return True + # It may be a dynamic dispatch object. + return hasattr(obj, "_comobj") + +################################################################ +# The metaclasses... + +class _cominterface_meta(type): + """Metaclass for COM interfaces. Automatically creates high level + methods from COMMETHOD lists. + """ + + # This flag is set to True by the atexit handler which calls + # CoUnititialize. + _com_shutting_down = False + + # Creates also a POINTER type for the newly created class. + def __new__(self, name, bases, namespace): + methods = namespace.pop("_methods_", None) + dispmethods = namespace.pop("_disp_methods_", None) + cls = type.__new__(self, name, bases, namespace) + + if methods is not None: + cls._methods_ = methods + if dispmethods is not None: + cls._disp_methods_ = dispmethods + + # If we sublass a COM interface, for example: + # + # class IDispatch(IUnknown): + # .... + # + # then we need to make sure that POINTER(IDispatch) is a + # subclass of POINTER(IUnknown) because of the way ctypes + # typechecks work. + if bases == (object,): + _ptr_bases = (cls, _compointer_base) + else: + _ptr_bases = (cls, POINTER(bases[0])) + + # The interface 'cls' is used as a mixin. + p = type(_compointer_base)("POINTER(%s)" % cls.__name__, + _ptr_bases, + {"__com_interface__": cls, + "_needs_com_addref_": None}) + + from ctypes import _pointer_type_cache + _pointer_type_cache[cls] = p + + if cls._case_insensitive_: + + @patcher.Patch(p) + class CaseInsensitive(object): + # case insensitive attributes for COM methods and properties + def __getattr__(self, name): + """Implement case insensitive access to methods and properties""" + try: + fixed_name = self.__map_case__[name.lower()] + except KeyError: + raise AttributeError(name) + if fixed_name != name: # prevent unbounded recursion + return getattr(self, fixed_name) + raise AttributeError(name) + + # __setattr__ is pretty heavy-weight, because it is called for + # EVERY attribute assignment. Settings a non-com attribute + # through this function takes 8.6 usec, while without this + # function it takes 0.7 sec - 12 times slower. + # + # How much faster would this be if implemented in C? + def __setattr__(self, name, value): + """Implement case insensitive access to methods and properties""" + object.__setattr__(self, + self.__map_case__.get(name.lower(), name), + value) + + @patcher.Patch(POINTER(p)) + class ReferenceFix(object): + def __setitem__(self, index, value): + # We override the __setitem__ method of the + # POINTER(POINTER(interface)) type, so that the COM + # reference count is managed correctly. + # + # This is so that we can implement COM methods that have to + # return COM pointers more easily and consistent. 
Instead of + # using CopyComPointer in the method implementation, we can + # simply do: + # + # def GetTypeInfo(self, this, ..., pptinfo): + # if not pptinfo: return E_POINTER + # pptinfo[0] = a_com_interface_pointer + # return S_OK + if index != 0: + # CopyComPointer, which is in _ctypes, does only + # handle an index of 0. This code does what + # CopyComPointer should do if index != 0. + if bool(value): + value.AddRef() + super(POINTER(p), self).__setitem__(index, value) + return + from _ctypes import CopyComPointer + CopyComPointer(value, self) + + return cls + + def __setattr__(self, name, value): + if name == "_methods_": + # XXX I'm no longer sure why the code generator generates + # "_methods_ = []" in the interface definition, and later + # overrides this by "Interface._methods_ = [...] +## assert self.__dict__.get("_methods_", None) is None + self._make_methods(value) + self._make_specials() + elif name == "_disp_methods_": + assert self.__dict__.get("_disp_methods_", None) is None + self._make_dispmethods(value) + self._make_specials() + type.__setattr__(self, name, value) + + def _make_specials(self): + # This call installs methods that forward the Python protocols + # to COM protocols. + + def has_name(name): + # Determine whether a property or method named 'name' + # exists + if self._case_insensitive_: + return name.lower() in self.__map_case__ + return hasattr(self, name) + + # XXX These special methods should be generated by the code generator. + if has_name("Count"): + @patcher.Patch(self) + class _(object): + def __len__(self): + "Return the the 'self.Count' property." + return self.Count + + if has_name("Item"): + @patcher.Patch(self) + class _(object): + # 'Item' is the 'default' value. Make it available by + # calling the instance (Not sure this makes sense, but + # win32com does this also). + def __call__(self, *args, **kw): + "Return 'self.Item(*args, **kw)'" + return self.Item(*args, **kw) + + # does this make sense? It seems that all standard typelibs I've + # seen so far that support .Item also support ._NewEnum + @patcher.no_replace + def __getitem__(self, index): + "Return 'self.Item(index)'" + # Handle tuples and all-slice + if isinstance(index, tuple): + args = index + elif index == _all_slice: + args = () + else: + args = (index,) + + try: + result = self.Item(*args) + except COMError as err: + (hresult, text, details) = err.args + if hresult == -2147352565: # DISP_E_BADINDEX + raise IndexError("invalid index") + else: + raise + + # Note that result may be NULL COM pointer. There is no way + # to interpret this properly, so it is returned as-is. + + # Hm, should we call __ctypes_from_outparam__ on the + # result? + return result + + @patcher.no_replace + def __setitem__(self, index, value): + "Attempt 'self.Item[index] = value'" + try: + self.Item[index] = value + except COMError as err: + (hresult, text, details) = err.args + if hresult == -2147352565: # DISP_E_BADINDEX + raise IndexError("invalid index") + else: + raise + except TypeError: + msg = "%r object does not support item assignment" + raise TypeError(msg % type(self)) + + if has_name("_NewEnum"): + @patcher.Patch(self) + class _(object): + def __iter__(self): + "Return an iterator over the _NewEnum collection." + # This method returns a pointer to _some_ _NewEnum interface. + # It relies on the fact that the code generator creates next() + # methods for them automatically. 
+ # + # Better would maybe to return an object that + # implements the Python iterator protocol, and + # forwards the calls to the COM interface. + enum = self._NewEnum + if isinstance(enum, types.MethodType): + # _NewEnum should be a propget property, with dispid -4. + # + # Sometimes, however, it is a method. + enum = enum() + if hasattr(enum, "Next"): + return enum + # _NewEnum returns an IUnknown pointer, QueryInterface() it to + # IEnumVARIANT + from comtypes.automation import IEnumVARIANT + return enum.QueryInterface(IEnumVARIANT) + + def _make_case_insensitive(self): + # The __map_case__ dictionary maps lower case names to the + # names in the original spelling to enable case insensitive + # method and attribute access. + try: + self.__dict__["__map_case__"] + except KeyError: + d = {} + d.update(getattr(self, "__map_case__", {})) + self.__map_case__ = d + + def _make_dispmethods(self, methods): + if self._case_insensitive_: + self._make_case_insensitive() + + # create dispinterface methods and properties on the interface 'self' + properties = {} + for m in methods: + what, name, idlflags, restype, argspec = m + + # is it a property set or property get? + is_prop = False + + # argspec is a sequence of tuples, each tuple is: + # ([paramflags], type, name) + try: + memid = [x for x in idlflags if isinstance(x, int)][0] + except IndexError: + raise TypeError("no dispid found in idlflags") + if what == "DISPPROPERTY": # DISPPROPERTY + assert not argspec # XXX does not yet work for properties with parameters + accessor = self._disp_property(memid, idlflags) + is_prop = True + setattr(self, name, accessor) + elif what == "DISPMETHOD": # DISPMETHOD + # argspec is a tuple of (idlflags, type, name[, + # defval]) items. + method = self._disp_method(memid, name, idlflags, restype, argspec) +## not in 2.3 method.__name__ = name + if 'propget' in idlflags: + nargs = len(argspec) + properties.setdefault((name, nargs), [None, None, None])[0] = method + is_prop = True + elif 'propput' in idlflags: + nargs = len(argspec)-1 + properties.setdefault((name, nargs), [None, None, None])[1] = method + is_prop = True + elif 'propputref' in idlflags: + nargs = len(argspec)-1 + properties.setdefault((name, nargs), [None, None, None])[2] = method + is_prop = True + else: + setattr(self, name, method) + # COM is case insensitive. + # + # For a method, this is the real name. For a property, + # this is the name WITHOUT the _set_ or _get_ prefix. + if self._case_insensitive_: + self.__map_case__[name.lower()] = name + if is_prop: + self.__map_case__[name[5:].lower()] = name[5:] + + for (name, nargs), methods in list(properties.items()): + # methods contains [propget or None, propput or None, propputref or None] + if methods[1] is not None and methods[2] is not None: + # both propput and propputref. + # + # Create a setter method that examines the argument type + # and calls 'propputref' if it is an Object (in the VB + # sense), or call 'propput' otherwise. 
+ propput = methods[1] + propputref = methods[2] + def put_or_putref(self, *args): + if _is_object(args[-1]): + return propputref(self, *args) + else: + return propput(self, *args) + methods[1] = put_or_putref + del methods[2] + elif methods[2] is not None: + # use propputref + del methods[1] + else: + # use propput (if any) + del methods[2] + if nargs: + setattr(self, name, named_property("%s.%s" % (self.__name__, name), *methods)) + else: + assert len(methods) <= 2 + setattr(self, name, property(*methods)) + + # COM is case insensitive + if self._case_insensitive_: + self.__map_case__[name.lower()] = name + + # Some ideas, (not only) related to disp_methods: + # + # Should the functions/methods we create have restype and/or + # argtypes attributes? + + def _disp_method(self, memid, name, idlflags, restype, argspec): + if 'propget' in idlflags: + def getfunc(obj, *args, **kw): + return self.Invoke(obj, memid, _invkind=2, *args, **kw) # DISPATCH_PROPERTYGET + return getfunc + elif 'propput' in idlflags: + def putfunc(obj, *args, **kw): + return self.Invoke(obj, memid, _invkind=4, *args, **kw) # DISPATCH_PROPERTYPUT + return putfunc + elif 'propputref' in idlflags: + def putfunc(obj, *args, **kw): + return self.Invoke(obj, memid, _invkind=8, *args, **kw) # DISPATCH_PROPERTYPUTREF + return putfunc + # a first attempt to make use of the restype. Still, support + # for named arguments and default argument values should be + # added. + if hasattr(restype, "__com_interface__"): + interface = restype.__com_interface__ + def func(s, *args, **kw): + result = self.Invoke(s, memid, _invkind=1, *args, **kw) + if result is None: + return + return result.QueryInterface(interface) + else: + def func(obj, *args, **kw): + return self.Invoke(obj, memid, _invkind=1, *args, **kw) # DISPATCH_METHOD + return func + + def _disp_property(self, memid, idlflags): + # XXX doc string missing in property + def _get(obj): + return obj.Invoke(memid, _invkind=2) # DISPATCH_PROPERTYGET + if "readonly" in idlflags: + return property(_get) + def _set(obj, value): + # Detect whether to use DISPATCH_PROPERTYPUT or + # DISPATCH_PROPERTYPUTREF + invkind = 8 if _is_object(value) else 4 + return obj.Invoke(memid, value, _invkind=invkind) + return property(_get, _set) + + def __get_baseinterface_methodcount(self): + "Return the number of com methods in the base interfaces" + try: + result = 0 + for itf in self.mro()[1:-1]: + result += len(itf.__dict__["_methods_"]) + return result + except KeyError as err: + (name,) = err.args + if name == "_methods_": + raise TypeError("baseinterface '%s' has no _methods_" % itf.__name__) + raise + + def _fix_inout_args(self, func, argtypes, paramflags): + # This function provides a workaround for a bug in ctypes. + # [in, out] parameters must be converted with the argtype's + # .from_param() method BEFORE they are passed to the _ctypes + # build_callargs() function in Modules/_ctypes/_ctypes.c. + # + # For details see below. + # + # TODO: The workaround should be disabled when a ctypes + # version is used where the bug is fixed. + SIMPLETYPE = type(c_int) + BYREFTYPE = type(byref(c_int())) + def call_with_inout(self_, *args, **kw): + args = list(args) + # Indexed by order in the output + outargs = {} + outnum = 0 + for i, info in enumerate(paramflags): + direction = info[0] + if direction & 3 == 3: + # This is an [in, out] parameter. + # + # Determine name and required type of the parameter. 
+ name = info[1] + # [in, out] parameters are passed as pointers, + # this is the pointed-to type: + atyp = argtypes[i]._type_ + + # Get the actual parameter, either as positional or + # keyword arg. + try: + try: + v = args[i] + except IndexError: + v = kw[name] + except KeyError: + # no parameter was passed, make an empty one + # of the required type + v = atyp() + else: + # parameter was passed, call .from_param() to + # convert it to a ctypes type. + if getattr(v, "_type_", None) is atyp: + # Array of or pointer to type 'atyp' was + # passed, pointer to 'atyp' expected. + pass + elif type(atyp) is SIMPLETYPE: + # The from_param method of simple types + # (c_int, c_double, ...) returns a byref() + # object which we cannot use since later + # it will be wrapped in a pointer. Simply + # call the constructor with the argument + # in that case. + v = atyp(v) + else: + v = atyp.from_param(v) + assert not isinstance(v, BYREFTYPE) + outargs[outnum] = v + outnum += 1 + if len(args) > i: + args[i] = v + else: + kw[name] = v + elif direction & 2 == 2: + outnum += 1 + + rescode = func(self_, *args, **kw) + # If there is only a single output value, then do not expect it to + # be iterable. + if outnum == 1: # rescode is not iterable + if len(outargs) == 1: + rescode = rescode.__ctypes_from_outparam__() + return rescode + + rescode = list(rescode) + for outnum, o in list(outargs.items()): + rescode[outnum] = o.__ctypes_from_outparam__() + return rescode + return call_with_inout + + def _make_methods(self, methods): + if self._case_insensitive_: + self._make_case_insensitive() + + # we insist on an _iid_ in THIS class! + try: + iid = self.__dict__["_iid_"] + except KeyError: + raise AttributeError("this class must define an _iid_") + else: + iid = str(iid) +## if iid in com_interface_registry: +## # Warn when multiple interfaces are defined with identical iids. +## # This would also trigger if we reload() a module that contains +## # interface types, so suppress the warning in this case. +## other = com_interface_registry[iid] +## if self.__name__ != other.__name__ or self.__module__ != other.__module__: +## text = "Multiple interface defn: %s, %s" % (self, other) +## warnings.warn(text, UserWarning) + com_interface_registry[iid] = self + del iid + vtbl_offset = self.__get_baseinterface_methodcount() + + properties = {} + + # create private low level, and public high level methods + for i, item in enumerate(methods): + restype, name, argtypes, paramflags, idlflags, doc = item + # the function prototype + prototype = WINFUNCTYPE(restype, *argtypes) + + # a low level unbound method calling the com method. + # attach it with a private name (__com_AddRef, for example), + # so that custom method implementations can call it. + + # If the method returns a HRESULT, we pass the interface iid, + # so that we can request error info for the interface. 
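#   [Editor's note - not part of the upstream comtypes sources:] the extra
#   _iid_ argument handed to the ctypes prototype below is what allows
#   ctypes to fetch rich COM error information (via GetErrorInfo) when a
#   method returns a failure HRESULT, so the COMError raised for calls on
#   this interface can carry the server-supplied error text rather than
#   only the numeric HRESULT.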
+ if restype == HRESULT: +## print "%s.%s" % (self.__name__, name) + raw_func = prototype(i + vtbl_offset, name, None, self._iid_) + func = prototype(i + vtbl_offset, name, paramflags, self._iid_) + else: + raw_func = prototype(i + vtbl_offset, name, None, None) + func = prototype(i + vtbl_offset, name, paramflags, None) + setattr(self, + "_%s__com_%s" % (self.__name__, name), + instancemethod(raw_func, None, self)) + + if paramflags: + # see comment in the _fix_inout_args method + dirflags = [(p[0]&3) for p in paramflags] + if 3 in dirflags: +## fullname = "%s::%s" % (self.__name__, name) +## print "FIX %s" % fullname + func = self._fix_inout_args(func, argtypes, paramflags) + + # 'func' is a high level function calling the COM method + func.__doc__ = doc + try: + func.__name__ = name # for pyhelp + except TypeError: + # In Python 2.3, __name__ is a readonly attribute + pass + # make it an unbound method. Remember, 'self' is a type here. + mth = instancemethod(func, None, self) + + # is it a property set or property get? + is_prop = False + + # XXX Hm. What, when paramflags is None? + # Or does have '0' values? + # Seems we loose then, at least for properties... + + # The following code assumes that the docstrings for + # propget and propput are identical. + if "propget" in idlflags: + assert name.startswith("_get_") + nargs = len([flags for flags in paramflags + if flags[0] & 7 in (0, 1)]) + # XXX or should we do this? + # nargs = len([flags for flags in paramflags + # if (flags[0] & 1) or (flags[0] == 0)]) + propname = name[len("_get_"):] + properties.setdefault((propname, doc, nargs), [None, None, None])[0] = func + is_prop = True + elif "propput" in idlflags: + assert name.startswith("_set_") + nargs = len([flags for flags in paramflags + if flags[0] & 7 in (0, 1)]) - 1 + propname = name[len("_set_"):] + properties.setdefault((propname, doc, nargs), [None, None, None])[1] = func + is_prop = True + elif "propputref" in idlflags: + assert name.startswith("_setref_") + nargs = len([flags for flags in paramflags + if flags[0] & 7 in (0, 1)]) - 1 + propname = name[len("_setref_"):] + properties.setdefault((propname, doc, nargs), [None, None, None])[2] = func + is_prop = True + + # We install the method in the class, except when it's a + # property accessor. And we make sure we don't overwrite + # a property that's already present in the class. + if not is_prop: + if hasattr(self, name): + setattr(self, "_" + name, mth) + else: + setattr(self, name, mth) + + # COM is case insensitive. + # + # For a method, this is the real name. For a property, + # this is the name WITHOUT the _set_ or _get_ prefix. + if self._case_insensitive_: + self.__map_case__[name.lower()] = name + if is_prop: + self.__map_case__[name[5:].lower()] = name[5:] + + # create public properties / attribute accessors + for (name, doc, nargs), methods in list(properties.items()): + # methods contains [propget or None, propput or None, propputref or None] + if methods[1] is not None and methods[2] is not None: + # both propput and propputref. + # + # Create a setter method that examines the argument type + # and calls 'propputref' if it is an Object (in the VB + # sense), or call 'propput' otherwise. 
+ propput = methods[1] + propputref = methods[2] + def put_or_putref(self, *args): + if _is_object(args[-1]): + return propputref(self, *args) + else: + return propput(self, *args) + methods[1] = put_or_putref + del methods[2] + elif methods[2] is not None: + # use propputref + del methods[1] + else: + # use propput (if any) + del methods[2] + if nargs == 0: + prop = property(*methods + [None, doc]) + else: + # Hm, must be a descriptor where the __get__ method + # returns a bound object having __getitem__ and + # __setitem__ methods. + prop = named_property("%s.%s" % (self.__name__, name), *methods + [doc]) + # Again, we should not overwrite class attributes that are + # already present. + if hasattr(self, name): + setattr(self, "_" + name, prop) + else: + setattr(self, name, prop) + + # COM is case insensitive + if self._case_insensitive_: + self.__map_case__[name.lower()] = name + + +################################################################ +# helper classes for COM propget / propput +# Should they be implemented in C for speed? + +_all_slice = slice(None, None, None) + + +class bound_named_property(object): + def __init__(self, name, getter, setter, im_inst): + self.name = name + self.im_inst = im_inst + self.getter = getter + self.setter = setter + + def __getitem__(self, index): + if self.getter is None: + raise TypeError("unsubscriptable object") + if isinstance(index, tuple): + return self.getter(self.im_inst, *index) + elif index == _all_slice: + return self.getter(self.im_inst) + else: + return self.getter(self.im_inst, index) + + def __call__(self, *args): + if self.getter is None: + raise TypeError("object is not callable") + return self.getter(self.im_inst, *args) + + def __setitem__(self, index, value): + if self.setter is None: + raise TypeError("object does not support item assignment") + if isinstance(index, tuple): + self.setter(self.im_inst, *(index + (value,))) + elif index == _all_slice: + self.setter(self.im_inst, value) + else: + self.setter(self.im_inst, index, value) + + def __repr__(self): + return "<bound_named_property %r at %x>" % (self.name, id(self)) + + def __iter__(self): + """ Explicitly disallow iteration. """ + msg = "%r is not iterable" % self.name + raise TypeError(msg) + + + +class named_property(object): + def __init__(self, name, fget=None, fset=None, doc=None): + self.name = name + self.getter = fget + self.setter = fset + self.__doc__ = doc + + def __get__(self, im_inst, im_class=None): + if im_inst is None: + return self + return bound_named_property(self.name, self.getter, self.setter, im_inst) + + # Make this a data descriptor + def __set__(self, obj): + raise AttributeError("Unsettable attribute") + + def __repr__(self): + return "<named_property %r at %x>" % (self.name, id(self)) + +################################################################ + +class _compointer_meta(type(c_void_p), _cominterface_meta): + "metaclass for COM interface pointer classes" + # no functionality, but needed to avoid a metaclass conflict + +class _compointer_base(c_void_p, metaclass=_compointer_meta): + "base class for COM interface pointer classes" + def __del__(self, _debug=logger.debug): + "Release the COM refcount we own." + if self: + # comtypes calls CoUnititialize() when the atexit handlers + # runs. CoUninitialize() cleans up the COM objects that + # are still alive. Python COM pointers may still be + # present but we can no longer call Release() on them - + # this may give a protection fault. So we need the + # _com_shutting_down flag. 
+ # + if not type(self)._com_shutting_down: + _debug("Release %s", self) + self.Release() + + def __cmp__(self, other): + """Compare pointers to COM interfaces.""" + # COM identity rule + # + # XXX To compare COM interface pointers, should we + # automatically QueryInterface for IUnknown on both items, and + # compare the pointer values? + if not isinstance(other, _compointer_base): + return 1 + + # get the value property of the c_void_p baseclass, this is the pointer value + return cmp(super(_compointer_base, self).value, super(_compointer_base, other).value) + + def __eq__(self, other): + if not isinstance(other, _compointer_base): + return False + # get the value property of the c_void_p baseclass, this is the pointer value + return super(_compointer_base, self).value == super(_compointer_base, other).value + + def __hash__(self): + """Return the hash value of the pointer.""" + # hash the pointer values + return hash(super(_compointer_base, self).value) + + # redefine the .value property; return the object itself. + def __get_value(self): + return self + value = property(__get_value, doc="""Return self.""") + + def __repr__(self): + ptr = super(_compointer_base, self).value + return "<%s ptr=0x%x at %x>" % (self.__class__.__name__, ptr or 0, id(self)) + + # This fixes the problem when there are multiple python interface types + # wrapping the same COM interface. This could happen because some interfaces + # are contained in multiple typelibs. + # + # It also allows to pass a CoClass instance to an api + # expecting a COM interface. + def from_param(klass, value): + """Convert 'value' into a COM pointer to the interface. + + This method accepts a COM pointer, or a CoClass instance + which is QueryInterface()d.""" + if value is None: + return None + # CLF: 2013-01-18 + # A default value of 0, meaning null, can pass through to here. + if value == 0: + return None + if isinstance(value, klass): + return value + # multiple python interface types for the same COM interface. + # Do we need more checks here? + if klass._iid_ == getattr(value, "_iid_", None): + return value + # Accept an CoClass instance which exposes the interface required. + try: + table = value._com_pointers_ + except AttributeError: + pass + else: + try: + # a kind of QueryInterface + return table[klass._iid_] + except KeyError: + raise TypeError("Interface %s not supported" % klass._iid_) + return value.QueryInterface(klass.__com_interface__) + from_param = classmethod(from_param) + +################################################################ + +from ctypes import _SimpleCData + +class BSTR(_SimpleCData): + "The windows BSTR data type" + _type_ = "X" + _needsfree = False + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self.value) + + def __ctypes_from_outparam__(self): + self._needsfree = True + return self.value + + def __del__(self, _free=windll.oleaut32.SysFreeString): + # Free the string if self owns the memory + # or if instructed by __ctypes_from_outparam__. + if self._b_base_ is None \ + or self._needsfree: + _free(self) + + def from_param(cls, value): + """Convert into a foreign function call parameter.""" + if isinstance(value, cls): + return value + # Although the builtin SimpleCData.from_param call does the + # right thing, it doesn't ensure that SysFreeString is called + # on destruction. 
+ return cls(value) + from_param = classmethod(from_param) + +################################################################ +# IDL stuff + +class helpstring(str): + "Specifies the helpstring for a COM method or property." + +class defaultvalue(object): + "Specifies the default value for parameters marked optional." + def __init__(self, value): + self.value = value + +class dispid(int): + "Specifies the DISPID of a method or property." + +# XXX STDMETHOD, COMMETHOD, DISPMETHOD, and DISPPROPERTY should return +# instances with methods, or at least accessors instead of tuple. + +def STDMETHOD(restype, name, argtypes=()): + "Specifies a COM method slot without idlflags" + # restype, name, argtypes, paramflags, idlflags, docstring + return restype, name, argtypes, None, (), None + +def DISPMETHOD(idlflags, restype, name, *argspec): + "Specifies a method of a dispinterface" + return "DISPMETHOD", name, idlflags, restype, argspec + +def DISPPROPERTY(idlflags, proptype, name): + "Specifies a property of a dispinterface" + return "DISPPROPERTY", name, idlflags, proptype, ()#, argspec + +# COMMETHOD returns: +# restype, methodname, tuple(argtypes), tuple(paramflags), tuple(idlflags), helptext +# +# paramflags is a sequence of (flags (integer), paramname (string) +# tuple(idlflags) is for the method itself: (dispid, 'readonly') +# +# Example: (HRESULT, 'Width', (c_long,), (2, 'rhs'), (4, 'readonly'), None) + +## sample generated code: +## DISPPROPERTY([5, 'readonly'], OLE_YSIZE_HIMETRIC, 'Height'), +## DISPMETHOD([6], None, 'Render', +## ( [], c_int, 'hdc' ), +## ( [], c_int, 'x' ), +## ( [], c_int, 'y' )) + +################################################################ + +_PARAMFLAGS = { + "in": 1, + "out": 2, + "lcid": 4, + "retval": 8, + "optional": 16, + } + +def _encode_idl(names): + # sum up all values found in _PARAMFLAGS, ignoring all others. + return sum([_PARAMFLAGS.get(n, 0) for n in names]) + +_NOTHING = object() +def _unpack_argspec(idl, typ, name=None, defval=_NOTHING): + return idl, typ, name, defval + +def COMMETHOD(idlflags, restype, methodname, *argspec): + """Specifies a COM method slot with idlflags. + + XXX should explain the sematics of the arguments. + """ + paramflags = [] + argtypes = [] + + # collect all helpstring instances + # We should suppress docstrings when Python is started with -OO + helptext = [t for t in idlflags if isinstance(t, helpstring)] + # join them together(does this make sense?) and replace by None if empty. 
+ helptext = "".join(helptext) or None + + from comtypes.automation import VARIANT + + for item in argspec: + idl, typ, argname, defval = _unpack_argspec(*item) + pflags = _encode_idl(idl) + if "optional" in idl: + if defval is _NOTHING: + if typ is VARIANT: + defval = VARIANT.missing + elif typ is POINTER(VARIANT): + defval = pointer(VARIANT.missing) + else: +## msg = ("'optional' only allowed for VARIANT and VARIANT*, not for %s" +## % typ.__name__) +## warnings.warn(msg, IDLWarning, stacklevel=2) + defval = typ() + if defval is _NOTHING: + paramflags.append((pflags, argname)) + else: + paramflags.append((pflags, argname, defval)) + argtypes.append(typ) + if "propget" in idlflags: + methodname = "_get_%s" % methodname + elif "propput" in idlflags: + methodname = "_set_%s" % methodname + elif "propputref" in idlflags: + methodname = "_setref_%s" % methodname + return restype, methodname, tuple(argtypes), tuple(paramflags), tuple(idlflags), helptext + +################################################################ +# IUnknown, the root of all evil... + +class IUnknown(object, metaclass=_cominterface_meta): + """The most basic COM interface. + + Each subclasses of IUnknown must define these class attributes: + + _iid_ - a GUID instance defining the identifier of this interface + + _methods_ - a list of methods for this interface. + + The _methods_ list must in VTable order. Methods are specified + with STDMETHOD or COMMETHOD calls. + """ + _case_insensitive_ = False + _iid_ = GUID("{00000000-0000-0000-C000-000000000046}") + + _methods_ = [ + STDMETHOD(HRESULT, "QueryInterface", + [POINTER(GUID), POINTER(c_void_p)]), + STDMETHOD(c_ulong, "AddRef"), + STDMETHOD(c_ulong, "Release") + ] + + def QueryInterface(self, interface, iid=None): + "QueryInterface(interface) -> instance" + p = POINTER(interface)() + if iid is None: + iid = interface._iid_ + self.__com_QueryInterface(byref(iid), byref(p)) + clsid = self.__dict__.get('__clsid') + if clsid is not None: + p.__dict__['__clsid'] = clsid + return p + + # these are only so that they get a docstring. + # XXX There should be other ways to install a docstring. + def AddRef(self): + "Increase the internal refcount by one and return it." + return self.__com_AddRef() + + def Release(self): + "Decrease the internal refcount by one and return it." + return self.__com_Release() + +# IPersist is a trivial interface, which allows to ask an object about +# its clsid. 
+class IPersist(IUnknown): + _iid_ = GUID('{0000010C-0000-0000-C000-000000000046}') + _idlflags_ = [] + _methods_ = [ + COMMETHOD([], HRESULT, 'GetClassID', + ( ['out'], POINTER(GUID), 'pClassID' )), + ] + +class IServiceProvider(IUnknown): + _iid_ = GUID('{6D5140C1-7436-11CE-8034-00AA006009FA}') + + # Overridden QueryService to make it nicer to use (passing it an + # interface and it returns a pointer to that interface) + def QueryService(self, serviceIID, interface): + p = POINTER(interface)() + self._QueryService(byref(serviceIID), byref(interface._iid_), byref(p)) + return p + + _methods_ = [ + COMMETHOD([], HRESULT, 'QueryService', + ( ['in'], POINTER(GUID), 'guidService' ), + ( ['in'], POINTER(GUID), 'riid' ), + ( ['in'], POINTER(c_void_p), 'ppvObject' )) + ] + +################################################################ +def CoGetObject(displayname, interface): + """Convert a displayname to a moniker, then bind and return the object + identified by the moniker.""" + if interface is None: + interface = IUnknown + punk = POINTER(interface)() + # Do we need a way to specify the BIND_OPTS parameter? + _ole32.CoGetObject(str(displayname), + None, + byref(interface._iid_), + byref(punk)) + return punk + +def CoCreateInstance(clsid, interface=None, clsctx=None, punkouter=None): + """The basic windows api to create a COM class object and return a + pointer to an interface. + """ + if clsctx is None: + clsctx = CLSCTX_SERVER + if interface is None: + interface = IUnknown + p = POINTER(interface)() + iid = interface._iid_ + _ole32.CoCreateInstance(byref(clsid), punkouter, clsctx, byref(iid), byref(p)) + return p + +def CoGetClassObject(clsid, clsctx=None, pServerInfo=None, interface=None): + if clsctx is None: + clsctx = CLSCTX_SERVER + if interface is None: + import comtypes.server + interface = comtypes.server.IClassFactory + p = POINTER(interface)() + _CoGetClassObject(clsid, + clsctx, + pServerInfo, + interface._iid_, + byref(p)) + return p + +def GetActiveObject(clsid, interface=None): + """Retrieves a pointer to a running object""" + p = POINTER(IUnknown)() + oledll.oleaut32.GetActiveObject(byref(clsid), None, byref(p)) + if interface is not None: + p = p.QueryInterface(interface) + return p + +class MULTI_QI(Structure): + _fields_ = [("pIID", POINTER(GUID)), + ("pItf", POINTER(c_void_p)), + ("hr", HRESULT)] + +class _COAUTHIDENTITY(Structure): + _fields_ = [ + ('User', POINTER(c_ushort)), + ('UserLength', c_ulong), + ('Domain', POINTER(c_ushort)), + ('DomainLength', c_ulong), + ('Password', POINTER(c_ushort)), + ('PasswordLength', c_ulong), + ('Flags', c_ulong), + ] +COAUTHIDENTITY = _COAUTHIDENTITY + +class _COAUTHINFO(Structure): + _fields_ = [ + ('dwAuthnSvc', c_ulong), + ('dwAuthzSvc', c_ulong), + ('pwszServerPrincName', c_wchar_p), + ('dwAuthnLevel', c_ulong), + ('dwImpersonationLevel', c_ulong), + ('pAuthIdentityData', POINTER(_COAUTHIDENTITY)), + ('dwCapabilities', c_ulong), + ] +COAUTHINFO = _COAUTHINFO + +class _COSERVERINFO(Structure): + _fields_ = [ + ('dwReserved1', c_ulong), + ('pwszName', c_wchar_p), + ('pAuthInfo', POINTER(_COAUTHINFO)), + ('dwReserved2', c_ulong), + ] +COSERVERINFO = _COSERVERINFO +_CoGetClassObject = _ole32.CoGetClassObject +_CoGetClassObject.argtypes = [POINTER(GUID), DWORD, POINTER(COSERVERINFO), + POINTER(GUID), POINTER(c_void_p)] + +class tagBIND_OPTS(Structure): + _fields_ = [ + ('cbStruct', c_ulong), + ('grfFlags', c_ulong), + ('grfMode', c_ulong), + ('dwTickCountDeadline', c_ulong) + ] +# XXX Add __init__ which sets cbStruct? 
+BIND_OPTS = tagBIND_OPTS + +class tagBIND_OPTS2(Structure): + _fields_ = [ + ('cbStruct', c_ulong), + ('grfFlags', c_ulong), + ('grfMode', c_ulong), + ('dwTickCountDeadline', c_ulong), + ('dwTrackFlags', c_ulong), + ('dwClassContext', c_ulong), + ('locale', c_ulong), + ('pServerInfo', POINTER(_COSERVERINFO)), + ] +# XXX Add __init__ which sets cbStruct? +BINDOPTS2 = tagBIND_OPTS2 + +#Structures for security setups +######################################### +class _SEC_WINNT_AUTH_IDENTITY(Structure): + _fields_ = [ + ('User', POINTER(c_ushort)), + ('UserLength', c_ulong), + ('Domain', POINTER(c_ushort)), + ('DomainLength', c_ulong), + ('Password', POINTER(c_ushort)), + ('PasswordLength', c_ulong), + ('Flags', c_ulong), + ] +SEC_WINNT_AUTH_IDENTITY = _SEC_WINNT_AUTH_IDENTITY + +class _SOLE_AUTHENTICATION_INFO(Structure): + _fields_ = [ + ('dwAuthnSvc', c_ulong), + ('dwAuthzSvc', c_ulong), + ('pAuthInfo', POINTER(_SEC_WINNT_AUTH_IDENTITY)), + ] +SOLE_AUTHENTICATION_INFO = _SOLE_AUTHENTICATION_INFO + +class _SOLE_AUTHENTICATION_LIST(Structure): + _fields_ = [ + ('cAuthInfo', c_ulong), + ('pAuthInfo', POINTER(_SOLE_AUTHENTICATION_INFO)), + ] +SOLE_AUTHENTICATION_LIST = _SOLE_AUTHENTICATION_LIST + +def CoCreateInstanceEx(clsid, interface=None, + clsctx=None, + machine=None, + pServerInfo=None): + """The basic windows api to create a COM class object and return a + pointer to an interface, possibly on another machine. + + Passing both "machine" and "pServerInfo" results in a ValueError. + + """ + if clsctx is None: + clsctx=CLSCTX_LOCAL_SERVER|CLSCTX_REMOTE_SERVER + + if pServerInfo is not None: + if machine is not None: + msg = "Can not specify both machine name and server info" + raise ValueError(msg) + elif machine is not None: + serverinfo = COSERVERINFO() + serverinfo.pwszName = machine + pServerInfo = byref(serverinfo) + + if interface is None: + interface = IUnknown + multiqi = MULTI_QI() + multiqi.pIID = pointer(interface._iid_) + _ole32.CoCreateInstanceEx(byref(clsid), + None, + clsctx, + pServerInfo, + 1, + byref(multiqi)) + return cast(multiqi.pItf, POINTER(interface)) + + +################################################################ +from comtypes._comobject import COMObject + +# What's a coclass? 
+# a POINTER to a coclass is allowed as parameter in a function declaration: +# http://msdn.microsoft.com/library/en-us/midl/midl/oleautomation.asp + +from comtypes._meta import _coclass_meta + +class CoClass(COMObject, metaclass=_coclass_meta): + pass +################################################################ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/GUID.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/GUID.cpython-36.pyc new file mode 100644 index 00000000..70f35eb5 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/GUID.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..df63f179 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/_comobject.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/_comobject.cpython-36.pyc new file mode 100644 index 00000000..2b5ad6fc Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/_comobject.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/_meta.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/_meta.cpython-36.pyc new file mode 100644 index 00000000..beda350a Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/_meta.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/_safearray.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/_safearray.cpython-36.pyc new file mode 100644 index 00000000..13d240d3 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/_safearray.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/automation.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/automation.cpython-36.pyc new file mode 100644 index 00000000..ac59a9c7 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/automation.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/connectionpoints.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/connectionpoints.cpython-36.pyc new file mode 100644 index 00000000..38dd5afb Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/connectionpoints.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/errorinfo.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/errorinfo.cpython-36.pyc new file mode 100644 index 00000000..040ea316 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/errorinfo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/git.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/git.cpython-36.pyc new file mode 100644 index 00000000..4ff919da Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/git.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/hresult.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/hresult.cpython-36.pyc new file mode 100644 index 00000000..e45018a9 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/hresult.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/logutil.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/logutil.cpython-36.pyc new file mode 100644 index 00000000..ead71874 Binary files /dev/null 
and b/venv/Lib/site-packages/comtypes/__pycache__/logutil.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/messageloop.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/messageloop.cpython-36.pyc new file mode 100644 index 00000000..157edb58 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/messageloop.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/npsupport.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/npsupport.cpython-36.pyc new file mode 100644 index 00000000..b7b1d506 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/npsupport.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/patcher.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/patcher.cpython-36.pyc new file mode 100644 index 00000000..cf2bea0d Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/patcher.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/persist.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/persist.cpython-36.pyc new file mode 100644 index 00000000..1e4b041b Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/persist.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/safearray.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/safearray.cpython-36.pyc new file mode 100644 index 00000000..9560974d Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/safearray.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/shelllink.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/shelllink.cpython-36.pyc new file mode 100644 index 00000000..1091fcfb Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/shelllink.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/typeinfo.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/typeinfo.cpython-36.pyc new file mode 100644 index 00000000..17fb8adf Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/typeinfo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/util.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/util.cpython-36.pyc new file mode 100644 index 00000000..0c640b37 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/util.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/__pycache__/viewobject.cpython-36.pyc b/venv/Lib/site-packages/comtypes/__pycache__/viewobject.cpython-36.pyc new file mode 100644 index 00000000..2873b7e4 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/__pycache__/viewobject.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/_comobject.py b/venv/Lib/site-packages/comtypes/_comobject.py new file mode 100644 index 00000000..2370f977 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/_comobject.py @@ -0,0 +1,771 @@ +from ctypes import ( + FormatError, POINTER, Structure, WINFUNCTYPE, byref, c_long, c_void_p, + oledll, pointer, windll +) +from _ctypes import CopyComPointer +import logging +import os + +from comtypes import COMError, ReturnHRESULT, instancemethod, _encode_idl +from comtypes.errorinfo import ISupportErrorInfo, ReportException, ReportError +from comtypes import IPersist +from comtypes.hresult import ( + DISP_E_BADINDEX, DISP_E_MEMBERNOTFOUND, E_FAIL, E_NOINTERFACE, + E_INVALIDARG, E_NOTIMPL, RPC_E_CHANGED_MODE, 
S_FALSE, S_OK +) +from comtypes.typeinfo import IProvideClassInfo, IProvideClassInfo2 + + +logger = logging.getLogger(__name__) +_debug = logger.debug +_warning = logger.warning +_error = logger.error + +################################################################ +# COM object implementation + +# so we don't have to import comtypes.automation +DISPATCH_METHOD = 1 +DISPATCH_PROPERTYGET = 2 +DISPATCH_PROPERTYPUT = 4 +DISPATCH_PROPERTYPUTREF = 8 + + +class E_NotImplemented(Exception): + """COM method is not implemented""" + + +def HRESULT_FROM_WIN32(errcode): + "Convert a Windows error code into a HRESULT value." + if errcode is None: + return 0x80000000 + if errcode & 0x80000000: + return errcode + return (errcode & 0xFFFF) | 0x80070000 + + +def winerror(exc): + """Return the windows error code from a WindowsError or COMError + instance.""" + if isinstance(exc, COMError): + return exc.hresult + elif isinstance(exc, WindowsError): + code = exc.winerror + if isinstance(code, int): + return code + # Sometimes, a WindowsError instance has no error code. An access + # violation raised by ctypes has only text, for example. In this + # cases we return a generic error code. + return E_FAIL + raise TypeError("Expected comtypes.COMERROR or WindowsError instance, got %s" % type(exc).__name__) + + +def _do_implement(interface_name, method_name): + def _not_implemented(*args): + """Return E_NOTIMPL because the method is not implemented.""" + _debug("unimplemented method %s_%s called", interface_name, + method_name) + return E_NOTIMPL + return _not_implemented + + +def catch_errors(obj, mth, paramflags, interface, mthname): + clsid = getattr(obj, "_reg_clsid_", None) + + def call_with_this(*args, **kw): + try: + result = mth(*args, **kw) + except ReturnHRESULT as err: + (hresult, text) = err.args + return ReportError(text, iid=interface._iid_, clsid=clsid, + hresult=hresult) + except (COMError, WindowsError) as details: + _error("Exception in %s.%s implementation:", interface.__name__, + mthname, exc_info=True) + return HRESULT_FROM_WIN32(winerror(details)) + except E_NotImplemented: + _warning("Unimplemented method %s.%s called", interface.__name__, + mthname) + return E_NOTIMPL + except: + _error("Exception in %s.%s implementation:", interface.__name__, + mthname, exc_info=True) + return ReportException(E_FAIL, interface._iid_, clsid=clsid) + if result is None: + return S_OK + return result + if paramflags is None: + has_outargs = False + else: + has_outargs = bool([x[0] for x in paramflags + if x[0] & 2]) + call_with_this.has_outargs = has_outargs + return call_with_this + + +################################################################ + +def hack(inst, mth, paramflags, interface, mthname): + if paramflags is None: + return catch_errors(inst, mth, paramflags, interface, mthname) + code = mth.__code__ + if code.co_varnames[1:2] == ("this",): + return catch_errors(inst, mth, paramflags, interface, mthname) + dirflags = [f[0] for f in paramflags] + # An argument is an input arg either if flags are NOT set in the + # idl file, or if the flags contain 'in'. 
In other words, the + # direction flag is either exactly '0' or has the '1' bit set: + # Output arguments have flag '2' + + args_out_idx = [] + args_in_idx = [] + for i, a in enumerate(dirflags): + if a&2: + args_out_idx.append(i) + if a&1 or a==0: + args_in_idx.append(i) + args_out = len(args_out_idx) + + ## XXX Remove this: +## if args_in != code.co_argcount - 1: +## return catch_errors(inst, mth, interface, mthname) + + clsid = getattr(inst, "_reg_clsid_", None) + + def call_without_this(this, *args): + # Method implementations could check for and return E_POINTER + # themselves. Or an error will be raised when + # 'outargs[i][0] = value' is executed. +## for a in outargs: +## if not a: +## return E_POINTER + + #make argument list for handler by index array built above + inargs = [] + for a in args_in_idx: + inargs.append(args[a]) + try: + result = mth(*inargs) + if args_out == 1: + args[args_out_idx[0]][0] = result + elif args_out != 0: + if len(result) != args_out: + msg = "Method should have returned a %s-tuple" % args_out + raise ValueError(msg) + for i, value in enumerate(result): + args[args_out_idx[i]][0] = value + except ReturnHRESULT as err: + (hresult, text) = err.args + return ReportError(text, iid=interface._iid_, clsid=clsid, + hresult=hresult) + except COMError as err: + (hr, text, details) = err.args + _error("Exception in %s.%s implementation:", interface.__name__, + mthname, exc_info=True) + try: + descr, source, helpfile, helpcontext, progid = details + except (ValueError, TypeError): + msg = str(details) + else: + msg = "%s: %s" % (source, descr) + hr = HRESULT_FROM_WIN32(hr) + return ReportError(msg, iid=interface._iid_, clsid=clsid, + hresult=hr) + except WindowsError as details: + _error("Exception in %s.%s implementation:", interface.__name__, + mthname, exc_info=True) + hr = HRESULT_FROM_WIN32(winerror(details)) + return ReportException(hr, interface._iid_, clsid=clsid) + except E_NotImplemented: + _warning("Unimplemented method %s.%s called", interface.__name__, + mthname) + return E_NOTIMPL + except: + _error("Exception in %s.%s implementation:", interface.__name__, + mthname, exc_info=True) + return ReportException(E_FAIL, interface._iid_, clsid=clsid) + return S_OK + if args_out: + call_without_this.has_outargs = True + return call_without_this + + +class _MethodFinder(object): + def __init__(self, inst): + self.inst = inst + # map lower case names to names with correct spelling. 
+ self.names = dict([(n.lower(), n) for n in dir(inst)]) + + def get_impl(self, interface, mthname, paramflags, idlflags): + mth = self.find_impl(interface, mthname, paramflags, idlflags) + if mth is None: + return _do_implement(interface.__name__, mthname) + return hack(self.inst, mth, paramflags, interface, mthname) + + def find_method(self, fq_name, mthname): + # Try to find a method, first with the fully qualified name + # ('IUnknown_QueryInterface'), if that fails try the simple + # name ('QueryInterface') + try: + return getattr(self.inst, fq_name) + except AttributeError: + pass + return getattr(self.inst, mthname) + + def find_impl(self, interface, mthname, paramflags, idlflags): + fq_name = "%s_%s" % (interface.__name__, mthname) + if interface._case_insensitive_: + # simple name, like 'QueryInterface' + mthname = self.names.get(mthname.lower(), mthname) + # qualified name, like 'IUnknown_QueryInterface' + fq_name = self.names.get(fq_name.lower(), fq_name) + + try: + return self.find_method(fq_name, mthname) + except AttributeError: + pass + propname = mthname[5:] # strip the '_get_' or '_set' prefix + if interface._case_insensitive_: + propname = self.names.get(propname.lower(), propname) + # propput and propget is done with 'normal' attribute access, + # but only for COM properties that do not take additional + # arguments: + + if "propget" in idlflags and len(paramflags) == 1: + return self.getter(propname) + if "propput" in idlflags and len(paramflags) == 1: + return self.setter(propname) + _debug("%r: %s.%s not implemented", self.inst, interface.__name__, + mthname) + return None + + def setter(self, propname): + # + def set(self, value): + try: + # XXX this may not be correct is the object implements + # _get_PropName but not _set_PropName + setattr(self, propname, value) + except AttributeError: + raise E_NotImplemented() + return instancemethod(set, self.inst, type(self.inst)) + + def getter(self, propname): + def get(self): + try: + return getattr(self, propname) + except AttributeError: + raise E_NotImplemented() + return instancemethod(get, self.inst, type(self.inst)) + + +def _create_vtbl_type(fields, itf): + try: + return _vtbl_types[fields] + except KeyError: + class Vtbl(Structure): + _fields_ = fields + Vtbl.__name__ = "Vtbl_%s" % itf.__name__ + _vtbl_types[fields] = Vtbl + return Vtbl + +# Ugh. Another type cache to avoid leaking types. 
+_vtbl_types = {} + +################################################################ + +try: + _InterlockedIncrement = windll.kernel32.InterlockedIncrement + _InterlockedDecrement = windll.kernel32.InterlockedDecrement +except AttributeError: + import threading + _lock = threading.Lock() + _acquire = _lock.acquire + _release = _lock.release + # win 64 doesn't have these functions + + def _InterlockedIncrement(ob): + _acquire() + refcnt = ob.value + 1 + ob.value = refcnt + _release() + return refcnt + + def _InterlockedDecrement(ob): + _acquire() + refcnt = ob.value - 1 + ob.value = refcnt + _release() + return refcnt +else: + _InterlockedIncrement.argtypes = [POINTER(c_long)] + _InterlockedDecrement.argtypes = [POINTER(c_long)] + _InterlockedIncrement.restype = c_long + _InterlockedDecrement.restype = c_long + + +class LocalServer(object): + + _queue = None + + def run(self, classobjects): + # Use windll instead of oledll so that we don't get an + # exception on a FAILED hresult: + result = windll.ole32.CoInitialize(None) + if RPC_E_CHANGED_MODE == result: + # we're running in MTA: no message pump needed + _debug("Server running in MTA") + self.run_mta() + else: + # we're running in STA: need a message pump + _debug("Server running in STA") + if result >= 0: + # we need a matching CoUninitialize() call for a successful + # CoInitialize(). + windll.ole32.CoUninitialize() + self.run_sta() + + for obj in classobjects: + obj._revoke_class() + + def run_sta(self): + from comtypes import messageloop + messageloop.run() + + def run_mta(self): + import queue + self._queue = queue.Queue() + self._queue.get() + + def Lock(self): + oledll.ole32.CoAddRefServerProcess() + + def Unlock(self): + rc = oledll.ole32.CoReleaseServerProcess() + if rc == 0: + if self._queue: + self._queue.put(42) + else: + windll.user32.PostQuitMessage(0) + + +class InprocServer(object): + + def __init__(self): + self.locks = c_long(0) + + def Lock(self): + _InterlockedIncrement(self.locks) + + def Unlock(self): + _InterlockedDecrement(self.locks) + + def DllCanUnloadNow(self): + if self.locks.value: + return S_FALSE + if COMObject._instances_: + return S_FALSE + return S_OK + + +class COMObject(object): + _instances_ = {} + + def __new__(cls, *args, **kw): + self = super(COMObject, cls).__new__(cls) + if isinstance(self, c_void_p): + # We build the VTables only for direct instances of + # CoClass, not for POINTERs to CoClass. + return self + if hasattr(self, "_com_interfaces_"): + self.__prepare_comobject() + return self + + def __prepare_comobject(self): + # When a CoClass instance is created, COM pointers to all + # interfaces are created. Also, the CoClass must be kept alive as + # until the COM reference count drops to zero, even if no Python + # code keeps a reference to the object. + # + # The _com_pointers_ instance variable maps string interface iids + # to C compatible COM pointers. + self._com_pointers_ = {} + # COM refcount starts at zero. + self._refcnt = c_long(0) + + # Some interfaces have a default implementation in COMObject: + # - ISupportErrorInfo + # - IPersist (if the subclass has a _reg_clsid_ attribute) + # - IProvideClassInfo (if the subclass has a _reg_clsid_ attribute) + # - IProvideClassInfo2 (if the subclass has a _outgoing_interfaces_ + # attribute) + # + # Add these if they are not listed in _com_interfaces_. 
+ interfaces = tuple(self._com_interfaces_) + if ISupportErrorInfo not in interfaces: + interfaces += (ISupportErrorInfo,) + if hasattr(self, "_reg_typelib_"): + from comtypes.typeinfo import LoadRegTypeLib + self._COMObject__typelib = LoadRegTypeLib(*self._reg_typelib_) + if hasattr(self, "_reg_clsid_"): + if IProvideClassInfo not in interfaces: + interfaces += (IProvideClassInfo,) + if hasattr(self, "_outgoing_interfaces_") and \ + IProvideClassInfo2 not in interfaces: + interfaces += (IProvideClassInfo2,) + if hasattr(self, "_reg_clsid_"): + if IPersist not in interfaces: + interfaces += (IPersist,) + for itf in interfaces[::-1]: + self.__make_interface_pointer(itf) + + def __make_interface_pointer(self, itf): + methods = [] # method implementations + fields = [] # (name, prototype) for virtual function table + iids = [] # interface identifiers. + # iterate over interface inheritance in reverse order to build the + # virtual function table, and leave out the 'object' base class. + finder = self._get_method_finder_(itf) + for interface in itf.__mro__[-2::-1]: + iids.append(interface._iid_) + for m in interface._methods_: + restype, mthname, argtypes, paramflags, idlflags, helptext = m + proto = WINFUNCTYPE(restype, c_void_p, *argtypes) + fields.append((mthname, proto)) + mth = finder.get_impl(interface, mthname, paramflags, idlflags) + methods.append(proto(mth)) + Vtbl = _create_vtbl_type(tuple(fields), itf) + vtbl = Vtbl(*methods) + for iid in iids: + self._com_pointers_[iid] = pointer(pointer(vtbl)) + if hasattr(itf, "_disp_methods_"): + self._dispimpl_ = {} + for m in itf._disp_methods_: + what, mthname, idlflags, restype, argspec = m + ################# + # What we have: + # + # restypes is a ctypes type or None + # argspec is seq. of (['in'], paramtype, paramname) tuples (or + # lists?) + ################# + # What we need: + # + # idlflags must contain 'propget', 'propset' and so on: + # Must be constructed by converting disptype + # + # paramflags must be a sequence + # of (F_IN|F_OUT|F_RETVAL, paramname[, default-value]) tuples + # + # comtypes has this function which helps: + # def _encode_idl(names): + # # convert to F_xxx and sum up "in", "out", + # # "retval" values found in _PARAMFLAGS, ignoring + # # other stuff. + # return sum([_PARAMFLAGS.get(n, 0) for n in names]) + ################# + + if what == "DISPMETHOD": + if 'propget' in idlflags: + invkind = 2 # DISPATCH_PROPERTYGET + mthname = "_get_" + mthname + elif 'propput' in idlflags: + invkind = 4 # DISPATCH_PROPERTYPUT + mthname = "_set_" + mthname + elif 'propputref' in idlflags: + invkind = 8 # DISPATCH_PROPERTYPUTREF + mthname = "_setref_" + mthname + else: + invkind = 1 # DISPATCH_METHOD + if restype: + argspec = argspec + ((['out'], restype, ""),) + self.__make_dispentry(finder, interface, mthname, + idlflags, argspec, invkind) + elif what == "DISPPROPERTY": + # DISPPROPERTY have implicit "out" + if restype: + argspec += ((['out'], restype, ""),) + self.__make_dispentry(finder, interface, + "_get_" + mthname, + idlflags, argspec, + 2 # DISPATCH_PROPERTYGET + ) + if not 'readonly' in idlflags: + self.__make_dispentry(finder, interface, + "_set_" + mthname, + idlflags, argspec, + 4) # DISPATCH_PROPERTYPUT + # Add DISPATCH_PROPERTYPUTREF also? + + def __make_dispentry(self, + finder, interface, mthname, + idlflags, argspec, invkind): + # We build a _dispmap_ entry now that maps invkind and + # dispid to implementations that the finder finds; + # IDispatch_Invoke will later call it. 
+ paramflags = [((_encode_idl(x[0]), x[1]) + tuple(x[3:])) + for x in argspec] + # XXX can the dispid be at a different index? Check codegenerator. + dispid = idlflags[0] + impl = finder.get_impl(interface, mthname, paramflags, idlflags) + self._dispimpl_[(dispid, invkind)] = impl + # invkind is really a set of flags; we allow both + # DISPATCH_METHOD and DISPATCH_PROPERTYGET (win32com uses + # this, maybe other languages too?) + if invkind in (1, 2): + self._dispimpl_[(dispid, 3)] = impl + + def _get_method_finder_(self, itf): + # This method can be overridden to customize how methods are + # found. + return _MethodFinder(self) + + ################################################################ + # LocalServer / InprocServer stuff + __server__ = None + + @staticmethod + def __run_inprocserver__(): + if COMObject.__server__ is None: + COMObject.__server__ = InprocServer() + elif isinstance(COMObject.__server__, InprocServer): + pass + else: + raise RuntimeError("Wrong server type") + + @staticmethod + def __run_localserver__(classobjects): + assert COMObject.__server__ is None + # XXX Decide whether we are in STA or MTA + server = COMObject.__server__ = LocalServer() + server.run(classobjects) + COMObject.__server__ = None + + @staticmethod + def __keep__(obj): + COMObject._instances_[obj] = None + _debug("%d active COM objects: Added %r", len(COMObject._instances_), + obj) + if COMObject.__server__: + COMObject.__server__.Lock() + + @staticmethod + def __unkeep__(obj): + try: + del COMObject._instances_[obj] + except AttributeError: + _debug("? active COM objects: Removed %r", obj) + else: + _debug("%d active COM objects: Removed %r", + len(COMObject._instances_), obj) + _debug("Remaining: %s", list(COMObject._instances_.keys())) + if COMObject.__server__: + COMObject.__server__.Unlock() + # + ################################################################ + + ######################################################### + # IUnknown methods implementations + def IUnknown_AddRef(self, this, + __InterlockedIncrement=_InterlockedIncrement, + _debug=_debug): + result = __InterlockedIncrement(self._refcnt) + if result == 1: + self.__keep__(self) + _debug("%r.AddRef() -> %s", self, result) + return result + + def _final_release_(self): + """This method may be overridden in subclasses + to free allocated resources or so.""" + pass + + def IUnknown_Release(self, this, + __InterlockedDecrement=_InterlockedDecrement, + _debug=_debug): + # If this is called at COM shutdown, _InterlockedDecrement() + # must still be available, although module level variables may + # have been deleted already - so we supply it as default + # argument. + result = __InterlockedDecrement(self._refcnt) + _debug("%r.Release() -> %s", self, result) + if result == 0: + self._final_release_() + self.__unkeep__(self) + # Hm, why isn't this cleaned up by the cycle gc? + self._com_pointers_ = {} + return result + + def IUnknown_QueryInterface(self, this, riid, ppvObj, _debug=_debug): + # XXX This is probably too slow. + # riid[0].hashcode() alone takes 33 us! + iid = riid[0] + ptr = self._com_pointers_.get(iid, None) + if ptr is not None: + # CopyComPointer(src, dst) calls AddRef! 
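#   [Editor's note - not part of the upstream comtypes sources:]
#   CopyComPointer stores the interface pointer into *ppvObj after calling
#   AddRef on it, and returns an integer HRESULT (0 == S_OK); that return
#   value is what this IUnknown_QueryInterface implementation passes back
#   to the COM caller.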
+ _debug("%r.QueryInterface(%s) -> S_OK", self, iid) + return CopyComPointer(ptr, ppvObj) + _debug("%r.QueryInterface(%s) -> E_NOINTERFACE", self, iid) + return E_NOINTERFACE + + def QueryInterface(self, interface): + "Query the object for an interface pointer" + # This method is NOT the implementation of + # IUnknown::QueryInterface, instead it is supposed to be + # called on an COMObject by user code. It allows to get COM + # interface pointers from COMObject instances. + ptr = self._com_pointers_.get(interface._iid_, None) + if ptr is None: + raise COMError(E_NOINTERFACE, FormatError(E_NOINTERFACE), + (None, None, 0, None, None)) + # CopyComPointer(src, dst) calls AddRef! + result = POINTER(interface)() + CopyComPointer(ptr, byref(result)) + return result + + ################################################################ + # ISupportErrorInfo::InterfaceSupportsErrorInfo implementation + def ISupportErrorInfo_InterfaceSupportsErrorInfo(self, this, riid): + if riid[0] in self._com_pointers_: + return S_OK + return S_FALSE + + ################################################################ + # IProvideClassInfo::GetClassInfo implementation + def IProvideClassInfo_GetClassInfo(self): + try: + self.__typelib + except AttributeError: + raise WindowsError(E_NOTIMPL) + return self.__typelib.GetTypeInfoOfGuid(self._reg_clsid_) + + ################################################################ + # IProvideClassInfo2::GetGUID implementation + + def IProvideClassInfo2_GetGUID(self, dwGuidKind): + # GUIDKIND_DEFAULT_SOURCE_DISP_IID = 1 + if dwGuidKind != 1: + raise WindowsError(E_INVALIDARG) + return self._outgoing_interfaces_[0]._iid_ + + ################################################################ + # IDispatch methods + @property + def __typeinfo(self): + # XXX Looks like this better be a static property, set by the + # code that sets __typelib also... + iid = self._com_interfaces_[0]._iid_ + return self.__typelib.GetTypeInfoOfGuid(iid) + + def IDispatch_GetTypeInfoCount(self): + try: + self.__typelib + except AttributeError: + return 0 + else: + return 1 + + def IDispatch_GetTypeInfo(self, this, itinfo, lcid, ptinfo): + if itinfo != 0: + return DISP_E_BADINDEX + try: + ptinfo[0] = self.__typeinfo + return S_OK + except AttributeError: + return E_NOTIMPL + + def IDispatch_GetIDsOfNames(self, this, riid, rgszNames, cNames, lcid, + rgDispId): + # This call uses windll instead of oledll so that a failed + # call to DispGetIDsOfNames will return a HRESULT instead of + # raising an error. + try: + tinfo = self.__typeinfo + except AttributeError: + return E_NOTIMPL + return windll.oleaut32.DispGetIDsOfNames(tinfo, + rgszNames, cNames, rgDispId) + + def IDispatch_Invoke(self, this, dispIdMember, riid, lcid, wFlags, + pDispParams, pVarResult, pExcepInfo, puArgErr): + try: + self._dispimpl_ + except AttributeError: + try: + tinfo = self.__typeinfo + except AttributeError: + # Hm, we pretend to implement IDispatch, but have no + # typeinfo, and so cannot fulfill the contract. Should we + # better return E_NOTIMPL or DISP_E_MEMBERNOTFOUND? Some + # clients call IDispatch_Invoke with 'known' DISPID_...' + # values, without going through GetIDsOfNames first. + return DISP_E_MEMBERNOTFOUND + # This call uses windll instead of oledll so that a failed + # call to DispInvoke will return a HRESULT instead of raising + # an error. 
+ interface = self._com_interfaces_[0] + ptr = self._com_pointers_[interface._iid_] + return windll.oleaut32.DispInvoke( + ptr, tinfo, dispIdMember, wFlags, pDispParams, pVarResult, + pExcepInfo, puArgErr + ) + + try: + # XXX Hm, wFlags should be considered a SET of flags... + mth = self._dispimpl_[(dispIdMember, wFlags)] + except KeyError: + return DISP_E_MEMBERNOTFOUND + + # Unpack the parameters: It would be great if we could use the + # DispGetParam function - but we cannot since it requires that + # we pass a VARTYPE for each argument and we do not know that. + # + # Seems that n arguments have dispids (0, 1, ..., n-1). + # Unnamed arguments are packed into the DISPPARAMS array in + # reverse order (starting with the highest dispid), named + # arguments are packed in the order specified by the + # rgdispidNamedArgs array. + # + params = pDispParams[0] + + if wFlags & (4 | 8): + # DISPATCH_PROPERTYPUT + # DISPATCH_PROPERTYPUTREF + # + # How are the parameters unpacked for propertyput + # operations with additional parameters? Can propput + # have additional args? + args = [params.rgvarg[i].value + for i in reversed(list(range(params.cNamedArgs)))] + # MSDN: pVarResult is ignored if DISPATCH_PROPERTYPUT or + # DISPATCH_PROPERTYPUTREF is specified. + return mth(this, *args) + + else: + # DISPATCH_METHOD + # DISPATCH_PROPERTYGET + # the positions of named arguments + # + # 2to3 has problems to translate 'range(...)[::-1]' + # correctly, so use 'list(range)[::-1]' instead (will be + # fixed in Python 3.1, probably): + named_indexes = [params.rgdispidNamedArgs[i] + for i in range(params.cNamedArgs)] + # the positions of unnamed arguments + num_unnamed = params.cArgs - params.cNamedArgs + unnamed_indexes = list(reversed(list(range(num_unnamed)))) + # It seems that this code calculates the indexes of the + # parameters in the params.rgvarg array correctly. + indexes = named_indexes + unnamed_indexes + args = [params.rgvarg[i].value for i in indexes] + + if pVarResult and getattr(mth, "has_outargs", False): + args.append(pVarResult) + return mth(this, *args) + + ################################################################ + # IPersist interface + def IPersist_GetClassID(self): + return self._reg_clsid_ + +__all__ = ["COMObject"] diff --git a/venv/Lib/site-packages/comtypes/_meta.py b/venv/Lib/site-packages/comtypes/_meta.py new file mode 100644 index 00000000..2d26b15d --- /dev/null +++ b/venv/Lib/site-packages/comtypes/_meta.py @@ -0,0 +1,61 @@ +# comtypes._meta helper module +from ctypes import POINTER, c_void_p, cast +import comtypes + +################################################################ +# metaclass for CoClass (in comtypes/__init__.py) + +def _wrap_coclass(self): + # We are an IUnknown pointer, represented as a c_void_p instance, + # but we really want this interface: + itf = self._com_interfaces_[0] + punk = cast(self, POINTER(itf)) + result = punk.QueryInterface(itf) + result.__dict__["__clsid"] = str(self._reg_clsid_) + return result + +def _coclass_from_param(cls, obj): + if isinstance(obj, (cls._com_interfaces_[0], cls)): + return obj + raise TypeError(obj) + +# +# The mro() of a POINTER(App) type, where class App is a subclass of CoClass: +# +# POINTER(App) +# App +# CoClass +# c_void_p +# _SimpleCData +# _CData +# object + +class _coclass_meta(type): + # metaclass for CoClass + # + # When a CoClass subclass is created, create a POINTER(...) type + # for that class, with bases <coclass> and c_void_p. Also, the + # POINTER(...) 
type gets a __ctypes_from_outparam__ method which + # will QueryInterface for the default interface: the first one on + # the coclass' _com_interfaces_ list. + def __new__(cls, name, bases, namespace): + klass = type.__new__(cls, name, bases, namespace) + if bases == (object,): + return klass + # XXX We should insist that a _reg_clsid_ is present. + if "_reg_clsid_" in namespace: + clsid = namespace["_reg_clsid_"] + comtypes.com_coclass_registry[str(clsid)] = klass + PTR = _coclass_pointer_meta("POINTER(%s)" % klass.__name__, + (klass, c_void_p), + {"__ctypes_from_outparam__": _wrap_coclass, + "from_param": classmethod(_coclass_from_param), + }) + from ctypes import _pointer_type_cache + _pointer_type_cache[klass] = PTR + + return klass + +# will not work if we change the order of the two base classes! +class _coclass_pointer_meta(type(c_void_p), _coclass_meta): + pass diff --git a/venv/Lib/site-packages/comtypes/_safearray.py b/venv/Lib/site-packages/comtypes/_safearray.py new file mode 100644 index 00000000..76e41b9d --- /dev/null +++ b/venv/Lib/site-packages/comtypes/_safearray.py @@ -0,0 +1,128 @@ +"""SAFEARRAY api functions, data types, and constants.""" + +from ctypes import * +from ctypes.wintypes import * +from comtypes import HRESULT, GUID + +################################################################ +##if __debug__: +## from ctypeslib.dynamic_module import include +## include("""\ +## #define UNICODE +## #define NO_STRICT +## #include <windows.h> +## """, +## persist=True) + +################################################################ + +VARTYPE = c_ushort +PVOID = c_void_p +USHORT = c_ushort + +_oleaut32 = WinDLL("oleaut32") + +class tagSAFEARRAYBOUND(Structure): + _fields_ = [ + ('cElements', DWORD), + ('lLbound', LONG), +] +SAFEARRAYBOUND = tagSAFEARRAYBOUND + +class tagSAFEARRAY(Structure): + _fields_ = [ + ('cDims', USHORT), + ('fFeatures', USHORT), + ('cbElements', DWORD), + ('cLocks', DWORD), + ('pvData', PVOID), + ('rgsabound', SAFEARRAYBOUND * 1), + ] +SAFEARRAY = tagSAFEARRAY + +SafeArrayAccessData = _oleaut32.SafeArrayAccessData +SafeArrayAccessData.restype = HRESULT +# Last parameter manually changed from POINTER(c_void_p) to c_void_p: +SafeArrayAccessData.argtypes = [POINTER(SAFEARRAY), c_void_p] + +SafeArrayCreateVectorEx = _oleaut32.SafeArrayCreateVectorEx +SafeArrayCreateVectorEx.restype = POINTER(SAFEARRAY) +SafeArrayCreateVectorEx.argtypes = [VARTYPE, LONG, DWORD, PVOID] + +SafeArrayCreateEx = _oleaut32.SafeArrayCreateEx +SafeArrayCreateEx.restype = POINTER(SAFEARRAY) +SafeArrayCreateEx.argtypes = [VARTYPE, c_uint, POINTER(SAFEARRAYBOUND), PVOID] + +SafeArrayCreate = _oleaut32.SafeArrayCreate +SafeArrayCreate.restype = POINTER(SAFEARRAY) +SafeArrayCreate.argtypes = [VARTYPE, c_uint, POINTER(SAFEARRAYBOUND)] + +SafeArrayUnaccessData = _oleaut32.SafeArrayUnaccessData +SafeArrayUnaccessData.restype = HRESULT +SafeArrayUnaccessData.argtypes = [POINTER(SAFEARRAY)] + +_SafeArrayGetVartype = _oleaut32.SafeArrayGetVartype +_SafeArrayGetVartype.restype = HRESULT +_SafeArrayGetVartype.argtypes = [POINTER(SAFEARRAY), POINTER(VARTYPE)] +def SafeArrayGetVartype(pa): + result = VARTYPE() + _SafeArrayGetVartype(pa, result) + return result.value + +SafeArrayGetElement = _oleaut32.SafeArrayGetElement +SafeArrayGetElement.restype = HRESULT +SafeArrayGetElement.argtypes = [POINTER(SAFEARRAY), POINTER(LONG), c_void_p] + +SafeArrayDestroy = _oleaut32.SafeArrayDestroy +SafeArrayDestroy.restype = HRESULT +SafeArrayDestroy.argtypes = [POINTER(SAFEARRAY)] + 
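# Illustrative sketch (editorial, Windows only): exercising the raw oleaut32
# SAFEARRAY bindings defined above.  VT_I4 (= 3) is the OLE automation type
# code for a 32-bit signed integer, hard-coded here for brevity.
from ctypes import c_long, byref
from comtypes._safearray import (SafeArrayCreateVectorEx, SafeArrayGetVartype,
                                 SafeArrayGetElement, SafeArrayDestroy)

VT_I4 = 3
psa = SafeArrayCreateVectorEx(VT_I4, 0, 4, None)  # 1-dim array of 4 longs, lbound 0
try:
    assert SafeArrayGetVartype(psa) == VT_I4
    value, index = c_long(), c_long(0)
    SafeArrayGetElement(psa, byref(index), byref(value))  # read element [0]
    print(value.value)                                    # freshly created -> 0
finally:
    SafeArrayDestroy(psa)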
+SafeArrayCreateVector = _oleaut32.SafeArrayCreateVector +SafeArrayCreateVector.restype = POINTER(SAFEARRAY) +SafeArrayCreateVector.argtypes = [VARTYPE, LONG, DWORD] + +SafeArrayDestroyData = _oleaut32.SafeArrayDestroyData +SafeArrayDestroyData.restype = HRESULT +SafeArrayDestroyData.argtypes = [POINTER(SAFEARRAY)] + +SafeArrayGetDim = _oleaut32.SafeArrayGetDim +SafeArrayGetDim.restype = UINT +SafeArrayGetDim.argtypes = [POINTER(SAFEARRAY)] + +_SafeArrayGetLBound = _oleaut32.SafeArrayGetLBound +_SafeArrayGetLBound.restype = HRESULT +_SafeArrayGetLBound.argtypes = [POINTER(SAFEARRAY), UINT, POINTER(LONG)] +def SafeArrayGetLBound(pa, dim): + result = LONG() + _SafeArrayGetLBound(pa, dim, result) + return result.value + +_SafeArrayGetUBound = _oleaut32.SafeArrayGetUBound +_SafeArrayGetUBound.restype = HRESULT +_SafeArrayGetUBound.argtypes = [POINTER(SAFEARRAY), UINT, POINTER(LONG)] +def SafeArrayGetUBound(pa, dim): + result = LONG() + _SafeArrayGetUBound(pa, dim, result) + return result.value + + +SafeArrayLock = _oleaut32.SafeArrayLock +SafeArrayLock.restype = HRESULT +SafeArrayLock.argtypes = [POINTER(SAFEARRAY)] +SafeArrayPtrOfIndex = _oleaut32.SafeArrayPtrOfIndex +SafeArrayPtrOfIndex.restype = HRESULT +# Last parameter manually changed from POINTER(c_void_p) to c_void_p: +SafeArrayPtrOfIndex.argtypes = [POINTER(SAFEARRAY), POINTER(LONG), c_void_p] +SafeArrayUnlock = _oleaut32.SafeArrayUnlock +SafeArrayUnlock.restype = HRESULT +SafeArrayUnlock.argtypes = [POINTER(SAFEARRAY)] +_SafeArrayGetIID = _oleaut32.SafeArrayGetIID +_SafeArrayGetIID.restype = HRESULT +_SafeArrayGetIID.argtypes = [POINTER(SAFEARRAY), POINTER(GUID)] +def SafeArrayGetIID(pa): + result = GUID() + _SafeArrayGetIID(pa, result) + return result +SafeArrayDestroyDescriptor = _oleaut32.SafeArrayDestroyDescriptor +SafeArrayDestroyDescriptor.restype = HRESULT +SafeArrayDestroyDescriptor.argtypes = [POINTER(SAFEARRAY)] diff --git a/venv/Lib/site-packages/comtypes/automation.py b/venv/Lib/site-packages/comtypes/automation.py new file mode 100644 index 00000000..026bdd52 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/automation.py @@ -0,0 +1,881 @@ +# comtypes.automation module +import array +import datetime +import decimal + +from ctypes import * +from ctypes import _Pointer +from _ctypes import CopyComPointer +from comtypes import IUnknown, GUID, IID, STDMETHOD, BSTR, COMMETHOD, COMError +from comtypes.hresult import * +from comtypes.patcher import Patch +from comtypes import npsupport +try: + from comtypes import _safearray +except (ImportError, AttributeError): + class _safearray(object): + tagSAFEARRAY = None + +from ctypes.wintypes import DWORD, LONG, UINT, VARIANT_BOOL, WCHAR, WORD + + +LCID = DWORD +DISPID = LONG +SCODE = LONG + +VARTYPE = c_ushort + +DISPATCH_METHOD = 1 +DISPATCH_PROPERTYGET = 2 +DISPATCH_PROPERTYPUT = 4 +DISPATCH_PROPERTYPUTREF = 8 + +tagINVOKEKIND = c_int +INVOKE_FUNC = DISPATCH_METHOD +INVOKE_PROPERTYGET = DISPATCH_PROPERTYGET +INVOKE_PROPERTYPUT = DISPATCH_PROPERTYPUT +INVOKE_PROPERTYPUTREF = DISPATCH_PROPERTYPUTREF +INVOKEKIND = tagINVOKEKIND + + +################################ +# helpers +IID_NULL = GUID() +riid_null = byref(IID_NULL) +_byref_type = type(byref(c_int())) + +# 30. December 1899, midnight. For VT_DATE. +_com_null_date = datetime.datetime(1899, 12, 30, 0, 0, 0) + +################################################################ +# VARIANT, in all it's glory. 
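# Editorial aside: a VT_DATE is stored as a double counting days (and day
# fractions) since _com_null_date, midnight on 1899-12-30.  A plain-Python
# check of that arithmetic, matching the VT_DATE conversion VARIANT performs
# further below:
import datetime
null_date = datetime.datetime(1899, 12, 30)
delta = datetime.datetime(2021, 6, 20, 12, 0) - null_date
print(delta.days + delta.seconds / 86400.0)   # 44367.5, i.e. noon on 2021-06-20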
+VARENUM = c_int # enum +VT_EMPTY = 0 +VT_NULL = 1 +VT_I2 = 2 +VT_I4 = 3 +VT_R4 = 4 +VT_R8 = 5 +VT_CY = 6 +VT_DATE = 7 +VT_BSTR = 8 +VT_DISPATCH = 9 +VT_ERROR = 10 +VT_BOOL = 11 +VT_VARIANT = 12 +VT_UNKNOWN = 13 +VT_DECIMAL = 14 +VT_I1 = 16 +VT_UI1 = 17 +VT_UI2 = 18 +VT_UI4 = 19 +VT_I8 = 20 +VT_UI8 = 21 +VT_INT = 22 +VT_UINT = 23 +VT_VOID = 24 +VT_HRESULT = 25 +VT_PTR = 26 +VT_SAFEARRAY = 27 +VT_CARRAY = 28 +VT_USERDEFINED = 29 +VT_LPSTR = 30 +VT_LPWSTR = 31 +VT_RECORD = 36 +VT_INT_PTR = 37 +VT_UINT_PTR = 38 +VT_FILETIME = 64 +VT_BLOB = 65 +VT_STREAM = 66 +VT_STORAGE = 67 +VT_STREAMED_OBJECT = 68 +VT_STORED_OBJECT = 69 +VT_BLOB_OBJECT = 70 +VT_CF = 71 +VT_CLSID = 72 +VT_VERSIONED_STREAM = 73 +VT_BSTR_BLOB = 4095 +VT_VECTOR = 4096 +VT_ARRAY = 8192 +VT_BYREF = 16384 +VT_RESERVED = 32768 +VT_ILLEGAL = 65535 +VT_ILLEGALMASKED = 4095 +VT_TYPEMASK = 4095 + + +class tagCY(Structure): + _fields_ = [("int64", c_longlong)] +CY = tagCY +CURRENCY = CY + + +class tagDEC(Structure): + _fields_ = [("wReserved", c_ushort), + ("scale", c_ubyte), + ("sign", c_ubyte), + ("Hi32", c_ulong), + ("Lo64", c_ulonglong)] + + def as_decimal(self): + """ Convert a tagDEC struct to Decimal. + + See http://msdn.microsoft.com/en-us/library/cc234586.aspx for the tagDEC + specification. + + """ + digits = (self.Hi32 << 64) + self.Lo64 + decimal_str = "{0}{1}e-{2}".format( + '-' if self.sign else '', + digits, + self.scale, + ) + return decimal.Decimal(decimal_str) + + +DECIMAL = tagDEC + + +# The VARIANT structure is a good candidate for implementation in a C +# helper extension. At least the get/set methods. +class tagVARIANT(Structure): + class U_VARIANT1(Union): + class __tagVARIANT(Structure): + # The C Header file defn of VARIANT is much more complicated, but + # this is the ctypes version - functional as well. + class U_VARIANT2(Union): + class _tagBRECORD(Structure): + _fields_ = [("pvRecord", c_void_p), + ("pRecInfo", POINTER(IUnknown))] + _fields_ = [ + ("VT_BOOL", VARIANT_BOOL), + ("VT_I1", c_byte), + ("VT_I2", c_short), + ("VT_I4", c_long), + ("VT_I8", c_longlong), + ("VT_INT", c_int), + ("VT_UI1", c_ubyte), + ("VT_UI2", c_ushort), + ("VT_UI4", c_ulong), + ("VT_UI8", c_ulonglong), + ("VT_UINT", c_uint), + ("VT_R4", c_float), + ("VT_R8", c_double), + ("VT_CY", c_longlong), + ("c_wchar_p", c_wchar_p), + ("c_void_p", c_void_p), + ("pparray", POINTER(POINTER(_safearray.tagSAFEARRAY))), + + ("bstrVal", BSTR), + ("_tagBRECORD", _tagBRECORD), + ] + _anonymous_ = ["_tagBRECORD"] + _fields_ = [("vt", VARTYPE), + ("wReserved1", c_ushort), + ("wReserved2", c_ushort), + ("wReserved3", c_ushort), + ("_", U_VARIANT2) + ] + _fields_ = [("__VARIANT_NAME_2", __tagVARIANT), + ("decVal", DECIMAL)] + _anonymous_ = ["__VARIANT_NAME_2"] + _fields_ = [("__VARIANT_NAME_1", U_VARIANT1)] + _anonymous_ = ["__VARIANT_NAME_1"] + + def __init__(self, *args): + if args: + self.value = args[0] + + def __del__(self): + if self._b_needsfree_: + # XXX This does not work. _b_needsfree_ is never + # set because the buffer is internal to the object. + _VariantClear(self) + + def __repr__(self): + if self.vt & VT_BYREF: + return "VARIANT(vt=0x%x, byref(%r))" % (self.vt, self[0]) + return "VARIANT(vt=0x%x, %r)" % (self.vt, self.value) + + def from_param(cls, value): + if isinstance(value, cls): + return value + return cls(value) + from_param = classmethod(from_param) + + def __setitem__(self, index, value): + # This method allows to change the value of a + # (VT_BYREF|VT_xxx) variant in place. 
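# Illustrative sketch: as_decimal() above builds the string
# "<sign><(Hi32 << 64) + Lo64>e-<scale>".  Filling the struct fields by hand
# (normally oleaut32 does this) shows the conversion:
from comtypes.automation import DECIMAL

d = DECIMAL(0, 2, 0, 0, 12345)   # wReserved, scale, sign, Hi32, Lo64
print(d.as_decimal())            # 12345 * 10**-2 -> 123.45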
+ if index != 0: + raise IndexError(index) + if not self.vt & VT_BYREF: + raise TypeError("set_byref requires a VT_BYREF VARIANT instance") + typ = _vartype_to_ctype[self.vt & ~VT_BYREF] + cast(self._.c_void_p, POINTER(typ))[0] = value + + # see also c:/sf/pywin32/com/win32com/src/oleargs.cpp 54 + def _set_value(self, value): + _VariantClear(self) + if value is None: + self.vt = VT_NULL + elif (hasattr(value, '__len__') and len(value) == 0 + and not isinstance(value, str)): + self.vt = VT_NULL + # since bool is a subclass of int, this check must come before + # the check for int + elif isinstance(value, bool): + self.vt = VT_BOOL + self._.VT_BOOL = value + elif isinstance(value, (int, c_int)): + self.vt = VT_I4 + self._.VT_I4 = value + elif isinstance(value, int): + u = self._ + # try VT_I4 first. + u.VT_I4 = value + if u.VT_I4 == value: + # it did work. + self.vt = VT_I4 + return + # try VT_UI4 next. + if value >= 0: + u.VT_UI4 = value + if u.VT_UI4 == value: + # did work. + self.vt = VT_UI4 + return + # try VT_I8 next. + if value >= 0: + u.VT_I8 = value + if u.VT_I8 == value: + # did work. + self.vt = VT_I8 + return + # try VT_UI8 next. + if value >= 0: + u.VT_UI8 = value + if u.VT_UI8 == value: + # did work. + self.vt = VT_UI8 + return + # VT_R8 is last resort. + self.vt = VT_R8 + u.VT_R8 = float(value) + elif isinstance(value, (float, c_double)): + self.vt = VT_R8 + self._.VT_R8 = value + elif isinstance(value, str): + self.vt = VT_BSTR + # do the c_wchar_p auto unicode conversion + self._.c_void_p = _SysAllocStringLen(value, len(value)) + elif isinstance(value, datetime.datetime): + delta = value - _com_null_date + # a day has 24 * 60 * 60 = 86400 seconds + com_days = delta.days + (delta.seconds + delta.microseconds * 1e-6) / 86400. + self.vt = VT_DATE + self._.VT_R8 = com_days + elif npsupport.isdatetime64(value): + com_days = value - npsupport.com_null_date64 + com_days /= npsupport.numpy.timedelta64(1, 'D') + self.vt = VT_DATE + self._.VT_R8 = com_days + elif decimal is not None and isinstance(value, decimal.Decimal): + self._.VT_CY = int(round(value * 10000)) + self.vt = VT_CY + elif isinstance(value, POINTER(IDispatch)): + CopyComPointer(value, byref(self._)) + self.vt = VT_DISPATCH + elif isinstance(value, POINTER(IUnknown)): + CopyComPointer(value, byref(self._)) + self.vt = VT_UNKNOWN + elif isinstance(value, (list, tuple)): + obj = _midlSAFEARRAY(VARIANT).create(value) + memmove(byref(self._), byref(obj), sizeof(obj)) + self.vt = VT_ARRAY | obj._vartype_ + elif isinstance(value, array.array): + vartype = _arraycode_to_vartype[value.typecode] + typ = _vartype_to_ctype[vartype] + obj = _midlSAFEARRAY(typ).create(value) + memmove(byref(self._), byref(obj), sizeof(obj)) + self.vt = VT_ARRAY | obj._vartype_ + elif npsupport.isndarray(value): + # Try to convert a simple array of basic types. 
+ descr = value.dtype.descr[0][1] + typ = npsupport.typecodes.get(descr) + if typ is None: + # Try for variant + obj = _midlSAFEARRAY(VARIANT).create(value) + else: + obj = _midlSAFEARRAY(typ).create(value) + memmove(byref(self._), byref(obj), sizeof(obj)) + self.vt = VT_ARRAY | obj._vartype_ + elif isinstance(value, Structure) and hasattr(value, "_recordinfo_"): + guids = value._recordinfo_ + from comtypes.typeinfo import GetRecordInfoFromGuids + ri = GetRecordInfoFromGuids(*guids) + self.vt = VT_RECORD + # Assigning a COM pointer to a structure field does NOT + # call AddRef(), have to call it manually: + ri.AddRef() + self._.pRecInfo = ri + self._.pvRecord = ri.RecordCreateCopy(byref(value)) + elif isinstance(getattr(value, "_comobj", None), POINTER(IDispatch)): + CopyComPointer(value._comobj, byref(self._)) + self.vt = VT_DISPATCH + elif isinstance(value, VARIANT): + _VariantCopy(self, value) + elif isinstance(value, c_ubyte): + self._.VT_UI1 = value + self.vt = VT_UI1 + elif isinstance(value, c_char): + self._.VT_UI1 = ord(value.value) + self.vt = VT_UI1 + elif isinstance(value, c_byte): + self._.VT_I1 = value + self.vt = VT_I1 + elif isinstance(value, c_ushort): + self._.VT_UI2 = value + self.vt = VT_UI2 + elif isinstance(value, c_short): + self._.VT_I2 = value + self.vt = VT_I2 + elif isinstance(value, c_uint): + self.vt = VT_UI4 + self._.VT_UI4 = value + elif isinstance(value, c_float): + self.vt = VT_R4 + self._.VT_R4 = value + elif isinstance(value, c_int64): + self.vt = VT_I8 + self._.VT_I8 = value + elif isinstance(value, c_uint64): + self.vt = VT_UI8 + self._.VT_UI8 = value + elif isinstance(value, _byref_type): + ref = value._obj + self._.c_void_p = addressof(ref) + self.__keepref = value + self.vt = _ctype_to_vartype[type(ref)] | VT_BYREF + elif isinstance(value, _Pointer): + ref = value.contents + self._.c_void_p = addressof(ref) + self.__keepref = value + self.vt = _ctype_to_vartype[type(ref)] | VT_BYREF + else: + raise TypeError("Cannot put %r in VARIANT" % value) + # buffer -> SAFEARRAY of VT_UI1 ? + + # c:/sf/pywin32/com/win32com/src/oleargs.cpp 197 + def _get_value(self, dynamic=False): + vt = self.vt + if vt in (VT_EMPTY, VT_NULL): + return None + elif vt == VT_I1: + return self._.VT_I1 + elif vt == VT_I2: + return self._.VT_I2 + elif vt == VT_I4: + return self._.VT_I4 + elif vt == VT_I8: + return self._.VT_I8 + elif vt == VT_UI8: + return self._.VT_UI8 + elif vt == VT_INT: + return self._.VT_INT + elif vt == VT_UI1: + return self._.VT_UI1 + elif vt == VT_UI2: + return self._.VT_UI2 + elif vt == VT_UI4: + return self._.VT_UI4 + elif vt == VT_UINT: + return self._.VT_UINT + elif vt == VT_R4: + return self._.VT_R4 + elif vt == VT_R8: + return self._.VT_R8 + elif vt == VT_BOOL: + return self._.VT_BOOL + elif vt == VT_BSTR: + return self._.bstrVal + elif vt == VT_DATE: + days = self._.VT_R8 + return datetime.timedelta(days=days) + _com_null_date + elif vt == VT_CY: + return self._.VT_CY / decimal.Decimal("10000") + elif vt == VT_UNKNOWN: + val = self._.c_void_p + if not val: + # We should/could return a NULL COM pointer. + # But the code generation must be able to construct one + # from the __repr__ of it. + return None # XXX? + ptr = cast(val, POINTER(IUnknown)) + # cast doesn't call AddRef (it should, imo!) + ptr.AddRef() + return ptr.__ctypes_from_outparam__() + elif vt == VT_DECIMAL: + return self.decVal.as_decimal() + elif vt == VT_DISPATCH: + val = self._.c_void_p + if not val: + # See above. + return None # XXX? 
+ ptr = cast(val, POINTER(IDispatch)) + # cast doesn't call AddRef (it should, imo!) + ptr.AddRef() + if not dynamic: + return ptr.__ctypes_from_outparam__() + else: + from comtypes.client.dynamic import Dispatch + return Dispatch(ptr) + # see also c:/sf/pywin32/com/win32com/src/oleargs.cpp + elif self.vt & VT_BYREF: + return self + elif vt == VT_RECORD: + from comtypes.client import GetModule + from comtypes.typeinfo import IRecordInfo + + # Retrieving a COM pointer from a structure field does NOT + # call AddRef(), have to call it manually: + punk = self._.pRecInfo + punk.AddRef() + ri = punk.QueryInterface(IRecordInfo) + + # find typelib + tlib = ri.GetTypeInfo().GetContainingTypeLib()[0] + + # load typelib wrapper module + mod = GetModule(tlib) + # retrive the type and create an instance + value = getattr(mod, ri.GetName())() + # copy data into the instance + ri.RecordCopy(self._.pvRecord, byref(value)) + + return value + elif self.vt & VT_ARRAY: + typ = _vartype_to_ctype[self.vt & ~VT_ARRAY] + return cast(self._.pparray, _midlSAFEARRAY(typ)).unpack() + else: + raise NotImplementedError("typecode %d = 0x%x)" % (vt, vt)) + + def __getitem__(self, index): + if index != 0: + raise IndexError(index) + if self.vt == VT_BYREF|VT_VARIANT: + v = VARIANT() + # apparently VariantCopyInd doesn't work always with + # VT_BYREF|VT_VARIANT, so do it manually. + v = cast(self._.c_void_p, POINTER(VARIANT))[0] + return v.value + else: + v = VARIANT() + _VariantCopyInd(v, self) + return v.value + + +# these are missing: +## getter[VT_ERROR] +## getter[VT_ARRAY] +## getter[VT_BYREF|VT_UI1] +## getter[VT_BYREF|VT_I2] +## getter[VT_BYREF|VT_I4] +## getter[VT_BYREF|VT_R4] +## getter[VT_BYREF|VT_R8] +## getter[VT_BYREF|VT_BOOL] +## getter[VT_BYREF|VT_ERROR] +## getter[VT_BYREF|VT_CY] +## getter[VT_BYREF|VT_DATE] +## getter[VT_BYREF|VT_BSTR] +## getter[VT_BYREF|VT_UNKNOWN] +## getter[VT_BYREF|VT_DISPATCH] +## getter[VT_BYREF|VT_ARRAY] +## getter[VT_BYREF|VT_VARIANT] +## getter[VT_BYREF] +## getter[VT_BYREF|VT_DECIMAL] +## getter[VT_BYREF|VT_I1] +## getter[VT_BYREF|VT_UI2] +## getter[VT_BYREF|VT_UI4] +## getter[VT_BYREF|VT_INT] +## getter[VT_BYREF|VT_UINT] + + value = property(_get_value, _set_value) + + def __ctypes_from_outparam__(self): + # XXX Manual resource management, because of the VARIANT bug: + result = self.value + self.value = None + return result + + def ChangeType(self, typecode): + _VariantChangeType(self, + self, + 0, + typecode) + +VARIANT = tagVARIANT +VARIANTARG = VARIANT + +_oleaut32 = OleDLL("oleaut32") + +_VariantChangeType = _oleaut32.VariantChangeType +_VariantChangeType.argtypes = (POINTER(VARIANT), POINTER(VARIANT), c_ushort, VARTYPE) + +_VariantClear = _oleaut32.VariantClear +_VariantClear.argtypes = (POINTER(VARIANT),) + +_SysAllocStringLen = windll.oleaut32.SysAllocStringLen +_SysAllocStringLen.argtypes = c_wchar_p, c_uint +_SysAllocStringLen.restype = c_void_p + +_VariantCopy = _oleaut32.VariantCopy +_VariantCopy.argtypes = POINTER(VARIANT), POINTER(VARIANT) + +_VariantCopyInd = _oleaut32.VariantCopyInd +_VariantCopyInd.argtypes = POINTER(VARIANT), POINTER(VARIANT) + +# some commonly used VARIANT instances +VARIANT.null = VARIANT(None) +VARIANT.empty = VARIANT() +VARIANT.missing = v = VARIANT() +v.vt = VT_ERROR +v._.VT_I4 = 0x80020004 +del v + +_carg_obj = type(byref(c_int())) +from _ctypes import Array as _CArrayType + +@Patch(POINTER(VARIANT)) +class _(object): + # Override the default .from_param classmethod of POINTER(VARIANT). 
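# Illustrative sketch: the VARIANT class above picks a VARTYPE automatically
# when .value is assigned and converts back to Python on read (Windows only):
from comtypes.automation import VARIANT

v = VARIANT()
v.value = 42
print(hex(v.vt), v.value)    # 0x3 (VT_I4)   42
v.value = "spam"
print(hex(v.vt), v.value)    # 0x8 (VT_BSTR) spam
v.value = 2.5
print(hex(v.vt), v.value)    # 0x5 (VT_R8)   2.5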
+ # This allows to pass values which can be stored in VARIANTs as + # function parameters declared as POINTER(VARIANT). See + # InternetExplorer's Navigate2() method, or Word's Close() method, for + # examples. + def from_param(cls, arg): + # accept POINTER(VARIANT) instance + if isinstance(arg, POINTER(VARIANT)): + return arg + # accept byref(VARIANT) instance + if isinstance(arg, _carg_obj) and isinstance(arg._obj, VARIANT): + return arg + # accept VARIANT instance + if isinstance(arg, VARIANT): + return byref(arg) + if isinstance(arg, _CArrayType) and arg._type_ is VARIANT: + # accept array of VARIANTs + return arg + # anything else which can be converted to a VARIANT. + return byref(VARIANT(arg)) + from_param = classmethod(from_param) + + def __setitem__(self, index, value): + # This is to support the same sematics as a pointer instance: + # variant[0] = value + self[index].value = value + +################################################################ +# interfaces, structures, ... +class IEnumVARIANT(IUnknown): + _iid_ = GUID('{00020404-0000-0000-C000-000000000046}') + _idlflags_ = ['hidden'] + _dynamic = False + def __iter__(self): + return self + + def __next__(self): + item, fetched = self.Next(1) + if fetched: + return item + raise StopIteration + + def __getitem__(self, index): + self.Reset() + # Does not yet work. +## if isinstance(index, slice): +## self.Skip(index.start or 0) +## return self.Next(index.stop or sys.maxint) + self.Skip(index) + item, fetched = self.Next(1) + if fetched: + return item + raise IndexError + + def Next(self, celt): + fetched = c_ulong() + if celt == 1: + v = VARIANT() + self.__com_Next(celt, v, fetched) + return v._get_value(dynamic=self._dynamic), fetched.value + array = (VARIANT * celt)() + self.__com_Next(celt, array, fetched) + result = [v._get_value(dynamic=self._dynamic) for v in array[:fetched.value]] + for v in array: + v.value = None + return result + +IEnumVARIANT._methods_ = [ + COMMETHOD([], HRESULT, 'Next', + ( ['in'], c_ulong, 'celt' ), + ( ['out'], POINTER(VARIANT), 'rgvar' ), + ( ['out'], POINTER(c_ulong), 'pceltFetched' )), + COMMETHOD([], HRESULT, 'Skip', + ( ['in'], c_ulong, 'celt' )), + COMMETHOD([], HRESULT, 'Reset'), + COMMETHOD([], HRESULT, 'Clone', + ( ['out'], POINTER(POINTER(IEnumVARIANT)), 'ppenum' )), +] + + +##from _ctypes import VARIANT_set +##import new +##VARIANT.value = property(VARIANT._get_value, new.instancemethod(VARIANT_set, None, VARIANT)) + + +class tagEXCEPINFO(Structure): + def __repr__(self): + return "<EXCEPINFO %s>" % \ + ((self.wCode, self.bstrSource, self.bstrDescription, self.bstrHelpFile, self.dwHelpContext, + self.pfnDeferredFillIn, self.scode),) +tagEXCEPINFO._fields_ = [ + ('wCode', WORD), + ('wReserved', WORD), + ('bstrSource', BSTR), + ('bstrDescription', BSTR), + ('bstrHelpFile', BSTR), + ('dwHelpContext', DWORD), + ('pvReserved', c_void_p), +## ('pfnDeferredFillIn', WINFUNCTYPE(HRESULT, POINTER(tagEXCEPINFO))), + ('pfnDeferredFillIn', c_void_p), + ('scode', SCODE), +] +EXCEPINFO = tagEXCEPINFO + +class tagDISPPARAMS(Structure): + _fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 696 + ('rgvarg', POINTER(VARIANTARG)), + ('rgdispidNamedArgs', POINTER(DISPID)), + ('cArgs', UINT), + ('cNamedArgs', UINT), + ] + def __del__(self): + if self._b_needsfree_: + for i in range(self.cArgs): + self.rgvarg[i].value = None +DISPPARAMS = tagDISPPARAMS + +DISPID_VALUE = 0 +DISPID_UNKNOWN = -1 +DISPID_PROPERTYPUT = -3 +DISPID_NEWENUM = -4 +DISPID_EVALUATE = -5 +DISPID_CONSTRUCTOR = -6 
+DISPID_DESTRUCTOR = -7 +DISPID_COLLECT = -8 + +class IDispatch(IUnknown): + _iid_ = GUID("{00020400-0000-0000-C000-000000000046}") + _methods_ = [ + COMMETHOD([], HRESULT, 'GetTypeInfoCount', + (['out'], POINTER(UINT) ) ), + COMMETHOD([], HRESULT, 'GetTypeInfo', + (['in'], UINT, 'index'), + (['in'], LCID, 'lcid', 0), +## Normally, we would declare this parameter in this way: +## (['out'], POINTER(POINTER(ITypeInfo)) ) ), +## but we cannot import comtypes.typeinfo at the top level (recursive imports!). + (['out'], POINTER(POINTER(IUnknown)) ) ), + STDMETHOD(HRESULT, 'GetIDsOfNames', [POINTER(IID), POINTER(c_wchar_p), + UINT, LCID, POINTER(DISPID)]), + STDMETHOD(HRESULT, 'Invoke', [DISPID, POINTER(IID), LCID, WORD, + POINTER(DISPPARAMS), POINTER(VARIANT), + POINTER(EXCEPINFO), POINTER(UINT)]), + ] + + def GetTypeInfo(self, index, lcid=0): + """Return type information. Index 0 specifies typeinfo for IDispatch""" + import comtypes.typeinfo + result = self._GetTypeInfo(index, lcid) + return result.QueryInterface(comtypes.typeinfo.ITypeInfo) + + def GetIDsOfNames(self, *names, **kw): + """Map string names to integer ids.""" + lcid = kw.pop("lcid", 0) + assert not kw + arr = (c_wchar_p * len(names))(*names) + ids = (DISPID * len(names))() + self.__com_GetIDsOfNames(riid_null, arr, len(names), lcid, ids) + return ids[:] + + def _invoke(self, memid, invkind, lcid, *args): + var = VARIANT() + argerr = c_uint() + dp = DISPPARAMS() + + if args: + array = (VARIANT * len(args))() + + for i, a in enumerate(args[::-1]): + array[i].value = a + + dp.cArgs = len(args) + if invkind in (DISPATCH_PROPERTYPUT, DISPATCH_PROPERTYPUTREF): + dp.cNamedArgs = 1 + dp.rgdispidNamedArgs = pointer(DISPID(DISPID_PROPERTYPUT)) + dp.rgvarg = array + + self.__com_Invoke(memid, riid_null, lcid, invkind, + dp, var, None, argerr) + return var._get_value(dynamic=True) + + def Invoke(self, dispid, *args, **kw): + """Invoke a method or property.""" + + # Memory management in Dispatch::Invoke calls: + # http://msdn.microsoft.com/library/en-us/automat/htm/chap5_4x2q.asp + # Quote: + # The *CALLING* code is responsible for releasing all strings and + # objects referred to by rgvarg[ ] or placed in *pVarResult. + # + # For comtypes this is handled in DISPPARAMS.__del__ and VARIANT.__del__. 
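# Illustrative sketch: a late-bound call through the GetIDsOfNames()/Invoke()
# helpers defined above.  "Scripting.Dictionary" is merely an example ProgID
# for a registered automation object.
from comtypes.client import CreateObject
from comtypes.automation import IDispatch, DISPATCH_PROPERTYGET

d = CreateObject("Scripting.Dictionary", interface=IDispatch)
add_id, count_id = d.GetIDsOfNames("Add", "Count")
d.Invoke(add_id, "spam", 42)                              # DISPATCH_METHOD is the default
print(d.Invoke(count_id, _invkind=DISPATCH_PROPERTYGET))  # -> 1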
+ _invkind = kw.pop("_invkind", 1) # DISPATCH_METHOD + _lcid = kw.pop("_lcid", 0) + if kw: + raise ValueError("named parameters not yet implemented") + + result = VARIANT() + excepinfo = EXCEPINFO() + argerr = c_uint() + + if _invkind in (DISPATCH_PROPERTYPUT, DISPATCH_PROPERTYPUTREF): # propput + array = (VARIANT * len(args))() + + for i, a in enumerate(args[::-1]): + array[i].value = a + + dp = DISPPARAMS() + dp.cArgs = len(args) + dp.cNamedArgs = 1 + dp.rgvarg = array + dp.rgdispidNamedArgs = pointer(DISPID(DISPID_PROPERTYPUT)) + else: + array = (VARIANT * len(args))() + + for i, a in enumerate(args[::-1]): + array[i].value = a + + dp = DISPPARAMS() + dp.cArgs = len(args) + dp.cNamedArgs = 0 + dp.rgvarg = array + + try: + self.__com_Invoke(dispid, riid_null, _lcid, _invkind, byref(dp), + byref(result), byref(excepinfo), byref(argerr)) + except COMError as err: + (hresult, text, details) = err.args + if hresult == DISP_E_EXCEPTION: + details = (excepinfo.bstrDescription, excepinfo.bstrSource, + excepinfo.bstrHelpFile, excepinfo.dwHelpContext, + excepinfo.scode) + raise COMError(hresult, text, details) + elif hresult == DISP_E_PARAMNOTFOUND: + # MSDN says: You get the error DISP_E_PARAMNOTFOUND + # when you try to set a property and you have not + # initialized the cNamedArgs and rgdispidNamedArgs + # elements of your DISPPARAMS structure. + # + # So, this looks like a bug. + raise COMError(hresult, text, argerr.value) + elif hresult == DISP_E_TYPEMISMATCH: + # MSDN: One or more of the arguments could not be + # coerced. + # + # Hm, should we raise TypeError, or COMError? + raise COMError(hresult, text, + ("TypeError: Parameter %s" % (argerr.value + 1), + args)) + raise + return result._get_value(dynamic=True) + + # XXX Would separate methods for _METHOD, _PROPERTYGET and _PROPERTYPUT be better? + + +################################################################ +# safearrays +# XXX Only one-dimensional arrays are currently implemented + +# map ctypes types to VARTYPE values + +_arraycode_to_vartype = { + "d": VT_R8, + "f": VT_R4, + "l": VT_I4, + "i": VT_INT, + "h": VT_I2, + "b": VT_I1, + "I": VT_UINT, + "L": VT_UI4, + "H": VT_UI2, + "B": VT_UI1, + } + +_ctype_to_vartype = { + c_byte: VT_I1, + c_ubyte: VT_UI1, + + c_short: VT_I2, + c_ushort: VT_UI2, + + c_long: VT_I4, + c_ulong: VT_UI4, + + c_float: VT_R4, + c_double: VT_R8, + + c_longlong: VT_I8, + c_ulonglong: VT_UI8, + + VARIANT_BOOL: VT_BOOL, + + BSTR: VT_BSTR, + VARIANT: VT_VARIANT, + + # SAFEARRAY(VARIANT *) + # + # It is unlear to me if this is allowed or not. Apparently there + # are typelibs that define such an argument type, but it may be + # that these are buggy. + # + # Point is that SafeArrayCreateEx(VT_VARIANT|VT_BYREF, ..) fails. + # The MSDN docs for SafeArrayCreate() have a notice that neither + # VT_ARRAY not VT_BYREF may be set, this notice is missing however + # for SafeArrayCreateEx(). + # + # We have this code here to make sure that comtypes can import + # such a typelib, although calling ths method will fail because + # such an array cannot be created. + POINTER(VARIANT): VT_BYREF|VT_VARIANT, + + # This is needed to import Esri ArcObjects (esriSystem.olb). 
+ POINTER(BSTR): VT_BYREF|VT_BSTR, + + # These are not yet implemented: +## POINTER(IUnknown): VT_UNKNOWN, +## POINTER(IDispatch): VT_DISPATCH, + } + +_vartype_to_ctype = {} +for c, v in _ctype_to_vartype.items(): + _vartype_to_ctype[v] = c +_vartype_to_ctype[VT_INT] = _vartype_to_ctype[VT_I4] +_vartype_to_ctype[VT_UINT] = _vartype_to_ctype[VT_UI4] +_ctype_to_vartype[c_char] = VT_UI1 + + + +try: + from comtypes.safearray import _midlSAFEARRAY +except (ImportError, AttributeError): + pass diff --git a/venv/Lib/site-packages/comtypes/client/__init__.py b/venv/Lib/site-packages/comtypes/client/__init__.py new file mode 100644 index 00000000..d9216ae7 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/client/__init__.py @@ -0,0 +1,266 @@ +'''comtypes.client - High level client level COM support package. +''' + +################################################################ +# +# TODO: +# +# - refactor some code into modules +# +################################################################ + +import sys, os +import ctypes + +import comtypes +from comtypes.hresult import * +import comtypes.automation +import comtypes.typeinfo +import comtypes.client.dynamic + +from comtypes.client._events import GetEvents, ShowEvents, PumpEvents +from comtypes.client._generate import GetModule + +import logging +logger = logging.getLogger(__name__) + +__all__ = ["CreateObject", "GetActiveObject", "CoGetObject", + "GetEvents", "ShowEvents", "PumpEvents", "GetModule", + "GetClassObject"] + +from comtypes.client._code_cache import _find_gen_dir + +gen_dir = _find_gen_dir() +import comtypes.gen + +### for testing +##gen_dir = None + +def wrap_outparam(punk): + logger.debug("wrap_outparam(%s)", punk) + if not punk: + return None + if punk.__com_interface__ == comtypes.automation.IDispatch: + return GetBestInterface(punk) + return punk + +def GetBestInterface(punk): + """Try to QueryInterface a COM pointer to the 'most useful' + interface. + + Get type information for the provided object, either via + IDispatch.GetTypeInfo(), or via IProvideClassInfo.GetClassInfo(). + Generate a wrapper module for the typelib, and QI for the + interface found. + """ + if not punk: # NULL COM pointer + return punk # or should we return None? + # find the typelib and the interface name + logger.debug("GetBestInterface(%s)", punk) + try: + try: + pci = punk.QueryInterface(comtypes.typeinfo.IProvideClassInfo) + logger.debug("Does implement IProvideClassInfo") + except comtypes.COMError: + # Some COM objects support IProvideClassInfo2, but not IProvideClassInfo. + # These objects are broken, but we support them anyway. + logger.debug("Does NOT implement IProvideClassInfo, trying IProvideClassInfo2") + pci = punk.QueryInterface(comtypes.typeinfo.IProvideClassInfo2) + logger.debug("Does implement IProvideClassInfo2") + tinfo = pci.GetClassInfo() # TypeInfo for the CoClass + # find the interface marked as default + ta = tinfo.GetTypeAttr() + for index in range(ta.cImplTypes): + if tinfo.GetImplTypeFlags(index) == 1: + break + else: + if ta.cImplTypes != 1: + # Hm, should we use dynamic now? + raise TypeError("No default interface found") + # Only one interface implemented, use that (even if + # not marked as default). 
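# Illustrative sketch: the _arraycode_to_vartype and _ctype_to_vartype tables
# in comtypes.automation above are what let VARIANT pack a plain array.array
# into a SAFEARRAY (Windows only):
import array
from comtypes.automation import VARIANT, VT_ARRAY, VT_R8

v = VARIANT()
v.value = array.array("d", [1.0, 2.0, 3.0])   # typecode "d" -> VT_R8
print(v.vt == (VT_ARRAY | VT_R8))             # True
print(v.value)                                # the values round-trip back out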
+ index = 0 + href = tinfo.GetRefTypeOfImplType(index) + tinfo = tinfo.GetRefTypeInfo(href) + except comtypes.COMError: + logger.debug("Does NOT implement IProvideClassInfo/IProvideClassInfo2") + try: + pdisp = punk.QueryInterface(comtypes.automation.IDispatch) + except comtypes.COMError: + logger.debug("No Dispatch interface: %s", punk) + return punk + try: + tinfo = pdisp.GetTypeInfo(0) + except comtypes.COMError: + pdisp = comtypes.client.dynamic.Dispatch(pdisp) + logger.debug("IDispatch.GetTypeInfo(0) failed: %s" % pdisp) + return pdisp + typeattr = tinfo.GetTypeAttr() + logger.debug("Default interface is %s", typeattr.guid) + try: + punk.QueryInterface(comtypes.IUnknown, typeattr.guid) + except comtypes.COMError: + logger.debug("Does not implement default interface, returning dynamic object") + return comtypes.client.dynamic.Dispatch(punk) + + itf_name = tinfo.GetDocumentation(-1)[0] # interface name + tlib = tinfo.GetContainingTypeLib()[0] # typelib + + # import the wrapper, generating it on demand + mod = GetModule(tlib) + # Python interface class + interface = getattr(mod, itf_name) + logger.debug("Implements default interface from typeinfo %s", interface) + # QI for this interface + # XXX + # What to do if this fails? + # In the following example the engine.Eval() call returns + # such an object. + # + # engine = CreateObject("MsScriptControl.ScriptControl") + # engine.Language = "JScript" + # engine.Eval("[1, 2, 3]") + # + # Could the above code, as an optimization, check that QI works, + # *before* generating the wrapper module? + result = punk.QueryInterface(interface) + logger.debug("Final result is %s", result) + return result +# backwards compatibility: +wrap = GetBestInterface + +# Should we do this for POINTER(IUnknown) also? +ctypes.POINTER(comtypes.automation.IDispatch).__ctypes_from_outparam__ = wrap_outparam + +################################################################ +# +# Typelib constants +# +class Constants(object): + """This class loads the type library from the supplied object, + then exposes constants in the type library as attributes.""" + def __init__(self, obj): + obj = obj.QueryInterface(comtypes.automation.IDispatch) + tlib, index = obj.GetTypeInfo(0).GetContainingTypeLib() + self.tcomp = tlib.GetTypeComp() + + def __getattr__(self, name): + try: + kind, desc = self.tcomp.Bind(name) + except (WindowsError, comtypes.COMError): + raise AttributeError(name) + if kind != "variable": + raise AttributeError(name) + return desc._.lpvarValue[0].value + + def _bind_type(self, name): + return self.tcomp.BindType(name) + +################################################################ +# +# Object creation +# +def GetActiveObject(progid, interface=None, dynamic=False): + """Return a pointer to a running COM object that has been + registered with COM. + + 'progid' may be a string like "Excel.Application", + a string specifying a clsid, a GUID instance, or an object with + a _clsid_ attribute which should be any of the above. + 'interface' allows to force a certain interface. + 'dynamic=True' will return a dynamic dispatch object. 
+ """ + clsid = comtypes.GUID.from_progid(progid) + if dynamic: + if interface is not None: + raise ValueError("interface and dynamic are mutually exclusive") + interface = comtypes.automation.IDispatch + elif interface is None: + interface = getattr(progid, "_com_interfaces_", [None])[0] + obj = comtypes.GetActiveObject(clsid, interface=interface) + if dynamic: + return comtypes.client.dynamic.Dispatch(obj) + return _manage(obj, clsid, interface=interface) + +def _manage(obj, clsid, interface): + obj.__dict__['__clsid'] = str(clsid) + if interface is None: + obj = GetBestInterface(obj) + return obj + +def GetClassObject(progid, + clsctx=None, + pServerInfo=None, + interface=None): + """Create and return the class factory for a COM object. + + 'clsctx' specifies how to create the object, use the CLSCTX_... constants. + 'pServerInfo', if used, must be a pointer to a comtypes.COSERVERINFO instance + 'interface' may be used to request an interface other than IClassFactory + """ + clsid = comtypes.GUID.from_progid(progid) + return comtypes.CoGetClassObject(clsid, + clsctx, pServerInfo, interface) + +def CreateObject(progid, # which object to create + clsctx=None, # how to create the object + machine=None, # where to create the object + interface=None, # the interface we want + dynamic=False, # use dynamic dispatch + pServerInfo=None): # server info struct for remoting + """Create a COM object from 'progid', and try to QueryInterface() + it to the most useful interface, generating typelib support on + demand. A pointer to this interface is returned. + + 'progid' may be a string like "InternetExplorer.Application", + a string specifying a clsid, a GUID instance, or an object with + a _clsid_ attribute which should be any of the above. + 'clsctx' specifies how to create the object, use the CLSCTX_... constants. + 'machine' allows to specify a remote machine to create the object on. + 'interface' allows to force a certain interface + 'dynamic=True' will return a dynamic dispatch object + 'pServerInfo', if used, must be a pointer to a comtypes.COSERVERINFO instance + This supercedes 'machine'. + + You can also later request to receive events with GetEvents(). + """ + clsid = comtypes.GUID.from_progid(progid) + logger.debug("%s -> %s", progid, clsid) + if dynamic: + if interface: + raise ValueError("interface and dynamic are mutually exclusive") + interface = comtypes.automation.IDispatch + elif interface is None: + interface = getattr(progid, "_com_interfaces_", [None])[0] + if machine is None and pServerInfo is None: + logger.debug("CoCreateInstance(%s, clsctx=%s, interface=%s)", + clsid, clsctx, interface) + obj = comtypes.CoCreateInstance(clsid, clsctx=clsctx, interface=interface) + else: + logger.debug("CoCreateInstanceEx(%s, clsctx=%s, interface=%s, machine=%s,\ + pServerInfo=%s)", + clsid, clsctx, interface, machine, pServerInfo) + if machine is not None and pServerInfo is not None: + msg = "You can notset both the machine name and server info." + raise ValueError(msg) + obj = comtypes.CoCreateInstanceEx(clsid, clsctx=clsctx, + interface=interface, machine=machine, pServerInfo=pServerInfo) + if dynamic: + return comtypes.client.dynamic.Dispatch(obj) + return _manage(obj, clsid, interface=interface) + +def CoGetObject(displayname, interface=None, dynamic=False): + """Create an object by calling CoGetObject(displayname). + + Additional parameters have the same meaning as in CreateObject(). 
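# Illustrative sketch: typical use of CreateObject() as defined above.
# "Scripting.FileSystemObject" is only an example ProgID; any registered
# automation object would do.
from comtypes.client import CreateObject

fso = CreateObject("Scripting.FileSystemObject")                     # early bound
fso_dyn = CreateObject("Scripting.FileSystemObject", dynamic=True)   # late bound
print(fso.FolderExists("C:\\Windows"))        # True on a standard install
print(fso_dyn.FolderExists("C:\\Windows"))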
+ """ + if dynamic: + if interface is not None: + raise ValueError("interface and dynamic are mutually exclusive") + interface = comtypes.automation.IDispatch + punk = comtypes.CoGetObject(displayname, interface) + if dynamic: + return comtypes.client.dynamic.Dispatch(punk) + return _manage(punk, + clsid=None, + interface=interface) diff --git a/venv/Lib/site-packages/comtypes/client/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/comtypes/client/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..070ce59f Binary files /dev/null and b/venv/Lib/site-packages/comtypes/client/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/client/__pycache__/_code_cache.cpython-36.pyc b/venv/Lib/site-packages/comtypes/client/__pycache__/_code_cache.cpython-36.pyc new file mode 100644 index 00000000..847c9848 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/client/__pycache__/_code_cache.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/client/__pycache__/_events.cpython-36.pyc b/venv/Lib/site-packages/comtypes/client/__pycache__/_events.cpython-36.pyc new file mode 100644 index 00000000..c231b01f Binary files /dev/null and b/venv/Lib/site-packages/comtypes/client/__pycache__/_events.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/client/__pycache__/_generate.cpython-36.pyc b/venv/Lib/site-packages/comtypes/client/__pycache__/_generate.cpython-36.pyc new file mode 100644 index 00000000..9c765df7 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/client/__pycache__/_generate.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/client/__pycache__/dynamic.cpython-36.pyc b/venv/Lib/site-packages/comtypes/client/__pycache__/dynamic.cpython-36.pyc new file mode 100644 index 00000000..91658f99 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/client/__pycache__/dynamic.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/client/__pycache__/lazybind.cpython-36.pyc b/venv/Lib/site-packages/comtypes/client/__pycache__/lazybind.cpython-36.pyc new file mode 100644 index 00000000..01c45fd3 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/client/__pycache__/lazybind.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/client/_code_cache.py b/venv/Lib/site-packages/comtypes/client/_code_cache.py new file mode 100644 index 00000000..7202bf29 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/client/_code_cache.py @@ -0,0 +1,136 @@ +"""comtypes.client._code_cache helper module. + +The main function is _find_gen_dir(), which on-demand creates the +comtypes.gen package and returns a directory where generated code can +be written to. +""" +import ctypes, logging, os, sys, tempfile, types +from ctypes import wintypes +logger = logging.getLogger(__name__) + + +def _ensure_list(path): + """ + On Python 3.4 and later, when a package is imported from + an empty directory, its `__path__` will be a _NamespacePath + object and not a list, and _NamespacePath objects cannot + be indexed, leading to the error reported in #102. + This wrapper ensures that the path is a list for that reason. + """ + return list(path) + + +def _find_gen_dir(): + """Create, if needed, and return a directory where automatically + generated modules will be created. + + Usually, this is the directory 'Lib/site-packages/comtypes/gen'. 
+ + If the above directory cannot be created, or if it is not a + directory in the file system (when comtypes is imported from a + zip-archive or a zipped egg), or if the current user cannot create + files in this directory, an additional directory is created and + appended to comtypes.gen.__path__ . + + For a Python script using comtypes, the additional directory is + '%APPDATA%\<username>\Python\Python25\comtypes_cache'. + + For an executable frozen with py2exe, the additional directory is + '%TEMP%\comtypes_cache\<imagebasename>-25'. + """ + _create_comtypes_gen_package() + from comtypes import gen + gen_path = _ensure_list(gen.__path__) + if not _is_writeable(gen_path): + # check type of executable image to determine a subdirectory + # where generated modules are placed. + ftype = getattr(sys, "frozen", None) + version_str = "%d%d" % sys.version_info[:2] + if ftype == None: + # Python script + subdir = r"Python\Python%s\comtypes_cache" % version_str + basedir = _get_appdata_dir() + + elif ftype == "dll": + # dll created with py2exe + path = _get_module_filename(sys.frozendllhandle) + base = os.path.splitext(os.path.basename(path))[0] + subdir = r"comtypes_cache\%s-%s" % (base, version_str) + basedir = tempfile.gettempdir() + + else: # ftype in ('windows_exe', 'console_exe') + # exe created by py2exe + base = os.path.splitext(os.path.basename(sys.executable))[0] + subdir = r"comtypes_cache\%s-%s" % (base, version_str) + basedir = tempfile.gettempdir() + + gen_dir = os.path.join(basedir, subdir) + if not os.path.exists(gen_dir): + logger.info("Creating writeable comtypes cache directory: '%s'", gen_dir) + os.makedirs(gen_dir) + gen_path.append(gen_dir) + result = os.path.abspath(gen_path[-1]) + logger.info("Using writeable comtypes cache directory: '%s'", result) + return result + +################################################################ + +SHGetSpecialFolderPath = ctypes.OleDLL("shell32.dll").SHGetSpecialFolderPathW +GetModuleFileName = ctypes.WinDLL("kernel32.dll").GetModuleFileNameW +SHGetSpecialFolderPath.argtypes = [ctypes.c_ulong, ctypes.c_wchar_p, + ctypes.c_int, ctypes.c_int] +GetModuleFileName.restype = ctypes.c_ulong +GetModuleFileName.argtypes = [wintypes.HMODULE, ctypes.c_wchar_p, ctypes.c_ulong] + +CSIDL_APPDATA = 26 +MAX_PATH = 260 + +def _create_comtypes_gen_package(): + """Import (creating it if needed) the comtypes.gen package.""" + try: + import comtypes.gen + logger.info("Imported existing %s", comtypes.gen) + except ImportError: + import comtypes + logger.info("Could not import comtypes.gen, trying to create it.") + try: + comtypes_path = os.path.abspath(os.path.join(comtypes.__path__[0], "gen")) + if not os.path.isdir(comtypes_path): + os.mkdir(comtypes_path) + logger.info("Created comtypes.gen directory: '%s'", comtypes_path) + comtypes_init = os.path.join(comtypes_path, "__init__.py") + if not os.path.exists(comtypes_init): + logger.info("Writing __init__.py file: '%s'", comtypes_init) + ofi = open(comtypes_init, "w") + ofi.write("# comtypes.gen package, directory for generated files.\n") + ofi.close() + except (OSError, IOError) as details: + logger.info("Creating comtypes.gen package failed: %s", details) + module = sys.modules["comtypes.gen"] = types.ModuleType("comtypes.gen") + comtypes.gen = module + comtypes.gen.__path__ = [] + logger.info("Created a memory-only package.") + +def _is_writeable(path): + """Check if the first part, if any, on path is a directory in + which we can create files.""" + if not path: + return False + # TODO: should we add 
os.X_OK flag as well? It seems unnecessary on Windows. + return os.access(path[0], os.W_OK) + +def _get_module_filename(hmodule): + """Call the Windows GetModuleFileName function which determines + the path from a module handle.""" + path = ctypes.create_unicode_buffer(MAX_PATH) + if GetModuleFileName(hmodule, path, MAX_PATH): + return path.value + raise ctypes.WinError() + +def _get_appdata_dir(): + """Return the 'file system directory that serves as a common + repository for application-specific data' - CSIDL_APPDATA""" + path = ctypes.create_unicode_buffer(MAX_PATH) + # get u'C:\\Documents and Settings\\<username>\\Application Data' + SHGetSpecialFolderPath(0, path, CSIDL_APPDATA, True) + return path.value diff --git a/venv/Lib/site-packages/comtypes/client/_events.py b/venv/Lib/site-packages/comtypes/client/_events.py new file mode 100644 index 00000000..5af4a212 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/client/_events.py @@ -0,0 +1,285 @@ +import ctypes +import traceback +import comtypes +import comtypes.hresult +import comtypes.automation +import comtypes.typeinfo +import comtypes.connectionpoints +from comtypes.client._generate import GetModule +import logging +logger = logging.getLogger(__name__) + +class _AdviseConnection(object): + def __init__(self, source, interface, receiver): + self.cp = None + self.cookie = None + self.receiver = None + self._connect(source, interface, receiver) + + def _connect(self, source, interface, receiver): + cpc = source.QueryInterface(comtypes.connectionpoints.IConnectionPointContainer) + self.cp = cpc.FindConnectionPoint(ctypes.byref(interface._iid_)) + logger.debug("Start advise %s", interface) + self.cookie = self.cp.Advise(receiver) + self.receiver = receiver + + def disconnect(self): + if self.cookie: + self.cp.Unadvise(self.cookie) + logger.debug("Unadvised %s", self.cp) + self.cp = None + self.cookie = None + del self.receiver + + def __del__(self): + try: + if self.cookie is not None: + self.cp.Unadvise(self.cookie) + except (comtypes.COMError, WindowsError): + # Are we sure we want to ignore errors here? + pass + +def FindOutgoingInterface(source): + """XXX Describe the strategy that is used...""" + # If the COM object implements IProvideClassInfo2, it is easy to + # find the default outgoing interface. + try: + pci = source.QueryInterface(comtypes.typeinfo.IProvideClassInfo2) + guid = pci.GetGUID(1) + except comtypes.COMError: + pass + else: + # another try: block needed? + try: + interface = comtypes.com_interface_registry[str(guid)] + except KeyError: + tinfo = pci.GetClassInfo() + tlib, index = tinfo.GetContainingTypeLib() + GetModule(tlib) + interface = comtypes.com_interface_registry[str(guid)] + logger.debug("%s using sinkinterface %s", source, interface) + return interface + + # If we can find the CLSID of the COM object, we can look for a + # registered outgoing interface (__clsid has been set by + # comtypes.client): + clsid = source.__dict__.get('__clsid') + try: + interface = comtypes.com_coclass_registry[clsid]._outgoing_interfaces_[0] + except KeyError: + pass + else: + logger.debug("%s using sinkinterface from clsid %s", source, interface) + return interface + +## interface = find_single_connection_interface(source) +## if interface: +## return interface + + raise TypeError("cannot determine source interface") + +def find_single_connection_interface(source): + # Enumerate the connection interfaces. If we find a single one, + # return it, if there are more, we give up since we cannot + # determine which one to use. 
+ cpc = source.QueryInterface(comtypes.connectionpoints.IConnectionPointContainer) + enum = cpc.EnumConnectionPoints() + iid = enum.next().GetConnectionInterface() + try: + next(enum) + except StopIteration: + try: + interface = comtypes.com_interface_registry[str(iid)] + except KeyError: + return None + else: + logger.debug("%s using sinkinterface from iid %s", source, interface) + return interface + else: + logger.debug("%s has more than one connection point", source) + + return None + +def report_errors(func): + # This decorator preserves parts of the decorated function + # signature, so that the comtypes special-casing for the 'this' + # parameter still works. + if func.__code__.co_varnames[:2] == ('self', 'this'): + def error_printer(self, this, *args, **kw): + try: + return func(self, this, *args, **kw) + except: + traceback.print_exc() + raise + else: + def error_printer(*args, **kw): + try: + return func(*args, **kw) + except: + traceback.print_exc() + raise + return error_printer + +from comtypes._comobject import _MethodFinder +class _SinkMethodFinder(_MethodFinder): + """Special MethodFinder, for finding and decorating event handler + methods. Looks for methods on two objects. Also decorates the + event handlers with 'report_errors' which will print exceptions in + event handlers. + """ + def __init__(self, inst, sink): + super(_SinkMethodFinder, self).__init__(inst) + self.sink = sink + + def find_method(self, fq_name, mthname): + impl = self._find_method(fq_name, mthname) + # Caller of this method catches AttributeError, + # so we need to be careful in the following code + # not to raise one... + try: + # impl is a bound method, dissect it... + im_self, im_func = impl.__self__, impl.__func__ + # decorate it with an error printer... + method = report_errors(im_func) + # and make a new bound method from it again. + return comtypes.instancemethod(method, + im_self, + type(im_self)) + except AttributeError as details: + raise RuntimeError(details) + + def _find_method(self, fq_name, mthname): + try: + return super(_SinkMethodFinder, self).find_method(fq_name, mthname) + except AttributeError: + try: + return getattr(self.sink, fq_name) + except AttributeError: + return getattr(self.sink, mthname) + +def CreateEventReceiver(interface, handler): + + class Sink(comtypes.COMObject): + _com_interfaces_ = [interface] + + def _get_method_finder_(self, itf): + # Use a special MethodFinder that will first try 'self', + # then the sink. + return _SinkMethodFinder(self, handler) + + sink = Sink() + + # Since our Sink object doesn't have typeinfo, it needs a + # _dispimpl_ dictionary to dispatch events received via Invoke. + if issubclass(interface, comtypes.automation.IDispatch) \ + and not hasattr(sink, "_dispimpl_"): + finder = sink._get_method_finder_(interface) + dispimpl = sink._dispimpl_ = {} + for m in interface._methods_: + restype, mthname, argtypes, paramflags, idlflags, helptext = m + # Can dispid be at a different index? Should check code generator... + # ...but hand-written code should also work... + dispid = idlflags[0] + impl = finder.get_impl(interface, mthname, paramflags, idlflags) + # XXX Wouldn't work for 'propget', 'propput', 'propputref' + # methods - are they allowed on event interfaces? + dispimpl[(dispid, comtypes.automation.DISPATCH_METHOD)] = impl + + return sink + +def GetEvents(source, sink, interface=None): + """Receive COM events from 'source'. Events will call methods on + the 'sink' object. 'interface' is the source interface to use. 
+ """ + # When called from CreateObject, the sourceinterface has already + # been determined by the coclass. Otherwise, the only thing that + # makes sense is to use IProvideClassInfo2 to get the default + # source interface. + if interface is None: + interface = FindOutgoingInterface(source) + + rcv = CreateEventReceiver(interface, sink) + return _AdviseConnection(source, interface, rcv) + +class EventDumper(object): + """Universal sink for COM events.""" + + def __getattr__(self, name): + "Create event handler methods on demand" + if name.startswith("__") and name.endswith("__"): + raise AttributeError(name) + print("# event found:", name) + def handler(self, this, *args, **kw): + # XXX handler is called with 'this'. Should we really print "None" instead? + args = (None,) + args + print("Event %s(%s)" % (name, ", ".join([repr(a) for a in args]))) + return comtypes.instancemethod(handler, self, EventDumper) + +def ShowEvents(source, interface=None): + """Receive COM events from 'source'. A special event sink will be + used that first prints the names of events that are found in the + outgoing interface, and will also print out the events when they + are fired. + """ + return comtypes.client.GetEvents(source, sink=EventDumper(), interface=interface) + +# This type is used inside 'PumpEvents', but if we create the type +# afresh each time 'PumpEvents' is called we end up creating cyclic +# garbage for each call. So we define it here instead. +_handles_type = ctypes.c_void_p * 1 + +def PumpEvents(timeout): + """This following code waits for 'timeout' seconds in the way + required for COM, internally doing the correct things depending + on the COM appartment of the current thread. It is possible to + terminate the message loop by pressing CTRL+C, which will raise + a KeyboardInterrupt. + """ + # XXX Should there be a way to pass additional event handles which + # can terminate this function? + + # XXX XXX XXX + # + # It may be that I misunderstood the CoWaitForMultipleHandles + # function. Is a message loop required in a STA? Seems so... + # + # MSDN says: + # + # If the caller resides in a single-thread apartment, + # CoWaitForMultipleHandles enters the COM modal loop, and the + # thread's message loop will continue to dispatch messages using + # the thread's message filter. If no message filter is registered + # for the thread, the default COM message processing is used. + # + # If the calling thread resides in a multithread apartment (MTA), + # CoWaitForMultipleHandles calls the Win32 function + # MsgWaitForMultipleObjects. 
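# Illustrative sketch: wiring the event helpers above together.  The ProgID
# and the OnQuit handler are only examples (Internet Explorer is used because
# this package's own docstrings mention it); any connectable object works.
import comtypes.client

ie = comtypes.client.CreateObject("InternetExplorer.Application")

class Sink(object):
    def OnQuit(self, this):
        print("browser quit")

connection = comtypes.client.GetEvents(ie, Sink())   # advise the sink
ie.Visible = True
comtypes.client.PumpEvents(5)                        # dispatch events for ~5 seconds
connection.disconnect()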
+ + hevt = ctypes.windll.kernel32.CreateEventA(None, True, False, None) + handles = _handles_type(hevt) + RPC_S_CALLPENDING = -2147417835 + +## @ctypes.WINFUNCTYPE(ctypes.c_int, ctypes.c_uint) + def HandlerRoutine(dwCtrlType): + if dwCtrlType == 0: # CTRL+C + ctypes.windll.kernel32.SetEvent(hevt) + return 1 + return 0 + HandlerRoutine = ctypes.WINFUNCTYPE(ctypes.c_int, ctypes.c_uint)(HandlerRoutine) + + ctypes.windll.kernel32.SetConsoleCtrlHandler(HandlerRoutine, 1) + + try: + try: + res = ctypes.oledll.ole32.CoWaitForMultipleHandles(0, + int(timeout * 1000), + len(handles), handles, + ctypes.byref(ctypes.c_ulong())) + except WindowsError as details: + if details.winerror != RPC_S_CALLPENDING: # timeout expired + raise + else: + raise KeyboardInterrupt + finally: + ctypes.windll.kernel32.CloseHandle(hevt) + ctypes.windll.kernel32.SetConsoleCtrlHandler(HandlerRoutine, 0) diff --git a/venv/Lib/site-packages/comtypes/client/_generate.py b/venv/Lib/site-packages/comtypes/client/_generate.py new file mode 100644 index 00000000..3e11cc9e --- /dev/null +++ b/venv/Lib/site-packages/comtypes/client/_generate.py @@ -0,0 +1,205 @@ +import types +import os +import sys +import comtypes.client +import comtypes.tools.codegenerator +import importlib + +import logging +logger = logging.getLogger(__name__) + +PATH = os.environ["PATH"].split(os.pathsep) + +def _my_import(fullname): + # helper function to import dotted modules + import comtypes.gen + if comtypes.client.gen_dir \ + and comtypes.client.gen_dir not in comtypes.gen.__path__: + comtypes.gen.__path__.append(comtypes.client.gen_dir) + return __import__(fullname, globals(), locals(), ['DUMMY']) + +def _name_module(tlib): + # Determine the name of a typelib wrapper module. + libattr = tlib.GetLibAttr() + modname = "_%s_%s_%s_%s" % \ + (str(libattr.guid)[1:-1].replace("-", "_"), + libattr.lcid, + libattr.wMajorVerNum, + libattr.wMinorVerNum) + return "comtypes.gen." + modname + +def GetModule(tlib): + """Create a module wrapping a COM typelibrary on demand. + + 'tlib' must be an ITypeLib COM pointer instance, the pathname of a + type library, a COM CLSID GUID, or a tuple/list specifying the + arguments to a comtypes.typeinfo.LoadRegTypeLib call: + + (libid, wMajorVerNum, wMinorVerNum, lcid=0) + + Or it can be an object with _reg_libid_ and _reg_version_ + attributes. + + A relative pathname is interpreted as relative to the callers + __file__, if this exists. + + This function determines the module name from the typelib + attributes, then tries to import it. If that fails because the + module doesn't exist, the module is generated into the + comtypes.gen package. + + It is possible to delete the whole comtypes\gen directory to + remove all generated modules, the directory and the __init__.py + file in it will be recreated when needed. + + If comtypes.gen __path__ is not a directory (in a frozen + executable it lives in a zip archive), generated modules are only + created in memory without writing them to the file system. + + Example: + + GetModule("shdocvw.dll") + + would create modules named + + comtypes.gen._EAB22AC0_30C1_11CF_A7EB_0000C05BAE0B_0_1_1 + comtypes.gen.SHDocVw + + containing the Python wrapper code for the type library used by + Internet Explorer. The former module contains all the code, the + latter is a short stub loading the former. 
+ """ + pathname = None + if isinstance(tlib, str): + # pathname of type library + if not os.path.isabs(tlib): + # If a relative pathname is used, we try to interpret + # this pathname as relative to the callers __file__. + frame = sys._getframe(1) + _file_ = frame.f_globals.get("__file__", None) + if _file_ is not None: + directory = os.path.dirname(os.path.abspath(_file_)) + abspath = os.path.normpath(os.path.join(directory, tlib)) + # If the file does exist, we use it. Otherwise it may + # still be that the file is on Windows search path for + # typelibs, and we leave the pathname alone. + if os.path.isfile(abspath): + tlib = abspath + logger.debug("GetModule(%s)", tlib) + pathname = tlib + tlib = comtypes.typeinfo.LoadTypeLibEx(tlib) + elif isinstance(tlib, comtypes.GUID): + # tlib contain a clsid + clsid = str(tlib) + + # lookup associated typelib in registry + import winreg + with winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, r"CLSID\%s\TypeLib" % clsid, 0, winreg.KEY_READ) as key: + typelib = winreg.EnumValue(key, 0)[1] + with winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, r"CLSID\%s\Version" % clsid, 0, winreg.KEY_READ) as key: + version = winreg.EnumValue(key, 0)[1].split(".") + + logger.debug("GetModule(%s)", typelib) + tlib = comtypes.typeinfo.LoadRegTypeLib(comtypes.GUID(typelib), int(version[0]), int(version[1]), 0) + elif isinstance(tlib, (tuple, list)): + # sequence containing libid and version numbers + logger.debug("GetModule(%s)", (tlib,)) + tlib = comtypes.typeinfo.LoadRegTypeLib(comtypes.GUID(tlib[0]), *tlib[1:]) + elif hasattr(tlib, "_reg_libid_"): + # a COMObject implementation + logger.debug("GetModule(%s)", tlib) + tlib = comtypes.typeinfo.LoadRegTypeLib(comtypes.GUID(tlib._reg_libid_), + *tlib._reg_version_) + else: + # an ITypeLib pointer + logger.debug("GetModule(%s)", tlib.GetLibAttr()) + + # create and import the module + mod = _CreateWrapper(tlib, pathname) + try: + modulename = tlib.GetDocumentation(-1)[0] + except comtypes.COMError: + return mod + if modulename is None: + return mod + if sys.version_info < (3, 0): + modulename = modulename.encode("mbcs") + + # create and import the friendly-named module + try: + mod = _my_import("comtypes.gen." + modulename) + except Exception as details: + logger.info("Could not import comtypes.gen.%s: %s", modulename, details) + else: + return mod + # the module is always regenerated if the import fails + logger.info("# Generating comtypes.gen.%s", modulename) + # determine the Python module name + fullname = _name_module(tlib) + modname = fullname.split(".")[-1] + code = "from comtypes.gen import %s\nglobals().update(%s.__dict__)\n" % (modname, modname) + code += "__name__ = 'comtypes.gen.%s'" % modulename + if comtypes.client.gen_dir is None: + mod = types.ModuleType("comtypes.gen." + modulename) + mod.__file__ = os.path.join(os.path.abspath(comtypes.gen.__path__[0]), + "<memory>") + exec(code, mod.__dict__) + sys.modules["comtypes.gen." + modulename] = mod + setattr(comtypes.gen, modulename, mod) + return mod + # create in file system, and import it + ofi = open(os.path.join(comtypes.client.gen_dir, modulename + ".py"), "w") + ofi.write(code) + ofi.close() + # clear the import cache to make sure Python sees newly created modules + if hasattr(importlib, "invalidate_caches"): + importlib.invalidate_caches() + return _my_import("comtypes.gen." + modulename) + +def _CreateWrapper(tlib, pathname=None): + # helper which creates and imports the real typelib wrapper module. 
+ fullname = _name_module(tlib) + try: + return sys.modules[fullname] + except KeyError: + pass + + modname = fullname.split(".")[-1] + + try: + return _my_import(fullname) + except Exception as details: + logger.info("Could not import %s: %s", fullname, details) + + # generate the module since it doesn't exist or is out of date + from comtypes.tools.tlbparser import generate_module + if comtypes.client.gen_dir is None: + import io + ofi = io.StringIO() + else: + ofi = open(os.path.join(comtypes.client.gen_dir, modname + ".py"), "w") + # XXX use logging! + logger.info("# Generating comtypes.gen.%s", modname) + generate_module(tlib, ofi, pathname) + + if comtypes.client.gen_dir is None: + code = ofi.getvalue() + mod = types.ModuleType(fullname) + mod.__file__ = os.path.join(os.path.abspath(comtypes.gen.__path__[0]), + "<memory>") + exec(code, mod.__dict__) + sys.modules[fullname] = mod + setattr(comtypes.gen, modname, mod) + else: + ofi.close() + # clear the import cache to make sure Python sees newly created modules + if hasattr(importlib, "invalidate_caches"): + importlib.invalidate_caches() + mod = _my_import(fullname) + return mod + +################################################################ + +if __name__ == "__main__": + # When started as script, generate typelib wrapper from .tlb file. + GetModule(sys.argv[1]) diff --git a/venv/Lib/site-packages/comtypes/client/dynamic.py b/venv/Lib/site-packages/comtypes/client/dynamic.py new file mode 100644 index 00000000..2d9f6d42 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/client/dynamic.py @@ -0,0 +1,165 @@ +import ctypes +import comtypes.automation +import comtypes.typeinfo +import comtypes.client +import comtypes.client.lazybind + +from comtypes import COMError, IUnknown, _is_object +import comtypes.hresult as hres + +# These errors generally mean the property or method exists, +# but can't be used in this context - eg, property instead of a method, etc. +# Used to determine if we have a real error or not. +ERRORS_BAD_CONTEXT = [ + hres.DISP_E_MEMBERNOTFOUND, + hres.DISP_E_BADPARAMCOUNT, + hres.DISP_E_PARAMNOTOPTIONAL, + hres.DISP_E_TYPEMISMATCH, + hres.E_INVALIDARG, +] + +def Dispatch(obj): + # Wrap an object in a Dispatch instance, exposing methods and properties + # via fully dynamic dispatch + if isinstance(obj, _Dispatch): + return obj + if isinstance(obj, ctypes.POINTER(comtypes.automation.IDispatch)): + try: + tinfo = obj.GetTypeInfo(0) + except (comtypes.COMError, WindowsError): + return _Dispatch(obj) + return comtypes.client.lazybind.Dispatch(obj, tinfo) + return obj + +class MethodCaller: + # Wrong name: does not only call methods but also handle + # property accesses. 
+ def __init__(self, _id, _obj): + self._id = _id + self._obj = _obj + + def __call__(self, *args): + return self._obj._comobj.Invoke(self._id, *args) + + def __getitem__(self, *args): + return self._obj._comobj.Invoke(self._id, *args, + **dict(_invkind=comtypes.automation.DISPATCH_PROPERTYGET)) + + def __setitem__(self, *args): + if _is_object(args[-1]): + self._obj._comobj.Invoke(self._id, *args, + **dict(_invkind=comtypes.automation.DISPATCH_PROPERTYPUTREF)) + else: + self._obj._comobj.Invoke(self._id, *args, + **dict(_invkind=comtypes.automation.DISPATCH_PROPERTYPUT)) + +class _Dispatch(object): + # Expose methods and properties via fully dynamic dispatch + def __init__(self, comobj): + self.__dict__["_comobj"] = comobj + self.__dict__["_ids"] = {} # Tiny optimization: trying not to use GetIDsOfNames more than once + self.__dict__["_methods"] = set() + + def __enum(self): + e = self._comobj.Invoke(-4) # DISPID_NEWENUM + return e.QueryInterface(comtypes.automation.IEnumVARIANT) + + def __cmp__(self, other): + if not isinstance(other, _Dispatch): + return 1 + return cmp(self._comobj, other._comobj) + + def __hash__(self): + return hash(self._comobj) + + def __getitem__(self, index): + enum = self.__enum() + if index > 0: + if 0 != enum.Skip(index): + raise IndexError("index out of range") + item, fetched = enum.Next(1) + if not fetched: + raise IndexError("index out of range") + return item + + def QueryInterface(self, *args): + "QueryInterface is forwarded to the real com object." + return self._comobj.QueryInterface(*args) + + def _FlagAsMethod(self, *names): + """Flag these attribute names as being methods. + Some objects do not correctly differentiate methods and + properties, leading to problems when calling these methods. + + Specifically, trying to say: ob.SomeFunc() + may yield an exception "None object is not callable" + In this case, an attempt to fetch the *property*has worked + and returned None, rather than indicating it is really a method. + Calling: ob._FlagAsMethod("SomeFunc") + should then allow this to work. 
+ """ + self._methods.update(names) + + def __getattr__(self, name): + if name.startswith("__") and name.endswith("__"): + raise AttributeError(name) +## tc = self._comobj.GetTypeInfo(0).QueryInterface(comtypes.typeinfo.ITypeComp) +## dispid = tc.Bind(name)[1].memid + dispid = self._ids.get(name) + if not dispid: + dispid = self._comobj.GetIDsOfNames(name)[0] + self._ids[name] = dispid + + if name in self._methods: + result = MethodCaller(dispid, self) + self.__dict__[name] = result + return result + + flags = comtypes.automation.DISPATCH_PROPERTYGET + try: + result = self._comobj.Invoke(dispid, _invkind=flags) + except COMError as err: + (hresult, text, details) = err.args + if hresult in ERRORS_BAD_CONTEXT: + result = MethodCaller(dispid, self) + self.__dict__[name] = result + else: + # The line break is important for 2to3 to work correctly + raise + except: + # The line break is important for 2to3 to work correctly + raise + + return result + + def __setattr__(self, name, value): + dispid = self._ids.get(name) + if not dispid: + dispid = self._comobj.GetIDsOfNames(name)[0] + self._ids[name] = dispid + # Detect whether to use DISPATCH_PROPERTYPUT or + # DISPATCH_PROPERTYPUTREF + flags = 8 if _is_object(value) else 4 + return self._comobj.Invoke(dispid, value, _invkind=flags) + + def __iter__(self): + return _Collection(self.__enum()) + +## def __setitem__(self, index, value): +## self._comobj.Invoke(-3, index, value, +## _invkind=comtypes.automation.DISPATCH_PROPERTYPUT|comtypes.automation.DISPATCH_PROPERTYPUTREF) + +class _Collection(object): + def __init__(self, enum): + self.enum = enum + + def __next__(self): + item, fetched = self.enum.Next(1) + if fetched: + return item + raise StopIteration + + def __iter__(self): + return self + +__all__ = ["Dispatch"] diff --git a/venv/Lib/site-packages/comtypes/client/lazybind.py b/venv/Lib/site-packages/comtypes/client/lazybind.py new file mode 100644 index 00000000..4705e4a7 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/client/lazybind.py @@ -0,0 +1,267 @@ +import comtypes +import comtypes.automation + +from comtypes.automation import IEnumVARIANT +from comtypes.automation import DISPATCH_METHOD +from comtypes.automation import DISPATCH_PROPERTYGET +from comtypes.automation import DISPATCH_PROPERTYPUT +from comtypes.automation import DISPATCH_PROPERTYPUTREF + +from comtypes.automation import DISPID_VALUE +from comtypes.automation import DISPID_NEWENUM + +from comtypes.typeinfo import FUNC_PUREVIRTUAL, FUNC_DISPATCH + + +class FuncDesc(object): + """Stores important FUNCDESC properties by copying them from a + real FUNCDESC instance. + """ + def __init__(self, **kw): + self.__dict__.update(kw) + +# What is missing? +# +# Should NamedProperty support __call__()? 
+ +_all_slice = slice(None, None, None) + + +class NamedProperty(object): + def __init__(self, disp, get, put, putref): + self.get = get + self.put = put + self.putref = putref + self.disp = disp + + def __getitem__(self, arg): + if self.get is None: + raise TypeError("unsubscriptable object") + if isinstance(arg, tuple): + return self.disp._comobj._invoke(self.get.memid, + self.get.invkind, + 0, + *arg) + elif arg == _all_slice: + return self.disp._comobj._invoke(self.get.memid, + self.get.invkind, + 0) + return self.disp._comobj._invoke(self.get.memid, + self.get.invkind, + 0, + *[arg]) + + def __call__(self, *args): + if self.get is None: + raise TypeError("object is not callable") + return self.disp._comobj._invoke(self.get.memid, + self.get.invkind, + 0, + *args) + + def __setitem__(self, name, value): + # See discussion in Dispatch.__setattr__ below. + if self.put is None and self.putref is None: + raise TypeError("object does not support item assignment") + if comtypes._is_object(value): + descr = self.putref or self.put + else: + descr = self.put or self.putref + if isinstance(name, tuple): + self.disp._comobj._invoke(descr.memid, + descr.invkind, + 0, + *(name + (value,))) + elif name == _all_slice: + self.disp._comobj._invoke(descr.memid, + descr.invkind, + 0, + value) + else: + self.disp._comobj._invoke(descr.memid, + descr.invkind, + 0, + name, + value) + + def __iter__(self): + """ Explicitly disallow iteration. """ + msg = "%r is not iterable" % self.disp + raise TypeError(msg) + + +# The following 'Dispatch' class, returned from +# CreateObject(progid, dynamic=True) +# differs in behaviour from objects created with +# CreateObject(progid, dynamic=False) +# (let us call the latter 'Custom' objects for this discussion): +# +# +# 1. Dispatch objects support __call__(), custom objects do not +# +# 2. Custom object methods support named arguments, Dispatch +# objects do not (could be added, would probably be expensive) + +class Dispatch(object): + """Dynamic dispatch for an object that exposes type information. + Binding at runtime is done via ITypeComp::Bind calls. + """ + def __init__(self, comobj, tinfo): + self.__dict__["_comobj"] = comobj + self.__dict__["_tinfo"] = tinfo + self.__dict__["_tcomp"] = tinfo.GetTypeComp() + self.__dict__["_tdesc"] = {} +## self.__dict__["_iid"] = tinfo.GetTypeAttr().guid + + def __bind(self, name, invkind): + """Bind (name, invkind) and return a FuncDesc instance or + None. Results (even unsuccessful ones) are cached.""" + # We could cache the info in the class instead of the + # instance, but we would need an additional key for that: + # self._iid + try: + return self._tdesc[(name, invkind)] + except KeyError: + try: + descr = self._tcomp.Bind(name, invkind)[1] + except comtypes.COMError: + info = None + else: + # Using a separate instance to store interesting + # attributes of descr avoids that the typecomp instance is + # kept alive... + info = FuncDesc(memid=descr.memid, + invkind=descr.invkind, + cParams=descr.cParams, + funckind=descr.funckind) + self._tdesc[(name, invkind)] = info + return info + + def QueryInterface(self, *args): + "QueryInterface is forwarded to the real com object." 
+ return self._comobj.QueryInterface(*args) + + def __cmp__(self, other): + if not isinstance(other, Dispatch): + return 1 + return cmp(self._comobj, other._comobj) + + def __eq__(self, other): + return isinstance(other, Dispatch) and \ + self._comobj == other._comobj + + def __hash__(self): + return hash(self._comobj) + + def __getattr__(self, name): + """Get a COM attribute.""" + if name.startswith("__") and name.endswith("__"): + raise AttributeError(name) + # check for propget or method + descr = self.__bind(name, DISPATCH_METHOD | DISPATCH_PROPERTYGET) + if descr is None: + raise AttributeError(name) + if descr.invkind == DISPATCH_PROPERTYGET: + # DISPATCH_PROPERTYGET + if descr.funckind == FUNC_DISPATCH: + if descr.cParams == 0: + return self._comobj._invoke(descr.memid, descr.invkind, 0) + elif descr.funckind == FUNC_PUREVIRTUAL: + # FUNC_PUREVIRTUAL descriptions contain the property + # itself as a parameter. + if descr.cParams == 1: + return self._comobj._invoke(descr.memid, descr.invkind, 0) + else: + raise RuntimeError("funckind %d not yet implemented" % descr.funckind) + put = self.__bind(name, DISPATCH_PROPERTYPUT) + putref = self.__bind(name, DISPATCH_PROPERTYPUTREF) + return NamedProperty(self, descr, put, putref) + else: + # DISPATCH_METHOD + def caller(*args): + return self._comobj._invoke(descr.memid, descr.invkind, 0, *args) + try: + caller.__name__ = name + except TypeError: + # In Python 2.3, __name__ is readonly + pass + return caller + + def __setattr__(self, name, value): + # Hm, this can be a propput, a propputref, or 'both' property. + # (Or nothing at all.) + # + # Whether propput or propputref is called will depend on what + # is available, and on the type of 'value' as determined by + # comtypes._is_object(value). + # + # I think that the following table MAY be correct; although I + # have no idea whether the cases marked (?) are really valid. + # + # invkind available | _is_object(value) | invkind we should use + # --------------------------------------------------------------- + # put | True | put (?) + # put | False | put + # putref | True | putref + # putref | False | putref (?) 
+ # put, putref | True | putref + # put, putref | False | put + put = self.__bind(name, DISPATCH_PROPERTYPUT) + putref = self.__bind(name, DISPATCH_PROPERTYPUTREF) + if not put and not putref: + raise AttributeError(name) + if comtypes._is_object(value): + descr = putref or put + else: + descr = put or putref + if descr.cParams == 1: + self._comobj._invoke(descr.memid, descr.invkind, 0, value) + return + raise AttributeError(name) + + def __call__(self, *args): + return self._comobj._invoke(DISPID_VALUE, + DISPATCH_METHOD | DISPATCH_PROPERTYGET, + 0, + *args) + + def __getitem__(self, arg): + if isinstance(arg, tuple): + args = arg + elif arg == _all_slice: + args = () + else: + args = (arg,) + + try: + return self._comobj._invoke(DISPID_VALUE, + DISPATCH_METHOD | DISPATCH_PROPERTYGET, + 0, + *args) + except comtypes.COMError: + return iter(self)[arg] + + def __setitem__(self, name, value): + if comtypes._is_object(value): + invkind = DISPATCH_PROPERTYPUTREF + else: + invkind = DISPATCH_PROPERTYPUT + + if isinstance(name, tuple): + args = name + (value,) + elif name == _all_slice: + args = (value,) + else: + args = (name, value) + return self._comobj._invoke(DISPID_VALUE, + invkind, + 0, + *args) + + def __iter__(self): + punk = self._comobj._invoke(DISPID_NEWENUM, + DISPATCH_METHOD | DISPATCH_PROPERTYGET, + 0) + enum = punk.QueryInterface(IEnumVARIANT) + enum._dynamic = True + return enum diff --git a/venv/Lib/site-packages/comtypes/connectionpoints.py b/venv/Lib/site-packages/comtypes/connectionpoints.py new file mode 100644 index 00000000..0e89a60c --- /dev/null +++ b/venv/Lib/site-packages/comtypes/connectionpoints.py @@ -0,0 +1,94 @@ +from ctypes import * +from comtypes import IUnknown, COMMETHOD, GUID, HRESULT, dispid +_GUID = GUID + +class tagCONNECTDATA(Structure): + _fields_ = [ + ('pUnk', POINTER(IUnknown)), + ('dwCookie', c_ulong), + ] +CONNECTDATA = tagCONNECTDATA + +################################################################ + +class IConnectionPointContainer(IUnknown): + _iid_ = GUID('{B196B284-BAB4-101A-B69C-00AA00341D07}') + _idlflags_ = [] + +class IConnectionPoint(IUnknown): + _iid_ = GUID('{B196B286-BAB4-101A-B69C-00AA00341D07}') + _idlflags_ = [] + +class IEnumConnections(IUnknown): + _iid_ = GUID('{B196B287-BAB4-101A-B69C-00AA00341D07}') + _idlflags_ = [] + + def __iter__(self): + return self + + def __next__(self): + cp, fetched = self.Next(1) + if fetched == 0: + raise StopIteration + return cp + +class IEnumConnectionPoints(IUnknown): + _iid_ = GUID('{B196B285-BAB4-101A-B69C-00AA00341D07}') + _idlflags_ = [] + + def __iter__(self): + return self + + def __next__(self): + cp, fetched = self.Next(1) + if fetched == 0: + raise StopIteration + return cp + +################################################################ + +IConnectionPointContainer._methods_ = [ + COMMETHOD([], HRESULT, 'EnumConnectionPoints', + ( ['out'], POINTER(POINTER(IEnumConnectionPoints)), 'ppEnum' )), + COMMETHOD([], HRESULT, 'FindConnectionPoint', + ( ['in'], POINTER(_GUID), 'riid' ), + ( ['out'], POINTER(POINTER(IConnectionPoint)), 'ppCP' )), +] + +IConnectionPoint._methods_ = [ + COMMETHOD([], HRESULT, 'GetConnectionInterface', + ( ['out'], POINTER(_GUID), 'pIID' )), + COMMETHOD([], HRESULT, 'GetConnectionPointContainer', + ( ['out'], POINTER(POINTER(IConnectionPointContainer)), 'ppCPC' )), + COMMETHOD([], HRESULT, 'Advise', + ( ['in'], POINTER(IUnknown), 'pUnkSink' ), + ( ['out'], POINTER(c_ulong), 'pdwCookie' )), + COMMETHOD([], HRESULT, 'Unadvise', + ( ['in'], c_ulong, 'dwCookie' )), 
+ COMMETHOD([], HRESULT, 'EnumConnections', + ( ['out'], POINTER(POINTER(IEnumConnections)), 'ppEnum' )), +] + +IEnumConnections._methods_ = [ + COMMETHOD([], HRESULT, 'Next', + ( ['in'], c_ulong, 'cConnections' ), + ( ['out'], POINTER(tagCONNECTDATA), 'rgcd' ), + ( ['out'], POINTER(c_ulong), 'pcFetched' )), + COMMETHOD([], HRESULT, 'Skip', + ( ['in'], c_ulong, 'cConnections' )), + COMMETHOD([], HRESULT, 'Reset'), + COMMETHOD([], HRESULT, 'Clone', + ( ['out'], POINTER(POINTER(IEnumConnections)), 'ppEnum' )), +] + +IEnumConnectionPoints._methods_ = [ + COMMETHOD([], HRESULT, 'Next', + ( ['in'], c_ulong, 'cConnections' ), + ( ['out'], POINTER(POINTER(IConnectionPoint)), 'ppCP' ), + ( ['out'], POINTER(c_ulong), 'pcFetched' )), + COMMETHOD([], HRESULT, 'Skip', + ( ['in'], c_ulong, 'cConnections' )), + COMMETHOD([], HRESULT, 'Reset'), + COMMETHOD([], HRESULT, 'Clone', + ( ['out'], POINTER(POINTER(IEnumConnectionPoints)), 'ppEnum' )), +] diff --git a/venv/Lib/site-packages/comtypes/errorinfo.py b/venv/Lib/site-packages/comtypes/errorinfo.py new file mode 100644 index 00000000..a033e2ff --- /dev/null +++ b/venv/Lib/site-packages/comtypes/errorinfo.py @@ -0,0 +1,105 @@ +import sys +from ctypes import * +from comtypes import IUnknown, HRESULT, COMMETHOD, GUID, BSTR +from comtypes.hresult import * + +LPCOLESTR = c_wchar_p +DWORD = c_ulong + +class ICreateErrorInfo(IUnknown): + _iid_ = GUID("{22F03340-547D-101B-8E65-08002B2BD119}") + _methods_ = [ + COMMETHOD([], HRESULT, 'SetGUID', + (['in'], POINTER(GUID), "rguid")), + COMMETHOD([], HRESULT, 'SetSource', + (['in'], LPCOLESTR, "szSource")), + COMMETHOD([], HRESULT, 'SetDescription', + (['in'], LPCOLESTR, "szDescription")), + COMMETHOD([], HRESULT, 'SetHelpFile', + (['in'], LPCOLESTR, "szHelpFile")), + COMMETHOD([], HRESULT, 'SetHelpContext', + (['in'], DWORD, "dwHelpContext")) + ] + +class IErrorInfo(IUnknown): + _iid_ = GUID("{1CF2B120-547D-101B-8E65-08002B2BD119}") + _methods_ = [ + COMMETHOD([], HRESULT, 'GetGUID', + (['out'], POINTER(GUID), "pGUID")), + COMMETHOD([], HRESULT, 'GetSource', + (['out'], POINTER(BSTR), "pBstrSource")), + COMMETHOD([], HRESULT, 'GetDescription', + (['out'], POINTER(BSTR), "pBstrDescription")), + COMMETHOD([], HRESULT, 'GetHelpFile', + (['out'], POINTER(BSTR), "pBstrHelpFile")), + COMMETHOD([], HRESULT, 'GetHelpContext', + (['out'], POINTER(DWORD), "pdwHelpContext")), + ] + +class ISupportErrorInfo(IUnknown): + _iid_ = GUID("{DF0B3D60-548F-101B-8E65-08002B2BD119}") + _methods_ = [ + COMMETHOD([], HRESULT, 'InterfaceSupportsErrorInfo', + (['in'], POINTER(GUID), 'riid')) + ] + +################################################################ +_oleaut32 = oledll.oleaut32 + +def CreateErrorInfo(): + cei = POINTER(ICreateErrorInfo)() + _oleaut32.CreateErrorInfo(byref(cei)) + return cei + +def GetErrorInfo(): + """Get the error information for the current thread.""" + errinfo = POINTER(IErrorInfo)() + if S_OK == _oleaut32.GetErrorInfo(0, byref(errinfo)): + return errinfo + return None + +def SetErrorInfo(errinfo): + """Set error information for the current thread.""" + return _oleaut32.SetErrorInfo(0, errinfo) + +def ReportError(text, iid, + clsid=None, helpfile=None, helpcontext=0, hresult=DISP_E_EXCEPTION): + """Report a COM error. 
Returns the passed in hresult value.""" + ei = CreateErrorInfo() + ei.SetDescription(text) + ei.SetGUID(iid) + if helpfile is not None: + ei.SetHelpFile(helpfile) + if helpcontext is not None: + ei.SetHelpContext(helpcontext) + if clsid is not None: + if isinstance(clsid, str): + clsid = GUID(clsid) + try: + progid = clsid.as_progid() + except WindowsError: + pass + else: + ei.SetSource(progid) # progid for the class or application that created the error + _oleaut32.SetErrorInfo(0, ei) + return hresult + +def ReportException(hresult, iid, clsid=None, helpfile=None, helpcontext=None, + stacklevel=None): + """Report a COM exception. Returns the passed in hresult value.""" + typ, value, tb = sys.exc_info() + if stacklevel is not None: + for _ in range(stacklevel): + tb = tb.tb_next + line = tb.tb_frame.f_lineno + name = tb.tb_frame.f_globals["__name__"] + text = "%s: %s (%s, line %d)" % (typ, value, name, line) + else: + text = "%s: %s" % (typ, value) + return ReportError(text, iid, + clsid=clsid, helpfile=helpfile, helpcontext=helpcontext, + hresult=hresult) + +__all__ = ["ICreateErrorInfo", "IErrorInfo", "ISupportErrorInfo", + "ReportError", "ReportException", + "SetErrorInfo", "GetErrorInfo", "CreateErrorInfo"] diff --git a/venv/Lib/site-packages/comtypes/gen/UIAutomationClient.py b/venv/Lib/site-packages/comtypes/gen/UIAutomationClient.py new file mode 100644 index 00000000..9972201f --- /dev/null +++ b/venv/Lib/site-packages/comtypes/gen/UIAutomationClient.py @@ -0,0 +1,3 @@ +from comtypes.gen import _944DE083_8FB8_45CF_BCB7_C477ACB2F897_0_1_0 +globals().update(_944DE083_8FB8_45CF_BCB7_C477ACB2F897_0_1_0.__dict__) +__name__ = 'comtypes.gen.UIAutomationClient' \ No newline at end of file diff --git a/venv/Lib/site-packages/comtypes/gen/_00020430_0000_0000_C000_000000000046_0_2_0.py b/venv/Lib/site-packages/comtypes/gen/_00020430_0000_0000_C000_000000000046_0_2_0.py new file mode 100644 index 00000000..61e154f9 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/gen/_00020430_0000_0000_C000_000000000046_0_2_0.py @@ -0,0 +1,396 @@ +# -*- coding: mbcs -*- +typelib_path = 'C:\\Windows\\System32\\stdole2.tlb' +_lcid = 0 # change this if required +from ctypes import * +from comtypes.automation import IDispatch +from comtypes import GUID +OLE_HANDLE = c_int +OLE_XSIZE_HIMETRIC = c_int +OLE_YSIZE_HIMETRIC = c_int +OLE_XPOS_HIMETRIC = c_int +OLE_YPOS_HIMETRIC = c_int +from comtypes import dispid +from comtypes import DISPMETHOD, DISPPROPERTY, helpstring +from comtypes import BSTR +OLE_COLOR = c_ulong +from comtypes import CoClass +from comtypes import IUnknown +OLE_XPOS_PIXELS = c_int +from ctypes.wintypes import VARIANT_BOOL +FONTITALIC = VARIANT_BOOL +from comtypes import GUID +OLE_YPOS_PIXELS = c_int +OLE_XSIZE_PIXELS = c_int +OLE_YSIZE_PIXELS = c_int +FONTSIZE = c_longlong +from ctypes import HRESULT +from comtypes import helpstring +from comtypes import COMMETHOD +from comtypes.automation import DISPPARAMS +OLE_XPOS_CONTAINER = c_float +from comtypes.automation import IEnumVARIANT +OLE_YPOS_CONTAINER = c_float +OLE_XSIZE_CONTAINER = c_float +OLE_YSIZE_CONTAINER = c_float +OLE_OPTEXCLUSIVE = VARIANT_BOOL +OLE_CANCELBOOL = VARIANT_BOOL +OLE_ENABLEDEFAULTBOOL = VARIANT_BOOL +from comtypes.automation import EXCEPINFO +FONTUNDERSCORE = VARIANT_BOOL +FONTBOLD = VARIANT_BOOL +FONTSTRIKETHROUGH = VARIANT_BOOL +FONTNAME = BSTR + + +class Picture(IDispatch): + _case_insensitive_ = True + _iid_ = GUID('{7BF80981-BF32-101A-8BBB-00AA00300CAB}') + _idlflags_ = [] + _methods_ = [] +Picture._disp_methods_ = 
[ + DISPPROPERTY([dispid(0), 'readonly'], OLE_HANDLE, 'Handle'), + DISPPROPERTY([dispid(2)], OLE_HANDLE, 'hPal'), + DISPPROPERTY([dispid(3), 'readonly'], c_short, 'Type'), + DISPPROPERTY([dispid(4), 'readonly'], OLE_XSIZE_HIMETRIC, 'Width'), + DISPPROPERTY([dispid(5), 'readonly'], OLE_YSIZE_HIMETRIC, 'Height'), + DISPMETHOD([dispid(6)], None, 'Render', + ( [], c_int, 'hdc' ), + ( [], c_int, 'x' ), + ( [], c_int, 'y' ), + ( [], c_int, 'cx' ), + ( [], c_int, 'cy' ), + ( [], OLE_XPOS_HIMETRIC, 'xSrc' ), + ( [], OLE_YPOS_HIMETRIC, 'ySrc' ), + ( [], OLE_XSIZE_HIMETRIC, 'cxSrc' ), + ( [], OLE_YSIZE_HIMETRIC, 'cySrc' ), + ( [], c_void_p, 'prcWBounds' )), +] +IPictureDisp = Picture +class FontEvents(IDispatch): + _case_insensitive_ = True + 'Event interface for the Font object' + _iid_ = GUID('{4EF6100A-AF88-11D0-9846-00C04FC29993}') + _idlflags_ = ['hidden'] + _methods_ = [] +FontEvents._disp_methods_ = [ + DISPMETHOD([dispid(9)], None, 'FontChanged', + ( ['in'], BSTR, 'PropertyName' )), +] +class StdPicture(CoClass): + _reg_clsid_ = GUID('{0BE35204-8F91-11CE-9DE3-00AA004BB851}') + _idlflags_ = [] + _typelib_path_ = typelib_path + _reg_typelib_ = ('{00020430-0000-0000-C000-000000000046}', 2, 0) +class IPicture(IUnknown): + _case_insensitive_ = True + 'Picture Object' + _iid_ = GUID('{7BF80980-BF32-101A-8BBB-00AA00300CAB}') + _idlflags_ = ['hidden'] +StdPicture._com_interfaces_ = [Picture, IPicture] + + +# values for enumeration 'OLE_TRISTATE' +Unchecked = 0 +Checked = 1 +Gray = 2 +OLE_TRISTATE = c_int # enum +IFontEventsDisp = FontEvents +class Library(object): + 'OLE Automation' + name = 'stdole' + _reg_typelib_ = ('{00020430-0000-0000-C000-000000000046}', 2, 0) + +class StdFont(CoClass): + _reg_clsid_ = GUID('{0BE35203-8F91-11CE-9DE3-00AA004BB851}') + _idlflags_ = [] + _typelib_path_ = typelib_path + _reg_typelib_ = ('{00020430-0000-0000-C000-000000000046}', 2, 0) +class Font(IDispatch): + _case_insensitive_ = True + _iid_ = GUID('{BEF6E003-A874-101A-8BBA-00AA00300CAB}') + _idlflags_ = [] + _methods_ = [] +class IFont(IUnknown): + _case_insensitive_ = True + 'Font Object' + _iid_ = GUID('{BEF6E002-A874-101A-8BBA-00AA00300CAB}') + _idlflags_ = ['hidden'] +StdFont._com_interfaces_ = [Font, IFont] +StdFont._outgoing_interfaces_ = [FontEvents] + +IPicture._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'Handle', + ( ['out', 'retval'], POINTER(OLE_HANDLE), 'phandle' )), + COMMETHOD(['propget'], HRESULT, 'hPal', + ( ['out', 'retval'], POINTER(OLE_HANDLE), 'phpal' )), + COMMETHOD(['propget'], HRESULT, 'Type', + ( ['out', 'retval'], POINTER(c_short), 'ptype' )), + COMMETHOD(['propget'], HRESULT, 'Width', + ( ['out', 'retval'], POINTER(OLE_XSIZE_HIMETRIC), 'pwidth' )), + COMMETHOD(['propget'], HRESULT, 'Height', + ( ['out', 'retval'], POINTER(OLE_YSIZE_HIMETRIC), 'pheight' )), + COMMETHOD([], HRESULT, 'Render', + ( ['in'], c_int, 'hdc' ), + ( ['in'], c_int, 'x' ), + ( ['in'], c_int, 'y' ), + ( ['in'], c_int, 'cx' ), + ( ['in'], c_int, 'cy' ), + ( ['in'], OLE_XPOS_HIMETRIC, 'xSrc' ), + ( ['in'], OLE_YPOS_HIMETRIC, 'ySrc' ), + ( ['in'], OLE_XSIZE_HIMETRIC, 'cxSrc' ), + ( ['in'], OLE_YSIZE_HIMETRIC, 'cySrc' ), + ( ['in'], c_void_p, 'prcWBounds' )), + COMMETHOD(['propput'], HRESULT, 'hPal', + ( ['in'], OLE_HANDLE, 'phpal' )), + COMMETHOD(['propget'], HRESULT, 'CurDC', + ( ['out', 'retval'], POINTER(c_int), 'phdcOut' )), + COMMETHOD([], HRESULT, 'SelectPicture', + ( ['in'], c_int, 'hdcIn' ), + ( ['out'], POINTER(c_int), 'phdcOut' ), + ( ['out'], POINTER(OLE_HANDLE), 'phbmpOut' )), + COMMETHOD(['propget'], 
HRESULT, 'KeepOriginalFormat', + ( ['out', 'retval'], POINTER(VARIANT_BOOL), 'pfkeep' )), + COMMETHOD(['propput'], HRESULT, 'KeepOriginalFormat', + ( ['in'], VARIANT_BOOL, 'pfkeep' )), + COMMETHOD([], HRESULT, 'PictureChanged'), + COMMETHOD([], HRESULT, 'SaveAsFile', + ( ['in'], c_void_p, 'pstm' ), + ( ['in'], VARIANT_BOOL, 'fSaveMemCopy' ), + ( ['out'], POINTER(c_int), 'pcbSize' )), + COMMETHOD(['propget'], HRESULT, 'Attributes', + ( ['out', 'retval'], POINTER(c_int), 'pdwAttr' )), + COMMETHOD([], HRESULT, 'SetHdc', + ( ['in'], OLE_HANDLE, 'hdc' )), +] +################################################################ +## code template for IPicture implementation +##class IPicture_Impl(object): +## @property +## def Handle(self): +## '-no docstring-' +## #return phandle +## +## def _get(self): +## '-no docstring-' +## #return phpal +## def _set(self, phpal): +## '-no docstring-' +## hPal = property(_get, _set, doc = _set.__doc__) +## +## @property +## def Type(self): +## '-no docstring-' +## #return ptype +## +## @property +## def Width(self): +## '-no docstring-' +## #return pwidth +## +## @property +## def Height(self): +## '-no docstring-' +## #return pheight +## +## def Render(self, hdc, x, y, cx, cy, xSrc, ySrc, cxSrc, cySrc, prcWBounds): +## '-no docstring-' +## #return +## +## @property +## def CurDC(self): +## '-no docstring-' +## #return phdcOut +## +## def SelectPicture(self, hdcIn): +## '-no docstring-' +## #return phdcOut, phbmpOut +## +## def _get(self): +## '-no docstring-' +## #return pfkeep +## def _set(self, pfkeep): +## '-no docstring-' +## KeepOriginalFormat = property(_get, _set, doc = _set.__doc__) +## +## def PictureChanged(self): +## '-no docstring-' +## #return +## +## def SaveAsFile(self, pstm, fSaveMemCopy): +## '-no docstring-' +## #return pcbSize +## +## @property +## def Attributes(self): +## '-no docstring-' +## #return pdwAttr +## +## def SetHdc(self, hdc): +## '-no docstring-' +## #return +## + +IFont._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'Name', + ( ['out', 'retval'], POINTER(BSTR), 'pname' )), + COMMETHOD(['propput'], HRESULT, 'Name', + ( ['in'], BSTR, 'pname' )), + COMMETHOD(['propget'], HRESULT, 'Size', + ( ['out', 'retval'], POINTER(c_longlong), 'psize' )), + COMMETHOD(['propput'], HRESULT, 'Size', + ( ['in'], c_longlong, 'psize' )), + COMMETHOD(['propget'], HRESULT, 'Bold', + ( ['out', 'retval'], POINTER(VARIANT_BOOL), 'pbold' )), + COMMETHOD(['propput'], HRESULT, 'Bold', + ( ['in'], VARIANT_BOOL, 'pbold' )), + COMMETHOD(['propget'], HRESULT, 'Italic', + ( ['out', 'retval'], POINTER(VARIANT_BOOL), 'pitalic' )), + COMMETHOD(['propput'], HRESULT, 'Italic', + ( ['in'], VARIANT_BOOL, 'pitalic' )), + COMMETHOD(['propget'], HRESULT, 'Underline', + ( ['out', 'retval'], POINTER(VARIANT_BOOL), 'punderline' )), + COMMETHOD(['propput'], HRESULT, 'Underline', + ( ['in'], VARIANT_BOOL, 'punderline' )), + COMMETHOD(['propget'], HRESULT, 'Strikethrough', + ( ['out', 'retval'], POINTER(VARIANT_BOOL), 'pstrikethrough' )), + COMMETHOD(['propput'], HRESULT, 'Strikethrough', + ( ['in'], VARIANT_BOOL, 'pstrikethrough' )), + COMMETHOD(['propget'], HRESULT, 'Weight', + ( ['out', 'retval'], POINTER(c_short), 'pweight' )), + COMMETHOD(['propput'], HRESULT, 'Weight', + ( ['in'], c_short, 'pweight' )), + COMMETHOD(['propget'], HRESULT, 'Charset', + ( ['out', 'retval'], POINTER(c_short), 'pcharset' )), + COMMETHOD(['propput'], HRESULT, 'Charset', + ( ['in'], c_short, 'pcharset' )), + COMMETHOD(['propget'], HRESULT, 'hFont', + ( ['out', 'retval'], POINTER(OLE_HANDLE), 
'phfont' )), + COMMETHOD([], HRESULT, 'Clone', + ( ['out'], POINTER(POINTER(IFont)), 'ppfont' )), + COMMETHOD([], HRESULT, 'IsEqual', + ( ['in'], POINTER(IFont), 'pfontOther' )), + COMMETHOD([], HRESULT, 'SetRatio', + ( ['in'], c_int, 'cyLogical' ), + ( ['in'], c_int, 'cyHimetric' )), + COMMETHOD([], HRESULT, 'AddRefHfont', + ( ['in'], OLE_HANDLE, 'hFont' )), + COMMETHOD([], HRESULT, 'ReleaseHfont', + ( ['in'], OLE_HANDLE, 'hFont' )), +] +################################################################ +## code template for IFont implementation +##class IFont_Impl(object): +## def _get(self): +## '-no docstring-' +## #return pname +## def _set(self, pname): +## '-no docstring-' +## Name = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return psize +## def _set(self, psize): +## '-no docstring-' +## Size = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return pbold +## def _set(self, pbold): +## '-no docstring-' +## Bold = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return pitalic +## def _set(self, pitalic): +## '-no docstring-' +## Italic = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return punderline +## def _set(self, punderline): +## '-no docstring-' +## Underline = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return pstrikethrough +## def _set(self, pstrikethrough): +## '-no docstring-' +## Strikethrough = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return pweight +## def _set(self, pweight): +## '-no docstring-' +## Weight = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return pcharset +## def _set(self, pcharset): +## '-no docstring-' +## Charset = property(_get, _set, doc = _set.__doc__) +## +## @property +## def hFont(self): +## '-no docstring-' +## #return phfont +## +## def Clone(self): +## '-no docstring-' +## #return ppfont +## +## def IsEqual(self, pfontOther): +## '-no docstring-' +## #return +## +## def SetRatio(self, cyLogical, cyHimetric): +## '-no docstring-' +## #return +## +## def AddRefHfont(self, hFont): +## '-no docstring-' +## #return +## +## def ReleaseHfont(self, hFont): +## '-no docstring-' +## #return +## + +Font._disp_methods_ = [ + DISPPROPERTY([dispid(0)], BSTR, 'Name'), + DISPPROPERTY([dispid(2)], c_longlong, 'Size'), + DISPPROPERTY([dispid(3)], VARIANT_BOOL, 'Bold'), + DISPPROPERTY([dispid(4)], VARIANT_BOOL, 'Italic'), + DISPPROPERTY([dispid(5)], VARIANT_BOOL, 'Underline'), + DISPPROPERTY([dispid(6)], VARIANT_BOOL, 'Strikethrough'), + DISPPROPERTY([dispid(7)], c_short, 'Weight'), + DISPPROPERTY([dispid(8)], c_short, 'Charset'), +] +IFontDisp = Font + +# values for enumeration 'LoadPictureConstants' +Default = 0 +Monochrome = 1 +VgaColor = 2 +Color = 4 +LoadPictureConstants = c_int # enum +__all__ = [ 'OLE_OPTEXCLUSIVE', 'FONTSTRIKETHROUGH', 'FONTUNDERSCORE', + 'FONTSIZE', 'IFontEventsDisp', 'OLE_YSIZE_CONTAINER', + 'OLE_YSIZE_PIXELS', 'OLE_YPOS_HIMETRIC', 'IPictureDisp', + 'OLE_COLOR', 'FONTNAME', 'Gray', 'FONTBOLD', 'IPicture', + 'Picture', 'Default', 'IFontDisp', 'OLE_HANDLE', 'IFont', + 'OLE_XSIZE_PIXELS', 'Font', 'OLE_XSIZE_HIMETRIC', 'Color', + 'OLE_XPOS_PIXELS', 'VgaColor', 'FontEvents', 'FONTITALIC', + 'LoadPictureConstants', 'OLE_XPOS_HIMETRIC', 'Monochrome', + 'Unchecked', 'OLE_YPOS_PIXELS', 'Checked', + 'OLE_ENABLEDEFAULTBOOL', 
'OLE_YPOS_CONTAINER', + 'OLE_YSIZE_HIMETRIC', 'StdFont', 'StdPicture', + 'OLE_XPOS_CONTAINER', 'OLE_TRISTATE', 'OLE_CANCELBOOL', + 'OLE_XSIZE_CONTAINER'] +from comtypes import _check_version; _check_version('1.1.10', 1575709685.550032) diff --git a/venv/Lib/site-packages/comtypes/gen/_944DE083_8FB8_45CF_BCB7_C477ACB2F897_0_1_0.py b/venv/Lib/site-packages/comtypes/gen/_944DE083_8FB8_45CF_BCB7_C477ACB2F897_0_1_0.py new file mode 100644 index 00000000..57fccd04 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/gen/_944DE083_8FB8_45CF_BCB7_C477ACB2F897_0_1_0.py @@ -0,0 +1,5566 @@ +# -*- coding: mbcs -*- +typelib_path = 'UIAutomationCore.dll' +_lcid = 0 # change this if required +from ctypes import * +import comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0 +from comtypes import GUID +from comtypes.automation import IDispatch +from ctypes import HRESULT +from comtypes.automation import VARIANT +from comtypes import BSTR +from comtypes import helpstring +from comtypes import COMMETHOD +from comtypes import dispid +from comtypes.automation import _midlSAFEARRAY +WSTRING = c_wchar_p +from comtypes import CoClass +from ctypes.wintypes import tagRECT +from ctypes.wintypes import tagPOINT +from comtypes import IUnknown + + + +# values for enumeration 'ProviderOptions' +ProviderOptions_ClientSideProvider = 1 +ProviderOptions_ServerSideProvider = 2 +ProviderOptions_NonClientAreaProvider = 4 +ProviderOptions_OverrideProvider = 8 +ProviderOptions_ProviderOwnsSetFocus = 16 +ProviderOptions_UseComThreading = 32 +ProviderOptions_RefuseNonClientSupport = 64 +ProviderOptions_HasNativeIAccessible = 128 +ProviderOptions_UseClientCoordinates = 256 +ProviderOptions = c_int # enum +class IAccessible(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IDispatch): + _case_insensitive_ = True + _iid_ = GUID('{618736E0-3C3D-11CF-810C-00AA00389B71}') + _idlflags_ = ['hidden', 'dual', 'oleautomation'] +IAccessible._methods_ = [ + COMMETHOD([dispid(-5000), 'hidden', 'propget'], HRESULT, 'accParent', + ( ['out', 'retval'], POINTER(POINTER(IDispatch)), 'ppdispParent' )), + COMMETHOD([dispid(-5001), 'hidden', 'propget'], HRESULT, 'accChildCount', + ( ['out', 'retval'], POINTER(c_int), 'pcountChildren' )), + COMMETHOD([dispid(-5002), 'hidden', 'propget'], HRESULT, 'accChild', + ( ['in'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(POINTER(IDispatch)), 'ppdispChild' )), + COMMETHOD([dispid(-5003), 'hidden', 'propget'], HRESULT, 'accName', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(BSTR), 'pszName' )), + COMMETHOD([dispid(-5004), 'hidden', 'propget'], HRESULT, 'accValue', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(BSTR), 'pszValue' )), + COMMETHOD([dispid(-5005), 'hidden', 'propget'], HRESULT, 'accDescription', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(BSTR), 'pszDescription' )), + COMMETHOD([dispid(-5006), 'hidden', 'propget'], HRESULT, 'accRole', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(VARIANT), 'pvarRole' )), + COMMETHOD([dispid(-5007), 'hidden', 'propget'], HRESULT, 'accState', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(VARIANT), 'pvarState' )), + COMMETHOD([dispid(-5008), 'hidden', 'propget'], HRESULT, 'accHelp', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(BSTR), 'pszHelp' )), + COMMETHOD([dispid(-5009), 'hidden', 'propget'], HRESULT, 'accHelpTopic', + ( ['out'], POINTER(BSTR), 
'pszHelpFile' ), + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(c_int), 'pidTopic' )), + COMMETHOD([dispid(-5010), 'hidden', 'propget'], HRESULT, 'accKeyboardShortcut', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(BSTR), 'pszKeyboardShortcut' )), + COMMETHOD([dispid(-5011), 'hidden', 'propget'], HRESULT, 'accFocus', + ( ['out', 'retval'], POINTER(VARIANT), 'pvarChild' )), + COMMETHOD([dispid(-5012), 'hidden', 'propget'], HRESULT, 'accSelection', + ( ['out', 'retval'], POINTER(VARIANT), 'pvarChildren' )), + COMMETHOD([dispid(-5013), 'hidden', 'propget'], HRESULT, 'accDefaultAction', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['out', 'retval'], POINTER(BSTR), 'pszDefaultAction' )), + COMMETHOD([dispid(-5014), 'hidden'], HRESULT, 'accSelect', + ( ['in'], c_int, 'flagsSelect' ), + ( ['in', 'optional'], VARIANT, 'varChild' )), + COMMETHOD([dispid(-5015), 'hidden'], HRESULT, 'accLocation', + ( ['out'], POINTER(c_int), 'pxLeft' ), + ( ['out'], POINTER(c_int), 'pyTop' ), + ( ['out'], POINTER(c_int), 'pcxWidth' ), + ( ['out'], POINTER(c_int), 'pcyHeight' ), + ( ['in', 'optional'], VARIANT, 'varChild' )), + COMMETHOD([dispid(-5016), 'hidden'], HRESULT, 'accNavigate', + ( ['in'], c_int, 'navDir' ), + ( ['in', 'optional'], VARIANT, 'varStart' ), + ( ['out', 'retval'], POINTER(VARIANT), 'pvarEndUpAt' )), + COMMETHOD([dispid(-5017), 'hidden'], HRESULT, 'accHitTest', + ( ['in'], c_int, 'xLeft' ), + ( ['in'], c_int, 'yTop' ), + ( ['out', 'retval'], POINTER(VARIANT), 'pvarChild' )), + COMMETHOD([dispid(-5018), 'hidden'], HRESULT, 'accDoDefaultAction', + ( ['in', 'optional'], VARIANT, 'varChild' )), + COMMETHOD([dispid(-5003), 'hidden', 'propput'], HRESULT, 'accName', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['in'], BSTR, 'pszName' )), + COMMETHOD([dispid(-5004), 'hidden', 'propput'], HRESULT, 'accValue', + ( ['in', 'optional'], VARIANT, 'varChild' ), + ( ['in'], BSTR, 'pszValue' )), +] +################################################################ +## code template for IAccessible implementation +##class IAccessible_Impl(object): +## @property +## def accParent(self): +## '-no docstring-' +## #return ppdispParent +## +## @property +## def accChildCount(self): +## '-no docstring-' +## #return pcountChildren +## +## @property +## def accChild(self, varChild): +## '-no docstring-' +## #return ppdispChild +## +## def _get(self, varChild): +## '-no docstring-' +## #return pszName +## def _set(self, varChild, pszName): +## '-no docstring-' +## accName = property(_get, _set, doc = _set.__doc__) +## +## def _get(self, varChild): +## '-no docstring-' +## #return pszValue +## def _set(self, varChild, pszValue): +## '-no docstring-' +## accValue = property(_get, _set, doc = _set.__doc__) +## +## @property +## def accDescription(self, varChild): +## '-no docstring-' +## #return pszDescription +## +## @property +## def accRole(self, varChild): +## '-no docstring-' +## #return pvarRole +## +## @property +## def accState(self, varChild): +## '-no docstring-' +## #return pvarState +## +## @property +## def accHelp(self, varChild): +## '-no docstring-' +## #return pszHelp +## +## @property +## def accHelpTopic(self, varChild): +## '-no docstring-' +## #return pszHelpFile, pidTopic +## +## @property +## def accKeyboardShortcut(self, varChild): +## '-no docstring-' +## #return pszKeyboardShortcut +## +## @property +## def accFocus(self): +## '-no docstring-' +## #return pvarChild +## +## @property +## def accSelection(self): +## '-no 
docstring-' +## #return pvarChildren +## +## @property +## def accDefaultAction(self, varChild): +## '-no docstring-' +## #return pszDefaultAction +## +## def accSelect(self, flagsSelect, varChild): +## '-no docstring-' +## #return +## +## def accLocation(self, varChild): +## '-no docstring-' +## #return pxLeft, pyTop, pcxWidth, pcyHeight +## +## def accNavigate(self, navDir, varStart): +## '-no docstring-' +## #return pvarEndUpAt +## +## def accHitTest(self, xLeft, yTop): +## '-no docstring-' +## #return pvarChild +## +## def accDoDefaultAction(self, varChild): +## '-no docstring-' +## #return +## + +class IUIAutomationEventHandler(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{146C3C17-F12E-4E22-8C27-F894B9B79C69}') + _idlflags_ = ['oleautomation'] +class IUIAutomationElement(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{D22108AA-8AC5-49A5-837B-37BBB3D7591E}') + _idlflags_ = [] +IUIAutomationEventHandler._methods_ = [ + COMMETHOD([], HRESULT, 'HandleAutomationEvent', + ( ['in'], POINTER(IUIAutomationElement), 'sender' ), + ( ['in'], c_int, 'eventId' )), +] +################################################################ +## code template for IUIAutomationEventHandler implementation +##class IUIAutomationEventHandler_Impl(object): +## def HandleAutomationEvent(self, sender, eventId): +## '-no docstring-' +## #return +## + +class IUIAutomationPropertyChangedEventHandler(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{40CD37D4-C756-4B0C-8C6F-BDDFEEB13B50}') + _idlflags_ = ['oleautomation'] +IUIAutomationPropertyChangedEventHandler._methods_ = [ + COMMETHOD([], HRESULT, 'HandlePropertyChangedEvent', + ( ['in'], POINTER(IUIAutomationElement), 'sender' ), + ( ['in'], c_int, 'propertyId' ), + ( ['in'], VARIANT, 'newValue' )), +] +################################################################ +## code template for IUIAutomationPropertyChangedEventHandler implementation +##class IUIAutomationPropertyChangedEventHandler_Impl(object): +## def HandlePropertyChangedEvent(self, sender, propertyId, newValue): +## '-no docstring-' +## #return +## + +class IUIAutomationProxyFactoryEntry(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{D50E472E-B64B-490C-BCA1-D30696F9F289}') + _idlflags_ = [] +class IUIAutomationProxyFactory(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{85B94ECD-849D-42B6-B94D-D6DB23FDF5A4}') + _idlflags_ = [] +IUIAutomationProxyFactoryEntry._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'ProxyFactory', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationProxyFactory)), 'factory' )), + COMMETHOD(['propget'], HRESULT, 'ClassName', + ( ['out', 'retval'], POINTER(BSTR), 'ClassName' )), + COMMETHOD(['propget'], HRESULT, 'ImageName', + ( ['out', 'retval'], POINTER(BSTR), 'ImageName' )), + COMMETHOD(['propget'], HRESULT, 'AllowSubstringMatch', + ( ['out', 'retval'], POINTER(c_int), 'AllowSubstringMatch' )), + COMMETHOD(['propget'], HRESULT, 'CanCheckBaseClass', + ( ['out', 'retval'], POINTER(c_int), 'CanCheckBaseClass' )), + COMMETHOD(['propget'], HRESULT, 'NeedsAdviseEvents', + ( ['out', 'retval'], POINTER(c_int), 'adviseEvents' )), + COMMETHOD(['propput'], HRESULT, 'ClassName', + ( ['in'], WSTRING, 'ClassName' )), + COMMETHOD(['propput'], HRESULT, 'ImageName', + ( 
['in'], WSTRING, 'ImageName' )), + COMMETHOD(['propput'], HRESULT, 'AllowSubstringMatch', + ( ['in'], c_int, 'AllowSubstringMatch' )), + COMMETHOD(['propput'], HRESULT, 'CanCheckBaseClass', + ( ['in'], c_int, 'CanCheckBaseClass' )), + COMMETHOD(['propput'], HRESULT, 'NeedsAdviseEvents', + ( ['in'], c_int, 'adviseEvents' )), + COMMETHOD([], HRESULT, 'SetWinEventsForAutomationEvent', + ( ['in'], c_int, 'eventId' ), + ( ['in'], c_int, 'propertyId' ), + ( ['in'], _midlSAFEARRAY(c_uint), 'winEvents' )), + COMMETHOD([], HRESULT, 'GetWinEventsForAutomationEvent', + ( ['in'], c_int, 'eventId' ), + ( ['in'], c_int, 'propertyId' ), + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_uint)), 'winEvents' )), +] +################################################################ +## code template for IUIAutomationProxyFactoryEntry implementation +##class IUIAutomationProxyFactoryEntry_Impl(object): +## @property +## def ProxyFactory(self): +## '-no docstring-' +## #return factory +## +## def _get(self): +## '-no docstring-' +## #return ClassName +## def _set(self, ClassName): +## '-no docstring-' +## ClassName = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return ImageName +## def _set(self, ImageName): +## '-no docstring-' +## ImageName = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return AllowSubstringMatch +## def _set(self, AllowSubstringMatch): +## '-no docstring-' +## AllowSubstringMatch = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return CanCheckBaseClass +## def _set(self, CanCheckBaseClass): +## '-no docstring-' +## CanCheckBaseClass = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return adviseEvents +## def _set(self, adviseEvents): +## '-no docstring-' +## NeedsAdviseEvents = property(_get, _set, doc = _set.__doc__) +## +## def SetWinEventsForAutomationEvent(self, eventId, propertyId, winEvents): +## '-no docstring-' +## #return +## +## def GetWinEventsForAutomationEvent(self, eventId, propertyId): +## '-no docstring-' +## #return winEvents +## + +class IUIAutomationStructureChangedEventHandler(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{E81D1B4E-11C5-42F8-9754-E7036C79F054}') + _idlflags_ = ['oleautomation'] + +# values for enumeration 'StructureChangeType' +StructureChangeType_ChildAdded = 0 +StructureChangeType_ChildRemoved = 1 +StructureChangeType_ChildrenInvalidated = 2 +StructureChangeType_ChildrenBulkAdded = 3 +StructureChangeType_ChildrenBulkRemoved = 4 +StructureChangeType_ChildrenReordered = 5 +StructureChangeType = c_int # enum +IUIAutomationStructureChangedEventHandler._methods_ = [ + COMMETHOD([], HRESULT, 'HandleStructureChangedEvent', + ( ['in'], POINTER(IUIAutomationElement), 'sender' ), + ( ['in'], StructureChangeType, 'changeType' ), + ( ['in'], _midlSAFEARRAY(c_int), 'runtimeId' )), +] +################################################################ +## code template for IUIAutomationStructureChangedEventHandler implementation +##class IUIAutomationStructureChangedEventHandler_Impl(object): +## def HandleStructureChangedEvent(self, sender, changeType, runtimeId): +## '-no docstring-' +## #return +## + +class IUIAutomationSpreadsheetItemPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{7D4FB86C-8D34-40E1-8E83-62C15204E335}') + _idlflags_ = [] +class 
IUIAutomationElementArray(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{14314595-B4BC-4055-95F2-58F2E42C9855}') + _idlflags_ = [] +IUIAutomationSpreadsheetItemPattern._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentFormula', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCurrentAnnotationObjects', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCurrentAnnotationTypes', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_int)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedFormula', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedAnnotationObjects', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedAnnotationTypes', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_int)), 'retVal' )), +] +################################################################ +## code template for IUIAutomationSpreadsheetItemPattern implementation +##class IUIAutomationSpreadsheetItemPattern_Impl(object): +## @property +## def CurrentFormula(self): +## '-no docstring-' +## #return retVal +## +## def GetCurrentAnnotationObjects(self): +## '-no docstring-' +## #return retVal +## +## def GetCurrentAnnotationTypes(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedFormula(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedAnnotationObjects(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedAnnotationTypes(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationFocusChangedEventHandler(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{C270F6B5-5C69-4290-9745-7A7F97169468}') + _idlflags_ = ['oleautomation'] +IUIAutomationFocusChangedEventHandler._methods_ = [ + COMMETHOD([], HRESULT, 'HandleFocusChangedEvent', + ( ['in'], POINTER(IUIAutomationElement), 'sender' )), +] +################################################################ +## code template for IUIAutomationFocusChangedEventHandler implementation +##class IUIAutomationFocusChangedEventHandler_Impl(object): +## def HandleFocusChangedEvent(self, sender): +## '-no docstring-' +## #return +## + +class IUIAutomationTextEditTextChangedEventHandler(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{92FAA680-E704-4156-931A-E32D5BB38F3F}') + _idlflags_ = ['oleautomation'] + +# values for enumeration 'TextEditChangeType' +TextEditChangeType_None = 0 +TextEditChangeType_AutoCorrect = 1 +TextEditChangeType_Composition = 2 +TextEditChangeType_CompositionFinalized = 3 +TextEditChangeType_AutoComplete = 4 +TextEditChangeType = c_int # enum +IUIAutomationTextEditTextChangedEventHandler._methods_ = [ + COMMETHOD([], HRESULT, 'HandleTextEditTextChangedEvent', + ( ['in'], POINTER(IUIAutomationElement), 'sender' ), + ( ['in'], TextEditChangeType, 'TextEditChangeType' ), + ( ['in'], _midlSAFEARRAY(BSTR), 'eventStrings' )), +] +################################################################ +## code template for IUIAutomationTextEditTextChangedEventHandler implementation +##class IUIAutomationTextEditTextChangedEventHandler_Impl(object): +## def HandleTextEditTextChangedEvent(self, sender, TextEditChangeType, eventStrings): +## '-no docstring-' +## #return +## + +class 
IUIAutomationProxyFactoryMapping(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{09E31E18-872D-4873-93D1-1E541EC133FD}') + _idlflags_ = [] +IUIAutomationProxyFactoryMapping._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'count', + ( ['out', 'retval'], POINTER(c_uint), 'count' )), + COMMETHOD([], HRESULT, 'GetTable', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(POINTER(IUIAutomationProxyFactoryEntry))), 'table' )), + COMMETHOD([], HRESULT, 'GetEntry', + ( ['in'], c_uint, 'index' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationProxyFactoryEntry)), 'entry' )), + COMMETHOD([], HRESULT, 'SetTable', + ( ['in'], _midlSAFEARRAY(POINTER(IUIAutomationProxyFactoryEntry)), 'factoryList' )), + COMMETHOD([], HRESULT, 'InsertEntries', + ( ['in'], c_uint, 'before' ), + ( ['in'], _midlSAFEARRAY(POINTER(IUIAutomationProxyFactoryEntry)), 'factoryList' )), + COMMETHOD([], HRESULT, 'InsertEntry', + ( ['in'], c_uint, 'before' ), + ( ['in'], POINTER(IUIAutomationProxyFactoryEntry), 'factory' )), + COMMETHOD([], HRESULT, 'RemoveEntry', + ( ['in'], c_uint, 'index' )), + COMMETHOD([], HRESULT, 'ClearTable'), + COMMETHOD([], HRESULT, 'RestoreDefaultTable'), +] +################################################################ +## code template for IUIAutomationProxyFactoryMapping implementation +##class IUIAutomationProxyFactoryMapping_Impl(object): +## @property +## def count(self): +## '-no docstring-' +## #return count +## +## def GetTable(self): +## '-no docstring-' +## #return table +## +## def GetEntry(self, index): +## '-no docstring-' +## #return entry +## +## def SetTable(self, factoryList): +## '-no docstring-' +## #return +## +## def InsertEntries(self, before, factoryList): +## '-no docstring-' +## #return +## +## def InsertEntry(self, before, factory): +## '-no docstring-' +## #return +## +## def RemoveEntry(self, index): +## '-no docstring-' +## #return +## +## def ClearTable(self): +## '-no docstring-' +## #return +## +## def RestoreDefaultTable(self): +## '-no docstring-' +## #return +## + +class IUIAutomationCondition(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{352FFBA8-0973-437C-A61F-F64CAFD81DF9}') + _idlflags_ = [] +class IUIAutomationNotCondition(IUIAutomationCondition): + _case_insensitive_ = True + _iid_ = GUID('{F528B657-847B-498C-8896-D52B565407A1}') + _idlflags_ = [] +IUIAutomationCondition._methods_ = [ +] +################################################################ +## code template for IUIAutomationCondition implementation +##class IUIAutomationCondition_Impl(object): + +IUIAutomationNotCondition._methods_ = [ + COMMETHOD([], HRESULT, 'GetChild', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'condition' )), +] +################################################################ +## code template for IUIAutomationNotCondition implementation +##class IUIAutomationNotCondition_Impl(object): +## def GetChild(self): +## '-no docstring-' +## #return condition +## + +class CUIAutomation(CoClass): + 'The Central Class for UIAutomation' + _reg_clsid_ = GUID('{FF48DBA4-60EF-4201-AA87-54103EEF594E}') + _idlflags_ = [] + _typelib_path_ = typelib_path + _reg_typelib_ = ('{944DE083-8FB8-45CF-BCB7-C477ACB2F897}', 1, 0) +class IUIAutomation(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{30CBE57D-D9D0-452A-AB13-7AC5AC4825EE}') + _idlflags_ = [] +CUIAutomation._com_interfaces_ = 
[IUIAutomation] + + +# values for enumeration 'WindowVisualState' +WindowVisualState_Normal = 0 +WindowVisualState_Maximized = 1 +WindowVisualState_Minimized = 2 +WindowVisualState = c_int # enum +class CUIAutomation8(CoClass): + 'The Central Class for UIAutomation8' + _reg_clsid_ = GUID('{E22AD333-B25F-460C-83D0-0581107395C9}') + _idlflags_ = [] + _typelib_path_ = typelib_path + _reg_typelib_ = ('{944DE083-8FB8-45CF-BCB7-C477ACB2F897}', 1, 0) +class IUIAutomation2(IUIAutomation): + _case_insensitive_ = True + _iid_ = GUID('{34723AFF-0C9D-49D0-9896-7AB52DF8CD8A}') + _idlflags_ = [] +class IUIAutomation3(IUIAutomation2): + _case_insensitive_ = True + _iid_ = GUID('{73D768DA-9B51-4B89-936E-C209290973E7}') + _idlflags_ = [] +class IUIAutomation4(IUIAutomation3): + _case_insensitive_ = True + _iid_ = GUID('{1189C02A-05F8-4319-8E21-E817E3DB2860}') + _idlflags_ = [] +class IUIAutomation5(IUIAutomation4): + _case_insensitive_ = True + _iid_ = GUID('{25F700C8-D816-4057-A9DC-3CBDEE77E256}') + _idlflags_ = [] +class IUIAutomation6(IUIAutomation5): + _case_insensitive_ = True + _iid_ = GUID('{AAE072DA-29E3-413D-87A7-192DBF81ED10}') + _idlflags_ = [] +CUIAutomation8._com_interfaces_ = [IUIAutomation2, IUIAutomation3, IUIAutomation4, IUIAutomation5, IUIAutomation6] + + +# values for enumeration 'OrientationType' +OrientationType_None = 0 +OrientationType_Horizontal = 1 +OrientationType_Vertical = 2 +OrientationType = c_int # enum +class IUIAutomationSelectionPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{5ED5202E-B2AC-47A6-B638-4B0BF140D78E}') + _idlflags_ = [] +class IUIAutomationSelectionPattern2(IUIAutomationSelectionPattern): + _case_insensitive_ = True + _iid_ = GUID('{0532BFAE-C011-4E32-A343-6D642D798555}') + _idlflags_ = [] +IUIAutomationSelectionPattern._methods_ = [ + COMMETHOD([], HRESULT, 'GetCurrentSelection', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentCanSelectMultiple', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsSelectionRequired', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedSelection', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCanSelectMultiple', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsSelectionRequired', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code template for IUIAutomationSelectionPattern implementation +##class IUIAutomationSelectionPattern_Impl(object): +## def GetCurrentSelection(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentCanSelectMultiple(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsSelectionRequired(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedSelection(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCanSelectMultiple(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsSelectionRequired(self): +## '-no docstring-' +## #return retVal +## + +IUIAutomationSelectionPattern2._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentFirstSelectedItem', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 
'CurrentLastSelectedItem', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentCurrentSelectedItem', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentItemCount', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedFirstSelectedItem', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedLastSelectedItem', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCurrentSelectedItem', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedItemCount', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code template for IUIAutomationSelectionPattern2 implementation +##class IUIAutomationSelectionPattern2_Impl(object): +## @property +## def CurrentFirstSelectedItem(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentLastSelectedItem(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentCurrentSelectedItem(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentItemCount(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedFirstSelectedItem(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedLastSelectedItem(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCurrentSelectedItem(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedItemCount(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationTreeWalker(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{4042C624-389C-4AFC-A630-9DF854A541FC}') + _idlflags_ = [] +class IUIAutomationCacheRequest(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{B32A92B5-BC25-4078-9C08-D7EE95C48E03}') + _idlflags_ = [] +IUIAutomationTreeWalker._methods_ = [ + COMMETHOD([], HRESULT, 'GetParentElement', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'parent' )), + COMMETHOD([], HRESULT, 'GetFirstChildElement', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'first' )), + COMMETHOD([], HRESULT, 'GetLastChildElement', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'last' )), + COMMETHOD([], HRESULT, 'GetNextSiblingElement', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'next' )), + COMMETHOD([], HRESULT, 'GetPreviousSiblingElement', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'previous' )), + COMMETHOD([], HRESULT, 'NormalizeElement', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'normalized' )), + COMMETHOD([], HRESULT, 'GetParentElementBuildCache', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], 
POINTER(POINTER(IUIAutomationElement)), 'parent' )), + COMMETHOD([], HRESULT, 'GetFirstChildElementBuildCache', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'first' )), + COMMETHOD([], HRESULT, 'GetLastChildElementBuildCache', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'last' )), + COMMETHOD([], HRESULT, 'GetNextSiblingElementBuildCache', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'next' )), + COMMETHOD([], HRESULT, 'GetPreviousSiblingElementBuildCache', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'previous' )), + COMMETHOD([], HRESULT, 'NormalizeElementBuildCache', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'normalized' )), + COMMETHOD(['propget'], HRESULT, 'condition', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'condition' )), +] +################################################################ +## code template for IUIAutomationTreeWalker implementation +##class IUIAutomationTreeWalker_Impl(object): +## def GetParentElement(self, element): +## '-no docstring-' +## #return parent +## +## def GetFirstChildElement(self, element): +## '-no docstring-' +## #return first +## +## def GetLastChildElement(self, element): +## '-no docstring-' +## #return last +## +## def GetNextSiblingElement(self, element): +## '-no docstring-' +## #return next +## +## def GetPreviousSiblingElement(self, element): +## '-no docstring-' +## #return previous +## +## def NormalizeElement(self, element): +## '-no docstring-' +## #return normalized +## +## def GetParentElementBuildCache(self, element, cacheRequest): +## '-no docstring-' +## #return parent +## +## def GetFirstChildElementBuildCache(self, element, cacheRequest): +## '-no docstring-' +## #return first +## +## def GetLastChildElementBuildCache(self, element, cacheRequest): +## '-no docstring-' +## #return last +## +## def GetNextSiblingElementBuildCache(self, element, cacheRequest): +## '-no docstring-' +## #return next +## +## def GetPreviousSiblingElementBuildCache(self, element, cacheRequest): +## '-no docstring-' +## #return previous +## +## def NormalizeElementBuildCache(self, element, cacheRequest): +## '-no docstring-' +## #return normalized +## +## @property +## def condition(self): +## '-no docstring-' +## #return condition +## + +class IUIAutomationItemContainerPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{C690FDB2-27A8-423C-812D-429773C9084E}') + _idlflags_ = [] +IUIAutomationItemContainerPattern._methods_ = [ + COMMETHOD([], HRESULT, 'FindItemByProperty', + ( ['in'], POINTER(IUIAutomationElement), 'pStartAfter' ), + ( ['in'], c_int, 'propertyId' ), + ( ['in'], VARIANT, 'value' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'pFound' )), +] +################################################################ +## code template for 
IUIAutomationItemContainerPattern implementation +##class IUIAutomationItemContainerPattern_Impl(object): +## def FindItemByProperty(self, pStartAfter, propertyId, value): +## '-no docstring-' +## #return pFound +## + +class IUIAutomationVirtualizedItemPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{6BA3D7A6-04CF-4F11-8793-A8D1CDE9969F}') + _idlflags_ = [] +IUIAutomationVirtualizedItemPattern._methods_ = [ + COMMETHOD([], HRESULT, 'Realize'), +] +################################################################ +## code template for IUIAutomationVirtualizedItemPattern implementation +##class IUIAutomationVirtualizedItemPattern_Impl(object): +## def Realize(self): +## '-no docstring-' +## #return +## + +class IUIAutomationEventHandlerGroup(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{C9EE12F2-C13B-4408-997C-639914377F4E}') + _idlflags_ = [] + +# values for enumeration 'TreeScope' +TreeScope_None = 0 +TreeScope_Element = 1 +TreeScope_Children = 2 +TreeScope_Descendants = 4 +TreeScope_Parent = 8 +TreeScope_Ancestors = 16 +TreeScope_Subtree = 7 +TreeScope = c_int # enum +class IUIAutomationActiveTextPositionChangedEventHandler(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{F97933B0-8DAE-4496-8997-5BA015FE0D82}') + _idlflags_ = ['oleautomation'] +class IUIAutomationChangesEventHandler(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{58EDCA55-2C3E-4980-B1B9-56C17F27A2A0}') + _idlflags_ = ['oleautomation'] +class IUIAutomationNotificationEventHandler(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{C7CB2637-E6C2-4D0C-85DE-4948C02175C7}') + _idlflags_ = ['oleautomation'] +IUIAutomationEventHandlerGroup._methods_ = [ + COMMETHOD([], HRESULT, 'AddActiveTextPositionChangedEventHandler', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationActiveTextPositionChangedEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'AddAutomationEventHandler', + ( ['in'], c_int, 'eventId' ), + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'AddChangesEventHandler', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(c_int), 'changeTypes' ), + ( ['in'], c_int, 'changesCount' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationChangesEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'AddNotificationEventHandler', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationNotificationEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'AddPropertyChangedEventHandler', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationPropertyChangedEventHandler), 'handler' ), + ( ['in'], POINTER(c_int), 'propertyArray' ), + ( ['in'], c_int, 'propertyCount' )), + COMMETHOD([], HRESULT, 'AddStructureChangedEventHandler', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationStructureChangedEventHandler), 
'handler' )), + COMMETHOD([], HRESULT, 'AddTextEditTextChangedEventHandler', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], TextEditChangeType, 'TextEditChangeType' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationTextEditTextChangedEventHandler), 'handler' )), +] +################################################################ +## code template for IUIAutomationEventHandlerGroup implementation +##class IUIAutomationEventHandlerGroup_Impl(object): +## def AddActiveTextPositionChangedEventHandler(self, scope, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def AddAutomationEventHandler(self, eventId, scope, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def AddChangesEventHandler(self, scope, changeTypes, changesCount, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def AddNotificationEventHandler(self, scope, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def AddPropertyChangedEventHandler(self, scope, cacheRequest, handler, propertyArray, propertyCount): +## '-no docstring-' +## #return +## +## def AddStructureChangedEventHandler(self, scope, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def AddTextEditTextChangedEventHandler(self, scope, TextEditChangeType, cacheRequest, handler): +## '-no docstring-' +## #return +## + + +# values for enumeration 'WindowInteractionState' +WindowInteractionState_Running = 0 +WindowInteractionState_Closing = 1 +WindowInteractionState_ReadyForUserInteraction = 2 +WindowInteractionState_BlockedByModalWindow = 3 +WindowInteractionState_NotResponding = 4 +WindowInteractionState = c_int # enum +class IUIAutomationAnnotationPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{9A175B21-339E-41B1-8E8B-623F6B681098}') + _idlflags_ = [] +IUIAutomationAnnotationPattern._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentAnnotationTypeId', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentAnnotationTypeName', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentAuthor', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentDateTime', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentTarget', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAnnotationTypeId', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAnnotationTypeName', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAuthor', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedDateTime', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedTarget', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), +] +################################################################ +## code template for IUIAutomationAnnotationPattern implementation +##class IUIAutomationAnnotationPattern_Impl(object): +## @property +## def CurrentAnnotationTypeId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentAnnotationTypeName(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentAuthor(self): +## '-no docstring-' +## #return retVal +## +## @property +## 
def CurrentDateTime(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentTarget(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAnnotationTypeId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAnnotationTypeName(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAuthor(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedDateTime(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedTarget(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationSelectionItemPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{A8EFA66A-0FDA-421A-9194-38021F3578EA}') + _idlflags_ = [] +IUIAutomationSelectionItemPattern._methods_ = [ + COMMETHOD([], HRESULT, 'Select'), + COMMETHOD([], HRESULT, 'AddToSelection'), + COMMETHOD([], HRESULT, 'RemoveFromSelection'), + COMMETHOD(['propget'], HRESULT, 'CurrentIsSelected', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentSelectionContainer', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsSelected', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedSelectionContainer', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), +] +################################################################ +## code template for IUIAutomationSelectionItemPattern implementation +##class IUIAutomationSelectionItemPattern_Impl(object): +## def Select(self): +## '-no docstring-' +## #return +## +## def AddToSelection(self): +## '-no docstring-' +## #return +## +## def RemoveFromSelection(self): +## '-no docstring-' +## #return +## +## @property +## def CurrentIsSelected(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentSelectionContainer(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsSelected(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedSelectionContainer(self): +## '-no docstring-' +## #return retVal +## + +class UiaChangeInfo(Structure): + pass +UiaChangeInfo._fields_ = [ + ('uiaId', c_int), + ('payload', VARIANT), + ('extraInfo', VARIANT), +] +assert sizeof(UiaChangeInfo) == 56, sizeof(UiaChangeInfo) +assert alignment(UiaChangeInfo) == 8, alignment(UiaChangeInfo) +class IUIAutomationTextRange(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{A543CC6A-F4AE-494B-8239-C814481187A8}') + _idlflags_ = [] + +# values for enumeration 'TextPatternRangeEndpoint' +TextPatternRangeEndpoint_Start = 0 +TextPatternRangeEndpoint_End = 1 +TextPatternRangeEndpoint = c_int # enum + +# values for enumeration 'TextUnit' +TextUnit_Character = 0 +TextUnit_Format = 1 +TextUnit_Word = 2 +TextUnit_Line = 3 +TextUnit_Paragraph = 4 +TextUnit_Page = 5 +TextUnit_Document = 6 +TextUnit = c_int # enum +IUIAutomationTextRange._methods_ = [ + COMMETHOD([], HRESULT, 'Clone', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'clonedRange' )), + COMMETHOD([], HRESULT, 'Compare', + ( ['in'], POINTER(IUIAutomationTextRange), 'range' ), + ( ['out', 'retval'], POINTER(c_int), 'areSame' )), + COMMETHOD([], HRESULT, 'CompareEndpoints', + ( ['in'], TextPatternRangeEndpoint, 'srcEndPoint' ), + ( ['in'], 
POINTER(IUIAutomationTextRange), 'range' ), + ( ['in'], TextPatternRangeEndpoint, 'targetEndPoint' ), + ( ['out', 'retval'], POINTER(c_int), 'compValue' )), + COMMETHOD([], HRESULT, 'ExpandToEnclosingUnit', + ( ['in'], TextUnit, 'TextUnit' )), + COMMETHOD([], HRESULT, 'FindAttribute', + ( ['in'], c_int, 'attr' ), + ( ['in'], VARIANT, 'val' ), + ( ['in'], c_int, 'backward' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'found' )), + COMMETHOD([], HRESULT, 'FindText', + ( ['in'], BSTR, 'text' ), + ( ['in'], c_int, 'backward' ), + ( ['in'], c_int, 'ignoreCase' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'found' )), + COMMETHOD([], HRESULT, 'GetAttributeValue', + ( ['in'], c_int, 'attr' ), + ( ['out', 'retval'], POINTER(VARIANT), 'value' )), + COMMETHOD([], HRESULT, 'GetBoundingRectangles', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_double)), 'boundingRects' )), + COMMETHOD([], HRESULT, 'GetEnclosingElement', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'enclosingElement' )), + COMMETHOD([], HRESULT, 'GetText', + ( ['in'], c_int, 'maxLength' ), + ( ['out', 'retval'], POINTER(BSTR), 'text' )), + COMMETHOD([], HRESULT, 'Move', + ( ['in'], TextUnit, 'unit' ), + ( ['in'], c_int, 'count' ), + ( ['out', 'retval'], POINTER(c_int), 'moved' )), + COMMETHOD([], HRESULT, 'MoveEndpointByUnit', + ( ['in'], TextPatternRangeEndpoint, 'endpoint' ), + ( ['in'], TextUnit, 'unit' ), + ( ['in'], c_int, 'count' ), + ( ['out', 'retval'], POINTER(c_int), 'moved' )), + COMMETHOD([], HRESULT, 'MoveEndpointByRange', + ( ['in'], TextPatternRangeEndpoint, 'srcEndPoint' ), + ( ['in'], POINTER(IUIAutomationTextRange), 'range' ), + ( ['in'], TextPatternRangeEndpoint, 'targetEndPoint' )), + COMMETHOD([], HRESULT, 'Select'), + COMMETHOD([], HRESULT, 'AddToSelection'), + COMMETHOD([], HRESULT, 'RemoveFromSelection'), + COMMETHOD([], HRESULT, 'ScrollIntoView', + ( ['in'], c_int, 'alignToTop' )), + COMMETHOD([], HRESULT, 'GetChildren', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'children' )), +] +################################################################ +## code template for IUIAutomationTextRange implementation +##class IUIAutomationTextRange_Impl(object): +## def Clone(self): +## '-no docstring-' +## #return clonedRange +## +## def Compare(self, range): +## '-no docstring-' +## #return areSame +## +## def CompareEndpoints(self, srcEndPoint, range, targetEndPoint): +## '-no docstring-' +## #return compValue +## +## def ExpandToEnclosingUnit(self, TextUnit): +## '-no docstring-' +## #return +## +## def FindAttribute(self, attr, val, backward): +## '-no docstring-' +## #return found +## +## def FindText(self, text, backward, ignoreCase): +## '-no docstring-' +## #return found +## +## def GetAttributeValue(self, attr): +## '-no docstring-' +## #return value +## +## def GetBoundingRectangles(self): +## '-no docstring-' +## #return boundingRects +## +## def GetEnclosingElement(self): +## '-no docstring-' +## #return enclosingElement +## +## def GetText(self, maxLength): +## '-no docstring-' +## #return text +## +## def Move(self, unit, count): +## '-no docstring-' +## #return moved +## +## def MoveEndpointByUnit(self, endpoint, unit, count): +## '-no docstring-' +## #return moved +## +## def MoveEndpointByRange(self, srcEndPoint, range, targetEndPoint): +## '-no docstring-' +## #return +## +## def Select(self): +## '-no docstring-' +## #return +## +## def AddToSelection(self): +## '-no docstring-' +## #return +## +## def 
RemoveFromSelection(self): +## '-no docstring-' +## #return +## +## def ScrollIntoView(self, alignToTop): +## '-no docstring-' +## #return +## +## def GetChildren(self): +## '-no docstring-' +## #return children +## + +class IUIAutomationStylesPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{85B5F0A2-BD79-484A-AD2B-388C9838D5FB}') + _idlflags_ = [] +class ExtendedProperty(Structure): + pass +IUIAutomationStylesPattern._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentStyleId', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentStyleName', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentFillColor', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentFillPatternStyle', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentShape', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentFillPatternColor', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentExtendedProperties', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCurrentExtendedPropertiesAsArray', + ( ['out'], POINTER(POINTER(ExtendedProperty)), 'propertyArray' ), + ( ['out'], POINTER(c_int), 'propertyCount' )), + COMMETHOD(['propget'], HRESULT, 'CachedStyleId', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedStyleName', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedFillColor', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedFillPatternStyle', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedShape', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedFillPatternColor', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedExtendedProperties', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedExtendedPropertiesAsArray', + ( ['out'], POINTER(POINTER(ExtendedProperty)), 'propertyArray' ), + ( ['out'], POINTER(c_int), 'propertyCount' )), +] +################################################################ +## code template for IUIAutomationStylesPattern implementation +##class IUIAutomationStylesPattern_Impl(object): +## @property +## def CurrentStyleId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentStyleName(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentFillColor(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentFillPatternStyle(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentShape(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentFillPatternColor(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentExtendedProperties(self): +## '-no docstring-' +## #return retVal +## +## def GetCurrentExtendedPropertiesAsArray(self): +## '-no docstring-' +## #return propertyArray, propertyCount +## +## @property +## def CachedStyleId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedStyleName(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedFillColor(self): +## '-no 
docstring-' +## #return retVal +## +## @property +## def CachedFillPatternStyle(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedShape(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedFillPatternColor(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedExtendedProperties(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedExtendedPropertiesAsArray(self): +## '-no docstring-' +## #return propertyArray, propertyCount +## + + +# values for enumeration 'PropertyConditionFlags' +PropertyConditionFlags_None = 0 +PropertyConditionFlags_IgnoreCase = 1 +PropertyConditionFlags_MatchSubstring = 2 +PropertyConditionFlags = c_int # enum +IUIAutomation._methods_ = [ + COMMETHOD([], HRESULT, 'CompareElements', + ( ['in'], POINTER(IUIAutomationElement), 'el1' ), + ( ['in'], POINTER(IUIAutomationElement), 'el2' ), + ( ['out', 'retval'], POINTER(c_int), 'areSame' )), + COMMETHOD([], HRESULT, 'CompareRuntimeIds', + ( ['in'], _midlSAFEARRAY(c_int), 'runtimeId1' ), + ( ['in'], _midlSAFEARRAY(c_int), 'runtimeId2' ), + ( ['out', 'retval'], POINTER(c_int), 'areSame' )), + COMMETHOD([], HRESULT, 'GetRootElement', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'root' )), + COMMETHOD([], HRESULT, 'ElementFromHandle', + ( ['in'], c_void_p, 'hwnd' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), + COMMETHOD([], HRESULT, 'ElementFromPoint', + ( ['in'], tagPOINT, 'pt' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), + COMMETHOD([], HRESULT, 'GetFocusedElement', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), + COMMETHOD([], HRESULT, 'GetRootElementBuildCache', + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'root' )), + COMMETHOD([], HRESULT, 'ElementFromHandleBuildCache', + ( ['in'], c_void_p, 'hwnd' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), + COMMETHOD([], HRESULT, 'ElementFromPointBuildCache', + ( ['in'], tagPOINT, 'pt' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), + COMMETHOD([], HRESULT, 'GetFocusedElementBuildCache', + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), + COMMETHOD([], HRESULT, 'CreateTreeWalker', + ( ['in'], POINTER(IUIAutomationCondition), 'pCondition' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTreeWalker)), 'walker' )), + COMMETHOD(['propget'], HRESULT, 'ControlViewWalker', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTreeWalker)), 'walker' )), + COMMETHOD(['propget'], HRESULT, 'ContentViewWalker', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTreeWalker)), 'walker' )), + COMMETHOD(['propget'], HRESULT, 'RawViewWalker', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTreeWalker)), 'walker' )), + COMMETHOD(['propget'], HRESULT, 'RawViewCondition', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'condition' )), + COMMETHOD(['propget'], HRESULT, 'ControlViewCondition', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'condition' )), + COMMETHOD(['propget'], HRESULT, 'ContentViewCondition', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 
'condition' )), + COMMETHOD([], HRESULT, 'CreateCacheRequest', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCacheRequest)), 'cacheRequest' )), + COMMETHOD([], HRESULT, 'CreateTrueCondition', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreateFalseCondition', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreatePropertyCondition', + ( ['in'], c_int, 'propertyId' ), + ( ['in'], VARIANT, 'value' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreatePropertyConditionEx', + ( ['in'], c_int, 'propertyId' ), + ( ['in'], VARIANT, 'value' ), + ( ['in'], PropertyConditionFlags, 'flags' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreateAndCondition', + ( ['in'], POINTER(IUIAutomationCondition), 'condition1' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition2' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreateAndConditionFromArray', + ( ['in'], _midlSAFEARRAY(POINTER(IUIAutomationCondition)), 'conditions' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreateAndConditionFromNativeArray', + ( ['in'], POINTER(POINTER(IUIAutomationCondition)), 'conditions' ), + ( ['in'], c_int, 'conditionCount' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreateOrCondition', + ( ['in'], POINTER(IUIAutomationCondition), 'condition1' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition2' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreateOrConditionFromArray', + ( ['in'], _midlSAFEARRAY(POINTER(IUIAutomationCondition)), 'conditions' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreateOrConditionFromNativeArray', + ( ['in'], POINTER(POINTER(IUIAutomationCondition)), 'conditions' ), + ( ['in'], c_int, 'conditionCount' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'CreateNotCondition', + ( ['in'], POINTER(IUIAutomationCondition), 'condition' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'newCondition' )), + COMMETHOD([], HRESULT, 'AddAutomationEventHandler', + ( ['in'], c_int, 'eventId' ), + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'RemoveAutomationEventHandler', + ( ['in'], c_int, 'eventId' ), + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'AddPropertyChangedEventHandlerNativeArray', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationPropertyChangedEventHandler), 'handler' ), + ( ['in'], POINTER(c_int), 'propertyArray' ), + ( ['in'], c_int, 'propertyCount' )), + COMMETHOD([], HRESULT, 'AddPropertyChangedEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], TreeScope, 'scope' 
), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationPropertyChangedEventHandler), 'handler' ), + ( ['in'], _midlSAFEARRAY(c_int), 'propertyArray' )), + COMMETHOD([], HRESULT, 'RemovePropertyChangedEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationPropertyChangedEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'AddStructureChangedEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationStructureChangedEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'RemoveStructureChangedEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationStructureChangedEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'AddFocusChangedEventHandler', + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationFocusChangedEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'RemoveFocusChangedEventHandler', + ( ['in'], POINTER(IUIAutomationFocusChangedEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'RemoveAllEventHandlers'), + COMMETHOD([], HRESULT, 'IntNativeArrayToSafeArray', + ( ['in'], POINTER(c_int), 'array' ), + ( ['in'], c_int, 'arrayCount' ), + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_int)), 'safeArray' )), + COMMETHOD([], HRESULT, 'IntSafeArrayToNativeArray', + ( ['in'], _midlSAFEARRAY(c_int), 'intArray' ), + ( ['out'], POINTER(POINTER(c_int)), 'array' ), + ( ['out', 'retval'], POINTER(c_int), 'arrayCount' )), + COMMETHOD([], HRESULT, 'RectToVariant', + ( ['in'], tagRECT, 'rc' ), + ( ['out', 'retval'], POINTER(VARIANT), 'var' )), + COMMETHOD([], HRESULT, 'VariantToRect', + ( ['in'], VARIANT, 'var' ), + ( ['out', 'retval'], POINTER(tagRECT), 'rc' )), + COMMETHOD([], HRESULT, 'SafeArrayToRectNativeArray', + ( ['in'], _midlSAFEARRAY(c_double), 'rects' ), + ( ['out'], POINTER(POINTER(tagRECT)), 'rectArray' ), + ( ['out', 'retval'], POINTER(c_int), 'rectArrayCount' )), + COMMETHOD([], HRESULT, 'CreateProxyFactoryEntry', + ( ['in'], POINTER(IUIAutomationProxyFactory), 'factory' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationProxyFactoryEntry)), 'factoryEntry' )), + COMMETHOD(['propget'], HRESULT, 'ProxyFactoryMapping', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationProxyFactoryMapping)), 'factoryMapping' )), + COMMETHOD([], HRESULT, 'GetPropertyProgrammaticName', + ( ['in'], c_int, 'property' ), + ( ['out', 'retval'], POINTER(BSTR), 'name' )), + COMMETHOD([], HRESULT, 'GetPatternProgrammaticName', + ( ['in'], c_int, 'pattern' ), + ( ['out', 'retval'], POINTER(BSTR), 'name' )), + COMMETHOD([], HRESULT, 'PollForPotentialSupportedPatterns', + ( ['in'], POINTER(IUIAutomationElement), 'pElement' ), + ( ['out'], POINTER(_midlSAFEARRAY(c_int)), 'patternIds' ), + ( ['out'], POINTER(_midlSAFEARRAY(BSTR)), 'patternNames' )), + COMMETHOD([], HRESULT, 'PollForPotentialSupportedProperties', + ( ['in'], POINTER(IUIAutomationElement), 'pElement' ), + ( ['out'], POINTER(_midlSAFEARRAY(c_int)), 'propertyIds' ), + ( ['out'], POINTER(_midlSAFEARRAY(BSTR)), 'propertyNames' )), + COMMETHOD([], HRESULT, 'CheckNotSupported', + ( ['in'], VARIANT, 'value' ), + ( ['out', 'retval'], POINTER(c_int), 'isNotSupported' )), + COMMETHOD(['propget'], HRESULT, 'ReservedNotSupportedValue', + ( ['out', 'retval'], POINTER(POINTER(IUnknown)), 'notSupportedValue' )), + 
COMMETHOD(['propget'], HRESULT, 'ReservedMixedAttributeValue', + ( ['out', 'retval'], POINTER(POINTER(IUnknown)), 'mixedAttributeValue' )), + COMMETHOD([], HRESULT, 'ElementFromIAccessible', + ( ['in'], POINTER(IAccessible), 'accessible' ), + ( ['in'], c_int, 'childId' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), + COMMETHOD([], HRESULT, 'ElementFromIAccessibleBuildCache', + ( ['in'], POINTER(IAccessible), 'accessible' ), + ( ['in'], c_int, 'childId' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), +] +################################################################ +## code template for IUIAutomation implementation +##class IUIAutomation_Impl(object): +## def CompareElements(self, el1, el2): +## '-no docstring-' +## #return areSame +## +## def CompareRuntimeIds(self, runtimeId1, runtimeId2): +## '-no docstring-' +## #return areSame +## +## def GetRootElement(self): +## '-no docstring-' +## #return root +## +## def ElementFromHandle(self, hwnd): +## '-no docstring-' +## #return element +## +## def ElementFromPoint(self, pt): +## '-no docstring-' +## #return element +## +## def GetFocusedElement(self): +## '-no docstring-' +## #return element +## +## def GetRootElementBuildCache(self, cacheRequest): +## '-no docstring-' +## #return root +## +## def ElementFromHandleBuildCache(self, hwnd, cacheRequest): +## '-no docstring-' +## #return element +## +## def ElementFromPointBuildCache(self, pt, cacheRequest): +## '-no docstring-' +## #return element +## +## def GetFocusedElementBuildCache(self, cacheRequest): +## '-no docstring-' +## #return element +## +## def CreateTreeWalker(self, pCondition): +## '-no docstring-' +## #return walker +## +## @property +## def ControlViewWalker(self): +## '-no docstring-' +## #return walker +## +## @property +## def ContentViewWalker(self): +## '-no docstring-' +## #return walker +## +## @property +## def RawViewWalker(self): +## '-no docstring-' +## #return walker +## +## @property +## def RawViewCondition(self): +## '-no docstring-' +## #return condition +## +## @property +## def ControlViewCondition(self): +## '-no docstring-' +## #return condition +## +## @property +## def ContentViewCondition(self): +## '-no docstring-' +## #return condition +## +## def CreateCacheRequest(self): +## '-no docstring-' +## #return cacheRequest +## +## def CreateTrueCondition(self): +## '-no docstring-' +## #return newCondition +## +## def CreateFalseCondition(self): +## '-no docstring-' +## #return newCondition +## +## def CreatePropertyCondition(self, propertyId, value): +## '-no docstring-' +## #return newCondition +## +## def CreatePropertyConditionEx(self, propertyId, value, flags): +## '-no docstring-' +## #return newCondition +## +## def CreateAndCondition(self, condition1, condition2): +## '-no docstring-' +## #return newCondition +## +## def CreateAndConditionFromArray(self, conditions): +## '-no docstring-' +## #return newCondition +## +## def CreateAndConditionFromNativeArray(self, conditions, conditionCount): +## '-no docstring-' +## #return newCondition +## +## def CreateOrCondition(self, condition1, condition2): +## '-no docstring-' +## #return newCondition +## +## def CreateOrConditionFromArray(self, conditions): +## '-no docstring-' +## #return newCondition +## +## def CreateOrConditionFromNativeArray(self, conditions, conditionCount): +## '-no docstring-' +## #return newCondition +## +## def CreateNotCondition(self, condition): 
+## '-no docstring-' +## #return newCondition +## +## def AddAutomationEventHandler(self, eventId, element, scope, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def RemoveAutomationEventHandler(self, eventId, element, handler): +## '-no docstring-' +## #return +## +## def AddPropertyChangedEventHandlerNativeArray(self, element, scope, cacheRequest, handler, propertyArray, propertyCount): +## '-no docstring-' +## #return +## +## def AddPropertyChangedEventHandler(self, element, scope, cacheRequest, handler, propertyArray): +## '-no docstring-' +## #return +## +## def RemovePropertyChangedEventHandler(self, element, handler): +## '-no docstring-' +## #return +## +## def AddStructureChangedEventHandler(self, element, scope, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def RemoveStructureChangedEventHandler(self, element, handler): +## '-no docstring-' +## #return +## +## def AddFocusChangedEventHandler(self, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def RemoveFocusChangedEventHandler(self, handler): +## '-no docstring-' +## #return +## +## def RemoveAllEventHandlers(self): +## '-no docstring-' +## #return +## +## def IntNativeArrayToSafeArray(self, array, arrayCount): +## '-no docstring-' +## #return safeArray +## +## def IntSafeArrayToNativeArray(self, intArray): +## '-no docstring-' +## #return array, arrayCount +## +## def RectToVariant(self, rc): +## '-no docstring-' +## #return var +## +## def VariantToRect(self, var): +## '-no docstring-' +## #return rc +## +## def SafeArrayToRectNativeArray(self, rects): +## '-no docstring-' +## #return rectArray, rectArrayCount +## +## def CreateProxyFactoryEntry(self, factory): +## '-no docstring-' +## #return factoryEntry +## +## @property +## def ProxyFactoryMapping(self): +## '-no docstring-' +## #return factoryMapping +## +## def GetPropertyProgrammaticName(self, property): +## '-no docstring-' +## #return name +## +## def GetPatternProgrammaticName(self, pattern): +## '-no docstring-' +## #return name +## +## def PollForPotentialSupportedPatterns(self, pElement): +## '-no docstring-' +## #return patternIds, patternNames +## +## def PollForPotentialSupportedProperties(self, pElement): +## '-no docstring-' +## #return propertyIds, propertyNames +## +## def CheckNotSupported(self, value): +## '-no docstring-' +## #return isNotSupported +## +## @property +## def ReservedNotSupportedValue(self): +## '-no docstring-' +## #return notSupportedValue +## +## @property +## def ReservedMixedAttributeValue(self): +## '-no docstring-' +## #return mixedAttributeValue +## +## def ElementFromIAccessible(self, accessible, childId): +## '-no docstring-' +## #return element +## +## def ElementFromIAccessibleBuildCache(self, accessible, childId, cacheRequest): +## '-no docstring-' +## #return element +## + +class IUIAutomationCustomNavigationPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{01EA217A-1766-47ED-A6CC-ACF492854B1F}') + _idlflags_ = [] + +# values for enumeration 'NavigateDirection' +NavigateDirection_Parent = 0 +NavigateDirection_NextSibling = 1 +NavigateDirection_PreviousSibling = 2 +NavigateDirection_FirstChild = 3 +NavigateDirection_LastChild = 4 +NavigateDirection = c_int # enum +IUIAutomationCustomNavigationPattern._methods_ = [ + COMMETHOD([], HRESULT, 'Navigate', + ( ['in'], NavigateDirection, 'direction' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'pRetVal' )), +] 
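+################################################################
+## Editor's note -- illustrative only, not part of the comtypes-generated
+## module above. The interfaces declared in this file (CUIAutomation,
+## IUIAutomation, IUIAutomationElement, ...) are what the uiautomation
+## dependency builds on to read the title of the active window. A minimal
+## sketch of how these bindings are typically consumed, assuming the
+## wrapper was produced by comtypes.client.GetModule("UIAutomationCore.dll"):
+##
+##   import comtypes.client
+##   UIA = comtypes.client.GetModule("UIAutomationCore.dll")
+##
+##   # CreateObject on the CUIAutomation CoClass returns its default
+##   # interface, IUIAutomation (see _com_interfaces_ earlier in this file).
+##   uia = comtypes.client.CreateObject(UIA.CUIAutomation)
+##
+##   # GetFocusedElement plus the CurrentName propget declared here are
+##   # enough to recover the active window/control title.
+##   focused = uia.GetFocusedElement()
+##   print(focused.CurrentName)
+##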
+################################################################ +## code template for IUIAutomationCustomNavigationPattern implementation +##class IUIAutomationCustomNavigationPattern_Impl(object): +## def Navigate(self, direction): +## '-no docstring-' +## #return pRetVal +## + + +# values for enumeration 'ToggleState' +ToggleState_Off = 0 +ToggleState_On = 1 +ToggleState_Indeterminate = 2 +ToggleState = c_int # enum + +# values for enumeration 'TreeTraversalOptions' +TreeTraversalOptions_Default = 0 +TreeTraversalOptions_PostOrder = 1 +TreeTraversalOptions_LastToFirstOrder = 2 +TreeTraversalOptions = c_int # enum +class IUIAutomationTransformPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{A9B55844-A55D-4EF0-926D-569C16FF89BB}') + _idlflags_ = [] +IUIAutomationTransformPattern._methods_ = [ + COMMETHOD([], HRESULT, 'Move', + ( ['in'], c_double, 'x' ), + ( ['in'], c_double, 'y' )), + COMMETHOD([], HRESULT, 'Resize', + ( ['in'], c_double, 'width' ), + ( ['in'], c_double, 'height' )), + COMMETHOD([], HRESULT, 'Rotate', + ( ['in'], c_double, 'degrees' )), + COMMETHOD(['propget'], HRESULT, 'CurrentCanMove', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentCanResize', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentCanRotate', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCanMove', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCanResize', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCanRotate', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code template for IUIAutomationTransformPattern implementation +##class IUIAutomationTransformPattern_Impl(object): +## def Move(self, x, y): +## '-no docstring-' +## #return +## +## def Resize(self, width, height): +## '-no docstring-' +## #return +## +## def Rotate(self, degrees): +## '-no docstring-' +## #return +## +## @property +## def CurrentCanMove(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentCanResize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentCanRotate(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCanMove(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCanResize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCanRotate(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationBoolCondition(IUIAutomationCondition): + _case_insensitive_ = True + _iid_ = GUID('{1B4E1F2E-75EB-4D0B-8952-5A69988E2307}') + _idlflags_ = [] +IUIAutomationBoolCondition._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'BooleanValue', + ( ['out', 'retval'], POINTER(c_int), 'boolVal' )), +] +################################################################ +## code template for IUIAutomationBoolCondition implementation +##class IUIAutomationBoolCondition_Impl(object): +## @property +## def BooleanValue(self): +## '-no docstring-' +## #return boolVal +## + +IUIAutomationActiveTextPositionChangedEventHandler._methods_ = [ + COMMETHOD([], HRESULT, 'HandleActiveTextPositionChangedEvent', + ( ['in'], POINTER(IUIAutomationElement), 'sender' ), + ( ['in'], POINTER(IUIAutomationTextRange), 'range' )), +] 
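+################################################################
+## Editor's note -- illustrative only, not part of the generated module.
+## A sketch of how the tree-query pieces declared here (GetRootElement,
+## CreateTrueCondition, FindAll, TreeScope_Children) can enumerate the
+## top-level windows whose titles AutoTimer records; `uia` is the
+## IUIAutomation pointer from the previous note, and Length/GetElement
+## come from IUIAutomationElementArray:
+##
+##   root = uia.GetRootElement()           # the desktop element
+##   cond = uia.CreateTrueCondition()      # matches every element
+##   wins = root.FindAll(UIA.TreeScope_Children, cond)
+##   for i in range(wins.Length):
+##       w = wins.GetElement(i)
+##       print(w.CurrentNativeWindowHandle, w.CurrentName)
+##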
+################################################################ +## code template for IUIAutomationActiveTextPositionChangedEventHandler implementation +##class IUIAutomationActiveTextPositionChangedEventHandler_Impl(object): +## def HandleActiveTextPositionChangedEvent(self, sender, range): +## '-no docstring-' +## #return +## + +class IUIAutomationLegacyIAccessiblePattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{828055AD-355B-4435-86D5-3B51C14A9B1B}') + _idlflags_ = [] +IUIAutomationLegacyIAccessiblePattern._methods_ = [ + COMMETHOD([], HRESULT, 'Select', + ( [], c_int, 'flagsSelect' )), + COMMETHOD([], HRESULT, 'DoDefaultAction'), + COMMETHOD([], HRESULT, 'SetValue', + ( [], WSTRING, 'szValue' )), + COMMETHOD(['propget'], HRESULT, 'CurrentChildId', + ( ['out', 'retval'], POINTER(c_int), 'pRetVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentName', + ( ['out', 'retval'], POINTER(BSTR), 'pszName' )), + COMMETHOD(['propget'], HRESULT, 'CurrentValue', + ( ['out', 'retval'], POINTER(BSTR), 'pszValue' )), + COMMETHOD(['propget'], HRESULT, 'CurrentDescription', + ( ['out', 'retval'], POINTER(BSTR), 'pszDescription' )), + COMMETHOD(['propget'], HRESULT, 'CurrentRole', + ( ['out', 'retval'], POINTER(c_ulong), 'pdwRole' )), + COMMETHOD(['propget'], HRESULT, 'CurrentState', + ( ['out', 'retval'], POINTER(c_ulong), 'pdwState' )), + COMMETHOD(['propget'], HRESULT, 'CurrentHelp', + ( ['out', 'retval'], POINTER(BSTR), 'pszHelp' )), + COMMETHOD(['propget'], HRESULT, 'CurrentKeyboardShortcut', + ( ['out', 'retval'], POINTER(BSTR), 'pszKeyboardShortcut' )), + COMMETHOD([], HRESULT, 'GetCurrentSelection', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'pvarSelectedChildren' )), + COMMETHOD(['propget'], HRESULT, 'CurrentDefaultAction', + ( ['out', 'retval'], POINTER(BSTR), 'pszDefaultAction' )), + COMMETHOD(['propget'], HRESULT, 'CachedChildId', + ( ['out', 'retval'], POINTER(c_int), 'pRetVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedName', + ( ['out', 'retval'], POINTER(BSTR), 'pszName' )), + COMMETHOD(['propget'], HRESULT, 'CachedValue', + ( ['out', 'retval'], POINTER(BSTR), 'pszValue' )), + COMMETHOD(['propget'], HRESULT, 'CachedDescription', + ( ['out', 'retval'], POINTER(BSTR), 'pszDescription' )), + COMMETHOD(['propget'], HRESULT, 'CachedRole', + ( ['out', 'retval'], POINTER(c_ulong), 'pdwRole' )), + COMMETHOD(['propget'], HRESULT, 'CachedState', + ( ['out', 'retval'], POINTER(c_ulong), 'pdwState' )), + COMMETHOD(['propget'], HRESULT, 'CachedHelp', + ( ['out', 'retval'], POINTER(BSTR), 'pszHelp' )), + COMMETHOD(['propget'], HRESULT, 'CachedKeyboardShortcut', + ( ['out', 'retval'], POINTER(BSTR), 'pszKeyboardShortcut' )), + COMMETHOD([], HRESULT, 'GetCachedSelection', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'pvarSelectedChildren' )), + COMMETHOD(['propget'], HRESULT, 'CachedDefaultAction', + ( ['out', 'retval'], POINTER(BSTR), 'pszDefaultAction' )), + COMMETHOD([], HRESULT, 'GetIAccessible', + ( ['out', 'retval'], POINTER(POINTER(IAccessible)), 'ppAccessible' )), +] +################################################################ +## code template for IUIAutomationLegacyIAccessiblePattern implementation +##class IUIAutomationLegacyIAccessiblePattern_Impl(object): +## def Select(self, flagsSelect): +## '-no docstring-' +## #return +## +## def DoDefaultAction(self): +## '-no docstring-' +## #return +## +## def SetValue(self, szValue): +## '-no docstring-' +## #return +## +## 
@property +## def CurrentChildId(self): +## '-no docstring-' +## #return pRetVal +## +## @property +## def CurrentName(self): +## '-no docstring-' +## #return pszName +## +## @property +## def CurrentValue(self): +## '-no docstring-' +## #return pszValue +## +## @property +## def CurrentDescription(self): +## '-no docstring-' +## #return pszDescription +## +## @property +## def CurrentRole(self): +## '-no docstring-' +## #return pdwRole +## +## @property +## def CurrentState(self): +## '-no docstring-' +## #return pdwState +## +## @property +## def CurrentHelp(self): +## '-no docstring-' +## #return pszHelp +## +## @property +## def CurrentKeyboardShortcut(self): +## '-no docstring-' +## #return pszKeyboardShortcut +## +## def GetCurrentSelection(self): +## '-no docstring-' +## #return pvarSelectedChildren +## +## @property +## def CurrentDefaultAction(self): +## '-no docstring-' +## #return pszDefaultAction +## +## @property +## def CachedChildId(self): +## '-no docstring-' +## #return pRetVal +## +## @property +## def CachedName(self): +## '-no docstring-' +## #return pszName +## +## @property +## def CachedValue(self): +## '-no docstring-' +## #return pszValue +## +## @property +## def CachedDescription(self): +## '-no docstring-' +## #return pszDescription +## +## @property +## def CachedRole(self): +## '-no docstring-' +## #return pdwRole +## +## @property +## def CachedState(self): +## '-no docstring-' +## #return pdwState +## +## @property +## def CachedHelp(self): +## '-no docstring-' +## #return pszHelp +## +## @property +## def CachedKeyboardShortcut(self): +## '-no docstring-' +## #return pszKeyboardShortcut +## +## def GetCachedSelection(self): +## '-no docstring-' +## #return pvarSelectedChildren +## +## @property +## def CachedDefaultAction(self): +## '-no docstring-' +## #return pszDefaultAction +## +## def GetIAccessible(self): +## '-no docstring-' +## #return ppAccessible +## + +class IUIAutomationPropertyCondition(IUIAutomationCondition): + _case_insensitive_ = True + _iid_ = GUID('{99EBF2CB-5578-4267-9AD4-AFD6EA77E94B}') + _idlflags_ = [] +IUIAutomationPropertyCondition._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'propertyId', + ( ['out', 'retval'], POINTER(c_int), 'propertyId' )), + COMMETHOD(['propget'], HRESULT, 'PropertyValue', + ( ['out', 'retval'], POINTER(VARIANT), 'PropertyValue' )), + COMMETHOD(['propget'], HRESULT, 'PropertyConditionFlags', + ( ['out', 'retval'], POINTER(PropertyConditionFlags), 'flags' )), +] +################################################################ +## code template for IUIAutomationPropertyCondition implementation +##class IUIAutomationPropertyCondition_Impl(object): +## @property +## def propertyId(self): +## '-no docstring-' +## #return propertyId +## +## @property +## def PropertyValue(self): +## '-no docstring-' +## #return PropertyValue +## +## @property +## def PropertyConditionFlags(self): +## '-no docstring-' +## #return flags +## + +class IUIAutomationValuePattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{A94CD8B1-0844-4CD6-9D2D-640537AB39E9}') + _idlflags_ = [] +IUIAutomationValuePattern._methods_ = [ + COMMETHOD([], HRESULT, 'SetValue', + ( ['in'], BSTR, 'val' )), + COMMETHOD(['propget'], HRESULT, 'CurrentValue', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsReadOnly', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedValue', + ( ['out', 'retval'], 
POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsReadOnly', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code template for IUIAutomationValuePattern implementation +##class IUIAutomationValuePattern_Impl(object): +## def SetValue(self, val): +## '-no docstring-' +## #return +## +## @property +## def CurrentValue(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsReadOnly(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedValue(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsReadOnly(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationElement2(IUIAutomationElement): + _case_insensitive_ = True + _iid_ = GUID('{6749C683-F70D-4487-A698-5F79D55290D6}') + _idlflags_ = [] +class IUIAutomationElement3(IUIAutomationElement2): + _case_insensitive_ = True + _iid_ = GUID('{8471DF34-AEE0-4A01-A7DE-7DB9AF12C296}') + _idlflags_ = [] +class IUIAutomationElement4(IUIAutomationElement3): + _case_insensitive_ = True + _iid_ = GUID('{3B6E233C-52FB-4063-A4C9-77C075C2A06B}') + _idlflags_ = [] +class IUIAutomationElement5(IUIAutomationElement4): + _case_insensitive_ = True + _iid_ = GUID('{98141C1D-0D0E-4175-BBE2-6BFF455842A7}') + _idlflags_ = [] +class IUIAutomationElement6(IUIAutomationElement5): + _case_insensitive_ = True + _iid_ = GUID('{4780D450-8BCA-4977-AFA5-A4A517F555E3}') + _idlflags_ = [] +class IUIAutomationElement7(IUIAutomationElement6): + _case_insensitive_ = True + _iid_ = GUID('{204E8572-CFC3-4C11-B0C8-7DA7420750B7}') + _idlflags_ = [] +class IUIAutomationElement8(IUIAutomationElement7): + _case_insensitive_ = True + _iid_ = GUID('{8C60217D-5411-4CDE-BCC0-1CEDA223830C}') + _idlflags_ = [] +IUIAutomationElement._methods_ = [ + COMMETHOD([], HRESULT, 'SetFocus'), + COMMETHOD([], HRESULT, 'GetRuntimeId', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_int)), 'runtimeId' )), + COMMETHOD([], HRESULT, 'FindFirst', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'found' )), + COMMETHOD([], HRESULT, 'FindAll', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'found' )), + COMMETHOD([], HRESULT, 'FindFirstBuildCache', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'found' )), + COMMETHOD([], HRESULT, 'FindAllBuildCache', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'found' )), + COMMETHOD([], HRESULT, 'BuildUpdatedCache', + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'updatedElement' )), + COMMETHOD([], HRESULT, 'GetCurrentPropertyValue', + ( ['in'], c_int, 'propertyId' ), + ( ['out', 'retval'], POINTER(VARIANT), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCurrentPropertyValueEx', + ( ['in'], c_int, 'propertyId' ), + ( ['in'], c_int, 'ignoreDefaultValue' ), + ( ['out', 'retval'], POINTER(VARIANT), 'retVal' )), + COMMETHOD([], HRESULT, 
'GetCachedPropertyValue', + ( ['in'], c_int, 'propertyId' ), + ( ['out', 'retval'], POINTER(VARIANT), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedPropertyValueEx', + ( ['in'], c_int, 'propertyId' ), + ( ['in'], c_int, 'ignoreDefaultValue' ), + ( ['out', 'retval'], POINTER(VARIANT), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCurrentPatternAs', + ( ['in'], c_int, 'patternId' ), + ( ['in'], POINTER(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.GUID), 'riid' ), + ( ['out', 'retval'], POINTER(c_void_p), 'patternObject' )), + COMMETHOD([], HRESULT, 'GetCachedPatternAs', + ( ['in'], c_int, 'patternId' ), + ( ['in'], POINTER(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.GUID), 'riid' ), + ( ['out', 'retval'], POINTER(c_void_p), 'patternObject' )), + COMMETHOD([], HRESULT, 'GetCurrentPattern', + ( ['in'], c_int, 'patternId' ), + ( ['out', 'retval'], POINTER(POINTER(IUnknown)), 'patternObject' )), + COMMETHOD([], HRESULT, 'GetCachedPattern', + ( ['in'], c_int, 'patternId' ), + ( ['out', 'retval'], POINTER(POINTER(IUnknown)), 'patternObject' )), + COMMETHOD([], HRESULT, 'GetCachedParent', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'parent' )), + COMMETHOD([], HRESULT, 'GetCachedChildren', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'children' )), + COMMETHOD(['propget'], HRESULT, 'CurrentProcessId', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentControlType', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentLocalizedControlType', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentName', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentAcceleratorKey', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentAccessKey', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentHasKeyboardFocus', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsKeyboardFocusable', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsEnabled', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentAutomationId', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentClassName', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentHelpText', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentCulture', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsControlElement', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsContentElement', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsPassword', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentNativeWindowHandle', + ( ['out', 'retval'], POINTER(c_void_p), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentItemType', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsOffscreen', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentOrientation', + ( ['out', 'retval'], POINTER(OrientationType), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 
'CurrentFrameworkId', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsRequiredForForm', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentItemStatus', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentBoundingRectangle', + ( ['out', 'retval'], POINTER(tagRECT), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentLabeledBy', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentAriaRole', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentAriaProperties', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsDataValidForForm', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentControllerFor', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentDescribedBy', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentFlowsTo', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentProviderDescription', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedProcessId', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedControlType', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedLocalizedControlType', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedName', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAcceleratorKey', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAccessKey', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedHasKeyboardFocus', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsKeyboardFocusable', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsEnabled', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAutomationId', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedClassName', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedHelpText', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCulture', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsControlElement', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsContentElement', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsPassword', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedNativeWindowHandle', + ( ['out', 'retval'], POINTER(c_void_p), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedItemType', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsOffscreen', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedOrientation', + ( ['out', 'retval'], POINTER(OrientationType), 'retVal' )), 
+ COMMETHOD(['propget'], HRESULT, 'CachedFrameworkId', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsRequiredForForm', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedItemStatus', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedBoundingRectangle', + ( ['out', 'retval'], POINTER(tagRECT), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedLabeledBy', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAriaRole', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAriaProperties', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsDataValidForForm', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedControllerFor', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedDescribedBy', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedFlowsTo', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedProviderDescription', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD([], HRESULT, 'GetClickablePoint', + ( ['out'], POINTER(tagPOINT), 'clickable' ), + ( ['out', 'retval'], POINTER(c_int), 'gotClickable' )), +] +################################################################ +## code template for IUIAutomationElement implementation +##class IUIAutomationElement_Impl(object): +## def SetFocus(self): +## '-no docstring-' +## #return +## +## def GetRuntimeId(self): +## '-no docstring-' +## #return runtimeId +## +## def FindFirst(self, scope, condition): +## '-no docstring-' +## #return found +## +## def FindAll(self, scope, condition): +## '-no docstring-' +## #return found +## +## def FindFirstBuildCache(self, scope, condition, cacheRequest): +## '-no docstring-' +## #return found +## +## def FindAllBuildCache(self, scope, condition, cacheRequest): +## '-no docstring-' +## #return found +## +## def BuildUpdatedCache(self, cacheRequest): +## '-no docstring-' +## #return updatedElement +## +## def GetCurrentPropertyValue(self, propertyId): +## '-no docstring-' +## #return retVal +## +## def GetCurrentPropertyValueEx(self, propertyId, ignoreDefaultValue): +## '-no docstring-' +## #return retVal +## +## def GetCachedPropertyValue(self, propertyId): +## '-no docstring-' +## #return retVal +## +## def GetCachedPropertyValueEx(self, propertyId, ignoreDefaultValue): +## '-no docstring-' +## #return retVal +## +## def GetCurrentPatternAs(self, patternId, riid): +## '-no docstring-' +## #return patternObject +## +## def GetCachedPatternAs(self, patternId, riid): +## '-no docstring-' +## #return patternObject +## +## def GetCurrentPattern(self, patternId): +## '-no docstring-' +## #return patternObject +## +## def GetCachedPattern(self, patternId): +## '-no docstring-' +## #return patternObject +## +## def GetCachedParent(self): +## '-no docstring-' +## #return parent +## +## def GetCachedChildren(self): +## '-no docstring-' +## #return children +## +## @property +## def CurrentProcessId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentControlType(self): +## '-no docstring-' +## #return retVal +## +## @property +## def 
CurrentLocalizedControlType(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentName(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentAcceleratorKey(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentAccessKey(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentHasKeyboardFocus(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsKeyboardFocusable(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsEnabled(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentAutomationId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentClassName(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentHelpText(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentCulture(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsControlElement(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsContentElement(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsPassword(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentNativeWindowHandle(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentItemType(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsOffscreen(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentOrientation(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentFrameworkId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsRequiredForForm(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentItemStatus(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentBoundingRectangle(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentLabeledBy(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentAriaRole(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentAriaProperties(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsDataValidForForm(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentControllerFor(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentDescribedBy(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentFlowsTo(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentProviderDescription(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedProcessId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedControlType(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedLocalizedControlType(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedName(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAcceleratorKey(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAccessKey(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedHasKeyboardFocus(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsKeyboardFocusable(self): +## '-no docstring-' +## #return retVal +## +## @property +## def 
CachedIsEnabled(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAutomationId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedClassName(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedHelpText(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCulture(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsControlElement(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsContentElement(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsPassword(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedNativeWindowHandle(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedItemType(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsOffscreen(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedOrientation(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedFrameworkId(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsRequiredForForm(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedItemStatus(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedBoundingRectangle(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedLabeledBy(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAriaRole(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAriaProperties(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsDataValidForForm(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedControllerFor(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedDescribedBy(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedFlowsTo(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedProviderDescription(self): +## '-no docstring-' +## #return retVal +## +## def GetClickablePoint(self): +## '-no docstring-' +## #return clickable, gotClickable +## + + +# values for enumeration 'LiveSetting' +Off = 0 +Polite = 1 +Assertive = 2 +LiveSetting = c_int # enum +IUIAutomationElement2._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentOptimizeForVisualContent', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedOptimizeForVisualContent', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentLiveSetting', + ( ['out', 'retval'], POINTER(LiveSetting), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedLiveSetting', + ( ['out', 'retval'], POINTER(LiveSetting), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentFlowsFrom', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedFlowsFrom', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), +] +################################################################ +## code template for IUIAutomationElement2 implementation +##class IUIAutomationElement2_Impl(object): +## @property +## def CurrentOptimizeForVisualContent(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedOptimizeForVisualContent(self): +## '-no docstring-' +## #return retVal +## +## 
@property +## def CurrentLiveSetting(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedLiveSetting(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentFlowsFrom(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedFlowsFrom(self): +## '-no docstring-' +## #return retVal +## + +IUIAutomationElement3._methods_ = [ + COMMETHOD([], HRESULT, 'ShowContextMenu'), + COMMETHOD(['propget'], HRESULT, 'CurrentIsPeripheral', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsPeripheral', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code template for IUIAutomationElement3 implementation +##class IUIAutomationElement3_Impl(object): +## def ShowContextMenu(self): +## '-no docstring-' +## #return +## +## @property +## def CurrentIsPeripheral(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsPeripheral(self): +## '-no docstring-' +## #return retVal +## + +IUIAutomationElement4._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentPositionInSet', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentSizeOfSet', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentLevel', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentAnnotationTypes', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_int)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentAnnotationObjects', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedPositionInSet', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedSizeOfSet', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedLevel', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAnnotationTypes', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_int)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedAnnotationObjects', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), +] +################################################################ +## code template for IUIAutomationElement4 implementation +##class IUIAutomationElement4_Impl(object): +## @property +## def CurrentPositionInSet(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentSizeOfSet(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentLevel(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentAnnotationTypes(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentAnnotationObjects(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedPositionInSet(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedSizeOfSet(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedLevel(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAnnotationTypes(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedAnnotationObjects(self): +## '-no docstring-' +## #return retVal +## + +IUIAutomationElement5._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentLandmarkType', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + 
COMMETHOD(['propget'], HRESULT, 'CurrentLocalizedLandmarkType', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedLandmarkType', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedLocalizedLandmarkType', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), +] +################################################################ +## code template for IUIAutomationElement5 implementation +##class IUIAutomationElement5_Impl(object): +## @property +## def CurrentLandmarkType(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentLocalizedLandmarkType(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedLandmarkType(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedLocalizedLandmarkType(self): +## '-no docstring-' +## #return retVal +## + +IUIAutomationElement6._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentFullDescription', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedFullDescription', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), +] +################################################################ +## code template for IUIAutomationElement6 implementation +##class IUIAutomationElement6_Impl(object): +## @property +## def CurrentFullDescription(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedFullDescription(self): +## '-no docstring-' +## #return retVal +## + +IUIAutomationElement7._methods_ = [ + COMMETHOD([], HRESULT, 'FindFirstWithOptions', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition' ), + ( ['in'], TreeTraversalOptions, 'traversalOptions' ), + ( ['in'], POINTER(IUIAutomationElement), 'root' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'found' )), + COMMETHOD([], HRESULT, 'FindAllWithOptions', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition' ), + ( ['in'], TreeTraversalOptions, 'traversalOptions' ), + ( ['in'], POINTER(IUIAutomationElement), 'root' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'found' )), + COMMETHOD([], HRESULT, 'FindFirstWithOptionsBuildCache', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], TreeTraversalOptions, 'traversalOptions' ), + ( ['in'], POINTER(IUIAutomationElement), 'root' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'found' )), + COMMETHOD([], HRESULT, 'FindAllWithOptionsBuildCache', + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCondition), 'condition' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], TreeTraversalOptions, 'traversalOptions' ), + ( ['in'], POINTER(IUIAutomationElement), 'root' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'found' )), + COMMETHOD([], HRESULT, 'GetCurrentMetadataValue', + ( ['in'], c_int, 'targetId' ), + ( ['in'], c_int, 'metadataId' ), + ( ['out', 'retval'], POINTER(VARIANT), 'returnVal' )), +] +################################################################ +## code template for IUIAutomationElement7 implementation +##class IUIAutomationElement7_Impl(object): +## def FindFirstWithOptions(self, scope, condition, traversalOptions, root): +## '-no docstring-' +## #return found +## +## def FindAllWithOptions(self, scope, condition, 
traversalOptions, root): +## '-no docstring-' +## #return found +## +## def FindFirstWithOptionsBuildCache(self, scope, condition, cacheRequest, traversalOptions, root): +## '-no docstring-' +## #return found +## +## def FindAllWithOptionsBuildCache(self, scope, condition, cacheRequest, traversalOptions, root): +## '-no docstring-' +## #return found +## +## def GetCurrentMetadataValue(self, targetId, metadataId): +## '-no docstring-' +## #return returnVal +## + +IUIAutomationElement8._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentHeadingLevel', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedHeadingLevel', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code template for IUIAutomationElement8 implementation +##class IUIAutomationElement8_Impl(object): +## @property +## def CurrentHeadingLevel(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedHeadingLevel(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationAndCondition(IUIAutomationCondition): + _case_insensitive_ = True + _iid_ = GUID('{A7D0AF36-B912-45FE-9855-091DDC174AEC}') + _idlflags_ = [] +IUIAutomationAndCondition._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'ChildCount', + ( ['out', 'retval'], POINTER(c_int), 'ChildCount' )), + COMMETHOD([], HRESULT, 'GetChildrenAsNativeArray', + ( ['out'], POINTER(POINTER(POINTER(IUIAutomationCondition))), 'childArray' ), + ( ['out'], POINTER(c_int), 'childArrayCount' )), + COMMETHOD([], HRESULT, 'GetChildren', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(POINTER(IUIAutomationCondition))), 'childArray' )), +] +################################################################ +## code template for IUIAutomationAndCondition implementation +##class IUIAutomationAndCondition_Impl(object): +## @property +## def ChildCount(self): +## '-no docstring-' +## #return ChildCount +## +## def GetChildrenAsNativeArray(self): +## '-no docstring-' +## #return childArray, childArrayCount +## +## def GetChildren(self): +## '-no docstring-' +## #return childArray +## + +class IUIAutomationElement9(IUIAutomationElement8): + _case_insensitive_ = True + _iid_ = GUID('{39325FAC-039D-440E-A3A3-5EB81A5CECC3}') + _idlflags_ = [] +IUIAutomationElement9._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentIsDialog', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsDialog', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code template for IUIAutomationElement9 implementation +##class IUIAutomationElement9_Impl(object): +## @property +## def CurrentIsDialog(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsDialog(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationWindowPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{0FAEF453-9208-43EF-BBB2-3B485177864F}') + _idlflags_ = [] +IUIAutomationWindowPattern._methods_ = [ + COMMETHOD([], HRESULT, 'Close'), + COMMETHOD([], HRESULT, 'WaitForInputIdle', + ( ['in'], c_int, 'milliseconds' ), + ( ['out', 'retval'], POINTER(c_int), 'success' )), + COMMETHOD([], HRESULT, 'SetWindowVisualState', + ( ['in'], WindowVisualState, 'state' )), + COMMETHOD(['propget'], HRESULT, 'CurrentCanMaximize', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], 
HRESULT, 'CurrentCanMinimize', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsModal', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentIsTopmost', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentWindowVisualState', + ( ['out', 'retval'], POINTER(WindowVisualState), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentWindowInteractionState', + ( ['out', 'retval'], POINTER(WindowInteractionState), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCanMaximize', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCanMinimize', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsModal', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsTopmost', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedWindowVisualState', + ( ['out', 'retval'], POINTER(WindowVisualState), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedWindowInteractionState', + ( ['out', 'retval'], POINTER(WindowInteractionState), 'retVal' )), +] +################################################################ +## code template for IUIAutomationWindowPattern implementation +##class IUIAutomationWindowPattern_Impl(object): +## def Close(self): +## '-no docstring-' +## #return +## +## def WaitForInputIdle(self, milliseconds): +## '-no docstring-' +## #return success +## +## def SetWindowVisualState(self, state): +## '-no docstring-' +## #return +## +## @property +## def CurrentCanMaximize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentCanMinimize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsModal(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsTopmost(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentWindowVisualState(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentWindowInteractionState(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCanMaximize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCanMinimize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsModal(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsTopmost(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedWindowVisualState(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedWindowInteractionState(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationOrCondition(IUIAutomationCondition): + _case_insensitive_ = True + _iid_ = GUID('{8753F032-3DB1-47B5-A1FC-6E34A266C712}') + _idlflags_ = [] +IUIAutomationOrCondition._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'ChildCount', + ( ['out', 'retval'], POINTER(c_int), 'ChildCount' )), + COMMETHOD([], HRESULT, 'GetChildrenAsNativeArray', + ( ['out'], POINTER(POINTER(POINTER(IUIAutomationCondition))), 'childArray' ), + ( ['out'], POINTER(c_int), 'childArrayCount' )), + COMMETHOD([], HRESULT, 'GetChildren', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(POINTER(IUIAutomationCondition))), 'childArray' )), +] +################################################################ +## code template for IUIAutomationOrCondition implementation +##class 
IUIAutomationOrCondition_Impl(object): +## @property +## def ChildCount(self): +## '-no docstring-' +## #return ChildCount +## +## def GetChildrenAsNativeArray(self): +## '-no docstring-' +## #return childArray, childArrayCount +## +## def GetChildren(self): +## '-no docstring-' +## #return childArray +## + +class IRawElementProviderSimple(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{D6DD68D1-86FD-4332-8666-9ABEDEA2D24C}') + _idlflags_ = ['oleautomation'] +IUIAutomationProxyFactory._methods_ = [ + COMMETHOD([], HRESULT, 'CreateProvider', + ( ['in'], c_void_p, 'hwnd' ), + ( ['in'], c_int, 'idObject' ), + ( ['in'], c_int, 'idChild' ), + ( ['out', 'retval'], POINTER(POINTER(IRawElementProviderSimple)), 'provider' )), + COMMETHOD(['propget'], HRESULT, 'ProxyFactoryId', + ( ['out', 'retval'], POINTER(BSTR), 'factoryId' )), +] +################################################################ +## code template for IUIAutomationProxyFactory implementation +##class IUIAutomationProxyFactory_Impl(object): +## def CreateProvider(self, hwnd, idObject, idChild): +## '-no docstring-' +## #return provider +## +## @property +## def ProxyFactoryId(self): +## '-no docstring-' +## #return factoryId +## + +IRawElementProviderSimple._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'ProviderOptions', + ( ['out', 'retval'], POINTER(ProviderOptions), 'pRetVal' )), + COMMETHOD([], HRESULT, 'GetPatternProvider', + ( ['in'], c_int, 'patternId' ), + ( ['out', 'retval'], POINTER(POINTER(IUnknown)), 'pRetVal' )), + COMMETHOD([], HRESULT, 'GetPropertyValue', + ( ['in'], c_int, 'propertyId' ), + ( ['out', 'retval'], POINTER(VARIANT), 'pRetVal' )), + COMMETHOD(['propget'], HRESULT, 'HostRawElementProvider', + ( ['out', 'retval'], POINTER(POINTER(IRawElementProviderSimple)), 'pRetVal' )), +] +################################################################ +## code template for IRawElementProviderSimple implementation +##class IRawElementProviderSimple_Impl(object): +## @property +## def ProviderOptions(self): +## '-no docstring-' +## #return pRetVal +## +## def GetPatternProvider(self, patternId): +## '-no docstring-' +## #return pRetVal +## +## def GetPropertyValue(self, propertyId): +## '-no docstring-' +## #return pRetVal +## +## @property +## def HostRawElementProvider(self): +## '-no docstring-' +## #return pRetVal +## + + +# values for enumeration 'ExpandCollapseState' +ExpandCollapseState_Collapsed = 0 +ExpandCollapseState_Expanded = 1 +ExpandCollapseState_PartiallyExpanded = 2 +ExpandCollapseState_LeafNode = 3 +ExpandCollapseState = c_int # enum +class IUIAutomationGridPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{414C3CDC-856B-4F5B-8538-3131C6302550}') + _idlflags_ = [] +IUIAutomationGridPattern._methods_ = [ + COMMETHOD([], HRESULT, 'GetItem', + ( ['in'], c_int, 'row' ), + ( ['in'], c_int, 'column' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), + COMMETHOD(['propget'], HRESULT, 'CurrentRowCount', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentColumnCount', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedRowCount', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedColumnCount', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code 
template for IUIAutomationGridPattern implementation +##class IUIAutomationGridPattern_Impl(object): +## def GetItem(self, row, column): +## '-no docstring-' +## #return element +## +## @property +## def CurrentRowCount(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentColumnCount(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedRowCount(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedColumnCount(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationGridItemPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{78F8EF57-66C3-4E09-BD7C-E79B2004894D}') + _idlflags_ = [] +IUIAutomationGridItemPattern._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentContainingGrid', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentRow', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentColumn', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentRowSpan', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentColumnSpan', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedContainingGrid', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedRow', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedColumn', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedRowSpan', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedColumnSpan', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code template for IUIAutomationGridItemPattern implementation +##class IUIAutomationGridItemPattern_Impl(object): +## @property +## def CurrentContainingGrid(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentRow(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentColumn(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentRowSpan(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentColumnSpan(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedContainingGrid(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedRow(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedColumn(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedRowSpan(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedColumnSpan(self): +## '-no docstring-' +## #return retVal +## + +class Library(object): + name = 'UIAutomationClient' + _reg_typelib_ = ('{944DE083-8FB8-45CF-BCB7-C477ACB2F897}', 1, 0) + +UIA_SelectionPatternId = 10001 # Constant c_int +UIA_InvokePatternId = 10000 # Constant c_int +UIA_PositionInSetPropertyId = 30152 # Constant c_int +class IUIAutomationScrollPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{88F4D42A-E881-459D-A77C-73BBBB7E02DC}') + _idlflags_ = [] + +# values for enumeration 'ScrollAmount' +ScrollAmount_LargeDecrement = 0 +ScrollAmount_SmallDecrement = 1 
+ScrollAmount_NoAmount = 2 +ScrollAmount_LargeIncrement = 3 +ScrollAmount_SmallIncrement = 4 +ScrollAmount = c_int # enum +IUIAutomationScrollPattern._methods_ = [ + COMMETHOD([], HRESULT, 'Scroll', + ( ['in'], ScrollAmount, 'horizontalAmount' ), + ( ['in'], ScrollAmount, 'verticalAmount' )), + COMMETHOD([], HRESULT, 'SetScrollPercent', + ( ['in'], c_double, 'horizontalPercent' ), + ( ['in'], c_double, 'verticalPercent' )), + COMMETHOD(['propget'], HRESULT, 'CurrentHorizontalScrollPercent', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentVerticalScrollPercent', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentHorizontalViewSize', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentVerticalViewSize', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentHorizontallyScrollable', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentVerticallyScrollable', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedHorizontalScrollPercent', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedVerticalScrollPercent', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedHorizontalViewSize', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedVerticalViewSize', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedHorizontallyScrollable', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedVerticallyScrollable', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), +] +################################################################ +## code template for IUIAutomationScrollPattern implementation +##class IUIAutomationScrollPattern_Impl(object): +## def Scroll(self, horizontalAmount, verticalAmount): +## '-no docstring-' +## #return +## +## def SetScrollPercent(self, horizontalPercent, verticalPercent): +## '-no docstring-' +## #return +## +## @property +## def CurrentHorizontalScrollPercent(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentVerticalScrollPercent(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentHorizontalViewSize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentVerticalViewSize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentHorizontallyScrollable(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentVerticallyScrollable(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedHorizontalScrollPercent(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedVerticalScrollPercent(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedHorizontalViewSize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedVerticalViewSize(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedHorizontallyScrollable(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedVerticallyScrollable(self): +## '-no docstring-' +## #return retVal +## + +UIA_IsCustomNavigationPatternAvailablePropertyId = 30151 # Constant c_int +UIA_IsPeripheralPropertyId = 30150 # 
Constant c_int +UIA_SizeOfSetPropertyId = 30153 # Constant c_int +UIA_TableColumnHeadersPropertyId = 30082 # Constant c_int +UIA_TableRowOrColumnMajorPropertyId = 30083 # Constant c_int +UIA_TableItemRowHeaderItemsPropertyId = 30084 # Constant c_int +class IUIAutomationTextRangeArray(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{CE4AE76A-E717-4C98-81EA-47371D028EB6}') + _idlflags_ = [] +IUIAutomationTextRangeArray._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'Length', + ( ['out', 'retval'], POINTER(c_int), 'Length' )), + COMMETHOD([], HRESULT, 'GetElement', + ( ['in'], c_int, 'index' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'element' )), +] +################################################################ +## code template for IUIAutomationTextRangeArray implementation +##class IUIAutomationTextRangeArray_Impl(object): +## @property +## def Length(self): +## '-no docstring-' +## #return Length +## +## def GetElement(self, index): +## '-no docstring-' +## #return element +## + +UIA_TableItemColumnHeaderItemsPropertyId = 30085 # Constant c_int +UIA_ToggleToggleStatePropertyId = 30086 # Constant c_int +UIA_TransformCanMovePropertyId = 30087 # Constant c_int +UIA_TransformCanResizePropertyId = 30088 # Constant c_int +UIA_TransformCanRotatePropertyId = 30089 # Constant c_int +UIA_IsLegacyIAccessiblePatternAvailablePropertyId = 30090 # Constant c_int +UIA_LegacyIAccessibleChildIdPropertyId = 30091 # Constant c_int +class IUIAutomationTransformPattern2(IUIAutomationTransformPattern): + _case_insensitive_ = True + _iid_ = GUID('{6D74D017-6ECB-4381-B38B-3C17A48FF1C2}') + _idlflags_ = [] + +# values for enumeration 'ZoomUnit' +ZoomUnit_NoAmount = 0 +ZoomUnit_LargeDecrement = 1 +ZoomUnit_SmallDecrement = 2 +ZoomUnit_LargeIncrement = 3 +ZoomUnit_SmallIncrement = 4 +ZoomUnit = c_int # enum +IUIAutomationTransformPattern2._methods_ = [ + COMMETHOD([], HRESULT, 'Zoom', + ( ['in'], c_double, 'zoomValue' )), + COMMETHOD([], HRESULT, 'ZoomByUnit', + ( ['in'], ZoomUnit, 'ZoomUnit' )), + COMMETHOD(['propget'], HRESULT, 'CurrentCanZoom', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCanZoom', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentZoomLevel', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedZoomLevel', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentZoomMinimum', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedZoomMinimum', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentZoomMaximum', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedZoomMaximum', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), +] +################################################################ +## code template for IUIAutomationTransformPattern2 implementation +##class IUIAutomationTransformPattern2_Impl(object): +## def Zoom(self, zoomValue): +## '-no docstring-' +## #return +## +## def ZoomByUnit(self, ZoomUnit): +## '-no docstring-' +## #return +## +## @property +## def CurrentCanZoom(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCanZoom(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentZoomLevel(self): +## '-no docstring-' +## #return 
retVal +## +## @property +## def CachedZoomLevel(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentZoomMinimum(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedZoomMinimum(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentZoomMaximum(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedZoomMaximum(self): +## '-no docstring-' +## #return retVal +## + +UIA_LegacyIAccessibleNamePropertyId = 30092 # Constant c_int +UIA_LegacyIAccessibleValuePropertyId = 30093 # Constant c_int +UIA_LegacyIAccessibleDescriptionPropertyId = 30094 # Constant c_int +UIA_LegacyIAccessibleRolePropertyId = 30095 # Constant c_int +class IUIAutomationTextPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{32EBA289-3583-42C9-9C59-3B6D9A1E9B6A}') + _idlflags_ = [] + +# values for enumeration 'SupportedTextSelection' +SupportedTextSelection_None = 0 +SupportedTextSelection_Single = 1 +SupportedTextSelection_Multiple = 2 +SupportedTextSelection = c_int # enum +IUIAutomationTextPattern._methods_ = [ + COMMETHOD([], HRESULT, 'RangeFromPoint', + ( ['in'], tagPOINT, 'pt' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'range' )), + COMMETHOD([], HRESULT, 'RangeFromChild', + ( ['in'], POINTER(IUIAutomationElement), 'child' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'range' )), + COMMETHOD([], HRESULT, 'GetSelection', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRangeArray)), 'ranges' )), + COMMETHOD([], HRESULT, 'GetVisibleRanges', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRangeArray)), 'ranges' )), + COMMETHOD(['propget'], HRESULT, 'DocumentRange', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'range' )), + COMMETHOD(['propget'], HRESULT, 'SupportedTextSelection', + ( ['out', 'retval'], POINTER(SupportedTextSelection), 'SupportedTextSelection' )), +] +################################################################ +## code template for IUIAutomationTextPattern implementation +##class IUIAutomationTextPattern_Impl(object): +## def RangeFromPoint(self, pt): +## '-no docstring-' +## #return range +## +## def RangeFromChild(self, child): +## '-no docstring-' +## #return range +## +## def GetSelection(self): +## '-no docstring-' +## #return ranges +## +## def GetVisibleRanges(self): +## '-no docstring-' +## #return ranges +## +## @property +## def DocumentRange(self): +## '-no docstring-' +## #return range +## +## @property +## def SupportedTextSelection(self): +## '-no docstring-' +## #return SupportedTextSelection +## + +UIA_LegacyIAccessibleStatePropertyId = 30096 # Constant c_int +UIA_LegacyIAccessibleHelpPropertyId = 30097 # Constant c_int +UIA_LegacyIAccessibleKeyboardShortcutPropertyId = 30098 # Constant c_int +UIA_LegacyIAccessibleSelectionPropertyId = 30099 # Constant c_int +UIA_LegacyIAccessibleDefaultActionPropertyId = 30100 # Constant c_int +UIA_AriaRolePropertyId = 30101 # Constant c_int +UIA_AriaPropertiesPropertyId = 30102 # Constant c_int +UIA_IsDataValidForFormPropertyId = 30103 # Constant c_int +UIA_ControllerForPropertyId = 30104 # Constant c_int +UIA_DescribedByPropertyId = 30105 # Constant c_int +UIA_FlowsToPropertyId = 30106 # Constant c_int +UIA_ProviderDescriptionPropertyId = 30107 # Constant c_int +UIA_IsItemContainerPatternAvailablePropertyId = 30108 # Constant c_int +UIA_IsVirtualizedItemPatternAvailablePropertyId = 30109 # Constant 
c_int +UIA_IsSynchronizedInputPatternAvailablePropertyId = 30110 # Constant c_int +UIA_OptimizeForVisualContentPropertyId = 30111 # Constant c_int +UIA_IsObjectModelPatternAvailablePropertyId = 30112 # Constant c_int +UIA_AnnotationAnnotationTypeIdPropertyId = 30113 # Constant c_int +UIA_AnnotationAnnotationTypeNamePropertyId = 30114 # Constant c_int +UIA_AnnotationAuthorPropertyId = 30115 # Constant c_int +UIA_AnnotationDateTimePropertyId = 30116 # Constant c_int +UIA_AnnotationTargetPropertyId = 30117 # Constant c_int +UIA_IsAnnotationPatternAvailablePropertyId = 30118 # Constant c_int +UIA_IsTextPattern2AvailablePropertyId = 30119 # Constant c_int +UIA_StylesStyleIdPropertyId = 30120 # Constant c_int +UIA_StylesStyleNamePropertyId = 30121 # Constant c_int +UIA_StylesFillColorPropertyId = 30122 # Constant c_int +UIA_StylesFillPatternStylePropertyId = 30123 # Constant c_int +UIA_StylesShapePropertyId = 30124 # Constant c_int +UIA_StylesFillPatternColorPropertyId = 30125 # Constant c_int +UIA_StylesExtendedPropertiesPropertyId = 30126 # Constant c_int +UIA_IsStylesPatternAvailablePropertyId = 30127 # Constant c_int +UIA_IsSpreadsheetPatternAvailablePropertyId = 30128 # Constant c_int +UIA_SpreadsheetItemFormulaPropertyId = 30129 # Constant c_int +class IUIAutomationTextChildPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{6552B038-AE05-40C8-ABFD-AA08352AAB86}') + _idlflags_ = [] +IUIAutomationTextChildPattern._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'TextContainer', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'container' )), + COMMETHOD(['propget'], HRESULT, 'TextRange', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'range' )), +] +################################################################ +## code template for IUIAutomationTextChildPattern implementation +##class IUIAutomationTextChildPattern_Impl(object): +## @property +## def TextContainer(self): +## '-no docstring-' +## #return container +## +## @property +## def TextRange(self): +## '-no docstring-' +## #return range +## + +UIA_SpreadsheetItemAnnotationObjectsPropertyId = 30130 # Constant c_int +UIA_SpreadsheetItemAnnotationTypesPropertyId = 30131 # Constant c_int +UIA_IsSpreadsheetItemPatternAvailablePropertyId = 30132 # Constant c_int +class IUIAutomationDragPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{1DC7B570-1F54-4BAD-BCDA-D36A722FB7BD}') + _idlflags_ = [] +IUIAutomationDragPattern._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentIsGrabbed', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsGrabbed', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentDropEffect', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedDropEffect', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentDropEffects', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(BSTR)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedDropEffects', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(BSTR)), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCurrentGrabbedItems', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedGrabbedItems', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), +] 
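The pattern interfaces declared above (ValuePattern, WindowPattern, TextPattern, DragPattern, ...) are never instantiated directly; comtypes builds the call machinery from these `_methods_` tables at import time, and a client reaches a pattern through an `IUIAutomationElement`. The following is an editor's illustrative sketch, not part of the committed generated file; it assumes the wrapper is importable as `comtypes.gen.UIAutomationClient` and that the focused control actually supports the Value pattern.

import comtypes.client

# Regenerate/load the wrapper module shown in this diff from the system type library.
comtypes.client.GetModule("UIAutomationCore.dll")
from comtypes.gen import UIAutomationClient as UIA

# CLSID_CUIAutomation; CreateObject hands back the root IUIAutomation interface.
uia = comtypes.client.CreateObject(
    "{FF48DBA4-60EF-4201-AA87-54103EEF594E}",
    interface=UIA.IUIAutomation,
)

element = uia.GetFocusedElement()                           # IUIAutomationElement of the focused control
unknown = element.GetCurrentPattern(UIA.UIA_ValuePatternId)  # raw IUnknown
value_pattern = unknown.QueryInterface(UIA.IUIAutomationValuePattern)
print(value_pattern.CurrentValue, bool(value_pattern.CurrentIsReadOnly))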
+################################################################ +## code template for IUIAutomationDragPattern implementation +##class IUIAutomationDragPattern_Impl(object): +## @property +## def CurrentIsGrabbed(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsGrabbed(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentDropEffect(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedDropEffect(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentDropEffects(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedDropEffects(self): +## '-no docstring-' +## #return retVal +## +## def GetCurrentGrabbedItems(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedGrabbedItems(self): +## '-no docstring-' +## #return retVal +## + +UIA_Transform2CanZoomPropertyId = 30133 # Constant c_int +UIA_IsTransformPattern2AvailablePropertyId = 30134 # Constant c_int +class IUIAutomationTextPattern2(IUIAutomationTextPattern): + _case_insensitive_ = True + _iid_ = GUID('{506A921A-FCC9-409F-B23B-37EB74106872}') + _idlflags_ = [] +IUIAutomationTextPattern2._methods_ = [ + COMMETHOD([], HRESULT, 'RangeFromAnnotation', + ( ['in'], POINTER(IUIAutomationElement), 'annotation' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'range' )), + COMMETHOD([], HRESULT, 'GetCaretRange', + ( ['out'], POINTER(c_int), 'isActive' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'range' )), +] +################################################################ +## code template for IUIAutomationTextPattern2 implementation +##class IUIAutomationTextPattern2_Impl(object): +## def RangeFromAnnotation(self, annotation): +## '-no docstring-' +## #return range +## +## def GetCaretRange(self): +## '-no docstring-' +## #return isActive, range +## + +UIA_LiveSettingPropertyId = 30135 # Constant c_int +UIA_IsTextChildPatternAvailablePropertyId = 30136 # Constant c_int +UIA_IsDragPatternAvailablePropertyId = 30137 # Constant c_int +UIA_DragIsGrabbedPropertyId = 30138 # Constant c_int +UIA_DragDropEffectPropertyId = 30139 # Constant c_int +UIA_DragDropEffectsPropertyId = 30140 # Constant c_int +UIA_IsDropTargetPatternAvailablePropertyId = 30141 # Constant c_int +UIA_DropTargetDropTargetEffectPropertyId = 30142 # Constant c_int +class IUIAutomationTextEditPattern(IUIAutomationTextPattern): + _case_insensitive_ = True + _iid_ = GUID('{17E21576-996C-4870-99D9-BFF323380C06}') + _idlflags_ = [] +IUIAutomationTextEditPattern._methods_ = [ + COMMETHOD([], HRESULT, 'GetActiveComposition', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'range' )), + COMMETHOD([], HRESULT, 'GetConversionTarget', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationTextRange)), 'range' )), +] +################################################################ +## code template for IUIAutomationTextEditPattern implementation +##class IUIAutomationTextEditPattern_Impl(object): +## def GetActiveComposition(self): +## '-no docstring-' +## #return range +## +## def GetConversionTarget(self): +## '-no docstring-' +## #return range +## + +UIA_DropTargetDropTargetEffectsPropertyId = 30143 # Constant c_int +UIA_DragGrabbedItemsPropertyId = 30144 # Constant c_int +UIA_Transform2ZoomLevelPropertyId = 30145 # Constant c_int +UIA_Transform2ZoomMinimumPropertyId = 30146 # Constant c_int +UIA_Transform2ZoomMaximumPropertyId = 30147 # Constant c_int +UIA_FlowsFromPropertyId = 30148 # 
Constant c_int +UIA_IsTextEditPatternAvailablePropertyId = 30149 # Constant c_int +UIA_AcceleratorKeyPropertyId = 30006 # Constant c_int +UIA_SelectionItemIsSelectedPropertyId = 30079 # Constant c_int +HeadingLevel_None = 80050 # Constant c_int +UIA_ToolTipOpenedEventId = 20000 # Constant c_int +UIA_SelectionItemSelectionContainerPropertyId = 30080 # Constant c_int +UIA_CustomLandmarkTypeId = 80000 # Constant c_int +UIA_HasKeyboardFocusPropertyId = 30008 # Constant c_int +UIA_WindowIsTopmostPropertyId = 30078 # Constant c_int +IUIAutomation2._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'AutoSetFocus', + ( ['out', 'retval'], POINTER(c_int), 'AutoSetFocus' )), + COMMETHOD(['propput'], HRESULT, 'AutoSetFocus', + ( ['in'], c_int, 'AutoSetFocus' )), + COMMETHOD(['propget'], HRESULT, 'ConnectionTimeout', + ( ['out', 'retval'], POINTER(c_ulong), 'timeout' )), + COMMETHOD(['propput'], HRESULT, 'ConnectionTimeout', + ( ['in'], c_ulong, 'timeout' )), + COMMETHOD(['propget'], HRESULT, 'TransactionTimeout', + ( ['out', 'retval'], POINTER(c_ulong), 'timeout' )), + COMMETHOD(['propput'], HRESULT, 'TransactionTimeout', + ( ['in'], c_ulong, 'timeout' )), +] +################################################################ +## code template for IUIAutomation2 implementation +##class IUIAutomation2_Impl(object): +## def _get(self): +## '-no docstring-' +## #return AutoSetFocus +## def _set(self, AutoSetFocus): +## '-no docstring-' +## AutoSetFocus = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return timeout +## def _set(self, timeout): +## '-no docstring-' +## ConnectionTimeout = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return timeout +## def _set(self, timeout): +## '-no docstring-' +## TransactionTimeout = property(_get, _set, doc = _set.__doc__) +## + +IUIAutomation3._methods_ = [ + COMMETHOD([], HRESULT, 'AddTextEditTextChangedEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], TreeScope, 'scope' ), + ( ['in'], TextEditChangeType, 'TextEditChangeType' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationTextEditTextChangedEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'RemoveTextEditTextChangedEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationTextEditTextChangedEventHandler), 'handler' )), +] +################################################################ +## code template for IUIAutomation3 implementation +##class IUIAutomation3_Impl(object): +## def AddTextEditTextChangedEventHandler(self, element, scope, TextEditChangeType, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def RemoveTextEditTextChangedEventHandler(self, element, handler): +## '-no docstring-' +## #return +## + +IUIAutomation4._methods_ = [ + COMMETHOD([], HRESULT, 'AddChangesEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(c_int), 'changeTypes' ), + ( ['in'], c_int, 'changesCount' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'pCacheRequest' ), + ( ['in'], POINTER(IUIAutomationChangesEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'RemoveChangesEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationChangesEventHandler), 'handler' )), +] +################################################################ +## code template for IUIAutomation4 
implementation +##class IUIAutomation4_Impl(object): +## def AddChangesEventHandler(self, element, scope, changeTypes, changesCount, pCacheRequest, handler): +## '-no docstring-' +## #return +## +## def RemoveChangesEventHandler(self, element, handler): +## '-no docstring-' +## #return +## + +UIA_RuntimeIdPropertyId = 30000 # Constant c_int +UIA_TableRowHeadersPropertyId = 30081 # Constant c_int +UIA_SayAsInterpretAsMetadataId = 100000 # Constant c_int +UIA_SelectionItemPatternId = 10010 # Constant c_int +UIA_IsEnabledPropertyId = 30010 # Constant c_int +StyleId_Emphasis = 70013 # Constant c_int +UIA_DockPatternId = 10011 # Constant c_int +UIA_AutomationIdPropertyId = 30011 # Constant c_int +StyleId_Quote = 70014 # Constant c_int +UIA_TablePatternId = 10012 # Constant c_int +UIA_ClassNamePropertyId = 30012 # Constant c_int +StyleId_BulletedList = 70015 # Constant c_int +UIA_TableItemPatternId = 10013 # Constant c_int +UIA_HelpTextPropertyId = 30013 # Constant c_int +StyleId_NumberedList = 70016 # Constant c_int +UIA_TextPatternId = 10014 # Constant c_int +UIA_ClickablePointPropertyId = 30014 # Constant c_int +UIA_SummaryChangeId = 90000 # Constant c_int +UIA_TogglePatternId = 10015 # Constant c_int +UIA_CulturePropertyId = 30015 # Constant c_int +UIA_FormLandmarkTypeId = 80001 # Constant c_int +UIA_TransformPatternId = 10016 # Constant c_int +UIA_IsControlElementPropertyId = 30016 # Constant c_int +UIA_MainLandmarkTypeId = 80002 # Constant c_int +UIA_ScrollItemPatternId = 10017 # Constant c_int +UIA_IsContentElementPropertyId = 30017 # Constant c_int +UIA_NavigationLandmarkTypeId = 80003 # Constant c_int +UIA_LegacyIAccessiblePatternId = 10018 # Constant c_int +UIA_LabeledByPropertyId = 30018 # Constant c_int +UIA_SearchLandmarkTypeId = 80004 # Constant c_int +UIA_ItemContainerPatternId = 10019 # Constant c_int +UIA_IsPasswordPropertyId = 30019 # Constant c_int +UIA_VirtualizedItemPatternId = 10020 # Constant c_int +UIA_NativeWindowHandlePropertyId = 30020 # Constant c_int +HeadingLevel1 = 80051 # Constant c_int +UIA_SynchronizedInputPatternId = 10021 # Constant c_int +UIA_ItemTypePropertyId = 30021 # Constant c_int +HeadingLevel2 = 80052 # Constant c_int +UIA_ObjectModelPatternId = 10022 # Constant c_int +UIA_IsOffscreenPropertyId = 30022 # Constant c_int +HeadingLevel3 = 80053 # Constant c_int +IUIAutomation5._methods_ = [ + COMMETHOD([], HRESULT, 'AddNotificationEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationNotificationEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'RemoveNotificationEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationNotificationEventHandler), 'handler' )), +] +################################################################ +## code template for IUIAutomation5 implementation +##class IUIAutomation5_Impl(object): +## def AddNotificationEventHandler(self, element, scope, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def RemoveNotificationEventHandler(self, element, handler): +## '-no docstring-' +## #return +## + +UIA_AnnotationPatternId = 10023 # Constant c_int +UIA_OrientationPropertyId = 30023 # Constant c_int +HeadingLevel4 = 80054 # Constant c_int +UIA_TextPattern2Id = 10024 # Constant c_int +UIA_FrameworkIdPropertyId = 30024 # Constant c_int +HeadingLevel5 = 80055 # Constant c_int +UIA_StylesPatternId = 10025 # Constant c_int 
+UIA_IsRequiredForFormPropertyId = 30025 # Constant c_int +HeadingLevel6 = 80056 # Constant c_int +UIA_SpreadsheetPatternId = 10026 # Constant c_int +UIA_ItemStatusPropertyId = 30026 # Constant c_int +HeadingLevel7 = 80057 # Constant c_int +UIA_SpreadsheetItemPatternId = 10027 # Constant c_int +UIA_IsDockPatternAvailablePropertyId = 30027 # Constant c_int +HeadingLevel8 = 80058 # Constant c_int +UIA_TransformPattern2Id = 10028 # Constant c_int +UIA_IsExpandCollapsePatternAvailablePropertyId = 30028 # Constant c_int +HeadingLevel9 = 80059 # Constant c_int +UIA_TextChildPatternId = 10029 # Constant c_int +UIA_IsGridItemPatternAvailablePropertyId = 30029 # Constant c_int +UIA_DragPatternId = 10030 # Constant c_int +UIA_IsGridPatternAvailablePropertyId = 30030 # Constant c_int +UIA_DropTargetPatternId = 10031 # Constant c_int +UIA_IsInvokePatternAvailablePropertyId = 30031 # Constant c_int +UIA_TextEditPatternId = 10032 # Constant c_int +UIA_IsMultipleViewPatternAvailablePropertyId = 30032 # Constant c_int +UIA_CustomNavigationPatternId = 10033 # Constant c_int +UIA_IsRangeValuePatternAvailablePropertyId = 30033 # Constant c_int + +# values for enumeration 'ConnectionRecoveryBehaviorOptions' +ConnectionRecoveryBehaviorOptions_Disabled = 0 +ConnectionRecoveryBehaviorOptions_Enabled = 1 +ConnectionRecoveryBehaviorOptions = c_int # enum + +# values for enumeration 'CoalesceEventsOptions' +CoalesceEventsOptions_Disabled = 0 +CoalesceEventsOptions_Enabled = 1 +CoalesceEventsOptions = c_int # enum +IUIAutomation6._methods_ = [ + COMMETHOD([], HRESULT, 'CreateEventHandlerGroup', + ( ['out'], POINTER(POINTER(IUIAutomationEventHandlerGroup)), 'handlerGroup' )), + COMMETHOD([], HRESULT, 'AddEventHandlerGroup', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationEventHandlerGroup), 'handlerGroup' )), + COMMETHOD([], HRESULT, 'RemoveEventHandlerGroup', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationEventHandlerGroup), 'handlerGroup' )), + COMMETHOD(['propget'], HRESULT, 'ConnectionRecoveryBehavior', + ( ['out', 'retval'], POINTER(ConnectionRecoveryBehaviorOptions), 'ConnectionRecoveryBehaviorOptions' )), + COMMETHOD(['propput'], HRESULT, 'ConnectionRecoveryBehavior', + ( ['in'], ConnectionRecoveryBehaviorOptions, 'ConnectionRecoveryBehaviorOptions' )), + COMMETHOD(['propget'], HRESULT, 'CoalesceEvents', + ( ['out', 'retval'], POINTER(CoalesceEventsOptions), 'CoalesceEventsOptions' )), + COMMETHOD(['propput'], HRESULT, 'CoalesceEvents', + ( ['in'], CoalesceEventsOptions, 'CoalesceEventsOptions' )), + COMMETHOD([], HRESULT, 'AddActiveTextPositionChangedEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], TreeScope, 'scope' ), + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['in'], POINTER(IUIAutomationActiveTextPositionChangedEventHandler), 'handler' )), + COMMETHOD([], HRESULT, 'RemoveActiveTextPositionChangedEventHandler', + ( ['in'], POINTER(IUIAutomationElement), 'element' ), + ( ['in'], POINTER(IUIAutomationActiveTextPositionChangedEventHandler), 'handler' )), +] +################################################################ +## code template for IUIAutomation6 implementation +##class IUIAutomation6_Impl(object): +## def CreateEventHandlerGroup(self): +## '-no docstring-' +## #return handlerGroup +## +## def AddEventHandlerGroup(self, element, handlerGroup): +## '-no docstring-' +## #return +## +## def RemoveEventHandlerGroup(self, element, handlerGroup): +## '-no 
docstring-' +## #return +## +## def _get(self): +## '-no docstring-' +## #return ConnectionRecoveryBehaviorOptions +## def _set(self, ConnectionRecoveryBehaviorOptions): +## '-no docstring-' +## ConnectionRecoveryBehavior = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return CoalesceEventsOptions +## def _set(self, CoalesceEventsOptions): +## '-no docstring-' +## CoalesceEvents = property(_get, _set, doc = _set.__doc__) +## +## def AddActiveTextPositionChangedEventHandler(self, element, scope, cacheRequest, handler): +## '-no docstring-' +## #return +## +## def RemoveActiveTextPositionChangedEventHandler(self, element, handler): +## '-no docstring-' +## #return +## + +UIA_SelectionPattern2Id = 10034 # Constant c_int +UIA_IsScrollPatternAvailablePropertyId = 30034 # Constant c_int +UIA_AccessKeyPropertyId = 30007 # Constant c_int +UIA_IsScrollItemPatternAvailablePropertyId = 30035 # Constant c_int +UIA_ToolTipClosedEventId = 20001 # Constant c_int +UIA_IsSelectionItemPatternAvailablePropertyId = 30036 # Constant c_int + +# values for enumeration 'AutomationElementMode' +AutomationElementMode_None = 0 +AutomationElementMode_Full = 1 +AutomationElementMode = c_int # enum +UIA_StructureChangedEventId = 20002 # Constant c_int +UIA_IsSelectionPatternAvailablePropertyId = 30037 # Constant c_int +UIA_MenuOpenedEventId = 20003 # Constant c_int +UIA_IsTablePatternAvailablePropertyId = 30038 # Constant c_int +UIA_AutomationPropertyChangedEventId = 20004 # Constant c_int +UIA_IsTableItemPatternAvailablePropertyId = 30039 # Constant c_int +UIA_AutomationFocusChangedEventId = 20005 # Constant c_int +UIA_IsTextPatternAvailablePropertyId = 30040 # Constant c_int +UIA_AsyncContentLoadedEventId = 20006 # Constant c_int +UIA_IsTogglePatternAvailablePropertyId = 30041 # Constant c_int +UIA_MenuClosedEventId = 20007 # Constant c_int +UIA_IsTransformPatternAvailablePropertyId = 30042 # Constant c_int +UIA_LayoutInvalidatedEventId = 20008 # Constant c_int +UIA_IsValuePatternAvailablePropertyId = 30043 # Constant c_int +UIA_Invoke_InvokedEventId = 20009 # Constant c_int +UIA_IsWindowPatternAvailablePropertyId = 30044 # Constant c_int +UIA_SelectionItem_ElementAddedToSelectionEventId = 20010 # Constant c_int +UIA_ValueValuePropertyId = 30045 # Constant c_int +UIA_SelectionItem_ElementRemovedFromSelectionEventId = 20011 # Constant c_int +UIA_ValueIsReadOnlyPropertyId = 30046 # Constant c_int +UIA_SelectionItem_ElementSelectedEventId = 20012 # Constant c_int +UIA_RangeValueValuePropertyId = 30047 # Constant c_int +UIA_Selection_InvalidatedEventId = 20013 # Constant c_int +UIA_RangeValueIsReadOnlyPropertyId = 30048 # Constant c_int +UIA_Text_TextSelectionChangedEventId = 20014 # Constant c_int +UIA_RangeValueMinimumPropertyId = 30049 # Constant c_int +UIA_Text_TextChangedEventId = 20015 # Constant c_int +UIA_RangeValueMaximumPropertyId = 30050 # Constant c_int +UIA_Window_WindowOpenedEventId = 20016 # Constant c_int +UIA_RangeValueLargeChangePropertyId = 30051 # Constant c_int +UIA_Window_WindowClosedEventId = 20017 # Constant c_int +UIA_RangeValueSmallChangePropertyId = 30052 # Constant c_int +UIA_MenuModeStartEventId = 20018 # Constant c_int +UIA_ScrollHorizontalScrollPercentPropertyId = 30053 # Constant c_int +UIA_MenuModeEndEventId = 20019 # Constant c_int +UIA_ScrollHorizontalViewSizePropertyId = 30054 # Constant c_int +UIA_InputReachedTargetEventId = 20020 # Constant c_int +UIA_ScrollVerticalScrollPercentPropertyId = 30055 # Constant c_int 
+UIA_InputReachedOtherElementEventId = 20021 # Constant c_int +UIA_ScrollVerticalViewSizePropertyId = 30056 # Constant c_int +UIA_InputDiscardedEventId = 20022 # Constant c_int +UIA_ScrollHorizontallyScrollablePropertyId = 30057 # Constant c_int +UIA_SystemAlertEventId = 20023 # Constant c_int +UIA_ScrollVerticallyScrollablePropertyId = 30058 # Constant c_int +UIA_LiveRegionChangedEventId = 20024 # Constant c_int +UIA_SelectionSelectionPropertyId = 30059 # Constant c_int +UIA_HostedFragmentRootsInvalidatedEventId = 20025 # Constant c_int +UIA_SelectionCanSelectMultiplePropertyId = 30060 # Constant c_int +UIA_Drag_DragStartEventId = 20026 # Constant c_int +UIA_SelectionIsSelectionRequiredPropertyId = 30061 # Constant c_int +UIA_Drag_DragCancelEventId = 20027 # Constant c_int +UIA_GridRowCountPropertyId = 30062 # Constant c_int +UIA_Drag_DragCompleteEventId = 20028 # Constant c_int +UIA_GridColumnCountPropertyId = 30063 # Constant c_int +UIA_DropTarget_DragEnterEventId = 20029 # Constant c_int +UIA_GridItemRowPropertyId = 30064 # Constant c_int +UIA_DropTarget_DragLeaveEventId = 20030 # Constant c_int +UIA_GridItemColumnPropertyId = 30065 # Constant c_int +IUIAutomationElementArray._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'Length', + ( ['out', 'retval'], POINTER(c_int), 'Length' )), + COMMETHOD([], HRESULT, 'GetElement', + ( ['in'], c_int, 'index' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), +] +################################################################ +## code template for IUIAutomationElementArray implementation +##class IUIAutomationElementArray_Impl(object): +## @property +## def Length(self): +## '-no docstring-' +## #return Length +## +## def GetElement(self, index): +## '-no docstring-' +## #return element +## + +UIA_DropTarget_DroppedEventId = 20031 # Constant c_int +UIA_GridItemRowSpanPropertyId = 30066 # Constant c_int +UIA_TextEdit_TextChangedEventId = 20032 # Constant c_int +UIA_GridItemColumnSpanPropertyId = 30067 # Constant c_int +UIA_TextEdit_ConversionTargetChangedEventId = 20033 # Constant c_int +UIA_GridItemContainingGridPropertyId = 30068 # Constant c_int +UIA_ChangesEventId = 20034 # Constant c_int +UIA_DockDockPositionPropertyId = 30069 # Constant c_int +UIA_NotificationEventId = 20035 # Constant c_int +UIA_ExpandCollapseExpandCollapseStatePropertyId = 30070 # Constant c_int +UIA_ActiveTextPositionChangedEventId = 20036 # Constant c_int +UIA_MultipleViewCurrentViewPropertyId = 30071 # Constant c_int +UIA_IsKeyboardFocusablePropertyId = 30009 # Constant c_int +UIA_MultipleViewSupportedViewsPropertyId = 30072 # Constant c_int +UIA_BoundingRectanglePropertyId = 30001 # Constant c_int +UIA_WindowCanMaximizePropertyId = 30073 # Constant c_int +UIA_ProcessIdPropertyId = 30002 # Constant c_int +UIA_WindowCanMinimizePropertyId = 30074 # Constant c_int +UIA_ControlTypePropertyId = 30003 # Constant c_int +UIA_WindowWindowVisualStatePropertyId = 30075 # Constant c_int +UIA_WindowWindowInteractionStatePropertyId = 30076 # Constant c_int +UIA_LocalizedControlTypePropertyId = 30004 # Constant c_int +UIA_WindowIsModalPropertyId = 30077 # Constant c_int +UIA_NamePropertyId = 30005 # Constant c_int +UIA_AnimationStyleAttributeId = 40000 # Constant c_int +StyleId_Normal = 70012 # Constant c_int +UIA_EditControlTypeId = 50004 # Constant c_int +StyleId_Title = 70010 # Constant c_int +class IUIAutomationSynchronizedInputPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = 
GUID('{2233BE0B-AFB7-448B-9FDA-3B378AA5EAE1}') + _idlflags_ = [] + +# values for enumeration 'SynchronizedInputType' +SynchronizedInputType_KeyUp = 1 +SynchronizedInputType_KeyDown = 2 +SynchronizedInputType_LeftMouseUp = 4 +SynchronizedInputType_LeftMouseDown = 8 +SynchronizedInputType_RightMouseUp = 16 +SynchronizedInputType_RightMouseDown = 32 +SynchronizedInputType = c_int # enum +IUIAutomationSynchronizedInputPattern._methods_ = [ + COMMETHOD([], HRESULT, 'StartListening', + ( ['in'], SynchronizedInputType, 'inputType' )), + COMMETHOD([], HRESULT, 'Cancel'), +] +################################################################ +## code template for IUIAutomationSynchronizedInputPattern implementation +##class IUIAutomationSynchronizedInputPattern_Impl(object): +## def StartListening(self, inputType): +## '-no docstring-' +## #return +## +## def Cancel(self): +## '-no docstring-' +## #return +## + +UIA_ComboBoxControlTypeId = 50003 # Constant c_int +StyleId_Custom = 70000 # Constant c_int +UIA_ImageControlTypeId = 50006 # Constant c_int +AnnotationType_Unknown = 60000 # Constant c_int +UIA_LevelPropertyId = 30154 # Constant c_int +UIA_ListItemControlTypeId = 50007 # Constant c_int +UIA_AnnotationTypesPropertyId = 30155 # Constant c_int +UIA_ListControlTypeId = 50008 # Constant c_int +UIA_AnnotationObjectsPropertyId = 30156 # Constant c_int +UIA_MenuControlTypeId = 50009 # Constant c_int +IUIAutomationChangesEventHandler._methods_ = [ + COMMETHOD([], HRESULT, 'HandleChangesEvent', + ( ['in'], POINTER(IUIAutomationElement), 'sender' ), + ( ['in'], POINTER(UiaChangeInfo), 'uiaChanges' ), + ( ['in'], c_int, 'changesCount' )), +] +################################################################ +## code template for IUIAutomationChangesEventHandler implementation +##class IUIAutomationChangesEventHandler_Impl(object): +## def HandleChangesEvent(self, sender, uiaChanges, changesCount): +## '-no docstring-' +## #return +## + +UIA_LandmarkTypePropertyId = 30157 # Constant c_int +UIA_MenuBarControlTypeId = 50010 # Constant c_int +UIA_LocalizedLandmarkTypePropertyId = 30158 # Constant c_int +UIA_MenuItemControlTypeId = 50011 # Constant c_int +IUIAutomationCacheRequest._methods_ = [ + COMMETHOD([], HRESULT, 'AddProperty', + ( ['in'], c_int, 'propertyId' )), + COMMETHOD([], HRESULT, 'AddPattern', + ( ['in'], c_int, 'patternId' )), + COMMETHOD([], HRESULT, 'Clone', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCacheRequest)), 'clonedRequest' )), + COMMETHOD(['propget'], HRESULT, 'TreeScope', + ( ['out', 'retval'], POINTER(TreeScope), 'scope' )), + COMMETHOD(['propput'], HRESULT, 'TreeScope', + ( ['in'], TreeScope, 'scope' )), + COMMETHOD(['propget'], HRESULT, 'TreeFilter', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationCondition)), 'filter' )), + COMMETHOD(['propput'], HRESULT, 'TreeFilter', + ( ['in'], POINTER(IUIAutomationCondition), 'filter' )), + COMMETHOD(['propget'], HRESULT, 'AutomationElementMode', + ( ['out', 'retval'], POINTER(AutomationElementMode), 'mode' )), + COMMETHOD(['propput'], HRESULT, 'AutomationElementMode', + ( ['in'], AutomationElementMode, 'mode' )), +] +################################################################ +## code template for IUIAutomationCacheRequest implementation +##class IUIAutomationCacheRequest_Impl(object): +## def AddProperty(self, propertyId): +## '-no docstring-' +## #return +## +## def AddPattern(self, patternId): +## '-no docstring-' +## #return +## +## def Clone(self): +## '-no docstring-' +## #return clonedRequest +## +## def _get(self): 
+## '-no docstring-' +## #return scope +## def _set(self, scope): +## '-no docstring-' +## TreeScope = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return filter +## def _set(self, filter): +## '-no docstring-' +## TreeFilter = property(_get, _set, doc = _set.__doc__) +## +## def _get(self): +## '-no docstring-' +## #return mode +## def _set(self, mode): +## '-no docstring-' +## AutomationElementMode = property(_get, _set, doc = _set.__doc__) +## + +UIA_FullDescriptionPropertyId = 30159 # Constant c_int +UIA_ProgressBarControlTypeId = 50012 # Constant c_int +UIA_FillColorPropertyId = 30160 # Constant c_int +UIA_RadioButtonControlTypeId = 50013 # Constant c_int +UIA_OutlineColorPropertyId = 30161 # Constant c_int +UIA_ScrollBarControlTypeId = 50014 # Constant c_int +UIA_FillTypePropertyId = 30162 # Constant c_int +UIA_SliderControlTypeId = 50015 # Constant c_int +UIA_VisualEffectsPropertyId = 30163 # Constant c_int +UIA_SpinnerControlTypeId = 50016 # Constant c_int +UIA_OutlineThicknessPropertyId = 30164 # Constant c_int +UIA_StatusBarControlTypeId = 50017 # Constant c_int +UIA_MultipleViewPatternId = 10008 # Constant c_int +UIA_CenterPointPropertyId = 30165 # Constant c_int +UIA_TabControlTypeId = 50018 # Constant c_int +UIA_RotationPropertyId = 30166 # Constant c_int +UIA_TabItemControlTypeId = 50019 # Constant c_int +UIA_SizePropertyId = 30167 # Constant c_int +UIA_TextControlTypeId = 50020 # Constant c_int +UIA_IsSelectionPattern2AvailablePropertyId = 30168 # Constant c_int +UIA_ToolBarControlTypeId = 50021 # Constant c_int +class IUIAutomationTablePattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{620E691C-EA96-4710-A850-754B24CE2417}') + _idlflags_ = [] + +# values for enumeration 'RowOrColumnMajor' +RowOrColumnMajor_RowMajor = 0 +RowOrColumnMajor_ColumnMajor = 1 +RowOrColumnMajor_Indeterminate = 2 +RowOrColumnMajor = c_int # enum +IUIAutomationTablePattern._methods_ = [ + COMMETHOD([], HRESULT, 'GetCurrentRowHeaders', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCurrentColumnHeaders', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentRowOrColumnMajor', + ( ['out', 'retval'], POINTER(RowOrColumnMajor), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedRowHeaders', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedColumnHeaders', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedRowOrColumnMajor', + ( ['out', 'retval'], POINTER(RowOrColumnMajor), 'retVal' )), +] +################################################################ +## code template for IUIAutomationTablePattern implementation +##class IUIAutomationTablePattern_Impl(object): +## def GetCurrentRowHeaders(self): +## '-no docstring-' +## #return retVal +## +## def GetCurrentColumnHeaders(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentRowOrColumnMajor(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedRowHeaders(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedColumnHeaders(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedRowOrColumnMajor(self): +## '-no docstring-' +## #return retVal +## + +UIA_Selection2FirstSelectedItemPropertyId = 30169 # 
Constant c_int +UIA_ToolTipControlTypeId = 50022 # Constant c_int +UIA_Selection2LastSelectedItemPropertyId = 30170 # Constant c_int +UIA_TreeControlTypeId = 50023 # Constant c_int + +# values for enumeration 'NotificationKind' +NotificationKind_ItemAdded = 0 +NotificationKind_ItemRemoved = 1 +NotificationKind_ActionCompleted = 2 +NotificationKind_ActionAborted = 3 +NotificationKind_Other = 4 +NotificationKind = c_int # enum + +# values for enumeration 'NotificationProcessing' +NotificationProcessing_ImportantAll = 0 +NotificationProcessing_ImportantMostRecent = 1 +NotificationProcessing_All = 2 +NotificationProcessing_MostRecent = 3 +NotificationProcessing_CurrentThenMostRecent = 4 +NotificationProcessing = c_int # enum +IUIAutomationNotificationEventHandler._methods_ = [ + COMMETHOD([], HRESULT, 'HandleNotificationEvent', + ( ['in'], POINTER(IUIAutomationElement), 'sender' ), + ( [], NotificationKind, 'NotificationKind' ), + ( [], NotificationProcessing, 'NotificationProcessing' ), + ( ['in'], BSTR, 'displayString' ), + ( ['in'], BSTR, 'activityId' )), +] +################################################################ +## code template for IUIAutomationNotificationEventHandler implementation +##class IUIAutomationNotificationEventHandler_Impl(object): +## def HandleNotificationEvent(self, sender, NotificationKind, NotificationProcessing, displayString, activityId): +## '-no docstring-' +## #return +## + +ExtendedProperty._fields_ = [ + ('PropertyName', BSTR), + ('PropertyValue', BSTR), +] +assert sizeof(ExtendedProperty) == 16, sizeof(ExtendedProperty) +assert alignment(ExtendedProperty) == 8, alignment(ExtendedProperty) +UIA_Selection2CurrentSelectedItemPropertyId = 30171 # Constant c_int +UIA_TreeItemControlTypeId = 50024 # Constant c_int +UIA_Selection2ItemCountPropertyId = 30172 # Constant c_int +UIA_CustomControlTypeId = 50025 # Constant c_int +class IUIAutomationInvokePattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{FB377FBE-8EA6-46D5-9C73-6499642D3059}') + _idlflags_ = [] +IUIAutomationInvokePattern._methods_ = [ + COMMETHOD([], HRESULT, 'Invoke'), +] +################################################################ +## code template for IUIAutomationInvokePattern implementation +##class IUIAutomationInvokePattern_Impl(object): +## def Invoke(self): +## '-no docstring-' +## #return +## + +UIA_HeadingLevelPropertyId = 30173 # Constant c_int +UIA_GroupControlTypeId = 50026 # Constant c_int +UIA_IsDialogPropertyId = 30174 # Constant c_int +UIA_ThumbControlTypeId = 50027 # Constant c_int +UIA_ButtonControlTypeId = 50000 # Constant c_int +UIA_DataGridControlTypeId = 50028 # Constant c_int +UIA_BackgroundColorAttributeId = 40001 # Constant c_int +UIA_DataItemControlTypeId = 50029 # Constant c_int +UIA_BulletStyleAttributeId = 40002 # Constant c_int +UIA_DocumentControlTypeId = 50030 # Constant c_int +UIA_CapStyleAttributeId = 40003 # Constant c_int +UIA_SplitButtonControlTypeId = 50031 # Constant c_int +UIA_CultureAttributeId = 40004 # Constant c_int +UIA_WindowControlTypeId = 50032 # Constant c_int +UIA_FontNameAttributeId = 40005 # Constant c_int +UIA_PaneControlTypeId = 50033 # Constant c_int +UIA_FontSizeAttributeId = 40006 # Constant c_int +UIA_HeaderControlTypeId = 50034 # Constant c_int +UIA_FontWeightAttributeId = 40007 # Constant c_int +UIA_HeaderItemControlTypeId = 50035 # Constant c_int +UIA_ForegroundColorAttributeId = 40008 # Constant c_int +UIA_TableControlTypeId = 50036 # Constant c_int 
+UIA_HorizontalTextAlignmentAttributeId = 40009 # Constant c_int +UIA_TitleBarControlTypeId = 50037 # Constant c_int +UIA_IndentationFirstLineAttributeId = 40010 # Constant c_int +UIA_SeparatorControlTypeId = 50038 # Constant c_int +UIA_IndentationLeadingAttributeId = 40011 # Constant c_int +UIA_SemanticZoomControlTypeId = 50039 # Constant c_int +UIA_IndentationTrailingAttributeId = 40012 # Constant c_int +UIA_AppBarControlTypeId = 50040 # Constant c_int +UIA_IsHiddenAttributeId = 40013 # Constant c_int +StyleId_Subtitle = 70011 # Constant c_int +UIA_IsItalicAttributeId = 40014 # Constant c_int +AnnotationType_SpellingError = 60001 # Constant c_int +UIA_IsReadOnlyAttributeId = 40015 # Constant c_int +AnnotationType_GrammarError = 60002 # Constant c_int +UIA_IsSubscriptAttributeId = 40016 # Constant c_int +AnnotationType_Comment = 60003 # Constant c_int +UIA_IsSuperscriptAttributeId = 40017 # Constant c_int +AnnotationType_FormulaError = 60004 # Constant c_int +UIA_MarginBottomAttributeId = 40018 # Constant c_int +AnnotationType_TrackChanges = 60005 # Constant c_int +UIA_MarginLeadingAttributeId = 40019 # Constant c_int +AnnotationType_Header = 60006 # Constant c_int +UIA_MarginTopAttributeId = 40020 # Constant c_int +AnnotationType_Footer = 60007 # Constant c_int +class IUIAutomationDockPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{FDE5EF97-1464-48F6-90BF-43D0948E86EC}') + _idlflags_ = [] + +# values for enumeration 'DockPosition' +DockPosition_Top = 0 +DockPosition_Left = 1 +DockPosition_Bottom = 2 +DockPosition_Right = 3 +DockPosition_Fill = 4 +DockPosition_None = 5 +DockPosition = c_int # enum +IUIAutomationDockPattern._methods_ = [ + COMMETHOD([], HRESULT, 'SetDockPosition', + ( ['in'], DockPosition, 'dockPos' )), + COMMETHOD(['propget'], HRESULT, 'CurrentDockPosition', + ( ['out', 'retval'], POINTER(DockPosition), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedDockPosition', + ( ['out', 'retval'], POINTER(DockPosition), 'retVal' )), +] +################################################################ +## code template for IUIAutomationDockPattern implementation +##class IUIAutomationDockPattern_Impl(object): +## def SetDockPosition(self, dockPos): +## '-no docstring-' +## #return +## +## @property +## def CurrentDockPosition(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedDockPosition(self): +## '-no docstring-' +## #return retVal +## + +UIA_MarginTrailingAttributeId = 40021 # Constant c_int +AnnotationType_Highlighted = 60008 # Constant c_int +class IUIAutomationScrollItemPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{B488300F-D015-4F19-9C29-BB595E3645EF}') + _idlflags_ = [] +IUIAutomationScrollItemPattern._methods_ = [ + COMMETHOD([], HRESULT, 'ScrollIntoView'), +] +################################################################ +## code template for IUIAutomationScrollItemPattern implementation +##class IUIAutomationScrollItemPattern_Impl(object): +## def ScrollIntoView(self): +## '-no docstring-' +## #return +## + +class IUIAutomationTableItemPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{0B964EB3-EF2E-4464-9C79-61D61737A27E}') + _idlflags_ = [] +IUIAutomationTableItemPattern._methods_ = [ + COMMETHOD([], HRESULT, 'GetCurrentRowHeaderItems', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + 
COMMETHOD([], HRESULT, 'GetCurrentColumnHeaderItems', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedRowHeaderItems', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedColumnHeaderItems', + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElementArray)), 'retVal' )), +] +################################################################ +## code template for IUIAutomationTableItemPattern implementation +##class IUIAutomationTableItemPattern_Impl(object): +## def GetCurrentRowHeaderItems(self): +## '-no docstring-' +## #return retVal +## +## def GetCurrentColumnHeaderItems(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedRowHeaderItems(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedColumnHeaderItems(self): +## '-no docstring-' +## #return retVal +## + +UIA_OutlineStylesAttributeId = 40022 # Constant c_int +AnnotationType_Endnote = 60009 # Constant c_int +UIA_OverlineColorAttributeId = 40023 # Constant c_int +AnnotationType_Footnote = 60010 # Constant c_int +UIA_GridPatternId = 10006 # Constant c_int +UIA_OverlineStyleAttributeId = 40024 # Constant c_int +AnnotationType_InsertionChange = 60011 # Constant c_int +UIA_StrikethroughColorAttributeId = 40025 # Constant c_int +AnnotationType_DeletionChange = 60012 # Constant c_int +UIA_GridItemPatternId = 10007 # Constant c_int +UIA_StrikethroughStyleAttributeId = 40026 # Constant c_int +AnnotationType_MoveChange = 60013 # Constant c_int +UIA_TabsAttributeId = 40027 # Constant c_int +AnnotationType_FormatChange = 60014 # Constant c_int +UIA_TextFlowDirectionsAttributeId = 40028 # Constant c_int +AnnotationType_UnsyncedChange = 60015 # Constant c_int +UIA_UnderlineColorAttributeId = 40029 # Constant c_int +AnnotationType_EditingLockedChange = 60016 # Constant c_int +UIA_UnderlineStyleAttributeId = 40030 # Constant c_int +AnnotationType_ExternalChange = 60017 # Constant c_int +class IUIAutomationSpreadsheetPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{7517A7C8-FAAE-4DE9-9F08-29B91E8595C1}') + _idlflags_ = [] +IUIAutomationSpreadsheetPattern._methods_ = [ + COMMETHOD([], HRESULT, 'GetItemByName', + ( ['in'], BSTR, 'name' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'element' )), +] +################################################################ +## code template for IUIAutomationSpreadsheetPattern implementation +##class IUIAutomationSpreadsheetPattern_Impl(object): +## def GetItemByName(self, name): +## '-no docstring-' +## #return element +## + +UIA_AnnotationTypesAttributeId = 40031 # Constant c_int +AnnotationType_ConflictingChange = 60018 # Constant c_int +UIA_WindowPatternId = 10009 # Constant c_int +UIA_AnnotationObjectsAttributeId = 40032 # Constant c_int +AnnotationType_Author = 60019 # Constant c_int +UIA_StyleNameAttributeId = 40033 # Constant c_int +AnnotationType_AdvancedProofingIssue = 60020 # Constant c_int +class IUIAutomationExpandCollapsePattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{619BE086-1F4E-4EE4-BAFA-210128738730}') + _idlflags_ = [] +IUIAutomationExpandCollapsePattern._methods_ = [ + COMMETHOD([], HRESULT, 'Expand'), + COMMETHOD([], HRESULT, 'Collapse'), + COMMETHOD(['propget'], HRESULT, 'CurrentExpandCollapseState', + ( ['out', 'retval'], POINTER(ExpandCollapseState), 'retVal' )), + 
COMMETHOD(['propget'], HRESULT, 'CachedExpandCollapseState', + ( ['out', 'retval'], POINTER(ExpandCollapseState), 'retVal' )), +] +################################################################ +## code template for IUIAutomationExpandCollapsePattern implementation +##class IUIAutomationExpandCollapsePattern_Impl(object): +## def Expand(self): +## '-no docstring-' +## #return +## +## def Collapse(self): +## '-no docstring-' +## #return +## +## @property +## def CurrentExpandCollapseState(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedExpandCollapseState(self): +## '-no docstring-' +## #return retVal +## + +UIA_StyleIdAttributeId = 40034 # Constant c_int +AnnotationType_DataValidationError = 60021 # Constant c_int +UIA_LinkAttributeId = 40035 # Constant c_int +AnnotationType_CircularReferenceError = 60022 # Constant c_int +UIA_IsActiveAttributeId = 40036 # Constant c_int +AnnotationType_Mathematics = 60023 # Constant c_int +UIA_SelectionActiveEndAttributeId = 40037 # Constant c_int +AnnotationType_Sensitive = 60024 # Constant c_int +UIA_ValuePatternId = 10002 # Constant c_int +UIA_CaretPositionAttributeId = 40038 # Constant c_int +StyleId_Heading9 = 70009 # Constant c_int +UIA_CaretBidiModeAttributeId = 40039 # Constant c_int +StyleId_Heading1 = 70001 # Constant c_int +UIA_LineSpacingAttributeId = 40040 # Constant c_int +StyleId_Heading2 = 70002 # Constant c_int +UIA_BeforeParagraphSpacingAttributeId = 40041 # Constant c_int +StyleId_Heading3 = 70003 # Constant c_int +class IUIAutomationTogglePattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{94CF8058-9B8D-4AB9-8BFD-4CD0A33C8C70}') + _idlflags_ = [] +IUIAutomationTogglePattern._methods_ = [ + COMMETHOD([], HRESULT, 'Toggle'), + COMMETHOD(['propget'], HRESULT, 'CurrentToggleState', + ( ['out', 'retval'], POINTER(ToggleState), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedToggleState', + ( ['out', 'retval'], POINTER(ToggleState), 'retVal' )), +] +################################################################ +## code template for IUIAutomationTogglePattern implementation +##class IUIAutomationTogglePattern_Impl(object): +## def Toggle(self): +## '-no docstring-' +## #return +## +## @property +## def CurrentToggleState(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedToggleState(self): +## '-no docstring-' +## #return retVal +## + +UIA_AfterParagraphSpacingAttributeId = 40042 # Constant c_int +StyleId_Heading4 = 70004 # Constant c_int +UIA_SayAsInterpretAsAttributeId = 40043 # Constant c_int +StyleId_Heading5 = 70005 # Constant c_int +UIA_RangeValuePatternId = 10003 # Constant c_int +UIA_HyperlinkControlTypeId = 50005 # Constant c_int +StyleId_Heading6 = 70006 # Constant c_int +UIA_ScrollPatternId = 10004 # Constant c_int +UIA_CalendarControlTypeId = 50001 # Constant c_int +StyleId_Heading7 = 70007 # Constant c_int +UIA_ExpandCollapsePatternId = 10005 # Constant c_int +UIA_CheckBoxControlTypeId = 50002 # Constant c_int +StyleId_Heading8 = 70008 # Constant c_int +class IUIAutomationMultipleViewPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{8D253C91-1DC5-4BB5-B18F-ADE16FA495E8}') + _idlflags_ = [] +IUIAutomationMultipleViewPattern._methods_ = [ + COMMETHOD([], HRESULT, 'GetViewName', + ( ['in'], c_int, 'view' ), + ( ['out', 'retval'], POINTER(BSTR), 'name' )), + COMMETHOD([], HRESULT, 'SetCurrentView', + ( ['in'], c_int, 'view' )), 
+ COMMETHOD(['propget'], HRESULT, 'CurrentCurrentView', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCurrentSupportedViews', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_int)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedCurrentView', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD([], HRESULT, 'GetCachedSupportedViews', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(c_int)), 'retVal' )), +] +################################################################ +## code template for IUIAutomationMultipleViewPattern implementation +##class IUIAutomationMultipleViewPattern_Impl(object): +## def GetViewName(self, view): +## '-no docstring-' +## #return name +## +## def SetCurrentView(self, view): +## '-no docstring-' +## #return +## +## @property +## def CurrentCurrentView(self): +## '-no docstring-' +## #return retVal +## +## def GetCurrentSupportedViews(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedCurrentView(self): +## '-no docstring-' +## #return retVal +## +## def GetCachedSupportedViews(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationDropTargetPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{69A095F7-EEE4-430E-A46B-FB73B1AE39A5}') + _idlflags_ = [] +IUIAutomationDropTargetPattern._methods_ = [ + COMMETHOD(['propget'], HRESULT, 'CurrentDropTargetEffect', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedDropTargetEffect', + ( ['out', 'retval'], POINTER(BSTR), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentDropTargetEffects', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(BSTR)), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedDropTargetEffects', + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(BSTR)), 'retVal' )), +] +################################################################ +## code template for IUIAutomationDropTargetPattern implementation +##class IUIAutomationDropTargetPattern_Impl(object): +## @property +## def CurrentDropTargetEffect(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedDropTargetEffect(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentDropTargetEffects(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedDropTargetEffects(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationObjectModelPattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{71C284B3-C14D-4D14-981E-19751B0D756D}') + _idlflags_ = [] +IUIAutomationObjectModelPattern._methods_ = [ + COMMETHOD([], HRESULT, 'GetUnderlyingObjectModel', + ( ['out', 'retval'], POINTER(POINTER(IUnknown)), 'retVal' )), +] +################################################################ +## code template for IUIAutomationObjectModelPattern implementation +##class IUIAutomationObjectModelPattern_Impl(object): +## def GetUnderlyingObjectModel(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationRangeValuePattern(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IUnknown): + _case_insensitive_ = True + _iid_ = GUID('{59213F4F-7346-49E5-B120-80555987A148}') + _idlflags_ = [] +IUIAutomationRangeValuePattern._methods_ = [ + COMMETHOD([], HRESULT, 'SetValue', + ( ['in'], c_double, 'val' )), + COMMETHOD(['propget'], HRESULT, 'CurrentValue', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + 
COMMETHOD(['propget'], HRESULT, 'CurrentIsReadOnly', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentMaximum', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentMinimum', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentLargeChange', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CurrentSmallChange', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedValue', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedIsReadOnly', + ( ['out', 'retval'], POINTER(c_int), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedMaximum', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedMinimum', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedLargeChange', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), + COMMETHOD(['propget'], HRESULT, 'CachedSmallChange', + ( ['out', 'retval'], POINTER(c_double), 'retVal' )), +] +################################################################ +## code template for IUIAutomationRangeValuePattern implementation +##class IUIAutomationRangeValuePattern_Impl(object): +## def SetValue(self, val): +## '-no docstring-' +## #return +## +## @property +## def CurrentValue(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentIsReadOnly(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentMaximum(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentMinimum(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentLargeChange(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CurrentSmallChange(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedValue(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedIsReadOnly(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedMaximum(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedMinimum(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedLargeChange(self): +## '-no docstring-' +## #return retVal +## +## @property +## def CachedSmallChange(self): +## '-no docstring-' +## #return retVal +## + +class IUIAutomationTextRange2(IUIAutomationTextRange): + _case_insensitive_ = True + _iid_ = GUID('{BB9B40E0-5E04-46BD-9BE0-4B601B9AFAD4}') + _idlflags_ = [] +IUIAutomationTextRange2._methods_ = [ + COMMETHOD([], HRESULT, 'ShowContextMenu'), +] +################################################################ +## code template for IUIAutomationTextRange2 implementation +##class IUIAutomationTextRange2_Impl(object): +## def ShowContextMenu(self): +## '-no docstring-' +## #return +## + +class IUIAutomationTextRange3(IUIAutomationTextRange2): + _case_insensitive_ = True + _iid_ = GUID('{6A315D69-5512-4C2E-85F0-53FCE6DD4BC2}') + _idlflags_ = [] +IUIAutomationTextRange3._methods_ = [ + COMMETHOD([], HRESULT, 'GetEnclosingElementBuildCache', + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], POINTER(POINTER(IUIAutomationElement)), 'enclosingElement' )), + COMMETHOD([], HRESULT, 'GetChildrenBuildCache', + ( ['in'], POINTER(IUIAutomationCacheRequest), 'cacheRequest' ), + ( ['out', 'retval'], 
POINTER(POINTER(IUIAutomationElementArray)), 'children' )), + COMMETHOD([], HRESULT, 'GetAttributeValues', + ( ['in'], POINTER(c_int), 'attributeIds' ), + ( ['in'], c_int, 'attributeIdCount' ), + ( ['out', 'retval'], POINTER(_midlSAFEARRAY(VARIANT)), 'attributeValues' )), +] +################################################################ +## code template for IUIAutomationTextRange3 implementation +##class IUIAutomationTextRange3_Impl(object): +## def GetEnclosingElementBuildCache(self, cacheRequest): +## '-no docstring-' +## #return enclosingElement +## +## def GetChildrenBuildCache(self, cacheRequest): +## '-no docstring-' +## #return children +## +## def GetAttributeValues(self, attributeIds, attributeIdCount): +## '-no docstring-' +## #return attributeValues +## + +__all__ = [ 'CoalesceEventsOptions_Disabled', + 'UIA_ItemContainerPatternId', + 'UIA_InputReachedTargetEventId', 'HeadingLevel_None', + 'UIA_ItemTypePropertyId', 'UIA_HeaderControlTypeId', + 'UIA_SelectionItem_ElementAddedToSelectionEventId', + 'StructureChangeType_ChildrenBulkAdded', + 'OrientationType_Horizontal', 'AnnotationType_Highlighted', + 'UIA_IsTextPattern2AvailablePropertyId', + 'IUIAutomationTransformPattern2', + 'IUIAutomationSelectionPattern2', + 'UIA_SemanticZoomControlTypeId', + 'UIA_IsCustomNavigationPatternAvailablePropertyId', + 'UIA_LegacyIAccessibleValuePropertyId', + 'UIA_MenuModeEndEventId', 'UIA_ValuePatternId', + 'UIA_LandmarkTypePropertyId', 'UIA_LinkAttributeId', + 'StructureChangeType_ChildrenInvalidated', + 'StyleId_Heading9', + 'UIA_IsSelectionPattern2AvailablePropertyId', + 'UIA_LegacyIAccessibleChildIdPropertyId', + 'UIA_ScrollPatternId', + 'UIA_BeforeParagraphSpacingAttributeId', + 'IUIAutomationTextPattern', + 'UIA_TextEdit_TextChangedEventId', + 'IUIAutomationNotCondition', 'TextUnit_Word', + 'UIA_LegacyIAccessibleStatePropertyId', + 'UIA_MarginBottomAttributeId', 'Off', + 'NotificationKind_ItemAdded', + 'IUIAutomationItemContainerPattern', + 'ProviderOptions_ProviderOwnsSetFocus', + 'SupportedTextSelection_Single', 'UIA_TextEditPatternId', + 'UIA_AccessKeyPropertyId', 'NotificationKind_ItemRemoved', + 'UIA_IsLegacyIAccessiblePatternAvailablePropertyId', + 'ConnectionRecoveryBehaviorOptions_Enabled', + 'SynchronizedInputType', 'UIA_TitleBarControlTypeId', + 'ProviderOptions_ClientSideProvider', + 'NavigateDirection_FirstChild', 'ExtendedProperty', + 'Polite', 'UIA_SayAsInterpretAsMetadataId', + 'UIA_IsTableItemPatternAvailablePropertyId', + 'UIA_DropTarget_DroppedEventId', 'HeadingLevel9', + 'UIA_TabControlTypeId', 'UIA_CustomLandmarkTypeId', + 'AutomationElementMode_None', 'AnnotationType_MoveChange', + 'UIA_TreeItemControlTypeId', 'UIA_TreeControlTypeId', + 'UIA_StylesFillColorPropertyId', + 'AnnotationType_EditingLockedChange', + 'UIA_IsPasswordPropertyId', + 'SynchronizedInputType_LeftMouseDown', + 'TextPatternRangeEndpoint_Start', + 'UIA_WindowCanMinimizePropertyId', + 'UIA_OutlineColorPropertyId', + 'IUIAutomationTextChildPattern', + 'UIA_BulletStyleAttributeId', + 'UIA_MarginLeadingAttributeId', + 'IUIAutomationBoolCondition', + 'UIA_AnnotationTypesPropertyId', 'HeadingLevel7', + 'UIA_ToggleToggleStatePropertyId', + 'ScrollAmount_LargeIncrement', 'HeadingLevel3', + 'UIA_IsTextEditPatternAvailablePropertyId', + 'UIA_IsInvokePatternAvailablePropertyId', + 'UIA_ScrollVerticalViewSizePropertyId', 'IUIAutomation', + 'WindowInteractionState_Closing', + 'IUIAutomationCustomNavigationPattern', + 'ExpandCollapseState_LeafNode', + 'UIA_IsTextPatternAvailablePropertyId', + 
'UIA_AriaRolePropertyId', 'StyleId_Heading5', + 'UIA_SpinnerControlTypeId', 'TextUnit_Page', + 'IUIAutomationDropTargetPattern', + 'UIA_IsRequiredForFormPropertyId', + 'SynchronizedInputType_KeyDown', 'IUIAutomationElement2', + 'TreeTraversalOptions_LastToFirstOrder', + 'ScrollAmount_LargeDecrement', + 'IUIAutomationEventHandlerGroup', + 'UIA_ExpandCollapseExpandCollapseStatePropertyId', + 'IUIAutomationVirtualizedItemPattern', 'ProviderOptions', + 'UIA_GridItemRowSpanPropertyId', + 'ScrollAmount_SmallIncrement', 'UIA_ToolBarControlTypeId', + 'UIA_MultipleViewCurrentViewPropertyId', + 'UIA_WindowControlTypeId', 'OrientationType_None', + 'IUIAutomationPropertyCondition', + 'UIA_AutomationFocusChangedEventId', + 'PropertyConditionFlags_IgnoreCase', 'RowOrColumnMajor', + 'NotificationKind_ActionCompleted', + 'IUIAutomationProxyFactoryEntry', + 'UIA_DropTargetDropTargetEffectPropertyId', + 'IUIAutomationDragPattern', 'DockPosition_Right', + 'PropertyConditionFlags_MatchSubstring', + 'UIA_LocalizedLandmarkTypePropertyId', + 'UIA_StatusBarControlTypeId', + 'UIA_SpreadsheetItemFormulaPropertyId', + 'StructureChangeType_ChildrenReordered', + 'UIA_AsyncContentLoadedEventId', 'TreeTraversalOptions', + 'UIA_SpreadsheetItemAnnotationObjectsPropertyId', + 'UIA_TablePatternId', 'UIA_SummaryChangeId', + 'IUIAutomationProxyFactory', + 'UIA_IsDockPatternAvailablePropertyId', + 'UIA_Selection_InvalidatedEventId', + 'UIA_SelectionCanSelectMultiplePropertyId', + 'AnnotationType_SpellingError', 'TextUnit_Line', + 'AnnotationType_ExternalChange', + 'UIA_LegacyIAccessibleDescriptionPropertyId', + 'ProviderOptions_RefuseNonClientSupport', + 'IUIAutomationElement9', 'IUIAutomationAndCondition', + 'HeadingLevel2', 'UIA_RuntimeIdPropertyId', + 'SynchronizedInputType_KeyUp', + 'AnnotationType_FormulaError', + 'UIA_AnnotationAnnotationTypeNamePropertyId', + 'UIA_TextChildPatternId', 'UIA_MenuControlTypeId', + 'UIA_CustomControlTypeId', 'UIA_NamePropertyId', + 'UIA_LegacyIAccessibleSelectionPropertyId', + 'UIA_LabeledByPropertyId', 'TreeTraversalOptions_Default', + 'IUIAutomationGridItemPattern', + 'IUIAutomationTextEditPattern', + 'UIA_DragDropEffectsPropertyId', + 'IUIAutomationGridPattern', 'UIA_OverlineColorAttributeId', + 'UIA_MarginTopAttributeId', 'ToggleState_On', + 'UIA_Drag_DragCancelEventId', + 'IUIAutomationSynchronizedInputPattern', + 'UIA_SizePropertyId', 'IUIAutomation3', 'IUIAutomation2', + 'IUIAutomationTextRange3', 'UIA_AcceleratorKeyPropertyId', + 'NotificationProcessing_All', + 'UIA_PositionInSetPropertyId', + 'UIA_ScrollVerticallyScrollablePropertyId', + 'UIA_ItemStatusPropertyId', + 'ProviderOptions_NonClientAreaProvider', + 'UIA_IsItalicAttributeId', 'DockPosition_Bottom', + 'StyleId_Title', 'IUIAutomationChangesEventHandler', + 'UIA_StrikethroughStyleAttributeId', + 'IUIAutomationElement', + 'ProviderOptions_UseClientCoordinates', 'IUIAutomation4', + 'UIA_DragGrabbedItemsPropertyId', + 'ZoomUnit_LargeDecrement', 'UIA_DragPatternId', + 'StyleId_Heading7', + 'UIA_ScrollHorizontalViewSizePropertyId', + 'UIA_UnderlineColorAttributeId', + 'UIA_MultipleViewSupportedViewsPropertyId', + 'UIA_Text_TextChangedEventId', + 'UIA_SelectionItem_ElementRemovedFromSelectionEventId', + 'UIA_TabItemControlTypeId', + 'UIA_AutomationPropertyChangedEventId', + 'UIA_MenuOpenedEventId', 'UIA_DocumentControlTypeId', + 'SupportedTextSelection_Multiple', + 'UIA_ProviderDescriptionPropertyId', + 'UIA_OutlineStylesAttributeId', 'StyleId_NumberedList', + 'UIA_SliderControlTypeId', 'UIA_LineSpacingAttributeId', + 
'IUIAutomationTextRangeArray', 'UIA_Invoke_InvokedEventId', + 'UIA_PaneControlTypeId', 'IUIAutomationStylesPattern', + 'UIA_StylesFillPatternStylePropertyId', + 'UIA_IsPeripheralPropertyId', + 'UIA_DropTarget_DragLeaveEventId', + 'ConnectionRecoveryBehaviorOptions_Disabled', + 'UIA_ExpandCollapsePatternId', + 'UIA_SeparatorControlTypeId', + 'UIA_IsItemContainerPatternAvailablePropertyId', + 'ScrollAmount', 'StyleId_Heading2', + 'TextPatternRangeEndpoint_End', 'UIA_DropTargetPatternId', + 'UIA_ProgressBarControlTypeId', 'StyleId_Subtitle', + 'AnnotationType_InsertionChange', + 'ZoomUnit_SmallIncrement', + 'UIA_Transform2CanZoomPropertyId', + 'IUIAutomationTogglePattern', 'UIA_IsOffscreenPropertyId', + 'SynchronizedInputType_RightMouseDown', + 'IUIAutomationSpreadsheetItemPattern', + 'IUIAutomationTableItemPattern', + 'IUIAutomationStructureChangedEventHandler', + 'IUIAutomationCondition', + 'NotificationProcessing_CurrentThenMostRecent', + 'StyleId_Normal', + 'UIA_IsTransformPattern2AvailablePropertyId', + 'IUIAutomationElement8', 'UIA_Window_WindowClosedEventId', + 'TextUnit_Character', 'UIA_ProcessIdPropertyId', + 'UIA_UnderlineStyleAttributeId', + 'UIA_Selection2FirstSelectedItemPropertyId', + 'UIA_OrientationPropertyId', 'UIA_CulturePropertyId', + 'DockPosition_Top', 'TextPatternRangeEndpoint', + 'IUIAutomationMultipleViewPattern', 'UiaChangeInfo', + 'IUIAutomationNotificationEventHandler', + 'NotificationKind', + 'IUIAutomationTextEditTextChangedEventHandler', + 'UIA_AnnotationTargetPropertyId', 'StyleId_Custom', + 'UIA_LegacyIAccessibleNamePropertyId', + 'UIA_FrameworkIdPropertyId', + 'StructureChangeType_ChildrenBulkRemoved', + 'UIA_IsDropTargetPatternAvailablePropertyId', + 'UIA_IsScrollPatternAvailablePropertyId', + 'UIA_SystemAlertEventId', 'IUIAutomationSelectionPattern', + 'UIA_TransformPatternId', 'TreeScope_Descendants', + 'UIA_IsExpandCollapsePatternAvailablePropertyId', + 'UIA_FlowsFromPropertyId', + 'UIA_Selection2CurrentSelectedItemPropertyId', + 'TreeScope_Parent', 'UIA_StrikethroughColorAttributeId', + 'IUIAutomationTablePattern', + 'UIA_CaretPositionAttributeId', + 'IUIAutomationRangeValuePattern', + 'UIA_ObjectModelPatternId', + 'UIA_TableRowOrColumnMajorPropertyId', + 'AnnotationType_Footnote', 'UIA_DataItemControlTypeId', + 'UIA_SpreadsheetItemPatternId', + 'UIA_RangeValueMinimumPropertyId', + 'UIA_AnnotationPatternId', + 'UIA_ScrollHorizontallyScrollablePropertyId', + 'UIA_Text_TextSelectionChangedEventId', + 'WindowInteractionState_ReadyForUserInteraction', + 'AutomationElementMode_Full', + 'UIA_BackgroundColorAttributeId', 'TreeScope_None', + 'UIA_TransformPattern2Id', + 'UIA_IsDragPatternAvailablePropertyId', + 'WindowVisualState_Maximized', 'TextUnit_Paragraph', + 'UIA_WindowPatternId', 'IUIAutomationObjectModelPattern', + 'WindowVisualState_Minimized', 'UIA_ListItemControlTypeId', + 'ProviderOptions_ServerSideProvider', + 'UIA_AnnotationAuthorPropertyId', + 'UIA_TextFlowDirectionsAttributeId', + 'UIA_IsSpreadsheetPatternAvailablePropertyId', + 'IUIAutomationElement4', + 'UIA_AfterParagraphSpacingAttributeId', + 'SupportedTextSelection', 'UIA_IsHiddenAttributeId', + 'UIA_MenuItemControlTypeId', + 'UIA_StylesExtendedPropertiesPropertyId', + 'UIA_IsTablePatternAvailablePropertyId', + 'UIA_IsTextChildPatternAvailablePropertyId', + 'UIA_DockDockPositionPropertyId', + 'UIA_MenuBarControlTypeId', 'UIA_SizeOfSetPropertyId', + 'UIA_OutlineThicknessPropertyId', 'UIA_FlowsToPropertyId', + 'IUIAutomationCacheRequest', 'UIA_LevelPropertyId', + 'UIA_VisualEffectsPropertyId', 
'UIA_SelectionPatternId', + 'UIA_Transform2ZoomMaximumPropertyId', + 'UIA_HasKeyboardFocusPropertyId', + 'IUIAutomationTreeWalker', 'UIA_GridItemRowPropertyId', + 'ToggleState_Indeterminate', 'AnnotationType_Header', + 'UIA_GridRowCountPropertyId', + 'NavigateDirection_PreviousSibling', + 'IUIAutomationSpreadsheetPattern', + 'AnnotationType_Comment', 'CUIAutomation8', + 'UIA_AnnotationTypesAttributeId', + 'UIA_IsValuePatternAvailablePropertyId', + 'UIA_OptimizeForVisualContentPropertyId', + 'UIA_MenuClosedEventId', + 'UIA_IsObjectModelPatternAvailablePropertyId', + 'UIA_IsKeyboardFocusablePropertyId', + 'UIA_FontSizeAttributeId', 'UIA_NavigationLandmarkTypeId', + 'UIA_AppBarControlTypeId', 'TextUnit_Format', + 'IUIAutomationOrCondition', 'ZoomUnit_LargeIncrement', + 'UIA_Transform2ZoomMinimumPropertyId', 'OrientationType', + 'UIA_SayAsInterpretAsAttributeId', + 'UIA_LayoutInvalidatedEventId', + 'UIA_RangeValueIsReadOnlyPropertyId', 'StyleId_Heading6', + 'UIA_GridItemPatternId', 'ZoomUnit', + 'NotificationProcessing_MostRecent', + 'UIA_IsSelectionPatternAvailablePropertyId', + 'UIA_WindowWindowInteractionStatePropertyId', + 'IUIAutomationTransformPattern', + 'IUIAutomationElementArray', 'ZoomUnit_SmallDecrement', + 'StructureChangeType_ChildRemoved', + 'AnnotationType_DeletionChange', + 'UIA_IsGridPatternAvailablePropertyId', + 'IUIAutomationLegacyIAccessiblePattern', + 'UIA_StylesStyleIdPropertyId', + 'UIA_StylesStyleNamePropertyId', + 'UIA_IsTogglePatternAvailablePropertyId', + 'ExpandCollapseState', 'UIA_TogglePatternId', + 'UIA_ThumbControlTypeId', + 'ProviderOptions_HasNativeIAccessible', + 'UIA_SearchLandmarkTypeId', + 'CoalesceEventsOptions_Enabled', + 'RowOrColumnMajor_RowMajor', + 'UIA_SelectionIsSelectionRequiredPropertyId', + 'IUIAutomationScrollPattern', 'AnnotationType_Unknown', + 'UIA_HeaderItemControlTypeId', + 'UIA_SelectionActiveEndAttributeId', 'TreeScope_Ancestors', + 'IUIAutomationFocusChangedEventHandler', + 'UIA_GridColumnCountPropertyId', 'UIA_TextPattern2Id', + 'UIA_StylesFillPatternColorPropertyId', + 'UIA_WindowIsTopmostPropertyId', + 'UIA_FontWeightAttributeId', 'UIA_CheckBoxControlTypeId', + 'SynchronizedInputType_RightMouseUp', + 'UIA_WindowIsModalPropertyId', 'TreeScope_Children', + 'UIA_IsSynchronizedInputPatternAvailablePropertyId', + 'UIA_TableItemRowHeaderItemsPropertyId', + 'UIA_TableItemColumnHeaderItemsPropertyId', + 'UIA_TextControlTypeId', 'UIA_MultipleViewPatternId', + 'DockPosition_Fill', 'UIA_LegacyIAccessibleHelpPropertyId', + 'NavigateDirection', + 'UIA_IsAnnotationPatternAvailablePropertyId', + 'TextEditChangeType_AutoComplete', + 'PropertyConditionFlags_None', + 'UIA_IsDataValidForFormPropertyId', + 'IRawElementProviderSimple', + 'NotificationKind_ActionAborted', 'UIA_FormLandmarkTypeId', + 'TreeScope_Subtree', 'StructureChangeType', + 'UIA_TextPatternId', 'NotificationKind_Other', + 'ConnectionRecoveryBehaviorOptions', + 'IUIAutomationValuePattern', + 'AnnotationType_DataValidationError', + 'UIA_SelectionItemIsSelectedPropertyId', + 'UIA_IndentationFirstLineAttributeId', + 'UIA_ScrollBarControlTypeId', 'IUIAutomation6', + 'UIA_ClassNamePropertyId', 'UIA_ValueIsReadOnlyPropertyId', + 'CUIAutomation', 'UIA_DragIsGrabbedPropertyId', + 'IUIAutomationPropertyChangedEventHandler', + 'UIA_WindowCanMaximizePropertyId', + 'UIA_RangeValueLargeChangePropertyId', 'StyleId_Heading4', + 'UIA_LegacyIAccessiblePatternId', + 'UIA_AnnotationAnnotationTypeIdPropertyId', + 'UIA_ValueValuePropertyId', 'IAccessible', + 'UIA_IsSuperscriptAttributeId', + 
'UIA_SelectionItemPatternId', + 'UIA_TableRowHeadersPropertyId', + 'UIA_SplitButtonControlTypeId', 'UIA_ButtonControlTypeId', + 'TextEditChangeType', 'UIA_MainLandmarkTypeId', + 'ExpandCollapseState_PartiallyExpanded', + 'UIA_CalendarControlTypeId', 'UIA_HeadingLevelPropertyId', + 'HeadingLevel8', 'UIA_TransformCanRotatePropertyId', + 'IUIAutomationProxyFactoryMapping', + 'UIA_DataGridControlTypeId', 'DockPosition_Left', + 'UIA_SelectionItem_ElementSelectedEventId', + 'ZoomUnit_NoAmount', 'NavigateDirection_Parent', + 'StyleId_Emphasis', 'UIA_InvokePatternId', + 'UIA_IsRangeValuePatternAvailablePropertyId', + 'AnnotationType_FormatChange', + 'UIA_IsSpreadsheetItemPatternAvailablePropertyId', + 'UIA_IsVirtualizedItemPatternAvailablePropertyId', + 'ExpandCollapseState_Collapsed', + 'NavigateDirection_LastChild', 'IUIAutomationTextRange2', + 'IUIAutomationTextRange', + 'UIA_IsMultipleViewPatternAvailablePropertyId', + 'AnnotationType_Footer', + 'UIA_WindowWindowVisualStatePropertyId', + 'UIA_Drag_DragStartEventId', 'UIA_SpreadsheetPatternId', + 'UIA_TransformCanResizePropertyId', + 'UIA_IsSubscriptAttributeId', + 'UIA_RangeValueMaximumPropertyId', + 'TextEditChangeType_Composition', + 'StructureChangeType_ChildAdded', 'AnnotationType_Author', + 'UIA_NotificationEventId', + 'WindowInteractionState_NotResponding', + 'UIA_RangeValueValuePropertyId', + 'UIA_DropTarget_DragEnterEventId', + 'IUIAutomationScrollItemPattern', 'HeadingLevel6', + 'UIA_GridItemColumnSpanPropertyId', + 'AnnotationType_AdvancedProofingIssue', + 'NotificationProcessing_ImportantAll', + 'UIA_ClickablePointPropertyId', 'TextUnit_Document', + 'UIA_DragDropEffectPropertyId', + 'ExpandCollapseState_Expanded', + 'UIA_InputReachedOtherElementEventId', + 'UIA_SelectionItemSelectionContainerPropertyId', + 'NotificationProcessing_ImportantMostRecent', + 'ProviderOptions_OverrideProvider', 'HeadingLevel1', + 'DockPosition_None', 'ScrollAmount_SmallDecrement', + 'IUIAutomationActiveTextPositionChangedEventHandler', + 'SupportedTextSelection_None', + 'UIA_HostedFragmentRootsInvalidatedEventId', + 'UIA_LiveRegionChangedEventId', 'UIA_ScrollItemPatternId', + 'AnnotationType_Mathematics', 'HeadingLevel4', + 'UIA_Drag_DragCompleteEventId', 'IUIAutomationElement3', + 'UIA_LegacyIAccessibleKeyboardShortcutPropertyId', + 'UIA_LegacyIAccessibleRolePropertyId', + 'UIA_AnnotationDateTimePropertyId', + 'AnnotationType_TrackChanges', 'UIA_DockPatternId', + 'UIA_RotationPropertyId', 'StyleId_Heading1', + 'CoalesceEventsOptions', 'RowOrColumnMajor_ColumnMajor', + 'UIA_IsScrollItemPatternAvailablePropertyId', + 'ToggleState', 'UIA_TableItemPatternId', + 'UIA_LegacyIAccessibleDefaultActionPropertyId', + 'UIA_TransformCanMovePropertyId', + 'WindowInteractionState_BlockedByModalWindow', + 'ProviderOptions_UseComThreading', + 'UIA_AutomationIdPropertyId', 'DockPosition', + 'IUIAutomationElement5', 'UIA_IsDialogPropertyId', + 'AnnotationType_GrammarError', 'UIA_GroupControlTypeId', + 'UIA_IsReadOnlyAttributeId', 'UIA_TableControlTypeId', + 'UIA_AnimationStyleAttributeId', + 'TextEditChangeType_AutoCorrect', + 'AnnotationType_UnsyncedChange', 'WindowVisualState', + 'AnnotationType_Sensitive', 'UIA_StyleNameAttributeId', + 'RowOrColumnMajor_Indeterminate', + 'UIA_SelectionPattern2Id', 'UIA_RangeValuePatternId', + 'ToggleState_Off', 'UIA_ToolTipClosedEventId', + 'WindowVisualState_Normal', + 'UIA_IsWindowPatternAvailablePropertyId', + 'UIA_SpreadsheetItemAnnotationTypesPropertyId', + 'UIA_AnnotationObjectsAttributeId', + 'SynchronizedInputType_LeftMouseUp', 
+ 'AnnotationType_CircularReferenceError', + 'UIA_TextEdit_ConversionTargetChangedEventId', + 'WindowInteractionState', 'UIA_CenterPointPropertyId', + 'IUIAutomationSelectionItemPattern', + 'IUIAutomationWindowPattern', + 'UIA_IsSelectionItemPatternAvailablePropertyId', + 'LiveSetting', 'IUIAutomationElement7', + 'UIA_ControlTypePropertyId', 'UIA_HelpTextPropertyId', + 'UIA_ScrollHorizontalScrollPercentPropertyId', + 'WindowInteractionState_Running', + 'UIA_GridItemColumnPropertyId', + 'UIA_DescribedByPropertyId', + 'UIA_ForegroundColorAttributeId', + 'UIA_IsContentElementPropertyId', 'StyleId_Heading8', + 'HeadingLevel5', 'UIA_RangeValueSmallChangePropertyId', + 'StyleId_Heading3', 'TreeScope_Element', + 'TreeTraversalOptions_PostOrder', 'StyleId_BulletedList', + 'UIA_TableColumnHeadersPropertyId', + 'UIA_CustomNavigationPatternId', + 'UIA_ActiveTextPositionChangedEventId', + 'OrientationType_Vertical', 'UIA_IsActiveAttributeId', + 'UIA_StructureChangedEventId', 'UIA_IsEnabledPropertyId', + 'TextEditChangeType_CompositionFinalized', + 'UIA_GridItemContainingGridPropertyId', + 'UIA_StylesShapePropertyId', + 'UIA_DropTargetDropTargetEffectsPropertyId', + 'AnnotationType_Endnote', 'UIA_FontNameAttributeId', + 'UIA_IsControlElementPropertyId', 'AutomationElementMode', + 'IUIAutomationEventHandler', 'UIA_ControllerForPropertyId', + 'IUIAutomationElement6', 'StyleId_Quote', + 'UIA_Selection2LastSelectedItemPropertyId', + 'UIA_Window_WindowOpenedEventId', 'TreeScope', + 'UIA_InputDiscardedEventId', 'TextUnit', + 'IUIAutomationExpandCollapsePattern', + 'UIA_ToolTipControlTypeId', + 'UIA_HorizontalTextAlignmentAttributeId', + 'UIA_MarginTrailingAttributeId', 'UIA_FillColorPropertyId', + 'UIA_Transform2ZoomLevelPropertyId', + 'NotificationProcessing', 'UIA_ToolTipOpenedEventId', + 'UIA_ComboBoxControlTypeId', 'UIA_CapStyleAttributeId', + 'ScrollAmount_NoAmount', 'UIA_AnnotationObjectsPropertyId', + 'UIA_Selection2ItemCountPropertyId', + 'IUIAutomationAnnotationPattern', + 'UIA_IndentationTrailingAttributeId', + 'UIA_ScrollVerticalScrollPercentPropertyId', + 'UIA_IndentationLeadingAttributeId', + 'UIA_LocalizedControlTypePropertyId', + 'UIA_TabsAttributeId', 'UIA_FullDescriptionPropertyId', + 'UIA_ImageControlTypeId', + 'UIA_IsTransformPatternAvailablePropertyId', + 'UIA_ListControlTypeId', 'UIA_RadioButtonControlTypeId', + 'Assertive', 'UIA_MenuModeStartEventId', + 'PropertyConditionFlags', 'UIA_HyperlinkControlTypeId', + 'UIA_OverlineStyleAttributeId', + 'AnnotationType_ConflictingChange', + 'UIA_StyleIdAttributeId', + 'UIA_SelectionSelectionPropertyId', + 'UIA_AriaPropertiesPropertyId', + 'UIA_VirtualizedItemPatternId', 'TextEditChangeType_None', + 'UIA_GridPatternId', 'NavigateDirection_NextSibling', + 'UIA_NativeWindowHandlePropertyId', + 'UIA_EditControlTypeId', 'IUIAutomationDockPattern', + 'IUIAutomationInvokePattern', 'UIA_ChangesEventId', + 'UIA_BoundingRectanglePropertyId', + 'UIA_FillTypePropertyId', 'UIA_CultureAttributeId', + 'UIA_LiveSettingPropertyId', + 'UIA_SynchronizedInputPatternId', + 'UIA_CaretBidiModeAttributeId', + 'UIA_IsStylesPatternAvailablePropertyId', 'IUIAutomation5', + 'UIA_StylesPatternId', 'IUIAutomationTextPattern2', + 'UIA_IsGridItemPatternAvailablePropertyId'] +from comtypes import _check_version; _check_version('1.1.10', 1622443861.050759) diff --git a/venv/Lib/site-packages/comtypes/gen/__init__.py b/venv/Lib/site-packages/comtypes/gen/__init__.py new file mode 100644 index 00000000..40bf6c8d --- /dev/null +++ b/venv/Lib/site-packages/comtypes/gen/__init__.py @@ 
-0,0 +1 @@ +# comtypes.gen package, directory for generated files. diff --git a/venv/Lib/site-packages/comtypes/gen/__pycache__/UIAutomationClient.cpython-36.pyc b/venv/Lib/site-packages/comtypes/gen/__pycache__/UIAutomationClient.cpython-36.pyc new file mode 100644 index 00000000..7235b6ca Binary files /dev/null and b/venv/Lib/site-packages/comtypes/gen/__pycache__/UIAutomationClient.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/gen/__pycache__/_00020430_0000_0000_C000_000000000046_0_2_0.cpython-36.pyc b/venv/Lib/site-packages/comtypes/gen/__pycache__/_00020430_0000_0000_C000_000000000046_0_2_0.cpython-36.pyc new file mode 100644 index 00000000..d4e8032d Binary files /dev/null and b/venv/Lib/site-packages/comtypes/gen/__pycache__/_00020430_0000_0000_C000_000000000046_0_2_0.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/gen/__pycache__/_944DE083_8FB8_45CF_BCB7_C477ACB2F897_0_1_0.cpython-36.pyc b/venv/Lib/site-packages/comtypes/gen/__pycache__/_944DE083_8FB8_45CF_BCB7_C477ACB2F897_0_1_0.cpython-36.pyc new file mode 100644 index 00000000..7bed074f Binary files /dev/null and b/venv/Lib/site-packages/comtypes/gen/__pycache__/_944DE083_8FB8_45CF_BCB7_C477ACB2F897_0_1_0.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/gen/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/comtypes/gen/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..f9220e4a Binary files /dev/null and b/venv/Lib/site-packages/comtypes/gen/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/gen/__pycache__/stdole.cpython-36.pyc b/venv/Lib/site-packages/comtypes/gen/__pycache__/stdole.cpython-36.pyc new file mode 100644 index 00000000..c7f1aed4 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/gen/__pycache__/stdole.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/gen/stdole.py b/venv/Lib/site-packages/comtypes/gen/stdole.py new file mode 100644 index 00000000..09d50d33 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/gen/stdole.py @@ -0,0 +1,3 @@ +from comtypes.gen import _00020430_0000_0000_C000_000000000046_0_2_0 +globals().update(_00020430_0000_0000_C000_000000000046_0_2_0.__dict__) +__name__ = 'comtypes.gen.stdole' \ No newline at end of file diff --git a/venv/Lib/site-packages/comtypes/git.py b/venv/Lib/site-packages/comtypes/git.py new file mode 100644 index 00000000..ef9f60ab --- /dev/null +++ b/venv/Lib/site-packages/comtypes/git.py @@ -0,0 +1,65 @@ +"""comtypes.git - access the process wide global interface table + +The global interface table provides a way to marshal interface pointers +between different threading appartments. 
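A minimal usage sketch (punk stands for any COM interface pointer you already hold; the helpers are the module-level functions defined below):

    # in the thread/apartment that owns the interface pointer
    cookie = RegisterInterfaceInGlobal(punk)
    # in any other apartment
    itf = GetInterfaceFromGlobal(cookie, IUnknown)
    # when no other apartment needs it any more
    RevokeInterfaceFromGlobal(cookie)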
+""" +from ctypes import * +from comtypes import IUnknown, STDMETHOD, COMMETHOD, \ + GUID, HRESULT, CoCreateInstance, CLSCTX_INPROC_SERVER + +DWORD = c_ulong + +class IGlobalInterfaceTable(IUnknown): + _iid_ = GUID("{00000146-0000-0000-C000-000000000046}") + _methods_ = [ + STDMETHOD(HRESULT, "RegisterInterfaceInGlobal", + [POINTER(IUnknown), POINTER(GUID), POINTER(DWORD)]), + STDMETHOD(HRESULT, "RevokeInterfaceFromGlobal", [DWORD]), + STDMETHOD(HRESULT, "GetInterfaceFromGlobal", + [DWORD, POINTER(GUID), POINTER(POINTER(IUnknown))]), + ] + + def RegisterInterfaceInGlobal(self, obj, interface=IUnknown): + cookie = DWORD() + self.__com_RegisterInterfaceInGlobal(obj, interface._iid_, cookie) + return cookie.value + + def GetInterfaceFromGlobal(self, cookie, interface=IUnknown): + ptr = POINTER(interface)() + self.__com_GetInterfaceFromGlobal(cookie, interface._iid_, ptr) + return ptr + + def RevokeInterfaceFromGlobal(self, cookie): + self.__com_RevokeInterfaceFromGlobal(cookie) + + +# It was a pain to get this CLSID: it's neither in the registry, nor +# in any header files. I had to compile a C program, and find it out +# with the debugger. Apparently it is in uuid.lib. +CLSID_StdGlobalInterfaceTable = GUID("{00000323-0000-0000-C000-000000000046}") + +git = CoCreateInstance(CLSID_StdGlobalInterfaceTable, + interface=IGlobalInterfaceTable, + clsctx=CLSCTX_INPROC_SERVER) + +RevokeInterfaceFromGlobal = git.RevokeInterfaceFromGlobal +RegisterInterfaceInGlobal = git.RegisterInterfaceInGlobal +GetInterfaceFromGlobal = git.GetInterfaceFromGlobal + +__all__ = ["RegisterInterfaceInGlobal", "RevokeInterfaceFromGlobal", "GetInterfaceFromGlobal"] + +if __name__ == "__main__": + from comtypes.typeinfo import CreateTypeLib, ICreateTypeLib + + tlib = CreateTypeLib("foo.bar") # we don not save it later + assert (tlib.AddRef(), tlib.Release()) == (2, 1) + + cookie = RegisterInterfaceInGlobal(tlib) + assert (tlib.AddRef(), tlib.Release()) == (3, 2) + + GetInterfaceFromGlobal(cookie, ICreateTypeLib) + GetInterfaceFromGlobal(cookie, ICreateTypeLib) + GetInterfaceFromGlobal(cookie) + assert (tlib.AddRef(), tlib.Release()) == (3, 2) + RevokeInterfaceFromGlobal(cookie) + assert (tlib.AddRef(), tlib.Release()) == (2, 1) diff --git a/venv/Lib/site-packages/comtypes/hresult.py b/venv/Lib/site-packages/comtypes/hresult.py new file mode 100644 index 00000000..85f75ceb --- /dev/null +++ b/venv/Lib/site-packages/comtypes/hresult.py @@ -0,0 +1,76 @@ +# comtypes.hresult +# COM success and error codes +# +# Note that the codes should be written in decimal notation! 
+ +S_OK = 0 +S_FALSE = 1 + +E_UNEXPECTED = -2147418113 #0x8000FFFFL + +E_NOTIMPL = -2147467263 #0x80004001L +E_NOINTERFACE = -2147467262 #0x80004002L +E_POINTER = -2147467261 #0x80004003L +E_FAIL = -2147467259 #0x80004005L +E_INVALIDARG = -2147024809 #0x80070057L +E_OUTOFMEMORY = -2147024882 # 0x8007000EL + +CLASS_E_NOAGGREGATION = -2147221232 #0x80040110L +CLASS_E_CLASSNOTAVAILABLE = -2147221231 #0x80040111L + +CO_E_CLASSSTRING = -2147221005 #0x800401F3L + +# connection point error codes +CONNECT_E_CANNOTCONNECT = -2147220990 +CONNECT_E_ADVISELIMIT = -2147220991 +CONNECT_E_NOCONNECTION = -2147220992 + +TYPE_E_ELEMENTNOTFOUND = -2147352077 #0x8002802BL + +TYPE_E_REGISTRYACCESS = -2147319780 #0x8002801CL +TYPE_E_CANTLOADLIBRARY = -2147312566 #0x80029C4AL + +# all the DISP_E_ values from windows.h +DISP_E_BUFFERTOOSMALL = -2147352557 +DISP_E_DIVBYZERO = -2147352558 +DISP_E_NOTACOLLECTION = -2147352559 +DISP_E_BADCALLEE = -2147352560 +DISP_E_PARAMNOTOPTIONAL = -2147352561 #0x8002000F +DISP_E_BADPARAMCOUNT = -2147352562 #0x8002000E +DISP_E_ARRAYISLOCKED = -2147352563 #0x8002000D +DISP_E_UNKNOWNLCID = -2147352564 #0x8002000C +DISP_E_BADINDEX = -2147352565 #0x8002000B +DISP_E_OVERFLOW = -2147352566 #0x8002000A +DISP_E_EXCEPTION = -2147352567 #0x80020009 +DISP_E_BADVARTYPE = -2147352568 #0x80020008 +DISP_E_NONAMEDARGS = -2147352569 #0x80020007 +DISP_E_UNKNOWNNAME = -2147352570 #0x80020006 +DISP_E_TYPEMISMATCH = -2147352571 #0800020005 +DISP_E_PARAMNOTFOUND = -2147352572 #0x80020004 +DISP_E_MEMBERNOTFOUND = -2147352573 #0x80020003 +DISP_E_UNKNOWNINTERFACE = -2147352575 #0x80020001 + +RPC_E_CHANGED_MODE = -2147417850 # 0x80010106 +RPC_E_SERVERFAULT = -2147417851 # 0x80010105 + +# 'macros' and constants to create your own HRESULT values: + +def MAKE_HRESULT(sev, fac, code): + # A hresult is SIGNED in comtypes + from ctypes import c_long + return c_long((sev << 31 | fac << 16 | code)).value + +SEVERITY_ERROR = 1 +SEVERITY_SUCCESS = 0 + +FACILITY_ITF = 4 +FACILITY_WIN32 = 7 + +def HRESULT_FROM_WIN32(x): + # make signed + from ctypes import c_long + x = c_long(x).value + if x < 0: + return x + # 0x80000000 | FACILITY_WIN32 << 16 | x & 0xFFFF + return c_long(0x80070000 | (x & 0xFFFF)).value diff --git a/venv/Lib/site-packages/comtypes/logutil.py b/venv/Lib/site-packages/comtypes/logutil.py new file mode 100644 index 00000000..f8b92f2f --- /dev/null +++ b/venv/Lib/site-packages/comtypes/logutil.py @@ -0,0 +1,51 @@ +# logutil.py +import logging, ctypes + +class NTDebugHandler(logging.Handler): + def emit(self, record, + writeA=ctypes.windll.kernel32.OutputDebugStringA, + writeW=ctypes.windll.kernel32.OutputDebugStringW): + text = self.format(record) + if isinstance(text, str): + writeA(text + "\n") + else: + writeW(text + "\n") +logging.NTDebugHandler = NTDebugHandler + +def setup_logging(*pathnames): + import configparser + + parser = configparser.ConfigParser() + parser.optionxform = str # use case sensitive option names! 
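    # Sketch of the kind of INI file this function expects (section and option
    # names are taken from the code below; the concrete values are examples only):
    #
    #     [logging]
    #     level = DEBUG
    #     format = %(levelname)s:%(name)s:%(message)s
    #     handler = NTDebugHandler()
    #
    #     [logging.levels]
    #     comtypes.client = INFO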
+ + parser.read(pathnames) + + DEFAULTS = {"handler": "StreamHandler()", + "format": "%(levelname)s:%(name)s:%(message)s", + "level": "WARNING"} + + def get(section, option): + try: + return parser.get(section, option, True) + except (configparser.NoOptionError, configparser.NoSectionError): + return DEFAULTS[option] + + levelname = get("logging", "level") + format = get("logging", "format") + handlerclass = get("logging", "handler") + + # convert level name to level value + level = getattr(logging, levelname) + # create the handler instance + handler = eval(handlerclass, vars(logging)) + formatter = logging.Formatter(format) + handler.setFormatter(formatter) + logging.root.addHandler(handler) + logging.root.setLevel(level) + + try: + for name, value in parser.items("logging.levels", True): + value = getattr(logging, value) + logging.getLogger(name).setLevel(value) + except configparser.NoSectionError: + pass diff --git a/venv/Lib/site-packages/comtypes/messageloop.py b/venv/Lib/site-packages/comtypes/messageloop.py new file mode 100644 index 00000000..821af46f --- /dev/null +++ b/venv/Lib/site-packages/comtypes/messageloop.py @@ -0,0 +1,50 @@ +import ctypes +from ctypes import WinDLL, byref, WinError +from ctypes.wintypes import MSG +_user32 = WinDLL("user32") + +GetMessage = _user32.GetMessageA +GetMessage.argtypes = [ + ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_uint, + ctypes.c_uint, +] +TranslateMessage = _user32.TranslateMessage +DispatchMessage = _user32.DispatchMessageA + + +class _MessageLoop(object): + + def __init__(self): + self._filters = [] + + def insert_filter(self, obj, index=-1): + self._filters.insert(index, obj) + + def remove_filter(self, obj): + self._filters.remove(obj) + + def run(self): + msg = MSG() + lpmsg = byref(msg) + while 1: + ret = GetMessage(lpmsg, 0, 0, 0) + if ret == -1: + raise WinError() + elif ret == 0: + return # got WM_QUIT + if not self.filter_message(lpmsg): + TranslateMessage(lpmsg) + DispatchMessage(lpmsg) + + def filter_message(self, lpmsg): + return any(list(filter(lpmsg)) for filter in self._filters) + +_messageloop = _MessageLoop() + +run = _messageloop.run +insert_filter = _messageloop.insert_filter +remove_filter = _messageloop.remove_filter + +__all__ = ["run", "insert_filter", "remove_filter"] diff --git a/venv/Lib/site-packages/comtypes/npsupport.py b/venv/Lib/site-packages/comtypes/npsupport.py new file mode 100644 index 00000000..a806e37b --- /dev/null +++ b/venv/Lib/site-packages/comtypes/npsupport.py @@ -0,0 +1,125 @@ +""" Consolidation of numpy support utilities. """ +import sys + +try: + import numpy +except ImportError: + numpy = None + + +HAVE_NUMPY = numpy is not None + +is_64bits = sys.maxsize > 2**32 + + +def _make_variant_dtype(): + """ Create a dtype for VARIANT. This requires support for Unions, which is + available in numpy version 1.7 or greater. + + This does not support the decimal type. + + Returns None if the dtype cannot be created. 
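    The resulting dtype (exposed further down as the module-level VARIANT_dtype)
    mirrors the VARIANT layout, so values can be packed field-wise, roughly like
    this (assuming numpy is available):

        varr = numpy.zeros(3, VARIANT_dtype)
        varr['vt'] = 5                        # VT_R8
        varr['_']['VT_R8'] = [1.0, 2.0, 3.0]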
+ + """ + + # pointer typecode + ptr_typecode = '<u8' if is_64bits else '<u4' + + _tagBRECORD_format = [ + ('pvRecord', ptr_typecode), + ('pRecInfo', ptr_typecode), + ] + + # overlapping typecodes only allowed in numpy version 1.7 or greater + U_VARIANT_format = dict( + names=[ + 'VT_BOOL', 'VT_I1', 'VT_I2', 'VT_I4', 'VT_I8', 'VT_INT', 'VT_UI1', + 'VT_UI2', 'VT_UI4', 'VT_UI8', 'VT_UINT', 'VT_R4', 'VT_R8', 'VT_CY', + 'c_wchar_p', 'c_void_p', 'pparray', 'bstrVal', '_tagBRECORD', + ], + formats=[ + '<i2', '<i1', '<i2', '<i4', '<i8', '<i4', '<u1', '<u2', '<u4', + '<u8', '<u4', '<f4', '<f8', '<i8', ptr_typecode, ptr_typecode, + ptr_typecode, ptr_typecode, _tagBRECORD_format, + ], + offsets=[0] * 19 # This is what makes it a union + ) + + tagVARIANT_format = [ + ("vt", '<u2'), + ("wReserved1", '<u2'), + ("wReserved2", '<u2'), + ("wReserved3", '<u2'), + ("_", U_VARIANT_format), + ] + + return numpy.dtype(tagVARIANT_format) + + +def isndarray(value): + """ Check if a value is an ndarray. + + This cannot succeed if numpy is not available. + + """ + if not HAVE_NUMPY: + return False + return isinstance(value, numpy.ndarray) + + +def isdatetime64(value): + """ Check if a value is a datetime64. + + This cannot succeed if datetime64 is not available. + + """ + if not HAVE_NUMPY: + return False + return isinstance(value, datetime64) + + +def _check_ctypeslib_typecodes(): + import numpy as np + from numpy import ctypeslib + try: + from numpy.ctypeslib import _typecodes + except ImportError: + from numpy.ctypeslib import as_ctypes_type + + ctypes_to_dtypes = {} + + for tp in set(np.sctypeDict.values()): + try: + ctype_for = as_ctypes_type(tp) + ctypes_to_dtypes[ctype_for] = tp + except NotImplementedError: + continue + ctypeslib._typecodes = ctypes_to_dtypes + return ctypeslib._typecodes + + +com_null_date64 = None +datetime64 = None +VARIANT_dtype = None +typecodes = {} + +if HAVE_NUMPY: + typecodes = _check_ctypeslib_typecodes() + # dtype for VARIANT. This allows for packing of variants into an array, and + # subsequent conversion to a multi-dimensional safearray. + try: + VARIANT_dtype = _make_variant_dtype() + except ValueError: + pass + + # This simplifies dependent modules + try: + from numpy import datetime64 + except ImportError: + pass + else: + try: + # This does not work on numpy 1.6 + com_null_date64 = datetime64("1899-12-30T00:00:00", "ns") + except TypeError: + pass diff --git a/venv/Lib/site-packages/comtypes/patcher.py b/venv/Lib/site-packages/comtypes/patcher.py new file mode 100644 index 00000000..a6fa60a1 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/patcher.py @@ -0,0 +1,75 @@ + +class Patch(object): + """ + Implements a class decorator suitable for patching an existing class with + a new namespace. + + For example, consider this trivial class (that your code doesn't own): + + >>> class MyClass: + ... def __init__(self, param): + ... self.param = param + ... def bar(self): + ... print("orig bar") + + To add attributes to MyClass, you can use Patch: + + >>> @Patch(MyClass) + ... class JustANamespace: + ... def print_param(self): + ... print(self.param) + >>> ob = MyClass('foo') + >>> ob.print_param() + foo + + The namespace is assigned None, so there's no mistaking the purpose + >>> JustANamespace + + The patcher will replace the existing methods: + + >>> @Patch(MyClass) + ... class SomeNamespace: + ... def bar(self): + ... print("replaced bar") + >>> ob = MyClass('foo') + >>> ob.bar() + replaced bar + + But it will not replace methods if no_replace is indicated. 
+ + >>> @Patch(MyClass) + ... class AnotherNamespace: + ... @no_replace + ... def bar(self): + ... print("candy bar") + >>> ob = MyClass('foo') + >>> ob.bar() + replaced bar + + """ + + def __init__(self, target): + self.target = target + + def __call__(self, patches): + for name, value in list(vars(patches).items()): + if name in vars(ReferenceEmptyClass): + continue + no_replace = getattr(value, '__no_replace', False) + if no_replace and hasattr(self.target, name): + continue + setattr(self.target, name, value) + +def no_replace(f): + """ + Method decorator to indicate that a method definition shall + silently be ignored if it already exists in the target class. + """ + f.__no_replace = True + return f + +class ReferenceEmptyClass(object): + """ + This empty class will serve as a reference for attributes present on + any class. + """ diff --git a/venv/Lib/site-packages/comtypes/persist.py b/venv/Lib/site-packages/comtypes/persist.py new file mode 100644 index 00000000..77fae347 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/persist.py @@ -0,0 +1,212 @@ +"""This module defines the following interfaces: + + IErrorLog + IPropertyBag + IPersistPropertyBag + IPropertyBag2 + IPersistPropertyBag2 + +The 'DictPropertyBag' class is a class implementing the IPropertyBag +interface, useful in client code. +""" +from ctypes import * +from ctypes.wintypes import WORD, DWORD, BOOL +from comtypes import GUID, IUnknown, COMMETHOD, HRESULT, dispid +from comtypes import IPersist +from comtypes.automation import VARIANT, tagEXCEPINFO + +# XXX Replace by canonical solution!!! +WSTRING = c_wchar_p + +class IErrorLog(IUnknown): + _iid_ = GUID('{3127CA40-446E-11CE-8135-00AA004BB851}') + _idlflags_ = [] + _methods_ = [ + COMMETHOD([], HRESULT, 'AddError', + ( ['in'], WSTRING, 'pszPropName' ), + ( ['in'], POINTER(tagEXCEPINFO), 'pExcepInfo' )), + ] + +class IPropertyBag(IUnknown): + _iid_ = GUID('{55272A00-42CB-11CE-8135-00AA004BB851}') + _idlflags_ = [] + _methods_ = [ + # XXX Note: According to MSDN, pVar and pErrorLog are ['in', 'out'] parameters. 
+ # + # XXX ctypes does NOT yet accept POINTER(IErrorLog) as 'out' parameter: + # TypeError: 'out' parameter 3 must be a pointer type, not POINTER(IErrorLog) + COMMETHOD([], HRESULT, 'Read', + ( ['in'], WSTRING, 'pszPropName' ), + ( ['in', 'out'], POINTER(VARIANT), 'pVar' ), + ( ['in'], POINTER(IErrorLog), 'pErrorLog' )), +## ( ['in', 'out'], POINTER(IErrorLog), 'pErrorLog' )), + COMMETHOD([], HRESULT, 'Write', + ( ['in'], WSTRING, 'pszPropName' ), + ( ['in'], POINTER(VARIANT), 'pVar' )), + ] + +class IPersistPropertyBag(IPersist): + _iid_ = GUID('{37D84F60-42CB-11CE-8135-00AA004BB851}') + _idlflags_ = [] + _methods_ = [ + COMMETHOD([], HRESULT, 'InitNew'), + COMMETHOD([], HRESULT, 'Load', + ( ['in'], POINTER(IPropertyBag), 'pPropBag' ), + ( ['in'], POINTER(IErrorLog), 'pErrorLog' )), + COMMETHOD([], HRESULT, 'Save', + ( ['in'], POINTER(IPropertyBag), 'pPropBag' ), + ( ['in'], c_int, 'fClearDirty' ), + ( ['in'], c_int, 'fSaveAllProperties' )), + ] + + +CLIPFORMAT = WORD + +PROPBAG2_TYPE_UNDEFINED = 0 +PROPBAG2_TYPE_DATA = 1 +PROPBAG2_TYPE_URL = 2 +PROPBAG2_TYPE_OBJECT = 3 +PROPBAG2_TYPE_STREAM = 4 +PROPBAG2_TYPE_STORAGE = 5 +PROPBAG2_TYPE_MONIKER = 6 + +class tagPROPBAG2(Structure): + _fields_ = [ + ('dwType', c_ulong), + ('vt', c_ushort), + ('cfType', CLIPFORMAT), + ('dwHint', c_ulong), + ('pstrName', WSTRING), + ('clsid', GUID), + ] + +class IPropertyBag2(IUnknown): + _iid_ = GUID('{22F55882-280B-11D0-A8A9-00A0C90C2004}') + _idlflags_ = [] + _methods_ = [ + COMMETHOD([], HRESULT, 'Read', + ( ['in'], c_ulong, 'cProperties' ), + ( ['in'], POINTER(tagPROPBAG2), 'pPropBag' ), + ( ['in'], POINTER(IErrorLog), 'pErrLog' ), + ( ['out'], POINTER(VARIANT), 'pvarValue' ), + ( ['out'], POINTER(HRESULT), 'phrError' )), + COMMETHOD([], HRESULT, 'Write', + ( ['in'], c_ulong, 'cProperties' ), + ( ['in'], POINTER(tagPROPBAG2), 'pPropBag' ), + ( ['in'], POINTER(VARIANT), 'pvarValue' )), + COMMETHOD([], HRESULT, 'CountProperties', + ( ['out'], POINTER(c_ulong), 'pcProperties' )), + COMMETHOD([], HRESULT, 'GetPropertyInfo', + ( ['in'], c_ulong, 'iProperty' ), + ( ['in'], c_ulong, 'cProperties' ), + ( ['out'], POINTER(tagPROPBAG2), 'pPropBag' ), + ( ['out'], POINTER(c_ulong), 'pcProperties' )), + COMMETHOD([], HRESULT, 'LoadObject', + ( ['in'], WSTRING, 'pstrName' ), + ( ['in'], c_ulong, 'dwHint' ), + ( ['in'], POINTER(IUnknown), 'punkObject' ), + ( ['in'], POINTER(IErrorLog), 'pErrLog' )), + ] + +class IPersistPropertyBag2(IPersist): + _iid_ = GUID('{22F55881-280B-11D0-A8A9-00A0C90C2004}') + _idlflags_ = [] + _methods_ = [ + COMMETHOD([], HRESULT, 'InitNew'), + COMMETHOD([], HRESULT, 'Load', + ( ['in'], POINTER(IPropertyBag2), 'pPropBag' ), + ( ['in'], POINTER(IErrorLog), 'pErrLog' )), + COMMETHOD([], HRESULT, 'Save', + ( ['in'], POINTER(IPropertyBag2), 'pPropBag' ), + ( ['in'], c_int, 'fClearDirty' ), + ( ['in'], c_int, 'fSaveAllProperties' )), + COMMETHOD([], HRESULT, 'IsDirty'), + ] + + +# STGM constants +# Access +STGM_READ = 0x00000000 +STGM_WRITE = 0x00000001 +STGM_READWRITE = 0x00000002 + +# Sharing +STGM_SHARE_EXCLUSIVE = 0x00000010 +STGM_SHARE_DENY_WRITE = 0x00000020 +STGM_SHARE_DENY_READ = 0x00000030 +STGM_SHARE_DENY_NONE = 0x00000040 +STGM_PRIORITY = 0x00040000 + +# Creation +STGM_FAILIFTHERE = 0x00000000 +STGM_CREATE = 0x00001000 +STGM_CONVERT = 0x00020000 + +# Transactioning +STGM_DIRECT = 0x00000000 +STGM_TRANSACTED = 0x00010000 + +# Transactioning Performance +STGM_NOSCRATCH = 0x00100000 +STGM_NOSNAPSHOT = 0x00200000 + +# Direct SWMR and Simple +STGM_SIMPLE = 0x08000000 +STGM_DIRECT_SWMR = 
0x00400000 + +# Delete on release +STGM_DELETEONRELEASE = 0x04000000 + +LPOLESTR = LPCOLESTR = c_wchar_p + +class IPersistFile(IPersist): + _iid_ = GUID('{0000010B-0000-0000-C000-000000000046}') + _idlflags_ = [] + _methods_ = [ + COMMETHOD([], HRESULT, 'IsDirty'), + COMMETHOD([], HRESULT, 'Load', + ( ['in'], LPCOLESTR, 'pszFileName' ), + ( ['in'], DWORD, 'dwMode' )), + COMMETHOD([], HRESULT, 'Save', + ( ['in'], LPCOLESTR, 'pszFileName' ), + ( ['in'], BOOL, 'fRemember' )), + COMMETHOD([], HRESULT, 'SaveCompleted', + ( ['in'], LPCOLESTR, 'pszFileName' )), + COMMETHOD([], HRESULT, 'GetCurFile', + ( ['out'], POINTER(LPOLESTR), 'ppszFileName' )) + ] + + +from comtypes import COMObject +from comtypes.hresult import * +class DictPropertyBag(COMObject): + """An object implementing the IProperty interface on a dictionary. + + Pass named values in the constructor for the client to Read(), or + retrieve from the .values instance variable after the client has + called Load(). + """ + _com_interfaces_ = [IPropertyBag] + + def __init__(self, **kw): + super(DictPropertyBag, self).__init__() + self.values = kw + + def Read(self, this, name, pVar, errorlog): + try: + val = self.values[name] + except KeyError: + return E_INVALIDARG + # The caller did provide info about the type that is expected + # with the pVar[0].vt typecode, except when this is VT_EMPTY. + var = pVar[0] + typecode = var.vt + var.value = val + if typecode: + var.ChangeType(typecode) + return S_OK + + def Write(self, this, name, var): + val = var[0].value + self.values[name] = val + return S_OK diff --git a/venv/Lib/site-packages/comtypes/safearray.py b/venv/Lib/site-packages/comtypes/safearray.py new file mode 100644 index 00000000..a59c0b46 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/safearray.py @@ -0,0 +1,397 @@ +import threading +import array +from ctypes import (POINTER, Structure, byref, cast, c_long, memmove, pointer, + sizeof) +from comtypes import _safearray, IUnknown, com_interface_registry, npsupport +from comtypes.patcher import Patch + +numpy = npsupport.numpy +_safearray_type_cache = {} + + +class _SafeArrayAsNdArrayContextManager(object): + '''Context manager allowing safe arrays to be extracted as ndarrays. + + This is thread-safe. + + Example + ------- + + This works in python >= 2.5 + >>> with safearray_as_ndarray: + >>> my_arr = com_object.AsSafeArray + >>> type(my_arr) + numpy.ndarray + + ''' + thread_local = threading.local() + + def __enter__(self): + try: + self.thread_local.count += 1 + except AttributeError: + self.thread_local.count = 1 + + def __exit__(self, exc_type, exc_value, traceback): + self.thread_local.count -= 1 + + def __bool__(self): + '''True if context manager is currently entered on given thread. + + ''' + return bool(getattr(self.thread_local, 'count', 0)) + + +# Global _SafeArrayAsNdArrayContextManager +safearray_as_ndarray = _SafeArrayAsNdArrayContextManager() + + +################################################################ +# This is THE PUBLIC function: the gateway to the SAFEARRAY functionality. +def _midlSAFEARRAY(itemtype): + """This function mimics the 'SAFEARRAY(aType)' IDL idiom. It + returns a subtype of SAFEARRAY, instances will be built with a + typecode VT_... corresponding to the aType, which must be one of + the supported ctypes. 
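    A rough usage sketch, mirroring how these types are used elsewhere in
    comtypes:

        from ctypes import c_double
        sa_type = _midlSAFEARRAY(c_double)
        sa = sa_type.create([1.0, 2.0, 3.0])
        sa[0]                       # -> (1.0, 2.0, 3.0)
        with safearray_as_ndarray:
            arr = sa[0]             # -> numpy.ndarray, if numpy is available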
+ """ + try: + return POINTER(_safearray_type_cache[itemtype]) + except KeyError: + sa_type = _make_safearray_type(itemtype) + _safearray_type_cache[itemtype] = sa_type + return POINTER(sa_type) + + +def _make_safearray_type(itemtype): + # Create and return a subclass of tagSAFEARRAY + from comtypes.automation import _ctype_to_vartype, VT_RECORD, \ + VT_UNKNOWN, IDispatch, VT_DISPATCH + + meta = type(_safearray.tagSAFEARRAY) + sa_type = meta.__new__(meta, + "SAFEARRAY_%s" % itemtype.__name__, + (_safearray.tagSAFEARRAY,), {}) + + try: + vartype = _ctype_to_vartype[itemtype] + extra = None + except KeyError: + if issubclass(itemtype, Structure): + try: + guids = itemtype._recordinfo_ + except AttributeError: + extra = None + else: + from comtypes.typeinfo import GetRecordInfoFromGuids + extra = GetRecordInfoFromGuids(*guids) + vartype = VT_RECORD + elif issubclass(itemtype, POINTER(IDispatch)): + vartype = VT_DISPATCH + extra = pointer(itemtype._iid_) + elif issubclass(itemtype, POINTER(IUnknown)): + vartype = VT_UNKNOWN + extra = pointer(itemtype._iid_) + else: + raise TypeError(itemtype) + + @Patch(POINTER(sa_type)) + class _(object): + # Should explain the ideas how SAFEARRAY is used in comtypes + _itemtype_ = itemtype # a ctypes type + _vartype_ = vartype # a VARTYPE value: VT_... + _needsfree = False + + @classmethod + def create(cls, value, extra=None): + """Create a POINTER(SAFEARRAY_...) instance of the correct + type; value is an object containing the items to store. + + Python lists, tuples, and array.array instances containing + compatible item types can be passed to create + one-dimensional arrays. To create multidimensional arrys, + numpy arrays must be passed. + """ + if npsupport.isndarray(value): + return cls.create_from_ndarray(value, extra) + + # For VT_UNKNOWN or VT_DISPATCH, extra must be a pointer to + # the GUID of the interface. + # + # For VT_RECORD, extra must be a pointer to an IRecordInfo + # describing the record. + + # XXX How to specify the lbound (3. parameter to CreateVectorEx)? + # XXX How to write tests for lbound != 0? + pa = _safearray.SafeArrayCreateVectorEx(cls._vartype_, + 0, + len(value), + extra) + if not pa: + if cls._vartype_ == VT_RECORD and extra is None: + raise TypeError("Cannot create SAFEARRAY type VT_RECORD without IRecordInfo.") + # Hm, there may be other reasons why the creation fails... + raise MemoryError() + # We now have a POINTER(tagSAFEARRAY) instance which we must cast + # to the correct type: + pa = cast(pa, cls) + # Now, fill the data in: + ptr = POINTER(cls._itemtype_)() # container for the values + _safearray.SafeArrayAccessData(pa, byref(ptr)) + try: + if isinstance(value, array.array): + addr, n = value.buffer_info() + nbytes = len(value) * sizeof(cls._itemtype_) + memmove(ptr, addr, nbytes) + else: + for index, item in enumerate(value): + ptr[index] = item + finally: + _safearray.SafeArrayUnaccessData(pa) + return pa + + @classmethod + def create_from_ndarray(cls, value, extra, lBound=0): + from comtypes.automation import VARIANT + # If processing VARIANT, makes sure the array type is correct. 
+ if cls._itemtype_ is VARIANT: + if value.dtype != npsupport.VARIANT_dtype: + value = _ndarray_to_variant_array(value) + else: + ai = value.__array_interface__ + if ai["version"] != 3: + raise TypeError("only __array_interface__ version 3 supported") + if cls._itemtype_ != npsupport.typecodes[ai["typestr"]]: + raise TypeError("Wrong array item type") + + # SAFEARRAYs have Fortran order; convert the numpy array if needed + if not value.flags.f_contiguous: + value = numpy.array(value, order="F") + + # For VT_UNKNOWN or VT_DISPATCH, extra must be a pointer to + # the GUID of the interface. + # + # For VT_RECORD, extra must be a pointer to an IRecordInfo + # describing the record. + rgsa = (_safearray.SAFEARRAYBOUND * value.ndim)() + nitems = 1 + for i, d in enumerate(value.shape): + nitems *= d + rgsa[i].cElements = d + rgsa[i].lBound = lBound + pa = _safearray.SafeArrayCreateEx(cls._vartype_, + value.ndim, # cDims + rgsa, # rgsaBound + extra) # pvExtra + if not pa: + if cls._vartype_ == VT_RECORD and extra is None: + raise TypeError("Cannot create SAFEARRAY type VT_RECORD without IRecordInfo.") + # Hm, there may be other reasons why the creation fails... + raise MemoryError() + # We now have a POINTER(tagSAFEARRAY) instance which we must cast + # to the correct type: + pa = cast(pa, cls) + # Now, fill the data in: + ptr = POINTER(cls._itemtype_)() # pointer to the item values + _safearray.SafeArrayAccessData(pa, byref(ptr)) + try: + nbytes = nitems * sizeof(cls._itemtype_) + memmove(ptr, value.ctypes.data, nbytes) + finally: + _safearray.SafeArrayUnaccessData(pa) + return pa + + @classmethod + def from_param(cls, value): + if not isinstance(value, cls): + value = cls.create(value, extra) + value._needsfree = True + return value + + def __getitem__(self, index): + # pparray[0] returns the whole array contents. + if index != 0: + raise IndexError("Only index 0 allowed") + return self.unpack() + + def __setitem__(self, index, value): + # XXX Need this to implement [in, out] safearrays in COM servers! +## print "__setitem__", index, value + raise TypeError("Setting items not allowed") + + def __ctypes_from_outparam__(self): + self._needsfree = True + return self[0] + + def __del__(self, _SafeArrayDestroy=_safearray.SafeArrayDestroy): + if self._needsfree: + _SafeArrayDestroy(self) + + def _get_size(self, dim): + "Return the number of elements for dimension 'dim'" + ub = _safearray.SafeArrayGetUBound(self, dim) + 1 + lb = _safearray.SafeArrayGetLBound(self, dim) + return ub - lb + + def unpack(self): + """Unpack a POINTER(SAFEARRAY_...) 
into a Python tuple or ndarray.""" + dim = _safearray.SafeArrayGetDim(self) + + if dim == 1: + num_elements = self._get_size(1) + result = self._get_elements_raw(num_elements) + if safearray_as_ndarray: + import numpy + return numpy.asarray(result) + return tuple(result) + elif dim == 2: + # get the number of elements in each dimension + rows, cols = self._get_size(1), self._get_size(2) + # get all elements + result = self._get_elements_raw(rows * cols) + # this must be reshaped and transposed because it is + # flat, and in VB order + if safearray_as_ndarray: + import numpy + return numpy.asarray(result).reshape((cols, rows)).T + result = [tuple(result[r::rows]) for r in range(rows)] + return tuple(result) + else: + lowerbounds = [_safearray.SafeArrayGetLBound(self, d) + for d in range(1, dim+1)] + indexes = (c_long * dim)(*lowerbounds) + upperbounds = [_safearray.SafeArrayGetUBound(self, d) + for d in range(1, dim+1)] + row = self._get_row(0, indexes, lowerbounds, upperbounds) + if safearray_as_ndarray: + import numpy + return numpy.asarray(row) + return row + + def _get_elements_raw(self, num_elements): + """Returns a flat list or ndarray containing ALL elements in + the safearray.""" + from comtypes.automation import VARIANT + # XXX Not sure this is true: + # For VT_UNKNOWN and VT_DISPATCH, we should retrieve the + # interface iid by SafeArrayGetIID(). + ptr = POINTER(self._itemtype_)() # container for the values + _safearray.SafeArrayAccessData(self, byref(ptr)) + try: + if self._itemtype_ == VARIANT: + # We have to loop over each item, so we get no + # speedup by creating an ndarray here. + return [i.value for i in ptr[:num_elements]] + elif issubclass(self._itemtype_, POINTER(IUnknown)): + iid = _safearray.SafeArrayGetIID(self) + itf = com_interface_registry[str(iid)] + # COM interface pointers retrieved from array + # must be AddRef()'d if non-NULL. + elems = ptr[:num_elements] + result = [] + # We have to loop over each item, so we get no + # speedup by creating an ndarray here. + for p in elems: + if bool(p): + p.AddRef() + result.append(p.QueryInterface(itf)) + else: + # return a NULL-interface pointer. + result.append(POINTER(itf)()) + return result + else: + # If the safearray element are NOT native python + # objects, the containing safearray must be kept + # alive until all the elements are destroyed. + if not issubclass(self._itemtype_, Structure): + # Create an ndarray if requested. This is where + # we can get the most speed-up. + # XXX Only try to convert types known to + # numpy.ctypeslib. + if (safearray_as_ndarray and self._itemtype_ in + list(npsupport.typecodes.values())): + arr = numpy.ctypeslib.as_array(ptr, + (num_elements,)) + return arr.copy() + return ptr[:num_elements] + + def keep_safearray(v): + v.__keepref = self + return v + return [keep_safearray(x) for x in ptr[:num_elements]] + finally: + _safearray.SafeArrayUnaccessData(self) + + def _get_row(self, dim, indices, lowerbounds, upperbounds): + # loop over the index of dimension 'dim' + # we have to restore the index of the dimension we're looping over + restore = indices[dim] + + result = [] + obj = self._itemtype_() + pobj = byref(obj) + if dim+1 == len(indices): + # It should be faster to lock the array and get a whole row at once? + # How to calculate the pointer offset? 
+ for i in range(indices[dim], upperbounds[dim]+1): + indices[dim] = i + _safearray.SafeArrayGetElement(self, indices, pobj) + result.append(obj.value) + else: + for i in range(indices[dim], upperbounds[dim]+1): + indices[dim] = i + result.append(self._get_row(dim+1, indices, lowerbounds, upperbounds)) + indices[dim] = restore + return tuple(result) # for compatibility with pywin32. + + @Patch(POINTER(POINTER(sa_type))) + class __(object): + + @classmethod + def from_param(cls, value): + if isinstance(value, cls._type_): + return byref(value) + return byref(cls._type_.create(value, extra)) + + def __setitem__(self, index, value): + # create an LP_SAFEARRAY_... instance + pa = self._type_.create(value, extra) + # XXX Must we destroy the currently contained data? + # fill it into self + super(POINTER(POINTER(sa_type)), self).__setitem__(index, pa) + + return sa_type + + +def _ndarray_to_variant_array(value): + """ Convert an ndarray to VARIANT_dtype array """ + # Check that variant arrays are supported + if npsupport.VARIANT_dtype is None: + msg = "VARIANT ndarrays require NumPy 1.7 or newer." + raise RuntimeError(msg) + + # special cases + if numpy.issubdtype(value.dtype, npsupport.datetime64): + return _datetime64_ndarray_to_variant_array(value) + + from comtypes.automation import VARIANT + # Empty array + varr = numpy.zeros(value.shape, npsupport.VARIANT_dtype, order='F') + # Convert each value to a variant and put it in the array. + varr.flat = [VARIANT(v) for v in value.flat] + return varr + + +def _datetime64_ndarray_to_variant_array(value): + """ Convert an ndarray of datetime64 to VARIANT_dtype array """ + # The OLE automation date format is a floating point value, counting days + # since midnight 30 December 1899. Hours and minutes are represented as + # fractional days. + from comtypes.automation import VT_DATE + value = numpy.array(value, "datetime64[ns]") + value = value - npsupport.com_null_date64 + # Convert to days + value = value / numpy.timedelta64(1, 'D') + varr = numpy.zeros(value.shape, npsupport.VARIANT_dtype, order='F') + varr['vt'] = VT_DATE + varr['_']['VT_R8'].flat = value.flat + return varr diff --git a/venv/Lib/site-packages/comtypes/server/__init__.py b/venv/Lib/site-packages/comtypes/server/__init__.py new file mode 100644 index 00000000..2d3677d0 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/server/__init__.py @@ -0,0 +1,62 @@ +import comtypes.client, ctypes + +################################################################ +# Interfaces +class IClassFactory(comtypes.IUnknown): + _iid_ = comtypes.GUID("{00000001-0000-0000-C000-000000000046}") + _methods_ = [ + comtypes.STDMETHOD(comtypes.HRESULT, "CreateInstance", + [ctypes.POINTER(comtypes.IUnknown), + ctypes.POINTER(comtypes.GUID), + ctypes.POINTER(ctypes.c_void_p)]), + comtypes.STDMETHOD(comtypes.HRESULT, "LockServer", + [ctypes.c_int])] + + def CreateInstance(self, punkouter=None, interface=None, dynamic=False): + if dynamic: + if interface is not None: + raise ValueError("interface and dynamic are mutually exclusive") + realInterface = comtypes.automation.IDispatch + elif interface is None: + realInterface = comtypes.IUnknown + else: + realInterface = interface + obj = ctypes.POINTER(realInterface)() + self.__com_CreateInstance(punkouter, realInterface._iid_, ctypes.byref(obj)) + if dynamic: + return comtypes.client.dynamic.Dispatch(obj) + elif interface is None: + # An interface was not specified, so return the best. 
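            # (A client-side sketch, with names assumed for illustration:
            #      factory = comtypes.client.GetClassObject(clsid)    # an IClassFactory
            #      obj = factory.CreateInstance()                     # best interface
            #      disp = factory.CreateInstance(dynamic=True)        # late-bound IDispatch
            #  GetBestInterface below inspects the object's type information to pick
            #  the most specific wrapper it can.)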
+ return comtypes.client.GetBestInterface(obj) + # An interface was specified and obj is already that interface. + return obj + +##class IExternalConnection(IUnknown): +## _iid_ = GUID("{00000019-0000-0000-C000-000000000046}") +## _methods_ = [ +## STDMETHOD(HRESULT, "AddConnection", [c_ulong, c_ulong]), +## STDMETHOD(HRESULT, "ReleaseConnection", [c_ulong, c_ulong, c_ulong])] + +# The following code is untested: + +ACTIVEOBJECT_STRONG = 0x0 +ACTIVEOBJECT_WEAK = 0x1 + +oleaut32 = ctypes.oledll.oleaut32 + +def RegisterActiveObject(comobj, weak=True): + punk = comobj._com_pointers_[comtypes.IUnknown._iid_] + clsid = comobj._reg_clsid_ + if weak: + flags = ACTIVEOBJECT_WEAK + else: + flags = ACTIVEOBJECT_STRONG + handle = ctypes.c_ulong() + oleaut32.RegisterActiveObject(punk, + ctypes.byref(clsid), + flags, + ctypes.byref(handle)) + return handle.value + +def RevokeActiveObject(handle): + oleaut32.RevokeActiveObject(handle, None) diff --git a/venv/Lib/site-packages/comtypes/server/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/comtypes/server/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..15db0014 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/server/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/server/__pycache__/automation.cpython-36.pyc b/venv/Lib/site-packages/comtypes/server/__pycache__/automation.cpython-36.pyc new file mode 100644 index 00000000..ac8a3017 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/server/__pycache__/automation.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/server/__pycache__/connectionpoints.cpython-36.pyc b/venv/Lib/site-packages/comtypes/server/__pycache__/connectionpoints.cpython-36.pyc new file mode 100644 index 00000000..66a21d4a Binary files /dev/null and b/venv/Lib/site-packages/comtypes/server/__pycache__/connectionpoints.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/server/__pycache__/inprocserver.cpython-36.pyc b/venv/Lib/site-packages/comtypes/server/__pycache__/inprocserver.cpython-36.pyc new file mode 100644 index 00000000..6a7c362a Binary files /dev/null and b/venv/Lib/site-packages/comtypes/server/__pycache__/inprocserver.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/server/__pycache__/localserver.cpython-36.pyc b/venv/Lib/site-packages/comtypes/server/__pycache__/localserver.cpython-36.pyc new file mode 100644 index 00000000..a31939b7 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/server/__pycache__/localserver.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/server/__pycache__/register.cpython-36.pyc b/venv/Lib/site-packages/comtypes/server/__pycache__/register.cpython-36.pyc new file mode 100644 index 00000000..1c93744d Binary files /dev/null and b/venv/Lib/site-packages/comtypes/server/__pycache__/register.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/server/__pycache__/w_getopt.cpython-36.pyc b/venv/Lib/site-packages/comtypes/server/__pycache__/w_getopt.cpython-36.pyc new file mode 100644 index 00000000..f2e452ce Binary files /dev/null and b/venv/Lib/site-packages/comtypes/server/__pycache__/w_getopt.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/server/automation.py b/venv/Lib/site-packages/comtypes/server/automation.py new file mode 100644 index 00000000..ba9305f6 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/server/automation.py @@ -0,0 +1,93 @@ +import logging + +from ctypes import * +from 
comtypes.hresult import * + +from comtypes import COMObject, IUnknown +from comtypes.automation import IDispatch, IEnumVARIANT + +logger = logging.getLogger(__name__) + +# XXX When the COMCollection class is ready, insert it into __all__ +__all__ = ["VARIANTEnumerator"] + + +class VARIANTEnumerator(COMObject): + """A universal VARIANTEnumerator class. Instantiate it with a + collection of items that support the IDispatch interface.""" + _com_interfaces_ = [IEnumVARIANT] + + def __init__(self, items): + self.items = items # keep, so that we can restore our iterator (in Reset, and Clone). + self.seq = iter(self.items) + super(VARIANTEnumerator, self).__init__() + + def Next(self, this, celt, rgVar, pCeltFetched): + if not rgVar: return E_POINTER + if not pCeltFetched: pCeltFetched = [None] + pCeltFetched[0] = 0 + try: + for index in range(celt): + item = next(self.seq) + p = item.QueryInterface(IDispatch) + rgVar[index].value = p + pCeltFetched[0] += 1 + except StopIteration: + pass +## except: +## # ReportException? return E_FAIL? +## import traceback +## traceback.print_exc() + + if pCeltFetched[0] == celt: + return S_OK + return S_FALSE + + def Skip(self, this, celt): + # skip some elements. + try: + for _ in range(celt): + next(self.seq) + except StopIteration: + return S_FALSE + return S_OK + + def Reset(self, this): + self.seq = iter(self.items) + return S_OK + + # Clone not implemented + +################################################################ + +# XXX Shouldn't this be a mixin class? +# And isn't this class borked anyway? + +class COMCollection(COMObject): + """Abstract base class which implements Count, Item, and _NewEnum.""" + def __init__(self, itemtype, collection): + self.collection = collection + self.itemtype = itemtype + super(COMCollection, self).__init__() + + def _get_Item(self, this, pathname, pitem): + if not pitem: + return E_POINTER + item = self.itemtype(pathname) + return item.IUnknown_QueryInterface(None, + pointer(pitem[0]._iid_), + pitem) + + def _get_Count(self, this, pcount): + if not pcount: + return E_POINTER + pcount[0] = len(self.collection) + return S_OK + + def _get__NewEnum(self, this, penum): + if not penum: + return E_POINTER + enum = VARIANTEnumerator(self.itemtype, self.collection) + return enum.IUnknown_QueryInterface(None, + pointer(IUnknown._iid_), + penum) diff --git a/venv/Lib/site-packages/comtypes/server/connectionpoints.py b/venv/Lib/site-packages/comtypes/server/connectionpoints.py new file mode 100644 index 00000000..f34309ff --- /dev/null +++ b/venv/Lib/site-packages/comtypes/server/connectionpoints.py @@ -0,0 +1,143 @@ +from ctypes import * +from comtypes import IUnknown, COMObject, COMError +from comtypes.hresult import * +from comtypes.typeinfo import LoadRegTypeLib +from comtypes.connectionpoints import IConnectionPoint +from comtypes.automation import IDispatch + +import logging +logger = logging.getLogger(__name__) + +__all__ = ["ConnectableObjectMixin"] + +class ConnectionPointImpl(COMObject): + """This object implements a connectionpoint""" + _com_interfaces_ = [IConnectionPoint] + + def __init__(self, sink_interface, sink_typeinfo): + super(ConnectionPointImpl, self).__init__() + self._connections = {} + self._cookie = 0 + self._sink_interface = sink_interface + self._typeinfo = sink_typeinfo + + # per MSDN, all interface methods *must* be implemented, E_NOTIMPL + # is no allowed return value + + def IConnectionPoint_Advise(self, this, pUnk, pdwCookie): + if not pUnk or not pdwCookie: + return E_POINTER + 
logger.debug("Advise") + try: + ptr = pUnk.QueryInterface(self._sink_interface) + except COMError: + return CONNECT_E_CANNOTCONNECT + pdwCookie[0] = self._cookie = self._cookie + 1 + self._connections[self._cookie] = ptr + return S_OK + + def IConnectionPoint_Unadvise(self, this, dwCookie): + logger.debug("Unadvise %s", dwCookie) + try: + del self._connections[dwCookie] + except KeyError: + return CONNECT_E_NOCONNECTION + return S_OK + + def IConnectionPoint_GetConnectionPointContainer(self, this, ppCPC): + return E_NOTIMPL + + def IConnectionPoint_GetConnectionInterface(self, this, pIID): + return E_NOTIMPL + + def _call_sinks(self, name, *args, **kw): + results = [] + logger.debug("_call_sinks(%s, %s, *%s, **%s)", self, name, args, kw) + # Is it an IDispatch derived interface? Then, events have to be delivered + # via Invoke calls (even if it is a dual interface). + if hasattr(self._sink_interface, "Invoke"): + # for better performance, we could cache the dispids. + dispid = self._typeinfo.GetIDsOfNames(name)[0] + for key, p in list(self._connections.items()): + try: + result = p.Invoke(dispid, *args, **kw) + except COMError as details: + if details.hresult == -2147023174: + logger.warning("_call_sinks(%s, %s, *%s, **%s) failed; removing connection", + self, name, args, kw, + exc_info=True) + try: + del self._connections[key] + except KeyError: + pass # connection already gone + else: + logger.warning("_call_sinks(%s, %s, *%s, **%s)", self, name, args, kw, + exc_info=True) + else: + results.append(result) + else: + for p in list(self._connections.values()): + try: + result = getattr(p, name)(*args, **kw) + except COMError as details: + if details.hresult == -2147023174: + logger.warning("_call_sinks(%s, %s, *%s, **%s) failed; removing connection", + self, name, args, kw, + exc_info=True) + del self._connections[key] + else: + logger.warning("_call_sinks(%s, %s, *%s, **%s)", self, name, args, kw, + exc_info=True) + else: + results.append(result) + return results + +class ConnectableObjectMixin(object): + """Mixin which implements IConnectionPointContainer. + + Call Fire_Event(interface, methodname, *args, **kw) to fire an + event. <interface> can either be the source interface, or an + integer index into the _outgoing_interfaces_ list. + """ + def __init__(self): + super(ConnectableObjectMixin, self).__init__() + self.__connections = {} + + tlib = LoadRegTypeLib(*self._reg_typelib_) + for itf in self._outgoing_interfaces_: + typeinfo = tlib.GetTypeInfoOfGuid(itf._iid_) + self.__connections[itf] = ConnectionPointImpl(itf, typeinfo) + + def IConnectionPointContainer_EnumConnectionPoints(self, this, ppEnum): + # according to MSDN, E_NOTIMPL is specificially disallowed + # because, without typeinfo, there's no way for the caller to + # find out. + return E_NOTIMPL + + def IConnectionPointContainer_FindConnectionPoint(self, this, refiid, ppcp): + iid = refiid[0] + logger.debug("FindConnectionPoint %s", iid) + if not ppcp: + return E_POINTER + for itf in self._outgoing_interfaces_: + if itf._iid_ == iid: + # 'byref' will not work in this case, since the QueryInterface + # method implementation is called on Python directly. There's + # no C layer between which will convert the second parameter + # from byref() to pointer(). 
+ conn = self.__connections[itf] + result = conn.IUnknown_QueryInterface(None, pointer(IConnectionPoint._iid_), ppcp) + logger.debug("connectionpoint found, QI() -> %s", result) + return result + logger.debug("No connectionpoint found") + return CONNECT_E_NOCONNECTION + + def Fire_Event(self, itf, name, *args, **kw): + # Fire event 'name' with arguments *args and **kw. + # Accepts either an interface index or an interface as first argument. + # Returns a list of results. + logger.debug("Fire_Event(%s, %s, *%s, **%s)", itf, name, args, kw) + if isinstance(itf, int): + itf = self._outgoing_interfaces_[itf] + return self.__connections[itf]._call_sinks(name, *args, **kw) + diff --git a/venv/Lib/site-packages/comtypes/server/inprocserver.py b/venv/Lib/site-packages/comtypes/server/inprocserver.py new file mode 100644 index 00000000..84d1e88b --- /dev/null +++ b/venv/Lib/site-packages/comtypes/server/inprocserver.py @@ -0,0 +1,134 @@ +import ctypes +from comtypes import COMObject, GUID +from comtypes.server import IClassFactory +from comtypes.hresult import * + +import sys, winreg, logging + +logger = logging.getLogger(__name__) +_debug = logger.debug +_critical = logger.critical + +################################################################ + +class ClassFactory(COMObject): + _com_interfaces_ = [IClassFactory] + + def __init__(self, cls): + super(ClassFactory, self).__init__() + self._cls = cls + + def IClassFactory_CreateInstance(self, this, punkOuter, riid, ppv): + _debug("ClassFactory.CreateInstance(%s)", riid[0]) + result = self._cls().IUnknown_QueryInterface(None, riid, ppv) + _debug("CreateInstance() -> %s", result) + return result + + def IClassFactory_LockServer(self, this, fLock): + if fLock: + COMObject.__server__.Lock() + else: + COMObject.__server__.Unlock() + return S_OK + +# will be set by py2exe boot script 'from outside' +_clsid_to_class = {} + +def inproc_find_class(clsid): + if _clsid_to_class: + return _clsid_to_class[clsid] + + key = winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, "CLSID\\%s\\InprocServer32" % clsid) + try: + pathdir = winreg.QueryValueEx(key, "PythonPath")[0] + except: + _debug("NO path to insert") + else: + if not pathdir in sys.path: + sys.path.insert(0, str(pathdir)) + _debug("insert path %r", pathdir) + else: + _debug("Already in path %r", pathdir) + pythonclass = winreg.QueryValueEx(key, "PythonClass")[0] + parts = pythonclass.split(".") + modname = ".".join(parts[:-1]) + classname = parts[-1] + _debug("modname: %s, classname %s", modname, classname) + __import__(modname) + mod = sys.modules[modname] + result = getattr(mod, classname) + _debug("Found class %s", result) + return result + +_logging_configured = False + +def _setup_logging(clsid): + """Read from the registry, and configure the logging module. + + Currently, the handler (NTDebugHandler) is hardcoded. 
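    The registry layout consulted here is roughly the following (the concrete
    values are examples only):

        HKEY_CLASSES_ROOT\CLSID\{the-server-clsid}\Logging
            format   REG_SZ                 e.g. "%(levelname)s:%(name)s:%(message)s"
            levels   REG_SZ / REG_MULTI_SZ  e.g. "comtypes=INFO", "comtypes.server=DEBUG"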
+ """ + global _logging_configured + if _logging_configured: + return + _logging_configured = True + + try: + hkey = winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, r"CLSID\%s\Logging" % clsid) + except WindowsError: + return + from comtypes.logutil import NTDebugHandler + handler = NTDebugHandler() + try: + val, typ = winreg.QueryValueEx(hkey, "format") + formatter = logging.Formatter(val) + except: + formatter = logging.Formatter("(Thread %(thread)s):%(levelname)s:%(message)s") + handler.setFormatter(formatter) + logging.root.addHandler(handler) + try: + values, typ = winreg.QueryValueEx(hkey, "levels") + except: + return + if typ == winreg.REG_SZ: + values = [values] + elif typ != winreg.REG_MULTI_SZ: + # this is an error + return + for val in values: + name, level = val.split("=") + level = getattr(logging, level) + logging.getLogger(name).setLevel(level) + +def DllGetClassObject(rclsid, riid, ppv): + COMObject.__run_inprocserver__() + + iid = GUID.from_address(riid) + clsid = GUID.from_address(rclsid) + + if not _logging_configured: + _setup_logging(clsid) + + # This function is directly called by C code, and receives C + # integers as parameters. rclsid is a pointer to the CLSID for the + # coclass we want to be created, riid is a pointer to the + # requested interface. + try: + _debug("DllGetClassObject(clsid=%s, iid=%s)", clsid, iid) + + cls = inproc_find_class(clsid) + if not cls: + return CLASS_E_CLASSNOTAVAILABLE + + result = ClassFactory(cls).IUnknown_QueryInterface(None, ctypes.pointer(iid), ppv) + _debug("DllGetClassObject() -> %s", result) + return result + except Exception: + _critical("DllGetClassObject", exc_info=True) + return E_FAIL + +def DllCanUnloadNow(): + COMObject.__run_inprocserver__() + result = COMObject.__server__.DllCanUnloadNow() + # To avoid a memory leak when PyInitialize()/PyUninitialize() are + # called several times, we refuse to unload the dll. + return S_FALSE diff --git a/venv/Lib/site-packages/comtypes/server/localserver.py b/venv/Lib/site-packages/comtypes/server/localserver.py new file mode 100644 index 00000000..a0fc0bfe --- /dev/null +++ b/venv/Lib/site-packages/comtypes/server/localserver.py @@ -0,0 +1,68 @@ +from ctypes import * +import comtypes +from comtypes.hresult import * +from comtypes.server import IClassFactory +import logging +import queue + +logger = logging.getLogger(__name__) +_debug = logger.debug + +REGCLS_SINGLEUSE = 0 # class object only generates one instance +REGCLS_MULTIPLEUSE = 1 # same class object genereates multiple inst. 
+REGCLS_MULTI_SEPARATE = 2 # multiple use, but separate control over each +REGCLS_SUSPENDED = 4 # register it as suspended, will be activated +REGCLS_SURROGATE = 8 # must be used when a surrogate process + +def run(classes): + classobjects = [ClassFactory(cls) for cls in classes] + comtypes.COMObject.__run_localserver__(classobjects) + +class ClassFactory(comtypes.COMObject): + _com_interfaces_ = [IClassFactory] + _locks = 0 + _queue = None + regcls = REGCLS_MULTIPLEUSE + + def __init__(self, cls, *args, **kw): + super(ClassFactory, self).__init__() + self._cls = cls + self._register_class() + self._args = args + self._kw = kw + + def IUnknown_AddRef(self, this): + return 2 + + def IUnknown_Release(self, this): + return 1 + + def _register_class(self): + regcls = getattr(self._cls, "_regcls_", self.regcls) + cookie = c_ulong() + ptr = self._com_pointers_[comtypes.IUnknown._iid_] + clsctx = self._cls._reg_clsctx_ + clsctx &= ~comtypes.CLSCTX_INPROC # reset the inproc flags + oledll.ole32.CoRegisterClassObject(byref(comtypes.GUID(self._cls._reg_clsid_)), + ptr, + clsctx, + regcls, + byref(cookie)) + self.cookie = cookie + + def _revoke_class(self): + oledll.ole32.CoRevokeClassObject(self.cookie) + + def CreateInstance(self, this, punkOuter, riid, ppv): + _debug("ClassFactory.CreateInstance(%s)", riid[0]) + obj = self._cls(*self._args, **self._kw) + result = obj.IUnknown_QueryInterface(None, riid, ppv) + _debug("CreateInstance() -> %s", result) + return result + + def LockServer(self, this, fLock): + if fLock: + comtypes.COMObject.__server__.Lock() + else: + comtypes.COMObject.__server__.Unlock() + return S_OK diff --git a/venv/Lib/site-packages/comtypes/server/register.py b/venv/Lib/site-packages/comtypes/server/register.py new file mode 100644 index 00000000..522259ef --- /dev/null +++ b/venv/Lib/site-packages/comtypes/server/register.py @@ -0,0 +1,375 @@ +"""comtypes.server.register - register and unregister a COM object. + +Exports the UseCommandLine function. UseCommandLine is called with +the COM object classes that a module exposes. It parses the Windows +command line and takes the appropriate actions. +These command line options are supported: + +/regserver - register the classes with COM. +/unregserver - unregister the classes with COM. + +/nodebug - remove all logging configuration from the registry. + +/l <name>=<level> - configure the logging level for the standard Python loggind module, +this option may be used several times. + +/f <formatter> - specify the formatter string. + +Note: Registering and unregistering the objects does remove logging +entries. Configuring the logging does not change other registry +entries, so it is possible to freeze a comobject with py2exe, register +it, then configure logging afterwards to debug it, and delete the +logging config afterwards. 
+ +Sample usage: + +Register the COM object: + + python mycomobj.py /regserver + +Configure logging info: + + python mycomobj.py /l comtypes=INFO /l comtypes.server=DEBUG /f %(message)s + +Now, debug the object, and when done delete logging info: + + python mycomobj.py /nodebug +""" +import sys, os +import winreg +import logging + +import comtypes +from comtypes.typeinfo import LoadTypeLibEx, UnRegisterTypeLib, REGKIND_REGISTER +from comtypes.hresult import * +from comtypes.server import w_getopt +import comtypes.server.inprocserver +from ctypes import windll, c_ulong, c_wchar_p, WinError, sizeof, create_string_buffer + +_debug = logging.getLogger(__name__).debug + +def get_winerror(exception): + try: + return exception.winerror + except AttributeError: + return exception.errno + +# a SHDeleteKey function, will remove a registry key with all subkeys. +def _non_zero(retval, func, args): + if retval: + raise WinError(retval) +SHDeleteKey = windll.shlwapi.SHDeleteKeyW +SHDeleteKey.errcheck = _non_zero +SHDeleteKey.argtypes = c_ulong, c_wchar_p + +try: + Set = set +except NameError: + from sets import Set #as set + + +_KEYS = {winreg.HKEY_CLASSES_ROOT: "HKCR", + winreg.HKEY_LOCAL_MACHINE: "HKLM", + winreg.HKEY_CURRENT_USER: "HKCU"} + +def _explain(hkey): + return _KEYS.get(hkey, hkey) + +class Registrar(object): + """COM class registration. + + The COM class can override what this does by implementing + _register and/or _unregister class methods. These methods will be + called with the calling instance of Registrar, and so can call the + Registrars _register and _unregister methods which do the actual + work. + """ + def nodebug(self, cls): + """Delete logging entries from the registry.""" + clsid = cls._reg_clsid_ + try: + _debug('DeleteKey( %s\\CLSID\\%s\\Logging"' % \ + (_explain(winreg.HKEY_CLASSES_ROOT), clsid)) + hkey = winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, r"CLSID\%s" % clsid) + winreg.DeleteKey(hkey, "Logging") + except WindowsError as detail: + if get_winerror(detail) != 2: + raise + + def debug(self, cls, levels, format): + """Write entries in the registry to setup logging for this clsid.""" + # handlers + # format + clsid = cls._reg_clsid_ + _debug('CreateKey( %s\\CLSID\\%s\\Logging"' % \ + (_explain(winreg.HKEY_CLASSES_ROOT), clsid)) + hkey = winreg.CreateKey(winreg.HKEY_CLASSES_ROOT, r"CLSID\%s\Logging" % clsid) + for item in levels: + name, value = item.split("=") + v = getattr(logging, value) + assert isinstance(v, int) + _debug('SetValueEx(levels, %s)' % levels) + winreg.SetValueEx(hkey, "levels", None, winreg.REG_MULTI_SZ, levels) + if format: + _debug('SetValueEx(format, %s)' % format) + winreg.SetValueEx(hkey, "format", None, winreg.REG_SZ, format) + else: + _debug('DeleteValue(format)') + try: + winreg.DeleteValue(hkey, "format") + except WindowsError as detail: + if get_winerror(detail) != 2: + raise + + def register(self, cls, executable=None): + """Register the COM server class.""" + # First, we unregister the object with force=True, to force removal + # of all registry entries, even if we would not write them. + # Second, we create new entries. + # It seems ATL does the same. 
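As a rough sketch of how this registration machinery is driven (class, GUID and ProgID below are hypothetical), a server script defines the _reg_* attributes documented further down in _registry_entries() and hands its class to UseCommandLine:

    import comtypes
    from comtypes import GUID
    from comtypes.server.register import UseCommandLine

    class MyServer(comtypes.COMObject):          # hypothetical example class
        # a real server would also list _com_interfaces_ and implement them
        _reg_clsid_ = GUID("{12345678-1234-1234-1234-123456789012}")  # required
        _reg_clsctx_ = comtypes.CLSCTX_INPROC_SERVER                  # required
        _reg_progid_ = "MyLib.MyServer.1"                             # optional
        _reg_desc_ = "Hypothetical example server"                    # optional
        _reg_threading_ = "Both"                                      # optional

    if __name__ == "__main__":
        # "python myserver.py /regserver" writes the registry entries,
        # "python myserver.py /unregserver" removes them again.
        UseCommandLine(MyServer)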
+ mth = getattr(cls, "_register", None) + if mth is not None: + mth(self) + else: + self._unregister(cls, force=True) + self._register(cls, executable) + + def _register(self, cls, executable=None): + table = self._registry_entries(cls) + table.sort() + _debug("Registering %s", cls) + for hkey, subkey, valuename, value in table: + _debug ('[%s\\%s]', _explain(hkey), subkey) + _debug('%s="%s"', valuename or "@", value) + k = winreg.CreateKey(hkey, subkey) + winreg.SetValueEx(k, valuename, None, winreg.REG_SZ, str(value)) + + tlib = getattr(cls, "_reg_typelib_", None) + if tlib is not None: + if hasattr(sys, "frozendllhandle"): + dll = self._get_serverdll() + _debug("LoadTypeLibEx(%s, REGKIND_REGISTER)", dll) + LoadTypeLibEx(dll, REGKIND_REGISTER) + else: + if executable: + path = executable + elif hasattr(sys, "frozen"): + path = sys.executable + else: + path = cls._typelib_path_ + _debug("LoadTypeLibEx(%s, REGKIND_REGISTER)", path) + LoadTypeLibEx(path, REGKIND_REGISTER) + _debug("Done") + + def unregister(self, cls, force=False): + """Unregister the COM server class.""" + mth = getattr(cls, "_unregister", None) + if mth is not None: + mth(self) + else: + self._unregister(cls, force=force) + + def _unregister(self, cls, force=False): + # If force==False, we only remove those entries that we + # actually would have written. It seems ATL does the same. + table = [t[:2] for t in self._registry_entries(cls)] + # only unique entries + table = list(set(table)) + table.sort() + table.reverse() + _debug("Unregister %s", cls) + for hkey, subkey in table: + try: + if force: + _debug("SHDeleteKey %s\\%s", _explain(hkey), subkey) + SHDeleteKey(hkey, subkey) + else: + _debug("DeleteKey %s\\%s", _explain(hkey), subkey) + winreg.DeleteKey(hkey, subkey) + except WindowsError as detail: + if get_winerror(detail) != 2: + raise + tlib = getattr(cls, "_reg_typelib_", None) + if tlib is not None: + try: + _debug("UnRegisterTypeLib(%s, %s, %s)", *tlib) + UnRegisterTypeLib(*tlib) + except WindowsError as detail: + if not get_winerror(detail) in (TYPE_E_REGISTRYACCESS, TYPE_E_CANTLOADLIBRARY): + raise + _debug("Done") + + def _get_serverdll(self): + """Return the pathname of the dll hosting the COM object.""" + handle = getattr(sys, "frozendllhandle", None) + if handle is not None: + buf = create_string_buffer(260) + windll.kernel32.GetModuleFileNameA(handle, buf, sizeof(buf)) + return buf[:] + import _ctypes + return _ctypes.__file__ + + def _get_full_classname(self, cls): + """Return <modulename>.<classname> for 'cls'.""" + modname = cls.__module__ + if modname == "__main__": + modname = os.path.splitext(os.path.basename(sys.argv[0]))[0] + return "%s.%s" % (modname, cls.__name__) + + def _get_pythonpath(self, cls): + """Return the filesystem path of the module containing 'cls'.""" + modname = cls.__module__ + dirname = os.path.dirname(sys.modules[modname].__file__) + return os.path.abspath(dirname) + + def _registry_entries(self, cls): + """Return a sequence of tuples containing registry entries. + + The tuples must be (key, subkey, name, value). + + Required entries: + ================= + _reg_clsid_ - a string or GUID instance + _reg_clsctx_ - server type(s) to register + + Optional entries: + ================= + _reg_desc_ - a string + _reg_progid_ - a string naming the progid, typically 'MyServer.MyObject.1' + _reg_novers_progid_ - version independend progid, typically 'MyServer.MyObject' + _reg_typelib_ - an tuple (libid, majorversion, minorversion) specifying a typelib. 
+ _reg_threading_ - a string specifying the threading model + + Note that the first part of the progid string is typically the + IDL library name of the type library containing the coclass. + """ + HKCR = winreg.HKEY_CLASSES_ROOT + + # table format: rootkey, subkey, valuename, value + table = [] + append = lambda *args: table.append(args) + + # basic entry - names the comobject + reg_clsid = str(cls._reg_clsid_) # that's the only required attribute for registration + reg_desc = getattr(cls, "_reg_desc_", "") + if not reg_desc: + # Simple minded algorithm to construct a description from + # the progid: + reg_desc = getattr(cls, "_reg_novers_progid_", "") or \ + getattr(cls, "_reg_progid_", "") + if reg_desc: + reg_desc = reg_desc.replace(".", " ") + append(HKCR, "CLSID\\%s" % reg_clsid, "", reg_desc) + + reg_progid = getattr(cls, "_reg_progid_", None) + if reg_progid: + # for ProgIDFromCLSID: + append(HKCR, "CLSID\\%s\\ProgID" % reg_clsid, "", reg_progid) # 1 + + # for CLSIDFromProgID + if reg_desc: + append(HKCR, reg_progid, "", reg_desc) # 2 + append(HKCR, "%s\\CLSID" % reg_progid, "", reg_clsid) # 3 + + reg_novers_progid = getattr(cls, "_reg_novers_progid_", None) + if reg_novers_progid: + append(HKCR, "CLSID\\%s\\VersionIndependentProgID" % reg_clsid, # 1a + "", reg_novers_progid) + if reg_desc: + append(HKCR, reg_novers_progid, "", reg_desc) # 2a + append(HKCR, "%s\\CurVer" % reg_novers_progid, "", reg_progid) # + append(HKCR, "%s\\CLSID" % reg_novers_progid, "", reg_clsid) # 3a + + clsctx = getattr(cls, "_reg_clsctx_", 0) + + if clsctx & comtypes.CLSCTX_LOCAL_SERVER \ + and not hasattr(sys, "frozendllhandle"): + exe = sys.executable + if " " in exe: + exe = '"%s"' % exe + if not hasattr(sys, "frozen"): + if not __debug__: + exe = "%s -O" % exe + script = os.path.abspath(sys.modules[cls.__module__].__file__) + if " " in script: + script = '"%s"' % script + append(HKCR, "CLSID\\%s\\LocalServer32" % reg_clsid, "", "%s %s" % (exe, script)) + else: + append(HKCR, "CLSID\\%s\\LocalServer32" % reg_clsid, "", "%s" % exe) + + # Register InprocServer32 only when run from script or from + # py2exe dll server, not from py2exe exe server. + if clsctx & comtypes.CLSCTX_INPROC_SERVER \ + and getattr(sys, "frozen", None) in (None, "dll"): + append(HKCR, "CLSID\\%s\\InprocServer32" % reg_clsid, + "", self._get_serverdll()) + # only for non-frozen inproc servers the PythonPath/PythonClass is needed. 
+ if not hasattr(sys, "frozendllhandle") \ + or not comtypes.server.inprocserver._clsid_to_class: + append(HKCR, "CLSID\\%s\\InprocServer32" % reg_clsid, + "PythonClass", self._get_full_classname(cls)) + append(HKCR, "CLSID\\%s\\InprocServer32" % reg_clsid, + "PythonPath", self._get_pythonpath(cls)) + + reg_threading = getattr(cls, "_reg_threading_", None) + if reg_threading is not None: + append(HKCR, "CLSID\\%s\\InprocServer32" % reg_clsid, + "ThreadingModel", reg_threading) + + reg_tlib = getattr(cls, "_reg_typelib_", None) + if reg_tlib is not None: + append(HKCR, "CLSID\\%s\\Typelib" % reg_clsid, "", reg_tlib[0]) + + return table + +################################################################ + +def register(cls): + Registrar().register(cls) + +def unregister(cls): + Registrar().unregister(cls) + +def UseCommandLine(*classes): + usage = """Usage: %s [-regserver] [-unregserver] [-nodebug] [-f logformat] [-l loggername=level]""" % sys.argv[0] + opts, args = w_getopt.w_getopt(sys.argv[1:], + "regserver unregserver embedding l: f: nodebug") + if not opts: + sys.stderr.write(usage + "\n") + return 0 # nothing for us to do + + levels = [] + format = None + nodebug = False + runit = False + for option, value in opts: + if option == "regserver": + for cls in classes: + register(cls) + elif option == "unregserver": + for cls in classes: + unregister(cls) + elif option == "embedding": + runit = True + elif option == "f": + format = value + elif option == "l": + levels.append(value) + elif option == "nodebug": + nodebug = True + + if levels or format is not None: + for cls in classes: + Registrar().debug(cls, levels, format) + if nodebug: + for cls in classes: + Registrar().nodebug(cls) + + if runit: + import comtypes.server.localserver + comtypes.server.localserver.run(classes) + + return 1 # we have done something + +if __name__ == "__main__": + UseCommandLine() diff --git a/venv/Lib/site-packages/comtypes/server/w_getopt.py b/venv/Lib/site-packages/comtypes/server/w_getopt.py new file mode 100644 index 00000000..1a3ffbfa --- /dev/null +++ b/venv/Lib/site-packages/comtypes/server/w_getopt.py @@ -0,0 +1,75 @@ +class GetoptError(Exception): + pass + +def w_getopt(args, options): + """A getopt for Windows. + + Options may start with either '-' or '/', the option names may + have more than one letter (/tlb or -RegServer), and option names + are case insensitive. + + Returns two elements, just as getopt.getopt. The first is a list + of (option, value) pairs in the same way getopt.getopt does, but + there is no '-' or '/' prefix to the option name, and the option + name is always lower case. The second is the list of arguments + which do not belong to an option. + + Different from getopt.getopt, a single argument not belonging to an option + does not terminate parsing. 
+ """ + opts = [] + arguments = [] + while args: + if args[0][:1] in "/-": + arg = args[0][1:] # strip the '-' or '/' + arg = arg.lower() + + if arg + ':' in options: + try: + opts.append((arg, args[1])) + except IndexError: + raise GetoptError("option '%s' requires an argument" % args[0]) + args = args[1:] + elif arg in options: + opts.append((arg, '')) + else: + raise GetoptError("invalid option '%s'" % args[0]) + args = args[1:] + else: + arguments.append(args[0]) + args = args[1:] + + return opts, arguments + +if __debug__: + if __name__ == "__main__": + import unittest + + class TestCase(unittest.TestCase): + def test_1(self): + args = "-embedding spam /RegServer foo /UnregSERVER blabla".split() + opts, args = w_getopt(args, + "regserver unregserver embedding".split()) + self.assertEqual(opts, + [('embedding', ''), + ('regserver', ''), + ('unregserver', '')]) + self.assertEqual(args, ["spam", "foo", "blabla"]) + + def test_2(self): + args = "/TLB Hello.Tlb HELLO.idl".split() + opts, args = w_getopt(args, ["tlb:"]) + self.assertEqual(opts, [('tlb', 'Hello.Tlb')]) + self.assertEqual(args, ['HELLO.idl']) + + def test_3(self): + # Invalid option + self.assertRaises(GetoptError, w_getopt, + "/TLIB hello.tlb hello.idl".split(), ["tlb:"]) + + def test_4(self): + # Missing argument + self.assertRaises(GetoptError, w_getopt, + "/TLB".split(), ["tlb:"]) + + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/shelllink.py b/venv/Lib/site-packages/comtypes/shelllink.py new file mode 100644 index 00000000..cff6f423 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/shelllink.py @@ -0,0 +1,217 @@ +from ctypes import * +from ctypes.wintypes import DWORD, WIN32_FIND_DATAA, WIN32_FIND_DATAW, MAX_PATH +from comtypes import IUnknown, GUID, COMMETHOD, HRESULT, CoClass + +# for GetPath +SLGP_SHORTPATH = 0x1 +SLGP_UNCPRIORITY = 0x2 +SLGP_RAWPATH = 0x4 + +# for SetShowCmd, GetShowCmd +##SW_SHOWNORMAL +##SW_SHOWMAXIMIZED +##SW_SHOWMINNOACTIVE + + +# for Resolve +##SLR_INVOKE_MSI +##SLR_NOLINKINFO +##SLR_NO_UI +##SLR_NOUPDATE +##SLR_NOSEARCH +##SLR_NOTRACK +##SLR_UPDATE + +# fake these... 
+ITEMIDLIST = c_int +LPITEMIDLIST = LPCITEMIDLIST = POINTER(ITEMIDLIST) + +class IShellLinkA(IUnknown): + _iid_ = GUID('{000214EE-0000-0000-C000-000000000046}') + _methods_ = [ + COMMETHOD([], HRESULT, 'GetPath', + ( ['in', 'out'], c_char_p, 'pszFile' ), + ( ['in'], c_int, 'cchMaxPath' ), + ( ['in', 'out'], POINTER(WIN32_FIND_DATAA), 'pfd' ), + ( ['in'], DWORD, 'fFlags' )), + COMMETHOD([], HRESULT, 'GetIDList', + ( ['retval', 'out'], POINTER(LPITEMIDLIST), 'ppidl' )), + COMMETHOD([], HRESULT, 'SetIDList', + ( ['in'], LPCITEMIDLIST, 'pidl' )), + COMMETHOD([], HRESULT, 'GetDescription', + ( ['in', 'out'], c_char_p, 'pszName' ), + ( ['in'], c_int, 'cchMaxName' )), + COMMETHOD([], HRESULT, 'SetDescription', + ( ['in'], c_char_p, 'pszName' )), + COMMETHOD([], HRESULT, 'GetWorkingDirectory', + ( ['in', 'out'], c_char_p, 'pszDir' ), + ( ['in'], c_int, 'cchMaxPath' )), + COMMETHOD([], HRESULT, 'SetWorkingDirectory', + ( ['in'], c_char_p, 'pszDir' )), + COMMETHOD([], HRESULT, 'GetArguments', + ( ['in', 'out'], c_char_p, 'pszArgs' ), + ( ['in'], c_int, 'cchMaxPath' )), + COMMETHOD([], HRESULT, 'SetArguments', + ( ['in'], c_char_p, 'pszArgs' )), + COMMETHOD(['propget'], HRESULT, 'Hotkey', + ( ['retval', 'out'], POINTER(c_short), 'pwHotkey' )), + COMMETHOD(['propput'], HRESULT, 'Hotkey', + ( ['in'], c_short, 'pwHotkey' )), + COMMETHOD(['propget'], HRESULT, 'ShowCmd', + ( ['retval', 'out'], POINTER(c_int), 'piShowCmd' )), + COMMETHOD(['propput'], HRESULT, 'ShowCmd', + ( ['in'], c_int, 'piShowCmd' )), + COMMETHOD([], HRESULT, 'GetIconLocation', + ( ['in', 'out'], c_char_p, 'pszIconPath' ), + ( ['in'], c_int, 'cchIconPath' ), + ( ['in', 'out'], POINTER(c_int), 'piIcon' )), + COMMETHOD([], HRESULT, 'SetIconLocation', + ( ['in'], c_char_p, 'pszIconPath' ), + ( ['in'], c_int, 'iIcon' )), + COMMETHOD([], HRESULT, 'SetRelativePath', + ( ['in'], c_char_p, 'pszPathRel' ), + ( ['in'], DWORD, 'dwReserved' )), + COMMETHOD([], HRESULT, 'Resolve', + ( ['in'], c_int, 'hwnd' ), + ( ['in'], DWORD, 'fFlags' )), + COMMETHOD([], HRESULT, 'SetPath', + ( ['in'], c_char_p, 'pszFile' )), + ] + + def GetPath(self, flags=SLGP_SHORTPATH): + buf = create_string_buffer(MAX_PATH) + # We're not interested in WIN32_FIND_DATA + self.__com_GetPath(buf, MAX_PATH, None, flags) + return buf.value + + def GetDescription(self): + buf = create_string_buffer(1024) + self.__com_GetDescription(buf, 1024) + return buf.value + + def GetWorkingDirectory(self): + buf = create_string_buffer(MAX_PATH) + self.__com_GetWorkingDirectory(buf, MAX_PATH) + return buf.value + + def GetArguments(self): + buf = create_string_buffer(1024) + self.__com_GetArguments(buf, 1024) + return buf.value + + def GetIconLocation(self): + iIcon = c_int() + buf = create_string_buffer(MAX_PATH) + self.__com_GetIconLocation(buf, MAX_PATH, byref(iIcon)) + return buf.value, iIcon.value + +class IShellLinkW(IUnknown): + _iid_ = GUID('{000214F9-0000-0000-C000-000000000046}') + _methods_ = [ + COMMETHOD([], HRESULT, 'GetPath', + ( ['in', 'out'], c_wchar_p, 'pszFile' ), + ( ['in'], c_int, 'cchMaxPath' ), + ( ['in', 'out'], POINTER(WIN32_FIND_DATAW), 'pfd' ), + ( ['in'], DWORD, 'fFlags' )), + COMMETHOD([], HRESULT, 'GetIDList', + ( ['retval', 'out'], POINTER(LPITEMIDLIST), 'ppidl' )), + COMMETHOD([], HRESULT, 'SetIDList', + ( ['in'], LPCITEMIDLIST, 'pidl' )), + COMMETHOD([], HRESULT, 'GetDescription', + ( ['in', 'out'], c_wchar_p, 'pszName' ), + ( ['in'], c_int, 'cchMaxName' )), + COMMETHOD([], HRESULT, 'SetDescription', + ( ['in'], c_wchar_p, 'pszName' )), + COMMETHOD([], HRESULT, 
'GetWorkingDirectory', + ( ['in', 'out'], c_wchar_p, 'pszDir' ), + ( ['in'], c_int, 'cchMaxPath' )), + COMMETHOD([], HRESULT, 'SetWorkingDirectory', + ( ['in'], c_wchar_p, 'pszDir' )), + COMMETHOD([], HRESULT, 'GetArguments', + ( ['in', 'out'], c_wchar_p, 'pszArgs' ), + ( ['in'], c_int, 'cchMaxPath' )), + COMMETHOD([], HRESULT, 'SetArguments', + ( ['in'], c_wchar_p, 'pszArgs' )), + COMMETHOD(['propget'], HRESULT, 'Hotkey', + ( ['retval', 'out'], POINTER(c_short), 'pwHotkey' )), + COMMETHOD(['propput'], HRESULT, 'Hotkey', + ( ['in'], c_short, 'pwHotkey' )), + COMMETHOD(['propget'], HRESULT, 'ShowCmd', + ( ['retval', 'out'], POINTER(c_int), 'piShowCmd' )), + COMMETHOD(['propput'], HRESULT, 'ShowCmd', + ( ['in'], c_int, 'piShowCmd' )), + COMMETHOD([], HRESULT, 'GetIconLocation', + ( ['in', 'out'], c_wchar_p, 'pszIconPath' ), + ( ['in'], c_int, 'cchIconPath' ), + ( ['in', 'out'], POINTER(c_int), 'piIcon' )), + COMMETHOD([], HRESULT, 'SetIconLocation', + ( ['in'], c_wchar_p, 'pszIconPath' ), + ( ['in'], c_int, 'iIcon' )), + COMMETHOD([], HRESULT, 'SetRelativePath', + ( ['in'], c_wchar_p, 'pszPathRel' ), + ( ['in'], DWORD, 'dwReserved' )), + COMMETHOD([], HRESULT, 'Resolve', + ( ['in'], c_int, 'hwnd' ), + ( ['in'], DWORD, 'fFlags' )), + COMMETHOD([], HRESULT, 'SetPath', + ( ['in'], c_wchar_p, 'pszFile' )), + ] + + def GetPath(self, flags=SLGP_SHORTPATH): + buf = create_unicode_buffer(MAX_PATH) + # We're not interested in WIN32_FIND_DATA + self.__com_GetPath(buf, MAX_PATH, None, flags) + return buf.value + + def GetDescription(self): + buf = create_unicode_buffer(1024) + self.__com_GetDescription(buf, 1024) + return buf.value + + def GetWorkingDirectory(self): + buf = create_unicode_buffer(MAX_PATH) + self.__com_GetWorkingDirectory(buf, MAX_PATH) + return buf.value + + def GetArguments(self): + buf = create_unicode_buffer(1024) + self.__com_GetArguments(buf, 1024) + return buf.value + + def GetIconLocation(self): + iIcon = c_int() + buf = create_unicode_buffer(MAX_PATH) + self.__com_GetIconLocation(buf, MAX_PATH, byref(iIcon)) + return buf.value, iIcon.value + +class ShellLink(CoClass): + 'ShellLink class' + _reg_clsid_ = GUID('{00021401-0000-0000-C000-000000000046}') + _idlflags_ = [] + _com_interfaces_ = [IShellLinkW, IShellLinkA] + + +if __name__ == "__main__": + + import sys + import comtypes + from comtypes.client import CreateObject + from comtypes.persist import IPersistFile + + + + shortcut = CreateObject(ShellLink) + print(shortcut) + ##help(shortcut) + + shortcut.SetPath(sys.executable) + + shortcut.SetDescription("Python %s" % sys.version) + shortcut.SetIconLocation(sys.executable, 1) + + print(shortcut.GetPath(2)) + print(shortcut.GetIconLocation()) + + pf = shortcut.QueryInterface(IPersistFile) + pf.Save("foo.lnk", True) + print(pf.GetCurFile()) diff --git a/venv/Lib/site-packages/comtypes/test/TestComServer.idl b/venv/Lib/site-packages/comtypes/test/TestComServer.idl new file mode 100644 index 00000000..439f7fd3 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/TestComServer.idl @@ -0,0 +1,101 @@ +/* +b30428b4-cc67-48ba-81db-4c6a6b3db1a3 +*/ + +/* + + REMEMBER TO COMPILE A NEW .TLB FILE WHEN THIS CHANGES, + AND ALSO TO REMOVE THE comtypes\gen DIRECTORY TO DELETE + THE TYPELIB WRAPPERS! + The TestServer.py should also be registered again. 
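The shelllink.py demo above only saves a new link; loading an existing one goes through the same IPersistFile interface. A hedged sketch, where the .lnk path is a placeholder and the Load() call assumes the standard IPersistFile::Load(filename, mode) signature:

    from comtypes.client import CreateObject
    from comtypes.persist import IPersistFile
    from comtypes.shelllink import ShellLink, IShellLinkW, SLGP_RAWPATH

    link = CreateObject(ShellLink)
    pf = link.QueryInterface(IPersistFile)
    pf.Load("existing.lnk", 0)                  # placeholder path, default mode
    shortcut = link.QueryInterface(IShellLinkW)
    print(shortcut.GetPath(SLGP_RAWPATH))
    print(shortcut.GetArguments())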
+ +*/ + +import "oaidl.idl"; +import "ocidl.idl"; + +[ + object, + oleautomation, + uuid(f0a241e2-25d1-4f6d-9461-c67bf262779f), + helpstring("A custom event interface") + ] +interface ITestComServerEvents : IUnknown { + [id(10)] + HRESULT EvalStarted([in] BSTR what); + + [id(11)] + HRESULT EvalCompleted([in] BSTR what, [in] VARIANT result); +}; + +[ + object, + /* + the oleautomation flag enables universal marshalling on non-dispatch + interfaces. See Don Box, Page 220. + */ + oleautomation, + uuid(58955c76-60a9-4eeb-8b8a-8f92e90d0fe7), + helpstring("ITestComServer interface") + ] +interface ITestComServer : IDispatch { + [propget, id(10), helpstring("returns the id of the server")] + HRESULT id([out, retval] UINT *pid); + + [propget, id(11), helpstring("the name of the server")] + HRESULT name([out, retval] BSTR *pname); + + [propput, id(11), helpstring("the name of the server")] + HRESULT name([in] BSTR name); + + [id(12), helpstring("a method that receives an BSTR [in] parameter")] + HRESULT SetName([in] BSTR name); + + [id(13), helpstring("evaluate an expression and return the result")] + HRESULT eval([in] BSTR what, [out, retval] VARIANT *presult); + + /* Some methods that use defaultvalues */ + [id(14)] + HRESULT do_cy([in, defaultvalue(32.78)] CURRENCY *value); + + [id(15)] + HRESULT do_date([in, defaultvalue(32)] DATE *value); + + [id(16), helpstring("execute a statement")] + HRESULT Exec([in] BSTR what); + + [id(17), helpstring("execute a statement")] + HRESULT Exec2([in] BSTR what); + + [id(18), helpstring("a method with [in] and [out] args in mixed order")] + HRESULT MixedInOut([in] int a, [out] int *b, [in] int c, [out] int *d); +}; + +[ + uuid(5a3e1d1d-947a-44ac-9b03-5c37d5f5fffc), + version(1.0), + helpstring("TestComServer 1.0 Type library") + ] +library TestComServerLib +{ + importlib("stdole2.tlb"); + + typedef + [ + uuid(086b7f11-aed0-4de0-b77a-f1998371da83) + ] + struct MYCOLOR { + double red; + double green; + double blue; + } MYCOLOR; + + [ + uuid(1fca61d1-a1a6-464c-b3a8-e9508b4ac8f7), + helpstring("TestComServer class object") + ] + coclass TestComServer { + [default] interface ITestComServer; + [default, source] interface ITestComServerEvents; + }; +}; diff --git a/venv/Lib/site-packages/comtypes/test/TestComServer.py b/venv/Lib/site-packages/comtypes/test/TestComServer.py new file mode 100644 index 00000000..50d9aacf --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/TestComServer.py @@ -0,0 +1,150 @@ +import sys, os +import logging +logging.basicConfig() +##logging.basicConfig(level=logging.DEBUG) +##logger = logging.getLogger(__name__) + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), r"..\.."))) + +import ctypes +import comtypes +from comtypes.hresult import * +import comtypes.client +import comtypes.errorinfo +import comtypes.server +import comtypes.server.connectionpoints +import comtypes.typeinfo + +################################################################ + +# Create the wrapper in the comtypes.gen package, it will be named +# TestComServerLib; the name is derived from the 'library ' statement +# in the IDL file +if not hasattr(sys, "frozen"): + # pathname of the type library file + tlbfile = os.path.join(os.path.dirname(__file__), "TestComServer.tlb") + # if running as frozen app (dll or exe), the wrapper should be in + # the library archive, so we don't need to generate it. 
+ comtypes.client.GetModule(tlbfile) + +# Import the wrapper +from comtypes.gen import TestComServerLib + +################################################################ + +# Implement the CoClass. Use the coclass from the wrapper as base +# class, and use DualDispMixin as base class which provides default +# implementations of IDispatch, IProvideClassInfo, IProvideClassInfo2 +# interfaces. ISupportErrorInfo is implemented by the COMObject base +# class. +class TestComServer( + TestComServerLib.TestComServer, # the coclass from the typelib wrapper + comtypes.server.connectionpoints.ConnectableObjectMixin, + ): + + # The default interface from the typelib MUST be the first + # interface, other interfaces can follow + + _com_interfaces_ = TestComServerLib.TestComServer._com_interfaces_ + \ + [comtypes.typeinfo.IProvideClassInfo2, + comtypes.errorinfo.ISupportErrorInfo, + comtypes.connectionpoints.IConnectionPointContainer, + ] + + # registry entries + _reg_threading_ = "Both" + _reg_progid_ = "TestComServerLib.TestComServer.1" + _reg_novers_progid_ = "TestComServerLib.TestComServer" + _reg_desc_ = "comtypes COM server sample for testing" + _reg_clsctx_ = comtypes.CLSCTX_INPROC_SERVER | comtypes.CLSCTX_LOCAL_SERVER + + ################################ + # ITestComServer methods + + def ITestComServer__get_array(self, this, parray): + # Hm, why is assignment to value needed? + + # these leak +## parray[0].value = (1, "2", None, 3.14) +## parray[0].value = (1, "2", None) + +## parray[0].value = ((1, 2, 3), (4, 5, 6), (7, 8, 9)) +## parray[0].value = (1,), (4,) + +## parray[0].value = (), () +## parray[0].value = (), 0 + + # leakage +## parray[0].value = (((127900.0, None, 2620), +## (127875.0, None, 2335), +## (127675.0, 1071, None)), +## 127800.0) + + # reported *no* leakage, but leaks anyway +## parray[0].value = ((128000.0, None, 2576), +## (127975.0, None, 1923), +## (127950.0, None, 1734)) + + # these don't leak +## parray[0].value = (1, 2, 3) +## parray[0].value = (1, 2, None) +## parray[0].value = (1, 3.14) +## parray[0].value = [1, "(1, 2, 3)"] +## parray[0].value = (1, "2") +## parray[0].value = [1, "2"] +## parray[0].value = (None, None, None) + +## parray[0].value = (), + + return S_OK + + def ITestComServer_eval(self, this, what, presult): + self.Fire_Event(0, "EvalStarted", what) + presult[0].value = eval(what) + self.Fire_Event(0, "EvalCompleted", what, presult[0].value) + return S_OK + + def ITestComServer__get_id(self, this, pid): + pid[0] = id(self) + return S_OK + + def ITestComServer_Exec(self, this, what): + exec(what) + return S_OK + + def ITestComServer_Exec2(self, what): + exec(what) + + _name = "spam, spam, spam" + + def _get_name(self): + return self._name + + def ITestComServer__set_name(self, this, name): + self._name = name + return S_OK + +## def ITestComServer_SetName(self, this, name): +## self._name = name +## return S_OK + + def ITestComServer_sEtNaMe(self, this, name): + # the method is spelled in a funny way to check case + # insensitivity when implementing COM methods. 
+ self._name = name + return S_OK + +## [id(18), helpstring("a method with [in] and [out] args in mixed order")] +## HRESULT MixedInOut([in] int a, [out] int *b, [in] int c, [out] int *d); + def MixedInOut(self, a, c): + return a+1, c+1 + +if __name__ == "__main__": + try: + from comtypes.server.register import UseCommandLine +## logging.basicConfig(level=logging.DEBUG) + UseCommandLine(TestComServer) + except Exception: + import traceback + traceback.print_exc() + input() diff --git a/venv/Lib/site-packages/comtypes/test/TestComServer.tlb b/venv/Lib/site-packages/comtypes/test/TestComServer.tlb new file mode 100644 index 00000000..07fc7c91 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/TestComServer.tlb differ diff --git a/venv/Lib/site-packages/comtypes/test/TestDispServer.idl b/venv/Lib/site-packages/comtypes/test/TestDispServer.idl new file mode 100644 index 00000000..91845c85 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/TestDispServer.idl @@ -0,0 +1,79 @@ +/* + 2882fa40-2d69-4880-8073-e81fa29e1785 + 7ae4b0e3-5d92-4ab1-b5d0-2a95c1c3ba73 + f557bf87-3e3f-4c73-9bc1-7d633d83714b +*/ + +import "oaidl.idl"; +import "ocidl.idl"; + +[ + uuid(3b3b2a10-7fef-4bcc-90fe-43a221162b1b), + helpstring("A custom event interface") + ] +dispinterface DTestDispServerEvents { + properties: + + methods: + [id(10)] + void EvalStarted([in] BSTR what); + + [id(11)] + void EvalCompleted([in] BSTR what, [in] VARIANT result); +}; + +[ + uuid(d44d11ba-aa1f-4e93-8f5a-8fa0a4715241), + helpstring("DTestDispServer interface") + ] +dispinterface DTestDispServer { + properties: + [readonly, id(10), helpstring("the id of the server")] + UINT id; + + [id(11), helpstring("the name of the server")] + BSTR name; + + methods: + + [id(12), helpstring("a method that receives an BSTR [in] parameter")] + void SetName([in] BSTR name); + + [id(13), helpstring("evaluate an expression and return the result")] + VARIANT eval([in] BSTR what); + + [id(14), helpstring("evaluate an expression and return the result")] + VARIANT eval2([in] BSTR what); + + [id(16), helpstring("execute a statement")] + void Exec([in] BSTR what); + + [id(17), helpstring("execute a statement")] + void Exec2([in] BSTR what); + + /* Some methods that use defaultvalues */ + [id(100)] + void do_cy([in, defaultvalue(32.78)] CURRENCY *value); + + [id(101)] + void do_date([in, defaultvalue(32)] DATE *value); +}; + +[ + uuid(6baa1c79-4ba0-47f2-9ad7-d2ffb1c0f3e3), + version(1.0), + helpstring("TestDispServer 1.0 Type library") + ] +library TestDispServerLib +{ + importlib("stdole2.tlb"); + + [ + uuid(bb2aba53-9d42-435b-acc3-ae2c274517b0), + helpstring("TestDispServer class object") + ] + coclass TestDispServer { + [default] dispinterface DTestDispServer; + [default, source] dispinterface DTestDispServerEvents; + }; +}; diff --git a/venv/Lib/site-packages/comtypes/test/TestDispServer.py b/venv/Lib/site-packages/comtypes/test/TestDispServer.py new file mode 100644 index 00000000..cab7b2e9 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/TestDispServer.py @@ -0,0 +1,103 @@ +import sys, os +import logging +logging.basicConfig() +##logging.basicConfig(level=logging.DEBUG) +##logger = logging.getLogger(__name__) + +# Add comtypes to sys.path (if this is run from a SVN checkout) +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), r"..\.."))) + +import comtypes +from comtypes.hresult import S_OK +import comtypes.server.connectionpoints + +################################################################ + +# Create the 
wrapper in the comtypes.gen package, it will be named +# TestComServerLib; the name is derived from the 'library ' statement +# in the IDL file +if not hasattr(sys, "frozen"): + import comtypes.client + # pathname of the type library file + tlbfile = os.path.join(os.path.dirname(__file__), "TestDispServer.tlb") + # if running as frozen app (dll or exe), the wrapper should be in + # the library archive, so we don't need to generate it. + comtypes.client.GetModule(tlbfile) + +# Import the wrapper +from comtypes.gen import TestDispServerLib + +################################################################ + +# Implement the CoClass by defining a subclass of the +# TestDispServerLib.TestDispServer class in the wrapper file. The +# COMObject base class provides default implementations of the +# IUnknown, IDispatch, IPersist, IProvideClassInfo, +# IProvideClassInfo2, and ISupportErrorInfo interfaces. +# +# The ConnectableObjectMixin class provides connectionpoints (events). +class TestDispServer( + TestDispServerLib.TestDispServer, # the coclass from the typelib wrapper + comtypes.server.connectionpoints.ConnectableObjectMixin, + ): + + # The default interface from the typelib MUST be the first + # interface, other interfaces can follow + + _com_interfaces_ = TestDispServerLib.TestDispServer._com_interfaces_ + \ + [comtypes.connectionpoints.IConnectionPointContainer] + + # registry entries + _reg_threading_ = "Both" + _reg_progid_ = "TestDispServerLib.TestDispServer.1" + _reg_novers_progid_ = "TestDispServerLib.TestDispServer" + _reg_desc_ = "comtypes COM server sample for testing" + _reg_clsctx_ = comtypes.CLSCTX_INPROC_SERVER | comtypes.CLSCTX_LOCAL_SERVER + + ################################ + # DTestDispServer methods + + def DTestDispServer_eval(self, this, expr, presult): + self.Fire_Event(0, "EvalStarted", expr) + # The following two are equivalent, but the former is more generic: + presult[0] = eval(expr) + ##presult[0].value = eval(expr) + self.Fire_Event(0, "EvalCompleted", expr, presult[0].value) + return S_OK + + def DTestDispServer_eval2(self, expr): + self.Fire_Event(0, "EvalStarted", expr) + result = eval(expr) + self.Fire_Event(0, "EvalCompleted", expr, result) + return result + + def DTestDispServer__get_id(self, this, pid): + pid[0] = id(self) + return S_OK + + def DTestDispServer_Exec(self, this, what): + exec(what) + return S_OK + + def DTestDispServer_Exec2(self, what): + exec(what) + + _name = "spam, spam, spam" + + # Implementation of the DTestDispServer::Name propget + def DTestDispServer__get_name(self, this, pname): + pname[0] = self._name + return S_OK + + # Implementation of the DTestDispServer::Name propput + def DTestDispServer__set_name(self, this, name): + self._name = name + return S_OK + + # Implementation of the DTestDispServer::SetName method + def DTestDispServer_sEtNaMe(self, name): + self._name = name + +if __name__ == "__main__": + from comtypes.server.register import UseCommandLine + UseCommandLine(TestDispServer) diff --git a/venv/Lib/site-packages/comtypes/test/TestDispServer.tlb b/venv/Lib/site-packages/comtypes/test/TestDispServer.tlb new file mode 100644 index 00000000..941894d9 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/TestDispServer.tlb differ diff --git a/venv/Lib/site-packages/comtypes/test/__init__.py b/venv/Lib/site-packages/comtypes/test/__init__.py new file mode 100644 index 00000000..88a659c4 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/__init__.py @@ -0,0 +1,240 @@ +# comtypes.test package. 
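On the client side, the events that TestDispServer fires through Fire_Event() above can be observed with the helpers in comtypes.client. A hedged sketch, assuming the server has already been registered:

    from comtypes.client import CreateObject, ShowEvents, PumpEvents

    server = CreateObject("TestDispServerLib.TestDispServer")
    connection = ShowEvents(server)     # prints each event as it arrives
    print(server.eval("6 * 7"))         # fires EvalStarted / EvalCompleted
    PumpEvents(0.25)                    # give pending events a chance to be delivered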
+ +import ctypes +import getopt +import os +import sys +import time +import unittest + +use_resources = [] + +def get_numpy(): + '''Get numpy if it is available.''' + try: + import numpy + return numpy + except ImportError: + return None + +def register_server(source_dir): + """ Register testing server appropriate for the python architecture. + + ``source_dir`` gives the absolute path to the comtype source in which the + 32- and 64-bit testing server, "AvmcIfc.dll" is defined. + + If the server is already registered, do nothing. + + """ + # The 64-bitness of the python interpreter determines the testing dll to + # use. + dll_name = "AvmcIfc_x64.dll" if sys.maxsize > 2**32 else "AvmcIfc.dll" + dll_path = os.path.join(source_dir, "Debug", dll_name) + # Register our ATL COM tester dll + dll = ctypes.OleDLL(dll_path) + dll.DllRegisterServer() + return + +class ResourceDenied(Exception): + """Test skipped because it requested a disallowed resource. + + This is raised when a test calls requires() for a resource that + has not be enabled. Resources are defined by test modules. + """ + +def is_resource_enabled(resource): + """Test whether a resource is enabled. + + If the caller's module is __main__ then automatically return True.""" + if sys._getframe().f_back.f_globals.get("__name__") == "__main__": + return True + result = use_resources is not None and \ + (resource in use_resources or "*" in use_resources) + if not result: + _unavail[resource] = None + return result + +_unavail = {} +def requires(resource, msg=None): + """Raise ResourceDenied if the specified resource is not available. + + If the caller's module is __main__ then automatically return True.""" + # see if the caller's module is __main__ - if so, treat as if + # the resource was set + if sys._getframe().f_back.f_globals.get("__name__") == "__main__": + return + if not is_resource_enabled(resource): + if msg is None: + msg = "Use of the `%s' resource not enabled" % resource + raise ResourceDenied(msg) + +def find_package_modules(package, mask): + import fnmatch + if hasattr(package, "__loader__"): + path = package.__name__.replace(".", os.path.sep) + mask = os.path.join(path, mask) + for fnm in package.__loader__._files.keys(): + if fnmatch.fnmatchcase(fnm, mask): + yield os.path.splitext(fnm)[0].replace(os.path.sep, ".") + else: + path = package.__path__[0] + for fnm in os.listdir(path): + if fnmatch.fnmatchcase(fnm, mask): + yield "%s.%s" % (package.__name__, os.path.splitext(fnm)[0]) + +def get_tests(package, mask, verbosity): + """Return a list of skipped test modules, and a list of test cases.""" + tests = [] + skipped = [] + for modname in find_package_modules(package, mask): + try: + mod = __import__(modname, globals(), locals(), ['*']) + except ResourceDenied as detail: + skipped.append(modname) + if verbosity > 1: + print("Skipped %s: %s" % (modname, detail), file=sys.stderr) + continue + except Exception as detail: + print("Warning: could not import %s: %s" % (modname, detail), file=sys.stderr) + continue + for name in dir(mod): + if name.startswith("_"): + continue + o = getattr(mod, name) + try: + is_test = issubclass(o, unittest.TestCase) + except TypeError: + continue + if is_test: + tests.append(o) + return skipped, tests + +def usage(): + print(__doc__) + return 1 + +def test_with_refcounts(runner, verbosity, testcase): + """Run testcase several times, tracking reference counts.""" + import gc + import ctypes + ptc = ctypes._pointer_type_cache.copy() + cfc = ctypes._c_functype_cache.copy() + wfc = 
ctypes._win_functype_cache.copy() + + # when searching for refcount leaks, we have to manually reset any + # caches that ctypes has. + def cleanup(): + ctypes._pointer_type_cache = ptc.copy() + ctypes._c_functype_cache = cfc.copy() + ctypes._win_functype_cache = wfc.copy() + gc.collect() + + test = unittest.makeSuite(testcase) + for i in range(5): + rc = sys.gettotalrefcount() + runner.run(test) + cleanup() + COUNT = 5 + refcounts = [None] * COUNT + for i in range(COUNT): + rc = sys.gettotalrefcount() + runner.run(test) + cleanup() + refcounts[i] = sys.gettotalrefcount() - rc + if [_f for _f in refcounts if _f]: + print("%s leaks:\n\t" % testcase, refcounts) + elif verbosity: + print("%s: ok." % testcase) + +class TestRunner(unittest.TextTestRunner): + def run(self, test, skipped): + "Run the given test case or test suite." + # Same as unittest.TextTestRunner.run, except that it reports + # skipped tests. + result = self._makeResult() + startTime = time.time() + test(result) + stopTime = time.time() + timeTaken = stopTime - startTime + result.printErrors() + self.stream.writeln(result.separator2) + run = result.testsRun + if _unavail: #skipped: + requested = list(_unavail.keys()) + requested.sort() + self.stream.writeln("Ran %d test%s in %.3fs (%s module%s skipped)" % + (run, run != 1 and "s" or "", timeTaken, + len(skipped), + len(skipped) != 1 and "s" or "")) + self.stream.writeln("Unavailable resources: %s" % ", ".join(requested)) + else: + self.stream.writeln("Ran %d test%s in %.3fs" % + (run, run != 1 and "s" or "", timeTaken)) + self.stream.writeln() + if not result.wasSuccessful(): + self.stream.write("FAILED (") + failed, errored = list(map(len, (result.failures, result.errors))) + if failed: + self.stream.write("failures=%d" % failed) + if errored: + if failed: self.stream.write(", ") + self.stream.write("errors=%d" % errored) + self.stream.writeln(")") + else: + self.stream.writeln("OK") + return result + + +def run_tests(package, mask, verbosity, search_leaks): + """ Run tests for package and return True on failure, False otherwise """ + skipped, testcases = get_tests(package, mask, verbosity) + runner = TestRunner(verbosity=verbosity) + + suites = [unittest.makeSuite(o) for o in testcases] + suite = unittest.TestSuite(suites) + result = runner.run(suite, skipped) + + if search_leaks: + # hunt for refcount leaks + runner = BasicTestRunner() + for t in testcases: + test_with_refcounts(runner, verbosity, t) + + return bool(result.errors) or bool(result.failures) + +class BasicTestRunner: + def run(self, test): + result = unittest.TestResult() + test(result) + return result + +def run(args = []): + """ Run tests and return True on failure, False otherwise """ + try: + opts, args = getopt.getopt(args, "rqvu:") + except getopt.error: + return usage() + + verbosity = 1 + search_leaks = False + for flag, value in opts: + if flag == "-q": + verbosity -= 1 + elif flag == "-v": + verbosity += 1 + elif flag == "-r": + try: + sys.gettotalrefcount + except AttributeError: + print("-r flag requires Python debug build", file=sys.stderr) + return -1 + search_leaks = True + elif flag == "-u": + use_resources.extend(value.split(",")) + + mask = "test_*.py*" + if args: + mask = args[0] + + import comtypes.test + return run_tests(comtypes.test, mask, verbosity, search_leaks) diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/TestComServer.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/TestComServer.cpython-36.pyc new file mode 100644 index 00000000..362d41ea Binary 
files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/TestComServer.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/TestDispServer.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/TestDispServer.cpython-36.pyc new file mode 100644 index 00000000..b8b8ea5f Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/TestDispServer.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..7bcf8a45 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/find_memleak.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/find_memleak.cpython-36.pyc new file mode 100644 index 00000000..04ec5676 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/find_memleak.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/runtests.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/runtests.cpython-36.pyc new file mode 100644 index 00000000..63f99e2d Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/runtests.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/setup.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/setup.cpython-36.pyc new file mode 100644 index 00000000..763a282f Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/setup.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_BSTR.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_BSTR.cpython-36.pyc new file mode 100644 index 00000000..a348a25c Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_BSTR.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_DISPPARAMS.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_DISPPARAMS.cpython-36.pyc new file mode 100644 index 00000000..d2f19ac6 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_DISPPARAMS.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_GUID.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_GUID.cpython-36.pyc new file mode 100644 index 00000000..05cf302e Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_GUID.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_QueryService.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_QueryService.cpython-36.pyc new file mode 100644 index 00000000..42ae8a7f Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_QueryService.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_agilent.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_agilent.cpython-36.pyc new file mode 100644 index 00000000..feea7ad0 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_agilent.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_avmc.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_avmc.cpython-36.pyc new file mode 100644 index 
00000000..fe2ac3c9 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_avmc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_basic.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_basic.cpython-36.pyc new file mode 100644 index 00000000..a8ac7092 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_basic.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_casesensitivity.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_casesensitivity.cpython-36.pyc new file mode 100644 index 00000000..d6e0b2d8 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_casesensitivity.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_client.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_client.cpython-36.pyc new file mode 100644 index 00000000..3cc3cb2c Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_client.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_collections.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_collections.cpython-36.pyc new file mode 100644 index 00000000..bb75b657 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_collections.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_comserver.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_comserver.cpython-36.pyc new file mode 100644 index 00000000..6a33381a Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_comserver.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_createwrappers.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_createwrappers.cpython-36.pyc new file mode 100644 index 00000000..5da407b0 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_createwrappers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_dict.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_dict.cpython-36.pyc new file mode 100644 index 00000000..d824043e Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_dict.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_dispinterface.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_dispinterface.cpython-36.pyc new file mode 100644 index 00000000..579191ab Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_dispinterface.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_dyndispatch.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_dyndispatch.cpython-36.pyc new file mode 100644 index 00000000..d03c82ce Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_dyndispatch.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_excel.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_excel.cpython-36.pyc new file mode 100644 index 00000000..3d2ab7f1 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_excel.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/comtypes/test/__pycache__/test_findgendir.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_findgendir.cpython-36.pyc new file mode 100644 index 00000000..92ad05b1 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_findgendir.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_getactiveobj.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_getactiveobj.cpython-36.pyc new file mode 100644 index 00000000..71605373 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_getactiveobj.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_ie.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_ie.cpython-36.pyc new file mode 100644 index 00000000..9fd7514d Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_ie.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_msscript.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_msscript.cpython-36.pyc new file mode 100644 index 00000000..de85de39 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_msscript.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_outparam.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_outparam.cpython-36.pyc new file mode 100644 index 00000000..679ea562 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_outparam.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_propputref.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_propputref.cpython-36.pyc new file mode 100644 index 00000000..37016dc4 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_propputref.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_pump_events.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_pump_events.cpython-36.pyc new file mode 100644 index 00000000..88e8e570 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_pump_events.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_safearray.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_safearray.cpython-36.pyc new file mode 100644 index 00000000..a43de95a Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_safearray.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_sapi.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_sapi.cpython-36.pyc new file mode 100644 index 00000000..3f2cf9d3 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_sapi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_server.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_server.cpython-36.pyc new file mode 100644 index 00000000..f7b6ab64 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_server.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_showevents.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_showevents.cpython-36.pyc new file mode 100644 index 00000000..5a21c060 Binary files /dev/null and 
b/venv/Lib/site-packages/comtypes/test/__pycache__/test_showevents.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_subinterface.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_subinterface.cpython-36.pyc new file mode 100644 index 00000000..83545d7d Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_subinterface.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_typeinfo.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_typeinfo.cpython-36.pyc new file mode 100644 index 00000000..fe1c07af Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_typeinfo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_urlhistory.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_urlhistory.cpython-36.pyc new file mode 100644 index 00000000..a972eea8 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_urlhistory.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_variant.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_variant.cpython-36.pyc new file mode 100644 index 00000000..58931da8 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_variant.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_win32com_interop.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_win32com_interop.cpython-36.pyc new file mode 100644 index 00000000..83d1aecd Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_win32com_interop.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_wmi.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_wmi.cpython-36.pyc new file mode 100644 index 00000000..e80812b1 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_wmi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/__pycache__/test_word.cpython-36.pyc b/venv/Lib/site-packages/comtypes/test/__pycache__/test_word.cpython-36.pyc new file mode 100644 index 00000000..7ea03846 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/test/__pycache__/test_word.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/test/find_memleak.py b/venv/Lib/site-packages/comtypes/test/find_memleak.py new file mode 100644 index 00000000..6e2612e7 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/find_memleak.py @@ -0,0 +1,61 @@ +import unittest, gc +from ctypes import * +from ctypes.wintypes import * + +################################################################ + +class PROCESS_MEMORY_COUNTERS(Structure): + _fields_ = [("cb", DWORD), + ("PageFaultCount", DWORD), + ("PeakWorkingSetSize", c_size_t), + ("WorkingSetSize", c_size_t), + ("QuotaPeakPagedPoolUsage", c_size_t), + ("QuotaPagedPoolUsage", c_size_t), + ("QuotaPeakNonPagedPoolUsage", c_size_t), + ("QuotaNonPagedPoolUsage", c_size_t), + ("PagefileUsage", c_size_t), + ("PeakPagefileUsage", c_size_t)] + def __init__(self): + self.cb = sizeof(self) + + def dump(self): + for n, _ in self._fields_[2:]: + print(n, getattr(self, n)/1e6) + +try: + windll.psapi.GetProcessMemoryInfo.argtypes = (HANDLE, POINTER(PROCESS_MEMORY_COUNTERS), DWORD) +except WindowsError: + # cannot search for memory leaks on Windows CE + def find_memleak(func, 
loops=None): + return 0 +else: + def wss(): + # Return the working set size (memory used by process) + pmi = PROCESS_MEMORY_COUNTERS() + if not windll.psapi.GetProcessMemoryInfo(-1, byref(pmi), sizeof(pmi)): + raise WinError() + return pmi.WorkingSetSize + + LOOPS = 10, 1000 + + def find_memleak(func, loops=LOOPS): + # call 'func' several times, so that memory consumption + # stabilizes: + for j in range(loops[0]): + for k in range(loops[1]): + func() + gc.collect(); gc.collect(); gc.collect() + bytes = wss() + # call 'func' several times, recording the difference in + # memory consumption before and after the call. Repeat this a + # few times, and return a list containing the memory + # consumption differences. + for j in range(loops[0]): + for k in range(loops[1]): + func() + gc.collect(); gc.collect(); gc.collect() + # return the increased in process size + result = wss() - bytes + # Sometimes the process size did decrease, we do not report leaks + # in this case: + return max(result, 0) diff --git a/venv/Lib/site-packages/comtypes/test/mylib.idl b/venv/Lib/site-packages/comtypes/test/mylib.idl new file mode 100644 index 00000000..cd5a9167 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/mylib.idl @@ -0,0 +1,40 @@ +import "oaidl.idl"; + import "ocidl.idl"; + [uuid(f4f74946-4546-44bd-a073-9ea6f9fe78cb)] library TestLib { +[object, + oleautomation, + dual, + uuid(ed978f5f-cc45-4fcc-a7a6-751ffa8dfedd)] + interface IMyInterface : IDispatch { +[id(100), propget] HRESULT Name([out, retval] BSTR *pname); + [id(100), propput] HRESULT Name([in] BSTR name); +[id(101)] HRESULT MixedInOut([in] int a, [out] int *b, [in] int c, [out] int *d); +[id(102)] HRESULT MultiInOutArgs([in, out] int *pa, [in, out] int *pb); +HRESULT MultiInOutArgs2([in, out] int *pa, [out] int *pb); +HRESULT MultiInOutArgs3([out] int *pa, [out] int *pb); +HRESULT MultiInOutArgs4([out] int *pa, [in, out] int *pb); +HRESULT GetStackTrace([in] ULONG FrameOffset, + [in, out] INT *Frames, + [in] ULONG FramesSize, + [out, optional] ULONG *FramesFilled); +HRESULT dummy([in] SAFEARRAY(VARIANT *) foo); +HRESULT DoSomething(); +HRESULT DoSomethingElse(); +} + +[object, + oleautomation, + dual, + uuid(f7c48a90-64ea-4bb8-abf1-b3a3aa996848)] + interface IMyEventInterface : IDispatch { +[id(103)] HRESULT OnSomething(); +[id(104)] HRESULT OnSomethingElse([out, retval] int *px); +} + + +[uuid(fa9de8f4-20de-45fc-b079-648572428817)] +coclass MyServer { + [default] interface IMyInterface; + [default, source] interface IMyEventInterface; +}; +} \ No newline at end of file diff --git a/venv/Lib/site-packages/comtypes/test/mytypelib.idl b/venv/Lib/site-packages/comtypes/test/mytypelib.idl new file mode 100644 index 00000000..d4d7c757 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/mytypelib.idl @@ -0,0 +1,99 @@ +import "oaidl.idl"; +import "ocidl.idl"; + +typedef +[ + uuid(0a411e93-aeb0-4b84-8722-b237a1b87ba1) + ] +struct Pair { + double a; + double b; +} Pair; + +typedef +[ + uuid(00b7e135-f7a3-42f8-b65b-ecd106b3c17d) + ] +struct Point { + double x; + double y; +} Point; + +[ + object, + /* + the oleautomation flag enables universal marshalling on non-dispatch + interfaces. See Don Box, Page 220. 
+ */ + oleautomation, + uuid(368ce4db-5f87-4927-b134-2a955c1dea1f), + ] +interface IMyInterface : IUnknown { + [propget, id(10), helpstring("returns the id of the server")] + HRESULT id([out, retval] UINT *pid); + + [propget, id(11), helpstring("the name of the server")] + HRESULT name([out, retval] BSTR *pname); + + [propput, id(11), helpstring("the name of the server")] + HRESULT name([in] BSTR name); + + [id(12), helpstring("a method that receives an BSTR [in] parameter")] + HRESULT SetName([in] BSTR name); + + [id(13), helpstring("evaluate an expression and return the result")] + HRESULT eval([in] BSTR what, [out, retval] VARIANT *presult); + + /* Some methods that use defaultvalues */ + [id(14)] + HRESULT do_cy([in, defaultvalue(32.78)] CURRENCY *value); + + [id(15)] + HRESULT do_date([in, defaultvalue(32)] DATE *value); + + [id(16), helpstring("execute a statement")] + HRESULT Exec([in] BSTR what); + + [helpstring("execute a statement")] + HRESULT Exec2([in] BSTR what); + + [helpstring("a method with [in] and [out] args in mixed order")] + HRESULT MixedInOut([in] int a, [out] int *b, [in] int c, [out] int *d); + + [helpstring("a method that receives and returns SAFEARRAYs of pairs")] + HRESULT TestPairArray([in] SAFEARRAY(Pair) val, [out, retval] SAFEARRAY(Pair) *result); + + [helpstring("a method that receives and returns SAFEARRAYs of pairs")] + HRESULT TestPairArray2([in] SAFEARRAY(Pair) val, [out, retval] SAFEARRAY(Pair) *result); + + [helpstring("a method that receives and returns SAFEARRAYs of points")] + HRESULT TestPointArray([in] SAFEARRAY(Point) val, [out, retval] SAFEARRAY(Point) *result); + + [local, helpstring("...")] + LONG Test([in] int value, [out, retval] int *result); + + HRESULT MultiInOutArgs([in, out] int *pa, + [in, out] int *pb, + [in, out] int *pc); + + HRESULT MultiOutArgs2([in, out] int *pa, + [in, out] int *pb, + [out, retval] int *pc); + +}; + +[ + uuid(6a237363-015c-4ded-937e-7e4d80b0a6cf), + version(1.0), + ] +library MyTypeLib +{ + importlib("stdole2.tlb"); + + [ + uuid(08420058-ef6b-4884-9c78-14e73dfaf767), + ] + coclass MyComServer { + [default] interface IMyInterface; + }; +}; diff --git a/venv/Lib/site-packages/comtypes/test/runtests.py b/venv/Lib/site-packages/comtypes/test/runtests.py new file mode 100644 index 00000000..56d598e3 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/runtests.py @@ -0,0 +1,8 @@ +import sys +import comtypes.test + +def main(): + sys.exit(comtypes.test.run(sys.argv[1:])) + +if __name__ == "__main__": + main() diff --git a/venv/Lib/site-packages/comtypes/test/setup.py b/venv/Lib/site-packages/comtypes/test/setup.py new file mode 100644 index 00000000..5ba3e299 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/setup.py @@ -0,0 +1,6 @@ +# all the unittests can be converted to exe-files. 
+from distutils.core import setup +import glob +import py2exe + +setup(name='test_*', console=glob.glob("test_*.py")) diff --git a/venv/Lib/site-packages/comtypes/test/test_BSTR.py b/venv/Lib/site-packages/comtypes/test/test_BSTR.py new file mode 100644 index 00000000..62d47379 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_BSTR.py @@ -0,0 +1,51 @@ +import unittest, os +from ctypes import * +from comtypes import BSTR +from comtypes.test import requires + +##requires("memleaks") + +from comtypes.test.find_memleak import find_memleak + +class Test(unittest.TestCase): + def check_leaks(self, func, limit=0): + bytes = find_memleak(func) + self.assertFalse(bytes > limit, "Leaks %d bytes" % bytes) + + def test_creation(self): + def doit(): + BSTR("abcdef" * 100) + # It seems this test is unreliable. Sometimes it leaks 4096 + # bytes, sometimes not. Try to workaround that... + self.check_leaks(doit, limit=4096) + + def test_from_param(self): + def doit(): + BSTR.from_param("abcdef") + self.check_leaks(doit) + + def test_paramflags(self): + prototype = WINFUNCTYPE(c_void_p, BSTR) + func = prototype(("SysStringLen", oledll.oleaut32)) + func.restype = c_void_p + func.argtypes = (BSTR, ) + def doit(): + func("abcdef") + func("abc xyz") + func(BSTR("abc def")) + self.check_leaks(doit) + + def test_inargs(self): + SysStringLen = windll.oleaut32.SysStringLen + SysStringLen.argtypes = BSTR, + SysStringLen.restype = c_uint + + self.assertEqual(SysStringLen("abc xyz"), 7) + def doit(): + SysStringLen("abc xyz") + SysStringLen("abc xyz") + SysStringLen(BSTR("abc def")) + self.check_leaks(doit) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_DISPPARAMS.py b/venv/Lib/site-packages/comtypes/test/test_DISPPARAMS.py new file mode 100644 index 00000000..36296a06 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_DISPPARAMS.py @@ -0,0 +1,41 @@ +import unittest as ut + +class TestCase(ut.TestCase): + def test(self): + from comtypes.automation import DISPPARAMS, VARIANT + + dp = DISPPARAMS() + dp.rgvarg = (VARIANT * 3)() + + for i in range(3): + self.assertEqual(dp.rgvarg[i].value, None) + + dp.rgvarg[0].value = 42 + dp.rgvarg[1].value = "spam" + dp.rgvarg[2].value = "foo" + + # damn, there's still this old bug! 
+ + self.assertEqual(dp.rgvarg[0].value, 42) + # these fail: +## self.failUnlessEqual(dp.rgvarg[1].value, "spam") +## self.failUnlessEqual(dp.rgvarg[2].value, "foo") + + def X_test_2(self): + # basically the same test as above + from comtypes.automation import DISPPARAMS, VARIANT + + args = [42, None, "foo"] + + dp = DISPPARAMS() + dp.rgvarg = (VARIANT * 3)(*list(map(VARIANT, args[::-1]))) + + import gc + gc.collect() + + self.assertEqual(dp.rgvarg[0].value, 42) + self.assertEqual(dp.rgvarg[1].value, "spam") + self.assertEqual(dp.rgvarg[2].value, "foo") + +if __name__ == "__main__": + ut.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_GUID.py b/venv/Lib/site-packages/comtypes/test/test_GUID.py new file mode 100644 index 00000000..eee2123c --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_GUID.py @@ -0,0 +1,32 @@ +import os +import unittest +from comtypes import GUID + +class Test(unittest.TestCase): + def test(self): + self.assertEqual(GUID(), GUID()) + self.assertEqual(GUID("{00000000-0000-0000-C000-000000000046}"), + GUID("{00000000-0000-0000-C000-000000000046}")) + + self.assertEqual(str(GUID("{0002DF01-0000-0000-C000-000000000046}")), + "{0002DF01-0000-0000-C000-000000000046}") + self.assertEqual(repr(GUID("{0002DF01-0000-0000-C000-000000000046}")), + 'GUID("{0002DF01-0000-0000-C000-000000000046}")') + + self.assertRaises(WindowsError, GUID, "abc") + self.assertRaises(WindowsError, GUID.from_progid, "abc") + + self.assertRaises(WindowsError, lambda guid: guid.as_progid(), + GUID("{00000000-0000-0000-C000-000000000046}")) + + + if os.name == "nt": + self.assertEqual(GUID.from_progid("InternetExplorer.Application"), + GUID("{0002DF01-0000-0000-C000-000000000046}")) + self.assertEqual(GUID("{0002DF01-0000-0000-C000-000000000046}").as_progid(), + 'InternetExplorer.Application.1') + + self.assertNotEqual(GUID.create_new(), GUID.create_new()) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_QueryService.py b/venv/Lib/site-packages/comtypes/test/test_QueryService.py new file mode 100644 index 00000000..32149be2 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_QueryService.py @@ -0,0 +1,27 @@ +import unittest +from ctypes import POINTER +import comtypes +from comtypes.client import CreateObject, GetModule + +GetModule('oleacc.dll') +from comtypes.gen.Accessibility import IAccessible + + +class TestCase(unittest.TestCase): + + def setUp(self): + self.ie = CreateObject('InternetExplorer.application') + + def tearDown(self): + self.ie.Quit() + del self.ie + + def test(self): + ie = self.ie + ie.navigate2("about:blank", 0) + sp = ie.Document.Body.QueryInterface(comtypes.IServiceProvider) + pacc = sp.QueryService(IAccessible._iid_, IAccessible) + self.assertEqual(type(pacc), POINTER(IAccessible)) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_agilent.py b/venv/Lib/site-packages/comtypes/test/test_agilent.py new file mode 100644 index 00000000..62715ca5 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_agilent.py @@ -0,0 +1,91 @@ +# This test requires that the Agilent IVI-COM Driver for Agilent546XX +# is installed. It is not requires to have a physical instrument +# connected, the driver is used in simulation mode. 
+import unittest +from comtypes.test import ResourceDenied +from comtypes.client import CreateObject +from comtypes import GUID +from comtypes.safearray import _midlSAFEARRAY +from ctypes import c_double, POINTER + +try: + GUID.from_progid("Agilent546XX.Agilent546XX") +except WindowsError: + pass + +else: + class Test(unittest.TestCase): + def test(self): + # The point of this test is the ReadWaveform method below, + # which takes several [in, out] arguments. + agDrvr = CreateObject("Agilent546XX.Agilent546XX") + + # XXX XXX XXX The following call crashes hard with an accessviolation when + # the OANOCACHE environ variable is set. + import os + if "OANOCACHE" in os.environ: + print("Cannot test. buggy COM object?") + return + + # Initialize the driver in simulation mode. Resource descriptor is ignored. + agDrvr.Initialize("", False, False, "Simulate=true") + # Initialize driver. Edit resource descriptor for your system. + # agDrvr.Initialize("GPIB0::7::INSTR", False, False, "QueryInstrStatus=true") + + from comtypes.gen import IviScopeLib + iviDrvr = agDrvr.QueryInterface(IviScopeLib.IIviScope) + + # Get driver Identity properties. Driver initialization not required. +## print "Identifier:", iviDrvr.Identity.Identifier +## print " Revision:", agDrvr.Identity.Revision +## print "Description:", agDrvr.Identity.Description + + # Get instrument Identity properties. +## print "InstrumentModel: ", agDrvr.Identity.InstrumentModel +## print " FirmwareRevision: ", agDrvr.Identity.InstrumentFirmwareRevision +## print " SerialNumber: ", agDrvr.System.SerialNumber + + # Setup for a measurement. Reset in this case. + agDrvr.Utility.Reset() + + pMeasurement = agDrvr.Measurements.Item("UserChannel1") + # ReadWaveform() takes a sweep and reads the data. + # + # Definition generated for ReadWaveform(): + #COMMETHOD([helpstring(u'Acquires and returns a waveform on the configured channels.')], + # HRESULT, 'ReadWaveform', + # ( ['in'], Agilent546XXTimeOutEnum, 'MaxTime' ), + # ( ['in', 'out'], POINTER(_midlSAFEARRAY(c_double)), 'pWaveformArray' ), + # ( ['in', 'out'], POINTER(c_double), 'pInitialX' ), + # ( ['in', 'out'], POINTER(c_double), 'pXIncrement' )), + + # [in, out] arguments are now optional (comtypes + # constructs an empty default value when nothing is + # passed). 
+ psaWaveform = _midlSAFEARRAY(c_double).create([]) + self._check_result(pMeasurement.ReadWaveform(20000)) + self._check_result(pMeasurement.ReadWaveform(20000, pInitialX=9.0)) + self._check_result(pMeasurement.ReadWaveform(20000, pXIncrement=9.0, pInitialX=3.0)) + self._check_result(pMeasurement.ReadWaveform(20000)) + self._check_result(pMeasurement.ReadWaveform(20000, [])) + self._check_result(pMeasurement.ReadWaveform(20000, pWaveformArray = [])) + self._check_result(pMeasurement.ReadWaveform(20000, psaWaveform)) + self._check_result(pMeasurement.ReadWaveform(20000, pXIncrement=9.0)) + + def _check_result(self, xxx_todo_changeme): + # ReadWaveform, in simulation mode, returns three values: + # + # - a safearray containing 100 random double values, + # unpacked and returned as tuple + # - the initial_x value: 0.0 + # - the x_increment value: 0.0 + (array, initial_x, x_increment) = xxx_todo_changeme + self.assertEqual(len(array), 100) + self.assertFalse([x for x in array if not isinstance(x, float)]) + self.assertEqual(initial_x, 0.0) + self.assertEqual(x_increment, 0.0) + + + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_avmc.py b/venv/Lib/site-packages/comtypes/test/test_avmc.py new file mode 100644 index 00000000..808ceafd --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_avmc.py @@ -0,0 +1,36 @@ +import unittest +from comtypes.client import CreateObject +from comtypes.test.find_memleak import find_memleak + +class Test(unittest.TestCase): + "Test COM records" + def test(self): + # The ATL COM dll + avmc = CreateObject("AvmcIfc.Avmc.1") + + # This returns an array (a list) of DeviceInfo records. + devs = avmc.FindAllAvmc() + + self.assertEqual(devs[0].Flags, 12) + self.assertEqual(devs[0].ID, 13) + self.assertEqual(devs[0].LocId, 14) + self.assertEqual(devs[0].Description, "Avmc") + self.assertEqual(devs[0].SerialNumber, "1234") + + self.assertEqual(devs[1].Flags, 22) + self.assertEqual(devs[1].ID, 23) + self.assertEqual(devs[1].LocId, 24) + self.assertEqual(devs[1].Description, "Avmc2") + self.assertEqual(devs[1].SerialNumber, "5678") + +## # Leaks... where? +## def doit(): +## avmc.FindAllAvmc() +## self.check_leaks(doit) + + def check_leaks(self, func, limit=0): + bytes = find_memleak(func) + self.assertFalse(bytes > limit, "Leaks %d bytes" % bytes) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_basic.py b/venv/Lib/site-packages/comtypes/test/test_basic.py new file mode 100644 index 00000000..63392116 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_basic.py @@ -0,0 +1,134 @@ +##import ut +import unittest as ut +from ctypes import windll, POINTER, byref, HRESULT +from comtypes import IUnknown, STDMETHOD, GUID + +# XXX leaks references! 
+ +def method_count(interface): + return sum([len(base.__dict__.get("_methods_", ())) + for base in interface.__mro__]) + +class BasicTest(ut.TestCase): + def test_IUnknown(self): + from comtypes import IUnknown + self.assertEqual(method_count(IUnknown), 3) + + def test_release(self): + POINTER(IUnknown)() + + def test_refcounts(self): + p = POINTER(IUnknown)() + windll.oleaut32.CreateTypeLib2(1, "blabla", byref(p)) + # initial refcount is 2 + for i in range(2, 10): + self.assertEqual(p.AddRef(), i) + for i in range(8, 0, -1): + self.assertEqual(p.Release(), i) + + def test_qi(self): + p = POINTER(IUnknown)() + windll.oleaut32.CreateTypeLib2(1, "blabla", byref(p)) + self.assertEqual(p.AddRef(), 2) + self.assertEqual(p.Release(), 1) + + other = p.QueryInterface(IUnknown) + self.assertEqual(other.AddRef(), 3) + self.assertEqual(p.AddRef(), 4) + self.assertEqual(p.Release(), 3) + self.assertEqual(other.Release(), 2) + + del p # calls p.Release() + + self.assertEqual(other.AddRef(), 2) + self.assertEqual(other.Release(), 1) + + def test_derived(self): + # XXX leaks 50 refs + self.assertEqual(method_count(IUnknown), 3) + + class IMyInterface(IUnknown): + pass + + self.assertEqual(method_count(IMyInterface), 3) + + # assigning _methods_ does not work until we have an _iid_! + self.assertRaises(AttributeError, + setattr, IMyInterface, "_methods_", []) + IMyInterface._iid_ = GUID.create_new() + IMyInterface._methods_ = [] + self.assertEqual(method_count(IMyInterface), 3) + + IMyInterface._methods_ = [ + STDMETHOD(HRESULT, "Blah", [])] + self.assertEqual(method_count(IMyInterface), 4) + + def test_heirarchy(self): + class IMyInterface(IUnknown): + pass + + self.assertTrue(issubclass(IMyInterface, IUnknown)) + self.assertTrue(issubclass(POINTER(IMyInterface), POINTER(IUnknown))) + + def test_mro(self): + mro = POINTER(IUnknown).__mro__ + + self.assertEqual(mro[0], POINTER(IUnknown)) + self.assertEqual(mro[1], IUnknown) + + # the IUnknown class has the actual methods: + self.assertTrue(IUnknown.__dict__.get("QueryInterface")) + # but we can call it on the pointer instance + POINTER(IUnknown).QueryInterface + + def test_make_methods(self): + # XXX leaks 53 refs + class IBase(IUnknown): + _iid_ = GUID.create_new() + class IDerived(IBase): + _iid_ = GUID.create_new() + + # We cannot assign _methods_ to IDerived before IBase has it's _methods_: + self.assertRaises(TypeError, lambda: setattr(IDerived, "_methods_", [])) + # Make sure that setting _methods_ failed completely. + self.assertRaises(KeyError, lambda: IDerived.__dict__["_methods_"]) + IBase._methods_ = [] + # Now it works: + IDerived._methods_ = [] + + def test_identity(self): + # COM indentity rules + + # these should be identical + a = POINTER(IUnknown)() + b = POINTER(IUnknown)() + self.assertEqual(a, b) + self.assertEqual(hash(a), hash(b)) + + from comtypes.typeinfo import CreateTypeLib + + # we do not save the lib, so no file will be created. 
+ # these should NOT be identical + a = CreateTypeLib("blahblah") + b = CreateTypeLib("spam") + + self.assertNotEqual(a, b) + self.assertNotEqual(hash(a), hash(b)) + + a = a.QueryInterface(IUnknown) + b = b.QueryInterface(IUnknown) + + self.assertNotEqual(a, b) + self.assertNotEqual(hash(a), hash(b)) + + # These must be identical + c = a.QueryInterface(IUnknown) + self.assertEqual(a, c) + self.assertEqual(hash(a), hash(c)) + + +def main(): + ut.main() + +if __name__ == "__main__": + main() diff --git a/venv/Lib/site-packages/comtypes/test/test_casesensitivity.py b/venv/Lib/site-packages/comtypes/test/test_casesensitivity.py new file mode 100644 index 00000000..9a3e83e7 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_casesensitivity.py @@ -0,0 +1,38 @@ +import unittest + +from comtypes.client import GetModule +iem = GetModule("shdocvw.dll") + +class TestCase(unittest.TestCase): + def test(self): + from comtypes.client import GetModule + iem = GetModule("shdocvw.dll") + + # IDispatch(IUnknown) + # IWebBrowser(IDispatch) + # IWebBrowserApp(IWebBrowser) + # IWebBrowser2(IWebBrowserApp) + +## print iem.IWebBrowser2.mro() + + self.assertTrue(issubclass(iem.IWebBrowser2, iem.IWebBrowserApp)) + self.assertTrue(issubclass(iem.IWebBrowserApp, iem.IWebBrowser)) + +## print sorted(iem.IWebBrowser.__map_case__.keys()) +## print "=" * 42 +## print sorted(iem.IWebBrowserApp.__map_case__.keys()) +## print "=" * 42 +## print sorted(iem.IWebBrowser2.__map_case__.keys()) +## print "=" * 42 + + # names in the base class __map_case__ must also appear in the + # subclass. + for name in iem.IWebBrowser.__map_case__: + self.assertTrue(name in iem.IWebBrowserApp.__map_case__, "%s missing" % name) + self.assertTrue(name in iem.IWebBrowser2.__map_case__, "%s missing" % name) + + for name in iem.IWebBrowserApp.__map_case__: + self.assertTrue(name in iem.IWebBrowser2.__map_case__, "%s missing" % name) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_client.py b/venv/Lib/site-packages/comtypes/test/test_client.py new file mode 100644 index 00000000..d15ad93c --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_client.py @@ -0,0 +1,65 @@ +import unittest as ut +import comtypes.client +from comtypes import COSERVERINFO +from ctypes import POINTER, byref + +# create the typelib wrapper and import it +comtypes.client.GetModule("scrrun.dll") +from comtypes.gen import Scripting + +import comtypes.test +comtypes.test.requires("ui") + +class Test(ut.TestCase): + def test_progid(self): + # create from ProgID + obj = comtypes.client.CreateObject("Scripting.Dictionary") + self.assertTrue(isinstance(obj, POINTER(Scripting.IDictionary))) + + def test_clsid(self): + # create from the CoClass' clsid + obj = comtypes.client.CreateObject(Scripting.Dictionary) + self.assertTrue(isinstance(obj, POINTER(Scripting.IDictionary))) + + def test_clsid_string(self): + # create from string clsid + comtypes.client.CreateObject(str(Scripting.Dictionary._reg_clsid_)) + comtypes.client.CreateObject(str(Scripting.Dictionary._reg_clsid_)) + + def test_GetModule_clsid(self): + clsid = comtypes.GUID.from_progid("MediaPlayer.MediaPlayer") + tlib = comtypes.client.GetModule(clsid) + + def test_remote(self): + ie = comtypes.client.CreateObject("InternetExplorer.Application", + machine="localhost") + self.assertEqual(ie.Visible, False) + ie.Visible = 1 + # on a remote machine, this may not work. Probably depends on + # how the server is run. 
+ self.assertEqual(ie.Visible, True) + self.assertEqual(0, ie.Quit()) # 0 == S_OK + + def test_server_info(self): + serverinfo = COSERVERINFO() + serverinfo.pwszName = 'localhost' + pServerInfo = byref(serverinfo) + + self.assertRaises(ValueError, comtypes.client.CreateObject, + "InternetExplorer.Application", machine='localhost', + pServerInfo=pServerInfo) + ie = comtypes.client.CreateObject("InternetExplorer.Application", + pServerInfo=pServerInfo) + self.assertEqual(ie.Visible, False) + ie.Visible = 1 + # on a remote machine, this may not work. Probably depends on + # how the server is run. + self.assertEqual(ie.Visible, True) + self.assertEqual(0, ie.Quit()) # 0 == S_OK + +def test_main(): + from test import test_support + test_support.run_unittest(Test) + +if __name__ == "__main__": + ut.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_collections.py b/venv/Lib/site-packages/comtypes/test/test_collections.py new file mode 100644 index 00000000..3e3b0bf2 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_collections.py @@ -0,0 +1,157 @@ +import unittest +from comtypes.client import CreateObject +from ctypes import ArgumentError + +from comtypes.test.find_memleak import find_memleak + + +class Test(unittest.TestCase): + + def test_IEnumVARIANT(self): + # The XP firewall manager. + fwmgr = CreateObject('HNetCfg.FwMgr') + # apps has a _NewEnum property that implements IEnumVARIANT + services = fwmgr.LocalPolicy.CurrentProfile.Services + + self.assertEqual(services.Count, len(services)) + + cv = iter(services) + + names = [p.Name for p in cv] + self.assertEqual(len(services), len(names)) + + # The iterator is consumed now: + self.assertEqual([p.Name for p in cv], []) + + # But we can reset it: + cv.Reset() + self.assertEqual([p.Name for p in cv], names) + + # Reset, then skip: + cv.Reset() + cv.Skip(3) + self.assertEqual([p.Name for p in cv], names[3:]) + + # Reset, then skip: + cv.Reset() + cv.Skip(300) + self.assertEqual([p.Name for p in cv], names[300:]) + + # Hm, do we want to allow random access to the iterator? + # Should the iterator support __getitem__ ??? + self.assertEqual(cv[0].Name, names[0]) + self.assertEqual(cv[0].Name, names[0]) + self.assertEqual(cv[0].Name, names[0]) + + if len(names) > 1: + self.assertEqual(cv[1].Name, names[1]) + self.assertEqual(cv[1].Name, names[1]) + self.assertEqual(cv[1].Name, names[1]) + + # We can now call Next(celt) with celt != 1, the call always returns a + # list: + cv.Reset() + self.assertEqual(names[:3], + [p.Name for p in cv.Next(3)]) + + # calling Next(0) makes no sense, but should work anyway: + self.assertEqual(cv.Next(0), []) + + cv.Reset() + self.assertEqual(len(cv.Next(len(names) * 2)), len(names)) + + # slicing is not (yet?) supported + cv.Reset() + self.assertRaises(ArgumentError, lambda: cv[:]) + + def test_leaks_1(self): + # The XP firewall manager. + fwmgr = CreateObject('HNetCfg.FwMgr') + # apps has a _NewEnum property that implements IEnumVARIANT + apps = fwmgr.LocalPolicy.CurrentProfile.AuthorizedApplications + + def doit(): + for item in iter(apps): + item.ProcessImageFileName + bytes = find_memleak(doit, (20, 20)) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_leaks_2(self): + # The XP firewall manager. 
+ fwmgr = CreateObject('HNetCfg.FwMgr') + # apps has a _NewEnum property that implements IEnumVARIANT + apps = fwmgr.LocalPolicy.CurrentProfile.AuthorizedApplications + + def doit(): + iter(apps).Next(99) + bytes = find_memleak(doit, (20, 20)) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_leaks_3(self): + # The XP firewall manager. + fwmgr = CreateObject('HNetCfg.FwMgr') + # apps has a _NewEnum property that implements IEnumVARIANT + apps = fwmgr.LocalPolicy.CurrentProfile.AuthorizedApplications + + def doit(): + for i in range(2): + for what in iter(apps): + pass + bytes = find_memleak(doit, (20, 20)) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + +class TestCollectionInterface(unittest.TestCase): + """ Test the early-bound collection interface. """ + + def setUp(self): + self.d = CreateObject("Scripting.Dictionary", dynamic=False) + + def tearDown(self): + del self.d + + def assertAccessInterface(self, d): + """ Asserts access via indexing and named property """ + self.assertEqual(d.CompareMode, 42) + self.assertEqual(d["foo"], 1) + self.assertEqual(d.Item["foo"], d["foo"]) + self.assertEqual(d.Item("foo"), d["foo"]) + self.assertEqual(d["bar"], "spam foo") + self.assertEqual(d.Item("bar"), "spam foo") + self.assertEqual(d["baz"], 3.14) + self.assertEqual(d.Item("baz"), d["baz"]) + self.assertIsNone(d["asdlfkj"]) + self.assertIsNone(d.Item["asdlfkj"]) + self.assertIsNone(d.Item("asdlfkj")) + + items = iter(d) + self.assertEqual(items[0], "foo") + self.assertEqual(items[1], "bar") + self.assertEqual(items[2], "baz") + self.assertEqual(items[3], "asdlfkj") + + def test_index_setter(self): + d = self.d + d.CompareMode = 42 + d["foo"] = 1 + d["bar"] = "spam foo" + d["baz"] = 3.14 + self.assertAccessInterface(d) + + def test_named_property_setter(self): + d = self.d + d.CompareMode = 42 + d.Item["foo"] = 1 + d.Item["bar"] = "spam foo" + d.Item["baz"] = 3.14 + self.assertAccessInterface(d) + + def test_named_property_no_length(self): + self.assertRaises(TypeError, len, self.d.Item) + + def test_named_property_not_iterable(self): + self.assertRaises(TypeError, list, self.d.Item) + + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_comserver.py b/venv/Lib/site-packages/comtypes/test/test_comserver.py new file mode 100644 index 00000000..22ee7d9c --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_comserver.py @@ -0,0 +1,208 @@ +import unittest, sys +from ctypes import * +from ctypes.wintypes import * +from comtypes.client import CreateObject, GetEvents, ShowEvents +from comtypes.server.register import register#, unregister +from comtypes.test import is_resource_enabled +from comtypes.test.find_memleak import find_memleak + +################################################################ +import comtypes.test.TestComServer +register(comtypes.test.TestComServer.TestComServer) + +class TestInproc(unittest.TestCase): + + def create_object(self): + return CreateObject("TestComServerLib.TestComServer", + clsctx = comtypes.CLSCTX_INPROC_SERVER) + + def _find_memleak(self, func): + bytes = find_memleak(func) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_mixedinout(self): + o = self.create_object() + self.assertEqual(o.MixedInOut(2, 4), (3, 5)) + + def test_getname(self): + from ctypes import byref, pointer + from comtypes import BSTR + + # This tests a tricky bug, introduced with this patch: + # http://www.python.org/sf/1643874 + # + # Returning a BSTR as an [out] parameter from a server + # 
implementation must transfer the ownership to the caller. + # When this is not done, the BSTR instance is SysFreeString'd + # too early, and the memory is reused. + obj = self.create_object() + pb = pointer(BSTR()) + # Get the BSTR from the server: + obj._ITestComServer__com__get_name(pb) + # Retrieve the value, but keep the pointer to the BSTR alive: + name = pb[0] + # Create sme BSTR's to reuse the memory in case it has been freed: + for i in range(10): + BSTR("f" * len(name)) + # Make sure the pointer is still valid: + self.assertEqual(pb[0], name) + + if is_resource_enabled("memleaks"): + def test_get_id(self): + obj = self.create_object() + self._find_memleak(lambda: obj.id) + + def test_get_name(self): + obj = self.create_object() + self._find_memleak(lambda: obj.name) + + def test_set_name(self): + obj = self.create_object() + def func(): + obj.name = "abcde" + self._find_memleak(func) + + def test_SetName(self): + obj = self.create_object() + def func(): + obj.SetName("abcde") + self._find_memleak(func) + + + def test_eval(self): + obj = self.create_object() + def func(): + return obj.eval("(1, 2, 3)") + self.assertEqual(func(), (1, 2, 3)) + self._find_memleak(func) + + def test_get_typeinfo(self): + obj = self.create_object() + def func(): + obj.GetTypeInfo(0) + obj.GetTypeInfoCount() + obj.QueryInterface(comtypes.IUnknown) + self._find_memleak(func) + +if is_resource_enabled("ui"): + class TestLocalServer(TestInproc): + def create_object(self): + return CreateObject("TestComServerLib.TestComServer", + clsctx = comtypes.CLSCTX_LOCAL_SERVER) + +try: + from win32com.client import Dispatch +except ImportError: + pass +else: + class TestInproc_win32com(TestInproc): + def create_object(self): + return Dispatch("TestComServerLib.TestComServer") + + # These tests make no sense with win32com, override to disable them: + def test_get_typeinfo(self): + pass + + def test_getname(self): + pass + + def test_mixedinout(self): + # Not sure about this; it raise 'Invalid Number of parameters' + # Is mixed [in], [out] args not compatible with IDispatch??? + pass + + if is_resource_enabled("ui"): + class TestLocalServer_win32com(TestInproc_win32com): + def create_object(self): + return Dispatch("TestComServerLib.TestComServer", clsctx = comtypes.CLSCTX_LOCAL_SERVER) + +import doctest +import comtypes.test.test_comserver + + +class TestCase(unittest.TestCase): + def test(self): + doctest.testmod(comtypes.test.test_comserver, optionflags=doctest.ELLIPSIS) + + # The following functions are never called, they only contain doctests: + + if sys.version_info >= (3, 0): + def ShowEvents(self): + ''' + >>> from comtypes.client import CreateObject, ShowEvents + >>> + >>> o = CreateObject("TestComServerLib.TestComServer") + >>> con = ShowEvents(o) + # event found: ITestComServerEvents_EvalStarted + # event found: ITestComServerEvents_EvalCompleted + >>> result = o.eval("10. / 4") + Event ITestComServerEvents_EvalStarted(None, '10. / 4') + Event ITestComServerEvents_EvalCompleted(None, '10. / 4', VARIANT(vt=0x5, 2.5)) + >>> result + 2.5 + >>> + ''' + else: + def ShowEvents(self): + ''' + >>> from comtypes.client import CreateObject, ShowEvents + >>> + >>> o = CreateObject("TestComServerLib.TestComServer") + >>> con = ShowEvents(o) + # event found: ITestComServerEvents_EvalStarted + # event found: ITestComServerEvents_EvalCompleted + >>> result = o.eval("10. / 4") + Event ITestComServerEvents_EvalStarted(None, u'10. / 4') + Event ITestComServerEvents_EvalCompleted(None, u'10. 
/ 4', VARIANT(vt=0x5, 2.5)) + >>> result + 2.5 + >>> + ''' + + # The following test, if enabled, works but the testsuit + # crashes elsewhere. Is there s problem with SAFEARRAYs? + + if is_resource_enabled("CRASHES"): + def Fails(self): + ''' + >>> from comtypes.client import CreateObject, ShowEvents + >>> + >>> o = CreateObject("TestComServerLib.TestComServer") + >>> con = ShowEvents(o) + # event found: ITestComServerEvents_EvalStarted + # event found: ITestComServerEvents_EvalCompleted + >>> result = o.eval("['32'] * 2") + Event ITestComServerEvents_EvalStarted(None, u"['32'] * 2") + Event ITestComServerEvents_EvalCompleted(None, u"['32'] * 2", VARIANT(vt=0x200c, (u'32', u'32'))) + >>> result + (u'32', u'32') + >>> + ''' + + def GetEvents(): + """ + >>> from comtypes.client import CreateObject, GetEvents + >>> + >>> o = CreateObject("TestComServerLib.TestComServer") + >>> class EventHandler(object): + ... def EvalStarted(self, this, what): + ... print("EvalStarted: %s" % what) + ... return 0 + ... def EvalCompleted(self, this, what, result): + ... print("EvalCompleted: %s = %s" % (what, result.value)) + ... return 0 + ... + >>> + >>> con = GetEvents(o, EventHandler()) + >>> o.eval("2 + 3") + EvalStarted: 2 + 3 + EvalCompleted: 2 + 3 = 5 + 5 + >>> del con + >>> o.eval("3 + 2") + 5 + >>> + """ + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_createwrappers.py b/venv/Lib/site-packages/comtypes/test/test_createwrappers.py new file mode 100644 index 00000000..c09a4742 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_createwrappers.py @@ -0,0 +1,118 @@ +import glob +import os +import unittest +import warnings +import comtypes.typeinfo +import comtypes.client +import comtypes.client._generate +from comtypes.test import requires + +requires("typelibs") + +# filter warnings about interfaces without a base interface; they will +# be skipped in the code generation. +warnings.filterwarnings("ignore", + "Ignoring interface .* which has no base interface", + UserWarning) + +# don't print messages when typelib wrappers are generated +comtypes.client._generate.__verbose__ = False + +sysdir = os.path.join(os.environ["SystemRoot"], "system32") + +progdir = os.environ["ProgramFiles"] +common_progdir = os.environ["CommonProgramFiles"] + +# This test takes quite some time. It tries to build wrappers for ALL +# .dll, .tlb, and .ocx files in the system directory which contain typelibs. 
+ +class Test(unittest.TestCase): + def setUp(self): + "Do not write the generated files into the comtypes.gen directory" + comtypes.client.gen_dir = None + + def tearDown(self): + comtypes.client.gen_dir = comtypes.client._find_gen_dir() + +number = 0 + +def add_test(fname): + global number + def test(self): + try: + comtypes.typeinfo.LoadTypeLibEx(fname) + except WindowsError: + return + comtypes.client.GetModule(fname) + + test.__doc__ = "test GetModule(%r)" % fname + setattr(Test, "test_%d" % number, test) + number += 1 + +for fname in glob.glob(os.path.join(sysdir, "*.ocx")): + add_test(fname) + +for fname in glob.glob(os.path.join(sysdir, "*.tlb")): + add_test(fname) + +for fname in glob.glob(os.path.join(progdir, r"Microsoft Office\Office*\*.tlb")): + if os.path.basename(fname).lower() in ( + "grde50.olb", # UnicodeEncodeError + "xl5de32.olb", # UnicodeEncodeError + "grde50.olb", # UnicodeEncodeError + ): + continue + add_test(fname) + +for fname in glob.glob(os.path.join(progdir, r"Microsoft Office\Office*\*.olb")): + if os.path.basename(fname).lower() in ( + "grde50.olb", # UnicodeEncodeError + "xl5de32.olb", # UnicodeEncodeError + "grde50.olb", # UnicodeEncodeError + ): + continue + add_test(fname) + +path = os.path.join(progdir, r"Microsoft Visual Studio .NET 2003\Visual Studio SDKs\DIA SDK\bin\msdia71.dll") +if os.path.isfile(path): + print("ADD", path) + add_test(path) + +for fname in glob.glob(os.path.join(common_progdir, r"Microsoft Shared\Speech\*.dll")): + add_test(fname) + +for fname in glob.glob(os.path.join(sysdir, "*.dll")): + # these typelibs give errors: + if os.path.basename(fname).lower() in ( + "syncom.dll", # interfaces without base interface + "msvidctl.dll", # assignment to None + "scardssp.dll", # assertionerror sizeof() + "sccsccp.dll", # assertionerror sizeof() + + # Typeinfo in comsvcs.dll in XP 64-bit SP 1 is broken. 
+ # Oleview decompiles this code snippet (^ marks are m): + #[ + # odl, + # uuid(C7B67079-8255-42C6-9EC0-6994A3548780) + #] + #interface IAppDomainHelper : IDispatch { + # HRESULT _stdcall pfnShutdownCB(void* pv); + # HRESULT _stdcall Initialize( + # [in] IUnknown* pUnkAD, + # [in] IAppDomainHelper __MIDL_0028, + # ^^^^^^^^^^^^^^^^ + # [in] void* pPool); + # HRESULT _stdcall pfnCallbackCB(void* pv); + # HRESULT _stdcall DoCallback( + # [in] IUnknown* pUnkAD, + # [in] IAppDomainHelper __MIDL_0029, + # ^^^^^^^^^^^^^^^^ + # [in] void* pPool); + #}; + "comsvcs.dll", + ): + continue + add_test(fname) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_dict.py b/venv/Lib/site-packages/comtypes/test/test_dict.py new file mode 100644 index 00000000..628796f3 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_dict.py @@ -0,0 +1,103 @@ +"""Use Scripting.Dictionary to test the lazybind module.""" + +import unittest +from comtypes import COMError +from comtypes.client import CreateObject +from comtypes.client.lazybind import Dispatch +from comtypes.automation import VARIANT + +class Test(unittest.TestCase): + def test_dict(self): + d = CreateObject("Scripting.Dictionary", dynamic=True) + self.assertEqual(type(d), Dispatch) + + # Count is a normal propget, no propput + self.assertEqual(d.Count, 0) + self.assertRaises(AttributeError, lambda: setattr(d, "Count", -1)) + + # HashVal is a 'named' propget, no propput + ##d.HashVal + + # Add(Key, Item) -> None + self.assertEqual(d.Add("one", 1), None) + self.assertEqual(d.Count, 1) + + # RemoveAll() -> None + self.assertEqual(d.RemoveAll(), None) + self.assertEqual(d.Count, 0) + + # CompareMode: propget, propput + # (Can only be set when dict is empty!) + self.assertEqual(d.CompareMode, 0) + d.CompareMode = 1 + self.assertEqual(d.CompareMode, 1) + d.CompareMode = 0 + + # Exists(key) -> bool + self.assertEqual(d.Exists(42), False) + d.Add(42, "foo") + self.assertEqual(d.Exists(42), True) + + # Keys() -> array + # Items() -> array + self.assertEqual(d.Keys(), (42,)) + self.assertEqual(d.Items(), ("foo",)) + d.Remove(42) + self.assertEqual(d.Exists(42), False) + self.assertEqual(d.Keys(), ()) + self.assertEqual(d.Items(), ()) + + # Item[key] : propget + d.Add(42, "foo") + self.assertEqual(d.Item[42], "foo") + + d.Add("spam", "bar") + self.assertEqual(d.Item["spam"], "bar") + + # Item[key] = value: propput, propputref + d.Item["key"] = "value" + self.assertEqual(d.Item["key"], "value") + d.Item[42] = 73, 48 + self.assertEqual(d.Item[42], (73, 48)) + + ################################################################ + # part 2, testing propput and propputref + + s = CreateObject("Scripting.Dictionary", dynamic=True) + s.CompareMode = 42 + + # This calls propputref, since we assign an Object + d.Item["object"] = s + # This calls propput, since we assing a Value + d.Item["value"] = s.CompareMode + + a = d.Item["object"] + + self.assertEqual(d.Item["object"], s) + self.assertEqual(d.Item["object"].CompareMode, 42) + self.assertEqual(d.Item["value"], 42) + + # Changing a property of the object + s.CompareMode = 5 + self.assertEqual(d.Item["object"], s) + self.assertEqual(d.Item["object"].CompareMode, 5) + self.assertEqual(d.Item["value"], 42) + + # This also calls propputref since we assign an Object + d.Item["var"] = VARIANT(s) + self.assertEqual(d.Item["var"], s) + + # iter(d) + keys = [x for x in d] + self.assertEqual(d.Keys(), + tuple([x for x in d])) + + # d[key] = value + # d[key] -> value + d["blah"] 
= "blarg" + self.assertEqual(d["blah"], "blarg") + # d(key) -> value + self.assertEqual(d("blah"), "blarg") + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_dispinterface.py b/venv/Lib/site-packages/comtypes/test/test_dispinterface.py new file mode 100644 index 00000000..7ce2ee20 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_dispinterface.py @@ -0,0 +1,118 @@ +import unittest + +from comtypes.server.register import register#, unregister +from comtypes.test import is_resource_enabled + +################################################################ +import comtypes.test.TestDispServer +register(comtypes.test.TestDispServer.TestDispServer) + +class Test(unittest.TestCase): + + if is_resource_enabled("pythoncom"): + def test_win32com(self): + # EnsureDispatch is case-sensitive + from win32com.client.gencache import EnsureDispatch + d = EnsureDispatch("TestDispServerLib.TestDispServer") + + self.assertEqual(d.eval("3.14"), 3.14) + self.assertEqual(d.eval("1 + 2"), 3) + self.assertEqual(d.eval("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + self.assertEqual(d.eval2("3.14"), 3.14) + self.assertEqual(d.eval2("1 + 2"), 3) + self.assertEqual(d.eval2("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + d.eval("__import__('comtypes.client').client.CreateObject('Scripting.Dictionary')") + + server_id = d.eval("id(self)") + self.assertEqual(d.id, server_id) + + self.assertEqual(d.name, "spam, spam, spam") + + d.SetName("foo bar") + self.assertEqual(d.name, "foo bar") + + d.name = "blah" + self.assertEqual(d.name, "blah") + + def test_win32com_dyndispatch(self): + # dynamic Dispatch is case-IN-sensitive + from win32com.client.dynamic import Dispatch + d = Dispatch("TestDispServerLib.TestDispServer") + + self.assertEqual(d.eval("3.14"), 3.14) + self.assertEqual(d.eval("1 + 2"), 3) + self.assertEqual(d.eval("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + self.assertEqual(d.eval2("3.14"), 3.14) + self.assertEqual(d.eval2("1 + 2"), 3) + self.assertEqual(d.eval2("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + d.eval("__import__('comtypes.client').client.CreateObject('Scripting.Dictionary')") + + self.assertEqual(d.EVAL("3.14"), 3.14) + self.assertEqual(d.EVAL("1 + 2"), 3) + self.assertEqual(d.EVAL("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + self.assertEqual(d.EVAL2("3.14"), 3.14) + self.assertEqual(d.EVAL2("1 + 2"), 3) + self.assertEqual(d.EVAL2("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + server_id = d.eval("id(self)") + self.assertEqual(d.id, server_id) + self.assertEqual(d.ID, server_id) + + self.assertEqual(d.Name, "spam, spam, spam") + self.assertEqual(d.nAME, "spam, spam, spam") + + d.SetName("foo bar") + self.assertEqual(d.Name, "foo bar") + + # fails. Why? 
+## d.name = "blah" +## self.assertEqual(d.Name, "blah") + + def test_comtypes(self): + from comtypes.client import CreateObject + d = CreateObject("TestDispServerLib.TestDispServer") + + self.assertEqual(d.eval("3.14"), 3.14) + self.assertEqual(d.eval("1 + 2"), 3) + self.assertEqual(d.eval("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + self.assertEqual(d.eval2("3.14"), 3.14) + self.assertEqual(d.eval2("1 + 2"), 3) + self.assertEqual(d.eval2("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + d.eval("__import__('comtypes.client').client.CreateObject('Scripting.Dictionary')") + + self.assertEqual(d.EVAL("3.14"), 3.14) + self.assertEqual(d.EVAL("1 + 2"), 3) + self.assertEqual(d.EVAL("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + self.assertEqual(d.EVAL2("3.14"), 3.14) + self.assertEqual(d.EVAL2("1 + 2"), 3) + self.assertEqual(d.EVAL2("[1 + 2, 'foo', None]"), (3, 'foo', None)) + + server_id = d.eval("id(self)") + self.assertEqual(d.id, server_id) + self.assertEqual(d.ID, server_id) + + self.assertEqual(d.Name, "spam, spam, spam") + self.assertEqual(d.nAME, "spam, spam, spam") + + d.SetName("foo bar") + self.assertEqual(d.Name, "foo bar") + + d.name = "blah" + self.assertEqual(d.Name, "blah") + + def test_withjscript(self): + import os + jscript = os.path.join(os.path.dirname(__file__), "test_jscript.js") + errcode = os.system("cscript -nologo %s" % jscript) + self.assertEqual(errcode, 0) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_dyndispatch.py b/venv/Lib/site-packages/comtypes/test/test_dyndispatch.py new file mode 100644 index 00000000..af502c95 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_dyndispatch.py @@ -0,0 +1,96 @@ +import unittest +from comtypes.automation import IDispatch +from comtypes.client import CreateObject, GetModule +from comtypes.client.lazybind import Dispatch + +# create the typelib wrapper and import it +GetModule("scrrun.dll") +from comtypes.gen.Scripting import IDictionary + + +class Test(unittest.TestCase): + + def setUp(self): + self.d = CreateObject("Scripting.Dictionary", dynamic=True) + + def tearDown(self): + del self.d + + def test_type(self): + self.assertTrue(isinstance(self.d, Dispatch)) + + def test_index_setter(self): + d = self.d + d.CompareMode = 42 + d["foo"] = 1 + d["bar"] = "spam foo" + d["baz"] = 3.14 + self.assertAccessInterface(d) + + def test_named_property_setter(self): + d = self.d + d.CompareMode = 42 + d.Item["foo"] = 1 + d.Item["bar"] = "spam foo" + d.Item["baz"] = 3.14 + self.assertAccessInterface(d) + + def test_reference_passing(self): + d = self.d + + # Check reference passing + d["self"] = d + d[0] = "something nontrivial" + dself = d["self"] + dself[1] = "something else nontrivial" + self.assertEqual(d, dself) + self.assertEqual(d[0], "something nontrivial") + self.assertEqual(dself[0], d[0]) + self.assertEqual(d[1], "something else nontrivial") + self.assertEqual(dself[1], d[1]) + + def test_query_interface(self): + d = self.d + d.CompareMode = 42 + d.Item["foo"] = 1 + d.Item["bar"] = "spam foo" + d.Item["baz"] = 3.14 + + # This should cast the underlying com object to an IDispatch + d2 = d.QueryInterface(IDispatch) + # Which can be cast to the non-dynamic type + d3 = d2.QueryInterface(IDictionary) + self.assertEqual(d3.CompareMode, 42) + self.assertEqual(d3.Item["foo"], 1) + self.assertEqual(d3.Item["bar"], "spam foo") + self.assertEqual(d3.Item["baz"], 3.14) + + def test_named_property_no_length(self): + self.assertRaises(TypeError, len, self.d.Item) + + def 
test_named_property_not_iterable(self): + self.assertRaises(TypeError, list, self.d.Item) + + def assertAccessInterface(self, d): + """ Asserts access via indexing and named property """ + self.assertEqual(d.CompareMode, 42) + self.assertEqual(d["foo"], 1) + self.assertEqual(d.Item["foo"], d["foo"]) + self.assertEqual(d.Item("foo"), d["foo"]) + self.assertEqual(d["bar"], "spam foo") + self.assertEqual(d.Item("bar"), "spam foo") + self.assertEqual(d["baz"], 3.14) + self.assertEqual(d.Item("baz"), d["baz"]) + self.assertIsNone(d["asdlfkj"]) + self.assertIsNone(d.Item["asdlfkj"]) + self.assertIsNone(d.Item("asdlfkj")) + + items = iter(d) + self.assertEqual(items[0], "foo") + self.assertEqual(items[1], "bar") + self.assertEqual(items[2], "baz") + self.assertEqual(items[3], "asdlfkj") + + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_excel.py b/venv/Lib/site-packages/comtypes/test/test_excel.py new file mode 100644 index 00000000..ebff5b8e --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_excel.py @@ -0,0 +1,114 @@ +# -*- coding: latin-1 -*- +import unittest + +import comtypes.test +comtypes.test.requires("ui") + +import datetime + +from comtypes.client import CreateObject + +xlRangeValueDefault = 10 +xlRangeValueXMLSpreadsheet = 11 +xlRangeValueMSPersistXML = 12 + +class Test(unittest.TestCase): + + def test_earlybound(self): + self._doit(False) + + def test_latebound(self): + self._doit(True) + + def _doit(self, dynamic): + self.xl = CreateObject("Excel.Application", dynamic=dynamic) + + xl = self.xl + xl.Visible = 0 + self.assertEqual(xl.Visible, False) + xl.Visible = 1 + self.assertEqual(xl.Visible, True) + + wb = xl.Workbooks.Add() + + # Test with empty-tuple argument + xl.Range["A1", "C1"].Value[()] = (10,"20",31.4) + xl.Range["A2:C2"].Value[()] = ('x','y','z') + # Test with empty slice argument + xl.Range["A3:C3"].Value[:] = ('3','2','1') +## not (yet?) 
implemented: +## xl.Range["A4:C4"].Value = ('3','2','1') + + # call property to retrieve value + self.assertEqual(xl.Range["A1:C3"].Value(), + ((10.0, 20.0, 31.4), + ("x", "y", "z"), + (3.0, 2.0, 1.0))) + # index with empty tuple + self.assertEqual(xl.Range["A1:C3"].Value[()], + ((10.0, 20.0, 31.4), + ("x", "y", "z"), + (3.0, 2.0, 1.0))) + # index with empty slice + self.assertEqual(xl.Range["A1:C3"].Value[:], + ((10.0, 20.0, 31.4), + ("x", "y", "z"), + (3.0, 2.0, 1.0))) + self.assertEqual(xl.Range["A1:C3"].Value[xlRangeValueDefault], + ((10.0, 20.0, 31.4), + ("x", "y", "z"), + (3.0, 2.0, 1.0))) + self.assertEqual(xl.Range["A1", "C3"].Value[()], + ((10.0, 20.0, 31.4), + ("x", "y", "z"), + (3.0, 2.0, 1.0))) + + r = xl.Range["A1:C3"] + i = iter(r) + + # Test for iteration support in 'Range' interface + self.assertEqual([c.Value() for c in xl.Range["A1:C3"]], + [10.0, 20.0, 31.4, + "x", "y", "z", + 3.0, 2.0, 1.0]) + + # With pywin32, one could write xl.Cells(a, b) + # With comtypes, one must write xl.Cells.Item(1, b) + + for i in range(20): + xl.Cells.Item[i+1,i+1].Value[()] = "Hi %d" % i + print(xl.Cells.Item[i+1, i+1].Value[()]) + + for i in range(20): + xl.Cells(i+1,i+1).Value[()] = "Hi %d" % i + print(xl.Cells(i+1, i+1).Value[()]) + + # test dates out with Excel + xl.Range["A5"].Value[()] = "Excel time" + xl.Range["B5"].Formula = "=Now()" + self.assertEqual(xl.Cells.Item[5,2].Formula, "=NOW()") + + xl.Range["A6"].Calculate() + excel_time = xl.Range["B5"].Value[()] + self.assertEqual(type(excel_time), datetime.datetime) + python_time = datetime.datetime.now() + + self.assertTrue(python_time >= excel_time) + self.assertTrue(python_time - excel_time < datetime.timedelta(seconds=1)) + + # some random code, grabbed from c.l.p + sh = wb.Worksheets[1] + + sh.Cells.Item[1,1].Value[()] = "Hello World!" + sh.Cells.Item[3,3].Value[()] = "Hello World!" + sh.Range[sh.Cells.Item[1,1],sh.Cells.Item[3,3]].Copy(sh.Cells.Item[4,1]) + sh.Range[sh.Cells.Item[4,1],sh.Cells.Item[6,3]].Select() + + def tearDown(self): + # Close all open workbooks without saving, then quit excel. + for wb in self.xl.Workbooks: + wb.Close(0) + self.xl.Quit() + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_findgendir.py b/venv/Lib/site-packages/comtypes/test/test_findgendir.py new file mode 100644 index 00000000..5b148680 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_findgendir.py @@ -0,0 +1,82 @@ +import types, os, unittest, sys, tempfile +import imp + +if sys.version_info >= (2, 6): + from imp import reload + +import comtypes +import comtypes.client +import comtypes.gen + +from comtypes.client._code_cache import _get_appdata_dir + +imgbase = os.path.splitext(os.path.basename(sys.executable))[0] + +class Test(unittest.TestCase): + """Test the comtypes.client._find_gen_dir() function in several + simulated environments. + """ + def setUp(self): + # save the original comtypes.gen modules and create a + # substitute with an empty __path__. 
+ self.orig_comtypesgen = sys.modules["comtypes.gen"] + del sys.modules["comtypes.gen"] + del comtypes.gen + mod = sys.modules["comtypes.gen"] = types.ModuleType("comtypes.gen") + mod.__path__ = [] + comtypes.gen = mod + + def tearDown(self): + # Delete py2exe-attributes that we have attached to the sys module + for name in "frozen frozendllhandle".split(): + try: + delattr(sys, name) + except AttributeError: + pass + # restore the original comtypes.gen module + comtypes.gen = self.orig_comtypesgen + sys.modules["comtypes.gen"] = self.orig_comtypesgen + imp.reload(comtypes.gen) + + def test_script(self): + # %APPDATA%\Python\Python25\comtypes_cache + template = r"$APPDATA\Python\Python%d%d\comtypes_cache" + path = os.path.expandvars(template % sys.version_info[:2]) + gen_dir = comtypes.client._find_gen_dir() + self.assertEqual(path, gen_dir) + + def test_frozen_dll(self): + sys.frozen = "dll" + sys.frozendllhandle = sys.dllhandle + ma, mi = sys.version_info[:2] + # %TEMP%\comtypes_cache\<imagebasename>-25 + # the image is python25.dll + path = os.path.join(tempfile.gettempdir(), + r"comtypes_cache\%s%d%d-%d%d" % (imgbase, ma, mi, ma, mi)) + gen_dir = comtypes.client._find_gen_dir() + self.assertEqual(path, gen_dir) + + def test_frozen_console_exe(self): + sys.frozen = "console_exe" + # %TEMP%\comtypes_cache\<imagebasename>-25 + path = os.path.join(tempfile.gettempdir(), + r"comtypes_cache\%s-%d%d" % ( + imgbase, sys.version_info[0], sys.version_info[1])) + gen_dir = comtypes.client._find_gen_dir() + self.assertEqual(path, gen_dir) + + def test_frozen_windows_exe(self): + sys.frozen = "windows_exe" + # %TEMP%\comtypes_cache\<imagebasename>-25 + path = os.path.join(tempfile.gettempdir(), + r"comtypes_cache\%s-%d%d" % ( + imgbase, sys.version_info[0], sys.version_info[1])) + gen_dir = comtypes.client._find_gen_dir() + self.assertEqual(path, gen_dir) + + +def main(): + unittest.main() + +if __name__ == "__main__": + main() diff --git a/venv/Lib/site-packages/comtypes/test/test_getactiveobj.py b/venv/Lib/site-packages/comtypes/test/test_getactiveobj.py new file mode 100644 index 00000000..d8ec7a2e --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_getactiveobj.py @@ -0,0 +1,52 @@ +import unittest + +import comtypes +import comtypes.client + +import comtypes.test +comtypes.test.requires("ui") + +class Test(unittest.TestCase): + def tearDown(self): + if hasattr(self, "w1"): + self.w1.Quit() + del self.w1 + + + def test(self): + try: + comtypes.client.GetActiveObject("Word.Application") + except WindowsError: + pass + else: + # seems word is running, we cannot test this. 
+ self.fail("MSWord is running, cannot test") + + # create a WORD instance + self.w1 = w1 = comtypes.client.CreateObject("Word.Application") + # connect to the running instance + w2 = comtypes.client.GetActiveObject("Word.Application") + + # check if they are referring to the same object + self.assertEqual(w1.QueryInterface(comtypes.IUnknown), + w2.QueryInterface(comtypes.IUnknown)) + + w1.Quit() + del self.w1 + + import time + time.sleep(1) + + try: + w2.Visible + except comtypes.COMError as err: + variables = err.hresult, err.text, err.details + self.assertEqual(variables, err[:]) + else: + raise AssertionError("COMError not raised") + + self.assertRaises(WindowsError, comtypes.client.GetActiveObject, "Word.Application") + + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_ie.py b/venv/Lib/site-packages/comtypes/test/test_ie.py new file mode 100644 index 00000000..4213ccef --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_ie.py @@ -0,0 +1,107 @@ +import unittest as ut +from ctypes import * +from comtypes.client import CreateObject, GetEvents + +import comtypes.test +comtypes.test.requires("ui") + +class EventSink: + def __init__(self): + self._events = [] + + # some DWebBrowserEvents + def OnVisible(self, this, *args): +## print "OnVisible", args + self._events.append("OnVisible") + + def BeforeNavigate(self, this, *args): +## print "BeforeNavigate", args + self._events.append("BeforeNavigate") + + def NavigateComplete(self, this, *args): +## print "NavigateComplete", args + self._events.append("NavigateComplete") + + # some DWebBrowserEvents2 + def BeforeNavigate2(self, this, *args): +## print "BeforeNavigate2", args + self._events.append("BeforeNavigate2") + + def NavigateComplete2(self, this, *args): +## print "NavigateComplete2", args + self._events.append("NavigateComplete2") + + def DocumentComplete(self, this, *args): +## print "DocumentComplete", args + self._events.append("DocumentComplete") + + +class POINT(Structure): + _fields_ = [("x", c_long), + ("y", c_long)] + +class MSG(Structure): + _fields_ = [("hWnd", c_ulong), + ("message", c_uint), + ("wParam", c_ulong), + ("lParam", c_ulong), + ("time", c_ulong), + ("pt", POINT)] + +def PumpWaitingMessages(): + from ctypes import windll, byref + user32 = windll.user32 + msg = MSG() + PM_REMOVE = 0x0001 + while user32.PeekMessageA(byref(msg), 0, 0, 0, PM_REMOVE): + user32.TranslateMessage(byref(msg)) + user32.DispatchMessageA(byref(msg)) + +class Test(ut.TestCase): + + def tearDown(self): + import gc + gc.collect() + import time + time.sleep(2) + + def test_default_eventinterface(self): + sink = EventSink() + ie = CreateObject("InternetExplorer.Application") + conn = GetEvents(ie, sink=sink) + ie.Visible = True + ie.Navigate2(URL="http://docs.python.org/", Flags=0) + import time + for i in range(50): + PumpWaitingMessages() + time.sleep(0.1) + ie.Visible = False + ie.Quit() + + self.assertEqual(sink._events, ['OnVisible', 'BeforeNavigate2', + 'NavigateComplete2', 'DocumentComplete', + 'OnVisible']) + + del ie + del conn + + def test_nondefault_eventinterface(self): + sink = EventSink() + ie = CreateObject("InternetExplorer.Application") + import comtypes.gen.SHDocVw as mod + conn = GetEvents(ie, sink, interface=mod.DWebBrowserEvents) + + ie.Visible = True + ie.Navigate2(Flags=0, URL="http://docs.python.org/") + import time + for i in range(50): + PumpWaitingMessages() + time.sleep(0.1) + ie.Visible = False + ie.Quit() + + self.assertEqual(sink._events, 
['BeforeNavigate', 'NavigateComplete']) + del ie + +if __name__ == "__main__": + ut.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_jscript.js b/venv/Lib/site-packages/comtypes/test/test_jscript.js new file mode 100644 index 00000000..08741318 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_jscript.js @@ -0,0 +1,17 @@ +var d = new ActiveXObject("TestDispServerLib.TestDispServer"); + +//WScript.Echo("d.Name"); +if (d.Name != "spam, spam, spam") + throw new Error(d.Name); + +//WScript.Echo("d.Name = 'foo'"); +d.Name = "foo"; + +//WScript.Echo("d.Name"); +if (d.Name != "foo") + throw new Error(d.Name); + +//WScript.Echo("d.Eval('1 + 2')"); +var result = d.Eval("1 + 2"); +if (result != 3) + throw new Error(result); diff --git a/venv/Lib/site-packages/comtypes/test/test_msscript.py b/venv/Lib/site-packages/comtypes/test/test_msscript.py new file mode 100644 index 00000000..827beccc --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_msscript.py @@ -0,0 +1,74 @@ +import unittest +from ctypes import POINTER +from comtypes.automation import IDispatch +from comtypes.client import CreateObject +from comtypes import GUID + +##from test import test_support +##from comtypes.unittests import support + +try: + GUID.from_progid("MSScriptControl.ScriptControl") + CreateObject("MSScriptControl.ScriptControl") +except WindowsError: + # doesn't exist on Windows CE or in 64-bit. + pass +else: + + class Test(unittest.TestCase): + def test_jscript(self): + engine = CreateObject("MSScriptControl.ScriptControl") + engine.Language = "JScript" + # strange. + # + # engine.Eval returns a VARIANT containing a dispatch pointer. + # + # The dispatch pointer exposes this typeinfo (the number of + # dispproperties varies, depending on the length of the list we pass + # to Eval): + # + #class JScriptTypeInfo(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IDispatch): + # 'JScript Type Info' + # _iid_ = GUID('{C59C6B12-F6C1-11CF-8835-00A0C911E8B2}') + # _idlflags_ = [] + # _methods_ = [] + #JScriptTypeInfo._disp_methods_ = [ + # DISPPROPERTY([dispid(9522932)], VARIANT, '0'), + # DISPPROPERTY([dispid(9522976)], VARIANT, '1'), + #] + # + # Although the exact interface members vary, the guid stays + # the same. Don't think that's allowed by COM standards - is + # this a bug in the MSScriptControl? + # + # What's even more strange is that the returned dispatch + # pointer can't be QI'd for this interface! So it seems the + # typeinfo is really a temporary thing. + + res = engine.Eval("[1, 2, 3, 4]")._comobj + + # comtypes.client works around this bug, by not trying to + # high-level wrap the dispatch pointer because QI for the real + # interface fails. 
+ self.assertEqual(type(res), POINTER(IDispatch)) + + tinfo_1 = engine.Eval("[1, 2, 3]")._comobj.GetTypeInfo(0) + tinfo_2 = engine.Eval("[1, 2, 3, 4]")._comobj.GetTypeInfo(0) + tinfo_3 = engine.Eval("[1, 2, 3, 4, 5]")._comobj.GetTypeInfo(0) + + + self.assertEqual(tinfo_1.GetTypeAttr().cVars, 3) + self.assertEqual(tinfo_2.GetTypeAttr().cVars, 4) + self.assertEqual(tinfo_3.GetTypeAttr().cVars, 5) + + # These tests simply describe the current behaviour ;-) + self.assertEqual(tinfo_1.GetTypeAttr().guid, + tinfo_1.GetTypeAttr().guid) + + ## print (res[0], res[1], res[2]) + ## print len(res) + + engine.Reset() + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_outparam.py b/venv/Lib/site-packages/comtypes/test/test_outparam.py new file mode 100644 index 00000000..2bd347e9 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_outparam.py @@ -0,0 +1,69 @@ +from ctypes import * +import unittest + +import comtypes.test +comtypes.test.requires("devel") + +from comtypes import BSTR, IUnknown, GUID, COMMETHOD, HRESULT +class IMalloc(IUnknown): + _iid_ = GUID("{00000002-0000-0000-C000-000000000046}") + _methods_ = [ + COMMETHOD([], c_void_p, "Alloc", + ([], c_ulong, "cb")), + COMMETHOD([], c_void_p, "Realloc", + ([], c_void_p, "pv"), + ([], c_ulong, "cb")), + COMMETHOD([], None, "Free", + ([], c_void_p, "py")), + COMMETHOD([], c_ulong, "GetSize", + ([], c_void_p, "pv")), + COMMETHOD([], c_int, "DidAlloc", + ([], c_void_p, "pv")), + COMMETHOD([], None, "HeapMinimize") # 25 + ] + +malloc = POINTER(IMalloc)() +oledll.ole32.CoGetMalloc(1, byref(malloc)) +assert bool(malloc) + +def from_outparm(self): + if not self: + return None + result = wstring_at(self) + if not malloc.DidAlloc(self): + raise ValueError("memory was NOT allocated by CoTaskMemAlloc") + windll.ole32.CoTaskMemFree(self) + return result +c_wchar_p.__ctypes_from_outparam__ = from_outparm + +def comstring(text, typ=c_wchar_p): + text = str(text) + size = (len(text) + 1) * sizeof(c_wchar) + mem = windll.ole32.CoTaskMemAlloc(size) + print("malloc'd 0x%x, %d bytes" % (mem, size)) + ptr = cast(mem, typ) + memmove(mem, text, size) + return ptr + +class Test(unittest.TestCase): + def test_c_char(self): +## ptr = c_wchar_p("abc") +## self.failUnlessEqual(ptr.__ctypes_from_outparam__(), +## "abc") + +## p = BSTR("foo bar spam") + + x = comstring("Hello, World") + y = comstring("foo bar") + z = comstring("spam, spam, and spam") + +## (x.__ctypes_from_outparam__(), x.__ctypes_from_outparam__()) + print((x.__ctypes_from_outparam__(), None)) #x.__ctypes_from_outparam__()) + +## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() +## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() +## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() +## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_propputref.py b/venv/Lib/site-packages/comtypes/test/test_propputref.py new file mode 100644 index 00000000..06dcfbc3 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_propputref.py @@ -0,0 +1,35 @@ +# There are also propputref tests in test_sapi.py! 
+import unittest +from comtypes.client import CreateObject +from comtypes.automation import VARIANT + +class Test(unittest.TestCase): + def test(self, dynamic=False): + d = CreateObject("Scripting.Dictionary", dynamic=dynamic) + s = CreateObject("TestComServerLib.TestComServer", dynamic=dynamic) + s.name = "the value" + + # This calls propputref, since we assign an Object + d.Item["object"] = s + # This calls propput, since we assing a Value + d.Item["value"] = s.name + + self.assertEqual(d.Item["object"], s) + self.assertEqual(d.Item["object"].name, "the value") + self.assertEqual(d.Item["value"], "the value") + + # Changing the default property of the object + s.name = "foo bar" + self.assertEqual(d.Item["object"], s) + self.assertEqual(d.Item["object"].name, "foo bar") + self.assertEqual(d.Item["value"], "the value") + + # This also calls propputref since we assign an Object + d.Item["var"] = VARIANT(s) + self.assertEqual(d.Item["var"], s) + + def test_dispatch(self): + return self.test(dynamic=True) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_pump_events.py b/venv/Lib/site-packages/comtypes/test/test_pump_events.py new file mode 100644 index 00000000..8ae14de0 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_pump_events.py @@ -0,0 +1,17 @@ +import gc +import unittest + +from comtypes.client import PumpEvents + + +class PumpEventsTest(unittest.TestCase): + def test_pump_events_doesnt_leak_cycles(self): + gc.collect() + for i in range(3): + PumpEvents(0.05) + ncycles = gc.collect() + self.assertEqual(ncycles, 0) + + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_safearray.py b/venv/Lib/site-packages/comtypes/test/test_safearray.py new file mode 100644 index 00000000..4d87a358 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_safearray.py @@ -0,0 +1,515 @@ +import array +from comtypes import BSTR, IUnknown +from comtypes.test import is_resource_enabled, get_numpy +from comtypes.test.find_memleak import find_memleak +from ctypes import POINTER, PyDLL, byref, c_double, c_long, pointer, py_object +from ctypes.wintypes import BOOL +import datetime +from decimal import Decimal +import unittest + +from comtypes.automation import ( + VARIANT, VT_ARRAY, VT_VARIANT, VT_I4, VT_R4, VT_R8, VT_BSTR, VARIANT_BOOL) +from comtypes.automation import _midlSAFEARRAY +from comtypes.safearray import safearray_as_ndarray +from comtypes._safearray import SafeArrayGetVartype + + +def get_array(sa): + """Get an array from a safe array type""" + with safearray_as_ndarray: + return sa[0] + + +def com_refcnt(o): + """Return the COM refcount of an interface pointer""" + import gc + gc.collect() + gc.collect() + o.AddRef() + return o.Release() + + +class VariantTestCase(unittest.TestCase): + def test_VARIANT_array(self): + v = VARIANT() + v.value = ((1, 2, 3), ("foo", "bar", None)) + self.assertEqual(v.vt, VT_ARRAY | VT_VARIANT) + self.assertEqual(v.value, ((1, 2, 3), ("foo", "bar", None))) + + def func(): + VARIANT((1, 2, 3), ("foo", "bar", None)) + + bytes = find_memleak(func) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_double_array(self): + a = array.array("d", (3.14, 2.78)) + v = VARIANT(a) + self.assertEqual(v.vt, VT_ARRAY | VT_R8) + self.assertEqual(tuple(a.tolist()), v.value) + + def func(): + VARIANT(array.array("d", (3.14, 2.78))) + + bytes = find_memleak(func) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_float_array(self): + a = 
array.array("f", (3.14, 2.78)) + v = VARIANT(a) + self.assertEqual(v.vt, VT_ARRAY | VT_R4) + self.assertEqual(tuple(a.tolist()), v.value) + + def test_2dim_array(self): + data = ((1, 2, 3, 4), + (5, 6, 7, 8), + (9, 10, 11, 12)) + v = VARIANT(data) + self.assertEqual(v.value, data) + + +class SafeArrayTestCase(unittest.TestCase): + + def test_equality(self): + a = _midlSAFEARRAY(c_long) + b = _midlSAFEARRAY(c_long) + self.assertTrue(a is b) + + c = _midlSAFEARRAY(BSTR) + d = _midlSAFEARRAY(BSTR) + self.assertTrue(c is d) + + self.assertNotEqual(a, c) + + # XXX remove: + self.assertEqual((a._itemtype_, a._vartype_), + (c_long, VT_I4)) + self.assertEqual((c._itemtype_, c._vartype_), + (BSTR, VT_BSTR)) + + def test_nested_contexts(self): + np = get_numpy() + if np is None: + return + + t = _midlSAFEARRAY(BSTR) + sa = t.from_param(["a", "b", "c"]) + + first = sa[0] + with safearray_as_ndarray: + second = sa[0] + with safearray_as_ndarray: + third = sa[0] + fourth = sa[0] + fifth = sa[0] + + self.assertTrue(isinstance(first, tuple)) + self.assertTrue(isinstance(second, np.ndarray)) + self.assertTrue(isinstance(third, np.ndarray)) + self.assertTrue(isinstance(fourth, np.ndarray)) + self.assertTrue(isinstance(fifth, tuple)) + + def test_VT_BSTR(self): + t = _midlSAFEARRAY(BSTR) + + sa = t.from_param(["a", "b", "c"]) + self.assertEqual(sa[0], ("a", "b", "c")) + self.assertEqual(SafeArrayGetVartype(sa), VT_BSTR) + + def test_VT_BSTR_ndarray(self): + np = get_numpy() + if np is None: + return + + t = _midlSAFEARRAY(BSTR) + + sa = t.from_param(["a", "b", "c"]) + arr = get_array(sa) + + self.assertTrue(isinstance(arr, np.ndarray)) + self.assertEqual(np.dtype('<U1'), arr.dtype) + self.assertTrue((arr == ("a", "b", "c")).all()) + self.assertEqual(SafeArrayGetVartype(sa), VT_BSTR) + + def test_VT_BSTR_leaks(self): + sb = _midlSAFEARRAY(BSTR) + + def doit(): + sb.from_param(["foo", "bar"]) + + bytes = find_memleak(doit) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_VT_I4_leaks(self): + sa = _midlSAFEARRAY(c_long) + + def doit(): + sa.from_param([1, 2, 3, 4, 5, 6]) + + bytes = find_memleak(doit) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_VT_I4(self): + t = _midlSAFEARRAY(c_long) + + sa = t.from_param([11, 22, 33]) + + self.assertEqual(sa[0], (11, 22, 33)) + + self.assertEqual(SafeArrayGetVartype(sa), VT_I4) + + # TypeError: len() of unsized object + self.assertRaises(TypeError, lambda: t.from_param(object())) + + def test_VT_I4_ndarray(self): + np = get_numpy() + if np is None: + return + + t = _midlSAFEARRAY(c_long) + + inarr = np.array([11, 22, 33]) + sa = t.from_param(inarr) + + arr = get_array(sa) + + self.assertTrue(isinstance(arr, np.ndarray)) + self.assertEqual(np.dtype(np.int), arr.dtype) + self.assertTrue((arr == inarr).all()) + self.assertEqual(SafeArrayGetVartype(sa), VT_I4) + + def test_array(self): + np = get_numpy() + if np is None: + return + + t = _midlSAFEARRAY(c_double) + pat = pointer(t()) + + pat[0] = np.zeros(32, dtype=np.float) + arr = get_array(pat[0]) + self.assertTrue(isinstance(arr, np.ndarray)) + self.assertEqual(np.dtype(np.double), arr.dtype) + self.assertTrue((arr == (0.0,) * 32).all()) + + data = ((1.0, 2.0, 3.0), + (4.0, 5.0, 6.0), + (7.0, 8.0, 9.0)) + a = np.array(data, dtype=np.double) + pat[0] = a + arr = get_array(pat[0]) + self.assertTrue(isinstance(arr, np.ndarray)) + self.assertEqual(np.dtype(np.double), arr.dtype) + self.assertTrue((arr == data).all()) + + data = ((1.0, 2.0), (3.0, 4.0), (5.0, 6.0)) + a = np.array(data, + 
dtype=np.double, + order="F") + pat[0] = a + arr = get_array(pat[0]) + self.assertTrue(isinstance(arr, np.ndarray)) + self.assertEqual(np.dtype(np.double), arr.dtype) + self.assertEqual(pat[0][0], data) + + def test_VT_VARIANT(self): + t = _midlSAFEARRAY(VARIANT) + + now = datetime.datetime.now() + sa = t.from_param([11, "22", None, True, now, Decimal("3.14")]) + self.assertEqual(sa[0], (11, "22", None, True, now, Decimal("3.14"))) + + self.assertEqual(SafeArrayGetVartype(sa), VT_VARIANT) + + def test_VT_VARIANT_ndarray(self): + np = get_numpy() + if np is None: + return + + t = _midlSAFEARRAY(VARIANT) + + now = datetime.datetime.now() + inarr = np.array( + [11, "22", "33", 44.0, None, True, now, Decimal("3.14")] + ).reshape(2, 4) + sa = t.from_param(inarr) + arr = get_array(sa) + self.assertEqual(np.dtype(object), arr.dtype) + self.assertTrue(isinstance(arr, np.ndarray)) + self.assertTrue((arr == inarr).all()) + self.assertEqual(SafeArrayGetVartype(sa), VT_VARIANT) + + def test_VT_BOOL(self): + t = _midlSAFEARRAY(VARIANT_BOOL) + + sa = t.from_param([True, False, True, False]) + self.assertEqual(sa[0], (True, False, True, False)) + + def test_VT_BOOL_ndarray(self): + np = get_numpy() + if np is None: + return + + t = _midlSAFEARRAY(VARIANT_BOOL) + + sa = t.from_param([True, False, True, False]) + arr = get_array(sa) + self.assertEqual(np.dtype(np.bool_), arr.dtype) + self.assertTrue(isinstance(arr, np.ndarray)) + self.assertTrue((arr == (True, False, True, False)).all()) + + def test_VT_UNKNOWN_1(self): + a = _midlSAFEARRAY(POINTER(IUnknown)) + t = _midlSAFEARRAY(POINTER(IUnknown)) + self.assertTrue(a is t) + + from comtypes.typeinfo import CreateTypeLib + # will never be saved to disk + punk = CreateTypeLib("spam").QueryInterface(IUnknown) + + # initial refcount + initial = com_refcnt(punk) + + # This should increase the refcount by 1 + sa = t.from_param([punk]) + self.assertEqual(initial + 1, com_refcnt(punk)) + + # Unpacking the array must not change the refcount, and must + # return an equal object. + self.assertEqual((punk,), sa[0]) + self.assertEqual(initial + 1, com_refcnt(punk)) + + del sa + self.assertEqual(initial, com_refcnt(punk)) + + sa = t.from_param([None]) + self.assertEqual((POINTER(IUnknown)(),), sa[0]) + + def test_VT_UNKNOWN_multi(self): + a = _midlSAFEARRAY(POINTER(IUnknown)) + t = _midlSAFEARRAY(POINTER(IUnknown)) + self.assertTrue(a is t) + + from comtypes.typeinfo import CreateTypeLib + # will never be saved to disk + punk = CreateTypeLib("spam").QueryInterface(IUnknown) + + # initial refcount + initial = com_refcnt(punk) + + # This should increase the refcount by 4 + sa = t.from_param((punk,) * 4) + self.assertEqual(initial + 4, com_refcnt(punk)) + + # Unpacking the array must not change the refcount, and must + # return an equal object. 
+ self.assertEqual((punk,)*4, sa[0]) + self.assertEqual(initial + 4, com_refcnt(punk)) + + del sa + self.assertEqual(initial, com_refcnt(punk)) + + # This should increase the refcount by 2 + sa = t.from_param((punk, None, punk, None)) + self.assertEqual(initial + 2, com_refcnt(punk)) + + null = POINTER(IUnknown)() + self.assertEqual((punk, null, punk, null), sa[0]) + + del sa + self.assertEqual(initial, com_refcnt(punk)) + + # repeat same test, with 2 different com pointers + + plib = CreateTypeLib("foo") + a, b = com_refcnt(plib), com_refcnt(punk) + sa = t.from_param([plib, punk, plib]) + +#### self.failUnlessEqual((plib, punk, plib), sa[0]) + self.assertEqual((a+2, b+1), (com_refcnt(plib), com_refcnt(punk))) + + del sa + self.assertEqual((a, b), (com_refcnt(plib), com_refcnt(punk))) + + def test_VT_UNKNOWN_multi_ndarray(self): + np = get_numpy() + if np is None: + return + + a = _midlSAFEARRAY(POINTER(IUnknown)) + t = _midlSAFEARRAY(POINTER(IUnknown)) + self.assertTrue(a is t) + + from comtypes.typeinfo import CreateTypeLib + # will never be saved to disk + punk = CreateTypeLib("spam").QueryInterface(IUnknown) + + # initial refcount + initial = com_refcnt(punk) + + # This should increase the refcount by 4 + sa = t.from_param((punk,) * 4) + self.assertEqual(initial + 4, com_refcnt(punk)) + + # Unpacking the array must not change the refcount, and must + # return an equal object. Creating an ndarray may change the + # refcount. + arr = get_array(sa) + self.assertTrue(isinstance(arr, np.ndarray)) + self.assertEqual(np.dtype(object), arr.dtype) + self.assertTrue((arr == (punk,)*4).all()) + self.assertEqual(initial + 8, com_refcnt(punk)) + + del arr + self.assertEqual(initial + 4, com_refcnt(punk)) + + del sa + self.assertEqual(initial, com_refcnt(punk)) + + # This should increase the refcount by 2 + sa = t.from_param((punk, None, punk, None)) + self.assertEqual(initial + 2, com_refcnt(punk)) + + null = POINTER(IUnknown)() + arr = get_array(sa) + self.assertTrue(isinstance(arr, np.ndarray)) + self.assertEqual(np.dtype(object), arr.dtype) + self.assertTrue((arr == (punk, null, punk, null)).all()) + + del sa + del arr + self.assertEqual(initial, com_refcnt(punk)) + + def test_UDT(self): + from comtypes.gen.TestComServerLib import MYCOLOR + + t = _midlSAFEARRAY(MYCOLOR) + self.assertTrue(t is _midlSAFEARRAY(MYCOLOR)) + + sa = t.from_param([MYCOLOR(0, 0, 0), MYCOLOR(1, 2, 3)]) + + self.assertEqual([(x.red, x.green, x.blue) for x in sa[0]], + [(0.0, 0.0, 0.0), (1.0, 2.0, 3.0)]) + + def doit(): + t.from_param([MYCOLOR(0, 0, 0), MYCOLOR(1, 2, 3)]) + bytes = find_memleak(doit) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_UDT_ndarray(self): + np = get_numpy() + if np is None: + return + + from comtypes.gen.TestComServerLib import MYCOLOR + + t = _midlSAFEARRAY(MYCOLOR) + self.assertTrue(t is _midlSAFEARRAY(MYCOLOR)) + + sa = t.from_param([MYCOLOR(0, 0, 0), MYCOLOR(1, 2, 3)]) + arr = get_array(sa) + + self.assertTrue(isinstance(arr, np.ndarray)) + # The conversion code allows numpy to choose the dtype of + # structured data. This dtype is structured under numpy 1.5, 1.7 and + # 1.8, and object in 1.6. Instead of assuming either of these, check + # the array contents based on the chosen type. 
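The safe-array tests above all revolve around comtypes.automation._midlSAFEARRAY, which builds (and caches) a SAFEARRAY pointer type for a given item type. A minimal sketch of the pack/unpack round trip they verify (assumes comtypes on Windows; separate from the vendored test code):

from ctypes import c_long
from comtypes.automation import _midlSAFEARRAY, VT_I4
from comtypes._safearray import SafeArrayGetVartype

sa_type = _midlSAFEARRAY(c_long)         # repeated calls return the same cached type object
sa = sa_type.from_param([11, 22, 33])    # pack a Python sequence into a SAFEARRAY
print(sa[0])                             # unpacking yields the tuple (11, 22, 33)
print(SafeArrayGetVartype(sa) == VT_I4)  # the element VARTYPE is VT_I4

Wrapping the indexing in the safearray_as_ndarray context manager, as the tests do, makes the same unpacking return a numpy array instead of a tuple.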
+ if arr.dtype is np.dtype(object): + data = [(x.red, x.green, x.blue) for x in arr] + else: + float_dtype = np.dtype('float64') + self.assertIs(arr.dtype[0], float_dtype) + self.assertIs(arr.dtype[1], float_dtype) + self.assertIs(arr.dtype[2], float_dtype) + data = [tuple(x) for x in arr] + self.assertEqual(data, [(0.0, 0.0, 0.0), (1.0, 2.0, 3.0)]) + + def test_datetime64_ndarray(self): + np = get_numpy() + if np is None: + return + try: + np.datetime64 + except AttributeError: + return + + dates = np.array([ + np.datetime64("2000-01-01T05:30:00", "s"), + np.datetime64("1800-01-01T05:30:00", "ms"), + np.datetime64("2014-03-07T00:12:56", "us"), + np.datetime64("2000-01-01T12:34:56", "ns"), + ]) + + t = _midlSAFEARRAY(VARIANT) + sa = t.from_param(dates) + arr = get_array(sa).astype(dates.dtype) + self.assertTrue((dates == arr).all()) + + +if is_resource_enabled("pythoncom"): + try: + import pythoncom + except ImportError: + # pywin32 not installed... + pass + else: + # pywin32 is available. The pythoncom dll contains two handy + # exported functions that allow to create a VARIANT from a Python + # object, also a function that unpacks a VARIANT into a Python + # object. + # + # This allows us to create und unpack SAFEARRAY instances + # contained in VARIANTs, and check for consistency with the + # comtypes code. + + _dll = PyDLL(pythoncom.__file__) + + # c:/sf/pywin32/com/win32com/src/oleargs.cpp 213 + # PyObject *PyCom_PyObjectFromVariant(const VARIANT *var) + unpack = _dll.PyCom_PyObjectFromVariant + unpack.restype = py_object + unpack.argtypes = POINTER(VARIANT), + + # c:/sf/pywin32/com/win32com/src/oleargs.cpp 54 + # BOOL PyCom_VariantFromPyObject(PyObject *obj, VARIANT *var) + _pack = _dll.PyCom_VariantFromPyObject + _pack.argtypes = py_object, POINTER(VARIANT) + _pack.restype = BOOL + + def pack(obj): + var = VARIANT() + _pack(obj, byref(var)) + return var + + class PyWinTest(unittest.TestCase): + def test_1dim(self): + data = (1, 2, 3) + variant = pack(data) + self.assertEqual(variant.value, data) + self.assertEqual(unpack(variant), data) + + def test_2dim(self): + data = ((1, 2, 3), (4, 5, 6), (7, 8, 9)) + variant = pack(data) + self.assertEqual(variant.value, data) + self.assertEqual(unpack(variant), data) + + def test_3dim(self): + data = ( ( (1, 2), (3, 4), (5, 6) ), + ( (7, 8), (9, 10), (11, 12) ) ) + variant = pack(data) + self.assertEqual(variant.value, data) + self.assertEqual(unpack(variant), data) + + def test_4dim(self): + data = ( ( ( ( 1, 2), ( 3, 4) ), + ( ( 5, 6), ( 7, 8) ) ), + ( ( ( 9, 10), (11, 12) ), + ( (13, 14), (15, 16) ) ) ) + variant = pack(data) + self.assertEqual(variant.value, data) + self.assertEqual(unpack(variant), data) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_sapi.py b/venv/Lib/site-packages/comtypes/test/test_sapi.py new file mode 100644 index 00000000..b32e9a7d --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_sapi.py @@ -0,0 +1,32 @@ +# http://www.microsoft.com/technet/scriptcenter/funzone/games/sapi.mspx +# ../gen/_C866CA3A_32F7_11D2_9602_00C04F8EE628_0_5_0 +# http://thread.gmane.org/gmane.comp.python.ctypes.user/1485 + +import os, unittest, tempfile +from comtypes.client import CreateObject + +class Test(unittest.TestCase): + def test(self, dynamic=False): + engine = CreateObject("SAPI.SpVoice", dynamic=dynamic) + stream = CreateObject("SAPI.SpFileStream", dynamic=dynamic) + from comtypes.gen import SpeechLib + + fd, fname = tempfile.mkstemp(suffix=".wav") + os.close(fd) + + 
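For orientation, the SAPI pattern driven end-to-end by test_sapi.py reduces to a very small script. A hedged standalone sketch (assumes Windows with the Speech API available; comtypes generates the SpeechLib wrapper on first use):

from comtypes.client import CreateObject

voice = CreateObject("SAPI.SpVoice")
voice.Speak("Hello from comtypes", 0)   # flag 0 -> default, synchronous playback

The vendored test goes one step further: it assigns a SAPI.SpFileStream to engine.AudioOutputStream (a propputref property, so the stream object itself is assigned, not a value) so that the rendered audio is written to a temporary .wav file whose size can be checked.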
stream.Open(fname, SpeechLib.SSFMCreateForWrite) + + # engine.AudioStream is a propputref property + engine.AudioOutputStream = stream + self.assertEqual(engine.AudioOutputStream, stream) + engine.speak("Hello, World", 0) + stream.Close() + filesize = os.stat(fname).st_size + self.assertTrue(filesize > 100, "filesize only %d bytes" % filesize) + os.unlink(fname) + + def test_dyndisp(self): + return self.test(dynamic=True) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_server.py b/venv/Lib/site-packages/comtypes/test/test_server.py new file mode 100644 index 00000000..0684e930 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_server.py @@ -0,0 +1,291 @@ +import atexit, os, unittest +##import comtypes +import comtypes.typeinfo, comtypes.client + +class TypeLib(object): + """This class collects IDL code fragments and eventually writes + them into a .IDL file. The compile() method compiles the IDL file + into a typelibrary and registers it. A function is also + registered with atexit that will unregister the typelib at program + exit. + """ + def __init__(self, lib): + self.lib = lib + self.interfaces = [] + self.coclasses = [] + + def interface(self, header): + itf = Interface(header) + self.interfaces.append(itf) + return itf + + def coclass(self, definition): + self.coclasses.append(definition) + + def __str__(self): + header = '''import "oaidl.idl"; + import "ocidl.idl"; + %s {''' % self.lib + body = "\n".join([str(itf) for itf in self.interfaces]) + footer = "\n".join(self.coclasses) + "}" + return "\n".join((header, body, footer)) + + def compile(self): + """Compile and register the typelib""" + code = str(self) + curdir = os.path.dirname(__file__) + idl_path = os.path.join(curdir, "mylib.idl") + tlb_path = os.path.join(curdir, "mylib.tlb") + if not os.path.isfile(idl_path) or open(idl_path, "r").read() != code: + open(idl_path, "w").write(code) + os.system(r'call "%%VS71COMNTOOLS%%vsvars32.bat" && ' + r'midl /nologo %s /tlb %s' % (idl_path, tlb_path)) + # Register the typelib... + tlib = comtypes.typeinfo.LoadTypeLib(tlb_path) + # create the wrapper module... + comtypes.client.GetModule(tlb_path) + # Unregister the typelib at interpreter exit... + attr = tlib.GetLibAttr() + guid, major, minor = attr.guid, attr.wMajorVerNum, attr.wMinorVerNum +## atexit.register(comtypes.typeinfo.UnRegisterTypeLib, +## guid, major, minor) + return tlb_path + +class Interface(object): + def __init__(self, header): + self.header = header + self.code = "" + + def add(self, text): + self.code += text + "\n" + return self + + def __str__(self): + return self.header + " {\n" + self.code + "}\n" + +################################################################ +import comtypes +from comtypes.client import wrap + +tlb = TypeLib("[uuid(f4f74946-4546-44bd-a073-9ea6f9fe78cb)] library TestLib") + +itf = tlb.interface("""[object, + oleautomation, + dual, + uuid(ed978f5f-cc45-4fcc-a7a6-751ffa8dfedd)] + interface IMyInterface : IDispatch""") + +outgoing = tlb.interface("""[object, + oleautomation, + dual, + uuid(f7c48a90-64ea-4bb8-abf1-b3a3aa996848)] + interface IMyEventInterface : IDispatch""") + +tlb.coclass(""" +[uuid(fa9de8f4-20de-45fc-b079-648572428817)] +coclass MyServer { + [default] interface IMyInterface; + [default, source] interface IMyEventInterface; +}; +""") + +# The purpose of the MyServer class is to locate three separate code +# section snippets closely together: +# +# 1. The IDL method definition for a COM interface method +# 2. 
The Python implementation of the COM method +# 3. The unittest(s) for the COM method. +# +from comtypes.server.connectionpoints import ConnectableObjectMixin +class MyServer(comtypes.CoClass, ConnectableObjectMixin): + _reg_typelib_ = ('{f4f74946-4546-44bd-a073-9ea6f9fe78cb}', 0, 0) + _reg_clsid_ = comtypes.GUID('{fa9de8f4-20de-45fc-b079-648572428817}') + + ################ + # definition + itf.add("""[id(100), propget] HRESULT Name([out, retval] BSTR *pname); + [id(100), propput] HRESULT Name([in] BSTR name);""") + # implementation + Name = "foo" + # test + def test_Name(self): + p = wrap(self.create()) + self.assertEqual((p.Name, p.name, p.nAME), ("foo",) * 3) + p.NAME = "spam" + self.assertEqual((p.Name, p.name, p.nAME), ("spam",) * 3) + + ################ + # definition + itf.add("[id(101)] HRESULT MixedInOut([in] int a, [out] int *b, [in] int c, [out] int *d);") + # implementation + def MixedInOut(self, a, c): + return a+1, c+1 + #test + def test_MixedInOut(self): + p = wrap(self.create()) + self.assertEqual(p.MixedInOut(1, 2), (2, 3)) + + ################ + # definition + itf.add("[id(102)] HRESULT MultiInOutArgs([in, out] int *pa, [in, out] int *pb);") + # implementation + def MultiInOutArgs(self, pa, pb): + return pa[0] * 3, pb[0] * 4 + # test + def test_MultiInOutArgs(self): + p = wrap(self.create()) + self.assertEqual(p.MultiInOutArgs(1, 2), (3, 8)) + + ################ + # definition + itf.add("HRESULT MultiInOutArgs2([in, out] int *pa, [out] int *pb);") +## # implementation +## def MultiInOutArgs2(self, pa): +## return pa[0] * 3, pa[0] * 4 +## # test +## def test_MultiInOutArgs2(self): +## p = wrap(self.create()) +## self.assertEqual(p.MultiInOutArgs2(42), (126, 168)) + + ################ + # definition + itf.add("HRESULT MultiInOutArgs3([out] int *pa, [out] int *pb);") + # implementation + def MultiInOutArgs3(self): + return 42, 43 + # test + def test_MultiInOutArgs3(self): + p = wrap(self.create()) + self.assertEqual(p.MultiInOutArgs3(), (42, 43)) + + ################ + # definition + itf.add("HRESULT MultiInOutArgs4([out] int *pa, [in, out] int *pb);") + # implementation + def MultiInOutArgs4(self, pb): + return pb[0] + 3, pb[0] + 4 + # test + def test_MultiInOutArgs4(self): + p = wrap(self.create()) + res = p.MultiInOutArgs4(pb=32) +## print "MultiInOutArgs4", res + + itf.add("""HRESULT GetStackTrace([in] ULONG FrameOffset, + [in, out] INT *Frames, + [in] ULONG FramesSize, + [out, optional] ULONG *FramesFilled);""") + def GetStackTrace(self, this, *args): +## print "GetStackTrace", args + return 0 + def test_GetStackTrace(self): + p = wrap(self.create()) + from ctypes import c_int, POINTER, pointer + frames = (c_int * 5)() + res = p.GetStackTrace(42, frames, 5) +## print "RES_1", res + + frames = pointer(c_int(5)) + res = p.GetStackTrace(42, frames, 0) +## print "RES_2", res + + # It is unlear to me if this is allowed or not. Apparently there + # are typelibs that define such an argument type, but it may be + # that these are buggy. + # + # Point is that SafeArrayCreateEx(VT_VARIANT|VT_BYREF, ..) fails. + # The MSDN docs for SafeArrayCreate() have a notice that neither + # VT_ARRAY not VT_BYREF may be set, this notice is missing however + # for SafeArrayCreateEx(). + # + # We have this code here to make sure that comtypes can import + # such a typelib, although calling ths method will fail because + # such an array cannot be created. + itf.add("""HRESULT dummy([in] SAFEARRAY(VARIANT *) foo);""") + + + # Test events. 
+ itf.add("""HRESULT DoSomething();""") + outgoing.add("""[id(103)] HRESULT OnSomething();""") + # implementation + def DoSomething(self): + "Implement the DoSomething method" + self.Fire_Event(0, "OnSomething") + # test + def test_events(self): + p = wrap(self.create()) + class Handler(object): + called = 0 + def OnSomething(self, this): + "Handles the OnSomething event" + self.called += 1 + handler = Handler() + ev = comtypes.client.GetEvents(p, handler) + p.DoSomething() + self.assertEqual(handler.called, 1) + + class Handler(object): + called = 0 + def IMyEventInterface_OnSomething(self): + "Handles the OnSomething event" + self.called += 1 + handler = Handler() + ev = comtypes.client.GetEvents(p, handler) + p.DoSomething() + self.assertEqual(handler.called, 1) + + # events with out-parameters (these are probably very unlikely...) + itf.add("""HRESULT DoSomethingElse();""") + outgoing.add("""[id(104)] HRESULT OnSomethingElse([out, retval] int *px);""") + def DoSomethingElse(self): + "Implement the DoSomething method" + self.Fire_Event(0, "OnSomethingElse") + def test_DoSomethingElse(self): + p = wrap(self.create()) + class Handler(object): + called = 0 + def OnSomethingElse(self): + "Handles the OnSomething event" + self.called += 1 + return 42 + handler = Handler() + ev = comtypes.client.GetEvents(p, handler) + p.DoSomethingElse() + self.assertEqual(handler.called, 1) + + class Handler(object): + called = 0 + def OnSomethingElse(self, this, presult): + "Handles the OnSomething event" + self.called += 1 + presult[0] = 42 + handler = Handler() + ev = comtypes.client.GetEvents(p, handler) + p.DoSomethingElse() + self.assertEqual(handler.called, 1) + +################################################################ + +path = tlb.compile() +from comtypes.gen import TestLib +from comtypes.typeinfo import IProvideClassInfo, IProvideClassInfo2 +from comtypes.connectionpoints import IConnectionPointContainer + +MyServer._com_interfaces_ = [TestLib.IMyInterface, + IProvideClassInfo2, + IConnectionPointContainer] +MyServer._outgoing_interfaces_ = [TestLib.IMyEventInterface] + +################################################################ + +class Test(unittest.TestCase, MyServer): + def __init__(self, *args): + unittest.TestCase.__init__(self, *args) + MyServer.__init__(self) + + def create(self): + obj = MyServer() + return obj.QueryInterface(comtypes.IUnknown) + + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_showevents.py b/venv/Lib/site-packages/comtypes/test/test_showevents.py new file mode 100644 index 00000000..e1178a46 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_showevents.py @@ -0,0 +1,265 @@ +import sys +import unittest +import doctest + +from comtypes.test import requires + +# This test is unreliable... +requires("events") + +class EventsTest(unittest.TestCase): + + def test(self): + import comtypes.test.test_showevents + doctest.testmod(comtypes.test.test_showevents, optionflags=doctest.ELLIPSIS) + + # These methods are never called, they only contain doctests. 
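The doctests that follow drive comtypes.client.ShowEvents/GetEvents/PumpEvents against Internet Explorer, Excel, and Word. As a rough sketch of the same event pattern in plain code (assumes Windows with the COM server installed; the event name and handler signature mirror the doctest output below and are otherwise an assumption):

from comtypes.client import CreateObject, GetEvents, PumpEvents

class Sink(object):
    # handler names follow "<event interface>_<event name>"; the argument
    # after self is the 'this' pointer that comtypes passes to every handler
    def DWebBrowserEvents2_OnQuit(self, this):
        print("browser is quitting")

ie = CreateObject("InternetExplorer.Application")
connection = GetEvents(ie, Sink())   # keep the returned connection object alive
ie.Visible = True
ie.Quit()
PumpEvents(0.1)                      # dispatch pending COM events for 0.1 seconds

Dropping the connection object disconnects the sink, which is why the doctests keep the return value of GetEvents/ShowEvents bound to a name.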
+ if sys.version_info >= (3, 0): + def IE_ShowEvents(self): + ''' + >>> from comtypes.client import CreateObject, ShowEvents, PumpEvents + >>> + >>> o = CreateObject("InternetExplorer.Application") + >>> con = ShowEvents(o) + # event found: DWebBrowserEvents2_StatusTextChange + # event found: DWebBrowserEvents2_ProgressChange + # event found: DWebBrowserEvents2_CommandStateChange + # event found: DWebBrowserEvents2_DownloadBegin + # event found: DWebBrowserEvents2_DownloadComplete + # event found: DWebBrowserEvents2_TitleChange + # event found: DWebBrowserEvents2_PropertyChange + # event found: DWebBrowserEvents2_BeforeNavigate2 + # event found: DWebBrowserEvents2_NewWindow2 + # event found: DWebBrowserEvents2_NavigateComplete2 + # event found: DWebBrowserEvents2_DocumentComplete + # event found: DWebBrowserEvents2_OnQuit + # event found: DWebBrowserEvents2_OnVisible + # event found: DWebBrowserEvents2_OnToolBar + # event found: DWebBrowserEvents2_OnMenuBar + # event found: DWebBrowserEvents2_OnStatusBar + # event found: DWebBrowserEvents2_OnFullScreen + # event found: DWebBrowserEvents2_OnTheaterMode + # event found: DWebBrowserEvents2_WindowSetResizable + # event found: DWebBrowserEvents2_WindowSetLeft + # event found: DWebBrowserEvents2_WindowSetTop + # event found: DWebBrowserEvents2_WindowSetWidth + # event found: DWebBrowserEvents2_WindowSetHeight + # event found: DWebBrowserEvents2_WindowClosing + # event found: DWebBrowserEvents2_ClientToHostWindow + # event found: DWebBrowserEvents2_SetSecureLockIcon + # event found: DWebBrowserEvents2_FileDownload + # event found: DWebBrowserEvents2_NavigateError + # event found: DWebBrowserEvents2_PrintTemplateInstantiation + # event found: DWebBrowserEvents2_PrintTemplateTeardown + # event found: DWebBrowserEvents2_UpdatePageStatus + # event found: DWebBrowserEvents2_PrivacyImpactedStateChange + # event found: DWebBrowserEvents2_NewWindow3 + >>> res = o.Navigate2("http://www.python.org") + Event DWebBrowserEvents2_PropertyChange(None, '{265b75c1-4158-11d0-90f6-00c04fd497ea}') + Event DWebBrowserEvents2_BeforeNavigate2(None, <POINTER(IWebBrowser2) ...>, VARIANT(vt=0x400c, byref('http://www.python.org/')), VARIANT(vt=0x400c, byref(0)), VARIANT(vt=0x400c, byref(None)), VARIANT(vt=0x400c, byref(VARIANT(vt=0x400c, byref(None)))), VARIANT(vt=0x400c, byref(None)), VARIANT(vt=0x400b, byref(False))) + Event DWebBrowserEvents2_DownloadBegin(None) + Event DWebBrowserEvents2_PropertyChange(None, '{D0FCA420-D3F5-11CF-B211-00AA004AE837}') + >>> res = PumpEvents(0.01) + Event DWebBrowserEvents2_CommandStateChange(None, 2, False) + Event DWebBrowserEvents2_CommandStateChange(None, 1, False) + >>> res = o.Quit() + >>> res = PumpEvents(0.01) + Event DWebBrowserEvents2_OnQuit(None) + >>> + ''' + else: + def IE_ShowEvents(self): + ''' + >>> from comtypes.client import CreateObject, ShowEvents, PumpEvents + >>> + >>> o = CreateObject("InternetExplorer.Application") + >>> con = ShowEvents(o) + # event found: DWebBrowserEvents2_StatusTextChange + # event found: DWebBrowserEvents2_ProgressChange + # event found: DWebBrowserEvents2_CommandStateChange + # event found: DWebBrowserEvents2_DownloadBegin + # event found: DWebBrowserEvents2_DownloadComplete + # event found: DWebBrowserEvents2_TitleChange + # event found: DWebBrowserEvents2_PropertyChange + # event found: DWebBrowserEvents2_BeforeNavigate2 + # event found: DWebBrowserEvents2_NewWindow2 + # event found: DWebBrowserEvents2_NavigateComplete2 + # event found: DWebBrowserEvents2_DocumentComplete + # event found: 
DWebBrowserEvents2_OnQuit + # event found: DWebBrowserEvents2_OnVisible + # event found: DWebBrowserEvents2_OnToolBar + # event found: DWebBrowserEvents2_OnMenuBar + # event found: DWebBrowserEvents2_OnStatusBar + # event found: DWebBrowserEvents2_OnFullScreen + # event found: DWebBrowserEvents2_OnTheaterMode + # event found: DWebBrowserEvents2_WindowSetResizable + # event found: DWebBrowserEvents2_WindowSetLeft + # event found: DWebBrowserEvents2_WindowSetTop + # event found: DWebBrowserEvents2_WindowSetWidth + # event found: DWebBrowserEvents2_WindowSetHeight + # event found: DWebBrowserEvents2_WindowClosing + # event found: DWebBrowserEvents2_ClientToHostWindow + # event found: DWebBrowserEvents2_SetSecureLockIcon + # event found: DWebBrowserEvents2_FileDownload + # event found: DWebBrowserEvents2_NavigateError + # event found: DWebBrowserEvents2_PrintTemplateInstantiation + # event found: DWebBrowserEvents2_PrintTemplateTeardown + # event found: DWebBrowserEvents2_UpdatePageStatus + # event found: DWebBrowserEvents2_PrivacyImpactedStateChange + # event found: DWebBrowserEvents2_NewWindow3 + >>> res = o.Navigate2("http://www.python.org") + Event DWebBrowserEvents2_PropertyChange(None, u'{265b75c1-4158-11d0-90f6-00c04fd497ea}') + Event DWebBrowserEvents2_BeforeNavigate2(None, <POINTER(IWebBrowser2) ...>, VARIANT(vt=0x400c, byref(u'http://www.python.org/')), VARIANT(vt=0x400c, byref(0)), VARIANT(vt=0x400c, byref(None)), VARIANT(vt=0x400c, byref(VARIANT(vt=0x400c, byref(None)))), VARIANT(vt=0x400c, byref(None)), VARIANT(vt=0x400b, byref(False))) + Event DWebBrowserEvents2_DownloadBegin(None) + Event DWebBrowserEvents2_PropertyChange(None, u'{D0FCA420-D3F5-11CF-B211-00AA004AE837}') + >>> res = PumpEvents(0.01) + Event DWebBrowserEvents2_CommandStateChange(None, 2, False) + Event DWebBrowserEvents2_CommandStateChange(None, 1, False) + >>> res = o.Quit() + >>> res = PumpEvents(0.01) + Event DWebBrowserEvents2_OnQuit(None) + >>> + ''' + + def IE_GetEvents(): + """ + >>> from comtypes.client import CreateObject, GetEvents, PumpEvents + >>> + >>> o = CreateObject("InternetExplorer.Application") + >>> class EventHandler(object): + ... def DWebBrowserEvents2_PropertyChange(self, this, what): + ... print("PropertyChange: %s" % what) + ... return 0 + ... 
+ >>> + >>> con = GetEvents(o, EventHandler()) + >>> res = o.Navigate2("http://www.python.org") + PropertyChange: {265b75c1-4158-11d0-90f6-00c04fd497ea} + PropertyChange: {D0FCA420-D3F5-11CF-B211-00AA004AE837} + >>> res = o.Quit() + >>> res = PumpEvents(0.01) + >>> + """ + + def Excel_Events(self): + ''' + >>> from comtypes.client import CreateObject, ShowEvents, PumpEvents + >>> + >>> o = CreateObject("Excel.Application") + >>> con = ShowEvents(o) + # event found: AppEvents_NewWorkbook + # event found: AppEvents_SheetSelectionChange + # event found: AppEvents_SheetBeforeDoubleClick + # event found: AppEvents_SheetBeforeRightClick + # event found: AppEvents_SheetActivate + # event found: AppEvents_SheetDeactivate + # event found: AppEvents_SheetCalculate + # event found: AppEvents_SheetChange + # event found: AppEvents_WorkbookOpen + # event found: AppEvents_WorkbookActivate + # event found: AppEvents_WorkbookDeactivate + # event found: AppEvents_WorkbookBeforeClose + # event found: AppEvents_WorkbookBeforeSave + # event found: AppEvents_WorkbookBeforePrint + # event found: AppEvents_WorkbookNewSheet + # event found: AppEvents_WorkbookAddinInstall + # event found: AppEvents_WorkbookAddinUninstall + # event found: AppEvents_WindowResize + # event found: AppEvents_WindowActivate + # event found: AppEvents_WindowDeactivate + # event found: AppEvents_SheetFollowHyperlink + # event found: AppEvents_SheetPivotTableUpdate + # event found: AppEvents_WorkbookPivotTableCloseConnection + # event found: AppEvents_WorkbookPivotTableOpenConnection + # event found: AppEvents_WorkbookSync + # event found: AppEvents_WorkbookBeforeXmlImport + # event found: AppEvents_WorkbookAfterXmlImport + # event found: AppEvents_WorkbookBeforeXmlExport + # event found: AppEvents_WorkbookAfterXmlExport + >>> wb = o.Workbooks.Add() + Event AppEvents_NewWorkbook(None, <POINTER(_Workbook) ...>) + Event AppEvents_WorkbookActivate(None, <POINTER(_Workbook) ...>) + Event AppEvents_WindowActivate(None, <POINTER(_Workbook) ...>, <POINTER(Window) ...>) + >>> PumpEvents(0.1) + >>> res = o.Quit(); PumpEvents(0.1) + Event AppEvents_WorkbookBeforeClose(None, <POINTER(_Workbook) ...>, VARIANT(vt=0x400b, byref(False))) + Event AppEvents_WindowDeactivate(None, <POINTER(_Workbook) ...>, <POINTER(Window) ...>) + Event AppEvents_WorkbookDeactivate(None, <POINTER(_Workbook) ...>) + >>> + ''' + + def Excel_Events_2(self): + ''' + >>> from comtypes.client import CreateObject, GetEvents, PumpEvents + >>> + >>> o = CreateObject("Excel.Application") + >>> class Sink(object): + ... def AppEvents_NewWorkbook(self, this, workbook): + ... print("AppEvents_NewWorkbook %s" % workbook) + ... + >>> + >>> con = GetEvents(o, Sink()) + >>> wb = o.Workbooks.Add() + AppEvents_NewWorkbook <POINTER(_Workbook) ...> + >>> + >>> class Sink(object): + ... def AppEvents_NewWorkbook(self, workbook): + ... print("AppEvents_NewWorkbook(no this) %s" % workbook) + ... 
+ >>> + >>> con = GetEvents(o, Sink()) + >>> wb = o.Workbooks.Add() + AppEvents_NewWorkbook(no this) <POINTER(_Workbook) ...> + >>> + >>> res = o.Quit() + >>> + ''' + + def Word_Events(self): + ''' + >>> from comtypes.client import CreateObject, ShowEvents, PumpEvents + >>> + >>> o = CreateObject("Word.Application") + >>> con = ShowEvents(o) + # event found: ApplicationEvents4_Startup + # event found: ApplicationEvents4_Quit + # event found: ApplicationEvents4_DocumentChange + # event found: ApplicationEvents4_DocumentOpen + # event found: ApplicationEvents4_DocumentBeforeClose + # event found: ApplicationEvents4_DocumentBeforePrint + # event found: ApplicationEvents4_DocumentBeforeSave + # event found: ApplicationEvents4_NewDocument + # event found: ApplicationEvents4_WindowActivate + # event found: ApplicationEvents4_WindowDeactivate + # event found: ApplicationEvents4_WindowSelectionChange + # event found: ApplicationEvents4_WindowBeforeRightClick + # event found: ApplicationEvents4_WindowBeforeDoubleClick + # event found: ApplicationEvents4_EPostagePropertyDialog + # event found: ApplicationEvents4_EPostageInsert + # event found: ApplicationEvents4_MailMergeAfterMerge + # event found: ApplicationEvents4_MailMergeAfterRecordMerge + # event found: ApplicationEvents4_MailMergeBeforeMerge + # event found: ApplicationEvents4_MailMergeBeforeRecordMerge + # event found: ApplicationEvents4_MailMergeDataSourceLoad + # event found: ApplicationEvents4_MailMergeDataSourceValidate + # event found: ApplicationEvents4_MailMergeWizardSendToCustom + # event found: ApplicationEvents4_MailMergeWizardStateChange + # event found: ApplicationEvents4_WindowSize + # event found: ApplicationEvents4_XMLSelectionChange + # event found: ApplicationEvents4_XMLValidationError + # event found: ApplicationEvents4_DocumentSync + # event found: ApplicationEvents4_EPostageInsertEx + >>> PumpEvents(0.1) + >>> doc = o.Documents.Add() + Event ApplicationEvents4_NewDocument(None, <POINTER(_Document) ...>) + Event ApplicationEvents4_DocumentChange(None) + >>> res = o.Quit(); PumpEvents(0.1) + Event ApplicationEvents4_DocumentBeforeClose(None, <POINTER(_Document) ...>, VARIANT(vt=0x400b, byref(False))) + Event ApplicationEvents4_WindowDeactivate(None, <POINTER(_Document) ...>, <POINTER(Window) ...>) + Event ApplicationEvents4_DocumentChange(None) + Event ApplicationEvents4_Quit(None) + >>> + ''' + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_subinterface.py b/venv/Lib/site-packages/comtypes/test/test_subinterface.py new file mode 100644 index 00000000..fb6a6548 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_subinterface.py @@ -0,0 +1,16 @@ +import unittest, sys +from comtypes import IUnknown, GUID +from ctypes import * + +def test_main(): + from test import test_support + test_support.run_unittest(Test) + +class Test(unittest.TestCase): + def test_subinterface(self): + class ISub(IUnknown): + pass + + def test_subclass(self): + class X(c_void_p): + pass diff --git a/venv/Lib/site-packages/comtypes/test/test_typeinfo.py b/venv/Lib/site-packages/comtypes/test/test_typeinfo.py new file mode 100644 index 00000000..3e7ce0be --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_typeinfo.py @@ -0,0 +1,81 @@ +import os +import unittest +from ctypes import POINTER, byref +from comtypes import GUID, COMError +from comtypes.automation import DISPATCH_METHOD +from comtypes.typeinfo import LoadTypeLibEx, LoadRegTypeLib, \ + QueryPathOfRegTypeLib, 
TKIND_INTERFACE, TKIND_DISPATCH, TKIND_ENUM + +# We should add other test cases for Windows CE. +if os.name == "nt": + class Test(unittest.TestCase): + # No LoadTypeLibEx on windows ce + def test_LoadTypeLibEx(self): + # IE 6 uses shdocvw.dll, IE 7 uses ieframe.dll + if os.path.exists(os.path.join(os.environ["SystemRoot"], + "system32", "ieframe.dll")): + dllname = "ieframe.dll" + else: + dllname = "shdocvw.dll" + + self.assertRaises(WindowsError, lambda: LoadTypeLibEx("<xxx.xx>")) + tlib = LoadTypeLibEx(dllname) + self.assertTrue(tlib.GetTypeInfoCount()) + tlib.GetDocumentation(-1) + self.assertEqual(tlib.IsName("iwebbrowser"), "IWebBrowser") + self.assertEqual(tlib.IsName("IWEBBROWSER"), "IWebBrowser") + self.assertTrue(tlib.FindName("IWebBrowser")) + self.assertEqual(tlib.IsName("Spam"), None) + tlib.GetTypeComp() + + attr = tlib.GetLibAttr() + info = attr.guid, attr.wMajorVerNum, attr.wMinorVerNum + other_tlib = LoadRegTypeLib(*info) + self.assertEqual(tlib, other_tlib) + + ## for n in dir(attr): + ## if not n.startswith("_"): + ## print "\t", n, getattr(attr, n) + + for i in range(tlib.GetTypeInfoCount()): + ti = tlib.GetTypeInfo(i) + ti.GetTypeAttr() + tlib.GetDocumentation(i) + tlib.GetTypeInfoType(i) + + c_tlib, index = ti.GetContainingTypeLib() + self.assertEqual(c_tlib, tlib) + self.assertEqual(index, i) + + guid_null = GUID() + self.assertRaises(COMError, lambda: tlib.GetTypeInfoOfGuid(guid_null)) + + self.assertTrue(tlib.GetTypeInfoOfGuid(GUID("{EAB22AC1-30C1-11CF-A7EB-0000C05BAE0B}"))) + + path = QueryPathOfRegTypeLib(*info) + path = path.split("\0")[0] + self.assertTrue(path.lower().endswith(dllname)) + + def test_TypeInfo(self): + tlib = LoadTypeLibEx("shdocvw.dll") + for index in range(tlib.GetTypeInfoCount()): + ti = tlib.GetTypeInfo(index) + ta = ti.GetTypeAttr() + ti.GetDocumentation(-1) + if ta.typekind in (TKIND_INTERFACE, TKIND_DISPATCH): + if ta.cImplTypes: + href = ti.GetRefTypeOfImplType(0) + base = ti.GetRefTypeInfo(href) + base.GetDocumentation(-1) + ti.GetImplTypeFlags(0) + for f in range(ta.cFuncs): + fd = ti.GetFuncDesc(f) + names = ti.GetNames(fd.memid, 32) + ti.GetIDsOfNames(*names) + ti.GetMops(fd.memid) + + for v in range(ta.cVars): + ti.GetVarDesc(v) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_urlhistory.py b/venv/Lib/site-packages/comtypes/test/test_urlhistory.py new file mode 100644 index 00000000..51929f77 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_urlhistory.py @@ -0,0 +1,48 @@ +import unittest, os +from copy import copy +from ctypes import * +from comtypes.client import GetModule, CreateObject +from comtypes.patcher import Patch + +# ./urlhist.tlb was downloaded somewhere from the internet (?) + +GetModule(os.path.join(os.path.dirname(__file__), "urlhist.tlb")) +from comtypes.gen import urlhistLib + +# The pwcsTitle and pwcsUrl fields of the _STATURL structure must be +# freed by the caller. The only way to do this without patching the +# generated code directly is to monkey-patch the +# _STATURL.__ctypes_from_outparam__ method like this. 
+@Patch(urlhistlib._STATURL) +class _(object): + def __ctypes_from_outparam__(self): + from comtypes.util import cast_field + result = type(self)() + for n, _ in self._fields_: + setattr(result, n, getattr(self, n)) + url, title = self.pwcsUrl, self.pwcsTitle + windll.ole32.CoTaskMemFree(cast_field(self, "pwcsUrl", c_void_p)) + windll.ole32.CoTaskMemFree(cast_field(self, "pwcsTitle", c_void_p)) + return result + +from comtypes.test.find_memleak import find_memleak + +class Test(unittest.TestCase): + def check_leaks(self, func): + bytes = find_memleak(func, (5, 10)) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_creation(self): + hist = CreateObject(urlhistLib.UrlHistory) + for x in hist.EnumURLS(): + x.pwcsUrl, x.pwcsTitle +## print (x.pwcsUrl, x.pwcsTitle) +## print x + def doit(): + for x in hist.EnumURLs(): + pass + doit() + self.check_leaks(doit) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_variant.py b/venv/Lib/site-packages/comtypes/test/test_variant.py new file mode 100644 index 00000000..6045a170 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_variant.py @@ -0,0 +1,354 @@ +from ctypes import ( + POINTER, byref, c_byte, c_char, c_double, c_float, c_int, c_int64, c_short, + c_ubyte, c_ushort, c_uint, c_uint64, pointer, +) +import datetime +import decimal +import sys +import unittest + +from comtypes import IUnknown, GUID +from comtypes.automation import ( + VARIANT, DISPPARAMS, VT_NULL, VT_EMPTY, VT_ERROR, VT_I1, VT_I2, VT_I4, + VT_I8, VT_UI1, VT_UI2, VT_UI4, VT_UI8, VT_R4, VT_R8, VT_BYREF, VT_BSTR, + VT_DATE, VT_DECIMAL, VT_CY,) +from comtypes.typeinfo import LoadRegTypeLib +from comtypes.test import get_numpy +from comtypes.test.find_memleak import find_memleak + + +def get_refcnt(comptr): + # return the COM reference count of a COM interface pointer + if not comptr: + return 0 + comptr.AddRef() + return comptr.Release() + + +class VariantTestCase(unittest.TestCase): + + def test_constants(self): + empty = VARIANT.empty + self.assertEqual(empty.vt, VT_EMPTY) + self.assertTrue(empty.value is None) + + null = VARIANT.null + self.assertEqual(null.vt, VT_NULL) + self.assertTrue(null.value is None) + + missing = VARIANT.missing + self.assertEqual(missing.vt, VT_ERROR) + self.assertRaises(NotImplementedError, lambda: missing.value) + + def test_com_refcounts(self): + # typelib for oleaut32 + tlb = LoadRegTypeLib(GUID("{00020430-0000-0000-C000-000000000046}"), 2, 0, 0) + rc = get_refcnt(tlb) + + p = tlb.QueryInterface(IUnknown) + self.assertEqual(get_refcnt(tlb), rc+1) + + del p + self.assertEqual(get_refcnt(tlb), rc) + + def test_com_pointers(self): + # Storing a COM interface pointer in a VARIANT increments the refcount, + # changing the variant to contain something else decrements it + tlb = LoadRegTypeLib(GUID("{00020430-0000-0000-C000-000000000046}"), 2, 0, 0) + rc = get_refcnt(tlb) + + v = VARIANT(tlb) + self.assertEqual(get_refcnt(tlb), rc+1) + + p = v.value + self.assertEqual(get_refcnt(tlb), rc+2) + del p + self.assertEqual(get_refcnt(tlb), rc+1) + + v.value = None + self.assertEqual(get_refcnt(tlb), rc) + + def test_null_com_pointers(self): + p = POINTER(IUnknown)() + self.assertEqual(get_refcnt(p), 0) + + VARIANT(p) + self.assertEqual(get_refcnt(p), 0) + + def test_dispparams(self): + # DISPPARAMS is a complex structure, well worth testing. 
+ d = DISPPARAMS() + d.rgvarg = (VARIANT * 3)() + values = [1, 5, 7] + for i, v in enumerate(values): + d.rgvarg[i].value = v + result = [d.rgvarg[i].value for i in range(3)] + self.assertEqual(result, values) + + def test_pythonobjects(self): + objects = [None, 42, 3.14, True, False, "abc", "abc", 7] + for x in objects: + v = VARIANT(x) + self.assertEqual(x, v.value) + + def test_integers(self): + v = VARIANT() + + if (hasattr(sys, "maxint")): + # this test doesn't work in Python 3000 + v.value = sys.maxsize + self.assertEqual(v.value, sys.maxsize) + self.assertEqual(type(v.value), int) + + v.value += 1 + self.assertEqual(v.value, sys.maxsize+1) + self.assertEqual(type(v.value), int) + + v.value = 1 + self.assertEqual(v.value, 1) + self.assertEqual(type(v.value), int) + + def test_datetime(self): + now = datetime.datetime.now() + + v = VARIANT() + v.value = now + self.assertEqual(v.vt, VT_DATE) + self.assertEqual(v.value, now) + + def test_datetime64(self): + np = get_numpy() + if np is None: + return + try: + np.datetime64 + except AttributeError: + return + + dates = [ + np.datetime64("2000-01-01T05:30:00", "s"), + np.datetime64("1800-01-01T05:30:00", "ms"), + np.datetime64("2000-01-01T12:34:56", "us") + ] + + for date in dates: + v = VARIANT() + v.value = date + self.assertEqual(v.vt, VT_DATE) + self.assertEqual(v.value, date.astype(datetime.datetime)) + + def test_decimal_as_currency(self): + value = decimal.Decimal('3.14') + + v = VARIANT() + v.value = value + self.assertEqual(v.vt, VT_CY) + self.assertEqual(v.value, value) + + def test_decimal_as_decimal(self): + v = VARIANT() + v.vt = VT_DECIMAL + v.decVal.Lo64 = 1234 + v.decVal.scale = 3 + self.assertEqual(v.value, decimal.Decimal('1.234')) + + v.decVal.sign = 0x80 + self.assertEqual(v.value, decimal.Decimal('-1.234')) + + v.decVal.scale = 28 + self.assertEqual(v.value, decimal.Decimal('-1.234e-25')) + + v.decVal.scale = 12 + v.decVal.Hi32 = 100 + self.assertEqual( + v.value, decimal.Decimal('-1844674407.370955162834')) + + def test_BSTR(self): + v = VARIANT() + v.value = "abc\x00123\x00" + self.assertEqual(v.value, "abc\x00123\x00") + + v.value = None + # manually clear the variant + v._.VT_I4 = 0 + + # NULL pointer BSTR should be handled as empty string + v.vt = VT_BSTR + self.assertTrue(v.value in ("", None)) + + def test_empty_BSTR(self): + v = VARIANT() + v.value = "" + self.assertEqual(v.vt, VT_BSTR) + + def test_UDT(self): + from comtypes.gen.TestComServerLib import MYCOLOR + v = VARIANT(MYCOLOR(red=1.0, green=2.0, blue=3.0)) + value = v.value + self.assertEqual((1.0, 2.0, 3.0), + (value.red, value.green, value.blue)) + + def func(): + v = VARIANT(MYCOLOR(red=1.0, green=2.0, blue=3.0)) + return v.value + + bytes = find_memleak(func) + self.assertFalse(bytes, "Leaks %d bytes" % bytes) + + def test_ctypes_in_variant(self): + v = VARIANT() + objs = [(c_ubyte(3), VT_UI1), + (c_char("x"), VT_UI1), + (c_byte(3), VT_I1), + (c_ushort(3), VT_UI2), + (c_short(3), VT_I2), + (c_uint(3), VT_UI4), + (c_uint64(2**64), VT_UI8), + (c_int(3), VT_I4), + (c_int64(2**32), VT_I8), + (c_double(3.14), VT_R8), + (c_float(3.14), VT_R4), + ] + for value, vt in objs: + v.value = value + self.assertEqual(v.vt, vt) + + def test_byref(self): + variable = c_int(42) + v = VARIANT(byref(variable)) + self.assertEqual(v[0], 42) + self.assertEqual(v.vt, VT_BYREF | VT_I4) + variable.value = 96 + self.assertEqual(v[0], 96) + + variable = c_int(42) + v = VARIANT(pointer(variable)) + self.assertEqual(v[0], 42) + self.assertEqual(v.vt, VT_BYREF | VT_I4) + 
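The VARIANT round-trip behaviour exercised by these tests boils down to: assigning a Python value picks an appropriate VT_* tag, and reading .value converts back to a Python object. A minimal sketch (assumes comtypes on Windows; separate from the vendored test code):

from comtypes.automation import VARIANT, VT_R8, VT_BSTR

v = VARIANT(3.14)
print(v.vt == VT_R8, v.value)      # Python floats are packed as VT_R8
v.value = "spam"
print(v.vt == VT_BSTR, v.value)    # strings are repacked as VT_BSTR

Passing byref(some_c_int) instead, as test_byref does, produces a VT_BYREF | VT_I4 variant that tracks later changes to the referenced variable.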
variable.value = 96 + self.assertEqual(v[0], 96) + + +class NdArrayTest(unittest.TestCase): + def test_double(self): + np = get_numpy() + if np is None: + return + for dtype in ('float32', 'float64'): + # because of FLOAT rounding errors, whi will only work for + # certain values! + a = np.array([1.0, 2.0, 3.0, 4.5], dtype=dtype) + v = VARIANT() + v.value = a + self.assertTrue((v.value == a).all()) + + def test_int(self): + np = get_numpy() + if np is None: + return + for dtype in ('int8', 'int16', 'int32', 'int64', 'uint8', + 'uint16', 'uint32', 'uint64'): + a = np.array((1, 1, 1, 1), dtype=dtype) + v = VARIANT() + v.value = a + self.assertTrue((v.value == a).all()) + + def test_mixed(self): + np = get_numpy() + if np is None: + return + + now = datetime.datetime.now() + a = np.array( + [11, "22", None, True, now, decimal.Decimal("3.14")]).reshape(2,3) + v = VARIANT() + v.value = a + self.assertTrue((v.value == a).all()) + + +class ArrayTest(unittest.TestCase): + def test_double(self): + import array + for typecode in "df": + # because of FLOAT rounding errors, whi will only work for + # certain values! + a = array.array(typecode, (1.0, 2.0, 3.0, 4.5)) + v = VARIANT() + v.value = a + self.assertEqual(v.value, (1.0, 2.0, 3.0, 4.5)) + + def test_int(self): + import array + for typecode in "bhiBHIlL": + a = array.array(typecode, (1, 1, 1, 1)) + v = VARIANT() + v.value = a + self.assertEqual(v.value, (1, 1, 1, 1)) + +################################################################ +def run_test(rep, msg, func=None, previous={}, results={}): +## items = [None] * rep + if func is None: + locals = sys._getframe(1).f_locals + func = eval("lambda: %s" % msg, locals) + items = range(rep) + from time import clock + start = clock() + for i in items: + func(); func(); func(); func(); func() + stop = clock() + duration = (stop-start)*1e6/5/rep + try: + prev = previous[msg] + except KeyError: + print("%40s: %7.1f us" % (msg, duration), file=sys.stderr) + delta = 0.0 + else: + delta = duration / prev * 100.0 + print("%40s: %7.1f us, time = %5.1f%%" % (msg, duration, delta), file=sys.stderr) + results[msg] = duration + return delta + + +def check_perf(rep=20000): + from ctypes import c_int, byref + from comtypes.automation import VARIANT + import comtypes.automation + print(comtypes.automation) + variable = c_int() + by_var = byref(variable) + ptr_var = pointer(variable) + + import pickle + try: + previous = pickle.load(open("result.pickle", "rb")) + except IOError: + previous = {} + + results = {} + + d = 0.0 + d += run_test(rep, "VARIANT()", previous=previous, results=results) + d += run_test(rep, "VARIANT(by_var)", previous=previous, results=results) + d += run_test(rep, "VARIANT(ptr_var)", previous=previous, results=results) + d += run_test(rep, "VARIANT().value", previous=previous, results=results) + d += run_test(rep, "VARIANT(None).value", previous=previous, results=results) + d += run_test(rep, "VARIANT(42).value", previous=previous, results=results) + d += run_test(rep, "VARIANT(42L).value", previous=previous, results=results) + d += run_test(rep, "VARIANT(3.14).value", previous=previous, results=results) + d += run_test(rep, "VARIANT(u'Str').value", previous=previous, results=results) + d += run_test(rep, "VARIANT('Str').value", previous=previous, results=results) + d += run_test(rep, "VARIANT((42,)).value", previous=previous, results=results) + d += run_test(rep, "VARIANT([42,]).value", previous=previous, results=results) + + print("Average duration %.1f%%" % (d / 10)) +## cPickle.dump(results, 
open("result.pickle", "wb")) + +if __name__ == '__main__': + try: + unittest.main() + except SystemExit: + pass + import comtypes + print("Running benchmark with comtypes %s/Python %s ..." % (comtypes.__version__, sys.version.split()[0],)) + check_perf() diff --git a/venv/Lib/site-packages/comtypes/test/test_win32com_interop.py b/venv/Lib/site-packages/comtypes/test/test_win32com_interop.py new file mode 100644 index 00000000..ab9dc6f3 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_win32com_interop.py @@ -0,0 +1,94 @@ +import unittest + +from ctypes import PyDLL, py_object, c_void_p, byref, POINTER +from ctypes.wintypes import BOOL + +from comtypes import IUnknown +from comtypes.client import CreateObject +from comtypes.automation import IDispatch +from comtypes.test import requires + +requires("pythoncom") +import pythoncom +import win32com.client + +# We use the PyCom_PyObjectFromIUnknown function in pythoncom25.dll to +# convert a comtypes COM pointer into a pythoncom COM pointer. +# Fortunately this function is exported by the dll... +# +# This is the C prototype; we must pass 'True' as third argument: +# +# PyObject *PyCom_PyObjectFromIUnknown(IUnknown *punk, REFIID riid, BOOL bAddRef) + +_PyCom_PyObjectFromIUnknown = PyDLL(pythoncom.__file__).PyCom_PyObjectFromIUnknown +_PyCom_PyObjectFromIUnknown.restype = py_object +_PyCom_PyObjectFromIUnknown.argtypes = (POINTER(IUnknown), c_void_p, BOOL) + +def comtypes2pywin(ptr, interface=None): + """Convert a comtypes pointer 'ptr' into a pythoncom + PyI<interface> object. + + 'interface' specifies the interface we want; it must be a comtypes + interface class. The interface must be implemented by the object; + and the interface must be known to pythoncom. + + If 'interface' is specified, comtypes.IUnknown is used. + """ + if interface is None: + interface = IUnknown + return _PyCom_PyObjectFromIUnknown(ptr, byref(interface._iid_), True) + +################################################################ + +def comtypes_get_refcount(ptr): + """Helper function for testing: return the COM reference count of + a comtypes COM object""" + ptr.AddRef() + return ptr.Release() + +from comtypes import COMObject + +class MyComObject(COMObject): + """A completely trivial COM object implementing IDispatch. Calling + any methods will return the error code E_NOTIMPL (except the + IUnknown methods; they are implemented in the base class.""" + _com_interfaces_ = [IDispatch] + +################################################################ + +class Test(unittest.TestCase): + def tearDown(self): + if hasattr(self, "ie"): + self.ie.Quit() + del self.ie + + def test_mycomobject(self): + o = MyComObject() + p = comtypes2pywin(o, IDispatch) + disp = win32com.client.Dispatch(p) + self.assertEqual(repr(disp), "<COMObject <unknown>>") + + def test_ie(self): + # Convert a comtypes COM interface pointer into a win32com COM + # pointer. + ie = self.ie = CreateObject("InternetExplorer.Application") + # The COM refcount of the created object is 1: + self.assertEqual(comtypes_get_refcount(ie), 1) + # IE starts invisible: + self.assertEqual(ie.Visible, False) + + # Create a pythoncom PyIDispatch object from it: + p = comtypes2pywin(ie, interface=IDispatch) + self.assertEqual(comtypes_get_refcount(ie), 2) + + # Make it usable... 
+ disp = win32com.client.Dispatch(p) + self.assertEqual(comtypes_get_refcount(ie), 2) + self.assertEqual(disp.Visible, False) + + # Cleanup and make sure that the COM refcounts are correct + del p, disp + self.assertEqual(comtypes_get_refcount(ie), 1) + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_wmi.py b/venv/Lib/site-packages/comtypes/test/test_wmi.py new file mode 100644 index 00000000..4743ebe1 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_wmi.py @@ -0,0 +1,52 @@ +import unittest as ut +from ctypes import POINTER +from comtypes.client import CoGetObject +from comtypes.test import requires + +requires("time") + +# WMI has dual interfaces. +# Some methods/properties have "[out] POINTER(VARIANT)" parameters. +# This test checks that these parameters are returned as strings: +# that's what VARIANT.__ctypes_from_outparam__ does. +class Test(ut.TestCase): + def test_wmi(self): + wmi = CoGetObject("winmgmts:") + disks = wmi.InstancesOf("Win32_LogicalDisk") + + # There are different typelibs installed for WMI on win2k and winXP. + # WbemScripting refers to their guid: + # Win2k: + # import comtypes.gen._565783C6_CB41_11D1_8B02_00600806D9B6_0_1_1 as mod + # WinXP: + # import comtypes.gen._565783C6_CB41_11D1_8B02_00600806D9B6_0_1_2 as mod + # So, the one that's referenced onm WbemScripting will be used, whether the actual + # typelib is available or not. XXX + from comtypes.gen import WbemScripting + WbemScripting.wbemPrivilegeCreateToken + + for item in disks: + # obj[index] is forwarded to obj.Item(index) + # .Value is a property with "[out] POINTER(VARIANT)" parameter. + a = item.Properties_["Caption"].Value + b = item.Properties_.Item("Caption").Value + c = item.Properties_("Caption").Value + self.assertEqual(a, b) + self.assertEqual(a, c) + self.assertTrue(isinstance(a, str)) + self.assertTrue(isinstance(b, str)) + self.assertTrue(isinstance(c, str)) + result = {} + for prop in item.Properties_: + self.assertTrue(isinstance(prop.Name, str)) + prop.Value + result[prop.Name] = prop.Value +## print "\t", (prop.Name, prop.Value) + self.assertEqual(len(item.Properties_), item.Properties_.Count) + self.assertEqual(len(item.Properties_), len(result)) + self.assertTrue(isinstance(item.Properties_["Description"].Value, str)) + # len(obj) is forwared to obj.Count + self.assertEqual(len(disks), disks.Count) + +if __name__ == "__main__": + ut.main() diff --git a/venv/Lib/site-packages/comtypes/test/test_word.py b/venv/Lib/site-packages/comtypes/test/test_word.py new file mode 100644 index 00000000..da52bd87 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/test/test_word.py @@ -0,0 +1,68 @@ +import unittest +import time +import comtypes.client + +# XXX leaks references. 
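# Aside, illustration only: a hedged usage sketch of the WMI pattern that
# test_wmi.py above exercises. Windows-only, assumes comtypes is installed;
# the helper name is my own, not part of the test suite.
from comtypes.client import CoGetObject

def list_logical_disk_captions():
    wmi = CoGetObject("winmgmts:")
    for disk in wmi.InstancesOf("Win32_LogicalDisk"):
        # Properties_["Caption"], Properties_.Item("Caption") and
        # Properties_("Caption") all reach the same property object.
        yield disk.Properties_["Caption"].Value

if __name__ == "__main__":
    print(list(list_logical_disk_captions()))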
+ +import comtypes.test +comtypes.test.requires("ui") + + +class Test(unittest.TestCase): + + def setUp(self): + self._events = [] + + # Word Application Event + def DocumentChange(self, this, *args): +## print "DocumentChange", args + self._events.append("DocumentChange") + + def test(self): + # create a word instance + word = comtypes.client.CreateObject("Word.Application") + from comtypes.gen import Word + + # Get the instance again, and receive events from that + w2 = comtypes.client.GetActiveObject("Word.Application") + conn = comtypes.client.GetEvents(w2, sink=self) + + word.Visible = 1 + + doc = word.Documents.Add() + wrange = doc.Range() + for i in range(10): + wrange.InsertAfter("Hello from comtypes %d\n" % i) + + for i, para in enumerate(doc.Paragraphs): + f = para.Range.Font + f.ColorIndex = i+1 + f.Size = 12 + (2 * i) + + time.sleep(0.5) + + doc.Close(SaveChanges = Word.wdDoNotSaveChanges) + + word.Quit() + del word, w2 + + time.sleep(0.5) + +## self.failUnlessEqual(self._events, ["DocumentChange", "DocumentChange"]) + + def test_commandbar(self): + word = comtypes.client.CreateObject("Word.Application") + word.Visible = 1 + tb = word.CommandBars("Standard") + btn = tb.Controls[1] + + if 0: # word does not allow programmatic access, so this does fail + evt = word.VBE.Events.CommandBarEvents(btn) + from comtypes.gen import Word, VBIDE + comtypes.client.ShowEvents(evt, interface=VBIDE._dispCommandBarControlEvents) + comtypes.client.ShowEvents(evt) + + word.Quit() + +if __name__ == "__main__": + unittest.main() diff --git a/venv/Lib/site-packages/comtypes/tools/__init__.py b/venv/Lib/site-packages/comtypes/tools/__init__.py new file mode 100644 index 00000000..a35f8aa3 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/tools/__init__.py @@ -0,0 +1 @@ +# the comtypes.tools package diff --git a/venv/Lib/site-packages/comtypes/tools/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/comtypes/tools/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..5ccf9dc4 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/tools/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/tools/__pycache__/codegenerator.cpython-36.pyc b/venv/Lib/site-packages/comtypes/tools/__pycache__/codegenerator.cpython-36.pyc new file mode 100644 index 00000000..10254605 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/tools/__pycache__/codegenerator.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/tools/__pycache__/tlbparser.cpython-36.pyc b/venv/Lib/site-packages/comtypes/tools/__pycache__/tlbparser.cpython-36.pyc new file mode 100644 index 00000000..f1429704 Binary files /dev/null and b/venv/Lib/site-packages/comtypes/tools/__pycache__/tlbparser.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/tools/__pycache__/typedesc.cpython-36.pyc b/venv/Lib/site-packages/comtypes/tools/__pycache__/typedesc.cpython-36.pyc new file mode 100644 index 00000000..daa410da Binary files /dev/null and b/venv/Lib/site-packages/comtypes/tools/__pycache__/typedesc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/tools/__pycache__/typedesc_base.cpython-36.pyc b/venv/Lib/site-packages/comtypes/tools/__pycache__/typedesc_base.cpython-36.pyc new file mode 100644 index 00000000..3588ec7a Binary files /dev/null and b/venv/Lib/site-packages/comtypes/tools/__pycache__/typedesc_base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/comtypes/tools/codegenerator.py 
b/venv/Lib/site-packages/comtypes/tools/codegenerator.py new file mode 100644 index 00000000..7c8ecfb4 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/tools/codegenerator.py @@ -0,0 +1,1015 @@ +# Code generator to generate code for everything contained in COM type +# libraries. +import os +import io +import keyword +import ctypes + +from comtypes.tools import typedesc +import comtypes +import comtypes.client +import comtypes.client._generate + +version = comtypes.__version__ + +__warn_on_munge__ = __debug__ + + +class lcid(object): + def __repr__(self): + return "_lcid" +lcid = lcid() + +class dispid(object): + def __init__(self, memid): + self.memid = memid + + def __repr__(self): + return "dispid(%s)" % self.memid + +class helpstring(object): + def __init__(self, text): + self.text = text + + def __repr__(self): + return "helpstring(%r)" % self.text + + +# XXX Should this be in ctypes itself? +ctypes_names = { + "unsigned char": "c_ubyte", + "signed char": "c_byte", + "char": "c_char", + + "wchar_t": "c_wchar", + + "short unsigned int": "c_ushort", + "short int": "c_short", + + "long unsigned int": "c_ulong", + "long int": "c_long", + "long signed int": "c_long", + + "unsigned int": "c_uint", + "int": "c_int", + + "long long unsigned int": "c_ulonglong", + "long long int": "c_longlong", + + "double": "c_double", + "float": "c_float", + + # Hm... + "void": "None", +} + +def get_real_type(tp): + if type(tp) is typedesc.Typedef: + return get_real_type(tp.typ) + elif isinstance(tp, typedesc.CvQualifiedType): + return get_real_type(tp.typ) + return tp + +ASSUME_STRINGS = True + +def _calc_packing(struct, fields, pack, isStruct): + # Try a certain packing, raise PackingError if field offsets, + # total size ot total alignment is wrong. + if struct.size is None: # incomplete struct + return -1 + if struct.name in dont_assert_size: + return None + if struct.bases: + size = struct.bases[0].size + total_align = struct.bases[0].align + else: + size = 0 + total_align = 8 # in bits + for i, f in enumerate(fields): + if f.bits: # this code cannot handle bit field sizes. +## print "##XXX FIXME" + return -2 # XXX FIXME + s, a = storage(f.typ) + if pack is not None: + a = min(pack, a) + if size % a: + size += a - size % a + if isStruct: + if size != f.offset: + raise PackingError("field %s offset (%s/%s)" % (f.name, size, f.offset)) + size += s + else: + size = max(size, s) + total_align = max(total_align, a) + if total_align != struct.align: + raise PackingError("total alignment (%s/%s)" % (total_align, struct.align)) + a = total_align + if pack is not None: + a = min(pack, a) + if size % a: + size += a - size % a + if size != struct.size: + raise PackingError("total size (%s/%s)" % (size, struct.size)) + +def calc_packing(struct, fields): + # try several packings, starting with unspecified packing + isStruct = isinstance(struct, typedesc.Structure) + for pack in [None, 16*8, 8*8, 4*8, 2*8, 1*8]: + try: + _calc_packing(struct, fields, pack, isStruct) + except PackingError as details: + continue + else: + if pack is None: + return None + return pack/8 + raise PackingError("PACKING FAILED: %s" % details) + +class PackingError(Exception): + pass + +try: + set +except NameError: + # Python 2.3 + from sets import Set as set + +# XXX These should be filtered out in gccxmlparser. 
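# Aside, illustration only (plain ctypes, not comtypes code): why
# calc_packing() above must probe several pack values. The same fields give
# different sizes depending on _pack_, and only one candidate can match the
# size and alignment recorded in the type library.
import ctypes

class Packed(ctypes.Structure):
    _pack_ = 1
    _fields_ = [("a", ctypes.c_char), ("b", ctypes.c_int)]

class Natural(ctypes.Structure):
    _fields_ = [("a", ctypes.c_char), ("b", ctypes.c_int)]

assert ctypes.sizeof(Packed) == 5    # no padding inserted
assert ctypes.sizeof(Natural) == 8   # 'a' padded so 'b' lands on a 4-byte boundary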
+dont_assert_size = set( + [ + "__si_class_type_info_pseudo", + "__class_type_info_pseudo", + ] + ) + +def storage(t): + # return the size and alignment of a type + if isinstance(t, typedesc.Typedef): + return storage(t.typ) + elif isinstance(t, typedesc.ArrayType): + s, a = storage(t.typ) + return s * (int(t.max) - int(t.min) + 1), a + return int(t.size), int(t.align) + +################################################################ + +class Generator(object): + + def __init__(self, ofi, known_symbols=None): + self._externals = {} + self.output = ofi + self.stream = io.StringIO() + self.imports = io.StringIO() +## self.stream = self.imports = self.output + self.known_symbols = known_symbols or {} + + self.done = set() # type descriptions that have been generated + self.names = set() # names that have been generated + + def generate(self, item): + if item in self.done: + return + if isinstance(item, typedesc.StructureHead): + name = getattr(item.struct, "name", None) + else: + name = getattr(item, "name", None) + if name in self.known_symbols: + mod = self.known_symbols[name] + print("from %s import %s" % (mod, name), file=self.imports) + self.done.add(item) + if isinstance(item, typedesc.Structure): + self.done.add(item.get_head()) + self.done.add(item.get_body()) + return + mth = getattr(self, type(item).__name__) + # to avoid infinite recursion, we have to mark it as done + # before actually generating the code. + self.done.add(item) + mth(item) + + def generate_all(self, items): + for item in items: + self.generate(item) + + def _make_relative_path(self, path1, path2): + """path1 and path2 are pathnames. + Return path1 as a relative path to path2, if possible. + """ + path1 = os.path.abspath(path1) + path2 = os.path.abspath(path2) + common = os.path.commonprefix([os.path.normcase(path1), + os.path.normcase(path2)]) + if not os.path.isdir(common): + return path1 + if not common.endswith("\\"): + return path1 + if not os.path.isdir(path2): + path2 = os.path.dirname(path2) + # strip the common prefix + path1 = path1[len(common):] + path2 = path2[len(common):] + + parts2 = path2.split("\\") + return "..\\" * len(parts2) + path1 + + def generate_code(self, items, filename=None): + self.filename = filename + if filename is not None: + # Hm, what is the CORRECT encoding? + print("# -*- coding: mbcs -*-", file=self.output) + if os.path.isabs(filename): + # absolute path + print("typelib_path = %r" % filename, file=self.output) + elif not os.path.dirname(filename) and not os.path.isfile(filename): + # no directory given, and not in current directory. + print("typelib_path = %r" % filename, file=self.output) + else: + # relative path; make relative to comtypes.gen. 
+ path = self._make_relative_path(filename, comtypes.gen.__path__[0]) + print("import os", file=self.output) + print("typelib_path = os.path.normpath(", file=self.output) + print(" os.path.abspath(os.path.join(os.path.dirname(__file__),", file=self.output) + print(" %r)))" % path, file=self.output) + + p = os.path.normpath(os.path.abspath(os.path.join(comtypes.gen.__path__[0], + path))) + assert os.path.isfile(p) + print("_lcid = 0 # change this if required", file=self.imports) + print("from ctypes import *", file=self.imports) + items = set(items) + loops = 0 + while items: + loops += 1 + self.more = set() + self.generate_all(items) + + items |= self.more + items -= self.done + + self.output.write(self.imports.getvalue()) + self.output.write("\n\n") + self.output.write(self.stream.getvalue()) + + import textwrap + wrapper = textwrap.TextWrapper(subsequent_indent=" ", + break_long_words=False) + # XXX The space before '%s' is needed to make sure that the entire list + # does not get pushed to the next line when the first name is + # excessively long. + text = "__all__ = [ %s]" % ", ".join([repr(str(n)) for n in self.names]) + + for line in wrapper.wrap(text): + print(line, file=self.output) + + tlib_mtime = None + if self.filename is not None: + # get full path to DLL first (os.stat can't work with relative DLL paths properly) + loaded_typelib = comtypes.typeinfo.LoadTypeLib(self.filename) + full_filename = comtypes.tools.tlbparser.get_tlib_filename(loaded_typelib) + + if full_filename is None: + tlib_mtime = 0 + else: + # get DLL timestamp at the moment of wrapper generation + tlib_mtime = os.stat(full_filename).st_mtime + + print("from comtypes import _check_version; _check_version(%r, %f)" % (version, tlib_mtime), file=self.output) + return loops + + def type_name(self, t, generate=True): + # Return a string, containing an expression which can be used + # to refer to the type. Assumes the 'from ctypes import *' + # namespace is available. + if isinstance(t, typedesc.SAFEARRAYType): + return "_midlSAFEARRAY(%s)" % self.type_name(t.typ) +## if isinstance(t, typedesc.CoClass): +## return "%s._com_interfaces_[0]" % t.name + if isinstance(t, typedesc.Typedef): + return t.name + if isinstance(t, typedesc.PointerType): + if ASSUME_STRINGS: + x = get_real_type(t.typ) + if isinstance(x, typedesc.FundamentalType): + if x.name == "char": + self.need_STRING() + return "STRING" + elif x.name == "wchar_t": + self.need_WSTRING() + return "WSTRING" + + result = "POINTER(%s)" % self.type_name(t.typ, generate) + # XXX Better to inspect t.typ! 
+ if result.startswith("POINTER(WINFUNCTYPE"): + return result[len("POINTER("):-1] + if result.startswith("POINTER(CFUNCTYPE"): + return result[len("POINTER("):-1] + elif result == "POINTER(None)": + return "c_void_p" + return result + elif isinstance(t, typedesc.ArrayType): + return "%s * %s" % (self.type_name(t.typ, generate), int(t.max)+1) + elif isinstance(t, typedesc.FunctionType): + args = [self.type_name(x, generate) for x in [t.returns] + list(t.iterArgTypes())] + if "__stdcall__" in t.attributes: + return "WINFUNCTYPE(%s)" % ", ".join(args) + else: + return "CFUNCTYPE(%s)" % ", ".join(args) + elif isinstance(t, typedesc.CvQualifiedType): + # const and volatile are ignored + return "%s" % self.type_name(t.typ, generate) + elif isinstance(t, typedesc.FundamentalType): + return ctypes_names[t.name] + elif isinstance(t, typedesc.Structure): + return t.name + elif isinstance(t, typedesc.Enumeration): + if t.name: + return t.name + return "c_int" # enums are integers + return t.name + + def need_VARIANT_imports(self, value): + text = repr(value) + if "Decimal(" in text: + print("from decimal import Decimal", file=self.imports) + if "datetime.datetime(" in text: + print("import datetime", file=self.imports) + + _STRING_defined = False + def need_STRING(self): + if self._STRING_defined: + return + print("STRING = c_char_p", file=self.imports) + self._STRING_defined = True + + _WSTRING_defined = False + def need_WSTRING(self): + if self._WSTRING_defined: + return + print("WSTRING = c_wchar_p", file=self.imports) + self._WSTRING_defined = True + + _OPENARRAYS_defined = False + def need_OPENARRAYS(self): + if self._OPENARRAYS_defined: + return + print("OPENARRAY = POINTER(c_ubyte) # hack, see comtypes/tools/codegenerator.py", file=self.imports) + self._OPENARRAYS_defined = True + + _arraytypes = 0 + def ArrayType(self, tp): + self._arraytypes += 1 + self.generate(get_real_type(tp.typ)) + self.generate(tp.typ) + + _enumvalues = 0 + def EnumValue(self, tp): + value = int(tp.value) + if keyword.iskeyword(tp.name): + # XXX use logging! + if __warn_on_munge__: + print("# Fixing keyword as EnumValue for %s" % tp.name) + tp.name += "_" + print("%s = %d" % (tp.name, value), file=self.stream) + self.names.add(tp.name) + self._enumvalues += 1 + + _enumtypes = 0 + def Enumeration(self, tp): + self._enumtypes += 1 + print(file=self.stream) + if tp.name: + print("# values for enumeration '%s'" % tp.name, file=self.stream) + else: + print("# values for unnamed enumeration", file=self.stream) + # Some enumerations have the same name for the enum type + # and an enum value. Excel's XlDisplayShapes is such an example. + # Since we don't have separate namespaces for the type and the values, + # we generate the TYPE last, overwriting the value. 
XXX + for item in tp.values: + self.generate(item) + if tp.name: + print("%s = c_int # enum" % tp.name, file=self.stream) + self.names.add(tp.name) + + _GUID_defined = False + def need_GUID(self): + if self._GUID_defined: + return + self._GUID_defined = True + modname = self.known_symbols.get("GUID") + if modname: + print("from %s import GUID" % modname, file=self.imports) + + _typedefs = 0 + def Typedef(self, tp): + self._typedefs += 1 + if type(tp.typ) in (typedesc.Structure, typedesc.Union): + self.generate(tp.typ.get_head()) + self.more.add(tp.typ) + else: + self.generate(tp.typ) + if self.type_name(tp.typ) in self.known_symbols: + stream = self.imports + else: + stream = self.stream + if tp.name != self.type_name(tp.typ): + print("%s = %s" % \ + (tp.name, self.type_name(tp.typ)), file=stream) + self.names.add(tp.name) + + def FundamentalType(self, item): + pass # we should check if this is known somewhere + + def StructureHead(self, head): + for struct in head.struct.bases: + self.generate(struct.get_head()) + self.more.add(struct) + if head.struct.location: + print("# %s %s" % head.struct.location, file=self.stream) + basenames = [self.type_name(b) for b in head.struct.bases] + if basenames: + self.need_GUID() + method_names = [m.name for m in head.struct.members if type(m) is typedesc.Method] + print("class %s(%s):" % (head.struct.name, ", ".join(basenames)), file=self.stream) + print(" _iid_ = GUID('{}') # please look up iid and fill in!", file=self.stream) + if "Enum" in method_names: + print(" def __iter__(self):", file=self.stream) + print(" return self.Enum()", file=self.stream) + elif method_names == "Next Skip Reset Clone".split(): + print(" def __iter__(self):", file=self.stream) + print(" return self", file=self.stream) + print(file=self.stream) + print(" def next(self):", file=self.stream) + print(" arr, fetched = self.Next(1)", file=self.stream) + print(" if fetched == 0:", file=self.stream) + print(" raise StopIteration", file=self.stream) + print(" return arr[0]", file=self.stream) + else: + methods = [m for m in head.struct.members if type(m) is typedesc.Method] + if methods: + # Hm. We cannot generate code for IUnknown... 
+ print("assert 0, 'cannot generate code for IUnknown'", file=self.stream) + print("class %s(_com_interface):" % head.struct.name, file=self.stream) + print(" pass", file=self.stream) + elif type(head.struct) == typedesc.Structure: + print("class %s(Structure):" % head.struct.name, file=self.stream) + if hasattr(head.struct, "_recordinfo_"): + print(" _recordinfo_ = %r" % (head.struct._recordinfo_,), file=self.stream) + else: + print(" pass", file=self.stream) + elif type(head.struct) == typedesc.Union: + print("class %s(Union):" % head.struct.name, file=self.stream) + print(" pass", file=self.stream) + self.names.add(head.struct.name) + + _structures = 0 + def Structure(self, struct): + self._structures += 1 + self.generate(struct.get_head()) + self.generate(struct.get_body()) + + Union = Structure + + def StructureBody(self, body): + fields = [] + methods = [] + for m in body.struct.members: + if type(m) is typedesc.Field: + fields.append(m) + if type(m.typ) is typedesc.Typedef: + self.generate(get_real_type(m.typ)) + self.generate(m.typ) + elif type(m) is typedesc.Method: + methods.append(m) + self.generate(m.returns) + self.generate_all(m.iterArgTypes()) + elif type(m) is typedesc.Constructor: + pass + + # we don't need _pack_ on Unions (I hope, at least), and not + # on COM interfaces: + if not methods: + try: + pack = calc_packing(body.struct, fields) + if pack is not None: + print("%s._pack_ = %s" % (body.struct.name, pack), file=self.stream) + except PackingError as details: + # if packing fails, write a warning comment to the output. + import warnings + message = "Structure %s: %s" % (body.struct.name, details) + warnings.warn(message, UserWarning) + print("# WARNING: %s" % details, file=self.stream) + + if fields: + if body.struct.bases: + assert len(body.struct.bases) == 1 + self.generate(body.struct.bases[0].get_body()) + # field definition normally span several lines. + # Before we generate them, we need to 'import' everything they need. + # So, call type_name for each field once, + for f in fields: + self.type_name(f.typ) + print("%s._fields_ = [" % body.struct.name, file=self.stream) + if body.struct.location: + print(" # %s %s" % body.struct.location, file=self.stream) + # unnamed fields will get autogenerated names "_", "_1". "_2", "_3", ... + unnamed_index = 0 + for f in fields: + if not f.name: + if unnamed_index: + fieldname = "_%d" % unnamed_index + else: + fieldname = "_" + unnamed_index += 1 + print(" # Unnamed field renamed to '%s'" % fieldname, file=self.stream) + else: + fieldname = f.name + if f.bits is None: + print(" ('%s', %s)," % (fieldname, self.type_name(f.typ)), file=self.stream) + else: + print(" ('%s', %s, %s)," % (fieldname, self.type_name(f.typ), f.bits), file=self.stream) + print("]", file=self.stream) + + if body.struct.size is None: + msg = ("# The size provided by the typelib is incorrect.\n" + "# The size and alignment check for %s is skipped.") + print(msg % body.struct.name, file=self.stream) + elif body.struct.name not in dont_assert_size: + size = body.struct.size // 8 + print("assert sizeof(%s) == %s, sizeof(%s)" % \ + (body.struct.name, size, body.struct.name), file=self.stream) + align = body.struct.align // 8 + print("assert alignment(%s) == %s, alignment(%s)" % \ + (body.struct.name, align, body.struct.name), file=self.stream) + + if methods: + self.need_COMMETHOD() + # method definitions normally span several lines. + # Before we generate them, we need to 'import' everything they need. 
+ # So, call type_name for each field once, + for m in methods: + self.type_name(m.returns) + for a in m.iterArgTypes(): + self.type_name(a) + print("%s._methods_ = [" % body.struct.name, file=self.stream) + if body.struct.location: + print("# %s %s" % body.struct.location, file=self.stream) + + for m in methods: + if m.location: + print(" # %s %s" % m.location, file=self.stream) + print(" COMMETHOD([], %s, '%s'," % ( + self.type_name(m.returns), + m.name), file=self.stream) + for a in m.iterArgTypes(): + print(" ( [], %s, )," % self.type_name(a), file=self.stream) + print(" ),", file=self.stream) + print("]", file=self.stream) + + _midlSAFEARRAY_defined = False + def need_midlSAFEARRAY(self): + if self._midlSAFEARRAY_defined: + return + print("from comtypes.automation import _midlSAFEARRAY", file=self.imports) + self._midlSAFEARRAY_defined = True + + _CoClass_defined = False + def need_CoClass(self): + if self._CoClass_defined: + return + print("from comtypes import CoClass", file=self.imports) + self._CoClass_defined = True + + _dispid_defined = False + def need_dispid(self): + if self._dispid_defined: + return + print("from comtypes import dispid", file=self.imports) + self._dispid_defined = True + + _COMMETHOD_defined = False + def need_COMMETHOD(self): + if self._COMMETHOD_defined: + return + print("from comtypes import helpstring", file=self.imports) + print("from comtypes import COMMETHOD", file=self.imports) + self._COMMETHOD_defined = True + + _DISPMETHOD_defined = False + def need_DISPMETHOD(self): + if self._DISPMETHOD_defined: + return + print("from comtypes import DISPMETHOD, DISPPROPERTY, helpstring", file=self.imports) + self._DISPMETHOD_defined = True + + ################################################################ + # top-level typedesc generators + # + def TypeLib(self, lib): + # lib.name, lib.gui, lib.major, lib.minor, lib.doc + + # Hm, in user code we have to write: + # class MyServer(COMObject, ...): + # _com_interfaces_ = [MyTypeLib.IInterface] + # _reg_typelib_ = MyTypeLib.Library._reg_typelib_ + # ^^^^^^^ + # Should the '_reg_typelib_' attribute be at top-level in the + # generated code, instead as being an attribute of the + # 'Library' symbol? 
+ print("class Library(object):", file=self.stream) + if lib.doc: + print(" %r" % lib.doc, file=self.stream) + if lib.name: + print(" name = %r" % lib.name, file=self.stream) + print(" _reg_typelib_ = (%r, %r, %r)" % (lib.guid, lib.major, lib.minor), file=self.stream) + print(file=self.stream) + + def External(self, ext): + # ext.docs - docstring of typelib + # ext.symbol_name - symbol to generate + # ext.tlib - the ITypeLib pointer to the typelibrary containing the symbols definition + # + # ext.name filled in here + + libdesc = str(ext.tlib.GetLibAttr()) # str(TLIBATTR) is unique for a given typelib + if libdesc in self._externals: # typelib wrapper already created + modname = self._externals[libdesc] + # we must fill in ext.name, it is used by self.type_name() + ext.name = "%s.%s" % (modname, ext.symbol_name) + return + + modname = comtypes.client._generate._name_module(ext.tlib) + ext.name = "%s.%s" % (modname, ext.symbol_name) + self._externals[libdesc] = modname + print("import", modname, file=self.imports) + comtypes.client.GetModule(ext.tlib) + + def Constant(self, tp): + print("%s = %r # Constant %s" % (tp.name, + tp.value, + self.type_name(tp.typ, False)), file=self.stream) + self.names.add(tp.name) + + def SAFEARRAYType(self, sa): + self.generate(sa.typ) + self.need_midlSAFEARRAY() + + _pointertypes = 0 + def PointerType(self, tp): + self._pointertypes += 1 + if type(tp.typ) is typedesc.ComInterface: + # this defines the class + self.generate(tp.typ.get_head()) + # this defines the _methods_ + self.more.add(tp.typ) + elif type(tp.typ) is typedesc.PointerType: + self.generate(tp.typ) + elif type(tp.typ) in (typedesc.Union, typedesc.Structure): + self.generate(tp.typ.get_head()) + self.more.add(tp.typ) + elif type(tp.typ) is typedesc.Typedef: + self.generate(tp.typ) + else: + self.generate(tp.typ) + + def CoClass(self, coclass): + self.need_GUID() + self.need_CoClass() + print("class %s(CoClass):" % coclass.name, file=self.stream) + doc = getattr(coclass, "doc", None) + if doc: + print(" %r" % doc, file=self.stream) + print(" _reg_clsid_ = GUID(%r)" % coclass.clsid, file=self.stream) + print(" _idlflags_ = %s" % coclass.idlflags, file=self.stream) + if self.filename is not None: + print(" _typelib_path_ = typelib_path", file=self.stream) +##X print >> self.stream, "POINTER(%s).__ctypes_from_outparam__ = wrap" % coclass.name + + libid = coclass.tlibattr.guid + wMajor, wMinor = coclass.tlibattr.wMajorVerNum, coclass.tlibattr.wMinorVerNum + print(" _reg_typelib_ = (%r, %s, %s)" % (str(libid), wMajor, wMinor), file=self.stream) + + for itf, idlflags in coclass.interfaces: + self.generate(itf.get_head()) + implemented = [] + sources = [] + for item in coclass.interfaces: + # item is (interface class, impltypeflags) + if item[1] & 2: # IMPLTYPEFLAG_FSOURCE + # source interface + where = sources + else: + # sink interface + where = implemented + if item[1] & 1: # IMPLTYPEFLAG_FDEAULT + # The default interface should be the first item on the list + where.insert(0, item[0].name) + else: + where.append(item[0].name) + if implemented: + print("%s._com_interfaces_ = [%s]" % (coclass.name, ", ".join(implemented)), file=self.stream) + if sources: + print("%s._outgoing_interfaces_ = [%s]" % (coclass.name, ", ".join(sources)), file=self.stream) + print(file=self.stream) + self.names.add(coclass.name) + + def ComInterface(self, itf): + self.generate(itf.get_head()) + self.generate(itf.get_body()) + self.names.add(itf.name) + + def _is_enuminterface(self, itf): + # Check if this is an IEnumXXX 
interface + if not itf.name.startswith("IEnum"): + return False + member_names = [mth.name for mth in itf.members] + for name in ("Next", "Skip", "Reset", "Clone"): + if name not in member_names: + return False + return True + + def ComInterfaceHead(self, head): + if head.itf.name in self.known_symbols: + return + base = head.itf.base + if head.itf.base is None: + # we don't beed to generate IUnknown + return + self.generate(base.get_head()) + self.more.add(base) + basename = self.type_name(head.itf.base) + + self.need_GUID() + print("class %s(%s):" % (head.itf.name, basename), file=self.stream) + print(" _case_insensitive_ = True", file=self.stream) + doc = getattr(head.itf, "doc", None) + if doc: + print(" %r" % doc, file=self.stream) + print(" _iid_ = GUID(%r)" % head.itf.iid, file=self.stream) + print(" _idlflags_ = %s" % head.itf.idlflags, file=self.stream) + + if self._is_enuminterface(head.itf): + print(" def __iter__(self):", file=self.stream) + print(" return self", file=self.stream) + print(file=self.stream) + + print(" def next(self):", file=self.stream) + print(" item, fetched = self.Next(1)", file=self.stream) + print(" if fetched:", file=self.stream) + print(" return item", file=self.stream) + print(" raise StopIteration", file=self.stream) + print(file=self.stream) + + print(" def __getitem__(self, index):", file=self.stream) + print(" self.Reset()", file=self.stream) + print(" self.Skip(index)", file=self.stream) + print(" item, fetched = self.Next(1)", file=self.stream) + print(" if fetched:", file=self.stream) + print(" return item", file=self.stream) + print(" raise IndexError(index)", file=self.stream) + print(file=self.stream) + + def ComInterfaceBody(self, body): + # The base class must be fully generated, including the + # _methods_ list. + self.generate(body.itf.base) + + # make sure we can generate the body + for m in body.itf.members: + for a in m.arguments: + self.generate(a[0]) + self.generate(m.returns) + + self.need_COMMETHOD() + self.need_dispid() + print("%s._methods_ = [" % body.itf.name, file=self.stream) + for m in body.itf.members: + if isinstance(m, typedesc.ComMethod): + self.make_ComMethod(m, "dual" in body.itf.idlflags) + else: + raise TypeError("what's this?") + + print("]", file=self.stream) + print("################################################################", file=self.stream) + print("## code template for %s implementation" % body.itf.name, file=self.stream) + print("##class %s_Impl(object):" % body.itf.name, file=self.stream) + + methods = {} + for m in body.itf.members: + if isinstance(m, typedesc.ComMethod): + # m.arguments is a sequence of tuples: + # (argtype, argname, idlflags, docstring) + # Some typelibs have unnamed method parameters! 
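# Aside, illustration only (made-up names): how the 'typ' bitmask used just
# below folds a propget entry (|= 1) and a propput entry (|= 2) for the same
# member into 3, meaning "both accessors are present".
methods = {}
for name, flag in [("Visible", 1), ("Visible", 2), ("Quit", 0)]:
    methods.setdefault(name, [0])[0] |= flag
assert methods == {"Visible": [3], "Quit": [0]}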
+ inargs = [a[1] or '<unnamed>' for a in m.arguments + if not 'out' in a[2]] + outargs = [a[1] or '<unnamed>' for a in m.arguments + if 'out' in a[2]] + if 'propget' in m.idlflags: + methods.setdefault(m.name, [0, inargs, outargs, m.doc])[0] |= 1 + elif 'propput' in m.idlflags: + methods.setdefault(m.name, [0, inargs[:-1], inargs[-1:], m.doc])[0] |= 2 + else: + methods[m.name] = [0, inargs, outargs, m.doc] + + for name, (typ, inargs, outargs, doc) in methods.items(): + if typ == 0: # method + print("## def %s(%s):" % (name, ", ".join(["self"] + inargs)), file=self.stream) + print("## %r" % (doc or "-no docstring-"), file=self.stream) + print("## #return %s" % (", ".join(outargs)), file=self.stream) + elif typ == 1: # propget + print("## @property", file=self.stream) + print("## def %s(%s):" % (name, ", ".join(["self"] + inargs)), file=self.stream) + print("## %r" % (doc or "-no docstring-"), file=self.stream) + print("## #return %s" % (", ".join(outargs)), file=self.stream) + elif typ == 2: # propput + print("## def _set(%s):" % ", ".join(["self"] + inargs + outargs), file=self.stream) + print("## %r" % (doc or "-no docstring-"), file=self.stream) + print("## %s = property(fset = _set, doc = _set.__doc__)" % name, file=self.stream) + elif typ == 3: # propget + propput + print("## def _get(%s):" % ", ".join(["self"] + inargs), file=self.stream) + print("## %r" % (doc or "-no docstring-"), file=self.stream) + print("## #return %s" % (", ".join(outargs)), file=self.stream) + print("## def _set(%s):" % ", ".join(["self"] + inargs + outargs), file=self.stream) + print("## %r" % (doc or "-no docstring-"), file=self.stream) + print("## %s = property(_get, _set, doc = _set.__doc__)" % name, file=self.stream) + else: + raise RuntimeError("BUG") + print("##", file=self.stream) + print(file=self.stream) + + def DispInterface(self, itf): + self.generate(itf.get_head()) + self.generate(itf.get_body()) + self.names.add(itf.name) + + def DispInterfaceHead(self, head): + self.generate(head.itf.base) + basename = self.type_name(head.itf.base) + + self.need_GUID() + print("class %s(%s):" % (head.itf.name, basename), file=self.stream) + print(" _case_insensitive_ = True", file=self.stream) + doc = getattr(head.itf, "doc", None) + if doc: + print(" %r" % doc, file=self.stream) + print(" _iid_ = GUID(%r)" % head.itf.iid, file=self.stream) + print(" _idlflags_ = %s" % head.itf.idlflags, file=self.stream) + print(" _methods_ = []", file=self.stream) + + def DispInterfaceBody(self, body): + # make sure we can generate the body + for m in body.itf.members: + if isinstance(m, typedesc.DispMethod): + for a in m.arguments: + self.generate(a[0]) + self.generate(m.returns) + elif isinstance(m, typedesc.DispProperty): + self.generate(m.typ) + else: + raise TypeError(m) + + self.need_dispid() + self.need_DISPMETHOD() + print("%s._disp_methods_ = [" % body.itf.name, file=self.stream) + for m in body.itf.members: + if isinstance(m, typedesc.DispMethod): + self.make_DispMethod(m) + elif isinstance(m, typedesc.DispProperty): + self.make_DispProperty(m) + else: + raise TypeError(m) + print("]", file=self.stream) + + ################################################################ + # non-toplevel method generators + # + def make_ComMethod(self, m, isdual): + # typ, name, idlflags, default + if isdual: + idlflags = [dispid(m.memid)] + m.idlflags + else: + # We don't include the dispid for non-dispatch COM interfaces + idlflags = m.idlflags + if __debug__ and m.doc: + idlflags.insert(1, helpstring(m.doc)) + code = " 
COMMETHOD(%r, %s, '%s'" % ( + idlflags, + self.type_name(m.returns), + m.name) + + if not m.arguments: + print("%s)," % code, file=self.stream) + else: + print("%s," % code, file=self.stream) + self.stream.write(" ") + arglist = [] + for typ, name, idlflags, default in m.arguments: + type_name = self.type_name(typ) + ########################################################### + # IDL files that contain 'open arrays' or 'conformant + # varying arrays' method parameters are strange. + # These arrays have both a 'size_is()' and + # 'length_is()' attribute, like this example from + # dia2.idl (in the DIA SDK): + # + # interface IDiaSymbol: IUnknown { + # ... + # HRESULT get_dataBytes( + # [in] DWORD cbData, + # [out] DWORD *pcbData, + # [out, size_is(cbData), + # length_is(*pcbData)] BYTE data[] + # ); + # + # The really strange thing is that the decompiled type + # library then contains this declaration, which declares + # the interface itself as [out] method parameter: + # + # interface IDiaSymbol: IUnknown { + # ... + # HRESULT _stdcall get_dataBytes( + # [in] unsigned long cbData, + # [out] unsigned long* pcbData, + # [out] IDiaSymbol data); + # + # Of course, comtypes does not accept a COM interface + # as method parameter; so replace the parameter type + # with the comtypes spelling of 'unsigned char *', and + # mark the parameter as [in, out], so the IDL + # equivalent would be like this: + # + # interface IDiaSymbol: IUnknown { + # ... + # HRESULT _stdcall get_dataBytes( + # [in] unsigned long cbData, + # [out] unsigned long* pcbData, + # [in, out] BYTE data[]); + ########################################################### + if isinstance(typ, typedesc.ComInterface): + self.need_OPENARRAYS() + type_name = "OPENARRAY" + if 'in' not in idlflags: + idlflags.append('in') + if 'lcid' in idlflags:# and 'in' in idlflags: + default = lcid + if default is not None: + self.need_VARIANT_imports(default) + arglist.append("( %r, %s, '%s', %r )" % ( + idlflags, + type_name, + name, + default)) + else: + arglist.append("( %r, %s, '%s' )" % ( + idlflags, + type_name, + name)) + self.stream.write(",\n ".join(arglist)) + print("),", file=self.stream) + + def make_DispMethod(self, m): + idlflags = [dispid(m.dispid)] + m.idlflags + if __debug__ and m.doc: + idlflags.insert(1, helpstring(m.doc)) + # typ, name, idlflags, default + code = " DISPMETHOD(%r, %s, '%s'" % ( + idlflags, + self.type_name(m.returns), + m.name) + + if not m.arguments: + print("%s)," % code, file=self.stream) + else: + print("%s," % code, file=self.stream) + self.stream.write(" ") + arglist = [] + for typ, name, idlflags, default in m.arguments: + self.need_VARIANT_imports(default) + if default is not None: + arglist.append("( %r, %s, '%s', %r )" % ( + idlflags, + self.type_name(typ), + name, + default)) + else: + arglist.append("( %r, %s, '%s' )" % ( + idlflags, + self.type_name(typ), + name, + )) + self.stream.write(",\n ".join(arglist)) + print("),", file=self.stream) + + def make_DispProperty(self, prop): + idlflags = [dispid(prop.dispid)] + prop.idlflags + if __debug__ and prop.doc: + idlflags.insert(1, helpstring(prop.doc)) + print(" DISPPROPERTY(%r, %s, '%s')," % ( + idlflags, + self.type_name(prop.typ), + prop.name), file=self.stream) + +# shortcut for development +if __name__ == "__main__": + from . 
import tlbparser + tlbparser.main() diff --git a/venv/Lib/site-packages/comtypes/tools/tlbparser.py b/venv/Lib/site-packages/comtypes/tools/tlbparser.py new file mode 100644 index 00000000..166c4d16 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/tools/tlbparser.py @@ -0,0 +1,768 @@ +import os +import sys +from ctypes import windll +from ctypes import c_void_p +from ctypes import sizeof +from ctypes import alignment + +from comtypes import automation +from comtypes import typeinfo +from comtypes import COMError +from comtypes.tools import typedesc +from comtypes.client._code_cache import _get_module_filename + +try: + set +except NameError: + from sets import Set as set + +# Is the process 64-bit? +is_64bits = sys.maxsize > 2**32 + + +################################ + +def PTR(typ): + return typedesc.PointerType(typ, + sizeof(c_void_p)*8, + alignment(c_void_p)*8) + +# basic C data types, with size and alignment in bits +char_type = typedesc.FundamentalType("char", 8, 8) +uchar_type = typedesc.FundamentalType("unsigned char", 8, 8) +wchar_t_type = typedesc.FundamentalType("wchar_t", 16, 16) +short_type = typedesc.FundamentalType("short int", 16, 16) +ushort_type = typedesc.FundamentalType("short unsigned int", 16, 16) +int_type = typedesc.FundamentalType("int", 32, 32) +uint_type = typedesc.FundamentalType("unsigned int", 32, 32) +long_type = typedesc.FundamentalType("long int", 32, 32) +ulong_type = typedesc.FundamentalType("long unsigned int", 32, 32) +longlong_type = typedesc.FundamentalType("long long int", 64, 64) +ulonglong_type = typedesc.FundamentalType("long long unsigned int", 64, 64) +float_type = typedesc.FundamentalType("float", 32, 32) +double_type = typedesc.FundamentalType("double", 64, 64) + +# basic COM data types +BSTR_type = typedesc.Typedef("BSTR", PTR(wchar_t_type)) +SCODE_type = typedesc.Typedef("SCODE", int_type) +VARIANT_BOOL_type = typedesc.Typedef("VARIANT_BOOL", short_type) +HRESULT_type = typedesc.Typedef("HRESULT", ulong_type) + +VARIANT_type = typedesc.Structure("VARIANT", + align=alignment(automation.VARIANT)*8, + members=[], bases=[], + size=sizeof(automation.VARIANT)*8) +IDISPATCH_type = typedesc.Typedef("IDispatch", None) +IUNKNOWN_type = typedesc.Typedef("IUnknown", None) +DECIMAL_type = typedesc.Structure("DECIMAL", + align=alignment(automation.DECIMAL)*8, + members=[], bases=[], + size=sizeof(automation.DECIMAL)*8) + +def midlSAFEARRAY(typ): + return typedesc.SAFEARRAYType(typ) + +# faked COM data types +CURRENCY_type = longlong_type # slightly wrong; should be scaled by 10000 - use subclass of longlong? +DATE_type = double_type # not *that* wrong... 
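# Aside, illustration only: why "DATE_type = double_type" above is "not
# *that* wrong". An automation DATE is a double counting days since
# 1899-12-30; the helper name below is my own, not a comtypes API.
import datetime

def date_from_ole(value):
    return datetime.datetime(1899, 12, 30) + datetime.timedelta(days=value)

assert date_from_ole(2.5) == datetime.datetime(1900, 1, 1, 12, 0)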
+ +COMTYPES = { + automation.VT_I2: short_type, # 2 + automation.VT_I4: int_type, # 3 + automation.VT_R4: float_type, # 4 + automation.VT_R8: double_type, # 5 + automation.VT_CY: CURRENCY_type, # 6 + automation.VT_DATE: DATE_type, # 7 + automation.VT_BSTR: BSTR_type, # 8 + automation.VT_DISPATCH: PTR(IDISPATCH_type), # 9 + automation.VT_ERROR: SCODE_type, # 10 + automation.VT_BOOL: VARIANT_BOOL_type, # 11 + automation.VT_VARIANT: VARIANT_type, # 12 + automation.VT_UNKNOWN: PTR(IUNKNOWN_type), # 13 + automation.VT_DECIMAL: DECIMAL_type, # 14 + + automation.VT_I1: char_type, # 16 + automation.VT_UI1: uchar_type, # 17 + automation.VT_UI2: ushort_type, # 18 + automation.VT_UI4: ulong_type, # 19 + automation.VT_I8: longlong_type, # 20 + automation.VT_UI8: ulonglong_type, # 21 + automation.VT_INT: int_type, # 22 + automation.VT_UINT: uint_type, # 23 + automation.VT_VOID: typedesc.FundamentalType("void", 0, 0), # 24 + automation.VT_HRESULT: HRESULT_type, # 25 + automation.VT_LPSTR: PTR(char_type), # 30 + automation.VT_LPWSTR: PTR(wchar_t_type), # 31 +} + +#automation.VT_PTR = 26 # below +#automation.VT_SAFEARRAY = 27 +#automation.VT_CARRAY = 28 # below +#automation.VT_USERDEFINED = 29 # below + +#automation.VT_RECORD = 36 + +#automation.VT_ARRAY = 8192 +#automation.VT_BYREF = 16384 + +################################################################ + +class Parser(object): + + def make_type(self, tdesc, tinfo): + try: + return COMTYPES[tdesc.vt] + except KeyError: + pass + + if tdesc.vt == automation.VT_CARRAY: + typ = self.make_type(tdesc._.lpadesc[0].tdescElem, tinfo) + for i in range(tdesc._.lpadesc[0].cDims): + typ = typedesc.ArrayType(typ, + tdesc._.lpadesc[0].rgbounds[i].lLbound, + tdesc._.lpadesc[0].rgbounds[i].cElements-1) + return typ + + elif tdesc.vt == automation.VT_PTR: + typ = self.make_type(tdesc._.lptdesc[0], tinfo) + return PTR(typ) + + elif tdesc.vt == automation.VT_USERDEFINED: + try: + ti = tinfo.GetRefTypeInfo(tdesc._.hreftype) + except COMError as details: + type_name = "__error_hreftype_%d__" % tdesc._.hreftype + tlib_name = get_tlib_filename(self.tlib) + if tlib_name is None: + tlib_name = "unknown typelib" + message = "\n\tGetRefTypeInfo failed in %s: %s\n\tgenerating type '%s' instead" % \ + (tlib_name, details, type_name) + import warnings + warnings.warn(message, UserWarning); + result = typedesc.Structure(type_name, + align=8, + members=[], bases=[], + size=0) + return result + result = self.parse_typeinfo(ti) + assert result is not None, ti.GetDocumentation(-1)[0] + return result + + elif tdesc.vt == automation.VT_SAFEARRAY: + # SAFEARRAY(<type>), see Don Box pp.331f + itemtype = self.make_type(tdesc._.lptdesc[0], tinfo) + return midlSAFEARRAY(itemtype) + + raise NotImplementedError(tdesc.vt) + + ################################################################ + + # TKIND_ENUM = 0 + def ParseEnum(self, tinfo, ta): + ta = tinfo.GetTypeAttr() + enum_name = tinfo.GetDocumentation(-1)[0] + enum = typedesc.Enumeration(enum_name, 32, 32) + self._register(enum_name, enum) + + for i in range(ta.cVars): + vd = tinfo.GetVarDesc(i) + name = tinfo.GetDocumentation(vd.memid)[0] + assert vd.varkind == typeinfo.VAR_CONST + num_val = vd._.lpvarValue[0].value + v = typedesc.EnumValue(name, num_val, enum) + enum.add_value(v) + return enum + + # TKIND_RECORD = 1 + def ParseRecord(self, tinfo, ta): + members = [] # will be filled later + struct_name, doc, helpcntext, helpfile = tinfo.GetDocumentation(-1) + struct = typedesc.Structure(struct_name, + align=ta.cbAlignment*8, + 
members=members, + bases=[], + size=ta.cbSizeInstance*8) + self._register(struct_name, struct) + + tlib, _ = tinfo.GetContainingTypeLib() + tlib_ta = tlib.GetLibAttr() + # If this is a 32-bit typlib being loaded in a 64-bit process, then the + # size and alignment are incorrect. Set the size to None to disable + # size checks and correct the alignment. + if is_64bits and tlib_ta.syskind == typeinfo.SYS_WIN32: + struct.size = None + struct.align = 64 + + if ta.guid: + struct._recordinfo_ = (str(tlib_ta.guid), + tlib_ta.wMajorVerNum, tlib_ta.wMinorVerNum, + tlib_ta.lcid, + str(ta.guid)) + + for i in range(ta.cVars): + vd = tinfo.GetVarDesc(i) + name = tinfo.GetDocumentation(vd.memid)[0] + offset = vd._.oInst * 8 + assert vd.varkind == typeinfo.VAR_PERINSTANCE + typ = self.make_type(vd.elemdescVar.tdesc, tinfo) + field = typedesc.Field(name, + typ, + None, # bits + offset) + members.append(field) + return struct + + # TKIND_MODULE = 2 + def ParseModule(self, tinfo, ta): + assert 0 == ta.cImplTypes + # functions + for i in range(ta.cFuncs): + # We skip all function definitions. There are several + # problems with these, and we can, for comtypes, ignore them. + continue + fd = tinfo.GetFuncDesc(i) + dllname, func_name, ordinal = tinfo.GetDllEntry(fd.memid, fd.invkind) + func_doc = tinfo.GetDocumentation(fd.memid)[1] + assert 0 == fd.cParamsOpt # XXX + returns = self.make_type(fd.elemdescFunc.tdesc, tinfo) + + if fd.callconv == typeinfo.CC_CDECL: + attributes = "__cdecl__" + elif fd.callconv == typeinfo.CC_STDCALL: + attributes = "__stdcall__" + else: + raise ValueError("calling convention %d" % fd.callconv) + + func = typedesc.Function(func_name, returns, attributes, extern=1) + if func_doc is not None: + func.doc = func_doc.encode("mbcs") + func.dllname = dllname + self._register(func_name, func) + for i in range(fd.cParams): + argtype = self.make_type(fd.lprgelemdescParam[i].tdesc, tinfo) + func.add_argument(argtype) + + # constants + for i in range(ta.cVars): + vd = tinfo.GetVarDesc(i) + name, var_doc = tinfo.GetDocumentation(vd.memid)[0:2] + assert vd.varkind == typeinfo.VAR_CONST + typ = self.make_type(vd.elemdescVar.tdesc, tinfo) + var_value = vd._.lpvarValue[0].value + v = typedesc.Constant(name, typ, var_value) + self._register(name, v) + if var_doc is not None: + v.doc = var_doc + + # TKIND_INTERFACE = 3 + def ParseInterface(self, tinfo, ta): + itf_name, itf_doc = tinfo.GetDocumentation(-1)[0:2] + assert ta.cImplTypes <= 1 + if ta.cImplTypes == 0 and itf_name != "IUnknown": + # Windows defines an interface IOleControlTypes in ocidl.idl. + # Don't known what artefact that is - we ignore it. + # It's an interface without methods anyway. + if itf_name != "IOleControlTypes": + message = "Ignoring interface %s which has no base interface" % itf_name + import warnings + warnings.warn(message, UserWarning); + return None + + itf = typedesc.ComInterface(itf_name, + members=[], + base=None, + iid=str(ta.guid), + idlflags=self.interface_type_flags(ta.wTypeFlags)) + if itf_doc: + itf.doc = itf_doc + self._register(itf_name, itf) + + if ta.cImplTypes: + hr = tinfo.GetRefTypeOfImplType(0) + tibase = tinfo.GetRefTypeInfo(hr) + itf.base = self.parse_typeinfo(tibase) + + assert ta.cVars == 0, "vars on an Interface?" 
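# Aside, illustration only (made-up offsets and names): ParseInterface below
# collects (oVft, method) pairs and sorts them, so methods come out in
# VTable-offset order even when the typeinfo enumerates them differently.
entries = [(16, "Skip"), (8, "Next"), (12, "Reset")]
entries.sort()
assert [name for _, name in entries] == ["Next", "Reset", "Skip"]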
+ + members = [] + for i in range(ta.cFuncs): + fd = tinfo.GetFuncDesc(i) +## func_name = tinfo.GetDocumentation(fd.memid)[0] + func_name, func_doc = tinfo.GetDocumentation(fd.memid)[:2] + assert fd.funckind == typeinfo.FUNC_PUREVIRTUAL + returns = self.make_type(fd.elemdescFunc.tdesc, tinfo) + names = tinfo.GetNames(fd.memid, fd.cParams+1) + names.append("rhs") + names = names[:fd.cParams + 1] + assert len(names) == fd.cParams + 1 + flags = self.func_flags(fd.wFuncFlags) + flags += self.inv_kind(fd.invkind) + mth = typedesc.ComMethod(fd.invkind, fd.memid, func_name, returns, flags, func_doc) + mth.oVft = fd.oVft + for p in range(fd.cParams): + typ = self.make_type(fd.lprgelemdescParam[p].tdesc, tinfo) + name = names[p+1] + flags = fd.lprgelemdescParam[p]._.paramdesc.wParamFlags + if flags & typeinfo.PARAMFLAG_FHASDEFAULT: + # XXX should be handled by VARIANT itself + var = fd.lprgelemdescParam[p]._.paramdesc.pparamdescex[0].varDefaultValue + default = var.value + else: + default = None + mth.add_argument(typ, name, self.param_flags(flags), default) + members.append((fd.oVft, mth)) + # Sort the methods by oVft (VTable offset): Some typeinfo + # don't list methods in VTable order. + members.sort() + itf.members.extend([m[1] for m in members]) + + return itf + + # TKIND_DISPATCH = 4 + def ParseDispatch(self, tinfo, ta): + itf_name, doc = tinfo.GetDocumentation(-1)[0:2] + assert ta.cImplTypes == 1 + + hr = tinfo.GetRefTypeOfImplType(0) + tibase = tinfo.GetRefTypeInfo(hr) + base = self.parse_typeinfo(tibase) + members = [] + itf = typedesc.DispInterface(itf_name, + members=members, + base=base, + iid=str(ta.guid), + idlflags=self.interface_type_flags(ta.wTypeFlags)) + if doc is not None: + itf.doc = str(doc.split("\0")[0]) + self._register(itf_name, itf) + + # This code can only handle pure dispinterfaces. Dual + # interfaces are parsed in ParseInterface(). + assert ta.wTypeFlags & typeinfo.TYPEFLAG_FDUAL == 0 + + for i in range(ta.cVars): + vd = tinfo.GetVarDesc(i) + assert vd.varkind == typeinfo.VAR_DISPATCH + var_name, var_doc = tinfo.GetDocumentation(vd.memid)[0:2] + typ = self.make_type(vd.elemdescVar.tdesc, tinfo) + mth = typedesc.DispProperty(vd.memid, var_name, typ, self.var_flags(vd.wVarFlags), var_doc) + itf.members.append(mth) + + # At least the EXCEL typelib lists the IUnknown and IDispatch + # methods even for this kind of interface. I didn't find any + # indication about these methods in the various flags, so we + # have to exclude them by name. + # CLF: 12/14/2012 Do this in a way that does not exclude other methods. + # I have encountered typlibs where only "QueryInterface", "AddRef" + # and "Release" are to be skipped. 
+ ignored_names = set(["QueryInterface", "AddRef", "Release", + "GetTypeInfoCount", "GetTypeInfo", + "GetIDsOfNames", "Invoke"]) + + for i in range(ta.cFuncs): + fd = tinfo.GetFuncDesc(i) + func_name, func_doc = tinfo.GetDocumentation(fd.memid)[:2] + if func_name in ignored_names: + continue + assert fd.funckind == typeinfo.FUNC_DISPATCH + + returns = self.make_type(fd.elemdescFunc.tdesc, tinfo) + names = tinfo.GetNames(fd.memid, fd.cParams+1) + names.append("rhs") + names = names[:fd.cParams + 1] + assert len(names) == fd.cParams + 1 # function name first, then parameter names + flags = self.func_flags(fd.wFuncFlags) + flags += self.inv_kind(fd.invkind) + mth = typedesc.DispMethod(fd.memid, fd.invkind, func_name, returns, flags, func_doc) + for p in range(fd.cParams): + typ = self.make_type(fd.lprgelemdescParam[p].tdesc, tinfo) + name = names[p+1] + flags = fd.lprgelemdescParam[p]._.paramdesc.wParamFlags + if flags & typeinfo.PARAMFLAG_FHASDEFAULT: + var = fd.lprgelemdescParam[p]._.paramdesc.pparamdescex[0].varDefaultValue + default = var.value + else: + default = None + mth.add_argument(typ, name, self.param_flags(flags), default) + itf.members.append(mth) + + return itf + + def inv_kind(self, invkind): + NAMES = {automation.DISPATCH_METHOD: [], + automation.DISPATCH_PROPERTYPUT: ["propput"], + automation.DISPATCH_PROPERTYPUTREF: ["propputref"], + automation.DISPATCH_PROPERTYGET: ["propget"]} + return NAMES[invkind] + + def func_flags(self, flags): + # map FUNCFLAGS values to idl attributes + NAMES = {typeinfo.FUNCFLAG_FRESTRICTED: "restricted", + typeinfo.FUNCFLAG_FSOURCE: "source", + typeinfo.FUNCFLAG_FBINDABLE: "bindable", + typeinfo.FUNCFLAG_FREQUESTEDIT: "requestedit", + typeinfo.FUNCFLAG_FDISPLAYBIND: "displaybind", + typeinfo.FUNCFLAG_FDEFAULTBIND: "defaultbind", + typeinfo.FUNCFLAG_FHIDDEN: "hidden", + typeinfo.FUNCFLAG_FUSESGETLASTERROR: "usesgetlasterror", + typeinfo.FUNCFLAG_FDEFAULTCOLLELEM: "defaultcollelem", + typeinfo.FUNCFLAG_FUIDEFAULT: "uidefault", + typeinfo.FUNCFLAG_FNONBROWSABLE: "nonbrowsable", + # typeinfo.FUNCFLAG_FREPLACEABLE: "???", + typeinfo.FUNCFLAG_FIMMEDIATEBIND: "immediatebind"} + return [NAMES[bit] for bit in NAMES if bit & flags] + + def param_flags(self, flags): + # map PARAMFLAGS values to idl attributes + NAMES = {typeinfo.PARAMFLAG_FIN: "in", + typeinfo.PARAMFLAG_FOUT: "out", + typeinfo.PARAMFLAG_FLCID: "lcid", + typeinfo.PARAMFLAG_FRETVAL: "retval", + typeinfo.PARAMFLAG_FOPT: "optional", + # typeinfo.PARAMFLAG_FHASDEFAULT: "", + # typeinfo.PARAMFLAG_FHASCUSTDATA: "", + } + return [NAMES[bit] for bit in NAMES if bit & flags] + + def coclass_type_flags(self, flags): + # map TYPEFLAGS values to idl attributes + NAMES = {typeinfo.TYPEFLAG_FAPPOBJECT: "appobject", + # typeinfo.TYPEFLAG_FCANCREATE: + typeinfo.TYPEFLAG_FLICENSED: "licensed", + # typeinfo.TYPEFLAG_FPREDECLID: + typeinfo.TYPEFLAG_FHIDDEN: "hidden", + typeinfo.TYPEFLAG_FCONTROL: "control", + typeinfo.TYPEFLAG_FDUAL: "dual", + typeinfo.TYPEFLAG_FNONEXTENSIBLE: "nonextensible", + typeinfo.TYPEFLAG_FOLEAUTOMATION: "oleautomation", + typeinfo.TYPEFLAG_FRESTRICTED: "restricted", + typeinfo.TYPEFLAG_FAGGREGATABLE: "aggregatable", + # typeinfo.TYPEFLAG_FREPLACEABLE: + # typeinfo.TYPEFLAG_FDISPATCHABLE # computed, no flag for this + typeinfo.TYPEFLAG_FREVERSEBIND: "reversebind", + typeinfo.TYPEFLAG_FPROXY: "proxy", + } + NEGATIVE_NAMES = {typeinfo.TYPEFLAG_FCANCREATE: "noncreatable"} + return [NAMES[bit] for bit in NAMES if bit & flags] + \ + [NEGATIVE_NAMES[bit] for bit in NEGATIVE_NAMES if not (bit 
& flags)] + + def interface_type_flags(self, flags): + # map TYPEFLAGS values to idl attributes + NAMES = {typeinfo.TYPEFLAG_FAPPOBJECT: "appobject", + # typeinfo.TYPEFLAG_FCANCREATE: + typeinfo.TYPEFLAG_FLICENSED: "licensed", + # typeinfo.TYPEFLAG_FPREDECLID: + typeinfo.TYPEFLAG_FHIDDEN: "hidden", + typeinfo.TYPEFLAG_FCONTROL: "control", + typeinfo.TYPEFLAG_FDUAL: "dual", + typeinfo.TYPEFLAG_FNONEXTENSIBLE: "nonextensible", + typeinfo.TYPEFLAG_FOLEAUTOMATION: "oleautomation", + typeinfo.TYPEFLAG_FRESTRICTED: "restricted", + typeinfo.TYPEFLAG_FAGGREGATABLE: "aggregatable", + # typeinfo.TYPEFLAG_FREPLACEABLE: + # typeinfo.TYPEFLAG_FDISPATCHABLE # computed, no flag for this + typeinfo.TYPEFLAG_FREVERSEBIND: "reversebind", + typeinfo.TYPEFLAG_FPROXY: "proxy", + } + NEGATIVE_NAMES = {} + return [NAMES[bit] for bit in NAMES if bit & flags] + \ + [NEGATIVE_NAMES[bit] for bit in NEGATIVE_NAMES if not (bit & flags)] + + def var_flags(self, flags): + NAMES = {typeinfo.VARFLAG_FREADONLY: "readonly", + typeinfo.VARFLAG_FSOURCE: "source", + typeinfo.VARFLAG_FBINDABLE: "bindable", + typeinfo.VARFLAG_FREQUESTEDIT: "requestedit", + typeinfo.VARFLAG_FDISPLAYBIND: "displaybind", + typeinfo.VARFLAG_FDEFAULTBIND: "defaultbind", + typeinfo.VARFLAG_FHIDDEN: "hidden", + typeinfo.VARFLAG_FRESTRICTED: "restricted", + typeinfo.VARFLAG_FDEFAULTCOLLELEM: "defaultcollelem", + typeinfo.VARFLAG_FUIDEFAULT: "uidefault", + typeinfo.VARFLAG_FNONBROWSABLE: "nonbrowsable", + typeinfo.VARFLAG_FREPLACEABLE: "replaceable", + typeinfo.VARFLAG_FIMMEDIATEBIND: "immediatebind" + } + return [NAMES[bit] for bit in NAMES if bit & flags] + + + # TKIND_COCLASS = 5 + def ParseCoClass(self, tinfo, ta): + # possible ta.wTypeFlags: helpstring, helpcontext, licensed, + # version, control, hidden, and appobject + coclass_name, doc = tinfo.GetDocumentation(-1)[0:2] + tlibattr = tinfo.GetContainingTypeLib()[0].GetLibAttr() + coclass = typedesc.CoClass(coclass_name, + str(ta.guid), + self.coclass_type_flags(ta.wTypeFlags), + tlibattr) + if doc is not None: + coclass.doc = doc + self._register(coclass_name, coclass) + + for i in range(ta.cImplTypes): + hr = tinfo.GetRefTypeOfImplType(i) + ti = tinfo.GetRefTypeInfo(hr) + itf = self.parse_typeinfo(ti) + flags = tinfo.GetImplTypeFlags(i) + coclass.add_interface(itf, flags) + return coclass + + # TKIND_ALIAS = 6 + def ParseAlias(self, tinfo, ta): + name = tinfo.GetDocumentation(-1)[0] + typ = self.make_type(ta.tdescAlias, tinfo) + alias = typedesc.Typedef(name, typ) + self._register(name, alias) + return alias + + # TKIND_UNION = 7 + def ParseUnion(self, tinfo, ta): + union_name, doc, helpcntext, helpfile = tinfo.GetDocumentation(-1) + members = [] + union = typedesc.Union(union_name, + align=ta.cbAlignment*8, + members=members, + bases=[], + size=ta.cbSizeInstance*8) + self._register(union_name, union) + + tlib, _ = tinfo.GetContainingTypeLib() + tlib_ta = tlib.GetLibAttr() + # If this is a 32-bit typlib being loaded in a 64-bit process, then the + # size and alignment are incorrect. Set the size to None to disable + # size checks and correct the alignment. 
+ if is_64bits and tlib_ta.syskind == typeinfo.SYS_WIN32: + union.size = None + union.align = 64 + + for i in range(ta.cVars): + vd = tinfo.GetVarDesc(i) + name = tinfo.GetDocumentation(vd.memid)[0] + offset = vd._.oInst * 8 + assert vd.varkind == typeinfo.VAR_PERINSTANCE + typ = self.make_type(vd.elemdescVar.tdesc, tinfo) + field = typedesc.Field(name, + typ, + None, # bits + offset) + members.append(field) + return union + + ################################################################ + + def _typelib_module(self, tlib=None): + if tlib is None: + tlib = self.tlib + # return a string that uniquely identifies a typelib. + # The string doesn't have any meaning outside this instance. + return str(tlib.GetLibAttr()) + + def _register(self, name, value, tlib=None): + modname = self._typelib_module(tlib) + fullname = "%s.%s" % (modname, name) + if fullname in self.items: + # XXX Can we really allow this? It happens, at least. + if isinstance(value, typedesc.External): + return + # BUG: We try to register an item that's already registered. + raise ValueError("Bug: Multiple registered name '%s': %r" % (name, value)) + self.items[fullname] = value + + def parse_typeinfo(self, tinfo): + name = tinfo.GetDocumentation(-1)[0] + modname = self._typelib_module() + try: + return self.items["%s.%s" % (modname, name)] + except KeyError: + pass + + tlib = tinfo.GetContainingTypeLib()[0] + if tlib != self.tlib: + ta = tinfo.GetTypeAttr() + size = ta.cbSizeInstance * 8 + align = ta.cbAlignment * 8 + typ = typedesc.External(tlib, + name, + size, + align, + tlib.GetDocumentation(-1)[:2]) + self._register(name, typ, tlib) + return typ + + ta = tinfo.GetTypeAttr() + tkind = ta.typekind + + if tkind == typeinfo.TKIND_ENUM: # 0 + return self.ParseEnum(tinfo, ta) + elif tkind == typeinfo.TKIND_RECORD: # 1 + return self.ParseRecord(tinfo, ta) + elif tkind == typeinfo.TKIND_MODULE: # 2 + return self.ParseModule(tinfo, ta) + elif tkind == typeinfo.TKIND_INTERFACE: # 3 + return self.ParseInterface(tinfo, ta) + elif tkind == typeinfo.TKIND_DISPATCH: # 4 + try: + # GetRefTypeOfImplType(-1) returns the custom portion + # of a dispinterface, if it is dual + href = tinfo.GetRefTypeOfImplType(-1) + except COMError: + # no dual interface + return self.ParseDispatch(tinfo, ta) + tinfo = tinfo.GetRefTypeInfo(href) + ta = tinfo.GetTypeAttr() + assert ta.typekind == typeinfo.TKIND_INTERFACE + return self.ParseInterface(tinfo, ta) + elif tkind == typeinfo.TKIND_COCLASS: # 5 + return self.ParseCoClass(tinfo, ta) + elif tkind == typeinfo.TKIND_ALIAS: # 6 + return self.ParseAlias(tinfo, ta) + elif tkind == typeinfo.TKIND_UNION: # 7 + return self.ParseUnion(tinfo, ta) + else: + print("NYI", tkind) +## raise "NYI", tkind + + def parse_LibraryDescription(self): + la = self.tlib.GetLibAttr() + name, doc = self.tlib.GetDocumentation(-1)[:2] + desc = typedesc.TypeLib(name, + str(la.guid), la.wMajorVerNum, la.wMinorVerNum, + doc) + self._register(None, desc) + + ################################################################ + + def parse(self): + self.parse_LibraryDescription() + + for i in range(self.tlib.GetTypeInfoCount()): + tinfo = self.tlib.GetTypeInfo(i) + self.parse_typeinfo(tinfo) + return self.items + +class TlbFileParser(Parser): + "Parses a type library from a file" + def __init__(self, path): + # XXX DOESN'T LOOK CORRECT: We should NOT register the typelib. 
+ self.tlib = typeinfo.LoadTypeLibEx(path)#, regkind=typeinfo.REGKIND_REGISTER) + self.items = {} + +class TypeLibParser(Parser): + def __init__(self, tlib): + self.tlib = tlib + self.items = {} + +################################################################ +# some interesting typelibs + +## these do NOT work: + # XXX infinite loop? +## path = r"mshtml.tlb" # has propputref + + # has SAFEARRAY + # HRESULT Run(BSTR, SAFEARRAY(VARIANT)*, VARIANT*) +## path = "msscript.ocx" + + # has SAFEARRAY + # HRESULT AddAddress(SAFEARRAY(BSTR)*, SAFEARRAY(BSTR)*) +## path = r"c:\Programme\Microsoft Office\Office\MSWORD8.OLB" # has propputref + + # has SAFEARRAY: + # SAFEARRAY(unsigned char) FileSignatureInfo(BSTR, long, MsiSignatureInfo) +## path = r"msi.dll" # DispProperty + + # fails packing IDLDESC +## path = r"C:\Dokumente und Einstellungen\thomas\Desktop\tlb\win.tlb" + # fails packing WIN32_FIND_DATA +## path = r"C:\Dokumente und Einstellungen\thomas\Desktop\tlb\win32.tlb" + # has a POINTER(IUnknown) as default parameter value +## path = r"c:\Programme\Gemeinsame Dateien\Microsoft Shared\Speech\sapi.dll" + + +## path = r"hnetcfg.dll" +## path = r"simpdata.tlb" +## path = r"nscompat.tlb" +## path = r"stdole32.tlb" + +## path = r"shdocvw.dll" + +## path = r"c:\Programme\Microsoft Office\Office\MSO97.DLL" +## path = r"PICCLP32.OCX" # DispProperty +## path = r"MSHFLXGD.OCX" # DispProperty, propputref +## path = r"scrrun.dll" # propput AND propputref on IDictionary::Item +## path = r"C:\Dokumente und Einstellungen\thomas\Desktop\tlb\threadapi.tlb" + +## path = r"..\samples\BITS\bits2_0.tlb" + +## path = r"c:\vc98\include\activscp.tlb" + +def get_tlib_filename(tlib): + # seems if the typelib is not registered, there's no way to + # determine the filename. + from ctypes import windll, byref + from comtypes import BSTR + la = tlib.GetLibAttr() + name = BSTR() + try: + windll.oleaut32.QueryPathOfRegTypeLib + except AttributeError: + # Windows CE doesn't have this function + return None + if 0 == windll.oleaut32.QueryPathOfRegTypeLib(byref(la.guid), + la.wMajorVerNum, + la.wMinorVerNum, + 0, # lcid + byref(name) + ): + full_filename = name.value.split("\0")[0] + if not os.path.isabs(full_filename): + # workaround Windows 7 bug in QueryPathOfRegTypeLib returning relative path + try: + dll = windll.LoadLibrary(full_filename) + full_filename = _get_module_filename(dll._handle) + del dll + except OSError: + return None + return full_filename + return None + +def _py2exe_hint(): + # If the tlbparser is frozen, we need to include these + import comtypes.persist + import comtypes.typeinfo + import comtypes.automation + +def generate_module(tlib, ofi, pathname): + known_symbols = {} + for name in ("comtypes.persist", + "comtypes.typeinfo", + "comtypes.automation", + "comtypes._others", + "comtypes", + "ctypes.wintypes", + "ctypes"): + try: + mod = __import__(name) + except ImportError: + if name == "comtypes._others": + continue + raise + for submodule in name.split(".")[1:]: + mod = getattr(mod, submodule) + for name in mod.__dict__: + known_symbols[name] = mod.__name__ + p = TypeLibParser(tlib) + if pathname is None: + pathname = get_tlib_filename(tlib) + items = p.parse() + + from .codegenerator import Generator + + gen = Generator(ofi, + known_symbols=known_symbols, + ) + + gen.generate_code(list(items.values()), filename=pathname) + +# -eof- diff --git a/venv/Lib/site-packages/comtypes/tools/typedesc.py b/venv/Lib/site-packages/comtypes/tools/typedesc.py new file mode 100644 index 00000000..9665b62f --- 
/dev/null +++ b/venv/Lib/site-packages/comtypes/tools/typedesc.py @@ -0,0 +1,138 @@ +# More type descriptions from parsed COM typelibaries, extending those +# in typedesc_base + +import ctypes +from comtypes.tools.typedesc_base import * + +class TypeLib(object): + def __init__(self, name, guid, major, minor, doc=None): + self.name = name + self.guid = guid + self.major = major + self.minor = minor + self.doc = doc + + def __repr__(self): + return "<TypeLib(%s: %s, %s, %s)>" % (self.name, self.guid, self.major, self.minor) + +class Constant(object): + def __init__(self, name, typ, value): + self.name = name + self.typ = typ + self.value = value + +class External(object): + def __init__(self, tlib, name, size, align, docs=None): + # the type library containing the symbol + self.tlib = tlib + # name of symbol + self.symbol_name = name + self.size = size + self.align = align + # type lib description + self.docs = docs + + def get_head(self): + # codegen might call this + return self + +class SAFEARRAYType(object): + def __init__(self, typ): + self.typ = typ + self.align = self.size = ctypes.sizeof(ctypes.c_void_p) * 8 + +class ComMethod(object): + # custom COM method, parsed from typelib + def __init__(self, invkind, memid, name, returns, idlflags, doc): + self.invkind = invkind + self.name = name + self.returns = returns + self.idlflags = idlflags + self.memid = memid + self.doc = doc + self.arguments = [] + + def add_argument(self, typ, name, idlflags, default): + self.arguments.append((typ, name, idlflags, default)) + +class DispMethod(object): + # dispatchable COM method, parsed from typelib + def __init__(self, dispid, invkind, name, returns, idlflags, doc): + self.dispid = dispid + self.invkind = invkind + self.name = name + self.returns = returns + self.idlflags = idlflags + self.doc = doc + self.arguments = [] + + def add_argument(self, typ, name, idlflags, default): + self.arguments.append((typ, name, idlflags, default)) + +class DispProperty(object): + # dispatchable COM property, parsed from typelib + def __init__(self, dispid, name, typ, idlflags, doc): + self.dispid = dispid + self.name = name + self.typ = typ + self.idlflags = idlflags + self.doc = doc + +class DispInterfaceHead(object): + def __init__(self, itf): + self.itf = itf + +class DispInterfaceBody(object): + def __init__(self, itf): + self.itf = itf + +class DispInterface(object): + def __init__(self, name, members, base, iid, idlflags): + self.name = name + self.members = members + self.base = base + self.iid = iid + self.idlflags = idlflags + self.itf_head = DispInterfaceHead(self) + self.itf_body = DispInterfaceBody(self) + + def get_body(self): + return self.itf_body + + def get_head(self): + return self.itf_head + +class ComInterfaceHead(object): + def __init__(self, itf): + self.itf = itf + +class ComInterfaceBody(object): + def __init__(self, itf): + self.itf = itf + +class ComInterface(object): + def __init__(self, name, members, base, iid, idlflags): + self.name = name + self.members = members + self.base = base + self.iid = iid + self.idlflags = idlflags + self.itf_head = ComInterfaceHead(self) + self.itf_body = ComInterfaceBody(self) + + def get_body(self): + return self.itf_body + + def get_head(self): + return self.itf_head + +class CoClass(object): + def __init__(self, name, clsid, idlflags, tlibattr): + self.name = name + self.clsid = clsid + self.idlflags = idlflags + self.tlibattr = tlibattr + self.interfaces = [] + + def add_interface(self, itf, idlflags): + self.interfaces.append((itf, idlflags)) diff 
--git a/venv/Lib/site-packages/comtypes/tools/typedesc_base.py b/venv/Lib/site-packages/comtypes/tools/typedesc_base.py new file mode 100644 index 00000000..534de91e --- /dev/null +++ b/venv/Lib/site-packages/comtypes/tools/typedesc_base.py @@ -0,0 +1,205 @@ +# typedesc.py - classes representing C type descriptions +try: + set +except NameError: + from sets import Set as set + +class Argument(object): + "a Parameter in the argument list of a callable (Function, Method, ...)" + def __init__(self, atype, name): + self.atype = atype + self.name = name + +class _HasArgs(object): + + def __init__(self): + self.arguments = [] + + def add_argument(self, arg): + assert isinstance(arg, Argument) + self.arguments.append(arg) + + def iterArgTypes(self): + for a in self.arguments: + yield a.atype + + def iterArgNames(self): + for a in self.arguments: + yield a.name + + def fixup_argtypes(self, typemap): + for a in self.arguments: + a.atype = typemap[a.atype] + + +################ + +class Alias(object): + # a C preprocessor alias, like #define A B + def __init__(self, name, alias, typ=None): + self.name = name + self.alias = alias + self.typ = typ + +class Macro(object): + # a C preprocessor definition with arguments + def __init__(self, name, args, body): + # all arguments are strings, args is the literal argument list + # *with* the parens around it: + # Example: Macro("CD_INDRIVE", "(status)", "((int)status > 0)") + self.name = name + self.args = args + self.body = body + +class File(object): + def __init__(self, name): + self.name = name + +class Function(_HasArgs): + location = None + def __init__(self, name, returns, attributes, extern): + _HasArgs.__init__(self) + self.name = name + self.returns = returns + self.attributes = attributes # dllimport, __stdcall__, __cdecl__ + self.extern = extern + +class Constructor(_HasArgs): + location = None + def __init__(self, name): + _HasArgs.__init__(self) + self.name = name + +class OperatorFunction(_HasArgs): + location = None + def __init__(self, name, returns): + _HasArgs.__init__(self) + self.name = name + self.returns = returns + +class FunctionType(_HasArgs): + location = None + def __init__(self, returns, attributes): + _HasArgs.__init__(self) + self.returns = returns + self.attributes = attributes + +class Method(_HasArgs): + location = None + def __init__(self, name, returns): + _HasArgs.__init__(self) + self.name = name + self.returns = returns + +class FundamentalType(object): + location = None + def __init__(self, name, size, align): + self.name = name + if name != "void": + self.size = int(size) + self.align = int(align) + +class PointerType(object): + location = None + def __init__(self, typ, size, align): + self.typ = typ + self.size = int(size) + self.align = int(align) + +class Typedef(object): + location = None + def __init__(self, name, typ): + self.name = name + self.typ = typ + +class ArrayType(object): + location = None + def __init__(self, typ, min, max): + self.typ = typ + self.min = min + self.max = max + +class StructureHead(object): + location = None + def __init__(self, struct): + self.struct = struct + +class StructureBody(object): + location = None + def __init__(self, struct): + self.struct = struct + +class _Struct_Union_Base(object): + location = None + def get_body(self): + return self.struct_body + + def get_head(self): + return self.struct_head + +class Structure(_Struct_Union_Base): + def __init__(self, name, align, members, bases, size, artificial=None): + self.name = name + self.align = int(align) + self.members = 
members + self.bases = bases + self.artificial = artificial + if size is not None: + self.size = int(size) + else: + self.size = None + self.struct_body = StructureBody(self) + self.struct_head = StructureHead(self) + +class Union(_Struct_Union_Base): + def __init__(self, name, align, members, bases, size, artificial=None): + self.name = name + self.align = int(align) + self.members = members + self.bases = bases + self.artificial = artificial + if size is not None: + self.size = int(size) + else: + self.size = None + self.struct_body = StructureBody(self) + self.struct_head = StructureHead(self) + +class Field(object): + def __init__(self, name, typ, bits, offset): + self.name = name + self.typ = typ + self.bits = bits + self.offset = int(offset) + +class CvQualifiedType(object): + def __init__(self, typ, const, volatile): + self.typ = typ + self.const = const + self.volatile = volatile + +class Enumeration(object): + location = None + def __init__(self, name, size, align): + self.name = name + self.size = int(size) + self.align = int(align) + self.values = [] + + def add_value(self, v): + self.values.append(v) + +class EnumValue(object): + def __init__(self, name, value, enumeration): + self.name = name + self.value = value + self.enumeration = enumeration + +class Variable(object): + location = None + def __init__(self, name, typ, init=None): + self.name = name + self.typ = typ + self.init = init + +################################################################ diff --git a/venv/Lib/site-packages/comtypes/typeinfo.py b/venv/Lib/site-packages/comtypes/typeinfo.py new file mode 100644 index 00000000..bd7e5221 --- /dev/null +++ b/venv/Lib/site-packages/comtypes/typeinfo.py @@ -0,0 +1,898 @@ +# XXX Should convert from STDMETHOD to COMMETHOD. + +# generated by 'xml2py' +# flags '..\tools\windows.xml -m comtypes -m comtypes.automation -w -r .*TypeLibEx -r .*TypeLib -o typeinfo.py' +# then hacked manually +import os +import sys +import weakref + +from ctypes import * +from ctypes.wintypes import ULONG +from comtypes import STDMETHOD +from comtypes import COMMETHOD +from comtypes import _GUID, GUID +# XXX should import more stuff from ctypes.wintypes... 
+from comtypes.automation import BSTR +from comtypes.automation import DISPID +from comtypes.automation import DISPPARAMS +from comtypes.automation import DWORD +from comtypes.automation import EXCEPINFO +from comtypes.automation import HRESULT +from comtypes.automation import IID +from comtypes.automation import IUnknown +from comtypes.automation import LCID +from comtypes.automation import LONG +from comtypes.automation import SCODE +from comtypes.automation import UINT +from comtypes.automation import VARIANT +from comtypes.automation import VARIANTARG +from comtypes.automation import VARTYPE +from comtypes.automation import WCHAR +from comtypes.automation import WORD +from comtypes.automation import tagVARIANT + +is_64_bit = sys.maxsize > 2**32 + +BOOL = c_int +HREFTYPE = DWORD +INT = c_int +MEMBERID = DISPID +OLECHAR = WCHAR +PVOID = c_void_p +SHORT = c_short +# See https://msdn.microsoft.com/en-us/library/windows/desktop/aa383751(v=vs.85).aspx#ULONG_PTR # noqa +ULONG_PTR = c_uint64 if is_64_bit else c_ulong + +USHORT = c_ushort +LPOLESTR = POINTER(OLECHAR) + +################################################################ +# enums +tagSYSKIND = c_int # enum +SYS_WIN16 = 0 +SYS_WIN32 = 1 +SYS_MAC = 2 +SYS_WIN64 = 3 +SYSKIND = tagSYSKIND + +tagREGKIND = c_int # enum +REGKIND_DEFAULT = 0 +REGKIND_REGISTER = 1 +REGKIND_NONE = 2 +REGKIND = tagREGKIND + +tagTYPEKIND = c_int # enum +TKIND_ENUM = 0 +TKIND_RECORD = 1 +TKIND_MODULE = 2 +TKIND_INTERFACE = 3 +TKIND_DISPATCH = 4 +TKIND_COCLASS = 5 +TKIND_ALIAS = 6 +TKIND_UNION = 7 +TKIND_MAX = 8 +TYPEKIND = tagTYPEKIND + +tagINVOKEKIND = c_int # enum +INVOKE_FUNC = 1 +INVOKE_PROPERTYGET = 2 +INVOKE_PROPERTYPUT = 4 +INVOKE_PROPERTYPUTREF = 8 +INVOKEKIND = tagINVOKEKIND + +tagDESCKIND = c_int # enum +DESCKIND_NONE = 0 +DESCKIND_FUNCDESC = 1 +DESCKIND_VARDESC = 2 +DESCKIND_TYPECOMP = 3 +DESCKIND_IMPLICITAPPOBJ = 4 +DESCKIND_MAX = 5 +DESCKIND = tagDESCKIND + +tagVARKIND = c_int # enum +VAR_PERINSTANCE = 0 +VAR_STATIC = 1 +VAR_CONST = 2 +VAR_DISPATCH = 3 +VARKIND = tagVARKIND + +tagFUNCKIND = c_int # enum +FUNC_VIRTUAL = 0 +FUNC_PUREVIRTUAL = 1 +FUNC_NONVIRTUAL = 2 +FUNC_STATIC = 3 +FUNC_DISPATCH = 4 +FUNCKIND = tagFUNCKIND + +tagCALLCONV = c_int # enum +CC_FASTCALL = 0 +CC_CDECL = 1 +CC_MSCPASCAL = 2 +CC_PASCAL = 2 +CC_MACPASCAL = 3 +CC_STDCALL = 4 +CC_FPFASTCALL = 5 +CC_SYSCALL = 6 +CC_MPWCDECL = 7 +CC_MPWPASCAL = 8 +CC_MAX = 9 +CALLCONV = tagCALLCONV + +IMPLTYPEFLAG_FDEFAULT = 1 +IMPLTYPEFLAG_FSOURCE = 2 +IMPLTYPEFLAG_FRESTRICTED = 4 +IMPLTYPEFLAG_FDEFAULTVTABLE = 8 + +tagTYPEFLAGS = c_int # enum +TYPEFLAG_FAPPOBJECT = 1 +TYPEFLAG_FCANCREATE = 2 +TYPEFLAG_FLICENSED = 4 +TYPEFLAG_FPREDECLID = 8 +TYPEFLAG_FHIDDEN = 16 +TYPEFLAG_FCONTROL = 32 +TYPEFLAG_FDUAL = 64 +TYPEFLAG_FNONEXTENSIBLE = 128 +TYPEFLAG_FOLEAUTOMATION = 256 +TYPEFLAG_FRESTRICTED = 512 +TYPEFLAG_FAGGREGATABLE = 1024 +TYPEFLAG_FREPLACEABLE = 2048 +TYPEFLAG_FDISPATCHABLE = 4096 +TYPEFLAG_FREVERSEBIND = 8192 +TYPEFLAG_FPROXY = 16384 +TYPEFLAGS = tagTYPEFLAGS + +tagFUNCFLAGS = c_int # enum +FUNCFLAG_FRESTRICTED = 1 +FUNCFLAG_FSOURCE = 2 +FUNCFLAG_FBINDABLE = 4 +FUNCFLAG_FREQUESTEDIT = 8 +FUNCFLAG_FDISPLAYBIND = 16 +FUNCFLAG_FDEFAULTBIND = 32 +FUNCFLAG_FHIDDEN = 64 +FUNCFLAG_FUSESGETLASTERROR = 128 +FUNCFLAG_FDEFAULTCOLLELEM = 256 +FUNCFLAG_FUIDEFAULT = 512 +FUNCFLAG_FNONBROWSABLE = 1024 +FUNCFLAG_FREPLACEABLE = 2048 +FUNCFLAG_FIMMEDIATEBIND = 4096 +FUNCFLAGS = tagFUNCFLAGS + +tagVARFLAGS = c_int # enum +VARFLAG_FREADONLY = 1 +VARFLAG_FSOURCE = 2 +VARFLAG_FBINDABLE = 4 
+VARFLAG_FREQUESTEDIT = 8 +VARFLAG_FDISPLAYBIND = 16 +VARFLAG_FDEFAULTBIND = 32 +VARFLAG_FHIDDEN = 64 +VARFLAG_FRESTRICTED = 128 +VARFLAG_FDEFAULTCOLLELEM = 256 +VARFLAG_FUIDEFAULT = 512 +VARFLAG_FNONBROWSABLE = 1024 +VARFLAG_FREPLACEABLE = 2048 +VARFLAG_FIMMEDIATEBIND = 4096 +VARFLAGS = tagVARFLAGS + +PARAMFLAG_NONE = 0 +PARAMFLAG_FIN = 1 +PARAMFLAG_FOUT = 2 +PARAMFLAG_FLCID = 4 +PARAMFLAG_FRETVAL = 8 +PARAMFLAG_FOPT = 16 +PARAMFLAG_FHASDEFAULT = 32 +PARAMFLAG_FHASCUSTDATA = 64 + +################################################################ +# a helper +def _deref_with_release(ptr, release): + # Given a POINTER instance, return the pointed to value. + # Call the 'release' function with 'ptr' to release resources + # when the value is no longer needed. + result = ptr[0] + result.__ref__ = weakref.ref(result, lambda dead: release(ptr)) + return result + +# interfaces + +class ITypeLib(IUnknown): + _iid_ = GUID("{00020402-0000-0000-C000-000000000046}") + + # Commented out methods use the default implementation that comtypes + # automatically creates for COM methods. + +## def GetTypeInfoCount(self): +## "Return the number of type informations" + +## def GetTypeInfo(self, index): +## "Load type info by index" + +## def GetTypeInfoType(self, index): +## "Return the TYPEKIND of type information" + +## def GetTypeInfoOfGuid(self, guid): +## "Return type information for a guid" + + def GetLibAttr(self): + "Return type library attributes" + return _deref_with_release(self._GetLibAttr(), self.ReleaseTLibAttr) + +## def GetTypeComp(self): +## "Return an ITypeComp pointer." + +## def GetDocumentation(self, index): +## "Return documentation for a type description." + + def IsName(self, name, lHashVal=0): + """Check if there is type information for this name. + + Returns the name with capitalization found in the type + library, or None. + """ + from ctypes import create_unicode_buffer + namebuf = create_unicode_buffer(name) + found = BOOL() + self.__com_IsName(namebuf, lHashVal, byref(found)) + if found.value: + return namebuf[:].split("\0", 1)[0] + return None + + def FindName(self, name, lHashVal=0): + # Hm... + # Could search for more than one name - should we support this? + found = c_ushort(1) + tinfo = POINTER(ITypeInfo)() + memid = MEMBERID() + self.__com_FindName(name, lHashVal, byref(tinfo), byref(memid), byref(found)) + if found.value: + return memid.value, tinfo + return None + +## def ReleaseTLibAttr(self, ptla): +## "Release TLIBATTR" + +################ + +def fix_name(name): + # Some typelibs contain BSTR with embedded NUL characters, + # probably the len of the BSTR is wrong. 
+ if name is None: + return name + return name.split("\0")[0] + +class ITypeInfo(IUnknown): + _iid_ = GUID("{00020401-0000-0000-C000-000000000046}") + + def GetTypeAttr(self): + "Return the TYPEATTR for this type" + return _deref_with_release(self._GetTypeAttr(), self.ReleaseTypeAttr) + +## def GetTypeComp(self): +## "Return ITypeComp pointer for this type" + + def GetDocumentation(self, memid): + """Return name, docstring, helpcontext, and helpfile for 'memid'.""" + name, doc, helpcontext, helpfile = self._GetDocumentation(memid) + return fix_name(name), fix_name(doc), helpcontext, fix_name(helpfile) + + def GetFuncDesc(self, index): + "Return FUNCDESC for index" + return _deref_with_release(self._GetFuncDesc(index), self.ReleaseFuncDesc) + + def GetVarDesc(self, index): + "Return VARDESC for index" + return _deref_with_release(self._GetVarDesc(index), self.ReleaseVarDesc) + + def GetNames(self, memid, count=1): + "Return names for memid" + names = (BSTR * count)() + cnames = c_uint() + self.__com_GetNames(memid, names, count, byref(cnames)) + return names[:cnames.value] + +## def GetRefTypeOfImplType(self, index): +## "Get the reftype of an implemented type" + +## def GetImplTypeFlags(self, index): +## "Get IMPLTYPEFLAGS" + + def GetIDsOfNames(self, *names): + "Maps function and argument names to identifiers" + rgsznames = (c_wchar_p * len(names))(*names) + ids = (MEMBERID * len(names))() + self.__com_GetIDsOfNames(rgsznames, len(names), ids) + return ids[:] + + + # not yet wrapped +## STDMETHOD(HRESULT, 'Invoke', [PVOID, MEMBERID, WORD, POINTER(DISPPARAMS), POINTER(VARIANT), POINTER(EXCEPINFO), POINTER(UINT)]), + +## def GetDllEntry(self, memid, invkind): +## "Return the dll name, function name, and ordinal for a function and invkind." + +## def GetRefTypeInfo(self, href): +## "Get type info for reftype" + + def AddressOfMember(self, memid, invkind): + "Get the address of a function in a dll" + raise "Check Me" + p = c_void_p() + self.__com_AddressOfMember(memid, invkind, byref(p)) + # XXX Would the default impl return the value of p? 
+ return p.value + + def CreateInstance(self, punkouter=None, interface=IUnknown, iid=None): + if iid is None: + iid = interface._iid_ + return self._CreateInstance(punkouter, byref(interface._iid_)) + +## def GetMops(self, index): +## "Get marshalling opcodes (whatever that is...)" + +## def GetContainingTypeLib(self): +## "Return index into and the containing type lib itself" + +## def ReleaseTypeAttr(self, pta): + +## def ReleaseFuncDesc(self, pfd): + +## def ReleaseVarDesc(self, pvd): + +################ + +class ITypeComp(IUnknown): + _iid_ = GUID("{00020403-0000-0000-C000-000000000046}") + + def Bind(self, name, flags=0, lHashVal=0): + "Bind to a name" + bindptr = BINDPTR() + desckind = DESCKIND() + ti = POINTER(ITypeInfo)() + self.__com_Bind(name, lHashVal, flags, byref(ti), byref(desckind), byref(bindptr)) + kind = desckind.value + if kind == DESCKIND_FUNCDESC: + fd = bindptr.lpfuncdesc[0] + fd.__ref__ = weakref.ref(fd, lambda dead: ti.ReleaseFuncDesc(bindptr.lpfuncdesc)) + return "function", fd + elif kind == DESCKIND_VARDESC: + vd = bindptr.lpvardesc[0] + vd.__ref__ = weakref.ref(vd, lambda dead: ti.ReleaseVarDesc(bindptr.lpvardesc)) + return "variable", vd + elif kind == DESCKIND_TYPECOMP: + return "type", bindptr.lptcomp + elif kind == DESCKIND_IMPLICITAPPOBJ: + raise NotImplementedError + elif kind == DESCKIND_NONE: + raise NameError("Name %s not found" % name) + + def BindType(self, name, lHashVal=0): + "Bind a type, and return both the typeinfo and typecomp for it." + ti = POINTER(ITypeInfo)() + tc = POINTER(ITypeComp)() + self.__com_BindType(name, lHashVal, byref(ti), byref(tc)) + return ti, tc + + +################ + +class ICreateTypeLib(IUnknown): + _iid_ = GUID("{00020406-0000-0000-C000-000000000046}") + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 2149 + +class ICreateTypeLib2(ICreateTypeLib): + _iid_ = GUID("{0002040F-0000-0000-C000-000000000046}") + +class ICreateTypeInfo(IUnknown): + _iid_ = GUID("{00020405-0000-0000-C000-000000000046}") + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 915 + + def SetFuncAndParamNames(self, index, *names): + rgszNames = (c_wchar_p * len(names))() + for i, n in enumerate(names): + rgszNames[i] = n + return self._SetFuncAndParamNames(index, rgszNames, len(names)) + +class IRecordInfo(IUnknown): + # C:/vc98/include/OAIDL.H 5974 + _iid_ = GUID("{0000002F-0000-0000-C000-000000000046}") + + def GetFieldNames(self, *args): + count = c_ulong() + self.__com_GetFieldNames(count, None) + array = (BSTR * count.value)() + self.__com_GetFieldNames(count, array) + result = array[:] + # XXX Should SysFreeString the array contents. How to? + return result + +IRecordInfo. 
_methods_ = [ + COMMETHOD([], HRESULT, 'RecordInit', + (['in'], c_void_p, 'pvNew')), + COMMETHOD([], HRESULT, 'RecordClear', + (['in'], c_void_p, 'pvExisting')), + COMMETHOD([], HRESULT, 'RecordCopy', + (['in'], c_void_p, 'pvExisting'), + (['in'], c_void_p, 'pvNew')), + COMMETHOD([], HRESULT, 'GetGuid', + (['out'], POINTER(GUID), 'pguid')), + COMMETHOD([], HRESULT, 'GetName', + (['out'], POINTER(BSTR), 'pbstrName')), + COMMETHOD([], HRESULT, 'GetSize', + (['out'], POINTER(c_ulong), 'pcbSize')), + COMMETHOD([], HRESULT, 'GetTypeInfo', + (['out'], POINTER(POINTER(ITypeInfo)), 'ppTypeInfo')), + COMMETHOD([], HRESULT, 'GetField', + (['in'], c_void_p, 'pvData'), + (['in'], c_wchar_p, 'szFieldName'), + (['out'], POINTER(VARIANT), 'pvarField')), + COMMETHOD([], HRESULT, 'GetFieldNoCopy', + (['in'], c_void_p, 'pvData'), + (['in'], c_wchar_p, 'szFieldName'), + (['out'], POINTER(VARIANT), 'pvarField'), + (['out'], POINTER(c_void_p), 'ppvDataCArray')), + COMMETHOD([], HRESULT, 'PutField', + (['in'], c_ulong, 'wFlags'), + (['in'], c_void_p, 'pvData'), + (['in'], c_wchar_p, 'szFieldName'), + (['in'], POINTER(VARIANT), 'pvarField')), + COMMETHOD([], HRESULT, 'PutFieldNoCopy', + (['in'], c_ulong, 'wFlags'), + (['in'], c_void_p, 'pvData'), + (['in'], c_wchar_p, 'szFieldName'), + (['in'], POINTER(VARIANT), 'pvarField')), + COMMETHOD([], HRESULT, 'GetFieldNames', + (['in', 'out'], POINTER(c_ulong), 'pcNames'), + (['in'], POINTER(BSTR), 'rgBstrNames')), + COMMETHOD([], BOOL, 'IsMatchingType', + (['in'], POINTER(IRecordInfo))), + COMMETHOD([], HRESULT, 'RecordCreate'), + COMMETHOD([], HRESULT, 'RecordCreateCopy', + (['in'], c_void_p, 'pvSource'), + (['out'], POINTER(c_void_p), 'ppvDest')), + COMMETHOD([], HRESULT, 'RecordDestroy', + (['in'], c_void_p, 'pvRecord'))] + + + +################################################################ +# functions +_oleaut32 = oledll.oleaut32 + +def GetRecordInfoFromTypeInfo(tinfo): + "Return an IRecordInfo pointer to the UDT described in tinfo" + ri = POINTER(IRecordInfo)() + _oleaut32.GetRecordInfoFromTypeInfo(tinfo, byref(ri)) + return ri + +def GetRecordInfoFromGuids(rGuidTypeLib, verMajor, verMinor, lcid, rGuidTypeInfo): + ri = POINTER(IRecordInfo)() + _oleaut32.GetRecordInfoFromGuids(byref(GUID(rGuidTypeLib)), + verMajor, verMinor, lcid, + byref(GUID(rGuidTypeInfo)), + byref(ri)) + return ri + +def LoadRegTypeLib(guid, wMajorVerNum, wMinorVerNum, lcid=0): + "Load a registered type library" + tlib = POINTER(ITypeLib)() + _oleaut32.LoadRegTypeLib(byref(GUID(guid)), wMajorVerNum, wMinorVerNum, lcid, byref(tlib)) + return tlib + +if hasattr(_oleaut32, "LoadTypeLibEx"): + def LoadTypeLibEx(szFile, regkind=REGKIND_NONE): + "Load, and optionally register a type library file" + ptl = POINTER(ITypeLib)() + _oleaut32.LoadTypeLibEx(c_wchar_p(szFile), regkind, byref(ptl)) + return ptl +else: + def LoadTypeLibEx(szFile, regkind=REGKIND_NONE): + "Load, and optionally register a type library file" + ptl = POINTER(ITypeLib)() + _oleaut32.LoadTypeLib(c_wchar_p(szFile), byref(ptl)) + return ptl + +def LoadTypeLib(szFile): + "Load and register a type library file" + tlib = POINTER(ITypeLib)() + _oleaut32.LoadTypeLib(c_wchar_p(szFile), byref(tlib)) + return tlib + +def UnRegisterTypeLib(libID, wVerMajor, wVerMinor, lcid=0, syskind=SYS_WIN32): + "Unregister a registered type library" + return _oleaut32.UnRegisterTypeLib(byref(GUID(libID)), wVerMajor, wVerMinor, lcid, syskind) + +def RegisterTypeLib(tlib, fullpath, helpdir=None): + "Register a type library in the registry" + return 
_oleaut32.RegisterTypeLib(tlib, c_wchar_p(fullpath), c_wchar_p(helpdir)) + +def CreateTypeLib(filename, syskind=SYS_WIN32): + "Return a ICreateTypeLib2 pointer" + ctlib = POINTER(ICreateTypeLib2)() + _oleaut32.CreateTypeLib2(syskind, c_wchar_p(filename), byref(ctlib)) + return ctlib + +def QueryPathOfRegTypeLib(libid, wVerMajor, wVerMinor, lcid=0): + "Return the path of a registered type library" + pathname = BSTR() + _oleaut32.QueryPathOfRegTypeLib(byref(GUID(libid)), wVerMajor, wVerMinor, lcid, byref(pathname)) + return pathname.value.split("\0")[0] + +################################################################ +# Structures + +class tagTLIBATTR(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 4437 + def __repr__(self): + return "TLIBATTR(GUID=%s, Version=%s.%s, LCID=%s, FLags=0x%x)" % \ + (self.guid, self.wMajorVerNum, self.wMinorVerNum, self.lcid, self.wLibFlags) +TLIBATTR = tagTLIBATTR + +class tagTYPEATTR(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 672 + def __repr__(self): + return "TYPEATTR(GUID=%s, typekind=%s, funcs=%s, vars=%s, impltypes=%s)" % \ + (self.guid, self.typekind, self.cFuncs, self.cVars, self.cImplTypes) +TYPEATTR = tagTYPEATTR + +class tagFUNCDESC(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 769 + def __repr__(self): + return "FUNCDESC(memid=%s, cParams=%s, cParamsOpt=%s, callconv=%s, invkind=%s, funckind=%s)" % \ + (self.memid, self.cParams, self.cParamsOpt, self.callconv, self.invkind, self.funckind) + + +FUNCDESC = tagFUNCDESC +class tagVARDESC(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 803 + pass +VARDESC = tagVARDESC + +class tagBINDPTR(Union): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 3075 + pass +BINDPTR = tagBINDPTR +class tagTYPEDESC(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 582 + pass +TYPEDESC = tagTYPEDESC +class tagIDLDESC(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 633 + pass +IDLDESC = tagIDLDESC + +class tagARRAYDESC(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 594 + pass + +################################################################ +# interface vtbl definitions + +ICreateTypeLib._methods_ = [ +# C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 2149 + COMMETHOD([], HRESULT, 'CreateTypeInfo', + (['in'], LPOLESTR, 'szName'), + (['in'], TYPEKIND, 'tkind'), + (['out'], POINTER(POINTER(ICreateTypeInfo)), 'ppCTInfo')), + STDMETHOD(HRESULT, 'SetName', [LPOLESTR]), + STDMETHOD(HRESULT, 'SetVersion', [WORD, WORD]), + STDMETHOD(HRESULT, 'SetGuid', [POINTER(GUID)]), + STDMETHOD(HRESULT, 'SetDocString', [LPOLESTR]), + STDMETHOD(HRESULT, 'SetHelpFileName', [LPOLESTR]), + STDMETHOD(HRESULT, 'SetHelpContext', [DWORD]), + STDMETHOD(HRESULT, 'SetLcid', [LCID]), + STDMETHOD(HRESULT, 'SetLibFlags', [UINT]), + STDMETHOD(HRESULT, 'SaveAllChanges', []), +] + +ICreateTypeLib2._methods_ = [ +# C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 2444 + STDMETHOD(HRESULT, 'DeleteTypeInfo', [POINTER(ITypeInfo)]), + STDMETHOD(HRESULT, 'SetCustData', [POINTER(GUID), POINTER(VARIANT)]), + STDMETHOD(HRESULT, 'SetHelpStringContext', [ULONG]), + STDMETHOD(HRESULT, 'SetHelpStringDll', [LPOLESTR]), + ] + +ITypeLib._methods_ = [ +# C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 4455 + COMMETHOD([], UINT, 'GetTypeInfoCount'), + COMMETHOD([], HRESULT, 'GetTypeInfo', + (['in'], UINT, 'index'), + (['out'], POINTER(POINTER(ITypeInfo)))), + COMMETHOD([], HRESULT, 'GetTypeInfoType', + (['in'], UINT, 'index'), + (['out'], 
POINTER(TYPEKIND))), + COMMETHOD([], HRESULT, 'GetTypeInfoOfGuid', + (['in'], POINTER(GUID)), + (['out'], POINTER(POINTER(ITypeInfo)))), + COMMETHOD([], HRESULT, 'GetLibAttr', + (['out'], POINTER(POINTER(TLIBATTR)))), + COMMETHOD([], HRESULT, 'GetTypeComp', + (['out'], POINTER(POINTER(ITypeComp)))), + COMMETHOD([], HRESULT, 'GetDocumentation', + (['in'], INT, 'index'), + (['out'], POINTER(BSTR)), + (['out'], POINTER(BSTR)), + (['out'], POINTER(DWORD)), + (['out'], POINTER(BSTR))), + COMMETHOD([], HRESULT, 'IsName', + # IsName changes the casing of the passed in name to + # match that in the type library. In the automatically + # wrapped version of this method, ctypes would pass a + # Python unicode string which would then be changed - + # very bad. So we have (see above) to implement the + # IsName method manually. + (['in', 'out'], LPOLESTR, 'name'), + (['in', 'optional'], DWORD, 'lHashVal', 0), + (['out'], POINTER(BOOL))), + STDMETHOD(HRESULT, 'FindName', [LPOLESTR, DWORD, POINTER(POINTER(ITypeInfo)), + POINTER(MEMBERID), POINTER(USHORT)]), + COMMETHOD([], None, 'ReleaseTLibAttr', + (['in'], POINTER(TLIBATTR))) +] + +ITypeInfo._methods_ = [ +# C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 3230 + COMMETHOD([], HRESULT, 'GetTypeAttr', + (['out'], POINTER(POINTER(TYPEATTR)), 'ppTypeAttr')), + COMMETHOD([], HRESULT, 'GetTypeComp', + (['out'], POINTER(POINTER(ITypeComp)))), + COMMETHOD([], HRESULT, 'GetFuncDesc', + (['in'], UINT, 'index'), + (['out'], POINTER(POINTER(FUNCDESC)))), + COMMETHOD([], HRESULT, 'GetVarDesc', + (['in'], UINT, 'index'), + (['out'], POINTER(POINTER(VARDESC)))), + STDMETHOD(HRESULT, 'GetNames', [MEMBERID, POINTER(BSTR), UINT, POINTER(UINT)]), + COMMETHOD([], HRESULT, 'GetRefTypeOfImplType', + (['in'], UINT, 'index'), + (['out'], POINTER(HREFTYPE))), + COMMETHOD([], HRESULT, 'GetImplTypeFlags', + (['in'], UINT, 'index'), + (['out'], POINTER(INT))), +## STDMETHOD(HRESULT, 'GetIDsOfNames', [POINTER(LPOLESTR), UINT, POINTER(MEMBERID)]), + # this one changed, to accept c_wchar_p array + STDMETHOD(HRESULT, 'GetIDsOfNames', [POINTER(c_wchar_p), UINT, POINTER(MEMBERID)]), + STDMETHOD(HRESULT, 'Invoke', [PVOID, MEMBERID, WORD, POINTER(DISPPARAMS), POINTER(VARIANT), POINTER(EXCEPINFO), POINTER(UINT)]), + + COMMETHOD([], HRESULT, 'GetDocumentation', + (['in'], MEMBERID, 'memid'), + (['out'], POINTER(BSTR), 'pBstrName'), + (['out'], POINTER(BSTR), 'pBstrDocString'), + (['out'], POINTER(DWORD), 'pdwHelpContext'), + (['out'], POINTER(BSTR), 'pBstrHelpFile')), + COMMETHOD([], HRESULT, 'GetDllEntry', + (['in'], MEMBERID, 'index'), + (['in'], INVOKEKIND, 'invkind'), + (['out'], POINTER(BSTR), 'pBstrDllName'), + (['out'], POINTER(BSTR), 'pBstrName'), + (['out'], POINTER(WORD), 'pwOrdinal')), + COMMETHOD([], HRESULT, 'GetRefTypeInfo', + (['in'], HREFTYPE, 'hRefType'), + (['out'], POINTER(POINTER(ITypeInfo)))), + STDMETHOD(HRESULT, 'AddressOfMember', [MEMBERID, INVOKEKIND, POINTER(PVOID)]), + COMMETHOD([], HRESULT, 'CreateInstance', + (['in'], POINTER(IUnknown), 'pUnkOuter'), + (['in'], POINTER(IID), 'refiid'), + (['out'], POINTER(POINTER(IUnknown)))), + COMMETHOD([], HRESULT, 'GetMops', + (['in'], MEMBERID, 'memid'), + (['out'], POINTER(BSTR))), + COMMETHOD([], HRESULT, 'GetContainingTypeLib', + (['out'], POINTER(POINTER(ITypeLib))), + (['out'], POINTER(UINT))), + COMMETHOD([], None, 'ReleaseTypeAttr', + (['in'], POINTER(TYPEATTR))), + COMMETHOD([], None, 'ReleaseFuncDesc', + (['in'], POINTER(FUNCDESC))), + COMMETHOD([], None, 'ReleaseVarDesc', + (['in'], POINTER(VARDESC))), +] + 
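For orientation, a minimal usage sketch of the ITypeLib/ITypeInfo wrappers whose vtables are defined above. It is illustrative only: the file name "stdole2.tlb" is an assumption (any type library path reachable on the machine would do), and it must run on Windows with comtypes available.

    from comtypes.typeinfo import LoadTypeLibEx

    # Load a type library without registering it (REGKIND_NONE is the default).
    tlib = LoadTypeLibEx("stdole2.tlb")  # assumed example path

    # GetLibAttr() returns the dereferenced TLIBATTR; ReleaseTLibAttr is hooked
    # up automatically via _deref_with_release.
    la = tlib.GetLibAttr()
    print(la.guid, la.wMajorVerNum, la.wMinorVerNum)

    # Walk every type description in the library, the same calls tlbparser uses.
    for i in range(tlib.GetTypeInfoCount()):
        tinfo = tlib.GetTypeInfo(i)
        ta = tinfo.GetTypeAttr()                      # TYPEATTR, auto-released
        name, doc = tinfo.GetDocumentation(-1)[:2]    # memid -1 = the type itself
        print(ta.typekind, name, doc)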
+ITypeComp._methods_ = [ +# C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 3090 + STDMETHOD(HRESULT, 'Bind', + [LPOLESTR, DWORD, WORD, POINTER(POINTER(ITypeInfo)), + POINTER(DESCKIND), POINTER(BINDPTR)]), + STDMETHOD(HRESULT, 'BindType', + [LPOLESTR, DWORD, POINTER(POINTER(ITypeInfo)), POINTER(POINTER(ITypeComp))]), +] + +ICreateTypeInfo._methods_ = [ +# C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 915 + STDMETHOD(HRESULT, 'SetGuid', [POINTER(GUID)]), + STDMETHOD(HRESULT, 'SetTypeFlags', [UINT]), + STDMETHOD(HRESULT, 'SetDocString', [LPOLESTR]), + STDMETHOD(HRESULT, 'SetHelpContext', [DWORD]), + STDMETHOD(HRESULT, 'SetVersion', [WORD, WORD]), +# STDMETHOD(HRESULT, 'AddRefTypeInfo', [POINTER(ITypeInfo), POINTER(HREFTYPE)]), + COMMETHOD([], HRESULT, 'AddRefTypeInfo', + (['in'], POINTER(ITypeInfo)), + (['out'], POINTER(HREFTYPE))), + STDMETHOD(HRESULT, 'AddFuncDesc', [UINT, POINTER(FUNCDESC)]), + STDMETHOD(HRESULT, 'AddImplType', [UINT, HREFTYPE]), + STDMETHOD(HRESULT, 'SetImplTypeFlags', [UINT, INT]), + STDMETHOD(HRESULT, 'SetAlignment', [WORD]), + STDMETHOD(HRESULT, 'SetSchema', [LPOLESTR]), + STDMETHOD(HRESULT, 'AddVarDesc', [UINT, POINTER(VARDESC)]), + STDMETHOD(HRESULT, 'SetFuncAndParamNames', [UINT, POINTER(c_wchar_p), UINT]), + STDMETHOD(HRESULT, 'SetVarName', [UINT, LPOLESTR]), + STDMETHOD(HRESULT, 'SetTypeDescAlias', [POINTER(TYPEDESC)]), + STDMETHOD(HRESULT, 'DefineFuncAsDllEntry', [UINT, LPOLESTR, LPOLESTR]), + STDMETHOD(HRESULT, 'SetFuncDocString', [UINT, LPOLESTR]), + STDMETHOD(HRESULT, 'SetVarDocString', [UINT, LPOLESTR]), + STDMETHOD(HRESULT, 'SetFuncHelpContext', [UINT, DWORD]), + STDMETHOD(HRESULT, 'SetVarHelpContext', [UINT, DWORD]), + STDMETHOD(HRESULT, 'SetMops', [UINT, BSTR]), + STDMETHOD(HRESULT, 'SetTypeIdldesc', [POINTER(IDLDESC)]), + STDMETHOD(HRESULT, 'LayOut', []), +] + +class IProvideClassInfo(IUnknown): + _iid_ = GUID("{B196B283-BAB4-101A-B69C-00AA00341D07}") + _methods_ = [ + # Returns the ITypeInfo interface for the object's coclass type information. + COMMETHOD([], HRESULT, "GetClassInfo", + ( ['out'], POINTER(POINTER(ITypeInfo)), "ppTI" ) ) + ] + +class IProvideClassInfo2(IProvideClassInfo): + _iid_ = GUID("{A6BC3AC0-DBAA-11CE-9DE3-00AA004BB851}") + _methods_ = [ + # Returns the GUID for the object's outgoing IID for its default event set. 
+ COMMETHOD([], HRESULT, "GetGUID", + ( ['in'], DWORD, "dwGuidKind" ), + ( ['out', 'retval'], POINTER(GUID), "pGUID" )) + ] + + +################################################################ +# Structure fields + +tagTLIBATTR._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 4437 + ('guid', GUID), + ('lcid', LCID), + ('syskind', SYSKIND), + ('wMajorVerNum', WORD), + ('wMinorVerNum', WORD), + ('wLibFlags', WORD), +] +class N11tagTYPEDESC5DOLLAR_203E(Union): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 584 + pass +N11tagTYPEDESC5DOLLAR_203E._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 584 + ('lptdesc', POINTER(tagTYPEDESC)), + ('lpadesc', POINTER(tagARRAYDESC)), + ('hreftype', HREFTYPE), +] +tagTYPEDESC._anonymous_ = ('_',) +tagTYPEDESC._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 582 + # Unnamed field renamed to '_' + ('_', N11tagTYPEDESC5DOLLAR_203E), + ('vt', VARTYPE), +] +tagIDLDESC._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 633 + ('dwReserved', ULONG_PTR), + ('wIDLFlags', USHORT), +] +tagTYPEATTR._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 672 + ('guid', GUID), + ('lcid', LCID), + ('dwReserved', DWORD), + ('memidConstructor', MEMBERID), + ('memidDestructor', MEMBERID), + ('lpstrSchema', LPOLESTR), + ('cbSizeInstance', DWORD), + ('typekind', TYPEKIND), + ('cFuncs', WORD), + ('cVars', WORD), + ('cImplTypes', WORD), + ('cbSizeVft', WORD), + ('cbAlignment', WORD), + ('wTypeFlags', WORD), + ('wMajorVerNum', WORD), + ('wMinorVerNum', WORD), + ('tdescAlias', TYPEDESC), + ('idldescType', IDLDESC), +] +class N10tagVARDESC5DOLLAR_205E(Union): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 807 + pass +N10tagVARDESC5DOLLAR_205E._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 807 + ('oInst', DWORD), + ('lpvarValue', POINTER(VARIANT)), +] +class tagELEMDESC(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 661 + pass +class N11tagELEMDESC5DOLLAR_204E(Union): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 663 + pass + +class tagPARAMDESC(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 609 + pass + +class tagPARAMDESCEX(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 601 + pass +LPPARAMDESCEX = POINTER(tagPARAMDESCEX) + +tagPARAMDESC._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 609 + ('pparamdescex', LPPARAMDESCEX), + ('wParamFlags', USHORT), +] +PARAMDESC = tagPARAMDESC + +N11tagELEMDESC5DOLLAR_204E._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 663 + ('idldesc', IDLDESC), + ('paramdesc', PARAMDESC), +] +tagELEMDESC._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 661 + ('tdesc', TYPEDESC), + # Unnamed field renamed to '_' + ('_', N11tagELEMDESC5DOLLAR_204E), +] +ELEMDESC = tagELEMDESC + +tagVARDESC._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 803 + ('memid', MEMBERID), + ('lpstrSchema', LPOLESTR), + # Unnamed field renamed to '_' + ('_', N10tagVARDESC5DOLLAR_205E), + ('elemdescVar', ELEMDESC), + ('wVarFlags', WORD), + ('varkind', VARKIND), +] +tagBINDPTR._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 3075 + ('lpfuncdesc', POINTER(FUNCDESC)), + ('lpvardesc', POINTER(VARDESC)), + ('lptcomp', POINTER(ITypeComp)), +] + +tagFUNCDESC._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 769 + ('memid', MEMBERID), + ('lprgscode', POINTER(SCODE)), + ('lprgelemdescParam', POINTER(ELEMDESC)), + ('funckind', 
FUNCKIND), + ('invkind', INVOKEKIND), + ('callconv', CALLCONV), + ('cParams', SHORT), + ('cParamsOpt', SHORT), + ('oVft', SHORT), + ('cScodes', SHORT), + ('elemdescFunc', ELEMDESC), + ('wFuncFlags', WORD), +] + +tagPARAMDESCEX._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 601 + ('cBytes', DWORD), + ('varDefaultValue', VARIANTARG), +] + +class tagSAFEARRAYBOUND(Structure): + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 226 + _fields_ = [ + ('cElements', DWORD), + ('lLbound', LONG), + ] +SAFEARRAYBOUND = tagSAFEARRAYBOUND + +tagARRAYDESC._fields_ = [ + # C:/Programme/gccxml/bin/Vc71/PlatformSDK/oaidl.h 594 + ('tdescElem', TYPEDESC), + ('cDims', USHORT), + ('rgbounds', SAFEARRAYBOUND * 1), +] diff --git a/venv/Lib/site-packages/comtypes/util.py b/venv/Lib/site-packages/comtypes/util.py new file mode 100644 index 00000000..0eaf864d --- /dev/null +++ b/venv/Lib/site-packages/comtypes/util.py @@ -0,0 +1,96 @@ +"""This module defines the funtions byref_at(cobj, offset) +and cast_field(struct, fieldname, fieldtype). +""" +from ctypes import * + +def _calc_offset(): + # Internal helper function that calculates where the object + # returned by a byref() call stores the pointer. + + # The definition of PyCArgObject in C code (that is the type of + # object that a byref() call returns): + class PyCArgObject(Structure): + class value(Union): + _fields_ = [("c", c_char), + ("h", c_short), + ("i", c_int), + ("l", c_long), + ("q", c_longlong), + ("d", c_double), + ("f", c_float), + ("p", c_void_p)] + # + # Thanks to Lenard Lindstrom for this tip: + # sizeof(PyObject_HEAD) is the same as object.__basicsize__. + # + _fields_ = [("PyObject_HEAD", c_byte * object.__basicsize__), + ("pffi_type", c_void_p), + ("tag", c_char), + ("value", value), + ("obj", c_void_p), + ("size", c_int)] + + _anonymous_ = ["value"] + + # additional checks to make sure that everything works as expected + + if sizeof(PyCArgObject) != type(byref(c_int())).__basicsize__: + raise RuntimeError("sizeof(PyCArgObject) invalid") + + obj = c_int() + ref = byref(obj) + + argobj = PyCArgObject.from_address(id(ref)) + + if argobj.obj != id(obj) or \ + argobj.p != addressof(obj) or \ + argobj.tag != 'P': + raise RuntimeError("PyCArgObject field definitions incorrect") + + return PyCArgObject.p.offset # offset of the pointer field + +################################################################ +# +# byref_at +# +def byref_at(obj, offset, + _byref=byref, + _c_void_p_from_address = c_void_p.from_address, + _byref_pointer_offset = _calc_offset() + ): + """byref_at(cobj, offset) behaves similar this C code: + + (((char *)&obj) + offset) + + In other words, the returned 'pointer' points to the address of + 'cobj' + 'offset'. 'offset' is in units of bytes. + """ + ref = _byref(obj) + # Change the pointer field in the created byref object by adding + # 'offset' to it: + _c_void_p_from_address(id(ref) + + _byref_pointer_offset).value += offset + return ref + + +################################################################ +# +# cast_field +# +def cast_field(struct, fieldname, fieldtype, offset=0, + _POINTER=POINTER, + _byref_at=byref_at, + _byref=byref, + _divmod=divmod, + _sizeof=sizeof, + ): + """cast_field(struct, fieldname, fieldtype) + + Return the contents of a struct field as it it were of type + 'fieldtype'. 
+ """ + fieldoffset = getattr(type(struct), fieldname).offset + return cast(_byref_at(struct, fieldoffset), + _POINTER(fieldtype))[0] + +__all__ = ["byref_at", "cast_field"] diff --git a/venv/Lib/site-packages/comtypes/viewobject.py b/venv/Lib/site-packages/comtypes/viewobject.py new file mode 100644 index 00000000..110c6a6f --- /dev/null +++ b/venv/Lib/site-packages/comtypes/viewobject.py @@ -0,0 +1,160 @@ +# XXX need to find out what the share from comtypes.dataobject. +from ctypes import * +from ctypes.wintypes import _RECTL, SIZEL, HDC, tagRECT, tagPOINT + +from comtypes import COMMETHOD +from comtypes import GUID +from comtypes import IUnknown + +class tagPALETTEENTRY(Structure): + _fields_ = [ + ('peRed', c_ubyte), + ('peGreen', c_ubyte), + ('peBlue', c_ubyte), + ('peFlags', c_ubyte), + ] +assert sizeof(tagPALETTEENTRY) == 4, sizeof(tagPALETTEENTRY) +assert alignment(tagPALETTEENTRY) == 1, alignment(tagPALETTEENTRY) + +class tagLOGPALETTE(Structure): + _pack_ = 2 + _fields_ = [ + ('palVersion', c_ushort), + ('palNumEntries', c_ushort), + ('palPalEntry', POINTER(tagPALETTEENTRY)), + ] +assert sizeof(tagLOGPALETTE) == 8, sizeof(tagLOGPALETTE) +assert alignment(tagLOGPALETTE) == 2, alignment(tagLOGPALETTE) + +class tagDVTARGETDEVICE(Structure): + _fields_ = [ + ('tdSize', c_ulong), + ('tdDriverNameOffset', c_ushort), + ('tdDeviceNameOffset', c_ushort), + ('tdPortNameOffset', c_ushort), + ('tdExtDevmodeOffset', c_ushort), + ('tdData', POINTER(c_ubyte)), + ] +assert sizeof(tagDVTARGETDEVICE) == 16, sizeof(tagDVTARGETDEVICE) +assert alignment(tagDVTARGETDEVICE) == 4, alignment(tagDVTARGETDEVICE) + +class tagExtentInfo(Structure): + _fields_ = [ + ('cb', c_ulong), + ('dwExtentMode', c_ulong), + ('sizelProposed', SIZEL), + ] + def __init__(self, *args, **kw): + self.cb = sizeof(self) + super(tagExtentInfo, self).__init__(*args, **kw) + def __repr__(self): + size = (self.sizelProposed.cx, self.sizelProposed.cy) + return "<ExtentInfo(mode=%s, size=%s) at %x>" % (self.dwExtentMode, + size, + id(self)) +assert sizeof(tagExtentInfo) == 16, sizeof(tagExtentInfo) +assert alignment(tagExtentInfo) == 4, alignment(tagExtentInfo) +DVEXTENTINFO = tagExtentInfo + +IAdviseSink = IUnknown # fake the interface + +class IViewObject(IUnknown): + _case_insensitive_ = False + _iid_ = GUID('{0000010D-0000-0000-C000-000000000046}') + _idlflags_ = [] + + _methods_ = [ + COMMETHOD([], HRESULT, 'Draw', + ( ['in'], c_ulong, 'dwDrawAspect' ), + ( ['in'], c_int, 'lindex' ), + ( ['in'], c_void_p, 'pvAspect' ), + ( ['in'], POINTER(tagDVTARGETDEVICE), 'ptd' ), + ( ['in'], HDC, 'hdcTargetDev' ), + ( ['in'], HDC, 'hdcDraw' ), + ( ['in'], POINTER(_RECTL), 'lprcBounds' ), + ( ['in'], POINTER(_RECTL), 'lprcWBounds' ), + ( ['in'], c_void_p, 'pfnContinue' ), # a pointer to a callback function + ( ['in'], c_ulong, 'dwContinue')), + COMMETHOD([], HRESULT, 'GetColorSet', + ( ['in'], c_ulong, 'dwDrawAspect' ), + ( ['in'], c_int, 'lindex' ), + ( ['in'], c_void_p, 'pvAspect' ), + ( ['in'], POINTER(tagDVTARGETDEVICE), 'ptd' ), + ( ['in'], HDC, 'hicTargetDev' ), + ( ['out'], POINTER(POINTER(tagLOGPALETTE)), 'ppColorSet' )), + COMMETHOD([], HRESULT, 'Freeze', + ( ['in'], c_ulong, 'dwDrawAspect' ), + ( ['in'], c_int, 'lindex' ), + ( ['in'], c_void_p, 'pvAspect' ), + ( ['out'], POINTER(c_ulong), 'pdwFreeze' )), + COMMETHOD([], HRESULT, 'Unfreeze', + ( ['in'], c_ulong, 'dwFreeze' )), + COMMETHOD([], HRESULT, 'SetAdvise', + ( ['in'], c_ulong, 'dwAspect' ), + ( ['in'], c_ulong, 'advf' ), + ( ['in'], POINTER(IAdviseSink), 'pAdvSink' )), + 
COMMETHOD([], HRESULT, 'GetAdvise', + ( ['out'], POINTER(c_ulong), 'pdwAspect' ), + ( ['out'], POINTER(c_ulong), 'pAdvf' ), + ( ['out'], POINTER(POINTER(IAdviseSink)), 'ppAdvSink' )), + ] + +class IViewObject2(IViewObject): + _case_insensitive_ = False + _iid_ = GUID('{00000127-0000-0000-C000-000000000046}') + _idlflags_ = [] + _methods_ = [ + COMMETHOD([], HRESULT, 'GetExtent', + ( ['in'], c_ulong, 'dwDrawAspect' ), + ( ['in'], c_int, 'lindex' ), + ( ['in'], POINTER(tagDVTARGETDEVICE), 'ptd' ), + ( ['out'], POINTER(SIZEL), 'lpsizel' )), + ] + +class IViewObjectEx(IViewObject2): + _case_insensitive_ = False + _iid_ = GUID('{3AF24292-0C96-11CE-A0CF-00AA00600AB8}') + _idlflags_ = [] + _methods_ = [ + COMMETHOD([], HRESULT, 'GetRect', + ( ['in'], c_ulong, 'dwAspect' ), + ( ['out'], POINTER(_RECTL), 'pRect' )), + COMMETHOD([], HRESULT, 'GetViewStatus', + ( ['out'], POINTER(c_ulong), 'pdwStatus' )), + COMMETHOD([], HRESULT, 'QueryHitPoint', + ( ['in'], c_ulong, 'dwAspect' ), + ( ['in'], POINTER(tagRECT), 'pRectBounds' ), + ( ['in'], tagPOINT, 'ptlLoc' ), + ( ['in'], c_int, 'lCloseHint' ), + ( ['out'], POINTER(c_ulong), 'pHitResult' )), + COMMETHOD([], HRESULT, 'QueryHitRect', + ( ['in'], c_ulong, 'dwAspect' ), + ( ['in'], POINTER(tagRECT), 'pRectBounds' ), + ( ['in'], POINTER(tagRECT), 'pRectLoc' ), + ( ['in'], c_int, 'lCloseHint' ), + ( ['out'], POINTER(c_ulong), 'pHitResult' )), + COMMETHOD([], HRESULT, 'GetNaturalExtent', + ( ['in'], c_ulong, 'dwAspect' ), + ( ['in'], c_int, 'lindex' ), + ( ['in'], POINTER(tagDVTARGETDEVICE), 'ptd' ), + ( ['in'], HDC, 'hicTargetDev' ), + ( ['in'], POINTER(tagExtentInfo), 'pExtentInfo' ), + ( ['out'], POINTER(SIZEL), 'pSizel' )), + ] + + +DVASPECT = c_int # enum +DVASPECT_CONTENT = 1 +DVASPECT_THUMBNAIL = 2 +DVASPECT_ICON = 4 +DVASPECT_DOCPRINT = 8 + +DVASPECT2 = c_int # enum +DVASPECT_OPAQUE = 16 +DVASPECT_TRANSPARENT = 32 + +DVEXTENTMODE = c_int # enum +# Container asks the object how big it wants to be to exactly fit its content: +DVEXTENT_CONTENT = 0 +# The container proposes a size to the object for its use in resizing: +DVEXTENT_INTEGRAL = 1 diff --git a/venv/Lib/site-packages/dateutil/__init__.py b/venv/Lib/site-packages/dateutil/__init__.py new file mode 100644 index 00000000..0defb82e --- /dev/null +++ b/venv/Lib/site-packages/dateutil/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +try: + from ._version import version as __version__ +except ImportError: + __version__ = 'unknown' + +__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', + 'utils', 'zoneinfo'] diff --git a/venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..81b47608 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-36.pyc b/venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-36.pyc new file mode 100644 index 00000000..5e343c05 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/__pycache__/_common.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-36.pyc b/venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-36.pyc new file mode 100644 index 00000000..a59e1fab Binary files /dev/null and b/venv/Lib/site-packages/dateutil/__pycache__/_version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-36.pyc 
b/venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-36.pyc new file mode 100644 index 00000000..4dd715c5 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/__pycache__/easter.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-36.pyc b/venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-36.pyc new file mode 100644 index 00000000..f2c2f4fc Binary files /dev/null and b/venv/Lib/site-packages/dateutil/__pycache__/relativedelta.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-36.pyc b/venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-36.pyc new file mode 100644 index 00000000..50ffc873 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/__pycache__/rrule.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/tzwin.cpython-36.pyc b/venv/Lib/site-packages/dateutil/__pycache__/tzwin.cpython-36.pyc new file mode 100644 index 00000000..7eeeae25 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/__pycache__/tzwin.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/__pycache__/utils.cpython-36.pyc b/venv/Lib/site-packages/dateutil/__pycache__/utils.cpython-36.pyc new file mode 100644 index 00000000..8669298d Binary files /dev/null and b/venv/Lib/site-packages/dateutil/__pycache__/utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/_common.py b/venv/Lib/site-packages/dateutil/_common.py new file mode 100644 index 00000000..4eb2659b --- /dev/null +++ b/venv/Lib/site-packages/dateutil/_common.py @@ -0,0 +1,43 @@ +""" +Common code used in multiple modules. +""" + + +class weekday(object): + __slots__ = ["weekday", "n"] + + def __init__(self, weekday, n=None): + self.weekday = weekday + self.n = n + + def __call__(self, n): + if n == self.n: + return self + else: + return self.__class__(self.weekday, n) + + def __eq__(self, other): + try: + if self.weekday != other.weekday or self.n != other.n: + return False + except AttributeError: + return False + return True + + def __hash__(self): + return hash(( + self.weekday, + self.n, + )) + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] + if not self.n: + return s + else: + return "%s(%+d)" % (s, self.n) + +# vim:ts=4:sw=4:et diff --git a/venv/Lib/site-packages/dateutil/_version.py b/venv/Lib/site-packages/dateutil/_version.py new file mode 100644 index 00000000..eac12096 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/_version.py @@ -0,0 +1,4 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +version = '2.8.1' diff --git a/venv/Lib/site-packages/dateutil/easter.py b/venv/Lib/site-packages/dateutil/easter.py new file mode 100644 index 00000000..53b7c789 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/easter.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic easter computing method for any given year, using +Western, Orthodox or Julian algorithms. 
+""" + +import datetime + +__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] + +EASTER_JULIAN = 1 +EASTER_ORTHODOX = 2 +EASTER_WESTERN = 3 + + +def easter(year, method=EASTER_WESTERN): + """ + This method was ported from the work done by GM Arts, + on top of the algorithm by Claus Tondering, which was + based in part on the algorithm of Ouding (1940), as + quoted in "Explanatory Supplement to the Astronomical + Almanac", P. Kenneth Seidelmann, editor. + + This algorithm implements three different easter + calculation methods: + + 1 - Original calculation in Julian calendar, valid in + dates after 326 AD + 2 - Original method, with date converted to Gregorian + calendar, valid in years 1583 to 4099 + 3 - Revised method, in Gregorian calendar, valid in + years 1583 to 4099 as well + + These methods are represented by the constants: + + * ``EASTER_JULIAN = 1`` + * ``EASTER_ORTHODOX = 2`` + * ``EASTER_WESTERN = 3`` + + The default method is method 3. + + More about the algorithm may be found at: + + `GM Arts: Easter Algorithms <http://www.gmarts.org/index.php?go=415>`_ + + and + + `The Calendar FAQ: Easter <https://www.tondering.dk/claus/cal/easter.php>`_ + + """ + + if not (1 <= method <= 3): + raise ValueError("invalid method") + + # g - Golden year - 1 + # c - Century + # h - (23 - Epact) mod 30 + # i - Number of days from March 21 to Paschal Full Moon + # j - Weekday for PFM (0=Sunday, etc) + # p - Number of days from March 21 to Sunday on or before PFM + # (-6 to 28 methods 1 & 3, to 56 for method 2) + # e - Extra days to add for method 2 (converting Julian + # date to Gregorian date) + + y = year + g = y % 19 + e = 0 + if method < 3: + # Old method + i = (19*g + 15) % 30 + j = (y + y//4 + i) % 7 + if method == 2: + # Extra dates to convert Julian to Gregorian date + e = 10 + if y > 1600: + e = e + y//100 - 16 - (y//100 - 16)//4 + else: + # New method + c = y//100 + h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 + i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) + j = (y + y//4 + i + 2 - c + c//4) % 7 + + # p can be from -6 to 56 corresponding to dates 22 March to 23 May + # (later dates apply to method 2, although 23 May never actually occurs) + p = i - j + e + d = 1 + (p + 27 + (p + 6)//40) % 31 + m = 3 + (p + 26)//30 + return datetime.date(int(y), int(m), int(d)) diff --git a/venv/Lib/site-packages/dateutil/parser/__init__.py b/venv/Lib/site-packages/dateutil/parser/__init__.py new file mode 100644 index 00000000..d174b0e4 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/parser/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +from ._parser import parse, parser, parserinfo, ParserError +from ._parser import DEFAULTPARSER, DEFAULTTZPARSER +from ._parser import UnknownTimezoneWarning + +from ._parser import __doc__ + +from .isoparser import isoparser, isoparse + +__all__ = ['parse', 'parser', 'parserinfo', + 'isoparse', 'isoparser', + 'ParserError', + 'UnknownTimezoneWarning'] + + +### +# Deprecate portions of the private interface so that downstream code that +# is improperly relying on it is given *some* notice. 
+ + +def __deprecated_private_func(f): + from functools import wraps + import warnings + + msg = ('{name} is a private function and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=f.__name__) + + @wraps(f) + def deprecated_func(*args, **kwargs): + warnings.warn(msg, DeprecationWarning) + return f(*args, **kwargs) + + return deprecated_func + +def __deprecate_private_class(c): + import warnings + + msg = ('{name} is a private class and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=c.__name__) + + class private_class(c): + __doc__ = c.__doc__ + + def __init__(self, *args, **kwargs): + warnings.warn(msg, DeprecationWarning) + super(private_class, self).__init__(*args, **kwargs) + + private_class.__name__ = c.__name__ + + return private_class + + +from ._parser import _timelex, _resultbase +from ._parser import _tzparser, _parsetz + +_timelex = __deprecate_private_class(_timelex) +_tzparser = __deprecate_private_class(_tzparser) +_resultbase = __deprecate_private_class(_resultbase) +_parsetz = __deprecated_private_func(_parsetz) diff --git a/venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..ded9e444 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/parser/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-36.pyc b/venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-36.pyc new file mode 100644 index 00000000..fb16dd6a Binary files /dev/null and b/venv/Lib/site-packages/dateutil/parser/__pycache__/_parser.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-36.pyc b/venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-36.pyc new file mode 100644 index 00000000..e95beea5 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/parser/__pycache__/isoparser.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/parser/_parser.py b/venv/Lib/site-packages/dateutil/parser/_parser.py new file mode 100644 index 00000000..458aa6a3 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/parser/_parser.py @@ -0,0 +1,1609 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic date/time string parser which is able to parse +most known formats to represent a date and/or time. + +This module attempts to be forgiving with regards to unlikely input formats, +returning a datetime object even for dates which are ambiguous. If an element +of a date/time stamp is omitted, the following rules are applied: + +- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour + on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is + specified. +- If a time zone is omitted, a timezone-naive datetime is returned. + +If any other elements are missing, they are taken from the +:class:`datetime.datetime` object passed to the parameter ``default``. If this +results in a day number exceeding the valid number of days per month, the +value falls back to the end of the month. 
+ +Additional resources about date/time string formats can be found below: + +- `A summary of the international standard date and time notation + <http://www.cl.cam.ac.uk/~mgk25/iso-time.html>`_ +- `W3C Date and Time Formats <http://www.w3.org/TR/NOTE-datetime>`_ +- `Time Formats (Planetary Rings Node) <https://pds-rings.seti.org:443/tools/time_formats.html>`_ +- `CPAN ParseDate module + <http://search.cpan.org/~muir/Time-modules-2013.0912/lib/Time/ParseDate.pm>`_ +- `Java SimpleDateFormat Class + <https://docs.oracle.com/javase/6/docs/api/java/text/SimpleDateFormat.html>`_ +""" +from __future__ import unicode_literals + +import datetime +import re +import string +import time +import warnings + +from calendar import monthrange +from io import StringIO + +import six +from six import integer_types, text_type + +from decimal import Decimal + +from warnings import warn + +from .. import relativedelta +from .. import tz + +__all__ = ["parse", "parserinfo", "ParserError"] + + +# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth +# making public and/or figuring out if there is something we can +# take off their plate. +class _timelex(object): + # Fractional seconds are sometimes split by a comma + _split_decimal = re.compile("([.,])") + + def __init__(self, instream): + if six.PY2: + # In Python 2, we can't duck type properly because unicode has + # a 'decode' function, and we'd be double-decoding + if isinstance(instream, (bytes, bytearray)): + instream = instream.decode() + else: + if getattr(instream, 'decode', None) is not None: + instream = instream.decode() + + if isinstance(instream, text_type): + instream = StringIO(instream) + elif getattr(instream, 'read', None) is None: + raise TypeError('Parser must be a string or character stream, not ' + '{itype}'.format(itype=instream.__class__.__name__)) + + self.instream = instream + self.charstack = [] + self.tokenstack = [] + self.eof = False + + def get_token(self): + """ + This function breaks the time string into lexical units (tokens), which + can be parsed by the parser. Lexical units are demarcated by changes in + the character set, so any continuous string of letters is considered + one unit, any continuous string of numbers is considered one unit. + + The main complication arises from the fact that dots ('.') can be used + both as separators (e.g. "Sep.20.2009") or decimal points (e.g. + "4:30:21.447"). As such, it is necessary to read the full context of + any dot-separated strings before breaking it into tokens; as such, this + function maintains a "token stack", for when the ambiguous context + demands that multiple tokens be parsed at once. + """ + if self.tokenstack: + return self.tokenstack.pop(0) + + seenletters = False + token = None + state = None + + while not self.eof: + # We only realize that we've reached the end of a token when we + # find a character that's not part of the current token - since + # that character may be part of the next token, it's stored in the + # charstack. + if self.charstack: + nextchar = self.charstack.pop(0) + else: + nextchar = self.instream.read(1) + while nextchar == '\x00': + nextchar = self.instream.read(1) + + if not nextchar: + self.eof = True + break + elif not state: + # First character of the token - determines if we're starting + # to parse a word, a number or something else. 
+ token = nextchar + if self.isword(nextchar): + state = 'a' + elif self.isnum(nextchar): + state = '0' + elif self.isspace(nextchar): + token = ' ' + break # emit token + else: + break # emit token + elif state == 'a': + # If we've already started reading a word, we keep reading + # letters until we find something that's not part of a word. + seenletters = True + if self.isword(nextchar): + token += nextchar + elif nextchar == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0': + # If we've already started reading a number, we keep reading + # numbers until we find something that doesn't fit. + if self.isnum(nextchar): + token += nextchar + elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == 'a.': + # If we've seen some letters and a dot separator, continue + # parsing, and the tokens will be broken up later. + seenletters = True + if nextchar == '.' or self.isword(nextchar): + token += nextchar + elif self.isnum(nextchar) and token[-1] == '.': + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0.': + # If we've seen at least one dot separator, keep going, we'll + # break up the tokens later. + if nextchar == '.' or self.isnum(nextchar): + token += nextchar + elif self.isword(nextchar) and token[-1] == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + + if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or + token[-1] in '.,')): + l = self._split_decimal.split(token) + token = l[0] + for tok in l[1:]: + if tok: + self.tokenstack.append(tok) + + if state == '0.' and token.count('.') == 0: + token = token.replace(',', '.') + + return token + + def __iter__(self): + return self + + def __next__(self): + token = self.get_token() + if token is None: + raise StopIteration + + return token + + def next(self): + return self.__next__() # Python 2.x support + + @classmethod + def split(cls, s): + return list(cls(s)) + + @classmethod + def isword(cls, nextchar): + """ Whether or not the next character is part of a word """ + return nextchar.isalpha() + + @classmethod + def isnum(cls, nextchar): + """ Whether the next character is part of a number """ + return nextchar.isdigit() + + @classmethod + def isspace(cls, nextchar): + """ Whether the next character is whitespace """ + return nextchar.isspace() + + +class _resultbase(object): + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def _repr(self, classname): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (classname, ", ".join(l)) + + def __len__(self): + return (sum(getattr(self, attr) is not None + for attr in self.__slots__)) + + def __repr__(self): + return self._repr(self.__class__.__name__) + + +class parserinfo(object): + """ + Class which handles what inputs are accepted. Subclass this to customize + the language and acceptable values for each parameter. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. Default is ``False``. 
+ + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + Default is ``False``. + """ + + # m from a.m/p.m, t from ISO T separator + JUMP = [" ", ".", ",", ";", "-", "/", "'", + "at", "on", "and", "ad", "m", "t", "of", + "st", "nd", "rd", "th"] + + WEEKDAYS = [("Mon", "Monday"), + ("Tue", "Tuesday"), # TODO: "Tues" + ("Wed", "Wednesday"), + ("Thu", "Thursday"), # TODO: "Thurs" + ("Fri", "Friday"), + ("Sat", "Saturday"), + ("Sun", "Sunday")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), # TODO: "Febr" + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] + HMS = [("h", "hour", "hours"), + ("m", "minute", "minutes"), + ("s", "second", "seconds")] + AMPM = [("am", "a"), + ("pm", "p")] + UTCZONE = ["UTC", "GMT", "Z", "z"] + PERTAIN = ["of"] + TZOFFSET = {} + # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", + # "Anno Domini", "Year of Our Lord"] + + def __init__(self, dayfirst=False, yearfirst=False): + self._jump = self._convert(self.JUMP) + self._weekdays = self._convert(self.WEEKDAYS) + self._months = self._convert(self.MONTHS) + self._hms = self._convert(self.HMS) + self._ampm = self._convert(self.AMPM) + self._utczone = self._convert(self.UTCZONE) + self._pertain = self._convert(self.PERTAIN) + + self.dayfirst = dayfirst + self.yearfirst = yearfirst + + self._year = time.localtime().tm_year + self._century = self._year // 100 * 100 + + def _convert(self, lst): + dct = {} + for i, v in enumerate(lst): + if isinstance(v, tuple): + for v in v: + dct[v.lower()] = i + else: + dct[v.lower()] = i + return dct + + def jump(self, name): + return name.lower() in self._jump + + def weekday(self, name): + try: + return self._weekdays[name.lower()] + except KeyError: + pass + return None + + def month(self, name): + try: + return self._months[name.lower()] + 1 + except KeyError: + pass + return None + + def hms(self, name): + try: + return self._hms[name.lower()] + except KeyError: + return None + + def ampm(self, name): + try: + return self._ampm[name.lower()] + except KeyError: + return None + + def pertain(self, name): + return name.lower() in self._pertain + + def utczone(self, name): + return name.lower() in self._utczone + + def tzoffset(self, name): + if name in self._utczone: + return 0 + + return self.TZOFFSET.get(name) + + def convertyear(self, year, century_specified=False): + """ + Converts two-digit years to year within [-50, 49] + range of self._year (current local time) + """ + + # Function contract is that the year is always positive + assert year >= 0 + + if year < 100 and not century_specified: + # assume current century to start + year += self._century + + if year >= self._year + 50: # if too far in future + year -= 100 + elif year < self._year - 50: # if too far in past + year += 100 + + return year + + def validate(self, res): + # move to info + if res.year is not None: + res.year = self.convertyear(res.year, res.century_specified) + + if ((res.tzoffset == 0 and not res.tzname) or + (res.tzname == 'Z' or res.tzname == 'z')): + res.tzname = "UTC" + res.tzoffset = 0 + elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): + res.tzoffset = 0 + return True + + +class _ymd(list): + def __init__(self, *args, **kwargs): + 
super(self.__class__, self).__init__(*args, **kwargs) + self.century_specified = False + self.dstridx = None + self.mstridx = None + self.ystridx = None + + @property + def has_year(self): + return self.ystridx is not None + + @property + def has_month(self): + return self.mstridx is not None + + @property + def has_day(self): + return self.dstridx is not None + + def could_be_day(self, value): + if self.has_day: + return False + elif not self.has_month: + return 1 <= value <= 31 + elif not self.has_year: + # Be permissive, assume leap year + month = self[self.mstridx] + return 1 <= value <= monthrange(2000, month)[1] + else: + month = self[self.mstridx] + year = self[self.ystridx] + return 1 <= value <= monthrange(year, month)[1] + + def append(self, val, label=None): + if hasattr(val, '__len__'): + if val.isdigit() and len(val) > 2: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + elif val > 100: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + + super(self.__class__, self).append(int(val)) + + if label == 'M': + if self.has_month: + raise ValueError('Month is already set') + self.mstridx = len(self) - 1 + elif label == 'D': + if self.has_day: + raise ValueError('Day is already set') + self.dstridx = len(self) - 1 + elif label == 'Y': + if self.has_year: + raise ValueError('Year is already set') + self.ystridx = len(self) - 1 + + def _resolve_from_stridxs(self, strids): + """ + Try to resolve the identities of year/month/day elements using + ystridx, mstridx, and dstridx, if enough of these are specified. + """ + if len(self) == 3 and len(strids) == 2: + # we can back out the remaining stridx value + missing = [x for x in range(3) if x not in strids.values()] + key = [x for x in ['y', 'm', 'd'] if x not in strids] + assert len(missing) == len(key) == 1 + key = key[0] + val = missing[0] + strids[key] = val + + assert len(self) == len(strids) # otherwise this should not be called + out = {key: self[strids[key]] for key in strids} + return (out.get('y'), out.get('m'), out.get('d')) + + def resolve_ymd(self, yearfirst, dayfirst): + len_ymd = len(self) + year, month, day = (None, None, None) + + strids = (('y', self.ystridx), + ('m', self.mstridx), + ('d', self.dstridx)) + + strids = {key: val for key, val in strids if val is not None} + if (len(self) == len(strids) > 0 or + (len(self) == 3 and len(strids) == 2)): + return self._resolve_from_stridxs(strids) + + mstridx = self.mstridx + + if len_ymd > 3: + raise ValueError("More than three YMD values") + elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): + # One member, or two members with a month string + if mstridx is not None: + month = self[mstridx] + # since mstridx is 0 or 1, self[mstridx-1] always + # looks up the other element + other = self[mstridx - 1] + else: + other = self[0] + + if len_ymd > 1 or mstridx is None: + if other > 31: + year = other + else: + day = other + + elif len_ymd == 2: + # Two members with numbers + if self[0] > 31: + # 99-01 + year, month = self + elif self[1] > 31: + # 01-99 + month, year = self + elif dayfirst and self[1] <= 12: + # 13-01 + day, month = self + else: + # 01-13 + month, day = self + + elif len_ymd == 3: + # Three members + if mstridx == 0: + if self[1] > 31: + # Apr-2003-25 + month, year, day = self + else: + month, day, year = self + elif mstridx == 1: + if self[0] > 31 or (yearfirst and self[2] <= 31): + # 99-Jan-01 + year, month, day 
= self + else: + # 01-Jan-01 + # Give precedence to day-first, since + # two-digit years is usually hand-written. + day, month, year = self + + elif mstridx == 2: + # WTF!? + if self[1] > 31: + # 01-99-Jan + day, year, month = self + else: + # 99-01-Jan + year, day, month = self + + else: + if (self[0] > 31 or + self.ystridx == 0 or + (yearfirst and self[1] <= 12 and self[2] <= 31)): + # 99-01-01 + if dayfirst and self[2] <= 12: + year, day, month = self + else: + year, month, day = self + elif self[0] > 12 or (dayfirst and self[1] <= 12): + # 13-01-01 + day, month, year = self + else: + # 01-13-01 + month, day, year = self + + return year, month, day + + +class parser(object): + def __init__(self, info=None): + self.info = info or parserinfo() + + def parse(self, timestr, default=None, + ignoretz=False, tzinfos=None, **kwargs): + """ + Parse the date/time string into a :class:`datetime.datetime` object. + + :param timestr: + Any date/time string using the supported formats. + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a + naive :class:`datetime.datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param \\*\\*kwargs: + Keyword arguments as passed to ``_parse()``. + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ParserError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises TypeError: + Raised for non-string or character stream input. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
+ """ + + if default is None: + default = datetime.datetime.now().replace(hour=0, minute=0, + second=0, microsecond=0) + + res, skipped_tokens = self._parse(timestr, **kwargs) + + if res is None: + raise ParserError("Unknown string format: %s", timestr) + + if len(res) == 0: + raise ParserError("String does not contain a date: %s", timestr) + + try: + ret = self._build_naive(res, default) + except ValueError as e: + six.raise_from(ParserError(e.args[0] + ": %s", timestr), e) + + if not ignoretz: + ret = self._build_tzaware(ret, res, tzinfos) + + if kwargs.get('fuzzy_with_tokens', False): + return ret, skipped_tokens + else: + return ret + + class _result(_resultbase): + __slots__ = ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond", + "tzname", "tzoffset", "ampm","any_unused_tokens"] + + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, + fuzzy_with_tokens=False): + """ + Private method which performs the heavy lifting of parsing, called from + ``parse()``, which passes on its ``kwargs`` to this function. + + :param timestr: + The string to parse. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. If set to ``None``, this value is retrieved from the + current :class:`parserinfo` object (which itself defaults to + ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + If this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. 
doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + """ + if fuzzy_with_tokens: + fuzzy = True + + info = self.info + + if dayfirst is None: + dayfirst = info.dayfirst + + if yearfirst is None: + yearfirst = info.yearfirst + + res = self._result() + l = _timelex.split(timestr) # Splits the timestr into tokens + + skipped_idxs = [] + + # year/month/day list + ymd = _ymd() + + len_l = len(l) + i = 0 + try: + while i < len_l: + + # Check if it's a number + value_repr = l[i] + try: + value = float(value_repr) + except ValueError: + value = None + + if value is not None: + # Numeric token + i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) + + # Check weekday + elif info.weekday(l[i]) is not None: + value = info.weekday(l[i]) + res.weekday = value + + # Check month name + elif info.month(l[i]) is not None: + value = info.month(l[i]) + ymd.append(value, 'M') + + if i + 1 < len_l: + if l[i + 1] in ('-', '/'): + # Jan-01[-99] + sep = l[i + 1] + ymd.append(l[i + 2]) + + if i + 3 < len_l and l[i + 3] == sep: + # Jan-01-99 + ymd.append(l[i + 4]) + i += 2 + + i += 2 + + elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and + info.pertain(l[i + 2])): + # Jan of 01 + # In this case, 01 is clearly year + if l[i + 4].isdigit(): + # Convert it here to become unambiguous + value = int(l[i + 4]) + year = str(info.convertyear(value)) + ymd.append(year, 'Y') + else: + # Wrong guess + pass + # TODO: not hit in tests + i += 4 + + # Check am/pm + elif info.ampm(l[i]) is not None: + value = info.ampm(l[i]) + val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy) + + if val_is_ampm: + res.hour = self._adjust_ampm(res.hour, value) + res.ampm = value + + elif fuzzy: + skipped_idxs.append(i) + + # Check for a timezone name + elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): + res.tzname = l[i] + res.tzoffset = info.tzoffset(res.tzname) + + # Check for something like GMT+3, or BRST+3. Notice + # that it doesn't mean "I am 3 hours after GMT", but + # "my time +3 is GMT". If found, we reverse the + # logic so that timezone parsing code will get it + # right. + if i + 1 < len_l and l[i + 1] in ('+', '-'): + l[i + 1] = ('+', '-')[l[i + 1] == '+'] + res.tzoffset = None + if info.utczone(res.tzname): + # With something like GMT+3, the timezone + # is *not* GMT. + res.tzname = None + + # Check for a numbered timezone + elif res.hour is not None and l[i] in ('+', '-'): + signal = (-1, 1)[l[i] == '+'] + len_li = len(l[i + 1]) + + # TODO: check that l[i + 1] is integer? + if len_li == 4: + # -0300 + hour_offset = int(l[i + 1][:2]) + min_offset = int(l[i + 1][2:]) + elif i + 2 < len_l and l[i + 2] == ':': + # -03:00 + hour_offset = int(l[i + 1]) + min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
+ i += 2 + elif len_li <= 2: + # -[0]3 + hour_offset = int(l[i + 1][:2]) + min_offset = 0 + else: + raise ValueError(timestr) + + res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) + + # Look for a timezone name between parenthesis + if (i + 5 < len_l and + info.jump(l[i + 2]) and l[i + 3] == '(' and + l[i + 5] == ')' and + 3 <= len(l[i + 4]) and + self._could_be_tzname(res.hour, res.tzname, + None, l[i + 4])): + # -0300 (BRST) + res.tzname = l[i + 4] + i += 4 + + i += 1 + + # Check jumps + elif not (info.jump(l[i]) or fuzzy): + raise ValueError(timestr) + + else: + skipped_idxs.append(i) + i += 1 + + # Process year/month/day + year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) + + res.century_specified = ymd.century_specified + res.year = year + res.month = month + res.day = day + + except (IndexError, ValueError): + return None, None + + if not info.validate(res): + return None, None + + if fuzzy_with_tokens: + skipped_tokens = self._recombine_skipped(l, skipped_idxs) + return res, tuple(skipped_tokens) + else: + return res, None + + def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): + # Token is a number + value_repr = tokens[idx] + try: + value = self._to_decimal(value_repr) + except Exception as e: + six.raise_from(ValueError('Unknown numeric token'), e) + + len_li = len(value_repr) + + len_l = len(tokens) + + if (len(ymd) == 3 and len_li in (2, 4) and + res.hour is None and + (idx + 1 >= len_l or + (tokens[idx + 1] != ':' and + info.hms(tokens[idx + 1]) is None))): + # 19990101T23[59] + s = tokens[idx] + res.hour = int(s[:2]) + + if len_li == 4: + res.minute = int(s[2:]) + + elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): + # YYMMDD or HHMMSS[.ss] + s = tokens[idx] + + if not ymd and '.' not in tokens[idx]: + ymd.append(s[:2]) + ymd.append(s[2:4]) + ymd.append(s[4:]) + else: + # 19990101T235959[.59] + + # TODO: Check if res attributes already set. + res.hour = int(s[:2]) + res.minute = int(s[2:4]) + res.second, res.microsecond = self._parsems(s[4:]) + + elif len_li in (8, 12, 14): + # YYYYMMDD + s = tokens[idx] + ymd.append(s[:4], 'Y') + ymd.append(s[4:6]) + ymd.append(s[6:8]) + + if len_li > 8: + res.hour = int(s[8:10]) + res.minute = int(s[10:12]) + + if len_li > 12: + res.second = int(s[12:]) + + elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: + # HH[ ]h or MM[ ]m or SS[.ss][ ]s + hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) + (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) + if hms is not None: + # TODO: checking that hour/minute/second are not + # already set? + self._assign_hms(res, value_repr, hms) + + elif idx + 2 < len_l and tokens[idx + 1] == ':': + # HH:MM[:SS[.ss]] + res.hour = int(value) + value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? 
+ (res.minute, res.second) = self._parse_min_sec(value) + + if idx + 4 < len_l and tokens[idx + 3] == ':': + res.second, res.microsecond = self._parsems(tokens[idx + 4]) + + idx += 2 + + idx += 2 + + elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'): + sep = tokens[idx + 1] + ymd.append(value_repr) + + if idx + 2 < len_l and not info.jump(tokens[idx + 2]): + if tokens[idx + 2].isdigit(): + # 01-01[-01] + ymd.append(tokens[idx + 2]) + else: + # 01-Jan[-01] + value = info.month(tokens[idx + 2]) + + if value is not None: + ymd.append(value, 'M') + else: + raise ValueError() + + if idx + 3 < len_l and tokens[idx + 3] == sep: + # We have three members + value = info.month(tokens[idx + 4]) + + if value is not None: + ymd.append(value, 'M') + else: + ymd.append(tokens[idx + 4]) + idx += 2 + + idx += 1 + idx += 1 + + elif idx + 1 >= len_l or info.jump(tokens[idx + 1]): + if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None: + # 12 am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2])) + idx += 1 + else: + # Year, month or day + ymd.append(value) + idx += 1 + + elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24): + # 12am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1])) + idx += 1 + + elif ymd.could_be_day(value): + ymd.append(value) + + elif not fuzzy: + raise ValueError() + + return idx + + def _find_hms_idx(self, idx, tokens, info, allow_jump): + len_l = len(tokens) + + if idx+1 < len_l and info.hms(tokens[idx+1]) is not None: + # There is an "h", "m", or "s" label following this token. We take + # assign the upcoming label to the current token. + # e.g. the "12" in 12h" + hms_idx = idx + 1 + + elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and + info.hms(tokens[idx+2]) is not None): + # There is a space and then an "h", "m", or "s" label. + # e.g. the "12" in "12 h" + hms_idx = idx + 2 + + elif idx > 0 and info.hms(tokens[idx-1]) is not None: + # There is a "h", "m", or "s" preceding this token. Since neither + # of the previous cases was hit, there is no label following this + # token, so we use the previous label. + # e.g. the "04" in "12h04" + hms_idx = idx-1 + + elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and + info.hms(tokens[idx-2]) is not None): + # If we are looking at the final token, we allow for a + # backward-looking check to skip over a space. + # TODO: Are we sure this is the right condition here? + hms_idx = idx - 2 + + else: + hms_idx = None + + return hms_idx + + def _assign_hms(self, res, value_repr, hms): + # See GH issue #427, fixing float rounding + value = self._to_decimal(value_repr) + + if hms == 0: + # Hour + res.hour = int(value) + if value % 1: + res.minute = int(60*(value % 1)) + + elif hms == 1: + (res.minute, res.second) = self._parse_min_sec(value) + + elif hms == 2: + (res.second, res.microsecond) = self._parsems(value_repr) + + def _could_be_tzname(self, hour, tzname, tzoffset, token): + return (hour is not None and + tzname is None and + tzoffset is None and + len(token) <= 5 and + (all(x in string.ascii_uppercase for x in token) + or token in self.info.UTCZONE)) + + def _ampm_valid(self, hour, ampm, fuzzy): + """ + For fuzzy parsing, 'a' or 'am' (both valid English words) + may erroneously trigger the AM/PM flag. Deal with that + here. + """ + val_is_ampm = True + + # If there's already an AM/PM flag, this one isn't one. 
+ if fuzzy and ampm is not None: + val_is_ampm = False + + # If AM/PM is found and hour is not, raise a ValueError + if hour is None: + if fuzzy: + val_is_ampm = False + else: + raise ValueError('No hour specified with AM or PM flag.') + elif not 0 <= hour <= 12: + # If AM/PM is found, it's a 12 hour clock, so raise + # an error for invalid range + if fuzzy: + val_is_ampm = False + else: + raise ValueError('Invalid hour specified for 12-hour clock.') + + return val_is_ampm + + def _adjust_ampm(self, hour, ampm): + if hour < 12 and ampm == 1: + hour += 12 + elif hour == 12 and ampm == 0: + hour = 0 + return hour + + def _parse_min_sec(self, value): + # TODO: Every usage of this function sets res.second to the return + # value. Are there any cases where second will be returned as None and + # we *don't* want to set res.second = None? + minute = int(value) + second = None + + sec_remainder = value % 1 + if sec_remainder: + second = int(60 * sec_remainder) + return (minute, second) + + def _parse_hms(self, idx, tokens, info, hms_idx): + # TODO: Is this going to admit a lot of false-positives for when we + # just happen to have digits and "h", "m" or "s" characters in non-date + # text? I guess hex hashes won't have that problem, but there's plenty + # of random junk out there. + if hms_idx is None: + hms = None + new_idx = idx + elif hms_idx > idx: + hms = info.hms(tokens[hms_idx]) + new_idx = hms_idx + else: + # Looking backwards, increment one. + hms = info.hms(tokens[hms_idx]) + 1 + new_idx = idx + + return (new_idx, hms) + + # ------------------------------------------------------------------ + # Handling for individual tokens. These are kept as methods instead + # of functions for the sake of customizability via subclassing. + + def _parsems(self, value): + """Parse a I[.F] seconds value into (seconds, microseconds).""" + if "." not in value: + return int(value), 0 + else: + i, f = value.split(".") + return int(i), int(f.ljust(6, "0")[:6]) + + def _to_decimal(self, val): + try: + decimal_value = Decimal(val) + # See GH 662, edge case, infinite value should not be converted + # via `_to_decimal` + if not decimal_value.is_finite(): + raise ValueError("Converted decimal value is infinite or NaN") + except Exception as e: + msg = "Could not convert %s to decimal" % val + six.raise_from(ValueError(msg), e) + else: + return decimal_value + + # ------------------------------------------------------------------ + # Post-Parsing construction of datetime output. These are kept as + # methods instead of functions for the sake of customizability via + # subclassing. 
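# --- Minimal sketch (illustrative, mirroring the doctest in parse() above) of the
# --- tzinfos mapping that the post-parsing helpers below consume: names may map to
# --- integer UTC offsets in seconds or to tzinfo objects; a callable
# --- (tzname, tzoffset) -> tzinfo is also accepted.
from dateutil import tz
from dateutil.parser import parse

tzinfos = {"BRST": -7200, "CST": tz.gettz("America/Chicago")}
print(parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos))
# -> 2012-01-19 17:21:00-02:00, i.e. tzoffset('BRST', -7200) attached by _build_tzaware()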
+ + def _build_tzinfo(self, tzinfos, tzname, tzoffset): + if callable(tzinfos): + tzdata = tzinfos(tzname, tzoffset) + else: + tzdata = tzinfos.get(tzname) + # handle case where tzinfo is paased an options that returns None + # eg tzinfos = {'BRST' : None} + if isinstance(tzdata, datetime.tzinfo) or tzdata is None: + tzinfo = tzdata + elif isinstance(tzdata, text_type): + tzinfo = tz.tzstr(tzdata) + elif isinstance(tzdata, integer_types): + tzinfo = tz.tzoffset(tzname, tzdata) + else: + raise TypeError("Offset must be tzinfo subclass, tz string, " + "or int offset.") + return tzinfo + + def _build_tzaware(self, naive, res, tzinfos): + if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)): + tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset) + aware = naive.replace(tzinfo=tzinfo) + aware = self._assign_tzname(aware, res.tzname) + + elif res.tzname and res.tzname in time.tzname: + aware = naive.replace(tzinfo=tz.tzlocal()) + + # Handle ambiguous local datetime + aware = self._assign_tzname(aware, res.tzname) + + # This is mostly relevant for winter GMT zones parsed in the UK + if (aware.tzname() != res.tzname and + res.tzname in self.info.UTCZONE): + aware = aware.replace(tzinfo=tz.UTC) + + elif res.tzoffset == 0: + aware = naive.replace(tzinfo=tz.UTC) + + elif res.tzoffset: + aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) + + elif not res.tzname and not res.tzoffset: + # i.e. no timezone information was found. + aware = naive + + elif res.tzname: + # tz-like string was parsed but we don't know what to do + # with it + warnings.warn("tzname {tzname} identified but not understood. " + "Pass `tzinfos` argument in order to correctly " + "return a timezone-aware datetime. In a future " + "version, this will raise an " + "exception.".format(tzname=res.tzname), + category=UnknownTimezoneWarning) + aware = naive + + return aware + + def _build_naive(self, res, default): + repl = {} + for attr in ("year", "month", "day", "hour", + "minute", "second", "microsecond"): + value = getattr(res, attr) + if value is not None: + repl[attr] = value + + if 'day' not in repl: + # If the default day exceeds the last day of the month, fall back + # to the end of the month. + cyear = default.year if res.year is None else res.year + cmonth = default.month if res.month is None else res.month + cday = default.day if res.day is None else res.day + + if cday > monthrange(cyear, cmonth)[1]: + repl['day'] = monthrange(cyear, cmonth)[1] + + naive = default.replace(**repl) + + if res.weekday is not None and not res.day: + naive = naive + relativedelta.relativedelta(weekday=res.weekday) + + return naive + + def _assign_tzname(self, dt, tzname): + if dt.tzname() != tzname: + new_dt = tz.enfold(dt, fold=1) + if new_dt.tzname() == tzname: + return new_dt + + return dt + + def _recombine_skipped(self, tokens, skipped_idxs): + """ + >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"] + >>> skipped_idxs = [0, 1, 2, 5] + >>> _recombine_skipped(tokens, skipped_idxs) + ["foo bar", "baz"] + """ + skipped_tokens = [] + for i, idx in enumerate(sorted(skipped_idxs)): + if i > 0 and idx - 1 == skipped_idxs[i - 1]: + skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx] + else: + skipped_tokens.append(tokens[idx]) + + return skipped_tokens + + +DEFAULTPARSER = parser() + + +def parse(timestr, parserinfo=None, **kwargs): + """ + + Parse a string in one of the supported formats, using the + ``parserinfo`` parameters. + + :param timestr: + A string containing a date/time stamp. 
+ + :param parserinfo: + A :class:`parserinfo` object containing parameters for the parser. + If ``None``, the default arguments to the :class:`parserinfo` + constructor are used. + + The ``**kwargs`` parameter takes the following keyword arguments: + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM and + YMD. If set to ``None``, this value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken to + be the year, otherwise the last number is taken to be the year. If + this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ValueError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
+ """ + if parserinfo: + return parser(parserinfo).parse(timestr, **kwargs) + else: + return DEFAULTPARSER.parse(timestr, **kwargs) + + +class _tzparser(object): + + class _result(_resultbase): + + __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", + "start", "end"] + + class _attr(_resultbase): + __slots__ = ["month", "week", "weekday", + "yday", "jyday", "day", "time"] + + def __repr__(self): + return self._repr("") + + def __init__(self): + _resultbase.__init__(self) + self.start = self._attr() + self.end = self._attr() + + def parse(self, tzstr): + res = self._result() + l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] + used_idxs = list() + try: + + len_l = len(l) + + i = 0 + while i < len_l: + # BRST+3[BRDT[+2]] + j = i + while j < len_l and not [x for x in l[j] + if x in "0123456789:,-+"]: + j += 1 + if j != i: + if not res.stdabbr: + offattr = "stdoffset" + res.stdabbr = "".join(l[i:j]) + else: + offattr = "dstoffset" + res.dstabbr = "".join(l[i:j]) + + for ii in range(j): + used_idxs.append(ii) + i = j + if (i < len_l and (l[i] in ('+', '-') or l[i][0] in + "0123456789")): + if l[i] in ('+', '-'): + # Yes, that's right. See the TZ variable + # documentation. + signal = (1, -1)[l[i] == '+'] + used_idxs.append(i) + i += 1 + else: + signal = -1 + len_li = len(l[i]) + if len_li == 4: + # -0300 + setattr(res, offattr, (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) * signal) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + setattr(res, offattr, + (int(l[i]) * 3600 + + int(l[i + 2]) * 60) * signal) + used_idxs.append(i) + i += 2 + elif len_li <= 2: + # -[0]3 + setattr(res, offattr, + int(l[i][:2]) * 3600 * signal) + else: + return None + used_idxs.append(i) + i += 1 + if res.dstabbr: + break + else: + break + + + if i < len_l: + for j in range(i, len_l): + if l[j] == ';': + l[j] = ',' + + assert l[i] == ',' + + i += 1 + + if i >= len_l: + pass + elif (8 <= l.count(',') <= 9 and + not [y for x in l[i:] if x != ',' + for y in x if y not in "0123456789+-"]): + # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] + for x in (res.start, res.end): + x.month = int(l[i]) + used_idxs.append(i) + i += 2 + if l[i] == '-': + value = int(l[i + 1]) * -1 + used_idxs.append(i) + i += 1 + else: + value = int(l[i]) + used_idxs.append(i) + i += 2 + if value: + x.week = value + x.weekday = (int(l[i]) - 1) % 7 + else: + x.day = int(l[i]) + used_idxs.append(i) + i += 2 + x.time = int(l[i]) + used_idxs.append(i) + i += 2 + if i < len_l: + if l[i] in ('-', '+'): + signal = (-1, 1)[l[i] == "+"] + used_idxs.append(i) + i += 1 + else: + signal = 1 + used_idxs.append(i) + res.dstoffset = (res.stdoffset + int(l[i]) * signal) + + # This was a made-up format that is not in normal use + warn(('Parsed time zone "%s"' % tzstr) + + 'is in a non-standard dateutil-specific format, which ' + + 'is now deprecated; support for parsing this format ' + + 'will be removed in future versions. 
It is recommended ' + + 'that you switch to a standard format like the GNU ' + + 'TZ variable format.', tz.DeprecatedTzFormatWarning) + elif (l.count(',') == 2 and l[i:].count('/') <= 2 and + not [y for x in l[i:] if x not in (',', '/', 'J', 'M', + '.', '-', ':') + for y in x if y not in "0123456789"]): + for x in (res.start, res.end): + if l[i] == 'J': + # non-leap year day (1 based) + used_idxs.append(i) + i += 1 + x.jyday = int(l[i]) + elif l[i] == 'M': + # month[-.]week[-.]weekday + used_idxs.append(i) + i += 1 + x.month = int(l[i]) + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.week = int(l[i]) + if x.week == 5: + x.week = -1 + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.weekday = (int(l[i]) - 1) % 7 + else: + # year day (zero based) + x.yday = int(l[i]) + 1 + + used_idxs.append(i) + i += 1 + + if i < len_l and l[i] == '/': + used_idxs.append(i) + i += 1 + # start time + len_li = len(l[i]) + if len_li == 4: + # -0300 + x.time = (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 + used_idxs.append(i) + i += 2 + if i + 1 < len_l and l[i + 1] == ':': + used_idxs.append(i) + i += 2 + x.time += int(l[i]) + elif len_li <= 2: + # -[0]3 + x.time = (int(l[i][:2]) * 3600) + else: + return None + used_idxs.append(i) + i += 1 + + assert i == len_l or l[i] == ',' + + i += 1 + + assert i >= len_l + + except (IndexError, ValueError, AssertionError): + return None + + unused_idxs = set(range(len_l)).difference(used_idxs) + res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) + return res + + +DEFAULTTZPARSER = _tzparser() + + +def _parsetz(tzstr): + return DEFAULTTZPARSER.parse(tzstr) + + +class ParserError(ValueError): + """Error class for representing failure to parse a datetime string.""" + def __str__(self): + try: + return self.args[0] % self.args[1:] + except (TypeError, IndexError): + return super(ParserError, self).__str__() + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, str(self)) + + +class UnknownTimezoneWarning(RuntimeWarning): + """Raised when the parser finds a timezone it cannot parse into a tzinfo""" +# vim:ts=4:sw=4:et diff --git a/venv/Lib/site-packages/dateutil/parser/isoparser.py b/venv/Lib/site-packages/dateutil/parser/isoparser.py new file mode 100644 index 00000000..48f86a33 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/parser/isoparser.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +""" +This module offers a parser for ISO-8601 strings + +It is intended to support all valid date, time and datetime formats per the +ISO-8601 specification. 
+ +..versionadded:: 2.7.0 +""" +from datetime import datetime, timedelta, time, date +import calendar +from dateutil import tz + +from functools import wraps + +import re +import six + +__all__ = ["isoparse", "isoparser"] + + +def _takes_ascii(f): + @wraps(f) + def func(self, str_in, *args, **kwargs): + # If it's a stream, read the whole thing + str_in = getattr(str_in, 'read', lambda: str_in)() + + # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII + if isinstance(str_in, six.text_type): + # ASCII is the same in UTF-8 + try: + str_in = str_in.encode('ascii') + except UnicodeEncodeError as e: + msg = 'ISO-8601 strings should contain only ASCII characters' + six.raise_from(ValueError(msg), e) + + return f(self, str_in, *args, **kwargs) + + return func + + +class isoparser(object): + def __init__(self, sep=None): + """ + :param sep: + A single character that separates date and time portions. If + ``None``, the parser will accept any single character. + For strict ISO-8601 adherence, pass ``'T'``. + """ + if sep is not None: + if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): + raise ValueError('Separator must be a single, non-numeric ' + + 'ASCII character') + + sep = sep.encode('ascii') + + self._sep = sep + + @_takes_ascii + def isoparse(self, dt_str): + """ + Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. + + An ISO-8601 datetime string consists of a date portion, followed + optionally by a time portion - the date and time portions are separated + by a single character separator, which is ``T`` in the official + standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be + combined with a time portion. + + Supported date formats are: + + Common: + + - ``YYYY`` + - ``YYYY-MM`` or ``YYYYMM`` + - ``YYYY-MM-DD`` or ``YYYYMMDD`` + + Uncommon: + + - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) + - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day + + The ISO week and day numbering follows the same logic as + :func:`datetime.date.isocalendar`. + + Supported time formats are: + + - ``hh`` + - ``hh:mm`` or ``hhmm`` + - ``hh:mm:ss`` or ``hhmmss`` + - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits) + + Midnight is a special case for `hh`, as the standard supports both + 00:00 and 24:00 as a representation. The decimal separator can be + either a dot or a comma. + + + .. caution:: + + Support for fractional components other than seconds is part of the + ISO-8601 standard, but is not currently implemented in this parser. + + Supported time zone offset formats are: + + - `Z` (UTC) + - `±HH:MM` + - `±HHMM` + - `±HH` + + Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, + with the exception of UTC, which will be represented as + :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such + as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. + + :param dt_str: + A string or stream containing only an ISO-8601 datetime string + + :return: + Returns a :class:`datetime.datetime` representing the string. + Unspecified components default to their lowest value. + + .. warning:: + + As of version 2.7.0, the strictness of the parser should not be + considered a stable part of the contract. Any valid ISO-8601 string + that parses correctly with the default settings will continue to + parse correctly in future versions, but invalid strings that + currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not + guaranteed to continue failing in future versions if they encode + a valid date. + + .. 
versionadded:: 2.7.0 + """ + components, pos = self._parse_isodate(dt_str) + + if len(dt_str) > pos: + if self._sep is None or dt_str[pos:pos + 1] == self._sep: + components += self._parse_isotime(dt_str[pos + 1:]) + else: + raise ValueError('String contains unknown ISO components') + + if len(components) > 3 and components[3] == 24: + components[3] = 0 + return datetime(*components) + timedelta(days=1) + + return datetime(*components) + + @_takes_ascii + def parse_isodate(self, datestr): + """ + Parse the date portion of an ISO string. + + :param datestr: + The string portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.date` object + """ + components, pos = self._parse_isodate(datestr) + if pos < len(datestr): + raise ValueError('String contains unknown ISO ' + + 'components: {}'.format(datestr)) + return date(*components) + + @_takes_ascii + def parse_isotime(self, timestr): + """ + Parse the time portion of an ISO string. + + :param timestr: + The time portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.time` object + """ + components = self._parse_isotime(timestr) + if components[0] == 24: + components[0] = 0 + return time(*components) + + @_takes_ascii + def parse_tzstr(self, tzstr, zero_as_utc=True): + """ + Parse a valid ISO time zone string. + + See :func:`isoparser.isoparse` for details on supported formats. + + :param tzstr: + A string representing an ISO time zone offset + + :param zero_as_utc: + Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones + + :return: + Returns :class:`dateutil.tz.tzoffset` for offsets and + :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is + specified) offsets equivalent to UTC. + """ + return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) + + # Constants + _DATE_SEP = b'-' + _TIME_SEP = b':' + _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)') + + def _parse_isodate(self, dt_str): + try: + return self._parse_isodate_common(dt_str) + except ValueError: + return self._parse_isodate_uncommon(dt_str) + + def _parse_isodate_common(self, dt_str): + len_str = len(dt_str) + components = [1, 1, 1] + + if len_str < 4: + raise ValueError('ISO string too short') + + # Year + components[0] = int(dt_str[0:4]) + pos = 4 + if pos >= len_str: + return components, pos + + has_sep = dt_str[pos:pos + 1] == self._DATE_SEP + if has_sep: + pos += 1 + + # Month + if len_str - pos < 2: + raise ValueError('Invalid common month') + + components[1] = int(dt_str[pos:pos + 2]) + pos += 2 + + if pos >= len_str: + if has_sep: + return components, pos + else: + raise ValueError('Invalid ISO format') + + if has_sep: + if dt_str[pos:pos + 1] != self._DATE_SEP: + raise ValueError('Invalid separator in ISO string') + pos += 1 + + # Day + if len_str - pos < 2: + raise ValueError('Invalid common day') + components[2] = int(dt_str[pos:pos + 2]) + return components, pos + 2 + + def _parse_isodate_uncommon(self, dt_str): + if len(dt_str) < 4: + raise ValueError('ISO string too short') + + # All ISO formats start with the year + year = int(dt_str[0:4]) + + has_sep = dt_str[4:5] == self._DATE_SEP + + pos = 4 + has_sep # Skip '-' if it's there + if dt_str[pos:pos + 1] == b'W': + # YYYY-?Www-?D? 
+ pos += 1 + weekno = int(dt_str[pos:pos + 2]) + pos += 2 + + dayno = 1 + if len(dt_str) > pos: + if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: + raise ValueError('Inconsistent use of dash separator') + + pos += has_sep + + dayno = int(dt_str[pos:pos + 1]) + pos += 1 + + base_date = self._calculate_weekdate(year, weekno, dayno) + else: + # YYYYDDD or YYYY-DDD + if len(dt_str) - pos < 3: + raise ValueError('Invalid ordinal day') + + ordinal_day = int(dt_str[pos:pos + 3]) + pos += 3 + + if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): + raise ValueError('Invalid ordinal day' + + ' {} for year {}'.format(ordinal_day, year)) + + base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) + + components = [base_date.year, base_date.month, base_date.day] + return components, pos + + def _calculate_weekdate(self, year, week, day): + """ + Calculate the day of corresponding to the ISO year-week-day calendar. + + This function is effectively the inverse of + :func:`datetime.date.isocalendar`. + + :param year: + The year in the ISO calendar + + :param week: + The week in the ISO calendar - range is [1, 53] + + :param day: + The day in the ISO calendar - range is [1 (MON), 7 (SUN)] + + :return: + Returns a :class:`datetime.date` + """ + if not 0 < week < 54: + raise ValueError('Invalid week: {}'.format(week)) + + if not 0 < day < 8: # Range is 1-7 + raise ValueError('Invalid weekday: {}'.format(day)) + + # Get week 1 for the specific year: + jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it + week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) + + # Now add the specific number of weeks and days to get what we want + week_offset = (week - 1) * 7 + (day - 1) + return week_1 + timedelta(days=week_offset) + + def _parse_isotime(self, timestr): + len_str = len(timestr) + components = [0, 0, 0, 0, None] + pos = 0 + comp = -1 + + if len(timestr) < 2: + raise ValueError('ISO time too short') + + has_sep = len_str >= 3 and timestr[2:3] == self._TIME_SEP + + while pos < len_str and comp < 5: + comp += 1 + + if timestr[pos:pos + 1] in b'-+Zz': + # Detect time zone boundary + components[-1] = self._parse_tzstr(timestr[pos:]) + pos = len_str + break + + if comp < 3: + # Hour, minute, second + components[comp] = int(timestr[pos:pos + 2]) + pos += 2 + if (has_sep and pos < len_str and + timestr[pos:pos + 1] == self._TIME_SEP): + pos += 1 + + if comp == 3: + # Fraction of a second + frac = self._FRACTION_REGEX.match(timestr[pos:]) + if not frac: + continue + + us_str = frac.group(1)[:6] # Truncate to microseconds + components[comp] = int(us_str) * 10**(6 - len(us_str)) + pos += len(frac.group()) + + if pos < len_str: + raise ValueError('Unused components in ISO string') + + if components[0] == 24: + # Standard supports 00:00 and 24:00 as representations of midnight + if any(component != 0 for component in components[1:4]): + raise ValueError('Hour may only be 24 at 24:00:00.000') + + return components + + def _parse_tzstr(self, tzstr, zero_as_utc=True): + if tzstr == b'Z' or tzstr == b'z': + return tz.UTC + + if len(tzstr) not in {3, 5, 6}: + raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') + + if tzstr[0:1] == b'-': + mult = -1 + elif tzstr[0:1] == b'+': + mult = 1 + else: + raise ValueError('Time zone offset requires sign') + + hours = int(tzstr[1:3]) + if len(tzstr) == 3: + minutes = 0 + else: + minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):]) + + if zero_as_utc and hours == 0 and minutes == 0: + return tz.UTC + else: + if 
minutes > 59: + raise ValueError('Invalid minutes in time zone offset') + + if hours > 23: + raise ValueError('Invalid hours in time zone offset') + + return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) + + +DEFAULT_ISOPARSER = isoparser() +isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/venv/Lib/site-packages/dateutil/relativedelta.py b/venv/Lib/site-packages/dateutil/relativedelta.py new file mode 100644 index 00000000..a9e85f7e --- /dev/null +++ b/venv/Lib/site-packages/dateutil/relativedelta.py @@ -0,0 +1,599 @@ +# -*- coding: utf-8 -*- +import datetime +import calendar + +import operator +from math import copysign + +from six import integer_types +from warnings import warn + +from ._common import weekday + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + +__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + + +class relativedelta(object): + """ + The relativedelta type is designed to be applied to an existing datetime and + can replace specific components of that datetime, or represents an interval + of time. + + It is based on the specification of the excellent work done by M.-A. Lemburg + in his + `mx.DateTime <https://www.egenix.com/products/python/mxBase/mxDateTime/>`_ extension. + However, notice that this type does *NOT* implement the same algorithm as + his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. + + There are two different ways to build a relativedelta instance. The + first one is passing it two date/datetime classes:: + + relativedelta(datetime1, datetime2) + + The second one is passing it any number of the following keyword arguments:: + + relativedelta(arg1=x,arg2=y,arg3=z...) + + year, month, day, hour, minute, second, microsecond: + Absolute information (argument is singular); adding or subtracting a + relativedelta with absolute information does not perform an arithmetic + operation, but rather REPLACES the corresponding value in the + original datetime with the value(s) in relativedelta. + + years, months, weeks, days, hours, minutes, seconds, microseconds: + Relative information, may be negative (argument is plural); adding + or subtracting a relativedelta with relative information performs + the corresponding arithmetic operation on the original datetime value + with the information in the relativedelta. + + weekday: + One of the weekday instances (MO, TU, etc) available in the + relativedelta module. These instances may receive a parameter N, + specifying the Nth weekday, which could be positive or negative + (like MO(+1) or MO(-2)). Not specifying it is the same as specifying + +1. You can also use an integer, where 0=MO. This argument is always + relative e.g. if the calculated date is already Monday, using MO(1) + or MO(-1) won't change the day. To effectively make it absolute, use + it in combination with the day argument (e.g. day=1, MO(1) for first + Monday of the month). + + leapdays: + Will add given days to the date found, if year is a leap + year, and the date found is post 28 of february. + + yearday, nlyearday: + Set the yearday or the non-leap year day (jump leap days). + These are converted to day/month/leapdays information. + + There are relative and absolute forms of the keyword + arguments. The plural is relative, and the singular is + absolute. For each argument in the order below, the absolute form + is applied first (by setting each attribute to that value) and + then the relative form (by adding the value to the attribute). 
+ + The order of attributes considered when this relativedelta is + added to a datetime is: + + 1. Year + 2. Month + 3. Day + 4. Hours + 5. Minutes + 6. Seconds + 7. Microseconds + + Finally, weekday is applied, using the rule described above. + + For example + + >>> from datetime import datetime + >>> from dateutil.relativedelta import relativedelta, MO + >>> dt = datetime(2018, 4, 9, 13, 37, 0) + >>> delta = relativedelta(hours=25, day=1, weekday=MO(1)) + >>> dt + delta + datetime.datetime(2018, 4, 2, 14, 37) + + First, the day is set to 1 (the first of the month), then 25 hours + are added, to get to the 2nd day and 14th hour, finally the + weekday is applied, but since the 2nd is already a Monday there is + no effect. + + """ + + def __init__(self, dt1=None, dt2=None, + years=0, months=0, days=0, leapdays=0, weeks=0, + hours=0, minutes=0, seconds=0, microseconds=0, + year=None, month=None, day=None, weekday=None, + yearday=None, nlyearday=None, + hour=None, minute=None, second=None, microsecond=None): + + if dt1 and dt2: + # datetime is a subclass of date. So both must be date + if not (isinstance(dt1, datetime.date) and + isinstance(dt2, datetime.date)): + raise TypeError("relativedelta only diffs datetime/date") + + # We allow two dates, or two datetimes, so we coerce them to be + # of the same type + if (isinstance(dt1, datetime.datetime) != + isinstance(dt2, datetime.datetime)): + if not isinstance(dt1, datetime.datetime): + dt1 = datetime.datetime.fromordinal(dt1.toordinal()) + elif not isinstance(dt2, datetime.datetime): + dt2 = datetime.datetime.fromordinal(dt2.toordinal()) + + self.years = 0 + self.months = 0 + self.days = 0 + self.leapdays = 0 + self.hours = 0 + self.minutes = 0 + self.seconds = 0 + self.microseconds = 0 + self.year = None + self.month = None + self.day = None + self.weekday = None + self.hour = None + self.minute = None + self.second = None + self.microsecond = None + self._has_time = 0 + + # Get year / month delta between the two + months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) + self._set_months(months) + + # Remove the year/month delta so the timedelta is just well-defined + # time units (seconds, days and microseconds) + dtm = self.__radd__(dt2) + + # If we've overshot our target, make an adjustment + if dt1 < dt2: + compare = operator.gt + increment = 1 + else: + compare = operator.lt + increment = -1 + + while compare(dt1, dtm): + months += increment + self._set_months(months) + dtm = self.__radd__(dt2) + + # Get the timedelta between the "months-adjusted" date and dt1 + delta = dt1 - dtm + self.seconds = delta.seconds + delta.days * 86400 + self.microseconds = delta.microseconds + else: + # Check for non-integer values in integer-only quantities + if any(x is not None and x != int(x) for x in (years, months)): + raise ValueError("Non-integer years and months are " + "ambiguous and not currently supported.") + + # Relative information + self.years = int(years) + self.months = int(months) + self.days = days + weeks * 7 + self.leapdays = leapdays + self.hours = hours + self.minutes = minutes + self.seconds = seconds + self.microseconds = microseconds + + # Absolute information + self.year = year + self.month = month + self.day = day + self.hour = hour + self.minute = minute + self.second = second + self.microsecond = microsecond + + if any(x is not None and int(x) != x + for x in (year, month, day, hour, + minute, second, microsecond)): + # For now we'll deprecate floats - later it'll be an error. 
+ warn("Non-integer value passed as absolute information. " + + "This is not a well-defined condition and will raise " + + "errors in future versions.", DeprecationWarning) + + if isinstance(weekday, integer_types): + self.weekday = weekdays[weekday] + else: + self.weekday = weekday + + yday = 0 + if nlyearday: + yday = nlyearday + elif yearday: + yday = yearday + if yearday > 59: + self.leapdays = -1 + if yday: + ydayidx = [31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, 366] + for idx, ydays in enumerate(ydayidx): + if yday <= ydays: + self.month = idx+1 + if idx == 0: + self.day = yday + else: + self.day = yday-ydayidx[idx-1] + break + else: + raise ValueError("invalid year day (%d)" % yday) + + self._fix() + + def _fix(self): + if abs(self.microseconds) > 999999: + s = _sign(self.microseconds) + div, mod = divmod(self.microseconds * s, 1000000) + self.microseconds = mod * s + self.seconds += div * s + if abs(self.seconds) > 59: + s = _sign(self.seconds) + div, mod = divmod(self.seconds * s, 60) + self.seconds = mod * s + self.minutes += div * s + if abs(self.minutes) > 59: + s = _sign(self.minutes) + div, mod = divmod(self.minutes * s, 60) + self.minutes = mod * s + self.hours += div * s + if abs(self.hours) > 23: + s = _sign(self.hours) + div, mod = divmod(self.hours * s, 24) + self.hours = mod * s + self.days += div * s + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years += div * s + if (self.hours or self.minutes or self.seconds or self.microseconds + or self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): + self._has_time = 1 + else: + self._has_time = 0 + + @property + def weeks(self): + return int(self.days / 7.0) + + @weeks.setter + def weeks(self, value): + self.days = self.days - (self.weeks * 7) + value * 7 + + def _set_months(self, months): + self.months = months + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years = div * s + else: + self.years = 0 + + def normalized(self): + """ + Return a version of this object represented entirely using integer + values for the relative attributes. + + >>> relativedelta(days=1.5, hours=2).normalized() + relativedelta(days=+1, hours=+14) + + :return: + Returns a :class:`dateutil.relativedelta.relativedelta` object. 
+ """ + # Cascade remainders down (rounding each to roughly nearest microsecond) + days = int(self.days) + + hours_f = round(self.hours + 24 * (self.days - days), 11) + hours = int(hours_f) + + minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) + minutes = int(minutes_f) + + seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) + seconds = int(seconds_f) + + microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) + + # Constructor carries overflow back up with call to _fix() + return self.__class__(years=self.years, months=self.months, + days=days, hours=hours, minutes=minutes, + seconds=seconds, microseconds=microseconds, + leapdays=self.leapdays, year=self.year, + month=self.month, day=self.day, + weekday=self.weekday, hour=self.hour, + minute=self.minute, second=self.second, + microsecond=self.microsecond) + + def __add__(self, other): + if isinstance(other, relativedelta): + return self.__class__(years=other.years + self.years, + months=other.months + self.months, + days=other.days + self.days, + hours=other.hours + self.hours, + minutes=other.minutes + self.minutes, + seconds=other.seconds + self.seconds, + microseconds=(other.microseconds + + self.microseconds), + leapdays=other.leapdays or self.leapdays, + year=(other.year if other.year is not None + else self.year), + month=(other.month if other.month is not None + else self.month), + day=(other.day if other.day is not None + else self.day), + weekday=(other.weekday if other.weekday is not None + else self.weekday), + hour=(other.hour if other.hour is not None + else self.hour), + minute=(other.minute if other.minute is not None + else self.minute), + second=(other.second if other.second is not None + else self.second), + microsecond=(other.microsecond if other.microsecond + is not None else + self.microsecond)) + if isinstance(other, datetime.timedelta): + return self.__class__(years=self.years, + months=self.months, + days=self.days + other.days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds + other.seconds, + microseconds=self.microseconds + other.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + if not isinstance(other, datetime.date): + return NotImplemented + elif self._has_time and not isinstance(other, datetime.datetime): + other = datetime.datetime.fromordinal(other.toordinal()) + year = (self.year or other.year)+self.years + month = self.month or other.month + if self.months: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + day = min(calendar.monthrange(year, month)[1], + self.day or other.day) + repl = {"year": year, "month": month, "day": day} + for attr in ["hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + repl[attr] = value + days = self.days + if self.leapdays and month > 2 and calendar.isleap(year): + days += self.leapdays + ret = (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds, + microseconds=self.microseconds)) + if self.weekday: + weekday, nth = self.weekday.weekday, self.weekday.n or 1 + jumpdays = (abs(nth) - 1) * 7 + if nth > 0: + jumpdays += (7 - ret.weekday() + weekday) % 7 + else: + jumpdays += (ret.weekday() - weekday) % 7 + jumpdays *= -1 + ret += 
datetime.timedelta(days=jumpdays) + return ret + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__radd__(other) + + def __sub__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented # In case the other object defines __rsub__ + return self.__class__(years=self.years - other.years, + months=self.months - other.months, + days=self.days - other.days, + hours=self.hours - other.hours, + minutes=self.minutes - other.minutes, + seconds=self.seconds - other.seconds, + microseconds=self.microseconds - other.microseconds, + leapdays=self.leapdays or other.leapdays, + year=(self.year if self.year is not None + else other.year), + month=(self.month if self.month is not None else + other.month), + day=(self.day if self.day is not None else + other.day), + weekday=(self.weekday if self.weekday is not None else + other.weekday), + hour=(self.hour if self.hour is not None else + other.hour), + minute=(self.minute if self.minute is not None else + other.minute), + second=(self.second if self.second is not None else + other.second), + microsecond=(self.microsecond if self.microsecond + is not None else + other.microsecond)) + + def __abs__(self): + return self.__class__(years=abs(self.years), + months=abs(self.months), + days=abs(self.days), + hours=abs(self.hours), + minutes=abs(self.minutes), + seconds=abs(self.seconds), + microseconds=abs(self.microseconds), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __neg__(self): + return self.__class__(years=-self.years, + months=-self.months, + days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + microseconds=-self.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __bool__(self): + return not (not self.years and + not self.months and + not self.days and + not self.hours and + not self.minutes and + not self.seconds and + not self.microseconds and + not self.leapdays and + self.year is None and + self.month is None and + self.day is None and + self.weekday is None and + self.hour is None and + self.minute is None and + self.second is None and + self.microsecond is None) + # Compatibility with Python 2.x + __nonzero__ = __bool__ + + def __mul__(self, other): + try: + f = float(other) + except TypeError: + return NotImplemented + + return self.__class__(years=int(self.years * f), + months=int(self.months * f), + days=int(self.days * f), + hours=int(self.hours * f), + minutes=int(self.minutes * f), + seconds=int(self.seconds * f), + microseconds=int(self.microseconds * f), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + __rmul__ = __mul__ + + def __eq__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented + if self.weekday or other.weekday: + if not self.weekday or not other.weekday: + return False + if self.weekday.weekday != other.weekday.weekday: + return False + n1, n2 = self.weekday.n, other.weekday.n + if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): + return False + return (self.years == other.years and + 
self.months == other.months and + self.days == other.days and + self.hours == other.hours and + self.minutes == other.minutes and + self.seconds == other.seconds and + self.microseconds == other.microseconds and + self.leapdays == other.leapdays and + self.year == other.year and + self.month == other.month and + self.day == other.day and + self.hour == other.hour and + self.minute == other.minute and + self.second == other.second and + self.microsecond == other.microsecond) + + def __hash__(self): + return hash(( + self.weekday, + self.years, + self.months, + self.days, + self.hours, + self.minutes, + self.seconds, + self.microseconds, + self.leapdays, + self.year, + self.month, + self.day, + self.hour, + self.minute, + self.second, + self.microsecond, + )) + + def __ne__(self, other): + return not self.__eq__(other) + + def __div__(self, other): + try: + reciprocal = 1 / float(other) + except TypeError: + return NotImplemented + + return self.__mul__(reciprocal) + + __truediv__ = __div__ + + def __repr__(self): + l = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + l.append("{attr}={value:+g}".format(attr=attr, value=value)) + for attr in ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + l.append("{attr}={value}".format(attr=attr, value=repr(value))) + return "{classname}({attrs})".format(classname=self.__class__.__name__, + attrs=", ".join(l)) + + +def _sign(x): + return int(copysign(1, x)) + +# vim:ts=4:sw=4:et diff --git a/venv/Lib/site-packages/dateutil/rrule.py b/venv/Lib/site-packages/dateutil/rrule.py new file mode 100644 index 00000000..6bf0ea9c --- /dev/null +++ b/venv/Lib/site-packages/dateutil/rrule.py @@ -0,0 +1,1735 @@ +# -*- coding: utf-8 -*- +""" +The rrule module offers a small, complete, and very fast, implementation of +the recurrence rules documented in the +`iCalendar RFC <https://tools.ietf.org/html/rfc5545>`_, +including support for caching of results. +""" +import itertools +import datetime +import calendar +import re +import sys + +try: + from math import gcd +except ImportError: + from fractions import gcd + +from six import advance_iterator, integer_types +from six.moves import _thread, range +import heapq + +from ._common import weekday as weekdaybase + +# For warning about deprecation of until and count +from warnings import warn + +__all__ = ["rrule", "rruleset", "rrulestr", + "YEARLY", "MONTHLY", "WEEKLY", "DAILY", + "HOURLY", "MINUTELY", "SECONDLY", + "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + +# Every mask is 7 days longer to handle cross-year weekly periods. 
+M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + + [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) +M365MASK = list(M366MASK) +M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) +MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +MDAY365MASK = list(MDAY366MASK) +M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) +NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +NMDAY365MASK = list(NMDAY366MASK) +M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) +M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) +WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 +del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] +MDAY365MASK = tuple(MDAY365MASK) +M365MASK = tuple(M365MASK) + +FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] + +(YEARLY, + MONTHLY, + WEEKLY, + DAILY, + HOURLY, + MINUTELY, + SECONDLY) = list(range(7)) + +# Imported on demand. +easter = None +parser = None + + +class weekday(weekdaybase): + """ + This version of weekday does not allow n = 0. + """ + def __init__(self, wkday, n=None): + if n == 0: + raise ValueError("Can't create weekday with n==0") + + super(weekday, self).__init__(wkday, n) + + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + + +def _invalidates_cache(f): + """ + Decorator for rruleset methods which may invalidate the + cached length. + """ + def inner_func(self, *args, **kwargs): + rv = f(self, *args, **kwargs) + self._invalidate_cache() + return rv + + return inner_func + + +class rrulebase(object): + def __init__(self, cache=False): + if cache: + self._cache = [] + self._cache_lock = _thread.allocate_lock() + self._invalidate_cache() + else: + self._cache = None + self._cache_complete = False + self._len = None + + def __iter__(self): + if self._cache_complete: + return iter(self._cache) + elif self._cache is None: + return self._iter() + else: + return self._iter_cached() + + def _invalidate_cache(self): + if self._cache is not None: + self._cache = [] + self._cache_complete = False + self._cache_gen = self._iter() + + if self._cache_lock.locked(): + self._cache_lock.release() + + self._len = None + + def _iter_cached(self): + i = 0 + gen = self._cache_gen + cache = self._cache + acquire = self._cache_lock.acquire + release = self._cache_lock.release + while gen: + if i == len(cache): + acquire() + if self._cache_complete: + break + try: + for j in range(10): + cache.append(advance_iterator(gen)) + except StopIteration: + self._cache_gen = gen = None + self._cache_complete = True + break + release() + yield cache[i] + i += 1 + while i < self._len: + yield cache[i] + i += 1 + + def __getitem__(self, item): + if self._cache_complete: + return self._cache[item] + elif isinstance(item, slice): + if item.step and item.step < 0: + return list(iter(self))[item] + else: + return list(itertools.islice(self, + item.start or 0, + item.stop or sys.maxsize, + item.step or 1)) + elif item >= 0: + gen = iter(self) + try: + for i in range(item+1): + res = advance_iterator(gen) + except StopIteration: + raise IndexError + return res + else: + return list(iter(self))[item] + + def __contains__(self, item): + if self._cache_complete: + return item in self._cache + else: + for i in self: + if i == item: + return True + elif i > item: + return False + return False + + # __len__() introduces a large performance penalty. 
+ def count(self): + """ Returns the number of recurrences in this set. It will have go + trough the whole recurrence, if this hasn't been done before. """ + if self._len is None: + for x in self: + pass + return self._len + + def before(self, dt, inc=False): + """ Returns the last recurrence before the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + last = None + if inc: + for i in gen: + if i > dt: + break + last = i + else: + for i in gen: + if i >= dt: + break + last = i + return last + + def after(self, dt, inc=False): + """ Returns the first recurrence after the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + if inc: + for i in gen: + if i >= dt: + return i + else: + for i in gen: + if i > dt: + return i + return None + + def xafter(self, dt, count=None, inc=False): + """ + Generator which yields up to `count` recurrences after the given + datetime instance, equivalent to `after`. + + :param dt: + The datetime at which to start generating recurrences. + + :param count: + The maximum number of recurrences to generate. If `None` (default), + dates are generated until the recurrence rule is exhausted. + + :param inc: + If `dt` is an instance of the rule and `inc` is `True`, it is + included in the output. + + :yields: Yields a sequence of `datetime` objects. + """ + + if self._cache_complete: + gen = self._cache + else: + gen = self + + # Select the comparison function + if inc: + comp = lambda dc, dtc: dc >= dtc + else: + comp = lambda dc, dtc: dc > dtc + + # Generate dates + n = 0 + for d in gen: + if comp(d, dt): + if count is not None: + n += 1 + if n > count: + break + + yield d + + def between(self, after, before, inc=False, count=1): + """ Returns all the occurrences of the rrule between after and before. + The inc keyword defines what happens if after and/or before are + themselves occurrences. With inc=True, they will be included in the + list, if they are found in the recurrence set. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + started = False + l = [] + if inc: + for i in gen: + if i > before: + break + elif not started: + if i >= after: + started = True + l.append(i) + else: + l.append(i) + else: + for i in gen: + if i >= before: + break + elif not started: + if i > after: + started = True + l.append(i) + else: + l.append(i) + return l + + +class rrule(rrulebase): + """ + That's the base of the rrule operation. It accepts all the keywords + defined in the RFC as its constructor parameters (except byday, + which was renamed to byweekday) and more. The constructor prototype is:: + + rrule(freq) + + Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, + or SECONDLY. + + .. note:: + Per RFC section 3.3.10, recurrence instances falling on invalid dates + and times are ignored rather than coerced: + + Recurrence rules may generate recurrence instances with an invalid + date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM + on a day where the local time is moved forward by an hour at 1:00 + AM). Such recurrence instances MUST be ignored and MUST NOT be + counted as part of the recurrence set. 
+ + This can lead to possibly surprising behavior when, for example, the + start date occurs at the end of the month: + + >>> from dateutil.rrule import rrule, MONTHLY + >>> from datetime import datetime + >>> start_date = datetime(2014, 12, 31) + >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) + ... # doctest: +NORMALIZE_WHITESPACE + [datetime.datetime(2014, 12, 31, 0, 0), + datetime.datetime(2015, 1, 31, 0, 0), + datetime.datetime(2015, 3, 31, 0, 0), + datetime.datetime(2015, 5, 31, 0, 0)] + + Additionally, it supports the following keyword arguments: + + :param dtstart: + The recurrence start. Besides being the base for the recurrence, + missing parameters in the final recurrence instances will also be + extracted from this date. If not given, datetime.now() will be used + instead. + :param interval: + The interval between each freq iteration. For example, when using + YEARLY, an interval of 2 means once every two years, but with HOURLY, + it means once every two hours. The default interval is 1. + :param wkst: + The week start day. Must be one of the MO, TU, WE constants, or an + integer, specifying the first day of the week. This will affect + recurrences based on weekly periods. The default week start is got + from calendar.firstweekday(), and may be modified by + calendar.setfirstweekday(). + :param count: + If given, this determines how many occurrences will be generated. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 <https://tools.ietf.org/ + html/rfc5545#section-3.3.10>`_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param until: + If given, this must be a datetime instance specifying the upper-bound + limit of the recurrence. The last recurrence in the rule is the greatest + datetime that is less than or equal to the value specified in the + ``until`` parameter. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 <https://tools.ietf.org/ + html/rfc5545#section-3.3.10>`_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param bysetpos: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each given integer will specify an occurrence + number, corresponding to the nth occurrence of the rule inside the + frequency period. For example, a bysetpos of -1 if combined with a + MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will + result in the last work day of every month. + :param bymonth: + If given, it must be either an integer, or a sequence of integers, + meaning the months to apply the recurrence to. + :param bymonthday: + If given, it must be either an integer, or a sequence of integers, + meaning the month days to apply the recurrence to. + :param byyearday: + If given, it must be either an integer, or a sequence of integers, + meaning the year days to apply the recurrence to. + :param byeaster: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each integer will define an offset from the + Easter Sunday. Passing the offset 0 to byeaster will yield the Easter + Sunday itself. This is an extension to the RFC specification. 
+ :param byweekno: + If given, it must be either an integer, or a sequence of integers, + meaning the week numbers to apply the recurrence to. Week numbers + have the meaning described in ISO8601, that is, the first week of + the year is that containing at least four days of the new year. + :param byweekday: + If given, it must be either an integer (0 == MO), a sequence of + integers, one of the weekday constants (MO, TU, etc), or a sequence + of these constants. When given, these variables will define the + weekdays where the recurrence will be applied. It's also possible to + use an argument n for the weekday instances, which will mean the nth + occurrence of this weekday in the period. For example, with MONTHLY, + or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the + first friday of the month where the recurrence happens. Notice that in + the RFC documentation, this is specified as BYDAY, but was renamed to + avoid the ambiguity of that keyword. + :param byhour: + If given, it must be either an integer, or a sequence of integers, + meaning the hours to apply the recurrence to. + :param byminute: + If given, it must be either an integer, or a sequence of integers, + meaning the minutes to apply the recurrence to. + :param bysecond: + If given, it must be either an integer, or a sequence of integers, + meaning the seconds to apply the recurrence to. + :param cache: + If given, it must be a boolean value specifying to enable or disable + caching of results. If you will use the same rrule instance multiple + times, enabling caching will improve the performance considerably. + """ + def __init__(self, freq, dtstart=None, + interval=1, wkst=None, count=None, until=None, bysetpos=None, + bymonth=None, bymonthday=None, byyearday=None, byeaster=None, + byweekno=None, byweekday=None, + byhour=None, byminute=None, bysecond=None, + cache=False): + super(rrule, self).__init__(cache) + global easter + if not dtstart: + if until and until.tzinfo: + dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) + else: + dtstart = datetime.datetime.now().replace(microsecond=0) + elif not isinstance(dtstart, datetime.datetime): + dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + dtstart = dtstart.replace(microsecond=0) + self._dtstart = dtstart + self._tzinfo = dtstart.tzinfo + self._freq = freq + self._interval = interval + self._count = count + + # Cache the original byxxx rules, if they are provided, as the _byxxx + # attributes do not necessarily map to the inputs, and this can be + # a problem in generating the strings. Only store things if they've + # been supplied (the string retrieval will just use .get()) + self._original_rule = {} + + if until and not isinstance(until, datetime.datetime): + until = datetime.datetime.fromordinal(until.toordinal()) + self._until = until + + if self._dtstart and self._until: + if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): + # According to RFC5545 Section 3.3.10: + # https://tools.ietf.org/html/rfc5545#section-3.3.10 + # + # > If the "DTSTART" property is specified as a date with UTC + # > time or a date with local time and time zone reference, + # > then the UNTIL rule part MUST be specified as a date with + # > UTC time. + raise ValueError( + 'RRULE UNTIL values must be specified in UTC when DTSTART ' + 'is timezone-aware' + ) + + if count is not None and until: + warn("Using both 'count' and 'until' is inconsistent with RFC 5545" + " and has been deprecated in dateutil. 
Future versions will " + "raise an error.", DeprecationWarning) + + if wkst is None: + self._wkst = calendar.firstweekday() + elif isinstance(wkst, integer_types): + self._wkst = wkst + else: + self._wkst = wkst.weekday + + if bysetpos is None: + self._bysetpos = None + elif isinstance(bysetpos, integer_types): + if bysetpos == 0 or not (-366 <= bysetpos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + self._bysetpos = (bysetpos,) + else: + self._bysetpos = tuple(bysetpos) + for pos in self._bysetpos: + if pos == 0 or not (-366 <= pos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + + if self._bysetpos: + self._original_rule['bysetpos'] = self._bysetpos + + if (byweekno is None and byyearday is None and bymonthday is None and + byweekday is None and byeaster is None): + if freq == YEARLY: + if bymonth is None: + bymonth = dtstart.month + self._original_rule['bymonth'] = None + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == MONTHLY: + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == WEEKLY: + byweekday = dtstart.weekday() + self._original_rule['byweekday'] = None + + # bymonth + if bymonth is None: + self._bymonth = None + else: + if isinstance(bymonth, integer_types): + bymonth = (bymonth,) + + self._bymonth = tuple(sorted(set(bymonth))) + + if 'bymonth' not in self._original_rule: + self._original_rule['bymonth'] = self._bymonth + + # byyearday + if byyearday is None: + self._byyearday = None + else: + if isinstance(byyearday, integer_types): + byyearday = (byyearday,) + + self._byyearday = tuple(sorted(set(byyearday))) + self._original_rule['byyearday'] = self._byyearday + + # byeaster + if byeaster is not None: + if not easter: + from dateutil import easter + if isinstance(byeaster, integer_types): + self._byeaster = (byeaster,) + else: + self._byeaster = tuple(sorted(byeaster)) + + self._original_rule['byeaster'] = self._byeaster + else: + self._byeaster = None + + # bymonthday + if bymonthday is None: + self._bymonthday = () + self._bynmonthday = () + else: + if isinstance(bymonthday, integer_types): + bymonthday = (bymonthday,) + + bymonthday = set(bymonthday) # Ensure it's unique + + self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) + self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) + + # Storing positive numbers first, then negative numbers + if 'bymonthday' not in self._original_rule: + self._original_rule['bymonthday'] = tuple( + itertools.chain(self._bymonthday, self._bynmonthday)) + + # byweekno + if byweekno is None: + self._byweekno = None + else: + if isinstance(byweekno, integer_types): + byweekno = (byweekno,) + + self._byweekno = tuple(sorted(set(byweekno))) + + self._original_rule['byweekno'] = self._byweekno + + # byweekday / bynweekday + if byweekday is None: + self._byweekday = None + self._bynweekday = None + else: + # If it's one of the valid non-sequence types, convert to a + # single-element sequence before the iterator that builds the + # byweekday set. 
+ if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): + byweekday = (byweekday,) + + self._byweekday = set() + self._bynweekday = set() + for wday in byweekday: + if isinstance(wday, integer_types): + self._byweekday.add(wday) + elif not wday.n or freq > MONTHLY: + self._byweekday.add(wday.weekday) + else: + self._bynweekday.add((wday.weekday, wday.n)) + + if not self._byweekday: + self._byweekday = None + elif not self._bynweekday: + self._bynweekday = None + + if self._byweekday is not None: + self._byweekday = tuple(sorted(self._byweekday)) + orig_byweekday = [weekday(x) for x in self._byweekday] + else: + orig_byweekday = () + + if self._bynweekday is not None: + self._bynweekday = tuple(sorted(self._bynweekday)) + orig_bynweekday = [weekday(*x) for x in self._bynweekday] + else: + orig_bynweekday = () + + if 'byweekday' not in self._original_rule: + self._original_rule['byweekday'] = tuple(itertools.chain( + orig_byweekday, orig_bynweekday)) + + # byhour + if byhour is None: + if freq < HOURLY: + self._byhour = {dtstart.hour} + else: + self._byhour = None + else: + if isinstance(byhour, integer_types): + byhour = (byhour,) + + if freq == HOURLY: + self._byhour = self.__construct_byset(start=dtstart.hour, + byxxx=byhour, + base=24) + else: + self._byhour = set(byhour) + + self._byhour = tuple(sorted(self._byhour)) + self._original_rule['byhour'] = self._byhour + + # byminute + if byminute is None: + if freq < MINUTELY: + self._byminute = {dtstart.minute} + else: + self._byminute = None + else: + if isinstance(byminute, integer_types): + byminute = (byminute,) + + if freq == MINUTELY: + self._byminute = self.__construct_byset(start=dtstart.minute, + byxxx=byminute, + base=60) + else: + self._byminute = set(byminute) + + self._byminute = tuple(sorted(self._byminute)) + self._original_rule['byminute'] = self._byminute + + # bysecond + if bysecond is None: + if freq < SECONDLY: + self._bysecond = ((dtstart.second,)) + else: + self._bysecond = None + else: + if isinstance(bysecond, integer_types): + bysecond = (bysecond,) + + self._bysecond = set(bysecond) + + if freq == SECONDLY: + self._bysecond = self.__construct_byset(start=dtstart.second, + byxxx=bysecond, + base=60) + else: + self._bysecond = set(bysecond) + + self._bysecond = tuple(sorted(self._bysecond)) + self._original_rule['bysecond'] = self._bysecond + + if self._freq >= HOURLY: + self._timeset = None + else: + self._timeset = [] + for hour in self._byhour: + for minute in self._byminute: + for second in self._bysecond: + self._timeset.append( + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) + self._timeset.sort() + self._timeset = tuple(self._timeset) + + def __str__(self): + """ + Output a string that would generate this RRULE if passed to rrulestr. + This is mostly compatible with RFC5545, except for the + dateutil-specific extension BYEASTER. 
+ """ + + output = [] + h, m, s = [None] * 3 + if self._dtstart: + output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) + h, m, s = self._dtstart.timetuple()[3:6] + + parts = ['FREQ=' + FREQNAMES[self._freq]] + if self._interval != 1: + parts.append('INTERVAL=' + str(self._interval)) + + if self._wkst: + parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) + + if self._count is not None: + parts.append('COUNT=' + str(self._count)) + + if self._until: + parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) + + if self._original_rule.get('byweekday') is not None: + # The str() method on weekday objects doesn't generate + # RFC5545-compliant strings, so we should modify that. + original_rule = dict(self._original_rule) + wday_strings = [] + for wday in original_rule['byweekday']: + if wday.n: + wday_strings.append('{n:+d}{wday}'.format( + n=wday.n, + wday=repr(wday)[0:2])) + else: + wday_strings.append(repr(wday)) + + original_rule['byweekday'] = wday_strings + else: + original_rule = self._original_rule + + partfmt = '{name}={vals}' + for name, key in [('BYSETPOS', 'bysetpos'), + ('BYMONTH', 'bymonth'), + ('BYMONTHDAY', 'bymonthday'), + ('BYYEARDAY', 'byyearday'), + ('BYWEEKNO', 'byweekno'), + ('BYDAY', 'byweekday'), + ('BYHOUR', 'byhour'), + ('BYMINUTE', 'byminute'), + ('BYSECOND', 'bysecond'), + ('BYEASTER', 'byeaster')]: + value = original_rule.get(key) + if value: + parts.append(partfmt.format(name=name, vals=(','.join(str(v) + for v in value)))) + + output.append('RRULE:' + ';'.join(parts)) + return '\n'.join(output) + + def replace(self, **kwargs): + """Return new rrule with same attributes except for those attributes given new + values by whichever keyword arguments are specified.""" + new_kwargs = {"interval": self._interval, + "count": self._count, + "dtstart": self._dtstart, + "freq": self._freq, + "until": self._until, + "wkst": self._wkst, + "cache": False if self._cache is None else True } + new_kwargs.update(self._original_rule) + new_kwargs.update(kwargs) + return rrule(**new_kwargs) + + def _iter(self): + year, month, day, hour, minute, second, weekday, yearday, _ = \ + self._dtstart.timetuple() + + # Some local variables to speed things up a bit + freq = self._freq + interval = self._interval + wkst = self._wkst + until = self._until + bymonth = self._bymonth + byweekno = self._byweekno + byyearday = self._byyearday + byweekday = self._byweekday + byeaster = self._byeaster + bymonthday = self._bymonthday + bynmonthday = self._bynmonthday + bysetpos = self._bysetpos + byhour = self._byhour + byminute = self._byminute + bysecond = self._bysecond + + ii = _iterinfo(self) + ii.rebuild(year, month) + + getdayset = {YEARLY: ii.ydayset, + MONTHLY: ii.mdayset, + WEEKLY: ii.wdayset, + DAILY: ii.ddayset, + HOURLY: ii.ddayset, + MINUTELY: ii.ddayset, + SECONDLY: ii.ddayset}[freq] + + if freq < HOURLY: + timeset = self._timeset + else: + gettimeset = {HOURLY: ii.htimeset, + MINUTELY: ii.mtimeset, + SECONDLY: ii.stimeset}[freq] + if ((freq >= HOURLY and + self._byhour and hour not in self._byhour) or + (freq >= MINUTELY and + self._byminute and minute not in self._byminute) or + (freq >= SECONDLY and + self._bysecond and second not in self._bysecond)): + timeset = () + else: + timeset = gettimeset(hour, minute, second) + + total = 0 + count = self._count + while True: + # Get dayset with the right frequency + dayset, start, end = getdayset(year, month, day) + + # Do the "hard" work ;-) + filtered = False + for i in dayset[start:end]: + if ((bymonth and ii.mmask[i] not in 
bymonth) or + (byweekno and not ii.wnomask[i]) or + (byweekday and ii.wdaymask[i] not in byweekday) or + (ii.nwdaymask and not ii.nwdaymask[i]) or + (byeaster and not ii.eastermask[i]) or + ((bymonthday or bynmonthday) and + ii.mdaymask[i] not in bymonthday and + ii.nmdaymask[i] not in bynmonthday) or + (byyearday and + ((i < ii.yearlen and i+1 not in byyearday and + -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and + -ii.nextyearlen+i-ii.yearlen not in byyearday)))): + dayset[i] = None + filtered = True + + # Output results + if bysetpos and timeset: + poslist = [] + for pos in bysetpos: + if pos < 0: + daypos, timepos = divmod(pos, len(timeset)) + else: + daypos, timepos = divmod(pos-1, len(timeset)) + try: + i = [x for x in dayset[start:end] + if x is not None][daypos] + time = timeset[timepos] + except IndexError: + pass + else: + date = datetime.date.fromordinal(ii.yearordinal+i) + res = datetime.datetime.combine(date, time) + if res not in poslist: + poslist.append(res) + poslist.sort() + for res in poslist: + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + total += 1 + yield res + else: + for i in dayset[start:end]: + if i is not None: + date = datetime.date.fromordinal(ii.yearordinal + i) + for time in timeset: + res = datetime.datetime.combine(date, time) + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + + total += 1 + yield res + + # Handle frequency and interval + fixday = False + if freq == YEARLY: + year += interval + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == MONTHLY: + month += interval + if month > 12: + div, mod = divmod(month, 12) + month = mod + year += div + if month == 0: + month = 12 + year -= 1 + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == WEEKLY: + if wkst > weekday: + day += -(weekday+1+(6-wkst))+self._interval*7 + else: + day += -(weekday-wkst)+self._interval*7 + weekday = wkst + fixday = True + elif freq == DAILY: + day += interval + fixday = True + elif freq == HOURLY: + if filtered: + # Jump to one iteration before next day + hour += ((23-hour)//interval)*interval + + if byhour: + ndays, hour = self.__mod_distance(value=hour, + byxxx=self._byhour, + base=24) + else: + ndays, hour = divmod(hour+interval, 24) + + if ndays: + day += ndays + fixday = True + + timeset = gettimeset(hour, minute, second) + elif freq == MINUTELY: + if filtered: + # Jump to one iteration before next day + minute += ((1439-(hour*60+minute))//interval)*interval + + valid = False + rep_rate = (24*60) + for j in range(rep_rate // gcd(interval, rep_rate)): + if byminute: + nhours, minute = \ + self.__mod_distance(value=minute, + byxxx=self._byminute, + base=60) + else: + nhours, minute = divmod(minute+interval, 60) + + div, hour = divmod(hour+nhours, 24) + if div: + day += div + fixday = True + filtered = False + + if not byhour or hour in byhour: + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval and ' + + 'byhour resulting in empty rule.') + + timeset = gettimeset(hour, minute, second) + elif freq == SECONDLY: + if filtered: + # Jump to one iteration before next day + second += (((86399 - (hour * 3600 + minute * 60 + second)) + // interval) * interval) + + 
rep_rate = (24 * 3600) + valid = False + for j in range(0, rep_rate // gcd(interval, rep_rate)): + if bysecond: + nminutes, second = \ + self.__mod_distance(value=second, + byxxx=self._bysecond, + base=60) + else: + nminutes, second = divmod(second+interval, 60) + + div, minute = divmod(minute+nminutes, 60) + if div: + hour += div + div, hour = divmod(hour, 24) + if div: + day += div + fixday = True + + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval, ' + + 'byhour and byminute resulting in empty' + + ' rule.') + + timeset = gettimeset(hour, minute, second) + + if fixday and day > 28: + daysinmonth = calendar.monthrange(year, month)[1] + if day > daysinmonth: + while day > daysinmonth: + day -= daysinmonth + month += 1 + if month == 13: + month = 1 + year += 1 + if year > datetime.MAXYEAR: + self._len = total + return + daysinmonth = calendar.monthrange(year, month)[1] + ii.rebuild(year, month) + + def __construct_byset(self, start, byxxx, base): + """ + If a `BYXXX` sequence is passed to the constructor at the same level as + `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some + specifications which cannot be reached given some starting conditions. + + This occurs whenever the interval is not coprime with the base of a + given unit and the difference between the starting position and the + ending position is not coprime with the greatest common denominator + between the interval and the base. For example, with a FREQ of hourly + starting at 17:00 and an interval of 4, the only valid values for + BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not + coprime. + + :param start: + Specifies the starting position. + :param byxxx: + An iterable containing the list of allowed values. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + This does not preserve the type of the iterable, returning a set, since + the values should be unique and the order is irrelevant, this will + speed up later lookups. + + In the event of an empty set, raises a :exception:`ValueError`, as this + results in an empty rrule. + """ + + cset = set() + + # Support a single byxxx value. + if isinstance(byxxx, integer_types): + byxxx = (byxxx, ) + + for num in byxxx: + i_gcd = gcd(self._interval, base) + # Use divmod rather than % because we need to wrap negative nums. + if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: + cset.add(num) + + if len(cset) == 0: + raise ValueError("Invalid rrule byxxx generates an empty set.") + + return cset + + def __mod_distance(self, value, byxxx, base): + """ + Calculates the next value in a sequence where the `FREQ` parameter is + specified along with a `BYXXX` parameter at the same "level" + (e.g. `HOURLY` specified with `BYHOUR`). + + :param value: + The old value of the component. + :param byxxx: + The `BYXXX` set, which should have been generated by + `rrule._construct_byset`, or something else which checks that a + valid rule is present. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + If a valid value is not found after `base` iterations (the maximum + number before the sequence would start to repeat), this raises a + :exception:`ValueError`, as no valid values were found. 
+ + This returns a tuple of `divmod(n*interval, base)`, where `n` is the + smallest number of `interval` repetitions until the next specified + value in `byxxx` is found. + """ + accumulator = 0 + for ii in range(1, base + 1): + # Using divmod() over % to account for negative intervals + div, value = divmod(value + self._interval, base) + accumulator += div + if value in byxxx: + return (accumulator, value) + + +class _iterinfo(object): + __slots__ = ["rrule", "lastyear", "lastmonth", + "yearlen", "nextyearlen", "yearordinal", "yearweekday", + "mmask", "mrange", "mdaymask", "nmdaymask", + "wdaymask", "wnomask", "nwdaymask", "eastermask"] + + def __init__(self, rrule): + for attr in self.__slots__: + setattr(self, attr, None) + self.rrule = rrule + + def rebuild(self, year, month): + # Every mask is 7 days longer to handle cross-year weekly periods. + rr = self.rrule + if year != self.lastyear: + self.yearlen = 365 + calendar.isleap(year) + self.nextyearlen = 365 + calendar.isleap(year + 1) + firstyday = datetime.date(year, 1, 1) + self.yearordinal = firstyday.toordinal() + self.yearweekday = firstyday.weekday() + + wday = datetime.date(year, 1, 1).weekday() + if self.yearlen == 365: + self.mmask = M365MASK + self.mdaymask = MDAY365MASK + self.nmdaymask = NMDAY365MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M365RANGE + else: + self.mmask = M366MASK + self.mdaymask = MDAY366MASK + self.nmdaymask = NMDAY366MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M366RANGE + + if not rr._byweekno: + self.wnomask = None + else: + self.wnomask = [0]*(self.yearlen+7) + # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 + if no1wkst >= 4: + no1wkst = 0 + # Number of days in the year, plus the days we got + # from last year. + wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 + else: + # Number of days in the year, minus the days we + # left in last year. + wyearlen = self.yearlen-no1wkst + div, mod = divmod(wyearlen, 7) + numweeks = div+mod//4 + for n in rr._byweekno: + if n < 0: + n += numweeks+1 + if not (0 < n <= numweeks): + continue + if n > 1: + i = no1wkst+(n-1)*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + else: + i = no1wkst + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if 1 in rr._byweekno: + # Check week number 1 of next year as well + # TODO: Check -numweeks for next year. + i = no1wkst+numweeks*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + if i < self.yearlen: + # If week starts in next year, we + # don't care about it. + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if no1wkst: + # Check last week number of last year as + # well. If no1wkst is 0, either the year + # started on week start, or week number 1 + # got days from last year, so there are no + # days from last year's last week number in + # this year. 
+ if -1 not in rr._byweekno: + lyearweekday = datetime.date(year-1, 1, 1).weekday() + lno1wkst = (7-lyearweekday+rr._wkst) % 7 + lyearlen = 365+calendar.isleap(year-1) + if lno1wkst >= 4: + lno1wkst = 0 + lnumweeks = 52+(lyearlen + + (lyearweekday-rr._wkst) % 7) % 7//4 + else: + lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 + else: + lnumweeks = -1 + if lnumweeks in rr._byweekno: + for i in range(no1wkst): + self.wnomask[i] = 1 + + if (rr._bynweekday and (month != self.lastmonth or + year != self.lastyear)): + ranges = [] + if rr._freq == YEARLY: + if rr._bymonth: + for month in rr._bymonth: + ranges.append(self.mrange[month-1:month+1]) + else: + ranges = [(0, self.yearlen)] + elif rr._freq == MONTHLY: + ranges = [self.mrange[month-1:month+1]] + if ranges: + # Weekly frequency won't get here, so we may not + # care about cross-year weekly periods. + self.nwdaymask = [0]*self.yearlen + for first, last in ranges: + last -= 1 + for wday, n in rr._bynweekday: + if n < 0: + i = last+(n+1)*7 + i -= (self.wdaymask[i]-wday) % 7 + else: + i = first+(n-1)*7 + i += (7-self.wdaymask[i]+wday) % 7 + if first <= i <= last: + self.nwdaymask[i] = 1 + + if rr._byeaster: + self.eastermask = [0]*(self.yearlen+7) + eyday = easter.easter(year).toordinal()-self.yearordinal + for offset in rr._byeaster: + self.eastermask[eyday+offset] = 1 + + self.lastyear = year + self.lastmonth = month + + def ydayset(self, year, month, day): + return list(range(self.yearlen)), 0, self.yearlen + + def mdayset(self, year, month, day): + dset = [None]*self.yearlen + start, end = self.mrange[month-1:month+1] + for i in range(start, end): + dset[i] = i + return dset, start, end + + def wdayset(self, year, month, day): + # We need to handle cross-year weeks here. + dset = [None]*(self.yearlen+7) + i = datetime.date(year, month, day).toordinal()-self.yearordinal + start = i + for j in range(7): + dset[i] = i + i += 1 + # if (not (0 <= i < self.yearlen) or + # self.wdaymask[i] == self.rrule._wkst): + # This will cross the year boundary, if necessary. + if self.wdaymask[i] == self.rrule._wkst: + break + return dset, start, i + + def ddayset(self, year, month, day): + dset = [None] * self.yearlen + i = datetime.date(year, month, day).toordinal() - self.yearordinal + dset[i] = i + return dset, i, i + 1 + + def htimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for minute in rr._byminute: + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, + tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def mtimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def stimeset(self, hour, minute, second): + return (datetime.time(hour, minute, second, + tzinfo=self.rrule._tzinfo),) + + +class rruleset(rrulebase): + """ The rruleset type allows more complex recurrence setups, mixing + multiple rules, dates, exclusion rules, and exclusion dates. The type + constructor takes the following keyword arguments: + + :param cache: If True, caching of results will be enabled, improving + performance of multiple queries considerably. 
""" + + class _genitem(object): + def __init__(self, genlist, gen): + try: + self.dt = advance_iterator(gen) + genlist.append(self) + except StopIteration: + pass + self.genlist = genlist + self.gen = gen + + def __next__(self): + try: + self.dt = advance_iterator(self.gen) + except StopIteration: + if self.genlist[0] is self: + heapq.heappop(self.genlist) + else: + self.genlist.remove(self) + heapq.heapify(self.genlist) + + next = __next__ + + def __lt__(self, other): + return self.dt < other.dt + + def __gt__(self, other): + return self.dt > other.dt + + def __eq__(self, other): + return self.dt == other.dt + + def __ne__(self, other): + return self.dt != other.dt + + def __init__(self, cache=False): + super(rruleset, self).__init__(cache) + self._rrule = [] + self._rdate = [] + self._exrule = [] + self._exdate = [] + + @_invalidates_cache + def rrule(self, rrule): + """ Include the given :py:class:`rrule` instance in the recurrence set + generation. """ + self._rrule.append(rrule) + + @_invalidates_cache + def rdate(self, rdate): + """ Include the given :py:class:`datetime` instance in the recurrence + set generation. """ + self._rdate.append(rdate) + + @_invalidates_cache + def exrule(self, exrule): + """ Include the given rrule instance in the recurrence set exclusion + list. Dates which are part of the given recurrence rules will not + be generated, even if some inclusive rrule or rdate matches them. + """ + self._exrule.append(exrule) + + @_invalidates_cache + def exdate(self, exdate): + """ Include the given datetime instance in the recurrence set + exclusion list. Dates included that way will not be generated, + even if some inclusive rrule or rdate matches them. """ + self._exdate.append(exdate) + + def _iter(self): + rlist = [] + self._rdate.sort() + self._genitem(rlist, iter(self._rdate)) + for gen in [iter(x) for x in self._rrule]: + self._genitem(rlist, gen) + exlist = [] + self._exdate.sort() + self._genitem(exlist, iter(self._exdate)) + for gen in [iter(x) for x in self._exrule]: + self._genitem(exlist, gen) + lastdt = None + total = 0 + heapq.heapify(rlist) + heapq.heapify(exlist) + while rlist: + ritem = rlist[0] + if not lastdt or lastdt != ritem.dt: + while exlist and exlist[0] < ritem: + exitem = exlist[0] + advance_iterator(exitem) + if exlist and exlist[0] is exitem: + heapq.heapreplace(exlist, exitem) + if not exlist or ritem != exlist[0]: + total += 1 + yield ritem.dt + lastdt = ritem.dt + advance_iterator(ritem) + if rlist and rlist[0] is ritem: + heapq.heapreplace(rlist, ritem) + self._len = total + + + + +class _rrulestr(object): + """ Parses a string representation of a recurrence rule or set of + recurrence rules. + + :param s: + Required, a string defining one or more recurrence rules. + + :param dtstart: + If given, used as the default recurrence start if not specified in the + rule string. + + :param cache: + If set ``True`` caching of results will be enabled, improving + performance of multiple queries considerably. + + :param unfold: + If set ``True`` indicates that a rule string is split over more + than one line and should be joined before processing. + + :param forceset: + If set ``True`` forces a :class:`dateutil.rrule.rruleset` to + be returned. + + :param compatible: + If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime.datetime` object is returned. 
+ + :param tzids: + If given, a callable or mapping used to retrieve a + :class:`datetime.tzinfo` from a string representation. + Defaults to :func:`dateutil.tz.gettz`. + + :param tzinfos: + Additional time zone names / aliases which may be present in a string + representation. See :func:`dateutil.parser.parse` for more + information. + + :return: + Returns a :class:`dateutil.rrule.rruleset` or + :class:`dateutil.rrule.rrule` + """ + + _freq_map = {"YEARLY": YEARLY, + "MONTHLY": MONTHLY, + "WEEKLY": WEEKLY, + "DAILY": DAILY, + "HOURLY": HOURLY, + "MINUTELY": MINUTELY, + "SECONDLY": SECONDLY} + + _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, + "FR": 4, "SA": 5, "SU": 6} + + def _handle_int(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = int(value) + + def _handle_int_list(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = [int(x) for x in value.split(',')] + + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list + _handle_BYMONTHDAY = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list + + def _handle_FREQ(self, rrkwargs, name, value, **kwargs): + rrkwargs["freq"] = self._freq_map[value] + + def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): + global parser + if not parser: + from dateutil import parser + try: + rrkwargs["until"] = parser.parse(value, + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) + except ValueError: + raise ValueError("invalid until date") + + def _handle_WKST(self, rrkwargs, name, value, **kwargs): + rrkwargs["wkst"] = self._weekday_map[value] + + def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): + """ + Two ways to specify this: +1MO or MO(+1) + """ + l = [] + for wday in value.split(','): + if '(' in wday: + # If it's of the form TH(+1), etc. + splt = wday.split('(') + w = splt[0] + n = int(splt[1][:-1]) + elif len(wday): + # If it's of the form +1MO + for i in range(len(wday)): + if wday[i] not in '+-0123456789': + break + n = wday[:i] or None + w = wday[i:] + if n: + n = int(n) + else: + raise ValueError("Invalid (empty) BYDAY specification.") + + l.append(weekdays[self._weekday_map[w]](n)) + rrkwargs["byweekday"] = l + + _handle_BYDAY = _handle_BYWEEKDAY + + def _parse_rfc_rrule(self, line, + dtstart=None, + cache=False, + ignoretz=False, + tzinfos=None): + if line.find(':') != -1: + name, value = line.split(':') + if name != "RRULE": + raise ValueError("unknown parameter name") + else: + value = line + rrkwargs = {} + for pair in value.split(';'): + name, value = pair.split('=') + name = name.upper() + value = value.upper() + try: + getattr(self, "_handle_"+name)(rrkwargs, name, value, + ignoretz=ignoretz, + tzinfos=tzinfos) + except AttributeError: + raise ValueError("unknown parameter '%s'" % name) + except (KeyError, ValueError): + raise ValueError("invalid '%s': %s" % (name, value)) + return rrule(dtstart=dtstart, cache=cache, **rrkwargs) + + def _parse_date_value(self, date_value, parms, rule_tzids, + ignoretz, tzids, tzinfos): + global parser + if not parser: + from dateutil import parser + + datevals = [] + value_found = False + TZID = None + + for parm in parms: + if parm.startswith("TZID="): + try: + tzkey = rule_tzids[parm.split('TZID=')[-1]] + except KeyError: + continue + if tzids is None: + from . 
import tz + tzlookup = tz.gettz + elif callable(tzids): + tzlookup = tzids + else: + tzlookup = getattr(tzids, 'get', None) + if tzlookup is None: + msg = ('tzids must be a callable, mapping, or None, ' + 'not %s' % tzids) + raise ValueError(msg) + + TZID = tzlookup(tzkey) + continue + + # RFC 5445 3.8.2.4: The VALUE parameter is optional, but may be found + # only once. + if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}: + raise ValueError("unsupported parm: " + parm) + else: + if value_found: + msg = ("Duplicate value parameter found in: " + parm) + raise ValueError(msg) + value_found = True + + for datestr in date_value.split(','): + date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos) + if TZID is not None: + if date.tzinfo is None: + date = date.replace(tzinfo=TZID) + else: + raise ValueError('DTSTART/EXDATE specifies multiple timezone') + datevals.append(date) + + return datevals + + def _parse_rfc(self, s, + dtstart=None, + cache=False, + unfold=False, + forceset=False, + compatible=False, + ignoretz=False, + tzids=None, + tzinfos=None): + global parser + if compatible: + forceset = True + unfold = True + + TZID_NAMES = dict(map( + lambda x: (x.upper(), x), + re.findall('TZID=(?P<name>[^:]+):', s) + )) + s = s.upper() + if not s.strip(): + raise ValueError("empty string") + if unfold: + lines = s.splitlines() + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + else: + lines = s.split() + if (not forceset and len(lines) == 1 and (s.find(':') == -1 or + s.startswith('RRULE:'))): + return self._parse_rfc_rrule(lines[0], cache=cache, + dtstart=dtstart, ignoretz=ignoretz, + tzinfos=tzinfos) + else: + rrulevals = [] + rdatevals = [] + exrulevals = [] + exdatevals = [] + for line in lines: + if not line: + continue + if line.find(':') == -1: + name = "RRULE" + value = line + else: + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0] + parms = parms[1:] + if name == "RRULE": + for parm in parms: + raise ValueError("unsupported RRULE parm: "+parm) + rrulevals.append(value) + elif name == "RDATE": + for parm in parms: + if parm != "VALUE=DATE-TIME": + raise ValueError("unsupported RDATE parm: "+parm) + rdatevals.append(value) + elif name == "EXRULE": + for parm in parms: + raise ValueError("unsupported EXRULE parm: "+parm) + exrulevals.append(value) + elif name == "EXDATE": + exdatevals.extend( + self._parse_date_value(value, parms, + TZID_NAMES, ignoretz, + tzids, tzinfos) + ) + elif name == "DTSTART": + dtvals = self._parse_date_value(value, parms, TZID_NAMES, + ignoretz, tzids, tzinfos) + if len(dtvals) != 1: + raise ValueError("Multiple DTSTART values specified:" + + value) + dtstart = dtvals[0] + else: + raise ValueError("unsupported property: "+name) + if (forceset or len(rrulevals) > 1 or rdatevals + or exrulevals or exdatevals): + if not parser and (rdatevals or exdatevals): + from dateutil import parser + rset = rruleset(cache=cache) + for value in rrulevals: + rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in rdatevals: + for datestr in value.split(','): + rset.rdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exrulevals: + rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exdatevals: + 
rset.exdate(value) + if compatible and dtstart: + rset.rdate(dtstart) + return rset + else: + return self._parse_rfc_rrule(rrulevals[0], + dtstart=dtstart, + cache=cache, + ignoretz=ignoretz, + tzinfos=tzinfos) + + def __call__(self, s, **kwargs): + return self._parse_rfc(s, **kwargs) + + +rrulestr = _rrulestr() + +# vim:ts=4:sw=4:et diff --git a/venv/Lib/site-packages/dateutil/tz/__init__.py b/venv/Lib/site-packages/dateutil/tz/__init__.py new file mode 100644 index 00000000..af1352c4 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/tz/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +from .tz import * +from .tz import __doc__ + +__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", + "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", + "enfold", "datetime_ambiguous", "datetime_exists", + "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] + + +class DeprecatedTzFormatWarning(Warning): + """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..b41ac0b1 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/tz/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-36.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-36.pyc new file mode 100644 index 00000000..46355b1a Binary files /dev/null and b/venv/Lib/site-packages/dateutil/tz/__pycache__/_common.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-36.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-36.pyc new file mode 100644 index 00000000..29bc34d4 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/tz/__pycache__/_factories.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-36.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-36.pyc new file mode 100644 index 00000000..ebc661e6 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/tz/__pycache__/tz.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-36.pyc b/venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-36.pyc new file mode 100644 index 00000000..4f352c3c Binary files /dev/null and b/venv/Lib/site-packages/dateutil/tz/__pycache__/win.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/tz/_common.py b/venv/Lib/site-packages/dateutil/tz/_common.py new file mode 100644 index 00000000..e6ac1183 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/tz/_common.py @@ -0,0 +1,419 @@ +from six import PY2 + +from functools import wraps + +from datetime import datetime, timedelta, tzinfo + + +ZERO = timedelta(0) + +__all__ = ['tzname_in_python2', 'enfold'] + + +def tzname_in_python2(namefunc): + """Change unicode output into bytestrings in Python 2 + + tzname() API changed in Python 3. 
It used to return bytes, but was changed + to unicode strings + """ + if PY2: + @wraps(namefunc) + def adjust_encoding(*args, **kwargs): + name = namefunc(*args, **kwargs) + if name is not None: + name = name.encode() + + return name + + return adjust_encoding + else: + return namefunc + + +# The following is adapted from Alexander Belopolsky's tz library +# https://github.com/abalkin/tz +if hasattr(datetime, 'fold'): + # This is the pre-python 3.6 fold situation + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + return dt.replace(fold=fold) + +else: + class _DatetimeWithFold(datetime): + """ + This is a class designed to provide a PEP 495-compliant interface for + Python versions before 3.6. It is used only for dates in a fold, so + the ``fold`` attribute is fixed at ``1``. + + .. versionadded:: 2.6.0 + """ + __slots__ = () + + def replace(self, *args, **kwargs): + """ + Return a datetime with the same attributes, except for those + attributes given new values by whichever keyword arguments are + specified. Note that tzinfo=None can be specified to create a naive + datetime from an aware datetime with no conversion of date and time + data. + + This is reimplemented in ``_DatetimeWithFold`` because pypy3 will + return a ``datetime.datetime`` even if ``fold`` is unchanged. + """ + argnames = ( + 'year', 'month', 'day', 'hour', 'minute', 'second', + 'microsecond', 'tzinfo' + ) + + for arg, argname in zip(args, argnames): + if argname in kwargs: + raise TypeError('Duplicate argument: {}'.format(argname)) + + kwargs[argname] = arg + + for argname in argnames: + if argname not in kwargs: + kwargs[argname] = getattr(self, argname) + + dt_class = self.__class__ if kwargs.get('fold', 1) else datetime + + return dt_class(**kwargs) + + @property + def fold(self): + return 1 + + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + if getattr(dt, 'fold', 0) == fold: + return dt + + args = dt.timetuple()[:6] + args += (dt.microsecond, dt.tzinfo) + + if fold: + return _DatetimeWithFold(*args) + else: + return datetime(*args) + + +def _validate_fromutc_inputs(f): + """ + The CPython version of ``fromutc`` checks that the input is a ``datetime`` + object and that ``self`` is attached as its ``tzinfo``. 
+ """ + @wraps(f) + def fromutc(self, dt): + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + return f(self, dt) + + return fromutc + + +class _tzinfo(tzinfo): + """ + Base class for all ``dateutil`` ``tzinfo`` objects. + """ + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + + dt = dt.replace(tzinfo=self) + + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) + + return same_dt and not same_offset + + def _fold_status(self, dt_utc, dt_wall): + """ + Determine the fold status of a "wall" datetime, given a representation + of the same datetime as a (naive) UTC datetime. This is calculated based + on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all + datetimes, and that this offset is the actual number of hours separating + ``dt_utc`` and ``dt_wall``. + + :param dt_utc: + Representation of the datetime as UTC + + :param dt_wall: + Representation of the datetime as "wall time". This parameter must + either have a `fold` attribute or have a fold-naive + :class:`datetime.tzinfo` attached, otherwise the calculation may + fail. + """ + if self.is_ambiguous(dt_wall): + delta_wall = dt_wall - dt_utc + _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) + else: + _fold = 0 + + return _fold + + def _fold(self, dt): + return getattr(dt, 'fold', 0) + + def _fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + + # Re-implement the algorithm from Python's datetime.py + dtoff = dt.utcoffset() + if dtoff is None: + raise ValueError("fromutc() requires a non-None utcoffset() " + "result") + + # The original datetime.py code assumes that `dst()` defaults to + # zero during ambiguous times. PEP 495 inverts this presumption, so + # for pre-PEP 495 versions of python, we need to tweak the algorithm. + dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc() requires a non-None dst() result") + delta = dtoff - dtdst + + dt += delta + # Set fold=1 so we can default to being in the fold for + # ambiguous dates. + dtdst = enfold(dt, fold=1).dst() + if dtdst is None: + raise ValueError("fromutc(): dt.dst gave inconsistent " + "results; cannot convert") + return dt + dtdst + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). 
+ + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + dt_wall = self._fromutc(dt) + + # Calculate the fold status given the two datetimes. + _fold = self._fold_status(dt, dt_wall) + + # Set the default fold value for ambiguous dates + return enfold(dt_wall, fold=_fold) + + +class tzrangebase(_tzinfo): + """ + This is an abstract base class for time zones represented by an annual + transition into and out of DST. Child classes should implement the following + methods: + + * ``__init__(self, *args, **kwargs)`` + * ``transitions(self, year)`` - this is expected to return a tuple of + datetimes representing the DST on and off transitions in standard + time. + + A fully initialized ``tzrangebase`` subclass should also provide the + following attributes: + * ``hasdst``: Boolean whether or not the zone uses DST. + * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects + representing the respective UTC offsets. + * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short + abbreviations in DST and STD, respectively. + * ``_hasdst``: Whether or not the zone has DST. + + .. versionadded:: 2.6.0 + """ + def __init__(self): + raise NotImplementedError('tzrangebase is an abstract base class') + + def utcoffset(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_base_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + if self._isdst(dt): + return self._dst_abbr + else: + return self._std_abbr + + def fromutc(self, dt): + """ Given a datetime in UTC, return local time """ + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # Get transitions - if there are none, fixed offset + transitions = self.transitions(dt.year) + if transitions is None: + return dt + self.utcoffset(dt) + + # Get the transition times in UTC + dston, dstoff = transitions + + dston -= self._std_offset + dstoff -= self._std_offset + + utc_transitions = (dston, dstoff) + dt_utc = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt_utc, utc_transitions) + + if isdst: + dt_wall = dt + self._dst_offset + else: + dt_wall = dt + self._std_offset + + _fold = int(not isdst and self.is_ambiguous(dt_wall)) + + return enfold(dt_wall, fold=_fold) + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. 
versionadded:: 2.6.0 + """ + if not self.hasdst: + return False + + start, end = self.transitions(dt.year) + + dt = dt.replace(tzinfo=None) + return (end <= dt < end + self._dst_base_offset) + + def _isdst(self, dt): + if not self.hasdst: + return False + elif dt is None: + return None + + transitions = self.transitions(dt.year) + + if transitions is None: + return False + + dt = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt, transitions) + + # Handle ambiguous dates + if not isdst and self.is_ambiguous(dt): + return not self._fold(dt) + else: + return isdst + + def _naive_isdst(self, dt, transitions): + dston, dstoff = transitions + + dt = dt.replace(tzinfo=None) + + if dston < dstoff: + isdst = dston <= dt < dstoff + else: + isdst = not dstoff <= dt < dston + + return isdst + + @property + def _dst_base_offset(self): + return self._dst_offset - self._std_offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(...)" % self.__class__.__name__ + + __reduce__ = object.__reduce__ diff --git a/venv/Lib/site-packages/dateutil/tz/_factories.py b/venv/Lib/site-packages/dateutil/tz/_factories.py new file mode 100644 index 00000000..f8a65891 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/tz/_factories.py @@ -0,0 +1,80 @@ +from datetime import timedelta +import weakref +from collections import OrderedDict + +from six.moves import _thread + + +class _TzSingleton(type): + def __init__(cls, *args, **kwargs): + cls.__instance = None + super(_TzSingleton, cls).__init__(*args, **kwargs) + + def __call__(cls): + if cls.__instance is None: + cls.__instance = super(_TzSingleton, cls).__call__() + return cls.__instance + + +class _TzFactory(type): + def instance(cls, *args, **kwargs): + """Alternate constructor that returns a fresh instance""" + return type.__call__(cls, *args, **kwargs) + + +class _TzOffsetFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls._cache_lock = _thread.allocate_lock() + + def __call__(cls, name, offset): + if isinstance(offset, timedelta): + key = (name, offset.total_seconds()) + else: + key = (name, offset) + + instance = cls.__instances.get(key, None) + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(name, offset)) + + # This lock may not be necessary in Python 3. See GH issue #901 + with cls._cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + + +class _TzStrFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls.__cache_lock = _thread.allocate_lock() + + def __call__(cls, s, posix_offset=False): + key = (s, posix_offset) + instance = cls.__instances.get(key, None) + + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(s, posix_offset)) + + # This lock may not be necessary in Python 3. 
See GH issue #901 + with cls.__cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + diff --git a/venv/Lib/site-packages/dateutil/tz/tz.py b/venv/Lib/site-packages/dateutil/tz/tz.py new file mode 100644 index 00000000..af81e88e --- /dev/null +++ b/venv/Lib/site-packages/dateutil/tz/tz.py @@ -0,0 +1,1849 @@ +# -*- coding: utf-8 -*- +""" +This module offers timezone implementations subclassing the abstract +:py:class:`datetime.tzinfo` type. There are classes to handle tzfile format +files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, +etc), TZ environment string (in all known formats), given ranges (with help +from relative deltas), local machine timezone, fixed offset timezone, and UTC +timezone. +""" +import datetime +import struct +import time +import sys +import os +import bisect +import weakref +from collections import OrderedDict + +import six +from six import string_types +from six.moves import _thread +from ._common import tzname_in_python2, _tzinfo +from ._common import tzrangebase, enfold +from ._common import _validate_fromutc_inputs + +from ._factories import _TzSingleton, _TzOffsetFactory +from ._factories import _TzStrFactory +try: + from .win import tzwin, tzwinlocal +except ImportError: + tzwin = tzwinlocal = None + +# For warning about rounding tzinfo +from warnings import warn + +ZERO = datetime.timedelta(0) +EPOCH = datetime.datetime.utcfromtimestamp(0) +EPOCHORDINAL = EPOCH.toordinal() + + +@six.add_metaclass(_TzSingleton) +class tzutc(datetime.tzinfo): + """ + This is a tzinfo object that represents the UTC time zone. + + **Examples:** + + .. doctest:: + + >>> from datetime import * + >>> from dateutil.tz import * + + >>> datetime.now() + datetime.datetime(2003, 9, 27, 9, 40, 1, 521290) + + >>> datetime.now(tzutc()) + datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc()) + + >>> datetime.now(tzutc()).tzname() + 'UTC' + + .. versionchanged:: 2.7.0 + ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will + always return the same object. + + .. doctest:: + + >>> from dateutil.tz import tzutc, UTC + >>> tzutc() is tzutc() + True + >>> tzutc() is UTC + True + """ + def utcoffset(self, dt): + return ZERO + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return "UTC" + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Fast track version of fromutc() returns the original ``dt`` object for + any valid :py:class:`datetime.datetime` object. + """ + return dt + + def __eq__(self, other): + if not isinstance(other, (tzutc, tzoffset)): + return NotImplemented + + return (isinstance(other, tzutc) or + (isinstance(other, tzoffset) and other._offset == ZERO)) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +#: Convenience constant providing a :class:`tzutc()` instance +#: +#: .. 
versionadded:: 2.7.0 +UTC = tzutc() + + +@six.add_metaclass(_TzOffsetFactory) +class tzoffset(datetime.tzinfo): + """ + A simple class for representing a fixed offset from UTC. + + :param name: + The timezone name, to be returned when ``tzname()`` is called. + :param offset: + The time zone offset in seconds, or (since version 2.6.0, represented + as a :py:class:`datetime.timedelta` object). + """ + def __init__(self, name, offset): + self._name = name + + try: + # Allow a timedelta + offset = offset.total_seconds() + except (TypeError, AttributeError): + pass + + self._offset = datetime.timedelta(seconds=_get_supported_offset(offset)) + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._name + + @_validate_fromutc_inputs + def fromutc(self, dt): + return dt + self._offset + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + def __eq__(self, other): + if not isinstance(other, tzoffset): + return NotImplemented + + return self._offset == other._offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s, %s)" % (self.__class__.__name__, + repr(self._name), + int(self._offset.total_seconds())) + + __reduce__ = object.__reduce__ + + +class tzlocal(_tzinfo): + """ + A :class:`tzinfo` subclass built around the ``time`` timezone functions. + """ + def __init__(self): + super(tzlocal, self).__init__() + + self._std_offset = datetime.timedelta(seconds=-time.timezone) + if time.daylight: + self._dst_offset = datetime.timedelta(seconds=-time.altzone) + else: + self._dst_offset = self._std_offset + + self._dst_saved = self._dst_offset - self._std_offset + self._hasdst = bool(self._dst_saved) + self._tznames = tuple(time.tzname) + + def utcoffset(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset - self._std_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._tznames[self._isdst(dt)] + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + naive_dst = self._naive_is_dst(dt) + return (not naive_dst and + (naive_dst != self._naive_is_dst(dt - self._dst_saved))) + + def _naive_is_dst(self, dt): + timestamp = _datetime_to_timestamp(dt) + return time.localtime(timestamp + time.timezone).tm_isdst + + def _isdst(self, dt, fold_naive=True): + # We can't use mktime here. It is unstable when deciding if + # the hour near to a change is DST or not. 
+ # + # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, + # dt.minute, dt.second, dt.weekday(), 0, -1)) + # return time.localtime(timestamp).tm_isdst + # + # The code above yields the following result: + # + # >>> import tz, datetime + # >>> t = tz.tzlocal() + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # + # Here is a more stable implementation: + # + if not self._hasdst: + return False + + # Check for ambiguous times: + dstval = self._naive_is_dst(dt) + fold = getattr(dt, 'fold', None) + + if self.is_ambiguous(dt): + if fold is not None: + return not self._fold(dt) + else: + return True + + return dstval + + def __eq__(self, other): + if isinstance(other, tzlocal): + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset) + elif isinstance(other, tzutc): + return (not self._hasdst and + self._tznames[0] in {'UTC', 'GMT'} and + self._std_offset == ZERO) + elif isinstance(other, tzoffset): + return (not self._hasdst and + self._tznames[0] == other._name and + self._std_offset == other._offset) + else: + return NotImplemented + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +class _ttinfo(object): + __slots__ = ["offset", "delta", "isdst", "abbr", + "isstd", "isgmt", "dstoffset"] + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def __repr__(self): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + + def __eq__(self, other): + if not isinstance(other, _ttinfo): + return NotImplemented + + return (self.offset == other.offset and + self.delta == other.delta and + self.isdst == other.isdst and + self.abbr == other.abbr and + self.isstd == other.isstd and + self.isgmt == other.isgmt and + self.dstoffset == other.dstoffset) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + state = {} + for name in self.__slots__: + state[name] = getattr(self, name, None) + return state + + def __setstate__(self, state): + for name in self.__slots__: + if name in state: + setattr(self, name, state[name]) + + +class _tzfile(object): + """ + Lightweight class for holding the relevant transition and time zone + information read from binary tzfiles. + """ + attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', + 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] + + def __init__(self, **kwargs): + for attr in self.attrs: + setattr(self, attr, kwargs.get(attr, None)) + + +class tzfile(_tzinfo): + """ + This is a ``tzinfo`` subclass that allows one to use the ``tzfile(5)`` + format timezone files to extract current and historical zone information. + + :param fileobj: + This can be an opened file stream or a file name that the time zone + information can be read from. + + :param filename: + This is an optional parameter specifying the source of the time zone + information in the event that ``fileobj`` is a file object. 
If omitted + and ``fileobj`` is a file stream, this parameter will be set either to + ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``. + + See `Sources for Time Zone and Daylight Saving Time Data + <https://data.iana.org/time-zones/tz-link.html>`_ for more information. + Time zone files can be compiled from the `IANA Time Zone database files + <https://www.iana.org/time-zones>`_ with the `zic time zone compiler + <https://www.freebsd.org/cgi/man.cgi?query=zic&sektion=8>`_ + + .. note:: + + Only construct a ``tzfile`` directly if you have a specific timezone + file on disk that you want to read into a Python ``tzinfo`` object. + If you want to get a ``tzfile`` representing a specific IANA zone, + (e.g. ``'America/New_York'``), you should call + :func:`dateutil.tz.gettz` with the zone identifier. + + + **Examples:** + + Using the US Eastern time zone as an example, we can see that a ``tzfile`` + provides time zone information for the standard Daylight Saving offsets: + + .. testsetup:: tzfile + + from dateutil.tz import gettz + from datetime import datetime + + .. doctest:: tzfile + + >>> NYC = gettz('America/New_York') + >>> NYC + tzfile('/usr/share/zoneinfo/America/New_York') + + >>> print(datetime(2016, 1, 3, tzinfo=NYC)) # EST + 2016-01-03 00:00:00-05:00 + + >>> print(datetime(2016, 7, 7, tzinfo=NYC)) # EDT + 2016-07-07 00:00:00-04:00 + + + The ``tzfile`` structure contains a fully history of the time zone, + so historical dates will also have the right offsets. For example, before + the adoption of the UTC standards, New York used local solar mean time: + + .. doctest:: tzfile + + >>> print(datetime(1901, 4, 12, tzinfo=NYC)) # LMT + 1901-04-12 00:00:00-04:56 + + And during World War II, New York was on "Eastern War Time", which was a + state of permanent daylight saving time: + + .. doctest:: tzfile + + >>> print(datetime(1944, 2, 7, tzinfo=NYC)) # EWT + 1944-02-07 00:00:00-04:00 + + """ + + def __init__(self, fileobj, filename=None): + super(tzfile, self).__init__() + + file_opened_here = False + if isinstance(fileobj, string_types): + self._filename = fileobj + fileobj = open(fileobj, 'rb') + file_opened_here = True + elif filename is not None: + self._filename = filename + elif hasattr(fileobj, "name"): + self._filename = fileobj.name + else: + self._filename = repr(fileobj) + + if fileobj is not None: + if not file_opened_here: + fileobj = _nullcontext(fileobj) + + with fileobj as file_stream: + tzobj = self._read_tzfile(file_stream) + + self._set_tzdata(tzobj) + + def _set_tzdata(self, tzobj): + """ Set the time zone data of this object from a _tzfile object """ + # Copy the relevant attributes over as private attributes + for attr in _tzfile.attrs: + setattr(self, '_' + attr, getattr(tzobj, attr)) + + def _read_tzfile(self, fileobj): + out = _tzfile() + + # From tzfile(5): + # + # The time zone information files used by tzset(3) + # begin with the magic characters "TZif" to identify + # them as time zone information files, followed by + # sixteen bytes reserved for future use, followed by + # six four-byte values of type long, written in a + # ``standard'' byte order (the high-order byte + # of the value is written first). + if fileobj.read(4).decode() != "TZif": + raise ValueError("magic not found") + + fileobj.read(16) + + ( + # The number of UTC/local indicators stored in the file. + ttisgmtcnt, + + # The number of standard/wall indicators stored in the file. + ttisstdcnt, + + # The number of leap seconds for which data is + # stored in the file. 
+ leapcnt, + + # The number of "transition times" for which data + # is stored in the file. + timecnt, + + # The number of "local time types" for which data + # is stored in the file (must not be zero). + typecnt, + + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, + + ) = struct.unpack(">6l", fileobj.read(24)) + + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. + + if timecnt: + out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4))) + else: + out.trans_list_utc = [] + + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. + + if timecnt: + out.trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + out.trans_idx = [] + + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. + + ttinfo = [] + + for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + + abbr = fileobj.read(charcnt).decode() + + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. + # The pairs of values are sorted in ascending order + # by time. + + # Not used, for now (but seek for correct file position) + if leapcnt: + fileobj.seek(leapcnt * 8, os.SEEK_CUR) + + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. + + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) + + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. 
+ + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # Build ttinfo list + out.ttinfo_list = [] + for i in range(typecnt): + gmtoff, isdst, abbrind = ttinfo[i] + gmtoff = _get_supported_offset(gmtoff) + tti = _ttinfo() + tti.offset = gmtoff + tti.dstoffset = datetime.timedelta(0) + tti.delta = datetime.timedelta(seconds=gmtoff) + tti.isdst = isdst + tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] + tti.isstd = (ttisstdcnt > i and isstd[i] != 0) + tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) + out.ttinfo_list.append(tti) + + # Replace ttinfo indexes for ttinfo objects. + out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] + + # Set standard, dst, and before ttinfos. before will be + # used when a given time is before any transitions, + # and will be set to the first non-dst ttinfo, or to + # the first dst, if all of them are dst. + out.ttinfo_std = None + out.ttinfo_dst = None + out.ttinfo_before = None + if out.ttinfo_list: + if not out.trans_list_utc: + out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] + else: + for i in range(timecnt-1, -1, -1): + tti = out.trans_idx[i] + if not out.ttinfo_std and not tti.isdst: + out.ttinfo_std = tti + elif not out.ttinfo_dst and tti.isdst: + out.ttinfo_dst = tti + + if out.ttinfo_std and out.ttinfo_dst: + break + else: + if out.ttinfo_dst and not out.ttinfo_std: + out.ttinfo_std = out.ttinfo_dst + + for tti in out.ttinfo_list: + if not tti.isdst: + out.ttinfo_before = tti + break + else: + out.ttinfo_before = out.ttinfo_list[0] + + # Now fix transition times to become relative to wall time. + # + # I'm not sure about this. In my tests, the tz source file + # is setup to wall time, and in the binary file isstd and + # isgmt are off, so it should be in wall time. OTOH, it's + # always in gmt time. Let me know if you have comments + # about this. + lastdst = None + lastoffset = None + lastdstoffset = None + lastbaseoffset = None + out.trans_list = [] + + for i, tti in enumerate(out.trans_idx): + offset = tti.offset + dstoffset = 0 + + if lastdst is not None: + if tti.isdst: + if not lastdst: + dstoffset = offset - lastoffset + + if not dstoffset and lastdstoffset: + dstoffset = lastdstoffset + + tti.dstoffset = datetime.timedelta(seconds=dstoffset) + lastdstoffset = dstoffset + + # If a time zone changes its base offset during a DST transition, + # then you need to adjust by the previous base offset to get the + # transition time in local time. Otherwise you use the current + # base offset. Ideally, I would have some mathematical proof of + # why this is true, but I haven't really thought about it enough. + baseoffset = offset - dstoffset + adjustment = baseoffset + if (lastbaseoffset is not None and baseoffset != lastbaseoffset + and tti.isdst != lastdst): + # The base DST has changed + adjustment = lastbaseoffset + + lastdst = tti.isdst + lastoffset = offset + lastbaseoffset = baseoffset + + out.trans_list.append(out.trans_list_utc[i] + adjustment) + + out.trans_idx = tuple(out.trans_idx) + out.trans_list = tuple(out.trans_list) + out.trans_list_utc = tuple(out.trans_list_utc) + + return out + + def _find_last_transition(self, dt, in_utc=False): + # If there's no list, there are no transitions to find + if not self._trans_list: + return None + + timestamp = _datetime_to_timestamp(dt) + + # Find where the timestamp fits in the transition list - if the + # timestamp is a transition time, it's part of the "after" period. 
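+        # bisect_right places a timestamp equal to a transition time to the
+        # right of it, so such a timestamp resolves to the new (post-
+        # transition) ttinfo once 1 is subtracted below.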
+ trans_list = self._trans_list_utc if in_utc else self._trans_list + idx = bisect.bisect_right(trans_list, timestamp) + + # We want to know when the previous transition was, so subtract off 1 + return idx - 1 + + def _get_ttinfo(self, idx): + # For no list or after the last transition, default to _ttinfo_std + if idx is None or (idx + 1) >= len(self._trans_list): + return self._ttinfo_std + + # If there is a list and the time is before it, return _ttinfo_before + if idx < 0: + return self._ttinfo_before + + return self._trans_idx[idx] + + def _find_ttinfo(self, dt): + idx = self._resolve_ambiguous_time(dt) + + return self._get_ttinfo(idx) + + def fromutc(self, dt): + """ + The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. + + :param dt: + A :py:class:`datetime.datetime` object. + + :raises TypeError: + Raised if ``dt`` is not a :py:class:`datetime.datetime` object. + + :raises ValueError: + Raised if this is called with a ``dt`` which does not have this + ``tzinfo`` attached. + + :return: + Returns a :py:class:`datetime.datetime` object representing the + wall time in ``self``'s time zone. + """ + # These isinstance checks are in datetime.tzinfo, so we'll preserve + # them, even if we don't care about duck typing. + if not isinstance(dt, datetime.datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # First treat UTC as wall time and get the transition we're in. + idx = self._find_last_transition(dt, in_utc=True) + tti = self._get_ttinfo(idx) + + dt_out = dt + datetime.timedelta(seconds=tti.offset) + + fold = self.is_ambiguous(dt_out, idx=idx) + + return enfold(dt_out, fold=int(fold)) + + def is_ambiguous(self, dt, idx=None): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + if idx is None: + idx = self._find_last_transition(dt) + + # Calculate the difference in offsets from current to previous + timestamp = _datetime_to_timestamp(dt) + tti = self._get_ttinfo(idx) + + if idx is None or idx <= 0: + return False + + od = self._get_ttinfo(idx - 1).offset - tti.offset + tt = self._trans_list[idx] # Transition time + + return timestamp < tt + od + + def _resolve_ambiguous_time(self, dt): + idx = self._find_last_transition(dt) + + # If we have no transitions, return the index + _fold = self._fold(dt) + if idx is None or idx == 0: + return idx + + # If it's ambiguous and we're in a fold, shift to a different index. + idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) + + return idx - idx_offset + + def utcoffset(self, dt): + if dt is None: + return None + + if not self._ttinfo_std: + return ZERO + + return self._find_ttinfo(dt).delta + + def dst(self, dt): + if dt is None: + return None + + if not self._ttinfo_dst: + return ZERO + + tti = self._find_ttinfo(dt) + + if not tti.isdst: + return ZERO + + # The documentation says that utcoffset()-dst() must + # be constant for every dt. 
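+        # tti.dstoffset was computed while reading the tzfile as the
+        # difference between this DST offset and the preceding standard
+        # offset, so it can be returned directly here.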
+ return tti.dstoffset + + @tzname_in_python2 + def tzname(self, dt): + if not self._ttinfo_std or dt is None: + return None + return self._find_ttinfo(dt).abbr + + def __eq__(self, other): + if not isinstance(other, tzfile): + return NotImplemented + return (self._trans_list == other._trans_list and + self._trans_idx == other._trans_idx and + self._ttinfo_list == other._ttinfo_list) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) + + def __reduce__(self): + return self.__reduce_ex__(None) + + def __reduce_ex__(self, protocol): + return (self.__class__, (None, self._filename), self.__dict__) + + +class tzrange(tzrangebase): + """ + The ``tzrange`` object is a time zone specified by a set of offsets and + abbreviations, equivalent to the way the ``TZ`` variable can be specified + in POSIX-like systems, but using Python delta objects to specify DST + start, end and offsets. + + :param stdabbr: + The abbreviation for standard time (e.g. ``'EST'``). + + :param stdoffset: + An integer or :class:`datetime.timedelta` object or equivalent + specifying the base offset from UTC. + + If unspecified, +00:00 is used. + + :param dstabbr: + The abbreviation for DST / "Summer" time (e.g. ``'EDT'``). + + If specified, with no other DST information, DST is assumed to occur + and the default behavior or ``dstoffset``, ``start`` and ``end`` is + used. If unspecified and no other DST information is specified, it + is assumed that this zone has no DST. + + If this is unspecified and other DST information is *is* specified, + DST occurs in the zone but the time zone abbreviation is left + unchanged. + + :param dstoffset: + A an integer or :class:`datetime.timedelta` object or equivalent + specifying the UTC offset during DST. If unspecified and any other DST + information is specified, it is assumed to be the STD offset +1 hour. + + :param start: + A :class:`relativedelta.relativedelta` object or equivalent specifying + the time and time of year that daylight savings time starts. To + specify, for example, that DST starts at 2AM on the 2nd Sunday in + March, pass: + + ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))`` + + If unspecified and any other DST information is specified, the default + value is 2 AM on the first Sunday in April. + + :param end: + A :class:`relativedelta.relativedelta` object or equivalent + representing the time and time of year that daylight savings time + ends, with the same specification method as in ``start``. One note is + that this should point to the first time in the *standard* zone, so if + a transition occurs at 2AM in the DST zone and the clocks are set back + 1 hour to 1AM, set the ``hours`` parameter to +1. + + + **Examples:** + + .. testsetup:: tzrange + + from dateutil.tz import tzrange, tzstr + + .. doctest:: tzrange + + >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT") + True + + >>> from dateutil.relativedelta import * + >>> range1 = tzrange("EST", -18000, "EDT") + >>> range2 = tzrange("EST", -18000, "EDT", -14400, + ... relativedelta(hours=+2, month=4, day=1, + ... weekday=SU(+1)), + ... relativedelta(hours=+1, month=10, day=31, + ... 
weekday=SU(-1))) + >>> tzstr('EST5EDT') == range1 == range2 + True + + """ + def __init__(self, stdabbr, stdoffset=None, + dstabbr=None, dstoffset=None, + start=None, end=None): + + global relativedelta + from dateutil import relativedelta + + self._std_abbr = stdabbr + self._dst_abbr = dstabbr + + try: + stdoffset = stdoffset.total_seconds() + except (TypeError, AttributeError): + pass + + try: + dstoffset = dstoffset.total_seconds() + except (TypeError, AttributeError): + pass + + if stdoffset is not None: + self._std_offset = datetime.timedelta(seconds=stdoffset) + else: + self._std_offset = ZERO + + if dstoffset is not None: + self._dst_offset = datetime.timedelta(seconds=dstoffset) + elif dstabbr and stdoffset is not None: + self._dst_offset = self._std_offset + datetime.timedelta(hours=+1) + else: + self._dst_offset = ZERO + + if dstabbr and start is None: + self._start_delta = relativedelta.relativedelta( + hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) + else: + self._start_delta = start + + if dstabbr and end is None: + self._end_delta = relativedelta.relativedelta( + hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) + else: + self._end_delta = end + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = bool(self._start_delta) + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. + + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. + """ + if not self.hasdst: + return None + + base_year = datetime.datetime(year, 1, 1) + + start = base_year + self._start_delta + end = base_year + self._end_delta + + return (start, end) + + def __eq__(self, other): + if not isinstance(other, tzrange): + return NotImplemented + + return (self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr and + self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._start_delta == other._start_delta and + self._end_delta == other._end_delta) + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +@six.add_metaclass(_TzStrFactory) +class tzstr(tzrange): + """ + ``tzstr`` objects are time zone objects specified by a time-zone string as + it would be passed to a ``TZ`` variable on POSIX-style systems (see + the `GNU C Library: TZ Variable`_ for more details). + + There is one notable exception, which is that POSIX-style time zones use an + inverted offset format, so normally ``GMT+3`` would be parsed as an offset + 3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an + offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX + behavior, pass a ``True`` value to ``posix_offset``. + + The :class:`tzrange` object provides the same functionality, but is + specified using :class:`relativedelta.relativedelta` objects. rather than + strings. + + :param s: + A time zone string in ``TZ`` variable format. This can be a + :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x: + :class:`unicode`) or a stream emitting unicode characters + (e.g. :class:`StringIO`). + + :param posix_offset: + Optional. 
If set to ``True``, interpret strings such as ``GMT+3`` or + ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the + POSIX standard. + + .. caution:: + + Prior to version 2.7.0, this function also supported time zones + in the format: + + * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600`` + * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600`` + + This format is non-standard and has been deprecated; this function + will raise a :class:`DeprecatedTZFormatWarning` until + support is removed in a future version. + + .. _`GNU C Library: TZ Variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + """ + def __init__(self, s, posix_offset=False): + global parser + from dateutil.parser import _parser as parser + + self._s = s + + res = parser._parsetz(s) + if res is None or res.any_unused_tokens: + raise ValueError("unknown string format") + + # Here we break the compatibility with the TZ variable handling. + # GMT-3 actually *means* the timezone -3. + if res.stdabbr in ("GMT", "UTC") and not posix_offset: + res.stdoffset *= -1 + + # We must initialize it first, since _delta() needs + # _std_offset and _dst_offset set. Use False in start/end + # to avoid building it two times. + tzrange.__init__(self, res.stdabbr, res.stdoffset, + res.dstabbr, res.dstoffset, + start=False, end=False) + + if not res.dstabbr: + self._start_delta = None + self._end_delta = None + else: + self._start_delta = self._delta(res.start) + if self._start_delta: + self._end_delta = self._delta(res.end, isend=1) + + self.hasdst = bool(self._start_delta) + + def _delta(self, x, isend=0): + from dateutil import relativedelta + kwargs = {} + if x.month is not None: + kwargs["month"] = x.month + if x.weekday is not None: + kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) + if x.week > 0: + kwargs["day"] = 1 + else: + kwargs["day"] = 31 + elif x.day: + kwargs["day"] = x.day + elif x.yday is not None: + kwargs["yearday"] = x.yday + elif x.jyday is not None: + kwargs["nlyearday"] = x.jyday + if not kwargs: + # Default is to start on first sunday of april, and end + # on last sunday of october. + if not isend: + kwargs["month"] = 4 + kwargs["day"] = 1 + kwargs["weekday"] = relativedelta.SU(+1) + else: + kwargs["month"] = 10 + kwargs["day"] = 31 + kwargs["weekday"] = relativedelta.SU(-1) + if x.time is not None: + kwargs["seconds"] = x.time + else: + # Default is 2AM. + kwargs["seconds"] = 7200 + if isend: + # Convert to standard time, to follow the documented way + # of working with the extra hour. See the documentation + # of the tzinfo class. 
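+            # For a typical US-style rule such as EST5EDT the end transition
+            # is given as 02:00 DST; subtracting the one-hour DST delta below
+            # yields 01:00, i.e. the same instant expressed in standard time.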
+ delta = self._dst_offset - self._std_offset + kwargs["seconds"] -= delta.seconds + delta.days * 86400 + return relativedelta.relativedelta(**kwargs) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +class _tzicalvtzcomp(object): + def __init__(self, tzoffsetfrom, tzoffsetto, isdst, + tzname=None, rrule=None): + self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) + self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) + self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom + self.isdst = isdst + self.tzname = tzname + self.rrule = rrule + + +class _tzicalvtz(_tzinfo): + def __init__(self, tzid, comps=[]): + super(_tzicalvtz, self).__init__() + + self._tzid = tzid + self._comps = comps + self._cachedate = [] + self._cachecomp = [] + self._cache_lock = _thread.allocate_lock() + + def _find_comp(self, dt): + if len(self._comps) == 1: + return self._comps[0] + + dt = dt.replace(tzinfo=None) + + try: + with self._cache_lock: + return self._cachecomp[self._cachedate.index( + (dt, self._fold(dt)))] + except ValueError: + pass + + lastcompdt = None + lastcomp = None + + for comp in self._comps: + compdt = self._find_compdt(comp, dt) + + if compdt and (not lastcompdt or lastcompdt < compdt): + lastcompdt = compdt + lastcomp = comp + + if not lastcomp: + # RFC says nothing about what to do when a given + # time is before the first onset date. We'll look for the + # first standard component, or the first component, if + # none is found. + for comp in self._comps: + if not comp.isdst: + lastcomp = comp + break + else: + lastcomp = comp[0] + + with self._cache_lock: + self._cachedate.insert(0, (dt, self._fold(dt))) + self._cachecomp.insert(0, lastcomp) + + if len(self._cachedate) > 10: + self._cachedate.pop() + self._cachecomp.pop() + + return lastcomp + + def _find_compdt(self, comp, dt): + if comp.tzoffsetdiff < ZERO and self._fold(dt): + dt -= comp.tzoffsetdiff + + compdt = comp.rrule.before(dt, inc=True) + + return compdt + + def utcoffset(self, dt): + if dt is None: + return None + + return self._find_comp(dt).tzoffsetto + + def dst(self, dt): + comp = self._find_comp(dt) + if comp.isdst: + return comp.tzoffsetdiff + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._find_comp(dt).tzname + + def __repr__(self): + return "<tzicalvtz %s>" % repr(self._tzid) + + __reduce__ = object.__reduce__ + + +class tzical(object): + """ + This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure + as set out in `RFC 5545`_ Section 4.6.5 into one or more `tzinfo` objects. + + :param `fileobj`: + A file or stream in iCalendar format, which should be UTF-8 encoded + with CRLF endings. + + .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545 + """ + def __init__(self, fileobj): + global rrule + from dateutil import rrule + + if isinstance(fileobj, string_types): + self._s = fileobj + # ical should be encoded in UTF-8 with CRLF + fileobj = open(fileobj, 'r') + else: + self._s = getattr(fileobj, 'name', repr(fileobj)) + fileobj = _nullcontext(fileobj) + + self._vtz = {} + + with fileobj as fobj: + self._parse_rfc(fobj.read()) + + def keys(self): + """ + Retrieves the available time zones as a list. + """ + return list(self._vtz.keys()) + + def get(self, tzid=None): + """ + Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``. + + :param tzid: + If there is exactly one time zone available, omitting ``tzid`` + or passing :py:const:`None` value returns it. 
Otherwise a valid + key (which can be retrieved from :func:`keys`) is required. + + :raises ValueError: + Raised if ``tzid`` is not specified but there are either more + or fewer than 1 zone defined. + + :returns: + Returns either a :py:class:`datetime.tzinfo` object representing + the relevant time zone or :py:const:`None` if the ``tzid`` was + not found. + """ + if tzid is None: + if len(self._vtz) == 0: + raise ValueError("no timezones defined") + elif len(self._vtz) > 1: + raise ValueError("more than one timezone available") + tzid = next(iter(self._vtz)) + + return self._vtz.get(tzid) + + def _parse_offset(self, s): + s = s.strip() + if not s: + raise ValueError("empty offset") + if s[0] in ('+', '-'): + signal = (-1, +1)[s[0] == '+'] + s = s[1:] + else: + signal = +1 + if len(s) == 4: + return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal + elif len(s) == 6: + return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal + else: + raise ValueError("invalid offset: " + s) + + def _parse_rfc(self, s): + lines = s.splitlines() + if not lines: + raise ValueError("empty string") + + # Unfold + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + + tzid = None + comps = [] + invtz = False + comptype = None + for line in lines: + if not line: + continue + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0].upper() + parms = parms[1:] + if invtz: + if name == "BEGIN": + if value in ("STANDARD", "DAYLIGHT"): + # Process component + pass + else: + raise ValueError("unknown component: "+value) + comptype = value + founddtstart = False + tzoffsetfrom = None + tzoffsetto = None + rrulelines = [] + tzname = None + elif name == "END": + if value == "VTIMEZONE": + if comptype: + raise ValueError("component not closed: "+comptype) + if not tzid: + raise ValueError("mandatory TZID not found") + if not comps: + raise ValueError( + "at least one component is needed") + # Process vtimezone + self._vtz[tzid] = _tzicalvtz(tzid, comps) + invtz = False + elif value == comptype: + if not founddtstart: + raise ValueError("mandatory DTSTART not found") + if tzoffsetfrom is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + if tzoffsetto is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + # Process component + rr = None + if rrulelines: + rr = rrule.rrulestr("\n".join(rrulelines), + compatible=True, + ignoretz=True, + cache=True) + comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto, + (comptype == "DAYLIGHT"), + tzname, rr) + comps.append(comp) + comptype = None + else: + raise ValueError("invalid component end: "+value) + elif comptype: + if name == "DTSTART": + # DTSTART in VTIMEZONE takes a subset of valid RRULE + # values under RFC 5545. 
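                    # For example, "DTSTART;VALUE=DATE-TIME:19671029T020000" is
                    # accepted, while any other DTSTART parameter (such as TZID)
                    # is rejected by the check below.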
+ for parm in parms: + if parm != 'VALUE=DATE-TIME': + msg = ('Unsupported DTSTART param in ' + + 'VTIMEZONE: ' + parm) + raise ValueError(msg) + rrulelines.append(line) + founddtstart = True + elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): + rrulelines.append(line) + elif name == "TZOFFSETFROM": + if parms: + raise ValueError( + "unsupported %s parm: %s " % (name, parms[0])) + tzoffsetfrom = self._parse_offset(value) + elif name == "TZOFFSETTO": + if parms: + raise ValueError( + "unsupported TZOFFSETTO parm: "+parms[0]) + tzoffsetto = self._parse_offset(value) + elif name == "TZNAME": + if parms: + raise ValueError( + "unsupported TZNAME parm: "+parms[0]) + tzname = value + elif name == "COMMENT": + pass + else: + raise ValueError("unsupported property: "+name) + else: + if name == "TZID": + if parms: + raise ValueError( + "unsupported TZID parm: "+parms[0]) + tzid = value + elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): + pass + else: + raise ValueError("unsupported property: "+name) + elif name == "BEGIN" and value == "VTIMEZONE": + tzid = None + comps = [] + invtz = True + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +if sys.platform != "win32": + TZFILES = ["/etc/localtime", "localtime"] + TZPATHS = ["/usr/share/zoneinfo", + "/usr/lib/zoneinfo", + "/usr/share/lib/zoneinfo", + "/etc/zoneinfo"] +else: + TZFILES = [] + TZPATHS = [] + + +def __get_gettz(): + tzlocal_classes = (tzlocal,) + if tzwinlocal is not None: + tzlocal_classes += (tzwinlocal,) + + class GettzFunc(object): + """ + Retrieve a time zone object from a string representation + + This function is intended to retrieve the :py:class:`tzinfo` subclass + that best represents the time zone that would be used if a POSIX + `TZ variable`_ were set to the same value. + + If no argument or an empty string is passed to ``gettz``, local time + is returned: + + .. code-block:: python3 + + >>> gettz() + tzfile('/etc/localtime') + + This function is also the preferred way to map IANA tz database keys + to :class:`tzfile` objects: + + .. code-block:: python3 + + >>> gettz('Pacific/Kiritimati') + tzfile('/usr/share/zoneinfo/Pacific/Kiritimati') + + On Windows, the standard is extended to include the Windows-specific + zone names provided by the operating system: + + .. code-block:: python3 + + >>> gettz('Egypt Standard Time') + tzwin('Egypt Standard Time') + + Passing a GNU ``TZ`` style string time zone specification returns a + :class:`tzstr` object: + + .. code-block:: python3 + + >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + + :param name: + A time zone name (IANA, or, on Windows, Windows keys), location of + a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone + specifier. An empty string, no argument or ``None`` is interpreted + as local time. + + :return: + Returns an instance of one of ``dateutil``'s :py:class:`tzinfo` + subclasses. + + .. versionchanged:: 2.7.0 + + After version 2.7.0, any two calls to ``gettz`` using the same + input strings will return the same object: + + .. code-block:: python3 + + >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago') + True + + In addition to improving performance, this ensures that + `"same zone" semantics`_ are used for datetimes in the same zone. + + + .. _`TZ variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + + .. 
_`"same zone" semantics`: + https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html + """ + def __init__(self): + + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache_size = 8 + self.__strong_cache = OrderedDict() + self._cache_lock = _thread.allocate_lock() + + def __call__(self, name=None): + with self._cache_lock: + rv = self.__instances.get(name, None) + + if rv is None: + rv = self.nocache(name=name) + if not (name is None + or isinstance(rv, tzlocal_classes) + or rv is None): + # tzlocal is slightly more complicated than the other + # time zone providers because it depends on environment + # at construction time, so don't cache that. + # + # We also cannot store weak references to None, so we + # will also not store that. + self.__instances[name] = rv + else: + # No need for strong caching, return immediately + return rv + + self.__strong_cache[name] = self.__strong_cache.pop(name, rv) + + if len(self.__strong_cache) > self.__strong_cache_size: + self.__strong_cache.popitem(last=False) + + return rv + + def set_cache_size(self, size): + with self._cache_lock: + self.__strong_cache_size = size + while len(self.__strong_cache) > size: + self.__strong_cache.popitem(last=False) + + def cache_clear(self): + with self._cache_lock: + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache.clear() + + @staticmethod + def nocache(name=None): + """A non-cached version of gettz""" + tz = None + if not name: + try: + name = os.environ["TZ"] + except KeyError: + pass + if name is None or name == ":": + for filepath in TZFILES: + if not os.path.isabs(filepath): + filename = filepath + for path in TZPATHS: + filepath = os.path.join(path, filename) + if os.path.isfile(filepath): + break + else: + continue + if os.path.isfile(filepath): + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = tzlocal() + else: + try: + if name.startswith(":"): + name = name[1:] + except TypeError as e: + if isinstance(name, bytes): + new_msg = "gettz argument should be str, not bytes" + six.raise_from(TypeError(new_msg), e) + else: + raise + if os.path.isabs(name): + if os.path.isfile(name): + tz = tzfile(name) + else: + tz = None + else: + for path in TZPATHS: + filepath = os.path.join(path, name) + if not os.path.isfile(filepath): + filepath = filepath.replace(' ', '_') + if not os.path.isfile(filepath): + continue + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = None + if tzwin is not None: + try: + tz = tzwin(name) + except (WindowsError, UnicodeEncodeError): + # UnicodeEncodeError is for Python 2.7 compat + tz = None + + if not tz: + from dateutil.zoneinfo import get_zonefile_instance + tz = get_zonefile_instance().get(name) + + if not tz: + for c in name: + # name is not a tzstr unless it has at least + # one offset. For short values of "name", an + # explicit for loop seems to be the fastest way + # To determine if a string contains a digit + if c in "0123456789": + try: + tz = tzstr(name) + except ValueError: + pass + break + else: + if name in ("GMT", "UTC"): + tz = UTC + elif name in time.tzname: + tz = tzlocal() + return tz + + return GettzFunc() + + +gettz = __get_gettz() +del __get_gettz + + +def datetime_exists(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + would fall in a gap. + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) 
+ + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" exists in + ``tz``. + + .. versionadded:: 2.7.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + tz = dt.tzinfo + + dt = dt.replace(tzinfo=None) + + # This is essentially a test of whether or not the datetime can survive + # a round trip to UTC. + dt_rt = dt.replace(tzinfo=tz).astimezone(UTC).astimezone(tz) + dt_rt = dt_rt.replace(tzinfo=None) + + return dt == dt_rt + + +def datetime_ambiguous(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + is ambiguous (i.e if there are two times differentiated only by their DST + status). + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" is ambiguous in + ``tz``. + + .. versionadded:: 2.6.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + + tz = dt.tzinfo + + # If a time zone defines its own "is_ambiguous" function, we'll use that. + is_ambiguous_fn = getattr(tz, 'is_ambiguous', None) + if is_ambiguous_fn is not None: + try: + return tz.is_ambiguous(dt) + except Exception: + pass + + # If it doesn't come out and tell us it's ambiguous, we'll just check if + # the fold attribute has any effect on this particular date and time. + dt = dt.replace(tzinfo=tz) + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dst = wall_0.dst() == wall_1.dst() + + return not (same_offset and same_dst) + + +def resolve_imaginary(dt): + """ + Given a datetime that may be imaginary, return an existing datetime. + + This function assumes that an imaginary datetime represents what the + wall time would be in a zone had the offset transition not occurred, so + it will always fall forward by the transition's change in offset. + + .. doctest:: + + >>> from dateutil import tz + >>> from datetime import datetime + >>> NYC = tz.gettz('America/New_York') + >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC))) + 2017-03-12 03:30:00-04:00 + + >>> KIR = tz.gettz('Pacific/Kiritimati') + >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR))) + 1995-01-02 12:30:00+14:00 + + As a note, :func:`datetime.astimezone` is guaranteed to produce a valid, + existing datetime, so a round-trip to and from UTC is sufficient to get + an extant datetime, however, this generally "falls back" to an earlier time + rather than falling forward to the STD side (though no guarantees are made + about this behavior). + + :param dt: + A :class:`datetime.datetime` which may or may not exist. + + :return: + Returns an existing :class:`datetime.datetime`. If ``dt`` was not + imaginary, the datetime returned is guaranteed to be the same object + passed to the function. + + .. 
versionadded:: 2.7.0 + """ + if dt.tzinfo is not None and not datetime_exists(dt): + + curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset() + old_offset = (dt - datetime.timedelta(hours=24)).utcoffset() + + dt += curr_offset - old_offset + + return dt + + +def _datetime_to_timestamp(dt): + """ + Convert a :class:`datetime.datetime` object to an epoch timestamp in + seconds since January 1, 1970, ignoring the time zone. + """ + return (dt.replace(tzinfo=None) - EPOCH).total_seconds() + + +if sys.version_info >= (3, 6): + def _get_supported_offset(second_offset): + return second_offset +else: + def _get_supported_offset(second_offset): + # For python pre-3.6, round to full-minutes if that's not the case. + # Python's datetime doesn't accept sub-minute timezones. Check + # http://python.org/sf/1447945 or https://bugs.python.org/issue5288 + # for some information. + old_offset = second_offset + calculated_offset = 60 * ((second_offset + 30) // 60) + return calculated_offset + + +try: + # Python 3.7 feature + from contextlib import nullcontext as _nullcontext +except ImportError: + class _nullcontext(object): + """ + Class for wrapping contexts so that they are passed through in a + with statement. + """ + def __init__(self, context): + self.context = context + + def __enter__(self): + return self.context + + def __exit__(*args, **kwargs): + pass + +# vim:ts=4:sw=4:et diff --git a/venv/Lib/site-packages/dateutil/tz/win.py b/venv/Lib/site-packages/dateutil/tz/win.py new file mode 100644 index 00000000..cde07ba7 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/tz/win.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +""" +This module provides an interface to the native time zone data on Windows, +including :py:class:`datetime.tzinfo` implementations. + +Attempting to import this module on a non-Windows platform will raise an +:py:obj:`ImportError`. +""" +# This code was originally contributed by Jeffrey Harris. +import datetime +import struct + +from six.moves import winreg +from six import text_type + +try: + import ctypes + from ctypes import wintypes +except ValueError: + # ValueError is raised on non-Windows systems for some horrible reason. + raise ImportError("Running tzwin on non-Windows system") + +from ._common import tzrangebase + +__all__ = ["tzwin", "tzwinlocal", "tzres"] + +ONEWEEK = datetime.timedelta(7) + +TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" +TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" +TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + + +def _settzkeyname(): + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + winreg.OpenKey(handle, TZKEYNAMENT).Close() + TZKEYNAME = TZKEYNAMENT + except WindowsError: + TZKEYNAME = TZKEYNAME9X + handle.Close() + return TZKEYNAME + + +TZKEYNAME = _settzkeyname() + + +class tzres(object): + """ + Class for accessing ``tzres.dll``, which contains timezone name related + resources. + + .. 
versionadded:: 2.5.0 + """ + p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char + + def __init__(self, tzres_loc='tzres.dll'): + # Load the user32 DLL so we can load strings from tzres + user32 = ctypes.WinDLL('user32') + + # Specify the LoadStringW function + user32.LoadStringW.argtypes = (wintypes.HINSTANCE, + wintypes.UINT, + wintypes.LPWSTR, + ctypes.c_int) + + self.LoadStringW = user32.LoadStringW + self._tzres = ctypes.WinDLL(tzres_loc) + self.tzres_loc = tzres_loc + + def load_name(self, offset): + """ + Load a timezone name from a DLL offset (integer). + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.load_name(112)) + 'Eastern Standard Time' + + :param offset: + A positive integer value referring to a string from the tzres dll. + + .. note:: + + Offsets found in the registry are generally of the form + ``@tzres.dll,-114``. The offset in this case is 114, not -114. + + """ + resource = self.p_wchar() + lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR) + nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0) + return resource[:nchar] + + def name_from_string(self, tzname_str): + """ + Parse strings as returned from the Windows registry into the time zone + name as defined in the registry. + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.name_from_string('@tzres.dll,-251')) + 'Dateline Daylight Time' + >>> print(tzr.name_from_string('Eastern Standard Time')) + 'Eastern Standard Time' + + :param tzname_str: + A timezone name string as returned from a Windows registry key. + + :return: + Returns the localized timezone string from tzres.dll if the string + is of the form `@tzres.dll,-offset`, else returns the input string. + """ + if not tzname_str.startswith('@'): + return tzname_str + + name_splt = tzname_str.split(',-') + try: + offset = int(name_splt[1]) + except: + raise ValueError("Malformed timezone string.") + + return self.load_name(offset) + + +class tzwinbase(tzrangebase): + """tzinfo class based on win32's timezones available in the registry.""" + def __init__(self): + raise NotImplementedError('tzwinbase is an abstract base class') + + def __eq__(self, other): + # Compare on all relevant dimensions, including name. + if not isinstance(other, tzwinbase): + return NotImplemented + + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._stddayofweek == other._stddayofweek and + self._dstdayofweek == other._dstdayofweek and + self._stdweeknumber == other._stdweeknumber and + self._dstweeknumber == other._dstweeknumber and + self._stdhour == other._stdhour and + self._dsthour == other._dsthour and + self._stdminute == other._stdminute and + self._dstminute == other._dstminute and + self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr) + + @staticmethod + def list(): + """Return a list of all time zones known to the system.""" + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZKEYNAME) as tzkey: + result = [winreg.EnumKey(tzkey, i) + for i in range(winreg.QueryInfoKey(tzkey)[0])] + return result + + def display(self): + """ + Return the display name of the time zone. + """ + return self._display + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. 
+ + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. + """ + + if not self.hasdst: + return None + + dston = picknthweekday(year, self._dstmonth, self._dstdayofweek, + self._dsthour, self._dstminute, + self._dstweeknumber) + + dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek, + self._stdhour, self._stdminute, + self._stdweeknumber) + + # Ambiguous dates default to the STD side + dstoff -= self._dst_base_offset + + return dston, dstoff + + def _get_hasdst(self): + return self._dstmonth != 0 + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +class tzwin(tzwinbase): + """ + Time zone object created from the zone info in the Windows registry + + These are similar to :py:class:`dateutil.tz.tzrange` objects in that + the time zone data is provided in the format of a single offset rule + for either 0 or 2 time zone transitions per year. + + :param: name + The name of a Windows time zone key, e.g. "Eastern Standard Time". + The full list of keys can be retrieved with :func:`tzwin.list`. + """ + + def __init__(self, name): + self._name = name + + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + keydict = valuestodict(tzkey) + + self._std_abbr = keydict["Std"] + self._dst_abbr = keydict["Dlt"] + + self._display = keydict["Display"] + + # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm + tup = struct.unpack("=3l16h", keydict["TZI"]) + stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 + dstoffset = stdoffset-tup[2] # + DaylightBias * -1 + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs + # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx + (self._stdmonth, + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[4:9] + + (self._dstmonth, + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[12:17] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwin(%s)" % repr(self._name) + + def __reduce__(self): + return (self.__class__, (self._name,)) + + +class tzwinlocal(tzwinbase): + """ + Class representing the local time zone information in the Windows registry + + While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time` + module) to retrieve time zone information, ``tzwinlocal`` retrieves the + rules directly from the Windows registry and creates an object like + :class:`dateutil.tz.tzwin`. + + Because Windows does not have an equivalent of :func:`time.tzset`, on + Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the + time zone settings *at the time that the process was started*, meaning + changes to the machine's time zone settings during the run of a program + on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`. + Because ``tzwinlocal`` reads the registry directly, it is unaffected by + this issue. 
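    A brief usage sketch (illustrative only; the zone name reported depends on
    the machine's current settings)::

        >>> from dateutil.tz import tzwinlocal  # doctest: +SKIP
        >>> str(tzwinlocal())                   # doctest: +SKIP
        "tzwinlocal('Eastern Standard Time')"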
+ """ + def __init__(self): + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: + keydict = valuestodict(tzlocalkey) + + self._std_abbr = keydict["StandardName"] + self._dst_abbr = keydict["DaylightName"] + + try: + tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, + sn=self._std_abbr) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + _keydict = valuestodict(tzkey) + self._display = _keydict["Display"] + except OSError: + self._display = None + + stdoffset = -keydict["Bias"]-keydict["StandardBias"] + dstoffset = stdoffset-keydict["DaylightBias"] + + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # For reasons unclear, in this particular key, the day of week has been + # moved to the END of the SYSTEMTIME structure. + tup = struct.unpack("=8h", keydict["StandardStart"]) + + (self._stdmonth, + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[1:5] + + self._stddayofweek = tup[7] + + tup = struct.unpack("=8h", keydict["DaylightStart"]) + + (self._dstmonth, + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[1:5] + + self._dstdayofweek = tup[7] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwinlocal()" + + def __str__(self): + # str will return the standard name, not the daylight name. + return "tzwinlocal(%s)" % repr(self._std_abbr) + + def __reduce__(self): + return (self.__class__, ()) + + +def picknthweekday(year, month, dayofweek, hour, minute, whichweek): + """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ + first = datetime.datetime(year, month, 1, hour, minute) + + # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), + # Because 7 % 7 = 0 + weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) + wd = weekdayone + ((whichweek - 1) * ONEWEEK) + if (wd.month != month): + wd -= ONEWEEK + + return wd + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dout = {} + size = winreg.QueryInfoKey(key)[1] + tz_res = None + + for i in range(size): + key_name, value, dtype = winreg.EnumValue(key, i) + if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: + # If it's a DWORD (32-bit integer), it's stored as unsigned - convert + # that to a proper signed integer + if value & (1 << 31): + value = value - (1 << 32) + elif dtype == winreg.REG_SZ: + # If it's a reference to the tzres DLL, load the actual string + if value.startswith('@tzres'): + tz_res = tz_res or tzres() + value = tz_res.name_from_string(value) + + value = value.rstrip('\x00') # Remove trailing nulls + + dout[key_name] = value + + return dout diff --git a/venv/Lib/site-packages/dateutil/tzwin.py b/venv/Lib/site-packages/dateutil/tzwin.py new file mode 100644 index 00000000..cebc673e --- /dev/null +++ b/venv/Lib/site-packages/dateutil/tzwin.py @@ -0,0 +1,2 @@ +# tzwin has moved to dateutil.tz.win +from .tz.win import * diff --git a/venv/Lib/site-packages/dateutil/utils.py b/venv/Lib/site-packages/dateutil/utils.py new file mode 100644 index 00000000..44d9c994 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/utils.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +This module offers general convenience and utility functions for dealing with +datetimes. + +.. 
versionadded:: 2.7.0 +""" +from __future__ import unicode_literals + +from datetime import datetime, time + + +def today(tzinfo=None): + """ + Returns a :py:class:`datetime` representing the current day at midnight + + :param tzinfo: + The time zone to attach (also used to determine the current day). + + :return: + A :py:class:`datetime.datetime` object representing the current day + at midnight. + """ + + dt = datetime.now(tzinfo) + return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) + + +def default_tzinfo(dt, tzinfo): + """ + Sets the ``tzinfo`` parameter on naive datetimes only + + This is useful for example when you are provided a datetime that may have + either an implicit or explicit time zone, such as when parsing a time zone + string. + + .. doctest:: + + >>> from dateutil.tz import tzoffset + >>> from dateutil.parser import parse + >>> from dateutil.utils import default_tzinfo + >>> dflt_tz = tzoffset("EST", -18000) + >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) + 2014-01-01 12:30:00+00:00 + >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) + 2014-01-01 12:30:00-05:00 + + :param dt: + The datetime on which to replace the time zone + + :param tzinfo: + The :py:class:`datetime.tzinfo` subclass instance to assign to + ``dt`` if (and only if) it is naive. + + :return: + Returns an aware :py:class:`datetime.datetime`. + """ + if dt.tzinfo is not None: + return dt + else: + return dt.replace(tzinfo=tzinfo) + + +def within_delta(dt1, dt2, delta): + """ + Useful for comparing two datetimes that may a negilible difference + to be considered equal. + """ + delta = abs(delta) + difference = dt1 - dt2 + return -delta <= difference <= delta diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/__init__.py b/venv/Lib/site-packages/dateutil/zoneinfo/__init__.py new file mode 100644 index 00000000..34f11ad6 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/zoneinfo/__init__.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +import warnings +import json + +from tarfile import TarFile +from pkgutil import get_data +from io import BytesIO + +from dateutil.tz import tzfile as _tzfile + +__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] + +ZONEFILENAME = "dateutil-zoneinfo.tar.gz" +METADATA_FN = 'METADATA' + + +class tzfile(_tzfile): + def __reduce__(self): + return (gettz, (self._filename,)) + + +def getzoneinfofile_stream(): + try: + return BytesIO(get_data(__name__, ZONEFILENAME)) + except IOError as e: # TODO switch to FileNotFoundError? + warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) + return None + + +class ZoneInfoFile(object): + def __init__(self, zonefile_stream=None): + if zonefile_stream is not None: + with TarFile.open(fileobj=zonefile_stream) as tf: + self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) + for zf in tf.getmembers() + if zf.isfile() and zf.name != METADATA_FN} + # deal with links: They'll point to their parent object. Less + # waste of memory + links = {zl.name: self.zones[zl.linkname] + for zl in tf.getmembers() if + zl.islnk() or zl.issym()} + self.zones.update(links) + try: + metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) + metadata_str = metadata_json.read().decode('UTF-8') + self.metadata = json.loads(metadata_str) + except KeyError: + # no metadata in tar file + self.metadata = None + else: + self.zones = {} + self.metadata = None + + def get(self, name, default=None): + """ + Wrapper for :func:`ZoneInfoFile.zones.get`. 
This is a convenience method + for retrieving zones from the zone dictionary. + + :param name: + The name of the zone to retrieve. (Generally IANA zone names) + + :param default: + The value to return in the event of a missing key. + + .. versionadded:: 2.6.0 + + """ + return self.zones.get(name, default) + + +# The current API has gettz as a module function, although in fact it taps into +# a stateful class. So as a workaround for now, without changing the API, we +# will create a new "global" class instance the first time a user requests a +# timezone. Ugly, but adheres to the api. +# +# TODO: Remove after deprecation period. +_CLASS_ZONE_INSTANCE = [] + + +def get_zonefile_instance(new_instance=False): + """ + This is a convenience function which provides a :class:`ZoneInfoFile` + instance using the data provided by the ``dateutil`` package. By default, it + caches a single instance of the ZoneInfoFile object and returns that. + + :param new_instance: + If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and + used as the cached instance for the next call. Otherwise, new instances + are created only as necessary. + + :return: + Returns a :class:`ZoneInfoFile` object. + + .. versionadded:: 2.6 + """ + if new_instance: + zif = None + else: + zif = getattr(get_zonefile_instance, '_cached_instance', None) + + if zif is None: + zif = ZoneInfoFile(getzoneinfofile_stream()) + + get_zonefile_instance._cached_instance = zif + + return zif + + +def gettz(name): + """ + This retrieves a time zone from the local zoneinfo tarball that is packaged + with dateutil. + + :param name: + An IANA-style time zone name, as found in the zoneinfo file. + + :return: + Returns a :class:`dateutil.tz.tzfile` time zone object. + + .. warning:: + It is generally inadvisable to use this function, and it is only + provided for API compatibility with earlier versions. This is *not* + equivalent to ``dateutil.tz.gettz()``, which selects an appropriate + time zone based on the inputs, favoring system zoneinfo. This is ONLY + for accessing the dateutil-specific zoneinfo (which may be out of + date compared to the system zoneinfo). + + .. deprecated:: 2.6 + If you need to use a specific zoneinfofile over the system zoneinfo, + instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call + :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead. + + Use :func:`get_zonefile_instance` to retrieve an instance of the + dateutil-provided zoneinfo. + """ + warnings.warn("zoneinfo.gettz() will be removed in future versions, " + "to use the dateutil-provided zoneinfo files, instantiate a " + "ZoneInfoFile object and use ZoneInfoFile.zones.get() " + "instead. See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].zones.get(name) + + +def gettz_db_metadata(): + """ Get the zonefile metadata + + See `zonefile_metadata`_ + + :returns: + A dictionary with the database metadata + + .. deprecated:: 2.6 + See deprecation warning in :func:`zoneinfo.gettz`. To get metadata, + query the attribute ``zoneinfo.ZoneInfoFile.metadata``. + """ + warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future " + "versions, to use the dateutil-provided zoneinfo files, " + "ZoneInfoFile object and query the 'metadata' attribute " + "instead. 
See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..9a3800d8 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-36.pyc b/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-36.pyc new file mode 100644 index 00000000..30634673 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/venv/Lib/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz new file mode 100644 index 00000000..89e83517 Binary files /dev/null and b/venv/Lib/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz differ diff --git a/venv/Lib/site-packages/dateutil/zoneinfo/rebuild.py b/venv/Lib/site-packages/dateutil/zoneinfo/rebuild.py new file mode 100644 index 00000000..78f0d1a0 --- /dev/null +++ b/venv/Lib/site-packages/dateutil/zoneinfo/rebuild.py @@ -0,0 +1,53 @@ +import logging +import os +import tempfile +import shutil +import json +from subprocess import check_call +from tarfile import TarFile + +from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME + + +def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): + """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* + + filename is the timezone tarball from ``ftp.iana.org/tz``. + + """ + tmpdir = tempfile.mkdtemp() + zonedir = os.path.join(tmpdir, "zoneinfo") + moduledir = os.path.dirname(__file__) + try: + with TarFile.open(filename) as tf: + for name in zonegroups: + tf.extract(name, tmpdir) + filepaths = [os.path.join(tmpdir, n) for n in zonegroups] + try: + check_call(["zic", "-d", zonedir] + filepaths) + except OSError as e: + _print_on_nosuchfile(e) + raise + # write metadata file + with open(os.path.join(zonedir, METADATA_FN), 'w') as f: + json.dump(metadata, f, indent=4, sort_keys=True) + target = os.path.join(moduledir, ZONEFILENAME) + with TarFile.open(target, "w:%s" % format) as tf: + for entry in os.listdir(zonedir): + entrypath = os.path.join(zonedir, entry) + tf.add(entrypath, entry) + finally: + shutil.rmtree(tmpdir) + + +def _print_on_nosuchfile(e): + """Print helpful troubleshooting message + + e is an exception raised by subprocess.check_call() + + """ + if e.errno == 2: + logging.error( + "Could not find zic. 
Perhaps you need to install " + "libc-bin or some other package that provides it, " + "or it's not in your PATH?") diff --git a/venv/Lib/site-packages/distutils-precedence.pth b/venv/Lib/site-packages/distutils-precedence.pth new file mode 100644 index 00000000..6de4198f --- /dev/null +++ b/venv/Lib/site-packages/distutils-precedence.pth @@ -0,0 +1 @@ +import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/venv/Lib/site-packages/isapi/PyISAPI_loader.dll b/venv/Lib/site-packages/isapi/PyISAPI_loader.dll new file mode 100644 index 00000000..f4b7e850 Binary files /dev/null and b/venv/Lib/site-packages/isapi/PyISAPI_loader.dll differ diff --git a/venv/Lib/site-packages/isapi/README.txt b/venv/Lib/site-packages/isapi/README.txt new file mode 100644 index 00000000..dc528624 --- /dev/null +++ b/venv/Lib/site-packages/isapi/README.txt @@ -0,0 +1,7 @@ +A Python ISAPI extension. Contributed by Phillip Frantz, and is +Copyright 2002-2003 by Blackdog Software Pty Ltd. + +See the 'samples' directory, and particularly samples\README.txt + +You can find documentation in the PyWin32.chm file that comes with pywin32 - +you can open this from Pythonwin->Help, or from the start menu. \ No newline at end of file diff --git a/venv/Lib/site-packages/isapi/__init__.py b/venv/Lib/site-packages/isapi/__init__.py new file mode 100644 index 00000000..3404f116 --- /dev/null +++ b/venv/Lib/site-packages/isapi/__init__.py @@ -0,0 +1,33 @@ +# The Python ISAPI package. + +# Exceptions thrown by the DLL framework. +class ISAPIError(Exception): + def __init__(self, errno, strerror = None, funcname = None): + # named attributes match IOError etc. + self.errno = errno + self.strerror = strerror + self.funcname = funcname + Exception.__init__(self, errno, strerror, funcname) + def __str__(self): + if self.strerror is None: + try: + import win32api + self.strerror = win32api.FormatMessage(self.errno).strip() + except: + self.strerror = "no error message is available" + # str() looks like a win32api error. + return str( (self.errno, self.strerror, self.funcname) ) + +class FilterError(ISAPIError): + pass + +class ExtensionError(ISAPIError): + pass + +# A little development aid - a filter or extension callback function can +# raise one of these exceptions, and the handler module will be reloaded. +# This means you can change your code without restarting IIS. +# After a reload, your filter/extension will have the GetFilterVersion/ +# GetExtensionVersion function called, but with None as the first arg. 
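# A handler could trigger a reload along these lines (an illustrative sketch;
# source_file_changed() is a hypothetical helper, not part of the isapi
# package):
#
#     def HttpExtensionProc(self, ecb):
#         if source_file_changed():
#             raise InternalReloadException
#         ...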
+class InternalReloadException(Exception): + pass diff --git a/venv/Lib/site-packages/isapi/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/isapi/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..d56b2531 Binary files /dev/null and b/venv/Lib/site-packages/isapi/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/__pycache__/install.cpython-36.pyc b/venv/Lib/site-packages/isapi/__pycache__/install.cpython-36.pyc new file mode 100644 index 00000000..51f5cc95 Binary files /dev/null and b/venv/Lib/site-packages/isapi/__pycache__/install.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/__pycache__/isapicon.cpython-36.pyc b/venv/Lib/site-packages/isapi/__pycache__/isapicon.cpython-36.pyc new file mode 100644 index 00000000..db57aa50 Binary files /dev/null and b/venv/Lib/site-packages/isapi/__pycache__/isapicon.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/__pycache__/simple.cpython-36.pyc b/venv/Lib/site-packages/isapi/__pycache__/simple.cpython-36.pyc new file mode 100644 index 00000000..ed630a0d Binary files /dev/null and b/venv/Lib/site-packages/isapi/__pycache__/simple.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/__pycache__/threaded_extension.cpython-36.pyc b/venv/Lib/site-packages/isapi/__pycache__/threaded_extension.cpython-36.pyc new file mode 100644 index 00000000..98693b28 Binary files /dev/null and b/venv/Lib/site-packages/isapi/__pycache__/threaded_extension.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/doc/isapi.html b/venv/Lib/site-packages/isapi/doc/isapi.html new file mode 100644 index 00000000..03001a1b --- /dev/null +++ b/venv/Lib/site-packages/isapi/doc/isapi.html @@ -0,0 +1,92 @@ +<!-- NOTE: This HTML is displayed inside the CHM file - hence some hrefs + will only work in that environment +--> +<HTML> +<BODY> +<TITLE>Introduction to Python ISAPI support + +

Introduction to Python ISAPI support

+ +

See also

+ +

Note: if you are viewing this documentation directly from disk, +most links in this document will fail - you can also find this document in the +CHM file that comes with pywin32, where the links will work + +

Introduction

+This documents Python support for hosting ISAPI extensions and filters inside +Microsoft Internet Information Server (IIS). It assumes a basic understanding +of the ISAPI filter and extension mechanism. +

+In summary, to implement a filter or extension, you provide a Python module +which defines a Filter and/or Extension class. Once your class has been +loaded, IIS/ISAPI will, via an extension DLL, call methods on your class. +

+A filter or extension class instance need only provide 3 methods - for filters they +are called GetFilterVersion, HttpFilterProc and +TerminateFilter. For extensions they +are named GetExtensionVersion, HttpExtensionProc and +TerminateExtension. If you are familiar with writing ISAPI +extensions in C/C++, these names and their purpose will be familiar. +

+Most of the work is done in the HttpFilterProc and +HttpExtensionProc methods. These both take a single +parameter - an HTTP_FILTER_CONTEXT and +EXTENSION_CONTROL_BLOCK +object respectively. +
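As a rough illustration, a minimal extension module might look like the sketch below. The method names are the ones listed above; the ecb helper calls (SendResponseHeaders, WriteClient) and the HSE_STATUS_SUCCESS constant follow the pywin32 samples and should be treated as assumptions here.

    # minimal_extension.py - illustrative sketch only
    from isapi import isapicon

    class Extension:
        def GetExtensionVersion(self, vi):
            pass  # version/description setup could go here

        def HttpExtensionProc(self, ecb):
            # ecb wraps the EXTENSION_CONTROL_BLOCK described above
            ecb.SendResponseHeaders("200 OK",
                                    "Content-Type: text/plain\r\n\r\n",
                                    False)
            ecb.WriteClient("Hello from a Python ISAPI extension")
            return isapicon.HSE_STATUS_SUCCESS

        def TerminateExtension(self, status):
            pass

    def __ExtensionFactory__():
        return Extension()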

+In addition to these components, there is an 'isapi' package, containing +support facilities (base-classes, exceptions, etc) which can be leveraged +by the extension. + +

Base classes

+There are a number of base classes provided to make writing extensions a little +simpler. Of particular note is isapi.threaded_extension.ThreadPoolExtension. +This implements a thread-pool and informs IIS that the request is progressing +in the background. Your sub-class need only provide a Dispatch +method, which is called on one of the worker threads rather than the thread +that the request came in on. +
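A sub-class might look roughly like the following sketch (it assumes the ecb helpers GetServerVariable, SendResponseHeaders, WriteClient and DoneWithSession behave as in the pywin32 samples):

    from isapi import threaded_extension

    class Extension(threaded_extension.ThreadPoolExtension):
        "Handles each request on a worker thread via Dispatch()."
        def Dispatch(self, ecb):
            url = ecb.GetServerVariable("URL")
            ecb.SendResponseHeaders("200 OK",
                                    "Content-Type: text/plain\r\n\r\n",
                                    False)
            ecb.WriteClient("You requested: %s" % url)
            ecb.DoneWithSession()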

+There is a base class for a filter in isapi.simple, but there is no +equivalent threaded filter - filters work under a different model, where +background processing is not possible. +

Samples

+Please see the isapi/samples directory for some sample filters +and extensions. + +

Implementation

+A Python ISAPI filter extension consists of 2 main components: +
    +
  • A DLL used by ISAPI to interface with Python.
  • A Python script used by that DLL to implement the filter or extension functionality
+ +

Extension DLL

+The DLL is usually managed automatically by the isapi.install module. As the +Python script for the extension is installed, a generic DLL provided with +the isapi package is installed next to the script, and IIS is configured to +use this DLL. +
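For instance, an install script conventionally ends with something along these lines (a sketch; params stands for an ISAPIParameters object describing your filters and virtual directories, and HandleCommandLine is the helper the pywin32 samples use):

    if __name__ == "__main__":
        from isapi.install import HandleCommandLine
        HandleCommandLine(params)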

+The name of the DLL always has the same base name as the Python script, but +with a leading underscore (_), and an extension of .dll. For example, the +sample "redirector.py" will, when installed, have "_redirector.dll" created +in the same directory. +

+The Python script may provide 2 entry points - methods named __FilterFactory__ +and __ExtensionFactory__, both taking no arguments and returning a filter or +extension object. + +

Using py2exe and the isapi package

+You can instruct py2exe to create a 'frozen' Python ISAPI filter/extension. +In this case, py2exe will create a package with everything you need in one +directory, and the Python source file embedded in the .zip file. +

+In general, you will want to build a seperate installation executable along +with the ISAPI extension. This executable will be built from the same script. +See the ISAPI sample in the py2exe distribution. diff --git a/venv/Lib/site-packages/isapi/install.py b/venv/Lib/site-packages/isapi/install.py new file mode 100644 index 00000000..e4180735 --- /dev/null +++ b/venv/Lib/site-packages/isapi/install.py @@ -0,0 +1,730 @@ +"""Installation utilities for Python ISAPI filters and extensions.""" + +# this code adapted from "Tomcat JK2 ISAPI redirector", part of Apache +# Created July 2004, Mark Hammond. +import sys, os, imp, shutil, stat +import operator +from win32com.client import GetObject, Dispatch +from win32com.client.gencache import EnsureModule, EnsureDispatch +import win32api +import pythoncom +import winerror +import traceback + +_APP_INPROC = 0 +_APP_OUTPROC = 1 +_APP_POOLED = 2 +_IIS_OBJECT = "IIS://LocalHost/W3SVC" +_IIS_SERVER = "IIsWebServer" +_IIS_WEBDIR = "IIsWebDirectory" +_IIS_WEBVIRTUALDIR = "IIsWebVirtualDir" +_IIS_FILTERS = "IIsFilters" +_IIS_FILTER = "IIsFilter" + +_DEFAULT_SERVER_NAME = "Default Web Site" +_DEFAULT_HEADERS = "X-Powered-By: Python" +_DEFAULT_PROTECTION = _APP_POOLED + +# Default is for 'execute' only access - ie, only the extension +# can be used. This can be overridden via your install script. +_DEFAULT_ACCESS_EXECUTE = True +_DEFAULT_ACCESS_READ = False +_DEFAULT_ACCESS_WRITE = False +_DEFAULT_ACCESS_SCRIPT = False +_DEFAULT_CONTENT_INDEXED = False +_DEFAULT_ENABLE_DIR_BROWSING = False +_DEFAULT_ENABLE_DEFAULT_DOC = False + +_extensions = [ext for ext, _, _ in imp.get_suffixes()] +is_debug_build = '_d.pyd' in _extensions + +this_dir = os.path.abspath(os.path.dirname(__file__)) + +class FilterParameters: + Name = None + Description = None + Path = None + Server = None + # Params that control if/how AddExtensionFile is called. + AddExtensionFile = True + AddExtensionFile_Enabled = True + AddExtensionFile_GroupID = None # defaults to Name + AddExtensionFile_CanDelete = True + AddExtensionFile_Description = None # defaults to Description. + + def __init__(self, **kw): + self.__dict__.update(kw) + +class VirtualDirParameters: + Name = None # Must be provided. + Description = None # defaults to Name + AppProtection = _DEFAULT_PROTECTION + Headers = _DEFAULT_HEADERS + Path = None # defaults to WWW root. + Type = _IIS_WEBVIRTUALDIR + AccessExecute = _DEFAULT_ACCESS_EXECUTE + AccessRead = _DEFAULT_ACCESS_READ + AccessWrite = _DEFAULT_ACCESS_WRITE + AccessScript = _DEFAULT_ACCESS_SCRIPT + ContentIndexed = _DEFAULT_CONTENT_INDEXED + EnableDirBrowsing = _DEFAULT_ENABLE_DIR_BROWSING + EnableDefaultDoc = _DEFAULT_ENABLE_DEFAULT_DOC + DefaultDoc = None # Only set in IIS if not None + ScriptMaps = [] + ScriptMapUpdate = "end" # can be 'start', 'end', 'replace' + Server = None + + def __init__(self, **kw): + self.__dict__.update(kw) + + def is_root(self): + "This virtual directory is a root directory if parent and name are blank" + parent, name = self.split_path() + return not parent and not name + + def split_path(self): + return split_path(self.Name) + +class ScriptMapParams: + Extension = None + Module = None + Flags = 5 + Verbs = "" + # Params that control if/how AddExtensionFile is called. + AddExtensionFile = True + AddExtensionFile_Enabled = True + AddExtensionFile_GroupID = None # defaults to Name + AddExtensionFile_CanDelete = True + AddExtensionFile_Description = None # defaults to Description. 
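    # Typical usage in an install script (an illustrative sketch; the attribute
    # names are the ones defined by the parameter classes in this module):
    #
    #     sm = ScriptMapParams(Extension="*", Flags=0)
    #     vd = VirtualDirParameters(Name="MyApp",
    #                               Description="My Python ISAPI app",
    #                               ScriptMaps=[sm],
    #                               ScriptMapUpdate="replace")
    #     params = ISAPIParameters(VirtualDirs=[vd])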
+ def __init__(self, **kw): + self.__dict__.update(kw) + + def __str__(self): + "Format this parameter suitable for IIS" + items = [self.Extension, self.Module, self.Flags] + # IIS gets upset if there is a trailing verb comma, but no verbs + if self.Verbs: + items.append(self.Verbs) + items = [str(item) for item in items] + return ','.join(items) + +class ISAPIParameters: + ServerName = _DEFAULT_SERVER_NAME + # Description = None + Filters = [] + VirtualDirs = [] + def __init__(self, **kw): + self.__dict__.update(kw) + +verbose = 1 # The level - 0 is quiet. +def log(level, what): + if verbose >= level: + print(what) + +# Convert an ADSI COM exception to the Win32 error code embedded in it. +def _GetWin32ErrorCode(com_exc): + hr = com_exc.hresult + # If we have more details in the 'excepinfo' struct, use it. + if com_exc.excepinfo: + hr = com_exc.excepinfo[-1] + if winerror.HRESULT_FACILITY(hr) != winerror.FACILITY_WIN32: + raise + return winerror.SCODE_CODE(hr) + +class InstallationError(Exception): pass +class ItemNotFound(InstallationError): pass +class ConfigurationError(InstallationError): pass + +def FindPath(options, server, name): + if name.lower().startswith("iis://"): + return name + else: + if name and name[0] != "/": + name = "/"+name + return FindWebServer(options, server)+"/ROOT"+name + +def LocateWebServerPath(description): + """ + Find an IIS web server whose name or comment matches the provided + description (case-insensitive). + + >>> LocateWebServerPath('Default Web Site') # doctest: +SKIP + + or + + >>> LocateWebServerPath('1') #doctest: +SKIP + """ + assert len(description) >= 1, "Server name or comment is required" + iis = GetObject(_IIS_OBJECT) + description = description.lower().strip() + for site in iis: + # Name is generally a number, but no need to assume that. + site_attributes = [getattr(site, attr, "").lower().strip() + for attr in ("Name", "ServerComment")] + if description in site_attributes: + return site.AdsPath + msg = "No web sites match the description '%s'" % description + raise ItemNotFound(msg) + +def GetWebServer(description = None): + """ + Load the web server instance (COM object) for a given instance + or description. + If None is specified, the default website is retrieved (indicated + by the identifier 1. + """ + description = description or "1" + path = LocateWebServerPath(description) + server = LoadWebServer(path) + return server + +def LoadWebServer(path): + try: + server = GetObject(path) + except pythoncom.com_error as details: + msg = details.strerror + if exc.excepinfo and exc.excepinfo[2]: + msg = exc.excepinfo[2] + msg = "WebServer %s: %s" % (path, msg) + raise ItemNotFound(msg) + return server + +def FindWebServer(options, server_desc): + """ + Legacy function to allow options to define a .server property + to override the other parameter. Use GetWebServer instead. + """ + # options takes precedence + server_desc = options.server or server_desc + # make sure server_desc is unicode (could be mbcs if passed in + # sys.argv). + if server_desc and not isinstance(server_desc, str): + server_desc = server_desc.decode('mbcs') + + # get the server (if server_desc is None, the default site is acquired) + server = GetWebServer(server_desc) + return server.adsPath + +def split_path(path): + """ + Get the parent path and basename. 
+ + >>> split_path('/') + ['', ''] + + >>> split_path('') + ['', ''] + + >>> split_path('foo') + ['', 'foo'] + + >>> split_path('/foo') + ['', 'foo'] + + >>> split_path('/foo/bar') + ['/foo', 'bar'] + + >>> split_path('foo/bar') + ['/foo', 'bar'] + """ + + if not path.startswith('/'): path = '/' + path + return path.rsplit('/', 1) + +def _CreateDirectory(iis_dir, name, params): + # We used to go to lengths to keep an existing virtual directory + # in place. However, in some cases the existing directories got + # into a bad state, and an update failed to get them working. + # So we nuke it first. If this is a problem, we could consider adding + # a --keep-existing option. + try: + # Also seen the Class change to a generic IISObject - so nuke + # *any* existing object, regardless of Class + assert name.strip("/"), "mustn't delete the root!" + iis_dir.Delete('', name) + log(2, "Deleted old directory '%s'" % (name,)) + except pythoncom.com_error: + pass + + newDir = iis_dir.Create(params.Type, name) + log(2, "Creating new directory '%s' in %s..." % (name,iis_dir.Name)) + + friendly = params.Description or params.Name + newDir.AppFriendlyName = friendly + + # Note that the new directory won't be visible in the IIS UI + # unless the directory exists on the filesystem. + try: + path = params.Path or iis_dir.Path + newDir.Path = path + except AttributeError: + # If params.Type is IIS_WEBDIRECTORY, an exception is thrown + pass + newDir.AppCreate2(params.AppProtection) + # XXX - note that these Headers only work in IIS6 and earlier. IIS7 + # only supports them on the w3svc node - not even on individial sites, + # let alone individual extensions in the site! + if params.Headers: + newDir.HttpCustomHeaders = params.Headers + + log(2, "Setting directory options...") + newDir.AccessExecute = params.AccessExecute + newDir.AccessRead = params.AccessRead + newDir.AccessWrite = params.AccessWrite + newDir.AccessScript = params.AccessScript + newDir.ContentIndexed = params.ContentIndexed + newDir.EnableDirBrowsing = params.EnableDirBrowsing + newDir.EnableDefaultDoc = params.EnableDefaultDoc + if params.DefaultDoc is not None: + newDir.DefaultDoc = params.DefaultDoc + newDir.SetInfo() + return newDir + + +def CreateDirectory(params, options): + _CallHook(params, "PreInstall", options) + if not params.Name: + raise ConfigurationError("No Name param") + parent, name = params.split_path() + target_dir = GetObject(FindPath(options, params.Server, parent)) + + if not params.is_root(): + target_dir = _CreateDirectory(target_dir, name, params) + + AssignScriptMaps(params.ScriptMaps, target_dir, params.ScriptMapUpdate) + + _CallHook(params, "PostInstall", options, target_dir) + log(1, "Configured Virtual Directory: %s" % (params.Name,)) + return target_dir + +def AssignScriptMaps(script_maps, target, update='replace'): + """Updates IIS with the supplied script map information. 
+ + script_maps is a list of ScriptMapParameter objects + + target is an IIS Virtual Directory to assign the script maps to + + update is a string indicating how to update the maps, one of ('start', + 'end', or 'replace') + """ + # determine which function to use to assign script maps + script_map_func = '_AssignScriptMaps' + update.capitalize() + try: + script_map_func = eval(script_map_func) + except NameError: + msg = "Unknown ScriptMapUpdate option '%s'" % update + raise ConfigurationError(msg) + # use the str method to format the script maps for IIS + script_maps = [str(s) for s in script_maps] + # call the correct function + script_map_func(target, script_maps) + target.SetInfo() + +def get_unique_items(sequence, reference): + "Return items in sequence that can't be found in reference." + return tuple([item for item in sequence if item not in reference]) + +def _AssignScriptMapsReplace(target, script_maps): + target.ScriptMaps = script_maps + +def _AssignScriptMapsEnd(target, script_maps): + unique_new_maps = get_unique_items(script_maps, target.ScriptMaps) + target.ScriptMaps = target.ScriptMaps + unique_new_maps + +def _AssignScriptMapsStart(target, script_maps): + unique_new_maps = get_unique_items(script_maps, target.ScriptMaps) + target.ScriptMaps = unique_new_maps + target.ScriptMaps + +def CreateISAPIFilter(filterParams, options): + server = FindWebServer(options, filterParams.Server) + _CallHook(filterParams, "PreInstall", options) + try: + filters = GetObject(server+"/Filters") + except pythoncom.com_error as exc: + # Brand new sites don't have the '/Filters' collection - create it. + # Any errors other than 'not found' we shouldn't ignore. + if winerror.HRESULT_FACILITY(exc.hresult) != winerror.FACILITY_WIN32 or \ + winerror.HRESULT_CODE(exc.hresult) != winerror.ERROR_PATH_NOT_FOUND: + raise + server_ob = GetObject(server) + filters = server_ob.Create(_IIS_FILTERS, "Filters") + filters.FilterLoadOrder = "" + filters.SetInfo() + + # As for VirtualDir, delete an existing one. + assert filterParams.Name.strip("/"), "mustn't delete the root!" + try: + filters.Delete(_IIS_FILTER, filterParams.Name) + log(2, "Deleted old filter '%s'" % (filterParams.Name,)) + except pythoncom.com_error: + pass + newFilter = filters.Create(_IIS_FILTER, filterParams.Name) + log(2, "Created new ISAPI filter...") + assert os.path.isfile(filterParams.Path) + newFilter.FilterPath = filterParams.Path + newFilter.FilterDescription = filterParams.Description + newFilter.SetInfo() + load_order = [b.strip() for b in filters.FilterLoadOrder.split(",") if b] + if filterParams.Name not in load_order: + load_order.append(filterParams.Name) + filters.FilterLoadOrder = ",".join(load_order) + filters.SetInfo() + _CallHook(filterParams, "PostInstall", options, newFilter) + log (1, "Configured Filter: %s" % (filterParams.Name,)) + return newFilter + +def DeleteISAPIFilter(filterParams, options): + _CallHook(filterParams, "PreRemove", options) + server = FindWebServer(options, filterParams.Server) + ob_path = server+"/Filters" + try: + filters = GetObject(ob_path) + except pythoncom.com_error as details: + # failure to open the filters just means a totally clean IIS install + # (IIS5 at least has no 'Filters' key when freshly installed). + log(2, "ISAPI filter path '%s' did not exist." % (ob_path,)) + return + try: + assert filterParams.Name.strip("/"), "mustn't delete the root!" 
+ filters.Delete(_IIS_FILTER, filterParams.Name) + log(2, "Deleted ISAPI filter '%s'" % (filterParams.Name,)) + except pythoncom.com_error as details: + rc = _GetWin32ErrorCode(details) + if rc != winerror.ERROR_PATH_NOT_FOUND: + raise + log(2, "ISAPI filter '%s' did not exist." % (filterParams.Name,)) + # Remove from the load order + load_order = [b.strip() for b in filters.FilterLoadOrder.split(",") if b] + if filterParams.Name in load_order: + load_order.remove(filterParams.Name) + filters.FilterLoadOrder = ",".join(load_order) + filters.SetInfo() + _CallHook(filterParams, "PostRemove", options) + log (1, "Deleted Filter: %s" % (filterParams.Name,)) + +def _AddExtensionFile(module, def_groupid, def_desc, params, options): + group_id = params.AddExtensionFile_GroupID or def_groupid + desc = params.AddExtensionFile_Description or def_desc + try: + ob = GetObject(_IIS_OBJECT) + ob.AddExtensionFile(module, + params.AddExtensionFile_Enabled, + group_id, + params.AddExtensionFile_CanDelete, + desc) + log(2, "Added extension file '%s' (%s)" % (module, desc)) + except (pythoncom.com_error, AttributeError) as details: + # IIS5 always fails. Probably should upgrade this to + # complain more loudly if IIS6 fails. + log(2, "Failed to add extension file '%s': %s" % (module, details)) + +def AddExtensionFiles(params, options): + """Register the modules used by the filters/extensions as a trusted + 'extension module' - required by the default IIS6 security settings.""" + # Add each module only once. + added = {} + for vd in params.VirtualDirs: + for smp in vd.ScriptMaps: + if smp.Module not in added and smp.AddExtensionFile: + _AddExtensionFile(smp.Module, vd.Name, vd.Description, smp, + options) + added[smp.Module] = True + + for fd in params.Filters: + if fd.Path not in added and fd.AddExtensionFile: + _AddExtensionFile(fd.Path, fd.Name, fd.Description, fd, options) + added[fd.Path] = True + +def _DeleteExtensionFileRecord(module, options): + try: + ob = GetObject(_IIS_OBJECT) + ob.DeleteExtensionFileRecord(module) + log(2, "Deleted extension file record for '%s'" % module) + except (pythoncom.com_error, AttributeError) as details: + log(2, "Failed to remove extension file '%s': %s" % (module, details)) + +def DeleteExtensionFileRecords(params, options): + deleted = {} # only remove each .dll once. + for vd in params.VirtualDirs: + for smp in vd.ScriptMaps: + if smp.Module not in deleted and smp.AddExtensionFile: + _DeleteExtensionFileRecord(smp.Module, options) + deleted[smp.Module] = True + + for filter_def in params.Filters: + if filter_def.Path not in deleted and filter_def.AddExtensionFile: + _DeleteExtensionFileRecord(filter_def.Path, options) + deleted[filter_def.Path] = True + +def CheckLoaderModule(dll_name): + suffix = "" + if is_debug_build: suffix = "_d" + template = os.path.join(this_dir, + "PyISAPI_loader" + suffix + ".dll") + if not os.path.isfile(template): + raise ConfigurationError( + "Template loader '%s' does not exist" % (template,)) + # We can't do a simple "is newer" check, as the DLL is specific to the + # Python version. So we check the date-time and size are identical, + # and skip the copy in that case. 
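As an aside on the ScriptMapUpdate options handled earlier in this file: the three _AssignScriptMaps* helpers differ only in how the new maps are merged with whatever IIS already holds. A rough sketch of the three behaviours, using plain Python lists in place of the ADSI ScriptMaps property (the map strings below are invented for illustration):

# Sketch only - plain lists stand in for target.ScriptMaps; values are made up.
existing = ["*,C:\\old\\_old.dll,0"]
incoming = ["*,C:\\new\\_new.dll,0"]

unique = [m for m in incoming if m not in existing]  # mirrors get_unique_items()

replace_result = incoming            # 'replace': discard whatever was installed
end_result     = existing + unique   # 'end':     append only maps not already present
start_result   = unique + existing   # 'start':   prepend them instead

print(replace_result, end_result, start_result)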
+ src_stat = os.stat(template) + try: + dest_stat = os.stat(dll_name) + except os.error: + same = 0 + else: + same = src_stat[stat.ST_SIZE]==dest_stat[stat.ST_SIZE] and \ + src_stat[stat.ST_MTIME]==dest_stat[stat.ST_MTIME] + if not same: + log(2, "Updating %s->%s" % (template, dll_name)) + shutil.copyfile(template, dll_name) + shutil.copystat(template, dll_name) + else: + log(2, "%s is up to date." % (dll_name,)) + +def _CallHook(ob, hook_name, options, *extra_args): + func = getattr(ob, hook_name, None) + if func is not None: + args = (ob,options) + extra_args + func(*args) + +def Install(params, options): + _CallHook(params, "PreInstall", options) + for vd in params.VirtualDirs: + CreateDirectory(vd, options) + + for filter_def in params.Filters: + CreateISAPIFilter(filter_def, options) + + AddExtensionFiles(params, options) + + _CallHook(params, "PostInstall", options) + +def RemoveDirectory(params, options): + if params.is_root(): + return + try: + directory = GetObject(FindPath(options, params.Server, params.Name)) + except pythoncom.com_error as details: + rc = _GetWin32ErrorCode(details) + if rc != winerror.ERROR_PATH_NOT_FOUND: + raise + log(2, "VirtualDirectory '%s' did not exist" % params.Name) + directory = None + if directory is not None: + # Be robust should IIS get upset about unloading. + try: + directory.AppUnLoad() + except: + exc_val = sys.exc_info()[1] + log(2, "AppUnLoad() for %s failed: %s" % (params.Name, exc_val)) + # Continue trying to delete it. + try: + parent = GetObject(directory.Parent) + parent.Delete(directory.Class, directory.Name) + log (1, "Deleted Virtual Directory: %s" % (params.Name,)) + except: + exc_val = sys.exc_info()[1] + log(1, "Failed to remove directory %s: %s" % (params.Name, exc_val)) + +def RemoveScriptMaps(vd_params, options): + "Remove script maps from the already installed virtual directory" + parent, name = vd_params.split_path() + target_dir = GetObject(FindPath(options, vd_params.Server, parent)) + installed_maps = list(target_dir.ScriptMaps) + for _map in map(str, vd_params.ScriptMaps): + if _map in installed_maps: + installed_maps.remove(_map) + target_dir.ScriptMaps = installed_maps + target_dir.SetInfo() + +def Uninstall(params, options): + _CallHook(params, "PreRemove", options) + + DeleteExtensionFileRecords(params, options) + + for vd in params.VirtualDirs: + _CallHook(vd, "PreRemove", options) + + RemoveDirectory(vd, options) + if vd.is_root(): + # if this is installed to the root virtual directory, we can't delete it + # so remove the script maps. + RemoveScriptMaps(vd, options) + + _CallHook(vd, "PostRemove", options) + + for filter_def in params.Filters: + DeleteISAPIFilter(filter_def, options) + _CallHook(params, "PostRemove", options) + +# Patch up any missing module names in the params, replacing them with +# the DLL name that hosts this extension/filter. +def _PatchParamsModule(params, dll_name, file_must_exist = True): + if file_must_exist: + if not os.path.isfile(dll_name): + raise ConfigurationError("%s does not exist" % (dll_name,)) + + # Patch up all references to the DLL. + for f in params.Filters: + if f.Path is None: f.Path = dll_name + for d in params.VirtualDirs: + for sm in d.ScriptMaps: + if sm.Module is None: sm.Module = dll_name + +def GetLoaderModuleName(mod_name, check_module = None): + # find the name of the DLL hosting us. + # By default, this is "_{module_base_name}.dll" + if hasattr(sys, "frozen"): + # What to do? 
The .dll knows its name, but this is likely to be + # executed via a .exe, which does not know. + base, ext = os.path.splitext(mod_name) + path, base = os.path.split(base) + # handle the common case of 'foo.exe'/'foow.exe' + if base.endswith('w'): + base = base[:-1] + # For py2exe, we have '_foo.dll' as the standard pyisapi loader - but + # 'foo.dll' is what we use (it just delegates). + # So no leading '_' on the installed name. + dll_name = os.path.abspath(os.path.join(path, base + ".dll")) + else: + base, ext = os.path.splitext(mod_name) + path, base = os.path.split(base) + dll_name = os.path.abspath(os.path.join(path, "_" + base + ".dll")) + # Check we actually have it. + if check_module is None: check_module = not hasattr(sys, "frozen") + if check_module: + CheckLoaderModule(dll_name) + return dll_name + +# Note the 'log' params to these 'builtin' args - old versions of pywin32 +# didn't log at all in this function (by intent; anyone calling this was +# responsible). So existing code that calls this function with the old +# signature (ie, without a 'log' param) still gets the same behaviour as +# before... + +def InstallModule(conf_module_name, params, options, log=lambda *args:None): + "Install the extension" + if not hasattr(sys, "frozen"): + conf_module_name = os.path.abspath(conf_module_name) + if not os.path.isfile(conf_module_name): + raise ConfigurationError("%s does not exist" % (conf_module_name,)) + + loader_dll = GetLoaderModuleName(conf_module_name) + _PatchParamsModule(params, loader_dll) + Install(params, options) + log(1, "Installation complete.") + +def UninstallModule(conf_module_name, params, options, log=lambda *args:None): + "Remove the extension" + loader_dll = GetLoaderModuleName(conf_module_name, False) + _PatchParamsModule(params, loader_dll, False) + Uninstall(params, options) + log(1, "Uninstallation complete.") + +standard_arguments = { + "install" : InstallModule, + "remove" : UninstallModule, +} + +def build_usage(handler_map): + docstrings = [handler.__doc__ for handler in handler_map.values()] + all_args = dict(zip(iter(handler_map.keys()), docstrings)) + arg_names = "|".join(iter(all_args.keys())) + usage_string = "%prog [options] [" + arg_names + "]\n" + usage_string += "commands:\n" + for arg, desc in all_args.items(): + usage_string += " %-10s: %s" % (arg, desc) + "\n" + return usage_string[:-1] + +def MergeStandardOptions(options, params): + """ + Take an options object generated by the command line and merge + the values into the IISParameters object. + """ + pass + + +# We support 2 ways of extending our command-line/install support. +# * Many of the installation items allow you to specify "PreInstall", +# "PostInstall", "PreRemove" and "PostRemove" hooks +# All hooks are called with the 'params' object being operated on, and +# the 'optparser' options for this session (ie, the command-line options) +# PostInstall for VirtualDirectories and Filters both have an additional +# param - the ADSI object just created. +# * You can pass your own option parser for us to use, and/or define a map +# with your own custom arg handlers. It is a map of 'arg'->function. +# The function is called with (options, log_fn, arg). The function's +# docstring is used in the usage output. +def HandleCommandLine(params, argv=None, conf_module_name = None, + default_arg = "install", + opt_parser = None, custom_arg_handlers = {}): + """Perform installation or removal of an ISAPI filter or extension. 
+ + This module handles standard command-line options and configuration + information, and installs, removes or updates the configuration of an + ISAPI filter or extension. + + You must pass your configuration information in params - all other + arguments are optional, and allow you to configure the installation + process. + """ + global verbose + from optparse import OptionParser + + argv = argv or sys.argv + if not conf_module_name: + conf_module_name = sys.argv[0] + # convert to a long name so that if we were somehow registered with + # the "short" version but unregistered with the "long" version we + # still work (that will depend on exactly how the installer was + # started) + try: + conf_module_name = win32api.GetLongPathName(conf_module_name) + except win32api.error as exc: + log(2, "Couldn't determine the long name for %r: %s" % + (conf_module_name, exc)) + + if opt_parser is None: + # Build our own parser. + parser = OptionParser(usage='') + else: + # The caller is providing their own filter, presumably with their + # own options all setup. + parser = opt_parser + + # build a usage string if we don't have one. + if not parser.get_usage(): + all_handlers = standard_arguments.copy() + all_handlers.update(custom_arg_handlers) + parser.set_usage(build_usage(all_handlers)) + + # allow the user to use uninstall as a synonym for remove if it wasn't + # defined by the custom arg handlers. + all_handlers.setdefault('uninstall', all_handlers['remove']) + + parser.add_option("-q", "--quiet", + action="store_false", dest="verbose", default=True, + help="don't print status messages to stdout") + parser.add_option("-v", "--verbosity", action="count", + dest="verbose", default=1, + help="increase the verbosity of status messages") + parser.add_option("", "--server", action="store", + help="Specifies the IIS server to install/uninstall on." \ + " Default is '%s/1'" % (_IIS_OBJECT,)) + + (options, args) = parser.parse_args(argv[1:]) + MergeStandardOptions(options, params) + verbose = options.verbose + if not args: + args = [default_arg] + try: + for arg in args: + handler = all_handlers[arg] + handler(conf_module_name, params, options, log) + except (ItemNotFound, InstallationError) as details: + if options.verbose > 1: + traceback.print_exc() + print("%s: %s" % (details.__class__.__name__, details)) + except KeyError: + parser.error("Invalid arg '%s'" % arg) diff --git a/venv/Lib/site-packages/isapi/isapicon.py b/venv/Lib/site-packages/isapi/isapicon.py new file mode 100644 index 00000000..d6f44b5e --- /dev/null +++ b/venv/Lib/site-packages/isapi/isapicon.py @@ -0,0 +1,120 @@ +"""Constants needed by ISAPI filters and extensions.""" +# ====================================================================== +# Copyright 2002-2003 by Blackdog Software Pty Ltd. +# +# All Rights Reserved +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose and without fee is hereby +# granted, provided that the above copyright notice appear in all +# copies and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Blackdog Software not be used in advertising or publicity pertaining to +# distribution of the software without specific, written prior +# permission. 
+# +# BLACKDOG SOFTWARE DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN +# NO EVENT SHALL BLACKDOG SOFTWARE BE LIABLE FOR ANY SPECIAL, INDIRECT OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +# ====================================================================== + +# HTTP reply codes + +HTTP_CONTINUE = 100 +HTTP_SWITCHING_PROTOCOLS = 101 +HTTP_PROCESSING = 102 +HTTP_OK = 200 +HTTP_CREATED = 201 +HTTP_ACCEPTED = 202 +HTTP_NON_AUTHORITATIVE = 203 +HTTP_NO_CONTENT = 204 +HTTP_RESET_CONTENT = 205 +HTTP_PARTIAL_CONTENT = 206 +HTTP_MULTI_STATUS = 207 +HTTP_MULTIPLE_CHOICES = 300 +HTTP_MOVED_PERMANENTLY = 301 +HTTP_MOVED_TEMPORARILY = 302 +HTTP_SEE_OTHER = 303 +HTTP_NOT_MODIFIED = 304 +HTTP_USE_PROXY = 305 +HTTP_TEMPORARY_REDIRECT = 307 +HTTP_BAD_REQUEST = 400 +HTTP_UNAUTHORIZED = 401 +HTTP_PAYMENT_REQUIRED = 402 +HTTP_FORBIDDEN = 403 +HTTP_NOT_FOUND = 404 +HTTP_METHOD_NOT_ALLOWED = 405 +HTTP_NOT_ACCEPTABLE = 406 +HTTP_PROXY_AUTHENTICATION_REQUIRED= 407 +HTTP_REQUEST_TIME_OUT = 408 +HTTP_CONFLICT = 409 +HTTP_GONE = 410 +HTTP_LENGTH_REQUIRED = 411 +HTTP_PRECONDITION_FAILED = 412 +HTTP_REQUEST_ENTITY_TOO_LARGE = 413 +HTTP_REQUEST_URI_TOO_LARGE = 414 +HTTP_UNSUPPORTED_MEDIA_TYPE = 415 +HTTP_RANGE_NOT_SATISFIABLE = 416 +HTTP_EXPECTATION_FAILED = 417 +HTTP_UNPROCESSABLE_ENTITY = 422 +HTTP_INTERNAL_SERVER_ERROR = 500 +HTTP_NOT_IMPLEMENTED = 501 +HTTP_BAD_GATEWAY = 502 +HTTP_SERVICE_UNAVAILABLE = 503 +HTTP_GATEWAY_TIME_OUT = 504 +HTTP_VERSION_NOT_SUPPORTED = 505 +HTTP_VARIANT_ALSO_VARIES = 506 + +HSE_STATUS_SUCCESS = 1 +HSE_STATUS_SUCCESS_AND_KEEP_CONN = 2 +HSE_STATUS_PENDING = 3 +HSE_STATUS_ERROR = 4 + +SF_NOTIFY_SECURE_PORT = 0x00000001 +SF_NOTIFY_NONSECURE_PORT = 0x00000002 +SF_NOTIFY_READ_RAW_DATA = 0x00008000 +SF_NOTIFY_PREPROC_HEADERS = 0x00004000 +SF_NOTIFY_AUTHENTICATION = 0x00002000 +SF_NOTIFY_URL_MAP = 0x00001000 +SF_NOTIFY_ACCESS_DENIED = 0x00000800 +SF_NOTIFY_SEND_RESPONSE = 0x00000040 +SF_NOTIFY_SEND_RAW_DATA = 0x00000400 +SF_NOTIFY_LOG = 0x00000200 +SF_NOTIFY_END_OF_REQUEST = 0x00000080 +SF_NOTIFY_END_OF_NET_SESSION = 0x00000100 + +SF_NOTIFY_ORDER_HIGH = 0x00080000 +SF_NOTIFY_ORDER_MEDIUM = 0x00040000 +SF_NOTIFY_ORDER_LOW = 0x00020000 +SF_NOTIFY_ORDER_DEFAULT = SF_NOTIFY_ORDER_LOW + +SF_NOTIFY_ORDER_MASK = (SF_NOTIFY_ORDER_HIGH | \ + SF_NOTIFY_ORDER_MEDIUM | \ + SF_NOTIFY_ORDER_LOW) + +SF_STATUS_REQ_FINISHED = 134217728 # 0x8000000 +SF_STATUS_REQ_FINISHED_KEEP_CONN = 134217728 + 1 +SF_STATUS_REQ_NEXT_NOTIFICATION = 134217728 + 2 +SF_STATUS_REQ_HANDLED_NOTIFICATION = 134217728 + 3 +SF_STATUS_REQ_ERROR = 134217728 + 4 +SF_STATUS_REQ_READ_NEXT = 134217728 + 5 + +HSE_IO_SYNC = 0x00000001 # for WriteClient +HSE_IO_ASYNC = 0x00000002 # for WriteClient/TF/EU +HSE_IO_DISCONNECT_AFTER_SEND = 0x00000004 # for TF +HSE_IO_SEND_HEADERS = 0x00000008 # for TF +HSE_IO_NODELAY = 0x00001000 # turn off nagling +# These two are only used by VectorSend +HSE_IO_FINAL_SEND = 0x00000010 +HSE_IO_CACHE_RESPONSE = 0x00000020 + +HSE_EXEC_URL_NO_HEADERS = 0x02 +HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR = 0x04 +HSE_EXEC_URL_IGNORE_VALIDATION_AND_RANGE = 0x10 +HSE_EXEC_URL_DISABLE_CUSTOM_ERROR = 0x20 +HSE_EXEC_URL_SSI_CMD = 0x40 +HSE_EXEC_URL_HTTP_CACHE_ELIGIBLE = 0x80 diff --git a/venv/Lib/site-packages/isapi/samples/README.txt 
b/venv/Lib/site-packages/isapi/samples/README.txt new file mode 100644 index 00000000..cff87587 --- /dev/null +++ b/venv/Lib/site-packages/isapi/samples/README.txt @@ -0,0 +1,20 @@ +In this directory you will find examples of ISAPI filters and extensions. + +The filter loading mechanism works like this: +* IIS loads the special Python "loader" DLL. This DLL will generally have a + leading underscore as part of its name. +* This loader DLL looks for a Python module, by removing the first letter of + the DLL base name. + +This means that an ISAPI extension module consists of 2 key files - the loader +DLL (eg, "_MyIISModule.dll", and a Python module (which for this example +would be "MyIISModule.py") + +When you install an ISAPI extension, the installation code checks to see if +there is a loader DLL for your implementation file - if one does not exist, +or the standard loader is different, it is copied and renamed accordingly. + +We use this mechanism to provide the maximum separation between different +Python extensions installed on the same server - otherwise filter order and +other tricky IIS semantics would need to be replicated. Also, each filter +gets its own thread-pool, etc. diff --git a/venv/Lib/site-packages/isapi/samples/__pycache__/advanced.cpython-36.pyc b/venv/Lib/site-packages/isapi/samples/__pycache__/advanced.cpython-36.pyc new file mode 100644 index 00000000..91f83127 Binary files /dev/null and b/venv/Lib/site-packages/isapi/samples/__pycache__/advanced.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/samples/__pycache__/redirector.cpython-36.pyc b/venv/Lib/site-packages/isapi/samples/__pycache__/redirector.cpython-36.pyc new file mode 100644 index 00000000..beaf1489 Binary files /dev/null and b/venv/Lib/site-packages/isapi/samples/__pycache__/redirector.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/samples/__pycache__/redirector_asynch.cpython-36.pyc b/venv/Lib/site-packages/isapi/samples/__pycache__/redirector_asynch.cpython-36.pyc new file mode 100644 index 00000000..2ea64871 Binary files /dev/null and b/venv/Lib/site-packages/isapi/samples/__pycache__/redirector_asynch.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/samples/__pycache__/redirector_with_filter.cpython-36.pyc b/venv/Lib/site-packages/isapi/samples/__pycache__/redirector_with_filter.cpython-36.pyc new file mode 100644 index 00000000..4852f7be Binary files /dev/null and b/venv/Lib/site-packages/isapi/samples/__pycache__/redirector_with_filter.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/samples/__pycache__/test.cpython-36.pyc b/venv/Lib/site-packages/isapi/samples/__pycache__/test.cpython-36.pyc new file mode 100644 index 00000000..1b470b29 Binary files /dev/null and b/venv/Lib/site-packages/isapi/samples/__pycache__/test.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/samples/advanced.py b/venv/Lib/site-packages/isapi/samples/advanced.py new file mode 100644 index 00000000..b4937f9e --- /dev/null +++ b/venv/Lib/site-packages/isapi/samples/advanced.py @@ -0,0 +1,196 @@ +# This extension demonstrates some advanced features of the Python ISAPI +# framework. +# We demonstrate: +# * Reloading your Python module without shutting down IIS (eg, when your +# .py implementation file changes.) +# * Custom command-line handling - both additional options and commands. +# * Using a query string - any part of the URL after a '?' is assumed to +# be "variable names" separated by '&' - we will print the values of +# these server variables. 
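The loader-DLL naming rule that the samples README above describes (and that GetLoaderModuleName in install.py implements) can be sketched in a few lines; MyIISModule is simply the name the README uses in its own example:

import os

def loader_dll_for(py_path):
    # "MyIISModule.py" becomes "_MyIISModule.dll" in the same directory; the loader
    # DLL later drops the first character of its base name to find the Python module.
    directory, base = os.path.split(os.path.splitext(py_path)[0])
    return os.path.join(directory, "_" + base + ".dll")

print(loader_dll_for(r"C:\inetpub\MyIISModule.py"))  # C:\inetpub\_MyIISModule.dll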
+# * If the tail portion of the URL is "ReportUnhealthy", IIS will be +# notified we are unhealthy via a HSE_REQ_REPORT_UNHEALTHY request. +# Whether this is acted upon depends on if the IIS health-checking +# tools are installed, but you should always see the reason written +# to the Windows event log - see the IIS documentation for more. + +from isapi import isapicon +from isapi.simple import SimpleExtension +import sys, os, stat + +if hasattr(sys, "isapidllhandle"): + import win32traceutil + +# Notes on reloading +# If your HttpFilterProc or HttpExtensionProc functions raises +# 'isapi.InternalReloadException', the framework will not treat it +# as an error but instead will terminate your extension, reload your +# extension module, re-initialize the instance, and re-issue the request. +# The Initialize functions are called with None as their param. The +# return code from the terminate function is ignored. +# +# This is all the framework does to help you. It is up to your code +# when you raise this exception. This sample uses a Win32 "find +# notification". Whenever windows tells us one of the files in the +# directory has changed, we check if the time of our source-file has +# changed, and set a flag. Next imcoming request, we check the flag and +# raise the special exception if set. +# +# The end result is that the module is automatically reloaded whenever +# the source-file changes - you need take no further action to see your +# changes reflected in the running server. + +# The framework only reloads your module - if you have libraries you +# depend on and also want reloaded, you must arrange for this yourself. +# One way of doing this would be to special case the import of these +# modules. Eg: +# -- +# try: +# my_module = reload(my_module) # module already imported - reload it +# except NameError: +# import my_module # first time around - import it. +# -- +# When your module is imported for the first time, the NameError will +# be raised, and the module imported. When the ISAPI framework reloads +# your module, the existing module will avoid the NameError, and allow +# you to reload that module. + +from isapi import InternalReloadException +import win32event, win32file, winerror, win32con, threading + +try: + reload_counter += 1 +except NameError: + reload_counter = 0 + +# A watcher thread that checks for __file__ changing. +# When it detects it, it simply sets "change_detected" to true. +class ReloadWatcherThread(threading.Thread): + def __init__(self): + self.change_detected = False + self.filename = __file__ + if self.filename.endswith("c") or self.filename.endswith("o"): + self.filename = self.filename[:-1] + self.handle = win32file.FindFirstChangeNotification( + os.path.dirname(self.filename), + False, # watch tree? + win32con.FILE_NOTIFY_CHANGE_LAST_WRITE) + threading.Thread.__init__(self) + + def run(self): + last_time = os.stat(self.filename)[stat.ST_MTIME] + while 1: + try: + rc = win32event.WaitForSingleObject(self.handle, + win32event.INFINITE) + win32file.FindNextChangeNotification(self.handle) + except win32event.error as details: + # handle closed - thread should terminate. 
+ if details.winerror != winerror.ERROR_INVALID_HANDLE: + raise + break + this_time = os.stat(self.filename)[stat.ST_MTIME] + if this_time != last_time: + print("Detected file change - flagging for reload.") + self.change_detected = True + last_time = this_time + + def stop(self): + win32file.FindCloseChangeNotification(self.handle) + +# The ISAPI extension - handles requests in our virtual dir, and sends the +# response to the client. +class Extension(SimpleExtension): + "Python advanced sample Extension" + def __init__(self): + self.reload_watcher = ReloadWatcherThread() + self.reload_watcher.start() + + def HttpExtensionProc(self, ecb): + # NOTE: If you use a ThreadPoolExtension, you must still perform + # this check in HttpExtensionProc - raising the exception from + # The "Dispatch" method will just cause the exception to be + # rendered to the browser. + if self.reload_watcher.change_detected: + print("Doing reload") + raise InternalReloadException + + url = ecb.GetServerVariable("UNICODE_URL") + if url.endswith("ReportUnhealthy"): + ecb.ReportUnhealthy("I'm a little sick") + + ecb.SendResponseHeaders("200 OK", "Content-Type: text/html\r\n\r\n", 0) + print("", file=ecb) + + qs = ecb.GetServerVariable("QUERY_STRING") + if qs: + queries = qs.split("&") + print("

", file=ecb)
+            for q in queries:
+                val = ecb.GetServerVariable(q, '<no such variable>')
+                print("%s=%r" % (q, val), file=ecb)
+            print("

", file=ecb) + + print("This module has been imported", file=ecb) + print("%d times" % (reload_counter,), file=ecb) + print("", file=ecb) + ecb.close() + return isapicon.HSE_STATUS_SUCCESS + + def TerminateExtension(self, status): + self.reload_watcher.stop() + +# The entry points for the ISAPI extension. +def __ExtensionFactory__(): + return Extension() + +# Our special command line customization. +# Pre-install hook for our virtual directory. +def PreInstallDirectory(params, options): + # If the user used our special '--description' option, + # then we override our default. + if options.description: + params.Description = options.description + +# Post install hook for our entire script +def PostInstall(params, options): + print() + print("The sample has been installed.") + print("Point your browser to /AdvancedPythonSample") + print("If you modify the source file and reload the page,") + print("you should see the reload counter increment") + +# Handler for our custom 'status' argument. +def status_handler(options, log, arg): + "Query the status of something" + print("Everything seems to be fine!") + +custom_arg_handlers = {"status": status_handler} + +if __name__=='__main__': + # If run from the command-line, install ourselves. + from isapi.install import * + params = ISAPIParameters(PostInstall = PostInstall) + # Setup the virtual directories - this is a list of directories our + # extension uses - in this case only 1. + # Each extension has a "script map" - this is the mapping of ISAPI + # extensions. + sm = [ + ScriptMapParams(Extension="*", Flags=0) + ] + vd = VirtualDirParameters(Name="AdvancedPythonSample", + Description = Extension.__doc__, + ScriptMaps = sm, + ScriptMapUpdate = "replace", + # specify the pre-install hook. + PreInstall = PreInstallDirectory + ) + params.VirtualDirs = [vd] + # Setup our custom option parser. + from optparse import OptionParser + parser = OptionParser('') # blank usage, so isapi sets it. + parser.add_option("", "--description", + action="store", + help="custom description to use for the virtual directory") + + HandleCommandLine(params, opt_parser=parser, + custom_arg_handlers = custom_arg_handlers) diff --git a/venv/Lib/site-packages/isapi/samples/redirector.py b/venv/Lib/site-packages/isapi/samples/redirector.py new file mode 100644 index 00000000..bc81e037 --- /dev/null +++ b/venv/Lib/site-packages/isapi/samples/redirector.py @@ -0,0 +1,109 @@ +# This is a sample ISAPI extension written in Python. +# +# Please see README.txt in this directory, and specifically the +# information about the "loader" DLL - installing this sample will create +# "_redirector.dll" in the current directory. The readme explains this. + +# Executing this script (or any server config script) will install the extension +# into your web server. As the server executes, the PyISAPI framework will load +# this module and create your Extension and Filter objects. + +# This is the simplest possible redirector (or proxy) we can write. The +# extension installs with a mask of '*' in the root of the site. +# As an added bonus though, we optionally show how, on IIS6 and later, we +# can use HSE_ERQ_EXEC_URL to ignore certain requests - in IIS5 and earlier +# we can only do this with an ISAPI filter - see redirector_with_filter for +# an example. If this sample is run on IIS5 or earlier it simply ignores +# any excludes. + +from isapi import isapicon, threaded_extension +import sys +import traceback +try: + from urllib.request import urlopen +except ImportError: + # py3k spelling... 
+ from urllib.request import urlopen +import win32api + +# sys.isapidllhandle will exist when we are loaded by the IIS framework. +# In this case we redirect our output to the win32traceutil collector. +if hasattr(sys, "isapidllhandle"): + import win32traceutil + +# The site we are proxying. +proxy = "http://www.python.org" + +# Urls we exclude (ie, allow IIS to handle itself) - all are lowered, +# and these entries exist by default on Vista... +excludes = ["/iisstart.htm", "/welcome.png"] + +# An "io completion" function, called when ecb.ExecURL completes... +def io_callback(ecb, url, cbIO, errcode): + # Get the status of our ExecURL + httpstatus, substatus, win32 = ecb.GetExecURLStatus() + print("ExecURL of %r finished with http status %d.%d, win32 status %d (%s)" % ( + url, httpstatus, substatus, win32, win32api.FormatMessage(win32).strip())) + # nothing more to do! + ecb.DoneWithSession() + +# The ISAPI extension - handles all requests in the site. +class Extension(threaded_extension.ThreadPoolExtension): + "Python sample Extension" + def Dispatch(self, ecb): + # Note that our ThreadPoolExtension base class will catch exceptions + # in our Dispatch method, and write the traceback to the client. + # That is perfect for this sample, so we don't catch our own. + #print 'IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),) + url = ecb.GetServerVariable("URL").decode("ascii") + for exclude in excludes: + if url.lower().startswith(exclude): + print("excluding %s" % url) + if ecb.Version < 0x60000: + print("(but this is IIS5 or earlier - can't do 'excludes')") + else: + ecb.IOCompletion(io_callback, url) + ecb.ExecURL(None, None, None, None, None, isapicon.HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR) + return isapicon.HSE_STATUS_PENDING + + new_url = proxy + url + print("Opening %s" % new_url) + fp = urlopen(new_url) + headers = fp.info() + # subtle py3k breakage: in py3k, str(headers) has normalized \r\n + # back to \n and also stuck an extra \n term. py2k leaves the + # \r\n from the server in tact and finishes with a single term. + if sys.version_info < (3,0): + header_text = str(headers) + "\r\n" + else: + # take *all* trailing \n off, replace remaining with + # \r\n, then add the 2 trailing \r\n. + header_text = str(headers).rstrip('\n').replace('\n', '\r\n') + '\r\n\r\n' + ecb.SendResponseHeaders("200 OK", header_text, False) + ecb.WriteClient(fp.read()) + ecb.DoneWithSession() + print("Returned data from '%s'" % (new_url,)) + return isapicon.HSE_STATUS_SUCCESS + +# The entry points for the ISAPI extension. +def __ExtensionFactory__(): + return Extension() + +if __name__=='__main__': + # If run from the command-line, install ourselves. + from isapi.install import * + params = ISAPIParameters() + # Setup the virtual directories - this is a list of directories our + # extension uses - in this case only 1. + # Each extension has a "script map" - this is the mapping of ISAPI + # extensions. + sm = [ + ScriptMapParams(Extension="*", Flags=0) + ] + vd = VirtualDirParameters(Name="/", + Description = Extension.__doc__, + ScriptMaps = sm, + ScriptMapUpdate = "replace" + ) + params.VirtualDirs = [vd] + HandleCommandLine(params) diff --git a/venv/Lib/site-packages/isapi/samples/redirector_asynch.py b/venv/Lib/site-packages/isapi/samples/redirector_asynch.py new file mode 100644 index 00000000..e8da439e --- /dev/null +++ b/venv/Lib/site-packages/isapi/samples/redirector_asynch.py @@ -0,0 +1,78 @@ +# This is a sample ISAPI extension written in Python. 
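To make the py2/py3 header handling in redirector.py above concrete, here is roughly what the Python 3 branch does to a small, made-up header block:

# Invented header text, shaped the way str(headers) looks under Python 3
# (bare "\n" line endings plus an extra trailing "\n").
raw = "Content-Type: text/html\nContent-Length: 123\n\n"
header_text = raw.rstrip('\n').replace('\n', '\r\n') + '\r\n\r\n'
assert header_text == "Content-Type: text/html\r\nContent-Length: 123\r\n\r\n"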
+ +# This is like the other 'redirector' samples, but uses asnch IO when writing +# back to the client (it does *not* use asynch io talking to the remote +# server!) + +from isapi import isapicon, threaded_extension +import sys +import traceback +import urllib.request, urllib.parse, urllib.error + +# sys.isapidllhandle will exist when we are loaded by the IIS framework. +# In this case we redirect our output to the win32traceutil collector. +if hasattr(sys, "isapidllhandle"): + import win32traceutil + +# The site we are proxying. +proxy = "http://www.python.org" + +# We synchronously read chunks of this size then asynchronously write them. +CHUNK_SIZE=8192 + +# The callback made when IIS completes the asynch write. +def io_callback(ecb, fp, cbIO, errcode): + print("IO callback", ecb, fp, cbIO, errcode) + chunk = fp.read(CHUNK_SIZE) + if chunk: + ecb.WriteClient(chunk, isapicon.HSE_IO_ASYNC) + # and wait for the next callback to say this chunk is done. + else: + # eof - say we are complete. + fp.close() + ecb.DoneWithSession() + +# The ISAPI extension - handles all requests in the site. +class Extension(threaded_extension.ThreadPoolExtension): + "Python sample proxy server - asynch version." + def Dispatch(self, ecb): + print('IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),)) + url = ecb.GetServerVariable("URL") + + new_url = proxy + url + print("Opening %s" % new_url) + fp = urllib.request.urlopen(new_url) + headers = fp.info() + ecb.SendResponseHeaders("200 OK", str(headers) + "\r\n", False) + # now send the first chunk asynchronously + ecb.ReqIOCompletion(io_callback, fp) + chunk = fp.read(CHUNK_SIZE) + if chunk: + ecb.WriteClient(chunk, isapicon.HSE_IO_ASYNC) + return isapicon.HSE_STATUS_PENDING + # no data - just close things now. + ecb.DoneWithSession() + return isapicon.HSE_STATUS_SUCCESS + +# The entry points for the ISAPI extension. +def __ExtensionFactory__(): + return Extension() + +if __name__=='__main__': + # If run from the command-line, install ourselves. + from isapi.install import * + params = ISAPIParameters() + # Setup the virtual directories - this is a list of directories our + # extension uses - in this case only 1. + # Each extension has a "script map" - this is the mapping of ISAPI + # extensions. + sm = [ + ScriptMapParams(Extension="*", Flags=0) + ] + vd = VirtualDirParameters(Name="/", + Description = Extension.__doc__, + ScriptMaps = sm, + ScriptMapUpdate = "replace" + ) + params.VirtualDirs = [vd] + HandleCommandLine(params) diff --git a/venv/Lib/site-packages/isapi/samples/redirector_with_filter.py b/venv/Lib/site-packages/isapi/samples/redirector_with_filter.py new file mode 100644 index 00000000..85a9db00 --- /dev/null +++ b/venv/Lib/site-packages/isapi/samples/redirector_with_filter.py @@ -0,0 +1,155 @@ +# This is a sample configuration file for an ISAPI filter and extension +# written in Python. +# +# Please see README.txt in this directory, and specifically the +# information about the "loader" DLL - installing this sample will create +# "_redirector_with_filter.dll" in the current directory. The readme explains +# this. + +# Executing this script (or any server config script) will install the extension +# into your web server. As the server executes, the PyISAPI framework will load +# this module and create your Extension and Filter objects. + +# This sample provides sample redirector: +# It is implemented by a filter and an extension, so that some requests can +# be ignored. 
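The filter/extension split described above boils down to a URL rewrite followed by a prefix strip; a tiny sketch of that round trip, using the proxy and virtualdir values defined later in this sample:

proxy = "http://www.python.org"   # values from this sample
virtualdir = "/python"

incoming = "/index.html"                          # URL as IIS first sees it
rewritten = virtualdir + incoming                 # the filter: "/python/index.html"
upstream = proxy + rewritten[len(virtualdir):]    # the extension strips the prefix again
assert upstream == "http://www.python.org/index.html"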
Compare with 'redirector_simple' which avoids the filter, but +# is unable to selectively ignore certain requests. +# The process is sample uses is: +# * The filter is installed globally, as all filters are. +# * A Virtual Directory named "python" is setup. This dir has our ISAPI +# extension as the only application, mapped to file-extension '*'. Thus, our +# extension handles *all* requests in this directory. +# The basic process is that the filter does URL rewriting, redirecting every +# URL to our Virtual Directory. Our extension then handles this request, +# forwarding the data from the proxied site. +# For example: +# * URL of "index.html" comes in. +# * Filter rewrites this to "/python/index.html" +# * Our extension sees the full "/python/index.html", removes the leading +# portion, and opens and forwards the remote URL. + + +# This sample is very small - it avoid most error handling, etc. It is for +# demonstration purposes only. + +from isapi import isapicon, threaded_extension +from isapi.simple import SimpleFilter +import sys +import traceback +import urllib.request, urllib.parse, urllib.error + +# sys.isapidllhandle will exist when we are loaded by the IIS framework. +# In this case we redirect our output to the win32traceutil collector. +if hasattr(sys, "isapidllhandle"): + import win32traceutil + +# The site we are proxying. +proxy = "http://www.python.org" +# The name of the virtual directory we install in, and redirect from. +virtualdir = "/python" + +# The key feature of this redirector over the simple redirector is that it +# can choose to ignore certain responses by having the filter not rewrite them +# to our virtual dir. For this sample, we just exclude the IIS help directory. + +# The ISAPI extension - handles requests in our virtual dir, and sends the +# response to the client. +class Extension(threaded_extension.ThreadPoolExtension): + "Python sample Extension" + def Dispatch(self, ecb): + # Note that our ThreadPoolExtension base class will catch exceptions + # in our Dispatch method, and write the traceback to the client. + # That is perfect for this sample, so we don't catch our own. + #print 'IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),) + url = ecb.GetServerVariable("URL") + if url.startswith(virtualdir): + new_url = proxy + url[len(virtualdir):] + print("Opening", new_url) + fp = urllib.request.urlopen(new_url) + headers = fp.info() + ecb.SendResponseHeaders("200 OK", str(headers) + "\r\n", False) + ecb.WriteClient(fp.read()) + ecb.DoneWithSession() + print("Returned data from '%s'!" % (new_url,)) + else: + # this should never happen - we should only see requests that + # start with our virtual directory name. + print("Not proxying '%s'" % (url,)) + + +# The ISAPI filter. +class Filter(SimpleFilter): + "Sample Python Redirector" + filter_flags = isapicon.SF_NOTIFY_PREPROC_HEADERS | \ + isapicon.SF_NOTIFY_ORDER_DEFAULT + + def HttpFilterProc(self, fc): + #print "Filter Dispatch" + nt = fc.NotificationType + if nt != isapicon.SF_NOTIFY_PREPROC_HEADERS: + return isapicon.SF_STATUS_REQ_NEXT_NOTIFICATION + + pp = fc.GetData() + url = pp.GetHeader("url") + #print "URL is '%s'" % (url,) + prefix = virtualdir + if not url.startswith(prefix): + new_url = prefix + url + print("New proxied URL is '%s'" % (new_url,)) + pp.SetHeader("url", new_url) + # For the sake of demonstration, show how the FilterContext + # attribute is used. 
It always starts out life as None, and + # any assignments made are automatically decref'd by the + # framework during a SF_NOTIFY_END_OF_NET_SESSION notification. + if fc.FilterContext is None: + fc.FilterContext = 0 + fc.FilterContext += 1 + print("This is request number %d on this connection" % fc.FilterContext) + return isapicon.SF_STATUS_REQ_HANDLED_NOTIFICATION + else: + print("Filter ignoring URL '%s'" % (url,)) + + # Some older code that handled SF_NOTIFY_URL_MAP. + #~ print "Have URL_MAP notify" + #~ urlmap = fc.GetData() + #~ print "URI is", urlmap.URL + #~ print "Path is", urlmap.PhysicalPath + #~ if urlmap.URL.startswith("/UC/"): + #~ # Find the /UC/ in the physical path, and nuke it (except + #~ # as the path is physical, it is \) + #~ p = urlmap.PhysicalPath + #~ pos = p.index("\\UC\\") + #~ p = p[:pos] + p[pos+3:] + #~ p = r"E:\src\pyisapi\webroot\PyTest\formTest.htm" + #~ print "New path is", p + #~ urlmap.PhysicalPath = p + +# The entry points for the ISAPI extension. +def __FilterFactory__(): + return Filter() +def __ExtensionFactory__(): + return Extension() + +if __name__=='__main__': + # If run from the command-line, install ourselves. + from isapi.install import * + params = ISAPIParameters() + # Setup all filters - these are global to the site. + params.Filters = [ + FilterParameters(Name="PythonRedirector", + Description=Filter.__doc__), + ] + # Setup the virtual directories - this is a list of directories our + # extension uses - in this case only 1. + # Each extension has a "script map" - this is the mapping of ISAPI + # extensions. + sm = [ + ScriptMapParams(Extension="*", Flags=0) + ] + vd = VirtualDirParameters(Name=virtualdir[1:], + Description = Extension.__doc__, + ScriptMaps = sm, + ScriptMapUpdate = "replace" + ) + params.VirtualDirs = [vd] + HandleCommandLine(params) diff --git a/venv/Lib/site-packages/isapi/samples/test.py b/venv/Lib/site-packages/isapi/samples/test.py new file mode 100644 index 00000000..b769114a --- /dev/null +++ b/venv/Lib/site-packages/isapi/samples/test.py @@ -0,0 +1,154 @@ +# This extension is used mainly for testing purposes - it is not +# designed to be a simple sample, but instead is a hotch-potch of things +# that attempts to exercise the framework. + +from isapi import isapicon +from isapi.simple import SimpleExtension +import sys, os, stat + +if hasattr(sys, "isapidllhandle"): + import win32traceutil + +# We use the same reload support as 'advanced.py' demonstrates. +from isapi import InternalReloadException +import win32event, win32file, winerror, win32con, threading + +# A watcher thread that checks for __file__ changing. +# When it detects it, it simply sets "change_detected" to true. +class ReloadWatcherThread(threading.Thread): + def __init__(self): + self.change_detected = False + self.filename = __file__ + if self.filename.endswith("c") or self.filename.endswith("o"): + self.filename = self.filename[:-1] + self.handle = win32file.FindFirstChangeNotification( + os.path.dirname(self.filename), + False, # watch tree? + win32con.FILE_NOTIFY_CHANGE_LAST_WRITE) + threading.Thread.__init__(self) + + def run(self): + last_time = os.stat(self.filename)[stat.ST_MTIME] + while 1: + try: + rc = win32event.WaitForSingleObject(self.handle, + win32event.INFINITE) + win32file.FindNextChangeNotification(self.handle) + except win32event.error as details: + # handle closed - thread should terminate. 
+ if details.winerror != winerror.ERROR_INVALID_HANDLE: + raise + break + this_time = os.stat(self.filename)[stat.ST_MTIME] + if this_time != last_time: + print("Detected file change - flagging for reload.") + self.change_detected = True + last_time = this_time + + def stop(self): + win32file.FindCloseChangeNotification(self.handle) + +def TransmitFileCallback(ecb, hFile, cbIO, errCode): + print("Transmit complete!") + ecb.close() + +# The ISAPI extension - handles requests in our virtual dir, and sends the +# response to the client. +class Extension(SimpleExtension): + "Python test Extension" + def __init__(self): + self.reload_watcher = ReloadWatcherThread() + self.reload_watcher.start() + + def HttpExtensionProc(self, ecb): + # NOTE: If you use a ThreadPoolExtension, you must still perform + # this check in HttpExtensionProc - raising the exception from + # The "Dispatch" method will just cause the exception to be + # rendered to the browser. + if self.reload_watcher.change_detected: + print("Doing reload") + raise InternalReloadException + + if ecb.GetServerVariable("UNICODE_URL").endswith("test.py"): + file_flags = win32con.FILE_FLAG_SEQUENTIAL_SCAN | win32con.FILE_FLAG_OVERLAPPED + hfile = win32file.CreateFile(__file__, win32con.GENERIC_READ, + 0, None, win32con.OPEN_EXISTING, + file_flags, None) + flags = isapicon.HSE_IO_ASYNC | isapicon.HSE_IO_DISCONNECT_AFTER_SEND | \ + isapicon.HSE_IO_SEND_HEADERS + # We pass hFile to the callback simply as a way of keeping it alive + # for the duration of the transmission + try: + ecb.TransmitFile(TransmitFileCallback, hfile, + int(hfile), + "200 OK", + 0, 0, None, None, flags) + except: + # Errors keep this source file open! + hfile.Close() + raise + else: + # default response + ecb.SendResponseHeaders("200 OK", "Content-Type: text/html\r\n\r\n", 0) + print("", file=ecb) + print("The root of this site is at", ecb.MapURLToPath("/"), file=ecb) + print("", file=ecb) + ecb.close() + return isapicon.HSE_STATUS_SUCCESS + + def TerminateExtension(self, status): + self.reload_watcher.stop() + +# The entry points for the ISAPI extension. +def __ExtensionFactory__(): + return Extension() + +# Our special command line customization. +# Pre-install hook for our virtual directory. +def PreInstallDirectory(params, options): + # If the user used our special '--description' option, + # then we override our default. + if options.description: + params.Description = options.description + +# Post install hook for our entire script +def PostInstall(params, options): + print() + print("The sample has been installed.") + print("Point your browser to /PyISAPITest") + +# Handler for our custom 'status' argument. +def status_handler(options, log, arg): + "Query the status of something" + print("Everything seems to be fine!") + +custom_arg_handlers = {"status": status_handler} + +if __name__=='__main__': + # If run from the command-line, install ourselves. + from isapi.install import * + params = ISAPIParameters(PostInstall = PostInstall) + # Setup the virtual directories - this is a list of directories our + # extension uses - in this case only 1. + # Each extension has a "script map" - this is the mapping of ISAPI + # extensions. + sm = [ + ScriptMapParams(Extension="*", Flags=0) + ] + vd = VirtualDirParameters(Name="PyISAPITest", + Description = Extension.__doc__, + ScriptMaps = sm, + ScriptMapUpdate = "replace", + # specify the pre-install hook. + PreInstall = PreInstallDirectory + ) + params.VirtualDirs = [vd] + # Setup our custom option parser. 
+ from optparse import OptionParser + parser = OptionParser('') # blank usage, so isapi sets it. + parser.add_option("", "--description", + action="store", + help="custom description to use for the virtual directory") + + HandleCommandLine(params, opt_parser=parser, + custom_arg_handlers = custom_arg_handlers) diff --git a/venv/Lib/site-packages/isapi/simple.py b/venv/Lib/site-packages/isapi/simple.py new file mode 100644 index 00000000..42b11000 --- /dev/null +++ b/venv/Lib/site-packages/isapi/simple.py @@ -0,0 +1,68 @@ +"""Simple base-classes for extensions and filters. + +None of the filter and extension functions are considered 'optional' by the +framework. These base-classes provide simple implementations for the +Initialize and Terminate functions, allowing you to omit them, + +It is not necessary to use these base-classes - but if you don't, you +must ensure each of the required methods are implemented. +""" + +class SimpleExtension: + "Base class for a simple ISAPI extension" + def __init__(self): + pass + + def GetExtensionVersion(self, vi): + """Called by the ISAPI framework to get the extension version + + The default implementation uses the classes docstring to + set the extension description.""" + # nod to our reload capability - vi is None when we are reloaded. + if vi is not None: + vi.ExtensionDesc = self.__doc__ + + def HttpExtensionProc(self, control_block): + """Called by the ISAPI framework for each extension request. + + sub-classes must provide an implementation for this method. + """ + raise NotImplementedError("sub-classes should override HttpExtensionProc") + + def TerminateExtension(self, status): + """Called by the ISAPI framework as the extension terminates. + """ + pass + +class SimpleFilter: + "Base class for a a simple ISAPI filter" + filter_flags = None + def __init__(self): + pass + + def GetFilterVersion(self, fv): + """Called by the ISAPI framework to get the extension version + + The default implementation uses the classes docstring to + set the extension description, and uses the classes + filter_flags attribute to set the ISAPI filter flags - you + must specify filter_flags in your class. + """ + if self.filter_flags is None: + raise RuntimeError("You must specify the filter flags") + # nod to our reload capability - fv is None when we are reloaded. + if fv is not None: + fv.Flags = self.filter_flags + fv.FilterDesc = self.__doc__ + + def HttpFilterProc(self, fc): + """Called by the ISAPI framework for each filter request. + + sub-classes must provide an implementation for this method. + """ + raise NotImplementedError("sub-classes should override HttpExtensionProc") + + def TerminateFilter(self, status): + """Called by the ISAPI framework as the filter terminates. + """ + pass diff --git a/venv/Lib/site-packages/isapi/test/README.txt b/venv/Lib/site-packages/isapi/test/README.txt new file mode 100644 index 00000000..18643dd7 --- /dev/null +++ b/venv/Lib/site-packages/isapi/test/README.txt @@ -0,0 +1,3 @@ +This is a directory for tests of the PyISAPI framework. + +For demos, please see the pyisapi 'samples' directory. 
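Since simple.py above only insists that HttpExtensionProc (or HttpFilterProc) be overridden, a minimal extension built on it might look like the following sketch; the class name and response text are invented, and the ecb calls mirror the ones used throughout the samples:

from isapi import isapicon
from isapi.simple import SimpleExtension

class HelloExtension(SimpleExtension):
    "Hypothetical minimal PyISAPI extension"
    def HttpExtensionProc(self, ecb):
        # Send a plain-text response and tell IIS the request completed synchronously.
        ecb.SendResponseHeaders("200 OK", "Content-Type: text/plain\r\n\r\n", 0)
        ecb.WriteClient(b"hello from PyISAPI")
        ecb.close()
        return isapicon.HSE_STATUS_SUCCESS

def __ExtensionFactory__():
    return HelloExtension()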
\ No newline at end of file diff --git a/venv/Lib/site-packages/isapi/test/__pycache__/extension_simple.cpython-36.pyc b/venv/Lib/site-packages/isapi/test/__pycache__/extension_simple.cpython-36.pyc new file mode 100644 index 00000000..7abc5b9f Binary files /dev/null and b/venv/Lib/site-packages/isapi/test/__pycache__/extension_simple.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/isapi/test/extension_simple.py b/venv/Lib/site-packages/isapi/test/extension_simple.py new file mode 100644 index 00000000..8994b48c --- /dev/null +++ b/venv/Lib/site-packages/isapi/test/extension_simple.py @@ -0,0 +1,111 @@ +# This is an ISAPI extension purely for testing purposes. It is NOT +# a 'demo' (even though it may be useful!) +# +# Install this extension, then point your browser to: +# "http://localhost/pyisapi_test/test1" +# This will execute the method 'test1' below. See below for the list of +# test methods that are acceptable. + +from isapi import isapicon, threaded_extension, ExtensionError +from isapi.simple import SimpleFilter +import traceback +import urllib.request, urllib.parse, urllib.error +import winerror + +# If we have no console (eg, am running from inside IIS), redirect output +# somewhere useful - in this case, the standard win32 trace collector. +import win32api +try: + win32api.GetConsoleTitle() +except win32api.error: + # No console - redirect + import win32traceutil + +# The ISAPI extension - handles requests in our virtual dir, and sends the +# response to the client. +class Extension(threaded_extension.ThreadPoolExtension): + "Python ISAPI Tester" + def Dispatch(self, ecb): + print('Tester dispatching "%s"' % (ecb.GetServerVariable("URL"),)) + url = ecb.GetServerVariable("URL") + test_name = url.split("/")[-1] + meth = getattr(self, test_name, None) + if meth is None: + raise AttributeError("No test named '%s'" % (test_name,)) + result = meth(ecb) + if result is None: + # This means the test finalized everything + return + ecb.SendResponseHeaders("200 OK", "Content-type: text/html\r\n\r\n", + False) + print("Finished running test ", test_name, "", file=ecb) + print("

", file=ecb)
+        print(result, file=ecb)
+        print("
", file=ecb) + print("", file=ecb) + ecb.DoneWithSession() + + def test1(self, ecb): + try: + ecb.GetServerVariable("foo bar") + raise RuntimeError("should have failed!") + except ExtensionError as err: + assert err.errno == winerror.ERROR_INVALID_INDEX, err + return "worked!" + + def test_long_vars(self, ecb): + qs = ecb.GetServerVariable("QUERY_STRING") + # Our implementation has a default buffer size of 8k - so we test + # the code that handles an overflow by ensuring there are more + # than 8k worth of chars in the URL. + expected_query = ('x' * 8500) + if len(qs)==0: + # Just the URL with no query part - redirect to myself, but with + # a huge query portion. + me = ecb.GetServerVariable("URL") + headers = "Location: " + me + "?" + expected_query + "\r\n\r\n" + ecb.SendResponseHeaders("301 Moved", headers) + ecb.DoneWithSession() + return None + if qs == expected_query: + return "Total length of variable is %d - test worked!" % (len(qs),) + else: + return "Unexpected query portion! Got %d chars, expected %d" % \ + (len(qs), len(expected_query)) + + def test_unicode_vars(self, ecb): + # We need to check that we are running IIS6! This seems the only + # effective way from an extension. + ver = float(ecb.GetServerVariable("SERVER_SOFTWARE").split('/')[1]) + if ver < 6.0: + return "This is IIS version %g - unicode only works in IIS6 and later" % ver + + us = ecb.GetServerVariable("UNICODE_SERVER_NAME") + if not isinstance(us, str): + raise RuntimeError("unexpected type!") + if us != str(ecb.GetServerVariable("SERVER_NAME")): + raise RuntimeError("Unicode and non-unicode values were not the same") + return "worked!" + +# The entry points for the ISAPI extension. +def __ExtensionFactory__(): + return Extension() + +if __name__=='__main__': + # If run from the command-line, install ourselves. + from isapi.install import * + params = ISAPIParameters() + # Setup the virtual directories - this is a list of directories our + # extension uses - in this case only 1. + # Each extension has a "script map" - this is the mapping of ISAPI + # extensions. + sm = [ + ScriptMapParams(Extension="*", Flags=0) + ] + vd = VirtualDirParameters(Name="pyisapi_test", + Description = Extension.__doc__, + ScriptMaps = sm, + ScriptMapUpdate = "replace" + ) + params.VirtualDirs = [vd] + HandleCommandLine(params) diff --git a/venv/Lib/site-packages/isapi/threaded_extension.py b/venv/Lib/site-packages/isapi/threaded_extension.py new file mode 100644 index 00000000..d3995174 --- /dev/null +++ b/venv/Lib/site-packages/isapi/threaded_extension.py @@ -0,0 +1,171 @@ +"""An ISAPI extension base class implemented using a thread-pool.""" +# $Id$ + +import sys +import time +from isapi import isapicon, ExtensionError +import isapi.simple +from win32file import GetQueuedCompletionStatus, CreateIoCompletionPort, \ + PostQueuedCompletionStatus, CloseHandle +from win32security import SetThreadToken +from win32event import INFINITE +from pywintypes import OVERLAPPED + +import threading +import traceback + +ISAPI_REQUEST = 1 +ISAPI_SHUTDOWN = 2 + +class WorkerThread(threading.Thread): + def __init__(self, extension, io_req_port): + self.running = False + self.io_req_port = io_req_port + self.extension = extension + threading.Thread.__init__(self) + # We wait 15 seconds for a thread to terminate, but if it fails to, + # we don't want the process to hang at exit waiting for it... 
+ self.setDaemon(True) + + def run(self): + self.running = True + while self.running: + errCode, bytes, key, overlapped = \ + GetQueuedCompletionStatus(self.io_req_port, INFINITE) + if key == ISAPI_SHUTDOWN and overlapped is None: + break + + # Let the parent extension handle the command. + dispatcher = self.extension.dispatch_map.get(key) + if dispatcher is None: + raise RuntimeError("Bad request '%s'" % (key,)) + + dispatcher(errCode, bytes, key, overlapped) + + def call_handler(self, cblock): + self.extension.Dispatch(cblock) + +# A generic thread-pool based extension, using IO Completion Ports. +# Sub-classes can override one method to implement a simple extension, or +# may leverage the CompletionPort to queue their own requests, and implement a +# fully asynch extension. +class ThreadPoolExtension(isapi.simple.SimpleExtension): + "Base class for an ISAPI extension based around a thread-pool" + max_workers = 20 + worker_shutdown_wait = 15000 # 15 seconds for workers to quit... + def __init__(self): + self.workers = [] + # extensible dispatch map, for sub-classes that need to post their + # own requests to the completion port. + # Each of these functions is called with the result of + # GetQueuedCompletionStatus for our port. + self.dispatch_map = { + ISAPI_REQUEST: self.DispatchConnection, + } + + def GetExtensionVersion(self, vi): + isapi.simple.SimpleExtension.GetExtensionVersion(self, vi) + # As per Q192800, the CompletionPort should be created with the number + # of processors, even if the number of worker threads is much larger. + # Passing 0 means the system picks the number. + self.io_req_port = CreateIoCompletionPort(-1, None, 0, 0) + # start up the workers + self.workers = [] + for i in range(self.max_workers): + worker = WorkerThread(self, self.io_req_port) + worker.start() + self.workers.append(worker) + + def HttpExtensionProc(self, control_block): + overlapped = OVERLAPPED() + overlapped.object = control_block + PostQueuedCompletionStatus(self.io_req_port, 0, ISAPI_REQUEST, overlapped) + return isapicon.HSE_STATUS_PENDING + + def TerminateExtension(self, status): + for worker in self.workers: + worker.running = False + for worker in self.workers: + PostQueuedCompletionStatus(self.io_req_port, 0, ISAPI_SHUTDOWN, None) + # wait for them to terminate - pity we aren't using 'native' threads + # as then we could do a smart wait - but now we need to poll.... + end_time = time.time() + self.worker_shutdown_wait/1000 + alive = self.workers + while alive: + if time.time() > end_time: + # xxx - might be nice to log something here. + break + time.sleep(0.2) + alive = [w for w in alive if w.is_alive()] + self.dispatch_map = {} # break circles + CloseHandle(self.io_req_port) + + # This is the one operation the base class supports - a simple + # Connection request. We setup the thread-token, and dispatch to the + # sub-class's 'Dispatch' method. + def DispatchConnection(self, errCode, bytes, key, overlapped): + control_block = overlapped.object + # setup the correct user for this request + hRequestToken = control_block.GetImpersonationToken() + SetThreadToken(None, hRequestToken) + try: + try: + self.Dispatch(control_block) + except: + self.HandleDispatchError(control_block) + finally: + # reset the security context + SetThreadToken(None, None) + + def Dispatch(self, ecb): + """Overridden by the sub-class to handle connection requests. + + This class creates a thread-pool using a Windows completion port, + and dispatches requests via this port. 
Sub-classes can generally + implement each connection request using blocking reads and writes, and + the thread-pool will still provide decent response to the end user. + + The sub-class can set a max_workers attribute (default is 20). Note + that this generally does *not* mean 20 threads will all be concurrently + running, via the magic of Windows completion ports. + + There is no default implementation - sub-classes must implement this. + """ + raise NotImplementedError("sub-classes should override Dispatch") + + def HandleDispatchError(self, ecb): + """Handles errors in the Dispatch method. + + When a Dispatch method call fails, this method is called to handle + the exception. The default implementation formats the traceback + in the browser. + """ + ecb.HttpStatusCode = isapicon.HSE_STATUS_ERROR + #control_block.LogData = "we failed!" + exc_typ, exc_val, exc_tb = sys.exc_info() + limit = None + try: + try: + import cgi + ecb.SendResponseHeaders("200 OK", "Content-type: text/html\r\n\r\n", + False) + print(file=ecb) + print("

<H3>Traceback (most recent call last):</H3>
", file=ecb) + list = traceback.format_tb(exc_tb, limit) + \ + traceback.format_exception_only(exc_typ, exc_val) + print("
<PRE>%s<B>%s</B></PRE>
" % ( + cgi.escape("".join(list[:-1])), cgi.escape(list[-1]),), file=ecb) + except ExtensionError: + # The client disconnected without reading the error body - + # its probably not a real browser at the other end, ignore it. + pass + except: + print("FAILED to render the error message!") + traceback.print_exc() + print("ORIGINAL extension error:") + traceback.print_exception(exc_typ, exc_val, exc_tb) + finally: + # holding tracebacks in a local of a frame that may itself be + # part of a traceback used to be evil and cause leaks! + exc_tb = None + ecb.DoneWithSession() diff --git a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/INSTALLER b/venv/Lib/site-packages/pip-21.1.2.dist-info/INSTALLER similarity index 100% rename from env/lib/python2.7/site-packages/pip-19.2.2.dist-info/INSTALLER rename to venv/Lib/site-packages/pip-21.1.2.dist-info/INSTALLER diff --git a/venv/Lib/site-packages/pip-21.1.2.dist-info/LICENSE.txt b/venv/Lib/site-packages/pip-21.1.2.dist-info/LICENSE.txt new file mode 100644 index 00000000..00addc27 --- /dev/null +++ b/venv/Lib/site-packages/pip-21.1.2.dist-info/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2008-2021 The pip developers (see AUTHORS.txt file) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/Lib/site-packages/pip-21.1.2.dist-info/METADATA b/venv/Lib/site-packages/pip-21.1.2.dist-info/METADATA new file mode 100644 index 00000000..706211ce --- /dev/null +++ b/venv/Lib/site-packages/pip-21.1.2.dist-info/METADATA @@ -0,0 +1,91 @@ +Metadata-Version: 2.1 +Name: pip +Version: 21.1.2 +Summary: The PyPA recommended tool for installing Python packages. 
+Home-page: https://pip.pypa.io/ +Author: The pip developers +Author-email: distutils-sig@python.org +License: MIT +Project-URL: Documentation, https://pip.pypa.io +Project-URL: Source, https://github.com/pypa/pip +Project-URL: Changelog, https://pip.pypa.io/en/stable/news/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.6 + +pip - The Python Package Installer +================================== + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.org/project/pip/ + +.. image:: https://readthedocs.org/projects/pip/badge/?version=latest + :target: https://pip.pypa.io/en/latest + +pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes. + +Please take a look at our documentation for how to install and use pip: + +* `Installation`_ +* `Usage`_ + +We release updates regularly, with a new version every 3 months. Find more details in our documentation: + +* `Release notes`_ +* `Release process`_ + +In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right. + +**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3. + +If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: + +* `Issue tracking`_ +* `Discourse channel`_ +* `User IRC`_ + +If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: + +* `GitHub page`_ +* `Development documentation`_ +* `Development mailing list`_ +* `Development IRC`_ + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _package installer: https://packaging.python.org/guides/tool-recommendations/ +.. _Python Package Index: https://pypi.org +.. _Installation: https://pip.pypa.io/en/stable/installing.html +.. _Usage: https://pip.pypa.io/en/stable/ +.. _Release notes: https://pip.pypa.io/en/stable/news.html +.. _Release process: https://pip.pypa.io/en/latest/development/release-process/ +.. _GitHub page: https://github.com/pypa/pip +.. _Development documentation: https://pip.pypa.io/en/latest/development +.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html +.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020 +.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html +.. 
_Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support +.. _Issue tracking: https://github.com/pypa/pip/issues +.. _Discourse channel: https://discuss.python.org/c/packaging +.. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/ +.. _User IRC: https://webchat.freenode.net/?channels=%23pypa +.. _Development IRC: https://webchat.freenode.net/?channels=%23pypa-dev +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + + diff --git a/venv/Lib/site-packages/pip-21.1.2.dist-info/RECORD b/venv/Lib/site-packages/pip-21.1.2.dist-info/RECORD new file mode 100644 index 00000000..48ba37ad --- /dev/null +++ b/venv/Lib/site-packages/pip-21.1.2.dist-info/RECORD @@ -0,0 +1,796 @@ +../../Scripts/pip.exe,sha256=a0ABTzgDXvmnqK0U8p3shPe8hSyGtaah-bGA4NALSLQ,106371 +../../Scripts/pip3.6.exe,sha256=a0ABTzgDXvmnqK0U8p3shPe8hSyGtaah-bGA4NALSLQ,106371 +../../Scripts/pip3.exe,sha256=a0ABTzgDXvmnqK0U8p3shPe8hSyGtaah-bGA4NALSLQ,106371 +pip-21.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip-21.1.2.dist-info/LICENSE.txt,sha256=I6c2HCsVgQKLxiO52ivSSZeryqR4Gs5q1ESjeUT42uE,1090 +pip-21.1.2.dist-info/METADATA,sha256=y7MhSvSvMOtugsnkLlhg556X0KfAwbSbQtWIkYcP10k,4103 +pip-21.1.2.dist-info/RECORD,, +pip-21.1.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +pip-21.1.2.dist-info/entry_points.txt,sha256=HtfDOwpUlr9s73jqLQ6wF9V0_0qvUXJwCBz7Vwx0Ue0,125 +pip-21.1.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/__init__.py,sha256=xl6vJWTn7sB7v5h7qPj-CtTzfXvbBezY1Fxe9Smjd1I,368 +pip/__main__.py,sha256=mXwWDftNLMKfwVqKFWGE_uuBZvGSIiUELhLkeysIuZc,1198 +pip/__pycache__/__init__.cpython-36.pyc,, +pip/__pycache__/__main__.cpython-36.pyc,, +pip/_internal/__init__.py,sha256=XvJ1JIumQnfLNFxVRdf_xrbhkTg1WMUrf2GzrH27F3A,410 +pip/_internal/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/__pycache__/build_env.cpython-36.pyc,, +pip/_internal/__pycache__/cache.cpython-36.pyc,, +pip/_internal/__pycache__/configuration.cpython-36.pyc,, +pip/_internal/__pycache__/exceptions.cpython-36.pyc,, +pip/_internal/__pycache__/main.cpython-36.pyc,, +pip/_internal/__pycache__/pyproject.cpython-36.pyc,, +pip/_internal/__pycache__/self_outdated_check.cpython-36.pyc,, +pip/_internal/__pycache__/wheel_builder.cpython-36.pyc,, +pip/_internal/build_env.py,sha256=2hFtbEoO4vA0FxehN_e2oXZ_3E3tAvKpnVmc8sOYjv0,9746 +pip/_internal/cache.py,sha256=6VONtoReGZbBd7sqY1n6hwkdWC4iz3tmXwXwZjpjZKw,9958 +pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 +pip/_internal/cli/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/cli/__pycache__/autocompletion.cpython-36.pyc,, +pip/_internal/cli/__pycache__/base_command.cpython-36.pyc,, +pip/_internal/cli/__pycache__/cmdoptions.cpython-36.pyc,, +pip/_internal/cli/__pycache__/command_context.cpython-36.pyc,, +pip/_internal/cli/__pycache__/main.cpython-36.pyc,, +pip/_internal/cli/__pycache__/main_parser.cpython-36.pyc,, +pip/_internal/cli/__pycache__/parser.cpython-36.pyc,, +pip/_internal/cli/__pycache__/progress_bars.cpython-36.pyc,, +pip/_internal/cli/__pycache__/req_command.cpython-36.pyc,, +pip/_internal/cli/__pycache__/spinners.cpython-36.pyc,, +pip/_internal/cli/__pycache__/status_codes.cpython-36.pyc,, +pip/_internal/cli/autocompletion.py,sha256=r2GQSaHHim1LwPhMaO9MPeKdsSv5H8S9ElVsmByQNew,6350 +pip/_internal/cli/base_command.py,sha256=26MHnlzZSC-Wk2j2OGsBDs5cl2ladrovJyVy1_2g0Zk,7741 
+pip/_internal/cli/cmdoptions.py,sha256=52JIyP5C6yT8DpT1O2ZseAY-vMvLTb8FqO0g85OFYMs,28999 +pip/_internal/cli/command_context.py,sha256=k2JF5WPsP1MNKaXWK8jZFbJhYffzkdvGaPsL53tZbDU,815 +pip/_internal/cli/main.py,sha256=G_OsY66FZRtmLrMJ4k3m77tmtsRRRQd3_-qle1lvmng,2483 +pip/_internal/cli/main_parser.py,sha256=G70Z1fXLYzeJuuotgwKwq-daCJ0jCmmHxx6aFHz6WAQ,2642 +pip/_internal/cli/parser.py,sha256=rx4w6IgD0Obi7t1k9mV0zlYhy_DuCoaDCqhkUKMOFNU,11097 +pip/_internal/cli/progress_bars.py,sha256=ck_ILji6aRTG0zxXajnPWIpQTGxTzm3nscZOxwNmTWo,8576 +pip/_internal/cli/req_command.py,sha256=refPyZdKuluridcLaCdSJtgyYFchxd9y8pMMp_7PO-s,16884 +pip/_internal/cli/spinners.py,sha256=VLdSWCvyk3KokujLyBf_QKYcGbrePQoPB4v7jqG7xyA,5347 +pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116 +pip/_internal/commands/__init__.py,sha256=v-xml8oMwrQhCpmApkpcMOE97Mp8QaBxoRObnGS43_8,3659 +pip/_internal/commands/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/commands/__pycache__/cache.cpython-36.pyc,, +pip/_internal/commands/__pycache__/check.cpython-36.pyc,, +pip/_internal/commands/__pycache__/completion.cpython-36.pyc,, +pip/_internal/commands/__pycache__/configuration.cpython-36.pyc,, +pip/_internal/commands/__pycache__/debug.cpython-36.pyc,, +pip/_internal/commands/__pycache__/download.cpython-36.pyc,, +pip/_internal/commands/__pycache__/freeze.cpython-36.pyc,, +pip/_internal/commands/__pycache__/hash.cpython-36.pyc,, +pip/_internal/commands/__pycache__/help.cpython-36.pyc,, +pip/_internal/commands/__pycache__/install.cpython-36.pyc,, +pip/_internal/commands/__pycache__/list.cpython-36.pyc,, +pip/_internal/commands/__pycache__/search.cpython-36.pyc,, +pip/_internal/commands/__pycache__/show.cpython-36.pyc,, +pip/_internal/commands/__pycache__/uninstall.cpython-36.pyc,, +pip/_internal/commands/__pycache__/wheel.cpython-36.pyc,, +pip/_internal/commands/cache.py,sha256=AELf98RWR_giU9wl0RSXf-MsTyO5G_iwO0iHoF4Fbmc,7414 +pip/_internal/commands/check.py,sha256=Dt0w7NqFp8o_45J7w32GQrKezsz2vwo_U8UmsHD9YNI,1587 +pip/_internal/commands/completion.py,sha256=UxS09s8rEnU08AAiN3gHdQIjU4XGSlv5SJ3rIJdTyhA,2951 +pip/_internal/commands/configuration.py,sha256=X1fdVdEg8MHFtArU-3bM6WBNax1E7Z7qszPEdlK1zqo,9206 +pip/_internal/commands/debug.py,sha256=yntOplw93VZoQAVBB3BXPKuqbam4mT6TErastFwFy3s,6806 +pip/_internal/commands/download.py,sha256=zv8S_DN2-k6K0VSR3yCPLSrLehoYkj3IvyO1Ho8t8V4,4993 +pip/_internal/commands/freeze.py,sha256=vPVguwBb15ubv8Es9oPSyWePBe2cq39QxjU4KizeTwk,3431 +pip/_internal/commands/hash.py,sha256=ip64AsJ6EFUEaWKDvsZmdQHks1JTEgrDjH5byl-IYyc,1713 +pip/_internal/commands/help.py,sha256=6Mnzrak_j-yE3psDCqi2GxISJqIZJ04DObKU9QhnxME,1149 +pip/_internal/commands/install.py,sha256=aFvZQfPrMrHDb6jjbmrVlyvDxMIeX3ZcZKSQvY6c0KI,27135 +pip/_internal/commands/list.py,sha256=jfqDS4xvm6WV8rHVSmvpaI811ukvD4OiPZwGGKMwwkI,11331 +pip/_internal/commands/search.py,sha256=EwcGPkDDTwFMpi2PBKhPuWX2YBMPcy7Ox1WFcWnouaw,5598 +pip/_internal/commands/show.py,sha256=sz2vbxh4l7Bj4jKlkDGTHYD6I8_duSpSUFVxUiH44xQ,6866 +pip/_internal/commands/uninstall.py,sha256=EDcx3a03l3U8tpZ2p4ffIdn45hY2YFEmq9yoeccF2ow,3216 +pip/_internal/commands/wheel.py,sha256=wKGSksuYjjhgOYa_jD6ulaKpPXaUzPiyzfRNNT4DOio,6233 +pip/_internal/configuration.py,sha256=QBLfhv-sbP-oR08NFxSYnv_mLB-SgtNOsWXAF9tDEcM,13725 +pip/_internal/distributions/__init__.py,sha256=ow1iPW_Qp-TOyOU-WghOKC8vAv1_Syk1zETZVO_vKEE,864 +pip/_internal/distributions/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/distributions/__pycache__/base.cpython-36.pyc,, 
+pip/_internal/distributions/__pycache__/installed.cpython-36.pyc,, +pip/_internal/distributions/__pycache__/sdist.cpython-36.pyc,, +pip/_internal/distributions/__pycache__/wheel.cpython-36.pyc,, +pip/_internal/distributions/base.py,sha256=UVndaok0jOHrLH0JqN0YzlxVEnvFQumYy37diY3ZCuE,1245 +pip/_internal/distributions/installed.py,sha256=uaTMPvY3hr_M1BCy107vJHWspKMJgrPxv30W3_zZZ0Q,667 +pip/_internal/distributions/sdist.py,sha256=co8fNR8qIhHRLBncwV92oJ7e8IOCGPgEsbEFdNPk1Yk,3900 +pip/_internal/distributions/wheel.py,sha256=n9MqNoWyMqNscfbNeeqh1bztoZUiB5x1H9h4tFfiJUw,1205 +pip/_internal/exceptions.py,sha256=2JQJSS68oggR_ZIOA-h1U2DRADURbkQn9Nf4EZWZ834,13170 +pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30 +pip/_internal/index/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/index/__pycache__/collector.cpython-36.pyc,, +pip/_internal/index/__pycache__/package_finder.cpython-36.pyc,, +pip/_internal/index/__pycache__/sources.cpython-36.pyc,, +pip/_internal/index/collector.py,sha256=aEXtHK0La4nGP7mu5N5CQ3tmfjaczLwbGi8Ar4oGz5o,18192 +pip/_internal/index/package_finder.py,sha256=3J9Rzq1NAO2p_zDb4fv33GeBBBOYusV9kXtAn2j6eCU,37294 +pip/_internal/index/sources.py,sha256=SVyPitv08-Qalh2_Bk5diAJ9GAA_d-a93koouQodAG0,6557 +pip/_internal/locations/__init__.py,sha256=9EXRxCpyiMClU87-P5E66tcFxybcA_KzLrzcK2Vt7zs,4826 +pip/_internal/locations/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/locations/__pycache__/_distutils.cpython-36.pyc,, +pip/_internal/locations/__pycache__/_sysconfig.cpython-36.pyc,, +pip/_internal/locations/__pycache__/base.cpython-36.pyc,, +pip/_internal/locations/_distutils.py,sha256=L5flRSr9BH0lBwPUl61cyBc1OnVD06FOENkDMRjyg38,5212 +pip/_internal/locations/_sysconfig.py,sha256=Tt8gkN7shxbqoUlzqM19myiBRzbft9CzkmcSS4YHk1s,5959 +pip/_internal/locations/base.py,sha256=QbkpgmzIbWBnUL2_3qu29sqCNewoqYbkVw8KmigRe2c,1478 +pip/_internal/main.py,sha256=BZ0vkdqgpoteTo1A1Q8ovFe8EzgKFJWOUjPmIUQfGCY,351 +pip/_internal/metadata/__init__.py,sha256=KINR8ZYO_ilc2pkV3t5KcQLzWLNc3GjZDklGWTVJ-zU,1471 +pip/_internal/metadata/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/metadata/__pycache__/base.cpython-36.pyc,, +pip/_internal/metadata/__pycache__/pkg_resources.cpython-36.pyc,, +pip/_internal/metadata/base.py,sha256=6BiB_b3lvNHYIVKbzrDhi0bJmSls5Q1K-iBeHWlKnIw,4750 +pip/_internal/metadata/pkg_resources.py,sha256=4FVPxYFABQ_1tbh_CRBzK4x0_SIgH1uCKx2ZLyhkouQ,4248 +pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 +pip/_internal/models/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/models/__pycache__/candidate.cpython-36.pyc,, +pip/_internal/models/__pycache__/direct_url.cpython-36.pyc,, +pip/_internal/models/__pycache__/format_control.cpython-36.pyc,, +pip/_internal/models/__pycache__/index.cpython-36.pyc,, +pip/_internal/models/__pycache__/link.cpython-36.pyc,, +pip/_internal/models/__pycache__/scheme.cpython-36.pyc,, +pip/_internal/models/__pycache__/search_scope.cpython-36.pyc,, +pip/_internal/models/__pycache__/selection_prefs.cpython-36.pyc,, +pip/_internal/models/__pycache__/target_python.cpython-36.pyc,, +pip/_internal/models/__pycache__/wheel.cpython-36.pyc,, +pip/_internal/models/candidate.py,sha256=LlyGF2SMGjeet9bLbEAzAWDP82Wcp3342Ysa7tCW_9M,1001 +pip/_internal/models/direct_url.py,sha256=VrnJNOqcPznfNarjQJavsx2tgG7GfcLa6PyZCuf_L7A,6555 +pip/_internal/models/format_control.py,sha256=l2jp47mWsJp7-LxMs05l9T-qFg9Z5PwdyP9R7Xc_VZQ,2629 
+pip/_internal/models/index.py,sha256=asMraZVPI0snye404GztEpXgKerj1yAFmZl2p3eN4Bg,1092 +pip/_internal/models/link.py,sha256=5wdHbGDLbafSdYpo2Ky7F9RRo226zRy6ik3cLH_8Kwc,7472 +pip/_internal/models/scheme.py,sha256=iqceC7gKiTn2ZLgCOgGQbcmo49TRg9EnQUSsQH3U-7A,770 +pip/_internal/models/search_scope.py,sha256=4uGNEqYrz4ku6_WzowqivuMvN0fj5XQ03WB14YjcN5U,4613 +pip/_internal/models/selection_prefs.py,sha256=aNRDL97Gz3yWJW3og0yuvOkU02UL8OeNQDuDatZ8SDo,1947 +pip/_internal/models/target_python.py,sha256=SLGG3z9Pj_CiA5jmMnNDv2MN3ST3keVuanVDzTvO5pM,3962 +pip/_internal/models/wheel.py,sha256=MWjxQkBNXI6XOWiTuzMG7uONhFu8xA94OqD_9BuIsVc,3614 +pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50 +pip/_internal/network/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/network/__pycache__/auth.cpython-36.pyc,, +pip/_internal/network/__pycache__/cache.cpython-36.pyc,, +pip/_internal/network/__pycache__/download.cpython-36.pyc,, +pip/_internal/network/__pycache__/lazy_wheel.cpython-36.pyc,, +pip/_internal/network/__pycache__/session.cpython-36.pyc,, +pip/_internal/network/__pycache__/utils.cpython-36.pyc,, +pip/_internal/network/__pycache__/xmlrpc.cpython-36.pyc,, +pip/_internal/network/auth.py,sha256=d8Df0fy01P1jJlF3XDMM8ACyktR1cN9zURG-ye1ncc0,11833 +pip/_internal/network/cache.py,sha256=J_xpsLWbRrlCSUcQhA5-TuT5LWIlpVtTH4fZ1XSjyb4,2213 +pip/_internal/network/download.py,sha256=8frb2bINOf-jbmFPapKbyEO9sjXJWJG6OJaW4hQ9r3s,6243 +pip/_internal/network/lazy_wheel.py,sha256=XMfrDK1IBy44L3Gx3UZ2B8s90VRXDa96520IOPmzmOU,7924 +pip/_internal/network/session.py,sha256=VHeiorPflYPNWK2pM_q22c-H5gmRBDh9UKCJW3VAUFI,16247 +pip/_internal/network/utils.py,sha256=uqT6QkO9NHUwqTw3gHBWMQFdaYqYabB423QUZuiQD3c,4072 +pip/_internal/network/xmlrpc.py,sha256=CL1WBOTgxPwbcZ6QubZ4pXQXjb7qTTFpTUFe-ZaWkcA,1703 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/operations/__pycache__/check.cpython-36.pyc,, +pip/_internal/operations/__pycache__/freeze.cpython-36.pyc,, +pip/_internal/operations/__pycache__/prepare.cpython-36.pyc,, +pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/build/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/operations/build/__pycache__/metadata.cpython-36.pyc,, +pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-36.pyc,, +pip/_internal/operations/build/__pycache__/wheel.cpython-36.pyc,, +pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-36.pyc,, +pip/_internal/operations/build/metadata.py,sha256=jJp05Rrp0AMsQb7izDXbNGC1LtPNwOhHQj7cRM5324c,1165 +pip/_internal/operations/build/metadata_legacy.py,sha256=ECMBhLEPEQv6PUUCpPCXW-wN9QRXdY45PNXJv7BZKTU,1917 +pip/_internal/operations/build/wheel.py,sha256=WYLMxuxqN3ahJTQk2MI9hdmZKBpFyxHeNpUdO0PybxU,1106 +pip/_internal/operations/build/wheel_legacy.py,sha256=NOJhTYMYljdbizFo_WjkaKGWG1SEZ6aByrBdCrrsZB8,3227 +pip/_internal/operations/check.py,sha256=OtMZ2ff0zk8Ghpl7eIXySZ4D8pCUfzPAYNpGTxw1qWU,5245 +pip/_internal/operations/freeze.py,sha256=D-ex0Bwy6E0EVS_gHlixlEpKDpRxFZnUmTy7nf8s7ts,9999 +pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51 +pip/_internal/operations/install/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/operations/install/__pycache__/editable_legacy.cpython-36.pyc,, +pip/_internal/operations/install/__pycache__/legacy.cpython-36.pyc,, 
+pip/_internal/operations/install/__pycache__/wheel.cpython-36.pyc,, +pip/_internal/operations/install/editable_legacy.py,sha256=bjBObfE6sz3UmGI7y4-GCgKa2WmTgnWlFFU7b-i0sQs,1396 +pip/_internal/operations/install/legacy.py,sha256=f59fQbNLO2rvl8bNQm_CuW6dgPvXXQ7y5apulWZi01E,4177 +pip/_internal/operations/install/wheel.py,sha256=1gV2G-owlA2iwcbxYAc4BOTiPRRGB8TzpuU0wuhM2VQ,29960 +pip/_internal/operations/prepare.py,sha256=AXHNg1iGceg1lyqDqbcabmAFIfQ1k1cIfgmVY5JCWoo,24850 +pip/_internal/pyproject.py,sha256=bN_dliFVxorLITxCEzT0UmPYFoSqk_vGBtM1QwiQays,7061 +pip/_internal/req/__init__.py,sha256=lRNHBv0ZAZNbSwmXU-XUdm66gsiNmuiBDi1DFYJ4hIQ,2983 +pip/_internal/req/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/req/__pycache__/constructors.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_file.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_install.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_set.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_tracker.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_uninstall.cpython-36.pyc,, +pip/_internal/req/constructors.py,sha256=4sinGd7srKhI94DV6XO-qRX2M6Kr907OFmsfklKrt64,16267 +pip/_internal/req/req_file.py,sha256=nPIFl2Mi9UDGhrj-K0E3_QugF7tl3UBDty1czbIF7fk,18000 +pip/_internal/req/req_install.py,sha256=RR2mkaAU2REDtjZY3nRy0ojcUA_Bf0JpjX9ZTyZUUa4,33067 +pip/_internal/req/req_set.py,sha256=AutsaiV2s-2ILwtWtTA4OJW_ZLRg4GXg6wM0Y_hZb1k,7778 +pip/_internal/req/req_tracker.py,sha256=XuPweX1lbJXT2gSkCXICS5hna6byme5PeQp4Ok8-R2o,4391 +pip/_internal/req/req_uninstall.py,sha256=gACinTIcScZGw81qLaFdTj9KGXlVuCpru7XvHGjIE-E,23468 +pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/resolution/__pycache__/base.cpython-36.pyc,, +pip/_internal/resolution/base.py,sha256=T4QnfShJErpPWe4iOiO7VmXuz1bxe20LLNs33AUslYM,563 +pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/legacy/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/resolution/legacy/__pycache__/resolver.cpython-36.pyc,, +pip/_internal/resolution/legacy/resolver.py,sha256=OF_6Yh4hrFfJ4u0HLF4ZRBlA8lBHUfAaFnhuVKIQhPM,17934 +pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/base.cpython-36.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-36.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-36.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-36.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-36.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-36.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-36.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-36.pyc,, +pip/_internal/resolution/resolvelib/base.py,sha256=MbakyqSotBGVJpI3kApqqP2fPPZih9DgsfkpuFd-ADM,5677 +pip/_internal/resolution/resolvelib/candidates.py,sha256=dEKSuK9B5M52c1SugB43zXnnxgNWNTa7hCCwItSX61c,19976 +pip/_internal/resolution/resolvelib/factory.py,sha256=taqeDmXk0kAY9EVqSMhEJriY02MSShbZvt9VqEAgkw4,25446 +pip/_internal/resolution/resolvelib/found_candidates.py,sha256=FzxKczhel3GhViOIEfGHUfUQ6rN3U0blMMUuu-blHfU,5410 
+pip/_internal/resolution/resolvelib/provider.py,sha256=HYITnjs7hcxDGANCDdL4qg2MJ1aw1jA9cMyxNP2mLrk,7673 +pip/_internal/resolution/resolvelib/reporter.py,sha256=xgaCtXLj791A_qRfV9Y1nXGeaWVq3JE0ygIA3YNRWq0,2765 +pip/_internal/resolution/resolvelib/requirements.py,sha256=fF2RH6VCanTuF-iwu8tZY8Bh0FakDBTw7tkDJyTsy9E,6047 +pip/_internal/resolution/resolvelib/resolver.py,sha256=3hlnrZklszFUwGQFF33nLkEO8kxz4vZ3_uKp_L8YvmE,12085 +pip/_internal/self_outdated_check.py,sha256=ivoUYaGuq-Ra_DvlZvPtHhgbY97NKHYuPGzrgN2G1A8,6484 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/utils/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/utils/__pycache__/appdirs.cpython-36.pyc,, +pip/_internal/utils/__pycache__/compat.cpython-36.pyc,, +pip/_internal/utils/__pycache__/compatibility_tags.cpython-36.pyc,, +pip/_internal/utils/__pycache__/datetime.cpython-36.pyc,, +pip/_internal/utils/__pycache__/deprecation.cpython-36.pyc,, +pip/_internal/utils/__pycache__/direct_url_helpers.cpython-36.pyc,, +pip/_internal/utils/__pycache__/distutils_args.cpython-36.pyc,, +pip/_internal/utils/__pycache__/encoding.cpython-36.pyc,, +pip/_internal/utils/__pycache__/entrypoints.cpython-36.pyc,, +pip/_internal/utils/__pycache__/filesystem.cpython-36.pyc,, +pip/_internal/utils/__pycache__/filetypes.cpython-36.pyc,, +pip/_internal/utils/__pycache__/glibc.cpython-36.pyc,, +pip/_internal/utils/__pycache__/hashes.cpython-36.pyc,, +pip/_internal/utils/__pycache__/inject_securetransport.cpython-36.pyc,, +pip/_internal/utils/__pycache__/logging.cpython-36.pyc,, +pip/_internal/utils/__pycache__/misc.cpython-36.pyc,, +pip/_internal/utils/__pycache__/models.cpython-36.pyc,, +pip/_internal/utils/__pycache__/packaging.cpython-36.pyc,, +pip/_internal/utils/__pycache__/parallel.cpython-36.pyc,, +pip/_internal/utils/__pycache__/pkg_resources.cpython-36.pyc,, +pip/_internal/utils/__pycache__/setuptools_build.cpython-36.pyc,, +pip/_internal/utils/__pycache__/subprocess.cpython-36.pyc,, +pip/_internal/utils/__pycache__/temp_dir.cpython-36.pyc,, +pip/_internal/utils/__pycache__/unpacking.cpython-36.pyc,, +pip/_internal/utils/__pycache__/urls.cpython-36.pyc,, +pip/_internal/utils/__pycache__/virtualenv.cpython-36.pyc,, +pip/_internal/utils/__pycache__/wheel.cpython-36.pyc,, +pip/_internal/utils/appdirs.py,sha256=HCCFaOrZOnMLzRDpKXcMiFh_2kWZ-PzFdN8peLiwkNY,1222 +pip/_internal/utils/compat.py,sha256=I58tTZ3qqGZqeGVP_mERM8N7QPu71niLpxfO3Ij2jfQ,1912 +pip/_internal/utils/compatibility_tags.py,sha256=IcQEHCZJvdfKciACmXGCKt39Yog2_Q2XQKMHojA_2pg,5589 +pip/_internal/utils/datetime.py,sha256=biZdEJEQBGq8A-N7ooposipeGzmSHdI0WX60kll_AEs,255 +pip/_internal/utils/deprecation.py,sha256=CD9gU1zmDtC3Nk2TM14FVpAa_bxCMd03Kx5t3LoFwkg,3277 +pip/_internal/utils/direct_url_helpers.py,sha256=-chZUxdJkFRG-pA2MY7_Wii5U5o18o5K4AqBsWd92-c,3935 +pip/_internal/utils/distutils_args.py,sha256=KxWTaz07A_1ukCyw_pNah-i6sBvrVtdMsnF8jguDNYQ,1262 +pip/_internal/utils/encoding.py,sha256=T0cQTkGB7-s3wivLlHcKbKqvJoM0yLdo8ot89LlGdz0,1190 +pip/_internal/utils/entrypoints.py,sha256=m4UXkLZTnPsdSisQzNFiHM1CZcMK8N1CA98g4ORex2c,1066 +pip/_internal/utils/filesystem.py,sha256=a3rnoUB_HTdEbDaAUHSNMPIHqHds4UA-mLQ5bvgOjSQ,6045 +pip/_internal/utils/filetypes.py,sha256=weviVbapHWVQ_8-K-PTQ_TnYL66kZi4SrVBTmRYZXLc,761 +pip/_internal/utils/glibc.py,sha256=GM1Y2hWkOf_tumySGFg-iNbc7oilBQQrjczb_705CF8,3170 +pip/_internal/utils/hashes.py,sha256=o1qQEkqe2AqsRm_JhLoM4hkxmVtewH0ZZpQ6EBObHuU,5167 
+pip/_internal/utils/inject_securetransport.py,sha256=tGl9Bgyt2IHKtB3b0B-6r3W2yYF3Og-PBe0647S3lZs,810 +pip/_internal/utils/logging.py,sha256=Bkp3QSjur3ekkunAInsGJ6ls7KF8ANTtBgGhjY0vltg,12133 +pip/_internal/utils/misc.py,sha256=F7LDb6PQIwniYwLczhU2pSAyHZ9bnTVT1yI_OduYh3w,23315 +pip/_internal/utils/models.py,sha256=qCgYyUw2mIH1pombsJ3YQsMtONZgyJ4BGwO5MJnSC4c,1329 +pip/_internal/utils/packaging.py,sha256=I1938AB7FprcVJJd6C0vSiMuCVajmrxZF55vX5j0bMo,2900 +pip/_internal/utils/parallel.py,sha256=RZF4JddPEWVbkkPCknfvpqaLfm3Pmqd_ABoCHmV4lXs,3224 +pip/_internal/utils/pkg_resources.py,sha256=jwH5JViPe-JlXLvLC0-ASfTTCRYvm0u9CwQGcWjxStI,1106 +pip/_internal/utils/setuptools_build.py,sha256=xk9sRBjUyNTHs_TvEWebVWs1GfLPN208MzpSXr9Ok_A,5047 +pip/_internal/utils/subprocess.py,sha256=uxaP3IzPiBYhG0MbdfPK_uchZAh27uZ3wO3q5hRfEyo,10036 +pip/_internal/utils/temp_dir.py,sha256=9gs3N9GQeVXRVWjJIalSpH1uj8yQXPTzarb5n1_HMVo,7950 +pip/_internal/utils/unpacking.py,sha256=PioYYwfTCn_VeYer80onhrO9Y1ggetqOPSOroG38bRQ,9032 +pip/_internal/utils/urls.py,sha256=XzjQsHGd2YDmJhoCogspPTqh6Kl5tGENRHPcwjS0JC4,1256 +pip/_internal/utils/virtualenv.py,sha256=iRTK-sD6bWpHqXcZ0ECfdpFLWatMOHFUVCIRa0L6Gu0,3564 +pip/_internal/utils/wheel.py,sha256=DOIVZaXN7bMOAeMEqzIOZHGl4OFO-KGrEqBUB848DPo,6290 +pip/_internal/vcs/__init__.py,sha256=CjyxHCgdt19l21j0tJGiQ_6Yk8m-KWmQThmYvljd1eo,571 +pip/_internal/vcs/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/bazaar.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/git.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/mercurial.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/subversion.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/versioncontrol.cpython-36.pyc,, +pip/_internal/vcs/bazaar.py,sha256=Ay_vN-87vYSEzBqXT3RVwl40vlk56j3jy_AfQbMj4uo,2962 +pip/_internal/vcs/git.py,sha256=URUz1kSqhDhqJsr9ulaFTewP8Zjwf7oVPP7skdj9SMQ,15431 +pip/_internal/vcs/mercurial.py,sha256=2X3eIyeAWQWI2TxoPT-xuVsD6fxr7YSyHw4MR9EWz4M,5043 +pip/_internal/vcs/subversion.py,sha256=lPfCu841JAMRG_jTX_TbRZrBpKdId5eQ8t7_xI7w3L0,11876 +pip/_internal/vcs/versioncontrol.py,sha256=N60TSMbTr79ADzR61BCrk8YogUQcBBnNaLgJPTfXsfc,23086 +pip/_internal/wheel_builder.py,sha256=hW63ZmABr65rOiSRBHXu1jBUdEZw5LZiw0LaQBbz0lI,11740 +pip/_vendor/__init__.py,sha256=gCrQwPBY2OZBeedvKOLdRZ3W1LIRM60fG6d4mgW_-9Y,4760 +pip/_vendor/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/__pycache__/appdirs.cpython-36.pyc,, +pip/_vendor/__pycache__/distro.cpython-36.pyc,, +pip/_vendor/__pycache__/pyparsing.cpython-36.pyc,, +pip/_vendor/__pycache__/six.cpython-36.pyc,, +pip/_vendor/appdirs.py,sha256=M6IYRJtdZgmSPCXCSMBRB0VT3P8MdFbWCDbSLrB2Ebg,25907 +pip/_vendor/cachecontrol/__init__.py,sha256=pJtAaUxOsMPnytI1A3juAJkXYDr8krdSnsg4Yg3OBEg,302 +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-36.pyc,, +pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295 +pip/_vendor/cachecontrol/adapter.py,sha256=sSwaSYd93IIfCFU4tOMgSo6b2LCt_gBSaQUj8ktJFOA,4882 
+pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805 +pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86 +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc,, +pip/_vendor/cachecontrol/caches/file_cache.py,sha256=nYVKsJtXh6gJXvdn1iWyrhxvkwpQrK-eKoMRzuiwkKk,4153 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856 +pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695 +pip/_vendor/cachecontrol/controller.py,sha256=CWEX3pedIM9s60suf4zZPtm_JvVgnvogMGK_OiBG5F8,14149 +pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533 +pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070 +pip/_vendor/cachecontrol/serialize.py,sha256=vIa4jvq4x_KSOLdEIedoknX2aXYHQujLDFV4-F21Dno,7091 +pip/_vendor/cachecontrol/wrapper.py,sha256=5LX0uJwkNQUtYSEw3aGmGu9WY8wGipd81mJ8lG0d0M4,690 +pip/_vendor/certifi/__init__.py,sha256=SsmdmFHjHCY4VLtqwpp9P_jsOcAuHj-5c5WqoEz-oFg,62 +pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255 +pip/_vendor/certifi/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/certifi/__pycache__/__main__.cpython-36.pyc,, +pip/_vendor/certifi/__pycache__/core.cpython-36.pyc,, +pip/_vendor/certifi/cacert.pem,sha256=u3fxPT--yemLvyislQRrRBlsfY9Vq3cgBh6ZmRqCkZc,263774 +pip/_vendor/certifi/core.py,sha256=gOFd0zHYlx4krrLEn982esOtmz3djiG0BFSDhgjlvcI,2840 +pip/_vendor/chardet/__init__.py,sha256=mWZaWmvZkhwfBEAT9O1Y6nRTfKzhT7FHhQTTAujbqUA,3271 +pip/_vendor/chardet/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/big5freq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/big5prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/chardistribution.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/charsetprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/cp949prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/escprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/escsm.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/eucjpprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/euckrfreq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/euckrprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/euctwfreq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/euctwprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/gb2312freq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/gb2312prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/hebrewprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/jisfreq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/jpcntx.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langthaimodel.cpython-36.pyc,, 
+pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/latin1prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/mbcssm.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/sjisprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/universaldetector.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/utf8prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/version.cpython-36.pyc,, +pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +pip/_vendor/chardet/charsetgroupprober.py,sha256=GZLReHP6FRRn43hvSOoGCxYamErKzyp6RgOQxVeC3kg,3839 +pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +pip/_vendor/chardet/cli/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-36.pyc,, +pip/_vendor/chardet/cli/chardetect.py,sha256=XK5zqjUG2a4-y6eLHZ8ThYcp6WWUrdlmELxNypcc2SE,2747 +pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +pip/_vendor/chardet/compat.py,sha256=40zr6wICZwknxyuLGGcIOPyve8DTebBCbbvttvnmp5Q,1200 +pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +pip/_vendor/chardet/langbulgarianmodel.py,sha256=rk9CJpuxO0bObboJcv6gNgWuosYZmd8qEEds5y7DS_Y,105697 +pip/_vendor/chardet/langgreekmodel.py,sha256=S-uNQ1ihC75yhBvSux24gLFZv3QyctMwC6OxLJdX-bw,99571 +pip/_vendor/chardet/langhebrewmodel.py,sha256=DzPP6TPGG_-PV7tqspu_d8duueqm7uN-5eQ0aHUw1Gg,98776 +pip/_vendor/chardet/langhungarianmodel.py,sha256=RtJH7DZdsmaHqyK46Kkmnk5wQHiJwJPPJSqqIlpeZRc,102498 +pip/_vendor/chardet/langrussianmodel.py,sha256=THqJOhSxiTQcHboDNSc5yofc2koXXQFHFyjtyuntUfM,131180 +pip/_vendor/chardet/langthaimodel.py,sha256=R1wXHnUMtejpw0JnH_JO8XdYasME6wjVqp1zP7TKLgg,103312 
+pip/_vendor/chardet/langturkishmodel.py,sha256=rfwanTptTwSycE4-P-QasPmzd-XVYgevytzjlEzBBu8,95946 +pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +pip/_vendor/chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/chardet/metadata/__pycache__/languages.cpython-36.pyc,, +pip/_vendor/chardet/metadata/languages.py,sha256=41tLq3eLSrBEbEVVQpVGFq9K7o1ln9b1HpY1l0hCUQo,19474 +pip/_vendor/chardet/sbcharsetprober.py,sha256=nmyMyuxzG87DN6K3Rk2MUzJLMLR69MrWpdnHzOwVUwQ,6136 +pip/_vendor/chardet/sbcsgroupprober.py,sha256=hqefQuXmiFyDBArOjujH6hd6WFXlOD1kWCsxDhjx5Vc,4309 +pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +pip/_vendor/chardet/universaldetector.py,sha256=DpZTXCX0nUHXxkQ9sr4GZxGB_hveZ6hWt3uM94cgWKs,12503 +pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +pip/_vendor/chardet/version.py,sha256=A4CILFAd8MRVG1HoXPp45iK9RLlWyV73a1EtwE8Tvn8,242 +pip/_vendor/colorama/__init__.py,sha256=pCdErryzLSzDW5P-rRPBlPLqbBtIRNJB6cMgoeJns5k,239 +pip/_vendor/colorama/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/ansi.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/ansitowin32.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/initialise.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/win32.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/winterm.cpython-36.pyc,, +pip/_vendor/colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 +pip/_vendor/colorama/ansitowin32.py,sha256=yV7CEmCb19MjnJKODZEEvMH_fnbJhwnpzo4sxZuGXmA,10517 +pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 +pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 +pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 +pip/_vendor/distlib/__init__.py,sha256=3veAk2rPznOB2gsK6tjbbh0TQMmGE5P82eE9wXq6NIk,581 +pip/_vendor/distlib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/database.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/index.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/locators.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/manifest.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/markers.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/metadata.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/version.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/wheel.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 +pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/misc.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-36.pyc,, 
+pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 +pip/_vendor/distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707 +pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 +pip/_vendor/distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854 +pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 +pip/_vendor/distlib/compat.py,sha256=ADA56xiAxar3mU6qemlBhNbsrFPosXRhO44RzsbJPqk,41408 +pip/_vendor/distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059 +pip/_vendor/distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066 +pip/_vendor/distlib/locators.py,sha256=c9E4cDEacJ_uKbuE5BqAVocoWp6rsuBGTkiNDQq3zV4,52100 +pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 +pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387 +pip/_vendor/distlib/metadata.py,sha256=z2KPy3h3tcDnb9Xs7nAqQ5Oz0bqjWAUFmKWcFKRoodg,38962 +pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766 +pip/_vendor/distlib/scripts.py,sha256=_MAj3sMuv56kuM8FsiIWXqbT0gmumPGaOR_atOzn4a4,17180 +pip/_vendor/distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768 +pip/_vendor/distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984 +pip/_vendor/distlib/util.py,sha256=f2jZCPrcLCt6LcnC0gUy-Fur60tXD8reA7k4rDpHMDw,59845 +pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391 +pip/_vendor/distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112 +pip/_vendor/distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840 +pip/_vendor/distlib/wheel.py,sha256=v6DnwTqhNHwrEVFr8_YeiTW6G4ftP_evsywNgrmdb2o,41144 +pip/_vendor/distro.py,sha256=xxMIh2a3KmippeWEHzynTdHT3_jZM0o-pos0dAWJROM,43628 +pip/_vendor/html5lib/__init__.py,sha256=BYzcKCqeEii52xDrqBFruhnmtmkiuHXFyFh-cglQ8mk,1160 +pip/_vendor/html5lib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_inputstream.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_utils.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/constants.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/html5parser.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/serializer.cpython-36.pyc,, +pip/_vendor/html5lib/_ihatexml.py,sha256=ifOwF7pXqmyThIXc3boWc96s4MDezqRrRVp7FwDYUFs,16728 +pip/_vendor/html5lib/_inputstream.py,sha256=jErNASMlkgs7MpOM9Ve_VdLDJyFFweAjLuhVutZz33U,32353 +pip/_vendor/html5lib/_tokenizer.py,sha256=04mgA2sNTniutl2fxFv-ei5bns4iRaPxVXXHh_HrV_4,77040 +pip/_vendor/html5lib/_trie/__init__.py,sha256=nqfgO910329BEVJ5T4psVwQtjd2iJyEXQ2-X8c1YxwU,109 +pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/py.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013 +pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 +pip/_vendor/html5lib/_utils.py,sha256=Dx9AKntksRjFT1veBj7I362pf5OgIaT0zglwq43RnfU,4931 +pip/_vendor/html5lib/constants.py,sha256=Ll-yzLU_jcjyAI_h57zkqZ7aQWE5t5xA4y_jQgoUUhw,83464 
+pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/base.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/lint.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-36.pyc,, +pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 +pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 +pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 +pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 +pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 +pip/_vendor/html5lib/filters/sanitizer.py,sha256=m6oGmkBhkGAnn2nV6D4hE78SCZ6WEnK9rKdZB3uXBIc,26897 +pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 +pip/_vendor/html5lib/html5parser.py,sha256=anr-aXre_ImfrkQ35c_rftKXxC80vJCREKe06Tq15HA,117186 +pip/_vendor/html5lib/serializer.py,sha256=_PpvcZF07cwE7xr9uKkZqh5f4UEaI8ltCU2xPJzaTpk,15759 +pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 +pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-36.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-36.pyc,, +pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 +pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 +pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 +pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/base.py,sha256=z-o51vt9r_l2IDG5IioTOKGzZne4Fy3_Fc-7ztrOh4I,14565 +pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925 +pip/_vendor/html5lib/treebuilders/etree.py,sha256=w5ZFpKk6bAxnrwD2_BrF5EVC7vzz0L3LMi9Sxrbc_8w,12836 +pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9gqDjs-IxsPhBYa5cpvv2FZ1KZlG83Giusy2lFmvIkE,14766 +pip/_vendor/html5lib/treewalkers/__init__.py,sha256=OBPtc1TU5mGyy18QDMxKEyYEz0wxFUUNj5v0-XgmYhY,5719 +pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-36.pyc,, 
+pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 +pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 +pip/_vendor/html5lib/treewalkers/etree.py,sha256=xo1L5m9VtkfpFJK0pFmkLVajhqYYVisVZn3k9kYpPkI,4551 +pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=_b0LAVWLcVu9WaU_-w3D8f0IRSpCbjf667V-3NRdhTw,6357 +pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 +pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 +pip/_vendor/idna/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/codec.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/core.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/idnadata.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/intranges.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/package_data.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/uts46data.cpython-36.pyc,, +pip/_vendor/idna/codec.py,sha256=4RVMhqFquJgyGBKyl40ARqcgDzkDDXZUvyl1EOCRLFE,3027 +pip/_vendor/idna/compat.py,sha256=g-7Ph45nzILe_7xvxdbTebrHZq4mQWxIOH1rjMc6xrs,232 +pip/_vendor/idna/core.py,sha256=VdFGQyiit1eMKUQ2x0mNXoGThrXlRyp070mPDyLX9Yg,11849 +pip/_vendor/idna/idnadata.py,sha256=cl4x9RLdw1ZMtEEbvKwAsX-Id3AdIjO5U3HaoKM6VGs,42350 +pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 +pip/_vendor/idna/package_data.py,sha256=kxptFveZ37zbPSmKU7KMEA8Pi7h3-sM1-p2agm2PpCI,21 +pip/_vendor/idna/uts46data.py,sha256=4CZEB6ZQgmSNIATBn2V_xdW9PEgVOXAOYRzCeQGsK_E,196224 +pip/_vendor/msgpack/__init__.py,sha256=2gJwcsTIaAtCM0GMi2rU-_Y6kILeeQuqRkrQ22jSANc,1118 +pip/_vendor/msgpack/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/msgpack/__pycache__/_version.cpython-36.pyc,, +pip/_vendor/msgpack/__pycache__/exceptions.cpython-36.pyc,, +pip/_vendor/msgpack/__pycache__/ext.cpython-36.pyc,, +pip/_vendor/msgpack/__pycache__/fallback.cpython-36.pyc,, +pip/_vendor/msgpack/_version.py,sha256=dFR03oACnj4lsKd1RnwD7BPMiVI_FMygdOL1TOBEw_U,20 +pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081 +pip/_vendor/msgpack/ext.py,sha256=4l356Y4sVEcvCla2dh_cL57vh4GMhZfa3kuWHFHYz6A,6088 +pip/_vendor/msgpack/fallback.py,sha256=Rpv1Ldey8f8ueRnQznD4ARKBn9dxM2PywVNkXI8IEeE,38026 +pip/_vendor/packaging/__about__.py,sha256=j4B7IMMSqpUnYzcYd5H5WZlILXevD7Zm_n9lj_TROTw,726 +pip/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 +pip/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/_typing.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/tags.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/version.cpython-36.pyc,, +pip/_vendor/packaging/_compat.py,sha256=MXdsGpSE_W-ZrHoC87andI4LV2FAwU7HLL-eHe_CjhU,1128 +pip/_vendor/packaging/_structures.py,sha256=ozkCX8Q8f2qE1Eic3YiQ4buDVfgz2iYevY9e7R2y3iY,2022 +pip/_vendor/packaging/_typing.py,sha256=VgA0AAvsc97KB5nF89zoudOyCMEsV7FlaXzZbYqEkzA,1824 
+pip/_vendor/packaging/markers.py,sha256=8DOn1c7oZ_DySBlLom_9o49GzobVGYN8-kpK_nsj8oQ,9472 +pip/_vendor/packaging/requirements.py,sha256=MHqf_FKihHC0VkOB62ZUdUyG8okEL97D4Xy_jK1yFS0,5110 +pip/_vendor/packaging/specifiers.py,sha256=RaxQ-JKyCqI5QBm6gDvboZ2K6jjLVd-pxq0kvYf28kc,32208 +pip/_vendor/packaging/tags.py,sha256=BMEL_3W3E8nXK_AXAWqmlYccsvoznFKkTBkTPR48DB8,29561 +pip/_vendor/packaging/utils.py,sha256=5vUxwCVYSmaNJFgd7KaCBpxHXQN89KIvRLvCsDzao0k,4385 +pip/_vendor/packaging/version.py,sha256=t7FpsZKmDncMn6EG28dEu_5NBZUa9_HVoiG-fsDo3oc,15974 +pip/_vendor/pep517/__init__.py,sha256=mju9elFHLEUJ23rU5Zpdj8nROdY0Vj3bp4ZgvBTs6bg,130 +pip/_vendor/pep517/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/pep517/__pycache__/build.cpython-36.pyc,, +pip/_vendor/pep517/__pycache__/check.cpython-36.pyc,, +pip/_vendor/pep517/__pycache__/colorlog.cpython-36.pyc,, +pip/_vendor/pep517/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/pep517/__pycache__/dirtools.cpython-36.pyc,, +pip/_vendor/pep517/__pycache__/envbuild.cpython-36.pyc,, +pip/_vendor/pep517/__pycache__/meta.cpython-36.pyc,, +pip/_vendor/pep517/__pycache__/wrappers.cpython-36.pyc,, +pip/_vendor/pep517/build.py,sha256=Z49CmRFafX7NjoBModiibwQYa_EYz3E0F31b7D5WVvs,3456 +pip/_vendor/pep517/check.py,sha256=8LJLtfZ99zAcV4vKJ1a-odMxg2sEImD7RMNg_Ere-1Y,6082 +pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098 +pip/_vendor/pep517/compat.py,sha256=M-5s4VNp8rjyT76ZZ_ibnPD44DYVzSQlyCEHayjtDPw,780 +pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129 +pip/_vendor/pep517/envbuild.py,sha256=szKUFlO50X1ahQfXwz4hD9V2VE_bz9MLVPIeidsFo4w,6041 +pip/_vendor/pep517/in_process/__init__.py,sha256=MyWoAi8JHdcBv7yXuWpUSVADbx6LSB9rZh7kTIgdA8Y,563 +pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-36.pyc,, +pip/_vendor/pep517/in_process/_in_process.py,sha256=XrKOTURJdia5R7i3i_OQmS89LASFXE3HQXfX63qZBIE,8438 +pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463 +pip/_vendor/pep517/wrappers.py,sha256=QYZfN1nWoq4Z2krY-UX14JLAxkdNwujYjRGf7qFc914,11044 +pip/_vendor/pkg_resources/__init__.py,sha256=XpGBfvS9fafA6bm5rx7vnxdxs7yqyoc_NnpzKApkJ64,108277 +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-36.pyc,, +pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562 +pip/_vendor/progress/__init__.py,sha256=fcbQQXo5np2CoQyhSH5XprkicwLZNLePR3uIahznSO0,4857 +pip/_vendor/progress/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/bar.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/counter.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/spinner.cpython-36.pyc,, +pip/_vendor/progress/bar.py,sha256=QuDuVNcmXgpxtNtxO0Fq72xKigxABaVmxYGBw4J3Z_E,2854 +pip/_vendor/progress/counter.py,sha256=MznyBrvPWrOlGe4MZAlGUb9q3aODe6_aNYeAE_VNoYA,1372 +pip/_vendor/progress/spinner.py,sha256=k8JbDW94T0-WXuXfxZIFhdoNPYp3jfnpXqBnfRv5fGs,1380 +pip/_vendor/pyparsing.py,sha256=J1b4z3S_KwyJW7hKGnoN-hXW9pgMIzIP6QThyY5yJq4,273394 +pip/_vendor/requests/__init__.py,sha256=ib7nRjDadbCMOeX2sMQLcbXzy982HoKRY2LD_gWqwPM,4458 +pip/_vendor/requests/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/__version__.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/_internal_utils.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/adapters.cpython-36.pyc,, 
+pip/_vendor/requests/__pycache__/api.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/certs.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/help.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/packages.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/status_codes.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-36.pyc,, +pip/_vendor/requests/__version__.py,sha256=k4J8c1yFRFzwGWwlN7miaDOclFtbcIs1GlnmT17YbXQ,441 +pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 +pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548 +pip/_vendor/requests/api.py,sha256=PlHM-HT3PQ5lyufoeGmV-nJxRi7UnUyGVh7OV7B9XV4,6496 +pip/_vendor/requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207 +pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 +pip/_vendor/requests/compat.py,sha256=LQWuCR4qXk6w7-qQopXyz0WNHUdAD40k0mKnaAEf1-g,2045 +pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430 +pip/_vendor/requests/exceptions.py,sha256=d9fJJw8YFBB9VzG9qhvxLuOx6be3c_Dwbck-dVUEAcs,3173 +pip/_vendor/requests/help.py,sha256=SJPVcoXeo7KfK4AxJN5eFVQCjr0im87tU2n7ubLsksU,3578 +pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757 +pip/_vendor/requests/models.py,sha256=UkkaVuU1tc-BKYB41dds35saisoTpaYJ2YBCFZEEfhM,34373 +pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 +pip/_vendor/requests/sessions.py,sha256=BsnR-zYILgoFzJ6yq4T8ht_i0PwwPGVAxWxWaV5dcHg,30137 +pip/_vendor/requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188 +pip/_vendor/requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005 +pip/_vendor/requests/utils.py,sha256=_K9AgkN6efPe-a-zgZurXzds5PBC0CzDkyjAE2oCQFQ,30529 +pip/_vendor/resolvelib/__init__.py,sha256=QWAqNErjxqEMKl-AUccXz10aCKVmO-WmWvxUl3QOlFY,537 +pip/_vendor/resolvelib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/resolvelib/__pycache__/providers.cpython-36.pyc,, +pip/_vendor/resolvelib/__pycache__/reporters.cpython-36.pyc,, +pip/_vendor/resolvelib/__pycache__/resolvers.cpython-36.pyc,, +pip/_vendor/resolvelib/__pycache__/structs.cpython-36.pyc,, +pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-36.pyc,, +pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156 +pip/_vendor/resolvelib/providers.py,sha256=bfzFDZd7UqkkAS7lUM_HeYbA-HzjKfDlle_pn_79vio,5638 +pip/_vendor/resolvelib/reporters.py,sha256=hQvvXuuEBOyEWO8KDfLsWKVjX55UFMAUwO0YZMNpzAw,1364 +pip/_vendor/resolvelib/resolvers.py,sha256=P6aq-7pY5E7zROb0zUUWqFIHEA9Lm0MWsx_bYXzUg3A,17292 +pip/_vendor/resolvelib/structs.py,sha256=Z6m4CkKJlWH4ZIKelEsKNeZqKTvyux4hqBNzY4kZzLo,4495 +pip/_vendor/six.py,sha256=U4Z_yv534W5CNyjY9i8V1OXY2SjAny8y2L5vDLhhThM,34159 
+pip/_vendor/tenacity/__init__.py,sha256=6qSjN2BJDt864b6nxFoalpbCLQHiD2iYAlnUS9dWSSw,16528 +pip/_vendor/tenacity/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/_asyncio.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/_utils.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/after.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/before.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/before_sleep.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/nap.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/retry.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/stop.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-36.pyc,, +pip/_vendor/tenacity/__pycache__/wait.cpython-36.pyc,, +pip/_vendor/tenacity/_asyncio.py,sha256=6C4Sfv9IOUYf1-0vuIoE6OGbmJrJywH0-YslrxmbxKw,2833 +pip/_vendor/tenacity/_utils.py,sha256=W1nujHum1f9i4RQpOSjqsQo9_mQtaUtNznXAmQHsL28,4555 +pip/_vendor/tenacity/after.py,sha256=KNIi2WT83r4eqA3QaXMK1zXQzkbLgVHj5uRanY6HabM,1307 +pip/_vendor/tenacity/before.py,sha256=B9pAXn6_J1UKzwTL9nFtRpOhNg8s5vGSi4bqnx4-laA,1154 +pip/_vendor/tenacity/before_sleep.py,sha256=lZEMHNaFRmdCcws3Moh4EOZ9zeo4MRxskdiUudvNuvY,1784 +pip/_vendor/tenacity/compat.py,sha256=dHonJkJlHwD2cmqLrYHYU0Tdzm2bn1-76QZSt6OCemw,739 +pip/_vendor/tenacity/nap.py,sha256=7VVudOTmuv_-C_XJlvjGcgHbV6_A2HlzymaXu8vj1d8,1280 +pip/_vendor/tenacity/retry.py,sha256=xskLGa15EsNhPPOmIUcKS7CqjaRAtWxGFNPNRjjz9UU,5463 +pip/_vendor/tenacity/stop.py,sha256=4cjSe_YPSawz6iI-QBDN0xFfE_zlKvjhFwx21ZlyD2E,2435 +pip/_vendor/tenacity/tornadoweb.py,sha256=q3XZW2A9Rky1BhUQbNHF61hM1EXQ57dA7wxPnlSOx3s,1729 +pip/_vendor/tenacity/wait.py,sha256=FAoIfIUSNf5OWJYT7nhjFC0uOVijHMBd56AJRyLN230,6017 +pip/_vendor/toml/__init__.py,sha256=kYgYzehhUx1cctsuprmjEKwnSdmQeC53cTxi7nxQrko,747 +pip/_vendor/toml/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/toml/__pycache__/decoder.cpython-36.pyc,, +pip/_vendor/toml/__pycache__/encoder.cpython-36.pyc,, +pip/_vendor/toml/__pycache__/ordered.cpython-36.pyc,, +pip/_vendor/toml/__pycache__/tz.cpython-36.pyc,, +pip/_vendor/toml/decoder.py,sha256=deDPQqpj92SG6pAtwLbgKHrIsly7hAZG-U6g2y7hyGc,38954 +pip/_vendor/toml/encoder.py,sha256=tBe93_GB21K52TlSbMiYuGeIGXH70F2WzAg-lIfVoko,9964 +pip/_vendor/toml/ordered.py,sha256=UWt5Eka90IWVBYdvLgY5PXnkBcVYpHjnw9T67rM85T8,378 +pip/_vendor/toml/tz.py,sha256=-5vg8wkg_atnVi2TnEveexIVE7T_FxBVr_-2WVfO1oA,701 +pip/_vendor/urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763 +pip/_vendor/urllib3/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/_collections.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/_version.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/connection.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/connectionpool.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/exceptions.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/fields.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/filepost.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/poolmanager.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/request.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/response.cpython-36.pyc,, +pip/_vendor/urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811 +pip/_vendor/urllib3/_version.py,sha256=2Bjk_cB49921PTvereWp8ZR3NhLNoCMAyHSGP-OesLk,63 +pip/_vendor/urllib3/connection.py,sha256=q-vf_TM3MyRbZcFn3-VCKZBSf0oEhGjv7BFeZm_7kw4,18748 
+pip/_vendor/urllib3/connectionpool.py,sha256=IKoeuJZY9YAYm0GK4q-MXAhyXW0M_FnvabYaNsDIR-E,37133 +pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 +pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=eRy1Mj-wpg7sR6-OSvnSV4jUbjMT464dLN_CWxbIRVw,17649 +pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=lgIdsSycqfB0Xm5BiJzXGeIKT7ybCQMFPJAgkcwPa1s,13908 +pip/_vendor/urllib3/contrib/appengine.py,sha256=lm86XjaOI7ajbonsN0JLA0ckkgSFWhgxWKLW_Ymt4sI,11034 +pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=6I95h1_71fzxmoMSNtY0gB8lnyCoVtP_DpqFGj14fdU,4160 +pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=kqm9SX4h_6h76QwGDBiNQ7i-ktKZunZuxzTVjjtHDto,16795 +pip/_vendor/urllib3/contrib/securetransport.py,sha256=MEEHa3YqG8ifDPYG0gO12C1tZu2I-HqGF4lC53cHFPg,34303 +pip/_vendor/urllib3/contrib/socks.py,sha256=DcRjM2l0rQMIyhYrN6r-tnVkY6ZTDxHJlM8_usAkGCA,7097 +pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217 +pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579 +pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440 +pip/_vendor/urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108 +pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/packages/__pycache__/six.cpython-36.pyc,, +pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-36.pyc,, +pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417 +pip/_vendor/urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536 +pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=zppezdEQdpGsYerI6mV6MfUYy495JV4mcOWC_GgbljU,757 +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-36.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679 +pip/_vendor/urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763 +pip/_vendor/urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985 +pip/_vendor/urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203 
+pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155 +pip/_vendor/urllib3/util/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/connection.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/proxy.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/queue.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/request.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/response.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/retry.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/timeout.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/url.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/wait.cpython-36.pyc,, +pip/_vendor/urllib3/util/connection.py,sha256=_I-ZoF58xXLLjo-Q5IGaJrMxy2IW_exI8K9O9pq7op0,4922 +pip/_vendor/urllib3/util/proxy.py,sha256=FGipAEnvZteyldXNjce4DEB7YzwU-a5lep8y5S0qHQg,1604 +pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498 +pip/_vendor/urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123 +pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510 +pip/_vendor/urllib3/util/retry.py,sha256=s3ZNKXO6_t23ZQMg8zlu20PMSqraT495-S_mEY_19ak,21396 +pip/_vendor/urllib3/util/ssl_.py,sha256=dKcH-sqiR_ESWqKP1PJ6SUAUSvqC-fkMQGrTokV4NMY,16281 +pip/_vendor/urllib3/util/ssltransport.py,sha256=vOOCPRn-dODUZ2qtMCfStb0JmjgrgJaKLqJ9qvKucFs,6932 +pip/_vendor/urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003 +pip/_vendor/urllib3/util/url.py,sha256=KP_yaHA0TFFAsQSImc_FOHO-Wq3PNHf_bKObKcrgdU4,13981 +pip/_vendor/urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404 +pip/_vendor/vendor.txt,sha256=yaN2qLLkKuoRmFLCxGJ1LZtZiuV7T7NoisZqwWNRhIU,364 +pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 +pip/_vendor/webencodings/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/labels.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/mklabels.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/tests.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-36.pyc,, +pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 +pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 +pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563 +pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307 +pip/py.typed,sha256=l9g-Fc1zgtIZ70tLJDcx6qKeqDutTVVSceIqUod-awg,286 diff --git a/venv/Lib/site-packages/pip-21.1.2.dist-info/WHEEL b/venv/Lib/site-packages/pip-21.1.2.dist-info/WHEEL new file mode 100644 index 00000000..385faab0 --- /dev/null +++ b/venv/Lib/site-packages/pip-21.1.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/pip-21.1.2.dist-info/entry_points.txt b/venv/Lib/site-packages/pip-21.1.2.dist-info/entry_points.txt new file mode 100644 index 00000000..d48bd8a8 --- /dev/null +++ b/venv/Lib/site-packages/pip-21.1.2.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +pip = pip._internal.cli.main:main +pip3 = pip._internal.cli.main:main +pip3.8 = 
pip._internal.cli.main:main + diff --git a/env/lib/python2.7/site-packages/pip-19.2.2.dist-info/top_level.txt b/venv/Lib/site-packages/pip-21.1.2.dist-info/top_level.txt similarity index 100% rename from env/lib/python2.7/site-packages/pip-19.2.2.dist-info/top_level.txt rename to venv/Lib/site-packages/pip-21.1.2.dist-info/top_level.txt diff --git a/venv/Lib/site-packages/pip/__init__.py b/venv/Lib/site-packages/pip/__init__.py new file mode 100644 index 00000000..82f53c38 --- /dev/null +++ b/venv/Lib/site-packages/pip/__init__.py @@ -0,0 +1,14 @@ +from typing import List, Optional + +__version__ = "21.1.2" + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is an internal API only meant for use by pip's own console scripts. + + For additional details, see https://github.com/pypa/pip/issues/7498. + """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/venv/Lib/site-packages/pip/__main__.py b/venv/Lib/site-packages/pip/__main__.py new file mode 100644 index 00000000..fe34a7b7 --- /dev/null +++ b/venv/Lib/site-packages/pip/__main__.py @@ -0,0 +1,31 @@ +import os +import sys +import warnings + +# Remove '' and current working directory from the first entry +# of sys.path, if present to avoid using current directory +# in pip commands check, freeze, install, list and show, +# when invoked as python -m pip +if sys.path[0] in ("", os.getcwd()): + sys.path.pop(0) + +# If we are running from a wheel, add the wheel to sys.path +# This allows the usage python pip-*.whl/pip install pip-*.whl +if __package__ == "": + # __file__ is pip-*.whl/pip/__main__.py + # first dirname call strips of '/__main__.py', second strips off '/pip' + # Resulting path is the name of the wheel itself + # Add that to sys.path so we can import pip + path = os.path.dirname(os.path.dirname(__file__)) + sys.path.insert(0, path) + +if __name__ == "__main__": + # Work around the error reported in #9540, pending a proper fix. + # Note: It is essential the warning filter is set *before* importing + # pip, as the deprecation happens at import time, not runtime. + warnings.filterwarnings( + "ignore", category=DeprecationWarning, module=".*packaging\\.version" + ) + from pip._internal.cli.main import main as _main + + sys.exit(_main()) diff --git a/venv/Lib/site-packages/pip/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..73757195 Binary files /dev/null and b/venv/Lib/site-packages/pip/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/__pycache__/__main__.cpython-36.pyc b/venv/Lib/site-packages/pip/__pycache__/__main__.cpython-36.pyc new file mode 100644 index 00000000..f0e26bf8 Binary files /dev/null and b/venv/Lib/site-packages/pip/__pycache__/__main__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/__init__.py b/venv/Lib/site-packages/pip/_internal/__init__.py new file mode 100644 index 00000000..41071cd8 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/__init__.py @@ -0,0 +1,15 @@ +from typing import List, Optional + +import pip._internal.utils.inject_securetransport # noqa + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..6dd2a46e Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-36.pyc new file mode 100644 index 00000000..baad3096 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-36.pyc new file mode 100644 index 00000000..fc24c170 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-36.pyc new file mode 100644 index 00000000..c6585ec5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-36.pyc new file mode 100644 index 00000000..61715b20 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/main.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/main.cpython-36.pyc new file mode 100644 index 00000000..9d305869 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/__pycache__/main.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-36.pyc new file mode 100644 index 00000000..bb1e4e73 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-36.pyc new file mode 100644 index 00000000..bebf75ad Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-36.pyc new file mode 100644 index 00000000..9de5d68c Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/build_env.py b/venv/Lib/site-packages/pip/_internal/build_env.py new file mode 100644 index 00000000..cc15250f --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/build_env.py @@ -0,0 +1,286 @@ +"""Build Environment used for isolation during sdist building +""" + +import contextlib +import logging +import os +import pathlib +import sys +import textwrap +import zipfile +from collections import OrderedDict +from sysconfig import get_paths +from types import TracebackType +from typing import TYPE_CHECKING, 
Iterable, Iterator, List, Optional, Set, Tuple, Type + +from pip._vendor.certifi import where +from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet + +from pip import __file__ as pip_location +from pip._internal.cli.spinners import open_spinner +from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds + +if TYPE_CHECKING: + from pip._internal.index.package_finder import PackageFinder + +logger = logging.getLogger(__name__) + + +class _Prefix: + + def __init__(self, path): + # type: (str) -> None + self.path = path + self.setup = False + self.bin_dir = get_paths( + 'nt' if os.name == 'nt' else 'posix_prefix', + vars={'base': path, 'platbase': path} + )['scripts'] + self.lib_dirs = get_prefixed_libs(path) + + +@contextlib.contextmanager +def _create_standalone_pip() -> Iterator[str]: + """Create a "standalone pip" zip file. + + The zip file's content is identical to the currently-running pip. + It will be used to install requirements into the build environment. + """ + source = pathlib.Path(pip_location).resolve().parent + + # Return the current instance if `source` is not a directory. We can't build + # a zip from this, and it likely means the instance is already standalone. + if not source.is_dir(): + yield str(source) + return + + with TempDirectory(kind="standalone-pip") as tmp_dir: + pip_zip = os.path.join(tmp_dir.path, "__env_pip__.zip") + kwargs = {} + if sys.version_info >= (3, 8): + kwargs["strict_timestamps"] = False + with zipfile.ZipFile(pip_zip, "w", **kwargs) as zf: + for child in source.rglob("*"): + zf.write(child, child.relative_to(source.parent).as_posix()) + yield os.path.join(pip_zip, "pip") + + +class BuildEnvironment: + """Creates and manages an isolated environment to install build deps + """ + + def __init__(self): + # type: () -> None + temp_dir = TempDirectory( + kind=tempdir_kinds.BUILD_ENV, globally_managed=True + ) + + self._prefixes = OrderedDict( + (name, _Prefix(os.path.join(temp_dir.path, name))) + for name in ('normal', 'overlay') + ) + + self._bin_dirs = [] # type: List[str] + self._lib_dirs = [] # type: List[str] + for prefix in reversed(list(self._prefixes.values())): + self._bin_dirs.append(prefix.bin_dir) + self._lib_dirs.extend(prefix.lib_dirs) + + # Customize site to: + # - ensure .pth files are honored + # - prevent access to system site packages + system_sites = { + os.path.normcase(site) for site in (get_purelib(), get_platlib()) + } + self._site_dir = os.path.join(temp_dir.path, 'site') + if not os.path.exists(self._site_dir): + os.mkdir(self._site_dir) + with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp: + fp.write(textwrap.dedent( + ''' + import os, site, sys + + # First, drop system-sites related paths. + original_sys_path = sys.path[:] + known_paths = set() + for path in {system_sites!r}: + site.addsitedir(path, known_paths=known_paths) + system_paths = set( + os.path.normcase(path) + for path in sys.path[len(original_sys_path):] + ) + original_sys_path = [ + path for path in original_sys_path + if os.path.normcase(path) not in system_paths + ] + sys.path = original_sys_path + + # Second, add lib directories. + # ensuring .pth file are processed. 
+ for path in {lib_dirs!r}: + assert not path in sys.path + site.addsitedir(path) + ''' + ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)) + + def __enter__(self): + # type: () -> None + self._save_env = { + name: os.environ.get(name, None) + for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH') + } + + path = self._bin_dirs[:] + old_path = self._save_env['PATH'] + if old_path: + path.extend(old_path.split(os.pathsep)) + + pythonpath = [self._site_dir] + + os.environ.update({ + 'PATH': os.pathsep.join(path), + 'PYTHONNOUSERSITE': '1', + 'PYTHONPATH': os.pathsep.join(pythonpath), + }) + + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] + exc_tb # type: Optional[TracebackType] + ): + # type: (...) -> None + for varname, old_value in self._save_env.items(): + if old_value is None: + os.environ.pop(varname, None) + else: + os.environ[varname] = old_value + + def check_requirements(self, reqs): + # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]] + """Return 2 sets: + - conflicting requirements: set of (installed, wanted) reqs tuples + - missing requirements: set of reqs + """ + missing = set() + conflicting = set() + if reqs: + ws = WorkingSet(self._lib_dirs) + for req in reqs: + try: + if ws.find(Requirement.parse(req)) is None: + missing.add(req) + except VersionConflict as e: + conflicting.add((str(e.args[0].as_requirement()), + str(e.args[1]))) + return conflicting, missing + + def install_requirements( + self, + finder, # type: PackageFinder + requirements, # type: Iterable[str] + prefix_as_string, # type: str + message # type: str + ): + # type: (...) -> None + prefix = self._prefixes[prefix_as_string] + assert not prefix.setup + prefix.setup = True + if not requirements: + return + with contextlib.ExitStack() as ctx: + # TODO: Remove this block when dropping 3.6 support. Python 3.6 + # lacks importlib.resources and pep517 has issues loading files in + # a zip, so we fallback to the "old" method by adding the current + # pip directory to the child process's sys.path. 
+ if sys.version_info < (3, 7): + pip_runnable = os.path.dirname(pip_location) + else: + pip_runnable = ctx.enter_context(_create_standalone_pip()) + self._install_requirements( + pip_runnable, + finder, + requirements, + prefix, + message, + ) + + @staticmethod + def _install_requirements( + pip_runnable: str, + finder: "PackageFinder", + requirements: Iterable[str], + prefix: _Prefix, + message: str, + ) -> None: + args = [ + sys.executable, pip_runnable, 'install', + '--ignore-installed', '--no-user', '--prefix', prefix.path, + '--no-warn-script-location', + ] # type: List[str] + if logger.getEffectiveLevel() <= logging.DEBUG: + args.append('-v') + for format_control in ('no_binary', 'only_binary'): + formats = getattr(finder.format_control, format_control) + args.extend(('--' + format_control.replace('_', '-'), + ','.join(sorted(formats or {':none:'})))) + + index_urls = finder.index_urls + if index_urls: + args.extend(['-i', index_urls[0]]) + for extra_index in index_urls[1:]: + args.extend(['--extra-index-url', extra_index]) + else: + args.append('--no-index') + for link in finder.find_links: + args.extend(['--find-links', link]) + + for host in finder.trusted_hosts: + args.extend(['--trusted-host', host]) + if finder.allow_all_prereleases: + args.append('--pre') + if finder.prefer_binary: + args.append('--prefer-binary') + args.append('--') + args.extend(requirements) + extra_environ = {"_PIP_STANDALONE_CERT": where()} + with open_spinner(message) as spinner: + call_subprocess(args, spinner=spinner, extra_environ=extra_environ) + + +class NoOpBuildEnvironment(BuildEnvironment): + """A no-op drop-in replacement for BuildEnvironment + """ + + def __init__(self): + # type: () -> None + pass + + def __enter__(self): + # type: () -> None + pass + + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] + exc_tb # type: Optional[TracebackType] + ): + # type: (...) -> None + pass + + def cleanup(self): + # type: () -> None + pass + + def install_requirements( + self, + finder, # type: PackageFinder + requirements, # type: Iterable[str] + prefix_as_string, # type: str + message # type: str + ): + # type: (...) -> None + raise NotImplementedError() diff --git a/venv/Lib/site-packages/pip/_internal/cache.py b/venv/Lib/site-packages/pip/_internal/cache.py new file mode 100644 index 00000000..7ef51b92 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cache.py @@ -0,0 +1,287 @@ +"""Cache Management +""" + +import hashlib +import json +import logging +import os +from typing import Any, Dict, List, Optional, Set + +from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InvalidWheelFilename +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pip._internal.utils.urls import path_to_url + +logger = logging.getLogger(__name__) + + +def _hash_dict(d): + # type: (Dict[str, str]) -> str + """Return a stable sha224 of a dictionary.""" + s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True) + return hashlib.sha224(s.encode("ascii")).hexdigest() + + +class Cache: + """An abstract class - provides cache directories for data from links + + + :param cache_dir: The root of the cache. 
+ :param format_control: An object of FormatControl class to limit + binaries being read from the cache. + :param allowed_formats: which formats of files the cache should store. + ('binary' and 'source' are the only allowed values) + """ + + def __init__(self, cache_dir, format_control, allowed_formats): + # type: (str, FormatControl, Set[str]) -> None + super().__init__() + assert not cache_dir or os.path.isabs(cache_dir) + self.cache_dir = cache_dir or None + self.format_control = format_control + self.allowed_formats = allowed_formats + + _valid_formats = {"source", "binary"} + assert self.allowed_formats.union(_valid_formats) == _valid_formats + + def _get_cache_path_parts(self, link): + # type: (Link) -> List[str] + """Get parts of part that must be os.path.joined with cache_dir + """ + + # We want to generate an url to use as our cache key, we don't want to + # just re-use the URL because it might have other items in the fragment + # and we don't care about those. + key_parts = {"url": link.url_without_fragment} + if link.hash_name is not None and link.hash is not None: + key_parts[link.hash_name] = link.hash + if link.subdirectory_fragment: + key_parts["subdirectory"] = link.subdirectory_fragment + + # Include interpreter name, major and minor version in cache key + # to cope with ill-behaved sdists that build a different wheel + # depending on the python version their setup.py is being run on, + # and don't encode the difference in compatibility tags. + # https://github.com/pypa/pip/issues/7296 + key_parts["interpreter_name"] = interpreter_name() + key_parts["interpreter_version"] = interpreter_version() + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and + # thus less secure). However the differences don't make a lot of + # difference for our use case here. + hashed = _hash_dict(key_parts) + + # We want to nest the directories some to prevent having a ton of top + # level directories where we might run out of sub directories on some + # FS. + parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + return parts + + def _get_candidates(self, link, canonical_package_name): + # type: (Link, str) -> List[Any] + can_not_cache = ( + not self.cache_dir or + not canonical_package_name or + not link + ) + if can_not_cache: + return [] + + formats = self.format_control.get_allowed_formats( + canonical_package_name + ) + if not self.allowed_formats.intersection(formats): + return [] + + candidates = [] + path = self.get_path_for_link(link) + if os.path.isdir(path): + for candidate in os.listdir(path): + candidates.append((candidate, path)) + return candidates + + def get_path_for_link(self, link): + # type: (Link) -> str + """Return a directory to store cached items in for link. + """ + raise NotImplementedError() + + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Link + """Returns a link to a cached item if it exists, otherwise returns the + passed link. + """ + raise NotImplementedError() + + +class SimpleWheelCache(Cache): + """A cache of wheels for future installs. 
+ """ + + def __init__(self, cache_dir, format_control): + # type: (str, FormatControl) -> None + super().__init__(cache_dir, format_control, {"binary"}) + + def get_path_for_link(self, link): + # type: (Link) -> str + """Return a directory to store cached wheels for link + + Because there are M wheels for any one sdist, we provide a directory + to cache them in, and then consult that directory when looking up + cache hits. + + We only insert things into the cache if they have plausible version + numbers, so that we don't contaminate the cache with things that were + not unique. E.g. ./package might have dozens of installs done for it + and build a version of 0.0...and if we built and cached a wheel, we'd + end up using the same wheel even if the source has been edited. + + :param link: The link of the sdist for which this will cache wheels. + """ + parts = self._get_cache_path_parts(link) + assert self.cache_dir + # Store wheels within the root cache_dir + return os.path.join(self.cache_dir, "wheels", *parts) + + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Link + candidates = [] + + if not package_name: + return link + + canonical_package_name = canonicalize_name(package_name) + for wheel_name, wheel_dir in self._get_candidates( + link, canonical_package_name + ): + try: + wheel = Wheel(wheel_name) + except InvalidWheelFilename: + continue + if canonicalize_name(wheel.name) != canonical_package_name: + logger.debug( + "Ignoring cached wheel %s for %s as it " + "does not match the expected distribution name %s.", + wheel_name, link, package_name, + ) + continue + if not wheel.supported(supported_tags): + # Built for a different python/arch/etc + continue + candidates.append( + ( + wheel.support_index_min(supported_tags), + wheel_name, + wheel_dir, + ) + ) + + if not candidates: + return link + + _, wheel_name, wheel_dir = min(candidates) + return Link(path_to_url(os.path.join(wheel_dir, wheel_name))) + + +class EphemWheelCache(SimpleWheelCache): + """A SimpleWheelCache that creates it's own temporary cache directory + """ + + def __init__(self, format_control): + # type: (FormatControl) -> None + self._temp_dir = TempDirectory( + kind=tempdir_kinds.EPHEM_WHEEL_CACHE, + globally_managed=True, + ) + + super().__init__(self._temp_dir.path, format_control) + + +class CacheEntry: + def __init__( + self, + link, # type: Link + persistent, # type: bool + ): + self.link = link + self.persistent = persistent + + +class WheelCache(Cache): + """Wraps EphemWheelCache and SimpleWheelCache into a single Cache + + This Cache allows for gracefully degradation, using the ephem wheel cache + when a certain link is not found in the simple wheel cache first. + """ + + def __init__(self, cache_dir, format_control): + # type: (str, FormatControl) -> None + super().__init__(cache_dir, format_control, {'binary'}) + self._wheel_cache = SimpleWheelCache(cache_dir, format_control) + self._ephem_cache = EphemWheelCache(format_control) + + def get_path_for_link(self, link): + # type: (Link) -> str + return self._wheel_cache.get_path_for_link(link) + + def get_ephem_path_for_link(self, link): + # type: (Link) -> str + return self._ephem_cache.get_path_for_link(link) + + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) 
-> Link + cache_entry = self.get_cache_entry(link, package_name, supported_tags) + if cache_entry is None: + return link + return cache_entry.link + + def get_cache_entry( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Optional[CacheEntry] + """Returns a CacheEntry with a link to a cached item if it exists or + None. The cache entry indicates if the item was found in the persistent + or ephemeral cache. + """ + retval = self._wheel_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=True) + + retval = self._ephem_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=False) + + return None diff --git a/env/lib/python2.7/site-packages/pip/_internal/cli/__init__.py b/venv/Lib/site-packages/pip/_internal/cli/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_internal/cli/__init__.py rename to venv/Lib/site-packages/pip/_internal/cli/__init__.py diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..d0cbaef4 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-36.pyc new file mode 100644 index 00000000..e444c6da Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-36.pyc new file mode 100644 index 00000000..091dd308 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-36.pyc new file mode 100644 index 00000000..74f1b53e Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-36.pyc new file mode 100644 index 00000000..bab5f3a9 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-36.pyc new file mode 100644 index 00000000..fa6e6e62 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-36.pyc new file mode 100644 index 00000000..1b25b179 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-36.pyc new file mode 100644 index 00000000..74416ec1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-36.pyc new file mode 100644 index 00000000..da14abd9 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-36.pyc new file mode 100644 index 00000000..5d063717 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-36.pyc new file mode 100644 index 00000000..66b5061d Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-36.pyc new file mode 100644 index 00000000..35f1af07 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/autocompletion.py b/venv/Lib/site-packages/pip/_internal/cli/autocompletion.py new file mode 100644 index 00000000..3b1d2ac9 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/autocompletion.py @@ -0,0 +1,162 @@ +"""Logic that powers autocompletion installed by ``pip completion``. +""" + +import optparse +import os +import sys +from itertools import chain +from typing import Any, Iterable, List, Optional + +from pip._internal.cli.main_parser import create_main_parser +from pip._internal.commands import commands_dict, create_command +from pip._internal.utils.misc import get_installed_distributions + + +def autocomplete(): + # type: () -> None + """Entry Point for completion of main and subcommand options.""" + # Don't complete if user hasn't sourced bash_completion file. 
+ if "PIP_AUTO_COMPLETE" not in os.environ: + return + cwords = os.environ["COMP_WORDS"].split()[1:] + cword = int(os.environ["COMP_CWORD"]) + try: + current = cwords[cword - 1] + except IndexError: + current = "" + + parser = create_main_parser() + subcommands = list(commands_dict) + options = [] + + # subcommand + subcommand_name = None # type: Optional[str] + for word in cwords: + if word in subcommands: + subcommand_name = word + break + # subcommand options + if subcommand_name is not None: + # special case: 'help' subcommand has no options + if subcommand_name == "help": + sys.exit(1) + # special case: list locally installed dists for show and uninstall + should_list_installed = not current.startswith("-") and subcommand_name in [ + "show", + "uninstall", + ] + if should_list_installed: + lc = current.lower() + installed = [ + dist.key + for dist in get_installed_distributions(local_only=True) + if dist.key.startswith(lc) and dist.key not in cwords[1:] + ] + # if there are no dists installed, fall back to option completion + if installed: + for dist in installed: + print(dist) + sys.exit(1) + + subcommand = create_command(subcommand_name) + + for opt in subcommand.parser.option_list_all: + if opt.help != optparse.SUPPRESS_HELP: + for opt_str in opt._long_opts + opt._short_opts: + options.append((opt_str, opt.nargs)) + + # filter out previously specified options from available options + prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]] + options = [(x, v) for (x, v) in options if x not in prev_opts] + # filter options by current input + options = [(k, v) for k, v in options if k.startswith(current)] + # get completion type given cwords and available subcommand options + completion_type = get_path_completion_type( + cwords, + cword, + subcommand.parser.option_list_all, + ) + # get completion files and directories if ``completion_type`` is + # ````, ```` or ```` + if completion_type: + paths = auto_complete_paths(current, completion_type) + options = [(path, 0) for path in paths] + for option in options: + opt_label = option[0] + # append '=' to options which require args + if option[1] and option[0][:2] == "--": + opt_label += "=" + print(opt_label) + else: + # show main parser options only when necessary + + opts = [i.option_list for i in parser.option_groups] + opts.append(parser.option_list) + flattened_opts = chain.from_iterable(opts) + if current.startswith("-"): + for opt in flattened_opts: + if opt.help != optparse.SUPPRESS_HELP: + subcommands += opt._long_opts + opt._short_opts + else: + # get completion type given cwords and all available options + completion_type = get_path_completion_type(cwords, cword, flattened_opts) + if completion_type: + subcommands = list(auto_complete_paths(current, completion_type)) + + print(" ".join([x for x in subcommands if x.startswith(current)])) + sys.exit(1) + + +def get_path_completion_type(cwords, cword, opts): + # type: (List[str], int, Iterable[Any]) -> Optional[str] + """Get the type of path completion (``file``, ``dir``, ``path`` or None) + + :param cwords: same as the environmental variable ``COMP_WORDS`` + :param cword: same as the environmental variable ``COMP_CWORD`` + :param opts: The available options to check + :return: path completion type (``file``, ``dir``, ``path`` or None) + """ + if cword < 2 or not cwords[cword - 2].startswith("-"): + return None + for opt in opts: + if opt.help == optparse.SUPPRESS_HELP: + continue + for o in str(opt).split("/"): + if cwords[cword - 2].split("=")[0] == o: + if not opt.metavar 
or any( + x in ("path", "file", "dir") for x in opt.metavar.split("/") + ): + return opt.metavar + return None + + +def auto_complete_paths(current, completion_type): + # type: (str, str) -> Iterable[str] + """If ``completion_type`` is ``file`` or ``path``, list all regular files + and directories starting with ``current``; otherwise only list directories + starting with ``current``. + + :param current: The word to be completed + :param completion_type: path completion type(`file`, `path` or `dir`)i + :return: A generator of regular files and/or directories + """ + directory, filename = os.path.split(current) + current_path = os.path.abspath(directory) + # Don't complete paths if they can't be accessed + if not os.access(current_path, os.R_OK): + return + filename = os.path.normcase(filename) + # list all files that start with ``filename`` + file_list = ( + x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename) + ) + for f in file_list: + opt = os.path.join(current_path, f) + comp_file = os.path.normcase(os.path.join(directory, f)) + # complete regular files when there is not ```` after option + # complete directories when there is ````, ```` or + # ````after option + if completion_type != "dir" and os.path.isfile(opt): + yield comp_file + elif os.path.isdir(opt): + yield os.path.join(comp_file, "") diff --git a/venv/Lib/site-packages/pip/_internal/cli/base_command.py b/venv/Lib/site-packages/pip/_internal/cli/base_command.py new file mode 100644 index 00000000..b59420dd --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/base_command.py @@ -0,0 +1,221 @@ +"""Base Command class, and related routines""" + +import logging +import logging.config +import optparse +import os +import sys +import traceback +from optparse import Values +from typing import Any, List, Optional, Tuple + +from pip._internal.cli import cmdoptions +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip._internal.cli.status_codes import ( + ERROR, + PREVIOUS_BUILD_DIR_ERROR, + UNKNOWN_ERROR, + VIRTUALENV_NOT_FOUND, +) +from pip._internal.exceptions import ( + BadCommand, + CommandError, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + UninstallationError, +) +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filesystem import check_path_owner +from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging +from pip._internal.utils.misc import get_prog, normalize_path +from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry +from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry +from pip._internal.utils.virtualenv import running_under_virtualenv + +__all__ = ["Command"] + +logger = logging.getLogger(__name__) + + +class Command(CommandContextMixIn): + usage = None # type: str + ignore_require_venv = False # type: bool + + def __init__(self, name, summary, isolated=False): + # type: (str, str, bool) -> None + super().__init__() + + self.name = name + self.summary = summary + self.parser = ConfigOptionParser( + usage=self.usage, + prog=f"{get_prog()} {name}", + formatter=UpdatingDefaultsHelpFormatter(), + add_help_option=False, + name=name, + description=self.__doc__, + isolated=isolated, + ) + + self.tempdir_registry = None # type: Optional[TempDirRegistry] + + # Commands should add options to this option group + optgroup_name = 
f"{self.name.capitalize()} Options" + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options + gen_opts = cmdoptions.make_option_group( + cmdoptions.general_group, + self.parser, + ) + self.parser.add_option_group(gen_opts) + + self.add_options() + + def add_options(self): + # type: () -> None + pass + + def handle_pip_version_check(self, options): + # type: (Values) -> None + """ + This is a no-op so that commands by default do not do the pip version + check. + """ + # Make sure we do the pip version check if the index_group options + # are present. + assert not hasattr(options, "no_index") + + def run(self, options, args): + # type: (Values, List[Any]) -> int + raise NotImplementedError + + def parse_args(self, args): + # type: (List[str]) -> Tuple[Any, Any] + # factored out for testability + return self.parser.parse_args(args) + + def main(self, args): + # type: (List[str]) -> int + try: + with self.main_context(): + return self._main(args) + finally: + logging.shutdown() + + def _main(self, args): + # type: (List[str]) -> int + # We must initialize this before the tempdir manager, otherwise the + # configuration would not be accessible by the time we clean up the + # tempdir manager. + self.tempdir_registry = self.enter_context(tempdir_registry()) + # Intentionally set as early as possible so globally-managed temporary + # directories are available to the rest of the code. + self.enter_context(global_tempdir_manager()) + + options, args = self.parse_args(args) + + # Set verbosity so that it can be used elsewhere. + self.verbosity = options.verbose - options.quiet + + level_number = setup_logging( + verbosity=self.verbosity, + no_color=options.no_color, + user_log_file=options.log, + ) + + # TODO: Try to get these passing down from the command? + # without resorting to os.environ to hold these. + # This also affects isolated builds and it should. + + if options.no_input: + os.environ["PIP_NO_INPUT"] = "1" + + if options.exists_action: + os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action) + + if options.require_venv and not self.ignore_require_venv: + # If a venv is required check if it can really be found + if not running_under_virtualenv(): + logger.critical("Could not find an activated virtualenv (required).") + sys.exit(VIRTUALENV_NOT_FOUND) + + if options.cache_dir: + options.cache_dir = normalize_path(options.cache_dir) + if not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "or is not writable by the current user. The cache " + "has been disabled. Check the permissions and owner of " + "that directory. If executing pip with sudo, you should " + "use sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + if getattr(options, "build_dir", None): + deprecated( + reason=( + "The -b/--build/--build-dir/--build-directory " + "option is deprecated and has no effect anymore." + ), + replacement=( + "use the TMPDIR/TEMP/TMP environment variable, " + "possibly combined with --no-clean" + ), + gone_in="21.3", + issue=8333, + ) + + if "2020-resolver" in options.features_enabled: + logger.warning( + "--use-feature=2020-resolver no longer has any effect, " + "since it is now the default dependency resolver in pip. " + "This will become an error in pip 21.0." 
+ ) + + try: + status = self.run(options, args) + assert isinstance(status, int) + return status + except PreviousBuildDirError as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return PREVIOUS_BUILD_DIR_ERROR + except ( + InstallationError, + UninstallationError, + BadCommand, + NetworkConnectionError, + ) as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except CommandError as exc: + logger.critical("%s", exc) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BrokenStdoutLoggingError: + # Bypass our logger and write any remaining messages to stderr + # because stdout no longer works. + print("ERROR: Pipe to stdout was broken", file=sys.stderr) + if level_number <= logging.DEBUG: + traceback.print_exc(file=sys.stderr) + + return ERROR + except KeyboardInterrupt: + logger.critical("Operation cancelled by user") + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BaseException: + logger.critical("Exception:", exc_info=True) + + return UNKNOWN_ERROR + finally: + self.handle_pip_version_check(options) diff --git a/venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py b/venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py new file mode 100644 index 00000000..f71c0b02 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py @@ -0,0 +1,1024 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them +globally. One reason being that options with action='append' can carry state +between parses. pip parses general options twice internally, and shouldn't +pass on state. To be consistent, all options will follow this design. +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import os +import textwrap +import warnings +from functools import partial +from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values +from textwrap import dedent +from typing import Any, Callable, Dict, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.parser import ConfigOptionParser +from pip._internal.cli.progress_bars import BAR_TYPES +from pip._internal.exceptions import CommandError +from pip._internal.locations import USER_CACHE_DIR, get_src_prefix +from pip._internal.models.format_control import FormatControl +from pip._internal.models.index import PyPI +from pip._internal.models.target_python import TargetPython +from pip._internal.utils.hashes import STRONG_HASHES +from pip._internal.utils.misc import strtobool + + +def raise_option_error(parser, option, msg): + # type: (OptionParser, Option, str) -> None + """ + Raise an option parsing error using parser.error(). + + Args: + parser: an OptionParser instance. + option: an Option instance. + msg: the error text. 
+ """ + msg = f"{option} error: {msg}" + msg = textwrap.fill(" ".join(msg.split())) + parser.error(msg) + + +def make_option_group(group, parser): + # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group["name"]) + for option in group["options"]: + option_group.add_option(option()) + return option_group + + +def check_install_build_global(options, check_options=None): + # type: (Values, Optional[Values]) -> None + """Disable wheels if per-setup.py call options are set. + + :param options: The OptionParser options to update. + :param check_options: The options to check, if not supplied defaults to + options. + """ + if check_options is None: + check_options = options + + def getname(n): + # type: (str) -> Optional[Any] + return getattr(check_options, n, None) + + names = ["build_options", "global_options", "install_options"] + if any(map(getname, names)): + control = options.format_control + control.disallow_binaries() + warnings.warn( + "Disabling all use of wheels due to the use of --build-option " + "/ --global-option / --install-option.", + stacklevel=2, + ) + + +def check_dist_restriction(options, check_target=False): + # type: (Values, bool) -> None + """Function for determining if custom platform options are allowed. + + :param options: The OptionParser options. + :param check_target: Whether or not to check if --target is being used. + """ + dist_restriction_set = any( + [ + options.python_version, + options.platforms, + options.abis, + options.implementation, + ] + ) + + binary_only = FormatControl(set(), {":all:"}) + sdist_dependencies_allowed = ( + options.format_control != binary_only and not options.ignore_dependencies + ) + + # Installations or downloads using dist restrictions must not combine + # source distributions and dist-specific wheels, as they are not + # guaranteed to be locally compatible. + if dist_restriction_set and sdist_dependencies_allowed: + raise CommandError( + "When restricting platform and interpreter constraints using " + "--python-version, --platform, --abi, or --implementation, " + "either --no-deps must be set, or --only-binary=:all: must be " + "set and --no-binary must not be set (or must be set to " + ":none:)." + ) + + if check_target: + if dist_restriction_set and not options.target_dir: + raise CommandError( + "Can not use any platform or abi specific options unless " + "installing via '--target'" + ) + + +def _path_option_check(option, opt, value): + # type: (Option, str, str) -> str + return os.path.expanduser(value) + + +def _package_name_option_check(option, opt, value): + # type: (Option, str, str) -> str + return canonicalize_name(value) + + +class PipOption(Option): + TYPES = Option.TYPES + ("path", "package_name") + TYPE_CHECKER = Option.TYPE_CHECKER.copy() + TYPE_CHECKER["package_name"] = _package_name_option_check + TYPE_CHECKER["path"] = _path_option_check + + +########### +# options # +########### + +help_ = partial( + Option, + "-h", + "--help", + dest="help", + action="help", + help="Show help.", +) # type: Callable[..., Option] + +isolated_mode = partial( + Option, + "--isolated", + dest="isolated_mode", + action="store_true", + default=False, + help=( + "Run pip in an isolated mode, ignoring environment variables and user " + "configuration." 
+ ), +) # type: Callable[..., Option] + +require_virtualenv = partial( + Option, + # Run only if inside a virtualenv, bail if not. + "--require-virtualenv", + "--require-venv", + dest="require_venv", + action="store_true", + default=False, + help=SUPPRESS_HELP, +) # type: Callable[..., Option] + +verbose = partial( + Option, + "-v", + "--verbose", + dest="verbose", + action="count", + default=0, + help="Give more output. Option is additive, and can be used up to 3 times.", +) # type: Callable[..., Option] + +no_color = partial( + Option, + "--no-color", + dest="no_color", + action="store_true", + default=False, + help="Suppress colored output.", +) # type: Callable[..., Option] + +version = partial( + Option, + "-V", + "--version", + dest="version", + action="store_true", + help="Show version and exit.", +) # type: Callable[..., Option] + +quiet = partial( + Option, + "-q", + "--quiet", + dest="quiet", + action="count", + default=0, + help=( + "Give less output. Option is additive, and can be used up to 3" + " times (corresponding to WARNING, ERROR, and CRITICAL logging" + " levels)." + ), +) # type: Callable[..., Option] + +progress_bar = partial( + Option, + "--progress-bar", + dest="progress_bar", + type="choice", + choices=list(BAR_TYPES.keys()), + default="on", + help=( + "Specify type of progress to be displayed [" + + "|".join(BAR_TYPES.keys()) + + "] (default: %default)" + ), +) # type: Callable[..., Option] + +log = partial( + PipOption, + "--log", + "--log-file", + "--local-log", + dest="log", + metavar="path", + type="path", + help="Path to a verbose appending log.", +) # type: Callable[..., Option] + +no_input = partial( + Option, + # Don't ask for input + "--no-input", + dest="no_input", + action="store_true", + default=False, + help="Disable prompting for input.", +) # type: Callable[..., Option] + +proxy = partial( + Option, + "--proxy", + dest="proxy", + type="str", + default="", + help="Specify a proxy in the form [user:passwd@]proxy.server:port.", +) # type: Callable[..., Option] + +retries = partial( + Option, + "--retries", + dest="retries", + type="int", + default=5, + help="Maximum number of retries each connection should attempt " + "(default %default times).", +) # type: Callable[..., Option] + +timeout = partial( + Option, + "--timeout", + "--default-timeout", + metavar="sec", + dest="timeout", + type="float", + default=15, + help="Set the socket timeout (default %default seconds).", +) # type: Callable[..., Option] + + +def exists_action(): + # type: () -> Option + return Option( + # Option when path already exist + "--exists-action", + dest="exists_action", + type="choice", + choices=["s", "i", "w", "b", "a"], + default=[], + action="append", + metavar="action", + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", + ) + + +cert = partial( + PipOption, + "--cert", + dest="cert", + type="path", + metavar="path", + help=( + "Path to PEM-encoded CA certificate bundle. " + "If provided, overrides the default. " + "See 'SSL Certificate Verification' in pip documentation " + "for more information." 
+ ), +) # type: Callable[..., Option] + +client_cert = partial( + PipOption, + "--client-cert", + dest="client_cert", + type="path", + default=None, + metavar="path", + help="Path to SSL client certificate, a single file containing the " + "private key and the certificate in PEM format.", +) # type: Callable[..., Option] + +index_url = partial( + Option, + "-i", + "--index-url", + "--pypi-url", + dest="index_url", + metavar="URL", + default=PyPI.simple_url, + help="Base URL of the Python Package Index (default %default). " + "This should point to a repository compliant with PEP 503 " + "(the simple repository API) or a local directory laid out " + "in the same format.", +) # type: Callable[..., Option] + + +def extra_index_url(): + # type: () -> Option + return Option( + "--extra-index-url", + dest="extra_index_urls", + metavar="URL", + action="append", + default=[], + help="Extra URLs of package indexes to use in addition to " + "--index-url. Should follow the same rules as " + "--index-url.", + ) + + +no_index = partial( + Option, + "--no-index", + dest="no_index", + action="store_true", + default=False, + help="Ignore package index (only looking at --find-links URLs instead).", +) # type: Callable[..., Option] + + +def find_links(): + # type: () -> Option + return Option( + "-f", + "--find-links", + dest="find_links", + action="append", + default=[], + metavar="url", + help="If a URL or path to an html file, then parse for links to " + "archives such as sdist (.tar.gz) or wheel (.whl) files. " + "If a local path or file:// URL that's a directory, " + "then look for archives in the directory listing. " + "Links to VCS project URLs are not supported.", + ) + + +def trusted_host(): + # type: () -> Option + return Option( + "--trusted-host", + dest="trusted_hosts", + action="append", + metavar="HOSTNAME", + default=[], + help="Mark this host or host:port pair as trusted, even though it " + "does not have valid or any HTTPS.", + ) + + +def constraints(): + # type: () -> Option + return Option( + "-c", + "--constraint", + dest="constraints", + action="append", + default=[], + metavar="file", + help="Constrain versions using the given constraints file. " + "This option can be used multiple times.", + ) + + +def requirements(): + # type: () -> Option + return Option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help="Install from the given requirements file. " + "This option can be used multiple times.", + ) + + +def editable(): + # type: () -> Option + return Option( + "-e", + "--editable", + dest="editables", + action="append", + default=[], + metavar="path/url", + help=( + "Install a project in editable mode (i.e. setuptools " + '"develop mode") from a local project path or a VCS url.' + ), + ) + + +def _handle_src(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + +src = partial( + PipOption, + "--src", + "--source", + "--source-dir", + "--source-directory", + dest="src_dir", + type="path", + metavar="dir", + default=get_src_prefix(), + action="callback", + callback=_handle_src, + help="Directory to check out editable projects into. " + 'The default in a virtualenv is "/src". 
' + 'The default for global installs is "/src".', +) # type: Callable[..., Option] + + +def _get_format_control(values, option): + # type: (Values, Option) -> Any + """Get a format_control object.""" + return getattr(values, option.dest) + + +def _handle_no_binary(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.no_binary, + existing.only_binary, + ) + + +def _handle_only_binary(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.only_binary, + existing.no_binary, + ) + + +def no_binary(): + # type: () -> Option + format_control = FormatControl(set(), set()) + return Option( + "--no-binary", + dest="format_control", + action="callback", + callback=_handle_no_binary, + type="str", + default=format_control, + help="Do not use binary packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all binary packages, ":none:" to empty the set (notice ' + "the colons), or one or more package names with commas between " + "them (no colons). Note that some packages are tricky to compile " + "and may fail to install when this option is used on them.", + ) + + +def only_binary(): + # type: () -> Option + format_control = FormatControl(set(), set()) + return Option( + "--only-binary", + dest="format_control", + action="callback", + callback=_handle_only_binary, + type="str", + default=format_control, + help="Do not use source packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all source packages, ":none:" to empty the set, or one ' + "or more package names with commas between them. Packages " + "without binary distributions will fail to install when this " + "option is used on them.", + ) + + +platforms = partial( + Option, + "--platform", + dest="platforms", + metavar="platform", + action="append", + default=None, + help=( + "Only use wheels compatible with . Defaults to the " + "platform of the running system. Use this option multiple times to " + "specify multiple platforms supported by the target interpreter." + ), +) # type: Callable[..., Option] + + +# This was made a separate function for unit-testing purposes. +def _convert_python_version(value): + # type: (str) -> Tuple[Tuple[int, ...], Optional[str]] + """ + Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. + + :return: A 2-tuple (version_info, error_msg), where `error_msg` is + non-None if and only if there was a parsing error. + """ + if not value: + # The empty string is the same as not providing a value. + return (None, None) + + parts = value.split(".") + if len(parts) > 3: + return ((), "at most three version parts are allowed") + + if len(parts) == 1: + # Then we are in the case of "3" or "37". + value = parts[0] + if len(value) > 1: + parts = [value[0], value[1:]] + + try: + version_info = tuple(int(part) for part in parts) + except ValueError: + return ((), "each version part must be an integer") + + return (version_info, None) + + +def _handle_python_version(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + """ + Handle a provided --python-version value. 
+ """ + version_info, error_msg = _convert_python_version(value) + if error_msg is not None: + msg = "invalid --python-version value: {!r}: {}".format( + value, + error_msg, + ) + raise_option_error(parser, option=option, msg=msg) + + parser.values.python_version = version_info + + +python_version = partial( + Option, + "--python-version", + dest="python_version", + metavar="python_version", + action="callback", + callback=_handle_python_version, + type="str", + default=None, + help=dedent( + """\ + The Python interpreter version to use for wheel and "Requires-Python" + compatibility checks. Defaults to a version derived from the running + interpreter. The version can be specified using up to three dot-separated + integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor + version can also be given as a string without dots (e.g. "37" for 3.7.0). + """ + ), +) # type: Callable[..., Option] + + +implementation = partial( + Option, + "--implementation", + dest="implementation", + metavar="implementation", + default=None, + help=( + "Only use wheels compatible with Python " + "implementation , e.g. 'pp', 'jy', 'cp', " + " or 'ip'. If not specified, then the current " + "interpreter implementation is used. Use 'py' to force " + "implementation-agnostic wheels." + ), +) # type: Callable[..., Option] + + +abis = partial( + Option, + "--abi", + dest="abis", + metavar="abi", + action="append", + default=None, + help=( + "Only use wheels compatible with Python abi , e.g. 'pypy_41'. " + "If not specified, then the current interpreter abi tag is used. " + "Use this option multiple times to specify multiple abis supported " + "by the target interpreter. Generally you will need to specify " + "--implementation, --platform, and --python-version when using this " + "option." + ), +) # type: Callable[..., Option] + + +def add_target_python_options(cmd_opts): + # type: (OptionGroup) -> None + cmd_opts.add_option(platforms()) + cmd_opts.add_option(python_version()) + cmd_opts.add_option(implementation()) + cmd_opts.add_option(abis()) + + +def make_target_python(options): + # type: (Values) -> TargetPython + target_python = TargetPython( + platforms=options.platforms, + py_version_info=options.python_version, + abis=options.abis, + implementation=options.implementation, + ) + + return target_python + + +def prefer_binary(): + # type: () -> Option + return Option( + "--prefer-binary", + dest="prefer_binary", + action="store_true", + default=False, + help="Prefer older binary packages over newer source packages.", + ) + + +cache_dir = partial( + PipOption, + "--cache-dir", + dest="cache_dir", + default=USER_CACHE_DIR, + metavar="dir", + type="path", + help="Store the cache data in .", +) # type: Callable[..., Option] + + +def _handle_no_cache_dir(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None + """ + Process a value provided for the --no-cache-dir option. + + This is an optparse.Option callback for the --no-cache-dir option. + """ + # The value argument will be None if --no-cache-dir is passed via the + # command-line, since the option doesn't accept arguments. However, + # the value can be non-None if the option is triggered e.g. by an + # environment variable, like PIP_NO_CACHE_DIR=true. + if value is not None: + # Then parse the string value to get argument error-checking. 
+ try: + strtobool(value) + except ValueError as exc: + raise_option_error(parser, option=option, msg=str(exc)) + + # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() + # converted to 0 (like "false" or "no") caused cache_dir to be disabled + # rather than enabled (logic would say the latter). Thus, we disable + # the cache directory not just on values that parse to True, but (for + # backwards compatibility reasons) also on values that parse to False. + # In other words, always set it to False if the option is provided in + # some (valid) form. + parser.values.cache_dir = False + + +no_cache = partial( + Option, + "--no-cache-dir", + dest="cache_dir", + action="callback", + callback=_handle_no_cache_dir, + help="Disable the cache.", +) # type: Callable[..., Option] + +no_deps = partial( + Option, + "--no-deps", + "--no-dependencies", + dest="ignore_dependencies", + action="store_true", + default=False, + help="Don't install package dependencies.", +) # type: Callable[..., Option] + +build_dir = partial( + PipOption, + "-b", + "--build", + "--build-dir", + "--build-directory", + dest="build_dir", + type="path", + metavar="dir", + help=SUPPRESS_HELP, +) # type: Callable[..., Option] + +ignore_requires_python = partial( + Option, + "--ignore-requires-python", + dest="ignore_requires_python", + action="store_true", + help="Ignore the Requires-Python information.", +) # type: Callable[..., Option] + +no_build_isolation = partial( + Option, + "--no-build-isolation", + dest="build_isolation", + action="store_false", + default=True, + help="Disable isolation when building a modern source distribution. " + "Build dependencies specified by PEP 518 must be already installed " + "if this option is used.", +) # type: Callable[..., Option] + + +def _handle_no_use_pep517(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None + """ + Process a value provided for the --no-use-pep517 option. + + This is an optparse.Option callback for the no_use_pep517 option. + """ + # Since --no-use-pep517 doesn't accept arguments, the value argument + # will be None if --no-use-pep517 is passed via the command-line. + # However, the value can be non-None if the option is triggered e.g. + # by an environment variable, for example "PIP_NO_USE_PEP517=true". + if value is not None: + msg = """A value was passed for --no-use-pep517, + probably using either the PIP_NO_USE_PEP517 environment variable + or the "no-use-pep517" config file option. Use an appropriate value + of the PIP_USE_PEP517 environment variable or the "use-pep517" + config file option instead. + """ + raise_option_error(parser, option=option, msg=msg) + + # Otherwise, --no-use-pep517 was passed via the command-line. + parser.values.use_pep517 = False + + +use_pep517 = partial( + Option, + "--use-pep517", + dest="use_pep517", + action="store_true", + default=None, + help="Use PEP 517 for building source distributions " + "(use --no-use-pep517 to force legacy behaviour).", +) # type: Any + +no_use_pep517 = partial( + Option, + "--no-use-pep517", + dest="use_pep517", + action="callback", + callback=_handle_no_use_pep517, + default=None, + help=SUPPRESS_HELP, +) # type: Any + +install_options = partial( + Option, + "--install-option", + dest="install_options", + action="append", + metavar="options", + help="Extra arguments to be supplied to the setup.py install " + 'command (use like --install-option="--install-scripts=/usr/local/' + 'bin"). 
Use multiple --install-option options to pass multiple ' + "options to setup.py install. If you are using an option with a " + "directory path, be sure to use absolute path.", +) # type: Callable[..., Option] + +build_options = partial( + Option, + "--build-option", + dest="build_options", + metavar="options", + action="append", + help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", +) # type: Callable[..., Option] + +global_options = partial( + Option, + "--global-option", + dest="global_options", + action="append", + metavar="options", + help="Extra global options to be supplied to the setup.py " + "call before the install or bdist_wheel command.", +) # type: Callable[..., Option] + +no_clean = partial( + Option, + "--no-clean", + action="store_true", + default=False, + help="Don't clean up build directories.", +) # type: Callable[..., Option] + +pre = partial( + Option, + "--pre", + action="store_true", + default=False, + help="Include pre-release and development versions. By default, " + "pip only finds stable versions.", +) # type: Callable[..., Option] + +disable_pip_version_check = partial( + Option, + "--disable-pip-version-check", + dest="disable_pip_version_check", + action="store_true", + default=False, + help="Don't periodically check PyPI to determine whether a new version " + "of pip is available for download. Implied with --no-index.", +) # type: Callable[..., Option] + + +def _handle_merge_hash(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + """Given a value spelled "algo:digest", append the digest to a list + pointed to in a dict by the algo name.""" + if not parser.values.hashes: + parser.values.hashes = {} + try: + algo, digest = value.split(":", 1) + except ValueError: + parser.error( + "Arguments to {} must be a hash name " # noqa + "followed by a value, like --hash=sha256:" + "abcde...".format(opt_str) + ) + if algo not in STRONG_HASHES: + parser.error( + "Allowed hash algorithms for {} are {}.".format( # noqa + opt_str, ", ".join(STRONG_HASHES) + ) + ) + parser.values.hashes.setdefault(algo, []).append(digest) + + +hash = partial( + Option, + "--hash", + # Hash values eventually end up in InstallRequirement.hashes due to + # __dict__ copying in process_line(). + dest="hashes", + action="callback", + callback=_handle_merge_hash, + type="string", + help="Verify that the package's archive matches this " + "hash before installing. Example: --hash=sha256:abcdef...", +) # type: Callable[..., Option] + + +require_hashes = partial( + Option, + "--require-hashes", + dest="require_hashes", + action="store_true", + default=False, + help="Require a hash to check each requirement against, for " + "repeatable installs. 
This option is implied when any package in a " + "requirements file has a --hash option.", +) # type: Callable[..., Option] + + +list_path = partial( + PipOption, + "--path", + dest="path", + type="path", + action="append", + help="Restrict to the specified installation path for listing " + "packages (can be used multiple times).", +) # type: Callable[..., Option] + + +def check_list_path_option(options): + # type: (Values) -> None + if options.path and (options.user or options.local): + raise CommandError("Cannot combine '--path' with '--user' or '--local'") + + +list_exclude = partial( + PipOption, + "--exclude", + dest="excludes", + action="append", + metavar="package", + type="package_name", + help="Exclude specified package from the output", +) # type: Callable[..., Option] + + +no_python_version_warning = partial( + Option, + "--no-python-version-warning", + dest="no_python_version_warning", + action="store_true", + default=False, + help="Silence deprecation warnings for upcoming unsupported Pythons.", +) # type: Callable[..., Option] + + +use_new_feature = partial( + Option, + "--use-feature", + dest="features_enabled", + metavar="feature", + action="append", + default=[], + choices=["2020-resolver", "fast-deps", "in-tree-build"], + help="Enable new functionality, that may be backward incompatible.", +) # type: Callable[..., Option] + +use_deprecated_feature = partial( + Option, + "--use-deprecated", + dest="deprecated_features_enabled", + metavar="feature", + action="append", + default=[], + choices=["legacy-resolver"], + help=("Enable deprecated functionality, that will be removed in the future."), +) # type: Callable[..., Option] + + +########## +# groups # +########## + +general_group = { + "name": "General Options", + "options": [ + help_, + isolated_mode, + require_virtualenv, + verbose, + version, + quiet, + log, + no_input, + proxy, + retries, + timeout, + exists_action, + trusted_host, + cert, + client_cert, + cache_dir, + no_cache, + disable_pip_version_check, + no_color, + no_python_version_warning, + use_new_feature, + use_deprecated_feature, + ], +} # type: Dict[str, Any] + +index_group = { + "name": "Package Index Options", + "options": [ + index_url, + extra_index_url, + no_index, + find_links, + ], +} # type: Dict[str, Any] diff --git a/venv/Lib/site-packages/pip/_internal/cli/command_context.py b/venv/Lib/site-packages/pip/_internal/cli/command_context.py new file mode 100644 index 00000000..375a2e36 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/command_context.py @@ -0,0 +1,30 @@ +from contextlib import ExitStack, contextmanager +from typing import ContextManager, Iterator, TypeVar + +_T = TypeVar("_T", covariant=True) + + +class CommandContextMixIn: + def __init__(self): + # type: () -> None + super().__init__() + self._in_main_context = False + self._main_context = ExitStack() + + @contextmanager + def main_context(self): + # type: () -> Iterator[None] + assert not self._in_main_context + + self._in_main_context = True + try: + with self._main_context: + yield + finally: + self._in_main_context = False + + def enter_context(self, context_provider): + # type: (ContextManager[_T]) -> _T + assert self._in_main_context + + return self._main_context.enter_context(context_provider) diff --git a/venv/Lib/site-packages/pip/_internal/cli/main.py b/venv/Lib/site-packages/pip/_internal/cli/main.py new file mode 100644 index 00000000..7ae074b5 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/main.py @@ -0,0 +1,71 @@ +"""Primary application 
entrypoint. +""" +import locale +import logging +import os +import sys +from typing import List, Optional + +from pip._internal.cli.autocompletion import autocomplete +from pip._internal.cli.main_parser import parse_command +from pip._internal.commands import create_command +from pip._internal.exceptions import PipError +from pip._internal.utils import deprecation + +logger = logging.getLogger(__name__) + + +# Do not import and use main() directly! Using it directly is actively +# discouraged by pip's maintainers. The name, location and behavior of +# this function is subject to change, so calling it directly is not +# portable across different pip versions. + +# In addition, running pip in-process is unsupported and unsafe. This is +# elaborated in detail at +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. +# That document also provides suggestions that should work for nearly +# all users that are considering importing and using main() directly. + +# However, we know that certain users will still want to invoke pip +# in-process. If you understand and accept the implications of using pip +# in an unsupported manner, the best approach is to use runpy to avoid +# depending on the exact location of this entry point. + +# The following example shows how to use runpy to invoke pip in that +# case: +# +# sys.argv = ["pip", your, args, here] +# runpy.run_module("pip", run_name="__main__") +# +# Note that this will exit the process after running, unlike a direct +# call to main. As it is not safe to do any processing after calling +# main, this should not be an issue in practice. + + +def main(args=None): + # type: (Optional[List[str]]) -> int + if args is None: + args = sys.argv[1:] + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: + sys.stderr.write(f"ERROR: {exc}") + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip._internal.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, "") + except locale.Error as e: + # setlocale can apparently crash if locale are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) + + return command.main(cmd_args) diff --git a/venv/Lib/site-packages/pip/_internal/cli/main_parser.py b/venv/Lib/site-packages/pip/_internal/cli/main_parser.py new file mode 100644 index 00000000..d0f58fe4 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/main_parser.py @@ -0,0 +1,89 @@ +"""A single place for constructing and exposing the main parser +""" + +import os +import sys +from typing import List, Tuple + +from pip._internal.cli import cmdoptions +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip._internal.commands import commands_dict, get_similar_commands +from pip._internal.exceptions import CommandError +from pip._internal.utils.misc import get_pip_version, get_prog + +__all__ = ["create_main_parser", "parse_command"] + + +def create_main_parser(): + # type: () -> ConfigOptionParser + """Creates and returns the main parser for pip's CLI""" + + parser = ConfigOptionParser( + usage="\n%prog [options]", + add_help_option=False, + formatter=UpdatingDefaultsHelpFormatter(), + name="global", + prog=get_prog(), + ) + parser.disable_interspersed_args() + + parser.version = 
get_pip_version() + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + # so the help formatter knows + parser.main = True # type: ignore + + # create command listing for description + description = [""] + [ + f"{name:27} {command_info.summary}" + for name, command_info in commands_dict.items() + ] + parser.description = "\n".join(description) + + return parser + + +def parse_command(args): + # type: (List[str]) -> Tuple[str, List[str]] + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this + # call is to split the initial args into the general options before the + # subcommand and everything else. + # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout==5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --version + if general_options.version: + sys.stdout.write(parser.version) + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == "help" and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0] + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(" - ".join(msg)) + + # all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(cmd_name) + + return cmd_name, cmd_args diff --git a/venv/Lib/site-packages/pip/_internal/cli/parser.py b/venv/Lib/site-packages/pip/_internal/cli/parser.py new file mode 100644 index 00000000..16523c5a --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/parser.py @@ -0,0 +1,305 @@ +"""Base option parser setup""" + +import logging +import optparse +import shutil +import sys +import textwrap +from contextlib import suppress +from typing import Any, Dict, Iterator, List, Tuple + +from pip._internal.cli.status_codes import UNKNOWN_ERROR +from pip._internal.configuration import Configuration, ConfigurationError +from pip._internal.utils.misc import redact_auth_from_url, strtobool + +logger = logging.getLogger(__name__) + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args, **kwargs): + # type: (*Any, **Any) -> None + # help position must be aligned with __init__.parseopts.description + kwargs["max_help_position"] = 30 + kwargs["indent_increment"] = 1 + kwargs["width"] = shutil.get_terminal_size()[0] - 2 + super().__init__(*args, **kwargs) + + def format_option_strings(self, option): + # type: (optparse.Option) -> str + return self._format_option_strings(option) + + def _format_option_strings(self, option, mvarfmt=" <{}>", optsep=", "): + # type: (optparse.Option, str, str) -> str + """ + Return a comma-separated list of option strings and metavars. 
+ + :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') + :param mvarfmt: metavar format string + :param optsep: separator + """ + opts = [] + + if option._short_opts: + opts.append(option._short_opts[0]) + if option._long_opts: + opts.append(option._long_opts[0]) + if len(opts) > 1: + opts.insert(1, optsep) + + if option.takes_value(): + assert option.dest is not None + metavar = option.metavar or option.dest.lower() + opts.append(mvarfmt.format(metavar.lower())) + + return "".join(opts) + + def format_heading(self, heading): + # type: (str) -> str + if heading == "Options": + return "" + return heading + ":\n" + + def format_usage(self, usage): + # type: (str) -> str + """ + Ensure there is only one newline between usage and the first heading + if there is no description. + """ + msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " ")) + return msg + + def format_description(self, description): + # type: (str) -> str + # leave full control over description to us + if description: + if hasattr(self.parser, "main"): + label = "Commands" + else: + label = "Description" + # some doc strings have initial newlines, some don't + description = description.lstrip("\n") + # some doc strings have final newlines and spaces, some don't + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") + description = f"{label}:\n{description}\n" + return description + else: + return "" + + def format_epilog(self, epilog): + # type: (str) -> str + # leave full control over epilog to us + if epilog: + return epilog + else: + return "" + + def indent_lines(self, text, indent): + # type: (str, str) -> str + new_lines = [indent + line for line in text.split("\n")] + return "\n".join(new_lines) + + +class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): + """Custom help formatter for use in ConfigOptionParser. + + This is updates the defaults before expanding them, allowing + them to show up correctly in the help listing. 
+ + Also redact auth from url type options + """ + + def expand_default(self, option): + # type: (optparse.Option) -> str + default_values = None + if self.parser is not None: + assert isinstance(self.parser, ConfigOptionParser) + self.parser._update_defaults(self.parser.defaults) + assert option.dest is not None + default_values = self.parser.defaults.get(option.dest) + help_text = super().expand_default(option) + + if default_values and option.metavar == "URL": + if isinstance(default_values, str): + default_values = [default_values] + + # If its not a list, we should abort and just return the help text + if not isinstance(default_values, list): + default_values = [] + + for val in default_values: + help_text = help_text.replace(val, redact_auth_from_url(val)) + + return help_text + + +class CustomOptionParser(optparse.OptionParser): + def insert_option_group(self, idx, *args, **kwargs): + # type: (int, Any, Any) -> optparse.OptionGroup + """Insert an OptionGroup at a given position.""" + group = self.add_option_group(*args, **kwargs) + + self.option_groups.pop() + self.option_groups.insert(idx, group) + + return group + + @property + def option_list_all(self): + # type: () -> List[optparse.Option] + """Get a list of all options, including those in option groups.""" + res = self.option_list[:] + for i in self.option_groups: + res.extend(i.option_list) + + return res + + +class ConfigOptionParser(CustomOptionParser): + """Custom option parser which updates its defaults by checking the + configuration files and environmental variables""" + + def __init__( + self, + *args, # type: Any + name, # type: str + isolated=False, # type: bool + **kwargs, # type: Any + ): + # type: (...) -> None + self.name = name + self.config = Configuration(isolated) + + assert self.name + super().__init__(*args, **kwargs) + + def check_default(self, option, key, val): + # type: (optparse.Option, str, Any) -> Any + try: + return option.check_value(key, val) + except optparse.OptionValueError as exc: + print(f"An error occurred during configuration: {exc}") + sys.exit(3) + + def _get_ordered_configuration_items(self): + # type: () -> Iterator[Tuple[str, Any]] + # Configuration gives keys in an unordered manner. Order them. + override_order = ["global", self.name, ":env:"] + + # Pool the options into different groups + section_items = { + name: [] for name in override_order + } # type: Dict[str, List[Tuple[str, Any]]] + for section_key, val in self.config.items(): + # ignore empty values + if not val: + logger.debug( + "Ignoring configuration key '%s' as it's value is empty.", + section_key, + ) + continue + + section, key = section_key.split(".", 1) + if section in override_order: + section_items[section].append((key, val)) + + # Yield each group in their override order + for section in override_order: + for key, val in section_items[section]: + yield key, val + + def _update_defaults(self, defaults): + # type: (Dict[str, Any]) -> Dict[str, Any] + """Updates the given defaults with values from the config files and + the environ. Does a little special handling for certain types of + options (lists).""" + + # Accumulate complex default state. + self.values = optparse.Values(self.defaults) + late_eval = set() + # Then set the options with those values + for key, val in self._get_ordered_configuration_items(): + # '--' because configuration supports only long names + option = self.get_option("--" + key) + + # Ignore options not present in this parser. E.g. 
non-globals put + # in [global] by users that want them to apply to all applicable + # commands. + if option is None: + continue + + assert option.dest is not None + + if option.action in ("store_true", "store_false"): + try: + val = strtobool(val) + except ValueError: + self.error( + "{} is not a valid value for {} option, " # noqa + "please specify a boolean value like yes/no, " + "true/false or 1/0 instead.".format(val, key) + ) + elif option.action == "count": + with suppress(ValueError): + val = strtobool(val) + with suppress(ValueError): + val = int(val) + if not isinstance(val, int) or val < 0: + self.error( + "{} is not a valid value for {} option, " # noqa + "please instead specify either a non-negative integer " + "or a boolean value like yes/no or false/true " + "which is equivalent to 1/0.".format(val, key) + ) + elif option.action == "append": + val = val.split() + val = [self.check_default(option, key, v) for v in val] + elif option.action == "callback": + assert option.callback is not None + late_eval.add(option.dest) + opt_str = option.get_opt_string() + val = option.convert_value(opt_str, val) + # From take_action + args = option.callback_args or () + kwargs = option.callback_kwargs or {} + option.callback(option, opt_str, val, self, *args, **kwargs) + else: + val = self.check_default(option, key, val) + + defaults[option.dest] = val + + for key in late_eval: + defaults[key] = getattr(self.values, key) + self.values = None + return defaults + + def get_default_values(self): + # type: () -> optparse.Values + """Overriding to make updating the defaults after instantiation of + the option parser possible, _update_defaults() does the dirty work.""" + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour. + return optparse.Values(self.defaults) + + # Load the configuration, or error out in case of an error + try: + self.config.load() + except ConfigurationError as err: + self.exit(UNKNOWN_ERROR, str(err)) + + defaults = self._update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + assert option.dest is not None + default = defaults.get(option.dest) + if isinstance(default, str): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg): + # type: (str) -> None + self.print_usage(sys.stderr) + self.exit(UNKNOWN_ERROR, f"{msg}\n") diff --git a/venv/Lib/site-packages/pip/_internal/cli/progress_bars.py b/venv/Lib/site-packages/pip/_internal/cli/progress_bars.py new file mode 100644 index 00000000..3064c856 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/progress_bars.py @@ -0,0 +1,261 @@ +import itertools +import sys +from signal import SIGINT, default_int_handler, signal +from typing import Any, Dict, List + +from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar +from pip._vendor.progress.spinner import Spinner + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation +from pip._internal.utils.misc import format_size + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + + +def _select_progress_class(preferred, fallback): + # type: (Bar, Bar) -> Bar + encoding = getattr(preferred.file, "encoding", None) + + # If we don't know what encoding this file is in, then we'll just assume + # that it doesn't support unicode and use the ASCII bar. 
+ if not encoding: + return fallback + + # Collect all of the possible characters we want to use with the preferred + # bar. + characters = [ + getattr(preferred, "empty_fill", ""), + getattr(preferred, "fill", ""), + ] + characters += list(getattr(preferred, "phases", [])) + + # Try to decode the characters we're using for the bar using the encoding + # of the given file, if this works then we'll assume that we can use the + # fancier bar and if not we'll fall back to the plaintext bar. + try: + "".join(characters).encode(encoding) + except UnicodeEncodeError: + return fallback + else: + return preferred + + +_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any + + +class InterruptibleMixin: + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. + + This allows downloads to be interrupted without leaving temporary state + (like hidden cursors) behind. + + This class is similar to the progress library's existing SigIntMixin + helper, but as of version 1.2, that helper has the following problems: + + 1. It calls sys.exit(). + 2. It discards the existing SIGINT handler completely. + 3. It leaves its own handler in place even after an uninterrupted finish, + which will have unexpected delayed effects if the user triggers an + unrelated keyboard interrupt some time after a progress-displaying + download has already completed, for example. + """ + + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None + """ + Save the original SIGINT handler for later. + """ + # https://github.com/python/mypy/issues/5887 + super().__init__(*args, **kwargs) # type: ignore + + self.original_handler = signal(SIGINT, self.handle_sigint) + + # If signal() returns None, the previous handler was not installed from + # Python, and we cannot restore it. This probably should not happen, + # but if it does, we must restore something sensible instead, at least. + # The least bad option should be Python's default SIGINT handler, which + # just raises KeyboardInterrupt. + if self.original_handler is None: + self.original_handler = default_int_handler + + def finish(self): + # type: () -> None + """ + Restore the original SIGINT handler after finishing. + + This should happen regardless of whether the progress display finishes + normally, or gets interrupted. + """ + super().finish() # type: ignore + signal(SIGINT, self.original_handler) + + def handle_sigint(self, signum, frame): # type: ignore + """ + Call self.finish() before delegating to the original SIGINT handler. + + This handler should only be in place while the progress display is + active. + """ + self.finish() + self.original_handler(signum, frame) + + +class SilentBar(Bar): + def update(self): + # type: () -> None + pass + + +class BlueEmojiBar(IncrementalBar): + + suffix = "%(percent)d%%" + bar_prefix = " " + bar_suffix = " " + phases = ("\U0001F539", "\U0001F537", "\U0001F535") + + +class DownloadProgressMixin: + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None + # https://github.com/python/mypy/issues/5887 + super().__init__(*args, **kwargs) # type: ignore + self.message = (" " * (get_indentation() + 2)) + self.message # type: str + + @property + def downloaded(self): + # type: () -> str + return format_size(self.index) # type: ignore + + @property + def download_speed(self): + # type: () -> str + # Avoid zero division errors... + if self.avg == 0.0: # type: ignore + return "..." 
+ return format_size(1 / self.avg) + "/s" # type: ignore + + @property + def pretty_eta(self): + # type: () -> str + if self.eta: # type: ignore + return f"eta {self.eta_td}" # type: ignore + return "" + + def iter(self, it): # type: ignore + for x in it: + yield x + # B305 is incorrectly raised here + # https://github.com/PyCQA/flake8-bugbear/issues/59 + self.next(len(x)) # noqa: B305 + self.finish() + + +class WindowsMixin: + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None + # The Windows terminal does not support the hide/show cursor ANSI codes + # even with colorama. So we'll ensure that hide_cursor is False on + # Windows. + # This call needs to go before the super() call, so that hide_cursor + # is set in time. The base progress bar class writes the "hide cursor" + # code to the terminal in its init, so if we don't set this soon + # enough, we get a "hide" with no corresponding "show"... + if WINDOWS and self.hide_cursor: # type: ignore + self.hide_cursor = False + + # https://github.com/python/mypy/issues/5887 + super().__init__(*args, **kwargs) # type: ignore + + # Check if we are running on Windows and we have the colorama module, + # if we do then wrap our file with it. + if WINDOWS and colorama: + self.file = colorama.AnsiToWin32(self.file) # type: ignore + # The progress code expects to be able to call self.file.isatty() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.isatty = lambda: self.file.wrapped.isatty() + # The progress code expects to be able to call self.file.flush() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.flush = lambda: self.file.wrapped.flush() + + +class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin): + + file = sys.stdout + message = "%(percent)d%%" + suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" + + +class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar): + pass + + +class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): + pass + + +class DownloadBar(BaseDownloadProgressBar, Bar): + pass + + +class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar): + pass + + +class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar): + pass + + +class DownloadProgressSpinner( + WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner +): + + file = sys.stdout + suffix = "%(downloaded)s %(download_speed)s" + + def next_phase(self): + # type: () -> str + if not hasattr(self, "_phaser"): + self._phaser = itertools.cycle(self.phases) + return next(self._phaser) + + def update(self): + # type: () -> None + message = self.message % self + phase = self.next_phase() + suffix = self.suffix % self + line = "".join( + [ + message, + " " if message else "", + phase, + " " if suffix else "", + suffix, + ] + ) + + self.writeln(line) + + +BAR_TYPES = { + "off": (DownloadSilentBar, DownloadSilentBar), + "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), + "ascii": (DownloadBar, DownloadProgressSpinner), + "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), + "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner), +} + + +def DownloadProgressProvider(progress_bar, max=None): # type: ignore + if max is None or max == 0: + return BAR_TYPES[progress_bar][1]().iter + else: + return BAR_TYPES[progress_bar][0](max=max).iter diff --git a/venv/Lib/site-packages/pip/_internal/cli/req_command.py 
b/venv/Lib/site-packages/pip/_internal/cli/req_command.py new file mode 100644 index 00000000..3fc00d4f --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/req_command.py @@ -0,0 +1,461 @@ +"""Contains the Command base classes that depend on PipSession. + +The classes in this module are in a separate module so the commands not +needing download / PackageFinder capability don't unnecessarily import the +PackageFinder machinery and all its vendored dependencies, etc. +""" + +import logging +import os +import sys +from functools import partial +from optparse import Values +from typing import Any, List, Optional, Tuple + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.exceptions import CommandError, PreviousBuildDirError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.network.session import PipSession +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, + install_req_from_parsed_requirement, + install_req_from_req_string, +) +from pip._internal.req.req_file import parse_requirements +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import RequirementTracker +from pip._internal.resolution.base import BaseResolver +from pip._internal.self_outdated_check import pip_self_version_check +from pip._internal.utils.temp_dir import ( + TempDirectory, + TempDirectoryTypeRegistry, + tempdir_kinds, +) +from pip._internal.utils.virtualenv import running_under_virtualenv + +logger = logging.getLogger(__name__) + + +class SessionCommandMixin(CommandContextMixIn): + + """ + A class mixin for command classes needing _build_session(). 
+ """ + + def __init__(self): + # type: () -> None + super().__init__() + self._session = None # Optional[PipSession] + + @classmethod + def _get_index_urls(cls, options): + # type: (Values) -> Optional[List[str]] + """Return a list of index urls from user-provided options.""" + index_urls = [] + if not getattr(options, "no_index", False): + url = getattr(options, "index_url", None) + if url: + index_urls.append(url) + urls = getattr(options, "extra_index_urls", None) + if urls: + index_urls.extend(urls) + # Return None rather than an empty list + return index_urls or None + + def get_default_session(self, options): + # type: (Values) -> PipSession + """Get a default-managed session.""" + if self._session is None: + self._session = self.enter_context(self._build_session(options)) + # there's no type annotation on requests.Session, so it's + # automatically ContextManager[Any] and self._session becomes Any, + # then https://github.com/python/mypy/issues/7696 kicks in + assert self._session is not None + return self._session + + def _build_session(self, options, retries=None, timeout=None): + # type: (Values, Optional[int], Optional[int]) -> PipSession + assert not options.cache_dir or os.path.isabs(options.cache_dir) + session = PipSession( + cache=( + os.path.join(options.cache_dir, "http") if options.cache_dir else None + ), + retries=retries if retries is not None else options.retries, + trusted_hosts=options.trusted_hosts, + index_urls=self._get_index_urls(options), + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = timeout if timeout is not None else options.timeout + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + + return session + + +class IndexGroupCommand(Command, SessionCommandMixin): + + """ + Abstract base class for commands with the index_group options. + + This also corresponds to the commands that permit the pip version check. + """ + + def handle_pip_version_check(self, options): + # type: (Values) -> None + """ + Do the pip version check if not disabled. + + This overrides the default behavior of not doing the check. + """ + # Make sure the index_group options are present. + assert hasattr(options, "no_index") + + if options.disable_pip_version_check or options.no_index: + return + + # Otherwise, check if we're using the latest version of pip available. + session = self._build_session( + options, retries=0, timeout=min(5, options.timeout) + ) + with session: + pip_self_version_check(session, options) + + +KEEPABLE_TEMPDIR_TYPES = [ + tempdir_kinds.BUILD_ENV, + tempdir_kinds.EPHEM_WHEEL_CACHE, + tempdir_kinds.REQ_BUILD, +] + + +def warn_if_run_as_root(): + # type: () -> None + """Output a warning for sudo users on Unix. + + In a virtual environment, sudo pip still writes to virtualenv. + On Windows, users may run pip as Administrator without issues. + This warning only applies to Unix root users outside of virtualenv. + """ + if running_under_virtualenv(): + return + if not hasattr(os, "getuid"): + return + # On Windows, there are no "system managed" Python packages. Installing as + # Administrator via pip is the correct way of updating system environments. 
+ # + # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform + # checks: https://mypy.readthedocs.io/en/stable/common_issues.html + if sys.platform == "win32" or sys.platform == "cygwin": + return + if sys.platform == "darwin" or sys.platform == "linux": + if os.getuid() != 0: + return + logger.warning( + "Running pip as root will break packages and permissions. " + "You should install packages reliably by using venv: " + "https://pip.pypa.io/warnings/venv" + ) + + +def with_cleanup(func): + # type: (Any) -> Any + """Decorator for common logic related to managing temporary + directories. + """ + + def configure_tempdir_registry(registry): + # type: (TempDirectoryTypeRegistry) -> None + for t in KEEPABLE_TEMPDIR_TYPES: + registry.set_delete(t, False) + + def wrapper(self, options, args): + # type: (RequirementCommand, Values, List[Any]) -> Optional[int] + assert self.tempdir_registry is not None + if options.no_clean: + configure_tempdir_registry(self.tempdir_registry) + + try: + return func(self, options, args) + except PreviousBuildDirError: + # This kind of conflict can occur when the user passes an explicit + # build directory with a pre-existing folder. In that case we do + # not want to accidentally remove it. + configure_tempdir_registry(self.tempdir_registry) + raise + + return wrapper + + +class RequirementCommand(IndexGroupCommand): + def __init__(self, *args, **kw): + # type: (Any, Any) -> None + super().__init__(*args, **kw) + + self.cmd_opts.add_option(cmdoptions.no_clean()) + + @staticmethod + def determine_resolver_variant(options): + # type: (Values) -> str + """Determines which resolver should be used, based on the given options.""" + if "legacy-resolver" in options.deprecated_features_enabled: + return "legacy" + + return "2020-resolver" + + @classmethod + def make_requirement_preparer( + cls, + temp_build_dir, # type: TempDirectory + options, # type: Values + req_tracker, # type: RequirementTracker + session, # type: PipSession + finder, # type: PackageFinder + use_user_site, # type: bool + download_dir=None, # type: str + ): + # type: (...) -> RequirementPreparer + """ + Create a RequirementPreparer instance for the given parameters. + """ + temp_build_dir_path = temp_build_dir.path + assert temp_build_dir_path is not None + + resolver_variant = cls.determine_resolver_variant(options) + if resolver_variant == "2020-resolver": + lazy_wheel = "fast-deps" in options.features_enabled + if lazy_wheel: + logger.warning( + "pip is using lazily downloaded wheels using HTTP " + "range requests to obtain dependency information. " + "This experimental feature is enabled through " + "--use-feature=fast-deps and it is not ready for " + "production." + ) + else: + lazy_wheel = False + if "fast-deps" in options.features_enabled: + logger.warning( + "fast-deps has no effect when used with the legacy resolver." 
+ ) + + return RequirementPreparer( + build_dir=temp_build_dir_path, + src_dir=options.src_dir, + download_dir=download_dir, + build_isolation=options.build_isolation, + req_tracker=req_tracker, + session=session, + progress_bar=options.progress_bar, + finder=finder, + require_hashes=options.require_hashes, + use_user_site=use_user_site, + lazy_wheel=lazy_wheel, + in_tree_build="in-tree-build" in options.features_enabled, + ) + + @classmethod + def make_resolver( + cls, + preparer, # type: RequirementPreparer + finder, # type: PackageFinder + options, # type: Values + wheel_cache=None, # type: Optional[WheelCache] + use_user_site=False, # type: bool + ignore_installed=True, # type: bool + ignore_requires_python=False, # type: bool + force_reinstall=False, # type: bool + upgrade_strategy="to-satisfy-only", # type: str + use_pep517=None, # type: Optional[bool] + py_version_info=None, # type: Optional[Tuple[int, ...]] + ): + # type: (...) -> BaseResolver + """ + Create a Resolver instance for the given parameters. + """ + make_install_req = partial( + install_req_from_req_string, + isolated=options.isolated_mode, + use_pep517=use_pep517, + ) + resolver_variant = cls.determine_resolver_variant(options) + # The long import name and duplicated invocation is needed to convince + # Mypy into correctly typechecking. Otherwise it would complain the + # "Resolver" class being redefined. + if resolver_variant == "2020-resolver": + import pip._internal.resolution.resolvelib.resolver + + return pip._internal.resolution.resolvelib.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + import pip._internal.resolution.legacy.resolver + + return pip._internal.resolution.legacy.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + + def get_requirements( + self, + args, # type: List[str] + options, # type: Values + finder, # type: PackageFinder + session, # type: PipSession + ): + # type: (...) -> List[InstallRequirement] + """ + Parse command-line arguments into the corresponding requirements. 
+ """ + requirements = [] # type: List[InstallRequirement] + for filename in options.constraints: + for parsed_req in parse_requirements( + filename, + constraint=True, + finder=finder, + options=options, + session=session, + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + user_supplied=False, + ) + requirements.append(req_to_add) + + for req in args: + req_to_add = install_req_from_line( + req, + None, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + for req in options.editables: + req_to_add = install_req_from_editable( + req, + user_supplied=True, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + ) + requirements.append(req_to_add) + + # NOTE: options.require_hashes may be set if --require-hashes is True + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, finder=finder, options=options, session=session + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + # If any requirement has hash options, enable hash checking. + if any(req.has_hash_options for req in requirements): + options.require_hashes = True + + if not (args or options.editables or options.requirements): + opts = {"name": self.name} + if options.find_links: + raise CommandError( + "You must give at least one requirement to {name} " + '(maybe you meant "pip {name} {links}"?)'.format( + **dict(opts, links=" ".join(options.find_links)) + ) + ) + else: + raise CommandError( + "You must give at least one requirement to {name} " + '(see "pip help {name}")'.format(**opts) + ) + + return requirements + + @staticmethod + def trace_basic_info(finder): + # type: (PackageFinder) -> None + """ + Trace basic information about the provided objects. + """ + # Display where finder is looking for packages + search_scope = finder.search_scope + locations = search_scope.get_formatted_locations() + if locations: + logger.info(locations) + + def _build_package_finder( + self, + options, # type: Values + session, # type: PipSession + target_python=None, # type: Optional[TargetPython] + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> PackageFinder + """ + Create a package finder appropriate to this requirement command. + + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. 
+ """ + link_collector = LinkCollector.create(session, options=options) + selection_prefs = SelectionPreferences( + allow_yanked=True, + format_control=options.format_control, + allow_all_prereleases=options.pre, + prefer_binary=options.prefer_binary, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) diff --git a/venv/Lib/site-packages/pip/_internal/cli/spinners.py b/venv/Lib/site-packages/pip/_internal/cli/spinners.py new file mode 100644 index 00000000..08e15661 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/spinners.py @@ -0,0 +1,172 @@ +import contextlib +import itertools +import logging +import sys +import time +from typing import IO, Iterator + +from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation + +logger = logging.getLogger(__name__) + + +class SpinnerInterface: + def spin(self): + # type: () -> None + raise NotImplementedError() + + def finish(self, final_status): + # type: (str) -> None + raise NotImplementedError() + + +class InteractiveSpinner(SpinnerInterface): + def __init__( + self, + message, + file=None, + spin_chars="-\\|/", + # Empirically, 8 updates/second looks nice + min_update_interval_seconds=0.125, + ): + # type: (str, IO[str], str, float) -> None + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... ") + self._width = 0 + + def _write(self, status): + # type: (str) -> None + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self): + # type: () -> None + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status): + # type: (str) -> None + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. +# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. 
+class NonInteractiveSpinner(SpinnerInterface): + def __init__(self, message, min_update_interval_seconds=60): + # type: (str, float) -> None + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status): + # type: (str) -> None + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self): + # type: () -> None + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status): + # type: (str) -> None + if self._finished: + return + self._update(f"finished with status '{final_status}'") + self._finished = True + + +class RateLimiter: + def __init__(self, min_update_interval_seconds): + # type: (float) -> None + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update = 0 # type: float + + def ready(self): + # type: () -> bool + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self): + # type: () -> None + self._last_update = time.time() + + +@contextlib.contextmanager +def open_spinner(message): + # type: (str) -> Iterator[SpinnerInterface] + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. + # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. + if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner = InteractiveSpinner(message) # type: SpinnerInterface + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") + + +@contextlib.contextmanager +def hidden_cursor(file): + # type: (IO[str]) -> Iterator[None] + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + # We don't want to clutter the output with control characters if we're + # writing to a file, or if the user is running with --quiet. 
+ # See https://github.com/pypa/pip/issues/3418 + elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) diff --git a/venv/Lib/site-packages/pip/_internal/cli/status_codes.py b/venv/Lib/site-packages/pip/_internal/cli/status_codes.py new file mode 100644 index 00000000..5e29502c --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/cli/status_codes.py @@ -0,0 +1,6 @@ +SUCCESS = 0 +ERROR = 1 +UNKNOWN_ERROR = 2 +VIRTUALENV_NOT_FOUND = 3 +PREVIOUS_BUILD_DIR_ERROR = 4 +NO_MATCHES_FOUND = 23 diff --git a/venv/Lib/site-packages/pip/_internal/commands/__init__.py b/venv/Lib/site-packages/pip/_internal/commands/__init__.py new file mode 100644 index 00000000..31c985fd --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/__init__.py @@ -0,0 +1,110 @@ +""" +Package containing all pip commands +""" + +import importlib +from collections import OrderedDict, namedtuple +from typing import Any, Optional + +from pip._internal.cli.base_command import Command + +CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary') + +# The ordering matters for help display. +# Also, even though the module path starts with the same +# "pip._internal.commands" prefix in each case, we include the full path +# because it makes testing easier (specifically when modifying commands_dict +# in test setup / teardown by adding info for a FakeCommand class defined +# in a test-related module). +# Finally, we need to pass an iterable of pairs here rather than a dict +# so that the ordering won't be lost when using Python 2.7. +commands_dict = OrderedDict([ + ('install', CommandInfo( + 'pip._internal.commands.install', 'InstallCommand', + 'Install packages.', + )), + ('download', CommandInfo( + 'pip._internal.commands.download', 'DownloadCommand', + 'Download packages.', + )), + ('uninstall', CommandInfo( + 'pip._internal.commands.uninstall', 'UninstallCommand', + 'Uninstall packages.', + )), + ('freeze', CommandInfo( + 'pip._internal.commands.freeze', 'FreezeCommand', + 'Output installed packages in requirements format.', + )), + ('list', CommandInfo( + 'pip._internal.commands.list', 'ListCommand', + 'List installed packages.', + )), + ('show', CommandInfo( + 'pip._internal.commands.show', 'ShowCommand', + 'Show information about installed packages.', + )), + ('check', CommandInfo( + 'pip._internal.commands.check', 'CheckCommand', + 'Verify installed packages have compatible dependencies.', + )), + ('config', CommandInfo( + 'pip._internal.commands.configuration', 'ConfigurationCommand', + 'Manage local and global configuration.', + )), + ('search', CommandInfo( + 'pip._internal.commands.search', 'SearchCommand', + 'Search PyPI for packages.', + )), + ('cache', CommandInfo( + 'pip._internal.commands.cache', 'CacheCommand', + "Inspect and manage pip's wheel cache.", + )), + ('wheel', CommandInfo( + 'pip._internal.commands.wheel', 'WheelCommand', + 'Build wheels from your requirements.', + )), + ('hash', CommandInfo( + 'pip._internal.commands.hash', 'HashCommand', + 'Compute hashes of package archives.', + )), + ('completion', CommandInfo( + 'pip._internal.commands.completion', 'CompletionCommand', + 'A helper command used for command completion.', + )), + ('debug', CommandInfo( + 'pip._internal.commands.debug', 'DebugCommand', + 'Show information useful for debugging.', + )), + ('help', CommandInfo( + 'pip._internal.commands.help', 'HelpCommand', + 'Show help for commands.', + )), +]) # type: 
OrderedDict[str, CommandInfo] + + +def create_command(name, **kwargs): + # type: (str, **Any) -> Command + """ + Create an instance of the Command class with the given name. + """ + module_path, class_name, summary = commands_dict[name] + module = importlib.import_module(module_path) + command_class = getattr(module, class_name) + command = command_class(name=name, summary=summary, **kwargs) + + return command + + +def get_similar_commands(name): + # type: (str) -> Optional[str] + """Command name auto-correct.""" + from difflib import get_close_matches + + name = name.lower() + + close_commands = get_close_matches(name, commands_dict.keys()) + + if close_commands: + return close_commands[0] + else: + return None diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..598c9404 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/cache.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/cache.cpython-36.pyc new file mode 100644 index 00000000..73090c36 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/cache.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/check.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/check.cpython-36.pyc new file mode 100644 index 00000000..5e42134b Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/check.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/completion.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/completion.cpython-36.pyc new file mode 100644 index 00000000..05920fae Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/completion.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-36.pyc new file mode 100644 index 00000000..284718cb Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/debug.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/debug.cpython-36.pyc new file mode 100644 index 00000000..9aa0ed2c Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/debug.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/download.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/download.cpython-36.pyc new file mode 100644 index 00000000..5b27ea25 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/download.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-36.pyc new file mode 100644 index 00000000..0661cb06 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/hash.cpython-36.pyc 
b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/hash.cpython-36.pyc new file mode 100644 index 00000000..52d8489a Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/hash.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/help.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/help.cpython-36.pyc new file mode 100644 index 00000000..1e7eadb6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/help.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-36.pyc new file mode 100644 index 00000000..29dacac7 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/list.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/list.cpython-36.pyc new file mode 100644 index 00000000..28430cf8 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/list.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/search.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/search.cpython-36.pyc new file mode 100644 index 00000000..8a0f4493 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/search.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/show.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/show.cpython-36.pyc new file mode 100644 index 00000000..47502d57 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/show.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-36.pyc new file mode 100644 index 00000000..e3f276f8 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-36.pyc new file mode 100644 index 00000000..05be3f78 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/cache.py b/venv/Lib/site-packages/pip/_internal/commands/cache.py new file mode 100644 index 00000000..5155a505 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/cache.py @@ -0,0 +1,228 @@ +import logging +import os +import textwrap +from optparse import Values +from typing import Any, List + +import pip._internal.utils.filesystem as filesystem +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, PipError + +logger = logging.getLogger(__name__) + + +class CacheCommand(Command): + """ + Inspect and manage pip's wheel cache. + + Subcommands: + + - dir: Show the cache directory. + - info: Show information about the cache. + - list: List filenames of packages stored in the cache. + - remove: Remove one or more package from the cache. 
+ - purge: Remove all items from the cache. + + ```` can be a glob expression or a package name. + """ + + ignore_require_venv = True + usage = """ + %prog dir + %prog info + %prog list [] [--format=[human, abspath]] + %prog remove + %prog purge + """ + + def add_options(self): + # type: () -> None + + self.cmd_opts.add_option( + '--format', + action='store', + dest='list_format', + default="human", + choices=('human', 'abspath'), + help="Select the output format among: human (default) or abspath" + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + # type: (Values, List[Any]) -> int + handlers = { + "dir": self.get_cache_dir, + "info": self.get_cache_info, + "list": self.list_cache_items, + "remove": self.remove_cache_items, + "purge": self.purge_cache, + } + + if not options.cache_dir: + logger.error("pip cache commands can not " + "function since cache is disabled.") + return ERROR + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Error handling happens here, not in the action-handlers. + try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def get_cache_dir(self, options, args): + # type: (Values, List[Any]) -> None + if args: + raise CommandError('Too many arguments') + + logger.info(options.cache_dir) + + def get_cache_info(self, options, args): + # type: (Values, List[Any]) -> None + if args: + raise CommandError('Too many arguments') + + num_http_files = len(self._find_http_files(options)) + num_packages = len(self._find_wheels(options, '*')) + + http_cache_location = self._cache_dir(options, 'http') + wheels_cache_location = self._cache_dir(options, 'wheels') + http_cache_size = filesystem.format_directory_size(http_cache_location) + wheels_cache_size = filesystem.format_directory_size( + wheels_cache_location + ) + + message = textwrap.dedent(""" + Package index page cache location: {http_cache_location} + Package index page cache size: {http_cache_size} + Number of HTTP files: {num_http_files} + Wheels location: {wheels_cache_location} + Wheels size: {wheels_cache_size} + Number of wheels: {package_count} + """).format( + http_cache_location=http_cache_location, + http_cache_size=http_cache_size, + num_http_files=num_http_files, + wheels_cache_location=wheels_cache_location, + package_count=num_packages, + wheels_cache_size=wheels_cache_size, + ).strip() + + logger.info(message) + + def list_cache_items(self, options, args): + # type: (Values, List[Any]) -> None + if len(args) > 1: + raise CommandError('Too many arguments') + + if args: + pattern = args[0] + else: + pattern = '*' + + files = self._find_wheels(options, pattern) + if options.list_format == 'human': + self.format_for_human(files) + else: + self.format_for_abspath(files) + + def format_for_human(self, files): + # type: (List[str]) -> None + if not files: + logger.info('Nothing cached.') + return + + results = [] + for filename in files: + wheel = os.path.basename(filename) + size = filesystem.format_file_size(filename) + results.append(f' - {wheel} ({size})') + logger.info('Cache contents:\n') + logger.info('\n'.join(sorted(results))) + + def format_for_abspath(self, files): + # type: (List[str]) -> None + if not files: + return + + results = [] + for filename in files: + results.append(filename) + + logger.info('\n'.join(sorted(results))) + + def 
remove_cache_items(self, options, args): + # type: (Values, List[Any]) -> None + if len(args) > 1: + raise CommandError('Too many arguments') + + if not args: + raise CommandError('Please provide a pattern') + + files = self._find_wheels(options, args[0]) + + # Only fetch http files if no specific pattern given + if args[0] == '*': + files += self._find_http_files(options) + + if not files: + raise CommandError('No matching packages') + + for filename in files: + os.unlink(filename) + logger.debug('Removed %s', filename) + logger.info('Files removed: %s', len(files)) + + def purge_cache(self, options, args): + # type: (Values, List[Any]) -> None + if args: + raise CommandError('Too many arguments') + + return self.remove_cache_items(options, ['*']) + + def _cache_dir(self, options, subdir): + # type: (Values, str) -> str + return os.path.join(options.cache_dir, subdir) + + def _find_http_files(self, options): + # type: (Values) -> List[str] + http_dir = self._cache_dir(options, 'http') + return filesystem.find_files(http_dir, '*') + + def _find_wheels(self, options, pattern): + # type: (Values, str) -> List[str] + wheel_dir = self._cache_dir(options, 'wheels') + + # The wheel filename format, as specified in PEP 427, is: + # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl + # + # Additionally, non-alphanumeric values in the distribution are + # normalized to underscores (_), meaning hyphens can never occur + # before `-{version}`. + # + # Given that information: + # - If the pattern we're given contains a hyphen (-), the user is + # providing at least the version. Thus, we can just append `*.whl` + # to match the rest of it. + # - If the pattern we're given doesn't contain a hyphen (-), the + # user is only providing the name. Thus, we append `-*.whl` to + # match the hyphen before the version, followed by anything else. 
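Editor's aside (not part of the vendored file): a tiny worked example of the pattern rule spelled out in the comment above, mirroring the one-liner that follows it; wheel_glob is a hypothetical stand-alone helper used only for illustration.

def wheel_glob(pattern):
    # A bare project name still needs the "-" that separates name from version;
    # a pattern already containing "-" is assumed to reach into the version and
    # only needs "*.whl" appended.
    return pattern + ("*.whl" if "-" in pattern else "-*.whl")

assert wheel_glob("requests") == "requests-*.whl"
assert wheel_glob("requests-2.25") == "requests-2.25*.whl"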
+ # + # PEP 427: https://www.python.org/dev/peps/pep-0427/ + pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl") + + return filesystem.find_files(wheel_dir, pattern) diff --git a/venv/Lib/site-packages/pip/_internal/commands/check.py b/venv/Lib/site-packages/pip/_internal/commands/check.py new file mode 100644 index 00000000..70aa5af2 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/check.py @@ -0,0 +1,48 @@ +import logging +from optparse import Values +from typing import Any, List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.operations.check import ( + check_package_set, + create_package_set_from_installed, +) +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class CheckCommand(Command): + """Verify installed packages have compatible dependencies.""" + + usage = """ + %prog [options]""" + + def run(self, options, args): + # type: (Values, List[Any]) -> int + + package_set, parsing_probs = create_package_set_from_installed() + missing, conflicting = check_package_set(package_set) + + for project_name in missing: + version = package_set[project_name].version + for dependency in missing[project_name]: + write_output( + "%s %s requires %s, which is not installed.", + project_name, version, dependency[0], + ) + + for project_name in conflicting: + version = package_set[project_name].version + for dep_name, dep_version, req in conflicting[project_name]: + write_output( + "%s %s has requirement %s, but you have %s %s.", + project_name, version, req, dep_name, dep_version, + ) + + if missing or conflicting or parsing_probs: + return ERROR + else: + write_output("No broken requirements found.") + return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/completion.py b/venv/Lib/site-packages/pip/_internal/commands/completion.py new file mode 100644 index 00000000..92cb7882 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/completion.py @@ -0,0 +1,93 @@ +import sys +import textwrap +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.utils.misc import get_prog + +BASE_COMPLETION = """ +# pip {shell} completion start{script}# pip {shell} completion end +""" + +COMPLETION_SCRIPTS = { + 'bash': """ + _pip_completion() + {{ + COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) ) + }} + complete -o default -F _pip_completion {prog} + """, + 'zsh': """ + function _pip_completion {{ + local words cword + read -Ac words + read -cn cword + reply=( $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$(( cword-1 )) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) + }} + compctl -K _pip_completion {prog} + """, + 'fish': """ + function __fish_complete_pip + set -lx COMP_WORDS (commandline -o) "" + set -lx COMP_CWORD ( \\ + math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ + ) + set -lx PIP_AUTO_COMPLETE 1 + string split \\ -- (eval $COMP_WORDS[1]) + end + complete -fa "(__fish_complete_pip)" -c {prog} + """, +} + + +class CompletionCommand(Command): + """A helper command to be used for command completion.""" + + ignore_require_venv = True + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( + '--bash', '-b', + action='store_const', + const='bash', + dest='shell', + help='Emit completion code for bash') + 
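Editor's aside (not part of the vendored file): each shell function in COMPLETION_SCRIPTS above works by re-invoking pip with PIP_AUTO_COMPLETE=1 and the current command line passed through COMP_WORDS/COMP_CWORD, at which point pip prints candidate completions instead of running a command. A hedged sketch of that contract from Python, assuming a pip executable is on PATH:

import os
import subprocess

# Roughly the request the bash helper above would make for "pip ins<TAB>":
env = dict(os.environ, PIP_AUTO_COMPLETE="1", COMP_WORDS="pip ins", COMP_CWORD="1")
result = subprocess.run(["pip"], env=env, stdout=subprocess.PIPE, universal_newlines=True)
print(result.stdout.split())  # expected to include "install" among the candidates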
self.cmd_opts.add_option( + '--zsh', '-z', + action='store_const', + const='zsh', + dest='shell', + help='Emit completion code for zsh') + self.cmd_opts.add_option( + '--fish', '-f', + action='store_const', + const='fish', + dest='shell', + help='Emit completion code for fish') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + # type: (Values, List[str]) -> int + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ['--' + shell for shell in sorted(shells)] + if options.shell in shells: + script = textwrap.dedent( + COMPLETION_SCRIPTS.get(options.shell, '').format( + prog=get_prog()) + ) + print(BASE_COMPLETION.format(script=script, shell=options.shell)) + return SUCCESS + else: + sys.stderr.write( + 'ERROR: You must pass {}\n' .format(' or '.join(shell_options)) + ) + return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/configuration.py b/venv/Lib/site-packages/pip/_internal/commands/configuration.py new file mode 100644 index 00000000..e13f7142 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/configuration.py @@ -0,0 +1,280 @@ +import logging +import os +import subprocess +from optparse import Values +from typing import Any, List, Optional + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.configuration import ( + Configuration, + Kind, + get_configuration_files, + kinds, +) +from pip._internal.exceptions import PipError +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_prog, write_output + +logger = logging.getLogger(__name__) + + +class ConfigurationCommand(Command): + """ + Manage local and global configuration. + + Subcommands: + + - list: List the active configuration (or from the file specified) + - edit: Edit the configuration file in an editor + - get: Get the value associated with name + - set: Set the name=value + - unset: Unset the value associated with name + - debug: List the configuration files and values defined under them + + If none of --user, --global and --site are passed, a virtual + environment configuration file is used if one is active and the file + exists. Otherwise, all modifications happen on the to the user file by + default. + """ + + ignore_require_venv = True + usage = """ + %prog [] list + %prog [] [--editor ] edit + + %prog [] get name + %prog [] set name value + %prog [] unset name + %prog [] debug + """ + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( + '--editor', + dest='editor', + action='store', + default=None, + help=( + 'Editor to use to edit the file. Uses VISUAL or EDITOR ' + 'environment variables if not provided.' 
+ ) + ) + + self.cmd_opts.add_option( + '--global', + dest='global_file', + action='store_true', + default=False, + help='Use the system-wide configuration file only' + ) + + self.cmd_opts.add_option( + '--user', + dest='user_file', + action='store_true', + default=False, + help='Use the user configuration file only' + ) + + self.cmd_opts.add_option( + '--site', + dest='site_file', + action='store_true', + default=False, + help='Use the current environment configuration file only' + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + # type: (Values, List[str]) -> int + handlers = { + "list": self.list_values, + "edit": self.open_in_editor, + "get": self.get_name, + "set": self.set_name_value, + "unset": self.unset_name, + "debug": self.list_config_values, + } + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Determine which configuration files are to be loaded + # Depends on whether the command is modifying. + try: + load_only = self._determine_file( + options, need_value=(action in ["get", "set", "unset", "edit"]) + ) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + # Load a new configuration + self.configuration = Configuration( + isolated=options.isolated_mode, load_only=load_only + ) + self.configuration.load() + + # Error handling happens here, not in the action-handlers. + try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _determine_file(self, options, need_value): + # type: (Values, bool) -> Optional[Kind] + file_options = [key for key, value in ( + (kinds.USER, options.user_file), + (kinds.GLOBAL, options.global_file), + (kinds.SITE, options.site_file), + ) if value] + + if not file_options: + if not need_value: + return None + # Default to user, unless there's a site file. + elif any( + os.path.exists(site_config_file) + for site_config_file in get_configuration_files()[kinds.SITE] + ): + return kinds.SITE + else: + return kinds.USER + elif len(file_options) == 1: + return file_options[0] + + raise PipError( + "Need exactly one file to operate upon " + "(--user, --site, --global) to perform." 
+ ) + + def list_values(self, options, args): + # type: (Values, List[str]) -> None + self._get_n_args(args, "list", n=0) + + for key, value in sorted(self.configuration.items()): + write_output("%s=%r", key, value) + + def get_name(self, options, args): + # type: (Values, List[str]) -> None + key = self._get_n_args(args, "get [name]", n=1) + value = self.configuration.get_value(key) + + write_output("%s", value) + + def set_name_value(self, options, args): + # type: (Values, List[str]) -> None + key, value = self._get_n_args(args, "set [name] [value]", n=2) + self.configuration.set_value(key, value) + + self._save_configuration() + + def unset_name(self, options, args): + # type: (Values, List[str]) -> None + key = self._get_n_args(args, "unset [name]", n=1) + self.configuration.unset_value(key) + + self._save_configuration() + + def list_config_values(self, options, args): + # type: (Values, List[str]) -> None + """List config key-value pairs across different config files""" + self._get_n_args(args, "debug", n=0) + + self.print_env_var_values() + # Iterate over config files and print if they exist, and the + # key-value pairs present in them if they do + for variant, files in sorted(self.configuration.iter_config_files()): + write_output("%s:", variant) + for fname in files: + with indent_log(): + file_exists = os.path.exists(fname) + write_output("%s, exists: %r", + fname, file_exists) + if file_exists: + self.print_config_file_values(variant) + + def print_config_file_values(self, variant): + # type: (Kind) -> None + """Get key-value pairs from the file of a variant""" + for name, value in self.configuration.\ + get_values_in_config(variant).items(): + with indent_log(): + write_output("%s: %s", name, value) + + def print_env_var_values(self): + # type: () -> None + """Get key-values pairs present as environment variables""" + write_output("%s:", 'env_var') + with indent_log(): + for key, value in sorted(self.configuration.get_environ_vars()): + env_var = f'PIP_{key.upper()}' + write_output("%s=%r", env_var, value) + + def open_in_editor(self, options, args): + # type: (Values, List[str]) -> None + editor = self._determine_editor(options) + + fname = self.configuration.get_file_to_edit() + if fname is None: + raise PipError("Could not determine appropriate file.") + + try: + subprocess.check_call([editor, fname]) + except subprocess.CalledProcessError as e: + raise PipError( + "Editor Subprocess exited with exit code {}" + .format(e.returncode) + ) + + def _get_n_args(self, args, example, n): + # type: (List[str], str, int) -> Any + """Helper to make sure the command got the right number of arguments + """ + if len(args) != n: + msg = ( + 'Got unexpected number of arguments, expected {}. ' + '(example: "{} config {}")' + ).format(n, get_prog(), example) + raise PipError(msg) + + if n == 1: + return args[0] + else: + return args + + def _save_configuration(self): + # type: () -> None + # We successfully ran a modifying command. Need to save the + # configuration. + try: + self.configuration.save() + except Exception: + logger.exception( + "Unable to save configuration. Please report this as a bug." 
+ ) + raise PipError("Internal Error.") + + def _determine_editor(self, options): + # type: (Values) -> str + if options.editor is not None: + return options.editor + elif "VISUAL" in os.environ: + return os.environ["VISUAL"] + elif "EDITOR" in os.environ: + return os.environ["EDITOR"] + else: + raise PipError("Could not determine editor to use.") diff --git a/venv/Lib/site-packages/pip/_internal/commands/debug.py b/venv/Lib/site-packages/pip/_internal/commands/debug.py new file mode 100644 index 00000000..ead5119a --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/debug.py @@ -0,0 +1,215 @@ +import locale +import logging +import os +import sys +from optparse import Values +from types import ModuleType +from typing import Any, Dict, List, Optional + +import pip._vendor +from pip._vendor.certifi import where +from pip._vendor.packaging.version import parse as parse_version + +from pip import __file__ as pip_location +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.configuration import Configuration +from pip._internal.metadata import get_environment +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_pip_version + +logger = logging.getLogger(__name__) + + +def show_value(name, value): + # type: (str, Any) -> None + logger.info('%s: %s', name, value) + + +def show_sys_implementation(): + # type: () -> None + logger.info('sys.implementation:') + implementation_name = sys.implementation.name + with indent_log(): + show_value('name', implementation_name) + + +def create_vendor_txt_map(): + # type: () -> Dict[str, str] + vendor_txt_path = os.path.join( + os.path.dirname(pip_location), + '_vendor', + 'vendor.txt' + ) + + with open(vendor_txt_path) as f: + # Purge non version specifying lines. + # Also, remove any space prefix or suffixes (including comments). + lines = [line.strip().split(' ', 1)[0] + for line in f.readlines() if '==' in line] + + # Transform into "module" -> version dict. + return dict(line.split('==', 1) for line in lines) # type: ignore + + +def get_module_from_module_name(module_name): + # type: (str) -> ModuleType + # Module name can be uppercase in vendor.txt for some reason... + module_name = module_name.lower() + # PATCH: setuptools is actually only pkg_resources. + if module_name == 'setuptools': + module_name = 'pkg_resources' + + __import__( + f'pip._vendor.{module_name}', + globals(), + locals(), + level=0 + ) + return getattr(pip._vendor, module_name) + + +def get_vendor_version_from_module(module_name): + # type: (str) -> Optional[str] + module = get_module_from_module_name(module_name) + version = getattr(module, '__version__', None) + + if not version: + # Try to find version in debundled module info. + env = get_environment([os.path.dirname(module.__file__)]) + dist = env.get_distribution(module_name) + if dist: + version = str(dist.version) + + return version + + +def show_actual_vendor_versions(vendor_txt_versions): + # type: (Dict[str, str]) -> None + """Log the actual version and print extra info if there is + a conflict or if the actual version could not be imported. 
+ """ + for module_name, expected_version in vendor_txt_versions.items(): + extra_message = '' + actual_version = get_vendor_version_from_module(module_name) + if not actual_version: + extra_message = ' (Unable to locate actual module version, using'\ + ' vendor.txt specified version)' + actual_version = expected_version + elif parse_version(actual_version) != parse_version(expected_version): + extra_message = ' (CONFLICT: vendor.txt suggests version should'\ + ' be {})'.format(expected_version) + logger.info('%s==%s%s', module_name, actual_version, extra_message) + + +def show_vendor_versions(): + # type: () -> None + logger.info('vendored library versions:') + + vendor_txt_versions = create_vendor_txt_map() + with indent_log(): + show_actual_vendor_versions(vendor_txt_versions) + + +def show_tags(options): + # type: (Values) -> None + tag_limit = 10 + + target_python = make_target_python(options) + tags = target_python.get_tags() + + # Display the target options that were explicitly provided. + formatted_target = target_python.format_given() + suffix = '' + if formatted_target: + suffix = f' (target: {formatted_target})' + + msg = 'Compatible tags: {}{}'.format(len(tags), suffix) + logger.info(msg) + + if options.verbose < 1 and len(tags) > tag_limit: + tags_limited = True + tags = tags[:tag_limit] + else: + tags_limited = False + + with indent_log(): + for tag in tags: + logger.info(str(tag)) + + if tags_limited: + msg = ( + '...\n' + '[First {tag_limit} tags shown. Pass --verbose to show all.]' + ).format(tag_limit=tag_limit) + logger.info(msg) + + +def ca_bundle_info(config): + # type: (Configuration) -> str + levels = set() + for key, _ in config.items(): + levels.add(key.split('.')[0]) + + if not levels: + return "Not specified" + + levels_that_override_global = ['install', 'wheel', 'download'] + global_overriding_level = [ + level for level in levels if level in levels_that_override_global + ] + if not global_overriding_level: + return 'global' + + if 'global' in levels: + levels.remove('global') + return ", ".join(levels) + + +class DebugCommand(Command): + """ + Display debug information. + """ + + usage = """ + %prog """ + ignore_require_venv = True + + def add_options(self): + # type: () -> None + cmdoptions.add_target_python_options(self.cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + self.parser.config.load() + + def run(self, options, args): + # type: (Values, List[str]) -> int + logger.warning( + "This command is only meant for debugging. " + "Do not use this with automation for parsing and getting these " + "details, since the output and options of this command may " + "change without notice." 
+ ) + show_value('pip version', get_pip_version()) + show_value('sys.version', sys.version) + show_value('sys.executable', sys.executable) + show_value('sys.getdefaultencoding', sys.getdefaultencoding()) + show_value('sys.getfilesystemencoding', sys.getfilesystemencoding()) + show_value( + 'locale.getpreferredencoding', locale.getpreferredencoding(), + ) + show_value('sys.platform', sys.platform) + show_sys_implementation() + + show_value("'cert' config value", ca_bundle_info(self.parser.config)) + show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE')) + show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE')) + show_value("pip._vendor.certifi.where()", where()) + show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED) + + show_vendor_versions() + + show_tags(options) + + return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/download.py b/venv/Lib/site-packages/pip/_internal/commands/download.py new file mode 100644 index 00000000..19f8d6c0 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/download.py @@ -0,0 +1,141 @@ +import logging +import os +from optparse import Values +from typing import List + +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path, write_output +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class DownloadCommand(RequirementCommand): + """ + Download packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports downloading from "requirements files", which provide + an easy way to specify a whole environment to be downloaded. + """ + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] ... + %prog [options] ... 
+ %prog [options] ...""" + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + + self.cmd_opts.add_option( + '-d', '--dest', '--destination-dir', '--destination-directory', + dest='download_dir', + metavar='dir', + default=os.curdir, + help=("Download packages into ."), + ) + + cmdoptions.add_target_python_options(self.cmd_opts) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options, args): + # type: (Values, List[str]) -> int + + options.ignore_installed = True + # editable doesn't really make sense for `pip download`, but the bowels + # of the RequirementSet code require that property. + options.editables = [] + + cmdoptions.check_dist_restriction(options) + + options.download_dir = normalize_path(options.download_dir) + ensure_dir(options.download_dir) + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="download", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + download_dir=options.download_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + ignore_requires_python=options.ignore_requires_python, + py_version_info=options.python_version, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=True + ) + + downloaded = [] # type: List[str] + for req in requirement_set.requirements.values(): + if req.satisfied_by is None: + assert req.name is not None + preparer.save_linked_requirement(req) + downloaded.append(req.name) + if downloaded: + write_output('Successfully downloaded %s', ' '.join(downloaded)) + + return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/freeze.py b/venv/Lib/site-packages/pip/_internal/commands/freeze.py new file mode 100644 index 00000000..430d1018 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/freeze.py @@ -0,0 +1,104 @@ +import sys +from optparse import Values +from typing import List + +from pip._internal.cli import cmdoptions +from 
pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.operations.freeze import freeze +from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.deprecation import deprecated + +DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} + + +class FreezeCommand(Command): + """ + Output installed packages in requirements format. + + packages are listed in a case-insensitive sorted order. + """ + + usage = """ + %prog [options]""" + log_streams = ("ext://sys.stderr", "ext://sys.stderr") + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help="Use the order in the given requirements file and its " + "comments when generating output. This option can be " + "used multiple times.") + self.cmd_opts.add_option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='URL', + help='URL for finding packages, which will be added to the ' + 'output.') + self.cmd_opts.add_option( + '-l', '--local', + dest='local', + action='store_true', + default=False, + help='If in a virtualenv that has global access, do not output ' + 'globally-installed packages.') + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( + '--all', + dest='freeze_all', + action='store_true', + help='Do not skip these packages in the output:' + ' {}'.format(', '.join(DEV_PKGS))) + self.cmd_opts.add_option( + '--exclude-editable', + dest='exclude_editable', + action='store_true', + help='Exclude editable package from output.') + self.cmd_opts.add_option(cmdoptions.list_exclude()) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + # type: (Values, List[str]) -> int + skip = set(stdlib_pkgs) + if not options.freeze_all: + skip.update(DEV_PKGS) + + if options.excludes: + skip.update(options.excludes) + + cmdoptions.check_list_path_option(options) + + if options.find_links: + deprecated( + "--find-links option in pip freeze is deprecated.", + replacement=None, + gone_in="21.2", + issue=9069, + ) + + for line in freeze( + requirement=options.requirements, + find_links=options.find_links, + local_only=options.local, + user_only=options.user, + paths=options.path, + isolated=options.isolated_mode, + skip=skip, + exclude_editable=options.exclude_editable, + ): + sys.stdout.write(line + '\n') + return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/hash.py b/venv/Lib/site-packages/pip/_internal/commands/hash.py new file mode 100644 index 00000000..bca48dcc --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/hash.py @@ -0,0 +1,58 @@ +import hashlib +import logging +import sys +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES +from pip._internal.utils.misc import read_chunks, write_output + +logger = logging.getLogger(__name__) + + +class HashCommand(Command): + """ + Compute a hash of a local package archive. + + These can be used with --hash in a requirements file to do repeatable + installs. + """ + + usage = '%prog [options] ...' 
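Editor's aside (not part of the vendored file): the digest this command prints is a plain hashlib digest of the archive bytes, suitable for pasting after "--hash=sha256:" in a requirements file. A stand-alone equivalent of the _hash_of_file helper defined later in this file, assuming sha256 as the default algorithm (which FAVORITE_HASH names at the time of writing); sha256_of_file is an illustrative name, not part of pip:

import hashlib

def sha256_of_file(path, chunk_size=8192):
    # Stream the archive in chunks and report the hex digest.
    digest = hashlib.sha256()
    with open(path, "rb") as archive:
        for chunk in iter(lambda: archive.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()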
+ ignore_require_venv = True + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( + '-a', '--algorithm', + dest='algorithm', + choices=STRONG_HASHES, + action='store', + default=FAVORITE_HASH, + help='The hash algorithm to use: one of {}'.format( + ', '.join(STRONG_HASHES))) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + # type: (Values, List[str]) -> int + if not args: + self.parser.print_usage(sys.stderr) + return ERROR + + algorithm = options.algorithm + for path in args: + write_output('%s:\n--hash=%s:%s', + path, algorithm, _hash_of_file(path, algorithm)) + return SUCCESS + + +def _hash_of_file(path, algorithm): + # type: (str, str) -> str + """Return the hash digest of a file.""" + with open(path, 'rb') as archive: + hash = hashlib.new(algorithm) + for chunk in read_chunks(archive): + hash.update(chunk) + return hash.hexdigest() diff --git a/venv/Lib/site-packages/pip/_internal/commands/help.py b/venv/Lib/site-packages/pip/_internal/commands/help.py new file mode 100644 index 00000000..79d0eb49 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/help.py @@ -0,0 +1,42 @@ +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError + + +class HelpCommand(Command): + """Show help for commands""" + + usage = """ + %prog """ + ignore_require_venv = True + + def run(self, options, args): + # type: (Values, List[str]) -> int + from pip._internal.commands import ( + commands_dict, + create_command, + get_similar_commands, + ) + + try: + # 'pip help' with no args is handled by pip.__init__.parseopt() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(' - '.join(msg)) + + command = create_command(cmd_name) + command.parser.print_help() + + return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/install.py b/venv/Lib/site-packages/pip/_internal/commands/install.py new file mode 100644 index 00000000..6932f5a6 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/install.py @@ -0,0 +1,740 @@ +import errno +import logging +import operator +import os +import shutil +import site +from optparse import SUPPRESS_HELP, Values +from typing import Iterable, List, Optional + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import ( + RequirementCommand, + warn_if_run_as_root, + with_cleanup, +) +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, InstallationError +from pip._internal.locations import get_scheme +from pip._internal.metadata import get_environment +from pip._internal.models.format_control import FormatControl +from pip._internal.operations.check import ConflictDetails, check_install_conflicts +from pip._internal.req import install_given_reqs +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.distutils_args import parse_distutils_args +from 
pip._internal.utils.filesystem import test_writable_dir +from pip._internal.utils.misc import ( + ensure_dir, + get_pip_version, + protect_pip_from_modification_on_windows, + write_output, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) +from pip._internal.wheel_builder import ( + BinaryAllowedPredicate, + build, + should_build_for_install_command, +) + +logger = logging.getLogger(__name__) + + +def get_check_binary_allowed(format_control): + # type: (FormatControl) -> BinaryAllowedPredicate + def check_binary_allowed(req): + # type: (InstallRequirement) -> bool + canonical_name = canonicalize_name(req.name or "") + allowed_formats = format_control.get_allowed_formats(canonical_name) + return "binary" in allowed_formats + + return check_binary_allowed + + +class InstallCommand(RequirementCommand): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] [-e] ... + %prog [options] [-e] ... + %prog [options] ...""" + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.pre()) + + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option( + '-t', '--target', + dest='target_dir', + metavar='dir', + default=None, + help='Install packages into . ' + 'By default this will not replace existing files/folders in ' + '. Use --upgrade to replace existing packages in ' + 'with new versions.' + ) + cmdoptions.add_target_python_options(self.cmd_opts) + + self.cmd_opts.add_option( + '--user', + dest='use_user_site', + action='store_true', + help="Install to the Python user install directory for your " + "platform. Typically ~/.local/, or %APPDATA%\\Python on " + "Windows. (See the Python documentation for site.USER_BASE " + "for full details.)") + self.cmd_opts.add_option( + '--no-user', + dest='use_user_site', + action='store_false', + help=SUPPRESS_HELP) + self.cmd_opts.add_option( + '--root', + dest='root_path', + metavar='dir', + default=None, + help="Install everything relative to this alternate root " + "directory.") + self.cmd_opts.add_option( + '--prefix', + dest='prefix_path', + metavar='dir', + default=None, + help="Installation prefix where lib, bin and other top-level " + "folders are placed") + + self.cmd_opts.add_option(cmdoptions.build_dir()) + + self.cmd_opts.add_option(cmdoptions.src()) + + self.cmd_opts.add_option( + '-U', '--upgrade', + dest='upgrade', + action='store_true', + help='Upgrade all specified packages to the newest available ' + 'version. The handling of dependencies depends on the ' + 'upgrade-strategy used.' + ) + + self.cmd_opts.add_option( + '--upgrade-strategy', + dest='upgrade_strategy', + default='only-if-needed', + choices=['only-if-needed', 'eager'], + help='Determines how dependency upgrading should be handled ' + '[default: %default]. 
' + '"eager" - dependencies are upgraded regardless of ' + 'whether the currently installed version satisfies the ' + 'requirements of the upgraded package(s). ' + '"only-if-needed" - are upgraded only when they do not ' + 'satisfy the requirements of the upgraded package(s).' + ) + + self.cmd_opts.add_option( + '--force-reinstall', + dest='force_reinstall', + action='store_true', + help='Reinstall all packages even if they are already ' + 'up-to-date.') + + self.cmd_opts.add_option( + '-I', '--ignore-installed', + dest='ignore_installed', + action='store_true', + help='Ignore the installed packages, overwriting them. ' + 'This can break your system if the existing package ' + 'is of a different version or was installed ' + 'with a different package manager!' + ) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + + self.cmd_opts.add_option(cmdoptions.install_options()) + self.cmd_opts.add_option(cmdoptions.global_options()) + + self.cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-warn-script-location", + action="store_false", + dest="warn_script_location", + default=True, + help="Do not warn when installing scripts outside PATH", + ) + self.cmd_opts.add_option( + "--no-warn-conflicts", + action="store_false", + dest="warn_about_conflicts", + default=True, + help="Do not warn about broken dependencies", + ) + + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options, args): + # type: (Values, List[str]) -> int + if options.use_user_site and options.target_dir is not None: + raise CommandError("Can not combine '--user' and '--target'") + + cmdoptions.check_install_build_global(options) + upgrade_strategy = "to-satisfy-only" + if options.upgrade: + upgrade_strategy = options.upgrade_strategy + + cmdoptions.check_dist_restriction(options, check_target=True) + + install_options = options.install_options or [] + + logger.debug("Using %s", get_pip_version()) + options.use_user_site = decide_user_install( + options.use_user_site, + prefix_path=options.prefix_path, + target_dir=options.target_dir, + root_path=options.root_path, + isolated_mode=options.isolated_mode, + ) + + target_temp_dir = None # type: Optional[TempDirectory] + target_temp_dir_path = None # type: Optional[str] + if options.target_dir: + options.ignore_installed = True + options.target_dir = os.path.abspath(options.target_dir) + if (os.path.exists(options.target_dir) and not + os.path.isdir(options.target_dir)): + raise CommandError( + "Target path exists but is not a directory, will not " + "continue." 
+ ) + + # Create a target directory for using with the target option + target_temp_dir = TempDirectory(kind="target") + target_temp_dir_path = target_temp_dir.path + self.enter_context(target_temp_dir) + + global_options = options.global_options or [] + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="install", + globally_managed=True, + ) + + try: + reqs = self.get_requirements(args, options, finder, session) + + reject_location_related_install_options( + reqs, options.install_options + ) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + use_user_site=options.use_user_site, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + ignore_installed=options.ignore_installed, + ignore_requires_python=options.ignore_requires_python, + force_reinstall=options.force_reinstall, + upgrade_strategy=upgrade_strategy, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=not options.target_dir + ) + + try: + pip_req = requirement_set.get_requirement("pip") + except KeyError: + modifying_pip = False + else: + # If we're not replacing an already installed pip, + # we're not modifying it. + modifying_pip = pip_req.satisfied_by is None + protect_pip_from_modification_on_windows( + modifying_pip=modifying_pip + ) + + check_binary_allowed = get_check_binary_allowed( + finder.format_control + ) + + reqs_to_build = [ + r for r in requirement_set.requirements.values() + if should_build_for_install_command( + r, check_binary_allowed + ) + ] + + _, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=True, + build_options=[], + global_options=[], + ) + + # If we're using PEP 517, we cannot do a direct install + # so we fail here. + pep517_build_failure_names = [ + r.name # type: ignore + for r in build_failures if r.use_pep517 + ] # type: List[str] + if pep517_build_failure_names: + raise InstallationError( + "Could not build wheels for {} which use" + " PEP 517 and cannot be installed directly".format( + ", ".join(pep517_build_failure_names) + ) + ) + + # For now, we just warn about failures building legacy + # requirements, as we'll fall through to a direct + # install for those. + for r in build_failures: + if not r.use_pep517: + r.legacy_install_reason = 8368 + + to_install = resolver.get_installation_order( + requirement_set + ) + + # Check for conflicts in the package set we're installing. 
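# --- Illustrative sketch, not part of the vendored pip file above ---
# The step below delegates to pip's internal check_install_conflicts(). The
# same idea -- verifying that each requirement's specifier is satisfied by
# what is (about to be) installed -- can be shown standalone with the
# `packaging` library. Package names and versions here are invented examples.
from packaging.requirements import Requirement
from packaging.version import Version

def find_conflicts(installed, requirement_strings):
    """Return (name, reason) pairs for requirements not met by `installed`."""
    problems = []
    for req_string in requirement_strings:
        req = Requirement(req_string)
        version = installed.get(req.name)
        if version is None:
            problems.append((req.name, f"{req} is missing"))
        elif not req.specifier.contains(Version(version), prereleases=True):
            problems.append((req.name, f"{req} not satisfied by {version}"))
    return problems

# e.g. a pin on urllib3<2 conflicts with an installed urllib3 2.0.0:
print(find_conflicts({"urllib3": "2.0.0"}, ["urllib3<2", "idna>=2.5"]))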
+ conflicts = None # type: Optional[ConflictDetails] + should_warn_about_conflicts = ( + not options.ignore_dependencies and + options.warn_about_conflicts + ) + if should_warn_about_conflicts: + conflicts = self._determine_conflicts(to_install) + + # Don't warn about script install locations if + # --target has been specified + warn_script_location = options.warn_script_location + if options.target_dir: + warn_script_location = False + + installed = install_given_reqs( + to_install, + install_options, + global_options, + root=options.root_path, + home=target_temp_dir_path, + prefix=options.prefix_path, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + pycompile=options.compile, + ) + + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir_path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + env = get_environment(lib_locations) + + installed.sort(key=operator.attrgetter('name')) + items = [] + for result in installed: + item = result.name + try: + installed_dist = env.get_distribution(item) + if installed_dist is not None: + item = f"{item}-{installed_dist.version}" + except Exception: + pass + items.append(item) + + if conflicts is not None: + self._warn_about_conflicts( + conflicts, + resolver_variant=self.determine_resolver_variant(options), + ) + + installed_desc = ' '.join(items) + if installed_desc: + write_output( + 'Successfully installed %s', installed_desc, + ) + except OSError as error: + show_traceback = (self.verbosity >= 1) + + message = create_os_error_message( + error, show_traceback, options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) # noqa + + return ERROR + + if options.target_dir: + assert target_temp_dir + self._handle_target_dir( + options.target_dir, target_temp_dir, options.upgrade + ) + + warn_if_run_as_root() + return SUCCESS + + def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): + # type: (str, TempDirectory, bool) -> None + ensure_dir(target_dir) + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + lib_dir_list = [] + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + scheme = get_scheme('', home=target_temp_dir.path) + purelib_dir = scheme.purelib + platlib_dir = scheme.platlib + data_dir = scheme.data + + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + if os.path.exists(data_dir): + lib_dir_list.append(data_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + if lib_dir == data_dir: + ddir = os.path.join(data_dir, item) + if any(s.startswith(ddir) for s in lib_dir_list[:-1]): + continue + target_item_dir = os.path.join(target_dir, item) + if os.path.exists(target_item_dir): + if not upgrade: + logger.warning( + 'Target directory %s already exists. Specify ' + '--upgrade to force replacement.', + target_item_dir + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + 'Target directory %s already exists and is ' + 'a link. 
pip will not automatically replace ' + 'links, please remove if replacement is ' + 'desired.', + target_item_dir + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move( + os.path.join(lib_dir, item), + target_item_dir + ) + + def _determine_conflicts(self, to_install): + # type: (List[InstallRequirement]) -> Optional[ConflictDetails] + try: + return check_install_conflicts(to_install) + except Exception: + logger.exception( + "Error while checking for conflicts. Please file an issue on " + "pip's issue tracker: https://github.com/pypa/pip/issues/new" + ) + return None + + def _warn_about_conflicts(self, conflict_details, resolver_variant): + # type: (ConflictDetails, str) -> None + package_set, (missing, conflicting) = conflict_details + if not missing and not conflicting: + return + + parts = [] # type: List[str] + if resolver_variant == "legacy": + parts.append( + "pip's legacy dependency resolver does not consider dependency " + "conflicts when selecting packages. This behaviour is the " + "source of the following dependency conflicts." + ) + else: + assert resolver_variant == "2020-resolver" + parts.append( + "pip's dependency resolver does not currently take into account " + "all the packages that are installed. This behaviour is the " + "source of the following dependency conflicts." + ) + + # NOTE: There is some duplication here, with commands/check.py + for project_name in missing: + version = package_set[project_name][0] + for dependency in missing[project_name]: + message = ( + "{name} {version} requires {requirement}, " + "which is not installed." + ).format( + name=project_name, + version=version, + requirement=dependency[1], + ) + parts.append(message) + + for project_name in conflicting: + version = package_set[project_name][0] + for dep_name, dep_version, req in conflicting[project_name]: + message = ( + "{name} {version} requires {requirement}, but {you} have " + "{dep_name} {dep_version} which is incompatible." + ).format( + name=project_name, + version=version, + requirement=req, + dep_name=dep_name, + dep_version=dep_version, + you=("you" if resolver_variant == "2020-resolver" else "you'll") + ) + parts.append(message) + + logger.critical("\n".join(parts)) + + +def get_lib_location_guesses( + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + isolated=False, # type: bool + prefix=None # type: Optional[str] +): + # type:(...) -> List[str] + scheme = get_scheme( + '', + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + return [scheme.purelib, scheme.platlib] + + +def site_packages_writable(root, isolated): + # type: (Optional[str], bool) -> bool + return all( + test_writable_dir(d) for d in set( + get_lib_location_guesses(root=root, isolated=isolated)) + ) + + +def decide_user_install( + use_user_site, # type: Optional[bool] + prefix_path=None, # type: Optional[str] + target_dir=None, # type: Optional[str] + root_path=None, # type: Optional[str] + isolated_mode=False, # type: bool +): + # type: (...) -> bool + """Determine whether to do a user install based on the input options. + + If use_user_site is False, no additional checks are done. + If use_user_site is True, it is checked for compatibility with other + options. + If use_user_site is None, the default behaviour depends on the environment, + which is provided by the other arguments. 
+ """ + # In some cases (config from tox), use_user_site can be set to an integer + # rather than a bool, which 'use_user_site is False' wouldn't catch. + if (use_user_site is not None) and (not use_user_site): + logger.debug("Non-user install by explicit request") + return False + + if use_user_site: + if prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." + ) + logger.debug("User install by explicit request") + return True + + # If we are here, user installs have not been explicitly requested/avoided + assert use_user_site is None + + # user install incompatible with --prefix/--target + if prefix_path or target_dir: + logger.debug("Non-user install due to --prefix or --target option") + return False + + # If user installs are not enabled, choose a non-user install + if not site.ENABLE_USER_SITE: + logger.debug("Non-user install because user site-packages disabled") + return False + + # If we have permission for a non-user install, do that, + # otherwise do a user install. + if site_packages_writable(root=root_path, isolated=isolated_mode): + logger.debug("Non-user install because site-packages writeable") + return False + + logger.info("Defaulting to user installation because normal site-packages " + "is not writeable") + return True + + +def reject_location_related_install_options(requirements, options): + # type: (List[InstallRequirement], Optional[List[str]]) -> None + """If any location-changing --install-option arguments were passed for + requirements or on the command-line, then show a deprecation warning. + """ + def format_options(option_names): + # type: (Iterable[str]) -> List[str] + return ["--{}".format(name.replace("_", "-")) for name in option_names] + + offenders = [] + + for requirement in requirements: + install_options = requirement.install_options + location_options = parse_distutils_args(install_options) + if location_options: + offenders.append( + "{!r} from {}".format( + format_options(location_options.keys()), requirement + ) + ) + + if options: + location_options = parse_distutils_args(options) + if location_options: + offenders.append( + "{!r} from command line".format( + format_options(location_options.keys()) + ) + ) + + if not offenders: + return + + raise CommandError( + "Location-changing options found in --install-option: {}." + " This is unsupported, use pip-level options like --user," + " --prefix, --root, and --target instead.".format( + "; ".join(offenders) + ) + ) + + +def create_os_error_message(error, show_traceback, using_user_site): + # type: (OSError, bool, bool) -> str + """Format an error message for an OSError + + It may occur anytime during the execution of the install command. 
+ """ + parts = [] + + # Mention the error if we are not going to show a traceback + parts.append("Could not install packages due to an OSError") + if not show_traceback: + parts.append(": ") + parts.append(str(error)) + else: + parts.append(".") + + # Spilt the error indication from a helper message (if any) + parts[-1] += "\n" + + # Suggest useful actions to the user: + # (1) using user site-packages or (2) verifying the permissions + if error.errno == errno.EACCES: + user_option_part = "Consider using the `--user` option" + permissions_part = "Check the permissions" + + if not running_under_virtualenv() and not using_user_site: + parts.extend([ + user_option_part, " or ", + permissions_part.lower(), + ]) + else: + parts.append(permissions_part) + parts.append(".\n") + + return "".join(parts).strip() + "\n" diff --git a/venv/Lib/site-packages/pip/_internal/commands/list.py b/venv/Lib/site-packages/pip/_internal/commands/list.py new file mode 100644 index 00000000..dcf94326 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/list.py @@ -0,0 +1,319 @@ +import json +import logging +from optparse import Values +from typing import Iterator, List, Set, Tuple + +from pip._vendor.pkg_resources import Distribution + +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.session import PipSession +from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.misc import ( + dist_is_editable, + get_installed_distributions, + tabulate, + write_output, +) +from pip._internal.utils.packaging import get_installer +from pip._internal.utils.parallel import map_multithread + +logger = logging.getLogger(__name__) + + +class ListCommand(IndexGroupCommand): + """ + List installed packages, including editables. + + Packages are listed in a case-insensitive sorted order. + """ + + ignore_require_venv = True + usage = """ + %prog [options]""" + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( + '-o', '--outdated', + action='store_true', + default=False, + help='List outdated packages') + self.cmd_opts.add_option( + '-u', '--uptodate', + action='store_true', + default=False, + help='List uptodate packages') + self.cmd_opts.add_option( + '-e', '--editable', + action='store_true', + default=False, + help='List editable projects.') + self.cmd_opts.add_option( + '-l', '--local', + action='store_true', + default=False, + help=('If in a virtualenv that has global access, do not list ' + 'globally-installed packages.'), + ) + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help=("Include pre-release and development versions. 
By default, " + "pip only finds stable versions."), + ) + + self.cmd_opts.add_option( + '--format', + action='store', + dest='list_format', + default="columns", + choices=('columns', 'freeze', 'json'), + help="Select the output format among: columns (default), freeze, " + "or json", + ) + + self.cmd_opts.add_option( + '--not-required', + action='store_true', + dest='not_required', + help="List packages that are not dependencies of " + "installed packages.", + ) + + self.cmd_opts.add_option( + '--exclude-editable', + action='store_false', + dest='include_editable', + help='Exclude editable package from output.', + ) + self.cmd_opts.add_option( + '--include-editable', + action='store_true', + dest='include_editable', + help='Include editable package from output.', + default=True, + ) + self.cmd_opts.add_option(cmdoptions.list_exclude()) + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, self.parser + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + def _build_package_finder(self, options, session): + # type: (Values, PipSession) -> PackageFinder + """ + Create a package finder appropriate to this list command. + """ + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=options.pre, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + ) + + def run(self, options, args): + # type: (Values, List[str]) -> int + if options.outdated and options.uptodate: + raise CommandError( + "Options --outdated and --uptodate cannot be combined.") + + cmdoptions.check_list_path_option(options) + + skip = set(stdlib_pkgs) + if options.excludes: + skip.update(options.excludes) + + packages = get_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + include_editables=options.include_editable, + paths=options.path, + skip=skip, + ) + + # get_not_required must be called firstly in order to find and + # filter out all dependencies correctly. Otherwise a package + # can't be identified as requirement because some parent packages + # could be filtered out before. 
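# --- Illustrative sketch, not part of the vendored pip file above ---
# The --not-required filter applied below keeps only distributions that no
# other installed distribution depends on. A rough standalone equivalent
# using the standard library (Python 3.8+); extras and markers are ignored.
from importlib.metadata import distributions
from packaging.requirements import Requirement

def not_required_names():
    dists = list(distributions())
    dep_names = set()
    for dist in dists:
        for req_string in (dist.requires or []):
            dep_names.add(Requirement(req_string).name.lower())
    return sorted(
        d.metadata["Name"]
        for d in dists
        if d.metadata["Name"].lower() not in dep_names
    )

print(not_required_names())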
+ if options.not_required: + packages = self.get_not_required(packages, options) + + if options.outdated: + packages = self.get_outdated(packages, options) + elif options.uptodate: + packages = self.get_uptodate(packages, options) + + self.output_package_listing(packages, options) + return SUCCESS + + def get_outdated(self, packages, options): + # type: (List[Distribution], Values) -> List[Distribution] + return [ + dist for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version > dist.parsed_version + ] + + def get_uptodate(self, packages, options): + # type: (List[Distribution], Values) -> List[Distribution] + return [ + dist for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version == dist.parsed_version + ] + + def get_not_required(self, packages, options): + # type: (List[Distribution], Values) -> List[Distribution] + dep_keys = set() # type: Set[Distribution] + for dist in packages: + dep_keys.update(requirement.key for requirement in dist.requires()) + + # Create a set to remove duplicate packages, and cast it to a list + # to keep the return type consistent with get_outdated and + # get_uptodate + return list({pkg for pkg in packages if pkg.key not in dep_keys}) + + def iter_packages_latest_infos(self, packages, options): + # type: (List[Distribution], Values) -> Iterator[Distribution] + with self._build_session(options) as session: + finder = self._build_package_finder(options, session) + + def latest_info(dist): + # type: (Distribution) -> Distribution + all_candidates = finder.find_all_candidates(dist.key) + if not options.pre: + # Remove prereleases + all_candidates = [candidate for candidate in all_candidates + if not candidate.version.is_prerelease] + + evaluator = finder.make_candidate_evaluator( + project_name=dist.project_name, + ) + best_candidate = evaluator.sort_best_candidate(all_candidates) + if best_candidate is None: + return None + + remote_version = best_candidate.version + if best_candidate.link.is_wheel: + typ = 'wheel' + else: + typ = 'sdist' + # This is dirty but makes the rest of the code much cleaner + dist.latest_version = remote_version + dist.latest_filetype = typ + return dist + + for dist in map_multithread(latest_info, packages): + if dist is not None: + yield dist + + def output_package_listing(self, packages, options): + # type: (List[Distribution], Values) -> None + packages = sorted( + packages, + key=lambda dist: dist.project_name.lower(), + ) + if options.list_format == 'columns' and packages: + data, header = format_for_columns(packages, options) + self.output_package_listing_columns(data, header) + elif options.list_format == 'freeze': + for dist in packages: + if options.verbose >= 1: + write_output("%s==%s (%s)", dist.project_name, + dist.version, dist.location) + else: + write_output("%s==%s", dist.project_name, dist.version) + elif options.list_format == 'json': + write_output(format_for_json(packages, options)) + + def output_package_listing_columns(self, data, header): + # type: (List[List[str]], List[str]) -> None + # insert the header first: we need to know the size of column names + if len(data) > 0: + data.insert(0, header) + + pkg_strings, sizes = tabulate(data) + + # Create and add a separator. 
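# --- Illustrative sketch, not part of the vendored pip file above ---
# What the separator insertion below produces: a `pip list`-style table whose
# dashed rule matches each column's width. The rows are made-up sample data.
def print_columns(rows):
    widths = [max(len(str(cell)) for cell in column) for column in zip(*rows)]
    header, *body = rows
    print(" ".join(str(c).ljust(w) for c, w in zip(header, widths)))
    print(" ".join("-" * w for w in widths))
    for row in body:
        print(" ".join(str(c).ljust(w) for c, w in zip(row, widths)))

print_columns([["Package", "Version"], ["selenium", "3.141.0"], ["pywin32", "300"]])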
+ if len(data) > 0: + pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes))) + + for val in pkg_strings: + write_output(val) + + +def format_for_columns(pkgs, options): + # type: (List[Distribution], Values) -> Tuple[List[List[str]], List[str]] + """ + Convert the package data into something usable + by output_package_listing_columns. + """ + running_outdated = options.outdated + # Adjust the header for the `pip list --outdated` case. + if running_outdated: + header = ["Package", "Version", "Latest", "Type"] + else: + header = ["Package", "Version"] + + data = [] + if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs): + header.append("Location") + if options.verbose >= 1: + header.append("Installer") + + for proj in pkgs: + # if we're working on the 'outdated' list, separate out the + # latest_version and type + row = [proj.project_name, proj.version] + + if running_outdated: + row.append(proj.latest_version) + row.append(proj.latest_filetype) + + if options.verbose >= 1 or dist_is_editable(proj): + row.append(proj.location) + if options.verbose >= 1: + row.append(get_installer(proj)) + + data.append(row) + + return data, header + + +def format_for_json(packages, options): + # type: (List[Distribution], Values) -> str + data = [] + for dist in packages: + info = { + 'name': dist.project_name, + 'version': str(dist.version), + } + if options.verbose >= 1: + info['location'] = dist.location + info['installer'] = get_installer(dist) + if options.outdated: + info['latest_version'] = str(dist.latest_version) + info['latest_filetype'] = dist.latest_filetype + data.append(info) + return json.dumps(data) diff --git a/venv/Lib/site-packages/pip/_internal/commands/search.py b/venv/Lib/site-packages/pip/_internal/commands/search.py new file mode 100644 index 00000000..d66e8234 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/search.py @@ -0,0 +1,162 @@ +import logging +import shutil +import sys +import textwrap +import xmlrpc.client +from collections import OrderedDict +from optparse import Values +from typing import TYPE_CHECKING, Dict, List, Optional + +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin +from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.metadata import get_default_environment +from pip._internal.models.index import PyPI +from pip._internal.network.xmlrpc import PipXmlrpcTransport +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import write_output + +if TYPE_CHECKING: + from typing import TypedDict + + class TransformedHit(TypedDict): + name: str + summary: str + versions: List[str] + +logger = logging.getLogger(__name__) + + +class SearchCommand(Command, SessionCommandMixin): + """Search for PyPI packages whose name or summary contains .""" + + usage = """ + %prog [options] """ + ignore_require_venv = True + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( + '-i', '--index', + dest='index', + metavar='URL', + default=PyPI.pypi_url, + help='Base URL of Python Package Index (default %default)') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + # type: (Values, List[str]) -> int + if not args: + raise CommandError('Missing required argument (search query).') + query = args + pypi_hits = self.search(query, options) + hits = 
transform_hits(pypi_hits) + + terminal_width = None + if sys.stdout.isatty(): + terminal_width = shutil.get_terminal_size()[0] + + print_results(hits, terminal_width=terminal_width) + if pypi_hits: + return SUCCESS + return NO_MATCHES_FOUND + + def search(self, query, options): + # type: (List[str], Values) -> List[Dict[str, str]] + index_url = options.index + + session = self.get_default_session(options) + + transport = PipXmlrpcTransport(index_url, session) + pypi = xmlrpc.client.ServerProxy(index_url, transport) + try: + hits = pypi.search({'name': query, 'summary': query}, 'or') + except xmlrpc.client.Fault as fault: + message = "XMLRPC request failed [code: {code}]\n{string}".format( + code=fault.faultCode, + string=fault.faultString, + ) + raise CommandError(message) + assert isinstance(hits, list) + return hits + + +def transform_hits(hits): + # type: (List[Dict[str, str]]) -> List[TransformedHit] + """ + The list from pypi is really a list of versions. We want a list of + packages with the list of versions stored inline. This converts the + list from pypi into one we can use. + """ + packages = OrderedDict() # type: OrderedDict[str, TransformedHit] + for hit in hits: + name = hit['name'] + summary = hit['summary'] + version = hit['version'] + + if name not in packages.keys(): + packages[name] = { + 'name': name, + 'summary': summary, + 'versions': [version], + } + else: + packages[name]['versions'].append(version) + + # if this is the highest version, replace summary and score + if version == highest_version(packages[name]['versions']): + packages[name]['summary'] = summary + + return list(packages.values()) + + +def print_results(hits, name_column_width=None, terminal_width=None): + # type: (List[TransformedHit], Optional[int], Optional[int]) -> None + if not hits: + return + if name_column_width is None: + name_column_width = max([ + len(hit['name']) + len(highest_version(hit.get('versions', ['-']))) + for hit in hits + ]) + 4 + + env = get_default_environment() + for hit in hits: + name = hit['name'] + summary = hit['summary'] or '' + latest = highest_version(hit.get('versions', ['-'])) + if terminal_width is not None: + target_width = terminal_width - name_column_width - 5 + if target_width > 10: + # wrap and indent summary to fit terminal + summary_lines = textwrap.wrap(summary, target_width) + summary = ('\n' + ' ' * (name_column_width + 3)).join( + summary_lines) + + name_latest = f'{name} ({latest})' + line = f'{name_latest:{name_column_width}} - {summary}' + try: + write_output(line) + dist = env.get_distribution(name) + if dist is not None: + with indent_log(): + if dist.version == latest: + write_output('INSTALLED: %s (latest)', dist.version) + else: + write_output('INSTALLED: %s', dist.version) + if parse_version(latest).pre: + write_output('LATEST: %s (pre-release; install' + ' with "pip install --pre")', latest) + else: + write_output('LATEST: %s', latest) + except UnicodeEncodeError: + pass + + +def highest_version(versions): + # type: (List[str]) -> str + return max(versions, key=parse_version) diff --git a/venv/Lib/site-packages/pip/_internal/commands/show.py b/venv/Lib/site-packages/pip/_internal/commands/show.py new file mode 100644 index 00000000..24e855a8 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/show.py @@ -0,0 +1,181 @@ +import logging +import os +from email.parser import FeedParser +from optparse import Values +from typing import Dict, Iterator, List + +from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import 
canonicalize_name + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class ShowCommand(Command): + """ + Show information about one or more installed packages. + + The output is in RFC-compliant mail header format. + """ + + usage = """ + %prog [options] ...""" + ignore_require_venv = True + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( + '-f', '--files', + dest='files', + action='store_true', + default=False, + help='Show the full list of installed files for each package.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + # type: (Values, List[str]) -> int + if not args: + logger.warning('ERROR: Please provide a package name or names.') + return ERROR + query = args + + results = search_packages_info(query) + if not print_results( + results, list_files=options.files, verbose=options.verbose): + return ERROR + return SUCCESS + + +def search_packages_info(query): + # type: (List[str]) -> Iterator[Dict[str, str]] + """ + Gather details from installed distributions. Print distribution name, + version, location, and installed files. Installed files requires a + pip generated 'installed-files.txt' in the distributions '.egg-info' + directory. + """ + installed = {} + for p in pkg_resources.working_set: + installed[canonicalize_name(p.project_name)] = p + + query_names = [canonicalize_name(name) for name in query] + missing = sorted( + [name for name, pkg in zip(query, query_names) if pkg not in installed] + ) + if missing: + logger.warning('Package(s) not found: %s', ', '.join(missing)) + + def get_requiring_packages(package_name): + # type: (str) -> List[str] + canonical_name = canonicalize_name(package_name) + return [ + pkg.project_name for pkg in pkg_resources.working_set + if canonical_name in + [canonicalize_name(required.name) for required in + pkg.requires()] + ] + + for dist in [installed[pkg] for pkg in query_names if pkg in installed]: + package = { + 'name': dist.project_name, + 'version': dist.version, + 'location': dist.location, + 'requires': [dep.project_name for dep in dist.requires()], + 'required_by': get_requiring_packages(dist.project_name) + } + file_list = None + metadata = '' + if isinstance(dist, pkg_resources.DistInfoDistribution): + # RECORDs should be part of .dist-info metadatas + if dist.has_metadata('RECORD'): + lines = dist.get_metadata_lines('RECORD') + paths = [line.split(',')[0] for line in lines] + paths = [os.path.join(dist.location, p) for p in paths] + file_list = [os.path.relpath(p, dist.location) for p in paths] + + if dist.has_metadata('METADATA'): + metadata = dist.get_metadata('METADATA') + else: + # Otherwise use pip's log for .egg-info's + if dist.has_metadata('installed-files.txt'): + paths = dist.get_metadata_lines('installed-files.txt') + paths = [os.path.join(dist.egg_info, p) for p in paths] + file_list = [os.path.relpath(p, dist.location) for p in paths] + + if dist.has_metadata('PKG-INFO'): + metadata = dist.get_metadata('PKG-INFO') + + if dist.has_metadata('entry_points.txt'): + entry_points = dist.get_metadata_lines('entry_points.txt') + package['entry_points'] = entry_points + + if dist.has_metadata('INSTALLER'): + for line in dist.get_metadata_lines('INSTALLER'): + if line.strip(): + package['installer'] = line.strip() + break + + # @todo: Should pkg_resources.Distribution have a + # `get_pkg_info` method? 
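# --- Illustrative sketch, not part of the vendored pip file above ---
# The FeedParser call below parses a package's METADATA/PKG-INFO file, which
# uses RFC 822 style headers. The standard library exposes the same data via
# importlib.metadata; "pip" is used here only as a package likely to exist.
from importlib.metadata import metadata

meta = metadata("pip")  # behaves like an email.message.Message
print(meta["Name"], meta["Version"])
print(meta["Summary"])
# Repeated headers such as Classifier need get_all() -- the same limitation
# the surrounding code works around by re-scanning the raw metadata lines.
for classifier in (meta.get_all("Classifier") or []):
    print(" ", classifier)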
+ feed_parser = FeedParser() + feed_parser.feed(metadata) + pkg_info_dict = feed_parser.close() + for key in ('metadata-version', 'summary', + 'home-page', 'author', 'author-email', 'license'): + package[key] = pkg_info_dict.get(key) + + # It looks like FeedParser cannot deal with repeated headers + classifiers = [] + for line in metadata.splitlines(): + if line.startswith('Classifier: '): + classifiers.append(line[len('Classifier: '):]) + package['classifiers'] = classifiers + + if file_list: + package['files'] = sorted(file_list) + yield package + + +def print_results(distributions, list_files=False, verbose=False): + # type: (Iterator[Dict[str, str]], bool, bool) -> bool + """ + Print the information from installed distributions found. + """ + results_printed = False + for i, dist in enumerate(distributions): + results_printed = True + if i > 0: + write_output("---") + + write_output("Name: %s", dist.get('name', '')) + write_output("Version: %s", dist.get('version', '')) + write_output("Summary: %s", dist.get('summary', '')) + write_output("Home-page: %s", dist.get('home-page', '')) + write_output("Author: %s", dist.get('author', '')) + write_output("Author-email: %s", dist.get('author-email', '')) + write_output("License: %s", dist.get('license', '')) + write_output("Location: %s", dist.get('location', '')) + write_output("Requires: %s", ', '.join(dist.get('requires', []))) + write_output("Required-by: %s", ', '.join(dist.get('required_by', []))) + + if verbose: + write_output("Metadata-Version: %s", + dist.get('metadata-version', '')) + write_output("Installer: %s", dist.get('installer', '')) + write_output("Classifiers:") + for classifier in dist.get('classifiers', []): + write_output(" %s", classifier) + write_output("Entry-points:") + for entry in dist.get('entry_points', []): + write_output(" %s", entry.strip()) + if list_files: + write_output("Files:") + for line in dist.get('files', []): + write_output(" %s", line.strip()) + if "files" not in dist: + write_output("Cannot locate installed-files.txt") + return results_printed diff --git a/venv/Lib/site-packages/pip/_internal/commands/uninstall.py b/venv/Lib/site-packages/pip/_internal/commands/uninstall.py new file mode 100644 index 00000000..9a3c9f88 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/uninstall.py @@ -0,0 +1,92 @@ +from optparse import Values +from typing import List + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import InstallationError +from pip._internal.req import parse_requirements +from pip._internal.req.constructors import ( + install_req_from_line, + install_req_from_parsed_requirement, +) +from pip._internal.utils.misc import protect_pip_from_modification_on_windows + + +class UninstallCommand(Command, SessionCommandMixin): + """ + Uninstall packages. + + pip is able to uninstall most installed packages. Known exceptions are: + + - Pure distutils packages installed with ``python setup.py install``, which + leave behind no metadata to determine what files were installed. + - Script wrappers installed by ``python setup.py develop``. + """ + + usage = """ + %prog [options] ... 
+ %prog [options] -r ...""" + + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help='Uninstall all the packages listed in the given requirements ' + 'file. This option can be used multiple times.', + ) + self.cmd_opts.add_option( + '-y', '--yes', + dest='yes', + action='store_true', + help="Don't ask for confirmation of uninstall deletions.") + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + # type: (Values, List[str]) -> int + session = self.get_default_session(options) + + reqs_to_uninstall = {} + for name in args: + req = install_req_from_line( + name, isolated=options.isolated_mode, + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, + options=options, + session=session): + req = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + if not reqs_to_uninstall: + raise InstallationError( + f'You must give at least one requirement to {self.name} (see ' + f'"pip help {self.name}")' + ) + + protect_pip_from_modification_on_windows( + modifying_pip="pip" in reqs_to_uninstall + ) + + for req in reqs_to_uninstall.values(): + uninstall_pathset = req.uninstall( + auto_confirm=options.yes, verbose=self.verbosity > 0, + ) + if uninstall_pathset: + uninstall_pathset.commit() + + warn_if_run_as_root() + return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/wheel.py b/venv/Lib/site-packages/pip/_internal/commands/wheel.py new file mode 100644 index 00000000..ff47dbac --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/commands/wheel.py @@ -0,0 +1,178 @@ +import logging +import os +import shutil +from optparse import Values +from typing import List + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.wheel_builder import build, should_build_for_wheel_command + +logger = logging.getLogger(__name__) + + +class WheelCommand(RequirementCommand): + """ + Build Wheel archives for your requirements and dependencies. + + Wheel is a built-package format, and offers the advantage of not + recompiling your software during every install. For more details, see the + wheel docs: https://wheel.readthedocs.io/en/latest/ + + Requirements: setuptools>=0.8, and wheel. + + 'pip wheel' uses the bdist_wheel setuptools extension from the wheel + package to build individual wheels. + + """ + + usage = """ + %prog [options] ... + %prog [options] -r ... + %prog [options] [-e] ... + %prog [options] [-e] ... 
+ %prog [options] ...""" + + def add_options(self): + # type: () -> None + + self.cmd_opts.add_option( + '-w', '--wheel-dir', + dest='wheel_dir', + metavar='dir', + default=os.curdir, + help=("Build wheels into , where the default is the " + "current working directory."), + ) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + self.cmd_opts.add_option( + '--no-verify', + dest='no_verify', + action='store_true', + default=False, + help="Don't verify if built wheel is valid.", + ) + + self.cmd_opts.add_option(cmdoptions.build_options()) + self.cmd_opts.add_option(cmdoptions.global_options()) + + self.cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help=("Include pre-release and development versions. By default, " + "pip only finds stable versions."), + ) + + self.cmd_opts.add_option(cmdoptions.require_hashes()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options, args): + # type: (Values, List[str]) -> int + cmdoptions.check_install_build_global(options) + + session = self.get_default_session(options) + + finder = self._build_package_finder(options, session) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + options.wheel_dir = normalize_path(options.wheel_dir) + ensure_dir(options.wheel_dir) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="wheel", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + download_dir=options.wheel_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + ignore_requires_python=options.ignore_requires_python, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=True + ) + + reqs_to_build = [] # type: List[InstallRequirement] + for req in requirement_set.requirements.values(): + if req.is_wheel: + preparer.save_linked_requirement(req) + elif should_build_for_wheel_command(req): + reqs_to_build.append(req) + + # build wheels + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=(not options.no_verify), + build_options=options.build_options or [], + global_options=options.global_options or [], + ) + for req in build_successes: + assert req.link and req.link.is_wheel + assert req.local_file_path + # copy from cache to target directory + try: + 
shutil.copy(req.local_file_path, options.wheel_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + build_failures.append(req) + if len(build_failures) != 0: + raise CommandError( + "Failed to build one or more wheels" + ) + + return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/configuration.py b/venv/Lib/site-packages/pip/_internal/configuration.py new file mode 100644 index 00000000..a4698ec1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/configuration.py @@ -0,0 +1,403 @@ +"""Configuration management setup + +Some terminology: +- name + As written in config files. +- value + Value associated with a name +- key + Name combined with it's section (section.name) +- variant + A single word describing where the configuration key-value pair came from +""" + +import configparser +import locale +import logging +import os +import sys +from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple + +from pip._internal.exceptions import ( + ConfigurationError, + ConfigurationFileCouldNotBeLoaded, +) +from pip._internal.utils import appdirs +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.misc import ensure_dir, enum + +RawConfigParser = configparser.RawConfigParser # Shorthand +Kind = NewType("Kind", str) + +CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf' +ENV_NAMES_IGNORED = "version", "help" + +# The kinds of configurations there are. +kinds = enum( + USER="user", # User Specific + GLOBAL="global", # System Wide + SITE="site", # [Virtual] Environment Specific + ENV="env", # from PIP_CONFIG_FILE + ENV_VAR="env-var", # from Environment Variables +) +OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR +VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE + +logger = logging.getLogger(__name__) + + +# NOTE: Maybe use the optionx attribute to normalize keynames. +def _normalize_name(name): + # type: (str) -> str + """Make a name consistent regardless of source (environment or file) + """ + name = name.lower().replace('_', '-') + if name.startswith('--'): + name = name[2:] # only prefer long opts + return name + + +def _disassemble_key(name): + # type: (str) -> List[str] + if "." not in name: + error_message = ( + "Key does not contain dot separated section and key. " + "Perhaps you wanted to use 'global.{}' instead?" + ).format(name) + raise ConfigurationError(error_message) + return name.split(".", 1) + + +def get_configuration_files(): + # type: () -> Dict[Kind, List[str]] + global_config_files = [ + os.path.join(path, CONFIG_BASENAME) + for path in appdirs.site_config_dirs('pip') + ] + + site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) + legacy_config_file = os.path.join( + os.path.expanduser('~'), + 'pip' if WINDOWS else '.pip', + CONFIG_BASENAME, + ) + new_config_file = os.path.join( + appdirs.user_config_dir("pip"), CONFIG_BASENAME + ) + return { + kinds.GLOBAL: global_config_files, + kinds.SITE: [site_config_file], + kinds.USER: [legacy_config_file, new_config_file], + } + + +class Configuration: + """Handles management of configuration. + + Provides an interface to accessing and managing configuration files. + + This class converts provides an API that takes "section.key-name" style + keys and stores the value associated with it as "key-name" under the + section "section". 
+ + This allows for a clean interface wherein the both the section and the + key-name are preserved in an easy to manage form in the configuration files + and the data stored is also nice. + """ + + def __init__(self, isolated, load_only=None): + # type: (bool, Optional[Kind]) -> None + super().__init__() + + if load_only is not None and load_only not in VALID_LOAD_ONLY: + raise ConfigurationError( + "Got invalid value for load_only - should be one of {}".format( + ", ".join(map(repr, VALID_LOAD_ONLY)) + ) + ) + self.isolated = isolated + self.load_only = load_only + + # Because we keep track of where we got the data from + self._parsers = { + variant: [] for variant in OVERRIDE_ORDER + } # type: Dict[Kind, List[Tuple[str, RawConfigParser]]] + self._config = { + variant: {} for variant in OVERRIDE_ORDER + } # type: Dict[Kind, Dict[str, Any]] + self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]] + + def load(self): + # type: () -> None + """Loads configuration from configuration files and environment + """ + self._load_config_files() + if not self.isolated: + self._load_environment_vars() + + def get_file_to_edit(self): + # type: () -> Optional[str] + """Returns the file with highest priority in configuration + """ + assert self.load_only is not None, \ + "Need to be specified a file to be editing" + + try: + return self._get_parser_to_modify()[0] + except IndexError: + return None + + def items(self): + # type: () -> Iterable[Tuple[str, Any]] + """Returns key-value pairs like dict.items() representing the loaded + configuration + """ + return self._dictionary.items() + + def get_value(self, key): + # type: (str) -> Any + """Get a value from the configuration. + """ + try: + return self._dictionary[key] + except KeyError: + raise ConfigurationError(f"No such key - {key}") + + def set_value(self, key, value): + # type: (str, Any) -> None + """Modify a value in the configuration. + """ + self._ensure_have_load_only() + + assert self.load_only + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + + # Modify the parser and the configuration + if not parser.has_section(section): + parser.add_section(section) + parser.set(section, name, value) + + self._config[self.load_only][key] = value + self._mark_as_modified(fname, parser) + + def unset_value(self, key): + # type: (str) -> None + """Unset a value in the configuration.""" + self._ensure_have_load_only() + + assert self.load_only + if key not in self._config[self.load_only]: + raise ConfigurationError(f"No such key - {key}") + + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + if not (parser.has_section(section) + and parser.remove_option(section, name)): + # The option was not removed. + raise ConfigurationError( + "Fatal Internal error [id=1]. Please report as a bug." + ) + + # The section may be empty after the option was removed. + if not parser.items(section): + parser.remove_section(section) + self._mark_as_modified(fname, parser) + + del self._config[self.load_only][key] + + def save(self): + # type: () -> None + """Save the current in-memory state. + """ + self._ensure_have_load_only() + + for fname, parser in self._modified_parsers: + logger.info("Writing to %s", fname) + + # Ensure directory exists. 
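# --- Illustrative sketch, not part of the vendored pip file above ---
# What a save() of a user-level configuration amounts to: write an INI file
# with RawConfigParser after ensuring its directory exists. The Windows path
# below (%APPDATA%\pip\pip.ini) is pip's documented per-user location; treat
# the exact path as an assumption and adjust for your platform.
import configparser
import os

config = configparser.RawConfigParser()
config.add_section("global")
config.set("global", "timeout", "60")

path = os.path.join(os.environ.get("APPDATA", "."), "pip", "pip.ini")
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, "w") as f:
    config.write(f)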
+ ensure_dir(os.path.dirname(fname)) + + with open(fname, "w") as f: + parser.write(f) + + # + # Private routines + # + + def _ensure_have_load_only(self): + # type: () -> None + if self.load_only is None: + raise ConfigurationError("Needed a specific file to be modifying.") + logger.debug("Will be working with %s variant only", self.load_only) + + @property + def _dictionary(self): + # type: () -> Dict[str, Any] + """A dictionary representing the loaded configuration. + """ + # NOTE: Dictionaries are not populated if not loaded. So, conditionals + # are not needed here. + retval = {} + + for variant in OVERRIDE_ORDER: + retval.update(self._config[variant]) + + return retval + + def _load_config_files(self): + # type: () -> None + """Loads configuration from configuration files + """ + config_files = dict(self.iter_config_files()) + if config_files[kinds.ENV][0:1] == [os.devnull]: + logger.debug( + "Skipping loading configuration files due to " + "environment's PIP_CONFIG_FILE being os.devnull" + ) + return + + for variant, files in config_files.items(): + for fname in files: + # If there's specific variant set in `load_only`, load only + # that variant, not the others. + if self.load_only is not None and variant != self.load_only: + logger.debug( + "Skipping file '%s' (variant: %s)", fname, variant + ) + continue + + parser = self._load_file(variant, fname) + + # Keeping track of the parsers used + self._parsers[variant].append((fname, parser)) + + def _load_file(self, variant, fname): + # type: (Kind, str) -> RawConfigParser + logger.debug("For variant '%s', will try loading '%s'", variant, fname) + parser = self._construct_parser(fname) + + for section in parser.sections(): + items = parser.items(section) + self._config[variant].update(self._normalized_keys(section, items)) + + return parser + + def _construct_parser(self, fname): + # type: (str) -> RawConfigParser + parser = configparser.RawConfigParser() + # If there is no such file, don't bother reading it but create the + # parser anyway, to hold the data. + # Doing this is useful when modifying and saving files, where we don't + # need to construct a parser. + if os.path.exists(fname): + try: + parser.read(fname) + except UnicodeDecodeError: + # See https://github.com/pypa/pip/issues/4963 + raise ConfigurationFileCouldNotBeLoaded( + reason="contains invalid {} characters".format( + locale.getpreferredencoding(False) + ), + fname=fname, + ) + except configparser.Error as error: + # See https://github.com/pypa/pip/issues/4893 + raise ConfigurationFileCouldNotBeLoaded(error=error) + return parser + + def _load_environment_vars(self): + # type: () -> None + """Loads configuration from environment variables + """ + self._config[kinds.ENV_VAR].update( + self._normalized_keys(":env:", self.get_environ_vars()) + ) + + def _normalized_keys(self, section, items): + # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any] + """Normalizes items to construct a dictionary with normalized keys. + + This routine is where the names become keys and are made the same + regardless of source - configuration files or environment. + """ + normalized = {} + for name, val in items: + key = section + "." 
+ _normalize_name(name) + normalized[key] = val + return normalized + + def get_environ_vars(self): + # type: () -> Iterable[Tuple[str, str]] + """Returns a generator with all environmental vars with prefix PIP_""" + for key, val in os.environ.items(): + if key.startswith("PIP_"): + name = key[4:].lower() + if name not in ENV_NAMES_IGNORED: + yield name, val + + # XXX: This is patched in the tests. + def iter_config_files(self): + # type: () -> Iterable[Tuple[Kind, List[str]]] + """Yields variant and configuration files associated with it. + + This should be treated like items of a dictionary. + """ + # SMELL: Move the conditions out of this function + + # environment variables have the lowest priority + config_file = os.environ.get('PIP_CONFIG_FILE', None) + if config_file is not None: + yield kinds.ENV, [config_file] + else: + yield kinds.ENV, [] + + config_files = get_configuration_files() + + # at the base we have any global configuration + yield kinds.GLOBAL, config_files[kinds.GLOBAL] + + # per-user configuration next + should_load_user_config = not self.isolated and not ( + config_file and os.path.exists(config_file) + ) + if should_load_user_config: + # The legacy config file is overridden by the new config file + yield kinds.USER, config_files[kinds.USER] + + # finally virtualenv configuration first trumping others + yield kinds.SITE, config_files[kinds.SITE] + + def get_values_in_config(self, variant): + # type: (Kind) -> Dict[str, Any] + """Get values present in a config file""" + return self._config[variant] + + def _get_parser_to_modify(self): + # type: () -> Tuple[str, RawConfigParser] + # Determine which parser to modify + assert self.load_only + parsers = self._parsers[self.load_only] + if not parsers: + # This should not happen if everything works correctly. + raise ConfigurationError( + "Fatal Internal error [id=2]. Please report as a bug." + ) + + # Use the highest priority parser. + return parsers[-1] + + # XXX: This is patched in the tests. + def _mark_as_modified(self, fname, parser): + # type: (str, RawConfigParser) -> None + file_parser_tuple = (fname, parser) + if file_parser_tuple not in self._modified_parsers: + self._modified_parsers.append(file_parser_tuple) + + def __repr__(self): + # type: () -> str + return f"{self.__class__.__name__}({self._dictionary!r})" diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__init__.py b/venv/Lib/site-packages/pip/_internal/distributions/__init__.py new file mode 100644 index 00000000..a222f248 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/distributions/__init__.py @@ -0,0 +1,20 @@ +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.distributions.sdist import SourceDistribution +from pip._internal.distributions.wheel import WheelDistribution +from pip._internal.req.req_install import InstallRequirement + + +def make_distribution_for_install_requirement(install_req): + # type: (InstallRequirement) -> AbstractDistribution + """Returns a Distribution for the given InstallRequirement""" + # Editable requirements will always be source distributions. They use the + # legacy logic until we create a modern standard for them. 
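# --- Illustrative sketch, not part of the vendored pip file above ---
# Background for the branch below: whether a requirement "is a wheel" comes
# down to the link's filename ending in .whl. A stripped-down equivalent
# check, with example filenames:
def looks_like_wheel(filename: str) -> bool:
    return filename.lower().endswith(".whl")

print(looks_like_wheel("selenium-3.141.0-py2.py3-none-any.whl"))  # True
print(looks_like_wheel("pywin32-300.tar.gz"))                      # False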
+ if install_req.editable: + return SourceDistribution(install_req) + + # If it's a wheel, it's a WheelDistribution + if install_req.is_wheel: + return WheelDistribution(install_req) + + # Otherwise, a SourceDistribution + return SourceDistribution(install_req) diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..73278d60 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-36.pyc new file mode 100644 index 00000000..7bf48dc3 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-36.pyc new file mode 100644 index 00000000..a1b52e0d Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-36.pyc new file mode 100644 index 00000000..0b2e42ee Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-36.pyc new file mode 100644 index 00000000..a7b8d138 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/base.py b/venv/Lib/site-packages/pip/_internal/distributions/base.py new file mode 100644 index 00000000..78ee91e7 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/distributions/base.py @@ -0,0 +1,39 @@ +import abc +from typing import Optional + +from pip._vendor.pkg_resources import Distribution + +from pip._internal.index.package_finder import PackageFinder +from pip._internal.req import InstallRequirement + + +class AbstractDistribution(metaclass=abc.ABCMeta): + """A base class for handling installable artifacts. + + The requirements for anything installable are as follows: + + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). + + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + + - we must be able to create a Distribution object exposing the + above metadata. 
+ """ + + def __init__(self, req): + # type: (InstallRequirement) -> None + super().__init__() + self.req = req + + @abc.abstractmethod + def get_pkg_resources_distribution(self): + # type: () -> Optional[Distribution] + raise NotImplementedError() + + @abc.abstractmethod + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + raise NotImplementedError() diff --git a/venv/Lib/site-packages/pip/_internal/distributions/installed.py b/venv/Lib/site-packages/pip/_internal/distributions/installed.py new file mode 100644 index 00000000..b19dfacb --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/distributions/installed.py @@ -0,0 +1,22 @@ +from typing import Optional + +from pip._vendor.pkg_resources import Distribution + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.index.package_finder import PackageFinder + + +class InstalledDistribution(AbstractDistribution): + """Represents an installed package. + + This does not need any preparation as the required information has already + been computed. + """ + + def get_pkg_resources_distribution(self): + # type: () -> Optional[Distribution] + return self.req.satisfied_by + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + pass diff --git a/venv/Lib/site-packages/pip/_internal/distributions/sdist.py b/venv/Lib/site-packages/pip/_internal/distributions/sdist.py new file mode 100644 index 00000000..c873a9f1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/distributions/sdist.py @@ -0,0 +1,95 @@ +import logging +from typing import Set, Tuple + +from pip._vendor.pkg_resources import Distribution + +from pip._internal.build_env import BuildEnvironment +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.exceptions import InstallationError +from pip._internal.index.package_finder import PackageFinder +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +class SourceDistribution(AbstractDistribution): + """Represents a source distribution. + + The preparation step for these needs metadata for the packages to be + generated, either using PEP 517 or using the legacy `setup.py egg_info`. + """ + + def get_pkg_resources_distribution(self): + # type: () -> Distribution + return self.req.get_dist() + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + # Load pyproject.toml, to determine whether PEP 517 is to be used + self.req.load_pyproject_toml() + + # Set up the build isolation, if this requirement should be isolated + should_isolate = self.req.use_pep517 and build_isolation + if should_isolate: + self._setup_isolation(finder) + + self.req.prepare_metadata() + + def _setup_isolation(self, finder): + # type: (PackageFinder) -> None + def _raise_conflicts(conflicting_with, conflicting_reqs): + # type: (str, Set[Tuple[str, str]]) -> None + format_string = ( + "Some build dependencies for {requirement} " + "conflict with {conflicting_with}: {description}." + ) + error_message = format_string.format( + requirement=self.req, + conflicting_with=conflicting_with, + description=", ".join( + f"{installed} is incompatible with {wanted}" + for installed, wanted in sorted(conflicting) + ), + ) + raise InstallationError(error_message) + + # Isolate in a BuildEnvironment and install the build-time + # requirements. 
+ pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + + self.req.build_env = BuildEnvironment() + self.req.build_env.install_requirements( + finder, pyproject_requires, "overlay", "Installing build dependencies" + ) + conflicting, missing = self.req.build_env.check_requirements( + self.req.requirements_to_check + ) + if conflicting: + _raise_conflicts("PEP 517/518 supported requirements", conflicting) + if missing: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "The project does not specify a build backend, and " + "pip cannot fall back to setuptools without %s.", + " and ".join(map(repr, sorted(missing))), + ) + # Install any extra build dependencies that the backend requests. + # This must be done in a second pass, as the pyproject.toml + # dependencies must be installed before we can call the backend. + with self.req.build_env: + runner = runner_with_spinner_message("Getting requirements to build wheel") + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + reqs = backend.get_requires_for_build_wheel() + + conflicting, missing = self.req.build_env.check_requirements(reqs) + if conflicting: + _raise_conflicts("the backend dependencies", conflicting) + self.req.build_env.install_requirements( + finder, missing, "normal", "Installing backend dependencies" + ) diff --git a/venv/Lib/site-packages/pip/_internal/distributions/wheel.py b/venv/Lib/site-packages/pip/_internal/distributions/wheel.py new file mode 100644 index 00000000..d0384797 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/distributions/wheel.py @@ -0,0 +1,34 @@ +from zipfile import ZipFile + +from pip._vendor.pkg_resources import Distribution + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.index.package_finder import PackageFinder +from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel + + +class WheelDistribution(AbstractDistribution): + """Represents a wheel distribution. + + This does not need any preparation as wheels can be directly unpacked. + """ + + def get_pkg_resources_distribution(self): + # type: () -> Distribution + """Loads the metadata from the wheel file into memory and returns a + Distribution that uses it, not relying on the wheel file or + requirement. + """ + # Set as part of preparation during download. + assert self.req.local_file_path + # Wheels are never unnamed. 
+ assert self.req.name + + with ZipFile(self.req.local_file_path, allowZip64=True) as z: + return pkg_resources_distribution_for_wheel( + z, self.req.name, self.req.local_file_path + ) + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + pass diff --git a/venv/Lib/site-packages/pip/_internal/exceptions.py b/venv/Lib/site-packages/pip/_internal/exceptions.py new file mode 100644 index 00000000..8aacf812 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/exceptions.py @@ -0,0 +1,397 @@ +"""Exceptions used throughout package""" + +import configparser +from itertools import chain, groupby, repeat +from typing import TYPE_CHECKING, Dict, List, Optional + +from pip._vendor.pkg_resources import Distribution +from pip._vendor.requests.models import Request, Response + +if TYPE_CHECKING: + from hashlib import _Hash + + from pip._internal.req.req_install import InstallRequirement + + +class PipError(Exception): + """Base pip exception""" + + +class ConfigurationError(PipError): + """General exception in configuration""" + + +class InstallationError(PipError): + """General exception during installation""" + + +class UninstallationError(PipError): + """General exception during uninstallation""" + + +class NoneMetadataError(PipError): + """ + Raised when accessing "METADATA" or "PKG-INFO" metadata for a + pip._vendor.pkg_resources.Distribution object and + `dist.has_metadata('METADATA')` returns True but + `dist.get_metadata('METADATA')` returns None (and similarly for + "PKG-INFO"). + """ + + def __init__(self, dist, metadata_name): + # type: (Distribution, str) -> None + """ + :param dist: A Distribution object. + :param metadata_name: The name of the metadata being accessed + (can be "METADATA" or "PKG-INFO"). + """ + self.dist = dist + self.metadata_name = metadata_name + + def __str__(self): + # type: () -> str + # Use `dist` in the error message because its stringification + # includes more information, like the version and location. + return ( + 'None {} metadata found for distribution: {}'.format( + self.metadata_name, self.dist, + ) + ) + + +class UserInstallationInvalid(InstallationError): + """A --user install is requested on an environment without user site.""" + + def __str__(self): + # type: () -> str + return "User base directory is not specified" + + +class InvalidSchemeCombination(InstallationError): + def __str__(self): + # type: () -> str + before = ", ".join(str(a) for a in self.args[:-1]) + return f"Cannot set {before} and {self.args[-1]} together" + + +class DistributionNotFound(InstallationError): + """Raised when a distribution cannot be found to satisfy a requirement""" + + +class RequirementsFileParseError(InstallationError): + """Raised when a general error occurs parsing a requirements file line.""" + + +class BestVersionAlreadyInstalled(PipError): + """Raised when the most up-to-date version of a package is already + installed.""" + + +class BadCommand(PipError): + """Raised when virtualenv or a command is not found""" + + +class CommandError(PipError): + """Raised when there is an error in command-line arguments""" + + +class PreviousBuildDirError(PipError): + """Raised when there's a previous conflicting build directory""" + + +class NetworkConnectionError(PipError): + """HTTP connection error""" + + def __init__(self, error_msg, response=None, request=None): + # type: (str, Response, Request) -> None + """ + Initialize NetworkConnectionError with `request` and `response` + objects. 
+ """ + self.response = response + self.request = request + self.error_msg = error_msg + if (self.response is not None and not self.request and + hasattr(response, 'request')): + self.request = self.response.request + super().__init__(error_msg, response, request) + + def __str__(self): + # type: () -> str + return str(self.error_msg) + + +class InvalidWheelFilename(InstallationError): + """Invalid wheel filename.""" + + +class UnsupportedWheel(InstallationError): + """Unsupported wheel.""" + + +class MetadataInconsistent(InstallationError): + """Built metadata contains inconsistent information. + + This is raised when the metadata contains values (e.g. name and version) + that do not match the information previously obtained from sdist filename + or user-supplied ``#egg=`` value. + """ + def __init__(self, ireq, field, f_val, m_val): + # type: (InstallRequirement, str, str, str) -> None + self.ireq = ireq + self.field = field + self.f_val = f_val + self.m_val = m_val + + def __str__(self): + # type: () -> str + template = ( + "Requested {} has inconsistent {}: " + "filename has {!r}, but metadata has {!r}" + ) + return template.format(self.ireq, self.field, self.f_val, self.m_val) + + +class InstallationSubprocessError(InstallationError): + """A subprocess call failed during installation.""" + def __init__(self, returncode, description): + # type: (int, str) -> None + self.returncode = returncode + self.description = description + + def __str__(self): + # type: () -> str + return ( + "Command errored out with exit status {}: {} " + "Check the logs for full command output." + ).format(self.returncode, self.description) + + +class HashErrors(InstallationError): + """Multiple HashError instances rolled into one for reporting""" + + def __init__(self): + # type: () -> None + self.errors = [] # type: List[HashError] + + def append(self, error): + # type: (HashError) -> None + self.errors.append(error) + + def __str__(self): + # type: () -> str + lines = [] + self.errors.sort(key=lambda e: e.order) + for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): + lines.append(cls.head) + lines.extend(e.body() for e in errors_of_cls) + if lines: + return '\n'.join(lines) + return '' + + def __nonzero__(self): + # type: () -> bool + return bool(self.errors) + + def __bool__(self): + # type: () -> bool + return self.__nonzero__() + + +class HashError(InstallationError): + """ + A failure to verify a package against known-good hashes + + :cvar order: An int sorting hash exception classes by difficulty of + recovery (lower being harder), so the user doesn't bother fretting + about unpinned packages when he has deeper issues, like VCS + dependencies, to deal with. Also keeps error reports in a + deterministic order. + :cvar head: A section heading for display above potentially many + exceptions of this kind + :ivar req: The InstallRequirement that triggered this error. This is + pasted on after the exception is instantiated, because it's not + typically available earlier. + + """ + req = None # type: Optional[InstallRequirement] + head = '' + order = -1 # type: int + + def body(self): + # type: () -> str + """Return a summary of me for display under the heading. + + This default implementation simply prints a description of the + triggering requirement. + + :param req: The InstallRequirement that provoked this error, with + its link already populated by the resolver's _populate_link(). 
+ + """ + return f' {self._requirement_name()}' + + def __str__(self): + # type: () -> str + return f'{self.head}\n{self.body()}' + + def _requirement_name(self): + # type: () -> str + """Return a description of the requirement that triggered me. + + This default implementation returns long description of the req, with + line numbers + + """ + return str(self.req) if self.req else 'unknown package' + + +class VcsHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 0 + head = ("Can't verify hashes for these requirements because we don't " + "have a way to hash version control repositories:") + + +class DirectoryUrlHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 1 + head = ("Can't verify hashes for these file:// requirements because they " + "point to directories:") + + +class HashMissing(HashError): + """A hash was needed for a requirement but is absent.""" + + order = 2 + head = ('Hashes are required in --require-hashes mode, but they are ' + 'missing from some requirements. Here is a list of those ' + 'requirements along with the hashes their downloaded archives ' + 'actually had. Add lines like these to your requirements files to ' + 'prevent tampering. (If you did not enable --require-hashes ' + 'manually, note that it turns on automatically when any package ' + 'has a hash.)') + + def __init__(self, gotten_hash): + # type: (str) -> None + """ + :param gotten_hash: The hash of the (possibly malicious) archive we + just downloaded + """ + self.gotten_hash = gotten_hash + + def body(self): + # type: () -> str + # Dodge circular import. + from pip._internal.utils.hashes import FAVORITE_HASH + + package = None + if self.req: + # In the case of URL-based requirements, display the original URL + # seen in the requirements file rather than the package name, + # so the output can be directly copied into the requirements file. + package = (self.req.original_link if self.req.original_link + # In case someone feeds something downright stupid + # to InstallRequirement's constructor. + else getattr(self.req, 'req', None)) + return ' {} --hash={}:{}'.format(package or 'unknown package', + FAVORITE_HASH, + self.gotten_hash) + + +class HashUnpinned(HashError): + """A requirement had a hash specified but was not pinned to a specific + version.""" + + order = 3 + head = ('In --require-hashes mode, all requirements must have their ' + 'versions pinned with ==. These do not:') + + +class HashMismatch(HashError): + """ + Distribution file hash values don't match. + + :ivar package_name: The name of the package that triggered the hash + mismatch. Feel free to write to this after the exception is raise to + improve its error message. + + """ + order = 4 + head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS ' + 'FILE. If you have updated the package versions, please update ' + 'the hashes. 
Otherwise, examine the package contents carefully; ' + 'someone may have tampered with them.') + + def __init__(self, allowed, gots): + # type: (Dict[str, List[str]], Dict[str, _Hash]) -> None + """ + :param allowed: A dict of algorithm names pointing to lists of allowed + hex digests + :param gots: A dict of algorithm names pointing to hashes we + actually got from the files under suspicion + """ + self.allowed = allowed + self.gots = gots + + def body(self): + # type: () -> str + return ' {}:\n{}'.format(self._requirement_name(), + self._hash_comparison()) + + def _hash_comparison(self): + # type: () -> str + """ + Return a comparison of actual and expected hash values. + + Example:: + + Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde + or 123451234512345123451234512345123451234512345 + Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef + + """ + def hash_then_or(hash_name): + # type: (str) -> chain[str] + # For now, all the decent hashes have 6-char names, so we can get + # away with hard-coding space literals. + return chain([hash_name], repeat(' or')) + + lines = [] # type: List[str] + for hash_name, expecteds in self.allowed.items(): + prefix = hash_then_or(hash_name) + lines.extend((' Expected {} {}'.format(next(prefix), e)) + for e in expecteds) + lines.append(' Got {}\n'.format( + self.gots[hash_name].hexdigest())) + return '\n'.join(lines) + + +class UnsupportedPythonVersion(InstallationError): + """Unsupported python version according to Requires-Python package + metadata.""" + + +class ConfigurationFileCouldNotBeLoaded(ConfigurationError): + """When there are errors while loading a configuration file + """ + + def __init__(self, reason="could not be loaded", fname=None, error=None): + # type: (str, Optional[str], Optional[configparser.Error]) -> None + super().__init__(error) + self.reason = reason + self.fname = fname + self.error = error + + def __str__(self): + # type: () -> str + if self.fname is not None: + message_part = f" in {self.fname}." 
+ else: + assert self.error is not None + message_part = f".\n{self.error}\n" + return f"Configuration file {self.reason}{message_part}" diff --git a/venv/Lib/site-packages/pip/_internal/index/__init__.py b/venv/Lib/site-packages/pip/_internal/index/__init__.py new file mode 100644 index 00000000..7a17b7b3 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/index/__init__.py @@ -0,0 +1,2 @@ +"""Index interaction code +""" diff --git a/venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..7aacde80 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-36.pyc new file mode 100644 index 00000000..0621a526 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-36.pyc new file mode 100644 index 00000000..8613f7b1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-36.pyc new file mode 100644 index 00000000..51fb9bb1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/index/collector.py b/venv/Lib/site-packages/pip/_internal/index/collector.py new file mode 100644 index 00000000..0721e368 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/index/collector.py @@ -0,0 +1,556 @@ +""" +The main purpose of this module is to expose LinkCollector.collect_sources(). +""" + +import cgi +import collections +import functools +import html +import itertools +import logging +import os +import re +import urllib.parse +import urllib.request +import xml.etree.ElementTree +from optparse import Values +from typing import ( + Callable, + Iterable, + List, + MutableMapping, + NamedTuple, + Optional, + Sequence, + Union, +) + +from pip._vendor import html5lib, requests +from pip._vendor.requests import Response +from pip._vendor.requests.exceptions import RetryError, SSLError + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import pairwise, redact_auth_from_url +from pip._internal.vcs import vcs + +from .sources import CandidatesFromPage, LinkSource, build_source + +logger = logging.getLogger(__name__) + +HTMLElement = xml.etree.ElementTree.Element +ResponseHeaders = MutableMapping[str, str] + + +def _match_vcs_scheme(url): + # type: (str) -> Optional[str] + """Look for VCS schemes in the URL. + + Returns the matched VCS scheme, or None if there's no match. 
+ """ + for scheme in vcs.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + return scheme + return None + + +class _NotHTML(Exception): + def __init__(self, content_type, request_desc): + # type: (str, str) -> None + super().__init__(content_type, request_desc) + self.content_type = content_type + self.request_desc = request_desc + + +def _ensure_html_header(response): + # type: (Response) -> None + """Check the Content-Type header to ensure the response contains HTML. + + Raises `_NotHTML` if the content type is not text/html. + """ + content_type = response.headers.get("Content-Type", "") + if not content_type.lower().startswith("text/html"): + raise _NotHTML(content_type, response.request.method) + + +class _NotHTTP(Exception): + pass + + +def _ensure_html_response(url, session): + # type: (str, PipSession) -> None + """Send a HEAD request to the URL, and ensure the response contains HTML. + + Raises `_NotHTTP` if the URL is not available for a HEAD request, or + `_NotHTML` if the content type is not text/html. + """ + scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url) + if scheme not in {'http', 'https'}: + raise _NotHTTP() + + resp = session.head(url, allow_redirects=True) + raise_for_status(resp) + + _ensure_html_header(resp) + + +def _get_html_response(url, session): + # type: (str, PipSession) -> Response + """Access an HTML page with GET, and return the response. + + This consists of three parts: + + 1. If the URL looks suspiciously like an archive, send a HEAD first to + check the Content-Type is HTML, to avoid downloading a large file. + Raise `_NotHTTP` if the content type cannot be determined, or + `_NotHTML` if it is not HTML. + 2. Actually perform the request. Raise HTTP exceptions on network failures. + 3. Check the Content-Type header to make sure we got HTML, and raise + `_NotHTML` otherwise. + """ + if is_archive_file(Link(url).filename): + _ensure_html_response(url, session=session) + + logger.debug('Getting page %s', redact_auth_from_url(url)) + + resp = session.get( + url, + headers={ + "Accept": "text/html", + # We don't want to blindly returned cached data for + # /simple/, because authors generally expecting that + # twine upload && pip install will function, but if + # they've done a pip install in the last ~10 minutes + # it won't. Thus by setting this to zero we will not + # blindly use any cached data, however the benefit of + # using max-age=0 instead of no-cache, is that we will + # still support conditional requests, so we will still + # minimize traffic sent in cases where the page hasn't + # changed at all, we will just always incur the round + # trip for the conditional GET now instead of only + # once per 10 minutes. + # For more information, please see pypa/pip#5670. + "Cache-Control": "max-age=0", + }, + ) + raise_for_status(resp) + + # The check for archives above only works if the url ends with + # something that looks like an archive. However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. + _ensure_html_header(resp) + + return resp + + +def _get_encoding_from_headers(headers): + # type: (ResponseHeaders) -> Optional[str] + """Determine if we have any encoding information in our headers. 
+ """ + if headers and "Content-Type" in headers: + content_type, params = cgi.parse_header(headers["Content-Type"]) + if "charset" in params: + return params['charset'] + return None + + +def _determine_base_url(document, page_url): + # type: (HTMLElement, str) -> str + """Determine the HTML document's base URL. + + This looks for a ```` tag in the HTML document. If present, its href + attribute denotes the base URL of anchor tags in the document. If there is + no such tag (or if it does not have a valid href attribute), the HTML + file's URL is used as the base URL. + + :param document: An HTML document representation. The current + implementation expects the result of ``html5lib.parse()``. + :param page_url: The URL of the HTML document. + """ + for base in document.findall(".//base"): + href = base.get("href") + if href is not None: + return href + return page_url + + +def _clean_url_path_part(part): + # type: (str) -> str + """ + Clean a "part" of a URL path (i.e. after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + return urllib.parse.quote(urllib.parse.unquote(part)) + + +def _clean_file_url_path(part): + # type: (str) -> str + """ + Clean the first part of a URL path that corresponds to a local + filesystem path (i.e. the first part after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + # Also, on Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + return urllib.request.pathname2url(urllib.request.url2pathname(part)) + + +# percent-encoded: / +_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE) + + +def _clean_url_path(path, is_local_path): + # type: (str, bool) -> str + """ + Clean the path portion of a URL. + """ + if is_local_path: + clean_func = _clean_file_url_path + else: + clean_func = _clean_url_path_part + + # Split on the reserved characters prior to cleaning so that + # revision strings in VCS URLs are properly preserved. + parts = _reserved_chars_re.split(path) + + cleaned_parts = [] + for to_clean, reserved in pairwise(itertools.chain(parts, [''])): + cleaned_parts.append(clean_func(to_clean)) + # Normalize %xx escapes (e.g. %2f -> %2F) + cleaned_parts.append(reserved.upper()) + + return ''.join(cleaned_parts) + + +def _clean_link(url): + # type: (str) -> str + """ + Make sure a link is fully quoted. + For example, if ' ' occurs in the URL, it will be replaced with "%20", + and without double-quoting other characters. + """ + # Split the URL into parts according to the general structure + # `scheme://netloc/path;parameters?query#fragment`. + result = urllib.parse.urlparse(url) + # If the netloc is empty, then the URL refers to a local filesystem path. + is_local_path = not result.netloc + path = _clean_url_path(result.path, is_local_path=is_local_path) + return urllib.parse.urlunparse(result._replace(path=path)) + + +def _create_link_from_element( + anchor, # type: HTMLElement + page_url, # type: str + base_url, # type: str +): + # type: (...) -> Optional[Link] + """ + Convert an anchor element in a simple repository page to a Link. 
+ """ + href = anchor.get("href") + if not href: + return None + + url = _clean_link(urllib.parse.urljoin(base_url, href)) + pyrequire = anchor.get('data-requires-python') + pyrequire = html.unescape(pyrequire) if pyrequire else None + + yanked_reason = anchor.get('data-yanked') + if yanked_reason: + yanked_reason = html.unescape(yanked_reason) + + link = Link( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + ) + + return link + + +class CacheablePageContent: + def __init__(self, page): + # type: (HTMLPage) -> None + assert page.cache_link_parsing + self.page = page + + def __eq__(self, other): + # type: (object) -> bool + return (isinstance(other, type(self)) and + self.page.url == other.page.url) + + def __hash__(self): + # type: () -> int + return hash(self.page.url) + + +def with_cached_html_pages( + fn, # type: Callable[[HTMLPage], Iterable[Link]] +): + # type: (...) -> Callable[[HTMLPage], List[Link]] + """ + Given a function that parses an Iterable[Link] from an HTMLPage, cache the + function's result (keyed by CacheablePageContent), unless the HTMLPage + `page` has `page.cache_link_parsing == False`. + """ + + @functools.lru_cache(maxsize=None) + def wrapper(cacheable_page): + # type: (CacheablePageContent) -> List[Link] + return list(fn(cacheable_page.page)) + + @functools.wraps(fn) + def wrapper_wrapper(page): + # type: (HTMLPage) -> List[Link] + if page.cache_link_parsing: + return wrapper(CacheablePageContent(page)) + return list(fn(page)) + + return wrapper_wrapper + + +@with_cached_html_pages +def parse_links(page): + # type: (HTMLPage) -> Iterable[Link] + """ + Parse an HTML document, and yield its anchor elements as Link objects. + """ + document = html5lib.parse( + page.content, + transport_encoding=page.encoding, + namespaceHTMLElements=False, + ) + + url = page.url + base_url = _determine_base_url(document, url) + for anchor in document.findall(".//a"): + link = _create_link_from_element( + anchor, + page_url=url, + base_url=base_url, + ) + if link is None: + continue + yield link + + +class HTMLPage: + """Represents one page, along with its URL""" + + def __init__( + self, + content, # type: bytes + encoding, # type: Optional[str] + url, # type: str + cache_link_parsing=True, # type: bool + ): + # type: (...) -> None + """ + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + :param cache_link_parsing: whether links parsed from this page's url + should be cached. PyPI index urls should + have this set to False, for example. + """ + self.content = content + self.encoding = encoding + self.url = url + self.cache_link_parsing = cache_link_parsing + + def __str__(self): + # type: () -> str + return redact_auth_from_url(self.url) + + +def _handle_get_page_fail( + link, # type: Link + reason, # type: Union[str, Exception] + meth=None # type: Optional[Callable[..., None]] +): + # type: (...) 
-> None + if meth is None: + meth = logger.debug + meth("Could not fetch URL %s: %s - skipping", link, reason) + + +def _make_html_page(response, cache_link_parsing=True): + # type: (Response, bool) -> HTMLPage + encoding = _get_encoding_from_headers(response.headers) + return HTMLPage( + response.content, + encoding=encoding, + url=response.url, + cache_link_parsing=cache_link_parsing) + + +def _get_html_page(link, session=None): + # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] + if session is None: + raise TypeError( + "_get_html_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url.split('#', 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. + vcs_scheme = _match_vcs_scheme(url) + if vcs_scheme: + logger.warning('Cannot look at %s URL %s because it does not support ' + 'lookup as web pages.', vcs_scheme, link) + return None + + # Tack index.html onto file:// URLs that point to directories + scheme, _, path, _, _, _ = urllib.parse.urlparse(url) + if (scheme == 'file' and os.path.isdir(urllib.request.url2pathname(path))): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith('/'): + url += '/' + url = urllib.parse.urljoin(url, 'index.html') + logger.debug(' file: URL is directory, getting %s', url) + + try: + resp = _get_html_response(url, session=session) + except _NotHTTP: + logger.warning( + 'Skipping page %s because it looks like an archive, and cannot ' + 'be checked by a HTTP HEAD request.', link, + ) + except _NotHTML as exc: + logger.warning( + 'Skipping page %s because the %s request got Content-Type: %s.' + 'The only supported Content-Type is text/html', + link, exc.request_desc, exc.content_type, + ) + except NetworkConnectionError as exc: + _handle_get_page_fail(link, exc) + except RetryError as exc: + _handle_get_page_fail(link, exc) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + _handle_get_page_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: + _handle_get_page_fail(link, f"connection error: {exc}") + except requests.Timeout: + _handle_get_page_fail(link, "timed out") + else: + return _make_html_page(resp, + cache_link_parsing=link.cache_link_parsing) + return None + + +class CollectedSources(NamedTuple): + find_links: Sequence[Optional[LinkSource]] + index_urls: Sequence[Optional[LinkSource]] + + +class LinkCollector: + + """ + Responsible for collecting Link objects from all configured locations, + making network requests as needed. + + The class's main method is its collect_sources() method. + """ + + def __init__( + self, + session, # type: PipSession + search_scope, # type: SearchScope + ): + # type: (...) -> None + self.search_scope = search_scope + self.session = session + + @classmethod + def create(cls, session, options, suppress_no_index=False): + # type: (PipSession, Values, bool) -> LinkCollector + """ + :param session: The Session to use to make requests. + :param suppress_no_index: Whether to ignore the --no-index option + when constructing the SearchScope object. + """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index and not suppress_no_index: + logger.debug( + 'Ignoring indexes: %s', + ','.join(redact_auth_from_url(url) for url in index_urls), + ) + index_urls = [] + + # Make sure find_links is a list before passing to create(). 
+ find_links = options.find_links or [] + + search_scope = SearchScope.create( + find_links=find_links, index_urls=index_urls, + ) + link_collector = LinkCollector( + session=session, search_scope=search_scope, + ) + return link_collector + + @property + def find_links(self): + # type: () -> List[str] + return self.search_scope.find_links + + def fetch_page(self, location): + # type: (Link) -> Optional[HTMLPage] + """ + Fetch an HTML page containing package links. + """ + return _get_html_page(location, session=self.session) + + def collect_sources( + self, + project_name: str, + candidates_from_page: CandidatesFromPage, + ) -> CollectedSources: + # The OrderedDict calls deduplicate sources by URL. + index_url_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=False, + cache_link_parsing=False, + ) + for loc in self.search_scope.get_index_urls_locations(project_name) + ).values() + find_links_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=True, + cache_link_parsing=True, + ) + for loc in self.find_links + ).values() + + if logger.isEnabledFor(logging.DEBUG): + lines = [ + f"* {s.link}" + for s in itertools.chain(find_links_sources, index_url_sources) + if s is not None and s.link is not None + ] + lines = [ + f"{len(lines)} location(s) to search " + f"for versions of {project_name}:" + ] + lines + logger.debug("\n".join(lines)) + + return CollectedSources( + find_links=list(find_links_sources), + index_urls=list(index_url_sources), + ) diff --git a/venv/Lib/site-packages/pip/_internal/index/package_finder.py b/venv/Lib/site-packages/pip/_internal/index/package_finder.py new file mode 100644 index 00000000..7f2e04e7 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/index/package_finder.py @@ -0,0 +1,1012 @@ +"""Routines related to PyPI, indexes""" + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +import functools +import itertools +import logging +import re +from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.tags import Tag +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import _BaseVersion +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + InvalidWheelFilename, + UnsupportedWheel, +) +from pip._internal.index.collector import LinkCollector, parse_links +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.models.wheel import Wheel +from pip._internal.req import InstallRequirement +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import build_netloc +from pip._internal.utils.packaging import check_requires_python +from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS +from pip._internal.utils.urls import url_to_path + +__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder'] + + +logger = logging.getLogger(__name__) + +BuildTag = Union[Tuple[()], Tuple[int, str]] +CandidateSortingKey = ( + Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag] +) + + +def _check_link_requires_python( + link, # type: Link + version_info, # type: Tuple[int, int, int] + ignore_requires_python=False, # type: bool +): + # type: (...) -> bool + """ + Return whether the given Python version is compatible with a link's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + """ + try: + is_compatible = check_requires_python( + link.requires_python, version_info=version_info, + ) + except specifiers.InvalidSpecifier: + logger.debug( + "Ignoring invalid Requires-Python (%r) for link: %s", + link.requires_python, link, + ) + else: + if not is_compatible: + version = '.'.join(map(str, version_info)) + if not ignore_requires_python: + logger.debug( + 'Link requires a different Python (%s not in: %r): %s', + version, link.requires_python, link, + ) + return False + + logger.debug( + 'Ignoring failed Requires-Python check (%s not in: %r) ' + 'for link: %s', + version, link.requires_python, link, + ) + + return True + + +class LinkEvaluator: + + """ + Responsible for evaluating links for a particular project. + """ + + _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. 
+ def __init__( + self, + project_name, # type: str + canonical_name, # type: str + formats, # type: FrozenSet[str] + target_python, # type: TargetPython + allow_yanked, # type: bool + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + :param project_name: The user supplied package name. + :param canonical_name: The canonical package name. + :param formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. + :param target_python: The target Python interpreter to use when + evaluating link compatibility. This is used, for example, to + check wheel compatibility, as well as when checking the Python + version, e.g. the Python version embedded in a link filename + (or egg fragment) and against an HTML link's optional PEP 503 + "data-requires-python" attribute. + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param ignore_requires_python: Whether to ignore incompatible + PEP 503 "data-requires-python" values in HTML links. Defaults + to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self._allow_yanked = allow_yanked + self._canonical_name = canonical_name + self._ignore_requires_python = ignore_requires_python + self._formats = formats + self._target_python = target_python + + self.project_name = project_name + + def evaluate_link(self, link): + # type: (Link) -> Tuple[bool, Optional[str]] + """ + Determine whether a link is a candidate for installation. + + :return: A tuple (is_candidate, result), where `result` is (1) a + version string if `is_candidate` is True, and (2) if + `is_candidate` is False, an optional string to log the reason + the link fails to qualify. + """ + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or '' + return (False, f'yanked for reason: {reason}') + + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + return (False, 'not a file') + if ext not in SUPPORTED_EXTENSIONS: + return (False, f'unsupported archive format: {ext}') + if "binary" not in self._formats and ext == WHEEL_EXTENSION: + reason = 'No binaries permitted for {}'.format( + self.project_name) + return (False, reason) + if "macosx10" in link.path and ext == '.zip': + return (False, 'macosx10 one') + if ext == WHEEL_EXTENSION: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + return (False, 'invalid wheel filename') + if canonicalize_name(wheel.name) != self._canonical_name: + reason = 'wrong project name (not {})'.format( + self.project_name) + return (False, reason) + + supported_tags = self._target_python.get_tags() + if not wheel.supported(supported_tags): + # Include the wheel's tags in the reason string to + # simplify troubleshooting compatibility issues. + file_tags = wheel.get_formatted_file_tags() + reason = ( + "none of the wheel's tags ({}) are compatible " + "(run pip debug --verbose to show compatible tags)".format( + ', '.join(file_tags) + ) + ) + return (False, reason) + + version = wheel.version + + # This should be up by the self.ok_binary check, but see issue 2700. 
+ if "source" not in self._formats and ext != WHEEL_EXTENSION: + reason = f'No sources permitted for {self.project_name}' + return (False, reason) + + if not version: + version = _extract_version_from_fragment( + egg_info, self._canonical_name, + ) + if not version: + reason = f'Missing project version for {self.project_name}' + return (False, reason) + + match = self._py_version_re.search(version) + if match: + version = version[:match.start()] + py_version = match.group(1) + if py_version != self._target_python.py_version: + return (False, 'Python version is incorrect') + + supports_python = _check_link_requires_python( + link, version_info=self._target_python.py_version_info, + ignore_requires_python=self._ignore_requires_python, + ) + if not supports_python: + # Return None for the reason text to suppress calling + # _log_skipped_link(). + return (False, None) + + logger.debug('Found link %s, version: %s', link, version) + + return (True, version) + + +def filter_unallowed_hashes( + candidates, # type: List[InstallationCandidate] + hashes, # type: Hashes + project_name, # type: str +): + # type: (...) -> List[InstallationCandidate] + """ + Filter out candidates whose hashes aren't allowed, and return a new + list of candidates. + + If at least one candidate has an allowed hash, then all candidates with + either an allowed hash or no hash specified are returned. Otherwise, + the given candidates are returned. + + Including the candidates with no hash specified when there is a match + allows a warning to be logged if there is a more preferred candidate + with no hash specified. Returning all candidates in the case of no + matches lets pip report the hash of the candidate that would otherwise + have been installed (e.g. permitting the user to more easily update + their requirements file with the desired hash). + """ + if not hashes: + logger.debug( + 'Given no hashes to check %s links for project %r: ' + 'discarding no candidates', + len(candidates), + project_name, + ) + # Make sure we're not returning back the given value. + return list(candidates) + + matches_or_no_digest = [] + # Collect the non-matches for logging purposes. + non_matches = [] + match_count = 0 + for candidate in candidates: + link = candidate.link + if not link.has_hash: + pass + elif link.is_hash_allowed(hashes=hashes): + match_count += 1 + else: + non_matches.append(candidate) + continue + + matches_or_no_digest.append(candidate) + + if match_count: + filtered = matches_or_no_digest + else: + # Make sure we're not returning back the given value. + filtered = list(candidates) + + if len(filtered) == len(candidates): + discard_message = 'discarding no candidates' + else: + discard_message = 'discarding {} non-matches:\n {}'.format( + len(non_matches), + '\n '.join(str(candidate.link) for candidate in non_matches) + ) + + logger.debug( + 'Checked %s links for project %r against %s hashes ' + '(%s matches, %s no digest): %s', + len(candidates), + project_name, + hashes.digest_count, + match_count, + len(matches_or_no_digest) - match_count, + discard_message + ) + + return filtered + + +class CandidatePreferences: + + """ + Encapsulates some of the preferences for filtering and sorting + InstallationCandidate objects. + """ + + def __init__( + self, + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + ): + # type: (...) -> None + """ + :param allow_all_prereleases: Whether to allow all pre-releases. 
+ """ + self.allow_all_prereleases = allow_all_prereleases + self.prefer_binary = prefer_binary + + +class BestCandidateResult: + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's + `compute_best_candidate()` method. + """ + + def __init__( + self, + candidates, # type: List[InstallationCandidate] + applicable_candidates, # type: List[InstallationCandidate] + best_candidate, # type: Optional[InstallationCandidate] + ): + # type: (...) -> None + """ + :param candidates: A sequence of all available candidates found. + :param applicable_candidates: The applicable candidates. + :param best_candidate: The most preferred candidate found, or None + if no applicable candidates were found. + """ + assert set(applicable_candidates) <= set(candidates) + + if best_candidate is None: + assert not applicable_candidates + else: + assert best_candidate in applicable_candidates + + self._applicable_candidates = applicable_candidates + self._candidates = candidates + + self.best_candidate = best_candidate + + def iter_all(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through all candidates. + """ + return iter(self._candidates) + + def iter_applicable(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through the applicable candidates. + """ + return iter(self._applicable_candidates) + + +class CandidateEvaluator: + + """ + Responsible for filtering and sorting candidates for installation based + on what tags are valid. + """ + + @classmethod + def create( + cls, + project_name, # type: str + target_python=None, # type: Optional[TargetPython] + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object. + + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + :param hashes: An optional collection of allowed hashes. + """ + if target_python is None: + target_python = TargetPython() + if specifier is None: + specifier = specifiers.SpecifierSet() + + supported_tags = target_python.get_tags() + + return cls( + project_name=project_name, + supported_tags=supported_tags, + specifier=specifier, + prefer_binary=prefer_binary, + allow_all_prereleases=allow_all_prereleases, + hashes=hashes, + ) + + def __init__( + self, + project_name, # type: str + supported_tags, # type: List[Tag] + specifier, # type: specifiers.BaseSpecifier + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> None + """ + :param supported_tags: The PEP 425 tags supported by the target + Python in order of preference (most preferred first). + """ + self._allow_all_prereleases = allow_all_prereleases + self._hashes = hashes + self._prefer_binary = prefer_binary + self._project_name = project_name + self._specifier = specifier + self._supported_tags = supported_tags + # Since the index of the tag in the _supported_tags list is used + # as a priority, precompute a map from tag to index/priority to be + # used in wheel.find_most_preferred_tag. 
+ self._wheel_tag_preferences = { + tag: idx for idx, tag in enumerate(supported_tags) + } + + def get_applicable_candidates( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> List[InstallationCandidate] + """ + Return the applicable candidates from a list of candidates. + """ + # Using None infers from the specifier instead. + allow_prereleases = self._allow_all_prereleases or None + specifier = self._specifier + versions = { + str(v) for v in specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + (str(c.version) for c in candidates), + prereleases=allow_prereleases, + ) + } + + # Again, converting version to str to deal with debundling. + applicable_candidates = [ + c for c in candidates if str(c.version) in versions + ] + + filtered_applicable_candidates = filter_unallowed_hashes( + candidates=applicable_candidates, + hashes=self._hashes, + project_name=self._project_name, + ) + + return sorted(filtered_applicable_candidates, key=self._sort_key) + + def _sort_key(self, candidate): + # type: (InstallationCandidate) -> CandidateSortingKey + """ + Function to pass as the `key` argument to a call to sorted() to sort + InstallationCandidates by preference. + + Returns a tuple such that tuples sorting as greater using Python's + default comparison operator are more preferred. + + The preference is as follows: + + First and foremost, candidates with allowed (matching) hashes are + always preferred over candidates without matching hashes. This is + because e.g. if the only candidate with an allowed hash is yanked, + we still want to use that candidate. + + Second, excepting hash considerations, candidates that have been + yanked (in the sense of PEP 592) are always less preferred than + candidates that haven't been yanked. Then: + + If not finding wheels, they are sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min(self._supported_tags) + 3. source archives + If prefer_binary was set, then all wheels are sorted above sources. + + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + valid_tags = self._supported_tags + support_num = len(valid_tags) + build_tag = () # type: BuildTag + binary_preference = 0 + link = candidate.link + if link.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(link.filename) + try: + pri = -(wheel.find_most_preferred_tag( + valid_tags, self._wheel_tag_preferences + )) + except ValueError: + raise UnsupportedWheel( + "{} is not a supported wheel for this platform. It " + "can't be sorted.".format(wheel.filename) + ) + if self._prefer_binary: + binary_preference = 1 + if wheel.build_tag is not None: + match = re.match(r'^(\d+)(.*)$', wheel.build_tag) + build_tag_groups = match.groups() + build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) + else: # sdist + pri = -(support_num) + has_allowed_hash = int(link.is_hash_allowed(self._hashes)) + yank_value = -1 * int(link.is_yanked) # -1 for yanked. 
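+        # The key below orders candidates by allowed hash, then not-yanked, then binary +        # preference, version, wheel tag priority, and finally build tag.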
+ return ( + has_allowed_hash, yank_value, binary_preference, candidate.version, + pri, build_tag, + ) + + def sort_best_candidate( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> Optional[InstallationCandidate] + """ + Return the best candidate per the instance's sort order, or None if + no candidate is acceptable. + """ + if not candidates: + return None + best_candidate = max(candidates, key=self._sort_key) + return best_candidate + + def compute_best_candidate( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> BestCandidateResult + """ + Compute and return a `BestCandidateResult` instance. + """ + applicable_candidates = self.get_applicable_candidates(candidates) + + best_candidate = self.sort_best_candidate(applicable_candidates) + + return BestCandidateResult( + candidates, + applicable_candidates=applicable_candidates, + best_candidate=best_candidate, + ) + + +class PackageFinder: + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__( + self, + link_collector, # type: LinkCollector + target_python, # type: TargetPython + allow_yanked, # type: bool + format_control=None, # type: Optional[FormatControl] + candidate_prefs=None, # type: CandidatePreferences + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + This constructor is primarily meant to be used by the create() class + method and from tests. + + :param format_control: A FormatControl object, used to control + the selection of source packages / binary packages when consulting + the index and links. + :param candidate_prefs: Options to use when creating a + CandidateEvaluator object. + """ + if candidate_prefs is None: + candidate_prefs = CandidatePreferences() + + format_control = format_control or FormatControl(set(), set()) + + self._allow_yanked = allow_yanked + self._candidate_prefs = candidate_prefs + self._ignore_requires_python = ignore_requires_python + self._link_collector = link_collector + self._target_python = target_python + + self.format_control = format_control + + # These are boring links that have already been logged somehow. + self._logged_links = set() # type: Set[Link] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + @classmethod + def create( + cls, + link_collector, # type: LinkCollector + selection_prefs, # type: SelectionPreferences + target_python=None, # type: Optional[TargetPython] + ): + # type: (...) -> PackageFinder + """Create a PackageFinder. + + :param selection_prefs: The candidate selection preferences, as a + SelectionPreferences object. + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. 
+ """ + if target_python is None: + target_python = TargetPython() + + candidate_prefs = CandidatePreferences( + prefer_binary=selection_prefs.prefer_binary, + allow_all_prereleases=selection_prefs.allow_all_prereleases, + ) + + return cls( + candidate_prefs=candidate_prefs, + link_collector=link_collector, + target_python=target_python, + allow_yanked=selection_prefs.allow_yanked, + format_control=selection_prefs.format_control, + ignore_requires_python=selection_prefs.ignore_requires_python, + ) + + @property + def target_python(self): + # type: () -> TargetPython + return self._target_python + + @property + def search_scope(self): + # type: () -> SearchScope + return self._link_collector.search_scope + + @search_scope.setter + def search_scope(self, search_scope): + # type: (SearchScope) -> None + self._link_collector.search_scope = search_scope + + @property + def find_links(self): + # type: () -> List[str] + return self._link_collector.find_links + + @property + def index_urls(self): + # type: () -> List[str] + return self.search_scope.index_urls + + @property + def trusted_hosts(self): + # type: () -> Iterable[str] + for host_port in self._link_collector.session.pip_trusted_origins: + yield build_netloc(*host_port) + + @property + def allow_all_prereleases(self): + # type: () -> bool + return self._candidate_prefs.allow_all_prereleases + + def set_allow_all_prereleases(self): + # type: () -> None + self._candidate_prefs.allow_all_prereleases = True + + @property + def prefer_binary(self): + # type: () -> bool + return self._candidate_prefs.prefer_binary + + def set_prefer_binary(self): + # type: () -> None + self._candidate_prefs.prefer_binary = True + + def make_link_evaluator(self, project_name): + # type: (str) -> LinkEvaluator + canonical_name = canonicalize_name(project_name) + formats = self.format_control.get_allowed_formats(canonical_name) + + return LinkEvaluator( + project_name=project_name, + canonical_name=canonical_name, + formats=formats, + target_python=self._target_python, + allow_yanked=self._allow_yanked, + ignore_requires_python=self._ignore_requires_python, + ) + + def _sort_links(self, links): + # type: (Iterable[Link]) -> List[Link] + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen = set() # type: Set[Link] + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _log_skipped_link(self, link, reason): + # type: (Link, str) -> None + if link not in self._logged_links: + # Put the link at the end so the reason is more visible and because + # the link string is usually very long. + logger.debug('Skipping link: %s: %s', reason, link) + self._logged_links.add(link) + + def get_install_candidate(self, link_evaluator, link): + # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate] + """ + If the link is a candidate for install, convert it to an + InstallationCandidate and return it. Otherwise, return None. 
+ """ + is_candidate, result = link_evaluator.evaluate_link(link) + if not is_candidate: + if result: + self._log_skipped_link(link, reason=result) + return None + + return InstallationCandidate( + name=link_evaluator.project_name, + link=link, + version=result, + ) + + def evaluate_links(self, link_evaluator, links): + # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate] + """ + Convert links that are candidates to InstallationCandidate objects. + """ + candidates = [] + for link in self._sort_links(links): + candidate = self.get_install_candidate(link_evaluator, link) + if candidate is not None: + candidates.append(candidate) + + return candidates + + def process_project_url(self, project_url, link_evaluator): + # type: (Link, LinkEvaluator) -> List[InstallationCandidate] + logger.debug( + 'Fetching project page and analyzing links: %s', project_url, + ) + html_page = self._link_collector.fetch_page(project_url) + if html_page is None: + return [] + + page_links = list(parse_links(html_page)) + + with indent_log(): + package_links = self.evaluate_links( + link_evaluator, + links=page_links, + ) + + return package_links + + @functools.lru_cache(maxsize=None) + def find_all_candidates(self, project_name): + # type: (str) -> List[InstallationCandidate] + """Find all available InstallationCandidate for project_name + + This checks index_urls and find_links. + All versions found are returned as an InstallationCandidate list. + + See LinkEvaluator.evaluate_link() for details on which files + are accepted. + """ + link_evaluator = self.make_link_evaluator(project_name) + + collected_sources = self._link_collector.collect_sources( + project_name=project_name, + candidates_from_page=functools.partial( + self.process_project_url, + link_evaluator=link_evaluator, + ), + ) + + page_candidates_it = itertools.chain.from_iterable( + source.page_candidates() + for sources in collected_sources + for source in sources + if source is not None + ) + page_candidates = list(page_candidates_it) + + file_links_it = itertools.chain.from_iterable( + source.file_links() + for sources in collected_sources + for source in sources + if source is not None + ) + file_candidates = self.evaluate_links( + link_evaluator, + sorted(file_links_it, reverse=True), + ) + + if logger.isEnabledFor(logging.DEBUG) and file_candidates: + paths = [url_to_path(c.link.url) for c in file_candidates] + logger.debug("Local files found: %s", ", ".join(paths)) + + # This is an intentional priority ordering + return file_candidates + page_candidates + + def make_candidate_evaluator( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object to use. + """ + candidate_prefs = self._candidate_prefs + return CandidateEvaluator.create( + project_name=project_name, + target_python=self._target_python, + prefer_binary=candidate_prefs.prefer_binary, + allow_all_prereleases=candidate_prefs.allow_all_prereleases, + specifier=specifier, + hashes=hashes, + ) + + @functools.lru_cache(maxsize=None) + def find_best_candidate( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> BestCandidateResult + """Find matches for the given project and specifier. + + :param specifier: An optional object implementing `filter` + (e.g. 
`packaging.specifiers.SpecifierSet`) to filter applicable + versions. + + :return: A `BestCandidateResult` instance. + """ + candidates = self.find_all_candidates(project_name) + candidate_evaluator = self.make_candidate_evaluator( + project_name=project_name, + specifier=specifier, + hashes=hashes, + ) + return candidate_evaluator.compute_best_candidate(candidates) + + def find_requirement(self, req, upgrade): + # type: (InstallRequirement, bool) -> Optional[InstallationCandidate] + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a InstallationCandidate if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + hashes = req.hashes(trust_internet=False) + best_candidate_result = self.find_best_candidate( + req.name, specifier=req.specifier, hashes=hashes, + ) + best_candidate = best_candidate_result.best_candidate + + installed_version = None # type: Optional[_BaseVersion] + if req.satisfied_by is not None: + installed_version = parse_version(req.satisfied_by.version) + + def _format_versions(cand_iter): + # type: (Iterable[InstallationCandidate]) -> str + # This repeated parse_version and str() conversion is needed to + # handle different vendoring sources from pip and pkg_resources. + # If we stop using the pkg_resources provided specifier and start + # using our own, we can drop the cast to str(). + return ", ".join(sorted( + {str(c.version) for c in cand_iter}, + key=parse_version, + )) or "none" + + if installed_version is None and best_candidate is None: + logger.critical( + 'Could not find a version that satisfies the requirement %s ' + '(from versions: %s)', + req, + _format_versions(best_candidate_result.iter_all()), + ) + + raise DistributionNotFound( + 'No matching distribution found for {}'.format( + req) + ) + + best_installed = False + if installed_version and ( + best_candidate is None or + best_candidate.version <= installed_version): + best_installed = True + + if not upgrade and installed_version is not None: + if best_installed: + logger.debug( + 'Existing installed version (%s) is most up-to-date and ' + 'satisfies requirement', + installed_version, + ) + else: + logger.debug( + 'Existing installed version (%s) satisfies requirement ' + '(most up-to-date version is %s)', + installed_version, + best_candidate.version, + ) + return None + + if best_installed: + # We have an existing version, and its the best version + logger.debug( + 'Installed version (%s) is most up-to-date (past versions: ' + '%s)', + installed_version, + _format_versions(best_candidate_result.iter_applicable()), + ) + raise BestVersionAlreadyInstalled + + logger.debug( + 'Using version %s (newest of versions: %s)', + best_candidate.version, + _format_versions(best_candidate_result.iter_applicable()), + ) + return best_candidate + + +def _find_name_version_sep(fragment, canonical_name): + # type: (str, str) -> int + """Find the separator's index based on the package's canonical name. + + :param fragment: A + filename "fragment" (stem) or + egg fragment. + :param canonical_name: The package's canonical name. + + This function is needed since the canonicalized name does not necessarily + have the same length as the egg info's name part. An example:: + + >>> fragment = 'foo__bar-1.0' + >>> canonical_name = 'foo-bar' + >>> _find_name_version_sep(fragment, canonical_name) + 8 + """ + # Project name and version must be separated by one single dash. 
Find all + # occurrences of dashes; if the string in front of it matches the canonical + # name, this is the one separating the name and version parts. + for i, c in enumerate(fragment): + if c != "-": + continue + if canonicalize_name(fragment[:i]) == canonical_name: + return i + raise ValueError(f"{fragment} does not match {canonical_name}") + + +def _extract_version_from_fragment(fragment, canonical_name): + # type: (str, str) -> Optional[str] + """Parse the version string from a + filename + "fragment" (stem) or egg fragment. + + :param fragment: The string to parse. E.g. foo-2.1 + :param canonical_name: The canonicalized name of the package this + belongs to. + """ + try: + version_start = _find_name_version_sep(fragment, canonical_name) + 1 + except ValueError: + return None + version = fragment[version_start:] + if not version: + return None + return version diff --git a/venv/Lib/site-packages/pip/_internal/index/sources.py b/venv/Lib/site-packages/pip/_internal/index/sources.py new file mode 100644 index 00000000..eec3f12f --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/index/sources.py @@ -0,0 +1,224 @@ +import logging +import mimetypes +import os +import pathlib +from typing import Callable, Iterable, Optional, Tuple + +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.link import Link +from pip._internal.utils.urls import path_to_url, url_to_path +from pip._internal.vcs import is_url + +logger = logging.getLogger(__name__) + +FoundCandidates = Iterable[InstallationCandidate] +FoundLinks = Iterable[Link] +CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]] +PageValidator = Callable[[Link], bool] + + +class LinkSource: + @property + def link(self) -> Optional[Link]: + """Returns the underlying link, if there's one.""" + raise NotImplementedError() + + def page_candidates(self) -> FoundCandidates: + """Candidates found by parsing an archive listing HTML file.""" + raise NotImplementedError() + + def file_links(self) -> FoundLinks: + """Links found by specifying archives directly.""" + raise NotImplementedError() + + +def _is_html_file(file_url: str) -> bool: + return mimetypes.guess_type(file_url, strict=False)[0] == "text/html" + + +class _FlatDirectorySource(LinkSource): + """Link source specified by ``--find-links=``. + + This looks the content of the directory, and returns: + + * ``page_candidates``: Links listed on each HTML file in the directory. + * ``file_candidates``: Archives in the directory. + """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + path: str, + ) -> None: + self._candidates_from_page = candidates_from_page + self._path = pathlib.Path(os.path.realpath(path)) + + @property + def link(self) -> Optional[Link]: + return None + + def page_candidates(self) -> FoundCandidates: + for path in self._path.iterdir(): + url = path_to_url(str(path)) + if not _is_html_file(url): + continue + yield from self._candidates_from_page(Link(url)) + + def file_links(self) -> FoundLinks: + for path in self._path.iterdir(): + url = path_to_url(str(path)) + if _is_html_file(url): + continue + yield Link(url) + + +class _LocalFileSource(LinkSource): + """``--find-links=`` or ``--[extra-]index-url=``. + + If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to + the option, it is converted to a URL first. This returns: + + * ``page_candidates``: Links listed on an HTML file. + * ``file_candidates``: The non-HTML file. 
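A quick illustration of the fragment helpers defined in `package_finder.py` above; the first two values come from the docstring example, the third is a hypothetical non-match:

```python
from pip._internal.index.package_finder import (
    _extract_version_from_fragment,
    _find_name_version_sep,
)

# "foo__bar" canonicalizes to "foo-bar", so the dash at index 8 is the separator.
assert _find_name_version_sep("foo__bar-1.0", "foo-bar") == 8
assert _extract_version_from_fragment("foo__bar-1.0", "foo-bar") == "1.0"
# A fragment whose name part never matches the canonical name yields no version.
assert _extract_version_from_fragment("unrelated-1.0", "foo-bar") is None
```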
+ """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + if not _is_html_file(self._link.url): + return + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + if _is_html_file(self._link.url): + return + yield self._link + + +class _RemoteFileSource(LinkSource): + """``--find-links=`` or ``--[extra-]index-url=``. + + This returns: + + * ``page_candidates``: Links listed on an HTML file. + * ``file_candidates``: The non-HTML file. + """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + page_validator: PageValidator, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._page_validator = page_validator + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + if not self._page_validator(self._link): + return + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + yield self._link + + +class _IndexDirectorySource(LinkSource): + """``--[extra-]index-url=``. + + This is treated like a remote URL; ``candidates_from_page`` contains logic + for this by appending ``index.html`` to the link. + """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + return () + + +def build_source( + location: str, + *, + candidates_from_page: CandidatesFromPage, + page_validator: PageValidator, + expand_dir: bool, + cache_link_parsing: bool, +) -> Tuple[Optional[str], Optional[LinkSource]]: + + path: Optional[str] = None + url: Optional[str] = None + if os.path.exists(location): # Is a local path. + url = path_to_url(location) + path = location + elif location.startswith("file:"): # A file: URL. + url = location + path = url_to_path(location) + elif is_url(location): + url = location + + if url is None: + msg = ( + "Location '%s' is ignored: " + "it is either a non-existing path or lacks a specific scheme." 
+ ) + logger.warning(msg, location) + return (None, None) + + if path is None: + source: LinkSource = _RemoteFileSource( + candidates_from_page=candidates_from_page, + page_validator=page_validator, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + + if os.path.isdir(path): + if expand_dir: + source = _FlatDirectorySource( + candidates_from_page=candidates_from_page, + path=path, + ) + else: + source = _IndexDirectorySource( + candidates_from_page=candidates_from_page, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + elif os.path.isfile(path): + source = _LocalFileSource( + candidates_from_page=candidates_from_page, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + logger.warning( + "Location '%s' is ignored: it is neither a file nor a directory.", + location, + ) + return (url, None) diff --git a/venv/Lib/site-packages/pip/_internal/locations/__init__.py b/venv/Lib/site-packages/pip/_internal/locations/__init__.py new file mode 100644 index 00000000..3acb51bc --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/locations/__init__.py @@ -0,0 +1,184 @@ +import logging +import pathlib +import sys +import sysconfig +from typing import List, Optional + +from pip._internal.models.scheme import SCHEME_KEYS, Scheme + +from . import _distutils, _sysconfig +from .base import ( + USER_CACHE_DIR, + get_major_minor_version, + get_src_prefix, + site_packages, + user_site, +) + +__all__ = [ + "USER_CACHE_DIR", + "get_bin_prefix", + "get_bin_user", + "get_major_minor_version", + "get_platlib", + "get_prefixed_libs", + "get_purelib", + "get_scheme", + "get_src_prefix", + "site_packages", + "user_site", +] + + +logger = logging.getLogger(__name__) + + +def _default_base(*, user: bool) -> str: + if user: + base = sysconfig.get_config_var("userbase") + else: + base = sysconfig.get_config_var("base") + assert base is not None + return base + + +def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool: + if old == new: + return False + issue_url = "https://github.com/pypa/pip/issues/9617" + message = ( + "Value for %s does not match. Please report this to <%s>" + "\ndistutils: %s" + "\nsysconfig: %s" + ) + logger.debug(message, key, issue_url, old, new) + return True + + +def _log_context( + *, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + prefix: Optional[str] = None, +) -> None: + message = ( + "Additional context:" "\nuser = %r" "\nhome = %r" "\nroot = %r" "\nprefix = %r" + ) + logger.debug(message, user, home, root, prefix) + + +def get_scheme( + dist_name, # type: str + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + isolated=False, # type: bool + prefix=None, # type: Optional[str] +): + # type: (...) -> Scheme + old = _distutils.get_scheme( + dist_name, + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + new = _sysconfig.get_scheme( + dist_name, + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + + base = prefix or home or _default_base(user=user) + warned = [] + for k in SCHEME_KEYS: + # Extra join because distutils can return relative paths. + old_v = pathlib.Path(base, getattr(old, k)) + new_v = pathlib.Path(getattr(new, k)) + + # distutils incorrectly put PyPy packages under ``site-packages/python`` + # in the ``posix_home`` scheme, but PyPy devs said they expect the + # directory name to be ``pypy`` instead. 
So we treat this as a bug fix + # and not warn about it. See bpo-43307 and python/cpython#24628. + skip_pypy_special_case = ( + sys.implementation.name == "pypy" + and home is not None + and k in ("platlib", "purelib") + and old_v.parent == new_v.parent + and old_v.name == "python" + and new_v.name == "pypy" + ) + if skip_pypy_special_case: + continue + + warned.append(_warn_if_mismatch(old_v, new_v, key=f"scheme.{k}")) + + if any(warned): + _log_context(user=user, home=home, root=root, prefix=prefix) + + return old + + +def get_bin_prefix(): + # type: () -> str + old = _distutils.get_bin_prefix() + new = _sysconfig.get_bin_prefix() + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"): + _log_context() + return old + + +def get_bin_user(): + # type: () -> str + return _sysconfig.get_scheme("", user=True).scripts + + +def get_purelib(): + # type: () -> str + """Return the default pure-Python lib location.""" + old = _distutils.get_purelib() + new = _sysconfig.get_purelib() + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"): + _log_context() + return old + + +def get_platlib(): + # type: () -> str + """Return the default platform-shared lib location.""" + old = _distutils.get_platlib() + new = _sysconfig.get_platlib() + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"): + _log_context() + return old + + +def get_prefixed_libs(prefix): + # type: (str) -> List[str] + """Return the lib locations under ``prefix``.""" + old_pure, old_plat = _distutils.get_prefixed_libs(prefix) + new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix) + + warned = [ + _warn_if_mismatch( + pathlib.Path(old_pure), + pathlib.Path(new_pure), + key="prefixed-purelib", + ), + _warn_if_mismatch( + pathlib.Path(old_plat), + pathlib.Path(new_plat), + key="prefixed-platlib", + ), + ] + if any(warned): + _log_context(prefix=prefix) + + if old_pure == old_plat: + return [old_pure] + return [old_pure, old_plat] diff --git a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..153ca98c Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-36.pyc new file mode 100644 index 00000000..4d464035 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-36.pyc new file mode 100644 index 00000000..c8e63789 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-36.pyc new file mode 100644 index 00000000..8a83b43d Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/locations/_distutils.py b/venv/Lib/site-packages/pip/_internal/locations/_distutils.py new file mode 100644 index 00000000..2d7ab732 --- 
/dev/null +++ b/venv/Lib/site-packages/pip/_internal/locations/_distutils.py @@ -0,0 +1,150 @@ +"""Locations where we look for configs, install stuff, etc""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import os +import sys +from distutils.cmd import Command as DistutilsCommand +from distutils.command.install import SCHEME_KEYS +from distutils.command.install import install as distutils_install_command +from distutils.sysconfig import get_python_lib +from typing import Dict, List, Optional, Tuple, Union, cast + +from pip._internal.models.scheme import Scheme +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import get_major_minor_version + + +def _distutils_scheme( + dist_name, user=False, home=None, root=None, isolated=False, prefix=None +): + # type:(str, bool, str, str, bool, str) -> Dict[str, str] + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + dist_args = {"name": dist_name} # type: Dict[str, Union[str, List[str]]] + if isolated: + dist_args["script_args"] = ["--no-user-cfg"] + + d = Distribution(dist_args) + d.parse_config_files() + obj = None # type: Optional[DistutilsCommand] + obj = d.get_command_obj("install", create=True) + assert obj is not None + i = cast(distutils_install_command, obj) + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. + assert not (user and prefix), f"user={user} prefix={prefix}" + assert not (home and prefix), f"home={home} prefix={prefix}" + i.user = user or i.user + if user or home: + i.prefix = "" + i.prefix = prefix or i.prefix + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + + scheme = {} + for key in SCHEME_KEYS: + scheme[key] = getattr(i, "install_" + key) + + # install_lib specified in setup.cfg should install *everything* + # into there (i.e. it takes precedence over both purelib and + # platlib). Note, i.install_lib is *always* set after + # finalize_options(); we only want to override here if the user + # has explicitly requested it hence going back to the config + if "install_lib" in d.get_option_dict("install"): + scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + + if running_under_virtualenv(): + scheme["headers"] = os.path.join( + i.prefix, + "include", + "site", + f"python{get_major_minor_version()}", + dist_name, + ) + + if root is not None: + path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1] + scheme["headers"] = os.path.join( + root, + path_no_drive[1:], + ) + + return scheme + + +def get_scheme( + dist_name, # type: str + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + isolated=False, # type: bool + prefix=None, # type: Optional[str] +): + # type: (...) -> Scheme + """ + Get the "scheme" corresponding to the input parameters. 
The distutils + documentation provides the context for the available schemes: + https://docs.python.org/3/install/index.html#alternate-installation + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme and provides the base + directory for the same + :param root: root under which other directories are re-based + :param isolated: equivalent to --no-user-cfg, i.e. do not consider + ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for + scheme paths + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + scheme = _distutils_scheme(dist_name, user, home, root, isolated, prefix) + return Scheme( + platlib=scheme["platlib"], + purelib=scheme["purelib"], + headers=scheme["headers"], + scripts=scheme["scripts"], + data=scheme["data"], + ) + + +def get_bin_prefix(): + # type: () -> str + if WINDOWS: + bin_py = os.path.join(sys.prefix, "Scripts") + # buildout uses 'bin' on Windows too? + if not os.path.exists(bin_py): + bin_py = os.path.join(sys.prefix, "bin") + return bin_py + # Forcing to use /usr/local/bin for standard macOS framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return os.path.join(sys.prefix, "bin") + + +def get_purelib(): + # type: () -> str + return get_python_lib(plat_specific=False) + + +def get_platlib(): + # type: () -> str + return get_python_lib(plat_specific=True) + + +def get_prefixed_libs(prefix): + # type: (str) -> Tuple[str, str] + return ( + get_python_lib(plat_specific=False, prefix=prefix), + get_python_lib(plat_specific=True, prefix=prefix), + ) diff --git a/venv/Lib/site-packages/pip/_internal/locations/_sysconfig.py b/venv/Lib/site-packages/pip/_internal/locations/_sysconfig.py new file mode 100644 index 00000000..03366ce6 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/locations/_sysconfig.py @@ -0,0 +1,180 @@ +import distutils.util # FIXME: For change_root. +import logging +import os +import sys +import sysconfig +import typing + +from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import get_major_minor_version + +logger = logging.getLogger(__name__) + + +# Notes on _infer_* functions. +# Unfortunately ``_get_default_scheme()`` is private, so there's no way to +# ask things like "what is the '_prefix' scheme on this platform". These +# functions try to answer that with some heuristics while accounting for ad-hoc +# platforms not covered by CPython's default sysconfig implementation. If the +# ad-hoc implementation does not fully implement sysconfig, we'll fall back to +# a POSIX scheme. + +_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names()) + + +def _infer_prefix(): + # type: () -> str + """Try to find a prefix scheme for the current platform. + + This tries: + + * Implementation + OS, used by PyPy on Windows (``pypy_nt``). + * Implementation without OS, used by PyPy on POSIX (``pypy``). + * OS + "prefix", used by CPython on POSIX (``posix_prefix``). + * Just the OS name, used by CPython on Windows (``nt``). + + If none of the above works, fall back to ``posix_prefix``. 
+ """ + implementation_suffixed = f"{sys.implementation.name}_{os.name}" + if implementation_suffixed in _AVAILABLE_SCHEMES: + return implementation_suffixed + if sys.implementation.name in _AVAILABLE_SCHEMES: + return sys.implementation.name + suffixed = f"{os.name}_prefix" + if suffixed in _AVAILABLE_SCHEMES: + return suffixed + if os.name in _AVAILABLE_SCHEMES: # On Windows, prefx is just called "nt". + return os.name + return "posix_prefix" + + +def _infer_user(): + # type: () -> str + """Try to find a user scheme for the current platform.""" + suffixed = f"{os.name}_user" + if suffixed in _AVAILABLE_SCHEMES: + return suffixed + if "posix_user" not in _AVAILABLE_SCHEMES: # User scheme unavailable. + raise UserInstallationInvalid() + return "posix_user" + + +def _infer_home(): + # type: () -> str + """Try to find a home for the current platform.""" + suffixed = f"{os.name}_home" + if suffixed in _AVAILABLE_SCHEMES: + return suffixed + return "posix_home" + + +# Update these keys if the user sets a custom home. +_HOME_KEYS = [ + "installed_base", + "base", + "installed_platbase", + "platbase", + "prefix", + "exec_prefix", +] +if sysconfig.get_config_var("userbase") is not None: + _HOME_KEYS.append("userbase") + + +def get_scheme( + dist_name, # type: str + user=False, # type: bool + home=None, # type: typing.Optional[str] + root=None, # type: typing.Optional[str] + isolated=False, # type: bool + prefix=None, # type: typing.Optional[str] +): + # type: (...) -> Scheme + """ + Get the "scheme" corresponding to the input parameters. + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme + :param root: root under which other directories are re-based + :param isolated: ignored, but kept for distutils compatibility (where + this controls whether the user-site pydistutils.cfg is honored) + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + if user and prefix: + raise InvalidSchemeCombination("--user", "--prefix") + if home and prefix: + raise InvalidSchemeCombination("--home", "--prefix") + + if home is not None: + scheme_name = _infer_home() + elif user: + scheme_name = _infer_user() + else: + scheme_name = _infer_prefix() + + if home is not None: + variables = {k: home for k in _HOME_KEYS} + elif prefix is not None: + variables = {k: prefix for k in _HOME_KEYS} + else: + variables = {} + + paths = sysconfig.get_paths(scheme=scheme_name, vars=variables) + + # Logic here is very arbitrary, we're doing it for compatibility, don't ask. + # 1. Pip historically uses a special header path in virtual environments. + # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We + # only do the same when not running in a virtual environment because + # pip's historical header path logic (see point 1) did not do this. 
+ if running_under_virtualenv(): + if user: + base = variables.get("userbase", sys.prefix) + else: + base = variables.get("base", sys.prefix) + python_xy = f"python{get_major_minor_version()}" + paths["include"] = os.path.join(base, "include", "site", python_xy) + elif not dist_name: + dist_name = "UNKNOWN" + + scheme = Scheme( + platlib=paths["platlib"], + purelib=paths["purelib"], + headers=os.path.join(paths["include"], dist_name), + scripts=paths["scripts"], + data=paths["data"], + ) + if root is not None: + for key in SCHEME_KEYS: + value = distutils.util.change_root(root, getattr(scheme, key)) + setattr(scheme, key, value) + return scheme + + +def get_bin_prefix(): + # type: () -> str + # Forcing to use /usr/local/bin for standard macOS framework installs. + if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return sysconfig.get_paths()["scripts"] + + +def get_purelib(): + # type: () -> str + return sysconfig.get_paths()["purelib"] + + +def get_platlib(): + # type: () -> str + return sysconfig.get_paths()["platlib"] + + +def get_prefixed_libs(prefix): + # type: (str) -> typing.Tuple[str, str] + paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix}) + return (paths["purelib"], paths["platlib"]) diff --git a/venv/Lib/site-packages/pip/_internal/locations/base.py b/venv/Lib/site-packages/pip/_internal/locations/base.py new file mode 100644 index 00000000..98557abb --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/locations/base.py @@ -0,0 +1,48 @@ +import os +import site +import sys +import sysconfig +import typing + +from pip._internal.utils import appdirs +from pip._internal.utils.virtualenv import running_under_virtualenv + +# Application Directories +USER_CACHE_DIR = appdirs.user_cache_dir("pip") + +# FIXME doesn't account for venv linked to global site-packages +site_packages = sysconfig.get_path("purelib") # type: typing.Optional[str] + + +def get_major_minor_version(): + # type: () -> str + """ + Return the major-minor version of the current Python as a string, e.g. + "3.7" or "3.10". + """ + return "{}.{}".format(*sys.version_info) + + +def get_src_prefix(): + # type: () -> str + if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, "src") + else: + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), "src") + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit("The folder you are executing pip from can no longer be found.") + + # under macOS + virtualenv sys.prefix is not properly resolved + # it is something like /path/to/python/bin/.. + return os.path.abspath(src_prefix) + + +try: + # Use getusersitepackages if this is present, as it ensures that the + # value is initialised properly. + user_site = site.getusersitepackages() # type: typing.Optional[str] +except AttributeError: + user_site = site.USER_SITE diff --git a/venv/Lib/site-packages/pip/_internal/main.py b/venv/Lib/site-packages/pip/_internal/main.py new file mode 100644 index 00000000..51eee158 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/main.py @@ -0,0 +1,13 @@ +from typing import List, Optional + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/venv/Lib/site-packages/pip/_internal/metadata/__init__.py b/venv/Lib/site-packages/pip/_internal/metadata/__init__.py new file mode 100644 index 00000000..63335a19 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/metadata/__init__.py @@ -0,0 +1,43 @@ +from typing import List, Optional + +from .base import BaseDistribution, BaseEnvironment + + +def get_default_environment(): + # type: () -> BaseEnvironment + """Get the default representation for the current environment. + + This returns an Environment instance from the chosen backend. The default + Environment instance should be built from ``sys.path`` and may use caching + to share instance state accorss calls. + """ + from .pkg_resources import Environment + + return Environment.default() + + +def get_environment(paths): + # type: (Optional[List[str]]) -> BaseEnvironment + """Get a representation of the environment specified by ``paths``. + + This returns an Environment instance from the chosen backend based on the + given import paths. The backend must build a fresh instance representing + the state of installed distributions when this function is called. + """ + from .pkg_resources import Environment + + return Environment.from_paths(paths) + + +def get_wheel_distribution(wheel_path, canonical_name): + # type: (str, str) -> BaseDistribution + """Get the representation of the specified wheel's distribution metadata. + + This returns a Distribution instance from the chosen backend based on + the given wheel's ``.dist-info`` directory. + + :param canonical_name: Normalized project name of the given wheel. + """ + from .pkg_resources import Distribution + + return Distribution.from_wheel(wheel_path, canonical_name) diff --git a/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..f20c0fee Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-36.pyc new file mode 100644 index 00000000..e6edd70b Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-36.pyc new file mode 100644 index 00000000..758ba7dc Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/metadata/base.py b/venv/Lib/site-packages/pip/_internal/metadata/base.py new file mode 100644 index 00000000..37f9a823 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/metadata/base.py @@ -0,0 +1,142 @@ +import logging +import re +from typing import Container, Iterator, List, Optional, Union + +from pip._vendor.packaging.version import LegacyVersion, Version + +from pip._internal.utils.misc import stdlib_pkgs # TODO: Move definition here. 
+ +DistributionVersion = Union[LegacyVersion, Version] + +logger = logging.getLogger(__name__) + + +class BaseDistribution: + @property + def location(self): + # type: () -> Optional[str] + """Where the distribution is loaded from. + + A string value is not necessarily a filesystem path, since distributions + can be loaded from other sources, e.g. arbitrary zip archives. ``None`` + means the distribution is created in-memory. + """ + raise NotImplementedError() + + @property + def metadata_version(self): + # type: () -> Optional[str] + """Value of "Metadata-Version:" in the distribution, if available.""" + raise NotImplementedError() + + @property + def canonical_name(self): + # type: () -> str + raise NotImplementedError() + + @property + def version(self): + # type: () -> DistributionVersion + raise NotImplementedError() + + @property + def installer(self): + # type: () -> str + raise NotImplementedError() + + @property + def editable(self): + # type: () -> bool + raise NotImplementedError() + + @property + def local(self): + # type: () -> bool + raise NotImplementedError() + + @property + def in_usersite(self): + # type: () -> bool + raise NotImplementedError() + + +class BaseEnvironment: + """An environment containing distributions to introspect.""" + + @classmethod + def default(cls): + # type: () -> BaseEnvironment + raise NotImplementedError() + + @classmethod + def from_paths(cls, paths): + # type: (Optional[List[str]]) -> BaseEnvironment + raise NotImplementedError() + + def get_distribution(self, name): + # type: (str) -> Optional[BaseDistribution] + """Given a requirement name, return the installed distributions.""" + raise NotImplementedError() + + def _iter_distributions(self): + # type: () -> Iterator[BaseDistribution] + """Iterate through installed distributions. + + This function should be implemented by subclass, but never called + directly. Use the public ``iter_distribution()`` instead, which + implements additional logic to make sure the distributions are valid. + """ + raise NotImplementedError() + + def iter_distributions(self): + # type: () -> Iterator[BaseDistribution] + """Iterate through installed distributions.""" + for dist in self._iter_distributions(): + # Make sure the distribution actually comes from a valid Python + # packaging distribution. Pip's AdjacentTempDirectory leaves folders + # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The + # valid project name pattern is taken from PEP 508. + project_name_valid = re.match( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", + dist.canonical_name, + flags=re.IGNORECASE, + ) + if not project_name_valid: + logger.warning( + "Ignoring invalid distribution %s (%s)", + dist.canonical_name, + dist.location, + ) + continue + yield dist + + def iter_installed_distributions( + self, + local_only=True, # type: bool + skip=stdlib_pkgs, # type: Container[str] + include_editables=True, # type: bool + editables_only=False, # type: bool + user_only=False, # type: bool + ): + # type: (...) -> Iterator[BaseDistribution] + """Return a list of installed distributions. + + :param local_only: If True (default), only return installations + local to the current virtualenv, if in a virtualenv. + :param skip: An iterable of canonicalized project names to ignore; + defaults to ``stdlib_pkgs``. + :param include_editables: If False, don't report editables. + :param editables_only: If True, only report editables. + :param user_only: If True, only report installations in the user + site directory. 
+ """ + it = self.iter_distributions() + if local_only: + it = (d for d in it if d.local) + if not include_editables: + it = (d for d in it if not d.editable) + if editables_only: + it = (d for d in it if d.editable) + if user_only: + it = (d for d in it if d.in_usersite) + return (d for d in it if d.canonical_name not in skip) diff --git a/venv/Lib/site-packages/pip/_internal/metadata/pkg_resources.py b/venv/Lib/site-packages/pip/_internal/metadata/pkg_resources.py new file mode 100644 index 00000000..f39a39eb --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/metadata/pkg_resources.py @@ -0,0 +1,126 @@ +import zipfile +from typing import Iterator, List, Optional + +from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.utils import misc # TODO: Move definition here. +from pip._internal.utils.packaging import get_installer +from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel + +from .base import BaseDistribution, BaseEnvironment, DistributionVersion + + +class Distribution(BaseDistribution): + def __init__(self, dist): + # type: (pkg_resources.Distribution) -> None + self._dist = dist + + @classmethod + def from_wheel(cls, path, name): + # type: (str, str) -> Distribution + with zipfile.ZipFile(path, allowZip64=True) as zf: + dist = pkg_resources_distribution_for_wheel(zf, name, path) + return cls(dist) + + @property + def location(self): + # type: () -> Optional[str] + return self._dist.location + + @property + def metadata_version(self): + # type: () -> Optional[str] + for line in self._dist.get_metadata_lines(self._dist.PKG_INFO): + if line.lower().startswith("metadata-version:"): + return line.split(":", 1)[-1].strip() + return None + + @property + def canonical_name(self): + # type: () -> str + return canonicalize_name(self._dist.project_name) + + @property + def version(self): + # type: () -> DistributionVersion + return parse_version(self._dist.version) + + @property + def installer(self): + # type: () -> str + return get_installer(self._dist) + + @property + def editable(self): + # type: () -> bool + return misc.dist_is_editable(self._dist) + + @property + def local(self): + # type: () -> bool + return misc.dist_is_local(self._dist) + + @property + def in_usersite(self): + # type: () -> bool + return misc.dist_in_usersite(self._dist) + + +class Environment(BaseEnvironment): + def __init__(self, ws): + # type: (pkg_resources.WorkingSet) -> None + self._ws = ws + + @classmethod + def default(cls): + # type: () -> BaseEnvironment + return cls(pkg_resources.working_set) + + @classmethod + def from_paths(cls, paths): + # type: (Optional[List[str]]) -> BaseEnvironment + return cls(pkg_resources.WorkingSet(paths)) + + def _search_distribution(self, name): + # type: (str) -> Optional[BaseDistribution] + """Find a distribution matching the ``name`` in the environment. + + This searches from *all* distributions available in the environment, to + match the behavior of ``pkg_resources.get_distribution()``. + """ + canonical_name = canonicalize_name(name) + for dist in self.iter_distributions(): + if dist.canonical_name == canonical_name: + return dist + return None + + def get_distribution(self, name): + # type: (str) -> Optional[BaseDistribution] + + # Search the distribution by looking through the working set. 
+ dist = self._search_distribution(name) + if dist: + return dist + + # If distribution could not be found, call working_set.require to + # update the working set, and try to find the distribution again. + # This might happen for e.g. when you install a package twice, once + # using setup.py develop and again using setup.py install. Now when + # running pip uninstall twice, the package gets removed from the + # working set in the first uninstall, so we have to populate the + # working set again so that pip knows about it and the packages gets + # picked up and is successfully uninstalled the second time too. + try: + # We didn't pass in any version specifiers, so this can never + # raise pkg_resources.VersionConflict. + self._ws.require(name) + except pkg_resources.DistributionNotFound: + return None + return self._search_distribution(name) + + def _iter_distributions(self): + # type: () -> Iterator[BaseDistribution] + for dist in self._ws: + yield Distribution(dist) diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/__init__.py b/venv/Lib/site-packages/pip/_internal/models/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_internal/models/__init__.py rename to venv/Lib/site-packages/pip/_internal/models/__init__.py diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..794b13db Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-36.pyc new file mode 100644 index 00000000..a986cd04 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-36.pyc new file mode 100644 index 00000000..dab3ac4e Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-36.pyc new file mode 100644 index 00000000..368aa7ca Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-36.pyc new file mode 100644 index 00000000..bb23a4bd Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-36.pyc new file mode 100644 index 00000000..5e6a08e3 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-36.pyc new file mode 100644 index 00000000..3f2b5371 Binary files /dev/null and 
b/venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-36.pyc new file mode 100644 index 00000000..34a51688 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-36.pyc new file mode 100644 index 00000000..ad7c1be1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-36.pyc new file mode 100644 index 00000000..a4e4a718 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-36.pyc new file mode 100644 index 00000000..14db1754 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/models/candidate.py b/venv/Lib/site-packages/pip/_internal/models/candidate.py new file mode 100644 index 00000000..3b91704a --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/models/candidate.py @@ -0,0 +1,34 @@ +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.models.link import Link +from pip._internal.utils.models import KeyBasedCompareMixin + + +class InstallationCandidate(KeyBasedCompareMixin): + """Represents a potential "candidate" for installation. 
+ """ + + __slots__ = ["name", "version", "link"] + + def __init__(self, name, version, link): + # type: (str, str, Link) -> None + self.name = name + self.version = parse_version(version) + self.link = link + + super().__init__( + key=(self.name, self.version, self.link), + defining_class=InstallationCandidate + ) + + def __repr__(self): + # type: () -> str + return "".format( + self.name, self.version, self.link, + ) + + def __str__(self): + # type: () -> str + return '{!r} candidate (version {} at {})'.format( + self.name, self.version, self.link, + ) diff --git a/venv/Lib/site-packages/pip/_internal/models/direct_url.py b/venv/Lib/site-packages/pip/_internal/models/direct_url.py new file mode 100644 index 00000000..345dbaf1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/models/direct_url.py @@ -0,0 +1,233 @@ +""" PEP 610 """ +import json +import re +import urllib.parse +from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union + +__all__ = [ + "DirectUrl", + "DirectUrlValidationError", + "DirInfo", + "ArchiveInfo", + "VcsInfo", +] + +T = TypeVar("T") + +DIRECT_URL_METADATA_NAME = "direct_url.json" +ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$") + + +class DirectUrlValidationError(Exception): + pass + + +def _get(d, expected_type, key, default=None): + # type: (Dict[str, Any], Type[T], str, Optional[T]) -> Optional[T] + """Get value from dictionary and verify expected type.""" + if key not in d: + return default + value = d[key] + if not isinstance(value, expected_type): + raise DirectUrlValidationError( + "{!r} has unexpected type for {} (expected {})".format( + value, key, expected_type + ) + ) + return value + + +def _get_required(d, expected_type, key, default=None): + # type: (Dict[str, Any], Type[T], str, Optional[T]) -> T + value = _get(d, expected_type, key, default) + if value is None: + raise DirectUrlValidationError(f"{key} must have a value") + return value + + +def _exactly_one_of(infos): + # type: (Iterable[Optional[InfoType]]) -> InfoType + infos = [info for info in infos if info is not None] + if not infos: + raise DirectUrlValidationError( + "missing one of archive_info, dir_info, vcs_info" + ) + if len(infos) > 1: + raise DirectUrlValidationError( + "more than one of archive_info, dir_info, vcs_info" + ) + assert infos[0] is not None + return infos[0] + + +def _filter_none(**kwargs): + # type: (Any) -> Dict[str, Any] + """Make dict excluding None values.""" + return {k: v for k, v in kwargs.items() if v is not None} + + +class VcsInfo: + name = "vcs_info" + + def __init__( + self, + vcs, # type: str + commit_id, # type: str + requested_revision=None, # type: Optional[str] + resolved_revision=None, # type: Optional[str] + resolved_revision_type=None, # type: Optional[str] + ): + self.vcs = vcs + self.requested_revision = requested_revision + self.commit_id = commit_id + self.resolved_revision = resolved_revision + self.resolved_revision_type = resolved_revision_type + + @classmethod + def _from_dict(cls, d): + # type: (Optional[Dict[str, Any]]) -> Optional[VcsInfo] + if d is None: + return None + return cls( + vcs=_get_required(d, str, "vcs"), + commit_id=_get_required(d, str, "commit_id"), + requested_revision=_get(d, str, "requested_revision"), + resolved_revision=_get(d, str, "resolved_revision"), + resolved_revision_type=_get(d, str, "resolved_revision_type"), + ) + + def _to_dict(self): + # type: () -> Dict[str, Any] + return _filter_none( + vcs=self.vcs, + requested_revision=self.requested_revision, + 
commit_id=self.commit_id, + resolved_revision=self.resolved_revision, + resolved_revision_type=self.resolved_revision_type, + ) + + +class ArchiveInfo: + name = "archive_info" + + def __init__( + self, + hash=None, # type: Optional[str] + ): + self.hash = hash + + @classmethod + def _from_dict(cls, d): + # type: (Optional[Dict[str, Any]]) -> Optional[ArchiveInfo] + if d is None: + return None + return cls(hash=_get(d, str, "hash")) + + def _to_dict(self): + # type: () -> Dict[str, Any] + return _filter_none(hash=self.hash) + + +class DirInfo: + name = "dir_info" + + def __init__( + self, + editable=False, # type: bool + ): + self.editable = editable + + @classmethod + def _from_dict(cls, d): + # type: (Optional[Dict[str, Any]]) -> Optional[DirInfo] + if d is None: + return None + return cls( + editable=_get_required(d, bool, "editable", default=False) + ) + + def _to_dict(self): + # type: () -> Dict[str, Any] + return _filter_none(editable=self.editable or None) + + +InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] + + +class DirectUrl: + + def __init__( + self, + url, # type: str + info, # type: InfoType + subdirectory=None, # type: Optional[str] + ): + self.url = url + self.info = info + self.subdirectory = subdirectory + + def _remove_auth_from_netloc(self, netloc): + # type: (str) -> str + if "@" not in netloc: + return netloc + user_pass, netloc_no_user_pass = netloc.split("@", 1) + if ( + isinstance(self.info, VcsInfo) and + self.info.vcs == "git" and + user_pass == "git" + ): + return netloc + if ENV_VAR_RE.match(user_pass): + return netloc + return netloc_no_user_pass + + @property + def redacted_url(self): + # type: () -> str + """url with user:password part removed unless it is formed with + environment variables as specified in PEP 610, or it is ``git`` + in the case of a git URL. + """ + purl = urllib.parse.urlsplit(self.url) + netloc = self._remove_auth_from_netloc(purl.netloc) + surl = urllib.parse.urlunsplit( + (purl.scheme, netloc, purl.path, purl.query, purl.fragment) + ) + return surl + + def validate(self): + # type: () -> None + self.from_dict(self.to_dict()) + + @classmethod + def from_dict(cls, d): + # type: (Dict[str, Any]) -> DirectUrl + return DirectUrl( + url=_get_required(d, str, "url"), + subdirectory=_get(d, str, "subdirectory"), + info=_exactly_one_of( + [ + ArchiveInfo._from_dict(_get(d, dict, "archive_info")), + DirInfo._from_dict(_get(d, dict, "dir_info")), + VcsInfo._from_dict(_get(d, dict, "vcs_info")), + ] + ), + ) + + def to_dict(self): + # type: () -> Dict[str, Any] + res = _filter_none( + url=self.redacted_url, + subdirectory=self.subdirectory, + ) + res[self.info.name] = self.info._to_dict() + return res + + @classmethod + def from_json(cls, s): + # type: (str) -> DirectUrl + return cls.from_dict(json.loads(s)) + + def to_json(self): + # type: () -> str + return json.dumps(self.to_dict(), sort_keys=True) diff --git a/venv/Lib/site-packages/pip/_internal/models/format_control.py b/venv/Lib/site-packages/pip/_internal/models/format_control.py new file mode 100644 index 00000000..cf262af2 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/models/format_control.py @@ -0,0 +1,86 @@ +from typing import FrozenSet, Optional, Set + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import CommandError + + +class FormatControl: + """Helper for managing formats from which a package can be installed. 
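# Illustrative sketch, not part of the diff: how the FormatControl class defined here
# resolves the --no-binary / --only-binary flags. handle_mutual_excludes() and
# get_allowed_formats() appear a few lines further down; the package names are invented.
from pip._internal.models.format_control import FormatControl

fc = FormatControl()
# Equivalent of: --no-binary :all: --only-binary example-pkg
FormatControl.handle_mutual_excludes(":all:", fc.no_binary, fc.only_binary)
FormatControl.handle_mutual_excludes("example-pkg", fc.only_binary, fc.no_binary)

print(fc.get_allowed_formats("example-pkg"))  # frozenset({'binary'})
print(fc.get_allowed_formats("other-pkg"))    # frozenset({'source'})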
+ """ + + __slots__ = ["no_binary", "only_binary"] + + def __init__(self, no_binary=None, only_binary=None): + # type: (Optional[Set[str]], Optional[Set[str]]) -> None + if no_binary is None: + no_binary = set() + if only_binary is None: + only_binary = set() + + self.no_binary = no_binary + self.only_binary = only_binary + + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, self.__class__): + return NotImplemented + + if self.__slots__ != other.__slots__: + return False + + return all( + getattr(self, k) == getattr(other, k) + for k in self.__slots__ + ) + + def __repr__(self): + # type: () -> str + return "{}({}, {})".format( + self.__class__.__name__, + self.no_binary, + self.only_binary + ) + + @staticmethod + def handle_mutual_excludes(value, target, other): + # type: (str, Set[str], Set[str]) -> None + if value.startswith('-'): + raise CommandError( + "--no-binary / --only-binary option requires 1 argument." + ) + new = value.split(',') + while ':all:' in new: + other.clear() + target.clear() + target.add(':all:') + del new[:new.index(':all:') + 1] + # Without a none, we want to discard everything as :all: covers it + if ':none:' not in new: + return + for name in new: + if name == ':none:': + target.clear() + continue + name = canonicalize_name(name) + other.discard(name) + target.add(name) + + def get_allowed_formats(self, canonical_name): + # type: (str) -> FrozenSet[str] + result = {"binary", "source"} + if canonical_name in self.only_binary: + result.discard('source') + elif canonical_name in self.no_binary: + result.discard('binary') + elif ':all:' in self.only_binary: + result.discard('source') + elif ':all:' in self.no_binary: + result.discard('binary') + return frozenset(result) + + def disallow_binaries(self): + # type: () -> None + self.handle_mutual_excludes( + ':all:', self.no_binary, self.only_binary, + ) diff --git a/venv/Lib/site-packages/pip/_internal/models/index.py b/venv/Lib/site-packages/pip/_internal/models/index.py new file mode 100644 index 00000000..b148abb4 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/models/index.py @@ -0,0 +1,34 @@ +import urllib.parse + + +class PackageIndex: + """Represents a Package Index and provides easier access to endpoints + """ + + __slots__ = ['url', 'netloc', 'simple_url', 'pypi_url', + 'file_storage_domain'] + + def __init__(self, url, file_storage_domain): + # type: (str, str) -> None + super().__init__() + self.url = url + self.netloc = urllib.parse.urlsplit(url).netloc + self.simple_url = self._url_for_path('simple') + self.pypi_url = self._url_for_path('pypi') + + # This is part of a temporary hack used to block installs of PyPI + # packages which depend on external urls only necessary until PyPI can + # block such packages themselves + self.file_storage_domain = file_storage_domain + + def _url_for_path(self, path): + # type: (str) -> str + return urllib.parse.urljoin(self.url, path) + + +PyPI = PackageIndex( + 'https://pypi.org/', file_storage_domain='files.pythonhosted.org' +) +TestPyPI = PackageIndex( + 'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org' +) diff --git a/venv/Lib/site-packages/pip/_internal/models/link.py b/venv/Lib/site-packages/pip/_internal/models/link.py new file mode 100644 index 00000000..86d0be40 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/models/link.py @@ -0,0 +1,248 @@ +import os +import posixpath +import re +import urllib.parse +from typing import TYPE_CHECKING, Optional, Tuple, Union + +from 
pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + redact_auth_from_url, + split_auth_from_netloc, + splitext, +) +from pip._internal.utils.models import KeyBasedCompareMixin +from pip._internal.utils.urls import path_to_url, url_to_path + +if TYPE_CHECKING: + from pip._internal.index.collector import HTMLPage + + +class Link(KeyBasedCompareMixin): + """Represents a parsed link from a Package Index's simple URL + """ + + __slots__ = [ + "_parsed_url", + "_url", + "comes_from", + "requires_python", + "yanked_reason", + "cache_link_parsing", + ] + + def __init__( + self, + url, # type: str + comes_from=None, # type: Optional[Union[str, HTMLPage]] + requires_python=None, # type: Optional[str] + yanked_reason=None, # type: Optional[str] + cache_link_parsing=True, # type: bool + ): + # type: (...) -> None + """ + :param url: url of the resource pointed to (href of the link) + :param comes_from: instance of HTMLPage where the link was found, + or string. + :param requires_python: String containing the `Requires-Python` + metadata field, specified in PEP 345. This may be specified by + a data-requires-python attribute in the HTML link tag, as + described in PEP 503. + :param yanked_reason: the reason the file has been yanked, if the + file has been yanked, or None if the file hasn't been yanked. + This is the value of the "data-yanked" attribute, if present, in + a simple repository HTML link. If the file has been yanked but + no reason was provided, this should be the empty string. See + PEP 592 for more information and the specification. + :param cache_link_parsing: A flag that is used elsewhere to determine + whether resources retrieved from this link + should be cached. PyPI index urls should + generally have this set to False, for + example. + """ + + # url can be a UNC windows share + if url.startswith('\\\\'): + url = path_to_url(url) + + self._parsed_url = urllib.parse.urlsplit(url) + # Store the url as a private attribute to prevent accidentally + # trying to set a new value. + self._url = url + + self.comes_from = comes_from + self.requires_python = requires_python if requires_python else None + self.yanked_reason = yanked_reason + + super().__init__(key=url, defining_class=Link) + + self.cache_link_parsing = cache_link_parsing + + def __str__(self): + # type: () -> str + if self.requires_python: + rp = f' (requires-python:{self.requires_python})' + else: + rp = '' + if self.comes_from: + return '{} (from {}){}'.format( + redact_auth_from_url(self._url), self.comes_from, rp) + else: + return redact_auth_from_url(str(self._url)) + + def __repr__(self): + # type: () -> str + return f'' + + @property + def url(self): + # type: () -> str + return self._url + + @property + def filename(self): + # type: () -> str + path = self.path.rstrip('/') + name = posixpath.basename(path) + if not name: + # Make sure we don't leak auth information if the netloc + # includes a username and password. + netloc, user_pass = split_auth_from_netloc(self.netloc) + return netloc + + name = urllib.parse.unquote(name) + assert name, f'URL {self._url!r} produced no filename' + return name + + @property + def file_path(self): + # type: () -> str + return url_to_path(self.url) + + @property + def scheme(self): + # type: () -> str + return self._parsed_url.scheme + + @property + def netloc(self): + # type: () -> str + """ + This can contain auth information. 
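# Illustrative sketch, not part of the diff: the Link properties defined above and below
# pull the filename, hash and wheel-ness straight out of an index URL. The URL is invented.
from pip._internal.models.link import Link

link = Link(
    "https://files.example.invalid/packages/example_pkg-1.0-py3-none-any.whl"
    "#sha256=" + 64 * "a"
)
print(link.filename)              # example_pkg-1.0-py3-none-any.whl
print(link.is_wheel)              # True  (extension equals WHEEL_EXTENSION)
print(link.hash_name, link.hash)  # sha256 aaaa...a
print(link.url_without_fragment)  # same URL with the '#sha256=...' fragment dropped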
+ """ + return self._parsed_url.netloc + + @property + def path(self): + # type: () -> str + return urllib.parse.unquote(self._parsed_url.path) + + def splitext(self): + # type: () -> Tuple[str, str] + return splitext(posixpath.basename(self.path.rstrip('/'))) + + @property + def ext(self): + # type: () -> str + return self.splitext()[1] + + @property + def url_without_fragment(self): + # type: () -> str + scheme, netloc, path, query, fragment = self._parsed_url + return urllib.parse.urlunsplit((scheme, netloc, path, query, None)) + + _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)') + + @property + def egg_fragment(self): + # type: () -> Optional[str] + match = self._egg_fragment_re.search(self._url) + if not match: + return None + return match.group(1) + + _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)') + + @property + def subdirectory_fragment(self): + # type: () -> Optional[str] + match = self._subdirectory_fragment_re.search(self._url) + if not match: + return None + return match.group(1) + + _hash_re = re.compile( + r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)' + ) + + @property + def hash(self): + # type: () -> Optional[str] + match = self._hash_re.search(self._url) + if match: + return match.group(2) + return None + + @property + def hash_name(self): + # type: () -> Optional[str] + match = self._hash_re.search(self._url) + if match: + return match.group(1) + return None + + @property + def show_url(self): + # type: () -> str + return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0]) + + @property + def is_file(self): + # type: () -> bool + return self.scheme == 'file' + + def is_existing_dir(self): + # type: () -> bool + return self.is_file and os.path.isdir(self.file_path) + + @property + def is_wheel(self): + # type: () -> bool + return self.ext == WHEEL_EXTENSION + + @property + def is_vcs(self): + # type: () -> bool + from pip._internal.vcs import vcs + + return self.scheme in vcs.all_schemes + + @property + def is_yanked(self): + # type: () -> bool + return self.yanked_reason is not None + + @property + def has_hash(self): + # type: () -> bool + return self.hash_name is not None + + def is_hash_allowed(self, hashes): + # type: (Optional[Hashes]) -> bool + """ + Return True if the link has a hash and it is allowed. + """ + if hashes is None or not self.has_hash: + return False + # Assert non-None so mypy knows self.hash_name and self.hash are str. + assert self.hash_name is not None + assert self.hash is not None + + return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash) + + +# TODO: Relax this comparison logic to ignore, for example, fragments. +def links_equivalent(link1, link2): + # type: (Link, Link) -> bool + return link1 == link2 diff --git a/venv/Lib/site-packages/pip/_internal/models/scheme.py b/venv/Lib/site-packages/pip/_internal/models/scheme.py new file mode 100644 index 00000000..697cd19b --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/models/scheme.py @@ -0,0 +1,31 @@ +""" +For types associated with installation schemes. + +For a general overview of available schemes and their context, see +https://docs.python.org/3/install/index.html#alternate-installation. +""" + + +SCHEME_KEYS = ['platlib', 'purelib', 'headers', 'scripts', 'data'] + + +class Scheme: + """A Scheme holds paths which are used as the base directories for + artifacts associated with a Python package. 
+ """ + + __slots__ = SCHEME_KEYS + + def __init__( + self, + platlib, # type: str + purelib, # type: str + headers, # type: str + scripts, # type: str + data, # type: str + ): + self.platlib = platlib + self.purelib = purelib + self.headers = headers + self.scripts = scripts + self.data = data diff --git a/venv/Lib/site-packages/pip/_internal/models/search_scope.py b/venv/Lib/site-packages/pip/_internal/models/search_scope.py new file mode 100644 index 00000000..a3f0a5c0 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/models/search_scope.py @@ -0,0 +1,131 @@ +import itertools +import logging +import os +import posixpath +import urllib.parse +from typing import List + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.models.index import PyPI +from pip._internal.utils.compat import has_tls +from pip._internal.utils.misc import normalize_path, redact_auth_from_url + +logger = logging.getLogger(__name__) + + +class SearchScope: + + """ + Encapsulates the locations that pip is configured to search. + """ + + __slots__ = ["find_links", "index_urls"] + + @classmethod + def create( + cls, + find_links, # type: List[str] + index_urls, # type: List[str] + ): + # type: (...) -> SearchScope + """ + Create a SearchScope object after normalizing the `find_links`. + """ + # Build find_links. If an argument starts with ~, it may be + # a local file relative to a home directory. So try normalizing + # it and if it exists, use the normalized version. + # This is deliberately conservative - it might be fine just to + # blindly normalize anything starting with a ~... + built_find_links = [] # type: List[str] + for link in find_links: + if link.startswith('~'): + new_link = normalize_path(link) + if os.path.exists(new_link): + link = new_link + built_find_links.append(link) + + # If we don't have TLS enabled, then WARN if anyplace we're looking + # relies on TLS. + if not has_tls(): + for link in itertools.chain(index_urls, built_find_links): + parsed = urllib.parse.urlparse(link) + if parsed.scheme == 'https': + logger.warning( + 'pip is configured with locations that require ' + 'TLS/SSL, however the ssl module in Python is not ' + 'available.' + ) + break + + return cls( + find_links=built_find_links, + index_urls=index_urls, + ) + + def __init__( + self, + find_links, # type: List[str] + index_urls, # type: List[str] + ): + # type: (...) -> None + self.find_links = find_links + self.index_urls = index_urls + + def get_formatted_locations(self): + # type: () -> str + lines = [] + redacted_index_urls = [] + if self.index_urls and self.index_urls != [PyPI.simple_url]: + for url in self.index_urls: + + redacted_index_url = redact_auth_from_url(url) + + # Parse the URL + purl = urllib.parse.urlsplit(redacted_index_url) + + # URL is generally invalid if scheme and netloc is missing + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. 
Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not purl.scheme and not purl.netloc: + logger.warning( + 'The index url "%s" seems invalid, ' + 'please provide a scheme.', redacted_index_url) + + redacted_index_urls.append(redacted_index_url) + + lines.append('Looking in indexes: {}'.format( + ', '.join(redacted_index_urls))) + + if self.find_links: + lines.append( + 'Looking in links: {}'.format(', '.join( + redact_auth_from_url(url) for url in self.find_links)) + ) + return '\n'.join(lines) + + def get_index_urls_locations(self, project_name): + # type: (str) -> List[str] + """Returns the locations found via self.index_urls + + Checks the url_name on the main (first in the list) index and + use this url_name to produce all locations + """ + + def mkurl_pypi_url(url): + # type: (str) -> str + loc = posixpath.join( + url, + urllib.parse.quote(canonicalize_name(project_name))) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's + # behavior. + if not loc.endswith('/'): + loc = loc + '/' + return loc + + return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/env/lib/python2.7/site-packages/pip/_internal/models/selection_prefs.py b/venv/Lib/site-packages/pip/_internal/models/selection_prefs.py similarity index 86% rename from env/lib/python2.7/site-packages/pip/_internal/models/selection_prefs.py rename to venv/Lib/site-packages/pip/_internal/models/selection_prefs.py index f58fdce9..edc1cf79 100644 --- a/env/lib/python2.7/site-packages/pip/_internal/models/selection_prefs.py +++ b/venv/Lib/site-packages/pip/_internal/models/selection_prefs.py @@ -1,17 +1,17 @@ -from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from typing import Optional -if MYPY_CHECK_RUNNING: - from typing import Optional - from pip._internal.models.format_control import FormatControl +from pip._internal.models.format_control import FormatControl -class SelectionPreferences(object): - +class SelectionPreferences: """ Encapsulates the candidate selection preferences for downloading and installing files. """ + __slots__ = ['allow_yanked', 'allow_all_prereleases', 'format_control', + 'prefer_binary', 'ignore_requires_python'] + # Don't include an allow_yanked default value to make sure each call # site considers whether yanked releases are allowed. This also causes # that decision to be made explicit in the calling code, which helps diff --git a/venv/Lib/site-packages/pip/_internal/models/target_python.py b/venv/Lib/site-packages/pip/_internal/models/target_python.py new file mode 100644 index 00000000..b91e349f --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/models/target_python.py @@ -0,0 +1,114 @@ +import sys +from typing import List, Optional, Tuple + +from pip._vendor.packaging.tags import Tag + +from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot +from pip._internal.utils.misc import normalize_version_info + + +class TargetPython: + + """ + Encapsulates the properties of a Python interpreter one is targeting + for a package install, download, etc. 
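# Illustrative sketch, not part of the diff: TargetPython (defined here) is how pip
# describes an interpreter other than the running one, e.g. for a cross-platform
# download. The concrete values are invented; get_tags() returns the acceptable
# PEP 425 tags, most preferred first.
from pip._internal.models.target_python import TargetPython

target = TargetPython(
    platforms=["manylinux2014_x86_64"],
    py_version_info=(3, 9),
    implementation="cp",
)
print(target.format_given())
# platforms=['manylinux2014_x86_64'] version_info='3.9' implementation='cp'
tags = target.get_tags()  # e.g. cp39-cp39-manylinux2014_x86_64 ... down to py3-none-any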
+ """ + + __slots__ = [ + "_given_py_version_info", + "abis", + "implementation", + "platforms", + "py_version", + "py_version_info", + "_valid_tags", + ] + + def __init__( + self, + platforms=None, # type: Optional[List[str]] + py_version_info=None, # type: Optional[Tuple[int, ...]] + abis=None, # type: Optional[List[str]] + implementation=None, # type: Optional[str] + ): + # type: (...) -> None + """ + :param platforms: A list of strings or None. If None, searches for + packages that are supported by the current system. Otherwise, will + find packages that can be built on the platforms passed in. These + packages will only be downloaded for distribution: they will + not be built locally. + :param py_version_info: An optional tuple of ints representing the + Python version information to use (e.g. `sys.version_info[:3]`). + This can have length 1, 2, or 3 when provided. + :param abis: A list of strings or None. This is passed to + compatibility_tags.py's get_supported() function as is. + :param implementation: A string or None. This is passed to + compatibility_tags.py's get_supported() function as is. + """ + # Store the given py_version_info for when we call get_supported(). + self._given_py_version_info = py_version_info + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + py_version = '.'.join(map(str, py_version_info[:2])) + + self.abis = abis + self.implementation = implementation + self.platforms = platforms + self.py_version = py_version + self.py_version_info = py_version_info + + # This is used to cache the return value of get_tags(). + self._valid_tags = None # type: Optional[List[Tag]] + + def format_given(self): + # type: () -> str + """ + Format the given, non-None attributes for display. + """ + display_version = None + if self._given_py_version_info is not None: + display_version = '.'.join( + str(part) for part in self._given_py_version_info + ) + + key_values = [ + ('platforms', self.platforms), + ('version_info', display_version), + ('abis', self.abis), + ('implementation', self.implementation), + ] + return ' '.join( + f'{key}={value!r}' for key, value in key_values + if value is not None + ) + + def get_tags(self): + # type: () -> List[Tag] + """ + Return the supported PEP 425 tags to check wheel candidates against. + + The tags are returned in order of preference (most preferred first). + """ + if self._valid_tags is None: + # Pass versions=None if no py_version_info was given since + # versions=None uses special default logic. + py_version_info = self._given_py_version_info + if py_version_info is None: + version = None + else: + version = version_info_to_nodot(py_version_info) + + tags = get_supported( + version=version, + platforms=self.platforms, + abis=self.abis, + impl=self.implementation, + ) + self._valid_tags = tags + + return self._valid_tags diff --git a/venv/Lib/site-packages/pip/_internal/models/wheel.py b/venv/Lib/site-packages/pip/_internal/models/wheel.py new file mode 100644 index 00000000..0a582b30 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/models/wheel.py @@ -0,0 +1,95 @@ +"""Represents a wheel file and provides access to the various parts of the +name that have meaning. 
+""" +import re +from typing import Dict, Iterable, List + +from pip._vendor.packaging.tags import Tag + +from pip._internal.exceptions import InvalidWheelFilename + + +class Wheel: + """A wheel file""" + + wheel_file_re = re.compile( + r"""^(?P(?P.+?)-(?P.*?)) + ((-(?P\d[^-]*?))?-(?P.+?)-(?P.+?)-(?P.+?) + \.whl|\.dist-info)$""", + re.VERBOSE + ) + + def __init__(self, filename): + # type: (str) -> None + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename( + f"{filename} is not a valid wheel filename." + ) + self.filename = filename + self.name = wheel_info.group('name').replace('_', '-') + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group('ver').replace('_', '-') + self.build_tag = wheel_info.group('build') + self.pyversions = wheel_info.group('pyver').split('.') + self.abis = wheel_info.group('abi').split('.') + self.plats = wheel_info.group('plat').split('.') + + # All the tag combinations from this file + self.file_tags = { + Tag(x, y, z) for x in self.pyversions + for y in self.abis for z in self.plats + } + + def get_formatted_file_tags(self): + # type: () -> List[str] + """Return the wheel's tags as a sorted list of strings.""" + return sorted(str(tag) for tag in self.file_tags) + + def support_index_min(self, tags): + # type: (List[Tag]) -> int + """Return the lowest index that one of the wheel's file_tag combinations + achieves in the given list of supported tags. + + For example, if there are 8 supported tags and one of the file tags + is first in the list, then return 0. + + :param tags: the PEP 425 tags to check the wheel against, in order + with most preferred first. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min(tags.index(tag) for tag in self.file_tags if tag in tags) + + def find_most_preferred_tag(self, tags, tag_to_priority): + # type: (List[Tag], Dict[Tag, int]) -> int + """Return the priority of the most preferred tag that one of the wheel's file + tag combinations acheives in the given list of supported tags using the given + tag_to_priority mapping, where lower priorities are more-preferred. + + This is used in place of support_index_min in some cases in order to avoid + an expensive linear scan of a large list of tags. + + :param tags: the PEP 425 tags to check the wheel against. + :param tag_to_priority: a mapping from tag to priority of that tag, where + lower is more preferred. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min( + tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority + ) + + def supported(self, tags): + # type: (Iterable[Tag]) -> bool + """Return whether the wheel is compatible with one of the given tags. + + :param tags: the PEP 425 tags to check the wheel against. + """ + return not self.file_tags.isdisjoint(tags) diff --git a/venv/Lib/site-packages/pip/_internal/network/__init__.py b/venv/Lib/site-packages/pip/_internal/network/__init__.py new file mode 100644 index 00000000..b51bde91 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/network/__init__.py @@ -0,0 +1,2 @@ +"""Contains purely network-related utilities. 
+""" diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..03516937 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-36.pyc new file mode 100644 index 00000000..7155cad5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-36.pyc new file mode 100644 index 00000000..1bfc660c Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-36.pyc new file mode 100644 index 00000000..742cf980 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-36.pyc new file mode 100644 index 00000000..53cb17ab Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-36.pyc new file mode 100644 index 00000000..2430cdb5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-36.pyc new file mode 100644 index 00000000..3d93fdb8 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-36.pyc new file mode 100644 index 00000000..bdd70a94 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/network/auth.py b/venv/Lib/site-packages/pip/_internal/network/auth.py new file mode 100644 index 00000000..bd54a5cb --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/network/auth.py @@ -0,0 +1,312 @@ +"""Network Authentication Helpers + +Contains interface (MultiDomainBasicAuth) and associated glue code for +providing credentials in the context of network requests. 
+""" + +import logging +import urllib.parse +from typing import Any, Dict, List, Optional, Tuple + +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.models import Request, Response +from pip._vendor.requests.utils import get_netrc_auth + +from pip._internal.utils.misc import ( + ask, + ask_input, + ask_password, + remove_auth_from_url, + split_auth_netloc_from_url, +) +from pip._internal.vcs.versioncontrol import AuthInfo + +logger = logging.getLogger(__name__) + +Credentials = Tuple[str, str, str] + +try: + import keyring +except ImportError: + keyring = None +except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", str(exc), + ) + keyring = None + + +def get_keyring_auth(url, username): + # type: (Optional[str], Optional[str]) -> Optional[AuthInfo] + """Return the tuple auth for a given url from keyring.""" + global keyring + if not url or not keyring: + return None + + try: + try: + get_credential = keyring.get_credential + except AttributeError: + pass + else: + logger.debug("Getting credentials from keyring for %s", url) + cred = get_credential(url, username) + if cred is not None: + return cred.username, cred.password + return None + + if username: + logger.debug("Getting password from keyring for %s", url) + password = keyring.get_password(url, username) + if password: + return username, password + + except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", str(exc), + ) + keyring = None + return None + + +class MultiDomainBasicAuth(AuthBase): + + def __init__(self, prompting=True, index_urls=None): + # type: (bool, Optional[List[str]]) -> None + self.prompting = prompting + self.index_urls = index_urls + self.passwords = {} # type: Dict[str, AuthInfo] + # When the user is prompted to enter credentials and keyring is + # available, we will offer to save them. If the user accepts, + # this value is set to the credentials they entered. After the + # request authenticates, the caller should call + # ``save_credentials`` to save these. + self._credentials_to_save = None # type: Optional[Credentials] + + def _get_index_url(self, url): + # type: (str) -> Optional[str] + """Return the original index URL matching the requested URL. + + Cached or dynamically generated credentials may work against + the original index URL rather than just the netloc. + + The provided url should have had its username and password + removed already. If the original index url had credentials then + they will be included in the return value. + + Returns None if no matching index was found, or if --no-index + was specified by the user. + """ + if not url or not self.index_urls: + return None + + for u in self.index_urls: + prefix = remove_auth_from_url(u).rstrip("/") + "/" + if url.startswith(prefix): + return u + return None + + def _get_new_credentials(self, original_url, allow_netrc=True, + allow_keyring=False): + # type: (str, bool, bool) -> AuthInfo + """Find and return credentials for the specified URL.""" + # Split the credentials and netloc from the url. 
+ url, netloc, url_user_password = split_auth_netloc_from_url( + original_url, + ) + + # Start with the credentials embedded in the url + username, password = url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in url for %s", netloc) + return url_user_password + + # Find a matching index url for this request + index_url = self._get_index_url(url) + if index_url: + # Split the credentials from the url. + index_info = split_auth_netloc_from_url(index_url) + if index_info: + index_url, _, index_url_user_password = index_info + logger.debug("Found index url %s", index_url) + + # If an index URL was found, try its embedded credentials + if index_url and index_url_user_password[0] is not None: + username, password = index_url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in index url for %s", netloc) + return index_url_user_password + + # Get creds from netrc if we still don't have them + if allow_netrc: + netrc_auth = get_netrc_auth(original_url) + if netrc_auth: + logger.debug("Found credentials in netrc for %s", netloc) + return netrc_auth + + # If we don't have a password and keyring is available, use it. + if allow_keyring: + # The index url is more specific than the netloc, so try it first + kr_auth = ( + get_keyring_auth(index_url, username) or + get_keyring_auth(netloc, username) + ) + if kr_auth: + logger.debug("Found credentials in keyring for %s", netloc) + return kr_auth + + return username, password + + def _get_url_and_credentials(self, original_url): + # type: (str) -> Tuple[str, Optional[str], Optional[str]] + """Return the credentials to use for the provided URL. + + If allowed, netrc and keyring may be used to obtain the + correct credentials. + + Returns (url_without_credentials, username, password). Note + that even if the original URL contains credentials, this + function may return a different username and password. + """ + url, netloc, _ = split_auth_netloc_from_url(original_url) + + # Use any stored credentials that we have for this netloc + username, password = self.passwords.get(netloc, (None, None)) + + if username is None and password is None: + # No stored credentials. Acquire new credentials without prompting + # the user. (e.g. from netrc, keyring, or the URL itself) + username, password = self._get_new_credentials(original_url) + + if username is not None or password is not None: + # Convert the username and password if they're None, so that + # this netloc will show up as "cached" in the conditional above. + # Further, HTTPBasicAuth doesn't accept None, so it makes sense to + # cache the value that is going to be used. + username = username or "" + password = password or "" + + # Store any acquired credentials. 
+ self.passwords[netloc] = (username, password) + + assert ( + # Credentials were found + (username is not None and password is not None) or + # Credentials were not found + (username is None and password is None) + ), f"Could not load credentials from url: {original_url}" + + return url, username, password + + def __call__(self, req): + # type: (Request) -> Request + # Get credentials for this request + url, username, password = self._get_url_and_credentials(req.url) + + # Set the url of the request to the url without any credentials + req.url = url + + if username is not None and password is not None: + # Send the basic auth with this request + req = HTTPBasicAuth(username, password)(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + # Factored out to allow for easy patching in tests + def _prompt_for_password(self, netloc): + # type: (str) -> Tuple[Optional[str], Optional[str], bool] + username = ask_input(f"User for {netloc}: ") + if not username: + return None, None, False + auth = get_keyring_auth(netloc, username) + if auth and auth[0] is not None and auth[1] is not None: + return auth[0], auth[1], False + password = ask_password("Password: ") + return username, password, True + + # Factored out to allow for easy patching in tests + def _should_save_password_to_keyring(self): + # type: () -> bool + if not keyring: + return False + return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" + + def handle_401(self, resp, **kwargs): + # type: (Response, **Any) -> Response + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simply return the response + if not self.prompting: + return resp + + parsed = urllib.parse.urlparse(resp.url) + + # Query the keyring for credentials: + username, password = self._get_new_credentials(resp.url, + allow_netrc=False, + allow_keyring=True) + + # Prompt the user for a new username and password + save = False + if not username and not password: + username, password, save = self._prompt_for_password(parsed.netloc) + + # Store the new username and password to use for future requests + self._credentials_to_save = None + if username is not None and password is not None: + self.passwords[parsed.netloc] = (username, password) + + # Prompt to save the password to keyring + if save and self._should_save_password_to_keyring(): + self._credentials_to_save = (parsed.netloc, username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + req.register_hook("response", self.warn_on_401) + + # On successful request, save the credentials that were used to + # keyring. (Note that if the user responded "no" above, this member + # is not set and nothing will be saved.) 
+ if self._credentials_to_save: + req.register_hook("response", self.save_credentials) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def warn_on_401(self, resp, **kwargs): + # type: (Response, **Any) -> None + """Response callback to warn about incorrect credentials.""" + if resp.status_code == 401: + logger.warning( + '401 Error, Credentials not correct for %s', resp.request.url, + ) + + def save_credentials(self, resp, **kwargs): + # type: (Response, **Any) -> None + """Response callback to save credentials on success.""" + assert keyring is not None, "should never reach here without keyring" + if not keyring: + return + + creds = self._credentials_to_save + self._credentials_to_save = None + if creds and resp.status_code < 400: + try: + logger.info('Saving credentials to keyring') + keyring.set_password(*creds) + except Exception: + logger.exception('Failed to save credentials') diff --git a/venv/Lib/site-packages/pip/_internal/network/cache.py b/venv/Lib/site-packages/pip/_internal/network/cache.py new file mode 100644 index 00000000..ce08932a --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/network/cache.py @@ -0,0 +1,76 @@ +"""HTTP cache implementation. +""" + +import os +from contextlib import contextmanager +from typing import Iterator, Optional + +from pip._vendor.cachecontrol.cache import BaseCache +from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.requests.models import Response + +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import ensure_dir + + +def is_from_cache(response): + # type: (Response) -> bool + return getattr(response, "from_cache", False) + + +@contextmanager +def suppressed_cache_errors(): + # type: () -> Iterator[None] + """If we can't access the cache then we can just skip caching and process + requests as if caching wasn't enabled. + """ + try: + yield + except OSError: + pass + + +class SafeFileCache(BaseCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + """ + + def __init__(self, directory): + # type: (str) -> None + assert directory is not None, "Cache directory must not be None." + super().__init__() + self.directory = directory + + def _get_cache_path(self, name): + # type: (str) -> str + # From cachecontrol.caches.file_cache.FileCache._fn, brought into our + # class for backwards-compatibility and to avoid using a non-public + # method. + hashed = FileCache.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + # type: (str) -> Optional[bytes] + path = self._get_cache_path(key) + with suppressed_cache_errors(): + with open(path, 'rb') as f: + return f.read() + + def set(self, key, value): + # type: (str, bytes) -> None + path = self._get_cache_path(key) + with suppressed_cache_errors(): + ensure_dir(os.path.dirname(path)) + + with adjacent_tmp_file(path) as f: + f.write(value) + + replace(f.name, path) + + def delete(self, key): + # type: (str) -> None + path = self._get_cache_path(key) + with suppressed_cache_errors(): + os.remove(path) diff --git a/venv/Lib/site-packages/pip/_internal/network/download.py b/venv/Lib/site-packages/pip/_internal/network/download.py new file mode 100644 index 00000000..1897d99a --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/network/download.py @@ -0,0 +1,196 @@ +"""Download files with progress indicators. 
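# Illustrative sketch, not part of the diff: the Downloader class defined below is a
# callable that streams a Link into an existing directory and returns the saved path
# plus the response Content-Type. The URL and directory are invented, and "on" is
# assumed to be an accepted progress-bar style.
from pip._internal.models.link import Link
from pip._internal.network.download import Downloader
from pip._internal.network.session import PipSession

download = Downloader(PipSession(), progress_bar="on")
filepath, content_type = download(
    Link("https://files.example.invalid/example_pkg-1.0-py3-none-any.whl"),
    "/tmp/downloads",
)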
+""" +import cgi +import logging +import mimetypes +import os +from typing import Iterable, Optional, Tuple + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.cli.progress_bars import DownloadProgressProvider +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.index import PyPI +from pip._internal.models.link import Link +from pip._internal.network.cache import is_from_cache +from pip._internal.network.session import PipSession +from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks +from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext + +logger = logging.getLogger(__name__) + + +def _get_http_response_size(resp): + # type: (Response) -> Optional[int] + try: + return int(resp.headers['content-length']) + except (ValueError, KeyError, TypeError): + return None + + +def _prepare_download( + resp, # type: Response + link, # type: Link + progress_bar # type: str +): + # type: (...) -> Iterable[bytes] + total_length = _get_http_response_size(resp) + + if link.netloc == PyPI.file_storage_domain: + url = link.show_url + else: + url = link.url_without_fragment + + logged_url = redact_auth_from_url(url) + + if total_length: + logged_url = '{} ({})'.format(logged_url, format_size(total_length)) + + if is_from_cache(resp): + logger.info("Using cached %s", logged_url) + else: + logger.info("Downloading %s", logged_url) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif is_from_cache(resp): + show_progress = False + elif not total_length: + show_progress = True + elif total_length > (40 * 1000): + show_progress = True + else: + show_progress = False + + chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) + + if not show_progress: + return chunks + + return DownloadProgressProvider( + progress_bar, max=total_length + )(chunks) + + +def sanitize_content_filename(filename): + # type: (str) -> str + """ + Sanitize the "filename" value from a Content-Disposition header. + """ + return os.path.basename(filename) + + +def parse_content_disposition(content_disposition, default_filename): + # type: (str, str) -> str + """ + Parse the "filename" value from a Content-Disposition header, and + return the default filename if the result is empty. + """ + _type, params = cgi.parse_header(content_disposition) + filename = params.get('filename') + if filename: + # We need to sanitize the filename to prevent directory traversal + # in case the filename contains ".." path parts. + filename = sanitize_content_filename(filename) + return filename or default_filename + + +def _get_http_response_filename(resp, link): + # type: (Response, Link) -> str + """Get an ideal filename from the given HTTP response, falling back to + the link filename if not provided. 
+ """ + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get('content-disposition') + if content_disposition: + filename = parse_content_disposition(content_disposition, filename) + ext = splitext(filename)[1] # type: Optional[str] + if not ext: + ext = mimetypes.guess_extension( + resp.headers.get('content-type', '') + ) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + return filename + + +def _http_get_download(session, link): + # type: (PipSession, Link) -> Response + target_url = link.url.split('#', 1)[0] + resp = session.get(target_url, headers=HEADERS, stream=True) + raise_for_status(resp) + return resp + + +class Downloader: + def __init__( + self, + session, # type: PipSession + progress_bar, # type: str + ): + # type: (...) -> None + self._session = session + self._progress_bar = progress_bar + + def __call__(self, link, location): + # type: (Link, str) -> Tuple[str, str] + """Download the file given by link into location.""" + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", e.response.status_code, link + ) + raise + + filename = _get_http_response_filename(resp, link) + filepath = os.path.join(location, filename) + + chunks = _prepare_download(resp, link, self._progress_bar) + with open(filepath, 'wb') as content_file: + for chunk in chunks: + content_file.write(chunk) + content_type = resp.headers.get('Content-Type', '') + return filepath, content_type + + +class BatchDownloader: + + def __init__( + self, + session, # type: PipSession + progress_bar, # type: str + ): + # type: (...) 
-> None + self._session = session + self._progress_bar = progress_bar + + def __call__(self, links, location): + # type: (Iterable[Link], str) -> Iterable[Tuple[Link, Tuple[str, str]]] + """Download the files given by links into location.""" + for link in links: + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", + e.response.status_code, link, + ) + raise + + filename = _get_http_response_filename(resp, link) + filepath = os.path.join(location, filename) + + chunks = _prepare_download(resp, link, self._progress_bar) + with open(filepath, 'wb') as content_file: + for chunk in chunks: + content_file.write(chunk) + content_type = resp.headers.get('Content-Type', '') + yield link, (filepath, content_type) diff --git a/venv/Lib/site-packages/pip/_internal/network/lazy_wheel.py b/venv/Lib/site-packages/pip/_internal/network/lazy_wheel.py new file mode 100644 index 00000000..b877d3b7 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/network/lazy_wheel.py @@ -0,0 +1,224 @@ +"""Lazy ZIP over HTTP""" + +__all__ = ['HTTPRangeRequestUnsupported', 'dist_from_wheel_url'] + +from bisect import bisect_left, bisect_right +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any, Dict, Iterator, List, Optional, Tuple +from zipfile import BadZipfile, ZipFile + +from pip._vendor.pkg_resources import Distribution +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.network.session import PipSession +from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks +from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel + + +class HTTPRangeRequestUnsupported(Exception): + pass + + +def dist_from_wheel_url(name, url, session): + # type: (str, str, PipSession) -> Distribution + """Return a pkg_resources.Distribution from the given wheel URL. + + This uses HTTP range requests to only fetch the potion of the wheel + containing metadata, just enough for the object to be constructed. + If such requests are not supported, HTTPRangeRequestUnsupported + is raised. + """ + with LazyZipOverHTTP(url, session) as wheel: + # For read-only ZIP files, ZipFile only needs methods read, + # seek, seekable and tell, not the whole IO protocol. + zip_file = ZipFile(wheel) # type: ignore + # After context manager exit, wheel.name + # is an invalid file by intention. + return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name) + + +class LazyZipOverHTTP: + """File-like object mapped to a ZIP file over HTTP. + + This uses HTTP range requests to lazily fetch the file's content, + which is supposed to be fed to ZipFile. If such requests are not + supported by the server, raise HTTPRangeRequestUnsupported + during initialization. 
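# Illustrative sketch, not part of the diff: dist_from_wheel_url() (defined above) reads a
# remote wheel's metadata through the LazyZipOverHTTP file object described here, fetching
# only the byte ranges it needs. The URL is invented.
from pip._internal.network.lazy_wheel import (
    HTTPRangeRequestUnsupported,
    dist_from_wheel_url,
)
from pip._internal.network.session import PipSession

try:
    dist = dist_from_wheel_url(
        "example-pkg",
        "https://files.example.invalid/example_pkg-1.0-py3-none-any.whl",
        PipSession(),
    )
    print(dist.version)  # metadata only; the full wheel is never downloaded
except HTTPRangeRequestUnsupported:
    print("server rejected range requests; a full download would be needed")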
+ """ + + def __init__(self, url, session, chunk_size=CONTENT_CHUNK_SIZE): + # type: (str, PipSession, int) -> None + head = session.head(url, headers=HEADERS) + raise_for_status(head) + assert head.status_code == 200 + self._session, self._url, self._chunk_size = session, url, chunk_size + self._length = int(head.headers['Content-Length']) + self._file = NamedTemporaryFile() + self.truncate(self._length) + self._left = [] # type: List[int] + self._right = [] # type: List[int] + if 'bytes' not in head.headers.get('Accept-Ranges', 'none'): + raise HTTPRangeRequestUnsupported('range request is not supported') + self._check_zip() + + @property + def mode(self): + # type: () -> str + """Opening mode, which is always rb.""" + return 'rb' + + @property + def name(self): + # type: () -> str + """Path to the underlying file.""" + return self._file.name + + def seekable(self): + # type: () -> bool + """Return whether random access is supported, which is True.""" + return True + + def close(self): + # type: () -> None + """Close the file.""" + self._file.close() + + @property + def closed(self): + # type: () -> bool + """Whether the file is closed.""" + return self._file.closed + + def read(self, size=-1): + # type: (int) -> bytes + """Read up to size bytes from the object and return them. + + As a convenience, if size is unspecified or -1, + all bytes until EOF are returned. Fewer than + size bytes may be returned if EOF is reached. + """ + download_size = max(size, self._chunk_size) + start, length = self.tell(), self._length + stop = length if size < 0 else min(start+download_size, length) + start = max(0, stop-download_size) + self._download(start, stop-1) + return self._file.read(size) + + def readable(self): + # type: () -> bool + """Return whether the file is readable, which is True.""" + return True + + def seek(self, offset, whence=0): + # type: (int, int) -> int + """Change stream position and return the new absolute position. + + Seek to offset relative position indicated by whence: + * 0: Start of stream (the default). pos should be >= 0; + * 1: Current position - pos may be negative; + * 2: End of stream - pos usually negative. + """ + return self._file.seek(offset, whence) + + def tell(self): + # type: () -> int + """Return the current possition.""" + return self._file.tell() + + def truncate(self, size=None): + # type: (Optional[int]) -> int + """Resize the stream to the given size in bytes. + + If size is unspecified resize to the current position. + The current stream position isn't changed. + + Return the new file size. + """ + return self._file.truncate(size) + + def writable(self): + # type: () -> bool + """Return False.""" + return False + + def __enter__(self): + # type: () -> LazyZipOverHTTP + self._file.__enter__() + return self + + def __exit__(self, *exc): + # type: (*Any) -> Optional[bool] + return self._file.__exit__(*exc) + + @contextmanager + def _stay(self): + # type: ()-> Iterator[None] + """Return a context manager keeping the position. + + At the end of the block, seek back to original position. + """ + pos = self.tell() + try: + yield + finally: + self.seek(pos) + + def _check_zip(self): + # type: () -> None + """Check and download until the file is a valid ZIP.""" + end = self._length - 1 + for start in reversed(range(0, end, self._chunk_size)): + self._download(start, end) + with self._stay(): + try: + # For read-only ZIP files, ZipFile only needs + # methods read, seek, seekable and tell. 
+ ZipFile(self) # type: ignore + except BadZipfile: + pass + else: + break + + def _stream_response(self, start, end, base_headers=HEADERS): + # type: (int, int, Dict[str, str]) -> Response + """Return HTTP response to a range request from start to end.""" + headers = base_headers.copy() + headers['Range'] = f'bytes={start}-{end}' + # TODO: Get range requests to be correctly cached + headers['Cache-Control'] = 'no-cache' + return self._session.get(self._url, headers=headers, stream=True) + + def _merge(self, start, end, left, right): + # type: (int, int, int, int) -> Iterator[Tuple[int, int]] + """Return an iterator of intervals to be fetched. + + Args: + start (int): Start of needed interval + end (int): End of needed interval + left (int): Index of first overlapping downloaded data + right (int): Index after last overlapping downloaded data + """ + lslice, rslice = self._left[left:right], self._right[left:right] + i = start = min([start]+lslice[:1]) + end = max([end]+rslice[-1:]) + for j, k in zip(lslice, rslice): + if j > i: + yield i, j-1 + i = k + 1 + if i <= end: + yield i, end + self._left[left:right], self._right[left:right] = [start], [end] + + def _download(self, start, end): + # type: (int, int) -> None + """Download bytes from start to end inclusively.""" + with self._stay(): + left = bisect_left(self._right, start) + right = bisect_right(self._left, end) + for start, end in self._merge(start, end, left, right): + response = self._stream_response(start, end) + response.raise_for_status() + self.seek(start) + for chunk in response_chunks(response, self._chunk_size): + self._file.write(chunk) diff --git a/venv/Lib/site-packages/pip/_internal/network/session.py b/venv/Lib/site-packages/pip/_internal/network/session.py new file mode 100644 index 00000000..4af800f1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/network/session.py @@ -0,0 +1,449 @@ +"""PipSession and supporting code, containing all pip-specific +network request configuration and behavior. +""" + +# When mypy runs on Windows the call to distro.linux_distribution() is skipped +# resulting in the failure: +# +# error: unused 'type: ignore' comment +# +# If the upstream module adds typing, this comment should be removed. See +# https://github.com/nir0s/distro/pull/269 +# +# mypy: warn-unused-ignores=False + +import email.utils +import ipaddress +import json +import logging +import mimetypes +import os +import platform +import sys +import urllib.parse +import warnings +from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union + +from pip._vendor import requests, urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter +from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter +from pip._vendor.requests.models import PreparedRequest, Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.urllib3.connectionpool import ConnectionPool +from pip._vendor.urllib3.exceptions import InsecureRequestWarning + +from pip import __version__ +from pip._internal.metadata import get_default_environment +from pip._internal.models.link import Link +from pip._internal.network.auth import MultiDomainBasicAuth +from pip._internal.network.cache import SafeFileCache + +# Import ssl from compat so the initial import occurs in only one place. 
+from pip._internal.utils.compat import has_tls +from pip._internal.utils.glibc import libc_ver +from pip._internal.utils.misc import build_url_from_netloc, parse_netloc +from pip._internal.utils.urls import url_to_path + +logger = logging.getLogger(__name__) + +SecureOrigin = Tuple[str, str, Optional[Union[int, str]]] + + +# Ignore warning raised when using --trusted-host. +warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +SECURE_ORIGINS = [ + # protocol, hostname, port + # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) + ("https", "*", "*"), + ("*", "localhost", "*"), + ("*", "127.0.0.0/8", "*"), + ("*", "::1/128", "*"), + ("file", "*", None), + # ssh is always secure. + ("ssh", "*", "*"), +] # type: List[SecureOrigin] + + +# These are environment variables present when running under various +# CI systems. For each variable, some CI systems that use the variable +# are indicated. The collection was chosen so that for each of a number +# of popular systems, at least one of the environment variables is used. +# This list is used to provide some indication of and lower bound for +# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. +# For more background, see: https://github.com/pypa/pip/issues/5499 +CI_ENVIRONMENT_VARIABLES = ( + # Azure Pipelines + 'BUILD_BUILDID', + # Jenkins + 'BUILD_ID', + # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI + 'CI', + # Explicit environment variable. + 'PIP_IS_CI', +) + + +def looks_like_ci(): + # type: () -> bool + """ + Return whether it looks like pip is running under CI. + """ + # We don't use the method of checking for a tty (e.g. using isatty()) + # because some CI systems mimic a tty (e.g. Travis CI). Thus that + # method doesn't provide definitive information in either direction. + return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) + + +def user_agent(): + # type: () -> str + """ + Return a string representing the user agent. 
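# Minimal sketch, not part of the diff: user_agent() below builds pip's User-Agent header
# from the installer version plus a sorted JSON blob describing the interpreter, OS and
# CI status. Exact values vary per machine; the output shown is only indicative.
from pip._internal.network.session import user_agent

print(user_agent())
# e.g. pip/21.x {"ci":null,"cpu":"AMD64","implementation":{"name":"CPython",...},"installer":{"name":"pip",...},...}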
+ """ + data = { + "installer": {"name": "pip", "version": __version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } # type: Dict[str, Any] + + if data["implementation"]["name"] == 'CPython': + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'PyPy': + pypy_version_info = sys.pypy_version_info # type: ignore + if pypy_version_info.releaselevel == 'final': + pypy_version_info = pypy_version_info[:3] + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == 'Jython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'IronPython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + from pip._vendor import distro + + # https://github.com/nir0s/distro/pull/269 + linux_distribution = distro.linux_distribution() # type: ignore + distro_infos = dict(filter( + lambda x: x[1], + zip(["name", "version", "id"], linux_distribution), + )) + libc = dict(filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + )) + if libc: + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + if has_tls(): + import _ssl as ssl + data["openssl_version"] = ssl.OPENSSL_VERSION + + setuptools_dist = get_default_environment().get_distribution("setuptools") + if setuptools_dist is not None: + data["setuptools_version"] = str(setuptools_dist.version) + + # Use None rather than False so as not to give the impression that + # pip knows it is not being run under CI. Rather, it is a null or + # inconclusive result. Also, we include some value rather than no + # value to make it easier to know that the check has been run. + data["ci"] = True if looks_like_ci() else None + + user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") + if user_data is not None: + data["user_data"] = user_data + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class LocalFSAdapter(BaseAdapter): + + def send( + self, + request, # type: PreparedRequest + stream=False, # type: bool + timeout=None, # type: Optional[Union[float, Tuple[float, float]]] + verify=True, # type: Union[bool, str] + cert=None, # type: Optional[Union[str, Tuple[str, str]]] + proxies=None, # type:Optional[Mapping[str, str]] + ): + # type: (...) 
-> Response + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + resp.status_code = 404 + resp.raw = exc + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict({ + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + }) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self): + # type: () -> None + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + + def cert_verify( + self, + conn, # type: ConnectionPool + url, # type: str + verify, # type: Union[bool, str] + cert, # type: Optional[Union[str, Tuple[str, str]]] + ): + # type: (...) -> None + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class InsecureCacheControlAdapter(CacheControlAdapter): + + def cert_verify( + self, + conn, # type: ConnectionPool + url, # type: str + verify, # type: Union[bool, str] + cert, # type: Optional[Union[str, Tuple[str, str]]] + ): + # type: (...) -> None + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class PipSession(requests.Session): + + timeout = None # type: Optional[int] + + def __init__( + self, + *args, # type: Any + retries=0, # type: int + cache=None, # type: Optional[str] + trusted_hosts=(), # type: Sequence[str] + index_urls=None, # type: Optional[List[str]] + **kwargs, # type: Any + ): + # type: (...) -> None + """ + :param trusted_hosts: Domains not to emit warnings for when not using + HTTPS. + """ + super().__init__(*args, **kwargs) + + # Namespace the attribute with "pip_" just in case to prevent + # possible conflicts with the base class. + self.pip_trusted_origins = [] # type: List[Tuple[str, Optional[int]]] + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth(index_urls=index_urls) + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interrupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + # A 500 may indicate transient error in Amazon S3 + # A 520 or 527 - may indicate transient error in CloudFlare + status_forcelist=[500, 503, 520, 527], + + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. + backoff_factor=0.25, + ) # type: ignore + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching so we'll use it for all http:// URLs. + # If caching is disabled, we will also use it for + # https:// hosts that we've marked as ignoring + # TLS errors for (trusted-hosts). + insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + + # We want to _only_ cache responses on securely fetched origins or when + # the host is specified as trusted. We do this because + # we can't validate the response of an insecurely/untrusted fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. 
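# Rough standalone sketch (using plain requests/urllib3 rather than pip's
# vendored copies) of the session setup pattern above: build one retry
# policy, wrap it in adapters, and mount the adapters per URL prefix.
# The retry values mirror the ones above; the User-Agent string is made up.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retries = Retry(
    total=3,                                 # overall retry budget per request
    backoff_factor=0.25,                     # small delay between attempts
    status_forcelist=[500, 503, 520, 527],   # status codes treated as transient
)

session = requests.Session()
session.headers["User-Agent"] = "example-client/0.1"
session.mount("https://", HTTPAdapter(max_retries=retries))
session.mount("http://", HTTPAdapter(max_retries=retries))
# response = session.get("https://pypi.org/simple/", timeout=10)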
+ if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + self._trusted_host_adapter = InsecureCacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries) + self._trusted_host_adapter = insecure_adapter + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + for host in trusted_hosts: + self.add_trusted_host(host, suppress_logging=True) + + def update_index_urls(self, new_index_urls): + # type: (List[str]) -> None + """ + :param new_index_urls: New index urls to update the authentication + handler with. + """ + self.auth.index_urls = new_index_urls + + def add_trusted_host(self, host, source=None, suppress_logging=False): + # type: (str, Optional[str], bool) -> None + """ + :param host: It is okay to provide a host that has previously been + added. + :param source: An optional source string, for logging where the host + string came from. + """ + if not suppress_logging: + msg = f'adding trusted host: {host!r}' + if source is not None: + msg += f' (from {source})' + logger.info(msg) + + host_port = parse_netloc(host) + if host_port not in self.pip_trusted_origins: + self.pip_trusted_origins.append(host_port) + + self.mount( + build_url_from_netloc(host) + '/', + self._trusted_host_adapter + ) + if not host_port[1]: + # Mount wildcard ports for the same host. + self.mount( + build_url_from_netloc(host) + ':', + self._trusted_host_adapter + ) + + def iter_secure_origins(self): + # type: () -> Iterator[SecureOrigin] + yield from SECURE_ORIGINS + for host, port in self.pip_trusted_origins: + yield ('*', host, '*' if port is None else port) + + def is_secure_origin(self, location): + # type: (Link) -> bool + # Determine if this url used a secure transport mechanism + parsed = urllib.parse.urlparse(str(location)) + origin_protocol, origin_host, origin_port = ( + parsed.scheme, parsed.hostname, parsed.port, + ) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) + origin_protocol = origin_protocol.rsplit('+', 1)[-1] + + # Determine if our origin is a secure origin by looking through our + # hardcoded list of secure origins, as well as any additional ones + # configured on this PackageFinder instance. + for secure_origin in self.iter_secure_origins(): + secure_protocol, secure_host, secure_port = secure_origin + if origin_protocol != secure_protocol and secure_protocol != "*": + continue + + try: + addr = ipaddress.ip_address(origin_host) + network = ipaddress.ip_network(secure_host) + except ValueError: + # We don't have both a valid address or a valid network, so + # we'll check this origin against hostnames. + if ( + origin_host and + origin_host.lower() != secure_host.lower() and + secure_host != "*" + ): + continue + else: + # We have a valid address and network, so see if the address + # is contained within the network. + if addr not in network: + continue + + # Check to see if the port matches. 
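# The host comparison just above can be tried standalone with the standard
# ipaddress module: a trusted entry may be an IP network or a hostname, and
# an origin matches when its address falls inside that network (or the
# hostname matches exactly / the entry is "*"). Example values are made up.
import ipaddress

def host_matches(origin_host: str, secure_host: str) -> bool:
    try:
        addr = ipaddress.ip_address(origin_host)
        network = ipaddress.ip_network(secure_host)
    except ValueError:
        # Not an address/network pair: fall back to hostname comparison,
        # with "*" acting as a wildcard.
        return secure_host == "*" or origin_host.lower() == secure_host.lower()
    return addr in network

print(host_matches("127.0.0.1", "127.0.0.0/8"))    # True
print(host_matches("pypi.org", "pypi.org"))        # True
print(host_matches("198.51.100.7", "::1/128"))     # False (version mismatch)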
+ if ( + origin_port != secure_port and + secure_port != "*" and + secure_port is not None + ): + continue + + # If we've gotten here, then this origin matches the current + # secure origin and we should return True + return True + + # If we've gotten to this point, then the origin isn't secure and we + # will not accept it as a valid location to search. We will however + # log a warning that we are ignoring it. + logger.warning( + "The repository located at %s is not a trusted or secure host and " + "is being ignored. If this repository is available via HTTPS we " + "recommend you use HTTPS instead, otherwise you may silence " + "this warning and allow it anyway with '--trusted-host %s'.", + origin_host, + origin_host, + ) + + return False + + def request(self, method, url, *args, **kwargs): + # type: (str, str, *Any, **Any) -> Response + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + + # Dispatch the actual request + return super().request(method, url, *args, **kwargs) diff --git a/venv/Lib/site-packages/pip/_internal/network/utils.py b/venv/Lib/site-packages/pip/_internal/network/utils.py new file mode 100644 index 00000000..6e5cf0d1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/network/utils.py @@ -0,0 +1,95 @@ +from typing import Dict, Iterator + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.exceptions import NetworkConnectionError + +# The following comments and HTTP headers were originally added by +# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03. +# +# We use Accept-Encoding: identity here because requests defaults to +# accepting compressed responses. This breaks in a variety of ways +# depending on how the server is configured. +# - Some servers will notice that the file isn't a compressible file +# and will leave the file alone and with an empty Content-Encoding +# - Some servers will notice that the file is already compressed and +# will leave the file alone, adding a Content-Encoding: gzip header +# - Some servers won't notice anything at all and will take a file +# that's already been compressed and compress it again, and set +# the Content-Encoding: gzip header +# By setting this to request only the identity encoding we're hoping +# to eliminate the third case. Hopefully there does not exist a server +# which when given a file will notice it is already compressed and that +# you're not asking for a compressed file and will then decompress it +# before sending because if that's the case I don't think it'll ever be +# possible to make this work. +HEADERS = {'Accept-Encoding': 'identity'} # type: Dict[str, str] + + +def raise_for_status(resp): + # type: (Response) -> None + http_error_msg = '' + if isinstance(resp.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. 
+ try: + reason = resp.reason.decode('utf-8') + except UnicodeDecodeError: + reason = resp.reason.decode('iso-8859-1') + else: + reason = resp.reason + + if 400 <= resp.status_code < 500: + http_error_msg = ( + f'{resp.status_code} Client Error: {reason} for url: {resp.url}') + + elif 500 <= resp.status_code < 600: + http_error_msg = ( + f'{resp.status_code} Server Error: {reason} for url: {resp.url}') + + if http_error_msg: + raise NetworkConnectionError(http_error_msg, response=resp) + + +def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE): + # type: (Response, int) -> Iterator[bytes] + """Given a requests Response, provide the data chunks. + """ + try: + # Special case for urllib3. + for chunk in response.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False, + ): + yield chunk + except AttributeError: + # Standard file-like object. + while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk diff --git a/venv/Lib/site-packages/pip/_internal/network/xmlrpc.py b/venv/Lib/site-packages/pip/_internal/network/xmlrpc.py new file mode 100644 index 00000000..b92b8d9a --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/network/xmlrpc.py @@ -0,0 +1,49 @@ +"""xmlrpclib.Transport implementation +""" + +import logging +import urllib.parse +import xmlrpc.client +from typing import TYPE_CHECKING, Tuple + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status + +if TYPE_CHECKING: + from xmlrpc.client import _HostType, _Marshallable + +logger = logging.getLogger(__name__) + + +class PipXmlrpcTransport(xmlrpc.client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. + """ + + def __init__(self, index_url, session, use_datetime=False): + # type: (str, PipSession, bool) -> None + super().__init__(use_datetime) + index_parts = urllib.parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request(self, host, handler, request_body, verbose=False): + # type: (_HostType, str, bytes, bool) -> Tuple[_Marshallable, ...] 
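# Loose usage sketch for a Transport subclass like the one defined here:
# xmlrpc.client.ServerProxy accepts a custom transport, and every call made
# through the proxy is routed through that transport's request() method.
# The endpoint URL below is only a placeholder, not a real index URL.
import xmlrpc.client

class LoggingTransport(xmlrpc.client.Transport):
    def request(self, host, handler, request_body, verbose=False):
        print("XML-RPC request to", host, handler)
        return super().request(host, handler, request_body, verbose)

proxy = xmlrpc.client.ServerProxy(
    "https://example.invalid/RPC2",   # placeholder endpoint
    transport=LoggingTransport(),
)
# proxy.some_method()  # would go through LoggingTransport.request()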
+ assert isinstance(host, str) + parts = (self._scheme, host, handler, None, None, None) + url = urllib.parse.urlunparse(parts) + try: + headers = {'Content-Type': 'text/xml'} + response = self._session.post(url, data=request_body, + headers=headers, stream=True) + raise_for_status(response) + self.verbose = verbose + return self.parse_response(response.raw) + except NetworkConnectionError as exc: + assert exc.response + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, url, + ) + raise diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/__init__.py b/venv/Lib/site-packages/pip/_internal/operations/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_internal/utils/__init__.py rename to venv/Lib/site-packages/pip/_internal/operations/__init__.py diff --git a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..2ce5ef52 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-36.pyc new file mode 100644 index 00000000..c284076f Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-36.pyc new file mode 100644 index 00000000..81e7c266 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-36.pyc new file mode 100644 index 00000000..32dc6c01 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/__init__.py b/venv/Lib/site-packages/pip/_internal/operations/build/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/__init__.py rename to venv/Lib/site-packages/pip/_internal/operations/build/__init__.py diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..a4ff36d5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-36.pyc new file mode 100644 index 00000000..4decef54 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-36.pyc new file mode 100644 index 00000000..fe142d30 Binary files 
/dev/null and b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-36.pyc new file mode 100644 index 00000000..aafd1b80 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-36.pyc new file mode 100644 index 00000000..4575e500 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/metadata.py b/venv/Lib/site-packages/pip/_internal/operations/build/metadata.py new file mode 100644 index 00000000..1c826835 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/build/metadata.py @@ -0,0 +1,35 @@ +"""Metadata generation logic for source distributions. +""" + +import os + +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal.build_env import BuildEnvironment +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + + +def generate_metadata(build_env, backend): + # type: (BuildEnvironment, Pep517HookCaller) -> str + """Generate metadata using mechanisms described in PEP 517. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory( + kind="modern-metadata", globally_managed=True + ) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that Pep517HookCaller implements a fallback for + # prepare_metadata_for_build_wheel, so we don't have to + # consider the possibility that this hook doesn't exist. + runner = runner_with_spinner_message("Preparing wheel metadata") + with backend.subprocess_runner(runner): + distinfo_dir = backend.prepare_metadata_for_build_wheel( + metadata_dir + ) + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py b/venv/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py new file mode 100644 index 00000000..f46538a0 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py @@ -0,0 +1,74 @@ +"""Metadata generation logic for legacy source distributions. +""" + +import logging +import os + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import InstallationError +from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +def _find_egg_info(directory): + # type: (str) -> str + """Find an .egg-info subdirectory in `directory`. 
+ """ + filenames = [ + f for f in os.listdir(directory) if f.endswith(".egg-info") + ] + + if not filenames: + raise InstallationError( + f"No .egg-info directory found in {directory}" + ) + + if len(filenames) > 1: + raise InstallationError( + "More than one .egg-info directory found in {}".format( + directory + ) + ) + + return os.path.join(directory, filenames[0]) + + +def generate_metadata( + build_env, # type: BuildEnvironment + setup_py_path, # type: str + source_dir, # type: str + isolated, # type: bool + details, # type: str +): + # type: (...) -> str + """Generate metadata using setup.py-based defacto mechanisms. + + Returns the generated metadata directory. + """ + logger.debug( + 'Running setup.py (path:%s) egg_info for package %s', + setup_py_path, details, + ) + + egg_info_dir = TempDirectory( + kind="pip-egg-info", globally_managed=True + ).path + + args = make_setuptools_egg_info_args( + setup_py_path, + egg_info_dir=egg_info_dir, + no_user_config=isolated, + ) + + with build_env: + call_subprocess( + args, + cwd=source_dir, + command_desc='python setup.py egg_info', + ) + + # Return the .egg-info directory. + return _find_egg_info(egg_info_dir) diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/wheel.py b/venv/Lib/site-packages/pip/_internal/operations/build/wheel.py new file mode 100644 index 00000000..903bd7a0 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/build/wheel.py @@ -0,0 +1,38 @@ +import logging +import os +from typing import Optional + +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +def build_wheel_pep517( + name, # type: str + backend, # type: Pep517HookCaller + metadata_directory, # type: str + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one InstallRequirement using the PEP 517 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + assert metadata_directory is not None + try: + logger.debug('Destination directory: %s', tempd) + + runner = runner_with_spinner_message( + f'Building wheel for {name} (PEP 517)' + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, + ) + except Exception: + logger.error('Failed building wheel for %s', name) + return None + return os.path.join(tempd, wheel_name) diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py b/venv/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py new file mode 100644 index 00000000..755c3bc8 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py @@ -0,0 +1,110 @@ +import logging +import os.path +from typing import List, Optional + +from pip._internal.cli.spinners import open_spinner +from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args +from pip._internal.utils.subprocess import ( + LOG_DIVIDER, + call_subprocess, + format_command_args, +) + +logger = logging.getLogger(__name__) + + +def format_command_result( + command_args, # type: List[str] + command_output, # type: str +): + # type: (...) 
-> str + """Format command information for logging.""" + command_desc = format_command_args(command_args) + text = f'Command arguments: {command_desc}\n' + + if not command_output: + text += 'Command output: None' + elif logger.getEffectiveLevel() > logging.DEBUG: + text += 'Command output: [use --verbose to show]' + else: + if not command_output.endswith('\n'): + command_output += '\n' + text += f'Command output:\n{command_output}{LOG_DIVIDER}' + + return text + + +def get_legacy_build_wheel_path( + names, # type: List[str] + temp_dir, # type: str + name, # type: str + command_args, # type: List[str] + command_output, # type: str +): + # type: (...) -> Optional[str] + """Return the path to the wheel in the temporary build directory.""" + # Sort for determinism. + names = sorted(names) + if not names: + msg = ( + 'Legacy build of wheel for {!r} created no files.\n' + ).format(name) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + return None + + if len(names) > 1: + msg = ( + 'Legacy build of wheel for {!r} created more than one file.\n' + 'Filenames (choosing first): {}\n' + ).format(name, names) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + + return os.path.join(temp_dir, names[0]) + + +def build_wheel_legacy( + name, # type: str + setup_py_path, # type: str + source_dir, # type: str + global_options, # type: List[str] + build_options, # type: List[str] + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one unpacked package using the "legacy" build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + wheel_args = make_setuptools_bdist_wheel_args( + setup_py_path, + global_options=global_options, + build_options=build_options, + destination_dir=tempd, + ) + + spin_message = f'Building wheel for {name} (setup.py)' + with open_spinner(spin_message) as spinner: + logger.debug('Destination directory: %s', tempd) + + try: + output = call_subprocess( + wheel_args, + cwd=source_dir, + spinner=spinner, + ) + except Exception: + spinner.finish("error") + logger.error('Failed building wheel for %s', name) + return None + + names = os.listdir(tempd) + wheel_path = get_legacy_build_wheel_path( + names=names, + temp_dir=tempd, + name=name, + command_args=wheel_args, + command_output=output, + ) + return wheel_path diff --git a/venv/Lib/site-packages/pip/_internal/operations/check.py b/venv/Lib/site-packages/pip/_internal/operations/check.py new file mode 100644 index 00000000..5699c0b9 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/check.py @@ -0,0 +1,153 @@ +"""Validation of dependencies of packages +""" + +import logging +from collections import namedtuple +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import RequirementParseError + +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.misc import get_installed_distributions + +if TYPE_CHECKING: + from pip._vendor.packaging.utils import NormalizedName + +logger = logging.getLogger(__name__) + +# Shorthands +PackageSet = Dict['NormalizedName', 'PackageDetails'] +Missing = Tuple[str, Any] +Conflicting = Tuple[str, str, Any] + +MissingDict = Dict['NormalizedName', List[Missing]] +ConflictingDict = Dict['NormalizedName', List[Conflicting]] +CheckResult = 
Tuple[MissingDict, ConflictingDict] +ConflictDetails = Tuple[PackageSet, CheckResult] + +PackageDetails = namedtuple('PackageDetails', ['version', 'requires']) + + +def create_package_set_from_installed(**kwargs: Any) -> Tuple["PackageSet", bool]: + """Converts a list of distributions into a PackageSet. + """ + # Default to using all packages installed on the system + if kwargs == {}: + kwargs = {"local_only": False, "skip": ()} + + package_set = {} + problems = False + for dist in get_installed_distributions(**kwargs): + name = canonicalize_name(dist.project_name) + try: + package_set[name] = PackageDetails(dist.version, dist.requires()) + except (OSError, RequirementParseError) as e: + # Don't crash on unreadable or broken metadata + logger.warning("Error parsing requirements for %s: %s", name, e) + problems = True + return package_set, problems + + +def check_package_set(package_set, should_ignore=None): + # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult + """Check if a package set is consistent + + If should_ignore is passed, it should be a callable that takes a + package name and returns a boolean. + """ + + missing = {} + conflicting = {} + + for package_name, package_detail in package_set.items(): + # Info about dependencies of package_name + missing_deps = set() # type: Set[Missing] + conflicting_deps = set() # type: Set[Conflicting] + + if should_ignore and should_ignore(package_name): + continue + + for req in package_detail.requires: + name = canonicalize_name(req.project_name) + + # Check if it's missing + if name not in package_set: + missed = True + if req.marker is not None: + missed = req.marker.evaluate() + if missed: + missing_deps.add((name, req)) + continue + + # Check if there's a conflict + version = package_set[name].version # type: str + if not req.specifier.contains(version, prereleases=True): + conflicting_deps.add((name, version, req)) + + if missing_deps: + missing[package_name] = sorted(missing_deps, key=str) + if conflicting_deps: + conflicting[package_name] = sorted(conflicting_deps, key=str) + + return missing, conflicting + + +def check_install_conflicts(to_install): + # type: (List[InstallRequirement]) -> ConflictDetails + """For checking if the dependency graph would be consistent after \ + installing given requirements + """ + # Start from the current state + package_set, _ = create_package_set_from_installed() + # Install packages + would_be_installed = _simulate_installation_of(to_install, package_set) + + # Only warn about directly-dependent packages; create a whitelist of them + whitelist = _create_whitelist(would_be_installed, package_set) + + return ( + package_set, + check_package_set( + package_set, should_ignore=lambda name: name not in whitelist + ) + ) + + +def _simulate_installation_of(to_install, package_set): + # type: (List[InstallRequirement], PackageSet) -> Set[NormalizedName] + """Computes the version of packages after installing to_install. 
+ """ + + # Keep track of packages that were installed + installed = set() + + # Modify it as installing requirement_set would (assuming no errors) + for inst_req in to_install: + abstract_dist = make_distribution_for_install_requirement(inst_req) + dist = abstract_dist.get_pkg_resources_distribution() + + assert dist is not None + name = canonicalize_name(dist.key) + package_set[name] = PackageDetails(dist.version, dist.requires()) + + installed.add(name) + + return installed + + +def _create_whitelist(would_be_installed, package_set): + # type: (Set[NormalizedName], PackageSet) -> Set[NormalizedName] + packages_affected = set(would_be_installed) + + for package_name in package_set: + if package_name in packages_affected: + continue + + for req in package_set[package_name].requires: + if canonicalize_name(req.name) in packages_affected: + packages_affected.add(package_name) + break + + return packages_affected diff --git a/venv/Lib/site-packages/pip/_internal/operations/freeze.py b/venv/Lib/site-packages/pip/_internal/operations/freeze.py new file mode 100644 index 00000000..f34a9d4b --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/freeze.py @@ -0,0 +1,264 @@ +import collections +import logging +import os +from typing import ( + Container, + Dict, + Iterable, + Iterator, + List, + Optional, + Set, + Tuple, + Union, +) + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import Distribution, Requirement, RequirementParseError + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_file import COMMENT_RE +from pip._internal.utils.direct_url_helpers import ( + direct_url_as_pep440_direct_reference, + dist_get_direct_url, +) +from pip._internal.utils.misc import dist_is_editable, get_installed_distributions + +logger = logging.getLogger(__name__) + +RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]] + + +def freeze( + requirement=None, # type: Optional[List[str]] + find_links=None, # type: Optional[List[str]] + local_only=False, # type: bool + user_only=False, # type: bool + paths=None, # type: Optional[List[str]] + isolated=False, # type: bool + exclude_editable=False, # type: bool + skip=() # type: Container[str] +): + # type: (...) -> Iterator[str] + find_links = find_links or [] + + for link in find_links: + yield f'-f {link}' + installations = {} # type: Dict[str, FrozenRequirement] + + for dist in get_installed_distributions( + local_only=local_only, + skip=(), + user_only=user_only, + paths=paths + ): + try: + req = FrozenRequirement.from_dist(dist) + except RequirementParseError as exc: + # We include dist rather than dist.project_name because the + # dist string includes more information, like the version and + # location. We also include the exception message to aid + # troubleshooting. 
+ logger.warning( + 'Could not generate requirement for distribution %r: %s', + dist, exc + ) + continue + if exclude_editable and req.editable: + continue + installations[req.canonical_name] = req + + if requirement: + # the options that don't get turned into an InstallRequirement + # should only be emitted once, even if the same option is in multiple + # requirements files, so we need to keep track of what has been emitted + # so that we don't emit it again if it's seen again + emitted_options = set() # type: Set[str] + # keep track of which files a requirement is in so that we can + # give an accurate warning if a requirement appears multiple times. + req_files = collections.defaultdict(list) # type: Dict[str, List[str]] + for req_file_path in requirement: + with open(req_file_path) as req_file: + for line in req_file: + if (not line.strip() or + line.strip().startswith('#') or + line.startswith(( + '-r', '--requirement', + '-f', '--find-links', + '-i', '--index-url', + '--pre', + '--trusted-host', + '--process-dependency-links', + '--extra-index-url', + '--use-feature'))): + line = line.rstrip() + if line not in emitted_options: + emitted_options.add(line) + yield line + continue + + if line.startswith('-e') or line.startswith('--editable'): + if line.startswith('-e'): + line = line[2:].strip() + else: + line = line[len('--editable'):].strip().lstrip('=') + line_req = install_req_from_editable( + line, + isolated=isolated, + ) + else: + line_req = install_req_from_line( + COMMENT_RE.sub('', line).strip(), + isolated=isolated, + ) + + if not line_req.name: + logger.info( + "Skipping line in requirement file [%s] because " + "it's not clear what it would install: %s", + req_file_path, line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + else: + line_req_canonical_name = canonicalize_name( + line_req.name) + if line_req_canonical_name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub('', line).strip(), + line_req.name + ) + else: + req_files[line_req.name].append(req_file_path) + else: + yield str(installations[ + line_req_canonical_name]).rstrip() + del installations[line_req_canonical_name] + req_files[line_req.name].append(req_file_path) + + # Warn about requirements that were included multiple times (in a + # single requirements file or in different requirements files). + for name, files in req_files.items(): + if len(files) > 1: + logger.warning("Requirement %s included multiple times [%s]", + name, ', '.join(sorted(set(files)))) + + yield( + '## The following requirements were added by ' + 'pip freeze:' + ) + for installation in sorted( + installations.values(), key=lambda x: x.name.lower()): + if installation.canonical_name not in skip: + yield str(installation).rstrip() + + +def get_requirement_info(dist): + # type: (Distribution) -> RequirementInfo + """ + Compute and return values (req, editable, comments) for use in + FrozenRequirement.from_dist(). 
+ """ + if not dist_is_editable(dist): + return (None, False, []) + + location = os.path.normcase(os.path.abspath(dist.location)) + + from pip._internal.vcs import RemoteNotFoundError, vcs + vcs_backend = vcs.get_backend_for_dir(location) + + if vcs_backend is None: + req = dist.as_requirement() + logger.debug( + 'No VCS found for editable requirement "%s" in: %r', req, + location, + ) + comments = [ + f'# Editable install with no version control ({req})' + ] + return (location, True, comments) + + try: + req = vcs_backend.get_src_requirement(location, dist.project_name) + except RemoteNotFoundError: + req = dist.as_requirement() + comments = [ + '# Editable {} install with no remote ({})'.format( + type(vcs_backend).__name__, req, + ) + ] + return (location, True, comments) + + except BadCommand: + logger.warning( + 'cannot determine version of editable source in %s ' + '(%s command not found in path)', + location, + vcs_backend.name, + ) + return (None, True, []) + + except InstallationError as exc: + logger.warning( + "Error when trying to get requirement for VCS system %s, " + "falling back to uneditable format", exc + ) + else: + return (req, True, []) + + logger.warning( + 'Could not determine repository location of %s', location + ) + comments = ['## !! Could not determine repository location'] + + return (None, False, comments) + + +class FrozenRequirement: + def __init__(self, name, req, editable, comments=()): + # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None + self.name = name + self.canonical_name = canonicalize_name(name) + self.req = req + self.editable = editable + self.comments = comments + + @classmethod + def from_dist(cls, dist): + # type: (Distribution) -> FrozenRequirement + # TODO `get_requirement_info` is taking care of editable requirements. + # TODO This should be refactored when we will add detection of + # editable that provide .dist-info metadata. + req, editable, comments = get_requirement_info(dist) + if req is None and not editable: + # if PEP 610 metadata is present, attempt to use it + direct_url = dist_get_direct_url(dist) + if direct_url: + req = direct_url_as_pep440_direct_reference( + direct_url, dist.project_name + ) + comments = [] + if req is None: + # name==version requirement + req = dist.as_requirement() + + return cls(dist.project_name, req, editable, comments=comments) + + def __str__(self): + # type: () -> str + req = self.req + if self.editable: + req = f'-e {req}' + return '\n'.join(list(self.comments) + [str(req)]) + '\n' diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__init__.py b/venv/Lib/site-packages/pip/_internal/operations/install/__init__.py new file mode 100644 index 00000000..24d6a5dd --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/install/__init__.py @@ -0,0 +1,2 @@ +"""For modules related to installing packages. 
+""" diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..1812b8b9 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-36.pyc new file mode 100644 index 00000000..1aee85d4 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-36.pyc new file mode 100644 index 00000000..ab5fb9cf Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-36.pyc new file mode 100644 index 00000000..438ea051 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py b/venv/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py new file mode 100644 index 00000000..6882c475 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py @@ -0,0 +1,47 @@ +"""Legacy editable installation process, i.e. `setup.py develop`. +""" +import logging +from typing import List, Optional, Sequence + +from pip._internal.build_env import BuildEnvironment +from pip._internal.utils.logging import indent_log +from pip._internal.utils.setuptools_build import make_setuptools_develop_args +from pip._internal.utils.subprocess import call_subprocess + +logger = logging.getLogger(__name__) + + +def install_editable( + install_options, # type: List[str] + global_options, # type: Sequence[str] + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + name, # type: str + setup_py_path, # type: str + isolated, # type: bool + build_env, # type: BuildEnvironment + unpacked_source_directory, # type: str +): + # type: (...) -> None + """Install a package in editable mode. Most arguments are pass-through + to setuptools. + """ + logger.info('Running setup.py develop for %s', name) + + args = make_setuptools_develop_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + no_user_config=isolated, + prefix=prefix, + home=home, + use_user_site=use_user_site, + ) + + with indent_log(): + with build_env: + call_subprocess( + args, + cwd=unpacked_source_directory, + ) diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/legacy.py b/venv/Lib/site-packages/pip/_internal/operations/install/legacy.py new file mode 100644 index 00000000..41d0c1f9 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/install/legacy.py @@ -0,0 +1,125 @@ +"""Legacy installation process, i.e. `setup.py install`. 
+""" + +import logging +import os +import sys +from distutils.util import change_root +from typing import List, Optional, Sequence + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import InstallationError +from pip._internal.models.scheme import Scheme +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.setuptools_build import make_setuptools_install_args +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class LegacyInstallFailure(Exception): + def __init__(self): + # type: () -> None + self.parent = sys.exc_info() + + +def install( + install_options, # type: List[str] + global_options, # type: Sequence[str] + root, # type: Optional[str] + home, # type: Optional[str] + prefix, # type: Optional[str] + use_user_site, # type: bool + pycompile, # type: bool + scheme, # type: Scheme + setup_py_path, # type: str + isolated, # type: bool + req_name, # type: str + build_env, # type: BuildEnvironment + unpacked_source_directory, # type: str + req_description, # type: str +): + # type: (...) -> bool + + header_dir = scheme.headers + + with TempDirectory(kind="record") as temp_dir: + try: + record_filename = os.path.join(temp_dir.path, 'install-record.txt') + install_args = make_setuptools_install_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + record_filename=record_filename, + root=root, + prefix=prefix, + header_dir=header_dir, + home=home, + use_user_site=use_user_site, + no_user_config=isolated, + pycompile=pycompile, + ) + + runner = runner_with_spinner_message( + f"Running setup.py install for {req_name}" + ) + with indent_log(), build_env: + runner( + cmd=install_args, + cwd=unpacked_source_directory, + ) + + if not os.path.exists(record_filename): + logger.debug('Record file %s not found', record_filename) + # Signal to the caller that we didn't install the new package + return False + + except Exception: + # Signal to the caller that we didn't install the new package + raise LegacyInstallFailure + + # At this point, we have successfully installed the requirement. + + # We intentionally do not use any encoding to read the file because + # setuptools writes the file using distutils.file_util.write_file, + # which does not specify an encoding. + with open(record_filename) as f: + record_lines = f.read().splitlines() + + def prepend_root(path): + # type: (str) -> str + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + for line in record_lines: + directory = os.path.dirname(line) + if directory.endswith('.egg-info'): + egg_info_dir = prepend_root(directory) + break + else: + message = ( + "{} did not indicate that it installed an " + ".egg-info directory. Only setup.py projects " + "generating .egg-info directories are supported." 
+ ).format(req_description) + raise InstallationError(message) + + new_lines = [] + for line in record_lines: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append( + os.path.relpath(prepend_root(filename), egg_info_dir) + ) + new_lines.sort() + ensure_dir(egg_info_dir) + inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') + with open(inst_files_path, 'w') as f: + f.write('\n'.join(new_lines) + '\n') + + return True diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/wheel.py b/venv/Lib/site-packages/pip/_internal/operations/install/wheel.py new file mode 100644 index 00000000..10e5b15f --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/install/wheel.py @@ -0,0 +1,819 @@ +"""Support for installing and building the "wheel" binary package format. +""" + +import collections +import compileall +import contextlib +import csv +import importlib +import logging +import os.path +import re +import shutil +import sys +import warnings +from base64 import urlsafe_b64encode +from email.message import Message +from itertools import chain, filterfalse, starmap +from typing import ( + IO, + TYPE_CHECKING, + Any, + BinaryIO, + Callable, + Dict, + Iterable, + Iterator, + List, + NewType, + Optional, + Sequence, + Set, + Tuple, + Union, + cast, +) +from zipfile import ZipFile, ZipInfo + +from pip._vendor import pkg_resources +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor.distlib.util import get_export_entry +from pip._vendor.pkg_resources import Distribution +from pip._vendor.six import ensure_str, ensure_text, reraise + +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_major_minor_version +from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition +from pip._internal.utils.unpacking import ( + current_umask, + is_within_directory, + set_extracted_file_to_default_mode_plus_executable, + zip_item_is_executable, +) +from pip._internal.utils.wheel import parse_wheel, pkg_resources_distribution_for_wheel + +if TYPE_CHECKING: + from typing import Protocol + + class File(Protocol): + src_record_path = None # type: RecordPath + dest_path = None # type: str + changed = None # type: bool + + def save(self): + # type: () -> None + pass + + +logger = logging.getLogger(__name__) + +RecordPath = NewType('RecordPath', str) +InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]] + + +def rehash(path, blocksize=1 << 20): + # type: (str, int) -> Tuple[str, str] + """Return (encoded_digest, length) for path using hashlib.sha256()""" + h, length = hash_file(path, blocksize) + digest = 'sha256=' + urlsafe_b64encode( + h.digest() + ).decode('latin1').rstrip('=') + return (digest, str(length)) + + +def csv_io_kwargs(mode): + # type: (str) -> Dict[str, Any] + """Return keyword arguments to properly open a CSV file + in the given mode. + """ + return {'mode': mode, 'newline': '', 'encoding': 'utf-8'} + + +def fix_script(path): + # type: (str) -> bool + """Replace #!python with #!/path/to/python + Return True if file was changed. 
+ """ + # XXX RECORD hashes will need to be updated + assert os.path.isfile(path) + + with open(path, 'rb') as script: + firstline = script.readline() + if not firstline.startswith(b'#!python'): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b'#!' + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, 'wb') as script: + script.write(firstline) + script.write(rest) + return True + + +def wheel_root_is_purelib(metadata): + # type: (Message) -> bool + return metadata.get("Root-Is-Purelib", "").lower() == "true" + + +def get_entrypoints(distribution): + # type: (Distribution) -> Tuple[Dict[str, str], Dict[str, str]] + # get the entry points and then the script names + try: + console = distribution.get_entry_map('console_scripts') + gui = distribution.get_entry_map('gui_scripts') + except KeyError: + # Our dict-based Distribution raises KeyError if entry_points.txt + # doesn't exist. + return {}, {} + + def _split_ep(s): + # type: (pkg_resources.EntryPoint) -> Tuple[str, str] + """get the string representation of EntryPoint, + remove space and split on '=' + """ + split_parts = str(s).replace(" ", "").split("=") + return split_parts[0], split_parts[1] + + # convert the EntryPoint objects into strings with module:function + console = dict(_split_ep(v) for v in console.values()) + gui = dict(_split_ep(v) for v in gui.values()) + return console, gui + + +def message_about_scripts_not_on_PATH(scripts): + # type: (Sequence[str]) -> Optional[str] + """Determine if any scripts are not on PATH and format a warning. + Returns a warning message if one or more scripts are not on PATH, + otherwise None. + """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]] + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(i).rstrip(os.sep) for i in + os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. + # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) + warn_for = { + parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(parent_dir) not in not_warn_dirs + } # type: Dict[str, Set[str]] + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, dir_scripts in warn_for.items(): + sorted_scripts = sorted(dir_scripts) # type: List[str] + if len(sorted_scripts) == 1: + start_text = "script {} is".format(sorted_scripts[0]) + else: + start_text = "scripts {} are".format( + ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] + ) + + msg_lines.append( + "The {} installed in '{}' which is not on PATH." + .format(start_text, parent_dir) + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." 
+ ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Add a note if any directory starts with ~ + warn_for_tilde = any( + i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i + ) + if warn_for_tilde: + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " + "which may not be expanded by all applications." + ) + msg_lines.append(tilde_warning_msg) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def _normalized_outrows(outrows): + # type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]] + """Normalize the given rows of a RECORD file. + + Items in each row are converted into str. Rows are then sorted to make + the value more predictable for tests. + + Each row is a 3-tuple (path, hash, size) and corresponds to a record of + a RECORD file (see PEP 376 and PEP 427 for details). For the rows + passed to this function, the size can be an integer as an int or string, + or the empty string. + """ + # Normally, there should only be one row per path, in which case the + # second and third elements don't come into play when sorting. + # However, in cases in the wild where a path might happen to occur twice, + # we don't want the sort operation to trigger an error (but still want + # determinism). Since the third element can be an int or string, we + # coerce each element to a string to avoid a TypeError in this case. + # For additional background, see-- + # https://github.com/pypa/pip/issues/5868 + return sorted( + (ensure_str(record_path, encoding='utf-8'), hash_, str(size)) + for record_path, hash_, size in outrows + ) + + +def _record_to_fs_path(record_path): + # type: (RecordPath) -> str + return record_path + + +def _fs_to_record_path(path, relative_to=None): + # type: (str, Optional[str]) -> RecordPath + if relative_to is not None: + # On Windows, do not handle relative paths if they belong to different + # logical disks + if os.path.splitdrive(path)[0].lower() == \ + os.path.splitdrive(relative_to)[0].lower(): + path = os.path.relpath(path, relative_to) + path = path.replace(os.path.sep, '/') + return cast('RecordPath', path) + + +def _parse_record_path(record_column): + # type: (str) -> RecordPath + p = ensure_text(record_column, encoding='utf-8') + return cast('RecordPath', p) + + +def get_csv_rows_for_installed( + old_csv_rows, # type: List[List[str]] + installed, # type: Dict[RecordPath, RecordPath] + changed, # type: Set[RecordPath] + generated, # type: List[str] + lib_dir, # type: str +): + # type: (...) -> List[InstalledCSVRow] + """ + :param installed: A map from archive RECORD path to installation RECORD + path. 
+ """ + installed_rows = [] # type: List[InstalledCSVRow] + for row in old_csv_rows: + if len(row) > 3: + logger.warning('RECORD line has more than three elements: %s', row) + old_record_path = _parse_record_path(row[0]) + new_record_path = installed.pop(old_record_path, old_record_path) + if new_record_path in changed: + digest, length = rehash(_record_to_fs_path(new_record_path)) + else: + digest = row[1] if len(row) > 1 else '' + length = row[2] if len(row) > 2 else '' + installed_rows.append((new_record_path, digest, length)) + for f in generated: + path = _fs_to_record_path(f, lib_dir) + digest, length = rehash(f) + installed_rows.append((path, digest, length)) + for installed_record_path in installed.values(): + installed_rows.append((installed_record_path, '', '')) + return installed_rows + + +def get_console_script_specs(console): + # type: (Dict[str, str]) -> List[str] + """ + Given the mapping from entrypoint name to callable, return the relevant + console script specs. + """ + # Don't mutate caller's version + console = console.copy() + + scripts_to_generate = [] + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). + # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadata 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. + # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. 
+ pip_script = console.pop('pip', None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append('pip = ' + pip_script) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + scripts_to_generate.append( + 'pip{} = {}'.format(sys.version_info[0], pip_script) + ) + + scripts_to_generate.append( + f'pip{get_major_minor_version()} = {pip_script}' + ) + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop('easy_install', None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append( + 'easy_install = ' + easy_install_script + ) + + scripts_to_generate.append( + 'easy_install-{} = {}'.format( + get_major_minor_version(), easy_install_script + ) + ) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console entry points specified in the wheel + scripts_to_generate.extend(starmap('{} = {}'.format, console.items())) + + return scripts_to_generate + + +class ZipBackedFile: + def __init__(self, src_record_path, dest_path, zip_file): + # type: (RecordPath, str, ZipFile) -> None + self.src_record_path = src_record_path + self.dest_path = dest_path + self._zip_file = zip_file + self.changed = False + + def _getinfo(self): + # type: () -> ZipInfo + return self._zip_file.getinfo(self.src_record_path) + + def save(self): + # type: () -> None + # directory creation is lazy and after file filtering + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + parent_dir = os.path.dirname(self.dest_path) + ensure_dir(parent_dir) + + # When we open the output file below, any existing file is truncated + # before we start writing the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. + if os.path.exists(self.dest_path): + os.unlink(self.dest_path) + + zipinfo = self._getinfo() + + with self._zip_file.open(zipinfo) as f: + with open(self.dest_path, "wb") as dest: + shutil.copyfileobj(f, dest) + + if zip_item_is_executable(zipinfo): + set_extracted_file_to_default_mode_plus_executable(self.dest_path) + + +class ScriptFile: + def __init__(self, file): + # type: (File) -> None + self._file = file + self.src_record_path = self._file.src_record_path + self.dest_path = self._file.dest_path + self.changed = False + + def save(self): + # type: () -> None + self._file.save() + self.changed = fix_script(self.dest_path) + + +class MissingCallableSuffix(InstallationError): + def __init__(self, entry_point): + # type: (str) -> None + super().__init__( + "Invalid script entry point: {} - A callable " + "suffix is required. 
Cf https://packaging.python.org/" + "specifications/entry-points/#use-for-scripts for more " + "information.".format(entry_point) + ) + + +def _raise_for_invalid_entrypoint(specification): + # type: (str) -> None + entry = get_export_entry(specification) + if entry is not None and entry.suffix is None: + raise MissingCallableSuffix(str(entry)) + + +class PipScriptMaker(ScriptMaker): + def make(self, specification, options=None): + # type: (str, Dict[str, Any]) -> List[str] + _raise_for_invalid_entrypoint(specification) + return super().make(specification, options) + + +def _install_wheel( + name, # type: str + wheel_zip, # type: ZipFile + wheel_path, # type: str + scheme, # type: Scheme + pycompile=True, # type: bool + warn_script_location=True, # type: bool + direct_url=None, # type: Optional[DirectUrl] + requested=False, # type: bool +): + # type: (...) -> None + """Install a wheel. + + :param name: Name of the project to install + :param wheel_zip: open ZipFile for wheel being installed + :param scheme: Distutils scheme dictating the install directories + :param req_description: String used in place of the requirement, for + logging + :param pycompile: Whether to byte-compile installed Python files + :param warn_script_location: Whether to check that scripts are installed + into a directory on PATH + :raises UnsupportedWheel: + * when the directory holds an unpacked wheel with incompatible + Wheel-Version + * when the .dist-info dir does not match the wheel + """ + info_dir, metadata = parse_wheel(wheel_zip, name) + + if wheel_root_is_purelib(metadata): + lib_dir = scheme.purelib + else: + lib_dir = scheme.platlib + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed = {} # type: Dict[RecordPath, RecordPath] + changed = set() # type: Set[RecordPath] + generated = [] # type: List[str] + + def record_installed(srcfile, destfile, modified=False): + # type: (RecordPath, str, bool) -> None + """Map archive RECORD paths to installation RECORD paths.""" + newpath = _fs_to_record_path(destfile, lib_dir) + installed[srcfile] = newpath + if modified: + changed.add(_fs_to_record_path(destfile)) + + def all_paths(): + # type: () -> Iterable[RecordPath] + names = wheel_zip.namelist() + # If a flag is set, names may be unicode in Python 2. We convert to + # text explicitly so these are valid for lookup in RECORD. 
+ decoded_names = map(ensure_text, names) + for name in decoded_names: + yield cast("RecordPath", name) + + def is_dir_path(path): + # type: (RecordPath) -> bool + return path.endswith("/") + + def assert_no_path_traversal(dest_dir_path, target_path): + # type: (str, str) -> None + if not is_within_directory(dest_dir_path, target_path): + message = ( + "The wheel {!r} has a file {!r} trying to install" + " outside the target directory {!r}" + ) + raise InstallationError( + message.format(wheel_path, target_path, dest_dir_path) + ) + + def root_scheme_file_maker(zip_file, dest): + # type: (ZipFile, str) -> Callable[[RecordPath], File] + def make_root_scheme_file(record_path): + # type: (RecordPath) -> File + normed_path = os.path.normpath(record_path) + dest_path = os.path.join(dest, normed_path) + assert_no_path_traversal(dest, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_root_scheme_file + + def data_scheme_file_maker(zip_file, scheme): + # type: (ZipFile, Scheme) -> Callable[[RecordPath], File] + scheme_paths = {} + for key in SCHEME_KEYS: + encoded_key = ensure_text(key) + scheme_paths[encoded_key] = ensure_text( + getattr(scheme, key), encoding=sys.getfilesystemencoding() + ) + + def make_data_scheme_file(record_path): + # type: (RecordPath) -> File + normed_path = os.path.normpath(record_path) + try: + _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2) + except ValueError: + message = ( + "Unexpected file in {}: {!r}. .data directory contents" + " should be named like: '/'." + ).format(wheel_path, record_path) + raise InstallationError(message) + + try: + scheme_path = scheme_paths[scheme_key] + except KeyError: + valid_scheme_keys = ", ".join(sorted(scheme_paths)) + message = ( + "Unknown scheme key used in {}: {} (for file {!r}). 
.data" + " directory contents should be in subdirectories named" + " with a valid scheme key ({})" + ).format( + wheel_path, scheme_key, record_path, valid_scheme_keys + ) + raise InstallationError(message) + + dest_path = os.path.join(scheme_path, dest_subpath) + assert_no_path_traversal(scheme_path, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_data_scheme_file + + def is_data_scheme_path(path): + # type: (RecordPath) -> bool + return path.split("/", 1)[0].endswith(".data") + + paths = all_paths() + file_paths = filterfalse(is_dir_path, paths) + root_scheme_paths, data_scheme_paths = partition( + is_data_scheme_path, file_paths + ) + + make_root_scheme_file = root_scheme_file_maker( + wheel_zip, + ensure_text(lib_dir, encoding=sys.getfilesystemencoding()), + ) + files = map(make_root_scheme_file, root_scheme_paths) + + def is_script_scheme_path(path): + # type: (RecordPath) -> bool + parts = path.split("/", 2) + return ( + len(parts) > 2 and + parts[0].endswith(".data") and + parts[1] == "scripts" + ) + + other_scheme_paths, script_scheme_paths = partition( + is_script_scheme_path, data_scheme_paths + ) + + make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) + other_scheme_files = map(make_data_scheme_file, other_scheme_paths) + files = chain(files, other_scheme_files) + + # Get the defined entry points + distribution = pkg_resources_distribution_for_wheel( + wheel_zip, name, wheel_path + ) + console, gui = get_entrypoints(distribution) + + def is_entrypoint_wrapper(file): + # type: (File) -> bool + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + path = file.dest_path + name = os.path.basename(path) + if name.lower().endswith('.exe'): + matchname = name[:-4] + elif name.lower().endswith('-script.py'): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return (matchname in console or matchname in gui) + + script_scheme_files = map(make_data_scheme_file, script_scheme_paths) + script_scheme_files = filterfalse( + is_entrypoint_wrapper, script_scheme_files + ) + script_scheme_files = map(ScriptFile, script_scheme_files) + files = chain(files, script_scheme_files) + + for file in files: + file.save() + record_installed(file.src_record_path, file.dest_path, file.changed) + + def pyc_source_file_paths(): + # type: () -> Iterator[str] + # We de-duplicate installation paths, since there can be overlap (e.g. + # file in .data maps to same location as file in wheel root). + # Sorting installation paths makes it easier to reproduce and debug + # issues related to permissions on existing files. + for installed_path in sorted(set(installed.values())): + full_installed_path = os.path.join(lib_dir, installed_path) + if not os.path.isfile(full_installed_path): + continue + if not full_installed_path.endswith('.py'): + continue + yield full_installed_path + + def pyc_output_path(path): + # type: (str) -> str + """Return the path the pyc file would have been written to. + """ + return importlib.util.cache_from_source(path) + + # Compile all of the pyc files for the installed files + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings('ignore') + for path in pyc_source_file_paths(): + # Python 2's `compileall.compile_file` requires a str in + # error cases, so we must convert to the native type. 
+ path_arg = ensure_str( + path, encoding=sys.getfilesystemencoding() + ) + success = compileall.compile_file( + path_arg, force=True, quiet=True + ) + if success: + pyc_path = pyc_output_path(path) + assert os.path.exists(pyc_path) + pyc_record_path = cast( + "RecordPath", pyc_path.replace(os.path.sep, "/") + ) + record_installed(pyc_record_path, pyc_path) + logger.debug(stdout.getvalue()) + + maker = PipScriptMaker(None, scheme.scripts) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {''} + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Generate the console and GUI entry points specified in the wheel + scripts_to_generate = get_console_script_specs(console) + + gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items())) + + generated_console_scripts = maker.make_multiple(scripts_to_generate) + generated.extend(generated_console_scripts) + + generated.extend( + maker.make_multiple(gui_scripts_to_generate, {'gui': True}) + ) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warning(msg) + + generated_file_mode = 0o666 & ~current_umask() + + @contextlib.contextmanager + def _generate_file(path, **kwargs): + # type: (str, **Any) -> Iterator[BinaryIO] + with adjacent_tmp_file(path, **kwargs) as f: + yield f + os.chmod(f.name, generated_file_mode) + replace(f.name, path) + + dest_info_dir = os.path.join(lib_dir, info_dir) + + # Record pip as the installer + installer_path = os.path.join(dest_info_dir, 'INSTALLER') + with _generate_file(installer_path) as installer_file: + installer_file.write(b'pip\n') + generated.append(installer_path) + + # Record the PEP 610 direct URL reference + if direct_url is not None: + direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) + with _generate_file(direct_url_path) as direct_url_file: + direct_url_file.write(direct_url.to_json().encode("utf-8")) + generated.append(direct_url_path) + + # Record the REQUESTED file + if requested: + requested_path = os.path.join(dest_info_dir, 'REQUESTED') + with open(requested_path, "wb"): + pass + generated.append(requested_path) + + record_text = distribution.get_metadata('RECORD') + record_rows = list(csv.reader(record_text.splitlines())) + + rows = get_csv_rows_for_installed( + record_rows, + installed=installed, + changed=changed, + generated=generated, + lib_dir=lib_dir) + + # Record details of all files installed + record_path = os.path.join(dest_info_dir, 'RECORD') + + with _generate_file(record_path, **csv_io_kwargs('w')) as record_file: + # The type mypy infers for record_file is different for Python 3 + # (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly + # cast to typing.IO[str] as a workaround. + writer = csv.writer(cast('IO[str]', record_file)) + writer.writerows(_normalized_outrows(rows)) + + +@contextlib.contextmanager +def req_error_context(req_description): + # type: (str) -> Iterator[None] + try: + yield + except InstallationError as e: + message = "For req: {}. 
{}".format(req_description, e.args[0]) + reraise( + InstallationError, InstallationError(message), sys.exc_info()[2] + ) + + +def install_wheel( + name, # type: str + wheel_path, # type: str + scheme, # type: Scheme + req_description, # type: str + pycompile=True, # type: bool + warn_script_location=True, # type: bool + direct_url=None, # type: Optional[DirectUrl] + requested=False, # type: bool +): + # type: (...) -> None + with ZipFile(wheel_path, allowZip64=True) as z: + with req_error_context(req_description): + _install_wheel( + name=name, + wheel_zip=z, + wheel_path=wheel_path, + scheme=scheme, + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=requested, + ) diff --git a/venv/Lib/site-packages/pip/_internal/operations/prepare.py b/venv/Lib/site-packages/pip/_internal/operations/prepare.py new file mode 100644 index 00000000..3d074f9f --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/operations/prepare.py @@ -0,0 +1,655 @@ +"""Prepares a distribution for installation +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import mimetypes +import os +import shutil +from typing import Dict, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import Distribution + +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.distributions.installed import InstalledDistribution +from pip._internal.exceptions import ( + DirectoryUrlHashUnsupported, + HashMismatch, + HashUnpinned, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + VcsHashUnsupported, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.network.download import BatchDownloader, Downloader +from pip._internal.network.lazy_wheel import ( + HTTPRangeRequestUnsupported, + dist_from_wheel_url, +) +from pip._internal.network.session import PipSession +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import RequirementTracker +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filesystem import copy2_fixed +from pip._internal.utils.hashes import Hashes, MissingHashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import display_path, hide_url, rmtree +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.unpacking import unpack_file +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +def _get_prepared_distribution( + req, # type: InstallRequirement + req_tracker, # type: RequirementTracker + finder, # type: PackageFinder + build_isolation, # type: bool +): + # type: (...) 
-> Distribution + """Prepare a distribution for installation.""" + abstract_dist = make_distribution_for_install_requirement(req) + with req_tracker.track(req): + abstract_dist.prepare_distribution_metadata(finder, build_isolation) + return abstract_dist.get_pkg_resources_distribution() + + +def unpack_vcs_link(link, location): + # type: (Link, str) -> None + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend is not None + vcs_backend.unpack(location, url=hide_url(link.url)) + + +class File: + + def __init__(self, path, content_type): + # type: (str, Optional[str]) -> None + self.path = path + if content_type is None: + self.content_type = mimetypes.guess_type(path)[0] + else: + self.content_type = content_type + + +def get_http_url( + link, # type: Link + download, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> File + temp_dir = TempDirectory(kind="unpack", globally_managed=True) + # If a download dir is specified, is the file already downloaded there? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) + + if already_downloaded_path: + from_path = already_downloaded_path + content_type = None + else: + # let's download to a tmp dir + from_path, content_type = download(link, temp_dir.path) + if hashes: + hashes.check_against_path(from_path) + + return File(from_path, content_type) + + +def _copy2_ignoring_special_files(src, dest): + # type: (str, str) -> None + """Copying special files is not supported, but as a convenience to users + we skip errors copying them. This supports tools that may create e.g. + socket files in the project source directory. + """ + try: + copy2_fixed(src, dest) + except shutil.SpecialFileError as e: + # SpecialFileError may be raised due to either the source or + # destination. If the destination was the cause then we would actually + # care, but since the destination directory is deleted prior to + # copy we ignore all of them assuming it is caused by the source. + logger.warning( + "Ignoring special file error '%s' encountered copying %s to %s.", + str(e), + src, + dest, + ) + + +def _copy_source_tree(source, target): + # type: (str, str) -> None + target_abspath = os.path.abspath(target) + target_basename = os.path.basename(target_abspath) + target_dirname = os.path.dirname(target_abspath) + + def ignore(d, names): + # type: (str, List[str]) -> List[str] + skipped = [] # type: List[str] + if d == source: + # Pulling in those directories can potentially be very slow, + # exclude the following directories if they appear in the top + # level dir (and only it). + # See discussion at https://github.com/pypa/pip/pull/6770 + skipped += ['.tox', '.nox'] + if os.path.abspath(d) == target_dirname: + # Prevent an infinite recursion if the target is in source. + # This can happen when TMPDIR is set to ${PWD}/... + # and we copy PWD to TMPDIR. + skipped += [target_basename] + return skipped + + shutil.copytree( + source, + target, + ignore=ignore, + symlinks=True, + copy_function=_copy2_ignoring_special_files, + ) + + +def get_file_url( + link, # type: Link + download_dir=None, # type: Optional[str] + hashes=None # type: Optional[Hashes] +): + # type: (...) -> File + """Get file and optionally check its hash. + """ + # If a download dir is specified, is the file already there and valid? 
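# --- Annotation (illustrative, not pip's Hashes class): the kind of check that
# hashes.check_against_path() performs in get_file_url() below, sketched with
# the standard library; path and digest are placeholders.
import hashlib

def sha256_matches_example(path, expected_hex):
    # Stream the file so large archives do not have to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_hex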
+ already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) + + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link.file_path + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. + if hashes: + hashes.check_against_path(from_path) + return File(from_path, None) + + +def unpack_url( + link, # type: Link + location, # type: str + download, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> Optional[File] + """Unpack link into location, downloading if required. + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if link.is_vcs: + unpack_vcs_link(link, location) + return None + + # Once out-of-tree-builds are no longer supported, could potentially + # replace the below condition with `assert not link.is_existing_dir` + # - unpack_url does not need to be called for in-tree-builds. + # + # As further cleanup, _copy_source_tree and accompanying tests can + # be removed. + if link.is_existing_dir(): + deprecated( + "A future pip version will change local packages to be built " + "in-place without first copying to a temporary directory. " + "We recommend you use --use-feature=in-tree-build to test " + "your packages with this new behavior before it becomes the " + "default.\n", + replacement=None, + gone_in="21.3", + issue=7555 + ) + if os.path.isdir(location): + rmtree(location) + _copy_source_tree(link.file_path, location) + return None + + # file urls + if link.is_file: + file = get_file_url(link, download_dir, hashes=hashes) + + # http urls + else: + file = get_http_url( + link, + download, + download_dir, + hashes=hashes, + ) + + # unpack the archive to the build dir location. even when only downloading + # archives, they have to be unpacked to parse dependencies, except wheels + if not link.is_wheel: + unpack_file(file.path, location, file.content_type) + + return file + + +def _check_download_dir(link, download_dir, hashes): + # type: (Link, str, Optional[Hashes]) -> Optional[str] + """ Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + + if not os.path.exists(download_path): + return None + + # If already downloaded, does its hash match? + logger.info('File was already downloaded %s', download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + logger.warning( + 'Previously-downloaded file %s has bad hash. 
' + 'Re-downloading.', + download_path + ) + os.unlink(download_path) + return None + return download_path + + +class RequirementPreparer: + """Prepares a Requirement + """ + + def __init__( + self, + build_dir, # type: str + download_dir, # type: Optional[str] + src_dir, # type: str + build_isolation, # type: bool + req_tracker, # type: RequirementTracker + session, # type: PipSession + progress_bar, # type: str + finder, # type: PackageFinder + require_hashes, # type: bool + use_user_site, # type: bool + lazy_wheel, # type: bool + in_tree_build, # type: bool + ): + # type: (...) -> None + super().__init__() + + self.src_dir = src_dir + self.build_dir = build_dir + self.req_tracker = req_tracker + self._session = session + self._download = Downloader(session, progress_bar) + self._batch_download = BatchDownloader(session, progress_bar) + self.finder = finder + + # Where still-packed archives should be written to. If None, they are + # not saved, and are deleted immediately after unpacking. + self.download_dir = download_dir + + # Is build isolation allowed? + self.build_isolation = build_isolation + + # Should hash-checking be required? + self.require_hashes = require_hashes + + # Should install in user site-packages? + self.use_user_site = use_user_site + + # Should wheels be downloaded lazily? + self.use_lazy_wheel = lazy_wheel + + # Should in-tree builds be used for local paths? + self.in_tree_build = in_tree_build + + # Memoized downloaded files, as mapping of url: (path, mime type) + self._downloaded = {} # type: Dict[str, Tuple[str, str]] + + # Previous "header" printed for a link-based InstallRequirement + self._previous_requirement_header = ("", "") + + def _log_preparing_link(self, req): + # type: (InstallRequirement) -> None + """Provide context for the requirement being prepared.""" + if req.link.is_file and not req.original_link_is_in_wheel_cache: + message = "Processing %s" + information = str(display_path(req.link.file_path)) + else: + message = "Collecting %s" + information = str(req.req or req) + + if (message, information) != self._previous_requirement_header: + self._previous_requirement_header = (message, information) + logger.info(message, information) + + if req.original_link_is_in_wheel_cache: + with indent_log(): + logger.info("Using cached %s", req.link.filename) + + def _ensure_link_req_src_dir(self, req, parallel_builds): + # type: (InstallRequirement, bool) -> None + """Ensure source_dir of a linked InstallRequirement.""" + # Since source_dir is only set for editable requirements. + if req.link.is_wheel: + # We don't need to unpack wheels, so no need for a source + # directory. + return + assert req.source_dir is None + if req.link.is_existing_dir() and self.in_tree_build: + # build local directories in-tree + req.source_dir = req.link.file_path + return + + # We always delete unpacked sdists after pip runs. + req.ensure_has_source_dir( + self.build_dir, + autodelete=True, + parallel_builds=parallel_builds, + ) + + # If a checkout exists, it's unwise to keep going. version + # inconsistencies are logged later, but do not fail the + # installation. + # FIXME: this won't upgrade when there's an existing + # package unpacked in `req.source_dir` + if os.path.exists(os.path.join(req.source_dir, 'setup.py')): + raise PreviousBuildDirError( + "pip can't proceed with requirements '{}' due to a" + "pre-existing build directory ({}). This is likely " + "due to a previous installation that failed . pip is " + "being responsible and not assuming it can delete this. 
" + "Please delete it and try again.".format(req, req.source_dir) + ) + + def _get_linked_req_hashes(self, req): + # type: (InstallRequirement) -> Hashes + # By the time this is called, the requirement's link should have + # been checked so we can tell what kind of requirements req is + # and raise some more informative errors than otherwise. + # (For example, we can raise VcsHashUnsupported for a VCS URL + # rather than HashMissing.) + if not self.require_hashes: + return req.hashes(trust_internet=True) + + # We could check these first 2 conditions inside unpack_url + # and save repetition of conditions, but then we would + # report less-useful error messages for unhashable + # requirements, complaining that there's no hash provided. + if req.link.is_vcs: + raise VcsHashUnsupported() + if req.link.is_existing_dir(): + raise DirectoryUrlHashUnsupported() + + # Unpinned packages are asking for trouble when a new version + # is uploaded. This isn't a security check, but it saves users + # a surprising hash mismatch in the future. + # file:/// URLs aren't pinnable, so don't complain about them + # not being pinned. + if req.original_link is None and not req.is_pinned: + raise HashUnpinned() + + # If known-good hashes are missing for this requirement, + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. + return req.hashes(trust_internet=False) or MissingHashes() + + def _fetch_metadata_using_lazy_wheel(self, link): + # type: (Link) -> Optional[Distribution] + """Fetch metadata using lazy wheel, if possible.""" + if not self.use_lazy_wheel: + return None + if self.require_hashes: + logger.debug('Lazy wheel is not used as hash checking is required') + return None + if link.is_file or not link.is_wheel: + logger.debug( + 'Lazy wheel is not used as ' + '%r does not points to a remote wheel', + link, + ) + return None + + wheel = Wheel(link.filename) + name = canonicalize_name(wheel.name) + logger.info( + 'Obtaining dependency information from %s %s', + name, wheel.version, + ) + url = link.url.split('#', 1)[0] + try: + return dist_from_wheel_url(name, url, self._session) + except HTTPRangeRequestUnsupported: + logger.debug('%s does not support range requests', url) + return None + + def _complete_partial_requirements( + self, + partially_downloaded_reqs, # type: Iterable[InstallRequirement] + parallel_builds=False, # type: bool + ): + # type: (...) -> None + """Download any requirements which were only fetched by metadata.""" + # Download to a temporary directory. These will be copied over as + # needed for downstream 'download', 'wheel', and 'install' commands. + temp_dir = TempDirectory(kind="unpack", globally_managed=True).path + + # Map each link to the requirement that owns it. This allows us to set + # `req.local_file_path` on the appropriate requirement after passing + # all the links at once into BatchDownloader. + links_to_fully_download = {} # type: Dict[Link, InstallRequirement] + for req in partially_downloaded_reqs: + assert req.link + links_to_fully_download[req.link] = req + + batch_download = self._batch_download( + links_to_fully_download.keys(), + temp_dir, + ) + for link, (filepath, _) in batch_download: + logger.debug("Downloading link %s to %s", link, filepath) + req = links_to_fully_download[link] + req.local_file_path = filepath + + # This step is necessary to ensure all lazy wheels are processed + # successfully by the 'download', 'wheel', and 'install' commands. 
+ for req in partially_downloaded_reqs: + self._prepare_linked_requirement(req, parallel_builds) + + def prepare_linked_requirement(self, req, parallel_builds=False): + # type: (InstallRequirement, bool) -> Distribution + """Prepare a requirement to be obtained from req.link.""" + assert req.link + link = req.link + self._log_preparing_link(req) + with indent_log(): + # Check if the relevant file is already available + # in the download directory + file_path = None + if self.download_dir is not None and link.is_wheel: + hashes = self._get_linked_req_hashes(req) + file_path = _check_download_dir(req.link, self.download_dir, hashes) + + if file_path is not None: + # The file is already available, so mark it as downloaded + self._downloaded[req.link.url] = file_path, None + else: + # The file is not available, attempt to fetch only metadata + wheel_dist = self._fetch_metadata_using_lazy_wheel(link) + if wheel_dist is not None: + req.needs_more_preparation = True + return wheel_dist + + # None of the optimizations worked, fully prepare the requirement + return self._prepare_linked_requirement(req, parallel_builds) + + def prepare_linked_requirements_more(self, reqs, parallel_builds=False): + # type: (Iterable[InstallRequirement], bool) -> None + """Prepare linked requirements more, if needed.""" + reqs = [req for req in reqs if req.needs_more_preparation] + for req in reqs: + # Determine if any of these requirements were already downloaded. + if self.download_dir is not None and req.link.is_wheel: + hashes = self._get_linked_req_hashes(req) + file_path = _check_download_dir(req.link, self.download_dir, hashes) + if file_path is not None: + self._downloaded[req.link.url] = file_path, None + req.needs_more_preparation = False + + # Prepare requirements we found were already downloaded for some + # reason. The other downloads will be completed separately. + partially_downloaded_reqs = [] # type: List[InstallRequirement] + for req in reqs: + if req.needs_more_preparation: + partially_downloaded_reqs.append(req) + else: + self._prepare_linked_requirement(req, parallel_builds) + + # TODO: separate this part out from RequirementPreparer when the v1 + # resolver can be removed! + self._complete_partial_requirements( + partially_downloaded_reqs, parallel_builds=parallel_builds, + ) + + def _prepare_linked_requirement(self, req, parallel_builds): + # type: (InstallRequirement, bool) -> Distribution + assert req.link + link = req.link + + self._ensure_link_req_src_dir(req, parallel_builds) + hashes = self._get_linked_req_hashes(req) + + if link.is_existing_dir() and self.in_tree_build: + local_file = None + elif link.url not in self._downloaded: + try: + local_file = unpack_url( + link, req.source_dir, self._download, + self.download_dir, hashes + ) + except NetworkConnectionError as exc: + raise InstallationError( + 'Could not install requirement {} because of HTTP ' + 'error {} for URL {}'.format(req, exc, link) + ) + else: + file_path, content_type = self._downloaded[link.url] + if hashes: + hashes.check_against_path(file_path) + local_file = File(file_path, content_type) + + # For use in later processing, + # preserve the file path on the requirement. 
+ if local_file: + req.local_file_path = local_file.path + + dist = _get_prepared_distribution( + req, self.req_tracker, self.finder, self.build_isolation, + ) + return dist + + def save_linked_requirement(self, req): + # type: (InstallRequirement) -> None + assert self.download_dir is not None + assert req.link is not None + link = req.link + if link.is_vcs or (link.is_existing_dir() and req.editable): + # Make a .zip of the source_dir we already created. + req.archive(self.download_dir) + return + + if link.is_existing_dir(): + logger.debug( + 'Not copying link to destination directory ' + 'since it is a directory: %s', link, + ) + return + if req.local_file_path is None: + # No distribution was downloaded for this requirement. + return + + download_location = os.path.join(self.download_dir, link.filename) + if not os.path.exists(download_location): + shutil.copy(req.local_file_path, download_location) + download_path = display_path(download_location) + logger.info('Saved %s', download_path) + + def prepare_editable_requirement( + self, + req, # type: InstallRequirement + ): + # type: (...) -> Distribution + """Prepare an editable requirement + """ + assert req.editable, "cannot prepare a non-editable req as editable" + + logger.info('Obtaining %s', req) + + with indent_log(): + if self.require_hashes: + raise InstallationError( + 'The editable requirement {} cannot be installed when ' + 'requiring hashes, because there is no single file to ' + 'hash.'.format(req) + ) + req.ensure_has_source_dir(self.src_dir) + req.update_editable() + + dist = _get_prepared_distribution( + req, self.req_tracker, self.finder, self.build_isolation, + ) + + req.check_if_exists(self.use_user_site) + + return dist + + def prepare_installed_requirement( + self, + req, # type: InstallRequirement + skip_reason # type: str + ): + # type: (...) -> Distribution + """Prepare an already-installed requirement + """ + assert req.satisfied_by, "req should have been satisfied but isn't" + assert skip_reason is not None, ( + "did not get skip reason skipped but req.satisfied_by " + "is set to {}".format(req.satisfied_by) + ) + logger.info( + 'Requirement %s: %s (%s)', + skip_reason, req, req.satisfied_by.version + ) + with indent_log(): + if self.require_hashes: + logger.debug( + 'Since it is already installed, we are trusting this ' + 'package without checking its hash. To ensure a ' + 'completely repeatable environment, install into an ' + 'empty virtualenv.' 
+ ) + return InstalledDistribution(req).get_pkg_resources_distribution() diff --git a/env/lib/python2.7/site-packages/pip/_internal/pyproject.py b/venv/Lib/site-packages/pip/_internal/pyproject.py similarity index 80% rename from env/lib/python2.7/site-packages/pip/_internal/pyproject.py rename to venv/Lib/site-packages/pip/_internal/pyproject.py index 43efbed4..9016d355 100644 --- a/env/lib/python2.7/site-packages/pip/_internal/pyproject.py +++ b/venv/Lib/site-packages/pip/_internal/pyproject.py @@ -1,35 +1,29 @@ -from __future__ import absolute_import - -import io import os -import sys +from collections import namedtuple +from typing import Any, List, Optional -from pip._vendor import pytoml, six +from pip._vendor import toml +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement from pip._internal.exceptions import InstallationError -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Any, Tuple, Optional, List def _is_list_of_str(obj): # type: (Any) -> bool return ( isinstance(obj, list) and - all(isinstance(item, six.string_types) for item in obj) + all(isinstance(item, str) for item in obj) ) -def make_pyproject_path(setup_py_dir): +def make_pyproject_path(unpacked_source_directory): # type: (str) -> str - path = os.path.join(setup_py_dir, 'pyproject.toml') + return os.path.join(unpacked_source_directory, 'pyproject.toml') - # Python2 __file__ should not be unicode - if six.PY2 and isinstance(path, six.text_type): - path = path.encode(sys.getfilesystemencoding()) - return path +BuildSystemDetails = namedtuple('BuildSystemDetails', [ + 'requires', 'backend', 'check', 'backend_path' +]) def load_pyproject_toml( @@ -38,7 +32,7 @@ def load_pyproject_toml( setup_py, # type: str req_name # type: str ): - # type: (...) -> Optional[Tuple[List[str], str, List[str]]] + # type: (...) -> Optional[BuildSystemDetails] """Load the pyproject.toml file. Parameters: @@ -56,14 +50,16 @@ def load_pyproject_toml( name of PEP 517 backend, requirements we should check are installed after setting up the build environment + directory paths to import the backend from (backend-path), + relative to the project root. 
) """ has_pyproject = os.path.isfile(pyproject_toml) has_setup = os.path.isfile(setup_py) if has_pyproject: - with io.open(pyproject_toml, encoding="utf-8") as f: - pp_toml = pytoml.load(f) + with open(pyproject_toml, encoding="utf-8") as f: + pp_toml = toml.load(f) build_system = pp_toml.get("build-system") else: build_system = None @@ -150,7 +146,23 @@ def load_pyproject_toml( reason="'build-system.requires' is not a list of strings.", )) + # Each requirement must be valid as per PEP 508 + for requirement in requires: + try: + Requirement(requirement) + except InvalidRequirement: + raise InstallationError( + error_template.format( + package=req_name, + reason=( + "'build-system.requires' contains an invalid " + "requirement: {!r}".format(requirement) + ), + ) + ) + backend = build_system.get("build-backend") + backend_path = build_system.get("backend-path", []) check = [] # type: List[str] if backend is None: # If the user didn't specify a backend, we assume they want to use @@ -168,4 +180,4 @@ def load_pyproject_toml( backend = "setuptools.build_meta:__legacy__" check = ["setuptools>=40.8.0", "wheel"] - return (requires, backend, check) + return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/venv/Lib/site-packages/pip/_internal/req/__init__.py b/venv/Lib/site-packages/pip/_internal/req/__init__.py new file mode 100644 index 00000000..06f0a082 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/req/__init__.py @@ -0,0 +1,98 @@ +import collections +import logging +from typing import Iterator, List, Optional, Sequence, Tuple + +from pip._internal.utils.logging import indent_log + +from .req_file import parse_requirements +from .req_install import InstallRequirement +from .req_set import RequirementSet + +__all__ = [ + "RequirementSet", "InstallRequirement", + "parse_requirements", "install_given_reqs", +] + +logger = logging.getLogger(__name__) + + +class InstallationResult: + def __init__(self, name): + # type: (str) -> None + self.name = name + + def __repr__(self): + # type: () -> str + return f"InstallationResult(name={self.name!r})" + + +def _validate_requirements( + requirements, # type: List[InstallRequirement] +): + # type: (...) -> Iterator[Tuple[str, InstallRequirement]] + for req in requirements: + assert req.name, f"invalid to-be-installed requirement: {req}" + yield req.name, req + + +def install_given_reqs( + requirements, # type: List[InstallRequirement] + install_options, # type: List[str] + global_options, # type: Sequence[str] + root, # type: Optional[str] + home, # type: Optional[str] + prefix, # type: Optional[str] + warn_script_location, # type: bool + use_user_site, # type: bool + pycompile, # type: bool +): + # type: (...) -> List[InstallationResult] + """ + Install everything in the given list. 
+ + (to be called after having downloaded and unpacked the packages) + """ + to_install = collections.OrderedDict(_validate_requirements(requirements)) + + if to_install: + logger.info( + 'Installing collected packages: %s', + ', '.join(to_install.keys()), + ) + + installed = [] + + with indent_log(): + for req_name, requirement in to_install.items(): + if requirement.should_reinstall: + logger.info('Attempting uninstall: %s', req_name) + with indent_log(): + uninstalled_pathset = requirement.uninstall( + auto_confirm=True + ) + else: + uninstalled_pathset = None + + try: + requirement.install( + install_options, + global_options, + root=root, + home=home, + prefix=prefix, + warn_script_location=warn_script_location, + use_user_site=use_user_site, + pycompile=pycompile, + ) + except Exception: + # if install did not succeed, rollback previous uninstall + if uninstalled_pathset and not requirement.install_succeeded: + uninstalled_pathset.rollback() + raise + else: + if uninstalled_pathset and requirement.install_succeeded: + uninstalled_pathset.commit() + + installed.append(InstallationResult(req_name)) + + return installed diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..4d6dc613 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-36.pyc new file mode 100644 index 00000000..c0a274e6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-36.pyc new file mode 100644 index 00000000..9761feaa Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-36.pyc new file mode 100644 index 00000000..823de3fc Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-36.pyc new file mode 100644 index 00000000..1b8af9d1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-36.pyc new file mode 100644 index 00000000..ce5a9e40 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-36.pyc new file mode 100644 index 00000000..fafdf989 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/req/constructors.py 
b/venv/Lib/site-packages/pip/_internal/req/constructors.py new file mode 100644 index 00000000..3f9e7dd7 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/req/constructors.py @@ -0,0 +1,486 @@ +"""Backing implementation for InstallRequirement's various constructors + +The idea here is that these formed a major chunk of InstallRequirement's size +so, moving them and support code dedicated to them outside of that class +helps creates for better understandability for the rest of the code. + +These are meant to be used elsewhere within pip to create instances of +InstallRequirement. +""" + +import logging +import os +import re +from typing import Any, Dict, Optional, Set, Tuple, Union + +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement +from pip._vendor.packaging.specifiers import Specifier +from pip._vendor.pkg_resources import RequirementParseError, parse_requirements + +from pip._internal.exceptions import InstallationError +from pip._internal.models.index import PyPI, TestPyPI +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.pyproject import make_pyproject_path +from pip._internal.req.req_file import ParsedRequirement +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import is_installable_dir +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import is_url, vcs + +__all__ = [ + "install_req_from_editable", "install_req_from_line", + "parse_editable" +] + +logger = logging.getLogger(__name__) +operators = Specifier._operators.keys() + + +def _strip_extras(path): + # type: (str) -> Tuple[str, Optional[str]] + m = re.match(r'^(.+)(\[[^\]]+\])$', path) + extras = None + if m: + path_no_extras = m.group(1) + extras = m.group(2) + else: + path_no_extras = path + + return path_no_extras, extras + + +def convert_extras(extras): + # type: (Optional[str]) -> Set[str] + if not extras: + return set() + return Requirement("placeholder" + extras.lower()).extras + + +def parse_editable(editable_req): + # type: (str) -> Tuple[Optional[str], str, Set[str]] + """Parses an editable requirement into: + - a requirement name + - an URL + - extras + - editable options + Accepted requirements: + svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir + .[some_extra] + """ + + url = editable_req + + # If a file path is specified with extras, strip off the extras. + url_no_extras, extras = _strip_extras(url) + + if os.path.isdir(url_no_extras): + setup_py = os.path.join(url_no_extras, 'setup.py') + setup_cfg = os.path.join(url_no_extras, 'setup.cfg') + if not os.path.exists(setup_py) and not os.path.exists(setup_cfg): + msg = ( + 'File "setup.py" or "setup.cfg" not found. 
Directory cannot be ' + 'installed in editable mode: {}' + .format(os.path.abspath(url_no_extras)) + ) + pyproject_path = make_pyproject_path(url_no_extras) + if os.path.isfile(pyproject_path): + msg += ( + '\n(A "pyproject.toml" file was found, but editable ' + 'mode currently requires a setuptools-based build.)' + ) + raise InstallationError(msg) + + # Treating it as code that has already been checked out + url_no_extras = path_to_url(url_no_extras) + + if url_no_extras.lower().startswith('file:'): + package_name = Link(url_no_extras).egg_fragment + if extras: + return ( + package_name, + url_no_extras, + Requirement("placeholder" + extras.lower()).extras, + ) + else: + return package_name, url_no_extras, set() + + for version_control in vcs: + if url.lower().startswith(f'{version_control}:'): + url = f'{version_control}+{url}' + break + + link = Link(url) + + if not link.is_vcs: + backends = ", ".join(vcs.all_schemes) + raise InstallationError( + f'{editable_req} is not a valid editable requirement. ' + f'It should either be a path to a local project or a VCS URL ' + f'(beginning with {backends}).' + ) + + package_name = link.egg_fragment + if not package_name: + raise InstallationError( + "Could not detect requirement name for '{}', please specify one " + "with #egg=your_package_name".format(editable_req) + ) + return package_name, url, set() + + +def deduce_helpful_msg(req): + # type: (str) -> str + """Returns helpful msg in case requirements file does not exist, + or cannot be parsed. + + :params req: Requirements file path + """ + msg = "" + if os.path.exists(req): + msg = " The path does exist. " + # Try to parse and check if it is a requirements file. + try: + with open(req) as fp: + # parse first line only + next(parse_requirements(fp.read())) + msg += ( + "The argument you provided " + "({}) appears to be a" + " requirements file. If that is the" + " case, use the '-r' flag to install" + " the packages specified within it." + ).format(req) + except RequirementParseError: + logger.debug( + "Cannot parse '%s' as requirements file", req, exc_info=True + ) + else: + msg += f" File '{req}' does not exist." + return msg + + +class RequirementParts: + def __init__( + self, + requirement, # type: Optional[Requirement] + link, # type: Optional[Link] + markers, # type: Optional[Marker] + extras, # type: Set[str] + ): + self.requirement = requirement + self.link = link + self.markers = markers + self.extras = extras + + +def parse_req_from_editable(editable_req): + # type: (str) -> RequirementParts + name, url, extras_override = parse_editable(editable_req) + + if name is not None: + try: + req = Requirement(name) # type: Optional[Requirement] + except InvalidRequirement: + raise InstallationError(f"Invalid requirement: '{name}'") + else: + req = None + + link = Link(url) + + return RequirementParts(req, link, None, extras_override) + + +# ---- The actual constructors follow ---- + + +def install_req_from_editable( + editable_req, # type: str + comes_from=None, # type: Optional[Union[InstallRequirement, str]] + use_pep517=None, # type: Optional[bool] + isolated=False, # type: bool + options=None, # type: Optional[Dict[str, Any]] + constraint=False, # type: bool + user_supplied=False, # type: bool +): + # type: (...) 
-> InstallRequirement + + parts = parse_req_from_editable(editable_req) + + return InstallRequirement( + parts.requirement, + comes_from=comes_from, + user_supplied=user_supplied, + editable=True, + link=parts.link, + constraint=constraint, + use_pep517=use_pep517, + isolated=isolated, + install_options=options.get("install_options", []) if options else [], + global_options=options.get("global_options", []) if options else [], + hash_options=options.get("hashes", {}) if options else {}, + extras=parts.extras, + ) + + +def _looks_like_path(name): + # type: (str) -> bool + """Checks whether the string "looks like" a path on the filesystem. + + This does not check whether the target actually exists, only judge from the + appearance. + + Returns true if any of the following conditions is true: + * a path separator is found (either os.path.sep or os.path.altsep); + * a dot is found (which represents the current directory). + """ + if os.path.sep in name: + return True + if os.path.altsep is not None and os.path.altsep in name: + return True + if name.startswith("."): + return True + return False + + +def _get_url_from_path(path, name): + # type: (str, str) -> Optional[str] + """ + First, it checks whether a provided path is an installable directory + (e.g. it has a setup.py). If it is, returns the path. + + If false, check if the path is an archive file (such as a .whl). + The function checks if the path is a file. If false, if the path has + an @, it will treat it as a PEP 440 URL requirement and return the path. + """ + if _looks_like_path(name) and os.path.isdir(path): + if is_installable_dir(path): + return path_to_url(path) + raise InstallationError( + f"Directory {name!r} is not installable. Neither 'setup.py' " + "nor 'pyproject.toml' found." + ) + if not is_archive_file(path): + return None + if os.path.isfile(path): + return path_to_url(path) + urlreq_parts = name.split('@', 1) + if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]): + # If the path contains '@' and the part before it does not look + # like a path, try to treat it as a PEP 440 URL req instead. + return None + logger.warning( + 'Requirement %r looks like a filename, but the ' + 'file does not exist', + name + ) + return path_to_url(path) + + +def parse_req_from_line(name, line_source): + # type: (str, Optional[str]) -> RequirementParts + if is_url(name): + marker_sep = '; ' + else: + marker_sep = ';' + if marker_sep in name: + name, markers_as_string = name.split(marker_sep, 1) + markers_as_string = markers_as_string.strip() + if not markers_as_string: + markers = None + else: + markers = Marker(markers_as_string) + else: + markers = None + name = name.strip() + req_as_string = None + path = os.path.normpath(os.path.abspath(name)) + link = None + extras_as_string = None + + if is_url(name): + link = Link(name) + else: + p, extras_as_string = _strip_extras(path) + url = _get_url_from_path(p, name) + if url is not None: + link = Link(url) + + # it's a local file, dir, or url + if link: + # Handle relative file URLs + if link.scheme == 'file' and re.search(r'\.\./', link.url): + link = Link( + path_to_url(os.path.normpath(os.path.abspath(link.path)))) + # wheel file + if link.is_wheel: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + req_as_string = f"{wheel.name}=={wheel.version}" + else: + # set the req to the egg fragment. 
when it's not there, this + # will become an 'unnamed' requirement + req_as_string = link.egg_fragment + + # a requirement specifier + else: + req_as_string = name + + extras = convert_extras(extras_as_string) + + def with_source(text): + # type: (str) -> str + if not line_source: + return text + return f'{text} (from {line_source})' + + def _parse_req_string(req_as_string: str) -> Requirement: + try: + req = Requirement(req_as_string) + except InvalidRequirement: + if os.path.sep in req_as_string: + add_msg = "It looks like a path." + add_msg += deduce_helpful_msg(req_as_string) + elif ('=' in req_as_string and + not any(op in req_as_string for op in operators)): + add_msg = "= is not a valid operator. Did you mean == ?" + else: + add_msg = '' + msg = with_source( + f'Invalid requirement: {req_as_string!r}' + ) + if add_msg: + msg += f'\nHint: {add_msg}' + raise InstallationError(msg) + else: + # Deprecate extras after specifiers: "name>=1.0[extras]" + # This currently works by accident because _strip_extras() parses + # any extras in the end of the string and those are saved in + # RequirementParts + for spec in req.specifier: + spec_str = str(spec) + if spec_str.endswith(']'): + msg = f"Extras after version '{spec_str}'." + raise InstallationError(msg) + return req + + if req_as_string is not None: + req = _parse_req_string(req_as_string) # type: Optional[Requirement] + else: + req = None + + return RequirementParts(req, link, markers, extras) + + +def install_req_from_line( + name, # type: str + comes_from=None, # type: Optional[Union[str, InstallRequirement]] + use_pep517=None, # type: Optional[bool] + isolated=False, # type: bool + options=None, # type: Optional[Dict[str, Any]] + constraint=False, # type: bool + line_source=None, # type: Optional[str] + user_supplied=False, # type: bool +): + # type: (...) -> InstallRequirement + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. + + :param line_source: An optional string describing where the line is from, + for logging purposes in case of an error. + """ + parts = parse_req_from_line(name, line_source) + + return InstallRequirement( + parts.requirement, comes_from, link=parts.link, markers=parts.markers, + use_pep517=use_pep517, isolated=isolated, + install_options=options.get("install_options", []) if options else [], + global_options=options.get("global_options", []) if options else [], + hash_options=options.get("hashes", {}) if options else {}, + constraint=constraint, + extras=parts.extras, + user_supplied=user_supplied, + ) + + +def install_req_from_req_string( + req_string, # type: str + comes_from=None, # type: Optional[InstallRequirement] + isolated=False, # type: bool + use_pep517=None, # type: Optional[bool] + user_supplied=False, # type: bool +): + # type: (...) 
-> InstallRequirement + try: + req = Requirement(req_string) + except InvalidRequirement: + raise InstallationError(f"Invalid requirement: '{req_string}'") + + domains_not_allowed = [ + PyPI.file_storage_domain, + TestPyPI.file_storage_domain, + ] + if (req.url and comes_from and comes_from.link and + comes_from.link.netloc in domains_not_allowed): + # Explicitly disallow pypi packages that depend on external urls + raise InstallationError( + "Packages installed from PyPI cannot depend on packages " + "which are not also hosted on PyPI.\n" + "{} depends on {} ".format(comes_from.name, req) + ) + + return InstallRequirement( + req, + comes_from, + isolated=isolated, + use_pep517=use_pep517, + user_supplied=user_supplied, + ) + + +def install_req_from_parsed_requirement( + parsed_req, # type: ParsedRequirement + isolated=False, # type: bool + use_pep517=None, # type: Optional[bool] + user_supplied=False, # type: bool +): + # type: (...) -> InstallRequirement + if parsed_req.is_editable: + req = install_req_from_editable( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + constraint=parsed_req.constraint, + isolated=isolated, + user_supplied=user_supplied, + ) + + else: + req = install_req_from_line( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + isolated=isolated, + options=parsed_req.options, + constraint=parsed_req.constraint, + line_source=parsed_req.line_source, + user_supplied=user_supplied, + ) + return req + + +def install_req_from_link_and_ireq(link, ireq): + # type: (Link, InstallRequirement) -> InstallRequirement + return InstallRequirement( + req=ireq.req, + comes_from=ireq.comes_from, + editable=ireq.editable, + link=link, + markers=ireq.markers, + use_pep517=ireq.use_pep517, + isolated=ireq.isolated, + install_options=ireq.install_options, + global_options=ireq.global_options, + hash_options=ireq.hash_options, + ) diff --git a/venv/Lib/site-packages/pip/_internal/req/req_file.py b/venv/Lib/site-packages/pip/_internal/req/req_file.py new file mode 100644 index 00000000..080c1281 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/req/req_file.py @@ -0,0 +1,551 @@ +""" +Requirements file parsing +""" + +import optparse +import os +import re +import shlex +import urllib.parse +from optparse import Values +from typing import TYPE_CHECKING, Any, Callable, Dict, Iterator, List, Optional, Tuple + +from pip._internal.cli import cmdoptions +from pip._internal.exceptions import InstallationError, RequirementsFileParseError +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.encoding import auto_decode +from pip._internal.utils.urls import get_url_scheme, url_to_path + +if TYPE_CHECKING: + # NoReturn introduced in 3.6.2; imported only for type checking to maintain + # pip compatibility with older patch versions of Python 3.6 + from typing import NoReturn + + from pip._internal.index.package_finder import PackageFinder + +__all__ = ['parse_requirements'] + +ReqFileLines = Iterator[Tuple[int, str]] + +LineParser = Callable[[str], Tuple[str, Values]] + +SCHEME_RE = re.compile(r'^(http|https|file):', re.I) +COMMENT_RE = re.compile(r'(^|\s+)#.*$') + +# Matches environment variable-style values in '${MY_VARIABLE_1}' with the +# variable name consisting of only uppercase letters, digits or the '_' +# (underscore). 
This follows the POSIX standard defined in IEEE Std 1003.1, +# 2013 Edition. +ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})') + +SUPPORTED_OPTIONS = [ + cmdoptions.index_url, + cmdoptions.extra_index_url, + cmdoptions.no_index, + cmdoptions.constraints, + cmdoptions.requirements, + cmdoptions.editable, + cmdoptions.find_links, + cmdoptions.no_binary, + cmdoptions.only_binary, + cmdoptions.prefer_binary, + cmdoptions.require_hashes, + cmdoptions.pre, + cmdoptions.trusted_host, + cmdoptions.use_new_feature, +] # type: List[Callable[..., optparse.Option]] + +# options to be passed to requirements +SUPPORTED_OPTIONS_REQ = [ + cmdoptions.install_options, + cmdoptions.global_options, + cmdoptions.hash, +] # type: List[Callable[..., optparse.Option]] + +# the 'dest' string values +SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] + + +class ParsedRequirement: + def __init__( + self, + requirement, # type:str + is_editable, # type: bool + comes_from, # type: str + constraint, # type: bool + options=None, # type: Optional[Dict[str, Any]] + line_source=None, # type: Optional[str] + ): + # type: (...) -> None + self.requirement = requirement + self.is_editable = is_editable + self.comes_from = comes_from + self.options = options + self.constraint = constraint + self.line_source = line_source + + +class ParsedLine: + def __init__( + self, + filename, # type: str + lineno, # type: int + args, # type: str + opts, # type: Values + constraint, # type: bool + ): + # type: (...) -> None + self.filename = filename + self.lineno = lineno + self.opts = opts + self.constraint = constraint + + if args: + self.is_requirement = True + self.is_editable = False + self.requirement = args + elif opts.editables: + self.is_requirement = True + self.is_editable = True + # We don't support multiple -e on one line + self.requirement = opts.editables[0] + else: + self.is_requirement = False + + +def parse_requirements( + filename, # type: str + session, # type: PipSession + finder=None, # type: Optional[PackageFinder] + options=None, # type: Optional[optparse.Values] + constraint=False, # type: bool +): + # type: (...) -> Iterator[ParsedRequirement] + """Parse a requirements file and yield ParsedRequirement instances. + + :param filename: Path or url of requirements file. + :param session: PipSession instance. + :param finder: Instance of pip.index.PackageFinder. + :param options: cli options. + :param constraint: If true, parsing a constraint file rather than + requirements file. + """ + line_parser = get_line_parser(finder) + parser = RequirementsFileParser(session, line_parser) + + for parsed_line in parser.parse(filename, constraint): + parsed_req = handle_line( + parsed_line, + options=options, + finder=finder, + session=session + ) + if parsed_req is not None: + yield parsed_req + + +def preprocess(content): + # type: (str) -> ReqFileLines + """Split, filter, and join lines, and return a line iterator + + :param content: the content of the requirements file + """ + lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines + lines_enum = join_lines(lines_enum) + lines_enum = ignore_comments(lines_enum) + lines_enum = expand_env_variables(lines_enum) + return lines_enum + + +def handle_requirement_line( + line, # type: ParsedLine + options=None, # type: Optional[optparse.Values] +): + # type: (...)
-> ParsedRequirement + + # preserve for the nested code path + line_comes_from = '{} {} (line {})'.format( + '-c' if line.constraint else '-r', line.filename, line.lineno, + ) + + assert line.is_requirement + + if line.is_editable: + # For editable requirements, we don't support per-requirement + # options, so just return the parsed requirement. + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + ) + else: + if options: + # Disable wheels if the user has specified build options + cmdoptions.check_install_build_global(options, line.opts) + + # get the options that apply to requirements + req_options = {} + for dest in SUPPORTED_OPTIONS_REQ_DEST: + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + + line_source = f'line {line.lineno} of {line.filename}' + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, + ) + + +def handle_option_line( + opts, # type: Values + filename, # type: str + lineno, # type: int + finder=None, # type: Optional[PackageFinder] + options=None, # type: Optional[optparse.Values] + session=None, # type: Optional[PipSession] +): + # type: (...) -> None + + if options: + # percolate options upward + if opts.require_hashes: + options.require_hashes = opts.require_hashes + if opts.features_enabled: + options.features_enabled.extend( + f for f in opts.features_enabled + if f not in options.features_enabled + ) + + # set finder options + if finder: + find_links = finder.find_links + index_urls = finder.index_urls + if opts.index_url: + index_urls = [opts.index_url] + if opts.no_index is True: + index_urls = [] + if opts.extra_index_urls: + index_urls.extend(opts.extra_index_urls) + if opts.find_links: + # FIXME: it would be nice to keep track of the source + # of the find_links: support a find-links local path + # relative to a requirements file. + value = opts.find_links[0] + req_dir = os.path.dirname(os.path.abspath(filename)) + relative_to_reqs_file = os.path.join(req_dir, value) + if os.path.exists(relative_to_reqs_file): + value = relative_to_reqs_file + find_links.append(value) + + if session: + # We need to update the auth urls in session + session.update_index_urls(index_urls) + + search_scope = SearchScope( + find_links=find_links, + index_urls=index_urls, + ) + finder.search_scope = search_scope + + if opts.pre: + finder.set_allow_all_prereleases() + + if opts.prefer_binary: + finder.set_prefer_binary() + + if session: + for host in opts.trusted_hosts or []: + source = f'line {lineno} of {filename}' + session.add_trusted_host(host, source=source) + + +def handle_line( + line, # type: ParsedLine + options=None, # type: Optional[optparse.Values] + finder=None, # type: Optional[PackageFinder] + session=None, # type: Optional[PipSession] +): + # type: (...) -> Optional[ParsedRequirement] + """Handle a single parsed requirements line; This can result in + creating/yielding requirements, or updating the finder. + + :param line: The parsed line to be processed. + :param options: CLI options. + :param finder: The finder - updated by non-requirement lines. + :param session: The session - updated by non-requirement lines. + + Returns a ParsedRequirement object if the line is a requirement line, + otherwise returns None. 
+ + For lines that contain requirements, the only options that have an effect + are from SUPPORTED_OPTIONS_REQ, and they are scoped to the + requirement. Other options from SUPPORTED_OPTIONS may be present, but are + ignored. + + For lines that do not contain requirements, the only options that have an + effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may + be present, but are ignored. These lines may contain multiple options + (although our docs imply only one is supported), and all are parsed and + affect the finder. + """ + + if line.is_requirement: + parsed_req = handle_requirement_line(line, options) + return parsed_req + else: + handle_option_line( + line.opts, + line.filename, + line.lineno, + finder, + options, + session, + ) + return None + + +class RequirementsFileParser: + def __init__( + self, + session, # type: PipSession + line_parser, # type: LineParser + ): + # type: (...) -> None + self._session = session + self._line_parser = line_parser + + def parse(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + """Parse a given file, yielding parsed lines. + """ + yield from self._parse_and_recurse(filename, constraint) + + def _parse_and_recurse(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + for line in self._parse_file(filename, constraint): + if ( + not line.is_requirement and + (line.opts.requirements or line.opts.constraints) + ): + # parse a nested requirements file + if line.opts.requirements: + req_path = line.opts.requirements[0] + nested_constraint = False + else: + req_path = line.opts.constraints[0] + nested_constraint = True + + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib.parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join( + os.path.dirname(filename), req_path, + ) + + yield from self._parse_and_recurse(req_path, nested_constraint) + else: + yield line + + def _parse_file(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + _, content = get_file_content(filename, self._session) + + lines_enum = preprocess(content) + + for line_number, line in lines_enum: + try: + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line + msg = f'Invalid requirement: {line}\n{e.msg}' + raise RequirementsFileParseError(msg) + + yield ParsedLine( + filename, + line_number, + args_str, + opts, + constraint, + ) + + +def get_line_parser(finder): + # type: (Optional[PackageFinder]) -> LineParser + def parse_line(line): + # type: (str) -> Tuple[str, Values] + # Build new parser for each line since it accumulates appendable + # options. + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + defaults.format_control = finder.format_control + + args_str, options_str = break_args_options(line) + + opts, _ = parser.parse_args(shlex.split(options_str), defaults) + + return args_str, opts + + return parse_line + + +def break_args_options(line): + # type: (str) -> Tuple[str, str] + """Break up the line into an args and options string. We only want to shlex + (and then optparse) the options, not the args. args can contain markers + which are corrupted by shlex.
+ """ + tokens = line.split(' ') + args = [] + options = tokens[:] + for token in tokens: + if token.startswith('-') or token.startswith('--'): + break + else: + args.append(token) + options.pop(0) + return ' '.join(args), ' '.join(options) + + +class OptionParsingError(Exception): + def __init__(self, msg): + # type: (str) -> None + self.msg = msg + + +def build_parser(): + # type: () -> optparse.OptionParser + """ + Return a parser for parsing requirement lines + """ + parser = optparse.OptionParser(add_help_option=False) + + option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ + for option_factory in option_factories: + option = option_factory() + parser.add_option(option) + + # By default optparse sys.exits on parsing errors. We want to wrap + # that in our own exception. + def parser_exit(self, msg): + # type: (Any, str) -> NoReturn + raise OptionParsingError(msg) + # NOTE: mypy disallows assigning to a method + # https://github.com/python/mypy/issues/2427 + parser.exit = parser_exit # type: ignore + + return parser + + +def join_lines(lines_enum): + # type: (ReqFileLines) -> ReqFileLines + """Joins a line ending in '\' with the previous line (except when following + comments). The joined line takes on the index of the first line. + """ + primary_line_number = None + new_line = [] # type: List[str] + for line_number, line in lines_enum: + if not line.endswith('\\') or COMMENT_RE.match(line): + if COMMENT_RE.match(line): + # this ensures comments are always matched later + line = ' ' + line + if new_line: + new_line.append(line) + assert primary_line_number is not None + yield primary_line_number, ''.join(new_line) + new_line = [] + else: + yield line_number, line + else: + if not new_line: + primary_line_number = line_number + new_line.append(line.strip('\\')) + + # last line contains \ + if new_line: + assert primary_line_number is not None + yield primary_line_number, ''.join(new_line) + + # TODO: handle space after '\'. + + +def ignore_comments(lines_enum): + # type: (ReqFileLines) -> ReqFileLines + """ + Strips comments and filter empty lines. + """ + for line_number, line in lines_enum: + line = COMMENT_RE.sub('', line) + line = line.strip() + if line: + yield line_number, line + + +def expand_env_variables(lines_enum): + # type: (ReqFileLines) -> ReqFileLines + """Replace all environment variables that can be retrieved via `os.getenv`. + + The only allowed format for environment variables defined in the + requirement file is `${MY_VARIABLE_1}` to ensure two things: + + 1. Strings that contain a `$` aren't accidentally (partially) expanded. + 2. Ensure consistency across platforms for requirement files. + + These points are the result of a discussion on the `github pull + request #3514 <https://github.com/pypa/pip/pull/3514>`_. + + Valid characters in variable names follow the `POSIX standard + `_ and are limited + to uppercase letter, digits and the `_` (underscore). + """ + for line_number, line in lines_enum: + for env_var, var_name in ENV_VAR_RE.findall(line): + value = os.getenv(var_name) + if not value: + continue + + line = line.replace(env_var, value) + + yield line_number, line + + +def get_file_content(url, session): + # type: (str, PipSession) -> Tuple[str, str] + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. Returns (location, content). Content is unicode. + Respects # -*- coding: declarations on the retrieved files. + + :param url: File path or url. + :param session: PipSession instance.
+ """ + scheme = get_url_scheme(url) + + if scheme in ['http', 'https']: + # FIXME: catch some errors + resp = session.get(url) + raise_for_status(resp) + return resp.url, resp.text + + elif scheme == 'file': + url = url_to_path(url) + + try: + with open(url, 'rb') as f: + content = auto_decode(f.read()) + except OSError as exc: + raise InstallationError( + f'Could not open requirements file: {exc}' + ) + return url, content diff --git a/venv/Lib/site-packages/pip/_internal/req/req_install.py b/venv/Lib/site-packages/pip/_internal/req/req_install.py new file mode 100644 index 00000000..c2eea371 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/req/req_install.py @@ -0,0 +1,892 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import os +import shutil +import sys +import uuid +import zipfile +from typing import Any, Dict, Iterable, List, Optional, Sequence, Union + +from pip._vendor import pkg_resources, six +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.pep517.wrappers import Pep517HookCaller +from pip._vendor.pkg_resources import Distribution + +from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_scheme +from pip._internal.models.link import Link +from pip._internal.operations.build.metadata import generate_metadata +from pip._internal.operations.build.metadata_legacy import ( + generate_metadata as generate_metadata_legacy, +) +from pip._internal.operations.install.editable_legacy import ( + install_editable as install_editable_legacy, +) +from pip._internal.operations.install.legacy import LegacyInstallFailure +from pip._internal.operations.install.legacy import install as install_legacy +from pip._internal.operations.install.wheel import install_wheel +from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path +from pip._internal.req.req_uninstall import UninstallPathSet +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.direct_url_helpers import direct_url_from_link +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + ask_path_exists, + backup_dir, + display_path, + dist_in_site_packages, + dist_in_usersite, + get_distribution, + hide_url, + redact_auth_from_url, +) +from pip._internal.utils.packaging import get_metadata +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pip._internal.utils.virtualenv import running_under_virtualenv +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +def _get_dist(metadata_directory): + # type: (str) -> Distribution + """Return a pkg_resources.Distribution for the provided + metadata directory. + """ + dist_dir = metadata_directory.rstrip(os.sep) + + # Build a PathMetadata object, from path to metadata. :wink: + base_dir, dist_dir_name = os.path.split(dist_dir) + metadata = pkg_resources.PathMetadata(base_dir, dist_dir) + + # Determine the correct Distribution object type. 
+ if dist_dir.endswith(".egg-info"): + dist_cls = pkg_resources.Distribution + dist_name = os.path.splitext(dist_dir_name)[0] + else: + assert dist_dir.endswith(".dist-info") + dist_cls = pkg_resources.DistInfoDistribution + dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0] + + return dist_cls( + base_dir, + project_name=dist_name, + metadata=metadata, + ) + + +class InstallRequirement: + """ + Represents something that may be installed later on, may have information + about where to fetch the relevant requirement and also contains logic for + installing the said requirement. + """ + + def __init__( + self, + req, # type: Optional[Requirement] + comes_from, # type: Optional[Union[str, InstallRequirement]] + editable=False, # type: bool + link=None, # type: Optional[Link] + markers=None, # type: Optional[Marker] + use_pep517=None, # type: Optional[bool] + isolated=False, # type: bool + install_options=None, # type: Optional[List[str]] + global_options=None, # type: Optional[List[str]] + hash_options=None, # type: Optional[Dict[str, List[str]]] + constraint=False, # type: bool + extras=(), # type: Iterable[str] + user_supplied=False, # type: bool + ): + # type: (...) -> None + assert req is None or isinstance(req, Requirement), req + self.req = req + self.comes_from = comes_from + self.constraint = constraint + self.editable = editable + self.legacy_install_reason = None # type: Optional[int] + + # source_dir is the local directory where the linked requirement is + # located, or unpacked. In case unpacking is needed, creating and + # populating source_dir is done by the RequirementPreparer. Note this + # is not necessarily the directory where pyproject.toml or setup.py is + # located - that one is obtained via unpacked_source_directory. + self.source_dir = None # type: Optional[str] + if self.editable: + assert link + if link.is_file: + self.source_dir = os.path.normpath( + os.path.abspath(link.file_path) + ) + + if link is None and req and req.url: + # PEP 508 URL requirement + link = Link(req.url) + self.link = self.original_link = link + self.original_link_is_in_wheel_cache = False + + # Path to any downloaded or already-existing package. + self.local_file_path = None # type: Optional[str] + if self.link and self.link.is_file: + self.local_file_path = self.link.file_path + + if extras: + self.extras = extras + elif req: + self.extras = { + pkg_resources.safe_extra(extra) for extra in req.extras + } + else: + self.extras = set() + if markers is None and req: + markers = req.marker + self.markers = markers + + # This holds the pkg_resources.Distribution object if this requirement + # is already available: + self.satisfied_by = None # type: Optional[Distribution] + # Whether the installation process should try to uninstall an existing + # distribution before installing this requirement. + self.should_reinstall = False + # Temporary build location + self._temp_build_dir = None # type: Optional[TempDirectory] + # Set to True after successful installation + self.install_succeeded = None # type: Optional[bool] + # Supplied options + self.install_options = install_options if install_options else [] + self.global_options = global_options if global_options else [] + self.hash_options = hash_options if hash_options else {} + # Set to True after successful preparation of this requirement + self.prepared = False + # User supplied requirement are explicitly requested for installation + # by the user via CLI arguments or requirements files, as opposed to, + # e.g. 
dependencies, extras or constraints. + self.user_supplied = user_supplied + + self.isolated = isolated + self.build_env = NoOpBuildEnvironment() # type: BuildEnvironment + + # For PEP 517, the directory where we request the project metadata + # gets stored. We need this to pass to build_wheel, so the backend + # can ensure that the wheel matches the metadata (see the PEP for + # details). + self.metadata_directory = None # type: Optional[str] + + # The static build requirements (from pyproject.toml) + self.pyproject_requires = None # type: Optional[List[str]] + + # Build requirements that we will check are available + self.requirements_to_check = [] # type: List[str] + + # The PEP 517 backend we should use to build the project + self.pep517_backend = None # type: Optional[Pep517HookCaller] + + # Are we using PEP 517 for this requirement? + # After pyproject.toml has been loaded, the only valid values are True + # and False. Before loading, None is valid (meaning "use the default"). + # Setting an explicit value before loading pyproject.toml is supported, + # but after loading this flag should be treated as read only. + self.use_pep517 = use_pep517 + + # This requirement needs more preparation before it can be built + self.needs_more_preparation = False + + def __str__(self): + # type: () -> str + if self.req: + s = str(self.req) + if self.link: + s += ' from {}'.format(redact_auth_from_url(self.link.url)) + elif self.link: + s = redact_auth_from_url(self.link.url) + else: + s = '' + if self.satisfied_by is not None: + s += ' in {}'.format(display_path(self.satisfied_by.location)) + if self.comes_from: + if isinstance(self.comes_from, str): + comes_from = self.comes_from # type: Optional[str] + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += f' (from {comes_from})' + return s + + def __repr__(self): + # type: () -> str + return '<{} object: {} editable={!r}>'.format( + self.__class__.__name__, str(self), self.editable) + + def format_debug(self): + # type: () -> str + """An un-tested helper for getting state, for debugging. + """ + attributes = vars(self) + names = sorted(attributes) + + state = ( + "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names) + ) + return '<{name} object: {{{state}}}>'.format( + name=self.__class__.__name__, + state=", ".join(state), + ) + + # Things that are valid for all kinds of requirements? + @property + def name(self): + # type: () -> Optional[str] + if self.req is None: + return None + return pkg_resources.safe_name(self.req.name) + + @property + def specifier(self): + # type: () -> SpecifierSet + return self.req.specifier + + @property + def is_pinned(self): + # type: () -> bool + """Return whether I am pinned to an exact version. + + For example, some-package==1.2 is pinned; some-package>1.2 is not. + """ + specifiers = self.specifier + return (len(specifiers) == 1 and + next(iter(specifiers)).operator in {'==', '==='}) + + def match_markers(self, extras_requested=None): + # type: (Optional[Iterable[str]]) -> bool + if not extras_requested: + # Provide an extra to safely evaluate the markers + # without matching any extra + extras_requested = ('',) + if self.markers is not None: + return any( + self.markers.evaluate({'extra': extra}) + for extra in extras_requested) + else: + return True + + @property + def has_hash_options(self): + # type: () -> bool + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. 
+ + """ + return bool(self.hash_options) + + def hashes(self, trust_internet=True): + # type: (bool) -> Hashes + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. + + Hashes in URLs--ones embedded in the requirements file, not ones + downloaded from an index server--are almost peers with ones from + flags. They satisfy --require-hashes (whether it was implicitly or + explicitly activated) but do not activate it. md5 and sha224 are not + allowed in flags, which should nudge people toward good algos. We + always OR all hashes together, even ones from URLs. + + :param trust_internet: Whether to trust URL-based (#md5=...) hashes + downloaded from the internet, as by populate_link() + + """ + good_hashes = self.hash_options.copy() + link = self.link if trust_internet else self.original_link + if link and link.hash: + good_hashes.setdefault(link.hash_name, []).append(link.hash) + return Hashes(good_hashes) + + def from_path(self): + # type: () -> Optional[str] + """Format a nice indicator to show where this "comes from" + """ + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + if isinstance(self.comes_from, str): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += '->' + comes_from + return s + + def ensure_build_location(self, build_dir, autodelete, parallel_builds): + # type: (str, bool, bool) -> str + assert build_dir is not None + if self._temp_build_dir is not None: + assert self._temp_build_dir.path + return self._temp_build_dir.path + if self.req is None: + # Some systems have /tmp as a symlink which confuses custom + # builds (such as numpy). Thus, we ensure that the real path + # is returned. + self._temp_build_dir = TempDirectory( + kind=tempdir_kinds.REQ_BUILD, globally_managed=True + ) + + return self._temp_build_dir.path + + # This is the only remaining place where we manually determine the path + # for the temporary directory. It is only needed for editables where + # it is the value of the --src option. + + # When parallel builds are enabled, add a UUID to the build directory + # name so multiple builds do not interfere with each other. + dir_name = canonicalize_name(self.name) # type: str + if parallel_builds: + dir_name = f"{dir_name}_{uuid.uuid4().hex}" + + # FIXME: Is there a better place to create the build_dir? (hg and bzr + # need this) + if not os.path.exists(build_dir): + logger.debug('Creating directory %s', build_dir) + os.makedirs(build_dir) + actual_build_dir = os.path.join(build_dir, dir_name) + # `None` indicates that we respect the globally-configured deletion + # settings, which is what we actually want when auto-deleting. + delete_arg = None if autodelete else False + return TempDirectory( + path=actual_build_dir, + delete=delete_arg, + kind=tempdir_kinds.REQ_BUILD, + globally_managed=True, + ).path + + def _set_requirement(self): + # type: () -> None + """Set requirement after generating metadata. 
+ """ + assert self.req is None + assert self.metadata is not None + assert self.source_dir is not None + + # Construct a Requirement object from the generated metadata + if isinstance(parse_version(self.metadata["Version"]), Version): + op = "==" + else: + op = "===" + + self.req = Requirement( + "".join([ + self.metadata["Name"], + op, + self.metadata["Version"], + ]) + ) + + def warn_on_mismatching_name(self): + # type: () -> None + metadata_name = canonicalize_name(self.metadata["Name"]) + if canonicalize_name(self.req.name) == metadata_name: + # Everything is fine. + return + + # If we're here, there's a mismatch. Log a warning about it. + logger.warning( + 'Generating metadata for package %s ' + 'produced metadata for project name %s. Fix your ' + '#egg=%s fragments.', + self.name, metadata_name, self.name + ) + self.req = Requirement(metadata_name) + + def check_if_exists(self, use_user_site): + # type: (bool) -> None + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.should_reinstall appropriately. + """ + if self.req is None: + return + existing_dist = get_distribution(self.req.name) + if not existing_dist: + return + + # pkg_resouces may contain a different copy of packaging.version from + # pip in if the downstream distributor does a poor job debundling pip. + # We avoid existing_dist.parsed_version and let SpecifierSet.contains + # parses the version instead. + existing_version = existing_dist.version + version_compatible = ( + existing_version is not None and + self.req.specifier.contains(existing_version, prereleases=True) + ) + if not version_compatible: + self.satisfied_by = None + if use_user_site: + if dist_in_usersite(existing_dist): + self.should_reinstall = True + elif (running_under_virtualenv() and + dist_in_site_packages(existing_dist)): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to {} in {}".format( + existing_dist.project_name, existing_dist.location) + ) + else: + self.should_reinstall = True + else: + if self.editable: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None + else: + self.satisfied_by = existing_dist + + # Things valid for wheels + @property + def is_wheel(self): + # type: () -> bool + if not self.link: + return False + return self.link.is_wheel + + # Things valid for sdists + @property + def unpacked_source_directory(self): + # type: () -> str + return os.path.join( + self.source_dir, + self.link and self.link.subdirectory_fragment or '') + + @property + def setup_py_path(self): + # type: () -> str + assert self.source_dir, f"No source dir for {self}" + setup_py = os.path.join(self.unpacked_source_directory, 'setup.py') + + return setup_py + + @property + def pyproject_toml_path(self): + # type: () -> str + assert self.source_dir, f"No source dir for {self}" + return make_pyproject_path(self.unpacked_source_directory) + + def load_pyproject_toml(self): + # type: () -> None + """Load the pyproject.toml file. + + After calling this routine, all of the attributes related to PEP 517 + processing for this requirement have been set. In particular, the + use_pep517 attribute can be used to determine whether we should + follow the PEP 517 or legacy (setup.py) code path. 
+ """ + pyproject_toml_data = load_pyproject_toml( + self.use_pep517, + self.pyproject_toml_path, + self.setup_py_path, + str(self) + ) + + if pyproject_toml_data is None: + self.use_pep517 = False + return + + self.use_pep517 = True + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires + self.pep517_backend = Pep517HookCaller( + self.unpacked_source_directory, backend, backend_path=backend_path, + ) + + def _check_setup_py_or_cfg_exists(self) -> bool: + """Check if the requirement actually has a setuptools build file. + + If setup.py does not exist, we also check setup.cfg in the same + directory and allow the directory if that exists. + """ + if os.path.exists(self.setup_py_path): + return True + stem, ext = os.path.splitext(self.setup_py_path) + if ext == ".py" and os.path.exists(f"{stem}.cfg"): + return True + return False + + def _generate_metadata(self): + # type: () -> str + """Invokes metadata generator functions, with the required arguments. + """ + if not self.use_pep517: + assert self.unpacked_source_directory + + if not self._check_setup_py_or_cfg_exists(): + raise InstallationError( + f'File "setup.py" or "setup.cfg" not found for legacy ' + f'project {self}.' + ) + + return generate_metadata_legacy( + build_env=self.build_env, + setup_py_path=self.setup_py_path, + source_dir=self.unpacked_source_directory, + isolated=self.isolated, + details=self.name or f"from {self.link}" + ) + + assert self.pep517_backend is not None + + return generate_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + ) + + def prepare_metadata(self): + # type: () -> None + """Ensure that project metadata is available. + + Under PEP 517, call the backend hook to prepare the metadata. + Under legacy processing, call setup.py egg-info. + """ + assert self.source_dir + + with indent_log(): + self.metadata_directory = self._generate_metadata() + + # Act on the newly generated metadata, based on the name and version. + if not self.name: + self._set_requirement() + else: + self.warn_on_mismatching_name() + + self.assert_source_matches_version() + + @property + def metadata(self): + # type: () -> Any + if not hasattr(self, '_metadata'): + self._metadata = get_metadata(self.get_dist()) + + return self._metadata + + def get_dist(self): + # type: () -> Distribution + return _get_dist(self.metadata_directory) + + def assert_source_matches_version(self): + # type: () -> None + assert self.source_dir + version = self.metadata['version'] + if self.req.specifier and version not in self.req.specifier: + logger.warning( + 'Requested %s, but installing version %s', + self, + version, + ) + else: + logger.debug( + 'Source in %s has version %s, which satisfies requirement %s', + display_path(self.source_dir), + version, + self, + ) + + # For both source distributions and editables + def ensure_has_source_dir( + self, + parent_dir, + autodelete=False, + parallel_builds=False, + ): + # type: (str, bool, bool) -> None + """Ensure that a source_dir is set. + + This will create a temporary build dir if the name of the requirement + isn't known yet. + + :param parent_dir: The ideal pip parent_dir for the source_dir. + Generally src_dir for editables and build_dir for sdists. 
+ :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.ensure_build_location( + parent_dir, + autodelete=autodelete, + parallel_builds=parallel_builds, + ) + + # For editable installations + def update_editable(self): + # type: () -> None + if not self.link: + logger.debug( + "Cannot update repository at %s; repository location is " + "unknown", + self.source_dir, + ) + return + assert self.editable + assert self.source_dir + if self.link.scheme == 'file': + # Static paths don't get updated + return + vcs_backend = vcs.get_backend_for_scheme(self.link.scheme) + # Editable requirements are validated in Requirement constructors. + # So here, if it's neither a path nor a valid VCS URL, it's a bug. + assert vcs_backend, f"Unsupported VCS URL {self.link.url}" + hidden_url = hide_url(self.link.url) + vcs_backend.obtain(self.source_dir, url=hidden_url) + + # Top-level Actions + def uninstall(self, auto_confirm=False, verbose=False): + # type: (bool, bool) -> Optional[UninstallPathSet] + """ + Uninstall the distribution currently satisfying this requirement. + + Prompts before removing or modifying files unless + ``auto_confirm`` is True. + + Refuses to delete or modify files outside of ``sys.prefix`` - + thus uninstallation within a virtual environment can only + modify that virtual environment, even if the virtualenv is + linked to global site-packages. + + """ + assert self.req + dist = get_distribution(self.req.name) + if not dist: + logger.warning("Skipping %s as it is not installed.", self.name) + return None + logger.info('Found existing installation: %s', dist) + + uninstalled_pathset = UninstallPathSet.from_dist(dist) + uninstalled_pathset.remove(auto_confirm, verbose) + return uninstalled_pathset + + def _get_archive_name(self, path, parentdir, rootdir): + # type: (str, str, str) -> str + + def _clean_zip_name(name, prefix): + # type: (str, str) -> str + assert name.startswith(prefix + os.path.sep), ( + f"name {name!r} doesn't start with prefix {prefix!r}" + ) + name = name[len(prefix) + 1:] + name = name.replace(os.path.sep, '/') + return name + + path = os.path.join(parentdir, path) + name = _clean_zip_name(path, rootdir) + return self.name + '/' + name + + def archive(self, build_dir): + # type: (Optional[str]) -> None + """Saves archive to provided build_dir. + + Used for saving downloaded VCS requirements as part of `pip download`. + """ + assert self.source_dir + if build_dir is None: + return + + create_archive = True + archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"]) + archive_path = os.path.join(build_dir, archive_name) + + if os.path.exists(archive_path): + response = ask_path_exists( + 'The file {} exists. 
(i)gnore, (w)ipe, ' + '(b)ackup, (a)bort '.format( + display_path(archive_path)), + ('i', 'w', 'b', 'a')) + if response == 'i': + create_archive = False + elif response == 'w': + logger.warning('Deleting %s', display_path(archive_path)) + os.remove(archive_path) + elif response == 'b': + dest_file = backup_dir(archive_path) + logger.warning( + 'Backing up %s to %s', + display_path(archive_path), + display_path(dest_file), + ) + shutil.move(archive_path, dest_file) + elif response == 'a': + sys.exit(-1) + + if not create_archive: + return + + zip_output = zipfile.ZipFile( + archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True, + ) + with zip_output: + dir = os.path.normcase( + os.path.abspath(self.unpacked_source_directory) + ) + for dirpath, dirnames, filenames in os.walk(dir): + for dirname in dirnames: + dir_arcname = self._get_archive_name( + dirname, parentdir=dirpath, rootdir=dir, + ) + zipdir = zipfile.ZipInfo(dir_arcname + '/') + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip_output.writestr(zipdir, '') + for filename in filenames: + file_arcname = self._get_archive_name( + filename, parentdir=dirpath, rootdir=dir, + ) + filename = os.path.join(dirpath, filename) + zip_output.write(filename, file_arcname) + + logger.info('Saved %s', display_path(archive_path)) + + def install( + self, + install_options, # type: List[str] + global_options=None, # type: Optional[Sequence[str]] + root=None, # type: Optional[str] + home=None, # type: Optional[str] + prefix=None, # type: Optional[str] + warn_script_location=True, # type: bool + use_user_site=False, # type: bool + pycompile=True # type: bool + ): + # type: (...) -> None + scheme = get_scheme( + self.name, + user=use_user_site, + home=home, + root=root, + isolated=self.isolated, + prefix=prefix, + ) + + global_options = global_options if global_options is not None else [] + if self.editable: + install_editable_legacy( + install_options, + global_options, + prefix=prefix, + home=home, + use_user_site=use_user_site, + name=self.name, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + ) + self.install_succeeded = True + return + + if self.is_wheel: + assert self.local_file_path + direct_url = None + if self.original_link: + direct_url = direct_url_from_link( + self.original_link, + self.source_dir, + self.original_link_is_in_wheel_cache, + ) + install_wheel( + self.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=self.user_supplied, + ) + self.install_succeeded = True + return + + # TODO: Why don't we do this for editable installs? + + # Extend the list of global and install options passed on to + # the setup.py call with the ones from the requirements file. + # Options specified in requirements file override those + # specified on the command line, since the last option given + # to setup.py is the one that is used. 
+ global_options = list(global_options) + self.global_options + install_options = list(install_options) + self.install_options + + try: + success = install_legacy( + install_options=install_options, + global_options=global_options, + root=root, + home=home, + prefix=prefix, + use_user_site=use_user_site, + pycompile=pycompile, + scheme=scheme, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + req_name=self.name, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + req_description=str(self.req), + ) + except LegacyInstallFailure as exc: + self.install_succeeded = False + six.reraise(*exc.parent) + except Exception: + self.install_succeeded = True + raise + + self.install_succeeded = success + + if success and self.legacy_install_reason == 8368: + deprecated( + reason=( + "{} was installed using the legacy 'setup.py install' " + "method, because a wheel could not be built for it.". + format(self.name) + ), + replacement="to fix the wheel build issue reported above", + gone_in=None, + issue=8368, + ) + + +def check_invalid_constraint_type(req): + # type: (InstallRequirement) -> str + + # Check for unsupported forms + problem = "" + if not req.name: + problem = "Unnamed requirements are not allowed as constraints" + elif req.editable: + problem = "Editable requirements are not allowed as constraints" + elif req.extras: + problem = "Constraints cannot have extras" + + if problem: + deprecated( + reason=( + "Constraints are only allowed to take the form of a package " + "name and a version specifier. Other forms were originally " + "permitted as an accident of the implementation, but were " + "undocumented. The new implementation of the resolver no " + "longer supports these forms." + ), + replacement=( + "replacing the constraint with a requirement." + ), + # No plan yet for when the new resolver becomes default + gone_in=None, + issue=8210 + ) + + return problem diff --git a/venv/Lib/site-packages/pip/_internal/req/req_set.py b/venv/Lib/site-packages/pip/_internal/req/req_set.py new file mode 100644 index 00000000..59c58435 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/req/req_set.py @@ -0,0 +1,199 @@ +import logging +from collections import OrderedDict +from typing import Dict, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InstallationError +from pip._internal.models.wheel import Wheel +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils import compatibility_tags + +logger = logging.getLogger(__name__) + + +class RequirementSet: + + def __init__(self, check_supported_wheels=True): + # type: (bool) -> None + """Create a RequirementSet. 
+ """ + + self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] + self.check_supported_wheels = check_supported_wheels + + self.unnamed_requirements = [] # type: List[InstallRequirement] + + def __str__(self): + # type: () -> str + requirements = sorted( + (req for req in self.requirements.values() if not req.comes_from), + key=lambda req: canonicalize_name(req.name or ""), + ) + return ' '.join(str(req.req) for req in requirements) + + def __repr__(self): + # type: () -> str + requirements = sorted( + self.requirements.values(), + key=lambda req: canonicalize_name(req.name or ""), + ) + + format_string = '<{classname} object; {count} requirement(s): {reqs}>' + return format_string.format( + classname=self.__class__.__name__, + count=len(requirements), + reqs=', '.join(str(req.req) for req in requirements), + ) + + def add_unnamed_requirement(self, install_req): + # type: (InstallRequirement) -> None + assert not install_req.name + self.unnamed_requirements.append(install_req) + + def add_named_requirement(self, install_req): + # type: (InstallRequirement) -> None + assert install_req.name + + project_name = canonicalize_name(install_req.name) + self.requirements[project_name] = install_req + + def add_requirement( + self, + install_req, # type: InstallRequirement + parent_req_name=None, # type: Optional[str] + extras_requested=None # type: Optional[Iterable[str]] + ): + # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]] + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :param extras_requested: an iterable of extras used to evaluate the + environment markers. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + # If the markers do not match, ignore this requirement. + if not install_req.match_markers(extras_requested): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + install_req.name, install_req.markers, + ) + return [], None + + # If the wheel is not supported, raise an error. + # Should check this after filtering out based on environment markers to + # allow specifying different wheels based on the environment/OS, in a + # single requirements file. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) + tags = compatibility_tags.get_supported() + if (self.check_supported_wheels and not wheel.supported(tags)): + raise InstallationError( + "{} is not a supported wheel on this platform.".format( + wheel.filename) + ) + + # This next bit is really a sanity check. + assert not install_req.user_supplied or parent_req_name is None, ( + "a user supplied req shouldn't have a parent" + ) + + # Unnamed requirements are scanned again and the requirement won't be + # added as a dependency until after scanning. 
+ if not install_req.name: + self.add_unnamed_requirement(install_req) + return [install_req], None + + try: + existing_req = self.get_requirement( + install_req.name) # type: Optional[InstallRequirement] + except KeyError: + existing_req = None + + has_conflicting_requirement = ( + parent_req_name is None and + existing_req and + not existing_req.constraint and + existing_req.extras == install_req.extras and + existing_req.req and + install_req.req and + existing_req.req.specifier != install_req.req.specifier + ) + if has_conflicting_requirement: + raise InstallationError( + "Double requirement given: {} (already in {}, name={!r})" + .format(install_req, existing_req, install_req.name) + ) + + # When no existing requirement exists, add the requirement as a + # dependency and it will be scanned again after. + if not existing_req: + self.add_named_requirement(install_req) + # We'd want to rescan this requirement later + return [install_req], install_req + + # Assume there's no need to scan, and that we've already + # encountered this for scanning. + if install_req.constraint or not existing_req.constraint: + return [], existing_req + + does_not_satisfy_constraint = ( + install_req.link and + not ( + existing_req.link and + install_req.link.path == existing_req.link.path + ) + ) + if does_not_satisfy_constraint: + raise InstallationError( + "Could not satisfy constraints for '{}': " + "installation from path or url cannot be " + "constrained to a version".format(install_req.name) + ) + # If we're now installing a constraint, mark the existing + # object for real installation. + existing_req.constraint = False + # If we're now installing a user supplied requirement, + # mark the existing object as such. + if install_req.user_supplied: + existing_req.user_supplied = True + existing_req.extras = tuple(sorted( + set(existing_req.extras) | set(install_req.extras) + )) + logger.debug( + "Setting %s extras to: %s", + existing_req, existing_req.extras, + ) + # Return the existing requirement for addition to the parent and + # scanning again. + return [existing_req], existing_req + + def has_requirement(self, name): + # type: (str) -> bool + project_name = canonicalize_name(name) + + return ( + project_name in self.requirements and + not self.requirements[project_name].constraint + ) + + def get_requirement(self, name): + # type: (str) -> InstallRequirement + project_name = canonicalize_name(name) + + if project_name in self.requirements: + return self.requirements[project_name] + + raise KeyError(f"No project with the name {name!r}") + + @property + def all_requirements(self): + # type: () -> List[InstallRequirement] + return self.unnamed_requirements + list(self.requirements.values()) diff --git a/venv/Lib/site-packages/pip/_internal/req/req_tracker.py b/venv/Lib/site-packages/pip/_internal/req/req_tracker.py new file mode 100644 index 00000000..542e0d94 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/req/req_tracker.py @@ -0,0 +1,140 @@ +import contextlib +import hashlib +import logging +import os +from types import TracebackType +from typing import Dict, Iterator, Optional, Set, Type, Union + +from pip._internal.models.link import Link +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +@contextlib.contextmanager +def update_env_context_manager(**changes): + # type: (str) -> Iterator[None] + target = os.environ + + # Save values from the target and change them. 
+ non_existent_marker = object() + saved_values = {} # type: Dict[str, Union[object, str]] + for name, new_value in changes.items(): + try: + saved_values[name] = target[name] + except KeyError: + saved_values[name] = non_existent_marker + target[name] = new_value + + try: + yield + finally: + # Restore original values in the target. + for name, original_value in saved_values.items(): + if original_value is non_existent_marker: + del target[name] + else: + assert isinstance(original_value, str) # for mypy + target[name] = original_value + + +@contextlib.contextmanager +def get_requirement_tracker(): + # type: () -> Iterator[RequirementTracker] + root = os.environ.get('PIP_REQ_TRACKER') + with contextlib.ExitStack() as ctx: + if root is None: + root = ctx.enter_context( + TempDirectory(kind='req-tracker') + ).path + ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root)) + logger.debug("Initialized build tracking at %s", root) + + with RequirementTracker(root) as tracker: + yield tracker + + +class RequirementTracker: + + def __init__(self, root): + # type: (str) -> None + self._root = root + self._entries = set() # type: Set[InstallRequirement] + logger.debug("Created build tracker: %s", self._root) + + def __enter__(self): + # type: () -> RequirementTracker + logger.debug("Entered build tracker: %s", self._root) + return self + + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] + exc_tb # type: Optional[TracebackType] + ): + # type: (...) -> None + self.cleanup() + + def _entry_path(self, link): + # type: (Link) -> str + hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() + return os.path.join(self._root, hashed) + + def add(self, req): + # type: (InstallRequirement) -> None + """Add an InstallRequirement to build tracking. + """ + + assert req.link + # Get the file to write information about this requirement. + entry_path = self._entry_path(req.link) + + # Try reading from the file. If it exists and can be read from, a build + # is already in progress, so a LookupError is raised. + try: + with open(entry_path) as fp: + contents = fp.read() + except FileNotFoundError: + pass + else: + message = '{} is already being built: {}'.format( + req.link, contents) + raise LookupError(message) + + # If we're here, req should really not be building already. + assert req not in self._entries + + # Start tracking this requirement. + with open(entry_path, 'w', encoding="utf-8") as fp: + fp.write(str(req)) + self._entries.add(req) + + logger.debug('Added %s to build tracker %r', req, self._root) + + def remove(self, req): + # type: (InstallRequirement) -> None + """Remove an InstallRequirement from build tracking. + """ + + assert req.link + # Delete the created file and the corresponding entries. 
+ os.unlink(self._entry_path(req.link)) + self._entries.remove(req) + + logger.debug('Removed %s from build tracker %r', req, self._root) + + def cleanup(self): + # type: () -> None + for req in set(self._entries): + self.remove(req) + + logger.debug("Removed build tracker: %r", self._root) + + @contextlib.contextmanager + def track(self, req): + # type: (InstallRequirement) -> Iterator[None] + self.add(req) + yield + self.remove(req) diff --git a/env/lib/python2.7/site-packages/pip/_internal/req/req_uninstall.py b/venv/Lib/site-packages/pip/_internal/req/req_uninstall.py similarity index 91% rename from env/lib/python2.7/site-packages/pip/_internal/req/req_uninstall.py rename to venv/Lib/site-packages/pip/_internal/req/req_uninstall.py index 733301ce..b7223417 100644 --- a/env/lib/python2.7/site-packages/pip/_internal/req/req_uninstall.py +++ b/venv/Lib/site-packages/pip/_internal/req/req_uninstall.py @@ -1,30 +1,30 @@ -from __future__ import absolute_import - import csv import functools import logging import os import sys import sysconfig +from importlib.util import cache_from_source +from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple from pip._vendor import pkg_resources +from pip._vendor.pkg_resources import Distribution from pip._internal.exceptions import UninstallationError -from pip._internal.locations import bin_py, bin_user -from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache +from pip._internal.locations import get_bin_prefix, get_bin_user +from pip._internal.utils.compat import WINDOWS from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( - FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local, - normalize_path, renames, rmtree, + ask, + dist_in_usersite, + dist_is_local, + egg_link_path, + is_local, + normalize_path, + renames, + rmtree, ) from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( - Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple, - ) - from pip._vendor.pkg_resources import Distribution logger = logging.getLogger(__name__) @@ -36,9 +36,9 @@ def _script_names(dist, script_name, is_gui): Returns the list of file names """ if dist_in_usersite(dist): - bin_dir = bin_user + bin_dir = get_bin_user() else: - bin_dir = bin_py + bin_dir = get_bin_prefix() exe_name = os.path.join(bin_dir, script_name) paths_to_remove = [exe_name] if WINDOWS: @@ -52,7 +52,7 @@ def _script_names(dist, script_name, is_gui): def _unique(fn): - # type: (Callable) -> Callable[..., Iterator[Any]] + # type: (Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]] @functools.wraps(fn) def unique(*args, **kw): # type: (Any, Any) -> Iterator[Any] @@ -75,7 +75,7 @@ def uninstallation_paths(dist): UninstallPathSet.add() takes care of the __pycache__ .py[co]. """ - r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))) + r = csv.reader(dist.get_metadata_lines('RECORD')) for row in r: path = os.path.join(dist.location, row[0]) yield path @@ -115,10 +115,9 @@ def compress_for_rename(paths): This set may include directories when the original sequence of paths included every file on disk. 
""" - case_map = dict((os.path.normcase(p), p) for p in paths) + case_map = {os.path.normcase(p): p for p in paths} remaining = set(case_map) - unchecked = sorted(set(os.path.split(p)[0] - for p in case_map.values()), key=len) + unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len) wildcards = set() # type: Set[str] def norm_join(*a): @@ -199,7 +198,7 @@ def compress_for_output_listing(paths): return will_remove, will_skip -class StashedUninstallPathSet(object): +class StashedUninstallPathSet: """A set of file rename operations to stash files while tentatively uninstalling them.""" def __init__(self): @@ -220,10 +219,8 @@ def _get_directory_stash(self, path): try: save_dir = AdjacentTempDirectory(path) # type: TempDirectory - save_dir.create() except OSError: save_dir = TempDirectory(kind="uninstall") - save_dir.create() self._save_dirs[os.path.normcase(path)] = save_dir return save_dir.path @@ -249,7 +246,6 @@ def _get_file_stash(self, path): # Did not find any suitable root head = os.path.dirname(path) save_dir = TempDirectory(kind='uninstall') - save_dir.create() self._save_dirs[head] = save_dir relpath = os.path.relpath(path, head) @@ -260,14 +256,16 @@ def _get_file_stash(self, path): def stash(self, path): # type: (str) -> str """Stashes the directory or file and returns its new location. + Handle symlinks as files to avoid modifying the symlink targets. """ - if os.path.isdir(path): + path_is_dir = os.path.isdir(path) and not os.path.islink(path) + if path_is_dir: new_path = self._get_directory_stash(path) else: new_path = self._get_file_stash(path) self._moves.append((path, new_path)) - if os.path.isdir(path) and os.path.isdir(new_path): + if (path_is_dir and os.path.isdir(new_path)): # If we're moving a directory, we need to # remove the destination first or else it will be # moved to inside the existing directory. 
@@ -289,12 +287,12 @@ def rollback(self): # type: () -> None """Undoes the uninstall by moving stashed files back.""" for p in self._moves: - logging.info("Moving to %s\n from %s", *p) + logger.info("Moving to %s\n from %s", *p) for new_path, path in self._moves: try: logger.debug('Replacing %s from %s', new_path, path) - if os.path.isfile(new_path): + if os.path.isfile(new_path) or os.path.islink(new_path): os.unlink(new_path) elif os.path.isdir(new_path): rmtree(new_path) @@ -311,7 +309,7 @@ def can_rollback(self): return bool(self._moves) -class UninstallPathSet(object): +class UninstallPathSet: """A set of file paths to be removed in the uninstallation of a requirement.""" def __init__(self, dist): @@ -348,7 +346,7 @@ def add(self, path): # __pycache__ files can show up after 'installed-files.txt' is created, # due to imports - if os.path.splitext(path)[1] == '.py' and uses_pycache: + if os.path.splitext(path)[1] == '.py': self.add(cache_from_source(path)) def add_pth(self, pth_file, entry): @@ -531,11 +529,12 @@ def from_dist(cls, dist): elif develop_egg_link: # develop egg - with open(develop_egg_link, 'r') as fh: + with open(develop_egg_link) as fh: link_pointer = os.path.normcase(fh.readline().strip()) assert (link_pointer == dist.location), ( - 'Egg-link %s does not match installed location of %s ' - '(at %s)' % (link_pointer, dist.project_name, dist.location) + 'Egg-link {} does not match installed location of {} ' + '(at {})'.format( + link_pointer, dist.project_name, dist.location) ) paths_to_remove.add(develop_egg_link) easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), @@ -552,9 +551,9 @@ def from_dist(cls, dist): if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): for script in dist.metadata_listdir('scripts'): if dist_in_usersite(dist): - bin_dir = bin_user + bin_dir = get_bin_user() else: - bin_dir = bin_py + bin_dir = get_bin_prefix() paths_to_remove.add(os.path.join(bin_dir, script)) if WINDOWS: paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') @@ -575,13 +574,9 @@ def from_dist(cls, dist): return paths_to_remove -class UninstallPthEntries(object): +class UninstallPthEntries: def __init__(self, pth_file): # type: (str) -> None - if not os.path.isfile(pth_file): - raise UninstallationError( - "Cannot remove entries from nonexistent file %s" % pth_file - ) self.file = pth_file self.entries = set() # type: Set[str] self._saved_lines = None # type: Optional[List[bytes]] @@ -593,6 +588,11 @@ def add(self, entry): # backslashes. This is correct for entries that describe absolute # paths outside of site-packages, but all the others use forward # slashes. + # os.path.splitdrive is used instead of os.path.isabs because isabs + # treats non-absolute paths with drive letter markings like c:foo\bar + # as absolute paths. It also does not recognize UNC paths if they don't + # have more than "\\sever\share". Valid examples: "\\server\share\" or + # "\\server\share\folder". 
if WINDOWS and not os.path.splitdrive(entry)[0]: entry = entry.replace('\\', '/') self.entries.add(entry) @@ -600,6 +600,13 @@ def add(self, entry): def remove(self): # type: () -> None logger.debug('Removing pth entries from %s:', self.file) + + # If the file doesn't exist, log a warning and return + if not os.path.isfile(self.file): + logger.warning( + "Cannot remove entries from nonexistent file %s", self.file + ) + return with open(self.file, 'rb') as fh: # windows uses '\r\n' with py3k, but uses '\n' with py2.x lines = fh.readlines() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/__init__.py b/venv/Lib/site-packages/pip/_internal/resolution/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/__init__.py rename to venv/Lib/site-packages/pip/_internal/resolution/__init__.py diff --git a/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..81a091ad Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-36.pyc new file mode 100644 index 00000000..7a4483ac Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/base.py b/venv/Lib/site-packages/pip/_internal/resolution/base.py new file mode 100644 index 00000000..1be0cb27 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/base.py @@ -0,0 +1,16 @@ +from typing import Callable, List + +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_set import RequirementSet + +InstallRequirementProvider = Callable[[str, InstallRequirement], InstallRequirement] + + +class BaseResolver: + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet + raise NotImplementedError() + + def get_installation_order(self, req_set): + # type: (RequirementSet) -> List[InstallRequirement] + raise NotImplementedError() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py b/venv/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py rename to venv/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py diff --git a/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..f5aeaeba Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-36.pyc new file mode 100644 index 00000000..cf6ae87f Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py b/venv/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py new file mode 100644 index 00000000..17de7f09 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py @@ -0,0 +1,462 @@ +"""Dependency Resolution + +The dependency resolution in pip is performed as follows: + +for top-level requirements: + a. only one spec allowed per project, regardless of conflicts or not. + otherwise a "double requirement" exception is raised + b. they override sub-dependency requirements. +for sub-dependencies + a. "first found, wins" (where the order is breadth first) +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import sys +from collections import defaultdict +from itertools import chain +from typing import DefaultDict, Iterable, List, Optional, Set, Tuple + +from pip._vendor.packaging import specifiers +from pip._vendor.pkg_resources import Distribution + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + HashError, + HashErrors, + UnsupportedPythonVersion, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.link import Link +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import dist_in_usersite, normalize_version_info +from pip._internal.utils.packaging import check_requires_python, get_requires_python + +logger = logging.getLogger(__name__) + +DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]] + + +def _check_dist_requires_python( + dist, # type: Distribution + version_info, # type: Tuple[int, int, int] + ignore_requires_python=False, # type: bool +): + # type: (...) -> None + """ + Check whether the given Python version is compatible with a distribution's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + + :raises UnsupportedPythonVersion: When the given Python version isn't + compatible. 
+ """ + requires_python = get_requires_python(dist) + try: + is_compatible = check_requires_python( + requires_python, version_info=version_info + ) + except specifiers.InvalidSpecifier as exc: + logger.warning( + "Package %r has an invalid Requires-Python: %s", dist.project_name, exc + ) + return + + if is_compatible: + return + + version = ".".join(map(str, version_info)) + if ignore_requires_python: + logger.debug( + "Ignoring failed Requires-Python check for package %r: " "%s not in %r", + dist.project_name, + version, + requires_python, + ) + return + + raise UnsupportedPythonVersion( + "Package {!r} requires a different Python: {} not in {!r}".format( + dist.project_name, version, requires_python + ) + ) + + +class Resolver(BaseResolver): + """Resolves which packages need to be installed/uninstalled to perform \ + the requested operation without breaking the requirements of any package. + """ + + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer, # type: RequirementPreparer + finder, # type: PackageFinder + wheel_cache, # type: Optional[WheelCache] + make_install_req, # type: InstallRequirementProvider + use_user_site, # type: bool + ignore_dependencies, # type: bool + ignore_installed, # type: bool + ignore_requires_python, # type: bool + force_reinstall, # type: bool + upgrade_strategy, # type: str + py_version_info=None, # type: Optional[Tuple[int, ...]] + ): + # type: (...) -> None + super().__init__() + assert upgrade_strategy in self._allowed_strategies + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + self._py_version_info = py_version_info + + self.preparer = preparer + self.finder = finder + self.wheel_cache = wheel_cache + + self.upgrade_strategy = upgrade_strategy + self.force_reinstall = force_reinstall + self.ignore_dependencies = ignore_dependencies + self.ignore_installed = ignore_installed + self.ignore_requires_python = ignore_requires_python + self.use_user_site = use_user_site + self._make_install_req = make_install_req + + self._discovered_dependencies = defaultdict( + list + ) # type: DiscoveredDependencies + + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet + """Resolve what operations need to be done + + As a side-effect of this method, the packages (and their dependencies) + are downloaded, unpacked and prepared for installation. This + preparation is done by ``pip.operations.prepare``. + + Once PyPI has static dependency metadata available, it would be + possible to move the preparation to become a step separated from + dependency resolution. + """ + requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels) + for req in root_reqs: + if req.constraint: + check_invalid_constraint_type(req) + requirement_set.add_requirement(req) + + # Actually prepare the files, and collect any exceptions. Most hash + # exceptions cannot be checked ahead of time, because + # _populate_link() needs to be called before we can make decisions + # based on link type. 
+ discovered_reqs = [] # type: List[InstallRequirement] + hash_errors = HashErrors() + for req in chain(requirement_set.all_requirements, discovered_reqs): + try: + discovered_reqs.extend(self._resolve_one(requirement_set, req)) + except HashError as exc: + exc.req = req + hash_errors.append(exc) + + if hash_errors: + raise hash_errors + + return requirement_set + + def _is_upgrade_allowed(self, req): + # type: (InstallRequirement) -> bool + if self.upgrade_strategy == "to-satisfy-only": + return False + elif self.upgrade_strategy == "eager": + return True + else: + assert self.upgrade_strategy == "only-if-needed" + return req.user_supplied or req.constraint + + def _set_req_to_reinstall(self, req): + # type: (InstallRequirement) -> None + """ + Set a requirement to be installed. + """ + # Don't uninstall the conflict if doing a user install and the + # conflict is not a user install. + if not self.use_user_site or dist_in_usersite(req.satisfied_by): + req.should_reinstall = True + req.satisfied_by = None + + def _check_skip_installed(self, req_to_install): + # type: (InstallRequirement) -> Optional[str] + """Check if req_to_install should be skipped. + + This will check if the req is installed, and whether we should upgrade + or reinstall it, taking into account all the relevant user options. + + After calling this req_to_install will only have satisfied_by set to + None if the req_to_install is to be upgraded/reinstalled etc. Any + other value will be a dist recording the current thing installed that + satisfies the requirement. + + Note that for vcs urls and the like we can't assess skipping in this + routine - we simply identify that we need to pull the thing down, + then later on it is pulled down and introspected to assess upgrade/ + reinstalls etc. + + :return: A text reason for why it was skipped, or None. + """ + if self.ignore_installed: + return None + + req_to_install.check_if_exists(self.use_user_site) + if not req_to_install.satisfied_by: + return None + + if self.force_reinstall: + self._set_req_to_reinstall(req_to_install) + return None + + if not self._is_upgrade_allowed(req_to_install): + if self.upgrade_strategy == "only-if-needed": + return "already satisfied, skipping upgrade" + return "already satisfied" + + # Check for the possibility of an upgrade. For link-based + # requirements we have to pull the tree down and inspect to assess + # the version #, so it's handled way down. + if not req_to_install.link: + try: + self.finder.find_requirement(req_to_install, upgrade=True) + except BestVersionAlreadyInstalled: + # Then the best version is installed. + return "already up-to-date" + except DistributionNotFound: + # No distribution found, so we squash the error. It will + # be raised later when we re-try later to do the install. + # Why don't we just raise here? + pass + + self._set_req_to_reinstall(req_to_install) + return None + + def _find_requirement_link(self, req): + # type: (InstallRequirement) -> Optional[Link] + upgrade = self._is_upgrade_allowed(req) + best_candidate = self.finder.find_requirement(req, upgrade) + if not best_candidate: + return None + + # Log a warning per PEP 592 if necessary before returning. + link = best_candidate.link + if link.is_yanked: + reason = link.yanked_reason or "" + msg = ( + # Mark this as a unicode string to prevent + # "UnicodeEncodeError: 'ascii' codec can't encode character" + # in Python 2 when the reason contains non-ascii characters. 
+ "The candidate selected for download or install is a " + "yanked version: {candidate}\n" + "Reason for being yanked: {reason}" + ).format(candidate=best_candidate, reason=reason) + logger.warning(msg) + + return link + + def _populate_link(self, req): + # type: (InstallRequirement) -> None + """Ensure that if a link can be found for this, that it is found. + + Note that req.link may still be None - if the requirement is already + installed and not needed to be upgraded based on the return value of + _is_upgrade_allowed(). + + If preparer.require_hashes is True, don't use the wheel cache, because + cached wheels, always built locally, have different hashes than the + files downloaded from the index server and thus throw false hash + mismatches. Furthermore, cached wheels at present have undeterministic + contents due to file modification times. + """ + if req.link is None: + req.link = self._find_requirement_link(req) + + if self.wheel_cache is None or self.preparer.require_hashes: + return + cache_entry = self.wheel_cache.get_cache_entry( + link=req.link, + package_name=req.name, + supported_tags=get_supported(), + ) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + if req.link is req.original_link and cache_entry.persistent: + req.original_link_is_in_wheel_cache = True + req.link = cache_entry.link + + def _get_dist_for(self, req): + # type: (InstallRequirement) -> Distribution + """Takes a InstallRequirement and returns a single AbstractDist \ + representing a prepared variant of the same. + """ + if req.editable: + return self.preparer.prepare_editable_requirement(req) + + # satisfied_by is only evaluated by calling _check_skip_installed, + # so it must be None here. + assert req.satisfied_by is None + skip_reason = self._check_skip_installed(req) + + if req.satisfied_by: + return self.preparer.prepare_installed_requirement(req, skip_reason) + + # We eagerly populate the link, since that's our "legacy" behavior. + self._populate_link(req) + dist = self.preparer.prepare_linked_requirement(req) + + # NOTE + # The following portion is for determining if a certain package is + # going to be re-installed/upgraded or not and reporting to the user. + # This should probably get cleaned up in a future refactor. + + # req.req is only avail after unpack for URL + # pkgs repeat check_if_exists to uninstall-on-upgrade + # (#14) + if not self.ignore_installed: + req.check_if_exists(self.use_user_site) + + if req.satisfied_by: + should_modify = ( + self.upgrade_strategy != "to-satisfy-only" + or self.force_reinstall + or self.ignore_installed + or req.link.scheme == "file" + ) + if should_modify: + self._set_req_to_reinstall(req) + else: + logger.info( + "Requirement already satisfied (use --upgrade to upgrade):" " %s", + req, + ) + return dist + + def _resolve_one( + self, + requirement_set, # type: RequirementSet + req_to_install, # type: InstallRequirement + ): + # type: (...) -> List[InstallRequirement] + """Prepare a single requirements file. + + :return: A list of additional InstallRequirements to also install. 
+ """ + # Tell user what we are doing for this requirement: + # obtain (editable), skipping, processing (local url), collecting + # (remote url or package name) + if req_to_install.constraint or req_to_install.prepared: + return [] + + req_to_install.prepared = True + + # Parse and return dependencies + dist = self._get_dist_for(req_to_install) + # This will raise UnsupportedPythonVersion if the given Python + # version isn't compatible with the distribution's Requires-Python. + _check_dist_requires_python( + dist, + version_info=self._py_version_info, + ignore_requires_python=self.ignore_requires_python, + ) + + more_reqs = [] # type: List[InstallRequirement] + + def add_req(subreq, extras_requested): + # type: (Distribution, Iterable[str]) -> None + sub_install_req = self._make_install_req( + str(subreq), + req_to_install, + ) + parent_req_name = req_to_install.name + to_scan_again, add_to_parent = requirement_set.add_requirement( + sub_install_req, + parent_req_name=parent_req_name, + extras_requested=extras_requested, + ) + if parent_req_name and add_to_parent: + self._discovered_dependencies[parent_req_name].append(add_to_parent) + more_reqs.extend(to_scan_again) + + with indent_log(): + # We add req_to_install before its dependencies, so that we + # can refer to it when adding dependencies. + if not requirement_set.has_requirement(req_to_install.name): + # 'unnamed' requirements will get added here + # 'unnamed' requirements can only come from being directly + # provided by the user. + assert req_to_install.user_supplied + requirement_set.add_requirement(req_to_install, parent_req_name=None) + + if not self.ignore_dependencies: + if req_to_install.extras: + logger.debug( + "Installing extra requirements: %r", + ",".join(req_to_install.extras), + ) + missing_requested = sorted( + set(req_to_install.extras) - set(dist.extras) + ) + for missing in missing_requested: + logger.warning("%s does not provide the extra '%s'", dist, missing) + + available_requested = sorted( + set(dist.extras) & set(req_to_install.extras) + ) + for subreq in dist.requires(available_requested): + add_req(subreq, extras_requested=available_requested) + + return more_reqs + + def get_installation_order(self, req_set): + # type: (RequirementSet) -> List[InstallRequirement] + """Create the installation order. + + The installation order is topological - requirements are installed + before the requiring thing. We break cycles at an arbitrary point, + and make no other guarantees. + """ + # The current implementation, which we may change at any point + # installs the user specified things in the order given, except when + # dependencies must come earlier to achieve topological order. 
+ order = [] + ordered_reqs = set() # type: Set[InstallRequirement] + + def schedule(req): + # type: (InstallRequirement) -> None + if req.satisfied_by or req in ordered_reqs: + return + if req.constraint: + return + ordered_reqs.add(req) + for dep in self._discovered_dependencies[req.name]: + schedule(dep) + order.append(req) + + for install_req in req_set.requirements.values(): + schedule(install_req) + return order diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py rename to venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..b284be85 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-36.pyc new file mode 100644 index 00000000..2000bb8b Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-36.pyc new file mode 100644 index 00000000..fe1f3463 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-36.pyc new file mode 100644 index 00000000..81c402b8 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-36.pyc new file mode 100644 index 00000000..a7bb1b81 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-36.pyc new file mode 100644 index 00000000..a254d01c Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-36.pyc new file mode 100644 index 00000000..92c70331 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-36.pyc new file mode 100644 index 00000000..b4007a91 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-36.pyc new file mode 100644 index 00000000..e6464d63 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py new file mode 100644 index 00000000..26821a1f --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py @@ -0,0 +1,165 @@ +from typing import FrozenSet, Iterable, Optional, Tuple, Union + +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import LegacyVersion, Version + +from pip._internal.models.link import Link, links_equivalent +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.hashes import Hashes + +CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]] +CandidateVersion = Union[LegacyVersion, Version] + + +def format_name(project, extras): + # type: (str, FrozenSet[str]) -> str + if not extras: + return project + canonical_extras = sorted(canonicalize_name(e) for e in extras) + return "{}[{}]".format(project, ",".join(canonical_extras)) + + +class Constraint: + def __init__(self, specifier, hashes, links): + # type: (SpecifierSet, Hashes, FrozenSet[Link]) -> None + self.specifier = specifier + self.hashes = hashes + self.links = links + + @classmethod + def empty(cls): + # type: () -> Constraint + return Constraint(SpecifierSet(), Hashes(), frozenset()) + + @classmethod + def from_ireq(cls, ireq): + # type: (InstallRequirement) -> Constraint + links = frozenset([ireq.link]) if ireq.link else frozenset() + return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links) + + def __nonzero__(self): + # type: () -> bool + return bool(self.specifier) or bool(self.hashes) or bool(self.links) + + def __bool__(self): + # type: () -> bool + return self.__nonzero__() + + def __and__(self, other): + # type: (InstallRequirement) -> Constraint + if not isinstance(other, InstallRequirement): + return NotImplemented + specifier = self.specifier & other.specifier + hashes = self.hashes & other.hashes(trust_internet=False) + links = self.links + if other.link: + links = links.union([other.link]) + return Constraint(specifier, hashes, links) + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + # Reject if there are any mismatched URL constraints on this package. + if self.links and not all(_match_link(link, candidate) for link in self.links): + return False + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
+ return self.specifier.contains(candidate.version, prereleases=True) + + +class Requirement: + @property + def project_name(self): + # type: () -> NormalizedName + """The "project name" of a requirement. + + This is different from ``name`` if this requirement contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Subclass should override") + + @property + def name(self): + # type: () -> str + """The name identifying this requirement in the resolver. + + This is different from ``project_name`` if this requirement contains + extras, where ``project_name`` would not contain the ``[...]`` part. + """ + raise NotImplementedError("Subclass should override") + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + return False + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + raise NotImplementedError("Subclass should override") + + def format_for_error(self): + # type: () -> str + raise NotImplementedError("Subclass should override") + + +def _match_link(link, candidate): + # type: (Link, Candidate) -> bool + if candidate.source_link: + return links_equivalent(link, candidate.source_link) + return False + + +class Candidate: + @property + def project_name(self): + # type: () -> NormalizedName + """The "project name" of the candidate. + + This is different from ``name`` if this candidate contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Override in subclass") + + @property + def name(self): + # type: () -> str + """The name identifying this candidate in the resolver. + + This is different from ``project_name`` if this candidate contains + extras, where ``project_name`` would not contain the ``[...]`` part. 
+ """ + raise NotImplementedError("Override in subclass") + + @property + def version(self): + # type: () -> CandidateVersion + raise NotImplementedError("Override in subclass") + + @property + def is_installed(self): + # type: () -> bool + raise NotImplementedError("Override in subclass") + + @property + def is_editable(self): + # type: () -> bool + raise NotImplementedError("Override in subclass") + + @property + def source_link(self): + # type: () -> Optional[Link] + raise NotImplementedError("Override in subclass") + + def iter_dependencies(self, with_requires): + # type: (bool) -> Iterable[Optional[Requirement]] + raise NotImplementedError("Override in subclass") + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + raise NotImplementedError("Override in subclass") + + def format_for_error(self): + # type: () -> str + raise NotImplementedError("Subclass should override") diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py new file mode 100644 index 00000000..da516ad3 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py @@ -0,0 +1,604 @@ +import logging +import sys +from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast + +from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.pkg_resources import Distribution + +from pip._internal.exceptions import HashError, MetadataInconsistent +from pip._internal.models.link import Link, links_equivalent +from pip._internal.models.wheel import Wheel +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.misc import dist_is_editable, normalize_version_info +from pip._internal.utils.packaging import get_requires_python + +from .base import Candidate, CandidateVersion, Requirement, format_name + +if TYPE_CHECKING: + from .factory import Factory + +logger = logging.getLogger(__name__) + +BaseCandidate = Union[ + "AlreadyInstalledCandidate", + "EditableCandidate", + "LinkCandidate", +] + + +def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]: + """The runtime version of BaseCandidate.""" + base_candidate_classes = ( + AlreadyInstalledCandidate, + EditableCandidate, + LinkCandidate, + ) + if isinstance(candidate, base_candidate_classes): + return candidate + return None + + +def make_install_req_from_link(link, template): + # type: (Link, InstallRequirement) -> InstallRequirement + assert not template.editable, "template is editable" + if template.req: + line = str(template.req) + else: + line = link.url + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options, + ), + ) + ireq.original_link = template.original_link + ireq.link = link + return ireq + + +def make_install_req_from_editable(link, template): + # type: (Link, InstallRequirement) -> InstallRequirement + assert 
template.editable, "template not editable" + return install_req_from_editable( + link.url, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options, + ), + ) + + +def make_install_req_from_dist(dist, template): + # type: (Distribution, InstallRequirement) -> InstallRequirement + project_name = canonicalize_name(dist.project_name) + if template.req: + line = str(template.req) + elif template.link: + line = f"{project_name} @ {template.link.url}" + else: + line = f"{project_name}=={dist.parsed_version}" + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options, + ), + ) + ireq.satisfied_by = dist + return ireq + + +class _InstallRequirementBackedCandidate(Candidate): + """A candidate backed by an ``InstallRequirement``. + + This represents a package request with the target not being already + in the environment, and needs to be fetched and installed. The backing + ``InstallRequirement`` is responsible for most of the leg work; this + class exposes appropriate information to the resolver. + + :param link: The link passed to the ``InstallRequirement``. The backing + ``InstallRequirement`` will use this link to fetch the distribution. + :param source_link: The link this candidate "originates" from. This is + different from ``link`` when the link is found in the wheel cache. + ``link`` would point to the wheel cache, while this points to the + found remote link (e.g. from pypi.org). + """ + + is_installed = False + + def __init__( + self, + link, # type: Link + source_link, # type: Link + ireq, # type: InstallRequirement + factory, # type: Factory + name=None, # type: Optional[NormalizedName] + version=None, # type: Optional[CandidateVersion] + ): + # type: (...) 
-> None + self._link = link + self._source_link = source_link + self._factory = factory + self._ireq = ireq + self._name = name + self._version = version + self.dist = self._prepare() + + def __str__(self): + # type: () -> str + return f"{self.name} {self.version}" + + def __repr__(self): + # type: () -> str + return "{class_name}({link!r})".format( + class_name=self.__class__.__name__, + link=str(self._link), + ) + + def __hash__(self): + # type: () -> int + return hash((self.__class__, self._link)) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, self.__class__): + return links_equivalent(self._link, other._link) + return False + + @property + def source_link(self): + # type: () -> Optional[Link] + return self._source_link + + @property + def project_name(self): + # type: () -> NormalizedName + """The normalised name of the project the candidate refers to""" + if self._name is None: + self._name = canonicalize_name(self.dist.project_name) + return self._name + + @property + def name(self): + # type: () -> str + return self.project_name + + @property + def version(self): + # type: () -> CandidateVersion + if self._version is None: + self._version = parse_version(self.dist.version) + return self._version + + def format_for_error(self): + # type: () -> str + return "{} {} (from {})".format( + self.name, + self.version, + self._link.file_path if self._link.is_file else self._link, + ) + + def _prepare_distribution(self): + # type: () -> Distribution + raise NotImplementedError("Override in subclass") + + def _check_metadata_consistency(self, dist): + # type: (Distribution) -> None + """Check for consistency of project name and version of dist.""" + canonical_name = canonicalize_name(dist.project_name) + if self._name is not None and self._name != canonical_name: + raise MetadataInconsistent( + self._ireq, + "name", + self._name, + dist.project_name, + ) + parsed_version = parse_version(dist.version) + if self._version is not None and self._version != parsed_version: + raise MetadataInconsistent( + self._ireq, + "version", + str(self._version), + dist.version, + ) + + def _prepare(self): + # type: () -> Distribution + try: + dist = self._prepare_distribution() + except HashError as e: + # Provide HashError the underlying ireq that caused it. This + # provides context for the resulting error message to show the + # offending line to the user. 
+ e.req = self._ireq + raise + self._check_metadata_consistency(dist) + return dist + + def _get_requires_python_dependency(self): + # type: () -> Optional[Requirement] + requires_python = get_requires_python(self.dist) + if requires_python is None: + return None + try: + spec = SpecifierSet(requires_python) + except InvalidSpecifier as e: + message = "Package %r has an invalid Requires-Python: %s" + logger.warning(message, self.name, e) + return None + return self._factory.make_requires_python_requirement(spec) + + def iter_dependencies(self, with_requires): + # type: (bool) -> Iterable[Optional[Requirement]] + requires = self.dist.requires() if with_requires else () + for r in requires: + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + yield self._get_requires_python_dependency() + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + return self._ireq + + +class LinkCandidate(_InstallRequirementBackedCandidate): + is_editable = False + + def __init__( + self, + link, # type: Link + template, # type: InstallRequirement + factory, # type: Factory + name=None, # type: Optional[NormalizedName] + version=None, # type: Optional[CandidateVersion] + ): + # type: (...) -> None + source_link = link + cache_entry = factory.get_wheel_cache_entry(link, name) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + link = cache_entry.link + ireq = make_install_req_from_link(link, template) + assert ireq.link == link + if ireq.link.is_wheel and not ireq.link.is_file: + wheel = Wheel(ireq.link.filename) + wheel_name = canonicalize_name(wheel.name) + assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel" + # Version may not be present for PEP 508 direct URLs + if version is not None: + wheel_version = Version(wheel.version) + assert version == wheel_version, "{!r} != {!r} for wheel {}".format( + version, wheel_version, name + ) + + if ( + cache_entry is not None + and cache_entry.persistent + and template.link is template.original_link + ): + ireq.original_link_is_in_wheel_cache = True + + super().__init__( + link=link, + source_link=source_link, + ireq=ireq, + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self): + # type: () -> Distribution + return self._factory.preparer.prepare_linked_requirement( + self._ireq, parallel_builds=True + ) + + +class EditableCandidate(_InstallRequirementBackedCandidate): + is_editable = True + + def __init__( + self, + link, # type: Link + template, # type: InstallRequirement + factory, # type: Factory + name=None, # type: Optional[NormalizedName] + version=None, # type: Optional[CandidateVersion] + ): + # type: (...) -> None + super().__init__( + link=link, + source_link=link, + ireq=make_install_req_from_editable(link, template), + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self): + # type: () -> Distribution + return self._factory.preparer.prepare_editable_requirement(self._ireq) + + +class AlreadyInstalledCandidate(Candidate): + is_installed = True + source_link = None + + def __init__( + self, + dist, # type: Distribution + template, # type: InstallRequirement + factory, # type: Factory + ): + # type: (...) -> None + self.dist = dist + self._ireq = make_install_req_from_dist(dist, template) + self._factory = factory + + # This is just logging some messages, so we can do it eagerly. 
+ # The returned dist would be exactly the same as self.dist because we + # set satisfied_by in make_install_req_from_dist. + # TODO: Supply reason based on force_reinstall and upgrade_strategy. + skip_reason = "already satisfied" + factory.preparer.prepare_installed_requirement(self._ireq, skip_reason) + + def __str__(self): + # type: () -> str + return str(self.dist) + + def __repr__(self): + # type: () -> str + return "{class_name}({distribution!r})".format( + class_name=self.__class__.__name__, + distribution=self.dist, + ) + + def __hash__(self): + # type: () -> int + return hash((self.__class__, self.name, self.version)) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, self.__class__): + return self.name == other.name and self.version == other.version + return False + + @property + def project_name(self): + # type: () -> NormalizedName + return canonicalize_name(self.dist.project_name) + + @property + def name(self): + # type: () -> str + return self.project_name + + @property + def version(self): + # type: () -> CandidateVersion + return parse_version(self.dist.version) + + @property + def is_editable(self): + # type: () -> bool + return dist_is_editable(self.dist) + + def format_for_error(self): + # type: () -> str + return f"{self.name} {self.version} (Installed)" + + def iter_dependencies(self, with_requires): + # type: (bool) -> Iterable[Optional[Requirement]] + if not with_requires: + return + for r in self.dist.requires(): + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + return None + + +class ExtrasCandidate(Candidate): + """A candidate that has 'extras', indicating additional dependencies. + + Requirements can be for a project with dependencies, something like + foo[extra]. The extras don't affect the project/version being installed + directly, but indicate that we need additional dependencies. We model that + by having an artificial ExtrasCandidate that wraps the "base" candidate. + + The ExtrasCandidate differs from the base in the following ways: + + 1. It has a unique name, of the form foo[extra]. This causes the resolver + to treat it as a separate node in the dependency graph. + 2. When we're getting the candidate's dependencies, + a) We specify that we want the extra dependencies as well. + b) We add a dependency on the base candidate. + See below for why this is needed. + 3. We return None for the underlying InstallRequirement, as the base + candidate will provide it, and we don't want to end up with duplicates. + + The dependency on the base candidate is needed so that the resolver can't + decide that it should recommend foo[extra1] version 1.0 and foo[extra2] + version 2.0. Having those candidates depend on foo=1.0 and foo=2.0 + respectively forces the resolver to recognise that this is a conflict. + """ + + def __init__( + self, + base, # type: BaseCandidate + extras, # type: FrozenSet[str] + ): + # type: (...) 
-> None + self.base = base + self.extras = extras + + def __str__(self): + # type: () -> str + name, rest = str(self.base).split(" ", 1) + return "{}[{}] {}".format(name, ",".join(self.extras), rest) + + def __repr__(self): + # type: () -> str + return "{class_name}(base={base!r}, extras={extras!r})".format( + class_name=self.__class__.__name__, + base=self.base, + extras=self.extras, + ) + + def __hash__(self): + # type: () -> int + return hash((self.base, self.extras)) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, self.__class__): + return self.base == other.base and self.extras == other.extras + return False + + @property + def project_name(self): + # type: () -> NormalizedName + return self.base.project_name + + @property + def name(self): + # type: () -> str + """The normalised name of the project the candidate refers to""" + return format_name(self.base.project_name, self.extras) + + @property + def version(self): + # type: () -> CandidateVersion + return self.base.version + + def format_for_error(self): + # type: () -> str + return "{} [{}]".format( + self.base.format_for_error(), ", ".join(sorted(self.extras)) + ) + + @property + def is_installed(self): + # type: () -> bool + return self.base.is_installed + + @property + def is_editable(self): + # type: () -> bool + return self.base.is_editable + + @property + def source_link(self): + # type: () -> Optional[Link] + return self.base.source_link + + def iter_dependencies(self, with_requires): + # type: (bool) -> Iterable[Optional[Requirement]] + factory = self.base._factory + + # Add a dependency on the exact base + # (See note 2b in the class docstring) + yield factory.make_requirement_from_candidate(self.base) + if not with_requires: + return + + # The user may have specified extras that the candidate doesn't + # support. We ignore any unsupported extras here. + valid_extras = self.extras.intersection(self.base.dist.extras) + invalid_extras = self.extras.difference(self.base.dist.extras) + for extra in sorted(invalid_extras): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra, + ) + + for r in self.base.dist.requires(valid_extras): + requirement = factory.make_requirement_from_spec( + str(r), self.base._ireq, valid_extras + ) + if requirement: + yield requirement + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + # We don't return anything here, because we always + # depend on the base candidate, and we'll get the + # install requirement from that. + return None + + +class RequiresPythonCandidate(Candidate): + is_installed = False + source_link = None + + def __init__(self, py_version_info): + # type: (Optional[Tuple[int, ...]]) -> None + if py_version_info is not None: + version_info = normalize_version_info(py_version_info) + else: + version_info = sys.version_info[:3] + self._version = Version(".".join(str(c) for c in version_info)) + + # We don't need to implement __eq__() and __ne__() since there is always + # only one RequiresPythonCandidate in a resolution, i.e. the host Python. + # The built-in object.__eq__() and object.__ne__() do exactly what we want. + + def __str__(self): + # type: () -> str + return f"Python {self._version}" + + @property + def project_name(self): + # type: () -> NormalizedName + # Avoid conflicting with the PyPI package "Python". 
+ return cast(NormalizedName, "") + + @property + def name(self): + # type: () -> str + return self.project_name + + @property + def version(self): + # type: () -> CandidateVersion + return self._version + + def format_for_error(self): + # type: () -> str + return f"Python {self.version}" + + def iter_dependencies(self, with_requires): + # type: (bool) -> Iterable[Optional[Requirement]] + return () + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + return None diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py new file mode 100644 index 00000000..5816a0ed --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py @@ -0,0 +1,661 @@ +import contextlib +import functools +import logging +from typing import ( + TYPE_CHECKING, + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Mapping, + Optional, + Sequence, + Set, + Tuple, + TypeVar, + cast, +) + +from pip._vendor.packaging.requirements import InvalidRequirement +from pip._vendor.packaging.requirements import Requirement as PackagingRequirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.pkg_resources import Distribution +from pip._vendor.resolvelib import ResolutionImpossible + +from pip._internal.cache import CacheEntry, WheelCache +from pip._internal.exceptions import ( + DistributionNotFound, + InstallationError, + InstallationSubprocessError, + MetadataInconsistent, + UnsupportedPythonVersion, + UnsupportedWheel, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import install_req_from_link_and_ireq +from pip._internal.req.req_install import InstallRequirement +from pip._internal.resolution.base import InstallRequirementProvider +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + dist_in_site_packages, + dist_in_usersite, + get_installed_distributions, +) +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import Candidate, CandidateVersion, Constraint, Requirement +from .candidates import ( + AlreadyInstalledCandidate, + BaseCandidate, + EditableCandidate, + ExtrasCandidate, + LinkCandidate, + RequiresPythonCandidate, + as_base_candidate, +) +from .found_candidates import FoundCandidates, IndexCandidateInfo +from .requirements import ( + ExplicitRequirement, + RequiresPythonRequirement, + SpecifierRequirement, + UnsatisfiableRequirement, +) + +if TYPE_CHECKING: + from typing import Protocol + + class ConflictCause(Protocol): + requirement: RequiresPythonRequirement + parent: Candidate + + +logger = logging.getLogger(__name__) + +C = TypeVar("C") +Cache = Dict[Link, C] + + +class Factory: + def __init__( + self, + finder, # type: PackageFinder + preparer, # type: RequirementPreparer + make_install_req, # type: InstallRequirementProvider + wheel_cache, # type: Optional[WheelCache] + use_user_site, # type: bool + force_reinstall, # type: bool + ignore_installed, # type: bool + ignore_requires_python, # type: bool + py_version_info=None, # type: Optional[Tuple[int, ...]] + ): + # type: (...) 
-> None + self._finder = finder + self.preparer = preparer + self._wheel_cache = wheel_cache + self._python_candidate = RequiresPythonCandidate(py_version_info) + self._make_install_req_from_spec = make_install_req + self._use_user_site = use_user_site + self._force_reinstall = force_reinstall + self._ignore_requires_python = ignore_requires_python + + self._build_failures = {} # type: Cache[InstallationError] + self._link_candidate_cache = {} # type: Cache[LinkCandidate] + self._editable_candidate_cache = {} # type: Cache[EditableCandidate] + self._installed_candidate_cache = ( + {} + ) # type: Dict[str, AlreadyInstalledCandidate] + self._extras_candidate_cache = ( + {} + ) # type: Dict[Tuple[int, FrozenSet[str]], ExtrasCandidate] + + if not ignore_installed: + self._installed_dists = { + canonicalize_name(dist.project_name): dist + for dist in get_installed_distributions(local_only=False) + } + else: + self._installed_dists = {} + + @property + def force_reinstall(self): + # type: () -> bool + return self._force_reinstall + + def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: + if not link.is_wheel: + return + wheel = Wheel(link.filename) + if wheel.supported(self._finder.target_python.get_tags()): + return + msg = f"{link.filename} is not a supported wheel on this platform." + raise UnsupportedWheel(msg) + + def _make_extras_candidate(self, base, extras): + # type: (BaseCandidate, FrozenSet[str]) -> ExtrasCandidate + cache_key = (id(base), extras) + try: + candidate = self._extras_candidate_cache[cache_key] + except KeyError: + candidate = ExtrasCandidate(base, extras) + self._extras_candidate_cache[cache_key] = candidate + return candidate + + def _make_candidate_from_dist( + self, + dist, # type: Distribution + extras, # type: FrozenSet[str] + template, # type: InstallRequirement + ): + # type: (...) -> Candidate + try: + base = self._installed_candidate_cache[dist.key] + except KeyError: + base = AlreadyInstalledCandidate(dist, template, factory=self) + self._installed_candidate_cache[dist.key] = base + if not extras: + return base + return self._make_extras_candidate(base, extras) + + def _make_candidate_from_link( + self, + link, # type: Link + extras, # type: FrozenSet[str] + template, # type: InstallRequirement + name, # type: Optional[NormalizedName] + version, # type: Optional[CandidateVersion] + ): + # type: (...) -> Optional[Candidate] + # TODO: Check already installed candidate, and use it if the link and + # editable flag match. + + if link in self._build_failures: + # We already tried this candidate before, and it does not build. + # Don't bother trying again. + return None + + if template.editable: + if link not in self._editable_candidate_cache: + try: + self._editable_candidate_cache[link] = EditableCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except (InstallationSubprocessError, MetadataInconsistent) as e: + logger.warning("Discarding %s. %s", link, e) + self._build_failures[link] = e + return None + base = self._editable_candidate_cache[link] # type: BaseCandidate + else: + if link not in self._link_candidate_cache: + try: + self._link_candidate_cache[link] = LinkCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except (InstallationSubprocessError, MetadataInconsistent) as e: + logger.warning("Discarding %s. 
%s", link, e) + self._build_failures[link] = e + return None + base = self._link_candidate_cache[link] + + if not extras: + return base + return self._make_extras_candidate(base, extras) + + def _iter_found_candidates( + self, + ireqs: Sequence[InstallRequirement], + specifier: SpecifierSet, + hashes: Hashes, + prefers_installed: bool, + incompatible_ids: Set[int], + ) -> Iterable[Candidate]: + if not ireqs: + return () + + # The InstallRequirement implementation requires us to give it a + # "template". Here we just choose the first requirement to represent + # all of them. + # Hopefully the Project model can correct this mismatch in the future. + template = ireqs[0] + assert template.req, "Candidates found on index must be PEP 508" + name = canonicalize_name(template.req.name) + + extras = frozenset() # type: FrozenSet[str] + for ireq in ireqs: + assert ireq.req, "Candidates found on index must be PEP 508" + specifier &= ireq.req.specifier + hashes &= ireq.hashes(trust_internet=False) + extras |= frozenset(ireq.extras) + + def _get_installed_candidate() -> Optional[Candidate]: + """Get the candidate for the currently-installed version.""" + # If --force-reinstall is set, we want the version from the index + # instead, so we "pretend" there is nothing installed. + if self._force_reinstall: + return None + try: + installed_dist = self._installed_dists[name] + except KeyError: + return None + # Don't use the installed distribution if its version does not fit + # the current dependency graph. + if not specifier.contains(installed_dist.version, prereleases=True): + return None + candidate = self._make_candidate_from_dist( + dist=installed_dist, + extras=extras, + template=template, + ) + # The candidate is a known incompatiblity. Don't use it. + if id(candidate) in incompatible_ids: + return None + return candidate + + def iter_index_candidate_infos(): + # type: () -> Iterator[IndexCandidateInfo] + result = self._finder.find_best_candidate( + project_name=name, + specifier=specifier, + hashes=hashes, + ) + icans = list(result.iter_applicable()) + + # PEP 592: Yanked releases must be ignored unless only yanked + # releases can satisfy the version range. So if this is false, + # all yanked icans need to be skipped. + all_yanked = all(ican.link.is_yanked for ican in icans) + + # PackageFinder returns earlier versions first, so we reverse. + for ican in reversed(icans): + if not all_yanked and ican.link.is_yanked: + continue + func = functools.partial( + self._make_candidate_from_link, + link=ican.link, + extras=extras, + template=template, + name=name, + version=ican.version, + ) + yield ican.version, func + + return FoundCandidates( + iter_index_candidate_infos, + _get_installed_candidate(), + prefers_installed, + incompatible_ids, + ) + + def _iter_explicit_candidates_from_base( + self, + base_requirements: Iterable[Requirement], + extras: FrozenSet[str], + ) -> Iterator[Candidate]: + """Produce explicit candidates from the base given an extra-ed package. + + :param base_requirements: Requirements known to the resolver. The + requirements are guaranteed to not have extras. + :param extras: The extras to inject into the explicit requirements' + candidates. + """ + for req in base_requirements: + lookup_cand, _ = req.get_candidate_lookup() + if lookup_cand is None: # Not explicit. + continue + # We've stripped extras from the identifier, and should always + # get a BaseCandidate here, unless there's a bug elsewhere. 
+ base_cand = as_base_candidate(lookup_cand) + assert base_cand is not None, "no extras here" + yield self._make_extras_candidate(base_cand, extras) + + def _iter_candidates_from_constraints( + self, + identifier: str, + constraint: Constraint, + template: InstallRequirement, + ) -> Iterator[Candidate]: + """Produce explicit candidates from constraints. + + This creates "fake" InstallRequirement objects that are basically clones + of what "should" be the template, but with original_link set to link. + """ + for link in constraint.links: + self._fail_if_link_is_unsupported_wheel(link) + candidate = self._make_candidate_from_link( + link, + extras=frozenset(), + template=install_req_from_link_and_ireq(link, template), + name=canonicalize_name(identifier), + version=None, + ) + if candidate: + yield candidate + + def find_candidates( + self, + identifier: str, + requirements: Mapping[str, Iterator[Requirement]], + incompatibilities: Mapping[str, Iterator[Candidate]], + constraint: Constraint, + prefers_installed: bool, + ) -> Iterable[Candidate]: + # Collect basic lookup information from the requirements. + explicit_candidates = set() # type: Set[Candidate] + ireqs = [] # type: List[InstallRequirement] + for req in requirements[identifier]: + cand, ireq = req.get_candidate_lookup() + if cand is not None: + explicit_candidates.add(cand) + if ireq is not None: + ireqs.append(ireq) + + # If the current identifier contains extras, add explicit candidates + # from entries from extra-less identifier. + with contextlib.suppress(InvalidRequirement): + parsed_requirement = PackagingRequirement(identifier) + explicit_candidates.update( + self._iter_explicit_candidates_from_base( + requirements.get(parsed_requirement.name, ()), + frozenset(parsed_requirement.extras), + ), + ) + + # Add explicit candidates from constraints. We only do this if there are + # kown ireqs, which represent requirements not already explicit. If + # there are no ireqs, we're constraining already-explicit requirements, + # which is handled later when we return the explicit candidates. + if ireqs: + try: + explicit_candidates.update( + self._iter_candidates_from_constraints( + identifier, + constraint, + template=ireqs[0], + ), + ) + except UnsupportedWheel: + # If we're constrained to install a wheel incompatible with the + # target architecture, no candidates will ever be valid. + return () + + # Since we cache all the candidates, incompatibility identification + # can be made quicker by comparing only the id() values. + incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())} + + # If none of the requirements want an explicit candidate, we can ask + # the finder for candidates. 
+ if not explicit_candidates: + return self._iter_found_candidates( + ireqs, + constraint.specifier, + constraint.hashes, + prefers_installed, + incompat_ids, + ) + + return ( + c + for c in explicit_candidates + if id(c) not in incompat_ids + and constraint.is_satisfied_by(c) + and all(req.is_satisfied_by(c) for req in requirements[identifier]) + ) + + def make_requirement_from_install_req(self, ireq, requested_extras): + # type: (InstallRequirement, Iterable[str]) -> Optional[Requirement] + if not ireq.match_markers(requested_extras): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + ireq.name, + ireq.markers, + ) + return None + if not ireq.link: + return SpecifierRequirement(ireq) + self._fail_if_link_is_unsupported_wheel(ireq.link) + cand = self._make_candidate_from_link( + ireq.link, + extras=frozenset(ireq.extras), + template=ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + if cand is None: + # There's no way we can satisfy a URL requirement if the underlying + # candidate fails to build. An unnamed URL must be user-supplied, so + # we fail eagerly. If the URL is named, an unsatisfiable requirement + # can make the resolver do the right thing, either backtrack (and + # maybe find some other requirement that's buildable) or raise a + # ResolutionImpossible eventually. + if not ireq.name: + raise self._build_failures[ireq.link] + return UnsatisfiableRequirement(canonicalize_name(ireq.name)) + return self.make_requirement_from_candidate(cand) + + def make_requirement_from_candidate(self, candidate): + # type: (Candidate) -> ExplicitRequirement + return ExplicitRequirement(candidate) + + def make_requirement_from_spec( + self, + specifier, # type: str + comes_from, # type: InstallRequirement + requested_extras=(), # type: Iterable[str] + ): + # type: (...) -> Optional[Requirement] + ireq = self._make_install_req_from_spec(specifier, comes_from) + return self.make_requirement_from_install_req(ireq, requested_extras) + + def make_requires_python_requirement(self, specifier): + # type: (Optional[SpecifierSet]) -> Optional[Requirement] + if self._ignore_requires_python or specifier is None: + return None + return RequiresPythonRequirement(specifier, self._python_candidate) + + def get_wheel_cache_entry(self, link, name): + # type: (Link, Optional[str]) -> Optional[CacheEntry] + """Look up the link in the wheel cache. + + If ``preparer.require_hashes`` is True, don't use the wheel cache, + because cached wheels, always built locally, have different hashes + than the files downloaded from the index server and thus throw false + hash mismatches. Furthermore, cached wheels at present have + nondeterministic contents due to file modification times. + """ + if self._wheel_cache is None or self.preparer.require_hashes: + return None + return self._wheel_cache.get_cache_entry( + link=link, + package_name=name, + supported_tags=get_supported(), + ) + + def get_dist_to_uninstall(self, candidate): + # type: (Candidate) -> Optional[Distribution] + # TODO: Are there more cases this needs to return True? Editable? + dist = self._installed_dists.get(candidate.project_name) + if dist is None: # Not installed, no uninstallation required. + return None + + # We're installing into global site. The current installation must + # be uninstalled, no matter it's in global or user site, because the + # user site installation has precedence over global. + if not self._use_user_site: + return dist + + # We're installing into user site. 
Remove the user site installation. + if dist_in_usersite(dist): + return dist + + # We're installing into user site, but the installed incompatible + # package is in global site. We can't uninstall that, and would let + # the new user installation to "shadow" it. But shadowing won't work + # in virtual environments, so we error out. + if running_under_virtualenv() and dist_in_site_packages(dist): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to {} in {}".format( + dist.project_name, + dist.location, + ) + ) + return None + + def _report_requires_python_error(self, causes): + # type: (Sequence[ConflictCause]) -> UnsupportedPythonVersion + assert causes, "Requires-Python error reported with no cause" + + version = self._python_candidate.version + + if len(causes) == 1: + specifier = str(causes[0].requirement.specifier) + message = ( + f"Package {causes[0].parent.name!r} requires a different " + f"Python: {version} not in {specifier!r}" + ) + return UnsupportedPythonVersion(message) + + message = f"Packages require a different Python. {version} not in:" + for cause in causes: + package = cause.parent.format_for_error() + specifier = str(cause.requirement.specifier) + message += f"\n{specifier!r} (required by {package})" + return UnsupportedPythonVersion(message) + + def _report_single_requirement_conflict(self, req, parent): + # type: (Requirement, Optional[Candidate]) -> DistributionNotFound + if parent is None: + req_disp = str(req) + else: + req_disp = f"{req} (from {parent.name})" + + cands = self._finder.find_all_candidates(req.project_name) + versions = [str(v) for v in sorted({c.version for c in cands})] + + logger.critical( + "Could not find a version that satisfies the requirement %s " + "(from versions: %s)", + req_disp, + ", ".join(versions) or "none", + ) + + return DistributionNotFound(f"No matching distribution found for {req}") + + def get_installation_error( + self, + e, # type: ResolutionImpossible[Requirement, Candidate] + constraints, # type: Dict[str, Constraint] + ): + # type: (...) -> InstallationError + + assert e.causes, "Installation error reported with no cause" + + # If one of the things we can't solve is "we need Python X.Y", + # that is what we report. + requires_python_causes = [ + cause + for cause in e.causes + if isinstance(cause.requirement, RequiresPythonRequirement) + and not cause.requirement.is_satisfied_by(self._python_candidate) + ] + if requires_python_causes: + # The comprehension above makes sure all Requirement instances are + # RequiresPythonRequirement, so let's cast for convinience. + return self._report_requires_python_error( + cast("Sequence[ConflictCause]", requires_python_causes), + ) + + # Otherwise, we have a set of causes which can't all be satisfied + # at once. + + # The simplest case is when we have *one* cause that can't be + # satisfied. We just report that case. + if len(e.causes) == 1: + req, parent = e.causes[0] + if req.name not in constraints: + return self._report_single_requirement_conflict(req, parent) + + # OK, we now have a list of requirements that can't all be + # satisfied at once. 
+ + # A couple of formatting helpers + def text_join(parts): + # type: (List[str]) -> str + if len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def describe_trigger(parent): + # type: (Candidate) -> str + ireq = parent.get_install_requirement() + if not ireq or not ireq.comes_from: + return f"{parent.name}=={parent.version}" + if isinstance(ireq.comes_from, InstallRequirement): + return str(ireq.comes_from.name) + return str(ireq.comes_from) + + triggers = set() + for req, parent in e.causes: + if parent is None: + # This is a root requirement, so we can report it directly + trigger = req.format_for_error() + else: + trigger = describe_trigger(parent) + triggers.add(trigger) + + if triggers: + info = text_join(sorted(triggers)) + else: + info = "the requested packages" + + msg = ( + "Cannot install {} because these package versions " + "have conflicting dependencies.".format(info) + ) + logger.critical(msg) + msg = "\nThe conflict is caused by:" + + relevant_constraints = set() + for req, parent in e.causes: + if req.name in constraints: + relevant_constraints.add(req.name) + msg = msg + "\n " + if parent: + msg = msg + f"{parent.name} {parent.version} depends on " + else: + msg = msg + "The user requested " + msg = msg + req.format_for_error() + for key in relevant_constraints: + spec = constraints[key].specifier + msg += f"\n The user requested (constraint) {key}{spec}" + + msg = ( + msg + + "\n\n" + + "To fix this you could try to:\n" + + "1. loosen the range of package versions you've specified\n" + + "2. remove package versions to allow pip attempt to solve " + + "the dependency conflict\n" + ) + + logger.info(msg) + + return DistributionNotFound( + "ResolutionImpossible: for help visit " + "https://pip.pypa.io/en/latest/user_guide/" + "#fixing-conflicting-dependencies" + ) diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py new file mode 100644 index 00000000..21fa08ec --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py @@ -0,0 +1,145 @@ +"""Utilities to lazily create and visit candidates found. + +Creating and visiting a candidate is a *very* costly operation. It involves +fetching, extracting, potentially building modules from source, and verifying +distribution metadata. It is therefore crucial for performance to keep +everything here lazy all the way down, so we only touch candidates that we +absolutely need, and not "download the world" when we only need one version of +something. +""" + +import functools +from typing import Callable, Iterator, Optional, Set, Tuple + +from pip._vendor.packaging.version import _BaseVersion +from pip._vendor.six.moves import collections_abc # type: ignore + +from .base import Candidate + +IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]] + + +def _iter_built(infos): + # type: (Iterator[IndexCandidateInfo]) -> Iterator[Candidate] + """Iterator for ``FoundCandidates``. + + This iterator is used when the package is not already installed. Candidates + from index come later in their normal ordering. 
+ """ + versions_found = set() # type: Set[_BaseVersion] + for version, func in infos: + if version in versions_found: + continue + candidate = func() + if candidate is None: + continue + yield candidate + versions_found.add(version) + + +def _iter_built_with_prepended(installed, infos): + # type: (Candidate, Iterator[IndexCandidateInfo]) -> Iterator[Candidate] + """Iterator for ``FoundCandidates``. + + This iterator is used when the resolver prefers the already-installed + candidate and NOT to upgrade. The installed candidate is therefore + always yielded first, and candidates from index come later in their + normal ordering, except skipped when the version is already installed. + """ + yield installed + versions_found = {installed.version} # type: Set[_BaseVersion] + for version, func in infos: + if version in versions_found: + continue + candidate = func() + if candidate is None: + continue + yield candidate + versions_found.add(version) + + +def _iter_built_with_inserted(installed, infos): + # type: (Candidate, Iterator[IndexCandidateInfo]) -> Iterator[Candidate] + """Iterator for ``FoundCandidates``. + + This iterator is used when the resolver prefers to upgrade an + already-installed package. Candidates from index are returned in their + normal ordering, except replaced when the version is already installed. + + The implementation iterates through and yields other candidates, inserting + the installed candidate exactly once before we start yielding older or + equivalent candidates, or after all other candidates if they are all newer. + """ + versions_found = set() # type: Set[_BaseVersion] + for version, func in infos: + if version in versions_found: + continue + # If the installed candidate is better, yield it first. + if installed.version >= version: + yield installed + versions_found.add(installed.version) + candidate = func() + if candidate is None: + continue + yield candidate + versions_found.add(version) + + # If the installed candidate is older than all other candidates. + if installed.version not in versions_found: + yield installed + + +class FoundCandidates(collections_abc.Sequence): + """A lazy sequence to provide candidates to the resolver. + + The intended usage is to return this from `find_matches()` so the resolver + can iterate through the sequence multiple times, but only access the index + page when remote packages are actually needed. This improve performances + when suitable candidates are already installed on disk. + """ + + def __init__( + self, + get_infos: Callable[[], Iterator[IndexCandidateInfo]], + installed: Optional[Candidate], + prefers_installed: bool, + incompatible_ids: Set[int], + ): + self._get_infos = get_infos + self._installed = installed + self._prefers_installed = prefers_installed + self._incompatible_ids = incompatible_ids + + def __getitem__(self, index): + # type: (int) -> Candidate + # Implemented to satisfy the ABC check. This is not needed by the + # resolver, and should not be used by the provider either (for + # performance reasons). 
+ raise NotImplementedError("don't do this") + + def __iter__(self): + # type: () -> Iterator[Candidate] + infos = self._get_infos() + if not self._installed: + iterator = _iter_built(infos) + elif self._prefers_installed: + iterator = _iter_built_with_prepended(self._installed, infos) + else: + iterator = _iter_built_with_inserted(self._installed, infos) + return (c for c in iterator if id(c) not in self._incompatible_ids) + + def __len__(self): + # type: () -> int + # Implemented to satisfy the ABC check. This is not needed by the + # resolver, and should not be used by the provider either (for + # performance reasons). + raise NotImplementedError("don't do this") + + @functools.lru_cache(maxsize=1) + def __bool__(self): + # type: () -> bool + if self._prefers_installed and self._installed: + return True + return any(self) + + __nonzero__ = __bool__ # XXX: Python 2. diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py new file mode 100644 index 00000000..0be58fd3 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py @@ -0,0 +1,175 @@ +from typing import TYPE_CHECKING, Dict, Iterable, Iterator, Mapping, Sequence, Union + +from pip._vendor.resolvelib.providers import AbstractProvider + +from .base import Candidate, Constraint, Requirement +from .factory import Factory + +if TYPE_CHECKING: + from pip._vendor.resolvelib.providers import Preference + from pip._vendor.resolvelib.resolvers import RequirementInformation + + PreferenceInformation = RequirementInformation[Requirement, Candidate] + + _ProviderBase = AbstractProvider[Requirement, Candidate, str] +else: + _ProviderBase = AbstractProvider + +# Notes on the relationship between the provider, the factory, and the +# candidate and requirement classes. +# +# The provider is a direct implementation of the resolvelib class. Its role +# is to deliver the API that resolvelib expects. +# +# Rather than work with completely abstract "requirement" and "candidate" +# concepts as resolvelib does, pip has concrete classes implementing these two +# ideas. The API of Requirement and Candidate objects are defined in the base +# classes, but essentially map fairly directly to the equivalent provider +# methods. In particular, `find_matches` and `is_satisfied_by` are +# requirement methods, and `get_dependencies` is a candidate method. +# +# The factory is the interface to pip's internal mechanisms. It is stateless, +# and is created by the resolver and held as a property of the provider. It is +# responsible for creating Requirement and Candidate objects, and provides +# services to those objects (access to pip's finder and preparer). + + +class PipProvider(_ProviderBase): + """Pip's provider implementation for resolvelib. + + :params constraints: A mapping of constraints specified by the user. Keys + are canonicalized project names. + :params ignore_dependencies: Whether the user specified ``--no-deps``. + :params upgrade_strategy: The user-specified upgrade strategy. + :params user_requested: A set of canonicalized package names that the user + supplied for pip to install/upgrade. + """ + + def __init__( + self, + factory, # type: Factory + constraints, # type: Dict[str, Constraint] + ignore_dependencies, # type: bool + upgrade_strategy, # type: str + user_requested, # type: Dict[str, int] + ): + # type: (...) 
-> None + self._factory = factory + self._constraints = constraints + self._ignore_dependencies = ignore_dependencies + self._upgrade_strategy = upgrade_strategy + self._user_requested = user_requested + + def identify(self, requirement_or_candidate): + # type: (Union[Requirement, Candidate]) -> str + return requirement_or_candidate.name + + def get_preference( + self, + identifier: str, + resolutions: Mapping[str, Candidate], + candidates: Mapping[str, Iterator[Candidate]], + information: Mapping[str, Iterator["PreferenceInformation"]], + ) -> "Preference": + """Produce a sort key for given requirement based on preference. + + The lower the return value is, the more preferred this group of + arguments is. + + Currently pip considers the followings in order: + + * Prefer if any of the known requirements points to an explicit URL. + * If equal, prefer if any requirements contain ``===`` and ``==``. + * If equal, prefer if requirements include version constraints, e.g. + ``>=`` and ``<``. + * If equal, prefer user-specified (non-transitive) requirements, and + order user-specified requirements by the order they are specified. + * If equal, order alphabetically for consistency (helps debuggability). + """ + + def _get_restrictive_rating(requirements): + # type: (Iterable[Requirement]) -> int + """Rate how restrictive a set of requirements are. + + ``Requirement.get_candidate_lookup()`` returns a 2-tuple for + lookup. The first element is ``Optional[Candidate]`` and the + second ``Optional[InstallRequirement]``. + + * If the requirement is an explicit one, the explicitly-required + candidate is returned as the first element. + * If the requirement is based on a PEP 508 specifier, the backing + ``InstallRequirement`` is returned as the second element. + + We use the first element to check whether there is an explicit + requirement, and the second for equality operator. + """ + lookups = (r.get_candidate_lookup() for r in requirements) + cands, ireqs = zip(*lookups) + if any(cand is not None for cand in cands): + return 0 + spec_sets = (ireq.specifier for ireq in ireqs if ireq) + operators = [ + specifier.operator for spec_set in spec_sets for specifier in spec_set + ] + if any(op in ("==", "===") for op in operators): + return 1 + if operators: + return 2 + # A "bare" requirement without any version requirements. + return 3 + + rating = _get_restrictive_rating(r for r, _ in information[identifier]) + order = self._user_requested.get(identifier, float("inf")) + + # HACK: Setuptools have a very long and solid backward compatibility + # track record, and extremely few projects would request a narrow, + # non-recent version range of it since that would break a lot things. + # (Most projects specify it only to request for an installer feature, + # which does not work, but that's another topic.) Intentionally + # delaying Setuptools helps reduce branches the resolver has to check. + # This serves as a temporary fix for issues like "apache-airlfow[all]" + # while we work on "proper" branch pruning techniques. + delay_this = identifier == "setuptools" + + return (delay_this, rating, order, identifier) + + def find_matches( + self, + identifier: str, + requirements: Mapping[str, Iterator[Requirement]], + incompatibilities: Mapping[str, Iterator[Candidate]], + ) -> Iterable[Candidate]: + def _eligible_for_upgrade(name): + # type: (str) -> bool + """Are upgrades allowed for this project? 
+ + This checks the upgrade strategy, and whether the project was one + that the user specified in the command line, in order to decide + whether we should upgrade if there's a newer version available. + + (Note that we don't need access to the `--upgrade` flag, because + an upgrade strategy of "to-satisfy-only" means that `--upgrade` + was not specified). + """ + if self._upgrade_strategy == "eager": + return True + elif self._upgrade_strategy == "only-if-needed": + return name in self._user_requested + return False + + return self._factory.find_candidates( + identifier=identifier, + requirements=requirements, + constraint=self._constraints.get(identifier, Constraint.empty()), + prefers_installed=(not _eligible_for_upgrade(identifier)), + incompatibilities=incompatibilities, + ) + + def is_satisfied_by(self, requirement, candidate): + # type: (Requirement, Candidate) -> bool + return requirement.is_satisfied_by(candidate) + + def get_dependencies(self, candidate): + # type: (Candidate) -> Sequence[Requirement] + with_requires = not self._ignore_dependencies + return [r for r in candidate.iter_dependencies(with_requires) if r is not None] diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/reporter.py new file mode 100644 index 00000000..074583de --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/reporter.py @@ -0,0 +1,78 @@ +from collections import defaultdict +from logging import getLogger +from typing import Any, DefaultDict + +from pip._vendor.resolvelib.reporters import BaseReporter + +from .base import Candidate, Requirement + +logger = getLogger(__name__) + + +class PipReporter(BaseReporter): + def __init__(self): + # type: () -> None + self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int] + + self._messages_at_backtrack = { + 1: ( + "pip is looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 8: ( + "pip is looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 13: ( + "This is taking longer than usual. You might need to provide " + "the dependency resolver with stricter constraints to reduce " + "runtime. If you want to abort this run, you can press " + "Ctrl + C to do so. 
To improve how pip performs, tell us what " + "happened here: https://pip.pypa.io/surveys/backtracking" + ), + } + + def backtracking(self, candidate): + # type: (Candidate) -> None + self.backtracks_by_package[candidate.name] += 1 + + count = self.backtracks_by_package[candidate.name] + if count not in self._messages_at_backtrack: + return + + message = self._messages_at_backtrack[count] + logger.info("INFO: %s", message.format(package_name=candidate.name)) + + +class PipDebuggingReporter(BaseReporter): + """A reporter that does an info log for every event it sees.""" + + def starting(self): + # type: () -> None + logger.info("Reporter.starting()") + + def starting_round(self, index): + # type: (int) -> None + logger.info("Reporter.starting_round(%r)", index) + + def ending_round(self, index, state): + # type: (int, Any) -> None + logger.info("Reporter.ending_round(%r, state)", index) + + def ending(self, state): + # type: (Any) -> None + logger.info("Reporter.ending(%r)", state) + + def adding_requirement(self, requirement, parent): + # type: (Requirement, Candidate) -> None + logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent) + + def backtracking(self, candidate): + # type: (Candidate) -> None + logger.info("Reporter.backtracking(%r)", candidate) + + def pinning(self, candidate): + # type: (Candidate) -> None + logger.info("Reporter.pinning(%r)", candidate) diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py new file mode 100644 index 00000000..a7fcdd1e --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py @@ -0,0 +1,198 @@ +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name + +from pip._internal.req.req_install import InstallRequirement + +from .base import Candidate, CandidateLookup, Requirement, format_name + + +class ExplicitRequirement(Requirement): + def __init__(self, candidate): + # type: (Candidate) -> None + self.candidate = candidate + + def __str__(self): + # type: () -> str + return str(self.candidate) + + def __repr__(self): + # type: () -> str + return "{class_name}({candidate!r})".format( + class_name=self.__class__.__name__, + candidate=self.candidate, + ) + + @property + def project_name(self): + # type: () -> NormalizedName + # No need to canonicalise - the candidate did this + return self.candidate.project_name + + @property + def name(self): + # type: () -> str + # No need to canonicalise - the candidate did this + return self.candidate.name + + def format_for_error(self): + # type: () -> str + return self.candidate.format_for_error() + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + return self.candidate, None + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + return candidate == self.candidate + + +class SpecifierRequirement(Requirement): + def __init__(self, ireq): + # type: (InstallRequirement) -> None + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = ireq + self._extras = frozenset(ireq.extras) + + def __str__(self): + # type: () -> str + return str(self._ireq.req) + + def __repr__(self): + # type: () -> str + return "{class_name}({requirement!r})".format( + class_name=self.__class__.__name__, + requirement=str(self._ireq.req), + ) + + @property + def project_name(self): + # type: () -> NormalizedName + assert self._ireq.req, 
"Specifier-backed ireq is always PEP 508" + return canonicalize_name(self._ireq.req.name) + + @property + def name(self): + # type: () -> str + return format_name(self.project_name, self._extras) + + def format_for_error(self): + # type: () -> str + + # Convert comma-separated specifiers into "A, B, ..., F and G" + # This makes the specifier a bit more "human readable", without + # risking a change in meaning. (Hopefully! Not all edge cases have + # been checked) + parts = [s.strip() for s in str(self).split(",")] + if len(parts) == 0: + return "" + elif len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + return None, self._ireq + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + assert candidate.name == self.name, ( + f"Internal issue: Candidate is not for this requirement " + f"{candidate.name} vs {self.name}" + ) + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. + assert self._ireq.req, "Specifier-backed ireq is always PEP 508" + spec = self._ireq.req.specifier + return spec.contains(candidate.version, prereleases=True) + + +class RequiresPythonRequirement(Requirement): + """A requirement representing Requires-Python metadata.""" + + def __init__(self, specifier, match): + # type: (SpecifierSet, Candidate) -> None + self.specifier = specifier + self._candidate = match + + def __str__(self): + # type: () -> str + return f"Python {self.specifier}" + + def __repr__(self): + # type: () -> str + return "{class_name}({specifier!r})".format( + class_name=self.__class__.__name__, + specifier=str(self.specifier), + ) + + @property + def project_name(self): + # type: () -> NormalizedName + return self._candidate.project_name + + @property + def name(self): + # type: () -> str + return self._candidate.name + + def format_for_error(self): + # type: () -> str + return str(self) + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + if self.specifier.contains(self._candidate.version, prereleases=True): + return self._candidate, None + return None, None + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + assert candidate.name == self._candidate.name, "Not Python candidate" + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
+ return self.specifier.contains(candidate.version, prereleases=True) + + +class UnsatisfiableRequirement(Requirement): + """A requirement that cannot be satisfied.""" + + def __init__(self, name): + # type: (NormalizedName) -> None + self._name = name + + def __str__(self): + # type: () -> str + return f"{self._name} (unavailable)" + + def __repr__(self): + # type: () -> str + return "{class_name}({name!r})".format( + class_name=self.__class__.__name__, + name=str(self._name), + ) + + @property + def project_name(self): + # type: () -> NormalizedName + return self._name + + @property + def name(self): + # type: () -> str + return self._name + + def format_for_error(self): + # type: () -> str + return str(self) + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + return None, None + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + return False diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py new file mode 100644 index 00000000..b90f82cf --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py @@ -0,0 +1,305 @@ +import functools +import logging +import os +from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible +from pip._vendor.resolvelib import Resolver as RLResolver +from pip._vendor.resolvelib.structs import DirectedGraph + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import InstallationError +from pip._internal.index.package_finder import PackageFinder +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.resolution.resolvelib.provider import PipProvider +from pip._internal.resolution.resolvelib.reporter import ( + PipDebuggingReporter, + PipReporter, +) +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import dist_is_editable + +from .base import Candidate, Constraint, Requirement +from .factory import Factory + +if TYPE_CHECKING: + from pip._vendor.resolvelib.resolvers import Result as RLResult + + Result = RLResult[Requirement, Candidate, str] + + +logger = logging.getLogger(__name__) + + +class Resolver(BaseResolver): + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer, # type: RequirementPreparer + finder, # type: PackageFinder + wheel_cache, # type: Optional[WheelCache] + make_install_req, # type: InstallRequirementProvider + use_user_site, # type: bool + ignore_dependencies, # type: bool + ignore_installed, # type: bool + ignore_requires_python, # type: bool + force_reinstall, # type: bool + upgrade_strategy, # type: str + py_version_info=None, # type: Optional[Tuple[int, ...]] + ): + super().__init__() + assert upgrade_strategy in self._allowed_strategies + + self.factory = Factory( + finder=finder, + preparer=preparer, + make_install_req=make_install_req, + wheel_cache=wheel_cache, + use_user_site=use_user_site, + 
force_reinstall=force_reinstall, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + py_version_info=py_version_info, + ) + self.ignore_dependencies = ignore_dependencies + self.upgrade_strategy = upgrade_strategy + self._result = None # type: Optional[Result] + + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet + + constraints = {} # type: Dict[str, Constraint] + user_requested = {} # type: Dict[str, int] + requirements = [] + for i, req in enumerate(root_reqs): + if req.constraint: + # Ensure we only accept valid constraints + problem = check_invalid_constraint_type(req) + if problem: + raise InstallationError(problem) + if not req.match_markers(): + continue + assert req.name, "Constraint must be named" + name = canonicalize_name(req.name) + if name in constraints: + constraints[name] &= req + else: + constraints[name] = Constraint.from_ireq(req) + else: + if req.user_supplied and req.name: + canonical_name = canonicalize_name(req.name) + if canonical_name not in user_requested: + user_requested[canonical_name] = i + r = self.factory.make_requirement_from_install_req( + req, requested_extras=() + ) + if r is not None: + requirements.append(r) + + provider = PipProvider( + factory=self.factory, + constraints=constraints, + ignore_dependencies=self.ignore_dependencies, + upgrade_strategy=self.upgrade_strategy, + user_requested=user_requested, + ) + if "PIP_RESOLVER_DEBUG" in os.environ: + reporter = PipDebuggingReporter() # type: BaseReporter + else: + reporter = PipReporter() + resolver = RLResolver( + provider, + reporter, + ) # type: RLResolver[Requirement, Candidate, str] + + try: + try_to_avoid_resolution_too_deep = 2000000 + result = self._result = resolver.resolve( + requirements, max_rounds=try_to_avoid_resolution_too_deep + ) + + except ResolutionImpossible as e: + error = self.factory.get_installation_error( + cast("ResolutionImpossible[Requirement, Candidate]", e), + constraints, + ) + raise error from e + + req_set = RequirementSet(check_supported_wheels=check_supported_wheels) + for candidate in result.mapping.values(): + ireq = candidate.get_install_requirement() + if ireq is None: + continue + + # Check if there is already an installation under the same name, + # and set a flag for later stages to uninstall it, if needed. + installed_dist = self.factory.get_dist_to_uninstall(candidate) + if installed_dist is None: + # There is no existing installation -- nothing to uninstall. + ireq.should_reinstall = False + elif self.factory.force_reinstall: + # The --force-reinstall flag is set -- reinstall. + ireq.should_reinstall = True + elif parse_version(installed_dist.version) != candidate.version: + # The installation is different in version -- reinstall. + ireq.should_reinstall = True + elif candidate.is_editable or dist_is_editable(installed_dist): + # The incoming distribution is editable, or different in + # editable-ness to installation -- reinstall. + ireq.should_reinstall = True + elif candidate.source_link and candidate.source_link.is_file: + # The incoming distribution is under file:// + if candidate.source_link.is_wheel: + # is a local wheel -- do nothing. + logger.info( + "%s is already installed with the same version as the " + "provided wheel. 
Use --force-reinstall to force an " + "installation of the wheel.", + ireq.name, + ) + continue + + looks_like_sdist = ( + is_archive_file(candidate.source_link.file_path) + and candidate.source_link.ext != ".zip" + ) + if looks_like_sdist: + # is a local sdist -- show a deprecation warning! + reason = ( + "Source distribution is being reinstalled despite an " + "installed package having the same name and version as " + "the installed package." + ) + replacement = "use --force-reinstall" + deprecated( + reason=reason, + replacement=replacement, + gone_in="21.2", + issue=8711, + ) + + # is a local sdist or path -- reinstall + ireq.should_reinstall = True + else: + continue + + link = candidate.source_link + if link and link.is_yanked: + # The reason can contain non-ASCII characters, Unicode + # is required for Python 2. + msg = ( + "The candidate selected for download or install is a " + "yanked version: {name!r} candidate (version {version} " + "at {link})\nReason for being yanked: {reason}" + ).format( + name=candidate.name, + version=candidate.version, + link=link, + reason=link.yanked_reason or "", + ) + logger.warning(msg) + + req_set.add_named_requirement(ireq) + + reqs = req_set.all_requirements + self.factory.preparer.prepare_linked_requirements_more(reqs) + return req_set + + def get_installation_order(self, req_set): + # type: (RequirementSet) -> List[InstallRequirement] + """Get order for installation of requirements in RequirementSet. + + The returned list contains a requirement before another that depends on + it. This helps ensure that the environment is kept consistent as they + get installed one-by-one. + + The current implementation creates a topological ordering of the + dependency graph, while breaking any cycles in the graph at arbitrary + points. We make no guarantees about where the cycle would be broken, + other than they would be broken. + """ + assert self._result is not None, "must call resolve() first" + + graph = self._result.graph + weights = get_topological_weights( + graph, + expected_node_count=len(self._result.mapping) + 1, + ) + + sorted_items = sorted( + req_set.requirements.items(), + key=functools.partial(_req_set_item_sorter, weights=weights), + reverse=True, + ) + return [ireq for _, ireq in sorted_items] + + +def get_topological_weights(graph, expected_node_count): + # type: (DirectedGraph[Optional[str]], int) -> Dict[Optional[str], int] + """Assign weights to each node based on how "deep" they are. + + This implementation may change at any point in the future without prior + notice. + + We take the length for the longest path to any node from root, ignoring any + paths that contain a single node twice (i.e. cycles). This is done through + a depth-first search through the graph, while keeping track of the path to + the node. + + Cycles in the graph result would result in node being revisited while also + being it's own path. In this case, take no action. This helps ensure we + don't get stuck in a cycle. + + When assigning weight, the longer path (i.e. larger length) is preferred. + """ + path = set() # type: Set[Optional[str]] + weights = {} # type: Dict[Optional[str], int] + + def visit(node): + # type: (Optional[str]) -> None + if node in path: + # We hit a cycle, so we'll break it here. + return + + # Time to visit the children! 
+ path.add(node) + for child in graph.iter_children(node): + visit(child) + path.remove(node) + + last_known_parent_count = weights.get(node, 0) + weights[node] = max(last_known_parent_count, len(path)) + + # `None` is guaranteed to be the root node by resolvelib. + visit(None) + + # Sanity checks + assert weights[None] == 0 + assert len(weights) == expected_node_count + + return weights + + +def _req_set_item_sorter( + item, # type: Tuple[str, InstallRequirement] + weights, # type: Dict[Optional[str], int] +): + # type: (...) -> Tuple[int, str] + """Key function used to sort install requirements for installation. + + Based on the "weight" mapping calculated in ``get_installation_order()``. + The canonical package name is returned as the second member as a tie- + breaker to ensure the result is predictable, which is useful in tests. + """ + name = canonicalize_name(item[0]) + return weights[name], name diff --git a/venv/Lib/site-packages/pip/_internal/self_outdated_check.py b/venv/Lib/site-packages/pip/_internal/self_outdated_check.py new file mode 100644 index 00000000..6b24965b --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/self_outdated_check.py @@ -0,0 +1,187 @@ +import datetime +import hashlib +import json +import logging +import optparse +import os.path +import sys +from typing import Any, Dict + +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import get_default_environment +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.session import PipSession +from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace +from pip._internal.utils.misc import ensure_dir + +SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" + + +logger = logging.getLogger(__name__) + + +def _get_statefile_name(key): + # type: (str) -> str + key_bytes = key.encode() + name = hashlib.sha224(key_bytes).hexdigest() + return name + + +class SelfCheckState: + def __init__(self, cache_dir): + # type: (str) -> None + self.state = {} # type: Dict[str, Any] + self.statefile_path = None + + # Try to load the existing state + if cache_dir: + self.statefile_path = os.path.join( + cache_dir, "selfcheck", _get_statefile_name(self.key) + ) + try: + with open(self.statefile_path, encoding="utf-8") as statefile: + self.state = json.load(statefile) + except (OSError, ValueError, KeyError): + # Explicitly suppressing exceptions, since we don't want to + # error out if the cache file is invalid. + pass + + @property + def key(self): + # type: () -> str + return sys.prefix + + def save(self, pypi_version, current_time): + # type: (str, datetime.datetime) -> None + # If we do not have a path to cache in, don't bother saving. + if not self.statefile_path: + return + + # Check to make sure that we own the directory + if not check_path_owner(os.path.dirname(self.statefile_path)): + return + + # Now that we've ensured the directory is owned by this user, we'll go + # ahead and make sure that all our directories are created. + ensure_dir(os.path.dirname(self.statefile_path)) + + state = { + # Include the key so it's easy to tell which pip wrote the + # file. 
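For reference, the depth-first weighting performed by get_topological_weights in resolver.py (diffed above) can be shown on a plain dict-based graph. The function below is an illustrative sketch, not pip's API; None stands in for resolvelib's synthetic root node:

def topological_weights(children):
    """Weight each node by the longest root-to-node path, ignoring cycles (sketch)."""
    path, weights = set(), {}

    def visit(node):
        if node in path:          # cycle: stop instead of recursing forever
            return
        path.add(node)
        for child in children.get(node, ()):
            visit(child)
        path.remove(node)
        # Keep the deepest position at which this node was ever reached.
        weights[node] = max(weights.get(node, 0), len(path))

    visit(None)
    return weights


graph = {None: ["a", "b"], "a": ["c"], "b": ["c"], "c": []}
print(topological_weights(graph))   # {'c': 2, 'a': 1, 'b': 1, None: 0}

Sorting by these weights in reverse yields dependencies before their dependents, which is how the weights are consumed in get_installation_order.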
+ "key": self.key, + "last_check": current_time.strftime(SELFCHECK_DATE_FMT), + "pypi_version": pypi_version, + } + + text = json.dumps(state, sort_keys=True, separators=(",", ":")) + + with adjacent_tmp_file(self.statefile_path) as f: + f.write(text.encode()) + + try: + # Since we have a prefix-specific state file, we can just + # overwrite whatever is there, no need to check. + replace(f.name, self.statefile_path) + except OSError: + # Best effort. + pass + + +def was_installed_by_pip(pkg): + # type: (str) -> bool + """Checks whether pkg was installed by pip + + This is used not to display the upgrade message when pip is in fact + installed by system package manager, such as dnf on Fedora. + """ + dist = get_default_environment().get_distribution(pkg) + return dist is not None and "pip" == dist.installer + + +def pip_self_version_check(session, options): + # type: (PipSession, optparse.Values) -> None + """Check for an update for pip. + + Limit the frequency of checks to once per week. State is stored either in + the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix + of the pip script path. + """ + installed_dist = get_default_environment().get_distribution("pip") + if not installed_dist: + return + + pip_version = installed_dist.version + pypi_version = None + + try: + state = SelfCheckState(cache_dir=options.cache_dir) + + current_time = datetime.datetime.utcnow() + # Determine if we need to refresh the state + if "last_check" in state.state and "pypi_version" in state.state: + last_check = datetime.datetime.strptime( + state.state["last_check"], + SELFCHECK_DATE_FMT + ) + if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: + pypi_version = state.state["pypi_version"] + + # Refresh the version if we need to or just see if we need to warn + if pypi_version is None: + # Lets use PackageFinder to see what the latest pip version is + link_collector = LinkCollector.create( + session, + options=options, + suppress_no_index=True, + ) + + # Pass allow_yanked=False so we don't suggest upgrading to a + # yanked version. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=False, # Explicitly set to False + ) + + finder = PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + ) + best_candidate = finder.find_best_candidate("pip").best_candidate + if best_candidate is None: + return + pypi_version = str(best_candidate.version) + + # save that we've performed a check + state.save(pypi_version, current_time) + + remote_version = parse_version(pypi_version) + + local_version_is_older = ( + pip_version < remote_version and + pip_version.base_version != remote_version.base_version and + was_installed_by_pip('pip') + ) + + # Determine if our pypi_version is older + if not local_version_is_older: + return + + # We cannot tell how the current pip is available in the current + # command context, so be pragmatic here and suggest the command + # that's always available. This does not accommodate spaces in + # `sys.executable`. 
+ pip_cmd = f"{sys.executable} -m pip" + logger.warning( + "You are using pip version %s; however, version %s is " + "available.\nYou should consider upgrading via the " + "'%s install --upgrade pip' command.", + pip_version, pypi_version, pip_cmd + ) + except Exception: + logger.debug( + "There was an error checking the latest version of pip", + exc_info=True, + ) diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/__init__.py b/venv/Lib/site-packages/pip/_internal/utils/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pkg_resources/_vendor/__init__.py rename to venv/Lib/site-packages/pip/_internal/utils/__init__.py diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..7e51f553 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-36.pyc new file mode 100644 index 00000000..69a4dab9 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-36.pyc new file mode 100644 index 00000000..e8e1bb2f Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-36.pyc new file mode 100644 index 00000000..fc15f61b Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-36.pyc new file mode 100644 index 00000000..fd0fbef5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-36.pyc new file mode 100644 index 00000000..d681f432 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-36.pyc new file mode 100644 index 00000000..39f7b6b2 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-36.pyc new file mode 100644 index 00000000..598d9406 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-36.pyc 
b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-36.pyc new file mode 100644 index 00000000..6ce9071a Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-36.pyc new file mode 100644 index 00000000..c892c4fb Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-36.pyc new file mode 100644 index 00000000..4d3baa3b Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-36.pyc new file mode 100644 index 00000000..6e465ed9 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-36.pyc new file mode 100644 index 00000000..3c99fa1e Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-36.pyc new file mode 100644 index 00000000..f385f1ab Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-36.pyc new file mode 100644 index 00000000..450bcb84 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-36.pyc new file mode 100644 index 00000000..6cabdcb6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-36.pyc new file mode 100644 index 00000000..71ea87c6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-36.pyc new file mode 100644 index 00000000..e792565e Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-36.pyc new file mode 100644 index 00000000..d60504c0 Binary files /dev/null and 
b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/parallel.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/parallel.cpython-36.pyc new file mode 100644 index 00000000..3c8f1d5b Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/parallel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-36.pyc new file mode 100644 index 00000000..4e1bc8c1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-36.pyc new file mode 100644 index 00000000..e8742b39 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-36.pyc new file mode 100644 index 00000000..6f9c48fd Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-36.pyc new file mode 100644 index 00000000..e6cbe98d Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-36.pyc new file mode 100644 index 00000000..2df175e6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-36.pyc new file mode 100644 index 00000000..4ff4f0eb Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-36.pyc new file mode 100644 index 00000000..710d32be Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-36.pyc new file mode 100644 index 00000000..4c3b8e6d Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/appdirs.py b/venv/Lib/site-packages/pip/_internal/utils/appdirs.py new file mode 100644 index 00000000..db974dad --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/appdirs.py @@ -0,0 +1,38 @@ +""" +This code wraps the vendored appdirs module to so the return values are +compatible for the current pip code base. 
+ +The intention is to rewrite current usages gradually, keeping the tests pass, +and eventually drop this after all usages are changed. +""" + +import os +from typing import List + +from pip._vendor import appdirs as _appdirs + + +def user_cache_dir(appname): + # type: (str) -> str + return _appdirs.user_cache_dir(appname, appauthor=False) + + +def user_config_dir(appname, roaming=True): + # type: (str, bool) -> str + path = _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) + if _appdirs.system == "darwin" and not os.path.isdir(path): + path = os.path.expanduser("~/.config/") + if appname: + path = os.path.join(path, appname) + return path + + +# for the discussion regarding site_config_dir locations +# see +def site_config_dirs(appname): + # type: (str) -> List[str] + dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) + if _appdirs.system not in ["win32", "darwin"]: + # always look in /etc directly as well + return dirval.split(os.pathsep) + ["/etc"] + return [dirval] diff --git a/venv/Lib/site-packages/pip/_internal/utils/compat.py b/venv/Lib/site-packages/pip/_internal/utils/compat.py new file mode 100644 index 00000000..1fb2dc72 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/compat.py @@ -0,0 +1,65 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" + +import logging +import os +import sys + +__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"] + + +logger = logging.getLogger(__name__) + + +def has_tls(): + # type: () -> bool + try: + import _ssl # noqa: F401 # ignore unused + + return True + except ImportError: + pass + + from pip._vendor.urllib3.util import IS_PYOPENSSL + + return IS_PYOPENSSL + + +def get_path_uid(path): + # type: (str) -> int + """ + Return path's uid. + + Does not follow symlinks: + https://github.com/pypa/pip/pull/935#discussion_r5307003 + + Placed this function in compat due to differences on AIX and + Jython, that should eventually go away. + + :raises OSError: When path is a symlink or can't be read. + """ + if hasattr(os, "O_NOFOLLOW"): + fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) + file_uid = os.fstat(fd).st_uid + os.close(fd) + else: # AIX and Jython + # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW + if not os.path.islink(path): + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid + else: + # raise OSError for parity with os.O_NOFOLLOW above + raise OSError(f"{path} is a symlink; Will not return uid for symlinks") + return file_uid + + +# packages in the stdlib that may have installation metadata, but should not be +# considered 'installed'. this theoretically could be determined based on +# dist.location (py27:`sysconfig.get_paths()['stdlib']`, +# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may +# make this ineffective, so hard-coding +stdlib_pkgs = {"python", "wsgiref", "argparse"} + + +# windows detection, covers cpython and ironpython +WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt") diff --git a/venv/Lib/site-packages/pip/_internal/utils/compatibility_tags.py b/venv/Lib/site-packages/pip/_internal/utils/compatibility_tags.py new file mode 100644 index 00000000..14fe51c1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/compatibility_tags.py @@ -0,0 +1,174 @@ +"""Generate and work with PEP 425 Compatibility Tags. 
+""" + +import re +from typing import TYPE_CHECKING, List, Optional, Tuple + +from pip._vendor.packaging.tags import ( + Tag, + compatible_tags, + cpython_tags, + generic_tags, + interpreter_name, + interpreter_version, + mac_platforms, +) + +if TYPE_CHECKING: + from pip._vendor.packaging.tags import PythonVersion + + +_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") + + +def version_info_to_nodot(version_info): + # type: (Tuple[int, ...]) -> str + # Only use up to the first two numbers. + return "".join(map(str, version_info[:2])) + + +def _mac_platforms(arch): + # type: (str) -> List[str] + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + mac_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "macosx", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "macosxcustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + "{}_{}".format(name, arch[len("macosx_") :]) + for arch in mac_platforms(mac_version, actual_arch) + ] + else: + # arch pattern didn't match (?!) + arches = [arch] + return arches + + +def _custom_manylinux_platforms(arch): + # type: (str) -> List[str] + arches = [arch] + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch_prefix == "manylinux2014": + # manylinux1/manylinux2010 wheels run on most manylinux2014 systems + # with the exception of wheels depending on ncurses. PEP 599 states + # manylinux1/manylinux2010 wheels should be considered + # manylinux2014 wheels: + # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels + if arch_suffix in {"i686", "x86_64"}: + arches.append("manylinux2010" + arch_sep + arch_suffix) + arches.append("manylinux1" + arch_sep + arch_suffix) + elif arch_prefix == "manylinux2010": + # manylinux1 wheels run on most manylinux2010 systems with the + # exception of wheels depending on ncurses. 
PEP 571 states + # manylinux1 wheels should be considered manylinux2010 wheels: + # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels + arches.append("manylinux1" + arch_sep + arch_suffix) + return arches + + +def _get_custom_platforms(arch): + # type: (str) -> List[str] + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch.startswith("macosx"): + arches = _mac_platforms(arch) + elif arch_prefix in ["manylinux2014", "manylinux2010"]: + arches = _custom_manylinux_platforms(arch) + else: + arches = [arch] + return arches + + +def _expand_allowed_platforms(platforms): + # type: (Optional[List[str]]) -> Optional[List[str]] + if not platforms: + return None + + seen = set() + result = [] + + for p in platforms: + if p in seen: + continue + additions = [c for c in _get_custom_platforms(p) if c not in seen] + seen.update(additions) + result.extend(additions) + + return result + + +def _get_python_version(version): + # type: (str) -> PythonVersion + if len(version) > 1: + return int(version[0]), int(version[1:]) + else: + return (int(version[0]),) + + +def _get_custom_interpreter(implementation=None, version=None): + # type: (Optional[str], Optional[str]) -> str + if implementation is None: + implementation = interpreter_name() + if version is None: + version = interpreter_version() + return f"{implementation}{version}" + + +def get_supported( + version=None, # type: Optional[str] + platforms=None, # type: Optional[List[str]] + impl=None, # type: Optional[str] + abis=None, # type: Optional[List[str]] +): + # type: (...) -> List[Tag] + """Return a list of supported tags for each version specified in + `versions`. + + :param version: a string version, of the form "33" or "32", + or None. The version will be assumed to support our ABI. + :param platform: specify a list of platforms you want valid + tags for, or None. If None, use the local system platform. + :param impl: specify the exact implementation you want valid + tags for, or None. If None, use the local interpreter impl. + :param abis: specify a list of abis you want valid + tags for, or None. If None, use the local interpreter abi. + """ + supported = [] # type: List[Tag] + + python_version = None # type: Optional[PythonVersion] + if version is not None: + python_version = _get_python_version(version) + + interpreter = _get_custom_interpreter(impl, version) + + platforms = _expand_allowed_platforms(platforms) + + is_cpython = (impl or interpreter_name()) == "cp" + if is_cpython: + supported.extend( + cpython_tags( + python_version=python_version, + abis=abis, + platforms=platforms, + ) + ) + else: + supported.extend( + generic_tags( + interpreter=interpreter, + abis=abis, + platforms=platforms, + ) + ) + supported.extend( + compatible_tags( + python_version=python_version, + interpreter=interpreter, + platforms=platforms, + ) + ) + + return supported diff --git a/venv/Lib/site-packages/pip/_internal/utils/datetime.py b/venv/Lib/site-packages/pip/_internal/utils/datetime.py new file mode 100644 index 00000000..b638646c --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/datetime.py @@ -0,0 +1,12 @@ +"""For when pip wants to check the date or time. 
+""" + +import datetime + + +def today_is_later_than(year, month, day): + # type: (int, int, int) -> bool + today = datetime.date.today() + given = datetime.date(year, month, day) + + return today > given diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/deprecation.py b/venv/Lib/site-packages/pip/_internal/utils/deprecation.py similarity index 79% rename from env/lib/python2.7/site-packages/pip/_internal/utils/deprecation.py rename to venv/Lib/site-packages/pip/_internal/utils/deprecation.py index b9359bdd..b62b3fb6 100644 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/deprecation.py +++ b/venv/Lib/site-packages/pip/_internal/utils/deprecation.py @@ -1,19 +1,14 @@ """ A module that implements tooling to enable easy warnings about deprecations. """ -from __future__ import absolute_import import logging import warnings +from typing import Any, Optional, TextIO, Type, Union from pip._vendor.packaging.version import parse from pip import __version__ as current_version -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Any, Optional - DEPRECATION_MSG_PREFIX = "DEPRECATION: " @@ -26,21 +21,25 @@ class PipDeprecationWarning(Warning): # Warnings <-> Logging Integration -def _showwarning(message, category, filename, lineno, file=None, line=None): +def _showwarning( + message, # type: Union[Warning, str] + category, # type: Type[Warning] + filename, # type: str + lineno, # type: int + file=None, # type: Optional[TextIO] + line=None, # type: Optional[str] +): + # type: (...) -> None if file is not None: if _original_showwarning is not None: - _original_showwarning( - message, category, filename, lineno, file, line, - ) + _original_showwarning(message, category, filename, lineno, file, line) elif issubclass(category, PipDeprecationWarning): # We use a specially named logger which will handle all of the # deprecation messages for pip. logger = logging.getLogger("pip._internal.deprecations") logger.warning(message) else: - _original_showwarning( - message, category, filename, lineno, file, line, - ) + _original_showwarning(message, category, filename, lineno, file, line) def install_warning_logger(): @@ -84,10 +83,13 @@ def deprecated(reason, replacement, gone_in, issue=None): (reason, DEPRECATION_MSG_PREFIX + "{}"), (gone_in, "pip {} will remove support for this functionality."), (replacement, "A possible replacement is {}."), - (issue, ( - "You can find discussion regarding this at " - "https://github.com/pypa/pip/issues/{}." - )), + ( + issue, + ( + "You can find discussion regarding this at " + "https://github.com/pypa/pip/issues/{}." 
+ ), + ), ] message = " ".join( template.format(val) for val, template in sentences if val is not None diff --git a/venv/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py b/venv/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py new file mode 100644 index 00000000..eb50ac42 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py @@ -0,0 +1,117 @@ +import json +import logging +from typing import Optional + +from pip._vendor.pkg_resources import Distribution + +from pip._internal.models.direct_url import ( + DIRECT_URL_METADATA_NAME, + ArchiveInfo, + DirectUrl, + DirectUrlValidationError, + DirInfo, + VcsInfo, +) +from pip._internal.models.link import Link +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +def direct_url_as_pep440_direct_reference(direct_url, name): + # type: (DirectUrl, str) -> str + """Convert a DirectUrl to a pip requirement string.""" + direct_url.validate() # if invalid, this is a pip bug + requirement = name + " @ " + fragments = [] + if isinstance(direct_url.info, VcsInfo): + requirement += "{}+{}@{}".format( + direct_url.info.vcs, direct_url.url, direct_url.info.commit_id + ) + elif isinstance(direct_url.info, ArchiveInfo): + requirement += direct_url.url + if direct_url.info.hash: + fragments.append(direct_url.info.hash) + else: + assert isinstance(direct_url.info, DirInfo) + requirement += direct_url.url + if direct_url.subdirectory: + fragments.append("subdirectory=" + direct_url.subdirectory) + if fragments: + requirement += "#" + "&".join(fragments) + return requirement + + +def direct_url_from_link(link, source_dir=None, link_is_in_wheel_cache=False): + # type: (Link, Optional[str], bool) -> DirectUrl + if link.is_vcs: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend + url, requested_revision, _ = vcs_backend.get_url_rev_and_auth( + link.url_without_fragment + ) + # For VCS links, we need to find out and add commit_id. + if link_is_in_wheel_cache: + # If the requested VCS link corresponds to a cached + # wheel, it means the requested revision was an + # immutable commit hash, otherwise it would not have + # been cached. In that case we don't have a source_dir + # with the VCS checkout. + assert requested_revision + commit_id = requested_revision + else: + # If the wheel was not in cache, it means we have + # had to checkout from VCS to build and we have a source_dir + # which we can inspect to find out the commit id. + assert source_dir + commit_id = vcs_backend.get_revision(source_dir) + return DirectUrl( + url=url, + info=VcsInfo( + vcs=vcs_backend.name, + commit_id=commit_id, + requested_revision=requested_revision, + ), + subdirectory=link.subdirectory_fragment, + ) + elif link.is_existing_dir(): + return DirectUrl( + url=link.url_without_fragment, + info=DirInfo(), + subdirectory=link.subdirectory_fragment, + ) + else: + hash = None + hash_name = link.hash_name + if hash_name: + hash = f"{hash_name}={link.hash}" + return DirectUrl( + url=link.url_without_fragment, + info=ArchiveInfo(hash=hash), + subdirectory=link.subdirectory_fragment, + ) + + +def dist_get_direct_url(dist): + # type: (Distribution) -> Optional[DirectUrl] + """Obtain a DirectUrl from a pkg_resource.Distribution. + + Returns None if the distribution has no `direct_url.json` metadata, + or if `direct_url.json` is invalid. 
+ """ + if not dist.has_metadata(DIRECT_URL_METADATA_NAME): + return None + try: + return DirectUrl.from_json(dist.get_metadata(DIRECT_URL_METADATA_NAME)) + except ( + DirectUrlValidationError, + json.JSONDecodeError, + UnicodeDecodeError, + ) as e: + logger.warning( + "Error parsing %s for %s: %s", + DIRECT_URL_METADATA_NAME, + dist.project_name, + e, + ) + return None diff --git a/venv/Lib/site-packages/pip/_internal/utils/distutils_args.py b/venv/Lib/site-packages/pip/_internal/utils/distutils_args.py new file mode 100644 index 00000000..e886c888 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/distutils_args.py @@ -0,0 +1,43 @@ +from distutils.errors import DistutilsArgError +from distutils.fancy_getopt import FancyGetopt +from typing import Dict, List + +_options = [ + ("exec-prefix=", None, ""), + ("home=", None, ""), + ("install-base=", None, ""), + ("install-data=", None, ""), + ("install-headers=", None, ""), + ("install-lib=", None, ""), + ("install-platlib=", None, ""), + ("install-purelib=", None, ""), + ("install-scripts=", None, ""), + ("prefix=", None, ""), + ("root=", None, ""), + ("user", None, ""), +] + + +# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469. +_distutils_getopt = FancyGetopt(_options) # type: ignore + + +def parse_distutils_args(args): + # type: (List[str]) -> Dict[str, str] + """Parse provided arguments, returning an object that has the + matched arguments. + + Any unknown arguments are ignored. + """ + result = {} + for arg in args: + try: + _, match = _distutils_getopt.getopt(args=[arg]) + except DistutilsArgError: + # We don't care about any other options, which here may be + # considered unrecognized since our option list is not + # exhaustive. + pass + else: + result.update(match.__dict__) + return result diff --git a/venv/Lib/site-packages/pip/_internal/utils/encoding.py b/venv/Lib/site-packages/pip/_internal/utils/encoding.py new file mode 100644 index 00000000..7c8893d5 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/encoding.py @@ -0,0 +1,37 @@ +import codecs +import locale +import re +import sys +from typing import List, Tuple + +BOMS = [ + (codecs.BOM_UTF8, "utf-8"), + (codecs.BOM_UTF16, "utf-16"), + (codecs.BOM_UTF16_BE, "utf-16-be"), + (codecs.BOM_UTF16_LE, "utf-16-le"), + (codecs.BOM_UTF32, "utf-32"), + (codecs.BOM_UTF32_BE, "utf-32-be"), + (codecs.BOM_UTF32_LE, "utf-32-le"), +] # type: List[Tuple[bytes, str]] + +ENCODING_RE = re.compile(br"coding[:=]\s*([-\w.]+)") + + +def auto_decode(data): + # type: (bytes) -> str + """Check a bytes string for a BOM to correctly detect the encoding + + Fallback to locale.getpreferredencoding(False) like open() on Python3""" + for bom, encoding in BOMS: + if data.startswith(bom): + return data[len(bom) :].decode(encoding) + # Lets check the first two lines as in PEP263 + for line in data.split(b"\n")[:2]: + if line[0:1] == b"#" and ENCODING_RE.search(line): + result = ENCODING_RE.search(line) + assert result is not None + encoding = result.groups()[0].decode("ascii") + return data.decode(encoding) + return data.decode( + locale.getpreferredencoding(False) or sys.getdefaultencoding(), + ) diff --git a/venv/Lib/site-packages/pip/_internal/utils/entrypoints.py b/venv/Lib/site-packages/pip/_internal/utils/entrypoints.py new file mode 100644 index 00000000..879bf21a --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/entrypoints.py @@ -0,0 +1,28 @@ +import sys +from typing import List, Optional + +from pip._internal.cli.main import main + + +def 
_wrapper(args=None): + # type: (Optional[List[str]]) -> int + """Central wrapper for all old entrypoints. + + Historically pip has had several entrypoints defined. Because of issues + arising from PATH, sys.path, multiple Pythons, their interactions, and most + of them having a pip installed, users suffer every time an entrypoint gets + moved. + + To alleviate this pain, and provide a mechanism for warning users and + directing them to an appropriate place for help, we now define all of + our old entrypoints as wrappers for the current one. + """ + sys.stderr.write( + "WARNING: pip is being invoked by an old script wrapper. This will " + "fail in a future version of pip.\n" + "Please see https://github.com/pypa/pip/issues/5599 for advice on " + "fixing the underlying issue.\n" + "To avoid this problem you can invoke Python with '-m pip' instead of " + "running pip directly.\n" + ) + return main(args) diff --git a/venv/Lib/site-packages/pip/_internal/utils/filesystem.py b/venv/Lib/site-packages/pip/_internal/utils/filesystem.py new file mode 100644 index 00000000..3db97dc4 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/filesystem.py @@ -0,0 +1,193 @@ +import fnmatch +import os +import os.path +import random +import shutil +import stat +import sys +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any, BinaryIO, Iterator, List, Union, cast + +from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed + +from pip._internal.utils.compat import get_path_uid +from pip._internal.utils.misc import format_size + + +def check_path_owner(path): + # type: (str) -> bool + # If we don't have a way to check the effective uid of this process, then + # we'll just assume that we own the directory. + if sys.platform == "win32" or not hasattr(os, "geteuid"): + return True + + assert os.path.isabs(path) + + previous = None + while path != previous: + if os.path.lexists(path): + # Check if path is writable by current user. + if os.geteuid() == 0: + # Special handling for root user in order to handle properly + # cases where users use sudo without -H flag. + try: + path_uid = get_path_uid(path) + except OSError: + return False + return path_uid == 0 + else: + return os.access(path, os.W_OK) + else: + previous, path = path, os.path.dirname(path) + return False # assume we don't own the path + + +def copy2_fixed(src, dest): + # type: (str, str) -> None + """Wrap shutil.copy2() but map errors copying socket files to + SpecialFileError as expected. + + See also https://bugs.python.org/issue37700. + """ + try: + shutil.copy2(src, dest) + except OSError: + for f in [src, dest]: + try: + is_socket_file = is_socket(f) + except OSError: + # An error has already occurred. Another error here is not + # a problem and we can ignore it. + pass + else: + if is_socket_file: + raise shutil.SpecialFileError(f"`{f}` is a socket") + + raise + + +def is_socket(path): + # type: (str) -> bool + return stat.S_ISSOCK(os.lstat(path).st_mode) + + +@contextmanager +def adjacent_tmp_file(path, **kwargs): + # type: (str, **Any) -> Iterator[BinaryIO] + """Return a file-like object pointing to a tmp file next to path. + + The file is created securely and is ensured to be written to disk + after the context reaches its end. + + kwargs will be passed to tempfile.NamedTemporaryFile to control + the way the temporary file will be opened. 
+ """ + with NamedTemporaryFile( + delete=False, + dir=os.path.dirname(path), + prefix=os.path.basename(path), + suffix=".tmp", + **kwargs, + ) as f: + result = cast(BinaryIO, f) + try: + yield result + finally: + result.flush() + os.fsync(result.fileno()) + + +# Tenacity raises RetryError by default, explictly raise the original exception +_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25)) + +replace = _replace_retry(os.replace) + + +# test_writable_dir and _test_writable_dir_win are copied from Flit, +# with the author's agreement to also place them under pip's license. +def test_writable_dir(path): + # type: (str) -> bool + """Check if a directory is writable. + + Uses os.access() on POSIX, tries creating files on Windows. + """ + # If the directory doesn't exist, find the closest parent that does. + while not os.path.isdir(path): + parent = os.path.dirname(path) + if parent == path: + break # Should never get here, but infinite loops are bad + path = parent + + if os.name == "posix": + return os.access(path, os.W_OK) + + return _test_writable_dir_win(path) + + +def _test_writable_dir_win(path): + # type: (str) -> bool + # os.access doesn't work on Windows: http://bugs.python.org/issue2528 + # and we can't use tempfile: http://bugs.python.org/issue22107 + basename = "accesstest_deleteme_fishfingers_custard_" + alphabet = "abcdefghijklmnopqrstuvwxyz0123456789" + for _ in range(10): + name = basename + "".join(random.choice(alphabet) for _ in range(6)) + file = os.path.join(path, name) + try: + fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) + except FileExistsError: + pass + except PermissionError: + # This could be because there's a directory with the same name. + # But it's highly unlikely there's a directory called that, + # so we'll assume it's because the parent dir is not writable. + # This could as well be because the parent dir is not readable, + # due to non-privileged user access. + return False + else: + os.close(fd) + os.unlink(file) + return True + + # This should never be reached + raise OSError("Unexpected condition testing for writable directory") + + +def find_files(path, pattern): + # type: (str, str) -> List[str] + """Returns a list of absolute paths of files beneath path, recursively, + with filenames which match the UNIX-style shell glob pattern.""" + result = [] # type: List[str] + for root, _, files in os.walk(path): + matches = fnmatch.filter(files, pattern) + result.extend(os.path.join(root, f) for f in matches) + return result + + +def file_size(path): + # type: (str) -> Union[int, float] + # If it's a symlink, return 0. + if os.path.islink(path): + return 0 + return os.path.getsize(path) + + +def format_file_size(path): + # type: (str) -> str + return format_size(file_size(path)) + + +def directory_size(path): + # type: (str) -> Union[int, float] + size = 0.0 + for root, _dirs, files in os.walk(path): + for filename in files: + file_path = os.path.join(root, filename) + size += file_size(file_path) + return size + + +def format_directory_size(path): + # type: (str) -> str + return format_size(directory_size(path)) diff --git a/venv/Lib/site-packages/pip/_internal/utils/filetypes.py b/venv/Lib/site-packages/pip/_internal/utils/filetypes.py new file mode 100644 index 00000000..da935846 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/filetypes.py @@ -0,0 +1,28 @@ +"""Filetype information. 
+""" + +from typing import Tuple + +from pip._internal.utils.misc import splitext + +WHEEL_EXTENSION = ".whl" +BZ2_EXTENSIONS = (".tar.bz2", ".tbz") # type: Tuple[str, ...] +XZ_EXTENSIONS = ( + ".tar.xz", + ".txz", + ".tlz", + ".tar.lz", + ".tar.lzma", +) # type: Tuple[str, ...] +ZIP_EXTENSIONS = (".zip", WHEEL_EXTENSION) # type: Tuple[str, ...] +TAR_EXTENSIONS = (".tar.gz", ".tgz", ".tar") # type: Tuple[str, ...] +ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS + + +def is_archive_file(name): + # type: (str) -> bool + """Return True if `name` is a considered as an archive file.""" + ext = splitext(name)[1].lower() + if ext in ARCHIVE_EXTENSIONS: + return True + return False diff --git a/venv/Lib/site-packages/pip/_internal/utils/glibc.py b/venv/Lib/site-packages/pip/_internal/utils/glibc.py new file mode 100644 index 00000000..1c9ff354 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/glibc.py @@ -0,0 +1,92 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import os +import sys +from typing import Optional, Tuple + + +def glibc_version_string(): + # type: () -> Optional[str] + "Returns glibc version string, or None if not using glibc." + return glibc_version_string_confstr() or glibc_version_string_ctypes() + + +def glibc_version_string_confstr(): + # type: () -> Optional[str] + "Primary implementation of glibc_version_string using os.confstr." + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module: + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + if sys.platform == "win32": + return None + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": + _, version = os.confstr("CS_GNU_LIBC_VERSION").split() + except (AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def glibc_version_string_ctypes(): + # type: () -> Optional[str] + "Fallback implementation of glibc_version_string using ctypes." + + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + process_namespace = ctypes.CDLL(None) + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# platform.libc_ver regularly returns completely nonsensical glibc +# versions. E.g. 
on my computer, platform says: +# +# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.7') +# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.9') +# +# But the truth is: +# +# ~$ ldd --version +# ldd (Debian GLIBC 2.22-11) 2.22 +# +# This is unfortunate, because it means that the linehaul data on libc +# versions that was generated by pip 8.1.2 and earlier is useless and +# misleading. Solution: instead of using platform, use our code that actually +# works. +def libc_ver(): + # type: () -> Tuple[str, str] + """Try to determine the glibc version + + Returns a tuple of strings (lib, version) which default to empty strings + in case the lookup fails. + """ + glibc_version = glibc_version_string() + if glibc_version is None: + return ("", "") + else: + return ("glibc", glibc_version) diff --git a/venv/Lib/site-packages/pip/_internal/utils/hashes.py b/venv/Lib/site-packages/pip/_internal/utils/hashes.py new file mode 100644 index 00000000..3d20b8d0 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/hashes.py @@ -0,0 +1,165 @@ +import hashlib +from typing import TYPE_CHECKING, BinaryIO, Dict, Iterator, List + +from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError +from pip._internal.utils.misc import read_chunks + +if TYPE_CHECKING: + from hashlib import _Hash + + # NoReturn introduced in 3.6.2; imported only for type checking to maintain + # pip compatibility with older patch versions of Python 3.6 + from typing import NoReturn + + +# The recommended hash algo of the moment. Change this whenever the state of +# the art changes; it won't hurt backward compatibility. +FAVORITE_HASH = "sha256" + + +# Names of hashlib algorithms allowed by the --hash option and ``pip hash`` +# Currently, those are the ones at least as collision-resistant as sha256. +STRONG_HASHES = ["sha256", "sha384", "sha512"] + + +class Hashes: + """A wrapper that builds multiple hashes at once and checks them against + known-good values + + """ + + def __init__(self, hashes=None): + # type: (Dict[str, List[str]]) -> None + """ + :param hashes: A dict of algorithm names pointing to lists of allowed + hex digests + """ + allowed = {} + if hashes is not None: + for alg, keys in hashes.items(): + # Make sure values are always sorted (to ease equality checks) + allowed[alg] = sorted(keys) + self._allowed = allowed + + def __and__(self, other): + # type: (Hashes) -> Hashes + if not isinstance(other, Hashes): + return NotImplemented + + # If either of the Hashes object is entirely empty (i.e. no hash + # specified at all), all hashes from the other object are allowed. + if not other: + return self + if not self: + return other + + # Otherwise only hashes that present in both objects are allowed. + new = {} + for alg, values in other._allowed.items(): + if alg not in self._allowed: + continue + new[alg] = [v for v in values if v in self._allowed[alg]] + return Hashes(new) + + @property + def digest_count(self): + # type: () -> int + return sum(len(digests) for digests in self._allowed.values()) + + def is_hash_allowed( + self, + hash_name, # type: str + hex_digest, # type: str + ): + # type: (...) -> bool + """Return whether the given hex digest is allowed.""" + return hex_digest in self._allowed.get(hash_name, []) + + def check_against_chunks(self, chunks): + # type: (Iterator[bytes]) -> None + """Check good hashes against ones built from iterable of chunks of + data. + + Raise HashMismatch if none match. 
+ + """ + gots = {} + for hash_name in self._allowed.keys(): + try: + gots[hash_name] = hashlib.new(hash_name) + except (ValueError, TypeError): + raise InstallationError(f"Unknown hash name: {hash_name}") + + for chunk in chunks: + for hash in gots.values(): + hash.update(chunk) + + for hash_name, got in gots.items(): + if got.hexdigest() in self._allowed[hash_name]: + return + self._raise(gots) + + def _raise(self, gots): + # type: (Dict[str, _Hash]) -> NoReturn + raise HashMismatch(self._allowed, gots) + + def check_against_file(self, file): + # type: (BinaryIO) -> None + """Check good hashes against a file-like object + + Raise HashMismatch if none match. + + """ + return self.check_against_chunks(read_chunks(file)) + + def check_against_path(self, path): + # type: (str) -> None + with open(path, "rb") as file: + return self.check_against_file(file) + + def __nonzero__(self): + # type: () -> bool + """Return whether I know any known-good hashes.""" + return bool(self._allowed) + + def __bool__(self): + # type: () -> bool + return self.__nonzero__() + + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, Hashes): + return NotImplemented + return self._allowed == other._allowed + + def __hash__(self): + # type: () -> int + return hash( + ",".join( + sorted( + ":".join((alg, digest)) + for alg, digest_list in self._allowed.items() + for digest in digest_list + ) + ) + ) + + +class MissingHashes(Hashes): + """A workalike for Hashes used when we're missing a hash for a requirement + + It computes the actual hash of the requirement and raises a HashMissing + exception showing it to the user. + + """ + + def __init__(self): + # type: () -> None + """Don't offer the ``hashes`` kwarg.""" + # Pass our favorite hash in to generate a "gotten hash". With the + # empty list, it will never match, so an error will always raise. + super().__init__(hashes={FAVORITE_HASH: []}) + + def _raise(self, gots): + # type: (Dict[str, _Hash]) -> NoReturn + raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/venv/Lib/site-packages/pip/_internal/utils/inject_securetransport.py b/venv/Lib/site-packages/pip/_internal/utils/inject_securetransport.py new file mode 100644 index 00000000..b6863d93 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/inject_securetransport.py @@ -0,0 +1,36 @@ +"""A helper module that injects SecureTransport, on import. + +The import should be done as early as possible, to ensure all requests and +sessions (or whatever) are created after injecting SecureTransport. + +Note that we only do the injection on macOS, when the linked OpenSSL is too +old to handle TLSv1.2. 
+""" + +import sys + + +def inject_securetransport(): + # type: () -> None + # Only relevant on macOS + if sys.platform != "darwin": + return + + try: + import ssl + except ImportError: + return + + # Checks for OpenSSL 1.0.1 + if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F: + return + + try: + from pip._vendor.urllib3.contrib import securetransport + except (ImportError, OSError): + return + + securetransport.inject_into_urllib3() + + +inject_securetransport() diff --git a/venv/Lib/site-packages/pip/_internal/utils/logging.py b/venv/Lib/site-packages/pip/_internal/utils/logging.py new file mode 100644 index 00000000..45798d54 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/logging.py @@ -0,0 +1,387 @@ +import contextlib +import errno +import logging +import logging.handlers +import os +import sys +from logging import Filter, getLogger +from typing import IO, Any, Callable, Iterator, Optional, TextIO, Type, cast + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX +from pip._internal.utils.misc import ensure_dir + +try: + import threading +except ImportError: + import dummy_threading as threading # type: ignore + + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + + +_log_state = threading.local() +subprocess_logger = getLogger("pip.subprocessor") + + +class BrokenStdoutLoggingError(Exception): + """ + Raised if BrokenPipeError occurs for the stdout stream while logging. + """ + + pass + + +# BrokenPipeError manifests differently in Windows and non-Windows. +if WINDOWS: + # In Windows, a broken pipe can show up as EINVAL rather than EPIPE: + # https://bugs.python.org/issue19612 + # https://bugs.python.org/issue30418 + def _is_broken_pipe_error(exc_class, exc): + # type: (Type[BaseException], BaseException) -> bool + """See the docstring for non-Windows below.""" + return (exc_class is BrokenPipeError) or ( + isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE) + ) + + +else: + # Then we are in the non-Windows case. + def _is_broken_pipe_error(exc_class, exc): + # type: (Type[BaseException], BaseException) -> bool + """ + Return whether an exception is a broken pipe error. + + Args: + exc_class: an exception class. + exc: an exception instance. + """ + return exc_class is BrokenPipeError + + +@contextlib.contextmanager +def indent_log(num=2): + # type: (int) -> Iterator[None] + """ + A context manager which will cause the log output to be indented for any + log messages emitted inside it. + """ + # For thread-safety + _log_state.indentation = get_indentation() + _log_state.indentation += num + try: + yield + finally: + _log_state.indentation -= num + + +def get_indentation(): + # type: () -> int + return getattr(_log_state, "indentation", 0) + + +class IndentingFormatter(logging.Formatter): + default_time_format = "%Y-%m-%dT%H:%M:%S" + + def __init__( + self, + *args, # type: Any + add_timestamp=False, # type: bool + **kwargs, # type: Any + ): + # type: (...) -> None + """ + A logging.Formatter that obeys the indent_log() context manager. + + :param add_timestamp: A bool indicating output lines should be prefixed + with their record's timestamp. 
+ """ + self.add_timestamp = add_timestamp + super().__init__(*args, **kwargs) + + def get_message_start(self, formatted, levelno): + # type: (str, int) -> str + """ + Return the start of the formatted log message (not counting the + prefix to add to each line). + """ + if levelno < logging.WARNING: + return "" + if formatted.startswith(DEPRECATION_MSG_PREFIX): + # Then the message already has a prefix. We don't want it to + # look like "WARNING: DEPRECATION: ...." + return "" + if levelno < logging.ERROR: + return "WARNING: " + + return "ERROR: " + + def format(self, record): + # type: (logging.LogRecord) -> str + """ + Calls the standard formatter, but will indent all of the log message + lines by our current indentation level. + """ + formatted = super().format(record) + message_start = self.get_message_start(formatted, record.levelno) + formatted = message_start + formatted + + prefix = "" + if self.add_timestamp: + prefix = f"{self.formatTime(record)} " + prefix += " " * get_indentation() + formatted = "".join([prefix + line for line in formatted.splitlines(True)]) + return formatted + + +def _color_wrap(*colors): + # type: (*str) -> Callable[[str], str] + def wrapped(inp): + # type: (str) -> str + return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) + + return wrapped + + +class ColorizedStreamHandler(logging.StreamHandler): + + # Don't build up a list of colors if we don't have colorama + if colorama: + COLORS = [ + # This needs to be in order from highest logging level to lowest. + (logging.ERROR, _color_wrap(colorama.Fore.RED)), + (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)), + ] + else: + COLORS = [] + + def __init__(self, stream=None, no_color=None): + # type: (Optional[TextIO], bool) -> None + super().__init__(stream) + self._no_color = no_color + + if WINDOWS and colorama: + self.stream = colorama.AnsiToWin32(self.stream) + + def _using_stdout(self): + # type: () -> bool + """ + Return whether the handler is using sys.stdout. + """ + if WINDOWS and colorama: + # Then self.stream is an AnsiToWin32 object. + stream = cast(colorama.AnsiToWin32, self.stream) + return stream.wrapped is sys.stdout + + return self.stream is sys.stdout + + def should_color(self): + # type: () -> bool + # Don't colorize things if we do not have colorama or if told not to + if not colorama or self._no_color: + return False + + real_stream = ( + self.stream + if not isinstance(self.stream, colorama.AnsiToWin32) + else self.stream.wrapped + ) + + # If the stream is a tty we should color it + if hasattr(real_stream, "isatty") and real_stream.isatty(): + return True + + # If we have an ANSI term we should color it + if os.environ.get("TERM") == "ANSI": + return True + + # If anything else we should not color it + return False + + def format(self, record): + # type: (logging.LogRecord) -> str + msg = super().format(record) + + if self.should_color(): + for level, color in self.COLORS: + if record.levelno >= level: + msg = color(msg) + break + + return msg + + # The logging module says handleError() can be customized. + def handleError(self, record): + # type: (logging.LogRecord) -> None + exc_class, exc = sys.exc_info()[:2] + # If a broken pipe occurred while calling write() or flush() on the + # stdout stream in logging's Handler.emit(), then raise our special + # exception so we can handle it in main() instead of logging the + # broken pipe error and continuing. 
+ if ( + exc_class + and exc + and self._using_stdout() + and _is_broken_pipe_error(exc_class, exc) + ): + raise BrokenStdoutLoggingError() + + return super().handleError(record) + + +class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): + def _open(self): + # type: () -> IO[Any] + ensure_dir(os.path.dirname(self.baseFilename)) + return super()._open() + + +class MaxLevelFilter(Filter): + def __init__(self, level): + # type: (int) -> None + self.level = level + + def filter(self, record): + # type: (logging.LogRecord) -> bool + return record.levelno < self.level + + +class ExcludeLoggerFilter(Filter): + + """ + A logging Filter that excludes records from a logger (or its children). + """ + + def filter(self, record): + # type: (logging.LogRecord) -> bool + # The base Filter class allows only records from a logger (or its + # children). + return not super().filter(record) + + +def setup_logging(verbosity, no_color, user_log_file): + # type: (int, bool, Optional[str]) -> int + """Configures and sets up all of the logging + + Returns the requested logging level, as its integer value. + """ + + # Determine the level to be logging at. + if verbosity >= 1: + level = "DEBUG" + elif verbosity == -1: + level = "WARNING" + elif verbosity == -2: + level = "ERROR" + elif verbosity <= -3: + level = "CRITICAL" + else: + level = "INFO" + + level_number = getattr(logging, level) + + # The "root" logger should match the "console" level *unless* we also need + # to log to a user log file. + include_user_log = user_log_file is not None + if include_user_log: + additional_log_file = user_log_file + root_level = "DEBUG" + else: + additional_log_file = "/dev/null" + root_level = level + + # Disable any logging besides WARNING unless we have DEBUG level logging + # enabled for vendored libraries. + vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" + + # Shorthands for clarity + log_streams = { + "stdout": "ext://sys.stdout", + "stderr": "ext://sys.stderr", + } + handler_classes = { + "stream": "pip._internal.utils.logging.ColorizedStreamHandler", + "file": "pip._internal.utils.logging.BetterRotatingFileHandler", + } + handlers = ["console", "console_errors", "console_subprocess"] + ( + ["user_log"] if include_user_log else [] + ) + + logging.config.dictConfig( + { + "version": 1, + "disable_existing_loggers": False, + "filters": { + "exclude_warnings": { + "()": "pip._internal.utils.logging.MaxLevelFilter", + "level": logging.WARNING, + }, + "restrict_to_subprocess": { + "()": "logging.Filter", + "name": subprocess_logger.name, + }, + "exclude_subprocess": { + "()": "pip._internal.utils.logging.ExcludeLoggerFilter", + "name": subprocess_logger.name, + }, + }, + "formatters": { + "indent": { + "()": IndentingFormatter, + "format": "%(message)s", + }, + "indent_with_timestamp": { + "()": IndentingFormatter, + "format": "%(message)s", + "add_timestamp": True, + }, + }, + "handlers": { + "console": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stdout"], + "filters": ["exclude_subprocess", "exclude_warnings"], + "formatter": "indent", + }, + "console_errors": { + "level": "WARNING", + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["exclude_subprocess"], + "formatter": "indent", + }, + # A handler responsible for logging to the console messages + # from the "subprocessor" logger. 
+ "console_subprocess": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["restrict_to_subprocess"], + "formatter": "indent", + }, + "user_log": { + "level": "DEBUG", + "class": handler_classes["file"], + "filename": additional_log_file, + "delay": True, + "formatter": "indent_with_timestamp", + }, + }, + "root": { + "level": root_level, + "handlers": handlers, + }, + "loggers": {"pip._vendor": {"level": vendored_log_level}}, + } + ) + + return level_number diff --git a/venv/Lib/site-packages/pip/_internal/utils/misc.py b/venv/Lib/site-packages/pip/_internal/utils/misc.py new file mode 100644 index 00000000..a4ad35be --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/misc.py @@ -0,0 +1,821 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import contextlib +import errno +import getpass +import hashlib +import io +import logging +import os +import posixpath +import shutil +import stat +import sys +import urllib.parse +from io import StringIO +from itertools import filterfalse, tee, zip_longest +from types import TracebackType +from typing import ( + Any, + AnyStr, + BinaryIO, + Callable, + Container, + ContextManager, + Iterable, + Iterator, + List, + Optional, + TextIO, + Tuple, + Type, + TypeVar, + cast, +) + +from pip._vendor.pkg_resources import Distribution +from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed + +from pip import __version__ +from pip._internal.exceptions import CommandError +from pip._internal.locations import get_major_minor_version, site_packages, user_site +from pip._internal.utils.compat import WINDOWS, stdlib_pkgs +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) + +__all__ = [ + "rmtree", + "display_path", + "backup_dir", + "ask", + "splitext", + "format_size", + "is_installable_dir", + "normalize_path", + "renames", + "get_prog", + "captured_stdout", + "ensure_dir", + "remove_auth_from_url", +] + + +logger = logging.getLogger(__name__) + +T = TypeVar("T") +ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] +VersionInfo = Tuple[int, int, int] +NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]] + + +def get_pip_version(): + # type: () -> str + pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") + pip_pkg_dir = os.path.abspath(pip_pkg_dir) + + return "pip {} from {} (python {})".format( + __version__, + pip_pkg_dir, + get_major_minor_version(), + ) + + +def normalize_version_info(py_version_info): + # type: (Tuple[int, ...]) -> Tuple[int, int, int] + """ + Convert a tuple of ints representing a Python version to one of length + three. + + :param py_version_info: a tuple of ints representing a Python version, + or None to specify no version. The tuple can have any length. + + :return: a tuple of length three if `py_version_info` is non-None. + Otherwise, return `py_version_info` unchanged (i.e. None). + """ + if len(py_version_info) < 3: + py_version_info += (3 - len(py_version_info)) * (0,) + elif len(py_version_info) > 3: + py_version_info = py_version_info[:3] + + return cast("VersionInfo", py_version_info) + + +def ensure_dir(path): + # type: (AnyStr) -> None + """os.path.makedirs without EEXIST.""" + try: + os.makedirs(path) + except OSError as e: + # Windows can raise spurious ENOTEMPTY errors. See #6426. 
+ if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: + raise + + +def get_prog(): + # type: () -> str + try: + prog = os.path.basename(sys.argv[0]) + if prog in ("__main__.py", "-c"): + return f"{sys.executable} -m pip" + else: + return prog + except (AttributeError, TypeError, IndexError): + pass + return "pip" + + +# Retry every half second for up to 3 seconds +# Tenacity raises RetryError by default, explictly raise the original exception +@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5)) +def rmtree(dir, ignore_errors=False): + # type: (AnyStr, bool) -> None + shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) + + +def rmtree_errorhandler(func, path, exc_info): + # type: (Callable[..., Any], str, ExcInfo) -> None + """On Windows, the files in .svn are read-only, so when rmtree() tries to + remove them, an exception is thrown. We catch that here, remove the + read-only attribute, and hopefully continue without problems.""" + try: + has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) + except OSError: + # it's equivalent to os.path.exists + return + + if has_attr_readonly: + # convert to read/write + os.chmod(path, stat.S_IWRITE) + # use the original function to repeat the operation + func(path) + return + else: + raise + + +def display_path(path): + # type: (str) -> str + """Gives the display value for a given path, making it relative to cwd + if possible.""" + path = os.path.normcase(os.path.abspath(path)) + if path.startswith(os.getcwd() + os.path.sep): + path = "." + path[len(os.getcwd()) :] + return path + + +def backup_dir(dir, ext=".bak"): + # type: (str, str) -> str + """Figure out the name of a directory to back up the given dir to + (adding .bak, .bak2, etc)""" + n = 1 + extension = ext + while os.path.exists(dir + extension): + n += 1 + extension = ext + str(n) + return dir + extension + + +def ask_path_exists(message, options): + # type: (str, Iterable[str]) -> str + for action in os.environ.get("PIP_EXISTS_ACTION", "").split(): + if action in options: + return action + return ask(message, options) + + +def _check_no_input(message): + # type: (str) -> None + """Raise an error if no input is allowed.""" + if os.environ.get("PIP_NO_INPUT"): + raise Exception( + f"No input was expected ($PIP_NO_INPUT set); question: {message}" + ) + + +def ask(message, options): + # type: (str, Iterable[str]) -> str + """Ask the message interactively, with the given possible responses""" + while 1: + _check_no_input(message) + response = input(message) + response = response.strip().lower() + if response not in options: + print( + "Your response ({!r}) was not one of the expected responses: " + "{}".format(response, ", ".join(options)) + ) + else: + return response + + +def ask_input(message): + # type: (str) -> str + """Ask for input interactively.""" + _check_no_input(message) + return input(message) + + +def ask_password(message): + # type: (str) -> str + """Ask for a password interactively.""" + _check_no_input(message) + return getpass.getpass(message) + + +def strtobool(val): + # type: (str) -> int + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. 
+ """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return 1 + elif val in ("n", "no", "f", "false", "off", "0"): + return 0 + else: + raise ValueError(f"invalid truth value {val!r}") + + +def format_size(bytes): + # type: (float) -> str + if bytes > 1000 * 1000: + return "{:.1f} MB".format(bytes / 1000.0 / 1000) + elif bytes > 10 * 1000: + return "{} kB".format(int(bytes / 1000)) + elif bytes > 1000: + return "{:.1f} kB".format(bytes / 1000.0) + else: + return "{} bytes".format(int(bytes)) + + +def tabulate(rows): + # type: (Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]] + """Return a list of formatted rows and a list of column sizes. + + For example:: + + >>> tabulate([['foobar', 2000], [0xdeadbeef]]) + (['foobar 2000', '3735928559'], [10, 4]) + """ + rows = [tuple(map(str, row)) for row in rows] + sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")] + table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows] + return table, sizes + + +def is_installable_dir(path: str) -> bool: + """Is path is a directory containing pyproject.toml, setup.cfg or setup.py?""" + if not os.path.isdir(path): + return False + return any( + os.path.isfile(os.path.join(path, signifier)) + for signifier in ("pyproject.toml", "setup.cfg", "setup.py") + ) + + +def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): + # type: (BinaryIO, int) -> Iterator[bytes] + """Yield pieces of data from a file-like object until EOF.""" + while True: + chunk = file.read(size) + if not chunk: + break + yield chunk + + +def normalize_path(path, resolve_symlinks=True): + # type: (str, bool) -> str + """ + Convert a path to its canonical, case-normalized, absolute version. + + """ + path = os.path.expanduser(path) + if resolve_symlinks: + path = os.path.realpath(path) + else: + path = os.path.abspath(path) + return os.path.normcase(path) + + +def splitext(path): + # type: (str) -> Tuple[str, str] + """Like os.path.splitext, but take off .tar too""" + base, ext = posixpath.splitext(path) + if base.lower().endswith(".tar"): + ext = base[-4:] + ext + base = base[:-4] + return base, ext + + +def renames(old, new): + # type: (str, str) -> None + """Like os.renames(), but handles renaming across devices.""" + # Implementation borrowed from os.renames(). + head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + + shutil.move(old, new) + + head, tail = os.path.split(old) + if head and tail: + try: + os.removedirs(head) + except OSError: + pass + + +def is_local(path): + # type: (str) -> bool + """ + Return True if path is within sys.prefix, if we're running in a virtualenv. + + If we're not in a virtualenv, all paths are considered "local." + + Caution: this function assumes the head of path has been normalized + with normalize_path. + """ + if not running_under_virtualenv(): + return True + return path.startswith(normalize_path(sys.prefix)) + + +def dist_is_local(dist): + # type: (Distribution) -> bool + """ + Return True if given Distribution object is installed locally + (i.e. within current virtualenv). + + Always True if we're not in a virtualenv. + + """ + return is_local(dist_location(dist)) + + +def dist_in_usersite(dist): + # type: (Distribution) -> bool + """ + Return True if given Distribution is installed in user site. 
+ """ + return dist_location(dist).startswith(normalize_path(user_site)) + + +def dist_in_site_packages(dist): + # type: (Distribution) -> bool + """ + Return True if given Distribution is installed in + sysconfig.get_python_lib(). + """ + return dist_location(dist).startswith(normalize_path(site_packages)) + + +def dist_is_editable(dist): + # type: (Distribution) -> bool + """ + Return True if given Distribution is an editable install. + """ + for path_item in sys.path: + egg_link = os.path.join(path_item, dist.project_name + ".egg-link") + if os.path.isfile(egg_link): + return True + return False + + +def get_installed_distributions( + local_only=True, # type: bool + skip=stdlib_pkgs, # type: Container[str] + include_editables=True, # type: bool + editables_only=False, # type: bool + user_only=False, # type: bool + paths=None, # type: Optional[List[str]] +): + # type: (...) -> List[Distribution] + """Return a list of installed Distribution objects. + + Left for compatibility until direct pkg_resources uses are refactored out. + """ + from pip._internal.metadata import get_default_environment, get_environment + from pip._internal.metadata.pkg_resources import Distribution as _Dist + + if paths is None: + env = get_default_environment() + else: + env = get_environment(paths) + dists = env.iter_installed_distributions( + local_only=local_only, + skip=skip, + include_editables=include_editables, + editables_only=editables_only, + user_only=user_only, + ) + return [cast(_Dist, dist)._dist for dist in dists] + + +def get_distribution(req_name): + # type: (str) -> Optional[Distribution] + """Given a requirement name, return the installed Distribution object. + + This searches from *all* distributions available in the environment, to + match the behavior of ``pkg_resources.get_distribution()``. + + Left for compatibility until direct pkg_resources uses are refactored out. + """ + from pip._internal.metadata import get_default_environment + from pip._internal.metadata.pkg_resources import Distribution as _Dist + + dist = get_default_environment().get_distribution(req_name) + if dist is None: + return None + return cast(_Dist, dist)._dist + + +def egg_link_path(dist): + # type: (Distribution) -> Optional[str] + """ + Return the path for the .egg-link file if it exists, otherwise, None. + + There's 3 scenarios: + 1) not in a virtualenv + try to find in site.USER_SITE, then site_packages + 2) in a no-global virtualenv + try to find in site_packages + 3) in a yes-global virtualenv + try to find in site_packages, then site.USER_SITE + (don't look in global location) + + For #1 and #3, there could be odd cases, where there's an egg-link in 2 + locations. + + This method will just return the first one found. + """ + sites = [] + if running_under_virtualenv(): + sites.append(site_packages) + if not virtualenv_no_global() and user_site: + sites.append(user_site) + else: + if user_site: + sites.append(user_site) + sites.append(site_packages) + + for site in sites: + egglink = os.path.join(site, dist.project_name) + ".egg-link" + if os.path.isfile(egglink): + return egglink + return None + + +def dist_location(dist): + # type: (Distribution) -> str + """ + Get the site-packages location of this distribution. Generally + this is dist.location, except in the case of develop-installed + packages, where dist.location is the source code location, and we + want to know where the egg-link file is. + + The returned location is normalized (in particular, with symlinks removed). 
+ """ + egg_link = egg_link_path(dist) + if egg_link: + return normalize_path(egg_link) + return normalize_path(dist.location) + + +def write_output(msg, *args): + # type: (Any, Any) -> None + logger.info(msg, *args) + + +class StreamWrapper(StringIO): + orig_stream = None # type: TextIO + + @classmethod + def from_stream(cls, orig_stream): + # type: (TextIO) -> StreamWrapper + cls.orig_stream = orig_stream + return cls() + + # compileall.compile_dir() needs stdout.encoding to print to stdout + # https://github.com/python/mypy/issues/4125 + @property + def encoding(self): # type: ignore + return self.orig_stream.encoding + + +@contextlib.contextmanager +def captured_output(stream_name): + # type: (str) -> Iterator[StreamWrapper] + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO. + + Taken from Lib/support/__init__.py in the CPython repo. + """ + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + + +def captured_stdout(): + # type: () -> ContextManager[StreamWrapper] + """Capture the output of sys.stdout: + + with captured_stdout() as stdout: + print('hello') + self.assertEqual(stdout.getvalue(), 'hello\n') + + Taken from Lib/support/__init__.py in the CPython repo. + """ + return captured_output("stdout") + + +def captured_stderr(): + # type: () -> ContextManager[StreamWrapper] + """ + See captured_stdout(). + """ + return captured_output("stderr") + + +# Simulates an enum +def enum(*sequential, **named): + # type: (*Any, **Any) -> Type[Any] + enums = dict(zip(sequential, range(len(sequential))), **named) + reverse = {value: key for key, value in enums.items()} + enums["reverse_mapping"] = reverse + return type("Enum", (), enums) + + +def build_netloc(host, port): + # type: (str, Optional[int]) -> str + """ + Build a netloc from a host-port pair + """ + if port is None: + return host + if ":" in host: + # Only wrap host with square brackets when it is IPv6 + host = f"[{host}]" + return f"{host}:{port}" + + +def build_url_from_netloc(netloc, scheme="https"): + # type: (str, str) -> str + """ + Build a full URL from a netloc. + """ + if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc: + # It must be a bare IPv6 address, so wrap it with brackets. + netloc = f"[{netloc}]" + return f"{scheme}://{netloc}" + + +def parse_netloc(netloc): + # type: (str) -> Tuple[str, Optional[int]] + """ + Return the host-port pair from a netloc. + """ + url = build_url_from_netloc(netloc) + parsed = urllib.parse.urlparse(url) + return parsed.hostname, parsed.port + + +def split_auth_from_netloc(netloc): + # type: (str) -> NetlocTuple + """ + Parse out and remove the auth information from a netloc. + + Returns: (netloc, (username, password)). + """ + if "@" not in netloc: + return netloc, (None, None) + + # Split from the right because that's how urllib.parse.urlsplit() + # behaves if more than one @ is present (which can be checked using + # the password attribute of urlsplit()'s return value). 
+ auth, netloc = netloc.rsplit("@", 1) + pw = None # type: Optional[str] + if ":" in auth: + # Split from the left because that's how urllib.parse.urlsplit() + # behaves if more than one : is present (which again can be checked + # using the password attribute of the return value) + user, pw = auth.split(":", 1) + else: + user, pw = auth, None + + user = urllib.parse.unquote(user) + if pw is not None: + pw = urllib.parse.unquote(pw) + + return netloc, (user, pw) + + +def redact_netloc(netloc): + # type: (str) -> str + """ + Replace the sensitive data in a netloc with "****", if it exists. + + For example: + - "user:pass@example.com" returns "user:****@example.com" + - "accesstoken@example.com" returns "****@example.com" + """ + netloc, (user, password) = split_auth_from_netloc(netloc) + if user is None: + return netloc + if password is None: + user = "****" + password = "" + else: + user = urllib.parse.quote(user) + password = ":****" + return "{user}{password}@{netloc}".format( + user=user, password=password, netloc=netloc + ) + + +def _transform_url(url, transform_netloc): + # type: (str, Callable[[str], Tuple[Any, ...]]) -> Tuple[str, NetlocTuple] + """Transform and replace netloc in a url. + + transform_netloc is a function taking the netloc and returning a + tuple. The first element of this tuple is the new netloc. The + entire tuple is returned. + + Returns a tuple containing the transformed url as item 0 and the + original tuple returned by transform_netloc as item 1. + """ + purl = urllib.parse.urlsplit(url) + netloc_tuple = transform_netloc(purl.netloc) + # stripped url + url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment) + surl = urllib.parse.urlunsplit(url_pieces) + return surl, cast("NetlocTuple", netloc_tuple) + + +def _get_netloc(netloc): + # type: (str) -> NetlocTuple + return split_auth_from_netloc(netloc) + + +def _redact_netloc(netloc): + # type: (str) -> Tuple[str,] + return (redact_netloc(netloc),) + + +def split_auth_netloc_from_url(url): + # type: (str) -> Tuple[str, str, Tuple[str, str]] + """ + Parse a url into separate netloc, auth, and url with no auth. + + Returns: (url_without_auth, netloc, (username, password)) + """ + url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) + return url_without_auth, netloc, auth + + +def remove_auth_from_url(url): + # type: (str) -> str + """Return a copy of url with 'username:password@' removed.""" + # username/pass params are passed to subversion through flags + # and are not recognized in the url. + return _transform_url(url, _get_netloc)[0] + + +def redact_auth_from_url(url): + # type: (str) -> str + """Replace the password in a given url with ****.""" + return _transform_url(url, _redact_netloc)[0] + + +class HiddenText: + def __init__( + self, + secret, # type: str + redacted, # type: str + ): + # type: (...) -> None + self.secret = secret + self.redacted = redacted + + def __repr__(self): + # type: (...) -> str + return "<HiddenText {!r}>".format(str(self)) + + def __str__(self): + # type: (...) -> str + return self.redacted + + # This is useful for testing. + def __eq__(self, other): + # type: (Any) -> bool + if type(self) != type(other): + return False + + # The string being used for redaction doesn't also have to match, + # just the raw, original string.
+ return self.secret == other.secret + + +def hide_value(value): + # type: (str) -> HiddenText + return HiddenText(value, redacted="****") + + +def hide_url(url): + # type: (str) -> HiddenText + redacted = redact_auth_from_url(url) + return HiddenText(url, redacted=redacted) + + +def protect_pip_from_modification_on_windows(modifying_pip): + # type: (bool) -> None + """Protection of pip.exe from modification on Windows + + On Windows, any operation modifying pip should be run as: + python -m pip ... + """ + pip_names = [ + "pip.exe", + "pip{}.exe".format(sys.version_info[0]), + "pip{}.{}.exe".format(*sys.version_info[:2]), + ] + + # See https://github.com/pypa/pip/issues/1299 for more discussion + should_show_use_python_msg = ( + modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names + ) + + if should_show_use_python_msg: + new_command = [sys.executable, "-m", "pip"] + sys.argv[1:] + raise CommandError( + "To modify pip, please run the following command:\n{}".format( + " ".join(new_command) + ) + ) + + +def is_console_interactive(): + # type: () -> bool + """Is this console interactive?""" + return sys.stdin is not None and sys.stdin.isatty() + + +def hash_file(path, blocksize=1 << 20): + # type: (str, int) -> Tuple[Any, int] + """Return (hash, length) for path using hashlib.sha256()""" + + h = hashlib.sha256() + length = 0 + with open(path, "rb") as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + return h, length + + +def is_wheel_installed(): + # type: () -> bool + """ + Return whether the wheel package is installed. + """ + try: + import wheel # noqa: F401 + except ImportError: + return False + + return True + + +def pairwise(iterable): + # type: (Iterable[Any]) -> Iterator[Tuple[Any, Any]] + """ + Return paired elements. + + For example: + s -> (s0, s1), (s2, s3), (s4, s5), ... + """ + iterable = iter(iterable) + return zip_longest(iterable, iterable) + + +def partition( + pred, # type: Callable[[T], bool] + iterable, # type: Iterable[T] +): + # type: (...) 
-> Tuple[Iterable[T], Iterable[T]] + """ + Use a predicate to partition entries into false entries and true entries, + like + + partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9 + """ + t1, t2 = tee(iterable) + return filterfalse(pred, t1), filter(pred, t2) diff --git a/venv/Lib/site-packages/pip/_internal/utils/models.py b/venv/Lib/site-packages/pip/_internal/utils/models.py new file mode 100644 index 00000000..0e02bc7a --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/models.py @@ -0,0 +1,47 @@ +"""Utilities for defining models +""" + +import operator +from typing import Any, Callable, Type + + +class KeyBasedCompareMixin: + """Provides comparison capabilities that is based on a key""" + + __slots__ = ["_compare_key", "_defining_class"] + + def __init__(self, key, defining_class): + # type: (Any, Type[KeyBasedCompareMixin]) -> None + self._compare_key = key + self._defining_class = defining_class + + def __hash__(self): + # type: () -> int + return hash(self._compare_key) + + def __lt__(self, other): + # type: (Any) -> bool + return self._compare(other, operator.__lt__) + + def __le__(self, other): + # type: (Any) -> bool + return self._compare(other, operator.__le__) + + def __gt__(self, other): + # type: (Any) -> bool + return self._compare(other, operator.__gt__) + + def __ge__(self, other): + # type: (Any) -> bool + return self._compare(other, operator.__ge__) + + def __eq__(self, other): + # type: (Any) -> bool + return self._compare(other, operator.__eq__) + + def _compare(self, other, method): + # type: (Any, Callable[[Any, Any], bool]) -> bool + if not isinstance(other, self._defining_class): + return NotImplemented + + return method(self._compare_key, other._compare_key) diff --git a/env/lib/python2.7/site-packages/pip/_internal/utils/packaging.py b/venv/Lib/site-packages/pip/_internal/utils/packaging.py similarity index 75% rename from env/lib/python2.7/site-packages/pip/_internal/utils/packaging.py rename to venv/Lib/site-packages/pip/_internal/utils/packaging.py index 68aa86ed..3f9dbd3b 100644 --- a/env/lib/python2.7/site-packages/pip/_internal/utils/packaging.py +++ b/venv/Lib/site-packages/pip/_internal/utils/packaging.py @@ -1,20 +1,14 @@ -from __future__ import absolute_import - import logging +from email.message import Message from email.parser import FeedParser +from typing import Optional, Tuple from pip._vendor import pkg_resources from pip._vendor.packaging import specifiers, version +from pip._vendor.pkg_resources import Distribution from pip._internal.exceptions import NoneMetadataError from pip._internal.utils.misc import display_path -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Optional, Tuple - from email.message import Message - from pip._vendor.pkg_resources import Distribution - logger = logging.getLogger(__name__) @@ -37,7 +31,7 @@ def check_requires_python(requires_python, version_info): return True requires_python_specifier = specifiers.SpecifierSet(requires_python) - python_version = version.parse('.'.join(map(str, version_info))) + python_version = version.parse(".".join(map(str, version_info))) return python_version in requires_python_specifier @@ -47,16 +41,17 @@ def get_metadata(dist): :raises NoneMetadataError: if the distribution reports `has_metadata()` True but `get_metadata()` returns None. 
""" - metadata_name = 'METADATA' - if (isinstance(dist, pkg_resources.DistInfoDistribution) and - dist.has_metadata(metadata_name)): + metadata_name = "METADATA" + if isinstance(dist, pkg_resources.DistInfoDistribution) and dist.has_metadata( + metadata_name + ): metadata = dist.get_metadata(metadata_name) - elif dist.has_metadata('PKG-INFO'): - metadata_name = 'PKG-INFO' + elif dist.has_metadata("PKG-INFO"): + metadata_name = "PKG-INFO" metadata = dist.get_metadata(metadata_name) else: logger.warning("No metadata found in %s", display_path(dist.location)) - metadata = '' + metadata = "" if metadata is None: raise NoneMetadataError(dist, metadata_name) @@ -75,7 +70,7 @@ def get_requires_python(dist): if not present. """ pkg_info_dict = get_metadata(dist) - requires_python = pkg_info_dict.get('Requires-Python') + requires_python = pkg_info_dict.get("Requires-Python") if requires_python is not None: # Convert to a str to satisfy the type checker, since requires_python @@ -87,8 +82,8 @@ def get_requires_python(dist): def get_installer(dist): # type: (Distribution) -> str - if dist.has_metadata('INSTALLER'): - for line in dist.get_metadata_lines('INSTALLER'): + if dist.has_metadata("INSTALLER"): + for line in dist.get_metadata_lines("INSTALLER"): if line.strip(): return line.strip() - return '' + return "" diff --git a/venv/Lib/site-packages/pip/_internal/utils/parallel.py b/venv/Lib/site-packages/pip/_internal/utils/parallel.py new file mode 100644 index 00000000..de91dc8a --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/parallel.py @@ -0,0 +1,101 @@ +"""Convenient parallelization of higher order functions. + +This module provides two helper functions, with appropriate fallbacks on +Python 2 and on systems lacking support for synchronization mechanisms: + +- map_multiprocess +- map_multithread + +These helpers work like Python 3's map, with two differences: + +- They don't guarantee the order of processing of + the elements of the iterable. +- The underlying process/thread pools chop the iterable into + a number of chunks, so that for very long iterables using + a large value for chunksize can make the job complete much faster + than using the default value of 1. +""" + +__all__ = ["map_multiprocess", "map_multithread"] + +from contextlib import contextmanager +from multiprocessing import Pool as ProcessPool +from multiprocessing import pool +from multiprocessing.dummy import Pool as ThreadPool +from typing import Callable, Iterable, Iterator, TypeVar, Union + +from pip._vendor.requests.adapters import DEFAULT_POOLSIZE + +Pool = Union[pool.Pool, pool.ThreadPool] +S = TypeVar("S") +T = TypeVar("T") + +# On platforms without sem_open, multiprocessing[.dummy] Pool +# cannot be created. +try: + import multiprocessing.synchronize # noqa +except ImportError: + LACK_SEM_OPEN = True +else: + LACK_SEM_OPEN = False + +# Incredibly large timeout to work around bpo-8296 on Python 2. +TIMEOUT = 2000000 + + +@contextmanager +def closing(pool): + # type: (Pool) -> Iterator[Pool] + """Return a context manager making sure the pool closes properly.""" + try: + yield pool + finally: + # For Pool.imap*, close and join are needed + # for the returned iterator to begin yielding. + pool.close() + pool.join() + pool.terminate() + + +def _map_fallback(func, iterable, chunksize=1): + # type: (Callable[[S], T], Iterable[S], int) -> Iterator[T] + """Make an iterator applying func to each element in iterable. 
+ + This function is the sequential fallback either on Python 2 + where Pool.imap* doesn't react to KeyboardInterrupt + or when sem_open is unavailable. + """ + return map(func, iterable) + + +def _map_multiprocess(func, iterable, chunksize=1): + # type: (Callable[[S], T], Iterable[S], int) -> Iterator[T] + """Chop iterable into chunks and submit them to a process pool. + + For very long iterables using a large value for chunksize can make + the job complete much faster than using the default value of 1. + + Return an unordered iterator of the results. + """ + with closing(ProcessPool()) as pool: + return pool.imap_unordered(func, iterable, chunksize) + + +def _map_multithread(func, iterable, chunksize=1): + # type: (Callable[[S], T], Iterable[S], int) -> Iterator[T] + """Chop iterable into chunks and submit them to a thread pool. + + For very long iterables using a large value for chunksize can make + the job complete much faster than using the default value of 1. + + Return an unordered iterator of the results. + """ + with closing(ThreadPool(DEFAULT_POOLSIZE)) as pool: + return pool.imap_unordered(func, iterable, chunksize) + + +if LACK_SEM_OPEN: + map_multiprocess = map_multithread = _map_fallback +else: + map_multiprocess = _map_multiprocess + map_multithread = _map_multithread diff --git a/venv/Lib/site-packages/pip/_internal/utils/pkg_resources.py b/venv/Lib/site-packages/pip/_internal/utils/pkg_resources.py new file mode 100644 index 00000000..ee1eca30 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/pkg_resources.py @@ -0,0 +1,40 @@ +from typing import Dict, Iterable, List + +from pip._vendor.pkg_resources import yield_lines + + +class DictMetadata: + """IMetadataProvider that reads metadata files from a dictionary.""" + + def __init__(self, metadata): + # type: (Dict[str, bytes]) -> None + self._metadata = metadata + + def has_metadata(self, name): + # type: (str) -> bool + return name in self._metadata + + def get_metadata(self, name): + # type: (str) -> str + try: + return self._metadata[name].decode() + except UnicodeDecodeError as e: + # Mirrors handling done in pkg_resources.NullProvider. + e.reason += f" in {name} file" + raise + + def get_metadata_lines(self, name): + # type: (str) -> Iterable[str] + return yield_lines(self.get_metadata(name)) + + def metadata_isdir(self, name): + # type: (str) -> bool + return False + + def metadata_listdir(self, name): + # type: (str) -> List[str] + return [] + + def run_script(self, script_name, namespace): + # type: (str, str) -> None + pass diff --git a/venv/Lib/site-packages/pip/_internal/utils/setuptools_build.py b/venv/Lib/site-packages/pip/_internal/utils/setuptools_build.py new file mode 100644 index 00000000..4b8e4b35 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/setuptools_build.py @@ -0,0 +1,173 @@ +import sys +from typing import List, Optional, Sequence + +# Shim to wrap setup.py invocation with setuptools +# +# We set sys.argv[0] to the path to the underlying setup.py file so +# setuptools / distutils don't take the path to the setup.py to be "-c" when +# invoking via the shim. This avoids e.g. the following manifest_maker +# warning: "warning: manifest_maker: standard file '-c' not found". 
+_SETUPTOOLS_SHIM = ( + "import io, os, sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};" + "f = getattr(tokenize, 'open', open)(__file__) " + "if os.path.exists(__file__) " + "else io.StringIO('from setuptools import setup; setup()');" + "code = f.read().replace('\\r\\n', '\\n');" + "f.close();" + "exec(compile(code, __file__, 'exec'))" +) + + +def make_setuptools_shim_args( + setup_py_path, # type: str + global_options=None, # type: Sequence[str] + no_user_config=False, # type: bool + unbuffered_output=False, # type: bool +): + # type: (...) -> List[str] + """ + Get setuptools command arguments with shim wrapped setup file invocation. + + :param setup_py_path: The path to setup.py to be wrapped. + :param global_options: Additional global options. + :param no_user_config: If True, disables personal user configuration. + :param unbuffered_output: If True, adds the unbuffered switch to the + argument list. + """ + args = [sys.executable] + if unbuffered_output: + args += ["-u"] + args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)] + if global_options: + args += global_options + if no_user_config: + args += ["--no-user-cfg"] + return args + + +def make_setuptools_bdist_wheel_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + build_options, # type: Sequence[str] + destination_dir, # type: str +): + # type: (...) -> List[str] + # NOTE: Eventually, we'd want to also -S to the flags here, when we're + # isolating. Currently, it breaks Python in virtualenvs, because it + # relies on site.py to find parts of the standard library outside the + # virtualenv. + args = make_setuptools_shim_args( + setup_py_path, global_options=global_options, unbuffered_output=True + ) + args += ["bdist_wheel", "-d", destination_dir] + args += build_options + return args + + +def make_setuptools_clean_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] +): + # type: (...) -> List[str] + args = make_setuptools_shim_args( + setup_py_path, global_options=global_options, unbuffered_output=True + ) + args += ["clean", "--all"] + return args + + +def make_setuptools_develop_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + no_user_config, # type: bool + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool +): + # type: (...) -> List[str] + assert not (use_user_site and prefix) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + ) + + args += ["develop", "--no-deps"] + + args += install_options + + if prefix: + args += ["--prefix", prefix] + if home is not None: + args += ["--install-dir", home] + + if use_user_site: + args += ["--user", "--prefix="] + + return args + + +def make_setuptools_egg_info_args( + setup_py_path, # type: str + egg_info_dir, # type: Optional[str] + no_user_config, # type: bool +): + # type: (...) 
-> List[str] + args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config) + + args += ["egg_info"] + + if egg_info_dir: + args += ["--egg-base", egg_info_dir] + + return args + + +def make_setuptools_install_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + record_filename, # type: str + root, # type: Optional[str] + prefix, # type: Optional[str] + header_dir, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + no_user_config, # type: bool + pycompile, # type: bool +): + # type: (...) -> List[str] + assert not (use_user_site and prefix) + assert not (use_user_site and root) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + unbuffered_output=True, + ) + args += ["install", "--record", record_filename] + args += ["--single-version-externally-managed"] + + if root is not None: + args += ["--root", root] + if prefix is not None: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + if use_user_site: + args += ["--user", "--prefix="] + + if pycompile: + args += ["--compile"] + else: + args += ["--no-compile"] + + if header_dir: + args += ["--install-headers", header_dir] + + args += install_options + + return args diff --git a/venv/Lib/site-packages/pip/_internal/utils/subprocess.py b/venv/Lib/site-packages/pip/_internal/utils/subprocess.py new file mode 100644 index 00000000..2c8cf212 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/subprocess.py @@ -0,0 +1,281 @@ +import logging +import os +import shlex +import subprocess +from typing import Any, Callable, Iterable, List, Mapping, Optional, Union + +from pip._internal.cli.spinners import SpinnerInterface, open_spinner +from pip._internal.exceptions import InstallationSubprocessError +from pip._internal.utils.logging import subprocess_logger +from pip._internal.utils.misc import HiddenText + +CommandArgs = List[Union[str, HiddenText]] + + +LOG_DIVIDER = "----------------------------------------" + + +def make_command(*args): + # type: (Union[str, HiddenText, CommandArgs]) -> CommandArgs + """ + Create a CommandArgs object. + """ + command_args = [] # type: CommandArgs + for arg in args: + # Check for list instead of CommandArgs since CommandArgs is + # only known during type-checking. + if isinstance(arg, list): + command_args.extend(arg) + else: + # Otherwise, arg is str or HiddenText. + command_args.append(arg) + + return command_args + + +def format_command_args(args): + # type: (Union[List[str], CommandArgs]) -> str + """ + Format command arguments for display. + """ + # For HiddenText arguments, display the redacted form by calling str(). + # Also, we don't apply str() to arguments that aren't HiddenText since + # this can trigger a UnicodeDecodeError in Python 2 if the argument + # has type unicode and includes a non-ascii character. (The type + # checker doesn't ensure the annotations are correct in all cases.) + return " ".join( + shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg) + for arg in args + ) + + +def reveal_command_args(args): + # type: (Union[List[str], CommandArgs]) -> List[str] + """ + Return the arguments in their raw, unredacted form. 
+ """ + return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args] + + +def make_subprocess_output_error( + cmd_args, # type: Union[List[str], CommandArgs] + cwd, # type: Optional[str] + lines, # type: List[str] + exit_status, # type: int +): + # type: (...) -> str + """ + Create and return the error message to use to log a subprocess error + with command output. + + :param lines: A list of lines, each ending with a newline. + """ + command = format_command_args(cmd_args) + + # We know the joined output value ends in a newline. + output = "".join(lines) + msg = ( + # Use a unicode string to avoid "UnicodeEncodeError: 'ascii' + # codec can't encode character ..." in Python 2 when a format + # argument (e.g. `output`) has a non-ascii character. + "Command errored out with exit status {exit_status}:\n" + " command: {command_display}\n" + " cwd: {cwd_display}\n" + "Complete output ({line_count} lines):\n{output}{divider}" + ).format( + exit_status=exit_status, + command_display=command, + cwd_display=cwd, + line_count=len(lines), + output=output, + divider=LOG_DIVIDER, + ) + return msg + + +def call_subprocess( + cmd, # type: Union[List[str], CommandArgs] + show_stdout=False, # type: bool + cwd=None, # type: Optional[str] + on_returncode="raise", # type: str + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + command_desc=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + unset_environ=None, # type: Optional[Iterable[str]] + spinner=None, # type: Optional[SpinnerInterface] + log_failed_cmd=True, # type: Optional[bool] + stdout_only=False, # type: Optional[bool] +): + # type: (...) -> str + """ + Args: + show_stdout: if true, use INFO to log the subprocess's stderr and + stdout streams. Otherwise, use DEBUG. Defaults to False. + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + unset_environ: an iterable of environment variable names to unset + prior to calling subprocess.Popen(). + log_failed_cmd: if false, failed commands are not logged, only raised. + stdout_only: if true, return only stdout, else return both. When true, + logging of both stdout and stderr occurs when the subprocess has + terminated, else logging occurs as subprocess output is produced. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + if unset_environ is None: + unset_environ = [] + # Most places in pip use show_stdout=False. What this means is-- + # + # - We connect the child's output (combined stderr and stdout) to a + # single pipe, which we read. + # - We log this output to stderr at DEBUG level as it is received. + # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't + # requested), then we show a spinner so the user can still see the + # subprocess is in progress. + # - If the subprocess exits with an error, we log the output to stderr + # at ERROR level if it hasn't already been displayed to the console + # (e.g. if --verbose logging wasn't enabled). This way we don't log + # the output to the console twice. + # + # If show_stdout=True, then the above is still done, but with DEBUG + # replaced by INFO. + if show_stdout: + # Then log the subprocess output at INFO level. + log_subprocess = subprocess_logger.info + used_level = logging.INFO + else: + # Then log the subprocess output using DEBUG. This also ensures + # it will be logged to the log file (aka user_log), if enabled. 
+ log_subprocess = subprocess_logger.debug + used_level = logging.DEBUG + + # Whether the subprocess will be visible in the console. + showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level + + # Only use the spinner if we're not showing the subprocess output + # and we have a spinner. + use_spinner = not showing_subprocess and spinner is not None + + if command_desc is None: + command_desc = format_command_args(cmd) + + log_subprocess("Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + for name in unset_environ: + env.pop(name, None) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. + reveal_command_args(cmd), + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE, + cwd=cwd, + env=env, + errors="backslashreplace", + ) + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", + exc, + command_desc, + ) + raise + all_output = [] + if not stdout_only: + assert proc.stdout + assert proc.stdin + proc.stdin.close() + # In this mode, stdout and stderr are in the same pipe. + while True: + line = proc.stdout.readline() # type: str + if not line: + break + line = line.rstrip() + all_output.append(line + "\n") + + # Show the line immediately. + log_subprocess(line) + # Update the spinner. + if use_spinner: + assert spinner + spinner.spin() + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + output = "".join(all_output) + else: + # In this mode, stdout and stderr are in different pipes. + # We must use communicate() which is the only safe way to read both. + out, err = proc.communicate() + # log line by line to preserve pip log indenting + for out_line in out.splitlines(): + log_subprocess(out_line) + all_output.append(out) + for err_line in err.splitlines(): + log_subprocess(err_line) + all_output.append(err) + output = out + + proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes + if use_spinner: + assert spinner + if proc_had_error: + spinner.finish("error") + else: + spinner.finish("done") + if proc_had_error: + if on_returncode == "raise": + if not showing_subprocess and log_failed_cmd: + # Then the subprocess streams haven't been logged to the + # console yet. + msg = make_subprocess_output_error( + cmd_args=cmd, + cwd=cwd, + lines=all_output, + exit_status=proc.returncode, + ) + subprocess_logger.error(msg) + raise InstallationSubprocessError(proc.returncode, command_desc) + elif on_returncode == "warn": + subprocess_logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, + proc.returncode, + cwd, + ) + elif on_returncode == "ignore": + pass + else: + raise ValueError(f"Invalid value: on_returncode={on_returncode!r}") + return output + + +def runner_with_spinner_message(message): + # type: (str) -> Callable[..., None] + """Provide a subprocess_runner that shows a spinner message. + + Intended for use with for pep517's Pep517HookCaller. Thus, the runner has + an API that matches what's expected by Pep517HookCaller.subprocess_runner. + """ + + def runner( + cmd, # type: List[str] + cwd=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + ): + # type: (...) 
-> None + with open_spinner(message) as spinner: + call_subprocess( + cmd, + cwd=cwd, + extra_environ=extra_environ, + spinner=spinner, + ) + + return runner diff --git a/venv/Lib/site-packages/pip/_internal/utils/temp_dir.py b/venv/Lib/site-packages/pip/_internal/utils/temp_dir.py new file mode 100644 index 00000000..477cbe6b --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/temp_dir.py @@ -0,0 +1,260 @@ +import errno +import itertools +import logging +import os.path +import tempfile +from contextlib import ExitStack, contextmanager +from typing import Any, Dict, Iterator, Optional, TypeVar, Union + +from pip._internal.utils.misc import enum, rmtree + +logger = logging.getLogger(__name__) + +_T = TypeVar("_T", bound="TempDirectory") + + +# Kinds of temporary directories. Only needed for ones that are +# globally-managed. +tempdir_kinds = enum( + BUILD_ENV="build-env", + EPHEM_WHEEL_CACHE="ephem-wheel-cache", + REQ_BUILD="req-build", +) + + +_tempdir_manager = None # type: Optional[ExitStack] + + +@contextmanager +def global_tempdir_manager(): + # type: () -> Iterator[None] + global _tempdir_manager + with ExitStack() as stack: + old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack + try: + yield + finally: + _tempdir_manager = old_tempdir_manager + + +class TempDirectoryTypeRegistry: + """Manages temp directory behavior""" + + def __init__(self): + # type: () -> None + self._should_delete = {} # type: Dict[str, bool] + + def set_delete(self, kind, value): + # type: (str, bool) -> None + """Indicate whether a TempDirectory of the given kind should be + auto-deleted. + """ + self._should_delete[kind] = value + + def get_delete(self, kind): + # type: (str) -> bool + """Get configured auto-delete flag for a given TempDirectory type, + default True. + """ + return self._should_delete.get(kind, True) + + +_tempdir_registry = None # type: Optional[TempDirectoryTypeRegistry] + + +@contextmanager +def tempdir_registry(): + # type: () -> Iterator[TempDirectoryTypeRegistry] + """Provides a scoped global tempdir registry that can be used to dictate + whether directories should be deleted. + """ + global _tempdir_registry + old_tempdir_registry = _tempdir_registry + _tempdir_registry = TempDirectoryTypeRegistry() + try: + yield _tempdir_registry + finally: + _tempdir_registry = old_tempdir_registry + + +class _Default: + pass + + +_default = _Default() + + +class TempDirectory: + """Helper class that owns and cleans up a temporary directory. + + This class can be used as a context manager or as an OO representation of a + temporary directory. + + Attributes: + path + Location to the created temporary directory + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + Methods: + cleanup() + Deletes the temporary directory + + When used as a context manager, if the delete attribute is True, on + exiting the context the temporary directory is deleted. + """ + + def __init__( + self, + path=None, # type: Optional[str] + delete=_default, # type: Union[bool, None, _Default] + kind="temp", # type: str + globally_managed=False, # type: bool + ): + super().__init__() + + if delete is _default: + if path is not None: + # If we were given an explicit directory, resolve delete option + # now. + delete = False + else: + # Otherwise, we wait until cleanup and see what + # tempdir_registry says. + delete = None + + # The only time we specify path is in for editables where it + # is the value of the --src option. 
+ if path is None: + path = self._create(kind) + + self._path = path + self._deleted = False + self.delete = delete + self.kind = kind + + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + + @property + def path(self): + # type: () -> str + assert not self._deleted, f"Attempted to access deleted path: {self._path}" + return self._path + + def __repr__(self): + # type: () -> str + return f"<{self.__class__.__name__} {self.path!r}>" + + def __enter__(self): + # type: (_T) -> _T + return self + + def __exit__(self, exc, value, tb): + # type: (Any, Any, Any) -> None + if self.delete is not None: + delete = self.delete + elif _tempdir_registry: + delete = _tempdir_registry.get_delete(self.kind) + else: + delete = True + + if delete: + self.cleanup() + + def _create(self, kind): + # type: (str) -> str + """Create a temporary directory and store its path in self.path""" + # We realpath here because some systems have their default tmpdir + # symlinked to another directory. This tends to confuse build + # scripts, so we canonicalize the path by traversing potential + # symlinks here. + path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + logger.debug("Created temporary directory: %s", path) + return path + + def cleanup(self): + # type: () -> None + """Remove the temporary directory created and reset state""" + self._deleted = True + if not os.path.exists(self._path): + return + rmtree(self._path) + + +class AdjacentTempDirectory(TempDirectory): + """Helper class that creates a temporary directory adjacent to a real one. + + Attributes: + original + The original directory to create a temp directory for. + path + After calling create() or entering, contains the full + path to the temporary directory. + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + """ + + # The characters that may be used to name the temp directory + # We always prepend a ~ and then rotate through these until + # a usable name is found. + # pkg_resources raises a different error for .dist-info folder + # with leading '-' and invalid metadata + LEADING_CHARS = "-~.=%0123456789" + + def __init__(self, original, delete=None): + # type: (str, Optional[bool]) -> None + self.original = original.rstrip("/\\") + super().__init__(delete=delete) + + @classmethod + def _generate_names(cls, name): + # type: (str) -> Iterator[str] + """Generates a series of temporary names. + + The algorithm replaces the leading characters in the name + with ones that are valid filesystem characters, but are not + valid package names (for both Python and pip definitions of + package). 
+ """ + for i in range(1, len(name)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i - 1 + ): + new_name = "~" + "".join(candidate) + name[i:] + if new_name != name: + yield new_name + + # If we make it this far, we will have to make a longer name + for i in range(len(cls.LEADING_CHARS)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i + ): + new_name = "~" + "".join(candidate) + name + if new_name != name: + yield new_name + + def _create(self, kind): + # type: (str) -> str + root, name = os.path.split(self.original) + for candidate in self._generate_names(name): + path = os.path.join(root, candidate) + try: + os.mkdir(path) + except OSError as ex: + # Continue if the name exists already + if ex.errno != errno.EEXIST: + raise + else: + path = os.path.realpath(path) + break + else: + # Final fallback on the default behavior. + path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + + logger.debug("Created temporary directory: %s", path) + return path diff --git a/venv/Lib/site-packages/pip/_internal/utils/unpacking.py b/venv/Lib/site-packages/pip/_internal/utils/unpacking.py new file mode 100644 index 00000000..44ac4753 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/unpacking.py @@ -0,0 +1,267 @@ +"""Utilities related archives. +""" + +import logging +import os +import shutil +import stat +import tarfile +import zipfile +from typing import Iterable, List, Optional +from zipfile import ZipInfo + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.filetypes import ( + BZ2_EXTENSIONS, + TAR_EXTENSIONS, + XZ_EXTENSIONS, + ZIP_EXTENSIONS, +) +from pip._internal.utils.misc import ensure_dir + +logger = logging.getLogger(__name__) + + +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + +try: + import bz2 # noqa + + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug("bz2 module is not available") + +try: + # Only for Python 3.3+ + import lzma # noqa + + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug("lzma module is not available") + + +def current_umask(): + # type: () -> int + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def split_leading_dir(path): + # type: (str) -> List[str] + path = path.lstrip("/").lstrip("\\") + if "/" in path and ( + ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path + ): + return path.split("/", 1) + elif "\\" in path: + return path.split("\\", 1) + else: + return [path, ""] + + +def has_leading_dir(paths): + # type: (Iterable[str]) -> bool + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def is_within_directory(directory, target): + # type: (str, str) -> bool + """ + Return true if the absolute path of target is within the directory + """ + abs_directory = os.path.abspath(directory) + abs_target = os.path.abspath(target) + + prefix = os.path.commonprefix([abs_directory, abs_target]) + return prefix == abs_directory + + +def set_extracted_file_to_default_mode_plus_executable(path): + # type: (str) -> None + """ + Make file present at path have execute for user/group/world + 
(chmod +x) is no-op on windows per python docs + """ + os.chmod(path, (0o777 & ~current_umask() | 0o111)) + + +def zip_item_is_executable(info): + # type: (ZipInfo) -> bool + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + return bool(mode and stat.S_ISREG(mode) and mode & 0o111) + + +def unzip_file(filename, location, flatten=True): + # type: (str, str, bool) -> None + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + zipfp = open(filename, "rb") + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if not is_within_directory(location, fn): + message = ( + "The zip file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, fn, location)) + if fn.endswith("/") or fn.endswith("\\"): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + # Don't use read() to avoid allocating an arbitrarily large + # chunk of memory for the file's content + fp = zip.open(name) + try: + with open(fn, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + finally: + fp.close() + if zip_item_is_executable(info): + set_extracted_file_to_default_mode_plus_executable(fn) + finally: + zipfp.close() + + +def untar_file(filename, location): + # type: (str, str) -> None + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + ensure_dir(location) + if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"): + mode = "r:gz" + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = "r:bz2" + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = "r:xz" + elif filename.lower().endswith(".tar"): + mode = "r" + else: + logger.warning( + "Cannot determine compression type for file %s", + filename, + ) + mode = "r:*" + tar = tarfile.open(filename, mode) + try: + leading = has_leading_dir([member.name for member in tar.getmembers()]) + for member in tar.getmembers(): + fn = member.name + if leading: + fn = split_leading_dir(fn)[1] + path = os.path.join(location, fn) + if not is_within_directory(location, path): + message = ( + "The tar file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, path, location)) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + try: + # https://github.com/python/typeshed/issues/2673 + tar._extract_member(member, path) # type: ignore + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + ensure_dir(os.path.dirname(path)) + assert fp is not None + with open(path, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + tar.utime(member, path) + # member have any execute permissions for user/group/world? + if member.mode & 0o111: + set_extracted_file_to_default_mode_plus_executable(path) + finally: + tar.close() + + +def unpack_file( + filename, # type: str + location, # type: str + content_type=None, # type: Optional[str] +): + # type: (...) -> None + filename = os.path.realpath(filename) + if ( + content_type == "application/zip" + or filename.lower().endswith(ZIP_EXTENSIONS) + or zipfile.is_zipfile(filename) + ): + unzip_file(filename, location, flatten=not filename.endswith(".whl")) + elif ( + content_type == "application/x-gzip" + or tarfile.is_tarfile(filename) + or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS) + ): + untar_file(filename, location) + else: + # FIXME: handle? + # FIXME: magic signatures? + logger.critical( + "Cannot unpack file %s (downloaded from %s, content-type: %s); " + "cannot detect archive format", + filename, + location, + content_type, + ) + raise InstallationError(f"Cannot determine archive format of {location}") diff --git a/venv/Lib/site-packages/pip/_internal/utils/urls.py b/venv/Lib/site-packages/pip/_internal/utils/urls.py new file mode 100644 index 00000000..50a04d86 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/urls.py @@ -0,0 +1,49 @@ +import os +import sys +import urllib.parse +import urllib.request +from typing import Optional + + +def get_url_scheme(url): + # type: (str) -> Optional[str] + if ":" not in url: + return None + return url.split(":", 1)[0].lower() + + +def path_to_url(path): + # type: (str) -> str + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. 
+ """ + path = os.path.normpath(os.path.abspath(path)) + url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path)) + return url + + +def url_to_path(url): + # type: (str) -> str + """ + Convert a file: URL to a path. + """ + assert url.startswith( + "file:" + ), f"You can only turn file: urls into filenames (not {url!r})" + + _, netloc, path, _, _ = urllib.parse.urlsplit(url) + + if not netloc or netloc == "localhost": + # According to RFC 8089, same as empty authority. + netloc = "" + elif sys.platform == "win32": + # If we have a UNC path, prepend UNC share notation. + netloc = "\\\\" + netloc + else: + raise ValueError( + f"non-local file URIs are not supported on this platform: {url!r}" + ) + + path = urllib.request.url2pathname(netloc + path) + return path diff --git a/venv/Lib/site-packages/pip/_internal/utils/virtualenv.py b/venv/Lib/site-packages/pip/_internal/utils/virtualenv.py new file mode 100644 index 00000000..51cacb55 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/virtualenv.py @@ -0,0 +1,111 @@ +import logging +import os +import re +import site +import sys +from typing import List, Optional + +logger = logging.getLogger(__name__) +_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile( + r"include-system-site-packages\s*=\s*(?Ptrue|false)" +) + + +def _running_under_venv(): + # type: () -> bool + """Checks if sys.base_prefix and sys.prefix match. + + This handles PEP 405 compliant virtual environments. + """ + return sys.prefix != getattr(sys, "base_prefix", sys.prefix) + + +def _running_under_regular_virtualenv(): + # type: () -> bool + """Checks if sys.real_prefix is set. + + This handles virtual environments created with pypa's virtualenv. + """ + # pypa/virtualenv case + return hasattr(sys, "real_prefix") + + +def running_under_virtualenv(): + # type: () -> bool + """Return True if we're running inside a virtualenv, False otherwise.""" + return _running_under_venv() or _running_under_regular_virtualenv() + + +def _get_pyvenv_cfg_lines(): + # type: () -> Optional[List[str]] + """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines + + Returns None, if it could not read/access the file. + """ + pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg") + try: + # Although PEP 405 does not specify, the built-in venv module always + # writes with UTF-8. (pypa/pip#8717) + with open(pyvenv_cfg_file, encoding="utf-8") as f: + return f.read().splitlines() # avoids trailing newlines + except OSError: + return None + + +def _no_global_under_venv(): + # type: () -> bool + """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion + + PEP 405 specifies that when system site-packages are not supposed to be + visible from a virtual environment, `pyvenv.cfg` must contain the following + line: + + include-system-site-packages = false + + Additionally, log a warning if accessing the file fails. + """ + cfg_lines = _get_pyvenv_cfg_lines() + if cfg_lines is None: + # We're not in a "sane" venv, so assume there is no system + # site-packages access (since that's PEP 405's default state). + logger.warning( + "Could not access 'pyvenv.cfg' despite a virtual environment " + "being active. Assuming global site-packages is not accessible " + "in this environment." 
+ ) + return True + + for line in cfg_lines: + match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line) + if match is not None and match.group("value") == "false": + return True + return False + + +def _no_global_under_regular_virtualenv(): + # type: () -> bool + """Check if "no-global-site-packages.txt" exists beside site.py + + This mirrors logic in pypa/virtualenv for determining whether system + site-packages are visible in the virtual environment. + """ + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_site_packages_file = os.path.join( + site_mod_dir, + "no-global-site-packages.txt", + ) + return os.path.exists(no_global_site_packages_file) + + +def virtualenv_no_global(): + # type: () -> bool + """Returns a boolean, whether running in venv with no system site-packages.""" + # PEP 405 compliance needs to be checked first since virtualenv >=20 would + # return True for both checks, but is only able to use the PEP 405 config. + if _running_under_venv(): + return _no_global_under_venv() + + if _running_under_regular_virtualenv(): + return _no_global_under_regular_virtualenv() + + return False diff --git a/venv/Lib/site-packages/pip/_internal/utils/wheel.py b/venv/Lib/site-packages/pip/_internal/utils/wheel.py new file mode 100644 index 00000000..42f08084 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/utils/wheel.py @@ -0,0 +1,189 @@ +"""Support functions for working with wheel files. +""" + +import logging +from email.message import Message +from email.parser import Parser +from typing import Dict, Tuple +from zipfile import BadZipFile, ZipFile + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import DistInfoDistribution, Distribution + +from pip._internal.exceptions import UnsupportedWheel +from pip._internal.utils.pkg_resources import DictMetadata + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +class WheelMetadata(DictMetadata): + """Metadata provider that maps metadata decoding exceptions to our + internal exception type. + """ + + def __init__(self, metadata, wheel_name): + # type: (Dict[str, bytes], str) -> None + super().__init__(metadata) + self._wheel_name = wheel_name + + def get_metadata(self, name): + # type: (str) -> str + try: + return super().get_metadata(name) + except UnicodeDecodeError as e: + # Augment the default error with the origin of the file. + raise UnsupportedWheel( + f"Error decoding metadata for {self._wheel_name}: {e}" + ) + + +def pkg_resources_distribution_for_wheel(wheel_zip, name, location): + # type: (ZipFile, str, str) -> Distribution + """Get a pkg_resources distribution given a wheel. + + :raises UnsupportedWheel: on any errors + """ + info_dir, _ = parse_wheel(wheel_zip, name) + + metadata_files = [p for p in wheel_zip.namelist() if p.startswith(f"{info_dir}/")] + + metadata_text = {} # type: Dict[str, bytes] + for path in metadata_files: + _, metadata_name = path.split("/", 1) + + try: + metadata_text[metadata_name] = read_wheel_metadata_file(wheel_zip, path) + except UnsupportedWheel as e: + raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e))) + + metadata = WheelMetadata(metadata_text, location) + + return DistInfoDistribution(location=location, metadata=metadata, project_name=name) + + +def parse_wheel(wheel_zip, name): + # type: (ZipFile, str) -> Tuple[str, Message] + """Extract information from the provided wheel, ensuring it meets basic + standards. 
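A hedged sketch of what the wheel helpers here do at the zip level: locate the single `.dist-info` directory and read a metadata file out of it with the standard `zipfile` module. The wheel path in the usage comment is a placeholder, not a file from this repository.

```python
# Illustrative sketch only: finding the .dist-info directory inside a wheel and
# reading its METADATA file, which is what the helpers above build on.
from zipfile import ZipFile


def dist_info_dir(wheel_path: str) -> str:
    with ZipFile(wheel_path) as zf:
        # Wheel entries always use "/" as the path separator.
        top_level = {name.split("/", 1)[0] for name in zf.namelist()}
    info_dirs = [d for d in top_level if d.endswith(".dist-info")]
    if len(info_dirs) != 1:
        raise ValueError(f"expected exactly one .dist-info dir, got {info_dirs}")
    return info_dirs[0]


def read_metadata(wheel_path: str) -> bytes:
    with ZipFile(wheel_path) as zf:
        return zf.read(f"{dist_info_dir(wheel_path)}/METADATA")


# Usage (assumes a wheel file exists locally; the name is a placeholder):
# print(read_metadata("example-1.0-py3-none-any.whl").decode()[:200])
```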
+ + Returns the name of the .dist-info directory and the parsed WHEEL metadata. + """ + try: + info_dir = wheel_dist_info_dir(wheel_zip, name) + metadata = wheel_metadata(wheel_zip, info_dir) + version = wheel_version(metadata) + except UnsupportedWheel as e: + raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e))) + + check_compatibility(version, name) + + return info_dir, metadata + + +def wheel_dist_info_dir(source, name): + # type: (ZipFile, str) -> str + """Returns the name of the contained .dist-info directory. + + Raises AssertionError or UnsupportedWheel if not found, >1 found, or + it doesn't match the provided name. + """ + # Zip file path separators must be / + subdirs = {p.split("/", 1)[0] for p in source.namelist()} + + info_dirs = [s for s in subdirs if s.endswith(".dist-info")] + + if not info_dirs: + raise UnsupportedWheel(".dist-info directory not found") + + if len(info_dirs) > 1: + raise UnsupportedWheel( + "multiple .dist-info directories found: {}".format(", ".join(info_dirs)) + ) + + info_dir = info_dirs[0] + + info_dir_name = canonicalize_name(info_dir) + canonical_name = canonicalize_name(name) + if not info_dir_name.startswith(canonical_name): + raise UnsupportedWheel( + ".dist-info directory {!r} does not start with {!r}".format( + info_dir, canonical_name + ) + ) + + return info_dir + + +def read_wheel_metadata_file(source, path): + # type: (ZipFile, str) -> bytes + try: + return source.read(path) + # BadZipFile for general corruption, KeyError for missing entry, + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel(f"could not read {path!r} file: {e!r}") + + +def wheel_metadata(source, dist_info_dir): + # type: (ZipFile, str) -> Message + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ + path = f"{dist_info_dir}/WHEEL" + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = wheel_contents.decode() + except UnicodeDecodeError as e: + raise UnsupportedWheel(f"error decoding {path!r}: {e!r}") + + # FeedParser (used by Parser) does not raise any exceptions. The returned + # message may have .defects populated, but for backwards-compatibility we + # currently ignore them. + return Parser().parsestr(wheel_text) + + +def wheel_version(wheel_data): + # type: (Message) -> Tuple[int, ...] + """Given WHEEL metadata, return the parsed Wheel-Version. + Otherwise, raise UnsupportedWheel. + """ + version_text = wheel_data["Wheel-Version"] + if version_text is None: + raise UnsupportedWheel("WHEEL is missing Wheel-Version") + + version = version_text.strip() + + try: + return tuple(map(int, version.split("."))) + except ValueError: + raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}") + + +def check_compatibility(version, name): + # type: (Tuple[int, ...], str) -> None + """Raises errors or warns if called with an incompatible Wheel-Version. + + pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). 
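The compatibility rule described here comes down to a tuple comparison; a minimal sketch, assuming the same `VERSION_COMPATIBLE = (1, 0)` baseline as above:

```python
# Illustrative sketch only: reject a newer major Wheel-Version, warn on a newer
# minor one, accept everything else.
import warnings

VERSION_COMPATIBLE = (1, 0)


def check_wheel_version(version_text: str) -> None:
    version = tuple(int(part) for part in version_text.strip().split("."))
    if version[0] > VERSION_COMPATIBLE[0]:
        raise RuntimeError(f"Wheel-Version {version_text} is not supported")
    if version > VERSION_COMPATIBLE:
        warnings.warn(f"Installing from a newer Wheel-Version ({version_text})")


check_wheel_version("1.0")    # fine
check_wheel_version("1.2")    # warns
# check_wheel_version("2.0")  # would raise RuntimeError
```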
+ + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "{}'s Wheel-Version ({}) is not compatible with this version " + "of pip".format(name, ".".join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + "Installing from a newer Wheel-Version (%s)", + ".".join(map(str, version)), + ) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__init__.py b/venv/Lib/site-packages/pip/_internal/vcs/__init__.py new file mode 100644 index 00000000..30025d63 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/vcs/__init__.py @@ -0,0 +1,14 @@ +# Expose a limited set of classes and functions so callers outside of +# the vcs package don't need to import deeper than `pip._internal.vcs`. +# (The test directory may still need to import from a vcs sub-package.) +# Import all vcs modules to register each VCS in the VcsSupport object. +import pip._internal.vcs.bazaar +import pip._internal.vcs.git +import pip._internal.vcs.mercurial +import pip._internal.vcs.subversion # noqa: F401 +from pip._internal.vcs.versioncontrol import ( # noqa: F401 + RemoteNotFoundError, + is_url, + make_vcs_requirement_url, + vcs, +) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..7c5521d4 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-36.pyc new file mode 100644 index 00000000..52afbbbe Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-36.pyc new file mode 100644 index 00000000..a07ec164 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-36.pyc new file mode 100644 index 00000000..a2df9cee Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-36.pyc new file mode 100644 index 00000000..a36097fd Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-36.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-36.pyc new file mode 100644 index 00000000..b0019373 Binary files /dev/null and b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/bazaar.py b/venv/Lib/site-packages/pip/_internal/vcs/bazaar.py new file mode 100644 index 00000000..42b68773 --- /dev/null +++ 
b/venv/Lib/site-packages/pip/_internal/vcs/bazaar.py @@ -0,0 +1,96 @@ +import logging +from typing import List, Optional, Tuple + +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Bazaar(VersionControl): + name = 'bzr' + dirname = '.bzr' + repo_name = 'branch' + schemes = ( + 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', + 'bzr+lp', 'bzr+file' + ) + + @staticmethod + def get_base_rev_args(rev): + # type: (str) -> List[str] + return ['-r', rev] + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + rev_display = rev_options.to_display() + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + cmd_args = ( + make_command('branch', '-q', rev_options.to_args(), url, dest) + ) + self.run_command(cmd_args) + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + self.run_command(make_command('switch', url), cwd=dest) + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command('pull', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it + url, rev, user_pass = super().get_url_rev_and_auth(url) + if url.startswith('ssh://'): + url = 'bzr+' + url + return url, rev, user_pass + + @classmethod + def get_remote_url(cls, location): + # type: (str) -> str + urls = cls.run_command( + ['info'], show_stdout=False, stdout_only=True, cwd=location + ) + for line in urls.splitlines(): + line = line.strip() + for x in ('checkout of branch: ', + 'parent branch: '): + if line.startswith(x): + repo = line.split(x)[1] + if cls._is_local_repository(repo): + return path_to_url(repo) + return repo + raise RemoteNotFoundError + + @classmethod + def get_revision(cls, location): + # type: (str) -> str + revision = cls.run_command( + ['revno'], show_stdout=False, stdout_only=True, cwd=location, + ) + return revision.splitlines()[-1] + + @classmethod + def is_commit_id_equal(cls, dest, name): + # type: (str, Optional[str]) -> bool + """Always assume the versions don't match""" + return False + + +vcs.register(Bazaar) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/git.py b/venv/Lib/site-packages/pip/_internal/vcs/git.py new file mode 100644 index 00000000..b7c1b9fe --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/vcs/git.py @@ -0,0 +1,450 @@ +import logging +import os.path +import re +import urllib.parse +import urllib.request +from typing import List, Optional, Tuple + +from pip._vendor.packaging.version import _BaseVersion +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path, hide_url +from pip._internal.utils.subprocess import make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + find_path_to_setup_from_repo_root, + vcs, +) + +urlsplit = urllib.parse.urlsplit +urlunsplit = urllib.parse.urlunsplit + + +logger = 
logging.getLogger(__name__) + + +HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$') + + +def looks_like_hash(sha): + # type: (str) -> bool + return bool(HASH_REGEX.match(sha)) + + +class Git(VersionControl): + name = 'git' + dirname = '.git' + repo_name = 'clone' + schemes = ( + 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file', + ) + # Prevent the user's environment variables from interfering with pip: + # https://github.com/pypa/pip/issues/1130 + unset_environ = ('GIT_DIR', 'GIT_WORK_TREE') + default_arg_rev = 'HEAD' + + @staticmethod + def get_base_rev_args(rev): + # type: (str) -> List[str] + return [rev] + + def is_immutable_rev_checkout(self, url, dest): + # type: (str, str) -> bool + _, rev_options = self.get_url_rev_options(hide_url(url)) + if not rev_options.rev: + return False + if not self.is_commit_id_equal(dest, rev_options.rev): + # the current commit is different from rev, + # which means rev was something else than a commit hash + return False + # return False in the rare case rev is both a commit hash + # and a tag or a branch; we don't want to cache in that case + # because that branch/tag could point to something else in the future + is_tag_or_branch = bool( + self.get_revision_sha(dest, rev_options.rev)[0] + ) + return not is_tag_or_branch + + def get_git_version(self): + # type: () -> _BaseVersion + VERSION_PFX = 'git version ' + version = self.run_command( + ['version'], show_stdout=False, stdout_only=True + ) + if version.startswith(VERSION_PFX): + version = version[len(VERSION_PFX):].split()[0] + else: + version = '' + # get first 3 positions of the git version because + # on windows it is x.y.z.windows.t, and this parses as + # LegacyVersion which always smaller than a Version. + version = '.'.join(version.split('.')[:3]) + return parse_version(version) + + @classmethod + def get_current_branch(cls, location): + # type: (str) -> Optional[str] + """ + Return the current branch, or None if HEAD isn't at a branch + (e.g. detached HEAD). + """ + # git-symbolic-ref exits with empty stdout if "HEAD" is a detached + # HEAD rather than a symbolic ref. In addition, the -q causes the + # command to exit with status code 1 instead of 128 in this case + # and to suppress the message to stderr. + args = ['symbolic-ref', '-q', 'HEAD'] + output = cls.run_command( + args, + extra_ok_returncodes=(1, ), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + ref = output.strip() + + if ref.startswith('refs/heads/'): + return ref[len('refs/heads/'):] + + return None + + @classmethod + def get_revision_sha(cls, dest, rev): + # type: (str, str) -> Tuple[Optional[str], bool] + """ + Return (sha_or_none, is_branch), where sha_or_none is a commit hash + if the revision names a remote branch or tag, otherwise None. + + Args: + dest: the repository directory. + rev: the revision name. + """ + # Pass rev to pre-filter the list. + output = cls.run_command( + ['show-ref', rev], + cwd=dest, + show_stdout=False, + stdout_only=True, + on_returncode='ignore', + ) + refs = {} + # NOTE: We do not use splitlines here since that would split on other + # unicode separators, which can be maliciously used to install a + # different revision. + for line in output.strip().split("\n"): + line = line.rstrip("\r") + if not line: + continue + try: + ref_sha, ref_name = line.split(" ", maxsplit=2) + except ValueError: + # Include the offending line to simplify troubleshooting if + # this error ever occurs. 
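A small self-contained sketch of the `show-ref` parsing that `get_revision_sha` performs: each output line is split on the first space into a SHA and a ref name, collected into a dict, and the remote branch ref is preferred over the tag ref. The sample output below is invented for demonstration.

```python
# Illustrative sketch only: resolving a rev against parsed `git show-ref` output.
import re

HASH_REGEX = re.compile(r"^[a-fA-F0-9]{40}$")

sample_output = (
    "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa refs/remotes/origin/main\n"
    "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb refs/tags/v1.0\n"
)

refs = {}
for line in sample_output.strip().split("\n"):
    line = line.rstrip("\r")
    if not line:
        continue
    sha, name = line.split(" ", maxsplit=1)
    refs[name] = sha

rev = "main"
resolved = refs.get(f"refs/remotes/origin/{rev}") or refs.get(f"refs/tags/{rev}")
print(resolved, bool(HASH_REGEX.match(resolved or "")))  # -> aaaa... True
```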
+ raise ValueError(f'unexpected show-ref line: {line!r}') + + refs[ref_name] = ref_sha + + branch_ref = f'refs/remotes/origin/{rev}' + tag_ref = f'refs/tags/{rev}' + + sha = refs.get(branch_ref) + if sha is not None: + return (sha, True) + + sha = refs.get(tag_ref) + + return (sha, False) + + @classmethod + def _should_fetch(cls, dest, rev): + # type: (str, str) -> bool + """ + Return true if rev is a ref or is a commit that we don't have locally. + + Branches and tags are not considered in this method because they are + assumed to be always available locally (which is a normal outcome of + ``git clone`` and ``git fetch --tags``). + """ + if rev.startswith("refs/"): + # Always fetch remote refs. + return True + + if not looks_like_hash(rev): + # Git fetch would fail with abbreviated commits. + return False + + if cls.has_commit(dest, rev): + # Don't fetch if we have the commit locally. + return False + + return True + + @classmethod + def resolve_revision(cls, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> RevOptions + """ + Resolve a revision to a new RevOptions object with the SHA1 of the + branch, tag, or ref if found. + + Args: + rev_options: a RevOptions object. + """ + rev = rev_options.arg_rev + # The arg_rev property's implementation for Git ensures that the + # rev return value is always non-None. + assert rev is not None + + sha, is_branch = cls.get_revision_sha(dest, rev) + + if sha is not None: + rev_options = rev_options.make_new(sha) + rev_options.branch_name = rev if is_branch else None + + return rev_options + + # Do not show a warning for the common case of something that has + # the form of a Git commit hash. + if not looks_like_hash(rev): + logger.warning( + "Did not find branch or tag '%s', assuming revision or ref.", + rev, + ) + + if not cls._should_fetch(dest, rev): + return rev_options + + # fetch the requested revision + cls.run_command( + make_command('fetch', '-q', url, rev_options.to_args()), + cwd=dest, + ) + # Change the revision to the SHA of the ref we fetched + sha = cls.get_revision(dest, rev='FETCH_HEAD') + rev_options = rev_options.make_new(sha) + + return rev_options + + @classmethod + def is_commit_id_equal(cls, dest, name): + # type: (str, Optional[str]) -> bool + """ + Return whether the current commit hash equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + if not name: + # Then avoid an unnecessary subprocess call. + return False + + return cls.get_revision(dest) == name + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + rev_display = rev_options.to_display() + logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest)) + self.run_command(make_command('clone', '-q', url, dest)) + + if rev_options.rev: + # Then a specific revision was requested. + rev_options = self.resolve_revision(dest, url, rev_options) + branch_name = getattr(rev_options, 'branch_name', None) + if branch_name is None: + # Only do a checkout if the current commit id doesn't match + # the requested revision. + if not self.is_commit_id_equal(dest, rev_options.rev): + cmd_args = make_command( + 'checkout', '-q', rev_options.to_args(), + ) + self.run_command(cmd_args, cwd=dest) + elif self.get_current_branch(dest) != branch_name: + # Then a specific branch was requested, and that branch + # is not yet checked out. 
+ track_branch = f'origin/{branch_name}' + cmd_args = [ + 'checkout', '-b', branch_name, '--track', track_branch, + ] + self.run_command(cmd_args, cwd=dest) + + #: repo may contain submodules + self.update_submodules(dest) + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + self.run_command( + make_command('config', 'remote.origin.url', url), + cwd=dest, + ) + cmd_args = make_command('checkout', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + self.update_submodules(dest) + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + # First fetch changes from the default remote + if self.get_git_version() >= parse_version('1.9.0'): + # fetch tags in addition to everything else + self.run_command(['fetch', '-q', '--tags'], cwd=dest) + else: + self.run_command(['fetch', '-q'], cwd=dest) + # Then reset to wanted revision (maybe even origin/master) + rev_options = self.resolve_revision(dest, url, rev_options) + cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + #: update submodules + self.update_submodules(dest) + + @classmethod + def get_remote_url(cls, location): + # type: (str) -> str + """ + Return URL of the first remote encountered. + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + # We need to pass 1 for extra_ok_returncodes since the command + # exits with return code 1 if there are no matching lines. + stdout = cls.run_command( + ['config', '--get-regexp', r'remote\..*\.url'], + extra_ok_returncodes=(1, ), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + remotes = stdout.splitlines() + try: + found_remote = remotes[0] + except IndexError: + raise RemoteNotFoundError + + for remote in remotes: + if remote.startswith('remote.origin.url '): + found_remote = remote + break + url = found_remote.split(' ')[1] + return url.strip() + + @classmethod + def has_commit(cls, location, rev): + # type: (str, str) -> bool + """ + Check if rev is a commit that is available in the local repository. + """ + try: + cls.run_command( + ['rev-parse', '-q', '--verify', "sha^" + rev], + cwd=location, + log_failed_cmd=False, + ) + except InstallationError: + return False + else: + return True + + @classmethod + def get_revision(cls, location, rev=None): + # type: (str, Optional[str]) -> str + if rev is None: + rev = 'HEAD' + current_rev = cls.run_command( + ['rev-parse', rev], + show_stdout=False, + stdout_only=True, + cwd=location, + ) + return current_rev.strip() + + @classmethod + def get_subdirectory(cls, location): + # type: (str) -> Optional[str] + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. + """ + # find the repo root + git_dir = cls.run_command( + ['rev-parse', '--git-dir'], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if not os.path.isabs(git_dir): + git_dir = os.path.join(location, git_dir) + repo_root = os.path.abspath(os.path.join(git_dir, '..')) + return find_path_to_setup_from_repo_root(location, repo_root) + + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] + """ + Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. + That's required because although they use SSH they sometimes don't + work with a ssh:// scheme (e.g. GitHub). But we need a scheme for + parsing. 
Hence we remove it again afterwards and return it as a stub. + """ + # Works around an apparent Git bug + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith('file'): + initial_slashes = path[:-len(path.lstrip('/'))] + newpath = ( + initial_slashes + + urllib.request.url2pathname(path) + .replace('\\', '/').lstrip('/') + ) + after_plus = scheme.find('+') + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + + if '://' not in url: + assert 'file:' not in url + url = url.replace('git+', 'git+ssh://') + url, rev, user_pass = super().get_url_rev_and_auth(url) + url = url.replace('ssh://', '') + else: + url, rev, user_pass = super().get_url_rev_and_auth(url) + + return url, rev, user_pass + + @classmethod + def update_submodules(cls, location): + # type: (str) -> None + if not os.path.exists(os.path.join(location, '.gitmodules')): + return + cls.run_command( + ['submodule', 'update', '--init', '--recursive', '-q'], + cwd=location, + ) + + @classmethod + def get_repository_root(cls, location): + # type: (str) -> Optional[str] + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ['rev-parse', '--show-toplevel'], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode='raise', + log_failed_cmd=False, + ) + except BadCommand: + logger.debug("could not determine if %s is under git control " + "because git is not available", location) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip('\r\n')) + + +vcs.register(Git) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/mercurial.py b/venv/Lib/site-packages/pip/_internal/vcs/mercurial.py new file mode 100644 index 00000000..b4f887d3 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/vcs/mercurial.py @@ -0,0 +1,158 @@ +import configparser +import logging +import os +from typing import List, Optional + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + RevOptions, + VersionControl, + find_path_to_setup_from_repo_root, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Mercurial(VersionControl): + name = 'hg' + dirname = '.hg' + repo_name = 'clone' + schemes = ( + 'hg+file', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http', + ) + + @staticmethod + def get_base_rev_args(rev): + # type: (str) -> List[str] + return [rev] + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + rev_display = rev_options.to_display() + logger.info( + 'Cloning hg %s%s to %s', + url, + rev_display, + display_path(dest), + ) + self.run_command(make_command('clone', '--noupdate', '-q', url, dest)) + self.run_command( + make_command('update', '-q', rev_options.to_args()), + cwd=dest, + ) + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + repo_config = os.path.join(dest, self.dirname, 'hgrc') + config = configparser.RawConfigParser() + try: + config.read(repo_config) + config.set('paths', 'default', url.secret) + with open(repo_config, 'w') as config_file: + config.write(config_file) + except (OSError, configparser.NoSectionError) as exc: + logger.warning( + 'Could not switch Mercurial 
repository to %s: %s', url, exc, + ) + else: + cmd_args = make_command('update', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + self.run_command(['pull', '-q'], cwd=dest) + cmd_args = make_command('update', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_remote_url(cls, location): + # type: (str) -> str + url = cls.run_command( + ['showconfig', 'paths.default'], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if cls._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + @classmethod + def get_revision(cls, location): + # type: (str) -> str + """ + Return the repository-local changeset revision number, as an integer. + """ + current_revision = cls.run_command( + ['parents', '--template={rev}'], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_revision + + @classmethod + def get_requirement_revision(cls, location): + # type: (str) -> str + """ + Return the changeset identification hash, as a 40-character + hexadecimal string + """ + current_rev_hash = cls.run_command( + ['parents', '--template={node}'], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_rev_hash + + @classmethod + def is_commit_id_equal(cls, dest, name): + # type: (str, Optional[str]) -> bool + """Always assume the versions don't match""" + return False + + @classmethod + def get_subdirectory(cls, location): + # type: (str) -> Optional[str] + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. + """ + # find the repo root + repo_root = cls.run_command( + ['root'], show_stdout=False, stdout_only=True, cwd=location + ).strip() + if not os.path.isabs(repo_root): + repo_root = os.path.abspath(os.path.join(location, repo_root)) + return find_path_to_setup_from_repo_root(location, repo_root) + + @classmethod + def get_repository_root(cls, location): + # type: (str) -> Optional[str] + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ['root'], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode='raise', + log_failed_cmd=False, + ) + except BadCommand: + logger.debug("could not determine if %s is under hg control " + "because hg is not available", location) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip('\r\n')) + + +vcs.register(Mercurial) diff --git a/env/lib/python2.7/site-packages/pip/_internal/vcs/subversion.py b/venv/Lib/site-packages/pip/_internal/vcs/subversion.py similarity index 76% rename from env/lib/python2.7/site-packages/pip/_internal/vcs/subversion.py rename to venv/Lib/site-packages/pip/_internal/vcs/subversion.py index 6bb4c8c5..4d1237ca 100644 --- a/env/lib/python2.7/site-packages/pip/_internal/vcs/subversion.py +++ b/venv/Lib/site-packages/pip/_internal/vcs/subversion.py @@ -1,16 +1,24 @@ -from __future__ import absolute_import - import logging import os import re -import sys +from typing import List, Optional, Tuple -from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( - display_path, rmtree, split_auth_from_netloc, + HiddenText, + display_path, + is_console_interactive, + split_auth_from_netloc, +) +from pip._internal.utils.subprocess import CommandArgs, make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + 
RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, ) -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.vcs.versioncontrol import VersionControl, vcs + +logger = logging.getLogger(__name__) _svn_xml_url_re = re.compile('url="([^"]+)"') _svn_rev_re = re.compile(r'committed-rev="(\d+)"') @@ -18,36 +26,34 @@ _svn_info_xml_url_re = re.compile(r'(.*)') -if MYPY_CHECK_RUNNING: - from typing import List, Optional, Tuple - from pip._internal.vcs.versioncontrol import RevOptions - -logger = logging.getLogger(__name__) - - class Subversion(VersionControl): name = 'svn' dirname = '.svn' repo_name = 'checkout' - schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn') + schemes = ( + 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn', 'svn+file' + ) @classmethod def should_add_vcs_url_prefix(cls, remote_url): + # type: (str) -> bool return True @staticmethod def get_base_rev_args(rev): + # type: (str) -> List[str] return ['-r', rev] @classmethod def get_revision(cls, location): + # type: (str) -> str """ Return the maximum revision for all files under a given location """ # Note: taken from setuptools.command.egg_info revision = 0 - for base, dirs, files in os.walk(location): + for base, dirs, _ in os.walk(location): if cls.dirname not in dirs: dirs[:] = [] continue # no sense walking uncontrolled subdirs @@ -60,15 +66,17 @@ def get_revision(cls, location): dirurl, localrev = cls._get_svn_url_rev(base) if base == location: + assert dirurl is not None base = dirurl + '/' # save the root url elif not dirurl or not dirurl.startswith(base): dirs[:] = [] continue # not part of the same svn tree, skip it revision = max(revision, localrev) - return revision + return str(revision) @classmethod def get_netloc_and_auth(cls, netloc, scheme): + # type: (str, str) -> Tuple[str, Tuple[Optional[str], Optional[str]]] """ This override allows the auth information to be passed to svn via the --username and --password options instead of via the URL. @@ -76,21 +84,23 @@ def get_netloc_and_auth(cls, netloc, scheme): if scheme == 'ssh': # The --username and --password options can't be used for # svn+ssh URLs, so keep the auth information in the URL. 
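The `--username`/`--password` handling in this hunk relies on pip's `split_auth_from_netloc` helper; the sketch below is a simplified stand-in (it skips URL unquoting) that only shows the shape of the result.

```python
# Illustrative sketch only: separating credentials from a URL netloc so they can
# be passed as command-line options instead of being left in the URL.
from typing import Optional, Tuple


def split_auth_from_netloc(netloc: str) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
    if "@" not in netloc:
        return netloc, (None, None)
    auth, host = netloc.rsplit("@", 1)
    if ":" in auth:
        user, password = auth.split(":", 1)
    else:
        user, password = auth, None
    return host, (user, password)


print(split_auth_from_netloc("alice:s3cret@svn.example.com"))
# -> ('svn.example.com', ('alice', 's3cret'))
```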
- return super(Subversion, cls).get_netloc_and_auth(netloc, scheme) + return super().get_netloc_and_auth(netloc, scheme) return split_auth_from_netloc(netloc) @classmethod def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it - url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url) + url, rev, user_pass = super().get_url_rev_and_auth(url) if url.startswith('ssh://'): url = 'svn+' + url return url, rev, user_pass @staticmethod def make_rev_args(username, password): - extra_args = [] + # type: (Optional[str], Optional[HiddenText]) -> CommandArgs + extra_args = [] # type: CommandArgs if username: extra_args += ['--username', username] if password: @@ -100,6 +110,7 @@ def make_rev_args(username, password): @classmethod def get_remote_url(cls, location): + # type: (str) -> str # In cases where the source is in a subdirectory, not alongside # setup.py we have to look up in the location until we find a real # setup.py @@ -115,12 +126,17 @@ def get_remote_url(cls, location): "parent directories)", orig_location, ) - return None + raise RemoteNotFoundError - return cls._get_svn_url_rev(location)[0] + url, _rev = cls._get_svn_url_rev(location) + if url is None: + raise RemoteNotFoundError + + return url @classmethod def _get_svn_url_rev(cls, location): + # type: (str) -> Tuple[Optional[str], int] from pip._internal.exceptions import InstallationError entries_path = os.path.join(location, cls.dirname, 'entries') @@ -130,17 +146,18 @@ def _get_svn_url_rev(cls, location): else: # subversion >= 1.7 does not have the 'entries' file data = '' + url = None if (data.startswith('8') or data.startswith('9') or data.startswith('10')): - data = list(map(str.splitlines, data.split('\n\x0c\n'))) - del data[0][0] # get rid of the '8' - url = data[0][3] - revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0] + entries = list(map(str.splitlines, data.split('\n\x0c\n'))) + del entries[0][0] # get rid of the '8' + url = entries[0][3] + revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0] elif data.startswith(' bool """Always assume the versions don't match""" return False def __init__(self, use_interactive=None): # type: (bool) -> None if use_interactive is None: - use_interactive = sys.stdin.isatty() + use_interactive = is_console_interactive() self.use_interactive = use_interactive # This member is used to cache the fetched version of the current @@ -187,7 +208,7 @@ def __init__(self, use_interactive=None): # Empty tuple: Could not parse version. self._vcs_version = None # type: Optional[Tuple[int, ...]] - super(Subversion, self).__init__() + super().__init__() def call_vcs_version(self): # type: () -> Tuple[int, ...] 
@@ -202,13 +223,17 @@ def call_vcs_version(self): # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0 # svn, version 1.7.14 (r1542130) # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu + # svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0) + # compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2 version_prefix = 'svn, version ' - version = self.run_command(['--version'], show_stdout=False) + version = self.run_command( + ['--version'], show_stdout=False, stdout_only=True + ) if not version.startswith(version_prefix): return () version = version[len(version_prefix):].split()[0] - version_list = version.split('.') + version_list = version.partition('-')[0].split('.') try: parsed_version = tuple(map(int, version_list)) except ValueError: @@ -238,14 +263,13 @@ def get_vcs_version(self): return vcs_version def get_remote_call_options(self): - # type: () -> List[str] + # type: () -> CommandArgs """Return options to be used on calls to Subversion that contact the server. These options are applicable for the following ``svn`` subcommands used in this class. - checkout - - export - switch - update @@ -270,22 +294,8 @@ def get_remote_call_options(self): return [] - def export(self, location, url): - """Export the svn repository at the url to the destination location""" - url, rev_options = self.get_url_rev_options(url) - - logger.info('Exporting svn repository %s to %s', url, location) - with indent_log(): - if os.path.exists(location): - # Subversion doesn't like to check out over an existing - # directory --force fixes this, but was only added in svn 1.5 - rmtree(location) - cmd_args = (['export'] + self.get_remote_call_options() + - rev_options.to_args() + [url, location]) - self.run_command(cmd_args, show_stdout=False) - def fetch_new(self, dest, url, rev_options): - # type: (str, str, RevOptions) -> None + # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() logger.info( 'Checking out %s%s to %s', @@ -293,21 +303,26 @@ def fetch_new(self, dest, url, rev_options): rev_display, display_path(dest), ) - cmd_args = (['checkout', '-q'] + - self.get_remote_call_options() + - rev_options.to_args() + [url, dest]) + cmd_args = make_command( + 'checkout', '-q', self.get_remote_call_options(), + rev_options.to_args(), url, dest, + ) self.run_command(cmd_args) def switch(self, dest, url, rev_options): - # type: (str, str, RevOptions) -> None - cmd_args = (['switch'] + self.get_remote_call_options() + - rev_options.to_args() + [url, dest]) + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command( + 'switch', self.get_remote_call_options(), rev_options.to_args(), + url, dest, + ) self.run_command(cmd_args) def update(self, dest, url, rev_options): - # type: (str, str, RevOptions) -> None - cmd_args = (['update'] + self.get_remote_call_options() + - rev_options.to_args() + [dest]) + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command( + 'update', self.get_remote_call_options(), rev_options.to_args(), + dest, + ) self.run_command(cmd_args) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/versioncontrol.py b/venv/Lib/site-packages/pip/_internal/vcs/versioncontrol.py new file mode 100644 index 00000000..97977b57 --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/vcs/versioncontrol.py @@ -0,0 +1,715 @@ +"""Handles all VCS (version control) support""" + +import logging +import os +import shutil +import sys +import urllib.parse +from typing import ( + Any, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, 
+ Tuple, + Type, + Union, +) + +from pip._internal.cli.spinners import SpinnerInterface +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import ( + HiddenText, + ask_path_exists, + backup_dir, + display_path, + hide_url, + hide_value, + rmtree, +) +from pip._internal.utils.subprocess import CommandArgs, call_subprocess, make_command +from pip._internal.utils.urls import get_url_scheme + +__all__ = ['vcs'] + + +logger = logging.getLogger(__name__) + +AuthInfo = Tuple[Optional[str], Optional[str]] + + +def is_url(name): + # type: (str) -> bool + """ + Return true if the name looks like a URL. + """ + scheme = get_url_scheme(name) + if scheme is None: + return False + return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes + + +def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): + # type: (str, str, str, Optional[str]) -> str + """ + Return the URL for a VCS requirement. + + Args: + repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). + project_name: the (unescaped) project name. + """ + egg_project_name = project_name.replace("-", "_") + req = f'{repo_url}@{rev}#egg={egg_project_name}' + if subdir: + req += f'&subdirectory={subdir}' + + return req + + +def find_path_to_setup_from_repo_root(location, repo_root): + # type: (str, str) -> Optional[str] + """ + Find the path to `setup.py` by searching up the filesystem from `location`. + Return the path to `setup.py` relative to `repo_root`. + Return None if `setup.py` is in `repo_root` or cannot be found. + """ + # find setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + + if os.path.samefile(repo_root, location): + return None + + return os.path.relpath(location, repo_root) + + +class RemoteNotFoundError(Exception): + pass + + +class RevOptions: + + """ + Encapsulates a VCS-specific revision to install, along with any VCS + install options. + + Instances of this class should be treated as if immutable. + """ + + def __init__( + self, + vc_class, # type: Type[VersionControl] + rev=None, # type: Optional[str] + extra_args=None, # type: Optional[CommandArgs] + ): + # type: (...) -> None + """ + Args: + vc_class: a VersionControl subclass. + rev: the name of the revision to install. + extra_args: a list of extra options. + """ + if extra_args is None: + extra_args = [] + + self.extra_args = extra_args + self.rev = rev + self.vc_class = vc_class + self.branch_name = None # type: Optional[str] + + def __repr__(self): + # type: () -> str + return f'' + + @property + def arg_rev(self): + # type: () -> Optional[str] + if self.rev is None: + return self.vc_class.default_arg_rev + + return self.rev + + def to_args(self): + # type: () -> CommandArgs + """ + Return the VCS-specific command arguments. 
+ """ + args = [] # type: CommandArgs + rev = self.arg_rev + if rev is not None: + args += self.vc_class.get_base_rev_args(rev) + args += self.extra_args + + return args + + def to_display(self): + # type: () -> str + if not self.rev: + return '' + + return f' (to revision {self.rev})' + + def make_new(self, rev): + # type: (str) -> RevOptions + """ + Make a copy of the current instance, but with a new rev. + + Args: + rev: the name of the revision for the new object. + """ + return self.vc_class.make_rev_options(rev, extra_args=self.extra_args) + + +class VcsSupport: + _registry = {} # type: Dict[str, VersionControl] + schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] + + def __init__(self): + # type: () -> None + # Register more schemes with urlparse for various version control + # systems + urllib.parse.uses_netloc.extend(self.schemes) + super().__init__() + + def __iter__(self): + # type: () -> Iterator[str] + return self._registry.__iter__() + + @property + def backends(self): + # type: () -> List[VersionControl] + return list(self._registry.values()) + + @property + def dirnames(self): + # type: () -> List[str] + return [backend.dirname for backend in self.backends] + + @property + def all_schemes(self): + # type: () -> List[str] + schemes = [] # type: List[str] + for backend in self.backends: + schemes.extend(backend.schemes) + return schemes + + def register(self, cls): + # type: (Type[VersionControl]) -> None + if not hasattr(cls, 'name'): + logger.warning('Cannot register VCS %s', cls.__name__) + return + if cls.name not in self._registry: + self._registry[cls.name] = cls() + logger.debug('Registered VCS backend: %s', cls.name) + + def unregister(self, name): + # type: (str) -> None + if name in self._registry: + del self._registry[name] + + def get_backend_for_dir(self, location): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object if a repository of that type is found + at the given directory. + """ + vcs_backends = {} + for vcs_backend in self._registry.values(): + repo_path = vcs_backend.get_repository_root(location) + if not repo_path: + continue + logger.debug('Determine that %s uses VCS: %s', + location, vcs_backend.name) + vcs_backends[repo_path] = vcs_backend + + if not vcs_backends: + return None + + # Choose the VCS in the inner-most directory. Since all repository + # roots found here would be either `location` or one of its + # parents, the longest path should have the most path components, + # i.e. the backend representing the inner-most repository. + inner_most_repo_path = max(vcs_backends, key=len) + return vcs_backends[inner_most_repo_path] + + def get_backend_for_scheme(self, scheme): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object or None. + """ + for vcs_backend in self._registry.values(): + if scheme in vcs_backend.schemes: + return vcs_backend + return None + + def get_backend(self, name): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object or None. + """ + name = name.lower() + return self._registry.get(name) + + +vcs = VcsSupport() + + +class VersionControl: + name = '' + dirname = '' + repo_name = '' + # List of supported schemes for this Version Control + schemes = () # type: Tuple[str, ...] + # Iterable of environment variable names to pass to call_subprocess(). + unset_environ = () # type: Tuple[str, ...] 
+ default_arg_rev = None # type: Optional[str] + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url): + # type: (str) -> bool + """ + Return whether the vcs prefix (e.g. "git+") should be added to a + repository's remote url when used in a requirement. + """ + return not remote_url.lower().startswith(f'{cls.name}:') + + @classmethod + def get_subdirectory(cls, location): + # type: (str) -> Optional[str] + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. + """ + return None + + @classmethod + def get_requirement_revision(cls, repo_dir): + # type: (str) -> str + """ + Return the revision string that should be used in a requirement. + """ + return cls.get_revision(repo_dir) + + @classmethod + def get_src_requirement(cls, repo_dir, project_name): + # type: (str, str) -> str + """ + Return the requirement string to use to redownload the files + currently at the given repository directory. + + Args: + project_name: the (unescaped) project name. + + The return value has a form similar to the following: + + {repository_url}@{revision}#egg={project_name} + """ + repo_url = cls.get_remote_url(repo_dir) + + if cls.should_add_vcs_url_prefix(repo_url): + repo_url = f'{cls.name}+{repo_url}' + + revision = cls.get_requirement_revision(repo_dir) + subdir = cls.get_subdirectory(repo_dir) + req = make_vcs_requirement_url(repo_url, revision, project_name, + subdir=subdir) + + return req + + @staticmethod + def get_base_rev_args(rev): + # type: (str) -> List[str] + """ + Return the base revision arguments for a vcs command. + + Args: + rev: the name of a revision to install. Cannot be None. + """ + raise NotImplementedError + + def is_immutable_rev_checkout(self, url, dest): + # type: (str, str) -> bool + """ + Return true if the commit hash checked out at dest matches + the revision in url. + + Always return False, if the VCS does not support immutable commit + hashes. + + This method does not check if there are local uncommitted changes + in dest after checkout, as pip currently has no use case for that. + """ + return False + + @classmethod + def make_rev_options(cls, rev=None, extra_args=None): + # type: (Optional[str], Optional[CommandArgs]) -> RevOptions + """ + Return a RevOptions object. + + Args: + rev: the name of a revision to install. + extra_args: a list of extra options. + """ + return RevOptions(cls, rev, extra_args=extra_args) + + @classmethod + def _is_local_repository(cls, repo): + # type: (str) -> bool + """ + posix absolute paths start with os.path.sep, + win32 ones start with drive (like c:\\folder) + """ + drive, tail = os.path.splitdrive(repo) + return repo.startswith(os.path.sep) or bool(drive) + + @classmethod + def get_netloc_and_auth(cls, netloc, scheme): + # type: (str, str) -> Tuple[str, Tuple[Optional[str], Optional[str]]] + """ + Parse the repository URL's netloc, and return the new netloc to use + along with auth information. + + Args: + netloc: the original repository URL netloc. + scheme: the repository URL's scheme without the vcs prefix. + + This is mainly for the Subversion class to override, so that auth + information can be provided via the --username and --password options + instead of through the URL. For other subclasses like Git without + such an option, auth information must stay in the URL. + + Returns: (netloc, (username, password)). 
+ """ + return netloc, (None, None) + + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] + """ + Parse the repository URL to use, and return the URL, revision, + and auth info to use. + + Returns: (url, rev, (username, password)). + """ + scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) + if '+' not in scheme: + raise ValueError( + "Sorry, {!r} is a malformed VCS url. " + "The format is +://, " + "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) + ) + # Remove the vcs prefix. + scheme = scheme.split('+', 1)[1] + netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) + rev = None + if '@' in path: + path, rev = path.rsplit('@', 1) + if not rev: + raise InstallationError( + "The URL {!r} has an empty revision (after @) " + "which is not supported. Include a revision after @ " + "or remove @ from the URL.".format(url) + ) + url = urllib.parse.urlunsplit((scheme, netloc, path, query, '')) + return url, rev, user_pass + + @staticmethod + def make_rev_args(username, password): + # type: (Optional[str], Optional[HiddenText]) -> CommandArgs + """ + Return the RevOptions "extra arguments" to use in obtain(). + """ + return [] + + def get_url_rev_options(self, url): + # type: (HiddenText) -> Tuple[HiddenText, RevOptions] + """ + Return the URL and RevOptions object to use in obtain(), + as a tuple (url, rev_options). + """ + secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) + username, secret_password = user_pass + password = None # type: Optional[HiddenText] + if secret_password is not None: + password = hide_value(secret_password) + extra_args = self.make_rev_args(username, password) + rev_options = self.make_rev_options(rev, extra_args=extra_args) + + return hide_url(secret_url), rev_options + + @staticmethod + def normalize_url(url): + # type: (str) -> str + """ + Normalize a URL for comparison by unquoting it and removing any + trailing slash. + """ + return urllib.parse.unquote(url).rstrip('/') + + @classmethod + def compare_urls(cls, url1, url2): + # type: (str, str) -> bool + """ + Compare two repo URLs for identity, ignoring incidental differences. + """ + return (cls.normalize_url(url1) == cls.normalize_url(url2)) + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Fetch a revision from a repository, in the case that this is the + first fetch from the repository. + + Args: + dest: the directory to fetch the repository to. + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Switch the repo at ``dest`` to point to ``URL``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Update an already-existing repo to the given ``rev_options``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + @classmethod + def is_commit_id_equal(cls, dest, name): + # type: (str, Optional[str]) -> bool + """ + Return whether the id of the current commit equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + raise NotImplementedError + + def obtain(self, dest, url): + # type: (str, HiddenText) -> None + """ + Install or update in editable mode the package represented by this + VersionControl object. 
+ + :param dest: the repository directory in which to install or update. + :param url: the repository URL starting with a vcs prefix. + """ + url, rev_options = self.get_url_rev_options(url) + + if not os.path.exists(dest): + self.fetch_new(dest, url, rev_options) + return + + rev_display = rev_options.to_display() + if self.is_repository_directory(dest): + existing_url = self.get_remote_url(dest) + if self.compare_urls(existing_url, url.secret): + logger.debug( + '%s in %s exists, and has correct URL (%s)', + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.is_commit_id_equal(dest, rev_options.rev): + logger.info( + 'Updating %s %s%s', + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, url, rev_options) + else: + logger.info('Skipping because already up-to-date.') + return + + logger.warning( + '%s %s in %s exists with URL %s', + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', + ('s', 'i', 'w', 'b')) + else: + logger.warning( + 'Directory %s already exists, and is not a %s %s.', + dest, + self.name, + self.repo_name, + ) + # https://github.com/python/mypy/issues/1174 + prompt = ('(i)gnore, (w)ipe, (b)ackup ', # type: ignore + ('i', 'w', 'b')) + + logger.warning( + 'The plan is to install the %s repository %s', + self.name, + url, + ) + response = ask_path_exists('What to do? {}'.format( + prompt[0]), prompt[1]) + + if response == 'a': + sys.exit(-1) + + if response == 'w': + logger.warning('Deleting %s', display_path(dest)) + rmtree(dest) + self.fetch_new(dest, url, rev_options) + return + + if response == 'b': + dest_dir = backup_dir(dest) + logger.warning( + 'Backing up %s to %s', display_path(dest), dest_dir, + ) + shutil.move(dest, dest_dir) + self.fetch_new(dest, url, rev_options) + return + + # Do nothing if the response is "i". + if response == 's': + logger.info( + 'Switching %s %s to %s%s', + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) + + def unpack(self, location, url): + # type: (str, HiddenText) -> None + """ + Clean up current location and download the url repository + (and vcs infos) into location + + :param url: the repository URL starting with a vcs prefix. + """ + if os.path.exists(location): + rmtree(location) + self.obtain(location, url=url) + + @classmethod + def get_remote_url(cls, location): + # type: (str) -> str + """ + Return the url used at location + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + raise NotImplementedError + + @classmethod + def get_revision(cls, location): + # type: (str) -> str + """ + Return the current commit id of the files at the given location. + """ + raise NotImplementedError + + @classmethod + def run_command( + cls, + cmd, # type: Union[List[str], CommandArgs] + show_stdout=True, # type: bool + cwd=None, # type: Optional[str] + on_returncode='raise', # type: str + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + command_desc=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + spinner=None, # type: Optional[SpinnerInterface] + log_failed_cmd=True, # type: bool + stdout_only=False, # type: bool + ): + # type: (...) 
-> str + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = make_command(cls.name, *cmd) + try: + return call_subprocess(cmd, show_stdout, cwd, + on_returncode=on_returncode, + extra_ok_returncodes=extra_ok_returncodes, + command_desc=command_desc, + extra_environ=extra_environ, + unset_environ=cls.unset_environ, + spinner=spinner, + log_failed_cmd=log_failed_cmd, + stdout_only=stdout_only) + except FileNotFoundError: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + raise BadCommand( + f'Cannot find command {cls.name!r} - do you have ' + f'{cls.name!r} installed and in your PATH?') + except PermissionError: + # errno.EACCES = Permission denied + # This error occurs, for instance, when the command is installed + # only for another user. So, the current user don't have + # permission to call the other user command. + raise BadCommand( + f"No permission to execute {cls.name!r} - install it " + f"locally, globally (ask admin), or check your PATH. " + f"See possible solutions at " + f"https://pip.pypa.io/en/latest/reference/pip_freeze/" + f"#fixing-permission-denied." + ) + + @classmethod + def is_repository_directory(cls, path): + # type: (str) -> bool + """ + Return whether a directory path is a repository directory. + """ + logger.debug('Checking in %s for %s (%s)...', + path, cls.dirname, cls.name) + return os.path.exists(os.path.join(path, cls.dirname)) + + @classmethod + def get_repository_root(cls, location): + # type: (str) -> Optional[str] + """ + Return the "root" (top-level) directory controlled by the vcs, + or `None` if the directory is not in any. + + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. + + This can do more than is_repository_directory() alone. For + example, the Git override checks that Git is actually available. + """ + if cls.is_repository_directory(location): + return location + return None diff --git a/venv/Lib/site-packages/pip/_internal/wheel_builder.py b/venv/Lib/site-packages/pip/_internal/wheel_builder.py new file mode 100644 index 00000000..92f172bc --- /dev/null +++ b/venv/Lib/site-packages/pip/_internal/wheel_builder.py @@ -0,0 +1,360 @@ +"""Orchestrator for building wheels from InstallRequirements. 
+""" + +import logging +import os.path +import re +import shutil +from typing import Any, Callable, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version +from pip._vendor.packaging.version import InvalidVersion, Version + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel +from pip._internal.metadata import get_wheel_distribution +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.build.wheel import build_wheel_pep517 +from pip._internal.operations.build.wheel_legacy import build_wheel_legacy +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed +from pip._internal.utils.setuptools_build import make_setuptools_clean_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + +_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.IGNORECASE) + +BinaryAllowedPredicate = Callable[[InstallRequirement], bool] +BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] + + +def _contains_egg_info(s): + # type: (str) -> bool + """Determine whether the string looks like an egg_info. + + :param s: The string to parse. E.g. foo-2.1 + """ + return bool(_egg_info_re.search(s)) + + +def _should_build( + req, # type: InstallRequirement + need_wheel, # type: bool + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) -> bool + """Return whether an InstallRequirement should be built into a wheel.""" + if req.constraint: + # never build requirements that are merely constraints + return False + if req.is_wheel: + if need_wheel: + logger.info( + 'Skipping %s, due to already being wheel.', req.name, + ) + return False + + if need_wheel: + # i.e. pip wheel, not pip install + return True + + # From this point, this concerns the pip install command only + # (need_wheel=False). + + if req.editable or not req.source_dir: + return False + + if req.use_pep517: + return True + + if not check_binary_allowed(req): + logger.info( + "Skipping wheel build for %s, due to binaries " + "being disabled for it.", req.name, + ) + return False + + if not is_wheel_installed(): + # we don't build legacy requirements if wheel is not installed + logger.info( + "Using legacy 'setup.py install' for %s, " + "since package 'wheel' is not installed.", req.name, + ) + return False + + return True + + +def should_build_for_wheel_command( + req, # type: InstallRequirement +): + # type: (...) -> bool + return _should_build( + req, need_wheel=True, check_binary_allowed=_always_true + ) + + +def should_build_for_install_command( + req, # type: InstallRequirement + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) -> bool + return _should_build( + req, need_wheel=False, check_binary_allowed=check_binary_allowed + ) + + +def _should_cache( + req, # type: InstallRequirement +): + # type: (...) -> Optional[bool] + """ + Return whether a built InstallRequirement can be stored in the persistent + wheel cache, assuming the wheel cache is available, and _should_build() + has determined a wheel needs to be built. 
+ """ + if req.editable or not req.source_dir: + # never cache editable requirements + return False + + if req.link and req.link.is_vcs: + # VCS checkout. Do not cache + # unless it points to an immutable commit hash. + assert not req.editable + assert req.source_dir + vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) + assert vcs_backend + if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir): + return True + return False + + assert req.link + base, ext = req.link.splitext() + if _contains_egg_info(base): + return True + + # Otherwise, do not cache. + return False + + +def _get_cache_dir( + req, # type: InstallRequirement + wheel_cache, # type: WheelCache +): + # type: (...) -> str + """Return the persistent or temporary cache directory where the built + wheel need to be stored. + """ + cache_available = bool(wheel_cache.cache_dir) + assert req.link + if cache_available and _should_cache(req): + cache_dir = wheel_cache.get_path_for_link(req.link) + else: + cache_dir = wheel_cache.get_ephem_path_for_link(req.link) + return cache_dir + + +def _always_true(_): + # type: (Any) -> bool + return True + + +def _verify_one(req, wheel_path): + # type: (InstallRequirement, str) -> None + canonical_name = canonicalize_name(req.name or "") + w = Wheel(os.path.basename(wheel_path)) + if canonicalize_name(w.name) != canonical_name: + raise InvalidWheelFilename( + "Wheel has unexpected file name: expected {!r}, " + "got {!r}".format(canonical_name, w.name), + ) + dist = get_wheel_distribution(wheel_path, canonical_name) + dist_verstr = str(dist.version) + if canonicalize_version(dist_verstr) != canonicalize_version(w.version): + raise InvalidWheelFilename( + "Wheel has unexpected file name: expected {!r}, " + "got {!r}".format(dist_verstr, w.version), + ) + metadata_version_value = dist.metadata_version + if metadata_version_value is None: + raise UnsupportedWheel("Missing Metadata-Version") + try: + metadata_version = Version(metadata_version_value) + except InvalidVersion: + msg = f"Invalid Metadata-Version: {metadata_version_value}" + raise UnsupportedWheel(msg) + if (metadata_version >= Version("1.2") + and not isinstance(dist.version, Version)): + raise UnsupportedWheel( + "Metadata 1.2 mandates PEP 440 version, " + "but {!r} is not".format(dist_verstr) + ) + + +def _build_one( + req, # type: InstallRequirement + output_dir, # type: str + verify, # type: bool + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> Optional[str] + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. + """ + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + return None + + # Install build deps into temporary directory (PEP 518) + with req.build_env: + wheel_path = _build_one_inside_env( + req, output_dir, build_options, global_options + ) + if wheel_path and verify: + try: + _verify_one(req, wheel_path) + except (InvalidWheelFilename, UnsupportedWheel) as e: + logger.warning("Built wheel for %s is invalid: %s", req.name, e) + return None + return wheel_path + + +def _build_one_inside_env( + req, # type: InstallRequirement + output_dir, # type: str + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) 
-> Optional[str] + with TempDirectory(kind="wheel") as temp_dir: + assert req.name + if req.use_pep517: + assert req.metadata_directory + assert req.pep517_backend + if global_options: + logger.warning( + 'Ignoring --global-option when building %s using PEP 517', req.name + ) + if build_options: + logger.warning( + 'Ignoring --build-option when building %s using PEP 517', req.name + ) + wheel_path = build_wheel_pep517( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_legacy( + name=req.name, + setup_py_path=req.setup_py_path, + source_dir=req.unpacked_source_directory, + global_options=global_options, + build_options=build_options, + tempd=temp_dir.path, + ) + + if wheel_path is not None: + wheel_name = os.path.basename(wheel_path) + dest_path = os.path.join(output_dir, wheel_name) + try: + wheel_hash, length = hash_file(wheel_path) + shutil.move(wheel_path, dest_path) + logger.info('Created wheel for %s: ' + 'filename=%s size=%d sha256=%s', + req.name, wheel_name, length, + wheel_hash.hexdigest()) + logger.info('Stored in directory: %s', output_dir) + return dest_path + except Exception as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + # Ignore return, we can't do anything else useful. + if not req.use_pep517: + _clean_one_legacy(req, global_options) + return None + + +def _clean_one_legacy(req, global_options): + # type: (InstallRequirement, List[str]) -> bool + clean_args = make_setuptools_clean_args( + req.setup_py_path, + global_options=global_options, + ) + + logger.info('Running setup.py clean for %s', req.name) + try: + call_subprocess(clean_args, cwd=req.source_dir) + return True + except Exception: + logger.error('Failed cleaning build dir for %s', req.name) + return False + + +def build( + requirements, # type: Iterable[InstallRequirement] + wheel_cache, # type: WheelCache + verify, # type: bool + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> BuildResult + """Build wheels. + + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. + """ + if not requirements: + return [], [] + + # Build the wheels. + logger.info( + 'Building wheels for collected packages: %s', + ', '.join(req.name for req in requirements), # type: ignore + ) + + with indent_log(): + build_successes, build_failures = [], [] + for req in requirements: + cache_dir = _get_cache_dir(req, wheel_cache) + wheel_file = _build_one( + req, cache_dir, verify, build_options, global_options + ) + if wheel_file: + # Update the link for this. 
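+                # The requirement now points at the wheel that was just
+                # built on disk, so the subsequent install step treats it
+                # exactly like a downloaded wheel.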
+ req.link = Link(path_to_url(wheel_file)) + req.local_file_path = req.link.file_path + assert req.link.is_wheel + build_successes.append(req) + else: + build_failures.append(req) + + # notify success/failure + if build_successes: + logger.info( + 'Successfully built %s', + ' '.join([req.name for req in build_successes]), # type: ignore + ) + if build_failures: + logger.info( + 'Failed to build %s', + ' '.join([req.name for req in build_failures]), # type: ignore + ) + # Return a list of requirements that failed to build + return build_successes, build_failures diff --git a/venv/Lib/site-packages/pip/_vendor/__init__.py b/venv/Lib/site-packages/pip/_vendor/__init__.py new file mode 100644 index 00000000..a10ecd60 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/__init__.py @@ -0,0 +1,113 @@ +""" +pip._vendor is for vendoring dependencies of pip to prevent needing pip to +depend on something external. + +Files inside of pip._vendor should be considered immutable and should only be +updated to versions from upstream. +""" +from __future__ import absolute_import + +import glob +import os.path +import sys + +# Downstream redistributors which have debundled our dependencies should also +# patch this value to be true. This will trigger the additional patching +# to cause things like "six" to be available as pip. +DEBUNDLED = False + +# By default, look in this directory for a bunch of .whl files which we will +# add to the beginning of sys.path before attempting to import anything. This +# is done to support downstream re-distributors like Debian and Fedora who +# wish to create their own Wheels for our dependencies to aid in debundling. +WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) + + +# Define a small helper function to alias our vendored modules to the real ones +# if the vendored ones do not exist. This idea of this was taken from +# https://github.com/kennethreitz/requests/pull/2567. +def vendored(modulename): + vendored_name = "{0}.{1}".format(__name__, modulename) + + try: + __import__(modulename, globals(), locals(), level=0) + except ImportError: + # We can just silently allow import failures to pass here. If we + # got to this point it means that ``import pip._vendor.whatever`` + # failed and so did ``import whatever``. Since we're importing this + # upfront in an attempt to alias imports, not erroring here will + # just mean we get a regular import error whenever pip *actually* + # tries to import one of these modules to use it, which actually + # gives us a better error message than we would have otherwise + # gotten. + pass + else: + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) + + +# If we're operating in a debundled setup, then we want to go ahead and trigger +# the aliasing of our vendored libraries as well as looking for wheels to add +# to our sys.path. This will cause all of this code to be a no-op typically +# however downstream redistributors can enable it in a consistent way across +# all platforms. +if DEBUNDLED: + # Actually look inside of WHEEL_DIR to find .whl files and add them to the + # front of our sys.path. + sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path + + # Actually alias all of our vendored dependencies. 
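+    # Each vendored() call below tries to import the real top-level module
+    # and, when it is importable, registers it under the pip._vendor.<name>
+    # alias so the rest of pip's imports keep working against the
+    # debundled copies.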
+ vendored("appdirs") + vendored("cachecontrol") + vendored("certifi") + vendored("colorama") + vendored("distlib") + vendored("distro") + vendored("html5lib") + vendored("six") + vendored("six.moves") + vendored("six.moves.urllib") + vendored("six.moves.urllib.parse") + vendored("packaging") + vendored("packaging.version") + vendored("packaging.specifiers") + vendored("pep517") + vendored("pkg_resources") + vendored("progress") + vendored("requests") + vendored("requests.exceptions") + vendored("requests.packages") + vendored("requests.packages.urllib3") + vendored("requests.packages.urllib3._collections") + vendored("requests.packages.urllib3.connection") + vendored("requests.packages.urllib3.connectionpool") + vendored("requests.packages.urllib3.contrib") + vendored("requests.packages.urllib3.contrib.ntlmpool") + vendored("requests.packages.urllib3.contrib.pyopenssl") + vendored("requests.packages.urllib3.exceptions") + vendored("requests.packages.urllib3.fields") + vendored("requests.packages.urllib3.filepost") + vendored("requests.packages.urllib3.packages") + vendored("requests.packages.urllib3.packages.ordered_dict") + vendored("requests.packages.urllib3.packages.six") + vendored("requests.packages.urllib3.packages.ssl_match_hostname") + vendored("requests.packages.urllib3.packages.ssl_match_hostname." + "_implementation") + vendored("requests.packages.urllib3.poolmanager") + vendored("requests.packages.urllib3.request") + vendored("requests.packages.urllib3.response") + vendored("requests.packages.urllib3.util") + vendored("requests.packages.urllib3.util.connection") + vendored("requests.packages.urllib3.util.request") + vendored("requests.packages.urllib3.util.response") + vendored("requests.packages.urllib3.util.retry") + vendored("requests.packages.urllib3.util.ssl_") + vendored("requests.packages.urllib3.util.timeout") + vendored("requests.packages.urllib3.util.url") + vendored("resolvelib") + vendored("tenacity") + vendored("toml") + vendored("toml.encoder") + vendored("toml.decoder") + vendored("urllib3") diff --git a/venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..2be04b05 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/__pycache__/appdirs.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/__pycache__/appdirs.cpython-36.pyc new file mode 100644 index 00000000..485f1ac0 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/__pycache__/appdirs.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/__pycache__/distro.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/__pycache__/distro.cpython-36.pyc new file mode 100644 index 00000000..255458df Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/__pycache__/distro.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-36.pyc new file mode 100644 index 00000000..0104b10e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/__pycache__/six.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/__pycache__/six.cpython-36.pyc new file mode 100644 index 00000000..1b04d366 Binary files /dev/null and 
b/venv/Lib/site-packages/pip/_vendor/__pycache__/six.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/appdirs.py b/venv/Lib/site-packages/pip/_vendor/appdirs.py new file mode 100644 index 00000000..33a3b774 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/appdirs.py @@ -0,0 +1,633 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petrișor + +"""Utilities for determining application-specific dirs. + +See for details and usage. +""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version__ = "1.4.4" +__version_info__ = tuple(int(segment) for segment in __version__.split(".")) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "Mac OS X", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. + # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. + system = 'linux2' +elif sys.platform == 'cli' and os.name == 'nt': + # Detect Windows in IronPython to match pip._internal.utils.compat.WINDOWS + # Discussion: + system = 'win32' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/ # or ~/.config/, if the other does not exist + Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\\Application Data\\ + Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ + Win 7 (not roaming): C:\Users\\AppData\Local\\ + Win 7 (roaming): C:\Users\\AppData\Roaming\\ + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/". 
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/', + if XDG_DATA_DIRS is not set + + Typical site data directories are: + Mac OS X: /Library/Application Support/ + Unix: /usr/local/share/ or /usr/share/ + Win XP: C:\Documents and Settings\All Users\Application Data\\ + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.path.join(x, appname) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. 
You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user config directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by default "~/.config/". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +# for the discussion regarding site_config_dir locations +# see +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. By default, the first item from XDG_CONFIG_DIRS is + returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set + + Typical site config directories are: + Mac OS X: same as site_data_dir + Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in + $XDG_CONFIG_DIRS + Win *: same as site_data_dir + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + + For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system in ["win32", "darwin"]: + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, version) + else: + # XDG default for $XDG_CONFIG_DIRS (missing or empty) + # see + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS') or '/etc/xdg' + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep) if x] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.path.join(x, appname) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + +def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. 
+ "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Cache" to the base app data dir for Windows. See + discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches/ + Unix: ~/.cache/ (XDG default) + Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache + Vista: C:\Users\\AppData\Local\\\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go in + the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming + app data dir (the default returned by `user_data_dir` above). Apps typically + put cache data somewhere *under* the given dir here. Some examples: + ...\Mozilla\Firefox\Profiles\\Cache + ...\Acme\SuperApp\Cache\1.0 + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + This can be disabled with the `opinion=False` option. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + # When using Python 2, return paths as bytes on Windows like we do on + # other operating systems. See helper function docs for more details. + if not PY3 and isinstance(path, unicode): + path = _win_path_to_bytes(path) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific state dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user state directories are: + Mac OS X: same as user_data_dir + Unix: ~/.local/state/ # or in $XDG_STATE_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow this Debian proposal + to extend the XDG spec and support $XDG_STATE_HOME. + + That means, by default "~/.local/state/". 
+ """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. See discussion below. + + Typical user log directories are: + Mac OS X: ~/Library/Logs/ + Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined + Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs + Vista: C:\Users\\AppData\Local\\\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. 
+ """ + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname=None, appauthor=None, version=None, + roaming=False, multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_state_dir(self): + return user_state_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + return user_log_dir(self.appname, self.appauthor, + version=self.version) + + +#---- internal support stuff + +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + if PY3: + import winreg as _winreg + else: + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # . 
+ has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +def _get_win_folder_with_jna(csidl_name): + import array + from com.sun import jna + from com.sun.jna.platform import win32 + + buf_size = win32.WinDef.MAX_PATH * 2 + buf = array.zeros('c', buf_size) + shell = win32.Shell32.INSTANCE + shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf = array.zeros('c', buf_size) + kernel = win32.Kernel32.INSTANCE + if kernel.GetShortPathName(dir, buf, buf_size): + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + return dir + +if system == "win32": + try: + from ctypes import windll + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + try: + import com.sun.jna + _get_win_folder = _get_win_folder_with_jna + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +def _win_path_to_bytes(path): + """Encode Windows paths to bytes. Only used on Python 2. + + Motivation is to be consistent with other operating systems where paths + are also returned as bytes. This avoids problems mixing bytes and Unicode + elsewhere in the codebase. For more details and discussion see + . + + If encoding using ASCII and MBCS fails, return the original Unicode path. + """ + for encoding in ('ASCII', 'MBCS'): + try: + return path.encode(encoding) + except (UnicodeEncodeError, LookupError): + pass + return path + + +#---- self test code + +if __name__ == "__main__": + appname = "MyApp" + appauthor = "MyCompany" + + props = ("user_data_dir", + "user_config_dir", + "user_cache_dir", + "user_state_dir", + "user_log_dir", + "site_data_dir", + "site_config_dir") + + print("-- app dirs %s --" % __version__) + + print("-- app dirs (with optional 'version')") + dirs = AppDirs(appname, appauthor, version="1.0") + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'version')") + dirs = AppDirs(appname, appauthor) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'appauthor')") + dirs = AppDirs(appname) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (with disabled 'appauthor')") + dirs = AppDirs(appname, appauthor=False) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py new file mode 100644 index 00000000..a1bbbbe3 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py @@ -0,0 +1,11 @@ +"""CacheControl import Interface. + +Make it easy to import from cachecontrol without long namespaces. 
+""" +__author__ = "Eric Larson" +__email__ = "eric@ionrock.org" +__version__ = "0.12.6" + +from .wrapper import CacheControl +from .adapter import CacheControlAdapter +from .controller import CacheController diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..9758a3b2 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-36.pyc new file mode 100644 index 00000000..766f1e3e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-36.pyc new file mode 100644 index 00000000..45e20fe1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-36.pyc new file mode 100644 index 00000000..11fe113a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-36.pyc new file mode 100644 index 00000000..6d296c8e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-36.pyc new file mode 100644 index 00000000..cb7c1dc0 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-36.pyc new file mode 100644 index 00000000..00bb567d Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-36.pyc new file mode 100644 index 00000000..c692a2c6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-36.pyc new file mode 100644 index 00000000..e4a8484e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-36.pyc new file mode 100644 index 
00000000..b378fa45 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/_cmd.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/_cmd.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py similarity index 98% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py index 780eb288..815650e8 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.py +++ b/venv/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -24,7 +24,7 @@ def __init__( **kw ): super(CacheControlAdapter, self).__init__(*args, **kw) - self.cache = cache or DictCache() + self.cache = DictCache() if cache is None else cache self.heuristic = heuristic self.cacheable_methods = cacheable_methods or ("GET",) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/cache.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/cache.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__init__.py diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..e48b9590 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc new file mode 100644 index 00000000..8093f982 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc new file mode 100644 index 00000000..4e3d4b70 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py similarity index 96% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py index 1ba00806..607b9452 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py +++ b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -69,8 +69,8 @@ def __init__( raise 
ValueError("Cannot use use_dir_lock and lock_class together") try: - from pip._vendor.lockfile import LockFile - from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile + from lockfile import LockFile + from lockfile.mkdirlockfile import MkdirLockFile except ImportError: notice = dedent( """ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/compat.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/compat.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/controller.py similarity index 96% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/controller.py index 1b2b943c..dafe55ca 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.py +++ b/venv/Lib/site-packages/pip/_vendor/cachecontrol/controller.py @@ -34,7 +34,7 @@ class CacheController(object): def __init__( self, cache=None, cache_etags=True, serializer=None, status_codes=None ): - self.cache = cache or DictCache() + self.cache = DictCache() if cache is None else cache self.cache_etags = cache_etags self.serializer = serializer or Serializer() self.cacheable_status_codes = status_codes or (200, 203, 300, 301) @@ -293,6 +293,15 @@ def cache_response(self, request, response, body=None, status_codes=None): if no_store: return + # https://tools.ietf.org/html/rfc7234#section-4.1: + # A Vary header field-value of "*" always fails to match. + # Storing such a response leads to a deserialization warning + # during cache lookup and is not allowed to ever be served, + # so storing it can be avoided. 
+ if "*" in response_headers.get("vary", ""): + logger.debug('Response header has "Vary: *"') + return + # If we've been given an etag, then keep the response if self.cache_etags and "etag" in response_headers: logger.debug("Caching due to etag") diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/filewrapper.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/filewrapper.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/heuristics.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/heuristics.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py similarity index 97% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py index ec43ff27..3b6ec2de 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.py +++ b/venv/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -107,6 +107,8 @@ def prepare_response(self, request, cached): """ # Special case the '*' Vary value as it means we cannot actually # determine if the cached response is suitable for this request. + # This case is also handled in the controller code when creating + # a cache entry, but is left here for backwards compatibility. if "*" in cached.get("vary", {}): return @@ -179,7 +181,7 @@ def _loads_v3(self, request, data): def _loads_v4(self, request, data): try: - cached = msgpack.loads(data, encoding="utf-8") + cached = msgpack.loads(data, raw=False) except ValueError: return diff --git a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py similarity index 92% rename from env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.py rename to venv/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py index 265bfc8b..d8e6fc6a 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.py +++ b/venv/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -13,7 +13,7 @@ def CacheControl( cacheable_methods=None, ): - cache = cache or DictCache() + cache = DictCache() if cache is None else cache adapter_class = adapter_class or CacheControlAdapter adapter = adapter_class( cache, diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/__init__.py b/venv/Lib/site-packages/pip/_vendor/certifi/__init__.py new file mode 100644 index 00000000..17aaf900 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/certifi/__init__.py @@ -0,0 +1,3 @@ +from .core import contents, where + +__version__ = "2020.12.05" diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/__main__.py b/venv/Lib/site-packages/pip/_vendor/certifi/__main__.py new file mode 100644 index 00000000..00376349 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from pip._vendor.certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff 
--git a/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..f207127c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-36.pyc new file mode 100644 index 00000000..05617c0c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-36.pyc new file mode 100644 index 00000000..dfe621c8 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/certifi/cacert.pem b/venv/Lib/site-packages/pip/_vendor/certifi/cacert.pem similarity index 86% rename from env/lib/python2.7/site-packages/pip/_vendor/certifi/cacert.pem rename to venv/Lib/site-packages/pip/_vendor/certifi/cacert.pem index 9ca290f5..c9459dc8 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/certifi/cacert.pem +++ b/venv/Lib/site-packages/pip/_vendor/certifi/cacert.pem @@ -58,38 +58,6 @@ AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== -----END CERTIFICATE----- -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - # Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited # Label: "Entrust.net Premium 2048 Secure Server CA" @@ -152,39 +120,6 @@ ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp -----END CERTIFICATE----- -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Label: "Entrust Root Certification Authority" @@ -220,112 +155,6 @@ eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m 0vdXcDazv/wor3ElhVsT/h5/WrQ8 -----END CERTIFICATE----- -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW -koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z -dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - # Issuer: CN=AAA Certificate Services O=Comodo CA Limited # Subject: CN=AAA Certificate Services O=Comodo CA Limited # Label: "Comodo AAA Services root" @@ -640,46 +469,6 @@ VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= -----END CERTIFICATE----- -# Issuer: O=Government Root Certification Authority -# Subject: O=Government Root Certification Authority -# Label: "Taiwan GRCA" -# Serial: 42023070807708724159991140556527066870 -# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e -# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 -# SHA256 Fingerprint: 
76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ -MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow -PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR -IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q -gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy -yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts -F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 -jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx -ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC -VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK -YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH -EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN -Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud -DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE -MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK -UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ -TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf -qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK -ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE -JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 -hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 -EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm -nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX -udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz -ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe -LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl -pYYsfPQS ------END CERTIFICATE----- - # Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Label: "DigiCert Assured ID Root CA" @@ -771,36 +560,6 @@ vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep +OkuE6N36B9K -----END CERTIFICATE----- -# Issuer: CN=Class 2 Primary CA O=Certplus -# Subject: CN=Class 2 Primary CA O=Certplus -# Label: "Certplus Class 2 Primary CA" -# Serial: 177770208045934040241468760488327595043 -# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b -# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb -# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb ------BEGIN CERTIFICATE----- -MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw -PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz -cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 -MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz -IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ -ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR -VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL -kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd -EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas -H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 
-HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud -DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 -QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu -Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ -AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 -yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR -FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA -ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB -kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 -l7+ijrRU ------END CERTIFICATE----- - # Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. # Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. # Label: "DST Root CA X3" @@ -911,104 +670,6 @@ hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u -----END CERTIFICATE----- -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv -MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - # Issuer: CN=SecureTrust CA O=SecureTrust Corporation # Subject: CN=SecureTrust CA O=SecureTrust Corporation # Label: "SecureTrust CA" @@ -1157,38 +818,6 @@ fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= -----END CERTIFICATE----- -# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GA CA" -# Serial: 86718877871133159090080555911823548314 -# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 -# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 -# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB -ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly -aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl -ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w -NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G -A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD -VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX 
-SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR -VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 -w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF -mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg -4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 -4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw -EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx -SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 -ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 -vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi -Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ -/L7fCg0= ------END CERTIFICATE----- - # Issuer: CN=Certigna O=Dhimyotis # Subject: CN=Certigna O=Dhimyotis # Label: "Certigna" @@ -1219,36 +848,6 @@ t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== -----END CERTIFICATE----- -# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Label: "Deutsche Telekom Root CA 2" -# Serial: 38 -# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 -# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf -# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 ------BEGIN CERTIFICATE----- -MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc -MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj -IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB -IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE -RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl -U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 -IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU -ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC -QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr -rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S -NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc -QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH -txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP -BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC -AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp -tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa -IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl -6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ -xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU -Cm26OWMohpLzGITY+9HPBVZkVw== ------END CERTIFICATE----- - # Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc # Subject: CN=Cybertrust Global Root O=Cybertrust, Inc # Label: "Cybertrust Global Root" @@ -1348,95 +947,6 @@ i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN 9u6wWk5JRFRYX0KD -----END CERTIFICATE----- -# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only -# Label: "GeoTrust Primary Certification Authority - G3" -# Serial: 28809105769928564313984085209975885599 -# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 -# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd -# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB -mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT -MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ -BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 -BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz -+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm -hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn -5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W -JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL -DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC -huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw -HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB -AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB -zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN -kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH -SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G -spki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G2" -# Serial: 71758320672825410020661621085256472406 -# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f -# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 -# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp -IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi -BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw -MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig -YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v -dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ -BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 -papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K -DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 -KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox -XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only -# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G3" -# Serial: 127614157056681299805556476275995414779 -# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 -# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 -# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB -rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV -BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa -Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl -LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u -MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl -ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm -gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 -YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf -b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 -9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S -zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk -OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV -HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA -2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW -oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c -KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM -m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu -MdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - # Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only # Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only # Label: "GeoTrust Primary Certification Authority - G2" @@ -1498,35 +1008,6 @@ lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 7M2CYfE45k+XmCpajQ== -----END CERTIFICATE----- -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" -# Serial: 63143484348153506665311985501458640051 -# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 -# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a -# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp -U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg -SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln -biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm -GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve -fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ -aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj -aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW -kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC -4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga -FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - # Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) # Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) # Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" @@ -1559,47 +1040,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX -DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ -BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - # Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post # Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post # Label: "Hongkong Post Root CA 1" @@ -2200,6 +1640,45 @@ t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 -----END CERTIFICATE----- +# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de 
Certificacio Catalanes +# Label: "EC-ACC" +# Serial: -23701579247955709139626555126524820479 +# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 +# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 +# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB +8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy +dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 +YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 +dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh +IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD +LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG +EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g +KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD +ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu +bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg +ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R +85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm +4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV +HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd +QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t +lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB +o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 +opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo +dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW +ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN +AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y +/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k +SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy +Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS +Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl +nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= +-----END CERTIFICATE----- + # Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority # Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority # Label: "Hellenic Academic and Research Institutions RootCA 2011" @@ -2412,38 +1891,6 @@ e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p TpPDpFQUWw== -----END CERTIFICATE----- -# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Label: "EE Certification Centre Root CA" -# Serial: 112324828676200291871926431888494945866 -# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f -# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 -# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 -czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG -CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy -MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl -ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS -b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy -euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO -bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw -WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d -MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE -1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ -zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB -BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF -BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV -v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG -E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW -iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v -GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= ------END CERTIFICATE----- - # Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH # Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH # Label: "D-TRUST Root Class 3 CA 2 2009" @@ -3809,47 +3256,6 @@ CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW 1KyLa2tJElMzrdfkviT8tQp21KW8EA== -----END CERTIFICATE----- -# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. -# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. 
-# Label: "LuxTrust Global Root 2" -# Serial: 59914338225734147123941058376788110305822489521 -# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c -# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f -# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 ------BEGIN CERTIFICATE----- -MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL -BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV -BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw -MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B -LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F -ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem -hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 -EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn -Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 -zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ -96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m -j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g -DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ -8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j -X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH -hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB -KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 -Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT -+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL -BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 -BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO -jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 -loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c -qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ -2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ -JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre -zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf -LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ -x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 -oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr ------END CERTIFICATE----- - # Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" @@ -4616,3 +4022,304 @@ L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG mpv0 -----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV 
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + 
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/core.py b/venv/Lib/site-packages/pip/_vendor/certifi/core.py new file mode 100644 index 00000000..b8140cf1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/certifi/core.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import os + + +class _PipPatchedCertificate(Exception): + pass + + +try: + # Return a certificate file on disk for a standalone pip zipapp running in + # an isolated build environment to use. Passing --cert to the standalone + # pip does not work since requests calls where() unconditionally on import. 
+ _PIP_STANDALONE_CERT = os.environ.get("_PIP_STANDALONE_CERT") + if _PIP_STANDALONE_CERT: + def where(): + return _PIP_STANDALONE_CERT + raise _PipPatchedCertificate() + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where(): + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + +except _PipPatchedCertificate: + pass + +except ImportError: + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text(_module, _path, encoding="ascii"): + with open(where(), "r", encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where(): + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + +def contents(): + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__init__.py b/venv/Lib/site-packages/pip/_vendor/chardet/__init__.py new file mode 100644 index 00000000..80ad2546 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/__init__.py @@ -0,0 +1,83 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +from .universaldetector import UniversalDetector +from .enums import InputState +from .version import __version__, VERSION + + +__all__ = ['UniversalDetector', 'detect', 'detect_all', '__version__', 'VERSION'] + + +def detect(byte_str): + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + return detector.close() + + +def detect_all(byte_str): + """ + Detect all the possible encodings of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + + detector = UniversalDetector() + detector.feed(byte_str) + detector.close() + + if detector._input_state == InputState.HIGH_BYTE: + results = [] + for prober in detector._charset_probers: + if prober.get_confidence() > detector.MINIMUM_THRESHOLD: + charset_name = prober.charset_name + lower_charset_name = prober.charset_name.lower() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if detector._has_win_bytes: + charset_name = detector.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + results.append({ + 'encoding': charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language, + }) + if len(results) > 0: + return sorted(results, key=lambda result: -result['confidence']) + + return [detector.result] diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..d958f6e1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-36.pyc new file mode 100644 index 00000000..def407ef Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-36.pyc new file mode 100644 index 00000000..a76a12fc Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-36.pyc new file mode 100644 index 00000000..2cbf2f84 Binary files /dev/null and 
b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-36.pyc new file mode 100644 index 00000000..7fce9b44 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-36.pyc new file mode 100644 index 00000000..5b0b83e0 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-36.pyc new file mode 100644 index 00000000..1d8c161e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-36.pyc new file mode 100644 index 00000000..9dd5445c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-36.pyc new file mode 100644 index 00000000..fb7df1d7 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc new file mode 100644 index 00000000..70032b9d Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-36.pyc new file mode 100644 index 00000000..b041eb0e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-36.pyc new file mode 100644 index 00000000..b0916684 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-36.pyc new file mode 100644 index 00000000..075af67e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-36.pyc new file mode 100644 index 00000000..b5ededde Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-36.pyc new file mode 100644 index 00000000..f21f8bf1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-36.pyc new file mode 100644 index 00000000..cca01fee Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-36.pyc new file mode 100644 index 00000000..6a203b9f Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-36.pyc new file mode 100644 index 00000000..54b12318 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-36.pyc new file mode 100644 index 00000000..4c59d3c3 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-36.pyc new file mode 100644 index 00000000..d06bcf53 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-36.pyc new file mode 100644 index 00000000..3a1da5bd Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-36.pyc new file mode 100644 index 00000000..9a018049 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc new file mode 100644 index 00000000..94eb9389 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-36.pyc new file mode 100644 index 00000000..c7dbf350 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-36.pyc 
b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-36.pyc new file mode 100644 index 00000000..fa399406 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-36.pyc new file mode 100644 index 00000000..16b9ee50 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-36.pyc new file mode 100644 index 00000000..02665fe7 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-36.pyc new file mode 100644 index 00000000..f3059165 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-36.pyc new file mode 100644 index 00000000..95f8acd6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-36.pyc new file mode 100644 index 00000000..9390a8f6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-36.pyc new file mode 100644 index 00000000..6320e190 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc new file mode 100644 index 00000000..8450bea5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-36.pyc new file mode 100644 index 00000000..bb97f846 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-36.pyc new file mode 100644 index 00000000..6dcbefa5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc 
b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc new file mode 100644 index 00000000..62e42f60 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-36.pyc new file mode 100644 index 00000000..ff0ccfd9 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-36.pyc new file mode 100644 index 00000000..c6e1c7d8 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-36.pyc new file mode 100644 index 00000000..3f78c3d5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-36.pyc new file mode 100644 index 00000000..659fd02b Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/big5freq.py b/venv/Lib/site-packages/pip/_vendor/chardet/big5freq.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/big5freq.py rename to venv/Lib/site-packages/pip/_vendor/chardet/big5freq.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/big5prober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/big5prober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.py b/venv/Lib/site-packages/pip/_vendor/chardet/chardistribution.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.py rename to venv/Lib/site-packages/pip/_vendor/chardet/chardistribution.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/charsetgroupprober.py similarity index 98% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/charsetgroupprober.py index 8b3738ef..5812cef0 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py +++ b/venv/Lib/site-packages/pip/_vendor/chardet/charsetgroupprober.py @@ -73,6 +73,7 @@ def feed(self, byte_str): continue if state == ProbingState.FOUND_IT: self._best_guess_prober = prober + self._state = ProbingState.FOUND_IT return self.state elif state == ProbingState.NOT_ME: prober.active = False diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/charsetprober.py similarity index 100% rename from 
env/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/charsetprober.py diff --git a/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/top_level.txt b/venv/Lib/site-packages/pip/_vendor/chardet/cli/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/top_level.txt rename to venv/Lib/site-packages/pip/_vendor/chardet/cli/__init__.py diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..a3add866 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-36.pyc new file mode 100644 index 00000000..147a7107 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/chardetect.py b/venv/Lib/site-packages/pip/_vendor/chardet/cli/chardetect.py similarity index 92% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/chardetect.py rename to venv/Lib/site-packages/pip/_vendor/chardet/cli/chardetect.py index c61136b6..6d6f93aa 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cli/chardetect.py +++ b/venv/Lib/site-packages/pip/_vendor/chardet/cli/chardetect.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Script which takes one or more file paths and reports on their detected encodings @@ -45,10 +44,10 @@ def description_of(lines, name='stdin'): if PY2: name = name.decode(sys.getfilesystemencoding(), 'ignore') if result['encoding']: - return '{0}: {1} with confidence {2}'.format(name, result['encoding'], + return '{}: {} with confidence {}'.format(name, result['encoding'], result['confidence']) else: - return '{0}: no result'.format(name) + return '{}: no result'.format(name) def main(argv=None): @@ -69,7 +68,7 @@ def main(argv=None): type=argparse.FileType('rb'), nargs='*', default=[sys.stdin if PY2 else sys.stdin.buffer]) parser.add_argument('--version', action='version', - version='%(prog)s {0}'.format(__version__)) + version='%(prog)s {}'.format(__version__)) args = parser.parse_args(argv) for f in args.input: diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/codingstatemachine.py b/venv/Lib/site-packages/pip/_vendor/chardet/codingstatemachine.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/codingstatemachine.py rename to venv/Lib/site-packages/pip/_vendor/chardet/codingstatemachine.py diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/compat.py b/venv/Lib/site-packages/pip/_vendor/chardet/compat.py new file mode 100644 index 00000000..8941572b --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/compat.py @@ -0,0 +1,36 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + string_types = (str, unicode) + text_type = unicode + iteritems = dict.iteritems +else: + PY2 = False + PY3 = True + string_types = (bytes, str) + text_type = str + iteritems = dict.items diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/cp949prober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/cp949prober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/enums.py b/venv/Lib/site-packages/pip/_vendor/chardet/enums.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/enums.py rename to venv/Lib/site-packages/pip/_vendor/chardet/enums.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/escprober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/escprober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/escsm.py b/venv/Lib/site-packages/pip/_vendor/chardet/escsm.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/escsm.py rename to venv/Lib/site-packages/pip/_vendor/chardet/escsm.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/eucjpprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/eucjpprober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/eucjpprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/eucjpprober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrfreq.py b/venv/Lib/site-packages/pip/_vendor/chardet/euckrfreq.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrfreq.py rename to venv/Lib/site-packages/pip/_vendor/chardet/euckrfreq.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/euckrprober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/euckrprober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwfreq.py b/venv/Lib/site-packages/pip/_vendor/chardet/euctwfreq.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwfreq.py rename to venv/Lib/site-packages/pip/_vendor/chardet/euctwfreq.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/euctwprober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/euctwprober.py diff --git 
a/env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312freq.py b/venv/Lib/site-packages/pip/_vendor/chardet/gb2312freq.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312freq.py rename to venv/Lib/site-packages/pip/_vendor/chardet/gb2312freq.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/gb2312prober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/gb2312prober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/hebrewprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/hebrewprober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/hebrewprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/hebrewprober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/jisfreq.py b/venv/Lib/site-packages/pip/_vendor/chardet/jisfreq.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/jisfreq.py rename to venv/Lib/site-packages/pip/_vendor/chardet/jisfreq.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.py b/venv/Lib/site-packages/pip/_vendor/chardet/jpcntx.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.py rename to venv/Lib/site-packages/pip/_vendor/chardet/jpcntx.py diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langbulgarianmodel.py new file mode 100644 index 00000000..e963a509 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/langbulgarianmodel.py @@ -0,0 +1,4650 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +BULGARIAN_LANG_MODEL = { + 63: { # 'e' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 45: { # '\xad' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' 
+ 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 31: { # 'А' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 2, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 0, # 'и' + 26: 2, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 1, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 32: { # 'Б' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 2, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 2, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 1, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 35: { # 'В' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 2, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 43: { # 'Г' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 
28: 1, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 37: { # 'Д' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 2, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 44: { # 'Е' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Х' + 53: 2, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 0, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 1, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 55: { # 'Ж' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 47: { # 'З' + 63: 0, # 
'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 40: { # 'И' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 2, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 3, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 0, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 59: { # 'Й' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 33: { # 'К' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 
13: 0, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 46: { # 'Л' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 2, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 38: { # 'М' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 36: { # 'Н' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 2, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 1, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 2, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 41: { # 'О' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 
57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 2, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 30: { # 'П' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 2, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 39: { # 'Р' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 2, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 1, # 'с' + 5: 0, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 28: { # 'С' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 3, # 'А' + 32: 2, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 34: { # 'Т' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 2, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, 
# 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 51: { # 'У' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 2, # 'с' + 5: 1, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 48: { # 'Ф' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 49: { # 'Х' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, 
# 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 53: { # 'Ц' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 50: { # 'Ч' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 54: { # 'Ш' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 57: { # 'Щ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, 
# 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 61: { # 'Ъ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 60: { # 'Ю' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 2, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 56: { # 'Я' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 1: { # 'а' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' 
+ 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 18: { # 'б' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 0, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 2, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 3, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 9: { # 'в' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 0, # 'в' + 20: 2, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 20: { # 'г' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' 
+ }, + 11: { # 'д' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 1, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 3: { # 'е' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 2, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 23: { # 'ж' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 15: { # 'з' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 
6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 2: { # 'и' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 1, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 26: { # 'й' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 12: { # 'к' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 3, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 10: { # 'л' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 
0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 1, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 3, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 14: { # 'м' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 1, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 6: { # 'н' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 2, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 3, # 'ф' + 25: 2, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 4: { # 'о' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 13: { # 'п' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' 
+ 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 7: { # 'р' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 1, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 2, # 'ч' + 27: 3, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 8: { # 'с' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 5: { # 'т' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 
22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 19: { # 'у' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 2, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 2, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 2, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 29: { # 'ф' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 2, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 25: { # 'х' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 22: { # 'ц' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 
20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 0, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 21: { # 'ч' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 27: { # 'ш' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 2, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 1, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 24: { # 'щ' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 1, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 17: { # 'ъ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 
0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 1, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 3, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 2, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 52: { # 'ь' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 1, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 42: { # 'ю' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 1, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 1, # 'е' + 23: 2, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 1, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 16: { # 'я' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 1, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 3, # 'х' + 22: 2, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 2, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 
0, # 'є' + 62: 0, # '№' + }, + 58: { # 'є' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 62: { # '№' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +ISO_8859_5_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 77, # 'A' + 66: 90, # 'B' + 67: 99, # 'C' + 68: 100, # 'D' + 69: 72, # 'E' + 70: 109, # 'F' + 71: 107, # 'G' + 72: 101, # 'H' + 73: 79, # 'I' + 74: 185, # 'J' + 75: 81, # 'K' + 76: 102, # 'L' + 77: 76, # 'M' + 78: 94, # 'N' + 79: 82, # 'O' + 80: 110, # 'P' + 81: 186, # 'Q' + 82: 108, # 'R' + 83: 91, # 'S' + 84: 74, # 'T' + 85: 119, # 'U' + 86: 84, # 'V' + 87: 96, # 'W' + 88: 111, # 'X' + 89: 187, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 65, # 'a' + 98: 69, # 'b' + 99: 70, # 'c' + 100: 66, # 'd' + 101: 63, # 'e' + 102: 68, # 'f' + 103: 112, # 'g' + 104: 103, # 'h' + 105: 92, # 'i' + 106: 194, # 'j' + 107: 104, # 'k' + 108: 95, # 'l' + 109: 86, # 'm' + 110: 87, # 'n' + 111: 71, # 'o' + 112: 116, # 'p' + 113: 195, # 'q' + 114: 85, # 'r' + 115: 93, # 's' + 116: 97, # 't' + 117: 113, # 'u' + 118: 196, # 'v' + 119: 197, # 'w' + 120: 198, # 'x' + 121: 199, # 'y' + 122: 200, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 194, # '\x80' + 129: 195, # '\x81' + 130: 196, # '\x82' + 131: 197, # '\x83' + 132: 198, # '\x84' + 133: 199, # '\x85' + 134: 200, # '\x86' + 135: 201, # '\x87' + 136: 202, # '\x88' + 137: 203, # '\x89' + 138: 204, # '\x8a' + 139: 205, # '\x8b' + 140: 206, # '\x8c' + 141: 207, # '\x8d' + 142: 208, # '\x8e' + 143: 209, # '\x8f' + 144: 210, # '\x90' + 145: 211, # '\x91' + 146: 212, # '\x92' + 147: 213, # '\x93' + 148: 214, # '\x94' + 149: 215, # '\x95' + 150: 216, # '\x96' + 151: 217, # '\x97' + 152: 218, # '\x98' + 153: 219, # '\x99' + 154: 220, # '\x9a' + 155: 221, # '\x9b' + 156: 222, # '\x9c' + 157: 223, # '\x9d' + 158: 224, # '\x9e' + 159: 225, # '\x9f' + 160: 81, # '\xa0' + 161: 226, # 'Ё' + 162: 227, # 'Ђ' + 163: 228, # 'Ѓ' + 164: 229, # 'Є' + 165: 230, # 'Ѕ' + 166: 105, # 'І' + 167: 231, # 'Ї' + 168: 232, # 'Ј' + 169: 233, # 'Љ' + 170: 234, # 'Њ' + 171: 235, # 'Ћ' + 172: 236, # 'Ќ' + 173: 45, # '\xad' + 174: 237, # 'Ў' + 175: 238, # 'Џ' + 176: 31, # 'А' + 177: 32, # 'Б' + 178: 35, # 'В' + 179: 43, # 'Г' + 180: 37, # 'Д' + 181: 44, # 'Е' + 182: 55, # 'Ж' + 183: 47, # 'З' + 184: 40, # 'И' + 185: 59, # 'Й' + 186: 33, # 'К' + 187: 46, # 'Л' + 188: 38, # 'М' + 189: 36, # 'Н' + 190: 41, # 'О' + 191: 30, # 'П' + 192: 39, # 'Р' + 193: 28, # 'С' + 194: 34, # 'Т' + 195: 51, # 'У' + 196: 48, # 'Ф' + 197: 49, # 'Х' + 198: 53, # 'Ц' + 199: 50, # 'Ч' + 200: 54, # 'Ш' + 201: 57, # 'Щ' + 202: 61, # 'Ъ' + 203: 239, # 'Ы' + 204: 67, # 'Ь' + 205: 240, # 'Э' + 206: 60, # 'Ю' + 207: 56, # 'Я' + 208: 1, # 'а' + 209: 18, # 'б' + 210: 9, # 'в' + 211: 20, # 'г' + 212: 11, # 'д' + 213: 3, # 'е' + 214: 23, # 'ж' + 215: 15, # 'з' + 216: 2, # 'и' + 217: 26, # 'й' + 218: 12, # 'к' + 219: 10, # 'л' + 220: 14, # 'м' + 221: 6, # 'н' + 222: 4, # 'о' + 223: 13, # 'п' + 224: 7, # 'р' + 225: 8, # 'с' + 226: 5, # 'т' + 227: 19, # 'у' + 228: 29, # 'ф' + 229: 25, # 'х' + 230: 22, # 'ц' + 231: 21, # 'ч' + 232: 27, # 'ш' + 233: 24, # 'щ' + 234: 17, # 'ъ' + 235: 75, # 'ы' + 236: 52, # 'ь' + 237: 241, # 'э' + 238: 42, # 'ю' + 239: 16, # 'я' + 240: 62, # '№' + 241: 242, # 'ё' + 242: 243, # 'ђ' + 243: 244, # 'ѓ' + 244: 58, # 'є' + 245: 245, # 'ѕ' + 246: 98, # 'і' + 247: 246, # 'ї' + 248: 247, # 'ј' + 249: 248, # 'љ' + 250: 249, # 'њ' + 251: 250, # 'ћ' + 252: 251, # 'ќ' + 253: 91, # '§' + 254: 252, # 'ў' + 255: 253, # 'џ' +} + +ISO_8859_5_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5', + language='Bulgarian', + 
char_to_order_map=ISO_8859_5_BULGARIAN_CHAR_TO_ORDER, + language_model=BULGARIAN_LANG_MODEL, + typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet='АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя') + +WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 77, # 'A' + 66: 90, # 'B' + 67: 99, # 'C' + 68: 100, # 'D' + 69: 72, # 'E' + 70: 109, # 'F' + 71: 107, # 'G' + 72: 101, # 'H' + 73: 79, # 'I' + 74: 185, # 'J' + 75: 81, # 'K' + 76: 102, # 'L' + 77: 76, # 'M' + 78: 94, # 'N' + 79: 82, # 'O' + 80: 110, # 'P' + 81: 186, # 'Q' + 82: 108, # 'R' + 83: 91, # 'S' + 84: 74, # 'T' + 85: 119, # 'U' + 86: 84, # 'V' + 87: 96, # 'W' + 88: 111, # 'X' + 89: 187, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 65, # 'a' + 98: 69, # 'b' + 99: 70, # 'c' + 100: 66, # 'd' + 101: 63, # 'e' + 102: 68, # 'f' + 103: 112, # 'g' + 104: 103, # 'h' + 105: 92, # 'i' + 106: 194, # 'j' + 107: 104, # 'k' + 108: 95, # 'l' + 109: 86, # 'm' + 110: 87, # 'n' + 111: 71, # 'o' + 112: 116, # 'p' + 113: 195, # 'q' + 114: 85, # 'r' + 115: 93, # 's' + 116: 97, # 't' + 117: 113, # 'u' + 118: 196, # 'v' + 119: 197, # 'w' + 120: 198, # 'x' + 121: 199, # 'y' + 122: 200, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 206, # 'Ђ' + 129: 207, # 'Ѓ' + 130: 208, # '‚' + 131: 209, # 'ѓ' + 132: 210, # '„' + 133: 211, # '…' + 134: 212, # '†' + 135: 213, # '‡' + 136: 120, # '€' + 137: 214, # '‰' + 138: 215, # 'Љ' + 139: 216, # '‹' + 140: 217, # 'Њ' + 141: 218, # 'Ќ' + 142: 219, # 'Ћ' + 143: 220, # 'Џ' + 144: 221, # 'ђ' + 145: 78, # '‘' + 146: 64, # '’' + 147: 83, # '“' + 148: 121, # '”' + 149: 98, # '•' + 150: 117, # '–' + 151: 105, # '—' + 152: 222, # None + 153: 223, # '™' + 154: 224, # 'љ' + 155: 225, # '›' + 156: 226, # 'њ' + 157: 227, # 'ќ' + 158: 228, # 'ћ' + 159: 229, # 'џ' + 160: 88, # '\xa0' + 161: 230, # 'Ў' + 162: 231, # 'ў' + 163: 232, # 'Ј' + 164: 233, # '¤' + 165: 122, # 'Ґ' + 166: 89, # '¦' + 167: 106, # '§' + 168: 234, # 'Ё' + 169: 235, # '©' + 170: 236, # 'Є' + 171: 237, # '«' + 172: 238, # '¬' + 173: 45, # '\xad' + 174: 239, # '®' + 175: 240, # 'Ї' + 176: 73, # '°' + 177: 80, # '±' + 178: 118, # 'І' + 179: 114, # 'і' + 180: 241, # 'ґ' + 181: 242, # 'µ' + 182: 243, # '¶' + 183: 244, # '·' + 184: 245, 
# 'ё' + 185: 62, # '№' + 186: 58, # 'є' + 187: 246, # '»' + 188: 247, # 'ј' + 189: 248, # 'Ѕ' + 190: 249, # 'ѕ' + 191: 250, # 'ї' + 192: 31, # 'А' + 193: 32, # 'Б' + 194: 35, # 'В' + 195: 43, # 'Г' + 196: 37, # 'Д' + 197: 44, # 'Е' + 198: 55, # 'Ж' + 199: 47, # 'З' + 200: 40, # 'И' + 201: 59, # 'Й' + 202: 33, # 'К' + 203: 46, # 'Л' + 204: 38, # 'М' + 205: 36, # 'Н' + 206: 41, # 'О' + 207: 30, # 'П' + 208: 39, # 'Р' + 209: 28, # 'С' + 210: 34, # 'Т' + 211: 51, # 'У' + 212: 48, # 'Ф' + 213: 49, # 'Х' + 214: 53, # 'Ц' + 215: 50, # 'Ч' + 216: 54, # 'Ш' + 217: 57, # 'Щ' + 218: 61, # 'Ъ' + 219: 251, # 'Ы' + 220: 67, # 'Ь' + 221: 252, # 'Э' + 222: 60, # 'Ю' + 223: 56, # 'Я' + 224: 1, # 'а' + 225: 18, # 'б' + 226: 9, # 'в' + 227: 20, # 'г' + 228: 11, # 'д' + 229: 3, # 'е' + 230: 23, # 'ж' + 231: 15, # 'з' + 232: 2, # 'и' + 233: 26, # 'й' + 234: 12, # 'к' + 235: 10, # 'л' + 236: 14, # 'м' + 237: 6, # 'н' + 238: 4, # 'о' + 239: 13, # 'п' + 240: 7, # 'р' + 241: 8, # 'с' + 242: 5, # 'т' + 243: 19, # 'у' + 244: 29, # 'ф' + 245: 25, # 'х' + 246: 22, # 'ц' + 247: 21, # 'ч' + 248: 27, # 'ш' + 249: 24, # 'щ' + 250: 17, # 'ъ' + 251: 75, # 'ы' + 252: 52, # 'ь' + 253: 253, # 'э' + 254: 42, # 'ю' + 255: 16, # 'я' +} + +WINDOWS_1251_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251', + language='Bulgarian', + char_to_order_map=WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER, + language_model=BULGARIAN_LANG_MODEL, + typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet='АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя') + diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langgreekmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langgreekmodel.py new file mode 100644 index 00000000..d99528ed --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/langgreekmodel.py @@ -0,0 +1,4398 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +GREEK_LANG_MODEL = { + 60: { # 'e' + 60: 2, # 'e' + 55: 1, # 'o' + 58: 2, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 55: { # 'o' + 60: 0, # 'e' + 55: 2, # 'o' + 58: 2, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, 
# 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 1, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 58: { # 't' + 60: 2, # 'e' + 55: 1, # 'o' + 58: 1, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 1, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 36: { # '·' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 61: { # 'Ά' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 1, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 1, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 46: { # 'Έ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 
'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 2, # 'β' + 20: 2, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 1, # 'σ' + 2: 2, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 54: { # 'Ό' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 31: { # 'Α' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 2, # 'Β' + 43: 2, # 'Γ' + 41: 1, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 2, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 1, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 2, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 2, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 1, # 'θ' + 5: 0, # 'ι' + 11: 2, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 2, # 'ς' + 7: 2, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 51: { # 'Β' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 1, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 43: { # 'Γ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 1, # 'Α' + 51: 0, # 'Β' + 43: 2, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 1, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' 
+ 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 41: { # 'Δ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 1, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 34: { # 'Ε' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 2, # 'Γ' + 41: 2, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 1, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 2, # 'Χ' + 57: 2, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 1, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 1, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 2, # 'τ' + 12: 2, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 1, # 'ύ' + 27: 0, # 'ώ' + }, + 40: { # 'Η' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 1, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 2, # 'Θ' + 47: 0, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 52: { # 'Θ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 
51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 1, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 47: { # 'Ι' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 1, # 'Β' + 43: 1, # 'Γ' + 41: 2, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 2, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 1, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 1, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 44: { # 'Κ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 1, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 1, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 53: { # 'Λ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 2, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 2, # 'Σ' + 33: 0, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 1, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 
'ώ' + }, + 38: { # 'Μ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 2, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 2, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 49: { # 'Ν' + 60: 2, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 1, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 1, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 59: { # 'Ξ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 39: { # 'Ο' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 1, # 'Β' + 43: 2, # 'Γ' + 41: 2, # 'Δ' + 34: 2, # 'Ε' + 40: 1, # 'Η' + 52: 2, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 2, # 'Φ' + 50: 2, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 2, # 'ρ' + 14: 0, # 
'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 2, # 'υ' + 28: 1, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 35: { # 'Π' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 2, # 'Λ' + 38: 1, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 1, # 'έ' + 22: 1, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 3, # 'ώ' + }, + 48: { # 'Ρ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 1, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 1, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 1, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 37: { # 'Σ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 2, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 2, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 2, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 2, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 33: { # 'Τ' + 60: 0, # 'e' + 55: 1, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 
'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 2, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 3, # 'ώ' + }, + 45: { # 'Υ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 2, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 2, # 'Η' + 52: 2, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 1, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 56: { # 'Φ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 1, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 1, # 'ύ' + 27: 1, # 'ώ' + }, + 50: { # 'Χ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 1, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 1, # 'Ν' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 1, # 'Ω' + 17: 2, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 57: { # 'Ω' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 1, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 
'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 2, # 'ς' + 7: 2, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 17: { # 'ά' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 3, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 18: { # 'έ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 22: { # 'ή' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 15: { # 'ί' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' 
+ 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 3, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 1, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 1: { # 'α' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 3, # 'ζ' + 13: 1, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 29: { # 'β' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 2, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 3, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 20: { # 'γ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 3, # 'ώ' + }, + 21: { # 'δ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 
0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 3: { # 'ε' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ό' + 26: 3, # 'ύ' + 27: 2, # 'ώ' + }, + 32: { # 'ζ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 1, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 2, # 'ώ' + }, + 13: { # 'η' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 25: { # 'θ' + 60: 0, # 'e' + 55: 0, # 
'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 5: { # 'ι' + 60: 0, # 'e' + 55: 1, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 0, # 'ύ' + 27: 3, # 'ώ' + }, + 11: { # 'κ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 16: { # 'λ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 1, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 
2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 10: { # 'μ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 3, # 'φ' + 23: 0, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 6: { # 'ν' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 30: { # 'ξ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ό' + 26: 3, # 'ύ' + 27: 1, # 'ώ' + }, + 4: { # 'ο' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 
3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 1, # 'ό' + 26: 3, # 'ύ' + 27: 2, # 'ώ' + }, + 9: { # 'π' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 3, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 2, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 3, # 'ώ' + }, + 8: { # 'ρ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 14: { # 'ς' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 7: { # 'σ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 
'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 2, # 'ώ' + }, + 2: { # 'τ' + 60: 0, # 'e' + 55: 2, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 12: { # 'υ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 2, # 'ώ' + }, + 28: { # 'φ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 1, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 23: { # 'χ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' 
+ 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 42: { # 'ψ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 1, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 1, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 24: { # 'ω' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 1, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 19: { # 'ό' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 1, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 26: { # 'ύ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 
44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 2, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 27: { # 'ώ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 1, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 1, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 1, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1253_GREEK_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 82, # 'A' + 66: 100, # 'B' + 67: 104, # 'C' + 68: 94, # 'D' + 69: 98, # 'E' + 70: 101, # 'F' + 71: 116, # 'G' + 72: 102, # 'H' + 73: 111, # 'I' + 74: 187, # 'J' + 75: 117, # 'K' + 76: 92, # 'L' + 77: 88, # 'M' + 78: 113, # 'N' + 79: 85, # 'O' + 80: 79, # 'P' + 81: 118, # 'Q' + 82: 105, # 'R' + 83: 83, # 'S' + 84: 67, # 'T' + 85: 114, # 'U' + 86: 119, # 'V' + 87: 95, # 'W' + 88: 99, # 'X' + 89: 109, # 'Y' + 90: 188, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 72, # 'a' + 98: 70, # 'b' + 99: 80, # 'c' + 100: 81, # 'd' + 101: 60, # 'e' + 102: 96, # 'f' + 103: 93, # 'g' + 104: 89, # 'h' + 105: 68, # 'i' + 106: 120, # 'j' + 107: 97, # 'k' + 108: 77, # 'l' + 109: 86, # 'm' + 110: 69, # 'n' + 111: 55, # 'o' + 112: 78, # 'p' + 113: 115, # 'q' + 114: 65, # 'r' + 115: 66, # 's' + 116: 58, # 't' + 117: 76, # 'u' + 118: 106, # 'v' + 119: 103, # 'w' + 120: 87, # 'x' + 121: 107, # 'y' + 122: 112, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 255, # '€' + 129: 255, # None + 130: 255, # '‚' + 131: 255, # 'ƒ' + 132: 255, # '„' + 133: 255, # '…' + 134: 255, # '†' + 135: 255, # '‡' + 136: 255, # None + 137: 255, # '‰' + 138: 255, # None + 139: 255, # '‹' + 140: 255, # None + 141: 255, # None + 142: 255, # None + 143: 255, # None + 144: 255, # None + 145: 255, # '‘' + 146: 255, # '’' + 147: 255, # '“' + 148: 255, # '”' + 149: 255, # '•' + 150: 255, # '–' + 151: 255, # '—' + 152: 255, # None + 153: 255, # '™' + 154: 255, # None + 155: 255, # '›' + 156: 255, # None + 157: 255, # None + 158: 255, # None + 159: 255, # None + 160: 253, # '\xa0' + 161: 233, # '΅' + 162: 61, # 'Ά' + 163: 253, # '£' + 164: 253, # '¤' + 165: 253, # '¥' + 166: 253, # '¦' + 167: 253, # '§' + 168: 253, # '¨' + 169: 253, # '©' + 170: 253, # None + 171: 253, # '«' + 172: 253, # '¬' + 173: 74, # '\xad' + 174: 253, # '®' + 175: 253, # '―' + 176: 253, # '°' + 177: 253, # '±' + 178: 253, # '²' + 179: 253, # '³' + 180: 247, # '΄' + 181: 253, # 'µ' + 182: 253, # '¶' + 183: 36, # '·' + 184: 46, # 'Έ' + 185: 71, # 'Ή' + 186: 73, # 'Ί' + 187: 253, # '»' + 188: 54, # 'Ό' + 189: 253, # '½' + 190: 108, # 'Ύ' + 191: 123, # 'Ώ' + 192: 110, # 'ΐ' + 193: 31, # 'Α' + 194: 51, # 'Β' + 195: 43, # 'Γ' + 196: 41, # 'Δ' + 197: 34, # 'Ε' + 198: 91, # 'Ζ' + 199: 40, # 'Η' + 200: 52, # 'Θ' + 201: 47, # 'Ι' + 202: 44, # 'Κ' + 203: 53, # 'Λ' + 204: 38, # 'Μ' + 205: 49, # 'Ν' + 206: 59, # 'Ξ' + 207: 39, # 'Ο' + 208: 35, # 'Π' + 209: 48, # 'Ρ' + 210: 250, # None + 211: 37, # 'Σ' + 212: 33, # 'Τ' + 213: 45, # 'Υ' + 214: 56, # 'Φ' + 215: 50, # 'Χ' + 216: 84, # 'Ψ' + 217: 57, # 'Ω' + 218: 120, # 'Ϊ' + 219: 121, # 'Ϋ' + 220: 17, # 'ά' + 221: 18, # 'έ' + 222: 22, # 'ή' + 223: 15, # 'ί' + 224: 124, # 'ΰ' + 225: 1, # 'α' + 226: 29, # 'β' + 227: 20, # 'γ' + 228: 21, # 'δ' + 229: 3, # 'ε' + 230: 32, # 'ζ' + 231: 13, # 'η' + 232: 25, # 'θ' + 233: 5, # 'ι' + 234: 11, # 'κ' + 235: 16, # 'λ' + 236: 10, # 'μ' + 237: 6, # 'ν' + 238: 30, # 'ξ' + 239: 4, # 'ο' + 240: 9, # 'π' + 241: 8, # 'ρ' + 242: 14, # 'ς' + 243: 7, # 'σ' + 244: 2, # 'τ' + 245: 12, # 'υ' + 246: 28, # 'φ' + 247: 23, # 'χ' + 248: 42, # 'ψ' + 249: 24, # 'ω' + 250: 64, # 'ϊ' + 251: 75, # 'ϋ' + 252: 19, # 'ό' + 253: 26, # 'ύ' + 254: 27, # 'ώ' + 255: 253, # None +} + +WINDOWS_1253_GREEK_MODEL = SingleByteCharSetModel(charset_name='windows-1253', + language='Greek', + char_to_order_map=WINDOWS_1253_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, + 
typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet='ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ') + +ISO_8859_7_GREEK_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 82, # 'A' + 66: 100, # 'B' + 67: 104, # 'C' + 68: 94, # 'D' + 69: 98, # 'E' + 70: 101, # 'F' + 71: 116, # 'G' + 72: 102, # 'H' + 73: 111, # 'I' + 74: 187, # 'J' + 75: 117, # 'K' + 76: 92, # 'L' + 77: 88, # 'M' + 78: 113, # 'N' + 79: 85, # 'O' + 80: 79, # 'P' + 81: 118, # 'Q' + 82: 105, # 'R' + 83: 83, # 'S' + 84: 67, # 'T' + 85: 114, # 'U' + 86: 119, # 'V' + 87: 95, # 'W' + 88: 99, # 'X' + 89: 109, # 'Y' + 90: 188, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 72, # 'a' + 98: 70, # 'b' + 99: 80, # 'c' + 100: 81, # 'd' + 101: 60, # 'e' + 102: 96, # 'f' + 103: 93, # 'g' + 104: 89, # 'h' + 105: 68, # 'i' + 106: 120, # 'j' + 107: 97, # 'k' + 108: 77, # 'l' + 109: 86, # 'm' + 110: 69, # 'n' + 111: 55, # 'o' + 112: 78, # 'p' + 113: 115, # 'q' + 114: 65, # 'r' + 115: 66, # 's' + 116: 58, # 't' + 117: 76, # 'u' + 118: 106, # 'v' + 119: 103, # 'w' + 120: 87, # 'x' + 121: 107, # 'y' + 122: 112, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 255, # '\x80' + 129: 255, # '\x81' + 130: 255, # '\x82' + 131: 255, # '\x83' + 132: 255, # '\x84' + 133: 255, # '\x85' + 134: 255, # '\x86' + 135: 255, # '\x87' + 136: 255, # '\x88' + 137: 255, # '\x89' + 138: 255, # '\x8a' + 139: 255, # '\x8b' + 140: 255, # '\x8c' + 141: 255, # '\x8d' + 142: 255, # '\x8e' + 143: 255, # '\x8f' + 144: 255, # '\x90' + 145: 255, # '\x91' + 146: 255, # '\x92' + 147: 255, # '\x93' + 148: 255, # '\x94' + 149: 255, # '\x95' + 150: 255, # '\x96' + 151: 255, # '\x97' + 152: 255, # '\x98' + 153: 255, # '\x99' + 154: 255, # '\x9a' + 155: 255, # '\x9b' + 156: 255, # '\x9c' + 157: 255, # '\x9d' + 158: 255, # '\x9e' + 159: 255, # '\x9f' + 160: 253, # '\xa0' + 161: 233, # '‘' + 162: 90, # '’' + 163: 253, # '£' + 164: 253, # '€' + 165: 253, # '₯' + 166: 253, # '¦' + 167: 253, # '§' + 168: 253, # '¨' + 169: 253, # '©' + 170: 253, # 'ͺ' + 171: 253, # '«' + 172: 253, # '¬' + 173: 74, # '\xad' + 174: 253, # None + 175: 253, # '―' + 176: 253, # '°' + 177: 253, # '±' + 178: 253, # '²' + 179: 253, # '³' + 180: 247, # '΄' + 181: 248, # '΅' + 182: 61, # 'Ά' + 183: 36, # '·' + 184: 46, # 
'Έ' + 185: 71, # 'Ή' + 186: 73, # 'Ί' + 187: 253, # '»' + 188: 54, # 'Ό' + 189: 253, # '½' + 190: 108, # 'Ύ' + 191: 123, # 'Ώ' + 192: 110, # 'ΐ' + 193: 31, # 'Α' + 194: 51, # 'Β' + 195: 43, # 'Γ' + 196: 41, # 'Δ' + 197: 34, # 'Ε' + 198: 91, # 'Ζ' + 199: 40, # 'Η' + 200: 52, # 'Θ' + 201: 47, # 'Ι' + 202: 44, # 'Κ' + 203: 53, # 'Λ' + 204: 38, # 'Μ' + 205: 49, # 'Ν' + 206: 59, # 'Ξ' + 207: 39, # 'Ο' + 208: 35, # 'Π' + 209: 48, # 'Ρ' + 210: 250, # None + 211: 37, # 'Σ' + 212: 33, # 'Τ' + 213: 45, # 'Υ' + 214: 56, # 'Φ' + 215: 50, # 'Χ' + 216: 84, # 'Ψ' + 217: 57, # 'Ω' + 218: 120, # 'Ϊ' + 219: 121, # 'Ϋ' + 220: 17, # 'ά' + 221: 18, # 'έ' + 222: 22, # 'ή' + 223: 15, # 'ί' + 224: 124, # 'ΰ' + 225: 1, # 'α' + 226: 29, # 'β' + 227: 20, # 'γ' + 228: 21, # 'δ' + 229: 3, # 'ε' + 230: 32, # 'ζ' + 231: 13, # 'η' + 232: 25, # 'θ' + 233: 5, # 'ι' + 234: 11, # 'κ' + 235: 16, # 'λ' + 236: 10, # 'μ' + 237: 6, # 'ν' + 238: 30, # 'ξ' + 239: 4, # 'ο' + 240: 9, # 'π' + 241: 8, # 'ρ' + 242: 14, # 'ς' + 243: 7, # 'σ' + 244: 2, # 'τ' + 245: 12, # 'υ' + 246: 28, # 'φ' + 247: 23, # 'χ' + 248: 42, # 'ψ' + 249: 24, # 'ω' + 250: 64, # 'ϊ' + 251: 75, # 'ϋ' + 252: 19, # 'ό' + 253: 26, # 'ύ' + 254: 27, # 'ώ' + 255: 253, # None +} + +ISO_8859_7_GREEK_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-7', + language='Greek', + char_to_order_map=ISO_8859_7_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, + typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet='ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ') + diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langhebrewmodel.py new file mode 100644 index 00000000..484c652a --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/langhebrewmodel.py @@ -0,0 +1,4383 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +HEBREW_LANG_MODEL = { + 50: { # 'a' + 50: 0, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 2, # 'l' + 54: 2, # 'n' + 49: 0, # 'o' + 51: 2, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 1, # 'ק' + 7: 0, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 60: { # 'c' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 
24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 61: { # 'd' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 0, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 42: { # 'e' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 2, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 2, # 'l' + 54: 2, # 'n' + 49: 1, # 'o' + 51: 2, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 1, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 53: { # 'i' + 50: 1, # 'a' + 60: 2, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 0, # 'i' + 56: 1, # 'l' + 54: 2, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 56: { # 'l' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 2, # 'e' + 53: 2, # 'i' + 56: 2, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 
0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 54: { # 'n' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 49: { # 'o' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 2, # 'n' + 49: 1, # 'o' + 51: 2, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 51: { # 'r' + 50: 2, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 2, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, 
+ 43: { # 's' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 2, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 2, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 44: { # 't' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 2, # 'e' + 53: 2, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 63: { # 'u' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 34: { # '\xa0' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 1, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 2, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 
2, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 55: { # '´' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 2, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 1, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 48: { # '¼' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 39: { # '½' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 57: { # '¾' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 
0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 30: { # 'ְ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 2, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 1, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 2, # 'ע' + 26: 0, # 'ף' + 18: 2, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 59: { # 'ֱ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 1, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 0, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 41: { # 'ֲ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 0, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 33: { # 'ִ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 
49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 1, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 0, # 'ַ' + 29: 1, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 2, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 37: { # 'ֵ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 1, # 'ַ' + 29: 1, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 1, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 36: { # 'ֶ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 1, # 'ַ' + 29: 1, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 1, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 2, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 31: { # 'ַ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 0, # 'ַ' + 29: 2, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 2, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 
'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 29: { # 'ָ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 1, # 'ַ' + 29: 2, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 2, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 35: { # 'ֹ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 62: { # 'ֻ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 1, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 28: { # 'ּ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'ְ' + 59: 0, # 'ֱ' + 41: 1, # 'ֲ' + 33: 3, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 3, # 'ַ' + 29: 3, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 2, # 'ׁ' + 45: 1, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 
'ד' + 3: 1, # 'ה' + 2: 2, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 1, # 'ם' + 6: 2, # 'מ' + 23: 1, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 38: { # 'ׁ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 2, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 45: { # 'ׂ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 2, # 'ֶ' + 31: 1, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 2, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 9: { # 'א' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 2, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 2, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 8: { # 'ב' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 
39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 3, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 1, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 20: { # 'ג' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 1, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 16: { # 'ד' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 1, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 3: { # 'ה' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 1, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 3, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 0, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 
58: 0, # '†' + 40: 2, # '…' + }, + 2: { # 'ו' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 3, # 'ֹ' + 62: 0, # 'ֻ' + 28: 3, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 3, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 24: { # 'ז' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 1, # 'ֲ' + 33: 1, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 14: { # 'ח' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 1, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 1, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 22: { # 'ט' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 1, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 1, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 
3, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 3, # 'ר' + 10: 2, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 1: { # 'י' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 3, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 25: { # 'ך' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 2, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 15: { # 'כ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 3, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 2, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 4: { # 'ל' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, 
# 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 11: { # 'ם' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 6: { # 'מ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 0, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 23: { # 'ן' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 12: { # 'נ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 
0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 19: { # 'ס' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 1, # 'ָ' + 35: 1, # 'ֹ' + 62: 2, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 1, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 13: { # 'ע' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 1, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 1, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 2, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 26: { # 'ף' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' 
+ 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 18: { # 'פ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 2, # 'ֶ' + 31: 1, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 2, # 'ב' + 20: 3, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 27: { # 'ץ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 21: { # 'צ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 1, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 1, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 0, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 17: { # 'ק' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 
20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 2, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 7: { # 'ר' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 1, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 3, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 10: { # 'ש' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 1, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 1, # 'ַ' + 29: 1, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 3, # 'ׁ' + 45: 2, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 5: { # 'ת' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 1, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 32: { # '–' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, 
# '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 52: { # '’' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 47: { # '“' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 46: { # '”' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, 
# '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 58: { # '†' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 2, # '†' + 40: 0, # '…' + }, + 40: { # '…' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1255_HEBREW_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 69, # 'A' + 66: 91, # 'B' + 67: 79, # 'C' + 68: 80, # 'D' + 69: 92, # 'E' + 70: 89, # 'F' + 71: 97, # 'G' + 72: 90, # 'H' + 73: 68, # 'I' + 74: 111, # 'J' + 75: 112, # 'K' + 76: 82, # 'L' + 77: 73, # 'M' + 78: 95, # 'N' + 79: 85, # 'O' + 80: 78, # 'P' + 81: 121, # 'Q' + 82: 86, # 'R' + 83: 71, # 'S' + 84: 67, # 'T' + 85: 102, # 'U' + 86: 107, # 'V' + 87: 84, # 'W' + 88: 114, # 'X' + 89: 103, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 50, # 'a' + 98: 74, # 'b' + 99: 60, # 'c' + 100: 61, # 'd' + 101: 42, # 'e' + 102: 76, # 'f' + 103: 70, # 'g' + 104: 64, # 'h' + 105: 53, # 'i' + 106: 105, # 'j' + 107: 93, # 'k' + 108: 56, # 'l' + 109: 65, # 'm' + 110: 54, # 'n' + 111: 49, # 'o' + 112: 66, # 'p' + 113: 110, # 'q' + 114: 51, # 'r' + 115: 43, # 's' + 116: 44, # 't' + 117: 63, # 'u' + 118: 81, # 'v' + 119: 77, # 'w' + 120: 98, # 'x' + 121: 75, # 'y' + 122: 108, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 124, # '€' + 129: 202, # None + 130: 203, # '‚' + 131: 204, # 'ƒ' + 132: 205, # '„' + 133: 40, # '…' + 134: 58, # '†' + 135: 206, # '‡' + 136: 207, # 'ˆ' + 137: 208, # '‰' + 138: 209, # None + 139: 210, # '‹' + 140: 211, # None + 141: 212, # None + 142: 213, # None + 143: 214, # None + 144: 215, # None + 145: 83, # '‘' + 146: 52, # '’' + 147: 47, # '“' + 148: 46, # '”' + 149: 72, # '•' + 150: 32, # '–' + 151: 94, # '—' + 152: 216, # '˜' + 153: 113, # '™' + 154: 217, # None + 155: 109, # '›' + 156: 218, # None + 157: 219, # None + 158: 220, # None + 159: 221, # None + 160: 34, # '\xa0' + 161: 116, # '¡' + 162: 222, # '¢' + 163: 118, # '£' + 164: 100, # '₪' + 165: 223, # '¥' + 166: 224, # '¦' + 167: 117, # '§' + 168: 119, # '¨' + 169: 104, # '©' + 170: 125, # '×' + 171: 225, # '«' + 172: 226, # '¬' + 173: 87, # '\xad' + 174: 99, # '®' + 175: 227, # '¯' + 176: 106, # '°' + 177: 122, # '±' + 178: 123, # '²' + 179: 228, # '³' + 180: 55, # '´' + 181: 229, # 'µ' + 182: 230, # '¶' + 183: 101, # '·' + 184: 231, # '¸' + 185: 232, # '¹' + 186: 120, # '÷' + 187: 233, # '»' + 188: 48, # '¼' + 189: 39, # '½' + 190: 57, # '¾' + 191: 234, # '¿' + 192: 30, # 'ְ' + 193: 59, # 'ֱ' + 194: 41, # 'ֲ' + 195: 88, # 'ֳ' + 196: 33, # 'ִ' + 197: 37, # 'ֵ' + 198: 36, # 'ֶ' + 199: 31, # 'ַ' + 200: 29, # 'ָ' + 201: 35, # 'ֹ' + 202: 235, # None + 203: 62, # 'ֻ' + 204: 28, # 'ּ' + 205: 236, # 'ֽ' + 206: 126, # '־' + 207: 237, # 'ֿ' + 208: 238, # '׀' + 209: 38, # 'ׁ' + 210: 45, # 'ׂ' + 211: 239, # '׃' + 212: 240, # 'װ' + 213: 241, # 'ױ' + 214: 242, # 'ײ' + 215: 243, # '׳' + 216: 127, # '״' + 217: 244, # None + 218: 245, # None + 219: 246, # None + 220: 247, # None + 221: 248, # None + 222: 249, # None + 223: 250, # None + 224: 9, # 'א' + 225: 8, # 'ב' + 226: 20, # 'ג' + 227: 16, # 'ד' + 228: 3, # 'ה' + 229: 2, # 'ו' + 230: 24, # 'ז' + 231: 14, # 'ח' + 232: 22, # 'ט' + 233: 1, # 'י' + 234: 25, # 'ך' + 235: 15, # 'כ' + 236: 4, # 'ל' + 237: 11, # 'ם' + 238: 6, # 'מ' + 239: 23, # 'ן' + 240: 12, # 'נ' + 241: 19, # 'ס' + 242: 13, # 'ע' + 243: 26, # 'ף' + 244: 18, # 'פ' + 245: 27, # 'ץ' + 246: 21, # 'צ' + 247: 17, # 'ק' + 248: 7, # 'ר' + 249: 10, # 'ש' + 250: 5, # 'ת' + 251: 251, # None + 252: 252, # None + 253: 128, # '\u200e' + 254: 96, # '\u200f' + 255: 253, # None +} + +WINDOWS_1255_HEBREW_MODEL = SingleByteCharSetModel(charset_name='windows-1255', + language='Hebrew', + char_to_order_map=WINDOWS_1255_HEBREW_CHAR_TO_ORDER, + language_model=HEBREW_LANG_MODEL, + 
typical_positive_ratio=0.984004, + keep_ascii_letters=False, + alphabet='אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ') + diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langhungarianmodel.py new file mode 100644 index 00000000..bbc5cda6 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/langhungarianmodel.py @@ -0,0 +1,4650 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +HUNGARIAN_LANG_MODEL = { + 28: { # 'A' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 2, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 2, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 2, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 1, # 'Á' + 44: 0, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 40: { # 'B' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 3, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 54: { # 'C' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 3, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 45: { # 'D' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' 
+ 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 32: { # 'E' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 2, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 50: { # 'F' + 28: 1, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 0, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 49: { # 'G' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 2, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, 
# 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 38: { # 'H' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 0, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 1, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 2, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 2, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 39: { # 'I' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 2, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 53: { # 'J' + 28: 2, # 'A' + 40: 0, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 0, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 36: { # 'K' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 
0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 2, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 41: { # 'L' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 34: { # 'M' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 3, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 1, # 'ű' + }, + 35: { # 'N' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 2, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 2, # 'Y' + 52: 1, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 47: { # 'O' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 2, # 'M' + 35: 2, # 'N' + 47: 
1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 46: { # 'P' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 0, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 43: { # 'R' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 2, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 2, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 33: { # 'S' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 3, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' 
+ 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 37: { # 'T' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 2, # 'Á' + 44: 2, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 57: { # 'U' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 48: { # 'V' + 28: 2, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 2, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 55: { # 'Y' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 2, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 0, # 'u' + 
19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 52: { # 'Z' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 2: { # 'a' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 18: { # 'b' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 26: { # 'c' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 1, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 1, # 'b' 
+ 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 2, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 17: { # 'd' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 1: { # 'e' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 2, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 27: { # 'f' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 3, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 12: { # 'g' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 
38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 20: { # 'h' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 9: { # 'i' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 1, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 1, # 'ű' + }, + 22: { # 'j' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 
'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 7: { # 'k' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 3, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 6: { # 'l' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 3, # 'ő' + 56: 1, # 'ű' + }, + 13: { # 'm' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 2, # 'ű' + }, + 4: { # 'n' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' 
+ 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 1, # 'x' + 16: 3, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 8: { # 'o' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 23: { # 'p' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 10: { # 'r' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 2, # 'ű' + }, + 5: { # 's' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 
37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 3: { # 't' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 3, # 'ő' + 56: 2, # 'ű' + }, + 21: { # 'u' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 1, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 19: { # 'v' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 62: { # 'x' + 28: 
0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 16: { # 'y' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 2, # 'ű' + }, + 11: { # 'z' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 51: { # 'Á' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' 
+ 51: 0, # 'Á' + 44: 0, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 44: { # 'É' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 61: { # 'Í' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 0, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 58: { # 'Ó' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 2, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 59: { # 'Ö' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 
'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 60: { # 'Ú' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 2, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 63: { # 'Ü' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 14: { # 'á' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 15: { # 'é' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 
'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 30: { # 'í' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 25: { # 'ó' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 24: { # 'ö' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 
25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 31: { # 'ú' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 3, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 29: { # 'ü' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 0, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 42: { # 'ő' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 56: { # 'ű' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 
10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 28, # 'A' + 66: 40, # 'B' + 67: 54, # 'C' + 68: 45, # 'D' + 69: 32, # 'E' + 70: 50, # 'F' + 71: 49, # 'G' + 72: 38, # 'H' + 73: 39, # 'I' + 74: 53, # 'J' + 75: 36, # 'K' + 76: 41, # 'L' + 77: 34, # 'M' + 78: 35, # 'N' + 79: 47, # 'O' + 80: 46, # 'P' + 81: 72, # 'Q' + 82: 43, # 'R' + 83: 33, # 'S' + 84: 37, # 'T' + 85: 57, # 'U' + 86: 48, # 'V' + 87: 64, # 'W' + 88: 68, # 'X' + 89: 55, # 'Y' + 90: 52, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 2, # 'a' + 98: 18, # 'b' + 99: 26, # 'c' + 100: 17, # 'd' + 101: 1, # 'e' + 102: 27, # 'f' + 103: 12, # 'g' + 104: 20, # 'h' + 105: 9, # 'i' + 106: 22, # 'j' + 107: 7, # 'k' + 108: 6, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 8, # 'o' + 112: 23, # 'p' + 113: 67, # 'q' + 114: 10, # 'r' + 115: 5, # 's' + 116: 3, # 't' + 117: 21, # 'u' + 118: 19, # 'v' + 119: 65, # 'w' + 120: 62, # 'x' + 121: 16, # 'y' + 122: 11, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 161, # '€' + 129: 162, # None + 130: 163, # '‚' + 131: 164, # None + 132: 165, # '„' + 133: 166, # '…' + 134: 167, # '†' + 135: 168, # '‡' + 136: 169, # None + 137: 170, # '‰' + 138: 171, # 'Š' + 139: 172, # '‹' + 140: 173, # 'Ś' + 141: 174, # 'Ť' + 142: 175, # 'Ž' + 143: 176, # 'Ź' + 144: 177, # None + 145: 178, # '‘' + 146: 179, # '’' + 147: 180, # '“' + 148: 78, # '”' + 149: 181, # '•' + 150: 69, # '–' + 151: 182, # '—' + 152: 183, # None + 153: 184, # '™' + 154: 185, # 'š' + 155: 186, # '›' + 156: 187, # 'ś' + 157: 188, # 'ť' + 158: 189, # 'ž' + 159: 190, # 'ź' + 160: 191, # '\xa0' + 161: 192, # 'ˇ' + 162: 193, # '˘' + 163: 194, # 'Ł' + 164: 195, # '¤' + 165: 196, # 'Ą' + 166: 197, # '¦' + 167: 76, # '§' + 168: 198, # '¨' + 169: 199, # '©' + 170: 200, # 'Ş' + 171: 201, # '«' + 172: 202, # '¬' + 173: 203, # '\xad' + 174: 204, # '®' + 175: 205, # 'Ż' + 176: 81, # '°' + 177: 206, # '±' + 178: 207, # '˛' + 179: 208, # 'ł' + 180: 209, # '´' + 181: 210, # 'µ' + 182: 211, # '¶' + 183: 212, # '·' + 184: 213, # '¸' + 185: 214, # 'ą' + 186: 215, # 'ş' + 187: 216, # '»' + 188: 217, # 'Ľ' + 189: 218, # '˝' + 190: 219, # 'ľ' + 191: 220, # 'ż' + 192: 221, # 'Ŕ' + 193: 51, # 'Á' + 194: 83, # 'Â' + 195: 222, # 'Ă' + 196: 80, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'Č' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Ě' + 205: 61, # 'Í' + 206: 230, # 'Î' + 207: 231, # 'Ď' + 208: 232, # 'Đ' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Ő' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Ů' + 218: 60, # 'Ú' + 219: 70, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ý' + 222: 240, # 'Ţ' + 223: 241, # 'ß' + 224: 84, # 'ŕ' + 225: 14, # 'á' + 226: 75, # 'â' + 227: 242, # 'ă' + 228: 71, # 'ä' + 229: 82, # 'ĺ' + 230: 243, # 'ć' + 231: 73, # 'ç' + 232: 244, # 'č' + 233: 15, # 'é' + 234: 85, # 'ę' + 235: 79, # 'ë' + 236: 86, # 'ě' + 237: 30, # 'í' + 238: 77, # 'î' + 239: 87, # 'ď' + 240: 245, # 'đ' + 241: 246, # 'ń' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 74, # 'ô' + 245: 42, # 'ő' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'ř' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'ţ' + 255: 253, # '˙' +} + +WINDOWS_1250_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1250', + language='Hungarian', + char_to_order_map=WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + 
typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű') + +ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 28, # 'A' + 66: 40, # 'B' + 67: 54, # 'C' + 68: 45, # 'D' + 69: 32, # 'E' + 70: 50, # 'F' + 71: 49, # 'G' + 72: 38, # 'H' + 73: 39, # 'I' + 74: 53, # 'J' + 75: 36, # 'K' + 76: 41, # 'L' + 77: 34, # 'M' + 78: 35, # 'N' + 79: 47, # 'O' + 80: 46, # 'P' + 81: 71, # 'Q' + 82: 43, # 'R' + 83: 33, # 'S' + 84: 37, # 'T' + 85: 57, # 'U' + 86: 48, # 'V' + 87: 64, # 'W' + 88: 68, # 'X' + 89: 55, # 'Y' + 90: 52, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 2, # 'a' + 98: 18, # 'b' + 99: 26, # 'c' + 100: 17, # 'd' + 101: 1, # 'e' + 102: 27, # 'f' + 103: 12, # 'g' + 104: 20, # 'h' + 105: 9, # 'i' + 106: 22, # 'j' + 107: 7, # 'k' + 108: 6, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 8, # 'o' + 112: 23, # 'p' + 113: 67, # 'q' + 114: 10, # 'r' + 115: 5, # 's' + 116: 3, # 't' + 117: 21, # 'u' + 118: 19, # 'v' + 119: 65, # 'w' + 120: 62, # 'x' + 121: 16, # 'y' + 122: 11, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 159, # '\x80' + 129: 160, # '\x81' + 130: 161, # '\x82' + 131: 162, # '\x83' + 132: 163, # '\x84' + 133: 164, # '\x85' + 134: 165, # '\x86' + 135: 166, # '\x87' + 136: 167, # '\x88' + 137: 168, # '\x89' + 138: 169, # '\x8a' + 139: 170, # '\x8b' + 140: 171, # '\x8c' + 141: 172, # '\x8d' + 142: 173, # '\x8e' + 143: 174, # '\x8f' + 144: 175, # '\x90' + 145: 176, # '\x91' + 146: 177, # '\x92' + 147: 178, # '\x93' + 148: 179, # '\x94' + 149: 180, # '\x95' + 150: 181, # '\x96' + 151: 182, # '\x97' + 152: 183, # '\x98' + 153: 184, # '\x99' + 154: 185, # '\x9a' + 155: 186, # '\x9b' + 156: 187, # '\x9c' + 157: 188, # '\x9d' + 158: 189, # '\x9e' + 159: 190, # '\x9f' + 160: 191, # '\xa0' + 161: 192, # 'Ą' + 162: 193, # '˘' + 163: 194, # 'Ł' + 164: 195, # '¤' + 165: 196, # 'Ľ' + 166: 197, # 'Ś' + 167: 75, # '§' + 168: 198, # '¨' + 169: 199, # 'Š' + 170: 200, # 'Ş' + 171: 201, # 'Ť' + 172: 202, # 'Ź' + 173: 203, # '\xad' + 174: 204, # 'Ž' + 175: 205, # 'Ż' + 176: 79, # '°' + 177: 206, # 'ą' + 178: 207, # '˛' + 179: 208, # 'ł' + 180: 209, # '´' + 181: 210, # 'ľ' + 182: 211, # 'ś' + 183: 212, # 'ˇ' + 184: 213, # '¸' + 185: 214, # 'š' + 
186: 215, # 'ş' + 187: 216, # 'ť' + 188: 217, # 'ź' + 189: 218, # '˝' + 190: 219, # 'ž' + 191: 220, # 'ż' + 192: 221, # 'Ŕ' + 193: 51, # 'Á' + 194: 81, # 'Â' + 195: 222, # 'Ă' + 196: 78, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'Č' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Ě' + 205: 61, # 'Í' + 206: 230, # 'Î' + 207: 231, # 'Ď' + 208: 232, # 'Đ' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Ő' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Ů' + 218: 60, # 'Ú' + 219: 69, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ý' + 222: 240, # 'Ţ' + 223: 241, # 'ß' + 224: 82, # 'ŕ' + 225: 14, # 'á' + 226: 74, # 'â' + 227: 242, # 'ă' + 228: 70, # 'ä' + 229: 80, # 'ĺ' + 230: 243, # 'ć' + 231: 72, # 'ç' + 232: 244, # 'č' + 233: 15, # 'é' + 234: 83, # 'ę' + 235: 77, # 'ë' + 236: 84, # 'ě' + 237: 30, # 'í' + 238: 76, # 'î' + 239: 85, # 'ď' + 240: 245, # 'đ' + 241: 246, # 'ń' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 73, # 'ô' + 245: 42, # 'ő' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'ř' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'ţ' + 255: 253, # '˙' +} + +ISO_8859_2_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-2', + language='Hungarian', + char_to_order_map=ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű') + diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langrussianmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langrussianmodel.py new file mode 100644 index 00000000..5594452b --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/langrussianmodel.py @@ -0,0 +1,5718 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +RUSSIAN_LANG_MODEL = { + 37: { # 'А' + 37: 0, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 44: { # 'Б' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 
0, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 2, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 33: { # 'В' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ъ' + 18: 3, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 0, # 'ю' + 16: 1, # 'я' + }, + 46: { # 'Г' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 41: { # 'Д' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 3, # 'ж' + 20: 1, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 48: { # 'Е' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, 
# 'Р' + 32: 2, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 1, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'р' + 7: 3, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 56: { # 'Ж' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 1, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 2, # 'ю' + 16: 0, # 'я' + }, + 51: { # 'З' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 1, # 'я' + }, + 42: { # 'И' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 2, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 1, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + 
}, + 60: { # 'Й' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 36: { # 'К' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 2, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 49: { # 'Л' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 0, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 2, # 'ю' + 16: 1, # 'я' + }, + 38: { # 'М' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 1, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 
1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 1, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 31: { # 'Н' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 2, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 3, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 34: { # 'О' + 37: 0, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 2, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 2, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 1, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 35: { # 'П' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'р' + 7: 1, # 'с' + 6: 1, # 'т' + 14: 2, # 'у' + 39: 1, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 0, # 'ю' + 16: 2, # 'я' + }, + 45: { # 'Р' + 37: 2, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 2, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 2, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 
61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 2, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 2, # 'я' + }, + 32: { # 'С' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 2, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 40: { # 'Т' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 2, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 52: { # 'У' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 1, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 1, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 0, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 53: { # 'Ф' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 
0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 1, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 55: { # 'Х' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 58: { # 'Ц' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 50: { # 'Ч' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' 
+ 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 57: { # 'Ш' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 63: { # 'Щ' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 62: { # 'Ы' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 61: { # 'Ь' + 37: 0, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 0, # 'е' + 24: 0, # 
'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 47: { # 'Э' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 59: { # 'Ю' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'р' + 7: 1, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 43: { # 'Я' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 1, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'р' + 7: 1, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 3: { # 'а' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' 
+ 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 21: { # 'б' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 0, # 'ф' + 26: 2, # 'х' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 10: { # 'в' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 3, # 'я' + }, + 19: { # 'г' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 13: { # 'д' + 37: 0, # 'А' + 44: 0, # 'Б' + 
33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 3, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 2: { # 'е' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 24: { # 'ж' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 1, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 0, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 20: { # 'з' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'р' + 7: 2, # 
'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 3, # 'я' + }, + 4: { # 'и' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 23: { # 'й' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 2, # 'ф' + 26: 1, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 2, # 'я' + }, + 11: { # 'к' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 3, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 8: { # 'л' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, 
# 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 3, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 1, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 1, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 12: { # 'м' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 2, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 3, # 'я' + }, + 5: { # 'н' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 2, # 'щ' + 54: 1, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 1: { # 'о' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 15: { # 'п' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' 
+ 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 0, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 3, # 'я' + }, + 9: { # 'р' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 2, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 7: { # 'с' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 6: { # 'т' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 2, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 
'ь' + 30: 2, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 14: { # 'у' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 2, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 2, # 'я' + }, + 39: { # 'ф' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 2, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 26: { # 'х' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 3, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 3, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 28: { # 'ц' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, 
# 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 1, # 'т' + 14: 3, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 1, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 1, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 22: { # 'ч' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 3, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 25: { # 'ш' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 29: { # 'щ' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 2, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 54: { # 'ъ' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 
'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 2, # 'я' + }, + 18: { # 'ы' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 1, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 2, # 'я' + }, + 17: { # 'ь' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 0, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 1, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 0, # 'у' + 39: 2, # 'ф' + 26: 1, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 30: { # 'э' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 2, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 27: { # 'ю' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' 
+ 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 1, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 0, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 2, # 'ю' + 16: 1, # 'я' + }, + 16: { # 'я' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 2, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 1, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 2, # 'ю' + 16: 2, # 'я' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +IBM866_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
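# A rough sketch of how tables like the ones in this file can be consulted
# (the helper below is hypothetical, not part of the vendored chardet source):
# each input byte is mapped to a frequency "order" through a char_to_order_map
# such as the one being built here, and consecutive orders are then looked up
# in RUSSIAN_LANG_MODEL to see how plausible that bigram is (3 = positive ...
# 0 = negative, with 251-255 reserved for control bytes, digits, symbols, line
# breaks and unseen bytes, as documented above).  chardet's real
# SingleByteCharSetProber keeps fuller statistics, but the lookup pattern is
# the same idea:
#
#     def score(data: bytes, char_to_order: dict, lang_model: dict,
#               sample_size: int = 64) -> float:
#         hits = total = 0
#         prev = None
#         for byte in data:
#             order = char_to_order.get(byte, 255)
#             if order < sample_size:                 # a letter seen in training
#                 if prev is not None and prev < sample_size:
#                     total += 1
#                     if lang_model.get(prev, {}).get(order, 0) == 3:
#                         hits += 1                   # count "positive" bigrams
#             prev = order
#         return hits / total if total else 0.0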
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 37, # 'А' + 129: 44, # 'Б' + 130: 33, # 'В' + 131: 46, # 'Г' + 132: 41, # 'Д' + 133: 48, # 'Е' + 134: 56, # 'Ж' + 135: 51, # 'З' + 136: 42, # 'И' + 137: 60, # 'Й' + 138: 36, # 'К' + 139: 49, # 'Л' + 140: 38, # 'М' + 141: 31, # 'Н' + 142: 34, # 'О' + 143: 35, # 'П' + 144: 45, # 'Р' + 145: 32, # 'С' + 146: 40, # 'Т' + 147: 52, # 'У' + 148: 53, # 'Ф' + 149: 55, # 'Х' + 150: 58, # 'Ц' + 151: 50, # 'Ч' + 152: 57, # 'Ш' + 153: 63, # 'Щ' + 154: 70, # 'Ъ' + 155: 62, # 'Ы' + 156: 61, # 'Ь' + 157: 47, # 'Э' + 158: 59, # 'Ю' + 159: 43, # 'Я' + 160: 3, # 'а' + 161: 21, # 'б' + 162: 10, # 'в' + 163: 19, # 'г' + 164: 13, # 'д' + 165: 2, # 'е' + 166: 24, # 'ж' + 167: 20, # 'з' + 168: 4, # 'и' + 169: 23, # 'й' + 170: 11, # 'к' + 171: 8, # 'л' + 172: 12, # 'м' + 173: 5, # 'н' + 174: 1, # 'о' + 175: 15, # 'п' + 176: 191, # '░' + 177: 192, # '▒' + 178: 193, # '▓' + 179: 194, # '│' + 180: 195, # '┤' + 181: 196, # '╡' + 182: 197, # '╢' + 183: 198, # '╖' + 184: 199, # '╕' + 185: 200, # '╣' + 186: 201, # '║' + 187: 202, # '╗' + 188: 203, # '╝' + 189: 204, # '╜' + 190: 205, # '╛' + 191: 206, # '┐' + 192: 207, # '└' + 193: 208, # '┴' + 194: 209, # '┬' + 195: 210, # '├' + 196: 211, # '─' + 197: 212, # '┼' + 198: 213, # '╞' + 199: 214, # '╟' + 200: 215, # '╚' + 201: 216, # '╔' + 202: 217, # '╩' + 203: 218, # '╦' + 204: 219, # '╠' + 205: 220, # '═' + 206: 221, # '╬' + 207: 222, # '╧' + 208: 223, # '╨' + 209: 224, # '╤' + 210: 225, # '╥' + 211: 226, # '╙' + 212: 227, # '╘' + 213: 228, # '╒' + 214: 229, # '╓' + 215: 230, # '╫' + 216: 231, # '╪' + 217: 232, # '┘' + 218: 233, # '┌' + 219: 234, # '█' + 220: 235, # '▄' + 221: 236, # '▌' + 222: 237, # '▐' + 223: 238, # '▀' + 224: 9, # 'р' + 225: 7, # 'с' + 226: 6, # 'т' + 227: 14, # 'у' + 228: 39, # 'ф' + 229: 26, # 'х' + 230: 28, # 'ц' + 231: 22, # 'ч' + 232: 25, # 'ш' + 233: 29, # 'щ' + 234: 54, # 'ъ' + 235: 18, # 'ы' + 236: 17, # 'ь' + 237: 30, # 'э' + 238: 27, # 'ю' + 239: 16, # 'я' + 240: 239, # 'Ё' + 241: 68, # 'ё' + 242: 240, # 'Є' + 243: 241, # 'є' + 244: 242, # 'Ї' + 245: 243, # 'ї' + 246: 244, # 'Ў' + 247: 245, # 'ў' + 248: 246, # '°' + 249: 247, # '∙' + 250: 248, # '·' + 251: 249, # '√' + 252: 250, # '№' + 253: 251, # '¤' + 254: 252, # '■' + 255: 255, # '\xa0' +} + +IBM866_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM866', + language='Russian', + char_to_order_map=IBM866_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + 
typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # 'Ђ' + 129: 192, # 'Ѓ' + 130: 193, # '‚' + 131: 194, # 'ѓ' + 132: 195, # '„' + 133: 196, # '…' + 134: 197, # '†' + 135: 198, # '‡' + 136: 199, # '€' + 137: 200, # '‰' + 138: 201, # 'Љ' + 139: 202, # '‹' + 140: 203, # 'Њ' + 141: 204, # 'Ќ' + 142: 205, # 'Ћ' + 143: 206, # 'Џ' + 144: 207, # 'ђ' + 145: 208, # '‘' + 146: 209, # '’' + 147: 210, # '“' + 148: 211, # '”' + 149: 212, # '•' + 150: 213, # '–' + 151: 214, # '—' + 152: 215, # None + 153: 216, # '™' + 154: 217, # 'љ' + 155: 218, # '›' + 156: 219, # 'њ' + 157: 220, # 'ќ' + 158: 221, # 'ћ' + 159: 222, # 'џ' + 160: 223, # '\xa0' + 161: 224, # 'Ў' + 162: 225, # 'ў' + 163: 226, # 'Ј' + 164: 227, # '¤' + 165: 228, # 'Ґ' + 166: 229, # '¦' + 167: 230, # '§' + 168: 231, # 'Ё' + 169: 232, # '©' + 170: 233, # 'Є' + 171: 234, # '«' + 172: 235, # '¬' + 173: 236, # '\xad' + 174: 237, # '®' + 175: 238, # 'Ї' + 176: 239, # '°' + 177: 240, # '±' + 178: 241, # 'І' + 179: 242, # 'і' + 180: 243, # 'ґ' + 181: 244, # 'µ' + 182: 245, # '¶' + 183: 246, # '·' + 184: 68, # 'ё' + 185: 247, # '№' + 186: 248, # 'є' + 187: 249, # '»' + 188: 
250, # 'ј' + 189: 251, # 'Ѕ' + 190: 252, # 'ѕ' + 191: 253, # 'ї' + 192: 37, # 'А' + 193: 44, # 'Б' + 194: 33, # 'В' + 195: 46, # 'Г' + 196: 41, # 'Д' + 197: 48, # 'Е' + 198: 56, # 'Ж' + 199: 51, # 'З' + 200: 42, # 'И' + 201: 60, # 'Й' + 202: 36, # 'К' + 203: 49, # 'Л' + 204: 38, # 'М' + 205: 31, # 'Н' + 206: 34, # 'О' + 207: 35, # 'П' + 208: 45, # 'Р' + 209: 32, # 'С' + 210: 40, # 'Т' + 211: 52, # 'У' + 212: 53, # 'Ф' + 213: 55, # 'Х' + 214: 58, # 'Ц' + 215: 50, # 'Ч' + 216: 57, # 'Ш' + 217: 63, # 'Щ' + 218: 70, # 'Ъ' + 219: 62, # 'Ы' + 220: 61, # 'Ь' + 221: 47, # 'Э' + 222: 59, # 'Ю' + 223: 43, # 'Я' + 224: 3, # 'а' + 225: 21, # 'б' + 226: 10, # 'в' + 227: 19, # 'г' + 228: 13, # 'д' + 229: 2, # 'е' + 230: 24, # 'ж' + 231: 20, # 'з' + 232: 4, # 'и' + 233: 23, # 'й' + 234: 11, # 'к' + 235: 8, # 'л' + 236: 12, # 'м' + 237: 5, # 'н' + 238: 1, # 'о' + 239: 15, # 'п' + 240: 9, # 'р' + 241: 7, # 'с' + 242: 6, # 'т' + 243: 14, # 'у' + 244: 39, # 'ф' + 245: 26, # 'х' + 246: 28, # 'ц' + 247: 22, # 'ч' + 248: 25, # 'ш' + 249: 29, # 'щ' + 250: 54, # 'ъ' + 251: 18, # 'ы' + 252: 17, # 'ь' + 253: 30, # 'э' + 254: 27, # 'ю' + 255: 16, # 'я' +} + +WINDOWS_1251_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251', + language='Russian', + char_to_order_map=WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +IBM855_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # 'ђ' + 129: 192, # 'Ђ' + 130: 193, # 'ѓ' + 131: 194, # 'Ѓ' + 132: 68, # 'ё' + 133: 195, # 'Ё' + 134: 196, # 'є' + 135: 197, # 'Є' + 136: 198, # 'ѕ' + 137: 199, # 'Ѕ' + 138: 200, # 'і' + 139: 201, # 'І' + 140: 202, # 'ї' + 141: 203, # 'Ї' + 142: 204, # 'ј' + 143: 205, # 'Ј' + 144: 206, # 'љ' + 145: 207, # 'Љ' + 146: 208, # 'њ' + 147: 209, # 'Њ' + 148: 210, # 'ћ' + 149: 211, # 'Ћ' + 150: 212, # 'ќ' + 151: 213, # 'Ќ' + 152: 214, # 'ў' + 153: 215, # 'Ў' + 154: 216, # 'џ' + 155: 217, # 'Џ' + 156: 27, # 'ю' + 157: 59, # 'Ю' + 158: 54, # 'ъ' + 159: 70, # 'Ъ' + 160: 3, # 'а' + 161: 37, # 'А' + 162: 21, # 'б' + 163: 44, # 'Б' + 164: 28, # 'ц' + 165: 58, # 'Ц' + 166: 13, # 'д' + 167: 41, # 'Д' + 168: 2, # 'е' + 169: 48, # 'Е' + 170: 39, # 'ф' + 171: 53, # 'Ф' + 172: 19, # 'г' + 173: 46, # 'Г' + 174: 218, # '«' + 175: 219, # '»' + 176: 220, # '░' + 177: 221, # '▒' + 178: 222, # '▓' + 179: 223, # '│' + 180: 224, # '┤' + 181: 26, # 'х' + 182: 55, # 'Х' + 183: 4, # 'и' + 184: 42, # 'И' + 185: 225, # '╣' + 186: 226, # '║' + 187: 227, # '╗' + 188: 228, # '╝' + 189: 23, # 'й' + 190: 60, # 'Й' + 191: 229, # '┐' + 192: 230, # '└' + 193: 231, # '┴' + 194: 232, # '┬' + 195: 233, # '├' + 196: 234, # '─' + 197: 235, # '┼' + 198: 11, # 'к' + 199: 36, # 'К' + 200: 236, # '╚' + 201: 237, # '╔' + 202: 238, # '╩' + 203: 239, # '╦' + 204: 240, # '╠' + 205: 241, # '═' + 206: 242, # '╬' + 207: 243, # '¤' + 208: 8, # 'л' + 209: 49, # 'Л' + 210: 12, # 'м' + 211: 38, # 'М' + 212: 5, # 'н' + 213: 31, # 'Н' + 214: 1, # 'о' + 215: 34, # 'О' + 216: 15, # 'п' + 217: 244, # '┘' + 218: 245, # '┌' + 219: 246, # '█' + 220: 247, # '▄' + 221: 35, # 'П' + 222: 16, # 'я' + 223: 248, # '▀' + 224: 43, # 'Я' + 225: 9, # 'р' + 226: 45, # 'Р' + 227: 7, # 'с' + 228: 32, # 'С' + 229: 6, # 'т' + 230: 40, # 'Т' + 231: 14, # 'у' + 232: 52, # 'У' + 233: 24, # 'ж' + 234: 56, # 'Ж' + 235: 10, # 'в' + 236: 33, # 'В' + 237: 17, # 'ь' + 238: 61, # 'Ь' + 239: 249, # '№' + 240: 250, # '\xad' + 241: 18, # 'ы' + 242: 62, # 'Ы' + 243: 20, # 'з' + 244: 51, # 'З' + 245: 25, # 'ш' + 246: 57, # 'Ш' + 247: 30, # 'э' + 248: 47, # 'Э' + 249: 29, # 'щ' + 250: 63, # 'Щ' + 251: 22, # 'ч' + 252: 50, # 'Ч' + 253: 251, # '§' + 254: 252, # '■' + 255: 255, # '\xa0' +} + +IBM855_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM855', + language='Russian', + char_to_order_map=IBM855_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + 
typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +KOI8_R_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # '─' + 129: 192, # '│' + 130: 193, # '┌' + 131: 194, # '┐' + 132: 195, # '└' + 133: 196, # '┘' + 134: 197, # '├' + 135: 198, # '┤' + 136: 199, # '┬' + 137: 200, # '┴' + 138: 201, # '┼' + 139: 202, # '▀' + 140: 203, # '▄' + 141: 204, # '█' + 142: 205, # '▌' + 143: 206, # '▐' + 144: 207, # '░' + 145: 208, # '▒' + 146: 209, # '▓' + 147: 210, # '⌠' + 148: 211, # '■' + 149: 212, # '∙' + 150: 213, # '√' + 151: 214, # '≈' + 152: 215, # '≤' + 153: 216, # '≥' + 154: 217, # '\xa0' + 155: 218, # '⌡' + 156: 219, # '°' + 157: 220, # '²' + 158: 221, # '·' + 159: 222, # '÷' + 160: 223, # '═' + 161: 224, # '║' + 162: 225, # '╒' + 163: 68, # 'ё' + 164: 226, # '╓' + 165: 227, # '╔' + 166: 228, # '╕' + 167: 229, # '╖' + 168: 230, # '╗' + 169: 231, # '╘' + 170: 232, # '╙' + 171: 233, # '╚' + 172: 234, # '╛' + 173: 235, # '╜' + 174: 236, # '╝' + 175: 237, # '╞' + 176: 238, # '╟' + 177: 239, # '╠' + 178: 240, # '╡' + 179: 241, # 'Ё' + 180: 242, # '╢' + 181: 243, # '╣' + 182: 244, # '╤' + 183: 245, # '╥' + 184: 246, # '╦' + 185: 247, # '╧' + 186: 248, # '╨' + 187: 249, # '╩' + 188: 250, # '╪' + 
189: 251, # '╫' + 190: 252, # '╬' + 191: 253, # '©' + 192: 27, # 'ю' + 193: 3, # 'а' + 194: 21, # 'б' + 195: 28, # 'ц' + 196: 13, # 'д' + 197: 2, # 'е' + 198: 39, # 'ф' + 199: 19, # 'г' + 200: 26, # 'х' + 201: 4, # 'и' + 202: 23, # 'й' + 203: 11, # 'к' + 204: 8, # 'л' + 205: 12, # 'м' + 206: 5, # 'н' + 207: 1, # 'о' + 208: 15, # 'п' + 209: 16, # 'я' + 210: 9, # 'р' + 211: 7, # 'с' + 212: 6, # 'т' + 213: 14, # 'у' + 214: 24, # 'ж' + 215: 10, # 'в' + 216: 17, # 'ь' + 217: 18, # 'ы' + 218: 20, # 'з' + 219: 25, # 'ш' + 220: 30, # 'э' + 221: 29, # 'щ' + 222: 22, # 'ч' + 223: 54, # 'ъ' + 224: 59, # 'Ю' + 225: 37, # 'А' + 226: 44, # 'Б' + 227: 58, # 'Ц' + 228: 41, # 'Д' + 229: 48, # 'Е' + 230: 53, # 'Ф' + 231: 46, # 'Г' + 232: 55, # 'Х' + 233: 42, # 'И' + 234: 60, # 'Й' + 235: 36, # 'К' + 236: 49, # 'Л' + 237: 38, # 'М' + 238: 31, # 'Н' + 239: 34, # 'О' + 240: 35, # 'П' + 241: 43, # 'Я' + 242: 45, # 'Р' + 243: 32, # 'С' + 244: 40, # 'Т' + 245: 52, # 'У' + 246: 56, # 'Ж' + 247: 33, # 'В' + 248: 61, # 'Ь' + 249: 62, # 'Ы' + 250: 51, # 'З' + 251: 57, # 'Ш' + 252: 47, # 'Э' + 253: 63, # 'Щ' + 254: 50, # 'Ч' + 255: 70, # 'Ъ' +} + +KOI8_R_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='KOI8-R', + language='Russian', + char_to_order_map=KOI8_R_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 37, # 'А' + 129: 44, # 'Б' + 130: 33, # 'В' + 131: 46, # 'Г' + 132: 41, # 'Д' + 133: 48, # 'Е' + 134: 56, # 'Ж' + 135: 51, # 'З' + 136: 42, # 'И' + 137: 60, # 'Й' + 138: 36, # 'К' + 139: 49, # 'Л' + 140: 38, # 'М' + 141: 31, # 'Н' + 142: 34, # 'О' + 143: 35, # 'П' + 144: 45, # 'Р' + 145: 32, # 'С' + 146: 40, # 'Т' + 147: 52, # 'У' + 148: 53, # 'Ф' + 149: 55, # 'Х' + 150: 58, # 'Ц' + 151: 50, # 'Ч' + 152: 57, # 'Ш' + 153: 63, # 'Щ' + 154: 70, # 'Ъ' + 155: 62, # 'Ы' + 156: 61, # 'Ь' + 157: 47, # 'Э' + 158: 59, # 'Ю' + 159: 43, # 'Я' + 160: 191, # '†' + 161: 192, # '°' + 162: 193, # 'Ґ' + 163: 194, # '£' + 164: 195, # '§' + 165: 196, # '•' + 166: 197, # '¶' + 167: 198, # 'І' + 168: 199, # '®' + 169: 200, # '©' + 170: 201, # '™' + 171: 202, # 'Ђ' + 172: 203, # 'ђ' + 173: 204, # '≠' + 174: 205, # 'Ѓ' + 175: 206, # 'ѓ' + 176: 207, # '∞' + 177: 208, # '±' + 178: 209, # '≤' + 179: 210, # '≥' + 180: 211, # 'і' + 181: 212, # 'µ' + 182: 213, # 'ґ' + 183: 214, # 'Ј' + 184: 215, # 'Є' + 185: 216, # 'є' + 186: 217, # 'Ї' + 187: 218, # 'ї' + 188: 219, # 'Љ' + 189: 220, # 'љ' + 190: 221, # 'Њ' + 191: 222, # 'њ' + 192: 223, # 'ј' + 193: 224, # 'Ѕ' + 194: 225, # '¬' + 195: 226, # '√' + 196: 227, # 'ƒ' + 197: 228, # '≈' + 198: 229, # '∆' + 199: 230, # '«' + 200: 231, # '»' + 201: 232, # '…' + 202: 233, # '\xa0' + 203: 234, # 'Ћ' + 204: 235, # 'ћ' + 205: 236, # 'Ќ' + 206: 237, # 'ќ' + 207: 238, # 'ѕ' + 208: 239, # '–' + 209: 240, # '—' + 210: 241, # '“' + 211: 242, # '”' + 212: 243, # '‘' + 213: 244, # '’' + 214: 245, # '÷' + 215: 246, # '„' + 216: 247, # 'Ў' + 217: 248, # 'ў' + 218: 249, # 'Џ' + 219: 250, # 'џ' + 220: 251, # '№' + 221: 252, # 'Ё' + 222: 68, # 'ё' + 223: 16, # 'я' + 224: 3, # 'а' + 225: 21, # 'б' + 226: 10, # 'в' + 227: 19, # 'г' + 228: 13, # 'д' + 229: 2, # 'е' + 230: 24, # 'ж' + 231: 20, # 'з' + 232: 4, # 'и' + 233: 23, # 'й' + 234: 11, # 'к' + 235: 8, # 'л' + 236: 12, # 'м' + 237: 5, # 'н' + 238: 1, # 'о' + 239: 15, # 'п' + 240: 9, # 'р' + 241: 7, # 'с' + 242: 6, # 'т' + 243: 14, # 'у' + 244: 39, # 'ф' + 245: 26, # 'х' + 246: 28, # 'ц' + 247: 22, # 'ч' + 248: 25, # 'ш' + 249: 29, # 'щ' + 250: 54, # 'ъ' + 251: 18, # 'ы' + 252: 17, # 'ь' + 253: 30, # 'э' + 254: 27, # 'ю' + 255: 255, # '€' +} + +MACCYRILLIC_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='MacCyrillic', + language='Russian', + char_to_order_map=MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + 
typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +ISO_8859_5_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # '\x80' + 129: 192, # '\x81' + 130: 193, # '\x82' + 131: 194, # '\x83' + 132: 195, # '\x84' + 133: 196, # '\x85' + 134: 197, # '\x86' + 135: 198, # '\x87' + 136: 199, # '\x88' + 137: 200, # '\x89' + 138: 201, # '\x8a' + 139: 202, # '\x8b' + 140: 203, # '\x8c' + 141: 204, # '\x8d' + 142: 205, # '\x8e' + 143: 206, # '\x8f' + 144: 207, # '\x90' + 145: 208, # '\x91' + 146: 209, # '\x92' + 147: 210, # '\x93' + 148: 211, # '\x94' + 149: 212, # '\x95' + 150: 213, # '\x96' + 151: 214, # '\x97' + 152: 215, # '\x98' + 153: 216, # '\x99' + 154: 217, # '\x9a' + 155: 218, # '\x9b' + 156: 219, # '\x9c' + 157: 220, # '\x9d' + 158: 221, # '\x9e' + 159: 222, # '\x9f' + 160: 223, # '\xa0' + 161: 224, # 'Ё' + 162: 225, # 'Ђ' + 163: 226, # 'Ѓ' + 164: 227, # 'Є' + 165: 228, # 'Ѕ' + 166: 229, # 'І' + 167: 230, # 'Ї' + 168: 231, # 'Ј' + 169: 232, # 'Љ' + 170: 233, # 'Њ' + 171: 234, # 'Ћ' + 172: 235, # 'Ќ' + 173: 236, # '\xad' + 174: 237, # 'Ў' + 175: 238, # 'Џ' + 176: 37, # 'А' + 177: 44, # 'Б' + 178: 33, # 'В' + 179: 46, # 'Г' + 180: 41, # 'Д' + 181: 48, # 'Е' + 182: 56, # 'Ж' + 183: 51, # 
'З' + 184: 42, # 'И' + 185: 60, # 'Й' + 186: 36, # 'К' + 187: 49, # 'Л' + 188: 38, # 'М' + 189: 31, # 'Н' + 190: 34, # 'О' + 191: 35, # 'П' + 192: 45, # 'Р' + 193: 32, # 'С' + 194: 40, # 'Т' + 195: 52, # 'У' + 196: 53, # 'Ф' + 197: 55, # 'Х' + 198: 58, # 'Ц' + 199: 50, # 'Ч' + 200: 57, # 'Ш' + 201: 63, # 'Щ' + 202: 70, # 'Ъ' + 203: 62, # 'Ы' + 204: 61, # 'Ь' + 205: 47, # 'Э' + 206: 59, # 'Ю' + 207: 43, # 'Я' + 208: 3, # 'а' + 209: 21, # 'б' + 210: 10, # 'в' + 211: 19, # 'г' + 212: 13, # 'д' + 213: 2, # 'е' + 214: 24, # 'ж' + 215: 20, # 'з' + 216: 4, # 'и' + 217: 23, # 'й' + 218: 11, # 'к' + 219: 8, # 'л' + 220: 12, # 'м' + 221: 5, # 'н' + 222: 1, # 'о' + 223: 15, # 'п' + 224: 9, # 'р' + 225: 7, # 'с' + 226: 6, # 'т' + 227: 14, # 'у' + 228: 39, # 'ф' + 229: 26, # 'х' + 230: 28, # 'ц' + 231: 22, # 'ч' + 232: 25, # 'ш' + 233: 29, # 'щ' + 234: 54, # 'ъ' + 235: 18, # 'ы' + 236: 17, # 'ь' + 237: 30, # 'э' + 238: 27, # 'ю' + 239: 16, # 'я' + 240: 239, # '№' + 241: 68, # 'ё' + 242: 240, # 'ђ' + 243: 241, # 'ѓ' + 244: 242, # 'є' + 245: 243, # 'ѕ' + 246: 244, # 'і' + 247: 245, # 'ї' + 248: 246, # 'ј' + 249: 247, # 'љ' + 250: 248, # 'њ' + 251: 249, # 'ћ' + 252: 250, # 'ќ' + 253: 251, # '§' + 254: 252, # 'ў' + 255: 255, # 'џ' +} + +ISO_8859_5_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5', + language='Russian', + char_to_order_map=ISO_8859_5_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langthaimodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langthaimodel.py new file mode 100644 index 00000000..9a37db57 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/langthaimodel.py @@ -0,0 +1,4383 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +THAI_LANG_MODEL = { + 5: { # 'ก' + 5: 2, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 3, # 'ฎ' + 57: 2, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 2, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 3, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 1, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 30: { # 'ข' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 0, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 2, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 
46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 24: { # 'ค' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 3, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 8: { # 'ง' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 1, # 'ฉ' + 34: 2, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 2, # 'ศ' + 46: 1, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 3, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 26: { # 'จ' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 52: { # 'ฉ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 
44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 3, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 34: { # 'ช' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 1, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 51: { # 'ซ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 1, # 'ุ' + 35: 1, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 1, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 47: { # 'ญ' + 5: 1, # 'ก' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 3, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 
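# For orientation (illustrative lookups, not part of the vendored file):
# THAI_LANG_MODEL appears to be indexed as model[order_of_first_char][order_of_next_char],
# returning the 0-3 likelihood scale declared at the top of this file
# (3 = Positive, 2 = Likely, 1 = Unlikely, 0 = Negative).  Two lookups taken
# straight from the entries above:
#
#     THAI_LANG_MODEL[5][2]    # 'ก' followed by 'ร'  -> 3 (positive)
#     THAI_LANG_MODEL[52][5]   # 'ฉ' followed by 'ก'  -> 0 (negative)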
58: { # 'ฎ' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 57: { # 'ฏ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 49: { # 'ฐ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 53: { # 'ฑ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, 
# 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 55: { # 'ฒ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 43: { # 'ณ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 3, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 3, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 20: { # 'ด' + 5: 2, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 1, # 'ึ' + 27: 2, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 2, # 'ๆ' + 37: 2, # '็' + 6: 1, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 19: { # 'ต' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 2, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 2, 
# 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 1, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 2, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 44: { # 'ถ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 14: { # 'ท' + 5: 1, # 'ก' + 30: 1, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 3, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 1, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 3, # 'ศ' + 46: 1, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 48: { # 'ธ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 2, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 2, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 3: { # 'น' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 
'ฎ' + 57: 0, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 2, # 'ถ' + 14: 3, # 'ท' + 48: 3, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 1, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 3, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 3, # 'โ' + 29: 3, # 'ใ' + 33: 3, # 'ไ' + 50: 2, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 17: { # 'บ' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 1, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 2, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 2, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 25: { # 'ป' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 1, # 'ฎ' + 57: 3, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 1, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 2, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 1, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 39: { # 'ผ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 1, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 0, # 'ุ' + 35: 3, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' 
+ 37: 0, # '็' + 6: 3, # '่' + 7: 1, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 62: { # 'ฝ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 1, # 'ี' + 40: 2, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 1, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 31: { # 'พ' + 5: 1, # 'ก' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 2, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 1, # 'ึ' + 27: 3, # 'ื' + 32: 1, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 0, # '่' + 7: 1, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 54: { # 'ฟ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 45: { # 'ภ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 
4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 2, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 9: { # 'ม' + 5: 2, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 3, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 2, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 1, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 2, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 16: { # 'ย' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 2, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 1, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 2, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 2: { # 'ร' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 2, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 3, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 3, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 3, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 2, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 2, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 3, # 'เ' + 28: 3, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 61: { # 'ฤ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 
0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 2, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 15: { # 'ล' + 5: 2, # 'ก' + 30: 3, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 3, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 2, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 2, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 12: { # 'ว' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 2, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 42: { # 'ศ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 2, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 3, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 2, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 46: { # 'ษ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, 
# 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 2, # 'ฎ' + 57: 1, # 'ฏ' + 49: 2, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 18: { # 'ส' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 3, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 2, # 'ภ' + 9: 3, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 0, # 'แ' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 1, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 21: { # 'ห' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 4: { # 'อ' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' 
+ 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 63: { # 'ฯ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 22: { # 'ะ' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 1, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 10: { # 'ั' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 3, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 2, # 'ฐ' + 53: 0, # 'ฑ' + 55: 3, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 1: { # 'า' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 1, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 2, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'ฝ' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 
61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 3, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 3, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 36: { # 'ำ' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 23: { # 'ิ' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 3, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 2, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 3, # 'ศ' + 46: 2, # 'ษ' + 18: 2, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 13: { # 'ี' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 40: { # 'ึ' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 3, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 
55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 27: { # 'ื' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 32: { # 'ุ' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 3, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 1, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 1, # 'ศ' + 46: 2, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 35: { # 'ู' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, 
# '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 11: { # 'เ' + 5: 3, # 'ก' + 30: 3, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 3, # 'ฉ' + 34: 3, # 'ช' + 51: 2, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 3, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 3, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 28: { # 'แ' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 3, # 'ต' + 44: 2, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 3, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 41: { # 'โ' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 1, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 29: { # 'ใ' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 
'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 33: { # 'ไ' + 5: 1, # 'ก' + 30: 2, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 1, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 2, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 50: { # 'ๆ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 37: { # '็' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 6: { # '่' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 2, # 'ป' + 39: 2, # 
'ผ' + 62: 1, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 7: { # '้' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 38: { # '์' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 1, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 56: { # '๑' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 2, # '๑' + 59: 1, # '๒' + 60: 1, # '๕' + }, + 59: { # '๒' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' 
+ 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 1, # '๑' + 59: 1, # '๒' + 60: 3, # '๕' + }, + 60: { # '๕' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 2, # '๑' + 59: 1, # '๒' + 60: 0, # '๕' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +TIS_620_THAI_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 182, # 'A' + 66: 106, # 'B' + 67: 107, # 'C' + 68: 100, # 'D' + 69: 183, # 'E' + 70: 184, # 'F' + 71: 185, # 'G' + 72: 101, # 'H' + 73: 94, # 'I' + 74: 186, # 'J' + 75: 187, # 'K' + 76: 108, # 'L' + 77: 109, # 'M' + 78: 110, # 'N' + 79: 111, # 'O' + 80: 188, # 'P' + 81: 189, # 'Q' + 82: 190, # 'R' + 83: 89, # 'S' + 84: 95, # 'T' + 85: 112, # 'U' + 86: 113, # 'V' + 87: 191, # 'W' + 88: 192, # 'X' + 89: 193, # 'Y' + 90: 194, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 64, # 'a' + 98: 72, # 'b' + 99: 73, # 'c' + 100: 114, # 'd' + 101: 74, # 'e' + 102: 115, # 'f' + 103: 116, # 'g' + 104: 102, # 'h' + 105: 81, # 'i' + 106: 201, # 'j' + 107: 117, # 'k' + 108: 90, # 'l' + 109: 103, # 'm' + 110: 78, # 'n' + 111: 82, # 'o' + 112: 96, # 'p' + 113: 202, # 'q' + 114: 91, # 'r' + 115: 79, # 's' + 116: 84, # 't' + 117: 104, # 'u' + 118: 105, # 'v' + 119: 97, # 'w' + 120: 98, # 'x' + 121: 92, # 'y' + 122: 203, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 209, # '\x80' + 129: 210, # '\x81' + 130: 211, # '\x82' + 131: 212, # '\x83' + 132: 213, # '\x84' + 133: 88, # '\x85' + 134: 214, # '\x86' + 135: 215, # '\x87' + 136: 216, # '\x88' + 137: 217, # '\x89' + 138: 218, # '\x8a' + 139: 219, # '\x8b' + 140: 220, # '\x8c' + 141: 118, # '\x8d' + 142: 221, # '\x8e' + 143: 222, # '\x8f' + 144: 223, # '\x90' + 145: 224, # '\x91' + 146: 99, # '\x92' + 147: 85, # '\x93' + 148: 83, # '\x94' + 149: 225, # '\x95' + 150: 226, # '\x96' + 151: 227, # '\x97' + 152: 228, # '\x98' + 153: 229, # '\x99' + 154: 230, # '\x9a' + 155: 231, # '\x9b' + 156: 232, # '\x9c' + 157: 233, # '\x9d' + 158: 234, # '\x9e' + 159: 235, # '\x9f' + 160: 236, # None + 161: 5, # 'ก' + 162: 30, # 'ข' + 163: 237, # 'ฃ' + 164: 24, # 'ค' + 165: 238, # 'ฅ' + 166: 75, # 'ฆ' + 167: 8, # 'ง' + 168: 26, # 'จ' + 169: 52, # 'ฉ' + 170: 34, # 'ช' + 171: 51, # 'ซ' + 172: 119, # 'ฌ' + 173: 47, # 'ญ' + 174: 58, # 'ฎ' + 175: 57, # 'ฏ' + 176: 49, # 'ฐ' + 177: 53, # 'ฑ' + 178: 55, # 'ฒ' + 179: 43, # 'ณ' + 180: 20, # 'ด' + 181: 19, # 'ต' + 182: 44, # 'ถ' + 183: 14, # 'ท' + 184: 48, # 'ธ' + 185: 3, # 'น' + 186: 17, # 'บ' + 187: 25, # 'ป' + 188: 39, # 'ผ' + 189: 62, # 'ฝ' + 190: 31, # 'พ' + 191: 54, # 'ฟ' + 192: 45, # 'ภ' + 193: 9, # 'ม' + 194: 16, # 'ย' + 195: 2, # 'ร' + 196: 61, # 'ฤ' + 197: 15, # 'ล' + 198: 239, # 'ฦ' + 199: 12, # 'ว' + 200: 42, # 'ศ' + 201: 46, # 'ษ' + 202: 18, # 'ส' + 203: 21, # 'ห' + 204: 76, # 'ฬ' + 205: 4, # 'อ' + 206: 66, # 'ฮ' + 207: 63, # 'ฯ' + 208: 22, # 'ะ' + 209: 10, # 'ั' + 210: 1, # 'า' + 211: 36, # 'ำ' + 212: 23, # 'ิ' + 213: 13, # 'ี' + 214: 40, # 'ึ' + 215: 27, # 'ื' + 216: 32, # 'ุ' + 217: 35, # 'ู' + 218: 86, # 'ฺ' + 219: 240, # None + 220: 241, # None + 221: 242, # None + 222: 243, # None + 223: 244, # '฿' + 224: 11, # 'เ' + 225: 28, # 'แ' + 226: 41, # 'โ' + 227: 29, # 'ใ' + 228: 33, # 'ไ' + 229: 245, # 'ๅ' + 230: 50, # 'ๆ' + 231: 37, # '็' + 232: 6, # '่' + 233: 7, # '้' + 234: 67, # '๊' + 235: 77, # '๋' + 236: 38, # '์' + 237: 93, # 'ํ' + 238: 246, # '๎' + 239: 247, # '๏' + 240: 68, # '๐' + 241: 56, # '๑' + 242: 59, # '๒' + 243: 65, # '๓' + 244: 69, # '๔' + 245: 60, # '๕' + 246: 70, # '๖' + 247: 80, # '๗' + 248: 71, # '๘' + 249: 87, # '๙' + 250: 248, # '๚' + 251: 249, # '๛' + 252: 250, # None + 253: 251, # None + 254: 252, # None + 255: 253, # None +} + +TIS_620_THAI_MODEL = SingleByteCharSetModel(charset_name='TIS-620', + language='Thai', + 
char_to_order_map=TIS_620_THAI_CHAR_TO_ORDER, + language_model=THAI_LANG_MODEL, + typical_positive_ratio=0.926386, + keep_ascii_letters=False, + alphabet='กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛') + diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langturkishmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langturkishmodel.py new file mode 100644 index 00000000..43f4230a --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/langturkishmodel.py @@ -0,0 +1,4383 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +TURKISH_LANG_MODEL = { + 23: { # 'A' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 37: { # 'B' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 47: { # 'C' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 
1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 39: { # 'D' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 0, # 'ş' + }, + 29: { # 'E' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 52: { # 'F' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 1, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 2, # 'ş' + }, + 36: { # 'G' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 2, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 0, # 'r' + 8: 1, 
# 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 1, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 45: { # 'H' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 2, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 2, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 53: { # 'I' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 60: { # 'J' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 16: { # 'K' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 
0, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 1, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 49: { # 'L' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 2, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 20: { # 'M' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 0, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 46: { # 'N' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 42: { # 'O' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 
'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 2, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 48: { # 'P' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 44: { # 'R' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 35: { # 'S' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 1, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 2, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 2, # 'Ö' + 
55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 31: { # 'T' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 2, # 't' + 14: 2, # 'u' + 32: 1, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 51: { # 'U' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 38: { # 'V' + 23: 1, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 1, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 62: { # 'W' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 
10: 0, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 43: { # 'Y' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 56: { # 'Z' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 1: { # 'a' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 21: { # 'b' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 
44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 1, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 2, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 28: { # 'c' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 3, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 1, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 2, # 'ş' + }, + 12: { # 'd' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 2: { # 'e' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 
'ş' + }, + 18: { # 'f' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 1, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 27: { # 'g' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 25: { # 'h' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 3: { # 'i' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 3, # 'g' + 25: 1, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' 
+ 57: 1, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 24: { # 'j' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 10: { # 'k' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 5: { # 'l' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 1, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 13: { # 'm' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 
'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 4: { # 'n' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 15: { # 'o' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 2, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ğ' + 41: 2, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Ş' + 19: 2, # 'ş' + }, + 26: { # 'p' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 7: { # 'r' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 
60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 8: { # 's' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 9: { # 't' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 14: { # 'u' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 2, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 
'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 32: { # 'v' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 57: { # 'w' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 1, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 58: { # 'x' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 11: { # 'y' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 
'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 22: { # 'z' + 23: 2, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 2, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 3, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 2, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 1, # 'Ş' + 19: 2, # 'ş' + }, + 63: { # '·' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 54: { # 'Ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 50: { # 'Ö' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 1, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 
'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 1, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 55: { # 'Ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 59: { # 'â' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 0, # 'ş' + }, + 33: { # 'ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 61: { # 'î' + 23: 0, # 'A' + 37: 0, # 'B' 
+ 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 34: { # 'ö' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 3, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 17: { # 'ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 30: { # 'ğ' + 23: 0, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 
2, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 2, # 'İ' + 6: 2, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 41: { # 'İ' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 6: { # 'ı' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 40: { # 'Ş' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 2, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 3, # 'f' + 27: 0, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 1, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Ş' + 19: 2, # 'ş' + }, + 19: { # 'ş' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 2, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 
2, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +ISO_8859_9_TURKISH_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 255, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 255, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 255, # ' ' + 33: 255, # '!' + 34: 255, # '"' + 35: 255, # '#' + 36: 255, # '$' + 37: 255, # '%' + 38: 255, # '&' + 39: 255, # "'" + 40: 255, # '(' + 41: 255, # ')' + 42: 255, # '*' + 43: 255, # '+' + 44: 255, # ',' + 45: 255, # '-' + 46: 255, # '.' + 47: 255, # '/' + 48: 255, # '0' + 49: 255, # '1' + 50: 255, # '2' + 51: 255, # '3' + 52: 255, # '4' + 53: 255, # '5' + 54: 255, # '6' + 55: 255, # '7' + 56: 255, # '8' + 57: 255, # '9' + 58: 255, # ':' + 59: 255, # ';' + 60: 255, # '<' + 61: 255, # '=' + 62: 255, # '>' + 63: 255, # '?' 
+ 64: 255, # '@' + 65: 23, # 'A' + 66: 37, # 'B' + 67: 47, # 'C' + 68: 39, # 'D' + 69: 29, # 'E' + 70: 52, # 'F' + 71: 36, # 'G' + 72: 45, # 'H' + 73: 53, # 'I' + 74: 60, # 'J' + 75: 16, # 'K' + 76: 49, # 'L' + 77: 20, # 'M' + 78: 46, # 'N' + 79: 42, # 'O' + 80: 48, # 'P' + 81: 69, # 'Q' + 82: 44, # 'R' + 83: 35, # 'S' + 84: 31, # 'T' + 85: 51, # 'U' + 86: 38, # 'V' + 87: 62, # 'W' + 88: 65, # 'X' + 89: 43, # 'Y' + 90: 56, # 'Z' + 91: 255, # '[' + 92: 255, # '\\' + 93: 255, # ']' + 94: 255, # '^' + 95: 255, # '_' + 96: 255, # '`' + 97: 1, # 'a' + 98: 21, # 'b' + 99: 28, # 'c' + 100: 12, # 'd' + 101: 2, # 'e' + 102: 18, # 'f' + 103: 27, # 'g' + 104: 25, # 'h' + 105: 3, # 'i' + 106: 24, # 'j' + 107: 10, # 'k' + 108: 5, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 15, # 'o' + 112: 26, # 'p' + 113: 64, # 'q' + 114: 7, # 'r' + 115: 8, # 's' + 116: 9, # 't' + 117: 14, # 'u' + 118: 32, # 'v' + 119: 57, # 'w' + 120: 58, # 'x' + 121: 11, # 'y' + 122: 22, # 'z' + 123: 255, # '{' + 124: 255, # '|' + 125: 255, # '}' + 126: 255, # '~' + 127: 255, # '\x7f' + 128: 180, # '\x80' + 129: 179, # '\x81' + 130: 178, # '\x82' + 131: 177, # '\x83' + 132: 176, # '\x84' + 133: 175, # '\x85' + 134: 174, # '\x86' + 135: 173, # '\x87' + 136: 172, # '\x88' + 137: 171, # '\x89' + 138: 170, # '\x8a' + 139: 169, # '\x8b' + 140: 168, # '\x8c' + 141: 167, # '\x8d' + 142: 166, # '\x8e' + 143: 165, # '\x8f' + 144: 164, # '\x90' + 145: 163, # '\x91' + 146: 162, # '\x92' + 147: 161, # '\x93' + 148: 160, # '\x94' + 149: 159, # '\x95' + 150: 101, # '\x96' + 151: 158, # '\x97' + 152: 157, # '\x98' + 153: 156, # '\x99' + 154: 155, # '\x9a' + 155: 154, # '\x9b' + 156: 153, # '\x9c' + 157: 152, # '\x9d' + 158: 151, # '\x9e' + 159: 106, # '\x9f' + 160: 150, # '\xa0' + 161: 149, # '¡' + 162: 148, # '¢' + 163: 147, # '£' + 164: 146, # '¤' + 165: 145, # '¥' + 166: 144, # '¦' + 167: 100, # '§' + 168: 143, # '¨' + 169: 142, # '©' + 170: 141, # 'ª' + 171: 140, # '«' + 172: 139, # '¬' + 173: 138, # '\xad' + 174: 137, # '®' + 175: 136, # '¯' + 176: 94, # '°' + 177: 80, # '±' + 178: 93, # '²' + 179: 135, # '³' + 180: 105, # '´' + 181: 134, # 'µ' + 182: 133, # '¶' + 183: 63, # '·' + 184: 132, # '¸' + 185: 131, # '¹' + 186: 130, # 'º' + 187: 129, # '»' + 188: 128, # '¼' + 189: 127, # '½' + 190: 126, # '¾' + 191: 125, # '¿' + 192: 124, # 'À' + 193: 104, # 'Á' + 194: 73, # 'Â' + 195: 99, # 'Ã' + 196: 79, # 'Ä' + 197: 85, # 'Å' + 198: 123, # 'Æ' + 199: 54, # 'Ç' + 200: 122, # 'È' + 201: 98, # 'É' + 202: 92, # 'Ê' + 203: 121, # 'Ë' + 204: 120, # 'Ì' + 205: 91, # 'Í' + 206: 103, # 'Î' + 207: 119, # 'Ï' + 208: 68, # 'Ğ' + 209: 118, # 'Ñ' + 210: 117, # 'Ò' + 211: 97, # 'Ó' + 212: 116, # 'Ô' + 213: 115, # 'Õ' + 214: 50, # 'Ö' + 215: 90, # '×' + 216: 114, # 'Ø' + 217: 113, # 'Ù' + 218: 112, # 'Ú' + 219: 111, # 'Û' + 220: 55, # 'Ü' + 221: 41, # 'İ' + 222: 40, # 'Ş' + 223: 86, # 'ß' + 224: 89, # 'à' + 225: 70, # 'á' + 226: 59, # 'â' + 227: 78, # 'ã' + 228: 71, # 'ä' + 229: 82, # 'å' + 230: 88, # 'æ' + 231: 33, # 'ç' + 232: 77, # 'è' + 233: 66, # 'é' + 234: 84, # 'ê' + 235: 83, # 'ë' + 236: 110, # 'ì' + 237: 75, # 'í' + 238: 61, # 'î' + 239: 96, # 'ï' + 240: 30, # 'ğ' + 241: 67, # 'ñ' + 242: 109, # 'ò' + 243: 74, # 'ó' + 244: 87, # 'ô' + 245: 102, # 'õ' + 246: 34, # 'ö' + 247: 95, # '÷' + 248: 81, # 'ø' + 249: 108, # 'ù' + 250: 76, # 'ú' + 251: 72, # 'û' + 252: 17, # 'ü' + 253: 6, # 'ı' + 254: 19, # 'ş' + 255: 107, # 'ÿ' +} + +ISO_8859_9_TURKISH_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-9', + language='Turkish', + 
char_to_order_map=ISO_8859_9_TURKISH_CHAR_TO_ORDER, + language_model=TURKISH_LANG_MODEL, + typical_positive_ratio=0.97029, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVYZabcdefghijklmnoprstuvyzÂÇÎÖÛÜâçîöûüĞğİıŞş') + diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/latin1prober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/latin1prober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/mbcharsetprober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/mbcharsetprober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/mbcsgroupprober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/mbcsgroupprober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcssm.py b/venv/Lib/site-packages/pip/_vendor/chardet/mbcssm.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/mbcssm.py rename to venv/Lib/site-packages/pip/_vendor/chardet/mbcssm.py diff --git a/env/lib/python2.7/site-packages/setuptools/_vendor/__init__.py b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/_vendor/__init__.py rename to venv/Lib/site-packages/pip/_vendor/chardet/metadata/__init__.py diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..29a56edd Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-36.pyc new file mode 100644 index 00000000..3c38ecda Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/languages.py b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/languages.py new file mode 100644 index 00000000..3237d5ab --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/languages.py @@ -0,0 +1,310 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +Metadata about languages used by our model training code for our +SingleByteCharSetProbers. Could be used for other things in the future. + +This code is based on the language metadata from the uchardet project. +""" +from __future__ import absolute_import, print_function + +from string import ascii_letters + + +# TODO: Add Ukranian (KOI8-U) + +class Language(object): + """Metadata about a language useful for training models + + :ivar name: The human name for the language, in English. + :type name: str + :ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise, + or use another catalog as a last resort. 
+ :type iso_code: str + :ivar use_ascii: Whether or not ASCII letters should be included in trained + models. + :type use_ascii: bool + :ivar charsets: The charsets we want to support and create data for. + :type charsets: list of str + :ivar alphabet: The characters in the language's alphabet. If `use_ascii` is + `True`, you only need to add those not in the ASCII set. + :type alphabet: str + :ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling + Wikipedia for training data. + :type wiki_start_pages: list of str + """ + def __init__(self, name=None, iso_code=None, use_ascii=True, charsets=None, + alphabet=None, wiki_start_pages=None): + super(Language, self).__init__() + self.name = name + self.iso_code = iso_code + self.use_ascii = use_ascii + self.charsets = charsets + if self.use_ascii: + if alphabet: + alphabet += ascii_letters + else: + alphabet = ascii_letters + elif not alphabet: + raise ValueError('Must supply alphabet if use_ascii is False') + self.alphabet = ''.join(sorted(set(alphabet))) if alphabet else None + self.wiki_start_pages = wiki_start_pages + + def __repr__(self): + return '{}({})'.format(self.__class__.__name__, + ', '.join('{}={!r}'.format(k, v) + for k, v in self.__dict__.items() + if not k.startswith('_'))) + + +LANGUAGES = {'Arabic': Language(name='Arabic', + iso_code='ar', + use_ascii=False, + # We only support encodings that use isolated + # forms, because the current recommendation is + # that the rendering system handles presentation + # forms. This means we purposefully skip IBM864. + charsets=['ISO-8859-6', 'WINDOWS-1256', + 'CP720', 'CP864'], + alphabet=u'ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـفقكلمنهوىيًٌٍَُِّ', + wiki_start_pages=[u'الصفحة_الرئيسية']), + 'Belarusian': Language(name='Belarusian', + iso_code='be', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'IBM866', 'MacCyrillic'], + alphabet=(u'АБВГДЕЁЖЗІЙКЛМНОПРСТУЎФХЦЧШЫЬЭЮЯ' + u'абвгдеёжзійклмнопрстуўфхцчшыьэюяʼ'), + wiki_start_pages=[u'Галоўная_старонка']), + 'Bulgarian': Language(name='Bulgarian', + iso_code='bg', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'IBM855'], + alphabet=(u'АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯ' + u'абвгдежзийклмнопрстуфхцчшщъьюя'), + wiki_start_pages=[u'Начална_страница']), + 'Czech': Language(name='Czech', + iso_code='cz', + use_ascii=True, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=u'áčďéěíňóřšťúůýžÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ', + wiki_start_pages=[u'Hlavní_strana']), + 'Danish': Language(name='Danish', + iso_code='da', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'æøåÆØÅ', + wiki_start_pages=[u'Forside']), + 'German': Language(name='German', + iso_code='de', + use_ascii=True, + charsets=['ISO-8859-1', 'WINDOWS-1252'], + alphabet=u'äöüßÄÖÜ', + wiki_start_pages=[u'Wikipedia:Hauptseite']), + 'Greek': Language(name='Greek', + iso_code='el', + use_ascii=False, + charsets=['ISO-8859-7', 'WINDOWS-1253'], + alphabet=(u'αβγδεζηθικλμνξοπρσςτυφχψωάέήίόύώ' + u'ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎΏ'), + wiki_start_pages=[u'Πύλη:Κύρια']), + 'English': Language(name='English', + iso_code='en', + use_ascii=True, + charsets=['ISO-8859-1', 'WINDOWS-1252'], + wiki_start_pages=[u'Main_Page']), + 'Esperanto': Language(name='Esperanto', + iso_code='eo', + # Q, W, X, and Y not used at all + use_ascii=False, + charsets=['ISO-8859-3'], + alphabet=(u'abcĉdefgĝhĥijĵklmnoprsŝtuŭvz' + u'ABCĈDEFGĜHĤIJĴKLMNOPRSŜTUŬVZ'), + wiki_start_pages=[u'Vikipedio:Ĉefpaĝo']), + 'Spanish': Language(name='Spanish', + iso_code='es', 
+ use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ñáéíóúüÑÁÉÍÓÚÜ', + wiki_start_pages=[u'Wikipedia:Portada']), + 'Estonian': Language(name='Estonian', + iso_code='et', + use_ascii=False, + charsets=['ISO-8859-4', 'ISO-8859-13', + 'WINDOWS-1257'], + # C, F, Š, Q, W, X, Y, Z, Ž are only for + # loanwords + alphabet=(u'ABDEGHIJKLMNOPRSTUVÕÄÖÜ' + u'abdeghijklmnoprstuvõäöü'), + wiki_start_pages=[u'Esileht']), + 'Finnish': Language(name='Finnish', + iso_code='fi', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ÅÄÖŠŽåäöšž', + wiki_start_pages=[u'Wikipedia:Etusivu']), + 'French': Language(name='French', + iso_code='fr', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'œàâçèéîïùûêŒÀÂÇÈÉÎÏÙÛÊ', + wiki_start_pages=[u'Wikipédia:Accueil_principal', + u'Bœuf (animal)']), + 'Hebrew': Language(name='Hebrew', + iso_code='he', + use_ascii=False, + charsets=['ISO-8859-8', 'WINDOWS-1255'], + alphabet=u'אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ', + wiki_start_pages=[u'עמוד_ראשי']), + 'Croatian': Language(name='Croatian', + iso_code='hr', + # Q, W, X, Y are only used for foreign words. + use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'abcčćdđefghijklmnoprsštuvzž' + u'ABCČĆDĐEFGHIJKLMNOPRSŠTUVZŽ'), + wiki_start_pages=[u'Glavna_stranica']), + 'Hungarian': Language(name='Hungarian', + iso_code='hu', + # Q, W, X, Y are only used for foreign words. + use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'abcdefghijklmnoprstuvzáéíóöőúüű' + u'ABCDEFGHIJKLMNOPRSTUVZÁÉÍÓÖŐÚÜŰ'), + wiki_start_pages=[u'Kezdőlap']), + 'Italian': Language(name='Italian', + iso_code='it', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ÀÈÉÌÒÓÙàèéìòóù', + wiki_start_pages=[u'Pagina_principale']), + 'Lithuanian': Language(name='Lithuanian', + iso_code='lt', + use_ascii=False, + charsets=['ISO-8859-13', 'WINDOWS-1257', + 'ISO-8859-4'], + # Q, W, and X not used at all + alphabet=(u'AĄBCČDEĘĖFGHIĮYJKLMNOPRSŠTUŲŪVZŽ' + u'aąbcčdeęėfghiįyjklmnoprsštuųūvzž'), + wiki_start_pages=[u'Pagrindinis_puslapis']), + 'Latvian': Language(name='Latvian', + iso_code='lv', + use_ascii=False, + charsets=['ISO-8859-13', 'WINDOWS-1257', + 'ISO-8859-4'], + # Q, W, X, Y are only for loanwords + alphabet=(u'AĀBCČDEĒFGĢHIĪJKĶLĻMNŅOPRSŠTUŪVZŽ' + u'aābcčdeēfgģhiījkķlļmnņoprsštuūvzž'), + wiki_start_pages=[u'Sākumlapa']), + 'Macedonian': Language(name='Macedonian', + iso_code='mk', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'MacCyrillic', 'IBM855'], + alphabet=(u'АБВГДЃЕЖЗЅИЈКЛЉМНЊОПРСТЌУФХЦЧЏШ' + u'абвгдѓежзѕијклљмнњопрстќуфхцчџш'), + wiki_start_pages=[u'Главна_страница']), + 'Dutch': Language(name='Dutch', + iso_code='nl', + use_ascii=True, + charsets=['ISO-8859-1', 'WINDOWS-1252'], + wiki_start_pages=[u'Hoofdpagina']), + 'Polish': Language(name='Polish', + iso_code='pl', + # Q and X are only used for foreign words. 
+ use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'AĄBCĆDEĘFGHIJKLŁMNŃOÓPRSŚTUWYZŹŻ' + u'aąbcćdeęfghijklłmnńoóprsśtuwyzźż'), + wiki_start_pages=[u'Wikipedia:Strona_główna']), + 'Portuguese': Language(name='Portuguese', + iso_code='pt', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ÁÂÃÀÇÉÊÍÓÔÕÚáâãàçéêíóôõú', + wiki_start_pages=[u'Wikipédia:Página_principal']), + 'Romanian': Language(name='Romanian', + iso_code='ro', + use_ascii=True, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=u'ăâîșțĂÂÎȘȚ', + wiki_start_pages=[u'Pagina_principală']), + 'Russian': Language(name='Russian', + iso_code='ru', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'KOI8-R', 'MacCyrillic', 'IBM866', + 'IBM855'], + alphabet=(u'абвгдеёжзийклмнопрстуфхцчшщъыьэюя' + u'АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ'), + wiki_start_pages=[u'Заглавная_страница']), + 'Slovak': Language(name='Slovak', + iso_code='sk', + use_ascii=True, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=u'áäčďéíĺľňóôŕšťúýžÁÄČĎÉÍĹĽŇÓÔŔŠŤÚÝŽ', + wiki_start_pages=[u'Hlavná_stránka']), + 'Slovene': Language(name='Slovene', + iso_code='sl', + # Q, W, X, Y are only used for foreign words. + use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'abcčdefghijklmnoprsštuvzž' + u'ABCČDEFGHIJKLMNOPRSŠTUVZŽ'), + wiki_start_pages=[u'Glavna_stran']), + # Serbian can be written in both Latin and Cyrillic, but there's no + # simple way to get the Latin alphabet pages from Wikipedia through + # the API, so for now we just support Cyrillic. + 'Serbian': Language(name='Serbian', + iso_code='sr', + alphabet=(u'АБВГДЂЕЖЗИЈКЛЉМНЊОПРСТЋУФХЦЧЏШ' + u'абвгдђежзијклљмнњопрстћуфхцчџш'), + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'MacCyrillic', 'IBM855'], + wiki_start_pages=[u'Главна_страна']), + 'Thai': Language(name='Thai', + iso_code='th', + use_ascii=False, + charsets=['ISO-8859-11', 'TIS-620', 'CP874'], + alphabet=u'กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛', + wiki_start_pages=[u'หน้าหลัก']), + 'Turkish': Language(name='Turkish', + iso_code='tr', + # Q, W, and X are not used by Turkish + use_ascii=False, + charsets=['ISO-8859-3', 'ISO-8859-9', + 'WINDOWS-1254'], + alphabet=(u'abcçdefgğhıijklmnoöprsştuüvyzâîû' + u'ABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZÂÎÛ'), + wiki_start_pages=[u'Ana_Sayfa']), + 'Vietnamese': Language(name='Vietnamese', + iso_code='vi', + use_ascii=False, + # Windows-1258 is the only common 8-bit + # Vietnamese encoding supported by Python. + # From Wikipedia: + # For systems that lack support for Unicode, + # dozens of 8-bit Vietnamese code pages are + # available.[1] The most common are VISCII + # (TCVN 5712:1993), VPS, and Windows-1258.[3] + # Where ASCII is required, such as when + # ensuring readability in plain text e-mail, + # Vietnamese letters are often encoded + # according to Vietnamese Quoted-Readable + # (VIQR) or VSCII Mnemonic (VSCII-MNEM),[4] + # though usage of either variable-width + # scheme has declined dramatically following + # the adoption of Unicode on the World Wide + # Web. 
+ charsets=['WINDOWS-1258'], + alphabet=(u'aăâbcdđeêghiklmnoôơpqrstuưvxy' + u'AĂÂBCDĐEÊGHIKLMNOÔƠPQRSTUƯVXY'), + wiki_start_pages=[u'Chữ_Quốc_ngữ']), + } diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/sbcharsetprober.py similarity index 76% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/sbcharsetprober.py index 0adb51de..46ba835c 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py +++ b/venv/Lib/site-packages/pip/_vendor/chardet/sbcharsetprober.py @@ -26,10 +26,22 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from collections import namedtuple + from .charsetprober import CharSetProber from .enums import CharacterCategory, ProbingState, SequenceLikelihood +SingleByteCharSetModel = namedtuple('SingleByteCharSetModel', + ['charset_name', + 'language', + 'char_to_order_map', + 'language_model', + 'typical_positive_ratio', + 'keep_ascii_letters', + 'alphabet']) + + class SingleByteCharSetProber(CharSetProber): SAMPLE_SIZE = 64 SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 @@ -65,25 +77,25 @@ def charset_name(self): if self._name_prober: return self._name_prober.charset_name else: - return self._model['charset_name'] + return self._model.charset_name @property def language(self): if self._name_prober: return self._name_prober.language else: - return self._model.get('language') + return self._model.language def feed(self, byte_str): - if not self._model['keep_english_letter']: + # TODO: Make filter_international_words keep things in self.alphabet + if not self._model.keep_ascii_letters: byte_str = self.filter_international_words(byte_str) if not byte_str: return self.state - char_to_order_map = self._model['char_to_order_map'] - for i, c in enumerate(byte_str): - # XXX: Order is in range 1-64, so one would think we want 0-63 here, - # but that leads to 27 more test failures than before. - order = char_to_order_map[c] + char_to_order_map = self._model.char_to_order_map + language_model = self._model.language_model + for char in byte_str: + order = char_to_order_map.get(char, CharacterCategory.UNDEFINED) # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but # CharacterCategory.SYMBOL is actually 253, so we use CONTROL # to make it closer to the original intent. The only difference @@ -91,20 +103,21 @@ def feed(self, byte_str): # _total_char purposes. if order < CharacterCategory.CONTROL: self._total_char += 1 + # TODO: Follow uchardet's lead and discount confidence for frequent + # control characters. 
+ # See https://github.com/BYVoid/uchardet/commit/55b4f23971db61 if order < self.SAMPLE_SIZE: self._freq_char += 1 if self._last_order < self.SAMPLE_SIZE: self._total_seqs += 1 if not self._reversed: - i = (self._last_order * self.SAMPLE_SIZE) + order - model = self._model['precedence_matrix'][i] - else: # reverse the order of the letters in the lookup - i = (order * self.SAMPLE_SIZE) + self._last_order - model = self._model['precedence_matrix'][i] - self._seq_counters[model] += 1 + lm_cat = language_model[self._last_order][order] + else: + lm_cat = language_model[order][self._last_order] + self._seq_counters[lm_cat] += 1 self._last_order = order - charset_name = self._model['charset_name'] + charset_name = self._model.charset_name if self.state == ProbingState.DETECTING: if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: confidence = self.get_confidence() @@ -125,7 +138,7 @@ def get_confidence(self): r = 0.01 if self._total_seqs > 0: r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / - self._total_seqs / self._model['typical_positive_ratio']) + self._total_seqs / self._model.typical_positive_ratio) r = r * self._freq_char / self._total_char if r >= 1.0: r = 0.99 diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/sbcsgroupprober.py new file mode 100644 index 00000000..bdeef4e1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/sbcsgroupprober.py @@ -0,0 +1,83 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .hebrewprober import HebrewProber +from .langbulgarianmodel import (ISO_8859_5_BULGARIAN_MODEL, + WINDOWS_1251_BULGARIAN_MODEL) +from .langgreekmodel import ISO_8859_7_GREEK_MODEL, WINDOWS_1253_GREEK_MODEL +from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL +# from .langhungarianmodel import (ISO_8859_2_HUNGARIAN_MODEL, +# WINDOWS_1250_HUNGARIAN_MODEL) +from .langrussianmodel import (IBM855_RUSSIAN_MODEL, IBM866_RUSSIAN_MODEL, + ISO_8859_5_RUSSIAN_MODEL, KOI8_R_RUSSIAN_MODEL, + MACCYRILLIC_RUSSIAN_MODEL, + WINDOWS_1251_RUSSIAN_MODEL) +from .langthaimodel import TIS_620_THAI_MODEL +from .langturkishmodel import ISO_8859_9_TURKISH_MODEL +from .sbcharsetprober import SingleByteCharSetProber + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, + False, hebrew_prober) + # TODO: See if using ISO-8859-8 Hebrew model works better here, since + # it's actually the visual one + visual_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, + True, hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, + visual_hebrew_prober) + # TODO: ORDER MATTERS HERE. I changed the order vs what was in master + # and several tests failed that did not before. Some thought + # should be put into the ordering, and we should consider making + # order not matter here, because that is very counter-intuitive. + self.probers = [ + SingleByteCharSetProber(WINDOWS_1251_RUSSIAN_MODEL), + SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL), + SingleByteCharSetProber(ISO_8859_5_RUSSIAN_MODEL), + SingleByteCharSetProber(MACCYRILLIC_RUSSIAN_MODEL), + SingleByteCharSetProber(IBM866_RUSSIAN_MODEL), + SingleByteCharSetProber(IBM855_RUSSIAN_MODEL), + SingleByteCharSetProber(ISO_8859_7_GREEK_MODEL), + SingleByteCharSetProber(WINDOWS_1253_GREEK_MODEL), + SingleByteCharSetProber(ISO_8859_5_BULGARIAN_MODEL), + SingleByteCharSetProber(WINDOWS_1251_BULGARIAN_MODEL), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. 
+ # SingleByteCharSetProber(ISO_8859_2_HUNGARIAN_MODEL), + # SingleByteCharSetProber(WINDOWS_1250_HUNGARIAN_MODEL), + SingleByteCharSetProber(TIS_620_THAI_MODEL), + SingleByteCharSetProber(ISO_8859_9_TURKISH_MODEL), + hebrew_prober, + logical_hebrew_prober, + visual_hebrew_prober, + ] + self.reset() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/sjisprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/sjisprober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/sjisprober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/sjisprober.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/universaldetector.py b/venv/Lib/site-packages/pip/_vendor/chardet/universaldetector.py similarity index 97% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/universaldetector.py rename to venv/Lib/site-packages/pip/_vendor/chardet/universaldetector.py index 7b4e92d6..055a8ac1 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/chardet/universaldetector.py +++ b/venv/Lib/site-packages/pip/_vendor/chardet/universaldetector.py @@ -266,7 +266,7 @@ def close(self): 'language': max_prober.language} # Log all prober confidences if none met MINIMUM_THRESHOLD - if self.logger.getEffectiveLevel() == logging.DEBUG: + if self.logger.getEffectiveLevel() <= logging.DEBUG: if self.result['encoding'] is None: self.logger.debug('no probers hit minimum threshold') for group_prober in self._charset_probers: @@ -280,7 +280,7 @@ def close(self): prober.get_confidence()) else: self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) + group_prober.charset_name, + group_prober.language, + group_prober.get_confidence()) return self.result diff --git a/env/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/utf8prober.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.py rename to venv/Lib/site-packages/pip/_vendor/chardet/utf8prober.py diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/version.py b/venv/Lib/site-packages/pip/_vendor/chardet/version.py new file mode 100644 index 00000000..70369b9d --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "4.0.0" +VERSION = __version__.split('.') diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__init__.py b/venv/Lib/site-packages/pip/_vendor/colorama/__init__.py new file mode 100644 index 00000000..b149ed79 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/colorama/__init__.py @@ -0,0 +1,6 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
+from .initialise import init, deinit, reinit, colorama_text +from .ansi import Fore, Back, Style, Cursor +from .ansitowin32 import AnsiToWin32 + +__version__ = '0.4.4' diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..70567e24 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-36.pyc new file mode 100644 index 00000000..91f846a3 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-36.pyc new file mode 100644 index 00000000..df65264a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-36.pyc new file mode 100644 index 00000000..c6c604c9 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-36.pyc new file mode 100644 index 00000000..890c0d99 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-36.pyc new file mode 100644 index 00000000..9e6ab82a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.py b/venv/Lib/site-packages/pip/_vendor/colorama/ansi.py similarity index 99% rename from env/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.py rename to venv/Lib/site-packages/pip/_vendor/colorama/ansi.py index 78776588..11ec695f 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.py +++ b/venv/Lib/site-packages/pip/_vendor/colorama/ansi.py @@ -6,7 +6,7 @@ CSI = '\033[' OSC = '\033]' -BEL = '\007' +BEL = '\a' def code_to_chars(code): diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.py b/venv/Lib/site-packages/pip/_vendor/colorama/ansitowin32.py similarity index 94% rename from env/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.py rename to venv/Lib/site-packages/pip/_vendor/colorama/ansitowin32.py index 359c92be..6039a054 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.py +++ b/venv/Lib/site-packages/pip/_vendor/colorama/ansitowin32.py @@ -3,7 +3,7 @@ import sys import os -from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style +from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL from .winterm import WinTerm, WinColor, WinStyle from .win32 import windll, winapi_test @@ -68,7 +68,7 @@ class AnsiToWin32(object): win32 function calls. 
''' ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer - ANSI_OSC_RE = re.compile('\001?\033\\]((?:.|;)*?)(\x07)\002?') # Operating System Command + ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command def __init__(self, wrapped, convert=None, strip=None, autoreset=False): # The wrapped stream (normally sys.stdout or sys.stderr) @@ -247,11 +247,12 @@ def convert_osc(self, text): start, end = match.span() text = text[:start] + text[end:] paramstring, command = match.groups() - if command in '\x07': # \x07 = BEL - params = paramstring.split(";") - # 0 - change title and icon (we will only change title) - # 1 - change icon (we don't support this) - # 2 - change title - if params[0] in '02': - winterm.set_title(params[1]) + if command == BEL: + if paramstring.count(";") == 1: + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) return text diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.py b/venv/Lib/site-packages/pip/_vendor/colorama/initialise.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.py rename to venv/Lib/site-packages/pip/_vendor/colorama/initialise.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/win32.py b/venv/Lib/site-packages/pip/_vendor/colorama/win32.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/colorama/win32.py rename to venv/Lib/site-packages/pip/_vendor/colorama/win32.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.py b/venv/Lib/site-packages/pip/_vendor/colorama/winterm.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.py rename to venv/Lib/site-packages/pip/_vendor/colorama/winterm.py diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__init__.py b/venv/Lib/site-packages/pip/_vendor/distlib/__init__.py new file mode 100644 index 00000000..63d916e3 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/distlib/__init__.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2019 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +import logging + +__version__ = '0.3.1' + +class DistlibException(Exception): + pass + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + class NullHandler(logging.Handler): + def handle(self, record): pass + def emit(self, record): pass + def createLock(self): self.lock = None + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..f98b68f7 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-36.pyc new file mode 100644 index 00000000..ba055430 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-36.pyc new file mode 100644 index 00000000..73ed7c2c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-36.pyc new file mode 100644 index 00000000..36d45509 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-36.pyc new file mode 100644 index 00000000..ee0e6496 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-36.pyc new file mode 100644 index 00000000..f5391399 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-36.pyc new file mode 100644 index 00000000..5ba0a0d5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-36.pyc new file mode 100644 index 00000000..18426e29 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-36.pyc new file mode 100644 index 00000000..bc259a0f Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-36.pyc new 
file mode 100644 index 00000000..52ba4bd6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-36.pyc new file mode 100644 index 00000000..e489c139 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-36.pyc new file mode 100644 index 00000000..fb927a0b Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-36.pyc new file mode 100644 index 00000000..87ec09a0 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/__init__.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/__init__.py rename to venv/Lib/site-packages/pip/_vendor/distlib/_backport/__init__.py diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..472e0a8b Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-36.pyc new file mode 100644 index 00000000..a2b893a4 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-36.pyc new file mode 100644 index 00000000..01cc0ce1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-36.pyc new file mode 100644 index 00000000..d7ab7c67 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-36.pyc new file mode 100644 index 00000000..95692482 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/misc.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/misc.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/misc.py rename to 
venv/Lib/site-packages/pip/_vendor/distlib/_backport/misc.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py similarity index 99% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.py rename to venv/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py index 159e49ee..10ed3625 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.py +++ b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py @@ -14,7 +14,10 @@ import stat from os.path import abspath import fnmatch -import collections +try: + from collections.abc import Callable +except ImportError: + from collections import Callable import errno from . import tarfile @@ -528,7 +531,7 @@ def register_archive_format(name, function, extra_args=None, description=''): """ if extra_args is None: extra_args = [] - if not isinstance(function, collections.Callable): + if not isinstance(function, Callable): raise TypeError('The %s object is not callable' % function) if not isinstance(extra_args, (tuple, list)): raise TypeError('extra_args needs to be a sequence') @@ -621,7 +624,7 @@ def _check_unpack_options(extensions, function, extra_args): raise RegistryError(msg % (extension, existing_extensions[extension])) - if not isinstance(function, collections.Callable): + if not isinstance(function, Callable): raise TypeError('The registered function must be a callable') diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg rename to venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py new file mode 100644 index 00000000..b470a373 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py @@ -0,0 +1,786 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +"""Access to Python's configuration information.""" + +import codecs +import os +import re +import sys +from os.path import pardir, realpath +try: + import configparser +except ImportError: + import ConfigParser as configparser + + +__all__ = [ + 'get_config_h_filename', + 'get_config_var', + 'get_config_vars', + 'get_makefile_filename', + 'get_path', + 'get_path_names', + 'get_paths', + 'get_platform', + 'get_python_version', + 'get_scheme_names', + 'parse_config_h', +] + + +def _safe_realpath(path): + try: + return realpath(path) + except OSError: + return path + + +if sys.executable: + _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) +else: + # sys.executable can be empty if argv[0] has been changed and Python is + # unable to retrieve the real program name + _PROJECT_BASE = _safe_realpath(os.getcwd()) + +if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) +# PC/VS7.1 +if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) +# PC/AMD64 +if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) + + +def is_python_build(): + for fn in ("Setup.dist", "Setup.local"): + if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): + return True + return False + +_PYTHON_BUILD = is_python_build() + +_cfg_read = False + +def _ensure_cfg_read(): + global _cfg_read + if not _cfg_read: + from ..resources import finder + backport_package = __name__.rsplit('.', 1)[0] + _finder = finder(backport_package) + _cfgfile = _finder.find('sysconfig.cfg') + assert _cfgfile, 'sysconfig.cfg exists' + with _cfgfile.as_stream() as s: + _SCHEMES.readfp(s) + if _PYTHON_BUILD: + for scheme in ('posix_prefix', 'posix_home'): + _SCHEMES.set(scheme, 'include', '{srcdir}/Include') + _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') + + _cfg_read = True + + +_SCHEMES = configparser.RawConfigParser() +_VAR_REPL = re.compile(r'\{([^{]*?)\}') + +def _expand_globals(config): + _ensure_cfg_read() + if config.has_section('globals'): + globals = config.items('globals') + else: + globals = tuple() + + sections = config.sections() + for section in sections: + if section == 'globals': + continue + for option, value in globals: + if config.has_option(section, option): + continue + config.set(section, option, value) + config.remove_section('globals') + + # now expanding local variables defined in the cfg file + # + for section in config.sections(): + variables = dict(config.items(section)) + + def _replacer(matchobj): + name = matchobj.group(1) + if name in variables: + return variables[name] + return matchobj.group(0) + + for option, value in config.items(section): + config.set(section, option, _VAR_REPL.sub(_replacer, value)) + +#_expand_globals(_SCHEMES) + +_PY_VERSION = '%s.%s.%s' % sys.version_info[:3] +_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] +_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] +_PREFIX = os.path.normpath(sys.prefix) +_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) +_CONFIG_VARS = None +_USER_BASE = None + + +def _subst_vars(path, local_vars): + """In the string `path`, replace tokens like {some.thing} with the + corresponding value from the map `local_vars`. + + If there is no corresponding value, leave the token unchanged. 
+ """ + def _replacer(matchobj): + name = matchobj.group(1) + if name in local_vars: + return local_vars[name] + elif name in os.environ: + return os.environ[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, path) + + +def _extend_dict(target_dict, other_dict): + target_keys = target_dict.keys() + for key, value in other_dict.items(): + if key in target_keys: + continue + target_dict[key] = value + + +def _expand_vars(scheme, vars): + res = {} + if vars is None: + vars = {} + _extend_dict(vars, get_config_vars()) + + for key, value in _SCHEMES.items(scheme): + if os.name in ('posix', 'nt'): + value = os.path.expanduser(value) + res[key] = os.path.normpath(_subst_vars(value, vars)) + return res + + +def format_value(value, vars): + def _replacer(matchobj): + name = matchobj.group(1) + if name in vars: + return vars[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, value) + + +def _get_default_scheme(): + if os.name == 'posix': + # the default scheme for posix is posix_prefix + return 'posix_prefix' + return os.name + + +def _getuserbase(): + env_base = os.environ.get("PYTHONUSERBASE", None) + + def joinuser(*args): + return os.path.expanduser(os.path.join(*args)) + + # what about 'os2emx', 'riscos' ? + if os.name == "nt": + base = os.environ.get("APPDATA") or "~" + if env_base: + return env_base + else: + return joinuser(base, "Python") + + if sys.platform == "darwin": + framework = get_config_var("PYTHONFRAMEWORK") + if framework: + if env_base: + return env_base + else: + return joinuser("~", "Library", framework, "%d.%d" % + sys.version_info[:2]) + + if env_base: + return env_base + else: + return joinuser("~", ".local") + + +def _parse_makefile(filename, vars=None): + """Parse a Makefile-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + # Regexes needed for parsing Makefile (and similar syntaxes, + # like old-style Setup files). + _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") + _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") + _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + + if vars is None: + vars = {} + done = {} + notdone = {} + + with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: + lines = f.readlines() + + for line in lines: + if line.startswith('#') or line.strip() == '': + continue + m = _variable_rx.match(line) + if m: + n, v = m.group(1, 2) + v = v.strip() + # `$$' is a literal `$' in make + tmpv = v.replace('$$', '') + + if "$" in tmpv: + notdone[n] = v + else: + try: + v = int(v) + except ValueError: + # insert literal `$' + done[n] = v.replace('$$', '$') + else: + done[n] = v + + # do variable interpolation here + variables = list(notdone.keys()) + + # Variables with a 'PY_' prefix in the makefile. These need to + # be made available without that prefix through sysconfig. + # Special care is needed to ensure that variable expansion works, even + # if the expansion uses the name without a prefix. 
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + + while len(variables) > 0: + for name in tuple(variables): + value = notdone[name] + m = _findvar1_rx.search(value) or _findvar2_rx.search(value) + if m is not None: + n = m.group(1) + found = True + if n in done: + item = str(done[n]) + elif n in notdone: + # get it on a subsequent round + found = False + elif n in os.environ: + # do it like make: fall back to environment + item = os.environ[n] + + elif n in renamed_variables: + if (name.startswith('PY_') and + name[3:] in renamed_variables): + item = "" + + elif 'PY_' + n in notdone: + found = False + + else: + item = str(done['PY_' + n]) + + else: + done[n] = item = "" + + if found: + after = value[m.end():] + value = value[:m.start()] + item + after + if "$" in after: + notdone[name] = value + else: + try: + value = int(value) + except ValueError: + done[name] = value.strip() + else: + done[name] = value + variables.remove(name) + + if (name.startswith('PY_') and + name[3:] in renamed_variables): + + name = name[3:] + if name not in done: + done[name] = value + + else: + # bogus variable reference (e.g. "prefix=$/opt/python"); + # just drop it since we can't deal + done[name] = value + variables.remove(name) + + # strip spurious spaces + for k, v in done.items(): + if isinstance(v, str): + done[k] = v.strip() + + # save the results in the global dictionary + vars.update(done) + return vars + + +def get_makefile_filename(): + """Return the path of the Makefile.""" + if _PYTHON_BUILD: + return os.path.join(_PROJECT_BASE, "Makefile") + if hasattr(sys, 'abiflags'): + config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) + else: + config_dir_name = 'config' + return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') + + +def _init_posix(vars): + """Initialize the module as appropriate for POSIX systems.""" + # load the installed Makefile: + makefile = get_makefile_filename() + try: + _parse_makefile(makefile, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % makefile + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # load the installed pyconfig.h: + config_h = get_config_h_filename() + try: + with open(config_h) as f: + parse_config_h(f, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % config_h + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # On AIX, there are wrong paths to the linker scripts in the Makefile + # -- these paths are relative to the Python source, but when installed + # the scripts are in another directory. + if _PYTHON_BUILD: + vars['LDSHARED'] = vars['BLDSHARED'] + + +def _init_non_posix(vars): + """Initialize the module as appropriate for NT""" + # set basic install directories + vars['LIBDEST'] = get_path('stdlib') + vars['BINLIBDEST'] = get_path('platstdlib') + vars['INCLUDEPY'] = get_path('include') + vars['SO'] = '.pyd' + vars['EXE'] = '.exe' + vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT + vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) + +# +# public APIs +# + + +def parse_config_h(fp, vars=None): + """Parse a config.h-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. 
+ """ + if vars is None: + vars = {} + define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") + undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") + + while True: + line = fp.readline() + if not line: + break + m = define_rx.match(line) + if m: + n, v = m.group(1, 2) + try: + v = int(v) + except ValueError: + pass + vars[n] = v + else: + m = undef_rx.match(line) + if m: + vars[m.group(1)] = 0 + return vars + + +def get_config_h_filename(): + """Return the path of pyconfig.h.""" + if _PYTHON_BUILD: + if os.name == "nt": + inc_dir = os.path.join(_PROJECT_BASE, "PC") + else: + inc_dir = _PROJECT_BASE + else: + inc_dir = get_path('platinclude') + return os.path.join(inc_dir, 'pyconfig.h') + + +def get_scheme_names(): + """Return a tuple containing the schemes names.""" + return tuple(sorted(_SCHEMES.sections())) + + +def get_path_names(): + """Return a tuple containing the paths names.""" + # xxx see if we want a static list + return _SCHEMES.options('posix_prefix') + + +def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): + """Return a mapping containing an install scheme. + + ``scheme`` is the install scheme name. If not provided, it will + return the default scheme for the current platform. + """ + _ensure_cfg_read() + if expand: + return _expand_vars(scheme, vars) + else: + return dict(_SCHEMES.items(scheme)) + + +def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): + """Return a path corresponding to the scheme. + + ``scheme`` is the install scheme name. + """ + return get_paths(scheme, vars, expand)[name] + + +def get_config_vars(*args): + """With no arguments, return a dictionary of all configuration + variables relevant for the current platform. + + On Unix, this means every variable defined in Python's installed Makefile; + On Windows and Mac OS it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ + global _CONFIG_VARS + if _CONFIG_VARS is None: + _CONFIG_VARS = {} + # Normalized versions of prefix and exec_prefix are handy to have; + # in fact, these are the standard versions used most places in the + # distutils2 module. + _CONFIG_VARS['prefix'] = _PREFIX + _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX + _CONFIG_VARS['py_version'] = _PY_VERSION + _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT + _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] + _CONFIG_VARS['base'] = _PREFIX + _CONFIG_VARS['platbase'] = _EXEC_PREFIX + _CONFIG_VARS['projectbase'] = _PROJECT_BASE + try: + _CONFIG_VARS['abiflags'] = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. + _CONFIG_VARS['abiflags'] = '' + + if os.name in ('nt', 'os2'): + _init_non_posix(_CONFIG_VARS) + if os.name == 'posix': + _init_posix(_CONFIG_VARS) + # Setting 'userbase' is done below the call to the + # init function to enable using 'get_config_var' in + # the init-function. + if sys.version >= '2.6': + _CONFIG_VARS['userbase'] = _getuserbase() + + if 'srcdir' not in _CONFIG_VARS: + _CONFIG_VARS['srcdir'] = _PROJECT_BASE + else: + _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) + + # Convert srcdir into an absolute path if it appears necessary. + # Normally it is relative to the build directory. However, during + # testing, for example, we might be running a non-installed python + # from a different directory. 
+ if _PYTHON_BUILD and os.name == "posix": + base = _PROJECT_BASE + try: + cwd = os.getcwd() + except OSError: + cwd = None + if (not os.path.isabs(_CONFIG_VARS['srcdir']) and + base != cwd): + # srcdir is relative and we are not in the same directory + # as the executable. Assume executable is in the build + # directory and make srcdir absolute. + srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) + _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) + + if sys.platform == 'darwin': + kernel_version = os.uname()[2] # Kernel version (8.4.3) + major_version = int(kernel_version.split('.')[0]) + + if major_version < 8: + # On Mac OS X before 10.4, check if -arch and -isysroot + # are in CFLAGS or LDFLAGS and remove them if they are. + # This is needed when building extensions on a 10.3 system + # using a universal build of python. + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = re.sub('-isysroot [^ \t]*', ' ', flags) + _CONFIG_VARS[key] = flags + else: + # Allow the user to override the architecture flags using + # an environment variable. + # NOTE: This name was introduced by Apple in OSX 10.5 and + # is used by several scripting languages distributed with + # that OS release. + if 'ARCHFLAGS' in os.environ: + arch = os.environ['ARCHFLAGS'] + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = flags + ' ' + arch + _CONFIG_VARS[key] = flags + + # If we're on OSX 10.5 or later and the user tries to + # compiles an extension using an SDK that is not present + # on the current machine it is better to not use an SDK + # than to fail. + # + # The major usecase for this is users using a Python.org + # binary installer on OSX 10.6: that installer uses + # the 10.4u SDK, but that SDK is not installed by default + # when you install Xcode. + # + CFLAGS = _CONFIG_VARS.get('CFLAGS', '') + m = re.search(r'-isysroot\s+(\S+)', CFLAGS) + if m is not None: + sdk = m.group(1) + if not os.path.exists(sdk): + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) + _CONFIG_VARS[key] = flags + + if args: + vals = [] + for name in args: + vals.append(_CONFIG_VARS.get(name)) + return vals + else: + return _CONFIG_VARS + + +def get_config_var(name): + """Return the value of a single variable using the dictionary returned by + 'get_config_vars()'. + + Equivalent to get_config_vars().get(name) + """ + return get_config_vars().get(name) + + +def get_platform(): + """Return a string that identifies the current platform. + + This is used mainly to distinguish platform-specific build directories and + platform-specific built distributions. Typically includes the OS name + and version and the architecture (as supplied by 'os.uname()'), + although the exact information included depends on the OS; eg. for IRIX + the architecture isn't particularly important (IRIX only runs on SGI + hardware), but for Linux the kernel version isn't particularly + important. + + Examples of returned values: + linux-i586 + linux-alpha (?) 
+ solaris-2.6-sun4u + irix-5.3 + irix64-6.2 + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win-ia64 (64bit Windows on Itanium) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + """ + if os.name == 'nt': + # sniff sys.version for architecture. + prefix = " bit (" + i = sys.version.find(prefix) + if i == -1: + return sys.platform + j = sys.version.find(")", i) + look = sys.version[i+len(prefix):j].lower() + if look == 'amd64': + return 'win-amd64' + if look == 'itanium': + return 'win-ia64' + return sys.platform + + if os.name != "posix" or not hasattr(os, 'uname'): + # XXX what about the architecture? NT is Intel or Alpha, + # Mac OS is M68k or PPC, etc. + return sys.platform + + # Try to distinguish various flavours of Unix + osname, host, release, version, machine = os.uname() + + # Convert the OS name to lowercase, remove '/' characters + # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") + osname = osname.lower().replace('/', '') + machine = machine.replace(' ', '_') + machine = machine.replace('/', '-') + + if osname[:5] == "linux": + # At least on Linux/Intel, 'machine' is the processor -- + # i386, etc. + # XXX what about Alpha, SPARC, etc? + return "%s-%s" % (osname, machine) + elif osname[:5] == "sunos": + if release[0] >= "5": # SunOS 5 == Solaris 2 + osname = "solaris" + release = "%d.%s" % (int(release[0]) - 3, release[2:]) + # fall through to standard osname-release-machine representation + elif osname[:4] == "irix": # could be "irix64"! + return "%s-%s" % (osname, release) + elif osname[:3] == "aix": + return "%s-%s.%s" % (osname, version, release) + elif osname[:6] == "cygwin": + osname = "cygwin" + rel_re = re.compile(r'[\d.]+') + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == "darwin": + # + # For our purposes, we'll assume that the system version from + # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set + # to. This makes the compatibility story a bit more sane because the + # machine is going to compile and link as if it were + # MACOSX_DEPLOYMENT_TARGET. + cfgvars = get_config_vars() + macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') + + if True: + # Always calculate the release of the running machine, + # needed to determine if we can build fat binaries or not. + + macrelease = macver + # Get the system version. Reading this plist is a documented + # way to get the system version (see the documentation for + # the Gestalt Manager) + try: + f = open('/System/Library/CoreServices/SystemVersion.plist') + except IOError: + # We're on a plain darwin box, fall back to the default + # behaviour. + pass + else: + try: + m = re.search(r'ProductUserVisibleVersion\s*' + r'(.*?)', f.read()) + finally: + f.close() + if m is not None: + macrelease = '.'.join(m.group(1).split('.')[:2]) + # else: fall back to the default behaviour + + if not macver: + macver = macrelease + + if macver: + release = macver + osname = "macosx" + + if ((macrelease + '.') >= '10.4.' and + '-arch' in get_config_vars().get('CFLAGS', '').strip()): + # The universal build will build fat binaries, but not on + # systems before 10.4 + # + # Try to detect 4-way universal builds, those have machine-type + # 'universal' instead of 'fat'. 
+ + machine = 'fat' + cflags = get_config_vars().get('CFLAGS') + + archs = re.findall(r'-arch\s+(\S+)', cflags) + archs = tuple(sorted(set(archs))) + + if len(archs) == 1: + machine = archs[0] + elif archs == ('i386', 'ppc'): + machine = 'fat' + elif archs == ('i386', 'x86_64'): + machine = 'intel' + elif archs == ('i386', 'ppc', 'x86_64'): + machine = 'fat3' + elif archs == ('ppc64', 'x86_64'): + machine = 'fat64' + elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): + machine = 'universal' + else: + raise ValueError( + "Don't know machine value for archs=%r" % (archs,)) + + elif machine == 'i386': + # On OSX the machine type returned by uname is always the + # 32-bit variant, even if the executable architecture is + # the 64-bit variant + if sys.maxsize >= 2**32: + machine = 'x86_64' + + elif machine in ('PowerPC', 'Power_Macintosh'): + # Pick a sane name for the PPC architecture. + # See 'i386' case + if sys.maxsize >= 2**32: + machine = 'ppc64' + else: + machine = 'ppc' + + return "%s-%s-%s" % (osname, release, machine) + + +def get_python_version(): + return _PY_VERSION_SHORT + + +def _print_dict(title, data): + for index, (key, value) in enumerate(sorted(data.items())): + if index == 0: + print('%s: ' % (title)) + print('\t%s = "%s"' % (key, value)) + + +def _main(): + """Display all information sysconfig detains.""" + print('Platform: "%s"' % get_platform()) + print('Python version: "%s"' % get_python_version()) + print('Current installation scheme: "%s"' % _get_default_scheme()) + print() + _print_dict('Paths', get_paths()) + print() + _print_dict('Variables', get_config_vars()) + + +if __name__ == '__main__': + _main() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py rename to venv/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/compat.py b/venv/Lib/site-packages/pip/_vendor/distlib/compat.py new file mode 100644 index 00000000..c316fd97 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/distlib/compat.py @@ -0,0 +1,1120 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +from __future__ import absolute_import + +import os +import re +import sys + +try: + import ssl +except ImportError: # pragma: no cover + ssl = None + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from ._backport import shutil + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib2 import HTTPSHandler + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + _userprog = None + def splituser(host): + """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + global _userprog + if _userprog is None: + import re + _userprog = re.compile('^(.*)@(.*)$') + + match = _userprog.match(host) + if match: return match.group(1, 2) + return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + import shutil + from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote, + unquote, urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib.request import HTTPSHandler + from urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib + import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + class CertificateError(ValueError): + pass + + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. 
+ pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. 
./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if not normdir in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: # pragma: no cover + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + +try: + import sysconfig +except ImportError: # pragma: no cover + from ._backport import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections.abc import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + # Issue #99: on some systems (e.g. containerised), + # sys.getfilesystemencoding() returns None, and we need a real value, + # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and + # sys.getfilesystemencoding(): the return value is "the user’s preference + # according to the result of nl_langinfo(CODESET), or None if the + # nl_langinfo(CODESET) failed." 
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8' + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + import re + + cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. 
+ line_string = line.decode('utf-8') + except UnicodeDecodeError: + msg = "invalid or missing encoding declaration" + if filename is not None: + msg = '{} for {!r}'.format(msg, filename) + raise SyntaxError(msg) + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + if filename is None: + msg = "unknown encoding: " + encoding + else: + msg = "unknown encoding for {!r}: {}".format(filename, + encoding) + raise SyntaxError(msg) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + if filename is None: + msg = 'encoding problem: utf-8' + else: + msg = 'encoding problem for {!r}: utf-8'.format(filename) + raise SyntaxError(msg) + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + +# For converting & <-> & etc. +try: + from html import escape +except ImportError: + from cgi import escape +if sys.version_info[:2] < (3, 4): + unescape = HTMLParser().unescape +else: + from html import unescape + +try: + from collections import ChainMap +except ImportError: # pragma: no cover + from collections import MutableMapping + + try: + from reprlib import recursive_repr as _recursive_repr + except ImportError: + def _recursive_repr(fillvalue='...'): + ''' + Decorator to make a repr function return fillvalue for a recursive + call + ''' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. 
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + +try: + from importlib.util import cache_from_source # Python >= 3.4 +except ImportError: # pragma: no cover + try: + from imp import cache_from_source + except ImportError: # pragma: no cover + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix + +try: + from collections import OrderedDict +except ImportError: # pragma: no cover +## {{{ http://code.activestate.com/recipes/576693/ (r9) +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. + try: + from thread import get_ident as _get_ident + except ImportError: + from dummy_thread import get_ident as _get_ident + + try: + from _abcoll import KeysView, ValuesView, ItemsView + except ImportError: + pass + + + class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. 
+ + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
+ + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
+ + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. + + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. 
+ """ + + CONVERT_PATTERN = re.compile(r'^(?P[a-z]+)://(?P.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext' : 'ext_convert', + 'cfg' : 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.' + frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + #print d, rest + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + #rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. 
+ """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/database.py b/venv/Lib/site-packages/pip/_vendor/distlib/database.py similarity index 99% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/database.py rename to venv/Lib/site-packages/pip/_vendor/distlib/database.py index b13cdac9..0a90c300 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/database.py +++ b/venv/Lib/site-packages/pip/_vendor/distlib/database.py @@ -550,7 +550,7 @@ def __init__(self, path, metadata=None, env=None): r = finder.find(WHEEL_METADATA_FILENAME) # Temporary - for legacy support if r is None: - r = finder.find('METADATA') + r = finder.find(LEGACY_METADATA_FILENAME) if r is None: raise ValueError('no %s found in %s' % (METADATA_FILENAME, path)) @@ -567,7 +567,7 @@ def __init__(self, path, metadata=None, env=None): p = os.path.join(path, 'top_level.txt') if os.path.exists(p): with open(p, 'rb') as f: - data = f.read() + data = f.read().decode('utf-8') self.modules = data.splitlines() def __repr__(self): diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/index.py b/venv/Lib/site-packages/pip/_vendor/distlib/index.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/index.py rename to venv/Lib/site-packages/pip/_vendor/distlib/index.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/locators.py b/venv/Lib/site-packages/pip/_vendor/distlib/locators.py similarity index 98% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/locators.py rename to venv/Lib/site-packages/pip/_vendor/distlib/locators.py index a7ed9469..12a1d063 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/locators.py +++ b/venv/Lib/site-packages/pip/_vendor/distlib/locators.py @@ -304,18 +304,25 @@ def same_project(name1, name2): def _get_digest(self, info): """ - Get a digest from a dictionary by looking at keys of the form - 'algo_digest'. + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. Returns a 2-tuple (algo, digest) if found, else None. Currently looks only for SHA256, then MD5. 
""" result = None - for algo in ('sha256', 'md5'): - key = '%s_digest' % algo - if key in info: - result = (algo, info[key]) - break + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break return result def _update_version_data(self, result, info): diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/manifest.py b/venv/Lib/site-packages/pip/_vendor/distlib/manifest.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/manifest.py rename to venv/Lib/site-packages/pip/_vendor/distlib/manifest.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/markers.py b/venv/Lib/site-packages/pip/_vendor/distlib/markers.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/markers.py rename to venv/Lib/site-packages/pip/_vendor/distlib/markers.py diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/metadata.py b/venv/Lib/site-packages/pip/_vendor/distlib/metadata.py new file mode 100644 index 00000000..6d5e2360 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/distlib/metadata.py @@ -0,0 +1,1056 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. + +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + + +from . import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. 
Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \\|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', + 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', + 'Obsoletes-Dist', 'Requires-External', 'Maintainer', + 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', + 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', + 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', + 'Setup-Requires-Dist', 'Extension') + +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). 
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type', + 'Requires', 'Provides') + +_566_MARKERS = ('Description-Content-Type',) + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) +_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) +_ALL_FIELDS.update(_566_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version in ('1.3', '2.1'): + return _345_FIELDS + _566_FIELDS + elif version == '2.0': + return _426_FIELDS + raise MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): + for marker in markers: + if marker in keys: + return True + return False + + keys = [] + for key, value in fields.items(): + if value in ([], 'UNKNOWN', None): + continue + keys.append(key) + + possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1'] + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + logger.debug('Removed 1.0 due to %s', key) + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + logger.debug('Removed 1.1 due to %s', key) + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + logger.debug('Removed 1.2 due to %s', key) + if key not in _566_FIELDS and '1.3' in possible_versions: + possible_versions.remove('1.3') + logger.debug('Removed 1.3 due to %s', key) + if key not in _566_FIELDS and '2.1' in possible_versions: + if key != 'Description': # In 2.1, description allowed after headers + possible_versions.remove('2.1') + logger.debug('Removed 2.1 due to %s', key) + if key not in _426_FIELDS and '2.0' in possible_versions: + possible_versions.remove('2.0') + logger.debug('Removed 2.0 due to %s', key) + + # possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! 
+ elif len(possible_versions) == 0: + logger.debug('Out of options - unknown metadata set: %s', fields) + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) + is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields') + + # we have the choice, 1.0, or 1.2, or 2.0 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.0 adds more features and is very new + if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + if is_2_1: + return '2.1' + + return '2.0' + +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 +_ATTR2FIELD = { + name.lower().replace("-", "_"): name for name in _ALL_FIELDS +} +_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python',) +_VERSION_FIELDS = ('Version',) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', + 'Requires', 'Provides', 'Obsoletes-Dist', + 'Provides-Dist', 'Requires-Dist', 'Requires-External', + 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension') +_LISTTUPLEFIELDS = ('Project-URL',) + +_ELEMENTSFIELD = ('Keywords',) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). 
You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or + self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + +# dependencies = property(_get_dependencies, _set_dependencies) + + def get_fullname(self, filesafe=False): + """Return the distribution name with version. 
+ + If filesafe is true, return a filename-escaped form.""" + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] + # logger.debug('Attempting to set metadata for %s', self) + # self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. 
+ """ + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning( + "'%s': '%s' is not valid (field '%s')", + project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? + elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, + scheme.is_valid_constraint_list), + (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append("Wrong value for '%s': %s" % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. + """ + self.set_metadata_version() + + fields = _version2fieldlist(self['Metadata-Version']) + + data = {} + + for field_name in fields: + if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' +LEGACY_METADATA_FILENAME = 'METADATA' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.0 (JSON) + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. 
+ """ + + METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if not d: + d = self._data.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError("'%s' is an invalid value for " + "the '%s' property" % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = 
self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = 
self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + ('extensions', 'python.details', 'license'): 'License', + 'summary': 'Summary', + 'description': 'Description', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + # import pdb; pdb.set_trace() + for nk, ok in self.LEGACY_MAPPING.items(): + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: any other fields wanted + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, + sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, + sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = { 'requires': requirements } + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/resources.py b/venv/Lib/site-packages/pip/_vendor/distlib/resources.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/resources.py rename to venv/Lib/site-packages/pip/_vendor/distlib/resources.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/scripts.py b/venv/Lib/site-packages/pip/_vendor/distlib/scripts.py 
similarity index 92% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/scripts.py rename to venv/Lib/site-packages/pip/_vendor/distlib/scripts.py index 5965e241..03f8f21e 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/distlib/scripts.py +++ b/venv/Lib/site-packages/pip/_vendor/distlib/scripts.py @@ -48,7 +48,7 @@ ''' -def _enquote_executable(executable): +def enquote_executable(executable): if ' ' in executable: # make sure we quote only the executable in case of env # for example /usr/bin/env "/dir with spaces/bin/jython" @@ -63,6 +63,8 @@ def _enquote_executable(executable): executable = '"%s"' % executable return executable +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable class ScriptMaker(object): """ @@ -88,6 +90,7 @@ def __init__(self, source_dir, target_dir, add_launchers=True, self._is_nt = os.name == 'nt' or ( os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info def _get_alternate_executable(self, executable, options): if options.get('gui', False) and self._is_nt: # pragma: no cover @@ -172,12 +175,20 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): if sys.platform.startswith('java'): # pragma: no cover executable = self._fix_jython_executable(executable) - # Normalise case for Windows - executable = os.path.normcase(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + # If the user didn't specify an executable, it may be necessary to # cater for executable paths with spaces (not uncommon on Windows) if enquote: - executable = _enquote_executable(executable) + executable = enquote_executable(executable) # Issue #51: don't use fsencode, since we later try to # check that the shebang is decodable using utf-8. 
executable = executable.encode('utf-8') @@ -285,9 +296,10 @@ def _make_script(self, entry, filenames, options=None): if '' in self.variants: scriptnames.add(name) if 'X' in self.variants: - scriptnames.add('%s%s' % (name, sys.version[0])) + scriptnames.add('%s%s' % (name, self.version_info[0])) if 'X.Y' in self.variants: - scriptnames.add('%s-%s' % (name, sys.version[:3])) + scriptnames.add('%s-%s.%s' % (name, self.version_info[0], + self.version_info[1])) if options and options.get('gui', False): ext = 'pyw' else: @@ -367,8 +379,12 @@ def _get_launcher(self, kind): # Issue 31: don't hardcode an absolute package name, but # determine it relative to the current package distlib_package = __name__.rsplit('.', 1)[0] - result = finder(distlib_package).find(name).bytes - return result + resource = finder(distlib_package).find(name) + if not resource: + msg = ('Unable to find resource %s in package %s' % (name, + distlib_package)) + raise ValueError(msg) + return resource.bytes # Public API follows diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/t32.exe b/venv/Lib/site-packages/pip/_vendor/distlib/t32.exe new file mode 100644 index 00000000..8932a18e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/t32.exe differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/t64.exe b/venv/Lib/site-packages/pip/_vendor/distlib/t64.exe new file mode 100644 index 00000000..325b8057 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/t64.exe differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/util.py b/venv/Lib/site-packages/pip/_vendor/distlib/util.py new file mode 100644 index 00000000..01324eae --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/distlib/util.py @@ -0,0 +1,1761 @@ +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import codecs +from collections import deque +import contextlib +import csv +from glob import iglob as std_iglob +import io +import json +import logging +import os +import py_compile +import re +import socket +try: + import ssl +except ImportError: # pragma: no cover + ssl = None +import subprocess +import sys +import tarfile +import tempfile +import textwrap + +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import time + +from . import DistlibException +from .compat import (string_types, text_type, shutil, raw_input, StringIO, + cache_from_source, urlopen, urljoin, httplib, xmlrpclib, + splittype, HTTPHandler, BaseConfigurator, valid_ident, + Container, configparser, URLError, ZipFile, fsdecode, + unquote, urlparse) + +logger = logging.getLogger(__name__) + +# +# Requirement parsing code as per PEP 508 +# + +IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') +VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') +COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') +MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') +OR = re.compile(r'^or\b\s*') +AND = re.compile(r'^and\b\s*') +NON_SPACE = re.compile(r'(\S+)\s*') +STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') + + +def parse_marker(marker_string): + """ + Parse a marker string and return a dictionary containing a marker expression. + + The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in + the expression grammar, or strings. A string contained in quotes is to be + interpreted as a literal string, and a string not contained in quotes is a + variable (such as os_name). 
+ """ + def marker_var(remaining): + # either identifier, or literal string + m = IDENTIFIER.match(remaining) + if m: + result = m.groups()[0] + remaining = remaining[m.end():] + elif not remaining: + raise SyntaxError('unexpected end of input') + else: + q = remaining[0] + if q not in '\'"': + raise SyntaxError('invalid expression: %s' % remaining) + oq = '\'"'.replace(q, '') + remaining = remaining[1:] + parts = [q] + while remaining: + # either a string chunk, or oq, or q to terminate + if remaining[0] == q: + break + elif remaining[0] == oq: + parts.append(oq) + remaining = remaining[1:] + else: + m = STRING_CHUNK.match(remaining) + if not m: + raise SyntaxError('error in string literal: %s' % remaining) + parts.append(m.groups()[0]) + remaining = remaining[m.end():] + else: + s = ''.join(parts) + raise SyntaxError('unterminated string: %s' % s) + parts.append(q) + result = ''.join(parts) + remaining = remaining[1:].lstrip() # skip past closing quote + return result, remaining + + def marker_expr(remaining): + if remaining and remaining[0] == '(': + result, remaining = marker(remaining[1:].lstrip()) + if remaining[0] != ')': + raise SyntaxError('unterminated parenthesis: %s' % remaining) + remaining = remaining[1:].lstrip() + else: + lhs, remaining = marker_var(remaining) + while remaining: + m = MARKER_OP.match(remaining) + if not m: + break + op = m.groups()[0] + remaining = remaining[m.end():] + rhs, remaining = marker_var(remaining) + lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + result = lhs + return result, remaining + + def marker_and(remaining): + lhs, remaining = marker_expr(remaining) + while remaining: + m = AND.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_expr(remaining) + lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + def marker(remaining): + lhs, remaining = marker_and(remaining) + while remaining: + m = OR.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_and(remaining) + lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + return marker(marker_string) + + +def parse_requirement(req): + """ + Parse a requirement passed in as a string. Return a Container + whose attributes contain the various parts of the requirement. + """ + remaining = req.strip() + if not remaining or remaining.startswith('#'): + return None + m = IDENTIFIER.match(remaining) + if not m: + raise SyntaxError('name expected: %s' % remaining) + distname = m.groups()[0] + remaining = remaining[m.end():] + extras = mark_expr = versions = uri = None + if remaining and remaining[0] == '[': + i = remaining.find(']', 1) + if i < 0: + raise SyntaxError('unterminated extra: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + extras = [] + while s: + m = IDENTIFIER.match(s) + if not m: + raise SyntaxError('malformed extra: %s' % s) + extras.append(m.groups()[0]) + s = s[m.end():] + if not s: + break + if s[0] != ',': + raise SyntaxError('comma expected in extras: %s' % s) + s = s[1:].lstrip() + if not extras: + extras = None + if remaining: + if remaining[0] == '@': + # it's a URI + remaining = remaining[1:].lstrip() + m = NON_SPACE.match(remaining) + if not m: + raise SyntaxError('invalid URI: %s' % remaining) + uri = m.groups()[0] + t = urlparse(uri) + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. 
Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not (t.scheme and t.netloc): + raise SyntaxError('Invalid URL: %s' % uri) + remaining = remaining[m.end():].lstrip() + else: + + def get_versions(ver_remaining): + """ + Return a list of operator, version tuples if any are + specified, else None. + """ + m = COMPARE_OP.match(ver_remaining) + versions = None + if m: + versions = [] + while True: + op = m.groups()[0] + ver_remaining = ver_remaining[m.end():] + m = VERSION_IDENTIFIER.match(ver_remaining) + if not m: + raise SyntaxError('invalid version: %s' % ver_remaining) + v = m.groups()[0] + versions.append((op, v)) + ver_remaining = ver_remaining[m.end():] + if not ver_remaining or ver_remaining[0] != ',': + break + ver_remaining = ver_remaining[1:].lstrip() + m = COMPARE_OP.match(ver_remaining) + if not m: + raise SyntaxError('invalid constraint: %s' % ver_remaining) + if not versions: + versions = None + return versions, ver_remaining + + if remaining[0] != '(': + versions, remaining = get_versions(remaining) + else: + i = remaining.find(')', 1) + if i < 0: + raise SyntaxError('unterminated parenthesis: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + # As a special diversion from PEP 508, allow a version number + # a.b.c in parentheses as a synonym for ~= a.b.c (because this + # is allowed in earlier PEPs) + if COMPARE_OP.match(s): + versions, _ = get_versions(s) + else: + m = VERSION_IDENTIFIER.match(s) + if not m: + raise SyntaxError('invalid constraint: %s' % s) + v = m.groups()[0] + s = s[m.end():].lstrip() + if s: + raise SyntaxError('invalid constraint: %s' % s) + versions = [('~=', v)] + + if remaining: + if remaining[0] != ';': + raise SyntaxError('invalid requirement: %s' % remaining) + remaining = remaining[1:].lstrip() + + mark_expr, remaining = parse_marker(remaining) + + if remaining and remaining[0] != '#': + raise SyntaxError('unexpected trailing data: %s' % remaining) + + if not versions: + rs = distname + else: + rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) + return Container(name=distname, extras=extras, constraints=versions, + marker=mark_expr, url=uri, requirement=rs) + + +def get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + + def get_rel_path(root, path): + # normalizes and returns a lstripped-/-separated path + root = root.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(root) + return path[len(root):].lstrip('/') + + destinations = {} + for base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = get_rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): +# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as +# changes to the stub launcher mean that sys.executable always points +# to the stub on OS X +# if sys.platform == 
'darwin' and ('__PYVENV_LAUNCHER__' +# in os.environ): +# result = os.environ['__PYVENV_LAUNCHER__'] +# else: +# result = sys.executable +# return result + result = os.path.normcase(sys.executable) + if not isinstance(result, text_type): + result = fsdecode(result) + return result + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + #entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + cp.add_section(k) + for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, entry.name, s) + cp.write(stream) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + +@contextlib.contextmanager +def chdir(d): + cwd = os.getcwd() + try: + os.chdir(d) + yield + finally: + os.chdir(cwd) + + +@contextlib.contextmanager +def socket_timeout(seconds=15): + cto = socket.getdefaulttimeout() + try: + socket.setdefaulttimeout(seconds) + yield + finally: + socket.setdefaulttimeout(cto) + + +class cached_property(object): + def __init__(self, func): + self.func = func + #for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) + + def __get__(self, obj, cls=None): + if obj is None: + return self + value = self.func(obj) + object.__setattr__(obj, self.func.__name__, value) + #obj.__dict__[self.func.__name__] = value = self.func(obj) + return value + +def convert_path(pathname): + """Return 'pathname' as a name that will work on the native filesystem. + + The path is split on '/' and put back together again using the current + directory separator. 
Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while os.curdir in paths: + paths.remove(os.curdir) + if not paths: + return os.curdir + return os.path.join(*paths) + + +class FileOperator(object): + def __init__(self, dry_run=False): + self.dry_run = dry_run + self.ensured = set() + self._init_record() + + def _init_record(self): + self.record = False + self.files_written = set() + self.dirs_created = set() + + def record_as_written(self, path): + if self.record: + self.files_written.add(path) + + def newer(self, source, target): + """Tell if the target is newer than the source. + + Returns true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. + + Returns false if both exist and 'target' is the same age or younger + than 'source'. Raise PackagingFileError if 'source' does not exist. + + Note that this test is not very accurate: files created in the same + second will have the same "age". + """ + if not os.path.exists(source): + raise DistlibException("file '%r' does not exist" % + os.path.abspath(source)) + if not os.path.exists(target): + return True + + return os.stat(source).st_mtime > os.stat(target).st_mtime + + def copy_file(self, infile, outfile, check=True): + """Copy a file respecting dry-run and force flags. + """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + if os.path.exists(path): + os.remove(path) + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.write_binary_file(path, data.encode(encoding)) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. 
+ for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + compile_kwargs = {} + if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): + compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH + py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = os.path.dirname(path) + if parent == path: + break + path = parent + return result + + def commit(self): + """ + Commit recorded changes, turn off recording, return + changes. 
+        """
+        assert self.record
+        result = self.files_written, self.dirs_created
+        self._init_record()
+        return result
+
+    def rollback(self):
+        if not self.dry_run:
+            for f in list(self.files_written):
+                if os.path.exists(f):
+                    os.remove(f)
+            # dirs should all be empty now, except perhaps for
+            # __pycache__ subdirs
+            # reverse so that subdirs appear before their parents
+            dirs = sorted(self.dirs_created, reverse=True)
+            for d in dirs:
+                flist = os.listdir(d)
+                if flist:
+                    assert flist == ['__pycache__']
+                    sd = os.path.join(d, flist[0])
+                    os.rmdir(sd)
+                os.rmdir(d)     # should fail if non-empty
+        self._init_record()
+
+def resolve(module_name, dotted_path):
+    if module_name in sys.modules:
+        mod = sys.modules[module_name]
+    else:
+        mod = __import__(module_name)
+    if dotted_path is None:
+        result = mod
+    else:
+        parts = dotted_path.split('.')
+        result = getattr(mod, parts.pop(0))
+        for p in parts:
+            result = getattr(result, p)
+    return result
+
+
+class ExportEntry(object):
+    def __init__(self, name, prefix, suffix, flags):
+        self.name = name
+        self.prefix = prefix
+        self.suffix = suffix
+        self.flags = flags
+
+    @cached_property
+    def value(self):
+        return resolve(self.prefix, self.suffix)
+
+    def __repr__(self):  # pragma: no cover
+        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
+                                                self.suffix, self.flags)
+
+    def __eq__(self, other):
+        if not isinstance(other, ExportEntry):
+            result = False
+        else:
+            result = (self.name == other.name and
+                      self.prefix == other.prefix and
+                      self.suffix == other.suffix and
+                      self.flags == other.flags)
+        return result
+
+    __hash__ = object.__hash__
+
+
+ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
+                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
+                      \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+                      ''', re.VERBOSE)
+
+def get_export_entry(specification):
+    m = ENTRY_RE.search(specification)
+    if not m:
+        result = None
+        if '[' in specification or ']' in specification:
+            raise DistlibException("Invalid specification "
+                                   "'%s'" % specification)
+    else:
+        d = m.groupdict()
+        name = d['name']
+        path = d['callable']
+        colons = path.count(':')
+        if colons == 0:
+            prefix, suffix = path, None
+        else:
+            if colons != 1:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            prefix, suffix = path.split(':')
+        flags = d['flags']
+        if flags is None:
+            if '[' in specification or ']' in specification:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            flags = []
+        else:
+            flags = [f.strip() for f in flags.split(',')]
+        result = ExportEntry(name, prefix, suffix, flags)
+    return result
+
+
+def get_cache_base(suffix=None):
+    """
+    Return the default base location for distlib caches. If the directory does
+    not exist, it is created. Use the suffix provided for the base directory,
+    and default to '.distlib' if it isn't provided.
+
+    On Windows, if LOCALAPPDATA is defined in the environment, then it is
+    assumed to be a directory, and will be the parent directory of the result.
+    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
+    directory - using os.expanduser('~') - will be the parent directory of
+    the result.
+
+    The result is just the directory '.distlib' in the parent directory as
+    determined above, or with the name specified with ``suffix``.
+    """
+    if suffix is None:
+        suffix = '.distlib'
+    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
+        result = os.path.expandvars('$localappdata')
+    else:
+        # Assume posix, or old Windows
+        result = os.path.expanduser('~')
+    # we use 'isdir' instead of 'exists', because we want to
+    # fail if there's a file with that name
+    if os.path.isdir(result):
+        usable = os.access(result, os.W_OK)
+        if not usable:
+            logger.warning('Directory exists but is not writable: %s', result)
+    else:
+        try:
+            os.makedirs(result)
+            usable = True
+        except OSError:
+            logger.warning('Unable to create %s', result, exc_info=True)
+            usable = False
+    if not usable:
+        result = tempfile.mkdtemp()
+        logger.warning('Default location unusable, using %s', result)
+    return os.path.join(result, suffix)
+
+
+def path_to_cache_dir(path):
+    """
+    Convert an absolute path to a directory name for use in a cache.
+
+    The algorithm used is:
+
+    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
+    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
+    #. ``'.cache'`` is appended.
+    """
+    d, p = os.path.splitdrive(os.path.abspath(path))
+    if d:
+        d = d.replace(':', '---')
+    p = p.replace(os.sep, '--')
+    return d + p + '.cache'
+
+
+def ensure_slash(s):
+    if not s.endswith('/'):
+        return s + '/'
+    return s
+
+
+def parse_credentials(netloc):
+    username = password = None
+    if '@' in netloc:
+        prefix, netloc = netloc.rsplit('@', 1)
+        if ':' not in prefix:
+            username = prefix
+        else:
+            username, password = prefix.split(':', 1)
+        if username:
+            username = unquote(username)
+        if password:
+            password = unquote(password)
+    return username, password, netloc
+
+
+def get_process_umask():
+    result = os.umask(0o22)
+    os.umask(result)
+    return result
+
+def is_string_sequence(seq):
+    result = True
+    i = None
+    for i, s in enumerate(seq):
+        if not isinstance(s, string_types):
+            result = False
+            break
+    assert i is not None
+    return result
+
+PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
+                                      '([a-z0-9_.+-]+)', re.I)
+PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
+
+
+def split_filename(filename, project_name=None):
+    """
+    Extract name, version, python version from a filename (no extension)
+
+    Return name, version, pyver or None
+    """
+    result = None
+    pyver = None
+    filename = unquote(filename).replace(' ', '-')
+    m = PYTHON_VERSION.search(filename)
+    if m:
+        pyver = m.group(1)
+        filename = filename[:m.start()]
+    if project_name and len(filename) > len(project_name) + 1:
+        m = re.match(re.escape(project_name) + r'\b', filename)
+        if m:
+            n = m.end()
+            result = filename[:n], filename[n + 1:], pyver
+    if result is None:
+        m = PROJECT_NAME_AND_VERSION.match(filename)
+        if m:
+            result = m.group(1), m.group(3), pyver
+    return result
+
+# Allow spaces in name because of legacy dists like "Twisted Core"
+NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
+                             r'\(\s*(?P<ver>[^\s)]+)\)$')
+
+def parse_name_and_version(p):
+    """
+    A utility method used to get name and version from a string.
+
+    From e.g. a Provides-Dist value.
+
+    :param p: A value in a form 'foo (1.0)'
+    :return: The name and version as a tuple.
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result +# +# Extended metadata functionality +# + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + ct = headers.get('Content-Type') + if not ct.startswith('application/json'): + logger.debug('Unexpected response for JSON request: %s', ct) + else: + reader = codecs.getreader('utf-8')(resp) + #data = reader.read().decode('utf-8') + #result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): # pragma: no cover + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). + :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. 
+ """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. + """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', + event, args, kwargs, result) + return result + +# +# Simple sequencing +# +class Sequencer(object): + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of anything' % succ) + try: + preds.remove(pred) + succs.remove(succ) + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs or + step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... 
but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node],index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: break + component = tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', + '.tgz', '.tbz', '.whl') + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: # pragma: no cover + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. 
+ for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G','T','P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? %' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' 
+ #elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + #import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + #import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' 
+ if radical == '': + radical = '*' + else: + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + for fn in _iglob(os.path.join(path, radical)): + yield fn + +if ssl: + from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, + CertificateError) + + +# +# HTTPSConnection which verifies certificates/matches domains +# + + class HTTPSConnection(httplib.HTTPSConnection): + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None + + # noinspection PyPropertyAccess + def connect(self): + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, '_tunnel_host', False): + self.sock = sock + self._tunnel() + + if not hasattr(ssl, 'SSLContext'): + # For 2.x + if self.ca_certs: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, + cert_reqs=cert_reqs, + ssl_version=ssl.PROTOCOL_SSLv23, + ca_certs=self.ca_certs) + else: # pragma: no cover + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + if hasattr(ssl, 'OP_NO_SSLv2'): + context.options |= ssl.OP_NO_SSLv2 + if self.cert_file: + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + self.sock = context.wrap_socket(sock, **kwargs) + if self.ca_certs and self.check_domain: + try: + match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) + except CertificateError: # pragma: no cover + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + class HTTPSHandler(BaseHTTPSHandler): + def __init__(self, ca_certs, check_domain=True): + BaseHTTPSHandler.__init__(self) + self.ca_certs = ca_certs + self.check_domain = check_domain + + def _conn_maker(self, *args, **kwargs): + """ + This is called to create a connection instance. Normally you'd + pass a connection class to do_open, but it doesn't actually check for + a class, and just expects a callable. As long as we behave just as a + constructor would have, we should be OK. If it ever changes so that + we *must* pass a class, we'll create an UnsafeHTTPSConnection class + which just sets check_domain to False in the class definition, and + choose which one to pass to do_open. + """ + result = HTTPSConnection(*args, **kwargs) + if self.ca_certs: + result.ca_certs = self.ca_certs + result.check_domain = self.check_domain + return result + + def https_open(self, req): + try: + return self.do_open(self._conn_maker, req) + except URLError as e: + if 'certificate verify failed' in str(e.reason): + raise CertificateError('Unable to verify server certificate ' + 'for %s' % req.host) + else: + raise + + # + # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- + # Middle proxy using HTTP listens on port 443, or an index mistakenly serves + # HTML containing a http://xyz link when it should be https://xyz), + # you can use the following handler class, which does not allow HTTP traffic. + # + # It works by inheriting from HTTPHandler - so build_opener won't add a + # handler for HTTP itself. 
+ # + class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + def http_open(self, req): + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + +# +# XML-RPC with timeouts +# + +_ver_info = sys.version_info[:2] + +if _ver_info == (2, 6): + class HTTP(httplib.HTTP): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + + if ssl: + class HTTPS(httplib.HTTPS): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + +class Transport(xmlrpclib.Transport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if _ver_info == (2, 6): + result = HTTP(h, timeout=self.timeout) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + result = self._connection[1] + return result + +if ssl: + class SafeTransport(xmlrpclib.SafeTransport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if _ver_info == (2, 6): + result = HTTPS(host, None, **kwargs) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, + **kwargs) + result = self._connection[1] + return result + + +class ServerProxy(xmlrpclib.ServerProxy): + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a timeout + # is specified + if timeout is not None: + scheme, _ = splittype(uri) + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. +# + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + # Python 3 determines encoding from locale. 
Force 'utf-8' + # file encoding to match other forced utf-8 encoding + kwargs['encoding'] = 'utf-8' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + +class CSVWriter(CSVBase): + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + +# +# Configurator functionality +# + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + + +class SubprocessMixin(object): + """ + Mixin for running subprocesses and capturing their output + """ + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p + + +def normalize_name(name): + """Normalize a python package name a la PEP 503""" + # https://www.python.org/dev/peps/pep-0503/#normalized-names + return re.sub('[-_.]+', '-', name).lower() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distlib/version.py b/venv/Lib/site-packages/pip/_vendor/distlib/version.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/distlib/version.py rename to venv/Lib/site-packages/pip/_vendor/distlib/version.py diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/w32.exe b/venv/Lib/site-packages/pip/_vendor/distlib/w32.exe new file mode 100644 index 00000000..e6439e9e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/w32.exe differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/w64.exe b/venv/Lib/site-packages/pip/_vendor/distlib/w64.exe new file mode 100644 index 00000000..46139dbf Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/distlib/w64.exe differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/wheel.py b/venv/Lib/site-packages/pip/_vendor/distlib/wheel.py new file mode 100644 index 00000000..1e2c7a02 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/distlib/wheel.py @@ -0,0 +1,1018 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import base64 +import codecs +import datetime +import distutils.util +from email import message_from_file +import hashlib +import imp +import json +import logging +import os +import posixpath +import re +import shutil +import sys +import tempfile +import zipfile + +from . 
import __version__, DistlibException +from .compat import sysconfig, ZipFile, fsdecode, text_type, filter +from .database import InstalledDistribution +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) +from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, + cached_property, get_cache_base, read_exports, tempdir) +from .version import NormalizedVersion, UnsupportedVersionError + +logger = logging.getLogger(__name__) + +cache = None # created when needed + +if hasattr(sys, 'pypy_version_info'): # pragma: no cover + IMP_PREFIX = 'pp' +elif sys.platform.startswith('java'): # pragma: no cover + IMP_PREFIX = 'jy' +elif sys.platform == 'cli': # pragma: no cover + IMP_PREFIX = 'ip' +else: + IMP_PREFIX = 'cp' + +VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') +if not VER_SUFFIX: # pragma: no cover + VER_SUFFIX = '%s%s' % sys.version_info[:2] +PYVER = 'py' + VER_SUFFIX +IMPVER = IMP_PREFIX + VER_SUFFIX + +ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_') + +ABI = sysconfig.get_config_var('SOABI') +if ABI and ABI.startswith('cpython-'): + ABI = ABI.replace('cpython-', 'cp') +else: + def _derive_abi(): + parts = ['cp', VER_SUFFIX] + if sysconfig.get_config_var('Py_DEBUG'): + parts.append('d') + if sysconfig.get_config_var('WITH_PYMALLOC'): + parts.append('m') + if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4: + parts.append('u') + return ''.join(parts) + ABI = _derive_abi() + del _derive_abi + +FILENAME_RE = re.compile(r''' +(?P<nm>[^-]+) +-(?P<vn>\d+[^-]*) +(-(?P<bn>\d+[^-]*))? +-(?P<py>\w+\d+(\.\w+\d+)*) +-(?P<bi>\w+) +-(?P<ar>\w+(\.\w+)*) +\.whl$ +''', re.IGNORECASE | re.VERBOSE) + +NAME_VERSION_RE = re.compile(r''' +(?P<nm>[^-]+) +-(?P<vn>\d+[^-]*) +(-(?P<bn>\d+[^-]*))?$ +''', re.IGNORECASE | re.VERBOSE) + +SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') +SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') +SHEBANG_PYTHON = b'#!python' +SHEBANG_PYTHONW = b'#!pythonw' + +if os.sep == '/': + to_posix = lambda o: o +else: + to_posix = lambda o: o.replace(os.sep, '/') + + +class Mounter(object): + def __init__(self): + self.impure_wheels = {} + self.libs = {} + + def add(self, pathname, extensions): + self.impure_wheels[pathname] = extensions + self.libs.update(extensions) + + def remove(self, pathname): + extensions = self.impure_wheels.pop(pathname) + for k, v in extensions: + if k in self.libs: + del self.libs[k] + + def find_module(self, fullname, path=None): + if fullname in self.libs: + result = self + else: + result = None + return result + + def load_module(self, fullname): + if fullname in sys.modules: + result = sys.modules[fullname] + else: + if fullname not in self.libs: + raise ImportError('unable to find extension for %s' % fullname) + result = imp.load_dynamic(fullname, self.libs[fullname]) + result.__loader__ = self + parts = fullname.rsplit('.', 1) + if len(parts) > 1: + result.__package__ = parts[0] + return result + +_hook = Mounter() + + +class Wheel(object): + """ + Class to build and install from Wheel files (PEP 427). + """ + + wheel_version = (1, 1) + hash_kind = 'sha256' + + def __init__(self, filename=None, sign=False, verify=False): + """ + Initialise an instance using a (valid) filename.
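# ---- illustrative aside (example, not part of the vendored code or this diff) ----
# Wheel.__init__ splits a wheel filename into the PEP 427 components: name,
# version, optional build tag, and the python/abi/platform tags. A minimal,
# self-contained sketch of that split; the group names mirror the info['nm'],
# info['vn'], etc. lookups used by the constructor, and the sample filename
# below is hypothetical, not taken from this repository.
import re

_WHEEL_RE = re.compile(
    r'(?P<nm>[^-]+)-(?P<vn>[^-]+)(-(?P<bn>\d[^-]*))?'
    r'-(?P<py>[^-]+)-(?P<bi>[^-]+)-(?P<ar>[^-]+)\.whl$')

def split_wheel_filename(filename):
    # Return the six filename components, or None if the name is not wheel-shaped.
    m = _WHEEL_RE.match(filename)
    return m.groupdict('') if m else None

# split_wheel_filename('distlib-0.3.1-py2.py3-none-any.whl') ->
# {'nm': 'distlib', 'vn': '0.3.1', 'bn': '', 'py': 'py2.py3', 'bi': 'none', 'ar': 'any'}
# ---- end aside ----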
+ """ + self.sign = sign + self.should_verify = verify + self.buildver = '' + self.pyver = [PYVER] + self.abi = ['none'] + self.arch = ['any'] + self.dirname = os.getcwd() + if filename is None: + self.name = 'dummy' + self.version = '0.1' + self._filename = self.filename + else: + m = NAME_VERSION_RE.match(filename) + if m: + info = m.groupdict('') + self.name = info['nm'] + # Reinstate the local version separator + self.version = info['vn'].replace('_', '-') + self.buildver = info['bn'] + self._filename = self.filename + else: + dirname, filename = os.path.split(filename) + m = FILENAME_RE.match(filename) + if not m: + raise DistlibException('Invalid name or ' + 'filename: %r' % filename) + if dirname: + self.dirname = os.path.abspath(dirname) + self._filename = filename + info = m.groupdict('') + self.name = info['nm'] + self.version = info['vn'] + self.buildver = info['bn'] + self.pyver = info['py'].split('.') + self.abi = info['bi'].split('.') + self.arch = info['ar'].split('.') + + @property + def filename(self): + """ + Build and return a filename from the various components. + """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, + pyver, abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + wheel_metadata = self.get_wheel_metadata(zf) + wv = wheel_metadata['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] + result = None + for fn in fns: + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + if result: + break + except KeyError: + pass + if not result: + raise ValueError('Invalid wheel, because metadata is ' + 'missing: looked in %s' % ', '.join(fns)) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) 
+ if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python + args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, base): + records = list(records) # make a copy, as mutated + p = to_posix(os.path.relpath(record_path, base)) + records.append((p, '', '')) + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + hasher = getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + self.write_record(records, p, libdir) + ap = to_posix(os.path.join(info_dir, 'RECORD')) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. 
+ path = libdir + distinfo = None + for root, dirs, files in os.walk(path): + if root == path: + # At the top level only, save distinfo for later + # and skip it for now + for i, dn in enumerate(dirs): + dn = fsdecode(dn) + if dn.endswith('.dist-info'): + distinfo = os.path.join(root, dn) + del dirs[i] + break + assert distinfo, '.dist-info directory expected, not found' + + for fn in files: + # comment out next suite to leave .pyc files in + if fsdecode(fn).endswith(('.pyc', '.pyo')): + continue + p = os.path.join(root, fn) + rp = to_posix(os.path.relpath(p, path)) + archive_paths.append((rp, p)) + + # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. + files = os.listdir(distinfo) + for fn in files: + if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): + p = fsdecode(os.path.join(distinfo, fn)) + ap = to_posix(os.path.join(info_dir, fn)) + archive_paths.append((ap, p)) + + wheel_metadata = [ + 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), + 'Generator: distlib %s' % __version__, + 'Root-Is-Purelib: %s' % is_pure, + ] + for pyver, abi, arch in self.tags: + wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) + p = os.path.join(distinfo, 'WHEEL') + with open(p, 'w') as f: + f.write('\n'.join(wheel_metadata)) + ap = to_posix(os.path.join(info_dir, 'WHEEL')) + archive_paths.append((ap, p)) + + # sort the entries by archive path. Not needed by any spec, but it + # keeps the archive listing and RECORD tidier than they would otherwise + # be. Use the number of path segments to keep directory entries together, + # and keep the dist-info stuff at the end. + def sorter(t): + ap = t[0] + n = ap.count('/') + if '.dist-info' in ap: + n += 10000 + return (n, ap) + archive_paths = sorted(archive_paths, key=sorter) + + # Now, at last, RECORD. + # Paths in here are archive paths - nothing else makes sense. + self.write_records((distinfo, info_dir), libdir, archive_paths) + # Now, ready to build the zip file + pathname = os.path.join(self.dirname, self.filename) + self.build_zip(pathname, archive_paths) + return pathname + + def skip_entry(self, arcname): + """ + Determine whether an archive entry should be skipped when verifying + or installing. + """ + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + # We also skip directories, as they won't be in RECORD + # either. See: + # + # https://github.com/pypa/wheel/issues/294 + # https://github.com/pypa/wheel/issues/287 + # https://github.com/pypa/wheel/pull/289 + # + return arcname.endswith(('/', '/RECORD.jws')) + + def install(self, paths, maker, **kwargs): + """ + Install a wheel to the specified paths. If kwarg ``warner`` is + specified, it should be a callable, which will be called with two + tuples indicating the wheel version of this software and the wheel + version in the file, if there is a discrepancy in the versions. + This can be used to issue any warnings to raise any exceptions. + If kwarg ``lib_only`` is True, only the purelib/platlib files are + installed, and the headers, scripts, data and dist-info metadata are + not written. If kwarg ``bytecode_hashed_invalidation`` is True, written + bytecode will try to use file-hash based invalidation (PEP-552) on + supported interpreter versions (CPython 2.7+). + + The return value is a :class:`InstalledDistribution` instance unless + ``options.lib_only`` is True, in which case the return value is ``None``. 
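# ---- illustrative aside (example, not part of the vendored code or this diff) ----
# A minimal sketch of driving Wheel.install() as described in the docstring
# above, assuming the standalone distlib package and its ScriptMaker (which
# supplies the make()/dry_run interface the method relies on). The target
# layout and the wheel filename are hypothetical.
from distlib.scripts import ScriptMaker
from distlib.wheel import Wheel

def warn_version(supported, found):
    # Called only when the wheel declares a Wheel-Version other than (1, 1).
    print('wheel version mismatch: supported %s, file has %s' % (supported, found))

paths = {                      # hypothetical installation layout
    'prefix':  '/tmp/target',
    'purelib': '/tmp/target/lib',
    'platlib': '/tmp/target/lib',
    'scripts': '/tmp/target/bin',
    'headers': '/tmp/target/include',
    'data':    '/tmp/target/data',
}
maker = ScriptMaker(None, None, add_launchers=False)
wheel = Wheel('distlib-0.3.1-py2.py3-none-any.whl')   # hypothetical filename
dist = wheel.install(paths, maker, warner=warn_version)
# ---- end aside ----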
+ """ + + dry_run = maker.dry_run + warner = kwargs.get('warner') + lib_only = kwargs.get('lib_only', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if (file_version != self.wheel_version) and warner: + warner(self.wheel_version, file_version) + + if message['Root-Is-Purelib'] == 'true': + libdir = paths['purelib'] + else: + libdir = paths['platlib'] + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') + script_pfx = posixpath.join(data_dir, 'scripts', '') + + # make a new instance rather than a copy of maker's, + # as we mutate it + fileop = FileOperator(dry_run=dry_run) + fileop.record = True # so we can rollback if needed + + bc = not sys.dont_write_bytecode # Double negatives. Lovely! + + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) + and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. 
+ if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile(outfile, + hashed_invalidation=bc_hashed_invalidation) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', + exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. + commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' [%s]' % ','.join(v.flags) + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True } + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') + fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if 
cache is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), + '%s.%s' % sys.version_info[:2]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(pathname) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp(file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. + """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' 
% pathname + raise DistlibException(msg) + if pathname in sys.path: + logger.debug('%s already in path', pathname) + else: + if append: + sys.path.append(pathname) + else: + sys.path.insert(0, pathname) + extensions = self._get_extensions() + if extensions: + if _hook not in sys.meta_path: + sys.meta_path.append(_hook) + _hook.add(pathname, extensions) + + def unmount(self): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if pathname not in sys.path: + logger.debug('%s not in path', pathname) + else: + sys.path.remove(pathname) + if pathname in _hook.impure_wheels: + _hook.remove(pathname) + if not _hook.impure_wheels: + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + def verify(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # TODO version verification + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + # See issue #115: some wheels have .. in their entries, but + # in the filename ... e.g. __main__..py ! So the check is + # updated to look for .. in the directory portions + p = u_arcname.split('/') + if '..' in p: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + def update(self, modifier, dest_dir=None, **kwargs): + """ + Update the contents of a wheel in a generic way. The modifier should + be a callable which expects a dictionary argument: its keys are + archive-entry paths, and its values are absolute filesystem paths + where the contents the corresponding archive entries can be found. The + modifier is free to change the contents of the files pointed to, add + new entries and remove entries, before returning. This method will + extract the entire contents of the wheel to a temporary location, call + the modifier, and then use the passed (and possibly updated) + dictionary to write a new wheel. If ``dest_dir`` is specified, the new + wheel is written there -- otherwise, the original wheel is overwritten. + + The modifier should return True if it updated the wheel, else False. + This method returns the same value the modifier returns. 
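# ---- illustrative aside (example, not part of the vendored code or this diff) ----
# A minimal sketch of the modifier contract described in the docstring above:
# the callable receives a mapping of archive entry names to extracted file
# paths, may edit those files, and returns True only if it changed something.
# The package name, archive entry and output directory are hypothetical.
from distlib.wheel import Wheel

def add_build_stamp(path_map, **kwargs):
    target = 'example_pkg/__init__.py'            # hypothetical archive entry
    if target not in path_map:
        return False
    with open(path_map[target], 'a') as f:
        f.write('\nBUILD_STAMP = %r\n' % kwargs.get('stamp', 'dev'))
    return True

w = Wheel('example_pkg-1.0-py3-none-any.whl')     # hypothetical filename
modified = w.update(add_build_stamp, dest_dir='/tmp/out', stamp='2021-06-20')
# ---- end aside ----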
+ """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + v = NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], + '.'.join(str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = path.endswith(LEGACY_METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, + updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. + current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. + update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', + prefix='wheel-update-', + dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. 
+ """ + versions = [VER_SUFFIX] + major = VER_SUFFIX[0] + for minor in range(sys.version_info[1] - 1, - 1, -1): + versions.append(''.join([major, str(minor)])) + + abis = [] + for suffix, _, _ in imp.get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python version, ABI and arch + for abi in abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version in enumerate(versions): + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version in enumerate(versions): + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/env/lib/python2.7/site-packages/pip/_vendor/distro.py b/venv/Lib/site-packages/pip/_vendor/distro.py similarity index 97% rename from env/lib/python2.7/site-packages/pip/_vendor/distro.py rename to venv/Lib/site-packages/pip/_vendor/distro.py index 33061633..0611b62a 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/distro.py +++ b/venv/Lib/site-packages/pip/_vendor/distro.py @@ -49,7 +49,7 @@ #: #: * Value: Normalized value. NORMALIZED_OS_ID = { - 'ol': 'oracle', # Oracle Enterprise Linux + 'ol': 'oracle', # Oracle Linux } #: Translation table for normalizing the "Distributor ID" attribute returned by @@ -60,9 +60,11 @@ #: #: * Value: Normalized value. 
NORMALIZED_LSB_ID = { - 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux + 'enterpriseenterpriseas': 'oracle', # Oracle Enterprise Linux 4 + 'enterpriseenterpriseserver': 'oracle', # Oracle Linux 5 'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation 'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server + 'redhatenterprisecomputenode': 'rhel', # RHEL 6 ComputeNode } #: Translation table for normalizing the distro ID derived from the file name @@ -90,7 +92,8 @@ 'lsb-release', 'oem-release', _OS_RELEASE_BASENAME, - 'system-release' + 'system-release', + 'plesk-release', ) @@ -163,6 +166,7 @@ def id(): "openbsd" OpenBSD "netbsd" NetBSD "freebsd" FreeBSD + "midnightbsd" MidnightBSD ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -609,7 +613,7 @@ def __init__(self, distro release file can be found, the data source for the distro release file will be empty. - * ``include_name`` (bool): Controls whether uname command output is + * ``include_uname`` (bool): Controls whether uname command output is included as a data source. If the uname command is not available in the program execution path the data source for the uname command will be empty. @@ -757,7 +761,7 @@ def version(self, pretty=False, best=False): version = v break if pretty and version and self.codename(): - version = u'{0} ({1})'.format(version, self.codename()) + version = '{0} ({1})'.format(version, self.codename()) return version def version_parts(self, best=False): @@ -967,8 +971,6 @@ def _parse_os_release_content(lines): # * commands or their arguments (not allowed in os-release) if '=' in token: k, v = token.split('=', 1) - if isinstance(v, bytes): - v = v.decode('utf-8') props[k.lower()] = v else: # Ignore any tokens that are not variable assignments @@ -1012,7 +1014,7 @@ def _lsb_release_info(self): stdout = subprocess.check_output(cmd, stderr=devnull) except OSError: # Command not found return {} - content = stdout.decode(sys.getfilesystemencoding()).splitlines() + content = self._to_str(stdout).splitlines() return self._parse_lsb_release_content(content) @staticmethod @@ -1047,7 +1049,7 @@ def _uname_info(self): stdout = subprocess.check_output(cmd, stderr=devnull) except OSError: return {} - content = stdout.decode(sys.getfilesystemencoding()).splitlines() + content = self._to_str(stdout).splitlines() return self._parse_uname_content(content) @staticmethod @@ -1067,6 +1069,20 @@ def _parse_uname_content(lines): props['release'] = version return props + @staticmethod + def _to_str(text): + encoding = sys.getfilesystemencoding() + encoding = 'utf-8' if encoding == 'ascii' else encoding + + if sys.version_info[0] >= 3: + if isinstance(text, bytes): + return text.decode(encoding) + else: + if isinstance(text, unicode): # noqa + return text.encode(encoding) + + return text + @cached_property def _distro_release_info(self): """ @@ -1169,8 +1185,6 @@ def _parse_distro_release_content(line): Returns: A dictionary containing all information items. 
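# ---- illustrative aside (example, not part of the vendored code or this diff) ----
# _parse_distro_release_content() works on lines of the classic
# "<name> release <version> (<codename>)" form. A simplified, self-contained
# sketch of that parse (the real implementation matches the reversed line);
# the sample line below is hypothetical.
import re

_RELEASE_LINE = re.compile(
    r'^(?P<name>.+?)\s+release\s+(?P<version_id>[\d.]+)\s*(\((?P<codename>[^)]*)\))?')

def parse_release_line(line):
    m = _RELEASE_LINE.match(line.strip())
    return m.groupdict() if m else {}

# parse_release_line('CentOS Linux release 7.1.1503 (Core)') ->
# {'name': 'CentOS Linux', 'version_id': '7.1.1503', 'codename': 'Core'}
# ---- end aside ----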
""" - if isinstance(line, bytes): - line = line.decode('utf-8') matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match( line.strip()[::-1]) distro_info = {} diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__init__.py b/venv/Lib/site-packages/pip/_vendor/html5lib/__init__.py new file mode 100644 index 00000000..d1d82f15 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/__init__.py @@ -0,0 +1,35 @@ +""" +HTML parsing library based on the `WHATWG HTML specification +`_. The parser is designed to be compatible with +existing HTML found in the wild and implements well-defined error recovery that +is largely compatible with modern desktop web browsers. + +Example usage:: + + from pip._vendor import html5lib + with open("my_document.html", "rb") as f: + tree = html5lib.parse(f) + +For convenience, this module re-exports the following names: + +* :func:`~.html5parser.parse` +* :func:`~.html5parser.parseFragment` +* :class:`~.html5parser.HTMLParser` +* :func:`~.treebuilders.getTreeBuilder` +* :func:`~.treewalkers.getTreeWalker` +* :func:`~.serializer.serialize` +""" + +from __future__ import absolute_import, division, unicode_literals + +from .html5parser import HTMLParser, parse, parseFragment +from .treebuilders import getTreeBuilder +from .treewalkers import getTreeWalker +from .serializer import serialize + +__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", + "getTreeWalker", "serialize"] + +# this has to be at the top level, see how setup.py parses this +#: Distribution version number. +__version__ = "1.1" diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..a8cf9e9b Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-36.pyc new file mode 100644 index 00000000..abdeb17c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-36.pyc new file mode 100644 index 00000000..85b31b34 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-36.pyc new file mode 100644 index 00000000..d9b18fa0 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-36.pyc new file mode 100644 index 00000000..1076af66 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-36.pyc new file mode 100644 index 00000000..8b96396a Binary files /dev/null and 
b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-36.pyc new file mode 100644 index 00000000..405546c1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-36.pyc new file mode 100644 index 00000000..62ca6e23 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_ihatexml.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py similarity index 99% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/_ihatexml.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py index 4c77717b..3ff803c1 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_ihatexml.py +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py @@ -136,6 +136,7 @@ def normaliseCharList(charList): i += j return rv + # We don't really support characters above the BMP :( max_unicode = int("FFFF", 16) @@ -254,7 +255,7 @@ def toXmlName(self, name): nameRest = name[1:] m = nonXmlNameFirstBMPRegexp.match(nameFirst) if m: - warnings.warn("Coercing non-XML name", DataLossWarning) + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) nameFirstOutput = self.getReplacementCharacter(nameFirst) else: nameFirstOutput = nameFirst @@ -262,7 +263,7 @@ def toXmlName(self, name): nameRestOutput = nameRest replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) for char in replaceChars: - warnings.warn("Coercing non-XML name", DataLossWarning) + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) replacement = self.getReplacementCharacter(char) nameRestOutput = nameRestOutput.replace(char, replacement) return nameFirstOutput + nameRestOutput diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py similarity index 96% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py index a65e55f6..e0bb3760 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.py +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py @@ -1,10 +1,11 @@ from __future__ import absolute_import, division, unicode_literals -from pip._vendor.six import text_type, binary_type +from pip._vendor.six import text_type from pip._vendor.six.moves import http_client, urllib import codecs import re +from io import BytesIO, StringIO from pip._vendor import webencodings @@ -12,13 +13,6 @@ from .constants import _ReparseException from . 
import _utils -from io import StringIO - -try: - from io import BytesIO -except ImportError: - BytesIO = StringIO - # Non-unicode versions of constants for use in the pre-parser spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) @@ -40,13 +34,13 @@ else: invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) -non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, - 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, - 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, - 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, - 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, - 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, - 0x10FFFE, 0x10FFFF]) +non_bmp_invalid_codepoints = {0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, + 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, + 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, + 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, + 0x10FFFE, 0x10FFFF} ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]") @@ -367,7 +361,7 @@ def charsUntil(self, characters, opposite=False): def unget(self, char): # Only one character is allowed to be ungotten at once - it must # be consumed again before any further call to unget - if char is not None: + if char is not EOF: if self.chunkOffset == 0: # unget is called quite rarely, so it's a good idea to do # more work here if it saves a bit of work in the frequently @@ -449,7 +443,7 @@ def openStream(self, source): try: stream.seek(stream.tell()) - except: # pylint:disable=bare-except + except Exception: stream = BufferedStream(stream) return stream @@ -461,7 +455,7 @@ def determineEncoding(self, chardet=True): if charEncoding[0] is not None: return charEncoding - # If we've been overriden, we've been overriden + # If we've been overridden, we've been overridden charEncoding = lookupEncoding(self.override_encoding), "certain" if charEncoding[0] is not None: return charEncoding @@ -664,9 +658,7 @@ def matchBytes(self, bytes): """Look for a sequence of bytes at the start of a string. If the bytes are found return True and advance the position to the byte after the match. Otherwise return False and leave the position alone""" - p = self.position - data = self[p:p + len(bytes)] - rv = data.startswith(bytes) + rv = self.startswith(bytes, self.position) if rv: self.position += len(bytes) return rv @@ -674,15 +666,11 @@ def matchBytes(self, bytes): def jumpTo(self, bytes): """Look for the next sequence of bytes matching a given sequence. If a match is found advance the position to the last byte of the match""" - newPosition = self[self.position:].find(bytes) - if newPosition > -1: - # XXX: This is ugly, but I can't see a nicer way to fix this. - if self._position == -1: - self._position = 0 - self._position += (newPosition + len(bytes) - 1) - return True - else: + try: + self._position = self.index(bytes, self.position) + len(bytes) - 1 + except ValueError: raise StopIteration + return True class EncodingParser(object): @@ -694,6 +682,9 @@ def __init__(self, data): self.encoding = None def getEncoding(self): + if b"= (3, 7): + attributeMap = dict +else: + attributeMap = OrderedDict + class HTMLTokenizer(object): """ This class takes care of tokenizing HTML. 
@@ -228,6 +234,14 @@ def emitCurrentToken(self): # Add token to the queue to be yielded if (token["type"] in tagTokenTypes): token["name"] = token["name"].translate(asciiUpper2Lower) + if token["type"] == tokenTypes["StartTag"]: + raw = token["data"] + data = attributeMap(raw) + if len(raw) > len(data): + # we had some duplicated attribute, fix so first wins + data.update(raw[::-1]) + token["data"] = data + if token["type"] == tokenTypes["EndTag"]: if token["data"]: self.tokenQueue.append({"type": tokenTypes["ParseError"], diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py new file mode 100644 index 00000000..07bad5d3 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import, division, unicode_literals + +from .py import Trie + +__all__ = ["Trie"] diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..19013528 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-36.pyc new file mode 100644 index 00000000..338ddf91 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-36.pyc new file mode 100644 index 00000000..7ccae9af Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/_base.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/_base.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/py.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/py.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/_trie/py.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/_trie/py.py diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_utils.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_utils.py new file mode 100644 index 00000000..d7c4926a --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/_utils.py @@ -0,0 +1,159 @@ +from __future__ import absolute_import, division, unicode_literals + +from types import ModuleType + +try: + from collections.abc import Mapping +except ImportError: + from collections import Mapping + +from pip._vendor.six import text_type, PY3 + +if PY3: + import xml.etree.ElementTree as default_etree +else: + try: + import xml.etree.cElementTree as default_etree + except ImportError: + import xml.etree.ElementTree as default_etree + + +__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", + "surrogatePairToCodepoint", "moduleFactoryFactory", + "supports_lone_surrogates"] + + +# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be +# caught by the below test. 
In general this would be any platform +# using UTF-16 as its encoding of unicode strings, such as +# Jython. This is because UTF-16 itself is based on the use of such +# surrogates, and there is no mechanism to further escape such +# escapes. +try: + _x = eval('"\\uD800"') # pylint:disable=eval-used + if not isinstance(_x, text_type): + # We need this with u"" because of http://bugs.jython.org/issue2039 + _x = eval('u"\\uD800"') # pylint:disable=eval-used + assert isinstance(_x, text_type) +except Exception: + supports_lone_surrogates = False +else: + supports_lone_surrogates = True + + +class MethodDispatcher(dict): + """Dict with 2 special properties: + + On initiation, keys that are lists, sets or tuples are converted to + multiple keys so accessing any one of the items in the original + list-like object returns the matching value + + md = MethodDispatcher({("foo", "bar"):"baz"}) + md["foo"] == "baz" + + A default value which can be set through the default attribute. + """ + + def __init__(self, items=()): + _dictEntries = [] + for name, value in items: + if isinstance(name, (list, tuple, frozenset, set)): + for item in name: + _dictEntries.append((item, value)) + else: + _dictEntries.append((name, value)) + dict.__init__(self, _dictEntries) + assert len(self) == len(_dictEntries) + self.default = None + + def __getitem__(self, key): + return dict.get(self, key, self.default) + + def __get__(self, instance, owner=None): + return BoundMethodDispatcher(instance, self) + + +class BoundMethodDispatcher(Mapping): + """Wraps a MethodDispatcher, binding its return values to `instance`""" + def __init__(self, instance, dispatcher): + self.instance = instance + self.dispatcher = dispatcher + + def __getitem__(self, key): + # see https://docs.python.org/3/reference/datamodel.html#object.__get__ + # on a function, __get__ is used to bind a function to an instance as a bound method + return self.dispatcher[key].__get__(self.instance) + + def get(self, key, default): + if key in self.dispatcher: + return self[key] + else: + return default + + def __iter__(self): + return iter(self.dispatcher) + + def __len__(self): + return len(self.dispatcher) + + def __contains__(self, key): + return key in self.dispatcher + + +# Some utility functions to deal with weirdness around UCS2 vs UCS4 +# python builds + +def isSurrogatePair(data): + return (len(data) == 2 and + ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and + ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF) + + +def surrogatePairToCodepoint(data): + char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 + + (ord(data[1]) - 0xDC00)) + return char_val + +# Module Factory Factory (no, this isn't Java, I know) +# Here to stop this being duplicated all over the place. 
+ + +def moduleFactoryFactory(factory): + moduleCache = {} + + def moduleFactory(baseModule, *args, **kwargs): + if isinstance(ModuleType.__name__, type("")): + name = "_%s_factory" % baseModule.__name__ + else: + name = b"_%s_factory" % baseModule.__name__ + + kwargs_tuple = tuple(kwargs.items()) + + try: + return moduleCache[name][args][kwargs_tuple] + except KeyError: + mod = ModuleType(name) + objs = factory(baseModule, *args, **kwargs) + mod.__dict__.update(objs) + if "name" not in moduleCache: + moduleCache[name] = {} + if "args" not in moduleCache[name]: + moduleCache[name][args] = {} + if "kwargs" not in moduleCache[name][args]: + moduleCache[name][args][kwargs_tuple] = {} + moduleCache[name][args][kwargs_tuple] = mod + return mod + + return moduleFactory + + +def memoize(func): + cache = {} + + def wrapped(*args, **kwargs): + key = (tuple(args), tuple(kwargs.items())) + if key not in cache: + cache[key] = func(*args, **kwargs) + return cache[key] + + return wrapped diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.py b/venv/Lib/site-packages/pip/_vendor/html5lib/constants.py similarity index 99% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/constants.py index 1ff80419..fe3e237c 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.py +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/constants.py @@ -519,8 +519,8 @@ "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) } -unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in - adjustForeignAttributes.items()]) +unadjustForeignAttributes = {(ns, local): qname for qname, (prefix, local, ns) in + adjustForeignAttributes.items()} spaceCharacters = frozenset([ "\t", @@ -544,8 +544,7 @@ digits = frozenset(string.digits) hexDigits = frozenset(string.hexdigits) -asciiUpper2Lower = dict([(ord(c), ord(c.lower())) - for c in string.ascii_uppercase]) +asciiUpper2Lower = {ord(c): ord(c.lower()) for c in string.ascii_uppercase} # Heading elements need to be ordered headingElements = ( @@ -2934,7 +2933,7 @@ tokenTypes["EmptyTag"]]) -prefixes = dict([(v, k) for k, v in namespaces.items()]) +prefixes = {v: k for k, v in namespaces.items()} prefixes["http://www.w3.org/1998/Math/MathML"] = "math" diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__init__.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..a7a70e6b Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-36.pyc new file mode 100644 index 00000000..d24e493f Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-36.pyc new file mode 100644 index 00000000..b17e6ca5 Binary files /dev/null and 
b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-36.pyc new file mode 100644 index 00000000..b9e23d60 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-36.pyc new file mode 100644 index 00000000..1ff1069a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-36.pyc new file mode 100644 index 00000000..6a0cd089 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-36.pyc new file mode 100644 index 00000000..146780c2 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-36.pyc new file mode 100644 index 00000000..e3c55702 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/base.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/base.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/base.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/filters/base.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/lint.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/lint.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/lint.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/filters/lint.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/optionaltags.py similarity index 100% rename from 
env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/filters/optionaltags.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py similarity index 97% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py index af8e77b8..aa7431d1 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py @@ -1,6 +1,15 @@ +"""Deprecated from html5lib 1.1. + +See `here `_ for +information about its deprecation; `Bleach `_ +is recommended as a replacement. Please let us know in the aforementioned issue +if Bleach is unsuitable for your needs. + +""" from __future__ import absolute_import, division, unicode_literals import re +import warnings from xml.sax.saxutils import escape, unescape from pip._vendor.six.moves import urllib_parse as urlparse @@ -11,6 +20,14 @@ __all__ = ["Filter"] +_deprecation_msg = ( + "html5lib's sanitizer is deprecated; see " + + "https://github.com/html5lib/html5lib-python/issues/443 and please let " + + "us know if Bleach is unsuitable for your needs" +) + +warnings.warn(_deprecation_msg, DeprecationWarning) + allowed_elements = frozenset(( (namespaces['html'], 'a'), (namespaces['html'], 'abbr'), @@ -750,6 +767,9 @@ def __init__(self, """ super(Filter, self).__init__(source) + + warnings.warn(_deprecation_msg, DeprecationWarning) + self.allowed_elements = allowed_elements self.allowed_attributes = allowed_attributes self.allowed_css_properties = allowed_css_properties diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/whitespace.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/filters/whitespace.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.py b/venv/Lib/site-packages/pip/_vendor/html5lib/html5parser.py similarity index 85% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/html5parser.py index ae41a133..d06784f3 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.py +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/html5parser.py @@ -2,7 +2,6 @@ from pip._vendor.six import with_metaclass, viewkeys import types -from collections import OrderedDict from . import _inputstream from . 
import _tokenizer @@ -119,8 +118,8 @@ def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=Fa self.tree = tree(namespaceHTMLElements) self.errors = [] - self.phases = dict([(name, cls(self, self.tree)) for name, cls in - getPhases(debug).items()]) + self.phases = {name: cls(self, self.tree) for name, cls in + getPhases(debug).items()} def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): @@ -202,7 +201,7 @@ def mainLoop(self): DoctypeToken = tokenTypes["Doctype"] ParseErrorToken = tokenTypes["ParseError"] - for token in self.normalizedTokens(): + for token in self.tokenizer: prev_token = None new_token = token while new_token is not None: @@ -260,10 +259,6 @@ def mainLoop(self): if reprocess: assert self.phase not in phases - def normalizedTokens(self): - for token in self.tokenizer: - yield self.normalizeToken(token) - def parse(self, stream, *args, **kwargs): """Parse a HTML document into a well-formed tree @@ -325,17 +320,6 @@ def parseError(self, errorcode="XXX-undefined-error", datavars=None): if self.strict: raise ParseError(E[errorcode] % datavars) - def normalizeToken(self, token): - # HTML5 specific normalizations to the token stream - if token["type"] == tokenTypes["StartTag"]: - raw = token["data"] - token["data"] = OrderedDict(raw) - if len(raw) > len(token["data"]): - # we had some duplicated attribute, fix so first wins - token["data"].update(raw[::-1]) - - return token - def adjustMathMLAttributes(self, token): adjust_attributes(token, adjustMathMLAttributes) @@ -413,16 +397,12 @@ def parseRCDataRawtext(self, token, contentType): def getPhases(debug): def log(function): """Logger that records which phase processes each token""" - type_names = dict((value, key) for key, value in - tokenTypes.items()) + type_names = {value: key for key, value in tokenTypes.items()} def wrapped(self, *args, **kwargs): if function.__name__.startswith("process") and len(args) > 0: token = args[0] - try: - info = {"type": type_names[token['type']]} - except: - raise + info = {"type": type_names[token['type']]} if token['type'] in tagTokenTypes: info["name"] = token['name'] @@ -446,10 +426,13 @@ def getMetaclass(use_metaclass, metaclass_func): class Phase(with_metaclass(getMetaclass(debug, log))): """Base class for helper object that implements each phase of processing """ + __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache") def __init__(self, parser, tree): self.parser = parser self.tree = tree + self.__startTagCache = {} + self.__endTagCache = {} def processEOF(self): raise NotImplementedError @@ -469,7 +452,21 @@ def processSpaceCharacters(self, token): self.tree.insertText(token["data"]) def processStartTag(self, token): - return self.startTagHandler[token["name"]](token) + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__startTagCache: + func = self.__startTagCache[name] + else: + func = self.__startTagCache[name] = self.startTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__startTagCache) > len(self.startTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + 
self.__startTagCache.pop(next(iter(self.__startTagCache))) + return func(token) def startTagHtml(self, token): if not self.parser.firstStartTag and token["name"] == "html": @@ -482,9 +479,25 @@ def startTagHtml(self, token): self.parser.firstStartTag = False def processEndTag(self, token): - return self.endTagHandler[token["name"]](token) + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__endTagCache: + func = self.__endTagCache[name] + else: + func = self.__endTagCache[name] = self.endTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__endTagCache) > len(self.endTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + self.__endTagCache.pop(next(iter(self.__endTagCache))) + return func(token) class InitialPhase(Phase): + __slots__ = tuple() + def processSpaceCharacters(self, token): pass @@ -613,6 +626,8 @@ def processEOF(self): return True class BeforeHtmlPhase(Phase): + __slots__ = tuple() + # helper methods def insertHtmlElement(self): self.tree.insertRoot(impliedTagToken("html", "StartTag")) @@ -648,19 +663,7 @@ def processEndTag(self, token): return token class BeforeHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("head", "body", "html", "br"), self.endTagImplyHead) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processEOF(self): self.startTagHead(impliedTagToken("head", "StartTag")) @@ -693,28 +696,19 @@ def endTagOther(self, token): self.parser.parseError("end-tag-after-implied-root", {"name": token["name"]}) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("head", "body", "html", "br"), endTagImplyHead) + ]) + endTagHandler.default = endTagOther + class InHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("title", self.startTagTitle), - (("noframes", "style"), self.startTagNoFramesStyle), - ("noscript", self.startTagNoscript), - ("script", self.startTagScript), - (("base", "basefont", "bgsound", "command", "link"), - self.startTagBaseLinkCommand), - ("meta", self.startTagMeta), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("head", self.endTagHead), - (("br", "html", "body"), self.endTagHtmlBodyBr) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # the real thing def processEOF(self): @@ -796,22 +790,27 @@ def endTagOther(self, token): def anythingElse(self): self.endTagHead(impliedTagToken("head")) - class InHeadNoscriptPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + 
("html", startTagHtml), + ("title", startTagTitle), + (("noframes", "style"), startTagNoFramesStyle), + ("noscript", startTagNoscript), + ("script", startTagScript), + (("base", "basefont", "bgsound", "command", "link"), + startTagBaseLinkCommand), + ("meta", startTagMeta), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("head", endTagHead), + (("br", "html", "body"), endTagHtmlBodyBr) + ]) + endTagHandler.default = endTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), - (("head", "noscript"), self.startTagHeadNoscript), - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("noscript", self.endTagNoscript), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther + class InHeadNoscriptPhase(Phase): + __slots__ = tuple() def processEOF(self): self.parser.parseError("eof-in-head-noscript") @@ -860,23 +859,21 @@ def anythingElse(self): # Caller must raise parse error first! self.endTagNoscript(impliedTagToken("noscript")) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + (("basefont", "bgsound", "link", "meta", "noframes", "style"), startTagBaseLinkCommand), + (("head", "noscript"), startTagHeadNoscript), + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("noscript", endTagNoscript), + ("br", endTagBr), + ]) + endTagHandler.default = endTagOther + class AfterHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", - "style", "title"), - self.startTagFromHead), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), - self.endTagHtmlBodyBr)]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processEOF(self): self.anythingElse() @@ -927,80 +924,30 @@ def anythingElse(self): self.parser.phase = self.parser.phases["inBody"] self.parser.framesetOK = True + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("body", startTagBody), + ("frameset", startTagFrameset), + (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", + "style", "title"), + startTagFromHead), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), + endTagHtmlBodyBr)]) + endTagHandler.default = endTagOther + class InBodyPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody # the really-really-really-very crazy mode - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + __slots__ = ("processSpaceCharacters",) + def __init__(self, *args, **kwargs): + super(InBodyPhase, self).__init__(*args, **kwargs) # Set this to the default handler self.processSpaceCharacters = self.processSpaceCharactersNonPre - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("base", "basefont", "bgsound", "command", "link", "meta", - "script", "style", "title"), - self.startTagProcessInHead), - ("body", self.startTagBody), 
- ("frameset", self.startTagFrameset), - (("address", "article", "aside", "blockquote", "center", "details", - "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", - "section", "summary", "ul"), - self.startTagCloseP), - (headingElements, self.startTagHeading), - (("pre", "listing"), self.startTagPreListing), - ("form", self.startTagForm), - (("li", "dd", "dt"), self.startTagListItem), - ("plaintext", self.startTagPlaintext), - ("a", self.startTagA), - (("b", "big", "code", "em", "font", "i", "s", "small", "strike", - "strong", "tt", "u"), self.startTagFormatting), - ("nobr", self.startTagNobr), - ("button", self.startTagButton), - (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), - ("xmp", self.startTagXmp), - ("table", self.startTagTable), - (("area", "br", "embed", "img", "keygen", "wbr"), - self.startTagVoidFormatting), - (("param", "source", "track"), self.startTagParamSource), - ("input", self.startTagInput), - ("hr", self.startTagHr), - ("image", self.startTagImage), - ("isindex", self.startTagIsIndex), - ("textarea", self.startTagTextarea), - ("iframe", self.startTagIFrame), - ("noscript", self.startTagNoscript), - (("noembed", "noframes"), self.startTagRawtext), - ("select", self.startTagSelect), - (("rp", "rt"), self.startTagRpRt), - (("option", "optgroup"), self.startTagOpt), - (("math"), self.startTagMath), - (("svg"), self.startTagSvg), - (("caption", "col", "colgroup", "frame", "head", - "tbody", "td", "tfoot", "th", "thead", - "tr"), self.startTagMisplaced) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("body", self.endTagBody), - ("html", self.endTagHtml), - (("address", "article", "aside", "blockquote", "button", "center", - "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", - "section", "summary", "ul"), self.endTagBlock), - ("form", self.endTagForm), - ("p", self.endTagP), - (("dd", "dt", "li"), self.endTagListItem), - (headingElements, self.endTagHeading), - (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", - "strike", "strong", "tt", "u"), self.endTagFormatting), - (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther - def isMatchingFormattingElement(self, node1, node2): return (node1.name == node2.name and node1.namespace == node2.namespace and @@ -1650,14 +1597,73 @@ def endTagOther(self, token): self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) break + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("base", "basefont", "bgsound", "command", "link", "meta", + "script", "style", "title"), + startTagProcessInHead), + ("body", startTagBody), + ("frameset", startTagFrameset), + (("address", "article", "aside", "blockquote", "center", "details", + "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", + "section", "summary", "ul"), + startTagCloseP), + (headingElements, startTagHeading), + (("pre", "listing"), startTagPreListing), + ("form", startTagForm), + (("li", "dd", "dt"), startTagListItem), + ("plaintext", startTagPlaintext), + ("a", startTagA), + (("b", "big", "code", "em", "font", "i", "s", "small", "strike", + "strong", "tt", "u"), startTagFormatting), + ("nobr", startTagNobr), + 
("button", startTagButton), + (("applet", "marquee", "object"), startTagAppletMarqueeObject), + ("xmp", startTagXmp), + ("table", startTagTable), + (("area", "br", "embed", "img", "keygen", "wbr"), + startTagVoidFormatting), + (("param", "source", "track"), startTagParamSource), + ("input", startTagInput), + ("hr", startTagHr), + ("image", startTagImage), + ("isindex", startTagIsIndex), + ("textarea", startTagTextarea), + ("iframe", startTagIFrame), + ("noscript", startTagNoscript), + (("noembed", "noframes"), startTagRawtext), + ("select", startTagSelect), + (("rp", "rt"), startTagRpRt), + (("option", "optgroup"), startTagOpt), + (("math"), startTagMath), + (("svg"), startTagSvg), + (("caption", "col", "colgroup", "frame", "head", + "tbody", "td", "tfoot", "th", "thead", + "tr"), startTagMisplaced) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("body", endTagBody), + ("html", endTagHtml), + (("address", "article", "aside", "blockquote", "button", "center", + "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", + "section", "summary", "ul"), endTagBlock), + ("form", endTagForm), + ("p", endTagP), + (("dd", "dt", "li"), endTagListItem), + (headingElements, endTagHeading), + (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", + "strike", "strong", "tt", "u"), endTagFormatting), + (("applet", "marquee", "object"), endTagAppletMarqueeObject), + ("br", endTagBr), + ]) + endTagHandler.default = endTagOther + class TextPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([]) - self.startTagHandler.default = self.startTagOther - self.endTagHandler = _utils.MethodDispatcher([ - ("script", self.endTagScript)]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processCharacters(self, token): self.tree.insertText(token["data"]) @@ -1683,30 +1689,15 @@ def endTagOther(self, token): self.tree.openElements.pop() self.parser.phase = self.parser.originalPhase + startTagHandler = _utils.MethodDispatcher([]) + startTagHandler.default = startTagOther + endTagHandler = _utils.MethodDispatcher([ + ("script", endTagScript)]) + endTagHandler.default = endTagOther + class InTablePhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-table - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("caption", self.startTagCaption), - ("colgroup", self.startTagColgroup), - ("col", self.startTagCol), - (("tbody", "tfoot", "thead"), self.startTagRowGroup), - (("td", "th", "tr"), self.startTagImplyTbody), - ("table", self.startTagTable), - (("style", "script"), self.startTagStyleScript), - ("input", self.startTagInput), - ("form", self.startTagForm) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("table", self.endTagTable), - (("body", "caption", "col", "colgroup", "html", "tbody", "td", - "tfoot", "th", "thead", "tr"), self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper methods def clearStackToTableContext(self): @@ -1828,9 +1819,32 @@ def endTagOther(self, token): self.parser.phases["inBody"].processEndTag(token) self.tree.insertFromTable = False + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + 
("caption", startTagCaption), + ("colgroup", startTagColgroup), + ("col", startTagCol), + (("tbody", "tfoot", "thead"), startTagRowGroup), + (("td", "th", "tr"), startTagImplyTbody), + ("table", startTagTable), + (("style", "script"), startTagStyleScript), + ("input", startTagInput), + ("form", startTagForm) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("table", endTagTable), + (("body", "caption", "col", "colgroup", "html", "tbody", "td", + "tfoot", "th", "thead", "tr"), endTagIgnore) + ]) + endTagHandler.default = endTagOther + class InTableTextPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + __slots__ = ("originalPhase", "characterTokens") + + def __init__(self, *args, **kwargs): + super(InTableTextPhase, self).__init__(*args, **kwargs) self.originalPhase = None self.characterTokens = [] @@ -1875,23 +1889,7 @@ def processEndTag(self, token): class InCaptionPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-caption - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", - "thead", "tr"), self.startTagTableElement) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("caption", self.endTagCaption), - ("table", self.endTagTable), - (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th", - "thead", "tr"), self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def ignoreEndTagCaption(self): return not self.tree.elementInScope("caption", variant="table") @@ -1944,23 +1942,24 @@ def endTagIgnore(self, token): def endTagOther(self, token): return self.parser.phases["inBody"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", + "thead", "tr"), startTagTableElement) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("caption", endTagCaption), + ("table", endTagTable), + (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th", + "thead", "tr"), endTagIgnore) + ]) + endTagHandler.default = endTagOther + class InColumnGroupPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-column - - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("col", self.startTagCol) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("colgroup", self.endTagColgroup), - ("col", self.endTagCol) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def ignoreEndTagColgroup(self): return self.tree.openElements[-1].name == "html" @@ -2010,26 +2009,21 @@ def endTagOther(self, token): if not ignoreEndTag: return token + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("col", startTagCol) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("colgroup", endTagColgroup), + ("col", endTagCol) + ]) + endTagHandler.default = endTagOther + class InTableBodyPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-table0 - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler 
= _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("tr", self.startTagTr), - (("td", "th"), self.startTagTableCell), - (("caption", "col", "colgroup", "tbody", "tfoot", "thead"), - self.startTagTableOther) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), - ("table", self.endTagTable), - (("body", "caption", "col", "colgroup", "html", "td", "th", - "tr"), self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper methods def clearStackToTableBodyContext(self): @@ -2108,26 +2102,26 @@ def endTagIgnore(self, token): def endTagOther(self, token): return self.parser.phases["inTable"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("tr", startTagTr), + (("td", "th"), startTagTableCell), + (("caption", "col", "colgroup", "tbody", "tfoot", "thead"), + startTagTableOther) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("tbody", "tfoot", "thead"), endTagTableRowGroup), + ("table", endTagTable), + (("body", "caption", "col", "colgroup", "html", "td", "th", + "tr"), endTagIgnore) + ]) + endTagHandler.default = endTagOther + class InRowPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-row - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("td", "th"), self.startTagTableCell), - (("caption", "col", "colgroup", "tbody", "tfoot", "thead", - "tr"), self.startTagTableOther) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("tr", self.endTagTr), - ("table", self.endTagTable), - (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), - (("body", "caption", "col", "colgroup", "html", "td", "th"), - self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper methods (XXX unify this with other table helper methods) def clearStackToTableRowContext(self): @@ -2197,23 +2191,26 @@ def endTagIgnore(self, token): def endTagOther(self, token): return self.parser.phases["inTable"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("td", "th"), startTagTableCell), + (("caption", "col", "colgroup", "tbody", "tfoot", "thead", + "tr"), startTagTableOther) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("tr", endTagTr), + ("table", endTagTable), + (("tbody", "tfoot", "thead"), endTagTableRowGroup), + (("body", "caption", "col", "colgroup", "html", "td", "th"), + endTagIgnore) + ]) + endTagHandler.default = endTagOther + class InCellPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-cell - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", - "thead", "tr"), self.startTagTableOther) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("td", "th"), self.endTagTableCell), - (("body", "caption", "col", "colgroup", "html"), self.endTagIgnore), - (("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper def 
closeCell(self): @@ -2273,26 +2270,22 @@ def endTagImply(self, token): def endTagOther(self, token): return self.parser.phases["inBody"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", + "thead", "tr"), startTagTableOther) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("td", "th"), endTagTableCell), + (("body", "caption", "col", "colgroup", "html"), endTagIgnore), + (("table", "tbody", "tfoot", "thead", "tr"), endTagImply) + ]) + endTagHandler.default = endTagOther + class InSelectPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("option", self.startTagOption), - ("optgroup", self.startTagOptgroup), - ("select", self.startTagSelect), - (("input", "keygen", "textarea"), self.startTagInput), - ("script", self.startTagScript) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("option", self.endTagOption), - ("optgroup", self.endTagOptgroup), - ("select", self.endTagSelect) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # http://www.whatwg.org/specs/web-apps/current-work/#in-select def processEOF(self): @@ -2373,21 +2366,25 @@ def endTagOther(self, token): self.parser.parseError("unexpected-end-tag-in-select", {"name": token["name"]}) - class InSelectInTablePhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), - self.startTagTable) - ]) - self.startTagHandler.default = self.startTagOther + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("option", startTagOption), + ("optgroup", startTagOptgroup), + ("select", startTagSelect), + (("input", "keygen", "textarea"), startTagInput), + ("script", startTagScript) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("option", endTagOption), + ("optgroup", endTagOptgroup), + ("select", endTagSelect) + ]) + endTagHandler.default = endTagOther - self.endTagHandler = _utils.MethodDispatcher([ - (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), - self.endTagTable) - ]) - self.endTagHandler.default = self.endTagOther + class InSelectInTablePhase(Phase): + __slots__ = tuple() def processEOF(self): self.parser.phases["inSelect"].processEOF() @@ -2412,7 +2409,21 @@ def endTagTable(self, token): def endTagOther(self, token): return self.parser.phases["inSelect"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), + startTagTable) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), + endTagTable) + ]) + endTagHandler.default = endTagOther + class InForeignContentPhase(Phase): + __slots__ = tuple() + breakoutElements = frozenset(["b", "big", "blockquote", "body", "br", "center", "code", "dd", "div", "dl", "dt", "em", "embed", "h1", "h2", "h3", @@ -2422,9 +2433,6 @@ class InForeignContentPhase(Phase): "span", "strong", "strike", "sub", "sup", "table", "tt", "u", "ul", "var"]) - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - def 
adjustSVGTagNames(self, token): replacements = {"altglyph": "altGlyph", "altglyphdef": "altGlyphDef", @@ -2478,7 +2486,7 @@ def processStartTag(self, token): currentNode = self.tree.openElements[-1] if (token["name"] in self.breakoutElements or (token["name"] == "font" and - set(token["data"].keys()) & set(["color", "face", "size"]))): + set(token["data"].keys()) & {"color", "face", "size"})): self.parser.parseError("unexpected-html-element-in-foreign-content", {"name": token["name"]}) while (self.tree.openElements[-1].namespace != @@ -2528,16 +2536,7 @@ def processEndTag(self, token): return new_token class AfterBodyPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processEOF(self): # Stop parsing @@ -2574,23 +2573,17 @@ def endTagOther(self, token): self.parser.phase = self.parser.phases["inBody"] return token - class InFramesetPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("frameset", self.startTagFrameset), - ("frame", self.startTagFrame), - ("noframes", self.startTagNoframes) - ]) - self.startTagHandler.default = self.startTagOther + endTagHandler = _utils.MethodDispatcher([("html", endTagHtml)]) + endTagHandler.default = endTagOther - self.endTagHandler = _utils.MethodDispatcher([ - ("frameset", self.endTagFrameset) - ]) - self.endTagHandler.default = self.endTagOther + class InFramesetPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset + __slots__ = tuple() def processEOF(self): if self.tree.openElements[-1].name != "html": @@ -2631,21 +2624,22 @@ def endTagOther(self, token): self.parser.parseError("unexpected-end-tag-in-frameset", {"name": token["name"]}) - class AfterFramesetPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#after3 - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("frameset", startTagFrameset), + ("frame", startTagFrame), + ("noframes", startTagNoframes) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("noframes", self.startTagNoframes) - ]) - self.startTagHandler.default = self.startTagOther + endTagHandler = _utils.MethodDispatcher([ + ("frameset", endTagFrameset) + ]) + endTagHandler.default = endTagOther - self.endTagHandler = _utils.MethodDispatcher([ - ("html", self.endTagHtml) - ]) - self.endTagHandler.default = self.endTagOther + class AfterFramesetPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#after3 + __slots__ = tuple() def processEOF(self): # Stop parsing @@ -2668,14 +2662,19 @@ def endTagOther(self, token): self.parser.parseError("unexpected-end-tag-after-frameset", {"name": token["name"]}) - class AfterAfterBodyPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", 
Phase.startTagHtml), + ("noframes", startTagNoframes) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml) - ]) - self.startTagHandler.default = self.startTagOther + endTagHandler = _utils.MethodDispatcher([ + ("html", endTagHtml) + ]) + endTagHandler.default = endTagOther + + class AfterAfterBodyPhase(Phase): + __slots__ = tuple() def processEOF(self): pass @@ -2706,15 +2705,13 @@ def processEndTag(self, token): self.parser.phase = self.parser.phases["inBody"] return token - class AfterAfterFramesetPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("noframes", self.startTagNoFrames) - ]) - self.startTagHandler.default = self.startTagOther + class AfterAfterFramesetPhase(Phase): + __slots__ = tuple() def processEOF(self): pass @@ -2741,6 +2738,13 @@ def startTagOther(self, token): def processEndTag(self, token): self.parser.parseError("expected-eof-but-got-end-tag", {"name": token["name"]}) + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("noframes", startTagNoFrames) + ]) + startTagHandler.default = startTagOther + # pylint:enable=unused-argument return { @@ -2774,8 +2778,8 @@ def processEndTag(self, token): def adjust_attributes(token, replacements): needs_adjustment = viewkeys(token['data']) & viewkeys(replacements) if needs_adjustment: - token['data'] = OrderedDict((replacements.get(k, k), v) - for k, v in token['data'].items()) + token['data'] = type(token['data'])((replacements.get(k, k), v) + for k, v in token['data'].items()) def impliedTagToken(name, type="EndTag", attributes=None, diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/serializer.py b/venv/Lib/site-packages/pip/_vendor/html5lib/serializer.py similarity index 99% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/serializer.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/serializer.py index 53f4d44c..d5669d8c 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/serializer.py +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/serializer.py @@ -274,7 +274,7 @@ def serialize(self, treewalker, encoding=None): if token["systemId"]: if token["systemId"].find('"') >= 0: if token["systemId"].find("'") >= 0: - self.serializeError("System identifer contains both single and double quote characters") + self.serializeError("System identifier contains both single and double quote characters") quote_char = "'" else: quote_char = '"' diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..e0f80347 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-36.pyc 
b/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-36.pyc new file mode 100644 index 00000000..aabfa6e2 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-36.pyc new file mode 100644 index 00000000..70fb78d6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/sax.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/treeadapters/sax.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..eb2dac53 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-36.pyc new file mode 100644 index 00000000..c23f546c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-36.pyc new file mode 100644 index 00000000..db976059 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-36.pyc new file mode 100644 index 00000000..199e3a6a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc new file mode 100644 index 00000000..2d4d473c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py new file mode 100644 index 00000000..965fce29 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py @@ -0,0 +1,417 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import text_type + +from ..constants import scopingElements, tableInsertModeElements, namespaces + +# The scope markers are inserted when entering object elements, +# marquees, table cells, and table captions, and are used to prevent formatting +# from "leaking" into tables, object elements, and marquees. +Marker = None + +listElementsMap = { + None: (frozenset(scopingElements), False), + "button": (frozenset(scopingElements | {(namespaces["html"], "button")}), False), + "list": (frozenset(scopingElements | {(namespaces["html"], "ol"), + (namespaces["html"], "ul")}), False), + "table": (frozenset([(namespaces["html"], "html"), + (namespaces["html"], "table")]), False), + "select": (frozenset([(namespaces["html"], "optgroup"), + (namespaces["html"], "option")]), True) +} + + +class Node(object): + """Represents an item in the tree""" + def __init__(self, name): + """Creates a Node + + :arg name: The tag name associated with the node + + """ + # The tag name associated with the node + self.name = name + # The parent of the current node (or None for the document node) + self.parent = None + # The value of the current node (applies to text nodes and comments) + self.value = None + # A dict holding name -> value pairs for attributes of the node + self.attributes = {} + # A list of child nodes of the current node. This must include all + # elements but not necessarily other node types. + self.childNodes = [] + # A list of miscellaneous flags that can be set on the node. + self._flags = [] + + def __str__(self): + attributesStr = " ".join(["%s=\"%s\"" % (name, value) + for name, value in + self.attributes.items()]) + if attributesStr: + return "<%s %s>" % (self.name, attributesStr) + else: + return "<%s>" % (self.name) + + def __repr__(self): + return "<%s>" % (self.name) + + def appendChild(self, node): + """Insert node as a child of the current node + + :arg node: the node to insert + + """ + raise NotImplementedError + + def insertText(self, data, insertBefore=None): + """Insert data as text in the current node, positioned before the + start of node insertBefore or to the end of the node's text. + + :arg data: the data to insert + + :arg insertBefore: True if you want to insert the text before the node + and False if you want to insert it after the node + + """ + raise NotImplementedError + + def insertBefore(self, node, refNode): + """Insert node as a child of the current node, before refNode in the + list of child nodes. Raises ValueError if refNode is not a child of + the current node + + :arg node: the node to insert + + :arg refNode: the child node to insert the node before + + """ + raise NotImplementedError + + def removeChild(self, node): + """Remove node from the children of the current node + + :arg node: the child node to remove + + """ + raise NotImplementedError + + def reparentChildren(self, newParent): + """Move all the children of the current node to newParent. + This is needed so that trees that don't store text as nodes move the + text in the correct way + + :arg newParent: the node to move all this node's children to + + """ + # XXX - should this method be made more general? 
+ for child in self.childNodes: + newParent.appendChild(child) + self.childNodes = [] + + def cloneNode(self): + """Return a shallow copy of the current node i.e. a node with the same + name and attributes but with no parent or child nodes + """ + raise NotImplementedError + + def hasContent(self): + """Return true if the node has children or text, false otherwise + """ + raise NotImplementedError + + +class ActiveFormattingElements(list): + def append(self, node): + equalCount = 0 + if node != Marker: + for element in self[::-1]: + if element == Marker: + break + if self.nodesEqual(element, node): + equalCount += 1 + if equalCount == 3: + self.remove(element) + break + list.append(self, node) + + def nodesEqual(self, node1, node2): + if not node1.nameTuple == node2.nameTuple: + return False + + if not node1.attributes == node2.attributes: + return False + + return True + + +class TreeBuilder(object): + """Base treebuilder implementation + + * documentClass - the class to use for the bottommost node of a document + * elementClass - the class to use for HTML Elements + * commentClass - the class to use for comments + * doctypeClass - the class to use for doctypes + + """ + # pylint:disable=not-callable + + # Document class + documentClass = None + + # The class to use for creating a node + elementClass = None + + # The class to use for creating comments + commentClass = None + + # The class to use for creating doctypes + doctypeClass = None + + # Fragment class + fragmentClass = None + + def __init__(self, namespaceHTMLElements): + """Create a TreeBuilder + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + """ + if namespaceHTMLElements: + self.defaultNamespace = "http://www.w3.org/1999/xhtml" + else: + self.defaultNamespace = None + self.reset() + + def reset(self): + self.openElements = [] + self.activeFormattingElements = ActiveFormattingElements() + + # XXX - rename these to headElement, formElement + self.headPointer = None + self.formPointer = None + + self.insertFromTable = False + + self.document = self.documentClass() + + def elementInScope(self, target, variant=None): + + # If we pass a node in we match that. if we pass a string + # match any node with that name + exactNode = hasattr(target, "nameTuple") + if not exactNode: + if isinstance(target, text_type): + target = (namespaces["html"], target) + assert isinstance(target, tuple) + + listElements, invert = listElementsMap[variant] + + for node in reversed(self.openElements): + if exactNode and node == target: + return True + elif not exactNode and node.nameTuple == target: + return True + elif (invert ^ (node.nameTuple in listElements)): + return False + + assert False # We should never reach this point + + def reconstructActiveFormattingElements(self): + # Within this algorithm the order of steps described in the + # specification is not quite the same as the order of steps in the + # code. It should still do the same though. + + # Step 1: stop the algorithm when there's nothing to do. + if not self.activeFormattingElements: + return + + # Step 2 and step 3: we start with the last element. So i is -1. + i = len(self.activeFormattingElements) - 1 + entry = self.activeFormattingElements[i] + if entry == Marker or entry in self.openElements: + return + + # Step 6 + while entry != Marker and entry not in self.openElements: + if i == 0: + # This will be reset to 0 below + i = -1 + break + i -= 1 + # Step 5: let entry be one earlier in the list. 
+ entry = self.activeFormattingElements[i] + + while True: + # Step 7 + i += 1 + + # Step 8 + entry = self.activeFormattingElements[i] + clone = entry.cloneNode() # Mainly to get a new copy of the attributes + + # Step 9 + element = self.insertElement({"type": "StartTag", + "name": clone.name, + "namespace": clone.namespace, + "data": clone.attributes}) + + # Step 10 + self.activeFormattingElements[i] = element + + # Step 11 + if element == self.activeFormattingElements[-1]: + break + + def clearActiveFormattingElements(self): + entry = self.activeFormattingElements.pop() + while self.activeFormattingElements and entry != Marker: + entry = self.activeFormattingElements.pop() + + def elementInActiveFormattingElements(self, name): + """Check if an element exists between the end of the active + formatting elements and the last marker. If it does, return it, else + return false""" + + for item in self.activeFormattingElements[::-1]: + # Check for Marker first because if it's a Marker it doesn't have a + # name attribute. + if item == Marker: + break + elif item.name == name: + return item + return False + + def insertRoot(self, token): + element = self.createElement(token) + self.openElements.append(element) + self.document.appendChild(element) + + def insertDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + + doctype = self.doctypeClass(name, publicId, systemId) + self.document.appendChild(doctype) + + def insertComment(self, token, parent=None): + if parent is None: + parent = self.openElements[-1] + parent.appendChild(self.commentClass(token["data"])) + + def createElement(self, token): + """Create an element but don't insert it anywhere""" + name = token["name"] + namespace = token.get("namespace", self.defaultNamespace) + element = self.elementClass(name, namespace) + element.attributes = token["data"] + return element + + def _getInsertFromTable(self): + return self._insertFromTable + + def _setInsertFromTable(self, value): + """Switch the function used to insert an element from the + normal one to the misnested table one and back again""" + self._insertFromTable = value + if value: + self.insertElement = self.insertElementTable + else: + self.insertElement = self.insertElementNormal + + insertFromTable = property(_getInsertFromTable, _setInsertFromTable) + + def insertElementNormal(self, token): + name = token["name"] + assert isinstance(name, text_type), "Element %s not unicode" % name + namespace = token.get("namespace", self.defaultNamespace) + element = self.elementClass(name, namespace) + element.attributes = token["data"] + self.openElements[-1].appendChild(element) + self.openElements.append(element) + return element + + def insertElementTable(self, token): + """Create an element and insert it into the tree""" + element = self.createElement(token) + if self.openElements[-1].name not in tableInsertModeElements: + return self.insertElementNormal(token) + else: + # We should be in the InTable mode. 
This means we want to do + # special magic element rearranging + parent, insertBefore = self.getTableMisnestedNodePosition() + if insertBefore is None: + parent.appendChild(element) + else: + parent.insertBefore(element, insertBefore) + self.openElements.append(element) + return element + + def insertText(self, data, parent=None): + """Insert text data.""" + if parent is None: + parent = self.openElements[-1] + + if (not self.insertFromTable or (self.insertFromTable and + self.openElements[-1].name + not in tableInsertModeElements)): + parent.insertText(data) + else: + # We should be in the InTable mode. This means we want to do + # special magic element rearranging + parent, insertBefore = self.getTableMisnestedNodePosition() + parent.insertText(data, insertBefore) + + def getTableMisnestedNodePosition(self): + """Get the foster parent element, and sibling to insert before + (or None) when inserting a misnested table node""" + # The foster parent element is the one which comes before the most + # recently opened table element + # XXX - this is really inelegant + lastTable = None + fosterParent = None + insertBefore = None + for elm in self.openElements[::-1]: + if elm.name == "table": + lastTable = elm + break + if lastTable: + # XXX - we should really check that this parent is actually a + # node here + if lastTable.parent: + fosterParent = lastTable.parent + insertBefore = lastTable + else: + fosterParent = self.openElements[ + self.openElements.index(lastTable) - 1] + else: + fosterParent = self.openElements[0] + return fosterParent, insertBefore + + def generateImpliedEndTags(self, exclude=None): + name = self.openElements[-1].name + # XXX td, th and tr are not actually needed + if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and + name != exclude): + self.openElements.pop() + # XXX This is not entirely what the specification says. We should + # investigate it more closely. + self.generateImpliedEndTags(exclude) + + def getDocument(self): + """Return the final tree""" + return self.document + + def getFragment(self): + """Return the final fragment""" + # assert self.innerHTML + fragment = self.fragmentClass() + self.openElements[0].reparentChildren(fragment) + return fragment + + def testSerializer(self, node): + """Serialize the subtree of node in the format required by unit tests + + :arg node: the node from which to start serializing + + """ + raise NotImplementedError diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py new file mode 100644 index 00000000..ea92dc30 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py @@ -0,0 +1,343 @@ +from __future__ import absolute_import, division, unicode_literals +# pylint:disable=protected-access + +from pip._vendor.six import text_type + +import re + +from copy import copy + +from . import base +from .. import _ihatexml +from .. 
import constants +from ..constants import namespaces +from .._utils import moduleFactoryFactory + +tag_regexp = re.compile("{([^}]*)}(.*)") + + +def getETreeBuilder(ElementTreeImplementation, fullTree=False): + ElementTree = ElementTreeImplementation + ElementTreeCommentType = ElementTree.Comment("asd").tag + + class Element(base.Node): + def __init__(self, name, namespace=None): + self._name = name + self._namespace = namespace + self._element = ElementTree.Element(self._getETreeTag(name, + namespace)) + if namespace is None: + self.nameTuple = namespaces["html"], self._name + else: + self.nameTuple = self._namespace, self._name + self.parent = None + self._childNodes = [] + self._flags = [] + + def _getETreeTag(self, name, namespace): + if namespace is None: + etree_tag = name + else: + etree_tag = "{%s}%s" % (namespace, name) + return etree_tag + + def _setName(self, name): + self._name = name + self._element.tag = self._getETreeTag(self._name, self._namespace) + + def _getName(self): + return self._name + + name = property(_getName, _setName) + + def _setNamespace(self, namespace): + self._namespace = namespace + self._element.tag = self._getETreeTag(self._name, self._namespace) + + def _getNamespace(self): + return self._namespace + + namespace = property(_getNamespace, _setNamespace) + + def _getAttributes(self): + return self._element.attrib + + def _setAttributes(self, attributes): + el_attrib = self._element.attrib + el_attrib.clear() + if attributes: + # calling .items _always_ allocates, and the above truthy check is cheaper than the + # allocation on average + for key, value in attributes.items(): + if isinstance(key, tuple): + name = "{%s}%s" % (key[2], key[1]) + else: + name = key + el_attrib[name] = value + + attributes = property(_getAttributes, _setAttributes) + + def _getChildNodes(self): + return self._childNodes + + def _setChildNodes(self, value): + del self._element[:] + self._childNodes = [] + for element in value: + self.insertChild(element) + + childNodes = property(_getChildNodes, _setChildNodes) + + def hasContent(self): + """Return true if the node has children or text""" + return bool(self._element.text or len(self._element)) + + def appendChild(self, node): + self._childNodes.append(node) + self._element.append(node._element) + node.parent = self + + def insertBefore(self, node, refNode): + index = list(self._element).index(refNode._element) + self._element.insert(index, node._element) + node.parent = self + + def removeChild(self, node): + self._childNodes.remove(node) + self._element.remove(node._element) + node.parent = None + + def insertText(self, data, insertBefore=None): + if not(len(self._element)): + if not self._element.text: + self._element.text = "" + self._element.text += data + elif insertBefore is None: + # Insert the text as the tail of the last child element + if not self._element[-1].tail: + self._element[-1].tail = "" + self._element[-1].tail += data + else: + # Insert the text before the specified node + children = list(self._element) + index = children.index(insertBefore._element) + if index > 0: + if not self._element[index - 1].tail: + self._element[index - 1].tail = "" + self._element[index - 1].tail += data + else: + if not self._element.text: + self._element.text = "" + self._element.text += data + + def cloneNode(self): + element = type(self)(self.name, self.namespace) + if self._element.attrib: + element._element.attrib = copy(self._element.attrib) + return element + + def reparentChildren(self, newParent): + if 
newParent.childNodes: + newParent.childNodes[-1]._element.tail += self._element.text + else: + if not newParent._element.text: + newParent._element.text = "" + if self._element.text is not None: + newParent._element.text += self._element.text + self._element.text = "" + base.Node.reparentChildren(self, newParent) + + class Comment(Element): + def __init__(self, data): + # Use the superclass constructor to set all properties on the + # wrapper element + self._element = ElementTree.Comment(data) + self.parent = None + self._childNodes = [] + self._flags = [] + + def _getData(self): + return self._element.text + + def _setData(self, value): + self._element.text = value + + data = property(_getData, _setData) + + class DocumentType(Element): + def __init__(self, name, publicId, systemId): + Element.__init__(self, "") + self._element.text = name + self.publicId = publicId + self.systemId = systemId + + def _getPublicId(self): + return self._element.get("publicId", "") + + def _setPublicId(self, value): + if value is not None: + self._element.set("publicId", value) + + publicId = property(_getPublicId, _setPublicId) + + def _getSystemId(self): + return self._element.get("systemId", "") + + def _setSystemId(self, value): + if value is not None: + self._element.set("systemId", value) + + systemId = property(_getSystemId, _setSystemId) + + class Document(Element): + def __init__(self): + Element.__init__(self, "DOCUMENT_ROOT") + + class DocumentFragment(Element): + def __init__(self): + Element.__init__(self, "DOCUMENT_FRAGMENT") + + def testSerializer(element): + rv = [] + + def serializeElement(element, indent=0): + if not(hasattr(element, "tag")): + element = element.getroot() + if element.tag == "": + if element.get("publicId") or element.get("systemId"): + publicId = element.get("publicId") or "" + systemId = element.get("systemId") or "" + rv.append("""""" % + (element.text, publicId, systemId)) + else: + rv.append("" % (element.text,)) + elif element.tag == "DOCUMENT_ROOT": + rv.append("#document") + if element.text is not None: + rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) + if element.tail is not None: + raise TypeError("Document node cannot have tail") + if hasattr(element, "attrib") and len(element.attrib): + raise TypeError("Document node cannot have attributes") + elif element.tag == ElementTreeCommentType: + rv.append("|%s" % (' ' * indent, element.text)) + else: + assert isinstance(element.tag, text_type), \ + "Expected unicode, got %s, %s" % (type(element.tag), element.tag) + nsmatch = tag_regexp.match(element.tag) + + if nsmatch is None: + name = element.tag + else: + ns, name = nsmatch.groups() + prefix = constants.prefixes[ns] + name = "%s %s" % (prefix, name) + rv.append("|%s<%s>" % (' ' * indent, name)) + + if hasattr(element, "attrib"): + attributes = [] + for name, value in element.attrib.items(): + nsmatch = tag_regexp.match(name) + if nsmatch is not None: + ns, name = nsmatch.groups() + prefix = constants.prefixes[ns] + attr_string = "%s %s" % (prefix, name) + else: + attr_string = name + attributes.append((attr_string, value)) + + for name, value in sorted(attributes): + rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) + if element.text: + rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) + indent += 2 + for child in element: + serializeElement(child, indent) + if element.tail: + rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail)) + serializeElement(element, 0) + + return "\n".join(rv) + + def tostring(element): # 
pylint:disable=unused-variable + """Serialize an element and its child nodes to a string""" + rv = [] + filter = _ihatexml.InfosetFilter() + + def serializeElement(element): + if isinstance(element, ElementTree.ElementTree): + element = element.getroot() + + if element.tag == "": + if element.get("publicId") or element.get("systemId"): + publicId = element.get("publicId") or "" + systemId = element.get("systemId") or "" + rv.append("""""" % + (element.text, publicId, systemId)) + else: + rv.append("" % (element.text,)) + elif element.tag == "DOCUMENT_ROOT": + if element.text is not None: + rv.append(element.text) + if element.tail is not None: + raise TypeError("Document node cannot have tail") + if hasattr(element, "attrib") and len(element.attrib): + raise TypeError("Document node cannot have attributes") + + for child in element: + serializeElement(child) + + elif element.tag == ElementTreeCommentType: + rv.append("" % (element.text,)) + else: + # This is assumed to be an ordinary element + if not element.attrib: + rv.append("<%s>" % (filter.fromXmlName(element.tag),)) + else: + attr = " ".join(["%s=\"%s\"" % ( + filter.fromXmlName(name), value) + for name, value in element.attrib.items()]) + rv.append("<%s %s>" % (element.tag, attr)) + if element.text: + rv.append(element.text) + + for child in element: + serializeElement(child) + + rv.append("" % (element.tag,)) + + if element.tail: + rv.append(element.tail) + + serializeElement(element) + + return "".join(rv) + + class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable + documentClass = Document + doctypeClass = DocumentType + elementClass = Element + commentClass = Comment + fragmentClass = DocumentFragment + implementation = ElementTreeImplementation + + def testSerializer(self, element): + return testSerializer(element) + + def getDocument(self): + if fullTree: + return self.document._element + else: + if self.defaultNamespace is not None: + return self.document._element.find( + "{%s}html" % self.defaultNamespace) + else: + return self.document._element.find("html") + + def getFragment(self): + return base.TreeBuilder.getFragment(self)._element + + return locals() + + +getETreeModule = moduleFactoryFactory(getETreeBuilder) diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py new file mode 100644 index 00000000..f037759f --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py @@ -0,0 +1,392 @@ +"""Module for supporting the lxml.etree library. The idea here is to use as much +of the native library as possible, without using fragile hacks like custom element +names that break between releases. The downside of this is that we cannot represent +all possible trees; specifically the following are known to cause problems: + +Text or comments as siblings of the root element +Docypes with no name + +When any of these things occur, we emit a DataLossWarning +""" + +from __future__ import absolute_import, division, unicode_literals +# pylint:disable=protected-access + +import warnings +import re +import sys + +try: + from collections.abc import MutableMapping +except ImportError: + from collections import MutableMapping + +from . import base +from ..constants import DataLossWarning +from .. import constants +from . import etree as etree_builders +from .. 
import _ihatexml + +import lxml.etree as etree +from pip._vendor.six import PY3, binary_type + + +fullTree = True +tag_regexp = re.compile("{([^}]*)}(.*)") + +comment_type = etree.Comment("asd").tag + + +class DocumentType(object): + def __init__(self, name, publicId, systemId): + self.name = name + self.publicId = publicId + self.systemId = systemId + + +class Document(object): + def __init__(self): + self._elementTree = None + self._childNodes = [] + + def appendChild(self, element): + last = self._elementTree.getroot() + for last in self._elementTree.getroot().itersiblings(): + pass + + last.addnext(element._element) + + def _getChildNodes(self): + return self._childNodes + + childNodes = property(_getChildNodes) + + +def testSerializer(element): + rv = [] + infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) + + def serializeElement(element, indent=0): + if not hasattr(element, "tag"): + if hasattr(element, "getroot"): + # Full tree case + rv.append("#document") + if element.docinfo.internalDTD: + if not (element.docinfo.public_id or + element.docinfo.system_url): + dtd_str = "" % element.docinfo.root_name + else: + dtd_str = """""" % ( + element.docinfo.root_name, + element.docinfo.public_id, + element.docinfo.system_url) + rv.append("|%s%s" % (' ' * (indent + 2), dtd_str)) + next_element = element.getroot() + while next_element.getprevious() is not None: + next_element = next_element.getprevious() + while next_element is not None: + serializeElement(next_element, indent + 2) + next_element = next_element.getnext() + elif isinstance(element, str) or isinstance(element, bytes): + # Text in a fragment + assert isinstance(element, str) or sys.version_info[0] == 2 + rv.append("|%s\"%s\"" % (' ' * indent, element)) + else: + # Fragment case + rv.append("#document-fragment") + for next_element in element: + serializeElement(next_element, indent + 2) + elif element.tag == comment_type: + rv.append("|%s" % (' ' * indent, element.text)) + if hasattr(element, "tail") and element.tail: + rv.append("|%s\"%s\"" % (' ' * indent, element.tail)) + else: + assert isinstance(element, etree._Element) + nsmatch = etree_builders.tag_regexp.match(element.tag) + if nsmatch is not None: + ns = nsmatch.group(1) + tag = nsmatch.group(2) + prefix = constants.prefixes[ns] + rv.append("|%s<%s %s>" % (' ' * indent, prefix, + infosetFilter.fromXmlName(tag))) + else: + rv.append("|%s<%s>" % (' ' * indent, + infosetFilter.fromXmlName(element.tag))) + + if hasattr(element, "attrib"): + attributes = [] + for name, value in element.attrib.items(): + nsmatch = tag_regexp.match(name) + if nsmatch is not None: + ns, name = nsmatch.groups() + name = infosetFilter.fromXmlName(name) + prefix = constants.prefixes[ns] + attr_string = "%s %s" % (prefix, name) + else: + attr_string = infosetFilter.fromXmlName(name) + attributes.append((attr_string, value)) + + for name, value in sorted(attributes): + rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) + + if element.text: + rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) + indent += 2 + for child in element: + serializeElement(child, indent) + if hasattr(element, "tail") and element.tail: + rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail)) + serializeElement(element, 0) + + return "\n".join(rv) + + +def tostring(element): + """Serialize an element and its child nodes to a string""" + rv = [] + + def serializeElement(element): + if not hasattr(element, "tag"): + if element.docinfo.internalDTD: + if element.docinfo.doctype: + 
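+                # docinfo.doctype holds the original doctype string when the parsed
+                # document had one; it is reused verbatim here, and the else branch
+                # below synthesises a minimal doctype naming the root element instead.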
dtd_str = element.docinfo.doctype + else: + dtd_str = "" % element.docinfo.root_name + rv.append(dtd_str) + serializeElement(element.getroot()) + + elif element.tag == comment_type: + rv.append("" % (element.text,)) + + else: + # This is assumed to be an ordinary element + if not element.attrib: + rv.append("<%s>" % (element.tag,)) + else: + attr = " ".join(["%s=\"%s\"" % (name, value) + for name, value in element.attrib.items()]) + rv.append("<%s %s>" % (element.tag, attr)) + if element.text: + rv.append(element.text) + + for child in element: + serializeElement(child) + + rv.append("" % (element.tag,)) + + if hasattr(element, "tail") and element.tail: + rv.append(element.tail) + + serializeElement(element) + + return "".join(rv) + + +class TreeBuilder(base.TreeBuilder): + documentClass = Document + doctypeClass = DocumentType + elementClass = None + commentClass = None + fragmentClass = Document + implementation = etree + + def __init__(self, namespaceHTMLElements, fullTree=False): + builder = etree_builders.getETreeModule(etree, fullTree=fullTree) + infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) + self.namespaceHTMLElements = namespaceHTMLElements + + class Attributes(MutableMapping): + def __init__(self, element): + self._element = element + + def _coerceKey(self, key): + if isinstance(key, tuple): + name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) + else: + name = infosetFilter.coerceAttribute(key) + return name + + def __getitem__(self, key): + value = self._element._element.attrib[self._coerceKey(key)] + if not PY3 and isinstance(value, binary_type): + value = value.decode("ascii") + return value + + def __setitem__(self, key, value): + self._element._element.attrib[self._coerceKey(key)] = value + + def __delitem__(self, key): + del self._element._element.attrib[self._coerceKey(key)] + + def __iter__(self): + return iter(self._element._element.attrib) + + def __len__(self): + return len(self._element._element.attrib) + + def clear(self): + return self._element._element.attrib.clear() + + class Element(builder.Element): + def __init__(self, name, namespace): + name = infosetFilter.coerceElement(name) + builder.Element.__init__(self, name, namespace=namespace) + self._attributes = Attributes(self) + + def _setName(self, name): + self._name = infosetFilter.coerceElement(name) + self._element.tag = self._getETreeTag( + self._name, self._namespace) + + def _getName(self): + return infosetFilter.fromXmlName(self._name) + + name = property(_getName, _setName) + + def _getAttributes(self): + return self._attributes + + def _setAttributes(self, value): + attributes = self.attributes + attributes.clear() + attributes.update(value) + + attributes = property(_getAttributes, _setAttributes) + + def insertText(self, data, insertBefore=None): + data = infosetFilter.coerceCharacters(data) + builder.Element.insertText(self, data, insertBefore) + + def cloneNode(self): + element = type(self)(self.name, self.namespace) + if self._element.attrib: + element._element.attrib.update(self._element.attrib) + return element + + class Comment(builder.Comment): + def __init__(self, data): + data = infosetFilter.coerceComment(data) + builder.Comment.__init__(self, data) + + def _setData(self, data): + data = infosetFilter.coerceComment(data) + self._element.text = data + + def _getData(self): + return self._element.text + + data = property(_getData, _setData) + + self.elementClass = Element + self.commentClass = Comment + # self.fragmentClass = 
builder.DocumentFragment + base.TreeBuilder.__init__(self, namespaceHTMLElements) + + def reset(self): + base.TreeBuilder.reset(self) + self.insertComment = self.insertCommentInitial + self.initial_comments = [] + self.doctype = None + + def testSerializer(self, element): + return testSerializer(element) + + def getDocument(self): + if fullTree: + return self.document._elementTree + else: + return self.document._elementTree.getroot() + + def getFragment(self): + fragment = [] + element = self.openElements[0]._element + if element.text: + fragment.append(element.text) + fragment.extend(list(element)) + if element.tail: + fragment.append(element.tail) + return fragment + + def insertDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + + if not name: + warnings.warn("lxml cannot represent empty doctype", DataLossWarning) + self.doctype = None + else: + coercedName = self.infosetFilter.coerceElement(name) + if coercedName != name: + warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning) + + doctype = self.doctypeClass(coercedName, publicId, systemId) + self.doctype = doctype + + def insertCommentInitial(self, data, parent=None): + assert parent is None or parent is self.document + assert self.document._elementTree is None + self.initial_comments.append(data) + + def insertCommentMain(self, data, parent=None): + if (parent == self.document and + self.document._elementTree.getroot()[-1].tag == comment_type): + warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning) + super(TreeBuilder, self).insertComment(data, parent) + + def insertRoot(self, token): + # Because of the way libxml2 works, it doesn't seem to be possible to + # alter information like the doctype after the tree has been parsed. 
+ # Therefore we need to use the built-in parser to create our initial + # tree, after which we can add elements like normal + docStr = "" + if self.doctype: + assert self.doctype.name + docStr += "= 0 and sysid.find('"') >= 0: + warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning) + sysid = sysid.replace("'", 'U00027') + if sysid.find("'") >= 0: + docStr += '"%s"' % sysid + else: + docStr += "'%s'" % sysid + else: + docStr += "''" + docStr += ">" + if self.doctype.name != token["name"]: + warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning) + docStr += "" + root = etree.fromstring(docStr) + + # Append the initial comments: + for comment_token in self.initial_comments: + comment = self.commentClass(comment_token["data"]) + root.addprevious(comment._element) + + # Create the root document and add the ElementTree to it + self.document = self.documentClass() + self.document._elementTree = root.getroottree() + + # Give the root element the right name + name = token["name"] + namespace = token.get("namespace", self.defaultNamespace) + if namespace is None: + etree_tag = name + else: + etree_tag = "{%s}%s" % (namespace, name) + root.tag = etree_tag + + # Add the root element to the internal child/open data structures + root_element = self.elementClass(name, namespace) + root_element._element = root + self.document._childNodes.append(root_element) + self.openElements.append(root_element) + + # Reset to the default insert comment function + self.insertComment = self.insertCommentMain diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py new file mode 100644 index 00000000..b2d3aac3 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py @@ -0,0 +1,154 @@ +"""A collection of modules for iterating through different kinds of +tree, generating tokens identical to those produced by the tokenizer +module. + +To create a tree walker for a new type of tree, you need to +implement a tree walker object (called TreeWalker by convention) that +implements a 'serialize' method which takes a tree as sole argument and +returns an iterator which generates tokens. +""" + +from __future__ import absolute_import, division, unicode_literals + +from .. import constants +from .._utils import default_etree + +__all__ = ["getTreeWalker", "pprint"] + +treeWalkerCache = {} + + +def getTreeWalker(treeType, implementation=None, **kwargs): + """Get a TreeWalker class for various types of tree with built-in support + + :arg str treeType: the name of the tree type required (case-insensitive). + Supported values are: + + * "dom": The xml.dom.minidom DOM implementation + * "etree": A generic walker for tree implementations exposing an + elementtree-like interface (known to work with ElementTree, + cElementTree and lxml.etree). + * "lxml": Optimized walker for lxml.etree + * "genshi": a Genshi stream + + :arg implementation: A module implementing the tree type e.g. + xml.etree.ElementTree or cElementTree (Currently applies to the "etree" + tree type only). + + :arg kwargs: keyword arguments passed to the etree walker--for other + walkers, this has no effect + + :returns: a TreeWalker class + + """ + + treeType = treeType.lower() + if treeType not in treeWalkerCache: + if treeType == "dom": + from . import dom + treeWalkerCache[treeType] = dom.TreeWalker + elif treeType == "genshi": + from . 
import genshi + treeWalkerCache[treeType] = genshi.TreeWalker + elif treeType == "lxml": + from . import etree_lxml + treeWalkerCache[treeType] = etree_lxml.TreeWalker + elif treeType == "etree": + from . import etree + if implementation is None: + implementation = default_etree + # XXX: NEVER cache here, caching is done in the etree submodule + return etree.getETreeModule(implementation, **kwargs).TreeWalker + return treeWalkerCache.get(treeType) + + +def concatenateCharacterTokens(tokens): + pendingCharacters = [] + for token in tokens: + type = token["type"] + if type in ("Characters", "SpaceCharacters"): + pendingCharacters.append(token["data"]) + else: + if pendingCharacters: + yield {"type": "Characters", "data": "".join(pendingCharacters)} + pendingCharacters = [] + yield token + if pendingCharacters: + yield {"type": "Characters", "data": "".join(pendingCharacters)} + + +def pprint(walker): + """Pretty printer for tree walkers + + Takes a TreeWalker instance and pretty prints the output of walking the tree. + + :arg walker: a TreeWalker instance + + """ + output = [] + indent = 0 + for token in concatenateCharacterTokens(walker): + type = token["type"] + if type in ("StartTag", "EmptyTag"): + # tag name + if token["namespace"] and token["namespace"] != constants.namespaces["html"]: + if token["namespace"] in constants.prefixes: + ns = constants.prefixes[token["namespace"]] + else: + ns = token["namespace"] + name = "%s %s" % (ns, token["name"]) + else: + name = token["name"] + output.append("%s<%s>" % (" " * indent, name)) + indent += 2 + # attributes (sorted for consistent ordering) + attrs = token["data"] + for (namespace, localname), value in sorted(attrs.items()): + if namespace: + if namespace in constants.prefixes: + ns = constants.prefixes[namespace] + else: + ns = namespace + name = "%s %s" % (ns, localname) + else: + name = localname + output.append("%s%s=\"%s\"" % (" " * indent, name, value)) + # self-closing + if type == "EmptyTag": + indent -= 2 + + elif type == "EndTag": + indent -= 2 + + elif type == "Comment": + output.append("%s" % (" " * indent, token["data"])) + + elif type == "Doctype": + if token["name"]: + if token["publicId"]: + output.append("""%s""" % + (" " * indent, + token["name"], + token["publicId"], + token["systemId"] if token["systemId"] else "")) + elif token["systemId"]: + output.append("""%s""" % + (" " * indent, + token["name"], + token["systemId"])) + else: + output.append("%s" % (" " * indent, + token["name"])) + else: + output.append("%s" % (" " * indent,)) + + elif type == "Characters": + output.append("%s\"%s\"" % (" " * indent, token["data"])) + + elif type == "SpaceCharacters": + assert False, "concatenateCharacterTokens should have got rid of all Space tokens" + + else: + raise ValueError("Unknown token type, %s" % type) + + return "\n".join(output) diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..a8f7f696 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-36.pyc new file mode 100644 index 00000000..dfc25863 Binary files /dev/null and 
b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-36.pyc new file mode 100644 index 00000000..a574dec6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-36.pyc new file mode 100644 index 00000000..64685f66 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-36.pyc new file mode 100644 index 00000000..27bdaaf2 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-36.pyc new file mode 100644 index 00000000..a96ea97c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/base.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/base.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/base.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/base.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/dom.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/dom.py diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py new file mode 100644 index 00000000..837b27ec --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py @@ -0,0 +1,131 @@ +from __future__ import absolute_import, division, unicode_literals + +from collections import OrderedDict +import re + +from pip._vendor.six import string_types + +from . import base +from .._utils import moduleFactoryFactory + +tag_regexp = re.compile("{([^}]*)}(.*)") + + +def getETreeBuilder(ElementTreeImplementation): + ElementTree = ElementTreeImplementation + ElementTreeCommentType = ElementTree.Comment("asd").tag + + class TreeWalker(base.NonRecursiveTreeWalker): # pylint:disable=unused-variable + """Given the particular ElementTree representation, this implementation, + to avoid using recursion, returns "nodes" as tuples with the following + content: + + 1. The current element + + 2. The index of the element relative to its parent + + 3. A stack of ancestor elements + + 4. 
A flag "text", "tail" or None to indicate if the current node is a + text node; either the text or tail of the current element (1) + """ + def getNodeDetails(self, node): + if isinstance(node, tuple): # It might be the root Element + elt, _, _, flag = node + if flag in ("text", "tail"): + return base.TEXT, getattr(elt, flag) + else: + node = elt + + if not(hasattr(node, "tag")): + node = node.getroot() + + if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"): + return (base.DOCUMENT,) + + elif node.tag == "": + return (base.DOCTYPE, node.text, + node.get("publicId"), node.get("systemId")) + + elif node.tag == ElementTreeCommentType: + return base.COMMENT, node.text + + else: + assert isinstance(node.tag, string_types), type(node.tag) + # This is assumed to be an ordinary element + match = tag_regexp.match(node.tag) + if match: + namespace, tag = match.groups() + else: + namespace = None + tag = node.tag + attrs = OrderedDict() + for name, value in list(node.attrib.items()): + match = tag_regexp.match(name) + if match: + attrs[(match.group(1), match.group(2))] = value + else: + attrs[(None, name)] = value + return (base.ELEMENT, namespace, tag, + attrs, len(node) or node.text) + + def getFirstChild(self, node): + if isinstance(node, tuple): + element, key, parents, flag = node + else: + element, key, parents, flag = node, None, [], None + + if flag in ("text", "tail"): + return None + else: + if element.text: + return element, key, parents, "text" + elif len(element): + parents.append(element) + return element[0], 0, parents, None + else: + return None + + def getNextSibling(self, node): + if isinstance(node, tuple): + element, key, parents, flag = node + else: + return None + + if flag == "text": + if len(element): + parents.append(element) + return element[0], 0, parents, None + else: + return None + else: + if element.tail and flag != "tail": + return element, key, parents, "tail" + elif key < len(parents[-1]) - 1: + return parents[-1][key + 1], key + 1, parents, None + else: + return None + + def getParentNode(self, node): + if isinstance(node, tuple): + element, key, parents, flag = node + else: + return None + + if flag == "text": + if not parents: + return element + else: + return element, key, parents, None + else: + parent = parents.pop() + if not parents: + return parent + else: + assert list(parents[-1]).count(parent) == 1 + return parent, list(parents[-1]).index(parent), parents, None + + return locals() + + +getETreeModule = moduleFactoryFactory(getETreeBuilder) diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py new file mode 100644 index 00000000..c56af390 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py @@ -0,0 +1,215 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import text_type + +from collections import OrderedDict + +from lxml import etree +from ..treebuilders.etree import tag_regexp + +from . import base + +from .. 
import _ihatexml + + +def ensure_str(s): + if s is None: + return None + elif isinstance(s, text_type): + return s + else: + return s.decode("ascii", "strict") + + +class Root(object): + def __init__(self, et): + self.elementtree = et + self.children = [] + + try: + if et.docinfo.internalDTD: + self.children.append(Doctype(self, + ensure_str(et.docinfo.root_name), + ensure_str(et.docinfo.public_id), + ensure_str(et.docinfo.system_url))) + except AttributeError: + pass + + try: + node = et.getroot() + except AttributeError: + node = et + + while node.getprevious() is not None: + node = node.getprevious() + while node is not None: + self.children.append(node) + node = node.getnext() + + self.text = None + self.tail = None + + def __getitem__(self, key): + return self.children[key] + + def getnext(self): + return None + + def __len__(self): + return 1 + + +class Doctype(object): + def __init__(self, root_node, name, public_id, system_id): + self.root_node = root_node + self.name = name + self.public_id = public_id + self.system_id = system_id + + self.text = None + self.tail = None + + def getnext(self): + return self.root_node.children[1] + + +class FragmentRoot(Root): + def __init__(self, children): + self.children = [FragmentWrapper(self, child) for child in children] + self.text = self.tail = None + + def getnext(self): + return None + + +class FragmentWrapper(object): + def __init__(self, fragment_root, obj): + self.root_node = fragment_root + self.obj = obj + if hasattr(self.obj, 'text'): + self.text = ensure_str(self.obj.text) + else: + self.text = None + if hasattr(self.obj, 'tail'): + self.tail = ensure_str(self.obj.tail) + else: + self.tail = None + + def __getattr__(self, name): + return getattr(self.obj, name) + + def getnext(self): + siblings = self.root_node.children + idx = siblings.index(self) + if idx < len(siblings) - 1: + return siblings[idx + 1] + else: + return None + + def __getitem__(self, key): + return self.obj[key] + + def __bool__(self): + return bool(self.obj) + + def getparent(self): + return None + + def __str__(self): + return str(self.obj) + + def __unicode__(self): + return str(self.obj) + + def __len__(self): + return len(self.obj) + + +class TreeWalker(base.NonRecursiveTreeWalker): + def __init__(self, tree): + # pylint:disable=redefined-variable-type + if isinstance(tree, list): + self.fragmentChildren = set(tree) + tree = FragmentRoot(tree) + else: + self.fragmentChildren = set() + tree = Root(tree) + base.NonRecursiveTreeWalker.__init__(self, tree) + self.filter = _ihatexml.InfosetFilter() + + def getNodeDetails(self, node): + if isinstance(node, tuple): # Text node + node, key = node + assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key + return base.TEXT, ensure_str(getattr(node, key)) + + elif isinstance(node, Root): + return (base.DOCUMENT,) + + elif isinstance(node, Doctype): + return base.DOCTYPE, node.name, node.public_id, node.system_id + + elif isinstance(node, FragmentWrapper) and not hasattr(node, "tag"): + return base.TEXT, ensure_str(node.obj) + + elif node.tag == etree.Comment: + return base.COMMENT, ensure_str(node.text) + + elif node.tag == etree.Entity: + return base.ENTITY, ensure_str(node.text)[1:-1] # strip &; + + else: + # This is assumed to be an ordinary element + match = tag_regexp.match(ensure_str(node.tag)) + if match: + namespace, tag = match.groups() + else: + namespace = None + tag = ensure_str(node.tag) + attrs = OrderedDict() + for name, value in list(node.attrib.items()): + name = ensure_str(name) 
+ value = ensure_str(value) + match = tag_regexp.match(name) + if match: + attrs[(match.group(1), match.group(2))] = value + else: + attrs[(None, name)] = value + return (base.ELEMENT, namespace, self.filter.fromXmlName(tag), + attrs, len(node) > 0 or node.text) + + def getFirstChild(self, node): + assert not isinstance(node, tuple), "Text nodes have no children" + + assert len(node) or node.text, "Node has no children" + if node.text: + return (node, "text") + else: + return node[0] + + def getNextSibling(self, node): + if isinstance(node, tuple): # Text node + node, key = node + assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key + if key == "text": + # XXX: we cannot use a "bool(node) and node[0] or None" construct here + # because node[0] might evaluate to False if it has no child element + if len(node): + return node[0] + else: + return None + else: # tail + return node.getnext() + + return (node, "tail") if node.tail else node.getnext() + + def getParentNode(self, node): + if isinstance(node, tuple): # Text node + node, key = node + assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key + if key == "text": + return node + # else: fallback to "normal" processing + elif node in self.fragmentChildren: + return None + + return node.getparent() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py b/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py rename to venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/__init__.py b/venv/Lib/site-packages/pip/_vendor/idna/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/idna/__init__.py rename to venv/Lib/site-packages/pip/_vendor/idna/__init__.py diff --git a/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..3adac580 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-36.pyc new file mode 100644 index 00000000..4d5f037b Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-36.pyc new file mode 100644 index 00000000..ea4ad9f9 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/core.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/core.cpython-36.pyc new file mode 100644 index 00000000..37a6062c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/core.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-36.pyc new file mode 100644 index 00000000..ceed4d40 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-36.pyc new file mode 100644 index 00000000..0d494f89 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-36.pyc new file mode 100644 index 00000000..d9751e27 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-36.pyc new file mode 100644 index 00000000..ecb4f6ea Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/idna/codec.py b/venv/Lib/site-packages/pip/_vendor/idna/codec.py new file mode 100644 index 00000000..30fe72fb --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/idna/codec.py @@ -0,0 +1,110 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re + +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return "", 0 + + return encode(data), len(data) + + def decode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return '', 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data, errors, final): + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return ('', 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result = '.'.join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data, errors, final): + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return ('', 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' 
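+        # `labels` now holds only complete labels: an unfinished final label was
+        # deferred to the next call above, and any separator already consumed is
+        # restored through `trailing_dot` once the decoded labels are joined below.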
+ + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result = '.'.join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +def getregentry(): + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/venv/Lib/site-packages/pip/_vendor/idna/compat.py b/venv/Lib/site-packages/pip/_vendor/idna/compat.py new file mode 100644 index 00000000..2e622d6f --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/idna/compat.py @@ -0,0 +1,12 @@ +from .core import * +from .codec import * + +def ToASCII(label): + return encode(label) + +def ToUnicode(label): + return decode(label) + +def nameprep(s): + raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') + diff --git a/venv/Lib/site-packages/pip/_vendor/idna/core.py b/venv/Lib/site-packages/pip/_vendor/idna/core.py new file mode 100644 index 00000000..2c193d63 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/idna/core.py @@ -0,0 +1,396 @@ +from . import idnadata +import bisect +import unicodedata +import re +import sys +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp): + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError('Unknown character in unicodedata') + return v + +def _is_script(cp, script): + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s): + return s.encode('punycode') + +def _unot(s): + return 'U+{:04X}'.format(s) + + +def valid_label_length(label): + + if len(label) > 63: + return False + return True + + +def valid_string_length(label, trailing_dot): + + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label, check_ltr=False): + + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # 
Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label): + + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label): + + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label): + + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label, pos): + + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label, pos, exception=False): + + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == '\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + +def check_label(label): + + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + 
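+    # The remaining checks reject labels that start with a combining mark, then
+    # classify each codepoint as PVALID, CONTEXTJ or CONTEXTO (enforcing the
+    # contextual rules), and finally apply the Bidi rules from RFC 5893.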
check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label): + + try: + label = label.encode('ascii') + ulabel(label) + if not valid_label_length(label): + raise IDNAError('Label too long') + return label + except UnicodeEncodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = str(label) + check_label(label) + label = _punycode(label) + label = _alabel_prefix + label + + if not valid_label_length(label): + raise IDNAError('Label too long') + + return label + + +def ulabel(label): + + if not isinstance(label, (bytes, bytearray)): + try: + label = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + + label = label.lower() + if label.startswith(_alabel_prefix): + label = label[len(_alabel_prefix):] + if not label: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') + else: + check_label(label) + return label.decode('ascii') + + label = label.decode('punycode') + check_label(label) + return label + + +def uts46_remap(domain, std3_rules=True, transitional=False): + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = '' + try: + for pos, char in enumerate(domain): + code_point = ord(char) + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, 'Z')) - 1] + status = uts46row[1] + replacement = uts46row[2] if len(uts46row) == 3 else None + if (status == 'V' or + (status == 'D' and not transitional) or + (status == '3' and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == 'M' or + (status == '3' and not std3_rules) or + (status == 'D' and transitional)): + output += replacement + elif status != 'I': + raise IndexError() + return unicodedata.normalize('NFC', output) + except IndexError: + raise InvalidCodepoint( + 'Codepoint {} not allowed at position {} in {}'.format( + _unot(code_point), pos + 1, repr(domain))) + + +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode('ascii') + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise 
IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s, strict=False, uts46=False, std3_rules=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode('ascii') + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split('.') + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append('') + return '.'.join(result) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/idnadata.py b/venv/Lib/site-packages/pip/_vendor/idna/idnadata.py similarity index 94% rename from env/lib/python2.7/site-packages/pip/_vendor/idna/idnadata.py rename to venv/Lib/site-packages/pip/_vendor/idna/idnadata.py index a80c959d..b86a3e06 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/idna/idnadata.py +++ b/venv/Lib/site-packages/pip/_vendor/idna/idnadata.py @@ -1,6 +1,6 @@ # This file is automatically generated by tools/idna-data -__version__ = "11.0.0" +__version__ = '13.0.0' scripts = { 'Greek': ( 0x37000000374, @@ -48,16 +48,18 @@ 0x300700003008, 0x30210000302a, 0x30380000303c, - 0x340000004db6, - 0x4e0000009ff0, + 0x340000004dc0, + 0x4e0000009ffd, 0xf9000000fa6e, 0xfa700000fada, - 0x200000002a6d7, + 0x16ff000016ff2, + 0x200000002a6de, 0x2a7000002b735, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, 0x2f8000002fa1e, + 0x300000003134b, ), 'Hebrew': ( 0x591000005c8, @@ -74,6 +76,7 @@ 0x304100003097, 0x309d000030a0, 0x1b0010001b11f, + 0x1b1500001b153, 0x1f2000001f201, ), 'Katakana': ( @@ -85,6 +88,7 @@ 0xff660000ff70, 0xff710000ff9e, 0x1b0000001b001, + 0x1b1640001b168, ), } joining_types = { @@ -387,9 +391,9 @@ 0x853: 68, 0x854: 82, 0x855: 68, - 0x856: 85, - 0x857: 85, - 0x858: 85, + 0x856: 82, + 0x857: 82, + 0x858: 82, 0x860: 68, 0x861: 85, 0x862: 68, @@ -430,6 +434,16 @@ 0x8bb: 68, 0x8bc: 68, 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, 0x8e2: 85, 0x1806: 85, 0x1807: 68, @@ -754,6 +768,34 @@ 0x10f52: 68, 0x10f53: 68, 0x10f54: 82, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, 0x110bd: 85, 0x110cd: 85, 0x1e900: 68, @@ -824,6 +866,7 @@ 0x1e941: 68, 0x1e942: 68, 0x1e943: 68, + 0x1e94b: 84, } codepoint_classes = { 'PVALID': ( @@ -1126,7 +1169,7 @@ 0x8400000085c, 0x8600000086b, 0x8a0000008b5, - 0x8b6000008be, + 0x8b6000008c8, 0x8d3000008e2, 0x8e300000958, 0x96000000964, @@ -1185,7 +1228,7 @@ 0xb3c00000b45, 0xb4700000b49, 0xb4b00000b4e, - 0xb5600000b58, + 0xb5500000b58, 0xb5f00000b64, 0xb6600000b70, 0xb7100000b72, @@ -1230,8 +1273,7 @@ 0xce000000ce4, 0xce600000cf0, 0xcf100000cf3, - 0xd0000000d04, - 0xd0500000d0d, + 0xd0000000d0d, 0xd0e00000d11, 0xd1200000d45, 0xd4600000d49, @@ -1240,7 +1282,7 @@ 0xd5f00000d64, 0xd6600000d70, 0xd7a00000d80, - 0xd8200000d84, + 
0xd8100000d84, 0xd8500000d97, 0xd9a00000db2, 0xdb300000dbc, @@ -1258,18 +1300,11 @@ 0xe5000000e5a, 0xe8100000e83, 0xe8400000e85, - 0xe8700000e89, - 0xe8a00000e8b, - 0xe8d00000e8e, - 0xe9400000e98, - 0xe9900000ea0, - 0xea100000ea4, + 0xe8600000e8b, + 0xe8c00000ea4, 0xea500000ea6, - 0xea700000ea8, - 0xeaa00000eac, - 0xead00000eb3, - 0xeb400000eba, - 0xebb00000ebe, + 0xea700000eb3, + 0xeb400000ebe, 0xec000000ec5, 0xec600000ec7, 0xec800000ece, @@ -1362,6 +1397,7 @@ 0x1a9000001a9a, 0x1aa700001aa8, 0x1ab000001abe, + 0x1abf00001ac1, 0x1b0000001b4c, 0x1b5000001b5a, 0x1b6b00001b74, @@ -1370,7 +1406,7 @@ 0x1c4000001c4a, 0x1c4d00001c7e, 0x1cd000001cd3, - 0x1cd400001cfa, + 0x1cd400001cfb, 0x1d0000001d2c, 0x1d2f00001d30, 0x1d3b00001d3c, @@ -1613,10 +1649,10 @@ 0x30a1000030fb, 0x30fc000030ff, 0x310500003130, - 0x31a0000031bb, + 0x31a0000031c0, 0x31f000003200, - 0x340000004db6, - 0x4e0000009ff0, + 0x340000004dc0, + 0x4e0000009ffd, 0xa0000000a48d, 0xa4d00000a4fe, 0xa5000000a60d, @@ -1727,8 +1763,15 @@ 0xa7b50000a7b6, 0xa7b70000a7b8, 0xa7b90000a7ba, - 0xa7f70000a7f8, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7f60000a7f8, 0xa7fa0000a828, + 0xa82c0000a82d, 0xa8400000a874, 0xa8800000a8c6, 0xa8d00000a8da, @@ -1753,7 +1796,7 @@ 0xab200000ab27, 0xab280000ab2f, 0xab300000ab5b, - 0xab600000ab66, + 0xab600000ab6a, 0xabc00000abeb, 0xabec0000abee, 0xabf00000abfa, @@ -1827,9 +1870,14 @@ 0x10cc000010cf3, 0x10d0000010d28, 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, 0x10f0000010f1d, 0x10f2700010f28, 0x10f3000010f51, + 0x10fb000010fc5, + 0x10fe000010ff7, 0x1100000011047, 0x1106600011070, 0x1107f000110bb, @@ -1837,12 +1885,12 @@ 0x110f0000110fa, 0x1110000011135, 0x1113600011140, - 0x1114400011147, + 0x1114400011148, 0x1115000011174, 0x1117600011177, 0x11180000111c5, 0x111c9000111cd, - 0x111d0000111db, + 0x111ce000111db, 0x111dc000111dd, 0x1120000011212, 0x1121300011238, @@ -1871,7 +1919,7 @@ 0x1137000011375, 0x114000001144b, 0x114500001145a, - 0x1145e0001145f, + 0x1145e00011462, 0x11480000114c6, 0x114c7000114c8, 0x114d0000114da, @@ -1881,18 +1929,28 @@ 0x1160000011641, 0x1164400011645, 0x116500001165a, - 0x11680000116b8, + 0x11680000116b9, 0x116c0000116ca, 0x117000001171b, 0x1171d0001172c, 0x117300001173a, 0x118000001183b, 0x118c0000118ea, - 0x118ff00011900, + 0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, 0x11a0000011a3f, 0x11a4700011a48, - 0x11a5000011a84, - 0x11a8600011a9a, + 0x11a5000011a9a, 0x11a9d00011a9e, 0x11ac000011af9, 0x11c0000011c09, @@ -1916,6 +1974,7 @@ 0x11d9300011d99, 0x11da000011daa, 0x11ee000011ef7, + 0x11fb000011fb1, 0x120000001239a, 0x1248000012544, 0x130000001342f, @@ -1931,13 +1990,18 @@ 0x16b6300016b78, 0x16b7d00016b90, 0x16e6000016e80, - 0x16f0000016f45, - 0x16f5000016f7f, + 0x16f0000016f4b, + 0x16f4f00016f88, 0x16f8f00016fa0, 0x16fe000016fe2, - 0x17000000187f2, - 0x1880000018af3, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, 0x1b0000001b11f, + 0x1b1500001b153, + 0x1b1640001b168, 0x1b1700001b2fc, 0x1bc000001bc6b, 0x1bc700001bc7d, @@ -1955,15 +2019,22 @@ 0x1e01b0001e022, 0x1e0230001e025, 0x1e0260001e02b, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2c00001e2fa, 0x1e8000001e8c5, 0x1e8d00001e8d7, - 0x1e9220001e94b, + 0x1e9220001e94c, 
0x1e9500001e95a, - 0x200000002a6d7, + 0x1fbf00001fbfa, + 0x200000002a6de, 0x2a7000002b735, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, + 0x300000003134b, ), 'CONTEXTJ': ( 0x200c0000200e, diff --git a/env/lib/python2.7/site-packages/pip/_vendor/idna/intranges.py b/venv/Lib/site-packages/pip/_vendor/idna/intranges.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/idna/intranges.py rename to venv/Lib/site-packages/pip/_vendor/idna/intranges.py diff --git a/venv/Lib/site-packages/pip/_vendor/idna/package_data.py b/venv/Lib/site-packages/pip/_vendor/idna/package_data.py new file mode 100644 index 00000000..1420ea2f --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/idna/package_data.py @@ -0,0 +1,2 @@ +__version__ = '3.1' + diff --git a/venv/Lib/site-packages/pip/_vendor/idna/uts46data.py b/venv/Lib/site-packages/pip/_vendor/idna/uts46data.py new file mode 100644 index 00000000..8ae36cbe --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/idna/uts46data.py @@ -0,0 +1,8356 @@ +# This file is automatically generated by tools/idna-data + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = '13.0.0' +def _seg_0(): + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', 'a'), + (0x42, 'M', 'b'), + (0x43, 'M', 'c'), + (0x44, 'M', 'd'), + (0x45, 'M', 'e'), + (0x46, 'M', 'f'), + (0x47, 'M', 'g'), + (0x48, 'M', 'h'), + (0x49, 'M', 'i'), + (0x4A, 'M', 'j'), + (0x4B, 'M', 'k'), + (0x4C, 'M', 'l'), + (0x4D, 'M', 'm'), + (0x4E, 'M', 'n'), + (0x4F, 'M', 'o'), + (0x50, 'M', 'p'), + (0x51, 'M', 'q'), + (0x52, 'M', 'r'), + (0x53, 'M', 's'), + (0x54, 'M', 't'), + (0x55, 'M', 'u'), + (0x56, 'M', 'v'), + (0x57, 'M', 'w'), + (0x58, 'M', 'x'), + (0x59, 'M', 'y'), + (0x5A, 'M', 'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1(): + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + 
(0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', ' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', ' ̈'), + (0xA9, 'V'), + (0xAA, 'M', 'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', ' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', '2'), + (0xB3, 'M', '3'), + (0xB4, '3', ' ́'), + (0xB5, 'M', 'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', ' ̧'), + (0xB9, 'M', '1'), + (0xBA, 'M', 'o'), + (0xBB, 'V'), + (0xBC, 'M', '1⁄4'), + (0xBD, 'M', '1⁄2'), + (0xBE, 'M', '3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', 'à'), + (0xC1, 'M', 'á'), + (0xC2, 'M', 'â'), + (0xC3, 'M', 'ã'), + (0xC4, 'M', 'ä'), + (0xC5, 'M', 'å'), + (0xC6, 'M', 'æ'), + (0xC7, 'M', 'ç'), + ] + +def _seg_2(): + return [ + (0xC8, 'M', 'è'), + (0xC9, 'M', 'é'), + (0xCA, 'M', 'ê'), + (0xCB, 'M', 'ë'), + (0xCC, 'M', 'ì'), + (0xCD, 'M', 'í'), + (0xCE, 'M', 'î'), + (0xCF, 'M', 'ï'), + (0xD0, 'M', 'ð'), + (0xD1, 'M', 'ñ'), + (0xD2, 'M', 'ò'), + (0xD3, 'M', 'ó'), + (0xD4, 'M', 'ô'), + (0xD5, 'M', 'õ'), + (0xD6, 'M', 'ö'), + (0xD7, 'V'), + (0xD8, 'M', 'ø'), + (0xD9, 'M', 'ù'), + (0xDA, 'M', 'ú'), + (0xDB, 'M', 'û'), + (0xDC, 'M', 'ü'), + (0xDD, 'M', 'ý'), + (0xDE, 'M', 'þ'), + (0xDF, 'D', 'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', 'ā'), + (0x101, 'V'), + (0x102, 'M', 'ă'), + (0x103, 'V'), + (0x104, 'M', 'ą'), + (0x105, 'V'), + (0x106, 'M', 'ć'), + (0x107, 'V'), + (0x108, 'M', 'ĉ'), + (0x109, 'V'), + (0x10A, 'M', 'ċ'), + (0x10B, 'V'), + (0x10C, 'M', 'č'), + (0x10D, 'V'), + (0x10E, 'M', 'ď'), + (0x10F, 'V'), + (0x110, 'M', 'đ'), + (0x111, 'V'), + (0x112, 'M', 'ē'), + (0x113, 'V'), + (0x114, 'M', 'ĕ'), + (0x115, 'V'), + (0x116, 'M', 'ė'), + (0x117, 'V'), + (0x118, 'M', 'ę'), + (0x119, 'V'), + (0x11A, 'M', 'ě'), + (0x11B, 'V'), + (0x11C, 'M', 'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', 'ğ'), + (0x11F, 'V'), + (0x120, 'M', 'ġ'), + (0x121, 'V'), + (0x122, 'M', 'ģ'), + (0x123, 'V'), + (0x124, 'M', 'ĥ'), + (0x125, 'V'), + (0x126, 'M', 'ħ'), + (0x127, 'V'), + (0x128, 'M', 'ĩ'), + (0x129, 'V'), + (0x12A, 'M', 'ī'), + (0x12B, 'V'), + ] + +def _seg_3(): + return [ + (0x12C, 'M', 'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', 'į'), + (0x12F, 'V'), + (0x130, 'M', 'i̇'), + (0x131, 'V'), + (0x132, 'M', 'ij'), + (0x134, 'M', 'ĵ'), + (0x135, 'V'), + (0x136, 'M', 'ķ'), + (0x137, 'V'), + (0x139, 'M', 'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', 'ļ'), + (0x13C, 'V'), + (0x13D, 'M', 'ľ'), + (0x13E, 'V'), + (0x13F, 'M', 'l·'), + (0x141, 'M', 'ł'), + (0x142, 'V'), + (0x143, 'M', 'ń'), + (0x144, 'V'), + (0x145, 'M', 'ņ'), + (0x146, 'V'), + (0x147, 'M', 'ň'), + (0x148, 'V'), + (0x149, 'M', 'ʼn'), + (0x14A, 'M', 'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', 'ō'), + (0x14D, 'V'), + (0x14E, 'M', 'ŏ'), + (0x14F, 'V'), + (0x150, 'M', 'ő'), + (0x151, 'V'), + (0x152, 'M', 'œ'), + (0x153, 'V'), + (0x154, 'M', 'ŕ'), + (0x155, 'V'), + (0x156, 'M', 'ŗ'), + (0x157, 'V'), + (0x158, 'M', 'ř'), + (0x159, 'V'), + (0x15A, 'M', 
'ś'), + (0x15B, 'V'), + (0x15C, 'M', 'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', 'ş'), + (0x15F, 'V'), + (0x160, 'M', 'š'), + (0x161, 'V'), + (0x162, 'M', 'ţ'), + (0x163, 'V'), + (0x164, 'M', 'ť'), + (0x165, 'V'), + (0x166, 'M', 'ŧ'), + (0x167, 'V'), + (0x168, 'M', 'ũ'), + (0x169, 'V'), + (0x16A, 'M', 'ū'), + (0x16B, 'V'), + (0x16C, 'M', 'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', 'ů'), + (0x16F, 'V'), + (0x170, 'M', 'ű'), + (0x171, 'V'), + (0x172, 'M', 'ų'), + (0x173, 'V'), + (0x174, 'M', 'ŵ'), + (0x175, 'V'), + (0x176, 'M', 'ŷ'), + (0x177, 'V'), + (0x178, 'M', 'ÿ'), + (0x179, 'M', 'ź'), + (0x17A, 'V'), + (0x17B, 'M', 'ż'), + (0x17C, 'V'), + (0x17D, 'M', 'ž'), + (0x17E, 'V'), + (0x17F, 'M', 's'), + (0x180, 'V'), + (0x181, 'M', 'ɓ'), + (0x182, 'M', 'ƃ'), + (0x183, 'V'), + (0x184, 'M', 'ƅ'), + (0x185, 'V'), + (0x186, 'M', 'ɔ'), + (0x187, 'M', 'ƈ'), + (0x188, 'V'), + (0x189, 'M', 'ɖ'), + (0x18A, 'M', 'ɗ'), + (0x18B, 'M', 'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', 'ǝ'), + (0x18F, 'M', 'ə'), + (0x190, 'M', 'ɛ'), + (0x191, 'M', 'ƒ'), + (0x192, 'V'), + (0x193, 'M', 'ɠ'), + ] + +def _seg_4(): + return [ + (0x194, 'M', 'ɣ'), + (0x195, 'V'), + (0x196, 'M', 'ɩ'), + (0x197, 'M', 'ɨ'), + (0x198, 'M', 'ƙ'), + (0x199, 'V'), + (0x19C, 'M', 'ɯ'), + (0x19D, 'M', 'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', 'ɵ'), + (0x1A0, 'M', 'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', 'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', 'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', 'ʀ'), + (0x1A7, 'M', 'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', 'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', 'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', 'ʈ'), + (0x1AF, 'M', 'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', 'ʊ'), + (0x1B2, 'M', 'ʋ'), + (0x1B3, 'M', 'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', 'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', 'ʒ'), + (0x1B8, 'M', 'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', 'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', 'dž'), + (0x1C7, 'M', 'lj'), + (0x1CA, 'M', 'nj'), + (0x1CD, 'M', 'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', 'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', 'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', 'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', 'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', 'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', 'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', 'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', 'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', 'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', 'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', 'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', 'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', 'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', 'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', 'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', 'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', 'dz'), + (0x1F4, 'M', 'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', 'ƕ'), + (0x1F7, 'M', 'ƿ'), + (0x1F8, 'M', 'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', 'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', 'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', 'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', 'ȁ'), + (0x201, 'V'), + (0x202, 'M', 'ȃ'), + (0x203, 'V'), + (0x204, 'M', 'ȅ'), + (0x205, 'V'), + (0x206, 'M', 'ȇ'), + (0x207, 'V'), + (0x208, 'M', 'ȉ'), + (0x209, 'V'), + (0x20A, 'M', 'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', 'ȍ'), + ] + +def _seg_5(): + return [ + (0x20D, 'V'), + (0x20E, 'M', 'ȏ'), + (0x20F, 'V'), + (0x210, 'M', 'ȑ'), + (0x211, 'V'), + (0x212, 'M', 'ȓ'), + (0x213, 'V'), + (0x214, 'M', 'ȕ'), + (0x215, 'V'), + (0x216, 'M', 'ȗ'), + (0x217, 'V'), + (0x218, 'M', 'ș'), + (0x219, 'V'), + (0x21A, 'M', 'ț'), + (0x21B, 'V'), + (0x21C, 'M', 'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', 'ȟ'), + (0x21F, 'V'), + (0x220, 'M', 'ƞ'), + (0x221, 'V'), + (0x222, 'M', 'ȣ'), + (0x223, 'V'), + (0x224, 'M', 'ȥ'), + (0x225, 'V'), + (0x226, 'M', 'ȧ'), + (0x227, 'V'), + (0x228, 'M', 'ȩ'), + (0x229, 'V'), 
+ (0x22A, 'M', 'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', 'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', 'ȯ'), + (0x22F, 'V'), + (0x230, 'M', 'ȱ'), + (0x231, 'V'), + (0x232, 'M', 'ȳ'), + (0x233, 'V'), + (0x23A, 'M', 'ⱥ'), + (0x23B, 'M', 'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', 'ƚ'), + (0x23E, 'M', 'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', 'ɂ'), + (0x242, 'V'), + (0x243, 'M', 'ƀ'), + (0x244, 'M', 'ʉ'), + (0x245, 'M', 'ʌ'), + (0x246, 'M', 'ɇ'), + (0x247, 'V'), + (0x248, 'M', 'ɉ'), + (0x249, 'V'), + (0x24A, 'M', 'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', 'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', 'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', 'h'), + (0x2B1, 'M', 'ɦ'), + (0x2B2, 'M', 'j'), + (0x2B3, 'M', 'r'), + (0x2B4, 'M', 'ɹ'), + (0x2B5, 'M', 'ɻ'), + (0x2B6, 'M', 'ʁ'), + (0x2B7, 'M', 'w'), + (0x2B8, 'M', 'y'), + (0x2B9, 'V'), + (0x2D8, '3', ' ̆'), + (0x2D9, '3', ' ̇'), + (0x2DA, '3', ' ̊'), + (0x2DB, '3', ' ̨'), + (0x2DC, '3', ' ̃'), + (0x2DD, '3', ' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', 'ɣ'), + (0x2E1, 'M', 'l'), + (0x2E2, 'M', 's'), + (0x2E3, 'M', 'x'), + (0x2E4, 'M', 'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', '̀'), + (0x341, 'M', '́'), + (0x342, 'V'), + (0x343, 'M', '̓'), + (0x344, 'M', '̈́'), + (0x345, 'M', 'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', 'ͱ'), + (0x371, 'V'), + (0x372, 'M', 'ͳ'), + (0x373, 'V'), + (0x374, 'M', 'ʹ'), + (0x375, 'V'), + (0x376, 'M', 'ͷ'), + (0x377, 'V'), + ] + +def _seg_6(): + return [ + (0x378, 'X'), + (0x37A, '3', ' ι'), + (0x37B, 'V'), + (0x37E, '3', ';'), + (0x37F, 'M', 'ϳ'), + (0x380, 'X'), + (0x384, '3', ' ́'), + (0x385, '3', ' ̈́'), + (0x386, 'M', 'ά'), + (0x387, 'M', '·'), + (0x388, 'M', 'έ'), + (0x389, 'M', 'ή'), + (0x38A, 'M', 'ί'), + (0x38B, 'X'), + (0x38C, 'M', 'ό'), + (0x38D, 'X'), + (0x38E, 'M', 'ύ'), + (0x38F, 'M', 'ώ'), + (0x390, 'V'), + (0x391, 'M', 'α'), + (0x392, 'M', 'β'), + (0x393, 'M', 'γ'), + (0x394, 'M', 'δ'), + (0x395, 'M', 'ε'), + (0x396, 'M', 'ζ'), + (0x397, 'M', 'η'), + (0x398, 'M', 'θ'), + (0x399, 'M', 'ι'), + (0x39A, 'M', 'κ'), + (0x39B, 'M', 'λ'), + (0x39C, 'M', 'μ'), + (0x39D, 'M', 'ν'), + (0x39E, 'M', 'ξ'), + (0x39F, 'M', 'ο'), + (0x3A0, 'M', 'π'), + (0x3A1, 'M', 'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', 'σ'), + (0x3A4, 'M', 'τ'), + (0x3A5, 'M', 'υ'), + (0x3A6, 'M', 'φ'), + (0x3A7, 'M', 'χ'), + (0x3A8, 'M', 'ψ'), + (0x3A9, 'M', 'ω'), + (0x3AA, 'M', 'ϊ'), + (0x3AB, 'M', 'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', 'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', 'ϗ'), + (0x3D0, 'M', 'β'), + (0x3D1, 'M', 'θ'), + (0x3D2, 'M', 'υ'), + (0x3D3, 'M', 'ύ'), + (0x3D4, 'M', 'ϋ'), + (0x3D5, 'M', 'φ'), + (0x3D6, 'M', 'π'), + (0x3D7, 'V'), + (0x3D8, 'M', 'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', 'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', 'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', 'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', 'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', 'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', 'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', 'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', 'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', 'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', 'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', 'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', 'κ'), + (0x3F1, 'M', 'ρ'), + (0x3F2, 'M', 'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', 'θ'), + (0x3F5, 'M', 'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', 'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', 'σ'), + (0x3FA, 'M', 'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', 'ͻ'), + (0x3FE, 'M', 'ͼ'), + (0x3FF, 'M', 'ͽ'), + (0x400, 'M', 'ѐ'), + (0x401, 'M', 'ё'), + (0x402, 'M', 'ђ'), + ] + +def _seg_7(): + return [ + (0x403, 'M', 'ѓ'), + (0x404, 'M', 'є'), + (0x405, 'M', 'ѕ'), + (0x406, 'M', 'і'), + (0x407, 'M', 'ї'), + (0x408, 'M', 'ј'), + (0x409, 
'M', 'љ'), + (0x40A, 'M', 'њ'), + (0x40B, 'M', 'ћ'), + (0x40C, 'M', 'ќ'), + (0x40D, 'M', 'ѝ'), + (0x40E, 'M', 'ў'), + (0x40F, 'M', 'џ'), + (0x410, 'M', 'а'), + (0x411, 'M', 'б'), + (0x412, 'M', 'в'), + (0x413, 'M', 'г'), + (0x414, 'M', 'д'), + (0x415, 'M', 'е'), + (0x416, 'M', 'ж'), + (0x417, 'M', 'з'), + (0x418, 'M', 'и'), + (0x419, 'M', 'й'), + (0x41A, 'M', 'к'), + (0x41B, 'M', 'л'), + (0x41C, 'M', 'м'), + (0x41D, 'M', 'н'), + (0x41E, 'M', 'о'), + (0x41F, 'M', 'п'), + (0x420, 'M', 'р'), + (0x421, 'M', 'с'), + (0x422, 'M', 'т'), + (0x423, 'M', 'у'), + (0x424, 'M', 'ф'), + (0x425, 'M', 'х'), + (0x426, 'M', 'ц'), + (0x427, 'M', 'ч'), + (0x428, 'M', 'ш'), + (0x429, 'M', 'щ'), + (0x42A, 'M', 'ъ'), + (0x42B, 'M', 'ы'), + (0x42C, 'M', 'ь'), + (0x42D, 'M', 'э'), + (0x42E, 'M', 'ю'), + (0x42F, 'M', 'я'), + (0x430, 'V'), + (0x460, 'M', 'ѡ'), + (0x461, 'V'), + (0x462, 'M', 'ѣ'), + (0x463, 'V'), + (0x464, 'M', 'ѥ'), + (0x465, 'V'), + (0x466, 'M', 'ѧ'), + (0x467, 'V'), + (0x468, 'M', 'ѩ'), + (0x469, 'V'), + (0x46A, 'M', 'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', 'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', 'ѯ'), + (0x46F, 'V'), + (0x470, 'M', 'ѱ'), + (0x471, 'V'), + (0x472, 'M', 'ѳ'), + (0x473, 'V'), + (0x474, 'M', 'ѵ'), + (0x475, 'V'), + (0x476, 'M', 'ѷ'), + (0x477, 'V'), + (0x478, 'M', 'ѹ'), + (0x479, 'V'), + (0x47A, 'M', 'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', 'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', 'ѿ'), + (0x47F, 'V'), + (0x480, 'M', 'ҁ'), + (0x481, 'V'), + (0x48A, 'M', 'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', 'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', 'ҏ'), + (0x48F, 'V'), + (0x490, 'M', 'ґ'), + (0x491, 'V'), + (0x492, 'M', 'ғ'), + (0x493, 'V'), + (0x494, 'M', 'ҕ'), + (0x495, 'V'), + (0x496, 'M', 'җ'), + (0x497, 'V'), + (0x498, 'M', 'ҙ'), + (0x499, 'V'), + (0x49A, 'M', 'қ'), + (0x49B, 'V'), + (0x49C, 'M', 'ҝ'), + (0x49D, 'V'), + ] + +def _seg_8(): + return [ + (0x49E, 'M', 'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', 'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', 'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', 'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', 'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', 'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', 'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', 'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', 'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', 'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', 'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', 'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', 'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', 'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', 'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', 'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', 'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', 'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', 'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', 'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', 'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', 'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', 'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', 'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', 'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', 'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', 'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', 'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', 'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', 'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', 'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', 'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', 'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', 'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', 'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', 'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', 'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', 'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', 'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', 'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', 'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', 'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', 'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', 'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', 'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', 
'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', 'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', 'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', 'ԁ'), + (0x501, 'V'), + (0x502, 'M', 'ԃ'), + ] + +def _seg_9(): + return [ + (0x503, 'V'), + (0x504, 'M', 'ԅ'), + (0x505, 'V'), + (0x506, 'M', 'ԇ'), + (0x507, 'V'), + (0x508, 'M', 'ԉ'), + (0x509, 'V'), + (0x50A, 'M', 'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', 'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', 'ԏ'), + (0x50F, 'V'), + (0x510, 'M', 'ԑ'), + (0x511, 'V'), + (0x512, 'M', 'ԓ'), + (0x513, 'V'), + (0x514, 'M', 'ԕ'), + (0x515, 'V'), + (0x516, 'M', 'ԗ'), + (0x517, 'V'), + (0x518, 'M', 'ԙ'), + (0x519, 'V'), + (0x51A, 'M', 'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', 'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', 'ԟ'), + (0x51F, 'V'), + (0x520, 'M', 'ԡ'), + (0x521, 'V'), + (0x522, 'M', 'ԣ'), + (0x523, 'V'), + (0x524, 'M', 'ԥ'), + (0x525, 'V'), + (0x526, 'M', 'ԧ'), + (0x527, 'V'), + (0x528, 'M', 'ԩ'), + (0x529, 'V'), + (0x52A, 'M', 'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', 'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', 'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', 'ա'), + (0x532, 'M', 'բ'), + (0x533, 'M', 'գ'), + (0x534, 'M', 'դ'), + (0x535, 'M', 'ե'), + (0x536, 'M', 'զ'), + (0x537, 'M', 'է'), + (0x538, 'M', 'ը'), + (0x539, 'M', 'թ'), + (0x53A, 'M', 'ժ'), + (0x53B, 'M', 'ի'), + (0x53C, 'M', 'լ'), + (0x53D, 'M', 'խ'), + (0x53E, 'M', 'ծ'), + (0x53F, 'M', 'կ'), + (0x540, 'M', 'հ'), + (0x541, 'M', 'ձ'), + (0x542, 'M', 'ղ'), + (0x543, 'M', 'ճ'), + (0x544, 'M', 'մ'), + (0x545, 'M', 'յ'), + (0x546, 'M', 'ն'), + (0x547, 'M', 'շ'), + (0x548, 'M', 'ո'), + (0x549, 'M', 'չ'), + (0x54A, 'M', 'պ'), + (0x54B, 'M', 'ջ'), + (0x54C, 'M', 'ռ'), + (0x54D, 'M', 'ս'), + (0x54E, 'M', 'վ'), + (0x54F, 'M', 'տ'), + (0x550, 'M', 'ր'), + (0x551, 'M', 'ց'), + (0x552, 'M', 'ւ'), + (0x553, 'M', 'փ'), + (0x554, 'M', 'ք'), + (0x555, 'M', 'օ'), + (0x556, 'M', 'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x587, 'M', 'եւ'), + (0x588, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5EF, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), + ] + +def _seg_10(): + return [ + (0x675, 'M', 'اٴ'), + (0x676, 'M', 'وٴ'), + (0x677, 'M', 'ۇٴ'), + (0x678, 'M', 'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x7FD, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x8A0, 'V'), + (0x8B5, 'X'), + (0x8B6, 'V'), + (0x8C8, 'X'), + (0x8D3, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', 'क़'), + (0x959, 'M', 'ख़'), + (0x95A, 'M', 'ग़'), + (0x95B, 'M', 'ज़'), + (0x95C, 'M', 'ड़'), + (0x95D, 'M', 'ढ़'), + (0x95E, 'M', 'फ़'), + (0x95F, 'M', 'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', 'ড়'), + (0x9DD, 'M', 'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', 'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FF, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', 'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + 
(0xA36, 'M', 'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', 'ਖ਼'), + (0xA5A, 'M', 'ਗ਼'), + (0xA5B, 'M', 'ਜ਼'), + ] + +def _seg_11(): + return [ + (0xA5C, 'V'), + (0xA5D, 'X'), + (0xA5E, 'M', 'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA77, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB55, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', 'ଡ଼'), + (0xB5D, 'M', 'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC00, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + ] + +def _seg_12(): + return [ + (0xC29, 'X'), + (0xC2A, 'V'), + (0xC3A, 'X'), + (0xC3D, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC77, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDE, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD00, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD81, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', 'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE86, 'V'), + (0xE8B, 'X'), + (0xE8C, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + 
(0xEB3, 'M', 'ໍາ'), + (0xEB4, 'V'), + ] + +def _seg_13(): + return [ + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', 'ຫນ'), + (0xEDD, 'M', 'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', '་'), + (0xF0D, 'V'), + (0xF43, 'M', 'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', 'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', 'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', 'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', 'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', 'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', 'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', 'ཱུ'), + (0xF76, 'M', 'ྲྀ'), + (0xF77, 'M', 'ྲཱྀ'), + (0xF78, 'M', 'ླྀ'), + (0xF79, 'M', 'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', 'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', 'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', 'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', 'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', 'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', 'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', 'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', 'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', 'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', 'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + ] + +def _seg_14(): + return [ + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', 'Ᏸ'), + (0x13F9, 'M', 'Ᏹ'), + (0x13FA, 'M', 'Ᏺ'), + (0x13FB, 'M', 'Ᏻ'), + (0x13FC, 'M', 'Ᏼ'), + (0x13FD, 'M', 'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x170D, 'X'), + (0x170E, 'V'), + (0x1715, 'X'), + (0x1720, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1879, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1AC1, 'X'), + (0x1B00, 'V'), + (0x1B4C, 'X'), + (0x1B50, 'V'), + (0x1B7D, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + 
(0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + ] + +def _seg_15(): + return [ + (0x1C80, 'M', 'в'), + (0x1C81, 'M', 'д'), + (0x1C82, 'M', 'о'), + (0x1C83, 'M', 'с'), + (0x1C84, 'M', 'т'), + (0x1C86, 'M', 'ъ'), + (0x1C87, 'M', 'ѣ'), + (0x1C88, 'M', 'ꙋ'), + (0x1C89, 'X'), + (0x1C90, 'M', 'ა'), + (0x1C91, 'M', 'ბ'), + (0x1C92, 'M', 'გ'), + (0x1C93, 'M', 'დ'), + (0x1C94, 'M', 'ე'), + (0x1C95, 'M', 'ვ'), + (0x1C96, 'M', 'ზ'), + (0x1C97, 'M', 'თ'), + (0x1C98, 'M', 'ი'), + (0x1C99, 'M', 'კ'), + (0x1C9A, 'M', 'ლ'), + (0x1C9B, 'M', 'მ'), + (0x1C9C, 'M', 'ნ'), + (0x1C9D, 'M', 'ო'), + (0x1C9E, 'M', 'პ'), + (0x1C9F, 'M', 'ჟ'), + (0x1CA0, 'M', 'რ'), + (0x1CA1, 'M', 'ს'), + (0x1CA2, 'M', 'ტ'), + (0x1CA3, 'M', 'უ'), + (0x1CA4, 'M', 'ფ'), + (0x1CA5, 'M', 'ქ'), + (0x1CA6, 'M', 'ღ'), + (0x1CA7, 'M', 'ყ'), + (0x1CA8, 'M', 'შ'), + (0x1CA9, 'M', 'ჩ'), + (0x1CAA, 'M', 'ც'), + (0x1CAB, 'M', 'ძ'), + (0x1CAC, 'M', 'წ'), + (0x1CAD, 'M', 'ჭ'), + (0x1CAE, 'M', 'ხ'), + (0x1CAF, 'M', 'ჯ'), + (0x1CB0, 'M', 'ჰ'), + (0x1CB1, 'M', 'ჱ'), + (0x1CB2, 'M', 'ჲ'), + (0x1CB3, 'M', 'ჳ'), + (0x1CB4, 'M', 'ჴ'), + (0x1CB5, 'M', 'ჵ'), + (0x1CB6, 'M', 'ჶ'), + (0x1CB7, 'M', 'ჷ'), + (0x1CB8, 'M', 'ჸ'), + (0x1CB9, 'M', 'ჹ'), + (0x1CBA, 'M', 'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', 'ჽ'), + (0x1CBE, 'M', 'ჾ'), + (0x1CBF, 'M', 'ჿ'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFB, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', 'a'), + (0x1D2D, 'M', 'æ'), + (0x1D2E, 'M', 'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', 'd'), + (0x1D31, 'M', 'e'), + (0x1D32, 'M', 'ǝ'), + (0x1D33, 'M', 'g'), + (0x1D34, 'M', 'h'), + (0x1D35, 'M', 'i'), + (0x1D36, 'M', 'j'), + (0x1D37, 'M', 'k'), + (0x1D38, 'M', 'l'), + (0x1D39, 'M', 'm'), + (0x1D3A, 'M', 'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', 'o'), + (0x1D3D, 'M', 'ȣ'), + (0x1D3E, 'M', 'p'), + (0x1D3F, 'M', 'r'), + (0x1D40, 'M', 't'), + (0x1D41, 'M', 'u'), + (0x1D42, 'M', 'w'), + (0x1D43, 'M', 'a'), + (0x1D44, 'M', 'ɐ'), + (0x1D45, 'M', 'ɑ'), + (0x1D46, 'M', 'ᴂ'), + (0x1D47, 'M', 'b'), + (0x1D48, 'M', 'd'), + (0x1D49, 'M', 'e'), + (0x1D4A, 'M', 'ə'), + (0x1D4B, 'M', 'ɛ'), + (0x1D4C, 'M', 'ɜ'), + (0x1D4D, 'M', 'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', 'k'), + (0x1D50, 'M', 'm'), + (0x1D51, 'M', 'ŋ'), + (0x1D52, 'M', 'o'), + ] + +def _seg_16(): + return [ + (0x1D53, 'M', 'ɔ'), + (0x1D54, 'M', 'ᴖ'), + (0x1D55, 'M', 'ᴗ'), + (0x1D56, 'M', 'p'), + (0x1D57, 'M', 't'), + (0x1D58, 'M', 'u'), + (0x1D59, 'M', 'ᴝ'), + (0x1D5A, 'M', 'ɯ'), + (0x1D5B, 'M', 'v'), + (0x1D5C, 'M', 'ᴥ'), + (0x1D5D, 'M', 'β'), + (0x1D5E, 'M', 'γ'), + (0x1D5F, 'M', 'δ'), + (0x1D60, 'M', 'φ'), + (0x1D61, 'M', 'χ'), + (0x1D62, 'M', 'i'), + (0x1D63, 'M', 'r'), + (0x1D64, 'M', 'u'), + (0x1D65, 'M', 'v'), + (0x1D66, 'M', 'β'), + (0x1D67, 'M', 'γ'), + (0x1D68, 'M', 'ρ'), + (0x1D69, 'M', 'φ'), + (0x1D6A, 'M', 'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', 'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', 'ɒ'), + (0x1D9C, 'M', 'c'), + (0x1D9D, 'M', 'ɕ'), + (0x1D9E, 'M', 'ð'), + (0x1D9F, 'M', 'ɜ'), + (0x1DA0, 'M', 'f'), + (0x1DA1, 'M', 'ɟ'), + (0x1DA2, 'M', 'ɡ'), + (0x1DA3, 'M', 'ɥ'), + (0x1DA4, 'M', 'ɨ'), + (0x1DA5, 'M', 'ɩ'), + (0x1DA6, 'M', 'ɪ'), + (0x1DA7, 'M', 'ᵻ'), + (0x1DA8, 'M', 'ʝ'), + (0x1DA9, 'M', 'ɭ'), + (0x1DAA, 'M', 'ᶅ'), + (0x1DAB, 'M', 'ʟ'), + (0x1DAC, 'M', 'ɱ'), + (0x1DAD, 'M', 'ɰ'), + (0x1DAE, 'M', 'ɲ'), + (0x1DAF, 'M', 'ɳ'), + (0x1DB0, 'M', 'ɴ'), + (0x1DB1, 'M', 'ɵ'), + (0x1DB2, 'M', 'ɸ'), + (0x1DB3, 'M', 'ʂ'), + (0x1DB4, 'M', 'ʃ'), + (0x1DB5, 'M', 'ƫ'), + (0x1DB6, 'M', 'ʉ'), + (0x1DB7, 'M', 'ʊ'), + (0x1DB8, 'M', 'ᴜ'), + (0x1DB9, 'M', 'ʋ'), + 
(0x1DBA, 'M', 'ʌ'), + (0x1DBB, 'M', 'z'), + (0x1DBC, 'M', 'ʐ'), + (0x1DBD, 'M', 'ʑ'), + (0x1DBE, 'M', 'ʒ'), + (0x1DBF, 'M', 'θ'), + (0x1DC0, 'V'), + (0x1DFA, 'X'), + (0x1DFB, 'V'), + (0x1E00, 'M', 'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', 'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', 'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', 'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', 'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', 'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', 'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', 'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', 'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', 'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', 'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', 'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', 'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', 'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', 'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', 'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', 'ḡ'), + ] + +def _seg_17(): + return [ + (0x1E21, 'V'), + (0x1E22, 'M', 'ḣ'), + (0x1E23, 'V'), + (0x1E24, 'M', 'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', 'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', 'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', 'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', 'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', 'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', 'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', 'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', 'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', 'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', 'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', 'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', 'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', 'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', 'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', 'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', 'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', 'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', 'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', 'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', 'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', 'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', 'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', 'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', 'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', 'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', 'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', 'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', 'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', 'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', 'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', 'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', 'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', 'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', 'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', 'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', 'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', 'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', 'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', 'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', 'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', 'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', 'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', 'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', 'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', 'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', 'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', 'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', 'ẅ'), + ] + +def _seg_18(): + return [ + (0x1E85, 'V'), + (0x1E86, 'M', 'ẇ'), + (0x1E87, 'V'), + (0x1E88, 'M', 'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', 'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', 'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', 'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', 'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', 'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', 'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', 'aʾ'), + (0x1E9B, 'M', 'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', 'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', 'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', 'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', 'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', 'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', 'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', 'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', 'ậ'), + (0x1EAD, 'V'), + 
(0x1EAE, 'M', 'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', 'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', 'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', 'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', 'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', 'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', 'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', 'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', 'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', 'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', 'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', 'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', 'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', 'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', 'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', 'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', 'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', 'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', 'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', 'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', 'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', 'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', 'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', 'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', 'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', 'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', 'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', 'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', 'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', 'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', 'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', 'ử'), + (0x1EED, 'V'), + ] + +def _seg_19(): + return [ + (0x1EEE, 'M', 'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', 'ự'), + (0x1EF1, 'V'), + (0x1EF2, 'M', 'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', 'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', 'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', 'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', 'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', 'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', 'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', 'ἀ'), + (0x1F09, 'M', 'ἁ'), + (0x1F0A, 'M', 'ἂ'), + (0x1F0B, 'M', 'ἃ'), + (0x1F0C, 'M', 'ἄ'), + (0x1F0D, 'M', 'ἅ'), + (0x1F0E, 'M', 'ἆ'), + (0x1F0F, 'M', 'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', 'ἐ'), + (0x1F19, 'M', 'ἑ'), + (0x1F1A, 'M', 'ἒ'), + (0x1F1B, 'M', 'ἓ'), + (0x1F1C, 'M', 'ἔ'), + (0x1F1D, 'M', 'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', 'ἠ'), + (0x1F29, 'M', 'ἡ'), + (0x1F2A, 'M', 'ἢ'), + (0x1F2B, 'M', 'ἣ'), + (0x1F2C, 'M', 'ἤ'), + (0x1F2D, 'M', 'ἥ'), + (0x1F2E, 'M', 'ἦ'), + (0x1F2F, 'M', 'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', 'ἰ'), + (0x1F39, 'M', 'ἱ'), + (0x1F3A, 'M', 'ἲ'), + (0x1F3B, 'M', 'ἳ'), + (0x1F3C, 'M', 'ἴ'), + (0x1F3D, 'M', 'ἵ'), + (0x1F3E, 'M', 'ἶ'), + (0x1F3F, 'M', 'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', 'ὀ'), + (0x1F49, 'M', 'ὁ'), + (0x1F4A, 'M', 'ὂ'), + (0x1F4B, 'M', 'ὃ'), + (0x1F4C, 'M', 'ὄ'), + (0x1F4D, 'M', 'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', 'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', 'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', 'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', 'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', 'ὠ'), + (0x1F69, 'M', 'ὡ'), + (0x1F6A, 'M', 'ὢ'), + (0x1F6B, 'M', 'ὣ'), + (0x1F6C, 'M', 'ὤ'), + (0x1F6D, 'M', 'ὥ'), + (0x1F6E, 'M', 'ὦ'), + (0x1F6F, 'M', 'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', 'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', 'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', 'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', 'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', 'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', 'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', 'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', 'ἀι'), + (0x1F81, 'M', 'ἁι'), + (0x1F82, 'M', 'ἂι'), + (0x1F83, 'M', 'ἃι'), + (0x1F84, 'M', 'ἄι'), + ] + +def _seg_20(): + return [ + (0x1F85, 'M', 'ἅι'), + (0x1F86, 'M', 'ἆι'), + (0x1F87, 'M', 'ἇι'), + (0x1F88, 'M', 'ἀι'), + (0x1F89, 'M', 'ἁι'), + (0x1F8A, 'M', 'ἂι'), + (0x1F8B, 'M', 'ἃι'), + (0x1F8C, 'M', 'ἄι'), + 
(0x1F8D, 'M', 'ἅι'), + (0x1F8E, 'M', 'ἆι'), + (0x1F8F, 'M', 'ἇι'), + (0x1F90, 'M', 'ἠι'), + (0x1F91, 'M', 'ἡι'), + (0x1F92, 'M', 'ἢι'), + (0x1F93, 'M', 'ἣι'), + (0x1F94, 'M', 'ἤι'), + (0x1F95, 'M', 'ἥι'), + (0x1F96, 'M', 'ἦι'), + (0x1F97, 'M', 'ἧι'), + (0x1F98, 'M', 'ἠι'), + (0x1F99, 'M', 'ἡι'), + (0x1F9A, 'M', 'ἢι'), + (0x1F9B, 'M', 'ἣι'), + (0x1F9C, 'M', 'ἤι'), + (0x1F9D, 'M', 'ἥι'), + (0x1F9E, 'M', 'ἦι'), + (0x1F9F, 'M', 'ἧι'), + (0x1FA0, 'M', 'ὠι'), + (0x1FA1, 'M', 'ὡι'), + (0x1FA2, 'M', 'ὢι'), + (0x1FA3, 'M', 'ὣι'), + (0x1FA4, 'M', 'ὤι'), + (0x1FA5, 'M', 'ὥι'), + (0x1FA6, 'M', 'ὦι'), + (0x1FA7, 'M', 'ὧι'), + (0x1FA8, 'M', 'ὠι'), + (0x1FA9, 'M', 'ὡι'), + (0x1FAA, 'M', 'ὢι'), + (0x1FAB, 'M', 'ὣι'), + (0x1FAC, 'M', 'ὤι'), + (0x1FAD, 'M', 'ὥι'), + (0x1FAE, 'M', 'ὦι'), + (0x1FAF, 'M', 'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', 'ὰι'), + (0x1FB3, 'M', 'αι'), + (0x1FB4, 'M', 'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', 'ᾶι'), + (0x1FB8, 'M', 'ᾰ'), + (0x1FB9, 'M', 'ᾱ'), + (0x1FBA, 'M', 'ὰ'), + (0x1FBB, 'M', 'ά'), + (0x1FBC, 'M', 'αι'), + (0x1FBD, '3', ' ̓'), + (0x1FBE, 'M', 'ι'), + (0x1FBF, '3', ' ̓'), + (0x1FC0, '3', ' ͂'), + (0x1FC1, '3', ' ̈͂'), + (0x1FC2, 'M', 'ὴι'), + (0x1FC3, 'M', 'ηι'), + (0x1FC4, 'M', 'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', 'ῆι'), + (0x1FC8, 'M', 'ὲ'), + (0x1FC9, 'M', 'έ'), + (0x1FCA, 'M', 'ὴ'), + (0x1FCB, 'M', 'ή'), + (0x1FCC, 'M', 'ηι'), + (0x1FCD, '3', ' ̓̀'), + (0x1FCE, '3', ' ̓́'), + (0x1FCF, '3', ' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', 'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', 'ῐ'), + (0x1FD9, 'M', 'ῑ'), + (0x1FDA, 'M', 'ὶ'), + (0x1FDB, 'M', 'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', ' ̔̀'), + (0x1FDE, '3', ' ̔́'), + (0x1FDF, '3', ' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', 'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', 'ῠ'), + (0x1FE9, 'M', 'ῡ'), + (0x1FEA, 'M', 'ὺ'), + (0x1FEB, 'M', 'ύ'), + (0x1FEC, 'M', 'ῥ'), + (0x1FED, '3', ' ̈̀'), + (0x1FEE, '3', ' ̈́'), + (0x1FEF, '3', '`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', 'ὼι'), + (0x1FF3, 'M', 'ωι'), + ] + +def _seg_21(): + return [ + (0x1FF4, 'M', 'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + (0x1FF7, 'M', 'ῶι'), + (0x1FF8, 'M', 'ὸ'), + (0x1FF9, 'M', 'ό'), + (0x1FFA, 'M', 'ὼ'), + (0x1FFB, 'M', 'ώ'), + (0x1FFC, 'M', 'ωι'), + (0x1FFD, '3', ' ́'), + (0x1FFE, '3', ' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', ' '), + (0x200B, 'I'), + (0x200C, 'D', ''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', '‐'), + (0x2012, 'V'), + (0x2017, '3', ' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', ' '), + (0x2030, 'V'), + (0x2033, 'M', '′′'), + (0x2034, 'M', '′′′'), + (0x2035, 'V'), + (0x2036, 'M', '‵‵'), + (0x2037, 'M', '‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', '!!'), + (0x203D, 'V'), + (0x203E, '3', ' ̅'), + (0x203F, 'V'), + (0x2047, '3', '??'), + (0x2048, '3', '?!'), + (0x2049, '3', '!?'), + (0x204A, 'V'), + (0x2057, 'M', '′′′′'), + (0x2058, 'V'), + (0x205F, '3', ' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', '0'), + (0x2071, 'M', 'i'), + (0x2072, 'X'), + (0x2074, 'M', '4'), + (0x2075, 'M', '5'), + (0x2076, 'M', '6'), + (0x2077, 'M', '7'), + (0x2078, 'M', '8'), + (0x2079, 'M', '9'), + (0x207A, '3', '+'), + (0x207B, 'M', '−'), + (0x207C, '3', '='), + (0x207D, '3', '('), + (0x207E, '3', ')'), + (0x207F, 'M', 'n'), + (0x2080, 'M', '0'), + (0x2081, 'M', '1'), + (0x2082, 'M', '2'), + (0x2083, 'M', '3'), + (0x2084, 'M', '4'), + (0x2085, 'M', '5'), + (0x2086, 'M', '6'), + (0x2087, 'M', '7'), + (0x2088, 'M', '8'), + (0x2089, 'M', '9'), + 
(0x208A, '3', '+'), + (0x208B, 'M', '−'), + (0x208C, '3', '='), + (0x208D, '3', '('), + (0x208E, '3', ')'), + (0x208F, 'X'), + (0x2090, 'M', 'a'), + (0x2091, 'M', 'e'), + (0x2092, 'M', 'o'), + (0x2093, 'M', 'x'), + (0x2094, 'M', 'ə'), + (0x2095, 'M', 'h'), + (0x2096, 'M', 'k'), + (0x2097, 'M', 'l'), + (0x2098, 'M', 'm'), + (0x2099, 'M', 'n'), + (0x209A, 'M', 'p'), + (0x209B, 'M', 's'), + (0x209C, 'M', 't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', 'rs'), + (0x20A9, 'V'), + (0x20C0, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', 'a/c'), + (0x2101, '3', 'a/s'), + ] + +def _seg_22(): + return [ + (0x2102, 'M', 'c'), + (0x2103, 'M', '°c'), + (0x2104, 'V'), + (0x2105, '3', 'c/o'), + (0x2106, '3', 'c/u'), + (0x2107, 'M', 'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', '°f'), + (0x210A, 'M', 'g'), + (0x210B, 'M', 'h'), + (0x210F, 'M', 'ħ'), + (0x2110, 'M', 'i'), + (0x2112, 'M', 'l'), + (0x2114, 'V'), + (0x2115, 'M', 'n'), + (0x2116, 'M', 'no'), + (0x2117, 'V'), + (0x2119, 'M', 'p'), + (0x211A, 'M', 'q'), + (0x211B, 'M', 'r'), + (0x211E, 'V'), + (0x2120, 'M', 'sm'), + (0x2121, 'M', 'tel'), + (0x2122, 'M', 'tm'), + (0x2123, 'V'), + (0x2124, 'M', 'z'), + (0x2125, 'V'), + (0x2126, 'M', 'ω'), + (0x2127, 'V'), + (0x2128, 'M', 'z'), + (0x2129, 'V'), + (0x212A, 'M', 'k'), + (0x212B, 'M', 'å'), + (0x212C, 'M', 'b'), + (0x212D, 'M', 'c'), + (0x212E, 'V'), + (0x212F, 'M', 'e'), + (0x2131, 'M', 'f'), + (0x2132, 'X'), + (0x2133, 'M', 'm'), + (0x2134, 'M', 'o'), + (0x2135, 'M', 'א'), + (0x2136, 'M', 'ב'), + (0x2137, 'M', 'ג'), + (0x2138, 'M', 'ד'), + (0x2139, 'M', 'i'), + (0x213A, 'V'), + (0x213B, 'M', 'fax'), + (0x213C, 'M', 'π'), + (0x213D, 'M', 'γ'), + (0x213F, 'M', 'π'), + (0x2140, 'M', '∑'), + (0x2141, 'V'), + (0x2145, 'M', 'd'), + (0x2147, 'M', 'e'), + (0x2148, 'M', 'i'), + (0x2149, 'M', 'j'), + (0x214A, 'V'), + (0x2150, 'M', '1⁄7'), + (0x2151, 'M', '1⁄9'), + (0x2152, 'M', '1⁄10'), + (0x2153, 'M', '1⁄3'), + (0x2154, 'M', '2⁄3'), + (0x2155, 'M', '1⁄5'), + (0x2156, 'M', '2⁄5'), + (0x2157, 'M', '3⁄5'), + (0x2158, 'M', '4⁄5'), + (0x2159, 'M', '1⁄6'), + (0x215A, 'M', '5⁄6'), + (0x215B, 'M', '1⁄8'), + (0x215C, 'M', '3⁄8'), + (0x215D, 'M', '5⁄8'), + (0x215E, 'M', '7⁄8'), + (0x215F, 'M', '1⁄'), + (0x2160, 'M', 'i'), + (0x2161, 'M', 'ii'), + (0x2162, 'M', 'iii'), + (0x2163, 'M', 'iv'), + (0x2164, 'M', 'v'), + (0x2165, 'M', 'vi'), + (0x2166, 'M', 'vii'), + (0x2167, 'M', 'viii'), + (0x2168, 'M', 'ix'), + (0x2169, 'M', 'x'), + (0x216A, 'M', 'xi'), + (0x216B, 'M', 'xii'), + (0x216C, 'M', 'l'), + (0x216D, 'M', 'c'), + (0x216E, 'M', 'd'), + (0x216F, 'M', 'm'), + (0x2170, 'M', 'i'), + (0x2171, 'M', 'ii'), + (0x2172, 'M', 'iii'), + (0x2173, 'M', 'iv'), + (0x2174, 'M', 'v'), + (0x2175, 'M', 'vi'), + (0x2176, 'M', 'vii'), + (0x2177, 'M', 'viii'), + (0x2178, 'M', 'ix'), + (0x2179, 'M', 'x'), + ] + +def _seg_23(): + return [ + (0x217A, 'M', 'xi'), + (0x217B, 'M', 'xii'), + (0x217C, 'M', 'l'), + (0x217D, 'M', 'c'), + (0x217E, 'M', 'd'), + (0x217F, 'M', 'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', '0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', '∫∫'), + (0x222D, 'M', '∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', '∮∮'), + (0x2230, 'M', '∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', '〈'), + (0x232A, 'M', '〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', '1'), + (0x2461, 'M', '2'), + (0x2462, 'M', '3'), + (0x2463, 'M', '4'), + (0x2464, 'M', '5'), + (0x2465, 'M', '6'), + 
(0x2466, 'M', '7'), + (0x2467, 'M', '8'), + (0x2468, 'M', '9'), + (0x2469, 'M', '10'), + (0x246A, 'M', '11'), + (0x246B, 'M', '12'), + (0x246C, 'M', '13'), + (0x246D, 'M', '14'), + (0x246E, 'M', '15'), + (0x246F, 'M', '16'), + (0x2470, 'M', '17'), + (0x2471, 'M', '18'), + (0x2472, 'M', '19'), + (0x2473, 'M', '20'), + (0x2474, '3', '(1)'), + (0x2475, '3', '(2)'), + (0x2476, '3', '(3)'), + (0x2477, '3', '(4)'), + (0x2478, '3', '(5)'), + (0x2479, '3', '(6)'), + (0x247A, '3', '(7)'), + (0x247B, '3', '(8)'), + (0x247C, '3', '(9)'), + (0x247D, '3', '(10)'), + (0x247E, '3', '(11)'), + (0x247F, '3', '(12)'), + (0x2480, '3', '(13)'), + (0x2481, '3', '(14)'), + (0x2482, '3', '(15)'), + (0x2483, '3', '(16)'), + (0x2484, '3', '(17)'), + (0x2485, '3', '(18)'), + (0x2486, '3', '(19)'), + (0x2487, '3', '(20)'), + (0x2488, 'X'), + (0x249C, '3', '(a)'), + (0x249D, '3', '(b)'), + (0x249E, '3', '(c)'), + (0x249F, '3', '(d)'), + (0x24A0, '3', '(e)'), + (0x24A1, '3', '(f)'), + (0x24A2, '3', '(g)'), + (0x24A3, '3', '(h)'), + (0x24A4, '3', '(i)'), + (0x24A5, '3', '(j)'), + (0x24A6, '3', '(k)'), + (0x24A7, '3', '(l)'), + (0x24A8, '3', '(m)'), + (0x24A9, '3', '(n)'), + (0x24AA, '3', '(o)'), + (0x24AB, '3', '(p)'), + (0x24AC, '3', '(q)'), + (0x24AD, '3', '(r)'), + (0x24AE, '3', '(s)'), + (0x24AF, '3', '(t)'), + (0x24B0, '3', '(u)'), + (0x24B1, '3', '(v)'), + (0x24B2, '3', '(w)'), + (0x24B3, '3', '(x)'), + (0x24B4, '3', '(y)'), + (0x24B5, '3', '(z)'), + (0x24B6, 'M', 'a'), + (0x24B7, 'M', 'b'), + (0x24B8, 'M', 'c'), + (0x24B9, 'M', 'd'), + ] + +def _seg_24(): + return [ + (0x24BA, 'M', 'e'), + (0x24BB, 'M', 'f'), + (0x24BC, 'M', 'g'), + (0x24BD, 'M', 'h'), + (0x24BE, 'M', 'i'), + (0x24BF, 'M', 'j'), + (0x24C0, 'M', 'k'), + (0x24C1, 'M', 'l'), + (0x24C2, 'M', 'm'), + (0x24C3, 'M', 'n'), + (0x24C4, 'M', 'o'), + (0x24C5, 'M', 'p'), + (0x24C6, 'M', 'q'), + (0x24C7, 'M', 'r'), + (0x24C8, 'M', 's'), + (0x24C9, 'M', 't'), + (0x24CA, 'M', 'u'), + (0x24CB, 'M', 'v'), + (0x24CC, 'M', 'w'), + (0x24CD, 'M', 'x'), + (0x24CE, 'M', 'y'), + (0x24CF, 'M', 'z'), + (0x24D0, 'M', 'a'), + (0x24D1, 'M', 'b'), + (0x24D2, 'M', 'c'), + (0x24D3, 'M', 'd'), + (0x24D4, 'M', 'e'), + (0x24D5, 'M', 'f'), + (0x24D6, 'M', 'g'), + (0x24D7, 'M', 'h'), + (0x24D8, 'M', 'i'), + (0x24D9, 'M', 'j'), + (0x24DA, 'M', 'k'), + (0x24DB, 'M', 'l'), + (0x24DC, 'M', 'm'), + (0x24DD, 'M', 'n'), + (0x24DE, 'M', 'o'), + (0x24DF, 'M', 'p'), + (0x24E0, 'M', 'q'), + (0x24E1, 'M', 'r'), + (0x24E2, 'M', 's'), + (0x24E3, 'M', 't'), + (0x24E4, 'M', 'u'), + (0x24E5, 'M', 'v'), + (0x24E6, 'M', 'w'), + (0x24E7, 'M', 'x'), + (0x24E8, 'M', 'y'), + (0x24E9, 'M', 'z'), + (0x24EA, 'M', '0'), + (0x24EB, 'V'), + (0x2A0C, 'M', '∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', '::='), + (0x2A75, '3', '=='), + (0x2A76, '3', '==='), + (0x2A77, 'V'), + (0x2ADC, 'M', '⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B97, 'V'), + (0x2C00, 'M', 'ⰰ'), + (0x2C01, 'M', 'ⰱ'), + (0x2C02, 'M', 'ⰲ'), + (0x2C03, 'M', 'ⰳ'), + (0x2C04, 'M', 'ⰴ'), + (0x2C05, 'M', 'ⰵ'), + (0x2C06, 'M', 'ⰶ'), + (0x2C07, 'M', 'ⰷ'), + (0x2C08, 'M', 'ⰸ'), + (0x2C09, 'M', 'ⰹ'), + (0x2C0A, 'M', 'ⰺ'), + (0x2C0B, 'M', 'ⰻ'), + (0x2C0C, 'M', 'ⰼ'), + (0x2C0D, 'M', 'ⰽ'), + (0x2C0E, 'M', 'ⰾ'), + (0x2C0F, 'M', 'ⰿ'), + (0x2C10, 'M', 'ⱀ'), + (0x2C11, 'M', 'ⱁ'), + (0x2C12, 'M', 'ⱂ'), + (0x2C13, 'M', 'ⱃ'), + (0x2C14, 'M', 'ⱄ'), + (0x2C15, 'M', 'ⱅ'), + (0x2C16, 'M', 'ⱆ'), + (0x2C17, 'M', 'ⱇ'), + (0x2C18, 'M', 'ⱈ'), + (0x2C19, 'M', 'ⱉ'), + (0x2C1A, 'M', 'ⱊ'), + (0x2C1B, 'M', 'ⱋ'), + (0x2C1C, 'M', 'ⱌ'), + (0x2C1D, 
'M', 'ⱍ'), + (0x2C1E, 'M', 'ⱎ'), + (0x2C1F, 'M', 'ⱏ'), + (0x2C20, 'M', 'ⱐ'), + (0x2C21, 'M', 'ⱑ'), + (0x2C22, 'M', 'ⱒ'), + (0x2C23, 'M', 'ⱓ'), + (0x2C24, 'M', 'ⱔ'), + (0x2C25, 'M', 'ⱕ'), + ] + +def _seg_25(): + return [ + (0x2C26, 'M', 'ⱖ'), + (0x2C27, 'M', 'ⱗ'), + (0x2C28, 'M', 'ⱘ'), + (0x2C29, 'M', 'ⱙ'), + (0x2C2A, 'M', 'ⱚ'), + (0x2C2B, 'M', 'ⱛ'), + (0x2C2C, 'M', 'ⱜ'), + (0x2C2D, 'M', 'ⱝ'), + (0x2C2E, 'M', 'ⱞ'), + (0x2C2F, 'X'), + (0x2C30, 'V'), + (0x2C5F, 'X'), + (0x2C60, 'M', 'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', 'ɫ'), + (0x2C63, 'M', 'ᵽ'), + (0x2C64, 'M', 'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', 'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', 'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', 'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', 'ɑ'), + (0x2C6E, 'M', 'ɱ'), + (0x2C6F, 'M', 'ɐ'), + (0x2C70, 'M', 'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', 'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', 'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', 'j'), + (0x2C7D, 'M', 'v'), + (0x2C7E, 'M', 'ȿ'), + (0x2C7F, 'M', 'ɀ'), + (0x2C80, 'M', 'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', 'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', 'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', 'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', 'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', 'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', 'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', 'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', 'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', 'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', 'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', 'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', 'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', 'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', 'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', 'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', 'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', 'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', 'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', 'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', 'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', 'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', 'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', 'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', 'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', 'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', 'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', 'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', 'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', 'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', 'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', 'ⲿ'), + ] + +def _seg_26(): + return [ + (0x2CBF, 'V'), + (0x2CC0, 'M', 'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', 'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', 'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', 'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', 'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', 'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', 'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', 'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', 'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', 'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', 'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', 'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', 'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', 'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', 'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', 'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', 'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', 'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', 'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', 'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', 'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', 'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 
'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E53, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', '母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', '龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', '一'), + (0x2F01, 'M', '丨'), + (0x2F02, 'M', '丶'), + (0x2F03, 'M', '丿'), + (0x2F04, 'M', '乙'), + (0x2F05, 'M', '亅'), + (0x2F06, 'M', '二'), + (0x2F07, 'M', '亠'), + (0x2F08, 'M', '人'), + (0x2F09, 'M', '儿'), + (0x2F0A, 'M', '入'), + (0x2F0B, 'M', '八'), + (0x2F0C, 'M', '冂'), + (0x2F0D, 'M', '冖'), + (0x2F0E, 'M', '冫'), + (0x2F0F, 'M', '几'), + (0x2F10, 'M', '凵'), + (0x2F11, 'M', '刀'), + ] + +def _seg_27(): + return [ + (0x2F12, 'M', '力'), + (0x2F13, 'M', '勹'), + (0x2F14, 'M', '匕'), + (0x2F15, 'M', '匚'), + (0x2F16, 'M', '匸'), + (0x2F17, 'M', '十'), + (0x2F18, 'M', '卜'), + (0x2F19, 'M', '卩'), + (0x2F1A, 'M', '厂'), + (0x2F1B, 'M', '厶'), + (0x2F1C, 'M', '又'), + (0x2F1D, 'M', '口'), + (0x2F1E, 'M', '囗'), + (0x2F1F, 'M', '土'), + (0x2F20, 'M', '士'), + (0x2F21, 'M', '夂'), + (0x2F22, 'M', '夊'), + (0x2F23, 'M', '夕'), + (0x2F24, 'M', '大'), + (0x2F25, 'M', '女'), + (0x2F26, 'M', '子'), + (0x2F27, 'M', '宀'), + (0x2F28, 'M', '寸'), + (0x2F29, 'M', '小'), + (0x2F2A, 'M', '尢'), + (0x2F2B, 'M', '尸'), + (0x2F2C, 'M', '屮'), + (0x2F2D, 'M', '山'), + (0x2F2E, 'M', '巛'), + (0x2F2F, 'M', '工'), + (0x2F30, 'M', '己'), + (0x2F31, 'M', '巾'), + (0x2F32, 'M', '干'), + (0x2F33, 'M', '幺'), + (0x2F34, 'M', '广'), + (0x2F35, 'M', '廴'), + (0x2F36, 'M', '廾'), + (0x2F37, 'M', '弋'), + (0x2F38, 'M', '弓'), + (0x2F39, 'M', '彐'), + (0x2F3A, 'M', '彡'), + (0x2F3B, 'M', '彳'), + (0x2F3C, 'M', '心'), + (0x2F3D, 'M', '戈'), + (0x2F3E, 'M', '戶'), + (0x2F3F, 'M', '手'), + (0x2F40, 'M', '支'), + (0x2F41, 'M', '攴'), + (0x2F42, 'M', '文'), + (0x2F43, 'M', '斗'), + (0x2F44, 'M', '斤'), + (0x2F45, 'M', '方'), + (0x2F46, 'M', '无'), + (0x2F47, 'M', '日'), + (0x2F48, 'M', '曰'), + (0x2F49, 'M', '月'), + (0x2F4A, 'M', '木'), + (0x2F4B, 'M', '欠'), + (0x2F4C, 'M', '止'), + (0x2F4D, 'M', '歹'), + (0x2F4E, 'M', '殳'), + (0x2F4F, 'M', '毋'), + (0x2F50, 'M', '比'), + (0x2F51, 'M', '毛'), + (0x2F52, 'M', '氏'), + (0x2F53, 'M', '气'), + (0x2F54, 'M', '水'), + (0x2F55, 'M', '火'), + (0x2F56, 'M', '爪'), + (0x2F57, 'M', '父'), + (0x2F58, 'M', '爻'), + (0x2F59, 'M', '爿'), + (0x2F5A, 'M', '片'), + (0x2F5B, 'M', '牙'), + (0x2F5C, 'M', '牛'), + (0x2F5D, 'M', '犬'), + (0x2F5E, 'M', '玄'), + (0x2F5F, 'M', '玉'), + (0x2F60, 'M', '瓜'), + (0x2F61, 'M', '瓦'), + (0x2F62, 'M', '甘'), + (0x2F63, 'M', '生'), + (0x2F64, 'M', '用'), + (0x2F65, 'M', '田'), + (0x2F66, 'M', '疋'), + (0x2F67, 'M', '疒'), + (0x2F68, 'M', '癶'), + (0x2F69, 'M', '白'), + (0x2F6A, 'M', '皮'), + (0x2F6B, 'M', '皿'), + (0x2F6C, 'M', '目'), + (0x2F6D, 'M', '矛'), + (0x2F6E, 'M', '矢'), + (0x2F6F, 'M', '石'), + (0x2F70, 'M', '示'), + (0x2F71, 'M', '禸'), + (0x2F72, 'M', '禾'), + (0x2F73, 'M', '穴'), + (0x2F74, 'M', '立'), + (0x2F75, 'M', '竹'), + ] + +def _seg_28(): + return [ + (0x2F76, 'M', '米'), + (0x2F77, 'M', '糸'), + (0x2F78, 'M', '缶'), + (0x2F79, 'M', '网'), + (0x2F7A, 'M', '羊'), + (0x2F7B, 'M', '羽'), + (0x2F7C, 'M', '老'), + (0x2F7D, 'M', '而'), + (0x2F7E, 'M', '耒'), + (0x2F7F, 'M', '耳'), + (0x2F80, 'M', '聿'), + (0x2F81, 'M', '肉'), + (0x2F82, 'M', '臣'), + (0x2F83, 'M', '自'), + (0x2F84, 'M', '至'), + (0x2F85, 'M', '臼'), + (0x2F86, 'M', '舌'), + (0x2F87, 'M', '舛'), + (0x2F88, 'M', '舟'), + (0x2F89, 'M', '艮'), + (0x2F8A, 'M', '色'), + (0x2F8B, 'M', '艸'), + (0x2F8C, 'M', '虍'), + (0x2F8D, 'M', '虫'), + (0x2F8E, 'M', '血'), + (0x2F8F, 'M', '行'), + (0x2F90, 'M', '衣'), + (0x2F91, 'M', '襾'), + (0x2F92, 'M', '見'), + (0x2F93, 'M', '角'), + (0x2F94, 'M', '言'), + 
(0x2F95, 'M', '谷'), + (0x2F96, 'M', '豆'), + (0x2F97, 'M', '豕'), + (0x2F98, 'M', '豸'), + (0x2F99, 'M', '貝'), + (0x2F9A, 'M', '赤'), + (0x2F9B, 'M', '走'), + (0x2F9C, 'M', '足'), + (0x2F9D, 'M', '身'), + (0x2F9E, 'M', '車'), + (0x2F9F, 'M', '辛'), + (0x2FA0, 'M', '辰'), + (0x2FA1, 'M', '辵'), + (0x2FA2, 'M', '邑'), + (0x2FA3, 'M', '酉'), + (0x2FA4, 'M', '釆'), + (0x2FA5, 'M', '里'), + (0x2FA6, 'M', '金'), + (0x2FA7, 'M', '長'), + (0x2FA8, 'M', '門'), + (0x2FA9, 'M', '阜'), + (0x2FAA, 'M', '隶'), + (0x2FAB, 'M', '隹'), + (0x2FAC, 'M', '雨'), + (0x2FAD, 'M', '靑'), + (0x2FAE, 'M', '非'), + (0x2FAF, 'M', '面'), + (0x2FB0, 'M', '革'), + (0x2FB1, 'M', '韋'), + (0x2FB2, 'M', '韭'), + (0x2FB3, 'M', '音'), + (0x2FB4, 'M', '頁'), + (0x2FB5, 'M', '風'), + (0x2FB6, 'M', '飛'), + (0x2FB7, 'M', '食'), + (0x2FB8, 'M', '首'), + (0x2FB9, 'M', '香'), + (0x2FBA, 'M', '馬'), + (0x2FBB, 'M', '骨'), + (0x2FBC, 'M', '高'), + (0x2FBD, 'M', '髟'), + (0x2FBE, 'M', '鬥'), + (0x2FBF, 'M', '鬯'), + (0x2FC0, 'M', '鬲'), + (0x2FC1, 'M', '鬼'), + (0x2FC2, 'M', '魚'), + (0x2FC3, 'M', '鳥'), + (0x2FC4, 'M', '鹵'), + (0x2FC5, 'M', '鹿'), + (0x2FC6, 'M', '麥'), + (0x2FC7, 'M', '麻'), + (0x2FC8, 'M', '黃'), + (0x2FC9, 'M', '黍'), + (0x2FCA, 'M', '黑'), + (0x2FCB, 'M', '黹'), + (0x2FCC, 'M', '黽'), + (0x2FCD, 'M', '鼎'), + (0x2FCE, 'M', '鼓'), + (0x2FCF, 'M', '鼠'), + (0x2FD0, 'M', '鼻'), + (0x2FD1, 'M', '齊'), + (0x2FD2, 'M', '齒'), + (0x2FD3, 'M', '龍'), + (0x2FD4, 'M', '龜'), + (0x2FD5, 'M', '龠'), + (0x2FD6, 'X'), + (0x3000, '3', ' '), + (0x3001, 'V'), + (0x3002, 'M', '.'), + ] + +def _seg_29(): + return [ + (0x3003, 'V'), + (0x3036, 'M', '〒'), + (0x3037, 'V'), + (0x3038, 'M', '十'), + (0x3039, 'M', '卄'), + (0x303A, 'M', '卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', ' ゙'), + (0x309C, '3', ' ゚'), + (0x309D, 'V'), + (0x309F, 'M', 'より'), + (0x30A0, 'V'), + (0x30FF, 'M', 'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x3130, 'X'), + (0x3131, 'M', 'ᄀ'), + (0x3132, 'M', 'ᄁ'), + (0x3133, 'M', 'ᆪ'), + (0x3134, 'M', 'ᄂ'), + (0x3135, 'M', 'ᆬ'), + (0x3136, 'M', 'ᆭ'), + (0x3137, 'M', 'ᄃ'), + (0x3138, 'M', 'ᄄ'), + (0x3139, 'M', 'ᄅ'), + (0x313A, 'M', 'ᆰ'), + (0x313B, 'M', 'ᆱ'), + (0x313C, 'M', 'ᆲ'), + (0x313D, 'M', 'ᆳ'), + (0x313E, 'M', 'ᆴ'), + (0x313F, 'M', 'ᆵ'), + (0x3140, 'M', 'ᄚ'), + (0x3141, 'M', 'ᄆ'), + (0x3142, 'M', 'ᄇ'), + (0x3143, 'M', 'ᄈ'), + (0x3144, 'M', 'ᄡ'), + (0x3145, 'M', 'ᄉ'), + (0x3146, 'M', 'ᄊ'), + (0x3147, 'M', 'ᄋ'), + (0x3148, 'M', 'ᄌ'), + (0x3149, 'M', 'ᄍ'), + (0x314A, 'M', 'ᄎ'), + (0x314B, 'M', 'ᄏ'), + (0x314C, 'M', 'ᄐ'), + (0x314D, 'M', 'ᄑ'), + (0x314E, 'M', 'ᄒ'), + (0x314F, 'M', 'ᅡ'), + (0x3150, 'M', 'ᅢ'), + (0x3151, 'M', 'ᅣ'), + (0x3152, 'M', 'ᅤ'), + (0x3153, 'M', 'ᅥ'), + (0x3154, 'M', 'ᅦ'), + (0x3155, 'M', 'ᅧ'), + (0x3156, 'M', 'ᅨ'), + (0x3157, 'M', 'ᅩ'), + (0x3158, 'M', 'ᅪ'), + (0x3159, 'M', 'ᅫ'), + (0x315A, 'M', 'ᅬ'), + (0x315B, 'M', 'ᅭ'), + (0x315C, 'M', 'ᅮ'), + (0x315D, 'M', 'ᅯ'), + (0x315E, 'M', 'ᅰ'), + (0x315F, 'M', 'ᅱ'), + (0x3160, 'M', 'ᅲ'), + (0x3161, 'M', 'ᅳ'), + (0x3162, 'M', 'ᅴ'), + (0x3163, 'M', 'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', 'ᄔ'), + (0x3166, 'M', 'ᄕ'), + (0x3167, 'M', 'ᇇ'), + (0x3168, 'M', 'ᇈ'), + (0x3169, 'M', 'ᇌ'), + (0x316A, 'M', 'ᇎ'), + (0x316B, 'M', 'ᇓ'), + (0x316C, 'M', 'ᇗ'), + (0x316D, 'M', 'ᇙ'), + (0x316E, 'M', 'ᄜ'), + (0x316F, 'M', 'ᇝ'), + (0x3170, 'M', 'ᇟ'), + (0x3171, 'M', 'ᄝ'), + (0x3172, 'M', 'ᄞ'), + (0x3173, 'M', 'ᄠ'), + (0x3174, 'M', 'ᄢ'), + (0x3175, 'M', 'ᄣ'), + (0x3176, 'M', 'ᄧ'), + (0x3177, 'M', 'ᄩ'), + (0x3178, 'M', 'ᄫ'), + (0x3179, 'M', 'ᄬ'), + (0x317A, 'M', 'ᄭ'), + 
(0x317B, 'M', 'ᄮ'), + (0x317C, 'M', 'ᄯ'), + (0x317D, 'M', 'ᄲ'), + (0x317E, 'M', 'ᄶ'), + (0x317F, 'M', 'ᅀ'), + (0x3180, 'M', 'ᅇ'), + ] + +def _seg_30(): + return [ + (0x3181, 'M', 'ᅌ'), + (0x3182, 'M', 'ᇱ'), + (0x3183, 'M', 'ᇲ'), + (0x3184, 'M', 'ᅗ'), + (0x3185, 'M', 'ᅘ'), + (0x3186, 'M', 'ᅙ'), + (0x3187, 'M', 'ᆄ'), + (0x3188, 'M', 'ᆅ'), + (0x3189, 'M', 'ᆈ'), + (0x318A, 'M', 'ᆑ'), + (0x318B, 'M', 'ᆒ'), + (0x318C, 'M', 'ᆔ'), + (0x318D, 'M', 'ᆞ'), + (0x318E, 'M', 'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', '一'), + (0x3193, 'M', '二'), + (0x3194, 'M', '三'), + (0x3195, 'M', '四'), + (0x3196, 'M', '上'), + (0x3197, 'M', '中'), + (0x3198, 'M', '下'), + (0x3199, 'M', '甲'), + (0x319A, 'M', '乙'), + (0x319B, 'M', '丙'), + (0x319C, 'M', '丁'), + (0x319D, 'M', '天'), + (0x319E, 'M', '地'), + (0x319F, 'M', '人'), + (0x31A0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', '(ᄀ)'), + (0x3201, '3', '(ᄂ)'), + (0x3202, '3', '(ᄃ)'), + (0x3203, '3', '(ᄅ)'), + (0x3204, '3', '(ᄆ)'), + (0x3205, '3', '(ᄇ)'), + (0x3206, '3', '(ᄉ)'), + (0x3207, '3', '(ᄋ)'), + (0x3208, '3', '(ᄌ)'), + (0x3209, '3', '(ᄎ)'), + (0x320A, '3', '(ᄏ)'), + (0x320B, '3', '(ᄐ)'), + (0x320C, '3', '(ᄑ)'), + (0x320D, '3', '(ᄒ)'), + (0x320E, '3', '(가)'), + (0x320F, '3', '(나)'), + (0x3210, '3', '(다)'), + (0x3211, '3', '(라)'), + (0x3212, '3', '(마)'), + (0x3213, '3', '(바)'), + (0x3214, '3', '(사)'), + (0x3215, '3', '(아)'), + (0x3216, '3', '(자)'), + (0x3217, '3', '(차)'), + (0x3218, '3', '(카)'), + (0x3219, '3', '(타)'), + (0x321A, '3', '(파)'), + (0x321B, '3', '(하)'), + (0x321C, '3', '(주)'), + (0x321D, '3', '(오전)'), + (0x321E, '3', '(오후)'), + (0x321F, 'X'), + (0x3220, '3', '(一)'), + (0x3221, '3', '(二)'), + (0x3222, '3', '(三)'), + (0x3223, '3', '(四)'), + (0x3224, '3', '(五)'), + (0x3225, '3', '(六)'), + (0x3226, '3', '(七)'), + (0x3227, '3', '(八)'), + (0x3228, '3', '(九)'), + (0x3229, '3', '(十)'), + (0x322A, '3', '(月)'), + (0x322B, '3', '(火)'), + (0x322C, '3', '(水)'), + (0x322D, '3', '(木)'), + (0x322E, '3', '(金)'), + (0x322F, '3', '(土)'), + (0x3230, '3', '(日)'), + (0x3231, '3', '(株)'), + (0x3232, '3', '(有)'), + (0x3233, '3', '(社)'), + (0x3234, '3', '(名)'), + (0x3235, '3', '(特)'), + (0x3236, '3', '(財)'), + (0x3237, '3', '(祝)'), + (0x3238, '3', '(労)'), + (0x3239, '3', '(代)'), + (0x323A, '3', '(呼)'), + (0x323B, '3', '(学)'), + (0x323C, '3', '(監)'), + (0x323D, '3', '(企)'), + (0x323E, '3', '(資)'), + (0x323F, '3', '(協)'), + (0x3240, '3', '(祭)'), + (0x3241, '3', '(休)'), + (0x3242, '3', '(自)'), + ] + +def _seg_31(): + return [ + (0x3243, '3', '(至)'), + (0x3244, 'M', '問'), + (0x3245, 'M', '幼'), + (0x3246, 'M', '文'), + (0x3247, 'M', '箏'), + (0x3248, 'V'), + (0x3250, 'M', 'pte'), + (0x3251, 'M', '21'), + (0x3252, 'M', '22'), + (0x3253, 'M', '23'), + (0x3254, 'M', '24'), + (0x3255, 'M', '25'), + (0x3256, 'M', '26'), + (0x3257, 'M', '27'), + (0x3258, 'M', '28'), + (0x3259, 'M', '29'), + (0x325A, 'M', '30'), + (0x325B, 'M', '31'), + (0x325C, 'M', '32'), + (0x325D, 'M', '33'), + (0x325E, 'M', '34'), + (0x325F, 'M', '35'), + (0x3260, 'M', 'ᄀ'), + (0x3261, 'M', 'ᄂ'), + (0x3262, 'M', 'ᄃ'), + (0x3263, 'M', 'ᄅ'), + (0x3264, 'M', 'ᄆ'), + (0x3265, 'M', 'ᄇ'), + (0x3266, 'M', 'ᄉ'), + (0x3267, 'M', 'ᄋ'), + (0x3268, 'M', 'ᄌ'), + (0x3269, 'M', 'ᄎ'), + (0x326A, 'M', 'ᄏ'), + (0x326B, 'M', 'ᄐ'), + (0x326C, 'M', 'ᄑ'), + (0x326D, 'M', 'ᄒ'), + (0x326E, 'M', '가'), + (0x326F, 'M', '나'), + (0x3270, 'M', '다'), + (0x3271, 'M', '라'), + (0x3272, 'M', '마'), + (0x3273, 'M', '바'), + (0x3274, 'M', '사'), + (0x3275, 'M', '아'), + (0x3276, 'M', '자'), + (0x3277, 'M', '차'), + (0x3278, 'M', '카'), + 
(0x3279, 'M', '타'), + (0x327A, 'M', '파'), + (0x327B, 'M', '하'), + (0x327C, 'M', '참고'), + (0x327D, 'M', '주의'), + (0x327E, 'M', '우'), + (0x327F, 'V'), + (0x3280, 'M', '一'), + (0x3281, 'M', '二'), + (0x3282, 'M', '三'), + (0x3283, 'M', '四'), + (0x3284, 'M', '五'), + (0x3285, 'M', '六'), + (0x3286, 'M', '七'), + (0x3287, 'M', '八'), + (0x3288, 'M', '九'), + (0x3289, 'M', '十'), + (0x328A, 'M', '月'), + (0x328B, 'M', '火'), + (0x328C, 'M', '水'), + (0x328D, 'M', '木'), + (0x328E, 'M', '金'), + (0x328F, 'M', '土'), + (0x3290, 'M', '日'), + (0x3291, 'M', '株'), + (0x3292, 'M', '有'), + (0x3293, 'M', '社'), + (0x3294, 'M', '名'), + (0x3295, 'M', '特'), + (0x3296, 'M', '財'), + (0x3297, 'M', '祝'), + (0x3298, 'M', '労'), + (0x3299, 'M', '秘'), + (0x329A, 'M', '男'), + (0x329B, 'M', '女'), + (0x329C, 'M', '適'), + (0x329D, 'M', '優'), + (0x329E, 'M', '印'), + (0x329F, 'M', '注'), + (0x32A0, 'M', '項'), + (0x32A1, 'M', '休'), + (0x32A2, 'M', '写'), + (0x32A3, 'M', '正'), + (0x32A4, 'M', '上'), + (0x32A5, 'M', '中'), + (0x32A6, 'M', '下'), + (0x32A7, 'M', '左'), + (0x32A8, 'M', '右'), + (0x32A9, 'M', '医'), + (0x32AA, 'M', '宗'), + (0x32AB, 'M', '学'), + (0x32AC, 'M', '監'), + (0x32AD, 'M', '企'), + ] + +def _seg_32(): + return [ + (0x32AE, 'M', '資'), + (0x32AF, 'M', '協'), + (0x32B0, 'M', '夜'), + (0x32B1, 'M', '36'), + (0x32B2, 'M', '37'), + (0x32B3, 'M', '38'), + (0x32B4, 'M', '39'), + (0x32B5, 'M', '40'), + (0x32B6, 'M', '41'), + (0x32B7, 'M', '42'), + (0x32B8, 'M', '43'), + (0x32B9, 'M', '44'), + (0x32BA, 'M', '45'), + (0x32BB, 'M', '46'), + (0x32BC, 'M', '47'), + (0x32BD, 'M', '48'), + (0x32BE, 'M', '49'), + (0x32BF, 'M', '50'), + (0x32C0, 'M', '1月'), + (0x32C1, 'M', '2月'), + (0x32C2, 'M', '3月'), + (0x32C3, 'M', '4月'), + (0x32C4, 'M', '5月'), + (0x32C5, 'M', '6月'), + (0x32C6, 'M', '7月'), + (0x32C7, 'M', '8月'), + (0x32C8, 'M', '9月'), + (0x32C9, 'M', '10月'), + (0x32CA, 'M', '11月'), + (0x32CB, 'M', '12月'), + (0x32CC, 'M', 'hg'), + (0x32CD, 'M', 'erg'), + (0x32CE, 'M', 'ev'), + (0x32CF, 'M', 'ltd'), + (0x32D0, 'M', 'ア'), + (0x32D1, 'M', 'イ'), + (0x32D2, 'M', 'ウ'), + (0x32D3, 'M', 'エ'), + (0x32D4, 'M', 'オ'), + (0x32D5, 'M', 'カ'), + (0x32D6, 'M', 'キ'), + (0x32D7, 'M', 'ク'), + (0x32D8, 'M', 'ケ'), + (0x32D9, 'M', 'コ'), + (0x32DA, 'M', 'サ'), + (0x32DB, 'M', 'シ'), + (0x32DC, 'M', 'ス'), + (0x32DD, 'M', 'セ'), + (0x32DE, 'M', 'ソ'), + (0x32DF, 'M', 'タ'), + (0x32E0, 'M', 'チ'), + (0x32E1, 'M', 'ツ'), + (0x32E2, 'M', 'テ'), + (0x32E3, 'M', 'ト'), + (0x32E4, 'M', 'ナ'), + (0x32E5, 'M', 'ニ'), + (0x32E6, 'M', 'ヌ'), + (0x32E7, 'M', 'ネ'), + (0x32E8, 'M', 'ノ'), + (0x32E9, 'M', 'ハ'), + (0x32EA, 'M', 'ヒ'), + (0x32EB, 'M', 'フ'), + (0x32EC, 'M', 'ヘ'), + (0x32ED, 'M', 'ホ'), + (0x32EE, 'M', 'マ'), + (0x32EF, 'M', 'ミ'), + (0x32F0, 'M', 'ム'), + (0x32F1, 'M', 'メ'), + (0x32F2, 'M', 'モ'), + (0x32F3, 'M', 'ヤ'), + (0x32F4, 'M', 'ユ'), + (0x32F5, 'M', 'ヨ'), + (0x32F6, 'M', 'ラ'), + (0x32F7, 'M', 'リ'), + (0x32F8, 'M', 'ル'), + (0x32F9, 'M', 'レ'), + (0x32FA, 'M', 'ロ'), + (0x32FB, 'M', 'ワ'), + (0x32FC, 'M', 'ヰ'), + (0x32FD, 'M', 'ヱ'), + (0x32FE, 'M', 'ヲ'), + (0x32FF, 'M', '令和'), + (0x3300, 'M', 'アパート'), + (0x3301, 'M', 'アルファ'), + (0x3302, 'M', 'アンペア'), + (0x3303, 'M', 'アール'), + (0x3304, 'M', 'イニング'), + (0x3305, 'M', 'インチ'), + (0x3306, 'M', 'ウォン'), + (0x3307, 'M', 'エスクード'), + (0x3308, 'M', 'エーカー'), + (0x3309, 'M', 'オンス'), + (0x330A, 'M', 'オーム'), + (0x330B, 'M', 'カイリ'), + (0x330C, 'M', 'カラット'), + (0x330D, 'M', 'カロリー'), + (0x330E, 'M', 'ガロン'), + (0x330F, 'M', 'ガンマ'), + (0x3310, 'M', 'ギガ'), + (0x3311, 'M', 'ギニー'), + ] + +def _seg_33(): + return [ + (0x3312, 'M', 'キュリー'), + (0x3313, 'M', 
'ギルダー'), + (0x3314, 'M', 'キロ'), + (0x3315, 'M', 'キログラム'), + (0x3316, 'M', 'キロメートル'), + (0x3317, 'M', 'キロワット'), + (0x3318, 'M', 'グラム'), + (0x3319, 'M', 'グラムトン'), + (0x331A, 'M', 'クルゼイロ'), + (0x331B, 'M', 'クローネ'), + (0x331C, 'M', 'ケース'), + (0x331D, 'M', 'コルナ'), + (0x331E, 'M', 'コーポ'), + (0x331F, 'M', 'サイクル'), + (0x3320, 'M', 'サンチーム'), + (0x3321, 'M', 'シリング'), + (0x3322, 'M', 'センチ'), + (0x3323, 'M', 'セント'), + (0x3324, 'M', 'ダース'), + (0x3325, 'M', 'デシ'), + (0x3326, 'M', 'ドル'), + (0x3327, 'M', 'トン'), + (0x3328, 'M', 'ナノ'), + (0x3329, 'M', 'ノット'), + (0x332A, 'M', 'ハイツ'), + (0x332B, 'M', 'パーセント'), + (0x332C, 'M', 'パーツ'), + (0x332D, 'M', 'バーレル'), + (0x332E, 'M', 'ピアストル'), + (0x332F, 'M', 'ピクル'), + (0x3330, 'M', 'ピコ'), + (0x3331, 'M', 'ビル'), + (0x3332, 'M', 'ファラッド'), + (0x3333, 'M', 'フィート'), + (0x3334, 'M', 'ブッシェル'), + (0x3335, 'M', 'フラン'), + (0x3336, 'M', 'ヘクタール'), + (0x3337, 'M', 'ペソ'), + (0x3338, 'M', 'ペニヒ'), + (0x3339, 'M', 'ヘルツ'), + (0x333A, 'M', 'ペンス'), + (0x333B, 'M', 'ページ'), + (0x333C, 'M', 'ベータ'), + (0x333D, 'M', 'ポイント'), + (0x333E, 'M', 'ボルト'), + (0x333F, 'M', 'ホン'), + (0x3340, 'M', 'ポンド'), + (0x3341, 'M', 'ホール'), + (0x3342, 'M', 'ホーン'), + (0x3343, 'M', 'マイクロ'), + (0x3344, 'M', 'マイル'), + (0x3345, 'M', 'マッハ'), + (0x3346, 'M', 'マルク'), + (0x3347, 'M', 'マンション'), + (0x3348, 'M', 'ミクロン'), + (0x3349, 'M', 'ミリ'), + (0x334A, 'M', 'ミリバール'), + (0x334B, 'M', 'メガ'), + (0x334C, 'M', 'メガトン'), + (0x334D, 'M', 'メートル'), + (0x334E, 'M', 'ヤード'), + (0x334F, 'M', 'ヤール'), + (0x3350, 'M', 'ユアン'), + (0x3351, 'M', 'リットル'), + (0x3352, 'M', 'リラ'), + (0x3353, 'M', 'ルピー'), + (0x3354, 'M', 'ルーブル'), + (0x3355, 'M', 'レム'), + (0x3356, 'M', 'レントゲン'), + (0x3357, 'M', 'ワット'), + (0x3358, 'M', '0点'), + (0x3359, 'M', '1点'), + (0x335A, 'M', '2点'), + (0x335B, 'M', '3点'), + (0x335C, 'M', '4点'), + (0x335D, 'M', '5点'), + (0x335E, 'M', '6点'), + (0x335F, 'M', '7点'), + (0x3360, 'M', '8点'), + (0x3361, 'M', '9点'), + (0x3362, 'M', '10点'), + (0x3363, 'M', '11点'), + (0x3364, 'M', '12点'), + (0x3365, 'M', '13点'), + (0x3366, 'M', '14点'), + (0x3367, 'M', '15点'), + (0x3368, 'M', '16点'), + (0x3369, 'M', '17点'), + (0x336A, 'M', '18点'), + (0x336B, 'M', '19点'), + (0x336C, 'M', '20点'), + (0x336D, 'M', '21点'), + (0x336E, 'M', '22点'), + (0x336F, 'M', '23点'), + (0x3370, 'M', '24点'), + (0x3371, 'M', 'hpa'), + (0x3372, 'M', 'da'), + (0x3373, 'M', 'au'), + (0x3374, 'M', 'bar'), + (0x3375, 'M', 'ov'), + ] + +def _seg_34(): + return [ + (0x3376, 'M', 'pc'), + (0x3377, 'M', 'dm'), + (0x3378, 'M', 'dm2'), + (0x3379, 'M', 'dm3'), + (0x337A, 'M', 'iu'), + (0x337B, 'M', '平成'), + (0x337C, 'M', '昭和'), + (0x337D, 'M', '大正'), + (0x337E, 'M', '明治'), + (0x337F, 'M', '株式会社'), + (0x3380, 'M', 'pa'), + (0x3381, 'M', 'na'), + (0x3382, 'M', 'μa'), + (0x3383, 'M', 'ma'), + (0x3384, 'M', 'ka'), + (0x3385, 'M', 'kb'), + (0x3386, 'M', 'mb'), + (0x3387, 'M', 'gb'), + (0x3388, 'M', 'cal'), + (0x3389, 'M', 'kcal'), + (0x338A, 'M', 'pf'), + (0x338B, 'M', 'nf'), + (0x338C, 'M', 'μf'), + (0x338D, 'M', 'μg'), + (0x338E, 'M', 'mg'), + (0x338F, 'M', 'kg'), + (0x3390, 'M', 'hz'), + (0x3391, 'M', 'khz'), + (0x3392, 'M', 'mhz'), + (0x3393, 'M', 'ghz'), + (0x3394, 'M', 'thz'), + (0x3395, 'M', 'μl'), + (0x3396, 'M', 'ml'), + (0x3397, 'M', 'dl'), + (0x3398, 'M', 'kl'), + (0x3399, 'M', 'fm'), + (0x339A, 'M', 'nm'), + (0x339B, 'M', 'μm'), + (0x339C, 'M', 'mm'), + (0x339D, 'M', 'cm'), + (0x339E, 'M', 'km'), + (0x339F, 'M', 'mm2'), + (0x33A0, 'M', 'cm2'), + (0x33A1, 'M', 'm2'), + (0x33A2, 'M', 'km2'), + (0x33A3, 'M', 'mm3'), + (0x33A4, 'M', 'cm3'), + (0x33A5, 'M', 'm3'), + (0x33A6, 'M', 'km3'), + 
(0x33A7, 'M', 'm∕s'), + (0x33A8, 'M', 'm∕s2'), + (0x33A9, 'M', 'pa'), + (0x33AA, 'M', 'kpa'), + (0x33AB, 'M', 'mpa'), + (0x33AC, 'M', 'gpa'), + (0x33AD, 'M', 'rad'), + (0x33AE, 'M', 'rad∕s'), + (0x33AF, 'M', 'rad∕s2'), + (0x33B0, 'M', 'ps'), + (0x33B1, 'M', 'ns'), + (0x33B2, 'M', 'μs'), + (0x33B3, 'M', 'ms'), + (0x33B4, 'M', 'pv'), + (0x33B5, 'M', 'nv'), + (0x33B6, 'M', 'μv'), + (0x33B7, 'M', 'mv'), + (0x33B8, 'M', 'kv'), + (0x33B9, 'M', 'mv'), + (0x33BA, 'M', 'pw'), + (0x33BB, 'M', 'nw'), + (0x33BC, 'M', 'μw'), + (0x33BD, 'M', 'mw'), + (0x33BE, 'M', 'kw'), + (0x33BF, 'M', 'mw'), + (0x33C0, 'M', 'kω'), + (0x33C1, 'M', 'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', 'bq'), + (0x33C4, 'M', 'cc'), + (0x33C5, 'M', 'cd'), + (0x33C6, 'M', 'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', 'db'), + (0x33C9, 'M', 'gy'), + (0x33CA, 'M', 'ha'), + (0x33CB, 'M', 'hp'), + (0x33CC, 'M', 'in'), + (0x33CD, 'M', 'kk'), + (0x33CE, 'M', 'km'), + (0x33CF, 'M', 'kt'), + (0x33D0, 'M', 'lm'), + (0x33D1, 'M', 'ln'), + (0x33D2, 'M', 'log'), + (0x33D3, 'M', 'lx'), + (0x33D4, 'M', 'mb'), + (0x33D5, 'M', 'mil'), + (0x33D6, 'M', 'mol'), + (0x33D7, 'M', 'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', 'ppm'), + ] + +def _seg_35(): + return [ + (0x33DA, 'M', 'pr'), + (0x33DB, 'M', 'sr'), + (0x33DC, 'M', 'sv'), + (0x33DD, 'M', 'wb'), + (0x33DE, 'M', 'v∕m'), + (0x33DF, 'M', 'a∕m'), + (0x33E0, 'M', '1日'), + (0x33E1, 'M', '2日'), + (0x33E2, 'M', '3日'), + (0x33E3, 'M', '4日'), + (0x33E4, 'M', '5日'), + (0x33E5, 'M', '6日'), + (0x33E6, 'M', '7日'), + (0x33E7, 'M', '8日'), + (0x33E8, 'M', '9日'), + (0x33E9, 'M', '10日'), + (0x33EA, 'M', '11日'), + (0x33EB, 'M', '12日'), + (0x33EC, 'M', '13日'), + (0x33ED, 'M', '14日'), + (0x33EE, 'M', '15日'), + (0x33EF, 'M', '16日'), + (0x33F0, 'M', '17日'), + (0x33F1, 'M', '18日'), + (0x33F2, 'M', '19日'), + (0x33F3, 'M', '20日'), + (0x33F4, 'M', '21日'), + (0x33F5, 'M', '22日'), + (0x33F6, 'M', '23日'), + (0x33F7, 'M', '24日'), + (0x33F8, 'M', '25日'), + (0x33F9, 'M', '26日'), + (0x33FA, 'M', '27日'), + (0x33FB, 'M', '28日'), + (0x33FC, 'M', '29日'), + (0x33FD, 'M', '30日'), + (0x33FE, 'M', '31日'), + (0x33FF, 'M', 'gal'), + (0x3400, 'V'), + (0x9FFD, 'X'), + (0xA000, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', 'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', 'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', 'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', 'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', 'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', 'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', 'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', 'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', 'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', 'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', 'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', 'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', 'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', 'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', 'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', 'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', 'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', 'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', 'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', 'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', 'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', 'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', 'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', 'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', 'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', 'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', 'ꚇ'), + (0xA687, 'V'), + ] + +def _seg_36(): + return [ + (0xA688, 'M', 'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', 'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', 'ꚍ'), + (0xA68D, 'V'), + (0xA68E, 'M', 'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', 'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', 'ꚓ'), + (0xA693, 
'V'), + (0xA694, 'M', 'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', 'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', 'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', 'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', 'ъ'), + (0xA69D, 'M', 'ь'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', 'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', 'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', 'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', 'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', 'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', 'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', 'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', 'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', 'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', 'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', 'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', 'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', 'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', 'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', 'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', 'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', 'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', 'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', 'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', 'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', 'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', 'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', 'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', 'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', 'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', 'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', 'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', 'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', 'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', 'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', 'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', 'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', 'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', 'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', 'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', 'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', 'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', 'ꝯ'), + ] + +def _seg_37(): + return [ + (0xA76F, 'V'), + (0xA770, 'M', 'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', 'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', 'ꝼ'), + (0xA77C, 'V'), + (0xA77D, 'M', 'ᵹ'), + (0xA77E, 'M', 'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', 'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', 'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', 'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', 'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', 'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', 'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', 'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', 'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', 'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', 'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', 'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', 'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', 'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', 'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', 'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', 'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', 'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', 'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', 'ɦ'), + (0xA7AB, 'M', 'ɜ'), + (0xA7AC, 'M', 'ɡ'), + (0xA7AD, 'M', 'ɬ'), + (0xA7AE, 'M', 'ɪ'), + (0xA7AF, 'V'), + (0xA7B0, 'M', 'ʞ'), + (0xA7B1, 'M', 'ʇ'), + (0xA7B2, 'M', 'ʝ'), + (0xA7B3, 'M', 'ꭓ'), + (0xA7B4, 'M', 'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', 'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'M', 'ꞹ'), + (0xA7B9, 'V'), + (0xA7BA, 'M', 'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', 'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', 'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'X'), + (0xA7C2, 'M', 'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', 'ꞔ'), + (0xA7C5, 'M', 'ʂ'), + (0xA7C6, 'M', 'ᶎ'), + (0xA7C7, 'M', 'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', 'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7F5, 'M', 'ꟶ'), + (0xA7F6, 'V'), + (0xA7F8, 'M', 'ħ'), + (0xA7F9, 'M', 'œ'), + (0xA7FA, 'V'), + (0xA82D, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + (0xA8CE, 'V'), 
+ (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + ] + +def _seg_38(): + return [ + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', 'ꜧ'), + (0xAB5D, 'M', 'ꬷ'), + (0xAB5E, 'M', 'ɫ'), + (0xAB5F, 'M', 'ꭒ'), + (0xAB60, 'V'), + (0xAB69, 'M', 'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), + (0xAB70, 'M', 'Ꭰ'), + (0xAB71, 'M', 'Ꭱ'), + (0xAB72, 'M', 'Ꭲ'), + (0xAB73, 'M', 'Ꭳ'), + (0xAB74, 'M', 'Ꭴ'), + (0xAB75, 'M', 'Ꭵ'), + (0xAB76, 'M', 'Ꭶ'), + (0xAB77, 'M', 'Ꭷ'), + (0xAB78, 'M', 'Ꭸ'), + (0xAB79, 'M', 'Ꭹ'), + (0xAB7A, 'M', 'Ꭺ'), + (0xAB7B, 'M', 'Ꭻ'), + (0xAB7C, 'M', 'Ꭼ'), + (0xAB7D, 'M', 'Ꭽ'), + (0xAB7E, 'M', 'Ꭾ'), + (0xAB7F, 'M', 'Ꭿ'), + (0xAB80, 'M', 'Ꮀ'), + (0xAB81, 'M', 'Ꮁ'), + (0xAB82, 'M', 'Ꮂ'), + (0xAB83, 'M', 'Ꮃ'), + (0xAB84, 'M', 'Ꮄ'), + (0xAB85, 'M', 'Ꮅ'), + (0xAB86, 'M', 'Ꮆ'), + (0xAB87, 'M', 'Ꮇ'), + (0xAB88, 'M', 'Ꮈ'), + (0xAB89, 'M', 'Ꮉ'), + (0xAB8A, 'M', 'Ꮊ'), + (0xAB8B, 'M', 'Ꮋ'), + (0xAB8C, 'M', 'Ꮌ'), + (0xAB8D, 'M', 'Ꮍ'), + (0xAB8E, 'M', 'Ꮎ'), + (0xAB8F, 'M', 'Ꮏ'), + (0xAB90, 'M', 'Ꮐ'), + (0xAB91, 'M', 'Ꮑ'), + (0xAB92, 'M', 'Ꮒ'), + (0xAB93, 'M', 'Ꮓ'), + (0xAB94, 'M', 'Ꮔ'), + (0xAB95, 'M', 'Ꮕ'), + (0xAB96, 'M', 'Ꮖ'), + (0xAB97, 'M', 'Ꮗ'), + (0xAB98, 'M', 'Ꮘ'), + (0xAB99, 'M', 'Ꮙ'), + (0xAB9A, 'M', 'Ꮚ'), + (0xAB9B, 'M', 'Ꮛ'), + (0xAB9C, 'M', 'Ꮜ'), + (0xAB9D, 'M', 'Ꮝ'), + (0xAB9E, 'M', 'Ꮞ'), + (0xAB9F, 'M', 'Ꮟ'), + (0xABA0, 'M', 'Ꮠ'), + (0xABA1, 'M', 'Ꮡ'), + (0xABA2, 'M', 'Ꮢ'), + (0xABA3, 'M', 'Ꮣ'), + (0xABA4, 'M', 'Ꮤ'), + (0xABA5, 'M', 'Ꮥ'), + (0xABA6, 'M', 'Ꮦ'), + (0xABA7, 'M', 'Ꮧ'), + (0xABA8, 'M', 'Ꮨ'), + (0xABA9, 'M', 'Ꮩ'), + (0xABAA, 'M', 'Ꮪ'), + (0xABAB, 'M', 'Ꮫ'), + (0xABAC, 'M', 'Ꮬ'), + (0xABAD, 'M', 'Ꮭ'), + (0xABAE, 'M', 'Ꮮ'), + (0xABAF, 'M', 'Ꮯ'), + (0xABB0, 'M', 'Ꮰ'), + (0xABB1, 'M', 'Ꮱ'), + (0xABB2, 'M', 'Ꮲ'), + (0xABB3, 'M', 'Ꮳ'), + ] + +def _seg_39(): + return [ + (0xABB4, 'M', 'Ꮴ'), + (0xABB5, 'M', 'Ꮵ'), + (0xABB6, 'M', 'Ꮶ'), + (0xABB7, 'M', 'Ꮷ'), + (0xABB8, 'M', 'Ꮸ'), + (0xABB9, 'M', 'Ꮹ'), + (0xABBA, 'M', 'Ꮺ'), + (0xABBB, 'M', 'Ꮻ'), + (0xABBC, 'M', 'Ꮼ'), + (0xABBD, 'M', 'Ꮽ'), + (0xABBE, 'M', 'Ꮾ'), + (0xABBF, 'M', 'Ꮿ'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', '豈'), + (0xF901, 'M', '更'), + (0xF902, 'M', '車'), + (0xF903, 'M', '賈'), + (0xF904, 'M', '滑'), + (0xF905, 'M', '串'), + (0xF906, 'M', '句'), + (0xF907, 'M', '龜'), + (0xF909, 'M', '契'), + (0xF90A, 'M', '金'), + (0xF90B, 'M', '喇'), + (0xF90C, 'M', '奈'), + (0xF90D, 'M', '懶'), + (0xF90E, 'M', '癩'), + (0xF90F, 'M', '羅'), + (0xF910, 'M', '蘿'), + (0xF911, 'M', '螺'), + (0xF912, 'M', '裸'), + (0xF913, 'M', '邏'), + (0xF914, 'M', '樂'), + (0xF915, 'M', '洛'), + (0xF916, 'M', '烙'), + (0xF917, 'M', '珞'), + (0xF918, 'M', '落'), + (0xF919, 'M', '酪'), + (0xF91A, 'M', '駱'), + (0xF91B, 'M', '亂'), + (0xF91C, 'M', '卵'), + (0xF91D, 'M', '欄'), + (0xF91E, 'M', '爛'), + (0xF91F, 'M', '蘭'), + (0xF920, 'M', '鸞'), + (0xF921, 'M', '嵐'), + (0xF922, 'M', '濫'), + (0xF923, 'M', '藍'), + (0xF924, 'M', '襤'), + (0xF925, 'M', '拉'), + (0xF926, 'M', '臘'), + (0xF927, 'M', 
'蠟'), + (0xF928, 'M', '廊'), + (0xF929, 'M', '朗'), + (0xF92A, 'M', '浪'), + (0xF92B, 'M', '狼'), + (0xF92C, 'M', '郎'), + (0xF92D, 'M', '來'), + (0xF92E, 'M', '冷'), + (0xF92F, 'M', '勞'), + (0xF930, 'M', '擄'), + (0xF931, 'M', '櫓'), + (0xF932, 'M', '爐'), + (0xF933, 'M', '盧'), + (0xF934, 'M', '老'), + (0xF935, 'M', '蘆'), + (0xF936, 'M', '虜'), + (0xF937, 'M', '路'), + (0xF938, 'M', '露'), + (0xF939, 'M', '魯'), + (0xF93A, 'M', '鷺'), + (0xF93B, 'M', '碌'), + (0xF93C, 'M', '祿'), + (0xF93D, 'M', '綠'), + (0xF93E, 'M', '菉'), + (0xF93F, 'M', '錄'), + (0xF940, 'M', '鹿'), + (0xF941, 'M', '論'), + (0xF942, 'M', '壟'), + (0xF943, 'M', '弄'), + (0xF944, 'M', '籠'), + (0xF945, 'M', '聾'), + (0xF946, 'M', '牢'), + (0xF947, 'M', '磊'), + (0xF948, 'M', '賂'), + (0xF949, 'M', '雷'), + (0xF94A, 'M', '壘'), + (0xF94B, 'M', '屢'), + (0xF94C, 'M', '樓'), + (0xF94D, 'M', '淚'), + (0xF94E, 'M', '漏'), + ] + +def _seg_40(): + return [ + (0xF94F, 'M', '累'), + (0xF950, 'M', '縷'), + (0xF951, 'M', '陋'), + (0xF952, 'M', '勒'), + (0xF953, 'M', '肋'), + (0xF954, 'M', '凜'), + (0xF955, 'M', '凌'), + (0xF956, 'M', '稜'), + (0xF957, 'M', '綾'), + (0xF958, 'M', '菱'), + (0xF959, 'M', '陵'), + (0xF95A, 'M', '讀'), + (0xF95B, 'M', '拏'), + (0xF95C, 'M', '樂'), + (0xF95D, 'M', '諾'), + (0xF95E, 'M', '丹'), + (0xF95F, 'M', '寧'), + (0xF960, 'M', '怒'), + (0xF961, 'M', '率'), + (0xF962, 'M', '異'), + (0xF963, 'M', '北'), + (0xF964, 'M', '磻'), + (0xF965, 'M', '便'), + (0xF966, 'M', '復'), + (0xF967, 'M', '不'), + (0xF968, 'M', '泌'), + (0xF969, 'M', '數'), + (0xF96A, 'M', '索'), + (0xF96B, 'M', '參'), + (0xF96C, 'M', '塞'), + (0xF96D, 'M', '省'), + (0xF96E, 'M', '葉'), + (0xF96F, 'M', '說'), + (0xF970, 'M', '殺'), + (0xF971, 'M', '辰'), + (0xF972, 'M', '沈'), + (0xF973, 'M', '拾'), + (0xF974, 'M', '若'), + (0xF975, 'M', '掠'), + (0xF976, 'M', '略'), + (0xF977, 'M', '亮'), + (0xF978, 'M', '兩'), + (0xF979, 'M', '凉'), + (0xF97A, 'M', '梁'), + (0xF97B, 'M', '糧'), + (0xF97C, 'M', '良'), + (0xF97D, 'M', '諒'), + (0xF97E, 'M', '量'), + (0xF97F, 'M', '勵'), + (0xF980, 'M', '呂'), + (0xF981, 'M', '女'), + (0xF982, 'M', '廬'), + (0xF983, 'M', '旅'), + (0xF984, 'M', '濾'), + (0xF985, 'M', '礪'), + (0xF986, 'M', '閭'), + (0xF987, 'M', '驪'), + (0xF988, 'M', '麗'), + (0xF989, 'M', '黎'), + (0xF98A, 'M', '力'), + (0xF98B, 'M', '曆'), + (0xF98C, 'M', '歷'), + (0xF98D, 'M', '轢'), + (0xF98E, 'M', '年'), + (0xF98F, 'M', '憐'), + (0xF990, 'M', '戀'), + (0xF991, 'M', '撚'), + (0xF992, 'M', '漣'), + (0xF993, 'M', '煉'), + (0xF994, 'M', '璉'), + (0xF995, 'M', '秊'), + (0xF996, 'M', '練'), + (0xF997, 'M', '聯'), + (0xF998, 'M', '輦'), + (0xF999, 'M', '蓮'), + (0xF99A, 'M', '連'), + (0xF99B, 'M', '鍊'), + (0xF99C, 'M', '列'), + (0xF99D, 'M', '劣'), + (0xF99E, 'M', '咽'), + (0xF99F, 'M', '烈'), + (0xF9A0, 'M', '裂'), + (0xF9A1, 'M', '說'), + (0xF9A2, 'M', '廉'), + (0xF9A3, 'M', '念'), + (0xF9A4, 'M', '捻'), + (0xF9A5, 'M', '殮'), + (0xF9A6, 'M', '簾'), + (0xF9A7, 'M', '獵'), + (0xF9A8, 'M', '令'), + (0xF9A9, 'M', '囹'), + (0xF9AA, 'M', '寧'), + (0xF9AB, 'M', '嶺'), + (0xF9AC, 'M', '怜'), + (0xF9AD, 'M', '玲'), + (0xF9AE, 'M', '瑩'), + (0xF9AF, 'M', '羚'), + (0xF9B0, 'M', '聆'), + (0xF9B1, 'M', '鈴'), + (0xF9B2, 'M', '零'), + ] + +def _seg_41(): + return [ + (0xF9B3, 'M', '靈'), + (0xF9B4, 'M', '領'), + (0xF9B5, 'M', '例'), + (0xF9B6, 'M', '禮'), + (0xF9B7, 'M', '醴'), + (0xF9B8, 'M', '隸'), + (0xF9B9, 'M', '惡'), + (0xF9BA, 'M', '了'), + (0xF9BB, 'M', '僚'), + (0xF9BC, 'M', '寮'), + (0xF9BD, 'M', '尿'), + (0xF9BE, 'M', '料'), + (0xF9BF, 'M', '樂'), + (0xF9C0, 'M', '燎'), + (0xF9C1, 'M', '療'), + (0xF9C2, 'M', '蓼'), + (0xF9C3, 'M', '遼'), + (0xF9C4, 'M', '龍'), + (0xF9C5, 'M', '暈'), + 
(0xF9C6, 'M', '阮'), + (0xF9C7, 'M', '劉'), + (0xF9C8, 'M', '杻'), + (0xF9C9, 'M', '柳'), + (0xF9CA, 'M', '流'), + (0xF9CB, 'M', '溜'), + (0xF9CC, 'M', '琉'), + (0xF9CD, 'M', '留'), + (0xF9CE, 'M', '硫'), + (0xF9CF, 'M', '紐'), + (0xF9D0, 'M', '類'), + (0xF9D1, 'M', '六'), + (0xF9D2, 'M', '戮'), + (0xF9D3, 'M', '陸'), + (0xF9D4, 'M', '倫'), + (0xF9D5, 'M', '崙'), + (0xF9D6, 'M', '淪'), + (0xF9D7, 'M', '輪'), + (0xF9D8, 'M', '律'), + (0xF9D9, 'M', '慄'), + (0xF9DA, 'M', '栗'), + (0xF9DB, 'M', '率'), + (0xF9DC, 'M', '隆'), + (0xF9DD, 'M', '利'), + (0xF9DE, 'M', '吏'), + (0xF9DF, 'M', '履'), + (0xF9E0, 'M', '易'), + (0xF9E1, 'M', '李'), + (0xF9E2, 'M', '梨'), + (0xF9E3, 'M', '泥'), + (0xF9E4, 'M', '理'), + (0xF9E5, 'M', '痢'), + (0xF9E6, 'M', '罹'), + (0xF9E7, 'M', '裏'), + (0xF9E8, 'M', '裡'), + (0xF9E9, 'M', '里'), + (0xF9EA, 'M', '離'), + (0xF9EB, 'M', '匿'), + (0xF9EC, 'M', '溺'), + (0xF9ED, 'M', '吝'), + (0xF9EE, 'M', '燐'), + (0xF9EF, 'M', '璘'), + (0xF9F0, 'M', '藺'), + (0xF9F1, 'M', '隣'), + (0xF9F2, 'M', '鱗'), + (0xF9F3, 'M', '麟'), + (0xF9F4, 'M', '林'), + (0xF9F5, 'M', '淋'), + (0xF9F6, 'M', '臨'), + (0xF9F7, 'M', '立'), + (0xF9F8, 'M', '笠'), + (0xF9F9, 'M', '粒'), + (0xF9FA, 'M', '狀'), + (0xF9FB, 'M', '炙'), + (0xF9FC, 'M', '識'), + (0xF9FD, 'M', '什'), + (0xF9FE, 'M', '茶'), + (0xF9FF, 'M', '刺'), + (0xFA00, 'M', '切'), + (0xFA01, 'M', '度'), + (0xFA02, 'M', '拓'), + (0xFA03, 'M', '糖'), + (0xFA04, 'M', '宅'), + (0xFA05, 'M', '洞'), + (0xFA06, 'M', '暴'), + (0xFA07, 'M', '輻'), + (0xFA08, 'M', '行'), + (0xFA09, 'M', '降'), + (0xFA0A, 'M', '見'), + (0xFA0B, 'M', '廓'), + (0xFA0C, 'M', '兀'), + (0xFA0D, 'M', '嗀'), + (0xFA0E, 'V'), + (0xFA10, 'M', '塚'), + (0xFA11, 'V'), + (0xFA12, 'M', '晴'), + (0xFA13, 'V'), + (0xFA15, 'M', '凞'), + (0xFA16, 'M', '猪'), + (0xFA17, 'M', '益'), + (0xFA18, 'M', '礼'), + ] + +def _seg_42(): + return [ + (0xFA19, 'M', '神'), + (0xFA1A, 'M', '祥'), + (0xFA1B, 'M', '福'), + (0xFA1C, 'M', '靖'), + (0xFA1D, 'M', '精'), + (0xFA1E, 'M', '羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', '蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', '諸'), + (0xFA23, 'V'), + (0xFA25, 'M', '逸'), + (0xFA26, 'M', '都'), + (0xFA27, 'V'), + (0xFA2A, 'M', '飯'), + (0xFA2B, 'M', '飼'), + (0xFA2C, 'M', '館'), + (0xFA2D, 'M', '鶴'), + (0xFA2E, 'M', '郞'), + (0xFA2F, 'M', '隷'), + (0xFA30, 'M', '侮'), + (0xFA31, 'M', '僧'), + (0xFA32, 'M', '免'), + (0xFA33, 'M', '勉'), + (0xFA34, 'M', '勤'), + (0xFA35, 'M', '卑'), + (0xFA36, 'M', '喝'), + (0xFA37, 'M', '嘆'), + (0xFA38, 'M', '器'), + (0xFA39, 'M', '塀'), + (0xFA3A, 'M', '墨'), + (0xFA3B, 'M', '層'), + (0xFA3C, 'M', '屮'), + (0xFA3D, 'M', '悔'), + (0xFA3E, 'M', '慨'), + (0xFA3F, 'M', '憎'), + (0xFA40, 'M', '懲'), + (0xFA41, 'M', '敏'), + (0xFA42, 'M', '既'), + (0xFA43, 'M', '暑'), + (0xFA44, 'M', '梅'), + (0xFA45, 'M', '海'), + (0xFA46, 'M', '渚'), + (0xFA47, 'M', '漢'), + (0xFA48, 'M', '煮'), + (0xFA49, 'M', '爫'), + (0xFA4A, 'M', '琢'), + (0xFA4B, 'M', '碑'), + (0xFA4C, 'M', '社'), + (0xFA4D, 'M', '祉'), + (0xFA4E, 'M', '祈'), + (0xFA4F, 'M', '祐'), + (0xFA50, 'M', '祖'), + (0xFA51, 'M', '祝'), + (0xFA52, 'M', '禍'), + (0xFA53, 'M', '禎'), + (0xFA54, 'M', '穀'), + (0xFA55, 'M', '突'), + (0xFA56, 'M', '節'), + (0xFA57, 'M', '練'), + (0xFA58, 'M', '縉'), + (0xFA59, 'M', '繁'), + (0xFA5A, 'M', '署'), + (0xFA5B, 'M', '者'), + (0xFA5C, 'M', '臭'), + (0xFA5D, 'M', '艹'), + (0xFA5F, 'M', '著'), + (0xFA60, 'M', '褐'), + (0xFA61, 'M', '視'), + (0xFA62, 'M', '謁'), + (0xFA63, 'M', '謹'), + (0xFA64, 'M', '賓'), + (0xFA65, 'M', '贈'), + (0xFA66, 'M', '辶'), + (0xFA67, 'M', '逸'), + (0xFA68, 'M', '難'), + (0xFA69, 'M', '響'), + (0xFA6A, 'M', '頻'), + (0xFA6B, 'M', '恵'), + (0xFA6C, 'M', '𤋮'), + (0xFA6D, 'M', 
'舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', '並'), + (0xFA71, 'M', '况'), + (0xFA72, 'M', '全'), + (0xFA73, 'M', '侀'), + (0xFA74, 'M', '充'), + (0xFA75, 'M', '冀'), + (0xFA76, 'M', '勇'), + (0xFA77, 'M', '勺'), + (0xFA78, 'M', '喝'), + (0xFA79, 'M', '啕'), + (0xFA7A, 'M', '喙'), + (0xFA7B, 'M', '嗢'), + (0xFA7C, 'M', '塚'), + (0xFA7D, 'M', '墳'), + (0xFA7E, 'M', '奄'), + (0xFA7F, 'M', '奔'), + (0xFA80, 'M', '婢'), + (0xFA81, 'M', '嬨'), + ] + +def _seg_43(): + return [ + (0xFA82, 'M', '廒'), + (0xFA83, 'M', '廙'), + (0xFA84, 'M', '彩'), + (0xFA85, 'M', '徭'), + (0xFA86, 'M', '惘'), + (0xFA87, 'M', '慎'), + (0xFA88, 'M', '愈'), + (0xFA89, 'M', '憎'), + (0xFA8A, 'M', '慠'), + (0xFA8B, 'M', '懲'), + (0xFA8C, 'M', '戴'), + (0xFA8D, 'M', '揄'), + (0xFA8E, 'M', '搜'), + (0xFA8F, 'M', '摒'), + (0xFA90, 'M', '敖'), + (0xFA91, 'M', '晴'), + (0xFA92, 'M', '朗'), + (0xFA93, 'M', '望'), + (0xFA94, 'M', '杖'), + (0xFA95, 'M', '歹'), + (0xFA96, 'M', '殺'), + (0xFA97, 'M', '流'), + (0xFA98, 'M', '滛'), + (0xFA99, 'M', '滋'), + (0xFA9A, 'M', '漢'), + (0xFA9B, 'M', '瀞'), + (0xFA9C, 'M', '煮'), + (0xFA9D, 'M', '瞧'), + (0xFA9E, 'M', '爵'), + (0xFA9F, 'M', '犯'), + (0xFAA0, 'M', '猪'), + (0xFAA1, 'M', '瑱'), + (0xFAA2, 'M', '甆'), + (0xFAA3, 'M', '画'), + (0xFAA4, 'M', '瘝'), + (0xFAA5, 'M', '瘟'), + (0xFAA6, 'M', '益'), + (0xFAA7, 'M', '盛'), + (0xFAA8, 'M', '直'), + (0xFAA9, 'M', '睊'), + (0xFAAA, 'M', '着'), + (0xFAAB, 'M', '磌'), + (0xFAAC, 'M', '窱'), + (0xFAAD, 'M', '節'), + (0xFAAE, 'M', '类'), + (0xFAAF, 'M', '絛'), + (0xFAB0, 'M', '練'), + (0xFAB1, 'M', '缾'), + (0xFAB2, 'M', '者'), + (0xFAB3, 'M', '荒'), + (0xFAB4, 'M', '華'), + (0xFAB5, 'M', '蝹'), + (0xFAB6, 'M', '襁'), + (0xFAB7, 'M', '覆'), + (0xFAB8, 'M', '視'), + (0xFAB9, 'M', '調'), + (0xFABA, 'M', '諸'), + (0xFABB, 'M', '請'), + (0xFABC, 'M', '謁'), + (0xFABD, 'M', '諾'), + (0xFABE, 'M', '諭'), + (0xFABF, 'M', '謹'), + (0xFAC0, 'M', '變'), + (0xFAC1, 'M', '贈'), + (0xFAC2, 'M', '輸'), + (0xFAC3, 'M', '遲'), + (0xFAC4, 'M', '醙'), + (0xFAC5, 'M', '鉶'), + (0xFAC6, 'M', '陼'), + (0xFAC7, 'M', '難'), + (0xFAC8, 'M', '靖'), + (0xFAC9, 'M', '韛'), + (0xFACA, 'M', '響'), + (0xFACB, 'M', '頋'), + (0xFACC, 'M', '頻'), + (0xFACD, 'M', '鬒'), + (0xFACE, 'M', '龜'), + (0xFACF, 'M', '𢡊'), + (0xFAD0, 'M', '𢡄'), + (0xFAD1, 'M', '𣏕'), + (0xFAD2, 'M', '㮝'), + (0xFAD3, 'M', '䀘'), + (0xFAD4, 'M', '䀹'), + (0xFAD5, 'M', '𥉉'), + (0xFAD6, 'M', '𥳐'), + (0xFAD7, 'M', '𧻓'), + (0xFAD8, 'M', '齃'), + (0xFAD9, 'M', '龎'), + (0xFADA, 'X'), + (0xFB00, 'M', 'ff'), + (0xFB01, 'M', 'fi'), + (0xFB02, 'M', 'fl'), + (0xFB03, 'M', 'ffi'), + (0xFB04, 'M', 'ffl'), + (0xFB05, 'M', 'st'), + (0xFB07, 'X'), + (0xFB13, 'M', 'մն'), + (0xFB14, 'M', 'մե'), + (0xFB15, 'M', 'մի'), + (0xFB16, 'M', 'վն'), + ] + +def _seg_44(): + return [ + (0xFB17, 'M', 'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', 'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', 'ײַ'), + (0xFB20, 'M', 'ע'), + (0xFB21, 'M', 'א'), + (0xFB22, 'M', 'ד'), + (0xFB23, 'M', 'ה'), + (0xFB24, 'M', 'כ'), + (0xFB25, 'M', 'ל'), + (0xFB26, 'M', 'ם'), + (0xFB27, 'M', 'ר'), + (0xFB28, 'M', 'ת'), + (0xFB29, '3', '+'), + (0xFB2A, 'M', 'שׁ'), + (0xFB2B, 'M', 'שׂ'), + (0xFB2C, 'M', 'שּׁ'), + (0xFB2D, 'M', 'שּׂ'), + (0xFB2E, 'M', 'אַ'), + (0xFB2F, 'M', 'אָ'), + (0xFB30, 'M', 'אּ'), + (0xFB31, 'M', 'בּ'), + (0xFB32, 'M', 'גּ'), + (0xFB33, 'M', 'דּ'), + (0xFB34, 'M', 'הּ'), + (0xFB35, 'M', 'וּ'), + (0xFB36, 'M', 'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', 'טּ'), + (0xFB39, 'M', 'יּ'), + (0xFB3A, 'M', 'ךּ'), + (0xFB3B, 'M', 'כּ'), + (0xFB3C, 'M', 'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', 'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', 'נּ'), + (0xFB41, 'M', 'סּ'), + 
(0xFB42, 'X'), + (0xFB43, 'M', 'ףּ'), + (0xFB44, 'M', 'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', 'צּ'), + (0xFB47, 'M', 'קּ'), + (0xFB48, 'M', 'רּ'), + (0xFB49, 'M', 'שּ'), + (0xFB4A, 'M', 'תּ'), + (0xFB4B, 'M', 'וֹ'), + (0xFB4C, 'M', 'בֿ'), + (0xFB4D, 'M', 'כֿ'), + (0xFB4E, 'M', 'פֿ'), + (0xFB4F, 'M', 'אל'), + (0xFB50, 'M', 'ٱ'), + (0xFB52, 'M', 'ٻ'), + (0xFB56, 'M', 'پ'), + (0xFB5A, 'M', 'ڀ'), + (0xFB5E, 'M', 'ٺ'), + (0xFB62, 'M', 'ٿ'), + (0xFB66, 'M', 'ٹ'), + (0xFB6A, 'M', 'ڤ'), + (0xFB6E, 'M', 'ڦ'), + (0xFB72, 'M', 'ڄ'), + (0xFB76, 'M', 'ڃ'), + (0xFB7A, 'M', 'چ'), + (0xFB7E, 'M', 'ڇ'), + (0xFB82, 'M', 'ڍ'), + (0xFB84, 'M', 'ڌ'), + (0xFB86, 'M', 'ڎ'), + (0xFB88, 'M', 'ڈ'), + (0xFB8A, 'M', 'ژ'), + (0xFB8C, 'M', 'ڑ'), + (0xFB8E, 'M', 'ک'), + (0xFB92, 'M', 'گ'), + (0xFB96, 'M', 'ڳ'), + (0xFB9A, 'M', 'ڱ'), + (0xFB9E, 'M', 'ں'), + (0xFBA0, 'M', 'ڻ'), + (0xFBA4, 'M', 'ۀ'), + (0xFBA6, 'M', 'ہ'), + (0xFBAA, 'M', 'ھ'), + (0xFBAE, 'M', 'ے'), + (0xFBB0, 'M', 'ۓ'), + (0xFBB2, 'V'), + (0xFBC2, 'X'), + (0xFBD3, 'M', 'ڭ'), + (0xFBD7, 'M', 'ۇ'), + (0xFBD9, 'M', 'ۆ'), + (0xFBDB, 'M', 'ۈ'), + (0xFBDD, 'M', 'ۇٴ'), + (0xFBDE, 'M', 'ۋ'), + (0xFBE0, 'M', 'ۅ'), + (0xFBE2, 'M', 'ۉ'), + (0xFBE4, 'M', 'ې'), + (0xFBE8, 'M', 'ى'), + (0xFBEA, 'M', 'ئا'), + (0xFBEC, 'M', 'ئە'), + (0xFBEE, 'M', 'ئو'), + (0xFBF0, 'M', 'ئۇ'), + (0xFBF2, 'M', 'ئۆ'), + ] + +def _seg_45(): + return [ + (0xFBF4, 'M', 'ئۈ'), + (0xFBF6, 'M', 'ئې'), + (0xFBF9, 'M', 'ئى'), + (0xFBFC, 'M', 'ی'), + (0xFC00, 'M', 'ئج'), + (0xFC01, 'M', 'ئح'), + (0xFC02, 'M', 'ئم'), + (0xFC03, 'M', 'ئى'), + (0xFC04, 'M', 'ئي'), + (0xFC05, 'M', 'بج'), + (0xFC06, 'M', 'بح'), + (0xFC07, 'M', 'بخ'), + (0xFC08, 'M', 'بم'), + (0xFC09, 'M', 'بى'), + (0xFC0A, 'M', 'بي'), + (0xFC0B, 'M', 'تج'), + (0xFC0C, 'M', 'تح'), + (0xFC0D, 'M', 'تخ'), + (0xFC0E, 'M', 'تم'), + (0xFC0F, 'M', 'تى'), + (0xFC10, 'M', 'تي'), + (0xFC11, 'M', 'ثج'), + (0xFC12, 'M', 'ثم'), + (0xFC13, 'M', 'ثى'), + (0xFC14, 'M', 'ثي'), + (0xFC15, 'M', 'جح'), + (0xFC16, 'M', 'جم'), + (0xFC17, 'M', 'حج'), + (0xFC18, 'M', 'حم'), + (0xFC19, 'M', 'خج'), + (0xFC1A, 'M', 'خح'), + (0xFC1B, 'M', 'خم'), + (0xFC1C, 'M', 'سج'), + (0xFC1D, 'M', 'سح'), + (0xFC1E, 'M', 'سخ'), + (0xFC1F, 'M', 'سم'), + (0xFC20, 'M', 'صح'), + (0xFC21, 'M', 'صم'), + (0xFC22, 'M', 'ضج'), + (0xFC23, 'M', 'ضح'), + (0xFC24, 'M', 'ضخ'), + (0xFC25, 'M', 'ضم'), + (0xFC26, 'M', 'طح'), + (0xFC27, 'M', 'طم'), + (0xFC28, 'M', 'ظم'), + (0xFC29, 'M', 'عج'), + (0xFC2A, 'M', 'عم'), + (0xFC2B, 'M', 'غج'), + (0xFC2C, 'M', 'غم'), + (0xFC2D, 'M', 'فج'), + (0xFC2E, 'M', 'فح'), + (0xFC2F, 'M', 'فخ'), + (0xFC30, 'M', 'فم'), + (0xFC31, 'M', 'فى'), + (0xFC32, 'M', 'في'), + (0xFC33, 'M', 'قح'), + (0xFC34, 'M', 'قم'), + (0xFC35, 'M', 'قى'), + (0xFC36, 'M', 'قي'), + (0xFC37, 'M', 'كا'), + (0xFC38, 'M', 'كج'), + (0xFC39, 'M', 'كح'), + (0xFC3A, 'M', 'كخ'), + (0xFC3B, 'M', 'كل'), + (0xFC3C, 'M', 'كم'), + (0xFC3D, 'M', 'كى'), + (0xFC3E, 'M', 'كي'), + (0xFC3F, 'M', 'لج'), + (0xFC40, 'M', 'لح'), + (0xFC41, 'M', 'لخ'), + (0xFC42, 'M', 'لم'), + (0xFC43, 'M', 'لى'), + (0xFC44, 'M', 'لي'), + (0xFC45, 'M', 'مج'), + (0xFC46, 'M', 'مح'), + (0xFC47, 'M', 'مخ'), + (0xFC48, 'M', 'مم'), + (0xFC49, 'M', 'مى'), + (0xFC4A, 'M', 'مي'), + (0xFC4B, 'M', 'نج'), + (0xFC4C, 'M', 'نح'), + (0xFC4D, 'M', 'نخ'), + (0xFC4E, 'M', 'نم'), + (0xFC4F, 'M', 'نى'), + (0xFC50, 'M', 'ني'), + (0xFC51, 'M', 'هج'), + (0xFC52, 'M', 'هم'), + (0xFC53, 'M', 'هى'), + (0xFC54, 'M', 'هي'), + (0xFC55, 'M', 'يج'), + (0xFC56, 'M', 'يح'), + (0xFC57, 'M', 'يخ'), + (0xFC58, 'M', 'يم'), + (0xFC59, 'M', 'يى'), + (0xFC5A, 'M', 'يي'), 
+ (0xFC5B, 'M', 'ذٰ'), + (0xFC5C, 'M', 'رٰ'), + (0xFC5D, 'M', 'ىٰ'), + (0xFC5E, '3', ' ٌّ'), + (0xFC5F, '3', ' ٍّ'), + ] + +def _seg_46(): + return [ + (0xFC60, '3', ' َّ'), + (0xFC61, '3', ' ُّ'), + (0xFC62, '3', ' ِّ'), + (0xFC63, '3', ' ّٰ'), + (0xFC64, 'M', 'ئر'), + (0xFC65, 'M', 'ئز'), + (0xFC66, 'M', 'ئم'), + (0xFC67, 'M', 'ئن'), + (0xFC68, 'M', 'ئى'), + (0xFC69, 'M', 'ئي'), + (0xFC6A, 'M', 'بر'), + (0xFC6B, 'M', 'بز'), + (0xFC6C, 'M', 'بم'), + (0xFC6D, 'M', 'بن'), + (0xFC6E, 'M', 'بى'), + (0xFC6F, 'M', 'بي'), + (0xFC70, 'M', 'تر'), + (0xFC71, 'M', 'تز'), + (0xFC72, 'M', 'تم'), + (0xFC73, 'M', 'تن'), + (0xFC74, 'M', 'تى'), + (0xFC75, 'M', 'تي'), + (0xFC76, 'M', 'ثر'), + (0xFC77, 'M', 'ثز'), + (0xFC78, 'M', 'ثم'), + (0xFC79, 'M', 'ثن'), + (0xFC7A, 'M', 'ثى'), + (0xFC7B, 'M', 'ثي'), + (0xFC7C, 'M', 'فى'), + (0xFC7D, 'M', 'في'), + (0xFC7E, 'M', 'قى'), + (0xFC7F, 'M', 'قي'), + (0xFC80, 'M', 'كا'), + (0xFC81, 'M', 'كل'), + (0xFC82, 'M', 'كم'), + (0xFC83, 'M', 'كى'), + (0xFC84, 'M', 'كي'), + (0xFC85, 'M', 'لم'), + (0xFC86, 'M', 'لى'), + (0xFC87, 'M', 'لي'), + (0xFC88, 'M', 'ما'), + (0xFC89, 'M', 'مم'), + (0xFC8A, 'M', 'نر'), + (0xFC8B, 'M', 'نز'), + (0xFC8C, 'M', 'نم'), + (0xFC8D, 'M', 'نن'), + (0xFC8E, 'M', 'نى'), + (0xFC8F, 'M', 'ني'), + (0xFC90, 'M', 'ىٰ'), + (0xFC91, 'M', 'ير'), + (0xFC92, 'M', 'يز'), + (0xFC93, 'M', 'يم'), + (0xFC94, 'M', 'ين'), + (0xFC95, 'M', 'يى'), + (0xFC96, 'M', 'يي'), + (0xFC97, 'M', 'ئج'), + (0xFC98, 'M', 'ئح'), + (0xFC99, 'M', 'ئخ'), + (0xFC9A, 'M', 'ئم'), + (0xFC9B, 'M', 'ئه'), + (0xFC9C, 'M', 'بج'), + (0xFC9D, 'M', 'بح'), + (0xFC9E, 'M', 'بخ'), + (0xFC9F, 'M', 'بم'), + (0xFCA0, 'M', 'به'), + (0xFCA1, 'M', 'تج'), + (0xFCA2, 'M', 'تح'), + (0xFCA3, 'M', 'تخ'), + (0xFCA4, 'M', 'تم'), + (0xFCA5, 'M', 'ته'), + (0xFCA6, 'M', 'ثم'), + (0xFCA7, 'M', 'جح'), + (0xFCA8, 'M', 'جم'), + (0xFCA9, 'M', 'حج'), + (0xFCAA, 'M', 'حم'), + (0xFCAB, 'M', 'خج'), + (0xFCAC, 'M', 'خم'), + (0xFCAD, 'M', 'سج'), + (0xFCAE, 'M', 'سح'), + (0xFCAF, 'M', 'سخ'), + (0xFCB0, 'M', 'سم'), + (0xFCB1, 'M', 'صح'), + (0xFCB2, 'M', 'صخ'), + (0xFCB3, 'M', 'صم'), + (0xFCB4, 'M', 'ضج'), + (0xFCB5, 'M', 'ضح'), + (0xFCB6, 'M', 'ضخ'), + (0xFCB7, 'M', 'ضم'), + (0xFCB8, 'M', 'طح'), + (0xFCB9, 'M', 'ظم'), + (0xFCBA, 'M', 'عج'), + (0xFCBB, 'M', 'عم'), + (0xFCBC, 'M', 'غج'), + (0xFCBD, 'M', 'غم'), + (0xFCBE, 'M', 'فج'), + (0xFCBF, 'M', 'فح'), + (0xFCC0, 'M', 'فخ'), + (0xFCC1, 'M', 'فم'), + (0xFCC2, 'M', 'قح'), + (0xFCC3, 'M', 'قم'), + ] + +def _seg_47(): + return [ + (0xFCC4, 'M', 'كج'), + (0xFCC5, 'M', 'كح'), + (0xFCC6, 'M', 'كخ'), + (0xFCC7, 'M', 'كل'), + (0xFCC8, 'M', 'كم'), + (0xFCC9, 'M', 'لج'), + (0xFCCA, 'M', 'لح'), + (0xFCCB, 'M', 'لخ'), + (0xFCCC, 'M', 'لم'), + (0xFCCD, 'M', 'له'), + (0xFCCE, 'M', 'مج'), + (0xFCCF, 'M', 'مح'), + (0xFCD0, 'M', 'مخ'), + (0xFCD1, 'M', 'مم'), + (0xFCD2, 'M', 'نج'), + (0xFCD3, 'M', 'نح'), + (0xFCD4, 'M', 'نخ'), + (0xFCD5, 'M', 'نم'), + (0xFCD6, 'M', 'نه'), + (0xFCD7, 'M', 'هج'), + (0xFCD8, 'M', 'هم'), + (0xFCD9, 'M', 'هٰ'), + (0xFCDA, 'M', 'يج'), + (0xFCDB, 'M', 'يح'), + (0xFCDC, 'M', 'يخ'), + (0xFCDD, 'M', 'يم'), + (0xFCDE, 'M', 'يه'), + (0xFCDF, 'M', 'ئم'), + (0xFCE0, 'M', 'ئه'), + (0xFCE1, 'M', 'بم'), + (0xFCE2, 'M', 'به'), + (0xFCE3, 'M', 'تم'), + (0xFCE4, 'M', 'ته'), + (0xFCE5, 'M', 'ثم'), + (0xFCE6, 'M', 'ثه'), + (0xFCE7, 'M', 'سم'), + (0xFCE8, 'M', 'سه'), + (0xFCE9, 'M', 'شم'), + (0xFCEA, 'M', 'شه'), + (0xFCEB, 'M', 'كل'), + (0xFCEC, 'M', 'كم'), + (0xFCED, 'M', 'لم'), + (0xFCEE, 'M', 'نم'), + (0xFCEF, 'M', 'نه'), + (0xFCF0, 'M', 'يم'), + (0xFCF1, 'M', 'يه'), + 
(0xFCF2, 'M', 'ـَّ'), + (0xFCF3, 'M', 'ـُّ'), + (0xFCF4, 'M', 'ـِّ'), + (0xFCF5, 'M', 'طى'), + (0xFCF6, 'M', 'طي'), + (0xFCF7, 'M', 'عى'), + (0xFCF8, 'M', 'عي'), + (0xFCF9, 'M', 'غى'), + (0xFCFA, 'M', 'غي'), + (0xFCFB, 'M', 'سى'), + (0xFCFC, 'M', 'سي'), + (0xFCFD, 'M', 'شى'), + (0xFCFE, 'M', 'شي'), + (0xFCFF, 'M', 'حى'), + (0xFD00, 'M', 'حي'), + (0xFD01, 'M', 'جى'), + (0xFD02, 'M', 'جي'), + (0xFD03, 'M', 'خى'), + (0xFD04, 'M', 'خي'), + (0xFD05, 'M', 'صى'), + (0xFD06, 'M', 'صي'), + (0xFD07, 'M', 'ضى'), + (0xFD08, 'M', 'ضي'), + (0xFD09, 'M', 'شج'), + (0xFD0A, 'M', 'شح'), + (0xFD0B, 'M', 'شخ'), + (0xFD0C, 'M', 'شم'), + (0xFD0D, 'M', 'شر'), + (0xFD0E, 'M', 'سر'), + (0xFD0F, 'M', 'صر'), + (0xFD10, 'M', 'ضر'), + (0xFD11, 'M', 'طى'), + (0xFD12, 'M', 'طي'), + (0xFD13, 'M', 'عى'), + (0xFD14, 'M', 'عي'), + (0xFD15, 'M', 'غى'), + (0xFD16, 'M', 'غي'), + (0xFD17, 'M', 'سى'), + (0xFD18, 'M', 'سي'), + (0xFD19, 'M', 'شى'), + (0xFD1A, 'M', 'شي'), + (0xFD1B, 'M', 'حى'), + (0xFD1C, 'M', 'حي'), + (0xFD1D, 'M', 'جى'), + (0xFD1E, 'M', 'جي'), + (0xFD1F, 'M', 'خى'), + (0xFD20, 'M', 'خي'), + (0xFD21, 'M', 'صى'), + (0xFD22, 'M', 'صي'), + (0xFD23, 'M', 'ضى'), + (0xFD24, 'M', 'ضي'), + (0xFD25, 'M', 'شج'), + (0xFD26, 'M', 'شح'), + (0xFD27, 'M', 'شخ'), + ] + +def _seg_48(): + return [ + (0xFD28, 'M', 'شم'), + (0xFD29, 'M', 'شر'), + (0xFD2A, 'M', 'سر'), + (0xFD2B, 'M', 'صر'), + (0xFD2C, 'M', 'ضر'), + (0xFD2D, 'M', 'شج'), + (0xFD2E, 'M', 'شح'), + (0xFD2F, 'M', 'شخ'), + (0xFD30, 'M', 'شم'), + (0xFD31, 'M', 'سه'), + (0xFD32, 'M', 'شه'), + (0xFD33, 'M', 'طم'), + (0xFD34, 'M', 'سج'), + (0xFD35, 'M', 'سح'), + (0xFD36, 'M', 'سخ'), + (0xFD37, 'M', 'شج'), + (0xFD38, 'M', 'شح'), + (0xFD39, 'M', 'شخ'), + (0xFD3A, 'M', 'طم'), + (0xFD3B, 'M', 'ظم'), + (0xFD3C, 'M', 'اً'), + (0xFD3E, 'V'), + (0xFD40, 'X'), + (0xFD50, 'M', 'تجم'), + (0xFD51, 'M', 'تحج'), + (0xFD53, 'M', 'تحم'), + (0xFD54, 'M', 'تخم'), + (0xFD55, 'M', 'تمج'), + (0xFD56, 'M', 'تمح'), + (0xFD57, 'M', 'تمخ'), + (0xFD58, 'M', 'جمح'), + (0xFD5A, 'M', 'حمي'), + (0xFD5B, 'M', 'حمى'), + (0xFD5C, 'M', 'سحج'), + (0xFD5D, 'M', 'سجح'), + (0xFD5E, 'M', 'سجى'), + (0xFD5F, 'M', 'سمح'), + (0xFD61, 'M', 'سمج'), + (0xFD62, 'M', 'سمم'), + (0xFD64, 'M', 'صحح'), + (0xFD66, 'M', 'صمم'), + (0xFD67, 'M', 'شحم'), + (0xFD69, 'M', 'شجي'), + (0xFD6A, 'M', 'شمخ'), + (0xFD6C, 'M', 'شمم'), + (0xFD6E, 'M', 'ضحى'), + (0xFD6F, 'M', 'ضخم'), + (0xFD71, 'M', 'طمح'), + (0xFD73, 'M', 'طمم'), + (0xFD74, 'M', 'طمي'), + (0xFD75, 'M', 'عجم'), + (0xFD76, 'M', 'عمم'), + (0xFD78, 'M', 'عمى'), + (0xFD79, 'M', 'غمم'), + (0xFD7A, 'M', 'غمي'), + (0xFD7B, 'M', 'غمى'), + (0xFD7C, 'M', 'فخم'), + (0xFD7E, 'M', 'قمح'), + (0xFD7F, 'M', 'قمم'), + (0xFD80, 'M', 'لحم'), + (0xFD81, 'M', 'لحي'), + (0xFD82, 'M', 'لحى'), + (0xFD83, 'M', 'لجج'), + (0xFD85, 'M', 'لخم'), + (0xFD87, 'M', 'لمح'), + (0xFD89, 'M', 'محج'), + (0xFD8A, 'M', 'محم'), + (0xFD8B, 'M', 'محي'), + (0xFD8C, 'M', 'مجح'), + (0xFD8D, 'M', 'مجم'), + (0xFD8E, 'M', 'مخج'), + (0xFD8F, 'M', 'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', 'مجخ'), + (0xFD93, 'M', 'همج'), + (0xFD94, 'M', 'همم'), + (0xFD95, 'M', 'نحم'), + (0xFD96, 'M', 'نحى'), + (0xFD97, 'M', 'نجم'), + (0xFD99, 'M', 'نجى'), + (0xFD9A, 'M', 'نمي'), + (0xFD9B, 'M', 'نمى'), + (0xFD9C, 'M', 'يمم'), + (0xFD9E, 'M', 'بخي'), + (0xFD9F, 'M', 'تجي'), + (0xFDA0, 'M', 'تجى'), + (0xFDA1, 'M', 'تخي'), + (0xFDA2, 'M', 'تخى'), + (0xFDA3, 'M', 'تمي'), + (0xFDA4, 'M', 'تمى'), + (0xFDA5, 'M', 'جمي'), + (0xFDA6, 'M', 'جحى'), + (0xFDA7, 'M', 'جمى'), + (0xFDA8, 'M', 'سخى'), + (0xFDA9, 'M', 'صحي'), + (0xFDAA, 'M', 'شحي'), + (0xFDAB, 'M', 
'ضحي'), + (0xFDAC, 'M', 'لجي'), + (0xFDAD, 'M', 'لمي'), + (0xFDAE, 'M', 'يحي'), + ] + +def _seg_49(): + return [ + (0xFDAF, 'M', 'يجي'), + (0xFDB0, 'M', 'يمي'), + (0xFDB1, 'M', 'ممي'), + (0xFDB2, 'M', 'قمي'), + (0xFDB3, 'M', 'نحي'), + (0xFDB4, 'M', 'قمح'), + (0xFDB5, 'M', 'لحم'), + (0xFDB6, 'M', 'عمي'), + (0xFDB7, 'M', 'كمي'), + (0xFDB8, 'M', 'نجح'), + (0xFDB9, 'M', 'مخي'), + (0xFDBA, 'M', 'لجم'), + (0xFDBB, 'M', 'كمم'), + (0xFDBC, 'M', 'لجم'), + (0xFDBD, 'M', 'نجح'), + (0xFDBE, 'M', 'جحي'), + (0xFDBF, 'M', 'حجي'), + (0xFDC0, 'M', 'مجي'), + (0xFDC1, 'M', 'فمي'), + (0xFDC2, 'M', 'بحي'), + (0xFDC3, 'M', 'كمم'), + (0xFDC4, 'M', 'عجم'), + (0xFDC5, 'M', 'صمم'), + (0xFDC6, 'M', 'سخي'), + (0xFDC7, 'M', 'نجي'), + (0xFDC8, 'X'), + (0xFDF0, 'M', 'صلے'), + (0xFDF1, 'M', 'قلے'), + (0xFDF2, 'M', 'الله'), + (0xFDF3, 'M', 'اكبر'), + (0xFDF4, 'M', 'محمد'), + (0xFDF5, 'M', 'صلعم'), + (0xFDF6, 'M', 'رسول'), + (0xFDF7, 'M', 'عليه'), + (0xFDF8, 'M', 'وسلم'), + (0xFDF9, 'M', 'صلى'), + (0xFDFA, '3', 'صلى الله عليه وسلم'), + (0xFDFB, '3', 'جل جلاله'), + (0xFDFC, 'M', 'ریال'), + (0xFDFD, 'V'), + (0xFDFE, 'X'), + (0xFE00, 'I'), + (0xFE10, '3', ','), + (0xFE11, 'M', '、'), + (0xFE12, 'X'), + (0xFE13, '3', ':'), + (0xFE14, '3', ';'), + (0xFE15, '3', '!'), + (0xFE16, '3', '?'), + (0xFE17, 'M', '〖'), + (0xFE18, 'M', '〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', '—'), + (0xFE32, 'M', '–'), + (0xFE33, '3', '_'), + (0xFE35, '3', '('), + (0xFE36, '3', ')'), + (0xFE37, '3', '{'), + (0xFE38, '3', '}'), + (0xFE39, 'M', '〔'), + (0xFE3A, 'M', '〕'), + (0xFE3B, 'M', '【'), + (0xFE3C, 'M', '】'), + (0xFE3D, 'M', '《'), + (0xFE3E, 'M', '》'), + (0xFE3F, 'M', '〈'), + (0xFE40, 'M', '〉'), + (0xFE41, 'M', '「'), + (0xFE42, 'M', '」'), + (0xFE43, 'M', '『'), + (0xFE44, 'M', '』'), + (0xFE45, 'V'), + (0xFE47, '3', '['), + (0xFE48, '3', ']'), + (0xFE49, '3', ' ̅'), + (0xFE4D, '3', '_'), + (0xFE50, '3', ','), + (0xFE51, 'M', '、'), + (0xFE52, 'X'), + (0xFE54, '3', ';'), + (0xFE55, '3', ':'), + (0xFE56, '3', '?'), + (0xFE57, '3', '!'), + (0xFE58, 'M', '—'), + (0xFE59, '3', '('), + (0xFE5A, '3', ')'), + (0xFE5B, '3', '{'), + (0xFE5C, '3', '}'), + (0xFE5D, 'M', '〔'), + (0xFE5E, 'M', '〕'), + (0xFE5F, '3', '#'), + (0xFE60, '3', '&'), + (0xFE61, '3', '*'), + (0xFE62, '3', '+'), + (0xFE63, 'M', '-'), + (0xFE64, '3', '<'), + (0xFE65, '3', '>'), + (0xFE66, '3', '='), + ] + +def _seg_50(): + return [ + (0xFE67, 'X'), + (0xFE68, '3', '\\'), + (0xFE69, '3', '$'), + (0xFE6A, '3', '%'), + (0xFE6B, '3', '@'), + (0xFE6C, 'X'), + (0xFE70, '3', ' ً'), + (0xFE71, 'M', 'ـً'), + (0xFE72, '3', ' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', ' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', ' َ'), + (0xFE77, 'M', 'ـَ'), + (0xFE78, '3', ' ُ'), + (0xFE79, 'M', 'ـُ'), + (0xFE7A, '3', ' ِ'), + (0xFE7B, 'M', 'ـِ'), + (0xFE7C, '3', ' ّ'), + (0xFE7D, 'M', 'ـّ'), + (0xFE7E, '3', ' ْ'), + (0xFE7F, 'M', 'ـْ'), + (0xFE80, 'M', 'ء'), + (0xFE81, 'M', 'آ'), + (0xFE83, 'M', 'أ'), + (0xFE85, 'M', 'ؤ'), + (0xFE87, 'M', 'إ'), + (0xFE89, 'M', 'ئ'), + (0xFE8D, 'M', 'ا'), + (0xFE8F, 'M', 'ب'), + (0xFE93, 'M', 'ة'), + (0xFE95, 'M', 'ت'), + (0xFE99, 'M', 'ث'), + (0xFE9D, 'M', 'ج'), + (0xFEA1, 'M', 'ح'), + (0xFEA5, 'M', 'خ'), + (0xFEA9, 'M', 'د'), + (0xFEAB, 'M', 'ذ'), + (0xFEAD, 'M', 'ر'), + (0xFEAF, 'M', 'ز'), + (0xFEB1, 'M', 'س'), + (0xFEB5, 'M', 'ش'), + (0xFEB9, 'M', 'ص'), + (0xFEBD, 'M', 'ض'), + (0xFEC1, 'M', 'ط'), + (0xFEC5, 'M', 'ظ'), + (0xFEC9, 'M', 'ع'), + (0xFECD, 'M', 'غ'), + (0xFED1, 'M', 'ف'), + (0xFED5, 'M', 'ق'), + (0xFED9, 'M', 'ك'), + (0xFEDD, 'M', 'ل'), + (0xFEE1, 
'M', 'م'), + (0xFEE5, 'M', 'ن'), + (0xFEE9, 'M', 'ه'), + (0xFEED, 'M', 'و'), + (0xFEEF, 'M', 'ى'), + (0xFEF1, 'M', 'ي'), + (0xFEF5, 'M', 'لآ'), + (0xFEF7, 'M', 'لأ'), + (0xFEF9, 'M', 'لإ'), + (0xFEFB, 'M', 'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', '!'), + (0xFF02, '3', '"'), + (0xFF03, '3', '#'), + (0xFF04, '3', '$'), + (0xFF05, '3', '%'), + (0xFF06, '3', '&'), + (0xFF07, '3', '\''), + (0xFF08, '3', '('), + (0xFF09, '3', ')'), + (0xFF0A, '3', '*'), + (0xFF0B, '3', '+'), + (0xFF0C, '3', ','), + (0xFF0D, 'M', '-'), + (0xFF0E, 'M', '.'), + (0xFF0F, '3', '/'), + (0xFF10, 'M', '0'), + (0xFF11, 'M', '1'), + (0xFF12, 'M', '2'), + (0xFF13, 'M', '3'), + (0xFF14, 'M', '4'), + (0xFF15, 'M', '5'), + (0xFF16, 'M', '6'), + (0xFF17, 'M', '7'), + (0xFF18, 'M', '8'), + (0xFF19, 'M', '9'), + (0xFF1A, '3', ':'), + (0xFF1B, '3', ';'), + (0xFF1C, '3', '<'), + (0xFF1D, '3', '='), + (0xFF1E, '3', '>'), + (0xFF1F, '3', '?'), + (0xFF20, '3', '@'), + (0xFF21, 'M', 'a'), + (0xFF22, 'M', 'b'), + (0xFF23, 'M', 'c'), + ] + +def _seg_51(): + return [ + (0xFF24, 'M', 'd'), + (0xFF25, 'M', 'e'), + (0xFF26, 'M', 'f'), + (0xFF27, 'M', 'g'), + (0xFF28, 'M', 'h'), + (0xFF29, 'M', 'i'), + (0xFF2A, 'M', 'j'), + (0xFF2B, 'M', 'k'), + (0xFF2C, 'M', 'l'), + (0xFF2D, 'M', 'm'), + (0xFF2E, 'M', 'n'), + (0xFF2F, 'M', 'o'), + (0xFF30, 'M', 'p'), + (0xFF31, 'M', 'q'), + (0xFF32, 'M', 'r'), + (0xFF33, 'M', 's'), + (0xFF34, 'M', 't'), + (0xFF35, 'M', 'u'), + (0xFF36, 'M', 'v'), + (0xFF37, 'M', 'w'), + (0xFF38, 'M', 'x'), + (0xFF39, 'M', 'y'), + (0xFF3A, 'M', 'z'), + (0xFF3B, '3', '['), + (0xFF3C, '3', '\\'), + (0xFF3D, '3', ']'), + (0xFF3E, '3', '^'), + (0xFF3F, '3', '_'), + (0xFF40, '3', '`'), + (0xFF41, 'M', 'a'), + (0xFF42, 'M', 'b'), + (0xFF43, 'M', 'c'), + (0xFF44, 'M', 'd'), + (0xFF45, 'M', 'e'), + (0xFF46, 'M', 'f'), + (0xFF47, 'M', 'g'), + (0xFF48, 'M', 'h'), + (0xFF49, 'M', 'i'), + (0xFF4A, 'M', 'j'), + (0xFF4B, 'M', 'k'), + (0xFF4C, 'M', 'l'), + (0xFF4D, 'M', 'm'), + (0xFF4E, 'M', 'n'), + (0xFF4F, 'M', 'o'), + (0xFF50, 'M', 'p'), + (0xFF51, 'M', 'q'), + (0xFF52, 'M', 'r'), + (0xFF53, 'M', 's'), + (0xFF54, 'M', 't'), + (0xFF55, 'M', 'u'), + (0xFF56, 'M', 'v'), + (0xFF57, 'M', 'w'), + (0xFF58, 'M', 'x'), + (0xFF59, 'M', 'y'), + (0xFF5A, 'M', 'z'), + (0xFF5B, '3', '{'), + (0xFF5C, '3', '|'), + (0xFF5D, '3', '}'), + (0xFF5E, '3', '~'), + (0xFF5F, 'M', '⦅'), + (0xFF60, 'M', '⦆'), + (0xFF61, 'M', '.'), + (0xFF62, 'M', '「'), + (0xFF63, 'M', '」'), + (0xFF64, 'M', '、'), + (0xFF65, 'M', '・'), + (0xFF66, 'M', 'ヲ'), + (0xFF67, 'M', 'ァ'), + (0xFF68, 'M', 'ィ'), + (0xFF69, 'M', 'ゥ'), + (0xFF6A, 'M', 'ェ'), + (0xFF6B, 'M', 'ォ'), + (0xFF6C, 'M', 'ャ'), + (0xFF6D, 'M', 'ュ'), + (0xFF6E, 'M', 'ョ'), + (0xFF6F, 'M', 'ッ'), + (0xFF70, 'M', 'ー'), + (0xFF71, 'M', 'ア'), + (0xFF72, 'M', 'イ'), + (0xFF73, 'M', 'ウ'), + (0xFF74, 'M', 'エ'), + (0xFF75, 'M', 'オ'), + (0xFF76, 'M', 'カ'), + (0xFF77, 'M', 'キ'), + (0xFF78, 'M', 'ク'), + (0xFF79, 'M', 'ケ'), + (0xFF7A, 'M', 'コ'), + (0xFF7B, 'M', 'サ'), + (0xFF7C, 'M', 'シ'), + (0xFF7D, 'M', 'ス'), + (0xFF7E, 'M', 'セ'), + (0xFF7F, 'M', 'ソ'), + (0xFF80, 'M', 'タ'), + (0xFF81, 'M', 'チ'), + (0xFF82, 'M', 'ツ'), + (0xFF83, 'M', 'テ'), + (0xFF84, 'M', 'ト'), + (0xFF85, 'M', 'ナ'), + (0xFF86, 'M', 'ニ'), + (0xFF87, 'M', 'ヌ'), + ] + +def _seg_52(): + return [ + (0xFF88, 'M', 'ネ'), + (0xFF89, 'M', 'ノ'), + (0xFF8A, 'M', 'ハ'), + (0xFF8B, 'M', 'ヒ'), + (0xFF8C, 'M', 'フ'), + (0xFF8D, 'M', 'ヘ'), + (0xFF8E, 'M', 'ホ'), + (0xFF8F, 'M', 'マ'), + (0xFF90, 'M', 'ミ'), + (0xFF91, 'M', 'ム'), + (0xFF92, 'M', 'メ'), + (0xFF93, 
'M', 'モ'), + (0xFF94, 'M', 'ヤ'), + (0xFF95, 'M', 'ユ'), + (0xFF96, 'M', 'ヨ'), + (0xFF97, 'M', 'ラ'), + (0xFF98, 'M', 'リ'), + (0xFF99, 'M', 'ル'), + (0xFF9A, 'M', 'レ'), + (0xFF9B, 'M', 'ロ'), + (0xFF9C, 'M', 'ワ'), + (0xFF9D, 'M', 'ン'), + (0xFF9E, 'M', '゙'), + (0xFF9F, 'M', '゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', 'ᄀ'), + (0xFFA2, 'M', 'ᄁ'), + (0xFFA3, 'M', 'ᆪ'), + (0xFFA4, 'M', 'ᄂ'), + (0xFFA5, 'M', 'ᆬ'), + (0xFFA6, 'M', 'ᆭ'), + (0xFFA7, 'M', 'ᄃ'), + (0xFFA8, 'M', 'ᄄ'), + (0xFFA9, 'M', 'ᄅ'), + (0xFFAA, 'M', 'ᆰ'), + (0xFFAB, 'M', 'ᆱ'), + (0xFFAC, 'M', 'ᆲ'), + (0xFFAD, 'M', 'ᆳ'), + (0xFFAE, 'M', 'ᆴ'), + (0xFFAF, 'M', 'ᆵ'), + (0xFFB0, 'M', 'ᄚ'), + (0xFFB1, 'M', 'ᄆ'), + (0xFFB2, 'M', 'ᄇ'), + (0xFFB3, 'M', 'ᄈ'), + (0xFFB4, 'M', 'ᄡ'), + (0xFFB5, 'M', 'ᄉ'), + (0xFFB6, 'M', 'ᄊ'), + (0xFFB7, 'M', 'ᄋ'), + (0xFFB8, 'M', 'ᄌ'), + (0xFFB9, 'M', 'ᄍ'), + (0xFFBA, 'M', 'ᄎ'), + (0xFFBB, 'M', 'ᄏ'), + (0xFFBC, 'M', 'ᄐ'), + (0xFFBD, 'M', 'ᄑ'), + (0xFFBE, 'M', 'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', 'ᅡ'), + (0xFFC3, 'M', 'ᅢ'), + (0xFFC4, 'M', 'ᅣ'), + (0xFFC5, 'M', 'ᅤ'), + (0xFFC6, 'M', 'ᅥ'), + (0xFFC7, 'M', 'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', 'ᅧ'), + (0xFFCB, 'M', 'ᅨ'), + (0xFFCC, 'M', 'ᅩ'), + (0xFFCD, 'M', 'ᅪ'), + (0xFFCE, 'M', 'ᅫ'), + (0xFFCF, 'M', 'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', 'ᅭ'), + (0xFFD3, 'M', 'ᅮ'), + (0xFFD4, 'M', 'ᅯ'), + (0xFFD5, 'M', 'ᅰ'), + (0xFFD6, 'M', 'ᅱ'), + (0xFFD7, 'M', 'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', 'ᅳ'), + (0xFFDB, 'M', 'ᅴ'), + (0xFFDC, 'M', 'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', '¢'), + (0xFFE1, 'M', '£'), + (0xFFE2, 'M', '¬'), + (0xFFE3, '3', ' ̄'), + (0xFFE4, 'M', '¦'), + (0xFFE5, 'M', '¥'), + (0xFFE6, 'M', '₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', '│'), + (0xFFE9, 'M', '←'), + (0xFFEA, 'M', '↑'), + (0xFFEB, 'M', '→'), + (0xFFEC, 'M', '↓'), + (0xFFED, 'M', '■'), + (0xFFEE, 'M', '○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + ] + +def _seg_53(): + return [ + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019D, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', '𐐨'), + (0x10401, 'M', '𐐩'), + (0x10402, 'M', '𐐪'), + (0x10403, 'M', '𐐫'), + (0x10404, 'M', '𐐬'), + (0x10405, 'M', '𐐭'), + (0x10406, 'M', '𐐮'), + (0x10407, 'M', '𐐯'), + (0x10408, 'M', '𐐰'), + (0x10409, 'M', '𐐱'), + (0x1040A, 'M', '𐐲'), + (0x1040B, 'M', '𐐳'), + (0x1040C, 'M', '𐐴'), + (0x1040D, 'M', '𐐵'), + (0x1040E, 'M', '𐐶'), + (0x1040F, 'M', '𐐷'), + (0x10410, 'M', '𐐸'), + (0x10411, 'M', '𐐹'), + (0x10412, 'M', '𐐺'), + (0x10413, 'M', '𐐻'), + (0x10414, 'M', '𐐼'), + (0x10415, 'M', '𐐽'), + (0x10416, 'M', '𐐾'), + (0x10417, 'M', '𐐿'), + (0x10418, 'M', '𐑀'), + (0x10419, 'M', '𐑁'), + (0x1041A, 'M', '𐑂'), + (0x1041B, 'M', '𐑃'), + (0x1041C, 'M', '𐑄'), + (0x1041D, 'M', '𐑅'), + (0x1041E, 'M', '𐑆'), + (0x1041F, 'M', '𐑇'), + (0x10420, 'M', '𐑈'), + (0x10421, 'M', '𐑉'), + (0x10422, 'M', '𐑊'), + (0x10423, 'M', '𐑋'), + (0x10424, 'M', '𐑌'), + (0x10425, 'M', '𐑍'), + (0x10426, 
'M', '𐑎'), + (0x10427, 'M', '𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', '𐓘'), + (0x104B1, 'M', '𐓙'), + (0x104B2, 'M', '𐓚'), + (0x104B3, 'M', '𐓛'), + (0x104B4, 'M', '𐓜'), + (0x104B5, 'M', '𐓝'), + (0x104B6, 'M', '𐓞'), + (0x104B7, 'M', '𐓟'), + (0x104B8, 'M', '𐓠'), + (0x104B9, 'M', '𐓡'), + (0x104BA, 'M', '𐓢'), + (0x104BB, 'M', '𐓣'), + (0x104BC, 'M', '𐓤'), + (0x104BD, 'M', '𐓥'), + (0x104BE, 'M', '𐓦'), + ] + +def _seg_54(): + return [ + (0x104BF, 'M', '𐓧'), + (0x104C0, 'M', '𐓨'), + (0x104C1, 'M', '𐓩'), + (0x104C2, 'M', '𐓪'), + (0x104C3, 'M', '𐓫'), + (0x104C4, 'M', '𐓬'), + (0x104C5, 'M', '𐓭'), + (0x104C6, 'M', '𐓮'), + (0x104C7, 'M', '𐓯'), + (0x104C8, 'M', '𐓰'), + (0x104C9, 'M', '𐓱'), + (0x104CA, 'M', '𐓲'), + (0x104CB, 'M', '𐓳'), + (0x104CC, 'M', '𐓴'), + (0x104CD, 'M', '𐓵'), + (0x104CE, 'M', '𐓶'), + (0x104CF, 'M', '𐓷'), + (0x104D0, 'M', '𐓸'), + (0x104D1, 'M', '𐓹'), + (0x104D2, 'M', '𐓺'), + (0x104D3, 'M', '𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A36, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A49, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + ] + +def _seg_55(): + return [ + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', '𐳀'), + (0x10C81, 'M', '𐳁'), + (0x10C82, 'M', '𐳂'), + (0x10C83, 'M', '𐳃'), + (0x10C84, 'M', '𐳄'), + (0x10C85, 'M', '𐳅'), + (0x10C86, 'M', '𐳆'), + (0x10C87, 'M', '𐳇'), + (0x10C88, 'M', '𐳈'), + (0x10C89, 'M', '𐳉'), + (0x10C8A, 'M', '𐳊'), + (0x10C8B, 'M', '𐳋'), + (0x10C8C, 'M', '𐳌'), + (0x10C8D, 'M', '𐳍'), + (0x10C8E, 'M', '𐳎'), + (0x10C8F, 'M', '𐳏'), + (0x10C90, 'M', '𐳐'), + (0x10C91, 'M', '𐳑'), + (0x10C92, 'M', '𐳒'), + (0x10C93, 'M', '𐳓'), + (0x10C94, 'M', '𐳔'), + (0x10C95, 'M', '𐳕'), + (0x10C96, 'M', '𐳖'), + (0x10C97, 'M', '𐳗'), + (0x10C98, 'M', '𐳘'), + (0x10C99, 'M', '𐳙'), + (0x10C9A, 'M', '𐳚'), + (0x10C9B, 'M', '𐳛'), + (0x10C9C, 'M', '𐳜'), + (0x10C9D, 'M', '𐳝'), + (0x10C9E, 'M', '𐳞'), + (0x10C9F, 'M', '𐳟'), + (0x10CA0, 'M', '𐳠'), + (0x10CA1, 'M', '𐳡'), + (0x10CA2, 'M', '𐳢'), + (0x10CA3, 'M', '𐳣'), + (0x10CA4, 'M', '𐳤'), + (0x10CA5, 'M', '𐳥'), + (0x10CA6, 'M', '𐳦'), + (0x10CA7, 'M', '𐳧'), + (0x10CA8, 'M', '𐳨'), + (0x10CA9, 'M', '𐳩'), + (0x10CAA, 'M', '𐳪'), + (0x10CAB, 'M', '𐳫'), + (0x10CAC, 'M', '𐳬'), + (0x10CAD, 'M', '𐳭'), + (0x10CAE, 'M', '𐳮'), + (0x10CAF, 
'M', '𐳯'), + (0x10CB0, 'M', '𐳰'), + (0x10CB1, 'M', '𐳱'), + (0x10CB2, 'M', '𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), + (0x10F00, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11070, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C2, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11148, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + ] + +def _seg_56(): + return [ + (0x11213, 'V'), + (0x1123F, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133B, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + (0x11400, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x11462, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116B9, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171B, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11740, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), + (0x118A0, 'M', '𑣀'), + (0x118A1, 'M', '𑣁'), + (0x118A2, 'M', '𑣂'), + (0x118A3, 'M', '𑣃'), + (0x118A4, 'M', '𑣄'), + (0x118A5, 'M', '𑣅'), + (0x118A6, 'M', '𑣆'), + (0x118A7, 'M', '𑣇'), + (0x118A8, 'M', '𑣈'), + (0x118A9, 'M', '𑣉'), + (0x118AA, 'M', '𑣊'), + (0x118AB, 'M', '𑣋'), + (0x118AC, 'M', '𑣌'), + (0x118AD, 'M', '𑣍'), + (0x118AE, 'M', '𑣎'), + (0x118AF, 'M', '𑣏'), + (0x118B0, 'M', '𑣐'), + (0x118B1, 'M', '𑣑'), + (0x118B2, 'M', '𑣒'), + (0x118B3, 'M', '𑣓'), + (0x118B4, 'M', '𑣔'), + (0x118B5, 'M', '𑣕'), + (0x118B6, 'M', '𑣖'), + (0x118B7, 'M', '𑣗'), + ] + +def _seg_57(): + return [ + (0x118B8, 'M', '𑣘'), + (0x118B9, 'M', '𑣙'), + (0x118BA, 'M', '𑣚'), + (0x118BB, 'M', '𑣛'), + (0x118BC, 'M', '𑣜'), + (0x118BD, 'M', '𑣝'), + (0x118BE, 'M', '𑣞'), + (0x118BF, 'M', '𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), + (0x11A00, 'V'), + 
(0x11A48, 'X'), + (0x11A50, 'V'), + (0x11AA3, 'X'), + (0x11AC0, 'V'), + (0x11AF9, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x11D60, 'V'), + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + (0x11FF2, 'X'), + (0x11FFF, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + ] + +def _seg_58(): + return [ + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16A70, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16E40, 'M', '𖹠'), + (0x16E41, 'M', '𖹡'), + (0x16E42, 'M', '𖹢'), + (0x16E43, 'M', '𖹣'), + (0x16E44, 'M', '𖹤'), + (0x16E45, 'M', '𖹥'), + (0x16E46, 'M', '𖹦'), + (0x16E47, 'M', '𖹧'), + (0x16E48, 'M', '𖹨'), + (0x16E49, 'M', '𖹩'), + (0x16E4A, 'M', '𖹪'), + (0x16E4B, 'M', '𖹫'), + (0x16E4C, 'M', '𖹬'), + (0x16E4D, 'M', '𖹭'), + (0x16E4E, 'M', '𖹮'), + (0x16E4F, 'M', '𖹯'), + (0x16E50, 'M', '𖹰'), + (0x16E51, 'M', '𖹱'), + (0x16E52, 'M', '𖹲'), + (0x16E53, 'M', '𖹳'), + (0x16E54, 'M', '𖹴'), + (0x16E55, 'M', '𖹵'), + (0x16E56, 'M', '𖹶'), + (0x16E57, 'M', '𖹷'), + (0x16E58, 'M', '𖹸'), + (0x16E59, 'M', '𖹹'), + (0x16E5A, 'M', '𖹺'), + (0x16E5B, 'M', '𖹻'), + (0x16E5C, 'M', '𖹼'), + (0x16E5D, 'M', '𖹽'), + (0x16E5E, 'M', '𖹾'), + (0x16E5F, 'M', '𖹿'), + (0x16E60, 'V'), + (0x16E9B, 'X'), + (0x16F00, 'V'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), + (0x17000, 'V'), + (0x187F8, 'X'), + (0x18800, 'V'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), + (0x1B000, 'V'), + (0x1B11F, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B164, 'V'), + (0x1B168, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', '𝅗𝅥'), + (0x1D15F, 'M', '𝅘𝅥'), + (0x1D160, 'M', '𝅘𝅥𝅮'), + (0x1D161, 'M', '𝅘𝅥𝅯'), + (0x1D162, 'M', '𝅘𝅥𝅰'), + (0x1D163, 'M', '𝅘𝅥𝅱'), + (0x1D164, 'M', '𝅘𝅥𝅲'), + (0x1D165, 'V'), + ] + +def _seg_59(): + return [ + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', '𝆹𝅥'), + (0x1D1BC, 'M', '𝆺𝅥'), + (0x1D1BD, 'M', '𝆹𝅥𝅮'), + (0x1D1BE, 'M', '𝆺𝅥𝅮'), + (0x1D1BF, 'M', '𝆹𝅥𝅯'), + (0x1D1C0, 'M', '𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1E9, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D2E0, 'V'), + 
(0x1D2F4, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D379, 'X'), + (0x1D400, 'M', 'a'), + (0x1D401, 'M', 'b'), + (0x1D402, 'M', 'c'), + (0x1D403, 'M', 'd'), + (0x1D404, 'M', 'e'), + (0x1D405, 'M', 'f'), + (0x1D406, 'M', 'g'), + (0x1D407, 'M', 'h'), + (0x1D408, 'M', 'i'), + (0x1D409, 'M', 'j'), + (0x1D40A, 'M', 'k'), + (0x1D40B, 'M', 'l'), + (0x1D40C, 'M', 'm'), + (0x1D40D, 'M', 'n'), + (0x1D40E, 'M', 'o'), + (0x1D40F, 'M', 'p'), + (0x1D410, 'M', 'q'), + (0x1D411, 'M', 'r'), + (0x1D412, 'M', 's'), + (0x1D413, 'M', 't'), + (0x1D414, 'M', 'u'), + (0x1D415, 'M', 'v'), + (0x1D416, 'M', 'w'), + (0x1D417, 'M', 'x'), + (0x1D418, 'M', 'y'), + (0x1D419, 'M', 'z'), + (0x1D41A, 'M', 'a'), + (0x1D41B, 'M', 'b'), + (0x1D41C, 'M', 'c'), + (0x1D41D, 'M', 'd'), + (0x1D41E, 'M', 'e'), + (0x1D41F, 'M', 'f'), + (0x1D420, 'M', 'g'), + (0x1D421, 'M', 'h'), + (0x1D422, 'M', 'i'), + (0x1D423, 'M', 'j'), + (0x1D424, 'M', 'k'), + (0x1D425, 'M', 'l'), + (0x1D426, 'M', 'm'), + (0x1D427, 'M', 'n'), + (0x1D428, 'M', 'o'), + (0x1D429, 'M', 'p'), + (0x1D42A, 'M', 'q'), + (0x1D42B, 'M', 'r'), + (0x1D42C, 'M', 's'), + (0x1D42D, 'M', 't'), + (0x1D42E, 'M', 'u'), + (0x1D42F, 'M', 'v'), + (0x1D430, 'M', 'w'), + (0x1D431, 'M', 'x'), + (0x1D432, 'M', 'y'), + (0x1D433, 'M', 'z'), + (0x1D434, 'M', 'a'), + (0x1D435, 'M', 'b'), + (0x1D436, 'M', 'c'), + (0x1D437, 'M', 'd'), + (0x1D438, 'M', 'e'), + (0x1D439, 'M', 'f'), + (0x1D43A, 'M', 'g'), + (0x1D43B, 'M', 'h'), + (0x1D43C, 'M', 'i'), + (0x1D43D, 'M', 'j'), + (0x1D43E, 'M', 'k'), + (0x1D43F, 'M', 'l'), + (0x1D440, 'M', 'm'), + (0x1D441, 'M', 'n'), + (0x1D442, 'M', 'o'), + (0x1D443, 'M', 'p'), + (0x1D444, 'M', 'q'), + (0x1D445, 'M', 'r'), + (0x1D446, 'M', 's'), + (0x1D447, 'M', 't'), + (0x1D448, 'M', 'u'), + (0x1D449, 'M', 'v'), + (0x1D44A, 'M', 'w'), + (0x1D44B, 'M', 'x'), + (0x1D44C, 'M', 'y'), + (0x1D44D, 'M', 'z'), + (0x1D44E, 'M', 'a'), + (0x1D44F, 'M', 'b'), + (0x1D450, 'M', 'c'), + (0x1D451, 'M', 'd'), + ] + +def _seg_60(): + return [ + (0x1D452, 'M', 'e'), + (0x1D453, 'M', 'f'), + (0x1D454, 'M', 'g'), + (0x1D455, 'X'), + (0x1D456, 'M', 'i'), + (0x1D457, 'M', 'j'), + (0x1D458, 'M', 'k'), + (0x1D459, 'M', 'l'), + (0x1D45A, 'M', 'm'), + (0x1D45B, 'M', 'n'), + (0x1D45C, 'M', 'o'), + (0x1D45D, 'M', 'p'), + (0x1D45E, 'M', 'q'), + (0x1D45F, 'M', 'r'), + (0x1D460, 'M', 's'), + (0x1D461, 'M', 't'), + (0x1D462, 'M', 'u'), + (0x1D463, 'M', 'v'), + (0x1D464, 'M', 'w'), + (0x1D465, 'M', 'x'), + (0x1D466, 'M', 'y'), + (0x1D467, 'M', 'z'), + (0x1D468, 'M', 'a'), + (0x1D469, 'M', 'b'), + (0x1D46A, 'M', 'c'), + (0x1D46B, 'M', 'd'), + (0x1D46C, 'M', 'e'), + (0x1D46D, 'M', 'f'), + (0x1D46E, 'M', 'g'), + (0x1D46F, 'M', 'h'), + (0x1D470, 'M', 'i'), + (0x1D471, 'M', 'j'), + (0x1D472, 'M', 'k'), + (0x1D473, 'M', 'l'), + (0x1D474, 'M', 'm'), + (0x1D475, 'M', 'n'), + (0x1D476, 'M', 'o'), + (0x1D477, 'M', 'p'), + (0x1D478, 'M', 'q'), + (0x1D479, 'M', 'r'), + (0x1D47A, 'M', 's'), + (0x1D47B, 'M', 't'), + (0x1D47C, 'M', 'u'), + (0x1D47D, 'M', 'v'), + (0x1D47E, 'M', 'w'), + (0x1D47F, 'M', 'x'), + (0x1D480, 'M', 'y'), + (0x1D481, 'M', 'z'), + (0x1D482, 'M', 'a'), + (0x1D483, 'M', 'b'), + (0x1D484, 'M', 'c'), + (0x1D485, 'M', 'd'), + (0x1D486, 'M', 'e'), + (0x1D487, 'M', 'f'), + (0x1D488, 'M', 'g'), + (0x1D489, 'M', 'h'), + (0x1D48A, 'M', 'i'), + (0x1D48B, 'M', 'j'), + (0x1D48C, 'M', 'k'), + (0x1D48D, 'M', 'l'), + (0x1D48E, 'M', 'm'), + (0x1D48F, 'M', 'n'), + (0x1D490, 'M', 'o'), + (0x1D491, 'M', 'p'), + (0x1D492, 'M', 'q'), + (0x1D493, 'M', 'r'), + (0x1D494, 'M', 's'), + (0x1D495, 
'M', 't'), + (0x1D496, 'M', 'u'), + (0x1D497, 'M', 'v'), + (0x1D498, 'M', 'w'), + (0x1D499, 'M', 'x'), + (0x1D49A, 'M', 'y'), + (0x1D49B, 'M', 'z'), + (0x1D49C, 'M', 'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', 'c'), + (0x1D49F, 'M', 'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', 'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', 'j'), + (0x1D4A6, 'M', 'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', 'n'), + (0x1D4AA, 'M', 'o'), + (0x1D4AB, 'M', 'p'), + (0x1D4AC, 'M', 'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', 's'), + (0x1D4AF, 'M', 't'), + (0x1D4B0, 'M', 'u'), + (0x1D4B1, 'M', 'v'), + (0x1D4B2, 'M', 'w'), + (0x1D4B3, 'M', 'x'), + (0x1D4B4, 'M', 'y'), + (0x1D4B5, 'M', 'z'), + (0x1D4B6, 'M', 'a'), + (0x1D4B7, 'M', 'b'), + (0x1D4B8, 'M', 'c'), + ] + +def _seg_61(): + return [ + (0x1D4B9, 'M', 'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', 'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', 'h'), + (0x1D4BE, 'M', 'i'), + (0x1D4BF, 'M', 'j'), + (0x1D4C0, 'M', 'k'), + (0x1D4C1, 'M', 'l'), + (0x1D4C2, 'M', 'm'), + (0x1D4C3, 'M', 'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', 'p'), + (0x1D4C6, 'M', 'q'), + (0x1D4C7, 'M', 'r'), + (0x1D4C8, 'M', 's'), + (0x1D4C9, 'M', 't'), + (0x1D4CA, 'M', 'u'), + (0x1D4CB, 'M', 'v'), + (0x1D4CC, 'M', 'w'), + (0x1D4CD, 'M', 'x'), + (0x1D4CE, 'M', 'y'), + (0x1D4CF, 'M', 'z'), + (0x1D4D0, 'M', 'a'), + (0x1D4D1, 'M', 'b'), + (0x1D4D2, 'M', 'c'), + (0x1D4D3, 'M', 'd'), + (0x1D4D4, 'M', 'e'), + (0x1D4D5, 'M', 'f'), + (0x1D4D6, 'M', 'g'), + (0x1D4D7, 'M', 'h'), + (0x1D4D8, 'M', 'i'), + (0x1D4D9, 'M', 'j'), + (0x1D4DA, 'M', 'k'), + (0x1D4DB, 'M', 'l'), + (0x1D4DC, 'M', 'm'), + (0x1D4DD, 'M', 'n'), + (0x1D4DE, 'M', 'o'), + (0x1D4DF, 'M', 'p'), + (0x1D4E0, 'M', 'q'), + (0x1D4E1, 'M', 'r'), + (0x1D4E2, 'M', 's'), + (0x1D4E3, 'M', 't'), + (0x1D4E4, 'M', 'u'), + (0x1D4E5, 'M', 'v'), + (0x1D4E6, 'M', 'w'), + (0x1D4E7, 'M', 'x'), + (0x1D4E8, 'M', 'y'), + (0x1D4E9, 'M', 'z'), + (0x1D4EA, 'M', 'a'), + (0x1D4EB, 'M', 'b'), + (0x1D4EC, 'M', 'c'), + (0x1D4ED, 'M', 'd'), + (0x1D4EE, 'M', 'e'), + (0x1D4EF, 'M', 'f'), + (0x1D4F0, 'M', 'g'), + (0x1D4F1, 'M', 'h'), + (0x1D4F2, 'M', 'i'), + (0x1D4F3, 'M', 'j'), + (0x1D4F4, 'M', 'k'), + (0x1D4F5, 'M', 'l'), + (0x1D4F6, 'M', 'm'), + (0x1D4F7, 'M', 'n'), + (0x1D4F8, 'M', 'o'), + (0x1D4F9, 'M', 'p'), + (0x1D4FA, 'M', 'q'), + (0x1D4FB, 'M', 'r'), + (0x1D4FC, 'M', 's'), + (0x1D4FD, 'M', 't'), + (0x1D4FE, 'M', 'u'), + (0x1D4FF, 'M', 'v'), + (0x1D500, 'M', 'w'), + (0x1D501, 'M', 'x'), + (0x1D502, 'M', 'y'), + (0x1D503, 'M', 'z'), + (0x1D504, 'M', 'a'), + (0x1D505, 'M', 'b'), + (0x1D506, 'X'), + (0x1D507, 'M', 'd'), + (0x1D508, 'M', 'e'), + (0x1D509, 'M', 'f'), + (0x1D50A, 'M', 'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', 'j'), + (0x1D50E, 'M', 'k'), + (0x1D50F, 'M', 'l'), + (0x1D510, 'M', 'm'), + (0x1D511, 'M', 'n'), + (0x1D512, 'M', 'o'), + (0x1D513, 'M', 'p'), + (0x1D514, 'M', 'q'), + (0x1D515, 'X'), + (0x1D516, 'M', 's'), + (0x1D517, 'M', 't'), + (0x1D518, 'M', 'u'), + (0x1D519, 'M', 'v'), + (0x1D51A, 'M', 'w'), + (0x1D51B, 'M', 'x'), + (0x1D51C, 'M', 'y'), + (0x1D51D, 'X'), + ] + +def _seg_62(): + return [ + (0x1D51E, 'M', 'a'), + (0x1D51F, 'M', 'b'), + (0x1D520, 'M', 'c'), + (0x1D521, 'M', 'd'), + (0x1D522, 'M', 'e'), + (0x1D523, 'M', 'f'), + (0x1D524, 'M', 'g'), + (0x1D525, 'M', 'h'), + (0x1D526, 'M', 'i'), + (0x1D527, 'M', 'j'), + (0x1D528, 'M', 'k'), + (0x1D529, 'M', 'l'), + (0x1D52A, 'M', 'm'), + (0x1D52B, 'M', 'n'), + (0x1D52C, 'M', 'o'), + (0x1D52D, 'M', 'p'), + (0x1D52E, 'M', 'q'), + (0x1D52F, 'M', 'r'), + (0x1D530, 'M', 's'), + (0x1D531, 'M', 't'), + (0x1D532, 'M', 'u'), + (0x1D533, 'M', 
'v'), + (0x1D534, 'M', 'w'), + (0x1D535, 'M', 'x'), + (0x1D536, 'M', 'y'), + (0x1D537, 'M', 'z'), + (0x1D538, 'M', 'a'), + (0x1D539, 'M', 'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', 'd'), + (0x1D53C, 'M', 'e'), + (0x1D53D, 'M', 'f'), + (0x1D53E, 'M', 'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', 'i'), + (0x1D541, 'M', 'j'), + (0x1D542, 'M', 'k'), + (0x1D543, 'M', 'l'), + (0x1D544, 'M', 'm'), + (0x1D545, 'X'), + (0x1D546, 'M', 'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', 's'), + (0x1D54B, 'M', 't'), + (0x1D54C, 'M', 'u'), + (0x1D54D, 'M', 'v'), + (0x1D54E, 'M', 'w'), + (0x1D54F, 'M', 'x'), + (0x1D550, 'M', 'y'), + (0x1D551, 'X'), + (0x1D552, 'M', 'a'), + (0x1D553, 'M', 'b'), + (0x1D554, 'M', 'c'), + (0x1D555, 'M', 'd'), + (0x1D556, 'M', 'e'), + (0x1D557, 'M', 'f'), + (0x1D558, 'M', 'g'), + (0x1D559, 'M', 'h'), + (0x1D55A, 'M', 'i'), + (0x1D55B, 'M', 'j'), + (0x1D55C, 'M', 'k'), + (0x1D55D, 'M', 'l'), + (0x1D55E, 'M', 'm'), + (0x1D55F, 'M', 'n'), + (0x1D560, 'M', 'o'), + (0x1D561, 'M', 'p'), + (0x1D562, 'M', 'q'), + (0x1D563, 'M', 'r'), + (0x1D564, 'M', 's'), + (0x1D565, 'M', 't'), + (0x1D566, 'M', 'u'), + (0x1D567, 'M', 'v'), + (0x1D568, 'M', 'w'), + (0x1D569, 'M', 'x'), + (0x1D56A, 'M', 'y'), + (0x1D56B, 'M', 'z'), + (0x1D56C, 'M', 'a'), + (0x1D56D, 'M', 'b'), + (0x1D56E, 'M', 'c'), + (0x1D56F, 'M', 'd'), + (0x1D570, 'M', 'e'), + (0x1D571, 'M', 'f'), + (0x1D572, 'M', 'g'), + (0x1D573, 'M', 'h'), + (0x1D574, 'M', 'i'), + (0x1D575, 'M', 'j'), + (0x1D576, 'M', 'k'), + (0x1D577, 'M', 'l'), + (0x1D578, 'M', 'm'), + (0x1D579, 'M', 'n'), + (0x1D57A, 'M', 'o'), + (0x1D57B, 'M', 'p'), + (0x1D57C, 'M', 'q'), + (0x1D57D, 'M', 'r'), + (0x1D57E, 'M', 's'), + (0x1D57F, 'M', 't'), + (0x1D580, 'M', 'u'), + (0x1D581, 'M', 'v'), + (0x1D582, 'M', 'w'), + (0x1D583, 'M', 'x'), + ] + +def _seg_63(): + return [ + (0x1D584, 'M', 'y'), + (0x1D585, 'M', 'z'), + (0x1D586, 'M', 'a'), + (0x1D587, 'M', 'b'), + (0x1D588, 'M', 'c'), + (0x1D589, 'M', 'd'), + (0x1D58A, 'M', 'e'), + (0x1D58B, 'M', 'f'), + (0x1D58C, 'M', 'g'), + (0x1D58D, 'M', 'h'), + (0x1D58E, 'M', 'i'), + (0x1D58F, 'M', 'j'), + (0x1D590, 'M', 'k'), + (0x1D591, 'M', 'l'), + (0x1D592, 'M', 'm'), + (0x1D593, 'M', 'n'), + (0x1D594, 'M', 'o'), + (0x1D595, 'M', 'p'), + (0x1D596, 'M', 'q'), + (0x1D597, 'M', 'r'), + (0x1D598, 'M', 's'), + (0x1D599, 'M', 't'), + (0x1D59A, 'M', 'u'), + (0x1D59B, 'M', 'v'), + (0x1D59C, 'M', 'w'), + (0x1D59D, 'M', 'x'), + (0x1D59E, 'M', 'y'), + (0x1D59F, 'M', 'z'), + (0x1D5A0, 'M', 'a'), + (0x1D5A1, 'M', 'b'), + (0x1D5A2, 'M', 'c'), + (0x1D5A3, 'M', 'd'), + (0x1D5A4, 'M', 'e'), + (0x1D5A5, 'M', 'f'), + (0x1D5A6, 'M', 'g'), + (0x1D5A7, 'M', 'h'), + (0x1D5A8, 'M', 'i'), + (0x1D5A9, 'M', 'j'), + (0x1D5AA, 'M', 'k'), + (0x1D5AB, 'M', 'l'), + (0x1D5AC, 'M', 'm'), + (0x1D5AD, 'M', 'n'), + (0x1D5AE, 'M', 'o'), + (0x1D5AF, 'M', 'p'), + (0x1D5B0, 'M', 'q'), + (0x1D5B1, 'M', 'r'), + (0x1D5B2, 'M', 's'), + (0x1D5B3, 'M', 't'), + (0x1D5B4, 'M', 'u'), + (0x1D5B5, 'M', 'v'), + (0x1D5B6, 'M', 'w'), + (0x1D5B7, 'M', 'x'), + (0x1D5B8, 'M', 'y'), + (0x1D5B9, 'M', 'z'), + (0x1D5BA, 'M', 'a'), + (0x1D5BB, 'M', 'b'), + (0x1D5BC, 'M', 'c'), + (0x1D5BD, 'M', 'd'), + (0x1D5BE, 'M', 'e'), + (0x1D5BF, 'M', 'f'), + (0x1D5C0, 'M', 'g'), + (0x1D5C1, 'M', 'h'), + (0x1D5C2, 'M', 'i'), + (0x1D5C3, 'M', 'j'), + (0x1D5C4, 'M', 'k'), + (0x1D5C5, 'M', 'l'), + (0x1D5C6, 'M', 'm'), + (0x1D5C7, 'M', 'n'), + (0x1D5C8, 'M', 'o'), + (0x1D5C9, 'M', 'p'), + (0x1D5CA, 'M', 'q'), + (0x1D5CB, 'M', 'r'), + (0x1D5CC, 'M', 's'), + (0x1D5CD, 'M', 't'), + (0x1D5CE, 'M', 'u'), + (0x1D5CF, 'M', 
'v'), + (0x1D5D0, 'M', 'w'), + (0x1D5D1, 'M', 'x'), + (0x1D5D2, 'M', 'y'), + (0x1D5D3, 'M', 'z'), + (0x1D5D4, 'M', 'a'), + (0x1D5D5, 'M', 'b'), + (0x1D5D6, 'M', 'c'), + (0x1D5D7, 'M', 'd'), + (0x1D5D8, 'M', 'e'), + (0x1D5D9, 'M', 'f'), + (0x1D5DA, 'M', 'g'), + (0x1D5DB, 'M', 'h'), + (0x1D5DC, 'M', 'i'), + (0x1D5DD, 'M', 'j'), + (0x1D5DE, 'M', 'k'), + (0x1D5DF, 'M', 'l'), + (0x1D5E0, 'M', 'm'), + (0x1D5E1, 'M', 'n'), + (0x1D5E2, 'M', 'o'), + (0x1D5E3, 'M', 'p'), + (0x1D5E4, 'M', 'q'), + (0x1D5E5, 'M', 'r'), + (0x1D5E6, 'M', 's'), + (0x1D5E7, 'M', 't'), + ] + +def _seg_64(): + return [ + (0x1D5E8, 'M', 'u'), + (0x1D5E9, 'M', 'v'), + (0x1D5EA, 'M', 'w'), + (0x1D5EB, 'M', 'x'), + (0x1D5EC, 'M', 'y'), + (0x1D5ED, 'M', 'z'), + (0x1D5EE, 'M', 'a'), + (0x1D5EF, 'M', 'b'), + (0x1D5F0, 'M', 'c'), + (0x1D5F1, 'M', 'd'), + (0x1D5F2, 'M', 'e'), + (0x1D5F3, 'M', 'f'), + (0x1D5F4, 'M', 'g'), + (0x1D5F5, 'M', 'h'), + (0x1D5F6, 'M', 'i'), + (0x1D5F7, 'M', 'j'), + (0x1D5F8, 'M', 'k'), + (0x1D5F9, 'M', 'l'), + (0x1D5FA, 'M', 'm'), + (0x1D5FB, 'M', 'n'), + (0x1D5FC, 'M', 'o'), + (0x1D5FD, 'M', 'p'), + (0x1D5FE, 'M', 'q'), + (0x1D5FF, 'M', 'r'), + (0x1D600, 'M', 's'), + (0x1D601, 'M', 't'), + (0x1D602, 'M', 'u'), + (0x1D603, 'M', 'v'), + (0x1D604, 'M', 'w'), + (0x1D605, 'M', 'x'), + (0x1D606, 'M', 'y'), + (0x1D607, 'M', 'z'), + (0x1D608, 'M', 'a'), + (0x1D609, 'M', 'b'), + (0x1D60A, 'M', 'c'), + (0x1D60B, 'M', 'd'), + (0x1D60C, 'M', 'e'), + (0x1D60D, 'M', 'f'), + (0x1D60E, 'M', 'g'), + (0x1D60F, 'M', 'h'), + (0x1D610, 'M', 'i'), + (0x1D611, 'M', 'j'), + (0x1D612, 'M', 'k'), + (0x1D613, 'M', 'l'), + (0x1D614, 'M', 'm'), + (0x1D615, 'M', 'n'), + (0x1D616, 'M', 'o'), + (0x1D617, 'M', 'p'), + (0x1D618, 'M', 'q'), + (0x1D619, 'M', 'r'), + (0x1D61A, 'M', 's'), + (0x1D61B, 'M', 't'), + (0x1D61C, 'M', 'u'), + (0x1D61D, 'M', 'v'), + (0x1D61E, 'M', 'w'), + (0x1D61F, 'M', 'x'), + (0x1D620, 'M', 'y'), + (0x1D621, 'M', 'z'), + (0x1D622, 'M', 'a'), + (0x1D623, 'M', 'b'), + (0x1D624, 'M', 'c'), + (0x1D625, 'M', 'd'), + (0x1D626, 'M', 'e'), + (0x1D627, 'M', 'f'), + (0x1D628, 'M', 'g'), + (0x1D629, 'M', 'h'), + (0x1D62A, 'M', 'i'), + (0x1D62B, 'M', 'j'), + (0x1D62C, 'M', 'k'), + (0x1D62D, 'M', 'l'), + (0x1D62E, 'M', 'm'), + (0x1D62F, 'M', 'n'), + (0x1D630, 'M', 'o'), + (0x1D631, 'M', 'p'), + (0x1D632, 'M', 'q'), + (0x1D633, 'M', 'r'), + (0x1D634, 'M', 's'), + (0x1D635, 'M', 't'), + (0x1D636, 'M', 'u'), + (0x1D637, 'M', 'v'), + (0x1D638, 'M', 'w'), + (0x1D639, 'M', 'x'), + (0x1D63A, 'M', 'y'), + (0x1D63B, 'M', 'z'), + (0x1D63C, 'M', 'a'), + (0x1D63D, 'M', 'b'), + (0x1D63E, 'M', 'c'), + (0x1D63F, 'M', 'd'), + (0x1D640, 'M', 'e'), + (0x1D641, 'M', 'f'), + (0x1D642, 'M', 'g'), + (0x1D643, 'M', 'h'), + (0x1D644, 'M', 'i'), + (0x1D645, 'M', 'j'), + (0x1D646, 'M', 'k'), + (0x1D647, 'M', 'l'), + (0x1D648, 'M', 'm'), + (0x1D649, 'M', 'n'), + (0x1D64A, 'M', 'o'), + (0x1D64B, 'M', 'p'), + ] + +def _seg_65(): + return [ + (0x1D64C, 'M', 'q'), + (0x1D64D, 'M', 'r'), + (0x1D64E, 'M', 's'), + (0x1D64F, 'M', 't'), + (0x1D650, 'M', 'u'), + (0x1D651, 'M', 'v'), + (0x1D652, 'M', 'w'), + (0x1D653, 'M', 'x'), + (0x1D654, 'M', 'y'), + (0x1D655, 'M', 'z'), + (0x1D656, 'M', 'a'), + (0x1D657, 'M', 'b'), + (0x1D658, 'M', 'c'), + (0x1D659, 'M', 'd'), + (0x1D65A, 'M', 'e'), + (0x1D65B, 'M', 'f'), + (0x1D65C, 'M', 'g'), + (0x1D65D, 'M', 'h'), + (0x1D65E, 'M', 'i'), + (0x1D65F, 'M', 'j'), + (0x1D660, 'M', 'k'), + (0x1D661, 'M', 'l'), + (0x1D662, 'M', 'm'), + (0x1D663, 'M', 'n'), + (0x1D664, 'M', 'o'), + (0x1D665, 'M', 'p'), + (0x1D666, 'M', 'q'), + 
(0x1D667, 'M', 'r'), + (0x1D668, 'M', 's'), + (0x1D669, 'M', 't'), + (0x1D66A, 'M', 'u'), + (0x1D66B, 'M', 'v'), + (0x1D66C, 'M', 'w'), + (0x1D66D, 'M', 'x'), + (0x1D66E, 'M', 'y'), + (0x1D66F, 'M', 'z'), + (0x1D670, 'M', 'a'), + (0x1D671, 'M', 'b'), + (0x1D672, 'M', 'c'), + (0x1D673, 'M', 'd'), + (0x1D674, 'M', 'e'), + (0x1D675, 'M', 'f'), + (0x1D676, 'M', 'g'), + (0x1D677, 'M', 'h'), + (0x1D678, 'M', 'i'), + (0x1D679, 'M', 'j'), + (0x1D67A, 'M', 'k'), + (0x1D67B, 'M', 'l'), + (0x1D67C, 'M', 'm'), + (0x1D67D, 'M', 'n'), + (0x1D67E, 'M', 'o'), + (0x1D67F, 'M', 'p'), + (0x1D680, 'M', 'q'), + (0x1D681, 'M', 'r'), + (0x1D682, 'M', 's'), + (0x1D683, 'M', 't'), + (0x1D684, 'M', 'u'), + (0x1D685, 'M', 'v'), + (0x1D686, 'M', 'w'), + (0x1D687, 'M', 'x'), + (0x1D688, 'M', 'y'), + (0x1D689, 'M', 'z'), + (0x1D68A, 'M', 'a'), + (0x1D68B, 'M', 'b'), + (0x1D68C, 'M', 'c'), + (0x1D68D, 'M', 'd'), + (0x1D68E, 'M', 'e'), + (0x1D68F, 'M', 'f'), + (0x1D690, 'M', 'g'), + (0x1D691, 'M', 'h'), + (0x1D692, 'M', 'i'), + (0x1D693, 'M', 'j'), + (0x1D694, 'M', 'k'), + (0x1D695, 'M', 'l'), + (0x1D696, 'M', 'm'), + (0x1D697, 'M', 'n'), + (0x1D698, 'M', 'o'), + (0x1D699, 'M', 'p'), + (0x1D69A, 'M', 'q'), + (0x1D69B, 'M', 'r'), + (0x1D69C, 'M', 's'), + (0x1D69D, 'M', 't'), + (0x1D69E, 'M', 'u'), + (0x1D69F, 'M', 'v'), + (0x1D6A0, 'M', 'w'), + (0x1D6A1, 'M', 'x'), + (0x1D6A2, 'M', 'y'), + (0x1D6A3, 'M', 'z'), + (0x1D6A4, 'M', 'ı'), + (0x1D6A5, 'M', 'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', 'α'), + (0x1D6A9, 'M', 'β'), + (0x1D6AA, 'M', 'γ'), + (0x1D6AB, 'M', 'δ'), + (0x1D6AC, 'M', 'ε'), + (0x1D6AD, 'M', 'ζ'), + (0x1D6AE, 'M', 'η'), + (0x1D6AF, 'M', 'θ'), + (0x1D6B0, 'M', 'ι'), + ] + +def _seg_66(): + return [ + (0x1D6B1, 'M', 'κ'), + (0x1D6B2, 'M', 'λ'), + (0x1D6B3, 'M', 'μ'), + (0x1D6B4, 'M', 'ν'), + (0x1D6B5, 'M', 'ξ'), + (0x1D6B6, 'M', 'ο'), + (0x1D6B7, 'M', 'π'), + (0x1D6B8, 'M', 'ρ'), + (0x1D6B9, 'M', 'θ'), + (0x1D6BA, 'M', 'σ'), + (0x1D6BB, 'M', 'τ'), + (0x1D6BC, 'M', 'υ'), + (0x1D6BD, 'M', 'φ'), + (0x1D6BE, 'M', 'χ'), + (0x1D6BF, 'M', 'ψ'), + (0x1D6C0, 'M', 'ω'), + (0x1D6C1, 'M', '∇'), + (0x1D6C2, 'M', 'α'), + (0x1D6C3, 'M', 'β'), + (0x1D6C4, 'M', 'γ'), + (0x1D6C5, 'M', 'δ'), + (0x1D6C6, 'M', 'ε'), + (0x1D6C7, 'M', 'ζ'), + (0x1D6C8, 'M', 'η'), + (0x1D6C9, 'M', 'θ'), + (0x1D6CA, 'M', 'ι'), + (0x1D6CB, 'M', 'κ'), + (0x1D6CC, 'M', 'λ'), + (0x1D6CD, 'M', 'μ'), + (0x1D6CE, 'M', 'ν'), + (0x1D6CF, 'M', 'ξ'), + (0x1D6D0, 'M', 'ο'), + (0x1D6D1, 'M', 'π'), + (0x1D6D2, 'M', 'ρ'), + (0x1D6D3, 'M', 'σ'), + (0x1D6D5, 'M', 'τ'), + (0x1D6D6, 'M', 'υ'), + (0x1D6D7, 'M', 'φ'), + (0x1D6D8, 'M', 'χ'), + (0x1D6D9, 'M', 'ψ'), + (0x1D6DA, 'M', 'ω'), + (0x1D6DB, 'M', '∂'), + (0x1D6DC, 'M', 'ε'), + (0x1D6DD, 'M', 'θ'), + (0x1D6DE, 'M', 'κ'), + (0x1D6DF, 'M', 'φ'), + (0x1D6E0, 'M', 'ρ'), + (0x1D6E1, 'M', 'π'), + (0x1D6E2, 'M', 'α'), + (0x1D6E3, 'M', 'β'), + (0x1D6E4, 'M', 'γ'), + (0x1D6E5, 'M', 'δ'), + (0x1D6E6, 'M', 'ε'), + (0x1D6E7, 'M', 'ζ'), + (0x1D6E8, 'M', 'η'), + (0x1D6E9, 'M', 'θ'), + (0x1D6EA, 'M', 'ι'), + (0x1D6EB, 'M', 'κ'), + (0x1D6EC, 'M', 'λ'), + (0x1D6ED, 'M', 'μ'), + (0x1D6EE, 'M', 'ν'), + (0x1D6EF, 'M', 'ξ'), + (0x1D6F0, 'M', 'ο'), + (0x1D6F1, 'M', 'π'), + (0x1D6F2, 'M', 'ρ'), + (0x1D6F3, 'M', 'θ'), + (0x1D6F4, 'M', 'σ'), + (0x1D6F5, 'M', 'τ'), + (0x1D6F6, 'M', 'υ'), + (0x1D6F7, 'M', 'φ'), + (0x1D6F8, 'M', 'χ'), + (0x1D6F9, 'M', 'ψ'), + (0x1D6FA, 'M', 'ω'), + (0x1D6FB, 'M', '∇'), + (0x1D6FC, 'M', 'α'), + (0x1D6FD, 'M', 'β'), + (0x1D6FE, 'M', 'γ'), + (0x1D6FF, 'M', 'δ'), + (0x1D700, 'M', 'ε'), + (0x1D701, 'M', 'ζ'), + 
(0x1D702, 'M', 'η'), + (0x1D703, 'M', 'θ'), + (0x1D704, 'M', 'ι'), + (0x1D705, 'M', 'κ'), + (0x1D706, 'M', 'λ'), + (0x1D707, 'M', 'μ'), + (0x1D708, 'M', 'ν'), + (0x1D709, 'M', 'ξ'), + (0x1D70A, 'M', 'ο'), + (0x1D70B, 'M', 'π'), + (0x1D70C, 'M', 'ρ'), + (0x1D70D, 'M', 'σ'), + (0x1D70F, 'M', 'τ'), + (0x1D710, 'M', 'υ'), + (0x1D711, 'M', 'φ'), + (0x1D712, 'M', 'χ'), + (0x1D713, 'M', 'ψ'), + (0x1D714, 'M', 'ω'), + (0x1D715, 'M', '∂'), + (0x1D716, 'M', 'ε'), + ] + +def _seg_67(): + return [ + (0x1D717, 'M', 'θ'), + (0x1D718, 'M', 'κ'), + (0x1D719, 'M', 'φ'), + (0x1D71A, 'M', 'ρ'), + (0x1D71B, 'M', 'π'), + (0x1D71C, 'M', 'α'), + (0x1D71D, 'M', 'β'), + (0x1D71E, 'M', 'γ'), + (0x1D71F, 'M', 'δ'), + (0x1D720, 'M', 'ε'), + (0x1D721, 'M', 'ζ'), + (0x1D722, 'M', 'η'), + (0x1D723, 'M', 'θ'), + (0x1D724, 'M', 'ι'), + (0x1D725, 'M', 'κ'), + (0x1D726, 'M', 'λ'), + (0x1D727, 'M', 'μ'), + (0x1D728, 'M', 'ν'), + (0x1D729, 'M', 'ξ'), + (0x1D72A, 'M', 'ο'), + (0x1D72B, 'M', 'π'), + (0x1D72C, 'M', 'ρ'), + (0x1D72D, 'M', 'θ'), + (0x1D72E, 'M', 'σ'), + (0x1D72F, 'M', 'τ'), + (0x1D730, 'M', 'υ'), + (0x1D731, 'M', 'φ'), + (0x1D732, 'M', 'χ'), + (0x1D733, 'M', 'ψ'), + (0x1D734, 'M', 'ω'), + (0x1D735, 'M', '∇'), + (0x1D736, 'M', 'α'), + (0x1D737, 'M', 'β'), + (0x1D738, 'M', 'γ'), + (0x1D739, 'M', 'δ'), + (0x1D73A, 'M', 'ε'), + (0x1D73B, 'M', 'ζ'), + (0x1D73C, 'M', 'η'), + (0x1D73D, 'M', 'θ'), + (0x1D73E, 'M', 'ι'), + (0x1D73F, 'M', 'κ'), + (0x1D740, 'M', 'λ'), + (0x1D741, 'M', 'μ'), + (0x1D742, 'M', 'ν'), + (0x1D743, 'M', 'ξ'), + (0x1D744, 'M', 'ο'), + (0x1D745, 'M', 'π'), + (0x1D746, 'M', 'ρ'), + (0x1D747, 'M', 'σ'), + (0x1D749, 'M', 'τ'), + (0x1D74A, 'M', 'υ'), + (0x1D74B, 'M', 'φ'), + (0x1D74C, 'M', 'χ'), + (0x1D74D, 'M', 'ψ'), + (0x1D74E, 'M', 'ω'), + (0x1D74F, 'M', '∂'), + (0x1D750, 'M', 'ε'), + (0x1D751, 'M', 'θ'), + (0x1D752, 'M', 'κ'), + (0x1D753, 'M', 'φ'), + (0x1D754, 'M', 'ρ'), + (0x1D755, 'M', 'π'), + (0x1D756, 'M', 'α'), + (0x1D757, 'M', 'β'), + (0x1D758, 'M', 'γ'), + (0x1D759, 'M', 'δ'), + (0x1D75A, 'M', 'ε'), + (0x1D75B, 'M', 'ζ'), + (0x1D75C, 'M', 'η'), + (0x1D75D, 'M', 'θ'), + (0x1D75E, 'M', 'ι'), + (0x1D75F, 'M', 'κ'), + (0x1D760, 'M', 'λ'), + (0x1D761, 'M', 'μ'), + (0x1D762, 'M', 'ν'), + (0x1D763, 'M', 'ξ'), + (0x1D764, 'M', 'ο'), + (0x1D765, 'M', 'π'), + (0x1D766, 'M', 'ρ'), + (0x1D767, 'M', 'θ'), + (0x1D768, 'M', 'σ'), + (0x1D769, 'M', 'τ'), + (0x1D76A, 'M', 'υ'), + (0x1D76B, 'M', 'φ'), + (0x1D76C, 'M', 'χ'), + (0x1D76D, 'M', 'ψ'), + (0x1D76E, 'M', 'ω'), + (0x1D76F, 'M', '∇'), + (0x1D770, 'M', 'α'), + (0x1D771, 'M', 'β'), + (0x1D772, 'M', 'γ'), + (0x1D773, 'M', 'δ'), + (0x1D774, 'M', 'ε'), + (0x1D775, 'M', 'ζ'), + (0x1D776, 'M', 'η'), + (0x1D777, 'M', 'θ'), + (0x1D778, 'M', 'ι'), + (0x1D779, 'M', 'κ'), + (0x1D77A, 'M', 'λ'), + (0x1D77B, 'M', 'μ'), + ] + +def _seg_68(): + return [ + (0x1D77C, 'M', 'ν'), + (0x1D77D, 'M', 'ξ'), + (0x1D77E, 'M', 'ο'), + (0x1D77F, 'M', 'π'), + (0x1D780, 'M', 'ρ'), + (0x1D781, 'M', 'σ'), + (0x1D783, 'M', 'τ'), + (0x1D784, 'M', 'υ'), + (0x1D785, 'M', 'φ'), + (0x1D786, 'M', 'χ'), + (0x1D787, 'M', 'ψ'), + (0x1D788, 'M', 'ω'), + (0x1D789, 'M', '∂'), + (0x1D78A, 'M', 'ε'), + (0x1D78B, 'M', 'θ'), + (0x1D78C, 'M', 'κ'), + (0x1D78D, 'M', 'φ'), + (0x1D78E, 'M', 'ρ'), + (0x1D78F, 'M', 'π'), + (0x1D790, 'M', 'α'), + (0x1D791, 'M', 'β'), + (0x1D792, 'M', 'γ'), + (0x1D793, 'M', 'δ'), + (0x1D794, 'M', 'ε'), + (0x1D795, 'M', 'ζ'), + (0x1D796, 'M', 'η'), + (0x1D797, 'M', 'θ'), + (0x1D798, 'M', 'ι'), + (0x1D799, 'M', 'κ'), + (0x1D79A, 'M', 'λ'), + (0x1D79B, 'M', 'μ'), + (0x1D79C, 'M', 
'ν'), + (0x1D79D, 'M', 'ξ'), + (0x1D79E, 'M', 'ο'), + (0x1D79F, 'M', 'π'), + (0x1D7A0, 'M', 'ρ'), + (0x1D7A1, 'M', 'θ'), + (0x1D7A2, 'M', 'σ'), + (0x1D7A3, 'M', 'τ'), + (0x1D7A4, 'M', 'υ'), + (0x1D7A5, 'M', 'φ'), + (0x1D7A6, 'M', 'χ'), + (0x1D7A7, 'M', 'ψ'), + (0x1D7A8, 'M', 'ω'), + (0x1D7A9, 'M', '∇'), + (0x1D7AA, 'M', 'α'), + (0x1D7AB, 'M', 'β'), + (0x1D7AC, 'M', 'γ'), + (0x1D7AD, 'M', 'δ'), + (0x1D7AE, 'M', 'ε'), + (0x1D7AF, 'M', 'ζ'), + (0x1D7B0, 'M', 'η'), + (0x1D7B1, 'M', 'θ'), + (0x1D7B2, 'M', 'ι'), + (0x1D7B3, 'M', 'κ'), + (0x1D7B4, 'M', 'λ'), + (0x1D7B5, 'M', 'μ'), + (0x1D7B6, 'M', 'ν'), + (0x1D7B7, 'M', 'ξ'), + (0x1D7B8, 'M', 'ο'), + (0x1D7B9, 'M', 'π'), + (0x1D7BA, 'M', 'ρ'), + (0x1D7BB, 'M', 'σ'), + (0x1D7BD, 'M', 'τ'), + (0x1D7BE, 'M', 'υ'), + (0x1D7BF, 'M', 'φ'), + (0x1D7C0, 'M', 'χ'), + (0x1D7C1, 'M', 'ψ'), + (0x1D7C2, 'M', 'ω'), + (0x1D7C3, 'M', '∂'), + (0x1D7C4, 'M', 'ε'), + (0x1D7C5, 'M', 'θ'), + (0x1D7C6, 'M', 'κ'), + (0x1D7C7, 'M', 'φ'), + (0x1D7C8, 'M', 'ρ'), + (0x1D7C9, 'M', 'π'), + (0x1D7CA, 'M', 'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', '0'), + (0x1D7CF, 'M', '1'), + (0x1D7D0, 'M', '2'), + (0x1D7D1, 'M', '3'), + (0x1D7D2, 'M', '4'), + (0x1D7D3, 'M', '5'), + (0x1D7D4, 'M', '6'), + (0x1D7D5, 'M', '7'), + (0x1D7D6, 'M', '8'), + (0x1D7D7, 'M', '9'), + (0x1D7D8, 'M', '0'), + (0x1D7D9, 'M', '1'), + (0x1D7DA, 'M', '2'), + (0x1D7DB, 'M', '3'), + (0x1D7DC, 'M', '4'), + (0x1D7DD, 'M', '5'), + (0x1D7DE, 'M', '6'), + (0x1D7DF, 'M', '7'), + (0x1D7E0, 'M', '8'), + (0x1D7E1, 'M', '9'), + (0x1D7E2, 'M', '0'), + (0x1D7E3, 'M', '1'), + ] + +def _seg_69(): + return [ + (0x1D7E4, 'M', '2'), + (0x1D7E5, 'M', '3'), + (0x1D7E6, 'M', '4'), + (0x1D7E7, 'M', '5'), + (0x1D7E8, 'M', '6'), + (0x1D7E9, 'M', '7'), + (0x1D7EA, 'M', '8'), + (0x1D7EB, 'M', '9'), + (0x1D7EC, 'M', '0'), + (0x1D7ED, 'M', '1'), + (0x1D7EE, 'M', '2'), + (0x1D7EF, 'M', '3'), + (0x1D7F0, 'M', '4'), + (0x1D7F1, 'M', '5'), + (0x1D7F2, 'M', '6'), + (0x1D7F3, 'M', '7'), + (0x1D7F4, 'M', '8'), + (0x1D7F5, 'M', '9'), + (0x1D7F6, 'M', '0'), + (0x1D7F7, 'M', '1'), + (0x1D7F8, 'M', '2'), + (0x1D7F9, 'M', '3'), + (0x1D7FA, 'M', '4'), + (0x1D7FB, 'M', '5'), + (0x1D7FC, 'M', '6'), + (0x1D7FD, 'M', '7'), + (0x1D7FE, 'M', '8'), + (0x1D7FF, 'M', '9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', '𞤢'), + (0x1E901, 'M', '𞤣'), + (0x1E902, 'M', '𞤤'), + (0x1E903, 'M', '𞤥'), + (0x1E904, 'M', '𞤦'), + (0x1E905, 'M', '𞤧'), + (0x1E906, 'M', '𞤨'), + (0x1E907, 'M', '𞤩'), + (0x1E908, 'M', '𞤪'), + (0x1E909, 'M', '𞤫'), + (0x1E90A, 'M', '𞤬'), + (0x1E90B, 'M', '𞤭'), + (0x1E90C, 'M', '𞤮'), + (0x1E90D, 'M', '𞤯'), + (0x1E90E, 'M', '𞤰'), + (0x1E90F, 'M', '𞤱'), + (0x1E910, 'M', '𞤲'), + (0x1E911, 'M', '𞤳'), + (0x1E912, 'M', '𞤴'), + (0x1E913, 'M', '𞤵'), + (0x1E914, 'M', '𞤶'), + (0x1E915, 'M', '𞤷'), + (0x1E916, 'M', '𞤸'), + (0x1E917, 'M', '𞤹'), + (0x1E918, 'M', '𞤺'), + (0x1E919, 'M', '𞤻'), + (0x1E91A, 'M', '𞤼'), + (0x1E91B, 'M', '𞤽'), + (0x1E91C, 'M', '𞤾'), + (0x1E91D, 'M', '𞤿'), + (0x1E91E, 'M', '𞥀'), + (0x1E91F, 'M', '𞥁'), 
+ (0x1E920, 'M', '𞥂'), + (0x1E921, 'M', '𞥃'), + (0x1E922, 'V'), + (0x1E94C, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), + ] + +def _seg_70(): + return [ + (0x1EC71, 'V'), + (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), + (0x1EE00, 'M', 'ا'), + (0x1EE01, 'M', 'ب'), + (0x1EE02, 'M', 'ج'), + (0x1EE03, 'M', 'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', 'و'), + (0x1EE06, 'M', 'ز'), + (0x1EE07, 'M', 'ح'), + (0x1EE08, 'M', 'ط'), + (0x1EE09, 'M', 'ي'), + (0x1EE0A, 'M', 'ك'), + (0x1EE0B, 'M', 'ل'), + (0x1EE0C, 'M', 'م'), + (0x1EE0D, 'M', 'ن'), + (0x1EE0E, 'M', 'س'), + (0x1EE0F, 'M', 'ع'), + (0x1EE10, 'M', 'ف'), + (0x1EE11, 'M', 'ص'), + (0x1EE12, 'M', 'ق'), + (0x1EE13, 'M', 'ر'), + (0x1EE14, 'M', 'ش'), + (0x1EE15, 'M', 'ت'), + (0x1EE16, 'M', 'ث'), + (0x1EE17, 'M', 'خ'), + (0x1EE18, 'M', 'ذ'), + (0x1EE19, 'M', 'ض'), + (0x1EE1A, 'M', 'ظ'), + (0x1EE1B, 'M', 'غ'), + (0x1EE1C, 'M', 'ٮ'), + (0x1EE1D, 'M', 'ں'), + (0x1EE1E, 'M', 'ڡ'), + (0x1EE1F, 'M', 'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', 'ب'), + (0x1EE22, 'M', 'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', 'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', 'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', 'ي'), + (0x1EE2A, 'M', 'ك'), + (0x1EE2B, 'M', 'ل'), + (0x1EE2C, 'M', 'م'), + (0x1EE2D, 'M', 'ن'), + (0x1EE2E, 'M', 'س'), + (0x1EE2F, 'M', 'ع'), + (0x1EE30, 'M', 'ف'), + (0x1EE31, 'M', 'ص'), + (0x1EE32, 'M', 'ق'), + (0x1EE33, 'X'), + (0x1EE34, 'M', 'ش'), + (0x1EE35, 'M', 'ت'), + (0x1EE36, 'M', 'ث'), + (0x1EE37, 'M', 'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', 'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', 'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', 'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', 'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', 'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', 'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', 'ن'), + (0x1EE4E, 'M', 'س'), + (0x1EE4F, 'M', 'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', 'ص'), + (0x1EE52, 'M', 'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', 'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', 'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', 'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', 'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', 'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', 'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', 'ب'), + (0x1EE62, 'M', 'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', 'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', 'ح'), + (0x1EE68, 'M', 'ط'), + (0x1EE69, 'M', 'ي'), + (0x1EE6A, 'M', 'ك'), + ] + +def _seg_71(): + return [ + (0x1EE6B, 'X'), + (0x1EE6C, 'M', 'م'), + (0x1EE6D, 'M', 'ن'), + (0x1EE6E, 'M', 'س'), + (0x1EE6F, 'M', 'ع'), + (0x1EE70, 'M', 'ف'), + (0x1EE71, 'M', 'ص'), + (0x1EE72, 'M', 'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', 'ش'), + (0x1EE75, 'M', 'ت'), + (0x1EE76, 'M', 'ث'), + (0x1EE77, 'M', 'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', 'ض'), + (0x1EE7A, 'M', 'ظ'), + (0x1EE7B, 'M', 'غ'), + (0x1EE7C, 'M', 'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', 'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', 'ا'), + (0x1EE81, 'M', 'ب'), + (0x1EE82, 'M', 'ج'), + (0x1EE83, 'M', 'د'), + (0x1EE84, 'M', 'ه'), + (0x1EE85, 'M', 'و'), + (0x1EE86, 'M', 'ز'), + (0x1EE87, 'M', 'ح'), + (0x1EE88, 'M', 'ط'), + (0x1EE89, 'M', 'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', 'ل'), + (0x1EE8C, 'M', 'م'), + (0x1EE8D, 'M', 'ن'), + (0x1EE8E, 'M', 'س'), + (0x1EE8F, 'M', 'ع'), + (0x1EE90, 'M', 'ف'), + (0x1EE91, 'M', 'ص'), + (0x1EE92, 'M', 'ق'), + (0x1EE93, 'M', 'ر'), + (0x1EE94, 'M', 'ش'), + (0x1EE95, 'M', 'ت'), + (0x1EE96, 'M', 'ث'), + (0x1EE97, 'M', 'خ'), + (0x1EE98, 'M', 'ذ'), + (0x1EE99, 'M', 'ض'), + (0x1EE9A, 'M', 'ظ'), + (0x1EE9B, 'M', 'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', 'ب'), + (0x1EEA2, 'M', 'ج'), + 
(0x1EEA3, 'M', 'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', 'و'), + (0x1EEA6, 'M', 'ز'), + (0x1EEA7, 'M', 'ح'), + (0x1EEA8, 'M', 'ط'), + (0x1EEA9, 'M', 'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', 'ل'), + (0x1EEAC, 'M', 'م'), + (0x1EEAD, 'M', 'ن'), + (0x1EEAE, 'M', 'س'), + (0x1EEAF, 'M', 'ع'), + (0x1EEB0, 'M', 'ف'), + (0x1EEB1, 'M', 'ص'), + (0x1EEB2, 'M', 'ق'), + (0x1EEB3, 'M', 'ر'), + (0x1EEB4, 'M', 'ش'), + (0x1EEB5, 'M', 'ت'), + (0x1EEB6, 'M', 'ث'), + (0x1EEB7, 'M', 'خ'), + (0x1EEB8, 'M', 'ذ'), + (0x1EEB9, 'M', 'ض'), + (0x1EEBA, 'M', 'ظ'), + (0x1EEBB, 'M', 'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', '0,'), + (0x1F102, '3', '1,'), + (0x1F103, '3', '2,'), + (0x1F104, '3', '3,'), + (0x1F105, '3', '4,'), + (0x1F106, '3', '5,'), + (0x1F107, '3', '6,'), + (0x1F108, '3', '7,'), + ] + +def _seg_72(): + return [ + (0x1F109, '3', '8,'), + (0x1F10A, '3', '9,'), + (0x1F10B, 'V'), + (0x1F110, '3', '(a)'), + (0x1F111, '3', '(b)'), + (0x1F112, '3', '(c)'), + (0x1F113, '3', '(d)'), + (0x1F114, '3', '(e)'), + (0x1F115, '3', '(f)'), + (0x1F116, '3', '(g)'), + (0x1F117, '3', '(h)'), + (0x1F118, '3', '(i)'), + (0x1F119, '3', '(j)'), + (0x1F11A, '3', '(k)'), + (0x1F11B, '3', '(l)'), + (0x1F11C, '3', '(m)'), + (0x1F11D, '3', '(n)'), + (0x1F11E, '3', '(o)'), + (0x1F11F, '3', '(p)'), + (0x1F120, '3', '(q)'), + (0x1F121, '3', '(r)'), + (0x1F122, '3', '(s)'), + (0x1F123, '3', '(t)'), + (0x1F124, '3', '(u)'), + (0x1F125, '3', '(v)'), + (0x1F126, '3', '(w)'), + (0x1F127, '3', '(x)'), + (0x1F128, '3', '(y)'), + (0x1F129, '3', '(z)'), + (0x1F12A, 'M', '〔s〕'), + (0x1F12B, 'M', 'c'), + (0x1F12C, 'M', 'r'), + (0x1F12D, 'M', 'cd'), + (0x1F12E, 'M', 'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', 'a'), + (0x1F131, 'M', 'b'), + (0x1F132, 'M', 'c'), + (0x1F133, 'M', 'd'), + (0x1F134, 'M', 'e'), + (0x1F135, 'M', 'f'), + (0x1F136, 'M', 'g'), + (0x1F137, 'M', 'h'), + (0x1F138, 'M', 'i'), + (0x1F139, 'M', 'j'), + (0x1F13A, 'M', 'k'), + (0x1F13B, 'M', 'l'), + (0x1F13C, 'M', 'm'), + (0x1F13D, 'M', 'n'), + (0x1F13E, 'M', 'o'), + (0x1F13F, 'M', 'p'), + (0x1F140, 'M', 'q'), + (0x1F141, 'M', 'r'), + (0x1F142, 'M', 's'), + (0x1F143, 'M', 't'), + (0x1F144, 'M', 'u'), + (0x1F145, 'M', 'v'), + (0x1F146, 'M', 'w'), + (0x1F147, 'M', 'x'), + (0x1F148, 'M', 'y'), + (0x1F149, 'M', 'z'), + (0x1F14A, 'M', 'hv'), + (0x1F14B, 'M', 'mv'), + (0x1F14C, 'M', 'sd'), + (0x1F14D, 'M', 'ss'), + (0x1F14E, 'M', 'ppv'), + (0x1F14F, 'M', 'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', 'mc'), + (0x1F16B, 'M', 'md'), + (0x1F16C, 'M', 'mr'), + (0x1F16D, 'V'), + (0x1F190, 'M', 'dj'), + (0x1F191, 'V'), + (0x1F1AE, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', 'ほか'), + (0x1F201, 'M', 'ココ'), + (0x1F202, 'M', 'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', '手'), + (0x1F211, 'M', '字'), + (0x1F212, 'M', '双'), + (0x1F213, 'M', 'デ'), + (0x1F214, 'M', '二'), + (0x1F215, 'M', '多'), + (0x1F216, 'M', '解'), + (0x1F217, 'M', '天'), + (0x1F218, 'M', '交'), + (0x1F219, 'M', '映'), + (0x1F21A, 'M', '無'), + (0x1F21B, 'M', '料'), + (0x1F21C, 'M', '前'), + (0x1F21D, 'M', '後'), + (0x1F21E, 'M', '再'), + (0x1F21F, 'M', '新'), + (0x1F220, 'M', '初'), + (0x1F221, 'M', '終'), + (0x1F222, 'M', '生'), + (0x1F223, 'M', '販'), + ] + +def _seg_73(): + return [ + (0x1F224, 'M', '声'), + (0x1F225, 'M', '吹'), + (0x1F226, 'M', '演'), + (0x1F227, 'M', '投'), + (0x1F228, 'M', '捕'), + (0x1F229, 'M', 
'一'), + (0x1F22A, 'M', '三'), + (0x1F22B, 'M', '遊'), + (0x1F22C, 'M', '左'), + (0x1F22D, 'M', '中'), + (0x1F22E, 'M', '右'), + (0x1F22F, 'M', '指'), + (0x1F230, 'M', '走'), + (0x1F231, 'M', '打'), + (0x1F232, 'M', '禁'), + (0x1F233, 'M', '空'), + (0x1F234, 'M', '合'), + (0x1F235, 'M', '満'), + (0x1F236, 'M', '有'), + (0x1F237, 'M', '月'), + (0x1F238, 'M', '申'), + (0x1F239, 'M', '割'), + (0x1F23A, 'M', '営'), + (0x1F23B, 'M', '配'), + (0x1F23C, 'X'), + (0x1F240, 'M', '〔本〕'), + (0x1F241, 'M', '〔三〕'), + (0x1F242, 'M', '〔二〕'), + (0x1F243, 'M', '〔安〕'), + (0x1F244, 'M', '〔点〕'), + (0x1F245, 'M', '〔打〕'), + (0x1F246, 'M', '〔盗〕'), + (0x1F247, 'M', '〔勝〕'), + (0x1F248, 'M', '〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', '得'), + (0x1F251, 'M', '可'), + (0x1F252, 'X'), + (0x1F260, 'V'), + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D8, 'X'), + (0x1F6E0, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6FD, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x1F780, 'V'), + (0x1F7D9, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), + (0x1F800, 'V'), + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), + (0x1F900, 'V'), + (0x1F979, 'X'), + (0x1F97A, 'V'), + (0x1F9CC, 'X'), + (0x1F9CD, 'V'), + (0x1FA54, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), + (0x1FA70, 'V'), + (0x1FA75, 'X'), + (0x1FA78, 'V'), + (0x1FA7B, 'X'), + (0x1FA80, 'V'), + (0x1FA87, 'X'), + (0x1FA90, 'V'), + (0x1FAA9, 'X'), + (0x1FAB0, 'V'), + (0x1FAB7, 'X'), + (0x1FAC0, 'V'), + (0x1FAC3, 'X'), + (0x1FAD0, 'V'), + (0x1FAD7, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', '0'), + (0x1FBF1, 'M', '1'), + (0x1FBF2, 'M', '2'), + (0x1FBF3, 'M', '3'), + (0x1FBF4, 'M', '4'), + (0x1FBF5, 'M', '5'), + (0x1FBF6, 'M', '6'), + (0x1FBF7, 'M', '7'), + (0x1FBF8, 'M', '8'), + (0x1FBF9, 'M', '9'), + ] + +def _seg_74(): + return [ + (0x1FBFA, 'X'), + (0x20000, 'V'), + (0x2A6DE, 'X'), + (0x2A700, 'V'), + (0x2B735, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2F800, 'M', '丽'), + (0x2F801, 'M', '丸'), + (0x2F802, 'M', '乁'), + (0x2F803, 'M', '𠄢'), + (0x2F804, 'M', '你'), + (0x2F805, 'M', '侮'), + (0x2F806, 'M', '侻'), + (0x2F807, 'M', '倂'), + (0x2F808, 'M', '偺'), + (0x2F809, 'M', '備'), + (0x2F80A, 'M', '僧'), + (0x2F80B, 'M', '像'), + (0x2F80C, 'M', '㒞'), + (0x2F80D, 'M', '𠘺'), + (0x2F80E, 'M', '免'), + (0x2F80F, 'M', '兔'), + (0x2F810, 'M', '兤'), + (0x2F811, 'M', '具'), + (0x2F812, 'M', '𠔜'), + (0x2F813, 'M', '㒹'), + (0x2F814, 'M', '內'), + (0x2F815, 'M', '再'), + (0x2F816, 'M', '𠕋'), + (0x2F817, 'M', '冗'), + (0x2F818, 'M', '冤'), + (0x2F819, 'M', '仌'), + (0x2F81A, 'M', '冬'), + (0x2F81B, 'M', '况'), + (0x2F81C, 'M', '𩇟'), + (0x2F81D, 'M', '凵'), + (0x2F81E, 'M', '刃'), + (0x2F81F, 'M', '㓟'), + (0x2F820, 'M', '刻'), + (0x2F821, 'M', '剆'), + (0x2F822, 'M', '割'), + (0x2F823, 'M', '剷'), + (0x2F824, 'M', '㔕'), + (0x2F825, 'M', '勇'), + (0x2F826, 'M', '勉'), + (0x2F827, 'M', '勤'), + (0x2F828, 'M', '勺'), + (0x2F829, 'M', '包'), + (0x2F82A, 'M', '匆'), + (0x2F82B, 'M', '北'), + (0x2F82C, 'M', '卉'), + (0x2F82D, 'M', '卑'), + (0x2F82E, 'M', '博'), + (0x2F82F, 'M', '即'), + (0x2F830, 'M', '卽'), + (0x2F831, 'M', '卿'), + (0x2F834, 'M', '𠨬'), + (0x2F835, 'M', '灰'), + (0x2F836, 'M', '及'), + (0x2F837, 'M', '叟'), + (0x2F838, 'M', '𠭣'), + (0x2F839, 'M', '叫'), + (0x2F83A, 'M', '叱'), + (0x2F83B, 'M', '吆'), + (0x2F83C, 'M', '咞'), + (0x2F83D, 'M', '吸'), + (0x2F83E, 'M', '呈'), + 
(0x2F83F, 'M', '周'), + (0x2F840, 'M', '咢'), + (0x2F841, 'M', '哶'), + (0x2F842, 'M', '唐'), + (0x2F843, 'M', '啓'), + (0x2F844, 'M', '啣'), + (0x2F845, 'M', '善'), + (0x2F847, 'M', '喙'), + (0x2F848, 'M', '喫'), + (0x2F849, 'M', '喳'), + (0x2F84A, 'M', '嗂'), + (0x2F84B, 'M', '圖'), + (0x2F84C, 'M', '嘆'), + (0x2F84D, 'M', '圗'), + (0x2F84E, 'M', '噑'), + (0x2F84F, 'M', '噴'), + (0x2F850, 'M', '切'), + (0x2F851, 'M', '壮'), + (0x2F852, 'M', '城'), + (0x2F853, 'M', '埴'), + (0x2F854, 'M', '堍'), + (0x2F855, 'M', '型'), + (0x2F856, 'M', '堲'), + (0x2F857, 'M', '報'), + (0x2F858, 'M', '墬'), + (0x2F859, 'M', '𡓤'), + (0x2F85A, 'M', '売'), + (0x2F85B, 'M', '壷'), + ] + +def _seg_75(): + return [ + (0x2F85C, 'M', '夆'), + (0x2F85D, 'M', '多'), + (0x2F85E, 'M', '夢'), + (0x2F85F, 'M', '奢'), + (0x2F860, 'M', '𡚨'), + (0x2F861, 'M', '𡛪'), + (0x2F862, 'M', '姬'), + (0x2F863, 'M', '娛'), + (0x2F864, 'M', '娧'), + (0x2F865, 'M', '姘'), + (0x2F866, 'M', '婦'), + (0x2F867, 'M', '㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', '嬈'), + (0x2F86A, 'M', '嬾'), + (0x2F86C, 'M', '𡧈'), + (0x2F86D, 'M', '寃'), + (0x2F86E, 'M', '寘'), + (0x2F86F, 'M', '寧'), + (0x2F870, 'M', '寳'), + (0x2F871, 'M', '𡬘'), + (0x2F872, 'M', '寿'), + (0x2F873, 'M', '将'), + (0x2F874, 'X'), + (0x2F875, 'M', '尢'), + (0x2F876, 'M', '㞁'), + (0x2F877, 'M', '屠'), + (0x2F878, 'M', '屮'), + (0x2F879, 'M', '峀'), + (0x2F87A, 'M', '岍'), + (0x2F87B, 'M', '𡷤'), + (0x2F87C, 'M', '嵃'), + (0x2F87D, 'M', '𡷦'), + (0x2F87E, 'M', '嵮'), + (0x2F87F, 'M', '嵫'), + (0x2F880, 'M', '嵼'), + (0x2F881, 'M', '巡'), + (0x2F882, 'M', '巢'), + (0x2F883, 'M', '㠯'), + (0x2F884, 'M', '巽'), + (0x2F885, 'M', '帨'), + (0x2F886, 'M', '帽'), + (0x2F887, 'M', '幩'), + (0x2F888, 'M', '㡢'), + (0x2F889, 'M', '𢆃'), + (0x2F88A, 'M', '㡼'), + (0x2F88B, 'M', '庰'), + (0x2F88C, 'M', '庳'), + (0x2F88D, 'M', '庶'), + (0x2F88E, 'M', '廊'), + (0x2F88F, 'M', '𪎒'), + (0x2F890, 'M', '廾'), + (0x2F891, 'M', '𢌱'), + (0x2F893, 'M', '舁'), + (0x2F894, 'M', '弢'), + (0x2F896, 'M', '㣇'), + (0x2F897, 'M', '𣊸'), + (0x2F898, 'M', '𦇚'), + (0x2F899, 'M', '形'), + (0x2F89A, 'M', '彫'), + (0x2F89B, 'M', '㣣'), + (0x2F89C, 'M', '徚'), + (0x2F89D, 'M', '忍'), + (0x2F89E, 'M', '志'), + (0x2F89F, 'M', '忹'), + (0x2F8A0, 'M', '悁'), + (0x2F8A1, 'M', '㤺'), + (0x2F8A2, 'M', '㤜'), + (0x2F8A3, 'M', '悔'), + (0x2F8A4, 'M', '𢛔'), + (0x2F8A5, 'M', '惇'), + (0x2F8A6, 'M', '慈'), + (0x2F8A7, 'M', '慌'), + (0x2F8A8, 'M', '慎'), + (0x2F8A9, 'M', '慌'), + (0x2F8AA, 'M', '慺'), + (0x2F8AB, 'M', '憎'), + (0x2F8AC, 'M', '憲'), + (0x2F8AD, 'M', '憤'), + (0x2F8AE, 'M', '憯'), + (0x2F8AF, 'M', '懞'), + (0x2F8B0, 'M', '懲'), + (0x2F8B1, 'M', '懶'), + (0x2F8B2, 'M', '成'), + (0x2F8B3, 'M', '戛'), + (0x2F8B4, 'M', '扝'), + (0x2F8B5, 'M', '抱'), + (0x2F8B6, 'M', '拔'), + (0x2F8B7, 'M', '捐'), + (0x2F8B8, 'M', '𢬌'), + (0x2F8B9, 'M', '挽'), + (0x2F8BA, 'M', '拼'), + (0x2F8BB, 'M', '捨'), + (0x2F8BC, 'M', '掃'), + (0x2F8BD, 'M', '揤'), + (0x2F8BE, 'M', '𢯱'), + (0x2F8BF, 'M', '搢'), + (0x2F8C0, 'M', '揅'), + (0x2F8C1, 'M', '掩'), + (0x2F8C2, 'M', '㨮'), + ] + +def _seg_76(): + return [ + (0x2F8C3, 'M', '摩'), + (0x2F8C4, 'M', '摾'), + (0x2F8C5, 'M', '撝'), + (0x2F8C6, 'M', '摷'), + (0x2F8C7, 'M', '㩬'), + (0x2F8C8, 'M', '敏'), + (0x2F8C9, 'M', '敬'), + (0x2F8CA, 'M', '𣀊'), + (0x2F8CB, 'M', '旣'), + (0x2F8CC, 'M', '書'), + (0x2F8CD, 'M', '晉'), + (0x2F8CE, 'M', '㬙'), + (0x2F8CF, 'M', '暑'), + (0x2F8D0, 'M', '㬈'), + (0x2F8D1, 'M', '㫤'), + (0x2F8D2, 'M', '冒'), + (0x2F8D3, 'M', '冕'), + (0x2F8D4, 'M', '最'), + (0x2F8D5, 'M', '暜'), + (0x2F8D6, 'M', '肭'), + (0x2F8D7, 'M', '䏙'), + (0x2F8D8, 'M', '朗'), + (0x2F8D9, 'M', '望'), + (0x2F8DA, 'M', '朡'), + 
(0x2F8DB, 'M', '杞'), + (0x2F8DC, 'M', '杓'), + (0x2F8DD, 'M', '𣏃'), + (0x2F8DE, 'M', '㭉'), + (0x2F8DF, 'M', '柺'), + (0x2F8E0, 'M', '枅'), + (0x2F8E1, 'M', '桒'), + (0x2F8E2, 'M', '梅'), + (0x2F8E3, 'M', '𣑭'), + (0x2F8E4, 'M', '梎'), + (0x2F8E5, 'M', '栟'), + (0x2F8E6, 'M', '椔'), + (0x2F8E7, 'M', '㮝'), + (0x2F8E8, 'M', '楂'), + (0x2F8E9, 'M', '榣'), + (0x2F8EA, 'M', '槪'), + (0x2F8EB, 'M', '檨'), + (0x2F8EC, 'M', '𣚣'), + (0x2F8ED, 'M', '櫛'), + (0x2F8EE, 'M', '㰘'), + (0x2F8EF, 'M', '次'), + (0x2F8F0, 'M', '𣢧'), + (0x2F8F1, 'M', '歔'), + (0x2F8F2, 'M', '㱎'), + (0x2F8F3, 'M', '歲'), + (0x2F8F4, 'M', '殟'), + (0x2F8F5, 'M', '殺'), + (0x2F8F6, 'M', '殻'), + (0x2F8F7, 'M', '𣪍'), + (0x2F8F8, 'M', '𡴋'), + (0x2F8F9, 'M', '𣫺'), + (0x2F8FA, 'M', '汎'), + (0x2F8FB, 'M', '𣲼'), + (0x2F8FC, 'M', '沿'), + (0x2F8FD, 'M', '泍'), + (0x2F8FE, 'M', '汧'), + (0x2F8FF, 'M', '洖'), + (0x2F900, 'M', '派'), + (0x2F901, 'M', '海'), + (0x2F902, 'M', '流'), + (0x2F903, 'M', '浩'), + (0x2F904, 'M', '浸'), + (0x2F905, 'M', '涅'), + (0x2F906, 'M', '𣴞'), + (0x2F907, 'M', '洴'), + (0x2F908, 'M', '港'), + (0x2F909, 'M', '湮'), + (0x2F90A, 'M', '㴳'), + (0x2F90B, 'M', '滋'), + (0x2F90C, 'M', '滇'), + (0x2F90D, 'M', '𣻑'), + (0x2F90E, 'M', '淹'), + (0x2F90F, 'M', '潮'), + (0x2F910, 'M', '𣽞'), + (0x2F911, 'M', '𣾎'), + (0x2F912, 'M', '濆'), + (0x2F913, 'M', '瀹'), + (0x2F914, 'M', '瀞'), + (0x2F915, 'M', '瀛'), + (0x2F916, 'M', '㶖'), + (0x2F917, 'M', '灊'), + (0x2F918, 'M', '災'), + (0x2F919, 'M', '灷'), + (0x2F91A, 'M', '炭'), + (0x2F91B, 'M', '𠔥'), + (0x2F91C, 'M', '煅'), + (0x2F91D, 'M', '𤉣'), + (0x2F91E, 'M', '熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', '爨'), + (0x2F921, 'M', '爵'), + (0x2F922, 'M', '牐'), + (0x2F923, 'M', '𤘈'), + (0x2F924, 'M', '犀'), + (0x2F925, 'M', '犕'), + (0x2F926, 'M', '𤜵'), + ] + +def _seg_77(): + return [ + (0x2F927, 'M', '𤠔'), + (0x2F928, 'M', '獺'), + (0x2F929, 'M', '王'), + (0x2F92A, 'M', '㺬'), + (0x2F92B, 'M', '玥'), + (0x2F92C, 'M', '㺸'), + (0x2F92E, 'M', '瑇'), + (0x2F92F, 'M', '瑜'), + (0x2F930, 'M', '瑱'), + (0x2F931, 'M', '璅'), + (0x2F932, 'M', '瓊'), + (0x2F933, 'M', '㼛'), + (0x2F934, 'M', '甤'), + (0x2F935, 'M', '𤰶'), + (0x2F936, 'M', '甾'), + (0x2F937, 'M', '𤲒'), + (0x2F938, 'M', '異'), + (0x2F939, 'M', '𢆟'), + (0x2F93A, 'M', '瘐'), + (0x2F93B, 'M', '𤾡'), + (0x2F93C, 'M', '𤾸'), + (0x2F93D, 'M', '𥁄'), + (0x2F93E, 'M', '㿼'), + (0x2F93F, 'M', '䀈'), + (0x2F940, 'M', '直'), + (0x2F941, 'M', '𥃳'), + (0x2F942, 'M', '𥃲'), + (0x2F943, 'M', '𥄙'), + (0x2F944, 'M', '𥄳'), + (0x2F945, 'M', '眞'), + (0x2F946, 'M', '真'), + (0x2F948, 'M', '睊'), + (0x2F949, 'M', '䀹'), + (0x2F94A, 'M', '瞋'), + (0x2F94B, 'M', '䁆'), + (0x2F94C, 'M', '䂖'), + (0x2F94D, 'M', '𥐝'), + (0x2F94E, 'M', '硎'), + (0x2F94F, 'M', '碌'), + (0x2F950, 'M', '磌'), + (0x2F951, 'M', '䃣'), + (0x2F952, 'M', '𥘦'), + (0x2F953, 'M', '祖'), + (0x2F954, 'M', '𥚚'), + (0x2F955, 'M', '𥛅'), + (0x2F956, 'M', '福'), + (0x2F957, 'M', '秫'), + (0x2F958, 'M', '䄯'), + (0x2F959, 'M', '穀'), + (0x2F95A, 'M', '穊'), + (0x2F95B, 'M', '穏'), + (0x2F95C, 'M', '𥥼'), + (0x2F95D, 'M', '𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', '䈂'), + (0x2F961, 'M', '𥮫'), + (0x2F962, 'M', '篆'), + (0x2F963, 'M', '築'), + (0x2F964, 'M', '䈧'), + (0x2F965, 'M', '𥲀'), + (0x2F966, 'M', '糒'), + (0x2F967, 'M', '䊠'), + (0x2F968, 'M', '糨'), + (0x2F969, 'M', '糣'), + (0x2F96A, 'M', '紀'), + (0x2F96B, 'M', '𥾆'), + (0x2F96C, 'M', '絣'), + (0x2F96D, 'M', '䌁'), + (0x2F96E, 'M', '緇'), + (0x2F96F, 'M', '縂'), + (0x2F970, 'M', '繅'), + (0x2F971, 'M', '䌴'), + (0x2F972, 'M', '𦈨'), + (0x2F973, 'M', '𦉇'), + (0x2F974, 'M', '䍙'), + (0x2F975, 'M', '𦋙'), + (0x2F976, 'M', '罺'), + (0x2F977, 
'M', '𦌾'), + (0x2F978, 'M', '羕'), + (0x2F979, 'M', '翺'), + (0x2F97A, 'M', '者'), + (0x2F97B, 'M', '𦓚'), + (0x2F97C, 'M', '𦔣'), + (0x2F97D, 'M', '聠'), + (0x2F97E, 'M', '𦖨'), + (0x2F97F, 'M', '聰'), + (0x2F980, 'M', '𣍟'), + (0x2F981, 'M', '䏕'), + (0x2F982, 'M', '育'), + (0x2F983, 'M', '脃'), + (0x2F984, 'M', '䐋'), + (0x2F985, 'M', '脾'), + (0x2F986, 'M', '媵'), + (0x2F987, 'M', '𦞧'), + (0x2F988, 'M', '𦞵'), + (0x2F989, 'M', '𣎓'), + (0x2F98A, 'M', '𣎜'), + (0x2F98B, 'M', '舁'), + (0x2F98C, 'M', '舄'), + (0x2F98D, 'M', '辞'), + ] + +def _seg_78(): + return [ + (0x2F98E, 'M', '䑫'), + (0x2F98F, 'M', '芑'), + (0x2F990, 'M', '芋'), + (0x2F991, 'M', '芝'), + (0x2F992, 'M', '劳'), + (0x2F993, 'M', '花'), + (0x2F994, 'M', '芳'), + (0x2F995, 'M', '芽'), + (0x2F996, 'M', '苦'), + (0x2F997, 'M', '𦬼'), + (0x2F998, 'M', '若'), + (0x2F999, 'M', '茝'), + (0x2F99A, 'M', '荣'), + (0x2F99B, 'M', '莭'), + (0x2F99C, 'M', '茣'), + (0x2F99D, 'M', '莽'), + (0x2F99E, 'M', '菧'), + (0x2F99F, 'M', '著'), + (0x2F9A0, 'M', '荓'), + (0x2F9A1, 'M', '菊'), + (0x2F9A2, 'M', '菌'), + (0x2F9A3, 'M', '菜'), + (0x2F9A4, 'M', '𦰶'), + (0x2F9A5, 'M', '𦵫'), + (0x2F9A6, 'M', '𦳕'), + (0x2F9A7, 'M', '䔫'), + (0x2F9A8, 'M', '蓱'), + (0x2F9A9, 'M', '蓳'), + (0x2F9AA, 'M', '蔖'), + (0x2F9AB, 'M', '𧏊'), + (0x2F9AC, 'M', '蕤'), + (0x2F9AD, 'M', '𦼬'), + (0x2F9AE, 'M', '䕝'), + (0x2F9AF, 'M', '䕡'), + (0x2F9B0, 'M', '𦾱'), + (0x2F9B1, 'M', '𧃒'), + (0x2F9B2, 'M', '䕫'), + (0x2F9B3, 'M', '虐'), + (0x2F9B4, 'M', '虜'), + (0x2F9B5, 'M', '虧'), + (0x2F9B6, 'M', '虩'), + (0x2F9B7, 'M', '蚩'), + (0x2F9B8, 'M', '蚈'), + (0x2F9B9, 'M', '蜎'), + (0x2F9BA, 'M', '蛢'), + (0x2F9BB, 'M', '蝹'), + (0x2F9BC, 'M', '蜨'), + (0x2F9BD, 'M', '蝫'), + (0x2F9BE, 'M', '螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', '蟡'), + (0x2F9C1, 'M', '蠁'), + (0x2F9C2, 'M', '䗹'), + (0x2F9C3, 'M', '衠'), + (0x2F9C4, 'M', '衣'), + (0x2F9C5, 'M', '𧙧'), + (0x2F9C6, 'M', '裗'), + (0x2F9C7, 'M', '裞'), + (0x2F9C8, 'M', '䘵'), + (0x2F9C9, 'M', '裺'), + (0x2F9CA, 'M', '㒻'), + (0x2F9CB, 'M', '𧢮'), + (0x2F9CC, 'M', '𧥦'), + (0x2F9CD, 'M', '䚾'), + (0x2F9CE, 'M', '䛇'), + (0x2F9CF, 'M', '誠'), + (0x2F9D0, 'M', '諭'), + (0x2F9D1, 'M', '變'), + (0x2F9D2, 'M', '豕'), + (0x2F9D3, 'M', '𧲨'), + (0x2F9D4, 'M', '貫'), + (0x2F9D5, 'M', '賁'), + (0x2F9D6, 'M', '贛'), + (0x2F9D7, 'M', '起'), + (0x2F9D8, 'M', '𧼯'), + (0x2F9D9, 'M', '𠠄'), + (0x2F9DA, 'M', '跋'), + (0x2F9DB, 'M', '趼'), + (0x2F9DC, 'M', '跰'), + (0x2F9DD, 'M', '𠣞'), + (0x2F9DE, 'M', '軔'), + (0x2F9DF, 'M', '輸'), + (0x2F9E0, 'M', '𨗒'), + (0x2F9E1, 'M', '𨗭'), + (0x2F9E2, 'M', '邔'), + (0x2F9E3, 'M', '郱'), + (0x2F9E4, 'M', '鄑'), + (0x2F9E5, 'M', '𨜮'), + (0x2F9E6, 'M', '鄛'), + (0x2F9E7, 'M', '鈸'), + (0x2F9E8, 'M', '鋗'), + (0x2F9E9, 'M', '鋘'), + (0x2F9EA, 'M', '鉼'), + (0x2F9EB, 'M', '鏹'), + (0x2F9EC, 'M', '鐕'), + (0x2F9ED, 'M', '𨯺'), + (0x2F9EE, 'M', '開'), + (0x2F9EF, 'M', '䦕'), + (0x2F9F0, 'M', '閷'), + (0x2F9F1, 'M', '𨵷'), + ] + +def _seg_79(): + return [ + (0x2F9F2, 'M', '䧦'), + (0x2F9F3, 'M', '雃'), + (0x2F9F4, 'M', '嶲'), + (0x2F9F5, 'M', '霣'), + (0x2F9F6, 'M', '𩅅'), + (0x2F9F7, 'M', '𩈚'), + (0x2F9F8, 'M', '䩮'), + (0x2F9F9, 'M', '䩶'), + (0x2F9FA, 'M', '韠'), + (0x2F9FB, 'M', '𩐊'), + (0x2F9FC, 'M', '䪲'), + (0x2F9FD, 'M', '𩒖'), + (0x2F9FE, 'M', '頋'), + (0x2FA00, 'M', '頩'), + (0x2FA01, 'M', '𩖶'), + (0x2FA02, 'M', '飢'), + (0x2FA03, 'M', '䬳'), + (0x2FA04, 'M', '餩'), + (0x2FA05, 'M', '馧'), + (0x2FA06, 'M', '駂'), + (0x2FA07, 'M', '駾'), + (0x2FA08, 'M', '䯎'), + (0x2FA09, 'M', '𩬰'), + (0x2FA0A, 'M', '鬒'), + (0x2FA0B, 'M', '鱀'), + (0x2FA0C, 'M', '鳽'), + (0x2FA0D, 'M', '䳎'), + (0x2FA0E, 'M', '䳭'), + (0x2FA0F, 'M', '鵧'), + 
(0x2FA10, 'M', '𪃎'), + (0x2FA11, 'M', '䳸'), + (0x2FA12, 'M', '𪄅'), + (0x2FA13, 'M', '𪈎'), + (0x2FA14, 'M', '𪊑'), + (0x2FA15, 'M', '麻'), + (0x2FA16, 'M', '䵖'), + (0x2FA17, 'M', '黹'), + (0x2FA18, 'M', '黾'), + (0x2FA19, 'M', '鼅'), + (0x2FA1A, 'M', '鼏'), + (0x2FA1B, 'M', '鼖'), + (0x2FA1C, 'M', '鼻'), + (0x2FA1D, 'M', '𪘀'), + (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() +) diff --git a/venv/Lib/site-packages/pip/_vendor/msgpack/__init__.py b/venv/Lib/site-packages/pip/_vendor/msgpack/__init__.py new file mode 100644 index 00000000..d6705e22 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/msgpack/__init__.py @@ -0,0 +1,54 @@ +# coding: utf-8 +from ._version import version +from .exceptions import * +from .ext import ExtType, Timestamp + +import os +import sys + + +if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2: + from .fallback import Packer, unpackb, Unpacker +else: + try: + from ._cmsgpack import Packer, unpackb, Unpacker + except ImportError: + from .fallback import Packer, unpackb, Unpacker + + +def pack(o, stream, **kwargs): + """ + Pack object `o` and write it to `stream` + + See :class:`Packer` for options. + """ + packer = Packer(**kwargs) + stream.write(packer.pack(o)) + + +def packb(o, **kwargs): + """ + Pack object `o` and return packed bytes + + See :class:`Packer` for options. + """ + return Packer(**kwargs).pack(o) + + +def unpack(stream, **kwargs): + """ + Unpack an object from `stream`. + + Raises `ExtraData` when `stream` contains extra bytes. + See :class:`Unpacker` for options. + """ + data = stream.read() + return unpackb(data, **kwargs) + + +# alias for compatibility to simplejson/marshal/pickle. 
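# A minimal usage sketch of the pack/packb/unpack/unpackb helpers defined
# above, assuming this vendored copy is importable as ``pip._vendor.msgpack``
# (a standalone msgpack 1.x install behaves the same way):
#
#     >>> from pip._vendor import msgpack
#     >>> payload = msgpack.packb({"app": "AutoTimer", "seconds": 10})
#     >>> msgpack.unpackb(payload)
#     {'app': 'AutoTimer', 'seconds': 10}
#     >>> from io import BytesIO
#     >>> buf = BytesIO()
#     >>> msgpack.pack(["chrome", 42], buf)   # stream-based variant of packb
#     >>> _ = buf.seek(0)
#     >>> msgpack.unpack(buf)                 # stream-based variant of unpackb
#     ['chrome', 42]
#
# The json/pickle-style aliases below expose these same four entry points
# under the familiar load/loads/dump/dumps names.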
+load = unpack +loads = unpackb + +dump = pack +dumps = packb diff --git a/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..f466434a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-36.pyc new file mode 100644 index 00000000..451fc27c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-36.pyc new file mode 100644 index 00000000..39262825 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-36.pyc new file mode 100644 index 00000000..b383f713 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-36.pyc new file mode 100644 index 00000000..25eb4f14 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/msgpack/_version.py b/venv/Lib/site-packages/pip/_vendor/msgpack/_version.py new file mode 100644 index 00000000..1c83c8ed --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/msgpack/_version.py @@ -0,0 +1 @@ +version = (1, 0, 2) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/msgpack/exceptions.py b/venv/Lib/site-packages/pip/_vendor/msgpack/exceptions.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/msgpack/exceptions.py rename to venv/Lib/site-packages/pip/_vendor/msgpack/exceptions.py diff --git a/venv/Lib/site-packages/pip/_vendor/msgpack/ext.py b/venv/Lib/site-packages/pip/_vendor/msgpack/ext.py new file mode 100644 index 00000000..4eb9dd65 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/msgpack/ext.py @@ -0,0 +1,193 @@ +# coding: utf-8 +from collections import namedtuple +import datetime +import sys +import struct + + +PY2 = sys.version_info[0] == 2 + +if PY2: + int_types = (int, long) + _utc = None +else: + int_types = int + try: + _utc = datetime.timezone.utc + except AttributeError: + _utc = datetime.timezone(datetime.timedelta(0)) + + +class ExtType(namedtuple("ExtType", "code data")): + """ExtType represents ext type in msgpack.""" + + def __new__(cls, code, data): + if not isinstance(code, int): + raise TypeError("code must be int") + if not isinstance(data, bytes): + raise TypeError("data must be bytes") + if not 0 <= code <= 127: + raise ValueError("code must be 0~127") + return super(ExtType, cls).__new__(cls, code, data) + + +class Timestamp(object): + """Timestamp represents the Timestamp extension type in msgpack. + + When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`. 
When using pure-Python + msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and unpack `Timestamp`. + + This class is immutable: Do not override seconds and nanoseconds. + """ + + __slots__ = ["seconds", "nanoseconds"] + + def __init__(self, seconds, nanoseconds=0): + """Initialize a Timestamp object. + + :param int seconds: + Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds). + May be negative. + + :param int nanoseconds: + Number of nanoseconds to add to `seconds` to get fractional time. + Maximum is 999_999_999. Default is 0. + + Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns. + """ + if not isinstance(seconds, int_types): + raise TypeError("seconds must be an interger") + if not isinstance(nanoseconds, int_types): + raise TypeError("nanoseconds must be an integer") + if not (0 <= nanoseconds < 10 ** 9): + raise ValueError( + "nanoseconds must be a non-negative integer less than 999999999." + ) + self.seconds = seconds + self.nanoseconds = nanoseconds + + def __repr__(self): + """String representation of Timestamp.""" + return "Timestamp(seconds={0}, nanoseconds={1})".format( + self.seconds, self.nanoseconds + ) + + def __eq__(self, other): + """Check for equality with another Timestamp object""" + if type(other) is self.__class__: + return ( + self.seconds == other.seconds and self.nanoseconds == other.nanoseconds + ) + return False + + def __ne__(self, other): + """not-equals method (see :func:`__eq__()`)""" + return not self.__eq__(other) + + def __hash__(self): + return hash((self.seconds, self.nanoseconds)) + + @staticmethod + def from_bytes(b): + """Unpack bytes into a `Timestamp` object. + + Used for pure-Python msgpack unpacking. + + :param b: Payload from msgpack ext message with code -1 + :type b: bytes + + :returns: Timestamp object unpacked from msgpack ext payload + :rtype: Timestamp + """ + if len(b) == 4: + seconds = struct.unpack("!L", b)[0] + nanoseconds = 0 + elif len(b) == 8: + data64 = struct.unpack("!Q", b)[0] + seconds = data64 & 0x00000003FFFFFFFF + nanoseconds = data64 >> 34 + elif len(b) == 12: + nanoseconds, seconds = struct.unpack("!Iq", b) + else: + raise ValueError( + "Timestamp type can only be created from 32, 64, or 96-bit byte objects" + ) + return Timestamp(seconds, nanoseconds) + + def to_bytes(self): + """Pack this Timestamp object into bytes. + + Used for pure-Python msgpack packing. + + :returns data: Payload for EXT message with code -1 (timestamp type) + :rtype: bytes + """ + if (self.seconds >> 34) == 0: # seconds is non-negative and fits in 34 bits + data64 = self.nanoseconds << 34 | self.seconds + if data64 & 0xFFFFFFFF00000000 == 0: + # nanoseconds is zero and seconds < 2**32, so timestamp 32 + data = struct.pack("!L", data64) + else: + # timestamp 64 + data = struct.pack("!Q", data64) + else: + # timestamp 96 + data = struct.pack("!Iq", self.nanoseconds, self.seconds) + return data + + @staticmethod + def from_unix(unix_sec): + """Create a Timestamp from posix timestamp in seconds. + + :param unix_float: Posix timestamp in seconds. + :type unix_float: int or float. + """ + seconds = int(unix_sec // 1) + nanoseconds = int((unix_sec % 1) * 10 ** 9) + return Timestamp(seconds, nanoseconds) + + def to_unix(self): + """Get the timestamp as a floating-point value. 
+ + :returns: posix timestamp + :rtype: float + """ + return self.seconds + self.nanoseconds / 1e9 + + @staticmethod + def from_unix_nano(unix_ns): + """Create a Timestamp from posix timestamp in nanoseconds. + + :param int unix_ns: Posix timestamp in nanoseconds. + :rtype: Timestamp + """ + return Timestamp(*divmod(unix_ns, 10 ** 9)) + + def to_unix_nano(self): + """Get the timestamp as a unixtime in nanoseconds. + + :returns: posix timestamp in nanoseconds + :rtype: int + """ + return self.seconds * 10 ** 9 + self.nanoseconds + + def to_datetime(self): + """Get the timestamp as a UTC datetime. + + Python 2 is not supported. + + :rtype: datetime. + """ + return datetime.datetime.fromtimestamp(0, _utc) + datetime.timedelta( + seconds=self.to_unix() + ) + + @staticmethod + def from_datetime(dt): + """Create a Timestamp from datetime with tzinfo. + + Python 2 is not supported. + + :rtype: Timestamp + """ + return Timestamp.from_unix(dt.timestamp()) diff --git a/venv/Lib/site-packages/pip/_vendor/msgpack/fallback.py b/venv/Lib/site-packages/pip/_vendor/msgpack/fallback.py new file mode 100644 index 00000000..0bfa94ea --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/msgpack/fallback.py @@ -0,0 +1,1087 @@ +"""Fallback pure Python implementation of msgpack""" + +from datetime import datetime as _DateTime +import sys +import struct + + +PY2 = sys.version_info[0] == 2 +if PY2: + int_types = (int, long) + + def dict_iteritems(d): + return d.iteritems() + + +else: + int_types = int + unicode = str + xrange = range + + def dict_iteritems(d): + return d.items() + + +if sys.version_info < (3, 5): + # Ugly hack... + RecursionError = RuntimeError + + def _is_recursionerror(e): + return ( + len(e.args) == 1 + and isinstance(e.args[0], str) + and e.args[0].startswith("maximum recursion depth exceeded") + ) + + +else: + + def _is_recursionerror(e): + return True + + +if hasattr(sys, "pypy_version_info"): + # StringIO is slow on PyPy, StringIO is faster. However: PyPy's own + # StringBuilder is fastest. + from __pypy__ import newlist_hint + + try: + from __pypy__.builders import BytesBuilder as StringBuilder + except ImportError: + from __pypy__.builders import StringBuilder + USING_STRINGBUILDER = True + + class StringIO(object): + def __init__(self, s=b""): + if s: + self.builder = StringBuilder(len(s)) + self.builder.append(s) + else: + self.builder = StringBuilder() + + def write(self, s): + if isinstance(s, memoryview): + s = s.tobytes() + elif isinstance(s, bytearray): + s = bytes(s) + self.builder.append(s) + + def getvalue(self): + return self.builder.build() + + +else: + USING_STRINGBUILDER = False + from io import BytesIO as StringIO + + newlist_hint = lambda size: [] + + +from .exceptions import BufferFull, OutOfData, ExtraData, FormatError, StackError + +from .ext import ExtType, Timestamp + + +EX_SKIP = 0 +EX_CONSTRUCT = 1 +EX_READ_ARRAY_HEADER = 2 +EX_READ_MAP_HEADER = 3 + +TYPE_IMMEDIATE = 0 +TYPE_ARRAY = 1 +TYPE_MAP = 2 +TYPE_RAW = 3 +TYPE_BIN = 4 +TYPE_EXT = 5 + +DEFAULT_RECURSE_LIMIT = 511 + + +def _check_type_strict(obj, t, type=type, tuple=tuple): + if type(t) is tuple: + return type(obj) in t + else: + return type(obj) is t + + +def _get_data_from_buffer(obj): + view = memoryview(obj) + if view.itemsize != 1: + raise ValueError("cannot unpack from multi-byte object") + return view + + +def unpackb(packed, **kwargs): + """ + Unpack an object from `packed`. + + Raises ``ExtraData`` when *packed* contains extra bytes. + Raises ``ValueError`` when *packed* is incomplete. 
+ Raises ``FormatError`` when *packed* is not valid msgpack. + Raises ``StackError`` when *packed* contains too nested. + Other exceptions can be raised during unpacking. + + See :class:`Unpacker` for options. + """ + unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs) + unpacker.feed(packed) + try: + ret = unpacker._unpack() + except OutOfData: + raise ValueError("Unpack failed: incomplete input") + except RecursionError as e: + if _is_recursionerror(e): + raise StackError + raise + if unpacker._got_extradata(): + raise ExtraData(ret, unpacker._get_extradata()) + return ret + + +if sys.version_info < (2, 7, 6): + + def _unpack_from(f, b, o=0): + """Explicit type cast for legacy struct.unpack_from""" + return struct.unpack_from(f, bytes(b), o) + + +else: + _unpack_from = struct.unpack_from + + +class Unpacker(object): + """Streaming unpacker. + + Arguments: + + :param file_like: + File-like object having `.read(n)` method. + If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable. + + :param int read_size: + Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`) + + :param bool use_list: + If true, unpack msgpack array to Python list. + Otherwise, unpack to Python tuple. (default: True) + + :param bool raw: + If true, unpack msgpack raw to Python bytes. + Otherwise, unpack to Python str by decoding with UTF-8 encoding (default). + + :param int timestamp: + Control how timestamp type is unpacked: + + 0 - Timestamp + 1 - float (Seconds from the EPOCH) + 2 - int (Nanoseconds from the EPOCH) + 3 - datetime.datetime (UTC). Python 2 is not supported. + + :param bool strict_map_key: + If true (default), only str or bytes are accepted for map (dict) keys. + + :param callable object_hook: + When specified, it should be callable. + Unpacker calls it with a dict argument after unpacking msgpack map. + (See also simplejson) + + :param callable object_pairs_hook: + When specified, it should be callable. + Unpacker calls it with a list of key-value pairs after unpacking msgpack map. + (See also simplejson) + + :param str unicode_errors: + The error handler for decoding unicode. (default: 'strict') + This option should be used only when you have msgpack data which + contains invalid UTF-8 string. + + :param int max_buffer_size: + Limits size of data waiting unpacked. 0 means 2**32-1. + The default value is 100*1024*1024 (100MiB). + Raises `BufferFull` exception when it is insufficient. + You should set this parameter when unpacking data from untrusted source. + + :param int max_str_len: + Deprecated, use *max_buffer_size* instead. + Limits max length of str. (default: max_buffer_size) + + :param int max_bin_len: + Deprecated, use *max_buffer_size* instead. + Limits max length of bin. (default: max_buffer_size) + + :param int max_array_len: + Limits max length of array. + (default: max_buffer_size) + + :param int max_map_len: + Limits max length of map. + (default: max_buffer_size//2) + + :param int max_ext_len: + Deprecated, use *max_buffer_size* instead. + Limits max size of ext type. (default: max_buffer_size) + + Example of streaming deserialize from file-like object:: + + unpacker = Unpacker(file_like) + for o in unpacker: + process(o) + + Example of streaming deserialize from socket:: + + unpacker = Unpacker(max_buffer_size) + while True: + buf = sock.recv(1024**2) + if not buf: + break + unpacker.feed(buf) + for o in unpacker: + process(o) + + Raises ``ExtraData`` when *packed* contains extra bytes. 
+ Raises ``OutOfData`` when *packed* is incomplete. + Raises ``FormatError`` when *packed* is not valid msgpack. + Raises ``StackError`` when *packed* contains too nested. + Other exceptions can be raised during unpacking. + """ + + def __init__( + self, + file_like=None, + read_size=0, + use_list=True, + raw=False, + timestamp=0, + strict_map_key=True, + object_hook=None, + object_pairs_hook=None, + list_hook=None, + unicode_errors=None, + max_buffer_size=100 * 1024 * 1024, + ext_hook=ExtType, + max_str_len=-1, + max_bin_len=-1, + max_array_len=-1, + max_map_len=-1, + max_ext_len=-1, + ): + if unicode_errors is None: + unicode_errors = "strict" + + if file_like is None: + self._feeding = True + else: + if not callable(file_like.read): + raise TypeError("`file_like.read` must be callable") + self.file_like = file_like + self._feeding = False + + #: array of bytes fed. + self._buffer = bytearray() + #: Which position we currently reads + self._buff_i = 0 + + # When Unpacker is used as an iterable, between the calls to next(), + # the buffer is not "consumed" completely, for efficiency sake. + # Instead, it is done sloppily. To make sure we raise BufferFull at + # the correct moments, we have to keep track of how sloppy we were. + # Furthermore, when the buffer is incomplete (that is: in the case + # we raise an OutOfData) we need to rollback the buffer to the correct + # state, which _buf_checkpoint records. + self._buf_checkpoint = 0 + + if not max_buffer_size: + max_buffer_size = 2 ** 31 - 1 + if max_str_len == -1: + max_str_len = max_buffer_size + if max_bin_len == -1: + max_bin_len = max_buffer_size + if max_array_len == -1: + max_array_len = max_buffer_size + if max_map_len == -1: + max_map_len = max_buffer_size // 2 + if max_ext_len == -1: + max_ext_len = max_buffer_size + + self._max_buffer_size = max_buffer_size + if read_size > self._max_buffer_size: + raise ValueError("read_size must be smaller than max_buffer_size") + self._read_size = read_size or min(self._max_buffer_size, 16 * 1024) + self._raw = bool(raw) + self._strict_map_key = bool(strict_map_key) + self._unicode_errors = unicode_errors + self._use_list = use_list + if not (0 <= timestamp <= 3): + raise ValueError("timestamp must be 0..3") + self._timestamp = timestamp + self._list_hook = list_hook + self._object_hook = object_hook + self._object_pairs_hook = object_pairs_hook + self._ext_hook = ext_hook + self._max_str_len = max_str_len + self._max_bin_len = max_bin_len + self._max_array_len = max_array_len + self._max_map_len = max_map_len + self._max_ext_len = max_ext_len + self._stream_offset = 0 + + if list_hook is not None and not callable(list_hook): + raise TypeError("`list_hook` is not callable") + if object_hook is not None and not callable(object_hook): + raise TypeError("`object_hook` is not callable") + if object_pairs_hook is not None and not callable(object_pairs_hook): + raise TypeError("`object_pairs_hook` is not callable") + if object_hook is not None and object_pairs_hook is not None: + raise TypeError( + "object_pairs_hook and object_hook are mutually " "exclusive" + ) + if not callable(ext_hook): + raise TypeError("`ext_hook` is not callable") + + def feed(self, next_bytes): + assert self._feeding + view = _get_data_from_buffer(next_bytes) + if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size: + raise BufferFull + + # Strip buffer before checkpoint before reading file. 
+ if self._buf_checkpoint > 0: + del self._buffer[: self._buf_checkpoint] + self._buff_i -= self._buf_checkpoint + self._buf_checkpoint = 0 + + # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython + self._buffer.extend(view) + + def _consume(self): + """ Gets rid of the used parts of the buffer. """ + self._stream_offset += self._buff_i - self._buf_checkpoint + self._buf_checkpoint = self._buff_i + + def _got_extradata(self): + return self._buff_i < len(self._buffer) + + def _get_extradata(self): + return self._buffer[self._buff_i :] + + def read_bytes(self, n): + ret = self._read(n, raise_outofdata=False) + self._consume() + return ret + + def _read(self, n, raise_outofdata=True): + # (int) -> bytearray + self._reserve(n, raise_outofdata=raise_outofdata) + i = self._buff_i + ret = self._buffer[i : i + n] + self._buff_i = i + len(ret) + return ret + + def _reserve(self, n, raise_outofdata=True): + remain_bytes = len(self._buffer) - self._buff_i - n + + # Fast path: buffer has n bytes already + if remain_bytes >= 0: + return + + if self._feeding: + self._buff_i = self._buf_checkpoint + raise OutOfData + + # Strip buffer before checkpoint before reading file. + if self._buf_checkpoint > 0: + del self._buffer[: self._buf_checkpoint] + self._buff_i -= self._buf_checkpoint + self._buf_checkpoint = 0 + + # Read from file + remain_bytes = -remain_bytes + while remain_bytes > 0: + to_read_bytes = max(self._read_size, remain_bytes) + read_data = self.file_like.read(to_read_bytes) + if not read_data: + break + assert isinstance(read_data, bytes) + self._buffer += read_data + remain_bytes -= len(read_data) + + if len(self._buffer) < n + self._buff_i and raise_outofdata: + self._buff_i = 0 # rollback + raise OutOfData + + def _read_header(self, execute=EX_CONSTRUCT): + typ = TYPE_IMMEDIATE + n = 0 + obj = None + self._reserve(1) + b = self._buffer[self._buff_i] + self._buff_i += 1 + if b & 0b10000000 == 0: + obj = b + elif b & 0b11100000 == 0b11100000: + obj = -1 - (b ^ 0xFF) + elif b & 0b11100000 == 0b10100000: + n = b & 0b00011111 + typ = TYPE_RAW + if n > self._max_str_len: + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + obj = self._read(n) + elif b & 0b11110000 == 0b10010000: + n = b & 0b00001111 + typ = TYPE_ARRAY + if n > self._max_array_len: + raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b & 0b11110000 == 0b10000000: + n = b & 0b00001111 + typ = TYPE_MAP + if n > self._max_map_len: + raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + elif b == 0xC0: + obj = None + elif b == 0xC2: + obj = False + elif b == 0xC3: + obj = True + elif b == 0xC4: + typ = TYPE_BIN + self._reserve(1) + n = self._buffer[self._buff_i] + self._buff_i += 1 + if n > self._max_bin_len: + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + obj = self._read(n) + elif b == 0xC5: + typ = TYPE_BIN + self._reserve(2) + n = _unpack_from(">H", self._buffer, self._buff_i)[0] + self._buff_i += 2 + if n > self._max_bin_len: + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + obj = self._read(n) + elif b == 0xC6: + typ = TYPE_BIN + self._reserve(4) + n = _unpack_from(">I", self._buffer, self._buff_i)[0] + self._buff_i += 4 + if n > self._max_bin_len: + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + obj = self._read(n) + elif b == 0xC7: # ext 8 + typ = TYPE_EXT + self._reserve(2) + L, n = _unpack_from("Bb", self._buffer, self._buff_i) + self._buff_i += 2 + if 
L > self._max_ext_len: + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + obj = self._read(L) + elif b == 0xC8: # ext 16 + typ = TYPE_EXT + self._reserve(3) + L, n = _unpack_from(">Hb", self._buffer, self._buff_i) + self._buff_i += 3 + if L > self._max_ext_len: + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + obj = self._read(L) + elif b == 0xC9: # ext 32 + typ = TYPE_EXT + self._reserve(5) + L, n = _unpack_from(">Ib", self._buffer, self._buff_i) + self._buff_i += 5 + if L > self._max_ext_len: + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + obj = self._read(L) + elif b == 0xCA: + self._reserve(4) + obj = _unpack_from(">f", self._buffer, self._buff_i)[0] + self._buff_i += 4 + elif b == 0xCB: + self._reserve(8) + obj = _unpack_from(">d", self._buffer, self._buff_i)[0] + self._buff_i += 8 + elif b == 0xCC: + self._reserve(1) + obj = self._buffer[self._buff_i] + self._buff_i += 1 + elif b == 0xCD: + self._reserve(2) + obj = _unpack_from(">H", self._buffer, self._buff_i)[0] + self._buff_i += 2 + elif b == 0xCE: + self._reserve(4) + obj = _unpack_from(">I", self._buffer, self._buff_i)[0] + self._buff_i += 4 + elif b == 0xCF: + self._reserve(8) + obj = _unpack_from(">Q", self._buffer, self._buff_i)[0] + self._buff_i += 8 + elif b == 0xD0: + self._reserve(1) + obj = _unpack_from("b", self._buffer, self._buff_i)[0] + self._buff_i += 1 + elif b == 0xD1: + self._reserve(2) + obj = _unpack_from(">h", self._buffer, self._buff_i)[0] + self._buff_i += 2 + elif b == 0xD2: + self._reserve(4) + obj = _unpack_from(">i", self._buffer, self._buff_i)[0] + self._buff_i += 4 + elif b == 0xD3: + self._reserve(8) + obj = _unpack_from(">q", self._buffer, self._buff_i)[0] + self._buff_i += 8 + elif b == 0xD4: # fixext 1 + typ = TYPE_EXT + if self._max_ext_len < 1: + raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) + self._reserve(2) + n, obj = _unpack_from("b1s", self._buffer, self._buff_i) + self._buff_i += 2 + elif b == 0xD5: # fixext 2 + typ = TYPE_EXT + if self._max_ext_len < 2: + raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) + self._reserve(3) + n, obj = _unpack_from("b2s", self._buffer, self._buff_i) + self._buff_i += 3 + elif b == 0xD6: # fixext 4 + typ = TYPE_EXT + if self._max_ext_len < 4: + raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) + self._reserve(5) + n, obj = _unpack_from("b4s", self._buffer, self._buff_i) + self._buff_i += 5 + elif b == 0xD7: # fixext 8 + typ = TYPE_EXT + if self._max_ext_len < 8: + raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) + self._reserve(9) + n, obj = _unpack_from("b8s", self._buffer, self._buff_i) + self._buff_i += 9 + elif b == 0xD8: # fixext 16 + typ = TYPE_EXT + if self._max_ext_len < 16: + raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) + self._reserve(17) + n, obj = _unpack_from("b16s", self._buffer, self._buff_i) + self._buff_i += 17 + elif b == 0xD9: + typ = TYPE_RAW + self._reserve(1) + n = self._buffer[self._buff_i] + self._buff_i += 1 + if n > self._max_str_len: + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + obj = self._read(n) + elif b == 0xDA: + typ = TYPE_RAW + self._reserve(2) + (n,) = _unpack_from(">H", self._buffer, self._buff_i) + self._buff_i += 2 + if n > self._max_str_len: + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + obj = self._read(n) + elif b == 0xDB: + typ = TYPE_RAW + self._reserve(4) + (n,) = 
_unpack_from(">I", self._buffer, self._buff_i) + self._buff_i += 4 + if n > self._max_str_len: + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + obj = self._read(n) + elif b == 0xDC: + typ = TYPE_ARRAY + self._reserve(2) + (n,) = _unpack_from(">H", self._buffer, self._buff_i) + self._buff_i += 2 + if n > self._max_array_len: + raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b == 0xDD: + typ = TYPE_ARRAY + self._reserve(4) + (n,) = _unpack_from(">I", self._buffer, self._buff_i) + self._buff_i += 4 + if n > self._max_array_len: + raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b == 0xDE: + self._reserve(2) + (n,) = _unpack_from(">H", self._buffer, self._buff_i) + self._buff_i += 2 + if n > self._max_map_len: + raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + typ = TYPE_MAP + elif b == 0xDF: + self._reserve(4) + (n,) = _unpack_from(">I", self._buffer, self._buff_i) + self._buff_i += 4 + if n > self._max_map_len: + raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + typ = TYPE_MAP + else: + raise FormatError("Unknown header: 0x%x" % b) + return typ, n, obj + + def _unpack(self, execute=EX_CONSTRUCT): + typ, n, obj = self._read_header(execute) + + if execute == EX_READ_ARRAY_HEADER: + if typ != TYPE_ARRAY: + raise ValueError("Expected array") + return n + if execute == EX_READ_MAP_HEADER: + if typ != TYPE_MAP: + raise ValueError("Expected map") + return n + # TODO should we eliminate the recursion? + if typ == TYPE_ARRAY: + if execute == EX_SKIP: + for i in xrange(n): + # TODO check whether we need to call `list_hook` + self._unpack(EX_SKIP) + return + ret = newlist_hint(n) + for i in xrange(n): + ret.append(self._unpack(EX_CONSTRUCT)) + if self._list_hook is not None: + ret = self._list_hook(ret) + # TODO is the interaction between `list_hook` and `use_list` ok? 
+ return ret if self._use_list else tuple(ret) + if typ == TYPE_MAP: + if execute == EX_SKIP: + for i in xrange(n): + # TODO check whether we need to call hooks + self._unpack(EX_SKIP) + self._unpack(EX_SKIP) + return + if self._object_pairs_hook is not None: + ret = self._object_pairs_hook( + (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) + for _ in xrange(n) + ) + else: + ret = {} + for _ in xrange(n): + key = self._unpack(EX_CONSTRUCT) + if self._strict_map_key and type(key) not in (unicode, bytes): + raise ValueError( + "%s is not allowed for map key" % str(type(key)) + ) + if not PY2 and type(key) is str: + key = sys.intern(key) + ret[key] = self._unpack(EX_CONSTRUCT) + if self._object_hook is not None: + ret = self._object_hook(ret) + return ret + if execute == EX_SKIP: + return + if typ == TYPE_RAW: + if self._raw: + obj = bytes(obj) + else: + obj = obj.decode("utf_8", self._unicode_errors) + return obj + if typ == TYPE_BIN: + return bytes(obj) + if typ == TYPE_EXT: + if n == -1: # timestamp + ts = Timestamp.from_bytes(bytes(obj)) + if self._timestamp == 1: + return ts.to_unix() + elif self._timestamp == 2: + return ts.to_unix_nano() + elif self._timestamp == 3: + return ts.to_datetime() + else: + return ts + else: + return self._ext_hook(n, bytes(obj)) + assert typ == TYPE_IMMEDIATE + return obj + + def __iter__(self): + return self + + def __next__(self): + try: + ret = self._unpack(EX_CONSTRUCT) + self._consume() + return ret + except OutOfData: + self._consume() + raise StopIteration + except RecursionError: + raise StackError + + next = __next__ + + def skip(self): + self._unpack(EX_SKIP) + self._consume() + + def unpack(self): + try: + ret = self._unpack(EX_CONSTRUCT) + except RecursionError: + raise StackError + self._consume() + return ret + + def read_array_header(self): + ret = self._unpack(EX_READ_ARRAY_HEADER) + self._consume() + return ret + + def read_map_header(self): + ret = self._unpack(EX_READ_MAP_HEADER) + self._consume() + return ret + + def tell(self): + return self._stream_offset + + +class Packer(object): + """ + MessagePack Packer + + Usage:: + + packer = Packer() + astream.write(packer.pack(a)) + astream.write(packer.pack(b)) + + Packer's constructor has some keyword arguments: + + :param callable default: + Convert user type to builtin type that Packer supports. + See also simplejson's document. + + :param bool use_single_float: + Use single precision float type for float. (default: False) + + :param bool autoreset: + Reset buffer after each pack and return its content as `bytes`. (default: True). + If set this to false, use `bytes()` to get content and `.reset()` to clear buffer. + + :param bool use_bin_type: + Use bin type introduced in msgpack spec 2.0 for bytes. + It also enables str8 type for unicode. (default: True) + + :param bool strict_types: + If set to true, types will be checked to be exact. Derived classes + from serializable types will not be serialized and will be + treated as unsupported type and forwarded to default. + Additionally tuples will not be serialized as lists. + This is useful when trying to implement accurate serialization + for python types. + + :param bool datetime: + If set to true, datetime with tzinfo is packed into Timestamp type. + Note that the tzinfo is stripped in the timestamp. + You can get UTC datetime with `timestamp=3` option of the Unpacker. + (Python 2 is not supported). + + :param str unicode_errors: + The error handler for encoding unicode. (default: 'strict') + DO NOT USE THIS!! 
This option is kept for very specific usage. + + Example of streaming deserialize from file-like object:: + + unpacker = Unpacker(file_like) + for o in unpacker: + process(o) + + Example of streaming deserialize from socket:: + + unpacker = Unpacker() + while True: + buf = sock.recv(1024**2) + if not buf: + break + unpacker.feed(buf) + for o in unpacker: + process(o) + + Raises ``ExtraData`` when *packed* contains extra bytes. + Raises ``OutOfData`` when *packed* is incomplete. + Raises ``FormatError`` when *packed* is not valid msgpack. + Raises ``StackError`` when *packed* contains too nested. + Other exceptions can be raised during unpacking. + """ + + def __init__( + self, + default=None, + use_single_float=False, + autoreset=True, + use_bin_type=True, + strict_types=False, + datetime=False, + unicode_errors=None, + ): + self._strict_types = strict_types + self._use_float = use_single_float + self._autoreset = autoreset + self._use_bin_type = use_bin_type + self._buffer = StringIO() + if PY2 and datetime: + raise ValueError("datetime is not supported in Python 2") + self._datetime = bool(datetime) + self._unicode_errors = unicode_errors or "strict" + if default is not None: + if not callable(default): + raise TypeError("default must be callable") + self._default = default + + def _pack( + self, + obj, + nest_limit=DEFAULT_RECURSE_LIMIT, + check=isinstance, + check_type_strict=_check_type_strict, + ): + default_used = False + if self._strict_types: + check = check_type_strict + list_types = list + else: + list_types = (list, tuple) + while True: + if nest_limit < 0: + raise ValueError("recursion limit exceeded") + if obj is None: + return self._buffer.write(b"\xc0") + if check(obj, bool): + if obj: + return self._buffer.write(b"\xc3") + return self._buffer.write(b"\xc2") + if check(obj, int_types): + if 0 <= obj < 0x80: + return self._buffer.write(struct.pack("B", obj)) + if -0x20 <= obj < 0: + return self._buffer.write(struct.pack("b", obj)) + if 0x80 <= obj <= 0xFF: + return self._buffer.write(struct.pack("BB", 0xCC, obj)) + if -0x80 <= obj < 0: + return self._buffer.write(struct.pack(">Bb", 0xD0, obj)) + if 0xFF < obj <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xCD, obj)) + if -0x8000 <= obj < -0x80: + return self._buffer.write(struct.pack(">Bh", 0xD1, obj)) + if 0xFFFF < obj <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xCE, obj)) + if -0x80000000 <= obj < -0x8000: + return self._buffer.write(struct.pack(">Bi", 0xD2, obj)) + if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF: + return self._buffer.write(struct.pack(">BQ", 0xCF, obj)) + if -0x8000000000000000 <= obj < -0x80000000: + return self._buffer.write(struct.pack(">Bq", 0xD3, obj)) + if not default_used and self._default is not None: + obj = self._default(obj) + default_used = True + continue + raise OverflowError("Integer value out of range") + if check(obj, (bytes, bytearray)): + n = len(obj) + if n >= 2 ** 32: + raise ValueError("%s is too large" % type(obj).__name__) + self._pack_bin_header(n) + return self._buffer.write(obj) + if check(obj, unicode): + obj = obj.encode("utf-8", self._unicode_errors) + n = len(obj) + if n >= 2 ** 32: + raise ValueError("String is too large") + self._pack_raw_header(n) + return self._buffer.write(obj) + if check(obj, memoryview): + n = len(obj) * obj.itemsize + if n >= 2 ** 32: + raise ValueError("Memoryview is too large") + self._pack_bin_header(n) + return self._buffer.write(obj) + if check(obj, float): + if self._use_float: + return 
self._buffer.write(struct.pack(">Bf", 0xCA, obj)) + return self._buffer.write(struct.pack(">Bd", 0xCB, obj)) + if check(obj, (ExtType, Timestamp)): + if check(obj, Timestamp): + code = -1 + data = obj.to_bytes() + else: + code = obj.code + data = obj.data + assert isinstance(code, int) + assert isinstance(data, bytes) + L = len(data) + if L == 1: + self._buffer.write(b"\xd4") + elif L == 2: + self._buffer.write(b"\xd5") + elif L == 4: + self._buffer.write(b"\xd6") + elif L == 8: + self._buffer.write(b"\xd7") + elif L == 16: + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(struct.pack(">BB", 0xC7, L)) + elif L <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xC8, L)) + else: + self._buffer.write(struct.pack(">BI", 0xC9, L)) + self._buffer.write(struct.pack("b", code)) + self._buffer.write(data) + return + if check(obj, list_types): + n = len(obj) + self._pack_array_header(n) + for i in xrange(n): + self._pack(obj[i], nest_limit - 1) + return + if check(obj, dict): + return self._pack_map_pairs( + len(obj), dict_iteritems(obj), nest_limit - 1 + ) + + if self._datetime and check(obj, _DateTime) and obj.tzinfo is not None: + obj = Timestamp.from_datetime(obj) + default_used = 1 + continue + + if not default_used and self._default is not None: + obj = self._default(obj) + default_used = 1 + continue + raise TypeError("Cannot serialize %r" % (obj,)) + + def pack(self, obj): + try: + self._pack(obj) + except: + self._buffer = StringIO() # force reset + raise + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = StringIO() + return ret + + def pack_map_pairs(self, pairs): + self._pack_map_pairs(len(pairs), pairs) + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = StringIO() + return ret + + def pack_array_header(self, n): + if n >= 2 ** 32: + raise ValueError + self._pack_array_header(n) + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = StringIO() + return ret + + def pack_map_header(self, n): + if n >= 2 ** 32: + raise ValueError + self._pack_map_header(n) + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = StringIO() + return ret + + def pack_ext_type(self, typecode, data): + if not isinstance(typecode, int): + raise TypeError("typecode must have int type.") + if not 0 <= typecode <= 127: + raise ValueError("typecode should be 0-127") + if not isinstance(data, bytes): + raise TypeError("data must have bytes type") + L = len(data) + if L > 0xFFFFFFFF: + raise ValueError("Too large data") + if L == 1: + self._buffer.write(b"\xd4") + elif L == 2: + self._buffer.write(b"\xd5") + elif L == 4: + self._buffer.write(b"\xd6") + elif L == 8: + self._buffer.write(b"\xd7") + elif L == 16: + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(b"\xc7" + struct.pack("B", L)) + elif L <= 0xFFFF: + self._buffer.write(b"\xc8" + struct.pack(">H", L)) + else: + self._buffer.write(b"\xc9" + struct.pack(">I", L)) + self._buffer.write(struct.pack("B", typecode)) + self._buffer.write(data) + + def _pack_array_header(self, n): + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x90 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDC, n)) + if n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xDD, n)) + raise ValueError("Array is too large") + + def _pack_map_header(self, n): + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x80 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDE, n)) + if n <= 0xFFFFFFFF: + return 
self._buffer.write(struct.pack(">BI", 0xDF, n)) + raise ValueError("Dict is too large") + + def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): + self._pack_map_header(n) + for (k, v) in pairs: + self._pack(k, nest_limit - 1) + self._pack(v, nest_limit - 1) + + def _pack_raw_header(self, n): + if n <= 0x1F: + self._buffer.write(struct.pack("B", 0xA0 + n)) + elif self._use_bin_type and n <= 0xFF: + self._buffer.write(struct.pack(">BB", 0xD9, n)) + elif n <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xDA, n)) + elif n <= 0xFFFFFFFF: + self._buffer.write(struct.pack(">BI", 0xDB, n)) + else: + raise ValueError("Raw is too large") + + def _pack_bin_header(self, n): + if not self._use_bin_type: + return self._pack_raw_header(n) + elif n <= 0xFF: + return self._buffer.write(struct.pack(">BB", 0xC4, n)) + elif n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xC5, n)) + elif n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xC6, n)) + else: + raise ValueError("Bin is too large") + + def bytes(self): + """Return internal buffer contents as bytes object""" + return self._buffer.getvalue() + + def reset(self): + """Reset internal buffer. + + This method is useful only when autoreset=False. + """ + self._buffer = StringIO() + + def getbuffer(self): + """Return view of internal buffer.""" + if USING_STRINGBUILDER or PY2: + return memoryview(self.bytes()) + else: + return self._buffer.getbuffer() diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__about__.py b/venv/Lib/site-packages/pip/_vendor/packaging/__about__.py new file mode 100644 index 00000000..4c43a968 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/__about__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "20.9" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = "2014-2019 %s" % __author__ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.py b/venv/Lib/site-packages/pip/_vendor/packaging/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.py rename to venv/Lib/site-packages/pip/_vendor/packaging/__init__.py diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-36.pyc new file mode 100644 index 00000000..86dd9c75 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..a08992b9 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_compat.cpython-36.pyc new file mode 100644 index 00000000..29d90606 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-36.pyc new file mode 100644 index 00000000..67f3cf8e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_typing.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_typing.cpython-36.pyc new file mode 100644 index 00000000..2e77f1c1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/_typing.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-36.pyc new file mode 100644 index 00000000..cb779a49 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-36.pyc new file mode 100644 index 00000000..5efba94c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc new file mode 100644 index 00000000..be581d1a Binary files /dev/null and 
b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-36.pyc new file mode 100644 index 00000000..c5cc0d5a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-36.pyc new file mode 100644 index 00000000..9b5fcd79 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-36.pyc new file mode 100644 index 00000000..5f172415 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/_compat.py b/venv/Lib/site-packages/pip/_vendor/packaging/_compat.py new file mode 100644 index 00000000..e54bd4ed --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/_compat.py @@ -0,0 +1,38 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import sys + +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Dict, Tuple, Type + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = (str,) +else: + string_types = (basestring,) + + +def with_metaclass(meta, *bases): + # type: (Type[Any], Tuple[Type[Any], ...]) -> Any + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): # type: ignore + def __new__(cls, name, this_bases, d): + # type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any + return meta(name, bases, d) + + return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/_structures.py b/venv/Lib/site-packages/pip/_vendor/packaging/_structures.py new file mode 100644 index 00000000..800d5c55 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/_structures.py @@ -0,0 +1,86 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
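The with_metaclass helper in _compat.py above is the part of that shim the rest of packaging actually relies on; specifiers.py later in this diff uses it to declare an abstract base class that works identically on Python 2 and 3. A tiny self-contained sketch of the pattern follows (the helper is restated only so the snippet runs on its own; BaseThing and render are made-up names):

    import abc

    def with_metaclass(meta, *bases):
        # mirrors the helper defined in _compat.py above
        class metaclass(meta):
            def __new__(cls, name, this_bases, d):
                return meta(name, bases, d)
        return type.__new__(metaclass, "temporary_class", (), {})

    class BaseThing(with_metaclass(abc.ABCMeta, object)):
        @abc.abstractmethod
        def render(self):
            """Subclasses must implement render()."""

    # BaseThing() raises TypeError until render() is overridden,
    # under both Python 2 and Python 3.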
+from __future__ import absolute_import, division, print_function + + +class InfinityType(object): + def __repr__(self): + # type: () -> str + return "Infinity" + + def __hash__(self): + # type: () -> int + return hash(repr(self)) + + def __lt__(self, other): + # type: (object) -> bool + return False + + def __le__(self, other): + # type: (object) -> bool + return False + + def __eq__(self, other): + # type: (object) -> bool + return isinstance(other, self.__class__) + + def __ne__(self, other): + # type: (object) -> bool + return not isinstance(other, self.__class__) + + def __gt__(self, other): + # type: (object) -> bool + return True + + def __ge__(self, other): + # type: (object) -> bool + return True + + def __neg__(self): + # type: (object) -> NegativeInfinityType + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType(object): + def __repr__(self): + # type: () -> str + return "-Infinity" + + def __hash__(self): + # type: () -> int + return hash(repr(self)) + + def __lt__(self, other): + # type: (object) -> bool + return True + + def __le__(self, other): + # type: (object) -> bool + return True + + def __eq__(self, other): + # type: (object) -> bool + return isinstance(other, self.__class__) + + def __ne__(self, other): + # type: (object) -> bool + return not isinstance(other, self.__class__) + + def __gt__(self, other): + # type: (object) -> bool + return False + + def __ge__(self, other): + # type: (object) -> bool + return False + + def __neg__(self): + # type: (object) -> InfinityType + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/_typing.py b/venv/Lib/site-packages/pip/_vendor/packaging/_typing.py new file mode 100644 index 00000000..2846133b --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/_typing.py @@ -0,0 +1,48 @@ +"""For neatly implementing static typing in packaging. + +`mypy` - the static type analysis tool we use - uses the `typing` module, which +provides core functionality fundamental to mypy's functioning. + +Generally, `typing` would be imported at runtime and used in that fashion - +it acts as a no-op at runtime and does not have any run-time overhead by +design. + +As it turns out, `typing` is not vendorable - it uses separate sources for +Python 2/Python 3. Thus, this codebase can not expect it to be present. +To work around this, mypy allows the typing import to be behind a False-y +optional to prevent it from running at runtime and type-comments can be used +to remove the need for the types to be accessible directly during runtime. + +This module provides the False-y guard in a nicely named fashion so that a +curious maintainer can reach here to read this. + +In packaging, all static-typing related imports should be guarded as follows: + + from pip._vendor.packaging._typing import TYPE_CHECKING + + if TYPE_CHECKING: + from typing import ... + +Ref: https://github.com/python/mypy/issues/3216 +""" + +__all__ = ["TYPE_CHECKING", "cast"] + +# The TYPE_CHECKING constant defined by the typing module is False at runtime +# but True while type checking. +if False: # pragma: no cover + from typing import TYPE_CHECKING +else: + TYPE_CHECKING = False + +# typing's cast syntax requires calling typing.cast at runtime, but we don't +# want to import typing at runtime. Here, we inform the type checkers that +# we're importing `typing.cast` as `cast` and re-implement typing.cast's +# runtime behavior in a block that is ignored by type checkers. 
+if TYPE_CHECKING: # pragma: no cover + # not executed at runtime + from typing import cast +else: + # executed at runtime + def cast(type_, value): # noqa + return value diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/markers.py b/venv/Lib/site-packages/pip/_vendor/packaging/markers.py new file mode 100644 index 00000000..69a60cf1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/markers.py @@ -0,0 +1,336 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import operator +import os +import platform +import sys + +from pip._vendor.pyparsing import ( # noqa: N817 + Forward, + Group, + Literal as L, + ParseException, + ParseResults, + QuotedString, + ZeroOrMore, + stringEnd, + stringStart, +) + +from ._compat import string_types +from ._typing import TYPE_CHECKING +from .specifiers import InvalidSpecifier, Specifier + +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Callable, Dict, List, Optional, Tuple, Union + + Operator = Callable[[str, str], bool] + + +__all__ = [ + "InvalidMarker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "Marker", + "default_environment", +] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. + """ + + +class Node(object): + def __init__(self, value): + # type: (Any) -> None + self.value = value + + def __str__(self): + # type: () -> str + return str(self.value) + + def __repr__(self): + # type: () -> str + return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + + def serialize(self): + # type: () -> str + raise NotImplementedError + + +class Variable(Node): + def serialize(self): + # type: () -> str + return str(self) + + +class Value(Node): + def serialize(self): + # type: () -> str + return '"{0}"'.format(self) + + +class Op(Node): + def serialize(self): + # type: () -> str + return str(self) + + +VARIABLE = ( + L("implementation_version") + | L("platform_python_implementation") + | L("implementation_name") + | L("python_full_version") + | L("platform_release") + | L("platform_version") + | L("platform_machine") + | L("platform_system") + | L("python_version") + | L("sys_platform") + | L("os_name") + | L("os.name") # PEP-345 + | L("sys.platform") # PEP-345 + | L("platform.version") # PEP-345 + | L("platform.machine") # PEP-345 + | L("platform.python_implementation") # PEP-345 + | L("python_implementation") # undocumented setuptools legacy + | L("extra") # PEP-508 +) +ALIASES = { + "os.name": "os_name", + "sys.platform": "sys_platform", + "platform.version": "platform_version", + "platform.machine": "platform_machine", + "platform.python_implementation": "platform_python_implementation", + "python_implementation": "platform_python_implementation", +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( + L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) + +MARKER_VALUE = QuotedString("'") | QuotedString('"') 
+MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results): + # type: (Union[ParseResults, List[Any]]) -> List[Any] + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker(marker, first=True): + # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str + + assert isinstance(marker, (list, tuple, string_types)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. + if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} # type: Dict[str, Operator] + + +def _eval_op(lhs, op, rhs): + # type: (str, Op, str) -> bool + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + + oper = _operators.get(op.serialize()) # type: Optional[Operator] + if oper is None: + raise UndefinedComparison( + "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) + ) + + return oper(lhs, rhs) + + +class Undefined(object): + pass + + +_undefined = Undefined() + + +def _get_env(environment, name): + # type: (Dict[str, str], str) -> str + value = environment.get(name, _undefined) # type: Union[str, Undefined] + + if isinstance(value, Undefined): + raise UndefinedEnvironmentName( + "{0!r} does not exist in evaluation environment.".format(name) + ) + + return value + + +def _evaluate_markers(markers, environment): + # type: (List[Any], Dict[str, str]) -> bool + groups = [[]] # type: List[List[bool]] + + for marker in markers: + assert isinstance(marker, (list, tuple, string_types)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info): + # type: (sys._version_info) -> str + version = "{0.major}.{0.minor}.{0.micro}".format(info) + kind = info.releaselevel + if kind != "final": + version += kind[0] + 
str(info.serial) + return version + + +def default_environment(): + # type: () -> Dict[str, str] + if hasattr(sys, "implementation"): + # Ignoring the `sys.implementation` reference for type checking due to + # mypy not liking that the attribute doesn't exist in Python 2.7 when + # run with the `--py27` flag. + iver = format_full_version(sys.implementation.version) # type: ignore + implementation_name = sys.implementation.name # type: ignore + else: + iver = "0" + implementation_name = "" + + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": ".".join(platform.python_version_tuple()[:2]), + "sys_platform": sys.platform, + } + + +class Marker(object): + def __init__(self, marker): + # type: (str) -> None + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( + marker, marker[e.loc : e.loc + 8] + ) + raise InvalidMarker(err_str) + + def __str__(self): + # type: () -> str + return _format_marker(self._markers) + + def __repr__(self): + # type: () -> str + return "".format(str(self)) + + def evaluate(self, environment=None): + # type: (Optional[Dict[str, str]]) -> bool + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. + + The environment is determined from the current Python process. + """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/requirements.py b/venv/Lib/site-packages/pip/_vendor/packaging/requirements.py new file mode 100644 index 00000000..c2a7fdac --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/requirements.py @@ -0,0 +1,160 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import re +import string +import sys + +from pip._vendor.pyparsing import ( # noqa: N817 + Combine, + Literal as L, + Optional, + ParseException, + Regex, + Word, + ZeroOrMore, + originalTextFor, + stringEnd, + stringStart, +) + +from ._typing import TYPE_CHECKING +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + +if sys.version_info[0] >= 3: + from urllib import parse as urlparse # pragma: no cover +else: # pragma: no cover + import urlparse + + +if TYPE_CHECKING: # pragma: no cover + from typing import List, Optional as TOptional, Set + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. 
+ """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r"[^ ]+")("url") +URL = AT + URI + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine( + VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False +)("_raw_spec") +_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start : t._original_end]) +) +MARKER_SEPARATOR = SEMICOLON +MARKER = MARKER_SEPARATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd +# pyparsing isn't thread safe during initialization, so we do it eagerly, see +# issue #104 +REQUIREMENT.parseString("x[]") + + +class Requirement(object): + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
+ + def __init__(self, requirement_string): + # type: (str) -> None + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + raise InvalidRequirement( + 'Parse error at "{0!r}": {1}'.format( + requirement_string[e.loc : e.loc + 8], e.msg + ) + ) + + self.name = req.name # type: str + if req.url: + parsed_url = urlparse.urlparse(req.url) + if parsed_url.scheme == "file": + if urlparse.urlunparse(parsed_url) != req.url: + raise InvalidRequirement("Invalid URL given") + elif not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc + ): + raise InvalidRequirement("Invalid URL: {0}".format(req.url)) + self.url = req.url # type: TOptional[str] + else: + self.url = None + self.extras = set(req.extras.asList() if req.extras else []) # type: Set[str] + self.specifier = SpecifierSet(req.specifier) # type: SpecifierSet + self.marker = req.marker if req.marker else None # type: TOptional[Marker] + + def __str__(self): + # type: () -> str + parts = [self.name] # type: List[str] + + if self.extras: + parts.append("[{0}]".format(",".join(sorted(self.extras)))) + + if self.specifier: + parts.append(str(self.specifier)) + + if self.url: + parts.append("@ {0}".format(self.url)) + if self.marker: + parts.append(" ") + + if self.marker: + parts.append("; {0}".format(self.marker)) + + return "".join(parts) + + def __repr__(self): + # type: () -> str + return "".format(str(self)) diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/specifiers.py b/venv/Lib/site-packages/pip/_vendor/packaging/specifiers.py new file mode 100644 index 00000000..a6a83c1f --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/specifiers.py @@ -0,0 +1,864 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re +import warnings + +from ._compat import string_types, with_metaclass +from ._typing import TYPE_CHECKING +from .utils import canonicalize_version +from .version import LegacyVersion, Version, parse + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union + + ParsedVersion = Union[Version, LegacyVersion] + UnparsedVersion = Union[Version, LegacyVersion, str] + CallableOperator = Callable[[ParsedVersion, str], bool] + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): # type: ignore + @abc.abstractmethod + def __str__(self): + # type: () -> str + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + # type: () -> int + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + # type: (object) -> bool + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + # type: (object) -> bool + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. 
+ """ + + @abc.abstractproperty + def prereleases(self): + # type: () -> Optional[bool] + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + # type: (str, Optional[bool]) -> bool + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} # type: Dict[str, str] + + def __init__(self, spec="", prereleases=None): + # type: (str, Optional[bool]) -> None + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) # type: Tuple[str, str] + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + # type: () -> str + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) + + def __str__(self): + # type: () -> str + return "{0}{1}".format(*self._spec) + + @property + def _canonical_spec(self): + # type: () -> Tuple[str, Union[Version, str]] + return self._spec[0], canonicalize_version(self._spec[1]) + + def __hash__(self): + # type: () -> int + return hash(self._canonical_spec) + + def __eq__(self, other): + # type: (object) -> bool + if isinstance(other, string_types): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def __ne__(self, other): + # type: (object) -> bool + if isinstance(other, string_types): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + # type: (str) -> CallableOperator + operator_callable = getattr( + self, "_compare_{0}".format(self._operators[op]) + ) # type: CallableOperator + return operator_callable + + def _coerce_version(self, version): + # type: (UnparsedVersion) -> ParsedVersion + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + # type: () -> str + return self._spec[0] + + @property + def version(self): + # type: () -> str + return self._spec[1] + + @property + def prereleases(self): + # type: () -> Optional[bool] + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + self._prereleases = value + + def __contains__(self, item): + # type: (str) -> bool + return self.contains(item) + + def contains(self, item, prereleases=None): + # type: (UnparsedVersion, Optional[bool]) -> bool + + # Determine if prereleases are to be allowed or not. 
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version or LegacyVersion, this allows us to have
+        # a shortcut for ``"2.0" in Specifier(">=2")
+        normalized_item = self._coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not, if we do not support prereleases than we can short circuit
+        # logic if this version is a prereleases.
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable = self._get_operator(self.operator)  # type: CallableOperator
+        return operator_callable(normalized_item, self.version)
+
+    def filter(self, iterable, prereleases=None):
+        # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion]
+
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = self._coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later incase nothing
+                # else matches this specifier.
+                if parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+class LegacySpecifier(_IndividualSpecifier):
+
+    _regex_str = r"""
+        (?P<operator>(==|!=|<=|>=|<|>))
+        \s*
+        (?P<version>
+            [^,;\s)]* # Since this is a "legacy" specifier, and the version
+                      # string can be just about anything, we match everything
+                      # except for whitespace, a semi-colon for marker support,
+                      # a closing paren since versions can be enclosed in
+                      # them, and a comma since it's a version separator.
+        )
+        """
+
+    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    _operators = {
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+    }
+
+    def __init__(self, spec="", prereleases=None):
+        # type: (str, Optional[bool]) -> None
+        super(LegacySpecifier, self).__init__(spec, prereleases)
+
+        warnings.warn(
+            "Creating a LegacyVersion has been deprecated and will be "
+            "removed in the next major release",
+            DeprecationWarning,
+        )
+
+    def _coerce_version(self, version):
+        # type: (Union[ParsedVersion, str]) -> LegacyVersion
+        if not isinstance(version, LegacyVersion):
+            version = LegacyVersion(str(version))
+        return version
+
+    def _compare_equal(self, prospective, spec):
+        # type: (LegacyVersion, str) -> bool
+        return prospective == self._coerce_version(spec)
+
+    def _compare_not_equal(self, prospective, spec):
+        # type: (LegacyVersion, str) -> bool
+        return prospective != self._coerce_version(spec)
+
+    def _compare_less_than_equal(self, prospective, spec):
+        # type: (LegacyVersion, str) -> bool
+        return prospective <= self._coerce_version(spec)
+
+    def _compare_greater_than_equal(self, prospective, spec):
+        # type: (LegacyVersion, str) -> bool
+        return prospective >= self._coerce_version(spec)
+
+    def _compare_less_than(self, prospective, spec):
+        # type: (LegacyVersion, str) -> bool
+        return prospective < self._coerce_version(spec)
+
+    def _compare_greater_than(self, prospective, spec):
+        # type: (LegacyVersion, str) -> bool
+        return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(
+    fn,  # type: (Callable[[Specifier, ParsedVersion, str], bool])
+):
+    # type: (...) -> Callable[[Specifier, ParsedVersion, str], bool]
+    @functools.wraps(fn)
+    def wrapped(self, prospective, spec):
+        # type: (Specifier, ParsedVersion, str) -> bool
+        if not isinstance(prospective, Version):
+            return False
+        return fn(self, prospective, spec)
+
+    return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+    _regex_str = r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s]*    # We just match everything, except for whitespace
+                          # since we are only testing for strict identity.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+
+                # You cannot use a wild card and a dev or local version
+                # together so group them with a | and make them optional.
+                (?:
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                    |
+                    \.\*  # Wild card syntax of .*
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        """
+
+    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    @_require_version_compare
+    def _compare_compatible(self, prospective, spec):
+        # type: (ParsedVersion, str) -> bool
+
+        # Compatible releases have an equivalent combination of >= and ==. That
+        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore post and dev releases and we want to treat the pre-release as
+        # it's own separate segment.
+        prefix = ".".join(
+            list(
+                itertools.takewhile(
+                    lambda x: (not x.startswith("post") and not x.startswith("dev")),
+                    _version_split(spec),
+                )
+            )[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+            prospective, prefix
+        )
+
+    @_require_version_compare
+    def _compare_equal(self, prospective, spec):
+        # type: (ParsedVersion, str) -> bool
+
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # In the case of prefix matching we want to ignore local segment.
+            prospective = Version(prospective.public)
+            # Split the spec out by dots, and pretend that there is an implicit
+            # dot in between a release segment and a pre-release segment.
+            split_spec = _version_split(spec[:-2])  # Remove the trailing .*
+
+            # Split the prospective version out by dots, and pretend that there
+            # is an implicit dot in between a release segment and a pre-release
+            # segment.
+            split_prospective = _version_split(str(prospective))
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            shortened_prospective = split_prospective[: len(split_spec)]
+
+            # Pad out our two sides with zeros so that they both equal the same
+            # length.
+            padded_spec, padded_prospective = _pad_version(
+                split_spec, shortened_prospective
+            )
+
+            return padded_prospective == padded_spec
+        else:
+            # Convert our spec string into a Version
+            spec_version = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
+            if not spec_version.local:
+                prospective = Version(prospective.public)
+
+            return prospective == spec_version
+
+    @_require_version_compare
+    def _compare_not_equal(self, prospective, spec):
+        # type: (ParsedVersion, str) -> bool
+        return not self._compare_equal(prospective, spec)
+
+    @_require_version_compare
+    def _compare_less_than_equal(self, prospective, spec):
+        # type: (ParsedVersion, str) -> bool
+
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+ return Version(prospective.public) <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + # type: (Version, str) -> bool + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # type: () -> bool + + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. 
+ if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + # type: (str) -> List[str] + result = [] # type: List[str] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + # type: (List[str], List[str]) -> Tuple[List[str], List[str]] + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) + + +class SpecifierSet(BaseSpecifier): + def __init__(self, specifiers="", prereleases=None): + # type: (str, Optional[bool]) -> None + + # Split on , to break each individual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in split_specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + # type: () -> str + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "".format(str(self), pre) + + def __str__(self): + # type: () -> str + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + # type: () -> int + return hash(self._specs) + + def __and__(self, other): + # type: (Union[SpecifierSet, str]) -> SpecifierSet + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+ ) + + return specifier + + def __eq__(self, other): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + # type: () -> int + return len(self._specs) + + def __iter__(self): + # type: () -> Iterator[_IndividualSpecifier] + return iter(self._specs) + + @property + def prereleases(self): + # type: () -> Optional[bool] + + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + self._prereleases = value + + def __contains__(self, item): + # type: (Union[ParsedVersion, str]) -> bool + return self.contains(item) + + def contains(self, item, prereleases=None): + # type: (Union[ParsedVersion, str], Optional[bool]) -> bool + + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all(s.contains(item, prereleases=prereleases) for s in self._specs) + + def filter( + self, + iterable, # type: Iterable[Union[ParsedVersion, str]] + prereleases=None, # type: Optional[bool] + ): + # type: (...) -> Iterable[Union[ParsedVersion, str]] + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. 
+ if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. + else: + filtered = [] # type: List[Union[ParsedVersion, str]] + found_prereleases = [] # type: List[Union[ParsedVersion, str]] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/tags.py b/venv/Lib/site-packages/pip/_vendor/packaging/tags.py new file mode 100644 index 00000000..d637f1b6 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/tags.py @@ -0,0 +1,866 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import + +import distutils.util + +try: + from importlib.machinery import EXTENSION_SUFFIXES +except ImportError: # pragma: no cover + import imp + + EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] + del imp +import collections +import logging +import os +import platform +import re +import struct +import sys +import sysconfig +import warnings + +from ._typing import TYPE_CHECKING, cast + +if TYPE_CHECKING: # pragma: no cover + from typing import ( + IO, + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + ) + + PythonVersion = Sequence[int] + MacVersion = Tuple[int, int] + GlibcVersion = Tuple[int, int] + + +logger = logging.getLogger(__name__) + +INTERPRETER_SHORT_NAMES = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} # type: Dict[str, str] + + +_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + + +_LEGACY_MANYLINUX_MAP = { + # CentOS 7 w/ glibc 2.17 (PEP 599) + (2, 17): "manylinux2014", + # CentOS 6 w/ glibc 2.12 (PEP 571) + (2, 12): "manylinux2010", + # CentOS 5 w/ glibc 2.5 (PEP 513) + (2, 5): "manylinux1", +} + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR = collections.defaultdict(lambda: 50) # type: Dict[int, int] +glibcVersion = collections.namedtuple("Version", ["major", "minor"]) + + +class Tag(object): + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. 
+ """ + + __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] + + def __init__(self, interpreter, abi, platform): + # type: (str, str, str) -> None + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.disjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self): + # type: () -> str + return self._interpreter + + @property + def abi(self): + # type: () -> str + return self._abi + + @property + def platform(self): + # type: () -> str + return self._platform + + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self.platform == other.platform) + and (self.abi == other.abi) + and (self.interpreter == other.interpreter) + ) + + def __hash__(self): + # type: () -> int + return self._hash + + def __str__(self): + # type: () -> str + return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) + + def __repr__(self): + # type: () -> str + return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) + + +def parse_tag(tag): + # type: (str) -> FrozenSet[Tag] + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _warn_keyword_parameter(func_name, kwargs): + # type: (str, Dict[str, bool]) -> bool + """ + Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only. + """ + if not kwargs: + return False + elif len(kwargs) > 1 or "warn" not in kwargs: + kwargs.pop("warn", None) + arg = next(iter(kwargs.keys())) + raise TypeError( + "{}() got an unexpected keyword argument {!r}".format(func_name, arg) + ) + return kwargs["warn"] + + +def _get_config_var(name, warn=False): + # type: (str, bool) -> Union[int, str, None] + value = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string): + # type: (str) -> str + return string.replace(".", "_").replace("-", "_") + + +def _abi3_applies(python_version): + # type: (PythonVersion) -> bool + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) + + +def _cpython_abis(py_version, warn=False): + # type: (PythonVersion, bool) -> List[str] + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. 
+ # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append("cp{version}".format(version=version)) + abis.insert( + 0, + "cp{version}{debug}{pymalloc}{ucs4}".format( + version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 + ), + ) + return abis + + +def cpython_tags( + python_version=None, # type: Optional[PythonVersion] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + warn = _warn_keyword_parameter("cpython_tags", kwargs) + if not python_version: + python_version = sys.version_info[:2] + + interpreter = "cp{}".format(_version_nodot(python_version[:2])) + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. + for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or _platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + if _abi3_applies(python_version): + for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): + yield tag + for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): + yield tag + + if _abi3_applies(python_version): + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{version}".format( + version=_version_nodot((python_version[0], minor_version)) + ) + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi(): + # type: () -> Iterator[str] + abi = sysconfig.get_config_var("SOABI") + if abi: + yield _normalize_string(abi) + + +def generic_tags( + interpreter=None, # type: Optional[str] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. 
+ """ + warn = _warn_keyword_parameter("generic_tags", kwargs) + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + platforms = list(platforms or _platform_tags()) + abis = list(abis) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version): + # type: (PythonVersion) -> Iterator[str] + """ + Yields Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. + """ + if len(py_version) > 1: + yield "py{version}".format(version=_version_nodot(py_version[:2])) + yield "py{major}".format(major=py_version[0]) + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield "py{version}".format(version=_version_nodot((py_version[0], minor))) + + +def compatible_tags( + python_version=None, # type: Optional[PythonVersion] + interpreter=None, # type: Optional[str] + platforms=None, # type: Optional[Iterable[str]] +): + # type: (...) -> Iterator[Tag] + """ + Yields the sequence of tags that are compatible with a specific version of Python. + + The tags consist of: + - py*-none- + - -none-any # ... if `interpreter` is provided. + - py*-none-any + """ + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or _platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): + # type: (str, bool) -> str + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version, cpu_arch): + # type: (MacVersion, str) -> List[str] + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? + if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + if cpu_arch in {"arm64", "x86_64"}: + formats.append("universal2") + + if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: + formats.append("universal") + + return formats + + +def mac_platforms(version=None, arch=None): + # type: (Optional[MacVersion], Optional[str]) -> Iterator[str] + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. 
+ """ + version_str, _, cpu_arch = platform.mac_ver() # type: ignore + if version is None: + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + else: + version = version + if arch is None: + arch = _mac_arch(cpu_arch) + else: + arch = arch + + if (10, 0) <= version and version < (11, 0): + # Prior to Mac OS 11, each yearly release of Mac OS bumped the + # "minor" version number. The major version was always 10. + for minor_version in range(version[1], -1, -1): + compat_version = 10, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=10, minor=minor_version, binary_format=binary_format + ) + + if version >= (11, 0): + # Starting with Mac OS 11, each yearly release bumps the major version + # number. The minor versions are now the midyear updates. + for major_version in range(version[0], 10, -1): + compat_version = major_version, 0 + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=major_version, minor=0, binary_format=binary_format + ) + + if version >= (11, 0): + # Mac OS 11 on x86_64 is compatible with binaries from previous releases. + # Arm64 support was introduced in 11.0, so no Arm binaries from previous + # releases exist. + # + # However, the "universal2" binary format can have a + # macOS version earlier than 11.0 when the x86_64 part of the binary supports + # that version of macOS. + if arch == "x86_64": + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + else: + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_format = "universal2" + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + + +# From PEP 513, PEP 600 +def _is_manylinux_compatible(name, arch, glibc_version): + # type: (str, str, GlibcVersion) -> bool + sys_glibc = _get_glibc_version() + if sys_glibc < glibc_version: + return False + # Check for presence of _manylinux module. + try: + import _manylinux # noqa + except ImportError: + pass + else: + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible( + glibc_version[0], glibc_version[1], arch + ) + if result is not None: + return bool(result) + else: + if glibc_version == (2, 5): + if hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + if glibc_version == (2, 12): + if hasattr(_manylinux, "manylinux2010_compatible"): + return bool(_manylinux.manylinux2010_compatible) + if glibc_version == (2, 17): + if hasattr(_manylinux, "manylinux2014_compatible"): + return bool(_manylinux.manylinux2014_compatible) + return True + + +def _glibc_version_string(): + # type: () -> Optional[str] + # Returns glibc version string, or None if not using glibc. + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _glibc_version_string_confstr(): + # type: () -> Optional[str] + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. 
It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". + version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821 + "CS_GNU_LIBC_VERSION" + ) + assert version_string is not None + _, version = version_string.split() # type: Tuple[str, str] + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes(): + # type: () -> Optional[str] + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # We must also handle the special case where the executable is not a + # dynamically linked executable. This can occur when using musl libc, + # for example. In this situation, dlopen() will error, leading to an + # OSError. Interestingly, at least in the case of musl, there is no + # errno set on the OSError. The single string argument used to construct + # OSError comes from libc itself and is therefore not portable to + # hard code here. In any case, failure to call dlopen() means we + # can proceed, so we bail on our attempt. + try: + # Note: typeshed is wrong here so we are ignoring this line. + process_namespace = ctypes.CDLL(None) # type: ignore + except OSError: + return None + + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() # type: str + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +def _parse_glibc_version(version_str): + # type: (str) -> Tuple[int, int] + # Parse glibc version. + # + # We use a regexp instead of str.split because we want to discard any + # random junk that might come after the minor version -- this might happen + # in patched/forked versions of glibc (e.g. Linaro's version of glibc + # uses version strings like "2.20-2014.11"). See gh-3588. 
+ m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) + if not m: + warnings.warn( + "Expected glibc version with 2 components major.minor," + " got: %s" % version_str, + RuntimeWarning, + ) + return -1, -1 + return (int(m.group("major")), int(m.group("minor"))) + + +_glibc_version = [] # type: List[Tuple[int, int]] + + +def _get_glibc_version(): + # type: () -> Tuple[int, int] + if _glibc_version: + return _glibc_version[0] + version_str = _glibc_version_string() + if version_str is None: + _glibc_version.append((-1, -1)) + else: + _glibc_version.append(_parse_glibc_version(version_str)) + return _glibc_version[0] + + +# Python does not provide platform information at sufficient granularity to +# identify the architecture of the running executable in some cases, so we +# determine it dynamically by reading the information from the running +# process. This only applies on Linux, which uses the ELF format. +class _ELFFileHeader(object): + # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header + class _InvalidELFFileHeader(ValueError): + """ + An invalid ELF file header was found. + """ + + ELF_MAGIC_NUMBER = 0x7F454C46 + ELFCLASS32 = 1 + ELFCLASS64 = 2 + ELFDATA2LSB = 1 + ELFDATA2MSB = 2 + EM_386 = 3 + EM_S390 = 22 + EM_ARM = 40 + EM_X86_64 = 62 + EF_ARM_ABIMASK = 0xFF000000 + EF_ARM_ABI_VER5 = 0x05000000 + EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + def __init__(self, file): + # type: (IO[bytes]) -> None + def unpack(fmt): + # type: (str) -> int + try: + (result,) = struct.unpack( + fmt, file.read(struct.calcsize(fmt)) + ) # type: (int, ) + except struct.error: + raise _ELFFileHeader._InvalidELFFileHeader() + return result + + self.e_ident_magic = unpack(">I") + if self.e_ident_magic != self.ELF_MAGIC_NUMBER: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_class = unpack("B") + if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_data = unpack("B") + if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_version = unpack("B") + self.e_ident_osabi = unpack("B") + self.e_ident_abiversion = unpack("B") + self.e_ident_pad = file.read(7) + format_h = "H" + format_i = "I" + format_q = "Q" + format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q + self.e_type = unpack(format_h) + self.e_machine = unpack(format_h) + self.e_version = unpack(format_i) + self.e_entry = unpack(format_p) + self.e_phoff = unpack(format_p) + self.e_shoff = unpack(format_p) + self.e_flags = unpack(format_i) + self.e_ehsize = unpack(format_h) + self.e_phentsize = unpack(format_h) + self.e_phnum = unpack(format_h) + self.e_shentsize = unpack(format_h) + self.e_shnum = unpack(format_h) + self.e_shstrndx = unpack(format_h) + + +def _get_elf_header(): + # type: () -> Optional[_ELFFileHeader] + try: + with open(sys.executable, "rb") as f: + elf_header = _ELFFileHeader(f) + except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): + return None + return elf_header + + +def _is_linux_armhf(): + # type: () -> bool + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_ARM + result &= ( + 
elf_header.e_flags & elf_header.EF_ARM_ABIMASK + ) == elf_header.EF_ARM_ABI_VER5 + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD + ) == elf_header.EF_ARM_ABI_FLOAT_HARD + return result + + +def _is_linux_i686(): + # type: () -> bool + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_386 + return result + + +def _have_compatible_manylinux_abi(arch): + # type: (str) -> bool + if arch == "armv7l": + return _is_linux_armhf() + if arch == "i686": + return _is_linux_i686() + return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} + + +def _manylinux_tags(linux, arch): + # type: (str, str) -> Iterator[str] + # Oldest glibc to be supported regardless of architecture is (2, 17). + too_old_glibc2 = glibcVersion(2, 16) + if arch in {"x86_64", "i686"}: + # On x86/i686 also oldest glibc to be supported is (2, 5). + too_old_glibc2 = glibcVersion(2, 4) + current_glibc = glibcVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. + # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_max_list.append(glibcVersion(glibc_major, _LAST_GLIBC_MINOR[glibc_major])) + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = (glibc_max.major, glibc_minor) + tag = "manylinux_{}_{}".format(*glibc_version) + if _is_manylinux_compatible(tag, arch, glibc_version): + yield linux.replace("linux", tag) + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if glibc_version in _LEGACY_MANYLINUX_MAP: + legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] + if _is_manylinux_compatible(legacy_tag, arch, glibc_version): + yield linux.replace("linux", legacy_tag) + + +def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): + # type: (bool) -> Iterator[str] + linux = _normalize_string(distutils.util.get_platform()) + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv7l" + _, arch = linux.split("_", 1) + if _have_compatible_manylinux_abi(arch): + for tag in _manylinux_tags(linux, arch): + yield tag + yield linux + + +def _generic_platforms(): + # type: () -> Iterator[str] + yield _normalize_string(distutils.util.get_platform()) + + +def _platform_tags(): + # type: () -> Iterator[str] + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name(): + # type: () -> str + """ + Returns the name of the running interpreter. + """ + try: + name = sys.implementation.name # type: ignore + except AttributeError: # pragma: no cover + # Python 2.7 compatibility. 
+ name = platform.python_implementation().lower() + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(**kwargs): + # type: (bool) -> str + """ + Returns the version of the running interpreter. + """ + warn = _warn_keyword_parameter("interpreter_version", kwargs) + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = _version_nodot(sys.version_info[:2]) + return version + + +def _version_nodot(version): + # type: (PythonVersion) -> str + return "".join(map(str, version)) + + +def sys_tags(**kwargs): + # type: (bool) -> Iterator[Tag] + """ + Returns the sequence of tag triples for the running interpreter. + + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ + warn = _warn_keyword_parameter("sys_tags", kwargs) + + interp_name = interpreter_name() + if interp_name == "cp": + for tag in cpython_tags(warn=warn): + yield tag + else: + for tag in generic_tags(): + yield tag + + for tag in compatible_tags(): + yield tag diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/utils.py b/venv/Lib/site-packages/pip/_vendor/packaging/utils.py new file mode 100644 index 00000000..6e8c2a3e --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/utils.py @@ -0,0 +1,138 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import re + +from ._typing import TYPE_CHECKING, cast +from .tags import Tag, parse_tag +from .version import InvalidVersion, Version + +if TYPE_CHECKING: # pragma: no cover + from typing import FrozenSet, NewType, Tuple, Union + + BuildTag = Union[Tuple[()], Tuple[int, str]] + NormalizedName = NewType("NormalizedName", str) +else: + BuildTag = tuple + NormalizedName = str + + +class InvalidWheelFilename(ValueError): + """ + An invalid wheel filename was found, users should refer to PEP 427. + """ + + +class InvalidSdistFilename(ValueError): + """ + An invalid sdist filename was found, users should refer to the packaging user guide. + """ + + +_canonicalize_regex = re.compile(r"[-_.]+") +# PEP 427: The build number must start with a digit. +_build_tag_regex = re.compile(r"(\d+)(.*)") + + +def canonicalize_name(name): + # type: (str) -> NormalizedName + # This is taken from PEP 503. + value = _canonicalize_regex.sub("-", name).lower() + return cast(NormalizedName, value) + + +def canonicalize_version(version): + # type: (Union[Version, str]) -> Union[Version, str] + """ + This is very similar to Version.__str__, but has one subtle difference + with the way it handles the release segment. 
+ """ + if not isinstance(version, Version): + try: + version = Version(version) + except InvalidVersion: + # Legacy versions cannot be normalized + return version + + parts = [] + + # Epoch + if version.epoch != 0: + parts.append("{0}!".format(version.epoch)) + + # Release segment + # NB: This strips trailing '.0's to normalize + parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release))) + + # Pre-release + if version.pre is not None: + parts.append("".join(str(x) for x in version.pre)) + + # Post-release + if version.post is not None: + parts.append(".post{0}".format(version.post)) + + # Development release + if version.dev is not None: + parts.append(".dev{0}".format(version.dev)) + + # Local version segment + if version.local is not None: + parts.append("+{0}".format(version.local)) + + return "".join(parts) + + +def parse_wheel_filename(filename): + # type: (str) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]] + if not filename.endswith(".whl"): + raise InvalidWheelFilename( + "Invalid wheel filename (extension must be '.whl'): {0}".format(filename) + ) + + filename = filename[:-4] + dashes = filename.count("-") + if dashes not in (4, 5): + raise InvalidWheelFilename( + "Invalid wheel filename (wrong number of parts): {0}".format(filename) + ) + + parts = filename.split("-", dashes - 2) + name_part = parts[0] + # See PEP 427 for the rules on escaping the project name + if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: + raise InvalidWheelFilename("Invalid project name: {0}".format(filename)) + name = canonicalize_name(name_part) + version = Version(parts[1]) + if dashes == 5: + build_part = parts[2] + build_match = _build_tag_regex.match(build_part) + if build_match is None: + raise InvalidWheelFilename( + "Invalid build number: {0} in '{1}'".format(build_part, filename) + ) + build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) + else: + build = () + tags = parse_tag(parts[-1]) + return (name, version, build, tags) + + +def parse_sdist_filename(filename): + # type: (str) -> Tuple[NormalizedName, Version] + if not filename.endswith(".tar.gz"): + raise InvalidSdistFilename( + "Invalid sdist filename (extension must be '.tar.gz'): {0}".format(filename) + ) + + # We are requiring a PEP 440 version, which cannot contain dashes, + # so we split on the last dash. + name_part, sep, version_part = filename[:-7].rpartition("-") + if not sep: + raise InvalidSdistFilename("Invalid sdist filename: {0}".format(filename)) + + name = canonicalize_name(name_part) + version = Version(version_part) + return (name, version) diff --git a/venv/Lib/site-packages/pip/_vendor/packaging/version.py b/venv/Lib/site-packages/pip/_vendor/packaging/version.py new file mode 100644 index 00000000..517d91f2 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/packaging/version.py @@ -0,0 +1,556 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+from __future__ import absolute_import, division, print_function + +import collections +import itertools +import re +import warnings + +from ._structures import Infinity, NegativeInfinity +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union + + from ._structures import InfinityType, NegativeInfinityType + + InfiniteTypes = Union[InfinityType, NegativeInfinityType] + PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] + SubLocalType = Union[InfiniteTypes, int, str] + LocalType = Union[ + NegativeInfinityType, + Tuple[ + Union[ + SubLocalType, + Tuple[SubLocalType, str], + Tuple[NegativeInfinityType, SubLocalType], + ], + ..., + ], + ] + CmpKey = Tuple[ + int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType + ] + LegacyCmpKey = Tuple[int, Tuple[str, ...]] + VersionComparisonMethod = Callable[ + [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool + ] + +__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] + + +_Version = collections.namedtuple( + "_Version", ["epoch", "release", "dev", "pre", "post", "local"] +) + + +def parse(version): + # type: (str) -> Union[LegacyVersion, Version] + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. + """ + + +class _BaseVersion(object): + _key = None # type: Union[CmpKey, LegacyCmpKey] + + def __hash__(self): + # type: () -> int + return hash(self._key) + + # Please keep the duplicated `isinstance` check + # in the six comparisons hereunder + # unless you find a way to avoid adding overhead function calls. 
+    def __lt__(self, other):
+        # type: (_BaseVersion) -> bool
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other):
+        # type: (_BaseVersion) -> bool
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other):
+        # type: (object) -> bool
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other):
+        # type: (_BaseVersion) -> bool
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other):
+        # type: (_BaseVersion) -> bool
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other):
+        # type: (object) -> bool
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+class LegacyVersion(_BaseVersion):
+    def __init__(self, version):
+        # type: (str) -> None
+        self._version = str(version)
+        self._key = _legacy_cmpkey(self._version)
+
+        warnings.warn(
+            "Creating a LegacyVersion has been deprecated and will be "
+            "removed in the next major release",
+            DeprecationWarning,
+        )
+
+    def __str__(self):
+        # type: () -> str
+        return self._version
+
+    def __repr__(self):
+        # type: () -> str
+        return "<LegacyVersion({0})>".format(repr(str(self)))
+
+    @property
+    def public(self):
+        # type: () -> str
+        return self._version
+
+    @property
+    def base_version(self):
+        # type: () -> str
+        return self._version
+
+    @property
+    def epoch(self):
+        # type: () -> int
+        return -1
+
+    @property
+    def release(self):
+        # type: () -> None
+        return None
+
+    @property
+    def pre(self):
+        # type: () -> None
+        return None
+
+    @property
+    def post(self):
+        # type: () -> None
+        return None
+
+    @property
+    def dev(self):
+        # type: () -> None
+        return None
+
+    @property
+    def local(self):
+        # type: () -> None
+        return None
+
+    @property
+    def is_prerelease(self):
+        # type: () -> bool
+        return False
+
+    @property
+    def is_postrelease(self):
+        # type: () -> bool
+        return False
+
+    @property
+    def is_devrelease(self):
+        # type: () -> bool
+        return False
+
+
+_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
+
+_legacy_version_replacement_map = {
+    "pre": "c",
+    "preview": "c",
+    "-": "final-",
+    "rc": "c",
+    "dev": "@",
+}
+
+
+def _parse_version_parts(s):
+    # type: (str) -> Iterator[str]
+    for part in _legacy_version_component_re.split(s):
+        part = _legacy_version_replacement_map.get(part, part)
+
+        if not part or part == ".":
+            continue
+
+        if part[:1] in "0123456789":
+            # pad for numeric comparison
+            yield part.zfill(8)
+        else:
+            yield "*" + part
+
+    # ensure that alpha/beta/candidate are before final
+    yield "*final"
+
+
+def _legacy_cmpkey(version):
+    # type: (str) -> LegacyCmpKey
+
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
+    # greater than or equal to 0. This will effectively put the LegacyVersion,
+    # which uses the defacto standard originally implemented by setuptools,
+    # as before all PEP 440 versions.
+    epoch = -1
+
+    # This scheme is taken from pkg_resources.parse_version setuptools prior to
+    # it's adoption of the packaging library.
+    parts = []  # type: List[str]
+    for part in _parse_version_parts(version.lower()):
+        if part.startswith("*"):
+            # remove "-" before a prerelease tag
+            if part < "*final":
+                while parts and parts[-1] == "*final-":
+                    parts.pop()
+
+            # remove trailing zeros from each series of numeric parts
+            while parts and parts[-1] == "00000000":
+                parts.pop()
+
+        parts.append(part)
+
+    return epoch, tuple(parts)
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    def __init__(self, version):
+        # type: (str) -> None
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self):
+        # type: () -> str
+        return "".format(repr(str(self)))
+
+    def __str__(self):
+        # type: () -> str
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(".post{0}".format(self.post))
+
+        # Development release
+        if self.dev is not None:
+            parts.append(".dev{0}".format(self.dev))
+
+        # Local version segment
+        if self.local is not None:
+            parts.append("+{0}".format(self.local))
+
+        return "".join(parts)
+
+    @property
+    def epoch(self):
+        # type: () -> int
+        _epoch = self._version.epoch  # type: int
+        return _epoch
+
+    @property
+    def release(self):
+        # type: () -> Tuple[int, ...]
+        _release = self._version.release  # type: Tuple[int, ...]
+        return _release
+
+    @property
+    def pre(self):
+        # type: () -> Optional[Tuple[str, int]]
+        _pre = self._version.pre  # type: Optional[Tuple[str, int]]
+        return _pre
+
+    @property
+    def post(self):
+        # type: () -> Optional[Tuple[str, int]]
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self):
+        # type: () -> Optional[Tuple[str, int]]
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self):
+        # type: () -> Optional[str]
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self):
+        # type: () -> str
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self):
+        # type: () -> str
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self):
+        # type: () -> bool
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self):
+        # type: () -> bool
+        return self.post is not None
+
+    @property
+    def is_devrelease(self):
+        # type: () -> bool
+        return self.dev is not None
+
+    @property
+    def major(self):
+        # type: () -> int
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self):
+        # type: () -> int
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self):
+        # type: () -> int
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+    letter,  # type: str
+    number,  # type: Union[str, bytes, SupportsInt]
+):
+    # type: (...) -> Optional[Tuple[str, int]]
+
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume that if we are given a number but not a letter, then this
+        # is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+    # type: (str) -> Optional[LocalType]
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch,  # type: int
+    release,  # type: Tuple[int, ...]
+    pre,  # type: Optional[Tuple[str, int]]
+    post,  # type: Optional[Tuple[str, int]]
+    dev,  # type: Optional[Tuple[str, int]]
+    local,  # type: Optional[Tuple[SubLocalType]]
+):
+    # type: (...) -> CmpKey
+
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all of the now
+    # leading zeros until we come to something non-zero, then re-reverse the
+    # rest back into the correct order, make it a tuple, and use
+    # that for our sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre = NegativeInfinity  # type: PrePostDevType
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post = NegativeInfinity  # type: PrePostDevType
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev = Infinity  # type: PrePostDevType
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local = NegativeInfinity  # type: LocalType
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
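
The comparison key assembled by _cmpkey above is what drives ordering between Version objects. A minimal sketch of the resulting behaviour, assuming the standalone packaging distribution (the same code vendored here) is importable:

    from packaging.version import Version, parse

    v = Version("1.0.post1")
    assert v.release == (1, 0) and v.post == 1 and not v.is_prerelease

    # dev releases sort before pre-releases, which sort before the final
    # release, which sorts before post releases; a local segment sorts
    # after the same public version
    assert parse("1.0.dev0") < parse("1.0a1") < parse("1.0rc1") < parse("1.0") < parse("1.0.post1")
    assert Version("1.0+local.1") > Version("1.0")
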
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__init__.py b/venv/Lib/site-packages/pip/_vendor/pep517/__init__.py
new file mode 100644
index 00000000..3b07c639
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/__init__.py
@@ -0,0 +1,6 @@
+"""Wrappers to build Python packages using PEP 517 hooks
+"""
+
+__version__ = '0.10.0'
+
+from .wrappers import *  # noqa: F401, F403
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 00000000..8aaf64a1
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-36.pyc
new file mode 100644
index 00000000..97c01f1d
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-36.pyc
new file mode 100644
index 00000000..33a90e6e
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-36.pyc
new file mode 100644
index 00000000..1208a125
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-36.pyc
new file mode 100644
index 00000000..a271a882
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/dirtools.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/dirtools.cpython-36.pyc
new file mode 100644
index 00000000..b01d8d56
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/dirtools.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-36.pyc
new file mode 100644
index 00000000..50708bc9
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/meta.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/meta.cpython-36.pyc
new file mode 100644
index 00000000..f53ad8f4
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/meta.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-36.pyc
new file mode 100644
index 00000000..2c968aa3
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/build.py b/venv/Lib/site-packages/pip/_vendor/pep517/build.py
new file mode 100644
index 00000000..f884bcf1
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/build.py
@@ -0,0 +1,127 @@
+"""Build a project using PEP 517 hooks.
+"""
+import argparse
+import logging
+import os
+from pip._vendor import toml
+import shutil
+
+from .envbuild import BuildEnvironment
+from .wrappers import Pep517HookCaller
+from .dirtools import tempdir, mkdir_p
+from .compat import FileNotFoundError
+
+log = logging.getLogger(__name__)
+
+
+def validate_system(system):
+    """
+    Ensure build system has the requisite fields.
+    """
+    required = {'requires', 'build-backend'}
+    if not (required <= set(system)):
+        message = "Missing required fields: {missing}".format(
+            missing=required-set(system),
+        )
+        raise ValueError(message)
+
+
+def load_system(source_dir):
+    """
+    Load the build system from a source dir (pyproject.toml).
+    """
+    pyproject = os.path.join(source_dir, 'pyproject.toml')
+    with open(pyproject) as f:
+        pyproject_data = toml.load(f)
+    return pyproject_data['build-system']
+
+
+def compat_system(source_dir):
+    """
+    Given a source dir, attempt to get a build system backend
+    and requirements from pyproject.toml. Fall back to
+    setuptools, but only if the file was not found or a build
+    system was not indicated.
+    """
+    try:
+        system = load_system(source_dir)
+    except (FileNotFoundError, KeyError):
+        system = {}
+    system.setdefault(
+        'build-backend',
+        'setuptools.build_meta:__legacy__',
+    )
+    system.setdefault('requires', ['setuptools', 'wheel'])
+    return system
+
+
+def _do_build(hooks, env, dist, dest):
+    get_requires_name = 'get_requires_for_build_{dist}'.format(**locals())
+    get_requires = getattr(hooks, get_requires_name)
+    reqs = get_requires({})
+    log.info('Got build requires: %s', reqs)
+
+    env.pip_install(reqs)
+    log.info('Installed dynamic build dependencies')
+
+    with tempdir() as td:
+        log.info('Trying to build %s in %s', dist, td)
+        build_name = 'build_{dist}'.format(**locals())
+        build = getattr(hooks, build_name)
+        filename = build(td, {})
+        source = os.path.join(td, filename)
+        shutil.move(source, os.path.join(dest, os.path.basename(filename)))
+
+
+def build(source_dir, dist, dest=None, system=None):
+    system = system or load_system(source_dir)
+    dest = os.path.join(source_dir, dest or 'dist')
+    mkdir_p(dest)
+
+    validate_system(system)
+    hooks = Pep517HookCaller(
+        source_dir, system['build-backend'], system.get('backend-path')
+    )
+
+    with BuildEnvironment() as env:
+        env.pip_install(system['requires'])
+        _do_build(hooks, env, dist, dest)
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    'source_dir',
+    help="A directory containing pyproject.toml",
+)
+parser.add_argument(
+    '--binary', '-b',
+    action='store_true',
+    default=False,
+)
+parser.add_argument(
+    '--source', '-s',
+    action='store_true',
+    default=False,
+)
+parser.add_argument(
+    '--out-dir', '-o',
+    help="Destination in which to save the builds relative to source dir",
+)
+
+
+def main(args):
+    log.warning('pep517.build is deprecated. '
+                'Consider switching to https://pypi.org/project/build/')
+
+    # determine which dists to build
+    dists = list(filter(None, (
+        'sdist' if args.source or not args.binary else None,
+        'wheel' if args.binary or not args.source else None,
+    )))
+
+    for dist in dists:
+        build(args.source_dir, dist, args.out_dir)
+
+
+if __name__ == '__main__':
+    main(parser.parse_args())
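
For reference, the module above can be driven from the command line (python -m pep517.build --binary .) or programmatically. A minimal sketch, assuming pep517 is importable as a top-level package and the paths are illustrative:

    from pep517.build import build, compat_system

    source_dir = "."                     # directory containing pyproject.toml
    system = compat_system(source_dir)   # falls back to setuptools if nothing is declared
    build(source_dir, "wheel", dest="dist", system=system)
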
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/check.py b/venv/Lib/site-packages/pip/_vendor/pep517/check.py
new file mode 100644
index 00000000..decab8a3
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/check.py
@@ -0,0 +1,206 @@
+"""Check a project and backend by attempting to build using PEP 517 hooks.
+"""
+import argparse
+import logging
+import os
+from os.path import isfile, join as pjoin
+from pip._vendor.toml import TomlDecodeError, load as toml_load
+import shutil
+from subprocess import CalledProcessError
+import sys
+import tarfile
+from tempfile import mkdtemp
+import zipfile
+
+from .colorlog import enable_colourful_output
+from .envbuild import BuildEnvironment
+from .wrappers import Pep517HookCaller
+
+log = logging.getLogger(__name__)
+
+
+def check_build_sdist(hooks, build_sys_requires):
+    with BuildEnvironment() as env:
+        try:
+            env.pip_install(build_sys_requires)
+            log.info('Installed static build dependencies')
+        except CalledProcessError:
+            log.error('Failed to install static build dependencies')
+            return False
+
+        try:
+            reqs = hooks.get_requires_for_build_sdist({})
+            log.info('Got build requires: %s', reqs)
+        except Exception:
+            log.error('Failure in get_requires_for_build_sdist', exc_info=True)
+            return False
+
+        try:
+            env.pip_install(reqs)
+            log.info('Installed dynamic build dependencies')
+        except CalledProcessError:
+            log.error('Failed to install dynamic build dependencies')
+            return False
+
+        td = mkdtemp()
+        log.info('Trying to build sdist in %s', td)
+        try:
+            try:
+                filename = hooks.build_sdist(td, {})
+                log.info('build_sdist returned %r', filename)
+            except Exception:
+                log.info('Failure in build_sdist', exc_info=True)
+                return False
+
+            if not filename.endswith('.tar.gz'):
+                log.error(
+                    "Filename %s doesn't have .tar.gz extension", filename)
+                return False
+
+            path = pjoin(td, filename)
+            if isfile(path):
+                log.info("Output file %s exists", path)
+            else:
+                log.error("Output file %s does not exist", path)
+                return False
+
+            if tarfile.is_tarfile(path):
+                log.info("Output file is a tar file")
+            else:
+                log.error("Output file is not a tar file")
+                return False
+
+        finally:
+            shutil.rmtree(td)
+
+        return True
+
+
+def check_build_wheel(hooks, build_sys_requires):
+    with BuildEnvironment() as env:
+        try:
+            env.pip_install(build_sys_requires)
+            log.info('Installed static build dependencies')
+        except CalledProcessError:
+            log.error('Failed to install static build dependencies')
+            return False
+
+        try:
+            reqs = hooks.get_requires_for_build_wheel({})
+            log.info('Got build requires: %s', reqs)
+        except Exception:
+            log.error('Failure in get_requires_for_build_wheel', exc_info=True)
+            return False
+
+        try:
+            env.pip_install(reqs)
+            log.info('Installed dynamic build dependencies')
+        except CalledProcessError:
+            log.error('Failed to install dynamic build dependencies')
+            return False
+
+        td = mkdtemp()
+        log.info('Trying to build wheel in %s', td)
+        try:
+            try:
+                filename = hooks.build_wheel(td, {})
+                log.info('build_wheel returned %r', filename)
+            except Exception:
+                log.info('Failure in build_wheel', exc_info=True)
+                return False
+
+            if not filename.endswith('.whl'):
+                log.error("Filename %s doesn't have .whl extension", filename)
+                return False
+
+            path = pjoin(td, filename)
+            if isfile(path):
+                log.info("Output file %s exists", path)
+            else:
+                log.error("Output file %s does not exist", path)
+                return False
+
+            if zipfile.is_zipfile(path):
+                log.info("Output file is a zip file")
+            else:
+                log.error("Output file is not a zip file")
+                return False
+
+        finally:
+            shutil.rmtree(td)
+
+        return True
+
+
+def check(source_dir):
+    pyproject = pjoin(source_dir, 'pyproject.toml')
+    if isfile(pyproject):
+        log.info('Found pyproject.toml')
+    else:
+        log.error('Missing pyproject.toml')
+        return False
+
+    try:
+        with open(pyproject) as f:
+            pyproject_data = toml_load(f)
+        # Ensure the mandatory data can be loaded
+        buildsys = pyproject_data['build-system']
+        requires = buildsys['requires']
+        backend = buildsys['build-backend']
+        backend_path = buildsys.get('backend-path')
+        log.info('Loaded pyproject.toml')
+    except (TomlDecodeError, KeyError):
+        log.error("Invalid pyproject.toml", exc_info=True)
+        return False
+
+    hooks = Pep517HookCaller(source_dir, backend, backend_path)
+
+    sdist_ok = check_build_sdist(hooks, requires)
+    wheel_ok = check_build_wheel(hooks, requires)
+
+    if not sdist_ok:
+        log.warning('Sdist checks failed; scroll up to see')
+    if not wheel_ok:
+        log.warning('Wheel checks failed')
+
+    return sdist_ok
+
+
+def main(argv=None):
+    log.warning('pep517.check is deprecated. '
+                'Consider switching to https://pypi.org/project/build/')
+
+    ap = argparse.ArgumentParser()
+    ap.add_argument(
+        'source_dir',
+        help="A directory containing pyproject.toml")
+    args = ap.parse_args(argv)
+
+    enable_colourful_output()
+
+    ok = check(args.source_dir)
+
+    if ok:
+        print(ansi('Checks passed', 'green'))
+    else:
+        print(ansi('Checks failed', 'red'))
+        sys.exit(1)
+
+
+ansi_codes = {
+    'reset': '\x1b[0m',
+    'bold': '\x1b[1m',
+    'red': '\x1b[31m',
+    'green': '\x1b[32m',
+}
+
+
+def ansi(s, attr):
+    if os.name != 'nt' and sys.stdout.isatty():
+        return ansi_codes[attr] + str(s) + ansi_codes['reset']
+    else:
+        return str(s)
+
+
+if __name__ == '__main__':
+    main()
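
A sketch of driving the checker above without the CLI (note that, as written, the return value reflects only the sdist checks):

    import logging
    from pep517.check import check

    logging.basicConfig(level=logging.INFO)
    ok = check(".")          # "." must contain a pyproject.toml
    print("passed" if ok else "failed")
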
diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/colorlog.py b/venv/Lib/site-packages/pip/_vendor/pep517/colorlog.py
similarity index 100%
rename from env/lib/python2.7/site-packages/pip/_vendor/pep517/colorlog.py
rename to venv/Lib/site-packages/pip/_vendor/pep517/colorlog.py
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/compat.py b/venv/Lib/site-packages/pip/_vendor/pep517/compat.py
new file mode 100644
index 00000000..8432acb7
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/compat.py
@@ -0,0 +1,34 @@
+"""Python 2/3 compatibility"""
+import json
+import sys
+
+
+# Handle reading and writing JSON in UTF-8, on Python 3 and 2.
+
+if sys.version_info[0] >= 3:
+    # Python 3
+    def write_json(obj, path, **kwargs):
+        with open(path, 'w', encoding='utf-8') as f:
+            json.dump(obj, f, **kwargs)
+
+    def read_json(path):
+        with open(path, 'r', encoding='utf-8') as f:
+            return json.load(f)
+
+else:
+    # Python 2
+    def write_json(obj, path, **kwargs):
+        with open(path, 'wb') as f:
+            json.dump(obj, f, encoding='utf-8', **kwargs)
+
+    def read_json(path):
+        with open(path, 'rb') as f:
+            return json.load(f)
+
+
+# FileNotFoundError
+
+try:
+    FileNotFoundError = FileNotFoundError
+except NameError:
+    FileNotFoundError = IOError
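
A quick sketch of the helpers above round-tripping a hook payload (the file name is illustrative):

    from pep517.compat import write_json, read_json

    write_json({"kwargs": {"config_settings": None}}, "input.json", indent=2)
    print(read_json("input.json"))
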
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/dirtools.py b/venv/Lib/site-packages/pip/_vendor/pep517/dirtools.py
new file mode 100644
index 00000000..58c6ca0c
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/dirtools.py
@@ -0,0 +1,44 @@
+import os
+import io
+import contextlib
+import tempfile
+import shutil
+import errno
+import zipfile
+
+
+@contextlib.contextmanager
+def tempdir():
+    """Create a temporary directory in a context manager."""
+    td = tempfile.mkdtemp()
+    try:
+        yield td
+    finally:
+        shutil.rmtree(td)
+
+
+def mkdir_p(*args, **kwargs):
+    """Like `mkdir`, but does not raise an exception if the
+    directory already exists.
+    """
+    try:
+        return os.mkdir(*args, **kwargs)
+    except OSError as exc:
+        if exc.errno != errno.EEXIST:
+            raise
+
+
+def dir_to_zipfile(root):
+    """Construct an in-memory zip file for a directory."""
+    buffer = io.BytesIO()
+    zip_file = zipfile.ZipFile(buffer, 'w')
+    for root, dirs, files in os.walk(root):
+        for path in dirs:
+            fs_path = os.path.join(root, path)
+            rel_path = os.path.relpath(fs_path, root)
+            zip_file.writestr(rel_path + '/', '')
+        for path in files:
+            fs_path = os.path.join(root, path)
+            rel_path = os.path.relpath(fs_path, root)
+            zip_file.write(fs_path, rel_path)
+    return zip_file
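
A small sketch of the helpers above: tempdir() cleans up on exit, mkdir_p() tolerates an existing directory, and dir_to_zipfile() returns an open in-memory ZipFile:

    from pep517.dirtools import tempdir, mkdir_p, dir_to_zipfile

    with tempdir() as td:
        mkdir_p(td)                # no-op: the directory already exists
        zf = dir_to_zipfile(td)
        print(zf.namelist())       # [] for an empty directory
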
diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pep517/envbuild.py b/venv/Lib/site-packages/pip/_vendor/pep517/envbuild.py
similarity index 88%
rename from env/lib/python2.7/site-packages/pip/_vendor/pep517/envbuild.py
rename to venv/Lib/site-packages/pip/_vendor/pep517/envbuild.py
index f7ac5f46..4088dcdb 100644
--- a/env/lib/python2.7/site-packages/pip/_vendor/pep517/envbuild.py
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/envbuild.py
@@ -3,23 +3,27 @@
 
 import os
 import logging
-from pip._vendor import pytoml
+from pip._vendor import toml
 import shutil
 from subprocess import check_call
 import sys
 from sysconfig import get_paths
 from tempfile import mkdtemp
 
-from .wrappers import Pep517HookCaller
+from .wrappers import Pep517HookCaller, LoggerWrapper
 
 log = logging.getLogger(__name__)
 
 
 def _load_pyproject(source_dir):
     with open(os.path.join(source_dir, 'pyproject.toml')) as f:
-        pyproject_data = pytoml.load(f)
+        pyproject_data = toml.load(f)
     buildsys = pyproject_data['build-system']
-    return buildsys['requires'], buildsys['build-backend']
+    return (
+        buildsys['requires'],
+        buildsys['build-backend'],
+        buildsys.get('backend-path'),
+    )
 
 
 class BuildEnvironment(object):
@@ -90,9 +94,14 @@ def pip_install(self, reqs):
         if not reqs:
             return
         log.info('Calling pip to install %s', reqs)
-        check_call([
+        cmd = [
             sys.executable, '-m', 'pip', 'install', '--ignore-installed',
-            '--prefix', self.path] + list(reqs))
+            '--prefix', self.path] + list(reqs)
+        check_call(
+            cmd,
+            stdout=LoggerWrapper(log, logging.INFO),
+            stderr=LoggerWrapper(log, logging.ERROR),
+        )
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         needs_cleanup = (
@@ -126,8 +135,8 @@ def build_wheel(source_dir, wheel_dir, config_settings=None):
     """
     if config_settings is None:
         config_settings = {}
-    requires, backend = _load_pyproject(source_dir)
-    hooks = Pep517HookCaller(source_dir, backend)
+    requires, backend, backend_path = _load_pyproject(source_dir)
+    hooks = Pep517HookCaller(source_dir, backend, backend_path)
 
     with BuildEnvironment() as env:
         env.pip_install(requires)
@@ -148,8 +157,8 @@ def build_sdist(source_dir, sdist_dir, config_settings=None):
     """
     if config_settings is None:
         config_settings = {}
-    requires, backend = _load_pyproject(source_dir)
-    hooks = Pep517HookCaller(source_dir, backend)
+    requires, backend, backend_path = _load_pyproject(source_dir)
+    hooks = Pep517HookCaller(source_dir, backend, backend_path)
 
     with BuildEnvironment() as env:
         env.pip_install(requires)
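
The two helpers patched above are the high-level entry points: each creates an isolated build environment, installs the requirements declared in pyproject.toml, then runs the corresponding hook. A minimal sketch with illustrative paths:

    import os
    from pep517.envbuild import build_wheel, build_sdist

    os.makedirs("dist", exist_ok=True)
    wheel_name = build_wheel(".", "dist")   # (source dir, output dir)
    sdist_name = build_sdist(".", "dist")
    print(wheel_name, sdist_name)
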
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/in_process/__init__.py b/venv/Lib/site-packages/pip/_vendor/pep517/in_process/__init__.py
new file mode 100644
index 00000000..c932313b
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/in_process/__init__.py
@@ -0,0 +1,17 @@
+"""This is a subpackage because the directory is on sys.path for _in_process.py
+
+The subpackage should stay as empty as possible to avoid shadowing modules that
+the backend might import.
+"""
+from os.path import dirname, abspath, join as pjoin
+from contextlib import contextmanager
+
+try:
+    import importlib.resources as resources
+
+    def _in_proc_script_path():
+        return resources.path(__package__, '_in_process.py')
+except ImportError:
+    @contextmanager
+    def _in_proc_script_path():
+        yield pjoin(dirname(abspath(__file__)), '_in_process.py')
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 00000000..f61e9cf3
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-36.pyc
new file mode 100644
index 00000000..aeaa324d
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/in_process/_in_process.py b/venv/Lib/site-packages/pip/_vendor/pep517/in_process/_in_process.py
new file mode 100644
index 00000000..a536b03e
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/in_process/_in_process.py
@@ -0,0 +1,280 @@
+"""This is invoked in a subprocess to call the build backend hooks.
+
+It expects:
+- Command line args: hook_name, control_dir
+- Environment variables:
+      PEP517_BUILD_BACKEND=entry.point:spec
+      PEP517_BACKEND_PATH=paths (separated with os.pathsep)
+- control_dir/input.json:
+  - {"kwargs": {...}}
+
+Results:
+- control_dir/output.json
+  - {"return_val": ...}
+"""
+from glob import glob
+from importlib import import_module
+import json
+import os
+import os.path
+from os.path import join as pjoin
+import re
+import shutil
+import sys
+import traceback
+
+# This file is run as a script, and `import compat` is not zip-safe, so we
+# include write_json() and read_json() from compat.py.
+#
+# Handle reading and writing JSON in UTF-8, on Python 3 and 2.
+
+if sys.version_info[0] >= 3:
+    # Python 3
+    def write_json(obj, path, **kwargs):
+        with open(path, 'w', encoding='utf-8') as f:
+            json.dump(obj, f, **kwargs)
+
+    def read_json(path):
+        with open(path, 'r', encoding='utf-8') as f:
+            return json.load(f)
+
+else:
+    # Python 2
+    def write_json(obj, path, **kwargs):
+        with open(path, 'wb') as f:
+            json.dump(obj, f, encoding='utf-8', **kwargs)
+
+    def read_json(path):
+        with open(path, 'rb') as f:
+            return json.load(f)
+
+
+class BackendUnavailable(Exception):
+    """Raised if we cannot import the backend"""
+    def __init__(self, traceback):
+        self.traceback = traceback
+
+
+class BackendInvalid(Exception):
+    """Raised if the backend is invalid"""
+    def __init__(self, message):
+        self.message = message
+
+
+class HookMissing(Exception):
+    """Raised if a hook is missing and we are not executing the fallback"""
+
+
+def contained_in(filename, directory):
+    """Test if a file is located within the given directory."""
+    filename = os.path.normcase(os.path.abspath(filename))
+    directory = os.path.normcase(os.path.abspath(directory))
+    return os.path.commonprefix([filename, directory]) == directory
+
+
+def _build_backend():
+    """Find and load the build backend"""
+    # Add in-tree backend directories to the front of sys.path.
+    backend_path = os.environ.get('PEP517_BACKEND_PATH')
+    if backend_path:
+        extra_pathitems = backend_path.split(os.pathsep)
+        sys.path[:0] = extra_pathitems
+
+    ep = os.environ['PEP517_BUILD_BACKEND']
+    mod_path, _, obj_path = ep.partition(':')
+    try:
+        obj = import_module(mod_path)
+    except ImportError:
+        raise BackendUnavailable(traceback.format_exc())
+
+    if backend_path:
+        if not any(
+            contained_in(obj.__file__, path)
+            for path in extra_pathitems
+        ):
+            raise BackendInvalid("Backend was not loaded from backend-path")
+
+    if obj_path:
+        for path_part in obj_path.split('.'):
+            obj = getattr(obj, path_part)
+    return obj
+
+
+def get_requires_for_build_wheel(config_settings):
+    """Invoke the optional get_requires_for_build_wheel hook
+
+    Returns [] if the hook is not defined.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.get_requires_for_build_wheel
+    except AttributeError:
+        return []
+    else:
+        return hook(config_settings)
+
+
+def prepare_metadata_for_build_wheel(
+        metadata_directory, config_settings, _allow_fallback):
+    """Invoke optional prepare_metadata_for_build_wheel
+
+    Implements a fallback by building a wheel if the hook isn't defined,
+    unless _allow_fallback is False in which case HookMissing is raised.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.prepare_metadata_for_build_wheel
+    except AttributeError:
+        if not _allow_fallback:
+            raise HookMissing()
+        return _get_wheel_metadata_from_wheel(backend, metadata_directory,
+                                              config_settings)
+    else:
+        return hook(metadata_directory, config_settings)
+
+
+WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL'
+
+
+def _dist_info_files(whl_zip):
+    """Identify the .dist-info folder inside a wheel ZipFile."""
+    res = []
+    for path in whl_zip.namelist():
+        m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path)
+        if m:
+            res.append(path)
+    if res:
+        return res
+    raise Exception("No .dist-info folder found in wheel")
+
+
+def _get_wheel_metadata_from_wheel(
+        backend, metadata_directory, config_settings):
+    """Build a wheel and extract the metadata from it.
+
+    Fallback for when the build backend does not
+    define the 'get_wheel_metadata' hook.
+    """
+    from zipfile import ZipFile
+    whl_basename = backend.build_wheel(metadata_directory, config_settings)
+    with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'):
+        pass  # Touch marker file
+
+    whl_file = os.path.join(metadata_directory, whl_basename)
+    with ZipFile(whl_file) as zipf:
+        dist_info = _dist_info_files(zipf)
+        zipf.extractall(path=metadata_directory, members=dist_info)
+    return dist_info[0].split('/')[0]
+
+
+def _find_already_built_wheel(metadata_directory):
+    """Check for a wheel already built during the get_wheel_metadata hook.
+    """
+    if not metadata_directory:
+        return None
+    metadata_parent = os.path.dirname(metadata_directory)
+    if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)):
+        return None
+
+    whl_files = glob(os.path.join(metadata_parent, '*.whl'))
+    if not whl_files:
+        print('Found wheel built marker, but no .whl files')
+        return None
+    if len(whl_files) > 1:
+        print('Found multiple .whl files; unspecified behaviour. '
+              'Will call build_wheel.')
+        return None
+
+    # Exactly one .whl file
+    return whl_files[0]
+
+
+def build_wheel(wheel_directory, config_settings, metadata_directory=None):
+    """Invoke the mandatory build_wheel hook.
+
+    If a wheel was already built in the
+    prepare_metadata_for_build_wheel fallback, this
+    will copy it rather than rebuilding the wheel.
+    """
+    prebuilt_whl = _find_already_built_wheel(metadata_directory)
+    if prebuilt_whl:
+        shutil.copy2(prebuilt_whl, wheel_directory)
+        return os.path.basename(prebuilt_whl)
+
+    return _build_backend().build_wheel(wheel_directory, config_settings,
+                                        metadata_directory)
+
+
+def get_requires_for_build_sdist(config_settings):
+    """Invoke the optional get_requires_for_build_wheel hook
+
+    Returns [] if the hook is not defined.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.get_requires_for_build_sdist
+    except AttributeError:
+        return []
+    else:
+        return hook(config_settings)
+
+
+class _DummyException(Exception):
+    """Nothing should ever raise this exception"""
+
+
+class GotUnsupportedOperation(Exception):
+    """For internal use when backend raises UnsupportedOperation"""
+    def __init__(self, traceback):
+        self.traceback = traceback
+
+
+def build_sdist(sdist_directory, config_settings):
+    """Invoke the mandatory build_sdist hook."""
+    backend = _build_backend()
+    try:
+        return backend.build_sdist(sdist_directory, config_settings)
+    except getattr(backend, 'UnsupportedOperation', _DummyException):
+        raise GotUnsupportedOperation(traceback.format_exc())
+
+
+HOOK_NAMES = {
+    'get_requires_for_build_wheel',
+    'prepare_metadata_for_build_wheel',
+    'build_wheel',
+    'get_requires_for_build_sdist',
+    'build_sdist',
+}
+
+
+def main():
+    if len(sys.argv) < 3:
+        sys.exit("Needs args: hook_name, control_dir")
+    hook_name = sys.argv[1]
+    control_dir = sys.argv[2]
+    if hook_name not in HOOK_NAMES:
+        sys.exit("Unknown hook: %s" % hook_name)
+    hook = globals()[hook_name]
+
+    hook_input = read_json(pjoin(control_dir, 'input.json'))
+
+    json_out = {'unsupported': False, 'return_val': None}
+    try:
+        json_out['return_val'] = hook(**hook_input['kwargs'])
+    except BackendUnavailable as e:
+        json_out['no_backend'] = True
+        json_out['traceback'] = e.traceback
+    except BackendInvalid as e:
+        json_out['backend_invalid'] = True
+        json_out['backend_error'] = e.message
+    except GotUnsupportedOperation as e:
+        json_out['unsupported'] = True
+        json_out['traceback'] = e.traceback
+    except HookMissing:
+        json_out['hook_missing'] = True
+
+    write_json(json_out, pjoin(control_dir, 'output.json'), indent=2)
+
+
+if __name__ == '__main__':
+    main()
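
The docstring at the top of the script above describes the whole protocol: the caller writes control_dir/input.json, exports PEP517_BUILD_BACKEND (and optionally PEP517_BACKEND_PATH), runs the script with the hook name and control dir, then reads control_dir/output.json. A heavily simplified sketch of that handshake; the script path and backend name are illustrative, and in practice Pep517HookCaller in wrappers.py performs these steps:

    import json, os, subprocess, sys, tempfile

    control_dir = tempfile.mkdtemp()
    with open(os.path.join(control_dir, "input.json"), "w") as f:
        json.dump({"kwargs": {"config_settings": None}}, f)

    env = dict(os.environ, PEP517_BUILD_BACKEND="setuptools.build_meta")
    subprocess.check_call(
        [sys.executable, "/path/to/_in_process.py",
         "get_requires_for_build_wheel", control_dir],
        cwd=".",            # the project's source directory
        env=env,
    )

    with open(os.path.join(control_dir, "output.json")) as f:
        print(json.load(f)["return_val"])
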
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/meta.py b/venv/Lib/site-packages/pip/_vendor/pep517/meta.py
new file mode 100644
index 00000000..d525de5c
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/meta.py
@@ -0,0 +1,92 @@
+"""Build metadata for a project using PEP 517 hooks.
+"""
+import argparse
+import logging
+import os
+import shutil
+import functools
+
+try:
+    import importlib.metadata as imp_meta
+except ImportError:
+    import importlib_metadata as imp_meta
+
+try:
+    from zipfile import Path
+except ImportError:
+    from zipp import Path
+
+from .envbuild import BuildEnvironment
+from .wrappers import Pep517HookCaller, quiet_subprocess_runner
+from .dirtools import tempdir, mkdir_p, dir_to_zipfile
+from .build import validate_system, load_system, compat_system
+
+log = logging.getLogger(__name__)
+
+
+def _prep_meta(hooks, env, dest):
+    reqs = hooks.get_requires_for_build_wheel({})
+    log.info('Got build requires: %s', reqs)
+
+    env.pip_install(reqs)
+    log.info('Installed dynamic build dependencies')
+
+    with tempdir() as td:
+        log.info('Trying to build metadata in %s', td)
+        filename = hooks.prepare_metadata_for_build_wheel(td, {})
+        source = os.path.join(td, filename)
+        shutil.move(source, os.path.join(dest, os.path.basename(filename)))
+
+
+def build(source_dir='.', dest=None, system=None):
+    system = system or load_system(source_dir)
+    dest = os.path.join(source_dir, dest or 'dist')
+    mkdir_p(dest)
+    validate_system(system)
+    hooks = Pep517HookCaller(
+        source_dir, system['build-backend'], system.get('backend-path')
+    )
+
+    with hooks.subprocess_runner(quiet_subprocess_runner):
+        with BuildEnvironment() as env:
+            env.pip_install(system['requires'])
+            _prep_meta(hooks, env, dest)
+
+
+def build_as_zip(builder=build):
+    with tempdir() as out_dir:
+        builder(dest=out_dir)
+        return dir_to_zipfile(out_dir)
+
+
+def load(root):
+    """
+    Given a source directory (root) of a package,
+    return an importlib.metadata.Distribution object
+    with metadata built from that package.
+    """
+    root = os.path.expanduser(root)
+    system = compat_system(root)
+    builder = functools.partial(build, source_dir=root, system=system)
+    path = Path(build_as_zip(builder))
+    return imp_meta.PathDistribution(path)
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    'source_dir',
+    help="A directory containing pyproject.toml",
+)
+parser.add_argument(
+    '--out-dir', '-o',
+    help="Destination in which to save the builds relative to source dir",
+)
+
+
+def main():
+    args = parser.parse_args()
+    build(args.source_dir, args.out_dir)
+
+
+if __name__ == '__main__':
+    main()
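
A sketch of the load() helper above, which yields package metadata from a source tree without installing it (the path is illustrative):

    from pep517.meta import load

    dist = load(".")                 # "." must contain a buildable project
    print(dist.metadata["Name"], dist.version)
    print(dist.requires or [])
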
diff --git a/venv/Lib/site-packages/pip/_vendor/pep517/wrappers.py b/venv/Lib/site-packages/pip/_vendor/pep517/wrappers.py
new file mode 100644
index 00000000..00974aa8
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pep517/wrappers.py
@@ -0,0 +1,318 @@
+import threading
+from contextlib import contextmanager
+import os
+from os.path import abspath, join as pjoin
+import shutil
+from subprocess import check_call, check_output, STDOUT
+import sys
+from tempfile import mkdtemp
+
+from . import compat
+from .in_process import _in_proc_script_path
+
+__all__ = [
+    'BackendUnavailable',
+    'BackendInvalid',
+    'HookMissing',
+    'UnsupportedOperation',
+    'default_subprocess_runner',
+    'quiet_subprocess_runner',
+    'Pep517HookCaller',
+]
+
+
+@contextmanager
+def tempdir():
+    td = mkdtemp()
+    try:
+        yield td
+    finally:
+        shutil.rmtree(td)
+
+
+class BackendUnavailable(Exception):
+    """Will be raised if the backend cannot be imported in the hook process."""
+    def __init__(self, traceback):
+        self.traceback = traceback
+
+
+class BackendInvalid(Exception):
+    """Will be raised if the backend is invalid."""
+    def __init__(self, backend_name, backend_path, message):
+        self.backend_name = backend_name
+        self.backend_path = backend_path
+        self.message = message
+
+
+class HookMissing(Exception):
+    """Will be raised on missing hooks."""
+    def __init__(self, hook_name):
+        super(HookMissing, self).__init__(hook_name)
+        self.hook_name = hook_name
+
+
+class UnsupportedOperation(Exception):
+    """May be raised by build_sdist if the backend indicates that it can't."""
+    def __init__(self, traceback):
+        self.traceback = traceback
+
+
+def default_subprocess_runner(cmd, cwd=None, extra_environ=None):
+    """The default method of calling the wrapper subprocess."""
+    env = os.environ.copy()
+    if extra_environ:
+        env.update(extra_environ)
+
+    check_call(cmd, cwd=cwd, env=env)
+
+
+def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None):
+    """A method of calling the wrapper subprocess while suppressing output."""
+    env = os.environ.copy()
+    if extra_environ:
+        env.update(extra_environ)
+
+    check_output(cmd, cwd=cwd, env=env, stderr=STDOUT)
+
+
+def norm_and_check(source_tree, requested):
+    """Normalise and check a backend path.
+
+    Ensure that the requested backend path is specified as a relative path,
+    and resolves to a location under the given source tree.
+
+    Return an absolute version of the requested path.
+    """
+    if os.path.isabs(requested):
+        raise ValueError("paths must be relative")
+
+    abs_source = os.path.abspath(source_tree)
+    abs_requested = os.path.normpath(os.path.join(abs_source, requested))
+    # We have to use commonprefix for Python 2.7 compatibility. So we
+    # normalise case to avoid problems because commonprefix is a character
+    # based comparison :-(
+    norm_source = os.path.normcase(abs_source)
+    norm_requested = os.path.normcase(abs_requested)
+    if os.path.commonprefix([norm_source, norm_requested]) != norm_source:
+        raise ValueError("paths must be inside source tree")
+
+    return abs_requested
+
+
+class Pep517HookCaller(object):
+    """A wrapper around a source directory to be built with a PEP 517 backend.
+
+    :param source_dir: The path to the source directory, containing
+        pyproject.toml.
+    :param build_backend: The build backend spec, as per PEP 517, from
+        pyproject.toml.
+    :param backend_path: The backend path, as per PEP 517, from pyproject.toml.
+    :param runner: A callable that invokes the wrapper subprocess.
+    :param python_executable: The Python executable used to invoke the backend
+
+    The 'runner', if provided, must expect the following:
+
+    - cmd: a list of strings representing the command and arguments to
+      execute, as would be passed to e.g. 'subprocess.check_call'.
+    - cwd: a string representing the working directory that must be
+      used for the subprocess. Corresponds to the provided source_dir.
+    - extra_environ: a dict mapping environment variable names to values
+      which must be set for the subprocess execution.
+    """
+    def __init__(
+            self,
+            source_dir,
+            build_backend,
+            backend_path=None,
+            runner=None,
+            python_executable=None,
+    ):
+        if runner is None:
+            runner = default_subprocess_runner
+
+        self.source_dir = abspath(source_dir)
+        self.build_backend = build_backend
+        if backend_path:
+            backend_path = [
+                norm_and_check(self.source_dir, p) for p in backend_path
+            ]
+        self.backend_path = backend_path
+        self._subprocess_runner = runner
+        if not python_executable:
+            python_executable = sys.executable
+        self.python_executable = python_executable
+
+    @contextmanager
+    def subprocess_runner(self, runner):
+        """A context manager for temporarily overriding the default subprocess
+        runner.
+        """
+        prev = self._subprocess_runner
+        self._subprocess_runner = runner
+        try:
+            yield
+        finally:
+            self._subprocess_runner = prev
+
+    def get_requires_for_build_wheel(self, config_settings=None):
+        """Identify packages required for building a wheel
+
+        Returns a list of dependency specifications, e.g.::
+
+            ["wheel >= 0.25", "setuptools"]
+
+        This does not include requirements specified in pyproject.toml.
+        It returns the result of calling the equivalently named hook in a
+        subprocess.
+        """
+        return self._call_hook('get_requires_for_build_wheel', {
+            'config_settings': config_settings
+        })
+
+    def prepare_metadata_for_build_wheel(
+            self, metadata_directory, config_settings=None,
+            _allow_fallback=True):
+        """Prepare a ``*.dist-info`` folder with metadata for this project.
+
+        Returns the name of the newly created folder.
+
+        If the build backend defines a hook with this name, it will be called
+        in a subprocess. If not, the backend will be asked to build a wheel,
+        and the dist-info extracted from that (unless _allow_fallback is
+        False).
+        """
+        return self._call_hook('prepare_metadata_for_build_wheel', {
+            'metadata_directory': abspath(metadata_directory),
+            'config_settings': config_settings,
+            '_allow_fallback': _allow_fallback,
+        })
+
+    def build_wheel(
+            self, wheel_directory, config_settings=None,
+            metadata_directory=None):
+        """Build a wheel from this project.
+
+        Returns the name of the newly created file.
+
+        In general, this will call the 'build_wheel' hook in the backend.
+        However, if that was previously called by
+        'prepare_metadata_for_build_wheel', and the same metadata_directory is
+        used, the previously built wheel will be copied to wheel_directory.
+        """
+        if metadata_directory is not None:
+            metadata_directory = abspath(metadata_directory)
+        return self._call_hook('build_wheel', {
+            'wheel_directory': abspath(wheel_directory),
+            'config_settings': config_settings,
+            'metadata_directory': metadata_directory,
+        })
+
+    def get_requires_for_build_sdist(self, config_settings=None):
+        """Identify packages required for building a wheel
+
+        Returns a list of dependency specifications, e.g.::
+
+            ["setuptools >= 26"]
+
+        This does not include requirements specified in pyproject.toml.
+        It returns the result of calling the equivalently named hook in a
+        subprocess.
+        """
+        return self._call_hook('get_requires_for_build_sdist', {
+            'config_settings': config_settings
+        })
+
+    def build_sdist(self, sdist_directory, config_settings=None):
+        """Build an sdist from this project.
+
+        Returns the name of the newly created file.
+
+        This calls the 'build_sdist' backend hook in a subprocess.
+        """
+        return self._call_hook('build_sdist', {
+            'sdist_directory': abspath(sdist_directory),
+            'config_settings': config_settings,
+        })
+
+    def _call_hook(self, hook_name, kwargs):
+        # On Python 2, pytoml returns Unicode values (which is correct) but the
+        # environment passed to check_call needs to contain string values. We
+        # convert here by encoding using ASCII (the backend can only contain
+        # letters, digits and _, . and : characters, and will be used as a
+        # Python identifier, so non-ASCII content is wrong on Python 2 in
+        # any case).
+        # For backend_path, we use sys.getfilesystemencoding.
+        if sys.version_info[0] == 2:
+            build_backend = self.build_backend.encode('ASCII')
+        else:
+            build_backend = self.build_backend
+        extra_environ = {'PEP517_BUILD_BACKEND': build_backend}
+
+        if self.backend_path:
+            backend_path = os.pathsep.join(self.backend_path)
+            if sys.version_info[0] == 2:
+                backend_path = backend_path.encode(sys.getfilesystemencoding())
+            extra_environ['PEP517_BACKEND_PATH'] = backend_path
+
+        with tempdir() as td:
+            hook_input = {'kwargs': kwargs}
+            compat.write_json(hook_input, pjoin(td, 'input.json'),
+                              indent=2)
+
+            # Run the hook in a subprocess
+            with _in_proc_script_path() as script:
+                python = self.python_executable
+                self._subprocess_runner(
+                    [python, abspath(str(script)), hook_name, td],
+                    cwd=self.source_dir,
+                    extra_environ=extra_environ
+                )
+
+            data = compat.read_json(pjoin(td, 'output.json'))
+            if data.get('unsupported'):
+                raise UnsupportedOperation(data.get('traceback', ''))
+            if data.get('no_backend'):
+                raise BackendUnavailable(data.get('traceback', ''))
+            if data.get('backend_invalid'):
+                raise BackendInvalid(
+                    backend_name=self.build_backend,
+                    backend_path=self.backend_path,
+                    message=data.get('backend_error', '')
+                )
+            if data.get('hook_missing'):
+                raise HookMissing(hook_name)
+            return data['return_val']
+
+
+class LoggerWrapper(threading.Thread):
+    """
+    Read messages from a pipe and redirect them
+    to a logger (see python's logging module).
+    """
+
+    def __init__(self, logger, level):
+        threading.Thread.__init__(self)
+        self.daemon = True
+
+        self.logger = logger
+        self.level = level
+
+        # create the pipe and reader
+        self.fd_read, self.fd_write = os.pipe()
+        self.reader = os.fdopen(self.fd_read)
+
+        self.start()
+
+    def fileno(self):
+        return self.fd_write
+
+    @staticmethod
+    def remove_newline(msg):
+        return msg[:-1] if msg.endswith(os.linesep) else msg
+
+    def run(self):
+        for line in self.reader:
+            self._write(self.remove_newline(line))
+
+    def _write(self, message):
+        self.logger.log(self.level, message)
diff --git a/venv/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py b/venv/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py
new file mode 100644
index 00000000..a457ff27
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py
@@ -0,0 +1,3296 @@
+# coding: utf-8
+"""
+Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof.  The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is.  Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files.  It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+"""
+
+from __future__ import absolute_import
+
+import sys
+import os
+import io
+import time
+import re
+import types
+import zipfile
+import zipimport
+import warnings
+import stat
+import functools
+import pkgutil
+import operator
+import platform
+import collections
+import plistlib
+import email.parser
+import errno
+import tempfile
+import textwrap
+import itertools
+import inspect
+import ntpath
+import posixpath
+from pkgutil import get_importer
+
+try:
+    import _imp
+except ImportError:
+    # Python 3.2 compatibility
+    import imp as _imp
+
+try:
+    FileExistsError
+except NameError:
+    FileExistsError = OSError
+
+from pip._vendor import six
+from pip._vendor.six.moves import urllib, map, filter
+
+# capture these to bypass sandboxing
+from os import utime
+try:
+    from os import mkdir, rename, unlink
+    WRITE_SUPPORT = True
+except ImportError:
+    # no write support, probably under GAE
+    WRITE_SUPPORT = False
+
+from os import open as os_open
+from os.path import isdir, split
+
+try:
+    import importlib.machinery as importlib_machinery
+    # access attribute to force import under delayed import mechanisms.
+    importlib_machinery.__name__
+except ImportError:
+    importlib_machinery = None
+
+from . import py31compat
+from pip._vendor import appdirs
+from pip._vendor import packaging
+__import__('pip._vendor.packaging.version')
+__import__('pip._vendor.packaging.specifiers')
+__import__('pip._vendor.packaging.requirements')
+__import__('pip._vendor.packaging.markers')
+
+
+__metaclass__ = type
+
+
+if (3, 0) < sys.version_info < (3, 5):
+    raise RuntimeError("Python 3.5 or later is required")
+
+if six.PY2:
+    # Those builtin exceptions are only defined in Python 3
+    PermissionError = None
+    NotADirectoryError = None
+
+# declare some globals that will be defined later to
+# satisfy the linters.
+require = None
+working_set = None
+add_activation_listener = None
+resources_stream = None
+cleanup_resources = None
+resource_dir = None
+resource_stream = None
+set_extraction_path = None
+resource_isdir = None
+resource_string = None
+iter_entry_points = None
+resource_listdir = None
+resource_filename = None
+resource_exists = None
+_distribution_finders = None
+_namespace_handlers = None
+_namespace_packages = None
+
+
+class PEP440Warning(RuntimeWarning):
+    """
+    Used when there is an issue with a version or specifier not complying with
+    PEP 440.
+    """
+
+
+def parse_version(v):
+    try:
+        return packaging.version.Version(v)
+    except packaging.version.InvalidVersion:
+        return packaging.version.LegacyVersion(v)
+
+
+_state_vars = {}
+
+
+def _declare_state(vartype, **kw):
+    globals().update(kw)
+    _state_vars.update(dict.fromkeys(kw, vartype))
+
+
+def __getstate__():
+    state = {}
+    g = globals()
+    for k, v in _state_vars.items():
+        state[k] = g['_sget_' + v](g[k])
+    return state
+
+
+def __setstate__(state):
+    g = globals()
+    for k, v in state.items():
+        g['_sset_' + _state_vars[k]](k, g[k], v)
+    return state
+
+
+def _sget_dict(val):
+    return val.copy()
+
+
+def _sset_dict(key, ob, state):
+    ob.clear()
+    ob.update(state)
+
+
+def _sget_object(val):
+    return val.__getstate__()
+
+
+def _sset_object(key, ob, state):
+    ob.__setstate__(state)
+
+
+_sget_none = _sset_none = lambda *args: None
+
+
+def get_supported_platform():
+    """Return this platform's maximum compatible version.
+
+    distutils.util.get_platform() normally reports the minimum version
+    of Mac OS X that would be required to *use* extensions produced by
+    distutils.  But what we want when checking compatibility is to know the
+    version of Mac OS X that we are *running*.  To allow usage of packages that
+    explicitly require a newer version of Mac OS X, we must also know the
+    current version of the OS.
+
+    If this condition occurs for any other platform with a version in its
+    platform strings, this function should be extended accordingly.
+    """
+    plat = get_build_platform()
+    m = macosVersionString.match(plat)
+    if m is not None and sys.platform == "darwin":
+        try:
+            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
+        except ValueError:
+            # not Mac OS X
+            pass
+    return plat
+
+
+__all__ = [
+    # Basic resource access and distribution/entry point discovery
+    'require', 'run_script', 'get_provider', 'get_distribution',
+    'load_entry_point', 'get_entry_map', 'get_entry_info',
+    'iter_entry_points',
+    'resource_string', 'resource_stream', 'resource_filename',
+    'resource_listdir', 'resource_exists', 'resource_isdir',
+
+    # Environmental control
+    'declare_namespace', 'working_set', 'add_activation_listener',
+    'find_distributions', 'set_extraction_path', 'cleanup_resources',
+    'get_default_cache',
+
+    # Primary implementation classes
+    'Environment', 'WorkingSet', 'ResourceManager',
+    'Distribution', 'Requirement', 'EntryPoint',
+
+    # Exceptions
+    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
+    'UnknownExtra', 'ExtractionError',
+
+    # Warnings
+    'PEP440Warning',
+
+    # Parsing functions and string utilities
+    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
+    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
+    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
+
+    # filesystem utilities
+    'ensure_directory', 'normalize_path',
+
+    # Distribution "precedence" constants
+    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
+
+    # "Provider" interfaces, implementations, and registration/lookup APIs
+    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
+    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
+    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
+    'register_finder', 'register_namespace_handler', 'register_loader_type',
+    'fixup_namespace_packages', 'get_importer',
+
+    # Warnings
+    'PkgResourcesDeprecationWarning',
+
+    # Deprecated/backward compatibility only
+    'run_main', 'AvailableDistributions',
+]
+
+
+class ResolutionError(Exception):
+    """Abstract base for dependency resolution errors"""
+
+    def __repr__(self):
+        return self.__class__.__name__ + repr(self.args)
+
+
+class VersionConflict(ResolutionError):
+    """
+    An already-installed version conflicts with the requested version.
+
+    Should be initialized with the installed Distribution and the requested
+    Requirement.
+    """
+
+    _template = "{self.dist} is installed but {self.req} is required"
+
+    @property
+    def dist(self):
+        return self.args[0]
+
+    @property
+    def req(self):
+        return self.args[1]
+
+    def report(self):
+        return self._template.format(**locals())
+
+    def with_context(self, required_by):
+        """
+        If required_by is non-empty, return a version of self that is a
+        ContextualVersionConflict.
+        """
+        if not required_by:
+            return self
+        args = self.args + (required_by,)
+        return ContextualVersionConflict(*args)
+
+
+class ContextualVersionConflict(VersionConflict):
+    """
+    A VersionConflict that accepts a third parameter, the set of the
+    requirements that required the installed Distribution.
+    """
+
+    _template = VersionConflict._template + ' by {self.required_by}'
+
+    @property
+    def required_by(self):
+        return self.args[2]
+
+
+class DistributionNotFound(ResolutionError):
+    """A requested distribution was not found"""
+
+    _template = ("The '{self.req}' distribution was not found "
+                 "and is required by {self.requirers_str}")
+
+    @property
+    def req(self):
+        return self.args[0]
+
+    @property
+    def requirers(self):
+        return self.args[1]
+
+    @property
+    def requirers_str(self):
+        if not self.requirers:
+            return 'the application'
+        return ', '.join(self.requirers)
+
+    def report(self):
+        return self._template.format(**locals())
+
+    def __str__(self):
+        return self.report()
+
+
+class UnknownExtra(ResolutionError):
+    """Distribution doesn't have an "extra feature" of the given name"""
+
+
+_provider_factories = {}
+
+PY_MAJOR = '{}.{}'.format(*sys.version_info)
+EGG_DIST = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+
+def register_loader_type(loader_type, provider_factory):
+    """Register `provider_factory` to make providers for `loader_type`
+
+    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+    and `provider_factory` is a function that, passed a *module* object,
+    returns an ``IResourceProvider`` for that module.
+    """
+    _provider_factories[loader_type] = provider_factory
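+
+# Illustrative note (not part of the upstream pkg_resources source): this
+# module uses the same mechanism for its own providers further down, e.g.
+# ``register_loader_type(object, NullProvider)`` and the importlib loaders
+# registered by ``DefaultProvider._register()``; a custom PEP 302 loader
+# would register a matching IResourceProvider factory the same way.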
+
+
+def get_provider(moduleOrReq):
+    """Return an IResourceProvider for the named module or requirement"""
+    if isinstance(moduleOrReq, Requirement):
+        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+    try:
+        module = sys.modules[moduleOrReq]
+    except KeyError:
+        __import__(moduleOrReq)
+        module = sys.modules[moduleOrReq]
+    loader = getattr(module, '__loader__', None)
+    return _find_adapter(_provider_factories, loader)(module)
+
+
+def _macosx_vers(_cache=[]):
+    if not _cache:
+        version = platform.mac_ver()[0]
+        # fallback for MacPorts
+        if version == '':
+            plist = '/System/Library/CoreServices/SystemVersion.plist'
+            if os.path.exists(plist):
+                if hasattr(plistlib, 'readPlist'):
+                    plist_content = plistlib.readPlist(plist)
+                    if 'ProductVersion' in plist_content:
+                        version = plist_content['ProductVersion']
+
+        _cache.append(version.split('.'))
+    return _cache[0]
+
+
+def _macosx_arch(machine):
+    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
+
+
+def get_build_platform():
+    """Return this platform's string for platform-specific distributions
+
+    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
+    needs some hacks for Linux and Mac OS X.
+    """
+    from sysconfig import get_platform
+
+    plat = get_platform()
+    if sys.platform == "darwin" and not plat.startswith('macosx-'):
+        try:
+            version = _macosx_vers()
+            machine = os.uname()[4].replace(" ", "_")
+            return "macosx-%d.%d-%s" % (
+                int(version[0]), int(version[1]),
+                _macosx_arch(machine),
+            )
+        except ValueError:
+            # if someone is running a non-Mac darwin system, this will fall
+            # through to the default implementation
+            pass
+    return plat
+
+
+macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
+darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
+# XXX backward compat
+get_platform = get_build_platform
+
+
+def compatible_platforms(provided, required):
+    """Can code for the `provided` platform run on the `required` platform?
+
+    Returns true if either platform is ``None``, or the platforms are equal.
+
+    XXX Needs compatibility checks for Linux and other unixy OSes.
+    """
+    if provided is None or required is None or provided == required:
+        # easy case
+        return True
+
+    # Mac OS X special cases
+    reqMac = macosVersionString.match(required)
+    if reqMac:
+        provMac = macosVersionString.match(provided)
+
+        # is this a Mac package?
+        if not provMac:
+            # this is backwards compatibility for packages built before
+            # setuptools 0.6. All packages built after this point will
+            # use the new macosx designation.
+            provDarwin = darwinVersionString.match(provided)
+            if provDarwin:
+                dversion = int(provDarwin.group(1))
+                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+                if dversion == 7 and macosversion >= "10.3" or \
+                        dversion == 8 and macosversion >= "10.4":
+                    return True
+            # egg isn't macosx or legacy darwin
+            return False
+
+        # are they the same major version and machine type?
+        if provMac.group(1) != reqMac.group(1) or \
+                provMac.group(3) != reqMac.group(3):
+            return False
+
+        # is the required OS major update >= the provided one?
+        if int(provMac.group(2)) > int(reqMac.group(2)):
+            return False
+
+        return True
+
+    # XXX Linux and other platforms' special cases should go here
+    return False
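+
+# Illustrative examples (not part of the upstream source), per the checks above:
+#   compatible_platforms('macosx-10.9-x86_64', 'macosx-10.15-x86_64')  # True
+#   compatible_platforms('macosx-10.9-x86_64', 'macosx-10.9-arm64')    # False
+#   compatible_platforms(None, 'win-amd64')                            # True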
+
+
+def run_script(dist_spec, script_name):
+    """Locate distribution `dist_spec` and run its `script_name` script"""
+    ns = sys._getframe(1).f_globals
+    name = ns['__name__']
+    ns.clear()
+    ns['__name__'] = name
+    require(dist_spec)[0].run_script(script_name, ns)
+
+
+# backward compatibility
+run_main = run_script
+
+
+def get_distribution(dist):
+    """Return a current distribution object for a Requirement or string"""
+    if isinstance(dist, six.string_types):
+        dist = Requirement.parse(dist)
+    if isinstance(dist, Requirement):
+        dist = get_provider(dist)
+    if not isinstance(dist, Distribution):
+        raise TypeError("Expected string, Requirement, or Distribution", dist)
+    return dist
+
+
+def load_entry_point(dist, group, name):
+    """Return `name` entry point of `group` for `dist` or raise ImportError"""
+    return get_distribution(dist).load_entry_point(group, name)
+
+
+def get_entry_map(dist, group=None):
+    """Return the entry point map for `group`, or the full entry map"""
+    return get_distribution(dist).get_entry_map(group)
+
+
+def get_entry_info(dist, group, name):
+    """Return the EntryPoint object for `group`+`name`, or ``None``"""
+    return get_distribution(dist).get_entry_info(group, name)
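+
+# Illustrative usage (not part of the upstream source), assuming pip is
+# installed and exposes its usual 'pip' console-script entry point:
+#   get_distribution('pip')                            # -> Distribution object
+#   load_entry_point('pip', 'console_scripts', 'pip')  # -> the script's callable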
+
+
+class IMetadataProvider:
+    def has_metadata(name):
+        """Does the package's distribution contain the named metadata?"""
+
+    def get_metadata(name):
+        """The named metadata resource as a string"""
+
+    def get_metadata_lines(name):
+        """Yield named metadata resource as list of non-blank non-comment lines
+
+       Leading and trailing whitespace is stripped from each line, and lines
+       with ``#`` as the first non-blank character are omitted."""
+
+    def metadata_isdir(name):
+        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
+
+    def metadata_listdir(name):
+        """List of metadata names in the directory (like ``os.listdir()``)"""
+
+    def run_script(script_name, namespace):
+        """Execute the named script in the supplied namespace dictionary"""
+
+
+class IResourceProvider(IMetadataProvider):
+    """An object that provides access to package resources"""
+
+    def get_resource_filename(manager, resource_name):
+        """Return a true filesystem path for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_stream(manager, resource_name):
+        """Return a readable file-like object for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_string(manager, resource_name):
+        """Return a string containing the contents of `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def has_resource(resource_name):
+        """Does the package contain the named resource?"""
+
+    def resource_isdir(resource_name):
+        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
+
+    def resource_listdir(resource_name):
+        """List of resource names in the directory (like ``os.listdir()``)"""
+
+
+class WorkingSet:
+    """A collection of active distributions on sys.path (or a similar list)"""
+
+    def __init__(self, entries=None):
+        """Create working set from list of path entries (default=sys.path)"""
+        self.entries = []
+        self.entry_keys = {}
+        self.by_key = {}
+        self.callbacks = []
+
+        if entries is None:
+            entries = sys.path
+
+        for entry in entries:
+            self.add_entry(entry)
+
+    @classmethod
+    def _build_master(cls):
+        """
+        Prepare the master working set.
+        """
+        ws = cls()
+        try:
+            from __main__ import __requires__
+        except ImportError:
+            # The main program does not list any requirements
+            return ws
+
+        # ensure the requirements are met
+        try:
+            ws.require(__requires__)
+        except VersionConflict:
+            return cls._build_from_requirements(__requires__)
+
+        return ws
+
+    @classmethod
+    def _build_from_requirements(cls, req_spec):
+        """
+        Build a working set from a requirement spec. Rewrites sys.path.
+        """
+        # try it without defaults already on sys.path
+        # by starting with an empty path
+        ws = cls([])
+        reqs = parse_requirements(req_spec)
+        dists = ws.resolve(reqs, Environment())
+        for dist in dists:
+            ws.add(dist)
+
+        # add any missing entries from sys.path
+        for entry in sys.path:
+            if entry not in ws.entries:
+                ws.add_entry(entry)
+
+        # then copy back to sys.path
+        sys.path[:] = ws.entries
+        return ws
+
+    def add_entry(self, entry):
+        """Add a path item to ``.entries``, finding any distributions on it
+
+        ``find_distributions(entry, True)`` is used to find distributions
+        corresponding to the path entry, and they are added.  `entry` is
+        always appended to ``.entries``, even if it is already present.
+        (This is because ``sys.path`` can contain the same value more than
+        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
+        equal ``sys.path``.)
+        """
+        self.entry_keys.setdefault(entry, [])
+        self.entries.append(entry)
+        for dist in find_distributions(entry, True):
+            self.add(dist, entry, False)
+
+    def __contains__(self, dist):
+        """True if `dist` is the active distribution for its project"""
+        return self.by_key.get(dist.key) == dist
+
+    def find(self, req):
+        """Find a distribution matching requirement `req`
+
+        If there is an active distribution for the requested project, this
+        returns it as long as it meets the version requirement specified by
+        `req`.  But, if there is an active distribution for the project and it
+        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+        If there is no active distribution for the requested project, ``None``
+        is returned.
+        """
+        dist = self.by_key.get(req.key)
+        if dist is not None and dist not in req:
+            # XXX add more info
+            raise VersionConflict(dist, req)
+        return dist
+
+    def iter_entry_points(self, group, name=None):
+        """Yield entry point objects from `group` matching `name`
+
+        If `name` is None, yields all entry points in `group` from all
+        distributions in the working set, otherwise only ones matching
+        both `group` and `name` are yielded (in distribution order).
+        """
+        return (
+            entry
+            for dist in self
+            for entry in dist.get_entry_map(group).values()
+            if name is None or name == entry.name
+        )
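+    # Illustrative usage (not part of the upstream source), assuming the
+    # yielded EntryPoint objects expose the usual ``load()`` method:
+    #
+    #     for ep in working_set.iter_entry_points('console_scripts'):
+    #         main = ep.load()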
+
+    def run_script(self, requires, script_name):
+        """Locate distribution for `requires` and run `script_name` script"""
+        ns = sys._getframe(1).f_globals
+        name = ns['__name__']
+        ns.clear()
+        ns['__name__'] = name
+        self.require(requires)[0].run_script(script_name, ns)
+
+    def __iter__(self):
+        """Yield distributions for non-duplicate projects in the working set
+
+        The yield order is the order in which the items' path entries were
+        added to the working set.
+        """
+        seen = {}
+        for item in self.entries:
+            if item not in self.entry_keys:
+                # workaround a cache issue
+                continue
+
+            for key in self.entry_keys[item]:
+                if key not in seen:
+                    seen[key] = 1
+                    yield self.by_key[key]
+
+    def add(self, dist, entry=None, insert=True, replace=False):
+        """Add `dist` to working set, associated with `entry`
+
+        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
+        On exit from this routine, `entry` is added to the end of the working
+        set's ``.entries`` (if it wasn't already present).
+
+        `dist` is only added to the working set if it's for a project that
+        doesn't already have a distribution in the set, unless `replace=True`.
+        If it's added, any callbacks registered with the ``subscribe()`` method
+        will be called.
+        """
+        if insert:
+            dist.insert_on(self.entries, entry, replace=replace)
+
+        if entry is None:
+            entry = dist.location
+        keys = self.entry_keys.setdefault(entry, [])
+        keys2 = self.entry_keys.setdefault(dist.location, [])
+        if not replace and dist.key in self.by_key:
+            # ignore hidden distros
+            return
+
+        self.by_key[dist.key] = dist
+        if dist.key not in keys:
+            keys.append(dist.key)
+        if dist.key not in keys2:
+            keys2.append(dist.key)
+        self._added_new(dist)
+
+    def resolve(self, requirements, env=None, installer=None,
+                replace_conflicting=False, extras=None):
+        """List all distributions needed to (recursively) meet `requirements`
+
+        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
+        if supplied, should be an ``Environment`` instance.  If
+        not supplied, it defaults to all distributions available within any
+        entry or distribution in the working set.  `installer`, if supplied,
+        will be invoked with each requirement that cannot be met by an
+        already-installed distribution; it should return a ``Distribution`` or
+        ``None``.
+
+        Unless `replace_conflicting=True`, raises a VersionConflict exception
+        if any requirements are found on the path that have the correct name
+        but the wrong version.  Otherwise, if an `installer` is supplied it
+        will be invoked to obtain the correct version of the requirement and
+        activate it.
+
+        `extras` is a list of the extras to be used with these requirements.
+        This is important because extra requirements may look like `my_req;
+        extra = "my_extra"`, which would otherwise be interpreted as a purely
+        optional requirement.  Instead, we want to be able to assert that these
+        requirements are truly required.
+        """
+
+        # set up the stack
+        requirements = list(requirements)[::-1]
+        # set of processed requirements
+        processed = {}
+        # key -> dist
+        best = {}
+        to_activate = []
+
+        req_extras = _ReqExtras()
+
+        # Mapping of requirement to set of distributions that required it;
+        # useful for reporting info about conflicts.
+        required_by = collections.defaultdict(set)
+
+        while requirements:
+            # process dependencies breadth-first
+            req = requirements.pop(0)
+            if req in processed:
+                # Ignore cyclic or redundant dependencies
+                continue
+
+            if not req_extras.markers_pass(req, extras):
+                continue
+
+            dist = best.get(req.key)
+            if dist is None:
+                # Find the best distribution and add it to the map
+                dist = self.by_key.get(req.key)
+                if dist is None or (dist not in req and replace_conflicting):
+                    ws = self
+                    if env is None:
+                        if dist is None:
+                            env = Environment(self.entries)
+                        else:
+                            # Use an empty environment and workingset to avoid
+                            # any further conflicts with the conflicting
+                            # distribution
+                            env = Environment([])
+                            ws = WorkingSet([])
+                    dist = best[req.key] = env.best_match(
+                        req, ws, installer,
+                        replace_conflicting=replace_conflicting
+                    )
+                    if dist is None:
+                        requirers = required_by.get(req, None)
+                        raise DistributionNotFound(req, requirers)
+                to_activate.append(dist)
+            if dist not in req:
+                # Oops, the "best" so far conflicts with a dependency
+                dependent_req = required_by[req]
+                raise VersionConflict(dist, req).with_context(dependent_req)
+
+            # push the new requirements onto the stack
+            new_requirements = dist.requires(req.extras)[::-1]
+            requirements.extend(new_requirements)
+
+            # Register the new requirements needed by req
+            for new_requirement in new_requirements:
+                required_by[new_requirement].add(req.project_name)
+                req_extras[new_requirement] = req.extras
+
+            processed[req] = True
+
+        # return list of distros to activate
+        return to_activate
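+    # Illustrative usage (not part of the upstream source):
+    #
+    #     needed = working_set.resolve(parse_requirements(['setuptools >= 26']))
+    #     for dist in needed:
+    #         working_set.add(dist)
+    #
+    # which is essentially what ``require()`` further down does.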
+
+    def find_plugins(
+            self, plugin_env, full_env=None, installer=None, fallback=True):
+        """Find all activatable distributions in `plugin_env`
+
+        Example usage::
+
+            distributions, errors = working_set.find_plugins(
+                Environment(plugin_dirlist)
+            )
+            # add plugins+libs to sys.path
+            map(working_set.add, distributions)
+            # display errors
+            print('Could not load', errors)
+
+        The `plugin_env` should be an ``Environment`` instance that contains
+        only distributions that are in the project's "plugin directory" or
+        directories. The `full_env`, if supplied, should be an ``Environment``
+        that contains all currently-available distributions.  If `full_env` is
+        not supplied, one is created automatically from the ``WorkingSet`` this
+        method is called on, which will typically mean that every directory on
+        ``sys.path`` will be scanned for distributions.
+
+        `installer` is a standard installer callback as used by the
+        ``resolve()`` method. The `fallback` flag indicates whether we should
+        attempt to resolve older versions of a plugin if the newest version
+        cannot be resolved.
+
+        This method returns a 2-tuple: (`distributions`, `error_info`), where
+        `distributions` is a list of the distributions found in `plugin_env`
+        that were loadable, along with any other distributions that are needed
+        to resolve their dependencies.  `error_info` is a dictionary mapping
+        unloadable plugin distributions to an exception instance describing the
+        error that occurred. Usually this will be a ``DistributionNotFound`` or
+        ``VersionConflict`` instance.
+        """
+
+        plugin_projects = list(plugin_env)
+        # scan project names in alphabetic order
+        plugin_projects.sort()
+
+        error_info = {}
+        distributions = {}
+
+        if full_env is None:
+            env = Environment(self.entries)
+            env += plugin_env
+        else:
+            env = full_env + plugin_env
+
+        shadow_set = self.__class__([])
+        # put all our entries in shadow_set
+        list(map(shadow_set.add, self))
+
+        for project_name in plugin_projects:
+
+            for dist in plugin_env[project_name]:
+
+                req = [dist.as_requirement()]
+
+                try:
+                    resolvees = shadow_set.resolve(req, env, installer)
+
+                except ResolutionError as v:
+                    # save error info
+                    error_info[dist] = v
+                    if fallback:
+                        # try the next older version of project
+                        continue
+                    else:
+                        # give up on this project, keep going
+                        break
+
+                else:
+                    list(map(shadow_set.add, resolvees))
+                    distributions.update(dict.fromkeys(resolvees))
+
+                    # success, no need to try any more versions of this project
+                    break
+
+        distributions = list(distributions)
+        distributions.sort()
+
+        return distributions, error_info
+
+    def require(self, *requirements):
+        """Ensure that distributions matching `requirements` are activated
+
+        `requirements` must be a string or a (possibly-nested) sequence
+        thereof, specifying the distributions and versions required.  The
+        return value is a sequence of the distributions that needed to be
+        activated to fulfill the requirements; all relevant distributions are
+        included, even if they were already activated in this working set.
+        """
+        needed = self.resolve(parse_requirements(requirements))
+
+        for dist in needed:
+            self.add(dist)
+
+        return needed
+
+    def subscribe(self, callback, existing=True):
+        """Invoke `callback` for all distributions
+
+        If `existing=True` (default),
+        call on all existing ones, as well.
+        """
+        if callback in self.callbacks:
+            return
+        self.callbacks.append(callback)
+        if not existing:
+            return
+        for dist in self:
+            callback(dist)
+
+    def _added_new(self, dist):
+        for callback in self.callbacks:
+            callback(dist)
+
+    def __getstate__(self):
+        return (
+            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
+            self.callbacks[:]
+        )
+
+    def __setstate__(self, e_k_b_c):
+        entries, keys, by_key, callbacks = e_k_b_c
+        self.entries = entries[:]
+        self.entry_keys = keys.copy()
+        self.by_key = by_key.copy()
+        self.callbacks = callbacks[:]
+
+
+class _ReqExtras(dict):
+    """
+    Map each requirement to the extras that demanded it.
+    """
+
+    def markers_pass(self, req, extras=None):
+        """
+        Evaluate markers for req against each extra that
+        demanded it.
+
+        Return False if the req has a marker and fails
+        evaluation. Otherwise, return True.
+        """
+        extra_evals = (
+            req.marker.evaluate({'extra': extra})
+            for extra in self.get(req, ()) + (extras or (None,))
+        )
+        return not req.marker or any(extra_evals)
+
+
+class Environment:
+    """Searchable snapshot of distributions on a search path"""
+
+    def __init__(
+            self, search_path=None, platform=get_supported_platform(),
+            python=PY_MAJOR):
+        """Snapshot distributions available on a search path
+
+        Any distributions found on `search_path` are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.
+
+        `platform` is an optional string specifying the name of the platform
+        that platform-specific distributions must be compatible with.  If
+        unspecified, it defaults to the current platform.  `python` is an
+        optional string naming the desired version of Python (e.g. ``'3.6'``);
+        it defaults to the current version.
+
+        You may explicitly set `platform` (and/or `python`) to ``None`` if you
+        wish to map *all* distributions, not just those compatible with the
+        running platform or Python version.
+        """
+        self._distmap = {}
+        self.platform = platform
+        self.python = python
+        self.scan(search_path)
+
+    def can_add(self, dist):
+        """Is distribution `dist` acceptable for this environment?
+
+        The distribution must match the platform and python version
+        requirements specified when this environment was created, or False
+        is returned.
+        """
+        py_compat = (
+            self.python is None
+            or dist.py_version is None
+            or dist.py_version == self.python
+        )
+        return py_compat and compatible_platforms(dist.platform, self.platform)
+
+    def remove(self, dist):
+        """Remove `dist` from the environment"""
+        self._distmap[dist.key].remove(dist)
+
+    def scan(self, search_path=None):
+        """Scan `search_path` for distributions usable in this environment
+
+        Any distributions found are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.  Only distributions conforming to
+        the platform/python version defined at initialization are added.
+        """
+        if search_path is None:
+            search_path = sys.path
+
+        for item in search_path:
+            for dist in find_distributions(item):
+                self.add(dist)
+
+    def __getitem__(self, project_name):
+        """Return a newest-to-oldest list of distributions for `project_name`
+
+        Uses case-insensitive `project_name` comparison, assuming all the
+        project's distributions use their project's name converted to all
+        lowercase as their key.
+
+        """
+        distribution_key = project_name.lower()
+        return self._distmap.get(distribution_key, [])
+
+    def add(self, dist):
+        """Add `dist` if we ``can_add()`` it and it has not already been added
+        """
+        if self.can_add(dist) and dist.has_version():
+            dists = self._distmap.setdefault(dist.key, [])
+            if dist not in dists:
+                dists.append(dist)
+                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
+
+    def best_match(
+            self, req, working_set, installer=None, replace_conflicting=False):
+        """Find distribution best matching `req` and usable on `working_set`
+
+        This calls the ``find(req)`` method of the `working_set` to see if a
+        suitable distribution is already active.  (This may raise
+        ``VersionConflict`` if an unsuitable version of the project is already
+        active in the specified `working_set`.)  If a suitable distribution
+        isn't active, this method returns the newest distribution in the
+        environment that meets the ``Requirement`` in `req`.  If no suitable
+        distribution is found, and `installer` is supplied, then the result of
+        calling the environment's ``obtain(req, installer)`` method will be
+        returned.
+        """
+        try:
+            dist = working_set.find(req)
+        except VersionConflict:
+            if not replace_conflicting:
+                raise
+            dist = None
+        if dist is not None:
+            return dist
+        for dist in self[req.key]:
+            if dist in req:
+                return dist
+        # try to download/install
+        return self.obtain(req, installer)
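+    # Note (not part of the upstream source): the lookup order above is
+    # (1) a distribution already active in `working_set`, (2) the newest
+    # compatible distribution known to this Environment, and finally
+    # (3) ``obtain(req, installer)``, which returns None when no installer
+    # is supplied.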
+
+    def obtain(self, requirement, installer=None):
+        """Obtain a distribution matching `requirement` (e.g. via download)
+
+        Obtain a distro that matches requirement (e.g. via download).  In the
+        base ``Environment`` class, this routine just returns
+        ``installer(requirement)``, unless `installer` is None, in which case
+        None is returned instead.  This method is a hook that allows subclasses
+        to attempt other ways of obtaining a distribution before falling back
+        to the `installer` argument."""
+        if installer is not None:
+            return installer(requirement)
+
+    def __iter__(self):
+        """Yield the unique project names of the available distributions"""
+        for key in self._distmap.keys():
+            if self[key]:
+                yield key
+
+    def __iadd__(self, other):
+        """In-place addition of a distribution or environment"""
+        if isinstance(other, Distribution):
+            self.add(other)
+        elif isinstance(other, Environment):
+            for project in other:
+                for dist in other[project]:
+                    self.add(dist)
+        else:
+            raise TypeError("Can't add %r to environment" % (other,))
+        return self
+
+    def __add__(self, other):
+        """Add an environment or distribution to an environment"""
+        new = self.__class__([], platform=None, python=None)
+        for env in self, other:
+            new += env
+        return new
+
+
+# XXX backward compatibility
+AvailableDistributions = Environment
+
+
+class ExtractionError(RuntimeError):
+    """An error occurred extracting a resource
+
+    The following attributes are available from instances of this exception:
+
+    manager
+        The resource manager that raised this exception
+
+    cache_path
+        The base directory for resource extraction
+
+    original_error
+        The exception instance that caused extraction to fail
+    """
+
+
+class ResourceManager:
+    """Manage resource extraction and packages"""
+    extraction_path = None
+
+    def __init__(self):
+        self.cached_files = {}
+
+    def resource_exists(self, package_or_requirement, resource_name):
+        """Does the named resource exist?"""
+        return get_provider(package_or_requirement).has_resource(resource_name)
+
+    def resource_isdir(self, package_or_requirement, resource_name):
+        """Is the named resource an existing directory?"""
+        return get_provider(package_or_requirement).resource_isdir(
+            resource_name
+        )
+
+    def resource_filename(self, package_or_requirement, resource_name):
+        """Return a true filesystem path for specified resource"""
+        return get_provider(package_or_requirement).get_resource_filename(
+            self, resource_name
+        )
+
+    def resource_stream(self, package_or_requirement, resource_name):
+        """Return a readable file-like object for specified resource"""
+        return get_provider(package_or_requirement).get_resource_stream(
+            self, resource_name
+        )
+
+    def resource_string(self, package_or_requirement, resource_name):
+        """Return specified resource as a string"""
+        return get_provider(package_or_requirement).get_resource_string(
+            self, resource_name
+        )
+
+    def resource_listdir(self, package_or_requirement, resource_name):
+        """List the contents of the named resource directory"""
+        return get_provider(package_or_requirement).resource_listdir(
+            resource_name
+        )
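+    # Illustrative usage (not part of the upstream source): the module-level
+    # helpers declared near the top of this file (resource_string,
+    # resource_filename, ...) are bound to a shared ResourceManager instance
+    # later in the module (not shown in this hunk), so client code typically
+    # reads a packaged file as:
+    #
+    #     data = resource_string(__name__, 'data/template.html')  # hypothetical resource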
+
+    def extraction_error(self):
+        """Give an error message for problems extracting file(s)"""
+
+        old_exc = sys.exc_info()[1]
+        cache_path = self.extraction_path or get_default_cache()
+
+        tmpl = textwrap.dedent("""
+            Can't extract file(s) to egg cache
+
+            The following error occurred while trying to extract file(s)
+            to the Python egg cache:
+
+              {old_exc}
+
+            The Python egg cache directory is currently set to:
+
+              {cache_path}
+
+            Perhaps your account does not have write access to this directory?
+            You can change the cache directory by setting the PYTHON_EGG_CACHE
+            environment variable to point to an accessible directory.
+            """).lstrip()
+        err = ExtractionError(tmpl.format(**locals()))
+        err.manager = self
+        err.cache_path = cache_path
+        err.original_error = old_exc
+        raise err
+
+    def get_cache_path(self, archive_name, names=()):
+        """Return absolute location in cache for `archive_name` and `names`
+
+        The parent directory of the resulting path will be created if it does
+        not already exist.  `archive_name` should be the base filename of the
+        enclosing egg (which may not be the name of the enclosing zipfile!),
+        including its ".egg" extension.  `names`, if provided, should be a
+        sequence of path name parts "under" the egg's extraction location.
+
+        This method should only be called by resource providers that need to
+        obtain an extraction location, and only for names they intend to
+        extract, as it tracks the generated names for possible cleanup later.
+        """
+        extract_path = self.extraction_path or get_default_cache()
+        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
+        try:
+            _bypass_ensure_directory(target_path)
+        except Exception:
+            self.extraction_error()
+
+        self._warn_unsafe_extraction_path(extract_path)
+
+        self.cached_files[target_path] = 1
+        return target_path
+
+    @staticmethod
+    def _warn_unsafe_extraction_path(path):
+        """
+        If the default extraction path is overridden and set to an insecure
+        location, such as /tmp, it opens up an opportunity for an attacker to
+        replace an extracted file with an unauthorized payload. Warn the user
+        if a known insecure location is used.
+
+        See Distribute #375 for more details.
+        """
+        if os.name == 'nt' and not path.startswith(os.environ['windir']):
+            # On Windows, permissions are generally restrictive by default
+            #  and temp directories are not writable by other users, so
+            #  bypass the warning.
+            return
+        mode = os.stat(path).st_mode
+        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
+            msg = (
+                "%s is writable by group/others and vulnerable to attack "
+                "when "
+                "used with get_resource_filename. Consider a more secure "
+                "location (set with .set_extraction_path or the "
+                "PYTHON_EGG_CACHE environment variable)." % path
+            )
+            warnings.warn(msg, UserWarning)
+
+    def postprocess(self, tempname, filename):
+        """Perform any platform-specific postprocessing of `tempname`
+
+        This is where Mac header rewrites should be done; other platforms don't
+        have anything special they should do.
+
+        Resource providers should call this method ONLY after successfully
+        extracting a compressed resource.  They must NOT call it on resources
+        that are already in the filesystem.
+
+        `tempname` is the current (temporary) name of the file, and `filename`
+        is the name it will be renamed to by the caller after this routine
+        returns.
+        """
+
+        if os.name == 'posix':
+            # Make the resource executable
+            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
+            os.chmod(tempname, mode)
+
+    def set_extraction_path(self, path):
+        """Set the base path where resources will be extracted to, if needed.
+
+        If you do not call this routine before any extractions take place, the
+        path defaults to the return value of ``get_default_cache()``.  (Which
+        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
+        platform-specific fallbacks.  See that routine's documentation for more
+        details.)
+
+        Resources are extracted to subdirectories of this path based upon
+        information given by the ``IResourceProvider``.  You may set this to a
+        temporary directory, but then you must call ``cleanup_resources()`` to
+        delete the extracted files when done.  There is no guarantee that
+        ``cleanup_resources()`` will be able to remove all extracted files.
+
+        (Note: you may not change the extraction path for a given resource
+        manager once resources have been extracted, unless you first call
+        ``cleanup_resources()``.)
+        """
+        if self.cached_files:
+            raise ValueError(
+                "Can't change extraction path, files already extracted"
+            )
+
+        self.extraction_path = path
+
+    def cleanup_resources(self, force=False):
+        """
+        Delete all extracted resource files and directories, returning a list
+        of the file and directory names that could not be successfully removed.
+        This function does not have any concurrency protection, so it should
+        generally only be called when the extraction path is a temporary
+        directory exclusive to a single process.  This method is not
+        automatically called; you must call it explicitly or register it as an
+        ``atexit`` function if you wish to ensure cleanup of a temporary
+        directory used for extractions.
+        """
+        # XXX
+
+
+def get_default_cache():
+    """
+    Return the ``PYTHON_EGG_CACHE`` environment variable
+    or a platform-relevant user cache dir for an app
+    named "Python-Eggs".
+    """
+    return (
+        os.environ.get('PYTHON_EGG_CACHE')
+        or appdirs.user_cache_dir(appname='Python-Eggs')
+    )
+
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """
+    Convert an arbitrary string to a standard version string
+    """
+    try:
+        # normalize the version
+        return str(packaging.version.Version(version))
+    except packaging.version.InvalidVersion:
+        version = version.replace(' ', '.')
+        return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+    """
+    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-', '_')
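+
+# Illustrative examples (not part of the upstream source), following the
+# substitutions defined above:
+#   safe_name('my lib!')        -> 'my-lib-'
+#   safe_version('1.0 beta 2')  -> '1.0.beta.2'
+#   safe_extra('Dev Tools')     -> 'dev_tools'
+#   to_filename('my-lib')       -> 'my_lib'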
+
+
+def invalid_marker(text):
+    """
+    Validate text as a PEP 508 environment marker; return an exception
+    if invalid or False otherwise.
+    """
+    try:
+        evaluate_marker(text)
+    except SyntaxError as e:
+        e.filename = None
+        e.lineno = None
+        return e
+    return False
+
+
+def evaluate_marker(text, extra=None):
+    """
+    Evaluate a PEP 508 environment marker.
+    Return a boolean indicating the marker result in this environment.
+    Raise SyntaxError if marker is invalid.
+
+    This implementation uses the 'pyparsing' module.
+    """
+    try:
+        marker = packaging.markers.Marker(text)
+        return marker.evaluate()
+    except packaging.markers.InvalidMarker as e:
+        raise SyntaxError(e)
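+
+# Illustrative examples (not part of the upstream source):
+#   evaluate_marker('python_version >= "2.7"')   # True on this interpreter
+#   invalid_marker('spam == "eggs"')             # returns a SyntaxError instance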
+
+
+class NullProvider:
+    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
+
+    egg_name = None
+    egg_info = None
+    loader = None
+
+    def __init__(self, module):
+        self.loader = getattr(module, '__loader__', None)
+        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
+
+    def get_resource_filename(self, manager, resource_name):
+        return self._fn(self.module_path, resource_name)
+
+    def get_resource_stream(self, manager, resource_name):
+        return io.BytesIO(self.get_resource_string(manager, resource_name))
+
+    def get_resource_string(self, manager, resource_name):
+        return self._get(self._fn(self.module_path, resource_name))
+
+    def has_resource(self, resource_name):
+        return self._has(self._fn(self.module_path, resource_name))
+
+    def _get_metadata_path(self, name):
+        return self._fn(self.egg_info, name)
+
+    def has_metadata(self, name):
+        if not self.egg_info:
+            return self.egg_info
+
+        path = self._get_metadata_path(name)
+        return self._has(path)
+
+    def get_metadata(self, name):
+        if not self.egg_info:
+            return ""
+        path = self._get_metadata_path(name)
+        value = self._get(path)
+        if six.PY2:
+            return value
+        try:
+            return value.decode('utf-8')
+        except UnicodeDecodeError as exc:
+            # Include the path in the error message to simplify
+            # troubleshooting, and without changing the exception type.
+            exc.reason += ' in {} file at path: {}'.format(name, path)
+            raise
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+    def resource_isdir(self, resource_name):
+        return self._isdir(self._fn(self.module_path, resource_name))
+
+    def metadata_isdir(self, name):
+        return self.egg_info and self._isdir(self._fn(self.egg_info, name))
+
+    def resource_listdir(self, resource_name):
+        return self._listdir(self._fn(self.module_path, resource_name))
+
+    def metadata_listdir(self, name):
+        if self.egg_info:
+            return self._listdir(self._fn(self.egg_info, name))
+        return []
+
+    def run_script(self, script_name, namespace):
+        script = 'scripts/' + script_name
+        if not self.has_metadata(script):
+            raise ResolutionError(
+                "Script {script!r} not found in metadata at {self.egg_info!r}"
+                .format(**locals()),
+            )
+        script_text = self.get_metadata(script).replace('\r\n', '\n')
+        script_text = script_text.replace('\r', '\n')
+        script_filename = self._fn(self.egg_info, script)
+        namespace['__file__'] = script_filename
+        if os.path.exists(script_filename):
+            source = open(script_filename).read()
+            code = compile(source, script_filename, 'exec')
+            exec(code, namespace, namespace)
+        else:
+            from linecache import cache
+            cache[script_filename] = (
+                len(script_text), 0, script_text.split('\n'), script_filename
+            )
+            script_code = compile(script_text, script_filename, 'exec')
+            exec(script_code, namespace, namespace)
+
+    def _has(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _isdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _listdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _fn(self, base, resource_name):
+        self._validate_resource_path(resource_name)
+        if resource_name:
+            return os.path.join(base, *resource_name.split('/'))
+        return base
+
+    @staticmethod
+    def _validate_resource_path(path):
+        """
+        Validate the resource paths according to the docs.
+        https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access
+
+        >>> warned = getfixture('recwarn')
+        >>> warnings.simplefilter('always')
+        >>> vrp = NullProvider._validate_resource_path
+        >>> vrp('foo/bar.txt')
+        >>> bool(warned)
+        False
+        >>> vrp('../foo/bar.txt')
+        >>> bool(warned)
+        True
+        >>> warned.clear()
+        >>> vrp('/foo/bar.txt')
+        >>> bool(warned)
+        True
+        >>> vrp('foo/../../bar.txt')
+        >>> bool(warned)
+        True
+        >>> warned.clear()
+        >>> vrp('foo/f../bar.txt')
+        >>> bool(warned)
+        False
+
+        Windows path separators are straight-up disallowed.
+        >>> vrp(r'\\foo/bar.txt')
+        Traceback (most recent call last):
+        ...
+        ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+        >>> vrp(r'C:\\foo/bar.txt')
+        Traceback (most recent call last):
+        ...
+        ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+        Blank values are allowed
+
+        >>> vrp('')
+        >>> bool(warned)
+        False
+
+        Non-string values are not.
+
+        >>> vrp(None)
+        Traceback (most recent call last):
+        ...
+        AttributeError: ...
+        """
+        invalid = (
+            os.path.pardir in path.split(posixpath.sep) or
+            posixpath.isabs(path) or
+            ntpath.isabs(path)
+        )
+        if not invalid:
+            return
+
+        msg = "Use of .. or absolute path in a resource path is not allowed."
+
+        # Aggressively disallow Windows absolute paths
+        if ntpath.isabs(path) and not posixpath.isabs(path):
+            raise ValueError(msg)
+
+        # for compatibility, warn; in future
+        # raise ValueError(msg)
+        warnings.warn(
+            msg[:-1] + " and will raise exceptions in a future release.",
+            DeprecationWarning,
+            stacklevel=4,
+        )
+
+    def _get(self, path):
+        if hasattr(self.loader, 'get_data'):
+            return self.loader.get_data(path)
+        raise NotImplementedError(
+            "Can't perform this operation for loaders without 'get_data()'"
+        )
+
+
+register_loader_type(object, NullProvider)
+
+
+class EggProvider(NullProvider):
+    """Provider based on a virtual filesystem"""
+
+    def __init__(self, module):
+        NullProvider.__init__(self, module)
+        self._setup_prefix()
+
+    def _setup_prefix(self):
+        # we assume here that our metadata may be nested inside a "basket"
+        # of multiple eggs; that's why we use module_path instead of .archive
+        path = self.module_path
+        old = None
+        while path != old:
+            if _is_egg_path(path):
+                self.egg_name = os.path.basename(path)
+                self.egg_info = os.path.join(path, 'EGG-INFO')
+                self.egg_root = path
+                break
+            old = path
+            path, base = os.path.split(path)
+
+
+class DefaultProvider(EggProvider):
+    """Provides access to package resources in the filesystem"""
+
+    def _has(self, path):
+        return os.path.exists(path)
+
+    def _isdir(self, path):
+        return os.path.isdir(path)
+
+    def _listdir(self, path):
+        return os.listdir(path)
+
+    def get_resource_stream(self, manager, resource_name):
+        return open(self._fn(self.module_path, resource_name), 'rb')
+
+    def _get(self, path):
+        with open(path, 'rb') as stream:
+            return stream.read()
+
+    @classmethod
+    def _register(cls):
+        loader_names = 'SourceFileLoader', 'SourcelessFileLoader',
+        for name in loader_names:
+            loader_cls = getattr(importlib_machinery, name, type(None))
+            register_loader_type(loader_cls, cls)
+
+
+DefaultProvider._register()
+
+
+class EmptyProvider(NullProvider):
+    """Provider that returns nothing for all requests"""
+
+    module_path = None
+
+    _isdir = _has = lambda self, path: False
+
+    def _get(self, path):
+        return ''
+
+    def _listdir(self, path):
+        return []
+
+    def __init__(self):
+        pass
+
+
+empty_provider = EmptyProvider()
+
+
+class ZipManifests(dict):
+    """
+    zip manifest builder
+    """
+
+    @classmethod
+    def build(cls, path):
+        """
+        Build a dictionary similar to the zipimport directory
+        caches, except instead of tuples, store ZipInfo objects.
+
+        Use a platform-specific path separator (os.sep) for the path keys
+        for compatibility with pypy on Windows.
+        """
+        with zipfile.ZipFile(path) as zfile:
+            items = (
+                (
+                    name.replace('/', os.sep),
+                    zfile.getinfo(name),
+                )
+                for name in zfile.namelist()
+            )
+            return dict(items)
+
+    load = build
+
+
+class MemoizedZipManifests(ZipManifests):
+    """
+    Memoized zipfile manifests.
+    """
+    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
+
+    def load(self, path):
+        """
+        Load a manifest at path or return a suitable manifest already loaded.
+        """
+        path = os.path.normpath(path)
+        mtime = os.stat(path).st_mtime
+
+        if path not in self or self[path].mtime != mtime:
+            manifest = self.build(path)
+            self[path] = self.manifest_mod(manifest, mtime)
+
+        return self[path].manifest
+
+
+class ZipProvider(EggProvider):
+    """Resource support for zips and eggs"""
+
+    eagers = None
+    _zip_manifests = MemoizedZipManifests()
+
+    def __init__(self, module):
+        EggProvider.__init__(self, module)
+        self.zip_pre = self.loader.archive + os.sep
+
+    def _zipinfo_name(self, fspath):
+        # Convert a virtual filename (full path to file) into a zipfile subpath
+        # usable with the zipimport directory cache for our target archive
+        fspath = fspath.rstrip(os.sep)
+        if fspath == self.loader.archive:
+            return ''
+        if fspath.startswith(self.zip_pre):
+            return fspath[len(self.zip_pre):]
+        raise AssertionError(
+            "%s is not a subpath of %s" % (fspath, self.zip_pre)
+        )
+
+    def _parts(self, zip_path):
+        # Convert a zipfile subpath into an egg-relative path part list.
+        # pseudo-fs path
+        fspath = self.zip_pre + zip_path
+        if fspath.startswith(self.egg_root + os.sep):
+            return fspath[len(self.egg_root) + 1:].split(os.sep)
+        raise AssertionError(
+            "%s is not a subpath of %s" % (fspath, self.egg_root)
+        )
+
+    @property
+    def zipinfo(self):
+        return self._zip_manifests.load(self.loader.archive)
+
+    def get_resource_filename(self, manager, resource_name):
+        if not self.egg_name:
+            raise NotImplementedError(
+                "resource_filename() only supported for .egg, not .zip"
+            )
+        # no need to lock for extraction, since we use temp names
+        zip_path = self._resource_to_zip(resource_name)
+        eagers = self._get_eager_resources()
+        if '/'.join(self._parts(zip_path)) in eagers:
+            for name in eagers:
+                self._extract_resource(manager, self._eager_to_zip(name))
+        return self._extract_resource(manager, zip_path)
+
+    @staticmethod
+    def _get_date_and_size(zip_stat):
+        size = zip_stat.file_size
+        # ymdhms+wday, yday, dst
+        date_time = zip_stat.date_time + (0, 0, -1)
+        # 1980 offset already done
+        timestamp = time.mktime(date_time)
+        return timestamp, size
+
+    def _extract_resource(self, manager, zip_path):
+
+        if zip_path in self._index():
+            for name in self._index()[zip_path]:
+                last = self._extract_resource(
+                    manager, os.path.join(zip_path, name)
+                )
+            # return the extracted directory name
+            return os.path.dirname(last)
+
+        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+
+        if not WRITE_SUPPORT:
+            raise IOError('"os.rename" and "os.unlink" are not supported '
+                          'on this platform')
+        try:
+
+            real_path = manager.get_cache_path(
+                self.egg_name, self._parts(zip_path)
+            )
+
+            if self._is_current(real_path, zip_path):
+                return real_path
+
+            outf, tmpnam = _mkstemp(
+                ".$extract",
+                dir=os.path.dirname(real_path),
+            )
+            os.write(outf, self.loader.get_data(zip_path))
+            os.close(outf)
+            utime(tmpnam, (timestamp, timestamp))
+            manager.postprocess(tmpnam, real_path)
+
+            try:
+                rename(tmpnam, real_path)
+
+            except os.error:
+                if os.path.isfile(real_path):
+                    if self._is_current(real_path, zip_path):
+                        # the file became current since it was checked above,
+                        #  so proceed.
+                        return real_path
+                    # Windows, del old file and retry
+                    elif os.name == 'nt':
+                        unlink(real_path)
+                        rename(tmpnam, real_path)
+                        return real_path
+                raise
+
+        except os.error:
+            # report a user-friendly error
+            manager.extraction_error()
+
+        return real_path
+
+    def _is_current(self, file_path, zip_path):
+        """
+        Return True if the file_path is current for this zip_path
+        """
+        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+        if not os.path.isfile(file_path):
+            return False
+        stat = os.stat(file_path)
+        if stat.st_size != size or stat.st_mtime != timestamp:
+            return False
+        # check that the contents match
+        zip_contents = self.loader.get_data(zip_path)
+        with open(file_path, 'rb') as f:
+            file_contents = f.read()
+        return zip_contents == file_contents
+
+    def _get_eager_resources(self):
+        if self.eagers is None:
+            eagers = []
+            for name in ('native_libs.txt', 'eager_resources.txt'):
+                if self.has_metadata(name):
+                    eagers.extend(self.get_metadata_lines(name))
+            self.eagers = eagers
+        return self.eagers
+
+    def _index(self):
+        try:
+            return self._dirindex
+        except AttributeError:
+            ind = {}
+            for path in self.zipinfo:
+                parts = path.split(os.sep)
+                while parts:
+                    parent = os.sep.join(parts[:-1])
+                    if parent in ind:
+                        ind[parent].append(parts[-1])
+                        break
+                    else:
+                        ind[parent] = [parts.pop()]
+            self._dirindex = ind
+            return ind
+
+    def _has(self, fspath):
+        zip_path = self._zipinfo_name(fspath)
+        return zip_path in self.zipinfo or zip_path in self._index()
+
+    def _isdir(self, fspath):
+        return self._zipinfo_name(fspath) in self._index()
+
+    def _listdir(self, fspath):
+        return list(self._index().get(self._zipinfo_name(fspath), ()))
+
+    def _eager_to_zip(self, resource_name):
+        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
+
+    def _resource_to_zip(self, resource_name):
+        return self._zipinfo_name(self._fn(self.module_path, resource_name))
+
+
+register_loader_type(zipimport.zipimporter, ZipProvider)
+
+
+class FileMetadata(EmptyProvider):
+    """Metadata handler for standalone PKG-INFO files
+
+    Usage::
+
+        metadata = FileMetadata("/path/to/PKG-INFO")
+
+    This provider rejects all data and metadata requests except for PKG-INFO,
+    which is treated as existing, and will be the contents of the file at
+    the provided location.
+    """
+
+    def __init__(self, path):
+        self.path = path
+
+    def _get_metadata_path(self, name):
+        return self.path
+
+    def has_metadata(self, name):
+        return name == 'PKG-INFO' and os.path.isfile(self.path)
+
+    def get_metadata(self, name):
+        if name != 'PKG-INFO':
+            raise KeyError("No metadata except PKG-INFO is available")
+
+        with io.open(self.path, encoding='utf-8', errors="replace") as f:
+            metadata = f.read()
+        self._warn_on_replacement(metadata)
+        return metadata
+
+    def _warn_on_replacement(self, metadata):
+        # Python 2.7 compat for: replacement_char = '�'
+        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
+        if replacement_char in metadata:
+            tmpl = "{self.path} could not be properly decoded in UTF-8"
+            msg = tmpl.format(**locals())
+            warnings.warn(msg)
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+
+class PathMetadata(DefaultProvider):
+    """Metadata provider for egg directories
+
+    Usage::
+
+        # Development eggs:
+
+        egg_info = "/path/to/PackageName.egg-info"
+        base_dir = os.path.dirname(egg_info)
+        metadata = PathMetadata(base_dir, egg_info)
+        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
+
+        # Unpacked egg directories:
+
+        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
+        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
+        dist = Distribution.from_filename(egg_path, metadata=metadata)
+    """
+
+    def __init__(self, path, egg_info):
+        self.module_path = path
+        self.egg_info = egg_info
+
+
+class EggMetadata(ZipProvider):
+    """Metadata provider for .egg files"""
+
+    def __init__(self, importer):
+        """Create a metadata provider from a zipimporter"""
+
+        self.zip_pre = importer.archive + os.sep
+        self.loader = importer
+        if importer.prefix:
+            self.module_path = os.path.join(importer.archive, importer.prefix)
+        else:
+            self.module_path = importer.archive
+        self._setup_prefix()
+
+
+_declare_state('dict', _distribution_finders={})
+
+
+def register_finder(importer_type, distribution_finder):
+    """Register `distribution_finder` to find distributions in sys.path items
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `distribution_finder` is a callable that, passed a path
+    item and the importer instance, yields ``Distribution`` instances found on
+    that path item.  See ``pkg_resources.find_on_path`` for an example."""
+    _distribution_finders[importer_type] = distribution_finder
+
+
+def find_distributions(path_item, only=False):
+    """Yield distributions accessible via `path_item`"""
+    importer = get_importer(path_item)
+    finder = _find_adapter(_distribution_finders, importer)
+    return finder(importer, path_item, only)
+
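+# Illustrative usage (not from the upstream module); the path below is a
+# placeholder and the output depends on what is installed there:
+#
+#     >>> for dist in find_distributions('/path/to/site-packages'):  # doctest: +SKIP
+#     ...     print(dist.project_name, dist.version)
+#
+# The importer for the path item selects the registered finder, e.g.
+# ``find_on_path`` for ordinary directories or ``find_eggs_in_zip`` for zips.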
+
+def find_eggs_in_zip(importer, path_item, only=False):
+    """
+    Find eggs in zip files; possibly multiple nested eggs.
+    """
+    if importer.archive.endswith('.whl'):
+        # wheels are not supported with this finder
+        # they don't have PKG-INFO metadata, and won't ever contain eggs
+        return
+    metadata = EggMetadata(importer)
+    if metadata.has_metadata('PKG-INFO'):
+        yield Distribution.from_filename(path_item, metadata=metadata)
+    if only:
+        # don't yield nested distros
+        return
+    for subitem in metadata.resource_listdir(''):
+        if _is_egg_path(subitem):
+            subpath = os.path.join(path_item, subitem)
+            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
+            for dist in dists:
+                yield dist
+        elif subitem.lower().endswith('.dist-info'):
+            subpath = os.path.join(path_item, subitem)
+            submeta = EggMetadata(zipimport.zipimporter(subpath))
+            submeta.egg_info = subpath
+            yield Distribution.from_location(path_item, subitem, submeta)
+
+
+register_finder(zipimport.zipimporter, find_eggs_in_zip)
+
+
+def find_nothing(importer, path_item, only=False):
+    return ()
+
+
+register_finder(object, find_nothing)
+
+
+def _by_version_descending(names):
+    """
+    Given a list of filenames, return them in descending order
+    by version number.
+
+    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
+    >>> _by_version_descending(names)
+    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
+    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
+    >>> _by_version_descending(names)
+    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
+    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
+    >>> _by_version_descending(names)
+    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
+    """
+    def _by_version(name):
+        """
+        Parse each component of the filename
+        """
+        name, ext = os.path.splitext(name)
+        parts = itertools.chain(name.split('-'), [ext])
+        return [packaging.version.parse(part) for part in parts]
+
+    return sorted(names, key=_by_version, reverse=True)
+
+
+def find_on_path(importer, path_item, only=False):
+    """Yield distributions accessible on a sys.path directory"""
+    path_item = _normalize_cached(path_item)
+
+    if _is_unpacked_egg(path_item):
+        yield Distribution.from_filename(
+            path_item, metadata=PathMetadata(
+                path_item, os.path.join(path_item, 'EGG-INFO')
+            )
+        )
+        return
+
+    entries = safe_listdir(path_item)
+
+    # for performance, before sorting by version,
+    # screen entries for only those that will yield
+    # distributions
+    filtered = (
+        entry
+        for entry in entries
+        if dist_factory(path_item, entry, only)
+    )
+
+    # scan for .egg and .egg-info in directory
+    path_item_entries = _by_version_descending(filtered)
+    for entry in path_item_entries:
+        fullpath = os.path.join(path_item, entry)
+        factory = dist_factory(path_item, entry, only)
+        for dist in factory(fullpath):
+            yield dist
+
+
+def dist_factory(path_item, entry, only):
+    """
+    Return a dist_factory for a path_item and entry
+    """
+    lower = entry.lower()
+    is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info')))
+    return (
+        distributions_from_metadata
+        if is_meta else
+        find_distributions
+        if not only and _is_egg_path(entry) else
+        resolve_egg_link
+        if not only and lower.endswith('.egg-link') else
+        NoDists()
+    )
+
+
+class NoDists:
+    """
+    >>> bool(NoDists())
+    False
+
+    >>> list(NoDists()('anything'))
+    []
+    """
+    def __bool__(self):
+        return False
+    if six.PY2:
+        __nonzero__ = __bool__
+
+    def __call__(self, fullpath):
+        return iter(())
+
+
+def safe_listdir(path):
+    """
+    Attempt to list contents of path, but suppress some exceptions.
+    """
+    try:
+        return os.listdir(path)
+    except (PermissionError, NotADirectoryError):
+        pass
+    except OSError as e:
+        # Ignore the directory if it does not exist, is not a directory,
+        # or permission is denied
+        ignorable = (
+            e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
+            # Python 2 on Windows needs to be handled this way :(
+            or getattr(e, "winerror", None) == 267
+        )
+        if not ignorable:
+            raise
+    return ()
+
+
+def distributions_from_metadata(path):
+    root = os.path.dirname(path)
+    if os.path.isdir(path):
+        if len(os.listdir(path)) == 0:
+            # empty metadata dir; skip
+            return
+        metadata = PathMetadata(root, path)
+    else:
+        metadata = FileMetadata(path)
+    entry = os.path.basename(path)
+    yield Distribution.from_location(
+        root, entry, metadata, precedence=DEVELOP_DIST,
+    )
+
+
+def non_empty_lines(path):
+    """
+    Yield non-empty lines from file at path
+    """
+    with open(path) as f:
+        for line in f:
+            line = line.strip()
+            if line:
+                yield line
+
+
+def resolve_egg_link(path):
+    """
+    Given a path to an .egg-link, resolve distributions
+    present in the referenced path.
+    """
+    referenced_paths = non_empty_lines(path)
+    resolved_paths = (
+        os.path.join(os.path.dirname(path), ref)
+        for ref in referenced_paths
+    )
+    dist_groups = map(find_distributions, resolved_paths)
+    return next(dist_groups, ())
+
+
+register_finder(pkgutil.ImpImporter, find_on_path)
+
+if hasattr(importlib_machinery, 'FileFinder'):
+    register_finder(importlib_machinery.FileFinder, find_on_path)
+
+_declare_state('dict', _namespace_handlers={})
+_declare_state('dict', _namespace_packages={})
+
+
+def register_namespace_handler(importer_type, namespace_handler):
+    """Register `namespace_handler` to declare namespace packages
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `namespace_handler` is a callable like this::
+
+        def namespace_handler(importer, path_entry, moduleName, module):
+            # return a path_entry to use for child packages
+
+    Namespace handlers are only called if the importer object has already
+    agreed that it can handle the relevant path item, and they should only
+    return a subpath if the module __path__ does not already contain an
+    equivalent subpath.  For an example namespace handler, see
+    ``pkg_resources.file_ns_handler``.
+    """
+    _namespace_handlers[importer_type] = namespace_handler
+
+
+def _handle_ns(packageName, path_item):
+    """Ensure that named package includes a subpath of path_item (if needed)"""
+
+    importer = get_importer(path_item)
+    if importer is None:
+        return None
+
+    # capture warnings due to #1111
+    with warnings.catch_warnings():
+        warnings.simplefilter("ignore")
+        loader = importer.find_module(packageName)
+
+    if loader is None:
+        return None
+    module = sys.modules.get(packageName)
+    if module is None:
+        module = sys.modules[packageName] = types.ModuleType(packageName)
+        module.__path__ = []
+        _set_parent_ns(packageName)
+    elif not hasattr(module, '__path__'):
+        raise TypeError("Not a package:", packageName)
+    handler = _find_adapter(_namespace_handlers, importer)
+    subpath = handler(importer, path_item, packageName, module)
+    if subpath is not None:
+        path = module.__path__
+        path.append(subpath)
+        loader.load_module(packageName)
+        _rebuild_mod_path(path, packageName, module)
+    return subpath
+
+
+def _rebuild_mod_path(orig_path, package_name, module):
+    """
+    Rebuild module.__path__ ensuring that all entries are ordered
+    corresponding to their sys.path order
+    """
+    sys_path = [_normalize_cached(p) for p in sys.path]
+
+    def safe_sys_path_index(entry):
+        """
+        Workaround for #520 and #513.
+        """
+        try:
+            return sys_path.index(entry)
+        except ValueError:
+            return float('inf')
+
+    def position_in_sys_path(path):
+        """
+        Return the ordinal of the path based on its position in sys.path
+        """
+        path_parts = path.split(os.sep)
+        module_parts = package_name.count('.') + 1
+        parts = path_parts[:-module_parts]
+        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
+
+    new_path = sorted(orig_path, key=position_in_sys_path)
+    new_path = [_normalize_cached(p) for p in new_path]
+
+    if isinstance(module.__path__, list):
+        module.__path__[:] = new_path
+    else:
+        module.__path__ = new_path
+
+
+def declare_namespace(packageName):
+    """Declare that package 'packageName' is a namespace package"""
+
+    _imp.acquire_lock()
+    try:
+        if packageName in _namespace_packages:
+            return
+
+        path = sys.path
+        parent, _, _ = packageName.rpartition('.')
+
+        if parent:
+            declare_namespace(parent)
+            if parent not in _namespace_packages:
+                __import__(parent)
+            try:
+                path = sys.modules[parent].__path__
+            except AttributeError:
+                raise TypeError("Not a package:", parent)
+
+        # Track what packages are namespaces, so when new path items are added,
+        # they can be updated
+        _namespace_packages.setdefault(parent or None, []).append(packageName)
+        _namespace_packages.setdefault(packageName, [])
+
+        for path_item in path:
+            # Ensure all the parent's path items are reflected in the child,
+            # if they apply
+            _handle_ns(packageName, path_item)
+
+    finally:
+        _imp.release_lock()
+
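+# Illustrative usage (not from the upstream module): legacy pkg_resources-style
+# namespace packages call ``declare_namespace`` from each portion's
+# ``__init__.py``; ``mynamespace`` is a hypothetical package name:
+#
+#     # mynamespace/__init__.py
+#     __import__('pkg_resources').declare_namespace(__name__)
+#
+# New code should generally prefer PEP 420 implicit namespace packages.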
+
+def fixup_namespace_packages(path_item, parent=None):
+    """Ensure that previously-declared namespace packages include path_item"""
+    _imp.acquire_lock()
+    try:
+        for package in _namespace_packages.get(parent, ()):
+            subpath = _handle_ns(package, path_item)
+            if subpath:
+                fixup_namespace_packages(subpath, package)
+    finally:
+        _imp.release_lock()
+
+
+def file_ns_handler(importer, path_item, packageName, module):
+    """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+    subpath = os.path.join(path_item, packageName.split('.')[-1])
+    normalized = _normalize_cached(subpath)
+    for item in module.__path__:
+        if _normalize_cached(item) == normalized:
+            break
+    else:
+        # Only return the path if it's not already there
+        return subpath
+
+
+register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+
+if hasattr(importlib_machinery, 'FileFinder'):
+    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
+
+
+def null_ns_handler(importer, path_item, packageName, module):
+    return None
+
+
+register_namespace_handler(object, null_ns_handler)
+
+
+def normalize_path(filename):
+    """Normalize a file/dir name for comparison purposes"""
+    return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
+
+
+def _cygwin_patch(filename):  # pragma: nocover
+    """
+    Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
+    symlink components. Using
+    os.path.abspath() works around this limitation. A fix in os.getcwd()
+    would probably be better, in Cygwin even more so, except
+    that this seems to be by design...
+    """
+    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
+
+
+def _normalize_cached(filename, _cache={}):
+    try:
+        return _cache[filename]
+    except KeyError:
+        _cache[filename] = result = normalize_path(filename)
+        return result
+
+
+def _is_egg_path(path):
+    """
+    Determine if given path appears to be an egg.
+    """
+    return path.lower().endswith('.egg')
+
+
+def _is_unpacked_egg(path):
+    """
+    Determine if given path appears to be an unpacked egg.
+    """
+    return (
+        _is_egg_path(path) and
+        os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
+    )
+
+
+def _set_parent_ns(packageName):
+    parts = packageName.split('.')
+    name = parts.pop()
+    if parts:
+        parent = '.'.join(parts)
+        setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+def yield_lines(strs):
+    """Yield non-empty/non-comment lines of a string or sequence"""
+    if isinstance(strs, six.string_types):
+        for s in strs.splitlines():
+            s = s.strip()
+            # skip blank lines/comments
+            if s and not s.startswith('#'):
+                yield s
+    else:
+        for ss in strs:
+            for s in yield_lines(ss):
+                yield s
+
+
+MODULE = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+    r"""
+    (?P<name>[^-]+) (
+        -(?P<ver>[^-]+) (
+            -py(?P<pyver>[^-]+) (
+                -(?P<plat>.+)
+            )?
+        )?
+    )?
+    """,
+    re.VERBOSE | re.IGNORECASE,
+).match
+
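+# Illustrative example (not from the upstream module) of what EGG_NAME extracts
+# from an egg basename once the extension has been split off; ``FooBar`` is a
+# made-up project name:
+#
+#     >>> EGG_NAME('FooBar-1.2.3-py3.6-win32').group('name', 'ver', 'pyver', 'plat')
+#     ('FooBar', '1.2.3', '3.6', 'win32')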
+
+class EntryPoint:
+    """Object representing an advertised importable object"""
+
+    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+        if not MODULE(module_name):
+            raise ValueError("Invalid module name", module_name)
+        self.name = name
+        self.module_name = module_name
+        self.attrs = tuple(attrs)
+        self.extras = tuple(extras)
+        self.dist = dist
+
+    def __str__(self):
+        s = "%s = %s" % (self.name, self.module_name)
+        if self.attrs:
+            s += ':' + '.'.join(self.attrs)
+        if self.extras:
+            s += ' [%s]' % ','.join(self.extras)
+        return s
+
+    def __repr__(self):
+        return "EntryPoint.parse(%r)" % str(self)
+
+    def load(self, require=True, *args, **kwargs):
+        """
+        Require packages for this EntryPoint, then resolve it.
+        """
+        if not require or args or kwargs:
+            warnings.warn(
+                "Parameters to load are deprecated.  Call .resolve and "
+                ".require separately.",
+                PkgResourcesDeprecationWarning,
+                stacklevel=2,
+            )
+        if require:
+            self.require(*args, **kwargs)
+        return self.resolve()
+
+    def resolve(self):
+        """
+        Resolve the entry point from its module and attrs.
+        """
+        module = __import__(self.module_name, fromlist=['__name__'], level=0)
+        try:
+            return functools.reduce(getattr, self.attrs, module)
+        except AttributeError as exc:
+            raise ImportError(str(exc))
+
+    def require(self, env=None, installer=None):
+        if self.extras and not self.dist:
+            raise UnknownExtra("Can't require() without a distribution", self)
+
+        # Get the requirements for this entry point with all its extras and
+        # then resolve them. We have to pass `extras` along when resolving so
+        # that the working set knows what extras we want. Otherwise, for
+        # dist-info distributions, the working set will assume that the
+        # requirements for that extra are purely optional and skip over them.
+        reqs = self.dist.requires(self.extras)
+        items = working_set.resolve(reqs, env, installer, extras=self.extras)
+        list(map(working_set.add, items))
+
+    pattern = re.compile(
+        r'\s*'
+        r'(?P<name>.+?)\s*'
+        r'=\s*'
+        r'(?P<module>[\w.]+)\s*'
+        r'(:\s*(?P<attr>[\w.]+))?\s*'
+        r'(?P<extras>\[.*\])?\s*$'
+    )
+
+    @classmethod
+    def parse(cls, src, dist=None):
+        """Parse a single entry point from string `src`
+
+        Entry point syntax follows the form::
+
+            name = some.module:some.attr [extra1, extra2]
+
+        The entry name and module name are required, but the ``:attrs`` and
+        ``[extras]`` parts are optional
+        """
+        m = cls.pattern.match(src)
+        if not m:
+            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
+            raise ValueError(msg, src)
+        res = m.groupdict()
+        extras = cls._parse_extras(res['extras'])
+        attrs = res['attr'].split('.') if res['attr'] else ()
+        return cls(res['name'], res['module'], attrs, extras, dist)
+
+    @classmethod
+    def _parse_extras(cls, extras_spec):
+        if not extras_spec:
+            return ()
+        req = Requirement.parse('x' + extras_spec)
+        if req.specs:
+            raise ValueError()
+        return req.extras
+
+    @classmethod
+    def parse_group(cls, group, lines, dist=None):
+        """Parse an entry point group"""
+        if not MODULE(group):
+            raise ValueError("Invalid group name", group)
+        this = {}
+        for line in yield_lines(lines):
+            ep = cls.parse(line, dist)
+            if ep.name in this:
+                raise ValueError("Duplicate entry point", group, ep.name)
+            this[ep.name] = ep
+        return this
+
+    @classmethod
+    def parse_map(cls, data, dist=None):
+        """Parse a map of entry point groups"""
+        if isinstance(data, dict):
+            data = data.items()
+        else:
+            data = split_sections(data)
+        maps = {}
+        for group, lines in data:
+            if group is None:
+                if not lines:
+                    continue
+                raise ValueError("Entry points must be listed in groups")
+            group = group.strip()
+            if group in maps:
+                raise ValueError("Duplicate group name", group)
+            maps[group] = cls.parse_group(group, lines, dist)
+        return maps
+
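+# Illustrative usage (not from the upstream module) of EntryPoint.parse;
+# ``mypkg`` is a hypothetical module name:
+#
+#     >>> ep = EntryPoint.parse('main = mypkg.cli:run [dev]')
+#     >>> ep.name, ep.module_name, ep.attrs, ep.extras
+#     ('main', 'mypkg.cli', ('run',), ('dev',))
+#
+# ``ep.resolve()`` would import ``mypkg.cli`` and return its ``run`` attribute.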
+
+def _remove_md5_fragment(location):
+    if not location:
+        return ''
+    parsed = urllib.parse.urlparse(location)
+    if parsed[-1].startswith('md5='):
+        return urllib.parse.urlunparse(parsed[:-1] + ('',))
+    return location
+
+
+def _version_from_file(lines):
+    """
+    Given an iterable of lines from a Metadata file, return
+    the value of the Version field, if present, or None otherwise.
+    """
+    def is_version_line(line):
+        return line.lower().startswith('version:')
+    version_lines = filter(is_version_line, lines)
+    line = next(iter(version_lines), '')
+    _, _, value = line.partition(':')
+    return safe_version(value.strip()) or None
+
+
+class Distribution:
+    """Wrap an actual or potential sys.path entry w/metadata"""
+    PKG_INFO = 'PKG-INFO'
+
+    def __init__(
+            self, location=None, metadata=None, project_name=None,
+            version=None, py_version=PY_MAJOR, platform=None,
+            precedence=EGG_DIST):
+        self.project_name = safe_name(project_name or 'Unknown')
+        if version is not None:
+            self._version = safe_version(version)
+        self.py_version = py_version
+        self.platform = platform
+        self.location = location
+        self.precedence = precedence
+        self._provider = metadata or empty_provider
+
+    @classmethod
+    def from_location(cls, location, basename, metadata=None, **kw):
+        project_name, version, py_version, platform = [None] * 4
+        basename, ext = os.path.splitext(basename)
+        if ext.lower() in _distributionImpl:
+            cls = _distributionImpl[ext.lower()]
+
+            match = EGG_NAME(basename)
+            if match:
+                project_name, version, py_version, platform = match.group(
+                    'name', 'ver', 'pyver', 'plat'
+                )
+        return cls(
+            location, metadata, project_name=project_name, version=version,
+            py_version=py_version, platform=platform, **kw
+        )._reload_version()
+
+    def _reload_version(self):
+        return self
+
+    @property
+    def hashcmp(self):
+        return (
+            self.parsed_version,
+            self.precedence,
+            self.key,
+            _remove_md5_fragment(self.location),
+            self.py_version or '',
+            self.platform or '',
+        )
+
+    def __hash__(self):
+        return hash(self.hashcmp)
+
+    def __lt__(self, other):
+        return self.hashcmp < other.hashcmp
+
+    def __le__(self, other):
+        return self.hashcmp <= other.hashcmp
+
+    def __gt__(self, other):
+        return self.hashcmp > other.hashcmp
+
+    def __ge__(self, other):
+        return self.hashcmp >= other.hashcmp
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            # It's not a Distribution, so they are not equal
+            return False
+        return self.hashcmp == other.hashcmp
+
+    def __ne__(self, other):
+        return not self == other
+
+    # These properties have to be lazy so that we don't have to load any
+    # metadata until/unless it's actually needed.  (i.e., some distributions
+    # may not know their name or version without loading PKG-INFO)
+
+    @property
+    def key(self):
+        try:
+            return self._key
+        except AttributeError:
+            self._key = key = self.project_name.lower()
+            return key
+
+    @property
+    def parsed_version(self):
+        if not hasattr(self, "_parsed_version"):
+            self._parsed_version = parse_version(self.version)
+
+        return self._parsed_version
+
+    def _warn_legacy_version(self):
+        LV = packaging.version.LegacyVersion
+        is_legacy = isinstance(self._parsed_version, LV)
+        if not is_legacy:
+            return
+
+        # While an empty version is technically a legacy version and
+        # is not a valid PEP 440 version, it's also unlikely to
+        # actually come from someone and instead it is more likely that
+        # it comes from setuptools attempting to parse a filename and
+        # including it in the list. So for that we'll gate this warning
+        # on if the version is anything at all or not.
+        if not self.version:
+            return
+
+        tmpl = textwrap.dedent("""
+            '{project_name} ({version})' is being parsed as a legacy,
+            non PEP 440,
+            version. You may find odd behavior and sort order.
+            In particular it will be sorted as less than 0.0. It
+            is recommended to migrate to PEP 440 compatible
+            versions.
+            """).strip().replace('\n', ' ')
+
+        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
+
+    @property
+    def version(self):
+        try:
+            return self._version
+        except AttributeError:
+            version = self._get_version()
+            if version is None:
+                path = self._get_metadata_path_for_display(self.PKG_INFO)
+                msg = (
+                    "Missing 'Version:' header and/or {} file at path: {}"
+                ).format(self.PKG_INFO, path)
+                raise ValueError(msg, self)
+
+            return version
+
+    @property
+    def _dep_map(self):
+        """
+        A map of extra to its list of (direct) requirements
+        for this distribution, including the null extra.
+        """
+        try:
+            return self.__dep_map
+        except AttributeError:
+            self.__dep_map = self._filter_extras(self._build_dep_map())
+        return self.__dep_map
+
+    @staticmethod
+    def _filter_extras(dm):
+        """
+        Given a mapping of extras to dependencies, strip off
+        environment markers and filter out any dependencies
+        not matching the markers.
+        """
+        for extra in list(filter(None, dm)):
+            new_extra = extra
+            reqs = dm.pop(extra)
+            new_extra, _, marker = extra.partition(':')
+            fails_marker = marker and (
+                invalid_marker(marker)
+                or not evaluate_marker(marker)
+            )
+            if fails_marker:
+                reqs = []
+            new_extra = safe_extra(new_extra) or None
+
+            dm.setdefault(new_extra, []).extend(reqs)
+        return dm
+
+    def _build_dep_map(self):
+        dm = {}
+        for name in 'requires.txt', 'depends.txt':
+            for extra, reqs in split_sections(self._get_metadata(name)):
+                dm.setdefault(extra, []).extend(parse_requirements(reqs))
+        return dm
+
+    def requires(self, extras=()):
+        """List of Requirements needed for this distro if `extras` are used"""
+        dm = self._dep_map
+        deps = []
+        deps.extend(dm.get(None, ()))
+        for ext in extras:
+            try:
+                deps.extend(dm[safe_extra(ext)])
+            except KeyError:
+                raise UnknownExtra(
+                    "%s has no such extra feature %r" % (self, ext)
+                )
+        return deps
+
+    def _get_metadata_path_for_display(self, name):
+        """
+        Return the path to the given metadata file, if available.
+        """
+        try:
+            # We need to access _get_metadata_path() on the provider object
+            # directly rather than through this class's __getattr__()
+            # since _get_metadata_path() is marked private.
+            path = self._provider._get_metadata_path(name)
+
+        # Handle exceptions e.g. in case the distribution's metadata
+        # provider doesn't support _get_metadata_path().
+        except Exception:
+            return '[could not detect]'
+
+        return path
+
+    def _get_metadata(self, name):
+        if self.has_metadata(name):
+            for line in self.get_metadata_lines(name):
+                yield line
+
+    def _get_version(self):
+        lines = self._get_metadata(self.PKG_INFO)
+        version = _version_from_file(lines)
+
+        return version
+
+    def activate(self, path=None, replace=False):
+        """Ensure distribution is importable on `path` (default=sys.path)"""
+        if path is None:
+            path = sys.path
+        self.insert_on(path, replace=replace)
+        if path is sys.path:
+            fixup_namespace_packages(self.location)
+            for pkg in self._get_metadata('namespace_packages.txt'):
+                if pkg in sys.modules:
+                    declare_namespace(pkg)
+
+    def egg_name(self):
+        """Return what this distribution's standard .egg filename should be"""
+        filename = "%s-%s-py%s" % (
+            to_filename(self.project_name), to_filename(self.version),
+            self.py_version or PY_MAJOR
+        )
+
+        if self.platform:
+            filename += '-' + self.platform
+        return filename
+
+    def __repr__(self):
+        if self.location:
+            return "%s (%s)" % (self, self.location)
+        else:
+            return str(self)
+
+    def __str__(self):
+        try:
+            version = getattr(self, 'version', None)
+        except ValueError:
+            version = None
+        version = version or "[unknown version]"
+        return "%s %s" % (self.project_name, version)
+
+    def __getattr__(self, attr):
+        """Delegate all unrecognized public attributes to .metadata provider"""
+        if attr.startswith('_'):
+            raise AttributeError(attr)
+        return getattr(self._provider, attr)
+
+    def __dir__(self):
+        return list(
+            set(super(Distribution, self).__dir__())
+            | set(
+                attr for attr in self._provider.__dir__()
+                if not attr.startswith('_')
+            )
+        )
+
+    if not hasattr(object, '__dir__'):
+        # python 2.7 not supported
+        del __dir__
+
+    @classmethod
+    def from_filename(cls, filename, metadata=None, **kw):
+        return cls.from_location(
+            _normalize_cached(filename), os.path.basename(filename), metadata,
+            **kw
+        )
+
+    def as_requirement(self):
+        """Return a ``Requirement`` that matches this distribution exactly"""
+        if isinstance(self.parsed_version, packaging.version.Version):
+            spec = "%s==%s" % (self.project_name, self.parsed_version)
+        else:
+            spec = "%s===%s" % (self.project_name, self.parsed_version)
+
+        return Requirement.parse(spec)
+
+    def load_entry_point(self, group, name):
+        """Return the `name` entry point of `group` or raise ImportError"""
+        ep = self.get_entry_info(group, name)
+        if ep is None:
+            raise ImportError("Entry point %r not found" % ((group, name),))
+        return ep.load()
+
+    def get_entry_map(self, group=None):
+        """Return the entry point map for `group`, or the full entry map"""
+        try:
+            ep_map = self._ep_map
+        except AttributeError:
+            ep_map = self._ep_map = EntryPoint.parse_map(
+                self._get_metadata('entry_points.txt'), self
+            )
+        if group is not None:
+            return ep_map.get(group, {})
+        return ep_map
+
+    def get_entry_info(self, group, name):
+        """Return the EntryPoint object for `group`+`name`, or ``None``"""
+        return self.get_entry_map(group).get(name)
+
+    def insert_on(self, path, loc=None, replace=False):
+        """Ensure self.location is on path
+
+        If replace=False (default):
+            - If location is already in path anywhere, do nothing.
+            - Else:
+              - If it's an egg and its parent directory is on path,
+                insert just ahead of the parent.
+              - Else: add to the end of path.
+        If replace=True:
+            - If location is already on path anywhere (not eggs)
+              or higher priority than its parent (eggs)
+              do nothing.
+            - Else:
+              - If it's an egg and its parent directory is on path,
+                insert just ahead of the parent,
+                removing any lower-priority entries.
+              - Else: add it to the front of path.
+        """
+
+        loc = loc or self.location
+        if not loc:
+            return
+
+        nloc = _normalize_cached(loc)
+        bdir = os.path.dirname(nloc)
+        npath = [(p and _normalize_cached(p) or p) for p in path]
+
+        for p, item in enumerate(npath):
+            if item == nloc:
+                if replace:
+                    break
+                else:
+                    # don't modify path (even removing duplicates) if
+                    # found and not replace
+                    return
+            elif item == bdir and self.precedence == EGG_DIST:
+                # if it's an .egg, give it precedence over its directory
+                # UNLESS it's already been added to sys.path and replace=False
+                if (not replace) and nloc in npath[p:]:
+                    return
+                if path is sys.path:
+                    self.check_version_conflict()
+                path.insert(p, loc)
+                npath.insert(p, nloc)
+                break
+        else:
+            if path is sys.path:
+                self.check_version_conflict()
+            if replace:
+                path.insert(0, loc)
+            else:
+                path.append(loc)
+            return
+
+        # p is the spot where we found or inserted loc; now remove duplicates
+        while True:
+            try:
+                np = npath.index(nloc, p + 1)
+            except ValueError:
+                break
+            else:
+                del npath[np], path[np]
+                # ha!
+                p = np
+
+        return
+
+    def check_version_conflict(self):
+        if self.key == 'setuptools':
+            # ignore the inevitable setuptools self-conflicts  :(
+            return
+
+        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
+        loc = normalize_path(self.location)
+        for modname in self._get_metadata('top_level.txt'):
+            if (modname not in sys.modules or modname in nsp
+                    or modname in _namespace_packages):
+                continue
+            if modname in ('pkg_resources', 'setuptools', 'site'):
+                continue
+            fn = getattr(sys.modules[modname], '__file__', None)
+            if fn and (normalize_path(fn).startswith(loc) or
+                       fn.startswith(self.location)):
+                continue
+            issue_warning(
+                "Module %s was already imported from %s, but %s is being added"
+                " to sys.path" % (modname, fn, self.location),
+            )
+
+    def has_version(self):
+        try:
+            self.version
+        except ValueError:
+            issue_warning("Unbuilt egg for " + repr(self))
+            return False
+        return True
+
+    def clone(self, **kw):
+        """Copy this distribution, substituting in any changed keyword args"""
+        names = 'project_name version py_version platform location precedence'
+        for attr in names.split():
+            kw.setdefault(attr, getattr(self, attr, None))
+        kw.setdefault('metadata', self._provider)
+        return self.__class__(**kw)
+
+    @property
+    def extras(self):
+        return [dep for dep in self._dep_map if dep]
+
+
+class EggInfoDistribution(Distribution):
+    def _reload_version(self):
+        """
+        Packages installed by distutils (e.g. numpy or scipy)
+        use an old safe_version, so their version numbers
+        can get mangled when converted to filenames
+        (e.g., 1.11.0.dev0+2329eae to 1.11.0.dev0_2329eae).
+        Such distributions will not be parsed properly
+        downstream by Distribution and safe_version, so
+        take an extra step and try to get the version number from
+        the metadata file itself instead of the filename.
+        """
+        md_version = self._get_version()
+        if md_version:
+            self._version = md_version
+        return self
+
+
+class DistInfoDistribution(Distribution):
+    """
+    Wrap an actual or potential sys.path entry
+    w/metadata, .dist-info style.
+    """
+    PKG_INFO = 'METADATA'
+    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
+
+    @property
+    def _parsed_pkg_info(self):
+        """Parse and cache metadata"""
+        try:
+            return self._pkg_info
+        except AttributeError:
+            metadata = self.get_metadata(self.PKG_INFO)
+            self._pkg_info = email.parser.Parser().parsestr(metadata)
+            return self._pkg_info
+
+    @property
+    def _dep_map(self):
+        try:
+            return self.__dep_map
+        except AttributeError:
+            self.__dep_map = self._compute_dependencies()
+            return self.__dep_map
+
+    def _compute_dependencies(self):
+        """Recompute this distribution's dependencies."""
+        dm = self.__dep_map = {None: []}
+
+        reqs = []
+        # Including any condition expressions
+        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
+            reqs.extend(parse_requirements(req))
+
+        def reqs_for_extra(extra):
+            for req in reqs:
+                if not req.marker or req.marker.evaluate({'extra': extra}):
+                    yield req
+
+        common = frozenset(reqs_for_extra(None))
+        dm[None].extend(common)
+
+        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
+            s_extra = safe_extra(extra.strip())
+            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)
+
+        return dm
+
+
+_distributionImpl = {
+    '.egg': Distribution,
+    '.egg-info': EggInfoDistribution,
+    '.dist-info': DistInfoDistribution,
+}
+
+
+def issue_warning(*args, **kw):
+    level = 1
+    g = globals()
+    try:
+        # find the first stack frame that is *not* code in
+        # the pkg_resources module, to use for the warning
+        while sys._getframe(level).f_globals is g:
+            level += 1
+    except ValueError:
+        pass
+    warnings.warn(stacklevel=level + 1, *args, **kw)
+
+
+class RequirementParseError(ValueError):
+    def __str__(self):
+        return ' '.join(self.args)
+
+
+def parse_requirements(strs):
+    """Yield ``Requirement`` objects for each specification in `strs`
+
+    `strs` must be a string, or a (possibly-nested) iterable thereof.
+    """
+    # create a steppable iterator, so we can handle \-continuations
+    lines = iter(yield_lines(strs))
+
+    for line in lines:
+        # Drop comments -- a hash without a space may be in a URL.
+        if ' #' in line:
+            line = line[:line.find(' #')]
+        # If there is a line continuation, drop it, and append the next line.
+        if line.endswith('\\'):
+            line = line[:-2].strip()
+            try:
+                line += next(lines)
+            except StopIteration:
+                return
+        yield Requirement(line)
+
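+# Illustrative usage (not from the upstream module); the requirement strings
+# are placeholders:
+#
+#     >>> reqs = list(parse_requirements("foo>=1.0\n# a comment\nbar[extra]>=2.0"))
+#     >>> [str(r) for r in reqs]
+#     ['foo>=1.0', 'bar[extra]>=2.0']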
+
+class Requirement(packaging.requirements.Requirement):
+    def __init__(self, requirement_string):
+        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
+        try:
+            super(Requirement, self).__init__(requirement_string)
+        except packaging.requirements.InvalidRequirement as e:
+            raise RequirementParseError(str(e))
+        self.unsafe_name = self.name
+        project_name = safe_name(self.name)
+        self.project_name, self.key = project_name, project_name.lower()
+        self.specs = [
+            (spec.operator, spec.version) for spec in self.specifier]
+        self.extras = tuple(map(safe_extra, self.extras))
+        self.hashCmp = (
+            self.key,
+            self.url,
+            self.specifier,
+            frozenset(self.extras),
+            str(self.marker) if self.marker else None,
+        )
+        self.__hash = hash(self.hashCmp)
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, Requirement) and
+            self.hashCmp == other.hashCmp
+        )
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __contains__(self, item):
+        if isinstance(item, Distribution):
+            if item.key != self.key:
+                return False
+
+            item = item.version
+
+        # Allow prereleases always in order to match the previous behavior of
+        # this method. In the future this should be smarter and follow PEP 440
+        # more accurately.
+        return self.specifier.contains(item, prereleases=True)
+
+    def __hash__(self):
+        return self.__hash
+
+    def __repr__(self):
+        return "Requirement.parse(%r)" % str(self)
+
+    @staticmethod
+    def parse(s):
+        req, = parse_requirements(s)
+        return req
+
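+# Illustrative usage (not from the upstream module); ``SomeProject`` is a
+# placeholder project name:
+#
+#     >>> req = Requirement.parse('SomeProject>=1.2,<2.0')
+#     >>> req.project_name, req.key
+#     ('SomeProject', 'someproject')
+#     >>> sorted(req.specs)
+#     [('<', '2.0'), ('>=', '1.2')]
+#
+# A Distribution ``dist`` with a matching key satisfies the requirement when
+# ``dist in req`` is true, i.e. its version is contained in the specifier set.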
+
+def _always_object(classes):
+    """
+    Ensure object appears in the mro even
+    for old-style classes.
+    """
+    if object not in classes:
+        return classes + (object,)
+    return classes
+
+
+def _find_adapter(registry, ob):
+    """Return an adapter factory for `ob` from `registry`"""
+    types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
+    for t in types:
+        if t in registry:
+            return registry[t]
+
+
+def ensure_directory(path):
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    py31compat.makedirs(dirname, exist_ok=True)
+
+
+def _bypass_ensure_directory(path):
+    """Sandbox-bypassing version of ensure_directory()"""
+    if not WRITE_SUPPORT:
+        raise IOError('"os.mkdir" not supported on this platform.')
+    dirname, filename = split(path)
+    if dirname and filename and not isdir(dirname):
+        _bypass_ensure_directory(dirname)
+        try:
+            mkdir(dirname, 0o755)
+        except FileExistsError:
+            pass
+
+
+def split_sections(s):
+    """Split a string or iterable thereof into (section, content) pairs
+
+    Each ``section`` is a stripped version of the section header ("[section]")
+    and each ``content`` is a list of stripped lines excluding blank lines and
+    comment-only lines.  If there are any such lines before the first section
+    header, they're returned in a first ``section`` of ``None``.
+    """
+    section = None
+    content = []
+    for line in yield_lines(s):
+        if line.startswith("["):
+            if line.endswith("]"):
+                if section or content:
+                    yield section, content
+                section = line[1:-1].strip()
+                content = []
+            else:
+                raise ValueError("Invalid section heading", line)
+        else:
+            content.append(line)
+
+    # wrap up last segment
+    yield section, content
+
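+# Illustrative usage (not from the upstream module); the section and entry
+# names are placeholders:
+#
+#     >>> list(split_sections("[console_scripts]\nmytool = mypkg.cli:main"))
+#     [('console_scripts', ['mytool = mypkg.cli:main'])]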
+
+def _mkstemp(*args, **kw):
+    old_open = os.open
+    try:
+        # temporarily bypass sandboxing
+        os.open = os_open
+        return tempfile.mkstemp(*args, **kw)
+    finally:
+        # and then put it back
+        os.open = old_open
+
+
+# Silence the PEP440Warning by default, so that end users don't get hit by it
+# randomly just because they use pkg_resources. We want to append the rule
+# because we want earlier uses of filterwarnings to take precedence over this
+# one.
+warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
+
+
+# from jaraco.functools 1.3
+def _call_aside(f, *args, **kwargs):
+    f(*args, **kwargs)
+    return f
+
+
+@_call_aside
+def _initialize(g=globals()):
+    "Set up global resource manager (deliberately not state-saved)"
+    manager = ResourceManager()
+    g['_manager'] = manager
+    g.update(
+        (name, getattr(manager, name))
+        for name in dir(manager)
+        if not name.startswith('_')
+    )
+
+
+@_call_aside
+def _initialize_master_working_set():
+    """
+    Prepare the master working set and make the ``require()``
+    API available.
+
+    This function has explicit effects on the global state
+    of pkg_resources. It is intended to be invoked once at
+    the initialization of this module.
+
+    Invocation by other packages is unsupported and done
+    at their own risk.
+    """
+    working_set = WorkingSet._build_master()
+    _declare_state('object', working_set=working_set)
+
+    require = working_set.require
+    iter_entry_points = working_set.iter_entry_points
+    add_activation_listener = working_set.subscribe
+    run_script = working_set.run_script
+    # backward compatibility
+    run_main = run_script
+    # Activate all distributions already on sys.path with replace=False and
+    # ensure that all distributions added to the working set in the future
+    # (e.g. by calling ``require()``) will get activated as well,
+    # with higher priority (replace=True).
+    tuple(
+        dist.activate(replace=False)
+        for dist in working_set
+    )
+    add_activation_listener(
+        lambda dist: dist.activate(replace=True),
+        existing=False,
+    )
+    working_set.entries = []
+    # match order
+    list(map(working_set.add_entry, sys.path))
+    globals().update(locals())
+
+
+class PkgResourcesDeprecationWarning(Warning):
+    """
+    Base class for warning about deprecations in ``pkg_resources``
+
+    This class is not derived from ``DeprecationWarning``, and as such is
+    visible by default.
+    """
diff --git a/venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 00000000..ee77fab5
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-36.pyc
new file mode 100644
index 00000000..fdbfb6b6
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-36.pyc differ
diff --git a/env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/py31compat.py b/venv/Lib/site-packages/pip/_vendor/pkg_resources/py31compat.py
similarity index 100%
rename from env/lib/python2.7/site-packages/pip/_vendor/pkg_resources/py31compat.py
rename to venv/Lib/site-packages/pip/_vendor/pkg_resources/py31compat.py
diff --git a/env/lib/python2.7/site-packages/pip/_vendor/progress/__init__.py b/venv/Lib/site-packages/pip/_vendor/progress/__init__.py
similarity index 100%
rename from env/lib/python2.7/site-packages/pip/_vendor/progress/__init__.py
rename to venv/Lib/site-packages/pip/_vendor/progress/__init__.py
diff --git a/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 00000000..a0312012
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/__init__.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/bar.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/bar.cpython-36.pyc
new file mode 100644
index 00000000..a5f94701
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/bar.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/counter.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/counter.cpython-36.pyc
new file mode 100644
index 00000000..78ba2cf8
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/counter.cpython-36.pyc differ
diff --git a/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/spinner.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/spinner.cpython-36.pyc
new file mode 100644
index 00000000..d2927c61
Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/progress/__pycache__/spinner.cpython-36.pyc differ
diff --git a/env/lib/python2.7/site-packages/pip/_vendor/progress/bar.py b/venv/Lib/site-packages/pip/_vendor/progress/bar.py
similarity index 100%
rename from env/lib/python2.7/site-packages/pip/_vendor/progress/bar.py
rename to venv/Lib/site-packages/pip/_vendor/progress/bar.py
diff --git a/env/lib/python2.7/site-packages/pip/_vendor/progress/counter.py b/venv/Lib/site-packages/pip/_vendor/progress/counter.py
similarity index 100%
rename from env/lib/python2.7/site-packages/pip/_vendor/progress/counter.py
rename to venv/Lib/site-packages/pip/_vendor/progress/counter.py
diff --git a/env/lib/python2.7/site-packages/pip/_vendor/progress/spinner.py b/venv/Lib/site-packages/pip/_vendor/progress/spinner.py
similarity index 100%
rename from env/lib/python2.7/site-packages/pip/_vendor/progress/spinner.py
rename to venv/Lib/site-packages/pip/_vendor/progress/spinner.py
diff --git a/venv/Lib/site-packages/pip/_vendor/pyparsing.py b/venv/Lib/site-packages/pip/_vendor/pyparsing.py
new file mode 100644
index 00000000..7ebc7eb9
--- /dev/null
+++ b/venv/Lib/site-packages/pip/_vendor/pyparsing.py
@@ -0,0 +1,7107 @@
+# -*- coding: utf-8 -*-
+# module pyparsing.py
+#
+# Copyright (c) 2003-2019  Paul T. McGuire
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = \
+"""
+pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
+
+The pyparsing module is an alternative approach to creating and
+executing simple grammars, vs. the traditional lex/yacc approach, or the
+use of regular expressions.  With pyparsing, you don't need to learn
+a new syntax for defining grammars or matching expressions - the parsing
+module provides a library of classes that you use to construct the
+grammar directly in Python.
+
+Here is a program to parse "Hello, World!" (or any greeting of the form
+``", !"``), built up using :class:`Word`,
+:class:`Literal`, and :class:`And` elements
+(the :class:`'+'` operators create :class:`And` expressions,
+and the strings are auto-converted to :class:`Literal` expressions)::
+
+    from pip._vendor.pyparsing import Word, alphas
+
+    # define grammar of a greeting
+    greet = Word(alphas) + "," + Word(alphas) + "!"
+
+    hello = "Hello, World!"
+    print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+    Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the
+self-explanatory class names, and the use of '+', '|' and '^' operators.
+
+The :class:`ParseResults` object returned from
+:class:`ParserElement.parseString` can be
+accessed as a nested list, a dictionary, or an object with named
+attributes.
+
+The pyparsing module handles some of the problems that are typically
+vexing when writing text parsers:
+
+  - extra or missing whitespace (the above program will also handle
+    "Hello,World!", "Hello  ,  World  !", etc.)
+  - quoted strings
+  - embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes :class:`ParserElement` and :class:`ParseResults` to
+see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+
+ - construct literal match expressions from :class:`Literal` and
+   :class:`CaselessLiteral` classes
+ - construct character word-group expressions using the :class:`Word`
+   class
+ - see how to create repetitive expressions using :class:`ZeroOrMore`
+   and :class:`OneOrMore` classes
+ - use :class:`'+'`, :class:`'|'`, :class:`'^'`,
+   and :class:`'&'` operators to combine simple expressions into
+   more complex ones
+ - associate names with your parsed results using
+   :class:`ParserElement.setResultsName`
+ - access the parsed data, which is returned as a :class:`ParseResults`
+   object
+ - find some helpful expression short-cuts like :class:`delimitedList`
+   and :class:`oneOf`
+ - find more useful common expressions in the :class:`pyparsing_common`
+   namespace class
+"""
+
+__version__ = "2.4.7"
+__versionTime__ = "30 Mar 2020 00:43 UTC"
+__author__ = "Paul McGuire "
+
+import string
+from weakref import ref as wkref
+import copy
+import sys
+import warnings
+import re
+import sre_constants
+import collections
+import pprint
+import traceback
+import types
+from datetime import datetime
+from operator import itemgetter
+import itertools
+from functools import wraps
+from contextlib import contextmanager
+
+try:
+    # Python 3
+    from itertools import filterfalse
+except ImportError:
+    from itertools import ifilterfalse as filterfalse
+
+try:
+    from _thread import RLock
+except ImportError:
+    from threading import RLock
+
+try:
+    # Python 3
+    from collections.abc import Iterable
+    from collections.abc import MutableMapping, Mapping
+except ImportError:
+    # Python 2.7
+    from collections import Iterable
+    from collections import MutableMapping, Mapping
+
+try:
+    from collections import OrderedDict as _OrderedDict
+except ImportError:
+    try:
+        from ordereddict import OrderedDict as _OrderedDict
+    except ImportError:
+        _OrderedDict = None
+
+try:
+    from types import SimpleNamespace
+except ImportError:
+    class SimpleNamespace: pass
+
+# version compatibility configuration
+__compat__ = SimpleNamespace()
+__compat__.__doc__ = """
+    A cross-version compatibility configuration for pyparsing features that will be
+    released in a future version. By setting values in this configuration to True,
+    those features can be enabled in prior versions for compatibility development
+    and testing.
+
+     - collect_all_And_tokens - flag to enable fix for Issue #63 that fixes erroneous grouping
+       of results names when an And expression is nested within an Or or MatchFirst; set to
+       True to enable bugfix released in pyparsing 2.3.0, or False to preserve
+       pre-2.3.0 handling of named results
+"""
+__compat__.collect_all_And_tokens = True
+
+__diag__ = SimpleNamespace()
+__diag__.__doc__ = """
+Diagnostic configuration (all default to False)
+     - warn_multiple_tokens_in_named_alternation - flag to enable warnings when a results
+       name is defined on a MatchFirst or Or expression with one or more And subexpressions
+       (only warns if __compat__.collect_all_And_tokens is False)
+     - warn_ungrouped_named_tokens_in_collection - flag to enable warnings when a results
+       name is defined on a containing expression with ungrouped subexpressions that also
+       have results names
+     - warn_name_set_on_empty_Forward - flag to enable warnings when a Forward is defined
+       with a results name, but has no contents defined
+     - warn_on_multiple_string_args_to_oneof - flag to enable warnings when oneOf is
+       incorrectly called with multiple str arguments
+     - enable_debug_on_named_expressions - flag to auto-enable debug on all subsequent
+       calls to ParserElement.setName()
+"""
+__diag__.warn_multiple_tokens_in_named_alternation = False
+__diag__.warn_ungrouped_named_tokens_in_collection = False
+__diag__.warn_name_set_on_empty_Forward = False
+__diag__.warn_on_multiple_string_args_to_oneof = False
+__diag__.enable_debug_on_named_expressions = False
+__diag__._all_names = [nm for nm in vars(__diag__) if nm.startswith("enable_") or nm.startswith("warn_")]
+
+def _enable_all_warnings():
+    __diag__.warn_multiple_tokens_in_named_alternation = True
+    __diag__.warn_ungrouped_named_tokens_in_collection = True
+    __diag__.warn_name_set_on_empty_Forward = True
+    __diag__.warn_on_multiple_string_args_to_oneof = True
+__diag__.enable_all_warnings = _enable_all_warnings
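+
+# Editor's sketch (not upstream code): the __diag__ flags are plain attributes,
+# so the extra diagnostics can be opted into one at a time or all at once:
+#
+#     from pip._vendor import pyparsing as pp
+#     pp.__diag__.warn_name_set_on_empty_Forward = True   # a single diagnostic
+#     pp.__diag__.enable_all_warnings()                    # or everything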
+
+
+__all__ = ['__version__', '__versionTime__', '__author__', '__compat__', '__diag__',
+           'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
+           'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
+           'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
+           'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
+           'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
+           'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter',
+           'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char',
+           'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
+           'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
+           'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
+           'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
+           'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
+           'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
+           'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
+           'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
+           'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
+           'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation', 'locatedExpr', 'withClass',
+           'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set',
+           'conditionAsParseAction', 're',
+           ]
+
+system_version = tuple(sys.version_info)[:3]
+PY_3 = system_version[0] == 3
+if PY_3:
+    _MAX_INT = sys.maxsize
+    basestring = str
+    unichr = chr
+    unicode = str
+    _ustr = str
+
+    # build list of single arg builtins, that can be used as parse actions
+    singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
+
+else:
+    _MAX_INT = sys.maxint
+    range = xrange
+
+    def _ustr(obj):
+        """Drop-in replacement for str(obj) that tries to be Unicode
+        friendly. It first tries str(obj). If that fails with
+        a UnicodeEncodeError, then it tries unicode(obj). It then
+        < returns the unicode object | encodes it with the default
+        encoding | ... >.
+        """
+        if isinstance(obj, unicode):
+            return obj
+
+        try:
+            # If this works, then _ustr(obj) has the same behaviour as str(obj), so
+            # it won't break any existing code.
+            return str(obj)
+
+        except UnicodeEncodeError:
+            # Else encode it
+            ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
+            xmlcharref = Regex(r'&#\d+;')
+            xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
+            return xmlcharref.transformString(ret)
+
+    # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
+    singleArgBuiltins = []
+    import __builtin__
+
+    for fname in "sum len sorted reversed list tuple set any all min max".split():
+        try:
+            singleArgBuiltins.append(getattr(__builtin__, fname))
+        except AttributeError:
+            continue
+
+_generatorType = type((y for y in range(1)))
+
+def _xml_escape(data):
+    """Escape &, <, >, ", ', etc. in a string of data."""
+
+    # ampersand must be replaced first
+    from_symbols = '&><"\''
+    to_symbols = ('&' + s + ';' for s in "amp gt lt quot apos".split())
+    for from_, to_ in zip(from_symbols, to_symbols):
+        data = data.replace(from_, to_)
+    return data
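+
+# Editor's sketch: _xml_escape replaces the five XML-significant characters
+# with character entities (ampersand first, so it is not double-escaped), e.g.
+#
+#     _xml_escape('a < b & "c"')   # -> 'a &lt; b &amp; &quot;c&quot;'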
+
+alphas = string.ascii_uppercase + string.ascii_lowercase
+nums = "0123456789"
+hexnums = nums + "ABCDEFabcdef"
+alphanums = alphas + nums
+_bslash = chr(92)
+printables = "".join(c for c in string.printable if c not in string.whitespace)
+
+
+def conditionAsParseAction(fn, message=None, fatal=False):
+    msg = message if message is not None else "failed user-defined condition"
+    exc_type = ParseFatalException if fatal else ParseException
+    fn = _trim_arity(fn)
+
+    @wraps(fn)
+    def pa(s, l, t):
+        if not bool(fn(s, l, t)):
+            raise exc_type(s, l, msg)
+
+    return pa
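+
+# Editor's sketch (illustrative only): conditionAsParseAction wraps a boolean
+# predicate so it can be attached like any other parse action; the wrapper
+# raises ParseException (or ParseFatalException when fatal=True) whenever the
+# predicate returns a falsy value.
+#
+#     small_int = Word(nums).addParseAction(
+#         conditionAsParseAction(lambda t: int(t[0]) <= 255, message="value > 255"))
+#     small_int.parseString("42")    # -> ['42']
+#     small_int.parseString("999")   # raises ParseException: value > 255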
+
+class ParseBaseException(Exception):
+    """base exception class for all parsing runtime exceptions"""
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__(self, pstr, loc=0, msg=None, elem=None):
+        self.loc = loc
+        if msg is None:
+            self.msg = pstr
+            self.pstr = ""
+        else:
+            self.msg = msg
+            self.pstr = pstr
+        self.parserElement = elem
+        self.args = (pstr, loc, msg)
+
+    @classmethod
+    def _from_exception(cls, pe):
+        """
+        internal factory method to simplify creating one type of ParseException
+        from another - avoids having __init__ signature conflicts among subclasses
+        """
+        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
+
+    def __getattr__(self, aname):
+        """supported attributes by name are:
+           - lineno - returns the line number of the exception text
+           - col - returns the column number of the exception text
+           - line - returns the line containing the exception text
+        """
+        if aname == "lineno":
+            return lineno(self.loc, self.pstr)
+        elif aname in ("col", "column"):
+            return col(self.loc, self.pstr)
+        elif aname == "line":
+            return line(self.loc, self.pstr)
+        else:
+            raise AttributeError(aname)
+
+    def __str__(self):
+        if self.pstr:
+            if self.loc >= len(self.pstr):
+                foundstr = ', found end of text'
+            else:
+                foundstr = (', found %r' % self.pstr[self.loc:self.loc + 1]).replace(r'\\', '\\')
+        else:
+            foundstr = ''
+        return ("%s%s  (at char %d), (line:%d, col:%d)" %
+                   (self.msg, foundstr, self.loc, self.lineno, self.column))
+    def __repr__(self):
+        return _ustr(self)
+    def markInputline(self, markerString=">!<"):
+        """Extracts the exception line from the input string, and marks
+           the location of the exception with a special symbol.
+        """
+        line_str = self.line
+        line_column = self.column - 1
+        if markerString:
+            line_str = "".join((line_str[:line_column],
+                                markerString, line_str[line_column:]))
+        return line_str.strip()
+    def __dir__(self):
+        return "lineno col line".split() + dir(type(self))
+
+class ParseException(ParseBaseException):
+    """
+    Exception thrown when parse expressions don't match class;
+    supported attributes by name are:
+    - lineno - returns the line number of the exception text
+    - col - returns the column number of the exception text
+    - line - returns the line containing the exception text
+
+    Example::
+
+        try:
+            Word(nums).setName("integer").parseString("ABC")
+        except ParseException as pe:
+            print(pe)
+            print("column: {}".format(pe.col))
+
+    prints::
+
+       Expected integer (at char 0), (line:1, col:1)
+        column: 1
+
+    """
+
+    @staticmethod
+    def explain(exc, depth=16):
+        """
+        Method to take an exception and translate the Python internal traceback into a list
+        of the pyparsing expressions that caused the exception to be raised.
+
+        Parameters:
+
+         - exc - exception raised during parsing (need not be a ParseException, in support
+           of Python exceptions that might be raised in a parse action)
+         - depth (default=16) - number of levels back in the stack trace to list expression
+           and function names; if None, the full stack trace names will be listed; if 0, only
+           the failing input line, marker, and exception string will be shown
+
+        Returns a multi-line string listing the ParserElements and/or function names in the
+        exception's stack trace.
+
+        Note: the diagnostic output will include string representations of the expressions
+        that failed to parse. These representations will be more helpful if you use `setName` to
+        give identifiable names to your expressions. Otherwise they will use the default string
+        forms, which may be cryptic to read.
+
+        explain() is only supported under Python 3.
+        """
+        import inspect
+
+        if depth is None:
+            depth = sys.getrecursionlimit()
+        ret = []
+        if isinstance(exc, ParseBaseException):
+            ret.append(exc.line)
+            ret.append(' ' * (exc.col - 1) + '^')
+        ret.append("{0}: {1}".format(type(exc).__name__, exc))
+
+        if depth > 0:
+            callers = inspect.getinnerframes(exc.__traceback__, context=depth)
+            seen = set()
+            for i, ff in enumerate(callers[-depth:]):
+                frm = ff[0]
+
+                f_self = frm.f_locals.get('self', None)
+                if isinstance(f_self, ParserElement):
+                    if frm.f_code.co_name not in ('parseImpl', '_parseNoCache'):
+                        continue
+                    if f_self in seen:
+                        continue
+                    seen.add(f_self)
+
+                    self_type = type(f_self)
+                    ret.append("{0}.{1} - {2}".format(self_type.__module__,
+                                                      self_type.__name__,
+                                                      f_self))
+                elif f_self is not None:
+                    self_type = type(f_self)
+                    ret.append("{0}.{1}".format(self_type.__module__,
+                                                self_type.__name__))
+                else:
+                    code = frm.f_code
+                    if code.co_name in ('wrapper', '<module>'):
+                        continue
+
+                    ret.append("{0}".format(code.co_name))
+
+                depth -= 1
+                if not depth:
+                    break
+
+        return '\n'.join(ret)
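+
+    # Editor's sketch (illustrative only): typical use of explain() is to
+    # pretty-print the expression stack after a failed parse, e.g.
+    #
+    #     try:
+    #         Word(nums).setName("integer").parseString("ABC")
+    #     except ParseException as pe:
+    #         print(ParseException.explain(pe))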
+
+
+class ParseFatalException(ParseBaseException):
+    """user-throwable exception thrown when inconsistent parse content
+       is found; stops all parsing immediately"""
+    pass
+
+class ParseSyntaxException(ParseFatalException):
+    """just like :class:`ParseFatalException`, but thrown internally
+    when an :class:`ErrorStop` ('-' operator) indicates
+    that parsing is to stop immediately because an unbacktrackable
+    syntax error has been found.
+    """
+    pass
+
+#~ class ReparseException(ParseBaseException):
+    #~ """Experimental class - parse actions can raise this exception to cause
+       #~ pyparsing to reparse the input string:
+        #~ - with a modified input string, and/or
+        #~ - with a modified start location
+       #~ Set the values of the ReparseException in the constructor, and raise the
+       #~ exception in a parse action to cause pyparsing to use the new string/location.
+       #~ Setting the values as None causes no change to be made.
+       #~ """
+    #~ def __init_( self, newstring, restartLoc ):
+        #~ self.newParseText = newstring
+        #~ self.reparseLoc = restartLoc
+
+class RecursiveGrammarException(Exception):
+    """exception thrown by :class:`ParserElement.validate` if the
+    grammar could be improperly recursive
+    """
+    def __init__(self, parseElementList):
+        self.parseElementTrace = parseElementList
+
+    def __str__(self):
+        return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+class _ParseResultsWithOffset(object):
+    def __init__(self, p1, p2):
+        self.tup = (p1, p2)
+    def __getitem__(self, i):
+        return self.tup[i]
+    def __repr__(self):
+        return repr(self.tup[0])
+    def setOffset(self, i):
+        self.tup = (self.tup[0], i)
+
+class ParseResults(object):
+    """Structured parse results, to provide multiple means of access to
+    the parsed data:
+
+       - as a list (``len(results)``)
+       - by list index (``results[0], results[1]``, etc.)
+       - by attribute (``results.<resultsName>`` - see :class:`ParserElement.setResultsName`)
+
+    Example::
+
+        integer = Word(nums)
+        date_str = (integer.setResultsName("year") + '/'
+                        + integer.setResultsName("month") + '/'
+                        + integer.setResultsName("day"))
+        # equivalent form:
+        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+        # parseString returns a ParseResults object
+        result = date_str.parseString("1999/12/31")
+
+        def test(s, fn=repr):
+            print("%s -> %s" % (s, fn(eval(s))))
+        test("list(result)")
+        test("result[0]")
+        test("result['month']")
+        test("result.day")
+        test("'month' in result")
+        test("'minutes' in result")
+        test("result.dump()", str)
+
+    prints::
+
+        list(result) -> ['1999', '/', '12', '/', '31']
+        result[0] -> '1999'
+        result['month'] -> '12'
+        result.day -> '31'
+        'month' in result -> True
+        'minutes' in result -> False
+        result.dump() -> ['1999', '/', '12', '/', '31']
+        - day: 31
+        - month: 12
+        - year: 1999
+    """
+    def __new__(cls, toklist=None, name=None, asList=True, modal=True):
+        if isinstance(toklist, cls):
+            return toklist
+        retobj = object.__new__(cls)
+        retobj.__doinit = True
+        return retobj
+
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__(self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance):
+        if self.__doinit:
+            self.__doinit = False
+            self.__name = None
+            self.__parent = None
+            self.__accumNames = {}
+            self.__asList = asList
+            self.__modal = modal
+            if toklist is None:
+                toklist = []
+            if isinstance(toklist, list):
+                self.__toklist = toklist[:]
+            elif isinstance(toklist, _generatorType):
+                self.__toklist = list(toklist)
+            else:
+                self.__toklist = [toklist]
+            self.__tokdict = dict()
+
+        if name is not None and name:
+            if not modal:
+                self.__accumNames[name] = 0
+            if isinstance(name, int):
+                name = _ustr(name)  # will always return a str, but use _ustr for consistency
+            self.__name = name
+            if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None, '', [])):
+                if isinstance(toklist, basestring):
+                    toklist = [toklist]
+                if asList:
+                    if isinstance(toklist, ParseResults):
+                        self[name] = _ParseResultsWithOffset(ParseResults(toklist.__toklist), 0)
+                    else:
+                        self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]), 0)
+                    self[name].__name = name
+                else:
+                    try:
+                        self[name] = toklist[0]
+                    except (KeyError, TypeError, IndexError):
+                        self[name] = toklist
+
+    def __getitem__(self, i):
+        if isinstance(i, (int, slice)):
+            return self.__toklist[i]
+        else:
+            if i not in self.__accumNames:
+                return self.__tokdict[i][-1][0]
+            else:
+                return ParseResults([v[0] for v in self.__tokdict[i]])
+
+    def __setitem__(self, k, v, isinstance=isinstance):
+        if isinstance(v, _ParseResultsWithOffset):
+            self.__tokdict[k] = self.__tokdict.get(k, list()) + [v]
+            sub = v[0]
+        elif isinstance(k, (int, slice)):
+            self.__toklist[k] = v
+            sub = v
+        else:
+            self.__tokdict[k] = self.__tokdict.get(k, list()) + [_ParseResultsWithOffset(v, 0)]
+            sub = v
+        if isinstance(sub, ParseResults):
+            sub.__parent = wkref(self)
+
+    def __delitem__(self, i):
+        if isinstance(i, (int, slice)):
+            mylen = len(self.__toklist)
+            del self.__toklist[i]
+
+            # convert int to slice
+            if isinstance(i, int):
+                if i < 0:
+                    i += mylen
+                i = slice(i, i + 1)
+            # get removed indices
+            removed = list(range(*i.indices(mylen)))
+            removed.reverse()
+            # fixup indices in token dictionary
+            for name, occurrences in self.__tokdict.items():
+                for j in removed:
+                    for k, (value, position) in enumerate(occurrences):
+                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
+        else:
+            del self.__tokdict[i]
+
+    def __contains__(self, k):
+        return k in self.__tokdict
+
+    def __len__(self):
+        return len(self.__toklist)
+
+    def __bool__(self):
+        return (not not self.__toklist)
+    __nonzero__ = __bool__
+
+    def __iter__(self):
+        return iter(self.__toklist)
+
+    def __reversed__(self):
+        return iter(self.__toklist[::-1])
+
+    def _iterkeys(self):
+        if hasattr(self.__tokdict, "iterkeys"):
+            return self.__tokdict.iterkeys()
+        else:
+            return iter(self.__tokdict)
+
+    def _itervalues(self):
+        return (self[k] for k in self._iterkeys())
+
+    def _iteritems(self):
+        return ((k, self[k]) for k in self._iterkeys())
+
+    if PY_3:
+        keys = _iterkeys
+        """Returns an iterator of all named result keys."""
+
+        values = _itervalues
+        """Returns an iterator of all named result values."""
+
+        items = _iteritems
+        """Returns an iterator of all named result key-value tuples."""
+
+    else:
+        iterkeys = _iterkeys
+        """Returns an iterator of all named result keys (Python 2.x only)."""
+
+        itervalues = _itervalues
+        """Returns an iterator of all named result values (Python 2.x only)."""
+
+        iteritems = _iteritems
+        """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
+
+        def keys(self):
+            """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iterkeys())
+
+        def values(self):
+            """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.itervalues())
+
+        def items(self):
+            """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iteritems())
+
+    def haskeys(self):
+        """Since keys() returns an iterator, this method is helpful in bypassing
+           code that looks for the existence of any defined results names."""
+        return bool(self.__tokdict)
+
+    def pop(self, *args, **kwargs):
+        """
+        Removes and returns item at specified index (default= ``last``).
+        Supports both ``list`` and ``dict`` semantics for ``pop()``. If
+        passed no argument or an integer argument, it will use ``list``
+        semantics and pop tokens from the list of parsed tokens. If passed
+        a non-integer argument (most likely a string), it will use ``dict``
+        semantics and pop the corresponding value from any defined results
+        names. A second default return value argument is supported, just as in
+        ``dict.pop()``.
+
+        Example::
+
+            def remove_first(tokens):
+                tokens.pop(0)
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
+
+            label = Word(alphas)
+            patt = label("LABEL") + OneOrMore(Word(nums))
+            print(patt.parseString("AAB 123 321").dump())
+
+            # Use pop() in a parse action to remove named result (note that corresponding value is not
+            # removed from list form of results)
+            def remove_LABEL(tokens):
+                tokens.pop("LABEL")
+                return tokens
+            patt.addParseAction(remove_LABEL)
+            print(patt.parseString("AAB 123 321").dump())
+
+        prints::
+
+            ['AAB', '123', '321']
+            - LABEL: AAB
+
+            ['AAB', '123', '321']
+        """
+        if not args:
+            args = [-1]
+        for k, v in kwargs.items():
+            if k == 'default':
+                args = (args[0], v)
+            else:
+                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
+        if (isinstance(args[0], int)
+                or len(args) == 1
+                or args[0] in self):
+            index = args[0]
+            ret = self[index]
+            del self[index]
+            return ret
+        else:
+            defaultvalue = args[1]
+            return defaultvalue
+
+    def get(self, key, defaultValue=None):
+        """
+        Returns named result matching the given key, or if there is no
+        such name, then returns the given ``defaultValue`` or ``None`` if no
+        ``defaultValue`` is specified.
+
+        Similar to ``dict.get()``.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parseString("1999/12/31")
+            print(result.get("year")) # -> '1999'
+            print(result.get("hour", "not specified")) # -> 'not specified'
+            print(result.get("hour")) # -> None
+        """
+        if key in self:
+            return self[key]
+        else:
+            return defaultValue
+
+    def insert(self, index, insStr):
+        """
+        Inserts new element at location index in the list of parsed tokens.
+
+        Similar to ``list.insert()``.
+
+        Example::
+
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to insert the parse location in the front of the parsed results
+            def insert_locn(locn, tokens):
+                tokens.insert(0, locn)
+            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+        """
+        self.__toklist.insert(index, insStr)
+        # fixup indices in token dictionary
+        for name, occurrences in self.__tokdict.items():
+            for k, (value, position) in enumerate(occurrences):
+                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+    def append(self, item):
+        """
+        Add single element to end of ParseResults list of elements.
+
+        Example::
+
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to compute the sum of the parsed integers, and add it to the end
+            def append_sum(tokens):
+                tokens.append(sum(map(int, tokens)))
+            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+        """
+        self.__toklist.append(item)
+
+    def extend(self, itemseq):
+        """
+        Add sequence of elements to end of ParseResults list of elements.
+
+        Example::
+
+            patt = OneOrMore(Word(alphas))
+
+            # use a parse action to append the reverse of the matched strings, to make a palindrome
+            def make_palindrome(tokens):
+                tokens.extend(reversed([t[::-1] for t in tokens]))
+                return ''.join(tokens)
+            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+        """
+        if isinstance(itemseq, ParseResults):
+            self.__iadd__(itemseq)
+        else:
+            self.__toklist.extend(itemseq)
+
+    def clear(self):
+        """
+        Clear all elements and results names.
+        """
+        del self.__toklist[:]
+        self.__tokdict.clear()
+
+    def __getattr__(self, name):
+        try:
+            return self[name]
+        except KeyError:
+            return ""
+
+    def __add__(self, other):
+        ret = self.copy()
+        ret += other
+        return ret
+
+    def __iadd__(self, other):
+        if other.__tokdict:
+            offset = len(self.__toklist)
+            addoffset = lambda a: offset if a < 0 else a + offset
+            otheritems = other.__tokdict.items()
+            otherdictitems = [(k, _ParseResultsWithOffset(v[0], addoffset(v[1])))
+                              for k, vlist in otheritems for v in vlist]
+            for k, v in otherdictitems:
+                self[k] = v
+                if isinstance(v[0], ParseResults):
+                    v[0].__parent = wkref(self)
+
+        self.__toklist += other.__toklist
+        self.__accumNames.update(other.__accumNames)
+        return self
+
+    def __radd__(self, other):
+        if isinstance(other, int) and other == 0:
+            # useful for merging many ParseResults using sum() builtin
+            return self.copy()
+        else:
+            # this may raise a TypeError - so be it
+            return other + self
+
+    def __repr__(self):
+        return "(%s, %s)" % (repr(self.__toklist), repr(self.__tokdict))
+
+    def __str__(self):
+        return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+    def _asStringList(self, sep=''):
+        out = []
+        for item in self.__toklist:
+            if out and sep:
+                out.append(sep)
+            if isinstance(item, ParseResults):
+                out += item._asStringList()
+            else:
+                out.append(_ustr(item))
+        return out
+
+    def asList(self):
+        """
+        Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+        Example::
+
+            patt = OneOrMore(Word(alphas))
+            result = patt.parseString("sldkj lsdkj sldkj")
+            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+
+            # Use asList() to create an actual list
+            result_list = result.asList()
+            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+        """
+        return [res.asList() if isinstance(res, ParseResults) else res for res in self.__toklist]
+
+    def asDict(self):
+        """
+        Returns the named parse results as a nested dictionary.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parseString('12/31/1999')
+            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+
+            result_dict = result.asDict()
+            print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+            # even though a ParseResults supports dict-like access, sometime you just need to have a dict
+            import json
+            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+        """
+        if PY_3:
+            item_fn = self.items
+        else:
+            item_fn = self.iteritems
+
+        def toItem(obj):
+            if isinstance(obj, ParseResults):
+                if obj.haskeys():
+                    return obj.asDict()
+                else:
+                    return [toItem(v) for v in obj]
+            else:
+                return obj
+
+        return dict((k, toItem(v)) for k, v in item_fn())
+
+    def copy(self):
+        """
+        Returns a new copy of a :class:`ParseResults` object.
+        """
+        ret = ParseResults(self.__toklist)
+        ret.__tokdict = dict(self.__tokdict.items())
+        ret.__parent = self.__parent
+        ret.__accumNames.update(self.__accumNames)
+        ret.__name = self.__name
+        return ret
+
+    def asXML(self, doctag=None, namedItemsOnly=False, indent="", formatted=True):
+        """
+        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
+        """
+        nl = "\n"
+        out = []
+        namedItems = dict((v[1], k) for (k, vlist) in self.__tokdict.items()
+                          for v in vlist)
+        nextLevelIndent = indent + "  "
+
+        # collapse out indents if formatting is not desired
+        if not formatted:
+            indent = ""
+            nextLevelIndent = ""
+            nl = ""
+
+        selfTag = None
+        if doctag is not None:
+            selfTag = doctag
+        else:
+            if self.__name:
+                selfTag = self.__name
+
+        if not selfTag:
+            if namedItemsOnly:
+                return ""
+            else:
+                selfTag = "ITEM"
+
+        out += [nl, indent, "<", selfTag, ">"]
+
+        for i, res in enumerate(self.__toklist):
+            if isinstance(res, ParseResults):
+                if i in namedItems:
+                    out += [res.asXML(namedItems[i],
+                                      namedItemsOnly and doctag is None,
+                                      nextLevelIndent,
+                                      formatted)]
+                else:
+                    out += [res.asXML(None,
+                                      namedItemsOnly and doctag is None,
+                                      nextLevelIndent,
+                                      formatted)]
+            else:
+                # individual token, see if there is a name for it
+                resTag = None
+                if i in namedItems:
+                    resTag = namedItems[i]
+                if not resTag:
+                    if namedItemsOnly:
+                        continue
+                    else:
+                        resTag = "ITEM"
+                xmlBodyText = _xml_escape(_ustr(res))
+                out += [nl, nextLevelIndent, "<", resTag, ">",
+                        xmlBodyText,
+                                                "</", resTag, ">"]
+
+        out += [nl, indent, "</", selfTag, ">"]
+        return "".join(out)
+
+    def __lookup(self, sub):
+        for k, vlist in self.__tokdict.items():
+            for v, loc in vlist:
+                if sub is v:
+                    return k
+        return None
+
+    def getName(self):
+        r"""
+        Returns the results name for this token expression. Useful when several
+        different expressions might match at a particular location.
+
+        Example::
+
+            integer = Word(nums)
+            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+            house_number_expr = Suppress('#') + Word(nums, alphanums)
+            user_data = (Group(house_number_expr)("house_number")
+                        | Group(ssn_expr)("ssn")
+                        | Group(integer)("age"))
+            user_info = OneOrMore(user_data)
+
+            result = user_info.parseString("22 111-22-3333 #221B")
+            for item in result:
+                print(item.getName(), ':', item[0])
+
+        prints::
+
+            age : 22
+            ssn : 111-22-3333
+            house_number : 221B
+        """
+        if self.__name:
+            return self.__name
+        elif self.__parent:
+            par = self.__parent()
+            if par:
+                return par.__lookup(self)
+            else:
+                return None
+        elif (len(self) == 1
+              and len(self.__tokdict) == 1
+              and next(iter(self.__tokdict.values()))[0][1] in (0, -1)):
+            return next(iter(self.__tokdict.keys()))
+        else:
+            return None
+
+    def dump(self, indent='', full=True, include_list=True, _depth=0):
+        """
+        Diagnostic method for listing out the contents of
+        a :class:`ParseResults`. Accepts an optional ``indent`` argument so
+        that this string can be embedded in a nested display of other data.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parseString('12/31/1999')
+            print(result.dump())
+
+        prints::
+
+            ['12', '/', '31', '/', '1999']
+            - day: 1999
+            - month: 31
+            - year: 12
+        """
+        out = []
+        NL = '\n'
+        if include_list:
+            out.append(indent + _ustr(self.asList()))
+        else:
+            out.append('')
+
+        if full:
+            if self.haskeys():
+                items = sorted((str(k), v) for k, v in self.items())
+                for k, v in items:
+                    if out:
+                        out.append(NL)
+                    out.append("%s%s- %s: " % (indent, ('  ' * _depth), k))
+                    if isinstance(v, ParseResults):
+                        if v:
+                            out.append(v.dump(indent=indent, full=full, include_list=include_list, _depth=_depth + 1))
+                        else:
+                            out.append(_ustr(v))
+                    else:
+                        out.append(repr(v))
+            elif any(isinstance(vv, ParseResults) for vv in self):
+                v = self
+                for i, vv in enumerate(v):
+                    if isinstance(vv, ParseResults):
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,
+                                                            ('  ' * (_depth)),
+                                                            i,
+                                                            indent,
+                                                            ('  ' * (_depth + 1)),
+                                                            vv.dump(indent=indent,
+                                                                    full=full,
+                                                                    include_list=include_list,
+                                                                    _depth=_depth + 1)))
+                    else:
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,
+                                                            ('  ' * (_depth)),
+                                                            i,
+                                                            indent,
+                                                            ('  ' * (_depth + 1)),
+                                                            _ustr(vv)))
+
+        return "".join(out)
+
+    def pprint(self, *args, **kwargs):
+        """
+        Pretty-printer for parsed results as a list, using the
+        `pprint <https://docs.python.org/3/library/pprint.html>`_ module.
+        Accepts additional positional or keyword args as defined for
+        `pprint.pprint <https://docs.python.org/3/library/pprint.html#pprint.pprint>`_ .
+
+        Example::
+
+            ident = Word(alphas, alphanums)
+            num = Word(nums)
+            func = Forward()
+            term = ident | num | Group('(' + func + ')')
+            func <<= ident + Group(Optional(delimitedList(term)))
+            result = func.parseString("fna a,b,(fnb c,d,200),100")
+            result.pprint(width=40)
+
+        prints::
+
+            ['fna',
+             ['a',
+              'b',
+              ['(', 'fnb', ['c', 'd', '200'], ')'],
+              '100']]
+        """
+        pprint.pprint(self.asList(), *args, **kwargs)
+
+    # add support for pickle protocol
+    def __getstate__(self):
+        return (self.__toklist,
+                (self.__tokdict.copy(),
+                 self.__parent is not None and self.__parent() or None,
+                 self.__accumNames,
+                 self.__name))
+
+    def __setstate__(self, state):
+        self.__toklist = state[0]
+        self.__tokdict, par, inAccumNames, self.__name = state[1]
+        self.__accumNames = {}
+        self.__accumNames.update(inAccumNames)
+        if par is not None:
+            self.__parent = wkref(par)
+        else:
+            self.__parent = None
+
+    def __getnewargs__(self):
+        return self.__toklist, self.__name, self.__asList, self.__modal
+
+    def __dir__(self):
+        return dir(type(self)) + list(self.keys())
+
+    @classmethod
+    def from_dict(cls, other, name=None):
+        """
+        Helper classmethod to construct a ParseResults from a dict, preserving the
+        name-value relations as results names. If an optional 'name' argument is
+        given, a nested ParseResults will be returned
+        """
+        def is_iterable(obj):
+            try:
+                iter(obj)
+            except Exception:
+                return False
+            else:
+                if PY_3:
+                    return not isinstance(obj, (str, bytes))
+                else:
+                    return not isinstance(obj, basestring)
+
+        ret = cls([])
+        for k, v in other.items():
+            if isinstance(v, Mapping):
+                ret += cls.from_dict(v, name=k)
+            else:
+                ret += cls([v], name=k, asList=is_iterable(v))
+        if name is not None:
+            ret = cls([ret], name=name)
+        return ret
+
+MutableMapping.register(ParseResults)
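+
+# Editor's sketch: from_dict() mirrors a (possibly nested) mapping as named
+# results, which is handy for tests or for merging defaults into parsed data:
+#
+#     pr = ParseResults.from_dict({"year": "1999", "date": {"month": "12"}})
+#     pr["year"]           # -> '1999'
+#     pr["date"]["month"]  # -> '12'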
+
+def col (loc, strg):
+    """Returns current column within a string, counting newlines as line separators.
+   The first column is number 1.
+
+   Note: the default parsing behavior is to expand tabs in the input string
+   before starting the parsing process.  See
+   :class:`ParserElement.parseString` for more
+   information on parsing strings containing ``<TAB>`` s, and suggested
+   methods to maintain a consistent view of the parsed string, the parse
+   location, and line and column positions within the parsed string.
+   """
+    s = strg
+    return 1 if 0 < loc < len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
+
+def lineno(loc, strg):
+    """Returns current line number within a string, counting newlines as line separators.
+    The first line is number 1.
+
+    Note - the default parsing behavior is to expand tabs in the input string
+    before starting the parsing process.  See :class:`ParserElement.parseString`
+    for more information on parsing strings containing ``<TAB>`` s, and
+    suggested methods to maintain a consistent view of the parsed string, the
+    parse location, and line and column positions within the parsed string.
+    """
+    return strg.count("\n", 0, loc) + 1
+
+def line(loc, strg):
+    """Returns the line of text containing loc within a string, counting newlines as line separators.
+       """
+    lastCR = strg.rfind("\n", 0, loc)
+    nextCR = strg.find("\n", loc)
+    if nextCR >= 0:
+        return strg[lastCR + 1:nextCR]
+    else:
+        return strg[lastCR + 1:]
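+
+# Editor's sketch: col(), lineno() and line() all take (loc, strg) and are
+# convenient inside parse actions and error reporting, e.g.
+#
+#     data = "abc\ndef"
+#     loc = data.index("e")
+#     lineno(loc, data), col(loc, data), line(loc, data)   # -> (2, 2, 'def')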
+
+def _defaultStartDebugAction(instring, loc, expr):
+    print(("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % (lineno(loc, instring), col(loc, instring))))
+
+def _defaultSuccessDebugAction(instring, startloc, endloc, expr, toks):
+    print("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
+
+def _defaultExceptionDebugAction(instring, loc, expr, exc):
+    print("Exception raised:" + _ustr(exc))
+
+def nullDebugAction(*args):
+    """'Do-nothing' debug action, to suppress debugging output during parsing."""
+    pass
+
+# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
+#~ 'decorator to trim function calls to match the arity of the target'
+#~ def _trim_arity(func, maxargs=3):
+    #~ if func in singleArgBuiltins:
+        #~ return lambda s,l,t: func(t)
+    #~ limit = 0
+    #~ foundArity = False
+    #~ def wrapper(*args):
+        #~ nonlocal limit,foundArity
+        #~ while 1:
+            #~ try:
+                #~ ret = func(*args[limit:])
+                #~ foundArity = True
+                #~ return ret
+            #~ except TypeError:
+                #~ if limit == maxargs or foundArity:
+                    #~ raise
+                #~ limit += 1
+                #~ continue
+    #~ return wrapper
+
+# this version is Python 2.x-3.x cross-compatible
+'decorator to trim function calls to match the arity of the target'
+def _trim_arity(func, maxargs=2):
+    if func in singleArgBuiltins:
+        return lambda s, l, t: func(t)
+    limit = [0]
+    foundArity = [False]
+
+    # traceback return data structure changed in Py3.5 - normalize back to plain tuples
+    if system_version[:2] >= (3, 5):
+        def extract_stack(limit=0):
+            # special handling for Python 3.5.0 - extra deep call stack by 1
+            offset = -3 if system_version == (3, 5, 0) else -2
+            frame_summary = traceback.extract_stack(limit=-offset + limit - 1)[offset]
+            return [frame_summary[:2]]
+        def extract_tb(tb, limit=0):
+            frames = traceback.extract_tb(tb, limit=limit)
+            frame_summary = frames[-1]
+            return [frame_summary[:2]]
+    else:
+        extract_stack = traceback.extract_stack
+        extract_tb = traceback.extract_tb
+
+    # synthesize what would be returned by traceback.extract_stack at the call to
+    # user's parse action 'func', so that we don't incur call penalty at parse time
+
+    LINE_DIFF = 6
+    # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND
+    # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
+    this_line = extract_stack(limit=2)[-1]
+    pa_call_line_synth = (this_line[0], this_line[1] + LINE_DIFF)
+
+    def wrapper(*args):
+        while 1:
+            try:
+                ret = func(*args[limit[0]:])
+                foundArity[0] = True
+                return ret
+            except TypeError:
+                # re-raise TypeErrors if they did not come from our arity testing
+                if foundArity[0]:
+                    raise
+                else:
+                    try:
+                        tb = sys.exc_info()[-1]
+                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
+                            raise
+                    finally:
+                        try:
+                            del tb
+                        except NameError:
+                            pass
+
+                if limit[0] <= maxargs:
+                    limit[0] += 1
+                    continue
+                raise
+
+    # copy func name to wrapper for sensible debug output
+    func_name = ""
+    try:
+        func_name = getattr(func, '__name__',
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    wrapper.__name__ = func_name
+
+    return wrapper
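+
+# Editor's note (illustrative, not upstream code): _trim_arity is what allows
+# parse actions to be written with 0, 1, 2 or 3 parameters.  The wrapper keeps
+# dropping leading arguments from (s, loc, toks) until the call succeeds, so
+# all of the following are accepted (``integer`` stands for any ParserElement):
+#
+#     integer.setParseAction(lambda: None)
+#     integer.setParseAction(lambda toks: int(toks[0]))
+#     integer.setParseAction(lambda loc, toks: int(toks[0]))
+#     integer.setParseAction(lambda s, loc, toks: int(toks[0]))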
+
+
+class ParserElement(object):
+    """Abstract base level parser element class."""
+    DEFAULT_WHITE_CHARS = " \n\t\r"
+    verbose_stacktrace = False
+
+    @staticmethod
+    def setDefaultWhitespaceChars(chars):
+        r"""
+        Overrides the default whitespace chars
+
+        Example::
+
+            # default whitespace chars are space, <TAB> and newline
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
+
+            # change to just treat newline as significant
+            ParserElement.setDefaultWhitespaceChars(" \t")
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
+        """
+        ParserElement.DEFAULT_WHITE_CHARS = chars
+
+    @staticmethod
+    def inlineLiteralsUsing(cls):
+        """
+        Set class to be used for inclusion of string literals into a parser.
+
+        Example::
+
+            # default literal class used is Literal
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+
+            # change to Suppress
+            ParserElement.inlineLiteralsUsing(Suppress)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
+        """
+        ParserElement._literalStringClass = cls
+
+    @classmethod
+    def _trim_traceback(cls, tb):
+        while tb.tb_next:
+            tb = tb.tb_next
+        return tb
+
+    def __init__(self, savelist=False):
+        self.parseAction = list()
+        self.failAction = None
+        # ~ self.name = ""  # don't define self.name, let subclasses try/except upcall
+        self.strRepr = None
+        self.resultsName = None
+        self.saveAsList = savelist
+        self.skipWhitespace = True
+        self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS)
+        self.copyDefaultWhiteChars = True
+        self.mayReturnEmpty = False # used when checking for left-recursion
+        self.keepTabs = False
+        self.ignoreExprs = list()
+        self.debug = False
+        self.streamlined = False
+        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
+        self.errmsg = ""
+        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
+        self.debugActions = (None, None, None)  # custom debug actions
+        self.re = None
+        self.callPreparse = True # used to avoid redundant calls to preParse
+        self.callDuringTry = False
+
+    def copy(self):
+        """
+        Make a copy of this :class:`ParserElement`.  Useful for defining
+        different parse actions for the same parsing pattern, using copies of
+        the original parse element.
+
+        Example::
+
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            integerK = integer.copy().addParseAction(lambda toks: toks[0] * 1024) + Suppress("K")
+            integerM = integer.copy().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M")
+
+            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+
+        prints::
+
+            [5120, 100, 655360, 268435456]
+
+        Equivalent form of ``expr.copy()`` is just ``expr()``::
+
+            integerM = integer().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M")
+        """
+        cpy = copy.copy(self)
+        cpy.parseAction = self.parseAction[:]
+        cpy.ignoreExprs = self.ignoreExprs[:]
+        if self.copyDefaultWhiteChars:
+            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+        return cpy
+
+    def setName(self, name):
+        """
+        Define name for this expression, makes debugging and exception messages clearer.
+
+        Example::
+
+            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
+        """
+        self.name = name
+        self.errmsg = "Expected " + self.name
+        if __diag__.enable_debug_on_named_expressions:
+            self.setDebug()
+        return self
+
+    def setResultsName(self, name, listAllMatches=False):
+        """
+        Define name for referencing matching tokens as a nested attribute
+        of the returned parse results.
+        NOTE: this returns a *copy* of the original :class:`ParserElement` object;
+        this is so that the client can define a basic element, such as an
+        integer, and reference it in multiple places with different names.
+
+        You can also set results names using the abbreviated syntax,
+        ``expr("name")`` in place of ``expr.setResultsName("name")``
+        - see :class:`__call__`.
+
+        Example::
+
+            date_str = (integer.setResultsName("year") + '/'
+                        + integer.setResultsName("month") + '/'
+                        + integer.setResultsName("day"))
+
+            # equivalent form:
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+        """
+        return self._setResultsName(name, listAllMatches)
+
+    def _setResultsName(self, name, listAllMatches=False):
+        newself = self.copy()
+        if name.endswith("*"):
+            name = name[:-1]
+            listAllMatches = True
+        newself.resultsName = name
+        newself.modalResults = not listAllMatches
+        return newself
+
+    def setBreak(self, breakFlag=True):
+        """Method to invoke the Python pdb debugger when this element is
+           about to be parsed. Set ``breakFlag`` to True to enable, False to
+           disable.
+        """
+        if breakFlag:
+            _parseMethod = self._parse
+            def breaker(instring, loc, doActions=True, callPreParse=True):
+                import pdb
+                # this call to pdb.set_trace() is intentional, not a checkin error
+                pdb.set_trace()
+                return _parseMethod(instring, loc, doActions, callPreParse)
+            breaker._originalParseMethod = _parseMethod
+            self._parse = breaker
+        else:
+            if hasattr(self._parse, "_originalParseMethod"):
+                self._parse = self._parse._originalParseMethod
+        return self
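+    # Illustrative sketch (not part of the original pyparsing source; assumes the
+    # module-level Word and nums defined later in this file).  setBreak is typically
+    # used while debugging a single troublesome expression:
+    #
+    #     integer = Word(nums).setBreak()   # drop into pdb just before 'integer' is parsed
+    #     integer.parseString("123")        # pdb.set_trace() fires here
+    #     integer.setBreak(False)           # restore the original parse method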
+
+    def setParseAction(self, *fns, **kwargs):
+        """
+        Define one or more actions to perform when successfully matching parse element definition.
+        Parse action fn is a callable method with 0-3 arguments, called as ``fn(s, loc, toks)`` ,
+        ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where:
+
+        - s   = the original string being parsed (see note below)
+        - loc = the location of the matching substring
+        - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object
+
+        If the functions in fns modify the tokens, they can return them as the return
+        value from fn, and the modified list of tokens will replace the original.
+        Otherwise, fn does not need to return any value.
+
+        If None is passed as the parse action, all previously added parse actions for this
+        expression are cleared.
+
+        Optional keyword arguments:
+        - callDuringTry = (default= ``False``) indicate if parse action should be run during lookaheads and alternate testing
+
+        Note: the default parsing behavior is to expand tabs in the input string
+        before starting the parsing process.  See :class:`parseString` for more
+        information on parsing strings containing ``<TAB>`` s, and suggested
+        methods to maintain a consistent view of the parsed string, the parse
+        location, and line and column positions within the parsed string.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer + '/' + integer + '/' + integer
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+            # use parse action to convert to ints at parse time
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            date_str = integer + '/' + integer + '/' + integer
+
+            # note that integer fields are now ints, not strings
+            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
+        """
+        if list(fns) == [None,]:
+            self.parseAction = []
+        else:
+            if not all(callable(fn) for fn in fns):
+                raise TypeError("parse actions must be callable")
+            self.parseAction = list(map(_trim_arity, list(fns)))
+            self.callDuringTry = kwargs.get("callDuringTry", False)
+        return self
+
+    def addParseAction(self, *fns, **kwargs):
+        """
+        Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`.
+
+        See examples in :class:`copy`.
+        """
+        self.parseAction += list(map(_trim_arity, list(fns)))
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def addCondition(self, *fns, **kwargs):
+        """Add a boolean predicate function to expression's list of parse actions. See
+        :class:`setParseAction` for function call signatures. Unlike ``setParseAction``,
+        functions passed to ``addCondition`` need to return boolean success/fail of the condition.
+
+        Optional keyword arguments:
+        - message = define a custom message to be used in the raised exception
+        - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+
+        Example::
+
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            year_int = integer.copy()
+            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+            date_str = year_int + '/' + integer + '/' + integer
+
+            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+        """
+        for fn in fns:
+            self.parseAction.append(conditionAsParseAction(fn, message=kwargs.get('message'),
+                                                           fatal=kwargs.get('fatal', False)))
+
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def setFailAction(self, fn):
+        """Define action to perform if parsing fails at this expression.
+           Fail action fn is a callable function that takes the arguments
+           ``fn(s, loc, expr, err)`` where:
+           - s = string being parsed
+           - loc = location where expression match was attempted and failed
+           - expr = the parse expression that failed
+           - err = the exception thrown
+           The function returns no value.  It may throw :class:`ParseFatalException`
+           if it is desired to stop parsing immediately."""
+        self.failAction = fn
+        return self
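+    # Illustrative sketch (not part of the original source; the names below are made
+    # up for the example, and Word/Suppress/alphas/nums/col are module-level names
+    # defined elsewhere in this file).  A fail action receives the string, the failed
+    # location, the failing expression, and the raised exception:
+    #
+    #     def report_failure(s, loc, expr, err):
+    #         print("failed to match %s at column %d: %s" % (expr, col(loc, s), err))
+    #
+    #     key_value = Word(alphas) + Suppress('=') + Word(nums)
+    #     key_value.setFailAction(report_failure)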
+
+    def _skipIgnorables(self, instring, loc):
+        exprsFound = True
+        while exprsFound:
+            exprsFound = False
+            for e in self.ignoreExprs:
+                try:
+                    while 1:
+                        loc, dummy = e._parse(instring, loc)
+                        exprsFound = True
+                except ParseException:
+                    pass
+        return loc
+
+    def preParse(self, instring, loc):
+        if self.ignoreExprs:
+            loc = self._skipIgnorables(instring, loc)
+
+        if self.skipWhitespace:
+            wt = self.whiteChars
+            instrlen = len(instring)
+            while loc < instrlen and instring[loc] in wt:
+                loc += 1
+
+        return loc
+
+    def parseImpl(self, instring, loc, doActions=True):
+        return loc, []
+
+    def postParse(self, instring, loc, tokenlist):
+        return tokenlist
+
+    # ~ @profile
+    def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True):
+        TRY, MATCH, FAIL = 0, 1, 2
+        debugging = (self.debug)  # and doActions)
+
+        if debugging or self.failAction:
+            # ~ print ("Match", self, "at loc", loc, "(%d, %d)" % (lineno(loc, instring), col(loc, instring)))
+            if self.debugActions[TRY]:
+                self.debugActions[TRY](instring, loc, self)
+            try:
+                if callPreParse and self.callPreparse:
+                    preloc = self.preParse(instring, loc)
+                else:
+                    preloc = loc
+                tokensStart = preloc
+                if self.mayIndexError or preloc >= len(instring):
+                    try:
+                        loc, tokens = self.parseImpl(instring, preloc, doActions)
+                    except IndexError:
+                        raise ParseException(instring, len(instring), self.errmsg, self)
+                else:
+                    loc, tokens = self.parseImpl(instring, preloc, doActions)
+            except Exception as err:
+                # ~ print ("Exception raised:", err)
+                if self.debugActions[FAIL]:
+                    self.debugActions[FAIL](instring, tokensStart, self, err)
+                if self.failAction:
+                    self.failAction(instring, tokensStart, self, err)
+                raise
+        else:
+            if callPreParse and self.callPreparse:
+                preloc = self.preParse(instring, loc)
+            else:
+                preloc = loc
+            tokensStart = preloc
+            if self.mayIndexError or preloc >= len(instring):
+                try:
+                    loc, tokens = self.parseImpl(instring, preloc, doActions)
+                except IndexError:
+                    raise ParseException(instring, len(instring), self.errmsg, self)
+            else:
+                loc, tokens = self.parseImpl(instring, preloc, doActions)
+
+        tokens = self.postParse(instring, loc, tokens)
+
+        retTokens = ParseResults(tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults)
+        if self.parseAction and (doActions or self.callDuringTry):
+            if debugging:
+                try:
+                    for fn in self.parseAction:
+                        try:
+                            tokens = fn(instring, tokensStart, retTokens)
+                        except IndexError as parse_action_exc:
+                            exc = ParseException("exception raised in parse action")
+                            exc.__cause__ = parse_action_exc
+                            raise exc
+
+                        if tokens is not None and tokens is not retTokens:
+                            retTokens = ParseResults(tokens,
+                                                      self.resultsName,
+                                                      asList=self.saveAsList and isinstance(tokens, (ParseResults, list)),
+                                                      modal=self.modalResults)
+                except Exception as err:
+                    # ~ print "Exception raised in user parse action:", err
+                    if self.debugActions[FAIL]:
+                        self.debugActions[FAIL](instring, tokensStart, self, err)
+                    raise
+            else:
+                for fn in self.parseAction:
+                    try:
+                        tokens = fn(instring, tokensStart, retTokens)
+                    except IndexError as parse_action_exc:
+                        exc = ParseException("exception raised in parse action")
+                        exc.__cause__ = parse_action_exc
+                        raise exc
+
+                    if tokens is not None and tokens is not retTokens:
+                        retTokens = ParseResults(tokens,
+                                                  self.resultsName,
+                                                  asList=self.saveAsList and isinstance(tokens, (ParseResults, list)),
+                                                  modal=self.modalResults)
+        if debugging:
+            # ~ print ("Matched", self, "->", retTokens.asList())
+            if self.debugActions[MATCH]:
+                self.debugActions[MATCH](instring, tokensStart, loc, self, retTokens)
+
+        return loc, retTokens
+
+    def tryParse(self, instring, loc):
+        try:
+            return self._parse(instring, loc, doActions=False)[0]
+        except ParseFatalException:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+    def canParseNext(self, instring, loc):
+        try:
+            self.tryParse(instring, loc)
+        except (ParseException, IndexError):
+            return False
+        else:
+            return True
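+    # Illustrative sketch (not part of the original source; assumes the module-level
+    # Word and nums).  canParseNext is a non-destructive probe - it reports whether
+    # the expression would match at a location without consuming input:
+    #
+    #     integer = Word(nums)
+    #     integer.canParseNext("abc 123", 4)   # -> True  (digits start at offset 4)
+    #     integer.canParseNext("abc 123", 0)   # -> False (no digits at offset 0)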
+
+    class _UnboundedCache(object):
+        def __init__(self):
+            cache = {}
+            self.not_in_cache = not_in_cache = object()
+
+            def get(self, key):
+                return cache.get(key, not_in_cache)
+
+            def set(self, key, value):
+                cache[key] = value
+
+            def clear(self):
+                cache.clear()
+
+            def cache_len(self):
+                return len(cache)
+
+            self.get = types.MethodType(get, self)
+            self.set = types.MethodType(set, self)
+            self.clear = types.MethodType(clear, self)
+            self.__len__ = types.MethodType(cache_len, self)
+
+    if _OrderedDict is not None:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = _OrderedDict()
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(cache) > size:
+                        try:
+                            cache.popitem(False)
+                        except KeyError:
+                            pass
+
+                def clear(self):
+                    cache.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
+    else:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = {}
+                key_fifo = collections.deque([], size)
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(key_fifo) > size:
+                        cache.pop(key_fifo.popleft(), None)
+                    key_fifo.append(key)
+
+                def clear(self):
+                    cache.clear()
+                    key_fifo.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
+    # argument cache for optimizing repeated calls when backtracking through recursive expressions
+    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
+    packrat_cache_lock = RLock()
+    packrat_cache_stats = [0, 0]
+
+    # this method gets repeatedly called during backtracking with the same arguments -
+    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
+    def _parseCache(self, instring, loc, doActions=True, callPreParse=True):
+        HIT, MISS = 0, 1
+        lookup = (self, instring, loc, callPreParse, doActions)
+        with ParserElement.packrat_cache_lock:
+            cache = ParserElement.packrat_cache
+            value = cache.get(lookup)
+            if value is cache.not_in_cache:
+                ParserElement.packrat_cache_stats[MISS] += 1
+                try:
+                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
+                except ParseBaseException as pe:
+                    # cache a copy of the exception, without the traceback
+                    cache.set(lookup, pe.__class__(*pe.args))
+                    raise
+                else:
+                    cache.set(lookup, (value[0], value[1].copy()))
+                    return value
+            else:
+                ParserElement.packrat_cache_stats[HIT] += 1
+                if isinstance(value, Exception):
+                    raise value
+                return value[0], value[1].copy()
+
+    _parse = _parseNoCache
+
+    @staticmethod
+    def resetCache():
+        ParserElement.packrat_cache.clear()
+        ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
+    _packratEnabled = False
+    @staticmethod
+    def enablePackrat(cache_size_limit=128):
+        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
+           Repeated parse attempts at the same string location (which happens
+           often in many complex grammars) can immediately return a cached value,
+           instead of re-executing parsing/validating code.  Memoizing is done of
+           both valid results and parsing exceptions.
+
+           Parameters:
+
+           - cache_size_limit - (default= ``128``) - if an integer value is provided
+             will limit the size of the packrat cache; if None is passed, then
+             the cache size will be unbounded; if 0 is passed, the cache will
+             be effectively disabled.
+
+           This speedup may break existing programs that use parse actions that
+           have side-effects.  For this reason, packrat parsing is disabled when
+           you first import pyparsing.  To activate the packrat feature, your
+           program must call the class method :class:`ParserElement.enablePackrat`.
+           For best results, call ``enablePackrat()`` immediately after
+           importing pyparsing.
+
+           Example::
+
+               from pip._vendor import pyparsing
+               pyparsing.ParserElement.enablePackrat()
+        """
+        if not ParserElement._packratEnabled:
+            ParserElement._packratEnabled = True
+            if cache_size_limit is None:
+                ParserElement.packrat_cache = ParserElement._UnboundedCache()
+            else:
+                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+            ParserElement._parse = ParserElement._parseCache
+
+    def parseString(self, instring, parseAll=False):
+        """
+        Execute the parse expression with the given string.
+        This is the main interface to the client code, once the complete
+        expression has been built.
+
+        Returns the parsed data as a :class:`ParseResults` object, which may be
+        accessed as a list, or as a dict or object with attributes if the given parser
+        includes results names.
+
+        If you want the grammar to require that the entire input string be
+        successfully parsed, then set ``parseAll`` to True (equivalent to ending
+        the grammar with ``StringEnd()``).
+
+        Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string,
+        in order to report proper column numbers in parse actions.
+        If the input string contains tabs and
+        the grammar uses parse actions that use the ``loc`` argument to index into the
+        string being parsed, you can ensure you have a consistent view of the input
+        string by:
+
+        - calling ``parseWithTabs`` on your grammar before calling ``parseString``
+          (see :class:`parseWithTabs`)
+        - define your parse action using the full ``(s, loc, toks)`` signature, and
+          reference the input string using the parse action's ``s`` argument
+        - explicitly expand the tabs in your input string before calling
+          ``parseString``
+
+        Example::
+
+            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
+            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
+        """
+        ParserElement.resetCache()
+        if not self.streamlined:
+            self.streamline()
+            # ~ self.saveAsList = True
+        for e in self.ignoreExprs:
+            e.streamline()
+        if not self.keepTabs:
+            instring = instring.expandtabs()
+        try:
+            loc, tokens = self._parse(instring, 0)
+            if parseAll:
+                loc = self.preParse(instring, loc)
+                se = Empty() + StringEnd()
+                se._parse(instring, loc)
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
+        else:
+            return tokens
+
+    def scanString(self, instring, maxMatches=_MAX_INT, overlap=False):
+        """
+        Scan the input string for expression matches.  Each match will return the
+        matching tokens, start location, and end location.  May be called with optional
+        ``maxMatches`` argument, to clip scanning after 'n' matches are found.  If
+        ``overlap`` is specified, then overlapping matches will be reported.
+
+        Note that the start and end locations are reported relative to the string
+        being parsed.  See :class:`parseString` for more information on parsing
+        strings with embedded tabs.
+
+        Example::
+
+            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
+            print(source)
+            for tokens, start, end in Word(alphas).scanString(source):
+                print(' '*start + '^'*(end-start))
+                print(' '*start + tokens[0])
+
+        prints::
+
+            sldjf123lsdjjkf345sldkjf879lkjsfd987
+            ^^^^^
+            sldjf
+                    ^^^^^^^
+                    lsdjjkf
+                              ^^^^^^
+                              sldkjf
+                                       ^^^^^^
+                                       lkjsfd
+        """
+        if not self.streamlined:
+            self.streamline()
+        for e in self.ignoreExprs:
+            e.streamline()
+
+        if not self.keepTabs:
+            instring = _ustr(instring).expandtabs()
+        instrlen = len(instring)
+        loc = 0
+        preparseFn = self.preParse
+        parseFn = self._parse
+        ParserElement.resetCache()
+        matches = 0
+        try:
+            while loc <= instrlen and matches < maxMatches:
+                try:
+                    preloc = preparseFn(instring, loc)
+                    nextLoc, tokens = parseFn(instring, preloc, callPreParse=False)
+                except ParseException:
+                    loc = preloc + 1
+                else:
+                    if nextLoc > loc:
+                        matches += 1
+                        yield tokens, preloc, nextLoc
+                        if overlap:
+                            nextloc = preparseFn(instring, loc)
+                            if nextloc > loc:
+                                loc = nextLoc
+                            else:
+                                loc += 1
+                        else:
+                            loc = nextLoc
+                    else:
+                        loc = preloc + 1
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
+
+    def transformString(self, instring):
+        """
+        Extension to :class:`scanString`, to modify matching text with modified tokens that may
+        be returned from a parse action.  To use ``transformString``, define a grammar and
+        attach a parse action to it that modifies the returned token list.
+        Invoking ``transformString()`` on a target string will then scan for matches,
+        and replace the matched text patterns according to the logic in the parse
+        action.  ``transformString()`` returns the resulting transformed string.
+
+        Example::
+
+            wd = Word(alphas)
+            wd.setParseAction(lambda toks: toks[0].title())
+
+            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
+
+        prints::
+
+            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
+        """
+        out = []
+        lastE = 0
+        # force preservation of s, to minimize unwanted transformation of string, and to
+        # keep string locs straight between transformString and scanString
+        self.keepTabs = True
+        try:
+            for t, s, e in self.scanString(instring):
+                out.append(instring[lastE:s])
+                if t:
+                    if isinstance(t, ParseResults):
+                        out += t.asList()
+                    elif isinstance(t, list):
+                        out += t
+                    else:
+                        out.append(t)
+                lastE = e
+            out.append(instring[lastE:])
+            out = [o for o in out if o]
+            return "".join(map(_ustr, _flatten(out)))
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
+
+    def searchString(self, instring, maxMatches=_MAX_INT):
+        """
+        Another extension to :class:`scanString`, simplifying the access to the tokens found
+        to match the given parse expression.  May be called with optional
+        ``maxMatches`` argument, to clip searching after 'n' matches are found.
+
+        Example::
+
+            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
+            cap_word = Word(alphas.upper(), alphas.lower())
+
+            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+
+            # the sum() builtin can be used to merge results into a single ParseResults object
+            print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
+
+        prints::
+
+            [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
+            ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
+        """
+        try:
+            return ParseResults([t for t, s, e in self.scanString(instring, maxMatches)])
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
+
+    def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+        """
+        Generator method to split a string using the given expression as a separator.
+        May be called with optional ``maxsplit`` argument, to limit the number of splits;
+        and the optional ``includeSeparators`` argument (default= ``False``), if the separating
+        matching text should be included in the split results.
+
+        Example::
+
+            punc = oneOf(list(".,;:/-!?"))
+            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+
+        prints::
+
+            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+        """
+        splits = 0
+        last = 0
+        for t, s, e in self.scanString(instring, maxMatches=maxsplit):
+            yield instring[last:s]
+            if includeSeparators:
+                yield t[0]
+            last = e
+        yield instring[last:]
+
+    def __add__(self, other):
+        """
+        Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement
+        converts them to :class:`Literal`s by default.
+
+        Example::
+
+            greet = Word(alphas) + "," + Word(alphas) + "!"
+            hello = "Hello, World!"
+            print (hello, "->", greet.parseString(hello))
+
+        prints::
+
+            Hello, World! -> ['Hello', ',', 'World', '!']
+
+        ``...`` may be used as a parse expression as a short form of :class:`SkipTo`.
+
+            Literal('start') + ... + Literal('end')
+
+        is equivalent to:
+
+            Literal('start') + SkipTo('end')("_skipped*") + Literal('end')
+
+        Note that the skipped text is returned with '_skipped' as a results name,
+        and to support having multiple skips in the same parser, the value returned is
+        a list of all skipped text.
+        """
+        if other is Ellipsis:
+            return _PendingSkip(self)
+
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return And([self, other])
+
+    def __radd__(self, other):
+        """
+        Implementation of + operator when left operand is not a :class:`ParserElement`
+        """
+        if other is Ellipsis:
+            return SkipTo(self)("_skipped*") + self
+
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other + self
+
+    def __sub__(self, other):
+        """
+        Implementation of - operator, returns :class:`And` with error stop
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return self + And._ErrorStop() + other
+
+    def __rsub__(self, other):
+        """
+        Implementation of - operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other - self
+
+    def __mul__(self, other):
+        """
+        Implementation of * operator, allows use of ``expr * 3`` in place of
+        ``expr + expr + expr``.  Expressions may also be multiplied by a 2-integer
+        tuple, similar to ``{min, max}`` multipliers in regular expressions.  Tuples
+        may also include ``None`` as in:
+         - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent
+              to ``expr*n + ZeroOrMore(expr)``
+              (read as "at least n instances of ``expr``")
+         - ``expr*(None, n)`` is equivalent to ``expr*(0, n)``
+              (read as "0 to n instances of ``expr``")
+         - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)``
+         - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)``
+
+        Note that ``expr*(None, n)`` does not raise an exception if
+        more than n exprs exist in the input stream; that is,
+        ``expr*(None, n)`` does not enforce a maximum number of expr
+        occurrences.  If this behavior is desired, then write
+        ``expr*(None, n) + ~expr``
+        """
+        if other is Ellipsis:
+            other = (0, None)
+        elif isinstance(other, tuple) and other[:1] == (Ellipsis,):
+            other = ((0, ) + other[1:] + (None,))[:2]
+
+        if isinstance(other, int):
+            minElements, optElements = other, 0
+        elif isinstance(other, tuple):
+            other = tuple(o if o is not Ellipsis else None for o in other)
+            other = (other + (None, None))[:2]
+            if other[0] is None:
+                other = (0, other[1])
+            if isinstance(other[0], int) and other[1] is None:
+                if other[0] == 0:
+                    return ZeroOrMore(self)
+                if other[0] == 1:
+                    return OneOrMore(self)
+                else:
+                    return self * other[0] + ZeroOrMore(self)
+            elif isinstance(other[0], int) and isinstance(other[1], int):
+                minElements, optElements = other
+                optElements -= minElements
+            else:
+                raise TypeError("cannot multiply 'ParserElement' and ('%s', '%s') objects" % (type(other[0]), type(other[1])))
+        else:
+            raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other))
+
+        if minElements < 0:
+            raise ValueError("cannot multiply ParserElement by negative value")
+        if optElements < 0:
+            raise ValueError("second tuple value must be greater or equal to first tuple value")
+        if minElements == optElements == 0:
+            raise ValueError("cannot multiply ParserElement by 0 or (0, 0)")
+
+        if optElements:
+            def makeOptionalList(n):
+                if n > 1:
+                    return Optional(self + makeOptionalList(n - 1))
+                else:
+                    return Optional(self)
+            if minElements:
+                if minElements == 1:
+                    ret = self + makeOptionalList(optElements)
+                else:
+                    ret = And([self] * minElements) + makeOptionalList(optElements)
+            else:
+                ret = makeOptionalList(optElements)
+        else:
+            if minElements == 1:
+                ret = self
+            else:
+                ret = And([self] * minElements)
+        return ret
+
+    def __rmul__(self, other):
+        return self.__mul__(other)
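+    # Illustrative sketch (not part of the original source; assumes the module-level
+    # Word, alphas and nums) of the multiplication forms described in __mul__:
+    #
+    #     ip_octet = Word(nums)
+    #     dotted = ip_octet + ('.' + ip_octet) * 3        # exactly four octets
+    #     dotted.parseString("192.168.0.1")               # -> ['192', '.', '168', '.', '0', '.', '1']
+    #
+    #     two_to_four = Word(alphas) * (2, 4)             # between 2 and 4 words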
+
+    def __or__(self, other):
+        """
+        Implementation of | operator - returns :class:`MatchFirst`
+        """
+        if other is Ellipsis:
+            return _PendingSkip(self, must_skip=True)
+
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return MatchFirst([self, other])
+
+    def __ror__(self, other):
+        """
+        Implementation of | operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other | self
+
+    def __xor__(self, other):
+        """
+        Implementation of ^ operator - returns :class:`Or`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return Or([self, other])
+
+    def __rxor__(self, other):
+        """
+        Implementation of ^ operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other ^ self
+
+    def __and__(self, other):
+        """
+        Implementation of & operator - returns :class:`Each`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return Each([self, other])
+
+    def __rand__(self, other):
+        """
+        Implementation of & operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        if not isinstance(other, ParserElement):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                          SyntaxWarning, stacklevel=2)
+            return None
+        return other & self
+
+    def __invert__(self):
+        """
+        Implementation of ~ operator - returns :class:`NotAny`
+        """
+        return NotAny(self)
+
+    def __iter__(self):
+        # must implement __iter__ to override legacy use of sequential access to __getitem__ to
+        # iterate over a sequence
+        raise TypeError('%r object is not iterable' % self.__class__.__name__)
+
+    def __getitem__(self, key):
+        """
+        use ``[]`` indexing notation as a short form for expression repetition:
+         - ``expr[n]`` is equivalent to ``expr*n``
+         - ``expr[m, n]`` is equivalent to ``expr*(m, n)``
+         - ``expr[n, ...]`` or ``expr[n,]`` is equivalent
+              to ``expr*n + ZeroOrMore(expr)``
+              (read as "at least n instances of ``expr``")
+         - ``expr[..., n]`` is equivalent to ``expr*(0, n)``
+              (read as "0 to n instances of ``expr``")
+         - ``expr[...]`` and ``expr[0, ...]`` are equivalent to ``ZeroOrMore(expr)``
+         - ``expr[1, ...]`` is equivalent to ``OneOrMore(expr)``
+         ``None`` may be used in place of ``...``.
+
+        Note that ``expr[..., n]`` and ``expr[m, n]`` do not raise an exception
+        if more than ``n`` ``expr``s exist in the input stream.  If this behavior is
+        desired, then write ``expr[..., n] + ~expr``.
+        """
+
+        # convert single arg keys to tuples
+        try:
+            if isinstance(key, str):
+                key = (key,)
+            iter(key)
+        except TypeError:
+            key = (key, key)
+
+        if len(key) > 2:
+            warnings.warn("only 1 or 2 index arguments supported ({0}{1})".format(key[:5],
+                                                                                '... [{0}]'.format(len(key))
+                                                                                if len(key) > 5 else ''))
+
+        # clip to 2 elements
+        ret = self * tuple(key[:2])
+        return ret
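+    # Illustrative sketch (not part of the original source; assumes the module-level
+    # Word and alphas) of the indexing forms listed above, which mirror the * operator:
+    #
+    #     Word(alphas)[3].parseString("ab cd ef")        # exactly 3 words -> ['ab', 'cd', 'ef']
+    #     Word(alphas)[1, ...].parseString("ab cd ef")   # same as OneOrMore(Word(alphas))
+    #     Word(alphas)[...].parseString("")              # same as ZeroOrMore(Word(alphas)); may match nothing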
+
+    def __call__(self, name=None):
+        """
+        Shortcut for :class:`setResultsName`, with ``listAllMatches=False``.
+
+        If ``name`` is given with a trailing ``'*'`` character, then ``listAllMatches`` will be
+        passed as ``True``.
+
+        If ``name`` is omitted, same as calling :class:`copy`.
+
+        Example::
+
+            # these are equivalent
+            userdata = Word(alphas).setResultsName("name") + Word(nums + "-").setResultsName("socsecno")
+            userdata = Word(alphas)("name") + Word(nums + "-")("socsecno")
+        """
+        if name is not None:
+            return self._setResultsName(name)
+        else:
+            return self.copy()
+
+    def suppress(self):
+        """
+        Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from
+        cluttering up returned output.
+        """
+        return Suppress(self)
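+    # Illustrative sketch (not part of the original source; assumes the module-level
+    # Word, Literal and alphas).  suppress() keeps a token in the grammar but drops it
+    # from the results, which is handy for punctuation:
+    #
+    #     csv_row = Word(alphas) + (Literal(',').suppress() + Word(alphas)) * 2
+    #     csv_row.parseString("red,green,blue")   # -> ['red', 'green', 'blue']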
+
+    def leaveWhitespace(self):
+        """
+        Disables the skipping of whitespace before matching the characters in the
+        :class:`ParserElement`'s defined pattern.  This is normally only used internally by
+        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
+        """
+        self.skipWhitespace = False
+        return self
+
+    def setWhitespaceChars(self, chars):
+        """
+        Overrides the default whitespace chars
+        """
+        self.skipWhitespace = True
+        self.whiteChars = chars
+        self.copyDefaultWhiteChars = False
+        return self
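+    # Illustrative sketch (not part of the original source; assumes the module-level
+    # Word, OneOrMore and alphas).  leaveWhitespace and setWhitespaceChars control the
+    # implicit whitespace skipping done in preParse; for a line-oriented grammar,
+    # newlines can be made significant per-expression:
+    #
+    #     word = Word(alphas).setWhitespaceChars(' \t')   # skip spaces/tabs, but not newlines
+    #     OneOrMore(word).parseString("ab cd\nef")         # -> ['ab', 'cd'] (stops at the newline)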
+
+    def parseWithTabs(self):
+        """
+        Overrides default behavior to expand ``<TAB>``s to spaces before parsing the input string.
+        Must be called before ``parseString`` when the input grammar contains elements that
+        match ``<TAB>`` characters.
+        """
+        self.keepTabs = True
+        return self
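+    # Illustrative sketch (not part of the original source; assumes the module-level
+    # Word, Suppress, White and alphas).  A grammar that must match literal tab
+    # characters needs parseWithTabs, otherwise parseString expands them to spaces:
+    #
+    #     tsv_pair = Word(alphas) + Suppress(White('\t')) + Word(alphas)
+    #     tsv_pair.parseWithTabs().parseString("key\tvalue")   # -> ['key', 'value']
+    #     # without parseWithTabs(), the tab is expanded to spaces and the match fails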
+
+    def ignore(self, other):
+        """
+        Define expression to be ignored (e.g., comments) while doing pattern
+        matching; may be called repeatedly, to define multiple comment or other
+        ignorable patterns.
+
+        Example::
+
+            patt = OneOrMore(Word(alphas))
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
+
+            patt.ignore(cStyleComment)
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
+        """
+        if isinstance(other, basestring):
+            other = Suppress(other)
+
+        if isinstance(other, Suppress):
+            if other not in self.ignoreExprs:
+                self.ignoreExprs.append(other)
+        else:
+            self.ignoreExprs.append(Suppress(other.copy()))
+        return self
+
+    def setDebugActions(self, startAction, successAction, exceptionAction):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        """
+        self.debugActions = (startAction or _defaultStartDebugAction,
+                             successAction or _defaultSuccessDebugAction,
+                             exceptionAction or _defaultExceptionDebugAction)
+        self.debug = True
+        return self
+
+    def setDebug(self, flag=True):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        Set ``flag`` to True to enable, False to disable.
+
+        Example::
+
+            wd = Word(alphas).setName("alphaword")
+            integer = Word(nums).setName("numword")
+            term = wd | integer
+
+            # turn on debugging for wd
+            wd.setDebug()
+
+            OneOrMore(term).parseString("abc 123 xyz 890")
+
+        prints::
+
+            Match alphaword at loc 0(1,1)
+            Matched alphaword -> ['abc']
+            Match alphaword at loc 3(1,4)
+            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
+            Match alphaword at loc 7(1,8)
+            Matched alphaword -> ['xyz']
+            Match alphaword at loc 11(1,12)
+            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
+            Match alphaword at loc 15(1,16)
+            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
+
+        The output shown is that produced by the default debug actions - custom debug actions can be
+        specified using :class:`setDebugActions`. Prior to attempting
+        to match the ``wd`` expression, the debugging message ``"Match <exprname> at loc <n>(<line>,<col>)"``
+        is shown. Then if the parse succeeds, a ``"Matched"`` message is shown, or an ``"Exception raised"``
+        message is shown. Also note the use of :class:`setName` to assign a human-readable name to the expression,
+        which makes debugging and exception messages easier to understand - for instance, the default
+        name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``.
+        """
+        if flag:
+            self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction)
+        else:
+            self.debug = False
+        return self
+
+    def __str__(self):
+        return self.name
+
+    def __repr__(self):
+        return _ustr(self)
+
+    def streamline(self):
+        self.streamlined = True
+        self.strRepr = None
+        return self
+
+    def checkRecursion(self, parseElementList):
+        pass
+
+    def validate(self, validateTrace=None):
+        """
+        Check defined expressions for valid structure, check for infinite recursive definitions.
+        """
+        self.checkRecursion([])
+
+    def parseFile(self, file_or_filename, parseAll=False):
+        """
+        Execute the parse expression on the given file or filename.
+        If a filename is specified (instead of a file object),
+        the entire file is opened, read, and closed before parsing.
+        """
+        try:
+            file_contents = file_or_filename.read()
+        except AttributeError:
+            with open(file_or_filename, "r") as f:
+                file_contents = f.read()
+        try:
+            return self.parseString(file_contents, parseAll)
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
+                if getattr(exc, '__traceback__', None) is not None:
+                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                raise exc
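+    # Illustrative sketch (not part of the original source; assumes the module-level
+    # Group, Word, Suppress, OneOrMore, alphas and alphanums; the filename below is
+    # hypothetical).  parseFile accepts either an open file object or a path:
+    #
+    #     config_line = Group(Word(alphas) + Suppress('=') + Word(alphanums))
+    #     results = OneOrMore(config_line).parseFile("settings.cfg", parseAll=True)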
+
+    def __eq__(self, other):
+        if self is other:
+            return True
+        elif isinstance(other, basestring):
+            return self.matches(other)
+        elif isinstance(other, ParserElement):
+            return vars(self) == vars(other)
+        return False
+
+    def __ne__(self, other):
+        return not (self == other)
+
+    def __hash__(self):
+        return id(self)
+
+    def __req__(self, other):
+        return self == other
+
+    def __rne__(self, other):
+        return not (self == other)
+
+    def matches(self, testString, parseAll=True):
+        """
+        Method for quick testing of a parser against a test string. Good for simple
+        inline microtests of sub expressions while building up larger parser.
+
+        Parameters:
+         - testString - to test against this expression for a match
+         - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
+
+        Example::
+
+            expr = Word(nums)
+            assert expr.matches("100")
+        """
+        try:
+            self.parseString(_ustr(testString), parseAll=parseAll)
+            return True
+        except ParseBaseException:
+            return False
+
+    def runTests(self, tests, parseAll=True, comment='#',
+                 fullDump=True, printResults=True, failureTests=False, postParse=None,
+                 file=None):
+        """
+        Execute the parse expression on a series of test strings, showing each
+        test, the parsed results or where the parse failed. Quick and easy way to
+        run a parse expression against a list of sample strings.
+
+        Parameters:
+         - tests - a list of separate test strings, or a multiline string of test strings
+         - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
+         - comment - (default= ``'#'``) - expression for indicating embedded comments in the test
+              string; pass None to disable comment filtering
+         - fullDump - (default= ``True``) - dump results as list followed by results names in nested outline;
+              if False, only dump nested list
+         - printResults - (default= ``True``) prints test output to stdout
+         - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing
+         - postParse - (default= ``None``) optional callback for successful parse results; called as
+              `fn(test_string, parse_results)` and returns a string to be added to the test output
+         - file - (default=``None``) optional file-like object to which test output will be written;
+              if None, will default to ``sys.stdout``
+
+        Returns: a (success, results) tuple, where success indicates that all tests succeeded
+        (or failed if ``failureTests`` is True), and the results contain a list of lines of each
+        test's output
+
+        Example::
+
+            number_expr = pyparsing_common.number.copy()
+
+            result = number_expr.runTests('''
+                # unsigned integer
+                100
+                # negative integer
+                -100
+                # float with scientific notation
+                6.02e23
+                # integer with scientific notation
+                1e-12
+                ''')
+            print("Success" if result[0] else "Failed!")
+
+            result = number_expr.runTests('''
+                # stray character
+                100Z
+                # missing leading digit before '.'
+                -.100
+                # too many '.'
+                3.14.159
+                ''', failureTests=True)
+            print("Success" if result[0] else "Failed!")
+
+        prints::
+
+            # unsigned integer
+            100
+            [100]
+
+            # negative integer
+            -100
+            [-100]
+
+            # float with scientific notation
+            6.02e23
+            [6.02e+23]
+
+            # integer with scientific notation
+            1e-12
+            [1e-12]
+
+            Success
+
+            # stray character
+            100Z
+               ^
+            FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+            # missing leading digit before '.'
+            -.100
+            ^
+            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+            # too many '.'
+            3.14.159
+                ^
+            FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+            Success
+
+        Each test string must be on a single line. If you want to test a string that spans multiple
+        lines, create a test like this::
+
+            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+
+        (Note that this is a raw string literal, you must include the leading 'r'.)
+        """
+        if isinstance(tests, basestring):
+            tests = list(map(str.strip, tests.rstrip().splitlines()))
+        if isinstance(comment, basestring):
+            comment = Literal(comment)
+        if file is None:
+            file = sys.stdout
+        print_ = file.write
+
+        allResults = []
+        comments = []
+        success = True
+        NL = Literal(r'\n').addParseAction(replaceWith('\n')).ignore(quotedString)
+        BOM = u'\ufeff'
+        for t in tests:
+            if comment is not None and comment.matches(t, False) or comments and not t:
+                comments.append(t)
+                continue
+            if not t:
+                continue
+            out = ['\n' + '\n'.join(comments) if comments else '', t]
+            comments = []
+            try:
+                # convert newline marks to actual newlines, and strip leading BOM if present
+                t = NL.transformString(t.lstrip(BOM))
+                result = self.parseString(t, parseAll=parseAll)
+            except ParseBaseException as pe:
+                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
+                if '\n' in t:
+                    out.append(line(pe.loc, t))
+                    out.append(' ' * (col(pe.loc, t) - 1) + '^' + fatal)
+                else:
+                    out.append(' ' * pe.loc + '^' + fatal)
+                out.append("FAIL: " + str(pe))
+                success = success and failureTests
+                result = pe
+            except Exception as exc:
+                out.append("FAIL-EXCEPTION: " + str(exc))
+                success = success and failureTests
+                result = exc
+            else:
+                success = success and not failureTests
+                if postParse is not None:
+                    try:
+                        pp_value = postParse(t, result)
+                        if pp_value is not None:
+                            if isinstance(pp_value, ParseResults):
+                                out.append(pp_value.dump())
+                            else:
+                                out.append(str(pp_value))
+                        else:
+                            out.append(result.dump())
+                    except Exception as e:
+                        out.append(result.dump(full=fullDump))
+                        out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e))
+                else:
+                    out.append(result.dump(full=fullDump))
+
+            if printResults:
+                if fullDump:
+                    out.append('')
+                print_('\n'.join(out))
+
+            allResults.append((t, result))
+
+        return success, allResults
+
+
+class _PendingSkip(ParserElement):
+    # internal placeholder class to hold a place where '...' is added to a parser element,
+    # once another ParserElement is added, this placeholder will be replaced with a SkipTo
+    def __init__(self, expr, must_skip=False):
+        super(_PendingSkip, self).__init__()
+        self.strRepr = str(expr + Empty()).replace('Empty', '...')
+        self.name = self.strRepr
+        self.anchor = expr
+        self.must_skip = must_skip
+
+    def __add__(self, other):
+        skipper = SkipTo(other).setName("...")("_skipped*")
+        if self.must_skip:
+            def must_skip(t):
+                if not t._skipped or t._skipped.asList() == ['']:
+                    del t[0]
+                    t.pop("_skipped", None)
+            def show_skip(t):
+                if t._skipped.asList()[-1:] == ['']:
+                    skipped = t.pop('_skipped')
+                    t['_skipped'] = 'missing <' + repr(self.anchor) + '>'
+            return (self.anchor + skipper().addParseAction(must_skip)
+                    | skipper().addParseAction(show_skip)) + other
+
+        return self.anchor + skipper + other
+
+    def __repr__(self):
+        return self.strRepr
+
+    def parseImpl(self, *args):
+        raise Exception("use of `...` expression without following SkipTo target expression")
+
+
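+# Illustrative sketch (added commentary, not part of the original source):
+# the '...' placeholder wrapped in _PendingSkip above is resolved into a
+# SkipTo once the next element is added, and the skipped text is collected
+# under the '_skipped' results name. `_demo_ellipsis_skip` is a hypothetical
+# helper used only for documentation; it is never called by the library.
+def _demo_ellipsis_skip():
+    # `Ellipsis` is what the Python 3 literal `...` evaluates to
+    grammar = Literal("start") + Ellipsis + Literal("end")
+    result = grammar.parseString("start anything in between end")
+    assert result[0] == "start" and result[-1] == "end"
+    assert "_skipped" in result  # the intervening text is captured here
+
+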
+class Token(ParserElement):
+    """Abstract :class:`ParserElement` subclass, for defining atomic
+    matching patterns.
+    """
+    def __init__(self):
+        super(Token, self).__init__(savelist=False)
+
+
+class Empty(Token):
+    """An empty token, will always match.
+    """
+    def __init__(self):
+        super(Empty, self).__init__()
+        self.name = "Empty"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+
+class NoMatch(Token):
+    """A token that will never match.
+    """
+    def __init__(self):
+        super(NoMatch, self).__init__()
+        self.name = "NoMatch"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.errmsg = "Unmatchable token"
+
+    def parseImpl(self, instring, loc, doActions=True):
+        raise ParseException(instring, loc, self.errmsg, self)
+
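+# Illustrative sketch (added commentary, not part of the original source):
+# Empty succeeds anywhere without consuming input, while NoMatch always
+# fails, which makes it useful as a deliberate dead-end in an alternation.
+# `_demo_empty_and_nomatch` is a hypothetical helper; it is never called.
+def _demo_empty_and_nomatch():
+    assert Empty().parseString("anything").asList() == []
+    try:
+        NoMatch().parseString("anything")
+    except ParseException:
+        pass  # NoMatch is guaranteed to raise
+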
+
+class Literal(Token):
+    """Token to exactly match a specified string.
+
+    Example::
+
+        Literal('blah').parseString('blah')  # -> ['blah']
+        Literal('blah').parseString('blahfooblah')  # -> ['blah']
+        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
+
+    For case-insensitive matching, use :class:`CaselessLiteral`.
+
+    For keyword matching (force word break before and after the matched string),
+    use :class:`Keyword` or :class:`CaselessKeyword`.
+    """
+    def __init__(self, matchString):
+        super(Literal, self).__init__()
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Literal; use Empty() instead",
+                            SyntaxWarning, stacklevel=2)
+            self.__class__ = Empty
+        self.name = '"%s"' % _ustr(self.match)
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+
+        # Performance tuning: modify __class__ to select
+        # a parseImpl optimized for single-character check
+        if self.matchLen == 1 and type(self) is Literal:
+            self.__class__ = _SingleCharLiteral
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc] == self.firstMatchChar and instring.startswith(self.match, loc):
+            return loc + self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class _SingleCharLiteral(Literal):
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc] == self.firstMatchChar:
+            return loc + 1, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
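+# Illustrative sketch (added commentary, not part of the original source):
+# a one-character Literal is transparently rebound to _SingleCharLiteral by
+# Literal.__init__ above, so single-character matches take the cheaper path.
+# `_demo_single_char_literal` is a hypothetical helper; it is never called.
+def _demo_single_char_literal():
+    comma = Literal(",")
+    assert type(comma) is _SingleCharLiteral   # __class__ swapped in __init__
+    assert type(Literal("::")) is Literal      # multi-char literals unchanged
+    assert comma.parseString(",").asList() == [","]
+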
+_L = Literal
+ParserElement._literalStringClass = Literal
+
+class Keyword(Token):
+    """Token to exactly match a specified string as a keyword, that is,
+    it must be immediately followed by a non-keyword character.  Compare
+    with :class:`Literal`:
+
+     - ``Literal("if")`` will match the leading ``'if'`` in
+       ``'ifAndOnlyIf'``.
+     - ``Keyword("if")`` will not; it will only match the leading
+       ``'if'`` in ``'if x=1'``, or ``'if(y==2)'``
+
+    Accepts two optional constructor arguments in addition to the
+    keyword string:
+
+     - ``identChars`` is a string of characters that would be valid
+       identifier characters, defaulting to all alphanumerics + "_" and
+       "$"
+     - ``caseless`` allows case-insensitive matching, default is ``False``.
+
+    Example::
+
+        Keyword("start").parseString("start")  # -> ['start']
+        Keyword("start").parseString("starting")  # -> Exception
+
+    For case-insensitive matching, use :class:`CaselessKeyword`.
+    """
+    DEFAULT_KEYWORD_CHARS = alphanums + "_$"
+
+    def __init__(self, matchString, identChars=None, caseless=False):
+        super(Keyword, self).__init__()
+        if identChars is None:
+            identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Keyword; use Empty() instead",
+                          SyntaxWarning, stacklevel=2)
+        self.name = '"%s"' % self.match
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+        self.caseless = caseless
+        if caseless:
+            self.caselessmatch = matchString.upper()
+            identChars = identChars.upper()
+        self.identChars = set(identChars)
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if self.caseless:
+            if ((instring[loc:loc + self.matchLen].upper() == self.caselessmatch)
+                    and (loc >= len(instring) - self.matchLen
+                         or instring[loc + self.matchLen].upper() not in self.identChars)
+                    and (loc == 0
+                         or instring[loc - 1].upper() not in self.identChars)):
+                return loc + self.matchLen, self.match
+
+        else:
+            if instring[loc] == self.firstMatchChar:
+                if ((self.matchLen == 1 or instring.startswith(self.match, loc))
+                        and (loc >= len(instring) - self.matchLen
+                             or instring[loc + self.matchLen] not in self.identChars)
+                        and (loc == 0 or instring[loc - 1] not in self.identChars)):
+                    return loc + self.matchLen, self.match
+
+        raise ParseException(instring, loc, self.errmsg, self)
+
+    def copy(self):
+        c = super(Keyword, self).copy()
+        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        return c
+
+    @staticmethod
+    def setDefaultKeywordChars(chars):
+        """Overrides the default Keyword chars
+        """
+        Keyword.DEFAULT_KEYWORD_CHARS = chars
+
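+# Illustrative sketch (added commentary, not part of the original source):
+# narrowing the default identifier characters changes what counts as a word
+# break for every Keyword created afterwards. `_demo_keyword_chars` is a
+# hypothetical helper; it is never called by the library.
+def _demo_keyword_chars():
+    saved = Keyword.DEFAULT_KEYWORD_CHARS
+    Keyword.setDefaultKeywordChars(alphas)  # digits no longer extend a keyword
+    assert Keyword("if").parseString("if99").asList() == ["if"]
+    Keyword.setDefaultKeywordChars(saved)   # restore the module-wide default
+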
+class CaselessLiteral(Literal):
+    """Token to match a specified string, ignoring case of letters.
+    Note: the matched results will always be in the case of the given
+    match string, NOT the case of the input text.
+
+    Example::
+
+        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
+
+    (Contrast with example for :class:`CaselessKeyword`.)
+    """
+    def __init__(self, matchString):
+        super(CaselessLiteral, self).__init__(matchString.upper())
+        # Preserve the defining literal.
+        self.returnString = matchString
+        self.name = "'%s'" % self.returnString
+        self.errmsg = "Expected " + self.name
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc:loc + self.matchLen].upper() == self.match:
+            return loc + self.matchLen, self.returnString
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class CaselessKeyword(Keyword):
+    """
+    Caseless version of :class:`Keyword`.
+
+    Example::
+
+        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
+
+    (Contrast with example for :class:`CaselessLiteral`.)
+    """
+    def __init__(self, matchString, identChars=None):
+        super(CaselessKeyword, self).__init__(matchString, identChars, caseless=True)
+
+class CloseMatch(Token):
+    """A variation on :class:`Literal` which matches "close" matches,
+    that is, strings with at most 'n' mismatching characters.
+    :class:`CloseMatch` takes parameters:
+
+     - ``match_string`` - string to be matched
+     - ``maxMismatches`` - (``default=1``) maximum number of
+       mismatches allowed to count as a match
+
+    The results from a successful parse will contain the matched text
+    from the input string and the following named results:
+
+     - ``mismatches`` - a list of the positions within the
+       match_string where mismatches were found
+     - ``original`` - the original match_string used to compare
+       against the input string
+
+    If ``mismatches`` is an empty list, then the match was an exact
+    match.
+
+    Example::
+
+        patt = CloseMatch("ATCATCGAATGGA")
+        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
+        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
+
+        # exact match
+        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
+
+        # close match allowing up to 2 mismatches
+        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
+        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
+    """
+    def __init__(self, match_string, maxMismatches=1):
+        super(CloseMatch, self).__init__()
+        self.name = match_string
+        self.match_string = match_string
+        self.maxMismatches = maxMismatches
+        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
+        self.mayIndexError = False
+        self.mayReturnEmpty = False
+
+    def parseImpl(self, instring, loc, doActions=True):
+        start = loc
+        instrlen = len(instring)
+        maxloc = start + len(self.match_string)
+
+        if maxloc <= instrlen:
+            match_string = self.match_string
+            match_stringloc = 0
+            mismatches = []
+            maxMismatches = self.maxMismatches
+
+            for match_stringloc, s_m in enumerate(zip(instring[loc:maxloc], match_string)):
+                src, mat = s_m
+                if src != mat:
+                    mismatches.append(match_stringloc)
+                    if len(mismatches) > maxMismatches:
+                        break
+            else:
+                loc = match_stringloc + 1
+                results = ParseResults([instring[start:loc]])
+                results['original'] = match_string
+                results['mismatches'] = mismatches
+                return loc, results
+
+        raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Word(Token):
+    """Token for matching words composed of allowed character sets.
+    Defined with string containing all allowed initial characters, an
+    optional string containing allowed body characters (if omitted,
+    defaults to the initial character set), and an optional minimum,
+    maximum, and/or exact length.  The default value for ``min`` is
+    1 (a minimum value < 1 is not valid); the default values for
+    ``max`` and ``exact`` are 0, meaning no maximum or exact
+    length restriction. An optional ``excludeChars`` parameter can
+    list characters that might be found in the input ``bodyChars``
+    string; useful to define a word of all printables except for one or
+    two characters, for instance.
+
+    :class:`srange` is useful for defining custom character set strings
+    for defining ``Word`` expressions, using range notation from
+    regular expression character sets.
+
+    A common mistake is to use :class:`Word` to match a specific literal
+    string, as in ``Word("Address")``. Remember that :class:`Word`
+    uses the string argument to define *sets* of matchable characters.
+    This expression would match "Add", "AAA", "dAred", or any other word
+    made up of the characters 'A', 'd', 'r', 'e', and 's'. To match an
+    exact literal string, use :class:`Literal` or :class:`Keyword`.
+
+    pyparsing includes helper strings for building Words:
+
+     - :class:`alphas`
+     - :class:`nums`
+     - :class:`alphanums`
+     - :class:`hexnums`
+     - :class:`alphas8bit` (alphabetic characters in the Latin-1 range 128-255
+       - accented, tilded, umlauted, etc.)
+     - :class:`punc8bit` (non-alphabetic characters in the Latin-1 range
+       128-255 - currency, symbols, superscripts, diacriticals, etc.)
+     - :class:`printables` (any non-whitespace character)
+
+    Example::
+
+        # a word composed of digits
+        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
+
+        # a word with a leading capital, and zero or more lowercase
+        capital_word = Word(alphas.upper(), alphas.lower())
+
+        # hostnames are alphanumeric, with leading alpha, and '-'
+        hostname = Word(alphas, alphanums + '-')
+
+        # roman numeral (not a strict parser, accepts invalid mix of characters)
+        roman = Word("IVXLCDM")
+
+        # any string of non-whitespace characters, except for ','
+        csv_value = Word(printables, excludeChars=",")
+    """
+    def __init__(self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None):
+        super(Word, self).__init__()
+        if excludeChars:
+            excludeChars = set(excludeChars)
+            initChars = ''.join(c for c in initChars if c not in excludeChars)
+            if bodyChars:
+                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
+        self.initCharsOrig = initChars
+        self.initChars = set(initChars)
+        if bodyChars:
+            self.bodyCharsOrig = bodyChars
+            self.bodyChars = set(bodyChars)
+        else:
+            self.bodyCharsOrig = initChars
+            self.bodyChars = set(initChars)
+
+        self.maxSpecified = max > 0
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.asKeyword = asKeyword
+
+        if ' ' not in self.initCharsOrig + self.bodyCharsOrig and (min == 1 and max == 0 and exact == 0):
+            if self.bodyCharsOrig == self.initCharsOrig:
+                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
+            elif len(self.initCharsOrig) == 1:
+                self.reString = "%s[%s]*" % (re.escape(self.initCharsOrig),
+                                             _escapeRegexRangeChars(self.bodyCharsOrig),)
+            else:
+                self.reString = "[%s][%s]*" % (_escapeRegexRangeChars(self.initCharsOrig),
+                                               _escapeRegexRangeChars(self.bodyCharsOrig),)
+            if self.asKeyword:
+                self.reString = r"\b" + self.reString + r"\b"
+
+            try:
+                self.re = re.compile(self.reString)
+            except Exception:
+                self.re = None
+            else:
+                self.re_match = self.re.match
+                self.__class__ = _WordRegex
+
+    def parseImpl(self, instring, loc, doActions=True):
+        if instring[loc] not in self.initChars:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        instrlen = len(instring)
+        bodychars = self.bodyChars
+        maxloc = start + self.maxLen
+        maxloc = min(maxloc, instrlen)
+        while loc < maxloc and instring[loc] in bodychars:
+            loc += 1
+
+        throwException = False
+        if loc - start < self.minLen:
+            throwException = True
+        elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
+            throwException = True
+        elif self.asKeyword:
+            if (start > 0 and instring[start - 1] in bodychars
+                    or loc < instrlen and instring[loc] in bodychars):
+                throwException = True
+
+        if throwException:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__(self):
+        try:
+            return super(Word, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+
+            def charsAsStr(s):
+                if len(s) > 4:
+                    return s[:4] + "..."
+                else:
+                    return s
+
+            if self.initCharsOrig != self.bodyCharsOrig:
+                self.strRepr = "W:(%s, %s)" % (charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig))
+            else:
+                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
+
+        return self.strRepr
+
+class _WordRegex(Word):
+    def parseImpl(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        return loc, result.group()
+
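+# Illustrative sketch (added commentary, not part of the original source): a
+# simple Word with default min/max is compiled to a regular expression in
+# Word.__init__ and rebound to _WordRegex, so each match is one re.match call.
+# `_demo_word_regex` is a hypothetical helper; it is never called.
+def _demo_word_regex():
+    ident = Word(alphas, alphanums + "_")
+    assert type(ident) is _WordRegex  # promoted by the Word constructor
+    assert ident.parseString("foo_bar baz").asList() == ["foo_bar"]
+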
+
+class Char(_WordRegex):
+    """A short-cut class for defining ``Word(characters, exact=1)``,
+    for matching any single character from a given string of
+    characters.
+    """
+    def __init__(self, charset, asKeyword=False, excludeChars=None):
+        super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars)
+        self.reString = "[%s]" % _escapeRegexRangeChars(''.join(self.initChars))
+        if asKeyword:
+            self.reString = r"\b%s\b" % self.reString
+        self.re = re.compile(self.reString)
+        self.re_match = self.re.match
+
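+# Illustrative sketch (added commentary, not part of the original source):
+# Char matches exactly one character from the given set, unlike Word, which
+# is greedy. `_demo_char` is a hypothetical helper; it is never called.
+def _demo_char():
+    digit = Char(nums)
+    assert digit.parseString("123").asList() == ["1"]         # one char only
+    assert Word(nums).parseString("123").asList() == ["123"]  # Word is greedy
+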
+
+class Regex(Token):
+    r"""Token for matching strings that match a given regular
+    expression. Defined with string specifying the regular expression in
+    a form recognized by the stdlib Python  `re module <https://docs.python.org/3/library/re.html>`_.
+    If the given regex contains named groups (defined using ``(?P<name>...)``),
+    these will be preserved as named parse results.
+
+    If instead of the Python stdlib re module you wish to use a different RE module
+    (such as the `regex` module), you can do so by building your
+    Regex object with a compiled RE that was compiled using regex:
+
+    Example::
+
+        realnum = Regex(r"[+-]?\d+\.\d*")
+        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
+        # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
+        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
+
+        # use regex module instead of stdlib re module to construct a Regex using
+        # a compiled regular expression
+        import regex
+        parser = pp.Regex(regex.compile(r'[0-9]'))
+
+    """
+    def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False):
+        """The parameters ``pattern`` and ``flags`` are passed
+        to the ``re.compile()`` function as-is. See the Python
+        `re module <https://docs.python.org/3/library/re.html>`_ module for an
+        explanation of the acceptable patterns and flags.
+        """
+        super(Regex, self).__init__()
+
+        if isinstance(pattern, basestring):
+            if not pattern:
+                warnings.warn("null string passed to Regex; use Empty() instead",
+                              SyntaxWarning, stacklevel=2)
+
+            self.pattern = pattern
+            self.flags = flags
+
+            try:
+                self.re = re.compile(self.pattern, self.flags)
+                self.reString = self.pattern
+            except sre_constants.error:
+                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
+                              SyntaxWarning, stacklevel=2)
+                raise
+
+        elif hasattr(pattern, 'pattern') and hasattr(pattern, 'match'):
+            self.re = pattern
+            self.pattern = self.reString = pattern.pattern
+            self.flags = flags
+
+        else:
+            raise TypeError("Regex may only be constructed with a string or a compiled RE object")
+
+        self.re_match = self.re.match
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = self.re_match("") is not None
+        self.asGroupList = asGroupList
+        self.asMatch = asMatch
+        if self.asGroupList:
+            self.parseImpl = self.parseImplAsGroupList
+        if self.asMatch:
+            self.parseImpl = self.parseImplAsMatch
+
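+    # Illustrative note (added commentary, not part of the original source):
+    # with asGroupList=True the single returned token is the tuple from
+    # match.groups(), and with asMatch=True it is the re match object itself;
+    # a sketch of the difference:
+    #
+    #     Regex(r"(\d+)-(\d+)", asGroupList=True).parseString("1-2")[0]       # -> ('1', '2')
+    #     Regex(r"(\d+)-(\d+)", asMatch=True).parseString("1-2")[0].group(2)  # -> '2'
+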
+    def parseImpl(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = ParseResults(result.group())
+        d = result.groupdict()
+        if d:
+            for k, v in d.items():
+                ret[k] = v
+        return loc, ret
+
+    def parseImplAsGroupList(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result.groups()
+        return loc, ret
+
+    def parseImplAsMatch(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result
+        return loc, ret
+
+    def __str__(self):
+        try:
+            return super(Regex, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "Re:(%s)" % repr(self.pattern)
+
+        return self.strRepr
+
+    def sub(self, repl):
+        r"""
+        Return Regex with an attached parse action to transform the parsed
+        result as if called using `re.sub(expr, repl, string) <https://docs.python.org/3/library/re.html#re.sub>`_.
+
+        Example::
+
+            make_html = Regex(r"(\w+):(.*?):").sub(r"<\1>\2</\1>")
+            print(make_html.transformString("h1:main title:"))
+            # prints "

main title

" + """ + if self.asGroupList: + warnings.warn("cannot use sub() with Regex(asGroupList=True)", + SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if self.asMatch and callable(repl): + warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", + SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if self.asMatch: + def pa(tokens): + return tokens[0].expand(repl) + else: + def pa(tokens): + return self.re.sub(repl, tokens[0]) + return self.addParseAction(pa) + +class QuotedString(Token): + r""" + Token for matching strings that are delimited by quoting characters. + + Defined with the following parameters: + + - quoteChar - string of one or more characters defining the + quote delimiting string + - escChar - character to escape quotes, typically backslash + (default= ``None``) + - escQuote - special quote sequence to escape an embedded quote + string (such as SQL's ``""`` to escape an embedded ``"``) + (default= ``None``) + - multiline - boolean indicating whether quotes can span + multiple lines (default= ``False``) + - unquoteResults - boolean indicating whether the matched text + should be unquoted (default= ``True``) + - endQuoteChar - string of one or more characters defining the + end of the quote delimited string (default= ``None`` => same as + quoteChar) + - convertWhitespaceEscapes - convert escaped whitespace + (``'\t'``, ``'\n'``, etc.) to actual whitespace + (default= ``True``) + + Example:: + + qs = QuotedString('"') + print(qs.searchString('lsjdf "This is the quote" sldjf')) + complex_qs = QuotedString('{{', endQuoteChar='}}') + print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) + sql_qs = QuotedString('"', escQuote='""') + print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) + + prints:: + + [['This is the quote']] + [['This is the "quote"']] + [['This is the quote with "embedded" quotes']] + """ + def __init__(self, quoteChar, escChar=None, escQuote=None, multiline=False, + unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): + super(QuotedString, self).__init__() + + # remove white space from quote chars - wont work anyway + quoteChar = quoteChar.strip() + if not quoteChar: + warnings.warn("quoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if not endQuoteChar: + warnings.warn("endQuoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) + raise SyntaxError() + + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + self.convertWhitespaceEscapes = convertWhitespaceEscapes + + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = r'%s(?:[^%s%s]' % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '')) + else: + self.flags = 0 + self.pattern = r'%s(?:[^%s\n\r%s]' % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '')) + if len(self.endQuoteChar) > 1: + self.pattern += ( + '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar) 
- 1, 0, -1)) + ')') + + if escQuote: + self.pattern += (r'|(?:%s)' % re.escape(escQuote)) + if escChar: + self.pattern += (r'|(?:%s.)' % re.escape(escChar)) + self.escCharReplacePattern = re.escape(self.escChar) + "(.)" + self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + self.re_match = self.re.match + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, + SyntaxWarning, stacklevel=2) + raise + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + result = instring[loc] == self.firstQuoteChar and self.re_match(instring, loc) or None + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result.group() + + if self.unquoteResults: + + # strip off quotes + ret = ret[self.quoteCharLen: -self.endQuoteCharLen] + + if isinstance(ret, basestring): + # replace escaped whitespace + if '\\' in ret and self.convertWhitespaceEscapes: + ws_map = { + r'\t': '\t', + r'\n': '\n', + r'\f': '\f', + r'\r': '\r', + } + for wslit, wschar in ws_map.items(): + ret = ret.replace(wslit, wschar) + + # replace escaped characters + if self.escChar: + ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) + + # replace escaped quotes + if self.escQuote: + ret = ret.replace(self.escQuote, self.endQuoteChar) + + return loc, ret + + def __str__(self): + try: + return super(QuotedString, self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) + + return self.strRepr + + +class CharsNotIn(Token): + """Token for matching words composed of characters *not* in a given + set (will include whitespace in matched characters if not listed in + the provided exclusion set - see example). Defined with string + containing all disallowed characters, and an optional minimum, + maximum, and/or exact length. The default value for ``min`` is + 1 (a minimum value < 1 is not valid); the default values for + ``max`` and ``exact`` are 0, meaning no maximum or exact + length restriction. 
+ + Example:: + + # define a comma-separated-value as anything that is not a ',' + csv_value = CharsNotIn(',') + print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) + + prints:: + + ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] + """ + def __init__(self, notChars, min=1, max=0, exact=0): + super(CharsNotIn, self).__init__() + self.skipWhitespace = False + self.notChars = notChars + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use " + "Optional(CharsNotIn()) if zero-length char group is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = (self.minLen == 0) + self.mayIndexError = False + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] in self.notChars: + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + notchars = self.notChars + maxlen = min(start + self.maxLen, len(instring)) + while loc < maxlen and instring[loc] not in notchars: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + def __str__(self): + try: + return super(CharsNotIn, self).__str__() + except Exception: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[:4] + else: + self.strRepr = "!W:(%s)" % self.notChars + + return self.strRepr + +class White(Token): + """Special matching class for matching whitespace. Normally, + whitespace is ignored by pyparsing grammars. This class is included + when some whitespace structures are significant. Define with + a string containing the whitespace characters to be matched; default + is ``" \\t\\r\\n"``. Also takes optional ``min``, + ``max``, and ``exact`` arguments, as defined for the + :class:`Word` class. 
+ """ + whiteStrs = { + ' ' : '', + '\t': '', + '\n': '', + '\r': '', + '\f': '', + u'\u00A0': '', + u'\u1680': '', + u'\u180E': '', + u'\u2000': '', + u'\u2001': '', + u'\u2002': '', + u'\u2003': '', + u'\u2004': '', + u'\u2005': '', + u'\u2006': '', + u'\u2007': '', + u'\u2008': '', + u'\u2009': '', + u'\u200A': '', + u'\u200B': '', + u'\u202F': '', + u'\u205F': '', + u'\u3000': '', + } + def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): + super(White, self).__init__() + self.matchWhite = ws + self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite)) + # ~ self.leaveWhitespace() + self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] not in self.matchWhite: + raise ParseException(instring, loc, self.errmsg, self) + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min(maxloc, len(instring)) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + +class _PositionToken(Token): + def __init__(self): + super(_PositionToken, self).__init__() + self.name = self.__class__.__name__ + self.mayReturnEmpty = True + self.mayIndexError = False + +class GoToColumn(_PositionToken): + """Token to advance to a specific column of input text; useful for + tabular report scraping. + """ + def __init__(self, colno): + super(GoToColumn, self).__init__() + self.col = colno + + def preParse(self, instring, loc): + if col(loc, instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables(instring, loc) + while loc < instrlen and instring[loc].isspace() and col(loc, instring) != self.col: + loc += 1 + return loc + + def parseImpl(self, instring, loc, doActions=True): + thiscol = col(loc, instring) + if thiscol > self.col: + raise ParseException(instring, loc, "Text not in expected column", self) + newloc = loc + self.col - thiscol + ret = instring[loc: newloc] + return newloc, ret + + +class LineStart(_PositionToken): + r"""Matches if current position is at the beginning of a line within + the parse string + + Example:: + + test = '''\ + AAA this line + AAA and this line + AAA but not this one + B AAA and definitely not this one + ''' + + for t in (LineStart() + 'AAA' + restOfLine).searchString(test): + print(t) + + prints:: + + ['AAA', ' this line'] + ['AAA', ' and this line'] + + """ + def __init__(self): + super(LineStart, self).__init__() + self.errmsg = "Expected start of line" + + def parseImpl(self, instring, loc, doActions=True): + if col(loc, instring) == 1: + return loc, [] + raise ParseException(instring, loc, self.errmsg, self) + +class LineEnd(_PositionToken): + """Matches if current position is at the end of a line within the + parse string + """ + def __init__(self): + super(LineEnd, self).__init__() + self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", "")) + self.errmsg = "Expected end of line" + + def parseImpl(self, instring, loc, doActions=True): + if loc < len(instring): + if instring[loc] == "\n": + return loc + 1, "\n" + else: + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc + 1, [] + 
else: + raise ParseException(instring, loc, self.errmsg, self) + +class StringStart(_PositionToken): + """Matches if current position is at the beginning of the parse + string + """ + def __init__(self): + super(StringStart, self).__init__() + self.errmsg = "Expected start of text" + + def parseImpl(self, instring, loc, doActions=True): + if loc != 0: + # see if entire string up to here is just whitespace and ignoreables + if loc != self.preParse(instring, 0): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class StringEnd(_PositionToken): + """Matches if current position is at the end of the parse string + """ + def __init__(self): + super(StringEnd, self).__init__() + self.errmsg = "Expected end of text" + + def parseImpl(self, instring, loc, doActions=True): + if loc < len(instring): + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc + 1, [] + elif loc > len(instring): + return loc, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class WordStart(_PositionToken): + """Matches if the current position is at the beginning of a Word, + and is not preceded by any character in a given set of + ``wordChars`` (default= ``printables``). To emulate the + ``\b`` behavior of regular expressions, use + ``WordStart(alphanums)``. ``WordStart`` will also match at + the beginning of the string being parsed, or at the beginning of + a line. + """ + def __init__(self, wordChars=printables): + super(WordStart, self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True): + if loc != 0: + if (instring[loc - 1] in self.wordChars + or instring[loc] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class WordEnd(_PositionToken): + """Matches if the current position is at the end of a Word, and is + not followed by any character in a given set of ``wordChars`` + (default= ``printables``). To emulate the ``\b`` behavior of + regular expressions, use ``WordEnd(alphanums)``. ``WordEnd`` + will also match at the end of the string being parsed, or at the end + of a line. + """ + def __init__(self, wordChars=printables): + super(WordEnd, self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True): + instrlen = len(instring) + if instrlen > 0 and loc < instrlen: + if (instring[loc] in self.wordChars or + instring[loc - 1] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + +class ParseExpression(ParserElement): + """Abstract subclass of ParserElement, for combining and + post-processing parsed tokens. 
+ """ + def __init__(self, exprs, savelist=False): + super(ParseExpression, self).__init__(savelist) + if isinstance(exprs, _generatorType): + exprs = list(exprs) + + if isinstance(exprs, basestring): + self.exprs = [self._literalStringClass(exprs)] + elif isinstance(exprs, ParserElement): + self.exprs = [exprs] + elif isinstance(exprs, Iterable): + exprs = list(exprs) + # if sequence of strings provided, wrap with Literal + if any(isinstance(expr, basestring) for expr in exprs): + exprs = (self._literalStringClass(e) if isinstance(e, basestring) else e for e in exprs) + self.exprs = list(exprs) + else: + try: + self.exprs = list(exprs) + except TypeError: + self.exprs = [exprs] + self.callPreparse = False + + def append(self, other): + self.exprs.append(other) + self.strRepr = None + return self + + def leaveWhitespace(self): + """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on + all contained expressions.""" + self.skipWhitespace = False + self.exprs = [e.copy() for e in self.exprs] + for e in self.exprs: + e.leaveWhitespace() + return self + + def ignore(self, other): + if isinstance(other, Suppress): + if other not in self.ignoreExprs: + super(ParseExpression, self).ignore(other) + for e in self.exprs: + e.ignore(self.ignoreExprs[-1]) + else: + super(ParseExpression, self).ignore(other) + for e in self.exprs: + e.ignore(self.ignoreExprs[-1]) + return self + + def __str__(self): + try: + return super(ParseExpression, self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.exprs)) + return self.strRepr + + def streamline(self): + super(ParseExpression, self).streamline() + + for e in self.exprs: + e.streamline() + + # collapse nested And's of the form And(And(And(a, b), c), d) to And(a, b, c, d) + # but only if there are no parse actions or resultsNames on the nested And's + # (likewise for Or's and MatchFirst's) + if len(self.exprs) == 2: + other = self.exprs[0] + if (isinstance(other, self.__class__) + and not other.parseAction + and other.resultsName is None + and not other.debug): + self.exprs = other.exprs[:] + [self.exprs[1]] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + other = self.exprs[-1] + if (isinstance(other, self.__class__) + and not other.parseAction + and other.resultsName is None + and not other.debug): + self.exprs = self.exprs[:-1] + other.exprs[:] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + self.errmsg = "Expected " + _ustr(self) + + return self + + def validate(self, validateTrace=None): + tmp = (validateTrace if validateTrace is not None else [])[:] + [self] + for e in self.exprs: + e.validate(tmp) + self.checkRecursion([]) + + def copy(self): + ret = super(ParseExpression, self).copy() + ret.exprs = [e.copy() for e in self.exprs] + return ret + + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in self.exprs: + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", + name, + type(self).__name__, + e.resultsName), + stacklevel=3) + + return super(ParseExpression, self)._setResultsName(name, listAllMatches) + + +class And(ParseExpression): + """ + Requires all given 
:class:`ParseExpression` s to be found in the given order. + Expressions may be separated by whitespace. + May be constructed using the ``'+'`` operator. + May also be constructed using the ``'-'`` operator, which will + suppress backtracking. + + Example:: + + integer = Word(nums) + name_expr = OneOrMore(Word(alphas)) + + expr = And([integer("id"), name_expr("name"), integer("age")]) + # more easily written as: + expr = integer("id") + name_expr("name") + integer("age") + """ + + class _ErrorStop(Empty): + def __init__(self, *args, **kwargs): + super(And._ErrorStop, self).__init__(*args, **kwargs) + self.name = '-' + self.leaveWhitespace() + + def __init__(self, exprs, savelist=True): + exprs = list(exprs) + if exprs and Ellipsis in exprs: + tmp = [] + for i, expr in enumerate(exprs): + if expr is Ellipsis: + if i < len(exprs) - 1: + skipto_arg = (Empty() + exprs[i + 1]).exprs[-1] + tmp.append(SkipTo(skipto_arg)("_skipped*")) + else: + raise Exception("cannot construct And with sequence ending in ...") + else: + tmp.append(expr) + exprs[:] = tmp + super(And, self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.setWhitespaceChars(self.exprs[0].whiteChars) + self.skipWhitespace = self.exprs[0].skipWhitespace + self.callPreparse = True + + def streamline(self): + # collapse any _PendingSkip's + if self.exprs: + if any(isinstance(e, ParseExpression) and e.exprs and isinstance(e.exprs[-1], _PendingSkip) + for e in self.exprs[:-1]): + for i, e in enumerate(self.exprs[:-1]): + if e is None: + continue + if (isinstance(e, ParseExpression) + and e.exprs and isinstance(e.exprs[-1], _PendingSkip)): + e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] + self.exprs[i + 1] = None + self.exprs = [e for e in self.exprs if e is not None] + + super(And, self).streamline() + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + return self + + def parseImpl(self, instring, loc, doActions=True): + # pass False as last arg to _parse for first element, since we already + # pre-parsed the string as part of our And pre-parsing + loc, resultlist = self.exprs[0]._parse(instring, loc, doActions, callPreParse=False) + errorStop = False + for e in self.exprs[1:]: + if isinstance(e, And._ErrorStop): + errorStop = True + continue + if errorStop: + try: + loc, exprtokens = e._parse(instring, loc, doActions) + except ParseSyntaxException: + raise + except ParseBaseException as pe: + pe.__traceback__ = None + raise ParseSyntaxException._from_exception(pe) + except IndexError: + raise ParseSyntaxException(instring, len(instring), self.errmsg, self) + else: + loc, exprtokens = e._parse(instring, loc, doActions) + if exprtokens or exprtokens.haskeys(): + resultlist += exprtokens + return loc, resultlist + + def __iadd__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # And([self, other]) + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + if not e.mayReturnEmpty: + break + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + +class Or(ParseExpression): + """Requires that at least one :class:`ParseExpression` is found. If + two expressions match, the expression that matches the longest + string will be used. May be constructed using the ``'^'`` + operator. 
+ + Example:: + + # construct Or using '^' operator + + number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums)) + print(number.searchString("123 3.1416 789")) + + prints:: + + [['123'], ['3.1416'], ['789']] + """ + def __init__(self, exprs, savelist=False): + super(Or, self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def streamline(self): + super(Or, self).streamline() + if __compat__.collect_all_And_tokens: + self.saveAsList = any(e.saveAsList for e in self.exprs) + return self + + def parseImpl(self, instring, loc, doActions=True): + maxExcLoc = -1 + maxException = None + matches = [] + for e in self.exprs: + try: + loc2 = e.tryParse(instring, loc) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring, len(instring), e.errmsg, self) + maxExcLoc = len(instring) + else: + # save match among all matches, to retry longest to shortest + matches.append((loc2, e)) + + if matches: + # re-evaluate all matches in descending order of length of match, in case attached actions + # might change whether or how much they match of the input. + matches.sort(key=itemgetter(0), reverse=True) + + if not doActions: + # no further conditions or parse actions to change the selection of + # alternative, so the first match will be the best match + best_expr = matches[0][1] + return best_expr._parse(instring, loc, doActions) + + longest = -1, None + for loc1, expr1 in matches: + if loc1 <= longest[0]: + # already have a longer match than this one will deliver, we are done + return longest + + try: + loc2, toks = expr1._parse(instring, loc, doActions) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + else: + if loc2 >= loc1: + return loc2, toks + # didn't match as much as before + elif loc2 > longest[0]: + longest = loc2, toks + + if longest != (-1, None): + return longest + + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + + def __ixor__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # Or([self, other]) + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if (not __compat__.collect_all_And_tokens + and __diag__.warn_multiple_tokens_in_named_alternation): + if any(isinstance(e, And) for e in self.exprs): + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "may only return a single token for an And alternative, " + "in future will return the full list of tokens".format( + "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), + stacklevel=3) + + return super(Or, self)._setResultsName(name, listAllMatches) + + +class MatchFirst(ParseExpression): + """Requires that at least one :class:`ParseExpression` is found. 
If + two expressions match, the first one listed is the one that will + match. May be constructed using the ``'|'`` operator. + + Example:: + + # construct MatchFirst using '|' operator + + # watch the order of expressions to match + number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) + print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] + + # put more selective expression first + number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums) + print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] + """ + def __init__(self, exprs, savelist=False): + super(MatchFirst, self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def streamline(self): + super(MatchFirst, self).streamline() + if __compat__.collect_all_And_tokens: + self.saveAsList = any(e.saveAsList for e in self.exprs) + return self + + def parseImpl(self, instring, loc, doActions=True): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse(instring, loc, doActions) + return ret + except ParseException as err: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring, len(instring), e.errmsg, self) + maxExcLoc = len(instring) + + # only got here if no expression matched, raise exception for match that made it the furthest + else: + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # MatchFirst([self, other]) + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if (not __compat__.collect_all_And_tokens + and __diag__.warn_multiple_tokens_in_named_alternation): + if any(isinstance(e, And) for e in self.exprs): + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "may only return a single token for an And alternative, " + "in future will return the full list of tokens".format( + "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), + stacklevel=3) + + return super(MatchFirst, self)._setResultsName(name, listAllMatches) + + +class Each(ParseExpression): + """Requires all given :class:`ParseExpression` s to be found, but in + any order. Expressions may be separated by whitespace. + + May be constructed using the ``'&'`` operator. 
+ + Example:: + + color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") + shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") + integer = Word(nums) + shape_attr = "shape:" + shape_type("shape") + posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") + color_attr = "color:" + color("color") + size_attr = "size:" + integer("size") + + # use Each (using operator '&') to accept attributes in any order + # (shape and posn are required, color and size are optional) + shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) + + shape_spec.runTests(''' + shape: SQUARE color: BLACK posn: 100, 120 + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + color:GREEN size:20 shape:TRIANGLE posn:20,40 + ''' + ) + + prints:: + + shape: SQUARE color: BLACK posn: 100, 120 + ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] + - color: BLACK + - posn: ['100', ',', '120'] + - x: 100 + - y: 120 + - shape: SQUARE + + + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] + - color: BLUE + - posn: ['50', ',', '80'] + - x: 50 + - y: 80 + - shape: CIRCLE + - size: 50 + + + color: GREEN size: 20 shape: TRIANGLE posn: 20,40 + ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] + - color: GREEN + - posn: ['20', ',', '40'] + - x: 20 + - y: 40 + - shape: TRIANGLE + - size: 20 + """ + def __init__(self, exprs, savelist=True): + super(Each, self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.skipWhitespace = True + self.initExprGroups = True + self.saveAsList = True + + def streamline(self): + super(Each, self).streamline() + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + return self + + def parseImpl(self, instring, loc, doActions=True): + if self.initExprGroups: + self.opt1map = dict((id(e.expr), e) for e in self.exprs if isinstance(e, Optional)) + opt1 = [e.expr for e in self.exprs if isinstance(e, Optional)] + opt2 = [e for e in self.exprs if e.mayReturnEmpty and not isinstance(e, (Optional, Regex))] + self.optionals = opt1 + opt2 + self.multioptionals = [e.expr for e in self.exprs if isinstance(e, ZeroOrMore)] + self.multirequired = [e.expr for e in self.exprs if isinstance(e, OneOrMore)] + self.required = [e for e in self.exprs if not isinstance(e, (Optional, ZeroOrMore, OneOrMore))] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[:] + tmpOpt = self.optionals[:] + matchOrder = [] + + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse(instring, tmpLoc) + except ParseException: + failed.append(e) + else: + matchOrder.append(self.opt1map.get(id(e), e)) + if e in tmpReqd: + tmpReqd.remove(e) + elif e in tmpOpt: + tmpOpt.remove(e) + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join(_ustr(e) for e in tmpReqd) + raise ParseException(instring, loc, "Missing one or more required elements (%s)" % missing) + + # add any unmatched Optionals, in case they have default values defined + matchOrder += [e for e in self.exprs if isinstance(e, Optional) and e.expr in tmpOpt] + + resultlist = [] + for e in matchOrder: + loc, results = e._parse(instring, loc, doActions) + resultlist.append(results) + + finalResults = sum(resultlist, 
ParseResults([])) + return loc, finalResults + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + + +class ParseElementEnhance(ParserElement): + """Abstract subclass of :class:`ParserElement`, for combining and + post-processing parsed tokens. + """ + def __init__(self, expr, savelist=False): + super(ParseElementEnhance, self).__init__(savelist) + if isinstance(expr, basestring): + if issubclass(self._literalStringClass, Token): + expr = self._literalStringClass(expr) + else: + expr = self._literalStringClass(Literal(expr)) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars(expr.whiteChars) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl(self, instring, loc, doActions=True): + if self.expr is not None: + return self.expr._parse(instring, loc, doActions, callPreParse=False) + else: + raise ParseException("", loc, self.errmsg, self) + + def leaveWhitespace(self): + self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore(self, other): + if isinstance(other, Suppress): + if other not in self.ignoreExprs: + super(ParseElementEnhance, self).ignore(other) + if self.expr is not None: + self.expr.ignore(self.ignoreExprs[-1]) + else: + super(ParseElementEnhance, self).ignore(other) + if self.expr is not None: + self.expr.ignore(self.ignoreExprs[-1]) + return self + + def streamline(self): + super(ParseElementEnhance, self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion(self, parseElementList): + if self in parseElementList: + raise RecursiveGrammarException(parseElementList + [self]) + subRecCheckList = parseElementList[:] + [self] + if self.expr is not None: + self.expr.checkRecursion(subRecCheckList) + + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + tmp = validateTrace[:] + [self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__(self): + try: + return super(ParseElementEnhance, self).__str__() + except Exception: + pass + + if self.strRepr is None and self.expr is not None: + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.expr)) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + """Lookahead matching of the given parse expression. + ``FollowedBy`` does *not* advance the parsing position within + the input string, it only verifies that the specified parse + expression matches at the current position. ``FollowedBy`` + always returns a null token list. If any results names are defined + in the lookahead expression, those *will* be returned for access by + name. 
+ + Example:: + + # use FollowedBy to match a label only if it is followed by a ':' + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() + + prints:: + + [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] + """ + def __init__(self, expr): + super(FollowedBy, self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + # by using self._expr.parse and deleting the contents of the returned ParseResults list + # we keep any named results that were defined in the FollowedBy expression + _, ret = self.expr._parse(instring, loc, doActions=doActions) + del ret[:] + + return loc, ret + + +class PrecededBy(ParseElementEnhance): + """Lookbehind matching of the given parse expression. + ``PrecededBy`` does not advance the parsing position within the + input string, it only verifies that the specified parse expression + matches prior to the current position. ``PrecededBy`` always + returns a null token list, but if a results name is defined on the + given expression, it is returned. + + Parameters: + + - expr - expression that must match prior to the current parse + location + - retreat - (default= ``None``) - (int) maximum number of characters + to lookbehind prior to the current parse location + + If the lookbehind expression is a string, Literal, Keyword, or + a Word or CharsNotIn with a specified exact or maximum length, then + the retreat parameter is not required. Otherwise, retreat must be + specified to give a maximum number of characters to look back from + the current parse position for a lookbehind match. 
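A minimal sketch (editorial addition, not from the original source) of the ``retreat`` parameter described above; it is needed here because ``Word(nums)`` has no fixed maximum length::

        from pyparsing import PrecededBy, Word, alphas, nums

        # match a run of letters only when it is directly preceded by digits;
        # retreat bounds how far back the lookbehind may scan
        num_prefixed = PrecededBy(Word(nums), retreat=5) + Word(alphas)
        print(num_prefixed.searchString("42abc"))   # should print [['abc']]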
+ + Example:: + + # VB-style variable names with type prefixes + int_var = PrecededBy("#") + pyparsing_common.identifier + str_var = PrecededBy("$") + pyparsing_common.identifier + + """ + def __init__(self, expr, retreat=None): + super(PrecededBy, self).__init__(expr) + self.expr = self.expr().leaveWhitespace() + self.mayReturnEmpty = True + self.mayIndexError = False + self.exact = False + if isinstance(expr, str): + retreat = len(expr) + self.exact = True + elif isinstance(expr, (Literal, Keyword)): + retreat = expr.matchLen + self.exact = True + elif isinstance(expr, (Word, CharsNotIn)) and expr.maxLen != _MAX_INT: + retreat = expr.maxLen + self.exact = True + elif isinstance(expr, _PositionToken): + retreat = 0 + self.exact = True + self.retreat = retreat + self.errmsg = "not preceded by " + str(expr) + self.skipWhitespace = False + self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None))) + + def parseImpl(self, instring, loc=0, doActions=True): + if self.exact: + if loc < self.retreat: + raise ParseException(instring, loc, self.errmsg) + start = loc - self.retreat + _, ret = self.expr._parse(instring, start) + else: + # retreat specified a maximum lookbehind window, iterate + test_expr = self.expr + StringEnd() + instring_slice = instring[max(0, loc - self.retreat):loc] + last_expr = ParseException(instring, loc, self.errmsg) + for offset in range(1, min(loc, self.retreat + 1)+1): + try: + # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:])) + _, ret = test_expr._parse(instring_slice, len(instring_slice) - offset) + except ParseBaseException as pbe: + last_expr = pbe + else: + break + else: + raise last_expr + return loc, ret + + +class NotAny(ParseElementEnhance): + """Lookahead to disallow matching with the given parse expression. + ``NotAny`` does *not* advance the parsing position within the + input string, it only verifies that the specified parse expression + does *not* match at the current position. Also, ``NotAny`` does + *not* skip over leading whitespace. ``NotAny`` always returns + a null token list. May be constructed using the '~' operator. + + Example:: + + AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split()) + + # take care not to mistake keywords for identifiers + ident = ~(AND | OR | NOT) + Word(alphas) + boolean_term = Optional(NOT) + ident + + # very crude boolean expression - to support parenthesis groups and + # operation hierarchy, use infixNotation + boolean_expr = boolean_term + ZeroOrMore((AND | OR) + boolean_term) + + # integers that are followed by "." 
are actually floats + integer = Word(nums) + ~Char(".") + """ + def __init__(self, expr): + super(NotAny, self).__init__(expr) + # ~ self.leaveWhitespace() + self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs + self.mayReturnEmpty = True + self.errmsg = "Found unwanted token, " + _ustr(self.expr) + + def parseImpl(self, instring, loc, doActions=True): + if self.expr.canParseNext(instring, loc): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "~{" + _ustr(self.expr) + "}" + + return self.strRepr + +class _MultipleMatch(ParseElementEnhance): + def __init__(self, expr, stopOn=None): + super(_MultipleMatch, self).__init__(expr) + self.saveAsList = True + ender = stopOn + if isinstance(ender, basestring): + ender = self._literalStringClass(ender) + self.stopOn(ender) + + def stopOn(self, ender): + if isinstance(ender, basestring): + ender = self._literalStringClass(ender) + self.not_ender = ~ender if ender is not None else None + return self + + def parseImpl(self, instring, loc, doActions=True): + self_expr_parse = self.expr._parse + self_skip_ignorables = self._skipIgnorables + check_ender = self.not_ender is not None + if check_ender: + try_not_ender = self.not_ender.tryParse + + # must be at least one (but first see if we are the stopOn sentinel; + # if so, fail) + if check_ender: + try_not_ender(instring, loc) + loc, tokens = self_expr_parse(instring, loc, doActions, callPreParse=False) + try: + hasIgnoreExprs = (not not self.ignoreExprs) + while 1: + if check_ender: + try_not_ender(instring, loc) + if hasIgnoreExprs: + preloc = self_skip_ignorables(instring, loc) + else: + preloc = loc + loc, tmptokens = self_expr_parse(instring, preloc, doActions) + if tmptokens or tmptokens.haskeys(): + tokens += tmptokens + except (ParseException, IndexError): + pass + + return loc, tokens + + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in [self.expr] + getattr(self.expr, 'exprs', []): + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", + name, + type(self).__name__, + e.resultsName), + stacklevel=3) + + return super(_MultipleMatch, self)._setResultsName(name, listAllMatches) + + +class OneOrMore(_MultipleMatch): + """Repetition of one or more of the given expression. + + Parameters: + - expr - expression that must match one or more times + - stopOn - (default= ``None``) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example:: + + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: BLACK" + OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] + + # use stopOn attribute for OneOrMore to avoid reading label string as part of the data + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] + + # could also be written as + (attr_expr * (1,)).parseString(text).pprint() + """ + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + _ustr(self.expr) + "}..." + + return self.strRepr + +class ZeroOrMore(_MultipleMatch): + """Optional repetition of zero or more of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - stopOn - (default= ``None``) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example: similar to :class:`OneOrMore` + """ + def __init__(self, expr, stopOn=None): + super(ZeroOrMore, self).__init__(expr, stopOn=stopOn) + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + try: + return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) + except (ParseException, IndexError): + return loc, [] + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." + + return self.strRepr + + +class _NullToken(object): + def __bool__(self): + return False + __nonzero__ = __bool__ + def __str__(self): + return "" + +class Optional(ParseElementEnhance): + """Optional matching of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - default (optional) - value to be returned if the optional expression is not found. + + Example:: + + # US postal code can be a 5-digit zip, plus optional 4-digit qualifier + zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) + zip.runTests(''' + # traditional ZIP code + 12345 + + # ZIP+4 form + 12101-0001 + + # invalid ZIP + 98765- + ''') + + prints:: + + # traditional ZIP code + 12345 + ['12345'] + + # ZIP+4 form + 12101-0001 + ['12101-0001'] + + # invalid ZIP + 98765- + ^ + FAIL: Expected end of text (at char 5), (line:1, col:6) + """ + __optionalNotMatched = _NullToken() + + def __init__(self, expr, default=__optionalNotMatched): + super(Optional, self).__init__(expr, savelist=False) + self.saveAsList = self.expr.saveAsList + self.defaultValue = default + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + try: + loc, tokens = self.expr._parse(instring, loc, doActions, callPreParse=False) + except (ParseException, IndexError): + if self.defaultValue is not self.__optionalNotMatched: + if self.expr.resultsName: + tokens = ParseResults([self.defaultValue]) + tokens[self.expr.resultsName] = self.defaultValue + else: + tokens = [self.defaultValue] + else: + tokens = [] + return loc, tokens + + def __str__(self): + if hasattr(self, "name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]" + + return self.strRepr + +class SkipTo(ParseElementEnhance): + """Token for skipping over all undefined text until the matched + expression is found. 
+ + Parameters: + - expr - target expression marking the end of the data to be skipped + - include - (default= ``False``) if True, the target expression is also parsed + (the skipped text and target expression are returned as a 2-element list). + - ignore - (default= ``None``) used to define grammars (typically quoted strings and + comments) that might contain false matches to the target expression + - failOn - (default= ``None``) define expressions that are not allowed to be + included in the skipped test; if found before the target expression is found, + the SkipTo is not a match + + Example:: + + report = ''' + Outstanding Issues Report - 1 Jan 2000 + + # | Severity | Description | Days Open + -----+----------+-------------------------------------------+----------- + 101 | Critical | Intermittent system crash | 6 + 94 | Cosmetic | Spelling error on Login ('log|n') | 14 + 79 | Minor | System slow when running too many reports | 47 + ''' + integer = Word(nums) + SEP = Suppress('|') + # use SkipTo to simply match everything up until the next SEP + # - ignore quoted strings, so that a '|' character inside a quoted string does not match + # - parse action will call token.strip() for each matched token, i.e., the description body + string_data = SkipTo(SEP, ignore=quotedString) + string_data.setParseAction(tokenMap(str.strip)) + ticket_expr = (integer("issue_num") + SEP + + string_data("sev") + SEP + + string_data("desc") + SEP + + integer("days_open")) + + for tkt in ticket_expr.searchString(report): + print tkt.dump() + + prints:: + + ['101', 'Critical', 'Intermittent system crash', '6'] + - days_open: 6 + - desc: Intermittent system crash + - issue_num: 101 + - sev: Critical + ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] + - days_open: 14 + - desc: Spelling error on Login ('log|n') + - issue_num: 94 + - sev: Cosmetic + ['79', 'Minor', 'System slow when running too many reports', '47'] + - days_open: 47 + - desc: System slow when running too many reports + - issue_num: 79 + - sev: Minor + """ + def __init__(self, other, include=False, ignore=None, failOn=None): + super(SkipTo, self).__init__(other) + self.ignoreExpr = ignore + self.mayReturnEmpty = True + self.mayIndexError = False + self.includeMatch = include + self.saveAsList = False + if isinstance(failOn, basestring): + self.failOn = self._literalStringClass(failOn) + else: + self.failOn = failOn + self.errmsg = "No match found for " + _ustr(self.expr) + + def parseImpl(self, instring, loc, doActions=True): + startloc = loc + instrlen = len(instring) + expr = self.expr + expr_parse = self.expr._parse + self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None + self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None + + tmploc = loc + while tmploc <= instrlen: + if self_failOn_canParseNext is not None: + # break if failOn expression matches + if self_failOn_canParseNext(instring, tmploc): + break + + if self_ignoreExpr_tryParse is not None: + # advance past ignore expressions + while 1: + try: + tmploc = self_ignoreExpr_tryParse(instring, tmploc) + except ParseBaseException: + break + + try: + expr_parse(instring, tmploc, doActions=False, callPreParse=False) + except (ParseException, IndexError): + # no match, advance loc in string + tmploc += 1 + else: + # matched skipto expr, done + break + + else: + # ran off the end of the input string without matching skipto expr, fail + raise ParseException(instring, loc, self.errmsg, self) + + # build up return 
values + loc = tmploc + skiptext = instring[startloc:loc] + skipresult = ParseResults(skiptext) + + if self.includeMatch: + loc, mat = expr_parse(instring, loc, doActions, callPreParse=False) + skipresult += mat + + return loc, skipresult + +class Forward(ParseElementEnhance): + """Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the ``Forward`` + variable using the '<<' operator. + + Note: take care when assigning to ``Forward`` not to overlook + precedence of operators. + + Specifically, '|' has a lower precedence than '<<', so that:: + + fwdExpr << a | b | c + + will actually be evaluated as:: + + (fwdExpr << a) | b | c + + thereby leaving b and c out as parseable alternatives. It is recommended that you + explicitly group the values inserted into the ``Forward``:: + + fwdExpr << (a | b | c) + + Converting to use the '<<=' operator instead will avoid this problem. + + See :class:`ParseResults.pprint` for an example of a recursive + parser created using ``Forward``. + """ + def __init__(self, other=None): + super(Forward, self).__init__(other, savelist=False) + + def __lshift__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + self.expr = other + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars(self.expr.whiteChars) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return self + + def __ilshift__(self, other): + return self << other + + def leaveWhitespace(self): + self.skipWhitespace = False + return self + + def streamline(self): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + + if self not in validateTrace: + tmp = validateTrace[:] + [self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is not None: + return self.strRepr + + # Avoid infinite recursion by setting a temporary strRepr + self.strRepr = ": ..." + + # Use the string representation of main expression. + retString = '...' + try: + if self.expr is not None: + retString = _ustr(self.expr)[:1000] + else: + retString = "None" + finally: + self.strRepr = self.__class__.__name__ + ": " + retString + return self.strRepr + + def copy(self): + if self.expr is not None: + return super(Forward, self).copy() + else: + ret = Forward() + ret <<= self + return ret + + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_name_set_on_empty_Forward: + if self.expr is None: + warnings.warn("{0}: setting results name {0!r} on {1} expression " + "that has no contained expression".format("warn_name_set_on_empty_Forward", + name, + type(self).__name__), + stacklevel=3) + + return super(Forward, self)._setResultsName(name, listAllMatches) + +class TokenConverter(ParseElementEnhance): + """ + Abstract subclass of :class:`ParseExpression`, for converting parsed results. 
+ """ + def __init__(self, expr, savelist=False): + super(TokenConverter, self).__init__(expr) # , savelist) + self.saveAsList = False + +class Combine(TokenConverter): + """Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the + input string; this can be disabled by specifying + ``'adjacent=False'`` in the constructor. + + Example:: + + real = Word(nums) + '.' + Word(nums) + print(real.parseString('3.1416')) # -> ['3', '.', '1416'] + # will also erroneously match the following + print(real.parseString('3. 1416')) # -> ['3', '.', '1416'] + + real = Combine(Word(nums) + '.' + Word(nums)) + print(real.parseString('3.1416')) # -> ['3.1416'] + # no match when there are internal spaces + print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) + """ + def __init__(self, expr, joinString="", adjacent=True): + super(Combine, self).__init__(expr) + # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself + if adjacent: + self.leaveWhitespace() + self.adjacent = adjacent + self.skipWhitespace = True + self.joinString = joinString + self.callPreparse = True + + def ignore(self, other): + if self.adjacent: + ParserElement.ignore(self, other) + else: + super(Combine, self).ignore(other) + return self + + def postParse(self, instring, loc, tokenlist): + retToks = tokenlist.copy() + del retToks[:] + retToks += ParseResults(["".join(tokenlist._asStringList(self.joinString))], modal=self.modalResults) + + if self.resultsName and retToks.haskeys(): + return [retToks] + else: + return retToks + +class Group(TokenConverter): + """Converter to return the matched tokens as a list - useful for + returning tokens of :class:`ZeroOrMore` and :class:`OneOrMore` expressions. + + Example:: + + ident = Word(alphas) + num = Word(nums) + term = ident | num + func = ident + Optional(delimitedList(term)) + print(func.parseString("fn a, b, 100")) # -> ['fn', 'a', 'b', '100'] + + func = ident + Group(Optional(delimitedList(term))) + print(func.parseString("fn a, b, 100")) # -> ['fn', ['a', 'b', '100']] + """ + def __init__(self, expr): + super(Group, self).__init__(expr) + self.saveAsList = True + + def postParse(self, instring, loc, tokenlist): + return [tokenlist] + +class Dict(TokenConverter): + """Converter to return a repetitive expression as a list, but also + as a dictionary. Each element can also be referenced using the first + token in the expression as its key. Useful for tabular report + scraping when the first column can be used as a item key. 
+ + Example:: + + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + # print attributes as plain groups + print(OneOrMore(attr_expr).parseString(text).dump()) + + # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names + result = Dict(OneOrMore(Group(attr_expr))).parseString(text) + print(result.dump()) + + # access named fields as dict entries, or output as dict + print(result['shape']) + print(result.asDict()) + + prints:: + + ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} + + See more examples at :class:`ParseResults` of accessing fields by results name. + """ + def __init__(self, expr): + super(Dict, self).__init__(expr) + self.saveAsList = True + + def postParse(self, instring, loc, tokenlist): + for i, tok in enumerate(tokenlist): + if len(tok) == 0: + continue + ikey = tok[0] + if isinstance(ikey, int): + ikey = _ustr(tok[0]).strip() + if len(tok) == 1: + tokenlist[ikey] = _ParseResultsWithOffset("", i) + elif len(tok) == 2 and not isinstance(tok[1], ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1], i) + else: + dictvalue = tok.copy() # ParseResults(i) + del dictvalue[0] + if len(dictvalue) != 1 or (isinstance(dictvalue, ParseResults) and dictvalue.haskeys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue, i) + else: + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0], i) + + if self.resultsName: + return [tokenlist] + else: + return tokenlist + + +class Suppress(TokenConverter): + """Converter for ignoring the results of a parsed expression. + + Example:: + + source = "a, b, c,d" + wd = Word(alphas) + wd_list1 = wd + ZeroOrMore(',' + wd) + print(wd_list1.parseString(source)) + + # often, delimiters that are useful during parsing are just in the + # way afterward - use Suppress to keep them out of the parsed output + wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) + print(wd_list2.parseString(source)) + + prints:: + + ['a', ',', 'b', ',', 'c', ',', 'd'] + ['a', 'b', 'c', 'd'] + + (See also :class:`delimitedList`.) + """ + def postParse(self, instring, loc, tokenlist): + return [] + + def suppress(self): + return self + + +class OnlyOnce(object): + """Wrapper for parse actions, to ensure they are only called once. + """ + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + def __call__(self, s, l, t): + if not self.called: + results = self.callable(s, l, t) + self.called = True + return results + raise ParseException(s, l, "") + def reset(self): + self.called = False + +def traceParseAction(f): + """Decorator for debugging parse actions. + + When the parse action is called, this decorator will print + ``">> entering method-name(line:, , )"``. + When the parse action completes, the decorator will print + ``"<<"`` followed by the returned value, or any exception that the parse action raised. 
+ + Example:: + + wd = Word(alphas) + + @traceParseAction + def remove_duplicate_chars(tokens): + return ''.join(sorted(set(''.join(tokens)))) + + wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) + print(wds.parseString("slkdjs sld sldd sdlf sdljf")) + + prints:: + + >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) + < 3: + thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc + sys.stderr.write(">>entering %s(line: '%s', %d, %r)\n" % (thisFunc, line(l, s), l, t)) + try: + ret = f(*paArgs) + except Exception as exc: + sys.stderr.write("< ['aa', 'bb', 'cc'] + delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] + """ + dlName = _ustr(expr) + " [" + _ustr(delim) + " " + _ustr(expr) + "]..." + if combine: + return Combine(expr + ZeroOrMore(delim + expr)).setName(dlName) + else: + return (expr + ZeroOrMore(Suppress(delim) + expr)).setName(dlName) + +def countedArray(expr, intExpr=None): + """Helper to define a counted list of expressions. + + This helper defines a pattern of the form:: + + integer expr expr expr... + + where the leading integer tells how many expr expressions follow. + The matched tokens returns the array of expr tokens as a list - the + leading count token is suppressed. + + If ``intExpr`` is specified, it should be a pyparsing expression + that produces an integer value. + + Example:: + + countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) + countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] + """ + arrayExpr = Forward() + def countFieldParseAction(s, l, t): + n = t[0] + arrayExpr << (n and Group(And([expr] * n)) or Group(empty)) + return [] + if intExpr is None: + intExpr = Word(nums).setParseAction(lambda t: int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.setName("arrayLen") + intExpr.addParseAction(countFieldParseAction, callDuringTry=True) + return (intExpr + arrayExpr).setName('(len) ' + _ustr(expr) + '...') + +def _flatten(L): + ret = [] + for i in L: + if isinstance(i, list): + ret.extend(_flatten(i)) + else: + ret.append(i) + return ret + +def matchPreviousLiteral(expr): + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + + first = Word(nums) + second = matchPreviousLiteral(first) + matchExpr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches a previous literal, will also match the leading + ``"1:1"`` in ``"1:10"``. If this is not desired, use + :class:`matchPreviousExpr`. Do *not* use with packrat parsing + enabled. + """ + rep = Forward() + def copyTokenToRepeater(s, l, t): + if t: + if len(t) == 1: + rep << t[0] + else: + # flatten t tokens + tflat = _flatten(t.asList()) + rep << And(Literal(tt) for tt in tflat) + else: + rep << Empty() + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def matchPreviousExpr(expr): + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. 
For example:: + + first = Word(nums) + second = matchPreviousExpr(first) + matchExpr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches by expressions, will *not* match the leading ``"1:1"`` + in ``"1:10"``; the expressions are evaluated first, and then + compared, so ``"1"`` is compared with ``"10"``. Do *not* use + with packrat parsing enabled. + """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + def copyTokenToRepeater(s, l, t): + matchTokens = _flatten(t.asList()) + def mustMatchTheseTokens(s, l, t): + theseTokens = _flatten(t.asList()) + if theseTokens != matchTokens: + raise ParseException('', 0, '') + rep.setParseAction(mustMatchTheseTokens, callDuringTry=True) + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def _escapeRegexRangeChars(s): + # ~ escape these chars: ^-[] + for c in r"\^-[]": + s = s.replace(c, _bslash + c) + s = s.replace("\n", r"\n") + s = s.replace("\t", r"\t") + return _ustr(s) + +def oneOf(strs, caseless=False, useRegex=True, asKeyword=False): + """Helper to quickly define a set of alternative Literals, and makes + sure to do longest-first testing when there is a conflict, + regardless of the input order, but returns + a :class:`MatchFirst` for best performance. + + Parameters: + + - strs - a string of space-delimited literals, or a collection of + string literals + - caseless - (default= ``False``) - treat all literals as + caseless + - useRegex - (default= ``True``) - as an optimization, will + generate a Regex object; otherwise, will generate + a :class:`MatchFirst` object (if ``caseless=True`` or ``asKeyword=True``, or if + creating a :class:`Regex` raises an exception) + - asKeyword - (default=``False``) - enforce Keyword-style matching on the + generated expressions + + Example:: + + comp_oper = oneOf("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) + + prints:: + + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] + """ + if isinstance(caseless, basestring): + warnings.warn("More than one string argument passed to oneOf, pass " + "choices as a list or space-delimited string", stacklevel=2) + + if caseless: + isequal = (lambda a, b: a.upper() == b.upper()) + masks = (lambda a, b: b.upper().startswith(a.upper())) + parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral + else: + isequal = (lambda a, b: a == b) + masks = (lambda a, b: b.startswith(a)) + parseElementClass = Keyword if asKeyword else Literal + + symbols = [] + if isinstance(strs, basestring): + symbols = strs.split() + elif isinstance(strs, Iterable): + symbols = list(strs) + else: + warnings.warn("Invalid argument to oneOf, expected string or iterable", + SyntaxWarning, stacklevel=2) + if not symbols: + return NoMatch() + + if not asKeyword: + # if not producing keywords, need to reorder to take care to avoid masking + # longer choices with shorter ones + i = 0 + while i < len(symbols) - 1: + cur = symbols[i] + for j, other in enumerate(symbols[i + 1:]): + if isequal(other, cur): + del symbols[i + j + 1] + break + elif masks(cur, other): + del symbols[i + j + 1] + symbols.insert(i, other) + break + else: + i += 1 + + if not (caseless or asKeyword) and useRegex: + # ~ print (strs, "->", "|".join([_escapeRegexChars(sym) for sym in symbols])) + try: + if len(symbols) == len("".join(symbols)): + return 
Regex("[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols)).setName(' | '.join(symbols)) + else: + return Regex("|".join(re.escape(sym) for sym in symbols)).setName(' | '.join(symbols)) + except Exception: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", + SyntaxWarning, stacklevel=2) + + # last resort, just use MatchFirst + return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) + +def dictOf(key, value): + """Helper to easily and clearly define a dictionary by specifying + the respective patterns for the key and value. Takes care of + defining the :class:`Dict`, :class:`ZeroOrMore`, and + :class:`Group` tokens in the proper order. The key pattern + can include delimiting markers or punctuation, as long as they are + suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the :class:`Dict` results + can include named token fields. + + Example:: + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + print(OneOrMore(attr_expr).parseString(text).dump()) + + attr_label = label + attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) + + # similar to Dict, but simpler call format + result = dictOf(attr_label, attr_value).parseString(text) + print(result.dump()) + print(result['shape']) + print(result.shape) # object attribute access works too + print(result.asDict()) + + prints:: + + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + SQUARE + {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} + """ + return Dict(OneOrMore(Group(key + value))) + +def originalTextFor(expr, asString=True): + """Helper to return the original, untokenized text for a given + expression. Useful to restore the parsed fields of an HTML start + tag into the raw tag text itself, or to revert separate tokens with + intervening whitespace back to the original matching input text. By + default, returns astring containing the original parsed text. + + If the optional ``asString`` argument is passed as + ``False``, then the return value is + a :class:`ParseResults` containing any results names that + were originally matched, and a single token containing the original + matched text from the input string. So if the expression passed to + :class:`originalTextFor` contains expressions with defined + results names, you must set ``asString`` to ``False`` if you + want to preserve those results name values. 
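A small sketch (editorial addition, not part of the original source) of the ``asString=False`` behaviour described above; both the contained results names and the original text slice should remain available::

        from pyparsing import Word, alphas, nums, originalTextFor

        key = Word(alphas)("key")
        value = Word(nums)("value")
        assignment = originalTextFor(key + '=' + value, asString=False)

        result = assignment.parseString("answer = 42")
        print(result[0])                 # should print the raw slice: answer = 42
        print(result.key, result.value)  # should print: answer 42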
+ + Example:: + + src = "this is test bold text normal text " + for tag in ("b", "i"): + opener, closer = makeHTMLTags(tag) + patt = originalTextFor(opener + SkipTo(closer) + closer) + print(patt.searchString(src)[0]) + + prints:: + + [' bold text '] + ['text'] + """ + locMarker = Empty().setParseAction(lambda s, loc, t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s, l, t: s[t._original_start: t._original_end] + else: + def extractText(s, l, t): + t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] + matchExpr.setParseAction(extractText) + matchExpr.ignoreExprs = expr.ignoreExprs + return matchExpr + +def ungroup(expr): + """Helper to undo pyparsing's default grouping of And expressions, + even if all but one are non-empty. + """ + return TokenConverter(expr).addParseAction(lambda t: t[0]) + +def locatedExpr(expr): + """Helper to decorate a returned token with its starting and ending + locations in the input string. + + This helper adds the following results names: + + - locn_start = location where matched expression begins + - locn_end = location where matched expression ends + - value = the actual parsed results + + Be careful if the input text contains ```` characters, you + may want to call :class:`ParserElement.parseWithTabs` + + Example:: + + wd = Word(alphas) + for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): + print(match) + + prints:: + + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] + """ + locator = Empty().setParseAction(lambda s, l, t: l) + return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) + + +# convenience constants for positional expressions +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") + +_escapedPunc = Word(_bslash, r"\[]-*.$+^?()~ ", exact=2).setParseAction(lambda s, l, t: t[0][1]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s, l, t: unichr(int(t[0].lstrip(r'\0x'), 16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s, l, t: unichr(int(t[0][1:], 8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group(OneOrMore(_charRange | _singleChar)).setResultsName("body") + "]" + +def srange(s): + r"""Helper to easily define string ranges for use in Word + construction. Borrows syntax from regexp '[]' string range + definitions:: + + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + + The input string must be enclosed in []'s, and the returned string + is the expanded character set joined into a single string. 
The + values enclosed in the []'s may be: + + - a single character + - an escaped character with a leading backslash (such as ``\-`` + or ``\]``) + - an escaped hex character with a leading ``'\x'`` + (``\x21``, which is a ``'!'`` character) (``\0x##`` + is also supported for backwards compatibility) + - an escaped octal character with a leading ``'\0'`` + (``\041``, which is a ``'!'`` character) + - a range of any of the above, separated by a dash (``'a-z'``, + etc.) + - any combination of the above (``'aeiouy'``, + ``'a-zA-Z0-9_$'``, etc.) + """ + _expanded = lambda p: p if not isinstance(p, ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]), ord(p[1]) + 1)) + try: + return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) + except Exception: + return "" + +def matchOnlyAtCol(n): + """Helper method for defining parse actions that require matching at + a specific column in the input text. + """ + def verifyCol(strg, locn, toks): + if col(locn, strg) != n: + raise ParseException(strg, locn, "matched token not at column %d" % n) + return verifyCol + +def replaceWith(replStr): + """Helper method for common parse actions that simply return + a literal value. Especially useful when used with + :class:`transformString` (). + + Example:: + + num = Word(nums).setParseAction(lambda toks: int(toks[0])) + na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) + term = na | num + + OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] + """ + return lambda s, l, t: [replStr] + +def removeQuotes(s, l, t): + """Helper parse action for removing quotation marks from parsed + quoted strings. + + Example:: + + # by default, quotation marks are included in parsed results + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] + + # use removeQuotes to strip quotation marks from parsed results + quotedString.setParseAction(removeQuotes) + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] + """ + return t[0][1:-1] + +def tokenMap(func, *args): + """Helper to define a parse action by mapping a function to all + elements of a ParseResults list. If any additional args are passed, + they are forwarded to the given function as additional arguments + after the token, as in + ``hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))``, + which will convert the parsed data to an integer using base 16. 
+ + Example (compare the last to example in :class:`ParserElement.transformString`:: + + hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) + hex_ints.runTests(''' + 00 11 22 aa FF 0a 0d 1a + ''') + + upperword = Word(alphas).setParseAction(tokenMap(str.upper)) + OneOrMore(upperword).runTests(''' + my kingdom for a horse + ''') + + wd = Word(alphas).setParseAction(tokenMap(str.title)) + OneOrMore(wd).setParseAction(' '.join).runTests(''' + now is the winter of our discontent made glorious summer by this sun of york + ''') + + prints:: + + 00 11 22 aa FF 0a 0d 1a + [0, 17, 34, 170, 255, 10, 13, 26] + + my kingdom for a horse + ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] + + now is the winter of our discontent made glorious summer by this sun of york + ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] + """ + def pa(s, l, t): + return [func(tokn, *args) for tokn in t] + + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + pa.__name__ = func_name + + return pa + +upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) +"""(Deprecated) Helper parse action to convert tokens to upper case. +Deprecated in favor of :class:`pyparsing_common.upcaseTokens`""" + +downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) +"""(Deprecated) Helper parse action to convert tokens to lower case. +Deprecated in favor of :class:`pyparsing_common.downcaseTokens`""" + +def _makeTags(tagStr, xml, + suppress_LT=Suppress("<"), + suppress_GT=Suppress(">")): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr, basestring): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas, alphanums + "_-:") + if xml: + tagAttrValue = dblQuotedString.copy().setParseAction(removeQuotes) + openTag = (suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue))) + + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') + + suppress_GT) + else: + tagAttrValue = quotedString.copy().setParseAction(removeQuotes) | Word(printables, excludeChars=">") + openTag = (suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens) + + Optional(Suppress("=") + tagAttrValue)))) + + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') + + suppress_GT) + closeTag = Combine(_L("", adjacent=False) + + openTag.setName("<%s>" % resname) + # add start results name in parse action now that ungrouped names are not reported at two levels + openTag.addParseAction(lambda t: t.__setitem__("start" + "".join(resname.replace(":", " ").title().split()), t.copy())) + closeTag = closeTag("end" + "".join(resname.replace(":", " ").title().split())).setName("" % resname) + openTag.tag = resname + closeTag.tag = resname + openTag.tag_body = SkipTo(closeTag()) + return openTag, closeTag + +def makeHTMLTags(tagStr): + """Helper to construct opening and closing tag expressions for HTML, + given a tag name. Matches tags in either upper or lower case, + attributes with namespaces and with quoted or unquoted values. 
+ + Example:: + + text = 'More info at the pyparsing wiki page' + # makeHTMLTags returns pyparsing expressions for the opening and + # closing tags as a 2-tuple + a, a_end = makeHTMLTags("A") + link_expr = a + SkipTo(a_end)("link_text") + a_end + + for link in link_expr.searchString(text): + # attributes in the tag (like "href" shown here) are + # also accessible as named results + print(link.link_text, '->', link.href) + + prints:: + + pyparsing -> https://github.com/pyparsing/pyparsing/wiki + """ + return _makeTags(tagStr, False) + +def makeXMLTags(tagStr): + """Helper to construct opening and closing tag expressions for XML, + given a tag name. Matches tags only in the given upper/lower case. + + Example: similar to :class:`makeHTMLTags` + """ + return _makeTags(tagStr, True) + +def withAttribute(*args, **attrDict): + """Helper to create a validating parse action to be used with start + tags created with :class:`makeXMLTags` or + :class:`makeHTMLTags`. Use ``withAttribute`` to qualify + a starting tag with a required attribute value, to avoid false + matches on common tags such as ```` or ``
``. + + Call ``withAttribute`` with a series of attribute names and + values. Specify the list of filter attributes names and values as: + + - keyword arguments, as in ``(align="right")``, or + - as an explicit dict with ``**`` operator, when an attribute + name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}`` + - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align", "right"))`` + + For attribute names with a namespace prefix, you must use the second + form. Attribute names are matched insensitive to upper/lower case. + + If just testing for ``class`` (with or without a namespace), use + :class:`withClass`. + + To verify that the attribute exists, but without specifying a value, + pass ``withAttribute.ANY_VALUE`` as the value. + + Example:: + + html = ''' +
+ <div>
+ Some text
+ <div type="grid">1 4 0 1 0</div>
+ <div type="graph">1,3 2,3 1,1</div>
+ <div>this has no type</div>
+ </div>
+ + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + + prints:: + + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k, v) for k, v in attrs] + def pa(s, l, tokens): + for attrName, attrValue in attrs: + if attrName not in tokens: + raise ParseException(s, l, "no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s, l, "attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +def withClass(classname, namespace=''): + """Simplified version of :class:`withAttribute` when + matching on a div class - made difficult because ``class`` is + a reserved word in Python. + + Example:: + + html = ''' +
+ <div>
+ Some text
+ <div class="grid">1 4 0 1 0</div>
+ <div class="graph">1,3 2,3 1,1</div>
+ <div>this &lt;div&gt; has no class</div>
+ </div>
+ + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + + prints:: + + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + classattr = "%s:class" % namespace if namespace else "class" + return withAttribute(**{classattr: classname}) + +opAssoc = SimpleNamespace() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def infixNotation(baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')')): + """Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary + or binary, left- or right-associative. Parse actions can also be + attached to operator expressions. The generated parser will also + recognize the use of parentheses to override operator precedences + (see example below). + + Note: if you define a deep operator list, you may see performance + issues when using infixNotation. See + :class:`ParserElement.enablePackrat` for a mechanism to potentially + improve your parser performance. + + Parameters: + - baseExpr - expression representing the most basic element for the + nested + - opList - list of tuples, one for each operator precedence level + in the expression grammar; each tuple is of the form ``(opExpr, + numTerms, rightLeftAssoc, parseAction)``, where: + + - opExpr is the pyparsing expression for the operator; may also + be a string, which will be converted to a Literal; if numTerms + is 3, opExpr is a tuple of two expressions, for the two + operators separating the 3 terms + - numTerms is the number of terms for this operator (must be 1, + 2, or 3) + - rightLeftAssoc is the indicator whether the operator is right + or left associative, using the pyparsing-defined constants + ``opAssoc.RIGHT`` and ``opAssoc.LEFT``. 
+ - parseAction is the parse action to be associated with + expressions matching this operator expression (the parse action + tuple member may be omitted); if the parse action is passed + a tuple or list of functions, this is equivalent to calling + ``setParseAction(*fn)`` + (:class:`ParserElement.setParseAction`) + - lpar - expression for matching left-parentheses + (default= ``Suppress('(')``) + - rpar - expression for matching right-parentheses + (default= ``Suppress(')')``) + + Example:: + + # simple example of four-function arithmetic with ints and + # variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + + prints:: + + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + # captive version of FollowedBy that does not do parse actions or capture results names + class _FB(FollowedBy): + def parseImpl(self, instring, loc, doActions=True): + self.expr.tryParse(instring, loc) + return loc, [] + + ret = Forward() + lastExpr = baseExpr | (lpar + ret + rpar) + for i, operDef in enumerate(opList): + opExpr, arity, rightLeftAssoc, pa = (operDef + (None, ))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError( + "if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward().setName(termName) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + OneOrMore(opExpr)) + elif arity == 2: + if opExpr is not None: + matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group(lastExpr + OneOrMore(opExpr + lastExpr)) + else: + matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr + OneOrMore(lastExpr)) + elif arity == 3: + matchExpr = (_FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr))) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr) + elif arity == 2: + if opExpr is not None: + matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group(lastExpr + OneOrMore(opExpr + thisExpr)) + else: + matchExpr = _FB(lastExpr + thisExpr) + Group(lastExpr + OneOrMore(thisExpr)) + elif arity == 3: + matchExpr = (_FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr)) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + if isinstance(pa, (tuple, list)): + matchExpr.setParseAction(*pa) + else: + matchExpr.setParseAction(pa) + thisExpr <<= (matchExpr.setName(termName) | lastExpr) + lastExpr = thisExpr + ret <<= lastExpr + return ret + +operatorPrecedence = infixNotation +"""(Deprecated) Former name of :class:`infixNotation`, will be +dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + 
'"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"' + | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """Helper method for defining nested lists enclosed in opening and + closing delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list + (default= ``"("``); can also be a pyparsing expression + - closer - closing character for a nested list + (default= ``")"``); can also be a pyparsing expression + - content - expression for items within the nested lists + (default= ``None``) + - ignoreExpr - expression for ignoring opening and closing + delimiters (default= :class:`quotedString`) + + If an expression is not provided for the content argument, the + nested expression will capture all whitespace-delimited content + between delimiters as a list of separate values. + + Use the ``ignoreExpr`` argument to define expressions that may + contain opening or closing characters that should not be treated as + opening or closing characters for nesting, such as quotedString or + a comment expression. Specify multiple expressions using an + :class:`Or` or :class:`MatchFirst`. The default is + :class:`quotedString`, but if no expressions are to be ignored, then + pass ``None`` for this argument. + + Example:: + + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR, RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + + prints:: + + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener, basestring) and isinstance(closer, basestring): + if len(opener) == 1 and len(closer) == 1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener + + closer + + ParserElement.DEFAULT_WHITE_CHARS, exact=1) + ) + ).setParseAction(lambda t: t[0].strip())) + else: + content = (empty.copy() + CharsNotIn(opener + + closer + + ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t: t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) + ).setParseAction(lambda t: t[0].strip())) + else: + content = 
(Combine(OneOrMore(~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) + ).setParseAction(lambda t: t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret <<= Group(Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer)) + else: + ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) + ret.setName('nested %s%s expression' % (opener, closer)) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """Helper method for defining space-delimited indentation blocks, + such as those used to define block statements in Python source code. + + Parameters: + + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single + grammar should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond + the current level; set to False for block of left-most + statements (default= ``True``) + + A valid block must contain at least one ``blockStatement``. + + Example:: + + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group("(" + Optional(delimitedList(identifier)) + ")") + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group(funcDecl + func_body) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << (funcDef | assignment | identifier) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + + prints:: + + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + backup_stack = indentStack[:] + + def reset_stack(): + indentStack[:] = backup_stack + + def checkPeerIndent(s, l, t): + if l >= len(s): return + curCol = col(l, s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseException(s, l, "illegal nesting") + raise ParseException(s, l, "not a peer entry") + + def checkSubIndent(s, l, t): + curCol = col(l, s) + if curCol > indentStack[-1]: + indentStack.append(curCol) + else: + raise ParseException(s, l, "not a subentry") + + def checkUnindent(s, l, t): + if l >= len(s): return + curCol = col(l, s) + if not(indentStack and curCol in indentStack): + raise ParseException(s, l, "not an unindent") + if curCol < indentStack[-1]: + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress(), stopOn=StringEnd()) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') + if indent: + smExpr 
= Group(Optional(NL) + + INDENT + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + + UNDENT) + else: + smExpr = Group(Optional(NL) + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + + UNDENT) + smExpr.setFailAction(lambda a, b, c, d: reset_stack()) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName('indented block') + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag, anyCloseTag = makeHTMLTags(Word(alphas, alphanums + "_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(), '><& "\'')) +commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form ``/* ... */``" + +htmlComment = Regex(r"").setName("HTML comment") +"Comment of the form ````" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form ``// ... (to end of line)``" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/' | dblSlashComment).setName("C++ style comment") +"Comment of either form :class:`cStyleComment` or :class:`dblSlashComment`" + +javaStyleComment = cppStyleComment +"Same as :class:`cppStyleComment`" + +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form ``# ... (to end of line)``" + +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional(Word(" \t") + + ~Literal(",") + ~LineEnd()))).streamline().setName("commaItem") +commaSeparatedList = delimitedList(Optional(quotedString.copy() | _commasepitem, default="")).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or +quoted strings, separated by commas. + +This expression is deprecated in favor of :class:`pyparsing_common.comma_separated_list`. 
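The comment expressions defined above (``cStyleComment``, ``dblSlashComment``, ``cppStyleComment``, ``pythonStyleComment``) are usually attached to a grammar via ``ignore()``, or used on their own to strip comments out of source text. A minimal sketch of the stand-alone use, with a made-up C-like snippet::

    import pyparsing as pp

    c_source = '''
    int x = 1;   // a trailing comment
    /* a block
       comment */
    int y = 2;
    '''

    # Suppress() returns no tokens, so transformString() simply deletes the matches
    print(pp.cppStyleComment.suppress().transformString(c_source))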
+""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """Here are some common low-level expressions that may be useful in + jump-starting parser development: + + - numeric forms (:class:`integers`, :class:`reals`, + :class:`scientific notation`) + - common :class:`programming identifiers` + - network addresses (:class:`MAC`, + :class:`IPv4`, :class:`IPv6`) + - ISO8601 :class:`dates` and + :class:`datetime` + - :class:`UUID` + - :class:`comma-separated list` + + Parse actions: + + - :class:`convertToInteger` + - :class:`convertToFloat` + - :class:`convertToDate` + - :class:`convertToDatetime` + - :class:`stripHTMLTags` + - :class:`upcaseTokens` + - :class:`downcaseTokens` + + Example:: + + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + + prints:: + + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + + -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int, 16)) + """expression that parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?(?:\d+\.\d*|\.\d+)').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating 
point number and returns a float""" + + sci_real = Regex(r'[+-]?(?:\d+(?:[eE][+-]?\d+)|(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional + scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas + '_', alphanums + '_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (``0.0.0.0 - 255.255.255.255``)" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part) * 7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) + + "::" + + Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) + ).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``) + + Example:: + + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + + prints:: + + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s, l, t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """Helper to create a parse action for converting parsed + datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``) + + Example:: + + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + + prints:: + + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s, l, t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (``yyyy-mm-dd``)" + + iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime (``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """Parse action to remove HTML tags from web page HTML source + + Example:: + + # strip HTML links from normal text + text = 'More info at the
pyparsing wiki page' + td, td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + print(table_text.parseString(text).body) + + Prints:: + + More info at the pyparsing wiki page + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + + ~LineEnd() + + Word(printables, excludeChars=',') + + Optional(White(" \t")))).streamline().setName("commaItem") + comma_separated_list = delimitedList(Optional(quotedString.copy() + | _commasepitem, default='') + ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" + + +class _lazyclassproperty(object): + def __init__(self, fn): + self.fn = fn + self.__doc__ = fn.__doc__ + self.__name__ = fn.__name__ + + def __get__(self, obj, cls): + if cls is None: + cls = type(obj) + if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', []) + for superclass in cls.__mro__[1:]): + cls._intern = {} + attrname = self.fn.__name__ + if attrname not in cls._intern: + cls._intern[attrname] = self.fn(cls) + return cls._intern[attrname] + + +class unicode_set(object): + """ + A set of Unicode characters, for language-specific strings for + ``alphas``, ``nums``, ``alphanums``, and ``printables``. + A unicode_set is defined by a list of ranges in the Unicode character + set, in a class attribute ``_ranges``, such as:: + + _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] + + A unicode set can also be defined using multiple inheritance of other unicode sets:: + + class CJK(Chinese, Japanese, Korean): + pass + """ + _ranges = [] + + @classmethod + def _get_chars_for_ranges(cls): + ret = [] + for cc in cls.__mro__: + if cc is unicode_set: + break + for rr in cc._ranges: + ret.extend(range(rr[0], rr[-1] + 1)) + return [unichr(c) for c in sorted(set(ret))] + + @_lazyclassproperty + def printables(cls): + "all non-whitespace characters in this range" + return u''.join(filterfalse(unicode.isspace, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def alphas(cls): + "all alphabetic characters in this range" + return u''.join(filter(unicode.isalpha, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def nums(cls): + "all numeric digit characters in this range" + return u''.join(filter(unicode.isdigit, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def alphanums(cls): + "all alphanumeric characters in this range" + return cls.alphas + cls.nums + + +class pyparsing_unicode(unicode_set): + """ + A namespace class for defining common language unicode_sets. 
+ """ + _ranges = [(32, sys.maxunicode)] + + class Latin1(unicode_set): + "Unicode set for Latin-1 Unicode Character Range" + _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] + + class LatinA(unicode_set): + "Unicode set for Latin-A Unicode Character Range" + _ranges = [(0x0100, 0x017f),] + + class LatinB(unicode_set): + "Unicode set for Latin-B Unicode Character Range" + _ranges = [(0x0180, 0x024f),] + + class Greek(unicode_set): + "Unicode set for Greek Unicode Character Ranges" + _ranges = [ + (0x0370, 0x03ff), (0x1f00, 0x1f15), (0x1f18, 0x1f1d), (0x1f20, 0x1f45), (0x1f48, 0x1f4d), + (0x1f50, 0x1f57), (0x1f59,), (0x1f5b,), (0x1f5d,), (0x1f5f, 0x1f7d), (0x1f80, 0x1fb4), (0x1fb6, 0x1fc4), + (0x1fc6, 0x1fd3), (0x1fd6, 0x1fdb), (0x1fdd, 0x1fef), (0x1ff2, 0x1ff4), (0x1ff6, 0x1ffe), + ] + + class Cyrillic(unicode_set): + "Unicode set for Cyrillic Unicode Character Range" + _ranges = [(0x0400, 0x04ff)] + + class Chinese(unicode_set): + "Unicode set for Chinese Unicode Character Range" + _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f),] + + class Japanese(unicode_set): + "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" + _ranges = [] + + class Kanji(unicode_set): + "Unicode set for Kanji Unicode Character Range" + _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f),] + + class Hiragana(unicode_set): + "Unicode set for Hiragana Unicode Character Range" + _ranges = [(0x3040, 0x309f),] + + class Katakana(unicode_set): + "Unicode set for Katakana Unicode Character Range" + _ranges = [(0x30a0, 0x30ff),] + + class Korean(unicode_set): + "Unicode set for Korean Unicode Character Range" + _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f),] + + class CJK(Chinese, Japanese, Korean): + "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range" + pass + + class Thai(unicode_set): + "Unicode set for Thai Unicode Character Range" + _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b),] + + class Arabic(unicode_set): + "Unicode set for Arabic Unicode Character Range" + _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f),] + + class Hebrew(unicode_set): + "Unicode set for Hebrew Unicode Character Range" + _ranges = [(0x0590, 0x05ff),] + + class Devanagari(unicode_set): + "Unicode set for Devanagari Unicode Character Range" + _ranges = [(0x0900, 0x097f), (0xa8e0, 0xa8ff)] + +pyparsing_unicode.Japanese._ranges = (pyparsing_unicode.Japanese.Kanji._ranges + + pyparsing_unicode.Japanese.Hiragana._ranges + + pyparsing_unicode.Japanese.Katakana._ranges) + +# define ranges in language character sets +if PY_3: + setattr(pyparsing_unicode, u"العربية", pyparsing_unicode.Arabic) + setattr(pyparsing_unicode, u"中文", pyparsing_unicode.Chinese) + setattr(pyparsing_unicode, u"кириллица", pyparsing_unicode.Cyrillic) + setattr(pyparsing_unicode, u"Ελληνικά", pyparsing_unicode.Greek) + setattr(pyparsing_unicode, u"עִברִית", pyparsing_unicode.Hebrew) + setattr(pyparsing_unicode, u"日本語", pyparsing_unicode.Japanese) + setattr(pyparsing_unicode.Japanese, u"漢字", pyparsing_unicode.Japanese.Kanji) + setattr(pyparsing_unicode.Japanese, u"カタカナ", pyparsing_unicode.Japanese.Katakana) + setattr(pyparsing_unicode.Japanese, u"ひらがな", pyparsing_unicode.Japanese.Hiragana) + setattr(pyparsing_unicode, u"한국어", pyparsing_unicode.Korean) + setattr(pyparsing_unicode, u"ไทย", pyparsing_unicode.Thai) + setattr(pyparsing_unicode, u"देवनागरी", pyparsing_unicode.Devanagari) + + +class pyparsing_test: + """ + 
namespace class for classes useful in writing unit tests + """ + + class reset_pyparsing_context: + """ + Context manager to be used when writing unit tests that modify pyparsing config values: + - packrat parsing + - default whitespace characters. + - default keyword characters + - literal string auto-conversion class + - __diag__ settings + + Example: + with reset_pyparsing_context(): + # test that literals used to construct a grammar are automatically suppressed + ParserElement.inlineLiteralsUsing(Suppress) + + term = Word(alphas) | Word(nums) + group = Group('(' + term[...] + ')') + + # assert that the '()' characters are not included in the parsed tokens + self.assertParseAndCheckLisst(group, "(abc 123 def)", ['abc', '123', 'def']) + + # after exiting context manager, literals are converted to Literal expressions again + """ + + def __init__(self): + self._save_context = {} + + def save(self): + self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS + self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS + self._save_context[ + "literal_string_class" + ] = ParserElement._literalStringClass + self._save_context["packrat_enabled"] = ParserElement._packratEnabled + self._save_context["packrat_parse"] = ParserElement._parse + self._save_context["__diag__"] = { + name: getattr(__diag__, name) for name in __diag__._all_names + } + self._save_context["__compat__"] = { + "collect_all_And_tokens": __compat__.collect_all_And_tokens + } + return self + + def restore(self): + # reset pyparsing global state + if ( + ParserElement.DEFAULT_WHITE_CHARS + != self._save_context["default_whitespace"] + ): + ParserElement.setDefaultWhitespaceChars( + self._save_context["default_whitespace"] + ) + Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"] + ParserElement.inlineLiteralsUsing( + self._save_context["literal_string_class"] + ) + for name, value in self._save_context["__diag__"].items(): + setattr(__diag__, name, value) + ParserElement._packratEnabled = self._save_context["packrat_enabled"] + ParserElement._parse = self._save_context["packrat_parse"] + __compat__.collect_all_And_tokens = self._save_context["__compat__"] + + def __enter__(self): + return self.save() + + def __exit__(self, *args): + return self.restore() + + class TestParseResultsAsserts: + """ + A mixin class to add parse results assertion methods to normal unittest.TestCase classes. + """ + def assertParseResultsEquals( + self, result, expected_list=None, expected_dict=None, msg=None + ): + """ + Unit test assertion to compare a ParseResults object with an optional expected_list, + and compare any defined results names with an optional expected_dict. + """ + if expected_list is not None: + self.assertEqual(expected_list, result.asList(), msg=msg) + if expected_dict is not None: + self.assertEqual(expected_dict, result.asDict(), msg=msg) + + def assertParseAndCheckList( + self, expr, test_string, expected_list, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asList() is equal to the expected_list. 
+ """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg) + + def assertParseAndCheckDict( + self, expr, test_string, expected_dict, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asDict() is equal to the expected_dict. + """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) + + def assertRunTestResults( + self, run_tests_report, expected_parse_results=None, msg=None + ): + """ + Unit test assertion to evaluate output of ParserElement.runTests(). If a list of + list-dict tuples is given as the expected_parse_results argument, then these are zipped + with the report tuples returned by runTests and evaluated using assertParseResultsEquals. + Finally, asserts that the overall runTests() success value is True. + + :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests + :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] + """ + run_test_success, run_test_results = run_tests_report + + if expected_parse_results is not None: + merged = [ + (rpt[0], rpt[1], expected) + for rpt, expected in zip(run_test_results, expected_parse_results) + ] + for test_string, result, expected in merged: + # expected should be a tuple containing a list and/or a dict or an exception, + # and optional failure message string + # an empty tuple will skip any result validation + fail_msg = next( + (exp for exp in expected if isinstance(exp, str)), None + ) + expected_exception = next( + ( + exp + for exp in expected + if isinstance(exp, type) and issubclass(exp, Exception) + ), + None, + ) + if expected_exception is not None: + with self.assertRaises( + expected_exception=expected_exception, msg=fail_msg or msg + ): + if isinstance(result, Exception): + raise result + else: + expected_list = next( + (exp for exp in expected if isinstance(exp, list)), None + ) + expected_dict = next( + (exp for exp in expected if isinstance(exp, dict)), None + ) + if (expected_list, expected_dict) != (None, None): + self.assertParseResultsEquals( + result, + expected_list=expected_list, + expected_dict=expected_dict, + msg=fail_msg or msg, + ) + else: + # warning here maybe? 
+ print("no validation for {!r}".format(test_string)) + + # do this last, in case some specific test results can be reported instead + self.assertTrue( + run_test_success, msg=msg if msg is not None else "failed runTests" + ) + + @contextmanager + def assertRaisesParseException(self, exc_type=ParseException, msg=None): + with self.assertRaises(exc_type, msg=msg): + yield + + +if __name__ == "__main__": + + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + + ident = Word(alphas, alphanums + "_$") + + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec = ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" + 12345678-1234-5678-1234-567812345678 + """) diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__init__.py b/venv/Lib/site-packages/pip/_vendor/requests/__init__.py new file mode 100644 index 00000000..18046c45 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/requests/__init__.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- + +# __ +# /__) _ _ _ _ _/ _ +# / ( (- (/ (/ (- _) / _) +# / + +""" +Requests HTTP Library +~~~~~~~~~~~~~~~~~~~~~ + +Requests is an HTTP library, written in Python, for human beings. +Basic GET usage: + + >>> import requests + >>> r = requests.get('https://www.python.org') + >>> r.status_code + 200 + >>> b'Python is a programming language' in r.content + True + +... or POST: + + >>> payload = dict(key1='value1', key2='value2') + >>> r = requests.post('https://httpbin.org/post', data=payload) + >>> print(r.text) + { + ... + "form": { + "key1": "value1", + "key2": "value2" + }, + ... + } + +The other HTTP methods are supported - see `requests.api`. Full documentation +is at . + +:copyright: (c) 2017 by Kenneth Reitz. +:license: Apache 2.0, see LICENSE for more details. +""" + +from pip._vendor import urllib3 +from pip._vendor import chardet +import warnings +from .exceptions import RequestsDependencyWarning + + +def check_compatibility(urllib3_version, chardet_version): + urllib3_version = urllib3_version.split('.') + assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. + + # Sometimes, urllib3 only reports its version as 16.1. 
+ if len(urllib3_version) == 2: + urllib3_version.append('0') + + # Check urllib3 for compatibility. + major, minor, patch = urllib3_version # noqa: F811 + major, minor, patch = int(major), int(minor), int(patch) + # urllib3 >= 1.21.1, <= 1.26 + assert major == 1 + assert minor >= 21 + assert minor <= 26 + + # Check chardet for compatibility. + major, minor, patch = chardet_version.split('.')[:3] + major, minor, patch = int(major), int(minor), int(patch) + # chardet >= 3.0.2, < 5.0.0 + assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0) + + +def _check_cryptography(cryptography_version): + # cryptography < 1.3.4 + try: + cryptography_version = list(map(int, cryptography_version.split('.'))) + except ValueError: + return + + if cryptography_version < [1, 3, 4]: + warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) + warnings.warn(warning, RequestsDependencyWarning) + +# Check imported dependencies for compatibility. +try: + check_compatibility(urllib3.__version__, chardet.__version__) +except (AssertionError, ValueError): + warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported " + "version!".format(urllib3.__version__, chardet.__version__), + RequestsDependencyWarning) + +# Attempt to enable urllib3's fallback for SNI support +# if the standard library doesn't support SNI or the +# 'ssl' library isn't available. +try: + # Note: This logic prevents upgrading cryptography on Windows, if imported + # as part of pip. + from pip._internal.utils.compat import WINDOWS + if not WINDOWS: + raise ImportError("pip internals: don't import cryptography on Windows") + try: + import ssl + except ImportError: + ssl = None + + if not getattr(ssl, "HAS_SNI", False): + from pip._vendor.urllib3.contrib import pyopenssl + pyopenssl.inject_into_urllib3() + + # Check cryptography version + from cryptography import __version__ as cryptography_version + _check_cryptography(cryptography_version) +except ImportError: + pass + +# urllib3's DependencyWarnings should be silenced. +from pip._vendor.urllib3.exceptions import DependencyWarning +warnings.simplefilter('ignore', DependencyWarning) + +from .__version__ import __title__, __description__, __url__, __version__ +from .__version__ import __build__, __author__, __author_email__, __license__ +from .__version__ import __copyright__, __cake__ + +from . import utils +from . import packages +from .models import Request, Response, PreparedRequest +from .api import request, get, head, post, patch, put, delete, options +from .sessions import session, Session +from .status_codes import codes +from .exceptions import ( + RequestException, Timeout, URLRequired, + TooManyRedirects, HTTPError, ConnectionError, + FileModeWarning, ConnectTimeout, ReadTimeout +) + +# Set default logging handler to avoid "No handler found" warnings. +import logging +from logging import NullHandler + +logging.getLogger(__name__).addHandler(NullHandler()) + +# FileModeWarnings go off per the default. 
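A quick sketch of how the assertion-based compatibility check above behaves if called directly (the version strings are just examples)::

    check_compatibility('1.26.4', '4.0.0')       # inside the supported ranges: passes silently

    try:
        check_compatibility('1.20.0', '4.0.0')   # urllib3 minor version below 21
    except AssertionError:
        print("unsupported urllib3 version")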
+warnings.simplefilter('default', FileModeWarning, append=True) diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..5e0f977e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-36.pyc new file mode 100644 index 00000000..7855e61a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-36.pyc new file mode 100644 index 00000000..edc3eb0c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-36.pyc new file mode 100644 index 00000000..ae171c1d Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/api.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/api.cpython-36.pyc new file mode 100644 index 00000000..2ac154c3 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/api.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-36.pyc new file mode 100644 index 00000000..9aff2d40 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-36.pyc new file mode 100644 index 00000000..8047a100 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-36.pyc new file mode 100644 index 00000000..64c46b00 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-36.pyc new file mode 100644 index 00000000..6e9e1de4 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-36.pyc new file mode 100644 index 00000000..05e176b1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/help.cpython-36.pyc 
b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/help.cpython-36.pyc new file mode 100644 index 00000000..39362674 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/help.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-36.pyc new file mode 100644 index 00000000..57a68490 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/models.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/models.cpython-36.pyc new file mode 100644 index 00000000..82285575 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/models.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-36.pyc new file mode 100644 index 00000000..eb85328d Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-36.pyc new file mode 100644 index 00000000..7b692c14 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-36.pyc new file mode 100644 index 00000000..553f332a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-36.pyc new file mode 100644 index 00000000..13d5c15a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-36.pyc new file mode 100644 index 00000000..ae55cf12 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/requests/__version__.py b/venv/Lib/site-packages/pip/_vendor/requests/__version__.py new file mode 100644 index 00000000..1267488d --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/requests/__version__.py @@ -0,0 +1,14 @@ +# .-. .-. .-. . . .-. .-. .-. .-. +# |( |- |.| | | |- `-. | `-. +# ' ' `-' `-`.`-' `-' `-' ' `-' + +__title__ = 'requests' +__description__ = 'Python HTTP for Humans.' 
+__url__ = 'https://requests.readthedocs.io' +__version__ = '2.25.1' +__build__ = 0x022501 +__author__ = 'Kenneth Reitz' +__author_email__ = 'me@kennethreitz.org' +__license__ = 'Apache 2.0' +__copyright__ = 'Copyright 2020 Kenneth Reitz' +__cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/_internal_utils.py b/venv/Lib/site-packages/pip/_vendor/requests/_internal_utils.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/requests/_internal_utils.py rename to venv/Lib/site-packages/pip/_vendor/requests/_internal_utils.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/adapters.py b/venv/Lib/site-packages/pip/_vendor/requests/adapters.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/requests/adapters.py rename to venv/Lib/site-packages/pip/_vendor/requests/adapters.py diff --git a/venv/Lib/site-packages/pip/_vendor/requests/api.py b/venv/Lib/site-packages/pip/_vendor/requests/api.py new file mode 100644 index 00000000..e978e203 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/requests/api.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- + +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request `. + + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. 
+ :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + :return: :class:`Response ` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req + + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return request('get', url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return request('options', url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', False) + return request('head', url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('post', url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('put', url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('patch', url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. 
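The helper functions above all funnel into ``request()``; in everyday use they are called directly. A short usage sketch (httpbin.org is only an example endpoint)::

    import requests

    # GET with query parameters and a timeout
    r = requests.get("https://httpbin.org/get", params={"q": "search"}, timeout=5)
    print(r.status_code)
    print(r.json()["args"])

    # POST form-encoded data
    r = requests.post("https://httpbin.org/post", data={"key1": "value1"})
    print(r.status_code)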
+ + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('delete', url, **kwargs) diff --git a/venv/Lib/site-packages/pip/_vendor/requests/auth.py b/venv/Lib/site-packages/pip/_vendor/requests/auth.py new file mode 100644 index 00000000..eeface39 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/requests/auth.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- + +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import os +import re +import time +import hashlib +import threading +import warnings + +from base64 import b64encode + +from .compat import urlparse, str, basestring +from .cookies import extract_cookies_to_jar +from ._internal_utils import to_native_string +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' +CONTENT_TYPE_MULTI_PART = 'multipart/form-data' + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. + if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. 
Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(type(password)), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode('latin1') + + if isinstance(password, str): + password = password.encode('latin1') + + authstr = 'Basic ' + to_native_string( + b64encode(b':'.join((username, password))).strip() + ) + + return authstr + + +class AuthBase(object): + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError('Auth hooks must be callable.') + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers['Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, 'init'): + self._thread_local.init = True + self._thread_local.last_nonce = '' + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal['realm'] + nonce = self._thread_local.chal['nonce'] + qop = self._thread_local.chal.get('qop') + algorithm = self._thread_local.chal.get('algorithm') + opaque = self._thread_local.chal.get('opaque') + hash_utf8 = None + + if algorithm is None: + _algorithm = 'MD5' + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': + def md5_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.md5(x).hexdigest() + hash_utf8 = md5_utf8 + elif _algorithm == 'SHA': + def sha_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha1(x).hexdigest() + hash_utf8 = sha_utf8 + elif _algorithm == 'SHA-256': + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha256(x).hexdigest() + hash_utf8 = sha256_utf8 + elif _algorithm == 'SHA-512': + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha512(x).hexdigest() + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += '?' 
+ p_parsed.query + + A1 = '%s:%s:%s' % (self.username, realm, self.password) + A2 = '%s:%s' % (method, path) + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = '%08x' % self._thread_local.nonce_count + s = str(self._thread_local.nonce_count).encode('utf-8') + s += nonce.encode('utf-8') + s += time.ctime().encode('utf-8') + s += os.urandom(8) + + cnonce = (hashlib.sha1(s).hexdigest()[:16]) + if _algorithm == 'MD5-SESS': + HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) + + if not qop: + respdig = KD(HA1, "%s:%s" % (nonce, HA2)) + elif qop == 'auth' or 'auth' in qop.split(','): + noncebit = "%s:%s:%s:%s:%s" % ( + nonce, ncvalue, cnonce, 'auth', HA2 + ) + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ + 'response="%s"' % (self.username, realm, nonce, path, respdig) + if opaque: + base += ', opaque="%s"' % opaque + if algorithm: + base += ', algorithm="%s"' % algorithm + if entdig: + base += ', digest="%s"' % entdig + if qop: + base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) + + return 'Digest %s' % (base) + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/psf/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get('www-authenticate', '') + + if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: + + self._thread_local.num_401_calls += 1 + pat = re.compile(r'digest ', flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. + r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers['Authorization'] = self.build_digest_header( + prep.method, prep.url) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers['Authorization'] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. 
+ self._thread_local.pos = None + r.register_hook('response', self.handle_401) + r.register_hook('response', self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/certs.py b/venv/Lib/site-packages/pip/_vendor/requests/certs.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/requests/certs.py rename to venv/Lib/site-packages/pip/_vendor/requests/certs.py diff --git a/venv/Lib/site-packages/pip/_vendor/requests/compat.py b/venv/Lib/site-packages/pip/_vendor/requests/compat.py new file mode 100644 index 00000000..9e293716 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/requests/compat.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- + +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module handles import compatibility issues between Python 2 and +Python 3. +""" + +from pip._vendor import chardet + +import sys + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = (_ver[0] == 2) + +#: Python 3.x? +is_py3 = (_ver[0] == 3) + +# Note: We've patched out simplejson support in pip because it prevents +# upgrading simplejson on Windows. +# try: +# import simplejson as json +# except (ImportError, SyntaxError): +# # simplejson does not support Python 3.2, it throws a SyntaxError +# # because of u'...' Unicode literals. +import json + +# --------- +# Specifics +# --------- + +if is_py2: + from urllib import ( + quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, + proxy_bypass, proxy_bypass_environment, getproxies_environment) + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag + from urllib2 import parse_http_list + import cookielib + from Cookie import Morsel + from StringIO import StringIO + # Keep OrderedDict for backwards compatibility. + from collections import Callable, Mapping, MutableMapping, OrderedDict + + + builtin_str = str + bytes = str + str = unicode + basestring = basestring + numeric_types = (int, long, float) + integer_types = (int, long) + +elif is_py3: + from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag + from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment + from http import cookiejar as cookielib + from http.cookies import Morsel + from io import StringIO + # Keep OrderedDict for backwards compatibility. 
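For end users, the authentication handlers defined in ``auth.py`` above are passed via the ``auth`` keyword and share the same call interface. A brief sketch (the URLs and credentials are placeholders; in the standalone ``requests`` distribution the import path is ``requests.auth``)::

    import requests
    from requests.auth import HTTPBasicAuth, HTTPDigestAuth

    r = requests.get("https://httpbin.org/basic-auth/user/pass",
                     auth=HTTPBasicAuth("user", "pass"))
    print(r.status_code)   # 200 when the credentials match

    r = requests.get("https://httpbin.org/digest-auth/auth/user/pass",
                     auth=HTTPDigestAuth("user", "pass"))
    print(r.status_code)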
+ from collections import OrderedDict + from collections.abc import Callable, Mapping, MutableMapping + + builtin_str = str + str = str + bytes = bytes + basestring = (str, bytes) + numeric_types = (int, float) + integer_types = (int,) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/cookies.py b/venv/Lib/site-packages/pip/_vendor/requests/cookies.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/requests/cookies.py rename to venv/Lib/site-packages/pip/_vendor/requests/cookies.py diff --git a/venv/Lib/site-packages/pip/_vendor/requests/exceptions.py b/venv/Lib/site-packages/pip/_vendor/requests/exceptions.py new file mode 100644 index 00000000..9ef9e6e9 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/requests/exceptions.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- + +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. +""" +from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. + """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop('response', None) + self.response = response + self.request = kwargs.pop('request', None) + if (response is not None and not self.request and + hasattr(response, 'request')): + self.request = self.response.request + super(RequestException, self).__init__(*args, **kwargs) + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL schema (e.g. 
http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """See defaults.py for valid schemas.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content.""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed.""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body.""" + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/help.py b/venv/Lib/site-packages/pip/_vendor/requests/help.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/requests/help.py rename to venv/Lib/site-packages/pip/_vendor/requests/help.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/hooks.py b/venv/Lib/site-packages/pip/_vendor/requests/hooks.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/requests/hooks.py rename to venv/Lib/site-packages/pip/_vendor/requests/hooks.py diff --git a/venv/Lib/site-packages/pip/_vendor/requests/models.py b/venv/Lib/site-packages/pip/_vendor/requests/models.py new file mode 100644 index 00000000..b0ce2950 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/requests/models.py @@ -0,0 +1,956 @@ +# -*- coding: utf-8 -*- + +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime +import sys + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. 
+import encodings.idna + +from pip._vendor.urllib3.fields import RequestField +from pip._vendor.urllib3.filepost import encode_multipart_formdata +from pip._vendor.urllib3.util import parse_url +from pip._vendor.urllib3.exceptions import ( + DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) + +from io import UnsupportedOperation +from .hooks import default_hooks +from .structures import CaseInsensitiveDict + +from .auth import HTTPBasicAuth +from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar +from .exceptions import ( + HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, + ContentDecodingError, ConnectionError, StreamConsumedError) +from ._internal_utils import to_native_string, unicode_is_ascii +from .utils import ( + guess_filename, get_auth_from_url, requote_uri, + stream_decode_response_unicode, to_key_val_list, parse_header_links, + iter_slices, guess_json_utf, super_len, check_header_validity) +from .compat import ( + Callable, Mapping, + cookielib, urlunparse, urlsplit, urlencode, str, bytes, + is_py2, chardet, builtin_str, basestring) +from .compat import json as complexjson +from .status_codes import codes + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin(object): + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = '/' + + url.append(path) + + query = p.query + if query: + url.append('?') + url.append(query) + + return ''.join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. + """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, 'read'): + return data + elif hasattr(data, '__iter__'): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): + vs = [vs] + for v in vs: + if v is not None: + result.append( + (k.encode('utf-8') if isinstance(k, str) else k, + v.encode('utf-8') if isinstance(v, str) else v)) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if (not files): + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, '__iter__'): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. 
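For context on _encode_params a little further up: it ultimately defers to urlencode(..., doseq=True), so list-of-tuples input keeps its order and repeated keys expand naturally. An illustrative sketch using only the standard library::

    from urllib.parse import urlencode

    pairs = [('tag', 'python'), ('tag', 'timer'), ('page', '1')]
    print(urlencode(pairs, doseq=True))                 # tag=python&tag=timer&page=1

    # A plain dict is accepted too, but then the parameter order is
    # whatever the dict yields rather than a caller-chosen sequence.
    print(urlencode({'q': 'auto timer'}, doseq=True))   # q=auto+timer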
+ if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + (field.decode('utf-8') if isinstance(field, bytes) else field, + v.encode('utf-8') if isinstance(v, str) else v)) + + for (k, v) in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, 'read'): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin(object): + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError('Unsupported event specified, with event name "%s"' % (event)) + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, '__iter__'): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request ` object. + + Used to prepare a :class:`PreparedRequest `, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param json: json for the body to attach to the request (if files or data is not specified). + :param params: URL parameters to append to the URL. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param auth: Auth handler or (user, pass) tuple. + :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param hooks: dictionary of callback hooks, for internal usage. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> req.prepare() + + """ + + def __init__(self, + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): + + # Default empty dicts for dict params. 
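The prepare() flow described in the class docstring above can be exercised without any network traffic; the sketch below (again assuming the standalone requests package) shows what the prepare_* methods further down produce::

    import requests

    req = requests.Request(
        'POST',
        'https://httpbin.org/post',
        params={'q': 'autotimer'},           # merged into the URL by prepare_url()
        json={'hello': 'world'},             # serialized by prepare_body()
        headers={'X-Example': 'demo'},       # normalized by prepare_headers()
    )
    prepped = req.prepare()
    print(prepped.method)                    # POST
    print(prepped.url)                       # https://httpbin.org/post?q=autotimer
    print(prepped.headers['Content-Type'])   # application/json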
+ data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for (k, v) in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.json = json + self.params = params + self.auth = auth + self.cookies = cookies + + def __repr__(self): + return '' % (self.method) + + def prepare(self): + """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" + p = PreparedRequest() + p.prepare( + method=self.method, + url=self.url, + headers=self.headers, + files=self.files, + data=self.data, + json=self.json, + params=self.params, + auth=self.auth, + cookies=self.cookies, + hooks=self.hooks, + ) + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest ` object, + containing the exact bytes that will be sent to the server. + + Instances are generated from a :class:`Request ` object, and + should not be instantiated manually; doing so may produce undesirable + effects. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> r = req.prepare() + >>> r + + + >>> s = requests.Session() + >>> s.send(r) + + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + # The `CookieJar` used to create the Cookie header will be stored here + # after prepare_cookies is called + self._cookies = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + #: integer denoting starting position of a readable file-like body. + self._body_position = None + + def prepare(self, + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): + """Prepares the entire request with the given parameters.""" + + self.prepare_method(method) + self.prepare_url(url, params) + self.prepare_headers(headers) + self.prepare_cookies(cookies) + self.prepare_body(data, files, json) + self.prepare_auth(auth, url) + + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. Authenticators could add a hook + self.prepare_hooks(hooks) + + def __repr__(self): + return '' % (self.method) + + def copy(self): + p = PreparedRequest() + p.method = self.method + p.url = self.url + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = _copy_cookie_jar(self._cookies) + p.body = self.body + p.hooks = self.hooks + p._body_position = self._body_position + return p + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + from pip._vendor import idna + + try: + host = idna.encode(host, uts46=True).decode('utf-8') + except idna.IDNAError: + raise UnicodeError + return host + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. 
+ #: We're unable to blindly call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/psf/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode('utf8') + else: + url = unicode(url) if is_py2 else str(url) + + # Remove leading whitespaces from url + url = url.lstrip() + + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. + if ':' in url and not url.lower().startswith('http'): + self.url = url + return + + # Support for unicode domain names and paths. + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) + + if not scheme: + error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") + error = error.format(to_native_string(url, 'utf8')) + + raise MissingSchema(error) + + if not host: + raise InvalidURL("Invalid URL %r: No host supplied" % url) + + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL('URL has an invalid label.') + elif host.startswith(u'*'): + raise InvalidURL('URL has an invalid label.') + + # Carefully reconstruct the network location + netloc = auth or '' + if netloc: + netloc += '@' + netloc += host + if port: + netloc += ':' + str(port) + + # Bare domains aren't valid URLs. + if not path: + path = '/' + + if is_py2: + if isinstance(scheme, str): + scheme = scheme.encode('utf-8') + if isinstance(netloc, str): + netloc = netloc.encode('utf-8') + if isinstance(path, str): + path = path.encode('utf-8') + if isinstance(query, str): + query = query.encode('utf-8') + if isinstance(fragment, str): + fragment = fragment.encode('utf-8') + + if isinstance(params, (str, bytes)): + params = to_native_string(params) + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = '%s&%s' % (query, enc_params) + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + self.headers = CaseInsensitiveDict() + if headers: + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value + + def prepare_body(self, data, files, json=None): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. + body = None + content_type = None + + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. 
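The json branch immediately below is small but easy to misread: json.dumps returns str on Python 3, so the body is explicitly encoded to UTF-8 bytes before urllib3 sees it. A standalone sketch of just that step::

    import json

    body = json.dumps({'greeting': 'héllo'})
    if not isinstance(body, bytes):
        body = body.encode('utf-8')
    print(type(body))  # <class 'bytes'>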
+ content_type = 'application/json' + body = complexjson.dumps(json) + if not isinstance(body, bytes): + body = body.encode('utf-8') + + is_stream = all([ + hasattr(data, '__iter__'), + not isinstance(data, (basestring, list, tuple, Mapping)) + ]) + + if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + + body = data + + if getattr(body, 'tell', None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. + try: + self._body_position = body.tell() + except (IOError, OSError): + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + + if files: + raise NotImplementedError('Streamed bodies and files are mutually exclusive.') + + if length: + self.headers['Content-Length'] = builtin_str(length) + else: + self.headers['Transfer-Encoding'] = 'chunked' + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, basestring) or hasattr(data, 'read'): + content_type = None + else: + content_type = 'application/x-www-form-urlencoded' + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if content_type and ('content-type' not in self.headers): + self.headers['Content-Type'] = content_type + + self.body = body + + def prepare_content_length(self, body): + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers['Content-Length'] = builtin_str(length) + elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. not GET or HEAD) + self.headers['Content-Length'] = '0' + + def prepare_auth(self, auth, url=''): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + # Recompute Content-Length + self.prepare_content_length(self.body) + + def prepare_cookies(self, cookies): + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest ` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. 
+ """ + if isinstance(cookies, cookielib.CookieJar): + self._cookies = cookies + else: + self._cookies = cookiejar_from_dict(cookies) + + cookie_header = get_cookie_header(self._cookies, self) + if cookie_header is not None: + self.headers['Cookie'] = cookie_header + + def prepare_hooks(self, hooks): + """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] + for event in hooks: + self.register_hook(event, hooks[event]) + + +class Response(object): + """The :class:`Response ` object, which contains a + server's response to an HTTP request. + """ + + __attrs__ = [ + '_content', 'status_code', 'headers', 'url', 'history', + 'encoding', 'reason', 'cookies', 'elapsed', 'request' + ] + + def __init__(self): + self._content = False + self._content_consumed = False + self._next = None + + #: Integer Code of responded HTTP Status, e.g. 404 or 200. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + #: Use of ``raw`` requires that ``stream=True`` be set on the request. + #: This requirement does not apply for use internally to Requests. + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Encoding to decode with when accessing r.text. + self.encoding = None + + #: A list of :class:`Response ` objects from + #: the history of the Request. Any redirect responses will end + #: up here. The list is sorted from the oldest to the most recent request. + self.history = [] + + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + self.reason = None + + #: A CookieJar of Cookies the server sent back. + self.cookies = cookiejar_from_dict({}) + + #: The amount of time elapsed between sending the request + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. + self.elapsed = datetime.timedelta(0) + + #: The :class:`PreparedRequest ` object to which this + #: is a response. + self.request = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __getstate__(self): + # Consume everything; accessing the content attribute makes + # sure the content has been fully read. + if not self._content_consumed: + self.content + + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + for name, value in state.items(): + setattr(self, name, value) + + # pickled objects do not have .raw + setattr(self, '_content_consumed', True) + setattr(self, 'raw', None) + + def __repr__(self): + return '' % (self.status_code) + + def __bool__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. 
+ """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return ('location' in self.headers and self.status_code in REDIRECT_STATI) + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the chardet library.""" + return chardet.detect(self.content)['encoding'] + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, 'stream'): + try: + for chunk in self.raw.stream(chunk_size, decode_content=True): + yield chunk + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError("chunk_size must be an int, it is instead a %s." 
% type(chunk_size)) + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks + + if decode_unicode: + chunks = stream_decode_response_unicode(chunks, self) + + return chunks + + def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + + .. note:: This method is not reentrant safe. + """ + + pending = None + + for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): + + if pending is not None: + chunk = pending + chunk + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + for line in lines: + yield line + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + if self._content_consumed: + raise RuntimeError( + 'The content for this response was already consumed') + + if self.status_code == 0 or self.raw is None: + self._content = None + else: + self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + If Response.encoding is None, encoding will be guessed using + ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return str('') + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. + try: + content = str(self.content, encoding, errors='replace') + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. + # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors='replace') + + return content + + def json(self, **kwargs): + r"""Returns the json-encoded content of a response, if any. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + :raises ValueError: If the response body does not contain valid json. + """ + + if not self.encoding and self.content and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using chardet to make + # a best guess). + encoding = guess_json_utf(self.content) + if encoding is not None: + try: + return complexjson.loads( + self.content.decode(encoding), **kwargs + ) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. 
This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass + return complexjson.loads(self.text, **kwargs) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get('link') + + # l = MultiDict() + l = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get('rel') or link.get('url') + l[key] = link + + return l + + def raise_for_status(self): + """Raises :class:`HTTPError`, if one occurred.""" + + http_error_msg = '' + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. (See PR #3538) + try: + reason = self.reason.decode('utf-8') + except UnicodeDecodeError: + reason = self.reason.decode('iso-8859-1') + else: + reason = self.reason + + if 400 <= self.status_code < 500: + http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) + + elif 500 <= self.status_code < 600: + http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. + + *Note: Should not normally need to be called explicitly.* + """ + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, 'release_conn', None) + if release_conn is not None: + release_conn() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/packages.py b/venv/Lib/site-packages/pip/_vendor/requests/packages.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/requests/packages.py rename to venv/Lib/site-packages/pip/_vendor/requests/packages.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/sessions.py b/venv/Lib/site-packages/pip/_vendor/requests/sessions.py similarity index 94% rename from env/lib/python2.7/site-packages/pip/_vendor/requests/sessions.py rename to venv/Lib/site-packages/pip/_vendor/requests/sessions.py index d73d700f..45ab8a5d 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/sessions.py +++ b/venv/Lib/site-packages/pip/_vendor/requests/sessions.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- """ -requests.session -~~~~~~~~~~~~~~~~ +requests.sessions +~~~~~~~~~~~~~~~~~ This module provides a Session object to manage and persist settings across requests (cookies, auth, proxies). @@ -11,9 +11,10 @@ import sys import time from datetime import timedelta +from collections import OrderedDict from .auth import _basic_auth_str -from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping +from .compat import cookielib, is_py3, urljoin, urlparse, Mapping from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT @@ -162,7 +163,7 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, resp.raw.read(decode_content=False) if len(resp.history) >= self.max_redirects: - raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp) + raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) # Release the connection back into the pool. 
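The Response helpers shown a little above (raise_for_status() and json()) are typically combined on the consumer side. A hedged sketch, assuming the standalone requests package and a reachable URL::

    import requests

    def get_json(url):
        resp = requests.get(url, timeout=10)
        resp.raise_for_status()  # raises requests.HTTPError for 4xx/5xx statuses
        return resp.json()       # decodes the body, guessing among UTF-8/16/32 if needed

    # data = get_json('https://httpbin.org/get')  # requires network access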
resp.close() @@ -170,7 +171,7 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, # Handle redirection without scheme (see: RFC 1808 Section 4) if url.startswith('//'): parsed_rurl = urlparse(resp.url) - url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url) + url = ':'.join([to_native_string(parsed_rurl.scheme), url]) # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) parsed = urlparse(url) @@ -192,19 +193,16 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, self.rebuild_method(prepared_request, resp) - # https://github.com/requests/requests/issues/1084 + # https://github.com/psf/requests/issues/1084 if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): - # https://github.com/requests/requests/issues/3490 + # https://github.com/psf/requests/issues/3490 purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') for header in purged_headers: prepared_request.headers.pop(header, None) prepared_request.body = None headers = prepared_request.headers - try: - del headers['Cookie'] - except KeyError: - pass + headers.pop('Cookie', None) # Extract any cookies sent on the response to the cookiejar # in the new request. Because we've mutated our copied prepared @@ -271,7 +269,6 @@ def rebuild_auth(self, prepared_request, response): if new_auth is not None: prepared_request.prepare_auth(new_auth) - return def rebuild_proxies(self, prepared_request, proxies): """This method re-evaluates the proxy configuration by considering the @@ -352,13 +349,13 @@ class Session(SessionRedirectMixin): Or as a context manager:: >>> with requests.Session() as s: - >>> s.get('https://httpbin.org/get') + ... s.get('https://httpbin.org/get') """ __attrs__ = [ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', - 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', + 'cert', 'adapters', 'stream', 'trust_env', 'max_redirects', ] @@ -390,6 +387,13 @@ def __init__(self): self.stream = False #: SSL Verification default. + #: Defaults to `True`, requiring requests to verify the TLS certificate at the + #: remote end. + #: If verify is set to `False`, requests will accept any TLS certificate + #: presented by the server, and will ignore hostname mismatches and/or + #: expired certificates, which will make your application vulnerable to + #: man-in-the-middle (MitM) attacks. + #: Only set this to `False` for testing. self.verify = True #: SSL client certificate default, if String, path to ssl client @@ -498,7 +502,12 @@ def request(self, method, url, content. Defaults to ``False``. :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use. Defaults to ``True``. + to a CA bundle to use. Defaults to ``True``. When set to + ``False``, requests will accept any TLS certificate presented by + the server, and will ignore hostname mismatches and/or expired + certificates, which will make your application vulnerable to + man-in-the-middle (MitM) attacks. Setting verify to ``False`` + may be useful during local development or testing. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. :rtype: requests.Response @@ -661,11 +670,13 @@ def send(self, request, **kwargs): extract_cookies_to_jar(self.cookies, request, r.raw) - # Redirect resolving generator. 
- gen = self.resolve_redirects(r, request, **kwargs) - # Resolve redirects if allowed. - history = [resp for resp in gen] if allow_redirects else [] + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] # Shuffle things around if there's history. if history: @@ -728,7 +739,7 @@ def get_adapter(self, url): return adapter # Nothing matches :-/ - raise InvalidSchema("No connection adapters were found for '%s'" % url) + raise InvalidSchema("No connection adapters were found for {!r}".format(url)) def close(self): """Closes all adapters and as such the session""" diff --git a/venv/Lib/site-packages/pip/_vendor/requests/status_codes.py b/venv/Lib/site-packages/pip/_vendor/requests/status_codes.py new file mode 100644 index 00000000..d80a7cd4 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/requests/status_codes.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- + +r""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. + +Example:: + + >>> import requests + >>> requests.codes['temporary_redirect'] + 307 + >>> requests.codes.teapot + 418 + >>> requests.codes['\o/'] + 200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. +""" + +from .structures import LookupDict + +_codes = { + + # Informational. + 100: ('continue',), + 101: ('switching_protocols',), + 102: ('processing',), + 103: ('checkpoint',), + 122: ('uri_too_long', 'request_uri_too_long'), + 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), + 201: ('created',), + 202: ('accepted',), + 203: ('non_authoritative_info', 'non_authoritative_information'), + 204: ('no_content',), + 205: ('reset_content', 'reset'), + 206: ('partial_content', 'partial'), + 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), + 208: ('already_reported',), + 226: ('im_used',), + + # Redirection. + 300: ('multiple_choices',), + 301: ('moved_permanently', 'moved', '\\o-'), + 302: ('found',), + 303: ('see_other', 'other'), + 304: ('not_modified',), + 305: ('use_proxy',), + 306: ('switch_proxy',), + 307: ('temporary_redirect', 'temporary_moved', 'temporary'), + 308: ('permanent_redirect', + 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 + + # Client Error. 
+ 400: ('bad_request', 'bad'), + 401: ('unauthorized',), + 402: ('payment_required', 'payment'), + 403: ('forbidden',), + 404: ('not_found', '-o-'), + 405: ('method_not_allowed', 'not_allowed'), + 406: ('not_acceptable',), + 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), + 408: ('request_timeout', 'timeout'), + 409: ('conflict',), + 410: ('gone',), + 411: ('length_required',), + 412: ('precondition_failed', 'precondition'), + 413: ('request_entity_too_large',), + 414: ('request_uri_too_large',), + 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), + 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), + 417: ('expectation_failed',), + 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), + 421: ('misdirected_request',), + 422: ('unprocessable_entity', 'unprocessable'), + 423: ('locked',), + 424: ('failed_dependency', 'dependency'), + 425: ('unordered_collection', 'unordered'), + 426: ('upgrade_required', 'upgrade'), + 428: ('precondition_required', 'precondition'), + 429: ('too_many_requests', 'too_many'), + 431: ('header_fields_too_large', 'fields_too_large'), + 444: ('no_response', 'none'), + 449: ('retry_with', 'retry'), + 450: ('blocked_by_windows_parental_controls', 'parental_controls'), + 451: ('unavailable_for_legal_reasons', 'legal_reasons'), + 499: ('client_closed_request',), + + # Server Error. + 500: ('internal_server_error', 'server_error', '/o\\', '✗'), + 501: ('not_implemented',), + 502: ('bad_gateway',), + 503: ('service_unavailable', 'unavailable'), + 504: ('gateway_timeout',), + 505: ('http_version_not_supported', 'http_version'), + 506: ('variant_also_negotiates',), + 507: ('insufficient_storage',), + 509: ('bandwidth_limit_exceeded', 'bandwidth'), + 510: ('not_extended',), + 511: ('network_authentication_required', 'network_auth', 'network_authentication'), +} + +codes = LookupDict(name='status_codes') + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(('\\', '/')): + setattr(codes, title.upper(), code) + + def doc(code): + names = ', '.join('``%s``' % n for n in _codes[code]) + return '* %d: %s' % (code, names) + + global __doc__ + __doc__ = (__doc__ + '\n' + + '\n'.join(doc(code) for code in sorted(_codes)) + if __doc__ is not None else None) + +_init() diff --git a/env/lib/python2.7/site-packages/pip/_vendor/requests/structures.py b/venv/Lib/site-packages/pip/_vendor/requests/structures.py similarity index 97% rename from env/lib/python2.7/site-packages/pip/_vendor/requests/structures.py rename to venv/Lib/site-packages/pip/_vendor/requests/structures.py index da930e28..8ee0ba7a 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/requests/structures.py +++ b/venv/Lib/site-packages/pip/_vendor/requests/structures.py @@ -7,7 +7,9 @@ Data structures that power Requests. """ -from .compat import OrderedDict, Mapping, MutableMapping +from collections import OrderedDict + +from .compat import Mapping, MutableMapping class CaseInsensitiveDict(MutableMapping): diff --git a/venv/Lib/site-packages/pip/_vendor/requests/utils.py b/venv/Lib/site-packages/pip/_vendor/requests/utils.py new file mode 100644 index 00000000..db67938e --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/requests/utils.py @@ -0,0 +1,992 @@ +# -*- coding: utf-8 -*- + +""" +requests.utils +~~~~~~~~~~~~~~ + +This module provides utility functions that are used within Requests +that are also useful for external consumption. 
+""" + +import codecs +import contextlib +import io +import os +import re +import socket +import struct +import sys +import tempfile +import warnings +import zipfile +from collections import OrderedDict + +from .__version__ import __version__ +from . import certs +# to_native_string is unused here, but imported here for backwards compatibility +from ._internal_utils import to_native_string +from .compat import parse_http_list as _parse_list_header +from .compat import ( + quote, urlparse, bytes, str, unquote, getproxies, + proxy_bypass, urlunparse, basestring, integer_types, is_py3, + proxy_bypass_environment, getproxies_environment, Mapping) +from .cookies import cookiejar_from_dict +from .structures import CaseInsensitiveDict +from .exceptions import ( + InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) + +NETRC_FILES = ('.netrc', '_netrc') + +DEFAULT_CA_BUNDLE_PATH = certs.where() + +DEFAULT_PORTS = {'http': 80, 'https': 443} + + +if sys.platform == 'win32': + # provide a proxy_bypass version on Windows without DNS lookups + + def proxy_bypass_registry(host): + try: + if is_py3: + import winreg + else: + import _winreg as winreg + except ImportError: + return False + + try: + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it + proxyEnable = int(winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0]) + # ProxyOverride is almost always a string + proxyOverride = winreg.QueryValueEx(internetSettings, + 'ProxyOverride')[0] + except OSError: + return False + if not proxyEnable or not proxyOverride: + return False + + # make a check value list from the registry entry: replace the + # '' string by the localhost entry and the corresponding + # canonical entry. + proxyOverride = proxyOverride.split(';') + # now check if we match one of the registry values. + for test in proxyOverride: + if test == '': + if '.' not in host: + return True + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char + if re.match(test, host, re.I): + return True + return False + + def proxy_bypass(host): # noqa + """Return True, if the host should be bypassed. + + Checks proxy settings gathered from the environment, if specified, + or the registry. + """ + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_registry(host) + + +def dict_to_sequence(d): + """Returns an internal sequence dictionary update.""" + + if hasattr(d, 'items'): + d = d.items() + + return d + + +def super_len(o): + total_length = None + current_position = 0 + + if hasattr(o, '__len__'): + total_length = len(o) + + elif hasattr(o, 'len'): + total_length = o.len + + elif hasattr(o, 'fileno'): + try: + fileno = o.fileno() + except io.UnsupportedOperation: + pass + else: + total_length = os.fstat(fileno).st_size + + # Having used fstat to determine the file length, we need to + # confirm that this file was opened up in binary mode. + if 'b' not in o.mode: + warnings.warn(( + "Requests has determined the content-length for this " + "request using the binary size of the file: however, the " + "file has been opened in text mode (i.e. without the 'b' " + "flag in the mode). This may lead to an incorrect " + "content-length. 
In Requests 3.0, support will be removed " + "for files in text mode."), + FileModeWarning + ) + + if hasattr(o, 'tell'): + try: + current_position = o.tell() + except (OSError, IOError): + # This can happen in some weird situations, such as when the file + # is actually a special file descriptor like stdin. In this + # instance, we don't know what the length is, so set it to zero and + # let requests chunk it instead. + if total_length is not None: + current_position = total_length + else: + if hasattr(o, 'seek') and total_length is None: + # StringIO and BytesIO have seek but no useable fileno + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except (OSError, IOError): + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): + """Returns the Requests tuple auth for a given url from netrc.""" + + netrc_file = os.environ.get('NETRC') + if netrc_file is not None: + netrc_locations = (netrc_file,) + else: + netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES) + + try: + from netrc import netrc, NetrcParseError + + netrc_path = None + + for f in netrc_locations: + try: + loc = os.path.expanduser(f) + except KeyError: + # os.path.expanduser can fail when $HOME is undefined and + # getpwuid fails. See https://bugs.python.org/issue20164 & + # https://github.com/psf/requests/issues/1846 + return + + if os.path.exists(loc): + netrc_path = loc + break + + # Abort early if there isn't one. + if netrc_path is None: + return + + ri = urlparse(url) + + # Strip port numbers from netloc. This weird `if...encode`` dance is + # used for Python 3.2, which doesn't support unicode literals. + splitstr = b':' + if isinstance(url, str): + splitstr = splitstr.decode('ascii') + host = ri.netloc.split(splitstr)[0] + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = (0 if _netrc[0] else 1) + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, IOError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise + + # App Engine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, 'name', None) + if (name and isinstance(name, basestring) and name[0] != '<' and + name[-1] != '>'): + return os.path.basename(name) + + +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. 
+ """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + member = '/'.join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, *member.split('/')) + if not os.path.exists(extracted_path): + extracted_path = zip_file.extract(member, path=tmp) + + return extracted_path + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. + + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). 
+def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if '=' not in item: + result[item] = None + continue + name, value = item.split('=', 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != '\\\\': + return value.replace('\\\\', '\\').replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {} + + for cookie in cj: + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn(( + 'In requests 3.0, get_encodings_from_content will be removed. For ' + 'more information, please see the discussion on issue #2266. 
(This' + ' warning should only appear once.)'), + DeprecationWarning) + + charset_re = re.compile(r']', flags=re.I) + pragma_re = re.compile(r']', flags=re.I) + xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') + + return (charset_re.findall(content) + + pragma_re.findall(content) + + xml_re.findall(content)) + + +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(';') + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1:].strip(items_to_strip) + params_dict[key.lower()] = value + return content_type, params_dict + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. + :rtype: str + """ + + content_type = headers.get('content-type') + + if not content_type: + return None + + content_type, params = _parse_content_type_header(content_type) + + if 'charset' in params: + return params['charset'].strip("'\"") + + if 'text' in content_type: + return 'ISO-8859-1' + + if 'application/json' in content_type: + # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset + return 'utf-8' + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes a iterator.""" + + if r.encoding is None: + for item in iterator: + yield item + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode(b'', final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + if slice_length is None or slice_length <= 0: + slice_length = len(string) + while pos < len(string): + yield string[pos:pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + 2. fall back and replace all unicode characters + + :rtype: str + """ + warnings.warn(( + 'In requests 3.0, get_unicode_from_response will be removed. For ' + 'more information, please see the discussion on issue #2266. (This' + ' warning should only appear once.)'), + DeprecationWarning) + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors='replace') + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. 
+ + :rtype: str + """ + parts = uri.split('%') + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = '%' + parts[i] + else: + parts[i] = '%' + parts[i] + return ''.join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. + return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] + netaddr, bits = net.split('/') + netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xffffffff ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack('>I', bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except socket.error: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count('/') == 1: + try: + mask = int(string_network.split('/')[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split('/')[0]) + except socket.error: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) + + # First check whether no_proxy is defined. 
If it is, check that the URL + # we're getting isn't in the no_proxy list. + no_proxy_arg = no_proxy + if no_proxy is None: + no_proxy = get_proxy('no_proxy') + parsed = urlparse(url) + + if parsed.hostname is None: + # URLs don't always have hostnames, e.g. file:/// urls. + return True + + if no_proxy: + # We need to check whether we match here. We need to see if we match + # the end of the hostname, both with and without the port. + no_proxy = ( + host for host in no_proxy.replace(' ', '').split(',') if host + ) + + if is_ipv4_address(parsed.hostname): + for proxy_ip in no_proxy: + if is_valid_cidr(proxy_ip): + if address_in_network(parsed.hostname, proxy_ip): + return True + elif parsed.hostname == proxy_ip: + # If no_proxy ip was defined in plain IP notation instead of cidr notation & + # matches the IP of the index + return True + else: + host_with_port = parsed.hostname + if parsed.port: + host_with_port += ':{}'.format(parsed.port) + + for host in no_proxy: + if parsed.hostname.endswith(host) or host_with_port.endswith(host): + # The URL does match something in no_proxy, so we don't want + # to apply the proxies on this URL. + return True + + with set_environ('no_proxy', no_proxy_arg): + # parsed.hostname can be `None` in cases such as a file URI. + try: + bypass = proxy_bypass(parsed.hostname) + except (TypeError, socket.gaierror): + bypass = False + + if bypass: + return True + + return False + + +def get_environ_proxies(url, no_proxy=None): + """ + Return a dict of environment proxies. + + :rtype: dict + """ + if should_bypass_proxies(url, no_proxy=no_proxy): + return {} + else: + return getproxies() + + +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. + + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + if urlparts.hostname is None: + return proxies.get(urlparts.scheme, proxies.get('all')) + + proxy_keys = [ + urlparts.scheme + '://' + urlparts.hostname, + urlparts.scheme, + 'all://' + urlparts.hostname, + 'all', + ] + proxy = None + for proxy_key in proxy_keys: + if proxy_key in proxies: + proxy = proxies[proxy_key] + break + + return proxy + + +def default_user_agent(name="python-requests"): + """ + Return a string representing the default user agent. + + :rtype: str + """ + return '%s/%s' % (name, __version__) + + +def default_headers(): + """ + :rtype: requests.structures.CaseInsensitiveDict + """ + return CaseInsensitiveDict({ + 'User-Agent': default_user_agent(), + 'Accept-Encoding': ', '.join(('gzip', 'deflate')), + 'Accept': '*/*', + 'Connection': 'keep-alive', + }) + + +def parse_header_links(value): + """Return a list of parsed link headers proxies. + + i.e. 
Link: ; rel=front; type="image/jpeg",; rel=back;type="image/jpeg" + + :rtype: list + """ + + links = [] + + replace_chars = ' \'"' + + value = value.strip(replace_chars) + if not value: + return links + + for val in re.split(', *<', value): + try: + url, params = val.split(';', 1) + except ValueError: + url, params = val, '' + + link = {'url': url.strip('<> \'"')} + + for param in params.split(';'): + try: + key, value = param.split('=') + except ValueError: + break + + link[key.strip(replace_chars)] = value.strip(replace_chars) + + links.append(link) + + return links + + +# Null bytes; no need to recreate these on each call to guess_json_utf +_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 +_null2 = _null * 2 +_null3 = _null * 3 + + +def guess_json_utf(data): + """ + :rtype: str + """ + # JSON always starts with two ASCII characters, so detection is as + # easy as counting the nulls and from their location and count + # determine the encoding. Also detect a BOM, if present. + sample = data[:4] + if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): + return 'utf-32' # BOM included + if sample[:3] == codecs.BOM_UTF8: + return 'utf-8-sig' # BOM included, MS style (discouraged) + if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + return 'utf-16' # BOM included + nullcount = sample.count(_null) + if nullcount == 0: + return 'utf-8' + if nullcount == 2: + if sample[::2] == _null2: # 1st and 3rd are null + return 'utf-16-be' + if sample[1::2] == _null2: # 2nd and 4th are null + return 'utf-16-le' + # Did not detect 2 valid UTF-16 ascii-range characters + if nullcount == 3: + if sample[:3] == _null3: + return 'utf-32-be' + if sample[1:] == _null3: + return 'utf-32-le' + # Did not detect a valid UTF-32 ascii-range character + return None + + +def prepend_scheme_if_needed(url, new_scheme): + """Given a URL that may or may not have a scheme, prepend the given scheme. + Does not replace a present scheme with the one provided as an argument. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) + + # urlparse is a finicky beast, and sometimes decides that there isn't a + # netloc present. Assume that it's being over-cautious, and switch netloc + # and path if urlparse decided there was no netloc. + if not netloc: + netloc, path = path, netloc + + return urlunparse((scheme, netloc, path, params, query, fragment)) + + +def get_auth_from_url(url): + """Given a url with authentication components, extract them into a tuple of + username,password. + + :rtype: (str,str) + """ + parsed = urlparse(url) + + try: + auth = (unquote(parsed.username), unquote(parsed.password)) + except (AttributeError, TypeError): + auth = ('', '') + + return auth + + +# Moved outside of function to avoid recompile every call +_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') +_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') + + +def check_header_validity(header): + """Verifies that header value is a string which doesn't contain + leading whitespace or return characters. This prevents unintended + header injection. + + :param header: tuple, in the format (name, value). 
+ """ + name, value = header + + if isinstance(value, bytes): + pat = _CLEAN_HEADER_REGEX_BYTE + else: + pat = _CLEAN_HEADER_REGEX_STR + try: + if not pat.match(value): + raise InvalidHeader("Invalid return character or leading space in header: %s" % name) + except TypeError: + raise InvalidHeader("Value for header {%s: %s} must be of type str or " + "bytes, not %s" % (name, value, type(value))) + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit('@', 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, '')) + + +def rewind_body(prepared_request): + """Move file pointer back to its recorded starting position + so it can be read again on redirect. + """ + body_seek = getattr(prepared_request.body, 'seek', None) + if body_seek is not None and isinstance(prepared_request._body_position, integer_types): + try: + body_seek(prepared_request._body_position) + except (IOError, OSError): + raise UnrewindableBodyError("An error occurred when rewinding request " + "body for redirect.") + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/__init__.py b/venv/Lib/site-packages/pip/_vendor/resolvelib/__init__.py new file mode 100644 index 00000000..184874d4 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/resolvelib/__init__.py @@ -0,0 +1,26 @@ +__all__ = [ + "__version__", + "AbstractProvider", + "AbstractResolver", + "BaseReporter", + "InconsistentCandidate", + "Resolver", + "RequirementsConflicted", + "ResolutionError", + "ResolutionImpossible", + "ResolutionTooDeep", +] + +__version__ = "0.7.0" + + +from .providers import AbstractProvider, AbstractResolver +from .reporters import BaseReporter +from .resolvers import ( + InconsistentCandidate, + RequirementsConflicted, + Resolver, + ResolutionError, + ResolutionImpossible, + ResolutionTooDeep, +) diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..61fcc0e7 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-36.pyc new file mode 100644 index 00000000..68d56b53 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-36.pyc new file mode 100644 index 00000000..a205bd83 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-36.pyc new file mode 100644 index 00000000..dfb1e25c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-36.pyc new file mode 100644 index 00000000..cd353dc7 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__init__.py b/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..7d8cd7c6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-36.pyc new file mode 100644 index 00000000..dbd77667 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py b/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py new file mode 100644 index 00000000..1becc509 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py @@ -0,0 +1,6 @@ +__all__ = ["Mapping", "Sequence"] + +try: + from collections.abc import Mapping, Sequence +except ImportError: + from collections import Mapping, Sequence diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/providers.py b/venv/Lib/site-packages/pip/_vendor/resolvelib/providers.py new file mode 100644 index 00000000..4822d166 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/resolvelib/providers.py @@ -0,0 +1,124 @@ +class AbstractProvider(object): + """Delegate class to provide requirement interface for the resolver.""" + + def identify(self, requirement_or_candidate): + """Given a requirement, return an identifier for it. + + This is used to identify a requirement, e.g. whether two requirements + should have their specifier parts merged. + """ + raise NotImplementedError + + def get_preference(self, identifier, resolutions, candidates, information): + """Produce a sort key for given requirement based on preference. + + The preference is defined as "I think this requirement should be + resolved first". The lower the return value is, the more preferred + this group of arguments is. + + :param identifier: An identifier as returned by ``identify()``. This + identifies the dependency matches of which should be returned. + :param resolutions: Mapping of candidates currently pinned by the + resolver. Each key is an identifier, and the value a candidate. + The candidate may conflict with requirements from ``information``. + :param candidates: Mapping of each dependency's possible candidates. + Each value is an iterator of candidates. + :param information: Mapping of requirement information of each package. + Each value is an iterator of *requirement information*. + + A *requirement information* instance is a named tuple with two members: + + * ``requirement`` specifies a requirement contributing to the current + list of candidates. 
+ * ``parent`` specifies the candidate that provides (dependend on) the + requirement, or ``None`` to indicate a root requirement. + + The preference could depend on a various of issues, including (not + necessarily in this order): + + * Is this package pinned in the current resolution result? + * How relaxed is the requirement? Stricter ones should probably be + worked on first? (I don't know, actually.) + * How many possibilities are there to satisfy this requirement? Those + with few left should likely be worked on first, I guess? + * Are there any known conflicts for this requirement? We should + probably work on those with the most known conflicts. + + A sortable value should be returned (this will be used as the ``key`` + parameter of the built-in sorting function). The smaller the value is, + the more preferred this requirement is (i.e. the sorting function + is called with ``reverse=False``). + """ + raise NotImplementedError + + def find_matches(self, identifier, requirements, incompatibilities): + """Find all possible candidates that satisfy given constraints. + + :param identifier: An identifier as returned by ``identify()``. This + identifies the dependency matches of which should be returned. + :param requirements: A mapping of requirements that all returned + candidates must satisfy. Each key is an identifier, and the value + an iterator of requirements for that dependency. + :param incompatibilities: A mapping of known incompatibilities of + each dependency. Each key is an identifier, and the value an + iterator of incompatibilities known to the resolver. All + incompatibilities *must* be excluded from the return value. + + This should try to get candidates based on the requirements' types. + For VCS, local, and archive requirements, the one-and-only match is + returned, and for a "named" requirement, the index(es) should be + consulted to find concrete candidates for this requirement. + + The return value should produce candidates ordered by preference; the + most preferred candidate should come first. The return type may be one + of the following: + + * A callable that returns an iterator that yields candidates. + * An collection of candidates. + * An iterable of candidates. This will be consumed immediately into a + list of candidates. + """ + raise NotImplementedError + + def is_satisfied_by(self, requirement, candidate): + """Whether the given requirement can be satisfied by a candidate. + + The candidate is guarenteed to have been generated from the + requirement. + + A boolean should be returned to indicate whether ``candidate`` is a + viable solution to the requirement. + """ + raise NotImplementedError + + def get_dependencies(self, candidate): + """Get dependencies of a candidate. + + This should return a collection of requirements that `candidate` + specifies as its dependencies. + """ + raise NotImplementedError + + +class AbstractResolver(object): + """The thing that performs the actual resolution work.""" + + base_exception = Exception + + def __init__(self, provider, reporter): + self.provider = provider + self.reporter = reporter + + def resolve(self, requirements, **kwargs): + """Take a collection of constraints, spit out the resolution result. + + This returns a representation of the final resolution state, with one + guarenteed attribute ``mapping`` that contains resolved candidates as + values. The keys are their respective identifiers. + + :param requirements: A collection of constraints. 
+ :param kwargs: Additional keyword arguments that subclasses may accept. + + :raises: ``self.base_exception`` or its subclass. + """ + raise NotImplementedError diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/reporters.py b/venv/Lib/site-packages/pip/_vendor/resolvelib/reporters.py new file mode 100644 index 00000000..563489e1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/resolvelib/reporters.py @@ -0,0 +1,37 @@ +class BaseReporter(object): + """Delegate class to provider progress reporting for the resolver.""" + + def starting(self): + """Called before the resolution actually starts.""" + + def starting_round(self, index): + """Called before each round of resolution starts. + + The index is zero-based. + """ + + def ending_round(self, index, state): + """Called before each round of resolution ends. + + This is NOT called if the resolution ends at this round. Use `ending` + if you want to report finalization. The index is zero-based. + """ + + def ending(self, state): + """Called before the resolution ends successfully.""" + + def adding_requirement(self, requirement, parent): + """Called when adding a new requirement into the resolve criteria. + + :param requirement: The additional requirement to be applied to filter + the available candidaites. + :param parent: The candidate that requires ``requirement`` as a + dependency, or None if ``requirement`` is one of the root + requirements passed in from ``Resolver.resolve()``. + """ + + def backtracking(self, candidate): + """Called when rejecting a candidate during backtracking.""" + + def pinning(self, candidate): + """Called when adding a candidate to the potential solution.""" diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py b/venv/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py new file mode 100644 index 00000000..99ee1051 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py @@ -0,0 +1,474 @@ +import collections +import operator + +from .providers import AbstractResolver +from .structs import DirectedGraph, IteratorMapping, build_iter_view + + +RequirementInformation = collections.namedtuple( + "RequirementInformation", ["requirement", "parent"] +) + + +class ResolverException(Exception): + """A base class for all exceptions raised by this module. + + Exceptions derived by this class should all be handled in this module. Any + bubbling pass the resolver should be treated as a bug. + """ + + +class RequirementsConflicted(ResolverException): + def __init__(self, criterion): + super(RequirementsConflicted, self).__init__(criterion) + self.criterion = criterion + + def __str__(self): + return "Requirements conflict: {}".format( + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class InconsistentCandidate(ResolverException): + def __init__(self, candidate, criterion): + super(InconsistentCandidate, self).__init__(candidate, criterion) + self.candidate = candidate + self.criterion = criterion + + def __str__(self): + return "Provided candidate {!r} does not satisfy {}".format( + self.candidate, + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class Criterion(object): + """Representation of possible resolution results of a package. + + This holds three attributes: + + * `information` is a collection of `RequirementInformation` pairs. + Each pair is a requirement contributing to this criterion, and the + candidate that provides the requirement. 
+ * `incompatibilities` is a collection of all known not-to-work candidates + to exclude from consideration. + * `candidates` is a collection containing all possible candidates deducted + from the union of contributing requirements and known incompatibilities. + It should never be empty, except when the criterion is an attribute of a + raised `RequirementsConflicted` (in which case it is always empty). + + .. note:: + This class is intended to be externally immutable. **Do not** mutate + any of its attribute containers. + """ + + def __init__(self, candidates, information, incompatibilities): + self.candidates = candidates + self.information = information + self.incompatibilities = incompatibilities + + def __repr__(self): + requirements = ", ".join( + "({!r}, via={!r})".format(req, parent) + for req, parent in self.information + ) + return "Criterion({})".format(requirements) + + def iter_requirement(self): + return (i.requirement for i in self.information) + + def iter_parent(self): + return (i.parent for i in self.information) + + +class ResolutionError(ResolverException): + pass + + +class ResolutionImpossible(ResolutionError): + def __init__(self, causes): + super(ResolutionImpossible, self).__init__(causes) + # causes is a list of RequirementInformation objects + self.causes = causes + + +class ResolutionTooDeep(ResolutionError): + def __init__(self, round_count): + super(ResolutionTooDeep, self).__init__(round_count) + self.round_count = round_count + + +# Resolution state in a round. +State = collections.namedtuple("State", "mapping criteria") + + +class Resolution(object): + """Stateful resolution object. + + This is designed as a one-off object that holds information to kick start + the resolution process, and holds the results afterwards. + """ + + def __init__(self, provider, reporter): + self._p = provider + self._r = reporter + self._states = [] + + @property + def state(self): + try: + return self._states[-1] + except IndexError: + raise AttributeError("state") + + def _push_new_state(self): + """Push a new state into history. + + This new state will be used to hold resolution results of the next + coming round. 
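As a rough sketch of how the BaseReporter hooks shown earlier are meant to be used, a reporter only overrides the events it cares about; everything else stays a no-op (import path assumed):

    from pip._vendor.resolvelib import BaseReporter  # assumed vendored import path

    class PrintingReporter(BaseReporter):
        """Log a few resolution events; all other hooks remain no-ops."""

        def starting_round(self, index):
            print("starting round", index)

        def pinning(self, candidate):
            print("pinned", candidate)

        def backtracking(self, candidate):
            print("rejected", candidate)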
+ """ + base = self._states[-1] + state = State( + mapping=base.mapping.copy(), + criteria=base.criteria.copy(), + ) + self._states.append(state) + + def _merge_into_criterion(self, requirement, parent): + self._r.adding_requirement(requirement=requirement, parent=parent) + + identifier = self._p.identify(requirement_or_candidate=requirement) + criterion = self.state.criteria.get(identifier) + if criterion: + incompatibilities = list(criterion.incompatibilities) + else: + incompatibilities = [] + + matches = self._p.find_matches( + identifier=identifier, + requirements=IteratorMapping( + self.state.criteria, + operator.methodcaller("iter_requirement"), + {identifier: [requirement]}, + ), + incompatibilities=IteratorMapping( + self.state.criteria, + operator.attrgetter("incompatibilities"), + {identifier: incompatibilities}, + ), + ) + + if criterion: + information = list(criterion.information) + information.append(RequirementInformation(requirement, parent)) + else: + information = [RequirementInformation(requirement, parent)] + + criterion = Criterion( + candidates=build_iter_view(matches), + information=information, + incompatibilities=incompatibilities, + ) + if not criterion.candidates: + raise RequirementsConflicted(criterion) + return identifier, criterion + + def _get_preference(self, name): + return self._p.get_preference( + identifier=name, + resolutions=self.state.mapping, + candidates=IteratorMapping( + self.state.criteria, + operator.attrgetter("candidates"), + ), + information=IteratorMapping( + self.state.criteria, + operator.attrgetter("information"), + ), + ) + + def _is_current_pin_satisfying(self, name, criterion): + try: + current_pin = self.state.mapping[name] + except KeyError: + return False + return all( + self._p.is_satisfied_by(requirement=r, candidate=current_pin) + for r in criterion.iter_requirement() + ) + + def _get_criteria_to_update(self, candidate): + criteria = {} + for r in self._p.get_dependencies(candidate=candidate): + name, crit = self._merge_into_criterion(r, parent=candidate) + criteria[name] = crit + return criteria + + def _attempt_to_pin_criterion(self, name): + criterion = self.state.criteria[name] + + causes = [] + for candidate in criterion.candidates: + try: + criteria = self._get_criteria_to_update(candidate) + except RequirementsConflicted as e: + causes.append(e.criterion) + continue + + # Check the newly-pinned candidate actually works. This should + # always pass under normal circumstances, but in the case of a + # faulty provider, we will raise an error to notify the implementer + # to fix find_matches() and/or is_satisfied_by(). + satisfied = all( + self._p.is_satisfied_by(requirement=r, candidate=candidate) + for r in criterion.iter_requirement() + ) + if not satisfied: + raise InconsistentCandidate(candidate, criterion) + + # Put newly-pinned candidate at the end. This is essential because + # backtracking looks at this mapping to get the last pin. + self._r.pinning(candidate=candidate) + self.state.mapping.pop(name, None) + self.state.mapping[name] = candidate + self.state.criteria.update(criteria) + + return [] + + # All candidates tried, nothing works. This criterion is a dead + # end, signal for backtracking. + return causes + + def _backtrack(self): + """Perform backtracking. + + When we enter here, the stack is like this:: + + [ state Z ] + [ state Y ] + [ state X ] + .... earlier states are irrelevant. + + 1. No pins worked for Z, so it does not have a pin. + 2. We want to reset state Y to unpinned, and pin another candidate. 
+ 3. State X holds what state Y was before the pin, but does not + have the incompatibility information gathered in state Y. + + Each iteration of the loop will: + + 1. Discard Z. + 2. Discard Y but remember its incompatibility information gathered + previously, and the failure we're dealing with right now. + 3. Push a new state Y' based on X, and apply the incompatibility + information from Y to Y'. + 4a. If this causes Y' to conflict, we need to backtrack again. Make Y' + the new Z and go back to step 2. + 4b. If the incompatibilities apply cleanly, end backtracking. + """ + while len(self._states) >= 3: + # Remove the state that triggered backtracking. + del self._states[-1] + + # Retrieve the last candidate pin and known incompatibilities. + broken_state = self._states.pop() + name, candidate = broken_state.mapping.popitem() + incompatibilities_from_broken = [ + (k, list(v.incompatibilities)) + for k, v in broken_state.criteria.items() + ] + + # Also mark the newly known incompatibility. + incompatibilities_from_broken.append((name, [candidate])) + + self._r.backtracking(candidate=candidate) + + # Create a new state from the last known-to-work one, and apply + # the previously gathered incompatibility information. + def _patch_criteria(): + for k, incompatibilities in incompatibilities_from_broken: + if not incompatibilities: + continue + try: + criterion = self.state.criteria[k] + except KeyError: + continue + matches = self._p.find_matches( + identifier=k, + requirements=IteratorMapping( + self.state.criteria, + operator.methodcaller("iter_requirement"), + ), + incompatibilities=IteratorMapping( + self.state.criteria, + operator.attrgetter("incompatibilities"), + {k: incompatibilities}, + ), + ) + candidates = build_iter_view(matches) + if not candidates: + return False + incompatibilities.extend(criterion.incompatibilities) + self.state.criteria[k] = Criterion( + candidates=candidates, + information=list(criterion.information), + incompatibilities=incompatibilities, + ) + return True + + self._push_new_state() + success = _patch_criteria() + + # It works! Let's work on this new state. + if success: + return True + + # State does not work after applying known incompatibilities. + # Try the still previous state. + + # No way to backtrack anymore. + return False + + def resolve(self, requirements, max_rounds): + if self._states: + raise RuntimeError("already resolved") + + self._r.starting() + + # Initialize the root state. + self._states = [State(mapping=collections.OrderedDict(), criteria={})] + for r in requirements: + try: + name, crit = self._merge_into_criterion(r, parent=None) + except RequirementsConflicted as e: + raise ResolutionImpossible(e.criterion.information) + self.state.criteria[name] = crit + + # The root state is saved as a sentinel so the first ever pin can have + # something to backtrack to if it fails. The root state is basically + # pinning the virtual "root" package in the graph. + self._push_new_state() + + for round_index in range(max_rounds): + self._r.starting_round(index=round_index) + + unsatisfied_names = [ + key + for key, criterion in self.state.criteria.items() + if not self._is_current_pin_satisfying(key, criterion) + ] + + # All criteria are accounted for. Nothing more to pin, we are done! + if not unsatisfied_names: + self._r.ending(state=self.state) + return self.state + + # Choose the most preferred unpinned criterion to try. 
+ name = min(unsatisfied_names, key=self._get_preference) + failure_causes = self._attempt_to_pin_criterion(name) + + if failure_causes: + # Backtrack if pinning fails. The backtrack process puts us in + # an unpinned state, so we can work on it in the next round. + success = self._backtrack() + + # Dead ends everywhere. Give up. + if not success: + causes = [i for c in failure_causes for i in c.information] + raise ResolutionImpossible(causes) + else: + # Pinning was successful. Push a new state to do another pin. + self._push_new_state() + + self._r.ending_round(index=round_index, state=self.state) + + raise ResolutionTooDeep(max_rounds) + + +def _has_route_to_root(criteria, key, all_keys, connected): + if key in connected: + return True + if key not in criteria: + return False + for p in criteria[key].iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey in connected: + connected.add(key) + return True + if _has_route_to_root(criteria, pkey, all_keys, connected): + connected.add(key) + return True + return False + + +Result = collections.namedtuple("Result", "mapping graph criteria") + + +def _build_result(state): + mapping = state.mapping + all_keys = {id(v): k for k, v in mapping.items()} + all_keys[id(None)] = None + + graph = DirectedGraph() + graph.add(None) # Sentinel as root dependencies' parent. + + connected = {None} + for key, criterion in state.criteria.items(): + if not _has_route_to_root(state.criteria, key, all_keys, connected): + continue + if key not in graph: + graph.add(key) + for p in criterion.iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey not in graph: + graph.add(pkey) + graph.connect(pkey, key) + + return Result( + mapping={k: v for k, v in mapping.items() if k in connected}, + graph=graph, + criteria=state.criteria, + ) + + +class Resolver(AbstractResolver): + """The thing that performs the actual resolution work.""" + + base_exception = ResolverException + + def resolve(self, requirements, max_rounds=100): + """Take a collection of constraints, spit out the resolution result. + + The return value is a representation to the final resolution result. It + is a tuple subclass with three public members: + + * `mapping`: A dict of resolved candidates. Each key is an identifier + of a requirement (as returned by the provider's `identify` method), + and the value is the resolved candidate. + * `graph`: A `DirectedGraph` instance representing the dependency tree. + The vertices are keys of `mapping`, and each edge represents *why* + a particular package is included. A special vertex `None` is + included to represent parents of user-supplied requirements. + * `criteria`: A dict of "criteria" that hold detailed information on + how edges in the graph are derived. Each key is an identifier of a + requirement, and the value is a `Criterion` instance. + + The following exceptions may be raised if a resolution cannot be found: + + * `ResolutionImpossible`: A resolution cannot be found for the given + combination of requirements. The `causes` attribute of the + exception is a list of (requirement, parent), giving the + requirements that could not be satisfied. + * `ResolutionTooDeep`: The dependency tree is too deeply nested and + the resolver gave up. This is usually caused by a circular + dependency, but you can try to resolve this by increasing the + `max_rounds` argument. 
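To make the provider/resolver contract described above concrete, here is a deliberately trivial sketch: requirements and candidates are plain strings, each name has exactly one candidate (itself) and no dependencies. The import path and all names are assumptions for illustration, not part of the original diff:

    from pip._vendor.resolvelib import AbstractProvider, BaseReporter, Resolver  # assumed vendored import path

    class TrivialProvider(AbstractProvider):
        def identify(self, requirement_or_candidate):
            return requirement_or_candidate

        def get_preference(self, identifier, resolutions, candidates, information):
            return identifier  # any sortable value works here

        def find_matches(self, identifier, requirements, incompatibilities):
            banned = set(incompatibilities[identifier])
            return [c for c in (identifier,) if c not in banned]

        def is_satisfied_by(self, requirement, candidate):
            return requirement == candidate

        def get_dependencies(self, candidate):
            return []

    result = Resolver(TrivialProvider(), BaseReporter()).resolve(["requests", "selenium"])
    print(result.mapping)  # {'requests': 'requests', 'selenium': 'selenium'}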
+ """ + resolution = Resolution(self.provider, self.reporter) + state = resolution.resolve(requirements, max_rounds=max_rounds) + return _build_result(state) diff --git a/venv/Lib/site-packages/pip/_vendor/resolvelib/structs.py b/venv/Lib/site-packages/pip/_vendor/resolvelib/structs.py new file mode 100644 index 00000000..e1e7aa42 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/resolvelib/structs.py @@ -0,0 +1,153 @@ +import itertools + +from .compat import collections_abc + + +class DirectedGraph(object): + """A graph structure with directed edges.""" + + def __init__(self): + self._vertices = set() + self._forwards = {} # -> Set[] + self._backwards = {} # -> Set[] + + def __iter__(self): + return iter(self._vertices) + + def __len__(self): + return len(self._vertices) + + def __contains__(self, key): + return key in self._vertices + + def copy(self): + """Return a shallow copy of this graph.""" + other = DirectedGraph() + other._vertices = set(self._vertices) + other._forwards = {k: set(v) for k, v in self._forwards.items()} + other._backwards = {k: set(v) for k, v in self._backwards.items()} + return other + + def add(self, key): + """Add a new vertex to the graph.""" + if key in self._vertices: + raise ValueError("vertex exists") + self._vertices.add(key) + self._forwards[key] = set() + self._backwards[key] = set() + + def remove(self, key): + """Remove a vertex from the graph, disconnecting all edges from/to it.""" + self._vertices.remove(key) + for f in self._forwards.pop(key): + self._backwards[f].remove(key) + for t in self._backwards.pop(key): + self._forwards[t].remove(key) + + def connected(self, f, t): + return f in self._backwards[t] and t in self._forwards[f] + + def connect(self, f, t): + """Connect two existing vertices. + + Nothing happens if the vertices are already connected. + """ + if t not in self._vertices: + raise KeyError(t) + self._forwards[f].add(t) + self._backwards[t].add(f) + + def iter_edges(self): + for f, children in self._forwards.items(): + for t in children: + yield f, t + + def iter_children(self, key): + return iter(self._forwards[key]) + + def iter_parents(self, key): + return iter(self._backwards[key]) + + +class IteratorMapping(collections_abc.Mapping): + def __init__(self, mapping, accessor, appends=None): + self._mapping = mapping + self._accessor = accessor + self._appends = appends or {} + + def __contains__(self, key): + return key in self._mapping or key in self._appends + + def __getitem__(self, k): + try: + v = self._mapping[k] + except KeyError: + return iter(self._appends[k]) + return itertools.chain(self._accessor(v), self._appends.get(k, ())) + + def __iter__(self): + more = (k for k in self._appends if k not in self._mapping) + return itertools.chain(self._mapping, more) + + def __len__(self): + more = len(k for k in self._appends if k not in self._mapping) + return len(self._mapping) + more + + +class _FactoryIterableView(object): + """Wrap an iterator factory returned by `find_matches()`. + + Calling `iter()` on this class would invoke the underlying iterator + factory, making it a "collection with ordering" that can be iterated + through multiple times, but lacks random access methods presented in + built-in Python sequence types. 
+ """ + + def __init__(self, factory): + self._factory = factory + + def __repr__(self): + return "{}({})".format(type(self).__name__, list(self._factory())) + + def __bool__(self): + try: + next(self._factory()) + except StopIteration: + return False + return True + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + return self._factory() + + +class _SequenceIterableView(object): + """Wrap an iterable returned by find_matches(). + + This is essentially just a proxy to the underlying sequence that provides + the same interface as `_FactoryIterableView`. + """ + + def __init__(self, sequence): + self._sequence = sequence + + def __repr__(self): + return "{}({})".format(type(self).__name__, self._sequence) + + def __bool__(self): + return bool(self._sequence) + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + return iter(self._sequence) + + +def build_iter_view(matches): + """Build an iterable view from the value returned by `find_matches()`.""" + if callable(matches): + return _FactoryIterableView(matches) + if not isinstance(matches, collections_abc.Sequence): + matches = list(matches) + return _SequenceIterableView(matches) diff --git a/venv/Lib/site-packages/pip/_vendor/six.py b/venv/Lib/site-packages/pip/_vendor/six.py new file mode 100644 index 00000000..83f69783 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/six.py @@ -0,0 +1,982 @@ +# Copyright (c) 2010-2020 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.15.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
+ class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + 
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + 
setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del 
attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def 
create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def 
reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] > (3,): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. + if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. 
+ def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). + resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, binary_type): + return s + if isinstance(s, text_type): + return s.encode(encoding, errors) + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + # Optimization: Fast return for the common case. + if type(s) is str: + return s + if PY2 and isinstance(s, text_type): + return s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. 
+ + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__init__.py b/venv/Lib/site-packages/pip/_vendor/tenacity/__init__.py new file mode 100644 index 00000000..5f8cb505 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/__init__.py @@ -0,0 +1,523 @@ +# -*- coding: utf-8 -*- +# Copyright 2016-2018 Julien Danjou +# Copyright 2017 Elisey Zanko +# Copyright 2016 Étienne Bersac +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + from inspect import iscoroutinefunction +except ImportError: + iscoroutinefunction = None + +try: + import tornado +except ImportError: + tornado = None + +import sys +import threading +import typing as t +import warnings +from abc import ABCMeta, abstractmethod +from concurrent import futures + + +from pip._vendor import six + +from pip._vendor.tenacity import _utils + +# Import all built-in retry strategies for easier usage. 
+from .retry import retry_base # noqa +from .retry import retry_all # noqa +from .retry import retry_always # noqa +from .retry import retry_any # noqa +from .retry import retry_if_exception # noqa +from .retry import retry_if_exception_type # noqa +from .retry import retry_if_not_result # noqa +from .retry import retry_if_result # noqa +from .retry import retry_never # noqa +from .retry import retry_unless_exception_type # noqa +from .retry import retry_if_exception_message # noqa +from .retry import retry_if_not_exception_message # noqa + +# Import all nap strategies for easier usage. +from .nap import sleep # noqa +from .nap import sleep_using_event # noqa + +# Import all built-in stop strategies for easier usage. +from .stop import stop_after_attempt # noqa +from .stop import stop_after_delay # noqa +from .stop import stop_all # noqa +from .stop import stop_any # noqa +from .stop import stop_never # noqa +from .stop import stop_when_event_set # noqa + +# Import all built-in wait strategies for easier usage. +from .wait import wait_chain # noqa +from .wait import wait_combine # noqa +from .wait import wait_exponential # noqa +from .wait import wait_fixed # noqa +from .wait import wait_incrementing # noqa +from .wait import wait_none # noqa +from .wait import wait_random # noqa +from .wait import wait_random_exponential # noqa +from .wait import wait_random_exponential as wait_full_jitter # noqa + +# Import all built-in before strategies for easier usage. +from .before import before_log # noqa +from .before import before_nothing # noqa + +# Import all built-in after strategies for easier usage. +from .after import after_log # noqa +from .after import after_nothing # noqa + +# Import all built-in after strategies for easier usage. +from .before_sleep import before_sleep_log # noqa +from .before_sleep import before_sleep_nothing # noqa + + +WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable) + + +@t.overload +def retry(fn): + # type: (WrappedFn) -> WrappedFn + """Type signature for @retry as a raw decorator.""" + pass + + +@t.overload +def retry(*dargs, **dkw): # noqa + # type: (...) -> t.Callable[[WrappedFn], WrappedFn] + """Type signature for the @retry() decorator constructor.""" + pass + + +def retry(*dargs, **dkw): # noqa + """Wrap a function with a new `Retrying` object. + + :param dargs: positional arguments passed to Retrying object + :param dkw: keyword arguments passed to the Retrying object + """ + # support both @retry and @retry() as valid syntax + if len(dargs) == 1 and callable(dargs[0]): + return retry()(dargs[0]) + else: + + def wrap(f): + if isinstance(f, retry_base): + warnings.warn( + ( + "Got retry_base instance ({cls}) as callable argument, " + + "this will probably hang indefinitely (did you mean " + + "retry={cls}(...)?)" + ).format(cls=f.__class__.__name__) + ) + if iscoroutinefunction is not None and iscoroutinefunction(f): + r = AsyncRetrying(*dargs, **dkw) + elif ( + tornado + and hasattr(tornado.gen, "is_coroutine_function") + and tornado.gen.is_coroutine_function(f) + ): + r = TornadoRetrying(*dargs, **dkw) + else: + r = Retrying(*dargs, **dkw) + + return r.wraps(f) + + return wrap + + +class TryAgain(Exception): + """Always retry the executed function when raised.""" + + +NO_RESULT = object() + + +class DoAttempt(object): + pass + + +class DoSleep(float): + pass + + +class BaseAction(object): + """Base class for representing actions to take by retry object. 
+ + Concrete implementations must define: + - __init__: to initialize all necessary fields + - REPR_ATTRS: class variable specifying attributes to include in repr(self) + - NAME: for identification in retry object methods and callbacks + """ + + REPR_FIELDS = () + NAME = None + + def __repr__(self): + state_str = ", ".join( + "%s=%r" % (field, getattr(self, field)) for field in self.REPR_FIELDS + ) + return "%s(%s)" % (type(self).__name__, state_str) + + def __str__(self): + return repr(self) + + +class RetryAction(BaseAction): + REPR_FIELDS = ("sleep",) + NAME = "retry" + + def __init__(self, sleep): + self.sleep = float(sleep) + + +_unset = object() + + +def _first_set(first, second): + return second if first is _unset else first + + +class RetryError(Exception): + """Encapsulates the last attempt instance right before giving up.""" + + def __init__(self, last_attempt): + self.last_attempt = last_attempt + super(RetryError, self).__init__(last_attempt) + + def reraise(self): + if self.last_attempt.failed: + raise self.last_attempt.result() + raise self + + def __str__(self): + return "{0}[{1}]".format(self.__class__.__name__, self.last_attempt) + + +class AttemptManager(object): + """Manage attempt context.""" + + def __init__(self, retry_state): + self.retry_state = retry_state + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + if isinstance(exc_value, BaseException): + self.retry_state.set_exception((exc_type, exc_value, traceback)) + return True # Swallow exception. + else: + # We don't have the result, actually. + self.retry_state.set_result(None) + + +class BaseRetrying(object): + __metaclass__ = ABCMeta + + def __init__( + self, + sleep=sleep, + stop=stop_never, + wait=wait_none(), + retry=retry_if_exception_type(), + before=before_nothing, + after=after_nothing, + before_sleep=None, + reraise=False, + retry_error_cls=RetryError, + retry_error_callback=None, + ): + self.sleep = sleep + self.stop = stop + self.wait = wait + self.retry = retry + self.before = before + self.after = after + self.before_sleep = before_sleep + self.reraise = reraise + self._local = threading.local() + self.retry_error_cls = retry_error_cls + self.retry_error_callback = retry_error_callback + + # This attribute was moved to RetryCallState and is deprecated on + # Retrying objects but kept for backward compatibility. 
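The `AttemptManager` and `Retrying` machinery above also backs tenacity's iteration style, where each attempt is used as a context manager. A minimal sketch, assuming the public `tenacity` distribution is installed (the copy in this diff is pip-internal under `pip._vendor.tenacity`); `do_something_flaky` is a hypothetical stand-in:

```python
# Sketch only: uses the public `tenacity` package, not pip's vendored copy.
import random

from tenacity import Retrying, RetryError, stop_after_attempt, wait_fixed


def do_something_flaky():
    # Hypothetical helper that fails roughly half the time.
    if random.random() < 0.5:
        raise ValueError("transient failure")
    return "ok"


result = None
try:
    for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_fixed(0.1)):
        with attempt:  # an AttemptManager: it records the result or exception of this try
            result = do_something_flaky()
except RetryError:
    pass  # all three attempts failed

print(result)
```

The loop ends as soon as an attempt completes without the retry predicate asking for another try, which is the same `iter()`/`DoAttempt`/`DoSleep` protocol the `__call__` implementations below use.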
+ self.fn = None + + def copy( + self, + sleep=_unset, + stop=_unset, + wait=_unset, + retry=_unset, + before=_unset, + after=_unset, + before_sleep=_unset, + reraise=_unset, + retry_error_cls=_unset, + retry_error_callback=_unset, + ): + """Copy this object with some parameters changed if needed.""" + return self.__class__( + sleep=_first_set(sleep, self.sleep), + stop=_first_set(stop, self.stop), + wait=_first_set(wait, self.wait), + retry=_first_set(retry, self.retry), + before=_first_set(before, self.before), + after=_first_set(after, self.after), + before_sleep=_first_set(before_sleep, self.before_sleep), + reraise=_first_set(reraise, self.reraise), + retry_error_cls=_first_set(retry_error_cls, self.retry_error_cls), + retry_error_callback=_first_set( + retry_error_callback, self.retry_error_callback + ), + ) + + def __repr__(self): + attrs = dict( + _utils.visible_attrs(self, attrs={"me": id(self)}), + __class__=self.__class__.__name__, + ) + return ( + "<%(__class__)s object at 0x%(me)x (stop=%(stop)s, " + "wait=%(wait)s, sleep=%(sleep)s, retry=%(retry)s, " + "before=%(before)s, after=%(after)s)>" + ) % (attrs) + + @property + def statistics(self): + """Return a dictionary of runtime statistics. + + This dictionary will be empty when the controller has never been + ran. When it is running or has ran previously it should have (but + may not) have useful and/or informational keys and values when + running is underway and/or completed. + + .. warning:: The keys in this dictionary **should** be some what + stable (not changing), but there existence **may** + change between major releases as new statistics are + gathered or removed so before accessing keys ensure that + they actually exist and handle when they do not. + + .. note:: The values in this dictionary are local to the thread + running call (so if multiple threads share the same retrying + object - either directly or indirectly) they will each have + there own view of statistics they have collected (in the + future we may provide a way to aggregate the various + statistics from each thread). + """ + try: + return self._local.statistics + except AttributeError: + self._local.statistics = {} + return self._local.statistics + + def wraps(self, f): + """Wrap a function for retrying. + + :param f: A function to wraps for retrying. 
+ """ + + @_utils.wraps(f) + def wrapped_f(*args, **kw): + return self(f, *args, **kw) + + def retry_with(*args, **kwargs): + return self.copy(*args, **kwargs).wraps(f) + + wrapped_f.retry = self + wrapped_f.retry_with = retry_with + + return wrapped_f + + def begin(self, fn): + self.statistics.clear() + self.statistics["start_time"] = _utils.now() + self.statistics["attempt_number"] = 1 + self.statistics["idle_for"] = 0 + self.fn = fn + + def iter(self, retry_state): # noqa + fut = retry_state.outcome + if fut is None: + if self.before is not None: + self.before(retry_state) + return DoAttempt() + + is_explicit_retry = retry_state.outcome.failed and isinstance( + retry_state.outcome.exception(), TryAgain + ) + if not (is_explicit_retry or self.retry(retry_state=retry_state)): + return fut.result() + + if self.after is not None: + self.after(retry_state=retry_state) + + self.statistics["delay_since_first_attempt"] = retry_state.seconds_since_start + if self.stop(retry_state=retry_state): + if self.retry_error_callback: + return self.retry_error_callback(retry_state=retry_state) + retry_exc = self.retry_error_cls(fut) + if self.reraise: + raise retry_exc.reraise() + six.raise_from(retry_exc, fut.exception()) + + if self.wait: + sleep = self.wait(retry_state=retry_state) + else: + sleep = 0.0 + retry_state.next_action = RetryAction(sleep) + retry_state.idle_for += sleep + self.statistics["idle_for"] += sleep + self.statistics["attempt_number"] += 1 + + if self.before_sleep is not None: + self.before_sleep(retry_state=retry_state) + + return DoSleep(sleep) + + def __iter__(self): + self.begin(None) + + retry_state = RetryCallState(self, fn=None, args=(), kwargs={}) + while True: + do = self.iter(retry_state=retry_state) + if isinstance(do, DoAttempt): + yield AttemptManager(retry_state=retry_state) + elif isinstance(do, DoSleep): + retry_state.prepare_for_next_attempt() + self.sleep(do) + else: + break + + @abstractmethod + def __call__(self, *args, **kwargs): + pass + + def call(self, *args, **kwargs): + """Use ``__call__`` instead because this method is deprecated.""" + warnings.warn( + "'call()' method is deprecated. 
" + "Use '__call__()' instead", + DeprecationWarning, + ) + return self.__call__(*args, **kwargs) + + +class Retrying(BaseRetrying): + """Retrying controller.""" + + def __call__(self, fn, *args, **kwargs): + self.begin(fn) + + retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs) + while True: + do = self.iter(retry_state=retry_state) + if isinstance(do, DoAttempt): + try: + result = fn(*args, **kwargs) + except BaseException: # noqa: B902 + retry_state.set_exception(sys.exc_info()) + else: + retry_state.set_result(result) + elif isinstance(do, DoSleep): + retry_state.prepare_for_next_attempt() + self.sleep(do) + else: + return do + + +class Future(futures.Future): + """Encapsulates a (future or past) attempted call to a target function.""" + + def __init__(self, attempt_number): + super(Future, self).__init__() + self.attempt_number = attempt_number + + @property + def failed(self): + """Return whether a exception is being held in this future.""" + return self.exception() is not None + + @classmethod + def construct(cls, attempt_number, value, has_exception): + """Construct a new Future object.""" + fut = cls(attempt_number) + if has_exception: + fut.set_exception(value) + else: + fut.set_result(value) + return fut + + +class RetryCallState(object): + """State related to a single call wrapped with Retrying.""" + + def __init__(self, retry_object, fn, args, kwargs): + #: Retry call start timestamp + self.start_time = _utils.now() + #: Retry manager object + self.retry_object = retry_object + #: Function wrapped by this retry call + self.fn = fn + #: Arguments of the function wrapped by this retry call + self.args = args + #: Keyword arguments of the function wrapped by this retry call + self.kwargs = kwargs + + #: The number of the current attempt + self.attempt_number = 1 + #: Last outcome (result or exception) produced by the function + self.outcome = None + #: Timestamp of the last outcome + self.outcome_timestamp = None + #: Time spent sleeping in retries + self.idle_for = 0 + #: Next action as decided by the retry manager + self.next_action = None + + @property + def seconds_since_start(self): + if self.outcome_timestamp is None: + return None + return self.outcome_timestamp - self.start_time + + def prepare_for_next_attempt(self): + self.outcome = None + self.outcome_timestamp = None + self.attempt_number += 1 + self.next_action = None + + def set_result(self, val): + ts = _utils.now() + fut = Future(self.attempt_number) + fut.set_result(val) + self.outcome, self.outcome_timestamp = fut, ts + + def set_exception(self, exc_info): + ts = _utils.now() + fut = Future(self.attempt_number) + _utils.capture(fut, exc_info) + self.outcome, self.outcome_timestamp = fut, ts + + +if iscoroutinefunction: + from pip._vendor.tenacity._asyncio import AsyncRetrying + +if tornado: + from pip._vendor.tenacity.tornadoweb import TornadoRetrying diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..57593ca1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-36.pyc new file mode 100644 index 00000000..36b79c01 Binary files /dev/null and 
b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-36.pyc new file mode 100644 index 00000000..30683777 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-36.pyc new file mode 100644 index 00000000..8a221ce6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-36.pyc new file mode 100644 index 00000000..7a171ea6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-36.pyc new file mode 100644 index 00000000..edae21a1 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/compat.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/compat.cpython-36.pyc new file mode 100644 index 00000000..006af6dd Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-36.pyc new file mode 100644 index 00000000..636e4050 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-36.pyc new file mode 100644 index 00000000..3075a8f2 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-36.pyc new file mode 100644 index 00000000..2f10f45a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-36.pyc new file mode 100644 index 00000000..c47d4dee Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-36.pyc new file mode 100644 index 00000000..c1aa9511 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/_asyncio.py b/venv/Lib/site-packages/pip/_vendor/tenacity/_asyncio.py new file mode 100644 
index 00000000..d9a2d463 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/_asyncio.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Étienne Bersac +# Copyright 2016 Julien Danjou +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import sys +from asyncio import sleep + +from pip._vendor.tenacity import AttemptManager +from pip._vendor.tenacity import BaseRetrying +from pip._vendor.tenacity import DoAttempt +from pip._vendor.tenacity import DoSleep +from pip._vendor.tenacity import RetryCallState + + +class AsyncRetrying(BaseRetrying): + def __init__(self, sleep=sleep, **kwargs): + super(AsyncRetrying, self).__init__(**kwargs) + self.sleep = sleep + + async def __call__(self, fn, *args, **kwargs): + self.begin(fn) + + retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs) + while True: + do = self.iter(retry_state=retry_state) + if isinstance(do, DoAttempt): + try: + result = await fn(*args, **kwargs) + except BaseException: # noqa: B902 + retry_state.set_exception(sys.exc_info()) + else: + retry_state.set_result(result) + elif isinstance(do, DoSleep): + retry_state.prepare_for_next_attempt() + await self.sleep(do) + else: + return do + + def __aiter__(self): + self.begin(None) + self._retry_state = RetryCallState(self, fn=None, args=(), kwargs={}) + return self + + async def __anext__(self): + while True: + do = self.iter(retry_state=self._retry_state) + if do is None: + raise StopAsyncIteration + elif isinstance(do, DoAttempt): + return AttemptManager(retry_state=self._retry_state) + elif isinstance(do, DoSleep): + self._retry_state.prepare_for_next_attempt() + await self.sleep(do) + else: + return do + + def wraps(self, fn): + fn = super().wraps(fn) + # Ensure wrapper is recognized as a coroutine function. + + async def async_wrapped(*args, **kwargs): + return await fn(*args, **kwargs) + + # Preserve attributes + async_wrapped.retry = fn.retry + async_wrapped.retry_with = fn.retry_with + + return async_wrapped diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/_utils.py b/venv/Lib/site-packages/pip/_vendor/tenacity/_utils.py new file mode 100644 index 00000000..8c0ca788 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/_utils.py @@ -0,0 +1,159 @@ +# Copyright 2016 Julien Danjou +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
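Taken together, the `__init__.py` and `_asyncio.py` modules added above provide the decorator-style entry point (`AsyncRetrying` is selected automatically when the wrapped function is a coroutine). A minimal sketch of that usage, again assuming the public `tenacity` distribution rather than pip's internal `pip._vendor.tenacity` copy:

```python
# Sketch only: public `tenacity` names; the vendored copy is for pip's own use.
import random

from tenacity import retry, stop_after_attempt, wait_fixed


@retry(stop=stop_after_attempt(5), wait=wait_fixed(0.2))
def fetch_value():
    # Fails randomly; the decorator re-invokes it until it succeeds or
    # five attempts have been made (then tenacity.RetryError is raised).
    if random.random() < 0.3:
        raise ConnectionError("transient failure")
    return 42


print(fetch_value())
```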
+ +import inspect +import sys +import time +from functools import update_wrapper + +from pip._vendor import six + +# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint... +try: + MAX_WAIT = sys.maxint / 2 +except AttributeError: + MAX_WAIT = 1073741823 + + +if six.PY2: + from functools import WRAPPER_ASSIGNMENTS, WRAPPER_UPDATES + + def wraps(fn): + """Do the same as six.wraps but only copy attributes that exist. + + For example, object instances don't have __name__ attribute, so + six.wraps fails. This is fixed in Python 3 + (https://bugs.python.org/issue3445), but didn't get backported to six. + + Also, see https://github.com/benjaminp/six/issues/250. + """ + + def filter_hasattr(obj, attrs): + return tuple(a for a in attrs if hasattr(obj, a)) + + return six.wraps( + fn, + assigned=filter_hasattr(fn, WRAPPER_ASSIGNMENTS), + updated=filter_hasattr(fn, WRAPPER_UPDATES), + ) + + def capture(fut, tb): + # TODO(harlowja): delete this in future, since its + # has to repeatedly calculate this crap. + fut.set_exception_info(tb[1], tb[2]) + + def getargspec(func): + # This was deprecated in Python 3. + return inspect.getargspec(func) + + +else: + from functools import wraps # noqa + + def capture(fut, tb): + fut.set_exception(tb[1]) + + def getargspec(func): + return inspect.getfullargspec(func) + + +def visible_attrs(obj, attrs=None): + if attrs is None: + attrs = {} + for attr_name, attr in inspect.getmembers(obj): + if attr_name.startswith("_"): + continue + attrs[attr_name] = attr + return attrs + + +def find_ordinal(pos_num): + # See: https://en.wikipedia.org/wiki/English_numerals#Ordinal_numbers + if pos_num == 0: + return "th" + elif pos_num == 1: + return "st" + elif pos_num == 2: + return "nd" + elif pos_num == 3: + return "rd" + elif pos_num >= 4 and pos_num <= 20: + return "th" + else: + return find_ordinal(pos_num % 10) + + +def to_ordinal(pos_num): + return "%i%s" % (pos_num, find_ordinal(pos_num)) + + +def get_callback_name(cb): + """Get a callback fully-qualified name. + + If no name can be produced ``repr(cb)`` is called and returned. + """ + segments = [] + try: + segments.append(cb.__qualname__) + except AttributeError: + try: + segments.append(cb.__name__) + if inspect.ismethod(cb): + try: + # This attribute doesn't exist on py3.x or newer, so + # we optionally ignore it... (on those versions of + # python `__qualname__` should have been found anyway). + segments.insert(0, cb.im_class.__name__) + except AttributeError: + pass + except AttributeError: + pass + if not segments: + return repr(cb) + else: + try: + # When running under sphinx it appears this can be none? + if cb.__module__: + segments.insert(0, cb.__module__) + except AttributeError: + pass + return ".".join(segments) + + +try: + now = time.monotonic # noqa +except AttributeError: + from monotonic import monotonic as now # noqa + + +class cached_property(object): + """A property that is computed once per instance. + + Upon being computed it replaces itself with an ordinary attribute. Deleting + the attribute resets the property. 
+ + Source: https://github.com/bottlepy/bottle/blob/1de24157e74a6971d136550afe1b63eec5b0df2b/bottle.py#L234-L246 + """ # noqa: E501 + + def __init__(self, func): + update_wrapper(self, func) + self.func = func + + def __get__(self, obj, cls): + if obj is None: + return self + value = obj.__dict__[self.func.__name__] = self.func(obj) + return value diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/after.py b/venv/Lib/site-packages/pip/_vendor/tenacity/after.py new file mode 100644 index 00000000..c04e7c18 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/after.py @@ -0,0 +1,40 @@ +# Copyright 2016 Julien Danjou +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pip._vendor.tenacity import _utils + + +def after_nothing(retry_state): + """After call strategy that does nothing.""" + + +def after_log(logger, log_level, sec_format="%0.3f"): + """After call strategy that logs to some logger the finished attempt.""" + log_tpl = ( + "Finished call to '%s' after " + str(sec_format) + "(s), " + "this was the %s time calling it." + ) + + def log_it(retry_state): + logger.log( + log_level, + log_tpl, + _utils.get_callback_name(retry_state.fn), + retry_state.seconds_since_start, + _utils.to_ordinal(retry_state.attempt_number), + ) + + return log_it diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/before.py b/venv/Lib/site-packages/pip/_vendor/tenacity/before.py new file mode 100644 index 00000000..3229517d --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/before.py @@ -0,0 +1,35 @@ +# Copyright 2016 Julien Danjou +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
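The `after.py` module above and the `before.py`/`before_sleep.py` modules that follow define logging hooks for the three points in a retry cycle. A hedged sketch of wiring them up, assuming the public `tenacity` package and the standard `logging` module:

```python
# Sketch only: public `tenacity` names mirroring the vendored hook modules.
import logging

from tenacity import (
    RetryError, retry, stop_after_attempt, wait_fixed,
    before_log, after_log, before_sleep_log,
)

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("retry-demo")


@retry(
    stop=stop_after_attempt(3),
    wait=wait_fixed(0.1),
    before=before_log(logger, logging.DEBUG),                # "Starting call to ..."
    after=after_log(logger, logging.DEBUG),                  # "Finished call to ... after ..."
    before_sleep=before_sleep_log(logger, logging.WARNING),  # "Retrying ... in ... seconds as it raised ..."
)
def always_fails():
    raise RuntimeError("still broken")


try:
    always_fails()
except RetryError:
    pass  # raised after the third failed attempt
```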
+ +from pip._vendor.tenacity import _utils + + +def before_nothing(retry_state): + """Before call strategy that does nothing.""" + + +def before_log(logger, log_level): + """Before call strategy that logs to some logger the attempt.""" + + def log_it(retry_state): + logger.log( + log_level, + "Starting call to '%s', this is the %s time calling it.", + _utils.get_callback_name(retry_state.fn), + _utils.to_ordinal(retry_state.attempt_number), + ) + + return log_it diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/before_sleep.py b/venv/Lib/site-packages/pip/_vendor/tenacity/before_sleep.py new file mode 100644 index 00000000..a051acac --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/before_sleep.py @@ -0,0 +1,51 @@ +# Copyright 2016 Julien Danjou +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pip._vendor.tenacity import _utils +from pip._vendor.tenacity.compat import get_exc_info_from_future + + +def before_sleep_nothing(retry_state): + """Before call strategy that does nothing.""" + + +def before_sleep_log(logger, log_level, exc_info=False): + """Before call strategy that logs to some logger the attempt.""" + + def log_it(retry_state): + if retry_state.outcome.failed: + ex = retry_state.outcome.exception() + verb, value = "raised", "%s: %s" % (type(ex).__name__, ex) + + if exc_info: + local_exc_info = get_exc_info_from_future(retry_state.outcome) + else: + local_exc_info = False + else: + verb, value = "returned", retry_state.outcome.result() + local_exc_info = False # exc_info does not apply when no exception + + logger.log( + log_level, + "Retrying %s in %s seconds as it %s %s.", + _utils.get_callback_name(retry_state.fn), + getattr(retry_state.next_action, "sleep"), + verb, + value, + exc_info=local_exc_info, + ) + + return log_it diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/compat.py b/venv/Lib/site-packages/pip/_vendor/tenacity/compat.py new file mode 100644 index 00000000..ce4796b1 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/compat.py @@ -0,0 +1,23 @@ +"""Utilities for providing backward compatibility.""" +from pip._vendor import six + + +def get_exc_info_from_future(future): + """ + Get an exc_info value from a Future. + + Given a a Future instance, retrieve an exc_info value suitable for passing + in as the exc_info parameter to logging.Logger.log() and related methods. + + On Python 2, this will be a (type, value, traceback) triple. + On Python 3, this will be an exception instance (with embedded traceback). + + If there was no exception, None is returned on both versions of Python. 
+ """ + if six.PY3: + return future.exception() + else: + ex, tb = future.exception_info() + if ex is None: + return None + return type(ex), ex, tb diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/nap.py b/venv/Lib/site-packages/pip/_vendor/tenacity/nap.py new file mode 100644 index 00000000..83ff839c --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/nap.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Étienne Bersac +# Copyright 2016 Julien Danjou +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time + + +def sleep(seconds): + """ + Sleep strategy that delays execution for a given number of seconds. + + This is the default strategy, and may be mocked out for unit testing. + """ + time.sleep(seconds) + + +class sleep_using_event(object): + """Sleep strategy that waits on an event to be set.""" + + def __init__(self, event): + self.event = event + + def __call__(self, timeout): + # NOTE(harlowja): this may *not* actually wait for timeout + # seconds if the event is set (ie this may eject out early). + self.event.wait(timeout=timeout) diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/retry.py b/venv/Lib/site-packages/pip/_vendor/tenacity/retry.py new file mode 100644 index 00000000..ddaf8e7f --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/retry.py @@ -0,0 +1,192 @@ +# -*- encoding: utf-8 -*- +# +# Copyright 2016–2021 Julien Danjou +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
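`retry.py`, added next, defines the predicate-style strategies (`retry_if_exception_type`, `retry_if_result`, and friends), which compose with `|` (any) and `&` (all). A small sketch under the same assumption of the public `tenacity` package:

```python
# Sketch only: `|` builds retry_any and `&` builds retry_all, as in the module below.
from tenacity import Retrying, stop_after_attempt, retry_if_exception_type, retry_if_result

retryer = Retrying(
    stop=stop_after_attempt(4),
    # Retry on ConnectionError, or when the call returns None (result not ready yet).
    retry=retry_if_exception_type(ConnectionError) | retry_if_result(lambda r: r is None),
)

calls = {"n": 0}


def fetch():
    calls["n"] += 1
    return "payload" if calls["n"] >= 3 else None  # not ready on the first two calls


print(retryer(fetch))  # succeeds on the third attempt and prints "payload"
```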
+ +import abc +import re + +from pip._vendor import six + + +@six.add_metaclass(abc.ABCMeta) +class retry_base(object): + """Abstract base class for retry strategies.""" + + @abc.abstractmethod + def __call__(self, retry_state): + pass + + def __and__(self, other): + return retry_all(self, other) + + def __or__(self, other): + return retry_any(self, other) + + +class _retry_never(retry_base): + """Retry strategy that never rejects any result.""" + + def __call__(self, retry_state): + return False + + +retry_never = _retry_never() + + +class _retry_always(retry_base): + """Retry strategy that always rejects any result.""" + + def __call__(self, retry_state): + return True + + +retry_always = _retry_always() + + +class retry_if_exception(retry_base): + """Retry strategy that retries if an exception verifies a predicate.""" + + def __init__(self, predicate): + self.predicate = predicate + + def __call__(self, retry_state): + if retry_state.outcome.failed: + return self.predicate(retry_state.outcome.exception()) + else: + return False + + +class retry_if_exception_type(retry_if_exception): + """Retries if an exception has been raised of one or more types.""" + + def __init__(self, exception_types=Exception): + self.exception_types = exception_types + super(retry_if_exception_type, self).__init__( + lambda e: isinstance(e, exception_types) + ) + + +class retry_unless_exception_type(retry_if_exception): + """Retries until an exception is raised of one or more types.""" + + def __init__(self, exception_types=Exception): + self.exception_types = exception_types + super(retry_unless_exception_type, self).__init__( + lambda e: not isinstance(e, exception_types) + ) + + def __call__(self, retry_state): + # always retry if no exception was raised + if not retry_state.outcome.failed: + return True + return self.predicate(retry_state.outcome.exception()) + + +class retry_if_result(retry_base): + """Retries if the result verifies a predicate.""" + + def __init__(self, predicate): + self.predicate = predicate + + def __call__(self, retry_state): + if not retry_state.outcome.failed: + return self.predicate(retry_state.outcome.result()) + else: + return False + + +class retry_if_not_result(retry_base): + """Retries if the result refutes a predicate.""" + + def __init__(self, predicate): + self.predicate = predicate + + def __call__(self, retry_state): + if not retry_state.outcome.failed: + return not self.predicate(retry_state.outcome.result()) + else: + return False + + +class retry_if_exception_message(retry_if_exception): + """Retries if an exception message equals or matches.""" + + def __init__(self, message=None, match=None): + if message and match: + raise TypeError( + "{}() takes either 'message' or 'match', not both".format( + self.__class__.__name__ + ) + ) + + # set predicate + if message: + + def message_fnc(exception): + return message == str(exception) + + predicate = message_fnc + elif match: + prog = re.compile(match) + + def match_fnc(exception): + return prog.match(str(exception)) + + predicate = match_fnc + else: + raise TypeError( + "{}() missing 1 required argument 'message' or 'match'".format( + self.__class__.__name__ + ) + ) + + super(retry_if_exception_message, self).__init__(predicate) + + +class retry_if_not_exception_message(retry_if_exception_message): + """Retries until an exception message equals or matches.""" + + def __init__(self, *args, **kwargs): + super(retry_if_not_exception_message, self).__init__(*args, **kwargs) + # invert predicate + if_predicate = self.predicate + 
self.predicate = lambda *args_, **kwargs_: not if_predicate(*args_, **kwargs_) + + def __call__(self, retry_state): + if not retry_state.outcome.failed: + return True + return self.predicate(retry_state.outcome.exception()) + + +class retry_any(retry_base): + """Retries if any of the retries condition is valid.""" + + def __init__(self, *retries): + self.retries = retries + + def __call__(self, retry_state): + return any(r(retry_state) for r in self.retries) + + +class retry_all(retry_base): + """Retries if all the retries condition are valid.""" + + def __init__(self, *retries): + self.retries = retries + + def __call__(self, retry_state): + return all(r(retry_state) for r in self.retries) diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/stop.py b/venv/Lib/site-packages/pip/_vendor/tenacity/stop.py new file mode 100644 index 00000000..4db27f14 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/stop.py @@ -0,0 +1,95 @@ +# -*- encoding: utf-8 -*- +# +# Copyright 2016–2021 Julien Danjou +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import abc + +from pip._vendor import six + + +@six.add_metaclass(abc.ABCMeta) +class stop_base(object): + """Abstract base class for stop strategies.""" + + @abc.abstractmethod + def __call__(self, retry_state): + pass + + def __and__(self, other): + return stop_all(self, other) + + def __or__(self, other): + return stop_any(self, other) + + +class stop_any(stop_base): + """Stop if any of the stop condition is valid.""" + + def __init__(self, *stops): + self.stops = stops + + def __call__(self, retry_state): + return any(x(retry_state) for x in self.stops) + + +class stop_all(stop_base): + """Stop if all the stop conditions are valid.""" + + def __init__(self, *stops): + self.stops = stops + + def __call__(self, retry_state): + return all(x(retry_state) for x in self.stops) + + +class _stop_never(stop_base): + """Never stop.""" + + def __call__(self, retry_state): + return False + + +stop_never = _stop_never() + + +class stop_when_event_set(stop_base): + """Stop when the given event is set.""" + + def __init__(self, event): + self.event = event + + def __call__(self, retry_state): + return self.event.is_set() + + +class stop_after_attempt(stop_base): + """Stop when the previous attempt >= max_attempt.""" + + def __init__(self, max_attempt_number): + self.max_attempt_number = max_attempt_number + + def __call__(self, retry_state): + return retry_state.attempt_number >= self.max_attempt_number + + +class stop_after_delay(stop_base): + """Stop when the time from the first attempt >= limit.""" + + def __init__(self, max_delay): + self.max_delay = max_delay + + def __call__(self, retry_state): + return retry_state.seconds_since_start >= self.max_delay diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/tornadoweb.py b/venv/Lib/site-packages/pip/_vendor/tenacity/tornadoweb.py new file mode 100644 index 00000000..dbf9f762 --- /dev/null +++ 
b/venv/Lib/site-packages/pip/_vendor/tenacity/tornadoweb.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Elisey Zanko +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +from pip._vendor.tenacity import BaseRetrying +from pip._vendor.tenacity import DoAttempt +from pip._vendor.tenacity import DoSleep +from pip._vendor.tenacity import RetryCallState + +from tornado import gen + + +class TornadoRetrying(BaseRetrying): + def __init__(self, sleep=gen.sleep, **kwargs): + super(TornadoRetrying, self).__init__(**kwargs) + self.sleep = sleep + + @gen.coroutine + def __call__(self, fn, *args, **kwargs): + self.begin(fn) + + retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs) + while True: + do = self.iter(retry_state=retry_state) + if isinstance(do, DoAttempt): + try: + result = yield fn(*args, **kwargs) + except BaseException: # noqa: B902 + retry_state.set_exception(sys.exc_info()) + else: + retry_state.set_result(result) + elif isinstance(do, DoSleep): + retry_state.prepare_for_next_attempt() + yield self.sleep(do) + else: + raise gen.Return(do) diff --git a/venv/Lib/site-packages/pip/_vendor/tenacity/wait.py b/venv/Lib/site-packages/pip/_vendor/tenacity/wait.py new file mode 100644 index 00000000..625b0e36 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/tenacity/wait.py @@ -0,0 +1,183 @@ +# -*- encoding: utf-8 -*- +# +# Copyright 2016–2021 Julien Danjou +# Copyright 2016 Joshua Harlow +# Copyright 2013-2014 Ray Holder +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
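`wait.py`, added next, provides the wait strategies. They compose with `+` (and therefore with `sum()`), and `wait_random_exponential` implements the full-jitter backoff described in its docstring. A brief sketch, with the same caveat about using the public package rather than pip's vendored copy:

```python
# Sketch only: composing wait strategies from the module below.
from tenacity import retry, stop_after_delay, wait_fixed, wait_random, wait_random_exponential


@retry(
    stop=stop_after_delay(10),
    # 1s base plus 0-2s of jitter between attempts (wait_base.__add__ -> wait_combine).
    wait=wait_fixed(1) + wait_random(0, 2),
)
def poll_resource():
    ...


# Full-jitter exponential backoff, each sleep drawn from [0, min(0.5 * 2**n, 60)] seconds.
backoff = wait_random_exponential(multiplier=0.5, max=60)
```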
+ +import abc +import random + +from pip._vendor import six + +from pip._vendor.tenacity import _utils + + +@six.add_metaclass(abc.ABCMeta) +class wait_base(object): + """Abstract base class for wait strategies.""" + + @abc.abstractmethod + def __call__(self, retry_state): + pass + + def __add__(self, other): + return wait_combine(self, other) + + def __radd__(self, other): + # make it possible to use multiple waits with the built-in sum function + if other == 0: + return self + return self.__add__(other) + + +class wait_fixed(wait_base): + """Wait strategy that waits a fixed amount of time between each retry.""" + + def __init__(self, wait): + self.wait_fixed = wait + + def __call__(self, retry_state): + return self.wait_fixed + + +class wait_none(wait_fixed): + """Wait strategy that doesn't wait at all before retrying.""" + + def __init__(self): + super(wait_none, self).__init__(0) + + +class wait_random(wait_base): + """Wait strategy that waits a random amount of time between min/max.""" + + def __init__(self, min=0, max=1): # noqa + self.wait_random_min = min + self.wait_random_max = max + + def __call__(self, retry_state): + return self.wait_random_min + ( + random.random() * (self.wait_random_max - self.wait_random_min) + ) + + +class wait_combine(wait_base): + """Combine several waiting strategies.""" + + def __init__(self, *strategies): + self.wait_funcs = strategies + + def __call__(self, retry_state): + return sum(x(retry_state=retry_state) for x in self.wait_funcs) + + +class wait_chain(wait_base): + """Chain two or more waiting strategies. + + If all strategies are exhausted, the very last strategy is used + thereafter. + + For example:: + + @retry(wait=wait_chain(*[wait_fixed(1) for i in range(3)] + + [wait_fixed(2) for j in range(5)] + + [wait_fixed(5) for k in range(4))) + def wait_chained(): + print("Wait 1s for 3 attempts, 2s for 5 attempts and 5s + thereafter.") + """ + + def __init__(self, *strategies): + self.strategies = strategies + + def __call__(self, retry_state): + wait_func_no = min(max(retry_state.attempt_number, 1), len(self.strategies)) + wait_func = self.strategies[wait_func_no - 1] + return wait_func(retry_state=retry_state) + + +class wait_incrementing(wait_base): + """Wait an incremental amount of time after each attempt. + + Starting at a starting value and incrementing by a value for each attempt + (and restricting the upper limit to some maximum value). + """ + + def __init__(self, start=0, increment=100, max=_utils.MAX_WAIT): # noqa + self.start = start + self.increment = increment + self.max = max + + def __call__(self, retry_state): + result = self.start + (self.increment * (retry_state.attempt_number - 1)) + return max(0, min(result, self.max)) + + +class wait_exponential(wait_base): + """Wait strategy that applies exponential backoff. + + It allows for a customized multiplier and an ability to restrict the + upper and lower limits to some maximum and minimum value. + + The intervals are fixed (i.e. there is no jitter), so this strategy is + suitable for balancing retries against latency when a required resource is + unavailable for an unknown duration, but *not* suitable for resolving + contention between multiple processes for a shared resource. Use + wait_random_exponential for the latter case. 
+ """ + + def __init__(self, multiplier=1, max=_utils.MAX_WAIT, exp_base=2, min=0): # noqa + self.multiplier = multiplier + self.min = min + self.max = max + self.exp_base = exp_base + + def __call__(self, retry_state): + try: + exp = self.exp_base ** (retry_state.attempt_number - 1) + result = self.multiplier * exp + except OverflowError: + return self.max + return max(max(0, self.min), min(result, self.max)) + + +class wait_random_exponential(wait_exponential): + """Random wait with exponentially widening window. + + An exponential backoff strategy used to mediate contention between multiple + uncoordinated processes for a shared resource in distributed systems. This + is the sense in which "exponential backoff" is meant in e.g. Ethernet + networking, and corresponds to the "Full Jitter" algorithm described in + this blog post: + + https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/ + + Each retry occurs at a random time in a geometrically expanding interval. + It allows for a custom multiplier and an ability to restrict the upper + limit of the random interval to some maximum value. + + Example:: + + wait_random_exponential(multiplier=0.5, # initial window 0.5s + max=60) # max 60s timeout + + When waiting for an unavailable resource to become available again, as + opposed to trying to resolve contention for a shared resource, the + wait_exponential strategy (which uses a fixed interval) may be preferable. + + """ + + def __call__(self, retry_state): + high = super(wait_random_exponential, self).__call__(retry_state=retry_state) + return random.uniform(0, high) diff --git a/venv/Lib/site-packages/pip/_vendor/toml/__init__.py b/venv/Lib/site-packages/pip/_vendor/toml/__init__.py new file mode 100644 index 00000000..34a5eabb --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/toml/__init__.py @@ -0,0 +1,25 @@ +"""Python module which parses and emits TOML. + +Released under the MIT license. 
+""" + +from pip._vendor.toml import encoder +from pip._vendor.toml import decoder + +__version__ = "0.10.2" +_spec_ = "0.5.0" + +load = decoder.load +loads = decoder.loads +TomlDecoder = decoder.TomlDecoder +TomlDecodeError = decoder.TomlDecodeError +TomlPreserveCommentDecoder = decoder.TomlPreserveCommentDecoder + +dump = encoder.dump +dumps = encoder.dumps +TomlEncoder = encoder.TomlEncoder +TomlArraySeparatorEncoder = encoder.TomlArraySeparatorEncoder +TomlPreserveInlineDictEncoder = encoder.TomlPreserveInlineDictEncoder +TomlNumpyEncoder = encoder.TomlNumpyEncoder +TomlPreserveCommentEncoder = encoder.TomlPreserveCommentEncoder +TomlPathlibEncoder = encoder.TomlPathlibEncoder diff --git a/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..f6b2ce61 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/decoder.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/decoder.cpython-36.pyc new file mode 100644 index 00000000..ef58ef85 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/decoder.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/encoder.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/encoder.cpython-36.pyc new file mode 100644 index 00000000..8b9248e8 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/encoder.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/ordered.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/ordered.cpython-36.pyc new file mode 100644 index 00000000..aa11efb2 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/ordered.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/tz.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/tz.cpython-36.pyc new file mode 100644 index 00000000..45b46e50 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/toml/__pycache__/tz.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/toml/decoder.py b/venv/Lib/site-packages/pip/_vendor/toml/decoder.py new file mode 100644 index 00000000..e071100d --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/toml/decoder.py @@ -0,0 +1,1057 @@ +import datetime +import io +from os import linesep +import re +import sys + +from pip._vendor.toml.tz import TomlTz + +if sys.version_info < (3,): + _range = xrange # noqa: F821 +else: + unicode = str + _range = range + basestring = str + unichr = chr + + +def _detect_pathlib_path(p): + if (3, 4) <= sys.version_info: + import pathlib + if isinstance(p, pathlib.PurePath): + return True + return False + + +def _ispath(p): + if isinstance(p, (bytes, basestring)): + return True + return _detect_pathlib_path(p) + + +def _getpath(p): + if (3, 6) <= sys.version_info: + import os + return os.fspath(p) + if _detect_pathlib_path(p): + return str(p) + return p + + +try: + FNFError = FileNotFoundError +except NameError: + FNFError = IOError + + +TIME_RE = re.compile(r"([0-9]{2}):([0-9]{2}):([0-9]{2})(\.([0-9]{3,6}))?") + + +class TomlDecodeError(ValueError): + """Base toml Exception / Error.""" + + def __init__(self, msg, doc, pos): + lineno = doc.count('\n', 0, pos) + 1 + colno = pos - doc.rfind('\n', 0, 
pos) + emsg = '{} (line {} column {} char {})'.format(msg, lineno, colno, pos) + ValueError.__init__(self, emsg) + self.msg = msg + self.doc = doc + self.pos = pos + self.lineno = lineno + self.colno = colno + + +# Matches a TOML number, which allows underscores for readability +_number_with_underscores = re.compile('([0-9])(_([0-9]))*') + + +class CommentValue(object): + def __init__(self, val, comment, beginline, _dict): + self.val = val + separator = "\n" if beginline else " " + self.comment = separator + comment + self._dict = _dict + + def __getitem__(self, key): + return self.val[key] + + def __setitem__(self, key, value): + self.val[key] = value + + def dump(self, dump_value_func): + retstr = dump_value_func(self.val) + if isinstance(self.val, self._dict): + return self.comment + "\n" + unicode(retstr) + else: + return unicode(retstr) + self.comment + + +def _strictly_valid_num(n): + n = n.strip() + if not n: + return False + if n[0] == '_': + return False + if n[-1] == '_': + return False + if "_." in n or "._" in n: + return False + if len(n) == 1: + return True + if n[0] == '0' and n[1] not in ['.', 'o', 'b', 'x']: + return False + if n[0] == '+' or n[0] == '-': + n = n[1:] + if len(n) > 1 and n[0] == '0' and n[1] != '.': + return False + if '__' in n: + return False + return True + + +def load(f, _dict=dict, decoder=None): + """Parses named file or files as toml and returns a dictionary + + Args: + f: Path to the file to open, array of files to read into single dict + or a file descriptor + _dict: (optional) Specifies the class of the returned toml dictionary + decoder: The decoder to use + + Returns: + Parsed toml file represented as a dictionary + + Raises: + TypeError -- When f is invalid type + TomlDecodeError: Error while decoding toml + IOError / FileNotFoundError -- When an array with no valid (existing) + (Python 2 / Python 3) file paths is passed + """ + + if _ispath(f): + with io.open(_getpath(f), encoding='utf-8') as ffile: + return loads(ffile.read(), _dict, decoder) + elif isinstance(f, list): + from os import path as op + from warnings import warn + if not [path for path in f if op.exists(path)]: + error_msg = "Load expects a list to contain filenames only." 
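The `load`/`loads` entry points above return a plain dict (or whatever `_dict` class is supplied), and `TomlDecodeError` carries the character offset plus the line and column computed in its constructor. A small round-trip and error-handling sketch, assuming the standalone `toml` package rather than the vendored `pip._vendor.toml` copy:

```python
import toml

doc = """
title = "AutoTimer"

[tracking]
interval_seconds = 5
browsers = ["chrome", "firefox"]
"""

data = toml.loads(doc)
assert data["tracking"]["interval_seconds"] == 5
print(toml.dumps(data))          # dict -> TOML text

try:
    toml.loads('name = "unterminated')
except toml.TomlDecodeError as exc:
    # lineno/colno come from the doc.count('\n') / doc.rfind('\n') logic above.
    print("parse error at line %d, column %d: %s" % (exc.lineno, exc.colno, exc.msg))
```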
+ error_msg += linesep + error_msg += ("The list needs to contain the path of at least one " + "existing file.") + raise FNFError(error_msg) + if decoder is None: + decoder = TomlDecoder(_dict) + d = decoder.get_empty_table() + for l in f: # noqa: E741 + if op.exists(l): + d.update(load(l, _dict, decoder)) + else: + warn("Non-existent filename in list with at least one valid " + "filename") + return d + else: + try: + return loads(f.read(), _dict, decoder) + except AttributeError: + raise TypeError("You can only load a file descriptor, filename or " + "list") + + +_groupname_re = re.compile(r'^[A-Za-z0-9_-]+$') + + +def loads(s, _dict=dict, decoder=None): + """Parses string as toml + + Args: + s: String to be parsed + _dict: (optional) Specifies the class of the returned toml dictionary + + Returns: + Parsed toml file represented as a dictionary + + Raises: + TypeError: When a non-string is passed + TomlDecodeError: Error while decoding toml + """ + + implicitgroups = [] + if decoder is None: + decoder = TomlDecoder(_dict) + retval = decoder.get_empty_table() + currentlevel = retval + if not isinstance(s, basestring): + raise TypeError("Expecting something like a string") + + if not isinstance(s, unicode): + s = s.decode('utf8') + + original = s + sl = list(s) + openarr = 0 + openstring = False + openstrchar = "" + multilinestr = False + arrayoftables = False + beginline = True + keygroup = False + dottedkey = False + keyname = 0 + key = '' + prev_key = '' + line_no = 1 + + for i, item in enumerate(sl): + if item == '\r' and sl[i + 1] == '\n': + sl[i] = ' ' + continue + if keyname: + key += item + if item == '\n': + raise TomlDecodeError("Key name found without value." + " Reached end of line.", original, i) + if openstring: + if item == openstrchar: + oddbackslash = False + k = 1 + while i >= k and sl[i - k] == '\\': + oddbackslash = not oddbackslash + k += 1 + if not oddbackslash: + keyname = 2 + openstring = False + openstrchar = "" + continue + elif keyname == 1: + if item.isspace(): + keyname = 2 + continue + elif item == '.': + dottedkey = True + continue + elif item.isalnum() or item == '_' or item == '-': + continue + elif (dottedkey and sl[i - 1] == '.' and + (item == '"' or item == "'")): + openstring = True + openstrchar = item + continue + elif keyname == 2: + if item.isspace(): + if dottedkey: + nextitem = sl[i + 1] + if not nextitem.isspace() and nextitem != '.': + keyname = 1 + continue + if item == '.': + dottedkey = True + nextitem = sl[i + 1] + if not nextitem.isspace() and nextitem != '.': + keyname = 1 + continue + if item == '=': + keyname = 0 + prev_key = key[:-1].rstrip() + key = '' + dottedkey = False + else: + raise TomlDecodeError("Found invalid character in key name: '" + + item + "'. 
Try quoting the key name.", + original, i) + if item == "'" and openstrchar != '"': + k = 1 + try: + while sl[i - k] == "'": + k += 1 + if k == 3: + break + except IndexError: + pass + if k == 3: + multilinestr = not multilinestr + openstring = multilinestr + else: + openstring = not openstring + if openstring: + openstrchar = "'" + else: + openstrchar = "" + if item == '"' and openstrchar != "'": + oddbackslash = False + k = 1 + tripquote = False + try: + while sl[i - k] == '"': + k += 1 + if k == 3: + tripquote = True + break + if k == 1 or (k == 3 and tripquote): + while sl[i - k] == '\\': + oddbackslash = not oddbackslash + k += 1 + except IndexError: + pass + if not oddbackslash: + if tripquote: + multilinestr = not multilinestr + openstring = multilinestr + else: + openstring = not openstring + if openstring: + openstrchar = '"' + else: + openstrchar = "" + if item == '#' and (not openstring and not keygroup and + not arrayoftables): + j = i + comment = "" + try: + while sl[j] != '\n': + comment += s[j] + sl[j] = ' ' + j += 1 + except IndexError: + break + if not openarr: + decoder.preserve_comment(line_no, prev_key, comment, beginline) + if item == '[' and (not openstring and not keygroup and + not arrayoftables): + if beginline: + if len(sl) > i + 1 and sl[i + 1] == '[': + arrayoftables = True + else: + keygroup = True + else: + openarr += 1 + if item == ']' and not openstring: + if keygroup: + keygroup = False + elif arrayoftables: + if sl[i - 1] == ']': + arrayoftables = False + else: + openarr -= 1 + if item == '\n': + if openstring or multilinestr: + if not multilinestr: + raise TomlDecodeError("Unbalanced quotes", original, i) + if ((sl[i - 1] == "'" or sl[i - 1] == '"') and ( + sl[i - 2] == sl[i - 1])): + sl[i] = sl[i - 1] + if sl[i - 3] == sl[i - 1]: + sl[i - 3] = ' ' + elif openarr: + sl[i] = ' ' + else: + beginline = True + line_no += 1 + elif beginline and sl[i] != ' ' and sl[i] != '\t': + beginline = False + if not keygroup and not arrayoftables: + if sl[i] == '=': + raise TomlDecodeError("Found empty keyname. ", original, i) + keyname = 1 + key += item + if keyname: + raise TomlDecodeError("Key name found without value." + " Reached end of file.", original, len(s)) + if openstring: # reached EOF and have an unterminated string + raise TomlDecodeError("Unterminated string found." 
+ " Reached end of file.", original, len(s)) + s = ''.join(sl) + s = s.split('\n') + multikey = None + multilinestr = "" + multibackslash = False + pos = 0 + for idx, line in enumerate(s): + if idx > 0: + pos += len(s[idx - 1]) + 1 + + decoder.embed_comments(idx, currentlevel) + + if not multilinestr or multibackslash or '\n' not in multilinestr: + line = line.strip() + if line == "" and (not multikey or multibackslash): + continue + if multikey: + if multibackslash: + multilinestr += line + else: + multilinestr += line + multibackslash = False + closed = False + if multilinestr[0] == '[': + closed = line[-1] == ']' + elif len(line) > 2: + closed = (line[-1] == multilinestr[0] and + line[-2] == multilinestr[0] and + line[-3] == multilinestr[0]) + if closed: + try: + value, vtype = decoder.load_value(multilinestr) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + currentlevel[multikey] = value + multikey = None + multilinestr = "" + else: + k = len(multilinestr) - 1 + while k > -1 and multilinestr[k] == '\\': + multibackslash = not multibackslash + k -= 1 + if multibackslash: + multilinestr = multilinestr[:-1] + else: + multilinestr += "\n" + continue + if line[0] == '[': + arrayoftables = False + if len(line) == 1: + raise TomlDecodeError("Opening key group bracket on line by " + "itself.", original, pos) + if line[1] == '[': + arrayoftables = True + line = line[2:] + splitstr = ']]' + else: + line = line[1:] + splitstr = ']' + i = 1 + quotesplits = decoder._get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and splitstr in quotesplit: + break + i += quotesplit.count(splitstr) + quoted = not quoted + line = line.split(splitstr, i) + if len(line) < i + 1 or line[-1].strip() != "": + raise TomlDecodeError("Key group not on a line by itself.", + original, pos) + groups = splitstr.join(line[:-1]).split('.') + i = 0 + while i < len(groups): + groups[i] = groups[i].strip() + if len(groups[i]) > 0 and (groups[i][0] == '"' or + groups[i][0] == "'"): + groupstr = groups[i] + j = i + 1 + while ((not groupstr[0] == groupstr[-1]) or + len(groupstr) == 1): + j += 1 + if j > len(groups) + 2: + raise TomlDecodeError("Invalid group name '" + + groupstr + "' Something " + + "went wrong.", original, pos) + groupstr = '.'.join(groups[i:j]).strip() + groups[i] = groupstr[1:-1] + groups[i + 1:j] = [] + else: + if not _groupname_re.match(groups[i]): + raise TomlDecodeError("Invalid group name '" + + groups[i] + "'. Try quoting it.", + original, pos) + i += 1 + currentlevel = retval + for i in _range(len(groups)): + group = groups[i] + if group == "": + raise TomlDecodeError("Can't have a keygroup with an empty " + "name", original, pos) + try: + currentlevel[group] + if i == len(groups) - 1: + if group in implicitgroups: + implicitgroups.remove(group) + if arrayoftables: + raise TomlDecodeError("An implicitly defined " + "table can't be an array", + original, pos) + elif arrayoftables: + currentlevel[group].append(decoder.get_empty_table() + ) + else: + raise TomlDecodeError("What? " + group + + " already exists?" 
+ + str(currentlevel), + original, pos) + except TypeError: + currentlevel = currentlevel[-1] + if group not in currentlevel: + currentlevel[group] = decoder.get_empty_table() + if i == len(groups) - 1 and arrayoftables: + currentlevel[group] = [decoder.get_empty_table()] + except KeyError: + if i != len(groups) - 1: + implicitgroups.append(group) + currentlevel[group] = decoder.get_empty_table() + if i == len(groups) - 1 and arrayoftables: + currentlevel[group] = [decoder.get_empty_table()] + currentlevel = currentlevel[group] + if arrayoftables: + try: + currentlevel = currentlevel[-1] + except KeyError: + pass + elif line[0] == "{": + if line[-1] != "}": + raise TomlDecodeError("Line breaks are not allowed in inline" + "objects", original, pos) + try: + decoder.load_inline_object(line, currentlevel, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + elif "=" in line: + try: + ret = decoder.load_line(line, currentlevel, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + if ret is not None: + multikey, multilinestr, multibackslash = ret + return retval + + +def _load_date(val): + microsecond = 0 + tz = None + try: + if len(val) > 19: + if val[19] == '.': + if val[-1].upper() == 'Z': + subsecondval = val[20:-1] + tzval = "Z" + else: + subsecondvalandtz = val[20:] + if '+' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('+') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + elif '-' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('-') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + else: + tzval = None + subsecondval = subsecondvalandtz + if tzval is not None: + tz = TomlTz(tzval) + microsecond = int(int(subsecondval) * + (10 ** (6 - len(subsecondval)))) + else: + tz = TomlTz(val[19:]) + except ValueError: + tz = None + if "-" not in val[1:]: + return None + try: + if len(val) == 10: + d = datetime.date( + int(val[:4]), int(val[5:7]), + int(val[8:10])) + else: + d = datetime.datetime( + int(val[:4]), int(val[5:7]), + int(val[8:10]), int(val[11:13]), + int(val[14:16]), int(val[17:19]), microsecond, tz) + except ValueError: + return None + return d + + +def _load_unicode_escapes(v, hexbytes, prefix): + skip = False + i = len(v) - 1 + while i > -1 and v[i] == '\\': + skip = not skip + i -= 1 + for hx in hexbytes: + if skip: + skip = False + i = len(hx) - 1 + while i > -1 and hx[i] == '\\': + skip = not skip + i -= 1 + v += prefix + v += hx + continue + hxb = "" + i = 0 + hxblen = 4 + if prefix == "\\U": + hxblen = 8 + hxb = ''.join(hx[i:i + hxblen]).lower() + if hxb.strip('0123456789abcdef'): + raise ValueError("Invalid escape sequence: " + hxb) + if hxb[0] == "d" and hxb[1].strip('01234567'): + raise ValueError("Invalid escape sequence: " + hxb + + ". Only scalar unicode points are allowed.") + v += unichr(int(hxb, 16)) + v += unicode(hx[len(hxb):]) + return v + + +# Unescape TOML string values. 
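The `[[...]]` branch in `loads` above implements TOML's array-of-tables syntax: every repeated `[[servers]]` header appends a fresh table to a list stored under the same key. For example (standalone `toml` package assumed):

```python
import toml

doc = """
[[servers]]
name = "alpha"
port = 8001

[[servers]]
name = "beta"
port = 8002
"""

data = toml.loads(doc)
assert [s["name"] for s in data["servers"]] == ["alpha", "beta"]
assert data["servers"][1]["port"] == 8002
```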
+ +# content after the \ +_escapes = ['0', 'b', 'f', 'n', 'r', 't', '"'] +# What it should be replaced by +_escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"'] +# Used for substitution +_escape_to_escapedchars = dict(zip(_escapes, _escapedchars)) + + +def _unescape(v): + """Unescape characters in a TOML string.""" + i = 0 + backslash = False + while i < len(v): + if backslash: + backslash = False + if v[i] in _escapes: + v = v[:i - 1] + _escape_to_escapedchars[v[i]] + v[i + 1:] + elif v[i] == '\\': + v = v[:i - 1] + v[i:] + elif v[i] == 'u' or v[i] == 'U': + i += 1 + else: + raise ValueError("Reserved escape sequence used") + continue + elif v[i] == '\\': + backslash = True + i += 1 + return v + + +class InlineTableDict(object): + """Sentinel subclass of dict for inline tables.""" + + +class TomlDecoder(object): + + def __init__(self, _dict=dict): + self._dict = _dict + + def get_empty_table(self): + return self._dict() + + def get_empty_inline_table(self): + class DynamicInlineTableDict(self._dict, InlineTableDict): + """Concrete sentinel subclass for inline tables. + It is a subclass of _dict which is passed in dynamically at load + time + + It is also a subclass of InlineTableDict + """ + + return DynamicInlineTableDict() + + def load_inline_object(self, line, currentlevel, multikey=False, + multibackslash=False): + candidate_groups = line[1:-1].split(",") + groups = [] + if len(candidate_groups) == 1 and not candidate_groups[0].strip(): + candidate_groups.pop() + while len(candidate_groups) > 0: + candidate_group = candidate_groups.pop(0) + try: + _, value = candidate_group.split('=', 1) + except ValueError: + raise ValueError("Invalid inline table encountered") + value = value.strip() + if ((value[0] == value[-1] and value[0] in ('"', "'")) or ( + value[0] in '-0123456789' or + value in ('true', 'false') or + (value[0] == "[" and value[-1] == "]") or + (value[0] == '{' and value[-1] == '}'))): + groups.append(candidate_group) + elif len(candidate_groups) > 0: + candidate_groups[0] = (candidate_group + "," + + candidate_groups[0]) + else: + raise ValueError("Invalid inline table value encountered") + for group in groups: + status = self.load_line(group, currentlevel, multikey, + multibackslash) + if status is not None: + break + + def _get_split_on_quotes(self, line): + doublequotesplits = line.split('"') + quoted = False + quotesplits = [] + if len(doublequotesplits) > 1 and "'" in doublequotesplits[0]: + singlequotesplits = doublequotesplits[0].split("'") + doublequotesplits = doublequotesplits[1:] + while len(singlequotesplits) % 2 == 0 and len(doublequotesplits): + singlequotesplits[-1] += '"' + doublequotesplits[0] + doublequotesplits = doublequotesplits[1:] + if "'" in singlequotesplits[-1]: + singlequotesplits = (singlequotesplits[:-1] + + singlequotesplits[-1].split("'")) + quotesplits += singlequotesplits + for doublequotesplit in doublequotesplits: + if quoted: + quotesplits.append(doublequotesplit) + else: + quotesplits += doublequotesplit.split("'") + quoted = not quoted + return quotesplits + + def load_line(self, line, currentlevel, multikey, multibackslash): + i = 1 + quotesplits = self._get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and '=' in quotesplit: + break + i += quotesplit.count('=') + quoted = not quoted + pair = line.split('=', i) + strictly_valid = _strictly_valid_num(pair[-1]) + if _number_with_underscores.match(pair[-1]): + pair[-1] = pair[-1].replace('_', '') + while len(pair[-1]) and (pair[-1][0] != ' ' 
and pair[-1][0] != '\t' and + pair[-1][0] != "'" and pair[-1][0] != '"' and + pair[-1][0] != '[' and pair[-1][0] != '{' and + pair[-1].strip() != 'true' and + pair[-1].strip() != 'false'): + try: + float(pair[-1]) + break + except ValueError: + pass + if _load_date(pair[-1]) is not None: + break + if TIME_RE.match(pair[-1]): + break + i += 1 + prev_val = pair[-1] + pair = line.split('=', i) + if prev_val == pair[-1]: + raise ValueError("Invalid date or number") + if strictly_valid: + strictly_valid = _strictly_valid_num(pair[-1]) + pair = ['='.join(pair[:-1]).strip(), pair[-1].strip()] + if '.' in pair[0]: + if '"' in pair[0] or "'" in pair[0]: + quotesplits = self._get_split_on_quotes(pair[0]) + quoted = False + levels = [] + for quotesplit in quotesplits: + if quoted: + levels.append(quotesplit) + else: + levels += [level.strip() for level in + quotesplit.split('.')] + quoted = not quoted + else: + levels = pair[0].split('.') + while levels[-1] == "": + levels = levels[:-1] + for level in levels[:-1]: + if level == "": + continue + if level not in currentlevel: + currentlevel[level] = self.get_empty_table() + currentlevel = currentlevel[level] + pair[0] = levels[-1].strip() + elif (pair[0][0] == '"' or pair[0][0] == "'") and \ + (pair[0][-1] == pair[0][0]): + pair[0] = _unescape(pair[0][1:-1]) + k, koffset = self._load_line_multiline_str(pair[1]) + if k > -1: + while k > -1 and pair[1][k + koffset] == '\\': + multibackslash = not multibackslash + k -= 1 + if multibackslash: + multilinestr = pair[1][:-1] + else: + multilinestr = pair[1] + "\n" + multikey = pair[0] + else: + value, vtype = self.load_value(pair[1], strictly_valid) + try: + currentlevel[pair[0]] + raise ValueError("Duplicate keys!") + except TypeError: + raise ValueError("Duplicate keys!") + except KeyError: + if multikey: + return multikey, multilinestr, multibackslash + else: + currentlevel[pair[0]] = value + + def _load_line_multiline_str(self, p): + poffset = 0 + if len(p) < 3: + return -1, poffset + if p[0] == '[' and (p.strip()[-1] != ']' and + self._load_array_isstrarray(p)): + newp = p[1:].strip().split(',') + while len(newp) > 1 and newp[-1][0] != '"' and newp[-1][0] != "'": + newp = newp[:-2] + [newp[-2] + ',' + newp[-1]] + newp = newp[-1] + poffset = len(p) - len(newp) + p = newp + if p[0] != '"' and p[0] != "'": + return -1, poffset + if p[1] != p[0] or p[2] != p[0]: + return -1, poffset + if len(p) > 5 and p[-1] == p[0] and p[-2] == p[0] and p[-3] == p[0]: + return -1, poffset + return len(p) - 1, poffset + + def load_value(self, v, strictly_valid=True): + if not v: + raise ValueError("Empty value is invalid") + if v == 'true': + return (True, "bool") + elif v.lower() == 'true': + raise ValueError("Only all lowercase booleans allowed") + elif v == 'false': + return (False, "bool") + elif v.lower() == 'false': + raise ValueError("Only all lowercase booleans allowed") + elif v[0] == '"' or v[0] == "'": + quotechar = v[0] + testv = v[1:].split(quotechar) + triplequote = False + triplequotecount = 0 + if len(testv) > 1 and testv[0] == '' and testv[1] == '': + testv = testv[2:] + triplequote = True + closed = False + for tv in testv: + if tv == '': + if triplequote: + triplequotecount += 1 + else: + closed = True + else: + oddbackslash = False + try: + i = -1 + j = tv[i] + while j == '\\': + oddbackslash = not oddbackslash + i -= 1 + j = tv[i] + except IndexError: + pass + if not oddbackslash: + if closed: + raise ValueError("Found tokens after a closed " + + "string. 
Invalid TOML.") + else: + if not triplequote or triplequotecount > 1: + closed = True + else: + triplequotecount = 0 + if quotechar == '"': + escapeseqs = v.split('\\')[1:] + backslash = False + for i in escapeseqs: + if i == '': + backslash = not backslash + else: + if i[0] not in _escapes and (i[0] != 'u' and + i[0] != 'U' and + not backslash): + raise ValueError("Reserved escape sequence used") + if backslash: + backslash = False + for prefix in ["\\u", "\\U"]: + if prefix in v: + hexbytes = v.split(prefix) + v = _load_unicode_escapes(hexbytes[0], hexbytes[1:], + prefix) + v = _unescape(v) + if len(v) > 1 and v[1] == quotechar and (len(v) < 3 or + v[1] == v[2]): + v = v[2:-2] + return (v[1:-1], "str") + elif v[0] == '[': + return (self.load_array(v), "array") + elif v[0] == '{': + inline_object = self.get_empty_inline_table() + self.load_inline_object(v, inline_object) + return (inline_object, "inline_object") + elif TIME_RE.match(v): + h, m, s, _, ms = TIME_RE.match(v).groups() + time = datetime.time(int(h), int(m), int(s), int(ms) if ms else 0) + return (time, "time") + else: + parsed_date = _load_date(v) + if parsed_date is not None: + return (parsed_date, "date") + if not strictly_valid: + raise ValueError("Weirdness with leading zeroes or " + "underscores in your number.") + itype = "int" + neg = False + if v[0] == '-': + neg = True + v = v[1:] + elif v[0] == '+': + v = v[1:] + v = v.replace('_', '') + lowerv = v.lower() + if '.' in v or ('x' not in v and ('e' in v or 'E' in v)): + if '.' in v and v.split('.', 1)[1] == '': + raise ValueError("This float is missing digits after " + "the point") + if v[0] not in '0123456789': + raise ValueError("This float doesn't have a leading " + "digit") + v = float(v) + itype = "float" + elif len(lowerv) == 3 and (lowerv == 'inf' or lowerv == 'nan'): + v = float(v) + itype = "float" + if itype == "int": + v = int(v, 0) + if neg: + return (0 - v, itype) + return (v, itype) + + def bounded_string(self, s): + if len(s) == 0: + return True + if s[-1] != s[0]: + return False + i = -2 + backslash = False + while len(s) + i > 0: + if s[i] == "\\": + backslash = not backslash + i -= 1 + else: + break + return not backslash + + def _load_array_isstrarray(self, a): + a = a[1:-1].strip() + if a != '' and (a[0] == '"' or a[0] == "'"): + return True + return False + + def load_array(self, a): + atype = None + retval = [] + a = a.strip() + if '[' not in a[1:-1] or "" != a[1:-1].split('[')[0].strip(): + strarray = self._load_array_isstrarray(a) + if not a[1:-1].strip().startswith('{'): + a = a[1:-1].split(',') + else: + # a is an inline object, we must find the matching parenthesis + # to define groups + new_a = [] + start_group_index = 1 + end_group_index = 2 + open_bracket_count = 1 if a[start_group_index] == '{' else 0 + in_str = False + while end_group_index < len(a[1:]): + if a[end_group_index] == '"' or a[end_group_index] == "'": + if in_str: + backslash_index = end_group_index - 1 + while (backslash_index > -1 and + a[backslash_index] == '\\'): + in_str = not in_str + backslash_index -= 1 + in_str = not in_str + if not in_str and a[end_group_index] == '{': + open_bracket_count += 1 + if in_str or a[end_group_index] != '}': + end_group_index += 1 + continue + elif a[end_group_index] == '}' and open_bracket_count > 1: + open_bracket_count -= 1 + end_group_index += 1 + continue + + # Increase end_group_index by 1 to get the closing bracket + end_group_index += 1 + + new_a.append(a[start_group_index:end_group_index]) + + # The next start index is at least 
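`load_value` above is the scalar dispatcher: underscores are stripped from numbers, `0x`/`0o`/`0b` literals go through `int(v, 0)`, and dates and times become `datetime` objects. A quick illustration of the resulting Python types (standalone `toml` package assumed):

```python
import datetime
import toml

data = toml.loads("""
big = 1_000_000
mask = 0xff
ratio = 2.5e-3
flag = true
when = 1979-05-27T07:32:00Z
""")

assert data["big"] == 1000000 and data["mask"] == 255
assert isinstance(data["ratio"], float) and data["flag"] is True
assert isinstance(data["when"], datetime.datetime)
```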
after the closing + # bracket, a closing bracket can be followed by a comma + # since we are in an array. + start_group_index = end_group_index + 1 + while (start_group_index < len(a[1:]) and + a[start_group_index] != '{'): + start_group_index += 1 + end_group_index = start_group_index + 1 + a = new_a + b = 0 + if strarray: + while b < len(a) - 1: + ab = a[b].strip() + while (not self.bounded_string(ab) or + (len(ab) > 2 and + ab[0] == ab[1] == ab[2] and + ab[-2] != ab[0] and + ab[-3] != ab[0])): + a[b] = a[b] + ',' + a[b + 1] + ab = a[b].strip() + if b < len(a) - 2: + a = a[:b + 1] + a[b + 2:] + else: + a = a[:b + 1] + b += 1 + else: + al = list(a[1:-1]) + a = [] + openarr = 0 + j = 0 + for i in _range(len(al)): + if al[i] == '[': + openarr += 1 + elif al[i] == ']': + openarr -= 1 + elif al[i] == ',' and not openarr: + a.append(''.join(al[j:i])) + j = i + 1 + a.append(''.join(al[j:])) + for i in _range(len(a)): + a[i] = a[i].strip() + if a[i] != '': + nval, ntype = self.load_value(a[i]) + if atype: + if ntype != atype: + raise ValueError("Not a homogeneous array") + else: + atype = ntype + retval.append(nval) + return retval + + def preserve_comment(self, line_no, key, comment, beginline): + pass + + def embed_comments(self, idx, currentlevel): + pass + + +class TomlPreserveCommentDecoder(TomlDecoder): + + def __init__(self, _dict=dict): + self.saved_comments = {} + super(TomlPreserveCommentDecoder, self).__init__(_dict) + + def preserve_comment(self, line_no, key, comment, beginline): + self.saved_comments[line_no] = (key, comment, beginline) + + def embed_comments(self, idx, currentlevel): + if idx not in self.saved_comments: + return + + key, comment, beginline = self.saved_comments[idx] + currentlevel[key] = CommentValue(currentlevel[key], comment, beginline, + self._dict) diff --git a/venv/Lib/site-packages/pip/_vendor/toml/encoder.py b/venv/Lib/site-packages/pip/_vendor/toml/encoder.py new file mode 100644 index 00000000..7fb94da9 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/toml/encoder.py @@ -0,0 +1,304 @@ +import datetime +import re +import sys +from decimal import Decimal + +from pip._vendor.toml.decoder import InlineTableDict + +if sys.version_info >= (3,): + unicode = str + + +def dump(o, f, encoder=None): + """Writes out dict as toml to a file + + Args: + o: Object to dump into toml + f: File descriptor where the toml should be stored + encoder: The ``TomlEncoder`` to use for constructing the output string + + Returns: + String containing the toml corresponding to dictionary + + Raises: + TypeError: When anything other than file descriptor is passed + """ + + if not f.write: + raise TypeError("You can only dump an object to a file descriptor") + d = dumps(o, encoder=encoder) + f.write(d) + return d + + +def dumps(o, encoder=None): + """Stringifies input dict as toml + + Args: + o: Object to dump into toml + encoder: The ``TomlEncoder`` to use for constructing the output string + + Returns: + String containing the toml corresponding to dict + + Examples: + ```python + >>> import toml + >>> output = { + ... 'a': "I'm a string", + ... 'b': ["I'm", "a", "list"], + ... 'c': 2400 + ... 
} + >>> toml.dumps(output) + 'a = "I\'m a string"\nb = [ "I\'m", "a", "list",]\nc = 2400\n' + ``` + """ + + retval = "" + if encoder is None: + encoder = TomlEncoder(o.__class__) + addtoretval, sections = encoder.dump_sections(o, "") + retval += addtoretval + outer_objs = [id(o)] + while sections: + section_ids = [id(section) for section in sections.values()] + for outer_obj in outer_objs: + if outer_obj in section_ids: + raise ValueError("Circular reference detected") + outer_objs += section_ids + newsections = encoder.get_empty_table() + for section in sections: + addtoretval, addtosections = encoder.dump_sections( + sections[section], section) + + if addtoretval or (not addtoretval and not addtosections): + if retval and retval[-2:] != "\n\n": + retval += "\n" + retval += "[" + section + "]\n" + if addtoretval: + retval += addtoretval + for s in addtosections: + newsections[section + "." + s] = addtosections[s] + sections = newsections + return retval + + +def _dump_str(v): + if sys.version_info < (3,) and hasattr(v, 'decode') and isinstance(v, str): + v = v.decode('utf-8') + v = "%r" % v + if v[0] == 'u': + v = v[1:] + singlequote = v.startswith("'") + if singlequote or v.startswith('"'): + v = v[1:-1] + if singlequote: + v = v.replace("\\'", "'") + v = v.replace('"', '\\"') + v = v.split("\\x") + while len(v) > 1: + i = -1 + if not v[0]: + v = v[1:] + v[0] = v[0].replace("\\\\", "\\") + # No, I don't know why != works and == breaks + joinx = v[0][i] != "\\" + while v[0][:i] and v[0][i] == "\\": + joinx = not joinx + i -= 1 + if joinx: + joiner = "x" + else: + joiner = "u00" + v = [v[0] + joiner + v[1]] + v[2:] + return unicode('"' + v[0] + '"') + + +def _dump_float(v): + return "{}".format(v).replace("e+0", "e+").replace("e-0", "e-") + + +def _dump_time(v): + utcoffset = v.utcoffset() + if utcoffset is None: + return v.isoformat() + # The TOML norm specifies that it's local time thus we drop the offset + return v.isoformat()[:-6] + + +class TomlEncoder(object): + + def __init__(self, _dict=dict, preserve=False): + self._dict = _dict + self.preserve = preserve + self.dump_funcs = { + str: _dump_str, + unicode: _dump_str, + list: self.dump_list, + bool: lambda v: unicode(v).lower(), + int: lambda v: v, + float: _dump_float, + Decimal: _dump_float, + datetime.datetime: lambda v: v.isoformat().replace('+00:00', 'Z'), + datetime.time: _dump_time, + datetime.date: lambda v: v.isoformat() + } + + def get_empty_table(self): + return self._dict() + + def dump_list(self, v): + retval = "[" + for u in v: + retval += " " + unicode(self.dump_value(u)) + "," + retval += "]" + return retval + + def dump_inline_table(self, section): + """Preserve inline table in its compact syntax instead of expanding + into subsection. + + https://github.com/toml-lang/toml#user-content-inline-table + """ + retval = "" + if isinstance(section, dict): + val_list = [] + for k, v in section.items(): + val = self.dump_inline_table(v) + val_list.append(k + " = " + val) + retval += "{ " + ", ".join(val_list) + " }\n" + return retval + else: + return unicode(self.dump_value(section)) + + def dump_value(self, v): + # Lookup function corresponding to v's type + dump_fn = self.dump_funcs.get(type(v)) + if dump_fn is None and hasattr(v, '__iter__'): + dump_fn = self.dump_funcs[list] + # Evaluate function (if it exists) else return v + return dump_fn(v) if dump_fn is not None else self.dump_funcs[str](v) + + def dump_sections(self, o, sup): + retstr = "" + if sup != "" and sup[-1] != ".": + sup += '.' 
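`dump_sections` above is the inverse of the table parsing in the decoder: nested plain dicts become `[section]` and `[section.sub]` headers, scalars stay on `key = value` lines, and `dump_value` dispatches on type through `dump_funcs`. Roughly, with the standalone `toml` package:

```python
import toml

print(toml.dumps({"owner": {"name": "kalle", "tabs": {"width": 4}}}))
# [owner]
# name = "kalle"
#
# [owner.tabs]
# width = 4
```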
+ retdict = self._dict() + arraystr = "" + for section in o: + section = unicode(section) + qsection = section + if not re.match(r'^[A-Za-z0-9_-]+$', section): + qsection = _dump_str(section) + if not isinstance(o[section], dict): + arrayoftables = False + if isinstance(o[section], list): + for a in o[section]: + if isinstance(a, dict): + arrayoftables = True + if arrayoftables: + for a in o[section]: + arraytabstr = "\n" + arraystr += "[[" + sup + qsection + "]]\n" + s, d = self.dump_sections(a, sup + qsection) + if s: + if s[0] == "[": + arraytabstr += s + else: + arraystr += s + while d: + newd = self._dict() + for dsec in d: + s1, d1 = self.dump_sections(d[dsec], sup + + qsection + "." + + dsec) + if s1: + arraytabstr += ("[" + sup + qsection + + "." + dsec + "]\n") + arraytabstr += s1 + for s1 in d1: + newd[dsec + "." + s1] = d1[s1] + d = newd + arraystr += arraytabstr + else: + if o[section] is not None: + retstr += (qsection + " = " + + unicode(self.dump_value(o[section])) + '\n') + elif self.preserve and isinstance(o[section], InlineTableDict): + retstr += (qsection + " = " + + self.dump_inline_table(o[section])) + else: + retdict[qsection] = o[section] + retstr += arraystr + return (retstr, retdict) + + +class TomlPreserveInlineDictEncoder(TomlEncoder): + + def __init__(self, _dict=dict): + super(TomlPreserveInlineDictEncoder, self).__init__(_dict, True) + + +class TomlArraySeparatorEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False, separator=","): + super(TomlArraySeparatorEncoder, self).__init__(_dict, preserve) + if separator.strip() == "": + separator = "," + separator + elif separator.strip(' \t\n\r,'): + raise ValueError("Invalid separator for arrays") + self.separator = separator + + def dump_list(self, v): + t = [] + retval = "[" + for u in v: + t.append(self.dump_value(u)) + while t != []: + s = [] + for u in t: + if isinstance(u, list): + for r in u: + s.append(r) + else: + retval += " " + unicode(u) + self.separator + t = s + retval += "]" + return retval + + +class TomlNumpyEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False): + import numpy as np + super(TomlNumpyEncoder, self).__init__(_dict, preserve) + self.dump_funcs[np.float16] = _dump_float + self.dump_funcs[np.float32] = _dump_float + self.dump_funcs[np.float64] = _dump_float + self.dump_funcs[np.int16] = self._dump_int + self.dump_funcs[np.int32] = self._dump_int + self.dump_funcs[np.int64] = self._dump_int + + def _dump_int(self, v): + return "{}".format(int(v)) + + +class TomlPreserveCommentEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False): + from pip._vendor.toml.decoder import CommentValue + super(TomlPreserveCommentEncoder, self).__init__(_dict, preserve) + self.dump_funcs[CommentValue] = lambda v: v.dump(self.dump_value) + + +class TomlPathlibEncoder(TomlEncoder): + + def _dump_pathlib_path(self, v): + return _dump_str(str(v)) + + def dump_value(self, v): + if (3, 4) <= sys.version_info: + import pathlib + if isinstance(v, pathlib.PurePath): + v = str(v) + return super(TomlPathlibEncoder, self).dump_value(v) diff --git a/venv/Lib/site-packages/pip/_vendor/toml/ordered.py b/venv/Lib/site-packages/pip/_vendor/toml/ordered.py new file mode 100644 index 00000000..6052016e --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/toml/ordered.py @@ -0,0 +1,15 @@ +from collections import OrderedDict +from pip._vendor.toml import TomlEncoder +from pip._vendor.toml import TomlDecoder + + +class TomlOrderedDecoder(TomlDecoder): + + def __init__(self): 
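`TomlPreserveCommentDecoder` stores comments in `CommentValue` wrappers, and `TomlPreserveCommentEncoder` registers a `dump_funcs` entry that writes them back out, so the pair gives a comment-preserving round trip. A small sketch (standalone `toml` package assumed; exact whitespace in the output may differ):

```python
import toml

src = "interval = 5  # seconds between samples\n"
data = toml.loads(src, decoder=toml.TomlPreserveCommentDecoder())
out = toml.dumps(data, encoder=toml.TomlPreserveCommentEncoder())
print(out)  # interval = 5 # seconds between samples
```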
+ super(self.__class__, self).__init__(_dict=OrderedDict) + + +class TomlOrderedEncoder(TomlEncoder): + + def __init__(self): + super(self.__class__, self).__init__(_dict=OrderedDict) diff --git a/venv/Lib/site-packages/pip/_vendor/toml/tz.py b/venv/Lib/site-packages/pip/_vendor/toml/tz.py new file mode 100644 index 00000000..bf20593a --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/toml/tz.py @@ -0,0 +1,24 @@ +from datetime import tzinfo, timedelta + + +class TomlTz(tzinfo): + def __init__(self, toml_offset): + if toml_offset == "Z": + self._raw_offset = "+00:00" + else: + self._raw_offset = toml_offset + self._sign = -1 if self._raw_offset[0] == '-' else 1 + self._hours = int(self._raw_offset[1:3]) + self._minutes = int(self._raw_offset[4:6]) + + def __deepcopy__(self, memo): + return self.__class__(self._raw_offset) + + def tzname(self, dt): + return "UTC" + self._raw_offset + + def utcoffset(self, dt): + return self._sign * timedelta(hours=self._hours, minutes=self._minutes) + + def dst(self, dt): + return timedelta(0) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__init__.py b/venv/Lib/site-packages/pip/_vendor/urllib3/__init__.py new file mode 100644 index 00000000..fe86b59d --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/__init__.py @@ -0,0 +1,85 @@ +""" +Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more +""" +from __future__ import absolute_import + +# Set default logging handler to avoid "No handler found" warnings. +import logging +import warnings +from logging import NullHandler + +from . import exceptions +from ._version import __version__ +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url +from .filepost import encode_multipart_formdata +from .poolmanager import PoolManager, ProxyManager, proxy_from_url +from .response import HTTPResponse +from .util.request import make_headers +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import get_host + +__author__ = "Andrey Petrov (andrey.petrov@shazow.net)" +__license__ = "MIT" +__version__ = __version__ + +__all__ = ( + "HTTPConnectionPool", + "HTTPSConnectionPool", + "PoolManager", + "ProxyManager", + "HTTPResponse", + "Retry", + "Timeout", + "add_stderr_logger", + "connection_from_url", + "disable_warnings", + "encode_multipart_formdata", + "get_host", + "make_headers", + "proxy_from_url", +) + +logging.getLogger(__name__).addHandler(NullHandler()) + + +def add_stderr_logger(level=logging.DEBUG): + """ + Helper for quickly adding a StreamHandler to the logger. Useful for + debugging. + + Returns the handler after adding it. + """ + # This method needs to be in this __init__.py to get the __name__ correct + # even if urllib3 is vendored within another package. + logger = logging.getLogger(__name__) + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) + logger.addHandler(handler) + logger.setLevel(level) + logger.debug("Added a stderr logging handler to logger: %s", __name__) + return handler + + +# ... Clean up. +del NullHandler + + +# All warning filters *must* be appended unless you're really certain that they +# shouldn't be: otherwise, it's very hard for users to use most Python +# mechanisms to silence them. +# SecurityWarning's always go off by default. 
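`TomlTz` above is a minimal fixed-offset `tzinfo`: the `+HH:MM`/`-HH:MM`/`Z` suffix of a TOML datetime is split into hours and minutes and surfaces through `utcoffset()`/`tzname()` on the parsed `datetime` (standalone `toml` package assumed):

```python
import toml

data = toml.loads("start = 2021-06-20T15:11:57+02:00\n")
dt = data["start"]
print(dt.utcoffset())   # 2:00:00
print(dt.tzname())      # UTC+02:00
print(dt.dst())         # 0:00:00
```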
+warnings.simplefilter("always", exceptions.SecurityWarning, append=True) +# SubjectAltNameWarning's should go off once per host +warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True) +# InsecurePlatformWarning's don't vary between requests, so we keep it default. +warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) +# SNIMissingWarnings should go off only once. +warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True) + + +def disable_warnings(category=exceptions.HTTPWarning): + """ + Helper for quickly disabling all urllib3 warnings. + """ + warnings.simplefilter("ignore", category) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..1e4f2136 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-36.pyc new file mode 100644 index 00000000..b4d0d0cb Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-36.pyc new file mode 100644 index 00000000..302250fb Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-36.pyc new file mode 100644 index 00000000..7a521e75 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-36.pyc new file mode 100644 index 00000000..183f89b0 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-36.pyc new file mode 100644 index 00000000..e9193f7f Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-36.pyc new file mode 100644 index 00000000..c19bca33 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-36.pyc new file mode 100644 index 00000000..cbcc61b2 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-36.pyc 
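The two helpers above are urllib3's public switches for its own diagnostics: `add_stderr_logger()` attaches a DEBUG stream handler to the library logger, and `disable_warnings()` filters out the `HTTPWarning` categories registered just above. A usage sketch against the standalone `urllib3` package (note this performs a real HTTP request to a placeholder URL):

```python
import urllib3

handler = urllib3.add_stderr_logger()   # returns the handler it attached
urllib3.disable_warnings()              # silences HTTPWarning subclasses

http = urllib3.PoolManager()
resp = http.request("GET", "https://example.com/")
print(resp.status)
```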
new file mode 100644 index 00000000..96a91f51 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-36.pyc new file mode 100644 index 00000000..79aa5394 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-36.pyc new file mode 100644 index 00000000..a4c2967e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.py b/venv/Lib/site-packages/pip/_vendor/urllib3/_collections.py similarity index 90% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/_collections.py index 34f23811..da9857e9 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/_collections.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + try: from collections.abc import Mapping, MutableMapping except ImportError: @@ -6,6 +7,7 @@ try: from threading import RLock except ImportError: # Platform-specific: No threads available + class RLock: def __enter__(self): pass @@ -15,11 +17,12 @@ def __exit__(self, exc_type, exc_value, traceback): from collections import OrderedDict -from .exceptions import InvalidHeader -from .packages.six import iterkeys, itervalues, PY3 +from .exceptions import InvalidHeader +from .packages import six +from .packages.six import iterkeys, itervalues -__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] +__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] _Null = object() @@ -82,7 +85,9 @@ def __len__(self): return len(self._container) def __iter__(self): - raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') + raise NotImplementedError( + "Iteration over this class is unlikely to be threadsafe." + ) def clear(self): with self.lock: @@ -150,7 +155,7 @@ def __setitem__(self, key, val): def __getitem__(self, key): val = self._container[key.lower()] - return ', '.join(val[1:]) + return ", ".join(val[1:]) def __delitem__(self, key): del self._container[key.lower()] @@ -159,17 +164,18 @@ def __contains__(self, key): return key.lower() in self._container def __eq__(self, other): - if not isinstance(other, Mapping) and not hasattr(other, 'keys'): + if not isinstance(other, Mapping) and not hasattr(other, "keys"): return False if not isinstance(other, type(self)): other = type(self)(other) - return (dict((k.lower(), v) for k, v in self.itermerged()) == - dict((k.lower(), v) for k, v in other.itermerged())) + return dict((k.lower(), v) for k, v in self.itermerged()) == dict( + (k.lower(), v) for k, v in other.itermerged() + ) def __ne__(self, other): return not self.__eq__(other) - if not PY3: # Python 2 + if six.PY2: # Python 2 iterkeys = MutableMapping.iterkeys itervalues = MutableMapping.itervalues @@ -184,9 +190,9 @@ def __iter__(self): yield vals[0] def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. 
- If key is not found, d is returned if given, otherwise KeyError is raised. - ''' + """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + """ # Using the MutableMapping function directly fails due to the private marker. # Using ordinary dict.pop would expose the internal structures. # So let's reinvent the wheel. @@ -228,8 +234,10 @@ def extend(self, *args, **kwargs): with self.add instead of self.__setitem__ """ if len(args) > 1: - raise TypeError("extend() takes at most 1 positional " - "arguments ({0} given)".format(len(args))) + raise TypeError( + "extend() takes at most 1 positional " + "arguments ({0} given)".format(len(args)) + ) other = args[0] if len(args) >= 1 else () if isinstance(other, HTTPHeaderDict): @@ -295,7 +303,7 @@ def itermerged(self): """Iterate over all headers, merging duplicate ones together.""" for key in self: val = self._container[key.lower()] - yield val[0], ', '.join(val[1:]) + yield val[0], ", ".join(val[1:]) def items(self): return list(self.iteritems()) @@ -306,7 +314,7 @@ def from_httplib(cls, message): # Python 2 # python2.7 does not expose a proper API for exporting multiheaders # efficiently. This function re-reads raw lines from the message # object and extracts the multiheaders properly. - obs_fold_continued_leaders = (' ', '\t') + obs_fold_continued_leaders = (" ", "\t") headers = [] for line in message.headers: @@ -316,14 +324,14 @@ def from_httplib(cls, message): # Python 2 # in RFC-7230 S3.2.4. This indicates a multiline header, but # there exists no previous header to which we can attach it. raise InvalidHeader( - 'Header continuation with no previous header: %s' % line + "Header continuation with no previous header: %s" % line ) else: key, value = headers[-1] - headers[-1] = (key, value + ' ' + line.strip()) + headers[-1] = (key, value + " " + line.strip()) continue - key, value = line.split(':', 1) + key, value = line.split(":", 1) headers.append((key, value.strip())) return cls(headers) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/_version.py b/venv/Lib/site-packages/pip/_vendor/urllib3/_version.py new file mode 100644 index 00000000..97c98330 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/_version.py @@ -0,0 +1,2 @@ +# This file is protected via CODEOWNERS +__version__ = "1.26.4" diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/connection.py b/venv/Lib/site-packages/pip/_vendor/urllib3/connection.py new file mode 100644 index 00000000..45580b7e --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/connection.py @@ -0,0 +1,539 @@ +from __future__ import absolute_import + +import datetime +import logging +import os +import re +import socket +import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout + +from .packages import six +from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection +from .packages.six.moves.http_client import HTTPException # noqa: F401 +from .util.proxy import create_proxy_ssl_context + +try: # Compiled with SSL? + import ssl + + BaseSSLError = ssl.SSLError +except (ImportError, AttributeError): # Platform-specific: No SSL. + ssl = None + + class BaseSSLError(BaseException): + pass + + +try: + # Python 3: not a no-op, we're adding this to the namespace so it can be imported. 
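`HTTPHeaderDict` (reformatted in the hunk above) is a case-insensitive mapping that keeps repeated header values and joins them with a comma and space on lookup. A quick sketch; note that `_collections` is a private module of urllib3 1.26.x, so this import is for illustration only:

```python
from urllib3._collections import HTTPHeaderDict

headers = HTTPHeaderDict()
headers["Set-Cookie"] = "a=1"
headers.add("set-cookie", "b=2")     # case-insensitive, value is appended

print(headers["SET-COOKIE"])         # a=1, b=2
print(list(headers.items()))         # [('Set-Cookie', 'a=1'), ('Set-Cookie', 'b=2')]
```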
+ ConnectionError = ConnectionError +except NameError: + # Python 2 + class ConnectionError(Exception): + pass + + +try: # Python 3: + # Not a no-op, we're adding this to the namespace so it can be imported. + BrokenPipeError = BrokenPipeError +except NameError: # Python 2: + + class BrokenPipeError(Exception): + pass + + +from ._collections import HTTPHeaderDict # noqa (historical, removed in v2) +from ._version import __version__ +from .exceptions import ( + ConnectTimeoutError, + NewConnectionError, + SubjectAltNameWarning, + SystemTimeWarning, +) +from .packages.ssl_match_hostname import CertificateError, match_hostname +from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection +from .util.ssl_ import ( + assert_fingerprint, + create_urllib3_context, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) + +log = logging.getLogger(__name__) + +port_by_scheme = {"http": 80, "https": 443} + +# When it comes time to update this value as a part of regular maintenance +# (ie test_recent_date is failing) update it to ~6 months before the current date. +RECENT_DATE = datetime.date(2020, 7, 1) + +_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") + + +class HTTPConnection(_HTTPConnection, object): + """ + Based on :class:`http.client.HTTPConnection` but provides an extra constructor + backwards-compatibility layer between older and newer Pythons. + + Additional keyword parameters are used to configure attributes of the connection. + Accepted parameters include: + + - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` + - ``source_address``: Set the source address for the current connection. + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. + + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass: + + .. code-block:: python + + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). + """ + + default_port = port_by_scheme["http"] + + #: Disable Nagle's algorithm by default. + #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` + default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] + + #: Whether this connection verifies the host's certificate. + is_verified = False + + def __init__(self, *args, **kw): + if not six.PY2: + kw.pop("strict", None) + + # Pre-set source_address. + self.source_address = kw.get("source_address") + + #: The socket options provided by the user. If no options are + #: provided, we use the default options. + self.socket_options = kw.pop("socket_options", self.default_socket_options) + + # Proxy options provided by the user. + self.proxy = kw.pop("proxy", None) + self.proxy_config = kw.pop("proxy_config", None) + + _HTTPConnection.__init__(self, *args, **kw) + + @property + def host(self): + """ + Getter method to remove any trailing dots that indicate the hostname is an FQDN. + + In general, SSL certificates don't include the trailing dot indicating a + fully-qualified domain name, and thus, they don't validate properly when + checked against a domain name that includes the dot. In addition, some + servers may not expect to receive the trailing dot when provided. 
+ + However, the hostname with trailing dot is critical to DNS resolution; doing a + lookup with the trailing dot will properly only resolve the appropriate FQDN, + whereas a lookup without a trailing dot will search the system's search domain + list. Thus, it's important to keep the original host around for use only in + those cases where it's appropriate (i.e., when doing DNS lookup to establish the + actual TCP connection across which we're going to send HTTP requests). + """ + return self._dns_host.rstrip(".") + + @host.setter + def host(self, value): + """ + Setter for the `host` property. + + We assume that only urllib3 uses the _dns_host attribute; httplib itself + only uses `host`, and it seems reasonable that other libraries follow suit. + """ + self._dns_host = value + + def _new_conn(self): + """Establish a socket connection and set nodelay settings on it. + + :return: New socket connection. + """ + extra_kw = {} + if self.source_address: + extra_kw["source_address"] = self.source_address + + if self.socket_options: + extra_kw["socket_options"] = self.socket_options + + try: + conn = connection.create_connection( + (self._dns_host, self.port), self.timeout, **extra_kw + ) + + except SocketTimeout: + raise ConnectTimeoutError( + self, + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), + ) + + except SocketError as e: + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e + ) + + return conn + + def _is_using_tunnel(self): + # Google App Engine's httplib does not define _tunnel_host + return getattr(self, "_tunnel_host", None) + + def _prepare_conn(self, conn): + self.sock = conn + if self._is_using_tunnel(): + # TODO: Fix tunnel so it doesn't depend on self.sock state. + self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + def connect(self): + conn = self._new_conn() + self._prepare_conn(conn) + + def putrequest(self, method, url, *args, **kwargs): + """""" + # Empty docstring because the indentation of CPython's implementation + # is broken but we don't want this method in our documentation. 
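The `socket_options` keyword described in the `HTTPConnection` docstring above is normally passed through a pool rather than by constructing connections directly. A sketch with the standalone `urllib3` package, extending the defaults with TCP keepalive as the docstring suggests (this opens a real connection to a placeholder host):

```python
import socket

import urllib3
from urllib3.connection import HTTPConnection

keepalive_opts = HTTPConnection.default_socket_options + [
    (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
pool = urllib3.HTTPConnectionPool("example.com", 80, socket_options=keepalive_opts)
resp = pool.request("GET", "/")
print(resp.status)
```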
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + "Method cannot contain non-token characters %r (found at least %r)" + % (method, match.group()) + ) + + return _HTTPConnection.putrequest(self, method, url, *args, **kwargs) + + def putheader(self, header, *values): + """""" + if not any(isinstance(v, str) and v == SKIP_HEADER for v in values): + _HTTPConnection.putheader(self, header, *values) + elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS: + raise ValueError( + "urllib3.util.SKIP_HEADER only supports '%s'" + % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),) + ) + + def request(self, method, url, body=None, headers=None): + if headers is None: + headers = {} + else: + # Avoid modifying the headers passed into .request() + headers = headers.copy() + if "user-agent" not in (six.ensure_str(k.lower()) for k in headers): + headers["User-Agent"] = _get_default_user_agent() + super(HTTPConnection, self).request(method, url, body=body, headers=headers) + + def request_chunked(self, method, url, body=None, headers=None): + """ + Alternative to the common request method, which sends the + body with chunked encoding and not as one block + """ + headers = headers or {} + header_keys = set([six.ensure_str(k.lower()) for k in headers]) + skip_accept_encoding = "accept-encoding" in header_keys + skip_host = "host" in header_keys + self.putrequest( + method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host + ) + if "user-agent" not in header_keys: + self.putheader("User-Agent", _get_default_user_agent()) + for header, value in headers.items(): + self.putheader(header, value) + if "transfer-encoding" not in headers: + self.putheader("Transfer-Encoding", "chunked") + self.endheaders() + + if body is not None: + stringish_types = six.string_types + (bytes,) + if isinstance(body, stringish_types): + body = (body,) + for chunk in body: + if not chunk: + continue + if not isinstance(chunk, bytes): + chunk = chunk.encode("utf8") + len_str = hex(len(chunk))[2:] + to_send = bytearray(len_str.encode()) + to_send += b"\r\n" + to_send += chunk + to_send += b"\r\n" + self.send(to_send) + + # After the if clause, to always have a closed body + self.send(b"0\r\n\r\n") + + +class HTTPSConnection(HTTPConnection): + """ + Many of the parameters to this constructor are passed to the underlying SSL + socket by means of :py:func:`urllib3.util.ssl_wrap_socket`. + """ + + default_port = port_by_scheme["https"] + + cert_reqs = None + ca_certs = None + ca_cert_dir = None + ca_cert_data = None + ssl_version = None + assert_fingerprint = None + tls_in_tls_required = False + + def __init__( + self, + host, + port=None, + key_file=None, + cert_file=None, + key_password=None, + strict=None, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + ssl_context=None, + server_hostname=None, + **kw + ): + + HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) + + self.key_file = key_file + self.cert_file = cert_file + self.key_password = key_password + self.ssl_context = ssl_context + self.server_hostname = server_hostname + + # Required property for Google AppEngine 1.9.0 which otherwise causes + # HTTPS requests to go out as HTTP. 
(See Issue #356) + self._protocol = "https" + + def set_cert( + self, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + ca_cert_data=None, + ): + """ + This method should only be called once, before the connection is used. + """ + # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also + # have an SSLContext object in which case we'll use its verify_mode. + if cert_reqs is None: + if self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + def connect(self): + # Add certificate verification + conn = self._new_conn() + hostname = self.host + tls_in_tls = False + + if self._is_using_tunnel(): + if self.tls_in_tls_required: + conn = self._connect_tls_proxy(hostname, conn) + tls_in_tls = True + + self.sock = conn + + # Calls self._set_hostport(), so self.host is + # self._tunnel_host below. + self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + # Override the host with the one we're requesting data from. + hostname = self._tunnel_host + + server_hostname = hostname + if self.server_hostname is not None: + server_hostname = self.server_hostname + + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn( + ( + "System time is way off (before {0}). This will probably " + "lead to SSL verification errors" + ).format(RECENT_DATE), + SystemTimeWarning, + ) + + # Wrap socket using verification with the root certs in + # trusted_root_certs + default_ssl_context = False + if self.ssl_context is None: + default_ssl_context = True + self.ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(self.ssl_version), + cert_reqs=resolve_cert_reqs(self.cert_reqs), + ) + + context = self.ssl_context + context.verify_mode = resolve_cert_reqs(self.cert_reqs) + + # Try to load OS default certs if none are given. + # Works well on Windows (requires Python3.4+) + if ( + not self.ca_certs + and not self.ca_cert_dir + and not self.ca_cert_data + and default_ssl_context + and hasattr(context, "load_default_certs") + ): + context.load_default_certs() + + self.sock = ssl_wrap_socket( + sock=conn, + keyfile=self.key_file, + certfile=self.cert_file, + key_password=self.key_password, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=server_hostname, + ssl_context=context, + tls_in_tls=tls_in_tls, + ) + + # If we're using all defaults and the connection + # is TLSv1 or TLSv1.1 we throw a DeprecationWarning + # for the host. + if ( + default_ssl_context + and self.ssl_version is None + and hasattr(self.sock, "version") + and self.sock.version() in {"TLSv1", "TLSv1.1"} + ): + warnings.warn( + "Negotiating TLSv1/TLSv1.1 by default is deprecated " + "and will be disabled in urllib3 v2.0.0. 
Connecting to " + "'%s' with '%s' can be enabled by explicitly opting-in " + "with 'ssl_version'" % (self.host, self.sock.version()), + DeprecationWarning, + ) + + if self.assert_fingerprint: + assert_fingerprint( + self.sock.getpeercert(binary_form=True), self.assert_fingerprint + ) + elif ( + context.verify_mode != ssl.CERT_NONE + and not getattr(context, "check_hostname", False) + and self.assert_hostname is not False + ): + # While urllib3 attempts to always turn off hostname matching from + # the TLS library, this cannot always be done. So we check whether + # the TLS Library still thinks it's matching hostnames. + cert = self.sock.getpeercert() + if not cert.get("subjectAltName", ()): + warnings.warn( + ( + "Certificate for {0} has no `subjectAltName`, falling back to check for a " + "`commonName` for now. This feature is being removed by major browsers and " + "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 " + "for details.)".format(hostname) + ), + SubjectAltNameWarning, + ) + _match_hostname(cert, self.assert_hostname or server_hostname) + + self.is_verified = ( + context.verify_mode == ssl.CERT_REQUIRED + or self.assert_fingerprint is not None + ) + + def _connect_tls_proxy(self, hostname, conn): + """ + Establish a TLS connection to the proxy using the provided SSL context. + """ + proxy_config = self.proxy_config + ssl_context = proxy_config.ssl_context + if ssl_context: + # If the user provided a proxy context, we assume CA and client + # certificates have already been set + return ssl_wrap_socket( + sock=conn, + server_hostname=hostname, + ssl_context=ssl_context, + ) + + ssl_context = create_proxy_ssl_context( + self.ssl_version, + self.cert_reqs, + self.ca_certs, + self.ca_cert_dir, + self.ca_cert_data, + ) + # By default urllib3's SSLContext disables `check_hostname` and uses + # a custom check. For proxies we're good with relying on the default + # verification. + ssl_context.check_hostname = True + + # If no cert was provided, use only the default options for server + # certificate validation + return ssl_wrap_socket( + sock=conn, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=hostname, + ssl_context=ssl_context, + ) + + +def _match_hostname(cert, asserted_hostname): + try: + match_hostname(cert, asserted_hostname) + except CertificateError as e: + log.warning( + "Certificate did not match expected hostname: %s. 
Certificate: %s", + asserted_hostname, + cert, + ) + # Add cert to exception and reraise so client code can inspect + # the cert when catching the exception, if they want to + e._peer_cert = cert + raise + + +def _get_default_user_agent(): + return "python-urllib3/%s" % __version__ + + +class DummyConnection(object): + """Used to detect a failed ConnectionCls import.""" + + pass + + +if not ssl: + HTTPSConnection = DummyConnection # noqa: F811 + + +VerifiedHTTPSConnection = HTTPSConnection diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py b/venv/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py new file mode 100644 index 00000000..4708c5bf --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py @@ -0,0 +1,1067 @@ +from __future__ import absolute_import + +import errno +import logging +import socket +import sys +import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout + +from .connection import ( + BaseSSLError, + BrokenPipeError, + DummyConnection, + HTTPConnection, + HTTPException, + HTTPSConnection, + VerifiedHTTPSConnection, + port_by_scheme, +) +from .exceptions import ( + ClosedPoolError, + EmptyPoolError, + HeaderParsingError, + HostChangedError, + InsecureRequestWarning, + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, + ProxyError, + ReadTimeoutError, + SSLError, + TimeoutError, +) +from .packages import six +from .packages.six.moves import queue +from .packages.ssl_match_hostname import CertificateError +from .request import RequestMethods +from .response import HTTPResponse +from .util.connection import is_connection_dropped +from .util.proxy import connection_requires_http_tunnel +from .util.queue import LifoQueue +from .util.request import set_file_position +from .util.response import assert_header_parsing +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import Url, _encode_target +from .util.url import _normalize_host as normalize_host +from .util.url import get_host, parse_url + +xrange = six.moves.xrange + +log = logging.getLogger(__name__) + +_Default = object() + + +# Pool objects +class ConnectionPool(object): + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + + .. note:: + ConnectionPool.urlopen() does not normalize or percent-encode target URIs + which is useful if your target server doesn't support percent-encoded + target URIs. + """ + + scheme = None + QueueCls = LifoQueue + + def __init__(self, host, port=None): + if not host: + raise LocationValueError("No host specified.") + + self.host = _normalize_host(host, scheme=self.scheme) + self._proxy_host = host.lower() + self.port = port + + def __str__(self): + return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + # Return False to re-raise any potential exceptions + return False + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + pass + + +# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 +_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`http.client.HTTPConnection`. 
+ + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`http.client.HTTPConnection`. + + :param strict: + Causes BadStatusLine to be raised if the status line can't be parsed + as a valid HTTP/1.0 or 1.1 status line, passed into + :class:`http.client.HTTPConnection`. + + .. note:: + Only works in Python 2. This parameter is ignored in Python 3. + + :param timeout: + Socket timeout in seconds for each individual connection. This can + be a float or integer, which sets the timeout for the HTTP request, + or an instance of :class:`urllib3.util.Timeout` which gives you more + fine-grained control over request timeouts. After the constructor has + been parsed, this is always a `urllib3.util.Timeout` object. + + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to False, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param retries: + Retry configuration to use by default with requests in this pool. + + :param _proxy: + Parsed proxy URL, should not be used directly, instead, see + :class:`urllib3.ProxyManager` + + :param _proxy_headers: + A dictionary with proxy headers, should not be used directly, + instead, see :class:`urllib3.ProxyManager` + + :param \\**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. + """ + + scheme = "http" + ConnectionCls = HTTPConnection + ResponseCls = HTTPResponse + + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + _proxy_config=None, + **conn_kw + ): + ConnectionPool.__init__(self, host, port) + RequestMethods.__init__(self, headers) + + self.strict = strict + + if not isinstance(timeout, Timeout): + timeout = Timeout.from_float(timeout) + + if retries is None: + retries = Retry.DEFAULT + + self.timeout = timeout + self.retries = retries + + self.pool = self.QueueCls(maxsize) + self.block = block + + self.proxy = _proxy + self.proxy_headers = _proxy_headers or {} + self.proxy_config = _proxy_config + + # Fill the queue up so that doing get() on it will block properly + for _ in xrange(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. + self.num_connections = 0 + self.num_requests = 0 + self.conn_kw = conn_kw + + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. + # We cannot know if the user has added default socket options, so we cannot replace the + # list. + self.conn_kw.setdefault("socket_options", []) + + self.conn_kw["proxy"] = self.proxy + self.conn_kw["proxy_config"] = self.proxy_config + + def _new_conn(self): + """ + Return a fresh :class:`HTTPConnection`. 
+ """ + self.num_connections += 1 + log.debug( + "Starting new HTTP connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "80", + ) + + conn = self.ConnectionCls( + host=self.host, + port=self.port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + **self.conn_kw + ) + return conn + + def _get_conn(self, timeout=None): + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``. + """ + conn = None + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + except AttributeError: # self.pool is None + raise ClosedPoolError(self, "Pool is closed.") + + except queue.Empty: + if self.block: + raise EmptyPoolError( + self, + "Pool reached maximum size and no more connections are allowed.", + ) + pass # Oh well, we'll create a new connection then + + # If this is a persistent connection, check if it got disconnected + if conn and is_connection_dropped(conn): + log.debug("Resetting dropped connection: %s", self.host) + conn.close() + if getattr(conn, "auto_open", 1) == 0: + # This is a proxied connection that has been mutated by + # http.client._tunnel() and cannot be reused (since it would + # attempt to bypass the proxy) + conn = None + + return conn or self._new_conn() + + def _put_conn(self, conn): + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is closed and discarded + because we exceeded maxsize. If connections are discarded frequently, + then maxsize should be increased. + + If the pool is closed, then the connection will be closed and discarded. + """ + try: + self.pool.put(conn, block=False) + return # Everything is dandy, done. + except AttributeError: + # self.pool is None. + pass + except queue.Full: + # This should never happen if self.block == True + log.warning("Connection pool is full, discarding connection: %s", self.host) + + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + pass + + def _prepare_proxy(self, conn): + # Nothing to do for HTTP connections. + pass + + def _get_timeout(self, timeout): + """ Helper that always returns a :class:`urllib3.util.Timeout` """ + if timeout is _Default: + return self.timeout.clone() + + if isinstance(timeout, Timeout): + return timeout.clone() + else: + # User passed us an int/float. This is for backwards compatibility, + # can be removed later + return Timeout.from_float(timeout) + + def _raise_timeout(self, err, url, timeout_value): + """Is the error actually a timeout? Will raise a ReadTimeout or pass""" + + if isinstance(err, SocketTimeout): + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + # See the above comment about EAGAIN in Python 3. In Python 2 we have + # to specifically catch it and throw the timeout error + if hasattr(err, "errno") and err.errno in _blocking_errnos: + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + # Catch possible read timeouts thrown as SSL errors. 
If not the + # case, rethrow the original. We need to do this because of: + # http://bugs.python.org/issue10272 + if "timed out" in str(err) or "did not complete (read)" in str( + err + ): # Python < 2.7.4 + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + def _make_request( + self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw + ): + """ + Perform a request on a given urllib connection object taken from our + pool. + + :param conn: + a connection from one of our connection pools + + :param timeout: + Socket timeout in seconds for the request. This can be a + float or integer, which will set the same timeout value for + the socket connect and the socket read, or an instance of + :class:`urllib3.util.Timeout`, which gives you more fine-grained + control over your timeouts. + """ + self.num_requests += 1 + + timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = timeout_obj.connect_timeout + + # Trigger any extra validation we need to do. + try: + self._validate_conn(conn) + except (SocketTimeout, BaseSSLError) as e: + # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise + + # conn.request() calls http.client.*.request, not the method in + # urllib3.request. It also calls makefile (recv) on the socket. + try: + if chunked: + conn.request_chunked(method, url, **httplib_request_kw) + else: + conn.request(method, url, **httplib_request_kw) + + # We are swallowing BrokenPipeError (errno.EPIPE) since the server is + # legitimately able to close the connection after sending a valid response. + # With this behaviour, the received response is still readable. + except BrokenPipeError: + # Python 3 + pass + except IOError as e: + # Python 2 and macOS/Linux + # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS + # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ + if e.errno not in { + errno.EPIPE, + errno.ESHUTDOWN, + errno.EPROTOTYPE, + }: + raise + + # Reset the timeout for the recv() on the socket + read_timeout = timeout_obj.read_timeout + + # App Engine doesn't have a sock attr + if getattr(conn, "sock", None): + # In Python 3 socket.py will catch EAGAIN and return None when you + # try and read into the file pointer created by http.client, which + # instead raises a BadStatusLine exception. Instead of catching + # the exception and assuming all BadStatusLine exceptions are read + # timeouts, check for a zero timeout before making the request. + if read_timeout == 0: + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % read_timeout + ) + if read_timeout is Timeout.DEFAULT_TIMEOUT: + conn.sock.settimeout(socket.getdefaulttimeout()) + else: # None or a value + conn.sock.settimeout(read_timeout) + + # Receive the response from the server + try: + try: + # Python 2.7, use buffering of HTTP responses + httplib_response = conn.getresponse(buffering=True) + except TypeError: + # Python 3 + try: + httplib_response = conn.getresponse() + except BaseException as e: + # Remove the TypeError from the exception chain in + # Python 3 (including for exceptions like SystemExit). + # Otherwise it looks like a bug in the code. 
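+ # six.raise_from(e, None) is the Python 3 `raise e from None`, which suppresses the chained TypeError in the traceback.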
+ six.raise_from(e, None) + except (SocketTimeout, BaseSSLError, SocketError) as e: + self._raise_timeout(err=e, url=url, timeout_value=read_timeout) + raise + + # AppEngine doesn't have a version attr. + http_version = getattr(conn, "_http_vsn_str", "HTTP/?") + log.debug( + '%s://%s:%s "%s %s %s" %s %s', + self.scheme, + self.host, + self.port, + method, + url, + http_version, + httplib_response.status, + httplib_response.length, + ) + + try: + assert_header_parsing(httplib_response.msg) + except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 + log.warning( + "Failed to parse headers (url=%s): %s", + self._absolute_url(url), + hpe, + exc_info=True, + ) + + return httplib_response + + def _absolute_url(self, path): + return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + if self.pool is None: + return + # Disable access to the pool + old_pool, self.pool = self.pool, None + + try: + while True: + conn = old_pool.get(block=False) + if conn: + conn.close() + + except queue.Empty: + pass # Done. + + def is_same_host(self, url): + """ + Check if the given ``url`` is a member of the same host as this + connection pool. + """ + if url.startswith("/"): + return True + + # TODO: Add optional support for socket.gethostbyname checking. + scheme, host, port = get_host(url) + if host is not None: + host = _normalize_host(host, scheme=scheme) + + # Use explicit default port for comparison when none is given + if self.port and not port: + port = port_by_scheme.get(scheme) + elif not self.port and port == port_by_scheme.get(scheme): + port = None + + return (scheme, host, port) == (self.scheme, self.host, self.port) + + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + assert_same_host=True, + timeout=_Default, + pool_timeout=None, + release_conn=None, + chunked=False, + body_pos=None, + **response_kw + ): + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method provided + by :class:`.RequestMethods`, such as :meth:`request`. + + .. note:: + + `release_conn` will only behave as expected if + `preload_content=False` because we want to make + `preload_content=False` the default behaviour someday soon without + breaking backwards compatibility. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. 
Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When ``False``, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. + + :param release_conn: + If False, then the urlopen call will not release the connection + back into the pool once a response is received (but will release if + you read the entire contents of the response such as when + `preload_content=True`). This is useful if you're not preloading + the response's content immediately. You will need to call + ``r.release_conn()`` on the response ``r`` to return the connection + back into the pool. If None, it takes the value of + ``response_kw.get('preload_content', True)``. + + :param chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param int body_pos: + Position to seek to in file-like body in the event of a retry or + redirect. Typically this won't need to be set because urllib3 will + auto-populate the value when needed. + + :param \\**response_kw: + Additional parameters are passed to + :meth:`urllib3.response.HTTPResponse.from_httplib` + """ + + parsed_url = parse_url(url) + destination_scheme = parsed_url.scheme + + if headers is None: + headers = self.headers + + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) + + if release_conn is None: + release_conn = response_kw.get("preload_content", True) + + # Check host + if assert_same_host and not self.is_same_host(url): + raise HostChangedError(self, url, retries) + + # Ensure that the URL we're connecting to is properly encoded + if url.startswith("/"): + url = six.ensure_str(_encode_target(url)) + else: + url = six.ensure_str(parsed_url.url) + + conn = None + + # Track whether `conn` needs to be released before + # returning/raising/recursing. Update this variable if necessary, and + # leave `release_conn` constant throughout the function. That way, if + # the function recurses, the original value of `release_conn` will be + # passed down into the recursive call, and its value will be respected. + # + # See issue #651 [1] for details. + # + # [1] + release_this_conn = release_conn + + http_tunnel_required = connection_requires_http_tunnel( + self.proxy, self.proxy_config, destination_scheme + ) + + # Merge the proxy headers. Only done when not using HTTP CONNECT. We + # have to copy the headers dict so we can safely change it without those + # changes being reflected in anyone else's copy. 
+ if not http_tunnel_required: + headers = headers.copy() + headers.update(self.proxy_headers) + + # Must keep the exception bound to a separate variable or else Python 3 + # complains about UnboundLocalError. + err = None + + # Keep track of whether we cleanly exited the except block. This + # ensures we do proper cleanup in finally. + clean_exit = False + + # Rewind body position, if needed. Record current position + # for future rewinds in the event of a redirect/retry. + body_pos = set_file_position(body, body_pos) + + try: + # Request a connection from the queue. + timeout_obj = self._get_timeout(timeout) + conn = self._get_conn(timeout=pool_timeout) + + conn.timeout = timeout_obj.connect_timeout + + is_new_proxy_conn = self.proxy is not None and not getattr( + conn, "sock", None + ) + if is_new_proxy_conn and http_tunnel_required: + self._prepare_proxy(conn) + + # Make the request on the httplib connection object. + httplib_response = self._make_request( + conn, + method, + url, + timeout=timeout_obj, + body=body, + headers=headers, + chunked=chunked, + ) + + # If we're going to release the connection in ``finally:``, then + # the response doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. + response_conn = conn if not release_conn else None + + # Pass method to Response for length checking + response_kw["request_method"] = method + + # Import httplib's response into our own wrapper object + response = self.ResponseCls.from_httplib( + httplib_response, + pool=self, + connection=response_conn, + retries=retries, + **response_kw + ) + + # Everything went great! + clean_exit = True + + except EmptyPoolError: + # Didn't get a connection from the pool, no need to clean up + clean_exit = True + release_this_conn = False + raise + + except ( + TimeoutError, + HTTPException, + SocketError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. + clean_exit = False + if isinstance(e, (BaseSSLError, CertificateError)): + e = SSLError(e) + elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: + e = ProxyError("Cannot connect to proxy.", e) + elif isinstance(e, (SocketError, HTTPException)): + e = ProtocolError("Connection aborted.", e) + + retries = retries.increment( + method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2] + ) + retries.sleep() + + # Keep track of the error for the retry warning. + err = e + + finally: + if not clean_exit: + # We hit some kind of exception, handled or otherwise. We need + # to throw the connection away unless explicitly told not to. + # Close the connection, set the variable to None, and make sure + # we put the None back in the pool to avoid leaking it. + conn = conn and conn.close() + release_this_conn = True + + if release_this_conn: + # Put the connection back to be reused. If the connection is + # expired then it will be None, which will get replaced with a + # fresh connection during _get_conn. + self._put_conn(conn) + + if not conn: + # Try again + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + return self.urlopen( + method, + url, + body, + headers, + retries, + redirect, + assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) + + # Handle redirect? 
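+ # A 303 (See Other) redirect always downgrades the follow-up request to GET; each followed redirect consumes one slot of the Retry budget via retries.increment().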
+ redirect_location = redirect and response.get_redirect_location() + if redirect_location: + if response.status == 303: + method = "GET" + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep_for_retry(response) + log.debug("Redirecting %s -> %s", url, redirect_location) + return self.urlopen( + method, + redirect_location, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) + + # Check if we should retry the HTTP response. + has_retry_after = bool(response.getheader("Retry-After")) + if retries.is_retry(method, response.status, has_retry_after): + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_status: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep(response) + log.debug("Retry: %s", url) + return self.urlopen( + method, + url, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + :class:`.HTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. + If ``assert_hostname`` is False, no verification is done. + + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, + ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` + is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + the connection socket into an SSL socket. + """ + + scheme = "https" + ConnectionCls = HTTPSConnection + + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + ssl_version=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + **conn_kw + ): + + HTTPConnectionPool.__init__( + self, + host, + port, + strict, + timeout, + maxsize, + block, + headers, + retries, + _proxy, + _proxy_headers, + **conn_kw + ) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir + self.ssl_version = ssl_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def _prepare_conn(self, conn): + """ + Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` + and establish the tunnel if proxy is used. 
+ """ + + if isinstance(conn, VerifiedHTTPSConnection): + conn.set_cert( + key_file=self.key_file, + key_password=self.key_password, + cert_file=self.cert_file, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + ) + conn.ssl_version = self.ssl_version + return conn + + def _prepare_proxy(self, conn): + """ + Establishes a tunnel connection through HTTP CONNECT. + + Tunnel connection is established early because otherwise httplib would + improperly set Host: header to proxy's IP:port. + """ + + conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) + + if self.proxy.scheme == "https": + conn.tls_in_tls_required = True + + conn.connect() + + def _new_conn(self): + """ + Return a fresh :class:`http.client.HTTPSConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTPS connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "443", + ) + + if not self.ConnectionCls or self.ConnectionCls is DummyConnection: + raise SSLError( + "Can't connect to HTTPS URL because the SSL module is not available." + ) + + actual_host = self.host + actual_port = self.port + if self.proxy is not None: + actual_host = self.proxy.host + actual_port = self.proxy.port + + conn = self.ConnectionCls( + host=actual_host, + port=actual_port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + **self.conn_kw + ) + + return self._prepare_conn(conn) + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + super(HTTPSConnectionPool, self)._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if not getattr(conn, "sock", None): # AppEngine might not have `.sock` + conn.connect() + + if not conn.is_verified: + warnings.warn( + ( + "Unverified HTTPS request is being made to host '%s'. " + "Adding certificate verification is strongly advised. See: " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#ssl-warnings" % conn.host + ), + InsecureRequestWarning, + ) + + +def connection_from_url(url, **kw): + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \\**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. + + Example:: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, host, port = get_host(url) + port = port or port_by_scheme.get(scheme, 80) + if scheme == "https": + return HTTPSConnectionPool(host, port=port, **kw) + else: + return HTTPConnectionPool(host, port=port, **kw) + + +def _normalize_host(host, scheme): + """ + Normalize hosts for comparisons and use with sockets. + """ + + host = normalize_host(host, scheme) + + # httplib doesn't like it when we include brackets in IPv6 addresses + # Specifically, if we include brackets but also pass the port then + # httplib crazily doubles up the square brackets on the Host header. + # Instead, we need to make sure we never pass ``None`` as the port. 
+ # However, for backward compatibility reasons we can't actually + # *assert* that. See http://bugs.python.org/issue28539 + if host.startswith("[") and host.endswith("]"): + host = host[1:-1] + return host diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__init__.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..a341ab9b Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-36.pyc new file mode 100644 index 00000000..6a26f25a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-36.pyc new file mode 100644 index 00000000..b8ebf7c3 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-36.pyc new file mode 100644 index 00000000..d63fb889 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-36.pyc new file mode 100644 index 00000000..d9138755 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-36.pyc new file mode 100644 index 00000000..c82b0bb5 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-36.pyc new file mode 100644 index 00000000..360712ce Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py new file mode 100644 index 00000000..8765b907 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py @@ -0,0 +1,36 @@ +""" +This module provides means to detect the App Engine environment. +""" + +import os + + +def is_appengine(): + return is_local_appengine() or is_prod_appengine() + + +def is_appengine_sandbox(): + """Reports if the app is running in the first generation sandbox. 
+ + The second generation runtimes are technically still in a sandbox, but it + is much less restrictive, so generally you shouldn't need to check for it. + see https://cloud.google.com/appengine/docs/standard/runtimes + """ + return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" + + +def is_local_appengine(): + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Development/") + + +def is_prod_appengine(): + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Google App Engine/") + + +def is_prod_appengine_mvms(): + """Deprecated.""" + return False diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..e1e6689e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-36.pyc new file mode 100644 index 00000000..ab2fa070 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-36.pyc new file mode 100644 index 00000000..bb6f227f Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py new file mode 100644 index 00000000..42526be7 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py @@ -0,0 +1,519 @@ +""" +This module uses ctypes to bind a whole bunch of functions and constants from +SecureTransport. The goal here is to provide the low-level API to +SecureTransport. These are essentially the C-level functions and constants, and +they're pretty gross to work with. + +This code is a bastardised version of the code found in Will Bond's oscrypto +library. An enormous debt is owed to him for blazing this trail for us. 
For +that reason, this code should be considered to be covered both by urllib3's +license and by oscrypto's: + + Copyright (c) 2015-2016 Will Bond + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. +""" +from __future__ import absolute_import + +import platform +from ctypes import ( + CDLL, + CFUNCTYPE, + POINTER, + c_bool, + c_byte, + c_char_p, + c_int32, + c_long, + c_size_t, + c_uint32, + c_ulong, + c_void_p, +) +from ctypes.util import find_library + +from pip._vendor.urllib3.packages.six import raise_from + +if platform.system() != "Darwin": + raise ImportError("Only macOS is supported") + +version = platform.mac_ver()[0] +version_info = tuple(map(int, version.split("."))) +if version_info < (10, 8): + raise OSError( + "Only OS X 10.8 and newer are supported, not %s.%s" + % (version_info[0], version_info[1]) + ) + + +def load_cdll(name, macos10_16_path): + """Loads a CDLL by name, falling back to known path on 10.16+""" + try: + # Big Sur is technically 11 but we use 10.16 due to the Big Sur + # beta being labeled as 10.16. 
+ if version_info >= (10, 16): + path = macos10_16_path + else: + path = find_library(name) + if not path: + raise OSError # Caught and reraised as 'ImportError' + return CDLL(path, use_errno=True) + except OSError: + raise_from(ImportError("The library %s failed to load" % name), None) + + +Security = load_cdll( + "Security", "/System/Library/Frameworks/Security.framework/Security" +) +CoreFoundation = load_cdll( + "CoreFoundation", + "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", +) + + +Boolean = c_bool +CFIndex = c_long +CFStringEncoding = c_uint32 +CFData = c_void_p +CFString = c_void_p +CFArray = c_void_p +CFMutableArray = c_void_p +CFDictionary = c_void_p +CFError = c_void_p +CFType = c_void_p +CFTypeID = c_ulong + +CFTypeRef = POINTER(CFType) +CFAllocatorRef = c_void_p + +OSStatus = c_int32 + +CFDataRef = POINTER(CFData) +CFStringRef = POINTER(CFString) +CFArrayRef = POINTER(CFArray) +CFMutableArrayRef = POINTER(CFMutableArray) +CFDictionaryRef = POINTER(CFDictionary) +CFArrayCallBacks = c_void_p +CFDictionaryKeyCallBacks = c_void_p +CFDictionaryValueCallBacks = c_void_p + +SecCertificateRef = POINTER(c_void_p) +SecExternalFormat = c_uint32 +SecExternalItemType = c_uint32 +SecIdentityRef = POINTER(c_void_p) +SecItemImportExportFlags = c_uint32 +SecItemImportExportKeyParameters = c_void_p +SecKeychainRef = POINTER(c_void_p) +SSLProtocol = c_uint32 +SSLCipherSuite = c_uint32 +SSLContextRef = POINTER(c_void_p) +SecTrustRef = POINTER(c_void_p) +SSLConnectionRef = c_uint32 +SecTrustResultType = c_uint32 +SecTrustOptionFlags = c_uint32 +SSLProtocolSide = c_uint32 +SSLConnectionType = c_uint32 +SSLSessionOption = c_uint32 + + +try: + Security.SecItemImport.argtypes = [ + CFDataRef, + CFStringRef, + POINTER(SecExternalFormat), + POINTER(SecExternalItemType), + SecItemImportExportFlags, + POINTER(SecItemImportExportKeyParameters), + SecKeychainRef, + POINTER(CFArrayRef), + ] + Security.SecItemImport.restype = OSStatus + + Security.SecCertificateGetTypeID.argtypes = [] + Security.SecCertificateGetTypeID.restype = CFTypeID + + Security.SecIdentityGetTypeID.argtypes = [] + Security.SecIdentityGetTypeID.restype = CFTypeID + + Security.SecKeyGetTypeID.argtypes = [] + Security.SecKeyGetTypeID.restype = CFTypeID + + Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef] + Security.SecCertificateCreateWithData.restype = SecCertificateRef + + Security.SecCertificateCopyData.argtypes = [SecCertificateRef] + Security.SecCertificateCopyData.restype = CFDataRef + + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SecIdentityCreateWithCertificate.argtypes = [ + CFTypeRef, + SecCertificateRef, + POINTER(SecIdentityRef), + ] + Security.SecIdentityCreateWithCertificate.restype = OSStatus + + Security.SecKeychainCreate.argtypes = [ + c_char_p, + c_uint32, + c_void_p, + Boolean, + c_void_p, + POINTER(SecKeychainRef), + ] + Security.SecKeychainCreate.restype = OSStatus + + Security.SecKeychainDelete.argtypes = [SecKeychainRef] + Security.SecKeychainDelete.restype = OSStatus + + Security.SecPKCS12Import.argtypes = [ + CFDataRef, + CFDictionaryRef, + POINTER(CFArrayRef), + ] + Security.SecPKCS12Import.restype = OSStatus + + SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) + SSLWriteFunc = CFUNCTYPE( + OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t) + ) + + Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc] + 
Security.SSLSetIOFuncs.restype = OSStatus + + Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t] + Security.SSLSetPeerID.restype = OSStatus + + Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef] + Security.SSLSetCertificate.restype = OSStatus + + Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean] + Security.SSLSetCertificateAuthorities.restype = OSStatus + + Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef] + Security.SSLSetConnection.restype = OSStatus + + Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t] + Security.SSLSetPeerDomainName.restype = OSStatus + + Security.SSLHandshake.argtypes = [SSLContextRef] + Security.SSLHandshake.restype = OSStatus + + Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)] + Security.SSLRead.restype = OSStatus + + Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)] + Security.SSLWrite.restype = OSStatus + + Security.SSLClose.argtypes = [SSLContextRef] + Security.SSLClose.restype = OSStatus + + Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)] + Security.SSLGetNumberSupportedCiphers.restype = OSStatus + + Security.SSLGetSupportedCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + POINTER(c_size_t), + ] + Security.SSLGetSupportedCiphers.restype = OSStatus + + Security.SSLSetEnabledCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + c_size_t, + ] + Security.SSLSetEnabledCiphers.restype = OSStatus + + Security.SSLGetNumberEnabledCiphers.argtype = [SSLContextRef, POINTER(c_size_t)] + Security.SSLGetNumberEnabledCiphers.restype = OSStatus + + Security.SSLGetEnabledCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + POINTER(c_size_t), + ] + Security.SSLGetEnabledCiphers.restype = OSStatus + + Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)] + Security.SSLGetNegotiatedCipher.restype = OSStatus + + Security.SSLGetNegotiatedProtocolVersion.argtypes = [ + SSLContextRef, + POINTER(SSLProtocol), + ] + Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus + + Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)] + Security.SSLCopyPeerTrust.restype = OSStatus + + Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef] + Security.SecTrustSetAnchorCertificates.restype = OSStatus + + Security.SecTrustSetAnchorCertificatesOnly.argstypes = [SecTrustRef, Boolean] + Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus + + Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)] + Security.SecTrustEvaluate.restype = OSStatus + + Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef] + Security.SecTrustGetCertificateCount.restype = CFIndex + + Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex] + Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef + + Security.SSLCreateContext.argtypes = [ + CFAllocatorRef, + SSLProtocolSide, + SSLConnectionType, + ] + Security.SSLCreateContext.restype = SSLContextRef + + Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean] + Security.SSLSetSessionOption.restype = OSStatus + + Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol] + Security.SSLSetProtocolVersionMin.restype = OSStatus + + Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol] + 
Security.SSLSetProtocolVersionMax.restype = OSStatus + + try: + Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef] + Security.SSLSetALPNProtocols.restype = OSStatus + except AttributeError: + # Supported only in 10.12+ + pass + + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SSLReadFunc = SSLReadFunc + Security.SSLWriteFunc = SSLWriteFunc + Security.SSLContextRef = SSLContextRef + Security.SSLProtocol = SSLProtocol + Security.SSLCipherSuite = SSLCipherSuite + Security.SecIdentityRef = SecIdentityRef + Security.SecKeychainRef = SecKeychainRef + Security.SecTrustRef = SecTrustRef + Security.SecTrustResultType = SecTrustResultType + Security.SecExternalFormat = SecExternalFormat + Security.OSStatus = OSStatus + + Security.kSecImportExportPassphrase = CFStringRef.in_dll( + Security, "kSecImportExportPassphrase" + ) + Security.kSecImportItemIdentity = CFStringRef.in_dll( + Security, "kSecImportItemIdentity" + ) + + # CoreFoundation time! + CoreFoundation.CFRetain.argtypes = [CFTypeRef] + CoreFoundation.CFRetain.restype = CFTypeRef + + CoreFoundation.CFRelease.argtypes = [CFTypeRef] + CoreFoundation.CFRelease.restype = None + + CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef] + CoreFoundation.CFGetTypeID.restype = CFTypeID + + CoreFoundation.CFStringCreateWithCString.argtypes = [ + CFAllocatorRef, + c_char_p, + CFStringEncoding, + ] + CoreFoundation.CFStringCreateWithCString.restype = CFStringRef + + CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding] + CoreFoundation.CFStringGetCStringPtr.restype = c_char_p + + CoreFoundation.CFStringGetCString.argtypes = [ + CFStringRef, + c_char_p, + CFIndex, + CFStringEncoding, + ] + CoreFoundation.CFStringGetCString.restype = c_bool + + CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex] + CoreFoundation.CFDataCreate.restype = CFDataRef + + CoreFoundation.CFDataGetLength.argtypes = [CFDataRef] + CoreFoundation.CFDataGetLength.restype = CFIndex + + CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef] + CoreFoundation.CFDataGetBytePtr.restype = c_void_p + + CoreFoundation.CFDictionaryCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + POINTER(CFTypeRef), + CFIndex, + CFDictionaryKeyCallBacks, + CFDictionaryValueCallBacks, + ] + CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef + + CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef] + CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef + + CoreFoundation.CFArrayCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreate.restype = CFArrayRef + + CoreFoundation.CFArrayCreateMutable.argtypes = [ + CFAllocatorRef, + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef + + CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p] + CoreFoundation.CFArrayAppendValue.restype = None + + CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef] + CoreFoundation.CFArrayGetCount.restype = CFIndex + + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex] + CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p + + CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( + CoreFoundation, "kCFAllocatorDefault" + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( + CoreFoundation, "kCFTypeArrayCallBacks" + ) + CoreFoundation.kCFTypeDictionaryKeyCallBacks = 
c_void_p.in_dll( + CoreFoundation, "kCFTypeDictionaryKeyCallBacks" + ) + CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( + CoreFoundation, "kCFTypeDictionaryValueCallBacks" + ) + + CoreFoundation.CFTypeRef = CFTypeRef + CoreFoundation.CFArrayRef = CFArrayRef + CoreFoundation.CFStringRef = CFStringRef + CoreFoundation.CFDictionaryRef = CFDictionaryRef + +except (AttributeError): + raise ImportError("Error initializing ctypes") + + +class CFConst(object): + """ + A class object that acts as essentially a namespace for CoreFoundation + constants. + """ + + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) + + +class SecurityConst(object): + """ + A class object that acts as essentially a namespace for Security constants. + """ + + kSSLSessionOptionBreakOnServerAuth = 0 + + kSSLProtocol2 = 1 + kSSLProtocol3 = 2 + kTLSProtocol1 = 4 + kTLSProtocol11 = 7 + kTLSProtocol12 = 8 + # SecureTransport does not support TLS 1.3 even if there's a constant for it + kTLSProtocol13 = 10 + kTLSProtocolMaxSupported = 999 + + kSSLClientSide = 1 + kSSLStreamType = 0 + + kSecFormatPEMSequence = 10 + + kSecTrustResultInvalid = 0 + kSecTrustResultProceed = 1 + # This gap is present on purpose: this was kSecTrustResultConfirm, which + # is deprecated. + kSecTrustResultDeny = 3 + kSecTrustResultUnspecified = 4 + kSecTrustResultRecoverableTrustFailure = 5 + kSecTrustResultFatalTrustFailure = 6 + kSecTrustResultOtherError = 7 + + errSSLProtocol = -9800 + errSSLWouldBlock = -9803 + errSSLClosedGraceful = -9805 + errSSLClosedNoNotify = -9816 + errSSLClosedAbort = -9806 + + errSSLXCertChainInvalid = -9807 + errSSLCrypto = -9809 + errSSLInternal = -9810 + errSSLCertExpired = -9814 + errSSLCertNotYetValid = -9815 + errSSLUnknownRootCert = -9812 + errSSLNoRootCert = -9813 + errSSLHostNameMismatch = -9843 + errSSLPeerHandshakeFail = -9824 + errSSLPeerUserCancelled = -9839 + errSSLWeakPeerEphemeralDHKey = -9850 + errSSLServerAuthCompleted = -9841 + errSSLRecordOverflow = -9847 + + errSecVerifyFailed = -67808 + errSecNoTrustSettings = -25263 + errSecItemNotFound = -25300 + errSecInvalidTrustSettings = -25262 + + # Cipher suites. We only pick the ones our default cipher string allows. 
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values + TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C + TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 + TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B + TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F + TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9 + TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8 + TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F + TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 + TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B + TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 + TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D + TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C + TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D + TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C + TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 + TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F + TLS_AES_128_GCM_SHA256 = 0x1301 + TLS_AES_256_GCM_SHA384 = 0x1302 + TLS_AES_128_CCM_8_SHA256 = 0x1305 + TLS_AES_128_CCM_SHA256 = 0x1304 diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py similarity index 78% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py index b13cd9e7..ed812019 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py @@ -10,13 +10,13 @@ import base64 import ctypes import itertools -import re import os +import re import ssl +import struct import tempfile -from .bindings import Security, CoreFoundation, CFConst - +from .bindings import CFConst, CoreFoundation, Security # This regular expression is used to grab PEM data out of a PEM bundle. _PEM_CERTS_RE = re.compile( @@ -56,6 +56,51 @@ def _cf_dictionary_from_tuples(tuples): ) +def _cfstr(py_bstr): + """ + Given a Python binary data, create a CFString. + The string must be CFReleased by the caller. + """ + c_str = ctypes.c_char_p(py_bstr) + cf_str = CoreFoundation.CFStringCreateWithCString( + CoreFoundation.kCFAllocatorDefault, + c_str, + CFConst.kCFStringEncodingUTF8, + ) + return cf_str + + +def _create_cfstring_array(lst): + """ + Given a list of Python binary data, create an associated CFMutableArray. + The array must be CFReleased by the caller. + + Raises an ssl.SSLError on failure. 
+ """ + cf_arr = None + try: + cf_arr = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + if not cf_arr: + raise MemoryError("Unable to allocate memory!") + for item in lst: + cf_str = _cfstr(item) + if not cf_str: + raise MemoryError("Unable to allocate memory!") + try: + CoreFoundation.CFArrayAppendValue(cf_arr, cf_str) + finally: + CoreFoundation.CFRelease(cf_str) + except BaseException as e: + if cf_arr: + CoreFoundation.CFRelease(cf_arr) + raise ssl.SSLError("Unable to allocate array: %s" % (e,)) + return cf_arr + + def _cf_string_to_unicode(value): """ Creates a Unicode string from a CFString object. Used entirely for error @@ -66,22 +111,18 @@ def _cf_string_to_unicode(value): value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) string = CoreFoundation.CFStringGetCStringPtr( - value_as_void_p, - CFConst.kCFStringEncodingUTF8 + value_as_void_p, CFConst.kCFStringEncodingUTF8 ) if string is None: buffer = ctypes.create_string_buffer(1024) result = CoreFoundation.CFStringGetCString( - value_as_void_p, - buffer, - 1024, - CFConst.kCFStringEncodingUTF8 + value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8 ) if not result: - raise OSError('Error copying C string from CFStringRef') + raise OSError("Error copying C string from CFStringRef") string = buffer.value if string is not None: - string = string.decode('utf-8') + string = string.decode("utf-8") return string @@ -97,8 +138,8 @@ def _assert_no_error(error, exception_class=None): output = _cf_string_to_unicode(cf_error_string) CoreFoundation.CFRelease(cf_error_string) - if output is None or output == u'': - output = u'OSStatus %s' % error + if output is None or output == u"": + output = u"OSStatus %s" % error if exception_class is None: exception_class = ssl.SSLError @@ -115,8 +156,7 @@ def _cert_array_from_pem(pem_bundle): pem_bundle = pem_bundle.replace(b"\r\n", b"\n") der_certs = [ - base64.b64decode(match.group(1)) - for match in _PEM_CERTS_RE.finditer(pem_bundle) + base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle) ] if not der_certs: raise ssl.SSLError("No root certificates specified") @@ -124,7 +164,7 @@ def _cert_array_from_pem(pem_bundle): cert_array = CoreFoundation.CFArrayCreateMutable( CoreFoundation.kCFAllocatorDefault, 0, - ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks) + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), ) if not cert_array: raise ssl.SSLError("Unable to allocate memory!") @@ -186,21 +226,16 @@ def _temporary_keychain(): # some random bytes to password-protect the keychain we're creating, so we # ask for 40 random bytes. random_bytes = os.urandom(40) - filename = base64.b16encode(random_bytes[:8]).decode('utf-8') + filename = base64.b16encode(random_bytes[:8]).decode("utf-8") password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 tempdirectory = tempfile.mkdtemp() - keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') + keychain_path = os.path.join(tempdirectory, filename).encode("utf-8") # We now want to create the keychain itself. 
keychain = Security.SecKeychainRef() status = Security.SecKeychainCreate( - keychain_path, - len(password), - password, - False, - None, - ctypes.byref(keychain) + keychain_path, len(password), password, False, None, ctypes.byref(keychain) ) _assert_no_error(status) @@ -219,14 +254,12 @@ def _load_items_from_file(keychain, path): identities = [] result_array = None - with open(path, 'rb') as f: + with open(path, "rb") as f: raw_filedata = f.read() try: filedata = CoreFoundation.CFDataCreate( - CoreFoundation.kCFAllocatorDefault, - raw_filedata, - len(raw_filedata) + CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata) ) result_array = CoreFoundation.CFArrayRef() result = Security.SecItemImport( @@ -237,7 +270,7 @@ def _load_items_from_file(keychain, path): 0, # import flags None, # key params, can include passphrase in the future keychain, # The keychain to insert into - ctypes.byref(result_array) # Results + ctypes.byref(result_array), # Results ) _assert_no_error(result) @@ -247,9 +280,7 @@ def _load_items_from_file(keychain, path): # keychain already has them! result_count = CoreFoundation.CFArrayGetCount(result_array) for index in range(result_count): - item = CoreFoundation.CFArrayGetValueAtIndex( - result_array, index - ) + item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index) item = ctypes.cast(item, CoreFoundation.CFTypeRef) if _is_cert(item): @@ -307,9 +338,7 @@ def _load_client_cert_chain(keychain, *paths): try: for file_path in paths: - new_identities, new_certs = _load_items_from_file( - keychain, file_path - ) + new_identities, new_certs = _load_items_from_file(keychain, file_path) identities.extend(new_identities) certificates.extend(new_certs) @@ -318,9 +347,7 @@ def _load_client_cert_chain(keychain, *paths): if not identities: new_identity = Security.SecIdentityRef() status = Security.SecIdentityCreateWithCertificate( - keychain, - certificates[0], - ctypes.byref(new_identity) + keychain, certificates[0], ctypes.byref(new_identity) ) _assert_no_error(status) identities.append(new_identity) @@ -344,3 +371,26 @@ def _load_client_cert_chain(keychain, *paths): finally: for obj in itertools.chain(identities, certificates): CoreFoundation.CFRelease(obj) + + +TLS_PROTOCOL_VERSIONS = { + "SSLv2": (0, 2), + "SSLv3": (3, 0), + "TLSv1": (3, 1), + "TLSv1.1": (3, 2), + "TLSv1.2": (3, 3), +} + + +def _build_tls_unknown_ca_alert(version): + """ + Builds a TLS alert record for an unknown CA. 
+ """ + ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version] + severity_fatal = 0x02 + description_unknown_ca = 0x30 + msg = struct.pack(">BB", severity_fatal, description_unknown_ca) + msg_len = len(msg) + record_type_alert = 0x15 + record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg + return record diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py similarity index 77% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py index 9b42952d..b9d2a690 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py @@ -39,24 +39,24 @@ """ from __future__ import absolute_import + import io import logging import warnings -from ..packages.six.moves.urllib.parse import urljoin from ..exceptions import ( HTTPError, HTTPWarning, MaxRetryError, ProtocolError, + SSLError, TimeoutError, - SSLError ) - +from ..packages.six.moves.urllib.parse import urljoin from ..request import RequestMethods from ..response import HTTPResponse -from ..util.timeout import Timeout from ..util.retry import Retry +from ..util.timeout import Timeout from . import _appengine_environ try: @@ -90,29 +90,30 @@ class AppEngineManager(RequestMethods): * If you attempt to use this on App Engine Flexible, as full socket support is available. * If a request size is more than 10 megabytes. - * If a response size is more than 32 megabtyes. + * If a response size is more than 32 megabytes. * If you use an unsupported request method such as OPTIONS. Beyond those cases, it will raise normal urllib3 errors. """ - def __init__(self, headers=None, retries=None, validate_certificate=True, - urlfetch_retries=True): + def __init__( + self, + headers=None, + retries=None, + validate_certificate=True, + urlfetch_retries=True, + ): if not urlfetch: raise AppEnginePlatformError( - "URLFetch is not available in this environment.") - - if is_prod_appengine_mvms(): - raise AppEnginePlatformError( - "Use normal urllib3.PoolManager instead of AppEngineManager" - "on Managed VMs, as using URLFetch is not necessary in " - "this environment.") + "URLFetch is not available in this environment." + ) warnings.warn( "urllib3 is using URLFetch on Google App Engine sandbox instead " "of sockets. 
To use sockets directly instead of URLFetch see " "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.", - AppEnginePlatformWarning) + AppEnginePlatformWarning, + ) RequestMethods.__init__(self, headers) self.validate_certificate = validate_certificate @@ -127,17 +128,22 @@ def __exit__(self, exc_type, exc_val, exc_tb): # Return False to re-raise any potential exceptions return False - def urlopen(self, method, url, body=None, headers=None, - retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT, - **response_kw): + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + timeout=Timeout.DEFAULT_TIMEOUT, + **response_kw + ): retries = self._get_retries(retries, redirect) try: - follow_redirects = ( - redirect and - retries.redirect != 0 and - retries.total) + follow_redirects = redirect and retries.redirect != 0 and retries.total response = urlfetch.fetch( url, payload=body, @@ -152,44 +158,52 @@ def urlopen(self, method, url, body=None, headers=None, raise TimeoutError(self, e) except urlfetch.InvalidURLError as e: - if 'too large' in str(e): + if "too large" in str(e): raise AppEnginePlatformError( "URLFetch request too large, URLFetch only " - "supports requests up to 10mb in size.", e) + "supports requests up to 10mb in size.", + e, + ) raise ProtocolError(e) except urlfetch.DownloadError as e: - if 'Too many redirects' in str(e): + if "Too many redirects" in str(e): raise MaxRetryError(self, url, reason=e) raise ProtocolError(e) except urlfetch.ResponseTooLargeError as e: raise AppEnginePlatformError( "URLFetch response too large, URLFetch only supports" - "responses up to 32mb in size.", e) + "responses up to 32mb in size.", + e, + ) except urlfetch.SSLCertificateError as e: raise SSLError(e) except urlfetch.InvalidMethodError as e: raise AppEnginePlatformError( - "URLFetch does not support method: %s" % method, e) + "URLFetch does not support method: %s" % method, e + ) http_response = self._urlfetch_response_to_http_response( - response, retries=retries, **response_kw) + response, retries=retries, **response_kw + ) # Handle redirect? redirect_location = redirect and http_response.get_redirect_location() if redirect_location: # Check for redirect response - if (self.urlfetch_retries and retries.raise_on_redirect): + if self.urlfetch_retries and retries.raise_on_redirect: raise MaxRetryError(self, url, "too many redirects") else: if http_response.status == 303: - method = 'GET' + method = "GET" try: - retries = retries.increment(method, url, response=http_response, _pool=self) + retries = retries.increment( + method, url, response=http_response, _pool=self + ) except MaxRetryError: if retries.raise_on_redirect: raise MaxRetryError(self, url, "too many redirects") @@ -199,22 +213,32 @@ def urlopen(self, method, url, body=None, headers=None, log.debug("Redirecting %s -> %s", url, redirect_location) redirect_url = urljoin(url, redirect_location) return self.urlopen( - method, redirect_url, body, headers, - retries=retries, redirect=redirect, - timeout=timeout, **response_kw) + method, + redirect_url, + body, + headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) # Check if we should retry the HTTP response. 
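`AppEngineManager` above routes every request through App Engine's `urlfetch` API instead of sockets, and its `urlopen()` handles redirects and retries itself as shown. A minimal usage sketch, assuming code running inside the App Engine standard-environment sandbox where `urlfetch` is importable (the URL is only an example):

```python
from urllib3.contrib.appengine import AppEngineManager

# Only usable inside the App Engine standard sandbox; anywhere else the
# constructor raises AppEnginePlatformError because urlfetch is unavailable.
http = AppEngineManager(retries=3)
resp = http.request("GET", "https://www.example.com/")
print(resp.status, len(resp.data))
```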
- has_retry_after = bool(http_response.getheader('Retry-After')) + has_retry_after = bool(http_response.getheader("Retry-After")) if retries.is_retry(method, http_response.status, has_retry_after): - retries = retries.increment( - method, url, response=http_response, _pool=self) + retries = retries.increment(method, url, response=http_response, _pool=self) log.debug("Retry: %s", url) retries.sleep(http_response) return self.urlopen( - method, url, - body=body, headers=headers, - retries=retries, redirect=redirect, - timeout=timeout, **response_kw) + method, + url, + body=body, + headers=headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) return http_response @@ -223,18 +247,18 @@ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): if is_prod_appengine(): # Production GAE handles deflate encoding automatically, but does # not remove the encoding header. - content_encoding = urlfetch_resp.headers.get('content-encoding') + content_encoding = urlfetch_resp.headers.get("content-encoding") - if content_encoding == 'deflate': - del urlfetch_resp.headers['content-encoding'] + if content_encoding == "deflate": + del urlfetch_resp.headers["content-encoding"] - transfer_encoding = urlfetch_resp.headers.get('transfer-encoding') + transfer_encoding = urlfetch_resp.headers.get("transfer-encoding") # We have a full response's content, # so let's make sure we don't report ourselves as chunked data. - if transfer_encoding == 'chunked': + if transfer_encoding == "chunked": encodings = transfer_encoding.split(",") - encodings.remove('chunked') - urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) + encodings.remove("chunked") + urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings) original_response = HTTPResponse( # In order for decoding to work, we must present the content as @@ -262,20 +286,21 @@ def _get_absolute_timeout(self, timeout): warnings.warn( "URLFetch does not support granular timeout settings, " "reverting to total or default URLFetch timeout.", - AppEnginePlatformWarning) + AppEnginePlatformWarning, + ) return timeout.total return timeout def _get_retries(self, retries, redirect): if not isinstance(retries, Retry): - retries = Retry.from_int( - retries, redirect=redirect, default=self.retries) + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if retries.connect or retries.read or retries.redirect: warnings.warn( "URLFetch only supports total retries and does not " "recognize connect, read, or redirect retry parameters.", - AppEnginePlatformWarning) + AppEnginePlatformWarning, + ) return retries diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py new file mode 100644 index 00000000..b2df45dc --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py @@ -0,0 +1,121 @@ +""" +NTLM authenticating pool, contributed by erikcederstran + +Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 +""" +from __future__ import absolute_import + +from logging import getLogger + +from ntlm import ntlm + +from .. 
import HTTPSConnectionPool +from ..packages.six.moves.http_client import HTTPSConnection + +log = getLogger(__name__) + + +class NTLMConnectionPool(HTTPSConnectionPool): + """ + Implements an NTLM authentication version of an urllib3 connection pool + """ + + scheme = "https" + + def __init__(self, user, pw, authurl, *args, **kwargs): + """ + authurl is a random URL on the server that is protected by NTLM. + user is the Windows user, probably in the DOMAIN\\username format. + pw is the password for the user. + """ + super(NTLMConnectionPool, self).__init__(*args, **kwargs) + self.authurl = authurl + self.rawuser = user + user_parts = user.split("\\", 1) + self.domain = user_parts[0].upper() + self.user = user_parts[1] + self.pw = pw + + def _new_conn(self): + # Performs the NTLM handshake that secures the connection. The socket + # must be kept open while requests are performed. + self.num_connections += 1 + log.debug( + "Starting NTLM HTTPS connection no. %d: https://%s%s", + self.num_connections, + self.host, + self.authurl, + ) + + headers = {"Connection": "Keep-Alive"} + req_header = "Authorization" + resp_header = "www-authenticate" + + conn = HTTPSConnection(host=self.host, port=self.port) + + # Send negotiation message + headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE( + self.rawuser + ) + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) + res = conn.getresponse() + reshdr = dict(res.getheaders()) + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", reshdr) + log.debug("Response data: %s [...]", res.read(100)) + + # Remove the reference to the socket, so that it can not be closed by + # the response object (we want to keep the socket open) + res.fp = None + + # Server should respond with a challenge message + auth_header_values = reshdr[resp_header].split(", ") + auth_header_value = None + for s in auth_header_values: + if s[:5] == "NTLM ": + auth_header_value = s[5:] + if auth_header_value is None: + raise Exception( + "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header]) + ) + + # Send authentication message + ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE( + auth_header_value + ) + auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE( + ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags + ) + headers[req_header] = "NTLM %s" % auth_msg + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) + res = conn.getresponse() + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", dict(res.getheaders())) + log.debug("Response data: %s [...]", res.read()[:100]) + if res.status != 200: + if res.status == 401: + raise Exception("Server rejected request: wrong username or password") + raise Exception("Wrong server response: %s %s" % (res.status, res.reason)) + + res.fp = None + log.debug("Connection established") + return conn + + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=3, + redirect=True, + assert_same_host=True, + ): + if headers is None: + headers = {} + headers["Connection"] = "Keep-Alive" + return super(NTLMConnectionPool, self).urlopen( + method, url, body, headers, retries, redirect, assert_same_host + ) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py similarity index 76% rename from 
env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py index abfc3191..bc5c114f 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py @@ -1,27 +1,31 @@ """ -SSL with SNI_-support for Python 2. Follow these instructions if you would -like to verify SSL certificates in Python 2. Note, the default libraries do +TLS with SNI_-support for Python 2. Follow these instructions if you would +like to verify TLS certificates in Python 2. Note, the default libraries do *not* do certificate checking; you need to do additional work to validate certificates yourself. This needs the following packages installed: -* pyOpenSSL (tested with 16.0.0) -* cryptography (minimum 1.3.4, from pyopenssl) -* idna (minimum 2.0, from cryptography) +* `pyOpenSSL`_ (tested with 16.0.0) +* `cryptography`_ (minimum 1.3.4, from pyopenssl) +* `idna`_ (minimum 2.0, from cryptography) However, pyopenssl depends on cryptography, which depends on idna, so while we use all three directly here we end up having relatively few packages required. You can install them with the following command: - pip install pyopenssl cryptography idna +.. code-block:: bash + + $ python -m pip install pyopenssl cryptography idna To activate certificate checking, call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code before you begin making HTTP requests. This can be done in a ``sitecustomize`` module, or at any other time before your application begins using ``urllib3``, -like this:: +like this: + +.. code-block:: python try: import urllib3.contrib.pyopenssl @@ -35,11 +39,11 @@ Activating this module also has the positive side effect of disabling SSL/TLS compression in Python 2 (see `CRIME attack`_). -If you want to configure the default list of supported cipher suites, you can -set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. - .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) +.. _pyopenssl: https://www.pyopenssl.org +.. _cryptography: https://cryptography.io +.. _idna: https://github.com/kjd/idna """ from __future__ import absolute_import @@ -47,6 +51,7 @@ from cryptography import x509 from cryptography.hazmat.backends.openssl import backend as openssl_backend from cryptography.hazmat.backends.openssl.x509 import _Certificate + try: from cryptography.x509 import UnsupportedExtension except ImportError: @@ -54,8 +59,10 @@ class UnsupportedExtension(Exception): pass -from socket import timeout, error as SocketError + from io import BytesIO +from socket import error as SocketError +from socket import timeout try: # Platform-specific: Python 2 from socket import _fileobject @@ -65,13 +72,12 @@ class UnsupportedExtension(Exception): import logging import ssl -from ..packages import six import sys from .. import util +from ..packages import six - -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] # SNI always works. 
HAS_SNI = True @@ -82,25 +88,23 @@ class UnsupportedExtension(Exception): ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, } -if hasattr(ssl, 'PROTOCOL_SSLv3') and hasattr(OpenSSL.SSL, 'SSLv3_METHOD'): +if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"): _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD -if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): +if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"): _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD -if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): +if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"): _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD _stdlib_to_openssl_verify = { ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, - ssl.CERT_REQUIRED: - OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, + ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } -_openssl_to_stdlib_verify = dict( - (v, k) for k, v in _stdlib_to_openssl_verify.items() -) +_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items()) # OpenSSL will only write 16K at a time SSL_WRITE_BLOCKSIZE = 16384 @@ -113,7 +117,7 @@ class UnsupportedExtension(Exception): def inject_into_urllib3(): - 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' + "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support." _validate_dependencies_met() @@ -126,7 +130,7 @@ def inject_into_urllib3(): def extract_from_urllib3(): - 'Undo monkey-patching by :func:`inject_into_urllib3`.' + "Undo monkey-patching by :func:`inject_into_urllib3`." util.SSLContext = orig_util_SSLContext util.ssl_.SSLContext = orig_util_SSLContext @@ -143,17 +147,23 @@ def _validate_dependencies_met(): """ # Method added in `cryptography==1.1`; not available in older versions from cryptography.x509.extensions import Extensions + if getattr(Extensions, "get_extension_for_class", None) is None: - raise ImportError("'cryptography' module missing required functionality. " - "Try upgrading to v1.3.4 or newer.") + raise ImportError( + "'cryptography' module missing required functionality. " + "Try upgrading to v1.3.4 or newer." + ) # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 # attribute is only present on those versions. from OpenSSL.crypto import X509 + x509 = X509() if getattr(x509, "_x509", None) is None: - raise ImportError("'pyOpenSSL' module missing required functionality. " - "Try upgrading to v0.14 or newer.") + raise ImportError( + "'pyOpenSSL' module missing required functionality. " + "Try upgrading to v0.14 or newer." + ) def _dnsname_to_stdlib(name): @@ -169,6 +179,7 @@ def _dnsname_to_stdlib(name): If the name cannot be idna-encoded then we return None signalling that the name given should be skipped. """ + def idna_encode(name): """ Borrowed wholesale from the Python Cryptography Project. It turns out @@ -178,23 +189,23 @@ def idna_encode(name): from pip._vendor import idna try: - for prefix in [u'*.', u'.']: + for prefix in [u"*.", u"."]: if name.startswith(prefix): - name = name[len(prefix):] - return prefix.encode('ascii') + idna.encode(name) + name = name[len(prefix) :] + return prefix.encode("ascii") + idna.encode(name) return idna.encode(name) except idna.core.IDNAError: return None # Don't send IPv6 addresses through the IDNA encoder. 
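The `idna_encode` helper above peels off a leading `*.` or `.` before IDNA-encoding, since `idna.encode` can blow up on wildcard names (per the comment borrowed from the cryptography project). A standalone sketch of the same transformation, with a made-up hostname:

```python
import idna  # the same package cryptography depends on

def encode_dnsname(name):
    # Mirror of idna_encode(): strip "*." or "." first, then IDNA-encode.
    for prefix in (u"*.", u"."):
        if name.startswith(prefix):
            return prefix.encode("ascii") + idna.encode(name[len(prefix):])
    return idna.encode(name)

print(encode_dnsname(u"*.b\u00fccher.example"))  # b'*.xn--bcher-kva.example'
```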
- if ':' in name: + if ":" in name: return name name = idna_encode(name) if name is None: return None elif sys.version_info >= (3, 0): - name = name.decode('utf-8') + name = name.decode("utf-8") return name @@ -213,14 +224,16 @@ def get_subj_alt_name(peer_cert): # We want to find the SAN extension. Ask Cryptography to locate it (it's # faster than looping in Python) try: - ext = cert.extensions.get_extension_for_class( - x509.SubjectAlternativeName - ).value + ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value except x509.ExtensionNotFound: # No such extension, return the empty list. return [] - except (x509.DuplicateExtension, UnsupportedExtension, - x509.UnsupportedGeneralNameType, UnicodeError) as e: + except ( + x509.DuplicateExtension, + UnsupportedExtension, + x509.UnsupportedGeneralNameType, + UnicodeError, + ) as e: # A problem has been found with the quality of the certificate. Assume # no SAN field is present. log.warning( @@ -239,23 +252,23 @@ def get_subj_alt_name(peer_cert): # does with certificates, and so we need to attempt to do the same. # We also want to skip over names which cannot be idna encoded. names = [ - ('DNS', name) for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) + ("DNS", name) + for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) if name is not None ] names.extend( - ('IP Address', str(name)) - for name in ext.get_values_for_type(x509.IPAddress) + ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress) ) return names class WrappedSocket(object): - '''API-compatibility wrapper for Python OpenSSL's Connection-class. + """API-compatibility wrapper for Python OpenSSL's Connection-class. Note: _makefile_refs, _drop() and _reuse() are needed for the garbage collector of pypy. 
- ''' + """ def __init__(self, connection, socket, suppress_ragged_eofs=True): self.connection = connection @@ -278,18 +291,18 @@ def recv(self, *args, **kwargs): try: data = self.connection.recv(*args, **kwargs) except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): - return b'' + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): + return b"" else: raise SocketError(str(e)) except OpenSSL.SSL.ZeroReturnError: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: - return b'' + return b"" else: raise except OpenSSL.SSL.WantReadError: if not util.wait_for_read(self.socket, self.socket.gettimeout()): - raise timeout('The read operation timed out') + raise timeout("The read operation timed out") else: return self.recv(*args, **kwargs) @@ -303,7 +316,7 @@ def recv_into(self, *args, **kwargs): try: return self.connection.recv_into(*args, **kwargs) except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): return 0 else: raise SocketError(str(e)) @@ -314,7 +327,7 @@ def recv_into(self, *args, **kwargs): raise except OpenSSL.SSL.WantReadError: if not util.wait_for_read(self.socket, self.socket.gettimeout()): - raise timeout('The read operation timed out') + raise timeout("The read operation timed out") else: return self.recv_into(*args, **kwargs) @@ -339,7 +352,9 @@ def _send_until_done(self, data): def sendall(self, data): total_sent = 0 while total_sent < len(data): - sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + sent = self._send_until_done( + data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE] + ) total_sent += sent def shutdown(self): @@ -363,15 +378,11 @@ def getpeercert(self, binary_form=False): return x509 if binary_form: - return OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_ASN1, - x509) + return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509) return { - 'subject': ( - (('commonName', x509.get_subject().CN),), - ), - 'subjectAltName': get_subj_alt_name(x509) + "subject": ((("commonName", x509.get_subject().CN),),), + "subjectAltName": get_subj_alt_name(x509), } def version(self): @@ -388,9 +399,12 @@ def _drop(self): if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, mode, bufsize, close=True) + + else: # Platform-specific: Python 3 makefile = backport_makefile @@ -403,6 +417,7 @@ class PyOpenSSLContext(object): for translating the interface of the standard library ``SSLContext`` object to calls into PyOpenSSL. 
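`WrappedSocket.getpeercert()` above returns only a minimal stdlib-compatible dict: the subject common name plus the subject-alternative names collected by `get_subj_alt_name`. An illustration of the returned shape, with example values only:

```python
# Shape of getpeercert() from the PyOpenSSL wrapper (hostnames are illustrative):
peercert = {
    "subject": ((("commonName", "example.com"),),),
    "subjectAltName": [
        ("DNS", "example.com"),
        ("DNS", "www.example.com"),
        ("IP Address", "93.184.216.34"),
    ],
}
```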
""" + def __init__(self, protocol): self.protocol = _openssl_versions[protocol] self._ctx = OpenSSL.SSL.Context(self.protocol) @@ -424,43 +439,52 @@ def verify_mode(self): @verify_mode.setter def verify_mode(self, value): - self._ctx.set_verify( - _stdlib_to_openssl_verify[value], - _verify_callback - ) + self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback) def set_default_verify_paths(self): self._ctx.set_default_verify_paths() def set_ciphers(self, ciphers): if isinstance(ciphers, six.text_type): - ciphers = ciphers.encode('utf-8') + ciphers = ciphers.encode("utf-8") self._ctx.set_cipher_list(ciphers) def load_verify_locations(self, cafile=None, capath=None, cadata=None): if cafile is not None: - cafile = cafile.encode('utf-8') + cafile = cafile.encode("utf-8") if capath is not None: - capath = capath.encode('utf-8') - self._ctx.load_verify_locations(cafile, capath) - if cadata is not None: - self._ctx.load_verify_locations(BytesIO(cadata)) + capath = capath.encode("utf-8") + try: + self._ctx.load_verify_locations(cafile, capath) + if cadata is not None: + self._ctx.load_verify_locations(BytesIO(cadata)) + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("unable to load trusted certificates: %r" % e) def load_cert_chain(self, certfile, keyfile=None, password=None): self._ctx.use_certificate_chain_file(certfile) if password is not None: if not isinstance(password, six.binary_type): - password = password.encode('utf-8') + password = password.encode("utf-8") self._ctx.set_passwd_cb(lambda *_: password) self._ctx.use_privatekey_file(keyfile or certfile) - def wrap_socket(self, sock, server_side=False, - do_handshake_on_connect=True, suppress_ragged_eofs=True, - server_hostname=None): + def set_alpn_protocols(self, protocols): + protocols = [six.ensure_binary(p) for p in protocols] + return self._ctx.set_alpn_protos(protocols) + + def wrap_socket( + self, + sock, + server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname=None, + ): cnx = OpenSSL.SSL.Connection(self._ctx, sock) if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3 - server_hostname = server_hostname.encode('utf-8') + server_hostname = server_hostname.encode("utf-8") if server_hostname is not None: cnx.set_tlsext_host_name(server_hostname) @@ -472,10 +496,10 @@ def wrap_socket(self, sock, server_side=False, cnx.do_handshake() except OpenSSL.SSL.WantReadError: if not util.wait_for_read(sock, sock.gettimeout()): - raise timeout('select timed out') + raise timeout("select timed out") continue except OpenSSL.SSL.Error as e: - raise ssl.SSLError('bad handshake: %r' % e) + raise ssl.SSLError("bad handshake: %r" % e) break return WrappedSocket(cnx, sock) diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py similarity index 84% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py index 4dc48484..8f058f50 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py @@ -29,6 +29,8 @@ that reason, this code should be considered to be covered both by urllib3's license and by oscrypto's: +.. 
code-block:: + Copyright (c) 2015-2016 Will Bond Permission is hereby granted, free of charge, to any person obtaining a @@ -58,16 +60,21 @@ import shutil import socket import ssl +import struct import threading import weakref +from pip._vendor import six + from .. import util -from ._securetransport.bindings import ( - Security, SecurityConst, CoreFoundation -) +from ._securetransport.bindings import CoreFoundation, Security, SecurityConst from ._securetransport.low_level import ( - _assert_no_error, _cert_array_from_pem, _temporary_keychain, - _load_client_cert_chain + _assert_no_error, + _build_tls_unknown_ca_alert, + _cert_array_from_pem, + _create_cfstring_array, + _load_client_cert_chain, + _temporary_keychain, ) try: # Platform-specific: Python 2 @@ -76,7 +83,7 @@ _fileobject = None from ..packages.backports.makefile import backport_makefile -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] # SNI always works HAS_SNI = True @@ -144,31 +151,36 @@ ] # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of -# TLSv1 and a high of TLSv1.3. For everything else, we pin to that version. -# TLSv1 to 1.2 are supported on macOS 10.8+ and TLSv1.3 is macOS 10.13+ +# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. +# TLSv1 to 1.2 are supported on macOS 10.8+ _protocol_to_min_max = { - util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocolMaxSupported), + util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12) } if hasattr(ssl, "PROTOCOL_SSLv2"): _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( - SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2 + SecurityConst.kSSLProtocol2, + SecurityConst.kSSLProtocol2, ) if hasattr(ssl, "PROTOCOL_SSLv3"): _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( - SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3 + SecurityConst.kSSLProtocol3, + SecurityConst.kSSLProtocol3, ) if hasattr(ssl, "PROTOCOL_TLSv1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( - SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1 + SecurityConst.kTLSProtocol1, + SecurityConst.kTLSProtocol1, ) if hasattr(ssl, "PROTOCOL_TLSv1_1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( - SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11 + SecurityConst.kTLSProtocol11, + SecurityConst.kTLSProtocol11, ) if hasattr(ssl, "PROTOCOL_TLSv1_2"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( - SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 + SecurityConst.kTLSProtocol12, + SecurityConst.kTLSProtocol12, ) @@ -218,7 +230,7 @@ def _read_callback(connection_id, data_buffer, data_length_pointer): while read_count < requested_length: if timeout is None or timeout >= 0: if not util.wait_for_read(base_socket, timeout): - raise socket.error(errno.EAGAIN, 'timed out') + raise socket.error(errno.EAGAIN, "timed out") remaining = requested_length - read_count buffer = (ctypes.c_char * remaining).from_address( @@ -274,7 +286,7 @@ def _write_callback(connection_id, data_buffer, data_length_pointer): while sent < bytes_to_write: if timeout is None or timeout >= 0: if not util.wait_for_write(base_socket, timeout): - raise socket.error(errno.EAGAIN, 'timed out') + raise socket.error(errno.EAGAIN, "timed out") chunk_sent = base_socket.send(data) sent += chunk_sent @@ -316,6 +328,7 @@ class WrappedSocket(object): Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage collector of PyPy. 
""" + def __init__(self, socket): self.socket = socket self.context = None @@ -368,19 +381,58 @@ def _set_ciphers(self): ) _assert_no_error(result) + def _set_alpn_protocols(self, protocols): + """ + Sets up the ALPN protocols on the context. + """ + if not protocols: + return + protocols_arr = _create_cfstring_array(protocols) + try: + result = Security.SSLSetALPNProtocols(self.context, protocols_arr) + _assert_no_error(result) + finally: + CoreFoundation.CFRelease(protocols_arr) + def _custom_validate(self, verify, trust_bundle): """ Called when we have set custom validation. We do this in two cases: first, when cert validation is entirely disabled; and second, when using a custom trust DB. + Raises an SSLError if the connection is not trusted. """ # If we disabled cert validation, just say: cool. if not verify: return + successes = ( + SecurityConst.kSecTrustResultUnspecified, + SecurityConst.kSecTrustResultProceed, + ) + try: + trust_result = self._evaluate_trust(trust_bundle) + if trust_result in successes: + return + reason = "error code: %d" % (trust_result,) + except Exception as e: + # Do not trust on error + reason = "exception: %r" % (e,) + + # SecureTransport does not send an alert nor shuts down the connection. + rec = _build_tls_unknown_ca_alert(self.version()) + self.socket.sendall(rec) + # close the connection immediately + # l_onoff = 1, activate linger + # l_linger = 0, linger for 0 seoncds + opts = struct.pack("ii", 1, 0) + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts) + self.close() + raise ssl.SSLError("certificate verify failed, %s" % reason) + + def _evaluate_trust(self, trust_bundle): # We want data in memory, so load it up. if os.path.isfile(trust_bundle): - with open(trust_bundle, 'rb') as f: + with open(trust_bundle, "rb") as f: trust_bundle = f.read() cert_array = None @@ -394,9 +446,7 @@ def _custom_validate(self, verify, trust_bundle): # created for this connection, shove our CAs into it, tell ST to # ignore everything else it knows, and then ask if it can build a # chain. This is a buuuunch of code. - result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) + result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) _assert_no_error(result) if not trust: raise ssl.SSLError("Failed to copy trust reference") @@ -408,9 +458,7 @@ def _custom_validate(self, verify, trust_bundle): _assert_no_error(result) trust_result = Security.SecTrustResultType() - result = Security.SecTrustEvaluate( - trust, ctypes.byref(trust_result) - ) + result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result)) _assert_no_error(result) finally: if trust: @@ -419,26 +467,20 @@ def _custom_validate(self, verify, trust_bundle): if cert_array is not None: CoreFoundation.CFRelease(cert_array) - # Ok, now we can look at what the result was. - successes = ( - SecurityConst.kSecTrustResultUnspecified, - SecurityConst.kSecTrustResultProceed - ) - if trust_result.value not in successes: - raise ssl.SSLError( - "certificate verify failed, error code: %d" % - trust_result.value - ) - - def handshake(self, - server_hostname, - verify, - trust_bundle, - min_version, - max_version, - client_cert, - client_key, - client_key_passphrase): + return trust_result.value + + def handshake( + self, + server_hostname, + verify, + trust_bundle, + min_version, + max_version, + client_cert, + client_key, + client_key_passphrase, + alpn_protocols, + ): """ Actually performs the TLS handshake. 
This is run automatically by wrapped socket, and shouldn't be needed in user code. @@ -468,7 +510,7 @@ def handshake(self, # If we have a server hostname, we should set that too. if server_hostname: if not isinstance(server_hostname, bytes): - server_hostname = server_hostname.encode('utf-8') + server_hostname = server_hostname.encode("utf-8") result = Security.SSLSetPeerDomainName( self.context, server_hostname, len(server_hostname) @@ -478,17 +520,14 @@ def handshake(self, # Setup the ciphers. self._set_ciphers() + # Setup the ALPN protocols. + self._set_alpn_protocols(alpn_protocols) + # Set the minimum and maximum TLS versions. result = Security.SSLSetProtocolVersionMin(self.context, min_version) _assert_no_error(result) - # TLS 1.3 isn't necessarily enabled by the OS - # so we have to detect when we error out and try - # setting TLS 1.3 if it's allowed. kTLSProtocolMaxSupported - # was added in macOS 10.13 along with kTLSProtocol13. result = Security.SSLSetProtocolVersionMax(self.context, max_version) - if result != 0 and max_version == SecurityConst.kTLSProtocolMaxSupported: - result = Security.SSLSetProtocolVersionMax(self.context, SecurityConst.kTLSProtocol12) _assert_no_error(result) # If there's a trust DB, we need to use it. We do that by telling @@ -497,9 +536,7 @@ def handshake(self, # authing in that case. if not verify or trust_bundle is not None: result = Security.SSLSetSessionOption( - self.context, - SecurityConst.kSSLSessionOptionBreakOnServerAuth, - True + self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True ) _assert_no_error(result) @@ -509,9 +546,7 @@ def handshake(self, self._client_cert_chain = _load_client_cert_chain( self._keychain, client_cert, client_key ) - result = Security.SSLSetCertificate( - self.context, self._client_cert_chain - ) + result = Security.SSLSetCertificate(self.context, self._client_cert_chain) _assert_no_error(result) while True: @@ -562,7 +597,7 @@ def recv_into(self, buffer, nbytes=None): # There are some result codes that we want to treat as "not always # errors". Specifically, those are errSSLWouldBlock, # errSSLClosedGraceful, and errSSLClosedNoNotify. - if (result == SecurityConst.errSSLWouldBlock): + if result == SecurityConst.errSSLWouldBlock: # If we didn't process any bytes, then this was just a time out. # However, we can get errSSLWouldBlock in situations when we *did* # read some data, and in those cases we should just read "short" @@ -570,7 +605,10 @@ def recv_into(self, buffer, nbytes=None): if processed_bytes.value == 0: # Timed out, no data read. raise socket.timeout("recv timed out") - elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify): + elif result in ( + SecurityConst.errSSLClosedGraceful, + SecurityConst.errSSLClosedNoNotify, + ): # The remote peer has closed this connection. We should do so as # well. Note that we don't actually return here because in # principle this could actually be fired along with return data. @@ -609,7 +647,7 @@ def send(self, data): def sendall(self, data): total_sent = 0 while total_sent < len(data): - sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]) total_sent += sent def shutdown(self): @@ -656,18 +694,14 @@ def getpeercert(self, binary_form=False): # instead to just flag to urllib3 that it shouldn't do its own hostname # validation when using SecureTransport. 
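The ALPN plumbing threaded through the handshake above is exposed to callers via `SecureTransportContext.set_alpn_protocols`, added later in this diff, which raises `NotImplementedError` unless Security.framework provides `SSLSetALPNProtocols` (macOS 10.12+). A sketch of how a caller might opt in, assuming macOS and pip's vendored import path:

```python
import ssl
from pip._vendor.urllib3.contrib.securetransport import SecureTransportContext

# PROTOCOL_TLS resolves to the (TLSv1, TLSv1.2) range in _protocol_to_min_max.
ctx = SecureTransportContext(ssl.PROTOCOL_TLS)
try:
    ctx.set_alpn_protocols(["h2", "http/1.1"])  # stored as bytes, applied in handshake()
except NotImplementedError:
    pass  # macOS < 10.12: SSLSetALPNProtocols is not available
```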
if not binary_form: - raise ValueError( - "SecureTransport only supports dumping binary certs" - ) + raise ValueError("SecureTransport only supports dumping binary certs") trust = Security.SecTrustRef() certdata = None der_bytes = None try: # Grab the trust store. - result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) + result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) _assert_no_error(result) if not trust: # Probably we haven't done the handshake yet. No biggie. @@ -699,22 +733,24 @@ def getpeercert(self, binary_form=False): def version(self): protocol = Security.SSLProtocol() - result = Security.SSLGetNegotiatedProtocolVersion(self.context, ctypes.byref(protocol)) + result = Security.SSLGetNegotiatedProtocolVersion( + self.context, ctypes.byref(protocol) + ) _assert_no_error(result) if protocol.value == SecurityConst.kTLSProtocol13: - return 'TLSv1.3' + raise ssl.SSLError("SecureTransport does not support TLS 1.3") elif protocol.value == SecurityConst.kTLSProtocol12: - return 'TLSv1.2' + return "TLSv1.2" elif protocol.value == SecurityConst.kTLSProtocol11: - return 'TLSv1.1' + return "TLSv1.1" elif protocol.value == SecurityConst.kTLSProtocol1: - return 'TLSv1' + return "TLSv1" elif protocol.value == SecurityConst.kSSLProtocol3: - return 'SSLv3' + return "SSLv3" elif protocol.value == SecurityConst.kSSLProtocol2: - return 'SSLv2' + return "SSLv2" else: - raise ssl.SSLError('Unknown TLS version: %r' % protocol) + raise ssl.SSLError("Unknown TLS version: %r" % protocol) def _reuse(self): self._makefile_refs += 1 @@ -727,16 +763,21 @@ def _drop(self): if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, mode, bufsize, close=True) + + else: # Platform-specific: Python 3 + def makefile(self, mode="r", buffering=None, *args, **kwargs): # We disable buffering with SecureTransport because it conflicts with # the buffering that ST does internally (see issue #1153 for more). buffering = 0 return backport_makefile(self, mode, buffering, *args, **kwargs) + WrappedSocket.makefile = makefile @@ -746,6 +787,7 @@ class SecureTransportContext(object): interface of the standard library ``SSLContext`` object to calls into SecureTransport. """ + def __init__(self, protocol): self._min_version, self._max_version = _protocol_to_min_max[protocol] self._options = 0 @@ -754,6 +796,7 @@ def __init__(self, protocol): self._client_cert = None self._client_key = None self._client_key_passphrase = None + self._alpn_protocols = None @property def check_hostname(self): @@ -812,16 +855,17 @@ def load_default_certs(self): def set_ciphers(self, ciphers): # For now, we just require the default cipher string. if ciphers != util.ssl_.DEFAULT_CIPHERS: - raise ValueError( - "SecureTransport doesn't support custom cipher strings" - ) + raise ValueError("SecureTransport doesn't support custom cipher strings") def load_verify_locations(self, cafile=None, capath=None, cadata=None): # OK, we only really support cadata and cafile. if capath is not None: - raise ValueError( - "SecureTransport does not support cert directories" - ) + raise ValueError("SecureTransport does not support cert directories") + + # Raise if cafile does not exist. 
+ if cafile is not None: + with open(cafile): + pass self._trust_bundle = cafile or cadata @@ -830,9 +874,26 @@ def load_cert_chain(self, certfile, keyfile=None, password=None): self._client_key = keyfile self._client_cert_passphrase = password - def wrap_socket(self, sock, server_side=False, - do_handshake_on_connect=True, suppress_ragged_eofs=True, - server_hostname=None): + def set_alpn_protocols(self, protocols): + """ + Sets the ALPN protocols that will later be set on the context. + + Raises a NotImplementedError if ALPN is not supported. + """ + if not hasattr(Security, "SSLSetALPNProtocols"): + raise NotImplementedError( + "SecureTransport supports ALPN only in macOS 10.12+" + ) + self._alpn_protocols = [six.ensure_binary(p) for p in protocols] + + def wrap_socket( + self, + sock, + server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname=None, + ): # So, what do we do here? Firstly, we assert some properties. This is a # stripped down shim, so there is some functionality we don't support. # See PEP 543 for the real deal. @@ -846,8 +907,14 @@ def wrap_socket(self, sock, server_side=False, # Now we can handshake wrapped_socket.handshake( - server_hostname, self._verify, self._trust_bundle, - self._min_version, self._max_version, self._client_cert, - self._client_key, self._client_key_passphrase + server_hostname, + self._verify, + self._trust_bundle, + self._min_version, + self._max_version, + self._client_cert, + self._client_key, + self._client_key_passphrase, + self._alpn_protocols, ) return wrapped_socket diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py new file mode 100644 index 00000000..93df8325 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +""" +This module contains provisional support for SOCKS proxies from within +urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and +SOCKS5. To enable its functionality, either install PySocks or install this +module with the ``socks`` extra. + +The SOCKS implementation supports the full range of urllib3 features. It also +supports the following SOCKS features: + +- SOCKS4A (``proxy_url='socks4a://...``) +- SOCKS4 (``proxy_url='socks4://...``) +- SOCKS5 with remote DNS (``proxy_url='socks5h://...``) +- SOCKS5 with local DNS (``proxy_url='socks5://...``) +- Usernames and passwords for the SOCKS proxy + +.. note:: + It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in + your ``proxy_url`` to ensure that DNS resolution is done from the remote + server instead of client-side when connecting to a domain name. + +SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5 +supports IPv4, IPv6, and domain names. + +When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` +will be sent as the ``userid`` section of the SOCKS request: + +.. code-block:: python + + proxy_url="socks4a://@proxy-host" + +When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion +of the ``proxy_url`` will be sent as the username/password to authenticate +with the proxy: + +.. 
code-block:: python + + proxy_url="socks5h://:@proxy-host" + +""" +from __future__ import absolute_import + +try: + import socks +except ImportError: + import warnings + + from ..exceptions import DependencyWarning + + warnings.warn( + ( + "SOCKS support in urllib3 requires the installation of optional " + "dependencies: specifically, PySocks. For more information, see " + "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies" + ), + DependencyWarning, + ) + raise + +from socket import error as SocketError +from socket import timeout as SocketTimeout + +from ..connection import HTTPConnection, HTTPSConnection +from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool +from ..exceptions import ConnectTimeoutError, NewConnectionError +from ..poolmanager import PoolManager +from ..util.url import parse_url + +try: + import ssl +except ImportError: + ssl = None + + +class SOCKSConnection(HTTPConnection): + """ + A plain-text HTTP connection that connects via a SOCKS proxy. + """ + + def __init__(self, *args, **kwargs): + self._socks_options = kwargs.pop("_socks_options") + super(SOCKSConnection, self).__init__(*args, **kwargs) + + def _new_conn(self): + """ + Establish a new connection via the SOCKS proxy. + """ + extra_kw = {} + if self.source_address: + extra_kw["source_address"] = self.source_address + + if self.socket_options: + extra_kw["socket_options"] = self.socket_options + + try: + conn = socks.create_connection( + (self.host, self.port), + proxy_type=self._socks_options["socks_version"], + proxy_addr=self._socks_options["proxy_host"], + proxy_port=self._socks_options["proxy_port"], + proxy_username=self._socks_options["username"], + proxy_password=self._socks_options["password"], + proxy_rdns=self._socks_options["rdns"], + timeout=self.timeout, + **extra_kw + ) + + except SocketTimeout: + raise ConnectTimeoutError( + self, + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), + ) + + except socks.ProxyError as e: + # This is fragile as hell, but it seems to be the only way to raise + # useful errors here. + if e.socket_err: + error = e.socket_err + if isinstance(error, SocketTimeout): + raise ConnectTimeoutError( + self, + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), + ) + else: + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % error + ) + else: + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e + ) + + except SocketError as e: # Defensive: PySocks should catch all these. + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e + ) + + return conn + + +# We don't need to duplicate the Verified/Unverified distinction from +# urllib3/connection.py here because the HTTPSConnection will already have been +# correctly set to either the Verified or Unverified form by that module. This +# means the SOCKSHTTPSConnection will automatically be the correct type. +class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): + pass + + +class SOCKSHTTPConnectionPool(HTTPConnectionPool): + ConnectionCls = SOCKSConnection + + +class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): + ConnectionCls = SOCKSHTTPSConnection + + +class SOCKSProxyManager(PoolManager): + """ + A version of the urllib3 ProxyManager that routes connections via the + defined SOCKS proxy. 
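The module docstring above lists the supported schemes and recommends the remote-DNS variants (`socks5h://`, `socks4a://`). A minimal usage sketch with the standalone urllib3 distribution rather than pip's vendored copy, assuming PySocks is installed and a SOCKS5 proxy listens at the hypothetical `proxy.example:1080`:

```python
from urllib3.contrib.socks import SOCKSProxyManager

# socks5h:// sets rdns=True, so the proxy resolves the target hostname.
proxy = SOCKSProxyManager("socks5h://user:secret@proxy.example:1080")
resp = proxy.request("GET", "https://example.org/")
print(resp.status)
```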
+ """ + + pool_classes_by_scheme = { + "http": SOCKSHTTPConnectionPool, + "https": SOCKSHTTPSConnectionPool, + } + + def __init__( + self, + proxy_url, + username=None, + password=None, + num_pools=10, + headers=None, + **connection_pool_kw + ): + parsed = parse_url(proxy_url) + + if username is None and password is None and parsed.auth is not None: + split = parsed.auth.split(":") + if len(split) == 2: + username, password = split + if parsed.scheme == "socks5": + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = False + elif parsed.scheme == "socks5h": + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = True + elif parsed.scheme == "socks4": + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = False + elif parsed.scheme == "socks4a": + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = True + else: + raise ValueError("Unable to determine SOCKS version from %s" % proxy_url) + + self.proxy_url = proxy_url + + socks_options = { + "socks_version": socks_version, + "proxy_host": parsed.host, + "proxy_port": parsed.port, + "username": username, + "password": password, + "rdns": rdns, + } + connection_pool_kw["_socks_options"] = socks_options + + super(SOCKSProxyManager, self).__init__( + num_pools, headers, **connection_pool_kw + ) + + self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/exceptions.py b/venv/Lib/site-packages/pip/_vendor/urllib3/exceptions.py new file mode 100644 index 00000000..cba6f3f5 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/exceptions.py @@ -0,0 +1,323 @@ +from __future__ import absolute_import + +from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead + +# Base Exceptions + + +class HTTPError(Exception): + """Base exception used by this module.""" + + pass + + +class HTTPWarning(Warning): + """Base warning used by this module.""" + + pass + + +class PoolError(HTTPError): + """Base exception for errors caused within a pool.""" + + def __init__(self, pool, message): + self.pool = pool + HTTPError.__init__(self, "%s: %s" % (pool, message)) + + def __reduce__(self): + # For pickling purposes. + return self.__class__, (None, None) + + +class RequestError(PoolError): + """Base exception for PoolErrors that have associated URLs.""" + + def __init__(self, pool, url, message): + self.url = url + PoolError.__init__(self, pool, message) + + def __reduce__(self): + # For pickling purposes. + return self.__class__, (None, self.url, None) + + +class SSLError(HTTPError): + """Raised when SSL certificate fails in an HTTPS connection.""" + + pass + + +class ProxyError(HTTPError): + """Raised when the connection to a proxy fails.""" + + def __init__(self, message, error, *args): + super(ProxyError, self).__init__(message, error, *args) + self.original_error = error + + +class DecodeError(HTTPError): + """Raised when automatic decoding based on Content-Type fails.""" + + pass + + +class ProtocolError(HTTPError): + """Raised when something unexpected happens mid-request/response.""" + + pass + + +#: Renamed to ProtocolError but aliased for backwards compatibility. +ConnectionError = ProtocolError + + +# Leaf Exceptions + + +class MaxRetryError(RequestError): + """Raised when the maximum number of retries is exceeded. 
+ + :param pool: The connection pool + :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` + :param string url: The requested Url + :param exceptions.Exception reason: The underlying error + + """ + + def __init__(self, pool, url, reason=None): + self.reason = reason + + message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason) + + RequestError.__init__(self, pool, url, message) + + +class HostChangedError(RequestError): + """Raised when an existing pool gets a request for a foreign host.""" + + def __init__(self, pool, url, retries=3): + message = "Tried to open a foreign host with url: %s" % url + RequestError.__init__(self, pool, url, message) + self.retries = retries + + +class TimeoutStateError(HTTPError): + """Raised when passing an invalid state to a timeout""" + + pass + + +class TimeoutError(HTTPError): + """Raised when a socket timeout error occurs. + + Catching this error will catch both :exc:`ReadTimeoutErrors + ` and :exc:`ConnectTimeoutErrors `. + """ + + pass + + +class ReadTimeoutError(TimeoutError, RequestError): + """Raised when a socket timeout occurs while receiving data from a server""" + + pass + + +# This timeout error does not have a URL attached and needs to inherit from the +# base HTTPError +class ConnectTimeoutError(TimeoutError): + """Raised when a socket timeout occurs while connecting to a server""" + + pass + + +class NewConnectionError(ConnectTimeoutError, PoolError): + """Raised when we fail to establish a new connection. Usually ECONNREFUSED.""" + + pass + + +class EmptyPoolError(PoolError): + """Raised when a pool runs out of connections and no more are allowed.""" + + pass + + +class ClosedPoolError(PoolError): + """Raised when a request enters a pool after the pool has been closed.""" + + pass + + +class LocationValueError(ValueError, HTTPError): + """Raised when there is something wrong with a given URL input.""" + + pass + + +class LocationParseError(LocationValueError): + """Raised when get_host or similar fails to parse the URL input.""" + + def __init__(self, location): + message = "Failed to parse: %s" % location + HTTPError.__init__(self, message) + + self.location = location + + +class URLSchemeUnknown(LocationValueError): + """Raised when a URL input has an unsupported scheme.""" + + def __init__(self, scheme): + message = "Not supported URL scheme %s" % scheme + super(URLSchemeUnknown, self).__init__(message) + + self.scheme = scheme + + +class ResponseError(HTTPError): + """Used as a container for an error reason supplied in a MaxRetryError.""" + + GENERIC_ERROR = "too many error responses" + SPECIFIC_ERROR = "too many {status_code} error responses" + + +class SecurityWarning(HTTPWarning): + """Warned when performing security reducing actions""" + + pass + + +class SubjectAltNameWarning(SecurityWarning): + """Warned when connecting to a host with a certificate missing a SAN.""" + + pass + + +class InsecureRequestWarning(SecurityWarning): + """Warned when making an unverified HTTPS request.""" + + pass + + +class SystemTimeWarning(SecurityWarning): + """Warned when system time is suspected to be wrong""" + + pass + + +class InsecurePlatformWarning(SecurityWarning): + """Warned when certain TLS/SSL configuration is not available on a platform.""" + + pass + + +class SNIMissingWarning(HTTPWarning): + """Warned when making a HTTPS request without SNI available.""" + + pass + + +class DependencyWarning(HTTPWarning): + """ + Warned when an attempt is made to import a module with missing optional + dependencies. 
+ """ + + pass + + +class ResponseNotChunked(ProtocolError, ValueError): + """Response needs to be chunked in order to read it as chunks.""" + + pass + + +class BodyNotHttplibCompatible(HTTPError): + """ + Body should be :class:`http.client.HTTPResponse` like + (have an fp attribute which returns raw chunks) for read_chunked(). + """ + + pass + + +class IncompleteRead(HTTPError, httplib_IncompleteRead): + """ + Response length doesn't match expected Content-Length + + Subclass of :class:`http.client.IncompleteRead` to allow int value + for ``partial`` to avoid creating large objects on streamed reads. + """ + + def __init__(self, partial, expected): + super(IncompleteRead, self).__init__(partial, expected) + + def __repr__(self): + return "IncompleteRead(%i bytes read, %i more expected)" % ( + self.partial, + self.expected, + ) + + +class InvalidChunkLength(HTTPError, httplib_IncompleteRead): + """Invalid chunk length in a chunked response.""" + + def __init__(self, response, length): + super(InvalidChunkLength, self).__init__( + response.tell(), response.length_remaining + ) + self.response = response + self.length = length + + def __repr__(self): + return "InvalidChunkLength(got length %r, %i bytes read)" % ( + self.length, + self.partial, + ) + + +class InvalidHeader(HTTPError): + """The header provided was somehow invalid.""" + + pass + + +class ProxySchemeUnknown(AssertionError, URLSchemeUnknown): + """ProxyManager does not support the supplied scheme""" + + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. + + def __init__(self, scheme): + # 'localhost' is here because our URL parser parses + # localhost:8080 -> scheme=localhost, remove if we fix this. + if scheme == "localhost": + scheme = None + if scheme is None: + message = "Proxy URL had no scheme, should start with http:// or https://" + else: + message = ( + "Proxy URL had unsupported scheme %s, should use http:// or https://" + % scheme + ) + super(ProxySchemeUnknown, self).__init__(message) + + +class ProxySchemeUnsupported(ValueError): + """Fetching HTTPS resources through HTTPS proxies is unsupported""" + + pass + + +class HeaderParsingError(HTTPError): + """Raised by assert_header_parsing, but we convert it to a log.warning statement.""" + + def __init__(self, defects, unparsed_data): + message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data) + super(HeaderParsingError, self).__init__(message) + + +class UnrewindableBodyError(HTTPError): + """urllib3 encountered an error when trying to rewind a body""" + + pass diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.py b/venv/Lib/site-packages/pip/_vendor/urllib3/fields.py similarity index 78% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/fields.py index 6a9a5a7f..9d630f49 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/fields.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + import email.utils import mimetypes import re @@ -6,7 +7,7 @@ from .packages import six -def guess_content_type(filename, default='application/octet-stream'): +def guess_content_type(filename, default="application/octet-stream"): """ Guess the "Content-Type" of a file. @@ -26,7 +27,8 @@ def format_header_param_rfc2231(name, value): strategy defined in RFC 2231. Particularly useful for header parameters which might contain - non-ASCII values, like file names. This follows RFC 2388 Section 4.4. 
+ non-ASCII values, like file names. This follows + `RFC 2388 Section 4.4 `_. :param name: The name of the parameter, a string expected to be ASCII only. @@ -41,22 +43,22 @@ def format_header_param_rfc2231(name, value): if not any(ch in value for ch in '"\\\r\n'): result = u'%s="%s"' % (name, value) try: - result.encode('ascii') + result.encode("ascii") except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result - if not six.PY3: # Python 2: - value = value.encode('utf-8') + if six.PY2: # Python 2: + value = value.encode("utf-8") # encode_rfc2231 accepts an encoded string and returns an ascii-encoded # string in Python 2 but accepts and returns unicode strings in Python 3 - value = email.utils.encode_rfc2231(value, 'utf-8') - value = '%s*=%s' % (name, value) + value = email.utils.encode_rfc2231(value, "utf-8") + value = "%s*=%s" % (name, value) - if not six.PY3: # Python 2: - value = value.decode('utf-8') + if six.PY2: # Python 2: + value = value.decode("utf-8") return value @@ -65,27 +67,24 @@ def format_header_param_rfc2231(name, value): u"\u0022": u"%22", # Replace "\" with "\\". u"\u005C": u"\u005C\u005C", - u"\u005C": u"\u005C\u005C", } # All control characters from 0x00 to 0x1F *except* 0x1B. -_HTML5_REPLACEMENTS.update({ - six.unichr(cc): u"%{:02X}".format(cc) - for cc - in range(0x00, 0x1F+1) - if cc not in (0x1B,) -}) +_HTML5_REPLACEMENTS.update( + { + six.unichr(cc): u"%{:02X}".format(cc) + for cc in range(0x00, 0x1F + 1) + if cc not in (0x1B,) + } +) def _replace_multiple(value, needles_and_replacements): - def replacer(match): return needles_and_replacements[match.group(0)] pattern = re.compile( - r"|".join([ - re.escape(needle) for needle in needles_and_replacements.keys() - ]) + r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()]) ) result = pattern.sub(replacer, value) @@ -140,13 +139,15 @@ class RequestField(object): An optional callable that is used to encode and format the headers. By default, this is :func:`format_header_param_html5`. """ + def __init__( - self, - name, - data, - filename=None, - headers=None, - header_formatter=format_header_param_html5): + self, + name, + data, + filename=None, + headers=None, + header_formatter=format_header_param_html5, + ): self._name = name self._filename = filename self.data = data @@ -156,11 +157,7 @@ def __init__( self.header_formatter = header_formatter @classmethod - def from_tuples( - cls, - fieldname, - value, - header_formatter=format_header_param_html5): + def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5): """ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. 
@@ -189,7 +186,8 @@ def from_tuples( data = value request_param = cls( - fieldname, data, filename=filename, header_formatter=header_formatter) + fieldname, data, filename=filename, header_formatter=header_formatter + ) request_param.make_multipart(content_type=content_type) return request_param @@ -227,7 +225,7 @@ def _render_parts(self, header_parts): if value is not None: parts.append(self._render_part(name, value)) - return u'; '.join(parts) + return u"; ".join(parts) def render_headers(self): """ @@ -235,21 +233,22 @@ def render_headers(self): """ lines = [] - sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] + sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"] for sort_key in sort_keys: if self.headers.get(sort_key, False): - lines.append(u'%s: %s' % (sort_key, self.headers[sort_key])) + lines.append(u"%s: %s" % (sort_key, self.headers[sort_key])) for header_name, header_value in self.headers.items(): if header_name not in sort_keys: if header_value: - lines.append(u'%s: %s' % (header_name, header_value)) + lines.append(u"%s: %s" % (header_name, header_value)) - lines.append(u'\r\n') - return u'\r\n'.join(lines) + lines.append(u"\r\n") + return u"\r\n".join(lines) - def make_multipart(self, content_disposition=None, content_type=None, - content_location=None): + def make_multipart( + self, content_disposition=None, content_type=None, content_location=None + ): """ Makes this request field into a multipart request field. @@ -262,11 +261,14 @@ def make_multipart(self, content_disposition=None, content_type=None, The 'Content-Location' of the request body. """ - self.headers['Content-Disposition'] = content_disposition or u'form-data' - self.headers['Content-Disposition'] += u'; '.join([ - u'', self._render_parts( - ((u'name', self._name), (u'filename', self._filename)) - ) - ]) - self.headers['Content-Type'] = content_type - self.headers['Content-Location'] = content_location + self.headers["Content-Disposition"] = content_disposition or u"form-data" + self.headers["Content-Disposition"] += u"; ".join( + [ + u"", + self._render_parts( + ((u"name", self._name), (u"filename", self._filename)) + ), + ] + ) + self.headers["Content-Type"] = content_type + self.headers["Content-Location"] = content_location diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.py b/venv/Lib/site-packages/pip/_vendor/urllib3/filepost.py similarity index 88% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/filepost.py index 78f1e19b..36c9252c 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/filepost.py @@ -1,15 +1,15 @@ from __future__ import absolute_import + import binascii import codecs import os - from io import BytesIO +from .fields import RequestField from .packages import six from .packages.six import b -from .fields import RequestField -writer = codecs.lookup('utf-8')[3] +writer = codecs.lookup("utf-8")[3] def choose_boundary(): @@ -17,8 +17,8 @@ def choose_boundary(): Our embarrassingly-simple replacement for mimetools.choose_boundary. 
""" boundary = binascii.hexlify(os.urandom(16)) - if six.PY3: - boundary = boundary.decode('ascii') + if not six.PY2: + boundary = boundary.decode("ascii") return boundary @@ -76,7 +76,7 @@ def encode_multipart_formdata(fields, boundary=None): boundary = choose_boundary() for field in iter_field_objects(fields): - body.write(b('--%s\r\n' % (boundary))) + body.write(b("--%s\r\n" % (boundary))) writer(body).write(field.render_headers()) data = field.data @@ -89,10 +89,10 @@ def encode_multipart_formdata(fields, boundary=None): else: body.write(data) - body.write(b'\r\n') + body.write(b"\r\n") - body.write(b('--%s--\r\n' % (boundary))) + body.write(b("--%s--\r\n" % (boundary))) - content_type = str('multipart/form-data; boundary=%s' % boundary) + content_type = str("multipart/form-data; boundary=%s" % boundary) return body.getvalue(), content_type diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py new file mode 100644 index 00000000..fce4caa6 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import + +from . import ssl_match_hostname + +__all__ = ("ssl_match_hostname",) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..16e0ea22 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-36.pyc new file mode 100644 index 00000000..8692890e Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..77bd3866 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-36.pyc new file mode 100644 index 00000000..2ce0c812 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py similarity index 84% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py index 740db377..b8fb2154 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py @@ -7,19 +7,17 @@ wants 
to create a "fake" socket object. """ import io - from socket import SocketIO -def backport_makefile(self, mode="r", buffering=None, encoding=None, - errors=None, newline=None): +def backport_makefile( + self, mode="r", buffering=None, encoding=None, errors=None, newline=None +): """ Backport of ``socket.makefile`` from Python 3.5. """ if not set(mode) <= {"r", "w", "b"}: - raise ValueError( - "invalid mode %r (only r, w, b allowed)" % (mode,) - ) + raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) writing = "w" in mode reading = "r" in mode or not writing assert reading or writing diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/six.py b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/six.py new file mode 100644 index 00000000..31442409 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/six.py @@ -0,0 +1,1021 @@ +# Copyright (c) 2010-2019 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.12.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = (str,) + integer_types = (int,) + class_types = (type,) + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = (basestring,) + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + def __len__(self): + return 1 << 31 + + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. 
+ try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + + get_source = get_code # same as get_code + + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute( + "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse" + ), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute( + "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload" + ), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute( + "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest" + ), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule( + "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart" + ), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", 
"SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [MovedModule("winreg", "_winreg")] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute( + "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes" + ), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_parse(__name__ + 
".moves.urllib_parse"), + "moves.urllib_parse", + "moves.urllib.parse", +) + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", + "moves.urllib.error", +) + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + 
"moves.urllib_request", + "moves.urllib.request", +) + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", + "moves.urllib.response", +) + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser") +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = ( + _urllib_robotparser_moved_attributes +) + +_importer._add_module( + Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", + "moves.urllib.robotparser", +) + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ["parse", "error", "request", "response", "robotparser"] + + +_importer._add_module( + Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib" +) + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + + def advance_iterator(it): + return it.next() + + +next = advance_iterator + + +try: + callable = callable +except NameError: + + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, 
None, cls) + + class Iterator(object): + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc( + get_unbound_function, """Get the function out of a possibly unbound function""" +) + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc( + iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary." +) + + +if PY3: + + def b(s): + return s.encode("latin-1") + + def u(s): + return s + + unichr = chr + import struct + + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + + def b(s): + return s + + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape") + + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + + +else: + + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if 
_locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_( + """def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""" + ) + + +if sys.version_info[:2] == (3, 2): + exec_( + """def raise_from(value, from_value): + try: + if from_value is None: + raise value + raise value from from_value + finally: + value = None +""" + ) +elif sys.version_info[:2] > (3, 2): + exec_( + """def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""" + ) +else: + + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. + if ( + isinstance(fp, file) + and isinstance(data, unicode) + and fp.encoding is not None + ): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) + + +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + + def wraps( + wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES, + ): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + + return wrapper + + +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(type): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + + return type.__new__(metaclass, "temporary_class", (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get("__slots__") + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop("__dict__", None) + orig_vars.pop("__weakref__", None) + if hasattr(cls, "__qualname__"): + orig_vars["__qualname__"] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + + return wrapper + + +def ensure_binary(s, encoding="utf-8", errors="strict"): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, text_type): + return s.encode(encoding, errors) + elif isinstance(s, binary_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding="utf-8", errors="strict"): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + if PY2 and isinstance(s, text_type): + s = s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + s = s.decode(encoding, errors) + return s + + +def ensure_text(s, encoding="utf-8", errors="strict"): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if "__str__" not in klass.__dict__: + raise ValueError( + "@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % klass.__name__ + ) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode("utf-8") + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. 
Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if ( + type(importer).__name__ == "_SixMetaPathImporter" + and importer.name == __name__ + ): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py new file mode 100644 index 00000000..6b12fd90 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py @@ -0,0 +1,22 @@ +import sys + +try: + # Our match_hostname function is the same as 3.5's, so we only want to + # import the match_hostname function if it's at least that good. + if sys.version_info < (3, 5): + raise ImportError("Fallback to vendored code") + + from ssl import CertificateError, match_hostname +except ImportError: + try: + # Backport of the function from a pypi module + from backports.ssl_match_hostname import ( # type: ignore + CertificateError, + match_hostname, + ) + except ImportError: + # Our vendored copy + from ._implementation import CertificateError, match_hostname # type: ignore + +# Not needed, but documenting what we provide. +__all__ = ("CertificateError", "match_hostname") diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..35111157 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-36.pyc new file mode 100644 index 00000000..46ed0527 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py similarity index 78% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py index 970cf653..689208d3 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py @@ -11,11 +11,11 @@ # python-3.5) otherwise only do DNS matching. This allows # backports.ssl_match_hostname to continue to be used in Python 2.7. 
try: - from pip._vendor import ipaddress + import ipaddress except ImportError: ipaddress = None -__version__ = '3.5.0.1' +__version__ = "3.5.0.1" class CertificateError(ValueError): @@ -33,18 +33,19 @@ def _dnsname_match(dn, hostname, max_wildcards=1): # Ported from python3-syntax: # leftmost, *remainder = dn.split(r'.') - parts = dn.split(r'.') + parts = dn.split(r".") leftmost = parts[0] remainder = parts[1:] - wildcards = leftmost.count('*') + wildcards = leftmost.count("*") if wildcards > max_wildcards: # Issue #17980: avoid denials of service by refusing more # than one wildcard per fragment. A survey of established # policy among SSL implementations showed it to be a # reasonable choice. raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) + "too many wildcards in certificate DNS name: " + repr(dn) + ) # speed up common case w/o wildcards if not wildcards: @@ -53,11 +54,11 @@ def _dnsname_match(dn, hostname, max_wildcards=1): # RFC 6125, section 6.4.3, subitem 1. # The client SHOULD NOT attempt to match a presented identifier in which # the wildcard character comprises a label other than the left-most label. - if leftmost == '*': + if leftmost == "*": # When '*' is a fragment by itself, it matches a non-empty dotless # fragment. - pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + pats.append("[^.]+") + elif leftmost.startswith("xn--") or hostname.startswith("xn--"): # RFC 6125, section 6.4.3, subitem 3. # The client SHOULD NOT attempt to match a presented identifier # where the wildcard character is embedded within an A-label or @@ -65,21 +66,22 @@ def _dnsname_match(dn, hostname, max_wildcards=1): pats.append(re.escape(leftmost)) else: # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) # add the remaining fragments, ignore any wildcards for frag in remainder: pats.append(re.escape(frag)) - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) return pat.match(hostname) def _to_unicode(obj): if isinstance(obj, str) and sys.version_info < (3,): - obj = unicode(obj, encoding='ascii', errors='strict') + obj = unicode(obj, encoding="ascii", errors="strict") return obj + def _ipaddress_match(ipname, host_ip): """Exact matching of IP addresses. @@ -101,9 +103,11 @@ def match_hostname(cert, hostname): returns nothing. 
""" if not cert: - raise ValueError("empty or no certificate, match_hostname needs a " - "SSL socket or SSL context with either " - "CERT_OPTIONAL or CERT_REQUIRED") + raise ValueError( + "empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED" + ) try: # Divergence from upstream: ipaddress can't handle byte str host_ip = ipaddress.ip_address(_to_unicode(hostname)) @@ -122,35 +126,35 @@ def match_hostname(cert, hostname): else: raise dnsnames = [] - san = cert.get('subjectAltName', ()) + san = cert.get("subjectAltName", ()) for key, value in san: - if key == 'DNS': + if key == "DNS": if host_ip is None and _dnsname_match(value, hostname): return dnsnames.append(value) - elif key == 'IP Address': + elif key == "IP Address": if host_ip is not None and _ipaddress_match(value, host_ip): return dnsnames.append(value) if not dnsnames: # The subject is only checked when there is no dNSName entry # in subjectAltName - for sub in cert.get('subject', ()): + for sub in cert.get("subject", ()): for key, value in sub: # XXX according to RFC 2818, the most specific Common Name # must be used. - if key == 'commonName': + if key == "commonName": if _dnsname_match(value, hostname): return dnsnames.append(value) if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) + raise CertificateError( + "hostname %r " + "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) + ) elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) + raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0])) else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") + raise CertificateError( + "no appropriate commonName or subjectAltName fields were found" + ) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py b/venv/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py new file mode 100644 index 00000000..3a31a285 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py @@ -0,0 +1,536 @@ +from __future__ import absolute_import + +import collections +import functools +import logging + +from ._collections import RecentlyUsedContainer +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme +from .exceptions import ( + LocationValueError, + MaxRetryError, + ProxySchemeUnknown, + ProxySchemeUnsupported, + URLSchemeUnknown, +) +from .packages import six +from .packages.six.moves.urllib.parse import urljoin +from .request import RequestMethods +from .util.proxy import connection_requires_http_tunnel +from .util.retry import Retry +from .util.url import parse_url + +__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] + + +log = logging.getLogger(__name__) + +SSL_KEYWORDS = ( + "key_file", + "cert_file", + "cert_reqs", + "ca_certs", + "ssl_version", + "ca_cert_dir", + "ssl_context", + "key_password", +) + +# All known keyword arguments that could be provided to the pool manager, its +# pools, or the underlying connections. This is used to construct a pool key. 
+_key_fields = ( + "key_scheme", # str + "key_host", # str + "key_port", # int + "key_timeout", # int or float or Timeout + "key_retries", # int or Retry + "key_strict", # bool + "key_block", # bool + "key_source_address", # str + "key_key_file", # str + "key_key_password", # str + "key_cert_file", # str + "key_cert_reqs", # str + "key_ca_certs", # str + "key_ssl_version", # str + "key_ca_cert_dir", # str + "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext + "key_maxsize", # int + "key_headers", # dict + "key__proxy", # parsed proxy url + "key__proxy_headers", # dict + "key__proxy_config", # class + "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples + "key__socks_options", # dict + "key_assert_hostname", # bool or string + "key_assert_fingerprint", # str + "key_server_hostname", # str +) + +#: The namedtuple class used to construct keys for the connection pool. +#: All custom key schemes should include the fields in this key at a minimum. +PoolKey = collections.namedtuple("PoolKey", _key_fields) + +_proxy_config_fields = ("ssl_context", "use_forwarding_for_https") +ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields) + + +def _default_key_normalizer(key_class, request_context): + """ + Create a pool key out of a request context dictionary. + + According to RFC 3986, both the scheme and host are case-insensitive. + Therefore, this function normalizes both before constructing the pool + key for an HTTPS request. If you wish to change this behaviour, provide + alternate callables to ``key_fn_by_scheme``. + + :param key_class: + The class to use when constructing the key. This should be a namedtuple + with the ``scheme`` and ``host`` keys at a minimum. + :type key_class: namedtuple + :param request_context: + A dictionary-like object that contain the context for a request. + :type request_context: dict + + :return: A namedtuple that can be used as a connection pool key. + :rtype: PoolKey + """ + # Since we mutate the dictionary, make a copy first + context = request_context.copy() + context["scheme"] = context["scheme"].lower() + context["host"] = context["host"].lower() + + # These are both dictionaries and need to be transformed into frozensets + for key in ("headers", "_proxy_headers", "_socks_options"): + if key in context and context[key] is not None: + context[key] = frozenset(context[key].items()) + + # The socket_options key may be a list and needs to be transformed into a + # tuple. + socket_opts = context.get("socket_options") + if socket_opts is not None: + context["socket_options"] = tuple(socket_opts) + + # Map the kwargs to the names in the namedtuple - this is necessary since + # namedtuples can't have fields starting with '_'. + for key in list(context.keys()): + context["key_" + key] = context.pop(key) + + # Default to ``None`` for keys missing from the context + for field in key_class._fields: + if field not in context: + context[field] = None + + return key_class(**context) + + +#: A dictionary that maps a scheme to a callable that creates a pool key. +#: This can be used to alter the way pool keys are constructed, if desired. +#: Each PoolManager makes a copy of this dictionary so they can be configured +#: globally here, or individually on the instance. 
+key_fn_by_scheme = { + "http": functools.partial(_default_key_normalizer, PoolKey), + "https": functools.partial(_default_key_normalizer, PoolKey), +} + +pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool} + + +class PoolManager(RequestMethods): + """ + Allows for arbitrary requests while transparently keeping track of + necessary connection pools for you. + + :param num_pools: + Number of connection pools to cache before discarding the least + recently used pool. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param \\**connection_pool_kw: + Additional parameters are used to create fresh + :class:`urllib3.connectionpool.ConnectionPool` instances. + + Example:: + + >>> manager = PoolManager(num_pools=2) + >>> r = manager.request('GET', 'http://google.com/') + >>> r = manager.request('GET', 'http://google.com/mail') + >>> r = manager.request('GET', 'http://yahoo.com/') + >>> len(manager.pools) + 2 + + """ + + proxy = None + proxy_config = None + + def __init__(self, num_pools=10, headers=None, **connection_pool_kw): + RequestMethods.__init__(self, headers) + self.connection_pool_kw = connection_pool_kw + self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) + + # Locally set the pool classes and keys so other PoolManagers can + # override them. + self.pool_classes_by_scheme = pool_classes_by_scheme + self.key_fn_by_scheme = key_fn_by_scheme.copy() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.clear() + # Return False to re-raise any potential exceptions + return False + + def _new_pool(self, scheme, host, port, request_context=None): + """ + Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and + any additional pool keyword arguments. + + If ``request_context`` is provided, it is provided as keyword arguments + to the pool class used. This method is used to actually create the + connection pools handed out by :meth:`connection_from_url` and + companion methods. It is intended to be overridden for customization. + """ + pool_cls = self.pool_classes_by_scheme[scheme] + if request_context is None: + request_context = self.connection_pool_kw.copy() + + # Although the context has everything necessary to create the pool, + # this function has historically only used the scheme, host, and port + # in the positional args. When an API change is acceptable these can + # be removed. + for key in ("scheme", "host", "port"): + request_context.pop(key, None) + + if scheme == "http": + for kw in SSL_KEYWORDS: + request_context.pop(kw, None) + + return pool_cls(host, port, **request_context) + + def clear(self): + """ + Empty our store of pools and direct them all to close. + + This will not affect in-flight connections, but they will not be + re-used after completion. + """ + self.pools.clear() + + def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. + + If ``port`` isn't given, it will be derived from the ``scheme`` using + ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is + provided, it is merged with the instance's ``connection_pool_kw`` + variable and used to create the new connection pool, if one is + needed. 
+ """ + + if not host: + raise LocationValueError("No host specified.") + + request_context = self._merge_pool_kwargs(pool_kwargs) + request_context["scheme"] = scheme or "http" + if not port: + port = port_by_scheme.get(request_context["scheme"].lower(), 80) + request_context["port"] = port + request_context["host"] = host + + return self.connection_from_context(request_context) + + def connection_from_context(self, request_context): + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. + + ``request_context`` must at least contain the ``scheme`` key and its + value must be a key in ``key_fn_by_scheme`` instance variable. + """ + scheme = request_context["scheme"].lower() + pool_key_constructor = self.key_fn_by_scheme.get(scheme) + if not pool_key_constructor: + raise URLSchemeUnknown(scheme) + pool_key = pool_key_constructor(request_context) + + return self.connection_from_pool_key(pool_key, request_context=request_context) + + def connection_from_pool_key(self, pool_key, request_context=None): + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key. + + ``pool_key`` should be a namedtuple that only contains immutable + objects. At a minimum it must have the ``scheme``, ``host``, and + ``port`` fields. + """ + with self.pools.lock: + # If the scheme, host, or port doesn't match existing open + # connections, open a new ConnectionPool. + pool = self.pools.get(pool_key) + if pool: + return pool + + # Make a fresh ConnectionPool of the desired type + scheme = request_context["scheme"] + host = request_context["host"] + port = request_context["port"] + pool = self._new_pool(scheme, host, port, request_context=request_context) + self.pools[pool_key] = pool + + return pool + + def connection_from_url(self, url, pool_kwargs=None): + """ + Similar to :func:`urllib3.connectionpool.connection_from_url`. + + If ``pool_kwargs`` is not provided and a new pool needs to be + constructed, ``self.connection_pool_kw`` is used to initialize + the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` + is provided, it is used instead. Note that if a new pool does not + need to be created for the request, the provided ``pool_kwargs`` are + not used. + """ + u = parse_url(url) + return self.connection_from_host( + u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs + ) + + def _merge_pool_kwargs(self, override): + """ + Merge a dictionary of override values for self.connection_pool_kw. + + This does not modify self.connection_pool_kw and returns a new dict. + Any keys in the override dictionary with a value of ``None`` are + removed from the merged dictionary. + """ + base_pool_kwargs = self.connection_pool_kw.copy() + if override: + for key, value in override.items(): + if value is None: + try: + del base_pool_kwargs[key] + except KeyError: + pass + else: + base_pool_kwargs[key] = value + return base_pool_kwargs + + def _proxy_requires_url_absolute_form(self, parsed_url): + """ + Indicates if the proxy requires the complete destination URL in the + request. Normally this is only needed when not using an HTTP CONNECT + tunnel. + """ + if self.proxy is None: + return False + + return not connection_requires_http_tunnel( + self.proxy, self.proxy_config, parsed_url.scheme + ) + + def _validate_proxy_scheme_url_selection(self, url_scheme): + """ + Validates that were not attempting to do TLS in TLS connections on + Python2 or with unsupported SSL implementations. 
+ """ + if self.proxy is None or url_scheme != "https": + return + + if self.proxy.scheme != "https": + return + + if six.PY2 and not self.proxy_config.use_forwarding_for_https: + raise ProxySchemeUnsupported( + "Contacting HTTPS destinations through HTTPS proxies " + "'via CONNECT tunnels' is not supported in Python 2" + ) + + def urlopen(self, method, url, redirect=True, **kw): + """ + Same as :meth:`urllib3.HTTPConnectionPool.urlopen` + with custom cross-host redirect logic and only sends the request-uri + portion of the ``url``. + + The given ``url`` parameter must be absolute, such that an appropriate + :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. + """ + u = parse_url(url) + self._validate_proxy_scheme_url_selection(u.scheme) + + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) + + kw["assert_same_host"] = False + kw["redirect"] = False + + if "headers" not in kw: + kw["headers"] = self.headers.copy() + + if self._proxy_requires_url_absolute_form(u): + response = conn.urlopen(method, url, **kw) + else: + response = conn.urlopen(method, u.request_uri, **kw) + + redirect_location = redirect and response.get_redirect_location() + if not redirect_location: + return response + + # Support relative URLs for redirecting. + redirect_location = urljoin(url, redirect_location) + + # RFC 7231, Section 6.4.4 + if response.status == 303: + method = "GET" + + retries = kw.get("retries") + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect) + + # Strip headers marked as unsafe to forward to the redirected location. + # Check remove_headers_on_redirect to avoid a potential network call within + # conn.is_same_host() which may use socket.gethostbyname() in the future. + if retries.remove_headers_on_redirect and not conn.is_same_host( + redirect_location + ): + headers = list(six.iterkeys(kw["headers"])) + for header in headers: + if header.lower() in retries.remove_headers_on_redirect: + kw["headers"].pop(header, None) + + try: + retries = retries.increment(method, url, response=response, _pool=conn) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + kw["retries"] = retries + kw["redirect"] = redirect + + log.info("Redirecting %s -> %s", url, redirect_location) + + response.drain_conn() + return self.urlopen(method, redirect_location, **kw) + + +class ProxyManager(PoolManager): + """ + Behaves just like :class:`PoolManager`, but sends all requests through + the defined proxy, using the CONNECT method for HTTPS URLs. + + :param proxy_url: + The URL of the proxy to be used. + + :param proxy_headers: + A dictionary containing headers that will be sent to the proxy. In case + of HTTP they are being sent with each request, while in the + HTTPS/CONNECT case they are sent only once. Could be used for proxy + authentication. + + :param proxy_ssl_context: + The proxy SSL context is used to establish the TLS connection to the + proxy when using HTTPS proxies. + + :param use_forwarding_for_https: + (Defaults to False) If set to True will forward requests to the HTTPS + proxy to be made on behalf of the client instead of creating a TLS + tunnel via the CONNECT method. **Enabling this flag means that request + and response headers and content will be visible from the HTTPS proxy** + whereas tunneling keeps request and response headers and content + private. IP address, target hostname, SNI, and port are always visible + to an HTTPS proxy even when this flag is disabled. 
+ + Example: + >>> proxy = urllib3.ProxyManager('http://localhost:3128/') + >>> r1 = proxy.request('GET', 'http://google.com/') + >>> r2 = proxy.request('GET', 'http://httpbin.org/') + >>> len(proxy.pools) + 1 + >>> r3 = proxy.request('GET', 'https://httpbin.org/') + >>> r4 = proxy.request('GET', 'https://twitter.com/') + >>> len(proxy.pools) + 3 + + """ + + def __init__( + self, + proxy_url, + num_pools=10, + headers=None, + proxy_headers=None, + proxy_ssl_context=None, + use_forwarding_for_https=False, + **connection_pool_kw + ): + + if isinstance(proxy_url, HTTPConnectionPool): + proxy_url = "%s://%s:%i" % ( + proxy_url.scheme, + proxy_url.host, + proxy_url.port, + ) + proxy = parse_url(proxy_url) + + if proxy.scheme not in ("http", "https"): + raise ProxySchemeUnknown(proxy.scheme) + + if not proxy.port: + port = port_by_scheme.get(proxy.scheme, 80) + proxy = proxy._replace(port=port) + + self.proxy = proxy + self.proxy_headers = proxy_headers or {} + self.proxy_ssl_context = proxy_ssl_context + self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https) + + connection_pool_kw["_proxy"] = self.proxy + connection_pool_kw["_proxy_headers"] = self.proxy_headers + connection_pool_kw["_proxy_config"] = self.proxy_config + + super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw) + + def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): + if scheme == "https": + return super(ProxyManager, self).connection_from_host( + host, port, scheme, pool_kwargs=pool_kwargs + ) + + return super(ProxyManager, self).connection_from_host( + self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs + ) + + def _set_proxy_headers(self, url, headers=None): + """ + Sets headers needed by proxies: specifically, the Accept and Host + headers. Only sets headers not provided by the user. + """ + headers_ = {"Accept": "*/*"} + + netloc = parse_url(url).netloc + if netloc: + headers_["Host"] = netloc + + if headers: + headers_.update(headers) + return headers_ + + def urlopen(self, method, url, redirect=True, **kw): + "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." + u = parse_url(url) + if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme): + # For connections using HTTP CONNECT, httplib sets the necessary + # headers on the CONNECT to the proxy. If we're not using CONNECT, + # we'll definitely need to set 'Host' at the very least. + headers = kw.get("headers", self.headers) + kw["headers"] = self._set_proxy_headers(url, headers) + + return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) + + +def proxy_from_url(url, **kw): + return ProxyManager(proxy_url=url, **kw) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/request.py b/venv/Lib/site-packages/pip/_vendor/urllib3/request.py new file mode 100644 index 00000000..398386a5 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/request.py @@ -0,0 +1,170 @@ +from __future__ import absolute_import + +from .filepost import encode_multipart_formdata +from .packages.six.moves.urllib.parse import urlencode + +__all__ = ["RequestMethods"] + + +class RequestMethods(object): + """ + Convenience mixin for classes who implement a :meth:`urlopen` method, such + as :class:`urllib3.HTTPConnectionPool` and + :class:`urllib3.PoolManager`. + + Provides behavior for making common types of HTTP request methods and + decides which type of request field encoding to use. 
+ + Specifically, + + :meth:`.request_encode_url` is for sending requests whose fields are + encoded in the URL (such as GET, HEAD, DELETE). + + :meth:`.request_encode_body` is for sending requests whose fields are + encoded in the *body* of the request using multipart or www-form-urlencoded + (such as for POST, PUT, PATCH). + + :meth:`.request` is for making any kind of request, it will look up the + appropriate encoding format and use one of the above two methods to make + the request. + + Initializer parameters: + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"} + + def __init__(self, headers=None): + self.headers = headers or {} + + def urlopen( + self, + method, + url, + body=None, + headers=None, + encode_multipart=True, + multipart_boundary=None, + **kw + ): # Abstract + raise NotImplementedError( + "Classes extending RequestMethods must implement " + "their own ``urlopen`` method." + ) + + def request(self, method, url, fields=None, headers=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the appropriate encoding of + ``fields`` based on the ``method`` used. + + This is a convenience method that requires the least amount of manual + effort. It can be used in most situations, while still having the + option to drop down to more specific methods when necessary, such as + :meth:`request_encode_url`, :meth:`request_encode_body`, + or even the lowest level :meth:`urlopen`. + """ + method = method.upper() + + urlopen_kw["request_url"] = url + + if method in self._encode_url_methods: + return self.request_encode_url( + method, url, fields=fields, headers=headers, **urlopen_kw + ) + else: + return self.request_encode_body( + method, url, fields=fields, headers=headers, **urlopen_kw + ) + + def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the url. This is useful for request methods like GET, HEAD, DELETE, etc. + """ + if headers is None: + headers = self.headers + + extra_kw = {"headers": headers} + extra_kw.update(urlopen_kw) + + if fields: + url += "?" + urlencode(fields) + + return self.urlopen(method, url, **extra_kw) + + def request_encode_body( + self, + method, + url, + fields=None, + headers=None, + encode_multipart=True, + multipart_boundary=None, + **urlopen_kw + ): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the body. This is useful for request methods like POST, PUT, PATCH, etc. + + When ``encode_multipart=True`` (default), then + :func:`urllib3.encode_multipart_formdata` is used to encode + the payload with the appropriate content type. Otherwise + :func:`urllib.parse.urlencode` is used with the + 'application/x-www-form-urlencoded' content type. + + Multipart encoding must be used when posting files, and it's reasonably + safe to use it in other times too. However, it may break request + signing, such as with OAuth. + + Supports an optional ``fields`` parameter of key/value strings AND + key/filetuple. A filetuple is a (filename, data, MIME type) tuple where + the MIME type is optional. 
For example:: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), + 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimic behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will + be overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request. The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. + """ + if headers is None: + headers = self.headers + + extra_kw = {"headers": {}} + + if fields: + if "body" in urlopen_kw: + raise TypeError( + "request got values for both 'fields' and 'body', can only specify one." + ) + + if encode_multipart: + body, content_type = encode_multipart_formdata( + fields, boundary=multipart_boundary + ) + else: + body, content_type = ( + urlencode(fields), + "application/x-www-form-urlencoded", + ) + + extra_kw["body"] = body + extra_kw["headers"] = {"Content-Type": content_type} + + extra_kw["headers"].update(headers) + extra_kw.update(urlopen_kw) + + return self.urlopen(method, url, **extra_kw) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/response.py b/venv/Lib/site-packages/pip/_vendor/urllib3/response.py new file mode 100644 index 00000000..38693f4f --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/response.py @@ -0,0 +1,821 @@ +from __future__ import absolute_import + +import io +import logging +import zlib +from contextlib import contextmanager +from socket import error as SocketError +from socket import timeout as SocketTimeout + +try: + import brotli +except ImportError: + brotli = None + +from ._collections import HTTPHeaderDict +from .connection import BaseSSLError, HTTPException +from .exceptions import ( + BodyNotHttplibCompatible, + DecodeError, + HTTPError, + IncompleteRead, + InvalidChunkLength, + InvalidHeader, + ProtocolError, + ReadTimeoutError, + ResponseNotChunked, + SSLError, +) +from .packages import six +from .util.response import is_fp_closed, is_response_to_head + +log = logging.getLogger(__name__) + + +class DeflateDecoder(object): + def __init__(self): + self._first_try = True + self._data = b"" + self._obj = zlib.decompressobj() + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + if not data: + return data + + if not self._first_try: + return self._obj.decompress(data) + + self._data += data + try: + decompressed = self._obj.decompress(data) + if decompressed: + self._first_try = False + self._data = None + return decompressed + except zlib.error: + self._first_try = False + self._obj = zlib.decompressobj(-zlib.MAX_WBITS) + try: + return self.decompress(self._data) + finally: + self._data = None + + +class GzipDecoderState(object): + + FIRST_MEMBER = 0 + OTHER_MEMBERS = 1 + SWALLOW_DATA = 2 + + +class GzipDecoder(object): + def __init__(self): + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + self._state = GzipDecoderState.FIRST_MEMBER + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + ret = bytearray() + if self._state == GzipDecoderState.SWALLOW_DATA or not data: + return bytes(ret) + while True: + try: + ret += self._obj.decompress(data) + except zlib.error: + previous_state = self._state 
+ # Ignore data after the first error + self._state = GzipDecoderState.SWALLOW_DATA + if previous_state == GzipDecoderState.OTHER_MEMBERS: + # Allow trailing garbage acceptable in other gzip clients + return bytes(ret) + raise + data = self._obj.unused_data + if not data: + return bytes(ret) + self._state = GzipDecoderState.OTHER_MEMBERS + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + + +if brotli is not None: + + class BrotliDecoder(object): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self): + self._obj = brotli.Decompressor() + if hasattr(self._obj, "decompress"): + self.decompress = self._obj.decompress + else: + self.decompress = self._obj.process + + def flush(self): + if hasattr(self._obj, "flush"): + return self._obj.flush() + return b"" + + +class MultiDecoder(object): + """ + From RFC7231: + If one or more encodings have been applied to a representation, the + sender that applied the encodings MUST generate a Content-Encoding + header field that lists the content codings in the order in which + they were applied. + """ + + def __init__(self, modes): + self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")] + + def flush(self): + return self._decoders[0].flush() + + def decompress(self, data): + for d in reversed(self._decoders): + data = d.decompress(data) + return data + + +def _get_decoder(mode): + if "," in mode: + return MultiDecoder(mode) + + if mode == "gzip": + return GzipDecoder() + + if brotli is not None and mode == "br": + return BrotliDecoder() + + return DeflateDecoder() + + +class HTTPResponse(io.IOBase): + """ + HTTP Response container. + + Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. This + class is also compatible with the Python standard library's :mod:`io` + module, and can hence be treated as a readable object in the context of that + framework. + + Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param original_response: + When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse` + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + + :param retries: + The retries contains the last :class:`~urllib3.util.retry.Retry` that + was used during the request. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. 
+ """ + + CONTENT_DECODERS = ["gzip", "deflate"] + if brotli is not None: + CONTENT_DECODERS += ["br"] + REDIRECT_STATUSES = [301, 302, 303, 307, 308] + + def __init__( + self, + body="", + headers=None, + status=0, + version=0, + reason=None, + strict=0, + preload_content=True, + decode_content=True, + original_response=None, + pool=None, + connection=None, + msg=None, + retries=None, + enforce_content_length=False, + request_method=None, + request_url=None, + auto_close=True, + ): + + if isinstance(headers, HTTPHeaderDict): + self.headers = headers + else: + self.headers = HTTPHeaderDict(headers) + self.status = status + self.version = version + self.reason = reason + self.strict = strict + self.decode_content = decode_content + self.retries = retries + self.enforce_content_length = enforce_content_length + self.auto_close = auto_close + + self._decoder = None + self._body = None + self._fp = None + self._original_response = original_response + self._fp_bytes_read = 0 + self.msg = msg + self._request_url = request_url + + if body and isinstance(body, (six.string_types, bytes)): + self._body = body + + self._pool = pool + self._connection = connection + + if hasattr(body, "read"): + self._fp = body + + # Are we using the chunked-style of transfer encoding? + self.chunked = False + self.chunk_left = None + tr_enc = self.headers.get("transfer-encoding", "").lower() + # Don't incur the penalty of creating a list and then discarding it + encodings = (enc.strip() for enc in tr_enc.split(",")) + if "chunked" in encodings: + self.chunked = True + + # Determine length of response + self.length_remaining = self._init_length(request_method) + + # If requested, preload the body. + if preload_content and not self._body: + self._body = self.read(decode_content=decode_content) + + def get_redirect_location(self): + """ + Should we redirect and where to? + + :returns: Truthy redirect location string if we got a redirect status + code and valid location. ``None`` if redirect status and no + location. ``False`` if not a redirect status code. + """ + if self.status in self.REDIRECT_STATUSES: + return self.headers.get("location") + + return False + + def release_conn(self): + if not self._pool or not self._connection: + return + + self._pool._put_conn(self._connection) + self._connection = None + + def drain_conn(self): + """ + Read and discard any remaining HTTP response data in the response connection. + + Unread data in the HTTPResponse connection blocks the connection from being released back to the pool. + """ + try: + self.read() + except (HTTPError, SocketError, BaseSSLError, HTTPException): + pass + + @property + def data(self): + # For backwards-compat with earlier urllib3 0.4 and earlier. + if self._body: + return self._body + + if self._fp: + return self.read(cache_content=True) + + @property + def connection(self): + return self._connection + + def isclosed(self): + return is_fp_closed(self._fp) + + def tell(self): + """ + Obtain the number of bytes pulled over the wire so far. May differ from + the amount of content returned by :meth:``urllib3.response.HTTPResponse.read`` + if bytes are encoded on the wire (e.g, compressed). + """ + return self._fp_bytes_read + + def _init_length(self, request_method): + """ + Set initial length value for Response content if available. + """ + length = self.headers.get("content-length") + + if length is not None: + if self.chunked: + # This Response will fail with an IncompleteRead if it can't be + # received as chunked. 
This method falls back to attempt reading + # the response before raising an exception. + log.warning( + "Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked." + ) + return None + + try: + # RFC 7230 section 3.3.2 specifies multiple content lengths can + # be sent in a single Content-Length header + # (e.g. Content-Length: 42, 42). This line ensures the values + # are all valid ints and that as long as the `set` length is 1, + # all values are the same. Otherwise, the header is invalid. + lengths = set([int(val) for val in length.split(",")]) + if len(lengths) > 1: + raise InvalidHeader( + "Content-Length contained multiple " + "unmatching values (%s)" % length + ) + length = lengths.pop() + except ValueError: + length = None + else: + if length < 0: + length = None + + # Convert status to int for comparison + # In some cases, httplib returns a status of "_UNKNOWN" + try: + status = int(self.status) + except ValueError: + status = 0 + + # Check for responses that shouldn't include a body + if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD": + length = 0 + + return length + + def _init_decoder(self): + """ + Set-up the _decoder attribute if necessary. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get("content-encoding", "").lower() + if self._decoder is None: + if content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + elif "," in content_encoding: + encodings = [ + e.strip() + for e in content_encoding.split(",") + if e.strip() in self.CONTENT_DECODERS + ] + if len(encodings): + self._decoder = _get_decoder(content_encoding) + + DECODER_ERROR_CLASSES = (IOError, zlib.error) + if brotli is not None: + DECODER_ERROR_CLASSES += (brotli.error,) + + def _decode(self, data, decode_content, flush_decoder): + """ + Decode the data passed in and potentially flush the decoder. + """ + if not decode_content: + return data + + try: + if self._decoder: + data = self._decoder.decompress(data) + except self.DECODER_ERROR_CLASSES as e: + content_encoding = self.headers.get("content-encoding", "").lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, + e, + ) + if flush_decoder: + data += self._flush_decoder() + + return data + + def _flush_decoder(self): + """ + Flushes the decoder. Should only be called if the decoder is actually + being used. + """ + if self._decoder: + buf = self._decoder.decompress(b"") + return buf + self._decoder.flush() + + return b"" + + @contextmanager + def _error_catcher(self): + """ + Catch low-level python exceptions, instead re-raising urllib3 + variants, so that low-level exceptions are not leaked in the + high-level api. + + On exit, release the connection back to the pool. + """ + clean_exit = False + + try: + try: + yield + + except SocketTimeout: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. + raise ReadTimeoutError(self._pool, None, "Read timed out.") + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? 
+ if "read operation timed out" not in str(e): + # SSL errors related to framing/MAC get wrapped and reraised here + raise SSLError(e) + + raise ReadTimeoutError(self._pool, None, "Read timed out.") + + except (HTTPException, SocketError) as e: + # This includes IncompleteRead. + raise ProtocolError("Connection broken: %r" % e, e) + + # If no exception is thrown, we should avoid cleaning up + # unnecessarily. + clean_exit = True + finally: + # If we didn't terminate cleanly, we need to throw away our + # connection. + if not clean_exit: + # The response may not be closed but we're not going to use it + # anymore so close it now to ensure that the connection is + # released back to the pool. + if self._original_response: + self._original_response.close() + + # Closing the response may not actually be sufficient to close + # everything, so if we have a hold of the connection close that + # too. + if self._connection: + self._connection.close() + + # If we hold the original response but it's closed now, we should + # return the connection back to the pool. + if self._original_response and self._original_response.isclosed(): + self.release_conn() + + def read(self, amt=None, decode_content=None, cache_content=False): + """ + Similar to :meth:`http.client.HTTPResponse.read`, but with two additional + parameters: ``decode_content`` and ``cache_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param cache_content: + If True, will save the returned data such that the same result is + returned despite of the state of the underlying file object. This + is useful if you want the ``.data`` property to continue working + after having ``.read()`` the file object. (Overridden if ``amt`` is + set.) + """ + self._init_decoder() + if decode_content is None: + decode_content = self.decode_content + + if self._fp is None: + return + + flush_decoder = False + fp_closed = getattr(self._fp, "closed", False) + + with self._error_catcher(): + if amt is None: + # cStringIO doesn't like amt=None + data = self._fp.read() if not fp_closed else b"" + flush_decoder = True + else: + cache_content = False + data = self._fp.read(amt) if not fp_closed else b"" + if ( + amt != 0 and not data + ): # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. There is + # no harm in redundantly calling close. + self._fp.close() + flush_decoder = True + if self.enforce_content_length and self.length_remaining not in ( + 0, + None, + ): + # This is an edge case that httplib failed to cover due + # to concerns of backward compatibility. We're + # addressing it here to make sure IncompleteRead is + # raised during streaming, so all calls with incorrect + # Content-Length are caught. 
+ raise IncompleteRead(self._fp_bytes_read, self.length_remaining) + + if data: + self._fp_bytes_read += len(data) + if self.length_remaining is not None: + self.length_remaining -= len(data) + + data = self._decode(data, decode_content, flush_decoder) + + if cache_content: + self._body = data + + return data + + def stream(self, amt=2 ** 16, decode_content=None): + """ + A generator wrapper for the read() method. A call will block until + ``amt`` bytes have been read from the connection or until the + connection is closed. + + :param amt: + How much of the content to read. The generator will return up to + much data per iteration, but may return less. This is particularly + likely when using compressed data. However, the empty string will + never be returned. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + if self.chunked and self.supports_chunked_reads(): + for line in self.read_chunked(amt, decode_content=decode_content): + yield line + else: + while not is_fp_closed(self._fp): + data = self.read(amt=amt, decode_content=decode_content) + + if data: + yield data + + @classmethod + def from_httplib(ResponseCls, r, **response_kw): + """ + Given an :class:`http.client.HTTPResponse` instance ``r``, return a + corresponding :class:`urllib3.response.HTTPResponse` object. + + Remaining parameters are passed to the HTTPResponse constructor, along + with ``original_response=r``. + """ + headers = r.msg + + if not isinstance(headers, HTTPHeaderDict): + if six.PY2: + # Python 2.7 + headers = HTTPHeaderDict.from_httplib(headers) + else: + headers = HTTPHeaderDict(headers.items()) + + # HTTPResponse objects in Python 3 don't have a .strict attribute + strict = getattr(r, "strict", 0) + resp = ResponseCls( + body=r, + headers=headers, + status=r.status, + version=r.version, + reason=r.reason, + strict=strict, + original_response=r, + **response_kw + ) + return resp + + # Backwards-compatibility methods for http.client.HTTPResponse + def getheaders(self): + return self.headers + + def getheader(self, name, default=None): + return self.headers.get(name, default) + + # Backwards compatibility for http.cookiejar + def info(self): + return self.headers + + # Overrides from io.IOBase + def close(self): + if not self.closed: + self._fp.close() + + if self._connection: + self._connection.close() + + if not self.auto_close: + io.IOBase.close(self) + + @property + def closed(self): + if not self.auto_close: + return io.IOBase.closed.__get__(self) + elif self._fp is None: + return True + elif hasattr(self._fp, "isclosed"): + return self._fp.isclosed() + elif hasattr(self._fp, "closed"): + return self._fp.closed + else: + return True + + def fileno(self): + if self._fp is None: + raise IOError("HTTPResponse has no file to get a fileno from") + elif hasattr(self._fp, "fileno"): + return self._fp.fileno() + else: + raise IOError( + "The file-like object this HTTPResponse is wrapped " + "around has no file descriptor" + ) + + def flush(self): + if ( + self._fp is not None + and hasattr(self._fp, "flush") + and not getattr(self._fp, "closed", False) + ): + return self._fp.flush() + + def readable(self): + # This method is required for `io` module compatibility. + return True + + def readinto(self, b): + # This method is required for `io` module compatibility. 
+ temp = self.read(len(b)) + if len(temp) == 0: + return 0 + else: + b[: len(temp)] = temp + return len(temp) + + def supports_chunked_reads(self): + """ + Checks if the underlying file-like object looks like a + :class:`http.client.HTTPResponse` object. We do this by testing for + the fp attribute. If it is present we assume it returns raw chunks as + processed by read_chunked(). + """ + return hasattr(self._fp, "fp") + + def _update_chunk_length(self): + # First, we'll figure out length of a chunk and then + # we'll try to read it from socket. + if self.chunk_left is not None: + return + line = self._fp.fp.readline() + line = line.split(b";", 1)[0] + try: + self.chunk_left = int(line, 16) + except ValueError: + # Invalid chunked protocol response, abort. + self.close() + raise InvalidChunkLength(self, line) + + def _handle_chunk(self, amt): + returned_chunk = None + if amt is None: + chunk = self._fp._safe_read(self.chunk_left) + returned_chunk = chunk + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + elif amt < self.chunk_left: + value = self._fp._safe_read(amt) + self.chunk_left = self.chunk_left - amt + returned_chunk = value + elif amt == self.chunk_left: + value = self._fp._safe_read(amt) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + returned_chunk = value + else: # amt > self.chunk_left + returned_chunk = self._fp._safe_read(self.chunk_left) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + return returned_chunk + + def read_chunked(self, amt=None, decode_content=None): + """ + Similar to :meth:`HTTPResponse.read`, but with an additional + parameter: ``decode_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + self._init_decoder() + # FIXME: Rewrite this method and make it a class with a better structured logic. + if not self.chunked: + raise ResponseNotChunked( + "Response is not chunked. " + "Header 'transfer-encoding: chunked' is missing." + ) + if not self.supports_chunked_reads(): + raise BodyNotHttplibCompatible( + "Body should be http.client.HTTPResponse like. " + "It should have have an fp attribute which returns raw chunks." + ) + + with self._error_catcher(): + # Don't bother reading the body of a HEAD request. + if self._original_response and is_response_to_head(self._original_response): + self._original_response.close() + return + + # If a response is already read and closed + # then return immediately. + if self._fp.fp is None: + return + + while True: + self._update_chunk_length() + if self.chunk_left == 0: + break + chunk = self._handle_chunk(amt) + decoded = self._decode( + chunk, decode_content=decode_content, flush_decoder=False + ) + if decoded: + yield decoded + + if decode_content: + # On CPython and PyPy, we should never need to flush the + # decoder. However, on Jython we *might* need to, so + # lets defensively do it anyway. + decoded = self._flush_decoder() + if decoded: # Platform-specific: Jython. + yield decoded + + # Chunk content ends with \r\n: discard it. + while True: + line = self._fp.fp.readline() + if not line: + # Some sites may not end with '\r\n'. + break + if line == b"\r\n": + break + + # We read everything; close the "file". 
+ if self._original_response: + self._original_response.close() + + def geturl(self): + """ + Returns the URL that was the source of this response. + If the request that generated this response redirected, this method + will return the final redirect location. + """ + if self.retries is not None and len(self.retries.history): + return self.retries.history[-1].redirect_location + else: + return self._request_url + + def __iter__(self): + buffer = [] + for chunk in self.stream(decode_content=True): + if b"\n" in chunk: + chunk = chunk.split(b"\n") + yield b"".join(buffer) + chunk[0] + b"\n" + for x in chunk[1:-1]: + yield x + b"\n" + if chunk[-1]: + buffer = [chunk[-1]] + else: + buffer = [] + else: + buffer.append(chunk) + if buffer: + yield b"".join(buffer) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py new file mode 100644 index 00000000..4547fc52 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py @@ -0,0 +1,49 @@ +from __future__ import absolute_import + +# For backwards compatibility, provide imports that used to be here. +from .connection import is_connection_dropped +from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers +from .response import is_fp_closed +from .retry import Retry +from .ssl_ import ( + ALPN_PROTOCOLS, + HAS_SNI, + IS_PYOPENSSL, + IS_SECURETRANSPORT, + PROTOCOL_TLS, + SSLContext, + assert_fingerprint, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) +from .timeout import Timeout, current_time +from .url import Url, get_host, parse_url, split_first +from .wait import wait_for_read, wait_for_write + +__all__ = ( + "HAS_SNI", + "IS_PYOPENSSL", + "IS_SECURETRANSPORT", + "SSLContext", + "PROTOCOL_TLS", + "ALPN_PROTOCOLS", + "Retry", + "Timeout", + "Url", + "assert_fingerprint", + "current_time", + "is_connection_dropped", + "is_fp_closed", + "get_host", + "parse_url", + "make_headers", + "resolve_cert_reqs", + "resolve_ssl_version", + "split_first", + "ssl_wrap_socket", + "wait_for_read", + "wait_for_write", + "SKIP_HEADER", + "SKIPPABLE_HEADERS", +) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..85736c6a Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-36.pyc new file mode 100644 index 00000000..f10da300 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-36.pyc new file mode 100644 index 00000000..eed8c131 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-36.pyc new file mode 100644 index 00000000..60df8f94 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-36.pyc new file mode 100644 index 00000000..11da31b0 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-36.pyc new file mode 100644 index 00000000..15f1fd50 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-36.pyc new file mode 100644 index 00000000..56dcfcef Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-36.pyc new file mode 100644 index 00000000..ebd0ed6c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-36.pyc new file mode 100644 index 00000000..19064ca6 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-36.pyc new file mode 100644 index 00000000..1292a338 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-36.pyc new file mode 100644 index 00000000..e88fdb9b Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-36.pyc new file mode 100644 index 00000000..14884176 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/connection.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/connection.py new file mode 100644 index 00000000..f1e5d37f --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/connection.py @@ -0,0 +1,150 @@ +from __future__ import absolute_import + +import socket + +from pip._vendor.urllib3.exceptions import LocationParseError + +from ..contrib import _appengine_environ +from ..packages import six +from .wait import NoWayToWaitForSocketError, wait_for_read + + +def is_connection_dropped(conn): # Platform-specific + """ + Returns True if the connection is dropped and should be closed. + + :param conn: + :class:`http.client.HTTPConnection` object. 
+ + Note: For platforms like AppEngine, this will always return ``False`` to + let the platform handle connection recycling transparently for us. + """ + sock = getattr(conn, "sock", False) + if sock is False: # Platform-specific: AppEngine + return False + if sock is None: # Connection already closed (such as by httplib). + return True + try: + # Returns True if readable, which here means it's been dropped + return wait_for_read(sock, timeout=0.0) + except NoWayToWaitForSocketError: # Platform-specific: AppEngine + return False + + +# This function is copied from socket.py in the Python 2.7 standard +# library test suite. Added to its signature is only `socket_options`. +# One additional modification is that we avoid binding to IPv6 servers +# discovered in DNS if the system doesn't have IPv6 functionality. +def create_connection( + address, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, + socket_options=None, +): + """Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`socket.getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. + """ + + host, port = address + if host.startswith("["): + host = host.strip("[]") + err = None + + # Using the value from allowed_gai_family() in the context of getaddrinfo lets + # us select whether to work with IPv4 DNS records, IPv6 records, or both. + # The original create_connection function always returns all records. + family = allowed_gai_family() + + try: + host.encode("idna") + except UnicodeError: + return six.raise_from( + LocationParseError(u"'%s', label empty or too long" % host), None + ) + + for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket.socket(af, socktype, proto) + + # If provided, set socket level options before connecting. + _set_socket_options(sock, socket_options) + + if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except socket.error as e: + err = e + if sock is not None: + sock.close() + sock = None + + if err is not None: + raise err + + raise socket.error("getaddrinfo returns an empty list") + + +def _set_socket_options(sock, options): + if options is None: + return + + for opt in options: + sock.setsockopt(*opt) + + +def allowed_gai_family(): + """This function is designed to work in the context of + getaddrinfo, where family=socket.AF_UNSPEC is the default and + will perform a DNS search for both IPv6 and IPv4 records.""" + + family = socket.AF_INET + if HAS_IPV6: + family = socket.AF_UNSPEC + return family + + +def _has_ipv6(host): + """ Returns True if the system can bind an IPv6 address. """ + sock = None + has_ipv6 = False + + # App Engine doesn't support IPV6 sockets and actually has a quota on the + # number of sockets that can be used, so just early out here instead of + # creating a socket needlessly. 
+ # See https://github.com/urllib3/urllib3/issues/1446 + if _appengine_environ.is_appengine_sandbox(): + return False + + if socket.has_ipv6: + # has_ipv6 returns true if cPython was compiled with IPv6 support. + # It does not tell us if the system has IPv6 support enabled. To + # determine that we must bind to an IPv6 address. + # https://github.com/urllib3/urllib3/pull/611 + # https://bugs.python.org/issue658327 + try: + sock = socket.socket(socket.AF_INET6) + sock.bind((host, 0)) + has_ipv6 = True + except Exception: + pass + + if sock: + sock.close() + return has_ipv6 + + +HAS_IPV6 = _has_ipv6("::1") diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/proxy.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/proxy.py new file mode 100644 index 00000000..34f884d5 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/proxy.py @@ -0,0 +1,56 @@ +from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version + + +def connection_requires_http_tunnel( + proxy_url=None, proxy_config=None, destination_scheme=None +): + """ + Returns True if the connection requires an HTTP CONNECT through the proxy. + + :param URL proxy_url: + URL of the proxy. + :param ProxyConfig proxy_config: + Proxy configuration from poolmanager.py + :param str destination_scheme: + The scheme of the destination. (i.e https, http, etc) + """ + # If we're not using a proxy, no way to use a tunnel. + if proxy_url is None: + return False + + # HTTP destinations never require tunneling, we always forward. + if destination_scheme == "http": + return False + + # Support for forwarding with HTTPS proxies and HTTPS destinations. + if ( + proxy_url.scheme == "https" + and proxy_config + and proxy_config.use_forwarding_for_https + ): + return False + + # Otherwise always use a tunnel. + return True + + +def create_proxy_ssl_context( + ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None +): + """ + Generates a default proxy ssl context if one hasn't been provided by the + user. + """ + ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(ssl_version), + cert_reqs=resolve_cert_reqs(cert_reqs), + ) + if ( + not ca_certs + and not ca_cert_dir + and not ca_cert_data + and hasattr(ssl_context, "load_default_certs") + ): + ssl_context.load_default_certs() + + return ssl_context diff --git a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/queue.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/queue.py similarity index 99% rename from env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/queue.py rename to venv/Lib/site-packages/pip/_vendor/urllib3/util/queue.py index d3d379a1..41784104 100644 --- a/env/lib/python2.7/site-packages/pip/_vendor/urllib3/util/queue.py +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/queue.py @@ -1,4 +1,5 @@ import collections + from ..packages import six from ..packages.six.moves import queue diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/request.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/request.py new file mode 100644 index 00000000..25103383 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/request.py @@ -0,0 +1,143 @@ +from __future__ import absolute_import + +from base64 import b64encode + +from ..exceptions import UnrewindableBodyError +from ..packages.six import b, integer_types + +# Pass as a value within ``headers`` to skip +# emitting some HTTP headers that are added automatically. 
+# The only headers that are supported are ``Accept-Encoding``, +# ``Host``, and ``User-Agent``. +SKIP_HEADER = "@@@SKIP_HEADER@@@" +SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"]) + +ACCEPT_ENCODING = "gzip,deflate" +try: + import brotli as _unused_module_brotli # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",br" + +_FAILEDTELL = object() + + +def make_headers( + keep_alive=None, + accept_encoding=None, + user_agent=None, + basic_auth=None, + proxy_basic_auth=None, + disable_cache=None, +): + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. + + :param proxy_basic_auth: + Colon-separated username:password string for 'proxy-authorization: basic ...' + auth header. + + :param disable_cache: + If ``True``, adds 'cache-control: no-cache' header. + + Example:: + + >>> make_headers(keep_alive=True, user_agent="Batman/1.0") + {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + >>> make_headers(accept_encoding=True) + {'accept-encoding': 'gzip,deflate'} + """ + headers = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ",".join(accept_encoding) + else: + accept_encoding = ACCEPT_ENCODING + headers["accept-encoding"] = accept_encoding + + if user_agent: + headers["user-agent"] = user_agent + + if keep_alive: + headers["connection"] = "keep-alive" + + if basic_auth: + headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8") + + if proxy_basic_auth: + headers["proxy-authorization"] = "Basic " + b64encode( + b(proxy_basic_auth) + ).decode("utf-8") + + if disable_cache: + headers["cache-control"] = "no-cache" + + return headers + + +def set_file_position(body, pos): + """ + If a position is provided, move file to that point. + Otherwise, we'll attempt to record a position for future use. + """ + if pos is not None: + rewind_body(body, pos) + elif getattr(body, "tell", None) is not None: + try: + pos = body.tell() + except (IOError, OSError): + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body. + pos = _FAILEDTELL + + return pos + + +def rewind_body(body, body_pos): + """ + Attempt to rewind body to a certain position. + Primarily used for request redirects and retries. + + :param body: + File-like object that supports seek. + + :param int pos: + Position to seek to in file. + """ + body_seek = getattr(body, "seek", None) + if body_seek is not None and isinstance(body_pos, integer_types): + try: + body_seek(body_pos) + except (IOError, OSError): + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect/retry." + ) + elif body_pos is _FAILEDTELL: + raise UnrewindableBodyError( + "Unable to record file position for rewinding " + "request body during a redirect/retry." + ) + else: + raise ValueError( + "body_pos must be of type integer, instead it was %s." 
% type(body_pos) + ) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/response.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/response.py new file mode 100644 index 00000000..5ea609cc --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/response.py @@ -0,0 +1,107 @@ +from __future__ import absolute_import + +from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect + +from ..exceptions import HeaderParsingError +from ..packages.six.moves import http_client as httplib + + +def is_fp_closed(obj): + """ + Checks whether a given file-like object is closed. + + :param obj: + The file-like object to check. + """ + + try: + # Check `isclosed()` first, in case Python3 doesn't set `closed`. + # GH Issue #928 + return obj.isclosed() + except AttributeError: + pass + + try: + # Check via the official file-like-object way. + return obj.closed + except AttributeError: + pass + + try: + # Check if the object is a container for another file-like object that + # gets released on exhaustion (e.g. HTTPResponse). + return obj.fp is None + except AttributeError: + pass + + raise ValueError("Unable to determine whether fp is closed.") + + +def assert_header_parsing(headers): + """ + Asserts whether all headers have been successfully parsed. + Extracts encountered errors from the result of parsing headers. + + Only works on Python 3. + + :param http.client.HTTPMessage headers: Headers to verify. + + :raises urllib3.exceptions.HeaderParsingError: + If parsing errors are found. + """ + + # This will fail silently if we pass in the wrong kind of parameter. + # To make debugging easier add an explicit check. + if not isinstance(headers, httplib.HTTPMessage): + raise TypeError("expected httplib.Message, got {0}.".format(type(headers))) + + defects = getattr(headers, "defects", None) + get_payload = getattr(headers, "get_payload", None) + + unparsed_data = None + if get_payload: + # get_payload is actually email.message.Message.get_payload; + # we're only interested in the result if it's not a multipart message + if not headers.is_multipart(): + payload = get_payload() + + if isinstance(payload, (bytes, str)): + unparsed_data = payload + if defects: + # httplib is assuming a response body is available + # when parsing headers even when httplib only sends + # header data to parse_headers() This results in + # defects on multipart responses in particular. + # See: https://github.com/urllib3/urllib3/issues/800 + + # So we ignore the following defects: + # - StartBoundaryNotFoundDefect: + # The claimed start boundary was never found. + # - MultipartInvariantViolationDefect: + # A message claimed to be a multipart but no subparts were found. + defects = [ + defect + for defect in defects + if not isinstance( + defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect) + ) + ] + + if defects or unparsed_data: + raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) + + +def is_response_to_head(response): + """ + Checks whether the request of a response has been a HEAD-request. + Handles the quirks of AppEngine. + + :param http.client.HTTPResponse response: + Response to check if the originating request + used 'HEAD' as a method. + """ + # FIXME: Can we do this somehow without accessing private httplib _method? 
+ method = response._method + if isinstance(method, int): # Platform-specific: Appengine + return method == 3 + return method.upper() == "HEAD" diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/retry.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/retry.py new file mode 100644 index 00000000..d25a41b4 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/retry.py @@ -0,0 +1,602 @@ +from __future__ import absolute_import + +import email +import logging +import re +import time +import warnings +from collections import namedtuple +from itertools import takewhile + +from ..exceptions import ( + ConnectTimeoutError, + InvalidHeader, + MaxRetryError, + ProtocolError, + ProxyError, + ReadTimeoutError, + ResponseError, +) +from ..packages import six + +log = logging.getLogger(__name__) + + +# Data structure for representing the metadata of requests that result in a retry. +RequestHistory = namedtuple( + "RequestHistory", ["method", "url", "error", "status", "redirect_location"] +) + + +# TODO: In v2 we can remove this sentinel and metaclass with deprecated options. +_Default = object() + + +class _RetryMeta(type): + @property + def DEFAULT_METHOD_WHITELIST(cls): + warnings.warn( + "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead", + DeprecationWarning, + ) + return cls.DEFAULT_ALLOWED_METHODS + + @DEFAULT_METHOD_WHITELIST.setter + def DEFAULT_METHOD_WHITELIST(cls, value): + warnings.warn( + "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead", + DeprecationWarning, + ) + cls.DEFAULT_ALLOWED_METHODS = value + + @property + def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls): + warnings.warn( + "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead", + DeprecationWarning, + ) + return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT + + @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter + def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value): + warnings.warn( + "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead", + DeprecationWarning, + ) + cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value + + +@six.add_metaclass(_RetryMeta) +class Retry(object): + """Retry configuration. + + Each retry attempt will create a new Retry object with updated values, so + they can be safely reused. + + Retries can be defined as a default for a pool:: + + retries = Retry(connect=5, read=2, redirect=5) + http = PoolManager(retries=retries) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', retries=Retry(10)) + + Retries can be disabled by passing ``False``:: + + response = http.request('GET', 'http://example.com/', retries=False) + + Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless + retries are disabled, in which case the causing exception will be raised. + + :param int total: + Total number of retries to allow. Takes precedence over other counts. + + Set to ``None`` to remove this constraint and fall back on other + counts. + + Set to ``0`` to fail on the first retry. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int connect: + How many connection-related errors to retry on. 
+ + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. + + Set to ``0`` to fail on the first retry of this type. + + :param int read: + How many times to retry on read errors. + + These errors are raised after the request was sent to the server, so the + request may have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + :param int redirect: + How many redirects to perform. Limit this to avoid infinite redirect + loops. + + A redirect is a HTTP response with a status code 301, 302, 303, 307 or + 308. + + Set to ``0`` to fail on the first retry of this type. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int status: + How many times to retry on bad status codes. + + These are retries made on responses, where status code matches + ``status_forcelist``. + + Set to ``0`` to fail on the first retry of this type. + + :param int other: + How many times to retry on other errors. + + Other errors are errors that are not connect, read, redirect or status errors. + These errors might be raised after the request was sent to the server, so the + request might have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + If ``total`` is not set, it's a good idea to set this to 0 to account + for unexpected edge cases and avoid infinite retry loops. + + :param iterable allowed_methods: + Set of uppercased HTTP method verbs that we should retry on. + + By default, we only retry on methods which are considered to be + idempotent (multiple requests with the same parameters end with the + same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`. + + Set to a ``False`` value to retry on any verb. + + .. warning:: + + Previously this parameter was named ``method_whitelist``, that + usage is deprecated in v1.26.0 and will be removed in v2.0. + + :param iterable status_forcelist: + A set of integer HTTP status codes that we should force a retry on. + A retry is initiated if the request method is in ``allowed_methods`` + and the response status code is in ``status_forcelist``. + + By default, this is disabled with ``None``. + + :param float backoff_factor: + A backoff factor to apply between attempts after the second try + (most errors are resolved immediately by a second try without a + delay). urllib3 will sleep for:: + + {backoff factor} * (2 ** ({number of total retries} - 1)) + + seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep + for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer + than :attr:`Retry.BACKOFF_MAX`. + + By default, backoff is disabled (set to 0). + + :param bool raise_on_redirect: Whether, if the number of redirects is + exhausted, to raise a MaxRetryError, or to return a response with a + response code in the 3xx range. + + :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: + whether we should raise an exception, or return a response, + if status falls in ``status_forcelist`` range and retries have + been exhausted. + + :param tuple history: The history of the request encountered during + each call to :meth:`~Retry.increment`. The list is in the order + the requests occurred. Each list item is of class :class:`RequestHistory`. + + :param bool respect_retry_after_header: + Whether to respect Retry-After header on status codes defined as + :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. 
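# --- Editor's aside: illustrative sketch, not part of the vendored urllib3 diff. ---
# The Retry docstring above covers per-pool and per-request configuration and the
# backoff formula {backoff factor} * (2 ** ({number of total retries} - 1)).
# Assuming the vendored copy is importable as pip._vendor.urllib3 (the path this
# diff adds it under), a minimal usage sketch:
from pip._vendor.urllib3 import PoolManager
from pip._vendor.urllib3.util.retry import Retry

# Retry up to 5 times, only on idempotent methods (the default allowed_methods),
# and force a retry when the server answers with HTTP 503.
retries = Retry(total=5, backoff_factor=0.1, status_forcelist=[503])
http = PoolManager(retries=retries)
response = http.request("GET", "http://example.com/")
# With backoff_factor=0.1 the sleeps between consecutive failed attempts are
# 0.0s, 0.2s, 0.4s, 0.8s, ..., capped at Retry.BACKOFF_MAX (120 seconds).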
+ + :param iterable remove_headers_on_redirect: + Sequence of headers to remove from the request when a response + indicating a redirect is returned before firing off the redirected + request. + """ + + #: Default methods to be used for ``allowed_methods`` + DEFAULT_ALLOWED_METHODS = frozenset( + ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] + ) + + #: Default status codes to be used for ``status_forcelist`` + RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) + + #: Default headers to be used for ``remove_headers_on_redirect`` + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) + + #: Maximum backoff time. + BACKOFF_MAX = 120 + + def __init__( + self, + total=10, + connect=None, + read=None, + redirect=None, + status=None, + other=None, + allowed_methods=_Default, + status_forcelist=None, + backoff_factor=0, + raise_on_redirect=True, + raise_on_status=True, + history=None, + respect_retry_after_header=True, + remove_headers_on_redirect=_Default, + # TODO: Deprecated, remove in v2.0 + method_whitelist=_Default, + ): + + if method_whitelist is not _Default: + if allowed_methods is not _Default: + raise ValueError( + "Using both 'allowed_methods' and " + "'method_whitelist' together is not allowed. " + "Instead only use 'allowed_methods'" + ) + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + stacklevel=2, + ) + allowed_methods = method_whitelist + if allowed_methods is _Default: + allowed_methods = self.DEFAULT_ALLOWED_METHODS + if remove_headers_on_redirect is _Default: + remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT + + self.total = total + self.connect = connect + self.read = read + self.status = status + self.other = other + + if redirect is False or total is False: + redirect = 0 + raise_on_redirect = False + + self.redirect = redirect + self.status_forcelist = status_forcelist or set() + self.allowed_methods = allowed_methods + self.backoff_factor = backoff_factor + self.raise_on_redirect = raise_on_redirect + self.raise_on_status = raise_on_status + self.history = history or tuple() + self.respect_retry_after_header = respect_retry_after_header + self.remove_headers_on_redirect = frozenset( + [h.lower() for h in remove_headers_on_redirect] + ) + + def new(self, **kw): + params = dict( + total=self.total, + connect=self.connect, + read=self.read, + redirect=self.redirect, + status=self.status, + other=self.other, + status_forcelist=self.status_forcelist, + backoff_factor=self.backoff_factor, + raise_on_redirect=self.raise_on_redirect, + raise_on_status=self.raise_on_status, + history=self.history, + remove_headers_on_redirect=self.remove_headers_on_redirect, + respect_retry_after_header=self.respect_retry_after_header, + ) + + # TODO: If already given in **kw we use what's given to us + # If not given we need to figure out what to pass. We decide + # based on whether our class has the 'method_whitelist' property + # and if so we pass the deprecated 'method_whitelist' otherwise + # we use 'allowed_methods'. Remove in v2.0 + if "method_whitelist" not in kw and "allowed_methods" not in kw: + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. 
Use 'allowed_methods' instead", + DeprecationWarning, + ) + params["method_whitelist"] = self.allowed_methods + else: + params["allowed_methods"] = self.allowed_methods + + params.update(kw) + return type(self)(**params) + + @classmethod + def from_int(cls, retries, redirect=True, default=None): + """ Backwards-compatibility for the old retries format.""" + if retries is None: + retries = default if default is not None else cls.DEFAULT + + if isinstance(retries, Retry): + return retries + + redirect = bool(redirect) and None + new_retries = cls(retries, redirect=redirect) + log.debug("Converted retries value: %r -> %r", retries, new_retries) + return new_retries + + def get_backoff_time(self): + """Formula for computing the current backoff + + :rtype: float + """ + # We want to consider only the last consecutive errors sequence (Ignore redirects). + consecutive_errors_len = len( + list( + takewhile(lambda x: x.redirect_location is None, reversed(self.history)) + ) + ) + if consecutive_errors_len <= 1: + return 0 + + backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) + return min(self.BACKOFF_MAX, backoff_value) + + def parse_retry_after(self, retry_after): + # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 + if re.match(r"^\s*[0-9]+\s*$", retry_after): + seconds = int(retry_after) + else: + retry_date_tuple = email.utils.parsedate_tz(retry_after) + if retry_date_tuple is None: + raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) + if retry_date_tuple[9] is None: # Python 2 + # Assume UTC if no timezone was specified + # On Python2.7, parsedate_tz returns None for a timezone offset + # instead of 0 if no timezone is given, where mktime_tz treats + # a None timezone offset as local time. + retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:] + + retry_date = email.utils.mktime_tz(retry_date_tuple) + seconds = retry_date - time.time() + + if seconds < 0: + seconds = 0 + + return seconds + + def get_retry_after(self, response): + """ Get the value of Retry-After in seconds. """ + + retry_after = response.getheader("Retry-After") + + if retry_after is None: + return None + + return self.parse_retry_after(retry_after) + + def sleep_for_retry(self, response=None): + retry_after = self.get_retry_after(response) + if retry_after: + time.sleep(retry_after) + return True + + return False + + def _sleep_backoff(self): + backoff = self.get_backoff_time() + if backoff <= 0: + return + time.sleep(backoff) + + def sleep(self, response=None): + """Sleep between retry attempts. + + This method will respect a server's ``Retry-After`` response header + and sleep the duration of the time requested. If that is not present, it + will use an exponential backoff. By default, the backoff factor is 0 and + this method will return immediately. + """ + + if self.respect_retry_after_header and response: + slept = self.sleep_for_retry(response) + if slept: + return + + self._sleep_backoff() + + def _is_connection_error(self, err): + """Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + if isinstance(err, ProxyError): + err = err.original_error + return isinstance(err, ConnectTimeoutError) + + def _is_read_error(self, err): + """Errors that occur after the request has been started, so we should + assume that the server began processing it. 
+ """ + return isinstance(err, (ReadTimeoutError, ProtocolError)) + + def _is_method_retryable(self, method): + """Checks if a given HTTP method should be retried upon, depending if + it is included in the allowed_methods + """ + # TODO: For now favor if the Retry implementation sets its own method_whitelist + # property outside of our constructor to avoid breaking custom implementations. + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + ) + allowed_methods = self.method_whitelist + else: + allowed_methods = self.allowed_methods + + if allowed_methods and method.upper() not in allowed_methods: + return False + return True + + def is_retry(self, method, status_code, has_retry_after=False): + """Is this method/status code retryable? (Based on allowlists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + if not self._is_method_retryable(method): + return False + + if self.status_forcelist and status_code in self.status_forcelist: + return True + + return ( + self.total + and self.respect_retry_after_header + and has_retry_after + and (status_code in self.RETRY_AFTER_STATUS_CODES) + ) + + def is_exhausted(self): + """ Are we out of retries? """ + retry_counts = ( + self.total, + self.connect, + self.read, + self.redirect, + self.status, + self.other, + ) + retry_counts = list(filter(None, retry_counts)) + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment( + self, + method=None, + url=None, + response=None, + error=None, + _pool=None, + _stacktrace=None, + ): + """Return a new Retry object with incremented retry counters. + + :param response: A response object, or None, if the server did not + return a response. + :type response: :class:`~urllib3.response.HTTPResponse` + :param Exception error: An error encountered during the request, or + None if the response was received successfully. + + :return: A new ``Retry`` object. + """ + if self.total is False and error: + # Disabled, indicate to re-raise the error. + raise six.reraise(type(error), error, _stacktrace) + + total = self.total + if total is not None: + total -= 1 + + connect = self.connect + read = self.read + redirect = self.redirect + status_count = self.status + other = self.other + cause = "unknown" + status = None + redirect_location = None + + if error and self._is_connection_error(error): + # Connect retry? + if connect is False: + raise six.reraise(type(error), error, _stacktrace) + elif connect is not None: + connect -= 1 + + elif error and self._is_read_error(error): + # Read retry? + if read is False or not self._is_method_retryable(method): + raise six.reraise(type(error), error, _stacktrace) + elif read is not None: + read -= 1 + + elif error: + # Other retry? + if other is not None: + other -= 1 + + elif response and response.get_redirect_location(): + # Redirect retry? 
+ if redirect is not None: + redirect -= 1 + cause = "too many redirects" + redirect_location = response.get_redirect_location() + status = response.status + + else: + # Incrementing because of a server error like a 500 in + # status_forcelist and the given method is in the allowed_methods + cause = ResponseError.GENERIC_ERROR + if response and response.status: + if status_count is not None: + status_count -= 1 + cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status) + status = response.status + + history = self.history + ( + RequestHistory(method, url, error, status, redirect_location), + ) + + new_retry = self.new( + total=total, + connect=connect, + read=read, + redirect=redirect, + status=status_count, + other=other, + history=history, + ) + + if new_retry.is_exhausted(): + raise MaxRetryError(_pool, url, error or ResponseError(cause)) + + log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) + + return new_retry + + def __repr__(self): + return ( + "{cls.__name__}(total={self.total}, connect={self.connect}, " + "read={self.read}, redirect={self.redirect}, status={self.status})" + ).format(cls=type(self), self=self) + + def __getattr__(self, item): + if item == "method_whitelist": + # TODO: Remove this deprecated alias in v2.0 + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + ) + return self.allowed_methods + try: + return getattr(super(Retry, self), item) + except AttributeError: + return getattr(Retry, item) + + +# For backwards compatibility (equivalent to pre-v1.9): +Retry.DEFAULT = Retry(3) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py new file mode 100644 index 00000000..763da82b --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py @@ -0,0 +1,474 @@ +from __future__ import absolute_import + +import hmac +import os +import sys +import warnings +from binascii import hexlify, unhexlify +from hashlib import md5, sha1, sha256 + +from ..exceptions import ( + InsecurePlatformWarning, + ProxySchemeUnsupported, + SNIMissingWarning, + SSLError, +) +from ..packages import six +from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE + +SSLContext = None +SSLTransport = None +HAS_SNI = False +IS_PYOPENSSL = False +IS_SECURETRANSPORT = False +ALPN_PROTOCOLS = ["http/1.1"] + +# Maps the length of a digest to a possible hash function producing this digest +HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} + + +def _const_compare_digest_backport(a, b): + """ + Compare two digests of equal length in constant time. + + The digests must be of type str/bytes. + Returns True if the digests match, and False otherwise. + """ + result = abs(len(a) - len(b)) + for left, right in zip(bytearray(a), bytearray(b)): + result |= left ^ right + return result == 0 + + +_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport) + +try: # Test for SSL features + import ssl + from ssl import CERT_REQUIRED, wrap_socket +except ImportError: + pass + +try: + from ssl import HAS_SNI # Has SNI? 
+except ImportError: + pass + +try: + from .ssltransport import SSLTransport +except ImportError: + pass + + +try: # Platform-specific: Python 3.6 + from ssl import PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS +except ImportError: + try: + from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS + except ImportError: + PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 + + +try: + from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3 +except ImportError: + OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 + OP_NO_COMPRESSION = 0x20000 + + +try: # OP_NO_TICKET was added in Python 3.6 + from ssl import OP_NO_TICKET +except ImportError: + OP_NO_TICKET = 0x4000 + + +# A secure default. +# Sources for more information on TLS ciphers: +# +# - https://wiki.mozilla.org/Security/Server_Side_TLS +# - https://www.ssllabs.com/projects/best-practices/index.html +# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ +# +# The general intent is: +# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), +# - prefer ECDHE over DHE for better performance, +# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and +# security, +# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, +# - disable NULL authentication, MD5 MACs, DSS, and other +# insecure ciphers for security reasons. +# - NOTE: TLS 1.3 cipher suites are managed through a different interface +# not exposed by CPython (yet!) and are enabled by default if they're available. +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) + +try: + from ssl import SSLContext # Modern SSL? +except ImportError: + + class SSLContext(object): # Platform-specific: Python 2 + def __init__(self, protocol_version): + self.protocol = protocol_version + # Use default values from a real SSLContext + self.check_hostname = False + self.verify_mode = ssl.CERT_NONE + self.ca_certs = None + self.options = 0 + self.certfile = None + self.keyfile = None + self.ciphers = None + + def load_cert_chain(self, certfile, keyfile): + self.certfile = certfile + self.keyfile = keyfile + + def load_verify_locations(self, cafile=None, capath=None, cadata=None): + self.ca_certs = cafile + + if capath is not None: + raise SSLError("CA directories not supported in older Pythons") + + if cadata is not None: + raise SSLError("CA data not supported in older Pythons") + + def set_ciphers(self, cipher_suite): + self.ciphers = cipher_suite + + def wrap_socket(self, socket, server_hostname=None, server_side=False): + warnings.warn( + "A true SSLContext object is not available. This prevents " + "urllib3 from configuring SSL appropriately and may cause " + "certain SSL connections to fail. You can upgrade to a newer " + "version of Python to solve this. For more information, see " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#ssl-warnings", + InsecurePlatformWarning, + ) + kwargs = { + "keyfile": self.keyfile, + "certfile": self.certfile, + "ca_certs": self.ca_certs, + "cert_reqs": self.verify_mode, + "ssl_version": self.protocol, + "server_side": server_side, + } + return wrap_socket(socket, ciphers=self.ciphers, **kwargs) + + +def assert_fingerprint(cert, fingerprint): + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. 
+ :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + fingerprint = fingerprint.replace(":", "").lower() + digest_length = len(fingerprint) + hashfunc = HASHFUNC_MAP.get(digest_length) + if not hashfunc: + raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint)) + + # We need encode() here for py32; works on py2 and p33. + fingerprint_bytes = unhexlify(fingerprint.encode()) + + cert_digest = hashfunc(cert).digest() + + if not _const_compare_digest(cert_digest, fingerprint_bytes): + raise SSLError( + 'Fingerprints did not match. Expected "{0}", got "{1}".'.format( + fingerprint, hexlify(cert_digest) + ) + ) + + +def resolve_cert_reqs(candidate): + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_REQUIRED`. + If given a string it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbreviation. + (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. + If it's neither `None` nor a string we assume it is already the numeric + constant which can directly be passed to wrap_socket. + """ + if candidate is None: + return CERT_REQUIRED + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "CERT_" + candidate) + return res + + return candidate + + +def resolve_ssl_version(candidate): + """ + like resolve_cert_reqs + """ + if candidate is None: + return PROTOCOL_TLS + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "PROTOCOL_" + candidate) + return res + + return candidate + + +def create_urllib3_context( + ssl_version=None, cert_reqs=None, options=None, ciphers=None +): + """All arguments have the same meaning as ``ssl_wrap_socket``. + + By default, this function does a lot of the same work that + ``ssl.create_default_context`` does on Python 3.4+. It: + + - Disables SSLv2, SSLv3, and compression + - Sets a restricted set of server ciphers + + If you wish to enable SSLv3, you can do:: + + from pip._vendor.urllib3.util import ssl_ + context = ssl_.create_urllib3_context() + context.options &= ~ssl_.OP_NO_SSLv3 + + You can do the same to enable compression (substituting ``COMPRESSION`` + for ``SSLv3`` in the last line above). + + :param ssl_version: + The desired protocol version to use. This will default to + PROTOCOL_SSLv23 which will negotiate the highest protocol that both + the server and your installation of OpenSSL support. + :param cert_reqs: + Whether to require the certificate verification. This defaults to + ``ssl.CERT_REQUIRED``. + :param options: + Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, + ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``. + :param ciphers: + Which cipher suites to allow the server to select. 
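# --- Editor's aside: illustrative sketch, not part of the vendored urllib3 diff. ---
# resolve_cert_reqs() and resolve_ssl_version() above map string names onto the
# numeric ssl constants, which is how create_proxy_ssl_context() in util/proxy.py
# feeds them into create_urllib3_context(). A minimal sketch, assuming the
# vendored import path pip._vendor.urllib3.util.ssl_:
import ssl
from pip._vendor.urllib3.util.ssl_ import (
    create_urllib3_context,
    resolve_cert_reqs,
    resolve_ssl_version,
)

ctx = create_urllib3_context(
    ssl_version=resolve_ssl_version("TLSv1_2"),    # -> ssl.PROTOCOL_TLSv1_2
    cert_reqs=resolve_cert_reqs("CERT_REQUIRED"),  # -> ssl.CERT_REQUIRED
)
# The hardened defaults (SSLv2/SSLv3/compression disabled) stay in place.
assert ctx.verify_mode == ssl.CERT_REQUIRED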
+ :returns: + Constructed SSLContext object with specified options + :rtype: SSLContext + """ + context = SSLContext(ssl_version or PROTOCOL_TLS) + + context.set_ciphers(ciphers or DEFAULT_CIPHERS) + + # Setting the default here, as we may have no ssl module on import + cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + + if options is None: + options = 0 + # SSLv2 is easily broken and is considered harmful and dangerous + options |= OP_NO_SSLv2 + # SSLv3 has several problems and is now dangerous + options |= OP_NO_SSLv3 + # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ + # (issue #309) + options |= OP_NO_COMPRESSION + # TLSv1.2 only. Unless set explicitly, do not request tickets. + # This may save some bandwidth on wire, and although the ticket is encrypted, + # there is a risk associated with it being on wire, + # if the server is not rotating its ticketing keys properly. + options |= OP_NO_TICKET + + context.options |= options + + # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is + # necessary for conditional client cert authentication with TLS 1.3. + # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older + # versions of Python. We only enable on Python 3.7.4+ or if certificate + # verification is enabled to work around Python issue #37428 + # See: https://bugs.python.org/issue37428 + if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr( + context, "post_handshake_auth", None + ) is not None: + context.post_handshake_auth = True + + context.verify_mode = cert_reqs + if ( + getattr(context, "check_hostname", None) is not None + ): # Platform-specific: Python 3.2 + # We do our own verification, including fingerprints and alternative + # hostnames. So disable it here + context.check_hostname = False + + # Enable logging of TLS session keys via defacto standard environment variable + # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. + if hasattr(context, "keylog_filename"): + sslkeylogfile = os.environ.get("SSLKEYLOGFILE") + if sslkeylogfile: + context.keylog_filename = sslkeylogfile + + return context + + +def ssl_wrap_socket( + sock, + keyfile=None, + certfile=None, + cert_reqs=None, + ca_certs=None, + server_hostname=None, + ssl_version=None, + ciphers=None, + ssl_context=None, + ca_cert_dir=None, + key_password=None, + ca_cert_data=None, + tls_in_tls=False, +): + """ + All arguments except for server_hostname, ssl_context, and ca_cert_dir have + the same meaning as they do when using :func:`ssl.wrap_socket`. + + :param server_hostname: + When SNI is supported, the expected hostname of the certificate + :param ssl_context: + A pre-made :class:`SSLContext` object. If none is provided, one will + be created using :func:`create_urllib3_context`. + :param ciphers: + A string of ciphers we wish the client to support. + :param ca_cert_dir: + A directory containing CA certificates in multiple separate files, as + supported by OpenSSL's -CApath flag or the capath argument to + SSLContext.load_verify_locations(). + :param key_password: + Optional password if the keyfile is encrypted. + :param ca_cert_data: + Optional string containing CA certificates in PEM format suitable for + passing as the cadata parameter to SSLContext.load_verify_locations() + :param tls_in_tls: + Use SSLTransport to wrap the existing socket. + """ + context = ssl_context + if context is None: + # Note: This branch of code and all the variables in it are no longer + # used by urllib3 itself. 
We should consider deprecating and removing + # this code. + context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) + + if ca_certs or ca_cert_dir or ca_cert_data: + try: + context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) + except (IOError, OSError) as e: + raise SSLError(e) + + elif ssl_context is None and hasattr(context, "load_default_certs"): + # try to load OS default certs; works well on Windows (require Python3.4+) + context.load_default_certs() + + # Attempt to detect if we get the goofy behavior of the + # keyfile being encrypted and OpenSSL asking for the + # passphrase via the terminal and instead error out. + if keyfile and key_password is None and _is_key_file_encrypted(keyfile): + raise SSLError("Client private key is encrypted, password is required") + + if certfile: + if key_password is None: + context.load_cert_chain(certfile, keyfile) + else: + context.load_cert_chain(certfile, keyfile, key_password) + + try: + if hasattr(context, "set_alpn_protocols"): + context.set_alpn_protocols(ALPN_PROTOCOLS) + except NotImplementedError: + pass + + # If we detect server_hostname is an IP address then the SNI + # extension should not be used according to RFC3546 Section 3.1 + use_sni_hostname = server_hostname and not is_ipaddress(server_hostname) + # SecureTransport uses server_hostname in certificate verification. + send_sni = (use_sni_hostname and HAS_SNI) or ( + IS_SECURETRANSPORT and server_hostname + ) + # Do not warn the user if server_hostname is an invalid SNI hostname. + if not HAS_SNI and use_sni_hostname: + warnings.warn( + "An HTTPS request has been made, but the SNI (Server Name " + "Indication) extension to TLS is not available on this platform. " + "This may cause the server to present an incorrect TLS " + "certificate, which can cause validation failures. You can upgrade to " + "a newer version of Python to solve this. For more information, see " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#ssl-warnings", + SNIMissingWarning, + ) + + if send_sni: + ssl_sock = _ssl_wrap_socket_impl( + sock, context, tls_in_tls, server_hostname=server_hostname + ) + else: + ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls) + return ssl_sock + + +def is_ipaddress(hostname): + """Detects whether the hostname given is an IPv4 or IPv6 address. + Also detects IPv6 addresses with Zone IDs. + + :param str hostname: Hostname to examine. + :return: True if the hostname is an IP address, False otherwise. + """ + if not six.PY2 and isinstance(hostname, bytes): + # IDN A-label bytes are ASCII compatible. + hostname = hostname.decode("ascii") + return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname)) + + +def _is_key_file_encrypted(key_file): + """Detects if a key file is encrypted or not.""" + with open(key_file, "r") as f: + for line in f: + # Look for Proc-Type: 4,ENCRYPTED + if "ENCRYPTED" in line: + return True + + return False + + +def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None): + if tls_in_tls: + if not SSLTransport: + # Import error, ssl is not available. 
+ raise ProxySchemeUnsupported( + "TLS in TLS requires support for the 'ssl' module" + ) + + SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context) + return SSLTransport(sock, ssl_context, server_hostname) + + if server_hostname: + return ssl_context.wrap_socket(sock, server_hostname=server_hostname) + else: + return ssl_context.wrap_socket(sock) diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/ssltransport.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/ssltransport.py new file mode 100644 index 00000000..ca00233c --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/ssltransport.py @@ -0,0 +1,221 @@ +import io +import socket +import ssl + +from pip._vendor.urllib3.exceptions import ProxySchemeUnsupported +from pip._vendor.urllib3.packages import six + +SSL_BLOCKSIZE = 16384 + + +class SSLTransport: + """ + The SSLTransport wraps an existing socket and establishes an SSL connection. + + Contrary to Python's implementation of SSLSocket, it allows you to chain + multiple TLS connections together. It's particularly useful if you need to + implement TLS within TLS. + + The class supports most of the socket API operations. + """ + + @staticmethod + def _validate_ssl_context_for_tls_in_tls(ssl_context): + """ + Raises a ProxySchemeUnsupported if the provided ssl_context can't be used + for TLS in TLS. + + The only requirement is that the ssl_context provides the 'wrap_bio' + methods. + """ + + if not hasattr(ssl_context, "wrap_bio"): + if six.PY2: + raise ProxySchemeUnsupported( + "TLS in TLS requires SSLContext.wrap_bio() which isn't " + "supported on Python 2" + ) + else: + raise ProxySchemeUnsupported( + "TLS in TLS requires SSLContext.wrap_bio() which isn't " + "available on non-native SSLContext" + ) + + def __init__( + self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True + ): + """ + Create an SSLTransport around socket using the provided ssl_context. + """ + self.incoming = ssl.MemoryBIO() + self.outgoing = ssl.MemoryBIO() + + self.suppress_ragged_eofs = suppress_ragged_eofs + self.socket = socket + + self.sslobj = ssl_context.wrap_bio( + self.incoming, self.outgoing, server_hostname=server_hostname + ) + + # Perform initial handshake. 
+ self._ssl_io_loop(self.sslobj.do_handshake) + + def __enter__(self): + return self + + def __exit__(self, *_): + self.close() + + def fileno(self): + return self.socket.fileno() + + def read(self, len=1024, buffer=None): + return self._wrap_ssl_read(len, buffer) + + def recv(self, len=1024, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv") + return self._wrap_ssl_read(len) + + def recv_into(self, buffer, nbytes=None, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv_into") + if buffer and (nbytes is None): + nbytes = len(buffer) + elif nbytes is None: + nbytes = 1024 + return self.read(nbytes, buffer) + + def sendall(self, data, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to sendall") + count = 0 + with memoryview(data) as view, view.cast("B") as byte_view: + amount = len(byte_view) + while count < amount: + v = self.send(byte_view[count:]) + count += v + + def send(self, data, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to send") + response = self._ssl_io_loop(self.sslobj.write, data) + return response + + def makefile( + self, mode="r", buffering=None, encoding=None, errors=None, newline=None + ): + """ + Python's httpclient uses makefile and buffered io when reading HTTP + messages and we need to support it. + + This is unfortunately a copy and paste of socket.py makefile with small + changes to point to the socket directly. + """ + if not set(mode) <= {"r", "w", "b"}: + raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) + + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = socket.SocketIO(self, rawmode) + self.socket._io_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text + + def unwrap(self): + self._ssl_io_loop(self.sslobj.unwrap) + + def close(self): + self.socket.close() + + def getpeercert(self, binary_form=False): + return self.sslobj.getpeercert(binary_form) + + def version(self): + return self.sslobj.version() + + def cipher(self): + return self.sslobj.cipher() + + def selected_alpn_protocol(self): + return self.sslobj.selected_alpn_protocol() + + def selected_npn_protocol(self): + return self.sslobj.selected_npn_protocol() + + def shared_ciphers(self): + return self.sslobj.shared_ciphers() + + def compression(self): + return self.sslobj.compression() + + def settimeout(self, value): + self.socket.settimeout(value) + + def gettimeout(self): + return self.socket.gettimeout() + + def _decref_socketios(self): + self.socket._decref_socketios() + + def _wrap_ssl_read(self, len, buffer=None): + try: + return self._ssl_io_loop(self.sslobj.read, len, buffer) + except ssl.SSLError as e: + if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs: + return 0 # eof, return 0. 
+ else: + raise + + def _ssl_io_loop(self, func, *args): + """ Performs an I/O loop between incoming/outgoing and the socket.""" + should_loop = True + ret = None + + while should_loop: + errno = None + try: + ret = func(*args) + except ssl.SSLError as e: + if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): + # WANT_READ, and WANT_WRITE are expected, others are not. + raise e + errno = e.errno + + buf = self.outgoing.read() + self.socket.sendall(buf) + + if errno is None: + should_loop = False + elif errno == ssl.SSL_ERROR_WANT_READ: + buf = self.socket.recv(SSL_BLOCKSIZE) + if buf: + self.incoming.write(buf) + else: + self.incoming.write_eof() + return ret diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py new file mode 100644 index 00000000..ff69593b --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py @@ -0,0 +1,268 @@ +from __future__ import absolute_import + +import time + +# The default socket timeout, used by httplib to indicate that no timeout was +# specified by the user +from socket import _GLOBAL_DEFAULT_TIMEOUT + +from ..exceptions import TimeoutStateError + +# A sentinel value to indicate that no timeout was specified by the user in +# urllib3 +_Default = object() + + +# Use time.monotonic if available. +current_time = getattr(time, "monotonic", time.time) + + +class Timeout(object): + """Timeout configuration. + + Timeouts can be defined as a default for a pool: + + .. code-block:: python + + timeout = Timeout(connect=2.0, read=7.0) + http = PoolManager(timeout=timeout) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool): + + .. code-block:: python + + response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) + + Timeouts can be disabled by setting all the parameters to ``None``: + + .. code-block:: python + + no_timeout = Timeout(connect=None, read=None) + response = http.request('GET', 'http://example.com/, timeout=no_timeout) + + + :param total: + This combines the connect and read timeouts into one; the read timeout + will be set to the time leftover from the connect attempt. In the + event that both a connect timeout and a total are specified, or a read + timeout and a total are specified, the shorter timeout will be applied. + + Defaults to None. + + :type total: int, float, or None + + :param connect: + The maximum amount of time (in seconds) to wait for a connection + attempt to a server to succeed. Omitting the parameter will default the + connect timeout to the system default, probably `the global default + timeout in socket.py + `_. + None will set an infinite timeout for connection attempts. + + :type connect: int, float, or None + + :param read: + The maximum amount of time (in seconds) to wait between consecutive + read operations for a response from the server. Omitting the parameter + will default the read timeout to the system default, probably `the + global default timeout in socket.py + `_. + None will set an infinite timeout. + + :type read: int, float, or None + + .. note:: + + Many factors can affect the total amount of time for urllib3 to return + an HTTP response. + + For example, Python's DNS resolver does not obey the timeout specified + on the socket. Other factors that can affect total request time include + high CPU load, high swap, the program running at a low priority level, + or other behaviors. 
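# --- Editor's aside: illustrative sketch, not part of the vendored urllib3 diff. ---
# The Timeout docstring above distinguishes the connect, read and total budgets.
# A minimal sketch, assuming the vendored copy is importable as pip._vendor.urllib3:
from pip._vendor.urllib3 import PoolManager
from pip._vendor.urllib3.util.timeout import Timeout

# Allow 2s to establish the connection, 7s between consecutive socket reads,
# and at most 10s combined; the shorter of the applicable values is applied.
timeout = Timeout(connect=2.0, read=7.0, total=10.0)
http = PoolManager(timeout=timeout)
response = http.request("GET", "http://example.com/")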
+ + In addition, the read and total timeouts only measure the time between + read operations on the socket connecting the client and the server, + not the total amount of time for the request to return a complete + response. For most requests, the timeout is raised because the server + has not sent the first byte in the specified time. This is not always + the case; if a server streams one byte every fifteen seconds, a timeout + of 20 seconds will not trigger, even though the request will take + several minutes to complete. + + If your goal is to cut off any request after a set amount of wall clock + time, consider having a second "watcher" thread to cut off a slow + request. + """ + + #: A sentinel object representing the default timeout value + DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT + + def __init__(self, total=None, connect=_Default, read=_Default): + self._connect = self._validate_timeout(connect, "connect") + self._read = self._validate_timeout(read, "read") + self.total = self._validate_timeout(total, "total") + self._start_connect = None + + def __repr__(self): + return "%s(connect=%r, read=%r, total=%r)" % ( + type(self).__name__, + self._connect, + self._read, + self.total, + ) + + # __str__ provided for backwards compatibility + __str__ = __repr__ + + @classmethod + def _validate_timeout(cls, value, name): + """Check that a timeout attribute is valid. + + :param value: The timeout value to validate + :param name: The name of the timeout attribute to validate. This is + used to specify in error messages. + :return: The validated and casted version of the given value. + :raises ValueError: If it is a numeric value less than or equal to + zero, or the type is not an integer, float, or None. + """ + if value is _Default: + return cls.DEFAULT_TIMEOUT + + if value is None or value is cls.DEFAULT_TIMEOUT: + return value + + if isinstance(value, bool): + raise ValueError( + "Timeout cannot be a boolean value. It must " + "be an int, float or None." + ) + try: + float(value) + except (TypeError, ValueError): + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) + + try: + if value <= 0: + raise ValueError( + "Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than or equal to 0." % (name, value) + ) + except TypeError: + # Python 3 + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) + + return value + + @classmethod + def from_float(cls, timeout): + """Create a new Timeout from a legacy timeout value. + + The timeout value used by httplib.py sets the same timeout on the + connect(), and recv() socket requests. This creates a :class:`Timeout` + object that sets the individual timeouts to the ``timeout`` value + passed to this function. + + :param timeout: The legacy timeout value. + :type timeout: integer, float, sentinel default object, or None + :return: Timeout object + :rtype: :class:`Timeout` + """ + return Timeout(read=timeout, connect=timeout) + + def clone(self): + """Create a copy of the timeout object + + Timeout properties are stored per-pool but each request needs a fresh + Timeout object to ensure each one has its own start/stop configured. + + :return: a copy of the timeout object + :rtype: :class:`Timeout` + """ + # We can't use copy.deepcopy because that will also create a new object + # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to + # detect the user default. 
+ return Timeout(connect=self._connect, read=self._read, total=self.total) + + def start_connect(self): + """Start the timeout clock, used during a connect() attempt + + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to start a timer that has been started already. + """ + if self._start_connect is not None: + raise TimeoutStateError("Timeout timer has already been started.") + self._start_connect = current_time() + return self._start_connect + + def get_connect_duration(self): + """Gets the time elapsed since the call to :meth:`start_connect`. + + :return: Elapsed time in seconds. + :rtype: float + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to get duration for a timer that hasn't been started. + """ + if self._start_connect is None: + raise TimeoutStateError( + "Can't get connect duration for timer that has not started." + ) + return current_time() - self._start_connect + + @property + def connect_timeout(self): + """Get the value to use when setting a connection timeout. + + This will be a positive float or integer, the value None + (never timeout), or the default system timeout. + + :return: Connect timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + """ + if self.total is None: + return self._connect + + if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: + return self.total + + return min(self._connect, self.total) + + @property + def read_timeout(self): + """Get the value for the read timeout. + + This assumes some time has elapsed in the connection timeout and + computes the read timeout appropriately. + + If self.total is set, the read timeout is dependent on the amount of + time taken by the connect timeout. If the connection time has not been + established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be + raised. + + :return: Value to use for the read timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` + has not yet been called on this object. + """ + if ( + self.total is not None + and self.total is not self.DEFAULT_TIMEOUT + and self._read is not None + and self._read is not self.DEFAULT_TIMEOUT + ): + # In case the connect timeout has not yet been established. + if self._start_connect is None: + return self._read + return max(0, min(self.total - self.get_connect_duration(), self._read)) + elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: + return max(0, self.total - self.get_connect_duration()) + else: + return self._read diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/url.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/url.py new file mode 100644 index 00000000..66c8795b --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/url.py @@ -0,0 +1,430 @@ +from __future__ import absolute_import + +import re +from collections import namedtuple + +from ..exceptions import LocationParseError +from ..packages import six + +url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"] + +# We only want to normalize urls with an HTTP(S) scheme. +# urllib3 infers URLs without a scheme (None) to be http. +NORMALIZABLE_SCHEMES = ("http", "https", None) + +# Almost all of these patterns were derived from the +# 'rfc3986' module: https://github.com/python-hyper/rfc3986 +PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}") +SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)") +URI_RE = re.compile( + r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?" + r"(?://([^\\/?#]*))?" 
+ r"([^?#]*)" + r"(?:\?([^#]*))?" + r"(?:#(.*))?$", + re.UNICODE | re.DOTALL, +) + +IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" +HEX_PAT = "[0-9A-Fa-f]{1,4}" +LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT) +_subs = {"hex": HEX_PAT, "ls32": LS32_PAT} +_variations = [ + # 6( h16 ":" ) ls32 + "(?:%(hex)s:){6}%(ls32)s", + # "::" 5( h16 ":" ) ls32 + "::(?:%(hex)s:){5}%(ls32)s", + # [ h16 ] "::" 4( h16 ":" ) ls32 + "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", + # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", + # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", + # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", + # [ *4( h16 ":" ) h16 ] "::" ls32 + "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", + # [ *5( h16 ":" ) h16 ] "::" h16 + "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", + # [ *6( h16 ":" ) h16 ] "::" + "(?:(?:%(hex)s:){0,6}%(hex)s)?::", +] + +UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" +IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" +ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" +IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" +REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*" +TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$") + +IPV4_RE = re.compile("^" + IPV4_PAT + "$") +IPV6_RE = re.compile("^" + IPV6_PAT + "$") +IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$") +BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$") +ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$") + +SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % ( + REG_NAME_PAT, + IPV4_PAT, + IPV6_ADDRZ_PAT, +) +SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL) + +UNRESERVED_CHARS = set( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~" +) +SUB_DELIM_CHARS = set("!$&'()*+,;=") +USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"} +PATH_CHARS = USERINFO_CHARS | {"@", "/"} +QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"} + + +class Url(namedtuple("Url", url_attrs)): + """ + Data structure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. Both the scheme and host are normalized as they are + both case-insensitive according to RFC 3986. + """ + + __slots__ = () + + def __new__( + cls, + scheme=None, + auth=None, + host=None, + port=None, + path=None, + query=None, + fragment=None, + ): + if path and not path.startswith("/"): + path = "/" + path + if scheme is not None: + scheme = scheme.lower() + return super(Url, cls).__new__( + cls, scheme, auth, host, port, path, query, fragment + ) + + @property + def hostname(self): + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self): + """Absolute path including the query string.""" + uri = self.path or "/" + + if self.query is not None: + uri += "?" + self.query + + return uri + + @property + def netloc(self): + """Network location including host and port""" + if self.port: + return "%s:%d" % (self.host, self.port) + return self.host + + @property + def url(self): + """ + Convert self into a url + + This function should more or less round-trip with :func:`.parse_url`. 
The + returned url may not be exactly the same as the url inputted to + :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls + with a blank port will have : removed). + + Example: :: + + >>> U = parse_url('http://google.com/mail/') + >>> U.url + 'http://google.com/mail/' + >>> Url('http', 'username:password', 'host.com', 80, + ... '/path', 'query', 'fragment').url + 'http://username:password@host.com:80/path?query#fragment' + """ + scheme, auth, host, port, path, query, fragment = self + url = u"" + + # We use "is not None" we want things to happen with empty strings (or 0 port) + if scheme is not None: + url += scheme + u"://" + if auth is not None: + url += auth + u"@" + if host is not None: + url += host + if port is not None: + url += u":" + str(port) + if path is not None: + url += path + if query is not None: + url += u"?" + query + if fragment is not None: + url += u"#" + fragment + + return url + + def __str__(self): + return self.url + + +def split_first(s, delims): + """ + .. deprecated:: 1.25 + + Given a string and an iterable of delimiters, split on the first found + delimiter. Return two split parts and the matched delimiter. + + If not found, then the first part is the full input string. + + Example:: + + >>> split_first('foo/bar?baz', '?/=') + ('foo', 'bar?baz', '/') + >>> split_first('foo/bar?baz', '123') + ('foo/bar?baz', '', None) + + Scales linearly with number of delims. Not ideal for large number of delims. + """ + min_idx = None + min_delim = None + for d in delims: + idx = s.find(d) + if idx < 0: + continue + + if min_idx is None or idx < min_idx: + min_idx = idx + min_delim = d + + if min_idx is None or min_idx < 0: + return s, "", None + + return s[:min_idx], s[min_idx + 1 :], min_delim + + +def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"): + """Percent-encodes a URI component without reapplying + onto an already percent-encoded component. + """ + if component is None: + return component + + component = six.ensure_text(component) + + # Normalize existing percent-encoded bytes. + # Try to see if the component we're encoding is already percent-encoded + # so we can skip all '%' characters but still encode all others. + component, percent_encodings = PERCENT_RE.subn( + lambda match: match.group(0).upper(), component + ) + + uri_bytes = component.encode("utf-8", "surrogatepass") + is_percent_encoded = percent_encodings == uri_bytes.count(b"%") + encoded_component = bytearray() + + for i in range(0, len(uri_bytes)): + # Will return a single character bytestring on both Python 2 & 3 + byte = uri_bytes[i : i + 1] + byte_ord = ord(byte) + if (is_percent_encoded and byte == b"%") or ( + byte_ord < 128 and byte.decode() in allowed_chars + ): + encoded_component += byte + continue + encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper())) + + return encoded_component.decode(encoding) + + +def _remove_path_dot_segments(path): + # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code + segments = path.split("/") # Turn the path into a list of segments + output = [] # Initialize the variable to use to store output + + for segment in segments: + # '.' 
is the current directory, so ignore it, it is superfluous + if segment == ".": + continue + # Anything other than '..', should be appended to the output + elif segment != "..": + output.append(segment) + # In this case segment == '..', if we can, we should pop the last + # element + elif output: + output.pop() + + # If the path starts with '/' and the output is empty or the first string + # is non-empty + if path.startswith("/") and (not output or output[0]): + output.insert(0, "") + + # If the path starts with '/.' or '/..' ensure we add one more empty + # string to add a trailing '/' + if path.endswith(("/.", "/..")): + output.append("") + + return "/".join(output) + + +def _normalize_host(host, scheme): + if host: + if isinstance(host, six.binary_type): + host = six.ensure_str(host) + + if scheme in NORMALIZABLE_SCHEMES: + is_ipv6 = IPV6_ADDRZ_RE.match(host) + if is_ipv6: + match = ZONE_ID_RE.search(host) + if match: + start, end = match.span(1) + zone_id = host[start:end] + + if zone_id.startswith("%25") and zone_id != "%25": + zone_id = zone_id[3:] + else: + zone_id = zone_id[1:] + zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS) + return host[:start].lower() + zone_id + host[end:] + else: + return host.lower() + elif not IPV4_RE.match(host): + return six.ensure_str( + b".".join([_idna_encode(label) for label in host.split(".")]) + ) + return host + + +def _idna_encode(name): + if name and any([ord(x) > 128 for x in name]): + try: + from pip._vendor import idna + except ImportError: + six.raise_from( + LocationParseError("Unable to parse URL without the 'idna' module"), + None, + ) + try: + return idna.encode(name.lower(), strict=True, std3_rules=True) + except idna.IDNAError: + six.raise_from( + LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None + ) + return name.lower().encode("ascii") + + +def _encode_target(target): + """Percent-encodes a request target so that there are no invalid characters""" + path, query = TARGET_RE.match(target).groups() + target = _encode_invalid_chars(path, PATH_CHARS) + query = _encode_invalid_chars(query, QUERY_CHARS) + if query is not None: + target += "?" + query + return target + + +def parse_url(url): + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + This parser is RFC 3986 compliant. + + The parser logic and helper functions are based heavily on + work done in the ``rfc3986`` module. + + :param str url: URL to parse into a :class:`.Url` namedtuple. + + Partly backwards-compatible with :mod:`urlparse`. + + Example:: + + >>> parse_url('http://google.com/mail/') + Url(scheme='http', host='google.com', port=None, path='/mail/', ...) + >>> parse_url('google.com:80') + Url(scheme=None, host='google.com', port=80, path=None, ...) + >>> parse_url('/foo?bar') + Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) 
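+
+    Two further illustrative examples (added here; they are not part of the
+    upstream docstring, but follow from the normalization rules above)::
+
+        >>> parse_url('HTTPS://Example.com/')
+        Url(scheme='https', host='example.com', port=None, path='/', ...)
+        >>> parse_url('[::1]:8080')
+        Url(scheme=None, host='[::1]', port=8080, path=None, ...)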
+ """ + if not url: + # Empty + return Url() + + source_url = url + if not SCHEME_RE.search(url): + url = "//" + url + + try: + scheme, authority, path, query, fragment = URI_RE.match(url).groups() + normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES + + if scheme: + scheme = scheme.lower() + + if authority: + auth, host, port = SUBAUTHORITY_RE.match(authority).groups() + if auth and normalize_uri: + auth = _encode_invalid_chars(auth, USERINFO_CHARS) + if port == "": + port = None + else: + auth, host, port = None, None, None + + if port is not None: + port = int(port) + if not (0 <= port <= 65535): + raise LocationParseError(url) + + host = _normalize_host(host, scheme) + + if normalize_uri and path: + path = _remove_path_dot_segments(path) + path = _encode_invalid_chars(path, PATH_CHARS) + if normalize_uri and query: + query = _encode_invalid_chars(query, QUERY_CHARS) + if normalize_uri and fragment: + fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS) + + except (ValueError, AttributeError): + return six.raise_from(LocationParseError(source_url), None) + + # For the sake of backwards compatibility we put empty + # string values for path if there are any defined values + # beyond the path in the URL. + # TODO: Remove this when we break backwards compatibility. + if not path: + if query is not None or fragment is not None: + path = "" + else: + path = None + + # Ensure that each part of the URL is a `str` for + # backwards compatibility. + if isinstance(url, six.text_type): + ensure_func = six.ensure_text + else: + ensure_func = six.ensure_str + + def ensure_type(x): + return x if x is None else ensure_func(x) + + return Url( + scheme=ensure_type(scheme), + auth=ensure_type(auth), + host=ensure_type(host), + port=port, + path=ensure_type(path), + query=ensure_type(query), + fragment=ensure_type(fragment), + ) + + +def get_host(url): + """ + Deprecated. Use :func:`parse_url` instead. + """ + p = parse_url(url) + return p.scheme or "http", p.hostname, p.port diff --git a/venv/Lib/site-packages/pip/_vendor/urllib3/util/wait.py b/venv/Lib/site-packages/pip/_vendor/urllib3/util/wait.py new file mode 100644 index 00000000..c280646c --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/urllib3/util/wait.py @@ -0,0 +1,153 @@ +import errno +import select +import sys +from functools import partial + +try: + from time import monotonic +except ImportError: + from time import time as monotonic + +__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] + + +class NoWayToWaitForSocketError(Exception): + pass + + +# How should we wait on sockets? +# +# There are two types of APIs you can use for waiting on sockets: the fancy +# modern stateful APIs like epoll/kqueue, and the older stateless APIs like +# select/poll. The stateful APIs are more efficient when you have a lots of +# sockets to keep track of, because you can set them up once and then use them +# lots of times. But we only ever want to wait on a single socket at a time +# and don't want to keep track of state, so the stateless APIs are actually +# more efficient. So we want to use select() or poll(). +# +# Now, how do we choose between select() and poll()? On traditional Unixes, +# select() has a strange calling convention that makes it slow, or fail +# altogether, for high-numbered file descriptors. The point of poll() is to fix +# that, so on Unixes, we prefer poll(). 
+#
+# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
+# for it), but that's OK, because on Windows, select() doesn't have this
+# strange calling convention; plain select() works fine.
+#
+# So: on Windows we use select(), and everywhere else we use poll(). We also
+# fall back to select() in case poll() is somehow broken or missing.
+
+if sys.version_info >= (3, 5):
+    # Modern Python, which retries syscalls by default
+    def _retry_on_intr(fn, timeout):
+        return fn(timeout)
+
+
+else:
+    # Old and broken Pythons.
+    def _retry_on_intr(fn, timeout):
+        if timeout is None:
+            deadline = float("inf")
+        else:
+            deadline = monotonic() + timeout
+
+        while True:
+            try:
+                return fn(timeout)
+            # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
+            except (OSError, select.error) as e:
+                # 'e.args[0]' incantation works for both OSError and select.error
+                if e.args[0] != errno.EINTR:
+                    raise
+                else:
+                    timeout = deadline - monotonic()
+                    if timeout < 0:
+                        timeout = 0
+                    if timeout == float("inf"):
+                        timeout = None
+                    continue
+
+
+def select_wait_for_socket(sock, read=False, write=False, timeout=None):
+    if not read and not write:
+        raise RuntimeError("must specify at least one of read=True, write=True")
+    rcheck = []
+    wcheck = []
+    if read:
+        rcheck.append(sock)
+    if write:
+        wcheck.append(sock)
+    # When doing a non-blocking connect, most systems signal success by
+    # marking the socket writable. Windows, though, signals success by
+    # marking it as "exceptional". We paper over the difference by checking
+    # the write sockets for both conditions. (The stdlib selectors module
+    # does the same thing.)
+    fn = partial(select.select, rcheck, wcheck, wcheck)
+    rready, wready, xready = _retry_on_intr(fn, timeout)
+    return bool(rready or wready or xready)
+
+
+def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
+    if not read and not write:
+        raise RuntimeError("must specify at least one of read=True, write=True")
+    mask = 0
+    if read:
+        mask |= select.POLLIN
+    if write:
+        mask |= select.POLLOUT
+    poll_obj = select.poll()
+    poll_obj.register(sock, mask)
+
+    # For some reason, poll() takes timeout in milliseconds
+    def do_poll(t):
+        if t is not None:
+            t *= 1000
+        return poll_obj.poll(t)
+
+    return bool(_retry_on_intr(do_poll, timeout))
+
+
+def null_wait_for_socket(*args, **kwargs):
+    raise NoWayToWaitForSocketError("no select-equivalent available")
+
+
+def _have_working_poll():
+    # Apparently some systems have a select.poll that fails as soon as you try
+    # to use it, either due to strange configuration or broken monkeypatching
+    # from libraries like eventlet/greenlet.
+    try:
+        poll_obj = select.poll()
+        _retry_on_intr(poll_obj.poll, 0)
+    except (AttributeError, OSError):
+        return False
+    else:
+        return True
+
+
+def wait_for_socket(*args, **kwargs):
+    # We delay choosing which implementation to use until the first time we're
+    # called. We could do it at import time, but then we might make the wrong
+    # decision if someone goes wild with monkeypatching select.poll after
+    # we're imported.
+    global wait_for_socket
+    if _have_working_poll():
+        wait_for_socket = poll_wait_for_socket
+    elif hasattr(select, "select"):
+        wait_for_socket = select_wait_for_socket
+    else:  # Platform-specific: Appengine.
+        wait_for_socket = null_wait_for_socket
+    return wait_for_socket(*args, **kwargs)
+
+
+def wait_for_read(sock, timeout=None):
+    """Waits for reading to be available on a given socket.
+    Returns True if the socket is readable, or False if the timeout expired.
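+
+    A minimal usage sketch (hypothetical host and port, added for
+    illustration)::
+
+        import socket
+        sock = socket.create_connection(("example.com", 80))
+        sock.sendall(b"GET / HTTP/1.0\r\nHost: example.com\r\n\r\n")
+        if wait_for_read(sock, timeout=1.0):
+            data = sock.recv(4096)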
+ """ + return wait_for_socket(sock, read=True, timeout=timeout) + + +def wait_for_write(sock, timeout=None): + """Waits for writing to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. + """ + return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/venv/Lib/site-packages/pip/_vendor/vendor.txt b/venv/Lib/site-packages/pip/_vendor/vendor.txt new file mode 100644 index 00000000..6c9732e9 --- /dev/null +++ b/venv/Lib/site-packages/pip/_vendor/vendor.txt @@ -0,0 +1,22 @@ +appdirs==1.4.4 +CacheControl==0.12.6 +colorama==0.4.4 +distlib==0.3.1 +distro==1.5.0 +html5lib==1.1 +msgpack==1.0.2 +packaging==20.9 +pep517==0.10.0 +progress==1.5 +pyparsing==2.4.7 +requests==2.25.1 + certifi==2020.12.05 + chardet==4.0.0 + idna==3.1 + urllib3==1.26.4 +resolvelib==0.7.0 +setuptools==44.0.0 +six==1.15.0 +tenacity==7.0.0 +toml==0.10.2 +webencodings==0.5.1 diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/__init__.py b/venv/Lib/site-packages/pip/_vendor/webencodings/__init__.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/webencodings/__init__.py rename to venv/Lib/site-packages/pip/_vendor/webencodings/__init__.py diff --git a/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..99fd4918 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-36.pyc new file mode 100644 index 00000000..b014415b Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-36.pyc new file mode 100644 index 00000000..b85db396 Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-36.pyc new file mode 100644 index 00000000..5bd6068c Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-36.pyc b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-36.pyc new file mode 100644 index 00000000..b2ee592f Binary files /dev/null and b/venv/Lib/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/labels.py b/venv/Lib/site-packages/pip/_vendor/webencodings/labels.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/webencodings/labels.py rename to venv/Lib/site-packages/pip/_vendor/webencodings/labels.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/mklabels.py b/venv/Lib/site-packages/pip/_vendor/webencodings/mklabels.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/webencodings/mklabels.py rename to 
venv/Lib/site-packages/pip/_vendor/webencodings/mklabels.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/tests.py b/venv/Lib/site-packages/pip/_vendor/webencodings/tests.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/webencodings/tests.py rename to venv/Lib/site-packages/pip/_vendor/webencodings/tests.py diff --git a/env/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.py b/venv/Lib/site-packages/pip/_vendor/webencodings/x_user_defined.py similarity index 100% rename from env/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.py rename to venv/Lib/site-packages/pip/_vendor/webencodings/x_user_defined.py diff --git a/venv/Lib/site-packages/pip/py.typed b/venv/Lib/site-packages/pip/py.typed new file mode 100644 index 00000000..0b44fd9b --- /dev/null +++ b/venv/Lib/site-packages/pip/py.typed @@ -0,0 +1,4 @@ +pip is a command line program. While it is implemented in Python, and so is +available for import, you must not use pip's internal APIs in this way. Typing +information is provided as a convenience only and is not a gaurantee. Expect +unannounced changes to the API and types in releases. diff --git a/venv/Lib/site-packages/pkg_resources/__init__.py b/venv/Lib/site-packages/pkg_resources/__init__.py new file mode 100644 index 00000000..c84f1dd9 --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/__init__.py @@ -0,0 +1,3288 @@ +""" +Package resource API +-------------------- + +A resource is a logical file contained within a package, or a logical +subdirectory thereof. The package resource API expects resource names +to have their path parts separated with ``/``, *not* whatever the local +path separator is. Do not use os.path operations to manipulate resource +names being passed into the API. + +The package resource API is designed to work with normal filesystem packages, +.egg files, and unpacked .egg files. It can also work in a limited way with +.zip files and with custom PEP 302 loaders that support the ``get_data()`` +method. +""" + +import sys +import os +import io +import time +import re +import types +import zipfile +import zipimport +import warnings +import stat +import functools +import pkgutil +import operator +import platform +import collections +import plistlib +import email.parser +import errno +import tempfile +import textwrap +import itertools +import inspect +import ntpath +import posixpath +import importlib +from pkgutil import get_importer + +try: + import _imp +except ImportError: + # Python 3.2 compatibility + import imp as _imp + +try: + FileExistsError +except NameError: + FileExistsError = OSError + +# capture these to bypass sandboxing +from os import utime +try: + from os import mkdir, rename, unlink + WRITE_SUPPORT = True +except ImportError: + # no write support, probably under GAE + WRITE_SUPPORT = False + +from os import open as os_open +from os.path import isdir, split + +try: + import importlib.machinery as importlib_machinery + # access attribute to force import under delayed import mechanisms. 
+ importlib_machinery.__name__ +except ImportError: + importlib_machinery = None + +from pkg_resources.extern import appdirs +from pkg_resources.extern import packaging +__import__('pkg_resources.extern.packaging.version') +__import__('pkg_resources.extern.packaging.specifiers') +__import__('pkg_resources.extern.packaging.requirements') +__import__('pkg_resources.extern.packaging.markers') + +if sys.version_info < (3, 5): + raise RuntimeError("Python 3.5 or later is required") + +# declare some globals that will be defined later to +# satisfy the linters. +require = None +working_set = None +add_activation_listener = None +resources_stream = None +cleanup_resources = None +resource_dir = None +resource_stream = None +set_extraction_path = None +resource_isdir = None +resource_string = None +iter_entry_points = None +resource_listdir = None +resource_filename = None +resource_exists = None +_distribution_finders = None +_namespace_handlers = None +_namespace_packages = None + + +class PEP440Warning(RuntimeWarning): + """ + Used when there is an issue with a version or specifier not complying with + PEP 440. + """ + + +def parse_version(v): + try: + return packaging.version.Version(v) + except packaging.version.InvalidVersion: + return packaging.version.LegacyVersion(v) + + +_state_vars = {} + + +def _declare_state(vartype, **kw): + globals().update(kw) + _state_vars.update(dict.fromkeys(kw, vartype)) + + +def __getstate__(): + state = {} + g = globals() + for k, v in _state_vars.items(): + state[k] = g['_sget_' + v](g[k]) + return state + + +def __setstate__(state): + g = globals() + for k, v in state.items(): + g['_sset_' + _state_vars[k]](k, g[k], v) + return state + + +def _sget_dict(val): + return val.copy() + + +def _sset_dict(key, ob, state): + ob.clear() + ob.update(state) + + +def _sget_object(val): + return val.__getstate__() + + +def _sset_object(key, ob, state): + ob.__setstate__(state) + + +_sget_none = _sset_none = lambda *args: None + + +def get_supported_platform(): + """Return this platform's maximum compatible version. + + distutils.util.get_platform() normally reports the minimum version + of macOS that would be required to *use* extensions produced by + distutils. But what we want when checking compatibility is to know the + version of macOS that we are *running*. To allow usage of packages that + explicitly require a newer version of macOS, we must also know the + current version of the OS. + + If this condition occurs for any other platform with a version in its + platform strings, this function should be extended accordingly. 
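+
+    For example, on an Intel Mac this returns a string along the lines of
+    the following (illustrative value only)::
+
+        >>> get_supported_platform()  # doctest: +SKIP
+        'macosx-10.15-x86_64'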
+ """ + plat = get_build_platform() + m = macosVersionString.match(plat) + if m is not None and sys.platform == "darwin": + try: + plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3)) + except ValueError: + # not macOS + pass + return plat + + +__all__ = [ + # Basic resource access and distribution/entry point discovery + 'require', 'run_script', 'get_provider', 'get_distribution', + 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'iter_entry_points', + 'resource_string', 'resource_stream', 'resource_filename', + 'resource_listdir', 'resource_exists', 'resource_isdir', + + # Environmental control + 'declare_namespace', 'working_set', 'add_activation_listener', + 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'get_default_cache', + + # Primary implementation classes + 'Environment', 'WorkingSet', 'ResourceManager', + 'Distribution', 'Requirement', 'EntryPoint', + + # Exceptions + 'ResolutionError', 'VersionConflict', 'DistributionNotFound', + 'UnknownExtra', 'ExtractionError', + + # Warnings + 'PEP440Warning', + + # Parsing functions and string utilities + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', + 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', + + # filesystem utilities + 'ensure_directory', 'normalize_path', + + # Distribution "precedence" constants + 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', + + # "Provider" interfaces, implementations, and registration/lookup APIs + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', + 'register_finder', 'register_namespace_handler', 'register_loader_type', + 'fixup_namespace_packages', 'get_importer', + + # Warnings + 'PkgResourcesDeprecationWarning', + + # Deprecated/backward compatibility only + 'run_main', 'AvailableDistributions', +] + + +class ResolutionError(Exception): + """Abstract base for dependency resolution errors""" + + def __repr__(self): + return self.__class__.__name__ + repr(self.args) + + +class VersionConflict(ResolutionError): + """ + An already-installed version conflicts with the requested version. + + Should be initialized with the installed Distribution and the requested + Requirement. + """ + + _template = "{self.dist} is installed but {self.req} is required" + + @property + def dist(self): + return self.args[0] + + @property + def req(self): + return self.args[1] + + def report(self): + return self._template.format(**locals()) + + def with_context(self, required_by): + """ + If required_by is non-empty, return a version of self that is a + ContextualVersionConflict. + """ + if not required_by: + return self + args = self.args + (required_by,) + return ContextualVersionConflict(*args) + + +class ContextualVersionConflict(VersionConflict): + """ + A VersionConflict that accepts a third parameter, the set of the + requirements that required the installed Distribution. 
+ """ + + _template = VersionConflict._template + ' by {self.required_by}' + + @property + def required_by(self): + return self.args[2] + + +class DistributionNotFound(ResolutionError): + """A requested distribution was not found""" + + _template = ("The '{self.req}' distribution was not found " + "and is required by {self.requirers_str}") + + @property + def req(self): + return self.args[0] + + @property + def requirers(self): + return self.args[1] + + @property + def requirers_str(self): + if not self.requirers: + return 'the application' + return ', '.join(self.requirers) + + def report(self): + return self._template.format(**locals()) + + def __str__(self): + return self.report() + + +class UnknownExtra(ResolutionError): + """Distribution doesn't have an "extra feature" of the given name""" + + +_provider_factories = {} + +PY_MAJOR = '{}.{}'.format(*sys.version_info) +EGG_DIST = 3 +BINARY_DIST = 2 +SOURCE_DIST = 1 +CHECKOUT_DIST = 0 +DEVELOP_DIST = -1 + + +def register_loader_type(loader_type, provider_factory): + """Register `provider_factory` to make providers for `loader_type` + + `loader_type` is the type or class of a PEP 302 ``module.__loader__``, + and `provider_factory` is a function that, passed a *module* object, + returns an ``IResourceProvider`` for that module. + """ + _provider_factories[loader_type] = provider_factory + + +def get_provider(moduleOrReq): + """Return an IResourceProvider for the named module or requirement""" + if isinstance(moduleOrReq, Requirement): + return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] + try: + module = sys.modules[moduleOrReq] + except KeyError: + __import__(moduleOrReq) + module = sys.modules[moduleOrReq] + loader = getattr(module, '__loader__', None) + return _find_adapter(_provider_factories, loader)(module) + + +def _macos_vers(_cache=[]): + if not _cache: + version = platform.mac_ver()[0] + # fallback for MacPorts + if version == '': + plist = '/System/Library/CoreServices/SystemVersion.plist' + if os.path.exists(plist): + if hasattr(plistlib, 'readPlist'): + plist_content = plistlib.readPlist(plist) + if 'ProductVersion' in plist_content: + version = plist_content['ProductVersion'] + + _cache.append(version.split('.')) + return _cache[0] + + +def _macos_arch(machine): + return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) + + +def get_build_platform(): + """Return this platform's string for platform-specific distributions + + XXX Currently this is the same as ``distutils.util.get_platform()``, but it + needs some hacks for Linux and macOS. + """ + from sysconfig import get_platform + + plat = get_platform() + if sys.platform == "darwin" and not plat.startswith('macosx-'): + try: + version = _macos_vers() + machine = os.uname()[4].replace(" ", "_") + return "macosx-%d.%d-%s" % ( + int(version[0]), int(version[1]), + _macos_arch(machine), + ) + except ValueError: + # if someone is running a non-Mac darwin system, this will fall + # through to the default implementation + pass + return plat + + +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") +# XXX backward compat +get_platform = get_build_platform + + +def compatible_platforms(provided, required): + """Can code for the `provided` platform run on the `required` platform? + + Returns true if either platform is ``None``, or the platforms are equal. + + XXX Needs compatibility checks for Linux and other unixy OSes. 
+ """ + if provided is None or required is None or provided == required: + # easy case + return True + + # macOS special cases + reqMac = macosVersionString.match(required) + if reqMac: + provMac = macosVersionString.match(provided) + + # is this a Mac package? + if not provMac: + # this is backwards compatibility for packages built before + # setuptools 0.6. All packages built after this point will + # use the new macOS designation. + provDarwin = darwinVersionString.match(provided) + if provDarwin: + dversion = int(provDarwin.group(1)) + macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) + if dversion == 7 and macosversion >= "10.3" or \ + dversion == 8 and macosversion >= "10.4": + return True + # egg isn't macOS or legacy darwin + return False + + # are they the same major version and machine type? + if provMac.group(1) != reqMac.group(1) or \ + provMac.group(3) != reqMac.group(3): + return False + + # is the required OS major update >= the provided one? + if int(provMac.group(2)) > int(reqMac.group(2)): + return False + + return True + + # XXX Linux and other platforms' special cases should go here + return False + + +def run_script(dist_spec, script_name): + """Locate distribution `dist_spec` and run its `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + require(dist_spec)[0].run_script(script_name, ns) + + +# backward compatibility +run_main = run_script + + +def get_distribution(dist): + """Return a current distribution object for a Requirement or string""" + if isinstance(dist, str): + dist = Requirement.parse(dist) + if isinstance(dist, Requirement): + dist = get_provider(dist) + if not isinstance(dist, Distribution): + raise TypeError("Expected string, Requirement, or Distribution", dist) + return dist + + +def load_entry_point(dist, group, name): + """Return `name` entry point of `group` for `dist` or raise ImportError""" + return get_distribution(dist).load_entry_point(group, name) + + +def get_entry_map(dist, group=None): + """Return the entry point map for `group`, or the full entry map""" + return get_distribution(dist).get_entry_map(group) + + +def get_entry_info(dist, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return get_distribution(dist).get_entry_info(group, name) + + +class IMetadataProvider: + def has_metadata(name): + """Does the package's distribution contain the named metadata?""" + + def get_metadata(name): + """The named metadata resource as a string""" + + def get_metadata_lines(name): + """Yield named metadata resource as list of non-blank non-comment lines + + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" + + def metadata_isdir(name): + """Is the named metadata a directory? 
(like ``os.path.isdir()``)""" + + def metadata_listdir(name): + """List of metadata names in the directory (like ``os.listdir()``)""" + + def run_script(script_name, namespace): + """Execute the named script in the supplied namespace dictionary""" + + +class IResourceProvider(IMetadataProvider): + """An object that provides access to package resources""" + + def get_resource_filename(manager, resource_name): + """Return a true filesystem path for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_stream(manager, resource_name): + """Return a readable file-like object for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_string(manager, resource_name): + """Return a string containing the contents of `resource_name` + + `manager` must be an ``IResourceManager``""" + + def has_resource(resource_name): + """Does the package contain the named resource?""" + + def resource_isdir(resource_name): + """Is the named resource a directory? (like ``os.path.isdir()``)""" + + def resource_listdir(resource_name): + """List of resource names in the directory (like ``os.listdir()``)""" + + +class WorkingSet: + """A collection of active distributions on sys.path (or a similar list)""" + + def __init__(self, entries=None): + """Create working set from list of path entries (default=sys.path)""" + self.entries = [] + self.entry_keys = {} + self.by_key = {} + self.callbacks = [] + + if entries is None: + entries = sys.path + + for entry in entries: + self.add_entry(entry) + + @classmethod + def _build_master(cls): + """ + Prepare the master working set. + """ + ws = cls() + try: + from __main__ import __requires__ + except ImportError: + # The main program does not list any requirements + return ws + + # ensure the requirements are met + try: + ws.require(__requires__) + except VersionConflict: + return cls._build_from_requirements(__requires__) + + return ws + + @classmethod + def _build_from_requirements(cls, req_spec): + """ + Build a working set from a requirement spec. Rewrites sys.path. + """ + # try it without defaults already on sys.path + # by starting with an empty path + ws = cls([]) + reqs = parse_requirements(req_spec) + dists = ws.resolve(reqs, Environment()) + for dist in dists: + ws.add(dist) + + # add any missing entries from sys.path + for entry in sys.path: + if entry not in ws.entries: + ws.add_entry(entry) + + # then copy back to sys.path + sys.path[:] = ws.entries + return ws + + def add_entry(self, entry): + """Add a path item to ``.entries``, finding any distributions on it + + ``find_distributions(entry, True)`` is used to find distributions + corresponding to the path entry, and they are added. `entry` is + always appended to ``.entries``, even if it is already present. + (This is because ``sys.path`` can contain the same value more than + once, and the ``.entries`` of the ``sys.path`` WorkingSet should always + equal ``sys.path``.) + """ + self.entry_keys.setdefault(entry, []) + self.entries.append(entry) + for dist in find_distributions(entry, True): + self.add(dist, entry, False) + + def __contains__(self, dist): + """True if `dist` is the active distribution for its project""" + return self.by_key.get(dist.key) == dist + + def find(self, req): + """Find a distribution matching requirement `req` + + If there is an active distribution for the requested project, this + returns it as long as it meets the version requirement specified by + `req`. 
But, if there is an active distribution for the project and it + does *not* meet the `req` requirement, ``VersionConflict`` is raised. + If there is no active distribution for the requested project, ``None`` + is returned. + """ + dist = self.by_key.get(req.key) + if dist is not None and dist not in req: + # XXX add more info + raise VersionConflict(dist, req) + return dist + + def iter_entry_points(self, group, name=None): + """Yield entry point objects from `group` matching `name` + + If `name` is None, yields all entry points in `group` from all + distributions in the working set, otherwise only ones matching + both `group` and `name` are yielded (in distribution order). + """ + return ( + entry + for dist in self + for entry in dist.get_entry_map(group).values() + if name is None or name == entry.name + ) + + def run_script(self, requires, script_name): + """Locate distribution for `requires` and run `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + self.require(requires)[0].run_script(script_name, ns) + + def __iter__(self): + """Yield distributions for non-duplicate projects in the working set + + The yield order is the order in which the items' path entries were + added to the working set. + """ + seen = {} + for item in self.entries: + if item not in self.entry_keys: + # workaround a cache issue + continue + + for key in self.entry_keys[item]: + if key not in seen: + seen[key] = 1 + yield self.by_key[key] + + def add(self, dist, entry=None, insert=True, replace=False): + """Add `dist` to working set, associated with `entry` + + If `entry` is unspecified, it defaults to the ``.location`` of `dist`. + On exit from this routine, `entry` is added to the end of the working + set's ``.entries`` (if it wasn't already present). + + `dist` is only added to the working set if it's for a project that + doesn't already have a distribution in the set, unless `replace=True`. + If it's added, any callbacks registered with the ``subscribe()`` method + will be called. + """ + if insert: + dist.insert_on(self.entries, entry, replace=replace) + + if entry is None: + entry = dist.location + keys = self.entry_keys.setdefault(entry, []) + keys2 = self.entry_keys.setdefault(dist.location, []) + if not replace and dist.key in self.by_key: + # ignore hidden distros + return + + self.by_key[dist.key] = dist + if dist.key not in keys: + keys.append(dist.key) + if dist.key not in keys2: + keys2.append(dist.key) + self._added_new(dist) + + # FIXME: 'WorkingSet.resolve' is too complex (11) + def resolve(self, requirements, env=None, installer=None, # noqa: C901 + replace_conflicting=False, extras=None): + """List all distributions needed to (recursively) meet `requirements` + + `requirements` must be a sequence of ``Requirement`` objects. `env`, + if supplied, should be an ``Environment`` instance. If + not supplied, it defaults to all distributions available within any + entry or distribution in the working set. `installer`, if supplied, + will be invoked with each requirement that cannot be met by an + already-installed distribution; it should return a ``Distribution`` or + ``None``. + + Unless `replace_conflicting=True`, raises a VersionConflict exception + if + any requirements are found on the path that have the correct name but + the wrong version. Otherwise, if an `installer` is supplied it will be + invoked to obtain the correct version of the requirement and activate + it. 
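+
+        A minimal sketch of typical usage (hypothetical requirement string,
+        added for illustration)::
+
+            ws = WorkingSet()
+            for dist in ws.resolve(parse_requirements(["setuptools"])):
+                ws.add(dist)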
+ + `extras` is a list of the extras to be used with these requirements. + This is important because extra requirements may look like `my_req; + extra = "my_extra"`, which would otherwise be interpreted as a purely + optional requirement. Instead, we want to be able to assert that these + requirements are truly required. + """ + + # set up the stack + requirements = list(requirements)[::-1] + # set of processed requirements + processed = {} + # key -> dist + best = {} + to_activate = [] + + req_extras = _ReqExtras() + + # Mapping of requirement to set of distributions that required it; + # useful for reporting info about conflicts. + required_by = collections.defaultdict(set) + + while requirements: + # process dependencies breadth-first + req = requirements.pop(0) + if req in processed: + # Ignore cyclic or redundant dependencies + continue + + if not req_extras.markers_pass(req, extras): + continue + + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws = WorkingSet([]) + dist = best[req.key] = env.best_match( + req, ws, installer, + replace_conflicting=replace_conflicting + ) + if dist is None: + requirers = required_by.get(req, None) + raise DistributionNotFound(req, requirers) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + + # push the new requirements onto the stack + new_requirements = dist.requires(req.extras)[::-1] + requirements.extend(new_requirements) + + # Register the new requirements needed by req + for new_requirement in new_requirements: + required_by[new_requirement].add(req.project_name) + req_extras[new_requirement] = req.extras + + processed[req] = True + + # return list of distros to activate + return to_activate + + def find_plugins( + self, plugin_env, full_env=None, installer=None, fallback=True): + """Find all activatable distributions in `plugin_env` + + Example usage:: + + distributions, errors = working_set.find_plugins( + Environment(plugin_dirlist) + ) + # add plugins+libs to sys.path + map(working_set.add, distributions) + # display errors + print('Could not load', errors) + + The `plugin_env` should be an ``Environment`` instance that contains + only distributions that are in the project's "plugin directory" or + directories. The `full_env`, if supplied, should be an ``Environment`` + contains all currently-available distributions. If `full_env` is not + supplied, one is created automatically from the ``WorkingSet`` this + method is called on, which will typically mean that every directory on + ``sys.path`` will be scanned for distributions. + + `installer` is a standard installer callback as used by the + ``resolve()`` method. The `fallback` flag indicates whether we should + attempt to resolve older versions of a plugin if the newest version + cannot be resolved. + + This method returns a 2-tuple: (`distributions`, `error_info`), where + `distributions` is a list of the distributions found in `plugin_env` + that were loadable, along with any other distributions that are needed + to resolve their dependencies. 
`error_info` is a dictionary mapping + unloadable plugin distributions to an exception instance describing the + error that occurred. Usually this will be a ``DistributionNotFound`` or + ``VersionConflict`` instance. + """ + + plugin_projects = list(plugin_env) + # scan project names in alphabetic order + plugin_projects.sort() + + error_info = {} + distributions = {} + + if full_env is None: + env = Environment(self.entries) + env += plugin_env + else: + env = full_env + plugin_env + + shadow_set = self.__class__([]) + # put all our entries in shadow_set + list(map(shadow_set.add, self)) + + for project_name in plugin_projects: + + for dist in plugin_env[project_name]: + + req = [dist.as_requirement()] + + try: + resolvees = shadow_set.resolve(req, env, installer) + + except ResolutionError as v: + # save error info + error_info[dist] = v + if fallback: + # try the next older version of project + continue + else: + # give up on this project, keep going + break + + else: + list(map(shadow_set.add, resolvees)) + distributions.update(dict.fromkeys(resolvees)) + + # success, no need to try any more versions of this project + break + + distributions = list(distributions) + distributions.sort() + + return distributions, error_info + + def require(self, *requirements): + """Ensure that distributions matching `requirements` are activated + + `requirements` must be a string or a (possibly-nested) sequence + thereof, specifying the distributions and versions required. The + return value is a sequence of the distributions that needed to be + activated to fulfill the requirements; all relevant distributions are + included, even if they were already activated in this working set. + """ + needed = self.resolve(parse_requirements(requirements)) + + for dist in needed: + self.add(dist) + + return needed + + def subscribe(self, callback, existing=True): + """Invoke `callback` for all distributions + + If `existing=True` (default), + call on all existing ones, as well. + """ + if callback in self.callbacks: + return + self.callbacks.append(callback) + if not existing: + return + for dist in self: + callback(dist) + + def _added_new(self, dist): + for callback in self.callbacks: + callback(dist) + + def __getstate__(self): + return ( + self.entries[:], self.entry_keys.copy(), self.by_key.copy(), + self.callbacks[:] + ) + + def __setstate__(self, e_k_b_c): + entries, keys, by_key, callbacks = e_k_b_c + self.entries = entries[:] + self.entry_keys = keys.copy() + self.by_key = by_key.copy() + self.callbacks = callbacks[:] + + +class _ReqExtras(dict): + """ + Map each requirement to the extras that demanded it. + """ + + def markers_pass(self, req, extras=None): + """ + Evaluate markers for req against each extra that + demanded it. + + Return False if the req has a marker and fails + evaluation. Otherwise, return True. + """ + extra_evals = ( + req.marker.evaluate({'extra': extra}) + for extra in self.get(req, ()) + (extras or (None,)) + ) + return not req.marker or any(extra_evals) + + +class Environment: + """Searchable snapshot of distributions on a search path""" + + def __init__( + self, search_path=None, platform=get_supported_platform(), + python=PY_MAJOR): + """Snapshot distributions available on a search path + + Any distributions found on `search_path` are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. 
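+
+        For illustration (hypothetical plugin directory, added example)::
+
+            env = Environment(["plugins"])      # scan only ./plugins entries
+            newest_first = env["my-plugin"]     # matching dists, newest first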
+ + `platform` is an optional string specifying the name of the platform + that platform-specific distributions must be compatible with. If + unspecified, it defaults to the current platform. `python` is an + optional string naming the desired version of Python (e.g. ``'3.6'``); + it defaults to the current version. + + You may explicitly set `platform` (and/or `python`) to ``None`` if you + wish to map *all* distributions, not just those compatible with the + running platform or Python version. + """ + self._distmap = {} + self.platform = platform + self.python = python + self.scan(search_path) + + def can_add(self, dist): + """Is distribution `dist` acceptable for this environment? + + The distribution must match the platform and python version + requirements specified when this environment was created, or False + is returned. + """ + py_compat = ( + self.python is None + or dist.py_version is None + or dist.py_version == self.python + ) + return py_compat and compatible_platforms(dist.platform, self.platform) + + def remove(self, dist): + """Remove `dist` from the environment""" + self._distmap[dist.key].remove(dist) + + def scan(self, search_path=None): + """Scan `search_path` for distributions usable in this environment + + Any distributions found are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. Only distributions conforming to + the platform/python version defined at initialization are added. + """ + if search_path is None: + search_path = sys.path + + for item in search_path: + for dist in find_distributions(item): + self.add(dist) + + def __getitem__(self, project_name): + """Return a newest-to-oldest list of distributions for `project_name` + + Uses case-insensitive `project_name` comparison, assuming all the + project's distributions use their project's name converted to all + lowercase as their key. + + """ + distribution_key = project_name.lower() + return self._distmap.get(distribution_key, []) + + def add(self, dist): + """Add `dist` if we ``can_add()`` it and it has not already been added + """ + if self.can_add(dist) and dist.has_version(): + dists = self._distmap.setdefault(dist.key, []) + if dist not in dists: + dists.append(dist) + dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) + + def best_match( + self, req, working_set, installer=None, replace_conflicting=False): + """Find distribution best matching `req` and usable on `working_set` + + This calls the ``find(req)`` method of the `working_set` to see if a + suitable distribution is already active. (This may raise + ``VersionConflict`` if an unsuitable version of the project is already + active in the specified `working_set`.) If a suitable distribution + isn't active, this method returns the newest distribution in the + environment that meets the ``Requirement`` in `req`. If no suitable + distribution is found, and `installer` is supplied, then the result of + calling the environment's ``obtain(req, installer)`` method will be + returned. + """ + try: + dist = working_set.find(req) + except VersionConflict: + if not replace_conflicting: + raise + dist = None + if dist is not None: + return dist + for dist in self[req.key]: + if dist in req: + return dist + # try to download/install + return self.obtain(req, installer) + + def obtain(self, requirement, installer=None): + """Obtain a distribution matching `requirement` (e.g. via download) + + Obtain a distro that matches requirement (e.g. via download). 
In the + base ``Environment`` class, this routine just returns + ``installer(requirement)``, unless `installer` is None, in which case + None is returned instead. This method is a hook that allows subclasses + to attempt other ways of obtaining a distribution before falling back + to the `installer` argument.""" + if installer is not None: + return installer(requirement) + + def __iter__(self): + """Yield the unique project names of the available distributions""" + for key in self._distmap.keys(): + if self[key]: + yield key + + def __iadd__(self, other): + """In-place addition of a distribution or environment""" + if isinstance(other, Distribution): + self.add(other) + elif isinstance(other, Environment): + for project in other: + for dist in other[project]: + self.add(dist) + else: + raise TypeError("Can't add %r to environment" % (other,)) + return self + + def __add__(self, other): + """Add an environment or distribution to an environment""" + new = self.__class__([], platform=None, python=None) + for env in self, other: + new += env + return new + + +# XXX backward compatibility +AvailableDistributions = Environment + + +class ExtractionError(RuntimeError): + """An error occurred extracting a resource + + The following attributes are available from instances of this exception: + + manager + The resource manager that raised this exception + + cache_path + The base directory for resource extraction + + original_error + The exception instance that caused extraction to fail + """ + + +class ResourceManager: + """Manage resource extraction and packages""" + extraction_path = None + + def __init__(self): + self.cached_files = {} + + def resource_exists(self, package_or_requirement, resource_name): + """Does the named resource exist?""" + return get_provider(package_or_requirement).has_resource(resource_name) + + def resource_isdir(self, package_or_requirement, resource_name): + """Is the named resource an existing directory?""" + return get_provider(package_or_requirement).resource_isdir( + resource_name + ) + + def resource_filename(self, package_or_requirement, resource_name): + """Return a true filesystem path for specified resource""" + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name + ) + + def resource_stream(self, package_or_requirement, resource_name): + """Return a readable file-like object for specified resource""" + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name + ) + + def resource_string(self, package_or_requirement, resource_name): + """Return specified resource as a string""" + return get_provider(package_or_requirement).get_resource_string( + self, resource_name + ) + + def resource_listdir(self, package_or_requirement, resource_name): + """List the contents of the named resource directory""" + return get_provider(package_or_requirement).resource_listdir( + resource_name + ) + + def extraction_error(self): + """Give an error message for problems extracting file(s)""" + + old_exc = sys.exc_info()[1] + cache_path = self.extraction_path or get_default_cache() + + tmpl = textwrap.dedent(""" + Can't extract file(s) to egg cache + + The following error occurred while trying to extract file(s) + to the Python egg cache: + + {old_exc} + + The Python egg cache directory is currently set to: + + {cache_path} + + Perhaps your account does not have write access to this directory? + You can change the cache directory by setting the PYTHON_EGG_CACHE + environment variable to point to an accessible directory. 
+ """).lstrip() + err = ExtractionError(tmpl.format(**locals())) + err.manager = self + err.cache_path = cache_path + err.original_error = old_exc + raise err + + def get_cache_path(self, archive_name, names=()): + """Return absolute location in cache for `archive_name` and `names` + + The parent directory of the resulting path will be created if it does + not already exist. `archive_name` should be the base filename of the + enclosing egg (which may not be the name of the enclosing zipfile!), + including its ".egg" extension. `names`, if provided, should be a + sequence of path name parts "under" the egg's extraction location. + + This method should only be called by resource providers that need to + obtain an extraction location, and only for names they intend to + extract, as it tracks the generated names for possible cleanup later. + """ + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name + '-tmp', *names) + try: + _bypass_ensure_directory(target_path) + except Exception: + self.extraction_error() + + self._warn_unsafe_extraction_path(extract_path) + + self.cached_files[target_path] = 1 + return target_path + + @staticmethod + def _warn_unsafe_extraction_path(path): + """ + If the default extraction path is overridden and set to an insecure + location, such as /tmp, it opens up an opportunity for an attacker to + replace an extracted file with an unauthorized payload. Warn the user + if a known insecure location is used. + + See Distribute #375 for more details. + """ + if os.name == 'nt' and not path.startswith(os.environ['windir']): + # On Windows, permissions are generally restrictive by default + # and temp directories are not writable by other users, so + # bypass the warning. + return + mode = os.stat(path).st_mode + if mode & stat.S_IWOTH or mode & stat.S_IWGRP: + msg = ( + "Extraction path is writable by group/others " + "and vulnerable to attack when " + "used with get_resource_filename ({path}). " + "Consider a more secure " + "location (set with .set_extraction_path or the " + "PYTHON_EGG_CACHE environment variable)." + ).format(**locals()) + warnings.warn(msg, UserWarning) + + def postprocess(self, tempname, filename): + """Perform any platform-specific postprocessing of `tempname` + + This is where Mac header rewrites should be done; other platforms don't + have anything special they should do. + + Resource providers should call this method ONLY after successfully + extracting a compressed resource. They must NOT call it on resources + that are already in the filesystem. + + `tempname` is the current (temporary) name of the file, and `filename` + is the name it will be renamed to by the caller after this routine + returns. + """ + + if os.name == 'posix': + # Make the resource executable + mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 + os.chmod(tempname, mode) + + def set_extraction_path(self, path): + """Set the base path where resources will be extracted to, if needed. + + If you do not call this routine before any extractions take place, the + path defaults to the return value of ``get_default_cache()``. (Which + is based on the ``PYTHON_EGG_CACHE`` environment variable, with various + platform-specific fallbacks. See that routine's documentation for more + details.) + + Resources are extracted to subdirectories of this path based upon + information given by the ``IResourceProvider``. 
You may set this to a + temporary directory, but then you must call ``cleanup_resources()`` to + delete the extracted files when done. There is no guarantee that + ``cleanup_resources()`` will be able to remove all extracted files. + + (Note: you may not change the extraction path for a given resource + manager once resources have been extracted, unless you first call + ``cleanup_resources()``.) + """ + if self.cached_files: + raise ValueError( + "Can't change extraction path, files already extracted" + ) + + self.extraction_path = path + + def cleanup_resources(self, force=False): + """ + Delete all extracted resource files and directories, returning a list + of the file and directory names that could not be successfully removed. + This function does not have any concurrency protection, so it should + generally only be called when the extraction path is a temporary + directory exclusive to a single process. This method is not + automatically called; you must call it explicitly or register it as an + ``atexit`` function if you wish to ensure cleanup of a temporary + directory used for extractions. + """ + # XXX + + +def get_default_cache(): + """ + Return the ``PYTHON_EGG_CACHE`` environment variable + or a platform-relevant user cache dir for an app + named "Python-Eggs". + """ + return ( + os.environ.get('PYTHON_EGG_CACHE') + or appdirs.user_cache_dir(appname='Python-Eggs') + ) + + +def safe_name(name): + """Convert an arbitrary string to a standard distribution name + + Any runs of non-alphanumeric/. characters are replaced with a single '-'. + """ + return re.sub('[^A-Za-z0-9.]+', '-', name) + + +def safe_version(version): + """ + Convert an arbitrary string to a standard version string + """ + try: + # normalize the version + return str(packaging.version.Version(version)) + except packaging.version.InvalidVersion: + version = version.replace(' ', '.') + return re.sub('[^A-Za-z0-9.]+', '-', version) + + +def safe_extra(extra): + """Convert an arbitrary string to a standard 'extra' name + + Any runs of non-alphanumeric characters are replaced with a single '_', + and the result is always lowercased. + """ + return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() + + +def to_filename(name): + """Convert a project or version name to its filename-escaped form + + Any '-' characters are currently replaced with '_'. + """ + return name.replace('-', '_') + + +def invalid_marker(text): + """ + Validate text as a PEP 508 environment marker; return an exception + if invalid or False otherwise. + """ + try: + evaluate_marker(text) + except SyntaxError as e: + e.filename = None + e.lineno = None + return e + return False + + +def evaluate_marker(text, extra=None): + """ + Evaluate a PEP 508 environment marker. + Return a boolean indicating the marker result in this environment. + Raise SyntaxError if marker is invalid. + + This implementation uses the 'pyparsing' module. 
+ """ + try: + marker = packaging.markers.Marker(text) + return marker.evaluate() + except packaging.markers.InvalidMarker as e: + raise SyntaxError(e) from e + + +class NullProvider: + """Try to implement resources and metadata for arbitrary PEP 302 loaders""" + + egg_name = None + egg_info = None + loader = None + + def __init__(self, module): + self.loader = getattr(module, '__loader__', None) + self.module_path = os.path.dirname(getattr(module, '__file__', '')) + + def get_resource_filename(self, manager, resource_name): + return self._fn(self.module_path, resource_name) + + def get_resource_stream(self, manager, resource_name): + return io.BytesIO(self.get_resource_string(manager, resource_name)) + + def get_resource_string(self, manager, resource_name): + return self._get(self._fn(self.module_path, resource_name)) + + def has_resource(self, resource_name): + return self._has(self._fn(self.module_path, resource_name)) + + def _get_metadata_path(self, name): + return self._fn(self.egg_info, name) + + def has_metadata(self, name): + if not self.egg_info: + return self.egg_info + + path = self._get_metadata_path(name) + return self._has(path) + + def get_metadata(self, name): + if not self.egg_info: + return "" + path = self._get_metadata_path(name) + value = self._get(path) + try: + return value.decode('utf-8') + except UnicodeDecodeError as exc: + # Include the path in the error message to simplify + # troubleshooting, and without changing the exception type. + exc.reason += ' in {} file at path: {}'.format(name, path) + raise + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + def resource_isdir(self, resource_name): + return self._isdir(self._fn(self.module_path, resource_name)) + + def metadata_isdir(self, name): + return self.egg_info and self._isdir(self._fn(self.egg_info, name)) + + def resource_listdir(self, resource_name): + return self._listdir(self._fn(self.module_path, resource_name)) + + def metadata_listdir(self, name): + if self.egg_info: + return self._listdir(self._fn(self.egg_info, name)) + return [] + + def run_script(self, script_name, namespace): + script = 'scripts/' + script_name + if not self.has_metadata(script): + raise ResolutionError( + "Script {script!r} not found in metadata at {self.egg_info!r}" + .format(**locals()), + ) + script_text = self.get_metadata(script).replace('\r\n', '\n') + script_text = script_text.replace('\r', '\n') + script_filename = self._fn(self.egg_info, script) + namespace['__file__'] = script_filename + if os.path.exists(script_filename): + with open(script_filename) as fid: + source = fid.read() + code = compile(source, script_filename, 'exec') + exec(code, namespace, namespace) + else: + from linecache import cache + cache[script_filename] = ( + len(script_text), 0, script_text.split('\n'), script_filename + ) + script_code = compile(script_text, script_filename, 'exec') + exec(script_code, namespace, namespace) + + def _has(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _isdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _listdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _fn(self, base, resource_name): + self._validate_resource_path(resource_name) + if resource_name: + return os.path.join(base, *resource_name.split('/')) + return base + + @staticmethod + def 
_validate_resource_path(path): + """ + Validate the resource paths according to the docs. + https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access + + >>> warned = getfixture('recwarn') + >>> warnings.simplefilter('always') + >>> vrp = NullProvider._validate_resource_path + >>> vrp('foo/bar.txt') + >>> bool(warned) + False + >>> vrp('../foo/bar.txt') + >>> bool(warned) + True + >>> warned.clear() + >>> vrp('/foo/bar.txt') + >>> bool(warned) + True + >>> vrp('foo/../../bar.txt') + >>> bool(warned) + True + >>> warned.clear() + >>> vrp('foo/f../bar.txt') + >>> bool(warned) + False + + Windows path separators are straight-up disallowed. + >>> vrp(r'\\foo/bar.txt') + Traceback (most recent call last): + ... + ValueError: Use of .. or absolute path in a resource path \ +is not allowed. + + >>> vrp(r'C:\\foo/bar.txt') + Traceback (most recent call last): + ... + ValueError: Use of .. or absolute path in a resource path \ +is not allowed. + + Blank values are allowed + + >>> vrp('') + >>> bool(warned) + False + + Non-string values are not. + + >>> vrp(None) + Traceback (most recent call last): + ... + AttributeError: ... + """ + invalid = ( + os.path.pardir in path.split(posixpath.sep) or + posixpath.isabs(path) or + ntpath.isabs(path) + ) + if not invalid: + return + + msg = "Use of .. or absolute path in a resource path is not allowed." + + # Aggressively disallow Windows absolute paths + if ntpath.isabs(path) and not posixpath.isabs(path): + raise ValueError(msg) + + # for compatibility, warn; in future + # raise ValueError(msg) + warnings.warn( + msg[:-1] + " and will raise exceptions in a future release.", + DeprecationWarning, + stacklevel=4, + ) + + def _get(self, path): + if hasattr(self.loader, 'get_data'): + return self.loader.get_data(path) + raise NotImplementedError( + "Can't perform this operation for loaders without 'get_data()'" + ) + + +register_loader_type(object, NullProvider) + + +def _parents(path): + """ + yield all parents of path including path + """ + last = None + while path != last: + yield path + last = path + path, _ = os.path.split(path) + + +class EggProvider(NullProvider): + """Provider based on a virtual filesystem""" + + def __init__(self, module): + NullProvider.__init__(self, module) + self._setup_prefix() + + def _setup_prefix(self): + # Assume that metadata may be nested inside a "basket" + # of multiple eggs and use module_path instead of .archive. 
+ eggs = filter(_is_egg_path, _parents(self.module_path)) + egg = next(eggs, None) + egg and self._set_egg(egg) + + def _set_egg(self, path): + self.egg_name = os.path.basename(path) + self.egg_info = os.path.join(path, 'EGG-INFO') + self.egg_root = path + + +class DefaultProvider(EggProvider): + """Provides access to package resources in the filesystem""" + + def _has(self, path): + return os.path.exists(path) + + def _isdir(self, path): + return os.path.isdir(path) + + def _listdir(self, path): + return os.listdir(path) + + def get_resource_stream(self, manager, resource_name): + return open(self._fn(self.module_path, resource_name), 'rb') + + def _get(self, path): + with open(path, 'rb') as stream: + return stream.read() + + @classmethod + def _register(cls): + loader_names = 'SourceFileLoader', 'SourcelessFileLoader', + for name in loader_names: + loader_cls = getattr(importlib_machinery, name, type(None)) + register_loader_type(loader_cls, cls) + + +DefaultProvider._register() + + +class EmptyProvider(NullProvider): + """Provider that returns nothing for all requests""" + + module_path = None + + _isdir = _has = lambda self, path: False + + def _get(self, path): + return '' + + def _listdir(self, path): + return [] + + def __init__(self): + pass + + +empty_provider = EmptyProvider() + + +class ZipManifests(dict): + """ + zip manifest builder + """ + + @classmethod + def build(cls, path): + """ + Build a dictionary similar to the zipimport directory + caches, except instead of tuples, store ZipInfo objects. + + Use a platform-specific path separator (os.sep) for the path keys + for compatibility with pypy on Windows. + """ + with zipfile.ZipFile(path) as zfile: + items = ( + ( + name.replace('/', os.sep), + zfile.getinfo(name), + ) + for name in zfile.namelist() + ) + return dict(items) + + load = build + + +class MemoizedZipManifests(ZipManifests): + """ + Memoized zipfile manifests. + """ + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') + + def load(self, path): + """ + Load a manifest at path or return a suitable manifest already loaded. + """ + path = os.path.normpath(path) + mtime = os.stat(path).st_mtime + + if path not in self or self[path].mtime != mtime: + manifest = self.build(path) + self[path] = self.manifest_mod(manifest, mtime) + + return self[path].manifest + + +class ZipProvider(EggProvider): + """Resource support for zips and eggs""" + + eagers = None + _zip_manifests = MemoizedZipManifests() + + def __init__(self, module): + EggProvider.__init__(self, module) + self.zip_pre = self.loader.archive + os.sep + + def _zipinfo_name(self, fspath): + # Convert a virtual filename (full path to file) into a zipfile subpath + # usable with the zipimport directory cache for our target archive + fspath = fspath.rstrip(os.sep) + if fspath == self.loader.archive: + return '' + if fspath.startswith(self.zip_pre): + return fspath[len(self.zip_pre):] + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.zip_pre) + ) + + def _parts(self, zip_path): + # Convert a zipfile subpath into an egg-relative path part list. 
+ # pseudo-fs path + fspath = self.zip_pre + zip_path + if fspath.startswith(self.egg_root + os.sep): + return fspath[len(self.egg_root) + 1:].split(os.sep) + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.egg_root) + ) + + @property + def zipinfo(self): + return self._zip_manifests.load(self.loader.archive) + + def get_resource_filename(self, manager, resource_name): + if not self.egg_name: + raise NotImplementedError( + "resource_filename() only supported for .egg, not .zip" + ) + # no need to lock for extraction, since we use temp names + zip_path = self._resource_to_zip(resource_name) + eagers = self._get_eager_resources() + if '/'.join(self._parts(zip_path)) in eagers: + for name in eagers: + self._extract_resource(manager, self._eager_to_zip(name)) + return self._extract_resource(manager, zip_path) + + @staticmethod + def _get_date_and_size(zip_stat): + size = zip_stat.file_size + # ymdhms+wday, yday, dst + date_time = zip_stat.date_time + (0, 0, -1) + # 1980 offset already done + timestamp = time.mktime(date_time) + return timestamp, size + + # FIXME: 'ZipProvider._extract_resource' is too complex (12) + def _extract_resource(self, manager, zip_path): # noqa: C901 + + if zip_path in self._index(): + for name in self._index()[zip_path]: + last = self._extract_resource( + manager, os.path.join(zip_path, name) + ) + # return the extracted directory name + return os.path.dirname(last) + + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + + if not WRITE_SUPPORT: + raise IOError('"os.rename" and "os.unlink" are not supported ' + 'on this platform') + try: + + real_path = manager.get_cache_path( + self.egg_name, self._parts(zip_path) + ) + + if self._is_current(real_path, zip_path): + return real_path + + outf, tmpnam = _mkstemp( + ".$extract", + dir=os.path.dirname(real_path), + ) + os.write(outf, self.loader.get_data(zip_path)) + os.close(outf) + utime(tmpnam, (timestamp, timestamp)) + manager.postprocess(tmpnam, real_path) + + try: + rename(tmpnam, real_path) + + except os.error: + if os.path.isfile(real_path): + if self._is_current(real_path, zip_path): + # the file became current since it was checked above, + # so proceed. 
+ return real_path + # Windows, del old file and retry + elif os.name == 'nt': + unlink(real_path) + rename(tmpnam, real_path) + return real_path + raise + + except os.error: + # report a user-friendly error + manager.extraction_error() + + return real_path + + def _is_current(self, file_path, zip_path): + """ + Return True if the file_path is current for this zip_path + """ + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + if not os.path.isfile(file_path): + return False + stat = os.stat(file_path) + if stat.st_size != size or stat.st_mtime != timestamp: + return False + # check that the contents match + zip_contents = self.loader.get_data(zip_path) + with open(file_path, 'rb') as f: + file_contents = f.read() + return zip_contents == file_contents + + def _get_eager_resources(self): + if self.eagers is None: + eagers = [] + for name in ('native_libs.txt', 'eager_resources.txt'): + if self.has_metadata(name): + eagers.extend(self.get_metadata_lines(name)) + self.eagers = eagers + return self.eagers + + def _index(self): + try: + return self._dirindex + except AttributeError: + ind = {} + for path in self.zipinfo: + parts = path.split(os.sep) + while parts: + parent = os.sep.join(parts[:-1]) + if parent in ind: + ind[parent].append(parts[-1]) + break + else: + ind[parent] = [parts.pop()] + self._dirindex = ind + return ind + + def _has(self, fspath): + zip_path = self._zipinfo_name(fspath) + return zip_path in self.zipinfo or zip_path in self._index() + + def _isdir(self, fspath): + return self._zipinfo_name(fspath) in self._index() + + def _listdir(self, fspath): + return list(self._index().get(self._zipinfo_name(fspath), ())) + + def _eager_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.egg_root, resource_name)) + + def _resource_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.module_path, resource_name)) + + +register_loader_type(zipimport.zipimporter, ZipProvider) + + +class FileMetadata(EmptyProvider): + """Metadata handler for standalone PKG-INFO files + + Usage:: + + metadata = FileMetadata("/path/to/PKG-INFO") + + This provider rejects all data and metadata requests except for PKG-INFO, + which is treated as existing, and will be the contents of the file at + the provided location. 
+ """ + + def __init__(self, path): + self.path = path + + def _get_metadata_path(self, name): + return self.path + + def has_metadata(self, name): + return name == 'PKG-INFO' and os.path.isfile(self.path) + + def get_metadata(self, name): + if name != 'PKG-INFO': + raise KeyError("No metadata except PKG-INFO is available") + + with io.open(self.path, encoding='utf-8', errors="replace") as f: + metadata = f.read() + self._warn_on_replacement(metadata) + return metadata + + def _warn_on_replacement(self, metadata): + replacement_char = '�' + if replacement_char in metadata: + tmpl = "{self.path} could not be properly decoded in UTF-8" + msg = tmpl.format(**locals()) + warnings.warn(msg) + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + +class PathMetadata(DefaultProvider): + """Metadata provider for egg directories + + Usage:: + + # Development eggs: + + egg_info = "/path/to/PackageName.egg-info" + base_dir = os.path.dirname(egg_info) + metadata = PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + dist = Distribution(basedir, project_name=dist_name, metadata=metadata) + + # Unpacked egg directories: + + egg_path = "/path/to/PackageName-ver-pyver-etc.egg" + metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) + dist = Distribution.from_filename(egg_path, metadata=metadata) + """ + + def __init__(self, path, egg_info): + self.module_path = path + self.egg_info = egg_info + + +class EggMetadata(ZipProvider): + """Metadata provider for .egg files""" + + def __init__(self, importer): + """Create a metadata provider from a zipimporter""" + + self.zip_pre = importer.archive + os.sep + self.loader = importer + if importer.prefix: + self.module_path = os.path.join(importer.archive, importer.prefix) + else: + self.module_path = importer.archive + self._setup_prefix() + + +_declare_state('dict', _distribution_finders={}) + + +def register_finder(importer_type, distribution_finder): + """Register `distribution_finder` to find distributions in sys.path items + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `distribution_finder` is a callable that, passed a path + item and the importer instance, yields ``Distribution`` instances found on + that path item. See ``pkg_resources.find_on_path`` for an example.""" + _distribution_finders[importer_type] = distribution_finder + + +def find_distributions(path_item, only=False): + """Yield distributions accessible via `path_item`""" + importer = get_importer(path_item) + finder = _find_adapter(_distribution_finders, importer) + return finder(importer, path_item, only) + + +def find_eggs_in_zip(importer, path_item, only=False): + """ + Find eggs in zip files; possibly multiple nested eggs. 
+ """ + if importer.archive.endswith('.whl'): + # wheels are not supported with this finder + # they don't have PKG-INFO metadata, and won't ever contain eggs + return + metadata = EggMetadata(importer) + if metadata.has_metadata('PKG-INFO'): + yield Distribution.from_filename(path_item, metadata=metadata) + if only: + # don't yield nested distros + return + for subitem in metadata.resource_listdir(''): + if _is_egg_path(subitem): + subpath = os.path.join(path_item, subitem) + dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) + for dist in dists: + yield dist + elif subitem.lower().endswith(('.dist-info', '.egg-info')): + subpath = os.path.join(path_item, subitem) + submeta = EggMetadata(zipimport.zipimporter(subpath)) + submeta.egg_info = subpath + yield Distribution.from_location(path_item, subitem, submeta) + + +register_finder(zipimport.zipimporter, find_eggs_in_zip) + + +def find_nothing(importer, path_item, only=False): + return () + + +register_finder(object, find_nothing) + + +def _by_version_descending(names): + """ + Given a list of filenames, return them in descending order + by version number. + + >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' + >>> _by_version_descending(names) + ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] + """ + def _by_version(name): + """ + Parse each component of the filename + """ + name, ext = os.path.splitext(name) + parts = itertools.chain(name.split('-'), [ext]) + return [packaging.version.parse(part) for part in parts] + + return sorted(names, key=_by_version, reverse=True) + + +def find_on_path(importer, path_item, only=False): + """Yield distributions accessible on a sys.path directory""" + path_item = _normalize_cached(path_item) + + if _is_unpacked_egg(path_item): + yield Distribution.from_filename( + path_item, metadata=PathMetadata( + path_item, os.path.join(path_item, 'EGG-INFO') + ) + ) + return + + entries = ( + os.path.join(path_item, child) + for child in safe_listdir(path_item) + ) + + # for performance, before sorting by version, + # screen entries for only those that will yield + # distributions + filtered = ( + entry + for entry in entries + if dist_factory(path_item, entry, only) + ) + + # scan for .egg and .egg-info in directory + path_item_entries = _by_version_descending(filtered) + for entry in path_item_entries: + fullpath = os.path.join(path_item, entry) + factory = dist_factory(path_item, entry, only) + for dist in factory(fullpath): + yield dist + + +def dist_factory(path_item, entry, only): + """Return a dist_factory for the given entry.""" + lower = entry.lower() + is_egg_info = lower.endswith('.egg-info') + is_dist_info = ( + lower.endswith('.dist-info') and + os.path.isdir(os.path.join(path_item, entry)) + ) + is_meta = is_egg_info or is_dist_info + return ( + distributions_from_metadata + if is_meta else + find_distributions + if not only and _is_egg_path(entry) else + resolve_egg_link + if not only and lower.endswith('.egg-link') else + NoDists() + ) + + +class NoDists: + """ + >>> bool(NoDists()) + False + + >>> list(NoDists()('anything')) + [] + """ + def __bool__(self): + return False + + def __call__(self, fullpath): + return iter(()) + + +def 
safe_listdir(path): + """ + Attempt to list contents of path, but suppress some exceptions. + """ + try: + return os.listdir(path) + except (PermissionError, NotADirectoryError): + pass + except OSError as e: + # Ignore the directory if does not exist, not a directory or + # permission denied + if e.errno not in (errno.ENOTDIR, errno.EACCES, errno.ENOENT): + raise + return () + + +def distributions_from_metadata(path): + root = os.path.dirname(path) + if os.path.isdir(path): + if len(os.listdir(path)) == 0: + # empty metadata dir; skip + return + metadata = PathMetadata(root, path) + else: + metadata = FileMetadata(path) + entry = os.path.basename(path) + yield Distribution.from_location( + root, entry, metadata, precedence=DEVELOP_DIST, + ) + + +def non_empty_lines(path): + """ + Yield non-empty lines from file at path + """ + with open(path) as f: + for line in f: + line = line.strip() + if line: + yield line + + +def resolve_egg_link(path): + """ + Given a path to an .egg-link, resolve distributions + present in the referenced path. + """ + referenced_paths = non_empty_lines(path) + resolved_paths = ( + os.path.join(os.path.dirname(path), ref) + for ref in referenced_paths + ) + dist_groups = map(find_distributions, resolved_paths) + return next(dist_groups, ()) + + +register_finder(pkgutil.ImpImporter, find_on_path) + +if hasattr(importlib_machinery, 'FileFinder'): + register_finder(importlib_machinery.FileFinder, find_on_path) + +_declare_state('dict', _namespace_handlers={}) +_declare_state('dict', _namespace_packages={}) + + +def register_namespace_handler(importer_type, namespace_handler): + """Register `namespace_handler` to declare namespace packages + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `namespace_handler` is a callable like this:: + + def namespace_handler(importer, path_entry, moduleName, module): + # return a path_entry to use for child packages + + Namespace handlers are only called if the importer object has already + agreed that it can handle the relevant path item, and they should only + return a subpath if the module __path__ does not already contain an + equivalent subpath. For an example namespace handler, see + ``pkg_resources.file_ns_handler``. 
+ """ + _namespace_handlers[importer_type] = namespace_handler + + +def _handle_ns(packageName, path_item): + """Ensure that named package includes a subpath of path_item (if needed)""" + + importer = get_importer(path_item) + if importer is None: + return None + + # use find_spec (PEP 451) and fall-back to find_module (PEP 302) + try: + loader = importer.find_spec(packageName).loader + except AttributeError: + # capture warnings due to #1111 + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + loader = importer.find_module(packageName) + + if loader is None: + return None + module = sys.modules.get(packageName) + if module is None: + module = sys.modules[packageName] = types.ModuleType(packageName) + module.__path__ = [] + _set_parent_ns(packageName) + elif not hasattr(module, '__path__'): + raise TypeError("Not a package:", packageName) + handler = _find_adapter(_namespace_handlers, importer) + subpath = handler(importer, path_item, packageName, module) + if subpath is not None: + path = module.__path__ + path.append(subpath) + importlib.import_module(packageName) + _rebuild_mod_path(path, packageName, module) + return subpath + + +def _rebuild_mod_path(orig_path, package_name, module): + """ + Rebuild module.__path__ ensuring that all entries are ordered + corresponding to their sys.path order + """ + sys_path = [_normalize_cached(p) for p in sys.path] + + def safe_sys_path_index(entry): + """ + Workaround for #520 and #513. + """ + try: + return sys_path.index(entry) + except ValueError: + return float('inf') + + def position_in_sys_path(path): + """ + Return the ordinal of the path based on its position in sys.path + """ + path_parts = path.split(os.sep) + module_parts = package_name.count('.') + 1 + parts = path_parts[:-module_parts] + return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) + + new_path = sorted(orig_path, key=position_in_sys_path) + new_path = [_normalize_cached(p) for p in new_path] + + if isinstance(module.__path__, list): + module.__path__[:] = new_path + else: + module.__path__ = new_path + + +def declare_namespace(packageName): + """Declare that package 'packageName' is a namespace package""" + + _imp.acquire_lock() + try: + if packageName in _namespace_packages: + return + + path = sys.path + parent, _, _ = packageName.rpartition('.') + + if parent: + declare_namespace(parent) + if parent not in _namespace_packages: + __import__(parent) + try: + path = sys.modules[parent].__path__ + except AttributeError as e: + raise TypeError("Not a package:", parent) from e + + # Track what packages are namespaces, so when new path items are added, + # they can be updated + _namespace_packages.setdefault(parent or None, []).append(packageName) + _namespace_packages.setdefault(packageName, []) + + for path_item in path: + # Ensure all the parent's path items are reflected in the child, + # if they apply + _handle_ns(packageName, path_item) + + finally: + _imp.release_lock() + + +def fixup_namespace_packages(path_item, parent=None): + """Ensure that previously-declared namespace packages include path_item""" + _imp.acquire_lock() + try: + for package in _namespace_packages.get(parent, ()): + subpath = _handle_ns(package, path_item) + if subpath: + fixup_namespace_packages(subpath, package) + finally: + _imp.release_lock() + + +def file_ns_handler(importer, path_item, packageName, module): + """Compute an ns-package subpath for a filesystem or zipfile importer""" + + subpath = os.path.join(path_item, packageName.split('.')[-1]) + normalized = 
_normalize_cached(subpath) + for item in module.__path__: + if _normalize_cached(item) == normalized: + break + else: + # Only return the path if it's not already there + return subpath + + +register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) + +if hasattr(importlib_machinery, 'FileFinder'): + register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) + + +def null_ns_handler(importer, path_item, packageName, module): + return None + + +register_namespace_handler(object, null_ns_handler) + + +def normalize_path(filename): + """Normalize a file/dir name for comparison purposes""" + return os.path.normcase(os.path.realpath(os.path.normpath( + _cygwin_patch(filename)))) + + +def _cygwin_patch(filename): # pragma: nocover + """ + Contrary to POSIX 2008, on Cygwin, getcwd (3) contains + symlink components. Using + os.path.abspath() works around this limitation. A fix in os.getcwd() + would probably better, in Cygwin even more so, except + that this seems to be by design... + """ + return os.path.abspath(filename) if sys.platform == 'cygwin' else filename + + +def _normalize_cached(filename, _cache={}): + try: + return _cache[filename] + except KeyError: + _cache[filename] = result = normalize_path(filename) + return result + + +def _is_egg_path(path): + """ + Determine if given path appears to be an egg. + """ + return _is_zip_egg(path) or _is_unpacked_egg(path) + + +def _is_zip_egg(path): + return ( + path.lower().endswith('.egg') and + os.path.isfile(path) and + zipfile.is_zipfile(path) + ) + + +def _is_unpacked_egg(path): + """ + Determine if given path appears to be an unpacked egg. + """ + return ( + path.lower().endswith('.egg') and + os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) + ) + + +def _set_parent_ns(packageName): + parts = packageName.split('.') + name = parts.pop() + if parts: + parent = '.'.join(parts) + setattr(sys.modules[parent], name, sys.modules[packageName]) + + +def yield_lines(strs): + """Yield non-empty/non-comment lines of a string or sequence""" + if isinstance(strs, str): + for s in strs.splitlines(): + s = s.strip() + # skip blank lines/comments + if s and not s.startswith('#'): + yield s + else: + for ss in strs: + for s in yield_lines(ss): + yield s + + +MODULE = re.compile(r"\w+(\.\w+)*$").match +EGG_NAME = re.compile( + r""" + (?P<name>[^-]+) ( + -(?P<ver>[^-]+) ( + -py(?P<pyver>[^-]+) ( + -(?P<plat>.+) + )? + )? + )? + """, + re.VERBOSE | re.IGNORECASE, +).match + + +class EntryPoint: + """Object representing an advertised importable object""" + + def __init__(self, name, module_name, attrs=(), extras=(), dist=None): + if not MODULE(module_name): + raise ValueError("Invalid module name", module_name) + self.name = name + self.module_name = module_name + self.attrs = tuple(attrs) + self.extras = tuple(extras) + self.dist = dist + + def __str__(self): + s = "%s = %s" % (self.name, self.module_name) + if self.attrs: + s += ':' + '.'.join(self.attrs) + if self.extras: + s += ' [%s]' % ','.join(self.extras) + return s + + def __repr__(self): + return "EntryPoint.parse(%r)" % str(self) + + def load(self, require=True, *args, **kwargs): + """ + Require packages for this EntryPoint, then resolve it. + """ + if not require or args or kwargs: + warnings.warn( + "Parameters to load are deprecated. 
Call .resolve and " + ".require separately.", + PkgResourcesDeprecationWarning, + stacklevel=2, + ) + if require: + self.require(*args, **kwargs) + return self.resolve() + + def resolve(self): + """ + Resolve the entry point from its module and attrs. + """ + module = __import__(self.module_name, fromlist=['__name__'], level=0) + try: + return functools.reduce(getattr, self.attrs, module) + except AttributeError as exc: + raise ImportError(str(exc)) from exc + + def require(self, env=None, installer=None): + if self.extras and not self.dist: + raise UnknownExtra("Can't require() without a distribution", self) + + # Get the requirements for this entry point with all its extras and + # then resolve them. We have to pass `extras` along when resolving so + # that the working set knows what extras we want. Otherwise, for + # dist-info distributions, the working set will assume that the + # requirements for that extra are purely optional and skip over them. + reqs = self.dist.requires(self.extras) + items = working_set.resolve(reqs, env, installer, extras=self.extras) + list(map(working_set.add, items)) + + pattern = re.compile( + r'\s*' + r'(?P<name>.+?)\s*' + r'=\s*' + r'(?P<module>[\w.]+)\s*' + r'(:\s*(?P<attr>[\w.]+))?\s*' + r'(?P<extras>\[.*\])?\s*$' + ) + + @classmethod + def parse(cls, src, dist=None): + """Parse a single entry point from string `src` + + Entry point syntax follows the form:: + + name = some.module:some.attr [extra1, extra2] + + The entry name and module name are required, but the ``:attrs`` and + ``[extras]`` parts are optional + """ + m = cls.pattern.match(src) + if not m: + msg = "EntryPoint must be in 'name=module:attrs [extras]' format" + raise ValueError(msg, src) + res = m.groupdict() + extras = cls._parse_extras(res['extras']) + attrs = res['attr'].split('.') if res['attr'] else () + return cls(res['name'], res['module'], attrs, extras, dist) + + @classmethod + def _parse_extras(cls, extras_spec): + if not extras_spec: + return () + req = Requirement.parse('x' + extras_spec) + if req.specs: + raise ValueError() + return req.extras + + @classmethod + def parse_group(cls, group, lines, dist=None): + """Parse an entry point group""" + if not MODULE(group): + raise ValueError("Invalid group name", group) + this = {} + for line in yield_lines(lines): + ep = cls.parse(line, dist) + if ep.name in this: + raise ValueError("Duplicate entry point", group, ep.name) + this[ep.name] = ep + return this + + @classmethod + def parse_map(cls, data, dist=None): + """Parse a map of entry point groups""" + if isinstance(data, dict): + data = data.items() + else: + data = split_sections(data) + maps = {} + for group, lines in data: + if group is None: + if not lines: + continue + raise ValueError("Entry points must be listed in groups") + group = group.strip() + if group in maps: + raise ValueError("Duplicate group name", group) + maps[group] = cls.parse_group(group, lines, dist) + return maps + + +def _version_from_file(lines): + """ + Given an iterable of lines from a Metadata file, return + the value of the Version field, if present, or None otherwise. 
+ """ + def is_version_line(line): + return line.lower().startswith('version:') + version_lines = filter(is_version_line, lines) + line = next(iter(version_lines), '') + _, _, value = line.partition(':') + return safe_version(value.strip()) or None + + +class Distribution: + """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' + + def __init__( + self, location=None, metadata=None, project_name=None, + version=None, py_version=PY_MAJOR, platform=None, + precedence=EGG_DIST): + self.project_name = safe_name(project_name or 'Unknown') + if version is not None: + self._version = safe_version(version) + self.py_version = py_version + self.platform = platform + self.location = location + self.precedence = precedence + self._provider = metadata or empty_provider + + @classmethod + def from_location(cls, location, basename, metadata=None, **kw): + project_name, version, py_version, platform = [None] * 4 + basename, ext = os.path.splitext(basename) + if ext.lower() in _distributionImpl: + cls = _distributionImpl[ext.lower()] + + match = EGG_NAME(basename) + if match: + project_name, version, py_version, platform = match.group( + 'name', 'ver', 'pyver', 'plat' + ) + return cls( + location, metadata, project_name=project_name, version=version, + py_version=py_version, platform=platform, **kw + )._reload_version() + + def _reload_version(self): + return self + + @property + def hashcmp(self): + return ( + self.parsed_version, + self.precedence, + self.key, + self.location, + self.py_version or '', + self.platform or '', + ) + + def __hash__(self): + return hash(self.hashcmp) + + def __lt__(self, other): + return self.hashcmp < other.hashcmp + + def __le__(self, other): + return self.hashcmp <= other.hashcmp + + def __gt__(self, other): + return self.hashcmp > other.hashcmp + + def __ge__(self, other): + return self.hashcmp >= other.hashcmp + + def __eq__(self, other): + if not isinstance(other, self.__class__): + # It's not a Distribution, so they are not equal + return False + return self.hashcmp == other.hashcmp + + def __ne__(self, other): + return not self == other + + # These properties have to be lazy so that we don't have to load any + # metadata until/unless it's actually needed. (i.e., some distributions + # may not know their name or version without loading PKG-INFO) + + @property + def key(self): + try: + return self._key + except AttributeError: + self._key = key = self.project_name.lower() + return key + + @property + def parsed_version(self): + if not hasattr(self, "_parsed_version"): + self._parsed_version = parse_version(self.version) + + return self._parsed_version + + def _warn_legacy_version(self): + LV = packaging.version.LegacyVersion + is_legacy = isinstance(self._parsed_version, LV) + if not is_legacy: + return + + # While an empty version is technically a legacy version and + # is not a valid PEP 440 version, it's also unlikely to + # actually come from someone and instead it is more likely that + # it comes from setuptools attempting to parse a filename and + # including it in the list. So for that we'll gate this warning + # on if the version is anything at all or not. + if not self.version: + return + + tmpl = textwrap.dedent(""" + '{project_name} ({version})' is being parsed as a legacy, + non PEP 440, + version. You may find odd behavior and sort order. + In particular it will be sorted as less than 0.0. It + is recommended to migrate to PEP 440 compatible + versions. 
+ """).strip().replace('\n', ' ') + + warnings.warn(tmpl.format(**vars(self)), PEP440Warning) + + @property + def version(self): + try: + return self._version + except AttributeError as e: + version = self._get_version() + if version is None: + path = self._get_metadata_path_for_display(self.PKG_INFO) + msg = ( + "Missing 'Version:' header and/or {} file at path: {}" + ).format(self.PKG_INFO, path) + raise ValueError(msg, self) from e + + return version + + @property + def _dep_map(self): + """ + A map of extra to its list of (direct) requirements + for this distribution, including the null extra. + """ + try: + return self.__dep_map + except AttributeError: + self.__dep_map = self._filter_extras(self._build_dep_map()) + return self.__dep_map + + @staticmethod + def _filter_extras(dm): + """ + Given a mapping of extras to dependencies, strip off + environment markers and filter out any dependencies + not matching the markers. + """ + for extra in list(filter(None, dm)): + new_extra = extra + reqs = dm.pop(extra) + new_extra, _, marker = extra.partition(':') + fails_marker = marker and ( + invalid_marker(marker) + or not evaluate_marker(marker) + ) + if fails_marker: + reqs = [] + new_extra = safe_extra(new_extra) or None + + dm.setdefault(new_extra, []).extend(reqs) + return dm + + def _build_dep_map(self): + dm = {} + for name in 'requires.txt', 'depends.txt': + for extra, reqs in split_sections(self._get_metadata(name)): + dm.setdefault(extra, []).extend(parse_requirements(reqs)) + return dm + + def requires(self, extras=()): + """List of Requirements needed for this distro if `extras` are used""" + dm = self._dep_map + deps = [] + deps.extend(dm.get(None, ())) + for ext in extras: + try: + deps.extend(dm[safe_extra(ext)]) + except KeyError as e: + raise UnknownExtra( + "%s has no such extra feature %r" % (self, ext) + ) from e + return deps + + def _get_metadata_path_for_display(self, name): + """ + Return the path to the given metadata file, if available. + """ + try: + # We need to access _get_metadata_path() on the provider object + # directly rather than through this class's __getattr__() + # since _get_metadata_path() is marked private. + path = self._provider._get_metadata_path(name) + + # Handle exceptions e.g. in case the distribution's metadata + # provider doesn't support _get_metadata_path(). 
+ except Exception: + return '[could not detect]' + + return path + + def _get_metadata(self, name): + if self.has_metadata(name): + for line in self.get_metadata_lines(name): + yield line + + def _get_version(self): + lines = self._get_metadata(self.PKG_INFO) + version = _version_from_file(lines) + + return version + + def activate(self, path=None, replace=False): + """Ensure distribution is importable on `path` (default=sys.path)""" + if path is None: + path = sys.path + self.insert_on(path, replace=replace) + if path is sys.path: + fixup_namespace_packages(self.location) + for pkg in self._get_metadata('namespace_packages.txt'): + if pkg in sys.modules: + declare_namespace(pkg) + + def egg_name(self): + """Return what this distribution's standard .egg filename should be""" + filename = "%s-%s-py%s" % ( + to_filename(self.project_name), to_filename(self.version), + self.py_version or PY_MAJOR + ) + + if self.platform: + filename += '-' + self.platform + return filename + + def __repr__(self): + if self.location: + return "%s (%s)" % (self, self.location) + else: + return str(self) + + def __str__(self): + try: + version = getattr(self, 'version', None) + except ValueError: + version = None + version = version or "[unknown version]" + return "%s %s" % (self.project_name, version) + + def __getattr__(self, attr): + """Delegate all unrecognized public attributes to .metadata provider""" + if attr.startswith('_'): + raise AttributeError(attr) + return getattr(self._provider, attr) + + def __dir__(self): + return list( + set(super(Distribution, self).__dir__()) + | set( + attr for attr in self._provider.__dir__() + if not attr.startswith('_') + ) + ) + + @classmethod + def from_filename(cls, filename, metadata=None, **kw): + return cls.from_location( + _normalize_cached(filename), os.path.basename(filename), metadata, + **kw + ) + + def as_requirement(self): + """Return a ``Requirement`` that matches this distribution exactly""" + if isinstance(self.parsed_version, packaging.version.Version): + spec = "%s==%s" % (self.project_name, self.parsed_version) + else: + spec = "%s===%s" % (self.project_name, self.parsed_version) + + return Requirement.parse(spec) + + def load_entry_point(self, group, name): + """Return the `name` entry point of `group` or raise ImportError""" + ep = self.get_entry_info(group, name) + if ep is None: + raise ImportError("Entry point %r not found" % ((group, name),)) + return ep.load() + + def get_entry_map(self, group=None): + """Return the entry point map for `group`, or the full entry map""" + try: + ep_map = self._ep_map + except AttributeError: + ep_map = self._ep_map = EntryPoint.parse_map( + self._get_metadata('entry_points.txt'), self + ) + if group is not None: + return ep_map.get(group, {}) + return ep_map + + def get_entry_info(self, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return self.get_entry_map(group).get(name) + + # FIXME: 'Distribution.insert_on' is too complex (13) + def insert_on(self, path, loc=None, replace=False): # noqa: C901 + """Ensure self.location is on path + + If replace=False (default): + - If location is already in path anywhere, do nothing. + - Else: + - If it's an egg and its parent directory is on path, + insert just ahead of the parent. + - Else: add to the end of path. + If replace=True: + - If location is already on path anywhere (not eggs) + or higher priority than its parent (eggs) + do nothing. 
+ - Else: + - If it's an egg and its parent directory is on path, + insert just ahead of the parent, + removing any lower-priority entries. + - Else: add it to the front of path. + """ + + loc = loc or self.location + if not loc: + return + + nloc = _normalize_cached(loc) + bdir = os.path.dirname(nloc) + npath = [(p and _normalize_cached(p) or p) for p in path] + + for p, item in enumerate(npath): + if item == nloc: + if replace: + break + else: + # don't modify path (even removing duplicates) if + # found and not replace + return + elif item == bdir and self.precedence == EGG_DIST: + # if it's an .egg, give it precedence over its directory + # UNLESS it's already been added to sys.path and replace=False + if (not replace) and nloc in npath[p:]: + return + if path is sys.path: + self.check_version_conflict() + path.insert(p, loc) + npath.insert(p, nloc) + break + else: + if path is sys.path: + self.check_version_conflict() + if replace: + path.insert(0, loc) + else: + path.append(loc) + return + + # p is the spot where we found or inserted loc; now remove duplicates + while True: + try: + np = npath.index(nloc, p + 1) + except ValueError: + break + else: + del npath[np], path[np] + # ha! + p = np + + return + + def check_version_conflict(self): + if self.key == 'setuptools': + # ignore the inevitable setuptools self-conflicts :( + return + + nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) + loc = normalize_path(self.location) + for modname in self._get_metadata('top_level.txt'): + if (modname not in sys.modules or modname in nsp + or modname in _namespace_packages): + continue + if modname in ('pkg_resources', 'setuptools', 'site'): + continue + fn = getattr(sys.modules[modname], '__file__', None) + if fn and (normalize_path(fn).startswith(loc) or + fn.startswith(self.location)): + continue + issue_warning( + "Module %s was already imported from %s, but %s is being added" + " to sys.path" % (modname, fn, self.location), + ) + + def has_version(self): + try: + self.version + except ValueError: + issue_warning("Unbuilt egg for " + repr(self)) + return False + return True + + def clone(self, **kw): + """Copy this distribution, substituting in any changed keyword args""" + names = 'project_name version py_version platform location precedence' + for attr in names.split(): + kw.setdefault(attr, getattr(self, attr, None)) + kw.setdefault('metadata', self._provider) + return self.__class__(**kw) + + @property + def extras(self): + return [dep for dep in self._dep_map if dep] + + +class EggInfoDistribution(Distribution): + def _reload_version(self): + """ + Packages installed by distutils (e.g. numpy or scipy), + which uses an old safe_version, and so + their version numbers can get mangled when + converted to filenames (e.g., 1.11.0.dev0+2329eae to + 1.11.0.dev0_2329eae). These distributions will not be + parsed properly + downstream by Distribution and safe_version, so + take an extra step and try to get the version number from + the metadata file itself instead of the filename. + """ + md_version = self._get_version() + if md_version: + self._version = md_version + return self + + +class DistInfoDistribution(Distribution): + """ + Wrap an actual or potential sys.path entry + w/metadata, .dist-info style. 
+ """ + PKG_INFO = 'METADATA' + EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") + + @property + def _parsed_pkg_info(self): + """Parse and cache metadata""" + try: + return self._pkg_info + except AttributeError: + metadata = self.get_metadata(self.PKG_INFO) + self._pkg_info = email.parser.Parser().parsestr(metadata) + return self._pkg_info + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + self.__dep_map = self._compute_dependencies() + return self.__dep_map + + def _compute_dependencies(self): + """Recompute this distribution's dependencies.""" + dm = self.__dep_map = {None: []} + + reqs = [] + # Including any condition expressions + for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: + reqs.extend(parse_requirements(req)) + + def reqs_for_extra(extra): + for req in reqs: + if not req.marker or req.marker.evaluate({'extra': extra}): + yield req + + common = frozenset(reqs_for_extra(None)) + dm[None].extend(common) + + for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: + s_extra = safe_extra(extra.strip()) + dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) + + return dm + + +_distributionImpl = { + '.egg': Distribution, + '.egg-info': EggInfoDistribution, + '.dist-info': DistInfoDistribution, +} + + +def issue_warning(*args, **kw): + level = 1 + g = globals() + try: + # find the first stack frame that is *not* code in + # the pkg_resources module, to use for the warning + while sys._getframe(level).f_globals is g: + level += 1 + except ValueError: + pass + warnings.warn(stacklevel=level + 1, *args, **kw) + + +def parse_requirements(strs): + """Yield ``Requirement`` objects for each specification in `strs` + + `strs` must be a string, or a (possibly-nested) iterable thereof. + """ + # create a steppable iterator, so we can handle \-continuations + lines = iter(yield_lines(strs)) + + for line in lines: + # Drop comments -- a hash without a space may be in a URL. + if ' #' in line: + line = line[:line.find(' #')] + # If there is a line continuation, drop it, and append the next line. + if line.endswith('\\'): + line = line[:-2].strip() + try: + line += next(lines) + except StopIteration: + return + yield Requirement(line) + + +class RequirementParseError(packaging.requirements.InvalidRequirement): + "Compatibility wrapper for InvalidRequirement" + + +class Requirement(packaging.requirements.Requirement): + def __init__(self, requirement_string): + """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" + super(Requirement, self).__init__(requirement_string) + self.unsafe_name = self.name + project_name = safe_name(self.name) + self.project_name, self.key = project_name, project_name.lower() + self.specs = [ + (spec.operator, spec.version) for spec in self.specifier] + self.extras = tuple(map(safe_extra, self.extras)) + self.hashCmp = ( + self.key, + self.url, + self.specifier, + frozenset(self.extras), + str(self.marker) if self.marker else None, + ) + self.__hash = hash(self.hashCmp) + + def __eq__(self, other): + return ( + isinstance(other, Requirement) and + self.hashCmp == other.hashCmp + ) + + def __ne__(self, other): + return not self == other + + def __contains__(self, item): + if isinstance(item, Distribution): + if item.key != self.key: + return False + + item = item.version + + # Allow prereleases always in order to match the previous behavior of + # this method. In the future this should be smarter and follow PEP 440 + # more accurately. 
+ return self.specifier.contains(item, prereleases=True) + + def __hash__(self): + return self.__hash + + def __repr__(self): + return "Requirement.parse(%r)" % str(self) + + @staticmethod + def parse(s): + req, = parse_requirements(s) + return req + + +def _always_object(classes): + """ + Ensure object appears in the mro even + for old-style classes. + """ + if object not in classes: + return classes + (object,) + return classes + + +def _find_adapter(registry, ob): + """Return an adapter factory for `ob` from `registry`""" + types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob)))) + for t in types: + if t in registry: + return registry[t] + + +def ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + os.makedirs(dirname, exist_ok=True) + + +def _bypass_ensure_directory(path): + """Sandbox-bypassing version of ensure_directory()""" + if not WRITE_SUPPORT: + raise IOError('"os.mkdir" not supported on this platform.') + dirname, filename = split(path) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + try: + mkdir(dirname, 0o755) + except FileExistsError: + pass + + +def split_sections(s): + """Split a string or iterable thereof into (section, content) pairs + + Each ``section`` is a stripped version of the section header ("[section]") + and each ``content`` is a list of stripped lines excluding blank lines and + comment-only lines. If there are any such lines before the first section + header, they're returned in a first ``section`` of ``None``. + """ + section = None + content = [] + for line in yield_lines(s): + if line.startswith("["): + if line.endswith("]"): + if section or content: + yield section, content + section = line[1:-1].strip() + content = [] + else: + raise ValueError("Invalid section heading", line) + else: + content.append(line) + + # wrap up last segment + yield section, content + + +def _mkstemp(*args, **kw): + old_open = os.open + try: + # temporarily bypass sandboxing + os.open = os_open + return tempfile.mkstemp(*args, **kw) + finally: + # and then put it back + os.open = old_open + + +# Silence the PEP440Warning by default, so that end users don't get hit by it +# randomly just because they use pkg_resources. We want to append the rule +# because we want earlier uses of filterwarnings to take precedence over this +# one. +warnings.filterwarnings("ignore", category=PEP440Warning, append=True) + + +# from jaraco.functools 1.3 +def _call_aside(f, *args, **kwargs): + f(*args, **kwargs) + return f + + +@_call_aside +def _initialize(g=globals()): + "Set up global resource manager (deliberately not state-saved)" + manager = ResourceManager() + g['_manager'] = manager + g.update( + (name, getattr(manager, name)) + for name in dir(manager) + if not name.startswith('_') + ) + + +@_call_aside +def _initialize_master_working_set(): + """ + Prepare the master working set and make the ``require()`` + API available. + + This function has explicit effects on the global state + of pkg_resources. It is intended to be invoked once at + the initialization of this module. + + Invocation by other packages is unsupported and done + at their own risk. 
+ """ + working_set = WorkingSet._build_master() + _declare_state('object', working_set=working_set) + + require = working_set.require + iter_entry_points = working_set.iter_entry_points + add_activation_listener = working_set.subscribe + run_script = working_set.run_script + # backward compatibility + run_main = run_script + # Activate all distributions already on sys.path with replace=False and + # ensure that all distributions added to the working set in the future + # (e.g. by calling ``require()``) will get activated as well, + # with higher priority (replace=True). + tuple( + dist.activate(replace=False) + for dist in working_set + ) + add_activation_listener( + lambda dist: dist.activate(replace=True), + existing=False, + ) + working_set.entries = [] + # match order + list(map(working_set.add_entry, sys.path)) + globals().update(locals()) + + +class PkgResourcesDeprecationWarning(Warning): + """ + Base class for warning about deprecations in ``pkg_resources`` + + This class is not derived from ``DeprecationWarning``, and as such is + visible by default. + """ diff --git a/venv/Lib/site-packages/pkg_resources/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..95607a7c Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/__init__.py b/venv/Lib/site-packages/pkg_resources/_vendor/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..595ca7c2 Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/appdirs.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/appdirs.cpython-36.pyc new file mode 100644 index 00000000..422b73de Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/appdirs.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-36.pyc new file mode 100644 index 00000000..db9d411b Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.py b/venv/Lib/site-packages/pkg_resources/_vendor/appdirs.py similarity index 100% rename from env/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.py rename to venv/Lib/site-packages/pkg_resources/_vendor/appdirs.py diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__about__.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__about__.py new file mode 100644 index 00000000..4d998578 --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__about__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "20.4" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = "Copyright 2014-2019 %s" % __author__ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__init__.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__init__.py new file mode 100644 index 00000000..a0cf67df --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +from .__about__ import ( + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + __version__, +) + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-36.pyc new file mode 100644 index 00000000..f52fdce0 Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..50d2dead Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-36.pyc new file mode 100644 index 00000000..1115b62f Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-36.pyc new file mode 100644 index 00000000..1d8f10aa Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_typing.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_typing.cpython-36.pyc new file mode 100644 index 00000000..0c246667 Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/_typing.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-36.pyc new file mode 100644 index 00000000..fdc8cdf9 Binary files /dev/null and 
b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-36.pyc new file mode 100644 index 00000000..14b4a92f Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc new file mode 100644 index 00000000..e1482cb5 Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/tags.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/tags.cpython-36.pyc new file mode 100644 index 00000000..c50374f8 Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/tags.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-36.pyc new file mode 100644 index 00000000..dccde88b Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-36.pyc new file mode 100644 index 00000000..975924c7 Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/_compat.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/_compat.py new file mode 100644 index 00000000..e54bd4ed --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/_compat.py @@ -0,0 +1,38 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import sys + +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Dict, Tuple, Type + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = (str,) +else: + string_types = (basestring,) + + +def with_metaclass(meta, *bases): + # type: (Type[Any], Tuple[Type[Any], ...]) -> Any + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(meta): # type: ignore + def __new__(cls, name, this_bases, d): + # type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any + return meta(name, bases, d) + + return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/_structures.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/_structures.py new file mode 100644 index 00000000..800d5c55 --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/_structures.py @@ -0,0 +1,86 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + + +class InfinityType(object): + def __repr__(self): + # type: () -> str + return "Infinity" + + def __hash__(self): + # type: () -> int + return hash(repr(self)) + + def __lt__(self, other): + # type: (object) -> bool + return False + + def __le__(self, other): + # type: (object) -> bool + return False + + def __eq__(self, other): + # type: (object) -> bool + return isinstance(other, self.__class__) + + def __ne__(self, other): + # type: (object) -> bool + return not isinstance(other, self.__class__) + + def __gt__(self, other): + # type: (object) -> bool + return True + + def __ge__(self, other): + # type: (object) -> bool + return True + + def __neg__(self): + # type: (object) -> NegativeInfinityType + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType(object): + def __repr__(self): + # type: () -> str + return "-Infinity" + + def __hash__(self): + # type: () -> int + return hash(repr(self)) + + def __lt__(self, other): + # type: (object) -> bool + return True + + def __le__(self, other): + # type: (object) -> bool + return True + + def __eq__(self, other): + # type: (object) -> bool + return isinstance(other, self.__class__) + + def __ne__(self, other): + # type: (object) -> bool + return not isinstance(other, self.__class__) + + def __gt__(self, other): + # type: (object) -> bool + return False + + def __ge__(self, other): + # type: (object) -> bool + return False + + def __neg__(self): + # type: (object) -> InfinityType + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/_typing.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/_typing.py new file mode 100644 index 00000000..77a8b918 --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/_typing.py @@ -0,0 +1,48 @@ +"""For neatly implementing static typing in packaging. + +`mypy` - the static type analysis tool we use - uses the `typing` module, which +provides core functionality fundamental to mypy's functioning. + +Generally, `typing` would be imported at runtime and used in that fashion - +it acts as a no-op at runtime and does not have any run-time overhead by +design. + +As it turns out, `typing` is not vendorable - it uses separate sources for +Python 2/Python 3. Thus, this codebase can not expect it to be present. +To work around this, mypy allows the typing import to be behind a False-y +optional to prevent it from running at runtime and type-comments can be used +to remove the need for the types to be accessible directly during runtime. + +This module provides the False-y guard in a nicely named fashion so that a +curious maintainer can reach here to read this. 
+ +In packaging, all static-typing related imports should be guarded as follows: + + from packaging._typing import TYPE_CHECKING + + if TYPE_CHECKING: + from typing import ... + +Ref: https://github.com/python/mypy/issues/3216 +""" + +__all__ = ["TYPE_CHECKING", "cast"] + +# The TYPE_CHECKING constant defined by the typing module is False at runtime +# but True while type checking. +if False: # pragma: no cover + from typing import TYPE_CHECKING +else: + TYPE_CHECKING = False + +# typing's cast syntax requires calling typing.cast at runtime, but we don't +# want to import typing at runtime. Here, we inform the type checkers that +# we're importing `typing.cast` as `cast` and re-implement typing.cast's +# runtime behavior in a block that is ignored by type checkers. +if TYPE_CHECKING: # pragma: no cover + # not executed at runtime + from typing import cast +else: + # executed at runtime + def cast(type_, value): # noqa + return value diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/markers.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/markers.py new file mode 100644 index 00000000..fd1559c1 --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/markers.py @@ -0,0 +1,328 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import operator +import os +import platform +import sys + +from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd +from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString +from pkg_resources.extern.pyparsing import Literal as L # noqa + +from ._compat import string_types +from ._typing import TYPE_CHECKING +from .specifiers import Specifier, InvalidSpecifier + +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Callable, Dict, List, Optional, Tuple, Union + + Operator = Callable[[str, str], bool] + + +__all__ = [ + "InvalidMarker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "Marker", + "default_environment", +] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. 
+ """ + + +class Node(object): + def __init__(self, value): + # type: (Any) -> None + self.value = value + + def __str__(self): + # type: () -> str + return str(self.value) + + def __repr__(self): + # type: () -> str + return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + + def serialize(self): + # type: () -> str + raise NotImplementedError + + +class Variable(Node): + def serialize(self): + # type: () -> str + return str(self) + + +class Value(Node): + def serialize(self): + # type: () -> str + return '"{0}"'.format(self) + + +class Op(Node): + def serialize(self): + # type: () -> str + return str(self) + + +VARIABLE = ( + L("implementation_version") + | L("platform_python_implementation") + | L("implementation_name") + | L("python_full_version") + | L("platform_release") + | L("platform_version") + | L("platform_machine") + | L("platform_system") + | L("python_version") + | L("sys_platform") + | L("os_name") + | L("os.name") # PEP-345 + | L("sys.platform") # PEP-345 + | L("platform.version") # PEP-345 + | L("platform.machine") # PEP-345 + | L("platform.python_implementation") # PEP-345 + | L("python_implementation") # undocumented setuptools legacy + | L("extra") # PEP-508 +) +ALIASES = { + "os.name": "os_name", + "sys.platform": "sys_platform", + "platform.version": "platform_version", + "platform.machine": "platform_machine", + "platform.python_implementation": "platform_python_implementation", + "python_implementation": "platform_python_implementation", +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( + L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results): + # type: (Union[ParseResults, List[Any]]) -> List[Any] + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker(marker, first=True): + # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str + + assert isinstance(marker, (list, tuple, string_types)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+ if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} # type: Dict[str, Operator] + + +def _eval_op(lhs, op, rhs): + # type: (str, Op, str) -> bool + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + + oper = _operators.get(op.serialize()) # type: Optional[Operator] + if oper is None: + raise UndefinedComparison( + "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) + ) + + return oper(lhs, rhs) + + +class Undefined(object): + pass + + +_undefined = Undefined() + + +def _get_env(environment, name): + # type: (Dict[str, str], str) -> str + value = environment.get(name, _undefined) # type: Union[str, Undefined] + + if isinstance(value, Undefined): + raise UndefinedEnvironmentName( + "{0!r} does not exist in evaluation environment.".format(name) + ) + + return value + + +def _evaluate_markers(markers, environment): + # type: (List[Any], Dict[str, str]) -> bool + groups = [[]] # type: List[List[bool]] + + for marker in markers: + assert isinstance(marker, (list, tuple, string_types)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info): + # type: (sys._version_info) -> str + version = "{0.major}.{0.minor}.{0.micro}".format(info) + kind = info.releaselevel + if kind != "final": + version += kind[0] + str(info.serial) + return version + + +def default_environment(): + # type: () -> Dict[str, str] + if hasattr(sys, "implementation"): + # Ignoring the `sys.implementation` reference for type checking due to + # mypy not liking that the attribute doesn't exist in Python 2.7 when + # run with the `--py27` flag. 
+ iver = format_full_version(sys.implementation.version) # type: ignore + implementation_name = sys.implementation.name # type: ignore + else: + iver = "0" + implementation_name = "" + + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": ".".join(platform.python_version_tuple()[:2]), + "sys_platform": sys.platform, + } + + +class Marker(object): + def __init__(self, marker): + # type: (str) -> None + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( + marker, marker[e.loc : e.loc + 8] + ) + raise InvalidMarker(err_str) + + def __str__(self): + # type: () -> str + return _format_marker(self._markers) + + def __repr__(self): + # type: () -> str + return "".format(str(self)) + + def evaluate(self, environment=None): + # type: (Optional[Dict[str, str]]) -> bool + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. + + The environment is determined from the current Python process. + """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/requirements.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/requirements.py new file mode 100644 index 00000000..9495a1df --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/requirements.py @@ -0,0 +1,145 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import string +import re + +from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException +from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine +from pkg_resources.extern.pyparsing import Literal as L # noqa +from urllib import parse as urlparse + +from ._typing import TYPE_CHECKING +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + +if TYPE_CHECKING: # pragma: no cover + from typing import List + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. 
+ """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r"[^ ]+")("url") +URL = AT + URI + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine( + VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False +)("_raw_spec") +_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start : t._original_end]) +) +MARKER_SEPARATOR = SEMICOLON +MARKER = MARKER_SEPARATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd +# pkg_resources.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see +# issue #104 +REQUIREMENT.parseString("x[]") + + +class Requirement(object): + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
+ + def __init__(self, requirement_string): + # type: (str) -> None + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + raise InvalidRequirement( + 'Parse error at "{0!r}": {1}'.format( + requirement_string[e.loc : e.loc + 8], e.msg + ) + ) + + self.name = req.name + if req.url: + parsed_url = urlparse.urlparse(req.url) + if parsed_url.scheme == "file": + if urlparse.urlunparse(parsed_url) != req.url: + raise InvalidRequirement("Invalid URL given") + elif not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc + ): + raise InvalidRequirement("Invalid URL: {0}".format(req.url)) + self.url = req.url + else: + self.url = None + self.extras = set(req.extras.asList() if req.extras else []) + self.specifier = SpecifierSet(req.specifier) + self.marker = req.marker if req.marker else None + + def __str__(self): + # type: () -> str + parts = [self.name] # type: List[str] + + if self.extras: + parts.append("[{0}]".format(",".join(sorted(self.extras)))) + + if self.specifier: + parts.append(str(self.specifier)) + + if self.url: + parts.append("@ {0}".format(self.url)) + if self.marker: + parts.append(" ") + + if self.marker: + parts.append("; {0}".format(self.marker)) + + return "".join(parts) + + def __repr__(self): + # type: () -> str + return "".format(str(self)) diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py new file mode 100644 index 00000000..fe09bb1d --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py @@ -0,0 +1,863 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from ._typing import TYPE_CHECKING +from .utils import canonicalize_version +from .version import Version, LegacyVersion, parse + +if TYPE_CHECKING: # pragma: no cover + from typing import ( + List, + Dict, + Union, + Iterable, + Iterator, + Optional, + Callable, + Tuple, + FrozenSet, + ) + + ParsedVersion = Union[Version, LegacyVersion] + UnparsedVersion = Union[Version, LegacyVersion, str] + CallableOperator = Callable[[ParsedVersion, str], bool] + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): # type: ignore + @abc.abstractmethod + def __str__(self): + # type: () -> str + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + # type: () -> int + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + # type: (object) -> bool + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + # type: (object) -> bool + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + # type: () -> Optional[bool] + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. 
+ """ + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + # type: (str, Optional[bool]) -> bool + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} # type: Dict[str, str] + + def __init__(self, spec="", prereleases=None): + # type: (str, Optional[bool]) -> None + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) # type: Tuple[str, str] + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + # type: () -> str + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) + + def __str__(self): + # type: () -> str + return "{0}{1}".format(*self._spec) + + @property + def _canonical_spec(self): + # type: () -> Tuple[str, Union[Version, str]] + return self._spec[0], canonicalize_version(self._spec[1]) + + def __hash__(self): + # type: () -> int + return hash(self._canonical_spec) + + def __eq__(self, other): + # type: (object) -> bool + if isinstance(other, string_types): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def __ne__(self, other): + # type: (object) -> bool + if isinstance(other, string_types): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + # type: (str) -> CallableOperator + operator_callable = getattr( + self, "_compare_{0}".format(self._operators[op]) + ) # type: CallableOperator + return operator_callable + + def _coerce_version(self, version): + # type: (UnparsedVersion) -> ParsedVersion + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + # type: () -> str + return self._spec[0] + + @property + def version(self): + # type: () -> str + return self._spec[1] + + @property + def prereleases(self): + # type: () -> Optional[bool] + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + self._prereleases = value + + def __contains__(self, item): + # type: (str) -> bool + return self.contains(item) + + def contains(self, item, prereleases=None): + # type: (UnparsedVersion, Optional[bool]) -> bool + + # Determine if prereleases are to be allowed or not. 
+ if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + normalized_item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if normalized_item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + operator_callable = self._get_operator(self.operator) # type: CallableOperator + return operator_callable(normalized_item, self.version) + + def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later incase nothing + # else matches this specifier. + if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning. + else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex_str = r""" + (?P<operator>(==|!=|<=|>=|<|>)) + \s* + (?P<version> + [^,;\s)]* # Since this is a "legacy" specifier, and the version + # string can be just about anything, we match everything + # except for whitespace, a semi-colon for marker support, + # a closing paren since versions can be enclosed in + # them, and a comma since it's a version separator.
+ ) + """ + + _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + # type: (Union[ParsedVersion, str]) -> LegacyVersion + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective > self._coerce_version(spec) + + +def _require_version_compare( + fn # type: (Callable[[Specifier, ParsedVersion, str], bool]) +): + # type: (...) -> Callable[[Specifier, ParsedVersion, str], bool] + @functools.wraps(fn) + def wrapped(self, prospective, spec): + # type: (Specifier, ParsedVersion, str) -> bool + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex_str = r""" + (?P<operator>(~=|==|!=|<=|>=|<|>|===)) + (?P<version> + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)?
# dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?<!==|!=|~=) # We have special cases for these + # operators so we want to make sure they + # don't match here. + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + ) + """ + + _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "~=": "compatible", + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore post and dev releases and we want to treat the pre-release as + # it's own separate segment. + prefix = ".".join( + list( + itertools.takewhile( + lambda x: (not x.startswith("post") and not x.startswith("dev")), + _version_split(spec), + ) + )[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + @_require_version_compare + def _compare_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + prospective = Version(prospective.public) + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. + split_spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + split_prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = split_prospective[: len(split_spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + padded_spec, padded_prospective = _pad_version( + split_spec, shortened_prospective + ) + + return padded_prospective == padded_spec + else: + # Convert our spec string into a Version + spec_version = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec_version.local: + prospective = Version(prospective.public) + + return prospective == spec_version + + @_require_version_compare + def _compare_not_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version.
+ return Version(prospective.public) <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + # type: (Version, str) -> bool + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # type: () -> bool + + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. 
+ if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + # type: (str) -> List[str] + result = [] # type: List[str] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + # type: (List[str], List[str]) -> Tuple[List[str], List[str]] + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) + + +class SpecifierSet(BaseSpecifier): + def __init__(self, specifiers="", prereleases=None): + # type: (str, Optional[bool]) -> None + + # Split on , to break each individual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in split_specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + # type: () -> str + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "".format(str(self), pre) + + def __str__(self): + # type: () -> str + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + # type: () -> int + return hash(self._specs) + + def __and__(self, other): + # type: (Union[SpecifierSet, str]) -> SpecifierSet + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+ ) + + return specifier + + def __eq__(self, other): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + # type: () -> int + return len(self._specs) + + def __iter__(self): + # type: () -> Iterator[FrozenSet[_IndividualSpecifier]] + return iter(self._specs) + + @property + def prereleases(self): + # type: () -> Optional[bool] + + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + self._prereleases = value + + def __contains__(self, item): + # type: (Union[ParsedVersion, str]) -> bool + return self.contains(item) + + def contains(self, item, prereleases=None): + # type: (Union[ParsedVersion, str], Optional[bool]) -> bool + + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all(s.contains(item, prereleases=prereleases) for s in self._specs) + + def filter( + self, + iterable, # type: Iterable[Union[ParsedVersion, str]] + prereleases=None, # type: Optional[bool] + ): + # type: (...) -> Iterable[Union[ParsedVersion, str]] + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. 
+ if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. + else: + filtered = [] # type: List[Union[ParsedVersion, str]] + found_prereleases = [] # type: List[Union[ParsedVersion, str]] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/tags.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/tags.py new file mode 100644 index 00000000..9064910b --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/tags.py @@ -0,0 +1,751 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import + +import distutils.util + +try: + from importlib.machinery import EXTENSION_SUFFIXES +except ImportError: # pragma: no cover + import imp + + EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] + del imp +import logging +import os +import platform +import re +import struct +import sys +import sysconfig +import warnings + +from ._typing import TYPE_CHECKING, cast + +if TYPE_CHECKING: # pragma: no cover + from typing import ( + Dict, + FrozenSet, + IO, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + ) + + PythonVersion = Sequence[int] + MacVersion = Tuple[int, int] + GlibcVersion = Tuple[int, int] + + +logger = logging.getLogger(__name__) + +INTERPRETER_SHORT_NAMES = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} # type: Dict[str, str] + + +_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + + +class Tag(object): + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. 
+ """ + + __slots__ = ["_interpreter", "_abi", "_platform"] + + def __init__(self, interpreter, abi, platform): + # type: (str, str, str) -> None + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + + @property + def interpreter(self): + # type: () -> str + return self._interpreter + + @property + def abi(self): + # type: () -> str + return self._abi + + @property + def platform(self): + # type: () -> str + return self._platform + + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self.platform == other.platform) + and (self.abi == other.abi) + and (self.interpreter == other.interpreter) + ) + + def __hash__(self): + # type: () -> int + return hash((self._interpreter, self._abi, self._platform)) + + def __str__(self): + # type: () -> str + return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) + + def __repr__(self): + # type: () -> str + return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) + + +def parse_tag(tag): + # type: (str) -> FrozenSet[Tag] + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _warn_keyword_parameter(func_name, kwargs): + # type: (str, Dict[str, bool]) -> bool + """ + Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only. + """ + if not kwargs: + return False + elif len(kwargs) > 1 or "warn" not in kwargs: + kwargs.pop("warn", None) + arg = next(iter(kwargs.keys())) + raise TypeError( + "{}() got an unexpected keyword argument {!r}".format(func_name, arg) + ) + return kwargs["warn"] + + +def _get_config_var(name, warn=False): + # type: (str, bool) -> Union[int, str, None] + value = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string): + # type: (str) -> str + return string.replace(".", "_").replace("-", "_") + + +def _abi3_applies(python_version): + # type: (PythonVersion) -> bool + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) + + +def _cpython_abis(py_version, warn=False): + # type: (PythonVersion, bool) -> List[str] + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. 
+ # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append("cp{version}".format(version=version)) + abis.insert( + 0, + "cp{version}{debug}{pymalloc}{ucs4}".format( + version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 + ), + ) + return abis + + +def cpython_tags( + python_version=None, # type: Optional[PythonVersion] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + warn = _warn_keyword_parameter("cpython_tags", kwargs) + if not python_version: + python_version = sys.version_info[:2] + + interpreter = "cp{}".format(_version_nodot(python_version[:2])) + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. + for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or _platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + if _abi3_applies(python_version): + for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): + yield tag + for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): + yield tag + + if _abi3_applies(python_version): + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{version}".format( + version=_version_nodot((python_version[0], minor_version)) + ) + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi(): + # type: () -> Iterator[str] + abi = sysconfig.get_config_var("SOABI") + if abi: + yield _normalize_string(abi) + + +def generic_tags( + interpreter=None, # type: Optional[str] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. 
+ """ + warn = _warn_keyword_parameter("generic_tags", kwargs) + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + platforms = list(platforms or _platform_tags()) + abis = list(abis) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version): + # type: (PythonVersion) -> Iterator[str] + """ + Yields Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. + """ + if len(py_version) > 1: + yield "py{version}".format(version=_version_nodot(py_version[:2])) + yield "py{major}".format(major=py_version[0]) + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield "py{version}".format(version=_version_nodot((py_version[0], minor))) + + +def compatible_tags( + python_version=None, # type: Optional[PythonVersion] + interpreter=None, # type: Optional[str] + platforms=None, # type: Optional[Iterable[str]] +): + # type: (...) -> Iterator[Tag] + """ + Yields the sequence of tags that are compatible with a specific version of Python. + + The tags consist of: + - py*-none- + - -none-any # ... if `interpreter` is provided. + - py*-none-any + """ + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or _platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): + # type: (str, bool) -> str + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version, cpu_arch): + # type: (MacVersion, str) -> List[str] + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? + if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + formats.append("universal") + return formats + + +def mac_platforms(version=None, arch=None): + # type: (Optional[MacVersion], Optional[str]) -> Iterator[str] + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. 
+ """ + version_str, _, cpu_arch = platform.mac_ver() # type: ignore + if version is None: + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + else: + version = version + if arch is None: + arch = _mac_arch(cpu_arch) + else: + arch = arch + for minor_version in range(version[1], -1, -1): + compat_version = version[0], minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + + +# From PEP 513. +def _is_manylinux_compatible(name, glibc_version): + # type: (str, GlibcVersion) -> bool + # Check for presence of _manylinux module. + try: + import _manylinux # noqa + + return bool(getattr(_manylinux, name + "_compatible")) + except (ImportError, AttributeError): + # Fall through to heuristic check below. + pass + + return _have_compatible_glibc(*glibc_version) + + +def _glibc_version_string(): + # type: () -> Optional[str] + # Returns glibc version string, or None if not using glibc. + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _glibc_version_string_confstr(): + # type: () -> Optional[str] + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". + version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821 + "CS_GNU_LIBC_VERSION" + ) + assert version_string is not None + _, version = version_string.split() # type: Tuple[str, str] + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes(): + # type: () -> Optional[str] + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # Note: typeshed is wrong here so we are ignoring this line. + process_namespace = ctypes.CDLL(None) # type: ignore + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() # type: str + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# Separated out from have_compatible_glibc for easier unit testing. +def _check_glibc_version(version_str, required_major, minimum_minor): + # type: (str, int, int) -> bool + # Parse string and check against requested version. 
+ # + # We use a regexp instead of str.split because we want to discard any + # random junk that might come after the minor version -- this might happen + # in patched/forked versions of glibc (e.g. Linaro's version of glibc + # uses version strings like "2.20-2014.11"). See gh-3588. + m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) + if not m: + warnings.warn( + "Expected glibc version with 2 components major.minor," + " got: %s" % version_str, + RuntimeWarning, + ) + return False + return ( + int(m.group("major")) == required_major + and int(m.group("minor")) >= minimum_minor + ) + + +def _have_compatible_glibc(required_major, minimum_minor): + # type: (int, int) -> bool + version_str = _glibc_version_string() + if version_str is None: + return False + return _check_glibc_version(version_str, required_major, minimum_minor) + + +# Python does not provide platform information at sufficient granularity to +# identify the architecture of the running executable in some cases, so we +# determine it dynamically by reading the information from the running +# process. This only applies on Linux, which uses the ELF format. +class _ELFFileHeader(object): + # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header + class _InvalidELFFileHeader(ValueError): + """ + An invalid ELF file header was found. + """ + + ELF_MAGIC_NUMBER = 0x7F454C46 + ELFCLASS32 = 1 + ELFCLASS64 = 2 + ELFDATA2LSB = 1 + ELFDATA2MSB = 2 + EM_386 = 3 + EM_S390 = 22 + EM_ARM = 40 + EM_X86_64 = 62 + EF_ARM_ABIMASK = 0xFF000000 + EF_ARM_ABI_VER5 = 0x05000000 + EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + def __init__(self, file): + # type: (IO[bytes]) -> None + def unpack(fmt): + # type: (str) -> int + try: + (result,) = struct.unpack( + fmt, file.read(struct.calcsize(fmt)) + ) # type: (int, ) + except struct.error: + raise _ELFFileHeader._InvalidELFFileHeader() + return result + + self.e_ident_magic = unpack(">I") + if self.e_ident_magic != self.ELF_MAGIC_NUMBER: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_class = unpack("B") + if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_data = unpack("B") + if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_version = unpack("B") + self.e_ident_osabi = unpack("B") + self.e_ident_abiversion = unpack("B") + self.e_ident_pad = file.read(7) + format_h = "H" + format_i = "I" + format_q = "Q" + format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q + self.e_type = unpack(format_h) + self.e_machine = unpack(format_h) + self.e_version = unpack(format_i) + self.e_entry = unpack(format_p) + self.e_phoff = unpack(format_p) + self.e_shoff = unpack(format_p) + self.e_flags = unpack(format_i) + self.e_ehsize = unpack(format_h) + self.e_phentsize = unpack(format_h) + self.e_phnum = unpack(format_h) + self.e_shentsize = unpack(format_h) + self.e_shnum = unpack(format_h) + self.e_shstrndx = unpack(format_h) + + +def _get_elf_header(): + # type: () -> Optional[_ELFFileHeader] + try: + with open(sys.executable, "rb") as f: + elf_header = _ELFFileHeader(f) + except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): + return None + return elf_header + + +def _is_linux_armhf(): + # type: () -> bool + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + elf_header = _get_elf_header() + if elf_header is None: + 
return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_ARM + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABIMASK + ) == elf_header.EF_ARM_ABI_VER5 + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD + ) == elf_header.EF_ARM_ABI_FLOAT_HARD + return result + + +def _is_linux_i686(): + # type: () -> bool + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_386 + return result + + +def _have_compatible_manylinux_abi(arch): + # type: (str) -> bool + if arch == "armv7l": + return _is_linux_armhf() + if arch == "i686": + return _is_linux_i686() + return True + + +def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): + # type: (bool) -> Iterator[str] + linux = _normalize_string(distutils.util.get_platform()) + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv7l" + manylinux_support = [] + _, arch = linux.split("_", 1) + if _have_compatible_manylinux_abi(arch): + if arch in {"x86_64", "i686", "aarch64", "armv7l", "ppc64", "ppc64le", "s390x"}: + manylinux_support.append( + ("manylinux2014", (2, 17)) + ) # CentOS 7 w/ glibc 2.17 (PEP 599) + if arch in {"x86_64", "i686"}: + manylinux_support.append( + ("manylinux2010", (2, 12)) + ) # CentOS 6 w/ glibc 2.12 (PEP 571) + manylinux_support.append( + ("manylinux1", (2, 5)) + ) # CentOS 5 w/ glibc 2.5 (PEP 513) + manylinux_support_iter = iter(manylinux_support) + for name, glibc_version in manylinux_support_iter: + if _is_manylinux_compatible(name, glibc_version): + yield linux.replace("linux", name) + break + # Support for a later manylinux implies support for an earlier version. + for name, _ in manylinux_support_iter: + yield linux.replace("linux", name) + yield linux + + +def _generic_platforms(): + # type: () -> Iterator[str] + yield _normalize_string(distutils.util.get_platform()) + + +def _platform_tags(): + # type: () -> Iterator[str] + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name(): + # type: () -> str + """ + Returns the name of the running interpreter. + """ + try: + name = sys.implementation.name # type: ignore + except AttributeError: # pragma: no cover + # Python 2.7 compatibility. + name = platform.python_implementation().lower() + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(**kwargs): + # type: (bool) -> str + """ + Returns the version of the running interpreter. + """ + warn = _warn_keyword_parameter("interpreter_version", kwargs) + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = _version_nodot(sys.version_info[:2]) + return version + + +def _version_nodot(version): + # type: (PythonVersion) -> str + if any(v >= 10 for v in version): + sep = "_" + else: + sep = "" + return sep.join(map(str, version)) + + +def sys_tags(**kwargs): + # type: (bool) -> Iterator[Tag] + """ + Returns the sequence of tag triples for the running interpreter. 
+ + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ + warn = _warn_keyword_parameter("sys_tags", kwargs) + + interp_name = interpreter_name() + if interp_name == "cp": + for tag in cpython_tags(warn=warn): + yield tag + else: + for tag in generic_tags(): + yield tag + + for tag in compatible_tags(): + yield tag diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/utils.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/utils.py new file mode 100644 index 00000000..19579c1a --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/utils.py @@ -0,0 +1,65 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import re + +from ._typing import TYPE_CHECKING, cast +from .version import InvalidVersion, Version + +if TYPE_CHECKING: # pragma: no cover + from typing import NewType, Union + + NormalizedName = NewType("NormalizedName", str) + +_canonicalize_regex = re.compile(r"[-_.]+") + + +def canonicalize_name(name): + # type: (str) -> NormalizedName + # This is taken from PEP 503. + value = _canonicalize_regex.sub("-", name).lower() + return cast("NormalizedName", value) + + +def canonicalize_version(_version): + # type: (str) -> Union[Version, str] + """ + This is very similar to Version.__str__, but has one subtle difference + with the way it handles the release segment. + """ + + try: + version = Version(_version) + except InvalidVersion: + # Legacy versions cannot be normalized + return _version + + parts = [] + + # Epoch + if version.epoch != 0: + parts.append("{0}!".format(version.epoch)) + + # Release segment + # NB: This strips trailing '.0's to normalize + parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release))) + + # Pre-release + if version.pre is not None: + parts.append("".join(str(x) for x in version.pre)) + + # Post-release + if version.post is not None: + parts.append(".post{0}".format(version.post)) + + # Development release + if version.dev is not None: + parts.append(".dev{0}".format(version.dev)) + + # Local version segment + if version.local is not None: + parts.append("+{0}".format(version.local)) + + return "".join(parts) diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/packaging/version.py b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/version.py new file mode 100644 index 00000000..00371e86 --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/_vendor/packaging/version.py @@ -0,0 +1,535 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+from __future__ import absolute_import, division, print_function + +import collections +import itertools +import re + +from ._structures import Infinity, NegativeInfinity +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union + + from ._structures import InfinityType, NegativeInfinityType + + InfiniteTypes = Union[InfinityType, NegativeInfinityType] + PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] + SubLocalType = Union[InfiniteTypes, int, str] + LocalType = Union[ + NegativeInfinityType, + Tuple[ + Union[ + SubLocalType, + Tuple[SubLocalType, str], + Tuple[NegativeInfinityType, SubLocalType], + ], + ..., + ], + ] + CmpKey = Tuple[ + int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType + ] + LegacyCmpKey = Tuple[int, Tuple[str, ...]] + VersionComparisonMethod = Callable[ + [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool + ] + +__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] + + +_Version = collections.namedtuple( + "_Version", ["epoch", "release", "dev", "pre", "post", "local"] +) + + +def parse(version): + # type: (str) -> Union[LegacyVersion, Version] + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. + """ + + +class _BaseVersion(object): + _key = None # type: Union[CmpKey, LegacyCmpKey] + + def __hash__(self): + # type: () -> int + return hash(self._key) + + def __lt__(self, other): + # type: (_BaseVersion) -> bool + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + # type: (_BaseVersion) -> bool + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + # type: (object) -> bool + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + # type: (_BaseVersion) -> bool + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + # type: (_BaseVersion) -> bool + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + # type: (object) -> bool + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + # type: (object, VersionComparisonMethod) -> Union[bool, NotImplemented] + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + def __init__(self, version): + # type: (str) -> None + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + # type: () -> str + return self._version + + def __repr__(self): + # type: () -> str + return "".format(repr(str(self))) + + @property + def public(self): + # type: () -> str + return self._version + + @property + def base_version(self): + # type: () -> str + return self._version + + @property + def epoch(self): + # type: () -> int + return -1 + + @property + def release(self): + # type: () -> None + return None + + @property + def pre(self): + # type: () -> None + return None + + @property + def post(self): + # type: () -> None + return None + + @property + def dev(self): + # type: () -> None + return None + + @property + def 
local(self): + # type: () -> None + return None + + @property + def is_prerelease(self): + # type: () -> bool + return False + + @property + def is_postrelease(self): + # type: () -> bool + return False + + @property + def is_devrelease(self): + # type: () -> bool + return False + + +_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) + +_legacy_version_replacement_map = { + "pre": "c", + "preview": "c", + "-": "final-", + "rc": "c", + "dev": "@", +} + + +def _parse_version_parts(s): + # type: (str) -> Iterator[str] + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # type: (str) -> LegacyCmpKey + + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] # type: List[str] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + + return epoch, tuple(parts) + + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    def __init__(self, version):
+        # type: (str) -> None
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self):
+        # type: () -> str
+        return "".format(repr(str(self)))
+
+    def __str__(self):
+        # type: () -> str
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(".post{0}".format(self.post))
+
+        # Development release
+        if self.dev is not None:
+            parts.append(".dev{0}".format(self.dev))
+
+        # Local version segment
+        if self.local is not None:
+            parts.append("+{0}".format(self.local))
+
+        return "".join(parts)
+
+    @property
+    def epoch(self):
+        # type: () -> int
+        _epoch = self._version.epoch  # type: int
+        return _epoch
+
+    @property
+    def release(self):
+        # type: () -> Tuple[int, ...]
+        _release = self._version.release  # type: Tuple[int, ...]
+        return _release
+
+    @property
+    def pre(self):
+        # type: () -> Optional[Tuple[str, int]]
+        _pre = self._version.pre  # type: Optional[Tuple[str, int]]
+        return _pre
+
+    @property
+    def post(self):
+        # type: () -> Optional[Tuple[str, int]]
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self):
+        # type: () -> Optional[Tuple[str, int]]
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self):
+        # type: () -> Optional[str]
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self):
+        # type: () -> str
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self):
+        # type: () -> str
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self):
+        # type: () -> bool
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self):
+        # type: () -> bool
+        return self.post is not None
+
+    @property
+    def is_devrelease(self):
+        # type: () -> bool
+        return self.dev is not None
+
+    @property
+    def major(self):
+        # type: () -> int
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self):
+        # type: () -> int
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self):
+        # type: () -> int
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+    letter,  # type: str
+    number,  # type: Union[str, bytes, SupportsInt]
+):
+    # type: (...) -> Optional[Tuple[str, int]]
+
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+    # type: (str) -> Optional[LocalType]
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch,  # type: int
+    release,  # type: Tuple[int, ...]
+    pre,  # type: Optional[Tuple[str, int]]
+    post,  # type: Optional[Tuple[str, int]]
+    dev,  # type: Optional[Tuple[str, int]]
+    local,  # type: Optional[Tuple[SubLocalType]]
+):
+    # type: (...) -> CmpKey
+
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all the now-
+    # leading zeros until we come to something non-zero, then re-reverse the
+    # rest back into the correct order, make it a tuple, and use that for our
+    # sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre = NegativeInfinity  # type: PrePostDevType
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post = NegativeInfinity  # type: PrePostDevType
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev = Infinity  # type: PrePostDevType
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local = NegativeInfinity  # type: LocalType
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
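To make the sorting rules implemented by _cmpkey above concrete, here is a small illustrative example (not part of this diff) using the public packaging distribution, which ships the same version module as this vendored copy; note that packaging releases contemporary with this copy still provide LegacyVersion, while newer releases have removed it.

from packaging.version import Version, parse

# dev releases sort before pre-releases, plain releases before post releases,
# and a local segment sorts after the same version without one.
versions = ["1.0.post1", "1.0", "1.0a1", "1.0.dev0", "1.0+local"]
print(sorted(versions, key=Version))
# -> ['1.0.dev0', '1.0a1', '1.0', '1.0+local', '1.0.post1']

# parse() falls back to LegacyVersion for non-PEP-440 strings; its hard-coded
# epoch of -1 puts every legacy version before any PEP 440 version.
print(parse("2021.banana") < parse("0.0.1"))  # -> True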
diff --git a/venv/Lib/site-packages/pkg_resources/_vendor/pyparsing.py b/venv/Lib/site-packages/pkg_resources/_vendor/pyparsing.py
new file mode 100644
index 00000000..1333c00e
--- /dev/null
+++ b/venv/Lib/site-packages/pkg_resources/_vendor/pyparsing.py
@@ -0,0 +1,5742 @@
+# module pyparsing.py
+#
+# Copyright (c) 2003-2018  Paul T. McGuire
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = \
+"""
+pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
+
+The pyparsing module is an alternative approach to creating and executing simple grammars,
+vs. the traditional lex/yacc approach, or the use of regular expressions.  With pyparsing, you
+don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
+provides a library of classes that you use to construct the grammar directly in Python.
+
+Here is a program to parse "Hello, World!" (or any greeting of the form 
+C{", !"}), built up using L{Word}, L{Literal}, and L{And} elements 
+(L{'+'} operator gives L{And} expressions, strings are auto-converted to
+L{Literal} expressions)::
+
+    from pyparsing import Word, alphas
+
+    # define grammar of a greeting
+    greet = Word(alphas) + "," + Word(alphas) + "!"
+
+    hello = "Hello, World!"
+    print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+    Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the self-explanatory
+class names, and the use of '+', '|' and '^' operators.
+
+The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an
+object with named attributes.
+
+The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
+ - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello  ,  World  !", etc.)
+ - quoted strings
+ - embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+ - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
+ - construct character word-group expressions using the L{Word} class
+ - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
+ - use L{'+'}, L{'|'}, L{'^'}, and L{'&'} operators to combine simple expressions into more complex ones
+ - associate names with your parsed results using L{ParserElement.setResultsName}
+ - find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
+ - find more useful common expressions in the L{pyparsing_common} namespace class
+"""
+
+__version__ = "2.2.1"
+__versionTime__ = "18 Sep 2018 00:49 UTC"
+__author__ = "Paul McGuire "
+
+import string
+from weakref import ref as wkref
+import copy
+import sys
+import warnings
+import re
+import sre_constants
+import collections
+import pprint
+import traceback
+import types
+from datetime import datetime
+
+try:
+    from _thread import RLock
+except ImportError:
+    from threading import RLock
+
+try:
+    # Python 3
+    from collections.abc import Iterable
+    from collections.abc import MutableMapping
+except ImportError:
+    # Python 2.7
+    from collections import Iterable
+    from collections import MutableMapping
+
+try:
+    from collections import OrderedDict as _OrderedDict
+except ImportError:
+    try:
+        from ordereddict import OrderedDict as _OrderedDict
+    except ImportError:
+        _OrderedDict = None
+
+#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
+
+__all__ = [
+'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
+'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
+'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
+'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
+'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
+'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 
+'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
+'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
+'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
+'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
+'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
+'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
+'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
+'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', 
+'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
+'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
+'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
+'CloseMatch', 'tokenMap', 'pyparsing_common',
+]
+
+system_version = tuple(sys.version_info)[:3]
+PY_3 = system_version[0] == 3
+if PY_3:
+    _MAX_INT = sys.maxsize
+    basestring = str
+    unichr = chr
+    _ustr = str
+
+    # build list of single arg builtins, that can be used as parse actions
+    singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
+
+else:
+    _MAX_INT = sys.maxint
+    range = xrange
+
+    def _ustr(obj):
+        """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
+           str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
+           then < returns the unicode object | encodes it with the default encoding | ... >.
+        """
+        if isinstance(obj,unicode):
+            return obj
+
+        try:
+            # If this works, then _ustr(obj) has the same behaviour as str(obj), so
+            # it won't break any existing code.
+            return str(obj)
+
+        except UnicodeEncodeError:
+            # Else encode it
+            ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
+            xmlcharref = Regex(r'&#\d+;')
+            xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
+            return xmlcharref.transformString(ret)
+
+    # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
+    singleArgBuiltins = []
+    import __builtin__
+    for fname in "sum len sorted reversed list tuple set any all min max".split():
+        try:
+            singleArgBuiltins.append(getattr(__builtin__,fname))
+        except AttributeError:
+            continue
+            
+_generatorType = type((y for y in range(1)))
+ 
+def _xml_escape(data):
+    """Escape &, <, >, ", ', etc. in a string of data."""
+
+    # ampersand must be replaced first
+    from_symbols = '&><"\''
+    to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
+    for from_,to_ in zip(from_symbols, to_symbols):
+        data = data.replace(from_, to_)
+    return data
+
+class _Constants(object):
+    pass
+
+alphas     = string.ascii_uppercase + string.ascii_lowercase
+nums       = "0123456789"
+hexnums    = nums + "ABCDEFabcdef"
+alphanums  = alphas + nums
+_bslash    = chr(92)
+printables = "".join(c for c in string.printable if c not in string.whitespace)
+
+class ParseBaseException(Exception):
+    """base exception class for all parsing runtime exceptions"""
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__( self, pstr, loc=0, msg=None, elem=None ):
+        self.loc = loc
+        if msg is None:
+            self.msg = pstr
+            self.pstr = ""
+        else:
+            self.msg = msg
+            self.pstr = pstr
+        self.parserElement = elem
+        self.args = (pstr, loc, msg)
+
+    @classmethod
+    def _from_exception(cls, pe):
+        """
+        internal factory method to simplify creating one type of ParseException 
+        from another - avoids having __init__ signature conflicts among subclasses
+        """
+        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
+
+    def __getattr__( self, aname ):
+        """supported attributes by name are:
+            - lineno - returns the line number of the exception text
+            - col - returns the column number of the exception text
+            - line - returns the line containing the exception text
+        """
+        if( aname == "lineno" ):
+            return lineno( self.loc, self.pstr )
+        elif( aname in ("col", "column") ):
+            return col( self.loc, self.pstr )
+        elif( aname == "line" ):
+            return line( self.loc, self.pstr )
+        else:
+            raise AttributeError(aname)
+
+    def __str__( self ):
+        return "%s (at char %d), (line:%d, col:%d)" % \
+                ( self.msg, self.loc, self.lineno, self.column )
+    def __repr__( self ):
+        return _ustr(self)
+    def markInputline( self, markerString = ">!<" ):
+        """Extracts the exception line from the input string, and marks
+           the location of the exception with a special symbol.
+        """
+        line_str = self.line
+        line_column = self.column - 1
+        if markerString:
+            line_str = "".join((line_str[:line_column],
+                                markerString, line_str[line_column:]))
+        return line_str.strip()
+    def __dir__(self):
+        return "lineno col line".split() + dir(type(self))
+
+class ParseException(ParseBaseException):
+    """
+    Exception thrown when parse expressions don't match class;
+    supported attributes by name are:
+     - lineno - returns the line number of the exception text
+     - col - returns the column number of the exception text
+     - line - returns the line containing the exception text
+        
+    Example::
+        try:
+            Word(nums).setName("integer").parseString("ABC")
+        except ParseException as pe:
+            print(pe)
+            print("column: {}".format(pe.col))
+            
+    prints::
+       Expected integer (at char 0), (line:1, col:1)
+        column: 1
+    """
+    pass
+
+class ParseFatalException(ParseBaseException):
+    """user-throwable exception thrown when inconsistent parse content
+       is found; stops all parsing immediately"""
+    pass
+
+class ParseSyntaxException(ParseFatalException):
+    """just like L{ParseFatalException}, but thrown internally when an
+       L{ErrorStop} ('-' operator) indicates that parsing is to stop 
+       immediately because an unbacktrackable syntax error has been found"""
+    pass
+
+#~ class ReparseException(ParseBaseException):
+    #~ """Experimental class - parse actions can raise this exception to cause
+       #~ pyparsing to reparse the input string:
+        #~ - with a modified input string, and/or
+        #~ - with a modified start location
+       #~ Set the values of the ReparseException in the constructor, and raise the
+       #~ exception in a parse action to cause pyparsing to use the new string/location.
+       #~ Setting the values as None causes no change to be made.
+       #~ """
+    #~ def __init_( self, newstring, restartLoc ):
+        #~ self.newParseText = newstring
+        #~ self.reparseLoc = restartLoc
+
+class RecursiveGrammarException(Exception):
+    """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive"""
+    def __init__( self, parseElementList ):
+        self.parseElementTrace = parseElementList
+
+    def __str__( self ):
+        return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+class _ParseResultsWithOffset(object):
+    def __init__(self,p1,p2):
+        self.tup = (p1,p2)
+    def __getitem__(self,i):
+        return self.tup[i]
+    def __repr__(self):
+        return repr(self.tup[0])
+    def setOffset(self,i):
+        self.tup = (self.tup[0],i)
+
+class ParseResults(object):
+    """
+    Structured parse results, to provide multiple means of access to the parsed data:
+       - as a list (C{len(results)})
+       - by list index (C{results[0], results[1]}, etc.)
+       - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})
+
+    Example::
+        integer = Word(nums)
+        date_str = (integer.setResultsName("year") + '/' 
+                        + integer.setResultsName("month") + '/' 
+                        + integer.setResultsName("day"))
+        # equivalent form:
+        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+        # parseString returns a ParseResults object
+        result = date_str.parseString("1999/12/31")
+
+        def test(s, fn=repr):
+            print("%s -> %s" % (s, fn(eval(s))))
+        test("list(result)")
+        test("result[0]")
+        test("result['month']")
+        test("result.day")
+        test("'month' in result")
+        test("'minutes' in result")
+        test("result.dump()", str)
+    prints::
+        list(result) -> ['1999', '/', '12', '/', '31']
+        result[0] -> '1999'
+        result['month'] -> '12'
+        result.day -> '31'
+        'month' in result -> True
+        'minutes' in result -> False
+        result.dump() -> ['1999', '/', '12', '/', '31']
+        - day: 31
+        - month: 12
+        - year: 1999
+    """
+    def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
+        if isinstance(toklist, cls):
+            return toklist
+        retobj = object.__new__(cls)
+        retobj.__doinit = True
+        return retobj
+
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
+        if self.__doinit:
+            self.__doinit = False
+            self.__name = None
+            self.__parent = None
+            self.__accumNames = {}
+            self.__asList = asList
+            self.__modal = modal
+            if toklist is None:
+                toklist = []
+            if isinstance(toklist, list):
+                self.__toklist = toklist[:]
+            elif isinstance(toklist, _generatorType):
+                self.__toklist = list(toklist)
+            else:
+                self.__toklist = [toklist]
+            self.__tokdict = dict()
+
+        if name is not None and name:
+            if not modal:
+                self.__accumNames[name] = 0
+            if isinstance(name,int):
+                name = _ustr(name) # will always return a str, but use _ustr for consistency
+            self.__name = name
+            if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
+                if isinstance(toklist,basestring):
+                    toklist = [ toklist ]
+                if asList:
+                    if isinstance(toklist,ParseResults):
+                        self[name] = _ParseResultsWithOffset(toklist.copy(),0)
+                    else:
+                        self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
+                    self[name].__name = name
+                else:
+                    try:
+                        self[name] = toklist[0]
+                    except (KeyError,TypeError,IndexError):
+                        self[name] = toklist
+
+    def __getitem__( self, i ):
+        if isinstance( i, (int,slice) ):
+            return self.__toklist[i]
+        else:
+            if i not in self.__accumNames:
+                return self.__tokdict[i][-1][0]
+            else:
+                return ParseResults([ v[0] for v in self.__tokdict[i] ])
+
+    def __setitem__( self, k, v, isinstance=isinstance ):
+        if isinstance(v,_ParseResultsWithOffset):
+            self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
+            sub = v[0]
+        elif isinstance(k,(int,slice)):
+            self.__toklist[k] = v
+            sub = v
+        else:
+            self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
+            sub = v
+        if isinstance(sub,ParseResults):
+            sub.__parent = wkref(self)
+
+    def __delitem__( self, i ):
+        if isinstance(i,(int,slice)):
+            mylen = len( self.__toklist )
+            del self.__toklist[i]
+
+            # convert int to slice
+            if isinstance(i, int):
+                if i < 0:
+                    i += mylen
+                i = slice(i, i+1)
+            # get removed indices
+            removed = list(range(*i.indices(mylen)))
+            removed.reverse()
+            # fixup indices in token dictionary
+            for name,occurrences in self.__tokdict.items():
+                for j in removed:
+                    for k, (value, position) in enumerate(occurrences):
+                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
+        else:
+            del self.__tokdict[i]
+
+    def __contains__( self, k ):
+        return k in self.__tokdict
+
+    def __len__( self ): return len( self.__toklist )
+    def __bool__(self): return ( not not self.__toklist )
+    __nonzero__ = __bool__
+    def __iter__( self ): return iter( self.__toklist )
+    def __reversed__( self ): return iter( self.__toklist[::-1] )
+    def _iterkeys( self ):
+        if hasattr(self.__tokdict, "iterkeys"):
+            return self.__tokdict.iterkeys()
+        else:
+            return iter(self.__tokdict)
+
+    def _itervalues( self ):
+        return (self[k] for k in self._iterkeys())
+            
+    def _iteritems( self ):
+        return ((k, self[k]) for k in self._iterkeys())
+
+    if PY_3:
+        keys = _iterkeys       
+        """Returns an iterator of all named result keys (Python 3.x only)."""
+
+        values = _itervalues
+        """Returns an iterator of all named result values (Python 3.x only)."""
+
+        items = _iteritems
+        """Returns an iterator of all named result key-value tuples (Python 3.x only)."""
+
+    else:
+        iterkeys = _iterkeys
+        """Returns an iterator of all named result keys (Python 2.x only)."""
+
+        itervalues = _itervalues
+        """Returns an iterator of all named result values (Python 2.x only)."""
+
+        iteritems = _iteritems
+        """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
+
+        def keys( self ):
+            """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iterkeys())
+
+        def values( self ):
+            """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.itervalues())
+                
+        def items( self ):
+            """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iteritems())
+
+    def haskeys( self ):
+        """Since keys() returns an iterator, this method is helpful in bypassing
+           code that looks for the existence of any defined results names."""
+        return bool(self.__tokdict)
+        
+    def pop( self, *args, **kwargs):
+        """
+        Removes and returns item at specified index (default=C{last}).
+        Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
+        argument or an integer argument, it will use C{list} semantics
+        and pop tokens from the list of parsed tokens. If passed a 
+        non-integer argument (most likely a string), it will use C{dict}
+        semantics and pop the corresponding value from any defined 
+        results names. A second default return value argument is 
+        supported, just as in C{dict.pop()}.
+
+        Example::
+            def remove_first(tokens):
+                tokens.pop(0)
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
+
+            label = Word(alphas)
+            patt = label("LABEL") + OneOrMore(Word(nums))
+            print(patt.parseString("AAB 123 321").dump())
+
+            # Use pop() in a parse action to remove named result (note that corresponding value is not
+            # removed from list form of results)
+            def remove_LABEL(tokens):
+                tokens.pop("LABEL")
+                return tokens
+            patt.addParseAction(remove_LABEL)
+            print(patt.parseString("AAB 123 321").dump())
+        prints::
+            ['AAB', '123', '321']
+            - LABEL: AAB
+
+            ['AAB', '123', '321']
+        """
+        if not args:
+            args = [-1]
+        for k,v in kwargs.items():
+            if k == 'default':
+                args = (args[0], v)
+            else:
+                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
+        if (isinstance(args[0], int) or 
+                        len(args) == 1 or 
+                        args[0] in self):
+            index = args[0]
+            ret = self[index]
+            del self[index]
+            return ret
+        else:
+            defaultvalue = args[1]
+            return defaultvalue
+
+    def get(self, key, defaultValue=None):
+        """
+        Returns named result matching the given key, or if there is no
+        such name, then returns the given C{defaultValue} or C{None} if no
+        C{defaultValue} is specified.
+
+        Similar to C{dict.get()}.
+        
+        Example::
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
+
+            result = date_str.parseString("1999/12/31")
+            print(result.get("year")) # -> '1999'
+            print(result.get("hour", "not specified")) # -> 'not specified'
+            print(result.get("hour")) # -> None
+        """
+        if key in self:
+            return self[key]
+        else:
+            return defaultValue
+
+    def insert( self, index, insStr ):
+        """
+        Inserts new element at location index in the list of parsed tokens.
+        
+        Similar to C{list.insert()}.
+
+        Example::
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to insert the parse location in the front of the parsed results
+            def insert_locn(locn, tokens):
+                tokens.insert(0, locn)
+            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+        """
+        self.__toklist.insert(index, insStr)
+        # fixup indices in token dictionary
+        for name,occurrences in self.__tokdict.items():
+            for k, (value, position) in enumerate(occurrences):
+                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+    def append( self, item ):
+        """
+        Add single element to end of ParseResults list of elements.
+
+        Example::
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+            
+            # use a parse action to compute the sum of the parsed integers, and add it to the end
+            def append_sum(tokens):
+                tokens.append(sum(map(int, tokens)))
+            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+        """
+        self.__toklist.append(item)
+
+    def extend( self, itemseq ):
+        """
+        Add sequence of elements to end of ParseResults list of elements.
+
+        Example::
+            patt = OneOrMore(Word(alphas))
+            
+            # use a parse action to append the reverse of the matched strings, to make a palindrome
+            def make_palindrome(tokens):
+                tokens.extend(reversed([t[::-1] for t in tokens]))
+                return ''.join(tokens)
+            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+        """
+        if isinstance(itemseq, ParseResults):
+            self += itemseq
+        else:
+            self.__toklist.extend(itemseq)
+
+    def clear( self ):
+        """
+        Clear all elements and results names.
+        """
+        del self.__toklist[:]
+        self.__tokdict.clear()
+
+    def __getattr__( self, name ):
+        try:
+            return self[name]
+        except KeyError:
+            return ""
+            
+        if name in self.__tokdict:
+            if name not in self.__accumNames:
+                return self.__tokdict[name][-1][0]
+            else:
+                return ParseResults([ v[0] for v in self.__tokdict[name] ])
+        else:
+            return ""
+
+    def __add__( self, other ):
+        ret = self.copy()
+        ret += other
+        return ret
+
+    def __iadd__( self, other ):
+        if other.__tokdict:
+            offset = len(self.__toklist)
+            addoffset = lambda a: offset if a<0 else a+offset
+            otheritems = other.__tokdict.items()
+            otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
+                                for (k,vlist) in otheritems for v in vlist]
+            for k,v in otherdictitems:
+                self[k] = v
+                if isinstance(v[0],ParseResults):
+                    v[0].__parent = wkref(self)
+            
+        self.__toklist += other.__toklist
+        self.__accumNames.update( other.__accumNames )
+        return self
+
+    def __radd__(self, other):
+        if isinstance(other,int) and other == 0:
+            # useful for merging many ParseResults using sum() builtin
+            return self.copy()
+        else:
+            # this may raise a TypeError - so be it
+            return other + self
+        
+    def __repr__( self ):
+        return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
+
+    def __str__( self ):
+        return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+    def _asStringList( self, sep='' ):
+        out = []
+        for item in self.__toklist:
+            if out and sep:
+                out.append(sep)
+            if isinstance( item, ParseResults ):
+                out += item._asStringList()
+            else:
+                out.append( _ustr(item) )
+        return out
+
+    def asList( self ):
+        """
+        Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+        Example::
+            patt = OneOrMore(Word(alphas))
+            result = patt.parseString("sldkj lsdkj sldkj")
+            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+            
+            # Use asList() to create an actual list
+            result_list = result.asList()
+            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+        """
+        return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
+
+    def asDict( self ):
+        """
+        Returns the named parse results as a nested dictionary.
+
+        Example::
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+            
+            result = date_str.parseString('12/31/1999')
+            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+            
+            result_dict = result.asDict()
+            print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+            # even though a ParseResults supports dict-like access, sometimes you just need to have a dict
+            import json
+            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+        """
+        if PY_3:
+            item_fn = self.items
+        else:
+            item_fn = self.iteritems
+            
+        def toItem(obj):
+            if isinstance(obj, ParseResults):
+                if obj.haskeys():
+                    return obj.asDict()
+                else:
+                    return [toItem(v) for v in obj]
+            else:
+                return obj
+                
+        return dict((k,toItem(v)) for k,v in item_fn())
+
+    def copy( self ):
+        """
+        Returns a new copy of a C{ParseResults} object.
+        """
+        ret = ParseResults( self.__toklist )
+        ret.__tokdict = self.__tokdict.copy()
+        ret.__parent = self.__parent
+        ret.__accumNames.update( self.__accumNames )
+        ret.__name = self.__name
+        return ret
+
+    def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
+        """
+        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
+        """
+        nl = "\n"
+        out = []
+        namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
+                                                            for v in vlist)
+        nextLevelIndent = indent + "  "
+
+        # collapse out indents if formatting is not desired
+        if not formatted:
+            indent = ""
+            nextLevelIndent = ""
+            nl = ""
+
+        selfTag = None
+        if doctag is not None:
+            selfTag = doctag
+        else:
+            if self.__name:
+                selfTag = self.__name
+
+        if not selfTag:
+            if namedItemsOnly:
+                return ""
+            else:
+                selfTag = "ITEM"
+
+        out += [ nl, indent, "<", selfTag, ">" ]
+
+        for i,res in enumerate(self.__toklist):
+            if isinstance(res,ParseResults):
+                if i in namedItems:
+                    out += [ res.asXML(namedItems[i],
+                                        namedItemsOnly and doctag is None,
+                                        nextLevelIndent,
+                                        formatted)]
+                else:
+                    out += [ res.asXML(None,
+                                        namedItemsOnly and doctag is None,
+                                        nextLevelIndent,
+                                        formatted)]
+            else:
+                # individual token, see if there is a name for it
+                resTag = None
+                if i in namedItems:
+                    resTag = namedItems[i]
+                if not resTag:
+                    if namedItemsOnly:
+                        continue
+                    else:
+                        resTag = "ITEM"
+                xmlBodyText = _xml_escape(_ustr(res))
+                out += [ nl, nextLevelIndent, "<", resTag, ">",
+                                                xmlBodyText,
+                                                "</", resTag, ">" ]
+
+        out += [ nl, indent, "</", selfTag, ">" ]
+        return "".join(out)
+
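+    # A hedged, illustrative sketch (not part of the upstream docstring): named results
+    # become tags, unnamed tokens fall back to the generic ITEM tag.
+    #~ integer = Word(nums)
+    #~ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+    #~ print(date_str.parseString('12/31/1999').asXML("date"))
+    # emits <date> ... <year>12</year> ... <month>31</month> ... <day>1999</day> ... </date>
+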
+    def __lookup(self,sub):
+        for k,vlist in self.__tokdict.items():
+            for v,loc in vlist:
+                if sub is v:
+                    return k
+        return None
+
+    def getName(self):
+        r"""
+        Returns the results name for this token expression. Useful when several 
+        different expressions might match at a particular location.
+
+        Example::
+            integer = Word(nums)
+            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+            house_number_expr = Suppress('#') + Word(nums, alphanums)
+            user_data = (Group(house_number_expr)("house_number") 
+                        | Group(ssn_expr)("ssn")
+                        | Group(integer)("age"))
+            user_info = OneOrMore(user_data)
+            
+            result = user_info.parseString("22 111-22-3333 #221B")
+            for item in result:
+                print(item.getName(), ':', item[0])
+        prints::
+            age : 22
+            ssn : 111-22-3333
+            house_number : 221B
+        """
+        if self.__name:
+            return self.__name
+        elif self.__parent:
+            par = self.__parent()
+            if par:
+                return par.__lookup(self)
+            else:
+                return None
+        elif (len(self) == 1 and
+               len(self.__tokdict) == 1 and
+               next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
+            return next(iter(self.__tokdict.keys()))
+        else:
+            return None
+
+    def dump(self, indent='', depth=0, full=True):
+        """
+        Diagnostic method for listing out the contents of a C{ParseResults}.
+        Accepts an optional C{indent} argument so that this string can be embedded
+        in a nested display of other data.
+
+        Example::
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+            
+            result = date_str.parseString('12/31/1999')
+            print(result.dump())
+        prints::
+            ['12', '/', '31', '/', '1999']
+            - day: 1999
+            - month: 31
+            - year: 12
+        """
+        out = []
+        NL = '\n'
+        out.append( indent+_ustr(self.asList()) )
+        if full:
+            if self.haskeys():
+                items = sorted((str(k), v) for k,v in self.items())
+                for k,v in items:
+                    if out:
+                        out.append(NL)
+                    out.append( "%s%s- %s: " % (indent,('  '*depth), k) )
+                    if isinstance(v,ParseResults):
+                        if v:
+                            out.append( v.dump(indent,depth+1) )
+                        else:
+                            out.append(_ustr(v))
+                    else:
+                        out.append(repr(v))
+            elif any(isinstance(vv,ParseResults) for vv in self):
+                v = self
+                for i,vv in enumerate(v):
+                    if isinstance(vv,ParseResults):
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),vv.dump(indent,depth+1) ))
+                    else:
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),_ustr(vv)))
+            
+        return "".join(out)
+
+    def pprint(self, *args, **kwargs):
+        """
+        Pretty-printer for parsed results as a list, using the C{pprint} module.
+        Accepts additional positional or keyword args as defined for the 
+        C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})
+
+        Example::
+            ident = Word(alphas, alphanums)
+            num = Word(nums)
+            func = Forward()
+            term = ident | num | Group('(' + func + ')')
+            func <<= ident + Group(Optional(delimitedList(term)))
+            result = func.parseString("fna a,b,(fnb c,d,200),100")
+            result.pprint(width=40)
+        prints::
+            ['fna',
+             ['a',
+              'b',
+              ['(', 'fnb', ['c', 'd', '200'], ')'],
+              '100']]
+        """
+        pprint.pprint(self.asList(), *args, **kwargs)
+
+    # add support for pickle protocol
+    def __getstate__(self):
+        return ( self.__toklist,
+                 ( self.__tokdict.copy(),
+                   self.__parent is not None and self.__parent() or None,
+                   self.__accumNames,
+                   self.__name ) )
+
+    def __setstate__(self,state):
+        self.__toklist = state[0]
+        (self.__tokdict,
+         par,
+         inAccumNames,
+         self.__name) = state[1]
+        self.__accumNames = {}
+        self.__accumNames.update(inAccumNames)
+        if par is not None:
+            self.__parent = wkref(par)
+        else:
+            self.__parent = None
+
+    def __getnewargs__(self):
+        return self.__toklist, self.__name, self.__asList, self.__modal
+
+    def __dir__(self):
+        return (dir(type(self)) + list(self.keys()))
+
+MutableMapping.register(ParseResults)
+
+def col (loc,strg):
+    """Returns current column within a string, counting newlines as line separators.
+   The first column is number 1.
+
+   Note: the default parsing behavior is to expand tabs in the input string
+   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+   consistent view of the parsed string, the parse location, and line and column
+   positions within the parsed string.
+   """
+    s = strg
+    return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
+
+def lineno(loc,strg):
+    """Returns current line number within a string, counting newlines as line separators.
+   The first line is number 1.
+
+   Note: the default parsing behavior is to expand tabs in the input string
+   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+   consistent view of the parsed string, the parse location, and line and column
+   positions within the parsed string.
+   """
+    return strg.count("\n",0,loc) + 1
+
+def line( loc, strg ):
+    """Returns the line of text containing loc within a string, counting newlines as line separators.
+       """
+    lastCR = strg.rfind("\n", 0, loc)
+    nextCR = strg.find("\n", loc)
+    if nextCR >= 0:
+        return strg[lastCR+1:nextCR]
+    else:
+        return strg[lastCR+1:]
+
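+# A brief illustrative sketch of the three helpers above (not part of the upstream
+# source): columns and line numbers are 1-based, and line() returns the full text of
+# the line containing loc.
+#~ s = "abc\ndef"
+#~ col(5, s)     # -> 2  ('e' is in column 2 of its line)
+#~ lineno(5, s)  # -> 2  ('e' is on line 2)
+#~ line(5, s)    # -> 'def'
+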
+def _defaultStartDebugAction( instring, loc, expr ):
+    print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))
+
+def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
+    print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
+
+def _defaultExceptionDebugAction( instring, loc, expr, exc ):
+    print ("Exception raised:" + _ustr(exc))
+
+def nullDebugAction(*args):
+    """'Do-nothing' debug action, to suppress debugging output during parsing."""
+    pass
+
+# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
+#~ 'decorator to trim function calls to match the arity of the target'
+#~ def _trim_arity(func, maxargs=3):
+    #~ if func in singleArgBuiltins:
+        #~ return lambda s,l,t: func(t)
+    #~ limit = 0
+    #~ foundArity = False
+    #~ def wrapper(*args):
+        #~ nonlocal limit,foundArity
+        #~ while 1:
+            #~ try:
+                #~ ret = func(*args[limit:])
+                #~ foundArity = True
+                #~ return ret
+            #~ except TypeError:
+                #~ if limit == maxargs or foundArity:
+                    #~ raise
+                #~ limit += 1
+                #~ continue
+    #~ return wrapper
+
+# this version is Python 2.x-3.x cross-compatible
+'decorator to trim function calls to match the arity of the target'
+def _trim_arity(func, maxargs=2):
+    if func in singleArgBuiltins:
+        return lambda s,l,t: func(t)
+    limit = [0]
+    foundArity = [False]
+    
+    # traceback return data structure changed in Py3.5 - normalize back to plain tuples
+    if system_version[:2] >= (3,5):
+        def extract_stack(limit=0):
+            # special handling for Python 3.5.0 - extra deep call stack by 1
+            offset = -3 if system_version == (3,5,0) else -2
+            frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
+            return [frame_summary[:2]]
+        def extract_tb(tb, limit=0):
+            frames = traceback.extract_tb(tb, limit=limit)
+            frame_summary = frames[-1]
+            return [frame_summary[:2]]
+    else:
+        extract_stack = traceback.extract_stack
+        extract_tb = traceback.extract_tb
+    
+    # synthesize what would be returned by traceback.extract_stack at the call to 
+    # user's parse action 'func', so that we don't incur call penalty at parse time
+    
+    LINE_DIFF = 6
+    # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND 
+    # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
+    this_line = extract_stack(limit=2)[-1]
+    pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)
+
+    def wrapper(*args):
+        while 1:
+            try:
+                ret = func(*args[limit[0]:])
+                foundArity[0] = True
+                return ret
+            except TypeError:
+                # re-raise TypeErrors if they did not come from our arity testing
+                if foundArity[0]:
+                    raise
+                else:
+                    try:
+                        tb = sys.exc_info()[-1]
+                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
+                            raise
+                    finally:
+                        del tb
+
+                if limit[0] <= maxargs:
+                    limit[0] += 1
+                    continue
+                raise
+
+    # copy func name to wrapper for sensible debug output
+    func_name = ""
+    try:
+        func_name = getattr(func, '__name__', 
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    wrapper.__name__ = func_name
+
+    return wrapper
+
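+# Hedged illustrative sketch (not part of the upstream source): parse actions may be
+# written with 0-3 positional parameters; _trim_arity starts from the full
+# fn(s, loc, toks) call and drops leading arguments until the action accepts them.
+#~ def to_int(toks):
+#~     return int(toks[0])
+#~ wrapped = _trim_arity(to_int)
+#~ wrapped("12", 0, ParseResults(["12"]))   # internally called as fn(s, loc, toks) -> 12
+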
+class ParserElement(object):
+    """Abstract base level parser element class."""
+    DEFAULT_WHITE_CHARS = " \n\t\r"
+    verbose_stacktrace = False
+
+    @staticmethod
+    def setDefaultWhitespaceChars( chars ):
+        r"""
+        Overrides the default whitespace chars
+
+        Example::
+            # default whitespace chars are space, <TAB> and newline
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
+            
+            # change to just treat newline as significant
+            ParserElement.setDefaultWhitespaceChars(" \t")
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
+        """
+        ParserElement.DEFAULT_WHITE_CHARS = chars
+
+    @staticmethod
+    def inlineLiteralsUsing(cls):
+        """
+        Set class to be used for inclusion of string literals into a parser.
+        
+        Example::
+            # default literal class used is Literal
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+
+            # change to Suppress
+            ParserElement.inlineLiteralsUsing(Suppress)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
+        """
+        ParserElement._literalStringClass = cls
+
+    def __init__( self, savelist=False ):
+        self.parseAction = list()
+        self.failAction = None
+        #~ self.name = ""  # don't define self.name, let subclasses try/except upcall
+        self.strRepr = None
+        self.resultsName = None
+        self.saveAsList = savelist
+        self.skipWhitespace = True
+        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+        self.copyDefaultWhiteChars = True
+        self.mayReturnEmpty = False # used when checking for left-recursion
+        self.keepTabs = False
+        self.ignoreExprs = list()
+        self.debug = False
+        self.streamlined = False
+        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
+        self.errmsg = ""
+        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
+        self.debugActions = ( None, None, None ) #custom debug actions
+        self.re = None
+        self.callPreparse = True # used to avoid redundant calls to preParse
+        self.callDuringTry = False
+
+    def copy( self ):
+        """
+        Make a copy of this C{ParserElement}.  Useful for defining different parse actions
+        for the same parsing pattern, using copies of the original parse element.
+        
+        Example::
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
+            integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+            
+            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+        prints::
+            [5120, 100, 655360, 268435456]
+        Equivalent form of C{expr.copy()} is just C{expr()}::
+            integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+        """
+        cpy = copy.copy( self )
+        cpy.parseAction = self.parseAction[:]
+        cpy.ignoreExprs = self.ignoreExprs[:]
+        if self.copyDefaultWhiteChars:
+            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+        return cpy
+
+    def setName( self, name ):
+        """
+        Define name for this expression, makes debugging and exception messages clearer.
+        
+        Example::
+            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
+        """
+        self.name = name
+        self.errmsg = "Expected " + self.name
+        if hasattr(self,"exception"):
+            self.exception.msg = self.errmsg
+        return self
+
+    def setResultsName( self, name, listAllMatches=False ):
+        """
+        Define name for referencing matching tokens as a nested attribute
+        of the returned parse results.
+        NOTE: this returns a *copy* of the original C{ParserElement} object;
+        this is so that the client can define a basic element, such as an
+        integer, and reference it in multiple places with different names.
+
+        You can also set results names using the abbreviated syntax,
+        C{expr("name")} in place of C{expr.setResultsName("name")} - 
+        see L{I{__call__}<__call__>}.
+
+        Example::
+            date_str = (integer.setResultsName("year") + '/' 
+                        + integer.setResultsName("month") + '/' 
+                        + integer.setResultsName("day"))
+
+            # equivalent form:
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+        """
+        newself = self.copy()
+        if name.endswith("*"):
+            name = name[:-1]
+            listAllMatches=True
+        newself.resultsName = name
+        newself.modalResults = not listAllMatches
+        return newself
+
+    def setBreak(self,breakFlag = True):
+        """Method to invoke the Python pdb debugger when this element is
+           about to be parsed. Set C{breakFlag} to True to enable, False to
+           disable.
+        """
+        if breakFlag:
+            _parseMethod = self._parse
+            def breaker(instring, loc, doActions=True, callPreParse=True):
+                import pdb
+                pdb.set_trace()
+                return _parseMethod( instring, loc, doActions, callPreParse )
+            breaker._originalParseMethod = _parseMethod
+            self._parse = breaker
+        else:
+            if hasattr(self._parse,"_originalParseMethod"):
+                self._parse = self._parse._originalParseMethod
+        return self
+
+    def setParseAction( self, *fns, **kwargs ):
+        """
+        Define one or more actions to perform when successfully matching parse element definition.
+        Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
+        C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
+         - s   = the original string being parsed (see note below)
+         - loc = the location of the matching substring
+         - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
+        If the functions in fns modify the tokens, they can return them as the return
+        value from fn, and the modified list of tokens will replace the original.
+        Otherwise, fn does not need to return any value.
+
+        Optional keyword arguments:
+         - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
+
+        Note: the default parsing behavior is to expand tabs in the input string
+        before starting the parsing process.  See L{I{parseString}} for more information
+        on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+        consistent view of the parsed string, the parse location, and line and column
+        positions within the parsed string.
+        
+        Example::
+            integer = Word(nums)
+            date_str = integer + '/' + integer + '/' + integer
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+            # use parse action to convert to ints at parse time
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            date_str = integer + '/' + integer + '/' + integer
+
+            # note that integer fields are now ints, not strings
+            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
+        """
+        self.parseAction = list(map(_trim_arity, list(fns)))
+        self.callDuringTry = kwargs.get("callDuringTry", False)
+        return self
+
+    def addParseAction( self, *fns, **kwargs ):
+        """
+        Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}}.
+        
+        See examples in L{I{copy}}.
+        """
+        self.parseAction += list(map(_trim_arity, list(fns)))
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def addCondition(self, *fns, **kwargs):
+        """Add a boolean predicate function to expression's list of parse actions. See 
+        L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, 
+        functions passed to C{addCondition} need to return boolean success/fail of the condition.
+
+        Optional keyword arguments:
+         - message = define a custom message to be used in the raised exception
+         - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+         
+        Example::
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            year_int = integer.copy()
+            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+            date_str = year_int + '/' + integer + '/' + integer
+
+            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+        """
+        msg = kwargs.get("message", "failed user-defined condition")
+        exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
+        for fn in fns:
+            def pa(s,l,t):
+                if not bool(_trim_arity(fn)(s,l,t)):
+                    raise exc_type(s,l,msg)
+            self.parseAction.append(pa)
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def setFailAction( self, fn ):
+        """Define action to perform if parsing fails at this expression.
+           Fail action fn is a callable function that takes the arguments
+           C{fn(s,loc,expr,err)} where:
+            - s = string being parsed
+            - loc = location where expression match was attempted and failed
+            - expr = the parse expression that failed
+            - err = the exception thrown
+           The function returns no value.  It may throw C{L{ParseFatalException}}
+           if it is desired to stop parsing immediately."""
+        self.failAction = fn
+        return self
+
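+    # Hedged illustrative sketch (not from the upstream docstring): a fail action that
+    # reports where a required expression failed to match (Literal is defined later in
+    # this module).
+    #~ def report_failure(s, loc, expr, err):
+    #~     print("no match for %s at line %d, col %d" % (expr, lineno(loc, s), col(loc, s)))
+    #~ begin_kw = Literal("BEGIN").setFailAction(report_failure)
+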
+    def _skipIgnorables( self, instring, loc ):
+        exprsFound = True
+        while exprsFound:
+            exprsFound = False
+            for e in self.ignoreExprs:
+                try:
+                    while 1:
+                        loc,dummy = e._parse( instring, loc )
+                        exprsFound = True
+                except ParseException:
+                    pass
+        return loc
+
+    def preParse( self, instring, loc ):
+        if self.ignoreExprs:
+            loc = self._skipIgnorables( instring, loc )
+
+        if self.skipWhitespace:
+            wt = self.whiteChars
+            instrlen = len(instring)
+            while loc < instrlen and instring[loc] in wt:
+                loc += 1
+
+        return loc
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        return loc, []
+
+    def postParse( self, instring, loc, tokenlist ):
+        return tokenlist
+
+    #~ @profile
+    def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
+        debugging = ( self.debug ) #and doActions )
+
+        if debugging or self.failAction:
+            #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
+            if (self.debugActions[0] ):
+                self.debugActions[0]( instring, loc, self )
+            if callPreParse and self.callPreparse:
+                preloc = self.preParse( instring, loc )
+            else:
+                preloc = loc
+            tokensStart = preloc
+            try:
+                try:
+                    loc,tokens = self.parseImpl( instring, preloc, doActions )
+                except IndexError:
+                    raise ParseException( instring, len(instring), self.errmsg, self )
+            except ParseBaseException as err:
+                #~ print ("Exception raised:", err)
+                if self.debugActions[2]:
+                    self.debugActions[2]( instring, tokensStart, self, err )
+                if self.failAction:
+                    self.failAction( instring, tokensStart, self, err )
+                raise
+        else:
+            if callPreParse and self.callPreparse:
+                preloc = self.preParse( instring, loc )
+            else:
+                preloc = loc
+            tokensStart = preloc
+            if self.mayIndexError or preloc >= len(instring):
+                try:
+                    loc,tokens = self.parseImpl( instring, preloc, doActions )
+                except IndexError:
+                    raise ParseException( instring, len(instring), self.errmsg, self )
+            else:
+                loc,tokens = self.parseImpl( instring, preloc, doActions )
+
+        tokens = self.postParse( instring, loc, tokens )
+
+        retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
+        if self.parseAction and (doActions or self.callDuringTry):
+            if debugging:
+                try:
+                    for fn in self.parseAction:
+                        tokens = fn( instring, tokensStart, retTokens )
+                        if tokens is not None:
+                            retTokens = ParseResults( tokens,
+                                                      self.resultsName,
+                                                      asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+                                                      modal=self.modalResults )
+                except ParseBaseException as err:
+                    #~ print "Exception raised in user parse action:", err
+                    if (self.debugActions[2] ):
+                        self.debugActions[2]( instring, tokensStart, self, err )
+                    raise
+            else:
+                for fn in self.parseAction:
+                    tokens = fn( instring, tokensStart, retTokens )
+                    if tokens is not None:
+                        retTokens = ParseResults( tokens,
+                                                  self.resultsName,
+                                                  asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+                                                  modal=self.modalResults )
+        if debugging:
+            #~ print ("Matched",self,"->",retTokens.asList())
+            if (self.debugActions[1] ):
+                self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
+
+        return loc, retTokens
+
+    def tryParse( self, instring, loc ):
+        try:
+            return self._parse( instring, loc, doActions=False )[0]
+        except ParseFatalException:
+            raise ParseException( instring, loc, self.errmsg, self)
+    
+    def canParseNext(self, instring, loc):
+        try:
+            self.tryParse(instring, loc)
+        except (ParseException, IndexError):
+            return False
+        else:
+            return True
+
+    class _UnboundedCache(object):
+        def __init__(self):
+            cache = {}
+            self.not_in_cache = not_in_cache = object()
+
+            def get(self, key):
+                return cache.get(key, not_in_cache)
+
+            def set(self, key, value):
+                cache[key] = value
+
+            def clear(self):
+                cache.clear()
+                
+            def cache_len(self):
+                return len(cache)
+
+            self.get = types.MethodType(get, self)
+            self.set = types.MethodType(set, self)
+            self.clear = types.MethodType(clear, self)
+            self.__len__ = types.MethodType(cache_len, self)
+
+    if _OrderedDict is not None:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = _OrderedDict()
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(cache) > size:
+                        try:
+                            cache.popitem(False)
+                        except KeyError:
+                            pass
+
+                def clear(self):
+                    cache.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
+    else:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = {}
+                key_fifo = collections.deque([], size)
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(key_fifo) > size:
+                        cache.pop(key_fifo.popleft(), None)
+                    key_fifo.append(key)
+
+                def clear(self):
+                    cache.clear()
+                    key_fifo.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
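+    # Hedged illustrative sketch (not part of the upstream source): both cache classes
+    # expose get/set/clear and return the not_in_cache sentinel on a miss; _FifoCache
+    # additionally evicts the oldest entries once more than `size` keys are stored.
+    #~ c = ParserElement._FifoCache(size=128)
+    #~ c.get("key") is c.not_in_cache   # -> True, nothing cached yet
+    #~ c.set("key", 42)
+    #~ c.get("key")                     # -> 42
+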
+    # argument cache for optimizing repeated calls when backtracking through recursive expressions
+    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
+    packrat_cache_lock = RLock()
+    packrat_cache_stats = [0, 0]
+
+    # this method gets repeatedly called during backtracking with the same arguments -
+    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
+    def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
+        HIT, MISS = 0, 1
+        lookup = (self, instring, loc, callPreParse, doActions)
+        with ParserElement.packrat_cache_lock:
+            cache = ParserElement.packrat_cache
+            value = cache.get(lookup)
+            if value is cache.not_in_cache:
+                ParserElement.packrat_cache_stats[MISS] += 1
+                try:
+                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
+                except ParseBaseException as pe:
+                    # cache a copy of the exception, without the traceback
+                    cache.set(lookup, pe.__class__(*pe.args))
+                    raise
+                else:
+                    cache.set(lookup, (value[0], value[1].copy()))
+                    return value
+            else:
+                ParserElement.packrat_cache_stats[HIT] += 1
+                if isinstance(value, Exception):
+                    raise value
+                return (value[0], value[1].copy())
+
+    _parse = _parseNoCache
+
+    @staticmethod
+    def resetCache():
+        ParserElement.packrat_cache.clear()
+        ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
+    _packratEnabled = False
+    @staticmethod
+    def enablePackrat(cache_size_limit=128):
+        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
+           Repeated parse attempts at the same string location (which happens
+           often in many complex grammars) can immediately return a cached value,
+           instead of re-executing parsing/validating code.  Memoizing is done of
+           both valid results and parsing exceptions.
+           
+           Parameters:
+            - cache_size_limit - (default=C{128}) - if an integer value is provided
+              will limit the size of the packrat cache; if None is passed, then
+              the cache size will be unbounded; if 0 is passed, the cache will
+              be effectively disabled.
+            
+           This speedup may break existing programs that use parse actions that
+           have side-effects.  For this reason, packrat parsing is disabled when
+           you first import pyparsing.  To activate the packrat feature, your
+           program must call the class method C{ParserElement.enablePackrat()}.  If
+           your program uses C{psyco} to "compile as you go", you must call
+           C{enablePackrat} before calling C{psyco.full()}.  If you do not do this,
+           Python will crash.  For best results, call C{enablePackrat()} immediately
+           after importing pyparsing.
+           
+           Example::
+               import pyparsing
+               pyparsing.ParserElement.enablePackrat()
+        """
+        if not ParserElement._packratEnabled:
+            ParserElement._packratEnabled = True
+            if cache_size_limit is None:
+                ParserElement.packrat_cache = ParserElement._UnboundedCache()
+            else:
+                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+            ParserElement._parse = ParserElement._parseCache
+
+    def parseString( self, instring, parseAll=False ):
+        """
+        Execute the parse expression with the given string.
+        This is the main interface to the client code, once the complete
+        expression has been built.
+
+        If you want the grammar to require that the entire input string be
+        successfully parsed, then set C{parseAll} to True (equivalent to ending
+        the grammar with C{L{StringEnd()}}).
+
+        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
+        in order to report proper column numbers in parse actions.
+        If the input string contains tabs and
+        the grammar uses parse actions that use the C{loc} argument to index into the
+        string being parsed, you can ensure you have a consistent view of the input
+        string by:
+         - calling C{parseWithTabs} on your grammar before calling C{parseString}
+           (see L{I{parseWithTabs}})
+         - define your parse action using the full C{(s,loc,toks)} signature, and
+           reference the input string using the parse action's C{s} argument
+         - explicitly expand the tabs in your input string before calling
+           C{parseString}
+        
+        Example::
+            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
+            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
+        """
+        ParserElement.resetCache()
+        if not self.streamlined:
+            self.streamline()
+            #~ self.saveAsList = True
+        for e in self.ignoreExprs:
+            e.streamline()
+        if not self.keepTabs:
+            instring = instring.expandtabs()
+        try:
+            loc, tokens = self._parse( instring, 0 )
+            if parseAll:
+                loc = self.preParse( instring, loc )
+                se = Empty() + StringEnd()
+                se._parse( instring, loc )
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+        else:
+            return tokens
+
+    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
+        """
+        Scan the input string for expression matches.  Each match will return the
+        matching tokens, start location, and end location.  May be called with optional
+        C{maxMatches} argument, to clip scanning after 'n' matches are found.  If
+        C{overlap} is specified, then overlapping matches will be reported.
+
+        Note that the start and end locations are reported relative to the string
+        being parsed.  See L{I{parseString}} for more information on parsing
+        strings with embedded tabs.
+
+        Example::
+            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
+            print(source)
+            for tokens,start,end in Word(alphas).scanString(source):
+                print(' '*start + '^'*(end-start))
+                print(' '*start + tokens[0])
+        
+        prints::
+        
+            sldjf123lsdjjkf345sldkjf879lkjsfd987
+            ^^^^^
+            sldjf
+                    ^^^^^^^
+                    lsdjjkf
+                              ^^^^^^
+                              sldkjf
+                                       ^^^^^^
+                                       lkjsfd
+        """
+        if not self.streamlined:
+            self.streamline()
+        for e in self.ignoreExprs:
+            e.streamline()
+
+        if not self.keepTabs:
+            instring = _ustr(instring).expandtabs()
+        instrlen = len(instring)
+        loc = 0
+        preparseFn = self.preParse
+        parseFn = self._parse
+        ParserElement.resetCache()
+        matches = 0
+        try:
+            while loc <= instrlen and matches < maxMatches:
+                try:
+                    preloc = preparseFn( instring, loc )
+                    nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
+                except ParseException:
+                    loc = preloc+1
+                else:
+                    if nextLoc > loc:
+                        matches += 1
+                        yield tokens, preloc, nextLoc
+                        if overlap:
+                            nextloc = preparseFn( instring, loc )
+                            if nextloc > loc:
+                                loc = nextLoc
+                            else:
+                                loc += 1
+                        else:
+                            loc = nextLoc
+                    else:
+                        loc = preloc+1
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def transformString( self, instring ):
+        """
+        Extension to C{L{scanString}}, to modify matching text with modified tokens that may
+        be returned from a parse action.  To use C{transformString}, define a grammar and
+        attach a parse action to it that modifies the returned token list.
+        Invoking C{transformString()} on a target string will then scan for matches,
+        and replace the matched text patterns according to the logic in the parse
+        action.  C{transformString()} returns the resulting transformed string.
+        
+        Example::
+            wd = Word(alphas)
+            wd.setParseAction(lambda toks: toks[0].title())
+            
+            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
+        Prints::
+            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
+        """
+        out = []
+        lastE = 0
+        # force preservation of s, to minimize unwanted transformation of string, and to
+        # keep string locs straight between transformString and scanString
+        self.keepTabs = True
+        try:
+            for t,s,e in self.scanString( instring ):
+                out.append( instring[lastE:s] )
+                if t:
+                    if isinstance(t,ParseResults):
+                        out += t.asList()
+                    elif isinstance(t,list):
+                        out += t
+                    else:
+                        out.append(t)
+                lastE = e
+            out.append(instring[lastE:])
+            out = [o for o in out if o]
+            return "".join(map(_ustr,_flatten(out)))
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def searchString( self, instring, maxMatches=_MAX_INT ):
+        """
+        Another extension to C{L{scanString}}, simplifying the access to the tokens found
+        to match the given parse expression.  May be called with optional
+        C{maxMatches} argument, to clip searching after 'n' matches are found.
+        
+        Example::
+            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
+            cap_word = Word(alphas.upper(), alphas.lower())
+            
+            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+
+            # the sum() builtin can be used to merge results into a single ParseResults object
+            print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
+        prints::
+            [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
+            ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
+        """
+        try:
+            return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+        """
+        Generator method to split a string using the given expression as a separator.
+        May be called with optional C{maxsplit} argument, to limit the number of splits;
+        and the optional C{includeSeparators} argument (default=C{False}), if the separating
+        matching text should be included in the split results.
+        
+        Example::        
+            punc = oneOf(list(".,;:/-!?"))
+            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+        prints::
+            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+        """
+        splits = 0
+        last = 0
+        for t,s,e in self.scanString(instring, maxMatches=maxsplit):
+            yield instring[last:s]
+            if includeSeparators:
+                yield t[0]
+            last = e
+        yield instring[last:]
+
+    def __add__(self, other ):
+        """
+        Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
+        converts them to L{Literal}s by default.
+        
+        Example::
+            greet = Word(alphas) + "," + Word(alphas) + "!"
+            hello = "Hello, World!"
+            print (hello, "->", greet.parseString(hello))
+        Prints::
+            Hello, World! -> ['Hello', ',', 'World', '!']
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return And( [ self, other ] )
+
+    def __radd__(self, other ):
+        """
+        Implementation of + operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other + self
+
+    def __sub__(self, other):
+        """
+        Implementation of - operator, returns C{L{And}} with error stop
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return self + And._ErrorStop() + other
+
+    def __rsub__(self, other ):
+        """
+        Implementation of - operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other - self
+
+    def __mul__(self,other):
+        """
+        Implementation of * operator, allows use of C{expr * 3} in place of
+        C{expr + expr + expr}.  Expressions may also be multiplied by a 2-integer
+        tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
+        may also include C{None} as in:
+         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
+              to C{expr*n + L{ZeroOrMore}(expr)}
+              (read as "at least n instances of C{expr}")
+         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
+              (read as "0 to n instances of C{expr}")
+         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
+         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
+
+        Note that C{expr*(None,n)} does not raise an exception if
+        more than n exprs exist in the input stream; that is,
+        C{expr*(None,n)} does not enforce a maximum number of expr
+        occurrences.  If this behavior is desired, then write
+        C{expr*(None,n) + ~expr}
+        """
+        if isinstance(other,int):
+            minElements, optElements = other,0
+        elif isinstance(other,tuple):
+            other = (other + (None, None))[:2]
+            if other[0] is None:
+                other = (0, other[1])
+            if isinstance(other[0],int) and other[1] is None:
+                if other[0] == 0:
+                    return ZeroOrMore(self)
+                if other[0] == 1:
+                    return OneOrMore(self)
+                else:
+                    return self*other[0] + ZeroOrMore(self)
+            elif isinstance(other[0],int) and isinstance(other[1],int):
+                minElements, optElements = other
+                optElements -= minElements
+            else:
+                raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))
+        else:
+            raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))
+
+        if minElements < 0:
+            raise ValueError("cannot multiply ParserElement by negative value")
+        if optElements < 0:
+            raise ValueError("second tuple value must be greater or equal to first tuple value")
+        if minElements == optElements == 0:
+            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+        if (optElements):
+            def makeOptionalList(n):
+                if n>1:
+                    return Optional(self + makeOptionalList(n-1))
+                else:
+                    return Optional(self)
+            if minElements:
+                if minElements == 1:
+                    ret = self + makeOptionalList(optElements)
+                else:
+                    ret = And([self]*minElements) + makeOptionalList(optElements)
+            else:
+                ret = makeOptionalList(optElements)
+        else:
+            if minElements == 1:
+                ret = self
+            else:
+                ret = And([self]*minElements)
+        return ret
+
+    def __rmul__(self, other):
+        return self.__mul__(other)
+
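+    # Hedged illustrative sketch of the multiplication shorthand documented in __mul__
+    # above (not part of the upstream source):
+    #~ Word(nums) * 3          # same as Word(nums) + Word(nums) + Word(nums)
+    #~ Word(nums) * (2, 4)     # match 2 to 4 numbers
+    #~ Word(nums) * (1, None)  # same as OneOrMore(Word(nums))
+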
+    def __or__(self, other ):
+        """
+        Implementation of | operator - returns C{L{MatchFirst}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return MatchFirst( [ self, other ] )
+
+    def __ror__(self, other ):
+        """
+        Implementation of | operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other | self
+
+    def __xor__(self, other ):
+        """
+        Implementation of ^ operator - returns C{L{Or}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return Or( [ self, other ] )
+
+    def __rxor__(self, other ):
+        """
+        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other ^ self
+
+    def __and__(self, other ):
+        """
+        Implementation of & operator - returns C{L{Each}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return Each( [ self, other ] )
+
+    def __rand__(self, other ):
+        """
+        Implementation of & operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other & self
+
+    def __invert__( self ):
+        """
+        Implementation of ~ operator - returns C{L{NotAny}}
+        """
+        return NotAny( self )
+
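+    # Hedged illustrative sketch of the remaining operator shorthands above (not part
+    # of the upstream source):
+    #~ Word(alphas) | Word(nums)    # MatchFirst: first alternative that matches wins
+    #~ Word(alphas) ^ Word(nums)    # Or: longest of the matching alternatives wins
+    #~ Literal("a") & Literal("b")  # Each: both must appear, in any order
+    #~ ~Literal("end")              # NotAny: negative lookahead, consumes no input
+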
+    def __call__(self, name=None):
+        """
+        Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
+        
+        If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
+        passed as C{True}.
+           
+        If C{name} is omitted, same as calling C{L{copy}}.
+
+        Example::
+            # these are equivalent
+            userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
+            userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")             
+        """
+        if name is not None:
+            return self.setResultsName(name)
+        else:
+            return self.copy()
+
+    def suppress( self ):
+        """
+        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
+        cluttering up returned output.
+        """
+        return Suppress( self )
+
+    def leaveWhitespace( self ):
+        """
+        Disables the skipping of whitespace before matching the characters in the
+        C{ParserElement}'s defined pattern.  This is normally only used internally by
+        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
+        """
+        self.skipWhitespace = False
+        return self
+
+    def setWhitespaceChars( self, chars ):
+        """
+        Overrides the default whitespace chars
+        """
+        self.skipWhitespace = True
+        self.whiteChars = chars
+        self.copyDefaultWhiteChars = False
+        return self
+
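+    # Hedged illustrative sketch (not from the upstream docstring): excluding newline
+    # from an expression's whitespace characters restricts matching to a single line.
+    #~ same_line_words = OneOrMore(Word(alphas).setWhitespaceChars(" \t"))
+    #~ same_line_words.parseString("abc def\nghi jkl")   # -> ['abc', 'def']
+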
+    def parseWithTabs( self ):
+        """
+        Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
+        Must be called before C{parseString} when the input grammar contains elements that
+        match C{<TAB>} characters.
+        """
+        self.keepTabs = True
+        return self
+
+    def ignore( self, other ):
+        """
+        Define expression to be ignored (e.g., comments) while doing pattern
+        matching; may be called repeatedly, to define multiple comment or other
+        ignorable patterns.
+        
+        Example::
+            patt = OneOrMore(Word(alphas))
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
+            
+            patt.ignore(cStyleComment)
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
+        """
+        if isinstance(other, basestring):
+            other = Suppress(other)
+
+        if isinstance( other, Suppress ):
+            if other not in self.ignoreExprs:
+                self.ignoreExprs.append(other)
+        else:
+            self.ignoreExprs.append( Suppress( other.copy() ) )
+        return self
+
+    def setDebugActions( self, startAction, successAction, exceptionAction ):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        """
+        self.debugActions = (startAction or _defaultStartDebugAction,
+                             successAction or _defaultSuccessDebugAction,
+                             exceptionAction or _defaultExceptionDebugAction)
+        self.debug = True
+        return self
+
+    def setDebug( self, flag=True ):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        Set C{flag} to True to enable, False to disable.
+
+        Example::
+            wd = Word(alphas).setName("alphaword")
+            integer = Word(nums).setName("numword")
+            term = wd | integer
+            
+            # turn on debugging for wd
+            wd.setDebug()
+
+            OneOrMore(term).parseString("abc 123 xyz 890")
+        
+        prints::
+            Match alphaword at loc 0(1,1)
+            Matched alphaword -> ['abc']
+            Match alphaword at loc 3(1,4)
+            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
+            Match alphaword at loc 7(1,8)
+            Matched alphaword -> ['xyz']
+            Match alphaword at loc 11(1,12)
+            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
+            Match alphaword at loc 15(1,16)
+            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
+
+        The output shown is that produced by the default debug actions - custom debug actions can be
+        specified using L{setDebugActions}. Prior to attempting
+        to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
+        is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
+        message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
+        which makes debugging and exception messages easier to understand - for instance, the default
+        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
+        """
+        if flag:
+            self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
+        else:
+            self.debug = False
+        return self
+
+    def __str__( self ):
+        return self.name
+
+    def __repr__( self ):
+        return _ustr(self)
+
+    def streamline( self ):
+        self.streamlined = True
+        self.strRepr = None
+        return self
+
+    def checkRecursion( self, parseElementList ):
+        pass
+
+    def validate( self, validateTrace=[] ):
+        """
+        Check defined expressions for valid structure, check for infinite recursive definitions.
+        """
+        self.checkRecursion( [] )
+
+    def parseFile( self, file_or_filename, parseAll=False ):
+        """
+        Execute the parse expression on the given file or filename.
+        If a filename is specified (instead of a file object),
+        the entire file is opened, read, and closed before parsing.
+        """
+        try:
+            file_contents = file_or_filename.read()
+        except AttributeError:
+            with open(file_or_filename, "r") as f:
+                file_contents = f.read()
+        try:
+            return self.parseString(file_contents, parseAll)
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def __eq__(self,other):
+        if isinstance(other, ParserElement):
+            return self is other or vars(self) == vars(other)
+        elif isinstance(other, basestring):
+            return self.matches(other)
+        else:
+            return super(ParserElement,self)==other
+
+    def __ne__(self,other):
+        return not (self == other)
+
+    def __hash__(self):
+        return hash(id(self))
+
+    def __req__(self,other):
+        return self == other
+
+    def __rne__(self,other):
+        return not (self == other)
+
+    def matches(self, testString, parseAll=True):
+        """
+        Method for quick testing of a parser against a test string. Good for simple 
+        inline microtests of sub expressions while building up larger parser.
+           
+        Parameters:
+         - testString - to test against this expression for a match
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+            
+        Example::
+            expr = Word(nums)
+            assert expr.matches("100")
+        """
+        try:
+            self.parseString(_ustr(testString), parseAll=parseAll)
+            return True
+        except ParseBaseException:
+            return False
+                
+    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
+        """
+        Execute the parse expression on a series of test strings, showing each
+        test, the parsed results or where the parse failed. Quick and easy way to
+        run a parse expression against a list of sample strings.
+           
+        Parameters:
+         - tests - a list of separate test strings, or a multiline string of test strings
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests           
+         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test 
+              string; pass None to disable comment filtering
+         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
+              if False, only dump nested list
+         - printResults - (default=C{True}) prints test output to stdout
+         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing
+
+        Returns: a (success, results) tuple, where success indicates that all tests succeeded
+        (or failed if C{failureTests} is True), and the results contain a list of lines of each 
+        test's output
+        
+        Example::
+            number_expr = pyparsing_common.number.copy()
+
+            result = number_expr.runTests('''
+                # unsigned integer
+                100
+                # negative integer
+                -100
+                # float with scientific notation
+                6.02e23
+                # integer with scientific notation
+                1e-12
+                ''')
+            print("Success" if result[0] else "Failed!")
+
+            result = number_expr.runTests('''
+                # stray character
+                100Z
+                # missing leading digit before '.'
+                -.100
+                # too many '.'
+                3.14.159
+                ''', failureTests=True)
+            print("Success" if result[0] else "Failed!")
+        prints::
+            # unsigned integer
+            100
+            [100]
+
+            # negative integer
+            -100
+            [-100]
+
+            # float with scientific notation
+            6.02e23
+            [6.02e+23]
+
+            # integer with scientific notation
+            1e-12
+            [1e-12]
+
+            Success
+            
+            # stray character
+            100Z
+               ^
+            FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+            # missing leading digit before '.'
+            -.100
+            ^
+            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+            # too many '.'
+            3.14.159
+                ^
+            FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+            Success
+
+        Each test string must be on a single line. If you want to test a string that spans multiple
+        lines, create a test like this::
+
+            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+        
+        (Note that this is a raw string literal, you must include the leading 'r'.)
+        """
+        if isinstance(tests, basestring):
+            tests = list(map(str.strip, tests.rstrip().splitlines()))
+        if isinstance(comment, basestring):
+            comment = Literal(comment)
+        allResults = []
+        comments = []
+        success = True
+        for t in tests:
+            if comment is not None and comment.matches(t, False) or comments and not t:
+                comments.append(t)
+                continue
+            if not t:
+                continue
+            out = ['\n'.join(comments), t]
+            comments = []
+            try:
+                t = t.replace(r'\n','\n')
+                result = self.parseString(t, parseAll=parseAll)
+                out.append(result.dump(full=fullDump))
+                success = success and not failureTests
+            except ParseBaseException as pe:
+                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
+                if '\n' in t:
+                    out.append(line(pe.loc, t))
+                    out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
+                else:
+                    out.append(' '*pe.loc + '^' + fatal)
+                out.append("FAIL: " + str(pe))
+                success = success and failureTests
+                result = pe
+            except Exception as exc:
+                out.append("FAIL-EXCEPTION: " + str(exc))
+                success = success and failureTests
+                result = exc
+
+            if printResults:
+                if fullDump:
+                    out.append('')
+                print('\n'.join(out))
+
+            allResults.append((t, result))
+        
+        return success, allResults
+
+        
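+# Minimal usage sketch (hypothetical helper, not part of the pyparsing API): exercises the
+# ParserElement conveniences defined above -- __call__ as a results-name shortcut,
+# suppress() for dropping punctuation, and matches() for quick inline checks.
+# Word, Suppress, alphas and nums are defined elsewhere in this module.
+def _demo_parser_element_basics():
+    name = Word(alphas)("first")            # same as .setResultsName("first")
+    sep = Suppress(",")                     # matched but omitted from the results
+    pair = name + sep + Word(nums)("age")
+    result = pair.parseString("Bob , 42")
+    assert result.first == "Bob" and result.age == "42"
+    assert Word(nums).matches("123")        # quick inline micro-test
+    assert not Word(nums).matches("abc")
+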
+class Token(ParserElement):
+    """
+    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
+    """
+    def __init__( self ):
+        super(Token,self).__init__( savelist=False )
+
+
+class Empty(Token):
+    """
+    An empty token, will always match.
+    """
+    def __init__( self ):
+        super(Empty,self).__init__()
+        self.name = "Empty"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+
+class NoMatch(Token):
+    """
+    A token that will never match.
+    """
+    def __init__( self ):
+        super(NoMatch,self).__init__()
+        self.name = "NoMatch"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.errmsg = "Unmatchable token"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Literal(Token):
+    """
+    Token to exactly match a specified string.
+    
+    Example::
+        Literal('blah').parseString('blah')  # -> ['blah']
+        Literal('blah').parseString('blahfooblah')  # -> ['blah']
+        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
+    
+    For case-insensitive matching, use L{CaselessLiteral}.
+    
+    For keyword matching (force word break before and after the matched string),
+    use L{Keyword} or L{CaselessKeyword}.
+    """
+    def __init__( self, matchString ):
+        super(Literal,self).__init__()
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Literal; use Empty() instead",
+                            SyntaxWarning, stacklevel=2)
+            self.__class__ = Empty
+        self.name = '"%s"' % _ustr(self.match)
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+
+    # Performance tuning: this routine gets called a *lot*
+    # if this is a single character match string  and the first character matches,
+    # short-circuit as quickly as possible, and avoid calling startswith
+    #~ @profile
+    def parseImpl( self, instring, loc, doActions=True ):
+        if (instring[loc] == self.firstMatchChar and
+            (self.matchLen==1 or instring.startswith(self.match,loc)) ):
+            return loc+self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+_L = Literal
+ParserElement._literalStringClass = Literal
+
+class Keyword(Token):
+    """
+    Token to exactly match a specified string as a keyword, that is, it must be
+    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
+     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
+     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
+    Accepts two optional constructor arguments in addition to the keyword string:
+     - C{identChars} is a string of characters that would be valid identifier characters,
+          defaulting to all alphanumerics + "_" and "$"
+     - C{caseless} allows case-insensitive matching, default is C{False}.
+       
+    Example::
+        Keyword("start").parseString("start")  # -> ['start']
+        Keyword("start").parseString("starting")  # -> Exception
+
+    For case-insensitive matching, use L{CaselessKeyword}.
+    """
+    DEFAULT_KEYWORD_CHARS = alphanums+"_$"
+
+    def __init__( self, matchString, identChars=None, caseless=False ):
+        super(Keyword,self).__init__()
+        if identChars is None:
+            identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Keyword; use Empty() instead",
+                            SyntaxWarning, stacklevel=2)
+        self.name = '"%s"' % self.match
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+        self.caseless = caseless
+        if caseless:
+            self.caselessmatch = matchString.upper()
+            identChars = identChars.upper()
+        self.identChars = set(identChars)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.caseless:
+            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
+                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
+                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
+                return loc+self.matchLen, self.match
+        else:
+            if (instring[loc] == self.firstMatchChar and
+                (self.matchLen==1 or instring.startswith(self.match,loc)) and
+                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
+                (loc == 0 or instring[loc-1] not in self.identChars) ):
+                return loc+self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
+    def copy(self):
+        c = super(Keyword,self).copy()
+        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        return c
+
+    @staticmethod
+    def setDefaultKeywordChars( chars ):
+        """Overrides the default Keyword chars
+        """
+        Keyword.DEFAULT_KEYWORD_CHARS = chars
+
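+# Minimal usage sketch (hypothetical helper, not part of the pyparsing API): the
+# Literal/Keyword contrast described in the docstrings above -- Literal("if") matches the
+# leading "if" of "ifAndOnlyIf", while Keyword("if") requires a non-keyword character (or
+# end of string) on either side of the match.
+def _demo_literal_vs_keyword():
+    assert Literal("if").matches("ifAndOnlyIf", parseAll=False)
+    assert not Keyword("if").matches("ifAndOnlyIf", parseAll=False)
+    assert Keyword("if").matches("if(y==2)", parseAll=False)
+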
+class CaselessLiteral(Literal):
+    """
+    Token to match a specified string, ignoring case of letters.
+    Note: the matched results will always be in the case of the given
+    match string, NOT the case of the input text.
+
+    Example::
+        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
+        
+    (Contrast with example for L{CaselessKeyword}.)
+    """
+    def __init__( self, matchString ):
+        super(CaselessLiteral,self).__init__( matchString.upper() )
+        # Preserve the defining literal.
+        self.returnString = matchString
+        self.name = "'%s'" % self.returnString
+        self.errmsg = "Expected " + self.name
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if instring[ loc:loc+self.matchLen ].upper() == self.match:
+            return loc+self.matchLen, self.returnString
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class CaselessKeyword(Keyword):
+    """
+    Caseless version of L{Keyword}.
+
+    Example::
+        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
+        
+    (Contrast with example for L{CaselessLiteral}.)
+    """
+    def __init__( self, matchString, identChars=None ):
+        super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
+             (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ):
+            return loc+self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
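+# Minimal usage sketch (hypothetical helper, not part of the pyparsing API): the contrast
+# the two docstrings above point at -- CaselessLiteral("CMD") matches the leading "Cmd" of
+# "Cmd10" and always returns the defining string, while CaselessKeyword("CMD") rejects it
+# because the following "1" is a keyword character.
+def _demo_caseless():
+    assert CaselessLiteral("CMD").matches("Cmd10", parseAll=False)
+    assert not CaselessKeyword("CMD").matches("Cmd10", parseAll=False)
+    assert CaselessLiteral("CMD").parseString("cmd")[0] == "CMD"
+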
+class CloseMatch(Token):
+    """
+    A variation on L{Literal} which matches "close" matches, that is, 
+    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
+     - C{match_string} - string to be matched
+     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
+    
+    The results from a successful parse will contain the matched text from the input string and the following named results:
+     - C{mismatches} - a list of the positions within the match_string where mismatches were found
+     - C{original} - the original match_string used to compare against the input string
+    
+    If C{mismatches} is an empty list, then the match was an exact match.
+    
+    Example::
+        patt = CloseMatch("ATCATCGAATGGA")
+        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
+        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
+
+        # exact match
+        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
+
+        # close match allowing up to 2 mismatches
+        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
+        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
+    """
+    def __init__(self, match_string, maxMismatches=1):
+        super(CloseMatch,self).__init__()
+        self.name = match_string
+        self.match_string = match_string
+        self.maxMismatches = maxMismatches
+        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
+        self.mayIndexError = False
+        self.mayReturnEmpty = False
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        start = loc
+        instrlen = len(instring)
+        maxloc = start + len(self.match_string)
+
+        if maxloc <= instrlen:
+            match_string = self.match_string
+            match_stringloc = 0
+            mismatches = []
+            maxMismatches = self.maxMismatches
+
+            for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)):
+                src,mat = s_m
+                if src != mat:
+                    mismatches.append(match_stringloc)
+                    if len(mismatches) > maxMismatches:
+                        break
+            else:
+                loc = match_stringloc + 1
+                results = ParseResults([instring[start:loc]])
+                results['original'] = self.match_string
+                results['mismatches'] = mismatches
+                return loc, results
+
+        raise ParseException(instring, loc, self.errmsg, self)
+
+
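+# Minimal usage sketch (hypothetical helper, not part of the pyparsing API): reading back
+# the named results ('original' and 'mismatches') documented for CloseMatch above.
+def _demo_close_match():
+    patt = CloseMatch("ATCATCGAATGGA")
+    result = patt.parseString("ATCATCGAAXGGA")
+    assert result[0] == "ATCATCGAAXGGA"
+    assert result["original"] == "ATCATCGAATGGA"
+    assert list(result["mismatches"]) == [9]
+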
+class Word(Token):
+    """
+    Token for matching words composed of allowed character sets.
+    Defined with string containing all allowed initial characters,
+    an optional string containing allowed body characters (if omitted,
+    defaults to the initial character set), and an optional minimum,
+    maximum, and/or exact length.  The default value for C{min} is 1 (a
+    minimum value < 1 is not valid); the default values for C{max} and C{exact}
+    are 0, meaning no maximum or exact length restriction. An optional
+    C{excludeChars} parameter can list characters that might be found in 
+    the input C{bodyChars} string; useful to define a word of all printables
+    except for one or two characters, for instance.
+    
+    L{srange} is useful for defining custom character set strings for defining 
+    C{Word} expressions, using range notation from regular expression character sets.
+    
+    A common mistake is to use C{Word} to match a specific literal string, as in 
+    C{Word("Address")}. Remember that C{Word} uses the string argument to define
+    I{sets} of matchable characters. This expression would match "Add", "AAA",
+    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
+    To match an exact literal string, use L{Literal} or L{Keyword}.
+
+    pyparsing includes helper strings for building Words:
+     - L{alphas}
+     - L{nums}
+     - L{alphanums}
+     - L{hexnums}
+     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
+     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
+     - L{printables} (any non-whitespace character)
+
+    Example::
+        # a word composed of digits
+        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
+        
+        # a word with a leading capital, and zero or more lowercase
+        capital_word = Word(alphas.upper(), alphas.lower())
+
+        # hostnames are alphanumeric, with leading alpha, and '-'
+        hostname = Word(alphas, alphanums+'-')
+        
+        # roman numeral (not a strict parser, accepts invalid mix of characters)
+        roman = Word("IVXLCDM")
+        
+        # any string of non-whitespace characters, except for ','
+        csv_value = Word(printables, excludeChars=",")
+    """
+    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
+        super(Word,self).__init__()
+        if excludeChars:
+            initChars = ''.join(c for c in initChars if c not in excludeChars)
+            if bodyChars:
+                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
+        self.initCharsOrig = initChars
+        self.initChars = set(initChars)
+        if bodyChars :
+            self.bodyCharsOrig = bodyChars
+            self.bodyChars = set(bodyChars)
+        else:
+            self.bodyCharsOrig = initChars
+            self.bodyChars = set(initChars)
+
+        self.maxSpecified = max > 0
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.asKeyword = asKeyword
+
+        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
+            if self.bodyCharsOrig == self.initCharsOrig:
+                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
+            elif len(self.initCharsOrig) == 1:
+                self.reString = "%s[%s]*" % \
+                                      (re.escape(self.initCharsOrig),
+                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
+            else:
+                self.reString = "[%s][%s]*" % \
+                                      (_escapeRegexRangeChars(self.initCharsOrig),
+                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
+            if self.asKeyword:
+                self.reString = r"\b"+self.reString+r"\b"
+            try:
+                self.re = re.compile( self.reString )
+            except Exception:
+                self.re = None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.re:
+            result = self.re.match(instring,loc)
+            if not result:
+                raise ParseException(instring, loc, self.errmsg, self)
+
+            loc = result.end()
+            return loc, result.group()
+
+        if not(instring[ loc ] in self.initChars):
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        instrlen = len(instring)
+        bodychars = self.bodyChars
+        maxloc = start + self.maxLen
+        maxloc = min( maxloc, instrlen )
+        while loc < maxloc and instring[loc] in bodychars:
+            loc += 1
+
+        throwException = False
+        if loc - start < self.minLen:
+            throwException = True
+        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
+            throwException = True
+        if self.asKeyword:
+            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
+                throwException = True
+
+        if throwException:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__( self ):
+        try:
+            return super(Word,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+
+            def charsAsStr(s):
+                if len(s)>4:
+                    return s[:4]+"..."
+                else:
+                    return s
+
+            if ( self.initCharsOrig != self.bodyCharsOrig ):
+                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
+            else:
+                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
+
+        return self.strRepr
+
+
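+# Minimal usage sketch (hypothetical helper, not part of the pyparsing API): Word defines
+# *sets* of allowed characters rather than a literal string, as the docstring above
+# stresses.  alphas, alphanums, nums and printables are defined elsewhere in this module.
+def _demo_word():
+    hostname = Word(alphas, alphanums + "-")
+    assert hostname.parseString("sharon-dev7")[0] == "sharon-dev7"
+    csv_value = Word(printables, excludeChars=",")
+    assert csv_value.parseString("abc,def")[0] == "abc"
+    assert Word("Address").matches("dAred")   # character-set match, not the literal "Address"
+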
+class Regex(Token):
+    r"""
+    Token for matching strings that match a given regular expression.
+    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
+    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as 
+    named parse results.
+
+    Example::
+        realnum = Regex(r"[+-]?\d+\.\d*")
+        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
+        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
+        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
+    """
+    compiledREtype = type(re.compile("[A-Z]"))
+    def __init__( self, pattern, flags=0):
+        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
+        super(Regex,self).__init__()
+
+        if isinstance(pattern, basestring):
+            if not pattern:
+                warnings.warn("null string passed to Regex; use Empty() instead",
+                        SyntaxWarning, stacklevel=2)
+
+            self.pattern = pattern
+            self.flags = flags
+
+            try:
+                self.re = re.compile(self.pattern, self.flags)
+                self.reString = self.pattern
+            except sre_constants.error:
+                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
+                    SyntaxWarning, stacklevel=2)
+                raise
+
+        elif isinstance(pattern, Regex.compiledREtype):
+            self.re = pattern
+            self.pattern = \
+            self.reString = str(pattern)
+            self.flags = flags
+            
+        else:
+            raise ValueError("Regex may only be constructed with a string or a compiled RE object")
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        result = self.re.match(instring,loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        d = result.groupdict()
+        ret = ParseResults(result.group())
+        if d:
+            for k in d:
+                ret[k] = d[k]
+        return loc,ret
+
+    def __str__( self ):
+        try:
+            return super(Regex,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "Re:(%s)" % repr(self.pattern)
+
+        return self.strRepr
+
+
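+# Minimal usage sketch (hypothetical helper, not part of the pyparsing API): named groups
+# in the pattern are preserved as named parse results, as described above.
+def _demo_regex():
+    date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
+    result = date.parseString("2021-06-20")
+    assert (result.year, result.month, result.day) == ("2021", "06", "20")
+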
+class QuotedString(Token):
+    r"""
+    Token for matching strings that are delimited by quoting characters.
+    
+    Defined with the following parameters:
+        - quoteChar - string of one or more characters defining the quote delimiting string
+        - escChar - character to escape quotes, typically backslash (default=C{None})
+        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
+        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
+        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
+        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
+        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})
+
+    Example::
+        qs = QuotedString('"')
+        print(qs.searchString('lsjdf "This is the quote" sldjf'))
+        complex_qs = QuotedString('{{', endQuoteChar='}}')
+        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
+        sql_qs = QuotedString('"', escQuote='""')
+        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
+    prints::
+        [['This is the quote']]
+        [['This is the "quote"']]
+        [['This is the quote with "embedded" quotes']]
+    """
+    def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
+        super(QuotedString,self).__init__()
+
+        # remove white space from quote chars - won't work anyway
+        quoteChar = quoteChar.strip()
+        if not quoteChar:
+            warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+            raise SyntaxError()
+
+        if endQuoteChar is None:
+            endQuoteChar = quoteChar
+        else:
+            endQuoteChar = endQuoteChar.strip()
+            if not endQuoteChar:
+                warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+                raise SyntaxError()
+
+        self.quoteChar = quoteChar
+        self.quoteCharLen = len(quoteChar)
+        self.firstQuoteChar = quoteChar[0]
+        self.endQuoteChar = endQuoteChar
+        self.endQuoteCharLen = len(endQuoteChar)
+        self.escChar = escChar
+        self.escQuote = escQuote
+        self.unquoteResults = unquoteResults
+        self.convertWhitespaceEscapes = convertWhitespaceEscapes
+
+        if multiline:
+            self.flags = re.MULTILINE | re.DOTALL
+            self.pattern = r'%s(?:[^%s%s]' % \
+                ( re.escape(self.quoteChar),
+                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+        else:
+            self.flags = 0
+            self.pattern = r'%s(?:[^%s\n\r%s]' % \
+                ( re.escape(self.quoteChar),
+                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+        if len(self.endQuoteChar) > 1:
+            self.pattern += (
+                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
+                                               _escapeRegexRangeChars(self.endQuoteChar[i]))
+                                    for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
+                )
+        if escQuote:
+            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
+        if escChar:
+            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
+            self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
+        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
+
+        try:
+            self.re = re.compile(self.pattern, self.flags)
+            self.reString = self.pattern
+        except sre_constants.error:
+            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
+                SyntaxWarning, stacklevel=2)
+            raise
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result.group()
+
+        if self.unquoteResults:
+
+            # strip off quotes
+            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
+
+            if isinstance(ret,basestring):
+                # replace escaped whitespace
+                if '\\' in ret and self.convertWhitespaceEscapes:
+                    ws_map = {
+                        r'\t' : '\t',
+                        r'\n' : '\n',
+                        r'\f' : '\f',
+                        r'\r' : '\r',
+                    }
+                    for wslit,wschar in ws_map.items():
+                        ret = ret.replace(wslit, wschar)
+
+                # replace escaped characters
+                if self.escChar:
+                    ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
+
+                # replace escaped quotes
+                if self.escQuote:
+                    ret = ret.replace(self.escQuote, self.endQuoteChar)
+
+        return loc, ret
+
+    def __str__( self ):
+        try:
+            return super(QuotedString,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
+
+        return self.strRepr
+
+
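+# Minimal usage sketch (hypothetical helper, not part of the pyparsing API): besides the
+# escQuote style shown in the docstring, a backslash escChar is unescaped in the returned
+# text, and unquoteResults=False keeps the delimiters.
+def _demo_quoted_string():
+    qs = QuotedString('"', escChar='\\')
+    assert qs.parseString(r'"say \"hi\""')[0] == 'say "hi"'
+    raw = QuotedString('"', unquoteResults=False)
+    assert raw.parseString('"keep the quotes"')[0] == '"keep the quotes"'
+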
+class CharsNotIn(Token):
+    """
+    Token for matching words composed of characters I{not} in a given set (will
+    include whitespace in matched characters if not listed in the provided exclusion set - see example).
+    Defined with string containing all disallowed characters, and an optional
+    minimum, maximum, and/or exact length.  The default value for C{min} is 1 (a
+    minimum value < 1 is not valid); the default values for C{max} and C{exact}
+    are 0, meaning no maximum or exact length restriction.
+
+    Example::
+        # define a comma-separated-value as anything that is not a ','
+        csv_value = CharsNotIn(',')
+        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
+    prints::
+        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
+    """
+    def __init__( self, notChars, min=1, max=0, exact=0 ):
+        super(CharsNotIn,self).__init__()
+        self.skipWhitespace = False
+        self.notChars = notChars
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = ( self.minLen == 0 )
+        self.mayIndexError = False
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if instring[loc] in self.notChars:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        notchars = self.notChars
+        maxlen = min( start+self.maxLen, len(instring) )
+        while loc < maxlen and \
+              (instring[loc] not in notchars):
+            loc += 1
+
+        if loc - start < self.minLen:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__( self ):
+        try:
+            return super(CharsNotIn, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            if len(self.notChars) > 4:
+                self.strRepr = "!W:(%s...)" % self.notChars[:4]
+            else:
+                self.strRepr = "!W:(%s)" % self.notChars
+
+        return self.strRepr
+
+class White(Token):
+    """
+    Special matching class for matching whitespace.  Normally, whitespace is ignored
+    by pyparsing grammars.  This class is included when some whitespace structures
+    are significant.  Define with a string containing the whitespace characters to be
+    matched; default is C{" \\t\\r\\n"}.  Also takes optional C{min}, C{max}, and C{exact} arguments,
+    as defined for the C{L{Word}} class.
+    """
+    whiteStrs = {
+        " " : "<SPC>",
+        "\t": "<TAB>",
+        "\n": "<LF>",
+        "\r": "<CR>",
+        "\f": "<FF>",
+        }
+    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
+        super(White,self).__init__()
+        self.matchWhite = ws
+        self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
+        #~ self.leaveWhitespace()
+        self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
+        self.mayReturnEmpty = True
+        self.errmsg = "Expected " + self.name
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if not(instring[ loc ] in self.matchWhite):
+            raise ParseException(instring, loc, self.errmsg, self)
+        start = loc
+        loc += 1
+        maxloc = start + self.maxLen
+        maxloc = min( maxloc, len(instring) )
+        while loc < maxloc and instring[loc] in self.matchWhite:
+            loc += 1
+
+        if loc - start < self.minLen:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+
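+# Minimal usage sketch (hypothetical helper, not part of the pyparsing API): White is only
+# needed when whitespace itself is significant, e.g. a literal tab used as a field
+# separator.  Word, Suppress and alphas are defined elsewhere in this module.
+def _demo_white():
+    field = Word(alphas)
+    row = field + Suppress(White("\t")) + field
+    result = row.parseString("alpha\tbeta")
+    assert list(result) == ["alpha", "beta"]
+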
+class _PositionToken(Token):
+    def __init__( self ):
+        super(_PositionToken,self).__init__()
+        self.name=self.__class__.__name__
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+class GoToColumn(_PositionToken):
+    """
+    Token to advance to a specific column of input text; useful for tabular report scraping.
+    """
+    def __init__( self, colno ):
+        super(GoToColumn,self).__init__()
+        self.col = colno
+
+    def preParse( self, instring, loc ):
+        if col(loc,instring) != self.col:
+            instrlen = len(instring)
+            if self.ignoreExprs:
+                loc = self._skipIgnorables( instring, loc )
+            while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
+                loc += 1
+        return loc
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        thiscol = col( loc, instring )
+        if thiscol > self.col:
+            raise ParseException( instring, loc, "Text not in expected column", self )
+        newloc = loc + self.col - thiscol
+        ret = instring[ loc: newloc ]
+        return newloc, ret
+
+
+class LineStart(_PositionToken):
+    """
+    Matches if current position is at the beginning of a line within the parse string
+    
+    Example::
+    
+        test = '''\
+        AAA this line
+        AAA and this line
+          AAA but not this one
+        B AAA and definitely not this one
+        '''
+
+        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
+            print(t)
+    
+    Prints::
+        ['AAA', ' this line']
+        ['AAA', ' and this line']    
+
+    """
+    def __init__( self ):
+        super(LineStart,self).__init__()
+        self.errmsg = "Expected start of line"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if col(loc, instring) == 1:
+            return loc, []
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class LineEnd(_PositionToken):
+    """
+    Matches if current position is at the end of a line within the parse string
+    """
+    def __init__( self ):
+        super(LineEnd,self).__init__()
+        self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
+        self.errmsg = "Expected end of line"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if loc<len(instring):
+            if instring[loc] == "\n":
+                return loc+1, "\n"
+            else:
+                raise ParseException(instring, loc, self.errmsg, self)
+        elif loc == len(instring):
+            return loc+1, []
+        else:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+class StringStart(_PositionToken):
+    """
+    Matches if current position is at the beginning of the parse string
+    """
+    def __init__( self ):
+        super(StringStart,self).__init__()
+        self.errmsg = "Expected start of text"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if loc != 0:
+            # see if entire string up to here is just whitespace and ignoreables
+            if loc != self.preParse( instring, 0 ):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+class StringEnd(_PositionToken):
+    """
+    Matches if current position is at the end of the parse string
+    """
+    def __init__( self ):
+        super(StringEnd,self).__init__()
+        self.errmsg = "Expected end of text"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if loc < len(instring):
+            raise ParseException(instring, loc, self.errmsg, self)
+        elif loc == len(instring):
+            return loc+1, []
+        elif loc > len(instring):
+            return loc, []
+        else:
+            raise ParseException(instring, loc, self.errmsg, self)
+
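+# Minimal usage sketch (hypothetical helper, not part of the pyparsing API): LineEnd can
+# anchor an expression to the remainder of a physical line, complementing the LineStart
+# example above.  Word, Suppress, alphas and nums are defined elsewhere in this module.
+def _demo_line_end():
+    setting = Word(alphas) + Suppress("=") + Word(nums) + Suppress(LineEnd())
+    result = setting.parseString("retries=3\n")
+    assert list(result) == ["retries", "3"]
+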
+class WordStart(_PositionToken):
+    """
+    Matches if the current position is at the beginning of a Word, and
+    is not preceded by any character in a given set of C{wordChars}
+    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
+    the string being parsed, or at the beginning of a line.
+    """
+    def __init__(self, wordChars = printables):
+        super(WordStart,self).__init__()
+        self.wordChars = set(wordChars)
+        self.errmsg = "Not at the start of a word"
+
+    def parseImpl(self, instring, loc, doActions=True ):
+        if loc != 0:
+            if (instring[loc-1] in self.wordChars or
+                instring[loc] not in self.wordChars):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+class WordEnd(_PositionToken):
+    """
+    Matches if the current position is at the end of a Word, and
+    is not followed by any character in a given set of C{wordChars}
+    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
+    the string being parsed, or at the end of a line.
+    """
+    def __init__(self, wordChars = printables):
+        super(WordEnd,self).__init__()
+        self.wordChars = set(wordChars)
+        self.skipWhitespace = False
+        self.errmsg = "Not at the end of a word"
+
+    def parseImpl(self, instring, loc, doActions=True ):
+        instrlen = len(instring)
+        if instrlen>0 and loc<instrlen:
+            if (instring[loc] in self.wordChars or
+                instring[loc-1] not in self.wordChars):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+
+class Or(ParseExpression):
+    """
+    Requires that at least one C{ParseExpression} is found.
+    If two expressions match, the expression that matches the longest string will be used.
+    May be constructed using the C{'^'} operator.
+    """
+    def __init__( self, exprs, savelist = False ):
+        super(Or,self).__init__(exprs, savelist)
+        if self.exprs:
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        maxExcLoc = -1
+        maxException = None
+        matches = []
+        for e in self.exprs:
+            try:
+                loc2 = e.tryParse( instring, loc )
+            except ParseException as err:
+                err.__traceback__ = None
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring,len(instring),e.errmsg,self)
+                    maxExcLoc = len(instring)
+            else:
+                # save match among all matches, to retry longest to shortest
+                matches.append((loc2, e))
+
+        if matches:
+            matches.sort(key=lambda x: -x[0])
+            for _,e in matches:
+                try:
+                    return e._parse( instring, loc, doActions )
+                except ParseException as err:
+                    err.__traceback__ = None
+                    if err.loc > maxExcLoc:
+                        maxException = err
+                        maxExcLoc = err.loc
+
+        if maxException is not None:
+            maxException.msg = self.errmsg
+            raise maxException
+        else:
+            raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+
+    def __ixor__(self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        return self.append( other ) #Or( [ self, other ] )
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class MatchFirst(ParseExpression):
+    """
+    Requires that at least one C{ParseExpression} is found.
+    If two expressions match, the first one listed is the one that will match.
+    May be constructed using the C{'|'} operator.
+
+    Example::
+        # construct MatchFirst using '|' operator
+        
+        # watch the order of expressions to match
+        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
+        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]
+
+        # put more selective expression first
+        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
+        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
+    """
+    def __init__( self, exprs, savelist = False ):
+        super(MatchFirst,self).__init__(exprs, savelist)
+        if self.exprs:
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        maxExcLoc = -1
+        maxException = None
+        for e in self.exprs:
+            try:
+                ret = e._parse( instring, loc, doActions )
+                return ret
+            except ParseException as err:
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring,len(instring),e.errmsg,self)
+                    maxExcLoc = len(instring)
+
+        # only got here if no expression matched, raise exception for match that made it the furthest
+        else:
+            if maxException is not None:
+                maxException.msg = self.errmsg
+                raise maxException
+            else:
+                raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+    def __ior__(self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        return self.append( other ) #MatchFirst( [ self, other ] )
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class Each(ParseExpression):
+    """
+    Requires all given C{ParseExpression}s to be found, but in any order.
+    Expressions may be separated by whitespace.
+    May be constructed using the C{'&'} operator.
+
+    Example::
+        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
+        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
+        integer = Word(nums)
+        shape_attr = "shape:" + shape_type("shape")
+        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
+        color_attr = "color:" + color("color")
+        size_attr = "size:" + integer("size")
+
+        # use Each (using operator '&') to accept attributes in any order 
+        # (shape and posn are required, color and size are optional)
+        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)
+
+        shape_spec.runTests('''
+            shape: SQUARE color: BLACK posn: 100, 120
+            shape: CIRCLE size: 50 color: BLUE posn: 50,80
+            color:GREEN size:20 shape:TRIANGLE posn:20,40
+            '''
+            )
+    prints::
+        shape: SQUARE color: BLACK posn: 100, 120
+        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
+        - color: BLACK
+        - posn: ['100', ',', '120']
+          - x: 100
+          - y: 120
+        - shape: SQUARE
+
+
+        shape: CIRCLE size: 50 color: BLUE posn: 50,80
+        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
+        - color: BLUE
+        - posn: ['50', ',', '80']
+          - x: 50
+          - y: 80
+        - shape: CIRCLE
+        - size: 50
+
+
+        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
+        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
+        - color: GREEN
+        - posn: ['20', ',', '40']
+          - x: 20
+          - y: 40
+        - shape: TRIANGLE
+        - size: 20
+    """
+    def __init__( self, exprs, savelist = True ):
+        super(Each,self).__init__(exprs, savelist)
+        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+        self.skipWhitespace = True
+        self.initExprGroups = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.initExprGroups:
+            self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
+            opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
+            opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
+            self.optionals = opt1 + opt2
+            self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
+            self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
+            self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
+            self.required += self.multirequired
+            self.initExprGroups = False
+        tmpLoc = loc
+        tmpReqd = self.required[:]
+        tmpOpt  = self.optionals[:]
+        matchOrder = []
+
+        keepMatching = True
+        while keepMatching:
+            tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
+            failed = []
+            for e in tmpExprs:
+                try:
+                    tmpLoc = e.tryParse( instring, tmpLoc )
+                except ParseException:
+                    failed.append(e)
+                else:
+                    matchOrder.append(self.opt1map.get(id(e),e))
+                    if e in tmpReqd:
+                        tmpReqd.remove(e)
+                    elif e in tmpOpt:
+                        tmpOpt.remove(e)
+            if len(failed) == len(tmpExprs):
+                keepMatching = False
+
+        if tmpReqd:
+            missing = ", ".join(_ustr(e) for e in tmpReqd)
+            raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
+
+        # add any unmatched Optionals, in case they have default values defined
+        matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
+
+        resultlist = []
+        for e in matchOrder:
+            loc,results = e._parse(instring,loc,doActions)
+            resultlist.append(results)
+
+        finalResults = sum(resultlist, ParseResults([]))
+        return loc, finalResults
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class ParseElementEnhance(ParserElement):
+    """
+    Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
+    """
+    def __init__( self, expr, savelist=False ):
+        super(ParseElementEnhance,self).__init__(savelist)
+        if isinstance( expr, basestring ):
+            if issubclass(ParserElement._literalStringClass, Token):
+                expr = ParserElement._literalStringClass(expr)
+            else:
+                expr = ParserElement._literalStringClass(Literal(expr))
+        self.expr = expr
+        self.strRepr = None
+        if expr is not None:
+            self.mayIndexError = expr.mayIndexError
+            self.mayReturnEmpty = expr.mayReturnEmpty
+            self.setWhitespaceChars( expr.whiteChars )
+            self.skipWhitespace = expr.skipWhitespace
+            self.saveAsList = expr.saveAsList
+            self.callPreparse = expr.callPreparse
+            self.ignoreExprs.extend(expr.ignoreExprs)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.expr is not None:
+            return self.expr._parse( instring, loc, doActions, callPreParse=False )
+        else:
+            raise ParseException("",loc,self.errmsg,self)
+
+    def leaveWhitespace( self ):
+        self.skipWhitespace = False
+        self.expr = self.expr.copy()
+        if self.expr is not None:
+            self.expr.leaveWhitespace()
+        return self
+
+    def ignore( self, other ):
+        if isinstance( other, Suppress ):
+            if other not in self.ignoreExprs:
+                super( ParseElementEnhance, self).ignore( other )
+                if self.expr is not None:
+                    self.expr.ignore( self.ignoreExprs[-1] )
+        else:
+            super( ParseElementEnhance, self).ignore( other )
+            if self.expr is not None:
+                self.expr.ignore( self.ignoreExprs[-1] )
+        return self
+
+    def streamline( self ):
+        super(ParseElementEnhance,self).streamline()
+        if self.expr is not None:
+            self.expr.streamline()
+        return self
+
+    def checkRecursion( self, parseElementList ):
+        if self in parseElementList:
+            raise RecursiveGrammarException( parseElementList+[self] )
+        subRecCheckList = parseElementList[:] + [ self ]
+        if self.expr is not None:
+            self.expr.checkRecursion( subRecCheckList )
+
+    def validate( self, validateTrace=[] ):
+        tmp = validateTrace[:]+[self]
+        if self.expr is not None:
+            self.expr.validate(tmp)
+        self.checkRecursion( [] )
+
+    def __str__( self ):
+        try:
+            return super(ParseElementEnhance,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None and self.expr is not None:
+            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
+        return self.strRepr
+
+
+class FollowedBy(ParseElementEnhance):
+    """
+    Lookahead matching of the given parse expression.  C{FollowedBy}
+    does I{not} advance the parsing position within the input string, it only
+    verifies that the specified parse expression matches at the current
+    position.  C{FollowedBy} always returns a null token list.
+
+    Example::
+        # use FollowedBy to match a label only if it is followed by a ':'
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        
+        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+    prints::
+        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+    """
+    def __init__( self, expr ):
+        super(FollowedBy,self).__init__(expr)
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self.expr.tryParse( instring, loc )
+        return loc, []
+
+
+class NotAny(ParseElementEnhance):
+    """
+    Lookahead to disallow matching with the given parse expression.  C{NotAny}
+    does I{not} advance the parsing position within the input string, it only
+    verifies that the specified parse expression does I{not} match at the current
+    position.  Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
+    always returns a null token list.  May be constructed using the '~' operator.
+
+    Example::
+        
+    """
+    def __init__( self, expr ):
+        super(NotAny,self).__init__(expr)
+        #~ self.leaveWhitespace()
+        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+        self.mayReturnEmpty = True
+        self.errmsg = "Found unwanted token, "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.expr.canParseNext(instring, loc):
+            raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+        return self.strRepr
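+
+# Illustrative usage sketch (added comment, not part of the upstream pyparsing source):
+# NotAny is the expression behind the unary '~' operator.  Assuming the CaselessKeyword
+# and Word classes defined earlier in this module, a keyword-aware identifier could be:
+#
+#   AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split())
+#   identifier = ~(AND | OR | NOT) + Word(alphas)   # reject reserved words
+#   identifier.parseString("color")   # -> ['color']
+#   identifier.parseString("AND")     # raises ParseException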
+
+class _MultipleMatch(ParseElementEnhance):
+    def __init__( self, expr, stopOn=None):
+        super(_MultipleMatch, self).__init__(expr)
+        self.saveAsList = True
+        ender = stopOn
+        if isinstance(ender, basestring):
+            ender = ParserElement._literalStringClass(ender)
+        self.not_ender = ~ender if ender is not None else None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self_expr_parse = self.expr._parse
+        self_skip_ignorables = self._skipIgnorables
+        check_ender = self.not_ender is not None
+        if check_ender:
+            try_not_ender = self.not_ender.tryParse
+        
+        # must be at least one (but first see if we are the stopOn sentinel;
+        # if so, fail)
+        if check_ender:
+            try_not_ender(instring, loc)
+        loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
+        try:
+            hasIgnoreExprs = (not not self.ignoreExprs)
+            while 1:
+                if check_ender:
+                    try_not_ender(instring, loc)
+                if hasIgnoreExprs:
+                    preloc = self_skip_ignorables( instring, loc )
+                else:
+                    preloc = loc
+                loc, tmptokens = self_expr_parse( instring, preloc, doActions )
+                if tmptokens or tmptokens.haskeys():
+                    tokens += tmptokens
+        except (ParseException,IndexError):
+            pass
+
+        return loc, tokens
+        
+class OneOrMore(_MultipleMatch):
+    """
+    Repetition of one or more of the given expression.
+    
+    Parameters:
+     - expr - expression that must match one or more times
+     - stopOn - (default=C{None}) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition 
+          expression)          
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: BLACK"
+        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]
+
+        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
+        
+        # could also be written as
+        (attr_expr * (1,)).parseString(text).pprint()
+    """
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + _ustr(self.expr) + "}..."
+
+        return self.strRepr
+
+class ZeroOrMore(_MultipleMatch):
+    """
+    Optional repetition of zero or more of the given expression.
+    
+    Parameters:
+     - expr - expression that must match zero or more times
+     - stopOn - (default=C{None}) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition 
+          expression)          
+
+    Example: similar to L{OneOrMore}
+    """
+    def __init__( self, expr, stopOn=None):
+        super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
+        self.mayReturnEmpty = True
+        
+    def parseImpl( self, instring, loc, doActions=True ):
+        try:
+            return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
+        except (ParseException,IndexError):
+            return loc, []
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]..."
+
+        return self.strRepr
+
+class _NullToken(object):
+    def __bool__(self):
+        return False
+    __nonzero__ = __bool__
+    def __str__(self):
+        return ""
+
+_optionalNotMatched = _NullToken()
+class Optional(ParseElementEnhance):
+    """
+    Optional matching of the given expression.
+
+    Parameters:
+     - expr - expression that must match zero or more times
+     - default (optional) - value to be returned if the optional expression is not found.
+
+    Example::
+        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
+        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
+        zip.runTests('''
+            # traditional ZIP code
+            12345
+            
+            # ZIP+4 form
+            12101-0001
+            
+            # invalid ZIP
+            98765-
+            ''')
+    prints::
+        # traditional ZIP code
+        12345
+        ['12345']
+
+        # ZIP+4 form
+        12101-0001
+        ['12101-0001']
+
+        # invalid ZIP
+        98765-
+             ^
+        FAIL: Expected end of text (at char 5), (line:1, col:6)
+    """
+    def __init__( self, expr, default=_optionalNotMatched ):
+        super(Optional,self).__init__( expr, savelist=False )
+        self.saveAsList = self.expr.saveAsList
+        self.defaultValue = default
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        try:
+            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
+        except (ParseException,IndexError):
+            if self.defaultValue is not _optionalNotMatched:
+                if self.expr.resultsName:
+                    tokens = ParseResults([ self.defaultValue ])
+                    tokens[self.expr.resultsName] = self.defaultValue
+                else:
+                    tokens = [ self.defaultValue ]
+            else:
+                tokens = []
+        return loc, tokens
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]"
+
+        return self.strRepr
+
+class SkipTo(ParseElementEnhance):
+    """
+    Token for skipping over all undefined text until the matched expression is found.
+
+    Parameters:
+     - expr - target expression marking the end of the data to be skipped
+     - include - (default=C{False}) if True, the target expression is also parsed 
+          (the skipped text and target expression are returned as a 2-element list).
+     - ignore - (default=C{None}) used to define grammars (typically quoted strings and 
+          comments) that might contain false matches to the target expression
+     - failOn - (default=C{None}) define expressions that are not allowed to be 
+          included in the skipped text; if found before the target expression is found, 
+          the SkipTo is not a match
+
+    Example::
+        report = '''
+            Outstanding Issues Report - 1 Jan 2000
+
+               # | Severity | Description                               |  Days Open
+            -----+----------+-------------------------------------------+-----------
+             101 | Critical | Intermittent system crash                 |          6
+              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
+              79 | Minor    | System slow when running too many reports |         47
+            '''
+        integer = Word(nums)
+        SEP = Suppress('|')
+        # use SkipTo to simply match everything up until the next SEP
+        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+        # - parse action will call token.strip() for each matched token, i.e., the description body
+        string_data = SkipTo(SEP, ignore=quotedString)
+        string_data.setParseAction(tokenMap(str.strip))
+        ticket_expr = (integer("issue_num") + SEP 
+                      + string_data("sev") + SEP 
+                      + string_data("desc") + SEP 
+                      + integer("days_open"))
+        
+        for tkt in ticket_expr.searchString(report):
+            print(tkt.dump())
+    prints::
+        ['101', 'Critical', 'Intermittent system crash', '6']
+        - days_open: 6
+        - desc: Intermittent system crash
+        - issue_num: 101
+        - sev: Critical
+        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+        - days_open: 14
+        - desc: Spelling error on Login ('log|n')
+        - issue_num: 94
+        - sev: Cosmetic
+        ['79', 'Minor', 'System slow when running too many reports', '47']
+        - days_open: 47
+        - desc: System slow when running too many reports
+        - issue_num: 79
+        - sev: Minor
+    """
+    def __init__( self, other, include=False, ignore=None, failOn=None ):
+        super( SkipTo, self ).__init__( other )
+        self.ignoreExpr = ignore
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.includeMatch = include
+        self.asList = False
+        if isinstance(failOn, basestring):
+            self.failOn = ParserElement._literalStringClass(failOn)
+        else:
+            self.failOn = failOn
+        self.errmsg = "No match found for "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        startloc = loc
+        instrlen = len(instring)
+        expr = self.expr
+        expr_parse = self.expr._parse
+        self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
+        self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+        
+        tmploc = loc
+        while tmploc <= instrlen:
+            if self_failOn_canParseNext is not None:
+                # break if failOn expression matches
+                if self_failOn_canParseNext(instring, tmploc):
+                    break
+                    
+            if self_ignoreExpr_tryParse is not None:
+                # advance past ignore expressions
+                while 1:
+                    try:
+                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+                    except ParseBaseException:
+                        break
+            
+            try:
+                expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+            except (ParseException, IndexError):
+                # no match, advance loc in string
+                tmploc += 1
+            else:
+                # matched skipto expr, done
+                break
+
+        else:
+            # ran off the end of the input string without matching skipto expr, fail
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        # build up return values
+        loc = tmploc
+        skiptext = instring[startloc:loc]
+        skipresult = ParseResults(skiptext)
+        
+        if self.includeMatch:
+            loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
+            skipresult += mat
+
+        return loc, skipresult
+
+class Forward(ParseElementEnhance):
+    """
+    Forward declaration of an expression to be defined later -
+    used for recursive grammars, such as algebraic infix notation.
+    When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.
+
+    Note: take care when assigning to C{Forward} not to overlook precedence of operators.
+    Specifically, '|' has a lower precedence than '<<', so that::
+        fwdExpr << a | b | c
+    will actually be evaluated as::
+        (fwdExpr << a) | b | c
+    thereby leaving b and c out as parseable alternatives.  It is recommended that you
+    explicitly group the values inserted into the C{Forward}::
+        fwdExpr << (a | b | c)
+    Converting to use the '<<=' operator instead will avoid this problem.
+
+    See L{ParseResults.pprint} for an example of a recursive parser created using
+    C{Forward}.
+    """
+    def __init__( self, other=None ):
+        super(Forward,self).__init__( other, savelist=False )
+
+    def __lshift__( self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass(other)
+        self.expr = other
+        self.strRepr = None
+        self.mayIndexError = self.expr.mayIndexError
+        self.mayReturnEmpty = self.expr.mayReturnEmpty
+        self.setWhitespaceChars( self.expr.whiteChars )
+        self.skipWhitespace = self.expr.skipWhitespace
+        self.saveAsList = self.expr.saveAsList
+        self.ignoreExprs.extend(self.expr.ignoreExprs)
+        return self
+        
+    def __ilshift__(self, other):
+        return self << other
+    
+    def leaveWhitespace( self ):
+        self.skipWhitespace = False
+        return self
+
+    def streamline( self ):
+        if not self.streamlined:
+            self.streamlined = True
+            if self.expr is not None:
+                self.expr.streamline()
+        return self
+
+    def validate( self, validateTrace=[] ):
+        if self not in validateTrace:
+            tmp = validateTrace[:]+[self]
+            if self.expr is not None:
+                self.expr.validate(tmp)
+        self.checkRecursion([])
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+        return self.__class__.__name__ + ": ..."
+
+        # stubbed out for now - creates awful memory and perf issues
+        self._revertClass = self.__class__
+        self.__class__ = _ForwardNoRecurse
+        try:
+            if self.expr is not None:
+                retString = _ustr(self.expr)
+            else:
+                retString = "None"
+        finally:
+            self.__class__ = self._revertClass
+        return self.__class__.__name__ + ": " + retString
+
+    def copy(self):
+        if self.expr is not None:
+            return super(Forward,self).copy()
+        else:
+            ret = Forward()
+            ret <<= self
+            return ret
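+
+# Illustrative usage sketch (added comment, not part of the upstream pyparsing source):
+# a minimal recursive grammar built with Forward, using the '<<=' form recommended above.
+# Word, Group, Suppress, ZeroOrMore, oneOf and nums are the names defined in this module.
+#
+#   LPAR, RPAR = map(Suppress, "()")
+#   expr = Forward()
+#   atom = Word(nums) | Group(LPAR + expr + RPAR)
+#   expr <<= atom + ZeroOrMore(oneOf("+ -") + atom)
+#   expr.parseString("1+(2-3)")   # -> ['1', '+', ['2', '-', '3']]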
+
+class _ForwardNoRecurse(Forward):
+    def __str__( self ):
+        return "..."
+
+class TokenConverter(ParseElementEnhance):
+    """
+    Abstract subclass of C{ParseElementEnhance}, for converting parsed results.
+    """
+    def __init__( self, expr, savelist=False ):
+        super(TokenConverter,self).__init__( expr )#, savelist )
+        self.saveAsList = False
+
+class Combine(TokenConverter):
+    """
+    Converter to concatenate all matching tokens to a single string.
+    By default, the matching patterns must also be contiguous in the input string;
+    this can be disabled by specifying C{'adjacent=False'} in the constructor.
+
+    Example::
+        real = Word(nums) + '.' + Word(nums)
+        print(real.parseString('3.1416')) # -> ['3', '.', '1416']
+        # will also erroneously match the following
+        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']
+
+        real = Combine(Word(nums) + '.' + Word(nums))
+        print(real.parseString('3.1416')) # -> ['3.1416']
+        # no match when there are internal spaces
+        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
+    """
+    def __init__( self, expr, joinString="", adjacent=True ):
+        super(Combine,self).__init__( expr )
+        # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
+        if adjacent:
+            self.leaveWhitespace()
+        self.adjacent = adjacent
+        self.skipWhitespace = True
+        self.joinString = joinString
+        self.callPreparse = True
+
+    def ignore( self, other ):
+        if self.adjacent:
+            ParserElement.ignore(self, other)
+        else:
+            super( Combine, self).ignore( other )
+        return self
+
+    def postParse( self, instring, loc, tokenlist ):
+        retToks = tokenlist.copy()
+        del retToks[:]
+        retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
+
+        if self.resultsName and retToks.haskeys():
+            return [ retToks ]
+        else:
+            return retToks
+
+class Group(TokenConverter):
+    """
+    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.
+
+    Example::
+        ident = Word(alphas)
+        num = Word(nums)
+        term = ident | num
+        func = ident + Optional(delimitedList(term))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']
+
+        func = ident + Group(Optional(delimitedList(term)))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
+    """
+    def __init__( self, expr ):
+        super(Group,self).__init__( expr )
+        self.saveAsList = True
+
+    def postParse( self, instring, loc, tokenlist ):
+        return [ tokenlist ]
+
+class Dict(TokenConverter):
+    """
+    Converter to return a repetitive expression as a list, but also as a dictionary.
+    Each element can also be referenced using the first token in the expression as its key.
+    Useful for tabular report scraping when the first column can be used as a item key.
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        
+        # print attributes as plain groups
+        print(OneOrMore(attr_expr).parseString(text).dump())
+        
+        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
+        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
+        print(result.dump())
+        
+        # access named fields as dict entries, or output as dict
+        print(result['shape'])        
+        print(result.asDict())
+    prints::
+        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+    See more examples at L{ParseResults} of accessing fields by results name.
+    """
+    def __init__( self, expr ):
+        super(Dict,self).__init__( expr )
+        self.saveAsList = True
+
+    def postParse( self, instring, loc, tokenlist ):
+        for i,tok in enumerate(tokenlist):
+            if len(tok) == 0:
+                continue
+            ikey = tok[0]
+            if isinstance(ikey,int):
+                ikey = _ustr(tok[0]).strip()
+            if len(tok)==1:
+                tokenlist[ikey] = _ParseResultsWithOffset("",i)
+            elif len(tok)==2 and not isinstance(tok[1],ParseResults):
+                tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
+            else:
+                dictvalue = tok.copy() #ParseResults(i)
+                del dictvalue[0]
+                if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
+                else:
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
+
+        if self.resultsName:
+            return [ tokenlist ]
+        else:
+            return tokenlist
+
+
+class Suppress(TokenConverter):
+    """
+    Converter for ignoring the results of a parsed expression.
+
+    Example::
+        source = "a, b, c,d"
+        wd = Word(alphas)
+        wd_list1 = wd + ZeroOrMore(',' + wd)
+        print(wd_list1.parseString(source))
+
+        # often, delimiters that are useful during parsing are just in the
+        # way afterward - use Suppress to keep them out of the parsed output
+        wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
+        print(wd_list2.parseString(source))
+    prints::
+        ['a', ',', 'b', ',', 'c', ',', 'd']
+        ['a', 'b', 'c', 'd']
+    (See also L{delimitedList}.)
+    """
+    def postParse( self, instring, loc, tokenlist ):
+        return []
+
+    def suppress( self ):
+        return self
+
+
+class OnlyOnce(object):
+    """
+    Wrapper for parse actions, to ensure they are only called once.
+    """
+    def __init__(self, methodCall):
+        self.callable = _trim_arity(methodCall)
+        self.called = False
+    def __call__(self,s,l,t):
+        if not self.called:
+            results = self.callable(s,l,t)
+            self.called = True
+            return results
+        raise ParseException(s,l,"")
+    def reset(self):
+        self.called = False
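+
+# Illustrative usage sketch (added comment, not part of the upstream pyparsing source):
+# a parse action wrapped in OnlyOnce fires on the first match only; later matches
+# raise ParseException until reset() is called.
+#
+#   def announce(s, l, t):
+#       print("first match at char", l)
+#   wd = Word(alphas).setParseAction(OnlyOnce(announce))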
+
+def traceParseAction(f):
+    """
+    Decorator for debugging parse actions. 
+    
+    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
+    When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.
+
+    Example::
+        wd = Word(alphas)
+
+        @traceParseAction
+        def remove_duplicate_chars(tokens):
+            return ''.join(sorted(set(''.join(tokens))))
+
+        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
+        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
+    prints::
+        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
+        <<leaving remove_duplicate_chars (ret: 'dfjkls')
+        ['dfjkls']
+    """
+    f = _trim_arity(f)
+    def z(*paArgs):
+        thisFunc = f.__name__
+        s,l,t = paArgs[-3:]
+        if len(paArgs)>3:
+            thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
+        sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )
+        try:
+            ret = f(*paArgs)
+        except Exception as exc:
+            sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
+            raise
+        sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )
+        return ret
+    return z
+
+#
+# global helpers
+#
+def delimitedList( expr, delim=",", combine=False ):
+    """
+    Helper to define a delimited list of expressions - the delimiter defaults to ','.
+    By default, the list elements and delimiters can have intervening whitespace, and
+    comments, but this can be overridden by passing C{combine=True} in the constructor.
+    If C{combine} is set to C{True}, the matching tokens are returned as a single token
+    string, with the delimiters included; otherwise, the matching tokens are returned
+    as a list of tokens, with the delimiters suppressed.
+
+    Example::
+        delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
+        delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
+    """
+    dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
+    if combine:
+        return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
+    else:
+        return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
+
+def countedArray( expr, intExpr=None ):
+    """
+    Helper to define a counted list of expressions.
+    This helper defines a pattern of the form::
+        integer expr expr expr...
+    where the leading integer tells how many expr expressions follow.
+    The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
+    
+    If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.
+
+    Example::
+        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']
+
+        # in this parser, the leading integer value is given in binary,
+        # '10' indicating that 2 values are in the array
+        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
+        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
+    """
+    arrayExpr = Forward()
+    def countFieldParseAction(s,l,t):
+        n = t[0]
+        arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
+        return []
+    if intExpr is None:
+        intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
+    else:
+        intExpr = intExpr.copy()
+    intExpr.setName("arrayLen")
+    intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
+    return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')
+
+def _flatten(L):
+    # recursively flatten nested Python lists, e.g. [1, [2, [3, 4]]] -> [1, 2, 3, 4]
+    ret = []
+    for i in L:
+        if isinstance(i,list):
+            ret.extend(_flatten(i))
+        else:
+            ret.append(i)
+    return ret
+
+def matchPreviousLiteral(expr):
+    """
+    Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks
+    for a 'repeat' of a previous expression.  For example::
+        first = Word(nums)
+        second = matchPreviousLiteral(first)
+        matchExpr = first + ":" + second
+    will match C{"1:1"}, but not C{"1:2"}.  Because this matches a
+    previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
+    If this is not desired, use C{matchPreviousExpr}.
+    Do I{not} use with packrat parsing enabled.
+    """
+    rep = Forward()
+    def copyTokenToRepeater(s,l,t):
+        if t:
+            if len(t) == 1:
+                rep << t[0]
+            else:
+                # flatten t tokens
+                tflat = _flatten(t.asList())
+                rep << And(Literal(tt) for tt in tflat)
+        else:
+            rep << Empty()
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def matchPreviousExpr(expr):
+    """
+    Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks
+    for a 'repeat' of a previous expression.  For example::
+        first = Word(nums)
+        second = matchPreviousExpr(first)
+        matchExpr = first + ":" + second
+    will match C{"1:1"}, but not C{"1:2"}.  Because this matches by
+    expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
+    the expressions are evaluated first, and then compared, so
+    C{"1"} is compared with C{"10"}.
+    Do I{not} use with packrat parsing enabled.
+    """
+    rep = Forward()
+    e2 = expr.copy()
+    rep <<= e2
+    def copyTokenToRepeater(s,l,t):
+        matchTokens = _flatten(t.asList())
+        def mustMatchTheseTokens(s,l,t):
+            theseTokens = _flatten(t.asList())
+            if  theseTokens != matchTokens:
+                raise ParseException("",0,"")
+        rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def _escapeRegexRangeChars(s):
+    #~  escape these chars: ^-]
+    for c in r"\^-]":
+        s = s.replace(c,_bslash+c)
+    s = s.replace("\n",r"\n")
+    s = s.replace("\t",r"\t")
+    return _ustr(s)
+
+def oneOf( strs, caseless=False, useRegex=True ):
+    """
+    Helper to quickly define a set of alternative Literals, and makes sure to do
+    longest-first testing when there is a conflict, regardless of the input order,
+    but returns a C{L{MatchFirst}} for best performance.
+
+    Parameters:
+     - strs - a string of space-delimited literals, or a collection of string literals
+     - caseless - (default=C{False}) - treat all literals as caseless
+     - useRegex - (default=C{True}) - as an optimization, will generate a Regex
+          object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
+          if creating a C{Regex} raises an exception)
+
+    Example::
+        comp_oper = oneOf("< = > <= >= !=")
+        var = Word(alphas)
+        number = Word(nums)
+        term = var | number
+        comparison_expr = term + comp_oper + term
+        print(comparison_expr.searchString("B = 12  AA=23 B<=AA AA>12"))
+    prints::
+        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
+    """
+    if caseless:
+        isequal = ( lambda a,b: a.upper() == b.upper() )
+        masks = ( lambda a,b: b.upper().startswith(a.upper()) )
+        parseElementClass = CaselessLiteral
+    else:
+        isequal = ( lambda a,b: a == b )
+        masks = ( lambda a,b: b.startswith(a) )
+        parseElementClass = Literal
+
+    symbols = []
+    if isinstance(strs,basestring):
+        symbols = strs.split()
+    elif isinstance(strs, Iterable):
+        symbols = list(strs)
+    else:
+        warnings.warn("Invalid argument to oneOf, expected string or iterable",
+                SyntaxWarning, stacklevel=2)
+    if not symbols:
+        return NoMatch()
+
+    i = 0
+    while i < len(symbols)-1:
+        cur = symbols[i]
+        for j,other in enumerate(symbols[i+1:]):
+            if ( isequal(other, cur) ):
+                del symbols[i+j+1]
+                break
+            elif ( masks(cur, other) ):
+                del symbols[i+j+1]
+                symbols.insert(i,other)
+                cur = other
+                break
+        else:
+            i += 1
+
+    if not caseless and useRegex:
+        #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
+        try:
+            if len(symbols)==len("".join(symbols)):
+                return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
+            else:
+                return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
+        except Exception:
+            warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
+                    SyntaxWarning, stacklevel=2)
+
+
+    # last resort, just use MatchFirst
+    return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
+
+def dictOf( key, value ):
+    """
+    Helper to easily and clearly define a dictionary by specifying the respective patterns
+    for the key and value.  Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
+    in the proper order.  The key pattern can include delimiting markers or punctuation,
+    as long as they are suppressed, thereby leaving the significant key text.  The value
+    pattern can include named results, so that the C{Dict} results can include named token
+    fields.
+
+    Example::
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        print(OneOrMore(attr_expr).parseString(text).dump())
+        
+        attr_label = label
+        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
+
+        # similar to Dict, but simpler call format
+        result = dictOf(attr_label, attr_value).parseString(text)
+        print(result.dump())
+        print(result['shape'])
+        print(result.shape)  # object attribute access works too
+        print(result.asDict())
+    prints::
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        SQUARE
+        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
+    """
+    return Dict( ZeroOrMore( Group ( key + value ) ) )
+
+def originalTextFor(expr, asString=True):
+    """
+    Helper to return the original, untokenized text for a given expression.  Useful to
+    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
+    revert separate tokens with intervening whitespace back to the original matching
+    input text. By default, returns a string containing the original parsed text.
+       
+    If the optional C{asString} argument is passed as C{False}, then the return value is a 
+    C{L{ParseResults}} containing any results names that were originally matched, and a 
+    single token containing the original matched text from the input string.  So if 
+    the expression passed to C{L{originalTextFor}} contains expressions with defined
+    results names, you must set C{asString} to C{False} if you want to preserve those
+    results name values.
+
+    Example::
+        src = "this is test  bold text  normal text "
+        for tag in ("b","i"):
+            opener,closer = makeHTMLTags(tag)
+            patt = originalTextFor(opener + SkipTo(closer) + closer)
+            print(patt.searchString(src)[0])
+    prints::
+        ['<b> bold <i>text</i> </b>']
+        ['<i>text</i>']
+    """
+    locMarker = Empty().setParseAction(lambda s,loc,t: loc)
+    endlocMarker = locMarker.copy()
+    endlocMarker.callPreparse = False
+    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
+    if asString:
+        extractText = lambda s,l,t: s[t._original_start:t._original_end]
+    else:
+        def extractText(s,l,t):
+            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
+    matchExpr.setParseAction(extractText)
+    matchExpr.ignoreExprs = expr.ignoreExprs
+    return matchExpr
+
+def ungroup(expr): 
+    """
+    Helper to undo pyparsing's default grouping of And expressions, even
+    if all but one are non-empty.
+    """
+    return TokenConverter(expr).setParseAction(lambda t:t[0])
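+
+# Illustrative usage sketch (added comment, not part of the upstream pyparsing source):
+#   grouped = Group(Word(alphas) + Word(nums))
+#   grouped.parseString("abc 123")           # -> [['abc', '123']]
+#   ungroup(grouped).parseString("abc 123")  # -> ['abc', '123']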
+
+def locatedExpr(expr):
+    """
+    Helper to decorate a returned token with its starting and ending locations in the input string.
+    This helper adds the following results names:
+     - locn_start = location where matched expression begins
+     - locn_end = location where matched expression ends
+     - value = the actual parsed results
+
+    Be careful if the input text contains C{<TAB>} characters; you may want to call
+    C{L{ParserElement.parseWithTabs}}
+
+    Example::
+        wd = Word(alphas)
+        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
+            print(match)
+    prints::
+        [[0, 'ljsdf', 5]]
+        [[8, 'lksdjjf', 15]]
+        [[18, 'lkkjj', 23]]
+    """
+    locator = Empty().setParseAction(lambda s,l,t: l)
+    return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
+
+
+# convenience constants for positional expressions
+empty       = Empty().setName("empty")
+lineStart   = LineStart().setName("lineStart")
+lineEnd     = LineEnd().setName("lineEnd")
+stringStart = StringStart().setName("stringStart")
+stringEnd   = StringEnd().setName("stringEnd")
+
+_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
+_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
+_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
+_charRange = Group(_singleChar + Suppress("-") + _singleChar)
+_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
+
+def srange(s):
+    r"""
+    Helper to easily define string ranges for use in Word construction.  Borrows
+    syntax from regexp '[]' string range definitions::
+        srange("[0-9]")   -> "0123456789"
+        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
+        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
+    The input string must be enclosed in []'s, and the returned string is the expanded
+    character set joined into a single string.
+    The values enclosed in the []'s may be:
+     - a single character
+     - an escaped character with a leading backslash (such as C{\-} or C{\]})
+     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) 
+         (C{\0x##} is also supported for backwards compatibility) 
+     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
+     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
+     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
+    """
+    _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
+    try:
+        return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
+    except Exception:
+        return ""
+
+def matchOnlyAtCol(n):
+    """
+    Helper method for defining parse actions that require matching at a specific
+    column in the input text.
+    """
+    def verifyCol(strg,locn,toks):
+        if col(locn,strg) != n:
+            raise ParseException(strg,locn,"matched token not at column %d" % n)
+    return verifyCol
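+
+# Illustrative usage sketch (added comment, not part of the upstream pyparsing source):
+#   # only accept an integer token when it starts in column 9 of its line
+#   col9_int = Word(nums).setParseAction(matchOnlyAtCol(9))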
+
+def replaceWith(replStr):
+    """
+    Helper method for common parse actions that simply return a literal value.  Especially
+    useful when used with C{L{transformString}()}.
+
+    Example::
+        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
+        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
+        term = na | num
+        
+        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
+    """
+    return lambda s,l,t: [replStr]
+
+def removeQuotes(s,l,t):
+    """
+    Helper parse action for removing quotation marks from parsed quoted strings.
+
+    Example::
+        # by default, quotation marks are included in parsed results
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
+
+        # use removeQuotes to strip quotation marks from parsed results
+        quotedString.setParseAction(removeQuotes)
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
+    """
+    return t[0][1:-1]
+
+def tokenMap(func, *args):
+    """
+    Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional
+    args are passed, they are forwarded to the given function as additional arguments after
+    the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
+    parsed data to an integer using base 16.
+
+    Example (compare the last example to the one in L{ParserElement.transformString})::
+        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
+        hex_ints.runTests('''
+            00 11 22 aa FF 0a 0d 1a
+            ''')
+        
+        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
+        OneOrMore(upperword).runTests('''
+            my kingdom for a horse
+            ''')
+
+        wd = Word(alphas).setParseAction(tokenMap(str.title))
+        OneOrMore(wd).setParseAction(' '.join).runTests('''
+            now is the winter of our discontent made glorious summer by this sun of york
+            ''')
+    prints::
+        00 11 22 aa FF 0a 0d 1a
+        [0, 17, 34, 170, 255, 10, 13, 26]
+
+        my kingdom for a horse
+        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
+
+        now is the winter of our discontent made glorious summer by this sun of york
+        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
+    """
+    def pa(s,l,t):
+        return [func(tokn, *args) for tokn in t]
+
+    try:
+        func_name = getattr(func, '__name__', 
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    pa.__name__ = func_name
+
+    return pa
+
+upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
+"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""
+
+downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
+"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
+    
+def _makeTags(tagStr, xml):
+    """Internal helper to construct opening and closing tag expressions, given a tag name"""
+    if isinstance(tagStr,basestring):
+        resname = tagStr
+        tagStr = Keyword(tagStr, caseless=not xml)
+    else:
+        resname = tagStr.name
+
+    tagAttrName = Word(alphas,alphanums+"_-:")
+    if (xml):
+        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
+        openTag = Suppress("<") + tagStr("tag") + \
+                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
+                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+    else:
+        printablesLessRAbrack = "".join(c for c in printables if c not in ">")
+        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
+        openTag = Suppress("<") + tagStr("tag") + \
+                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
+                Optional( Suppress("=") + tagAttrValue ) ))) + \
+                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+    closeTag = Combine(_L("")
+
+    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
+    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("" % resname)
+    openTag.tag = resname
+    closeTag.tag = resname
+    return openTag, closeTag
+
+def makeHTMLTags(tagStr):
+    """
+    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
+    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.
+
+    Example::
+        text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
+        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
+        a,a_end = makeHTMLTags("A")
+        link_expr = a + SkipTo(a_end)("link_text") + a_end
+        
+        for link in link_expr.searchString(text):
+            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
+            print(link.link_text, '->', link.href)
+    prints::
+        pyparsing -> http://pyparsing.wikispaces.com
+    """
+    return _makeTags( tagStr, False )
+
+def makeXMLTags(tagStr):
+    """
+    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
+    tags only in the given upper/lower case.
+
+    Example: similar to L{makeHTMLTags}
+    """
+    return _makeTags( tagStr, True )
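+
+# Illustrative usage sketch (added comment, not part of the upstream pyparsing source):
+#   ref_start, ref_end = makeXMLTags("ref")
+#   ref_expr = ref_start + SkipTo(ref_end)("body") + ref_end   # matches <ref ...> ... </ref>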
+
+def withAttribute(*args,**attrDict):
+    """
+    Helper to create a validating parse action to be used with start tags created
+    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
+    with a required attribute value, to avoid false matches on common tags such as
+    C{} or C{
}. + + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python + reserved word, as in C{**{"class":"Customer", "align":"right"}} + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. + + If just testing for C{class} (with or without a namespace), use C{L{withClass}}. + + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + + Example:: + html = ''' +
+ Some text +
1 4 0 1 0
+
1,3 2,3 1,1
+
this has no type
+
+ + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k,v) for k,v in attrs] + def pa(s,l,tokens): + for attrName,attrValue in attrs: + if attrName not in tokens: + raise ParseException(s,l,"no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +def withClass(classname, namespace=''): + """ + Simplified version of C{L{withAttribute}} when matching on a div class - made + difficult because C{class} is a reserved word in Python. + + Example:: + html = ''' +
+ Some text +
1 4 0 1 0
+
1,3 2,3 1,1
+
this <div> has no class
+
+ + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + classattr = "%s:class" % namespace if namespace else "class" + return withAttribute(**{classattr : classname}) + +opAssoc = _Constants() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): + """ + Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. The generated parser will also recognize the use + of parentheses to override operator precedences (see example below). + + Note: if you define a deep operator list, you may see performance issues + when using infixNotation. See L{ParserElement.enablePackrat} for a + mechanism to potentially improve your parser performance. + + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
+ - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted); if the parse action + is passed a tuple or list of functions, this is equivalent to + calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) + - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) + - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) + + Example:: + # simple example of four-function arithmetic with ints and variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + prints:: + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + ret = Forward() + lastExpr = baseExpr | ( lpar + ret + rpar ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward().setName(termName) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + if isinstance(pa, (tuple, list)): + matchExpr.setParseAction(*pa) + else: + matchExpr.setParseAction(pa) + thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) + lastExpr = thisExpr + ret <<= lastExpr + return ret + +operatorPrecedence = infixNotation +"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") 
+quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| + Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """ + Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression + - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression + - content - expression for items within the nested lists (default=C{None}) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) + + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. + + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. + + Example:: + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR,RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + prints:: + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener,basestring) and isinstance(closer,basestring): + if len(opener) == 1 and len(closer)==1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t:t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = 
Forward() + if ignoreExpr is not None: + ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + else: + ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + ret.setName('nested %s%s expression' % (opener,closer)) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """ + Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. + + Parameters: + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single grammar + should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond the + the current level; set to False for block of left-most statements + (default=C{True}) + + A valid block must contain at least one C{blockStatement}. + + Example:: + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group( funcDecl + func_body ) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << ( funcDef | assignment | identifier ) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + prints:: + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName('indented block') + +alphas8bit = 
srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) +commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form C{/* ... */}" + +htmlComment = Regex(r"").setName("HTML comment") +"Comment of the form C{}" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form C{// ... (to end of line)}" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" + +javaStyleComment = cppStyleComment +"Same as C{L{cppStyleComment}}" + +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form C{# ... (to end of line)}" + +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional( Word(" \t") + + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") +commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. 
+ This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """ + Here are some common low-level expressions that may be useful in jump-starting parser development: + - numeric forms (L{integers}, L{reals}, L{scientific notation}) + - common L{programming identifiers} + - network addresses (L{MAC}, L{IPv4}, L{IPv6}) + - ISO8601 L{dates} and L{datetime} + - L{UUID} + - L{comma-separated list} + Parse actions: + - C{L{convertToInteger}} + - C{L{convertToFloat}} + - C{L{convertToDate}} + - C{L{convertToDatetime}} + - C{L{stripHTMLTags}} + - C{L{upcaseTokens}} + - C{L{downcaseTokens}} + + Example:: + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + prints:: + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + + -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + """expression that parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating point number and returns a 
float""" + + sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (C{0.0.0.0 - 255.255.255.255})" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) + + Example:: + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + prints:: + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """ + Helper to create a parse action for converting parsed datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) + + Example:: + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + prints:: + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (C{yyyy-mm-dd})" + + iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """ + Parse action to remove HTML tags from web page HTML source + + Example:: + # strip HTML links from normal text + text = 'More info at the
pyparsing wiki page' + td,td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + + print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + + Optional( White(" \t") ) ) ).streamline().setName("commaItem") + comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" + + +if __name__ == "__main__": + + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + + ident = Word(alphas, alphanums + "_$") + + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec = ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" + 12345678-1234-5678-1234-567812345678 + """) diff --git a/venv/Lib/site-packages/pkg_resources/extern/__init__.py b/venv/Lib/site-packages/pkg_resources/extern/__init__.py new file mode 100644 index 00000000..fed59295 --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/extern/__init__.py @@ -0,0 +1,73 @@ +import importlib.util +import sys + + +class VendorImporter: + """ + A PEP 302 meta path importer for finding optionally-vendored + or otherwise naturally-installed packages from root_name. + """ + + def __init__(self, root_name, vendored_names=(), vendor_pkg=None): + self.root_name = root_name + self.vendored_names = set(vendored_names) + self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') + + @property + def search_path(self): + """ + Search first the vendor package then as a natural package. + """ + yield self.vendor_pkg + '.' 
+ yield '' + + def _module_matches_namespace(self, fullname): + """Figure out if the target module is vendored.""" + root, base, target = fullname.partition(self.root_name + '.') + return not root and any(map(target.startswith, self.vendored_names)) + + def load_module(self, fullname): + """ + Iterate over the search path to locate and load fullname. + """ + root, base, target = fullname.partition(self.root_name + '.') + for prefix in self.search_path: + try: + extant = prefix + target + __import__(extant) + mod = sys.modules[extant] + sys.modules[fullname] = mod + return mod + except ImportError: + pass + else: + raise ImportError( + "The '{target}' package is required; " + "normally this is bundled with this package so if you get " + "this warning, consult the packager of your " + "distribution.".format(**locals()) + ) + + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + + def find_spec(self, fullname, path=None, target=None): + """Return a module spec for vendored names.""" + return ( + importlib.util.spec_from_loader(fullname, self) + if self._module_matches_namespace(fullname) else None + ) + + def install(self): + """ + Install this importer into sys.meta_path if not already present. + """ + if self not in sys.meta_path: + sys.meta_path.append(self) + + +names = 'packaging', 'pyparsing', 'appdirs' +VendorImporter(__name__, names).install() diff --git a/venv/Lib/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..41855009 Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/tests/data/my-test-package-source/__pycache__/setup.cpython-36.pyc b/venv/Lib/site-packages/pkg_resources/tests/data/my-test-package-source/__pycache__/setup.cpython-36.pyc new file mode 100644 index 00000000..3f3ef2e0 Binary files /dev/null and b/venv/Lib/site-packages/pkg_resources/tests/data/my-test-package-source/__pycache__/setup.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pkg_resources/tests/data/my-test-package-source/setup.py b/venv/Lib/site-packages/pkg_resources/tests/data/my-test-package-source/setup.py new file mode 100644 index 00000000..fe80d28f --- /dev/null +++ b/venv/Lib/site-packages/pkg_resources/tests/data/my-test-package-source/setup.py @@ -0,0 +1,6 @@ +import setuptools +setuptools.setup( + name="my-test-package", + version="1.0", + zip_safe=True, +) diff --git a/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/INSTALLER b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/INSTALLER similarity index 100% rename from env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/INSTALLER rename to venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/INSTALLER diff --git a/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/LICENSE b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/LICENSE new file mode 100644 index 00000000..1e65815c --- /dev/null +++ b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/LICENSE @@ -0,0 +1,54 @@ +Copyright 2017- Paul Ganssle +Copyright 2017- dateutil contributors (see AUTHORS file) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +The above license applies to all contributions after 2017-12-01, as well as +all contributions that have been re-licensed (see AUTHORS file for the list of +contributors who have re-licensed their code). +-------------------------------------------------------------------------------- +dateutil - Extensions to the standard Python datetime module. + +Copyright (c) 2003-2011 - Gustavo Niemeyer +Copyright (c) 2012-2014 - Tomi Pieviläinen +Copyright (c) 2014-2016 - Yaron de Leeuw +Copyright (c) 2015- - Paul Ganssle +Copyright (c) 2015- - dateutil contributors (see AUTHORS file) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The above BSD License Applies to all code, even that also covered by Apache 2.0. 
\ No newline at end of file diff --git a/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/METADATA b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/METADATA new file mode 100644 index 00000000..2e476a14 --- /dev/null +++ b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/METADATA @@ -0,0 +1,200 @@ +Metadata-Version: 2.1 +Name: python-dateutil +Version: 2.8.1 +Summary: Extensions to the standard Python datetime module +Home-page: https://dateutil.readthedocs.io +Author: Gustavo Niemeyer +Author-email: gustavo@niemeyer.net +Maintainer: Paul Ganssle +Maintainer-email: dateutil@python.org +License: Dual License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Topic :: Software Development :: Libraries +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,>=2.7 +Description-Content-Type: text/x-rst +Requires-Dist: six (>=1.5) + +dateutil - powerful extensions to datetime +========================================== + +|pypi| |support| |licence| + +|gitter| |readthedocs| + +|travis| |appveyor| |pipelines| |coverage| + +.. |pypi| image:: https://img.shields.io/pypi/v/python-dateutil.svg?style=flat-square + :target: https://pypi.org/project/python-dateutil/ + :alt: pypi version + +.. |support| image:: https://img.shields.io/pypi/pyversions/python-dateutil.svg?style=flat-square + :target: https://pypi.org/project/python-dateutil/ + :alt: supported Python version + +.. |travis| image:: https://img.shields.io/travis/dateutil/dateutil/master.svg?style=flat-square&label=Travis%20Build + :target: https://travis-ci.org/dateutil/dateutil + :alt: travis build status + +.. |appveyor| image:: https://img.shields.io/appveyor/ci/dateutil/dateutil/master.svg?style=flat-square&logo=appveyor + :target: https://ci.appveyor.com/project/dateutil/dateutil + :alt: appveyor build status + +.. |pipelines| image:: https://dev.azure.com/pythondateutilazure/dateutil/_apis/build/status/dateutil.dateutil?branchName=master + :target: https://dev.azure.com/pythondateutilazure/dateutil/_build/latest?definitionId=1&branchName=master + :alt: azure pipelines build status + +.. |coverage| image:: https://codecov.io/github/dateutil/dateutil/coverage.svg?branch=master + :target: https://codecov.io/github/dateutil/dateutil?branch=master + :alt: Code coverage + +.. |gitter| image:: https://badges.gitter.im/dateutil/dateutil.svg + :alt: Join the chat at https://gitter.im/dateutil/dateutil + :target: https://gitter.im/dateutil/dateutil + +.. |licence| image:: https://img.shields.io/pypi/l/python-dateutil.svg?style=flat-square + :target: https://pypi.org/project/python-dateutil/ + :alt: licence + +.. 
|readthedocs| image:: https://img.shields.io/readthedocs/dateutil/latest.svg?style=flat-square&label=Read%20the%20Docs + :alt: Read the documentation at https://dateutil.readthedocs.io/en/latest/ + :target: https://dateutil.readthedocs.io/en/latest/ + +The `dateutil` module provides powerful extensions to +the standard `datetime` module, available in Python. + +Installation +============ +`dateutil` can be installed from PyPI using `pip` (note that the package name is +different from the importable name):: + + pip install python-dateutil + +Download +======== +dateutil is available on PyPI +https://pypi.org/project/python-dateutil/ + +The documentation is hosted at: +https://dateutil.readthedocs.io/en/stable/ + +Code +==== +The code and issue tracker are hosted on GitHub: +https://github.com/dateutil/dateutil/ + +Features +======== + +* Computing of relative deltas (next month, next year, + next Monday, last week of month, etc); +* Computing of relative deltas between two given + date and/or datetime objects; +* Computing of dates based on very flexible recurrence rules, + using a superset of the `iCalendar `_ + specification. Parsing of RFC strings is supported as well. +* Generic parsing of dates in almost any string format; +* Timezone (tzinfo) implementations for tzfile(5) format + files (/etc/localtime, /usr/share/zoneinfo, etc), TZ + environment string (in all known formats), iCalendar + format files, given ranges (with help from relative deltas), + local machine timezone, fixed offset timezone, UTC timezone, + and Windows registry-based time zones. +* Internal up-to-date world timezone information based on + Olson's database. +* Computing of Easter Sunday dates for any given year, + using Western, Orthodox or Julian algorithms; +* A comprehensive test suite. + +Quick example +============= +Here's a snapshot, just to give an idea about the power of the +package. For more examples, look at the documentation. + +Suppose you want to know how much time is left, in +years/months/days/etc, before the next easter happening on a +year with a Friday 13th in August, and you want to get today's +date out of the "date" unix system command. Here is the code: + +.. code-block:: python3 + + >>> from dateutil.relativedelta import * + >>> from dateutil.easter import * + >>> from dateutil.rrule import * + >>> from dateutil.parser import * + >>> from datetime import * + >>> now = parse("Sat Oct 11 17:13:46 UTC 2003") + >>> today = now.date() + >>> year = rrule(YEARLY,dtstart=now,bymonth=8,bymonthday=13,byweekday=FR)[0].year + >>> rdelta = relativedelta(easter(year), today) + >>> print("Today is: %s" % today) + Today is: 2003-10-11 + >>> print("Year with next Aug 13th on a Friday is: %s" % year) + Year with next Aug 13th on a Friday is: 2004 + >>> print("How far is the Easter of that year: %s" % rdelta) + How far is the Easter of that year: relativedelta(months=+6) + >>> print("And the Easter of that year is: %s" % (today+rdelta)) + And the Easter of that year is: 2004-04-11 + +Being exactly 6 months ahead was **really** a coincidence :) + +Contributing +============ + +We welcome many types of contributions - bug reports, pull requests (code, infrastructure or documentation fixes). For more information about how to contribute to the project, see the ``CONTRIBUTING.md`` file in the repository. + + +Author +====== +The dateutil module was written by Gustavo Niemeyer +in 2003. 
+ +It is maintained by: + +* Gustavo Niemeyer 2003-2011 +* Tomi Pieviläinen 2012-2014 +* Yaron de Leeuw 2014-2016 +* Paul Ganssle 2015- + +Starting with version 2.4.1, all source and binary distributions will be signed +by a PGP key that has, at the very least, been signed by the key which made the +previous release. A table of release signing keys can be found below: + +=========== ============================ +Releases Signing key fingerprint +=========== ============================ +2.4.1- `6B49 ACBA DCF6 BD1C A206 67AB CD54 FCE3 D964 BEFB`_ (|pgp_mirror|_) +=========== ============================ + + +Contact +======= +Our mailing list is available at `dateutil@python.org `_. As it is hosted by the PSF, it is subject to the `PSF code of +conduct `_. + +License +======= + +All contributions after December 1, 2017 released under dual license - either `Apache 2.0 License `_ or the `BSD 3-Clause License `_. Contributions before December 1, 2017 - except those those explicitly relicensed - are released only under the BSD 3-Clause License. + + +.. _6B49 ACBA DCF6 BD1C A206 67AB CD54 FCE3 D964 BEFB: + https://pgp.mit.edu/pks/lookup?op=vindex&search=0xCD54FCE3D964BEFB + +.. |pgp_mirror| replace:: mirror +.. _pgp_mirror: https://sks-keyservers.net/pks/lookup?op=vindex&search=0xCD54FCE3D964BEFB + + diff --git a/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/RECORD b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/RECORD new file mode 100644 index 00000000..06db557f --- /dev/null +++ b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/RECORD @@ -0,0 +1,45 @@ +dateutil/__init__.py,sha256=lXElASqwYGwqlrSWSeX19JwF5Be9tNecDa9ebk-0gmk,222 +dateutil/__pycache__/__init__.cpython-36.pyc,, +dateutil/__pycache__/_common.cpython-36.pyc,, +dateutil/__pycache__/_version.cpython-36.pyc,, +dateutil/__pycache__/easter.cpython-36.pyc,, +dateutil/__pycache__/relativedelta.cpython-36.pyc,, +dateutil/__pycache__/rrule.cpython-36.pyc,, +dateutil/__pycache__/tzwin.cpython-36.pyc,, +dateutil/__pycache__/utils.cpython-36.pyc,, +dateutil/_common.py,sha256=77w0yytkrxlYbSn--lDVPUMabUXRR9I3lBv_vQRUqUY,932 +dateutil/_version.py,sha256=U1JNX8P5pUNBtcStwfGyAUIMMHGZXhiTDTVXgAUWxs4,116 +dateutil/easter.py,sha256=0liVsgqSx-NPhaFevOJaYgEbrSu2oQQ2o9m_OEBdc-s,2684 +dateutil/parser/__init__.py,sha256=wWk6GFuxTpjoggCGtgkceJoti4pVjl4_fHQXpNOaSYg,1766 +dateutil/parser/__pycache__/__init__.cpython-36.pyc,, +dateutil/parser/__pycache__/_parser.cpython-36.pyc,, +dateutil/parser/__pycache__/isoparser.cpython-36.pyc,, +dateutil/parser/_parser.py,sha256=F0w8h9txvatnYAmeJ1MMbIAvZHRzy3iFjv-AZqRovNs,58804 +dateutil/parser/isoparser.py,sha256=BeEEqIeqhcgik5Cp1_G5Aztsqayp-MAr3aVqAKo1XRc,13098 +dateutil/relativedelta.py,sha256=GjVxqpAVWnG67rdbf7pkoIlJvQqmju9NSfGCcqblc7U,24904 +dateutil/rrule.py,sha256=dStRcOIj8jul-BurMKguc_IBckY-Qci1K6EYqNW8eUg,66514 +dateutil/tz/__init__.py,sha256=F-Mz13v6jYseklQf9Te9J6nzcLDmq47gORa61K35_FA,444 +dateutil/tz/__pycache__/__init__.cpython-36.pyc,, +dateutil/tz/__pycache__/_common.cpython-36.pyc,, +dateutil/tz/__pycache__/_factories.cpython-36.pyc,, +dateutil/tz/__pycache__/tz.cpython-36.pyc,, +dateutil/tz/__pycache__/win.cpython-36.pyc,, +dateutil/tz/_common.py,sha256=cgzDTANsOXvEc86cYF77EsliuSab8Puwpsl5-bX3_S4,12977 +dateutil/tz/_factories.py,sha256=unb6XQNXrPMveksTCU-Ag8jmVZs4SojoPUcAHpWnrvU,2569 +dateutil/tz/tz.py,sha256=npaGnA2M2LGUUerXzAml9rMM-BE771igYFcFETeC3JE,62851 +dateutil/tz/win.py,sha256=xJszWgSwE1xPx_HJj4ZkepyukC_hNy016WMcXhbRaB8,12935 
+dateutil/tzwin.py,sha256=7Ar4vdQCnnM0mKR3MUjbIKsZrBVfHgdwsJZc_mGYRew,59 +dateutil/utils.py,sha256=Agvhi7i3HuJdwHYCe9lDS63l_LNFUUlB2hmR3ZKNYwE,1959 +dateutil/zoneinfo/__init__.py,sha256=KYg0pthCMjcp5MXSEiBJn3nMjZeNZav7rlJw5-tz1S4,5889 +dateutil/zoneinfo/__pycache__/__init__.cpython-36.pyc,, +dateutil/zoneinfo/__pycache__/rebuild.cpython-36.pyc,, +dateutil/zoneinfo/dateutil-zoneinfo.tar.gz,sha256=6bZJKrN3mhnCqMgQgFSllQNNbtld9AnuPaRIXWoSH4o,153315 +dateutil/zoneinfo/rebuild.py,sha256=2uFJQiW3Fl8fVogrSXisJMpLeHI1zGwpvBFF43QdeF0,1719 +python_dateutil-2.8.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +python_dateutil-2.8.1.dist-info/LICENSE,sha256=ugD1Gg2SgjtaHN4n2LW50jIeZ-2NqbwWPv-W1eF-V34,2889 +python_dateutil-2.8.1.dist-info/METADATA,sha256=u7pGPxvY3bP0MsvsWab9OeTybTnbLX011vZxRW12I1Y,7988 +python_dateutil-2.8.1.dist-info/RECORD,, +python_dateutil-2.8.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +python_dateutil-2.8.1.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +python_dateutil-2.8.1.dist-info/top_level.txt,sha256=4tjdWkhRZvF7LA_BYe_L9gB2w_p2a-z5y6ArjaRkot8,9 +python_dateutil-2.8.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 diff --git a/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/REQUESTED b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/WHEEL b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/WHEEL similarity index 100% rename from env/lib/python2.7/site-packages/wheel-0.33.6.dist-info/WHEEL rename to venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/WHEEL diff --git a/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/top_level.txt b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/top_level.txt new file mode 100644 index 00000000..66501480 --- /dev/null +++ b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/top_level.txt @@ -0,0 +1 @@ +dateutil diff --git a/env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/zip-safe b/venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/zip-safe similarity index 100% rename from env/lib/python2.7/site-packages/pyobjc-5.2.dist-info/zip-safe rename to venv/Lib/site-packages/python_dateutil-2.8.1.dist-info/zip-safe diff --git a/venv/Lib/site-packages/pythoncom.py b/venv/Lib/site-packages/pythoncom.py new file mode 100644 index 00000000..ca7fda4d --- /dev/null +++ b/venv/Lib/site-packages/pythoncom.py @@ -0,0 +1,3 @@ +# Magic utility that "redirects" to pythoncomxx.dll +import pywintypes +pywintypes.__import_pywin32_system_module__("pythoncom", globals()) diff --git a/venv/Lib/site-packages/pythonwin/Pythonwin.exe b/venv/Lib/site-packages/pythonwin/Pythonwin.exe new file mode 100644 index 00000000..bfc1ddb5 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/Pythonwin.exe differ diff --git a/venv/Lib/site-packages/pythonwin/dde.pyd b/venv/Lib/site-packages/pythonwin/dde.pyd new file mode 100644 index 00000000..67911c8f Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/dde.pyd differ diff --git a/venv/Lib/site-packages/pythonwin/license.txt b/venv/Lib/site-packages/pythonwin/license.txt new file mode 100644 index 00000000..fa340d74 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/license.txt @@ -0,0 +1,30 @@ +Unless stated in the specfic source file, this work is +Copyright (c) 1994-2008, Mark Hammond +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +Redistributions of source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in +the documentation and/or other materials provided with the distribution. + +Neither name of Mark Hammond nor the name of contributors may be used +to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS +IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/Lib/site-packages/pythonwin/mfc140u.dll b/venv/Lib/site-packages/pythonwin/mfc140u.dll new file mode 100644 index 00000000..20064e10 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/mfc140u.dll differ diff --git a/venv/Lib/site-packages/pythonwin/mfcm140u.dll b/venv/Lib/site-packages/pythonwin/mfcm140u.dll new file mode 100644 index 00000000..d96c26c2 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/mfcm140u.dll differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/cmdserver.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/cmdserver.cpython-36.pyc new file mode 100644 index 00000000..87e1f077 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/cmdserver.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/createwin.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/createwin.cpython-36.pyc new file mode 100644 index 00000000..e475fe9b Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/createwin.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/demoutils.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/demoutils.cpython-36.pyc new file mode 100644 index 00000000..6298900a Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/demoutils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/dibdemo.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/dibdemo.cpython-36.pyc new file mode 100644 index 00000000..60f22cc8 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/dibdemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/dlgtest.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/dlgtest.cpython-36.pyc new file mode 100644 index 00000000..f55a04ef Binary files /dev/null and 
b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/dlgtest.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/dyndlg.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/dyndlg.cpython-36.pyc new file mode 100644 index 00000000..47fbe68b Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/dyndlg.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/fontdemo.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/fontdemo.cpython-36.pyc new file mode 100644 index 00000000..16eea447 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/fontdemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/guidemo.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/guidemo.cpython-36.pyc new file mode 100644 index 00000000..93466386 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/guidemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/hiertest.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/hiertest.cpython-36.pyc new file mode 100644 index 00000000..e927cd54 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/hiertest.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/menutest.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/menutest.cpython-36.pyc new file mode 100644 index 00000000..2db6a487 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/menutest.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/objdoc.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/objdoc.cpython-36.pyc new file mode 100644 index 00000000..3439825b Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/objdoc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/openGLDemo.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/openGLDemo.cpython-36.pyc new file mode 100644 index 00000000..f31ea9c3 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/openGLDemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/progressbar.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/progressbar.cpython-36.pyc new file mode 100644 index 00000000..f7b7d7df Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/progressbar.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/sliderdemo.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/sliderdemo.cpython-36.pyc new file mode 100644 index 00000000..b8bb7063 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/sliderdemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/splittst.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/splittst.cpython-36.pyc new file mode 100644 index 00000000..6b0a7252 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/splittst.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/threadedgui.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/threadedgui.cpython-36.pyc new file mode 100644 index 00000000..66afe018 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/threadedgui.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/toolbar.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/toolbar.cpython-36.pyc new file mode 100644 index 00000000..c0b764e3 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/__pycache__/toolbar.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/basictimerapp.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/basictimerapp.cpython-36.pyc new file mode 100644 index 00000000..81bddf69 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/basictimerapp.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/customprint.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/customprint.cpython-36.pyc new file mode 100644 index 00000000..b826f433 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/customprint.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/demoutils.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/demoutils.cpython-36.pyc new file mode 100644 index 00000000..b67da715 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/demoutils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/dlgappdemo.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/dlgappdemo.cpython-36.pyc new file mode 100644 index 00000000..60e47146 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/dlgappdemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/dojobapp.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/dojobapp.cpython-36.pyc new file mode 100644 index 00000000..283323da Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/dojobapp.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/helloapp.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/helloapp.cpython-36.pyc new file mode 100644 index 00000000..f0471a57 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/__pycache__/helloapp.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/basictimerapp.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/basictimerapp.py new file mode 100644 index 00000000..def9791a --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/basictimerapp.py @@ -0,0 +1,221 @@ +# basictimerapp - a really simple timer application. 
+# This should be run using the command line: +# pythonwin /app demos\basictimerapp.py +import win32ui +import win32api +import win32con +import sys +from pywin.framework import app, cmdline, dlgappcore, cmdline +import timer +import time +import string + +class TimerAppDialog(dlgappcore.AppDialog): + softspace=1 + def __init__(self, appName = ""): + dlgappcore.AppDialog.__init__(self, win32ui.IDD_GENERAL_STATUS) + self.timerAppName = appName + self.argOff = 0 + if len(self.timerAppName)==0: + if len(sys.argv)>1 and sys.argv[1][0]!='/': + self.timerAppName = sys.argv[1] + self.argOff = 1 + + def PreDoModal(self): +# sys.stderr = sys.stdout + pass + + def ProcessArgs(self, args): + for arg in args: + if arg=="/now": + self.OnOK() + + def OnInitDialog(self): + win32ui.SetProfileFileName('pytimer.ini') + self.title = win32ui.GetProfileVal(self.timerAppName, "Title", "Remote System Timer") + self.buildTimer = win32ui.GetProfileVal(self.timerAppName, "Timer", "EachMinuteIntervaler()") + self.doWork = win32ui.GetProfileVal(self.timerAppName, "Work", "DoDemoWork()") + # replace "\n" with real \n. + self.doWork = self.doWork.replace('\\n','\n') + dlgappcore.AppDialog.OnInitDialog(self) + + self.SetWindowText(self.title) + self.prompt1 = self.GetDlgItem(win32ui.IDC_PROMPT1) + self.prompt2 = self.GetDlgItem(win32ui.IDC_PROMPT2) + self.prompt3 = self.GetDlgItem(win32ui.IDC_PROMPT3) + self.butOK = self.GetDlgItem(win32con.IDOK) + self.butCancel = self.GetDlgItem(win32con.IDCANCEL) + self.prompt1.SetWindowText("Python Timer App") + self.prompt2.SetWindowText("") + self.prompt3.SetWindowText("") + self.butOK.SetWindowText("Do it now") + self.butCancel.SetWindowText("Close") + + self.timerManager = TimerManager(self) + self.ProcessArgs(sys.argv[self.argOff:]) + self.timerManager.go() + return 1 + + def OnDestroy(self,msg): + dlgappcore.AppDialog.OnDestroy(self, msg) + self.timerManager.stop() + def OnOK(self): + # stop the timer, then restart after setting special boolean + self.timerManager.stop() + self.timerManager.bConnectNow = 1 + self.timerManager.go() + return +# def OnCancel(self): default behaviour - cancel == close. +# return + +class TimerManager: + def __init__(self, dlg): + self.dlg = dlg + self.timerId = None + self.intervaler = eval(self.dlg.buildTimer) + self.bConnectNow = 0 + self.bHaveSetPrompt1 = 0 + def CaptureOutput(self): + self.oldOut = sys.stdout + self.oldErr = sys.stderr + sys.stdout = sys.stderr = self + self.bHaveSetPrompt1 = 0 + def ReleaseOutput(self): + sys.stdout = self.oldOut + sys.stderr = self.oldErr + def write(self, str): + s = str.strip() + if len(s): + if self.bHaveSetPrompt1: + dest = self.dlg.prompt3 + else: + dest = self.dlg.prompt1 + self.bHaveSetPrompt1 = 1 + dest.SetWindowText(s) + def go(self): + self.OnTimer(None,None) + def stop(self): + if self.timerId: timer.kill_timer (self.timerId) + self.timerId = None + + def OnTimer(self, id, timeVal): + if id: timer.kill_timer (id) + if self.intervaler.IsTime() or self.bConnectNow : + # do the work. 
+ try: + self.dlg.SetWindowText(self.dlg.title + " - Working...") + self.dlg.butOK.EnableWindow(0) + self.dlg.butCancel.EnableWindow(0) + self.CaptureOutput() + try: + exec(self.dlg.doWork) + print("The last operation completed successfully.") + except: + t, v, tb = sys.exc_info() + str = "Failed: %s: %s" % (t, repr(v)) + print(str) + self.oldErr.write(str) + tb = None # Prevent cycle + finally: + self.ReleaseOutput() + self.dlg.butOK.EnableWindow() + self.dlg.butCancel.EnableWindow() + self.dlg.SetWindowText(self.dlg.title) + else: + now = time.time() + nextTime = self.intervaler.GetNextTime() + if nextTime: + timeDiffSeconds = nextTime - now + timeDiffMinutes = int(timeDiffSeconds / 60) + timeDiffSeconds = timeDiffSeconds % 60 + timeDiffHours = int(timeDiffMinutes / 60) + timeDiffMinutes = timeDiffMinutes % 60 + self.dlg.prompt1.SetWindowText("Next connection due in %02d:%02d:%02d" % (timeDiffHours,timeDiffMinutes,timeDiffSeconds)) + self.timerId = timer.set_timer (self.intervaler.GetWakeupInterval(), self.OnTimer) + self.bConnectNow = 0 + +class TimerIntervaler: + def __init__(self): + self.nextTime = None + self.wakeUpInterval = 2000 + def GetWakeupInterval(self): + return self.wakeUpInterval + def GetNextTime(self): + return self.nextTime + def IsTime(self): + now = time.time() + if self.nextTime is None: + self.nextTime = self.SetFirstTime(now) + ret = 0 + if now >= self.nextTime: + ret = 1 + self.nextTime = self.SetNextTime(self.nextTime, now) + # do the work. + return ret + +class EachAnyIntervaler(TimerIntervaler): + def __init__(self, timeAt, timePos, timeAdd, wakeUpInterval = None): + TimerIntervaler.__init__(self) + self.timeAt = timeAt + self.timePos = timePos + self.timeAdd = timeAdd + if wakeUpInterval: + self.wakeUpInterval = wakeUpInterval + def SetFirstTime(self, now): + timeTup = time.localtime(now) + lst = [] + for item in timeTup: + lst.append(item) + bAdd = timeTup[self.timePos] > self.timeAt + lst[self.timePos] = self.timeAt + for pos in range(self.timePos+1, 6): + lst[pos]=0 + ret = time.mktime(tuple(lst)) + if (bAdd): + ret = ret + self.timeAdd + return ret; + + def SetNextTime(self, lastTime, now): + return lastTime + self.timeAdd + +class EachMinuteIntervaler(EachAnyIntervaler): + def __init__(self, at=0): + EachAnyIntervaler.__init__(self, at, 5, 60, 2000) + +class EachHourIntervaler(EachAnyIntervaler): + def __init__(self, at=0): + EachAnyIntervaler.__init__(self, at, 4, 3600, 10000) + +class EachDayIntervaler(EachAnyIntervaler): + def __init__(self,at=0): + EachAnyIntervaler.__init__(self, at, 3, 86400, 10000) + +class TimerDialogApp(dlgappcore.DialogApp): + def CreateDialog(self): + return TimerAppDialog() + +def DoDemoWork(): + print("Doing the work...") + print("About to connect") + win32api.MessageBeep(win32con.MB_ICONASTERISK) + win32api.Sleep(2000) + print("Doing something else...") + win32api.MessageBeep(win32con.MB_ICONEXCLAMATION) + win32api.Sleep(2000) + print("More work.") + win32api.MessageBeep(win32con.MB_ICONHAND) + win32api.Sleep(2000) + print("The last bit.") + win32api.MessageBeep(win32con.MB_OK) + win32api.Sleep(2000) + +app = TimerDialogApp() + +def t(): + t = TimerAppDialog("Test Dialog") + t.DoModal() + return t + +if __name__=='__main__': + import demoutils + demoutils.NeedApp() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/customprint.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/customprint.py new file mode 100644 index 00000000..ba8313b7 --- /dev/null +++ 
b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/customprint.py @@ -0,0 +1,194 @@ +# A demo of an Application object that has some custom print functionality. + +# If you desire, you can also run this from inside Pythonwin, in which +# case it will do the demo inside the Pythonwin environment. + +# This sample was contributed by Roger Burnham. + +from pywin.mfc import docview, dialog, afxres +from pywin.framework import app + +import win32con +import win32ui +import win32api + +PRINTDLGORD = 1538 +IDC_PRINT_MAG_EDIT = 1010 + + +class PrintDemoTemplate(docview.DocTemplate): + def _SetupSharedMenu_(self): + pass + +class PrintDemoView(docview.ScrollView): + + def OnInitialUpdate(self): + ret = self._obj_.OnInitialUpdate() + self.colors = {'Black' : (0x00<<0) + (0x00<<8) + (0x00<<16), + 'Red' : (0xff<<0) + (0x00<<8) + (0x00<<16), + 'Green' : (0x00<<0) + (0xff<<8) + (0x00<<16), + 'Blue' : (0x00<<0) + (0x00<<8) + (0xff<<16), + 'Cyan' : (0x00<<0) + (0xff<<8) + (0xff<<16), + 'Magenta': (0xff<<0) + (0x00<<8) + (0xff<<16), + 'Yellow' : (0xff<<0) + (0xff<<8) + (0x00<<16), + } + self.pens = {} + for name, color in self.colors.items(): + self.pens[name] = win32ui.CreatePen(win32con.PS_SOLID, + 5, color) + self.pen = None + self.size = (128,128) + self.SetScaleToFitSize(self.size) + self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT) + self.HookCommand(self.OnFilePrintPreview, + win32ui.ID_FILE_PRINT_PREVIEW) + return ret + + def OnDraw(self, dc): + oldPen = None + x,y = self.size + delta = 2 + colors = list(self.colors.keys()) + colors.sort() + colors = colors*2 + for color in colors: + if oldPen is None: + oldPen = dc.SelectObject(self.pens[color]) + else: + dc.SelectObject(self.pens[color]) + dc.MoveTo(( delta, delta)) + dc.LineTo((x-delta, delta)) + dc.LineTo((x-delta, y-delta)) + dc.LineTo(( delta, y-delta)) + dc.LineTo(( delta, delta)) + delta = delta + 4 + if x-delta <= 0 or y-delta <= 0: + break + dc.SelectObject(oldPen) + + def OnPrepareDC (self, dc, pInfo): + if dc.IsPrinting(): + mag = self.prtDlg['mag'] + dc.SetMapMode(win32con.MM_ANISOTROPIC); + dc.SetWindowOrg((0, 0)) + dc.SetWindowExt((1, 1)) + dc.SetViewportOrg((0, 0)) + dc.SetViewportExt((mag, mag)) + + def OnPreparePrinting(self, pInfo): + flags = (win32ui.PD_USEDEVMODECOPIES| + win32ui.PD_PAGENUMS| + win32ui.PD_NOPAGENUMS| + win32ui.PD_NOSELECTION) + self.prtDlg = ImagePrintDialog(pInfo, PRINTDLGORD, flags) + pInfo.SetPrintDialog(self.prtDlg) + pInfo.SetMinPage(1) + pInfo.SetMaxPage(1) + pInfo.SetFromPage(1) + pInfo.SetToPage(1) + ret = self.DoPreparePrinting(pInfo) + return ret + + def OnBeginPrinting(self, dc, pInfo): + return self._obj_.OnBeginPrinting(dc, pInfo) + + def OnEndPrinting(self, dc, pInfo): + del self.prtDlg + return self._obj_.OnEndPrinting(dc, pInfo) + + def OnFilePrintPreview(self, *arg): + self._obj_.OnFilePrintPreview() + + def OnFilePrint(self, *arg): + self._obj_.OnFilePrint() + + def OnPrint(self, dc, pInfo): + doc = self.GetDocument() + metrics = dc.GetTextMetrics() + cxChar = metrics['tmAveCharWidth'] + cyChar = metrics['tmHeight'] + left, top, right, bottom = pInfo.GetDraw() + dc.TextOut(0, 2*cyChar, doc.GetTitle()) + top = top + (7*cyChar)/2 + dc.MoveTo(left, top) + dc.LineTo(right, top) + top = top + cyChar + # this seems to have not effect... 
+ # get what I want with the dc.SetWindowOrg calls + pInfo.SetDraw((left, top, right, bottom)) + dc.SetWindowOrg((0, -top)) + + self.OnDraw(dc) + dc.SetTextAlign(win32con.TA_LEFT|win32con.TA_BOTTOM) + + rect = self.GetWindowRect() + rect = self.ScreenToClient(rect) + height = (rect[3]-rect[1]) + dc.SetWindowOrg((0, -(top+height+cyChar))) + dc.MoveTo(left, 0) + dc.LineTo(right, 0) + + x = 0 + y = (3*cyChar)/2 + + dc.TextOut(x, y, doc.GetTitle()) + y = y + cyChar + + +class PrintDemoApp(app.CApp): + def __init__(self): + app.CApp.__init__(self) + + def InitInstance(self): + template = PrintDemoTemplate(None, None, + None, PrintDemoView) + self.AddDocTemplate(template) + self._obj_.InitMDIInstance() + self.LoadMainFrame() + doc = template.OpenDocumentFile(None) + doc.SetTitle('Custom Print Document') + + +class ImagePrintDialog(dialog.PrintDialog): + + sectionPos = 'Image Print Demo' + + def __init__(self, pInfo, dlgID, flags=win32ui.PD_USEDEVMODECOPIES): + dialog.PrintDialog.__init__(self, pInfo, dlgID, flags=flags) + mag = win32ui.GetProfileVal(self.sectionPos, + 'Document Magnification', + 0) + if mag <= 0: + mag = 2 + win32ui.WriteProfileVal(self.sectionPos, + 'Document Magnification', + mag) + + self['mag'] = mag + + def OnInitDialog(self): + self.magCtl = self.GetDlgItem(IDC_PRINT_MAG_EDIT) + self.magCtl.SetWindowText(repr(self['mag'])) + return dialog.PrintDialog.OnInitDialog(self) + def OnOK(self): + dialog.PrintDialog.OnOK(self) + strMag = self.magCtl.GetWindowText() + try: + self['mag'] = int(strMag) + except: + pass + win32ui.WriteProfileVal(self.sectionPos, + 'Document Magnification', + self['mag']) + + +if __name__=='__main__': + # Running under Pythonwin + def test(): + template = PrintDemoTemplate(None, None, + None, PrintDemoView) + template.OpenDocumentFile(None) + test() +else: + app = PrintDemoApp() + diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/demoutils.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/demoutils.py new file mode 100644 index 00000000..8e27b0d1 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/demoutils.py @@ -0,0 +1,52 @@ +# Utilities for the demos + +import sys, win32api, win32con, win32ui + +NotScriptMsg = """\ +This demo program is not designed to be run as a Script, but is +probably used by some other test program. Please try another demo. +""" + +NeedGUIMsg = """\ +This demo program can only be run from inside of Pythonwin + +You must start Pythonwin, and select 'Run' from the toolbar or File menu +""" + + +NeedAppMsg = """\ +This demo program is a 'Pythonwin Application'. + +It is more demo code than an example of Pythonwin's capabilities. + +To run it, you must execute the command: +pythonwin.exe /app "%s" + +Would you like to execute it now? 
+""" + +def NotAScript(): + import win32ui + win32ui.MessageBox(NotScriptMsg, "Demos") + +def NeedGoodGUI(): + from pywin.framework.app import HaveGoodGUI + rc = HaveGoodGUI() + if not rc: + win32ui.MessageBox(NeedGUIMsg, "Demos") + return rc + +def NeedApp(): + import win32ui + rc = win32ui.MessageBox(NeedAppMsg % sys.argv[0], "Demos", win32con.MB_YESNO) + if rc==win32con.IDYES: + try: + parent = win32ui.GetMainFrame().GetSafeHwnd() + win32api.ShellExecute(parent, None, 'pythonwin.exe', '/app "%s"' % sys.argv[0], None, 1) + except win32api.error as details: + win32ui.MessageBox("Error executing command - %s" % (details), "Demos") + + +if __name__=='__main__': + import demoutils + demoutils.NotAScript() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/dlgappdemo.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/dlgappdemo.py new file mode 100644 index 00000000..e826e57b --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/dlgappdemo.py @@ -0,0 +1,46 @@ +# dlgappdemo - a demo of a dialog application. +# This is a demonstration of both a custom "application" module, +# and a Python program in a dialog box. +# +# NOTE: You CAN NOT import this module from either PythonWin or Python. +# This module must be specified on the commandline to PythonWin only. +# eg, PythonWin /app dlgappdemo.py + +from pywin.framework import dlgappcore, app +import win32ui +import sys + +class TestDialogApp(dlgappcore.DialogApp): + def CreateDialog(self): + return TestAppDialog() + + +class TestAppDialog(dlgappcore.AppDialog): + def __init__(self): + self.edit = None + dlgappcore.AppDialog.__init__(self, win32ui.IDD_LARGE_EDIT) + def OnInitDialog(self): + self.SetWindowText('Test dialog application') + self.edit = self.GetDlgItem(win32ui.IDC_EDIT1) + print("Hello from Python") + print("args are:", end=' ') + for arg in sys.argv: + print(arg) + return 1 + + def PreDoModal(self): + sys.stdout = sys.stderr = self + + def write(self, str): + if self.edit: + self.edit.SetSel(-2) + # translate \n to \n\r + self.edit.ReplaceSel(str.replace('\n','\r\n')) + else: + win32ui.OutputDebug("dlgapp - no edit control! >>\n%s\n<<\n" % str ) + +app.AppBuilder = TestDialogApp + +if __name__=='__main__': + import demoutils + demoutils.NeedApp() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/dojobapp.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/dojobapp.py new file mode 100644 index 00000000..bdd22cd9 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/dojobapp.py @@ -0,0 +1,62 @@ +# dojobapp - do a job, show the result in a dialog, and exit. +# +# Very simple - faily minimal dialog based app. +# +# This should be run using the command line: +# pythonwin /app demos\dojobapp.py + +import win32ui +import win32api +import win32con +import sys +from pywin.framework import app, dlgappcore +import string + +class DoJobAppDialog(dlgappcore.AppDialog): + softspace=1 + def __init__(self, appName = ""): + self.appName = appName + dlgappcore.AppDialog.__init__(self, win32ui.IDD_GENERAL_STATUS) + + def PreDoModal(self): + pass + + def ProcessArgs(self, args): + pass + + def OnInitDialog(self): + self.SetWindowText(self.appName) + butCancel = self.GetDlgItem(win32con.IDCANCEL) + butCancel.ShowWindow(win32con.SW_HIDE) + p1 = self.GetDlgItem(win32ui.IDC_PROMPT1) + p2 = self.GetDlgItem(win32ui.IDC_PROMPT2) + + # Do something here! 
+ + p1.SetWindowText("Hello there") + p2.SetWindowText("from the demo") + def OnDestroy(self,msg): + pass +# def OnOK(self): +# pass +# def OnCancel(self): default behaviour - cancel == close. +# return + +class DoJobDialogApp(dlgappcore.DialogApp): + def CreateDialog(self): + return DoJobAppDialog("Do Something") + +class CopyToDialogApp(DoJobDialogApp): + def __init__(self): + DoJobDialogApp.__init__(self) + +app.AppBuilder = DoJobDialogApp + +def t(): + t = DoJobAppDialog("Copy To") + t.DoModal() + return t + +if __name__=='__main__': + import demoutils + demoutils.NeedApp() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/app/helloapp.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/helloapp.py new file mode 100644 index 00000000..532f4c5f --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/app/helloapp.py @@ -0,0 +1,45 @@ +## +## helloapp.py +## +## +## A nice, small 'hello world' Pythonwin application. +## NOT an MDI application - just a single, normal, top-level window. +## +## MUST be run with the command line "pythonwin.exe /app helloapp.py" +## (or if you are really keen, rename "pythonwin.exe" to something else, then +## using MSVC or similar, edit the string section in the .EXE to name this file) +## +## Originally by Willy Heineman + + +import win32con +import win32ui +from pywin.mfc import window, dialog, afxres +from pywin.mfc.thread import WinApp + +# The main frame. +# Does almost nothing at all - doesnt even create a child window! +class HelloWindow(window.Wnd): + def __init__(self): + # The window.Wnd ctor creates a Window object, and places it in + # self._obj_. Note the window object exists, but the window itself + # does not! + window.Wnd.__init__(self, win32ui.CreateWnd()) + + # Now we ask the window object to create the window itself. + self._obj_.CreateWindowEx(win32con.WS_EX_CLIENTEDGE, \ + win32ui.RegisterWndClass(0, 0, win32con.COLOR_WINDOW + 1), \ + 'Hello World!', win32con.WS_OVERLAPPEDWINDOW, \ + (100, 100, 400, 300), None, 0, None) + +# The application object itself. +class HelloApp(WinApp): + + def InitInstance(self): + self.frame = HelloWindow() + self.frame.ShowWindow(win32con.SW_SHOWNORMAL) + # We need to tell MFC what our main frame is. + self.SetMainFrame(self.frame) + +# Now create the application object itself! +app = HelloApp() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/cmdserver.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/cmdserver.py new file mode 100644 index 00000000..ae43fa4b --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/cmdserver.py @@ -0,0 +1,108 @@ +# cmdserver.py + +# Demo code that is not Pythonwin related, but too good to throw away... + +import win32api +import sys +from pywin.framework import winout + +import _thread, sys + +import traceback + +class ThreadWriter: + "Assign an instance to sys.stdout for per-thread printing objects - Courtesy Guido!" 
+ def __init__(self): + "Constructor -- initialize the table of writers" + self.writers = {} + self.origStdOut = None + def register(self, writer): + "Register the writer for the current thread" + self.writers[_thread.get_ident()] = writer + if self.origStdOut is None: + self.origStdOut = sys.stdout + sys.stdout = self + + def unregister(self): + "Remove the writer for the current thread, if any" + try: + del self.writers[_thread.get_ident()] + except KeyError: + pass + if len(self.writers)==0: + sys.stdout = self.origStdOut + self.origStdOut = None + + def getwriter(self): + "Return the current thread's writer, default sys.stdout" + try: + return self.writers[_thread.get_ident()] + except KeyError: + return self.origStdOut + + def write(self, str): + "Write to the current thread's writer, default sys.stdout" + self.getwriter().write(str) + +def Test(): + num=1 + while num<1000: + print('Hello there no ' + str(num)) + win32api.Sleep(50) + num = num + 1 + +class flags: + SERVER_BEST = 0 + SERVER_IMMEDIATE = 1 + SERVER_THREAD = 2 + SERVER_PROCESS = 3 + +def StartServer( cmd, title=None, bCloseOnEnd=0, serverFlags = flags.SERVER_BEST ): + out = winout.WindowOutput( title, None, winout.flags.WQ_IDLE ) + if not title: + title=cmd + out.Create(title) +# ServerThread((out, cmd, title, bCloseOnEnd)) +# out = sys.stdout + _thread.start_new_thread( ServerThread, (out, cmd, title, bCloseOnEnd) ) + +def ServerThread(myout, cmd, title, bCloseOnEnd): + try: + writer.register(myout) + print('Executing "%s"\n' % cmd) + bOK = 1 + try: + import __main__ + exec (cmd+'\n', __main__.__dict__) + except: + bOK = 0 + if bOK: + print("Command terminated without errors.") + else: + t, v, tb = sys.exc_info() + print(t, ': ', v) + traceback.print_tb(tb) + tb = None # prevent a cycle + print("Command terminated with an unhandled exception") + writer.unregister() + if bOK and bCloseOnEnd: + myout.frame.DestroyWindow() + + # Unhandled exception of any kind in a thread kills the gui! + except: + t, v, tb = sys.exc_info() + print(t, ': ', v) + traceback.print_tb(tb) + tb = None + print("Thread failed") + +# assist for reloading (when debugging) - use only 1 tracer object, +# else a large chain of tracer objects will exist. +#try: +# writer +#except NameError: +# writer=ThreadWriter() +if __name__=='__main__': + import demoutils + demoutils.NotAScript() + diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/createwin.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/createwin.py new file mode 100644 index 00000000..f57dc881 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/createwin.py @@ -0,0 +1,98 @@ +# +# Window creation example +# +# This example creates a minimal "control" that just fills in its +# window with red. To make your own control, subclass Control and +# write your own OnPaint() method. See PyCWnd.HookMessage for what +# the parameters to OnPaint are. +# + +from pywin.mfc import dialog, window +import win32ui +import win32con +import win32api + +class Control(window.Wnd): + """Generic control class""" + def __init__ (self): + window.Wnd.__init__(self, win32ui.CreateWnd ()) + + def OnPaint (self): + dc, paintStruct = self.BeginPaint() + self.DoPaint(dc) + self.EndPaint(paintStruct) + + def DoPaint (self, dc): # Override this! 
+ pass + +class RedBox (Control): + def DoPaint (self, dc): + dc.FillSolidRect (self.GetClientRect(), win32api.RGB(255,0,0)) + + +class RedBoxWithPie (RedBox): + def DoPaint (self, dc): + RedBox.DoPaint(self, dc) + r = self.GetClientRect() + dc.Pie(r[0], r[1], r[2], r[3], 0,0,r[2], r[3]//2) + +def MakeDlgTemplate(): + style = (win32con.DS_MODALFRAME | + win32con.WS_POPUP | + win32con.WS_VISIBLE | + win32con.WS_CAPTION | + win32con.WS_SYSMENU | + win32con.DS_SETFONT) + cs = (win32con.WS_CHILD | + win32con.WS_VISIBLE) + + w = 64 + h = 64 + + dlg = [["Red box", + (0, 0, w, h), + style, + None, + (8, "MS Sans Serif")], + ] + + s = win32con.WS_TABSTOP | cs + + dlg.append([128, + "Cancel", + win32con.IDCANCEL, + (7, h - 18, 50, 14), s | win32con.BS_PUSHBUTTON]) + + return dlg + +class TestDialog(dialog.Dialog): + def OnInitDialog(self): + rc = dialog.Dialog.OnInitDialog(self) + self.redbox = RedBox () + self.redbox.CreateWindow (None, "RedBox", + win32con.WS_CHILD | + win32con.WS_VISIBLE, + (5, 5, 90, 68), + self, 1003) + return rc + +class TestPieDialog(dialog.Dialog): + def OnInitDialog(self): + rc = dialog.Dialog.OnInitDialog(self) + self.control = RedBoxWithPie() + self.control.CreateWindow (None, "RedBox with Pie", + win32con.WS_CHILD | + win32con.WS_VISIBLE, + (5, 5, 90, 68), + self, 1003) + +def demo(modal=0): + d = TestPieDialog (MakeDlgTemplate()) + if modal: + d.DoModal() + else: + d.CreateWindow() + +if __name__=='__main__': + demo(1) + diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/demoutils.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/demoutils.py new file mode 100644 index 00000000..389abae3 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/demoutils.py @@ -0,0 +1,54 @@ +# Utilities for the demos + +import sys, win32api, win32con, win32ui + +NotScriptMsg = """\ +This demo program is not designed to be run as a Script, but is +probably used by some other test program. Please try another demo. +""" + +NeedGUIMsg = """\ +This demo program can only be run from inside of Pythonwin + +You must start Pythonwin, and select 'Run' from the toolbar or File menu +""" + + +NeedAppMsg = """\ +This demo program is a 'Pythonwin Application'. + +It is more demo code than an example of Pythonwin's capabilities. + +To run it, you must execute the command: +pythonwin.exe /app "%s" + +Would you like to execute it now? 
+""" + +def NotAScript(): + import win32ui + win32ui.MessageBox(NotScriptMsg, "Demos") + +def NeedGoodGUI(): + from pywin.framework.app import HaveGoodGUI + rc = HaveGoodGUI() + if not rc: + win32ui.MessageBox(NeedGUIMsg, "Demos") + return rc + +def NeedApp(): + import win32ui + rc = win32ui.MessageBox(NeedAppMsg % sys.argv[0], "Demos", win32con.MB_YESNO) + if rc==win32con.IDYES: + try: + parent = win32ui.GetMainFrame().GetSafeHwnd() + win32api.ShellExecute(parent, None, 'pythonwin.exe', '/app "%s"' % sys.argv[0], None, 1) + except win32api.error as details: + win32ui.MessageBox("Error executing command - %s" % (details), "Demos") + + +from pywin.framework.app import HaveGoodGUI + +if __name__=='__main__': + import demoutils + demoutils.NotAScript() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/dibdemo.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/dibdemo.py new file mode 100644 index 00000000..46d5ec5e --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/dibdemo.py @@ -0,0 +1,69 @@ +# A demo which creates a view and a frame which displays a PPM format bitmap +# +# This hasnnt been run in a while, as I dont have many of that format around! +import win32ui +import win32con +import win32api +import string + +class DIBView: + def __init__(self, doc, dib): + self.dib = dib + self.view = win32ui.CreateView(doc) + self.width = self.height = 0 + # set up message handlers +# self.view.OnPrepareDC = self.OnPrepareDC + self.view.HookMessage (self.OnSize, win32con.WM_SIZE) + + def OnSize (self, params): + lParam = params[3] + self.width = win32api.LOWORD(lParam) + self.height = win32api.HIWORD(lParam) + + def OnDraw (self, ob, dc): + # set sizes used for "non strecth" mode. + self.view.SetScrollSizes(win32con.MM_TEXT, self.dib.GetSize()) + dibSize = self.dib.GetSize() + dibRect = (0,0,dibSize[0], dibSize[1]) + # stretch BMP. + #self.dib.Paint(dc, (0,0,self.width, self.height),dibRect) + # non stretch. + self.dib.Paint(dc) + +class DIBDemo: + def __init__(self, filename, * bPBM): + # init data members + f = open(filename, 'rb') + dib=win32ui.CreateDIBitmap() + if len(bPBM)>0: + magic=f.readline() + if magic != "P6\n": + print("The file is not a PBM format file") + raise ValueError("Failed - The file is not a PBM format file") + # check magic? + rowcollist=f.readline().split() + cols=int(rowcollist[0]) + rows=int(rowcollist[1]) + f.readline() # whats this one? + dib.LoadPBMData(f,(cols,rows)) + else: + dib.LoadWindowsFormatFile(f) + f.close() + # create doc/view + self.doc = win32ui.CreateDoc() + self.dibView = DIBView( self.doc, dib ) + self.frame = win32ui.CreateMDIFrame() + self.frame.LoadFrame() # this will force OnCreateClient + self.doc.SetTitle ('DIB Demo') + self.frame.ShowWindow() + + # display the sucka + self.frame.ActivateFrame() + + def OnCreateClient( self, createparams, context ): + self.dibView.view.CreateWindow(self.frame) + return 1 + +if __name__=='__main__': + import demoutils + demoutils.NotAScript() \ No newline at end of file diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/dlgtest.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/dlgtest.py new file mode 100644 index 00000000..81c17355 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/dlgtest.py @@ -0,0 +1,137 @@ +# A Demo of Pythonwin's Dialog and Property Page support. + +################### +# +# First demo - use the built-in to Pythonwin "Tab Stop" dialog, but +# customise it heavily. +# +# ID's for the tabstop dialog - out test. 
+# +from win32ui import IDD_SET_TABSTOPS +from win32ui import IDC_EDIT_TABS +from win32ui import IDC_PROMPT_TABS +from win32con import IDOK +from win32con import IDCANCEL + +import win32ui +import win32con + +from pywin.mfc import dialog + +class TestDialog(dialog.Dialog): + def __init__(self, modal=1): + dialog.Dialog.__init__(self, IDD_SET_TABSTOPS) + self.counter=0 + if modal: + self.DoModal() + else: + self.CreateWindow() + + def OnInitDialog(self): + # Set the caption of the dialog itself. + self.SetWindowText("Used to be Tab Stops!") + # Get a child control, remember it, and change its text. + self.edit=self.GetDlgItem(IDC_EDIT_TABS) # the text box. + self.edit.SetWindowText("Test") + # Hook a Windows message for the dialog. + self.edit.HookMessage(self.KillFocus, win32con.WM_KILLFOCUS) + # Get the prompt control, and change its next. + prompt=self.GetDlgItem(IDC_PROMPT_TABS) # the prompt box. + prompt.SetWindowText("Prompt") + # And the same for the button.. + cancel=self.GetDlgItem(IDCANCEL) # the cancel button + cancel.SetWindowText("&Kill me") + + # And just for demonstration purposes, we hook the notify message for the dialog. + # This allows us to be notified when the Edit Control text changes. + self.HookCommand(self.OnNotify, IDC_EDIT_TABS) + + def OnNotify(self, controlid, code): + if code==win32con.EN_CHANGE: + print("Edit text changed!") + return 1 # I handled this, so no need to call defaults! + + # kill focus for the edit box. + # Simply increment the value in the text box. + def KillFocus(self,msg): + self.counter=self.counter+1 + if self.edit != None: + self.edit.SetWindowText(str(self.counter)) + + # Called when the dialog box is terminating... + def OnDestroy(self,msg): + del self.edit + del self.counter + +# A very simply Property Sheet. +# We only make a new class for demonstration purposes. +class TestSheet(dialog.PropertySheet): + def __init__(self, title): + dialog.PropertySheet.__init__(self, title) + self.HookMessage(self.OnActivate, win32con.WM_ACTIVATE) + def OnActivate(self, msg): + pass + +# A very simply Property Page, which will be "owned" by the above +# Property Sheet. +# We create a new class, just so we can hook a control notification. +class TestPage(dialog.PropertyPage): + def OnInitDialog(self): + # We use the HookNotify function to allow Python to respond to + # Windows WM_NOTIFY messages. + # In this case, we are interested in BN_CLICKED messages. + self.HookNotify(self.OnNotify, win32con.BN_CLICKED) + def OnNotify(self, std, extra): + print("OnNotify", std, extra) + +# Some code that actually uses these objects. +def demo(modal = 0): + TestDialog(modal) + + # property sheet/page demo + ps=win32ui.CreatePropertySheet('Property Sheet/Page Demo') + # Create a completely standard PropertyPage. + page1=win32ui.CreatePropertyPage(win32ui.IDD_PROPDEMO1) + # Create our custom property page. 
+ page2=TestPage(win32ui.IDD_PROPDEMO2) + ps.AddPage(page1) + ps.AddPage(page2) + if modal: + ps.DoModal() + else: + style = win32con.WS_SYSMENU|win32con.WS_POPUP|win32con.WS_CAPTION|win32con.DS_MODALFRAME|win32con.WS_VISIBLE + styleex = win32con.WS_EX_DLGMODALFRAME | win32con.WS_EX_PALETTEWINDOW + ps.CreateWindow(win32ui.GetMainFrame(), style, styleex) + + +def test(modal=1): + +# dlg=dialog.Dialog(1010) +# dlg.CreateWindow() +# dlg.EndDialog(0) +# del dlg +# return + # property sheet/page demo + ps=TestSheet('Property Sheet/Page Demo') + page1=win32ui.CreatePropertyPage(win32ui.IDD_PROPDEMO1) + page2=win32ui.CreatePropertyPage(win32ui.IDD_PROPDEMO2) + ps.AddPage(page1) + ps.AddPage(page2) + del page1 + del page2 + if modal: + ps.DoModal() + else: + ps.CreateWindow(win32ui.GetMainFrame()) + return ps + +def d(): + dlg = win32ui.CreateDialog(win32ui.IDD_DEBUGGER) + dlg.datalist.append((win32ui.IDC_DBG_RADIOSTACK, "radio")) + print("data list is ", dlg.datalist) + dlg.data['radio']=1 + dlg.DoModal() + print(dlg.data['radio']) + +if __name__=='__main__': + demo(1) diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/dyndlg.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/dyndlg.py new file mode 100644 index 00000000..5d36690e --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/dyndlg.py @@ -0,0 +1,73 @@ +# dyndlg.py +# contributed by Curt Hagenlocher + +# Dialog Template params: +# Parameter 0 - Window caption +# Parameter 1 - Bounds (rect tuple) +# Parameter 2 - Window style +# Parameter 3 - Extended style +# Parameter 4 - Font tuple +# Parameter 5 - Menu name +# Parameter 6 - Window class +# Dialog item params: +# Parameter 0 - Window class +# Parameter 1 - Text +# Parameter 2 - ID +# Parameter 3 - Bounds +# Parameter 4 - Style +# Parameter 5 - Extended style +# Parameter 6 - Extra data + + +import win32ui +import win32con +from pywin.mfc import dialog, window + +def MakeDlgTemplate(): + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + cs = win32con.WS_CHILD | win32con.WS_VISIBLE + dlg = [ ["Select Warehouse", (0, 0, 177, 93), style, None, (8, "MS Sans Serif")], ] + dlg.append([130, "Current Warehouse:", -1, (7, 7, 69, 9), cs | win32con.SS_LEFT]) + dlg.append([130, "ASTORIA", 128, (16, 17, 99, 7), cs | win32con.SS_LEFT]) + dlg.append([130, "New &Warehouse:", -1, (7, 29, 69, 9), cs | win32con.SS_LEFT]) + s = win32con.WS_TABSTOP | cs +# dlg.append([131, None, 130, (5, 40, 110, 48), +# s | win32con.LBS_NOTIFY | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL | win32con.WS_BORDER]) + dlg.append(["{8E27C92B-1264-101C-8A2F-040224009C02}", None, 131, (5, 40, 110, 48),win32con.WS_TABSTOP]) + + dlg.append([128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON]) + s = win32con.BS_PUSHBUTTON | s + dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 22, 50, 14), s]) + dlg.append([128, "&Help", 100, (124, 74, 50, 14), s]) + + return dlg + +def test1(): + win32ui.CreateDialogIndirect( MakeDlgTemplate() ).DoModal() + +def test2(): + dialog.Dialog( MakeDlgTemplate() ).DoModal() + +def test3(): + dlg = win32ui.LoadDialogResource(win32ui.IDD_SET_TABSTOPS) + dlg[0][0] = 'New Dialog Title' + dlg[0][1] = (80, 20, 161, 60) + dlg[1][1] = '&Confusion:' + cs = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON + dlg.append([128, "&Help", 100, (111, 41, 40, 14), cs]) + dialog.Dialog( dlg ).DoModal() + +def test4(): + 
page1=dialog.PropertyPage(win32ui.LoadDialogResource(win32ui.IDD_PROPDEMO1)) + page2=dialog.PropertyPage(win32ui.LoadDialogResource(win32ui.IDD_PROPDEMO2)) + ps=dialog.PropertySheet('Property Sheet/Page Demo', None, [page1, page2]) + ps.DoModal() + +def testall(): + test1() + test2() + test3() + test4() + +if __name__=='__main__': + testall() \ No newline at end of file diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/fontdemo.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/fontdemo.py new file mode 100644 index 00000000..7dd0ed7e --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/fontdemo.py @@ -0,0 +1,79 @@ +# Demo of Generic document windows, DC, and Font usage +# by Dave Brennan (brennan@hal.com) + +# usage examples: + +# >>> from fontdemo import * +# >>> d = FontDemo('Hello, Python') +# >>> f1 = { 'name':'Arial', 'height':36, 'weight':win32con.FW_BOLD} +# >>> d.SetFont(f1) +# >>> f2 = {'name':'Courier New', 'height':24, 'italic':1} +# >>> d.SetFont (f2) + +import win32ui +import win32con +import win32api + +from pywin.mfc import docview + + +# font is a dictionary in which the following elements matter: +# (the best matching font to supplied parameters is returned) +# name string name of the font as known by Windows +# size point size of font in logical units +# weight weight of font (win32con.FW_NORMAL, win32con.FW_BOLD) +# italic boolean; true if set to anything but None +# underline boolean; true if set to anything but None + +class FontView(docview.ScrollView): + def __init__(self, doc, text = 'Python Rules!', font_spec = {'name':'Arial', 'height':42}): + docview.ScrollView.__init__(self, doc) + self.font = win32ui.CreateFont (font_spec) + self.text = text + self.width = self.height = 0 + # set up message handlers + self.HookMessage (self.OnSize, win32con.WM_SIZE) + def OnAttachedObjectDeath(self): + docview.ScrollView.OnAttachedObjectDeath(self) + del self.font + + def SetFont (self, new_font): + # Change font on the fly + self.font = win32ui.CreateFont (new_font) + # redraw the entire client window + selfInvalidateRect (None) + def OnSize (self, params): + lParam = params[3] + self.width = win32api.LOWORD(lParam) + self.height = win32api.HIWORD(lParam) + + def OnPrepareDC (self, dc, printinfo): + # Set up the DC for forthcoming OnDraw call + self.SetScrollSizes(win32con.MM_TEXT, (100,100)) + dc.SetTextColor (win32api.RGB(0,0,255)) + dc.SetBkColor (win32api.GetSysColor (win32con.COLOR_WINDOW)) + dc.SelectObject (self.font) + dc.SetTextAlign (win32con.TA_CENTER | win32con.TA_BASELINE) + + def OnDraw (self, dc): + if (self.width == 0 and self.height == 0): + left, top, right, bottom = self.GetClientRect() + self.width = right - left + self.height = bottom - top + x, y = self.width // 2, self.height // 2 + dc.TextOut (x, y, self.text) + +def FontDemo(): + # create doc/view + template = docview.DocTemplate(win32ui.IDR_PYTHONTYPE, None, None, FontView) + doc=template.OpenDocumentFile(None) + doc.SetTitle ('Font Demo') +# print "template is ", template, "obj is", template._obj_ + template.close() +# print "closed" +# del template + +if __name__=='__main__': + import demoutils + if demoutils.NeedGoodGUI(): + FontDemo() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/guidemo.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/guidemo.py new file mode 100644 index 00000000..3a0c8e37 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/guidemo.py @@ -0,0 +1,68 @@ +# GUI Demo - just a worker script to invoke all the other demo/test scripts. 
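
dyndlg.py above documents the in-memory dialog template format: element 0 describes the dialog itself (caption, bounds, style, extended style, font) and every following element describes one control (window class, text, ID, bounds, style). A hedged sketch of building such a template with the same 128 (button) and 130 (static text) window-class atoms the demos use; the caption, text and geometry here are illustrative:

import win32con

def make_hello_template():
    """Return a minimal dialog template list in the format dyndlg.py documents."""
    style = (win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE |
             win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT)
    child = win32con.WS_CHILD | win32con.WS_VISIBLE
    # Item 0: the dialog - caption, bounds, style, extended style, font.
    template = [["Hello template", (0, 0, 120, 50), style, None,
                 (8, "MS Sans Serif")]]
    # A static text control (class 130) and a default OK button (class 128).
    template.append([130, "Built from a plain Python list", -1,
                     (7, 7, 106, 9), child | win32con.SS_LEFT])
    template.append([128, "OK", win32con.IDOK, (35, 30, 50, 14),
                     child | win32con.WS_TABSTOP | win32con.BS_DEFPUSHBUTTON])
    return template

# As in dyndlg.test2(), such a list can be shown under Pythonwin with:
#   from pywin.mfc import dialog
#   dialog.Dialog(make_hello_template()).DoModal()
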
+import win32ui +import __main__ +import sys +import regutil +import win32api + +demos = [ \ +# ('Font', 'import fontdemo;fontdemo.FontDemo()'), + ('Open GL Demo', 'import openGLDemo;openGLDemo.test()'), + ('Threaded GUI', 'import threadedgui;threadedgui.ThreadedDemo()'), + ('Tree View Demo', 'import hiertest;hiertest.demoboth()'), + ('3-Way Splitter Window', 'import splittst;splittst.demo()'), + ('Custom Toolbars and Tooltips', 'import toolbar;toolbar.test()'), + ('Progress Bar', 'import progressbar;progressbar.demo()'), + ('Slider Control', 'import sliderdemo;sliderdemo.demo()'), + ('Dynamic window creation', 'import createwin;createwin.demo()'), + ('Various Dialog demos', 'import dlgtest;dlgtest.demo()'), + ('OCX Control Demo', 'from ocx import ocxtest;ocxtest.demo()'), + ('OCX Serial Port Demo', 'from ocx import ocxserialtest; ocxserialtest.test()'), + ('IE4 Control Demo', 'from ocx import webbrowser; webbrowser.Demo("http://www.python.org")'), +] + +def demo(): + try: + # seeif I can locate the demo files. + import fontdemo + except ImportError: + # else put the demos direectory on the path (if not already) + try: + instPath = regutil.GetRegistryDefaultValue(regutil.BuildDefaultPythonKey() + "\\InstallPath") + except win32api.error: + print("The InstallPath can not be located, and the Demos directory is not on the path") + instPath="." + + demosDir = win32ui.FullPath(instPath + "\\Demos") + for path in sys.path: + if win32ui.FullPath(path)==demosDir: + break + else: + sys.path.append(demosDir) + import fontdemo + + import sys + if "/go" in sys.argv: + for name, cmd in demos: + try: + exec(cmd) + except: + print("Demo of %s failed - %s:%s" % (cmd,sys.exc_info()[0], sys.exc_info()[1])) + return + # Otherwise allow the user to select the demo to run + + import pywin.dialogs.list + while 1: + rc = pywin.dialogs.list.SelectFromLists( "Select a Demo", demos, ['Demo Title'] ) + if rc is None: + break + title, cmd = demos[rc] + try: + exec(cmd) + except: + print("Demo of %s failed - %s:%s" % (title,sys.exc_info()[0], sys.exc_info()[1])) + +if __name__==__main__.__name__: + import demoutils + if demoutils.NeedGoodGUI(): + demo() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/hiertest.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/hiertest.py new file mode 100644 index 00000000..aaeb1cff --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/hiertest.py @@ -0,0 +1,104 @@ +import win32ui +import os +import commctrl + +from pywin.tools import hierlist +from pywin.mfc import docview, window + +# directory listbox +# This has obvious limitations - doesnt track subdirs, etc. Demonstrates +# simple use of Python code for querying the tree as needed. +# Only use strings, and lists of strings (from curdir()) +class DirHierList(hierlist.HierList): + def __init__(self, root, listBoxID = win32ui.IDC_LIST1): + hierlist.HierList.__init__(self, root, win32ui.IDB_HIERFOLDERS, listBoxID) + def GetText(self, item): + return os.path.basename(item) + def GetSubList(self, item): + if os.path.isdir(item): + ret = [os.path.join(item, fname) for fname in os.listdir(item)] + else: + ret = None + return ret + # if the item is a dir, it is expandable. 
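
DirHierList above answers the tree control's queries lazily - GetText for the label, GetSubList for the children, and the expandability test mentioned in the comment - instead of building the whole tree up front. A GUI-free sketch of that same three-method protocol over the file system (standard library only; LazyDirNode and print_tree are illustrative names):

import os

class LazyDirNode:
    """Same three-method protocol the Pythonwin HierList classes rely on."""
    def __init__(self, path):
        self.path = path

    def GetText(self):
        return os.path.basename(self.path) or self.path

    def IsExpandable(self):
        return os.path.isdir(self.path)

    def GetSubList(self):
        # Children are only computed when the node is actually expanded.
        return [LazyDirNode(os.path.join(self.path, name))
                for name in sorted(os.listdir(self.path))]

def print_tree(node, depth=0, max_depth=1):
    print("  " * depth + node.GetText())
    if node.IsExpandable() and depth < max_depth:
        for child in node.GetSubList():
            print_tree(child, depth + 1, max_depth)

if __name__ == '__main__':
    print_tree(LazyDirNode("."))
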
+ def IsExpandable(self, item): + return os.path.isdir(item) + def GetSelectedBitmapColumn(self, item): + return self.GetBitmapColumn(item)+6 # Use different color for selection + +class TestDocument(docview.Document): + def __init__(self, template): + docview.Document.__init__(self, template) + self.hierlist = hierlist.HierListWithItems(HLIFileDir("\\"), win32ui.IDB_HIERFOLDERS, win32ui.AFX_IDW_PANE_FIRST) + +class HierListView(docview.TreeView): + def OnInitialUpdate(self): + rc = self._obj_.OnInitialUpdate() + self.hierList = self.GetDocument().hierlist + self.hierList.HierInit(self.GetParent()) + self.hierList.SetStyle(commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS) + return rc + +class HierListFrame(window.MDIChildWnd): + pass + +def GetTestRoot(): + tree1 = ('Tree 1',[('Item 1','Item 1 data'),'Item 2',3]) + tree2 = ('Tree 2',[('Item 2.1','Item 2 data'),'Item 2.2',2.3]) + return ('Root',[tree1,tree2,'Item 3']) + +def demoboth(): + template = docview.DocTemplate(win32ui.IDR_PYTHONTYPE, TestDocument, HierListFrame, HierListView) + template.OpenDocumentFile(None).SetTitle("Hierlist demo") + + demomodeless() + +def demomodeless(): + testList2=DirHierList("\\") + dlg=hierlist.HierDialog('hier list test',testList2) + dlg.CreateWindow() + +def demodlg (): + testList2=DirHierList("\\") + dlg=hierlist.HierDialog('hier list test',testList2) + dlg.DoModal() + +def demo(): + template = docview.DocTemplate(win32ui.IDR_PYTHONTYPE, TestDocument, HierListFrame, HierListView) + template.OpenDocumentFile(None).SetTitle("Hierlist demo") + +# +# Demo/Test for HierList items. +# +# Easy to make a better directory program. +# +class HLIFileDir(hierlist.HierListItem): + def __init__( self, filename ): + self.filename = filename + hierlist.HierListItem.__init__(self) + def GetText(self): + try: + return "%-20s %d bytes" % (os.path.basename(self.filename), os.stat(self.filename)[6]) + except os.error as details: + return "%-20s - %s" % (self.filename, details[1]) + + def IsExpandable(self): + return os.path.isdir(self.filename) + def GetSubList(self): + ret = [] + for newname in os.listdir(self.filename): + if newname not in ['.', '..']: + ret.append( HLIFileDir( os.path.join(self.filename,newname ) ) ) + return ret + + +def demohli(): + template = docview.DocTemplate(win32ui.IDR_PYTHONTYPE, TestDocument, hierlist.HierListFrame, hierlist.HierListView) + template.OpenDocumentFile(None).SetTitle("Hierlist demo") + +if __name__=='__main__': + import demoutils + if demoutils.HaveGoodGUI(): + demoboth() + else: + demodlg() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/menutest.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/menutest.py new file mode 100644 index 00000000..2082f1ff --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/menutest.py @@ -0,0 +1,12 @@ +# Run this as a python script, to gray "close" off the edit window system menu. 
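
HLIFileDir.GetText above formats each entry as a padded name plus its size from os.stat(), falling back to the error text when stat() fails. A standard-library sketch of that formatting; note that the original's details[1] indexing of the exception only worked on Python 2, so the sketch uses the Python 3 attribute .strerror (the function name is illustrative):

import os

def describe_path(path):
    """Return 'name<padded> N bytes', or the error text if stat() fails."""
    try:
        return "%-20s %d bytes" % (os.path.basename(path), os.stat(path).st_size)
    except OSError as details:
        return "%-20s - %s" % (path, details.strerror)

if __name__ == '__main__':
    print(describe_path(__file__))        # an existing file
    print(describe_path("no-such-file"))  # the error fallback
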
+from pywin.framework import interact +import win32con + +if __name__=='__main__': + import demoutils + if demoutils.NeedGoodGUI(): + win=interact.edit.currentView.GetParent() + menu=win.GetSystemMenu() + id=menu.GetMenuItemID(6) + menu.EnableMenuItem(id,win32con.MF_BYCOMMAND|win32con.MF_GRAYED) + print("The interactive window's 'Close' menu item is now disabled.") diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/objdoc.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/objdoc.py new file mode 100644 index 00000000..1cdd8ef2 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/objdoc.py @@ -0,0 +1,49 @@ +# This is a sample file, and shows the basic framework for using an "Object" based +# document, rather than a "filename" based document. +# This is referenced by the Pythonwin .html documentation. + +# In the example below, the OpenObject() method is used instead of OpenDocumentFile, +# and all the core MFC document open functionality is retained. + +import win32ui +from pywin.mfc import docview + +class object_template (docview.DocTemplate): + def __init__(self): + docview.DocTemplate.__init__(self, None, None, None, object_view) + def OpenObject(self, object): # Use this instead of OpenDocumentFile. + # Look for existing open document + for doc in self.GetDocumentList(): + print("document is ", doc) + if doc.object is object: + doc.GetFirstView().ActivateFrame() + return doc + # not found - new one. + doc = object_document(self, object) + frame = self.CreateNewFrame(doc) + doc.OnNewDocument() + doc.SetTitle(str(object)) + self.InitialUpdateFrame(frame, doc) + return doc + +class object_document (docview.Document): + def __init__(self, template, object): + docview.Document.__init__(self, template) + self.object = object + def OnOpenDocument (self, name): + raise RuntimeError("Should not be called if template strings set up correctly") + return 0 + +class object_view (docview.EditView): + def OnInitialUpdate (self): + self.ReplaceSel("Object is %s" % repr(self.GetDocument().object)) + +def demo (): + t = object_template() + d = t.OpenObject(win32ui) + return (t, d) + +if __name__=='__main__': + import demoutils + if demoutils.NeedGoodGUI(): + demo() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..be443c6d Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/demoutils.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/demoutils.cpython-36.pyc new file mode 100644 index 00000000..bf40ed03 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/demoutils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/flash.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/flash.cpython-36.pyc new file mode 100644 index 00000000..3ced2408 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/flash.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/msoffice.cpython-36.pyc 
b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/msoffice.cpython-36.pyc new file mode 100644 index 00000000..d52db8cf Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/msoffice.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/ocxserialtest.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/ocxserialtest.cpython-36.pyc new file mode 100644 index 00000000..ea3f3c36 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/ocxserialtest.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/ocxtest.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/ocxtest.cpython-36.pyc new file mode 100644 index 00000000..bb15e118 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/ocxtest.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/webbrowser.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/webbrowser.cpython-36.pyc new file mode 100644 index 00000000..1fc2703a Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/__pycache__/webbrowser.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/demoutils.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/demoutils.py new file mode 100644 index 00000000..89bb552a --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/demoutils.py @@ -0,0 +1,54 @@ +# Utilities for the demos + +import sys, win32api, win32con, win32ui + +NotScriptMsg = """\ +This demo program is not designed to be run as a Script, but is +probably used by some other test program. Please try another demo. +""" + +NeedGUIMsg = """\ +This demo program can only be run from inside of Pythonwin + +You must start Pythonwin, and select 'Run' from the toolbar or File menu +""" + + +NeedAppMsg = """\ +This demo program is a 'Pythonwin Application'. + +It is more demo code than an example of Pythonwin's capabilities. + +To run it, you must execute the command: +pythonwin.exe /app "%s" + +Would you like to execute it now? +""" + +def NotAScript(): + import win32ui + win32ui.MessageBox(NotScriptMsg, "Demos") + +def NeedGoodGUI(): + from pywin.framework.app import HaveGoodGUI + rc = HaveGoodGUI() + if not rc: + win32ui.MessageBox(NeedGUIMsg, "Demos") + return rc + +def NeedApp(): + import win32ui + rc = win32ui.MessageBox(NeedAppMsg % sys.argv[0], "Demos", win32con.MB_YESNO) + if rc==win32con.IDYES: + try: + parent = win32ui.GetMainFrame().GetSafeHwnd() + win32api.ShellExecute(parent, None, 'pythonwin.exe', '/app "%s"' % sys.argv[0], None, 1) + except win32api.error as details: + win32ui.MessageBox("Error executing command - %s" % (details), "Demos") + + +from pywin.framework.app import HaveGoodGUI + +if __name__=='__main__': + from . import demoutils + demoutils.NotAScript() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/flash.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/flash.py new file mode 100644 index 00000000..dd4fdde4 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/flash.py @@ -0,0 +1,84 @@ +# By Bradley Schatz +# simple flash/python application demonstrating bidirectional +# communicaion between flash and python. Click the sphere to see +# behavior. 
Uses Bounce.swf from FlashBounce.zip, available from +# http://pages.cpsc.ucalgary.ca/~saul/vb_examples/tutorial12/ + +# Update to the path of the .swf file (note it could be a true URL) +flash_url = "c:\\bounce.swf" + +import win32ui, win32con, win32api, regutil +from pywin.mfc import window, activex +from win32com.client import gencache +import sys + +FlashModule = gencache.EnsureModule("{D27CDB6B-AE6D-11CF-96B8-444553540000}", 0, 1, 0) + +if FlashModule is None: + raise ImportError("Flash does not appear to be installed.") + +class MyFlashComponent(activex.Control, FlashModule.ShockwaveFlash): + def __init__(self): + activex.Control.__init__(self) + FlashModule.ShockwaveFlash.__init__(self) + self.x = 50 + self.y = 50 + self.angle = 30 + self.started = 0 + + def OnFSCommand(self, command, args): + print("FSCommend" , command, args) + self.x = self.x + 20 + self.y = self.y + 20 + self.angle = self.angle + 20 + if self.x > 200 or self.y > 200: + self.x = 0 + self.y = 0 + if self.angle > 360: + self.angle = 0 + self.SetVariable("xVal", self.x) + self.SetVariable("yVal", self.y) + self.SetVariable("angle", self.angle) + self.TPlay("_root.mikeBall") + + def OnProgress(self, percentDone): + print("PercentDone", percentDone) + def OnReadyStateChange(self, newState): + # 0=Loading, 1=Uninitialized, 2=Loaded, 3=Interactive, 4=Complete + print("State", newState) + + +class BrowserFrame(window.MDIChildWnd): + def __init__(self, url = None): + if url is None: + self.url = regutil.GetRegisteredHelpFile("Main Python Documentation") + else: + self.url = url + pass # Dont call base class doc/view version... + def Create(self, title, rect = None, parent = None): + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW + self._obj_ = win32ui.CreateMDIChild() + self._obj_.AttachObject(self) + self._obj_.CreateWindow(None, title, style, rect, parent) + rect = self.GetClientRect() + rect = (0,0,rect[2]-rect[0], rect[3]-rect[1]) + self.ocx = MyFlashComponent() + self.ocx.CreateControl("Flash Player", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000) + self.ocx.LoadMovie(0,flash_url) + self.ocx.Play() + self.HookMessage (self.OnSize, win32con.WM_SIZE) + + def OnSize (self, params): + rect = self.GetClientRect() + rect = (0,0,rect[2]-rect[0], rect[3]-rect[1]) + self.ocx.SetWindowPos(0, rect, 0) + +def Demo(): + url = None + if len(sys.argv)>1: + url = win32api.GetFullPathName(sys.argv[1]) + f = BrowserFrame(url) + f.Create("Flash Player") + +if __name__=='__main__': + Demo() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/msoffice.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/msoffice.py new file mode 100644 index 00000000..8ec42ff4 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/msoffice.py @@ -0,0 +1,127 @@ +# This demo uses some of the Microsoft Office components. +# +# It was taken from an MSDN article showing how to embed excel. +# It is not comlpete yet, but it _does_ show an Excel spreadsheet in a frame! +# + +import win32ui, win32uiole, win32con, regutil +from pywin.mfc import window, activex, object, docview +from win32com.client import gencache + +#WordModule = gencache.EnsureModule('{00020905-0000-0000-C000-000000000046}', 1033, 8, 0) +#if WordModule is None: +# raise ImportError, "Microsoft Word version 8 does not appear to be installed." 
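
flash.py above, the commented-out Word check here, and the OCX demos that follow all guard COM access the same way: win32com.client.gencache.EnsureModule returns the generated wrapper module, or None when the control's type library is not registered, and the caller turns that None into an ImportError. A small sketch of that guard as a helper (the function name is illustrative; the CLSID in the usage comment is the Shockwave Flash one used above):

from win32com.client import gencache

def require_typelib(clsid, lcid, major, minor, what):
    """Return the generated module for a registered type library, or raise."""
    module = gencache.EnsureModule(clsid, lcid, major, minor)
    if module is None:
        raise ImportError("%s does not appear to be installed." % what)
    return module

# Usage mirroring flash.py (needs the Flash OCX registered on the machine):
#   FlashModule = require_typelib(
#       "{D27CDB6B-AE6D-11CF-96B8-444553540000}", 0, 1, 0, "Flash")
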
+ + +class OleClientItem(object.CmdTarget): + def __init__(self, doc): + object.CmdTarget.__init__(self, win32uiole.CreateOleClientItem(doc)) + + def OnGetItemPosition(self): + # For now return a hard-coded rect. + return (10, 10, 210, 210) + + def OnActivate(self): + # Allow only one inplace activate item per frame + view = self.GetActiveView() + item = self.GetDocument().GetInPlaceActiveItem(view) + if item is not None and item._obj_ != self._obj_: + item.Close() + self._obj_.OnActivate() + + def OnChange(self, oleNotification, dwParam): + self._obj_.OnChange(oleNotification, dwParam) + self.GetDocument().UpdateAllViews(None) + + def OnChangeItemPosition(self, rect): + # During in-place activation CEmbed_ExcelCntrItem::OnChangeItemPosition + # is called by the server to change the position of the in-place + # window. Usually, this is a result of the data in the server + # document changing such that the extent has changed or as a result + # of in-place resizing. + # + # The default here is to call the base class, which will call + # COleClientItem::SetItemRects to move the item + # to the new position. + if not self._obj_.OnChangeItemPosition(self, rect): + return 0 + + # TODO: update any cache you may have of the item's rectangle/extent + return 1 + +class OleDocument(object.CmdTarget): + def __init__(self, template): + object.CmdTarget.__init__(self, win32uiole.CreateOleDocument(template)) + self.EnableCompoundFile() + +class ExcelView(docview.ScrollView): + def OnInitialUpdate(self): + self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS) + self.HookMessage (self.OnSize, win32con.WM_SIZE) + + self.SetScrollSizes(win32con.MM_TEXT, (100, 100)) + rc = self._obj_.OnInitialUpdate() + self.EmbedExcel() + return rc + + def EmbedExcel(self): + doc = self.GetDocument() + self.clientItem = OleClientItem(doc) + self.clientItem.CreateNewItem("Excel.Sheet") + self.clientItem.DoVerb(-1, self) + doc.UpdateAllViews(None) + + def OnDraw(self, dc): + doc = self.GetDocument() + pos = doc.GetStartPosition() + clientItem, pos = doc.GetNextItem(pos) + clientItem.Draw(dc, (10, 10, 210, 210) ) + + # Special handling of OnSetFocus and OnSize are required for a container + # when an object is being edited in-place. + def OnSetFocus(self, msg): + item = self.GetDocument().GetInPlaceActiveItem(self) + if item is not None and item.GetItemState()==win32uiole.COleClientItem_activeUIState: + wnd = item.GetInPlaceWindow() + if wnd is not None: + wnd.SetFocus() + return 0 # Dont get the base version called. + return 1 # Call the base version. + + def OnSize (self, params): + item = self.GetDocument().GetInPlaceActiveItem(self) + if item is not None: + item.SetItemRects() + return 1 # do call the base! + +class OleTemplate(docview.DocTemplate): + def __init__(self, resourceId=None, MakeDocument=None, MakeFrame=None, MakeView=None): + if MakeDocument is None: MakeDocument = OleDocument + if MakeView is None: MakeView = ExcelView + docview.DocTemplate.__init__(self, resourceId, MakeDocument, MakeFrame, MakeView) + +class WordFrame(window.MDIChildWnd): + def __init__(self, doc = None): + self._obj_ = win32ui.CreateMDIChild() + self._obj_.AttachObject(self) + # Dont call base class doc/view version... 
+ def Create(self, title, rect = None, parent = None): + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW + self._obj_.CreateWindow(None, title, style, rect, parent) + + rect = self.GetClientRect() + rect = (0,0,rect[2]-rect[0], rect[3]-rect[1]) + self.ocx = MyWordControl() + self.ocx.CreateControl("Microsoft Word", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 20000) + +def Demo(): + import sys, win32api + docName = None + if len(sys.argv)>1: + docName = win32api.GetFullPathName(sys.argv[1]) + OleTemplate().OpenDocumentFile(None) +# f = WordFrame(docName) +# f.Create("Microsoft Office") + +if __name__=='__main__': + Demo() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxserialtest.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxserialtest.py new file mode 100644 index 00000000..4da0d989 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxserialtest.py @@ -0,0 +1,101 @@ +# ocxserialtest.py +# +# Sample that uses the mscomm OCX to talk to a serial +# device. + +# Very simple - queries a modem for ATI responses + +import win32ui, win32uiole +import win32con +from pywin.mfc import dialog, activex +from win32com.client import gencache +import pythoncom + +SERIAL_SETTINGS = '19200,n,8,1' +SERIAL_PORT = 2 + +win32ui.DoWaitCursor(1) +serialModule = gencache.EnsureModule("{648A5603-2C6E-101B-82B6-000000000014}", 0, 1, 1) +win32ui.DoWaitCursor(0) +if serialModule is None: + raise ImportError("MS COMM Control does not appear to be installed on the PC") + + +def MakeDlgTemplate(): + style = win32con.DS_MODALFRAME | win32con.WS_POPUP \ + | win32con.WS_VISIBLE | win32con.WS_CAPTION \ + | win32con.WS_SYSMENU | win32con.DS_SETFONT + cs = win32con.WS_CHILD | win32con.WS_VISIBLE + dlg = [ ["Very Basic Terminal", + (0, 0, 350, 180), style, None, (8, "MS Sans Serif")], ] + s = win32con.WS_TABSTOP | cs + dlg.append(["RICHEDIT", None, 132, (5, 5, 340, 170),s | win32con.ES_WANTRETURN | win32con.ES_MULTILINE | win32con.ES_AUTOVSCROLL | win32con.WS_VSCROLL]) + return dlg + + +#################################### +# +# Serial Control +# +class MySerialControl(activex.Control, serialModule.MSComm): + def __init__(self, parent): + activex.Control.__init__(self) + serialModule.MSComm.__init__(self) + self.parent = parent + def OnComm(self): + self.parent.OnComm() + +class TestSerDialog(dialog.Dialog): + def __init__(self, *args): + dialog.Dialog.__init__(*(self,)+args) + self.olectl = None + def OnComm(self): + event = self.olectl.CommEvent + if event == serialModule.OnCommConstants.comEvReceive: + self.editwindow.ReplaceSel(self.olectl.Input) + + def OnKey(self, key): + if self.olectl: + self.olectl.Output = chr(key) + + def OnInitDialog(self): + rc = dialog.Dialog.OnInitDialog(self) + self.editwindow = self.GetDlgItem(132) + self.editwindow.HookAllKeyStrokes(self.OnKey) + + self.olectl = MySerialControl(self) + try: + self.olectl.CreateControl("OCX", + win32con.WS_TABSTOP | win32con.WS_VISIBLE, + (7,43,500,300), self._obj_, 131) + except win32ui.error: + self.MessageBox("The Serial Control could not be created") + self.olectl = None + self.EndDialog(win32con.IDCANCEL) + if self.olectl: + self.olectl.Settings = SERIAL_SETTINGS + self.olectl.CommPort = SERIAL_PORT + self.olectl.RThreshold = 1 + try: + self.olectl.PortOpen = 1 + except pythoncom.com_error as details: + print("Could not open the specified serial port - %s" % (details.excepinfo[2])) + self.EndDialog(win32con.IDCANCEL) + return rc + + def OnDestroy(self, msg): + if 
self.olectl: + try: + self.olectl.PortOpen = 0 + except pythoncom.com_error as details: + print("Error closing port - %s" % (details.excepinfo[2])) + return dialog.Dialog.OnDestroy(self, msg) + +def test(): + d = TestSerDialog(MakeDlgTemplate() ) + d.DoModal() + +if __name__ == "__main__": + from . import demoutils + if demoutils.NeedGoodGUI(): + test() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxtest.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxtest.py new file mode 100644 index 00000000..66801593 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxtest.py @@ -0,0 +1,186 @@ +# OCX Tester for Pythonwin +# +# This file _is_ ready to run. All that is required is that the OCXs being tested +# are installed on your machine. +# +# The .py files behind the OCXs will be automatically generated and imported. + +from pywin.mfc import dialog, window, activex +import win32ui, win32uiole +import win32con +import os, sys, win32api, glob +from win32com.client import gencache + + +def MakeDlgTemplate(): + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + cs = win32con.WS_CHILD | win32con.WS_VISIBLE + dlg = [ ["OCX Demos", (0, 0, 350, 350), style, None, (8, "MS Sans Serif")], ] + s = win32con.WS_TABSTOP | cs +# dlg.append([131, None, 130, (5, 40, 110, 48), +# s | win32con.LBS_NOTIFY | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL | win32con.WS_BORDER]) +# dlg.append(["{8E27C92B-1264-101C-8A2F-040224009C02}", None, 131, (5, 40, 110, 48),win32con.WS_TABSTOP]) + + dlg.append([128, "About", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON]) + s = win32con.BS_PUSHBUTTON | s + dlg.append([128, "Close", win32con.IDCANCEL, (124, 22, 50, 14), s]) + + return dlg + +#################################### +# +# Calendar test code +# + +def GetTestCalendarClass(): + global calendarParentModule + win32ui.DoWaitCursor(1) + calendarParentModule = gencache.EnsureModule("{8E27C92E-1264-101C-8A2F-040224009C02}", 0, 7, 0) + win32ui.DoWaitCursor(0) + if calendarParentModule is None: + return None + + class TestCalDialog(dialog.Dialog): + def OnInitDialog(self): + + class MyCal(activex.Control, calendarParentModule.Calendar): + def OnAfterUpdate(self): + print("OnAfterUpdate") + def OnClick(self): + print("OnClick") + def OnDblClick(self): + print("OnDblClick") + def OnKeyDown(self, KeyCode, Shift): + print("OnKeyDown", KeyCode, Shift) + def OnKeyPress(self, KeyAscii): + print("OnKeyPress", KeyAscii) + def OnKeyUp(self, KeyCode, Shift): + print("OnKeyUp", KeyCode, Shift) + def OnBeforeUpdate(self, Cancel): + print("OnBeforeUpdate", Cancel) + def OnNewMonth(self): + print("OnNewMonth") + def OnNewYear(self): + print("OnNewYear") + + rc = dialog.Dialog.OnInitDialog(self) + self.olectl = MyCal() + try: + self.olectl.CreateControl("OCX", win32con.WS_TABSTOP | win32con.WS_VISIBLE, (7,43,500,300), self._obj_, 131) + except win32ui.error: + self.MessageBox("The Calendar Control could not be created") + self.olectl = None + self.EndDialog(win32con.IDCANCEL) + + return rc + def OnOK(self): + self.olectl.AboutBox() + + return TestCalDialog + + +#################################### +# +# Video Control +# +def GetTestVideoModule(): + global videoControlModule, videoControlFileName + win32ui.DoWaitCursor(1) + videoControlModule = gencache.EnsureModule("{05589FA0-C356-11CE-BF01-00AA0055595A}", 0, 2, 0) + win32ui.DoWaitCursor(0) + if videoControlModule is None: + 
return None + fnames = glob.glob(os.path.join(win32api.GetWindowsDirectory(), "*.avi")) + if not fnames: + print("No AVI files available in system directory") + return None + videoControlFileName = fnames[0] + return videoControlModule + +def GetTestVideoDialogClass(): + if GetTestVideoModule() is None: + return None + class TestVideoDialog(dialog.Dialog): + def OnInitDialog(self): + rc = dialog.Dialog.OnInitDialog(self) + try: + self.olectl = activex.MakeControlInstance(videoControlModule.ActiveMovie) + self.olectl.CreateControl("", win32con.WS_TABSTOP | win32con.WS_VISIBLE, (7,43,500,300), self._obj_, 131) + except win32ui.error: + self.MessageBox("The Video Control could not be created") + self.olectl = None + self.EndDialog(win32con.IDCANCEL) + return + + self.olectl.FileName = videoControlFileName +# self.olectl.Run() + return rc + def OnOK(self): + self.olectl.AboutBox() + return TestVideoDialog + +############### +# +# An OCX in an MDI Frame +# +class OCXFrame(window.MDIChildWnd): + def __init__(self): + pass # Dont call base class doc/view version... + def Create(self, controlClass, title, rect = None, parent = None): + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW + self._obj_ = win32ui.CreateMDIChild() + self._obj_.AttachObject(self) + self._obj_.CreateWindow(None, title, style, rect, parent) + + rect = self.GetClientRect() + rect = (0,0,rect[2]-rect[0], rect[3]-rect[1]) + self.ocx = controlClass() + self.ocx.CreateControl("", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000) + +def MDITest(): + calendarParentModule = gencache.EnsureModule("{8E27C92E-1264-101C-8A2F-040224009C02}", 0, 7, 0) + class MyCal(activex.Control, calendarParentModule.Calendar): + def OnAfterUpdate(self): + print("OnAfterUpdate") + def OnClick(self): + print("OnClick") + + f = OCXFrame() + f.Create(MyCal, "Calendar Test") + + +def test1(): + klass = GetTestCalendarClass() + if klass is None: + print("Can not test the MSAccess Calendar control - it does not appear to be installed") + return + + d = klass(MakeDlgTemplate() ) + d.DoModal() + +def test2(): + klass = GetTestVideoDialogClass() + if klass is None: + print("Can not test the Video OCX - it does not appear to be installed,") + print("or no AVI files can be found.") + return + d = klass(MakeDlgTemplate() ) + d.DoModal() + d = None + +def test3(): + d = TestCOMMDialog(MakeDlgTemplate() ) + d.DoModal() + d = None + +def testall(): + test1() + test2() + +def demo(): + testall() + +if __name__=='__main__': + from . import demoutils + if demoutils.NeedGoodGUI(): + testall() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/webbrowser.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/webbrowser.py new file mode 100644 index 00000000..bd50d2ac --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/ocx/webbrowser.py @@ -0,0 +1,55 @@ +# This demo uses the IE4 Web Browser control. + +# It catches an "OnNavigate" event, and updates the frame title. 
+# (event stuff by Neil Hodgson) + +import win32ui, win32con, win32api, regutil +from pywin.mfc import window, activex +from win32com.client import gencache +import sys + +WebBrowserModule = gencache.EnsureModule("{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}", 0, 1, 1) +if WebBrowserModule is None: + raise ImportError("IE4 does not appear to be installed.") + +class MyWebBrowser(activex.Control, WebBrowserModule.WebBrowser): + def OnBeforeNavigate2(self, pDisp, URL, Flags, TargetFrameName, PostData, Headers, Cancel): + self.GetParent().OnNavigate(URL) + #print "BeforeNavigate2", pDisp, URL, Flags, TargetFrameName, PostData, Headers, Cancel + +class BrowserFrame(window.MDIChildWnd): + def __init__(self, url = None): + if url is None: + self.url = regutil.GetRegisteredHelpFile("Main Python Documentation") + if self.url is None: + self.url = "http://www.python.org" + else: + self.url = url + pass # Dont call base class doc/view version... + def Create(self, title, rect = None, parent = None): + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW + self._obj_ = win32ui.CreateMDIChild() + self._obj_.AttachObject(self) + self._obj_.CreateWindow(None, title, style, rect, parent) + rect = self.GetClientRect() + rect = (0,0,rect[2]-rect[0], rect[3]-rect[1]) + self.ocx = MyWebBrowser() + self.ocx.CreateControl("Web Browser", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000) + self.ocx.Navigate(self.url) + self.HookMessage (self.OnSize, win32con.WM_SIZE) + def OnSize (self, params): + rect = self.GetClientRect() + rect = (0,0,rect[2]-rect[0], rect[3]-rect[1]) + self.ocx.SetWindowPos(0, rect, 0) + def OnNavigate(self, url): + title = "Web Browser - %s" % (url,) + self.SetWindowText(title) + +def Demo(url=None): + if url is None and len(sys.argv)>1: + url = win32api.GetFullPathName(sys.argv[1]) + f = BrowserFrame(url) + f.Create("Web Browser") + +if __name__=='__main__': + Demo() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/openGLDemo.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/openGLDemo.py new file mode 100644 index 00000000..f1991768 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/openGLDemo.py @@ -0,0 +1,358 @@ +# Ported from the win32 and MFC OpenGL Samples. 
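
BrowserFrame.OnSize above, and the Flash and OCXFrame classes earlier, all convert the client rectangle into an origin-based (0, 0, width, height) tuple before passing it to SetWindowPos or CreateControl. That conversion is a one-liner worth naming; a standard-library sketch (the function name is illustrative):

def to_origin_rect(client_rect):
    """Convert a (left, top, right, bottom) rect to (0, 0, width, height)."""
    left, top, right, bottom = client_rect
    return (0, 0, right - left, bottom - top)

if __name__ == '__main__':
    # GetClientRect() normally already reports a (0, 0, r, b) rectangle,
    # but the subtraction also covers the general case.
    print(to_origin_rect((0, 0, 640, 480)))    # -> (0, 0, 640, 480)
    print(to_origin_rect((10, 20, 650, 500)))  # -> (0, 0, 640, 480)
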
+ +from pywin.mfc import docview +import sys +try: + from OpenGL.GL import * + from OpenGL.GLU import * +except ImportError: + print("The OpenGL extensions do not appear to be installed.") + print("This Pythonwin demo can not run") + sys.exit(1) + +import win32con +import win32ui +import win32api +import timer + +PFD_TYPE_RGBA = 0 +PFD_TYPE_COLORINDEX = 1 +PFD_MAIN_PLANE = 0 +PFD_OVERLAY_PLANE = 1 +PFD_UNDERLAY_PLANE = (-1) +PFD_DOUBLEBUFFER = 0x00000001 +PFD_STEREO = 0x00000002 +PFD_DRAW_TO_WINDOW = 0x00000004 +PFD_DRAW_TO_BITMAP = 0x00000008 +PFD_SUPPORT_GDI = 0x00000010 +PFD_SUPPORT_OPENGL = 0x00000020 +PFD_GENERIC_FORMAT = 0x00000040 +PFD_NEED_PALETTE = 0x00000080 +PFD_NEED_SYSTEM_PALETTE = 0x00000100 +PFD_SWAP_EXCHANGE = 0x00000200 +PFD_SWAP_COPY = 0x00000400 +PFD_SWAP_LAYER_BUFFERS = 0x00000800 +PFD_GENERIC_ACCELERATED = 0x00001000 +PFD_DEPTH_DONTCARE = 0x20000000 +PFD_DOUBLEBUFFER_DONTCARE = 0x40000000 +PFD_STEREO_DONTCARE = 0x80000000 + + +#threeto8 = [0, 0o111>>1, 0o222>>1, 0o333>>1, 0o444>>1, 0o555>>1, 0o666>>1, 0o377] +threeto8 = [0, 73>>1, 146>>1, 219>>1, 292>>1, 365>>1, 438>>1, 255] +twoto8 = [0, 0x55, 0xaa, 0xff] +oneto8 = [0, 255] + +def ComponentFromIndex(i, nbits, shift): + # val = (unsigned char) (i >> shift); + val = (i >> shift) & 0xF; + if nbits==1: + val = val & 0x1 + return oneto8[val] + elif nbits==2: + val = val & 0x3 + return twoto8[val] + elif nbits==3: + val = val & 0x7 + return threeto8[val] + else: + return 0; + +OpenGLViewParent=docview.ScrollView +class OpenGLView(OpenGLViewParent): + def PreCreateWindow(self, cc): + self.HookMessage (self.OnSize, win32con.WM_SIZE) + # An OpenGL window must be created with the following flags and must not + # include CS_PARENTDC for the class style. Refer to SetPixelFormat + # documentation in the "Comments" section for further information. 
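
ComponentFromIndex above expands a small bit field of a palette index back to a full 0..255 component through the oneto8/twoto8/threeto8 lookup tables. A self-contained sketch of the same mapping, assuming a typical 3-3-2 red/green/blue layout; the shift values 5, 2 and 0 are illustrative, since the real ones come from the cRedShift/cGreenShift/cBlueShift fields of the pixel format descriptor used by _CreateRGBPalette below:

# The same scaling tables the demo defines (threeto8 spelled out numerically).
oneto8 = [0, 255]
twoto8 = [0, 0x55, 0xAA, 0xFF]
threeto8 = [0, 36, 73, 109, 146, 182, 219, 255]

def component_from_index(index, nbits, shift):
    """Extract an nbits-wide field at 'shift' from index and scale it to 0..255."""
    field = index >> shift
    if nbits == 1:
        return oneto8[field & 0x1]
    if nbits == 2:
        return twoto8[field & 0x3]
    if nbits == 3:
        return threeto8[field & 0x7]
    return 0

if __name__ == '__main__':
    # Palette entry for index 0b11101101 with 3 red, 3 green and 2 blue bits:
    i = 0b11101101
    rgb = (component_from_index(i, 3, 5),   # red field 0b111 -> 255
           component_from_index(i, 3, 2),   # green field 0b011 -> 109
           component_from_index(i, 2, 0))   # blue field 0b01 -> 0x55
    print(rgb)   # -> (255, 109, 85)
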
+ style = cc[5] + style = style | win32con.WS_CLIPSIBLINGS | win32con.WS_CLIPCHILDREN + cc = cc[0], cc[1], cc[2], cc[3], cc[4], style, cc[6], cc[7], cc[8] + cc = self._obj_.PreCreateWindow(cc) + return cc + + def OnSize (self, params): + lParam = params[3] + cx = win32api.LOWORD(lParam) + cy = win32api.HIWORD(lParam) + glViewport(0, 0, cx, cy) + + if self.oldrect[2] > cx or self.oldrect[3] > cy: + self.RedrawWindow() + + self.OnSizeChange(cx, cy) + + self.oldrect = self.oldrect[0], self.oldrect[1], cx, cy + + def OnInitialUpdate(self): + self.SetScaleToFitSize((100,100)) # or SetScrollSizes() - A Pythonwin requirement + return self._obj_.OnInitialUpdate() +# return rc + + def OnCreate(self, cs): + self.oldrect = self.GetClientRect() + self._InitContexts() + self.Init() + + def OnDestroy(self, msg): + self.Term() + self._DestroyContexts() + return OpenGLViewParent.OnDestroy(self, msg) + + + def OnDraw(self, dc): + self.DrawScene() + + def OnEraseBkgnd(self, dc): + return 1 + + # The OpenGL helpers + def _SetupPixelFormat(self): + dc = self.dc.GetSafeHdc() + pfd = CreatePIXELFORMATDESCRIPTOR() + pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER + pfd.iPixelType = PFD_TYPE_RGBA + pfd.cColorBits = 24 + pfd.cDepthBits = 32 + pfd.iLayerType = PFD_MAIN_PLANE + pixelformat = ChoosePixelFormat(dc, pfd) + SetPixelFormat(dc, pixelformat, pfd) + self._CreateRGBPalette() + + def _CreateRGBPalette(self): + dc = self.dc.GetSafeHdc() + n = GetPixelFormat(dc) + pfd = DescribePixelFormat(dc, n) + if pfd.dwFlags & PFD_NEED_PALETTE: + n = 1 << pfd.cColorBits + pal = [] + for i in range(n): + this = ComponentFromIndex(i, pfd.cRedBits, pfd.cRedShift), \ + ComponentFromIndex(i, pfd.cGreenBits, pfd.cGreenShift), \ + ComponentFromIndex(i, pfd.cBlueBits, pfd.cBlueShift), \ + 0 + pal.append(this) + hpal = win32ui.CreatePalette(pal) + self.dc.SelectPalette(hpal, 0) + self.dc.RealizePalette() + + def _InitContexts(self): + self.dc = self.GetDC() + self._SetupPixelFormat() + hrc = wglCreateContext(self.dc.GetSafeHdc()) + wglMakeCurrent(self.dc.GetSafeHdc(), hrc) + + def _DestroyContexts(self): + hrc = wglGetCurrentContext() + wglMakeCurrent(0, 0) + if hrc: wglDeleteContext(hrc) + + # The methods to support OpenGL + def DrawScene(self): + assert 0, "You must override this method" + + def Init(self): + assert 0, "You must override this method" + + def OnSizeChange(self, cx, cy): + pass + + def Term(self): + pass + + +class TestView(OpenGLView): + + def OnSizeChange(self, right, bottom): + glClearColor( 0.0, 0.0, 0.0, 1.0 ); + glClearDepth( 1.0 ); + glEnable(GL_DEPTH_TEST) + + glMatrixMode( GL_PROJECTION ) + if bottom: + aspect = right / bottom + else: + aspect = 0 # When window created! 
+ glLoadIdentity() + gluPerspective( 45.0, aspect, 3.0, 7.0 ) + glMatrixMode( GL_MODELVIEW ) + + near_plane = 3.0; + far_plane = 7.0; + maxObjectSize = 3.0; + self.radius = near_plane + maxObjectSize/2.0; + + + def Init(self): + pass + + def DrawScene(self): + glClearColor(0.0, 0.0, 0.0, 1.0) + glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT ) + + glPushMatrix() + glTranslatef(0.0, 0.0, -self.radius); + + self._DrawCone() + + self._DrawPyramid() + + glPopMatrix() + glFinish() + + SwapBuffers( wglGetCurrentDC() ) + + def _DrawCone(self): + glColor3f(0.0, 1.0, 0.0) + + glPushMatrix() + glTranslatef(-1.0, 0.0, 0.0); + quadObj = gluNewQuadric(); + gluQuadricDrawStyle(quadObj, GLU_FILL); + gluQuadricNormals(quadObj, GLU_SMOOTH); + gluCylinder(quadObj, 1.0, 0.0, 1.0, 20, 10); +# gluDeleteQuadric(quadObj); + glPopMatrix(); + + def _DrawPyramid(self): + glPushMatrix() + glTranslatef(1.0, 0.0, 0.0) + glBegin(GL_TRIANGLE_FAN) + glColor3f(1.0, 0.0, 0.0) + glVertex3f(0.0, 1.0, 0.0) + glColor3f(0.0, 1.0, 0.0) + glVertex3f(-1.0, 0.0, 0.0) + glColor3f(0.0, 0.0, 1.0) + glVertex3f(0.0, 0.0, 1.0) + glColor3f(0.0, 1.0, 0.0) + glVertex3f(1.0, 0.0, 0.0) + glEnd() + glPopMatrix() + +class CubeView(OpenGLView): + def OnSizeChange(self, right, bottom): + glClearColor( 0.0, 0.0, 0.0, 1.0 ); + glClearDepth( 1.0 ); + glEnable(GL_DEPTH_TEST) + + glMatrixMode( GL_PROJECTION ) + if bottom: + aspect = right / bottom + else: + aspect = 0 # When window created! + glLoadIdentity() + gluPerspective( 45.0, aspect, 3.0, 7.0 ) + glMatrixMode( GL_MODELVIEW ) + + near_plane = 3.0; + far_plane = 7.0; + maxObjectSize = 3.0; + self.radius = near_plane + maxObjectSize/2.0; + + def Init(self): + self.busy = 0 + self.wAngleY = 10.0 + self.wAngleX = 1.0 + self.wAngleZ = 5.0 + self.timerid = timer.set_timer (150, self.OnTimer) + + def OnTimer(self, id, timeVal): + self.DrawScene() + + def Term(self): + timer.kill_timer(self.timerid) + + def DrawScene(self): + if self.busy: return + self.busy = 1 + + glClearColor(0.0, 0.0, 0.0, 1.0); + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + + glPushMatrix(); + + glTranslatef(0.0, 0.0, -self.radius); + glRotatef(self.wAngleX, 1.0, 0.0, 0.0); + glRotatef(self.wAngleY, 0.0, 1.0, 0.0); + glRotatef(self.wAngleZ, 0.0, 0.0, 1.0); + + self.wAngleX = self.wAngleX + 1.0 + self.wAngleY = self.wAngleY + 10.0 + self.wAngleZ = self.wAngleZ + 5.0; + + + glBegin(GL_QUAD_STRIP); + glColor3f(1.0, 0.0, 1.0); + glVertex3f(-0.5, 0.5, 0.5); + + glColor3f(1.0, 0.0, 0.0); + glVertex3f(-0.5, -0.5, 0.5); + + glColor3f(1.0, 1.0, 1.0); + glVertex3f(0.5, 0.5, 0.5); + + glColor3f(1.0, 1.0, 0.0); + glVertex3f(0.5, -0.5, 0.5); + + glColor3f(0.0, 1.0, 1.0); + glVertex3f(0.5, 0.5, -0.5); + + glColor3f(0.0, 1.0, 0.0); + glVertex3f(0.5, -0.5, -0.5); + + glColor3f(0.0, 0.0, 1.0); + glVertex3f(-0.5, 0.5, -0.5); + + glColor3f(0.0, 0.0, 0.0); + glVertex3f(-0.5, -0.5, -0.5); + + glColor3f(1.0, 0.0, 1.0); + glVertex3f(-0.5, 0.5, 0.5); + + glColor3f(1.0, 0.0, 0.0); + glVertex3f(-0.5, -0.5, 0.5); + + glEnd(); + + glBegin(GL_QUADS); + glColor3f(1.0, 0.0, 1.0); + glVertex3f(-0.5, 0.5, 0.5); + + glColor3f(1.0, 1.0, 1.0); + glVertex3f(0.5, 0.5, 0.5); + + glColor3f(0.0, 1.0, 1.0); + glVertex3f(0.5, 0.5, -0.5); + + glColor3f(0.0, 0.0, 1.0); + glVertex3f(-0.5, 0.5, -0.5); + glEnd(); + + glBegin(GL_QUADS); + glColor3f(1.0, 0.0, 0.0); + glVertex3f(-0.5, -0.5, 0.5); + + glColor3f(1.0, 1.0, 0.0); + glVertex3f(0.5, -0.5, 0.5); + + glColor3f(0.0, 1.0, 0.0); + glVertex3f(0.5, -0.5, -0.5); + + glColor3f(0.0, 0.0, 0.0); + glVertex3f(-0.5, -0.5, 
-0.5); + glEnd(); + + glPopMatrix(); + + glFinish(); + SwapBuffers(wglGetCurrentDC()); + + self.busy = 0 + +def test(): + template = docview.DocTemplate(None, None, None, CubeView ) +# template = docview.DocTemplate(None, None, None, TestView ) + template.OpenDocumentFile(None) + +if __name__=='__main__': + test() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/progressbar.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/progressbar.py new file mode 100644 index 00000000..70b7a341 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/progressbar.py @@ -0,0 +1,88 @@ +# +# Progress bar control example +# +# PyCProgressCtrl encapsulates the MFC CProgressCtrl class. To use it, +# you: +# +# - Create the control with win32ui.CreateProgressCtrl() +# - Create the control window with PyCProgressCtrl.CreateWindow() +# - Initialize the range if you want it to be other than (0, 100) using +# PyCProgressCtrl.SetRange() +# - Either: +# - Set the step size with PyCProgressCtrl.SetStep(), and +# - Increment using PyCProgressCtrl.StepIt() +# or: +# - Set the amount completed using PyCProgressCtrl.SetPos() +# +# Example and progress bar code courtesy of KDL Technologies, Ltd., Hong Kong SAR, China. +# + +from pywin.mfc import dialog +import win32ui +import win32con + +def MakeDlgTemplate(): + style = (win32con.DS_MODALFRAME | + win32con.WS_POPUP | + win32con.WS_VISIBLE | + win32con.WS_CAPTION | + win32con.WS_SYSMENU | + win32con.DS_SETFONT) + cs = (win32con.WS_CHILD | + win32con.WS_VISIBLE) + + w = 215 + h = 36 + + dlg = [["Progress bar control example", + (0, 0, w, h), + style, + None, + (8, "MS Sans Serif")], + ] + + s = win32con.WS_TABSTOP | cs + + dlg.append([128, + "Tick", + win32con.IDOK, + (10, h - 18, 50, 14), s | win32con.BS_DEFPUSHBUTTON]) + + dlg.append([128, + "Cancel", + win32con.IDCANCEL, + (w - 60, h - 18, 50, 14), s | win32con.BS_PUSHBUTTON]) + + return dlg + +class TestDialog(dialog.Dialog): + def OnInitDialog(self): + rc = dialog.Dialog.OnInitDialog(self) + self.pbar = win32ui.CreateProgressCtrl() + self.pbar.CreateWindow (win32con.WS_CHILD | + win32con.WS_VISIBLE, + (10, 10, 310, 24), + self, 1001) + # self.pbar.SetStep (5) + self.progress = 0 + self.pincr = 5 + return rc + + def OnOK(self): + # NB: StepIt wraps at the end if you increment past the upper limit! + # self.pbar.StepIt() + self.progress = self.progress + self.pincr + if self.progress > 100: + self.progress = 100 + if self.progress <= 100: + self.pbar.SetPos(self.progress) + +def demo(modal = 0): + d = TestDialog (MakeDlgTemplate()) + if modal: + d.DoModal() + else: + d.CreateWindow () + +if __name__=='__main__': + demo(1) diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/sliderdemo.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/sliderdemo.py new file mode 100644 index 00000000..d9bf1780 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/sliderdemo.py @@ -0,0 +1,54 @@ +# sliderdemo.py +# Demo of the slider control courtesy of Mike Fletcher. 
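+#
+# (Editor's note - a sketch of the template format that both this demo and
+# the progress bar demo above rely on: pywin.mfc.dialog templates are plain
+# Python lists.  The first entry describes the dialog itself and every
+# following entry describes one control, roughly:
+#
+#   template = [
+#       ["Caption", (x, y, w, h), dialog_style, None, (8, "MS Sans Serif")],
+#       [128, "Close", win32con.IDCANCEL, (x, y, w, h), button_style],
+#   ]
+#
+# DIALOGTEMPLATE below is built in exactly this shape.)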
+ +import win32con, win32ui +from pywin.mfc import dialog + +class MyDialog(dialog.Dialog): + ''' + Example using simple controls + ''' + _dialogstyle = (win32con.WS_MINIMIZEBOX | win32con.WS_DLGFRAME | + win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | + win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT ) + _buttonstyle = (win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP | + win32con.WS_CHILD | win32con.WS_VISIBLE) + ### The static template, contains all "normal" dialog items + DIALOGTEMPLATE = [ + # the dialog itself is the first element in the template + ["Example slider", (0, 0, 50, 43), _dialogstyle, None, (8, "MS SansSerif")], + # rest of elements are the controls within the dialog + # standard "Close" button + [128, "Close", win32con.IDCANCEL, (0, 30, 50, 13), _buttonstyle], ] + ### ID of the control to be created during dialog initialisation + IDC_SLIDER = 9500 + def __init__(self ): + dialog.Dialog.__init__(self, self.DIALOGTEMPLATE) + def OnInitDialog(self): + rc = dialog.Dialog.OnInitDialog(self) + # now initialise your controls that you want to create + # programmatically, including those which are OLE controls + # those created directly by win32ui.Create* + # and your "custom controls" which are subclasses/whatever + win32ui.EnableControlContainer() + self.slider = win32ui.CreateSliderCtrl( ) + self.slider.CreateWindow( win32con.WS_TABSTOP | win32con.WS_VISIBLE, + (0,0,100,30), + self._obj_, + self.IDC_SLIDER) + self.HookMessage(self.OnSliderMove, win32con.WM_HSCROLL) + return rc + + def OnSliderMove(self, params): + print("Slider moved") + + def OnCancel(self): + print("The slider control is at position", self.slider.GetPos()) + self._obj_.OnCancel() +### +def demo(): + dia = MyDialog() + dia.DoModal() + +if __name__ == "__main__": + demo() \ No newline at end of file diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/splittst.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/splittst.py new file mode 100644 index 00000000..1c9dbdf4 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/splittst.py @@ -0,0 +1,72 @@ +import win32ui +import win32con +import fontdemo +from pywin.mfc import window, docview +import commctrl + +# derive from CMDIChild. This does much work for us. + +class SplitterFrame(window.MDIChildWnd): + def __init__(self): + # call base CreateFrame + self.images = None + window.MDIChildWnd.__init__(self) + + def OnCreateClient(self, cp, context): + splitter = win32ui.CreateSplitter() + doc = context.doc + frame_rect = self.GetWindowRect() + size = ((frame_rect[2] - frame_rect[0]), + (frame_rect[3] - frame_rect[1])//2) + sub_size = (size[0]//2, size[1]) + splitter.CreateStatic (self, 2, 1) + self.v1 = win32ui.CreateEditView(doc) + self.v2 = fontdemo.FontView(doc) + # CListControl view + self.v3 = win32ui.CreateListView(doc) + sub_splitter = win32ui.CreateSplitter() + # pass "splitter" so each view knows how to get to the others + sub_splitter.CreateStatic (splitter, 1, 2) + sub_splitter.CreateView(self.v1, 0, 0, (sub_size)) + sub_splitter.CreateView(self.v2, 0, 1, (0,0)) # size ignored. + splitter.SetRowInfo(0, size[1] ,0) + splitter.CreateView (self.v3, 1, 0, (0,0)) # size ignored. 
+ # Setup items in the imagelist + self.images = win32ui.CreateImageList(32,32,1,5,5) + self.images.Add(win32ui.GetApp().LoadIcon(win32ui.IDR_MAINFRAME)) + self.images.Add(win32ui.GetApp().LoadIcon(win32ui.IDR_PYTHONCONTYPE)) + self.images.Add(win32ui.GetApp().LoadIcon(win32ui.IDR_TEXTTYPE)) + self.v3.SetImageList(self.images, commctrl.LVSIL_NORMAL) + self.v3.InsertItem(0, "Icon 1", 0) + self.v3.InsertItem(0, "Icon 2", 1) + self.v3.InsertItem(0, "Icon 3", 2) +# self.v3.Arrange(commctrl.LVA_DEFAULT) Hmmm - win95 aligns left always??? + return 1 + def OnDestroy(self, msg): + window.MDIChildWnd.OnDestroy(self, msg) + if self.images: + self.images.DeleteImageList() + self.images = None + + def InitialUpdateFrame(self, doc, makeVisible): + self.v1.ReplaceSel("Hello from Edit Window 1") + self.v1.SetModifiedFlag(0) + +class SampleTemplate(docview.DocTemplate): + def __init__(self): + docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, None, SplitterFrame, None) + def InitialUpdateFrame(self, frame, doc, makeVisible): +# print "frame is ", frame, frame._obj_ +# print "doc is ", doc, doc._obj_ + self._obj_.InitialUpdateFrame(frame, doc, makeVisible) # call default handler. + frame.InitialUpdateFrame(doc, makeVisible) + +def demo(): + template = SampleTemplate() + doc=template.OpenDocumentFile(None) + doc.SetTitle("Splitter Demo") + +if __name__=='__main__': + import demoutils + if demoutils.NeedGoodGUI(): + demo() \ No newline at end of file diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/threadedgui.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/threadedgui.py new file mode 100644 index 00000000..38a1afee --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/threadedgui.py @@ -0,0 +1,174 @@ +# Demo of using just windows, without documents and views. + +# Also demo of a GUI thread, pretty much direct from the MFC C++ sample MTMDI. + +import win32ui +import win32con +import win32api +import timer + +from pywin.mfc import window, docview, thread +from pywin.mfc.thread import WinThread + + +WM_USER_PREPARE_TO_CLOSE = win32con.WM_USER + 32 + +# font is a dictionary in which the following elements matter: +# (the best matching font to supplied parameters is returned) +# name string name of the font as known by Windows +# size point size of font in logical units +# weight weight of font (win32con.FW_NORMAL, win32con.FW_BOLD) +# italic boolean; true if set to anything but None +# underline boolean; true if set to anything but None + +# This window is a child window of a frame. It is not the frame window itself. 
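+#
+# (Editor's illustration of such a font dictionary - ChangeAttributes() in the
+# class below builds one and hands it to win32ui.CreateFont:
+#
+#   font_spec = {'name': 'Arial', 'height': 42}
+#   font = win32ui.CreateFont(font_spec)
+# )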
+class FontWindow(window.Wnd): + def __init__(self, text = 'Python Rules!'): + window.Wnd.__init__(self) + self.text = text + self.index = 0 + self.incr = 1 + self.width = self.height = 0 + self.ChangeAttributes() + # set up message handlers + + def Create(self, title, style, rect, parent): + classStyle = win32con.CS_HREDRAW | win32con.CS_VREDRAW + className = win32ui.RegisterWndClass(classStyle, 0, win32con.COLOR_WINDOW+1, 0) + self._obj_ = win32ui.CreateWnd() + self._obj_.AttachObject(self) + self._obj_.CreateWindow(className, title, style, rect, parent, win32ui.AFX_IDW_PANE_FIRST) + self.HookMessage (self.OnSize, win32con.WM_SIZE) + self.HookMessage (self.OnPrepareToClose, WM_USER_PREPARE_TO_CLOSE) + self.HookMessage (self.OnDestroy, win32con.WM_DESTROY) + self.timerid = timer.set_timer (100, self.OnTimer) + self.InvalidateRect() + + def OnDestroy (self, msg): + timer.kill_timer(self.timerid) + + def OnTimer(self, id, timeVal): + self.index = self.index + self.incr + if self.index > len(self.text): + self.incr = -1 + self.index = len(self.text) + elif self.index < 0: + self.incr = 1 + self.index = 0 + self.InvalidateRect() + + def OnPaint (self): +# print "Paint message from thread", win32api.GetCurrentThreadId() + dc, paintStruct = self.BeginPaint() + self.OnPrepareDC(dc, None) + + if (self.width == 0 and self.height == 0): + left, top, right, bottom = self.GetClientRect() + self.width = right - left + self.height = bottom - top + x, y = self.width // 2, self.height // 2 + dc.TextOut (x, y, self.text[:self.index]) + self.EndPaint(paintStruct) + + def ChangeAttributes(self): + font_spec = {'name':'Arial', 'height':42} + self.font = win32ui.CreateFont (font_spec) + + def OnPrepareToClose(self, params): + self.DestroyWindow() + + def OnSize (self, params): + lParam = params[3] + self.width = win32api.LOWORD(lParam) + self.height = win32api.HIWORD(lParam) + + def OnPrepareDC (self, dc, printinfo): + # Set up the DC for forthcoming OnDraw call + dc.SetTextColor (win32api.RGB(0,0,255)) + dc.SetBkColor (win32api.GetSysColor (win32con.COLOR_WINDOW)) + dc.SelectObject (self.font) + dc.SetTextAlign (win32con.TA_CENTER | win32con.TA_BASELINE) + +class FontFrame(window.MDIChildWnd): + def __init__(self): + pass # Dont call base class doc/view version... + def Create(self, title, rect = None, parent = None): + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW + self._obj_ = win32ui.CreateMDIChild() + self._obj_.AttachObject(self) + + self._obj_.CreateWindow(None, title, style, rect, parent) + rect = self.GetClientRect() + rect = (0,0,rect[2]-rect[0], rect[3]-rect[1]) + self.child = FontWindow("Not threaded") + self.child.Create("FontDemo", win32con.WS_CHILD | win32con.WS_VISIBLE, rect, self) + + +class TestThread(WinThread): + def __init__(self, parentWindow): + self.parentWindow = parentWindow + self.child = None + WinThread.__init__(self) + def InitInstance(self): + rect = self.parentWindow.GetClientRect() + rect = (0,0,rect[2]-rect[0], rect[3]-rect[1]) + + self.child = FontWindow() + self.child.Create("FontDemo", win32con.WS_CHILD | win32con.WS_VISIBLE, rect, self.parentWindow) + self.SetMainFrame(self.child) + return WinThread.InitInstance(self) + + def ExitInstance(self): + return 0 + +class ThreadedFontFrame(window.MDIChildWnd): + def __init__(self): + pass # Dont call base class doc/view version... 
+ self.thread = None + def Create(self, title, rect = None, parent = None): + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW + self._obj_ = win32ui.CreateMDIChild() + self._obj_.CreateWindow(None, title, style, rect, parent) + self._obj_.HookMessage(self.OnDestroy, win32con.WM_DESTROY) + self._obj_.HookMessage (self.OnSize, win32con.WM_SIZE) + + self.thread = TestThread(self) + self.thread.CreateThread() + + def OnSize(self, msg): + pass + + def OnDestroy(self, msg): + win32ui.OutputDebugString("OnDestroy\n") + if self.thread and self.thread.child: + child = self.thread.child + child.SendMessage(WM_USER_PREPARE_TO_CLOSE, 0, 0) + win32ui.OutputDebugString("Destroyed\n") + + +def Demo(): + f = FontFrame() + f.Create("Font Demo") + +def ThreadedDemo(): + rect = win32ui.GetMainFrame().GetMDIClient().GetClientRect() + rect = rect[0], int(rect[3]*3/4), int(rect[2]/4), rect[3] + incr = rect[2] + for i in range(4): + if i==0: + f = FontFrame() + title = "Not threaded" + else: + f = ThreadedFontFrame() + title = "Threaded GUI Demo" + f.Create(title, rect) + rect = rect[0] + incr, rect[1], rect[2]+incr, rect[3] + # Givem a chance to start + win32api.Sleep(100) + win32ui.PumpWaitingMessages() + +if __name__=='__main__': + import demoutils + if demoutils.NeedGoodGUI(): + ThreadedDemo() +# Demo() diff --git a/venv/Lib/site-packages/pythonwin/pywin/Demos/toolbar.py b/venv/Lib/site-packages/pythonwin/pywin/Demos/toolbar.py new file mode 100644 index 00000000..30e7346c --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/Demos/toolbar.py @@ -0,0 +1,93 @@ +# Demo of ToolBars + +# Shows the toolbar control. +# Demos how to make custom tooltips, etc. + +import win32ui +import win32con +import win32api +from pywin.mfc import docview, window, afxres +import commctrl + +class GenericFrame(window.MDIChildWnd): + def OnCreateClient(self, cp, context): + # handlers for toolbar buttons + self.HookCommand (self.OnPrevious, 401) + self.HookCommand (self.OnNext, 402) + # Its not necessary for us to hook both of these - the + # common controls should fall-back all by themselves. + # Indeed, given we hook TTN_NEEDTEXTW, commctrl.TTN_NEEDTEXTA + # will not be called. + self.HookNotify(self.GetTTText, commctrl.TTN_NEEDTEXT) + self.HookNotify(self.GetTTText, commctrl.TTN_NEEDTEXTW) + +# parent = win32ui.GetMainFrame() + parent = self + style = win32con.WS_CHILD | win32con.WS_VISIBLE | \ + afxres.CBRS_SIZE_DYNAMIC | afxres.CBRS_TOP | afxres.CBRS_TOOLTIPS | afxres.CBRS_FLYBY + + buttons = (win32ui.ID_APP_ABOUT,win32ui.ID_VIEW_INTERACTIVE) + bitmap = win32ui.IDB_BROWSER_HIER + tbid = 0xE840 + self.toolbar = tb = win32ui.CreateToolBar (parent, style, tbid) + tb.LoadBitmap(bitmap) + tb.SetButtons(buttons) + + tb.EnableDocking(afxres.CBRS_ALIGN_ANY) + tb.SetWindowText("Test") + parent.EnableDocking(afxres.CBRS_ALIGN_ANY) + parent.DockControlBar(tb) + parent.LoadBarState("ToolbarTest") + window.MDIChildWnd.OnCreateClient(self, cp, context) + return 1 + + def OnDestroy(self, msg): + self.SaveBarState("ToolbarTest") + + def GetTTText(self, std, extra): + (hwndFrom, idFrom, code) = std + text, hinst, flags = extra + if flags & commctrl.TTF_IDISHWND: + return # Not handled + if (idFrom==win32ui.ID_APP_ABOUT): + # our 'extra' return value needs to be the following + # entries from a NMTTDISPINFO[W] struct: + # (szText, hinst, uFlags). None means 'don't change + # the value' + return 0, ("It works!", None, None) + return None # not handled. 
+ + def GetMessageString(self, id): + if id==win32ui.ID_APP_ABOUT: + return "Dialog Test\nTest" + else: + return self._obj_.GetMessageString(id) + + def OnSize (self, params): + print('OnSize called with ', params) + + def OnNext (self, id, cmd): + print('OnNext called') + + def OnPrevious (self, id, cmd): + print('OnPrevious called') + +msg = """\ +This toolbar was dynamically created.\r +\r +The first item's tooltips is provided by Python code.\r +\r +(Dont close the window with the toolbar in a floating state - it may not re-appear!)\r +""" + +def test(): + template = docview.DocTemplate( win32ui.IDR_PYTHONTYPE, None, GenericFrame, docview.EditView) + doc = template.OpenDocumentFile(None) + doc.SetTitle("Toolbar Test") + view = doc.GetFirstView() + view.SetWindowText(msg) + +if __name__=='__main__': + import demoutils + if demoutils.NeedGoodGUI(): + test() diff --git a/venv/Lib/site-packages/pythonwin/pywin/IDLE.cfg b/venv/Lib/site-packages/pythonwin/pywin/IDLE.cfg new file mode 100644 index 00000000..b1987b14 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/IDLE.cfg @@ -0,0 +1,29 @@ +[General] +# We base this configuration on the default config. +# You can list "Based On" as many times as you like +Based On = default + +[Keys] +# Only list keys different to default. +# Note you may wish to rebind some of the default +# Pythonwin keys to "Beep" or "DoNothing" + +Alt+L = LocateSelectedFile +Ctrl+Q = AppExit + +# Other non-default Pythonwin keys +Alt+A = EditSelectAll +Alt+M = LocateModule + +# Movement +Ctrl+D = GotoEndOfFile + +# Tabs and other indent features +Alt+T = <> +Ctrl+[ = <> +Ctrl+] = <> + +[Keys:Interactive] +Alt+P = <> +Alt+N = <> + diff --git a/venv/Lib/site-packages/pythonwin/pywin/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/__init__.py new file mode 100644 index 00000000..e0c62fec --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/__init__.py @@ -0,0 +1,10 @@ +# is_platform_unicode is an old variable that was never correctly used and +# is no longer referenced in pywin32. It is staying for a few releases incase +# others are looking at it, but it will go away soon! +is_platform_unicode = 0 + +# Ditto default_platform_encoding - not referenced and will die. +default_platform_encoding = "mbcs" + +# This one *is* real and used - but in practice can't be changed. +default_scintilla_encoding = "utf-8" # Scintilla _only_ supports this ATM diff --git a/venv/Lib/site-packages/pythonwin/pywin/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..de2eaae0 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/debugger/__init__.py new file mode 100644 index 00000000..76b9c7db --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/debugger/__init__.py @@ -0,0 +1,113 @@ +import sys + +# Some cruft to deal with the Pythonwin GUI booting up from a non GUI app. +def _MakeDebuggerGUI(): + app.InitInstance() + +isInprocApp = -1 +def _CheckNeedGUI(): + global isInprocApp + if isInprocApp==-1: + import win32ui + isInprocApp = win32ui.GetApp().IsInproc() + if isInprocApp: + # MAY Need it - may already have one + need = "pywin.debugger.dbgpyapp" not in sys.modules + else: + need = 0 + if need: + import pywin.framework.app + from . 
import dbgpyapp + pywin.framework.app.CreateDefaultGUI(dbgpyapp.DebuggerPythonApp) + + else: + # Check we have the appropriate editor + # No longer necessary! + pass + return need + +# Inject some methods in the top level name-space. +currentDebugger = None # Wipe out any old one on reload. + +def _GetCurrentDebugger(): + global currentDebugger + if currentDebugger is None: + _CheckNeedGUI() + from . import debugger + currentDebugger = debugger.Debugger() + return currentDebugger + +def GetDebugger(): + # An error here is not nice - as we are probably trying to + # break into the debugger on a Python error, any + # error raised by this is usually silent, and causes + # big problems later! + try: + rc = _GetCurrentDebugger() + rc.GUICheckInit() + return rc + except: + print("Could not create the debugger!") + import traceback + traceback.print_exc() + return None + +def close(): + if currentDebugger is not None: + currentDebugger.close() + +def run(cmd,globals=None, locals=None, start_stepping = 1): + _GetCurrentDebugger().run(cmd, globals,locals, start_stepping) + +def runeval(expression, globals=None, locals=None): + return _GetCurrentDebugger().runeval(expression, globals, locals) + +def runcall(*args): + return _GetCurrentDebugger().runcall(*args) + +def set_trace(): + import sys + d = _GetCurrentDebugger() + + if d.frameShutdown: return # App closing + + if d.stopframe != d.botframe: + # If im not "running" + return + + sys.settrace(None) # May be hooked + d.reset() + d.set_trace() + +# "brk" is an alias for "set_trace" ("break" is a reserved word :-( +brk = set_trace + +# Post-Mortem interface + +def post_mortem(t=None): + if t is None: + t = sys.exc_info()[2] # Will be valid if we are called from an except handler. + if t is None: + try: + t = sys.last_traceback + except AttributeError: + print("No traceback can be found from which to perform post-mortem debugging!") + print("No debugging can continue") + return + p = _GetCurrentDebugger() + if p.frameShutdown: return # App closing + # No idea why I need to settrace to None - it should have been reset by now? 
+ sys.settrace(None) + p.reset() + while t.tb_next != None: t = t.tb_next + p.bAtPostMortem = 1 + p.prep_run(None) + try: + p.interaction(t.tb_frame, t) + finally: + t = None + p.bAtPostMortem = 0 + p.done_run() + +def pm(t=None): + post_mortem(t) diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..531d3155 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/configui.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/configui.cpython-36.pyc new file mode 100644 index 00000000..a275e472 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/configui.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/dbgcon.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/dbgcon.cpython-36.pyc new file mode 100644 index 00000000..22943b5e Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/dbgcon.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/dbgpyapp.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/dbgpyapp.cpython-36.pyc new file mode 100644 index 00000000..798a32ce Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/dbgpyapp.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/debugger.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/debugger.cpython-36.pyc new file mode 100644 index 00000000..a6376e51 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/debugger.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/fail.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/fail.cpython-36.pyc new file mode 100644 index 00000000..16fb8d92 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/debugger/__pycache__/fail.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/configui.py b/venv/Lib/site-packages/pythonwin/pywin/debugger/configui.py new file mode 100644 index 00000000..e076dccc --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/debugger/configui.py @@ -0,0 +1,31 @@ +from . import dbgcon +from pywin.mfc import dialog +import win32ui + +class DebuggerOptionsPropPage(dialog.PropertyPage): + def __init__(self): + dialog.PropertyPage.__init__(self, win32ui.IDD_PP_DEBUGGER) + + def OnInitDialog(self): + options = self.options = dbgcon.LoadDebuggerOptions() + self.AddDDX(win32ui.IDC_CHECK1, dbgcon.OPT_HIDE) + self[dbgcon.OPT_STOP_EXCEPTIONS] = options[dbgcon.OPT_STOP_EXCEPTIONS] + self.AddDDX(win32ui.IDC_CHECK2, dbgcon.OPT_STOP_EXCEPTIONS) + self[dbgcon.OPT_HIDE] = options[dbgcon.OPT_HIDE] + return dialog.PropertyPage.OnInitDialog(self) + + def OnOK(self): + self.UpdateData() + dirty = 0 + for key, val in list(self.items()): + if key in self.options: + if self.options[key] != val: + self.options[key] = val + dirty = 1 + if dirty: + dbgcon.SaveDebuggerOptions(self.options) + # If there is a debugger open, then set its options. 
+ import pywin.debugger + if pywin.debugger.currentDebugger is not None: + pywin.debugger.currentDebugger.options = self.options + return 1 diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/dbgcon.py b/venv/Lib/site-packages/pythonwin/pywin/debugger/dbgcon.py new file mode 100644 index 00000000..1feb21f1 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/debugger/dbgcon.py @@ -0,0 +1,28 @@ +# General constants for the debugger + +DBGSTATE_NOT_DEBUGGING = 0 +DBGSTATE_RUNNING = 1 +DBGSTATE_BREAK = 2 +DBGSTATE_QUITTING = 3 # Attempting to back out of the debug session. + +LINESTATE_CURRENT = 0x1 # This line is where we are stopped +LINESTATE_BREAKPOINT = 0x2 # This line is a breakpoint +LINESTATE_CALLSTACK = 0x4 # This line is in the callstack. + +OPT_HIDE = 'hide' +OPT_STOP_EXCEPTIONS = 'stopatexceptions' + +import win32api, win32ui + +def DoGetOption(optsDict, optName, default): + optsDict[optName] = win32ui.GetProfileVal("Debugger Options", optName, default) + +def LoadDebuggerOptions(): + opts = {} + DoGetOption(opts, OPT_HIDE, 0) + DoGetOption(opts, OPT_STOP_EXCEPTIONS, 1) + return opts + +def SaveDebuggerOptions(opts): + for key, val in opts.items(): + win32ui.WriteProfileVal("Debugger Options", key, val) diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/dbgpyapp.py b/venv/Lib/site-packages/pythonwin/pywin/debugger/dbgpyapp.py new file mode 100644 index 00000000..b624ee02 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/debugger/dbgpyapp.py @@ -0,0 +1,47 @@ +# dbgpyapp.py - Debugger Python application class +# +import win32con +import win32ui +import sys +import string +import os +from pywin.framework import intpyapp + +version = '0.3.0' + +class DebuggerPythonApp(intpyapp.InteractivePythonApp): + def LoadMainFrame(self): + " Create the main applications frame " + self.frame = self.CreateMainFrame() + self.SetMainFrame(self.frame) + self.frame.LoadFrame(win32ui.IDR_DEBUGGER, win32con.WS_OVERLAPPEDWINDOW) + self.frame.DragAcceptFiles() # we can accept these. + self.frame.ShowWindow(win32con.SW_HIDE); + self.frame.UpdateWindow(); + + # but we do rehook, hooking the new code objects. + self.HookCommands() + + def InitInstance(self): + # Use a registry path of "Python\Pythonwin Debugger + win32ui.SetAppName(win32ui.LoadString(win32ui.IDR_DEBUGGER)) + win32ui.SetRegistryKey("Python %s" % (sys.winver,)) + # We _need_ the Scintilla color editor. + # (and we _always_ get it now :-) + + numMRU = win32ui.GetProfileVal("Settings","Recent File List Size", 10) + win32ui.LoadStdProfileSettings(numMRU) + + self.LoadMainFrame() + + # Display the interactive window if the user wants it. + from pywin.framework import interact + interact.CreateInteractiveWindowUserPreference() + + # Load the modules we use internally. + self.LoadSystemModules() + # Load additional module the user may want. + self.LoadUserModules() + +# win32ui.CreateDebuggerThread() + win32ui.EnableControlContainer() diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/debugger.py b/venv/Lib/site-packages/pythonwin/pywin/debugger/debugger.py new file mode 100644 index 00000000..06eabb14 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/debugger/debugger.py @@ -0,0 +1,985 @@ +# debugger.py + +# A debugger for Pythonwin. Built from pdb. + +# Mark Hammond (MHammond@skippinet.com.au) - Dec 94. 
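+
+# (Editor's note, based on pywin/debugger/__init__.py above: besides run(),
+# the package-level helpers include runeval(), runcall(), set_trace() and its
+# alias brk(), plus post_mortem() / pm().  A minimal way to break into this
+# debugger from user code - as fail.py later in this change does - is:
+#
+#   import pywin.debugger
+#   pywin.debugger.set_trace()   # or the alias pywin.debugger.brk()
+# )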
+ +# usage: +# >>> import pywin.debugger +# >>> pywin.debugger.GetDebugger().run("command") + +import pdb +import bdb +import sys +import string +import os +import types + +import win32ui +import win32api +import win32con +import pywin.docking.DockingBar +from pywin.mfc import dialog, object, afxres, window +from pywin.framework import app, interact, editor, scriptutils +from pywin.framework.editor.color.coloreditor import MARKER_CURRENT, MARKER_BREAKPOINT +from pywin.tools import browser, hierlist +import commctrl +import traceback + +#import win32traceutil +if win32ui.UNICODE: + LVN_ENDLABELEDIT = commctrl.LVN_ENDLABELEDITW +else: + LVN_ENDLABELEDIT = commctrl.LVN_ENDLABELEDITA + +from .dbgcon import * + +error = "pywin.debugger.error" + +def SetInteractiveContext(globs, locs): + if interact.edit is not None and interact.edit.currentView is not None: + interact.edit.currentView.SetContext(globs, locs) + +def _LineStateToMarker(ls): + if ls==LINESTATE_CURRENT: + return MARKER_CURRENT +# elif ls == LINESTATE_CALLSTACK: +# return MARKER_CALLSTACK + return MARKER_BREAKPOINT + +class HierListItem(browser.HLIPythonObject): + pass + +class HierFrameItem(HierListItem): + def __init__(self, frame, debugger): + HierListItem.__init__(self, frame, repr(frame)) + self.debugger = debugger + def GetText(self): + name = self.myobject.f_code.co_name + if not name or name == '?' : + # See if locals has a '__name__' (ie, a module) + if '__name__' in self.myobject.f_locals: + name = str(self.myobject.f_locals['__name__']) + " module" + else: + name = '' + + return "%s (%s:%d)" % (name, os.path.split(self.myobject.f_code.co_filename)[1], self.myobject.f_lineno) + def GetBitmapColumn(self): + if self.debugger.curframe is self.myobject: + return 7 + else: + return 8 + def GetSubList(self): + ret = [] + ret.append(HierFrameDict(self.myobject.f_locals, "Locals", 2)) + ret.append(HierFrameDict(self.myobject.f_globals, "Globals", 1)) + return ret + def IsExpandable(self): + return 1 + def TakeDefaultAction(self): + # Set the default frame to be this frame. + self.debugger.set_cur_frame(self.myobject) + return 1 + +class HierFrameDict(browser.HLIDict): + def __init__(self, dict, name, bitmapColumn): + self.bitmapColumn=bitmapColumn + browser.HLIDict.__init__(self, dict, name) + def GetBitmapColumn(self): + return self.bitmapColumn + +class NoStackAvailableItem(HierListItem): + def __init__(self, why): + HierListItem.__init__(self, None, why) + def IsExpandable(self): + return 0 + def GetText(self): + return self.name + def GetBitmapColumn(self): + return 8 + +class HierStackRoot(HierListItem): + def __init__( self, debugger ): + HierListItem.__init__(self, debugger, None) + self.last_stack = [] +## def __del__(self): +## print "HierStackRoot dieing" + def GetSubList(self): + debugger = self.myobject +# print self.debugger.stack, self.debugger.curframe + ret = [] + if debugger.debuggerState==DBGSTATE_BREAK: + stackUse=debugger.stack[:] + stackUse.reverse() + self.last_stack = [] + for frame, lineno in stackUse: + self.last_stack.append( (frame, lineno) ) + if frame is debugger.userbotframe: # Dont bother showing frames below our bottom frame. 
+ break + for frame, lineno in self.last_stack: + ret.append( HierFrameItem( frame, debugger ) ) +## elif debugger.debuggerState==DBGSTATE_NOT_DEBUGGING: +## ret.append(NoStackAvailableItem('')) +## else: +## ret.append(NoStackAvailableItem('')) + return ret + def GetText(self): + return 'root item' + def IsExpandable(self): + return 1 + +class HierListDebugger(hierlist.HierListWithItems): + """ Hier List of stack frames, breakpoints, whatever """ + def __init__(self): + hierlist.HierListWithItems.__init__(self, None, win32ui.IDB_DEBUGGER_HIER, None, win32api.RGB(255,0,0)) + def Setup(self, debugger): + root = HierStackRoot(debugger) + self.AcceptRoot(root) +# def Refresh(self): +# self.Setup() + +class DebuggerWindow(window.Wnd): + def __init__(self, ob): + window.Wnd.__init__(self, ob) + self.debugger = None + + def Init(self, debugger): + self.debugger = debugger + + def GetDefRect(self): + defRect = app.LoadWindowSize("Debugger Windows\\" + self.title) + if defRect[2]-defRect[0]==0: + defRect = 0, 0, 150, 150 + return defRect + + def OnDestroy(self, msg): + newSize = self.GetWindowPlacement()[4] + pywin.framework.app.SaveWindowSize("Debugger Windows\\" + self.title, newSize) + return window.Wnd.OnDestroy(self, msg) + + def OnKeyDown(self, msg): + key = msg[2] + if key in [13, 27, 32]: return 1 + if key in [46,8]: # delete/BS key + self.DeleteSelected() + return 0 + view = scriptutils.GetActiveView() + try: + firer = view.bindings.fire_key_event + except AttributeError: + firer = None + if firer is not None: + return firer(msg) + else: + return 1 + + def DeleteSelected(self): + win32api.MessageBeep() + + def EditSelected(self): + win32api.MessageBeep() + +class DebuggerStackWindow(DebuggerWindow): + title = "Stack" + def __init__(self): + DebuggerWindow.__init__(self, win32ui.CreateTreeCtrl()) + self.list = HierListDebugger() + self.listOK = 0 + def SaveState(self): + self.list.DeleteAllItems() + self.listOK = 0 + win32ui.WriteProfileVal("Debugger Windows\\" + self.title, "Visible", self.IsWindowVisible()) + def CreateWindow(self, parent): + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS + self._obj_.CreateWindow(style, self.GetDefRect(), parent, win32ui.IDC_LIST1) + self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN) + self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN) + self.list.HierInit (parent, self) + self.listOK = 0 # delayed setup + #self.list.Setup() + + def RespondDebuggerState(self, state): + assert self.debugger is not None, "Init not called" + if not self.listOK: + self.listOK = 1 + self.list.Setup(self.debugger) + else: + self.list.Refresh() + + def RespondDebuggerData(self): + try: + handle = self.GetChildItem(0) + except win32ui.error: + return # No items + while 1: + item = self.list.ItemFromHandle(handle) + col = self.list.GetBitmapColumn(item) + selCol = self.list.GetSelectedBitmapColumn(item) + if selCol is None: selCol = col + if self.list.GetItemImage(handle)!= (col, selCol): + self.list.SetItemImage(handle, col, selCol) + try: + handle = self.GetNextSiblingItem(handle) + except win32ui.error: + break + +class DebuggerListViewWindow(DebuggerWindow): + def __init__(self): + DebuggerWindow.__init__(self, win32ui.CreateListCtrl()) + def CreateWindow(self, parent): + list = self + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | commctrl.LVS_EDITLABELS | commctrl.LVS_REPORT + self._obj_.CreateWindow(style, self.GetDefRect(), parent, 
win32ui.IDC_LIST1) + self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN) + self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN) + list = self + title, width = self.columns[0] + itemDetails = (commctrl.LVCFMT_LEFT, width, title, 0) + list.InsertColumn(0, itemDetails) + col = 1 + for title, width in self.columns[1:]: + col = col + 1 + itemDetails = (commctrl.LVCFMT_LEFT, width, title, 0) + list.InsertColumn(col, itemDetails) + parent.HookNotify(self.OnListEndLabelEdit, LVN_ENDLABELEDIT) + parent.HookNotify(self.OnItemRightClick, commctrl.NM_RCLICK) + parent.HookNotify(self.OnItemDoubleClick, commctrl.NM_DBLCLK) + + def RespondDebuggerData(self): + pass + + def RespondDebuggerState(self, state): + pass + + def EditSelected(self): + try: + sel = self.GetNextItem(-1, commctrl.LVNI_SELECTED) + except win32ui.error: + return + self.EditLabel(sel) + + def OnKeyDown(self, msg): + key = msg[2] + # If someone starts typing, they probably are trying to edit the text! + if chr(key) in string.ascii_uppercase: + self.EditSelected() + return 0 + return DebuggerWindow.OnKeyDown(self, msg) + + def OnItemDoubleClick(self, notify_data, extra): + self.EditSelected() + + def OnItemRightClick(self, notify_data, extra): + # First select the item we right-clicked on. + pt = self.ScreenToClient(win32api.GetCursorPos()) + flags, hItem, subitem = self.HitTest(pt) + if hItem==-1 or commctrl.TVHT_ONITEM & flags==0: + return None + self.SetItemState(hItem, commctrl.LVIS_SELECTED, commctrl.LVIS_SELECTED) + + menu = win32ui.CreatePopupMenu() + menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1000, "Edit item") + menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1001, "Delete item") + dockbar = self.GetParent() + if dockbar.IsFloating(): + hook_parent = win32ui.GetMainFrame() + else: + hook_parent = self.GetParentFrame() + hook_parent.HookCommand(self.OnEditItem, 1000) + hook_parent.HookCommand(self.OnDeleteItem, 1001) + menu.TrackPopupMenu(win32api.GetCursorPos()) # track at mouse position. 
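+ # (Editor's note: the menu ids 1000 and 1001 used above were hooked just
+ # before this to OnEditItem / OnDeleteItem - on the main frame when the
+ # bar is floating, otherwise on the parent frame - so the popup commands
+ # reach the right window.)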
+ return None + + def OnDeleteItem(self,command, code): + self.DeleteSelected() + def OnEditItem(self, command, code): + self.EditSelected() + +class DebuggerBreakpointsWindow(DebuggerListViewWindow): + title = "Breakpoints" + columns = [ ("Condition", 70), ("Location", 1024)] + + def SaveState(self): + items = [] + for i in range(self.GetItemCount()): + items.append(self.GetItemText(i,0)) + items.append(self.GetItemText(i,1)) + win32ui.WriteProfileVal("Debugger Windows\\" + self.title, "BreakpointList", "\t".join(items)) + win32ui.WriteProfileVal("Debugger Windows\\" + self.title, "Visible", self.IsWindowVisible()) + return 1 + + def OnListEndLabelEdit(self, std, extra): + item = extra[0] + text = item[4] + if text is None: return + + item_id = self.GetItem(item[0])[6] + + from bdb import Breakpoint + for bplist in Breakpoint.bplist.values(): + for bp in bplist: + if id(bp)==item_id: + if text.strip().lower()=="none": + text = None + bp.cond = text + break + self.RespondDebuggerData() + + def DeleteSelected(self): + try: + num = self.GetNextItem(-1, commctrl.LVNI_SELECTED) + item_id = self.GetItem(num)[6] + from bdb import Breakpoint + for bplist in list(Breakpoint.bplist.values()): + for bp in bplist: + if id(bp)==item_id: + self.debugger.clear_break(bp.file, bp.line) + break + except win32ui.error: + win32api.MessageBeep() + self.RespondDebuggerData() + + def RespondDebuggerData(self): + l = self + l.DeleteAllItems() + index = -1 + from bdb import Breakpoint + for bplist in Breakpoint.bplist.values(): + for bp in bplist: + baseName = os.path.split(bp.file)[1] + cond = bp.cond + item = index+1, 0, 0, 0, str(cond), 0, id(bp) + index = l.InsertItem(item) + l.SetItemText(index, 1, "%s: %s" % (baseName, bp.line)) + +class DebuggerWatchWindow(DebuggerListViewWindow): + title = "Watch" + columns = [ ("Expression", 70), ("Value", 1024)] + + def CreateWindow(self, parent): + DebuggerListViewWindow.CreateWindow(self, parent) + items = win32ui.GetProfileVal("Debugger Windows\\" + self.title, "Items", "").split("\t") + index = -1 + for item in items: + if item: + index = self.InsertItem(index+1, item) + self.InsertItem(index+1, "") + + def SaveState(self): + items = [] + for i in range(self.GetItemCount()-1): + items.append(self.GetItemText(i,0)) + win32ui.WriteProfileVal("Debugger Windows\\" + self.title, "Items", "\t".join(items)) + win32ui.WriteProfileVal("Debugger Windows\\" + self.title, "Visible", self.IsWindowVisible()) + return 1 + + def OnListEndLabelEdit(self, std, extra): + item = extra[0] + itemno = item[0] + text = item[4] + if text is None: return + self.SetItemText(itemno, 0, text) + if itemno == self.GetItemCount()-1: + self.InsertItem(itemno+1, "") + self.RespondDebuggerState(self.debugger.debuggerState) + + def DeleteSelected(self): + try: + num = self.GetNextItem(-1, commctrl.LVNI_SELECTED) + if num < self.GetItemCount()-1: # We cant delete the last + self.DeleteItem(num) + except win32ui.error: + win32api.MessageBeep() + + def RespondDebuggerState(self, state): + globs = locs = None + if state==DBGSTATE_BREAK: + if self.debugger.curframe: + globs = self.debugger.curframe.f_globals + locs = self.debugger.curframe.f_locals + elif state==DBGSTATE_NOT_DEBUGGING: + import __main__ + globs = locs = __main__.__dict__ + for i in range(self.GetItemCount()-1): + text = self.GetItemText(i, 0) + if globs is None: + val = "" + else: + try: + val = repr( eval( text, globs, locs) ) + except SyntaxError: + val = "Syntax Error" + except: + t, v, tb = sys.exc_info() + val = 
traceback.format_exception_only(t, v)[0].strip() + tb = None # prevent a cycle. + self.SetItemText(i, 1, val) + +def CreateDebuggerDialog(parent, klass): + control = klass() + control.CreateWindow(parent) + return control + +DebuggerDialogInfos = ( + (0xe810, DebuggerStackWindow, None), + (0xe811, DebuggerBreakpointsWindow, (10, 10)), + (0xe812, DebuggerWatchWindow, None), + ) + +# Prepare all the "control bars" for this package. +# If control bars are not all loaded when the toolbar-state functions are +# called, things go horribly wrong. +def PrepareControlBars(frame): + style = win32con.WS_CHILD | afxres.CBRS_SIZE_DYNAMIC | afxres.CBRS_TOP | afxres.CBRS_TOOLTIPS | afxres.CBRS_FLYBY + tbd = win32ui.CreateToolBar (frame, style, win32ui.ID_VIEW_TOOLBAR_DBG) + tbd.ModifyStyle(0, commctrl.TBSTYLE_FLAT) + tbd.LoadToolBar(win32ui.IDR_DEBUGGER) + tbd.EnableDocking(afxres.CBRS_ALIGN_ANY) + tbd.SetWindowText("Debugger") + frame.DockControlBar(tbd) + + # and the other windows. + for id, klass, float in DebuggerDialogInfos: + try: + frame.GetControlBar(id) + exists=1 + except win32ui.error: + exists=0 + if exists: continue + bar = pywin.docking.DockingBar.DockingBar() + style=win32con.WS_CHILD | afxres.CBRS_LEFT # don't create visible. + bar.CreateWindow(frame, CreateDebuggerDialog, klass.title, id, style, childCreatorArgs=(klass,)) + bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC) + bar.EnableDocking(afxres.CBRS_ALIGN_ANY) + if float is None: + frame.DockControlBar(bar) + else: + frame.FloatControlBar(bar, float, afxres.CBRS_ALIGN_ANY) + + ## frame.ShowControlBar(bar, 0, 1) + + +SKIP_NONE=0 +SKIP_STEP=1 +SKIP_RUN=2 + +debugger_parent=pdb.Pdb +class Debugger(debugger_parent): + def __init__(self): + self.inited = 0 + self.skipBotFrame = SKIP_NONE + self.userbotframe = None + self.frameShutdown = 0 + self.pumping = 0 + self.debuggerState = DBGSTATE_NOT_DEBUGGING # Assume so, anyway. + self.shownLineCurrent = None # The last filename I highlighted. + self.shownLineCallstack = None # The last filename I highlighted. + self.last_cmd_debugged = "" + self.abortClosed = 0 + self.isInitialBreakpoint = 0 + debugger_parent.__init__(self) + + # See if any break-points have been set in the editor + for doc in editor.editorTemplate.GetDocumentList(): + lineNo = -1 + while 1: + lineNo = doc.MarkerGetNext(lineNo+1, MARKER_BREAKPOINT) + if lineNo <= 0: break + self.set_break(doc.GetPathName(), lineNo) + + self.reset() + self.inForcedGUI = win32ui.GetApp().IsInproc() + self.options = LoadDebuggerOptions() + self.bAtException = self.bAtPostMortem = 0 + + def __del__(self): + self.close() + def close(self, frameShutdown = 0): + # abortClose indicates if we have total shutdown + # (ie, main window is dieing) + if self.pumping: + # Can stop pump here, as it only posts a message, and + # returns immediately. + if not self.StopDebuggerPump(): # User cancelled close. + return 0 + # NOTE - from this point on the close can not be + # stopped - the WM_QUIT message is already in the queue. + self.frameShutdown = frameShutdown + if not self.inited: return 1 + self.inited = 0 + + SetInteractiveContext(None, None) + + frame = win32ui.GetMainFrame() + # Hide the debuger toolbars (as they wont normally form part of the main toolbar state. + for id, klass, float in DebuggerDialogInfos: + try: + tb = frame.GetControlBar(id) + if tb.dialog is not None: # We may never have actually been shown. 
+ tb.dialog.SaveState() + frame.ShowControlBar(tb, 0, 1) + except win32ui.error: + pass + + self._UnshowCurrentLine() + self.set_quit() + return 1 + + def StopDebuggerPump(self): + assert self.pumping, "Can't stop the debugger pump if Im not pumping!" + # After stopping a pump, I may never return. + if self.GUIAboutToFinishInteract(): + self.pumping = 0 + win32ui.StopDebuggerPump() # Posts a message, so we do return. + return 1 + return 0 + + def get_option(self, option): + """Public interface into debugger options + """ + try: + return self.options[option] + except KeyError: + raise error("Option %s is not a valid option" % option) + + def prep_run(self, cmd): + pass + def done_run(self, cmd=None): + self.RespondDebuggerState(DBGSTATE_NOT_DEBUGGING) + self.close() + def canonic(self, fname): + return os.path.abspath(fname).lower() + def reset(self): + debugger_parent.reset(self) + self.userbotframe = None + self.UpdateAllLineStates() + self._UnshowCurrentLine() + + + def setup(self, f, t): + debugger_parent.setup(self, f, t) + self.bAtException = t is not None + + def set_break(self, filename, lineno, temporary=0, cond = None): + filename = self.canonic(filename) + self.SetLineState(filename, lineno, LINESTATE_BREAKPOINT) + return debugger_parent.set_break(self, filename, lineno, temporary, cond) + + def clear_break(self, filename, lineno): + filename = self.canonic(filename) + self.ResetLineState(filename, lineno, LINESTATE_BREAKPOINT) + return debugger_parent.clear_break(self, filename, lineno) + + def cmdloop(self): + if self.frameShutdown: return # App in the process of closing - never break in! + self.GUIAboutToBreak() + + def print_stack_entry(self, frame): + # We dont want a stack printed - our GUI is better :-) + pass + + def user_return(self, frame, return_value): + # Same as parent, just no "print" + # This function is called when a return trap is set here + frame.f_locals['__return__'] = return_value + self.interaction(frame, None) + + def user_call(self, frame, args): + # base class has an annoying 'print' that adds no value to us... + if self.stop_here(frame): + self.interaction(frame, None) + + def user_exception(self, frame, exc_info): + # This function is called if an exception occurs, + # but only if we are to stop at or just below this level + (exc_type, exc_value, exc_traceback) = exc_info + if self.get_option(OPT_STOP_EXCEPTIONS): + frame.f_locals['__exception__'] = exc_type, exc_value + print("Unhandled exception while debugging...") + # on both py2k and py3k, we may be called with exc_value + # being the args to the exception, or it may already be + # instantiated (IOW, PyErr_Normalize() hasn't been + # called on the args). In py2k this is fine, but in + # py3k, traceback.print_exception fails. So on py3k + # we instantiate an exception instance to print. 
+ if sys.version_info > (3,) and not isinstance(exc_value, BaseException): + # they are args - may be a single item or already a tuple + if not isinstance(exc_value, tuple): + exc_value = (exc_value,) + exc_value = exc_type(*exc_value) + + traceback.print_exception(exc_type, exc_value, exc_traceback) + self.interaction(frame, exc_traceback) + + def user_line(self, frame): + if frame.f_lineno==0: return + debugger_parent.user_line(self, frame) + + def stop_here(self, frame): + if self.isInitialBreakpoint: + self.isInitialBreakpoint = 0 + self.set_continue() + return 0 + if frame is self.botframe and self.skipBotFrame == SKIP_RUN: + self.set_continue() + return 0 + if frame is self.botframe and self.skipBotFrame == SKIP_STEP: + self.set_step() + return 0 + return debugger_parent.stop_here(self, frame) + + def run(self, cmd,globals=None, locals=None, start_stepping = 1): + if not isinstance(cmd, (str, types.CodeType)): + raise TypeError("Only strings can be run") + self.last_cmd_debugged = cmd + if start_stepping: + self.isInitialBreakpoint = 0 + else: + self.isInitialBreakpoint = 1 + try: + if globals is None: + import __main__ + globals = __main__.__dict__ + if locals is None: + locals = globals + self.reset() + self.prep_run(cmd) + sys.settrace(self.trace_dispatch) + if type(cmd) != types.CodeType: + cmd = cmd+'\n' + try: + try: + if start_stepping: self.skipBotFrame = SKIP_STEP + else: self.skipBotFrame = SKIP_RUN + exec(cmd, globals, locals) + except bdb.BdbQuit: + pass + finally: + self.skipBotFrame = SKIP_NONE + self.quitting = 1 + sys.settrace(None) + + finally: + self.done_run(cmd) + + def runeval(self, expr, globals=None, locals=None): + self.prep_run(expr) + try: + debugger_parent.runeval(self, expr, globals, locals) + finally: + self.done_run(expr) + + def runexec(self, what, globs=None, locs=None): + self.reset() + sys.settrace(self.trace_dispatch) + try: + try: + exec(what, globs, locs) + except bdb.BdbQuit: + pass + finally: + self.quitting = 1 + sys.settrace(None) + + def do_set_step(self): + if self.GUIAboutToRun(): + self.set_step() + + def do_set_next(self): + if self.GUIAboutToRun(): + self.set_next(self.curframe) + + def do_set_return(self): + if self.GUIAboutToRun(): + self.set_return(self.curframe) + + def do_set_continue(self): + if self.GUIAboutToRun(): + self.set_continue() + + def set_quit(self): + ok = 1 + if self.pumping: + ok = self.StopDebuggerPump() + if ok: + debugger_parent.set_quit(self) + + def _dump_frame_(self, frame,name=None): + if name is None: name = "" + if frame: + if frame.f_code and frame.f_code.co_filename: + fname = os.path.split(frame.f_code.co_filename)[1] + else: + fname = "??" + print(repr(name), fname, frame.f_lineno, frame) + else: + print(repr(name), "None") + + def set_trace(self): + # Start debugging from _2_ levels up! + try: + 1 + '' + except: + frame = sys.exc_info()[2].tb_frame.f_back.f_back + self.reset() + self.userbotframe = None + while frame: + # scriptutils.py creates a local variable with name + # '_debugger_stop_frame_', and we dont go past it + # (everything above this is Pythonwin framework code) + if "_debugger_stop_frame_" in frame.f_locals: + self.userbotframe = frame + break + + frame.f_trace = self.trace_dispatch + self.botframe = frame + frame = frame.f_back + self.set_step() + sys.settrace(self.trace_dispatch) + + def set_cur_frame(self, frame): + # Sets the "current" frame - ie, the frame with focus. This is the + # frame on which "step out" etc actions are taken. + # This may or may not be the top of the stack. 
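+ # (Editor's note: HierFrameItem.TakeDefaultAction above calls this when a
+ # frame is activated in the Stack window, which also retargets the
+ # interactive window's context via SetInteractiveContext below.)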
+ assert frame is not None, "You must pass a valid frame" + self.curframe = frame + for f, index in self.stack: + if f is frame: + self.curindex = index + break + else: + assert 0, "Can't find the frame in the stack." + SetInteractiveContext(frame.f_globals, frame.f_locals) + self.GUIRespondDebuggerData() + self.ShowCurrentLine() + + def IsBreak(self): + return self.debuggerState == DBGSTATE_BREAK + + def IsDebugging(self): + return self.debuggerState != DBGSTATE_NOT_DEBUGGING + + def RespondDebuggerState(self, state): + if state == self.debuggerState: return + if state==DBGSTATE_NOT_DEBUGGING: # Debugger exists, but not doing anything + title = "" + elif state==DBGSTATE_RUNNING: # Code is running under the debugger. + title = " - running" + elif state==DBGSTATE_BREAK: # We are at a breakpoint or stepping or whatever. + if self.bAtException: + if self.bAtPostMortem: + title = " - post mortem exception" + else: + title = " - exception" + else: + title = " - break" + else: + raise error("Invalid debugger state passed!") + win32ui.GetMainFrame().SetWindowText(win32ui.LoadString(win32ui.IDR_MAINFRAME) + title) + if self.debuggerState == DBGSTATE_QUITTING and state != DBGSTATE_NOT_DEBUGGING: + print("Ignoring state change cos Im trying to stop!", state) + return + self.debuggerState = state + try: + frame = win32ui.GetMainFrame() + except win32ui.error: + frame = None + if frame is not None: + for id, klass, float in DebuggerDialogInfos: + cb = win32ui.GetMainFrame().GetControlBar(id).dialog + cb.RespondDebuggerState(state) + # Tell each open editor window about the state transition + for doc in editor.editorTemplate.GetDocumentList(): + doc.OnDebuggerStateChange(state) + self.ShowCurrentLine() + + # + # GUI debugger interface. + # + def GUICheckInit(self): + if self.inited: return + self.inited = 1 + frame = win32ui.GetMainFrame() + + # Ensure the debugger windows are attached to the debugger. + for id, klass, float in DebuggerDialogInfos: + w = frame.GetControlBar(id) + w.dialog.Init(self) + # Show toolbar if it was visible during last debug session + # This would be better done using a CDockState, but that class is not wrapped yet + if win32ui.GetProfileVal("Debugger Windows\\" + w.dialog.title, "Visible", 0): + frame.ShowControlBar(w, 1, 1) + + # ALWAYS show debugging toolbar, regardless of saved state + tb = frame.GetControlBar(win32ui.ID_VIEW_TOOLBAR_DBG) + frame.ShowControlBar(tb, 1, 1) + self.GUIRespondDebuggerData() + +# frame.RecalcLayout() + + def GetDebuggerBar(self, barName): + frame = win32ui.GetMainFrame() + for id, klass, float in DebuggerDialogInfos: + if klass.title == barName: + return frame.GetControlBar(id) + assert 0, "Can't find a bar of that name!" + + def GUIRespondDebuggerData(self): + if not self.inited: # GUI not inited - no toolbars etc. + return + + for id, klass, float in DebuggerDialogInfos: + cb = win32ui.GetMainFrame().GetControlBar(id).dialog + cb.RespondDebuggerData() + + def GUIAboutToRun(self): + if not self.StopDebuggerPump(): + return 0 + self._UnshowCurrentLine() + self.RespondDebuggerState(DBGSTATE_RUNNING) + SetInteractiveContext(None, None) + return 1 + + def GUIAboutToBreak(self): + "Called as the GUI debugger is about to get context, and take control of the running program." + self.GUICheckInit() + self.RespondDebuggerState(DBGSTATE_BREAK) + self.GUIAboutToInteract() + if self.pumping: + print("!!! 
Already pumping - outa here") + return + self.pumping = 1 + win32ui.StartDebuggerPump() # NOTE - This will NOT return until the user is finished interacting + assert not self.pumping, "Should not be pumping once the pump has finished" + if self.frameShutdown: # User shut down app while debugging + win32ui.GetMainFrame().PostMessage(win32con.WM_CLOSE) + + def GUIAboutToInteract(self): + "Called as the GUI is about to perform any interaction with the user" + frame = win32ui.GetMainFrame() + # Remember the enabled state of our main frame + # may be disabled primarily if a modal dialog is displayed. + # Only get at enabled via GetWindowLong. + self.bFrameEnabled = frame.IsWindowEnabled() + self.oldForeground = None + fw = win32ui.GetForegroundWindow() + if fw is not frame: + self.oldForeground = fw +# fw.EnableWindow(0) Leave enabled for now? + self.oldFrameEnableState = frame.IsWindowEnabled() + frame.EnableWindow(1) + if self.inForcedGUI and not frame.IsWindowVisible(): + frame.ShowWindow(win32con.SW_SHOW) + frame.UpdateWindow() + if self.curframe: + SetInteractiveContext(self.curframe.f_globals, self.curframe.f_locals) + else: + SetInteractiveContext(None, None) + self.GUIRespondDebuggerData() + + def GUIAboutToFinishInteract(self): + """Called as the GUI is about to finish any interaction with the user + Returns non zero if we are allowed to stop interacting""" + if self.oldForeground is not None: + try: + win32ui.GetMainFrame().EnableWindow(self.oldFrameEnableState) + self.oldForeground.EnableWindow(1) + except win32ui.error: + # old window may be dead. + pass +# self.oldForeground.SetForegroundWindow() - fails?? + if not self.inForcedGUI: + return 1 # Never a problem, and nothing else to do. + # If we are running a forced GUI, we may never get an opportunity + # to interact again. Therefore we perform a "SaveAll", to makesure that + # any documents are saved before leaving. + for template in win32ui.GetApp().GetDocTemplateList(): + for doc in template.GetDocumentList(): + if not doc.SaveModified(): + return 0 + # All documents saved - now hide the app and debugger. + if self.get_option(OPT_HIDE): + frame = win32ui.GetMainFrame() + frame.ShowWindow(win32con.SW_HIDE) + return 1 + + # + # Pythonwin interface - all stuff to do with showing source files, + # changing line states etc. + # + def ShowLineState(self, fileName, lineNo, lineState): + # Set the state of a line, open if not already + self.ShowLineNo(fileName, lineNo) + self.SetLineState(fileName, lineNo, lineState) + + def SetLineState(self, fileName, lineNo, lineState): + # Set the state of a line if the document is open. + doc = editor.editorTemplate.FindOpenDocument(fileName) + if doc is not None: + marker = _LineStateToMarker(lineState) + if not doc.MarkerCheck(lineNo, marker): + doc.MarkerAdd(lineNo, marker) + + def ResetLineState(self, fileName, lineNo, lineState): + # Set the state of a line if the document is open. + doc = editor.editorTemplate.FindOpenDocument(fileName) + if doc is not None: + marker = _LineStateToMarker(lineState) + doc.MarkerDelete(lineNo, marker) + + def UpdateDocumentLineStates(self, doc): + # Show all lines in their special status color. If the doc is open + # all line states are reset. + doc.MarkerDeleteAll( MARKER_BREAKPOINT ) + doc.MarkerDeleteAll( MARKER_CURRENT ) + fname = self.canonic(doc.GetPathName()) + # Now loop over all break-points + for line in self.breaks.get(fname, []): + doc.MarkerAdd(line, MARKER_BREAKPOINT) + # And the current line if in this document. 
+ if self.shownLineCurrent and fname == self.shownLineCurrent[0]: + lineNo = self.shownLineCurrent[1] + if not doc.MarkerCheck(lineNo, MARKER_CURRENT): + doc.MarkerAdd(lineNo, MARKER_CURRENT) +# if self.shownLineCallstack and fname == self.shownLineCallstack[0]: +# doc.MarkerAdd(self.shownLineCallstack[1], MARKER_CURRENT) + + def UpdateAllLineStates(self): + for doc in editor.editorTemplate.GetDocumentList(): + self.UpdateDocumentLineStates(doc) + + def ShowCurrentLine(self): + # Show the current line. Only ever 1 current line - undoes last current + # The "Current Line" is self.curframe. + # The "Callstack Line" is the top of the stack. + # If current == callstack, only show as current. + self._UnshowCurrentLine() # un-highlight the old one. + if self.curframe: + fileName = self.canonic(self.curframe.f_code.co_filename) + lineNo = self.curframe.f_lineno + self.shownLineCurrent = fileName, lineNo + self.ShowLineState(fileName, lineNo, LINESTATE_CURRENT) + + def _UnshowCurrentLine(self): + "Unshow the current line, and forget it" + if self.shownLineCurrent is not None: + fname, lineno = self.shownLineCurrent + self.ResetLineState(fname, lineno, LINESTATE_CURRENT) + self.shownLineCurrent = None + + def ShowLineNo( self, filename, lineno ): + wasOpen = editor.editorTemplate.FindOpenDocument(filename) is not None + if os.path.isfile(filename) and scriptutils.JumpToDocument(filename, lineno): + if not wasOpen: + doc = editor.editorTemplate.FindOpenDocument(filename) + if doc is not None: + self.UpdateDocumentLineStates(doc) + return 1 + return 0 + return 1 + else: + # Can't find the source file - linecache may have it? + import linecache + line = linecache.getline(filename, lineno) + print("%s(%d): %s" % (os.path.basename(filename), lineno, line[:-1].expandtabs(4))) + return 0 diff --git a/venv/Lib/site-packages/pythonwin/pywin/debugger/fail.py b/venv/Lib/site-packages/pythonwin/pywin/debugger/fail.py new file mode 100644 index 00000000..9550444b --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/debugger/fail.py @@ -0,0 +1,48 @@ +# NOTE NOTE - This module is designed to fail! +# +# The ONLY purpose for this script is testing/demoing the +# Pythonwin debugger package. + +# It does nothing useful, and it even doesnt do that! + +import pywin.debugger, sys, time +import traceback + +def a(): + a=1 + try: + b() + except: + # Break into the debugger with the exception information. + pywin.debugger.post_mortem(sys.exc_info()[2]) + a=1 + a=2 + a=3 + a=4 + pass + +def b(): + b=1 + pywin.debugger.set_trace() + # After importing or running this module, you are likely to be + # sitting at the next line. This is because we explicitely + # broke into the debugger using the "set_trace() function + # "pywin.debugger.brk()" is a shorter alias for this. + c() + pass + +def c(): + c=1 + d() + +def d(): + d=1 + e(d) + raise ValueError("Hi") + +def e(arg): + e=1 + time.sleep(1) + return e + +a() diff --git a/venv/Lib/site-packages/pythonwin/pywin/default.cfg b/venv/Lib/site-packages/pythonwin/pywin/default.cfg new file mode 100644 index 00000000..55371f6b --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/default.cfg @@ -0,0 +1,215 @@ +# The default keyboard etc configuration file for Pythonwin. +# +# The format of this file is very similar to a Windows INI file. +# Sections are identified with [Section] lines, but comments +# use the standatd Python # character. Depending on the section, +# lines may not be in the standard "key=value" format. + +# NOTE: You should not need to modify this file. 
+# Simply create a new .CFG file, and add an entry: +# [General] +# BasedOn = Default +# +# and add your customisations. Then select your new configuration +# from the Pythonwin View/Options/Editor dialog. +# This way you get to add your own customisations, +# but still take advantage of changes to the default +# configuration in new releases. + +# See IDLE.cfg for an example extension configuration. +# +########################################################################## + +[IDLE Extensions] + +# The list of IDLE extensions to load. The extensions +# AutoIndent, AutoFormat and possibly others are +# "built-in", so do not need specifying. + +FormatParagraph +CallTips + + +[Keys] + +# The list of _default_ key definitions. +# See [Keys:Interactive] and [Keys:Editor] below for further defs. + +#Events of the format <> +# are events defined in IDLE extensions. + +Alt+Q = <> + +Ctrl+W = ViewWhitespace +Ctrl+Shift+8 = ViewWhitespace # The MSVC default key def. + +Ctrl+Shift+F = ViewFixedFont + +# Auto-complete, call-tips, etc. +Alt+/ = <> +Ctrl+Space = <> +( = <> +) = <> +Up = <> +Down = <> +Left = <> +Right = <> +. = KeyDot + +# Debugger - These are the MSVC default keys, for want of a better choice. +F9 = DbgBreakpointToggle +F5 = DbgGo +Shift+F5 = DbgClose +F11 = DbgStep +F10 = DbgStepOver +Shift+F11 = DbgStepOut + +Ctrl+F3 = AutoFindNext + + +[Keys:Editor] +# Key bindings specific to the editor +F2 = GotoNextBookmark +Ctrl+F2 = ToggleBookmark +Ctrl+G = GotoLine + +Alt+I = ShowInteractiveWindow +Alt-B = AddBanner # A sample Event defined in this file. + +# Block operations +Alt+3 = <> +Shift+Alt+3 = <> +Alt+4 = <> # IDLE default. +Alt+5 = <> +Alt+6 = <> + +# Tabs and other indent features +Back = <> +Ctrl+T = <> +Alt+U = <> +Enter = EnterKey +Tab = TabKey +Shift-Tab = <> + +# Folding +Add = FoldExpand +Alt+Add = FoldExpandAll +Shift+Add = FoldExpandSecondLevel +Subtract = FoldCollapse +Alt+Subtract = FoldCollapseAll +Shift+Subtract = FoldCollapseSecondLevel +Multiply = FoldTopLevel + +[Keys:Interactive] +# Key bindings specific to the interactive window. +# History for the interactive window +Ctrl+Up = <> +Ctrl+Down = <> +Enter = ProcessEnter +Ctrl+Enter = ProcessEnter +Shift+Enter = ProcessEnter +Esc = ProcessEsc +Alt+I = WindowBack # Toggle back to previous window. +Home = InteractiveHome # A sample Event defined in this file. +Shift+Home = InteractiveHomeExtend # A sample Event defined in this file. + +# When docked, the Ctrl+Tab and Shift+Ctrl+Tab keys dont work as expected. +Ctrl+Tab = MDINext +Ctrl+Shift+Tab = MDIPrev + +[Extensions] +# Python event handlers specific to this config file. +# All functions not starting with an "_" are assumed +# to be events, and take 2 params: +# * editor_window is the same object passed to IDLE +# extensions. editor_window.text is a text widget +# that conforms to the Tk text widget interface. +# * event is the event being fired. Will always be None +# in the current implementation. + +# Simply by defining these functions, they are available as +# events. +# Note that we bind keystrokes to these events in the various +# [Keys] sections. + +# Add a simple file/class/function simple banner +def AddBanner(editor_window, event): + + text = editor_window.text + big_line = "#" * 70 + banner = "%s\n## \n## \n## \n%s\n" % (big_line, big_line) + + # Insert at the start of the current line. + pos = text.index("insert linestart") + + text.undo_block_start() # Allow action to be undone as a single unit. 
+ text.insert(pos, banner) + text.undo_block_stop() + + # Now set the insert point to the middle of the banner. + line, col = [int(s) for s in pos.split(".")] + text.mark_set("insert", "%d.1 lineend" % (line+2, ) ) + + +# Here is a sample event bound to the "Home" key in the +# interactive window +def InteractiveHome(editor_window, event): + return _DoInteractiveHome(editor_window.text, 0) + +def InteractiveHomeExtend(editor_window, event): + return _DoInteractiveHome(editor_window.text, 1) + +def _DoInteractiveHome(text, extend): + import sys + # If Scintilla has an autocomplete window open, then let Scintilla handle it. + if text.edit.SCIAutoCActive(): + return 1 + of_interest = "insert linestart + %d c" % len(sys.ps1) + if not text.compare("insert", "==", of_interest) and \ + text.get("insert linestart", of_interest) in [sys.ps1, sys.ps2]: # Not sys.ps? line + end = of_interest + else: + end = "insert linestart" + + if extend: start = "insert" + else: start = end + text.tag_add("sel", start, end) + +# From Niki Spahie +def AutoFindNext(editor_window, event): + "find selected text or word under cursor" + + from pywin.scintilla import find + from pywin.scintilla import scintillacon + + try: + sci = editor_window.edit + word = sci.GetSelText() + if word: + find.lastSearch.findText = word + find.lastSearch.sel = sci.GetSel() + else: + pos = sci.SendScintilla( scintillacon.SCI_GETCURRENTPOS ) + start = sci.SendScintilla( scintillacon.SCI_WORDSTARTPOSITION, pos, 1 ) + end = sci.SendScintilla( scintillacon.SCI_WORDENDPOSITION, pos, 1 ) + word = sci.GetTextRange( start, end ) + if word: + find.lastSearch.findText = word + find.lastSearch.sel = (start,end) + except Exception: + import traceback + traceback.print_exc() + find.FindNext() + + +# A couple of generic events. 
+def Beep(editor_window, event): + editor_window.text.beep() + +def DoNothing(editor_window, event): + pass + +def ContinueEvent(editor_window, event): + # Almost an "unbind" - allows Pythonwin/MFC to handle the keystroke + return 1 + diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..3bcd46dc Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/ideoptions.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/ideoptions.cpython-36.pyc new file mode 100644 index 00000000..27abe34d Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/ideoptions.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/list.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/list.cpython-36.pyc new file mode 100644 index 00000000..db234789 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/list.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/login.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/login.cpython-36.pyc new file mode 100644 index 00000000..196fd3e8 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/login.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/status.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/status.cpython-36.pyc new file mode 100644 index 00000000..4e0bafc5 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/dialogs/__pycache__/status.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/ideoptions.py b/venv/Lib/site-packages/pythonwin/pywin/dialogs/ideoptions.py new file mode 100644 index 00000000..3db1b9fa --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/dialogs/ideoptions.py @@ -0,0 +1,116 @@ +# The property page to define generic IDE options for Pythonwin + +from pywin.mfc import dialog +from pywin.framework import interact +import win32ui +import win32con + +buttonControlMap = { + win32ui.IDC_BUTTON1: win32ui.IDC_EDIT1, + win32ui.IDC_BUTTON2: win32ui.IDC_EDIT2, + win32ui.IDC_BUTTON3: win32ui.IDC_EDIT3, +} + +class OptionsPropPage(dialog.PropertyPage): + def __init__(self): + dialog.PropertyPage.__init__(self, win32ui.IDD_PP_IDE) + self.AddDDX(win32ui.IDC_CHECK1, "bShowAtStartup") + self.AddDDX(win32ui.IDC_CHECK2, "bDocking") + self.AddDDX(win32ui.IDC_EDIT4, 'MRUSize', "i") + + def OnInitDialog(self): + + edit = self.GetDlgItem(win32ui.IDC_EDIT1) + format = eval(win32ui.GetProfileVal(interact.sectionProfile, interact.STYLE_INTERACTIVE_PROMPT, str(interact.formatInput))) + edit.SetDefaultCharFormat(format) + edit.SetWindowText("Input Text") + + edit = self.GetDlgItem(win32ui.IDC_EDIT2) + format = eval(win32ui.GetProfileVal(interact.sectionProfile, interact.STYLE_INTERACTIVE_OUTPUT, str(interact.formatOutput))) + edit.SetDefaultCharFormat(format) + edit.SetWindowText("Output Text") + + edit = 
self.GetDlgItem(win32ui.IDC_EDIT3) + format = eval(win32ui.GetProfileVal(interact.sectionProfile, interact.STYLE_INTERACTIVE_ERROR, str(interact.formatOutputError))) + edit.SetDefaultCharFormat(format) + edit.SetWindowText("Error Text") + + self['bShowAtStartup'] = interact.LoadPreference("Show at startup", 1) + self['bDocking'] = interact.LoadPreference("Docking", 0) + self['MRUSize'] = win32ui.GetProfileVal("Settings","Recent File List Size", 10) + + # Hook the button clicks. + self.HookCommand(self.HandleCharFormatChange, win32ui.IDC_BUTTON1) + self.HookCommand(self.HandleCharFormatChange, win32ui.IDC_BUTTON2) + self.HookCommand(self.HandleCharFormatChange, win32ui.IDC_BUTTON3) + + # Ensure the spin control remains in range. + spinner = self.GetDlgItem(win32ui.IDC_SPIN1) + spinner.SetRange(1, 16) + + return dialog.PropertyPage.OnInitDialog(self) + + # Called to save away the new format tuple for the specified item. + def HandleCharFormatChange(self, id, code): + if code == win32con.BN_CLICKED: + editId = buttonControlMap.get(id) + assert editId is not None, "Format button has no associated edit control" + editControl = self.GetDlgItem(editId) + existingFormat = editControl.GetDefaultCharFormat() + flags = win32con.CF_SCREENFONTS + d=win32ui.CreateFontDialog(existingFormat, flags, None, self) + if d.DoModal()==win32con.IDOK: + cf = d.GetCharFormat() + editControl.SetDefaultCharFormat(cf) + self.SetModified(1) + return 0 # We handled this fully! + + def OnOK(self): + # Handle the edit controls - get all the fonts, put them back into interact, then + # get interact to save its stuff! + controlAttrs = [ + (win32ui.IDC_EDIT1, interact.STYLE_INTERACTIVE_PROMPT), + (win32ui.IDC_EDIT2, interact.STYLE_INTERACTIVE_OUTPUT), + (win32ui.IDC_EDIT3, interact.STYLE_INTERACTIVE_ERROR)] + for id, key in controlAttrs: + control = self.GetDlgItem(id) + fmt = control.GetDefaultCharFormat() + win32ui.WriteProfileVal(interact.sectionProfile, key, str(fmt)) + + # Save the other interactive window options. + interact.SavePreference("Show at startup", self['bShowAtStartup']) + interact.SavePreference("Docking", self['bDocking']) + + # And the other options. 
+ win32ui.WriteProfileVal("Settings","Recent File List Size", self['MRUSize']) + + return 1 + def ChangeFormat(self, fmtAttribute, fmt): + dlg = win32ui.CreateFontDialog(fmt) + if dlg.DoModal() != win32con.IDOK: return None + return dlg.GetCharFormat() + + def OnFormatTitle(self, command, code): + fmt = self.GetFormat(interact.formatTitle) + if fmt: + formatTitle = fmt + SaveFontPreferences() + + def OnFormatInput(self, command, code): + global formatInput + fmt = self.GetFormat(formatInput) + if fmt: + formatInput = fmt + SaveFontPreferences() + def OnFormatOutput(self, command, code): + global formatOutput + fmt = self.GetFormat(formatOutput) + if fmt: + formatOutput = fmt + SaveFontPreferences() + def OnFormatError(self, command, code): + global formatOutputError + fmt = self.GetFormat(formatOutputError) + if fmt: + formatOutputError = fmt + SaveFontPreferences() diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/list.py b/venv/Lib/site-packages/pythonwin/pywin/dialogs/list.py new file mode 100644 index 00000000..6428b61b --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/dialogs/list.py @@ -0,0 +1,122 @@ +from pywin.mfc import dialog +import win32ui, win32con, commctrl, win32api + +class ListDialog (dialog.Dialog): + + def __init__ (self, title, list): + dialog.Dialog.__init__ (self, self._maketemplate(title)) + self.HookMessage (self.on_size, win32con.WM_SIZE) + self.HookNotify(self.OnListItemChange, commctrl.LVN_ITEMCHANGED) + self.HookCommand(self.OnListClick, win32ui.IDC_LIST1) + self.items = list + + def _maketemplate(self, title): + style = win32con.WS_DLGFRAME | win32con.WS_SYSMENU | win32con.WS_VISIBLE + ls = ( + win32con.WS_CHILD | + win32con.WS_VISIBLE | + commctrl.LVS_ALIGNLEFT | + commctrl.LVS_REPORT + ) + bs = ( + win32con.WS_CHILD | + win32con.WS_VISIBLE + ) + return [ [title, (0, 0, 200, 200), style, None, (8, "MS Sans Serif")], + ["SysListView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), ls], + [128, "OK", win32con.IDOK, (10, 0, 50, 14), bs | win32con.BS_DEFPUSHBUTTON], + [128, "Cancel",win32con.IDCANCEL,(0, 0, 50, 14), bs], + ] + + def FillList(self): + size = self.GetWindowRect() + width = size[2] - size[0] - (10) + itemDetails = (commctrl.LVCFMT_LEFT, width, "Item", 0) + self.itemsControl.InsertColumn(0, itemDetails) + index = 0 + for item in self.items: + index = self.itemsControl.InsertItem(index+1, str(item), 0) + + def OnListClick(self, id, code): + if code==commctrl.NM_DBLCLK: + self.EndDialog(win32con.IDOK) + return 1 + + def OnListItemChange(self,std, extra): + (hwndFrom, idFrom, code), (itemNotify, sub, newState, oldState, change, point, lparam) = std, extra + oldSel = (oldState & commctrl.LVIS_SELECTED)!=0 + newSel = (newState & commctrl.LVIS_SELECTED)!=0 + if oldSel != newSel: + try: + self.selecteditem = itemNotify + self.butOK.EnableWindow(1) + except win32ui.error: + self.selecteditem = None + + + def OnInitDialog (self): + rc = dialog.Dialog.OnInitDialog (self) + self.itemsControl = self.GetDlgItem(win32ui.IDC_LIST1) + self.butOK = self.GetDlgItem(win32con.IDOK) + self.butCancel = self.GetDlgItem(win32con.IDCANCEL) + + self.FillList() + + size = self.GetWindowRect() + self.LayoutControls(size[2]-size[0], size[3]-size[1]) + self.butOK.EnableWindow(0) # wait for first selection + return rc + + def LayoutControls(self, w, h): + self.itemsControl.MoveWindow((0,0,w,h-30)) + self.butCancel.MoveWindow((10, h-24, 60, h-4)) + self.butOK.MoveWindow((w-60, h-24, w-10, h-4)) + + def on_size (self, params): + lparam = params[3] + w = 
win32api.LOWORD(lparam) + h = win32api.HIWORD(lparam) + self.LayoutControls(w, h) + +class ListsDialog(ListDialog): + def __init__(self, title, list, colHeadings = ['Item']): + ListDialog.__init__(self, title, list) + self.colHeadings = colHeadings + + def FillList(self): + index = 0 + size = self.GetWindowRect() + width = size[2] - size[0] - (10) - win32api.GetSystemMetrics(win32con.SM_CXVSCROLL) + numCols = len(self.colHeadings) + + for col in self.colHeadings: + itemDetails = (commctrl.LVCFMT_LEFT, width/numCols, col, 0) + self.itemsControl.InsertColumn(index, itemDetails) + index = index + 1 + index = 0 + for items in self.items: + index = self.itemsControl.InsertItem(index+1, str(items[0]), 0) + for itemno in range(1,numCols): + item = items[itemno] + self.itemsControl.SetItemText(index, itemno, str(item)) + +def SelectFromList (title, lst): + dlg = ListDialog(title, lst) + if dlg.DoModal()==win32con.IDOK: + return dlg.selecteditem + else: + return None + +def SelectFromLists (title, lists, headings): + dlg = ListsDialog(title, lists, headings) + if dlg.DoModal()==win32con.IDOK: + return dlg.selecteditem + else: + return None + +def test(): +# print SelectFromList('Single list', [1,2,3]) + print(SelectFromLists('Multi-List', [ ('1',1, 'a'), ('2',2, 'b'), ('3',3, 'c' )], ['Col 1', 'Col 2'])) + +if __name__=='__main__': + test() diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/login.py b/venv/Lib/site-packages/pythonwin/pywin/dialogs/login.py new file mode 100644 index 00000000..b1e53984 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/dialogs/login.py @@ -0,0 +1,121 @@ +'''login -- PythonWin user ID and password dialog box + +(Adapted from originally distributed with Mark Hammond's PythonWin - +this now replaces it!) + +login.GetLogin() displays a modal "OK/Cancel" dialog box with input +fields for a user ID and password. The password field input is masked +with *'s. GetLogin takes two optional parameters, a window title, and a +default user ID. If these parameters are omitted, the title defaults to +"Login", and the user ID is left blank. GetLogin returns a (userid, password) +tuple. GetLogin can be called from scripts running on the console - i.e. you +don't need to write a full-blown GUI app to use it. + +login.GetPassword() is similar, except there is no username field. + +Example: +import pywin.dialogs.login +title = "FTP Login" +def_user = "fred" +userid, password = pywin.dialogs.login.GetLogin(title, def_user) + +Jim Eggleston, 28 August 1996 +Merged with dlgpass and moved to pywin.dialogs by Mark Hammond Jan 1998. 
+''' + +import win32ui +import win32api +import win32con +from pywin.mfc import dialog + +def MakeLoginDlgTemplate(title): + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + cs = win32con.WS_CHILD | win32con.WS_VISIBLE + + # Window frame and title + dlg = [ [title, (0, 0, 184, 40), style, None, (8, "MS Sans Serif")], ] + + # ID label and text box + dlg.append([130, "User ID:", -1, (7, 9, 69, 9), cs | win32con.SS_LEFT]) + s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER + dlg.append(['EDIT', None, win32ui.IDC_EDIT1, (50, 7, 60, 12), s]) + + # Password label and text box + dlg.append([130, "Password:", -1, (7, 22, 69, 9), cs | win32con.SS_LEFT]) + s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER + dlg.append(['EDIT', None, win32ui.IDC_EDIT2, (50, 20, 60, 12), s | win32con.ES_PASSWORD]) + + # OK/Cancel Buttons + s = cs | win32con.WS_TABSTOP + dlg.append([128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON]) + s = win32con.BS_PUSHBUTTON | s + dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 20, 50, 14), s]) + return dlg + +def MakePasswordDlgTemplate(title): + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + cs = win32con.WS_CHILD | win32con.WS_VISIBLE + # Window frame and title + dlg = [ [title, (0, 0, 177, 45), style, None, (8, "MS Sans Serif")], ] + + # Password label and text box + dlg.append([130, "Password:", -1, (7, 7, 69, 9), cs | win32con.SS_LEFT]) + s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER + dlg.append(['EDIT', None, win32ui.IDC_EDIT1, (50, 7, 60, 12), s | win32con.ES_PASSWORD]) + + # OK/Cancel Buttons + s = cs | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON + dlg.append([128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON]) + dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 22, 50, 14), s]) + return dlg + +class LoginDlg(dialog.Dialog): + Cancel = 0 + def __init__(self, title): + dialog.Dialog.__init__(self, MakeLoginDlgTemplate(title) ) + self.AddDDX(win32ui.IDC_EDIT1,'userid') + self.AddDDX(win32ui.IDC_EDIT2,'password') + +def GetLogin(title='Login', userid='', password=''): + d = LoginDlg(title) + d['userid'] = userid + d['password'] = password + if d.DoModal() != win32con.IDOK: + return (None, None) + else: + return (d['userid'], d['password']) + +class PasswordDlg(dialog.Dialog): + def __init__(self, title): + dialog.Dialog.__init__(self, MakePasswordDlgTemplate(title) ) + self.AddDDX(win32ui.IDC_EDIT1,'password') + +def GetPassword(title='Password', password=''): + d = PasswordDlg(title) + d['password'] = password + if d.DoModal()!=win32con.IDOK: + return None + return d['password'] + +if __name__ == "__main__": + import sys + title = 'Login' + def_user = '' + if len(sys.argv) > 1: + title = sys.argv[1] + if len(sys.argv) > 2: + def_userid = sys.argv[2] + userid, password = GetLogin(title, def_user) + if userid == password == None: + print("User pressed Cancel") + else: + print("User ID: ", userid) + print("Password:", password) + newpassword = GetPassword("Reenter just for fun", password) + if newpassword is None: + print("User cancelled") + else: + what = "" + if newpassword != password: + what = "not " + print("The passwords did %smatch" % (what)) diff --git a/venv/Lib/site-packages/pythonwin/pywin/dialogs/status.py b/venv/Lib/site-packages/pythonwin/pywin/dialogs/status.py new file mode 100644 index 00000000..3a75d89c --- /dev/null 
+++ b/venv/Lib/site-packages/pythonwin/pywin/dialogs/status.py @@ -0,0 +1,227 @@ +# No cancel button. + +from pywin.mfc import dialog +from pywin.mfc.thread import WinThread +import threading +import win32ui +import win32con +import win32api +import time + +def MakeProgressDlgTemplate(caption, staticText = ""): + style = (win32con.DS_MODALFRAME | + win32con.WS_POPUP | + win32con.WS_VISIBLE | + win32con.WS_CAPTION | + win32con.WS_SYSMENU | + win32con.DS_SETFONT) + cs = (win32con.WS_CHILD | + win32con.WS_VISIBLE) + + w = 215 + h = 36 # With button + h = 40 + + dlg = [[caption, + (0, 0, w, h), + style, + None, + (8, "MS Sans Serif")], + ] + + s = win32con.WS_TABSTOP | cs + + dlg.append([130, staticText, 1000, (7, 7, w-7, h-32), cs | win32con.SS_LEFT]) + +# dlg.append([128, +# "Cancel", +# win32con.IDCANCEL, +# (w - 60, h - 18, 50, 14), s | win32con.BS_PUSHBUTTON]) + + return dlg + +class CStatusProgressDialog(dialog.Dialog): + def __init__(self, title, msg = "", maxticks = 100, tickincr = 1): + self.initMsg = msg + templ = MakeProgressDlgTemplate(title, msg) + dialog.Dialog.__init__(self, templ) + self.maxticks = maxticks + self.tickincr = tickincr + self.pbar = None + + def OnInitDialog(self): + rc = dialog.Dialog.OnInitDialog(self) + self.static = self.GetDlgItem(1000) + self.pbar = win32ui.CreateProgressCtrl() + self.pbar.CreateWindow (win32con.WS_CHILD | + win32con.WS_VISIBLE, + (10, 30, 310, 44), + self, 1001) + self.pbar.SetRange(0, self.maxticks) + self.pbar.SetStep(self.tickincr) + self.progress = 0 + self.pincr = 5 + return rc + + def Close(self): + self.EndDialog(0) + + def SetMaxTicks(self, maxticks): + if self.pbar is not None: + self.pbar.SetRange(0, maxticks) + + def Tick(self): + if self.pbar is not None: + self.pbar.StepIt() + + def SetTitle(self, text): + self.SetWindowText(text) + + def SetText(self, text): + self.SetDlgItemText(1000, text) + + def Set(self, pos, max = None): + if self.pbar is not None: + self.pbar.SetPos(pos) + if max is not None: + self.pbar.SetRange(0, max) + +# a progress dialog created in a new thread - especially suitable for +# console apps with no message loop. +MYWM_SETTITLE = win32con.WM_USER+10 +MYWM_SETMSG = win32con.WM_USER+11 +MYWM_TICK = win32con.WM_USER+12 +MYWM_SETMAXTICKS = win32con.WM_USER+13 +MYWM_SET = win32con.WM_USER+14 + +class CThreadedStatusProcessDialog(CStatusProgressDialog): + def __init__(self, title, msg = "", maxticks = 100, tickincr = 1): + self.title = title + self.msg = msg + self.threadid = win32api.GetCurrentThreadId() + CStatusProgressDialog.__init__(self, title, msg, maxticks, tickincr) + + def OnInitDialog(self): + rc = CStatusProgressDialog.OnInitDialog(self) + self.HookMessage(self.OnTitle, MYWM_SETTITLE) + self.HookMessage(self.OnMsg, MYWM_SETMSG) + self.HookMessage(self.OnTick, MYWM_TICK) + self.HookMessage(self.OnMaxTicks, MYWM_SETMAXTICKS) + self.HookMessage(self.OnSet, MYWM_SET) + return rc + + def _Send(self, msg): + try: + self.PostMessage(msg) + except win32ui.error: + # the user closed the window - but this does not cancel the + # process - so just ignore it. + pass + + def OnTitle(self, msg): + CStatusProgressDialog.SetTitle(self, self.title) + + def OnMsg(self, msg): + CStatusProgressDialog.SetText(self, self.msg) + + def OnTick(self, msg): + CStatusProgressDialog.Tick(self) + + def OnMaxTicks(self, msg): + CStatusProgressDialog.SetMaxTicks(self, self.maxticks) + + def OnSet(self, msg): + CStatusProgressDialog.Set(self, self.pos, self.max) + + def Close(self): + assert self.threadid, "No thread!" 
+ win32api.PostThreadMessage(self.threadid, win32con.WM_QUIT, 0, 0) + + def SetMaxTicks(self, maxticks): + self.maxticks = maxticks + self._Send(MYWM_SETMAXTICKS) + def SetTitle(self, title): + self.title = title + self._Send(MYWM_SETTITLE) + def SetText(self, text): + self.msg = text + self._Send(MYWM_SETMSG) + def Tick(self): + self._Send(MYWM_TICK) + def Set(self, pos, max = None): + self.pos = pos + self.max = max + self._Send(MYWM_SET) + +class ProgressThread(WinThread): + def __init__(self, title, msg = "", maxticks = 100, tickincr = 1): + self.title = title + self.msg = msg + self.maxticks = maxticks + self.tickincr = tickincr + self.dialog = None + WinThread.__init__(self) + self.createdEvent = threading.Event() + + def InitInstance(self): + self.dialog = CThreadedStatusProcessDialog( self.title, self.msg, self.maxticks, self.tickincr) + self.dialog.CreateWindow() + try: + self.dialog.SetForegroundWindow() + except win32ui.error: + pass + self.createdEvent.set() + return WinThread.InitInstance(self) + + def ExitInstance(self): + return 0 + + +def StatusProgressDialog(title, msg = "", maxticks = 100, parent = None): + d = CStatusProgressDialog (title, msg, maxticks) + d.CreateWindow (parent) + return d + +def ThreadedStatusProgressDialog(title, msg = "", maxticks = 100): + t = ProgressThread(title, msg, maxticks) + t.CreateThread() + # Need to run a basic "PumpWaitingMessages" loop just incase we are + # running inside Pythonwin. + # Basic timeout incase things go terribly wrong. Ideally we should use + # win32event.MsgWaitForMultipleObjects(), but we use a threading module + # event - so use a dumb strategy + end_time = time.time() + 10 + while time.time() < end_time: + if t.createdEvent.isSet(): + break + win32ui.PumpWaitingMessages() + time.sleep(0.1) + return t.dialog + +def demo(): + d = StatusProgressDialog("A Demo", "Doing something...") + import win32api + for i in range(100): + if i == 50: + d.SetText("Getting there...") + if i==90: + d.SetText("Nearly done...") + win32api.Sleep(20) + d.Tick() + d.Close() + +def thread_demo(): + d = ThreadedStatusProgressDialog("A threaded demo", "Doing something") + import win32api + for i in range(100): + if i == 50: + d.SetText("Getting there...") + if i==90: + d.SetText("Nearly done...") + win32api.Sleep(20) + d.Tick() + d.Close() + +if __name__=='__main__': + thread_demo() + #demo() diff --git a/venv/Lib/site-packages/pythonwin/pywin/docking/DockingBar.py b/venv/Lib/site-packages/pythonwin/pywin/docking/DockingBar.py new file mode 100644 index 00000000..cec8dc15 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/docking/DockingBar.py @@ -0,0 +1,541 @@ +# DockingBar.py + +# Ported directly (comments and all) from the samples at www.codeguru.com + +# WARNING: Use at your own risk, as this interface is highly likely to change. +# Currently we support only one child per DockingBar. Later we need to add +# support for multiple children. 
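The DockingBar class added in this file is driven through CreateWindow(), EnableDocking() and the main frame's DockControlBar(); the test() helper at the bottom of the module shows the intended call sequence. For quick reference, a minimal sketch of that same pattern — it only runs inside a live Pythonwin session, the window title and control ID are arbitrary, and it reuses the EditCreator helper defined later in this module:

    import win32ui
    from pywin.mfc import afxres
    from pywin.docking.DockingBar import DockingBar, EditCreator

    bar = DockingBar()
    # Host a multi-line edit control in the bar, parented to the Pythonwin main frame.
    bar.CreateWindow(win32ui.GetMainFrame(), EditCreator, "Demo bar", 0xfffff)
    bar.SetBarStyle(bar.GetBarStyle() | afxres.CBRS_TOOLTIPS | afxres.CBRS_SIZE_DYNAMIC)
    bar.EnableDocking(afxres.CBRS_ALIGN_ANY)  # allow docking on any frame edge
    win32ui.GetMainFrame().DockControlBar(bar, afxres.AFX_IDW_DOCKBAR_BOTTOM)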
+ +import win32api, win32con, win32ui +from pywin.mfc import afxres, window +import struct + +clrBtnHilight = win32api.GetSysColor(win32con.COLOR_BTNHILIGHT) +clrBtnShadow = win32api.GetSysColor(win32con.COLOR_BTNSHADOW) + +def CenterPoint(rect): + width = rect[2]-rect[0] + height = rect[3]-rect[1] + return rect[0] + width//2, rect[1] + height//2 + +def OffsetRect(rect, point): + (x, y) = point + return rect[0]+x, rect[1]+y, rect[2]+x, rect[3]+y + +def DeflateRect(rect, point): + (x, y) = point + return rect[0]+x, rect[1]+y, rect[2]-x, rect[3]-y + +def PtInRect(rect, pt): + return rect[0] <= pt[0] < rect[2] and rect[1] <= pt[1] < rect[3] + +class DockingBar(window.Wnd): + def __init__(self, obj=None): + if obj is None: + obj = win32ui.CreateControlBar() + window.Wnd.__init__(self, obj) + self.dialog = None + self.nDockBarID = 0 + self.sizeMin = 32, 32 + self.sizeHorz = 200, 200 + self.sizeVert = 200, 200 + self.sizeFloat = 200, 200 + self.bTracking = 0 + self.bInRecalcNC = 0 + self.cxEdge = 6 + self.cxBorder = 3 + self.cxGripper = 20 + self.brushBkgd = win32ui.CreateBrush() + self.brushBkgd.CreateSolidBrush(win32api.GetSysColor(win32con.COLOR_BTNFACE)) + + # Support for diagonal resizing + self.cyBorder = 3 + self.cCaptionSize = win32api.GetSystemMetrics(win32con.SM_CYSMCAPTION) + self.cMinWidth = win32api.GetSystemMetrics(win32con.SM_CXMIN) + self.cMinHeight = win32api.GetSystemMetrics(win32con.SM_CYMIN) + self.rectUndock = (0,0,0,0) + + def OnUpdateCmdUI(self, target, bDisableIfNoHndler): + return self.UpdateDialogControls(target, bDisableIfNoHndler) + + def CreateWindow(self, parent, childCreator, title, id, style=win32con.WS_CHILD | win32con.WS_VISIBLE | afxres.CBRS_LEFT, childCreatorArgs=()): + assert not ((style & afxres.CBRS_SIZE_FIXED) and (style & afxres.CBRS_SIZE_DYNAMIC)), "Invalid style" + self.rectClose = self.rectBorder = self.rectGripper = self.rectTracker = 0,0,0,0 + + # save the style + self._obj_.dwStyle = style & afxres.CBRS_ALL + + cursor = win32api.LoadCursor(0, win32con.IDC_ARROW) + wndClass = win32ui.RegisterWndClass(win32con.CS_DBLCLKS, cursor, self.brushBkgd.GetSafeHandle(), 0) + + self._obj_.CreateWindow(wndClass, title, style, (0,0,0,0), parent, id) + + # Create the child dialog + self.dialog = childCreator(*(self,) + childCreatorArgs) + + # use the dialog dimensions as default base dimensions + assert self.dialog.IsWindow(), "The childCreator function %s did not create a window!" % childCreator + rect = self.dialog.GetWindowRect() + self.sizeHorz = self.sizeVert = self.sizeFloat = rect[2]-rect[0], rect[3]-rect[1] + + self.sizeHorz = self.sizeHorz[0], self.sizeHorz[1] + self.cxEdge + self.cxBorder + self.sizeVert = self.sizeVert[0] + self.cxEdge + self.cxBorder, self.sizeVert[1] + self.HookMessages() + + def CalcFixedLayout(self, bStretch, bHorz): + rectTop = self.dockSite.GetControlBar(afxres.AFX_IDW_DOCKBAR_TOP).GetWindowRect() + rectLeft = self.dockSite.GetControlBar(afxres.AFX_IDW_DOCKBAR_LEFT).GetWindowRect() + if bStretch: + nHorzDockBarWidth = 32767 + nVertDockBarHeight = 32767 + else: + nHorzDockBarWidth = rectTop[2]-rectTop[0] + 4 + nVertDockBarHeight = rectLeft[3]-rectLeft[1] + 4 + + if self.IsFloating(): + return self.sizeFloat + if bHorz: + return nHorzDockBarWidth, self.sizeHorz[1] + return self.sizeVert[0], nVertDockBarHeight + + def CalcDynamicLayout(self, length, mode): + # Support for diagonal sizing. 
+ if self.IsFloating(): + self.GetParent().GetParent().ModifyStyle(win32ui.MFS_4THICKFRAME, 0) + if mode & (win32ui.LM_HORZDOCK | win32ui.LM_VERTDOCK): + flags = win32con.SWP_NOSIZE | win32con.SWP_NOMOVE | win32con.SWP_NOZORDER |\ + win32con.SWP_NOACTIVATE | win32con.SWP_FRAMECHANGED + self.SetWindowPos(0, (0, 0, 0, 0,), flags) + self.dockSite.RecalcLayout() + return self._obj_.CalcDynamicLayout(length, mode) + + if mode & win32ui.LM_MRUWIDTH: + return self.sizeFloat + if mode & win32ui.LM_COMMIT: + self.sizeFloat = length, self.sizeFloat[1] + return self.sizeFloat + # More diagonal sizing. + if self.IsFloating(): + dc = self.dockContext + pt = win32api.GetCursorPos() + windowRect = self.GetParent().GetParent().GetWindowRect() + + hittest = dc.nHitTest + if hittest==win32con.HTTOPLEFT: + cx = max(windowRect[2] - pt[0], self.cMinWidth) - self.cxBorder + cy = max(windowRect[3] - self.cCaptionSize - pt[1],self.cMinHeight) - 1 + self.sizeFloat = cx, cy + + top = min(pt[1], windowRect[3] - self.cCaptionSize - self.cMinHeight) - self.cyBorder + left = min(pt[0], windowRect[2] - self.cMinWidth) - 1 + dc.rectFrameDragHorz = left, top, dc.rectFrameDragHorz[2], dc.rectFrameDragHorz[3] + return self.sizeFloat + if hittest==win32con.HTTOPRIGHT: + cx = max(pt[0] - windowRect[0], self.cMinWidth) + cy = max(windowRect[3] - self.cCaptionSize - pt[1], self.cMinHeight) - 1 + self.sizeFloat = cx, cy + + top = min(pt[1], windowRect[3] - self.cCaptionSize - self.cMinHeight) - self.cyBorder + dc.rectFrameDragHorz = dc.rectFrameDragHorz[0], top, dc.rectFrameDragHorz[2], dc.rectFrameDragHorz[3] + return self.sizeFloat + + if hittest==win32con.HTBOTTOMLEFT: + cx = max(windowRect[2] - pt[0], self.cMinWidth) - self.cxBorder + cy = max(pt[1] - windowRect[1] - self.cCaptionSize, self.cMinHeight) + self.sizeFloat = cx, cy + + left = min(pt[0], windowRect[2] -self.cMinWidth) - 1 + dc.rectFrameDragHorz = left, dc.rectFrameDragHorz[1], dc.rectFrameDragHorz[2], dc.rectFrameDragHorz[3] + return self.sizeFloat + + if hittest==win32con.HTBOTTOMRIGHT: + cx = max(pt[0] - windowRect[0], self.cMinWidth) + cy = max(pt[1] - windowRect[1] - self.cCaptionSize, self.cMinHeight) + self.sizeFloat = cx, cy + return self.sizeFloat + + if mode & win32ui.LM_LENGTHY: + self.sizeFloat = self.sizeFloat[0], max(self.sizeMin[1], length) + return self.sizeFloat + else: + return max(self.sizeMin[0], length), self.sizeFloat[1] + + def OnWindowPosChanged(self, msg): + if self.GetSafeHwnd()==0 or self.dialog is None: + return 0 + lparam = msg[3] + """ LPARAM used with WM_WINDOWPOSCHANGED: + typedef struct { + HWND hwnd; + HWND hwndInsertAfter; + int x; + int y; + int cx; + int cy; + UINT flags;} WINDOWPOS; + """ + format = "PPiiiii" + bytes = win32ui.GetBytes( lparam, struct.calcsize(format) ) + hwnd, hwndAfter, x, y, cx, cy, flags = struct.unpack(format, bytes) + + if self.bInRecalcNC: + rc = self.GetClientRect() + self.dialog.MoveWindow(rc) + return 0 + # Find on which side are we docked + nDockBarID = self.GetParent().GetDlgCtrlID() + # Return if dropped at same location + # no docking side change and no size change + if (nDockBarID == self.nDockBarID) and \ + (flags & win32con.SWP_NOSIZE) and \ + ((self._obj_.dwStyle & afxres.CBRS_BORDER_ANY) != afxres.CBRS_BORDER_ANY): + return + self.nDockBarID = nDockBarID + + # Force recalc the non-client area + self.bInRecalcNC = 1 + try: + swpflags = win32con.SWP_NOSIZE | win32con.SWP_NOMOVE | win32con.SWP_NOZORDER | win32con.SWP_FRAMECHANGED + self.SetWindowPos(0, (0,0,0,0), swpflags) + finally: + 
self.bInRecalcNC = 0 + return 0 + + # This is a virtual and not a message hook. + def OnSetCursor(self, window, nHitTest, wMouseMsg): + if nHitTest != win32con.HTSIZE or self.bTracking: + return self._obj_.OnSetCursor(window, nHitTest, wMouseMsg) + + if self.IsHorz(): + win32api.SetCursor(win32api.LoadCursor(0, win32con.IDC_SIZENS)) + else: + win32api.SetCursor(win32api.LoadCursor(0, win32con.IDC_SIZEWE)) + return 1 + + # Mouse Handling + def OnLButtonUp(self, msg): + if not self.bTracking: + return 1 # pass it on. + self.StopTracking(1) + return 0 # Dont pass on + + def OnLButtonDown(self, msg): + # UINT nFlags, CPoint point) + # only start dragging if clicked in "void" space + if self.dockBar is not None: + # start the drag + pt = msg[5] + pt = self.ClientToScreen(pt) + self.dockContext.StartDrag(pt) + return 0 + return 1 + + def OnNcLButtonDown(self, msg): + if self.bTracking: return 0 + nHitTest = wparam = msg[2] + pt = msg[5] + + if nHitTest==win32con.HTSYSMENU and not self.IsFloating(): + self.GetDockingFrame().ShowControlBar(self, 0, 0) + elif nHitTest == win32con.HTMINBUTTON and not self.IsFloating(): + self.dockContext.ToggleDocking() + elif nHitTest == win32con.HTCAPTION and not self.IsFloating() and self.dockBar is not None: + self.dockContext.StartDrag(pt) + elif nHitTest == win32con.HTSIZE and not self.IsFloating(): + self.StartTracking() + else: + return 1 + return 0 + + def OnLButtonDblClk(self, msg): + # only toggle docking if clicked in "void" space + if self.dockBar is not None: + # toggle docking + self.dockContext.ToggleDocking() + return 0 + return 1 + + def OnNcLButtonDblClk(self, msg): + nHitTest = wparam = msg[2] + # UINT nHitTest, CPoint point) + if self.dockBar is not None and nHitTest == win32con.HTCAPTION: + # toggle docking + self.dockContext.ToggleDocking() + return 0 + return 1 + + def OnMouseMove(self, msg): + flags = wparam = msg[2] + lparam = msg[3] + if self.IsFloating() or not self.bTracking: + return 1 + + # Convert unsigned 16 bit to signed 32 bit. + x=win32api.LOWORD(lparam) + if x & 32768: x = x | -65536 + y = win32api.HIWORD(lparam) + if y & 32768: y = y | -65536 + pt = x, y + cpt = CenterPoint(self.rectTracker) + pt = self.ClientToWnd(pt) + if self.IsHorz(): + if cpt[1] != pt[1]: + self.OnInvertTracker(self.rectTracker) + self.rectTracker = OffsetRect(self.rectTracker, (0, pt[1] - cpt[1])) + self.OnInvertTracker(self.rectTracker) + else: + if cpt[0] != pt[0]: + self.OnInvertTracker(self.rectTracker) + self.rectTracker = OffsetRect(self.rectTracker, (pt[0]-cpt[0], 0)) + self.OnInvertTracker(self.rectTracker) + + return 0 # Dont pass it on. 
+ +# def OnBarStyleChange(self, old, new): + + def OnNcCalcSize(self, bCalcValid, size_info): + (rc0, rc1, rc2, pos) = size_info + self.rectBorder = self.GetWindowRect() + self.rectBorder = OffsetRect( self.rectBorder, (-self.rectBorder[0], -self.rectBorder[1]) ) + + dwBorderStyle = self._obj_.dwStyle | afxres.CBRS_BORDER_ANY + + if self.nDockBarID==afxres.AFX_IDW_DOCKBAR_TOP: + dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_BOTTOM; + rc0.left = rc0.left + self.cxGripper + rc0.bottom = rc0.bottom-self.cxEdge + rc0.top = rc0.top + self.cxBorder + rc0.right = rc0.right - self.cxBorder + self.rectBorder = self.rectBorder[0], self.rectBorder[3]-self.cxEdge, self.rectBorder[2], self.rectBorder[3] + elif self.nDockBarID==afxres.AFX_IDW_DOCKBAR_BOTTOM: + dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_TOP + rc0.left = rc0.left + self.cxGripper + rc0.top = rc0.top + self.cxEdge + rc0.bottom = rc0.bottom - self.cxBorder + rc0.right = rc0.right - self.cxBorder + self.rectBorder = self.rectBorder[0], self.rectBorder[1], self.rectBorder[2], self.rectBorder[1]+self.cxEdge + elif self.nDockBarID==afxres.AFX_IDW_DOCKBAR_LEFT: + dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_RIGHT + rc0.right = rc0.right - self.cxEdge + rc0.left = rc0.left + self.cxBorder + rc0.bottom = rc0.bottom - self.cxBorder + rc0.top = rc0.top + self.cxGripper + self.rectBorder = self.rectBorder[2] - self.cxEdge, self.rectBorder[1], self.rectBorder[2], self.rectBorder[3] + elif self.nDockBarID==afxres.AFX_IDW_DOCKBAR_RIGHT: + dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_LEFT + rc0.left = rc0.left + self.cxEdge + rc0.right = rc0.right - self.cxBorder + rc0.bottom = rc0.bottom - self.cxBorder + rc0.top = rc0.top + self.cxGripper + self.rectBorder = self.rectBorder[0], self.rectBorder[1], self.rectBorder[0]+self.cxEdge, self.rectBorder[3] + else: + self.rectBorder = 0,0,0,0 + + self.SetBarStyle(dwBorderStyle) + return 0 + + def OnNcPaint(self, msg): + self.EraseNonClient() + dc = self.GetWindowDC() + ctl = win32api.GetSysColor(win32con.COLOR_BTNHIGHLIGHT) + cbr = win32api.GetSysColor(win32con.COLOR_BTNSHADOW) + dc.Draw3dRect(self.rectBorder, ctl, cbr) + + self.DrawGripper(dc) + + rect = self.GetClientRect() + self.InvalidateRect( rect, 1) + return 0 + + def OnNcHitTest(self, pt): # A virtual, not a hooked message. 
+ if self.IsFloating(): + return 1 + + ptOrig = pt + rect = self.GetWindowRect() + pt = pt[0] - rect[0], pt[1] - rect[1] + + if PtInRect(self.rectClose, pt): + return win32con.HTSYSMENU + elif PtInRect(self.rectUndock, pt): + return win32con.HTMINBUTTON + elif PtInRect(self.rectGripper, pt): + return win32con.HTCAPTION + elif PtInRect(self.rectBorder, pt): + return win32con.HTSIZE + else: + return self._obj_.OnNcHitTest(ptOrig) + + def StartTracking(self): + self.SetCapture() + + # make sure no updates are pending + self.RedrawWindow(None, None, win32con.RDW_ALLCHILDREN | win32con.RDW_UPDATENOW) + self.dockSite.LockWindowUpdate() + + self.ptOld = CenterPoint(self.rectBorder) + self.bTracking = 1 + + self.rectTracker = self.rectBorder; + if not self.IsHorz(): + l, t, r, b = self.rectTracker + b = b - 4 + self.rectTracker = l, t, r, b + + self.OnInvertTracker(self.rectTracker); + + def OnCaptureChanged(self, msg): + hwnd = lparam = msg[3] + if self.bTracking and hwnd != self.GetSafeHwnd(): + self.StopTracking(0) # cancel tracking + return 1 + + def StopTracking(self, bAccept): + self.OnInvertTracker(self.rectTracker) + self.dockSite.UnlockWindowUpdate() + self.bTracking = 0 + self.ReleaseCapture() + if not bAccept: return + + rcc = self.dockSite.GetWindowRect() + if self.IsHorz(): + newsize = self.sizeHorz[1] + maxsize = newsize + (rcc[3]-rcc[1]) + minsize = self.sizeMin[1] + else: + newsize = self.sizeVert[0] + maxsize = newsize + (rcc[2]-rcc[0]) + minsize = self.sizeMin[0] + + pt = CenterPoint(self.rectTracker) + if self.nDockBarID== afxres.AFX_IDW_DOCKBAR_TOP: + newsize = newsize + (pt[1] - self.ptOld[1]) + elif self.nDockBarID== afxres.AFX_IDW_DOCKBAR_BOTTOM: + newsize = newsize + (- pt[1] + self.ptOld[1]) + elif self.nDockBarID== afxres.AFX_IDW_DOCKBAR_LEFT: + newsize = newsize + (pt[0] - self.ptOld[0]) + elif self.nDockBarID== afxres.AFX_IDW_DOCKBAR_RIGHT: + newsize = newsize + (- pt[0] + self.ptOld[0]) + newsize = max(minsize, min(maxsize, newsize)) + if self.IsHorz(): + self.sizeHorz = self.sizeHorz[0], newsize + else: + self.sizeVert = newsize, self.sizeVert[1] + self.dockSite.RecalcLayout() + return 0 + + def OnInvertTracker(self, rect): + assert rect[2]-rect[0]>0 and rect[3]-rect[1]>0, "rect is empty" + assert self.bTracking + rcc = self.GetWindowRect() + rcf = self.dockSite.GetWindowRect() + + rect = OffsetRect(rect, (rcc[0] - rcf[0], rcc[1] - rcf[1])) + rect = DeflateRect(rect, (1, 1)); + + flags = win32con.DCX_WINDOW|win32con.DCX_CACHE|win32con.DCX_LOCKWINDOWUPDATE + dc = self.dockSite.GetDCEx(None, flags) + try: + brush = win32ui.GetHalftoneBrush() + oldBrush = dc.SelectObject(brush) + + dc.PatBlt((rect[0], rect[1]), (rect[2]-rect[0], rect[3]-rect[1]), win32con.PATINVERT) + dc.SelectObject(oldBrush) + finally: + self.dockSite.ReleaseDC(dc) + + def IsHorz(self): + return self.nDockBarID == afxres.AFX_IDW_DOCKBAR_TOP or \ + self.nDockBarID == afxres.AFX_IDW_DOCKBAR_BOTTOM + + def ClientToWnd(self, pt): + x, y=pt + if self.nDockBarID == afxres.AFX_IDW_DOCKBAR_BOTTOM: + y = y + self.cxEdge + elif self.nDockBarID == afxres.AFX_IDW_DOCKBAR_RIGHT: + x = x + self.cxEdge + return x,y + + def DrawGripper(self, dc): + # no gripper if floating + if self._obj_.dwStyle & afxres.CBRS_FLOATING: + return + + # -==HACK==- + # in order to calculate the client area properly after docking, + # the client area must be recalculated twice (I have no idea why) + self.dockSite.RecalcLayout() + # -==END HACK==- + + gripper = self.GetWindowRect() + gripper = self.ScreenToClient( gripper ) + gripper = 
OffsetRect( gripper, (-gripper[0], -gripper[1]) ) + gl, gt, gr, gb = gripper + + if self._obj_.dwStyle & afxres.CBRS_ORIENT_HORZ: + # gripper at left + self.rectGripper = gl, gt + 40, gl+20, gb + # draw close box + self.rectClose = gl+7, gt + 10, gl+19, gt+22 + dc.DrawFrameControl(self.rectClose, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONCLOSE) + # draw docking toggle box + self.rectUndock = OffsetRect(self.rectClose, (0,13)) + dc.DrawFrameControl(self.rectUndock, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONMAX); + + gt = gt + 38 + gb = gb - 10 + gl = gl + 10 + gr = gl + 3 + gripper = gl, gt, gr, gb + dc.Draw3dRect( gripper, clrBtnHilight, clrBtnShadow ) + dc.Draw3dRect( OffsetRect(gripper, (4,0)), clrBtnHilight, clrBtnShadow ) + else: + # gripper at top + self.rectGripper = gl, gt, gr-40, gt+20 + # draw close box + self.rectClose = gr-21, gt+7, gr-10, gt+18 + dc.DrawFrameControl(self.rectClose, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONCLOSE) + # draw docking toggle box + self.rectUndock = OffsetRect( self.rectClose, (-13,0) ) + dc.DrawFrameControl(self.rectUndock, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONMAX) + gr = gr - 38; + gl = gl + 10 + gt = gt + 10 + gb = gt + 3 + + gripper = gl, gt, gr, gb + dc.Draw3dRect( gripper, clrBtnHilight, clrBtnShadow ) + dc.Draw3dRect( OffsetRect(gripper, (0,4) ), clrBtnHilight, clrBtnShadow ) + + def HookMessages(self): + self.HookMessage(self.OnLButtonUp, win32con.WM_LBUTTONUP) + self.HookMessage(self.OnLButtonDown, win32con.WM_LBUTTONDOWN) + self.HookMessage(self.OnLButtonDblClk, win32con.WM_LBUTTONDBLCLK) + self.HookMessage(self.OnNcLButtonDown, win32con.WM_NCLBUTTONDOWN) + self.HookMessage(self.OnNcLButtonDblClk, win32con.WM_NCLBUTTONDBLCLK) + self.HookMessage(self.OnMouseMove, win32con.WM_MOUSEMOVE) + self.HookMessage(self.OnNcPaint, win32con.WM_NCPAINT) + self.HookMessage(self.OnCaptureChanged, win32con.WM_CAPTURECHANGED) + self.HookMessage(self.OnWindowPosChanged, win32con.WM_WINDOWPOSCHANGED) +# self.HookMessage(self.OnSize, win32con.WM_SIZE) + +def EditCreator(parent): + d = win32ui.CreateEdit() + es = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | win32con.ES_MULTILINE | win32con.ES_WANTRETURN + d.CreateWindow( es, (0,0,150,150), parent, 1000) + return d + +def test(): + import pywin.mfc.dialog + global bar + bar = DockingBar() + creator = EditCreator + bar.CreateWindow(win32ui.GetMainFrame(), creator, "Coolbar Demo",0xfffff) +# win32ui.GetMainFrame().ShowControlBar(bar, 1, 0) + bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC) + bar.EnableDocking(afxres.CBRS_ALIGN_ANY) + win32ui.GetMainFrame().DockControlBar(bar, afxres.AFX_IDW_DOCKBAR_BOTTOM) + + +if __name__=='__main__': + test() diff --git a/venv/Lib/site-packages/pythonwin/pywin/docking/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/docking/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pythonwin/pywin/docking/__pycache__/DockingBar.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/docking/__pycache__/DockingBar.cpython-36.pyc new file mode 100644 index 00000000..4c3205cc Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/docking/__pycache__/DockingBar.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/docking/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/docking/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..4a69fd58 Binary files /dev/null and 
b/venv/Lib/site-packages/pythonwin/pywin/docking/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/framework/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..5e541255 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/app.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/app.cpython-36.pyc new file mode 100644 index 00000000..5e78d432 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/app.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/bitmap.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/bitmap.cpython-36.pyc new file mode 100644 index 00000000..a337f906 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/bitmap.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/cmdline.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/cmdline.cpython-36.pyc new file mode 100644 index 00000000..0bf8fa2a Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/cmdline.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/dbgcommands.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/dbgcommands.cpython-36.pyc new file mode 100644 index 00000000..20c94413 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/dbgcommands.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/dlgappcore.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/dlgappcore.cpython-36.pyc new file mode 100644 index 00000000..23cfd1a9 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/dlgappcore.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/help.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/help.cpython-36.pyc new file mode 100644 index 00000000..84445cb9 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/help.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/interact.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/interact.cpython-36.pyc new file mode 100644 index 00000000..3ca7a4f7 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/interact.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/intpyapp.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/intpyapp.cpython-36.pyc new file mode 100644 index 00000000..ed734e2d Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/intpyapp.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/intpydde.cpython-36.pyc 
b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/intpydde.cpython-36.pyc new file mode 100644 index 00000000..ebff9169 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/intpydde.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/mdi_pychecker.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/mdi_pychecker.cpython-36.pyc new file mode 100644 index 00000000..e622d5c7 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/mdi_pychecker.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/scriptutils.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/scriptutils.cpython-36.pyc new file mode 100644 index 00000000..d02c53f0 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/scriptutils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/sgrepmdi.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/sgrepmdi.cpython-36.pyc new file mode 100644 index 00000000..da1e3056 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/sgrepmdi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/startup.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/startup.cpython-36.pyc new file mode 100644 index 00000000..83607f61 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/startup.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/stdin.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/stdin.cpython-36.pyc new file mode 100644 index 00000000..3265e0c7 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/stdin.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/toolmenu.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/toolmenu.cpython-36.pyc new file mode 100644 index 00000000..e73e8e3c Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/toolmenu.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/window.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/window.cpython-36.pyc new file mode 100644 index 00000000..d678ae53 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/window.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/winout.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/winout.cpython-36.pyc new file mode 100644 index 00000000..6ae3eee8 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/__pycache__/winout.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/app.py b/venv/Lib/site-packages/pythonwin/pywin/framework/app.py new file mode 100644 index 00000000..f2e141fc --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/app.py @@ -0,0 +1,408 @@ +# App.py +# Application stuff. +# The application is responsible for managing the main frame window. +# +# We also grab the FileOpen command, to invoke our Python editor +" The PythonWin application code. 
Manages most aspects of MDI, etc " +import win32con +import win32api +import win32ui +import sys +import string +import os +from pywin.mfc import window, dialog, afxres +from pywin.mfc.thread import WinApp +import traceback +import regutil + +from . import scriptutils + +## NOTE: App and AppBuild should NOT be used - instead, you should contruct your +## APP class manually whenever you like (just ensure you leave these 2 params None!) +## Whoever wants the generic "Application" should get it via win32iu.GetApp() + +# These are "legacy" +AppBuilder = None +App = None # default - if used, must end up a CApp derived class. + +# Helpers that should one day be removed! +def AddIdleHandler(handler): + print("app.AddIdleHandler is deprecated - please use win32ui.GetApp().AddIdleHandler() instead.") + return win32ui.GetApp().AddIdleHandler(handler) +def DeleteIdleHandler(handler): + print("app.DeleteIdleHandler is deprecated - please use win32ui.GetApp().DeleteIdleHandler() instead.") + return win32ui.GetApp().DeleteIdleHandler(handler) + +# Helper for writing a Window position by name, and later loading it. +def SaveWindowSize(section,rect,state=""): + """ Writes a rectangle to an INI file + Args: section = section name in the applications INI file + rect = a rectangle in a (cy, cx, y, x) tuple + (same format as CREATESTRUCT position tuples).""" + left, top, right, bottom = rect + if state: state = state + " " + win32ui.WriteProfileVal(section,state+"left",left) + win32ui.WriteProfileVal(section,state+"top",top) + win32ui.WriteProfileVal(section,state+"right",right) + win32ui.WriteProfileVal(section,state+"bottom",bottom) + +def LoadWindowSize(section, state=""): + """ Loads a section from an INI file, and returns a rect in a tuple (see SaveWindowSize)""" + if state: state = state + " " + left = win32ui.GetProfileVal(section,state+"left",0) + top = win32ui.GetProfileVal(section,state+"top",0) + right = win32ui.GetProfileVal(section,state+"right",0) + bottom = win32ui.GetProfileVal(section,state+"bottom",0) + return (left, top, right, bottom) + +def RectToCreateStructRect(rect): + return (rect[3]-rect[1], rect[2]-rect[0], rect[1], rect[0] ) + + +# Define FrameWindow and Application objects +# +# The Main Frame of the application. 
+class MainFrame(window.MDIFrameWnd): + sectionPos = "Main Window" + statusBarIndicators = ( afxres.ID_SEPARATOR, #// status line indicator + afxres.ID_INDICATOR_CAPS, + afxres.ID_INDICATOR_NUM, + afxres.ID_INDICATOR_SCRL, + win32ui.ID_INDICATOR_LINENUM, + win32ui.ID_INDICATOR_COLNUM ) + + def OnCreate(self, cs): + self._CreateStatusBar() + return 0 + + def _CreateStatusBar(self): + self.statusBar = win32ui.CreateStatusBar(self) + self.statusBar.SetIndicators(self.statusBarIndicators) + self.HookCommandUpdate(self.OnUpdatePosIndicator, win32ui.ID_INDICATOR_LINENUM) + self.HookCommandUpdate(self.OnUpdatePosIndicator, win32ui.ID_INDICATOR_COLNUM) + + def OnUpdatePosIndicator(self, cmdui): + editControl = scriptutils.GetActiveEditControl() + value = " " * 5 + if editControl is not None: + try: + startChar, endChar = editControl.GetSel() + lineNo = editControl.LineFromChar(startChar) + colNo = endChar - editControl.LineIndex(lineNo) + + if cmdui.m_nID==win32ui.ID_INDICATOR_LINENUM: + value = "%0*d" % (5, lineNo + 1) + else: + value = "%0*d" % (3, colNo + 1) + except win32ui.error: + pass + cmdui.SetText(value) + cmdui.Enable() + + def PreCreateWindow(self, cc): + cc = self._obj_.PreCreateWindow(cc) + pos = LoadWindowSize(self.sectionPos) + self.startRect = pos + if pos[2] - pos[0]: + rect = RectToCreateStructRect(pos) + cc = cc[0], cc[1], cc[2], cc[3], rect, cc[5], cc[6], cc[7], cc[8] + return cc + + def OnDestroy(self, msg): + # use GetWindowPlacement(), as it works even when min'd or max'd + rectNow = self.GetWindowPlacement()[4] + if rectNow != self.startRect: + SaveWindowSize(self.sectionPos, rectNow) + return 0 + +class CApp(WinApp): + " A class for the application " + def __init__(self): + self.oldCallbackCaller = None + WinApp.__init__(self, win32ui.GetApp() ) + self.idleHandlers = [] + + def InitInstance(self): + " Called to crank up the app " + HookInput() + numMRU = win32ui.GetProfileVal("Settings","Recent File List Size", 10) + win32ui.LoadStdProfileSettings(numMRU) +# self._obj_.InitMDIInstance() + if win32api.GetVersionEx()[0]<4: + win32ui.SetDialogBkColor() + win32ui.Enable3dControls() + + # install a "callback caller" - a manager for the callbacks +# self.oldCallbackCaller = win32ui.InstallCallbackCaller(self.CallbackManager) + self.LoadMainFrame() + self.SetApplicationPaths() + + def ExitInstance(self): + " Called as the app dies - too late to prevent it here! " + win32ui.OutputDebug("Application shutdown\n") + # Restore the callback manager, if any. + try: + win32ui.InstallCallbackCaller(self.oldCallbackCaller) + except AttributeError: + pass + if self.oldCallbackCaller: + del self.oldCallbackCaller + self.frame=None # clean Python references to the now destroyed window object. + self.idleHandlers = [] + # Attempt cleanup if not already done! 
+ if self._obj_: self._obj_.AttachObject(None) + self._obj_ = None + global App + global AppBuilder + App = None + AppBuilder = None + return 0 + + def HaveIdleHandler(self, handler): + return handler in self.idleHandlers + def AddIdleHandler(self, handler): + self.idleHandlers.append(handler) + def DeleteIdleHandler(self, handler): + self.idleHandlers.remove(handler) + def OnIdle(self, count): + try: + ret = 0 + handlers = self.idleHandlers[:] # copy list, as may be modified during loop + for handler in handlers: + try: + thisRet = handler(handler, count) + except: + print("Idle handler %s failed" % (repr(handler))) + traceback.print_exc() + print("Idle handler removed from list") + try: + self.DeleteIdleHandler(handler) + except ValueError: # Item not in list. + pass + thisRet = 0 + ret = ret or thisRet + return ret + except KeyboardInterrupt: + pass + def CreateMainFrame(self): + return MainFrame() + + def LoadMainFrame(self): + " Create the main applications frame " + self.frame = self.CreateMainFrame() + self.SetMainFrame(self.frame) + self.frame.LoadFrame(win32ui.IDR_MAINFRAME, win32con.WS_OVERLAPPEDWINDOW) + self.frame.DragAcceptFiles() # we can accept these. + self.frame.ShowWindow(win32ui.GetInitialStateRequest()) + self.frame.UpdateWindow() + self.HookCommands() + + def OnHelp(self,id, code): + try: + if id==win32ui.ID_HELP_GUI_REF: + helpFile = regutil.GetRegisteredHelpFile("Pythonwin Reference") + helpCmd = win32con.HELP_CONTENTS + else: + helpFile = regutil.GetRegisteredHelpFile("Main Python Documentation") + helpCmd = win32con.HELP_FINDER + if helpFile is None: + win32ui.MessageBox("The help file is not registered!") + else: + from . import help + help.OpenHelpFile(helpFile, helpCmd) + except: + t, v, tb = sys.exc_info() + win32ui.MessageBox("Internal error in help file processing\r\n%s: %s" % (t,v)) + tb = None # Prevent a cycle + + def DoLoadModules(self, modules): + # XXX - this should go, but the debugger uses it :-( + # dont do much checking! + for module in modules: + __import__(module) + + def HookCommands(self): + self.frame.HookMessage(self.OnDropFiles,win32con.WM_DROPFILES) + self.HookCommand(self.HandleOnFileOpen,win32ui.ID_FILE_OPEN) + self.HookCommand(self.HandleOnFileNew,win32ui.ID_FILE_NEW) + self.HookCommand(self.OnFileMRU,win32ui.ID_FILE_MRU_FILE1) + self.HookCommand(self.OnHelpAbout,win32ui.ID_APP_ABOUT) + self.HookCommand(self.OnHelp, win32ui.ID_HELP_PYTHON) + self.HookCommand(self.OnHelp, win32ui.ID_HELP_GUI_REF) + # Hook for the right-click menu. + self.frame.GetWindow(win32con.GW_CHILD).HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN) + + def SetApplicationPaths(self): + # Load the users/application paths + new_path = [] + apppath=win32ui.GetProfileVal('Python','Application Path','').split(';') + for path in apppath: + if len(path)>0: + new_path.append(win32ui.FullPath(path)) + for extra_num in range(1,11): + apppath=win32ui.GetProfileVal('Python','Application Path %d'%extra_num,'').split(';') + if len(apppath) == 0: + break + for path in apppath: + if len(path)>0: + new_path.append(win32ui.FullPath(path)) + sys.path = new_path + sys.path + + def OnRClick(self,params): + " Handle right click message " + # put up the entire FILE menu! + menu = win32ui.LoadMenu(win32ui.IDR_TEXTTYPE).GetSubMenu(0) + menu.TrackPopupMenu(params[5]) # track at mouse position. 
+ return 0 + + def OnDropFiles(self,msg): + " Handle a file being dropped from file manager " + hDropInfo = msg[2] + self.frame.SetActiveWindow() # active us + nFiles = win32api.DragQueryFile(hDropInfo) + try: + for iFile in range(0,nFiles): + fileName = win32api.DragQueryFile(hDropInfo, iFile) + win32ui.GetApp().OpenDocumentFile( fileName ) + finally: + win32api.DragFinish(hDropInfo); + + return 0 + +# No longer used by Pythonwin, as the C++ code has this same basic functionality +# but handles errors slightly better. +# It all still works, tho, so if you need similar functionality, you can use it. +# Therefore I havent deleted this code completely! +# def CallbackManager( self, ob, args = () ): +# """Manage win32 callbacks. Trap exceptions, report on them, then return 'All OK' +# to the frame-work. """ +# import traceback +# try: +# ret = apply(ob, args) +# return ret +# except: +# # take copies of the exception values, else other (handled) exceptions may get +# # copied over by the other fns called. +# win32ui.SetStatusText('An exception occured in a windows command handler.') +# t, v, tb = sys.exc_info() +# traceback.print_exception(t, v, tb.tb_next) +# try: +# sys.stdout.flush() +# except (NameError, AttributeError): +# pass + + # Command handlers. + def OnFileMRU( self, id, code ): + " Called when a File 1-n message is recieved " + fileName = win32ui.GetRecentFileList()[id - win32ui.ID_FILE_MRU_FILE1] + win32ui.GetApp().OpenDocumentFile(fileName) + + def HandleOnFileOpen( self, id, code ): + " Called when FileOpen message is received " + win32ui.GetApp().OnFileOpen() + + def HandleOnFileNew( self, id, code ): + " Called when FileNew message is received " + win32ui.GetApp().OnFileNew() + + def OnHelpAbout( self, id, code ): + " Called when HelpAbout message is received. Displays the About dialog. " + win32ui.InitRichEdit() + dlg=AboutBox() + dlg.DoModal() + +def _GetRegistryValue(key, val, default = None): + # val is registry value - None for default val. + try: + hkey = win32api.RegOpenKey(win32con.HKEY_CURRENT_USER, key) + return win32api.RegQueryValueEx(hkey, val)[0] + except win32api.error: + try: + hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, key) + return win32api.RegQueryValueEx(hkey, val)[0] + except win32api.error: + return default + +scintilla = "Scintilla is Copyright 1998-2008 Neil Hodgson (http://www.scintilla.org)" +idle = "This program uses IDLE extensions by Guido van Rossum, Tim Peters and others." +contributors = "Thanks to the following people for making significant contributions: Roger Upole, Sidnei da Silva, Sam Rushing, Curt Hagenlocher, Dave Brennan, Roger Burnham, Gordon McMillan, Neil Hodgson, Laramie Leavitt. (let me know if I have forgotten you!)" +# The About Box +class AboutBox(dialog.Dialog): + def __init__(self, idd=win32ui.IDD_ABOUTBOX): + dialog.Dialog.__init__(self, idd) + def OnInitDialog(self): + text = "Pythonwin - Python IDE and GUI Framework for Windows.\n\n%s\n\nPython is %s\n\n%s\n\n%s\n\n%s" % (win32ui.copyright, sys.copyright, scintilla, idle, contributors) + self.SetDlgItemText(win32ui.IDC_EDIT1, text) + # Get the build number - written by installers. 
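As an aside on the `_GetRegistryValue` helper just above: it reads a value under HKEY_CURRENT_USER first and only falls back to HKEY_LOCAL_MACHINE (and then to the supplied default) when the per-user key is missing. A minimal standalone sketch of that per-user-then-per-machine lookup; `read_setting` and the key path are example names only, not part of pywin32:

# Sketch of the HKCU-then-HKLM fallback pattern used by _GetRegistryValue above.
import win32api, win32con

def read_setting(subkey, value_name, default=None):
    for root in (win32con.HKEY_CURRENT_USER, win32con.HKEY_LOCAL_MACHINE):
        try:
            hkey = win32api.RegOpenKey(root, subkey)
            try:
                return win32api.RegQueryValueEx(hkey, value_name)[0]
            finally:
                win32api.RegCloseKey(hkey)
        except win32api.error:
            continue  # not present in this hive - try the next one
    return default

# e.g. read_setting(r"SOFTWARE\ActiveState\ActivePython", "CurrentVersion")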
+ # For distutils build, read pywin32.version.txt + import distutils.sysconfig + site_packages = distutils.sysconfig.get_python_lib(plat_specific=1) + try: + build_no = open(os.path.join(site_packages, "pywin32.version.txt")).read().strip() + ver = "pywin32 build %s" % build_no + except EnvironmentError: + ver = None + if ver is None: + # See if we are Part of Active Python + ver = _GetRegistryValue("SOFTWARE\\ActiveState\\ActivePython", "CurrentVersion") + if ver is not None: + ver = "ActivePython build %s" % (ver,) + if ver is None: + ver = "" + self.SetDlgItemText(win32ui.IDC_ABOUT_VERSION, ver) + self.HookCommand(self.OnButHomePage, win32ui.IDC_BUTTON1) + + def OnButHomePage(self, id, code): + if code == win32con.BN_CLICKED: + win32api.ShellExecute(0, "open", "https://github.com/mhammond/pywin32", None, "", 1) + +def Win32RawInput(prompt=None): + "Provide raw_input() for gui apps" + # flush stderr/out first. + try: + sys.stdout.flush() + sys.stderr.flush() + except: + pass + if prompt is None: prompt = "" + ret=dialog.GetSimpleInput(prompt) + if ret==None: + raise KeyboardInterrupt("operation cancelled") + return ret + +def Win32Input(prompt=None): + "Provide input() for gui apps" + return eval(input(prompt)) + +def HookInput(): + try: + raw_input + # must be py2x... + sys.modules['__builtin__'].raw_input=Win32RawInput + sys.modules['__builtin__'].input=Win32Input + except NameError: + # must be py3k + import code + sys.modules['builtins'].input=Win32RawInput + +def HaveGoodGUI(): + """Returns true if we currently have a good gui available. + """ + return "pywin.framework.startup" in sys.modules + +def CreateDefaultGUI( appClass = None): + """Creates a default GUI environment + """ + if appClass is None: + from . import intpyapp # Bring in the default app - could be param'd later. + appClass = intpyapp.InteractivePythonApp + # Create and init the app. + appClass().InitInstance() + +def CheckCreateDefaultGUI(): + """Checks and creates if necessary a default GUI environment. + """ + rc = HaveGoodGUI() + if not rc: + CreateDefaultGUI() + return rc diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/bitmap.py b/venv/Lib/site-packages/pythonwin/pywin/framework/bitmap.py new file mode 100644 index 00000000..eb8bea98 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/bitmap.py @@ -0,0 +1,143 @@ +import win32ui +import win32con +import win32api +import string +import os +from . import app +import sys + +from pywin.mfc import docview, window + +bStretch = 1 + +class BitmapDocument(docview.Document): + "A bitmap document. Holds the bitmap data itself." + def __init__(self, template): + docview.Document.__init__(self, template) + self.bitmap=None + def OnNewDocument(self): + # I can not create new bitmaps. + win32ui.MessageBox("Bitmaps can not be created.") + def OnOpenDocument(self, filename): + self.bitmap=win32ui.CreateBitmap() + # init data members + f = open(filename, 'rb') + try: + try: + self.bitmap.LoadBitmapFile(f) + except IOError: + win32ui.MessageBox("Could not load the bitmap from %s" % filename) + return 0 + finally: + f.close() + self.size = self.bitmap.GetSize() + return 1 + def DeleteContents(self): + self.bitmap=None + +class BitmapView(docview.ScrollView): + "A view of a bitmap. Obtains data from document." 
+ def __init__(self, doc): + docview.ScrollView.__init__(self, doc) + self.width = self.height = 0 + # set up message handlers + self.HookMessage (self.OnSize, win32con.WM_SIZE) + + def OnInitialUpdate(self): + doc = self.GetDocument() + if doc.bitmap: + bitmapSize = doc.bitmap.GetSize() + self.SetScrollSizes(win32con.MM_TEXT, bitmapSize) + + def OnSize (self, params): + lParam = params[3] + self.width = win32api.LOWORD(lParam) + self.height = win32api.HIWORD(lParam) + + def OnDraw (self, dc): + # set sizes used for "non stretch" mode. + doc = self.GetDocument() + if doc.bitmap is None: return + bitmapSize = doc.bitmap.GetSize() + if bStretch: + # stretch BMP. + viewRect = (0,0,self.width, self.height) + bitmapRect = (0,0,bitmapSize[0], bitmapSize[1]) + doc.bitmap.Paint(dc, viewRect, bitmapRect) + else: + # non stretch. + doc.bitmap.Paint(dc) + +class BitmapFrame(window.MDIChildWnd): + def OnCreateClient( self, createparams, context ): + borderX = win32api.GetSystemMetrics(win32con.SM_CXFRAME) + borderY = win32api.GetSystemMetrics(win32con.SM_CYFRAME) + titleY = win32api.GetSystemMetrics(win32con.SM_CYCAPTION) # includes border + # try and maintain default window pos, else adjust if cant fit + # get the main client window dimensions. + mdiClient = win32ui.GetMainFrame().GetWindow(win32con.GW_CHILD) + clientWindowRect=mdiClient.ScreenToClient(mdiClient.GetWindowRect()) + clientWindowSize=(clientWindowRect[2]-clientWindowRect[0],clientWindowRect[3]-clientWindowRect[1]) + left, top, right, bottom=mdiClient.ScreenToClient(self.GetWindowRect()) +# width, height=context.doc.size[0], context.doc.size[1] +# width = width+borderX*2 +# height= height+titleY+borderY*2-1 +# if (left+width)>clientWindowSize[0]: +# left = clientWindowSize[0] - width +# if left<0: +# left = 0 +# width = clientWindowSize[0] +# if (top+height)>clientWindowSize[1]: +# top = clientWindowSize[1] - height +# if top<0: +# top = 0 +# height = clientWindowSize[1] +# self.frame.MoveWindow((left, top, left+width, top+height),0) + window.MDIChildWnd.OnCreateClient(self, createparams, context) + return 1 + + +class BitmapTemplate(docview.DocTemplate): + def __init__(self): + docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, BitmapDocument, BitmapFrame, BitmapView) + def MatchDocType(self, fileName, fileType): + doc = self.FindOpenDocument(fileName) + if doc: return doc + ext = os.path.splitext(fileName)[1].lower() + if ext =='.bmp': # removed due to PIL! or ext=='.ppm': + return win32ui.CDocTemplate_Confidence_yesAttemptNative + return win32ui.CDocTemplate_Confidence_maybeAttemptForeign +# return win32ui.CDocTemplate_Confidence_noAttempt + +# For debugging purposes, when this module may be reloaded many times. +try: + win32ui.GetApp().RemoveDocTemplate(bitmapTemplate) +except NameError: + pass + +bitmapTemplate = BitmapTemplate() +bitmapTemplate.SetDocStrings('\nBitmap\nBitmap\nBitmap (*.bmp)\n.bmp\nPythonBitmapFileType\nPython Bitmap File') +win32ui.GetApp().AddDocTemplate(bitmapTemplate) + +# This works, but just didnt make it through the code reorg. +#class PPMBitmap(Bitmap): +# def LoadBitmapFile(self, file ): +# magic=file.readline() +# if magic <> "P6\n": +# raise TypeError, "The file is not a PPM format file" +# rowcollist=string.split(file.readline()) +# cols=string.atoi(rowcollist[0]) +# rows=string.atoi(rowcollist[1]) +# file.readline() # whats this one? 
+# self.bitmap.LoadPPMFile(file,(cols,rows)) + + +def t(): + bitmapTemplate.OpenDocumentFile('d:\\winnt\\arcade.bmp') + #OpenBMPFile( 'd:\\winnt\\arcade.bmp') + +def demo(): + import glob + winDir=win32api.GetWindowsDirectory() + for fileName in glob.glob1(winDir, '*.bmp')[:2]: + bitmapTemplate.OpenDocumentFile(os.path.join(winDir, fileName))
diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/cmdline.py b/venv/Lib/site-packages/pythonwin/pywin/framework/cmdline.py new file mode 100644 index 00000000..01f48f0d --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/cmdline.py @@ -0,0 +1,49 @@ +# cmdline - command line utilities. +import sys +import win32ui +import string + +def ParseArgs( str ): + import string + ret=[] + pos = 0 + length=len(str) + while pos<length: + try: + while str[pos] in string.whitespace: pos = pos+1 + except IndexError: + break + if pos>=length: + break + if str[pos]=='"': + pos=pos+1 + try: + endPos = str.index('"', pos)-1 + nextPos = endPos+2 + except ValueError: + endPos=length + nextPos=endPos+1 + else: + endPos = pos + while endPos<length and not str[endPos] in string.whitespace: endPos = endPos+1 + nextPos=endPos+1 + ret.append(str[pos:endPos+1].strip()) + pos = nextPos + return ret
diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/dlgappcore.py b/venv/Lib/site-packages/pythonwin/pywin/framework/dlgappcore.py new file mode 100644 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/dlgappcore.py + left = (right - win32api.GetSystemMetrics(win32con.SM_CXICON)) >> 1 + top = (bottom - win32api.GetSystemMetrics(win32con.SM_CYICON)) >> 1 + hIcon = win32ui.GetApp().LoadIcon(self.iconId) + self.GetDC().DrawIcon((left, top), hIcon) + + # Only needed to provide a minimized icon (and this seems + # less important under win95/NT4 + def OnEraseBkgnd(self, dc): + if self.IsIconic(): + return 1 + else: + return self._obj_.OnEraseBkgnd(dc) + def OnQueryDragIcon(self): + return win32ui.GetApp().LoadIcon(self.iconId) + + def PreDoModal(self): + pass + + +class DialogApp(app.CApp): + "An application class, for an app with main dialog box" + def InitInstance(self): +# win32ui.SetProfileFileName('dlgapp.ini') + win32ui.LoadStdProfileSettings() + win32ui.EnableControlContainer() + win32ui.Enable3dControls() + self.dlg = self.frame = self.CreateDialog() + + if self.frame is None: + raise error("No dialog was created by CreateDialog()") + return + + self._obj_.InitDlgInstance(self.dlg) + self.PreDoModal() + self.dlg.PreDoModal() + self.dlg.DoModal() + + def CreateDialog(self): + pass + def PreDoModal(self): + pass +
diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/ModuleBrowser.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/ModuleBrowser.py new file mode 100644 index 00000000..93abf107 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/ModuleBrowser.py @@ -0,0 +1,200 @@ +# ModuleBrowser.py - A view that provides a module browser for an editor document.
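An aside on the `DialogApp` class shown above in dlgappcore.py: its `InitInstance` calls `CreateDialog()` and treats the returned object as the application's main window, running it modally, so a concrete app is expected to subclass it and override `CreateDialog`. A minimal hypothetical sketch of such a subclass follows; it reuses `win32ui.IDD_ABOUTBOX` purely as an example resource id, and a real application would supply its own dialog resource and let the Pythonwin framework drive `InitInstance`:

# Hypothetical DialogApp subclass - illustration only, not part of the vendored sources.
import win32ui
from pywin.framework import dlgappcore
from pywin.mfc import dialog

class MyDialogApp(dlgappcore.DialogApp):
    def CreateDialog(self):
        # Return the dialog that should act as the main window;
        # InitInstance() will call PreDoModal() and then DoModal() on it.
        return dialog.Dialog(win32ui.IDD_ABOUTBOX)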
+import pywin.mfc.docview +import win32ui +import win32con +import commctrl +import win32api +from pywin.tools import hierlist, browser +import pywin.framework.scriptutils +import afxres + +import pyclbr + +class HierListCLBRModule(hierlist.HierListItem): + def __init__(self, modName, clbrdata): + self.modName = modName + self.clbrdata = clbrdata + def GetText(self): + return self.modName + def GetSubList(self): + ret = [] + for item in self.clbrdata.values(): + if item.__class__ != pyclbr.Class: # ie, it is a pyclbr Function instance (only introduced post 1.5.2) + ret.append(HierListCLBRFunction( item ) ) + else: + ret.append(HierListCLBRClass( item) ) + ret.sort() + return ret + def IsExpandable(self): + return 1 + +class HierListCLBRItem(hierlist.HierListItem): + def __init__(self, name, file, lineno, suffix = ""): + self.name = str(name) + self.file = file + self.lineno = lineno + self.suffix = suffix + def __lt__(self, other): + return self.name < other.name + def __eq__(self, other): + return self.name == other.name + def GetText(self): + return self.name + self.suffix + def TakeDefaultAction(self): + if self.file: + pywin.framework.scriptutils.JumpToDocument(self.file, self.lineno, bScrollToTop = 1) + else: + win32ui.SetStatusText("Can not locate the source code for this object.") + def PerformItemSelected(self): + if self.file is None: + msg = "%s - source can not be located." % (self.name, ) + else: + msg = "%s defined at line %d of %s" % (self.name, self.lineno, self.file) + win32ui.SetStatusText(msg) + +class HierListCLBRClass(HierListCLBRItem): + def __init__(self, clbrclass, suffix = ""): + try: + name = clbrclass.name + file = clbrclass.file + lineno = clbrclass.lineno + self.super = clbrclass.super + self.methods = clbrclass.methods + except AttributeError: + name = clbrclass + file = lineno = None + self.super = []; self.methods = {} + HierListCLBRItem.__init__(self, name, file, lineno, suffix) + def GetSubList(self): + r1 = [] + for c in self.super: + r1.append(HierListCLBRClass(c, " (Parent class)")) + r1.sort() + r2=[] + for meth, lineno in self.methods.items(): + r2.append(HierListCLBRMethod(meth, self.file, lineno)) + r2.sort() + return r1+r2 + def IsExpandable(self): + return len(self.methods) + len(self.super) + def GetBitmapColumn(self): + return 21 + +class HierListCLBRFunction(HierListCLBRItem): + def __init__(self, clbrfunc, suffix = ""): + name = clbrfunc.name + file = clbrfunc.file + lineno = clbrfunc.lineno + HierListCLBRItem.__init__(self, name, file, lineno, suffix) + def GetBitmapColumn(self): + return 22 + +class HierListCLBRMethod(HierListCLBRItem): + def GetBitmapColumn(self): + return 22 + +class HierListCLBRErrorItem(hierlist.HierListItem): + def __init__(self, text): + self.text = text + def GetText(self): + return self.text + def GetSubList(self): + return [HierListCLBRErrorItem(self.text)] + def IsExpandable(self): + return 0 + +class HierListCLBRErrorRoot(HierListCLBRErrorItem): + def IsExpandable(self): + return 1 + +class BrowserView(pywin.mfc.docview.TreeView): + def OnInitialUpdate(self): + self.list = None + rc = self._obj_.OnInitialUpdate() + self.HookMessage(self.OnSize, win32con.WM_SIZE) + self.bDirty = 0 + self.destroying = 0 + return rc + + def DestroyBrowser(self): + self.DestroyList() + + def OnActivateView(self, activate, av, dv): +# print "AV", self.bDirty, activate + if activate: + self.CheckRefreshList() + return self._obj_.OnActivateView(activate, av, dv) + + def _MakeRoot(self): + path = self.GetDocument().GetPathName() + if not 
path: + return HierListCLBRErrorRoot("Error: Can not browse a file until it is saved") + else: + mod, path = pywin.framework.scriptutils.GetPackageModuleName(path) + if self.bDirty: + what = "Refreshing" + # Hack for pyclbr being too smart + try: + del pyclbr._modules[mod] + except (KeyError, AttributeError): + pass + else: + what = "Building" + win32ui.SetStatusText("%s class list - please wait..." % (what,), 1) + win32ui.DoWaitCursor(1) + try: + reader = pyclbr.readmodule_ex # new version post 1.5.2 + except AttributeError: + reader = pyclbr.readmodule + try: + data = reader(mod, [path]) + if data: + return HierListCLBRModule(mod, data) + else: + return HierListCLBRErrorRoot("No Python classes in module.") + + finally: + win32ui.DoWaitCursor(0) + win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)) + + def DestroyList(self): + self.destroying = 1 + list = getattr(self, "list", None) # If the document was not successfully opened, we may not have a list. + self.list = None + if list is not None: + list.HierTerm() + self.destroying = 0 + + def CheckMadeList(self): + if self.list is not None or self.destroying: return + self.rootitem = root = self._MakeRoot() + self.list = list = hierlist.HierListWithItems( root, win32ui.IDB_BROWSER_HIER) + list.HierInit(self.GetParentFrame(), self) + list.SetStyle(commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS) + + def CheckRefreshList(self): + if self.bDirty: + if self.list is None: + self.CheckMadeList() + else: + new_root = self._MakeRoot() + if self.rootitem.__class__==new_root.__class__==HierListCLBRModule: + self.rootitem.modName = new_root.modName + self.rootitem.clbrdata = new_root.clbrdata + self.list.Refresh() + else: + self.list.AcceptRoot(self._MakeRoot()) + self.bDirty = 0 + + def OnSize(self, params): + lparam = params[3] + w = win32api.LOWORD(lparam) + h = win32api.HIWORD(lparam) + if w != 0: + self.CheckMadeList() + elif w == 0: + self.DestroyList() + return 1 + + def _UpdateUIForState(self): + self.bDirty = 1 diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__init__.py new file mode 100644 index 00000000..ffd1c073 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__init__.py @@ -0,0 +1,90 @@ +# __init__ for the Pythonwin editor package. +# +# We used to support optional editors - eg, color or non-color. +# +# This really isnt necessary with Scintilla, and scintilla +# is getting so deeply embedded that it was too much work. + +import win32ui, sys, win32con + +defaultCharacterFormat = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New') + +##def GetDefaultEditorModuleName(): +## import pywin +## # If someone has set pywin.editormodulename, then this is what we use +## try: +## prefModule = pywin.editormodulename +## except AttributeError: +## prefModule = win32ui.GetProfileVal("Editor","Module", "") +## return prefModule +## +##def WriteDefaultEditorModule(module): +## try: +## module = module.__name__ +## except: +## pass +## win32ui.WriteProfileVal("Editor", "Module", module) + +def LoadDefaultEditor(): + pass +## prefModule = GetDefaultEditorModuleName() +## restorePrefModule = None +## mod = None +## if prefModule: +## try: +## mod = __import__(prefModule) +## except 'xx': +## msg = "Importing your preferred editor ('%s') failed.\n\nError %s: %s\n\nAn attempt will be made to load the default editor.\n\nWould you like this editor disabled in the future?" 
% (prefModule, sys.exc_info()[0], sys.exc_info()[1]) +## rc = win32ui.MessageBox(msg, "Error importing editor", win32con.MB_YESNO) +## if rc == win32con.IDNO: +## restorePrefModule = prefModule +## WriteDefaultEditorModule("") +## del rc +## +## try: +## # Try and load the default one - dont catch errors here. +## if mod is None: +## prefModule = "pywin.framework.editor.color.coloreditor" +## mod = __import__(prefModule) +## +## # Get at the real module. +## mod = sys.modules[prefModule] +## +## # Do a "from mod import *" +## globals().update(mod.__dict__) +## +## finally: +## # Restore the users default editor if it failed and they requested not to disable it. +## if restorePrefModule: +## WriteDefaultEditorModule(restorePrefModule) + +def GetEditorOption(option, defaultValue, min=None, max = None): + rc = win32ui.GetProfileVal("Editor", option, defaultValue) + if min is not None and rc < min: rc = defaultValue + if max is not None and rc > max: rc = defaultValue + return rc + +def SetEditorOption(option, newValue): + win32ui.WriteProfileVal("Editor", option, newValue) + +def DeleteEditorOption(option): + try: + win32ui.WriteProfileVal("Editor", option, None) + except win32ui.error: + pass + +# Load and save font tuples +def GetEditorFontOption(option, default = None): + if default is None: default = defaultCharacterFormat + fmt = GetEditorOption( option, "" ) + if fmt == "": return default + try: + return eval(fmt) + except: + print("WARNING: Invalid font setting in registry - setting ignored") + return default + +def SetEditorFontOption(option, newValue): + SetEditorOption(option, str(newValue)) + +from pywin.framework.editor.color.coloreditor import editorTemplate diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/ModuleBrowser.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/ModuleBrowser.cpython-36.pyc new file mode 100644 index 00000000..8f2c48b4 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/ModuleBrowser.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..b3981ea2 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/configui.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/configui.cpython-36.pyc new file mode 100644 index 00000000..6562045f Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/configui.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/document.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/document.cpython-36.pyc new file mode 100644 index 00000000..38be1e90 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/document.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/editor.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/editor.cpython-36.pyc new file mode 100644 index 00000000..b8c102d8 Binary files /dev/null and 
b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/editor.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/frame.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/frame.cpython-36.pyc new file mode 100644 index 00000000..d4fb8f9a Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/frame.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/template.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/template.cpython-36.pyc new file mode 100644 index 00000000..888fe524 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/template.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/vss.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/vss.cpython-36.pyc new file mode 100644 index 00000000..80af3578 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/__pycache__/vss.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..d8ad6e42 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/__pycache__/coloreditor.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/__pycache__/coloreditor.cpython-36.pyc new file mode 100644 index 00000000..17311aa5 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/__pycache__/coloreditor.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/coloreditor.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/coloreditor.py new file mode 100644 index 00000000..a58d58b2 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/color/coloreditor.py @@ -0,0 +1,525 @@ +# Color Editor originally by Neil Hodgson, but restructured by mh to integrate +# even tighter into Pythonwin. +import win32ui +import win32con +import win32api +import sys + +import pywin.scintilla.keycodes +from pywin.scintilla import bindings + +from pywin.framework.editor import GetEditorOption, SetEditorOption, GetEditorFontOption, SetEditorFontOption, defaultCharacterFormat +#from pywin.framework.editor import EditorPropertyPage + +MSG_CHECK_EXTERNAL_FILE = win32con.WM_USER+1999 ## WARNING: Duplicated in document.py and editor.py + +# Define a few common markers +MARKER_BOOKMARK = 0 +MARKER_BREAKPOINT = 1 +MARKER_CURRENT = 2 + +from pywin.debugger import dbgcon +from pywin.scintilla.document import CScintillaDocument +from pywin.framework.editor.document import EditorDocumentBase +from pywin.scintilla import scintillacon # For the marker definitions +import pywin.scintilla.view + +class SyntEditDocument(EditorDocumentBase): + "A SyntEdit document. 
" + def OnDebuggerStateChange(self, state): + self._ApplyOptionalToViews("OnDebuggerStateChange", state) + def HookViewNotifications(self, view): + EditorDocumentBase.HookViewNotifications(self, view) + view.SCISetUndoCollection(1) + def FinalizeViewCreation(self, view): + EditorDocumentBase.FinalizeViewCreation(self, view) + if view==self.GetFirstView(): + self.GetDocTemplate().CheckIDLEMenus(view.idle) + +SyntEditViewParent=pywin.scintilla.view.CScintillaView +class SyntEditView(SyntEditViewParent): + "A view of a SyntEdit. Obtains data from document." + def __init__(self, doc): + SyntEditViewParent.__init__(self, doc) + self.bCheckingFile = 0 + + def OnInitialUpdate(self): + SyntEditViewParent.OnInitialUpdate(self) + + self.HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN) + + for id in [win32ui.ID_VIEW_FOLD_COLLAPSE, win32ui.ID_VIEW_FOLD_COLLAPSE_ALL, + win32ui.ID_VIEW_FOLD_EXPAND, win32ui.ID_VIEW_FOLD_EXPAND_ALL]: + + self.HookCommand(self.OnCmdViewFold, id) + self.HookCommandUpdate(self.OnUpdateViewFold, id) + self.HookCommand(self.OnCmdViewFoldTopLevel, win32ui.ID_VIEW_FOLD_TOPLEVEL) + + # Define the markers +# self.SCIMarkerDeleteAll() + self.SCIMarkerDefineAll(MARKER_BOOKMARK, scintillacon.SC_MARK_ROUNDRECT, win32api.RGB(0x0, 0x0, 0x0), win32api.RGB(0, 0xff, 0xff)) + + self.SCIMarkerDefine(MARKER_CURRENT, scintillacon.SC_MARK_ARROW) + self.SCIMarkerSetBack(MARKER_CURRENT, win32api.RGB(0xff, 0xff, 0x00)) + + # Define the folding markers + if 1: #traditional markers + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDEROPEN, scintillacon.SC_MARK_MINUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDER, scintillacon.SC_MARK_PLUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDERSUB, scintillacon.SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDERTAIL, scintillacon.SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDEREND, scintillacon.SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDEROPENMID, scintillacon.SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDERMIDTAIL, scintillacon.SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + else: # curved markers + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDEROPEN, scintillacon.SC_MARK_CIRCLEMINUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDER, scintillacon.SC_MARK_CIRCLEPLUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDERSUB, scintillacon.SC_MARK_VLINE, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDERTAIL, scintillacon.SC_MARK_LCORNERCURVE, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDEREND, scintillacon.SC_MARK_CIRCLEPLUSCONNECTED, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDEROPENMID, scintillacon.SC_MARK_CIRCLEMINUSCONNECTED, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + self.SCIMarkerDefineAll(scintillacon.SC_MARKNUM_FOLDERMIDTAIL, scintillacon.SC_MARK_TCORNERCURVE, 
win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0)) + + self.SCIMarkerDefine(MARKER_BREAKPOINT, scintillacon.SC_MARK_CIRCLE) + # Marker background depends on debugger state + self.SCIMarkerSetFore(MARKER_BREAKPOINT, win32api.RGB(0x0, 0, 0)) + # Get the current debugger state. + try: + import pywin.debugger + if pywin.debugger.currentDebugger is None: + state = dbgcon.DBGSTATE_NOT_DEBUGGING + else: + state = pywin.debugger.currentDebugger.debuggerState + except ImportError: + state = dbgcon.DBGSTATE_NOT_DEBUGGING + self.OnDebuggerStateChange(state) + + def _GetSubConfigNames(self): + return ["editor"] # Allow [Keys:Editor] sections to be specific to us + + def DoConfigChange(self): + SyntEditViewParent.DoConfigChange(self) + tabSize = GetEditorOption("Tab Size", 4, 2) + indentSize = GetEditorOption("Indent Size", 4, 2) + bUseTabs = GetEditorOption("Use Tabs", 0) + bSmartTabs = GetEditorOption("Smart Tabs", 1) + ext = self.idle.IDLEExtension("AutoIndent") # Required extension. + + self.SCISetViewWS( GetEditorOption("View Whitespace", 0) ) + self.SCISetViewEOL( GetEditorOption("View EOL", 0) ) + self.SCISetIndentationGuides( GetEditorOption("View Indentation Guides", 0) ) + + if GetEditorOption("Right Edge Enabled", 0): + mode = scintillacon.EDGE_BACKGROUND + else: + mode = scintillacon.EDGE_NONE + self.SCISetEdgeMode(mode) + self.SCISetEdgeColumn( GetEditorOption("Right Edge Column", 75) ) + self.SCISetEdgeColor( GetEditorOption("Right Edge Color", win32api.RGB(0xef, 0xef, 0xef))) + + width = GetEditorOption("Marker Margin Width", 16) + self.SCISetMarginWidthN(1, width) + width = GetEditorOption("Fold Margin Width", 12) + self.SCISetMarginWidthN(2, width) + width = GetEditorOption("Line Number Margin Width", 0) + self.SCISetMarginWidthN(0, width) + self.bFolding = GetEditorOption("Enable Folding", 1) + fold_flags = 0 + self.SendScintilla(scintillacon.SCI_SETMODEVENTMASK, scintillacon.SC_MOD_CHANGEFOLD); + if self.bFolding: + if GetEditorOption("Fold Lines", 1): + fold_flags = 16 + + self.SCISetProperty("fold", self.bFolding) + self.SCISetFoldFlags(fold_flags) + + tt_color = GetEditorOption("Tab Timmy Color", win32api.RGB(0xff, 0, 0)) + self.SendScintilla(scintillacon.SCI_INDICSETFORE, 1, tt_color) + + tt_use = GetEditorOption("Use Tab Timmy", 1) + if tt_use: + self.SCISetProperty("tab.timmy.whinge.level", "1") + + # Auto-indent has very complicated behaviour. In a nutshell, the only + # way to get sensible behaviour from it is to ensure tabwidth != indentsize. + # Further, usetabs will only ever go from 1->0, never 0->1. + # This is _not_ the behaviour Pythonwin wants: + # * Tab width is arbitary, so should have no impact on smarts. + # * bUseTabs setting should reflect how new files are created, and + # if Smart Tabs disabled, existing files are edited + # * If "Smart Tabs" is enabled, bUseTabs should have no bearing + # for existing files (unless of course no context can be determined) + # + # So for smart tabs we configure the widget with completely dummy + # values (ensuring tabwidth != indentwidth), ask it to guess, then + # look at the values it has guessed, and re-configure + if bSmartTabs: + ext.config(usetabs=1, tabwidth=5, indentwidth=4) + ext.set_indentation_params(1) + if ext.indentwidth==5: + # Either 5 literal spaces, or a single tab character. Assume a tab + usetabs = 1 + indentwidth = tabSize + else: + # Either Indented with spaces, and indent size has been guessed or + # an empty file (or no context found - tough!) 
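The comment block above explains the smart-tabs trick that the code just below applies: `DoConfigChange` first configures IDLE's AutoIndent extension with deliberately mismatched dummy widths (tab width 5, indent width 4), lets it guess from the file, and then interprets the guess. A rough standalone sketch of that decision logic, using stand-in names (`guessed_indentwidth`, `file_is_empty`) instead of the real extension object:

# Standalone sketch of the smart-tab decision described above (stand-in names,
# not the actual pywin code path).
PROBE_TABWIDTH = 5  # deliberately different from the probe indent width of 4

def choose_indentation(guessed_indentwidth, file_is_empty,
                       user_use_tabs, user_indent_size, user_tab_size):
    if guessed_indentwidth == PROBE_TABWIDTH:
        # A guessed width of exactly 5 can only have come from a literal tab character.
        return dict(usetabs=1, indentwidth=user_tab_size)
    if file_is_empty:
        # Nothing to guess from - fall back to the user's preferences.
        return dict(usetabs=user_use_tabs, indentwidth=user_indent_size)
    # Space-indented file: keep the guessed width and do not insert tabs.
    return dict(usetabs=0, indentwidth=guessed_indentwidth)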
+ if self.GetTextLength()==0: # emtpy + usetabs = bUseTabs + indentwidth = indentSize + else: # guessed. + indentwidth = ext.indentwidth + usetabs = 0 + # Tab size can never be guessed - set at user preference. + ext.config(usetabs=usetabs, indentwidth=indentwidth, tabwidth=tabSize) + else: + # Dont want smart-tabs - just set the options! + ext.config(usetabs=bUseTabs, tabwidth=tabSize, indentwidth=indentSize) + self.SCISetIndent(indentSize) + self.SCISetTabWidth(tabSize) + + def OnDebuggerStateChange(self, state): + if state == dbgcon.DBGSTATE_NOT_DEBUGGING: + # Indicate breakpoints arent really usable. + # Not quite white - useful when no marker margin, so set as background color. + self.SCIMarkerSetBack(MARKER_BREAKPOINT, win32api.RGB(0xef, 0xef, 0xef)) + else: + # A light-red, so still readable when no marker margin. + self.SCIMarkerSetBack(MARKER_BREAKPOINT, win32api.RGB(0xff, 0x80, 0x80)) + + def HookDocumentHandlers(self): + SyntEditViewParent.HookDocumentHandlers(self) + self.HookMessage(self.OnCheckExternalDocumentUpdated,MSG_CHECK_EXTERNAL_FILE) + + def HookHandlers(self): + SyntEditViewParent.HookHandlers(self) + self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS) + + def _PrepareUserStateChange(self): + return self.GetSel(), self.GetFirstVisibleLine() + def _EndUserStateChange(self, info): + scrollOff = info[1] - self.GetFirstVisibleLine() + if scrollOff: + self.LineScroll(scrollOff) + # Make sure we dont reset the cursor beyond the buffer. + max = self.GetTextLength() + newPos = min(info[0][0], max), min(info[0][1], max) + self.SetSel(newPos) + + ####################################### + # The Windows Message or Notify handlers. + ####################################### + def OnMarginClick(self, std, extra): + notify = self.SCIUnpackNotifyMessage(extra) + if notify.margin==2: # Our fold margin + line_click = self.LineFromChar(notify.position) +# max_line = self.GetLineCount() + if self.SCIGetFoldLevel(line_click) & scintillacon.SC_FOLDLEVELHEADERFLAG: + # If a fold point. + self.SCIToggleFold(line_click) + return 1 + + def OnSetFocus(self,msg): + # Even though we use file change notifications, we should be very sure about it here. 
+ self.OnCheckExternalDocumentUpdated(msg) + return 1 + + def OnCheckExternalDocumentUpdated(self, msg): + if self.bCheckingFile: return + self.bCheckingFile = 1 + self.GetDocument().CheckExternalDocumentUpdated() + self.bCheckingFile = 0 + + def OnRClick(self,params): + menu = win32ui.CreatePopupMenu() + self.AppendMenu(menu, "&Locate module", "LocateModule") + self.AppendMenu(menu, flags=win32con.MF_SEPARATOR) + self.AppendMenu(menu, "&Undo", "EditUndo") + self.AppendMenu(menu, '&Redo', 'EditRedo') + self.AppendMenu(menu, flags=win32con.MF_SEPARATOR) + self.AppendMenu(menu, 'Cu&t', 'EditCut') + self.AppendMenu(menu, '&Copy', 'EditCopy') + self.AppendMenu(menu, '&Paste', 'EditPaste') + self.AppendMenu(menu, flags=win32con.MF_SEPARATOR) + self.AppendMenu(menu, '&Select all', 'EditSelectAll') + self.AppendMenu(menu, 'View &Whitespace', 'ViewWhitespace', checked=self.SCIGetViewWS()) + self.AppendMenu(menu, "&Fixed Font", "ViewFixedFont", checked = self._GetColorizer().bUseFixed) + self.AppendMenu(menu, flags=win32con.MF_SEPARATOR) + self.AppendMenu(menu, "&Goto line...", "GotoLine") + + submenu = win32ui.CreatePopupMenu() + newitems = self.idle.GetMenuItems("edit") + for text, event in newitems: + self.AppendMenu(submenu, text, event) + + flags=win32con.MF_STRING|win32con.MF_ENABLED|win32con.MF_POPUP + menu.AppendMenu(flags, submenu.GetHandle(), "&Source code") + + flags = win32con.TPM_LEFTALIGN|win32con.TPM_LEFTBUTTON|win32con.TPM_RIGHTBUTTON + menu.TrackPopupMenu(params[5], flags, self) + return 0 + def OnCmdViewFold(self, cid, code): # Handle the menu command + if cid == win32ui.ID_VIEW_FOLD_EXPAND_ALL: + self.FoldExpandAllEvent(None) + elif cid == win32ui.ID_VIEW_FOLD_EXPAND: + self.FoldExpandEvent(None) + elif cid == win32ui.ID_VIEW_FOLD_COLLAPSE_ALL: + self.FoldCollapseAllEvent(None) + elif cid == win32ui.ID_VIEW_FOLD_COLLAPSE: + self.FoldCollapseEvent(None) + else: + print("Unknown collapse/expand ID") + def OnUpdateViewFold(self, cmdui): # Update the tick on the UI. 
+ if not self.bFolding: + cmdui.Enable(0) + return + id = cmdui.m_nID + if id in [win32ui.ID_VIEW_FOLD_EXPAND_ALL, win32ui.ID_VIEW_FOLD_COLLAPSE_ALL]: + cmdui.Enable() + else: + enable = 0 + lineno = self.LineFromChar(self.GetSel()[0]) + foldable = self.SCIGetFoldLevel(lineno) & scintillacon.SC_FOLDLEVELHEADERFLAG + is_expanded = self.SCIGetFoldExpanded(lineno) + if id == win32ui.ID_VIEW_FOLD_EXPAND: + if foldable and not is_expanded: + enable = 1 + elif id == win32ui.ID_VIEW_FOLD_COLLAPSE: + if foldable and is_expanded: + enable = 1 + cmdui.Enable(enable) + + def OnCmdViewFoldTopLevel(self, cid, code): # Handle the menu command + self.FoldTopLevelEvent(None) + + ####################################### + # The Events + ####################################### + def ToggleBookmarkEvent(self, event, pos = -1): + """Toggle a bookmark at the specified or current position + """ + if pos==-1: + pos, end = self.GetSel() + startLine = self.LineFromChar(pos) + self.GetDocument().MarkerToggle(startLine+1, MARKER_BOOKMARK) + return 0 + + def GotoNextBookmarkEvent(self, event, fromPos=-1): + """ Move to the next bookmark + """ + if fromPos==-1: + fromPos, end = self.GetSel() + startLine = self.LineFromChar(fromPos)+1 # Zero based line to start + nextLine = self.GetDocument().MarkerGetNext(startLine+1, MARKER_BOOKMARK)-1 + if nextLine<0: + nextLine = self.GetDocument().MarkerGetNext(0, MARKER_BOOKMARK)-1 + if nextLine <0 or nextLine == startLine-1: + win32api.MessageBeep() + else: + self.SCIEnsureVisible(nextLine) + self.SCIGotoLine(nextLine) + return 0 + + def TabKeyEvent(self, event): + """Insert an indent. If no selection, a single indent, otherwise a block indent + """ + # Handle auto-complete first. + if self.SCIAutoCActive(): + self.SCIAutoCComplete() + return 0 + # Call the IDLE event. + return self.bindings.fire("<<smart-indent>>", event) + + def EnterKeyEvent(self, event): + """Handle the enter key with special handling for auto-complete + """ + # Handle auto-complete first. + if self.SCIAutoCActive(): + self.SCIAutoCComplete() + self.SCIAutoCCancel() + # Call the IDLE event. + return self.bindings.fire("<<newline-and-indent>>", event) + + def ShowInteractiveWindowEvent(self, event): + import pywin.framework.interact + pywin.framework.interact.ShowInteractiveWindow() + + def FoldTopLevelEvent(self, event = None): + if not self.bFolding: + return 1 + + win32ui.DoWaitCursor(1) + try: + self.Colorize() + maxLine = self.GetLineCount() + # Find the first line, and check out its state. + for lineSeek in range(maxLine): + if self.SCIGetFoldLevel(lineSeek) & scintillacon.SC_FOLDLEVELHEADERFLAG: + expanding = not self.SCIGetFoldExpanded(lineSeek) + break + else: + # no folds here! + return + for lineSeek in range(lineSeek, maxLine): + level = self.SCIGetFoldLevel(lineSeek) + level_no = level & scintillacon.SC_FOLDLEVELNUMBERMASK - scintillacon.SC_FOLDLEVELBASE + is_header = level & scintillacon.SC_FOLDLEVELHEADERFLAG + # print lineSeek, level_no, is_header + if level_no == 0 and is_header: + if (expanding and not self.SCIGetFoldExpanded(lineSeek)) or \ + (not expanding and self.SCIGetFoldExpanded(lineSeek)): + self.SCIToggleFold(lineSeek) + finally: + win32ui.DoWaitCursor(-1) + + def FoldExpandSecondLevelEvent(self, event): + if not self.bFolding: + return 1 + win32ui.DoWaitCursor(1) + ## I think this is needed since Scintilla may not have + ## already formatted parts of file outside visible window.
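All of the folding handlers above and below decode the same Scintilla fold-level word: one bit flags a fold-header line, and a masked field carries the nesting depth relative to `SC_FOLDLEVELBASE`. A small helper sketch of that decoding, assuming only the `scintillacon` constants the file already imports (`decode_fold_level` itself is not part of pywin32):

# Sketch of the fold-level decoding the Fold*Event handlers rely on.
from pywin.scintilla import scintillacon

def decode_fold_level(level):
    """Split a raw SCIGetFoldLevel() word into (is_header, depth)."""
    is_header = bool(level & scintillacon.SC_FOLDLEVELHEADERFLAG)
    depth = (level & scintillacon.SC_FOLDLEVELNUMBERMASK) - scintillacon.SC_FOLDLEVELBASE
    return is_header, depth

# e.g. a top-level class or def line typically decodes to (True, 0).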
+ self.Colorize() + levels=[scintillacon.SC_FOLDLEVELBASE] + ## Scintilla's level number is based on amount of whitespace indentation + for lineno in range(self.GetLineCount()): + level = self.SCIGetFoldLevel(lineno) + if not level & scintillacon.SC_FOLDLEVELHEADERFLAG: + continue + curr_level = level & scintillacon.SC_FOLDLEVELNUMBERMASK + if curr_level > levels[-1]: + levels.append(curr_level) + try: + level_ind=levels.index(curr_level) + except ValueError: + ## probably syntax error in source file, bail + break + levels=levels[:level_ind+1] + if level_ind == 1 and not self.SCIGetFoldExpanded(lineno): + self.SCIToggleFold(lineno) + win32ui.DoWaitCursor(-1) + + def FoldCollapseSecondLevelEvent(self, event): + if not self.bFolding: + return 1 + win32ui.DoWaitCursor(1) + ## I think this is needed since Scintilla may not have + ## already formatted parts of file outside visible window. + self.Colorize() + levels=[scintillacon.SC_FOLDLEVELBASE] + ## Scintilla's level number is based on amount of whitespace indentation + for lineno in range(self.GetLineCount()): + level = self.SCIGetFoldLevel(lineno) + if not level & scintillacon.SC_FOLDLEVELHEADERFLAG: + continue + curr_level = level & scintillacon.SC_FOLDLEVELNUMBERMASK + if curr_level > levels[-1]: + levels.append(curr_level) + try: + level_ind=levels.index(curr_level) + except ValueError: + ## probably syntax error in source file, bail + break + levels=levels[:level_ind+1] + if level_ind == 1 and self.SCIGetFoldExpanded(lineno): + self.SCIToggleFold(lineno) + win32ui.DoWaitCursor(-1) + + def FoldExpandEvent(self, event): + if not self.bFolding: + return 1 + win32ui.DoWaitCursor(1) + lineno = self.LineFromChar(self.GetSel()[0]) + if self.SCIGetFoldLevel(lineno) & scintillacon.SC_FOLDLEVELHEADERFLAG and \ + not self.SCIGetFoldExpanded(lineno): + self.SCIToggleFold(lineno) + win32ui.DoWaitCursor(-1) + + def FoldExpandAllEvent(self, event): + if not self.bFolding: + return 1 + win32ui.DoWaitCursor(1) + for lineno in range(0, self.GetLineCount()): + if self.SCIGetFoldLevel(lineno) & scintillacon.SC_FOLDLEVELHEADERFLAG and \ + not self.SCIGetFoldExpanded(lineno): + self.SCIToggleFold(lineno) + win32ui.DoWaitCursor(-1) + + def FoldCollapseEvent(self, event): + if not self.bFolding: + return 1 + win32ui.DoWaitCursor(1) + lineno = self.LineFromChar(self.GetSel()[0]) + if self.SCIGetFoldLevel(lineno) & scintillacon.SC_FOLDLEVELHEADERFLAG and \ + self.SCIGetFoldExpanded(lineno): + self.SCIToggleFold(lineno) + win32ui.DoWaitCursor(-1) + + def FoldCollapseAllEvent(self, event): + if not self.bFolding: + return 1 + win32ui.DoWaitCursor(1) + self.Colorize() + for lineno in range(0, self.GetLineCount()): + if self.SCIGetFoldLevel(lineno) & scintillacon.SC_FOLDLEVELHEADERFLAG and \ + self.SCIGetFoldExpanded(lineno): + self.SCIToggleFold(lineno) + win32ui.DoWaitCursor(-1) + + +from pywin.framework.editor.frame import EditorFrame +class SplitterFrame(EditorFrame): + def OnCreate(self, cs): + self.HookCommand(self.OnWindowSplit, win32ui.ID_WINDOW_SPLIT) + return 1 + def OnWindowSplit(self, id, code): + self.GetDlgItem(win32ui.AFX_IDW_PANE_FIRST).DoKeyboardSplit() + return 1 + +from pywin.framework.editor.template import EditorTemplateBase +class SyntEditTemplate(EditorTemplateBase): + def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None): + if makeDoc is None: makeDoc = SyntEditDocument + if makeView is None: makeView = SyntEditView + if makeFrame is None: makeFrame = SplitterFrame + self.bSetMenus = 0 + 
EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView) + + def CheckIDLEMenus(self, idle): + if self.bSetMenus: return + self.bSetMenus = 1 + + submenu = win32ui.CreatePopupMenu() + newitems = idle.GetMenuItems("edit") + flags=win32con.MF_STRING|win32con.MF_ENABLED + for text, event in newitems: + id = bindings.event_to_commands.get(event) + if id is not None: + keyname = pywin.scintilla.view.configManager.get_key_binding( event, ["editor"] ) + if keyname is not None: + text = text + "\t" + keyname + submenu.AppendMenu(flags, id, text) + + mainMenu = self.GetSharedMenu() + editMenu = mainMenu.GetSubMenu(1) + editMenu.AppendMenu(win32con.MF_SEPARATOR, 0, "") + editMenu.AppendMenu(win32con.MF_STRING | win32con.MF_POPUP | win32con.MF_ENABLED, submenu.GetHandle(), "&Source Code") + + def _CreateDocTemplate(self, resourceId): + return win32ui.CreateDocTemplate(resourceId) + + def CreateWin32uiDocument(self): + return self.DoCreateDoc() + + def GetPythonPropertyPages(self): + """Returns a list of property pages + """ + from pywin.scintilla import configui + return EditorTemplateBase.GetPythonPropertyPages(self) + [configui.ScintillaFormatPropertyPage()] + +# For debugging purposes, when this module may be reloaded many times. +try: + win32ui.GetApp().RemoveDocTemplate(editorTemplate) +except NameError: + pass + +editorTemplate = SyntEditTemplate() +win32ui.GetApp().AddDocTemplate(editorTemplate) diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/configui.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/configui.py new file mode 100644 index 00000000..f1b9a387 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/configui.py @@ -0,0 +1,257 @@ +from pywin.mfc import dialog +from . import document +import win32ui +import win32con +import win32api + +from pywin.framework.editor import GetEditorOption, SetEditorOption, DeleteEditorOption, GetEditorFontOption, SetEditorFontOption, defaultCharacterFormat, editorTemplate +import pywin.scintilla.config + +# The standard 16 color VGA palette should always be possible +paletteVGA = ( ("Black",0,0,0), ("Navy",0,0,128), ("Green",0,128,0), ("Cyan",0,128,128), + ("Maroon",128,0,0), ("Purple",128,0,128), ("Olive",128,128,0), ("Gray",128,128,128), + ("Silver",192,192,192), ("Blue",0,0,255), ("Lime",0,255,0), ("Aqua",0,255,255), + ("Red",255,0,0), ("Fuchsia",255,0,255), ("Yellow",255,255,0), ("White",255,255,255) ) + +###################################################### +# +# Property Page for editor options +# +class EditorPropertyPage(dialog.PropertyPage): + def __init__(self): + dialog.PropertyPage.__init__(self, win32ui.IDD_PP_EDITOR) + self.autooptions = [] + self._AddEditorOption(win32ui.IDC_AUTO_RELOAD, "i", "Auto Reload", 1) + self._AddEditorOption(win32ui.IDC_COMBO1, "i", "Backup Type", document.BAK_DOT_BAK_BAK_DIR) + self._AddEditorOption(win32ui.IDC_AUTOCOMPLETE, "i", "Autocomplete Attributes", 1) + self._AddEditorOption(win32ui.IDC_CALLTIPS, "i", "Show Call Tips", 1) + self._AddEditorOption(win32ui.IDC_MARGIN_LINENUMBER, "i", "Line Number Margin Width", 0) + self._AddEditorOption(win32ui.IDC_RADIO1, "i", "MarkersInMargin", None) + self._AddEditorOption(win32ui.IDC_MARGIN_MARKER, "i", "Marker Margin Width", None) + self["Marker Margin Width"] = GetEditorOption("Marker Margin Width", 16) + + # Folding + self._AddEditorOption(win32ui.IDC_MARGIN_FOLD, "i", "Fold Margin Width", 12) + self._AddEditorOption(win32ui.IDC_FOLD_ENABLE, "i", "Enable Folding", 1) + 
self._AddEditorOption(win32ui.IDC_FOLD_ON_OPEN, "i", "Fold On Open", 0) + self._AddEditorOption(win32ui.IDC_FOLD_SHOW_LINES, "i", "Fold Lines", 1) + + # Right edge. + self._AddEditorOption(win32ui.IDC_RIGHTEDGE_ENABLE, "i", "Right Edge Enabled", 0) + self._AddEditorOption(win32ui.IDC_RIGHTEDGE_COLUMN, "i", "Right Edge Column", 75) + + # Source control, etc + self.AddDDX(win32ui.IDC_VSS_INTEGRATE, "bVSS") + self.AddDDX(win32ui.IDC_KEYBOARD_CONFIG, "Configs", "l") + self["Configs"] = pywin.scintilla.config.find_config_files() + + + + def _AddEditorOption(self, idd, typ, optionName, defaultVal): + self.AddDDX(idd, optionName, typ) + # some options are "derived" - ie, can be implied from others + # (eg, "view markers in background" is implied from "markerMarginWidth==0" + # So we don't actually store these values, but they do still get DDX support. + if defaultVal is not None: + self[optionName] = GetEditorOption(optionName, defaultVal) + self.autooptions.append((optionName, defaultVal)) + + def OnInitDialog(self): + for name, val in self.autooptions: + self[name] = GetEditorOption(name, val) + + # Note that these MUST be in the same order as the BAK constants. + cbo = self.GetDlgItem(win32ui.IDC_COMBO1) + cbo.AddString("None") + cbo.AddString(".BAK File") + cbo.AddString("TEMP dir") + cbo.AddString("Own dir") + + # Source Safe + bVSS = GetEditorOption("Source Control Module", "") == "pywin.framework.editor.vss" + self['bVSS'] = bVSS + + edit = self.GetDlgItem(win32ui.IDC_RIGHTEDGE_SAMPLE) + edit.SetWindowText("Sample Color") + + rc = dialog.PropertyPage.OnInitDialog(self) + + try: + self.GetDlgItem(win32ui.IDC_KEYBOARD_CONFIG).SelectString(-1, GetEditorOption("Keyboard Config", "default")) + except win32ui.error: + import traceback + traceback.print_exc() + pass + + self.HookCommand(self.OnButSimple, win32ui.IDC_FOLD_ENABLE) + self.HookCommand(self.OnButSimple, win32ui.IDC_RADIO1) + self.HookCommand(self.OnButSimple, win32ui.IDC_RADIO2) + self.HookCommand(self.OnButSimple, win32ui.IDC_RIGHTEDGE_ENABLE) + self.HookCommand(self.OnButEdgeColor, win32ui.IDC_RIGHTEDGE_DEFINE) + + butMarginEnabled = self['Marker Margin Width'] > 0 + self.GetDlgItem(win32ui.IDC_RADIO1).SetCheck(butMarginEnabled) + self.GetDlgItem(win32ui.IDC_RADIO2).SetCheck(not butMarginEnabled) + + self.edgeColor = self.initialEdgeColor = GetEditorOption("Right Edge Color", win32api.RGB(0xef, 0xef, 0xef)) + for spinner_id in (win32ui.IDC_SPIN1, win32ui.IDC_SPIN2, win32ui.IDC_SPIN3): + spinner=self.GetDlgItem(spinner_id) + spinner.SetRange(0,100) + self.UpdateUIForState() + + return rc + + def OnButSimple(self, id, code): + if code == win32con.BN_CLICKED: + self.UpdateUIForState() + + def OnButEdgeColor(self, id, code): + if code == win32con.BN_CLICKED: + d = win32ui.CreateColorDialog(self.edgeColor, 0, self) + # Ensure the current color is a custom color (as it may not be in the swatch) + # plus some other nice gray scales. 
+ ccs = [self.edgeColor] + for c in range(0xef, 0x4f, -0x10): + ccs.append(win32api.RGB(c,c,c)) + d.SetCustomColors( ccs ) + if d.DoModal() == win32con.IDOK: + self.edgeColor = d.GetColor() + self.UpdateUIForState() + + def UpdateUIForState(self): + folding = self.GetDlgItem(win32ui.IDC_FOLD_ENABLE).GetCheck() + self.GetDlgItem(win32ui.IDC_FOLD_ON_OPEN).EnableWindow(folding) + self.GetDlgItem(win32ui.IDC_FOLD_SHOW_LINES).EnableWindow(folding) + + widthEnabled = self.GetDlgItem(win32ui.IDC_RADIO1).GetCheck() + self.GetDlgItem(win32ui.IDC_MARGIN_MARKER).EnableWindow(widthEnabled) + self.UpdateData() # Ensure self[] is up to date with the control data. + if widthEnabled and self["Marker Margin Width"] == 0: + self["Marker Margin Width"] = 16 + self.UpdateData(0) # Ensure control up to date with self[] + + # Right edge + edgeEnabled = self.GetDlgItem(win32ui.IDC_RIGHTEDGE_ENABLE).GetCheck() + self.GetDlgItem(win32ui.IDC_RIGHTEDGE_COLUMN).EnableWindow(edgeEnabled) + self.GetDlgItem(win32ui.IDC_RIGHTEDGE_SAMPLE).EnableWindow(edgeEnabled) + self.GetDlgItem(win32ui.IDC_RIGHTEDGE_DEFINE).EnableWindow(edgeEnabled) + + edit = self.GetDlgItem(win32ui.IDC_RIGHTEDGE_SAMPLE) + edit.SetBackgroundColor(0, self.edgeColor) + + def OnOK(self): + for name, defVal in self.autooptions: + SetEditorOption(name, self[name]) + # Margin width gets handled differently. + if self['MarkersInMargin'] == 0: + SetEditorOption("Marker Margin Width", self["Marker Margin Width"]) + else: + SetEditorOption("Marker Margin Width", 0) + if self.edgeColor != self.initialEdgeColor: + SetEditorOption("Right Edge Color", self.edgeColor) + if self['bVSS']: + SetEditorOption("Source Control Module", "pywin.framework.editor.vss") + else: + if GetEditorOption("Source Control Module", "")=='pywin.framework.editor.vss': + SetEditorOption("Source Control Module", "") + # Keyboard config + configname = self.GetDlgItem(win32ui.IDC_KEYBOARD_CONFIG).GetWindowText() + if configname: + if configname == "default": + DeleteEditorOption("Keyboard Config") + else: + SetEditorOption("Keyboard Config", configname) + + import pywin.scintilla.view + pywin.scintilla.view.LoadConfiguration() + + # Now tell all views we have changed. 
+## for doc in editorTemplate.GetDocumentList(): +## for view in doc.GetAllViews(): +## try: +## fn = view.OnConfigChange +## except AttributeError: +## continue +## fn() + return 1 + +class EditorWhitespacePropertyPage(dialog.PropertyPage): + def __init__(self): + dialog.PropertyPage.__init__(self, win32ui.IDD_PP_TABS) + self.autooptions = [] + self._AddEditorOption(win32ui.IDC_TAB_SIZE, "i", "Tab Size", 4) + self._AddEditorOption(win32ui.IDC_INDENT_SIZE, "i", "Indent Size", 4) + self._AddEditorOption(win32ui.IDC_USE_SMART_TABS, "i", "Smart Tabs", 1) + self._AddEditorOption(win32ui.IDC_VIEW_WHITESPACE, "i", "View Whitespace", 0) + self._AddEditorOption(win32ui.IDC_VIEW_EOL, "i", "View EOL", 0) + self._AddEditorOption(win32ui.IDC_VIEW_INDENTATIONGUIDES, "i", "View Indentation Guides", 0) + + def _AddEditorOption(self, idd, typ, optionName, defaultVal): + self.AddDDX(idd, optionName, typ) + self[optionName] = GetEditorOption(optionName, defaultVal) + self.autooptions.append((optionName, defaultVal)) + + def OnInitDialog(self): + for name, val in self.autooptions: + self[name] = GetEditorOption(name, val) + + rc = dialog.PropertyPage.OnInitDialog(self) + + idc = win32ui.IDC_TABTIMMY_NONE + if GetEditorOption("Use Tab Timmy", 1): + idc = win32ui.IDC_TABTIMMY_IND + self.GetDlgItem(idc).SetCheck(1) + + idc = win32ui.IDC_RADIO1 + if GetEditorOption("Use Tabs", 0): + idc = win32ui.IDC_USE_TABS + self.GetDlgItem(idc).SetCheck(1) + + tt_color = GetEditorOption("Tab Timmy Color", win32api.RGB(0xff, 0, 0)) + self.cbo = self.GetDlgItem(win32ui.IDC_COMBO1) + for c in paletteVGA: + self.cbo.AddString(c[0]) + sel = 0 + for c in paletteVGA: + if tt_color == win32api.RGB(c[1], c[2], c[3]): + break + sel = sel + 1 + else: + sel = -1 + self.cbo.SetCurSel(sel) + self.HookCommand(self.OnButSimple, win32ui.IDC_TABTIMMY_NONE) + self.HookCommand(self.OnButSimple, win32ui.IDC_TABTIMMY_IND) + self.HookCommand(self.OnButSimple, win32ui.IDC_TABTIMMY_BG) + # Set ranges for the spinners. + for spinner_id in [win32ui.IDC_SPIN1, win32ui.IDC_SPIN2]: + spinner = self.GetDlgItem(spinner_id) + spinner.SetRange(1, 16) + return rc + + def OnButSimple(self, id, code): + if code == win32con.BN_CLICKED: + self.UpdateUIForState() + + def UpdateUIForState(self): + timmy = self.GetDlgItem(win32ui.IDC_TABTIMMY_NONE).GetCheck() + self.GetDlgItem(win32ui.IDC_COMBO1).EnableWindow(not timmy) + + def OnOK(self): + for name, defVal in self.autooptions: + SetEditorOption(name, self[name]) + + SetEditorOption("Use Tabs", self.GetDlgItem(win32ui.IDC_USE_TABS).GetCheck()) + + SetEditorOption("Use Tab Timmy", self.GetDlgItem(win32ui.IDC_TABTIMMY_IND).GetCheck()) + c = paletteVGA[self.cbo.GetCurSel()] + SetEditorOption("Tab Timmy Color", win32api.RGB(c[1], c[2], c[3])) + + return 1 + +def testpp(): + ps = dialog.PropertySheet("Editor Options") + ps.AddPage(EditorWhitespacePropertyPage()) + ps.DoModal() + +if __name__=='__main__': + testpp() \ No newline at end of file diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/document.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/document.py new file mode 100644 index 00000000..c2c89e40 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/document.py @@ -0,0 +1,332 @@ +# We no longer support the old, non-colour editor! 
+ +from pywin.mfc import docview, object +from pywin.framework.editor import GetEditorOption +import win32ui +import os +import win32con +import string +import traceback +import win32api +import shutil + +BAK_NONE=0 +BAK_DOT_BAK=1 +BAK_DOT_BAK_TEMP_DIR=2 +BAK_DOT_BAK_BAK_DIR=3 + +MSG_CHECK_EXTERNAL_FILE = win32con.WM_USER+1999 ## WARNING: Duplicated in editor.py and coloreditor.py + +import pywin.scintilla.document +ParentEditorDocument=pywin.scintilla.document.CScintillaDocument +class EditorDocumentBase(ParentEditorDocument): + def __init__(self, template): + self.bAutoReload = GetEditorOption("Auto Reload", 1) + self.bDeclinedReload = 0 # Has the user declined to reload. + self.fileStat = None + self.bReportedFileNotFound = 0 + + # what sort of bak file should I create. + # default to write to %temp%/bak/filename.ext + self.bakFileType=GetEditorOption("Backup Type", BAK_DOT_BAK_BAK_DIR) + + self.watcherThread = FileWatchingThread(self) + self.watcherThread.CreateThread() + # Should I try and use VSS integration? + self.scModuleName=GetEditorOption("Source Control Module", "") + self.scModule = None # Loaded when first used. + ParentEditorDocument.__init__(self, template, template.CreateWin32uiDocument()) + + def OnCloseDocument(self ): + self.watcherThread.SignalStop() + return self._obj_.OnCloseDocument() + +# def OnOpenDocument(self, name): +# rc = ParentEditorDocument.OnOpenDocument(self, name) +# self.GetFirstView()._SetLoadedText(self.text) +# self._DocumentStateChanged() +# return rc + + def OnSaveDocument( self, fileName ): + win32ui.SetStatusText("Saving file...",1) + # rename to bak if required. + dir, basename = os.path.split(fileName) + if self.bakFileType==BAK_DOT_BAK: + bakFileName=dir+'\\'+os.path.splitext(basename)[0]+'.bak' + elif self.bakFileType==BAK_DOT_BAK_TEMP_DIR: + bakFileName=win32api.GetTempPath()+'\\'+os.path.splitext(basename)[0]+'.bak' + elif self.bakFileType==BAK_DOT_BAK_BAK_DIR: + tempPath=os.path.join(win32api.GetTempPath(),'bak') + try: + os.mkdir(tempPath,0) + except os.error: + pass + bakFileName=os.path.join(tempPath,basename) + try: + os.unlink(bakFileName) # raise NameError if no bakups wanted. + except (os.error, NameError): + pass + try: + # Do a copy as it might be on different volumes, + # and the file may be a hard-link, causing the link + # to follow the backup. + shutil.copy2(fileName, bakFileName) + except (os.error, NameError, IOError): + pass + try: + self.SaveFile(fileName) + except IOError as details: + win32ui.MessageBox("Error - could not save file\r\n\r\n%s"%details) + return 0 + except (UnicodeEncodeError, LookupError) as details: + rc = win32ui.MessageBox("Encoding failed: \r\n%s"%details + + '\r\nPlease add desired source encoding as first line of file, eg \r\n' + + '# -*- coding: mbcs -*-\r\n\r\n' + + 'If you continue, the file will be saved as binary and will\r\n' + + 'not be valid in the declared encoding.\r\n\r\n' + + 'Save the file as binary with an invalid encoding?', + "File save failed", + win32con.MB_YESNO | win32con.MB_DEFBUTTON2) + if rc==win32con.IDYES: + try: + self.SaveFile(fileName, encoding="latin-1") + except IOError as details: + win32ui.MessageBox("Error - could not save file\r\n\r\n%s"%details) + return 0 + else: + return 0 + self.SetModifiedFlag(0) # No longer dirty + self.bDeclinedReload = 0 # They probably want to know if it changes again! 
+ win32ui.AddToRecentFileList(fileName) + self.SetPathName(fileName) + win32ui.SetStatusText("Ready") + self._DocumentStateChanged() + return 1 + + def FinalizeViewCreation(self, view): + ParentEditorDocument.FinalizeViewCreation(self, view) + if view == self.GetFirstView(): + self._DocumentStateChanged() + if view.bFolding and GetEditorOption("Fold On Open", 0): + view.FoldTopLevelEvent() + + def HookViewNotifications(self, view): + ParentEditorDocument.HookViewNotifications(self, view) + + # Support for reloading the document from disk - presumably after some + # external application has modified it (or possibly source control has + # checked it out. + def ReloadDocument(self): + """Reloads the document from disk. Assumes the file has + been saved and user has been asked if necessary - it just does it! + """ + win32ui.SetStatusText("Reloading document. Please wait...", 1) + self.SetModifiedFlag(0) + # Loop over all views, saving their state, then reload the document + views = self.GetAllViews() + states = [] + for view in views: + try: + info = view._PrepareUserStateChange() + except AttributeError: # Not our editor view? + info = None + states.append(info) + self.OnOpenDocument(self.GetPathName()) + for view, info in zip(views, states): + if info is not None: + view._EndUserStateChange(info) + self._DocumentStateChanged() + win32ui.SetStatusText("Document reloaded.") + + # Reloading the file + def CheckExternalDocumentUpdated(self): + if self.bDeclinedReload or not self.GetPathName(): + return + try: + newstat = os.stat(self.GetPathName()) + except os.error as exc: + if not self.bReportedFileNotFound: + print("The file '%s' is open for editing, but\nchecking it for changes caused the error: %s" % (self.GetPathName(), exc.strerror)) + self.bReportedFileNotFound = 1 + return + if self.bReportedFileNotFound: + print("The file '%s' has re-appeared - continuing to watch for changes..." % (self.GetPathName(),)) + self.bReportedFileNotFound = 0 # Once found again we want to start complaining. + changed = (self.fileStat is None) or \ + self.fileStat[0] != newstat[0] or \ + self.fileStat[6] != newstat[6] or \ + self.fileStat[8] != newstat[8] or \ + self.fileStat[9] != newstat[9] + if changed: + question = None + if self.IsModified(): + question = "%s\r\n\r\nThis file has been modified outside of the source editor.\r\nDo you want to reload it and LOSE THE CHANGES in the source editor?" % self.GetPathName() + mbStyle = win32con.MB_YESNO | win32con.MB_DEFBUTTON2 # Default to "No" + else: + if not self.bAutoReload: + question = "%s\r\n\r\nThis file has been modified outside of the source editor.\r\nDo you want to reload it?" % self.GetPathName() + mbStyle = win32con.MB_YESNO # Default to "Yes" + if question: + rc = win32ui.MessageBox(question, None, mbStyle) + if rc!=win32con.IDYES: + self.bDeclinedReload = 1 + return + self.ReloadDocument() + + def _DocumentStateChanged(self): + """Called whenever the documents state (on disk etc) has been changed + by the editor (eg, as the result of a save operation) + """ + if self.GetPathName(): + try: + self.fileStat = os.stat(self.GetPathName()) + except os.error: + self.fileStat = None + else: + self.fileStat = None + self.watcherThread._DocumentStateChanged() + self._UpdateUIForState() + self._ApplyOptionalToViews("_UpdateUIForState") + self._ApplyOptionalToViews("SetReadOnly", self._IsReadOnly()) + self._ApplyOptionalToViews("SCISetSavePoint") + # Allow the debugger to reset us too. 
+ import pywin.debugger + if pywin.debugger.currentDebugger is not None: + pywin.debugger.currentDebugger.UpdateDocumentLineStates(self) + + # Read-only document support - make it obvious to the user + # that the file is read-only. + def _IsReadOnly(self): + return self.fileStat is not None and (self.fileStat[0] & 128)==0 + + def _UpdateUIForState(self): + """Change the title to reflect the state of the document - + eg ReadOnly, Dirty, etc + """ + filename = self.GetPathName() + if not filename: return # New file - nothing to do + try: + # This seems necessary so the internal state of the window becomes + # "visible". without it, it is still shown, but certain functions + # (such as updating the title) dont immediately work? + self.GetFirstView().ShowWindow(win32con.SW_SHOW) + title = win32ui.GetFileTitle(filename) + except win32ui.error: + title = filename + if self._IsReadOnly(): + title = title + " (read-only)" + self.SetTitle(title) + + def MakeDocumentWritable(self): + pretend_ss = 0 # Set to 1 to test this without source safe :-) + if not self.scModuleName and not pretend_ss: # No Source Control support. + win32ui.SetStatusText("Document is read-only, and no source-control system is configured") + win32api.MessageBeep() + return 0 + + # We have source control support - check if the user wants to use it. + msg = "Would you like to check this file out?" + defButton = win32con.MB_YESNO + if self.IsModified(): + msg = msg + "\r\n\r\nALL CHANGES IN THE EDITOR WILL BE LOST" + defButton = win32con.MB_YESNO + if win32ui.MessageBox(msg, None, defButton)!=win32con.IDYES: + return 0 + + if pretend_ss: + print("We are only pretending to check it out!") + win32api.SetFileAttributes(self.GetPathName(), win32con.FILE_ATTRIBUTE_NORMAL) + self.ReloadDocument() + return 1 + + # Now call on the module to do it. + if self.scModule is None: + try: + self.scModule = __import__(self.scModuleName) + for part in self.scModuleName.split('.')[1:]: + self.scModule = getattr(self.scModule, part) + except: + traceback.print_exc() + print("Error loading source control module.") + return 0 + + if self.scModule.CheckoutFile(self.GetPathName()): + self.ReloadDocument() + return 1 + return 0 + + def CheckMakeDocumentWritable(self): + if self._IsReadOnly(): + return self.MakeDocumentWritable() + return 1 + + def SaveModified(self): + # Called as the document is closed. If we are about + # to prompt for a save, bring the document to the foreground. + if self.IsModified(): + frame = self.GetFirstView().GetParentFrame() + try: + frame.MDIActivate() + frame.AutoRestore() + except: + print("Could not bring document to foreground") + return self._obj_.SaveModified() + +# NOTE - I DONT use the standard threading module, +# as this waits for all threads to terminate at shutdown. 
+# When using the debugger, it is possible shutdown will +# occur without Pythonwin getting a complete shutdown, +# so we deadlock at the end - threading is waiting for +import pywin.mfc.thread +import win32event +class FileWatchingThread(pywin.mfc.thread.WinThread): + def __init__(self, doc): + self.doc = doc + self.adminEvent = win32event.CreateEvent(None, 0, 0, None) + self.stopEvent = win32event.CreateEvent(None, 0, 0, None) + self.watchEvent = None + pywin.mfc.thread.WinThread.__init__(self) + + def _DocumentStateChanged(self): + win32event.SetEvent(self.adminEvent) + + def RefreshEvent(self): + self.hwnd = self.doc.GetFirstView().GetSafeHwnd() + if self.watchEvent is not None: + win32api.FindCloseChangeNotification(self.watchEvent) + self.watchEvent = None + path = self.doc.GetPathName() + if path: path = os.path.dirname(path) + if path: + filter = win32con.FILE_NOTIFY_CHANGE_FILE_NAME | \ + win32con.FILE_NOTIFY_CHANGE_ATTRIBUTES | \ + win32con.FILE_NOTIFY_CHANGE_LAST_WRITE + try: + self.watchEvent = win32api.FindFirstChangeNotification(path, 0, filter) + except win32api.error as exc: + print("Can not watch file", path, "for changes -", exc.strerror) + def SignalStop(self): + win32event.SetEvent(self.stopEvent) + def Run(self): + while 1: + handles = [self.stopEvent, self.adminEvent] + if self.watchEvent is not None: + handles.append(self.watchEvent) + rc = win32event.WaitForMultipleObjects(handles, 0, win32event.INFINITE) + if rc == win32event.WAIT_OBJECT_0: + break + elif rc == win32event.WAIT_OBJECT_0+1: + self.RefreshEvent() + else: + win32api.PostMessage(self.hwnd, MSG_CHECK_EXTERNAL_FILE, 0, 0) + try: + # If the directory has been removed underneath us, we get this error. + win32api.FindNextChangeNotification(self.watchEvent) + except win32api.error as exc: + print("Can not watch file", self.doc.GetPathName(), "for changes -", exc.strerror) + break + + # close a circular reference + self.doc = None + if self.watchEvent: + win32api.FindCloseChangeNotification(self.watchEvent) diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/editor.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/editor.py new file mode 100644 index 00000000..9547a784 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/editor.py @@ -0,0 +1,465 @@ +##################################################################### +# +# editor.py +# +# A general purpose text editor, built on top of the win32ui edit +# type, which is built on an MFC CEditView +# +# +# We now support reloading of externally modified documented +# (eg, presumably by some other process, such as source control or +# another editor. +# We also suport auto-loading of externally modified files. +# - if the current document has not been modified in this +# editor, but has been modified on disk, then the file +# can be automatically reloaded. +# +# Note that it will _always_ prompt you if the file in the editor has been modified. 
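+#
+# A rough sketch of that reload decision (illustrative only - names such as
+# file_changed_on_disk and ask_user are placeholders; the real logic is
+# CheckExternalDocumentUpdated in document.py, earlier in this diff):
+#
+#   if file_changed_on_disk:
+#       if doc.IsModified():          # dirty in the editor - always ask
+#           reload = ask_user("... LOSE THE CHANGES ...?")
+#       elif doc.bAutoReload:         # "Auto Reload" editor option
+#           reload = True             # silent reload
+#       else:
+#           reload = ask_user("... reload it?")
+#       if reload:
+#           doc.ReloadDocument()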
+ + +import win32ui +import win32api +import win32con +import regex +import re +import string +import sys, os +import traceback +from pywin.mfc import docview, dialog, afxres + +from pywin.framework.editor import GetEditorOption, SetEditorOption, GetEditorFontOption, SetEditorFontOption, defaultCharacterFormat + +patImport=regex.symcomp('import \(.*\)') +patIndent=regex.compile('^\\([ \t]*[~ \t]\\)') + +ID_LOCATE_FILE = 0xe200 +ID_GOTO_LINE = 0xe2001 +MSG_CHECK_EXTERNAL_FILE = win32con.WM_USER+1999 ## WARNING: Duplicated in document.py and coloreditor.py + +# Key Codes that modify the bufffer when Ctrl or Alt are NOT pressed. +MODIFYING_VK_KEYS = [win32con.VK_BACK, win32con.VK_TAB, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE] +for k in range(48, 91): + MODIFYING_VK_KEYS.append(k) + +# Key Codes that modify the bufffer when Ctrl is pressed. +MODIFYING_VK_KEYS_CTRL = [win32con.VK_BACK, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE] + +# Key Codes that modify the bufffer when Alt is pressed. +MODIFYING_VK_KEYS_ALT = [win32con.VK_BACK, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE] + + +# The editor itself starts here. +# Using the MFC Document/View model, we have an EditorDocument, which is responsible for +# managing the contents of the file, and a view which is responsible for rendering it. +# +# Due to a limitation in the Windows edit controls, we are limited to one view +# per document, although nothing in this code assumes this (I hope!) + +isRichText=1 # We are using the Rich Text control. This has not been tested with value "0" for quite some time! + +#ParentEditorDocument=docview.Document +from .document import EditorDocumentBase +ParentEditorDocument=EditorDocumentBase +class EditorDocument(ParentEditorDocument): + # + # File loading and saving operations + # + def OnOpenDocument(self, filename): + # + # handle Unix and PC text file format. + # + + # Get the "long name" of the file name, as it may have been translated + # to short names by the shell. + self.SetPathName(filename) # Must set this early! + # Now do the work! + self.BeginWaitCursor() + win32ui.SetStatusText("Loading file...",1) + try: + f = open(filename,"rb") + except IOError: + win32ui.MessageBox(filename + '\nCan not find this file\nPlease verify that the correct path and file name are given') + self.EndWaitCursor() + return 0 + raw=f.read() + f.close() + contents = self.TranslateLoadedData(raw) + rc = 0 + if win32ui.IsWin32s() and len(contents)>62000: # give or take a few bytes + win32ui.MessageBox("This file is too big for Python on Windows 3.1\r\nPlease use another editor to view this file.") + else: + try: + self.GetFirstView().SetWindowText(contents) + rc = 1 + except TypeError: # Null byte in file. + win32ui.MessageBox("This file contains NULL bytes, and can not be edited") + rc = 0 + + self.EndWaitCursor() + self.SetModifiedFlag(0) # No longer dirty + self._DocumentStateChanged() + return rc + + def TranslateLoadedData(self, data): + """Given raw data read from a file, massage it suitable for the edit window""" + # if a CR in the first 250 chars, then perform the expensive translate + if data[:250].find('\r')==-1: + win32ui.SetStatusText("Translating from Unix file format - please wait...",1) + return re.sub('\r*\n','\r\n',data) + else: + return data + + def SaveFile(self, fileName, encoding=None): + if isRichText: + view = self.GetFirstView() + view.SaveTextFile(fileName, encoding=encoding) + else: # Old style edit view window. 
+ self.GetFirstView().SaveFile(fileName) + try: + # Make sure line cache has updated info about me! + import linecache + linecache.checkcache() + except: + pass + + # + # Color state stuff + # + def SetAllLineColors(self, color = None): + for view in self.GetAllViews(): + view.SetAllLineColors(color) + + def SetLineColor(self, lineNo, color): + "Color a line of all views" + for view in self.GetAllViews(): + view.SetLineColor(lineNo, color) + + +# def StreamTextOut(self, data): ### This seems unreliable??? +# self.saveFileHandle.write(data) +# return 1 # keep em coming! + +#ParentEditorView=docview.EditView +ParentEditorView=docview.RichEditView +class EditorView(ParentEditorView): + def __init__(self, doc): + ParentEditorView.__init__(self, doc) + if isRichText: + self.SetWordWrap(win32ui.CRichEditView_WrapNone) + + self.addToMRU = 1 + self.HookHandlers() + self.bCheckingFile = 0 + + self.defCharFormat = GetEditorFontOption("Default Font", defaultCharacterFormat) + + # Smart tabs override everything else if context can be worked out. + self.bSmartTabs = GetEditorOption("Smart Tabs", 1) + + self.tabSize = GetEditorOption("Tab Size", 8) + self.indentSize = GetEditorOption("Indent Size", 8) + # If next indent is at a tab position, and useTabs is set, a tab will be inserted. + self.bUseTabs = GetEditorOption("Use Tabs", 1) + + def OnInitialUpdate(self): + rc = self._obj_.OnInitialUpdate() + self.SetDefaultCharFormat(self.defCharFormat) + return rc + + def CutCurLine(self): + curLine = self._obj_.LineFromChar() + nextLine = curLine+1 + start = self._obj_.LineIndex(curLine) + end = self._obj_.LineIndex(nextLine) + if end==0: # must be last line. + end = start + self.end.GetLineLength(curLine) + self._obj_.SetSel(start,end) + self._obj_.Cut() + def _PrepareUserStateChange(self): + "Return selection, lineindex, etc info, so it can be restored" + self.SetRedraw(0) + return self.GetModify(), self.GetSel(), self.GetFirstVisibleLine() + def _EndUserStateChange(self, info): + scrollOff = info[2] - self.GetFirstVisibleLine() + if scrollOff: + self.LineScroll(scrollOff) + self.SetSel(info[1]) + self.SetModify(info[0]) + self.SetRedraw(1) + self.InvalidateRect() + self.UpdateWindow() + + def _UpdateUIForState(self): + self.SetReadOnly(self.GetDocument()._IsReadOnly()) + + def SetAllLineColors(self, color = None): + if isRichText: + info = self._PrepareUserStateChange() + try: + if color is None: color = self.defCharFormat[4] + self.SetSel(0,-1) + self.SetSelectionCharFormat((win32con.CFM_COLOR, 0,0,0,color)) + finally: + self._EndUserStateChange(info) + + def SetLineColor(self, lineNo, color): + "lineNo is the 1 based line number to set. If color is None, default color is used." + if isRichText: + info = self._PrepareUserStateChange() + try: + if color is None: color = self.defCharFormat[4] + lineNo = lineNo-1 + startIndex = self.LineIndex(lineNo) + if startIndex!=-1: + self.SetSel(startIndex, self.LineIndex(lineNo+1)) + self.SetSelectionCharFormat((win32con.CFM_COLOR, 0,0,0,color)) + finally: + self._EndUserStateChange(info) + + def Indent(self): + """Insert an indent to move the cursor to the next tab position. + + Honors the tab size and 'use tabs' settings. Assumes the cursor is already at the + position to be indented, and the selection is a single character (ie, not a block) + """ + start, end = self._obj_.GetSel() + startLine = self._obj_.LineFromChar(start) + line = self._obj_.GetLine(startLine) + realCol = start - self._obj_.LineIndex(startLine) + # Calulate the next tab stop. 
+ # Expand existing tabs. + curCol = 0 + for ch in line[:realCol]: + if ch=='\t': + curCol = ((curCol / self.tabSize) + 1) * self.tabSize + else: + curCol = curCol + 1 + nextColumn = ((curCol / self.indentSize) + 1) * self.indentSize +# print "curCol is", curCol, "nextColumn is", nextColumn + ins = None + if self.bSmartTabs: + # Look for some context. + if realCol==0: # Start of the line - see if the line above can tell us + lookLine = startLine-1 + while lookLine >= 0: + check = self._obj_.GetLine(lookLine)[0:1] + if check in ['\t', ' ']: + ins = check + break + lookLine = lookLine - 1 + else: # See if the previous char can tell us + check = line[realCol-1] + if check in ['\t', ' ']: + ins = check + + # Either smart tabs off, or not smart enough! + # Use the "old style" settings. + if ins is None: + if self.bUseTabs and nextColumn % self.tabSize==0: + ins = '\t' + else: + ins = ' ' + + if ins == ' ': + # Calc the number of spaces to take us to the next stop + ins = ins * (nextColumn - curCol) + + self._obj_.ReplaceSel(ins) + + + def BlockDent(self, isIndent, startLine, endLine): + " Indent/Undent all lines specified " + if not self.GetDocument().CheckMakeDocumentWritable(): return 0 + tabSize=self.tabSize # hard-code for now! + info = self._PrepareUserStateChange() + try: + for lineNo in range(startLine, endLine): + pos=self._obj_.LineIndex(lineNo) + self._obj_.SetSel(pos, pos) + if isIndent: + self.Indent() + else: + line = self._obj_.GetLine(lineNo) + try: + noToDel = 0 + if line[0]=='\t': + noToDel = 1 + elif line[0]==' ': + for noToDel in range(0,tabSize): + if line[noToDel]!=' ': + break + else: + noToDel=tabSize + if noToDel: + self._obj_.SetSel(pos, pos+noToDel) + self._obj_.Clear() + except IndexError: + pass + finally: + self._EndUserStateChange(info) + self.GetDocument().SetModifiedFlag(1) # Now dirty + self._obj_.SetSel(self.LineIndex(startLine), self.LineIndex(endLine)) + + def GotoLine(self, lineNo = None): + try: + if lineNo is None: + lineNo = int(input("Enter Line Number")) + except (ValueError, KeyboardInterrupt): + return 0 + self.GetLineCount() # Seems to be needed when file first opened??? + charNo = self.LineIndex(lineNo-1) + self.SetSel(charNo) + + def HookHandlers(self): # children can override, but should still call me! +# self.HookAllKeyStrokes(self.OnKey) + self.HookMessage(self.OnCheckExternalDocumentUpdated,MSG_CHECK_EXTERNAL_FILE) + self.HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN) + self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS) + self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN) + self.HookKeyStroke(self.OnKeyCtrlY, 25) # ^Y + self.HookKeyStroke(self.OnKeyCtrlG, 7) # ^G + self.HookKeyStroke(self.OnKeyTab, 9) # TAB + self.HookKeyStroke(self.OnKeyEnter, 13) # Enter + self.HookCommand(self.OnCmdLocateFile, ID_LOCATE_FILE) + self.HookCommand(self.OnCmdGotoLine, ID_GOTO_LINE) + self.HookCommand(self.OnEditPaste, afxres.ID_EDIT_PASTE) + self.HookCommand(self.OnEditCut, afxres.ID_EDIT_CUT) + + # Hook Handlers + def OnSetFocus(self,msg): + # Even though we use file change notifications, we should be very sure about it here. 
+ self.OnCheckExternalDocumentUpdated(msg) + + def OnRClick(self,params): + menu = win32ui.CreatePopupMenu() + + # look for a module name + line=self._obj_.GetLine().strip() + flags=win32con.MF_STRING|win32con.MF_ENABLED + if patImport.match(line)==len(line): + menu.AppendMenu(flags, ID_LOCATE_FILE, "&Locate %s.py"%patImport.group('name')) + menu.AppendMenu(win32con.MF_SEPARATOR); + menu.AppendMenu(flags, win32ui.ID_EDIT_UNDO, '&Undo') + menu.AppendMenu(win32con.MF_SEPARATOR); + menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, 'Cu&t') + menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, '&Copy') + menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, '&Paste') + menu.AppendMenu(flags, win32con.MF_SEPARATOR); + menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all') + menu.AppendMenu(flags, win32con.MF_SEPARATOR); + menu.AppendMenu(flags, ID_GOTO_LINE, '&Goto line...') + menu.TrackPopupMenu(params[5]) + return 0 + + def OnCmdGotoLine(self, cmd, code): + self.GotoLine() + return 0 + + def OnCmdLocateFile(self, cmd, code): + modName = patImport.group('name') + if not modName: + return 0 + import pywin.framework.scriptutils + fileName = pywin.framework.scriptutils.LocatePythonFile(modName) + if fileName is None: + win32ui.SetStatusText("Can't locate module %s" % modName) + else: + win32ui.GetApp().OpenDocumentFile(fileName) + return 0 + + # Key handlers + def OnKeyEnter(self, key): + if not self.GetDocument().CheckMakeDocumentWritable(): return 0 + curLine = self._obj_.GetLine() + self._obj_.ReplaceSel('\r\n') # insert the newline + # If the current line indicates the next should be indented, + # then copy the current indentation to this line. + res = patIndent.match(curLine,0) + if res>0 and curLine.strip(): + curIndent = patIndent.group(1) + self._obj_.ReplaceSel(curIndent) + return 0 # dont pass on + + def OnKeyCtrlY(self, key): + if not self.GetDocument().CheckMakeDocumentWritable(): return 0 + self.CutCurLine() + return 0 # dont let him have it! + def OnKeyCtrlG(self, key): + self.GotoLine() + return 0 # dont let him have it! + def OnKeyTab(self, key): + if not self.GetDocument().CheckMakeDocumentWritable(): return 0 + start, end = self._obj_.GetSel() + if start==end: # normal TAB key + self.Indent() + return 0 # we handled this. + + # Otherwise it is a block indent/dedent. + if start>end: + start, end = end, start # swap them. + startLine = self._obj_.LineFromChar(start) + endLine = self._obj_.LineFromChar(end) + + self.BlockDent(win32api.GetKeyState(win32con.VK_SHIFT)>=0, startLine, endLine) + return 0 + + + def OnEditPaste(self, id, code): + # Return 1 if we can make the file editable.(or it already is!) + return self.GetDocument().CheckMakeDocumentWritable() + + def OnEditCut(self, id, code): + # Return 1 if we can make the file editable.(or it already is!) + return self.GetDocument().CheckMakeDocumentWritable() + + def OnKeyDown(self, msg): + key = msg[2] + if win32api.GetKeyState(win32con.VK_CONTROL) & 0x8000: + modList = MODIFYING_VK_KEYS_CTRL + elif win32api.GetKeyState(win32con.VK_MENU) & 0x8000: + modList = MODIFYING_VK_KEYS_ALT + else: + modList = MODIFYING_VK_KEYS + + if key in modList: + # Return 1 if we can make the file editable.(or it already is!) 
+ return self.GetDocument().CheckMakeDocumentWritable() + return 1 # Pass it on OK + +# def OnKey(self, key): +# return self.GetDocument().CheckMakeDocumentWritable() + + def OnCheckExternalDocumentUpdated(self, msg): + if self._obj_ is None or self.bCheckingFile: return + self.bCheckingFile = 1 + self.GetDocument().CheckExternalDocumentUpdated() + self.bCheckingFile = 0 + +from .template import EditorTemplateBase +class EditorTemplate(EditorTemplateBase): + def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None): + if makeDoc is None: makeDoc = EditorDocument + if makeView is None: makeView = EditorView + EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView) + + def _CreateDocTemplate(self, resourceId): + return win32ui.CreateRichEditDocTemplate(resourceId) + + def CreateWin32uiDocument(self): + return self.DoCreateRichEditDoc() + +def Create(fileName = None, title=None, template = None): + return editorTemplate.OpenDocumentFile(fileName) + +from pywin.framework.editor import GetDefaultEditorModuleName +prefModule = GetDefaultEditorModuleName() +# Initialize only if this is the "default" editor. +if __name__==prefModule: + # For debugging purposes, when this module may be reloaded many times. + try: + win32ui.GetApp().RemoveDocTemplate(editorTemplate) + except (NameError, win32ui.error): + pass + + editorTemplate = EditorTemplate() + win32ui.GetApp().AddDocTemplate(editorTemplate) diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/frame.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/frame.py new file mode 100644 index 00000000..ad6375a4 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/frame.py @@ -0,0 +1,76 @@ +# frame.py - The MDI frame window for an editor. +import pywin.framework.window +import win32ui +import win32con +import afxres + +from . import ModuleBrowser + +class EditorFrame(pywin.framework.window.MDIChildWnd): + def OnCreateClient(self, cp, context): + + # Create the default view as specified by the template (ie, the editor view) + view = context.template.MakeView(context.doc) + # Create the browser view. + browserView = ModuleBrowser.BrowserView(context.doc) + view2 = context.template.MakeView(context.doc) + + splitter = win32ui.CreateSplitter() + style = win32con.WS_CHILD | win32con.WS_VISIBLE + splitter.CreateStatic (self, 1, 2, style, win32ui.AFX_IDW_PANE_FIRST) + sub_splitter = self.sub_splitter = win32ui.CreateSplitter() + sub_splitter.CreateStatic (splitter, 2, 1, style, win32ui.AFX_IDW_PANE_FIRST+1) + + # Note we must add the default view first, so that doc.GetFirstView() returns the editor view. + sub_splitter.CreateView(view, 1, 0, (0,0)) + splitter.CreateView (browserView, 0, 0, (0,0)) + sub_splitter.CreateView(view2,0, 0, (0,0)) + +## print "First view is", context.doc.GetFirstView() +## print "Views are", view, view2, browserView +## print "Parents are", view.GetParent(), view2.GetParent(), browserView.GetParent() +## print "Splitter is", splitter +## print "sub splitter is", sub_splitter + ## Old +## splitter.CreateStatic (self, 1, 2) +## splitter.CreateView(view, 0, 1, (0,0)) # size ignored. 
+## splitter.CreateView (browserView, 0, 0, (0, 0)) + + # Restrict the size of the browser splitter (and we can avoid filling + # it until it is shown) + splitter.SetColumnInfo(0, 10, 20) + # And the active view is our default view (so it gets initial focus) + self.SetActiveView(view) + + def GetEditorView(self): + # In a multi-view (eg, splitter) environment, get + # an editor (ie, scintilla) view + # Look for the splitter opened the most! + if self.sub_splitter is None: + return self.GetDlgItem(win32ui.AFX_IDW_PANE_FIRST) + v1 = self.sub_splitter.GetPane(0,0) + v2 = self.sub_splitter.GetPane(1,0) + r1 = v1.GetWindowRect() + r2 = v2.GetWindowRect() + if r1[3]-r1[1] > r2[3]-r2[1]: + return v1 + return v2 + + def GetBrowserView(self): + # XXX - should fix this :-) + return self.GetActiveDocument().GetAllViews()[1] + + def OnClose(self): + doc=self.GetActiveDocument() + if not doc.SaveModified(): + ## Cancel button selected from Save dialog, do not actually close + ## print 'close cancelled' + return 0 + ## So the 'Save' dialog doesn't come up twice + doc._obj_.SetModifiedFlag(False) + + # Must force the module browser to close itself here (OnDestroy for the view itself is too late!) + self.sub_splitter = None # ensure no circles! + self.GetBrowserView().DestroyBrowser() + return self._obj_.OnClose() + \ No newline at end of file diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/template.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/template.py new file mode 100644 index 00000000..afa4b0a5 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/template.py @@ -0,0 +1,50 @@ +import string +import win32ui +import win32api +from pywin.mfc import docview +import pywin.framework.window +import os +from . import frame + +ParentEditorTemplate=docview.DocTemplate +class EditorTemplateBase(ParentEditorTemplate): + def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None): + if makeFrame is None: makeFrame = frame.EditorFrame + ParentEditorTemplate.__init__(self, res, makeDoc, makeFrame, makeView) + + def _CreateDocTemplate(self, resourceId): + assert 0, "You must override this" + def CreateWin32uiDocument(self): + assert 0, "You must override this" + def GetFileExtensions(self): + return ".txt", ".py" + def MatchDocType(self, fileName, fileType): + doc = self.FindOpenDocument(fileName) + if doc: return doc + ext = os.path.splitext(fileName)[1].lower() + if ext in self.GetFileExtensions(): + return win32ui.CDocTemplate_Confidence_yesAttemptNative + return win32ui.CDocTemplate_Confidence_maybeAttemptForeign + + def InitialUpdateFrame(self, frame, doc, makeVisible=1): + self._obj_.InitialUpdateFrame(frame, doc, makeVisible) # call default handler. + doc._UpdateUIForState() + + def GetPythonPropertyPages(self): + """Returns a list of property pages + """ + from . 
import configui + return [configui.EditorPropertyPage(), configui.EditorWhitespacePropertyPage()] + + def OpenDocumentFile(self, filename, bMakeVisible = 1): + if filename is not None: + try: + path = os.path.split(filename)[0] +# print "The editor is translating", `filename`,"to", + filename = win32api.FindFiles(filename)[0][8] + filename = os.path.join(path, filename) +# print `filename` + except (win32api.error, IndexError) as details: + pass +# print "Couldnt get the full filename!", details + return self._obj_.OpenDocumentFile(filename, bMakeVisible) diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/editor/vss.py b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/vss.py new file mode 100644 index 00000000..ccab373e --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/editor/vss.py @@ -0,0 +1,93 @@ +# vss.py -- Source Control using Microsoft VSS. + +# Provides routines for checking files out of VSS. +# +# Uses an INI file very similar to how VB integrates with VSS - even +# as far as using the same name. + +# The file must be named "Mssccprj.scc", and be in the format of +# an INI file. This file may be in a parent directory, in which +# case the project name will be built from what is specified in the +# ini file, plus the path from the INI file to the file itself. +# +# The INI file should have a [Python] section, and a +# Project=Project Name +# and optionally +# Database=?? + + +import win32ui, win32api, win32con, os, string, sys + +import traceback + +g_iniName = "Mssccprj.scc" # Use the same INI name as VB! + +g_sourceSafe = None + +def FindVssProjectInfo(fullfname): + """Looks up the file system for an INI file describing the project. + + Looking up the tree is for ni style packages. + + Returns (projectName, pathToFileName) where pathToFileName contains + the path from the ini file to the actual file. + """ + path, fnameonly = os.path.split(fullfname) + origPath = path + project = "" + retPaths = [fnameonly] + while not project: + iniName = os.path.join(path, g_iniName) + database = win32api.GetProfileVal("Python","Database", "", iniName) + project = win32api.GetProfileVal("Python","Project", "", iniName) + if project: + break; + # No valid INI file in this directory - look up a level. + path, addpath = os.path.split(path) + if not addpath: # Root? + break + retPaths.insert(0, addpath) + if not project: + win32ui.MessageBox("%s\r\n\r\nThis directory is not configured for Python/VSS" % origPath) + return + return project, "/".join(retPaths), database + + +def CheckoutFile(fileName): + global g_sourceSafe + import pythoncom + ok = 0 + # Assumes the fileName has a complete path, + # and that the INI file can be found in that path + # (or a parent path if a ni style package) + try: + import win32com.client, win32com.client.gencache + mod = win32com.client.gencache.EnsureModule('{783CD4E0-9D54-11CF-B8EE-00608CC9A71F}', 0, 5, 0) + if mod is None: + win32ui.MessageBox("VSS does not appear to be installed. The TypeInfo can not be created") + return ok + + rc = FindVssProjectInfo(fileName) + if rc is None: + return + project, vssFname, database = rc + if g_sourceSafe is None: + g_sourceSafe=win32com.client.Dispatch("SourceSafe") + # SS seems a bit wierd. 
It defaults the arguments as empty strings, but + # then complains when they are used - so we pass "Missing" + if not database: + database = pythoncom.Missing + g_sourceSafe.Open(database, pythoncom.Missing, pythoncom.Missing) + item = g_sourceSafe.VSSItem("$/%s/%s" % (project, vssFname)) + item.Checkout(None, fileName) + ok = 1 + except pythoncom.com_error as exc: + win32ui.MessageBox(exc.strerror, "Error checking out file") + except: + typ, val, tb = sys.exc_info() + traceback.print_exc() + win32ui.MessageBox("%s - %s" % (str(typ), str(val)),"Error checking out file") + tb = None # Cleanup a cycle + return ok + + diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/help.py b/venv/Lib/site-packages/pythonwin/pywin/framework/help.py new file mode 100644 index 00000000..78b82acb --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/help.py @@ -0,0 +1,151 @@ + # help.py - help utilities for PythonWin. +import win32api +import win32con +import win32ui +import string +import sys +import regutil +import string, os + +htmlhelp_handle = None + +html_help_command_translators = { + win32con.HELP_CONTENTS : 1, # HH_DISPLAY_TOC + win32con.HELP_CONTEXT : 15, # HH_HELP_CONTEXT + win32con.HELP_FINDER : 1, # HH_DISPLAY_TOC +} + +def FinalizeHelp(): + global htmlhelp_handle + if htmlhelp_handle is not None: + import win32help + try: + #frame = win32ui.GetMainFrame().GetSafeHwnd() + frame = 0 + win32help.HtmlHelp(frame, None, win32help.HH_UNINITIALIZE, htmlhelp_handle) + except win32help.error: + print("Failed to finalize htmlhelp!") + htmlhelp_handle = None + +def OpenHelpFile(fileName, helpCmd = None, helpArg = None): + "Open a help file, given a full path" + # default help arg. + win32ui.DoWaitCursor(1) + try: + if helpCmd is None: helpCmd = win32con.HELP_CONTENTS + ext = os.path.splitext(fileName)[1].lower() + if ext == ".hlp": + win32api.WinHelp( win32ui.GetMainFrame().GetSafeHwnd(), fileName, helpCmd, helpArg) + # XXX - using the htmlhelp API wreaks havoc with keyboard shortcuts + # so we disable it, forcing ShellExecute, which works fine (but + # doesn't close the help file when Pythonwin is closed. + # Tom Heller also points out http://www.microsoft.com/mind/0499/faq/faq0499.asp, + # which may or may not be related. + elif 0 and ext == ".chm": + import win32help + global htmlhelp_handle + helpCmd = html_help_command_translators.get(helpCmd, helpCmd) + #frame = win32ui.GetMainFrame().GetSafeHwnd() + frame = 0 # Dont want it overlapping ours! + if htmlhelp_handle is None: + htmlhelp_hwnd, htmlhelp_handle = win32help.HtmlHelp(frame, None, win32help.HH_INITIALIZE) + win32help.HtmlHelp(frame, fileName, helpCmd, helpArg) + else: + # Hope that the extension is registered, and we know what to do! + win32api.ShellExecute(0, "open", fileName, None, "", win32con.SW_SHOW) + return fileName + finally: + win32ui.DoWaitCursor(-1) + +def ListAllHelpFiles(): + ret = [] + ret = _ListAllHelpFilesInRoot(win32con.HKEY_LOCAL_MACHINE) + # Ensure we don't get dups. 
+ for item in _ListAllHelpFilesInRoot(win32con.HKEY_CURRENT_USER): + if item not in ret: + ret.append(item) + return ret + +def _ListAllHelpFilesInRoot(root): + """Returns a list of (helpDesc, helpFname) for all registered help files + """ + import regutil + retList = [] + try: + key = win32api.RegOpenKey(root, regutil.BuildDefaultPythonKey() + "\\Help", 0, win32con.KEY_READ) + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND: + raise + return retList + try: + keyNo = 0 + while 1: + try: + helpDesc = win32api.RegEnumKey(key, keyNo) + helpFile = win32api.RegQueryValue(key, helpDesc) + retList.append((helpDesc, helpFile)) + keyNo = keyNo + 1 + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_NO_MORE_ITEMS: + raise + break + finally: + win32api.RegCloseKey(key) + return retList + +def SelectAndRunHelpFile(): + from pywin.dialogs import list + helpFiles = ListAllHelpFiles() + if len(helpFiles)==1: + # only 1 help file registered - probably ours - no point asking + index = 0 + else: + index = list.SelectFromLists("Select Help file", helpFiles, ["Title"]) + if index is not None: + OpenHelpFile(helpFiles[index][1]) + + +helpIDMap = None + +def SetHelpMenuOtherHelp(mainMenu): + """Modifies the main Help Menu to handle all registered help files. + mainMenu -- The main menu to modify - usually from docTemplate.GetSharedMenu() + """ + + # Load all help files from the registry. + global helpIDMap + if helpIDMap is None: + helpIDMap = {} + cmdID = win32ui.ID_HELP_OTHER + excludeList = ['Main Python Documentation', 'Pythonwin Reference'] + firstList = ListAllHelpFiles() + # We actually want to not only exclude these entries, but + # their help file names (as many entries may share the same name) + excludeFnames = [] + for desc, fname in firstList: + if desc in excludeList: + excludeFnames.append(fname) + + helpDescs = [] + for desc, fname in firstList: + if fname not in excludeFnames: + helpIDMap[cmdID] = (desc, fname) + win32ui.GetMainFrame().HookCommand(HandleHelpOtherCommand, cmdID) + cmdID = cmdID + 1 + + helpMenu = mainMenu.GetSubMenu(mainMenu.GetMenuItemCount()-1) # Help menu always last. + otherHelpMenuPos = 2 # cant search for ID, as sub-menu has no ID. 
+ otherMenu = helpMenu.GetSubMenu(otherHelpMenuPos) + while otherMenu.GetMenuItemCount(): + otherMenu.DeleteMenu(0, win32con.MF_BYPOSITION) + + if helpIDMap: + for id, (desc, fname) in helpIDMap.items(): + otherMenu.AppendMenu(win32con.MF_ENABLED|win32con.MF_STRING,id, desc) + else: + helpMenu.EnableMenuItem(otherHelpMenuPos, win32con.MF_BYPOSITION | win32con.MF_GRAYED) + +def HandleHelpOtherCommand(cmd, code): + OpenHelpFile(helpIDMap[cmd][1]) diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/interact.py b/venv/Lib/site-packages/pythonwin/pywin/framework/interact.py new file mode 100644 index 00000000..8d1e77f3 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/interact.py @@ -0,0 +1,871 @@ + +################################################################## +## +## Interactive Shell Window +## + +import sys, os +import code +import string + +import win32ui +import win32api +import win32clipboard +import win32con +import traceback +import afxres +import array +import __main__ + +import pywin.scintilla.formatter +import pywin.scintilla.control +import pywin.scintilla.IDLEenvironment +import pywin.framework.app + +## sequential after ID_GOTO_LINE defined in editor.py +ID_EDIT_COPY_CODE = 0xe2002 +ID_EDIT_EXEC_CLIPBOARD = 0x2003 + +trace=pywin.scintilla.formatter.trace + +from . import winout + +import re +# from IDLE. +_is_block_opener = re.compile(r":\s*(#.*)?$").search +_is_block_closer = re.compile(r""" + \s* + ( return + | break + | continue + | raise + | pass + ) + \b +""", re.VERBOSE).match + +tracebackHeader = "Traceback (".encode("ascii") + +sectionProfile = "Interactive Window" +valueFormatTitle = "FormatTitle" +valueFormatInput = "FormatInput" +valueFormatOutput = "FormatOutput" +valueFormatOutputError = "FormatOutputError" + +# These are defaults only. Values are read from the registry. +formatTitle = (-536870897, 0, 220, 0, 16711680, 184, 34, 'Arial') +formatInput = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New') +formatOutput = (-402653169, 0, 200, 0, 8421376, 0, 49, 'Courier New') +formatOutputError = (-402653169, 0, 200, 0, 255, 0, 49, 'Courier New') + +try: + sys.ps1 +except AttributeError: + sys.ps1 = '>>> ' + sys.ps2 = '... ' + +def LoadPreference(preference, default = ""): + return win32ui.GetProfileVal(sectionProfile, preference, default) + +def SavePreference( prefName, prefValue ): + win32ui.WriteProfileVal( sectionProfile, prefName, prefValue ) + +def GetPromptPrefix(line): + ps1=sys.ps1 + if line[:len(ps1)]==ps1: return ps1 + ps2=sys.ps2 + if line[:len(ps2)]==ps2: return ps2 + +############################################################# +# +# Colorizer related code. 
+# +############################################################# +STYLE_INTERACTIVE_EOL = "Interactive EOL" +STYLE_INTERACTIVE_OUTPUT = "Interactive Output" +STYLE_INTERACTIVE_PROMPT = "Interactive Prompt" +STYLE_INTERACTIVE_BANNER = "Interactive Banner" +STYLE_INTERACTIVE_ERROR = "Interactive Error" +STYLE_INTERACTIVE_ERROR_FINALLINE = "Interactive Error (final line)" + +INTERACTIVE_STYLES = [STYLE_INTERACTIVE_EOL, STYLE_INTERACTIVE_OUTPUT, STYLE_INTERACTIVE_PROMPT, STYLE_INTERACTIVE_BANNER, STYLE_INTERACTIVE_ERROR, STYLE_INTERACTIVE_ERROR_FINALLINE] + +FormatterParent = pywin.scintilla.formatter.PythonSourceFormatter +class InteractiveFormatter(FormatterParent): + def __init__(self, scintilla): + FormatterParent.__init__(self, scintilla) + self.bannerDisplayed = False + + def SetStyles(self): + FormatterParent.SetStyles(self) + Style = pywin.scintilla.formatter.Style + self.RegisterStyle( Style(STYLE_INTERACTIVE_EOL, STYLE_INTERACTIVE_PROMPT ) ) + self.RegisterStyle( Style(STYLE_INTERACTIVE_PROMPT, formatInput ) ) + self.RegisterStyle( Style(STYLE_INTERACTIVE_OUTPUT, formatOutput) ) + self.RegisterStyle( Style(STYLE_INTERACTIVE_BANNER, formatTitle ) ) + self.RegisterStyle( Style(STYLE_INTERACTIVE_ERROR, formatOutputError ) ) + self.RegisterStyle( Style(STYLE_INTERACTIVE_ERROR_FINALLINE, STYLE_INTERACTIVE_ERROR ) ) + + def LoadPreference(self, name, default): + rc = win32ui.GetProfileVal("Format", name, default) + if rc==default: + rc = win32ui.GetProfileVal(sectionProfile, name, default) + return rc + + def ColorizeInteractiveCode(self, cdoc, styleStart, stylePyStart): + lengthDoc = len(cdoc) + if lengthDoc == 0: return + state = styleStart + # As per comments in Colorize(), we work with the raw utf8 + # bytes. To avoid too muych py3k pain, we treat each utf8 byte + # as a latin-1 unicode character - we only use it to compare + # against ascii chars anyway... + chNext = cdoc[0:1].decode('latin-1') + startSeg = 0 + i = 0 + lastState=state # debug only + while i < lengthDoc: + ch = chNext + chNext = cdoc[i+1:i+2].decode('latin-1') + +# trace("ch=%r, i=%d, next=%r, state=%s" % (ch, i, chNext, state)) + if state == STYLE_INTERACTIVE_EOL: + if ch not in '\r\n': + self.ColorSeg(startSeg, i-1, state) + startSeg = i + if ch in [sys.ps1[0], sys.ps2[0]]: + state = STYLE_INTERACTIVE_PROMPT + elif cdoc[i:i+len(tracebackHeader)]==tracebackHeader: + state = STYLE_INTERACTIVE_ERROR + else: + state = STYLE_INTERACTIVE_OUTPUT + elif state == STYLE_INTERACTIVE_PROMPT: + if ch not in sys.ps1 + sys.ps2 + " ": + self.ColorSeg(startSeg, i-1, state) + startSeg = i + if ch in '\r\n': + state = STYLE_INTERACTIVE_EOL + else: + state = stylePyStart # Start coloring Python code. 
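+            # Roughly: EOL inspects the first character of each new line and switches
+            # to PROMPT, ERROR (a traceback header) or OUTPUT; PROMPT hands the rest
+            # of the line to the Python colorizer; ERROR spans the traceback and
+            # finishes with ERROR_FINALLINE; OUTPUT, BANNER and the Python-code states
+            # drop back to EOL at the end of the line.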
+            elif state in [STYLE_INTERACTIVE_OUTPUT]:
+                if ch in '\r\n':
+                    self.ColorSeg(startSeg, i-1, state)
+                    startSeg = i
+                    state = STYLE_INTERACTIVE_EOL
+            elif state == STYLE_INTERACTIVE_ERROR:
+                if ch in '\r\n' and chNext and chNext not in string.whitespace:
+                    # Everything including me
+                    self.ColorSeg(startSeg, i, state)
+                    startSeg = i+1
+                    state = STYLE_INTERACTIVE_ERROR_FINALLINE
+                elif i == 0 and ch not in string.whitespace:
+                    # If we are coloring from the start of a line,
+                    # we need this better check for the last line
+                    # Color up to not including me
+                    self.ColorSeg(startSeg, i-1, state)
+                    startSeg = i
+                    state = STYLE_INTERACTIVE_ERROR_FINALLINE
+            elif state == STYLE_INTERACTIVE_ERROR_FINALLINE:
+                if ch in '\r\n':
+                    self.ColorSeg(startSeg, i-1, state)
+                    startSeg = i
+                    state = STYLE_INTERACTIVE_EOL
+            elif state == STYLE_INTERACTIVE_BANNER:
+                if ch in '\r\n' and (chNext=='' or chNext in ">["):
+                    # Everything including me
+                    self.ColorSeg(startSeg, i-1, state)
+                    startSeg = i
+                    state = STYLE_INTERACTIVE_EOL
+            else:
+                # It is a PythonColorizer state - seek past the end of the line
+                # and ask the Python colorizer to color that.
+                end = startSeg
+                while end < lengthDoc and cdoc[end] not in '\r\n'.encode('ascii'):
+                    end = end + 1
+                self.ColorizePythonCode( cdoc[:end], startSeg, state)
+                stylePyStart = self.GetStringStyle(end-1)
+                if stylePyStart is None:
+                    stylePyStart = pywin.scintilla.formatter.STYLE_DEFAULT
+                else:
+                    stylePyStart = stylePyStart.name
+                startSeg =end
+                i = end - 1 # ready for increment.
+                chNext = cdoc[end:end+1].decode('latin-1')
+                state = STYLE_INTERACTIVE_EOL
+            if lastState != state:
+                lastState = state
+            i = i + 1
+        # and the rest
+        if startSeg < i:
+            self.ColorSeg(startSeg, i-1, state)
+
+    def Colorize(self, start=0, end=-1):
+        stringVal = self.scintilla.GetTextRange(start, end, decode=False)
+        styleStart = None
+        stylePyStart = None
+        if start > 1:
+            # Likely we are being asked to color from the start of the line.
+            # We find the last formatted character on the previous line.
+            # If TQString, we continue it. Otherwise, we reset.
+            look = start -1
+            while look and self.scintilla.SCIGetCharAt(look) in '\n\r':
+                look = look - 1
+            if look and look < start-1: # Did we find a char before the \n\r sets?
+                strstyle = self.GetStringStyle(look)
+                quote_char = None
+                if strstyle is not None:
+                    if strstyle.name == pywin.scintilla.formatter.STYLE_TQSSTRING:
+                        quote_char = "'"
+                    elif strstyle.name == pywin.scintilla.formatter.STYLE_TQDSTRING:
+                        quote_char = '"'
+                    if quote_char is not None:
+                        # It is a TQS. If the TQS is not terminated, we
+                        # carry the style through.
+                        if look > 2:
+                            look_str = self.scintilla.SCIGetCharAt(look-2) + self.scintilla.SCIGetCharAt(look-1) + self.scintilla.SCIGetCharAt(look)
+                            if look_str != quote_char * 3:
+                                stylePyStart = strstyle.name
+        if stylePyStart is None: stylePyStart = pywin.scintilla.formatter.STYLE_DEFAULT
+
+        if start > 0:
+            stylenum = self.scintilla.SCIGetStyleAt(start - 1)
+            styleStart = self.GetStyleByNum(stylenum).name
+        elif self.bannerDisplayed:
+            styleStart = STYLE_INTERACTIVE_EOL
+        else:
+            styleStart = STYLE_INTERACTIVE_BANNER
+            self.bannerDisplayed = True
+        self.scintilla.SCIStartStyling(start, 31)
+        self.style_buffer = array.array("b", (0,)*len(stringVal))
+        self.ColorizeInteractiveCode(stringVal, styleStart, stylePyStart)
+        self.scintilla.SCISetStylingEx(self.style_buffer)
+        self.style_buffer = None
+
+###############################################################
+#
+# This class handles the Python interactive interpreter.
+#
+# It uses a basic EditWindow, and does all the magic.
+# This is triggered by the enter key hander attached by the
+# start-up code. It determines if a command is to be executed
+# or continued (ie, emit "... 
") by snooping around the current +# line, looking for the prompts +# +class PythonwinInteractiveInterpreter(code.InteractiveInterpreter): + def __init__(self, locals = None, globals = None): + if locals is None: locals = __main__.__dict__ + if globals is None: globals = locals + self.globals = globals + code.InteractiveInterpreter.__init__(self, locals) + def showsyntaxerror(self, filename=None): + sys.stderr.write(tracebackHeader.decode('ascii')) # So the color syntaxer recognises it. + code.InteractiveInterpreter.showsyntaxerror(self, filename) + def runcode(self, code): + try: + exec(code, self.globals, self.locals) + except SystemExit: + raise + except: + self.showtraceback() + +class InteractiveCore: + def __init__(self, banner = None): + self.banner = banner +# LoadFontPreferences() + def Init(self): + self.oldStdOut = self.oldStdErr = None + +# self.SetWordWrap(win32ui.CRichEditView_WrapNone) + self.interp = PythonwinInteractiveInterpreter() + + self.OutputGrab() # Release at cleanup. + + if self.GetTextLength()==0: + if self.banner is None: + suffix = "" + if win32ui.debug: suffix = ", debug build" + sys.stderr.write("PythonWin %s on %s%s.\n" % (sys.version, sys.platform, suffix) ) + sys.stderr.write("Portions %s - see 'Help/About PythonWin' for further copyright information.\n" % (win32ui.copyright,) ) + else: + sys.stderr.write(banner) + rcfile = os.environ.get('PYTHONSTARTUP') + if rcfile: + import __main__ + try: + exec(compile(open(rcfile, "rb").read(), rcfile, 'exec', dont_inherit=True), + __main__.__dict__, __main__.__dict__) + except: + sys.stderr.write(">>> \nError executing PYTHONSTARTUP script %r\n" % (rcfile)) + traceback.print_exc(file=sys.stderr) + self.AppendToPrompt([]) + + def SetContext(self, globals, locals, name = "Dbg"): + oldPrompt = sys.ps1 + if globals is None: + # Reset + sys.ps1 = ">>> " + sys.ps2 = "... " + locals = globals = __main__.__dict__ + else: + sys.ps1 = "[%s]>>> " % name + sys.ps2 = "[%s]... " % name + self.interp.locals = locals + self.interp.globals = globals + self.AppendToPrompt([], oldPrompt) + + def GetContext(self): + return self.interp.globals, self.interp.locals + + def DoGetLine(self, line=-1): + if line==-1: line = self.LineFromChar() + line = self.GetLine(line) + while line and line[-1] in ['\r', '\n']: + line = line[:-1] + return line + def AppendToPrompt(self,bufLines, oldPrompt = None): + " Take a command and stick it at the end of the buffer (with python prompts inserted if required)." + self.flush() + lastLineNo = self.GetLineCount()-1 + line = self.DoGetLine(lastLineNo) + if oldPrompt and line==oldPrompt: + self.SetSel(self.GetTextLength()-len(oldPrompt), self.GetTextLength()) + self.ReplaceSel(sys.ps1) + elif (line!=str(sys.ps1)): + if len(line)!=0: self.write('\n') + self.write(sys.ps1) + self.flush() + self.idle.text.mark_set("iomark", "end-1c") + if not bufLines: + return + terms = (["\n" + sys.ps2] * (len(bufLines)-1)) + [''] + for bufLine, term in zip(bufLines, terms): + if bufLine.strip(): + self.write( bufLine + term ) + self.flush() + + def EnsureNoPrompt(self): + # Get ready to write some text NOT at a Python prompt. + self.flush() + lastLineNo = self.GetLineCount()-1 + line = self.DoGetLine(lastLineNo) + if not line or line in [sys.ps1, sys.ps2]: + self.SetSel(self.GetTextLength()-len(line), self.GetTextLength()) + self.ReplaceSel('') + else: + # Just add a new line. 
+ self.write('\n') + + def _GetSubConfigNames(self): + return ["interactive"] # Allow [Keys:Interactive] sections to be specific + + def HookHandlers(self): + # Hook menu command (executed when a menu item with that ID is selected from a menu/toolbar + self.HookCommand(self.OnSelectBlock, win32ui.ID_EDIT_SELECT_BLOCK) + self.HookCommand(self.OnEditCopyCode, ID_EDIT_COPY_CODE) + self.HookCommand(self.OnEditExecClipboard, ID_EDIT_EXEC_CLIPBOARD) + mod = pywin.scintilla.IDLEenvironment.GetIDLEModule("IdleHistory") + if mod is not None: + self.history = mod.History(self.idle.text, "\n" + sys.ps2) + else: + self.history = None + # hack for now for event handling. + + # GetBlockBoundary takes a line number, and will return the + # start and and line numbers of the block, and a flag indicating if the + # block is a Python code block. + # If the line specified has a Python prompt, then the lines are parsed + # backwards and forwards, and the flag is true. + # If the line does not start with a prompt, the block is searched forward + # and backward until a prompt _is_ found, and all lines in between without + # prompts are returned, and the flag is false. + def GetBlockBoundary( self, lineNo ): + line = self.DoGetLine(lineNo) + maxLineNo = self.GetLineCount()-1 + prefix = GetPromptPrefix(line) + if prefix is None: # Non code block + flag = 0 + startLineNo = lineNo + while startLineNo>0: + if GetPromptPrefix(self.DoGetLine(startLineNo-1)) is not None: + break # there _is_ a prompt + startLineNo = startLineNo-1 + endLineNo = lineNo + while endLineNo0 and prefix!=str(sys.ps1): + prefix = GetPromptPrefix(self.DoGetLine(startLineNo-1)) + if prefix is None: + break; # there is no prompt. + startLineNo = startLineNo - 1 + endLineNo = lineNo + while endLineNo= start: + thisLine = self.DoGetLine(end) + promptLen = len(GetPromptPrefix(thisLine)) + retList = [thisLine[promptLen:]] + retList + end = end-1 + return retList + + def OutputGrab(self): +# import win32traceutil; return + self.oldStdOut = sys.stdout + self.oldStdErr = sys.stderr + sys.stdout=self + sys.stderr=self + self.flush() + + def OutputRelease(self): + # a command may have overwritten these - only restore if not. + if self.oldStdOut is not None: + if sys.stdout == self: + sys.stdout=self.oldStdOut + if self.oldStdErr is not None: + if sys.stderr == self: + sys.stderr=self.oldStdErr + self.oldStdOut = None + self.oldStdErr = None + self.flush() + + ################################### + # + # Message/Command/Key Hooks. + # + # Enter key handler + # + def ProcessEnterEvent(self, event ): + #If autocompletion has been triggered, complete and do not process event + if self.SCIAutoCActive(): + self.SCIAutoCComplete() + self.SCICancel() + return + + self.SCICancel() + # First, check for an error message + haveGrabbedOutput = 0 + if self.HandleSpecialLine(): return 0 + + lineNo = self.LineFromChar() + start, end, isCode = self.GetBlockBoundary(lineNo) + # If we are not in a code block just go to the prompt (or create a new one) + if not isCode: + self.AppendToPrompt([]) + win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)) + return + + lines = self.ExtractCommand((start,end)) + + # If we are in a code-block, but it isnt at the end of the buffer + # then copy it to the end ready for editing and subsequent execution + if end!=self.GetLineCount()-1: + win32ui.SetStatusText('Press ENTER to execute command') + self.AppendToPrompt(lines) + self.SetSel(-2) + return + + # If SHIFT held down, we want new code here and now! 
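
# ---- Illustrative sketch, not part of the diff above: the OutputGrab /
# OutputRelease idea in isolation. Any object with write() and flush() can
# stand in for sys.stdout, which is how the interactive window captures print
# output; restoring only while sys.stdout still points at us mirrors the
# "a command may have overwritten these" check in OutputRelease.
import sys

class WindowLikeSink:
    def __init__(self):
        self.captured = []
    def write(self, text):
        self.captured.append(text)
    def flush(self):
        pass

sink = WindowLikeSink()
old_stdout = sys.stdout
sys.stdout = sink                      # OutputGrab
try:
    print("hello interactive window")
finally:
    if sys.stdout is sink:             # only restore if nothing replaced us meanwhile
        sys.stdout = old_stdout        # OutputRelease
assert "".join(sink.captured).startswith("hello")
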
+ bNeedIndent = win32api.GetKeyState(win32con.VK_SHIFT)<0 or win32api.GetKeyState(win32con.VK_CONTROL)<0 + if bNeedIndent: + self.ReplaceSel("\n") + else: + self.SetSel(-2) + self.ReplaceSel("\n") + source = '\n'.join(lines) + while source and source[-1] in '\t ': + source = source[:-1] + self.OutputGrab() # grab the output for the command exec. + try: + if self.interp.runsource(source, ""): # Need more input! + bNeedIndent = 1 + else: + # If the last line isnt empty, append a newline + if self.history is not None: + self.history.history_store(source) + self.AppendToPrompt([]) + win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)) +# win32ui.SetStatusText('Successfully executed statement') + finally: + self.OutputRelease() + if bNeedIndent: + win32ui.SetStatusText('Ready to continue the command') + # Now attempt correct indentation (should use IDLE?) + curLine = self.DoGetLine(lineNo)[len(sys.ps2):] + pos = 0 + indent='' + while len(curLine)>pos and curLine[pos] in string.whitespace: + indent = indent + curLine[pos] + pos = pos + 1 + if _is_block_opener(curLine): + indent = indent + '\t' + elif _is_block_closer(curLine): + indent = indent[:-1] + # use ReplaceSel to ensure it goes at the cursor rather than end of buffer. + self.ReplaceSel(sys.ps2+indent) + return 0 + + # ESC key handler + def ProcessEscEvent(self, event): + # Implement a cancel. + if self.SCIAutoCActive() or self.SCICallTipActive(): + self.SCICancel() + else: + win32ui.SetStatusText('Cancelled.') + self.AppendToPrompt(('',)) + return 0 + + def OnSelectBlock(self,command, code): + lineNo = self.LineFromChar() + start, end, isCode = self.GetBlockBoundary(lineNo) + startIndex = self.LineIndex(start) + endIndex = self.LineIndex(end+1)-2 # skip \r + \n + if endIndex<0: # must be beyond end of buffer + endIndex = -2 # self.Length() + self.SetSel(startIndex,endIndex) + + def OnEditCopyCode(self, command, code): + """ Sanitizes code from interactive window, removing prompts and output, + and inserts it in the clipboard.""" + code=self.GetSelText() + lines=code.splitlines() + out_lines=[] + for line in lines: + if line.startswith(sys.ps1): + line=line[len(sys.ps1):] + out_lines.append(line) + elif line.startswith(sys.ps2): + line=line[len(sys.ps2):] + out_lines.append(line) + out_code=os.linesep.join(out_lines) + win32clipboard.OpenClipboard() + try: + win32clipboard.SetClipboardData(win32clipboard.CF_UNICODETEXT, str(out_code)) + finally: + win32clipboard.CloseClipboard() + + def OnEditExecClipboard(self, command, code): + """ Executes python code directly from the clipboard.""" + win32clipboard.OpenClipboard() + try: + code=win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT) + finally: + win32clipboard.CloseClipboard() + + code=code.replace('\r\n','\n')+'\n' + try: + o=compile(code, '', 'exec') + exec(o, __main__.__dict__) + except: + traceback.print_exc() + + def GetRightMenuItems(self): + # Just override parents + ret = [] + flags = 0 + ret.append((flags, win32ui.ID_EDIT_UNDO, '&Undo')) + ret.append(win32con.MF_SEPARATOR) + ret.append((flags, win32ui.ID_EDIT_CUT, 'Cu&t')) + ret.append((flags, win32ui.ID_EDIT_COPY, '&Copy')) + + start, end=self.GetSel() + if start!=end: + ret.append((flags, ID_EDIT_COPY_CODE, 'Copy code without prompts')) + if win32clipboard.IsClipboardFormatAvailable(win32clipboard.CF_UNICODETEXT): + ret.append((flags, ID_EDIT_EXEC_CLIPBOARD, 'Execute python code from clipboard')) + + ret.append((flags, win32ui.ID_EDIT_PASTE, '&Paste')) + ret.append(win32con.MF_SEPARATOR) + 
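
# ---- Illustrative sketch (hypothetical helper, not in the files above): the
# prompt-stripping rule used by OnEditCopyCode as a standalone function.
# Lines starting with sys.ps1 / sys.ps2 keep their code with the prompt
# removed; interpreter output lines are dropped, leaving code that can be
# pasted elsewhere.
def strip_prompts(text, ps1=">>> ", ps2="... "):
    out = []
    for line in text.splitlines():
        if line.startswith(ps1):
            out.append(line[len(ps1):])
        elif line.startswith(ps2):
            out.append(line[len(ps2):])
    return "\n".join(out)

sample = ">>> x = 1\n>>> x + 1\n2"
assert strip_prompts(sample) == "x = 1\nx + 1"
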
ret.append((flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all')) + ret.append((flags, win32ui.ID_EDIT_SELECT_BLOCK, 'Select &block')) + ret.append((flags, win32ui.ID_VIEW_WHITESPACE, "View &Whitespace")) + return ret + + def MDINextEvent(self, event): + win32ui.GetMainFrame().MDINext(0) + def MDIPrevEvent(self, event): + win32ui.GetMainFrame().MDINext(0) + + def WindowBackEvent(self, event): + parent = self.GetParentFrame() + if parent == win32ui.GetMainFrame(): + # It is docked. + try: + wnd, isactive = parent.MDIGetActive() + wnd.SetFocus() + except win32ui.error: + # No MDI window active! + pass + else: + # Normal Window + try: + lastActive = self.GetParentFrame().lastActive + # If the window is invalid, reset it. + if lastActive is not None and (lastActive._obj_ is None or lastActive.GetSafeHwnd()==0): + lastActive = self.GetParentFrame().lastActive = None + win32ui.SetStatusText("The last active Window has been closed.") + except AttributeError: + print("Can't find the last active window!") + lastActive = None + if lastActive is not None: + lastActive.MDIActivate() + +class InteractiveView(InteractiveCore, winout.WindowOutputView): + def __init__(self, doc): + InteractiveCore.__init__(self) + winout.WindowOutputView.__init__(self, doc) + self.encoding = pywin.default_scintilla_encoding + + def _MakeColorizer(self): + return InteractiveFormatter(self) + def OnInitialUpdate(self): + winout.WindowOutputView.OnInitialUpdate(self) + self.SetWordWrap() + self.Init() + def HookHandlers(self): + winout.WindowOutputView.HookHandlers(self) + InteractiveCore.HookHandlers(self) + +class CInteractivePython(winout.WindowOutput): + def __init__(self, makeDoc = None, makeFrame = None): + self.IsFinalDestroy = 0 + winout.WindowOutput.__init__(self, sectionProfile, sectionProfile, \ + winout.flags.WQ_LINE, 1, None, makeDoc, makeFrame, InteractiveView ) + self.Create() + + def OnViewDestroy(self, view): + if self.IsFinalDestroy: + view.OutputRelease() + winout.WindowOutput.OnViewDestroy(self, view) + + def Close(self): + self.IsFinalDestroy = 1 + winout.WindowOutput.Close(self) + +class InteractiveFrame(winout.WindowOutputFrame): + def __init__(self): + self.lastActive = None + winout.WindowOutputFrame.__init__(self) + + def OnMDIActivate(self, bActive, wndActive, wndDeactive): + if bActive: + self.lastActive = wndDeactive + +###################################################################### +## +## Dockable Window Support +## +###################################################################### +ID_DOCKED_INTERACTIVE_CONTROLBAR = 0xe802 + +DockedInteractiveViewParent = InteractiveView +class DockedInteractiveView(DockedInteractiveViewParent): + def HookHandlers(self): + DockedInteractiveViewParent.HookHandlers(self) + self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS) + self.HookMessage(self.OnKillFocus, win32con.WM_KILLFOCUS) + def OnSetFocus(self, msg): + self.GetParentFrame().SetActiveView(self) + return 1 + def OnKillFocus(self, msg): + # If we are losing focus to another in this app, reset the main frame's active view. 
+ hwnd = wparam = msg[2] + try: + wnd = win32ui.CreateWindowFromHandle(hwnd) + reset = wnd.GetTopLevelFrame()==self.GetTopLevelFrame() + except win32ui.error: + reset = 0 # Not my window + if reset: self.GetParentFrame().SetActiveView(None) + return 1 + def OnDestroy(self, msg): + newSize = self.GetWindowPlacement()[4] + pywin.framework.app.SaveWindowSize("Interactive Window", newSize, "docked") + try: + if self.GetParentFrame().GetActiveView==self: + self.GetParentFrame().SetActiveView(None) + except win32ui.error: + pass + try: + if win32ui.GetMainFrame().GetActiveView()==self: + win32ui.GetMainFrame().SetActiveView(None) + except win32ui.error: + pass + return DockedInteractiveViewParent.OnDestroy(self, msg) + +class CDockedInteractivePython(CInteractivePython): + def __init__(self, dockbar): + self.bFirstCreated = 0 + self.dockbar = dockbar + CInteractivePython.__init__(self) + def NeedRecreateWindow(self): + if self.bCreating: + return 0 + try: + frame = win32ui.GetMainFrame() + if frame.closing: + return 0 # Dieing! + except (win32ui.error, AttributeError): + return 0 # The app is dieing! + try: + cb = frame.GetControlBar(ID_DOCKED_INTERACTIVE_CONTROLBAR) + return not cb.IsWindowVisible() + except win32ui.error: + return 1 # Control bar does not exist! + def RecreateWindow(self): + try: + dockbar = win32ui.GetMainFrame().GetControlBar(ID_DOCKED_INTERACTIVE_CONTROLBAR) + win32ui.GetMainFrame().ShowControlBar(dockbar, 1, 1) + except win32ui.error: + CreateDockedInteractiveWindow() + + def Create(self): + self.bCreating = 1 + doc = InteractiveDocument(None, self.DoCreateDoc()) + view = DockedInteractiveView(doc) + defRect = pywin.framework.app.LoadWindowSize("Interactive Window", "docked") + if defRect[2]-defRect[0]==0: + defRect = 0, 0, 500, 200 + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER + id = 1050 # win32ui.AFX_IDW_PANE_FIRST + view.CreateWindow(self.dockbar, id, style, defRect) + view.OnInitialUpdate() + self.bFirstCreated = 1 + + self.currentView = doc.GetFirstView() + self.bCreating = 0 + if self.title: doc.SetTitle(self.title) + +# The factory we pass to the dockable window support. +def InteractiveViewCreator(parent): + global edit + edit = CDockedInteractivePython(parent) + return edit.currentView + +def CreateDockedInteractiveWindow(): + # Later, the DockingBar should be capable of hosting multiple + # children. + from pywin.docking.DockingBar import DockingBar + bar = DockingBar() + creator = InteractiveViewCreator + bar.CreateWindow(win32ui.GetMainFrame(), creator, "Interactive Window", ID_DOCKED_INTERACTIVE_CONTROLBAR) + bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC) + bar.EnableDocking(afxres.CBRS_ALIGN_ANY) + win32ui.GetMainFrame().DockControlBar(bar, afxres.AFX_IDW_DOCKBAR_BOTTOM) + +###################################################################### +# +# The public interface to this module. +# +###################################################################### +# No extra functionality now, but maybe later, so +# publicize these names. +InteractiveDocument = winout.WindowOutputDocument + +# We remember our one and only interactive window in the "edit" variable. +edit = None + +def CreateInteractiveWindowUserPreference(makeDoc = None, makeFrame = None): + """Create some sort of interactive window if the user's preference say we should. 
+ """ + bCreate = LoadPreference("Show at startup", 1) + if bCreate: + CreateInteractiveWindow(makeDoc, makeFrame) + +def CreateInteractiveWindow(makeDoc = None, makeFrame = None): + """Create a standard or docked interactive window unconditionally + """ + assert edit is None, "Creating second interactive window!" + bDocking = LoadPreference("Docking", 0) + if bDocking: + CreateDockedInteractiveWindow() + else: + CreateMDIInteractiveWindow(makeDoc, makeFrame) + assert edit is not None, "Created interactive window, but did not set the global!" + edit.currentView.SetFocus() + +def CreateMDIInteractiveWindow(makeDoc = None, makeFrame = None): + """Create a standard (non-docked) interactive window unconditionally + """ + global edit + if makeDoc is None: makeDoc = InteractiveDocument + if makeFrame is None: makeFrame = InteractiveFrame + edit = CInteractivePython(makeDoc=makeDoc,makeFrame=makeFrame) + +def DestroyInteractiveWindow(): + """ Destroy the interactive window. + This is different to Closing the window, + which may automatically re-appear. Once destroyed, it can never be recreated, + and a complete new instance must be created (which the various other helper + functions will then do after making this call + """ + global edit + if edit is not None and edit.currentView is not None: + if edit.currentView.GetParentFrame() == win32ui.GetMainFrame(): + # It is docked - do nothing now (this is only called at shutdown!) + pass + else: + # It is a standard window - call Close on the container. + edit.Close() + edit = None + +def CloseInteractiveWindow(): + """Close the interactive window, allowing it to be re-created on demand. + """ + global edit + if edit is not None and edit.currentView is not None: + if edit.currentView.GetParentFrame() == win32ui.GetMainFrame(): + # It is docked, just hide the dock bar. + frame = win32ui.GetMainFrame() + cb = frame.GetControlBar(ID_DOCKED_INTERACTIVE_CONTROLBAR) + frame.ShowControlBar(cb, 0, 1) + else: + # It is a standard window - destroy the frame/view, allowing the object itself to remain. + edit.currentView.GetParentFrame().DestroyWindow() + +def ToggleInteractiveWindow(): + """If the interactive window is visible, hide it, otherwise show it. + """ + if edit is None: + CreateInteractiveWindow() + else: + if edit.NeedRecreateWindow(): + edit.RecreateWindow() + else: + # Close it, allowing a reopen. + CloseInteractiveWindow() + +def ShowInteractiveWindow(): + """Shows (or creates if necessary) an interactive window""" + if edit is None: + CreateInteractiveWindow() + else: + if edit.NeedRecreateWindow(): + edit.RecreateWindow() + else: + parent = edit.currentView.GetParentFrame() + if parent == win32ui.GetMainFrame(): # It is docked. + edit.currentView.SetFocus() + else: # It is a "normal" window + edit.currentView.GetParentFrame().AutoRestore() + win32ui.GetMainFrame().MDIActivate(edit.currentView.GetParentFrame()) + +def IsInteractiveWindowVisible(): + return edit is not None and not edit.NeedRecreateWindow() diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/intpyapp.py b/venv/Lib/site-packages/pythonwin/pywin/framework/intpyapp.py new file mode 100644 index 00000000..72d86cf4 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/intpyapp.py @@ -0,0 +1,477 @@ +# intpyapp.py - Interactive Python application class +# +import win32con +import win32api +import win32ui +import __main__ +import sys +import os +from . import app +import traceback +from pywin.mfc import afxres, dialog +import commctrl +from . 
import dbgcommands + +lastLocateFileName = ".py" # used in the "File/Locate" dialog... + +# todo - _SetupSharedMenu should be moved to a framework class. +def _SetupSharedMenu_(self): + sharedMenu = self.GetSharedMenu() + from pywin.framework import toolmenu + toolmenu.SetToolsMenu(sharedMenu) + from pywin.framework import help + help.SetHelpMenuOtherHelp(sharedMenu) +from pywin.mfc import docview +docview.DocTemplate._SetupSharedMenu_=_SetupSharedMenu_ + +class MainFrame(app.MainFrame): + def OnCreate(self, createStruct): + self.closing = 0 + if app.MainFrame.OnCreate(self, createStruct)==-1: + return -1 + style = win32con.WS_CHILD | afxres.CBRS_SIZE_DYNAMIC | afxres.CBRS_TOP | afxres.CBRS_TOOLTIPS | afxres.CBRS_FLYBY + + self.EnableDocking(afxres.CBRS_ALIGN_ANY) + + tb = win32ui.CreateToolBar (self, style | win32con.WS_VISIBLE) + tb.ModifyStyle(0, commctrl.TBSTYLE_FLAT) + tb.LoadToolBar(win32ui.IDR_MAINFRAME) + tb.EnableDocking(afxres.CBRS_ALIGN_ANY) + tb.SetWindowText("Standard") + self.DockControlBar(tb) + # Any other packages which use toolbars + from pywin.debugger.debugger import PrepareControlBars + PrepareControlBars(self) + # Note "interact" also uses dockable windows, but they already happen + + # And a "Tools" menu on the main frame. + menu = self.GetMenu() + from . import toolmenu + toolmenu.SetToolsMenu(menu, 2) + # And fix the "Help" menu on the main frame + from pywin.framework import help + help.SetHelpMenuOtherHelp(menu) + + def OnClose(self): + try: + import pywin.debugger + if pywin.debugger.currentDebugger is not None and pywin.debugger.currentDebugger.pumping: + try: + pywin.debugger.currentDebugger.close(1) + except: + traceback.print_exc() + return + except win32ui.error: + pass + self.closing = 1 + self.SaveBarState("ToolbarDefault") + self.SetActiveView(None) # Otherwise MFC's OnClose may _not_ prompt for save. + + from pywin.framework import help + help.FinalizeHelp() + + self.DestroyControlBar(afxres.AFX_IDW_TOOLBAR) + self.DestroyControlBar(win32ui.ID_VIEW_TOOLBAR_DBG) + + return self._obj_.OnClose() + + def DestroyControlBar(self, id): + try: + bar = self.GetControlBar(id) + except win32ui.error: + return + bar.DestroyWindow() + + def OnCommand(self, wparam, lparam): + # By default, the current MDI child frame will process WM_COMMAND + # messages before any docked control bars - even if the control bar + # has focus. This is a problem for the interactive window when docked. + # Therefore, we detect the situation of a view having the main frame + # as its parent, and assume it must be a docked view (which it will in an MDI app) + try: + v = self.GetActiveView() # Raise an exception if none - good - then we want default handling + # Main frame _does_ have a current view (ie, a docking view) - see if it wants it. + if v.OnCommand(wparam, lparam): + return 1 + except (win32ui.error, AttributeError): + pass + return self._obj_.OnCommand(wparam, lparam) + +class InteractivePythonApp(app.CApp): + # This works if necessary - just we dont need to override the Run method. 
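
# ---- Illustrative sketch with toy classes, not the real pywin ones: the
# monkey-patch pattern used for _SetupSharedMenu_ above, where a plain function
# is defined at module level and then assigned onto docview.DocTemplate so
# every template instance picks it up as a method.
class DocTemplateLike:
    def GetSharedMenu(self):
        return ["File", "Edit"]

def _SetupSharedMenu_(self):
    menu = self.GetSharedMenu()
    menu.append("Tools")          # stand-in for toolmenu.SetToolsMenu(...)
    return menu

DocTemplateLike._SetupSharedMenu_ = _SetupSharedMenu_   # patch the class after the fact

assert DocTemplateLike()._SetupSharedMenu_() == ["File", "Edit", "Tools"]
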
+# def Run(self): +# return self._obj_.Run() + + def HookCommands(self): + app.CApp.HookCommands(self) + dbgcommands.DebuggerCommandHandler().HookCommands() + self.HookCommand(self.OnViewBrowse,win32ui.ID_VIEW_BROWSE) + self.HookCommand(self.OnFileImport,win32ui.ID_FILE_IMPORT) + self.HookCommand(self.OnFileCheck,win32ui.ID_FILE_CHECK) + self.HookCommandUpdate(self.OnUpdateFileCheck, win32ui.ID_FILE_CHECK) + self.HookCommand(self.OnFileRun,win32ui.ID_FILE_RUN) + self.HookCommand(self.OnFileLocate,win32ui.ID_FILE_LOCATE) + self.HookCommand(self.OnInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE) + self.HookCommandUpdate(self.OnUpdateInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE) + self.HookCommand(self.OnViewOptions, win32ui.ID_VIEW_OPTIONS) + self.HookCommand(self.OnHelpIndex, afxres.ID_HELP_INDEX) + self.HookCommand(self.OnFileSaveAll, win32ui.ID_FILE_SAVE_ALL) + self.HookCommand(self.OnViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG) + self.HookCommandUpdate(self.OnUpdateViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG) + + def CreateMainFrame(self): + return MainFrame() + + def MakeExistingDDEConnection(self): + # Use DDE to connect to an existing instance + # Return None if no existing instance + try: + from . import intpydde + except ImportError: + # No dde support! + return None + conv = intpydde.CreateConversation(self.ddeServer) + try: + conv.ConnectTo("Pythonwin", "System") + return conv + except intpydde.error: + return None + + def InitDDE(self): + # Do all the magic DDE handling. + # Returns TRUE if we have pumped the arguments to our + # remote DDE app, and we should terminate. + try: + from . import intpydde + except ImportError: + self.ddeServer = None + intpydde = None + if intpydde is not None: + self.ddeServer = intpydde.DDEServer(self) + self.ddeServer.Create("Pythonwin", intpydde.CBF_FAIL_SELFCONNECTIONS ) + try: + # If there is an existing instance, pump the arguments to it. + connection = self.MakeExistingDDEConnection() + if connection is not None: + connection.Exec("self.Activate()") + if self.ProcessArgs(sys.argv, connection) is None: + return 1 + except: + # It is too early to 'print' an exception - we + # don't have stdout setup yet! + win32ui.DisplayTraceback(sys.exc_info(), " - error in DDE conversation with Pythonwin") + return 1 + + def InitInstance(self): + # Allow "/nodde" and "/new" to optimize this! + if ("/nodde" not in sys.argv and "/new" not in sys.argv + and "-nodde" not in sys.argv and "-new" not in sys.argv): + if self.InitDDE(): + return 1 # A remote DDE client is doing it for us! + else: + self.ddeServer = None + + win32ui.SetRegistryKey("Python %s" % (sys.winver,)) # MFC automatically puts the main frame caption on! + app.CApp.InitInstance(self) + + # Create the taskbar icon + win32ui.CreateDebuggerThread() + + # Allow Pythonwin to host OCX controls. + win32ui.EnableControlContainer() + + # Display the interactive window if the user wants it. + from . import interact + interact.CreateInteractiveWindowUserPreference() + + # Load the modules we use internally. + self.LoadSystemModules() + + # Load additional module the user may want. + self.LoadUserModules() + + # Load the ToolBar state near the end of the init process, as + # there may be Toolbar IDs created by the user or other modules. + # By now all these modules should be loaded, so all the toolbar IDs loaded. + try: + self.frame.LoadBarState("ToolbarDefault") + except win32ui.error: + # MFC sucks. 
It does essentially "GetDlgItem(x)->Something", so if the + # toolbar with ID x does not exist, MFC crashes! Pythonwin has a trap for this + # but I need to investigate more how to prevent it (AFAIK, ensuring all the + # toolbars are created by now _should_ stop it!) + pass + + # Finally process the command line arguments. + try: + self.ProcessArgs(sys.argv) + except: + # too early for printing anything. + win32ui.DisplayTraceback(sys.exc_info(), " - error processing command line args") + + def ExitInstance(self): + win32ui.DestroyDebuggerThread() + try: + from . import interact + interact.DestroyInteractiveWindow() + except: + pass + if self.ddeServer is not None: + self.ddeServer.Shutdown() + self.ddeServer = None + return app.CApp.ExitInstance(self) + + def Activate(self): + # Bring to the foreground. Mainly used when another app starts up, it asks + # this one to activate itself, then it terminates. + frame = win32ui.GetMainFrame() + frame.SetForegroundWindow() + if frame.GetWindowPlacement()[1]==win32con.SW_SHOWMINIMIZED: + frame.ShowWindow(win32con.SW_RESTORE) + + def ProcessArgs(self, args, dde = None): + # If we are going to talk to a remote app via DDE, then + # activate it! + if len(args)<1 or not args[0]: # argv[0]=='' when started without args, just like Python.exe! + return + + i = 0 + while i < len(args): + argType = args[i] + i += 1 + if argType.startswith('-'): + # Support dash options. Slash options are misinterpreted by python init + # as path and not finding usually 'C:\\' ends up in sys.path[0] + argType = '/' + argType[1:] + if not argType.startswith('/'): + argType = win32ui.GetProfileVal("Python","Default Arg Type","/edit").lower() + i -= 1 # arg is /edit's parameter + par = i < len(args) and args[i] or 'MISSING' + if argType in ['/nodde', '/new', '-nodde', '-new']: + # Already handled + pass + elif argType.startswith('/goto:'): + gotoline = int(argType[len('/goto:'):]) + if dde: + dde.Exec("from pywin.framework import scriptutils\n" + "ed = scriptutils.GetActiveEditControl()\n" + "if ed: ed.SetSel(ed.LineIndex(%s - 1))" % gotoline) + else: + from . import scriptutils + ed = scriptutils.GetActiveEditControl() + if ed: ed.SetSel(ed.LineIndex(gotoline - 1)) + elif argType == "/edit": + # Load up the default application. + i += 1 + fname = win32api.GetFullPathName(par) + if not os.path.isfile(fname): + # if we don't catch this, OpenDocumentFile() (actually + # PyCDocument.SetPathName() in + # pywin.scintilla.document.CScintillaDocument.OnOpenDocument) + # segfaults Pythonwin on recent PY3 builds (b228) + win32ui.MessageBox( + "No such file: %s\n\nCommand Line: %s" % ( + fname, win32api.GetCommandLine()), + "Open file for edit", win32con.MB_ICONERROR) + continue + if dde: + dde.Exec("win32ui.GetApp().OpenDocumentFile(%s)" % (repr(fname))) + else: + win32ui.GetApp().OpenDocumentFile(par) + elif argType=="/rundlg": + if dde: + dde.Exec("from pywin.framework import scriptutils;scriptutils.RunScript(%r, %r, 1)" % (par, ' '.join(args[i + 1:]))) + else: + from . import scriptutils + scriptutils.RunScript(par, ' '.join(args[i + 1:])) + return + elif argType=="/run": + if dde: + dde.Exec("from pywin.framework import scriptutils;scriptutils.RunScript(%r, %r, 0)" % (par, ' '.join(args[i + 1:]))) + else: + from . 
import scriptutils + scriptutils.RunScript(par, ' '.join(args[i + 1:]), 0) + return + elif argType=="/app": + raise RuntimeError("/app only supported for new instances of Pythonwin.exe") + elif argType=='/dde': # Send arbitary command + if dde is not None: + dde.Exec(par) + else: + win32ui.MessageBox("The /dde command can only be used\r\nwhen Pythonwin is already running") + i += 1 + else: + raise ValueError("Command line argument not recognised: %s" % argType) + + + def LoadSystemModules(self): + self.DoLoadModules("pywin.framework.editor,pywin.framework.stdin") + + def LoadUserModules(self, moduleNames = None): + # Load the users modules. + if moduleNames is None: + default = "pywin.framework.sgrepmdi,pywin.framework.mdi_pychecker" + moduleNames=win32ui.GetProfileVal('Python','Startup Modules',default) + self.DoLoadModules(moduleNames) + + def DoLoadModules(self, moduleNames): # ", sep string of module names. + if not moduleNames: return + modules = moduleNames.split(",") + for module in modules: + try: + __import__(module) + except: # Catch em all, else the app itself dies! 'ImportError: + traceback.print_exc() + msg = 'Startup import of user module "%s" failed' % module + print(msg) + win32ui.MessageBox(msg) + + # + # DDE Callback + # + def OnDDECommand(self, command): + try: + exec(command + "\n") + except: + print("ERROR executing DDE command: ", command) + traceback.print_exc() + raise + + # + # General handlers + # + def OnViewBrowse( self, id, code ): + " Called when ViewBrowse message is received " + from pywin.tools import browser + obName = dialog.GetSimpleInput('Object', '__builtins__', 'Browse Python Object') + if obName is None: + return + try: + browser.Browse(eval(obName, __main__.__dict__, __main__.__dict__)) + except NameError: + win32ui.MessageBox('This is no object with this name') + except AttributeError: + win32ui.MessageBox('The object has no attribute of that name') + except: + traceback.print_exc() + win32ui.MessageBox('This object can not be browsed') + + def OnFileImport( self, id, code ): + " Called when a FileImport message is received. Import the current or specified file" + from . import scriptutils + scriptutils.ImportFile() + + def OnFileCheck( self, id, code ): + " Called when a FileCheck message is received. Check the current file." + from . import scriptutils + scriptutils.CheckFile() + + def OnUpdateFileCheck(self, cmdui): + from . import scriptutils + cmdui.Enable( scriptutils.GetActiveFileName(0) is not None ) + + def OnFileRun( self, id, code ): + " Called when a FileRun message is received. " + from . import scriptutils + showDlg = win32api.GetKeyState(win32con.VK_SHIFT) >= 0 + scriptutils.RunScript(None, None, showDlg) + + def OnFileLocate( self, id, code ): + from . import scriptutils + global lastLocateFileName # save the new version away for next time... + + name = dialog.GetSimpleInput('File name', lastLocateFileName, 'Locate Python File') + if name is None: # Cancelled. + return + lastLocateFileName = name + # if ".py" supplied, rip it off! 
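
# ---- Illustrative sketch, not part of the diff: the DoLoadModules pattern in
# isolation - a comma-separated module list where each import is attempted
# separately, so one broken startup module cannot stop the rest from loading.
# importlib is used here instead of the bare __import__ call in the original.
import importlib
import traceback

def load_modules(module_names):
    loaded = []
    for name in filter(None, (m.strip() for m in module_names.split(","))):
        try:
            loaded.append(importlib.import_module(name))
        except Exception:
            traceback.print_exc()
            print('Startup import of user module "%s" failed' % name)
    return loaded

load_modules("json,xml.etree.ElementTree,definitely_not_a_module")
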
+ # should also check for .pys and .pyw + if lastLocateFileName[-3:].lower()=='.py': + lastLocateFileName = lastLocateFileName[:-3] + lastLocateFileName = lastLocateFileName.replace(".","\\") + newName = scriptutils.LocatePythonFile(lastLocateFileName) + if newName is None: + win32ui.MessageBox("The file '%s' can not be located" % lastLocateFileName) + else: + win32ui.GetApp().OpenDocumentFile(newName) + + # Display all the "options" proprety pages we can find + def OnViewOptions(self, id, code): + win32ui.InitRichEdit() + sheet = dialog.PropertySheet("Pythonwin Options") + # Add property pages we know about that need manual work. + from pywin.dialogs import ideoptions + sheet.AddPage( ideoptions.OptionsPropPage() ) + + from . import toolmenu + sheet.AddPage( toolmenu.ToolMenuPropPage() ) + + # Get other dynamic pages from templates. + pages = [] + for template in self.GetDocTemplateList(): + try: + # Dont actually call the function with the exception handler. + getter = template.GetPythonPropertyPages + except AttributeError: + # Template does not provide property pages! + continue + pages = pages + getter() + + # Debugger template goes at the end + try: + from pywin.debugger import configui + except ImportError: + configui = None + if configui is not None: pages.append(configui.DebuggerOptionsPropPage()) + # Now simply add the pages, and display the dialog. + for page in pages: + sheet.AddPage(page) + + if sheet.DoModal()==win32con.IDOK: + win32ui.SetStatusText("Applying configuration changes...", 1) + win32ui.DoWaitCursor(1) + # Tell every Window in our app that win.ini has changed! + win32ui.GetMainFrame().SendMessageToDescendants(win32con.WM_WININICHANGE, 0, 0) + win32ui.DoWaitCursor(0) + + def OnInteractiveWindow(self, id, code): + # toggle the existing state. + from . import interact + interact.ToggleInteractiveWindow() + + def OnUpdateInteractiveWindow(self, cmdui): + try: + interact=sys.modules['pywin.framework.interact'] + state = interact.IsInteractiveWindowVisible() + except KeyError: # Interactive module hasnt ever been imported. + state = 0 + cmdui.Enable() + cmdui.SetCheck(state) + + def OnFileSaveAll(self, id, code): + # Only attempt to save editor documents. + from pywin.framework.editor import editorTemplate + num = 0 + for doc in editorTemplate.GetDocumentList(): + if doc.IsModified() and doc.GetPathName(): + num = num = 1 + doc.OnSaveDocument(doc.GetPathName()) + win32ui.SetStatusText("%d documents saved" % num, 1) + + def OnViewToolbarDbg(self, id, code): + if code==0: + return not win32ui.GetMainFrame().OnBarCheck(id) + + def OnUpdateViewToolbarDbg(self, cmdui): + win32ui.GetMainFrame().OnUpdateControlBarMenu(cmdui) + cmdui.Enable(1) + + def OnHelpIndex( self, id, code ): + from . import help + help.SelectAndRunHelpFile() + +# As per the comments in app.py, this use is depreciated. +# app.AppBuilder = InteractivePythonApp + +# Now all we do is create the application +thisApp = InteractivePythonApp() diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/intpydde.py b/venv/Lib/site-packages/pythonwin/pywin/framework/intpydde.py new file mode 100644 index 00000000..40c7415f --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/intpydde.py @@ -0,0 +1,56 @@ +# DDE support for Pythonwin +# +# Seems to work fine (in the context that IE4 seems to have broken +# DDE on _all_ NT4 machines I have tried, but only when a "Command Prompt" window +# is open. Strange, but true. If you have problems with this, close all Command Prompts! 
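
# ---- Illustrative sketch, condensed from InitDDE / MakeExistingDDEConnection
# above and assuming the pywin32 "dde" extension exposes the names used by
# intpydde (CreateServer, CreateConversation, CBF_FAIL_SELFCONNECTIONS, error):
# the single-instance handshake. Each new process registers a "Pythonwin" DDE
# server that refuses self-connections, then tries to open a conversation with
# an already running "Pythonwin" server; if that succeeds it pumps a command
# across and can exit instead of starting a second GUI.
import dde

server = dde.CreateServer()
server.Create("Pythonwin", dde.CBF_FAIL_SELFCONNECTIONS)
conversation = dde.CreateConversation(server)
try:
    conversation.ConnectTo("Pythonwin", "System")
    conversation.Exec("self.Activate()")     # ask the running instance to raise its window
    already_running = True
except dde.error:
    already_running = False                  # we are the first instance
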
+ + +import win32ui +import win32api, win32con +from pywin.mfc import object +from dde import * +import sys, traceback + +class DDESystemTopic(object.Object): + def __init__(self, app): + self.app = app + object.Object.__init__(self, CreateServerSystemTopic()) + def Exec(self, data): + try: +# print "Executing", cmd + self.app.OnDDECommand(data) + except: + t,v,tb = sys.exc_info() + # The DDE Execution failed. + print("Error executing DDE command.") + traceback.print_exception(t,v,tb) + return 0 + +class DDEServer(object.Object): + def __init__(self, app): + self.app = app + object.Object.__init__(self, CreateServer()) + self.topic = self.item = None + + def CreateSystemTopic(self): + return DDESystemTopic(self.app) + + def Shutdown(self): + self._obj_.Shutdown() + self._obj_.Destroy() + if self.topic is not None: + self.topic.Destroy() + self.topic = None + if self.item is not None: + self.item.Destroy() + self.item = None + + def OnCreate(self): + return 1 + + def Status(self, msg): + try: + win32ui.SetStatusText(msg) + except win32ui.error: + pass + diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/mdi_pychecker.py b/venv/Lib/site-packages/pythonwin/pywin/framework/mdi_pychecker.py new file mode 100644 index 00000000..4d41332f --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/mdi_pychecker.py @@ -0,0 +1,629 @@ +###################################################################### +## +## The Pychecker MDI Plug-In UserModule for Pythonwin +## +## contributed by Robert Kiendl +## +## Style is similar to (and inherited) from the SGrepMDI UserModule +## +## Usage: +## +## Start Pychecker on current file: Menu/File/New../Pychecker. +## Use it: Jump to Pychecker warning source lines by double-click. +## Auto-add "#$pycheck_no" / "#$pycheck_no=specific-re-pattern" tags +## to source lines by context/right-mouse-click on warning lines. +## +## It requires pychecker installed and the pychecker.bat to be on +## the PATH. Example pychecker.bat: +## +## REM pychecker.bat +## C:\bin\python.exe C:\PYTHON23\Lib\site-packages\pychecker\checker.py %1 %2 %3 %4 %5 %6 %7 %8 %9 +## +## Adding it as default module in PythonWin: +## +## +++ ./intpyapp.py 2006-10-02 17:59:32.974161600 +0200 +## @@ -272,7 +282,7 @@ +## def LoadUserModules(self, moduleNames = None): +## # Load the users modules. +## if moduleNames is None: +## - default = "sgrepmdi" +## + default = "sgrepmdi,mdi_pychecker" +## moduleNames=win32ui.GetProfileVal('Python','Startup Modules',default) +## self.DoLoadModules(moduleNames) +## +###################################################################### + +import win32ui +import win32api +from pywin.mfc import docview, dialog, window +import win32con +import sys, string, re, glob, os, stat, time +from . 
import scriptutils + +def getsubdirs(d): + dlist = [] + flist = glob.glob(d+'\\*') + for f in flist: + if os.path.isdir(f): + dlist.append(f) + dlist = dlist + getsubdirs(f) + return dlist + +class dirpath: + def __init__(self, str, recurse=0): + dp = str.split(';') + dirs = {} + for d in dp: + if os.path.isdir(d): + d = d.lower() + if d not in dirs: + dirs[d] = None + if recurse: + subdirs = getsubdirs(d) + for sd in subdirs: + sd = sd.lower() + if sd not in dirs: + dirs[sd] = None + elif os.path.isfile(d): + pass + else: + x = None + if d in os.environ: + x = dirpath(os.environ[d]) + elif d[:5] == 'HKEY_': + keystr = d.split('\\') + try: + root = eval('win32con.'+keystr[0]) + except: + win32ui.MessageBox("Can't interpret registry key name '%s'" % keystr[0]) + try: + subkey = '\\'.join(keystr[1:]) + val = win32api.RegQueryValue(root, subkey) + if val: + x = dirpath(val) + else: + win32ui.MessageBox("Registry path '%s' did not return a path entry" % d) + except: + win32ui.MessageBox("Can't interpret registry key value: %s" % keystr[1:]) + else: + win32ui.MessageBox("Directory '%s' not found" % d) + if x: + for xd in x: + if xd not in dirs: + dirs[xd] = None + if recurse: + subdirs = getsubdirs(xd) + for sd in subdirs: + sd = sd.lower() + if sd not in dirs: + dirs[sd] = None + self.dirs = [] + for d in dirs.keys(): + self.dirs.append(d) + + def __getitem__(self, key): + return self.dirs[key] + def __len__(self): + return len(self.dirs) + def __setitem__(self, key, value): + self.dirs[key] = value + def __delitem__(self, key): + del self.dirs[key] + def __getslice__(self, lo, hi): + return self.dirs[lo:hi] + def __setslice__(self, lo, hi, seq): + self.dirs[lo:hi] = seq + def __delslice__(self, lo, hi): + del self.dirs[lo:hi] + def __add__(self, other): + if type(other) == type(self) or type(other) == type([]): + return self.dirs + other.dirs + def __radd__(self, other): + if type(other) == type(self) or type(other) == type([]): + return other.dirs + self.dirs + +# Group(1) is the filename, group(2) is the lineno. +#regexGrepResult=regex.compile("^\\([a-zA-Z]:.*\\)(\\([0-9]+\\))") +#regexGrep=re.compile(r"^([a-zA-Z]:[^(]*)\((\d+)\)") +regexGrep=re.compile(r"^(..[^\(:]+)?[\(:](\d+)[\):]:?\s*(.*)") + +#these are the atom numbers defined by Windows for basic dialog controls + +BUTTON = 0x80 +EDIT = 0x81 +STATIC = 0x82 +LISTBOX = 0x83 +SCROLLBAR = 0x84 +COMBOBOX = 0x85 + +class TheTemplate(docview.RichEditDocTemplate): + def __init__(self): + docview.RichEditDocTemplate.__init__(self, win32ui.IDR_TEXTTYPE, TheDocument, TheFrame, TheView) + self.SetDocStrings("\nPychecker\nPychecker\nPychecker params (*.pychecker)\n.pychecker\n\n\n") + win32ui.GetApp().AddDocTemplate(self) + self.docparams = None + + def MatchDocType(self, fileName, fileType): + doc = self.FindOpenDocument(fileName) + if doc: return doc + ext = os.path.splitext(fileName)[1].lower() + if ext =='.pychecker': + return win32ui.CDocTemplate_Confidence_yesAttemptNative + return win32ui.CDocTemplate_Confidence_noAttempt + + def setParams(self, params): + self.docparams = params + + def readParams(self): + tmp = self.docparams + self.docparams = None + return tmp + +class TheFrame(window.MDIChildWnd): + # The template and doc params will one day be removed. 
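
# ---- Illustrative sketch, not part of the diff: what the dirpath class above
# boils down to for the common case - expand a ';'-separated directory spec,
# optionally recurse into subdirectories, and de-duplicate case-insensitively.
# os.walk replaces the hand-rolled getsubdirs() helper; the environment-variable
# and registry lookups of the original are left out.
import os

def expand_dirs(spec, recurse=False):
    seen = {}
    for d in spec.split(";"):
        if not os.path.isdir(d):
            continue
        seen.setdefault(d.lower(), None)
        if recurse:
            for root, subdirs, _files in os.walk(d):
                for sd in subdirs:
                    seen.setdefault(os.path.join(root, sd).lower(), None)
    return list(seen)

print(expand_dirs(r"C:\Temp;C:\does-not-exist", recurse=True))
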
+ def __init__(self, wnd = None): + window.MDIChildWnd.__init__(self, wnd) + +class TheDocument(docview.RichEditDoc): + def __init__(self, template): + docview.RichEditDoc.__init__(self, template) + self.dirpattern = '' + self.filpattern = '' + self.greppattern = '' + self.casesensitive = 1 + self.recurse = 1 + self.verbose = 0 + + def OnOpenDocument(self, fnm): + #this bizarre stuff with params is so right clicking in a result window + #and starting a new grep can communicate the default parameters to the + #new grep. + try: + params = open(fnm,'r').read() + except: + params = None + self.setInitParams(params) + return self.OnNewDocument() + + def OnCloseDocument(self): + try: + win32ui.GetApp().DeleteIdleHandler(self.idleHandler) + except: + pass + return self._obj_.OnCloseDocument() + + def saveInitParams(self): + # Only save the flags, not the text boxes. + paramstr = "\t\t\t%d\t%d" % (self.casesensitive, self.recurse) + win32ui.WriteProfileVal("Pychecker", "Params", paramstr) + + def setInitParams(self, paramstr): + if paramstr is None: + paramstr = win32ui.GetProfileVal("Pychecker", "Params", '\t\t\t1\t0\t0') + params = paramstr.split('\t') + if len(params) < 3: + params = params + ['']*(3-len(params)) + if len(params) < 6: + params = params + [0]*(6-len(params)) + self.dirpattern = params[0] + self.filpattern = params[1] + self.greppattern = params[2] or '-#1000 --only' + self.casesensitive = int(params[3]) + self.recurse = int(params[4]) + self.verbose = int(params[5]) + # setup some reasonable defaults. + if not self.dirpattern: + try: + editor=win32ui.GetMainFrame().MDIGetActive()[0].GetEditorView() + self.dirpattern=os.path.abspath(os.path.dirname(editor.GetDocument().GetPathName())) + except (AttributeError,win32ui.error): + self.dirpattern = os.getcwd() + if not self.filpattern: + try: + editor=win32ui.GetMainFrame().MDIGetActive()[0].GetEditorView() + self.filpattern=editor.GetDocument().GetPathName() + except AttributeError: + self.filpattern = "*.py" + + def OnNewDocument(self): + if self.dirpattern == '': + self.setInitParams(greptemplate.readParams()) + d = TheDialog(self.dirpattern, self.filpattern, self.greppattern, self.casesensitive, self.recurse, self.verbose) + if d.DoModal() == win32con.IDOK: + self.dirpattern = d['dirpattern'] + self.filpattern = d['filpattern'] + self.greppattern = d['greppattern'] + #self.casesensitive = d['casesensitive'] + #self.recurse = d['recursive'] + #self.verbose = d['verbose'] + self.doSearch() + self.saveInitParams() + return 1 + return 0 # cancelled - return zero to stop frame creation. + + def doSearch(self): + self.dp = dirpath(self.dirpattern, self.recurse) + self.SetTitle("Pychecker Run '%s' (options: %s)" % (self.filpattern, self.greppattern)) + #self.text = [] + self.GetFirstView().Append('#Pychecker Run in '+self.dirpattern+' %s\n'%time.asctime()) + if self.verbose: + self.GetFirstView().Append('# ='+repr(self.dp.dirs)+'\n') + self.GetFirstView().Append('# Files '+self.filpattern+'\n') + self.GetFirstView().Append('# Options '+self.greppattern+'\n') + self.fplist = self.filpattern.split(';') + self.GetFirstView().Append('# Running... ( double click on result lines in order to jump to the source code ) \n') + win32ui.SetStatusText("Pychecker running. 
Please wait...", 0) + self.dpndx = self.fpndx = 0 + self.fndx = -1 + if not self.dp: + self.GetFirstView().Append("# ERROR: '%s' does not resolve to any search locations" % self.dirpattern) + self.SetModifiedFlag(0) + else: + ##self.flist = glob.glob(self.dp[0]+'\\'+self.fplist[0]) + import operator + self.flist = reduce(operator.add, list(map(glob.glob,self.fplist)) ) + #import pywin.debugger;pywin.debugger.set_trace() + self.startPycheckerRun() + def idleHandler(self,handler,count): + import time + time.sleep(0.001) + if self.result!=None: + win32ui.GetApp().DeleteIdleHandler(self.idleHandler) + return 0 + return 1 #more + def startPycheckerRun(self): + self.result=None + old=win32api.SetCursor(win32api.LoadCursor(0, win32con.IDC_APPSTARTING)) + win32ui.GetApp().AddIdleHandler(self.idleHandler) + import _thread + _thread.start_new(self.threadPycheckerRun,()) + ##win32api.SetCursor(old) + def threadPycheckerRun(self): + result='' + rc=-1 + try: + options = self.greppattern + files= ' '.join(self.flist) + # Recently MarkH has failed to run pychecker without it having + # been explicitly installed - so we assume it is and locate it + # from its default location. + # Step1 - get python.exe + py = os.path.join(sys.prefix, 'python.exe') + if not os.path.isfile(py): + if "64 bit" in sys.version: + py = os.path.join(sys.prefix, 'PCBuild', 'amd64', 'python.exe') + else: + py = os.path.join(sys.prefix, 'PCBuild', 'python.exe') + try: + py = win32api.GetShortPathName(py) + except win32api.error: + py = "" + # Find checker.py + from distutils.sysconfig import get_python_lib + pychecker = os.path.join(get_python_lib(), 'pychecker', 'checker.py') + if not os.path.isfile(py): + result = "Can't find python.exe!\n" + elif not os.path.isfile(pychecker): + result = "Can't find checker.py - please install pychecker " \ + "(or run 'setup.py install' if you have the source version)\n" + else: + cmd='%s "%s" %s %s 2>&1' % (py, pychecker, options,files) + ##fin,fout,ferr=os.popen3(cmd) + ##result=ferr.read()+fout.read() + result=os.popen(cmd).read() + ##rc=f.close() + self.GetFirstView().Append(result) + finally: + self.result=result + print('== Pychecker run finished ==') + self.GetFirstView().Append('\n'+'== Pychecker run finished ==') + self.SetModifiedFlag(0) + def _inactive_idleHandler(self, handler, count): + self.fndx = self.fndx + 1 + if self.fndx < len(self.flist): + f = self.flist[self.fndx] + if self.verbose: + self.GetFirstView().Append('# ..'+f+'\n') + win32ui.SetStatusText("Searching "+f, 0) + lines = open(f, 'r').readlines() + for i in range(len(lines)): + line = lines[i] + if self.pat.search(line) != None: + self.GetFirstView().Append(f+'('+repr(i+1) + ') '+line) + else: + self.fndx = -1 + self.fpndx = self.fpndx + 1 + if self.fpndx < len(self.fplist): + self.flist = glob.glob(self.dp[self.dpndx] + '\\' + self.fplist[self.fpndx]) + else: + self.fpndx = 0 + self.dpndx = self.dpndx + 1 + if self.dpndx < len(self.dp): + self.flist = glob.glob(self.dp[self.dpndx] + '\\' + self.fplist[self.fpndx]) + else: + win32ui.SetStatusText("Search complete.", 0) + self.SetModifiedFlag(0) # default to not modified. 
+ try: + win32ui.GetApp().DeleteIdleHandler(self.idleHandler) + except: + pass + return 0 + return 1 + + def GetParams(self): + return self.dirpattern+'\t'+self.filpattern+'\t'+self.greppattern+'\t'+repr(self.casesensitive)+'\t'+repr(self.recurse)+'\t'+repr(self.verbose) + + def OnSaveDocument(self, filename): +# print 'OnSaveDocument() filename=',filename + savefile = open(filename,"wb") + txt = self.GetParams()+'\n' +# print 'writing',txt + savefile.write(txt) + savefile.close() + self.SetModifiedFlag(0) + return 1 + +ID_OPEN_FILE = 0xe500 +ID_PYCHECKER = 0xe501 +ID_SAVERESULTS = 0x502 +ID_TRYAGAIN = 0x503 +ID_ADDCOMMENT = 0x504 +ID_ADDPYCHECKNO2 = 0x505 + +class TheView(docview.RichEditView): + def __init__(self, doc): + docview.RichEditView.__init__(self, doc) + self.SetWordWrap(win32ui.CRichEditView_WrapNone) + self.HookHandlers() + + def OnInitialUpdate(self): + rc = self._obj_.OnInitialUpdate() + format = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New') + self.SetDefaultCharFormat(format) + return rc + + def HookHandlers(self): + self.HookMessage(self.OnRClick, win32con.WM_RBUTTONDOWN) + self.HookCommand(self.OnCmdOpenFile, ID_OPEN_FILE) + self.HookCommand(self.OnCmdThe, ID_PYCHECKER) + self.HookCommand(self.OnCmdSave, ID_SAVERESULTS) + self.HookCommand(self.OnTryAgain, ID_TRYAGAIN) + self.HookCommand(self.OnAddComment, ID_ADDCOMMENT) + self.HookCommand(self.OnAddComment, ID_ADDPYCHECKNO2) + self.HookMessage(self.OnLDblClick,win32con.WM_LBUTTONDBLCLK) + + def OnLDblClick(self,params): + line = self.GetLine() + regexGrepResult = regexGrep.match(line) + if regexGrepResult: + fname = regexGrepResult.group(1) + line = int(regexGrepResult.group(2)) + scriptutils.JumpToDocument(fname, line) + return 0 # dont pass on + return 1 # pass it on by default. 
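
# ---- Illustrative sketch, not part of the diff: what the regexGrep pattern
# used by OnLDblClick extracts from typical checker/grep result lines - the
# file name, the line number and the message - for both the "file(123): msg"
# and the "file:123: msg" shapes.
import re

regexGrep = re.compile(r"^(..[^\(:]+)?[\(:](\d+)[\):]:?\s*(.*)")

for line in (r"C:\src\autotimer.py(42): warning: unused variable 'x'",
             "autotimer.py:42: warning: unused variable 'x'"):
    m = regexGrep.match(line)
    if m:
        fname, lineno, message = m.group(1), int(m.group(2)), m.group(3)
        print(fname, lineno, message)
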
+ + def OnRClick(self, params): + menu = win32ui.CreatePopupMenu() + flags=win32con.MF_STRING|win32con.MF_ENABLED + lineno = self._obj_.LineFromChar(-1) #selection or current line + line = self._obj_.GetLine(lineno) + regexGrepResult = regexGrep.match(line) + charstart, charend = self._obj_.GetSel() + if regexGrepResult: + self.fnm = regexGrepResult.group(1) + self.lnnum = int(regexGrepResult.group(2)) + menu.AppendMenu(flags, ID_OPEN_FILE, "&Open "+self.fnm) + menu.AppendMenu(flags, ID_ADDCOMMENT, "&Add to source: Comment Tag/#$pycheck_no ..") + menu.AppendMenu(flags, ID_ADDPYCHECKNO2, "&Add to source: Specific #$pycheck_no=%(errtext)s ..") + menu.AppendMenu(win32con.MF_SEPARATOR) + menu.AppendMenu(flags, ID_TRYAGAIN, "&Try Again") + menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, 'Cu&t') + menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, '&Copy') + menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, '&Paste') + menu.AppendMenu(flags, win32con.MF_SEPARATOR); + menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all') + menu.AppendMenu(flags, win32con.MF_SEPARATOR); + menu.AppendMenu(flags, ID_SAVERESULTS, 'Sa&ve results') + menu.TrackPopupMenu(params[5]) + return 0 + + def OnAddComment(self, cmd, code): + addspecific= cmd==ID_ADDPYCHECKNO2 + _=list(self.GetSel()) + _.sort() + start,end=_ + line_start, line_end = self.LineFromChar(start), self.LineFromChar(end) + first=1 + for i in range(line_start,line_end+1): + line = self.GetLine(i) + m = regexGrep.match(line) + if m: + if first: + first=0 + cmnt=dialog.GetSimpleInput( "Add to %s lines" % (line_end-line_start+1), + addspecific and " #$pycheck_no=%(errtext)s" or " #$pycheck_no" ) + if not cmnt: + return 0 + ##import pywin.debugger;pywin.debugger.set_trace() + fname = m.group(1) + line = int(m.group(2)) + view = scriptutils.JumpToDocument(fname,line) + pos=view.LineIndex(line)-1 + if view.GetTextRange(pos-1,pos) in ('\r','\n'): + pos -= 1 + view.SetSel(pos, pos) + errtext=m.group(3) + if start!=end and line_start==line_end: + errtext=self.GetSelText() + errtext=repr(re.escape(errtext).replace('\ ',' ')) + view.ReplaceSel( addspecific and cmnt % locals() + or cmnt ) + return 0 + + + def OnCmdOpenFile(self, cmd, code): + doc = win32ui.GetApp().OpenDocumentFile(self.fnm) + if doc: + vw = doc.GetFirstView() + #hope you have an editor that implements GotoLine()! 
+ try: + vw.GotoLine(int(self.lnnum)) + except: + pass + return 0 + + def OnCmdThe(self, cmd, code): + curparamsstr = self.GetDocument().GetParams() + params = curparamsstr.split('\t') + params[2] = self.sel + greptemplate.setParams('\t'.join(params)) + greptemplate.OpenDocumentFile() + return 0 + + def OnTryAgain(self, cmd, code): + greptemplate.setParams(self.GetDocument().GetParams()) + greptemplate.OpenDocumentFile() + return 0 + + def OnCmdSave(self, cmd, code): + flags = win32con.OFN_OVERWRITEPROMPT + dlg = win32ui.CreateFileDialog(0, None, None, flags, "Text Files (*.txt)|*.txt||", self) + dlg.SetOFNTitle("Save Results As") + if dlg.DoModal() == win32con.IDOK: + pn = dlg.GetPathName() + self._obj_.SaveFile(pn) + return 0 + + def Append(self, strng): + numlines = self.GetLineCount() + endpos = self.LineIndex(numlines-1) + len(self.GetLine(numlines-1)) + self.SetSel(endpos, endpos) + self.ReplaceSel(strng) + + +class TheDialog(dialog.Dialog): + def __init__(self, dp, fp, gp, cs, r, v): + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + CS = win32con.WS_CHILD | win32con.WS_VISIBLE + tmp = [ ["Pychecker Run", (0, 0, 210, 90), style, None, (8, "MS Sans Serif")], ] + tmp.append([STATIC, "Files:", -1, (7, 7, 50, 9), CS ]) + tmp.append([EDIT, gp, 103, (52, 7, 144, 11), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER]) + tmp.append([STATIC, "Directories:", -1, (7, 20, 50, 9), CS ]) + tmp.append([EDIT, dp, 102, (52, 20, 128, 11), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER]) + tmp.append([BUTTON, '...', 110, (182,20, 16, 11), CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP]) + tmp.append([STATIC, "Options:", -1, (7, 33, 50, 9), CS ]) + tmp.append([EDIT, fp, 101, (52, 33, 128, 11), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER ]) + tmp.append([BUTTON, '...', 111, (182,33, 16, 11), CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP]) + #tmp.append([BUTTON,'Case sensitive', 104, (7, 45, 72, 9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP]) + #tmp.append([BUTTON,'Subdirectories', 105, (7, 56, 72, 9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP]) + #tmp.append([BUTTON,'Verbose', 106, (7, 67, 72, 9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP]) + tmp.append([BUTTON,'OK', win32con.IDOK, (166,53, 32, 12), CS | win32con.BS_DEFPUSHBUTTON| win32con.WS_TABSTOP]) + tmp.append([BUTTON,'Cancel', win32con.IDCANCEL, (166,67, 32, 12), CS | win32con.BS_PUSHBUTTON| win32con.WS_TABSTOP]) + dialog.Dialog.__init__(self, tmp) + self.AddDDX(101,'greppattern') + self.AddDDX(102,'dirpattern') + self.AddDDX(103,'filpattern') + #self.AddDDX(104,'casesensitive') + #self.AddDDX(105,'recursive') + #self.AddDDX(106,'verbose') + self._obj_.data['greppattern'] = gp + self._obj_.data['dirpattern'] = dp + self._obj_.data['filpattern'] = fp + #self._obj_.data['casesensitive'] = cs + #self._obj_.data['recursive'] = r + #self._obj_.data['verbose'] = v + self.HookCommand(self.OnMoreDirectories, 110) + self.HookCommand(self.OnMoreFiles, 111) + + def OnMoreDirectories(self, cmd, code): + self.getMore('Pychecker\\Directories', 'dirpattern') + + def OnMoreFiles(self, cmd, code): + self.getMore('Pychecker\\File Types', 'filpattern') + + def getMore(self, section, key): + self.UpdateData(1) + #get the items out of the ini file + ini = win32ui.GetProfileFileName() + secitems = 
win32api.GetProfileSection(section, ini) + items = [] + for secitem in secitems: + items.append(secitem.split('=')[1]) + dlg = TheParamsDialog(items) + if dlg.DoModal() == win32con.IDOK: + itemstr = ';'.join(dlg.getItems()) + self._obj_.data[key] = itemstr + #update the ini file with dlg.getNew() + i = 0 + newitems = dlg.getNew() + if newitems: + items = items + newitems + for item in items: + win32api.WriteProfileVal(section, repr(i), item, ini) + i = i + 1 + self.UpdateData(0) + + def OnOK(self): + self.UpdateData(1) + for id, name in [(101,'greppattern'), (102,'dirpattern'), (103,'filpattern')]: + if not self[name]: + self.GetDlgItem(id).SetFocus() + win32api.MessageBeep() + win32ui.SetStatusText("Please enter a value") + return + self._obj_.OnOK() + +class TheParamsDialog(dialog.Dialog): + def __init__(self, items): + self.items = items + self.newitems = [] + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + CS = win32con.WS_CHILD | win32con.WS_VISIBLE + tmp = [ ["Pychecker Parameters", (0, 0, 205, 100), style, None, (8, "MS Sans Serif")], ] + tmp.append([LISTBOX, '', 107, (7, 7, 150, 72), CS | win32con.LBS_MULTIPLESEL| win32con.LBS_STANDARD | win32con.LBS_HASSTRINGS | win32con.WS_TABSTOP | win32con.LBS_NOTIFY]) + tmp.append([BUTTON,'OK', win32con.IDOK, (167, 7, 32, 12), CS | win32con.BS_DEFPUSHBUTTON| win32con.WS_TABSTOP]) + tmp.append([BUTTON,'Cancel', win32con.IDCANCEL, (167,23, 32, 12), CS | win32con.BS_PUSHBUTTON| win32con.WS_TABSTOP]) + tmp.append([STATIC,'New:', -1, (2, 83, 15, 12), CS]) + tmp.append([EDIT, '', 108, (18, 83, 139, 12), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER]) + tmp.append([BUTTON,'Add', 109, (167,83, 32, 12), CS | win32con.BS_PUSHBUTTON| win32con.WS_TABSTOP]) + dialog.Dialog.__init__(self, tmp) + self.HookCommand(self.OnAddItem, 109) + self.HookCommand(self.OnListDoubleClick, 107) + + def OnInitDialog(self): + lb = self.GetDlgItem(107) + for item in self.items: + lb.AddString(item) + return self._obj_.OnInitDialog() + + def OnAddItem(self, cmd, code): + eb = self.GetDlgItem(108) + item = eb.GetLine(0) + self.newitems.append(item) + lb = self.GetDlgItem(107) + i = lb.AddString(item) + lb.SetSel(i, 1) + return 1 + + def OnListDoubleClick(self, cmd, code): + if code == win32con.LBN_DBLCLK: + self.OnOK() + return 1 + + def OnOK(self): + lb = self.GetDlgItem(107) + self.selections = lb.GetSelTextItems() + self._obj_.OnOK() + + def getItems(self): + return self.selections + + def getNew(self): + return self.newitems + +try: + win32ui.GetApp().RemoveDocTemplate(greptemplate) +except NameError: + pass + +greptemplate = TheTemplate() diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/scriptutils.py b/venv/Lib/site-packages/pythonwin/pywin/framework/scriptutils.py new file mode 100644 index 00000000..0f862a49 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/scriptutils.py @@ -0,0 +1,617 @@ +""" +Various utilities for running/importing a script +""" +import sys +import win32ui +import win32api +import win32con +import __main__ +from pywin.mfc import dialog +from pywin.mfc.docview import TreeView +import os +import string +import traceback +import linecache +import bdb + +from .cmdline import ParseArgs + +RS_DEBUGGER_NONE=0 # Dont run under the debugger. +RS_DEBUGGER_STEP=1 # Start stepping under the debugger +RS_DEBUGGER_GO=2 # Just run under the debugger, stopping only at break-points. 
+RS_DEBUGGER_PM=3 # Dont run under debugger, but do post-mortem analysis on exception. + +debugging_options = """No debugging +Step-through in the debugger +Run in the debugger +Post-Mortem of unhandled exceptions""".split("\n") + +byte_cr = "\r".encode("ascii") +byte_lf = "\n".encode("ascii") +byte_crlf = "\r\n".encode("ascii") + +# A dialog box for the "Run Script" command. +class DlgRunScript(dialog.Dialog): + "A class for the 'run script' dialog" + def __init__(self, bHaveDebugger): + dialog.Dialog.__init__(self, win32ui.IDD_RUN_SCRIPT ) + self.AddDDX(win32ui.IDC_EDIT1, "script") + self.AddDDX(win32ui.IDC_EDIT2, "args") + self.AddDDX(win32ui.IDC_COMBO1, "debuggingType", "i") + self.HookCommand(self.OnBrowse, win32ui.IDC_BUTTON2) + self.bHaveDebugger = bHaveDebugger + def OnInitDialog(self): + rc = dialog.Dialog.OnInitDialog(self) + cbo = self.GetDlgItem(win32ui.IDC_COMBO1) + for o in debugging_options: + cbo.AddString(o) + cbo.SetCurSel(self['debuggingType']) + if not self.bHaveDebugger: + cbo.EnableWindow(0) + + def OnBrowse(self, id, cmd): + openFlags = win32con.OFN_OVERWRITEPROMPT|win32con.OFN_FILEMUSTEXIST + dlg = win32ui.CreateFileDialog(1,None,None,openFlags, "Python Scripts (*.py)|*.py||", self) + dlg.SetOFNTitle("Run Script") + if dlg.DoModal()!=win32con.IDOK: + return 0 + self['script'] = dlg.GetPathName() + self.UpdateData(0) + return 0 + +def GetDebugger(): + """Get the default Python debugger. Returns the debugger, or None. + + It is assumed the debugger has a standard "pdb" defined interface. + Currently always returns the 'pywin.debugger' debugger, or None + (pdb is _not_ returned as it is not effective in this GUI environment) + """ + try: + import pywin.debugger + return pywin.debugger + except ImportError: + return None + +def IsOnPythonPath(path): + "Given a path only, see if it is on the Pythonpath. Assumes path is a full path spec." + # must check that the command line arg's path is in sys.path + for syspath in sys.path: + try: + # Python 1.5 and later allows an empty sys.path entry. + if syspath and win32ui.FullPath(syspath)==path: + return 1 + except win32ui.error as details: + print("Warning: The sys.path entry '%s' is invalid\n%s" % (syspath, details)) + return 0 + +def GetPackageModuleName(fileName): + """Given a filename, return (module name, new path). + eg - given "c:\a\b\c\my.py", return ("b.c.my",None) if "c:\a" is on sys.path. + If no package found, will return ("my", "c:\a\b\c") + """ + path, fname = os.path.split(fileName) + path=origPath=win32ui.FullPath(path) + fname = os.path.splitext(fname)[0] + modBits = [] + newPathReturn = None + if not IsOnPythonPath(path): + # Module not directly on the search path - see if under a package. + while len(path)>3: # ie 'C:\' + path, modBit = os.path.split(path) + modBits.append(modBit) + # If on path, _and_ existing package of that name loaded. + if IsOnPythonPath(path) and modBit in sys.modules and \ + (os.path.exists(os.path.join(path, modBit, '__init__.py')) or \ + os.path.exists(os.path.join(path, modBit, '__init__.pyc')) or \ + os.path.exists(os.path.join(path, modBit, '__init__.pyo')) \ + ): + modBits.reverse() + return ".".join(modBits) + "." 
+ fname, newPathReturn + # Not found - look a level higher + else: + newPathReturn = origPath + + return fname, newPathReturn + +def GetActiveView(): + """Gets the edit control (eg, EditView) with the focus, or None + """ + try: + childFrame, bIsMaximised = win32ui.GetMainFrame().MDIGetActive() + return childFrame.GetActiveView() + except win32ui.error: + return None + +def GetActiveEditControl(): + view = GetActiveView() + if view is None: return None + if hasattr(view, "SCIAddText"): # Is it a scintilla control? + return view + try: + return view.GetRichEditCtrl() + except AttributeError: + pass + try: + return view.GetEditCtrl() + except AttributeError: + pass + +def GetActiveEditorDocument(): + """Returns the active editor document and view, or (None,None) if no + active document or its not an editor document. + """ + view = GetActiveView() + if view is None or isinstance(view, TreeView): + return (None, None) + doc = view.GetDocument() + if hasattr(doc, "MarkerAdd"): # Is it an Editor document? + return doc, view + return (None, None) + +def GetActiveFileName(bAutoSave = 1): + """Gets the file name for the active frame, saving it if necessary. + + Returns None if it cant be found, or raises KeyboardInterrupt. + """ + pathName = None + active = GetActiveView() + if active is None: + return None + try: + doc = active.GetDocument() + pathName = doc.GetPathName() + + if bAutoSave and \ + (len(pathName)>0 or \ + doc.GetTitle()[:8]=="Untitled" or \ + doc.GetTitle()[:6]=="Script"): # if not a special purpose window + if doc.IsModified(): + try: + doc.OnSaveDocument(pathName) + pathName = doc.GetPathName() + + # clear the linecache buffer + linecache.clearcache() + + except win32ui.error: + raise KeyboardInterrupt + + except (win32ui.error, AttributeError): + pass + if not pathName: + return None + return pathName + +lastScript = '' +lastArgs = '' +lastDebuggingType = RS_DEBUGGER_NONE + +def RunScript(defName=None, defArgs=None, bShowDialog = 1, debuggingType=None): + global lastScript, lastArgs, lastDebuggingType + _debugger_stop_frame_ = 1 # Magic variable so the debugger will hide me! + + # Get the debugger - may be None! + debugger = GetDebugger() + + if defName is None: + try: + pathName = GetActiveFileName() + except KeyboardInterrupt: + return # User cancelled save. + else: + pathName = defName + if not pathName: + pathName = lastScript + if defArgs is None: + args = '' + if pathName==lastScript: + args = lastArgs + else: + args = defArgs + if debuggingType is None: debuggingType = lastDebuggingType + + if not pathName or bShowDialog: + dlg = DlgRunScript(debugger is not None) + dlg['script'] = pathName + dlg['args'] = args + dlg['debuggingType'] = debuggingType + if dlg.DoModal() != win32con.IDOK: + return + script=dlg['script'] + args=dlg['args'] + debuggingType = dlg['debuggingType'] + if not script: return + if debuggingType == RS_DEBUGGER_GO and debugger is not None: + # This may surprise users - they select "Run under debugger", but + # it appears not to! Only warn when they pick from the dialog! + # First - ensure the debugger is activated to pickup any break-points + # set in the editor. + try: + # Create the debugger, but _dont_ init the debugger GUI. 
+ rd = debugger._GetCurrentDebugger() + except AttributeError: + rd = None + if rd is not None and len(rd.breaks)==0: + msg = "There are no active break-points.\r\n\r\nSelecting this debug option without any\r\nbreak-points is unlikely to have the desired effect\r\nas the debugger is unlikely to be invoked..\r\n\r\nWould you like to step-through in the debugger instead?" + rc = win32ui.MessageBox(msg, win32ui.LoadString(win32ui.IDR_DEBUGGER), win32con.MB_YESNOCANCEL | win32con.MB_ICONINFORMATION) + if rc == win32con.IDCANCEL: + return + if rc == win32con.IDYES: + debuggingType = RS_DEBUGGER_STEP + + lastDebuggingType = debuggingType + lastScript = script + lastArgs = args + else: + script = pathName + + # try and open the script. + if len(os.path.splitext(script)[1])==0: # check if no extension supplied, and give one. + script = script + '.py' + # If no path specified, try and locate the file + path, fnameonly = os.path.split(script) + if len(path)==0: + try: + os.stat(fnameonly) # See if it is OK as is... + script = fnameonly + except os.error: + fullScript = LocatePythonFile(script) + if fullScript is None: + win32ui.MessageBox("The file '%s' can not be located" % script ) + return + script = fullScript + else: + path = win32ui.FullPath(path) + if not IsOnPythonPath(path): sys.path.append(path) + + # py3k fun: If we use text mode to open the file, we get \r\n + # translated so Python allows the syntax (good!), but we get back + # text already decoded from the default encoding (bad!) and Python + # ignores any encoding decls (bad!). If we use binary mode we get + # the raw bytes and Python looks at the encoding (good!) but \r\n + # chars stay in place so Python throws a syntax error (bad!). + # So: so the binary thing and manually normalize \r\n. + try: + f = open(script, 'rb') + except IOError as exc: + win32ui.MessageBox("The file could not be opened - %s (%d)" % (exc.strerror, exc.errno)) + return + + # Get the source-code - as above, normalize \r\n + code = f.read().replace(byte_crlf, byte_lf).replace(byte_cr, byte_lf) + byte_lf + + # Remember and hack sys.argv for the script. + oldArgv = sys.argv + sys.argv = ParseArgs(args) + sys.argv.insert(0, script) + # sys.path[0] is the path of the script + oldPath0 = sys.path[0] + newPath0 = os.path.split(script)[0] + if not oldPath0: # if sys.path[0] is empty + sys.path[0] = newPath0 + insertedPath0 = 0 + else: + sys.path.insert(0, newPath0) + insertedPath0 = 1 + bWorked = 0 + win32ui.DoWaitCursor(1) + base = os.path.split(script)[1] + # Allow windows to repaint before starting. + win32ui.PumpWaitingMessages() + win32ui.SetStatusText('Running script %s...' % base,1 ) + exitCode = 0 + from pywin.framework import interact + # Check the debugger flags + if debugger is None and (debuggingType != RS_DEBUGGER_NONE): + win32ui.MessageBox("No debugger is installed. Debugging options have been ignored!") + debuggingType = RS_DEBUGGER_NONE + + # Get a code object - ignore the debugger for this, as it is probably a syntax error + # at this point + try: + codeObject = compile(code, script, "exec") + except: + # Almost certainly a syntax error! + _HandlePythonFailure("run script", script) + # No code object which to run/debug. 
+ return + __main__.__file__=script + try: + if debuggingType == RS_DEBUGGER_STEP: + debugger.run(codeObject, __main__.__dict__, start_stepping=1) + elif debuggingType == RS_DEBUGGER_GO: + debugger.run(codeObject, __main__.__dict__, start_stepping=0) + else: + # Post mortem or no debugging + exec(codeObject, __main__.__dict__) + bWorked = 1 + except bdb.BdbQuit: + # Dont print tracebacks when the debugger quit, but do print a message. + print("Debugging session cancelled.") + exitCode = 1 + bWorked = 1 + except SystemExit as code: + exitCode = code + bWorked = 1 + except KeyboardInterrupt: + # Consider this successful, as we dont want the debugger. + # (but we do want a traceback!) + if interact.edit and interact.edit.currentView: + interact.edit.currentView.EnsureNoPrompt() + traceback.print_exc() + if interact.edit and interact.edit.currentView: + interact.edit.currentView.AppendToPrompt([]) + bWorked = 1 + except: + if interact.edit and interact.edit.currentView: + interact.edit.currentView.EnsureNoPrompt() + traceback.print_exc() + if interact.edit and interact.edit.currentView: + interact.edit.currentView.AppendToPrompt([]) + if debuggingType == RS_DEBUGGER_PM: + debugger.pm() + del __main__.__file__ + sys.argv = oldArgv + if insertedPath0: + del sys.path[0] + else: + sys.path[0] = oldPath0 + f.close() + if bWorked: + win32ui.SetStatusText("Script '%s' returned exit code %s" %(script, exitCode)) + else: + win32ui.SetStatusText('Exception raised while running script %s' % base) + try: + sys.stdout.flush() + except AttributeError: + pass + + win32ui.DoWaitCursor(0) + +def ImportFile(): + """ This code looks for the current window, and determines if it can be imported. If not, + it will prompt for a file name, and allow it to be imported. """ + try: + pathName = GetActiveFileName() + except KeyboardInterrupt: + pathName = None + + if pathName is not None: + if os.path.splitext(pathName)[1].lower() not in ('.py','.pyw','.pyx'): + pathName = None + + if pathName is None: + openFlags = win32con.OFN_OVERWRITEPROMPT|win32con.OFN_FILEMUSTEXIST + dlg = win32ui.CreateFileDialog(1,None,None,openFlags, "Python Scripts (*.py;*.pyw)|*.py;*.pyw;*.pyx||") + dlg.SetOFNTitle("Import Script") + if dlg.DoModal()!=win32con.IDOK: + return 0 + + pathName = dlg.GetPathName() + + # If already imported, dont look for package + path, modName = os.path.split(pathName) + modName, modExt = os.path.splitext(modName) + newPath = None + # note that some packages (*cough* email *cough*) use "lazy importers" + # meaning sys.modules can change as a side-effect of looking at + # module.__file__ - so we must take a copy (ie, items() in py2k, + # list(items()) in py3k) + for key, mod in list(sys.modules.items()): + if getattr(mod, '__file__', None): + fname = mod.__file__ + base, ext = os.path.splitext(fname) + if ext.lower() in ['.pyo', '.pyc']: + ext = '.py' + fname = base + ext + if win32ui.ComparePath(fname, pathName): + modName = key + break + else: # for not broken + modName, newPath = GetPackageModuleName(pathName) + if newPath: sys.path.append(newPath) + + if modName in sys.modules: + bNeedReload = 1 + what = "reload" + else: + what = "import" + bNeedReload = 0 + + win32ui.SetStatusText(what.capitalize()+'ing module...',1) + win32ui.DoWaitCursor(1) +# win32ui.GetMainFrame().BeginWaitCursor() + + try: + # always do an import, as it is cheap if it's already loaded. This ensures + # it is in our name space. 
+ codeObj = compile('import '+modName,'','exec') + except SyntaxError: + win32ui.SetStatusText('Invalid filename for import: "' +modName+'"') + return + try: + exec(codeObj, __main__.__dict__) + mod = sys.modules.get(modName) + if bNeedReload: + from importlib import reload + mod = reload(sys.modules[modName]) + win32ui.SetStatusText('Successfully ' + what + "ed module '"+modName+"': %s" % getattr(mod,'__file__',"")) + except: + _HandlePythonFailure(what) + win32ui.DoWaitCursor(0) + +def CheckFile(): + """ This code looks for the current window, and gets Python to check it + without actually executing any code (ie, by compiling only) + """ + try: + pathName = GetActiveFileName() + except KeyboardInterrupt: + return + + what = "check" + win32ui.SetStatusText(what.capitalize()+'ing module...',1) + win32ui.DoWaitCursor(1) + try: + f = open(pathName) + except IOError as details: + print("Cant open file '%s' - %s" % (pathName, details)) + return + try: + code = f.read() + "\n" + finally: + f.close() + try: + codeObj = compile(code, pathName,'exec') + if RunTabNanny(pathName): + win32ui.SetStatusText("Python and the TabNanny successfully checked the file '"+os.path.basename(pathName)+"'") + except SyntaxError: + _HandlePythonFailure(what, pathName) + except: + traceback.print_exc() + _HandlePythonFailure(what) + win32ui.DoWaitCursor(0) + +def RunTabNanny(filename): + import io as io + tabnanny = FindTabNanny() + if tabnanny is None: + win32ui.MessageBox("The TabNanny is not around, so the children can run amok!" ) + return + + # Capture the tab-nanny output + newout = io.StringIO() + old_out = sys.stderr, sys.stdout + sys.stderr = sys.stdout = newout + try: + tabnanny.check(filename) + finally: + # Restore output + sys.stderr, sys.stdout = old_out + data = newout.getvalue() + if data: + try: + lineno = data.split()[1] + lineno = int(lineno) + _JumpToPosition(filename, lineno) + try: # Try and display whitespace + GetActiveEditControl().SCISetViewWS(1) + except: + pass + win32ui.SetStatusText("The TabNanny found trouble at line %d" % lineno) + except (IndexError, TypeError, ValueError): + print("The tab nanny complained, but I cant see where!") + print(data) + return 0 + return 1 + +def _JumpToPosition(fileName, lineno, col = 1): + JumpToDocument(fileName, lineno, col) + +def JumpToDocument(fileName, lineno=0, col = 1, nChars = 0, bScrollToTop = 0): + # Jump to the position in a file. + # If lineno is <= 0, dont move the position - just open/restore. + # if nChars > 0, select that many characters. + # if bScrollToTop, the specified line will be moved to the top of the window + # (eg, bScrollToTop should be false when jumping to an error line to retain the + # context, but true when jumping to a method defn, where we want the full body. + # Return the view which is editing the file, or None on error. + doc = win32ui.GetApp().OpenDocumentFile(fileName) + if doc is None: return None + frame = doc.GetFirstView().GetParentFrame() + try: + view = frame.GetEditorView() + if frame.GetActiveView() != view: + frame.SetActiveView(view) + frame.AutoRestore() + except AttributeError: # Not an editor frame?? 
+ view = doc.GetFirstView() + if lineno > 0: + charNo = view.LineIndex(lineno-1) + start = charNo + col - 1 + size = view.GetTextLength() + try: + view.EnsureCharsVisible(charNo) + except AttributeError: + print("Doesnt appear to be one of our views?") + view.SetSel(min(start, size), min(start + nChars, size)) + if bScrollToTop: + curTop = view.GetFirstVisibleLine() + nScroll = (lineno-1) - curTop + view.LineScroll(nScroll, 0) + view.SetFocus() + return view + +def _HandlePythonFailure(what, syntaxErrorPathName = None): + typ, details, tb = sys.exc_info() + if isinstance(details, SyntaxError): + try: + msg, (fileName, line, col, text) = details + if (not fileName or fileName =="") and syntaxErrorPathName: + fileName = syntaxErrorPathName + _JumpToPosition(fileName, line, col) + except (TypeError, ValueError): + msg = str(details) + win32ui.SetStatusText('Failed to ' + what + ' - syntax error - %s' % msg) + else: + traceback.print_exc() + win32ui.SetStatusText('Failed to ' + what + ' - ' + str(details) ) + tb = None # Clean up a cycle. + +# Find the Python TabNanny in either the standard library or the Python Tools/Scripts directory. +def FindTabNanny(): + try: + return __import__("tabnanny") + except ImportError: + pass + # OK - not in the standard library - go looking. + filename = "tabnanny.py" + try: + path = win32api.RegQueryValue(win32con.HKEY_LOCAL_MACHINE, "SOFTWARE\\Python\\PythonCore\\%s\\InstallPath" % (sys.winver)) + except win32api.error: + print("WARNING - The Python registry does not have an 'InstallPath' setting") + print(" The file '%s' can not be located" % (filename)) + return None + fname = os.path.join(path, "Tools\\Scripts\\%s" % filename) + try: + os.stat(fname) + except os.error: + print("WARNING - The file '%s' can not be located in path '%s'" % (filename, path)) + return None + + tabnannyhome, tabnannybase = os.path.split(fname) + tabnannybase = os.path.splitext(tabnannybase)[0] + # Put tab nanny at the top of the path. + sys.path.insert(0, tabnannyhome) + try: + return __import__(tabnannybase) + finally: + # remove the tab-nanny from the path + del sys.path[0] + +def LocatePythonFile( fileName, bBrowseIfDir = 1 ): + " Given a file name, return a fully qualified file name, or None " + # first look for the exact file as specified + if not os.path.isfile(fileName): + # Go looking! + baseName = fileName + for path in sys.path: + fileName = os.path.abspath(os.path.join(path, baseName)) + if os.path.isdir(fileName): + if bBrowseIfDir: + d=win32ui.CreateFileDialog(1, "*.py", None, 0, "Python Files (*.py)|*.py|All files|*.*") + d.SetOFNInitialDir(fileName) + rc=d.DoModal() + if rc==win32con.IDOK: + fileName = d.GetPathName() + break + else: + return None + else: + fileName = fileName + ".py" + if os.path.isfile(fileName): + break # Found it! + + else: # for not broken out of + return None + return win32ui.FullPath(fileName) diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/sgrepmdi.py b/venv/Lib/site-packages/pythonwin/pywin/framework/sgrepmdi.py new file mode 100644 index 00000000..feeacb6e --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/sgrepmdi.py @@ -0,0 +1,530 @@ +#SGrepMDI is by Gordon McMillan (gmcm@hypernet.com) +#It does basically what Find In Files does in MSVC with a couple enhancements. 
+# - It saves any directories in the app's ini file (if you want to get rid +# of them you'll have to edit the file) +# - "Directories" can be directories, +# - semicolon separated lists of "directories", +# - environment variables that evaluate to "directories", +# - registry path names that evaluate to "directories", +# - all of which is recursive, so you can mix them all up. +# - It is MDI, so you can 'nest' greps and return to earlier ones, +# (ie, have multiple results open at the same time) +# - Like FIF, double clicking a line opens an editor and takes you to the line. +# - You can highlight text, right click and start a new grep with the selected +# text as search pattern and same directories etc as before. +# - You can save grep parameters (so you don't lose your hardearned pattern) +# from File|Save +# - You can save grep results by right clicking in the result window. +# Hats off to Mark Hammond for providing an environment where I could cobble +# something like this together in a couple evenings! + +import win32ui +import win32api +from pywin.mfc import docview, dialog, window +import win32con +import string +import re +import glob +import os +import stat +import glob +from . import scriptutils + +def getsubdirs(d): + dlist = [] + flist = glob.glob(d+'\\*') + for f in flist: + if os.path.isdir(f): + dlist.append(f) + dlist = dlist + getsubdirs(f) + return dlist + +class dirpath: + def __init__(self, str, recurse=0): + dp = str.split(';') + dirs = {} + for d in dp: + if os.path.isdir(d): + d = d.lower() + if d not in dirs: + dirs[d] = None + if recurse: + subdirs = getsubdirs(d) + for sd in subdirs: + sd = sd.lower() + if sd not in dirs: + dirs[sd] = None + elif os.path.isfile(d): + pass + else: + x = None + if d in os.environ: + x = dirpath(os.environ[d]) + elif d[:5] == 'HKEY_': + keystr = d.split('\\') + try: + root = eval('win32con.'+keystr[0]) + except: + win32ui.MessageBox("Can't interpret registry key name '%s'" % keystr[0]) + try: + subkey = '\\'.join(keystr[1:]) + val = win32api.RegQueryValue(root, subkey) + if val: + x = dirpath(val) + else: + win32ui.MessageBox("Registry path '%s' did not return a path entry" % d) + except: + win32ui.MessageBox("Can't interpret registry key value: %s" % keystr[1:]) + else: + win32ui.MessageBox("Directory '%s' not found" % d) + if x: + for xd in x: + if xd not in dirs: + dirs[xd] = None + if recurse: + subdirs = getsubdirs(xd) + for sd in subdirs: + sd = sd.lower() + if sd not in dirs: + dirs[sd] = None + self.dirs = [] + for d in list(dirs.keys()): + self.dirs.append(d) + + def __getitem__(self, key): + return self.dirs[key] + def __len__(self): + return len(self.dirs) + def __setitem__(self, key, value): + self.dirs[key] = value + def __delitem__(self, key): + del self.dirs[key] + def __getslice__(self, lo, hi): + return self.dirs[lo:hi] + def __setslice__(self, lo, hi, seq): + self.dirs[lo:hi] = seq + def __delslice__(self, lo, hi): + del self.dirs[lo:hi] + def __add__(self, other): + if type(other) == type(self) or type(other) == type([]): + return self.dirs + other.dirs + def __radd__(self, other): + if type(other) == type(self) or type(other) == type([]): + return other.dirs + self.dirs + +# Group(1) is the filename, group(2) is the lineno. 
+#regexGrepResult=regex.compile("^\\([a-zA-Z]:.*\\)(\\([0-9]+\\))") + +regexGrep=re.compile(r"^([a-zA-Z]:[^(]*)\(([0-9]+)\)") + +#these are the atom numbers defined by Windows for basic dialog controls + +BUTTON = 0x80 +EDIT = 0x81 +STATIC = 0x82 +LISTBOX = 0x83 +SCROLLBAR = 0x84 +COMBOBOX = 0x85 + +class GrepTemplate(docview.RichEditDocTemplate): + def __init__(self): + docview.RichEditDocTemplate.__init__(self, win32ui.IDR_TEXTTYPE, GrepDocument, GrepFrame, GrepView) + self.SetDocStrings("\nGrep\nGrep\nGrep params (*.grep)\n.grep\n\n\n") + win32ui.GetApp().AddDocTemplate(self) + self.docparams = None + + def MatchDocType(self, fileName, fileType): + doc = self.FindOpenDocument(fileName) + if doc: return doc + ext = os.path.splitext(fileName)[1].lower() + if ext =='.grep': + return win32ui.CDocTemplate_Confidence_yesAttemptNative + return win32ui.CDocTemplate_Confidence_noAttempt + + def setParams(self, params): + self.docparams = params + + def readParams(self): + tmp = self.docparams + self.docparams = None + return tmp + +class GrepFrame(window.MDIChildWnd): + # The template and doc params will one day be removed. + def __init__(self, wnd = None): + window.MDIChildWnd.__init__(self, wnd) + +class GrepDocument(docview.RichEditDoc): + def __init__(self, template): + docview.RichEditDoc.__init__(self, template) + self.dirpattern = '' + self.filpattern = '' + self.greppattern = '' + self.casesensitive = 1 + self.recurse = 1 + self.verbose = 0 + + def OnOpenDocument(self, fnm): + #this bizarre stuff with params is so right clicking in a result window + #and starting a new grep can communicate the default parameters to the + #new grep. + try: + params = open(fnm,'r').read() + except: + params = None + self.setInitParams(params) + return self.OnNewDocument() + + def OnCloseDocument(self): + try: + win32ui.GetApp().DeleteIdleHandler(self.SearchFile) + except: + pass + return self._obj_.OnCloseDocument() + + def saveInitParams(self): + # Only save the flags, not the text boxes. + paramstr = "\t%s\t\t%d\t%d" % (self.filpattern, self.casesensitive, self.recurse) + win32ui.WriteProfileVal("Grep", "Params", paramstr) + + def setInitParams(self, paramstr): + if paramstr is None: + paramstr = win32ui.GetProfileVal("Grep", "Params", '\t\t\t1\t0\t0') + params = paramstr.split('\t') + if len(params) < 3: + params = params + ['']*(3-len(params)) + if len(params) < 6: + params = params + [0]*(6-len(params)) + self.dirpattern = params[0] + self.filpattern = params[1] + self.greppattern = params[2] + self.casesensitive = int(params[3]) + self.recurse = int(params[4]) + self.verbose = int(params[5]) + # setup some reasonable defaults. 
+ if not self.dirpattern: + try: + editor=win32ui.GetMainFrame().MDIGetActive()[0].GetEditorView() + self.dirpattern=os.path.abspath(os.path.dirname(editor.GetDocument().GetPathName())) + except (AttributeError, win32ui.error): + self.dirpattern = os.getcwd() + if not self.filpattern: + self.filpattern = "*.py" + + def OnNewDocument(self): + if self.dirpattern == '': + self.setInitParams(greptemplate.readParams()) + d = GrepDialog(self.dirpattern, self.filpattern, self.greppattern, self.casesensitive, self.recurse, self.verbose) + if d.DoModal() == win32con.IDOK: + self.dirpattern = d['dirpattern'] + self.filpattern = d['filpattern'] + self.greppattern = d['greppattern'] + self.casesensitive = d['casesensitive'] + self.recurse = d['recursive'] + self.verbose = d['verbose'] + self.doSearch() + self.saveInitParams() + return 1 + return 0 # cancelled - return zero to stop frame creation. + + def doSearch(self): + self.dp = dirpath(self.dirpattern, self.recurse) + self.SetTitle("Grep for %s in %s" % (self.greppattern, self.filpattern)) + #self.text = [] + self.GetFirstView().Append('#Search '+self.dirpattern+'\n') + if self.verbose: + self.GetFirstView().Append('# ='+repr(self.dp.dirs)+'\n') + self.GetFirstView().Append('# Files '+self.filpattern+'\n') + self.GetFirstView().Append('# For '+self.greppattern+'\n') + self.fplist = self.filpattern.split(';') + if self.casesensitive: + self.pat = re.compile(self.greppattern) + else: + self.pat = re.compile(self.greppattern, re.IGNORECASE) + win32ui.SetStatusText("Searching. Please wait...", 0) + self.dpndx = self.fpndx = 0 + self.fndx = -1 + if not self.dp: + self.GetFirstView().Append("# ERROR: '%s' does not resolve to any search locations" % self.dirpattern) + self.SetModifiedFlag(0) + else: + self.flist = glob.glob(self.dp[0]+'\\'+self.fplist[0]) + win32ui.GetApp().AddIdleHandler(self.SearchFile) + + def SearchFile(self, handler, count): + self.fndx = self.fndx + 1 + if self.fndx < len(self.flist): + f = self.flist[self.fndx] + if self.verbose: + self.GetFirstView().Append('# ..'+f+'\n') + # Directories may match the file type pattern, and files may be removed + # while grep is running + if os.path.isfile(f): + win32ui.SetStatusText("Searching "+f, 0) + lines = open(f, 'r').readlines() + for i in range(len(lines)): + line = lines[i] + if self.pat.search(line) != None: + self.GetFirstView().Append(f+'('+repr(i+1) + ') '+line) + else: + self.fndx = -1 + self.fpndx = self.fpndx + 1 + if self.fpndx < len(self.fplist): + self.flist = glob.glob(self.dp[self.dpndx] + '\\' + self.fplist[self.fpndx]) + else: + self.fpndx = 0 + self.dpndx = self.dpndx + 1 + if self.dpndx < len(self.dp): + self.flist = glob.glob(self.dp[self.dpndx] + '\\' + self.fplist[self.fpndx]) + else: + win32ui.SetStatusText("Search complete.", 0) + self.SetModifiedFlag(0) # default to not modified. 
+ try: + win32ui.GetApp().DeleteIdleHandler(self.SearchFile) + except: + pass + return 0 + return 1 + + def GetParams(self): + return self.dirpattern+'\t'+self.filpattern+'\t'+self.greppattern+'\t'+repr(self.casesensitive)+'\t'+repr(self.recurse)+'\t'+repr(self.verbose) + + def OnSaveDocument(self, filename): +# print 'OnSaveDocument() filename=',filename + savefile = open(filename,"wb") + txt = self.GetParams()+'\n' +# print 'writing',txt + savefile.write(txt) + savefile.close() + self.SetModifiedFlag(0) + return 1 + +ID_OPEN_FILE = 0xe400 +ID_GREP = 0xe401 +ID_SAVERESULTS = 0x402 +ID_TRYAGAIN = 0x403 + +class GrepView(docview.RichEditView): + def __init__(self, doc): + docview.RichEditView.__init__(self, doc) + self.SetWordWrap(win32ui.CRichEditView_WrapNone) + self.HookHandlers() + + def OnInitialUpdate(self): + rc = self._obj_.OnInitialUpdate() + format = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New') + self.SetDefaultCharFormat(format) + return rc + + def HookHandlers(self): + self.HookMessage(self.OnRClick, win32con.WM_RBUTTONDOWN) + self.HookCommand(self.OnCmdOpenFile, ID_OPEN_FILE) + self.HookCommand(self.OnCmdGrep, ID_GREP) + self.HookCommand(self.OnCmdSave, ID_SAVERESULTS) + self.HookCommand(self.OnTryAgain, ID_TRYAGAIN) + self.HookMessage(self.OnLDblClick,win32con.WM_LBUTTONDBLCLK) + + def OnLDblClick(self,params): + line = self.GetLine() + regexGrepResult = regexGrep.match(line) + if regexGrepResult: + fname = regexGrepResult.group(1) + line = int(regexGrepResult.group(2)) + scriptutils.JumpToDocument(fname, line) + return 0 # dont pass on + return 1 # pass it on by default. + + def OnRClick(self, params): + menu = win32ui.CreatePopupMenu() + flags=win32con.MF_STRING|win32con.MF_ENABLED + lineno = self._obj_.LineFromChar(-1) #selection or current line + line = self._obj_.GetLine(lineno) + regexGrepResult = regexGrep.match(line) + if regexGrepResult: + self.fnm = regexGrepResult.group(1) + self.lnnum = int(regexGrepResult.group(2)) + menu.AppendMenu(flags, ID_OPEN_FILE, "&Open "+self.fnm) + menu.AppendMenu(win32con.MF_SEPARATOR) + menu.AppendMenu(flags, ID_TRYAGAIN, "&Try Again") + charstart, charend = self._obj_.GetSel() + if charstart != charend: + linestart = self._obj_.LineIndex(lineno) + self.sel = line[charstart-linestart:charend-linestart] + menu.AppendMenu(flags, ID_GREP, "&Grep for "+self.sel) + menu.AppendMenu(win32con.MF_SEPARATOR) + menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, 'Cu&t') + menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, '&Copy') + menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, '&Paste') + menu.AppendMenu(flags, win32con.MF_SEPARATOR); + menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all') + menu.AppendMenu(flags, win32con.MF_SEPARATOR); + menu.AppendMenu(flags, ID_SAVERESULTS, 'Sa&ve results') + menu.TrackPopupMenu(params[5]) + return 0 + + def OnCmdOpenFile(self, cmd, code): + doc = win32ui.GetApp().OpenDocumentFile(self.fnm) + if doc: + vw = doc.GetFirstView() + #hope you have an editor that implements GotoLine()! 
+ try: + vw.GotoLine(int(self.lnnum)) + except: + pass + return 0 + + def OnCmdGrep(self, cmd, code): + curparamsstr = self.GetDocument().GetParams() + params = curparamsstr.split('\t') + params[2] = self.sel + greptemplate.setParams('\t'.join(params)) + greptemplate.OpenDocumentFile() + return 0 + + def OnTryAgain(self, cmd, code): + greptemplate.setParams(self.GetDocument().GetParams()) + greptemplate.OpenDocumentFile() + return 0 + + def OnCmdSave(self, cmd, code): + flags = win32con.OFN_OVERWRITEPROMPT + dlg = win32ui.CreateFileDialog(0, None, None, flags, "Text Files (*.txt)|*.txt||", self) + dlg.SetOFNTitle("Save Results As") + if dlg.DoModal() == win32con.IDOK: + pn = dlg.GetPathName() + self._obj_.SaveTextFile(pn) + return 0 + + def Append(self, strng): + numlines = self.GetLineCount() + endpos = self.LineIndex(numlines-1) + len(self.GetLine(numlines-1)) + self.SetSel(endpos, endpos) + self.ReplaceSel(strng) + + +class GrepDialog(dialog.Dialog): + def __init__(self, dp, fp, gp, cs, r, v): + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + CS = win32con.WS_CHILD | win32con.WS_VISIBLE + tmp = [ ["Grep", (0, 0, 210, 90), style, None, (8, "MS Sans Serif")], ] + tmp.append([STATIC, "Grep For:", -1, (7, 7, 50, 9), CS ]) + tmp.append([EDIT, gp, 101, (52, 7, 144, 11), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER]) + tmp.append([STATIC, "Directories:", -1, (7, 20, 50, 9), CS ]) + tmp.append([EDIT, dp, 102, (52, 20, 128, 11), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER]) + tmp.append([BUTTON, '...', 110, (182,20, 16, 11), CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP]) + tmp.append([STATIC, "File types:", -1, (7, 33, 50, 9), CS ]) + tmp.append([EDIT, fp, 103, (52, 33, 128, 11), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER ]) + tmp.append([BUTTON, '...', 111, (182,33, 16, 11), CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP]) + tmp.append([BUTTON,'Case sensitive', 104, (7, 45, 72, 9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP]) + tmp.append([BUTTON,'Subdirectories', 105, (7, 56, 72, 9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP]) + tmp.append([BUTTON,'Verbose', 106, (7, 67, 72, 9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP]) + tmp.append([BUTTON,'OK', win32con.IDOK, (166,53, 32, 12), CS | win32con.BS_DEFPUSHBUTTON| win32con.WS_TABSTOP]) + tmp.append([BUTTON,'Cancel', win32con.IDCANCEL, (166,67, 32, 12), CS | win32con.BS_PUSHBUTTON| win32con.WS_TABSTOP]) + dialog.Dialog.__init__(self, tmp) + self.AddDDX(101,'greppattern') + self.AddDDX(102,'dirpattern') + self.AddDDX(103,'filpattern') + self.AddDDX(104,'casesensitive') + self.AddDDX(105,'recursive') + self.AddDDX(106,'verbose') + self._obj_.data['greppattern'] = gp + self._obj_.data['dirpattern'] = dp + self._obj_.data['filpattern'] = fp + self._obj_.data['casesensitive'] = cs + self._obj_.data['recursive'] = r + self._obj_.data['verbose'] = v + self.HookCommand(self.OnMoreDirectories, 110) + self.HookCommand(self.OnMoreFiles, 111) + + def OnMoreDirectories(self, cmd, code): + self.getMore('Grep\\Directories', 'dirpattern') + + def OnMoreFiles(self, cmd, code): + self.getMore('Grep\\File Types', 'filpattern') + + def getMore(self, section, key): + self.UpdateData(1) + #get the items out of the ini file + ini = win32ui.GetProfileFileName() + secitems = 
win32api.GetProfileSection(section, ini) + items = [] + for secitem in secitems: + items.append(secitem.split('=')[1]) + dlg = GrepParamsDialog(items) + if dlg.DoModal() == win32con.IDOK: + itemstr = ';'.join(dlg.getItems()) + self._obj_.data[key] = itemstr + #update the ini file with dlg.getNew() + i = 0 + newitems = dlg.getNew() + if newitems: + items = items + newitems + for item in items: + win32api.WriteProfileVal(section, repr(i), item, ini) + i = i + 1 + self.UpdateData(0) + + def OnOK(self): + self.UpdateData(1) + for id, name in [(101,'greppattern'), (102,'dirpattern'), (103,'filpattern')]: + if not self[name]: + self.GetDlgItem(id).SetFocus() + win32api.MessageBeep() + win32ui.SetStatusText("Please enter a value") + return + self._obj_.OnOK() + +class GrepParamsDialog(dialog.Dialog): + def __init__(self, items): + self.items = items + self.newitems = [] + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + CS = win32con.WS_CHILD | win32con.WS_VISIBLE + tmp = [ ["Grep Parameters", (0, 0, 205, 100), style, None, (8, "MS Sans Serif")], ] + tmp.append([LISTBOX, '', 107, (7, 7, 150, 72), CS | win32con.LBS_MULTIPLESEL| win32con.LBS_STANDARD | win32con.LBS_HASSTRINGS | win32con.WS_TABSTOP | win32con.LBS_NOTIFY]) + tmp.append([BUTTON,'OK', win32con.IDOK, (167, 7, 32, 12), CS | win32con.BS_DEFPUSHBUTTON| win32con.WS_TABSTOP]) + tmp.append([BUTTON,'Cancel', win32con.IDCANCEL, (167,23, 32, 12), CS | win32con.BS_PUSHBUTTON| win32con.WS_TABSTOP]) + tmp.append([STATIC,'New:', -1, (2, 83, 15, 12), CS]) + tmp.append([EDIT, '', 108, (18, 83, 139, 12), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER]) + tmp.append([BUTTON,'Add', 109, (167,83, 32, 12), CS | win32con.BS_PUSHBUTTON| win32con.WS_TABSTOP]) + dialog.Dialog.__init__(self, tmp) + self.HookCommand(self.OnAddItem, 109) + self.HookCommand(self.OnListDoubleClick, 107) + + def OnInitDialog(self): + lb = self.GetDlgItem(107) + for item in self.items: + lb.AddString(item) + return self._obj_.OnInitDialog() + + def OnAddItem(self, cmd, code): + eb = self.GetDlgItem(108) + item = eb.GetLine(0) + self.newitems.append(item) + lb = self.GetDlgItem(107) + i = lb.AddString(item) + lb.SetSel(i, 1) + return 1 + + def OnListDoubleClick(self, cmd, code): + if code == win32con.LBN_DBLCLK: + self.OnOK() + return 1 + + def OnOK(self): + lb = self.GetDlgItem(107) + self.selections = lb.GetSelTextItems() + self._obj_.OnOK() + + def getItems(self): + return self.selections + + def getNew(self): + return self.newitems + +try: + win32ui.GetApp().RemoveDocTemplate(greptemplate) +except NameError: + pass + +greptemplate = GrepTemplate() diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/startup.py b/venv/Lib/site-packages/pythonwin/pywin/framework/startup.py new file mode 100644 index 00000000..0a03272b --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/startup.py @@ -0,0 +1,75 @@ +# startup.py +# +"The main application startup code for PythonWin." + +# +# This does the basic command line handling. + +# Keep this as short as possible, cos error output is only redirected if +# this runs OK. Errors in imported modules are much better - the messages go somewhere (not any more :-) + +import sys +import os +import win32api +import win32ui + +if not sys.argv: + # Initialize sys.argv from commandline. 
When sys.argv is empty list ( + # different from [''] meaning "no cmd line arguments" ), then C + # bootstrapping or another method of invocation failed to initialize + # sys.argv and it will be done here. ( This was a workaround for a bug in + # win32ui but is retained for other situations. ) + argv = win32api.CommandLineToArgv(win32api.GetCommandLine()) + sys.argv = argv[1:] + if os.getcwd() not in sys.path and '.' not in sys.path: + sys.path.insert(0, os.getcwd()) + +# You may wish to redirect error output somewhere useful if you have startup errors. +# eg, 'import win32traceutil' will do this for you. +# import win32traceutil # Just uncomment this line to see error output! + +# An old class I used to use - generally only useful if Pythonwin is running under MSVC +#class DebugOutput: +# softspace=1 +# def write(self,message): +# win32ui.OutputDebug(message) +#sys.stderr=sys.stdout=DebugOutput() + +# To fix a problem with Pythonwin when started from the Pythonwin directory, +# we update the pywin path to ensure it is absolute. +# If it is indeed relative, it will be relative to our current directory. +# If its already absolute, then this will have no affect. +import pywin, pywin.framework +pywin.__path__[0] = win32ui.FullPath(pywin.__path__[0]) +pywin.framework.__path__[0] = win32ui.FullPath(pywin.framework.__path__[0]) + +# make a few wierd sys values. This is so later we can clobber sys.argv to trick +# scripts when running under a GUI environment. + +moduleName = "pywin.framework.intpyapp" +sys.appargvoffset = 0 +sys.appargv = sys.argv[:] +# Must check for /app param here. +if len(sys.argv) >= 2 and sys.argv[0].lower() in ('/app', '-app'): + from . import cmdline + moduleName = cmdline.FixArgFileName(sys.argv[1]) + sys.appargvoffset = 2 + newargv=sys.argv[sys.appargvoffset:] +# newargv.insert(0, sys.argv[0]) + sys.argv = newargv + +# Import the application module. +__import__(moduleName) + +try: + win32ui.GetApp()._obj_ + # This worked - an app already exists - do nothing more +except (AttributeError, win32ui.error): + # This means either no app object exists at all, or the one + # that does exist does not have a Python class (ie, was created + # by the host .EXE). In this case, we do the "old style" init... + from . import app + if app.AppBuilder is None: + raise TypeError("No application object has been registered") + + app.App = app.AppBuilder() diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/stdin.py b/venv/Lib/site-packages/pythonwin/pywin/framework/stdin.py new file mode 100644 index 00000000..f377b3f6 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/stdin.py @@ -0,0 +1,173 @@ +# Copyright (c) 2000 David Abrahams. Permission to copy, use, modify, sell +# and distribute this software is granted provided this copyright +# notice appears in all copies. This software is provided "as is" without +# express or implied warranty, and with no claim as to its suitability for +# any purpose. +"""Provides a class Stdin which can be used to emulate the regular old +sys.stdin for the PythonWin interactive window. Right now it just pops +up a raw_input() dialog. With luck, someone will integrate it into the +actual PythonWin interactive window someday. + +WARNING: Importing this file automatically replaces sys.stdin with an +instance of Stdin (below). This is useful because you can just open +Stdin.py in PythonWin and hit the import button to get it set up right +if you don't feel like changing PythonWin's source. 
To put things back +the way they were, simply use this magic incantation: + import sys + sys.stdin = sys.stdin.real_file +""" +import sys + +try: + get_input_line = raw_input # py2x +except NameError: + get_input_line = input # py3k + +class Stdin: + def __init__(self): + self.real_file = sys.stdin # NOTE: Likely to be None in py3k + self.buffer = "" + self.closed = False + + def __getattr__(self, name): + """Forward most functions to the real sys.stdin for absolute realism. + """ + if self.real_file is None: + raise AttributeError(name) + return getattr(self.real_file, name) + + def isatty(self): + """Return 1 if the file is connected to a tty(-like) device, else 0. + """ + return 1 + + def read(self, size = -1): + """Read at most size bytes from the file (less if the read + hits EOF or no more data is immediately available on a pipe, + tty or similar device). If the size argument is negative or + omitted, read all data until EOF is reached. The bytes are + returned as a string object. An empty string is returned when + EOF is encountered immediately. (For certain files, like ttys, + it makes sense to continue reading after an EOF is hit.)""" + result_size = self.__get_lines(size) + return self.__extract_from_buffer(result_size) + + def readline(self, size = -1): + """Read one entire line from the file. A trailing newline + character is kept in the string2.6 (but may be absent when a file ends + with an incomplete line). If the size argument is present and + non-negative, it is a maximum byte count (including the trailing + newline) and an incomplete line may be returned. An empty string is + returned when EOF is hit immediately. Note: unlike stdio's fgets(), + the returned string contains null characters ('\0') if they occurred + in the input. + """ + maximum_result_size = self.__get_lines(size, lambda buffer: '\n' in buffer) + + if '\n' in self.buffer[:maximum_result_size]: + result_size = self.buffer.find('\n', 0, maximum_result_size) + 1 + assert(result_size > 0) + else: + result_size = maximum_result_size + + return self.__extract_from_buffer(result_size) + + def __extract_from_buffer(self, character_count): + """Remove the first character_count characters from the internal buffer and + return them. + """ + result = self.buffer[:character_count] + self.buffer = self.buffer[character_count:] + return result + + def __get_lines(self, desired_size, done_reading = lambda buffer: False): + """Keep adding lines to our internal buffer until done_reading(self.buffer) + is true or EOF has been reached or we have desired_size bytes in the buffer. + If desired_size < 0, we are never satisfied until we reach EOF. If done_reading + is not supplied, it is not consulted. + + If desired_size < 0, returns the length of the internal buffer. Otherwise, + returns desired_size. + """ + while not done_reading(self.buffer) and (desired_size < 0 + or len(self.buffer) < desired_size): + try: + self.__get_line() + except (EOFError, KeyboardInterrupt): # deal with cancellation of get_input_line dialog + desired_size = len(self.buffer) # Be satisfied! + + if desired_size < 0: + return len(self.buffer) + else: + return desired_size + + def __get_line(self): + """Grab one line from get_input_line() and append it to the buffer. + """ + line = get_input_line() + print('>>>',line) # echo input to console + self.buffer = self.buffer + line + '\n' + + def readlines(self, *sizehint): + """Read until EOF using readline() and return a list containing the lines + thus read. 
If the optional sizehint argument is present, instead of + reading up to EOF, whole lines totalling approximately sizehint bytes + (possibly after rounding up to an internal buffer size) are read. + """ + result = [] + total_read = 0 + while sizehint == () or total_read < sizehint[0]: + line = self.readline() + if line == '': + break + total_read = total_read + len(line) + result.append(line) + return result + +if __name__ == "__main__": + test_input = r"""this is some test +input that I am hoping +~ +will be very instructive +and when I am done +I will have tested everything. +Twelve and twenty blackbirds +baked in a pie. Patty cake +patty cake so am I. +~ +Thirty-five niggling idiots! +Sell you soul to the devil, baby +""" + + def fake_raw_input(prompt=None): + """Replacement for raw_input() which pulls lines out of global test_input. + For testing only! + """ + global test_input + if '\n' not in test_input: + end_of_line_pos = len(test_input) + else: + end_of_line_pos = test_input.find('\n') + result = test_input[:end_of_line_pos] + test_input = test_input[end_of_line_pos + 1:] + if len(result) == 0 or result[0] == '~': + raise EOFError() + return result + + get_input_line = fake_raw_input + + # Some completely inadequate tests, just to make sure the code's not totally broken + try: + x = Stdin() + print(x.read()) + print(x.readline()) + print(x.read(12)) + print(x.readline(47)) + print(x.readline(3)) + print(x.readlines()) + finally: + get_input_line = raw_input +else: + import sys + sys.stdin = Stdin() + \ No newline at end of file diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/toolmenu.py b/venv/Lib/site-packages/pythonwin/pywin/framework/toolmenu.py new file mode 100644 index 00000000..778c8ac7 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/toolmenu.py @@ -0,0 +1,256 @@ +# toolmenu.py + +import win32ui +import win32con +import win32api +from . import app +import sys +import string + +tools = {} +idPos = 100 + +# The default items should no tools menu exist in the INI file. +defaultToolMenuItems = [ + ('Browser', 'win32ui.GetApp().OnViewBrowse(0,0)'), + ('Browse PythonPath', 'from pywin.tools import browseProjects;browseProjects.Browse()'), + ('Edit Python Path', 'from pywin.tools import regedit;regedit.EditRegistry()'), + ('COM Makepy utility', 'from win32com.client import makepy;makepy.main()'), + ('COM Browser', 'from win32com.client import combrowse;combrowse.main()'), + ('Trace Collector Debugging tool', 'from pywin.tools import TraceCollector;TraceCollector.MakeOutputWindow()'), +] + +def LoadToolMenuItems(): + # Load from the registry. + items = [] + lookNo = 1 + while 1: + menu = win32ui.GetProfileVal("Tools Menu\\%s" % lookNo, "", "") + if menu=="": + break + cmd = win32ui.GetProfileVal("Tools Menu\\%s" % lookNo, "Command", "") + items.append((menu, cmd)) + lookNo = lookNo + 1 + + if len(items)==0: + items = defaultToolMenuItems + return items + +def WriteToolMenuItems( items ): + # Items is a list of (menu, command) + # Delete the entire registry tree. + try: + mainKey = win32ui.GetAppRegistryKey() + toolKey = win32api.RegOpenKey(mainKey, "Tools Menu") + except win32ui.error: + toolKey = None + if toolKey is not None: + while 1: + try: + subkey = win32api.RegEnumKey(toolKey, 0) + except win32api.error: + break + win32api.RegDeleteKey(toolKey, subkey) + # Keys are now removed - write the new ones. + # But first check if we have the defaults - and if so, dont write anything! 
+ if items==defaultToolMenuItems: + return + itemNo = 1 + for menu, cmd in items: + win32ui.WriteProfileVal("Tools Menu\\%s" % itemNo, "", menu) + win32ui.WriteProfileVal("Tools Menu\\%s" % itemNo, "Command", cmd) + itemNo = itemNo + 1 + +def SetToolsMenu(menu, menuPos = None): + global tools + global idPos + + # todo - check the menu does not already exist. + # Create the new menu + toolsMenu = win32ui.CreatePopupMenu() + + # Load from the ini file. + items = LoadToolMenuItems() + for menuString, cmd in items: + tools[idPos] = (menuString, cmd, menuString) + toolsMenu.AppendMenu(win32con.MF_ENABLED|win32con.MF_STRING,idPos, menuString) + win32ui.GetMainFrame().HookCommand(HandleToolCommand, idPos) + idPos=idPos+1 + + # Find the correct spot to insert the new tools menu. + if menuPos is None: + menuPos = menu.GetMenuItemCount()-2 + if menuPos<0: menuPos=0 + + menu.InsertMenu(menuPos, win32con.MF_BYPOSITION|win32con.MF_ENABLED|win32con.MF_STRING|win32con.MF_POPUP, toolsMenu.GetHandle(), '&Tools') + +def HandleToolCommand(cmd, code): + import traceback + import re + global tools + (menuString, pyCmd, desc) = tools[cmd] + win32ui.SetStatusText("Executing tool %s" % desc, 1) + pyCmd = re.sub('\\\\n','\n', pyCmd) + win32ui.DoWaitCursor(1) + oldFlag = None + try: + oldFlag = sys.stdout.template.writeQueueing + sys.stdout.template.writeQueueing = 0 + except (NameError, AttributeError): + pass + + try: + exec("%s\n" % pyCmd) + worked=1 + except SystemExit: + # The program raised a SystemExit - ignore it. + worked = 1 + except: + print("Failed to execute command:\n%s" % pyCmd) + traceback.print_exc() + worked=0 + if oldFlag is not None: + sys.stdout.template.writeQueueing = oldFlag + win32ui.DoWaitCursor(0) + if worked: + text = "Completed successfully." + else: + text = "Error executing %s." % desc + win32ui.SetStatusText(text, 1) + +# The property page for maintaing the items on the Tools menu. +import commctrl +from pywin.mfc import dialog + +if win32ui.UNICODE: + LVN_ENDLABELEDIT = commctrl.LVN_ENDLABELEDITW +else: + LVN_ENDLABELEDIT = commctrl.LVN_ENDLABELEDITA + +class ToolMenuPropPage(dialog.PropertyPage): + def __init__(self): + self.bImChangingEditControls = 0 # Am I programatically changing the controls? + dialog.PropertyPage.__init__(self, win32ui.IDD_PP_TOOLMENU) + + def OnInitDialog(self): + self.editMenuCommand = self.GetDlgItem(win32ui.IDC_EDIT2) + self.butNew = self.GetDlgItem(win32ui.IDC_BUTTON3) + + # Now hook the change notification messages for the edit controls. + self.HookCommand(self.OnCommandEditControls, win32ui.IDC_EDIT1) + self.HookCommand(self.OnCommandEditControls, win32ui.IDC_EDIT2) + + self.HookNotify(self.OnNotifyListControl, commctrl.LVN_ITEMCHANGED) + self.HookNotify(self.OnNotifyListControlEndLabelEdit, commctrl.LVN_ENDLABELEDIT) + + # Hook the button clicks. 
+ self.HookCommand(self.OnButtonNew, win32ui.IDC_BUTTON3) # New Item + self.HookCommand(self.OnButtonDelete, win32ui.IDC_BUTTON4) # Delete item + self.HookCommand(self.OnButtonMove, win32ui.IDC_BUTTON1) # Move up + self.HookCommand(self.OnButtonMove, win32ui.IDC_BUTTON2) # Move down + + # Setup the columns in the list control + lc = self.GetDlgItem(win32ui.IDC_LIST1) + rect = lc.GetWindowRect() + cx = rect[2] - rect[0] + colSize = cx/2 - win32api.GetSystemMetrics(win32con.SM_CXBORDER) - 1 + + item = commctrl.LVCFMT_LEFT, colSize, "Menu Text" + lc.InsertColumn(0, item) + + item = commctrl.LVCFMT_LEFT, colSize, "Python Command" + lc.InsertColumn(1, item) + + # Insert the existing tools menu + itemNo = 0 + for desc, cmd in LoadToolMenuItems(): + lc.InsertItem(itemNo, desc) + lc.SetItemText(itemNo, 1, cmd) + itemNo = itemNo + 1 + + self.listControl = lc + return dialog.PropertyPage.OnInitDialog(self) + + def OnOK(self): + # Write the menu back to the registry. + items = [] + itemLook = 0 + while 1: + try: + text = self.listControl.GetItemText(itemLook, 0); + if not text: + break + items.append( (text, self.listControl.GetItemText(itemLook, 1)) ) + except win32ui.error: + # no more items! + break + itemLook = itemLook + 1 + WriteToolMenuItems( items ) + return self._obj_.OnOK() + + def OnCommandEditControls(self, id, cmd): +# print "OnEditControls", id, cmd + if cmd==win32con.EN_CHANGE and not self.bImChangingEditControls: + itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) + newText = self.editMenuCommand.GetWindowText() + self.listControl.SetItemText(itemNo, 1, newText) + + return 0 + + def OnNotifyListControlEndLabelEdit(self, id, cmd): + newText = self.listControl.GetEditControl().GetWindowText() + itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) + self.listControl.SetItemText(itemNo, 0, newText) + + def OnNotifyListControl(self, id, cmd): +# print id, cmd + try: + itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) + except win32ui.error: # No selection! + return + + self.bImChangingEditControls = 1 + try: + item = self.listControl.GetItem(itemNo, 1) + self.editMenuCommand.SetWindowText(item[4]) + finally: + self.bImChangingEditControls = 0 + + return 0 # we have handled this! + + def OnButtonNew(self, id, cmd): + if cmd==win32con.BN_CLICKED: + newIndex = self.listControl.GetItemCount() + self.listControl.InsertItem(newIndex, "Click to edit the text") + self.listControl.EnsureVisible(newIndex, 0) + + def OnButtonMove(self, id, cmd): + if cmd==win32con.BN_CLICKED: + try: + itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) + except win32ui.error: + return + menu = self.listControl.GetItemText(itemNo, 0) + cmd = self.listControl.GetItemText(itemNo, 1) + if id == win32ui.IDC_BUTTON1: + # Move up + if itemNo > 0: + self.listControl.DeleteItem(itemNo) + # reinsert it. + self.listControl.InsertItem(itemNo-1, menu) + self.listControl.SetItemText(itemNo-1, 1, cmd) + else: + # Move down. + if itemNo < self.listControl.GetItemCount()-1: + self.listControl.DeleteItem(itemNo) + # reinsert it. + self.listControl.InsertItem(itemNo+1, menu) + self.listControl.SetItemText(itemNo+1, 1, cmd) + + def OnButtonDelete(self, id, cmd): + if cmd==win32con.BN_CLICKED: + try: + itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) + except win32ui.error: # No selection! 
+ return + self.listControl.DeleteItem(itemNo) diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/window.py b/venv/Lib/site-packages/pythonwin/pywin/framework/window.py new file mode 100644 index 00000000..c90b091f --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/window.py @@ -0,0 +1,13 @@ +# Framework Window classes. + +# Most Pythonwin windows should use these classes rather than +# the raw MFC ones if they want Pythonwin specific functionality. +import pywin.mfc.window +import win32con + +class MDIChildWnd(pywin.mfc.window.MDIChildWnd): + def AutoRestore(self): + "If the window is minimised or maximised, restore it." + p = self.GetWindowPlacement() + if p[1]==win32con.SW_MINIMIZE or p[1]==win32con.SW_SHOWMINIMIZED: + self.SetWindowPlacement(p[0], win32con.SW_RESTORE, p[2], p[3], p[4]) diff --git a/venv/Lib/site-packages/pythonwin/pywin/framework/winout.py b/venv/Lib/site-packages/pythonwin/pywin/framework/winout.py new file mode 100644 index 00000000..d708982f --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/framework/winout.py @@ -0,0 +1,518 @@ +# winout.py +# +# generic "output window" +# +# This Window will detect itself closing, and recreate next time output is +# written to it. + +# This has the option of writing output at idle time (by hooking the +# idle message, and queueing output) or writing as each +# write is executed. +# Updating the window directly gives a jerky appearance as many writes +# take place between commands, and the windows scrolls, and updates etc +# Updating at idle-time may defer all output of a long process, giving the +# appearence nothing is happening. +# There is a compromise "line" mode, which will output whenever +# a complete line is available. + +# behaviour depends on self.writeQueueing + +# This module is thread safe - output can originate from any thread. If any thread +# other than the main thread attempts to print, it is always queued until next idle time + +import sys, string, re +from pywin.mfc import docview +from pywin.framework import app, window +import win32ui, win32api, win32con +import queue + +debug = lambda msg: None + +##debug=win32ui.OutputDebugString +##import win32trace;win32trace.InitWrite() # for debugging - delete me! +##debug = win32trace.write + +class flags: + # queueing of output. 
+ WQ_NONE = 0 + WQ_LINE = 1 + WQ_IDLE = 2 + +#WindowOutputDocumentParent=docview.RichEditDoc +#WindowOutputDocumentParent=docview.Document +import pywin.scintilla.document +from pywin.scintilla import scintillacon +from pywin import default_scintilla_encoding + +WindowOutputDocumentParent=pywin.scintilla.document.CScintillaDocument +class WindowOutputDocument(WindowOutputDocumentParent): + def SaveModified(self): + return 1 # say it is OK to destroy my document + + def OnSaveDocument( self, fileName ): + win32ui.SetStatusText("Saving file...",1) + try: + self.SaveFile(fileName) + except IOError as details: + win32ui.MessageBox("Error - could not save file\r\n\r\n%s"%details) + return 0 + win32ui.SetStatusText("Ready") + return 1 + +class WindowOutputFrame(window.MDIChildWnd): + def __init__(self, wnd = None): + window.MDIChildWnd.__init__(self, wnd) + self.HookMessage(self.OnSizeMove, win32con.WM_SIZE) + self.HookMessage(self.OnSizeMove, win32con.WM_MOVE) + + def LoadFrame( self, idResource, style, wndParent, context ): + self.template = context.template + return self._obj_.LoadFrame(idResource, style, wndParent, context) + + def PreCreateWindow(self, cc): + cc = self._obj_.PreCreateWindow(cc) + if self.template.defSize and self.template.defSize[0] != self.template.defSize[1]: + rect = app.RectToCreateStructRect(self.template.defSize) + cc = cc[0], cc[1], cc[2], cc[3], rect, cc[5], cc[6], cc[7], cc[8] + return cc + def OnSizeMove(self, msg): + # so recreate maintains position. + # Need to map coordinates from the + # frame windows first child. + mdiClient = self.GetParent() + self.template.defSize = mdiClient.ScreenToClient(self.GetWindowRect()) + def OnDestroy(self, message): + self.template.OnFrameDestroy(self) + return 1 + +class WindowOutputViewImpl: + def __init__(self): + self.patErrorMessage=re.compile('\W*File "(.*)", line ([0-9]+)') + self.template = self.GetDocument().GetDocTemplate() + + def HookHandlers(self): + # Hook for the right-click menu. + self.HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN) + + def OnDestroy(self, msg): + self.template.OnViewDestroy(self) + + def OnInitialUpdate(self): + self.RestoreKillBuffer() + self.SetSel(-2) # end of buffer + + def GetRightMenuItems(self): + ret = [] + flags=win32con.MF_STRING|win32con.MF_ENABLED + ret.append((flags, win32ui.ID_EDIT_COPY, '&Copy')) + ret.append((flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all')) + return ret + + # + # Windows command handlers, virtuals, etc. + # + def OnRClick(self,params): + paramsList = self.GetRightMenuItems() + menu = win32ui.CreatePopupMenu() + for appendParams in paramsList: + if type(appendParams)!=type(()): + appendParams = (appendParams,) + menu.AppendMenu(*appendParams) + menu.TrackPopupMenu(params[5]) # track at mouse position. + return 0 + + # as this is often used as an output window, exeptions will often + # be printed. Therefore, we support this functionality at this level. + # Returns TRUE if the current line is an error message line, and will + # jump to it. FALSE if no error (and no action taken) + def HandleSpecialLine(self): + from . import scriptutils + line = self.GetLine() + if line[:11]=="com_error: ": + # An OLE Exception - pull apart the exception + # and try and locate a help file. + try: + import win32api, win32con + det = eval(line[line.find(":")+1:].strip()) + win32ui.SetStatusText("Opening help file on OLE error..."); + from . 
import help + help.OpenHelpFile(det[2][3],win32con.HELP_CONTEXT, det[2][4]) + return 1 + except win32api.error as details: + win32ui.SetStatusText("The help file could not be opened - %s" % details.strerror) + return 1 + except: + win32ui.SetStatusText("Line is a COM error, but no WinHelp details can be parsed"); + # Look for a Python traceback. + matchResult = self.patErrorMessage.match(line) + if matchResult is None: + # No match - try the previous line + lineNo = self.LineFromChar() + if lineNo > 0: + line = self.GetLine(lineNo-1) + matchResult = self.patErrorMessage.match(line) + if matchResult is not None: + # we have an error line. + fileName = matchResult.group(1) + if fileName[0]=="<": + win32ui.SetStatusText("Can not load this file") + return 1 # still was an error message. + else: + lineNoString = matchResult.group(2) + # Attempt to locate the file (in case it is a relative spec) + fileNameSpec = fileName + fileName = scriptutils.LocatePythonFile(fileName) + if fileName is None: + # Dont force update, so it replaces the idle prompt. + win32ui.SetStatusText("Cant locate the file '%s'" % (fileNameSpec), 0) + return 1 + + win32ui.SetStatusText("Jumping to line "+lineNoString+" of file "+fileName,1) + if not scriptutils.JumpToDocument(fileName, int(lineNoString)): + win32ui.SetStatusText("Could not open %s" % fileName) + return 1 # still was an error message. + return 1 + return 0 # not an error line + def write(self, msg): + return self.template.write(msg) + def writelines(self, lines): + for line in lines: + self.write(line) + def flush(self): + self.template.flush() + +class WindowOutputViewRTF(docview.RichEditView, WindowOutputViewImpl): + def __init__(self, doc): + docview.RichEditView.__init__(self, doc) + WindowOutputViewImpl.__init__(self) + + def OnInitialUpdate(self): + WindowOutputViewImpl.OnInitialUpdate(self) + return docview.RichEditView.OnInitialUpdate(self) + + def OnDestroy(self, msg): + WindowOutputViewImpl.OnDestroy(self, msg) + docview.RichEditView.OnDestroy(self, msg) + + def HookHandlers(self): + WindowOutputViewImpl.HookHandlers(self) + # Hook for finding and locating error messages + self.HookMessage(self.OnLDoubleClick,win32con.WM_LBUTTONDBLCLK) +# docview.RichEditView.HookHandlers(self) + + def OnLDoubleClick(self,params): + if self.HandleSpecialLine(): + return 0 # dont pass on + return 1 # pass it on by default. + + def RestoreKillBuffer(self): + if len(self.template.killBuffer): + self.StreamIn(win32con.SF_RTF, self._StreamRTFIn) + self.template.killBuffer = [] + + def SaveKillBuffer(self): + self.StreamOut(win32con.SF_RTFNOOBJS, self._StreamRTFOut) + + def _StreamRTFOut(self, data): + self.template.killBuffer.append(data) + return 1 # keep em coming! 
+ + def _StreamRTFIn(self, bytes): + try: + item = self.template.killBuffer[0] + self.template.killBuffer.remove(item) + if bytes < len(item): + print("Warning - output buffer not big enough!") + return item + except IndexError: + return None + def dowrite(self, str): + self.SetSel(-2) + self.ReplaceSel(str) + +import pywin.scintilla.view +class WindowOutputViewScintilla(pywin.scintilla.view.CScintillaView, WindowOutputViewImpl): + def __init__(self, doc): + pywin.scintilla.view.CScintillaView.__init__(self, doc) + WindowOutputViewImpl.__init__(self) + + def OnInitialUpdate(self): + pywin.scintilla.view.CScintillaView.OnInitialUpdate(self) + self.SCISetMarginWidth(3) + WindowOutputViewImpl.OnInitialUpdate(self) + + def OnDestroy(self, msg): + WindowOutputViewImpl.OnDestroy(self, msg) + pywin.scintilla.view.CScintillaView.OnDestroy(self, msg) + + def HookHandlers(self): + WindowOutputViewImpl.HookHandlers(self) + pywin.scintilla.view.CScintillaView.HookHandlers(self) + self.GetParent().HookNotify(self.OnScintillaDoubleClick, scintillacon.SCN_DOUBLECLICK) +## self.HookMessage(self.OnLDoubleClick,win32con.WM_LBUTTONDBLCLK) + + def OnScintillaDoubleClick(self, std, extra): + self.HandleSpecialLine() + +## def OnLDoubleClick(self,params): +## return 0 # never dont pass on + + def RestoreKillBuffer(self): + assert len(self.template.killBuffer) in [0,1], "Unexpected killbuffer contents" + if self.template.killBuffer: + self.SCIAddText(self.template.killBuffer[0]) + self.template.killBuffer = [] + def SaveKillBuffer(self): + self.template.killBuffer = [self.GetTextRange(0,-1)] + def dowrite(self, str): + end = self.GetTextLength() + atEnd = end==self.GetSel()[0] + self.SCIInsertText(str, end) + if atEnd: + self.SetSel(self.GetTextLength()) + + def SetWordWrap(self, bWrapOn = 1): + if bWrapOn: + wrap_mode = scintillacon.SC_WRAP_WORD + else: + wrap_mode = scintillacon.SC_WRAP_NONE + self.SCISetWrapMode(wrap_mode) + + def _MakeColorizer(self): + return None # No colorizer for me! + +WindowOutputView = WindowOutputViewScintilla +# The WindowOutput class is actually an MFC template. This is a conventient way of +# making sure that my state can exist beyond the life of the windows themselves. +# This is primarily to support the functionality of a WindowOutput window automatically +# being recreated if necessary when written to. +class WindowOutput(docview.DocTemplate): + """ Looks like a general Output Window - text can be written by the 'write' method. + Will auto-create itself on first write, and also on next write after being closed """ + softspace=1 + def __init__(self, title=None, defSize=None, queueing = flags.WQ_LINE, \ + bAutoRestore = 1, style=None, + makeDoc = None, makeFrame = None, makeView = None): + """ init the output window - + Params + title=None -- What is the title of the window + defSize=None -- What is the default size for the window - if this + is a string, the size will be loaded from the ini file. + queueing = flags.WQ_LINE -- When should output be written + bAutoRestore=1 -- Should a minimized window be restored. + style -- Style for Window, or None for default. + makeDoc, makeFrame, makeView -- Classes for frame, view and window respectively. 
+ """ + if makeDoc is None: makeDoc = WindowOutputDocument + if makeFrame is None: makeFrame = WindowOutputFrame + if makeView is None: makeView = WindowOutputViewScintilla + docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, \ + makeDoc, makeFrame, makeView) + self.SetDocStrings("\nOutput\n\nText Documents (*.txt)\n.txt\n\n\n") + win32ui.GetApp().AddDocTemplate(self) + self.writeQueueing = queueing + self.errorCantRecreate = 0 + self.killBuffer=[] + self.style = style + self.bAutoRestore = bAutoRestore + self.title = title + self.bCreating = 0 + self.interruptCount = 0 + if type(defSize)==type(''): # is a string - maintain size pos from ini file. + self.iniSizeSection = defSize + self.defSize = app.LoadWindowSize(defSize) + self.loadedSize = self.defSize + else: + self.iniSizeSection = None + self.defSize=defSize + self.currentView = None + self.outputQueue = queue.Queue(-1) + self.mainThreadId = win32api.GetCurrentThreadId() + self.idleHandlerSet = 0 + self.SetIdleHandler() + + def __del__(self): + self.Close() + + def Create(self, title=None, style = None): + self.bCreating = 1 + if title: self.title = title + if style: self.style = style + doc=self.OpenDocumentFile() + if doc is None: return + self.currentView = doc.GetFirstView() + self.bCreating = 0 + if self.title: doc.SetTitle(self.title) + + def Close(self): + self.RemoveIdleHandler() + try: + parent = self.currentView.GetParent() + except (AttributeError, win32ui.error): # Already closed + return + parent.DestroyWindow() + + def SetTitle(self, title): + self.title = title + if self.currentView: self.currentView.GetDocument().SetTitle(self.title) + + def OnViewDestroy(self, view): + self.currentView.SaveKillBuffer() + self.currentView = None + + def OnFrameDestroy(self, frame): + if self.iniSizeSection: + # use GetWindowPlacement(), as it works even when min'd or max'd + newSize = frame.GetWindowPlacement()[4] + if self.loadedSize!=newSize: + app.SaveWindowSize(self.iniSizeSection, newSize) + + def SetIdleHandler(self): + if not self.idleHandlerSet: + debug("Idle handler set\n") + win32ui.GetApp().AddIdleHandler(self.QueueIdleHandler) + self.idleHandlerSet = 1 + + def RemoveIdleHandler(self): + if self.idleHandlerSet: + debug("Idle handler reset\n") + if (win32ui.GetApp().DeleteIdleHandler(self.QueueIdleHandler)==0): + debug('Error deleting idle handler\n') + self.idleHandlerSet = 0 + + def RecreateWindow(self): + if self.errorCantRecreate: + debug("Error = not trying again") + return 0 + try: + # This will fail if app shutting down + win32ui.GetMainFrame().GetSafeHwnd() + self.Create() + return 1 + except (win32ui.error, AttributeError): + self.errorCantRecreate = 1 + debug("Winout can not recreate the Window!\n") + return 0 + + # this handles the idle message, and does the printing. + def QueueIdleHandler(self,handler,count): + try: + bEmpty = self.QueueFlush(20) + # If the queue is empty, then we are back to idle and restart interrupt logic. + if bEmpty: self.interruptCount = 0 + except KeyboardInterrupt: + # First interrupt since idle we just pass on. + # later ones we dump the queue and give up. + self.interruptCount = self.interruptCount + 1 + if self.interruptCount > 1: + # Drop the queue quickly as the user is already annoyed :-) + self.outputQueue = queue.Queue(-1) + print("Interrupted.") + bEmpty = 1 + else: + raise # re-raise the error so the users exception filters up. + return not bEmpty # More to do if not empty. + + # Returns true if the Window needs to be recreated. 
+ def NeedRecreateWindow(self): + try: + if self.currentView is not None and self.currentView.IsWindow(): + return 0 + except (win32ui.error, AttributeError): # Attribute error if the win32ui object has died. + pass + return 1 + + # Returns true if the Window is OK (either cos it was, or because it was recreated + def CheckRecreateWindow(self): + if self.bCreating: return 1 + if not self.NeedRecreateWindow(): + return 1 + if self.bAutoRestore: + if self.RecreateWindow(): + return 1 + return 0 + + def QueueFlush(self, max = None): + # Returns true if the queue is empty after the flush +# debug("Queueflush - %d, %d\n" % (max, self.outputQueue.qsize())) + if self.bCreating: return 1 + items = [] + rc = 0 + while max is None or max > 0: + try: + item = self.outputQueue.get_nowait() + items.append(item) + except queue.Empty: + rc = 1 + break + if max is not None: + max = max - 1 + if len(items) != 0: + if not self.CheckRecreateWindow(): + debug(":Recreate failed!\n") + return 1 # In trouble - so say we have nothing to do. + win32ui.PumpWaitingMessages() # Pump paint messages + self.currentView.dowrite(''.join(items)) + return rc + + def HandleOutput(self,message): +# debug("QueueOutput on thread %d, flags %d with '%s'...\n" % (win32api.GetCurrentThreadId(), self.writeQueueing, message )) + self.outputQueue.put(message) + if win32api.GetCurrentThreadId() != self.mainThreadId: + pass +# debug("not my thread - ignoring queue options!\n") + elif self.writeQueueing==flags.WQ_LINE: + pos = message.rfind('\n') + if pos>=0: +# debug("Line queueing - forcing flush\n") + self.QueueFlush() + return + elif self.writeQueueing==flags.WQ_NONE: +# debug("WQ_NONE - flushing!\n") + self.QueueFlush() + return + # Let our idle handler get it - wake it up + try: + win32ui.GetMainFrame().PostMessage(win32con.WM_USER) # Kick main thread off. + except win32ui.error: + # This can happen as the app is shutting down, so we send it to the C++ debugger + win32api.OutputDebugString(message) + + # delegate certain fns to my view. + def writelines(self, lines): + for line in lines: + self.write(line) + + def write(self,message): + self.HandleOutput(message) + + def flush(self): + self.QueueFlush() + + def HandleSpecialLine(self): + self.currentView.HandleSpecialLine() + +def RTFWindowOutput(*args, **kw): + kw['makeView'] = WindowOutputViewRTF + return WindowOutput(*args, **kw) + + +def thread_test(o): + for i in range(5): + o.write("Hi from thread %d\n" % (win32api.GetCurrentThreadId())) + win32api.Sleep(100) + +def test(): + w = WindowOutput(queueing=flags.WQ_IDLE) + w.write("First bit of text\n") + import _thread + for i in range(5): + w.write("Hello from the main thread\n") + _thread.start_new(thread_test, (w,)) + for i in range(2): + w.write("Hello from the main thread\n") + win32api.Sleep(50) + return w + +if __name__=='__main__': + test() diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/AutoExpand.py b/venv/Lib/site-packages/pythonwin/pywin/idle/AutoExpand.py new file mode 100644 index 00000000..f0a29d3c --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/idle/AutoExpand.py @@ -0,0 +1,92 @@ +import string +import re + +###$ event <> +###$ win +###$ unix + +class AutoExpand: + + keydefs = { + '<>': [''], + } + + unix_keydefs = { + '<>': [''], + } + + menudefs = [ + ('edit', [ + ('E_xpand word', '<>'), + ]), + ] + + wordchars = string.ascii_letters + string.digits + "_" + + def __init__(self, editwin): + self.text = editwin.text + self.text.wordlist = None # XXX what is this? 
+ self.state = None + + def expand_word_event(self, event): + curinsert = self.text.index("insert") + curline = self.text.get("insert linestart", "insert lineend") + if not self.state: + words = self.getwords() + index = 0 + else: + words, index, insert, line = self.state + if insert != curinsert or line != curline: + words = self.getwords() + index = 0 + if not words: + self.text.bell() + return "break" + word = self.getprevword() + self.text.delete("insert - %d chars" % len(word), "insert") + newword = words[index] + index = (index + 1) % len(words) + if index == 0: + self.text.bell() # Warn we cycled around + self.text.insert("insert", newword) + curinsert = self.text.index("insert") + curline = self.text.get("insert linestart", "insert lineend") + self.state = words, index, curinsert, curline + return "break" + + def getwords(self): + word = self.getprevword() + if not word: + return [] + before = self.text.get("1.0", "insert wordstart") + wbefore = re.findall(r"\b" + word + r"\w+\b", before) + del before + after = self.text.get("insert wordend", "end") + wafter = re.findall(r"\b" + word + r"\w+\b", after) + del after + if not wbefore and not wafter: + return [] + words = [] + dict = {} + # search backwards through words before + wbefore.reverse() + for w in wbefore: + if dict.get(w): + continue + words.append(w) + dict[w] = w + # search onwards through words after + for w in wafter: + if dict.get(w): + continue + words.append(w) + dict[w] = w + words.append(word) + return words + + def getprevword(self): + line = self.text.get("insert linestart", "insert") + i = len(line) + while i > 0 and line[i-1] in self.wordchars: + i = i-1 + return line[i:] diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/AutoIndent.py b/venv/Lib/site-packages/pythonwin/pywin/idle/AutoIndent.py new file mode 100644 index 00000000..663837c9 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/idle/AutoIndent.py @@ -0,0 +1,529 @@ +import sys +import string, tokenize +from . import PyParse +from pywin import default_scintilla_encoding + +if sys.version_info < (3,): + # in py2k, tokenize() takes a 'token eater' callback, while + # generate_tokens is a generator that works with str objects. + token_generator = tokenize.generate_tokens +else: + # in py3k tokenize() is the generator working with 'byte' objects, and + # token_generator is the 'undocumented b/w compat' function that + # theoretically works with str objects - but actually seems to fail) + token_generator = tokenize.tokenize + +class AutoIndent: + + menudefs = [ + ('edit', [ + None, + ('_Indent region', '<>'), + ('_Dedent region', '<>'), + ('Comment _out region', '<>'), + ('U_ncomment region', '<>'), + ('Tabify region', '<>'), + ('Untabify region', '<>'), + ('Toggle tabs', '<>'), + ('New indent width', '<>'), + ]), + ] + + keydefs = { + '<>': [''], + '<>': ['', ''], + '<>': [''] + } + + windows_keydefs = { + '<>': [''], + '<>': [''], + '<>': [''], + '<>': [''], + '<>': [''], + '<>': [''], + '<>': [''], + '<>': [''], + } + + unix_keydefs = { + '<>': ['', + '', + ''], + '<>': ['', + '', + ''], + '<>': ['', ''], + '<>': ['', ''], + '<>': ['', ''], + '<>': ['', ''], + '<>': [''], + '<>': [''], + } + + # usetabs true -> literal tab characters are used by indent and + # dedent cmds, possibly mixed with spaces if + # indentwidth is not a multiple of tabwidth + # false -> tab characters are converted to spaces by indent + # and dedent cmds, and ditto TAB keystrokes + # indentwidth is the number of characters per logical indent level. 
+ # tabwidth is the display width of a literal tab character. + # CAUTION: telling Tk to use anything other than its default + # tab setting causes it to use an entirely different tabbing algorithm, + # treating tab stops as fixed distances from the left margin. + # Nobody expects this, so for now tabwidth should never be changed. + usetabs = 1 + indentwidth = 4 + tabwidth = 8 # for IDLE use, must remain 8 until Tk is fixed + + # If context_use_ps1 is true, parsing searches back for a ps1 line; + # else searches for a popular (if, def, ...) Python stmt. + context_use_ps1 = 0 + + # When searching backwards for a reliable place to begin parsing, + # first start num_context_lines[0] lines back, then + # num_context_lines[1] lines back if that didn't work, and so on. + # The last value should be huge (larger than the # of lines in a + # conceivable file). + # Making the initial values larger slows things down more often. + num_context_lines = 50, 500, 5000000 + + def __init__(self, editwin): + self.editwin = editwin + self.text = editwin.text + + def config(self, **options): + for key, value in options.items(): + if key == 'usetabs': + self.usetabs = value + elif key == 'indentwidth': + self.indentwidth = value + elif key == 'tabwidth': + self.tabwidth = value + elif key == 'context_use_ps1': + self.context_use_ps1 = value + else: + raise KeyError("bad option name: %s" % repr(key)) + + # If ispythonsource and guess are true, guess a good value for + # indentwidth based on file content (if possible), and if + # indentwidth != tabwidth set usetabs false. + # In any case, adjust the Text widget's view of what a tab + # character means. + + def set_indentation_params(self, ispythonsource, guess=1): + if guess and ispythonsource: + i = self.guess_indent() + if 2 <= i <= 8: + self.indentwidth = i + if self.indentwidth != self.tabwidth: + self.usetabs = 0 + + self.editwin.set_tabwidth(self.tabwidth) + + def smart_backspace_event(self, event): + text = self.text + first, last = self.editwin.get_selection_indices() + if first and last: + text.delete(first, last) + text.mark_set("insert", first) + return "break" + # Delete whitespace left, until hitting a real char or closest + # preceding virtual tab stop. + chars = text.get("insert linestart", "insert") + if chars == '': + if text.compare("insert", ">", "1.0"): + # easy: delete preceding newline + text.delete("insert-1c") + else: + text.bell() # at start of buffer + return "break" + if chars[-1] not in " \t": + # easy: delete preceding real char + text.delete("insert-1c") + return "break" + # Ick. It may require *inserting* spaces if we back up over a + # tab character! This is written to be clear, not fast. 
+ have = len(chars.expandtabs(self.tabwidth)) + assert have > 0 + want = int((have - 1) / self.indentwidth) * self.indentwidth + ncharsdeleted = 0 + while 1: + chars = chars[:-1] + ncharsdeleted = ncharsdeleted + 1 + have = len(chars.expandtabs(self.tabwidth)) + if have <= want or chars[-1] not in " \t": + break + text.undo_block_start() + text.delete("insert-%dc" % ncharsdeleted, "insert") + if have < want: + text.insert("insert", ' ' * (want - have)) + text.undo_block_stop() + return "break" + + def smart_indent_event(self, event): + # if intraline selection: + # delete it + # elif multiline selection: + # do indent-region & return + # indent one level + text = self.text + first, last = self.editwin.get_selection_indices() + text.undo_block_start() + try: + if first and last: + if index2line(first) != index2line(last): + return self.indent_region_event(event) + text.delete(first, last) + text.mark_set("insert", first) + prefix = text.get("insert linestart", "insert") + raw, effective = classifyws(prefix, self.tabwidth) + if raw == len(prefix): + # only whitespace to the left + self.reindent_to(effective + self.indentwidth) + else: + if self.usetabs: + pad = '\t' + else: + effective = len(prefix.expandtabs(self.tabwidth)) + n = self.indentwidth + pad = ' ' * (n - effective % n) + text.insert("insert", pad) + text.see("insert") + return "break" + finally: + text.undo_block_stop() + + def newline_and_indent_event(self, event): + text = self.text + first, last = self.editwin.get_selection_indices() + text.undo_block_start() + try: + if first and last: + text.delete(first, last) + text.mark_set("insert", first) + line = text.get("insert linestart", "insert") + i, n = 0, len(line) + while i < n and line[i] in " \t": + i = i+1 + if i == n: + # the cursor is in or at leading indentation; just inject + # an empty line at the start and strip space from current line + text.delete("insert - %d chars" % i, "insert") + text.insert("insert linestart", '\n') + return "break" + indent = line[:i] + # strip whitespace before insert point + i = 0 + while line and line[-1] in " \t": + line = line[:-1] + i = i+1 + if i: + text.delete("insert - %d chars" % i, "insert") + # strip whitespace after insert point + while text.get("insert") in " \t": + text.delete("insert") + # start new line + text.insert("insert", '\n') + + # adjust indentation for continuations and block + # open/close first need to find the last stmt + lno = index2line(text.index('insert')) + y = PyParse.Parser(self.indentwidth, self.tabwidth) + for context in self.num_context_lines: + startat = max(lno - context, 1) + startatindex = repr(startat) + ".0" + rawtext = text.get(startatindex, "insert") + y.set_str(rawtext) + bod = y.find_good_parse_start( + self.context_use_ps1, + self._build_char_in_string_func(startatindex)) + if bod is not None or startat == 1: + break + y.set_lo(bod or 0) + c = y.get_continuation_type() + if c != PyParse.C_NONE: + # The current stmt hasn't ended yet. 
+ if c == PyParse.C_STRING: + # inside a string; just mimic the current indent + text.insert("insert", indent) + elif c == PyParse.C_BRACKET: + # line up with the first (if any) element of the + # last open bracket structure; else indent one + # level beyond the indent of the line with the + # last open bracket + self.reindent_to(y.compute_bracket_indent()) + elif c == PyParse.C_BACKSLASH: + # if more than one line in this stmt already, just + # mimic the current indent; else if initial line + # has a start on an assignment stmt, indent to + # beyond leftmost =; else to beyond first chunk of + # non-whitespace on initial line + if y.get_num_lines_in_stmt() > 1: + text.insert("insert", indent) + else: + self.reindent_to(y.compute_backslash_indent()) + else: + assert 0, "bogus continuation type " + repr(c) + return "break" + + # This line starts a brand new stmt; indent relative to + # indentation of initial line of closest preceding + # interesting stmt. + indent = y.get_base_indent_string() + text.insert("insert", indent) + if y.is_block_opener(): + self.smart_indent_event(event) + elif indent and y.is_block_closer(): + self.smart_backspace_event(event) + return "break" + finally: + text.see("insert") + text.undo_block_stop() + + auto_indent = newline_and_indent_event + + # Our editwin provides a is_char_in_string function that works + # with a Tk text index, but PyParse only knows about offsets into + # a string. This builds a function for PyParse that accepts an + # offset. + + def _build_char_in_string_func(self, startindex): + def inner(offset, _startindex=startindex, + _icis=self.editwin.is_char_in_string): + return _icis(_startindex + "+%dc" % offset) + return inner + + def indent_region_event(self, event): + head, tail, chars, lines = self.get_region() + for pos in range(len(lines)): + line = lines[pos] + if line: + raw, effective = classifyws(line, self.tabwidth) + effective = effective + self.indentwidth + lines[pos] = self._make_blanks(effective) + line[raw:] + self.set_region(head, tail, chars, lines) + return "break" + + def dedent_region_event(self, event): + head, tail, chars, lines = self.get_region() + for pos in range(len(lines)): + line = lines[pos] + if line: + raw, effective = classifyws(line, self.tabwidth) + effective = max(effective - self.indentwidth, 0) + lines[pos] = self._make_blanks(effective) + line[raw:] + self.set_region(head, tail, chars, lines) + return "break" + + def comment_region_event(self, event): + head, tail, chars, lines = self.get_region() + for pos in range(len(lines) - 1): + line = lines[pos] + lines[pos] = '##' + line + self.set_region(head, tail, chars, lines) + + def uncomment_region_event(self, event): + head, tail, chars, lines = self.get_region() + for pos in range(len(lines)): + line = lines[pos] + if not line: + continue + if line[:2] == '##': + line = line[2:] + elif line[:1] == '#': + line = line[1:] + lines[pos] = line + self.set_region(head, tail, chars, lines) + + def tabify_region_event(self, event): + head, tail, chars, lines = self.get_region() + tabwidth = self._asktabwidth() + for pos in range(len(lines)): + line = lines[pos] + if line: + raw, effective = classifyws(line, tabwidth) + ntabs, nspaces = divmod(effective, tabwidth) + lines[pos] = '\t' * ntabs + ' ' * nspaces + line[raw:] + self.set_region(head, tail, chars, lines) + + def untabify_region_event(self, event): + head, tail, chars, lines = self.get_region() + tabwidth = self._asktabwidth() + for pos in range(len(lines)): + lines[pos] = lines[pos].expandtabs(tabwidth) + 
self.set_region(head, tail, chars, lines) + + def toggle_tabs_event(self, event): + if self.editwin.askyesno( + "Toggle tabs", + "Turn tabs " + ("on", "off")[self.usetabs] + "?", + parent=self.text): + self.usetabs = not self.usetabs + return "break" + + # XXX this isn't bound to anything -- see class tabwidth comments + def change_tabwidth_event(self, event): + new = self._asktabwidth() + if new != self.tabwidth: + self.tabwidth = new + self.set_indentation_params(0, guess=0) + return "break" + + def change_indentwidth_event(self, event): + new = self.editwin.askinteger( + "Indent width", + "New indent width (1-16)", + parent=self.text, + initialvalue=self.indentwidth, + minvalue=1, + maxvalue=16) + if new and new != self.indentwidth: + self.indentwidth = new + return "break" + + def get_region(self): + text = self.text + first, last = self.editwin.get_selection_indices() + if first and last: + head = text.index(first + " linestart") + tail = text.index(last + "-1c lineend +1c") + else: + head = text.index("insert linestart") + tail = text.index("insert lineend +1c") + chars = text.get(head, tail) + lines = chars.split("\n") + return head, tail, chars, lines + + def set_region(self, head, tail, chars, lines): + text = self.text + newchars = "\n".join(lines) + if newchars == chars: + text.bell() + return + text.tag_remove("sel", "1.0", "end") + text.mark_set("insert", head) + text.undo_block_start() + text.delete(head, tail) + text.insert(head, newchars) + text.undo_block_stop() + text.tag_add("sel", head, "insert") + + # Make string that displays as n leading blanks. + + def _make_blanks(self, n): + if self.usetabs: + ntabs, nspaces = divmod(n, self.tabwidth) + return '\t' * ntabs + ' ' * nspaces + else: + return ' ' * n + + # Delete from beginning of line to insert point, then reinsert + # column logical (meaning use tabs if appropriate) spaces. + + def reindent_to(self, column): + text = self.text + text.undo_block_start() + if text.compare("insert linestart", "!=", "insert"): + text.delete("insert linestart", "insert") + if column: + text.insert("insert", self._make_blanks(column)) + text.undo_block_stop() + + def _asktabwidth(self): + return self.editwin.askinteger( + "Tab width", + "Spaces per tab?", + parent=self.text, + initialvalue=self.tabwidth, + minvalue=1, + maxvalue=16) or self.tabwidth + + # Guess indentwidth from text content. + # Return guessed indentwidth. This should not be believed unless + # it's in a reasonable range (e.g., it will be 0 if no indented + # blocks are found). + + def guess_indent(self): + opener, indented = IndentSearcher(self.text, self.tabwidth).run() + if opener and indented: + raw, indentsmall = classifyws(opener, self.tabwidth) + raw, indentlarge = classifyws(indented, self.tabwidth) + else: + indentsmall = indentlarge = 0 + return indentlarge - indentsmall + +# "line.col" -> line, as an int +def index2line(index): + return int(float(index)) + +# Look at the leading whitespace in s. +# Return pair (# of leading ws characters, +# effective # of leading blanks after expanding +# tabs to width tabwidth) + +def classifyws(s, tabwidth): + raw = effective = 0 + for ch in s: + if ch == ' ': + raw = raw + 1 + effective = effective + 1 + elif ch == '\t': + raw = raw + 1 + effective = (effective // tabwidth + 1) * tabwidth + else: + break + return raw, effective + +class IndentSearcher: + + # .run() chews over the Text widget, looking for a block opener + # and the stmt following it. 
Returns a pair, + # (line containing block opener, line containing stmt) + # Either or both may be None. + + def __init__(self, text, tabwidth): + self.text = text + self.tabwidth = tabwidth + self.i = self.finished = 0 + self.blkopenline = self.indentedline = None + + def readline(self): + if self.finished: + val = "" + else: + i = self.i = self.i + 1 + mark = repr(i) + ".0" + if self.text.compare(mark, ">=", "end"): + val = "" + else: + val = self.text.get(mark, mark + " lineend+1c") + # hrm - not sure this is correct in py3k - the source code may have + # an encoding declared, but the data will *always* be in + # default_scintilla_encoding - so if anyone looks at the encoding decl + # in the source they will be wrong. I think. Maybe. Or something... + return val.encode(default_scintilla_encoding) + + def run(self): + OPENERS=('class', 'def', 'for', 'if', 'try', 'while') + INDENT=tokenize.INDENT + NAME=tokenize.NAME + + save_tabsize = tokenize.tabsize + tokenize.tabsize = self.tabwidth + try: + try: + for (typ, token, start, end, line) in token_generator(self.readline): + if typ == NAME and token in OPENERS: + self.blkopenline = line + elif typ == INDENT and self.blkopenline: + self.indentedline = line + break + + except (tokenize.TokenError, IndentationError): + # since we cut off the tokenizer early, we can trigger + # spurious errors + pass + finally: + tokenize.tabsize = save_tabsize + return self.blkopenline, self.indentedline diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/CallTips.py b/venv/Lib/site-packages/pythonwin/pywin/idle/CallTips.py new file mode 100644 index 00000000..c52d6d40 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/idle/CallTips.py @@ -0,0 +1,185 @@ +# CallTips.py - An IDLE extension that provides "Call Tips" - ie, a floating window that +# displays parameter information as you open parens. + +import string +import sys +import inspect +import traceback + +class CallTips: + + menudefs = [ + ] + + keydefs = { + '<>': [''], + '<>': [''], + '<>': [''], + '<>': ['', ''], + } + + windows_keydefs = { + } + + unix_keydefs = { + } + + def __init__(self, editwin): + self.editwin = editwin + self.text = editwin.text + self.calltip = None + if hasattr(self.text, "make_calltip_window"): + self._make_calltip_window = self.text.make_calltip_window + else: + self._make_calltip_window = self._make_tk_calltip_window + + def close(self): + self._make_calltip_window = None + + # Makes a Tk based calltip window. Used by IDLE, but not Pythonwin. + # See __init__ above for how this is used. + def _make_tk_calltip_window(self): + import CallTipWindow + return CallTipWindow.CallTip(self.text) + + def _remove_calltip_window(self): + if self.calltip: + self.calltip.hidetip() + self.calltip = None + + def paren_open_event(self, event): + self._remove_calltip_window() + arg_text = get_arg_text(self.get_object_at_cursor()) + if arg_text: + self.calltip_start = self.text.index("insert") + self.calltip = self._make_calltip_window() + self.calltip.showtip(arg_text) + return "" #so the event is handled normally. + + def paren_close_event(self, event): + # Now just hides, but later we should check if other + # paren'd expressions remain open. + self._remove_calltip_window() + return "" #so the event is handled normally. + + def check_calltip_cancel_event(self, event): + if self.calltip: + # If we have moved before the start of the calltip, + # or off the calltip line, then cancel the tip. 
+ # (Later need to be smarter about multi-line, etc) + if self.text.compare("insert", "<=", self.calltip_start) or \ + self.text.compare("insert", ">", self.calltip_start + " lineend"): + self._remove_calltip_window() + return "" #so the event is handled normally. + + def calltip_cancel_event(self, event): + self._remove_calltip_window() + return "" #so the event is handled normally. + + def get_object_at_cursor(self, + wordchars="._" + string.ascii_uppercase + string.ascii_lowercase + string.digits): + # XXX - This needs to be moved to a better place + # so the "." attribute lookup code can also use it. + text = self.text + chars = text.get("insert linestart", "insert") + i = len(chars) + while i and chars[i-1] in wordchars: + i = i-1 + word = chars[i:] + if word: + # How is this for a hack! + import sys, __main__ + namespace = sys.modules.copy() + namespace.update(__main__.__dict__) + try: + return eval(word, namespace) + except: + pass + return None # Can't find an object. + +def _find_constructor(class_ob): + # Given a class object, return a function object used for the + # constructor (ie, __init__() ) or None if we can't find one. + try: + return class_ob.__init__ + except AttributeError: + for base in class_ob.__bases__: + rc = _find_constructor(base) + if rc is not None: return rc + return None + +def get_arg_text(ob): + # Get a string describing the arguments for the given object. + argText = "" + if ob is not None: + argOffset = 0 + if inspect.isclass(ob): + # Look for the highest __init__ in the class chain. + fob = _find_constructor(ob) + if fob is None: + fob = lambda: None + else: + fob = ob + if inspect.isfunction(fob) or inspect.ismethod(fob): + try: + # py3k has a 'getfullargspec' which can handle py3k specific things. + arg_getter = getattr(inspect, "getfullargspec", inspect.getargspec) + argText = inspect.formatargspec(*arg_getter(fob)) + except: + print("Failed to format the args") + traceback.print_exc() + # See if we can use the docstring + if hasattr(ob, "__doc__"): + doc=ob.__doc__ + try: + doc = doc.strip() + pos = doc.find("\n") + except AttributeError: + ## New style classes may have __doc__ slot without actually + ## having a string assigned to it + pass + else: + if pos<0 or pos>70: pos=70 + if argText: argText = argText + "\n" + argText = argText + doc[:pos] + + return argText + +################################################# +# +# Test code +# +if __name__=='__main__': + + def t1(): "()" + def t2(a, b=None): "(a, b=None)" + def t3(a, *args): "(a, *args)" + def t4(*args): "(*args)" + def t5(a, *args): "(a, *args)" + def t6(a, b=None, *args, **kw): "(a, b=None, *args, **kw)" + + class TC: + "(self, a=None, *b)" + def __init__(self, a=None, *b): "(self, a=None, *b)" + def t1(self): "(self)" + def t2(self, a, b=None): "(self, a, b=None)" + def t3(self, a, *args): "(self, a, *args)" + def t4(self, *args): "(self, *args)" + def t5(self, a, *args): "(self, a, *args)" + def t6(self, a, b=None, *args, **kw): "(self, a, b=None, *args, **kw)" + + def test( tests ): + failed=[] + for t in tests: + expected = t.__doc__ + "\n" + t.__doc__ + if get_arg_text(t) != expected: + failed.append(t) + print("%s - expected %s, but got %s" % (t, repr(expected), repr(get_arg_text(t)))) + print("%d of %d tests failed" % (len(failed), len(tests))) + + tc = TC() + tests = t1, t2, t3, t4, t5, t6, \ + TC, tc.t1, tc.t2, tc.t3, tc.t4, tc.t5, tc.t6 + + test(tests) + diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/FormatParagraph.py 
b/venv/Lib/site-packages/pythonwin/pywin/idle/FormatParagraph.py new file mode 100644 index 00000000..ba8088d7 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/idle/FormatParagraph.py @@ -0,0 +1,155 @@ +# Extension to format a paragraph + +# Does basic, standard text formatting, and also understands Python +# comment blocks. Thus, for editing Python source code, this +# extension is really only suitable for reformatting these comment +# blocks or triple-quoted strings. + +# Known problems with comment reformatting: +# * If there is a selection marked, and the first line of the +# selection is not complete, the block will probably not be detected +# as comments, and will have the normal "text formatting" rules +# applied. +# * If a comment block has leading whitespace that mixes tabs and +# spaces, they will not be considered part of the same block. +# * Fancy comments, like this bulleted list, arent handled :-) + +import string +import re + +class FormatParagraph: + + menudefs = [ + ('edit', [ + ('Format Paragraph', '<>'), + ]) + ] + + keydefs = { + '<>': [''], + } + + unix_keydefs = { + '<>': [''], + } + + def __init__(self, editwin): + self.editwin = editwin + + def close(self): + self.editwin = None + + def format_paragraph_event(self, event): + text = self.editwin.text + first, last = self.editwin.get_selection_indices() + if first and last: + data = text.get(first, last) + comment_header = '' + else: + first, last, comment_header, data = \ + find_paragraph(text, text.index("insert")) + if comment_header: + # Reformat the comment lines - convert to text sans header. + lines = data.split("\n") + lines = map(lambda st, l=len(comment_header): st[l:], lines) + data = "\n".join(lines) + # Reformat to 70 chars or a 20 char width, whichever is greater. + format_width = max(70-len(comment_header), 20) + newdata = reformat_paragraph(data, format_width) + # re-split and re-insert the comment header. + newdata = newdata.split("\n") + # If the block ends in a \n, we dont want the comment + # prefix inserted after it. (Im not sure it makes sense to + # reformat a comment block that isnt made of complete + # lines, but whatever!) 
Can't think of a clean soltution, + # so we hack away + block_suffix = "" + if not newdata[-1]: + block_suffix = "\n" + newdata = newdata[:-1] + builder = lambda item, prefix=comment_header: prefix+item + newdata = '\n'.join([builder(d) for d in newdata]) + block_suffix + else: + # Just a normal text format + newdata = reformat_paragraph(data) + text.tag_remove("sel", "1.0", "end") + if newdata != data: + text.mark_set("insert", first) + text.undo_block_start() + text.delete(first, last) + text.insert(first, newdata) + text.undo_block_stop() + else: + text.mark_set("insert", last) + text.see("insert") + +def find_paragraph(text, mark): + lineno, col = list(map(int, mark.split("."))) + line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + while text.compare("%d.0" % lineno, "<", "end") and is_all_white(line): + lineno = lineno + 1 + line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + first_lineno = lineno + comment_header = get_comment_header(line) + comment_header_len = len(comment_header) + while get_comment_header(line)==comment_header and \ + not is_all_white(line[comment_header_len:]): + lineno = lineno + 1 + line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + last = "%d.0" % lineno + # Search back to beginning of paragraph + lineno = first_lineno - 1 + line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + while lineno > 0 and \ + get_comment_header(line)==comment_header and \ + not is_all_white(line[comment_header_len:]): + lineno = lineno - 1 + line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + first = "%d.0" % (lineno+1) + return first, last, comment_header, text.get(first, last) + +def reformat_paragraph(data, limit=70): + lines = data.split("\n") + i = 0 + n = len(lines) + while i < n and is_all_white(lines[i]): + i = i+1 + if i >= n: + return data + indent1 = get_indent(lines[i]) + if i+1 < n and not is_all_white(lines[i+1]): + indent2 = get_indent(lines[i+1]) + else: + indent2 = indent1 + new = lines[:i] + partial = indent1 + while i < n and not is_all_white(lines[i]): + # XXX Should take double space after period (etc.) 
into account + words = re.split("(\s+)", lines[i]) + for j in range(0, len(words), 2): + word = words[j] + if not word: + continue # Can happen when line ends in whitespace + if len((partial + word).expandtabs()) > limit and \ + partial != indent1: + new.append(partial.rstrip()) + partial = indent2 + partial = partial + word + " " + if j+1 < len(words) and words[j+1] != " ": + partial = partial + " " + i = i+1 + new.append(partial.rstrip()) + # XXX Should reformat remaining paragraphs as well + new.extend(lines[i:]) + return "\n".join(new) + +def is_all_white(line): + return re.match(r"^\s*$", line) is not None + +def get_indent(line): + return re.match(r"^(\s*)", line).group() + +def get_comment_header(line): + m = re.match(r"^(\s*#*)", line) + if m is None: return "" + return m.group(1) diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/IdleHistory.py b/venv/Lib/site-packages/pythonwin/pywin/idle/IdleHistory.py new file mode 100644 index 00000000..c671af93 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/idle/IdleHistory.py @@ -0,0 +1,89 @@ +import string + +class History: + + def __init__(self, text, output_sep = "\n"): + self.text = text + self.history = [] + self.history_prefix = None + self.history_pointer = None + self.output_sep = output_sep + text.bind("<>", self.history_prev) + text.bind("<>", self.history_next) + + def history_next(self, event): + self.history_do(0) + return "break" + + def history_prev(self, event): + self.history_do(1) + return "break" + + def _get_source(self, start, end): + # Get source code from start index to end index. Lines in the + # text control may be separated by sys.ps2 . + lines = self.text.get(start, end).split(self.output_sep) + return "\n".join(lines) + + def _put_source(self, where, source): + output = self.output_sep.join(source.split("\n")) + self.text.insert(where, output) + + def history_do(self, reverse): + nhist = len(self.history) + pointer = self.history_pointer + prefix = self.history_prefix + if pointer is not None and prefix is not None: + if self.text.compare("insert", "!=", "end-1c") or \ + self._get_source("iomark", "end-1c") != self.history[pointer]: + pointer = prefix = None + if pointer is None or prefix is None: + prefix = self._get_source("iomark", "end-1c") + if reverse: + pointer = nhist + else: + pointer = -1 + nprefix = len(prefix) + while 1: + if reverse: + pointer = pointer - 1 + else: + pointer = pointer + 1 + if pointer < 0 or pointer >= nhist: + self.text.bell() + if self._get_source("iomark", "end-1c") != prefix: + self.text.delete("iomark", "end-1c") + self._put_source("iomark", prefix) + pointer = prefix = None + break + item = self.history[pointer] + if item[:nprefix] == prefix and len(item) > nprefix: + self.text.delete("iomark", "end-1c") + self._put_source("iomark", item) + break + self.text.mark_set("insert", "end-1c") + self.text.see("insert") + self.text.tag_remove("sel", "1.0", "end") + self.history_pointer = pointer + self.history_prefix = prefix + + def history_store(self, source): + source = source.strip() + if len(source) > 2: + # avoid duplicates + try: + self.history.remove(source) + except ValueError: + pass + self.history.append(source) + self.history_pointer = None + self.history_prefix = None + + def recall(self, s): + s = s.strip() + self.text.tag_remove("sel", "1.0", "end") + self.text.delete("iomark", "end-1c") + self.text.mark_set("insert", "end-1c") + self.text.insert("insert", s) + self.text.see("insert") + diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/PyParse.py 
b/venv/Lib/site-packages/pythonwin/pywin/idle/PyParse.py new file mode 100644 index 00000000..665b8404 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/idle/PyParse.py @@ -0,0 +1,568 @@ +import string +import re +import sys + +# Reason last stmt is continued (or C_NONE if it's not). +C_NONE, C_BACKSLASH, C_STRING, C_BRACKET = list(range(4)) + +if 0: # for throwaway debugging output + def dump(*stuff): + sys.__stdout__.write(" ".join(map(str, stuff)) + "\n") + +# Find what looks like the start of a popular stmt. + +_synchre = re.compile(r""" + ^ + [ \t]* + (?: if + | for + | while + | else + | def + | return + | assert + | break + | class + | continue + | elif + | try + | except + | raise + | import + ) + \b +""", re.VERBOSE | re.MULTILINE).search + +# Match blank line or non-indenting comment line. + +_junkre = re.compile(r""" + [ \t]* + (?: \# \S .* )? + \n +""", re.VERBOSE).match + +# Match any flavor of string; the terminating quote is optional +# so that we're robust in the face of incomplete program text. + +_match_stringre = re.compile(r""" + \""" [^"\\]* (?: + (?: \\. | "(?!"") ) + [^"\\]* + )* + (?: \""" )? + +| " [^"\\\n]* (?: \\. [^"\\\n]* )* "? + +| ''' [^'\\]* (?: + (?: \\. | '(?!'') ) + [^'\\]* + )* + (?: ''' )? + +| ' [^'\\\n]* (?: \\. [^'\\\n]* )* '? +""", re.VERBOSE | re.DOTALL).match + +# Match a line that starts with something interesting; +# used to find the first item of a bracket structure. + +_itemre = re.compile(r""" + [ \t]* + [^\s#\\] # if we match, m.end()-1 is the interesting char +""", re.VERBOSE).match + +# Match start of stmts that should be followed by a dedent. + +_closere = re.compile(r""" + \s* + (?: return + | break + | continue + | raise + | pass + ) + \b +""", re.VERBOSE).match + +# Chew up non-special chars as quickly as possible. If match is +# successful, m.end() less 1 is the index of the last boring char +# matched. If match is unsuccessful, the string starts with an +# interesting char. + +_chew_ordinaryre = re.compile(r""" + [^[\](){}#'"\\]+ +""", re.VERBOSE).match + +# Build translation table to map uninteresting chars to "x", open +# brackets to "(", and close brackets to ")". + +_tran = ['x'] * 256 +for ch in "({[": + _tran[ord(ch)] = '(' +for ch in ")}]": + _tran[ord(ch)] = ')' +for ch in "\"'\\\n#": + _tran[ord(ch)] = ch +# We are called with unicode strings, and str.translate is one of the few +# py2k functions which can't 'do the right thing' - so take care to ensure +# _tran is full of unicode... +_tran = ''.join(_tran) +del ch + +class Parser: + + def __init__(self, indentwidth, tabwidth): + self.indentwidth = indentwidth + self.tabwidth = tabwidth + + def set_str(self, str): + assert len(str) == 0 or str[-1] == '\n', "Oops - have str %r" % (str,) + self.str = str + self.study_level = 0 + + # Return index of a good place to begin parsing, as close to the + # end of the string as possible. This will be the start of some + # popular stmt like "if" or "def". Return None if none found: + # the caller should pass more prior context then, if possible, or + # if not (the entire program text up until the point of interest + # has already been tried) pass 0 to set_lo. + # + # This will be reliable iff given a reliable is_char_in_string + # function, meaning that when it says "no", it's absolutely + # guaranteed that the char is not in a string. + # + # Ack, hack: in the shell window this kills us, because there's + # no way to tell the differences between output, >>> etc and + # user input. 
Indeed, IDLE's first output line makes the rest + # look like it's in an unclosed paren!: + # Python 1.5.2 (#0, Apr 13 1999, ... + + def find_good_parse_start(self, use_ps1, is_char_in_string=None): + str, pos = self.str, None + if use_ps1: + # shell window + ps1 = '\n' + sys.ps1 + i = str.rfind(ps1) + if i >= 0: + pos = i + len(ps1) + # make it look like there's a newline instead + # of ps1 at the start -- hacking here once avoids + # repeated hackery later + self.str = str[:pos-1] + '\n' + str[pos:] + return pos + + # File window -- real work. + if not is_char_in_string: + # no clue -- make the caller pass everything + return None + + # Peek back from the end for a good place to start, + # but don't try too often; pos will be left None, or + # bumped to a legitimate synch point. + limit = len(str) + for tries in range(5): + i = str.rfind(":\n", 0, limit) + if i < 0: + break + i = str.rfind('\n', 0, i) + 1 # start of colon line + m = _synchre(str, i, limit) + if m and not is_char_in_string(m.start()): + pos = m.start() + break + limit = i + if pos is None: + # Nothing looks like a block-opener, or stuff does + # but is_char_in_string keeps returning true; most likely + # we're in or near a giant string, the colorizer hasn't + # caught up enough to be helpful, or there simply *aren't* + # any interesting stmts. In any of these cases we're + # going to have to parse the whole thing to be sure, so + # give it one last try from the start, but stop wasting + # time here regardless of the outcome. + m = _synchre(str) + if m and not is_char_in_string(m.start()): + pos = m.start() + return pos + + # Peeking back worked; look forward until _synchre no longer + # matches. + i = pos + 1 + while 1: + m = _synchre(str, i) + if m: + s, i = m.span() + if not is_char_in_string(s): + pos = s + else: + break + return pos + + # Throw away the start of the string. Intended to be called with + # find_good_parse_start's result. + + def set_lo(self, lo): + assert lo == 0 or self.str[lo-1] == '\n' + if lo > 0: + self.str = self.str[lo:] + + # As quickly as humanly possible , find the line numbers (0- + # based) of the non-continuation lines. + # Creates self.{goodlines, continuation}. + + def _study1(self): + if self.study_level >= 1: + return + self.study_level = 1 + + # Map all uninteresting characters to "x", all open brackets + # to "(", all close brackets to ")", then collapse runs of + # uninteresting characters. This can cut the number of chars + # by a factor of 10-40, and so greatly speed the following loop. + str = self.str + str = str.translate(_tran) + str = str.replace('xxxxxxxx', 'x') + str = str.replace('xxxx', 'x') + str = str.replace('xx', 'x') + str = str.replace('xx', 'x') + str = str.replace('\nx', '\n') + # note that replacing x\n with \n would be incorrect, because + # x may be preceded by a backslash + + # March over the squashed version of the program, accumulating + # the line numbers of non-continued stmts, and determining + # whether & why the last stmt is a continuation. 
+ continuation = C_NONE + level = lno = 0 # level is nesting level; lno is line number + self.goodlines = goodlines = [0] + push_good = goodlines.append + i, n = 0, len(str) + while i < n: + ch = str[i] + i = i+1 + + # cases are checked in decreasing order of frequency + if ch == 'x': + continue + + if ch == '\n': + lno = lno + 1 + if level == 0: + push_good(lno) + # else we're in an unclosed bracket structure + continue + + if ch == '(': + level = level + 1 + continue + + if ch == ')': + if level: + level = level - 1 + # else the program is invalid, but we can't complain + continue + + if ch == '"' or ch == "'": + # consume the string + quote = ch + if str[i-1:i+2] == quote * 3: + quote = quote * 3 + w = len(quote) - 1 + i = i+w + while i < n: + ch = str[i] + i = i+1 + + if ch == 'x': + continue + + if str[i-1:i+w] == quote: + i = i+w + break + + if ch == '\n': + lno = lno + 1 + if w == 0: + # unterminated single-quoted string + if level == 0: + push_good(lno) + break + continue + + if ch == '\\': + assert i < n + if str[i] == '\n': + lno = lno + 1 + i = i+1 + continue + + # else comment char or paren inside string + + else: + # didn't break out of the loop, so we're still + # inside a string + continuation = C_STRING + continue # with outer loop + + if ch == '#': + # consume the comment + i = str.find('\n', i) + assert i >= 0 + continue + + assert ch == '\\' + assert i < n + if str[i] == '\n': + lno = lno + 1 + if i+1 == n: + continuation = C_BACKSLASH + i = i+1 + + # The last stmt may be continued for all 3 reasons. + # String continuation takes precedence over bracket + # continuation, which beats backslash continuation. + if continuation != C_STRING and level > 0: + continuation = C_BRACKET + self.continuation = continuation + + # Push the final line number as a sentinel value, regardless of + # whether it's continued. + assert (continuation == C_NONE) == (goodlines[-1] == lno) + if goodlines[-1] != lno: + push_good(lno) + + def get_continuation_type(self): + self._study1() + return self.continuation + + # study1 was sufficient to determine the continuation status, + # but doing more requires looking at every character. study2 + # does this for the last interesting statement in the block. + # Creates: + # self.stmt_start, stmt_end + # slice indices of last interesting stmt + # self.lastch + # last non-whitespace character before optional trailing + # comment + # self.lastopenbracketpos + # if continuation is C_BRACKET, index of last open bracket + + def _study2(self): + _ws=string.whitespace + if self.study_level >= 2: + return + self._study1() + self.study_level = 2 + + # Set p and q to slice indices of last interesting stmt. + str, goodlines = self.str, self.goodlines + i = len(goodlines) - 1 + p = len(str) # index of newest line + while i: + assert p + # p is the index of the stmt at line number goodlines[i]. + # Move p back to the stmt at line number goodlines[i-1]. + q = p + for nothing in range(goodlines[i-1], goodlines[i]): + # tricky: sets p to 0 if no preceding newline + p = str.rfind('\n', 0, p-1) + 1 + # The stmt str[p:q] isn't a continuation, but may be blank + # or a non-indenting comment line. + if _junkre(str, p): + i = i-1 + else: + break + if i == 0: + # nothing but junk! + assert p == 0 + q = p + self.stmt_start, self.stmt_end = p, q + + # Analyze this stmt, to find the last open bracket (if any) + # and last interesting character (if any). 
+ lastch = "" + stack = [] # stack of open bracket indices + push_stack = stack.append + while p < q: + # suck up all except ()[]{}'"#\\ + m = _chew_ordinaryre(str, p, q) + if m: + # we skipped at least one boring char + newp = m.end() + # back up over totally boring whitespace + i = newp - 1 # index of last boring char + while i >= p and str[i] in " \t\n": + i = i-1 + if i >= p: + lastch = str[i] + p = newp + if p >= q: + break + + ch = str[p] + + if ch in "([{": + push_stack(p) + lastch = ch + p = p+1 + continue + + if ch in ")]}": + if stack: + del stack[-1] + lastch = ch + p = p+1 + continue + + if ch == '"' or ch == "'": + # consume string + # Note that study1 did this with a Python loop, but + # we use a regexp here; the reason is speed in both + # cases; the string may be huge, but study1 pre-squashed + # strings to a couple of characters per line. study1 + # also needed to keep track of newlines, and we don't + # have to. + lastch = ch + p = _match_stringre(str, p, q).end() + continue + + if ch == '#': + # consume comment and trailing newline + p = str.find('\n', p, q) + 1 + assert p > 0 + continue + + assert ch == '\\' + p = p+1 # beyond backslash + assert p < q + if str[p] != '\n': + # the program is invalid, but can't complain + lastch = ch + str[p] + p = p+1 # beyond escaped char + + # end while p < q: + + self.lastch = lastch + if stack: + self.lastopenbracketpos = stack[-1] + + # Assuming continuation is C_BRACKET, return the number + # of spaces the next line should be indented. + + def compute_bracket_indent(self): + self._study2() + assert self.continuation == C_BRACKET + j = self.lastopenbracketpos + str = self.str + n = len(str) + origi = i = str.rfind('\n', 0, j) + 1 + j = j+1 # one beyond open bracket + # find first list item; set i to start of its line + while j < n: + m = _itemre(str, j) + if m: + j = m.end() - 1 # index of first interesting char + extra = 0 + break + else: + # this line is junk; advance to next line + i = j = str.find('\n', j) + 1 + else: + # nothing interesting follows the bracket; + # reproduce the bracket line's indentation + a level + j = i = origi + while str[j] in " \t": + j = j+1 + extra = self.indentwidth + return len(str[i:j].expandtabs(self.tabwidth)) + extra + + # Return number of physical lines in last stmt (whether or not + # it's an interesting stmt! this is intended to be called when + # continuation is C_BACKSLASH). + + def get_num_lines_in_stmt(self): + self._study1() + goodlines = self.goodlines + return goodlines[-1] - goodlines[-2] + + # Assuming continuation is C_BACKSLASH, return the number of spaces + # the next line should be indented. Also assuming the new line is + # the first one following the initial line of the stmt. 
+ + def compute_backslash_indent(self): + self._study2() + assert self.continuation == C_BACKSLASH + str = self.str + i = self.stmt_start + while str[i] in " \t": + i = i+1 + startpos = i + + # See whether the initial line starts an assignment stmt; i.e., + # look for an = operator + endpos = str.find('\n', startpos) + 1 + found = level = 0 + while i < endpos: + ch = str[i] + if ch in "([{": + level = level + 1 + i = i+1 + elif ch in ")]}": + if level: + level = level - 1 + i = i+1 + elif ch == '"' or ch == "'": + i = _match_stringre(str, i, endpos).end() + elif ch == '#': + break + elif level == 0 and ch == '=' and \ + (i == 0 or str[i-1] not in "=<>!") and \ + str[i+1] != '=': + found = 1 + break + else: + i = i+1 + + if found: + # found a legit =, but it may be the last interesting + # thing on the line + i = i+1 # move beyond the = + found = re.match(r"\s*\\", str[i:endpos]) is None + + if not found: + # oh well ... settle for moving beyond the first chunk + # of non-whitespace chars + i = startpos + while str[i] not in " \t\n": + i = i+1 + + return len(str[self.stmt_start : i].expandtabs(self.tabwidth)) + 1 + + # Return the leading whitespace on the initial line of the last + # interesting stmt. + + def get_base_indent_string(self): + self._study2() + i, n = self.stmt_start, self.stmt_end + j = i + str = self.str + while j < n and str[j] in " \t": + j = j + 1 + return str[i:j] + + # Did the last interesting stmt open a block? + + def is_block_opener(self): + self._study2() + return self.lastch == ':' + + # Did the last interesting stmt close a block? + + def is_block_closer(self): + self._study2() + return _closere(self.str, self.stmt_start) is not None + + # index of last open bracket ({[, or None if none + lastopenbracketpos = None + + def get_last_open_bracket_pos(self): + self._study2() + return self.lastopenbracketpos diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/idle/__init__.py new file mode 100644 index 00000000..c9c70df4 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/idle/__init__.py @@ -0,0 +1 @@ +# This file denotes the directory as a Python package. 
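Taken together, the methods above are what an editor's auto-indent hook calls after each newline: classify how the last statement ended, then pick an indent for the fresh line. A hedged usage sketch — it assumes the Parser constructor and set_str() defined earlier in this same PyParse.py and that the pywin package is importable; only methods visible in this diff are used:

```python
from pywin.idle import PyParse

def suggest_indent(source, indentwidth=4, tabwidth=8):
    """Indent (in columns) for the line about to be opened after `source`."""
    p = PyParse.Parser(indentwidth, tabwidth)
    p.set_str(source)                      # buffer must end with '\n'
    c = p.get_continuation_type()
    if c == PyParse.C_BRACKET:
        return p.compute_bracket_indent()  # line up inside the open bracket
    if c == PyParse.C_BACKSLASH:
        return p.compute_backslash_indent()
    indent = len(p.get_base_indent_string().expandtabs(tabwidth))
    if p.is_block_opener():                # last stmt ended with ':'
        indent += indentwidth
    elif p.is_block_closer():              # return / break / pass / ...
        indent = max(0, indent - indentwidth)
    return indent

print(suggest_indent("def f(x):\n"))       # 4
print(suggest_indent("    return x\n"))    # 0
```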
\ No newline at end of file diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/AutoExpand.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/AutoExpand.cpython-36.pyc new file mode 100644 index 00000000..371dc3d4 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/AutoExpand.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/AutoIndent.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/AutoIndent.cpython-36.pyc new file mode 100644 index 00000000..89fc655d Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/AutoIndent.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/CallTips.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/CallTips.cpython-36.pyc new file mode 100644 index 00000000..eb2b6d86 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/CallTips.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/FormatParagraph.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/FormatParagraph.cpython-36.pyc new file mode 100644 index 00000000..01c25704 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/FormatParagraph.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/IdleHistory.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/IdleHistory.cpython-36.pyc new file mode 100644 index 00000000..e31c17f2 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/IdleHistory.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/PyParse.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/PyParse.cpython-36.pyc new file mode 100644 index 00000000..982dbf24 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/PyParse.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..60df6eea Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/idle/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/mfc/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..c704e3dc Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/activex.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/activex.cpython-36.pyc new file mode 100644 index 00000000..466b9ed8 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/activex.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/afxres.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/afxres.cpython-36.pyc new file mode 100644 index 00000000..a345ccb4 Binary files /dev/null and 
b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/afxres.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/dialog.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/dialog.cpython-36.pyc new file mode 100644 index 00000000..acc8041c Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/dialog.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/docview.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/docview.cpython-36.pyc new file mode 100644 index 00000000..9077bbe9 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/docview.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/object.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/object.cpython-36.pyc new file mode 100644 index 00000000..dafe7feb Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/object.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/thread.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/thread.cpython-36.pyc new file mode 100644 index 00000000..af271ffe Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/thread.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/window.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/window.cpython-36.pyc new file mode 100644 index 00000000..1a6680cc Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/mfc/__pycache__/window.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/activex.py b/venv/Lib/site-packages/pythonwin/pywin/mfc/activex.py new file mode 100644 index 00000000..b776c0ae --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/mfc/activex.py @@ -0,0 +1,75 @@ +"""Support for ActiveX control hosting in Pythonwin. +""" +import win32ui, win32uiole +from . import window +# XXX - we are still "classic style" classes in py2x, so we need can't yet +# use 'type()' everywhere - revisit soon, as py2x will move to new-style too... +try: + from types import ClassType as new_type +except ImportError: + new_type = type # py3k + +class Control(window.Wnd): + """An ActiveX control base class. A new class must be derived from both + this class and the Events class. See the demos for more details. 
+ """ + def __init__(self): + self.__dict__["_dispobj_"] = None + window.Wnd.__init__(self) + + def _GetControlCLSID( self ): + return self.CLSID + def _GetDispatchClass(self): + return self.default_interface + def _GetEventMap(self): + return self.default_source._dispid_to_func_ + + def CreateControl(self, windowTitle, style, rect, parent, id, lic_string=None): + clsid = str(self._GetControlCLSID()) + self.__dict__["_obj_"] = win32ui.CreateControl(clsid, windowTitle, style, rect, parent, id, None, False, lic_string) + klass = self._GetDispatchClass() + dispobj = klass(win32uiole.GetIDispatchForWindow(self._obj_)) + self.HookOleEvents() + self.__dict__["_dispobj_"] = dispobj + + def HookOleEvents(self): + dict = self._GetEventMap() + for dispid, methodName in dict.items(): + if hasattr(self, methodName): + self._obj_.HookOleEvent( getattr(self, methodName), dispid ) + + def __getattr__(self, attr): + # Delegate attributes to the windows and the Dispatch object for this class + try: + return window.Wnd.__getattr__(self, attr) + except AttributeError: + pass + return getattr(self._dispobj_, attr) + def __setattr__(self, attr, value): + if hasattr(self.__dict__, attr): + self.__dict__[attr] = value + return + try: + if self._dispobj_: + self._dispobj_.__setattr__(attr, value) + return + except AttributeError: + pass + self.__dict__[attr] = value + +def MakeControlClass( controlClass, name = None ): + """Given a CoClass in a generated .py file, this function will return a Class + object which can be used as an OCX control. + + This function is used when you do not want to handle any events from the OCX + control. If you need events, then you should derive a class from both the + activex.Control class and the CoClass + """ + if name is None: + name = controlClass.__name__ + return new_type("OCX" + name, (Control, controlClass), {}) + +def MakeControlInstance( controlClass, name = None ): + """As for MakeControlClass(), but returns an instance of the class. 
+ """ + return MakeControlClass(controlClass, name)() diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/afxres.py b/venv/Lib/site-packages/pythonwin/pywin/mfc/afxres.py new file mode 100644 index 00000000..d9fb1f2a --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/mfc/afxres.py @@ -0,0 +1,497 @@ +# Generated by h2py from stdin +TCS_MULTILINE = 0x0200 +CBRS_ALIGN_LEFT = 0x1000 +CBRS_ALIGN_TOP = 0x2000 +CBRS_ALIGN_RIGHT = 0x4000 +CBRS_ALIGN_BOTTOM = 0x8000 +CBRS_ALIGN_ANY = 0xF000 +CBRS_BORDER_LEFT = 0x0100 +CBRS_BORDER_TOP = 0x0200 +CBRS_BORDER_RIGHT = 0x0400 +CBRS_BORDER_BOTTOM = 0x0800 +CBRS_BORDER_ANY = 0x0F00 +CBRS_TOOLTIPS = 0x0010 +CBRS_FLYBY = 0x0020 +CBRS_FLOAT_MULTI = 0x0040 +CBRS_BORDER_3D = 0x0080 +CBRS_HIDE_INPLACE = 0x0008 +CBRS_SIZE_DYNAMIC = 0x0004 +CBRS_SIZE_FIXED = 0x0002 +CBRS_FLOATING = 0x0001 +CBRS_GRIPPER = 0x00400000 +CBRS_ORIENT_HORZ = (CBRS_ALIGN_TOP|CBRS_ALIGN_BOTTOM) +CBRS_ORIENT_VERT = (CBRS_ALIGN_LEFT|CBRS_ALIGN_RIGHT) +CBRS_ORIENT_ANY = (CBRS_ORIENT_HORZ|CBRS_ORIENT_VERT) +CBRS_ALL = 0xFFFF +CBRS_NOALIGN = 0x00000000 +CBRS_LEFT = (CBRS_ALIGN_LEFT|CBRS_BORDER_RIGHT) +CBRS_TOP = (CBRS_ALIGN_TOP|CBRS_BORDER_BOTTOM) +CBRS_RIGHT = (CBRS_ALIGN_RIGHT|CBRS_BORDER_LEFT) +CBRS_BOTTOM = (CBRS_ALIGN_BOTTOM|CBRS_BORDER_TOP) +SBPS_NORMAL = 0x0000 +SBPS_NOBORDERS = 0x0100 +SBPS_POPOUT = 0x0200 +SBPS_OWNERDRAW = 0x1000 +SBPS_DISABLED = 0x04000000 +SBPS_STRETCH = 0x08000000 +ID_INDICATOR_EXT = 0xE700 +ID_INDICATOR_CAPS = 0xE701 +ID_INDICATOR_NUM = 0xE702 +ID_INDICATOR_SCRL = 0xE703 +ID_INDICATOR_OVR = 0xE704 +ID_INDICATOR_REC = 0xE705 +ID_INDICATOR_KANA = 0xE706 +ID_SEPARATOR = 0 +AFX_IDW_CONTROLBAR_FIRST = 0xE800 +AFX_IDW_CONTROLBAR_LAST = 0xE8FF +AFX_IDW_TOOLBAR = 0xE800 +AFX_IDW_STATUS_BAR = 0xE801 +AFX_IDW_PREVIEW_BAR = 0xE802 +AFX_IDW_RESIZE_BAR = 0xE803 +AFX_IDW_DOCKBAR_TOP = 0xE81B +AFX_IDW_DOCKBAR_LEFT = 0xE81C +AFX_IDW_DOCKBAR_RIGHT = 0xE81D +AFX_IDW_DOCKBAR_BOTTOM = 0xE81E +AFX_IDW_DOCKBAR_FLOAT = 0xE81F +def AFX_CONTROLBAR_MASK(nIDC): return (1 << (nIDC - AFX_IDW_CONTROLBAR_FIRST)) + +AFX_IDW_PANE_FIRST = 0xE900 +AFX_IDW_PANE_LAST = 0xE9ff +AFX_IDW_HSCROLL_FIRST = 0xEA00 +AFX_IDW_VSCROLL_FIRST = 0xEA10 +AFX_IDW_SIZE_BOX = 0xEA20 +AFX_IDW_PANE_SAVE = 0xEA21 +AFX_IDS_APP_TITLE = 0xE000 +AFX_IDS_IDLEMESSAGE = 0xE001 +AFX_IDS_HELPMODEMESSAGE = 0xE002 +AFX_IDS_APP_TITLE_EMBEDDING = 0xE003 +AFX_IDS_COMPANY_NAME = 0xE004 +AFX_IDS_OBJ_TITLE_INPLACE = 0xE005 +ID_FILE_NEW = 0xE100 +ID_FILE_OPEN = 0xE101 +ID_FILE_CLOSE = 0xE102 +ID_FILE_SAVE = 0xE103 +ID_FILE_SAVE_AS = 0xE104 +ID_FILE_PAGE_SETUP = 0xE105 +ID_FILE_PRINT_SETUP = 0xE106 +ID_FILE_PRINT = 0xE107 +ID_FILE_PRINT_DIRECT = 0xE108 +ID_FILE_PRINT_PREVIEW = 0xE109 +ID_FILE_UPDATE = 0xE10A +ID_FILE_SAVE_COPY_AS = 0xE10B +ID_FILE_SEND_MAIL = 0xE10C +ID_FILE_MRU_FIRST = 0xE110 +ID_FILE_MRU_FILE1 = 0xE110 +ID_FILE_MRU_FILE2 = 0xE111 +ID_FILE_MRU_FILE3 = 0xE112 +ID_FILE_MRU_FILE4 = 0xE113 +ID_FILE_MRU_FILE5 = 0xE114 +ID_FILE_MRU_FILE6 = 0xE115 +ID_FILE_MRU_FILE7 = 0xE116 +ID_FILE_MRU_FILE8 = 0xE117 +ID_FILE_MRU_FILE9 = 0xE118 +ID_FILE_MRU_FILE10 = 0xE119 +ID_FILE_MRU_FILE11 = 0xE11A +ID_FILE_MRU_FILE12 = 0xE11B +ID_FILE_MRU_FILE13 = 0xE11C +ID_FILE_MRU_FILE14 = 0xE11D +ID_FILE_MRU_FILE15 = 0xE11E +ID_FILE_MRU_FILE16 = 0xE11F +ID_FILE_MRU_LAST = 0xE11F +ID_EDIT_CLEAR = 0xE120 +ID_EDIT_CLEAR_ALL = 0xE121 +ID_EDIT_COPY = 0xE122 +ID_EDIT_CUT = 0xE123 +ID_EDIT_FIND = 0xE124 +ID_EDIT_PASTE = 0xE125 +ID_EDIT_PASTE_LINK = 0xE126 +ID_EDIT_PASTE_SPECIAL = 0xE127 +ID_EDIT_REPEAT = 0xE128 +ID_EDIT_REPLACE = 0xE129 
+ID_EDIT_SELECT_ALL = 0xE12A +ID_EDIT_UNDO = 0xE12B +ID_EDIT_REDO = 0xE12C +ID_WINDOW_NEW = 0xE130 +ID_WINDOW_ARRANGE = 0xE131 +ID_WINDOW_CASCADE = 0xE132 +ID_WINDOW_TILE_HORZ = 0xE133 +ID_WINDOW_TILE_VERT = 0xE134 +ID_WINDOW_SPLIT = 0xE135 +AFX_IDM_WINDOW_FIRST = 0xE130 +AFX_IDM_WINDOW_LAST = 0xE13F +AFX_IDM_FIRST_MDICHILD = 0xFF00 +ID_APP_ABOUT = 0xE140 +ID_APP_EXIT = 0xE141 +ID_HELP_INDEX = 0xE142 +ID_HELP_FINDER = 0xE143 +ID_HELP_USING = 0xE144 +ID_CONTEXT_HELP = 0xE145 +ID_HELP = 0xE146 +ID_DEFAULT_HELP = 0xE147 +ID_NEXT_PANE = 0xE150 +ID_PREV_PANE = 0xE151 +ID_FORMAT_FONT = 0xE160 +ID_OLE_INSERT_NEW = 0xE200 +ID_OLE_EDIT_LINKS = 0xE201 +ID_OLE_EDIT_CONVERT = 0xE202 +ID_OLE_EDIT_CHANGE_ICON = 0xE203 +ID_OLE_EDIT_PROPERTIES = 0xE204 +ID_OLE_VERB_FIRST = 0xE210 +ID_OLE_VERB_LAST = 0xE21F +AFX_ID_PREVIEW_CLOSE = 0xE300 +AFX_ID_PREVIEW_NUMPAGE = 0xE301 +AFX_ID_PREVIEW_NEXT = 0xE302 +AFX_ID_PREVIEW_PREV = 0xE303 +AFX_ID_PREVIEW_PRINT = 0xE304 +AFX_ID_PREVIEW_ZOOMIN = 0xE305 +AFX_ID_PREVIEW_ZOOMOUT = 0xE306 +ID_VIEW_TOOLBAR = 0xE800 +ID_VIEW_STATUS_BAR = 0xE801 +ID_RECORD_FIRST = 0xE900 +ID_RECORD_LAST = 0xE901 +ID_RECORD_NEXT = 0xE902 +ID_RECORD_PREV = 0xE903 +IDC_STATIC = (-1) +AFX_IDS_SCFIRST = 0xEF00 +AFX_IDS_SCSIZE = 0xEF00 +AFX_IDS_SCMOVE = 0xEF01 +AFX_IDS_SCMINIMIZE = 0xEF02 +AFX_IDS_SCMAXIMIZE = 0xEF03 +AFX_IDS_SCNEXTWINDOW = 0xEF04 +AFX_IDS_SCPREVWINDOW = 0xEF05 +AFX_IDS_SCCLOSE = 0xEF06 +AFX_IDS_SCRESTORE = 0xEF12 +AFX_IDS_SCTASKLIST = 0xEF13 +AFX_IDS_MDICHILD = 0xEF1F +AFX_IDS_DESKACCESSORY = 0xEFDA +AFX_IDS_OPENFILE = 0xF000 +AFX_IDS_SAVEFILE = 0xF001 +AFX_IDS_ALLFILTER = 0xF002 +AFX_IDS_UNTITLED = 0xF003 +AFX_IDS_SAVEFILECOPY = 0xF004 +AFX_IDS_PREVIEW_CLOSE = 0xF005 +AFX_IDS_UNNAMED_FILE = 0xF006 +AFX_IDS_ABOUT = 0xF010 +AFX_IDS_HIDE = 0xF011 +AFX_IDP_NO_ERROR_AVAILABLE = 0xF020 +AFX_IDS_NOT_SUPPORTED_EXCEPTION = 0xF021 +AFX_IDS_RESOURCE_EXCEPTION = 0xF022 +AFX_IDS_MEMORY_EXCEPTION = 0xF023 +AFX_IDS_USER_EXCEPTION = 0xF024 +AFX_IDS_PRINTONPORT = 0xF040 +AFX_IDS_ONEPAGE = 0xF041 +AFX_IDS_TWOPAGE = 0xF042 +AFX_IDS_PRINTPAGENUM = 0xF043 +AFX_IDS_PREVIEWPAGEDESC = 0xF044 +AFX_IDS_PRINTDEFAULTEXT = 0xF045 +AFX_IDS_PRINTDEFAULT = 0xF046 +AFX_IDS_PRINTFILTER = 0xF047 +AFX_IDS_PRINTCAPTION = 0xF048 +AFX_IDS_PRINTTOFILE = 0xF049 +AFX_IDS_OBJECT_MENUITEM = 0xF080 +AFX_IDS_EDIT_VERB = 0xF081 +AFX_IDS_ACTIVATE_VERB = 0xF082 +AFX_IDS_CHANGE_LINK = 0xF083 +AFX_IDS_AUTO = 0xF084 +AFX_IDS_MANUAL = 0xF085 +AFX_IDS_FROZEN = 0xF086 +AFX_IDS_ALL_FILES = 0xF087 +AFX_IDS_SAVE_MENU = 0xF088 +AFX_IDS_UPDATE_MENU = 0xF089 +AFX_IDS_SAVE_AS_MENU = 0xF08A +AFX_IDS_SAVE_COPY_AS_MENU = 0xF08B +AFX_IDS_EXIT_MENU = 0xF08C +AFX_IDS_UPDATING_ITEMS = 0xF08D +AFX_IDS_METAFILE_FORMAT = 0xF08E +AFX_IDS_DIB_FORMAT = 0xF08F +AFX_IDS_BITMAP_FORMAT = 0xF090 +AFX_IDS_LINKSOURCE_FORMAT = 0xF091 +AFX_IDS_EMBED_FORMAT = 0xF092 +AFX_IDS_PASTELINKEDTYPE = 0xF094 +AFX_IDS_UNKNOWNTYPE = 0xF095 +AFX_IDS_RTF_FORMAT = 0xF096 +AFX_IDS_TEXT_FORMAT = 0xF097 +AFX_IDS_INVALID_CURRENCY = 0xF098 +AFX_IDS_INVALID_DATETIME = 0xF099 +AFX_IDS_INVALID_DATETIMESPAN = 0xF09A +AFX_IDP_INVALID_FILENAME = 0xF100 +AFX_IDP_FAILED_TO_OPEN_DOC = 0xF101 +AFX_IDP_FAILED_TO_SAVE_DOC = 0xF102 +AFX_IDP_ASK_TO_SAVE = 0xF103 +AFX_IDP_FAILED_TO_CREATE_DOC = 0xF104 +AFX_IDP_FILE_TOO_LARGE = 0xF105 +AFX_IDP_FAILED_TO_START_PRINT = 0xF106 +AFX_IDP_FAILED_TO_LAUNCH_HELP = 0xF107 +AFX_IDP_INTERNAL_FAILURE = 0xF108 +AFX_IDP_COMMAND_FAILURE = 0xF109 +AFX_IDP_FAILED_MEMORY_ALLOC = 0xF10A +AFX_IDP_PARSE_INT = 0xF110 +AFX_IDP_PARSE_REAL = 0xF111 
+AFX_IDP_PARSE_INT_RANGE = 0xF112 +AFX_IDP_PARSE_REAL_RANGE = 0xF113 +AFX_IDP_PARSE_STRING_SIZE = 0xF114 +AFX_IDP_PARSE_RADIO_BUTTON = 0xF115 +AFX_IDP_PARSE_BYTE = 0xF116 +AFX_IDP_PARSE_UINT = 0xF117 +AFX_IDP_PARSE_DATETIME = 0xF118 +AFX_IDP_PARSE_CURRENCY = 0xF119 +AFX_IDP_FAILED_INVALID_FORMAT = 0xF120 +AFX_IDP_FAILED_INVALID_PATH = 0xF121 +AFX_IDP_FAILED_DISK_FULL = 0xF122 +AFX_IDP_FAILED_ACCESS_READ = 0xF123 +AFX_IDP_FAILED_ACCESS_WRITE = 0xF124 +AFX_IDP_FAILED_IO_ERROR_READ = 0xF125 +AFX_IDP_FAILED_IO_ERROR_WRITE = 0xF126 +AFX_IDP_STATIC_OBJECT = 0xF180 +AFX_IDP_FAILED_TO_CONNECT = 0xF181 +AFX_IDP_SERVER_BUSY = 0xF182 +AFX_IDP_BAD_VERB = 0xF183 +AFX_IDP_FAILED_TO_NOTIFY = 0xF185 +AFX_IDP_FAILED_TO_LAUNCH = 0xF186 +AFX_IDP_ASK_TO_UPDATE = 0xF187 +AFX_IDP_FAILED_TO_UPDATE = 0xF188 +AFX_IDP_FAILED_TO_REGISTER = 0xF189 +AFX_IDP_FAILED_TO_AUTO_REGISTER = 0xF18A +AFX_IDP_FAILED_TO_CONVERT = 0xF18B +AFX_IDP_GET_NOT_SUPPORTED = 0xF18C +AFX_IDP_SET_NOT_SUPPORTED = 0xF18D +AFX_IDP_ASK_TO_DISCARD = 0xF18E +AFX_IDP_FAILED_TO_CREATE = 0xF18F +AFX_IDP_FAILED_MAPI_LOAD = 0xF190 +AFX_IDP_INVALID_MAPI_DLL = 0xF191 +AFX_IDP_FAILED_MAPI_SEND = 0xF192 +AFX_IDP_FILE_NONE = 0xF1A0 +AFX_IDP_FILE_GENERIC = 0xF1A1 +AFX_IDP_FILE_NOT_FOUND = 0xF1A2 +AFX_IDP_FILE_BAD_PATH = 0xF1A3 +AFX_IDP_FILE_TOO_MANY_OPEN = 0xF1A4 +AFX_IDP_FILE_ACCESS_DENIED = 0xF1A5 +AFX_IDP_FILE_INVALID_FILE = 0xF1A6 +AFX_IDP_FILE_REMOVE_CURRENT = 0xF1A7 +AFX_IDP_FILE_DIR_FULL = 0xF1A8 +AFX_IDP_FILE_BAD_SEEK = 0xF1A9 +AFX_IDP_FILE_HARD_IO = 0xF1AA +AFX_IDP_FILE_SHARING = 0xF1AB +AFX_IDP_FILE_LOCKING = 0xF1AC +AFX_IDP_FILE_DISKFULL = 0xF1AD +AFX_IDP_FILE_EOF = 0xF1AE +AFX_IDP_ARCH_NONE = 0xF1B0 +AFX_IDP_ARCH_GENERIC = 0xF1B1 +AFX_IDP_ARCH_READONLY = 0xF1B2 +AFX_IDP_ARCH_ENDOFFILE = 0xF1B3 +AFX_IDP_ARCH_WRITEONLY = 0xF1B4 +AFX_IDP_ARCH_BADINDEX = 0xF1B5 +AFX_IDP_ARCH_BADCLASS = 0xF1B6 +AFX_IDP_ARCH_BADSCHEMA = 0xF1B7 +AFX_IDS_OCC_SCALEUNITS_PIXELS = 0xF1C0 +AFX_IDS_STATUS_FONT = 0xF230 +AFX_IDS_TOOLTIP_FONT = 0xF231 +AFX_IDS_UNICODE_FONT = 0xF232 +AFX_IDS_MINI_FONT = 0xF233 +AFX_IDP_SQL_FIRST = 0xF280 +AFX_IDP_SQL_CONNECT_FAIL = 0xF281 +AFX_IDP_SQL_RECORDSET_FORWARD_ONLY = 0xF282 +AFX_IDP_SQL_EMPTY_COLUMN_LIST = 0xF283 +AFX_IDP_SQL_FIELD_SCHEMA_MISMATCH = 0xF284 +AFX_IDP_SQL_ILLEGAL_MODE = 0xF285 +AFX_IDP_SQL_MULTIPLE_ROWS_AFFECTED = 0xF286 +AFX_IDP_SQL_NO_CURRENT_RECORD = 0xF287 +AFX_IDP_SQL_NO_ROWS_AFFECTED = 0xF288 +AFX_IDP_SQL_RECORDSET_READONLY = 0xF289 +AFX_IDP_SQL_SQL_NO_TOTAL = 0xF28A +AFX_IDP_SQL_ODBC_LOAD_FAILED = 0xF28B +AFX_IDP_SQL_DYNASET_NOT_SUPPORTED = 0xF28C +AFX_IDP_SQL_SNAPSHOT_NOT_SUPPORTED = 0xF28D +AFX_IDP_SQL_API_CONFORMANCE = 0xF28E +AFX_IDP_SQL_SQL_CONFORMANCE = 0xF28F +AFX_IDP_SQL_NO_DATA_FOUND = 0xF290 +AFX_IDP_SQL_ROW_UPDATE_NOT_SUPPORTED = 0xF291 +AFX_IDP_SQL_ODBC_V2_REQUIRED = 0xF292 +AFX_IDP_SQL_NO_POSITIONED_UPDATES = 0xF293 +AFX_IDP_SQL_LOCK_MODE_NOT_SUPPORTED = 0xF294 +AFX_IDP_SQL_DATA_TRUNCATED = 0xF295 +AFX_IDP_SQL_ROW_FETCH = 0xF296 +AFX_IDP_SQL_INCORRECT_ODBC = 0xF297 +AFX_IDP_SQL_UPDATE_DELETE_FAILED = 0xF298 +AFX_IDP_SQL_DYNAMIC_CURSOR_NOT_SUPPORTED = 0xF299 +AFX_IDP_DAO_FIRST = 0xF2A0 +AFX_IDP_DAO_ENGINE_INITIALIZATION = 0xF2A0 +AFX_IDP_DAO_DFX_BIND = 0xF2A1 +AFX_IDP_DAO_OBJECT_NOT_OPEN = 0xF2A2 +AFX_IDP_DAO_ROWTOOSHORT = 0xF2A3 +AFX_IDP_DAO_BADBINDINFO = 0xF2A4 +AFX_IDP_DAO_COLUMNUNAVAILABLE = 0xF2A5 +AFX_IDC_LISTBOX = 100 +AFX_IDC_CHANGE = 101 +AFX_IDC_PRINT_DOCNAME = 201 +AFX_IDC_PRINT_PRINTERNAME = 202 +AFX_IDC_PRINT_PORTNAME = 203 +AFX_IDC_PRINT_PAGENUM = 204 +ID_APPLY_NOW = 0x3021 +ID_WIZBACK = 
0x3023 +ID_WIZNEXT = 0x3024 +ID_WIZFINISH = 0x3025 +AFX_IDC_TAB_CONTROL = 0x3020 +AFX_IDD_FILEOPEN = 28676 +AFX_IDD_FILESAVE = 28677 +AFX_IDD_FONT = 28678 +AFX_IDD_COLOR = 28679 +AFX_IDD_PRINT = 28680 +AFX_IDD_PRINTSETUP = 28681 +AFX_IDD_FIND = 28682 +AFX_IDD_REPLACE = 28683 +AFX_IDD_NEWTYPEDLG = 30721 +AFX_IDD_PRINTDLG = 30722 +AFX_IDD_PREVIEW_TOOLBAR = 30723 +AFX_IDD_PREVIEW_SHORTTOOLBAR = 30731 +AFX_IDD_INSERTOBJECT = 30724 +AFX_IDD_CHANGEICON = 30725 +AFX_IDD_CONVERT = 30726 +AFX_IDD_PASTESPECIAL = 30727 +AFX_IDD_EDITLINKS = 30728 +AFX_IDD_FILEBROWSE = 30729 +AFX_IDD_BUSY = 30730 +AFX_IDD_OBJECTPROPERTIES = 30732 +AFX_IDD_CHANGESOURCE = 30733 +AFX_IDC_CONTEXTHELP = 30977 +AFX_IDC_MAGNIFY = 30978 +AFX_IDC_SMALLARROWS = 30979 +AFX_IDC_HSPLITBAR = 30980 +AFX_IDC_VSPLITBAR = 30981 +AFX_IDC_NODROPCRSR = 30982 +AFX_IDC_TRACKNWSE = 30983 +AFX_IDC_TRACKNESW = 30984 +AFX_IDC_TRACKNS = 30985 +AFX_IDC_TRACKWE = 30986 +AFX_IDC_TRACK4WAY = 30987 +AFX_IDC_MOVE4WAY = 30988 +AFX_IDB_MINIFRAME_MENU = 30994 +AFX_IDB_CHECKLISTBOX_NT = 30995 +AFX_IDB_CHECKLISTBOX_95 = 30996 +AFX_IDR_PREVIEW_ACCEL = 30997 +AFX_IDI_STD_MDIFRAME = 31233 +AFX_IDI_STD_FRAME = 31234 +AFX_IDC_FONTPROP = 1000 +AFX_IDC_FONTNAMES = 1001 +AFX_IDC_FONTSTYLES = 1002 +AFX_IDC_FONTSIZES = 1003 +AFX_IDC_STRIKEOUT = 1004 +AFX_IDC_UNDERLINE = 1005 +AFX_IDC_SAMPLEBOX = 1006 +AFX_IDC_COLOR_BLACK = 1100 +AFX_IDC_COLOR_WHITE = 1101 +AFX_IDC_COLOR_RED = 1102 +AFX_IDC_COLOR_GREEN = 1103 +AFX_IDC_COLOR_BLUE = 1104 +AFX_IDC_COLOR_YELLOW = 1105 +AFX_IDC_COLOR_MAGENTA = 1106 +AFX_IDC_COLOR_CYAN = 1107 +AFX_IDC_COLOR_GRAY = 1108 +AFX_IDC_COLOR_LIGHTGRAY = 1109 +AFX_IDC_COLOR_DARKRED = 1110 +AFX_IDC_COLOR_DARKGREEN = 1111 +AFX_IDC_COLOR_DARKBLUE = 1112 +AFX_IDC_COLOR_LIGHTBROWN = 1113 +AFX_IDC_COLOR_DARKMAGENTA = 1114 +AFX_IDC_COLOR_DARKCYAN = 1115 +AFX_IDC_COLORPROP = 1116 +AFX_IDC_SYSTEMCOLORS = 1117 +AFX_IDC_PROPNAME = 1201 +AFX_IDC_PICTURE = 1202 +AFX_IDC_BROWSE = 1203 +AFX_IDC_CLEAR = 1204 +AFX_IDD_PROPPAGE_COLOR = 32257 +AFX_IDD_PROPPAGE_FONT = 32258 +AFX_IDD_PROPPAGE_PICTURE = 32259 +AFX_IDB_TRUETYPE = 32384 +AFX_IDS_PROPPAGE_UNKNOWN = 0xFE01 +AFX_IDS_COLOR_DESKTOP = 0xFE04 +AFX_IDS_COLOR_APPWORKSPACE = 0xFE05 +AFX_IDS_COLOR_WNDBACKGND = 0xFE06 +AFX_IDS_COLOR_WNDTEXT = 0xFE07 +AFX_IDS_COLOR_MENUBAR = 0xFE08 +AFX_IDS_COLOR_MENUTEXT = 0xFE09 +AFX_IDS_COLOR_ACTIVEBAR = 0xFE0A +AFX_IDS_COLOR_INACTIVEBAR = 0xFE0B +AFX_IDS_COLOR_ACTIVETEXT = 0xFE0C +AFX_IDS_COLOR_INACTIVETEXT = 0xFE0D +AFX_IDS_COLOR_ACTIVEBORDER = 0xFE0E +AFX_IDS_COLOR_INACTIVEBORDER = 0xFE0F +AFX_IDS_COLOR_WNDFRAME = 0xFE10 +AFX_IDS_COLOR_SCROLLBARS = 0xFE11 +AFX_IDS_COLOR_BTNFACE = 0xFE12 +AFX_IDS_COLOR_BTNSHADOW = 0xFE13 +AFX_IDS_COLOR_BTNTEXT = 0xFE14 +AFX_IDS_COLOR_BTNHIGHLIGHT = 0xFE15 +AFX_IDS_COLOR_DISABLEDTEXT = 0xFE16 +AFX_IDS_COLOR_HIGHLIGHT = 0xFE17 +AFX_IDS_COLOR_HIGHLIGHTTEXT = 0xFE18 +AFX_IDS_REGULAR = 0xFE19 +AFX_IDS_BOLD = 0xFE1A +AFX_IDS_ITALIC = 0xFE1B +AFX_IDS_BOLDITALIC = 0xFE1C +AFX_IDS_SAMPLETEXT = 0xFE1D +AFX_IDS_DISPLAYSTRING_FONT = 0xFE1E +AFX_IDS_DISPLAYSTRING_COLOR = 0xFE1F +AFX_IDS_DISPLAYSTRING_PICTURE = 0xFE20 +AFX_IDS_PICTUREFILTER = 0xFE21 +AFX_IDS_PICTYPE_UNKNOWN = 0xFE22 +AFX_IDS_PICTYPE_NONE = 0xFE23 +AFX_IDS_PICTYPE_BITMAP = 0xFE24 +AFX_IDS_PICTYPE_METAFILE = 0xFE25 +AFX_IDS_PICTYPE_ICON = 0xFE26 +AFX_IDS_COLOR_PPG = 0xFE28 +AFX_IDS_COLOR_PPG_CAPTION = 0xFE29 +AFX_IDS_FONT_PPG = 0xFE2A +AFX_IDS_FONT_PPG_CAPTION = 0xFE2B +AFX_IDS_PICTURE_PPG = 0xFE2C +AFX_IDS_PICTURE_PPG_CAPTION = 0xFE2D +AFX_IDS_PICTUREBROWSETITLE = 0xFE30 +AFX_IDS_BORDERSTYLE_0 = 
0xFE31 +AFX_IDS_BORDERSTYLE_1 = 0xFE32 +AFX_IDS_VERB_EDIT = 0xFE40 +AFX_IDS_VERB_PROPERTIES = 0xFE41 +AFX_IDP_PICTURECANTOPEN = 0xFE83 +AFX_IDP_PICTURECANTLOAD = 0xFE84 +AFX_IDP_PICTURETOOLARGE = 0xFE85 +AFX_IDP_PICTUREREADFAILED = 0xFE86 +AFX_IDP_E_ILLEGALFUNCTIONCALL = 0xFEA0 +AFX_IDP_E_OVERFLOW = 0xFEA1 +AFX_IDP_E_OUTOFMEMORY = 0xFEA2 +AFX_IDP_E_DIVISIONBYZERO = 0xFEA3 +AFX_IDP_E_OUTOFSTRINGSPACE = 0xFEA4 +AFX_IDP_E_OUTOFSTACKSPACE = 0xFEA5 +AFX_IDP_E_BADFILENAMEORNUMBER = 0xFEA6 +AFX_IDP_E_FILENOTFOUND = 0xFEA7 +AFX_IDP_E_BADFILEMODE = 0xFEA8 +AFX_IDP_E_FILEALREADYOPEN = 0xFEA9 +AFX_IDP_E_DEVICEIOERROR = 0xFEAA +AFX_IDP_E_FILEALREADYEXISTS = 0xFEAB +AFX_IDP_E_BADRECORDLENGTH = 0xFEAC +AFX_IDP_E_DISKFULL = 0xFEAD +AFX_IDP_E_BADRECORDNUMBER = 0xFEAE +AFX_IDP_E_BADFILENAME = 0xFEAF +AFX_IDP_E_TOOMANYFILES = 0xFEB0 +AFX_IDP_E_DEVICEUNAVAILABLE = 0xFEB1 +AFX_IDP_E_PERMISSIONDENIED = 0xFEB2 +AFX_IDP_E_DISKNOTREADY = 0xFEB3 +AFX_IDP_E_PATHFILEACCESSERROR = 0xFEB4 +AFX_IDP_E_PATHNOTFOUND = 0xFEB5 +AFX_IDP_E_INVALIDPATTERNSTRING = 0xFEB6 +AFX_IDP_E_INVALIDUSEOFNULL = 0xFEB7 +AFX_IDP_E_INVALIDFILEFORMAT = 0xFEB8 +AFX_IDP_E_INVALIDPROPERTYVALUE = 0xFEB9 +AFX_IDP_E_INVALIDPROPERTYARRAYINDEX = 0xFEBA +AFX_IDP_E_SETNOTSUPPORTEDATRUNTIME = 0xFEBB +AFX_IDP_E_SETNOTSUPPORTED = 0xFEBC +AFX_IDP_E_NEEDPROPERTYARRAYINDEX = 0xFEBD +AFX_IDP_E_SETNOTPERMITTED = 0xFEBE +AFX_IDP_E_GETNOTSUPPORTEDATRUNTIME = 0xFEBF +AFX_IDP_E_GETNOTSUPPORTED = 0xFEC0 +AFX_IDP_E_PROPERTYNOTFOUND = 0xFEC1 +AFX_IDP_E_INVALIDCLIPBOARDFORMAT = 0xFEC2 +AFX_IDP_E_INVALIDPICTURE = 0xFEC3 +AFX_IDP_E_PRINTERERROR = 0xFEC4 +AFX_IDP_E_CANTSAVEFILETOTEMP = 0xFEC5 +AFX_IDP_E_SEARCHTEXTNOTFOUND = 0xFEC6 +AFX_IDP_E_REPLACEMENTSTOOLONG = 0xFEC7 diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/dialog.py b/venv/Lib/site-packages/pythonwin/pywin/mfc/dialog.py new file mode 100644 index 00000000..02118013 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/mfc/dialog.py @@ -0,0 +1,241 @@ +""" \ +Base class for Dialogs. Also contains a few useful utility functions +""" +# dialog.py +# Python class for Dialog Boxes in PythonWin. + +import win32ui +import win32con +# sob - 2to3 doesn't see this as a relative import :( +from pywin.mfc import window + +def dllFromDll(dllid): + " given a 'dll' (maybe a dll, filename, etc), return a DLL object " + if dllid==None: + return None + elif type('')==type(dllid): + return win32ui.LoadLibrary(dllid) + else: + try: + dllid.GetFileName() + except AttributeError: + raise TypeError("DLL parameter must be None, a filename or a dll object") + return dllid + +class Dialog(window.Wnd): + " Base class for a dialog" + def __init__( self, id, dllid=None ): + """ id is the resource ID, or a template + dllid may be None, a dll object, or a string with a dll name """ + # must take a reference to the DLL until InitDialog. + self.dll=dllFromDll(dllid) + if type(id)==type([]): # a template + dlg=win32ui.CreateDialogIndirect(id) + else: + dlg=win32ui.CreateDialog(id, self.dll) + window.Wnd.__init__(self, dlg) + self.HookCommands() + self.bHaveInit = None + + def HookCommands(self): + pass + + def OnAttachedObjectDeath(self): + self.data = self._obj_.data + window.Wnd.OnAttachedObjectDeath(self) + + # provide virtuals. + def OnOK(self): + self._obj_.OnOK() + def OnCancel(self): + self._obj_.OnCancel() + def OnInitDialog(self): + self.bHaveInit = 1 + if self._obj_.data: + self._obj_.UpdateData(0) + return 1 # I did NOT set focus to a child window. 
+ def OnDestroy(self,msg): + self.dll = None # theoretically not needed if object destructs normally. + # DDX support + def AddDDX( self, *args ): + self._obj_.datalist.append(args) + # Make a dialog object look like a dictionary for the DDX support + def __bool__(self): + return True + def __len__(self): return len(self.data) + def __getitem__(self, key): return self.data[key] + def __setitem__(self, key, item): self._obj_.data[key] = item# self.UpdateData(0) + def keys(self): return list(self.data.keys()) + def items(self): return list(self.data.items()) + def values(self): return list(self.data.values()) + # XXX - needs py3k work! + def has_key(self, key): return key in self.data + +class PrintDialog(Dialog): + " Base class for a print dialog" + def __init__(self, pInfo, dlgID, + printSetupOnly = 0, + flags=(win32ui.PD_ALLPAGES| + win32ui.PD_USEDEVMODECOPIES| + win32ui.PD_NOPAGENUMS| + win32ui.PD_HIDEPRINTTOFILE| + win32ui.PD_NOSELECTION), + parent=None, + dllid=None): + self.dll=dllFromDll(dllid) + if type(dlgID)==type([]): # a template + raise TypeError("dlgID parameter must be an integer resource ID") + dlg=win32ui.CreatePrintDialog(dlgID, printSetupOnly, + flags, parent, + self.dll) + window.Wnd.__init__(self, dlg) + self.HookCommands() + self.bHaveInit = None + self.pInfo = pInfo + # init values (if PrintSetup is called, values still available) + flags = pInfo.GetFlags() + self['toFile'] = (flags&win32ui.PD_PRINTTOFILE != 0) + self['direct'] = pInfo.GetDirect() + self['preview'] = pInfo.GetPreview() + self['continuePrinting'] = pInfo.GetContinuePrinting() + self['curPage'] = pInfo.GetCurPage() + self['numPreviewPages'] = pInfo.GetNumPreviewPages() + self['userData'] = pInfo.GetUserData() + self['draw'] = pInfo.GetDraw() + self['pageDesc'] = pInfo.GetPageDesc() + self['minPage'] = pInfo.GetMinPage() + self['maxPage'] = pInfo.GetMaxPage() + self['offsetPage'] = pInfo.GetOffsetPage() + self['fromPage'] = pInfo.GetFromPage() + self['toPage'] = pInfo.GetToPage() + # these values updated after OnOK + self['copies'] = 0 + self['deviceName'] = '' + self['driverName'] = '' + self['printAll'] = 0 + self['printCollate'] = 0 + self['printRange'] = 0 + self['printSelection'] = 0 + + def OnInitDialog(self): + self.pInfo.CreatePrinterDC() # This also sets the hDC of the pInfo structure. + return self._obj_.OnInitDialog() + + def OnCancel(self): + del self.pInfo + def OnOK(self): + '''DoModal has finished. 
Can now access the users choices''' + self._obj_.OnOK() + pInfo = self.pInfo + # user values + flags = pInfo.GetFlags() + self['toFile'] = (flags&win32ui.PD_PRINTTOFILE != 0) + self['direct'] = pInfo.GetDirect() + self['preview'] = pInfo.GetPreview() + self['continuePrinting'] = pInfo.GetContinuePrinting() + self['curPage'] = pInfo.GetCurPage() + self['numPreviewPages'] = pInfo.GetNumPreviewPages() + self['userData'] = pInfo.GetUserData() + self['draw'] = pInfo.GetDraw() + self['pageDesc'] = pInfo.GetPageDesc() + self['minPage'] = pInfo.GetMinPage() + self['maxPage'] = pInfo.GetMaxPage() + self['offsetPage'] = pInfo.GetOffsetPage() + self['fromPage'] = pInfo.GetFromPage() + self['toPage'] = pInfo.GetToPage() + self['copies'] = pInfo.GetCopies() + self['deviceName'] = pInfo.GetDeviceName() + self['driverName'] = pInfo.GetDriverName() + self['printAll'] = pInfo.PrintAll() + self['printCollate'] = pInfo.PrintCollate() + self['printRange'] = pInfo.PrintRange() + self['printSelection'] = pInfo.PrintSelection() + del self.pInfo + +class PropertyPage(Dialog): + " Base class for a Property Page" + def __init__( self, id, dllid=None, caption=0 ): + """ id is the resource ID + dllid may be None, a dll object, or a string with a dll name """ + + self.dll = dllFromDll(dllid) + if self.dll: + oldRes = win32ui.SetResource(self.dll) + if type(id)==type([]): + dlg=win32ui.CreatePropertyPageIndirect(id) + else: + dlg=win32ui.CreatePropertyPage(id, caption) + if self.dll: + win32ui.SetResource(oldRes) + # dont call dialog init! + window.Wnd.__init__(self, dlg) + self.HookCommands() + +class PropertySheet(window.Wnd): + def __init__(self, caption, dll=None, pageList=None ):# parent=None, style,etc): + " Initialize a property sheet. pageList is a list of ID's " + # must take a reference to the DLL until InitDialog. + self.dll=dllFromDll(dll) + self.sheet = win32ui.CreatePropertySheet(caption) + window.Wnd.__init__(self, self.sheet) + if not pageList is None: + self.AddPage(pageList) + + def OnInitDialog(self): + return self._obj_.OnInitDialog() + + def DoModal(self): + if self.dll: + oldRes = win32ui.SetResource(self.dll) + rc = self.sheet.DoModal() + if self.dll: + win32ui.SetResource(oldRes) + return rc + + def AddPage(self, pages): + if self.dll: + oldRes = win32ui.SetResource(self.dll) + try: # try list style access + pages[0] + isSeq = 1 + except (TypeError,KeyError): + isSeq = 0 + if isSeq: + for page in pages: + self.DoAddSinglePage(page) + else: + self.DoAddSinglePage(pages) + if self.dll: + win32ui.SetResource(oldRes) + + def DoAddSinglePage(self, page): + "Page may be page, or int ID. Assumes DLL setup " + if type(page)==type(0): + self.sheet.AddPage(win32ui.CreatePropertyPage(page)) + else: + self.sheet.AddPage(page) + +# define some app utility functions. +def GetSimpleInput(prompt, defValue='', title=None ): + """ displays a dialog, and returns a string, or None if cancelled. + args prompt, defValue='', title=main frames title """ + # uses a simple dialog to return a string object. + if title is None: title=win32ui.GetMainFrame().GetWindowText() + # 2to3 insists on converting 'Dialog.__init__' to 'tkinter.dialog...' 
+ DlgBaseClass = Dialog + class DlgSimpleInput(DlgBaseClass): + def __init__(self, prompt, defValue, title ): + self.title=title + DlgBaseClass.__init__(self, win32ui.IDD_SIMPLE_INPUT) + self.AddDDX(win32ui.IDC_EDIT1,'result') + self.AddDDX(win32ui.IDC_PROMPT1, 'prompt') + self._obj_.data['result']=defValue + self._obj_.data['prompt']=prompt + def OnInitDialog(self): + self.SetWindowText(self.title) + return DlgBaseClass.OnInitDialog(self) + + dlg=DlgSimpleInput( prompt, defValue, title) + if dlg.DoModal() != win32con.IDOK: + return None + return dlg['result'] diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/docview.py b/venv/Lib/site-packages/pythonwin/pywin/mfc/docview.py new file mode 100644 index 00000000..fdf0d765 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/mfc/docview.py @@ -0,0 +1,116 @@ +# document and view classes for MFC. +import win32ui +import win32con +from . import object +from . import window + +class View(window.Wnd): + def __init__(self, initobj): + window.Wnd.__init__(self, initobj) + def OnInitialUpdate(self): + pass + +# Simple control based views. +class CtrlView(View): + def __init__(self, doc, wndclass, style=0): + View.__init__(self, win32ui.CreateCtrlView(doc, wndclass, style)) + +class EditView(CtrlView): + def __init__(self, doc): + View.__init__(self, win32ui.CreateEditView(doc)) + +class RichEditView(CtrlView): + def __init__(self, doc): + View.__init__(self, win32ui.CreateRichEditView(doc)) + +class ListView(CtrlView): + def __init__(self, doc): + View.__init__(self, win32ui.CreateListView(doc)) + +class TreeView(CtrlView): + def __init__(self, doc): + View.__init__(self, win32ui.CreateTreeView(doc)) + +# Other more advanced views. +class ScrollView(View): + def __init__(self, doc): + View.__init__(self, win32ui.CreateView(doc)) + +class FormView(View): + def __init__(self, doc, id): + View.__init__(self, win32ui.CreateFormView(doc, id)) + +class Document(object.CmdTarget): + def __init__(self, template, docobj=None): + if docobj is None: + docobj = template.DoCreateDoc() + object.CmdTarget.__init__(self, docobj) + +class RichEditDoc(object.CmdTarget): + def __init__(self, template): + object.CmdTarget.__init__(self, template.DoCreateRichEditDoc()) + +class CreateContext: + "A transient base class used as a CreateContext" + def __init__(self, template, doc = None): + self.template = template + self.doc = doc + def __del__(self): + self.close() + def close(self): + self.doc = None + self.template = None + +class DocTemplate(object.CmdTarget): + def __init__(self, resourceId=None, MakeDocument=None, MakeFrame=None, MakeView=None): + if resourceId is None: resourceId = win32ui.IDR_PYTHONTYPE + object.CmdTarget.__init__(self, self._CreateDocTemplate(resourceId)) + self.MakeDocument=MakeDocument + self.MakeFrame=MakeFrame + self.MakeView=MakeView + self._SetupSharedMenu_() + def _SetupSharedMenu_(self): + pass # to be overridden by each "app" + def _CreateDocTemplate(self, resourceId): + return win32ui.CreateDocTemplate(resourceId) + def __del__(self): + object.CmdTarget.__del__(self) + def CreateCreateContext(self, doc=None): + return CreateContext(self, doc) + def CreateNewFrame(self, doc): + makeFrame = self.MakeFrame + if makeFrame is None: makeFrame = window.MDIChildWnd + wnd = makeFrame() + context = self.CreateCreateContext(doc) + wnd.LoadFrame(self.GetResourceID(), -1, None, context) # triggers OnCreateClient... 
+ return wnd + def CreateNewDocument(self): + makeDocument = self.MakeDocument + if makeDocument is None: + makeDocument = Document + return makeDocument(self) + def CreateView(self, frame, context): + makeView = self.MakeView + if makeView is None: makeView = EditView + view = makeView(context.doc) + view.CreateWindow(frame) + +class RichEditDocTemplate(DocTemplate): + def __init__(self, resourceId=None, MakeDocument=None, MakeFrame=None, MakeView=None): + if MakeView is None: MakeView = RichEditView + if MakeDocument is None: MakeDocument = RichEditDoc + DocTemplate.__init__(self, resourceId, MakeDocument, MakeFrame, MakeView) + + def _CreateDocTemplate(self, resourceId): + return win32ui.CreateRichEditDocTemplate(resourceId) + +def t(): + class FormTemplate(DocTemplate): + def CreateView(self, frame, context): + makeView = self.MakeView +# view = FormView(context.doc, win32ui.IDD_PROPDEMO1) + view = ListView(context.doc) + view.CreateWindow(frame) + + t=FormTemplate() + return t.OpenDocumentFile(None) diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/object.py b/venv/Lib/site-packages/pythonwin/pywin/mfc/object.py new file mode 100644 index 00000000..505ef750 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/mfc/object.py @@ -0,0 +1,55 @@ +# MFC base classes. +import sys +import win32ui + +class Object: + def __init__(self, initObj = None): + self.__dict__['_obj_'] = initObj +# self._obj_ = initObj + if initObj is not None: initObj.AttachObject(self) + def __del__(self): + self.close() + def __getattr__(self, attr): # Make this object look like the underlying win32ui one. + # During cleanup __dict__ is not available, causing recursive death. + if not attr.startswith('__'): + try: + o = self.__dict__['_obj_'] + if o is not None: + return getattr(o, attr) + # Only raise this error for non "internal" names - + # Python may be calling __len__, __nonzero__, etc, so + # we dont want this exception + if attr[0]!= '_' and attr[-1] != '_': + raise win32ui.error("The MFC object has died.") + except KeyError: + # No _obj_ at all - dont report MFC object died when there isnt one! + pass + raise AttributeError(attr) + + def OnAttachedObjectDeath(self): +# print "object", self.__class__.__name__, "dieing" + self._obj_ = None + def close(self): + if '_obj_' in self.__dict__: + if self._obj_ is not None: + self._obj_.AttachObject(None) + self._obj_ = None + +class CmdTarget(Object): + def __init__(self, initObj): + Object.__init__(self, initObj) + def HookNotifyRange(self, handler, firstID, lastID): + oldhandlers = [] + for i in range(firstID, lastID + 1): + oldhandlers.append(self.HookNotify(handler, i)) + return oldhandlers + def HookCommandRange(self, handler, firstID, lastID): + oldhandlers = [] + for i in range(firstID, lastID + 1): + oldhandlers.append(self.HookCommand(handler, i)) + return oldhandlers + def HookCommandUpdateRange(self, handler, firstID, lastID): + oldhandlers = [] + for i in range(firstID, lastID + 1): + oldhandlers.append(self.HookCommandUpdate(handler, i)) + return oldhandlers diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/thread.py b/venv/Lib/site-packages/pythonwin/pywin/mfc/thread.py new file mode 100644 index 00000000..05bc10dd --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/mfc/thread.py @@ -0,0 +1,22 @@ +# Thread and application objects + +from . 
import object +import win32ui + +class WinThread(object.CmdTarget): + def __init__(self, initObj = None): + if initObj is None: + initObj = win32ui.CreateThread() + object.CmdTarget.__init__(self, initObj) + + def InitInstance(self): + pass # Default None/0 return indicates success for InitInstance() + def ExitInstance(self): + pass + + +class WinApp(WinThread): + def __init__(self, initApp = None): + if initApp is None: + initApp = win32ui.GetApp() + WinThread.__init__(self, initApp) diff --git a/venv/Lib/site-packages/pythonwin/pywin/mfc/window.py b/venv/Lib/site-packages/pythonwin/pywin/mfc/window.py new file mode 100644 index 00000000..d13ba82d --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/mfc/window.py @@ -0,0 +1,41 @@ +# The MFCish window classes. +from . import object +import win32ui +import win32con + +class Wnd(object.CmdTarget): + def __init__(self, initobj=None): + object.CmdTarget.__init__(self, initobj) + if self._obj_: self._obj_.HookMessage(self.OnDestroy, win32con.WM_DESTROY) + def OnDestroy(self, msg): + pass + +# NOTE NOTE - This facility is currently disabled in Pythonwin!!!!! +# Note - to process all messages for your window, add the following method +# to a derived class. This code provides default message handling (ie, is +# identical, except presumably in speed, as if the method did not exist at +# all, so presumably will be modified to test for specific messages to be +# useful! +# def WindowProc(self, msg, wParam, lParam): +# rc, lResult = self._obj_.OnWndMsg(msg, wParam, lParam) +# if not rc: lResult = self._obj_.DefWindowProc(msg, wParam, lParam) +# return lResult + +class FrameWnd(Wnd): + def __init__(self, wnd): + Wnd.__init__(self, wnd) + +class MDIChildWnd(FrameWnd): + def __init__(self, wnd = None): + if wnd is None: + wnd=win32ui.CreateMDIChild() + FrameWnd.__init__(self, wnd) + def OnCreateClient(self, cp, context): + if context is not None and context.template is not None: + context.template.CreateView(self, context) + +class MDIFrameWnd(FrameWnd): + def __init__(self, wnd = None): + if wnd is None: + wnd=win32ui.CreateMDIFrame() + FrameWnd.__init__(self, wnd) diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/IDLEenvironment.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/IDLEenvironment.py new file mode 100644 index 00000000..f8f61f75 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/IDLEenvironment.py @@ -0,0 +1,528 @@ +# Code that allows Pythonwin to pretend it is IDLE +# (at least as far as most IDLE extensions are concerned) + +import string +import win32api +import win32ui +import win32con +import sys + +from pywin.mfc.dialog import GetSimpleInput +from pywin import default_scintilla_encoding + +wordchars = string.ascii_uppercase + string.ascii_lowercase + string.digits + +class TextError(Exception): # When a TclError would normally be raised. + pass + + +class EmptyRange(Exception): # Internally raised. + pass + +def GetIDLEModule(module): + try: + # First get it from Pythonwin it is exists. + modname = "pywin.idle." + module + __import__(modname) + except ImportError as details: + msg = "The IDLE extension '%s' can not be located.\r\n\r\n" \ + "Please correct the installation and restart the" \ + " application.\r\n\r\n%s" % (module, details) + win32ui.MessageBox(msg) + return None + mod=sys.modules[modname] + mod.TclError = TextError # A hack that can go soon! + return mod + +# A class that is injected into the IDLE auto-indent extension. 
+# It allows for decent performance when opening a new file, +# as auto-indent uses the tokenizer module to determine indents. +# The default AutoIndent readline method works OK, but it goes through +# this layer of Tk index indirection for every single line. For large files +# without indents (and even small files with indents :-) it was pretty slow! +def fast_readline(self): + if self.finished: + val = "" + else: + if "_scint_lines" not in self.__dict__: + # XXX - note - assumes this is only called once the file is loaded! + self._scint_lines = self.text.edit.GetTextRange().split("\n") + sl = self._scint_lines + i = self.i = self.i + 1 + if i >= len(sl): + val = "" + else: + val = sl[i]+"\n" + return val.encode(default_scintilla_encoding) + +try: + GetIDLEModule("AutoIndent").IndentSearcher.readline = fast_readline +except AttributeError: # GetIDLEModule may return None + pass + +# A class that attempts to emulate an IDLE editor window. +# Construct with a Pythonwin view. +class IDLEEditorWindow: + def __init__(self, edit): + self.edit = edit + self.text = TkText(edit) + self.extensions = {} + self.extension_menus = {} + + def close(self): + self.edit = self.text = None + self.extension_menus = None + try: + for ext in self.extensions.values(): + closer = getattr(ext, "close", None) + if closer is not None: + closer() + finally: + self.extensions = {} + + def IDLEExtension(self, extension): + ext = self.extensions.get(extension) + if ext is not None: return ext + mod = GetIDLEModule(extension) + if mod is None: return None + klass = getattr(mod, extension) + ext = self.extensions[extension] = klass(self) + # Find and bind all the events defined in the extension. + events = [item for item in dir(klass) if item[-6:]=="_event"] + for event in events: + name = "<<%s>>" % (event[:-6].replace("_", "-"), ) + self.edit.bindings.bind(name, getattr(ext, event)) + return ext + + def GetMenuItems(self, menu_name): + # Get all menu items for the menu name (eg, "edit") + bindings = self.edit.bindings + ret = [] + for ext in self.extensions.values(): + menudefs = getattr(ext, "menudefs", []) + for name, items in menudefs: + if name == menu_name: + for text, event in [item for item in items if item is not None]: + text = text.replace("&", "&&") + text = text.replace("_", "&") + ret.append((text, event)) + return ret + + ###################################################################### + # The IDLE "Virtual UI" methods that are exposed to the IDLE extensions. + # + def askinteger(self, caption, prompt, parent=None, initialvalue=0, minvalue=None, maxvalue=None): + while 1: + rc = GetSimpleInput(prompt, str(initialvalue), caption) + if rc is None: return 0 # Correct "cancel" semantics? + err = None + try: + rc = int(rc) + except ValueError: + err = "Please enter an integer" + if not err and minvalue is not None and rc < minvalue: + err = "Please enter an integer greater then or equal to %s" % (minvalue,) + if not err and maxvalue is not None and rc > maxvalue: + err = "Please enter an integer less then or equal to %s" % (maxvalue,) + if err: + win32ui.MessageBox(err, caption, win32con.MB_OK) + continue + return rc + def askyesno(self, caption, prompt, parent=None): + return win32ui.MessageBox(prompt, caption, win32con.MB_YESNO)==win32con.IDYES + + ###################################################################### + # The IDLE "Virtual Text Widget" methods that are exposed to the IDLE extensions. + # + + # Is character at text_index in a Python string? 
Return 0 for + # "guaranteed no", true for anything else. + def is_char_in_string(self, text_index): + # A helper for the code analyser - we need internal knowledge of + # the colorizer to get this information + # This assumes the colorizer has got to this point! + text_index = self.text._getoffset(text_index) + c = self.text.edit._GetColorizer() + if c and c.GetStringStyle(text_index) is None: + return 0 + return 1 + + # If a selection is defined in the text widget, return + # (start, end) as Tkinter text indices, otherwise return + # (None, None) + def get_selection_indices(self): + try: + first = self.text.index("sel.first") + last = self.text.index("sel.last") + return first, last + except TextError: + return None, None + + def set_tabwidth(self, width ): + self.edit.SCISetTabWidth(width) + + def get_tabwidth(self): + return self.edit.GetTabWidth() + +# A class providing the generic "Call Tips" interface +class CallTips: + def __init__(self, edit): + self.edit = edit + def showtip(self, tip_text): + self.edit.SCICallTipShow(tip_text) + def hidetip(self): + self.edit.SCICallTipCancel() + +######################################## +# +# Helpers for the TkText emulation. +def TkOffsetToIndex(offset, edit): + lineoff = 0 + # May be 1 > actual end if we pretended there was a trailing '\n' + offset = min(offset, edit.GetTextLength()) + line = edit.LineFromChar(offset) + lineIndex = edit.LineIndex(line) + return "%d.%d" % (line+1, offset-lineIndex) + +def _NextTok(str, pos): + # Returns (token, endPos) + end = len(str) + if pos>=end: return None, 0 + while pos < end and str[pos] in string.whitespace: + pos = pos + 1 + # Special case for +- + if str[pos] in '+-': + return str[pos],pos+1 + # Digits also a special case. + endPos = pos + while endPos < end and str[endPos] in string.digits+".": + endPos = endPos + 1 + if pos!=endPos: return str[pos:endPos], endPos + endPos = pos + while endPos < end and str[endPos] not in string.whitespace + string.digits + "+-": + endPos = endPos + 1 + if pos!=endPos: return str[pos:endPos], endPos + return None, 0 + +def TkIndexToOffset(bm, edit, marks): + base, nextTokPos = _NextTok(bm, 0) + if base is None: raise ValueError("Empty bookmark ID!") + if base.find(".")>0: + try: + line, col = base.split(".", 2) + if col=="first" or col=="last": + # Tag name + if line != "sel": raise ValueError("Tags arent here!") + sel = edit.GetSel() + if sel[0]==sel[1]: + raise EmptyRange + if col=="first": + pos = sel[0] + else: + pos = sel[1] + else: + # Lines are 1 based for tkinter + line = int(line)-1 + if line > edit.GetLineCount(): + pos = edit.GetTextLength()+1 + else: + pos = edit.LineIndex(line) + if pos==-1: pos = edit.GetTextLength() + pos = pos + int(col) + except (ValueError, IndexError): + raise ValueError("Unexpected literal in '%s'" % base) + elif base == 'insert': + pos = edit.GetSel()[0] + elif base=='end': + pos = edit.GetTextLength() + # Pretend there is a trailing '\n' if necessary + if pos and edit.SCIGetCharAt(pos-1) != "\n": + pos = pos+1 + else: + try: + pos = marks[base] + except KeyError: + raise ValueError("Unsupported base offset or undefined mark '%s'" % base) + + while 1: + word, nextTokPos = _NextTok(bm, nextTokPos) + if word is None: break + if word in ['+','-']: + num, nextTokPos = _NextTok(bm, nextTokPos) + if num is None: raise ValueError("+/- operator needs 2 args") + what, nextTokPos = _NextTok(bm, nextTokPos) + if what is None: raise ValueError("+/- operator needs 2 args") + if what[0] != "c": raise ValueError("+/- only supports chars") + 
if word=='+': + pos = pos + int(num) + else: + pos = pos - int(num) + elif word=='wordstart': + while pos > 0 and edit.SCIGetCharAt(pos-1) in wordchars: + pos = pos - 1 + elif word=='wordend': + end = edit.GetTextLength() + while pos < end and edit.SCIGetCharAt(pos) in wordchars: + pos = pos + 1 + elif word=='linestart': + while pos > 0 and edit.SCIGetCharAt(pos-1) not in '\n\r': + pos = pos - 1 + elif word=='lineend': + end = edit.GetTextLength() + while pos < end and edit.SCIGetCharAt(pos) not in '\n\r': + pos = pos + 1 + else: + raise ValueError("Unsupported relative offset '%s'" % word) + return max(pos, 0) # Tkinter is tollerant of -ve indexes - we aren't + +# A class that resembles an IDLE (ie, a Tk) text widget. +# Construct with an edit object (eg, an editor view) +class TkText: + def __init__(self, edit): + self.calltips = None + self.edit = edit + self.marks = {} +## def __getattr__(self, attr): +## if attr=="tk": return self # So text.tk.call works. +## if attr=="master": return None # ditto! +## raise AttributeError, attr +## def __getitem__(self, item): +## if item=="tabs": +## size = self.edit.GetTabWidth() +## if size==8: return "" # Tk default +## return size # correct semantics? +## elif item=="font": # Used for measurements we dont need to do! +## return "Dont know the font" +## raise IndexError, "Invalid index '%s'" % item + def make_calltip_window(self): + if self.calltips is None: + self.calltips = CallTips(self.edit) + return self.calltips + def _getoffset(self, index): + return TkIndexToOffset(index, self.edit, self.marks) + def _getindex(self, off): + return TkOffsetToIndex(off, self.edit) + def _fix_indexes(self, start, end): + # first some magic to handle skipping over utf8 extended chars. + while start > 0 and ord(self.edit.SCIGetCharAt(start)) & 0xC0 == 0x80: + start -= 1 + while end < self.edit.GetTextLength() and ord(self.edit.SCIGetCharAt(end)) & 0xC0 == 0x80: + end += 1 + # now handling fixing \r\n->\n disparities... + if start>0 and self.edit.SCIGetCharAt(start)=='\n' and self.edit.SCIGetCharAt(start-1)=='\r': + start = start - 1 + if end < self.edit.GetTextLength() and self.edit.SCIGetCharAt(end-1)=='\r' and self.edit.SCIGetCharAt(end)=='\n': + end = end + 1 + return start, end +## def get_tab_width(self): +## return self.edit.GetTabWidth() +## def call(self, *rest): +## # Crap to support Tk measurement hacks for tab widths +## if rest[0] != "font" or rest[1] != "measure": +## raise ValueError, "Unsupport call type" +## return len(rest[5]) +## def configure(self, **kw): +## for name, val in kw.items(): +## if name=="tabs": +## self.edit.SCISetTabWidth(int(val)) +## else: +## raise ValueError, "Unsupported configuration item %s" % kw + def bind(self, binding, handler): + self.edit.bindings.bind(binding, handler) + def get(self, start, end = None): + try: + start = self._getoffset(start) + if end is None: + end = start+1 + else: + end = self._getoffset(end) + except EmptyRange: + return "" + # Simple semantic checks to conform to the Tk text interface + if end <= start: return "" + max = self.edit.GetTextLength() + checkEnd = 0 + if end > max: + end = max + checkEnd = 1 + start, end = self._fix_indexes(start, end) + ret = self.edit.GetTextRange(start, end) + # pretend a trailing '\n' exists if necessary. 
+ if checkEnd and (not ret or ret[-1] != '\n'): ret = ret + '\n' + return ret.replace("\r", "") + def index(self, spec): + try: + return self._getindex(self._getoffset(spec)) + except EmptyRange: + return "" + def insert(self, pos, text): + try: + pos = self._getoffset(pos) + except EmptyRange: + raise TextError("Empty range") + self.edit.SetSel((pos, pos)) + # IDLE only deals with "\n" - we will be nicer + + bits = text.split('\n') + self.edit.SCIAddText(bits[0]) + for bit in bits[1:]: + self.edit.SCINewline() + self.edit.SCIAddText(bit) + + def delete(self, start, end=None): + try: + start = self._getoffset(start) + if end is not None: end = self._getoffset(end) + except EmptyRange: + raise TextError("Empty range") + # If end is specified and == start, then we must delete nothing. + if start==end: return + # If end is not specified, delete one char + if end is None: + end = start+1 + else: + # Tk says not to delete in this case, but our control would. + if end<start: return + if start==self.edit.GetTextLength(): return # Nothing to delete. + old = self.edit.GetSel()[0] # Lose a selection + # Hack for partial '\r\n' removal + start, end = self._fix_indexes(start, end) + self.edit.SetSel((start, end)) + self.edit.Clear() + if old>=start and old<end: + old = start + elif old>=end: + old = old - (end-start) + self.edit.SetSel(old) + + def bell(self): + win32api.MessageBeep() + + def see(self, pos): + # Most commands we use in Scintilla actually force the selection + # to be seen, making this unnecessary. + pass + + def mark_set(self, name, pos): + try: + pos = self._getoffset(pos) + except EmptyRange: + raise TextError("Empty range '%s'" % pos) + if name == "insert": + self.edit.SetSel( pos ) + else: + self.marks[name]=pos + + def tag_add(self, name, start, end): + if name != "sel": raise ValueError("Only sel tag is supported") + try: + start = self._getoffset(start) + end = self._getoffset(end) + except EmptyRange: + raise TextError("Empty range") + self.edit.SetSel( start, end ) + + def tag_remove(self, name, start, end): + if name !="sel" or start != "1.0" or end != "end": + raise ValueError("Cant remove this tag") + # Turn the sel into a cursor + self.edit.SetSel(self.edit.GetSel()[0]) + + def compare(self, i1, op, i2): + try: + i1=self._getoffset(i1) + except EmptyRange: + i1 = "" + try: + i2=self._getoffset(i2) + except EmptyRange: + i2 = "" + return eval("%d%s%d" % (i1,op,i2)) + + def undo_block_start(self): + self.edit.SCIBeginUndoAction() + + def undo_block_stop(self): + self.edit.SCIEndUndoAction() + +###################################################################### +# +# Test related code. 
+# +###################################################################### +def TestCheck(index, edit, expected=None): + rc = TkIndexToOffset(index, edit, {}) + if rc != expected: + print("ERROR: Index", index,", expected", expected, "but got", rc) + +def TestGet(fr, to, t, expected): + got = t.get(fr, to) + if got != expected: + print("ERROR: get(%s, %s) expected %s, but got %s" % (repr(fr), repr(to), repr(expected), repr(got))) + +def test(): + import pywin.framework.editor + d=pywin.framework.editor.editorTemplate.OpenDocumentFile(None) + e=d.GetFirstView() + t = TkText(e) + e.SCIAddText("hi there how\nare you today\r\nI hope you are well") + e.SetSel((4,4)) + + skip = """ + TestCheck("insert", e, 4) + TestCheck("insert wordstart", e, 3) + TestCheck("insert wordend", e, 8) + TestCheck("insert linestart", e, 0) + TestCheck("insert lineend", e, 12) + TestCheck("insert + 4 chars", e, 8) + TestCheck("insert +4c", e, 8) + TestCheck("insert - 2 chars", e, 2) + TestCheck("insert -2c", e, 2) + TestCheck("insert-2c", e, 2) + TestCheck("insert-2 c", e, 2) + TestCheck("insert- 2c", e, 2) + TestCheck("1.1", e, 1) + TestCheck("1.0", e, 0) + TestCheck("2.0", e, 13) + try: + TestCheck("sel.first", e, 0) + print "*** sel.first worked with an empty selection" + except TextError: + pass + e.SetSel((4,5)) + TestCheck("sel.first- 2c", e, 2) + TestCheck("sel.last- 2c", e, 3) + """ + # Check EOL semantics + e.SetSel((4,4)) + TestGet("insert lineend", "insert lineend +1c", t, "\n") + e.SetSel((20, 20)) + TestGet("insert lineend", "insert lineend +1c", t, "\n") + e.SetSel((35, 35)) + TestGet("insert lineend", "insert lineend +1c", t, "\n") + +class IDLEWrapper: + def __init__(self, control): + self.text = control + +def IDLETest(extension): + import sys, os + modname = "pywin.idle." + extension + __import__(modname) + mod=sys.modules[modname] + mod.TclError = TextError + klass = getattr(mod, extension) + + # Create a new Scintilla Window. + import pywin.framework.editor + d=pywin.framework.editor.editorTemplate.OpenDocumentFile(None) + v=d.GetFirstView() + fname=os.path.splitext(__file__)[0] + ".py" + v.SCIAddText(open(fname).read()) + d.SetModifiedFlag(0) + r=klass( IDLEWrapper( TkText(v) ) ) + return r + +if __name__=='__main__': + test() + diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__init__.py new file mode 100644 index 00000000..8d8ea470 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__init__.py @@ -0,0 +1 @@ +# package init. 
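# IDLEenvironment.py above exists to translate Tk's "line.column" text
# indexes into the flat character offsets Scintilla works with. A standalone
# sketch of that basic mapping over an ordinary Python string (function
# names here are illustrative, not the pywin32 API):

def tk_index_to_offset(text, index):
    """Convert a 'line.col' Tk index (1-based lines, 0-based cols) to an offset."""
    line_s, col_s = index.split(".")
    line, col = int(line_s), int(col_s)
    lines = text.split("\n")
    if not 1 <= line <= len(lines):
        raise ValueError("line out of range: %r" % index)
    return sum(len(l) + 1 for l in lines[:line - 1]) + col

def offset_to_tk_index(text, offset):
    """Inverse mapping: a flat offset back to 'line.col'."""
    line = text.count("\n", 0, offset) + 1
    col = offset - (text.rfind("\n", 0, offset) + 1)
    return "%d.%d" % (line, col)

# tk_index_to_offset("hi there how\nare you today", "2.0") -> 13
# offset_to_tk_index("hi there how\nare you today", 13)    -> "2.0"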
\ No newline at end of file diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/IDLEenvironment.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/IDLEenvironment.cpython-36.pyc new file mode 100644 index 00000000..52e26032 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/IDLEenvironment.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..97d7b1c9 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/bindings.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/bindings.cpython-36.pyc new file mode 100644 index 00000000..7a016d02 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/bindings.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/config.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/config.cpython-36.pyc new file mode 100644 index 00000000..cfbd5c78 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/config.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/configui.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/configui.cpython-36.pyc new file mode 100644 index 00000000..b2e654b2 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/configui.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/control.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/control.cpython-36.pyc new file mode 100644 index 00000000..731f8dda Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/control.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/document.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/document.cpython-36.pyc new file mode 100644 index 00000000..4790fbba Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/document.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/find.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/find.cpython-36.pyc new file mode 100644 index 00000000..960f1566 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/find.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/formatter.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/formatter.cpython-36.pyc new file mode 100644 index 00000000..8c72bf0b Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/formatter.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/keycodes.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/keycodes.cpython-36.pyc new file mode 100644 index 00000000..49129025 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/keycodes.cpython-36.pyc differ 
diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/scintillacon.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/scintillacon.cpython-36.pyc new file mode 100644 index 00000000..c8aab2ff Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/scintillacon.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/view.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/view.cpython-36.pyc new file mode 100644 index 00000000..f6917998 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/scintilla/__pycache__/view.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/bindings.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/bindings.py new file mode 100644 index 00000000..9b1e8327 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/bindings.py @@ -0,0 +1,172 @@ +from . import IDLEenvironment +import string +import win32ui +import win32api +import win32con +from . import keycodes +import sys +import traceback + +HANDLER_ARGS_GUESS=0 +HANDLER_ARGS_NATIVE=1 +HANDLER_ARGS_IDLE=2 +HANDLER_ARGS_EXTENSION=3 + +next_id = 5000 + +event_to_commands = {}# dict of integer IDs to event names. +command_to_events = {}# dict of event names to int IDs + +def assign_command_id(event, id = 0): + global next_id + if id == 0: + id = event_to_commands.get(event, 0) + if id == 0: + id = next_id + next_id = next_id + 1 + # Only map the ones we allocated - specified ones are assumed to have a handler + command_to_events[id] = event + event_to_commands[event] = id + return id + +class SendCommandHandler: + def __init__(self, cmd): + self.cmd = cmd + def __call__(self, *args): + win32ui.GetMainFrame().SendMessage(win32con.WM_COMMAND, self.cmd) + +class Binding: + def __init__(self, handler, handler_args_type): + self.handler = handler + self.handler_args_type = handler_args_type + +class BindingsManager: + def __init__(self, parent_view): + self.parent_view = parent_view + self.bindings = {} # dict of Binding instances. + self.keymap = {} + + def prepare_configure(self): + self.keymap = {} + + def complete_configure(self): + for id in command_to_events.keys(): + self.parent_view.HookCommand(self._OnCommand, id) + + def close(self): + self.parent_view = self.bindings = self.keymap = None + + def report_error(self, problem): + try: + win32ui.SetStatusText(problem, 1) + except win32ui.error: + # No status bar! + print(problem) + + def update_keymap(self, keymap): + self.keymap.update(keymap) + + def bind(self, event, handler, handler_args_type = HANDLER_ARGS_GUESS, cid = 0): + if handler is None: + handler = SendCommandHandler(cid) + self.bindings[event] = self._new_binding(handler, handler_args_type) + self.bind_command(event, cid) + + def bind_command(self, event, id = 0): + "Binds an event to a Windows control/command ID" + id = assign_command_id(event, id) + return id + + def get_command_id(self, event): + id = event_to_commands.get(event) + if id is None: + # See if we even have an event of that name!? 
+ if event not in self.bindings: + return None + id = self.bind_command(event) + return id + + def _OnCommand(self, id, code): + event = command_to_events.get(id) + if event is None: + self.report_error("No event associated with event ID %d" % id) + return 1 + return self.fire(event) + + def _new_binding(self, event, handler_args_type): + return Binding(event, handler_args_type) + + def _get_IDLE_handler(self, ext, handler): + try: + instance = self.parent_view.idle.IDLEExtension(ext) + name = handler.replace("-", "_") + "_event" + return getattr(instance, name) + except (ImportError, AttributeError): + msg = "Can not find event '%s' in IDLE extension '%s'" % (handler, ext) + self.report_error(msg) + return None + + def fire(self, event, event_param = None): + # Fire the specified event. Result is native Pythonwin result + # (ie, 1==pass one, 0 or None==handled) + + # First look up the event directly - if there, we are set. + binding = self.bindings.get(event) + if binding is None: + # If possible, find it! + # A native method name + handler = getattr(self.parent_view, event + "Event", None) + if handler is None: + # Can't decide if I should report an error?? + self.report_error("The event name '%s' can not be found." % event) + # Either way, just let the default handlers grab it. + return 1 + binding = self._new_binding(handler, HANDLER_ARGS_NATIVE) + # Cache it. + self.bindings[event] = binding + + handler_args_type = binding.handler_args_type + # Now actually fire it. + if handler_args_type==HANDLER_ARGS_GUESS: + # Can't be native, as natives are never added with "guess". + # Must be extension or IDLE. + if event[0]=="<": + handler_args_type = HANDLER_ARGS_IDLE + else: + handler_args_type = HANDLER_ARGS_EXTENSION + try: + if handler_args_type==HANDLER_ARGS_EXTENSION: + args = self.parent_view.idle, event_param + else: + args = (event_param,) + rc = binding.handler(*args) + if handler_args_type==HANDLER_ARGS_IDLE: + # Convert to our return code. + if rc in [None, "break"]: + rc = 0 + else: + rc = 1 + except: + message = "Firing event '%s' failed." % event + print(message) + traceback.print_exc() + self.report_error(message) + rc = 1 # Let any default handlers have a go! + return rc + + def fire_key_event(self, msg): + key = msg[2] + keyState = 0 + if win32api.GetKeyState(win32con.VK_CONTROL) & 0x8000: + keyState = keyState | win32con.RIGHT_CTRL_PRESSED | win32con.LEFT_CTRL_PRESSED + if win32api.GetKeyState(win32con.VK_SHIFT) & 0x8000: + keyState = keyState | win32con.SHIFT_PRESSED + if win32api.GetKeyState(win32con.VK_MENU) & 0x8000: + keyState = keyState | win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED + keyinfo = key, keyState + # Special hacks for the dead-char key on non-US keyboards. + # (XXX - which do not work :-( + event = self.keymap.get( keyinfo ) + if event is None: + return 1 + return self.fire(event, None) diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/config.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/config.py new file mode 100644 index 00000000..e5857f93 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/config.py @@ -0,0 +1,322 @@ +# config.py - deals with loading configuration information. + +# Loads config data from a .cfg file. Also caches the compiled +# data back into a .cfc file. + +# If you are wondering how to avoid needing .cfg files (eg, +# if you are freezing Pythonwin etc) I suggest you create a +# .py file, and put the config info in a docstring. 
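# The BindingsManager above keeps one mapping from event names to handlers
# and another from (key, modifier-flags) pairs to event names, then routes
# key presses through fire(). A stripped-down sketch of that dispatch
# pattern, with no win32 dependencies (all names below are illustrative):

CTRL, SHIFT, ALT = 0x1, 0x2, 0x4        # toy modifier flags

class TinyBindings:
    def __init__(self):
        self.handlers = {}              # event name -> callable
        self.keymap = {}                # (key, modifier flags) -> event name

    def bind(self, event, handler, key=None, mods=0):
        self.handlers[event] = handler
        if key is not None:
            self.keymap[(key, mods)] = event

    def fire_key(self, key, mods):
        event = self.keymap.get((key, mods))
        if event is None:
            return False                # unhandled - let default processing run
        self.handlers[event]()
        return True

# b = TinyBindings()
# b.bind("save-file", lambda: print("saving..."), key="s", mods=CTRL)
# b.fire_key("s", CTRL)                 # -> True, prints "saving..."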
Then +# pass a CStringIO file (rather than a filename) to the +# config manager. +import sys +import string +from . import keycodes +import marshal +import stat +import os +import types +import traceback +import pywin +import glob +import importlib.util + +import win32api + +debugging = 0 +if debugging: + import win32traceutil # Some trace statements fire before the interactive window is open. + def trace(*args): + sys.stderr.write(" ".join(map(str, args)) + "\n") +else: + trace = lambda *args: None + +compiled_config_version = 3 + +def split_line(line, lineno): + comment_pos = line.find("#") + if comment_pos>=0: line = line[:comment_pos] + sep_pos = line.rfind("=") + if sep_pos == -1: + if line.strip(): + print("Warning: Line %d: %s is an invalid entry" % (lineno, repr(line))) + return None, None + return "", "" + return line[:sep_pos].strip(), line[sep_pos+1:].strip() + +def get_section_header(line): + # Returns the section if the line is a section header, else None + if line[0] == "[": + end = line.find("]") + if end==-1: end=len(line) + rc = line[1:end].lower() + try: + i = rc.index(":") + return rc[:i], rc[i+1:] + except ValueError: + return rc, "" + return None, None + +def find_config_file(f): + return os.path.join(pywin.__path__[0], f + ".cfg") + +def find_config_files(): + return [os.path.split(x)[1] + for x in [os.path.splitext(x)[0] for x in glob.glob(os.path.join(pywin.__path__[0], "*.cfg"))] + ] + +class ConfigManager: + def __init__(self, f): + self.filename = "unknown" + self.last_error = None + self.key_to_events = {} + if hasattr(f, "readline"): + fp = f + self.filename = "" + compiled_name = None + else: + try: + f = find_config_file(f) + src_stat = os.stat(f) + except os.error: + self.report_error("Config file '%s' not found" % f) + return + self.filename = f + self.basename = os.path.basename(f) + trace("Loading configuration", self.basename) + compiled_name = os.path.splitext(f)[0] + ".cfc" + try: + cf = open(compiled_name, "rb") + try: + ver = marshal.load(cf) + ok = compiled_config_version == ver + if ok: + kblayoutname = marshal.load(cf) + magic = marshal.load(cf) + size = marshal.load(cf) + mtime = marshal.load(cf) + if magic == importlib.util.MAGIC_NUMBER and \ + win32api.GetKeyboardLayoutName() == kblayoutname and \ + src_stat[stat.ST_MTIME] == mtime and \ + src_stat[stat.ST_SIZE] == size: + self.cache = marshal.load(cf) + trace("Configuration loaded cached", compiled_name) + return # We are ready to roll! + finally: + cf.close() + except (os.error, IOError, EOFError): + pass + fp = open(f) + self.cache = {} + lineno = 1 + line = fp.readline() + while line: + # Skip to the next section (maybe already there!) + section, subsection = get_section_header(line) + while line and section is None: + line = fp.readline() + if not line: break + lineno = lineno + 1 + section, subsection = get_section_header(line) + if not line: break + + if section=="keys": + line, lineno = self._load_keys(subsection, fp, lineno) + elif section == "extensions": + line, lineno = self._load_extensions(subsection, fp, lineno) + elif section == "idle extensions": + line, lineno = self._load_idle_extensions(subsection, fp, lineno) + elif section == "general": + line, lineno = self._load_general(subsection, fp, lineno) + else: + self.report_error("Unrecognised section header '%s:%s'" % (section,subsection)) + line = fp.readline() + lineno = lineno + 1 + # Check critical data. 
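# ConfigManager above avoids re-parsing .cfg files by marshalling the parsed
# result into a side-car .cfc file, keyed on the interpreter magic number,
# the keyboard layout, and the source file's size and mtime. The same
# caching idea in miniature (helper and file names are illustrative; the
# parsed value must be something marshal can serialise, e.g. dicts and lists):

import marshal, os, importlib.util

def load_with_cache(src_path, parse, cache_suffix=".cache"):
    cache_path = os.path.splitext(src_path)[0] + cache_suffix
    st = os.stat(src_path)
    key = (importlib.util.MAGIC_NUMBER, st.st_mtime, st.st_size)
    try:
        with open(cache_path, "rb") as f:
            if marshal.load(f) == key:
                return marshal.load(f)          # cache is still valid
    except (OSError, EOFError, ValueError, TypeError):
        pass                                    # missing or stale cache
    with open(src_path) as f:
        data = parse(f.read())
    try:
        with open(cache_path, "wb") as f:
            marshal.dump(key, f)
            marshal.dump(data, f)
    except OSError:
        pass                                    # e.g. read-only directory
    return data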
+ if not self.cache.get("keys"): + self.report_error("No keyboard definitions were loaded") + if not self.last_error and compiled_name: + try: + cf = open(compiled_name, "wb") + marshal.dump(compiled_config_version, cf) + marshal.dump(win32api.GetKeyboardLayoutName(), cf) + marshal.dump(importlib.util.MAGIC_NUMBER, cf) + marshal.dump(src_stat[stat.ST_SIZE], cf) + marshal.dump(src_stat[stat.ST_MTIME], cf) + marshal.dump(self.cache, cf) + cf.close() + except (IOError, EOFError): + pass # Ignore errors - may be read only. + + def configure(self, editor, subsections = None): + # Execute the extension code, and find any events. + # First, we "recursively" connect any we are based on. + if subsections is None: subsections = [] + subsections = [''] + subsections + general = self.get_data("general") + if general: + parents = general.get("based on", []) + for parent in parents: + trace("Configuration based on", parent, "- loading.") + parent = self.__class__(parent) + parent.configure(editor, subsections) + if parent.last_error: + self.report_error(parent.last_error) + + bindings = editor.bindings + codeob = self.get_data("extension code") + if codeob is not None: + ns = {} + try: + exec(codeob, ns) + except: + traceback.print_exc() + self.report_error("Executing extension code failed") + ns = None + if ns: + num = 0 + for name, func in list(ns.items()): + if type(func)==types.FunctionType and name[:1] != '_': + bindings.bind(name, func) + num = num + 1 + trace("Configuration Extension code loaded", num, "events") + # Load the idle extensions + for subsection in subsections: + for ext in self.get_data("idle extensions", {}).get(subsection, []): + try: + editor.idle.IDLEExtension(ext) + trace("Loaded IDLE extension", ext) + except: + self.report_error("Can not load the IDLE extension '%s'" % ext) + + # Now bind up the key-map (remembering a reverse map + subsection_keymap = self.get_data("keys") + num_bound = 0 + for subsection in subsections: + keymap = subsection_keymap.get(subsection, {}) + bindings.update_keymap(keymap) + num_bound = num_bound + len(keymap) + trace("Configuration bound", num_bound, "keys") + + def get_key_binding(self, event, subsections = None): + if subsections is None: subsections = [] + subsections = [''] + subsections + + subsection_keymap = self.get_data("keys") + for subsection in subsections: + map = self.key_to_events.get(subsection) + if map is None: # Build it + map = {} + keymap = subsection_keymap.get(subsection, {}) + for key_info, map_event in list(keymap.items()): + map[map_event] = key_info + self.key_to_events[subsection] = map + + info = map.get(event) + if info is not None: + return keycodes.make_key_name( info[0], info[1] ) + return None + + def report_error(self, msg): + self.last_error = msg + print("Error in %s: %s" % (self.filename, msg)) + def report_warning(self, msg): + print("Warning in %s: %s" % (self.filename, msg)) + + def _readline(self, fp, lineno, bStripComments = 1): + line = fp.readline() + lineno = lineno + 1 + if line: + bBreak = get_section_header(line)[0] is not None # A new section is starting + if bStripComments and not bBreak: + pos = line.find("#") + if pos>=0: line=line[:pos]+"\n" + else: + bBreak=1 + return line, lineno, bBreak + + def get_data(self, name, default=None): + return self.cache.get(name, default) + + def _save_data(self, name, data): + self.cache[name] = data + return data + + def _load_general(self, sub_section, fp, lineno): + map = {} + while 1: + line, lineno, bBreak = self._readline(fp, lineno) + if bBreak: break + 
+ key, val = split_line(line, lineno) + if not key: continue + key = key.lower() + l = map.get(key, []) + l.append(val) + map[key]=l + self._save_data("general", map) + return line, lineno + + def _load_keys(self, sub_section, fp, lineno): + # Builds a nested dictionary of + # (scancode, flags) = event_name + main_map = self.get_data("keys", {}) + map = main_map.get(sub_section, {}) + while 1: + line, lineno, bBreak = self._readline(fp, lineno) + if bBreak: break + + key, event = split_line(line, lineno) + if not event: continue + sc, flag = keycodes.parse_key_name(key) + if sc is None: + self.report_warning("Line %d: Invalid key name '%s'" % (lineno, key)) + else: + map[sc, flag] = event + main_map[sub_section] = map + self._save_data("keys", main_map) + return line, lineno + + def _load_extensions(self, sub_section, fp, lineno): + start_lineno = lineno + lines = [] + while 1: + line, lineno, bBreak = self._readline(fp, lineno, 0) + if bBreak: break + lines.append(line) + try: + c = compile( + "\n" * start_lineno + # produces correct tracebacks + "".join(lines), self.filename, "exec") + self._save_data("extension code", c) + except SyntaxError as details: + errlineno = details.lineno + start_lineno + # Should handle syntax errors better here, and offset the lineno. + self.report_error("Compiling extension code failed:\r\nFile: %s\r\nLine %d\r\n%s" \ + % (details.filename, errlineno, details.msg)) + return line, lineno + + def _load_idle_extensions(self, sub_section, fp, lineno): + extension_map = self.get_data("idle extensions") + if extension_map is None: extension_map = {} + extensions = [] + while 1: + line, lineno, bBreak = self._readline(fp, lineno) + if bBreak: break + line = line.strip() + if line: + extensions.append(line) + extension_map[sub_section] = extensions + self._save_data("idle extensions", extension_map) + return line, lineno + +def test(): + import time + start = time.clock() + f="default" + cm = ConfigManager(f) + map = cm.get_data("keys") + took = time.clock()-start + print("Loaded %s items in %.4f secs" % (len(map), took)) + +if __name__=='__main__': + test() diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/configui.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/configui.py new file mode 100644 index 00000000..8223ee5c --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/configui.py @@ -0,0 +1,266 @@ +from pywin.mfc import dialog +import win32api +import win32con +import win32ui +import copy +import string +from . import scintillacon + +# Used to indicate that style should use default color +from win32con import CLR_INVALID + +###################################################### +# Property Page for syntax formatting options + +# The standard 16 color VGA palette should always be possible +paletteVGA = ( + ("Black", win32api.RGB(0,0,0)), + ("Navy", win32api.RGB(0,0,128)), + ("Green", win32api.RGB(0,128,0)), + ("Cyan", win32api.RGB(0,128,128)), + ("Maroon", win32api.RGB(128,0,0)), + ("Purple", win32api.RGB(128,0,128)), + ("Olive", win32api.RGB(128,128,0)), + ("Gray", win32api.RGB(128,128,128)), + ("Silver", win32api.RGB(192,192,192)), + ("Blue", win32api.RGB(0,0,255)), + ("Lime", win32api.RGB(0,255,0)), + ("Aqua", win32api.RGB(0,255,255)), + ("Red", win32api.RGB(255,0,0)), + ("Fuchsia", win32api.RGB(255,0,255)), + ("Yellow", win32api.RGB(255,255,0)), + ("White", win32api.RGB(255,255,255)), +# and a few others will generally be possible. 
+ ("DarkGrey", win32api.RGB(64,64,64)), + ("PurpleBlue", win32api.RGB(64,64,192)), + ("DarkGreen", win32api.RGB(0,96,0)), + ("DarkOlive", win32api.RGB(128,128,64)), + ("MediumBlue", win32api.RGB(0,0,192)), + ("DarkNavy", win32api.RGB(0,0,96)), + ("Magenta", win32api.RGB(96,0,96)), + ("OffWhite", win32api.RGB(255,255,220)), + ("LightPurple", win32api.RGB(220,220,255)), + ("", win32con.CLR_INVALID) +) + +class ScintillaFormatPropertyPage(dialog.PropertyPage): + def __init__(self, scintillaClass = None, caption = 0): + self.scintillaClass = scintillaClass + dialog.PropertyPage.__init__(self, win32ui.IDD_PP_FORMAT, caption=caption) + + def OnInitDialog(self): + try: + if self.scintillaClass is None: + from . import control + sc = control.CScintillaEdit + else: + sc = self.scintillaClass + + self.scintilla = sc() + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.ES_MULTILINE + # Convert the rect size + rect = self.MapDialogRect( (5, 5, 120, 75)) + self.scintilla.CreateWindow(style, rect, self, 111) + self.HookNotify(self.OnBraceMatch, scintillacon.SCN_CHECKBRACE) + self.scintilla.HookKeyStroke(self.OnEsc, 27) + self.scintilla.SCISetViewWS(1) + self.pos_bstart = self.pos_bend = self.pos_bbad = 0 + + colorizer = self.scintilla._GetColorizer() + text = colorizer.GetSampleText() + items = text.split('|', 2) + pos = len(items[0]) + self.scintilla.SCIAddText(''.join(items)) + self.scintilla.SetSel(pos, pos) + self.scintilla.ApplyFormattingStyles() + self.styles = self.scintilla._GetColorizer().styles + + self.cbo = self.GetDlgItem(win32ui.IDC_COMBO1) + for c in paletteVGA: + self.cbo.AddString(c[0]) + + self.cboBoldItalic = self.GetDlgItem(win32ui.IDC_COMBO2) + for item in ["Bold Italic", "Bold", "Italic", "Regular"]: + self.cboBoldItalic.InsertString(0, item) + + self.butIsDefault = self.GetDlgItem(win32ui.IDC_CHECK1) + self.butIsDefaultBackground = self.GetDlgItem(win32ui.IDC_CHECK2) + self.listbox = self.GetDlgItem(win32ui.IDC_LIST1) + self.HookCommand(self.OnListCommand, win32ui.IDC_LIST1) + names = list(self.styles.keys()) + names.sort() + for name in names: + if self.styles[name].aliased is None: + self.listbox.AddString(name) + self.listbox.SetCurSel(0) + + idc = win32ui.IDC_RADIO1 + if not self.scintilla._GetColorizer().bUseFixed: idc = win32ui.IDC_RADIO2 + self.GetDlgItem(idc).SetCheck(1) + self.UpdateUIForStyle(self.styles[names[0]]) + + self.scintilla.HookFormatter(self) + self.HookCommand(self.OnButDefaultFixedFont, win32ui.IDC_BUTTON1) + self.HookCommand(self.OnButDefaultPropFont, win32ui.IDC_BUTTON2) + self.HookCommand(self.OnButThisFont, win32ui.IDC_BUTTON3) + self.HookCommand(self.OnButUseDefaultFont, win32ui.IDC_CHECK1) + self.HookCommand(self.OnButThisBackground, win32ui.IDC_BUTTON4) + self.HookCommand(self.OnButUseDefaultBackground, win32ui.IDC_CHECK2) + self.HookCommand(self.OnStyleUIChanged, win32ui.IDC_COMBO1) + self.HookCommand(self.OnStyleUIChanged, win32ui.IDC_COMBO2) + self.HookCommand(self.OnButFixedOrDefault, win32ui.IDC_RADIO1) + self.HookCommand(self.OnButFixedOrDefault, win32ui.IDC_RADIO2) + except: + import traceback + traceback.print_exc() + + def OnEsc(self, ch): + self.GetParent().EndDialog(win32con.IDCANCEL) + + def OnBraceMatch(self, std, extra): + import pywin.scintilla.view + pywin.scintilla.view.DoBraceMatch(self.scintilla) + + def GetSelectedStyle(self): + return self.styles[self.listbox.GetText(self.listbox.GetCurSel())] + + def _DoButDefaultFont(self, extra_flags, attr): + baseFormat = getattr(self.scintilla._GetColorizer(), attr) + flags = 
extra_flags | win32con.CF_SCREENFONTS | win32con.CF_EFFECTS | win32con.CF_FORCEFONTEXIST + d=win32ui.CreateFontDialog(baseFormat, flags, None, self) + if d.DoModal()==win32con.IDOK: + setattr(self.scintilla._GetColorizer(), attr, d.GetCharFormat()) + self.OnStyleUIChanged(0, win32con.BN_CLICKED) + + def OnButDefaultFixedFont(self, id, code): + if code==win32con.BN_CLICKED: + self._DoButDefaultFont(win32con.CF_FIXEDPITCHONLY, "baseFormatFixed") + return 1 + + def OnButDefaultPropFont(self, id, code): + if code==win32con.BN_CLICKED: + self._DoButDefaultFont(win32con.CF_SCALABLEONLY, "baseFormatProp") + return 1 + + def OnButFixedOrDefault(self, id, code): + if code==win32con.BN_CLICKED: + bUseFixed = id == win32ui.IDC_RADIO1 + self.GetDlgItem(win32ui.IDC_RADIO1).GetCheck() != 0 + self.scintilla._GetColorizer().bUseFixed = bUseFixed + self.scintilla.ApplyFormattingStyles(0) + return 1 + + def OnButThisFont(self, id, code): + if code==win32con.BN_CLICKED: + flags = win32con.CF_SCREENFONTS | win32con.CF_EFFECTS | win32con.CF_FORCEFONTEXIST + style = self.GetSelectedStyle() + # If the selected style is based on the default, we need to apply + # the default to it. + def_format = self.scintilla._GetColorizer().GetDefaultFormat() + format = style.GetCompleteFormat(def_format) + d=win32ui.CreateFontDialog(format, flags, None, self) + if d.DoModal()==win32con.IDOK: + style.format = d.GetCharFormat() + self.scintilla.ApplyFormattingStyles(0) + return 1 + + def OnButUseDefaultFont(self, id, code): + if code == win32con.BN_CLICKED: + isDef = self.butIsDefault.GetCheck() + self.GetDlgItem(win32ui.IDC_BUTTON3).EnableWindow(not isDef) + if isDef: # Being reset to the default font. + style = self.GetSelectedStyle() + style.ForceAgainstDefault() + self.UpdateUIForStyle(style) + self.scintilla.ApplyFormattingStyles(0) + else: + # User wants to override default - + # do nothing! + pass + + def OnButThisBackground(self, id, code): + if code==win32con.BN_CLICKED: + style = self.GetSelectedStyle() + bg = win32api.RGB(0xff, 0xff, 0xff) + if style.background != CLR_INVALID: + bg = style.background + d=win32ui.CreateColorDialog(bg, 0, self) + if d.DoModal()==win32con.IDOK: + style.background = d.GetColor() + self.scintilla.ApplyFormattingStyles(0) + return 1 + + def OnButUseDefaultBackground(self, id, code): + if code == win32con.BN_CLICKED: + isDef = self.butIsDefaultBackground.GetCheck() + self.GetDlgItem(win32ui.IDC_BUTTON4).EnableWindow(not isDef) + if isDef: # Being reset to the default color + style = self.GetSelectedStyle() + style.background = style.default_background + self.UpdateUIForStyle(style) + self.scintilla.ApplyFormattingStyles(0) + else: + # User wants to override default - + # do nothing! 
+ pass + + + def OnListCommand(self, id, code): + if code==win32con.LBN_SELCHANGE: + style = self.GetSelectedStyle() + self.UpdateUIForStyle(style) + return 1 + + def UpdateUIForStyle(self, style ): + format = style.format + sel = 0 + for c in paletteVGA: + if format[4] == c[1]: +# print "Style", style.name, "is", c[0] + break + sel = sel + 1 + else: + sel = -1 + self.cbo.SetCurSel(sel) + self.butIsDefault.SetCheck(style.IsBasedOnDefault()) + self.GetDlgItem(win32ui.IDC_BUTTON3).EnableWindow(not style.IsBasedOnDefault()) + + self.butIsDefaultBackground.SetCheck(style.background == style.default_background) + self.GetDlgItem(win32ui.IDC_BUTTON4).EnableWindow(style.background != style.default_background) + + bold = format[1] & win32con.CFE_BOLD != 0; italic = format[1] & win32con.CFE_ITALIC != 0 + self.cboBoldItalic.SetCurSel( bold*2 + italic ) + + def OnStyleUIChanged(self, id, code): + if code in [win32con.BN_CLICKED, win32con.CBN_SELCHANGE]: + style = self.GetSelectedStyle() + self.ApplyUIFormatToStyle(style) + self.scintilla.ApplyFormattingStyles(0) + return 0 + return 1 + + def ApplyUIFormatToStyle(self, style): + format = style.format + color = paletteVGA[self.cbo.GetCurSel()] + effect = 0 + sel = self.cboBoldItalic.GetCurSel() + if sel==0: + effect = 0 + elif sel==1: + effect = win32con.CFE_ITALIC + elif sel==2: + effect = win32con.CFE_BOLD + else: + effect = win32con.CFE_BOLD | win32con.CFE_ITALIC + maskFlags=format[0]|win32con.CFM_COLOR|win32con.CFM_BOLD|win32con.CFM_ITALIC + style.format = (maskFlags, effect, style.format[2], style.format[3], color[1]) + style.format[5:] + + def OnOK(self): + self.scintilla._GetColorizer().SavePreferences() + return 1 + +def test(): + page = ColorEditorPropertyPage() + sheet = pywin.mfc.dialog.PropertySheet("Test") + sheet.AddPage(page) + sheet.CreateWindow() diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/control.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/control.py new file mode 100644 index 00000000..f1383357 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/control.py @@ -0,0 +1,434 @@ +# An Python interface to the Scintilla control. +# +# Exposes Python classes that allow you to use Scintilla as +# a "standard" MFC edit control (eg, control.GetTextLength(), control.GetSel() +# plus many Scintilla specific features (eg control.SCIAddStyledText()) + +from pywin.mfc import window +from pywin import default_scintilla_encoding +import win32con +import win32ui +import win32api +import array +import struct +import string +import os +from . import scintillacon + +# Load Scintilla.dll to get access to the control. +# We expect to find this in the same directory as win32ui.pyd +dllid = None +if win32ui.debug: # If running _d version of Pythonwin... + try: + dllid = win32api.LoadLibrary(os.path.join(os.path.split(win32ui.__file__)[0], "Scintilla_d.DLL")) + except win32api.error: # Not there - we dont _need_ a debug ver, so ignore this error. + pass +if dllid is None: + try: + dllid = win32api.LoadLibrary(os.path.join(os.path.split(win32ui.__file__)[0], "Scintilla.DLL")) + except win32api.error: + pass +if dllid is None: + # Still not there - lets see if Windows can find it by searching? 
+ dllid = win32api.LoadLibrary("Scintilla.DLL") + +# null_byte is str in py2k, bytes on py3k +null_byte = "\0".encode('ascii') + +## These are from Richedit.h - need to add to win32con or commctrl +EM_GETTEXTRANGE = 1099 +EM_EXLINEFROMCHAR = 1078 +EM_FINDTEXTEX = 1103 +EM_GETSELTEXT = 1086 +EM_EXSETSEL = win32con.WM_USER + 55 + +class ScintillaNotification: + def __init__(self, **args): + self.__dict__.update(args) + +class ScintillaControlInterface: + def SCIUnpackNotifyMessage(self, msg): + format = "iiiiPiiiPPiiii" + bytes = win32ui.GetBytes( msg, struct.calcsize(format) ) + position, ch, modifiers, modificationType, text_ptr, \ + length, linesAdded, msg, wParam, lParam, line, \ + foldLevelNow, foldLevelPrev, margin \ + = struct.unpack(format, bytes) + return ScintillaNotification(position=position,ch=ch, + modifiers=modifiers, modificationType=modificationType, + text_ptr = text_ptr, length=length, linesAdded=linesAdded, + msg = msg, wParam = wParam, lParam = lParam, + line = line, foldLevelNow = foldLevelNow, foldLevelPrev = foldLevelPrev, + margin = margin) + + def SCIAddText(self, text): + self.SendMessage(scintillacon.SCI_ADDTEXT, text.encode(default_scintilla_encoding)) + def SCIAddStyledText(self, text, style = None): + # If style is None, text is assumed to be a "native" Scintilla buffer. + # If style is specified, text is a normal string, and the style is + # assumed to apply to the entire string. + if style is not None: + text = list(map(lambda char, style=style: char+chr(style), text)) + text = ''.join(text) + self.SendMessage(scintillacon.SCI_ADDSTYLEDTEXT, text.encode(default_scintilla_encoding)) + def SCIInsertText(self, text, pos=-1): + # SCIInsertText allows unicode or bytes - but if they are bytes, + # the caller must ensure it is encoded correctly. + if isinstance(text, str): + text = text.encode(default_scintilla_encoding) + self.SendScintilla(scintillacon.SCI_INSERTTEXT, pos, text + null_byte) + def SCISetSavePoint(self): + self.SendScintilla(scintillacon.SCI_SETSAVEPOINT) + def SCISetUndoCollection(self, collectFlag): + self.SendScintilla(scintillacon.SCI_SETUNDOCOLLECTION, collectFlag) + def SCIBeginUndoAction(self): + self.SendScintilla(scintillacon.SCI_BEGINUNDOACTION) + def SCIEndUndoAction(self): + self.SendScintilla(scintillacon.SCI_ENDUNDOACTION) + + def SCIGetCurrentPos(self): + return self.SendScintilla(scintillacon.SCI_GETCURRENTPOS) + def SCIGetCharAt(self, pos): + # Must ensure char is unsigned! + return chr(self.SendScintilla(scintillacon.SCI_GETCHARAT, pos) & 0xFF) + def SCIGotoLine(self, line): + self.SendScintilla(scintillacon.SCI_GOTOLINE, line) + def SCIBraceMatch(self, pos, maxReStyle): + return self.SendScintilla(scintillacon.SCI_BRACEMATCH, pos, maxReStyle) + def SCIBraceHighlight(self, pos, posOpposite): + return self.SendScintilla(scintillacon.SCI_BRACEHIGHLIGHT, pos, posOpposite) + def SCIBraceBadHighlight(self, pos): + return self.SendScintilla(scintillacon.SCI_BRACEBADLIGHT, pos) + + #################################### + # Styling +# def SCIColourise(self, start=0, end=-1): +# NOTE - dependent on of we use builtin lexer, so handled below. 
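# SCIAddStyledText above accepts either a raw Scintilla "cell" buffer or a
# plain string plus a single style number, in which case every character is
# interleaved with its style byte. That interleaving in isolation
# (illustrative helper, not the pywin32 API):

def make_styled_cells(text, style):
    """Return Scintilla-style cell pairs: one content byte, one style byte."""
    if not 0 <= style <= 255:
        raise ValueError("style must fit in one byte")
    out = bytearray()
    for byte in text.encode("utf-8"):
        out.append(byte)                # the character (or UTF-8 fragment)
        out.append(style)               # the style number for that byte
    return bytes(out)

# make_styled_cells("hi", 3) -> b'h\x03i\x03'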
+ def SCIGetEndStyled(self): + return self.SendScintilla(scintillacon.SCI_GETENDSTYLED) + def SCIStyleSetFore(self, num, v): + return self.SendScintilla(scintillacon.SCI_STYLESETFORE, num, v) + def SCIStyleSetBack(self, num, v): + return self.SendScintilla(scintillacon.SCI_STYLESETBACK, num, v) + def SCIStyleSetEOLFilled(self, num, v): + return self.SendScintilla(scintillacon.SCI_STYLESETEOLFILLED, num, v) + def SCIStyleSetFont(self, num, name, characterset=0): + buff = (name + "\0").encode(default_scintilla_encoding) + self.SendScintilla(scintillacon.SCI_STYLESETFONT, num, buff) + self.SendScintilla(scintillacon.SCI_STYLESETCHARACTERSET, num, characterset) + def SCIStyleSetBold(self, num, bBold): + self.SendScintilla(scintillacon.SCI_STYLESETBOLD, num, bBold) + def SCIStyleSetItalic(self, num, bItalic): + self.SendScintilla(scintillacon.SCI_STYLESETITALIC, num, bItalic) + def SCIStyleSetSize(self, num, size): + self.SendScintilla(scintillacon.SCI_STYLESETSIZE, num, size) + def SCIGetViewWS(self): + return self.SendScintilla(scintillacon.SCI_GETVIEWWS) + def SCISetViewWS(self, val): + self.SendScintilla(scintillacon.SCI_SETVIEWWS, not (val==0)) + self.InvalidateRect() + def SCISetIndentationGuides(self, val): + self.SendScintilla(scintillacon.SCI_SETINDENTATIONGUIDES, val) + def SCIGetIndentationGuides(self): + return self.SendScintilla(scintillacon.SCI_GETINDENTATIONGUIDES) + def SCISetIndent(self, val): + self.SendScintilla(scintillacon.SCI_SETINDENT, val) + def SCIGetIndent(self, val): + return self.SendScintilla(scintillacon.SCI_GETINDENT) + + def SCIGetViewEOL(self): + return self.SendScintilla(scintillacon.SCI_GETVIEWEOL) + def SCISetViewEOL(self, val): + self.SendScintilla(scintillacon.SCI_SETVIEWEOL, not(val==0)) + self.InvalidateRect() + def SCISetTabWidth(self, width): + self.SendScintilla(scintillacon.SCI_SETTABWIDTH, width, 0) + def SCIStartStyling(self, pos, mask): + self.SendScintilla(scintillacon.SCI_STARTSTYLING, pos, mask) + def SCISetStyling(self, pos, attr): + self.SendScintilla(scintillacon.SCI_SETSTYLING, pos, attr) + def SCISetStylingEx(self, ray): # ray is an array. 
+ address, length = ray.buffer_info() + self.SendScintilla(scintillacon.SCI_SETSTYLINGEX, length, address) + def SCIGetStyleAt(self, pos): + return self.SendScintilla(scintillacon.SCI_GETSTYLEAT, pos) + def SCISetMarginWidth(self, width): + self.SendScintilla(scintillacon.SCI_SETMARGINWIDTHN, 1, width) + def SCISetMarginWidthN(self, n, width): + self.SendScintilla(scintillacon.SCI_SETMARGINWIDTHN, n, width) + def SCISetFoldFlags(self, flags): + self.SendScintilla(scintillacon.SCI_SETFOLDFLAGS, flags) + # Markers + def SCIMarkerDefineAll(self, markerNum, markerType, fore, back): + self.SCIMarkerDefine(markerNum, markerType) + self.SCIMarkerSetFore(markerNum, fore) + self.SCIMarkerSetBack(markerNum, back) + def SCIMarkerDefine(self, markerNum, markerType): + self.SendScintilla(scintillacon.SCI_MARKERDEFINE, markerNum, markerType) + def SCIMarkerSetFore(self, markerNum, fore): + self.SendScintilla(scintillacon.SCI_MARKERSETFORE, markerNum, fore) + def SCIMarkerSetBack(self, markerNum, back): + self.SendScintilla(scintillacon.SCI_MARKERSETBACK, markerNum, back) + def SCIMarkerAdd(self, lineNo, markerNum): + self.SendScintilla(scintillacon.SCI_MARKERADD, lineNo, markerNum) + def SCIMarkerDelete(self, lineNo, markerNum): + self.SendScintilla(scintillacon.SCI_MARKERDELETE, lineNo, markerNum) + def SCIMarkerDeleteAll(self, markerNum=-1): + self.SendScintilla(scintillacon.SCI_MARKERDELETEALL, markerNum) + def SCIMarkerGet(self, lineNo): + return self.SendScintilla(scintillacon.SCI_MARKERGET, lineNo) + def SCIMarkerNext(self, lineNo, markerNum): + return self.SendScintilla(scintillacon.SCI_MARKERNEXT, lineNo, markerNum) + def SCICancel(self): + self.SendScintilla(scintillacon.SCI_CANCEL) + # AutoComplete + def SCIAutoCShow(self, text): + if type(text) in [type([]), type(())]: + text = ' '.join(text) + buff = (text + "\0").encode(default_scintilla_encoding) + return self.SendScintilla(scintillacon.SCI_AUTOCSHOW, 0, buff) + def SCIAutoCCancel(self): + self.SendScintilla(scintillacon.SCI_AUTOCCANCEL) + def SCIAutoCActive(self): + return self.SendScintilla(scintillacon.SCI_AUTOCACTIVE) + def SCIAutoCComplete(self): + return self.SendScintilla(scintillacon.SCI_AUTOCCOMPLETE) + def SCIAutoCStops(self, stops): + buff = (stops + "\0").encode(default_scintilla_encoding) + self.SendScintilla(scintillacon.SCI_AUTOCSTOPS, 0, buff) + def SCIAutoCSetAutoHide(self, hide): + self.SendScintilla(scintillacon.SCI_AUTOCSETAUTOHIDE, hide) + def SCIAutoCSetFillups(self, fillups): + self.SendScintilla(scintillacon.SCI_AUTOCSETFILLUPS, fillups) + # Call tips + def SCICallTipShow(self, text, pos=-1): + if pos==-1: pos = self.GetSel()[0] + buff = (text + "\0").encode(default_scintilla_encoding) + self.SendScintilla(scintillacon.SCI_CALLTIPSHOW, pos, buff) + def SCICallTipCancel(self): + self.SendScintilla(scintillacon.SCI_CALLTIPCANCEL) + def SCICallTipActive(self): + return self.SendScintilla(scintillacon.SCI_CALLTIPACTIVE) + def SCICallTipPosStart(self): + return self.SendScintilla(scintillacon.SCI_CALLTIPPOSSTART) + def SCINewline(self): + self.SendScintilla(scintillacon.SCI_NEWLINE) + # Lexer etc + def SCISetKeywords(self, keywords, kw_list_no = 0): + buff = (keywords+"\0").encode(default_scintilla_encoding) + self.SendScintilla(scintillacon.SCI_SETKEYWORDS, kw_list_no, buff) + def SCISetProperty(self, name, value): + name_buff = array.array('b', (name + '\0').encode(default_scintilla_encoding)) + val_buff = array.array("b", (str(value)+'\0').encode(default_scintilla_encoding)) + address_name_buffer = 
name_buff.buffer_info()[0] + address_val_buffer = val_buff.buffer_info()[0] + self.SendScintilla(scintillacon.SCI_SETPROPERTY, address_name_buffer, address_val_buffer) + def SCISetStyleBits(self, nbits): + self.SendScintilla(scintillacon.SCI_SETSTYLEBITS, nbits) + # Folding + def SCIGetFoldLevel(self, lineno): + return self.SendScintilla(scintillacon.SCI_GETFOLDLEVEL, lineno) + def SCIToggleFold(self, lineno): + return self.SendScintilla(scintillacon.SCI_TOGGLEFOLD, lineno) + def SCIEnsureVisible(self, lineno): + self.SendScintilla(scintillacon.SCI_ENSUREVISIBLE, lineno) + def SCIGetFoldExpanded(self, lineno): + return self.SendScintilla(scintillacon.SCI_GETFOLDEXPANDED, lineno) + # right edge + def SCISetEdgeColumn(self, edge): + self.SendScintilla(scintillacon.SCI_SETEDGECOLUMN, edge) + def SCIGetEdgeColumn(self): + return self.SendScintilla(scintillacon.SCI_GETEDGECOLUMN) + def SCISetEdgeMode(self, mode): + self.SendScintilla(scintillacon.SCI_SETEDGEMODE, mode) + def SCIGetEdgeMode(self): + return self.SendScintilla(scintillacon.SCI_GETEDGEMODE) + def SCISetEdgeColor(self, color): + self.SendScintilla(scintillacon.SCI_SETEDGECOLOUR, color) + def SCIGetEdgeColor(self): + return self.SendScintilla(scintillacon.SCI_GETEDGECOLOR) + # Multi-doc + def SCIGetDocPointer(self): + return self.SendScintilla(scintillacon.SCI_GETDOCPOINTER) + def SCISetDocPointer(self, p): + return self.SendScintilla(scintillacon.SCI_SETDOCPOINTER, 0, p) + def SCISetWrapMode(self, mode): + return self.SendScintilla(scintillacon.SCI_SETWRAPMODE, mode) + def SCIGetWrapMode(self): + return self.SendScintilla(scintillacon.SCI_GETWRAPMODE) + +class CScintillaEditInterface(ScintillaControlInterface): + def close(self): + self.colorizer = None + def Clear(self): + self.SendScintilla(win32con.WM_CLEAR) + def Clear(self): + self.SendScintilla(win32con.WM_CLEAR) + def FindText(self, flags, range, findText): + """ LPARAM for EM_FINDTEXTEX: + typedef struct _findtextex { + CHARRANGE chrg; + LPCTSTR lpstrText; + CHARRANGE chrgText;} FINDTEXTEX; + typedef struct _charrange { + LONG cpMin; + LONG cpMax;} CHARRANGE; + """ + findtextex_fmt='llPll' + ## Scintilla does not handle unicode in EM_FINDTEXT msg (FINDTEXTEX struct) + txt_buff = (findText+'\0').encode(default_scintilla_encoding) + txt_array = array.array('b', txt_buff) + ft_buff = struct.pack(findtextex_fmt, range[0], range[1], txt_array.buffer_info()[0], 0, 0) + ft_array = array.array('b', ft_buff) + rc = self.SendScintilla(EM_FINDTEXTEX, flags, ft_array.buffer_info()[0]) + ftUnpacked = struct.unpack(findtextex_fmt, ft_array) + return rc, (ftUnpacked[3], ftUnpacked[4]) + + def GetSel(self): + currentPos = self.SendScintilla(scintillacon.SCI_GETCURRENTPOS) + anchorPos = self.SendScintilla(scintillacon.SCI_GETANCHOR) + if currentPos < anchorPos: + return (currentPos, anchorPos) + else: + return (anchorPos, currentPos) + return currentPos; + + def GetSelText(self): + start, end = self.GetSel() + txtBuf = array.array('b', null_byte * (end-start+1)) + addressTxtBuf = txtBuf.buffer_info()[0] + # EM_GETSELTEXT is documented as returning the number of chars + # not including the NULL, but scintilla includes the NULL. A + # quick glance at the scintilla impl doesn't make this + # obvious - the NULL is included in the 'selection' object + # and reflected in the length of that 'selection' object. + # I expect that is a bug in scintilla and may be fixed by now, + # but we just blindly assume that the last char is \0 and + # strip it. 
+ self.SendScintilla(EM_GETSELTEXT, 0, addressTxtBuf) + return txtBuf.tobytes()[:-1].decode(default_scintilla_encoding) + + def SetSel(self, start=0, end=None): + if type(start)==type(()): + assert end is None, "If you pass a point in the first param, the second must be None" + start, end = start + elif end is None: + end = start + if start < 0: start = self.GetTextLength() + if end < 0: end = self.GetTextLength() + assert start <= self.GetTextLength(), "The start postion is invalid (%d/%d)" % (start, self.GetTextLength()) + assert end <= self.GetTextLength(), "The end postion is invalid (%d/%d)" % (end, self.GetTextLength()) + cr = struct.pack('ll', start, end) + crBuff = array.array('b', cr) + addressCrBuff = crBuff.buffer_info()[0] + rc = self.SendScintilla(EM_EXSETSEL, 0, addressCrBuff) + + def GetLineCount(self): + return self.SendScintilla(win32con.EM_GETLINECOUNT) + + def LineFromChar(self, charPos=-1): + if charPos==-1: charPos = self.GetSel()[0] + assert charPos >= 0 and charPos <= self.GetTextLength(), "The charPos postion (%s) is invalid (max=%s)" % (charPos, self.GetTextLength()) + #return self.SendScintilla(EM_EXLINEFROMCHAR, charPos) + # EM_EXLINEFROMCHAR puts charPos in lParam, not wParam + return self.SendScintilla(EM_EXLINEFROMCHAR, 0, charPos) + + def LineIndex(self, line): + return self.SendScintilla(win32con.EM_LINEINDEX, line) + + def ScrollCaret(self): + return self.SendScintilla(win32con.EM_SCROLLCARET) + + def GetCurLineNumber(self): + return self.LineFromChar(self.SCIGetCurrentPos()) + + def GetTextLength(self): + return self.SendScintilla(scintillacon.SCI_GETTEXTLENGTH) + + def GetTextRange(self, start = 0, end = -1, decode = True): + if end == -1: end = self.SendScintilla(scintillacon.SCI_GETTEXTLENGTH) + assert end>=start, "Negative index requested (%d/%d)" % (start, end) + assert start >= 0 and start <= self.GetTextLength(), "The start postion is invalid" + assert end >= 0 and end <= self.GetTextLength(), "The end postion is invalid" + initer = null_byte * (end - start + 1) + buff = array.array('b', initer) + addressBuffer = buff.buffer_info()[0] + tr = struct.pack('llP', start, end, addressBuffer) + trBuff = array.array('b', tr) + addressTrBuff = trBuff.buffer_info()[0] + num_bytes = self.SendScintilla(EM_GETTEXTRANGE, 0, addressTrBuff) + ret = buff.tobytes()[:num_bytes] + if decode: + ret = ret.decode(default_scintilla_encoding) + return ret + + def ReplaceSel(self, str): + buff = (str + "\0").encode(default_scintilla_encoding) + self.SendScintilla(scintillacon.SCI_REPLACESEL, 0, buff) + + def GetLine(self, line=-1): + if line == -1: line = self.GetCurLineNumber() + start = self.LineIndex(line) + end = self.LineIndex(line+1) + return self.GetTextRange(start, end) + + def SetReadOnly(self, flag = 1): + return self.SendScintilla(win32con.EM_SETREADONLY, flag) + + def LineScroll(self, lines, cols=0): + return self.SendScintilla(win32con.EM_LINESCROLL, cols, lines) + + def GetFirstVisibleLine(self): + return self.SendScintilla(win32con.EM_GETFIRSTVISIBLELINE) + + def SetWordWrap(self, mode): + if mode != win32ui.CRichEditView_WrapNone: + raise ValueError("We dont support word-wrap (I dont think :-)") + +class CScintillaColorEditInterface(CScintillaEditInterface): + ################################ + # Plug-in colorizer support + def _GetColorizer(self): + if not hasattr(self, "colorizer"): + self.colorizer = self._MakeColorizer() + return self.colorizer + def _MakeColorizer(self): + # Give parent a chance to hook. 
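# FindText, SetSel and GetTextRange above all talk to the control by packing
# a small C struct with struct.pack, wrapping it in an array so it has a
# stable address, and passing array.buffer_info()[0] as the message's
# LPARAM. The round trip on its own, without a window (a CHARRANGE is two
# LONGs, cpMin and cpMax):

import array, struct

CHARRANGE_FMT = "ll"

def pack_charrange(cp_min, cp_max):
    """Return (buffer, address); the address is what SendMessage would receive."""
    buf = array.array("b", struct.pack(CHARRANGE_FMT, cp_min, cp_max))
    address, _length = buf.buffer_info()
    return buf, address

def unpack_charrange(buf):
    return struct.unpack(CHARRANGE_FMT, buf.tobytes())

# buf, addr = pack_charrange(5, 42)
# unpack_charrange(buf) -> (5, 42)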
+ parent_func = getattr(self.GetParentFrame(), "_MakeColorizer", None) + if parent_func is not None: + return parent_func() + from . import formatter +## return formatter.PythonSourceFormatter(self) + return formatter.BuiltinPythonSourceFormatter(self) + + def Colorize(self, start=0, end=-1): + c = self._GetColorizer() + if c is not None: c.Colorize(start, end) + + def ApplyFormattingStyles(self, bReload=1): + c = self._GetColorizer() + if c is not None: c.ApplyFormattingStyles(bReload) + + # The Parent window will normally hook + def HookFormatter(self, parent = None): + c = self._GetColorizer() + if c is not None: # No need if we have no color! + c.HookFormatter(parent) + +class CScintillaEdit(window.Wnd, CScintillaColorEditInterface): + def __init__(self, wnd=None): + if wnd is None: + wnd = win32ui.CreateWnd() + window.Wnd.__init__(self, wnd) + def SendScintilla(self, msg, w=0, l=0): + return self.SendMessage(msg, w, l) + def CreateWindow(self, style, rect, parent, id): + self._obj_.CreateWindow( + "Scintilla", + "Scintilla", + style, + rect, + parent, + id, + None) + diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/document.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/document.py new file mode 100644 index 00000000..eb76bece --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/document.py @@ -0,0 +1,274 @@ +import win32ui +from pywin.mfc import docview +from pywin import default_scintilla_encoding +from . import scintillacon +import win32con +import string +import os +import codecs +import re + +crlf_bytes = "\r\n".encode("ascii") +lf_bytes = "\n".encode("ascii") + +# re from pep263 - but we use it both on bytes and strings. +re_encoding_bytes = re.compile("coding[:=]\s*([-\w.]+)".encode("ascii")) +re_encoding_text = re.compile("coding[:=]\s*([-\w.]+)") + +ParentScintillaDocument=docview.Document +class CScintillaDocument(ParentScintillaDocument): + "A SyntEdit document. " + def __init__(self, *args): + self.bom = None # the BOM, if any, read from the file. + # the encoding we detected from the source. Might have + # detected via the BOM or an encoding decl. Note that in + # the latter case (ie, while self.bom is None), it can't be + # trusted - the user may have edited the encoding decl between + # open and save. + self.source_encoding = None + ParentScintillaDocument.__init__(self, *args) + + def DeleteContents(self): + pass + + def OnOpenDocument(self, filename): + # init data members + #print "Opening", filename + self.SetPathName(filename) # Must set this early! + try: + # load the text as binary we can get smart + # about detecting any existing EOL conventions. + f = open(filename, 'rb') + try: + self._LoadTextFromFile(f) + finally: + f.close() + except IOError: + rc = win32ui.MessageBox("Could not load the file from %s\n\nDo you want to create a new file?" 
% filename, + "Pythonwin", win32con.MB_YESNO | win32con.MB_ICONWARNING) + if rc == win32con.IDNO: + return 0 + assert rc == win32con.IDYES, rc + try: + f = open(filename, 'wb+') + try: + self._LoadTextFromFile(f) + finally: + f.close() + except IOError as e: + rc = win32ui.MessageBox("Cannot create the file %s" % filename) + return 1 + + def SaveFile(self, fileName, encoding=None): + view = self.GetFirstView() + ok = view.SaveTextFile(fileName, encoding=encoding) + if ok: + view.SCISetSavePoint() + return ok + + def ApplyFormattingStyles(self): + self._ApplyOptionalToViews("ApplyFormattingStyles") + + # ##################### + # File related functions + # Helper to transfer text from the MFC document to the control. + def _LoadTextFromFile(self, f): + # detect EOL mode - we don't support \r only - so find the + # first '\n' and guess based on the char before. + l = f.readline() + l2 = f.readline() + # If line ends with \r\n or has no line ending, use CRLF. + if l.endswith(crlf_bytes) or not l.endswith(lf_bytes): + eol_mode = scintillacon.SC_EOL_CRLF + else: + eol_mode = scintillacon.SC_EOL_LF + + # Detect the encoding - first look for a BOM, and if not found, + # look for a pep263 encoding declaration. + for bom, encoding in ( + (codecs.BOM_UTF8, "utf8"), + (codecs.BOM_UTF16_LE, "utf_16_le"), + (codecs.BOM_UTF16_BE, "utf_16_be"), + ): + if l.startswith(bom): + self.bom = bom + self.source_encoding = encoding + l = l[len(bom):] # remove it. + break + else: + # no bom detected - look for pep263 encoding decl. + for look in (l, l2): + # Note we are looking at raw bytes here: so + # both the re itself uses bytes and the result + # is bytes - but we need the result as a string. + match = re_encoding_bytes.search(look) + if match is not None: + self.source_encoding = match.group(1).decode("ascii") + break + + # reading by lines would be too slow? Maybe we can use the + # incremental encoders? For now just stick with loading the + # entire file in memory. + text = l + l2 + f.read() + + # Translate from source encoding to UTF-8 bytes for Scintilla + source_encoding = self.source_encoding + # If we don't know an encoding, try utf-8 - if that fails we will + # fallback to latin-1 to treat it as bytes... + if source_encoding is None: + source_encoding = 'utf-8' + # we could optimize this by avoiding utf8 to-ing and from-ing, + # but then we would lose the ability to handle invalid utf8 + # (and even then, the use of encoding aliases makes this tricky) + # To create an invalid utf8 file: + # >>> open(filename, "wb").write(codecs.BOM_UTF8+"bad \xa9har\r\n") + try: + dec = text.decode(source_encoding) + except UnicodeError: + print("WARNING: Failed to decode bytes from '%s' encoding - treating as latin1" % source_encoding) + dec = text.decode('latin1') + except LookupError: + print("WARNING: Invalid encoding '%s' specified - treating as latin1" % source_encoding) + dec = text.decode('latin1') + # and put it back as utf8 - this shouldn't fail. 
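# _LoadTextFromFile above picks the source encoding from a BOM first and,
# failing that, from a PEP 263 "coding:" declaration in the first two lines,
# falling back to utf-8. The same detection as a small standalone helper
# (the function name is illustrative):

import codecs, re

_CODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")
_BOMS = (
    (codecs.BOM_UTF8, "utf-8"),
    (codecs.BOM_UTF16_LE, "utf_16_le"),
    (codecs.BOM_UTF16_BE, "utf_16_be"),
)

def sniff_encoding(raw, default="utf-8"):
    """Return (encoding, had_bom) guessed from the raw bytes of a source file."""
    for bom, enc in _BOMS:
        if raw.startswith(bom):
            return enc, True
    for line in raw.splitlines()[:2]:           # PEP 263: first two lines only
        m = _CODING_RE.search(line)
        if m:
            return m.group(1).decode("ascii"), False
    return default, False

# sniff_encoding(b"# -*- coding: latin-1 -*-\nx = 1\n")  -> ('latin-1', False)
# sniff_encoding(codecs.BOM_UTF8 + b"print('hi')\n")     -> ('utf-8', True)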
+ text = dec.encode(default_scintilla_encoding) + + view = self.GetFirstView() + if view.IsWindow(): + # Turn off undo collection while loading + view.SendScintilla(scintillacon.SCI_SETUNDOCOLLECTION, 0, 0) + # Make sure the control isnt read-only + view.SetReadOnly(0) + view.SendScintilla(scintillacon.SCI_CLEARALL) + view.SendMessage(scintillacon.SCI_ADDTEXT, text) + view.SendScintilla(scintillacon.SCI_SETUNDOCOLLECTION, 1, 0) + view.SendScintilla(win32con.EM_EMPTYUNDOBUFFER, 0, 0) + # set EOL mode + view.SendScintilla(scintillacon.SCI_SETEOLMODE, eol_mode) + + def _SaveTextToFile(self, view, filename, encoding=None): + s = view.GetTextRange() # already decoded from scintilla's encoding + source_encoding = encoding + if source_encoding is None: + if self.bom: + source_encoding = self.source_encoding + else: + # no BOM - look for an encoding. + bits = re.split("[\r\n]+", s, 3) + for look in bits[:-1]: + match = re_encoding_text.search(look) + if match is not None: + source_encoding = match.group(1) + self.source_encoding = source_encoding + break + + if source_encoding is None: + source_encoding = 'utf-8' + + ## encode data before opening file so script is not lost if encoding fails + file_contents = s.encode(source_encoding) + # Open in binary mode as scintilla itself ensures the + # line endings are already appropriate + f = open(filename, 'wb') + try: + if self.bom: + f.write(self.bom) + f.write(file_contents) + finally: + f.close() + self.SetModifiedFlag(0) + + def FinalizeViewCreation(self, view): + pass + + def HookViewNotifications(self, view): + parent = view.GetParentFrame() + parent.HookNotify(ViewNotifyDelegate(self, "OnBraceMatch"), scintillacon.SCN_CHECKBRACE) + parent.HookNotify(ViewNotifyDelegate(self, "OnMarginClick"), scintillacon.SCN_MARGINCLICK) + parent.HookNotify(ViewNotifyDelegate(self, "OnNeedShown"), scintillacon.SCN_NEEDSHOWN) + + parent.HookNotify(DocumentNotifyDelegate(self, "OnSavePointReached"), scintillacon.SCN_SAVEPOINTREACHED) + parent.HookNotify(DocumentNotifyDelegate(self, "OnSavePointLeft"), scintillacon.SCN_SAVEPOINTLEFT) + parent.HookNotify(DocumentNotifyDelegate(self, "OnModifyAttemptRO"), scintillacon.SCN_MODIFYATTEMPTRO) + # Tell scintilla what characters should abort auto-complete. + view.SCIAutoCStops(string.whitespace+"()[]:;+-/*=\\?'!#@$%^&,<>\"'|" ) + + if view != self.GetFirstView(): + view.SCISetDocPointer(self.GetFirstView().SCIGetDocPointer()) + + + def OnSavePointReached(self, std, extra): + self.SetModifiedFlag(0) + + def OnSavePointLeft(self, std, extra): + self.SetModifiedFlag(1) + + def OnModifyAttemptRO(self, std, extra): + self.MakeDocumentWritable() + + # All Marker functions are 1 based. + def MarkerAdd( self, lineNo, marker ): + self.GetEditorView().SCIMarkerAdd(lineNo-1, marker) + + def MarkerCheck(self, lineNo, marker ): + v = self.GetEditorView() + lineNo = lineNo - 1 # Make 0 based + markerState = v.SCIMarkerGet(lineNo) + return markerState & (1<= 0: + rc = FOUND_NORMAL + lineno = control.LineFromChar(posFind) + control.SCIEnsureVisible(lineno) + control.SetSel(foundSel) + control.SetFocus() + win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)) + if rc == FOUND_NOTHING and lastSearch.acrossFiles: + # Loop around all documents. First find this document. 
+ try: + try: + doc = control.GetDocument() + except AttributeError: + try: + doc = control.GetParent().GetDocument() + except AttributeError: + print("Cant find a document for the control!") + doc = None + if doc is not None: + template = doc.GetDocTemplate() + alldocs = template.GetDocumentList() + mypos = lookpos = alldocs.index(doc) + while 1: + lookpos = (lookpos+1) % len(alldocs) + if lookpos == mypos: + break + view = alldocs[lookpos].GetFirstView() + posFind, foundSel = view.FindText(flags, (0, view.GetTextLength()), searchParams.findText) + if posFind >= 0: + nChars = foundSel[1]-foundSel[0] + lineNo = view.LineFromChar(posFind) # zero based. + lineStart = view.LineIndex(lineNo) + colNo = posFind - lineStart # zero based. + scriptutils.JumpToDocument(alldocs[lookpos].GetPathName(), lineNo+1, colNo+1, nChars) + rc = FOUND_NEXT_FILE + break + except win32ui.error: + pass + if rc == FOUND_NOTHING: + # Loop around this control - attempt to find from the start of the control. + posFind, foundSel = control.FindText(flags, (0, sel[0]-1), searchParams.findText) + if posFind >= 0: + control.SCIEnsureVisible(control.LineFromChar(foundSel[0])) + control.SetSel(foundSel) + control.SetFocus() + win32ui.SetStatusText("Not found! Searching from the top of the file.") + rc = FOUND_LOOPED_BACK + else: + lastSearch.sel=-1,-1 + win32ui.SetStatusText("Can not find '%s'" % searchParams.findText ) + + if rc != FOUND_NOTHING: + lastSearch.sel = foundSel + + if lastSearch.remember: + defaultSearch = lastSearch + + # track search history + try: + ix = searchHistory.index(searchParams.findText) + except ValueError: + if len(searchHistory) > 50: + searchHistory[50:] = [] + else: + del searchHistory[ix] + searchHistory.insert(0, searchParams.findText) + + return rc + +def _ReplaceIt(control): + control = _GetControl(control) + statusText = "Can not find '%s'." % lastSearch.findText + rc = FOUND_NOTHING + if control is not None and lastSearch.sel != (-1,-1): + control.ReplaceSel(lastSearch.replaceText) + rc = FindNext() + if rc !=FOUND_NOTHING: + statusText = win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE) + win32ui.SetStatusText(statusText) + return rc + +class FindReplaceDialog(dialog.Dialog): + def __init__(self): + dialog.Dialog.__init__(self,self._GetDialogTemplate()) + self.HookCommand(self.OnFindNext, 109) + + def OnInitDialog(self): + self.editFindText = self.GetDlgItem(102) + self.butMatchWords = self.GetDlgItem(105) + self.butMatchCase = self.GetDlgItem(107) + self.butKeepDialogOpen = self.GetDlgItem(115) + self.butAcrossFiles = self.GetDlgItem(116) + self.butRemember = self.GetDlgItem(117) + + self.editFindText.SetWindowText(defaultSearch.findText) + control = _GetControl() + # _GetControl only gets normal MDI windows; if the interactive + # window is docked and no document open, we get None. + if control: + # If we have a selection, default to that. 
+ sel = control.GetSelText() + if (len(sel) != 0): + self.editFindText.SetWindowText(sel) + if (defaultSearch.remember): + defaultSearch.findText = sel + for hist in searchHistory: + self.editFindText.AddString(hist) + + if hasattr(self.editFindText, 'SetEditSel'): + self.editFindText.SetEditSel(0, -2) + else: + self.editFindText.SetSel(0, -2) + self.editFindText.SetFocus() + self.butMatchWords.SetCheck(defaultSearch.matchWords) + self.butMatchCase.SetCheck(defaultSearch.matchCase) + self.butKeepDialogOpen.SetCheck(defaultSearch.keepDialogOpen) + self.butAcrossFiles.SetCheck(defaultSearch.acrossFiles) + self.butRemember.SetCheck(defaultSearch.remember) + return dialog.Dialog.OnInitDialog(self) + + def OnDestroy(self, msg): + global curDialog + curDialog = None + return dialog.Dialog.OnDestroy(self, msg) + + def DoFindNext(self): + params = SearchParams() + params.findText = self.editFindText.GetWindowText() + params.matchCase = self.butMatchCase.GetCheck() + params.matchWords = self.butMatchWords.GetCheck() + params.acrossFiles = self.butAcrossFiles.GetCheck() + params.remember = self.butRemember.GetCheck() + return _FindIt(None, params) + + def OnFindNext(self, id, code): + if not self.editFindText.GetWindowText(): + win32api.MessageBeep() + return + if self.DoFindNext() != FOUND_NOTHING: + if not self.butKeepDialogOpen.GetCheck(): + self.DestroyWindow() + +class FindDialog(FindReplaceDialog): + def _GetDialogTemplate(self): + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + visible = win32con.WS_CHILD | win32con.WS_VISIBLE + dt = [ + ["Find", (0, 2, 240, 75), style, None, (8, "MS Sans Serif")], + ["Static", "Fi&nd What:", 101, (5, 8, 40, 10), visible], + ["ComboBox", "", 102, (50, 7, 120, 120), visible | win32con.WS_BORDER | win32con.WS_TABSTOP | + win32con.WS_VSCROLL |win32con.CBS_DROPDOWN |win32con.CBS_AUTOHSCROLL], + ["Button", "Match &whole word only", 105, (5, 23, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "Match &case", 107, (5, 33, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "Keep &dialog open", 115, (5, 43, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "Across &open files", 116, (5, 52, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "&Remember as default search", 117, (5, 61, 150, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "&Find Next", 109, (185, 5, 50, 14), visible | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP], + ["Button", "Cancel", win32con.IDCANCEL, (185, 23, 50, 14), visible | win32con.WS_TABSTOP], + ] + return dt + +class ReplaceDialog(FindReplaceDialog): + def _GetDialogTemplate(self): + style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT + visible = win32con.WS_CHILD | win32con.WS_VISIBLE + dt = [ + ["Replace", (0, 2, 240, 95), style, 0, (8, "MS Sans Serif")], + ["Static", "Fi&nd What:", 101, (5, 8, 40, 10), visible], + ["ComboBox", "", 102, (60, 7, 110, 120), visible | win32con.WS_BORDER | win32con.WS_TABSTOP | + win32con.WS_VSCROLL |win32con.CBS_DROPDOWN |win32con.CBS_AUTOHSCROLL], + ["Static", "Re&place with:", 103, (5, 25, 50, 10), visible], + ["ComboBox", "", 104, (60, 24, 110, 120), visible | win32con.WS_BORDER | win32con.WS_TABSTOP | + win32con.WS_VSCROLL |win32con.CBS_DROPDOWN |win32con.CBS_AUTOHSCROLL], 
+ ["Button", "Match &whole word only", 105, (5, 42, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "Match &case", 107, (5, 52, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "Keep &dialog open", 115, (5, 62, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "Across &open files", 116, (5, 72, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "&Remember as default search", 117, (5, 81, 150, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP], + ["Button", "&Find Next", 109, (185, 5, 50, 14), visible | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP], + ["Button", "&Replace", 110, (185, 23, 50, 14), visible | win32con.WS_TABSTOP], + ["Button", "Replace &All", 111, (185, 41, 50, 14), visible | win32con.WS_TABSTOP], + ["Button", "Cancel", win32con.IDCANCEL, (185, 59, 50, 14), visible | win32con.WS_TABSTOP], + + + ] + return dt + + def OnInitDialog(self): + rc = FindReplaceDialog.OnInitDialog(self) + self.HookCommand(self.OnReplace, 110) + self.HookCommand(self.OnReplaceAll, 111) + self.HookMessage(self.OnActivate, win32con.WM_ACTIVATE) + self.editReplaceText = self.GetDlgItem(104) + self.editReplaceText.SetWindowText(lastSearch.replaceText) + if hasattr(self.editReplaceText, 'SetEditSel'): + self.editReplaceText.SetEditSel(0, -2) + else: + self.editReplaceText.SetSel(0, -2) + self.butReplace = self.GetDlgItem(110) + self.butReplaceAll = self.GetDlgItem(111) + self.CheckButtonStates() + return rc + + def CheckButtonStates(self): + # We can do a "Replace" or "Replace All" if the current selection + # is the same as the search text. + ft = self.editFindText.GetWindowText() + control = _GetControl() +# bCanReplace = len(ft)>0 and control.GetSelText() == ft + bCanReplace = control is not None and lastSearch.sel == control.GetSel() + self.butReplace.EnableWindow(bCanReplace) +# self.butReplaceAll.EnableWindow(bCanReplace) + + def OnActivate(self, msg): + wparam = msg[2] + fActive = win32api.LOWORD(wparam) + if fActive != win32con.WA_INACTIVE: + self.CheckButtonStates() + + def OnFindNext(self, id, code): + self.DoFindNext() + self.CheckButtonStates() + + def OnReplace(self, id, code): + lastSearch.replaceText = self.editReplaceText.GetWindowText() + _ReplaceIt(None) + + def OnReplaceAll(self, id, code): + control = _GetControl(None) + if control is not None: + control.SetSel(0) + num = 0 + if self.DoFindNext() == FOUND_NORMAL: + num = 1 + lastSearch.replaceText = self.editReplaceText.GetWindowText() + while _ReplaceIt(control) == FOUND_NORMAL: + num = num + 1 + + win32ui.SetStatusText("Replaced %d occurrences" % num) + if num > 0 and not self.butKeepDialogOpen.GetCheck(): + self.DestroyWindow() + +if __name__=='__main__': + ShowFindDialog() diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/formatter.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/formatter.py new file mode 100644 index 00000000..da7bc7c0 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/formatter.py @@ -0,0 +1,587 @@ +# Does Python source formatting for Scintilla controls. +import win32ui +import win32api +import win32con +import winerror +import string +import array +from . import scintillacon + +WM_KICKIDLE = 0x036A + +# Used to indicate that style should use default color +from win32con import CLR_INVALID + +debugging = 0 +if debugging: + # Output must go to another process else the result of + # the printing itself will trigger again trigger a trace. 
+ import sys, win32traceutil, win32trace + def trace(*args): + win32trace.write(' '.join(map(str, args)) + "\n") +else: + trace = lambda *args: None + +class Style: + """Represents a single format + """ + def __init__(self, name, format, background = CLR_INVALID): + self.name = name # Name the format representes eg, "String", "Class" + # Default background for each style is only used when there are no + # saved settings (generally on first startup) + self.background = self.default_background = background + if type(format)==type(''): + self.aliased = format + self.format = None + else: + self.format = format + self.aliased = None + self.stylenum = None # Not yet registered. + def IsBasedOnDefault(self): + return len(self.format)==5 + # If the currently extended font defintion matches the + # default format, restore the format to the "simple" format. + def NormalizeAgainstDefault(self, defaultFormat): + if self.IsBasedOnDefault(): + return 0 # No more to do, and not changed. + bIsDefault = self.format[7] == defaultFormat[7] and \ + self.format[2] == defaultFormat[2] + if bIsDefault: + self.ForceAgainstDefault() + return bIsDefault + def ForceAgainstDefault(self): + self.format = self.format[:5] + def GetCompleteFormat(self, defaultFormat): + # Get the complete style after applying any relevant defaults. + if len(self.format)==5: # It is a default one + fmt = self.format + defaultFormat[5:] + else: + fmt = self.format + flags = win32con.CFM_BOLD | win32con.CFM_CHARSET | win32con.CFM_COLOR | win32con.CFM_FACE | win32con.CFM_ITALIC | win32con.CFM_SIZE + return (flags,) + fmt[1:] + +# The Formatter interface +# used primarily when the actual formatting is done by Scintilla! +class FormatterBase: + def __init__(self, scintilla): + self.scintilla = scintilla + self.baseFormatFixed = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New') + self.baseFormatProp = (-402653169, 0, 200, 0, 0, 0, 49, 'Arial') + self.bUseFixed = 1 + self.styles = {} # Indexed by name + self.styles_by_id = {} # Indexed by allocated ID. + self.SetStyles() + + def HookFormatter(self, parent = None): + raise NotImplementedError() + + # Used by the IDLE extensions to quickly determine if a character is a string. + def GetStringStyle(self, pos): + try: + style = self.styles_by_id[self.scintilla.SCIGetStyleAt(pos)] + except KeyError: + # A style we dont know about - probably not even a .py file - can't be a string + return None + if style.name in self.string_style_names: + return style + return None + + def RegisterStyle(self, style, stylenum): + assert stylenum is not None, "We must have a style number" + assert style.stylenum is None, "Style has already been registered" + assert stylenum not in self.styles, "We are reusing a style number!" + style.stylenum = stylenum + self.styles[style.name] = style + self.styles_by_id[stylenum] = style + + def SetStyles(self): + raise NotImplementedError() + + def GetSampleText(self): + return "Sample Text for the Format Dialog" + + def GetDefaultFormat(self): + if self.bUseFixed: + return self.baseFormatFixed + return self.baseFormatProp + + # Update the control with the new style format. + def _ReformatStyle(self, style): + ## Selection (background only for now) + ## Passing False for WPARAM to SCI_SETSELBACK is documented as resetting to scintilla default, + ## but does not work - selection background is not visible at all. + ## Default value in SPECIAL_STYLES taken from scintilla source. 
+ if style.name == STYLE_SELECTION: + clr = style.background + self.scintilla.SendScintilla(scintillacon.SCI_SETSELBACK, True, clr) + + ## Can't change font for selection, but could set color + ## However, the font color dropbox has no option for default, and thus would + ## always override syntax coloring + ## clr = style.format[4] + ## self.scintilla.SendScintilla(scintillacon.SCI_SETSELFORE, clr != CLR_INVALID, clr) + return + + assert style.stylenum is not None, "Unregistered style." + #print "Reformat style", style.name, style.stylenum + scintilla=self.scintilla + stylenum = style.stylenum + # Now we have the style number, indirect for the actual style. + if style.aliased is not None: + style = self.styles[style.aliased] + f=style.format + if style.IsBasedOnDefault(): + baseFormat = self.GetDefaultFormat() + else: baseFormat = f + scintilla.SCIStyleSetFore(stylenum, f[4]) + scintilla.SCIStyleSetFont(stylenum, baseFormat[7], baseFormat[5]) + if f[1] & 1: scintilla.SCIStyleSetBold(stylenum, 1) + else: scintilla.SCIStyleSetBold(stylenum, 0) + if f[1] & 2: scintilla.SCIStyleSetItalic(stylenum, 1) + else: scintilla.SCIStyleSetItalic(stylenum, 0) + scintilla.SCIStyleSetSize(stylenum, int(baseFormat[2]/20)) + scintilla.SCIStyleSetEOLFilled(stylenum, 1) # Only needed for unclosed strings. + + ## Default style background to whitespace background if set, + ## otherwise use system window color + bg = style.background + if bg == CLR_INVALID: + bg = self.styles[STYLE_DEFAULT].background + if bg == CLR_INVALID: + bg = win32api.GetSysColor(win32con.COLOR_WINDOW) + scintilla.SCIStyleSetBack(stylenum, bg) + + + + def GetStyleByNum(self, stylenum): + return self.styles_by_id[stylenum] + + def ApplyFormattingStyles(self, bReload=1): + if bReload: + self.LoadPreferences() + baseFormat = self.GetDefaultFormat() + defaultStyle = Style("default", baseFormat) + defaultStyle.stylenum = scintillacon.STYLE_DEFAULT + self._ReformatStyle(defaultStyle) + for style in list(self.styles.values()): + if style.aliased is None: + style.NormalizeAgainstDefault(baseFormat) + self._ReformatStyle(style) + self.scintilla.InvalidateRect() + + # Some functions for loading and saving preferences. By default + # an INI file (well, MFC maps this to the registry) is used. 
+ def LoadPreferences(self): + self.baseFormatFixed = eval(self.LoadPreference("Base Format Fixed", str(self.baseFormatFixed))) + self.baseFormatProp = eval(self.LoadPreference("Base Format Proportional", str(self.baseFormatProp))) + self.bUseFixed = int(self.LoadPreference("Use Fixed", 1)) + + for style in list(self.styles.values()): + new = self.LoadPreference(style.name, str(style.format)) + try: + style.format = eval(new) + except: + print("Error loading style data for", style.name) + # Use "vanilla" background hardcoded in PYTHON_STYLES if no settings in registry + style.background = int(self.LoadPreference(style.name + " background", style.default_background)) + + def LoadPreference(self, name, default): + return win32ui.GetProfileVal("Format", name, default) + + def SavePreferences(self): + self.SavePreference("Base Format Fixed", str(self.baseFormatFixed)) + self.SavePreference("Base Format Proportional", str(self.baseFormatProp)) + self.SavePreference("Use Fixed", self.bUseFixed) + for style in list(self.styles.values()): + if style.aliased is None: + self.SavePreference(style.name, str(style.format)) + bg_name = style.name + " background" + self.SavePreference(bg_name, style.background) + + def SavePreference(self, name, value): + win32ui.WriteProfileVal("Format", name, value) + +# An abstract formatter +# For all formatters we actually implement here. +# (as opposed to those formatters built in to Scintilla) +class Formatter(FormatterBase): + def __init__(self, scintilla): + self.bCompleteWhileIdle = 0 + self.bHaveIdleHandler = 0 # Dont currently have an idle handle + self.nextstylenum = 0 + FormatterBase.__init__(self, scintilla) + + def HookFormatter(self, parent = None): + if parent is None: parent = self.scintilla.GetParent() # was GetParentFrame()!? + parent.HookNotify(self.OnStyleNeeded, scintillacon.SCN_STYLENEEDED) + + def OnStyleNeeded(self, std, extra): + notify = self.scintilla.SCIUnpackNotifyMessage(extra) + endStyledChar = self.scintilla.SendScintilla(scintillacon.SCI_GETENDSTYLED) + lineEndStyled = self.scintilla.LineFromChar(endStyledChar) + endStyled = self.scintilla.LineIndex(lineEndStyled) + #print "enPosPaint %d endStyledChar %d lineEndStyled %d endStyled %d" % (endPosPaint, endStyledChar, lineEndStyled, endStyled) + self.Colorize(endStyled, notify.position) + + def ColorSeg(self, start, end, styleName): + end = end+1 +# assert end-start>=0, "Can't have negative styling" + stylenum = self.styles[styleName].stylenum + while start 0: + stylenum = scintilla.SCIGetStyleAt(start - 1) + styleStart = self.GetStyleByNum(stylenum).name + else: + styleStart = None +# trace("Coloring", start, end, end-start, len(stringVal), styleStart, self.scintilla.SCIGetCharAt(start)) + scintilla.SCIStartStyling(start, 31) + self.style_buffer = array.array("b", (0,)*len(stringVal)) + self.ColorizeString(stringVal, styleStart) + scintilla.SCISetStylingEx(self.style_buffer) + self.style_buffer = None +# trace("After styling, end styled is", self.scintilla.SCIGetEndStyled()) + if self.bCompleteWhileIdle and not self.bHaveIdleHandler and end!=-1 and end < scintilla.GetTextLength(): + self.bHaveIdleHandler = 1 + win32ui.GetApp().AddIdleHandler(self.DoMoreColoring) + # Kicking idle makes the app seem slower when initially repainting! 
+# win32ui.GetMainFrame().PostMessage(WM_KICKIDLE, 0, 0) + + def DoMoreColoring(self, handler, count): + try: + scintilla = self.scintilla + endStyled = scintilla.SCIGetEndStyled() + lineStartStyled = scintilla.LineFromChar(endStyled) + start = scintilla.LineIndex(lineStartStyled) + end = scintilla.LineIndex(lineStartStyled+1) + textlen = scintilla.GetTextLength() + if end < 0: end = textlen + + finished = end >= textlen + self.Colorize(start, end) + except (win32ui.error, AttributeError): + # Window may have closed before we finished - no big deal! + finished = 1 + + if finished: + self.bHaveIdleHandler = 0 + win32ui.GetApp().DeleteIdleHandler(handler) + return not finished + +# A Formatter that knows how to format Python source +from keyword import iskeyword, kwlist + +wordstarts = '_0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' +wordchars = '._0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' +operators = '%^&*()-+=|{}[]:;<>,/?!.~' + +STYLE_DEFAULT = "Whitespace" +STYLE_COMMENT = "Comment" +STYLE_COMMENT_BLOCK = "Comment Blocks" +STYLE_NUMBER = "Number" +STYLE_STRING = "String" +STYLE_SQSTRING = "SQ String" +STYLE_TQSSTRING = "TQS String" +STYLE_TQDSTRING = "TQD String" +STYLE_KEYWORD = "Keyword" +STYLE_CLASS = "Class" +STYLE_METHOD = "Method" +STYLE_OPERATOR = "Operator" +STYLE_IDENTIFIER = "Identifier" +STYLE_BRACE = "Brace/Paren - matching" +STYLE_BRACEBAD = "Brace/Paren - unmatched" +STYLE_STRINGEOL = "String with no terminator" +STYLE_LINENUMBER = "Line numbers" +STYLE_INDENTGUIDE = "Indent guide" +STYLE_SELECTION = "Selection" + +STRING_STYLES = [STYLE_STRING, STYLE_SQSTRING, STYLE_TQSSTRING, STYLE_TQDSTRING, STYLE_STRINGEOL] + +# These styles can have any ID - they are not special to scintilla itself. +# However, if we use the built-in lexer, then we must use its style numbers +# so in that case, they _are_ special. +# (name, format, background, scintilla id) +PYTHON_STYLES = [ + (STYLE_DEFAULT, (0, 0, 200, 0, 0x808080), CLR_INVALID, scintillacon.SCE_P_DEFAULT ), + (STYLE_COMMENT, (0, 2, 200, 0, 0x008000), CLR_INVALID, scintillacon.SCE_P_COMMENTLINE ), + (STYLE_COMMENT_BLOCK,(0, 2, 200, 0, 0x808080), CLR_INVALID, scintillacon.SCE_P_COMMENTBLOCK ), + (STYLE_NUMBER, (0, 0, 200, 0, 0x808000), CLR_INVALID, scintillacon.SCE_P_NUMBER ), + (STYLE_STRING, (0, 0, 200, 0, 0x008080), CLR_INVALID, scintillacon.SCE_P_STRING ), + (STYLE_SQSTRING, STYLE_STRING, CLR_INVALID, scintillacon.SCE_P_CHARACTER ), + (STYLE_TQSSTRING, STYLE_STRING, CLR_INVALID, scintillacon.SCE_P_TRIPLE ), + (STYLE_TQDSTRING, STYLE_STRING, CLR_INVALID, scintillacon.SCE_P_TRIPLEDOUBLE), + (STYLE_STRINGEOL, (0, 0, 200, 0, 0x000000), 0x008080, scintillacon.SCE_P_STRINGEOL), + (STYLE_KEYWORD, (0, 1, 200, 0, 0x800000), CLR_INVALID, scintillacon.SCE_P_WORD), + (STYLE_CLASS, (0, 1, 200, 0, 0xFF0000), CLR_INVALID, scintillacon.SCE_P_CLASSNAME ), + (STYLE_METHOD, (0, 1, 200, 0, 0x808000), CLR_INVALID, scintillacon.SCE_P_DEFNAME), + (STYLE_OPERATOR, (0, 0, 200, 0, 0x000000), CLR_INVALID, scintillacon.SCE_P_OPERATOR), + (STYLE_IDENTIFIER, (0, 0, 200, 0, 0x000000), CLR_INVALID, scintillacon.SCE_P_IDENTIFIER ), +] + +# These styles _always_ have this specific style number, regardless of +# internal or external formatter. 
+SPECIAL_STYLES = [ + (STYLE_BRACE, (0, 0, 200, 0, 0x000000), 0xffff80, scintillacon.STYLE_BRACELIGHT), + (STYLE_BRACEBAD, (0, 0, 200, 0, 0x000000), 0x8ea5f2, scintillacon.STYLE_BRACEBAD), + (STYLE_LINENUMBER, (0, 0, 200, 0, 0x000000), win32api.GetSysColor(win32con.COLOR_3DFACE), scintillacon.STYLE_LINENUMBER), + (STYLE_INDENTGUIDE, (0, 0, 200, 0, 0x000000), CLR_INVALID, scintillacon.STYLE_INDENTGUIDE), + ## Not actually a style; requires special handling to send appropriate messages to scintilla + (STYLE_SELECTION, (0, 0, 200, 0, CLR_INVALID), win32api.RGB(0xc0, 0xc0, 0xc0), 999999), +] + +PythonSampleCode = """\ +# Some Python +class Sample(Super): + def Fn(self): +\tself.v = 1024 +dest = 'dest.html' +x = func(a + 1)|) +s = "I forget... +## A large +## comment block""" + +class PythonSourceFormatter(Formatter): + string_style_names = STRING_STYLES + def GetSampleText(self): + return PythonSampleCode + + def LoadStyles(self): + pass + + def SetStyles(self): + for name, format, bg, ignore in PYTHON_STYLES: + self.RegisterStyle( Style(name, format, bg) ) + for name, format, bg, sc_id in SPECIAL_STYLES: + self.RegisterStyle( Style(name, format, bg), sc_id ) + + def ClassifyWord(self, cdoc, start, end, prevWord): + word = cdoc[start:end+1].decode('latin-1') + attr = STYLE_IDENTIFIER + if prevWord == "class": + attr = STYLE_CLASS + elif prevWord == "def": + attr = STYLE_METHOD + elif word[0] in string.digits: + attr = STYLE_NUMBER + elif iskeyword(word): + attr = STYLE_KEYWORD + self.ColorSeg(start, end, attr) + return word + + def ColorizeString(self, str, styleStart): + if styleStart is None: styleStart = STYLE_DEFAULT + return self.ColorizePythonCode(str, 0, styleStart) + + def ColorizePythonCode(self, cdoc, charStart, styleStart): + # Straight translation of C++, should do better + lengthDoc = len(cdoc) + if lengthDoc <= charStart: return + prevWord = "" + state = styleStart + chPrev = chPrev2 = chPrev3 = ' ' + chNext2 = chNext = cdoc[charStart:charStart+1].decode('latin-1') + startSeg = i = charStart + while i < lengthDoc: + ch = chNext + chNext = ' ' + if i+1 < lengthDoc: chNext = cdoc[i+1:i+2].decode('latin-1') + chNext2 = ' ' + if i+2 < lengthDoc: chNext2 = cdoc[i+2:i+3].decode('latin-1') + if state == STYLE_DEFAULT: + if ch in wordstarts: + self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) + state = STYLE_KEYWORD + startSeg = i + elif ch == '#': + self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) + if chNext == '#': + state = STYLE_COMMENT_BLOCK + else: + state = STYLE_COMMENT + startSeg = i + elif ch == '\"': + self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) + startSeg = i + state = STYLE_COMMENT + if chNext == '\"' and chNext2 == '\"': + i = i + 2 + state = STYLE_TQDSTRING + ch = ' ' + chPrev = ' ' + chNext = ' ' + if i+1 < lengthDoc: chNext = cdoc[i+1] + else: + state = STYLE_STRING + elif ch == '\'': + self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) + startSeg = i + state = STYLE_COMMENT + if chNext == '\'' and chNext2 == '\'': + i = i + 2 + state = STYLE_TQSSTRING + ch = ' ' + chPrev = ' ' + chNext = ' ' + if i+1 < lengthDoc: chNext = cdoc[i+1] + else: + state = STYLE_SQSTRING + elif ch in operators: + self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) + self.ColorSeg(i, i, STYLE_OPERATOR) + startSeg = i+1 + elif state == STYLE_KEYWORD: + if ch not in wordchars: + prevWord = self.ClassifyWord(cdoc, startSeg, i-1, prevWord) + state = STYLE_DEFAULT + startSeg = i + if ch == '#': + if chNext == '#': + state = STYLE_COMMENT_BLOCK + else: + state = STYLE_COMMENT + elif ch == '\"': + if chNext == '\"' 
and chNext2 == '\"': + i = i + 2 + state = STYLE_TQDSTRING + ch = ' ' + chPrev = ' ' + chNext = ' ' + if i+1 < lengthDoc: chNext = cdoc[i+1] + else: + state = STYLE_STRING + elif ch == '\'': + if chNext == '\'' and chNext2 == '\'': + i = i + 2 + state = STYLE_TQSSTRING + ch = ' ' + chPrev = ' ' + chNext = ' ' + if i+1 < lengthDoc: chNext = cdoc[i+1] + else: + state = STYLE_SQSTRING + elif ch in operators: + self.ColorSeg(startSeg, i, STYLE_OPERATOR) + startSeg = i+1 + elif state == STYLE_COMMENT or state == STYLE_COMMENT_BLOCK: + if ch == '\r' or ch == '\n': + self.ColorSeg(startSeg, i-1, state) + state = STYLE_DEFAULT + startSeg = i + elif state == STYLE_STRING: + if ch == '\\': + if chNext == '\"' or chNext == '\'' or chNext == '\\': + i = i + 1 + ch = chNext + chNext = ' ' + if i+1 < lengthDoc: chNext = cdoc[i+1] + elif ch == '\"': + self.ColorSeg(startSeg, i, STYLE_STRING) + state = STYLE_DEFAULT + startSeg = i+1 + elif state == STYLE_SQSTRING: + if ch == '\\': + if chNext == '\"' or chNext == '\'' or chNext == '\\': + i = i+1 + ch = chNext + chNext = ' ' + if i+1 < lengthDoc: chNext = cdoc[i+1] + elif ch == '\'': + self.ColorSeg(startSeg, i, STYLE_SQSTRING) + state = STYLE_DEFAULT + startSeg = i+1 + elif state == STYLE_TQSSTRING: + if ch == '\'' and chPrev == '\'' and chPrev2 == '\'' and chPrev3 != '\\': + self.ColorSeg(startSeg, i, STYLE_TQSSTRING) + state = STYLE_DEFAULT + startSeg = i+1 + elif state == STYLE_TQDSTRING and ch == '\"' and chPrev == '\"' and chPrev2 == '\"' and chPrev3 != '\\': + self.ColorSeg(startSeg, i, STYLE_TQDSTRING) + state = STYLE_DEFAULT + startSeg = i+1 + chPrev3 = chPrev2 + chPrev2 = chPrev + chPrev = ch + i = i + 1 + if startSeg < lengthDoc: + if state == STYLE_KEYWORD: + self.ClassifyWord(cdoc, startSeg, lengthDoc-1, prevWord) + else: + self.ColorSeg(startSeg, lengthDoc-1, state) + + +# These taken from the SciTE properties file. +source_formatter_extensions = [ + ( ".py .pys .pyw".split(), scintillacon.SCLEX_PYTHON ), + ( ".html .htm .asp .shtml".split(), scintillacon.SCLEX_HTML ), + ( "c .cc .cpp .cxx .h .hh .hpp .hxx .idl .odl .php3 .phtml .inc .js".split(), scintillacon.SCLEX_CPP ), + ( ".vbs .frm .ctl .cls".split(), scintillacon.SCLEX_VB ), + ( ".pl .pm .cgi .pod".split(), scintillacon.SCLEX_PERL ), + ( ".sql .spec .body .sps .spb .sf .sp".split(), scintillacon.SCLEX_SQL ), + ( ".tex .sty".split(), scintillacon.SCLEX_LATEX ), + ( ".xml .xul".split(), scintillacon.SCLEX_XML ), + ( ".err".split(), scintillacon.SCLEX_ERRORLIST ), + ( ".mak".split(), scintillacon.SCLEX_MAKEFILE ), + ( ".bat .cmd".split(), scintillacon.SCLEX_BATCH ), +] + +class BuiltinSourceFormatter(FormatterBase): + # A class that represents a formatter built-in to Scintilla + def __init__(self, scintilla, ext): + self.ext = ext + FormatterBase.__init__(self, scintilla) + + def Colorize(self, start=0, end=-1): + self.scintilla.SendScintilla(scintillacon.SCI_COLOURISE, start, end) + def RegisterStyle(self, style, stylenum = None): + assert style.stylenum is None, "Style has already been registered" + if stylenum is None: + stylenum = self.nextstylenum + self.nextstylenum = self.nextstylenum + 1 + assert self.styles.get(stylenum) is None, "We are reusing a style number!" 
+ style.stylenum = stylenum + self.styles[style.name] = style + self.styles_by_id[stylenum] = style + + def HookFormatter(self, parent = None): + sc = self.scintilla + for exts, formatter in source_formatter_extensions: + if self.ext in exts: + formatter_use = formatter + break + else: + formatter_use = scintillacon.SCLEX_PYTHON + sc.SendScintilla(scintillacon.SCI_SETLEXER, formatter_use) + keywords = ' '.join(kwlist) + sc.SCISetKeywords(keywords) + +class BuiltinPythonSourceFormatter(BuiltinSourceFormatter): + sci_lexer_name = scintillacon.SCLEX_PYTHON + string_style_names = STRING_STYLES + def __init__(self, sc, ext = ".py"): + BuiltinSourceFormatter.__init__(self, sc, ext) + def SetStyles(self): + for name, format, bg, sc_id in PYTHON_STYLES: + self.RegisterStyle( Style(name, format, bg), sc_id ) + for name, format, bg, sc_id in SPECIAL_STYLES: + self.RegisterStyle( Style(name, format, bg), sc_id ) + def GetSampleText(self): + return PythonSampleCode diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/keycodes.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/keycodes.py new file mode 100644 index 00000000..56aebe1f --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/keycodes.py @@ -0,0 +1,179 @@ +import string +import win32con +import win32api +import win32ui + +MAPVK_VK_TO_CHAR = 2 + +key_name_to_vk = {} +key_code_to_name = {} + +_better_names = { + "escape": "esc", + "return": "enter", + "back": "pgup", + "next": "pgdn", +} + +def _fillvkmap(): + # Pull the VK_names from win32con + names = [entry for entry in win32con.__dict__ if entry.startswith("VK_")] + for name in names: + code = getattr(win32con, name) + n = name[3:].lower() + key_name_to_vk[n] = code + if n in _better_names: + n = _better_names[n] + key_name_to_vk[n] = code + key_code_to_name[code] = n + + +_fillvkmap() + +def get_vk(chardesc): + if len(chardesc)==1: + # it is a character. + info = win32api.VkKeyScan(chardesc) + if info==-1: + # Note: returning None, None causes an error when keyboard layout is non-English, see the report below + # https://stackoverflow.com/questions/45138084/pythonwin-occasionally-gives-an-error-on-opening + return 0, 0 + vk = win32api.LOBYTE(info) + state = win32api.HIBYTE(info) + modifiers = 0 + if state & 0x1: + modifiers |= win32con.SHIFT_PRESSED + if state & 0x2: + modifiers |= win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED + if state & 0x4: + modifiers |= win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED + return vk, modifiers + # must be a 'key name' + return key_name_to_vk.get(chardesc.lower()), 0 + +modifiers = { + "alt" : win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED, + "lalt" : win32con.LEFT_ALT_PRESSED, + "ralt" : win32con.RIGHT_ALT_PRESSED, + "ctrl" : win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED, + "ctl" : win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED, + "control" : win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED, + "lctrl" : win32con.LEFT_CTRL_PRESSED, + "lctl" : win32con.LEFT_CTRL_PRESSED, + "rctrl" : win32con.RIGHT_CTRL_PRESSED, + "rctl" : win32con.RIGHT_CTRL_PRESSED, + "shift" : win32con.SHIFT_PRESSED, + "key" : 0, # ignore key tag. +} + +def parse_key_name(name): + name = name + "-" # Add a sentinal + start = pos = 0 + max = len(name) + toks = [] + while pos") + else: + try: + parts.append(key_code_to_name[vk]) + except KeyError: + # Not in our virtual key map - ask Windows what character this + # key corresponds to. 
+ scancode = win32api.MapVirtualKey(vk, MAPVK_VK_TO_CHAR) + parts.append(chr(scancode)) + sep = "+" + if sep in parts: sep = "-" + return sep.join([p.capitalize() for p in parts]) + +def _psc(char): + sc, mods = get_vk(char) + print("Char %s -> %d -> %s" % (repr(char), sc, key_code_to_name.get(sc))) + +def test1(): + for ch in """aA0/?[{}];:'"`~_-+=\\|,<.>/?""": + _psc(ch) + for code in ["Home", "End", "Left", "Right", "Up", "Down", "Menu", "Next"]: + _psc(code) + +def _pkn(n): + vk, flags = parse_key_name(n) + print("%s -> %s,%s -> %s" % (n, vk, flags, make_key_name(vk, flags))) + +def test2(): + _pkn("ctrl+alt-shift+x") + _pkn("ctrl-home") + _pkn("Shift-+") + _pkn("Shift--") + _pkn("Shift+-") + _pkn("Shift++") + _pkn("LShift-+") + _pkn("ctl+home") + _pkn("ctl+enter") + _pkn("alt+return") + _pkn("Alt+/") + _pkn("Alt+BadKeyName") + _pkn("A") # an ascii char - should be seen as 'a' + _pkn("a") + _pkn("Shift-A") + _pkn("Shift-a") + _pkn("a") + _pkn("(") + _pkn("Ctrl+(") + _pkn("Ctrl+Shift-8") + _pkn("Ctrl+*") + _pkn("{") + _pkn("!") + _pkn(".") + +if __name__=='__main__': + test2() diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/scintillacon.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/scintillacon.py new file mode 100644 index 00000000..da5e7123 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/scintillacon.py @@ -0,0 +1,1992 @@ +# Generated by h2py from Include\scintilla.h + +# Included from BaseTsd.h +def HandleToUlong(h): return HandleToULong(h) + +def UlongToHandle(ul): return ULongToHandle(ul) + +def UlongToPtr(ul): return ULongToPtr(ul) + +def UintToPtr(ui): return UIntToPtr(ui) + +INVALID_POSITION = -1 +SCI_START = 2000 +SCI_OPTIONAL_START = 3000 +SCI_LEXER_START = 4000 +SCI_ADDTEXT = 2001 +SCI_ADDSTYLEDTEXT = 2002 +SCI_INSERTTEXT = 2003 +SCI_CLEARALL = 2004 +SCI_CLEARDOCUMENTSTYLE = 2005 +SCI_GETLENGTH = 2006 +SCI_GETCHARAT = 2007 +SCI_GETCURRENTPOS = 2008 +SCI_GETANCHOR = 2009 +SCI_GETSTYLEAT = 2010 +SCI_REDO = 2011 +SCI_SETUNDOCOLLECTION = 2012 +SCI_SELECTALL = 2013 +SCI_SETSAVEPOINT = 2014 +SCI_GETSTYLEDTEXT = 2015 +SCI_CANREDO = 2016 +SCI_MARKERLINEFROMHANDLE = 2017 +SCI_MARKERDELETEHANDLE = 2018 +SCI_GETUNDOCOLLECTION = 2019 +SCWS_INVISIBLE = 0 +SCWS_VISIBLEALWAYS = 1 +SCWS_VISIBLEAFTERINDENT = 2 +SCI_GETVIEWWS = 2020 +SCI_SETVIEWWS = 2021 +SCI_POSITIONFROMPOINT = 2022 +SCI_POSITIONFROMPOINTCLOSE = 2023 +SCI_GOTOLINE = 2024 +SCI_GOTOPOS = 2025 +SCI_SETANCHOR = 2026 +SCI_GETCURLINE = 2027 +SCI_GETENDSTYLED = 2028 +SC_EOL_CRLF = 0 +SC_EOL_CR = 1 +SC_EOL_LF = 2 +SCI_CONVERTEOLS = 2029 +SCI_GETEOLMODE = 2030 +SCI_SETEOLMODE = 2031 +SCI_STARTSTYLING = 2032 +SCI_SETSTYLING = 2033 +SCI_GETBUFFEREDDRAW = 2034 +SCI_SETBUFFEREDDRAW = 2035 +SCI_SETTABWIDTH = 2036 +SCI_GETTABWIDTH = 2121 +SC_CP_UTF8 = 65001 +SC_CP_DBCS = 1 +SCI_SETCODEPAGE = 2037 +SCI_SETUSEPALETTE = 2039 +MARKER_MAX = 31 +SC_MARK_CIRCLE = 0 +SC_MARK_ROUNDRECT = 1 +SC_MARK_ARROW = 2 +SC_MARK_SMALLRECT = 3 +SC_MARK_SHORTARROW = 4 +SC_MARK_EMPTY = 5 +SC_MARK_ARROWDOWN = 6 +SC_MARK_MINUS = 7 +SC_MARK_PLUS = 8 +SC_MARK_VLINE = 9 +SC_MARK_LCORNER = 10 +SC_MARK_TCORNER = 11 +SC_MARK_BOXPLUS = 12 +SC_MARK_BOXPLUSCONNECTED = 13 +SC_MARK_BOXMINUS = 14 +SC_MARK_BOXMINUSCONNECTED = 15 +SC_MARK_LCORNERCURVE = 16 +SC_MARK_TCORNERCURVE = 17 +SC_MARK_CIRCLEPLUS = 18 +SC_MARK_CIRCLEPLUSCONNECTED = 19 +SC_MARK_CIRCLEMINUS = 20 +SC_MARK_CIRCLEMINUSCONNECTED = 21 +SC_MARK_BACKGROUND = 22 +SC_MARK_DOTDOTDOT = 23 +SC_MARK_ARROWS = 24 +SC_MARK_PIXMAP = 25 +SC_MARK_FULLRECT = 26 +SC_MARK_LEFTRECT = 27 
+SC_MARK_CHARACTER = 10000 +SC_MARKNUM_FOLDEREND = 25 +SC_MARKNUM_FOLDEROPENMID = 26 +SC_MARKNUM_FOLDERMIDTAIL = 27 +SC_MARKNUM_FOLDERTAIL = 28 +SC_MARKNUM_FOLDERSUB = 29 +SC_MARKNUM_FOLDER = 30 +SC_MARKNUM_FOLDEROPEN = 31 +SC_MASK_FOLDERS = (-33554432) +SCI_MARKERDEFINE = 2040 +SCI_MARKERSETFORE = 2041 +SCI_MARKERSETBACK = 2042 +SCI_MARKERADD = 2043 +SCI_MARKERDELETE = 2044 +SCI_MARKERDELETEALL = 2045 +SCI_MARKERGET = 2046 +SCI_MARKERNEXT = 2047 +SCI_MARKERPREVIOUS = 2048 +SCI_MARKERDEFINEPIXMAP = 2049 +SCI_MARKERADDSET = 2466 +SCI_MARKERSETALPHA = 2476 +SC_MARGIN_SYMBOL = 0 +SC_MARGIN_NUMBER = 1 +SC_MARGIN_BACK = 2 +SC_MARGIN_FORE = 3 +SCI_SETMARGINTYPEN = 2240 +SCI_GETMARGINTYPEN = 2241 +SCI_SETMARGINWIDTHN = 2242 +SCI_GETMARGINWIDTHN = 2243 +SCI_SETMARGINMASKN = 2244 +SCI_GETMARGINMASKN = 2245 +SCI_SETMARGINSENSITIVEN = 2246 +SCI_GETMARGINSENSITIVEN = 2247 +STYLE_DEFAULT = 32 +STYLE_LINENUMBER = 33 +STYLE_BRACELIGHT = 34 +STYLE_BRACEBAD = 35 +STYLE_CONTROLCHAR = 36 +STYLE_INDENTGUIDE = 37 +STYLE_CALLTIP = 38 +STYLE_LASTPREDEFINED = 39 +STYLE_MAX = 255 +SC_CHARSET_ANSI = 0 +SC_CHARSET_DEFAULT = 1 +SC_CHARSET_BALTIC = 186 +SC_CHARSET_CHINESEBIG5 = 136 +SC_CHARSET_EASTEUROPE = 238 +SC_CHARSET_GB2312 = 134 +SC_CHARSET_GREEK = 161 +SC_CHARSET_HANGUL = 129 +SC_CHARSET_MAC = 77 +SC_CHARSET_OEM = 255 +SC_CHARSET_RUSSIAN = 204 +SC_CHARSET_CYRILLIC = 1251 +SC_CHARSET_SHIFTJIS = 128 +SC_CHARSET_SYMBOL = 2 +SC_CHARSET_TURKISH = 162 +SC_CHARSET_JOHAB = 130 +SC_CHARSET_HEBREW = 177 +SC_CHARSET_ARABIC = 178 +SC_CHARSET_VIETNAMESE = 163 +SC_CHARSET_THAI = 222 +SC_CHARSET_8859_15 = 1000 +SCI_STYLECLEARALL = 2050 +SCI_STYLESETFORE = 2051 +SCI_STYLESETBACK = 2052 +SCI_STYLESETBOLD = 2053 +SCI_STYLESETITALIC = 2054 +SCI_STYLESETSIZE = 2055 +SCI_STYLESETFONT = 2056 +SCI_STYLESETEOLFILLED = 2057 +SCI_STYLERESETDEFAULT = 2058 +SCI_STYLESETUNDERLINE = 2059 +SC_CASE_MIXED = 0 +SC_CASE_UPPER = 1 +SC_CASE_LOWER = 2 +SCI_STYLEGETFORE = 2481 +SCI_STYLEGETBACK = 2482 +SCI_STYLEGETBOLD = 2483 +SCI_STYLEGETITALIC = 2484 +SCI_STYLEGETSIZE = 2485 +SCI_STYLEGETFONT = 2486 +SCI_STYLEGETEOLFILLED = 2487 +SCI_STYLEGETUNDERLINE = 2488 +SCI_STYLEGETCASE = 2489 +SCI_STYLEGETCHARACTERSET = 2490 +SCI_STYLEGETVISIBLE = 2491 +SCI_STYLEGETCHANGEABLE = 2492 +SCI_STYLEGETHOTSPOT = 2493 +SCI_STYLESETCASE = 2060 +SCI_STYLESETCHARACTERSET = 2066 +SCI_STYLESETHOTSPOT = 2409 +SCI_SETSELFORE = 2067 +SCI_SETSELBACK = 2068 +SCI_GETSELALPHA = 2477 +SCI_SETSELALPHA = 2478 +SCI_GETSELEOLFILLED = 2479 +SCI_SETSELEOLFILLED = 2480 +SCI_SETCARETFORE = 2069 +SCI_ASSIGNCMDKEY = 2070 +SCI_CLEARCMDKEY = 2071 +SCI_CLEARALLCMDKEYS = 2072 +SCI_SETSTYLINGEX = 2073 +SCI_STYLESETVISIBLE = 2074 +SCI_GETCARETPERIOD = 2075 +SCI_SETCARETPERIOD = 2076 +SCI_SETWORDCHARS = 2077 +SCI_BEGINUNDOACTION = 2078 +SCI_ENDUNDOACTION = 2079 +INDIC_PLAIN = 0 +INDIC_SQUIGGLE = 1 +INDIC_TT = 2 +INDIC_DIAGONAL = 3 +INDIC_STRIKE = 4 +INDIC_HIDDEN = 5 +INDIC_BOX = 6 +INDIC_ROUNDBOX = 7 +INDIC_MAX = 31 +INDIC_CONTAINER = 8 +INDIC0_MASK = 0x20 +INDIC1_MASK = 0x40 +INDIC2_MASK = 0x80 +INDICS_MASK = 0xE0 +SCI_INDICSETSTYLE = 2080 +SCI_INDICGETSTYLE = 2081 +SCI_INDICSETFORE = 2082 +SCI_INDICGETFORE = 2083 +SCI_INDICSETUNDER = 2510 +SCI_INDICGETUNDER = 2511 +SCI_SETWHITESPACEFORE = 2084 +SCI_SETWHITESPACEBACK = 2085 +SCI_SETSTYLEBITS = 2090 +SCI_GETSTYLEBITS = 2091 +SCI_SETLINESTATE = 2092 +SCI_GETLINESTATE = 2093 +SCI_GETMAXLINESTATE = 2094 +SCI_GETCARETLINEVISIBLE = 2095 +SCI_SETCARETLINEVISIBLE = 2096 +SCI_GETCARETLINEBACK = 2097 +SCI_SETCARETLINEBACK = 2098 +SCI_STYLESETCHANGEABLE 
= 2099 +SCI_AUTOCSHOW = 2100 +SCI_AUTOCCANCEL = 2101 +SCI_AUTOCACTIVE = 2102 +SCI_AUTOCPOSSTART = 2103 +SCI_AUTOCCOMPLETE = 2104 +SCI_AUTOCSTOPS = 2105 +SCI_AUTOCSETSEPARATOR = 2106 +SCI_AUTOCGETSEPARATOR = 2107 +SCI_AUTOCSELECT = 2108 +SCI_AUTOCSETCANCELATSTART = 2110 +SCI_AUTOCGETCANCELATSTART = 2111 +SCI_AUTOCSETFILLUPS = 2112 +SCI_AUTOCSETCHOOSESINGLE = 2113 +SCI_AUTOCGETCHOOSESINGLE = 2114 +SCI_AUTOCSETIGNORECASE = 2115 +SCI_AUTOCGETIGNORECASE = 2116 +SCI_USERLISTSHOW = 2117 +SCI_AUTOCSETAUTOHIDE = 2118 +SCI_AUTOCGETAUTOHIDE = 2119 +SCI_AUTOCSETDROPRESTOFWORD = 2270 +SCI_AUTOCGETDROPRESTOFWORD = 2271 +SCI_REGISTERIMAGE = 2405 +SCI_CLEARREGISTEREDIMAGES = 2408 +SCI_AUTOCGETTYPESEPARATOR = 2285 +SCI_AUTOCSETTYPESEPARATOR = 2286 +SCI_AUTOCSETMAXWIDTH = 2208 +SCI_AUTOCGETMAXWIDTH = 2209 +SCI_AUTOCSETMAXHEIGHT = 2210 +SCI_AUTOCGETMAXHEIGHT = 2211 +SCI_SETINDENT = 2122 +SCI_GETINDENT = 2123 +SCI_SETUSETABS = 2124 +SCI_GETUSETABS = 2125 +SCI_SETLINEINDENTATION = 2126 +SCI_GETLINEINDENTATION = 2127 +SCI_GETLINEINDENTPOSITION = 2128 +SCI_GETCOLUMN = 2129 +SCI_SETHSCROLLBAR = 2130 +SCI_GETHSCROLLBAR = 2131 +SC_IV_NONE = 0 +SC_IV_REAL = 1 +SC_IV_LOOKFORWARD = 2 +SC_IV_LOOKBOTH = 3 +SCI_SETINDENTATIONGUIDES = 2132 +SCI_GETINDENTATIONGUIDES = 2133 +SCI_SETHIGHLIGHTGUIDE = 2134 +SCI_GETHIGHLIGHTGUIDE = 2135 +SCI_GETLINEENDPOSITION = 2136 +SCI_GETCODEPAGE = 2137 +SCI_GETCARETFORE = 2138 +SCI_GETUSEPALETTE = 2139 +SCI_GETREADONLY = 2140 +SCI_SETCURRENTPOS = 2141 +SCI_SETSELECTIONSTART = 2142 +SCI_GETSELECTIONSTART = 2143 +SCI_SETSELECTIONEND = 2144 +SCI_GETSELECTIONEND = 2145 +SCI_SETPRINTMAGNIFICATION = 2146 +SCI_GETPRINTMAGNIFICATION = 2147 +SC_PRINT_NORMAL = 0 +SC_PRINT_INVERTLIGHT = 1 +SC_PRINT_BLACKONWHITE = 2 +SC_PRINT_COLOURONWHITE = 3 +SC_PRINT_COLOURONWHITEDEFAULTBG = 4 +SCI_SETPRINTCOLOURMODE = 2148 +SCI_GETPRINTCOLOURMODE = 2149 +SCFIND_WHOLEWORD = 2 +SCFIND_MATCHCASE = 4 +SCFIND_WORDSTART = 0x00100000 +SCFIND_REGEXP = 0x00200000 +SCFIND_POSIX = 0x00400000 +SCI_FINDTEXT = 2150 +SCI_FORMATRANGE = 2151 +SCI_GETFIRSTVISIBLELINE = 2152 +SCI_GETLINE = 2153 +SCI_GETLINECOUNT = 2154 +SCI_SETMARGINLEFT = 2155 +SCI_GETMARGINLEFT = 2156 +SCI_SETMARGINRIGHT = 2157 +SCI_GETMARGINRIGHT = 2158 +SCI_GETMODIFY = 2159 +SCI_SETSEL = 2160 +SCI_GETSELTEXT = 2161 +SCI_GETTEXTRANGE = 2162 +SCI_HIDESELECTION = 2163 +SCI_POINTXFROMPOSITION = 2164 +SCI_POINTYFROMPOSITION = 2165 +SCI_LINEFROMPOSITION = 2166 +SCI_POSITIONFROMLINE = 2167 +SCI_LINESCROLL = 2168 +SCI_SCROLLCARET = 2169 +SCI_REPLACESEL = 2170 +SCI_SETREADONLY = 2171 +SCI_NULL = 2172 +SCI_CANPASTE = 2173 +SCI_CANUNDO = 2174 +SCI_EMPTYUNDOBUFFER = 2175 +SCI_UNDO = 2176 +SCI_CUT = 2177 +SCI_COPY = 2178 +SCI_PASTE = 2179 +SCI_CLEAR = 2180 +SCI_SETTEXT = 2181 +SCI_GETTEXT = 2182 +SCI_GETTEXTLENGTH = 2183 +SCI_GETDIRECTFUNCTION = 2184 +SCI_GETDIRECTPOINTER = 2185 +SCI_SETOVERTYPE = 2186 +SCI_GETOVERTYPE = 2187 +SCI_SETCARETWIDTH = 2188 +SCI_GETCARETWIDTH = 2189 +SCI_SETTARGETSTART = 2190 +SCI_GETTARGETSTART = 2191 +SCI_SETTARGETEND = 2192 +SCI_GETTARGETEND = 2193 +SCI_REPLACETARGET = 2194 +SCI_REPLACETARGETRE = 2195 +SCI_SEARCHINTARGET = 2197 +SCI_SETSEARCHFLAGS = 2198 +SCI_GETSEARCHFLAGS = 2199 +SCI_CALLTIPSHOW = 2200 +SCI_CALLTIPCANCEL = 2201 +SCI_CALLTIPACTIVE = 2202 +SCI_CALLTIPPOSSTART = 2203 +SCI_CALLTIPSETHLT = 2204 +SCI_CALLTIPSETBACK = 2205 +SCI_CALLTIPSETFORE = 2206 +SCI_CALLTIPSETFOREHLT = 2207 +SCI_CALLTIPUSESTYLE = 2212 +SCI_VISIBLEFROMDOCLINE = 2220 +SCI_DOCLINEFROMVISIBLE = 2221 +SCI_WRAPCOUNT = 2235 +SC_FOLDLEVELBASE = 0x400 
+SC_FOLDLEVELWHITEFLAG = 0x1000 +SC_FOLDLEVELHEADERFLAG = 0x2000 +SC_FOLDLEVELBOXHEADERFLAG = 0x4000 +SC_FOLDLEVELBOXFOOTERFLAG = 0x8000 +SC_FOLDLEVELCONTRACTED = 0x10000 +SC_FOLDLEVELUNINDENT = 0x20000 +SC_FOLDLEVELNUMBERMASK = 0x0FFF +SCI_SETFOLDLEVEL = 2222 +SCI_GETFOLDLEVEL = 2223 +SCI_GETLASTCHILD = 2224 +SCI_GETFOLDPARENT = 2225 +SCI_SHOWLINES = 2226 +SCI_HIDELINES = 2227 +SCI_GETLINEVISIBLE = 2228 +SCI_SETFOLDEXPANDED = 2229 +SCI_GETFOLDEXPANDED = 2230 +SCI_TOGGLEFOLD = 2231 +SCI_ENSUREVISIBLE = 2232 +SC_FOLDFLAG_LINEBEFORE_EXPANDED = 0x0002 +SC_FOLDFLAG_LINEBEFORE_CONTRACTED = 0x0004 +SC_FOLDFLAG_LINEAFTER_EXPANDED = 0x0008 +SC_FOLDFLAG_LINEAFTER_CONTRACTED = 0x0010 +SC_FOLDFLAG_LEVELNUMBERS = 0x0040 +SC_FOLDFLAG_BOX = 0x0001 +SCI_SETFOLDFLAGS = 2233 +SCI_ENSUREVISIBLEENFORCEPOLICY = 2234 +SCI_SETTABINDENTS = 2260 +SCI_GETTABINDENTS = 2261 +SCI_SETBACKSPACEUNINDENTS = 2262 +SCI_GETBACKSPACEUNINDENTS = 2263 +SC_TIME_FOREVER = 10000000 +SCI_SETMOUSEDWELLTIME = 2264 +SCI_GETMOUSEDWELLTIME = 2265 +SCI_WORDSTARTPOSITION = 2266 +SCI_WORDENDPOSITION = 2267 +SC_WRAP_NONE = 0 +SC_WRAP_WORD = 1 +SC_WRAP_CHAR = 2 +SCI_SETWRAPMODE = 2268 +SCI_GETWRAPMODE = 2269 +SC_WRAPVISUALFLAG_NONE = 0x0000 +SC_WRAPVISUALFLAG_END = 0x0001 +SC_WRAPVISUALFLAG_START = 0x0002 +SCI_SETWRAPVISUALFLAGS = 2460 +SCI_GETWRAPVISUALFLAGS = 2461 +SC_WRAPVISUALFLAGLOC_DEFAULT = 0x0000 +SC_WRAPVISUALFLAGLOC_END_BY_TEXT = 0x0001 +SC_WRAPVISUALFLAGLOC_START_BY_TEXT = 0x0002 +SCI_SETWRAPVISUALFLAGSLOCATION = 2462 +SCI_GETWRAPVISUALFLAGSLOCATION = 2463 +SCI_SETWRAPSTARTINDENT = 2464 +SCI_GETWRAPSTARTINDENT = 2465 +SC_CACHE_NONE = 0 +SC_CACHE_CARET = 1 +SC_CACHE_PAGE = 2 +SC_CACHE_DOCUMENT = 3 +SCI_SETLAYOUTCACHE = 2272 +SCI_GETLAYOUTCACHE = 2273 +SCI_SETSCROLLWIDTH = 2274 +SCI_GETSCROLLWIDTH = 2275 +SCI_SETSCROLLWIDTHTRACKING = 2516 +SCI_GETSCROLLWIDTHTRACKING = 2517 +SCI_TEXTWIDTH = 2276 +SCI_SETENDATLASTLINE = 2277 +SCI_GETENDATLASTLINE = 2278 +SCI_TEXTHEIGHT = 2279 +SCI_SETVSCROLLBAR = 2280 +SCI_GETVSCROLLBAR = 2281 +SCI_APPENDTEXT = 2282 +SCI_GETTWOPHASEDRAW = 2283 +SCI_SETTWOPHASEDRAW = 2284 +SCI_TARGETFROMSELECTION = 2287 +SCI_LINESJOIN = 2288 +SCI_LINESSPLIT = 2289 +SCI_SETFOLDMARGINCOLOUR = 2290 +SCI_SETFOLDMARGINHICOLOUR = 2291 +SCI_LINEDOWN = 2300 +SCI_LINEDOWNEXTEND = 2301 +SCI_LINEUP = 2302 +SCI_LINEUPEXTEND = 2303 +SCI_CHARLEFT = 2304 +SCI_CHARLEFTEXTEND = 2305 +SCI_CHARRIGHT = 2306 +SCI_CHARRIGHTEXTEND = 2307 +SCI_WORDLEFT = 2308 +SCI_WORDLEFTEXTEND = 2309 +SCI_WORDRIGHT = 2310 +SCI_WORDRIGHTEXTEND = 2311 +SCI_HOME = 2312 +SCI_HOMEEXTEND = 2313 +SCI_LINEEND = 2314 +SCI_LINEENDEXTEND = 2315 +SCI_DOCUMENTSTART = 2316 +SCI_DOCUMENTSTARTEXTEND = 2317 +SCI_DOCUMENTEND = 2318 +SCI_DOCUMENTENDEXTEND = 2319 +SCI_PAGEUP = 2320 +SCI_PAGEUPEXTEND = 2321 +SCI_PAGEDOWN = 2322 +SCI_PAGEDOWNEXTEND = 2323 +SCI_EDITTOGGLEOVERTYPE = 2324 +SCI_CANCEL = 2325 +SCI_DELETEBACK = 2326 +SCI_TAB = 2327 +SCI_BACKTAB = 2328 +SCI_NEWLINE = 2329 +SCI_FORMFEED = 2330 +SCI_VCHOME = 2331 +SCI_VCHOMEEXTEND = 2332 +SCI_ZOOMIN = 2333 +SCI_ZOOMOUT = 2334 +SCI_DELWORDLEFT = 2335 +SCI_DELWORDRIGHT = 2336 +SCI_DELWORDRIGHTEND = 2518 +SCI_LINECUT = 2337 +SCI_LINEDELETE = 2338 +SCI_LINETRANSPOSE = 2339 +SCI_LINEDUPLICATE = 2404 +SCI_LOWERCASE = 2340 +SCI_UPPERCASE = 2341 +SCI_LINESCROLLDOWN = 2342 +SCI_LINESCROLLUP = 2343 +SCI_DELETEBACKNOTLINE = 2344 +SCI_HOMEDISPLAY = 2345 +SCI_HOMEDISPLAYEXTEND = 2346 +SCI_LINEENDDISPLAY = 2347 +SCI_LINEENDDISPLAYEXTEND = 2348 +SCI_HOMEWRAP = 2349 +SCI_HOMEWRAPEXTEND = 2450 +SCI_LINEENDWRAP = 2451 
+SCI_LINEENDWRAPEXTEND = 2452 +SCI_VCHOMEWRAP = 2453 +SCI_VCHOMEWRAPEXTEND = 2454 +SCI_LINECOPY = 2455 +SCI_MOVECARETINSIDEVIEW = 2401 +SCI_LINELENGTH = 2350 +SCI_BRACEHIGHLIGHT = 2351 +SCI_BRACEBADLIGHT = 2352 +SCI_BRACEMATCH = 2353 +SCI_GETVIEWEOL = 2355 +SCI_SETVIEWEOL = 2356 +SCI_GETDOCPOINTER = 2357 +SCI_SETDOCPOINTER = 2358 +SCI_SETMODEVENTMASK = 2359 +EDGE_NONE = 0 +EDGE_LINE = 1 +EDGE_BACKGROUND = 2 +SCI_GETEDGECOLUMN = 2360 +SCI_SETEDGECOLUMN = 2361 +SCI_GETEDGEMODE = 2362 +SCI_SETEDGEMODE = 2363 +SCI_GETEDGECOLOUR = 2364 +SCI_SETEDGECOLOUR = 2365 +SCI_SEARCHANCHOR = 2366 +SCI_SEARCHNEXT = 2367 +SCI_SEARCHPREV = 2368 +SCI_LINESONSCREEN = 2370 +SCI_USEPOPUP = 2371 +SCI_SELECTIONISRECTANGLE = 2372 +SCI_SETZOOM = 2373 +SCI_GETZOOM = 2374 +SCI_CREATEDOCUMENT = 2375 +SCI_ADDREFDOCUMENT = 2376 +SCI_RELEASEDOCUMENT = 2377 +SCI_GETMODEVENTMASK = 2378 +SCI_SETFOCUS = 2380 +SCI_GETFOCUS = 2381 +SCI_SETSTATUS = 2382 +SCI_GETSTATUS = 2383 +SCI_SETMOUSEDOWNCAPTURES = 2384 +SCI_GETMOUSEDOWNCAPTURES = 2385 +SC_CURSORNORMAL = -1 +SC_CURSORWAIT = 4 +SCI_SETCURSOR = 2386 +SCI_GETCURSOR = 2387 +SCI_SETCONTROLCHARSYMBOL = 2388 +SCI_GETCONTROLCHARSYMBOL = 2389 +SCI_WORDPARTLEFT = 2390 +SCI_WORDPARTLEFTEXTEND = 2391 +SCI_WORDPARTRIGHT = 2392 +SCI_WORDPARTRIGHTEXTEND = 2393 +VISIBLE_SLOP = 0x01 +VISIBLE_STRICT = 0x04 +SCI_SETVISIBLEPOLICY = 2394 +SCI_DELLINELEFT = 2395 +SCI_DELLINERIGHT = 2396 +SCI_SETXOFFSET = 2397 +SCI_GETXOFFSET = 2398 +SCI_CHOOSECARETX = 2399 +SCI_GRABFOCUS = 2400 +CARET_SLOP = 0x01 +CARET_STRICT = 0x04 +CARET_JUMPS = 0x10 +CARET_EVEN = 0x08 +SCI_SETXCARETPOLICY = 2402 +SCI_SETYCARETPOLICY = 2403 +SCI_SETPRINTWRAPMODE = 2406 +SCI_GETPRINTWRAPMODE = 2407 +SCI_SETHOTSPOTACTIVEFORE = 2410 +SCI_GETHOTSPOTACTIVEFORE = 2494 +SCI_SETHOTSPOTACTIVEBACK = 2411 +SCI_GETHOTSPOTACTIVEBACK = 2495 +SCI_SETHOTSPOTACTIVEUNDERLINE = 2412 +SCI_GETHOTSPOTACTIVEUNDERLINE = 2496 +SCI_SETHOTSPOTSINGLELINE = 2421 +SCI_GETHOTSPOTSINGLELINE = 2497 +SCI_PARADOWN = 2413 +SCI_PARADOWNEXTEND = 2414 +SCI_PARAUP = 2415 +SCI_PARAUPEXTEND = 2416 +SCI_POSITIONBEFORE = 2417 +SCI_POSITIONAFTER = 2418 +SCI_COPYRANGE = 2419 +SCI_COPYTEXT = 2420 +SC_SEL_STREAM = 0 +SC_SEL_RECTANGLE = 1 +SC_SEL_LINES = 2 +SCI_SETSELECTIONMODE = 2422 +SCI_GETSELECTIONMODE = 2423 +SCI_GETLINESELSTARTPOSITION = 2424 +SCI_GETLINESELENDPOSITION = 2425 +SCI_LINEDOWNRECTEXTEND = 2426 +SCI_LINEUPRECTEXTEND = 2427 +SCI_CHARLEFTRECTEXTEND = 2428 +SCI_CHARRIGHTRECTEXTEND = 2429 +SCI_HOMERECTEXTEND = 2430 +SCI_VCHOMERECTEXTEND = 2431 +SCI_LINEENDRECTEXTEND = 2432 +SCI_PAGEUPRECTEXTEND = 2433 +SCI_PAGEDOWNRECTEXTEND = 2434 +SCI_STUTTEREDPAGEUP = 2435 +SCI_STUTTEREDPAGEUPEXTEND = 2436 +SCI_STUTTEREDPAGEDOWN = 2437 +SCI_STUTTEREDPAGEDOWNEXTEND = 2438 +SCI_WORDLEFTEND = 2439 +SCI_WORDLEFTENDEXTEND = 2440 +SCI_WORDRIGHTEND = 2441 +SCI_WORDRIGHTENDEXTEND = 2442 +SCI_SETWHITESPACECHARS = 2443 +SCI_SETCHARSDEFAULT = 2444 +SCI_AUTOCGETCURRENT = 2445 +SCI_ALLOCATE = 2446 +SCI_TARGETASUTF8 = 2447 +SCI_SETLENGTHFORENCODE = 2448 +SCI_ENCODEDFROMUTF8 = 2449 +SCI_FINDCOLUMN = 2456 +SCI_GETCARETSTICKY = 2457 +SCI_SETCARETSTICKY = 2458 +SCI_TOGGLECARETSTICKY = 2459 +SCI_SETPASTECONVERTENDINGS = 2467 +SCI_GETPASTECONVERTENDINGS = 2468 +SCI_SELECTIONDUPLICATE = 2469 +SC_ALPHA_TRANSPARENT = 0 +SC_ALPHA_OPAQUE = 255 +SC_ALPHA_NOALPHA = 256 +SCI_SETCARETLINEBACKALPHA = 2470 +SCI_GETCARETLINEBACKALPHA = 2471 +CARETSTYLE_INVISIBLE = 0 +CARETSTYLE_LINE = 1 +CARETSTYLE_BLOCK = 2 +SCI_SETCARETSTYLE = 2512 +SCI_GETCARETSTYLE = 2513 +SCI_SETINDICATORCURRENT = 2500 
+SCI_GETINDICATORCURRENT = 2501 +SCI_SETINDICATORVALUE = 2502 +SCI_GETINDICATORVALUE = 2503 +SCI_INDICATORFILLRANGE = 2504 +SCI_INDICATORCLEARRANGE = 2505 +SCI_INDICATORALLONFOR = 2506 +SCI_INDICATORVALUEAT = 2507 +SCI_INDICATORSTART = 2508 +SCI_INDICATOREND = 2509 +SCI_SETPOSITIONCACHE = 2514 +SCI_GETPOSITIONCACHE = 2515 +SCI_COPYALLOWLINE = 2519 +SCI_GETCHARACTERPOINTER = 2520 +SCI_SETKEYSUNICODE = 2521 +SCI_GETKEYSUNICODE = 2522 +SCI_STARTRECORD = 3001 +SCI_STOPRECORD = 3002 +SCI_SETLEXER = 4001 +SCI_GETLEXER = 4002 +SCI_COLOURISE = 4003 +SCI_SETPROPERTY = 4004 +KEYWORDSET_MAX = 8 +SCI_SETKEYWORDS = 4005 +SCI_SETLEXERLANGUAGE = 4006 +SCI_LOADLEXERLIBRARY = 4007 +SCI_GETPROPERTY = 4008 +SCI_GETPROPERTYEXPANDED = 4009 +SCI_GETPROPERTYINT = 4010 +SCI_GETSTYLEBITSNEEDED = 4011 +SC_MOD_INSERTTEXT = 0x1 +SC_MOD_DELETETEXT = 0x2 +SC_MOD_CHANGESTYLE = 0x4 +SC_MOD_CHANGEFOLD = 0x8 +SC_PERFORMED_USER = 0x10 +SC_PERFORMED_UNDO = 0x20 +SC_PERFORMED_REDO = 0x40 +SC_MULTISTEPUNDOREDO = 0x80 +SC_LASTSTEPINUNDOREDO = 0x100 +SC_MOD_CHANGEMARKER = 0x200 +SC_MOD_BEFOREINSERT = 0x400 +SC_MOD_BEFOREDELETE = 0x800 +SC_MULTILINEUNDOREDO = 0x1000 +SC_STARTACTION = 0x2000 +SC_MOD_CHANGEINDICATOR = 0x4000 +SC_MOD_CHANGELINESTATE = 0x8000 +SC_MODEVENTMASKALL = 0xFFFF +SCEN_CHANGE = 768 +SCEN_SETFOCUS = 512 +SCEN_KILLFOCUS = 256 +SCK_DOWN = 300 +SCK_UP = 301 +SCK_LEFT = 302 +SCK_RIGHT = 303 +SCK_HOME = 304 +SCK_END = 305 +SCK_PRIOR = 306 +SCK_NEXT = 307 +SCK_DELETE = 308 +SCK_INSERT = 309 +SCK_ESCAPE = 7 +SCK_BACK = 8 +SCK_TAB = 9 +SCK_RETURN = 13 +SCK_ADD = 310 +SCK_SUBTRACT = 311 +SCK_DIVIDE = 312 +SCK_WIN = 313 +SCK_RWIN = 314 +SCK_MENU = 315 +SCMOD_NORM = 0 +SCMOD_SHIFT = 1 +SCMOD_CTRL = 2 +SCMOD_ALT = 4 +SCN_STYLENEEDED = 2000 +SCN_CHARADDED = 2001 +SCN_SAVEPOINTREACHED = 2002 +SCN_SAVEPOINTLEFT = 2003 +SCN_MODIFYATTEMPTRO = 2004 +SCN_KEY = 2005 +SCN_DOUBLECLICK = 2006 +SCN_UPDATEUI = 2007 +SCN_MODIFIED = 2008 +SCN_MACRORECORD = 2009 +SCN_MARGINCLICK = 2010 +SCN_NEEDSHOWN = 2011 +SCN_PAINTED = 2013 +SCN_USERLISTSELECTION = 2014 +SCN_URIDROPPED = 2015 +SCN_DWELLSTART = 2016 +SCN_DWELLEND = 2017 +SCN_ZOOM = 2018 +SCN_HOTSPOTCLICK = 2019 +SCN_HOTSPOTDOUBLECLICK = 2020 +SCN_CALLTIPCLICK = 2021 +SCN_AUTOCSELECTION = 2022 +SCN_INDICATORCLICK = 2023 +SCN_INDICATORRELEASE = 2024 +SCN_AUTOCCANCELLED = 2025 +SCI_SETCARETPOLICY = 2369 +CARET_CENTER = 0x02 +CARET_XEVEN = 0x08 +CARET_XJUMPS = 0x10 +SCN_POSCHANGED = 2012 +SCN_CHECKBRACE = 2007 +# Generated by h2py from Include\scilexer.h +SCLEX_CONTAINER = 0 +SCLEX_NULL = 1 +SCLEX_PYTHON = 2 +SCLEX_CPP = 3 +SCLEX_HTML = 4 +SCLEX_XML = 5 +SCLEX_PERL = 6 +SCLEX_SQL = 7 +SCLEX_VB = 8 +SCLEX_PROPERTIES = 9 +SCLEX_ERRORLIST = 10 +SCLEX_MAKEFILE = 11 +SCLEX_BATCH = 12 +SCLEX_XCODE = 13 +SCLEX_LATEX = 14 +SCLEX_LUA = 15 +SCLEX_DIFF = 16 +SCLEX_CONF = 17 +SCLEX_PASCAL = 18 +SCLEX_AVE = 19 +SCLEX_ADA = 20 +SCLEX_LISP = 21 +SCLEX_RUBY = 22 +SCLEX_EIFFEL = 23 +SCLEX_EIFFELKW = 24 +SCLEX_TCL = 25 +SCLEX_NNCRONTAB = 26 +SCLEX_BULLANT = 27 +SCLEX_VBSCRIPT = 28 +SCLEX_BAAN = 31 +SCLEX_MATLAB = 32 +SCLEX_SCRIPTOL = 33 +SCLEX_ASM = 34 +SCLEX_CPPNOCASE = 35 +SCLEX_FORTRAN = 36 +SCLEX_F77 = 37 +SCLEX_CSS = 38 +SCLEX_POV = 39 +SCLEX_LOUT = 40 +SCLEX_ESCRIPT = 41 +SCLEX_PS = 42 +SCLEX_NSIS = 43 +SCLEX_MMIXAL = 44 +SCLEX_CLW = 45 +SCLEX_CLWNOCASE = 46 +SCLEX_LOT = 47 +SCLEX_YAML = 48 +SCLEX_TEX = 49 +SCLEX_METAPOST = 50 +SCLEX_POWERBASIC = 51 +SCLEX_FORTH = 52 +SCLEX_ERLANG = 53 +SCLEX_OCTAVE = 54 +SCLEX_MSSQL = 55 +SCLEX_VERILOG = 56 +SCLEX_KIX = 57 +SCLEX_GUI4CLI = 58 +SCLEX_SPECMAN = 59 
+SCLEX_AU3 = 60 +SCLEX_APDL = 61 +SCLEX_BASH = 62 +SCLEX_ASN1 = 63 +SCLEX_VHDL = 64 +SCLEX_CAML = 65 +SCLEX_BLITZBASIC = 66 +SCLEX_PUREBASIC = 67 +SCLEX_HASKELL = 68 +SCLEX_PHPSCRIPT = 69 +SCLEX_TADS3 = 70 +SCLEX_REBOL = 71 +SCLEX_SMALLTALK = 72 +SCLEX_FLAGSHIP = 73 +SCLEX_CSOUND = 74 +SCLEX_FREEBASIC = 75 +SCLEX_INNOSETUP = 76 +SCLEX_OPAL = 77 +SCLEX_SPICE = 78 +SCLEX_D = 79 +SCLEX_CMAKE = 80 +SCLEX_GAP = 81 +SCLEX_PLM = 82 +SCLEX_PROGRESS = 83 +SCLEX_ABAQUS = 84 +SCLEX_ASYMPTOTE = 85 +SCLEX_R = 86 +SCLEX_MAGIK = 87 +SCLEX_POWERSHELL = 88 +SCLEX_MYSQL = 89 +SCLEX_PO = 90 +SCLEX_AUTOMATIC = 1000 +SCE_P_DEFAULT = 0 +SCE_P_COMMENTLINE = 1 +SCE_P_NUMBER = 2 +SCE_P_STRING = 3 +SCE_P_CHARACTER = 4 +SCE_P_WORD = 5 +SCE_P_TRIPLE = 6 +SCE_P_TRIPLEDOUBLE = 7 +SCE_P_CLASSNAME = 8 +SCE_P_DEFNAME = 9 +SCE_P_OPERATOR = 10 +SCE_P_IDENTIFIER = 11 +SCE_P_COMMENTBLOCK = 12 +SCE_P_STRINGEOL = 13 +SCE_P_WORD2 = 14 +SCE_P_DECORATOR = 15 +SCE_C_DEFAULT = 0 +SCE_C_COMMENT = 1 +SCE_C_COMMENTLINE = 2 +SCE_C_COMMENTDOC = 3 +SCE_C_NUMBER = 4 +SCE_C_WORD = 5 +SCE_C_STRING = 6 +SCE_C_CHARACTER = 7 +SCE_C_UUID = 8 +SCE_C_PREPROCESSOR = 9 +SCE_C_OPERATOR = 10 +SCE_C_IDENTIFIER = 11 +SCE_C_STRINGEOL = 12 +SCE_C_VERBATIM = 13 +SCE_C_REGEX = 14 +SCE_C_COMMENTLINEDOC = 15 +SCE_C_WORD2 = 16 +SCE_C_COMMENTDOCKEYWORD = 17 +SCE_C_COMMENTDOCKEYWORDERROR = 18 +SCE_C_GLOBALCLASS = 19 +SCE_D_DEFAULT = 0 +SCE_D_COMMENT = 1 +SCE_D_COMMENTLINE = 2 +SCE_D_COMMENTDOC = 3 +SCE_D_COMMENTNESTED = 4 +SCE_D_NUMBER = 5 +SCE_D_WORD = 6 +SCE_D_WORD2 = 7 +SCE_D_WORD3 = 8 +SCE_D_TYPEDEF = 9 +SCE_D_STRING = 10 +SCE_D_STRINGEOL = 11 +SCE_D_CHARACTER = 12 +SCE_D_OPERATOR = 13 +SCE_D_IDENTIFIER = 14 +SCE_D_COMMENTLINEDOC = 15 +SCE_D_COMMENTDOCKEYWORD = 16 +SCE_D_COMMENTDOCKEYWORDERROR = 17 +SCE_TCL_DEFAULT = 0 +SCE_TCL_COMMENT = 1 +SCE_TCL_COMMENTLINE = 2 +SCE_TCL_NUMBER = 3 +SCE_TCL_WORD_IN_QUOTE = 4 +SCE_TCL_IN_QUOTE = 5 +SCE_TCL_OPERATOR = 6 +SCE_TCL_IDENTIFIER = 7 +SCE_TCL_SUBSTITUTION = 8 +SCE_TCL_SUB_BRACE = 9 +SCE_TCL_MODIFIER = 10 +SCE_TCL_EXPAND = 11 +SCE_TCL_WORD = 12 +SCE_TCL_WORD2 = 13 +SCE_TCL_WORD3 = 14 +SCE_TCL_WORD4 = 15 +SCE_TCL_WORD5 = 16 +SCE_TCL_WORD6 = 17 +SCE_TCL_WORD7 = 18 +SCE_TCL_WORD8 = 19 +SCE_TCL_COMMENT_BOX = 20 +SCE_TCL_BLOCK_COMMENT = 21 +SCE_H_DEFAULT = 0 +SCE_H_TAG = 1 +SCE_H_TAGUNKNOWN = 2 +SCE_H_ATTRIBUTE = 3 +SCE_H_ATTRIBUTEUNKNOWN = 4 +SCE_H_NUMBER = 5 +SCE_H_DOUBLESTRING = 6 +SCE_H_SINGLESTRING = 7 +SCE_H_OTHER = 8 +SCE_H_COMMENT = 9 +SCE_H_ENTITY = 10 +SCE_H_TAGEND = 11 +SCE_H_XMLSTART = 12 +SCE_H_XMLEND = 13 +SCE_H_SCRIPT = 14 +SCE_H_ASP = 15 +SCE_H_ASPAT = 16 +SCE_H_CDATA = 17 +SCE_H_QUESTION = 18 +SCE_H_VALUE = 19 +SCE_H_XCCOMMENT = 20 +SCE_H_SGML_DEFAULT = 21 +SCE_H_SGML_COMMAND = 22 +SCE_H_SGML_1ST_PARAM = 23 +SCE_H_SGML_DOUBLESTRING = 24 +SCE_H_SGML_SIMPLESTRING = 25 +SCE_H_SGML_ERROR = 26 +SCE_H_SGML_SPECIAL = 27 +SCE_H_SGML_ENTITY = 28 +SCE_H_SGML_COMMENT = 29 +SCE_H_SGML_1ST_PARAM_COMMENT = 30 +SCE_H_SGML_BLOCK_DEFAULT = 31 +SCE_HJ_START = 40 +SCE_HJ_DEFAULT = 41 +SCE_HJ_COMMENT = 42 +SCE_HJ_COMMENTLINE = 43 +SCE_HJ_COMMENTDOC = 44 +SCE_HJ_NUMBER = 45 +SCE_HJ_WORD = 46 +SCE_HJ_KEYWORD = 47 +SCE_HJ_DOUBLESTRING = 48 +SCE_HJ_SINGLESTRING = 49 +SCE_HJ_SYMBOLS = 50 +SCE_HJ_STRINGEOL = 51 +SCE_HJ_REGEX = 52 +SCE_HJA_START = 55 +SCE_HJA_DEFAULT = 56 +SCE_HJA_COMMENT = 57 +SCE_HJA_COMMENTLINE = 58 +SCE_HJA_COMMENTDOC = 59 +SCE_HJA_NUMBER = 60 +SCE_HJA_WORD = 61 +SCE_HJA_KEYWORD = 62 +SCE_HJA_DOUBLESTRING = 63 +SCE_HJA_SINGLESTRING = 64 +SCE_HJA_SYMBOLS = 65 +SCE_HJA_STRINGEOL = 66 +SCE_HJA_REGEX = 67 
+SCE_HB_START = 70 +SCE_HB_DEFAULT = 71 +SCE_HB_COMMENTLINE = 72 +SCE_HB_NUMBER = 73 +SCE_HB_WORD = 74 +SCE_HB_STRING = 75 +SCE_HB_IDENTIFIER = 76 +SCE_HB_STRINGEOL = 77 +SCE_HBA_START = 80 +SCE_HBA_DEFAULT = 81 +SCE_HBA_COMMENTLINE = 82 +SCE_HBA_NUMBER = 83 +SCE_HBA_WORD = 84 +SCE_HBA_STRING = 85 +SCE_HBA_IDENTIFIER = 86 +SCE_HBA_STRINGEOL = 87 +SCE_HP_START = 90 +SCE_HP_DEFAULT = 91 +SCE_HP_COMMENTLINE = 92 +SCE_HP_NUMBER = 93 +SCE_HP_STRING = 94 +SCE_HP_CHARACTER = 95 +SCE_HP_WORD = 96 +SCE_HP_TRIPLE = 97 +SCE_HP_TRIPLEDOUBLE = 98 +SCE_HP_CLASSNAME = 99 +SCE_HP_DEFNAME = 100 +SCE_HP_OPERATOR = 101 +SCE_HP_IDENTIFIER = 102 +SCE_HPHP_COMPLEX_VARIABLE = 104 +SCE_HPA_START = 105 +SCE_HPA_DEFAULT = 106 +SCE_HPA_COMMENTLINE = 107 +SCE_HPA_NUMBER = 108 +SCE_HPA_STRING = 109 +SCE_HPA_CHARACTER = 110 +SCE_HPA_WORD = 111 +SCE_HPA_TRIPLE = 112 +SCE_HPA_TRIPLEDOUBLE = 113 +SCE_HPA_CLASSNAME = 114 +SCE_HPA_DEFNAME = 115 +SCE_HPA_OPERATOR = 116 +SCE_HPA_IDENTIFIER = 117 +SCE_HPHP_DEFAULT = 118 +SCE_HPHP_HSTRING = 119 +SCE_HPHP_SIMPLESTRING = 120 +SCE_HPHP_WORD = 121 +SCE_HPHP_NUMBER = 122 +SCE_HPHP_VARIABLE = 123 +SCE_HPHP_COMMENT = 124 +SCE_HPHP_COMMENTLINE = 125 +SCE_HPHP_HSTRING_VARIABLE = 126 +SCE_HPHP_OPERATOR = 127 +SCE_PL_DEFAULT = 0 +SCE_PL_ERROR = 1 +SCE_PL_COMMENTLINE = 2 +SCE_PL_POD = 3 +SCE_PL_NUMBER = 4 +SCE_PL_WORD = 5 +SCE_PL_STRING = 6 +SCE_PL_CHARACTER = 7 +SCE_PL_PUNCTUATION = 8 +SCE_PL_PREPROCESSOR = 9 +SCE_PL_OPERATOR = 10 +SCE_PL_IDENTIFIER = 11 +SCE_PL_SCALAR = 12 +SCE_PL_ARRAY = 13 +SCE_PL_HASH = 14 +SCE_PL_SYMBOLTABLE = 15 +SCE_PL_VARIABLE_INDEXER = 16 +SCE_PL_REGEX = 17 +SCE_PL_REGSUBST = 18 +SCE_PL_LONGQUOTE = 19 +SCE_PL_BACKTICKS = 20 +SCE_PL_DATASECTION = 21 +SCE_PL_HERE_DELIM = 22 +SCE_PL_HERE_Q = 23 +SCE_PL_HERE_QQ = 24 +SCE_PL_HERE_QX = 25 +SCE_PL_STRING_Q = 26 +SCE_PL_STRING_QQ = 27 +SCE_PL_STRING_QX = 28 +SCE_PL_STRING_QR = 29 +SCE_PL_STRING_QW = 30 +SCE_PL_POD_VERB = 31 +SCE_PL_SUB_PROTOTYPE = 40 +SCE_PL_FORMAT_IDENT = 41 +SCE_PL_FORMAT = 42 +SCE_RB_DEFAULT = 0 +SCE_RB_ERROR = 1 +SCE_RB_COMMENTLINE = 2 +SCE_RB_POD = 3 +SCE_RB_NUMBER = 4 +SCE_RB_WORD = 5 +SCE_RB_STRING = 6 +SCE_RB_CHARACTER = 7 +SCE_RB_CLASSNAME = 8 +SCE_RB_DEFNAME = 9 +SCE_RB_OPERATOR = 10 +SCE_RB_IDENTIFIER = 11 +SCE_RB_REGEX = 12 +SCE_RB_GLOBAL = 13 +SCE_RB_SYMBOL = 14 +SCE_RB_MODULE_NAME = 15 +SCE_RB_INSTANCE_VAR = 16 +SCE_RB_CLASS_VAR = 17 +SCE_RB_BACKTICKS = 18 +SCE_RB_DATASECTION = 19 +SCE_RB_HERE_DELIM = 20 +SCE_RB_HERE_Q = 21 +SCE_RB_HERE_QQ = 22 +SCE_RB_HERE_QX = 23 +SCE_RB_STRING_Q = 24 +SCE_RB_STRING_QQ = 25 +SCE_RB_STRING_QX = 26 +SCE_RB_STRING_QR = 27 +SCE_RB_STRING_QW = 28 +SCE_RB_WORD_DEMOTED = 29 +SCE_RB_STDIN = 30 +SCE_RB_STDOUT = 31 +SCE_RB_STDERR = 40 +SCE_RB_UPPER_BOUND = 41 +SCE_B_DEFAULT = 0 +SCE_B_COMMENT = 1 +SCE_B_NUMBER = 2 +SCE_B_KEYWORD = 3 +SCE_B_STRING = 4 +SCE_B_PREPROCESSOR = 5 +SCE_B_OPERATOR = 6 +SCE_B_IDENTIFIER = 7 +SCE_B_DATE = 8 +SCE_B_STRINGEOL = 9 +SCE_B_KEYWORD2 = 10 +SCE_B_KEYWORD3 = 11 +SCE_B_KEYWORD4 = 12 +SCE_B_CONSTANT = 13 +SCE_B_ASM = 14 +SCE_B_LABEL = 15 +SCE_B_ERROR = 16 +SCE_B_HEXNUMBER = 17 +SCE_B_BINNUMBER = 18 +SCE_PROPS_DEFAULT = 0 +SCE_PROPS_COMMENT = 1 +SCE_PROPS_SECTION = 2 +SCE_PROPS_ASSIGNMENT = 3 +SCE_PROPS_DEFVAL = 4 +SCE_PROPS_KEY = 5 +SCE_L_DEFAULT = 0 +SCE_L_COMMAND = 1 +SCE_L_TAG = 2 +SCE_L_MATH = 3 +SCE_L_COMMENT = 4 +SCE_LUA_DEFAULT = 0 +SCE_LUA_COMMENT = 1 +SCE_LUA_COMMENTLINE = 2 +SCE_LUA_COMMENTDOC = 3 +SCE_LUA_NUMBER = 4 +SCE_LUA_WORD = 5 +SCE_LUA_STRING = 6 +SCE_LUA_CHARACTER = 7 +SCE_LUA_LITERALSTRING = 8 +SCE_LUA_PREPROCESSOR 
= 9 +SCE_LUA_OPERATOR = 10 +SCE_LUA_IDENTIFIER = 11 +SCE_LUA_STRINGEOL = 12 +SCE_LUA_WORD2 = 13 +SCE_LUA_WORD3 = 14 +SCE_LUA_WORD4 = 15 +SCE_LUA_WORD5 = 16 +SCE_LUA_WORD6 = 17 +SCE_LUA_WORD7 = 18 +SCE_LUA_WORD8 = 19 +SCE_ERR_DEFAULT = 0 +SCE_ERR_PYTHON = 1 +SCE_ERR_GCC = 2 +SCE_ERR_MS = 3 +SCE_ERR_CMD = 4 +SCE_ERR_BORLAND = 5 +SCE_ERR_PERL = 6 +SCE_ERR_NET = 7 +SCE_ERR_LUA = 8 +SCE_ERR_CTAG = 9 +SCE_ERR_DIFF_CHANGED = 10 +SCE_ERR_DIFF_ADDITION = 11 +SCE_ERR_DIFF_DELETION = 12 +SCE_ERR_DIFF_MESSAGE = 13 +SCE_ERR_PHP = 14 +SCE_ERR_ELF = 15 +SCE_ERR_IFC = 16 +SCE_ERR_IFORT = 17 +SCE_ERR_ABSF = 18 +SCE_ERR_TIDY = 19 +SCE_ERR_JAVA_STACK = 20 +SCE_ERR_VALUE = 21 +SCE_BAT_DEFAULT = 0 +SCE_BAT_COMMENT = 1 +SCE_BAT_WORD = 2 +SCE_BAT_LABEL = 3 +SCE_BAT_HIDE = 4 +SCE_BAT_COMMAND = 5 +SCE_BAT_IDENTIFIER = 6 +SCE_BAT_OPERATOR = 7 +SCE_MAKE_DEFAULT = 0 +SCE_MAKE_COMMENT = 1 +SCE_MAKE_PREPROCESSOR = 2 +SCE_MAKE_IDENTIFIER = 3 +SCE_MAKE_OPERATOR = 4 +SCE_MAKE_TARGET = 5 +SCE_MAKE_IDEOL = 9 +SCE_DIFF_DEFAULT = 0 +SCE_DIFF_COMMENT = 1 +SCE_DIFF_COMMAND = 2 +SCE_DIFF_HEADER = 3 +SCE_DIFF_POSITION = 4 +SCE_DIFF_DELETED = 5 +SCE_DIFF_ADDED = 6 +SCE_DIFF_CHANGED = 7 +SCE_CONF_DEFAULT = 0 +SCE_CONF_COMMENT = 1 +SCE_CONF_NUMBER = 2 +SCE_CONF_IDENTIFIER = 3 +SCE_CONF_EXTENSION = 4 +SCE_CONF_PARAMETER = 5 +SCE_CONF_STRING = 6 +SCE_CONF_OPERATOR = 7 +SCE_CONF_IP = 8 +SCE_CONF_DIRECTIVE = 9 +SCE_AVE_DEFAULT = 0 +SCE_AVE_COMMENT = 1 +SCE_AVE_NUMBER = 2 +SCE_AVE_WORD = 3 +SCE_AVE_STRING = 6 +SCE_AVE_ENUM = 7 +SCE_AVE_STRINGEOL = 8 +SCE_AVE_IDENTIFIER = 9 +SCE_AVE_OPERATOR = 10 +SCE_AVE_WORD1 = 11 +SCE_AVE_WORD2 = 12 +SCE_AVE_WORD3 = 13 +SCE_AVE_WORD4 = 14 +SCE_AVE_WORD5 = 15 +SCE_AVE_WORD6 = 16 +SCE_ADA_DEFAULT = 0 +SCE_ADA_WORD = 1 +SCE_ADA_IDENTIFIER = 2 +SCE_ADA_NUMBER = 3 +SCE_ADA_DELIMITER = 4 +SCE_ADA_CHARACTER = 5 +SCE_ADA_CHARACTEREOL = 6 +SCE_ADA_STRING = 7 +SCE_ADA_STRINGEOL = 8 +SCE_ADA_LABEL = 9 +SCE_ADA_COMMENTLINE = 10 +SCE_ADA_ILLEGAL = 11 +SCE_BAAN_DEFAULT = 0 +SCE_BAAN_COMMENT = 1 +SCE_BAAN_COMMENTDOC = 2 +SCE_BAAN_NUMBER = 3 +SCE_BAAN_WORD = 4 +SCE_BAAN_STRING = 5 +SCE_BAAN_PREPROCESSOR = 6 +SCE_BAAN_OPERATOR = 7 +SCE_BAAN_IDENTIFIER = 8 +SCE_BAAN_STRINGEOL = 9 +SCE_BAAN_WORD2 = 10 +SCE_LISP_DEFAULT = 0 +SCE_LISP_COMMENT = 1 +SCE_LISP_NUMBER = 2 +SCE_LISP_KEYWORD = 3 +SCE_LISP_KEYWORD_KW = 4 +SCE_LISP_SYMBOL = 5 +SCE_LISP_STRING = 6 +SCE_LISP_STRINGEOL = 8 +SCE_LISP_IDENTIFIER = 9 +SCE_LISP_OPERATOR = 10 +SCE_LISP_SPECIAL = 11 +SCE_LISP_MULTI_COMMENT = 12 +SCE_EIFFEL_DEFAULT = 0 +SCE_EIFFEL_COMMENTLINE = 1 +SCE_EIFFEL_NUMBER = 2 +SCE_EIFFEL_WORD = 3 +SCE_EIFFEL_STRING = 4 +SCE_EIFFEL_CHARACTER = 5 +SCE_EIFFEL_OPERATOR = 6 +SCE_EIFFEL_IDENTIFIER = 7 +SCE_EIFFEL_STRINGEOL = 8 +SCE_NNCRONTAB_DEFAULT = 0 +SCE_NNCRONTAB_COMMENT = 1 +SCE_NNCRONTAB_TASK = 2 +SCE_NNCRONTAB_SECTION = 3 +SCE_NNCRONTAB_KEYWORD = 4 +SCE_NNCRONTAB_MODIFIER = 5 +SCE_NNCRONTAB_ASTERISK = 6 +SCE_NNCRONTAB_NUMBER = 7 +SCE_NNCRONTAB_STRING = 8 +SCE_NNCRONTAB_ENVIRONMENT = 9 +SCE_NNCRONTAB_IDENTIFIER = 10 +SCE_FORTH_DEFAULT = 0 +SCE_FORTH_COMMENT = 1 +SCE_FORTH_COMMENT_ML = 2 +SCE_FORTH_IDENTIFIER = 3 +SCE_FORTH_CONTROL = 4 +SCE_FORTH_KEYWORD = 5 +SCE_FORTH_DEFWORD = 6 +SCE_FORTH_PREWORD1 = 7 +SCE_FORTH_PREWORD2 = 8 +SCE_FORTH_NUMBER = 9 +SCE_FORTH_STRING = 10 +SCE_FORTH_LOCALE = 11 +SCE_MATLAB_DEFAULT = 0 +SCE_MATLAB_COMMENT = 1 +SCE_MATLAB_COMMAND = 2 +SCE_MATLAB_NUMBER = 3 +SCE_MATLAB_KEYWORD = 4 +SCE_MATLAB_STRING = 5 +SCE_MATLAB_OPERATOR = 6 +SCE_MATLAB_IDENTIFIER = 7 +SCE_MATLAB_DOUBLEQUOTESTRING = 8 +SCE_SCRIPTOL_DEFAULT = 0 
+SCE_SCRIPTOL_WHITE = 1 +SCE_SCRIPTOL_COMMENTLINE = 2 +SCE_SCRIPTOL_PERSISTENT = 3 +SCE_SCRIPTOL_CSTYLE = 4 +SCE_SCRIPTOL_COMMENTBLOCK = 5 +SCE_SCRIPTOL_NUMBER = 6 +SCE_SCRIPTOL_STRING = 7 +SCE_SCRIPTOL_CHARACTER = 8 +SCE_SCRIPTOL_STRINGEOL = 9 +SCE_SCRIPTOL_KEYWORD = 10 +SCE_SCRIPTOL_OPERATOR = 11 +SCE_SCRIPTOL_IDENTIFIER = 12 +SCE_SCRIPTOL_TRIPLE = 13 +SCE_SCRIPTOL_CLASSNAME = 14 +SCE_SCRIPTOL_PREPROCESSOR = 15 +SCE_ASM_DEFAULT = 0 +SCE_ASM_COMMENT = 1 +SCE_ASM_NUMBER = 2 +SCE_ASM_STRING = 3 +SCE_ASM_OPERATOR = 4 +SCE_ASM_IDENTIFIER = 5 +SCE_ASM_CPUINSTRUCTION = 6 +SCE_ASM_MATHINSTRUCTION = 7 +SCE_ASM_REGISTER = 8 +SCE_ASM_DIRECTIVE = 9 +SCE_ASM_DIRECTIVEOPERAND = 10 +SCE_ASM_COMMENTBLOCK = 11 +SCE_ASM_CHARACTER = 12 +SCE_ASM_STRINGEOL = 13 +SCE_ASM_EXTINSTRUCTION = 14 +SCE_F_DEFAULT = 0 +SCE_F_COMMENT = 1 +SCE_F_NUMBER = 2 +SCE_F_STRING1 = 3 +SCE_F_STRING2 = 4 +SCE_F_STRINGEOL = 5 +SCE_F_OPERATOR = 6 +SCE_F_IDENTIFIER = 7 +SCE_F_WORD = 8 +SCE_F_WORD2 = 9 +SCE_F_WORD3 = 10 +SCE_F_PREPROCESSOR = 11 +SCE_F_OPERATOR2 = 12 +SCE_F_LABEL = 13 +SCE_F_CONTINUATION = 14 +SCE_CSS_DEFAULT = 0 +SCE_CSS_TAG = 1 +SCE_CSS_CLASS = 2 +SCE_CSS_PSEUDOCLASS = 3 +SCE_CSS_UNKNOWN_PSEUDOCLASS = 4 +SCE_CSS_OPERATOR = 5 +SCE_CSS_IDENTIFIER = 6 +SCE_CSS_UNKNOWN_IDENTIFIER = 7 +SCE_CSS_VALUE = 8 +SCE_CSS_COMMENT = 9 +SCE_CSS_ID = 10 +SCE_CSS_IMPORTANT = 11 +SCE_CSS_DIRECTIVE = 12 +SCE_CSS_DOUBLESTRING = 13 +SCE_CSS_SINGLESTRING = 14 +SCE_CSS_IDENTIFIER2 = 15 +SCE_CSS_ATTRIBUTE = 16 +SCE_CSS_IDENTIFIER3 = 17 +SCE_CSS_PSEUDOELEMENT = 18 +SCE_CSS_EXTENDED_IDENTIFIER = 19 +SCE_CSS_EXTENDED_PSEUDOCLASS = 20 +SCE_CSS_EXTENDED_PSEUDOELEMENT = 21 +SCE_POV_DEFAULT = 0 +SCE_POV_COMMENT = 1 +SCE_POV_COMMENTLINE = 2 +SCE_POV_NUMBER = 3 +SCE_POV_OPERATOR = 4 +SCE_POV_IDENTIFIER = 5 +SCE_POV_STRING = 6 +SCE_POV_STRINGEOL = 7 +SCE_POV_DIRECTIVE = 8 +SCE_POV_BADDIRECTIVE = 9 +SCE_POV_WORD2 = 10 +SCE_POV_WORD3 = 11 +SCE_POV_WORD4 = 12 +SCE_POV_WORD5 = 13 +SCE_POV_WORD6 = 14 +SCE_POV_WORD7 = 15 +SCE_POV_WORD8 = 16 +SCE_LOUT_DEFAULT = 0 +SCE_LOUT_COMMENT = 1 +SCE_LOUT_NUMBER = 2 +SCE_LOUT_WORD = 3 +SCE_LOUT_WORD2 = 4 +SCE_LOUT_WORD3 = 5 +SCE_LOUT_WORD4 = 6 +SCE_LOUT_STRING = 7 +SCE_LOUT_OPERATOR = 8 +SCE_LOUT_IDENTIFIER = 9 +SCE_LOUT_STRINGEOL = 10 +SCE_ESCRIPT_DEFAULT = 0 +SCE_ESCRIPT_COMMENT = 1 +SCE_ESCRIPT_COMMENTLINE = 2 +SCE_ESCRIPT_COMMENTDOC = 3 +SCE_ESCRIPT_NUMBER = 4 +SCE_ESCRIPT_WORD = 5 +SCE_ESCRIPT_STRING = 6 +SCE_ESCRIPT_OPERATOR = 7 +SCE_ESCRIPT_IDENTIFIER = 8 +SCE_ESCRIPT_BRACE = 9 +SCE_ESCRIPT_WORD2 = 10 +SCE_ESCRIPT_WORD3 = 11 +SCE_PS_DEFAULT = 0 +SCE_PS_COMMENT = 1 +SCE_PS_DSC_COMMENT = 2 +SCE_PS_DSC_VALUE = 3 +SCE_PS_NUMBER = 4 +SCE_PS_NAME = 5 +SCE_PS_KEYWORD = 6 +SCE_PS_LITERAL = 7 +SCE_PS_IMMEVAL = 8 +SCE_PS_PAREN_ARRAY = 9 +SCE_PS_PAREN_DICT = 10 +SCE_PS_PAREN_PROC = 11 +SCE_PS_TEXT = 12 +SCE_PS_HEXSTRING = 13 +SCE_PS_BASE85STRING = 14 +SCE_PS_BADSTRINGCHAR = 15 +SCE_NSIS_DEFAULT = 0 +SCE_NSIS_COMMENT = 1 +SCE_NSIS_STRINGDQ = 2 +SCE_NSIS_STRINGLQ = 3 +SCE_NSIS_STRINGRQ = 4 +SCE_NSIS_FUNCTION = 5 +SCE_NSIS_VARIABLE = 6 +SCE_NSIS_LABEL = 7 +SCE_NSIS_USERDEFINED = 8 +SCE_NSIS_SECTIONDEF = 9 +SCE_NSIS_SUBSECTIONDEF = 10 +SCE_NSIS_IFDEFINEDEF = 11 +SCE_NSIS_MACRODEF = 12 +SCE_NSIS_STRINGVAR = 13 +SCE_NSIS_NUMBER = 14 +SCE_NSIS_SECTIONGROUP = 15 +SCE_NSIS_PAGEEX = 16 +SCE_NSIS_FUNCTIONDEF = 17 +SCE_NSIS_COMMENTBOX = 18 +SCE_MMIXAL_LEADWS = 0 +SCE_MMIXAL_COMMENT = 1 +SCE_MMIXAL_LABEL = 2 +SCE_MMIXAL_OPCODE = 3 +SCE_MMIXAL_OPCODE_PRE = 4 +SCE_MMIXAL_OPCODE_VALID = 5 +SCE_MMIXAL_OPCODE_UNKNOWN = 6 
+SCE_MMIXAL_OPCODE_POST = 7 +SCE_MMIXAL_OPERANDS = 8 +SCE_MMIXAL_NUMBER = 9 +SCE_MMIXAL_REF = 10 +SCE_MMIXAL_CHAR = 11 +SCE_MMIXAL_STRING = 12 +SCE_MMIXAL_REGISTER = 13 +SCE_MMIXAL_HEX = 14 +SCE_MMIXAL_OPERATOR = 15 +SCE_MMIXAL_SYMBOL = 16 +SCE_MMIXAL_INCLUDE = 17 +SCE_CLW_DEFAULT = 0 +SCE_CLW_LABEL = 1 +SCE_CLW_COMMENT = 2 +SCE_CLW_STRING = 3 +SCE_CLW_USER_IDENTIFIER = 4 +SCE_CLW_INTEGER_CONSTANT = 5 +SCE_CLW_REAL_CONSTANT = 6 +SCE_CLW_PICTURE_STRING = 7 +SCE_CLW_KEYWORD = 8 +SCE_CLW_COMPILER_DIRECTIVE = 9 +SCE_CLW_RUNTIME_EXPRESSIONS = 10 +SCE_CLW_BUILTIN_PROCEDURES_FUNCTION = 11 +SCE_CLW_STRUCTURE_DATA_TYPE = 12 +SCE_CLW_ATTRIBUTE = 13 +SCE_CLW_STANDARD_EQUATE = 14 +SCE_CLW_ERROR = 15 +SCE_CLW_DEPRECATED = 16 +SCE_LOT_DEFAULT = 0 +SCE_LOT_HEADER = 1 +SCE_LOT_BREAK = 2 +SCE_LOT_SET = 3 +SCE_LOT_PASS = 4 +SCE_LOT_FAIL = 5 +SCE_LOT_ABORT = 6 +SCE_YAML_DEFAULT = 0 +SCE_YAML_COMMENT = 1 +SCE_YAML_IDENTIFIER = 2 +SCE_YAML_KEYWORD = 3 +SCE_YAML_NUMBER = 4 +SCE_YAML_REFERENCE = 5 +SCE_YAML_DOCUMENT = 6 +SCE_YAML_TEXT = 7 +SCE_YAML_ERROR = 8 +SCE_YAML_OPERATOR = 9 +SCE_TEX_DEFAULT = 0 +SCE_TEX_SPECIAL = 1 +SCE_TEX_GROUP = 2 +SCE_TEX_SYMBOL = 3 +SCE_TEX_COMMAND = 4 +SCE_TEX_TEXT = 5 +SCE_METAPOST_DEFAULT = 0 +SCE_METAPOST_SPECIAL = 1 +SCE_METAPOST_GROUP = 2 +SCE_METAPOST_SYMBOL = 3 +SCE_METAPOST_COMMAND = 4 +SCE_METAPOST_TEXT = 5 +SCE_METAPOST_EXTRA = 6 +SCE_ERLANG_DEFAULT = 0 +SCE_ERLANG_COMMENT = 1 +SCE_ERLANG_VARIABLE = 2 +SCE_ERLANG_NUMBER = 3 +SCE_ERLANG_KEYWORD = 4 +SCE_ERLANG_STRING = 5 +SCE_ERLANG_OPERATOR = 6 +SCE_ERLANG_ATOM = 7 +SCE_ERLANG_FUNCTION_NAME = 8 +SCE_ERLANG_CHARACTER = 9 +SCE_ERLANG_MACRO = 10 +SCE_ERLANG_RECORD = 11 +SCE_ERLANG_SEPARATOR = 12 +SCE_ERLANG_NODE_NAME = 13 +SCE_ERLANG_UNKNOWN = 31 +SCE_MSSQL_DEFAULT = 0 +SCE_MSSQL_COMMENT = 1 +SCE_MSSQL_LINE_COMMENT = 2 +SCE_MSSQL_NUMBER = 3 +SCE_MSSQL_STRING = 4 +SCE_MSSQL_OPERATOR = 5 +SCE_MSSQL_IDENTIFIER = 6 +SCE_MSSQL_VARIABLE = 7 +SCE_MSSQL_COLUMN_NAME = 8 +SCE_MSSQL_STATEMENT = 9 +SCE_MSSQL_DATATYPE = 10 +SCE_MSSQL_SYSTABLE = 11 +SCE_MSSQL_GLOBAL_VARIABLE = 12 +SCE_MSSQL_FUNCTION = 13 +SCE_MSSQL_STORED_PROCEDURE = 14 +SCE_MSSQL_DEFAULT_PREF_DATATYPE = 15 +SCE_MSSQL_COLUMN_NAME_2 = 16 +SCE_V_DEFAULT = 0 +SCE_V_COMMENT = 1 +SCE_V_COMMENTLINE = 2 +SCE_V_COMMENTLINEBANG = 3 +SCE_V_NUMBER = 4 +SCE_V_WORD = 5 +SCE_V_STRING = 6 +SCE_V_WORD2 = 7 +SCE_V_WORD3 = 8 +SCE_V_PREPROCESSOR = 9 +SCE_V_OPERATOR = 10 +SCE_V_IDENTIFIER = 11 +SCE_V_STRINGEOL = 12 +SCE_V_USER = 19 +SCE_KIX_DEFAULT = 0 +SCE_KIX_COMMENT = 1 +SCE_KIX_STRING1 = 2 +SCE_KIX_STRING2 = 3 +SCE_KIX_NUMBER = 4 +SCE_KIX_VAR = 5 +SCE_KIX_MACRO = 6 +SCE_KIX_KEYWORD = 7 +SCE_KIX_FUNCTIONS = 8 +SCE_KIX_OPERATOR = 9 +SCE_KIX_IDENTIFIER = 31 +SCE_GC_DEFAULT = 0 +SCE_GC_COMMENTLINE = 1 +SCE_GC_COMMENTBLOCK = 2 +SCE_GC_GLOBAL = 3 +SCE_GC_EVENT = 4 +SCE_GC_ATTRIBUTE = 5 +SCE_GC_CONTROL = 6 +SCE_GC_COMMAND = 7 +SCE_GC_STRING = 8 +SCE_GC_OPERATOR = 9 +SCE_SN_DEFAULT = 0 +SCE_SN_CODE = 1 +SCE_SN_COMMENTLINE = 2 +SCE_SN_COMMENTLINEBANG = 3 +SCE_SN_NUMBER = 4 +SCE_SN_WORD = 5 +SCE_SN_STRING = 6 +SCE_SN_WORD2 = 7 +SCE_SN_WORD3 = 8 +SCE_SN_PREPROCESSOR = 9 +SCE_SN_OPERATOR = 10 +SCE_SN_IDENTIFIER = 11 +SCE_SN_STRINGEOL = 12 +SCE_SN_REGEXTAG = 13 +SCE_SN_SIGNAL = 14 +SCE_SN_USER = 19 +SCE_AU3_DEFAULT = 0 +SCE_AU3_COMMENT = 1 +SCE_AU3_COMMENTBLOCK = 2 +SCE_AU3_NUMBER = 3 +SCE_AU3_FUNCTION = 4 +SCE_AU3_KEYWORD = 5 +SCE_AU3_MACRO = 6 +SCE_AU3_STRING = 7 +SCE_AU3_OPERATOR = 8 +SCE_AU3_VARIABLE = 9 +SCE_AU3_SENT = 10 +SCE_AU3_PREPROCESSOR = 11 +SCE_AU3_SPECIAL = 12 +SCE_AU3_EXPAND = 13 
+SCE_AU3_COMOBJ = 14 +SCE_AU3_UDF = 15 +SCE_APDL_DEFAULT = 0 +SCE_APDL_COMMENT = 1 +SCE_APDL_COMMENTBLOCK = 2 +SCE_APDL_NUMBER = 3 +SCE_APDL_STRING = 4 +SCE_APDL_OPERATOR = 5 +SCE_APDL_WORD = 6 +SCE_APDL_PROCESSOR = 7 +SCE_APDL_COMMAND = 8 +SCE_APDL_SLASHCOMMAND = 9 +SCE_APDL_STARCOMMAND = 10 +SCE_APDL_ARGUMENT = 11 +SCE_APDL_FUNCTION = 12 +SCE_SH_DEFAULT = 0 +SCE_SH_ERROR = 1 +SCE_SH_COMMENTLINE = 2 +SCE_SH_NUMBER = 3 +SCE_SH_WORD = 4 +SCE_SH_STRING = 5 +SCE_SH_CHARACTER = 6 +SCE_SH_OPERATOR = 7 +SCE_SH_IDENTIFIER = 8 +SCE_SH_SCALAR = 9 +SCE_SH_PARAM = 10 +SCE_SH_BACKTICKS = 11 +SCE_SH_HERE_DELIM = 12 +SCE_SH_HERE_Q = 13 +SCE_ASN1_DEFAULT = 0 +SCE_ASN1_COMMENT = 1 +SCE_ASN1_IDENTIFIER = 2 +SCE_ASN1_STRING = 3 +SCE_ASN1_OID = 4 +SCE_ASN1_SCALAR = 5 +SCE_ASN1_KEYWORD = 6 +SCE_ASN1_ATTRIBUTE = 7 +SCE_ASN1_DESCRIPTOR = 8 +SCE_ASN1_TYPE = 9 +SCE_ASN1_OPERATOR = 10 +SCE_VHDL_DEFAULT = 0 +SCE_VHDL_COMMENT = 1 +SCE_VHDL_COMMENTLINEBANG = 2 +SCE_VHDL_NUMBER = 3 +SCE_VHDL_STRING = 4 +SCE_VHDL_OPERATOR = 5 +SCE_VHDL_IDENTIFIER = 6 +SCE_VHDL_STRINGEOL = 7 +SCE_VHDL_KEYWORD = 8 +SCE_VHDL_STDOPERATOR = 9 +SCE_VHDL_ATTRIBUTE = 10 +SCE_VHDL_STDFUNCTION = 11 +SCE_VHDL_STDPACKAGE = 12 +SCE_VHDL_STDTYPE = 13 +SCE_VHDL_USERWORD = 14 +SCE_CAML_DEFAULT = 0 +SCE_CAML_IDENTIFIER = 1 +SCE_CAML_TAGNAME = 2 +SCE_CAML_KEYWORD = 3 +SCE_CAML_KEYWORD2 = 4 +SCE_CAML_KEYWORD3 = 5 +SCE_CAML_LINENUM = 6 +SCE_CAML_OPERATOR = 7 +SCE_CAML_NUMBER = 8 +SCE_CAML_CHAR = 9 +SCE_CAML_STRING = 11 +SCE_CAML_COMMENT = 12 +SCE_CAML_COMMENT1 = 13 +SCE_CAML_COMMENT2 = 14 +SCE_CAML_COMMENT3 = 15 +SCE_HA_DEFAULT = 0 +SCE_HA_IDENTIFIER = 1 +SCE_HA_KEYWORD = 2 +SCE_HA_NUMBER = 3 +SCE_HA_STRING = 4 +SCE_HA_CHARACTER = 5 +SCE_HA_CLASS = 6 +SCE_HA_MODULE = 7 +SCE_HA_CAPITAL = 8 +SCE_HA_DATA = 9 +SCE_HA_IMPORT = 10 +SCE_HA_OPERATOR = 11 +SCE_HA_INSTANCE = 12 +SCE_HA_COMMENTLINE = 13 +SCE_HA_COMMENTBLOCK = 14 +SCE_HA_COMMENTBLOCK2 = 15 +SCE_HA_COMMENTBLOCK3 = 16 +SCE_T3_DEFAULT = 0 +SCE_T3_X_DEFAULT = 1 +SCE_T3_PREPROCESSOR = 2 +SCE_T3_BLOCK_COMMENT = 3 +SCE_T3_LINE_COMMENT = 4 +SCE_T3_OPERATOR = 5 +SCE_T3_KEYWORD = 6 +SCE_T3_NUMBER = 7 +SCE_T3_IDENTIFIER = 8 +SCE_T3_S_STRING = 9 +SCE_T3_D_STRING = 10 +SCE_T3_X_STRING = 11 +SCE_T3_LIB_DIRECTIVE = 12 +SCE_T3_MSG_PARAM = 13 +SCE_T3_HTML_TAG = 14 +SCE_T3_HTML_DEFAULT = 15 +SCE_T3_HTML_STRING = 16 +SCE_T3_USER1 = 17 +SCE_T3_USER2 = 18 +SCE_T3_USER3 = 19 +SCE_T3_BRACE = 20 +SCE_REBOL_DEFAULT = 0 +SCE_REBOL_COMMENTLINE = 1 +SCE_REBOL_COMMENTBLOCK = 2 +SCE_REBOL_PREFACE = 3 +SCE_REBOL_OPERATOR = 4 +SCE_REBOL_CHARACTER = 5 +SCE_REBOL_QUOTEDSTRING = 6 +SCE_REBOL_BRACEDSTRING = 7 +SCE_REBOL_NUMBER = 8 +SCE_REBOL_PAIR = 9 +SCE_REBOL_TUPLE = 10 +SCE_REBOL_BINARY = 11 +SCE_REBOL_MONEY = 12 +SCE_REBOL_ISSUE = 13 +SCE_REBOL_TAG = 14 +SCE_REBOL_FILE = 15 +SCE_REBOL_EMAIL = 16 +SCE_REBOL_URL = 17 +SCE_REBOL_DATE = 18 +SCE_REBOL_TIME = 19 +SCE_REBOL_IDENTIFIER = 20 +SCE_REBOL_WORD = 21 +SCE_REBOL_WORD2 = 22 +SCE_REBOL_WORD3 = 23 +SCE_REBOL_WORD4 = 24 +SCE_REBOL_WORD5 = 25 +SCE_REBOL_WORD6 = 26 +SCE_REBOL_WORD7 = 27 +SCE_REBOL_WORD8 = 28 +SCE_SQL_DEFAULT = 0 +SCE_SQL_COMMENT = 1 +SCE_SQL_COMMENTLINE = 2 +SCE_SQL_COMMENTDOC = 3 +SCE_SQL_NUMBER = 4 +SCE_SQL_WORD = 5 +SCE_SQL_STRING = 6 +SCE_SQL_CHARACTER = 7 +SCE_SQL_SQLPLUS = 8 +SCE_SQL_SQLPLUS_PROMPT = 9 +SCE_SQL_OPERATOR = 10 +SCE_SQL_IDENTIFIER = 11 +SCE_SQL_SQLPLUS_COMMENT = 13 +SCE_SQL_COMMENTLINEDOC = 15 +SCE_SQL_WORD2 = 16 +SCE_SQL_COMMENTDOCKEYWORD = 17 +SCE_SQL_COMMENTDOCKEYWORDERROR = 18 +SCE_SQL_USER1 = 19 +SCE_SQL_USER2 = 20 +SCE_SQL_USER3 = 21 
+SCE_SQL_USER4 = 22 +SCE_SQL_QUOTEDIDENTIFIER = 23 +SCE_ST_DEFAULT = 0 +SCE_ST_STRING = 1 +SCE_ST_NUMBER = 2 +SCE_ST_COMMENT = 3 +SCE_ST_SYMBOL = 4 +SCE_ST_BINARY = 5 +SCE_ST_BOOL = 6 +SCE_ST_SELF = 7 +SCE_ST_SUPER = 8 +SCE_ST_NIL = 9 +SCE_ST_GLOBAL = 10 +SCE_ST_RETURN = 11 +SCE_ST_SPECIAL = 12 +SCE_ST_KWSEND = 13 +SCE_ST_ASSIGN = 14 +SCE_ST_CHARACTER = 15 +SCE_ST_SPEC_SEL = 16 +SCE_FS_DEFAULT = 0 +SCE_FS_COMMENT = 1 +SCE_FS_COMMENTLINE = 2 +SCE_FS_COMMENTDOC = 3 +SCE_FS_COMMENTLINEDOC = 4 +SCE_FS_COMMENTDOCKEYWORD = 5 +SCE_FS_COMMENTDOCKEYWORDERROR = 6 +SCE_FS_KEYWORD = 7 +SCE_FS_KEYWORD2 = 8 +SCE_FS_KEYWORD3 = 9 +SCE_FS_KEYWORD4 = 10 +SCE_FS_NUMBER = 11 +SCE_FS_STRING = 12 +SCE_FS_PREPROCESSOR = 13 +SCE_FS_OPERATOR = 14 +SCE_FS_IDENTIFIER = 15 +SCE_FS_DATE = 16 +SCE_FS_STRINGEOL = 17 +SCE_FS_CONSTANT = 18 +SCE_FS_ASM = 19 +SCE_FS_LABEL = 20 +SCE_FS_ERROR = 21 +SCE_FS_HEXNUMBER = 22 +SCE_FS_BINNUMBER = 23 +SCE_CSOUND_DEFAULT = 0 +SCE_CSOUND_COMMENT = 1 +SCE_CSOUND_NUMBER = 2 +SCE_CSOUND_OPERATOR = 3 +SCE_CSOUND_INSTR = 4 +SCE_CSOUND_IDENTIFIER = 5 +SCE_CSOUND_OPCODE = 6 +SCE_CSOUND_HEADERSTMT = 7 +SCE_CSOUND_USERKEYWORD = 8 +SCE_CSOUND_COMMENTBLOCK = 9 +SCE_CSOUND_PARAM = 10 +SCE_CSOUND_ARATE_VAR = 11 +SCE_CSOUND_KRATE_VAR = 12 +SCE_CSOUND_IRATE_VAR = 13 +SCE_CSOUND_GLOBAL_VAR = 14 +SCE_CSOUND_STRINGEOL = 15 +SCE_INNO_DEFAULT = 0 +SCE_INNO_COMMENT = 1 +SCE_INNO_KEYWORD = 2 +SCE_INNO_PARAMETER = 3 +SCE_INNO_SECTION = 4 +SCE_INNO_PREPROC = 5 +SCE_INNO_PREPROC_INLINE = 6 +SCE_INNO_COMMENT_PASCAL = 7 +SCE_INNO_KEYWORD_PASCAL = 8 +SCE_INNO_KEYWORD_USER = 9 +SCE_INNO_STRING_DOUBLE = 10 +SCE_INNO_STRING_SINGLE = 11 +SCE_INNO_IDENTIFIER = 12 +SCE_OPAL_SPACE = 0 +SCE_OPAL_COMMENT_BLOCK = 1 +SCE_OPAL_COMMENT_LINE = 2 +SCE_OPAL_INTEGER = 3 +SCE_OPAL_KEYWORD = 4 +SCE_OPAL_SORT = 5 +SCE_OPAL_STRING = 6 +SCE_OPAL_PAR = 7 +SCE_OPAL_BOOL_CONST = 8 +SCE_OPAL_DEFAULT = 32 +SCE_SPICE_DEFAULT = 0 +SCE_SPICE_IDENTIFIER = 1 +SCE_SPICE_KEYWORD = 2 +SCE_SPICE_KEYWORD2 = 3 +SCE_SPICE_KEYWORD3 = 4 +SCE_SPICE_NUMBER = 5 +SCE_SPICE_DELIMITER = 6 +SCE_SPICE_VALUE = 7 +SCE_SPICE_COMMENTLINE = 8 +SCE_CMAKE_DEFAULT = 0 +SCE_CMAKE_COMMENT = 1 +SCE_CMAKE_STRINGDQ = 2 +SCE_CMAKE_STRINGLQ = 3 +SCE_CMAKE_STRINGRQ = 4 +SCE_CMAKE_COMMANDS = 5 +SCE_CMAKE_PARAMETERS = 6 +SCE_CMAKE_VARIABLE = 7 +SCE_CMAKE_USERDEFINED = 8 +SCE_CMAKE_WHILEDEF = 9 +SCE_CMAKE_FOREACHDEF = 10 +SCE_CMAKE_IFDEFINEDEF = 11 +SCE_CMAKE_MACRODEF = 12 +SCE_CMAKE_STRINGVAR = 13 +SCE_CMAKE_NUMBER = 14 +SCE_GAP_DEFAULT = 0 +SCE_GAP_IDENTIFIER = 1 +SCE_GAP_KEYWORD = 2 +SCE_GAP_KEYWORD2 = 3 +SCE_GAP_KEYWORD3 = 4 +SCE_GAP_KEYWORD4 = 5 +SCE_GAP_STRING = 6 +SCE_GAP_CHAR = 7 +SCE_GAP_OPERATOR = 8 +SCE_GAP_COMMENT = 9 +SCE_GAP_NUMBER = 10 +SCE_GAP_STRINGEOL = 11 +SCE_PLM_DEFAULT = 0 +SCE_PLM_COMMENT = 1 +SCE_PLM_STRING = 2 +SCE_PLM_NUMBER = 3 +SCE_PLM_IDENTIFIER = 4 +SCE_PLM_OPERATOR = 5 +SCE_PLM_CONTROL = 6 +SCE_PLM_KEYWORD = 7 +SCE_4GL_DEFAULT = 0 +SCE_4GL_NUMBER = 1 +SCE_4GL_WORD = 2 +SCE_4GL_STRING = 3 +SCE_4GL_CHARACTER = 4 +SCE_4GL_PREPROCESSOR = 5 +SCE_4GL_OPERATOR = 6 +SCE_4GL_IDENTIFIER = 7 +SCE_4GL_BLOCK = 8 +SCE_4GL_END = 9 +SCE_4GL_COMMENT1 = 10 +SCE_4GL_COMMENT2 = 11 +SCE_4GL_COMMENT3 = 12 +SCE_4GL_COMMENT4 = 13 +SCE_4GL_COMMENT5 = 14 +SCE_4GL_COMMENT6 = 15 +SCE_4GL_DEFAULT_ = 16 +SCE_4GL_NUMBER_ = 17 +SCE_4GL_WORD_ = 18 +SCE_4GL_STRING_ = 19 +SCE_4GL_CHARACTER_ = 20 +SCE_4GL_PREPROCESSOR_ = 21 +SCE_4GL_OPERATOR_ = 22 +SCE_4GL_IDENTIFIER_ = 23 +SCE_4GL_BLOCK_ = 24 +SCE_4GL_END_ = 25 +SCE_4GL_COMMENT1_ = 26 +SCE_4GL_COMMENT2_ = 27 +SCE_4GL_COMMENT3_ = 28 
+SCE_4GL_COMMENT4_ = 29 +SCE_4GL_COMMENT5_ = 30 +SCE_4GL_COMMENT6_ = 31 +SCE_ABAQUS_DEFAULT = 0 +SCE_ABAQUS_COMMENT = 1 +SCE_ABAQUS_COMMENTBLOCK = 2 +SCE_ABAQUS_NUMBER = 3 +SCE_ABAQUS_STRING = 4 +SCE_ABAQUS_OPERATOR = 5 +SCE_ABAQUS_WORD = 6 +SCE_ABAQUS_PROCESSOR = 7 +SCE_ABAQUS_COMMAND = 8 +SCE_ABAQUS_SLASHCOMMAND = 9 +SCE_ABAQUS_STARCOMMAND = 10 +SCE_ABAQUS_ARGUMENT = 11 +SCE_ABAQUS_FUNCTION = 12 +SCE_ASY_DEFAULT = 0 +SCE_ASY_COMMENT = 1 +SCE_ASY_COMMENTLINE = 2 +SCE_ASY_NUMBER = 3 +SCE_ASY_WORD = 4 +SCE_ASY_STRING = 5 +SCE_ASY_CHARACTER = 6 +SCE_ASY_OPERATOR = 7 +SCE_ASY_IDENTIFIER = 8 +SCE_ASY_STRINGEOL = 9 +SCE_ASY_COMMENTLINEDOC = 10 +SCE_ASY_WORD2 = 11 +SCE_R_DEFAULT = 0 +SCE_R_COMMENT = 1 +SCE_R_KWORD = 2 +SCE_R_BASEKWORD = 3 +SCE_R_OTHERKWORD = 4 +SCE_R_NUMBER = 5 +SCE_R_STRING = 6 +SCE_R_STRING2 = 7 +SCE_R_OPERATOR = 8 +SCE_R_IDENTIFIER = 9 +SCE_R_INFIX = 10 +SCE_R_INFIXEOL = 11 +SCE_MAGIK_DEFAULT = 0 +SCE_MAGIK_COMMENT = 1 +SCE_MAGIK_HYPER_COMMENT = 16 +SCE_MAGIK_STRING = 2 +SCE_MAGIK_CHARACTER = 3 +SCE_MAGIK_NUMBER = 4 +SCE_MAGIK_IDENTIFIER = 5 +SCE_MAGIK_OPERATOR = 6 +SCE_MAGIK_FLOW = 7 +SCE_MAGIK_CONTAINER = 8 +SCE_MAGIK_BRACKET_BLOCK = 9 +SCE_MAGIK_BRACE_BLOCK = 10 +SCE_MAGIK_SQBRACKET_BLOCK = 11 +SCE_MAGIK_UNKNOWN_KEYWORD = 12 +SCE_MAGIK_KEYWORD = 13 +SCE_MAGIK_PRAGMA = 14 +SCE_MAGIK_SYMBOL = 15 +SCE_POWERSHELL_DEFAULT = 0 +SCE_POWERSHELL_COMMENT = 1 +SCE_POWERSHELL_STRING = 2 +SCE_POWERSHELL_CHARACTER = 3 +SCE_POWERSHELL_NUMBER = 4 +SCE_POWERSHELL_VARIABLE = 5 +SCE_POWERSHELL_OPERATOR = 6 +SCE_POWERSHELL_IDENTIFIER = 7 +SCE_POWERSHELL_KEYWORD = 8 +SCE_POWERSHELL_CMDLET = 9 +SCE_POWERSHELL_ALIAS = 10 +SCE_MYSQL_DEFAULT = 0 +SCE_MYSQL_COMMENT = 1 +SCE_MYSQL_COMMENTLINE = 2 +SCE_MYSQL_VARIABLE = 3 +SCE_MYSQL_SYSTEMVARIABLE = 4 +SCE_MYSQL_KNOWNSYSTEMVARIABLE = 5 +SCE_MYSQL_NUMBER = 6 +SCE_MYSQL_MAJORKEYWORD = 7 +SCE_MYSQL_KEYWORD = 8 +SCE_MYSQL_DATABASEOBJECT = 9 +SCE_MYSQL_PROCEDUREKEYWORD = 10 +SCE_MYSQL_STRING = 11 +SCE_MYSQL_SQSTRING = 12 +SCE_MYSQL_DQSTRING = 13 +SCE_MYSQL_OPERATOR = 14 +SCE_MYSQL_FUNCTION = 15 +SCE_MYSQL_IDENTIFIER = 16 +SCE_MYSQL_QUOTEDIDENTIFIER = 17 +SCE_MYSQL_USER1 = 18 +SCE_MYSQL_USER2 = 19 +SCE_MYSQL_USER3 = 20 +SCE_PO_DEFAULT = 0 +SCE_PO_COMMENT = 1 +SCE_PO_MSGID = 2 +SCE_PO_MSGID_TEXT = 3 +SCE_PO_MSGSTR = 4 +SCE_PO_MSGSTR_TEXT = 5 +SCE_PO_MSGCTXT = 6 +SCE_PO_MSGCTXT_TEXT = 7 +SCE_PO_FUZZY = 8 +SCLEX_ASP = 29 +SCLEX_PHP = 30 diff --git a/venv/Lib/site-packages/pythonwin/pywin/scintilla/view.py b/venv/Lib/site-packages/pythonwin/pywin/scintilla/view.py new file mode 100644 index 00000000..645aced1 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/scintilla/view.py @@ -0,0 +1,722 @@ +# A general purpose MFC CCtrlView view that uses Scintilla. + +from . import control +from . import IDLEenvironment # IDLE emulation. +from pywin.mfc import docview +from pywin.mfc import dialog +from . import scintillacon +import win32con +import win32ui +import afxres +import string +import array +import sys +import types +import __main__ # for attribute lookup +from . import bindings +from . 
import keycodes +import struct +import re +import os + +PRINTDLGORD = 1538 +IDC_PRINT_MAG_EDIT = 1010 +EM_FORMATRANGE = win32con.WM_USER+57 + +wordbreaks = "._" + string.ascii_uppercase + string.ascii_lowercase + string.digits + +patImport=re.compile('import (?P.*)') + +_event_commands = [ + # File menu + "win32ui.ID_FILE_LOCATE", "win32ui.ID_FILE_CHECK", "afxres.ID_FILE_CLOSE", + "afxres.ID_FILE_NEW", "afxres.ID_FILE_OPEN", "afxres.ID_FILE_SAVE", + "afxres.ID_FILE_SAVE_AS", "win32ui.ID_FILE_SAVE_ALL", + # Edit menu + "afxres.ID_EDIT_UNDO", "afxres.ID_EDIT_REDO", "afxres.ID_EDIT_CUT", + "afxres.ID_EDIT_COPY", "afxres.ID_EDIT_PASTE", "afxres.ID_EDIT_SELECT_ALL", + "afxres.ID_EDIT_FIND", "afxres.ID_EDIT_REPEAT", "afxres.ID_EDIT_REPLACE", + # View menu + "win32ui.ID_VIEW_WHITESPACE", "win32ui.ID_VIEW_FIXED_FONT", + "win32ui.ID_VIEW_BROWSE", "win32ui.ID_VIEW_INTERACTIVE", + # Window menu + "afxres.ID_WINDOW_ARRANGE", "afxres.ID_WINDOW_CASCADE", + "afxres.ID_WINDOW_NEW", "afxres.ID_WINDOW_SPLIT", + "afxres.ID_WINDOW_TILE_HORZ", "afxres.ID_WINDOW_TILE_VERT", + # Others + "afxres.ID_APP_EXIT", "afxres.ID_APP_ABOUT", +] + +_extra_event_commands = [ + ("EditDelete", afxres.ID_EDIT_CLEAR), + ("LocateModule", win32ui.ID_FILE_LOCATE), + ("GotoLine", win32ui.ID_EDIT_GOTO_LINE), + ("DbgBreakpointToggle", win32ui.IDC_DBG_ADD), + ("DbgGo", win32ui.IDC_DBG_GO), + ("DbgStepOver", win32ui.IDC_DBG_STEPOVER), + ("DbgStep", win32ui.IDC_DBG_STEP), + ("DbgStepOut", win32ui.IDC_DBG_STEPOUT), + ("DbgBreakpointClearAll", win32ui.IDC_DBG_CLEAR), + ("DbgClose", win32ui.IDC_DBG_CLOSE), +] + +event_commands = [] +def _CreateEvents(): + for name in _event_commands: + val = eval(name) + name_parts = name.split("_")[1:] + name_parts = [p.capitalize() for p in name_parts] + event = ''.join(name_parts) + event_commands.append((event, val)) + for name, id in _extra_event_commands: + event_commands.append((name, id)) + +_CreateEvents() +del _event_commands; del _extra_event_commands + +command_reflectors = [ + (win32ui.ID_EDIT_UNDO, win32con.WM_UNDO), + (win32ui.ID_EDIT_REDO, scintillacon.SCI_REDO), + (win32ui.ID_EDIT_CUT, win32con.WM_CUT), + (win32ui.ID_EDIT_COPY, win32con.WM_COPY), + (win32ui.ID_EDIT_PASTE, win32con.WM_PASTE), + (win32ui.ID_EDIT_CLEAR, win32con.WM_CLEAR), + (win32ui.ID_EDIT_SELECT_ALL, scintillacon.SCI_SELECTALL), +] + +def DoBraceMatch(control): + curPos = control.SCIGetCurrentPos() + charBefore = ' ' + if curPos: charBefore = control.SCIGetCharAt(curPos-1) + charAt = control.SCIGetCharAt(curPos) + braceAtPos = braceOpposite = -1 + if charBefore in "[](){}": braceAtPos = curPos-1 + if braceAtPos==-1: + if charAt in "[](){}": braceAtPos = curPos + if braceAtPos != -1: + braceOpposite = control.SCIBraceMatch(braceAtPos, 0) + if braceAtPos != -1 and braceOpposite==-1: + control.SCIBraceBadHighlight(braceAtPos) + else: + # either clear them both or set them both. + control.SCIBraceHighlight(braceAtPos, braceOpposite) + +def _get_class_attributes(ob): + # Recurse into base classes looking for attributes + items = [] + try: + items = items + dir(ob) + for i in ob.__bases__: + for item in _get_class_attributes(i): + if item not in items: + items.append(item) + except AttributeError: + pass + return items + +# Supposed to look like an MFC CEditView, but +# also supports IDLE extensions and other source code generic features. 
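# --- Illustrative aside (not part of the pywin32 diff above or below) -------
# The command_reflectors table above works because every SCI_* name defined in
# scintillacon.py is simply a Win32 message ID that the Scintilla control
# understands. A minimal sketch, assuming a Windows machine with pywin32 and
# Pythonwin installed and a Scintilla window handle already available as
# `hwnd` (hypothetical; obtaining the handle is outside this diff):
import win32gui
from pywin.scintilla import scintillacon

def reflect_select_all(hwnd):
    # Same effect as the ID_EDIT_SELECT_ALL entry in command_reflectors:
    # the MFC edit command is forwarded to Scintilla as a plain SendMessage.
    win32gui.SendMessage(hwnd, scintillacon.SCI_SELECTALL, 0, 0)
# -----------------------------------------------------------------------------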
+class CScintillaView(docview.CtrlView, control.CScintillaColorEditInterface): + def __init__(self, doc): + docview.CtrlView.__init__(self, doc, "Scintilla", win32con.WS_CHILD | win32con.WS_VSCROLL | win32con.WS_HSCROLL | win32con.WS_CLIPCHILDREN | win32con.WS_VISIBLE) + self._tabWidth = 8 # Mirror of what we send to Scintilla - never change this directly + self.bAutoCompleteAttributes = 1 + self.bShowCallTips = 1 + self.bMatchBraces = 0 # Editor option will default this to true later! + self.bindings = bindings.BindingsManager(self) + + self.idle = IDLEenvironment.IDLEEditorWindow(self) + self.idle.IDLEExtension("AutoExpand") + # SendScintilla is called so frequently it is worth optimizing. + self.SendScintilla = self._obj_.SendMessage + + def OnDestroy(self, msg): + self.SendScintilla = None + return docview.CtrlView.OnDestroy(self, msg) + + def _MakeColorizer(self): + ext = os.path.splitext(self.GetDocument().GetPathName())[1] + from . import formatter + return formatter.BuiltinPythonSourceFormatter(self, ext) + + +# def SendScintilla(self, msg, w=0, l=0): +# return self._obj_.SendMessage(msg, w, l) + + def SCISetTabWidth(self, width): + # I need to remember the tab-width for the AutoIndent extension. This may go. + self._tabWidth = width + control.CScintillaEditInterface.SCISetTabWidth(self, width) + + def GetTabWidth(self): + return self._tabWidth + + def HookHandlers(self): + # Create events for all the menu names. + for name, val in event_commands: +# handler = lambda id, code, tosend=val, parent=parent: parent.OnCommand(tosend, 0) and 0 + self.bindings.bind(name, None, cid=val) + + # Hook commands that do nothing other than send Scintilla messages. + for command, reflection in command_reflectors: + handler = lambda id, code, ss=self.SendScintilla, tosend=reflection: ss(tosend) and 0 + self.HookCommand(handler, command) + + self.HookCommand(self.OnCmdViewWS, win32ui.ID_VIEW_WHITESPACE) + self.HookCommandUpdate(self.OnUpdateViewWS, win32ui.ID_VIEW_WHITESPACE) + self.HookCommand(self.OnCmdViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES) + self.HookCommandUpdate(self.OnUpdateViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES) + self.HookCommand(self.OnCmdViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE) + self.HookCommandUpdate(self.OnUpdateViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE) + self.HookCommand(self.OnCmdViewEOL, win32ui.ID_VIEW_EOL) + self.HookCommandUpdate(self.OnUpdateViewEOL, win32ui.ID_VIEW_EOL) + self.HookCommand(self.OnCmdViewFixedFont, win32ui.ID_VIEW_FIXED_FONT) + self.HookCommandUpdate(self.OnUpdateViewFixedFont, win32ui.ID_VIEW_FIXED_FONT) + self.HookCommand(self.OnCmdFileLocate, win32ui.ID_FILE_LOCATE) + self.HookCommand(self.OnCmdEditFind, win32ui.ID_EDIT_FIND) + self.HookCommand(self.OnCmdEditRepeat, win32ui.ID_EDIT_REPEAT) + self.HookCommand(self.OnCmdEditReplace, win32ui.ID_EDIT_REPLACE) + self.HookCommand(self.OnCmdGotoLine, win32ui.ID_EDIT_GOTO_LINE) + self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT) + self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT_DIRECT) + self.HookCommand(self.OnFilePrintPreview, + win32ui.ID_FILE_PRINT_PREVIEW) + # Key bindings. 
+ self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN) + self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN) + # Hook wheeley mouse events +# self.HookMessage(self.OnMouseWheel, win32con.WM_MOUSEWHEEL) + self.HookFormatter() + + def OnInitialUpdate(self): + doc = self.GetDocument() + + # Enable Unicode + self.SendScintilla(scintillacon.SCI_SETCODEPAGE, scintillacon.SC_CP_UTF8, 0) + self.SendScintilla(scintillacon.SCI_SETKEYSUNICODE, 1, 0) + + # Create margins + self.SendScintilla(scintillacon.SCI_SETMARGINTYPEN, 1, scintillacon.SC_MARGIN_SYMBOL); + self.SendScintilla(scintillacon.SCI_SETMARGINMASKN, 1, 0xF); + self.SendScintilla(scintillacon.SCI_SETMARGINTYPEN, 2, scintillacon.SC_MARGIN_SYMBOL); + self.SendScintilla(scintillacon.SCI_SETMARGINMASKN, 2, scintillacon.SC_MASK_FOLDERS); + self.SendScintilla(scintillacon.SCI_SETMARGINSENSITIVEN, 2, 1); + + self.GetDocument().HookViewNotifications(self) # is there an MFC way to grab this? + self.HookHandlers() + + # Load the configuration information. + self.OnWinIniChange(None) + + self.SetSel() + + self.GetDocument().FinalizeViewCreation(self) # is there an MFC way to grab this? + + + def _GetSubConfigNames(self): + return None # By default we use only sections without sub-sections. + + def OnWinIniChange(self, section = None): + self.bindings.prepare_configure() + try: + self.DoConfigChange() + finally: + self.bindings.complete_configure() + + def DoConfigChange(self): + # Bit of a hack I dont kow what to do about - these should be "editor options" + from pywin.framework.editor import GetEditorOption + self.bAutoCompleteAttributes = GetEditorOption("Autocomplete Attributes", 1) + self.bShowCallTips = GetEditorOption("Show Call Tips", 1) + # Update the key map and extension data. + configManager.configure(self, self._GetSubConfigNames()) + if configManager.last_error: + win32ui.MessageBox(configManager.last_error, "Configuration Error") + self.bMatchBraces = GetEditorOption("Match Braces", 1) + self.ApplyFormattingStyles(1) + + def OnDestroy(self, msg): + self.bindings.close() + self.bindings = None + self.idle.close() + self.idle = None + control.CScintillaColorEditInterface.close(self) + return docview.CtrlView.OnDestroy(self, msg) + + def OnMouseWheel(self, msg): + zDelta = msg[2] >> 16 + vpos = self.GetScrollPos(win32con.SB_VERT) + vpos = vpos - zDelta/40 # 3 lines per notch + self.SetScrollPos(win32con.SB_VERT, vpos) + self.SendScintilla(win32con.WM_VSCROLL, + (vpos<<16) | win32con.SB_THUMBPOSITION, + 0) + + def OnBraceMatch(self, std, extra): + if not self.bMatchBraces: return + DoBraceMatch(self) + + def OnNeedShown(self, std, extra): + notify = self.SCIUnpackNotifyMessage(extra) + # OnNeedShown is called before an edit operation when + # text is folded (as it is possible the text insertion will happen + # in a folded region.) As this happens _before_ the insert, + # we ignore the length (if we are at EOF, pos + length may + # actually be beyond the end of buffer) + self.EnsureCharsVisible(notify.position) + + def EnsureCharsVisible(self, start, end = None): + if end is None: end = start + lineStart = self.LineFromChar(min(start, end)) + lineEnd = self.LineFromChar(max(start, end)) + while lineStart <= lineEnd: + self.SCIEnsureVisible(lineStart) + lineStart = lineStart + 1 + + # Helper to add an event to a menu. + def AppendMenu(self, menu, text="", event=None, flags = None, checked=0): + if event is None: + assert flags is not None, "No event or custom flags!" 
+ cmdid = 0 + else: + cmdid = self.bindings.get_command_id(event) + if cmdid is None: + # No event of that name - no point displaying it. + print('View.AppendMenu(): Unknown event "%s" specified for menu text "%s" - ignored' % (event, text)) + return + keyname = configManager.get_key_binding( event, self._GetSubConfigNames() ) + if keyname is not None: + text = text + "\t" + keyname + if flags is None: flags = win32con.MF_STRING|win32con.MF_ENABLED + if checked: flags = flags | win32con.MF_CHECKED + menu.AppendMenu(flags, cmdid, text) + + def OnKeyDown(self, msg): + return self.bindings.fire_key_event( msg ) + + def GotoEndOfFileEvent(self, event): + self.SetSel(-1) + + def KeyDotEvent(self, event): + ## Don't trigger autocomplete if any text is selected + s,e = self.GetSel() + if s!=e: + return 1 + self.SCIAddText(".") + if self.bAutoCompleteAttributes: + self._AutoComplete() + + # View Whitespace/EOL/Indentation UI. + + def OnCmdViewWS(self, cmd, code): # Handle the menu command + viewWS = self.SCIGetViewWS() + self.SCISetViewWS(not viewWS) + def OnUpdateViewWS(self, cmdui): # Update the tick on the UI. + cmdui.SetCheck(self.SCIGetViewWS()) + cmdui.Enable() + def OnCmdViewIndentationGuides(self, cmd, code): # Handle the menu command + viewIG = self.SCIGetIndentationGuides() + self.SCISetIndentationGuides(not viewIG) + def OnUpdateViewIndentationGuides(self, cmdui): # Update the tick on the UI. + cmdui.SetCheck(self.SCIGetIndentationGuides()) + cmdui.Enable() + def OnCmdViewRightEdge(self, cmd, code): # Handle the menu command + if self.SCIGetEdgeMode() == scintillacon.EDGE_NONE: + mode = scintillacon.EDGE_BACKGROUND + else: + mode = scintillacon.EDGE_NONE + self.SCISetEdgeMode(mode) + def OnUpdateViewRightEdge(self, cmdui): # Update the tick on the UI. + cmdui.SetCheck(self.SCIGetEdgeMode() != scintillacon.EDGE_NONE) + cmdui.Enable() + def OnCmdViewEOL(self, cmd, code): # Handle the menu command + viewEOL = self.SCIGetViewEOL() + self.SCISetViewEOL(not viewEOL) + def OnUpdateViewEOL(self, cmdui): # Update the tick on the UI. + cmdui.SetCheck(self.SCIGetViewEOL()) + cmdui.Enable() + + def OnCmdViewFixedFont(self, cmd, code): # Handle the menu command + self._GetColorizer().bUseFixed = not self._GetColorizer().bUseFixed + self.ApplyFormattingStyles(0) + # Ensure the selection is visible! + self.ScrollCaret() + + def OnUpdateViewFixedFont(self, cmdui): # Update the tick on the UI. + c = self._GetColorizer() + if c is not None: cmdui.SetCheck(c.bUseFixed) + cmdui.Enable(c is not None) + + def OnCmdEditFind(self, cmd, code): + from . import find + find.ShowFindDialog() + def OnCmdEditRepeat(self, cmd, code): + from . import find + find.FindNext() + def OnCmdEditReplace(self, cmd, code): + from . import find + find.ShowReplaceDialog() + + def OnCmdFileLocate(self, cmd, id): + line = self.GetLine().strip() + import pywin.framework.scriptutils + m = patImport.match(line) + if m: + # Module name on this line - locate that! + modName = m.group('name') + fileName = pywin.framework.scriptutils.LocatePythonFile(modName) + if fileName is None: + win32ui.SetStatusText("Can't locate module %s" % modName) + return 1 # Let the default get it. + else: + win32ui.GetApp().OpenDocumentFile(fileName) + else: + # Just to a "normal" locate - let the default handler get it. 
+ return 1 + return 0 + + def OnCmdGotoLine(self, cmd, id): + try: + lineNo = int(input("Enter Line Number"))-1 + except (ValueError, KeyboardInterrupt): + return 0 + self.SCIEnsureVisible(lineNo) + self.SCIGotoLine(lineNo) + return 0 + + def SaveTextFile(self, filename, encoding=None): + doc = self.GetDocument() + doc._SaveTextToFile(self, filename, encoding=encoding) + doc.SetModifiedFlag(0) + return 1 + + def _AutoComplete(self): + def list2dict(l): + ret={} + for i in l: + ret[i] = None + return ret + + self.SCIAutoCCancel() # Cancel old auto-complete lists. + # First try and get an object without evaluating calls + ob = self._GetObjectAtPos(bAllowCalls = 0) + # If that failed, try and process call or indexing to get the object. + if ob is None: + ob = self._GetObjectAtPos(bAllowCalls = 1) + items_dict = {} + if ob is not None: + try: # Catch unexpected errors when fetching attribute names from the object + # extra attributes of win32ui objects + if hasattr(ob, "_obj_"): + try: + items_dict.update(list2dict(dir(ob._obj_))) + except AttributeError: + pass # object has no __dict__ + + # normal attributes + try: + items_dict.update(list2dict(dir(ob))) + except AttributeError: + pass # object has no __dict__ + if hasattr(ob, "__class__"): + items_dict.update(list2dict(_get_class_attributes(ob.__class__))) + # The object may be a COM object with typelib support - lets see if we can get its props. + # (contributed by Stefan Migowsky) + try: + # Get the automation attributes + items_dict.update(ob.__class__._prop_map_get_) + # See if there is an write only property + # could be optimized + items_dict.update(ob.__class__._prop_map_put_) + # append to the already evaluated list + except AttributeError: + pass + # The object might be a pure COM dynamic dispatch with typelib support - lets see if we can get its props. + if hasattr(ob, "_oleobj_"): + try: + for iTI in range(0,ob._oleobj_.GetTypeInfoCount()): + typeInfo = ob._oleobj_.GetTypeInfo(iTI) + self._UpdateWithITypeInfo (items_dict, typeInfo) + except: + pass + except: + win32ui.SetStatusText("Error attempting to get object attributes - %s" % (repr(sys.exc_info()[0]),)) + + # ensure all keys are strings. + items = [str(k) for k in items_dict.keys()] + # All names that start with "_" go! + items = [k for k in items if not k.startswith('_')] + + if not items: + # Heuristics a-la AutoExpand + # The idea is to find other usages of the current binding + # and assume, that it refers to the same object (or at least, + # to an object of the same type) + # Contributed by Vadim Chugunov [vadimch@yahoo.com] + left, right = self._GetWordSplit() + if left=="": # Ignore standalone dots + return None + # We limit our search to the current class, if that + # information is available + minline, maxline, curclass = self._GetClassInfoFromBrowser() + endpos = self.LineIndex(maxline) + text = self.GetTextRange(self.LineIndex(minline),endpos) + try: + l = re.findall(r"\b"+left+"\.\w+",text) + except re.error: + # parens etc may make an invalid RE, but this code wouldnt + # benefit even if the RE did work :-) + l = [] + prefix = len(left)+1 + unique = {} + for li in l: + unique[li[prefix:]] = 1 + # Assuming traditional usage of self... + if curclass and left=="self": + self._UpdateWithClassMethods(unique,curclass) + + items = [word for word in unique.keys() if word[:2]!='__' or word[-2:]!='__'] + # Ignore the word currently to the right of the dot - probably a red-herring. 
+ try: + items.remove(right[1:]) + except ValueError: + pass + if items: + items.sort() + self.SCIAutoCSetAutoHide(0) + self.SCIAutoCShow(items) + + def _UpdateWithITypeInfo (self, items_dict, typeInfo): + import pythoncom + typeInfos = [typeInfo] + # suppress IDispatch and IUnknown methods + inspectedIIDs = {pythoncom.IID_IDispatch:None} + + while len(typeInfos)>0: + typeInfo = typeInfos.pop() + typeAttr = typeInfo.GetTypeAttr() + + if typeAttr.iid not in inspectedIIDs: + inspectedIIDs[typeAttr.iid] = None + for iFun in range(0,typeAttr.cFuncs): + funDesc = typeInfo.GetFuncDesc(iFun) + funName = typeInfo.GetNames(funDesc.memid)[0] + if funName not in items_dict: + items_dict[funName] = None + + # Inspect the type info of all implemented types + # E.g. IShellDispatch5 implements IShellDispatch4 which implements IShellDispatch3 ... + for iImplType in range(0,typeAttr.cImplTypes): + iRefType = typeInfo.GetRefTypeOfImplType(iImplType) + refTypeInfo = typeInfo.GetRefTypeInfo(iRefType) + typeInfos.append(refTypeInfo) + + # TODO: This is kinda slow. Probably need some kind of cache + # here that is flushed upon file save + # Or maybe we don't need the superclass methods at all ? + def _UpdateWithClassMethods(self,dict,classinfo): + if not hasattr(classinfo,"methods"): + # No 'methods' - probably not what we think it is. + return + dict.update(classinfo.methods) + for super in classinfo.super: + if hasattr(super,"methods"): + self._UpdateWithClassMethods(dict,super) + + # Find which class definition caret is currently in and return + # indexes of the the first and the last lines of that class definition + # Data is obtained from module browser (if enabled) + def _GetClassInfoFromBrowser(self,pos=-1): + minline = 0 + maxline = self.GetLineCount()-1 + doc = self.GetParentFrame().GetActiveDocument() + browser = None + try: + if doc is not None: + browser = doc.GetAllViews()[1] + except IndexError: + pass + if browser is None: + return (minline,maxline,None) # Current window has no browser + if not browser.list: return (minline,maxline,None) # Not initialized + path = self.GetDocument().GetPathName() + if not path: return (minline,maxline,None) # No current path + + import pywin.framework.scriptutils + curmodule, path = pywin.framework.scriptutils.GetPackageModuleName(path) + try: + clbrdata = browser.list.root.clbrdata + except AttributeError: + return (minline,maxline,None) # No class data for this module. + curline = self.LineFromChar(pos) + curclass = None + # Find out which class we are in + for item in clbrdata.values(): + if item.module==curmodule: + item_lineno = item.lineno - 1 # Scintilla counts lines from 0, whereas pyclbr - from 1 + if minline < item_lineno <= curline: + minline = item_lineno + curclass = item + if curline < item_lineno < maxline: + maxline = item_lineno + return (minline,maxline,curclass) + + + def _GetObjectAtPos(self, pos = -1, bAllowCalls = 0): + left, right = self._GetWordSplit(pos, bAllowCalls) + if left: # It is an attribute lookup + # How is this for a hack! + namespace = sys.modules.copy() + namespace.update(__main__.__dict__) + # Get the debugger's context. 
+ try: + from pywin.framework import interact + if interact.edit is not None and interact.edit.currentView is not None: + globs, locs = interact.edit.currentView.GetContext()[:2] + if globs: namespace.update(globs) + if locs: namespace.update(locs) + except ImportError: + pass + try: + return eval(left, namespace) + except: + pass + return None + + def _GetWordSplit(self, pos = -1, bAllowCalls = 0): + if pos==-1: pos = self.GetSel()[0]-1 # Character before current one + limit = self.GetTextLength() + before = [] + after = [] + index = pos-1 + wordbreaks_use = wordbreaks + if bAllowCalls: wordbreaks_use = wordbreaks_use + "()[]" + while index>=0: + char = self.SCIGetCharAt(index) + if char not in wordbreaks_use: break + before.insert(0, char) + index = index-1 + index = pos + while index<=limit: + char = self.SCIGetCharAt(index) + if char not in wordbreaks_use: break + after.append(char) + index=index+1 + return ''.join(before), ''.join(after) + + def OnPrepareDC (self, dc, pInfo): +# print "OnPrepareDC for page", pInfo.GetCurPage(), "of", pInfo.GetFromPage(), "to", pInfo.GetToPage(), ", starts=", self.starts + if dc.IsPrinting(): + # Check if we are beyond the end. + # (only do this when actually printing, else messes up print preview!) + if not pInfo.GetPreview() and self.starts is not None: + prevPage = pInfo.GetCurPage() - 1 + if prevPage > 0 and self.starts[prevPage] >= self.GetTextLength(): + # All finished. + pInfo.SetContinuePrinting(0) + return + dc.SetMapMode(win32con.MM_TEXT); + + def OnPreparePrinting(self, pInfo): + flags = win32ui.PD_USEDEVMODECOPIES | \ + win32ui.PD_ALLPAGES | \ + win32ui.PD_NOSELECTION # Dont support printing just a selection. +# NOTE: Custom print dialogs are stopping the user's values from coming back :-( +# self.prtDlg = PrintDialog(pInfo, PRINTDLGORD, flags) +# pInfo.SetPrintDialog(self.prtDlg) + pInfo.SetMinPage(1) + # max page remains undefined for now. + pInfo.SetFromPage(1) + pInfo.SetToPage(1) + ret = self.DoPreparePrinting(pInfo) + return ret + + def OnBeginPrinting(self, dc, pInfo): + self.starts = None + return self._obj_.OnBeginPrinting(dc, pInfo) + + def CalculatePageRanges(self, dc, pInfo): + # Calculate page ranges and max page + self.starts = {0:0} + metrics = dc.GetTextMetrics() + left, top, right, bottom = pInfo.GetDraw() + # Leave space at the top for the header. + rc = (left, top + int((9*metrics['tmHeight'])/2), right, bottom) + pageStart = 0 + maxPage = 0 + textLen = self.GetTextLength() + while pageStart < textLen: + pageStart = self.FormatRange(dc, pageStart, textLen, rc, 0) + maxPage = maxPage + 1 + self.starts[maxPage] = pageStart + # And a sentinal for one page past the end + self.starts[maxPage+1] = textLen + # When actually printing, maxPage doesnt have any effect at this late state. + # but is needed to make the Print Preview work correctly. 
+ pInfo.SetMaxPage(maxPage) + + def OnFilePrintPreview(self, *arg): + self._obj_.OnFilePrintPreview() + + def OnFilePrint(self, *arg): + self._obj_.OnFilePrint() + + def FormatRange(self, dc, pageStart, lengthDoc, rc, draw): + """ + typedef struct _formatrange { + HDC hdc; + HDC hdcTarget; + RECT rc; + RECT rcPage; + CHARRANGE chrg;} FORMATRANGE; + """ + fmt='PPIIIIIIIIll' + hdcRender = dc.GetHandleOutput() + hdcFormat = dc.GetHandleAttrib() + fr = struct.pack(fmt, hdcRender, hdcFormat, rc[0], rc[1], rc[2], rc[3], rc[0], rc[1], rc[2], rc[3], pageStart, lengthDoc) + nextPageStart = self.SendScintilla(EM_FORMATRANGE, draw, fr) + return nextPageStart + + def OnPrint(self, dc, pInfo): + metrics = dc.GetTextMetrics() +# print "dev", w, h, l, metrics['tmAscent'], metrics['tmDescent'] + if self.starts is None: + self.CalculatePageRanges(dc, pInfo) + pageNum = pInfo.GetCurPage() - 1 + # Setup the header of the page - docname on left, pagenum on right. + doc = self.GetDocument() + cxChar = metrics['tmAveCharWidth'] + cyChar = metrics['tmHeight'] + left, top, right, bottom = pInfo.GetDraw() + dc.TextOut(0, 2*cyChar, doc.GetTitle()) + pagenum_str = win32ui.LoadString(afxres.AFX_IDS_PRINTPAGENUM) % (pageNum+1,) + dc.SetTextAlign(win32con.TA_RIGHT) + dc.TextOut(right, 2*cyChar, pagenum_str) + dc.SetTextAlign(win32con.TA_LEFT) + top = top + int((7*cyChar)/2) + dc.MoveTo(left, top) + dc.LineTo(right, top) + top = top + cyChar + rc = (left, top, right, bottom) + nextPageStart = self.FormatRange(dc, self.starts[pageNum], self.starts[pageNum+1], rc, 1) + +def LoadConfiguration(): + global configManager + # Bit of a hack I dont kow what to do about? + from .config import ConfigManager + configName = rc = win32ui.GetProfileVal("Editor", "Keyboard Config", "default") + configManager = ConfigManager(configName) + if configManager.last_error: + bTryDefault = 0 + msg = "Error loading configuration '%s'\n\n%s" % (configName, configManager.last_error) + if configName != "default": + msg = msg + "\n\nThe default configuration will be loaded." + bTryDefault = 1 + win32ui.MessageBox(msg) + if bTryDefault: + configManager = ConfigManager("default") + if configManager.last_error: + win32ui.MessageBox("Error loading configuration 'default'\n\n%s" % (configManager.last_error)) + +configManager = None +LoadConfiguration() diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/TraceCollector.py b/venv/Lib/site-packages/pythonwin/pywin/tools/TraceCollector.py new file mode 100644 index 00000000..e4fc8542 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/tools/TraceCollector.py @@ -0,0 +1,66 @@ +# win32traceutil like utility for Pythonwin +import _thread +import win32trace, win32event, win32api +from pywin.framework import winout + +outputWindow = None + +def CollectorThread(stopEvent, file): + win32trace.InitRead() + handle = win32trace.GetHandle() + # Run this thread at a lower priority to the main message-loop (and printing output) + # thread can keep up + import win32process + win32process.SetThreadPriority(win32api.GetCurrentThread(), win32process.THREAD_PRIORITY_BELOW_NORMAL) + + try: + while 1: + rc = win32event.WaitForMultipleObjects((handle, stopEvent), 0, win32event.INFINITE) + if rc == win32event.WAIT_OBJECT_0: + # About the only char we can't live with is \0! 
+ file.write(win32trace.read().replace("\0", "")) + else: + # Stop event + break + finally: + win32trace.TermRead() + print("Thread dieing") + +class WindowOutput(winout.WindowOutput): + def __init__(self, *args): + winout.WindowOutput.__init__(*(self,)+args) + self.hStopThread = win32event.CreateEvent(None, 0, 0, None) + _thread.start_new(CollectorThread, (self.hStopThread, self)) + def _StopThread(self): + win32event.SetEvent(self.hStopThread) + self.hStopThread = None + def Close(self): + self._StopThread() + winout.WindowOutput.Close(self) +# def OnViewDestroy(self, frame): +# return winout.WindowOutput.OnViewDestroy(self, frame) +# def Create(self, title=None, style = None): +# rc = winout.WindowOutput.Create(self, title, style) + return rc + + +def MakeOutputWindow(): + # Note that it will not show until the first string written or + # you pass bShow = 1 + global outputWindow + if outputWindow is None: + title = "Python Trace Collector" + # queueingFlag doesnt matter, as all output will come from new thread + outputWindow = WindowOutput(title, title) + # Let people know what this does! + msg = """\ +# This window will display output from any programs that import win32traceutil +# win32com servers registered with '--debug' are in this category. +""" + outputWindow.write(msg) + # force existing window open + outputWindow.write('') + return outputWindow + +if __name__=='__main__': + MakeOutputWindow() diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/__init__.py b/venv/Lib/site-packages/pythonwin/pywin/tools/__init__.py new file mode 100644 index 00000000..139597f9 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/tools/__init__.py @@ -0,0 +1,2 @@ + + diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/TraceCollector.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/TraceCollector.cpython-36.pyc new file mode 100644 index 00000000..173646e0 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/TraceCollector.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..8ff16d79 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/browseProjects.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/browseProjects.cpython-36.pyc new file mode 100644 index 00000000..4fde2fd1 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/browseProjects.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/browser.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/browser.cpython-36.pyc new file mode 100644 index 00000000..0eead081 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/browser.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/hierlist.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/hierlist.cpython-36.pyc new file mode 100644 index 00000000..1ba77ef4 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/hierlist.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/regedit.cpython-36.pyc 
b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/regedit.cpython-36.pyc new file mode 100644 index 00000000..b670f0e9 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/regedit.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/regpy.cpython-36.pyc b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/regpy.cpython-36.pyc new file mode 100644 index 00000000..6eb790b6 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/pywin/tools/__pycache__/regpy.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/browseProjects.py b/venv/Lib/site-packages/pythonwin/pywin/tools/browseProjects.py new file mode 100644 index 00000000..6a893933 --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/tools/browseProjects.py @@ -0,0 +1,260 @@ +import regutil, os +from . import hierlist +import win32con, win32ui, win32api +import commctrl +from pywin.mfc import dialog +import glob +import pyclbr +import pywin.framework.scriptutils +import afxres + +class HLIErrorItem(hierlist.HierListItem): + def __init__(self, text): + self.text = text + hierlist.HierListItem.__init__(self) + def GetText(self): + return self.text + +class HLICLBRItem(hierlist.HierListItem): + def __init__(self, name, file, lineno, suffix = ""): + # If the 'name' object itself has a .name, use it. Not sure + # how this happens, but seems pyclbr related. + # See PyWin32 bug 817035 + self.name = getattr(name, "name", name) + self.file = file + self.lineno = lineno + self.suffix = suffix + def __lt__(self, other): + return self.name < other.name + def __eq__(self, other): + return self.name == other.name + def GetText(self): + return self.name + self.suffix + def TakeDefaultAction(self): + if self.file: + pywin.framework.scriptutils.JumpToDocument(self.file, self.lineno, bScrollToTop=1) + else: + win32ui.SetStatusText("The source of this object is unknown") + def PerformItemSelected(self): + if self.file is None: + msg = "%s - source can not be located." 
% (self.name, ) + else: + msg = "%s defined at line %d of %s" % (self.name, self.lineno, self.file) + win32ui.SetStatusText(msg) + +class HLICLBRClass(HLICLBRItem): + def __init__(self, clbrclass, suffix = ""): + try: + name = clbrclass.name + file = clbrclass.file + lineno = clbrclass.lineno + self.super = clbrclass.super + self.methods = clbrclass.methods + except AttributeError: + name = clbrclass + file = lineno = None + self.super = []; self.methods = {} + HLICLBRItem.__init__(self, name, file, lineno, suffix) + def GetSubList(self): + ret = [] + for c in self.super: + ret.append(HLICLBRClass(c, " (Parent class)")) + for meth, lineno in self.methods.items(): + ret.append(HLICLBRMethod(meth, self.file, lineno, " (method)")) + return ret + def IsExpandable(self): + return len(self.methods) + len(self.super) + def GetBitmapColumn(self): + return 21 + +class HLICLBRFunction(HLICLBRClass): + def GetBitmapColumn(self): + return 22 +class HLICLBRMethod(HLICLBRItem): + def GetBitmapColumn(self): + return 22 + +class HLIModuleItem(hierlist.HierListItem): + def __init__(self, path): + hierlist.HierListItem.__init__(self) + self.path = path + def GetText(self): + return os.path.split(self.path)[1] + " (module)" + def IsExpandable(self): + return 1 + def TakeDefaultAction(self): + win32ui.GetApp().OpenDocumentFile( self.path ) + def GetBitmapColumn(self): + col = 4 # Default + try: + if win32api.GetFileAttributes(self.path) & win32con.FILE_ATTRIBUTE_READONLY: + col = 5 + except win32api.error: + pass + return col + def GetSubList(self): + mod, path = pywin.framework.scriptutils.GetPackageModuleName(self.path) + win32ui.SetStatusText("Building class list - please wait...", 1) + win32ui.DoWaitCursor(1) + try: + try: + reader = pyclbr.readmodule_ex # Post 1.5.2 interface. + extra_msg = " or functions" + except AttributeError: + reader = pyclbr.readmodule + extra_msg = "" + data = reader(mod, [path]) + if data: + ret = [] + for item in data.values(): + if item.__class__ != pyclbr.Class: # ie, it is a pyclbr Function instance (only introduced post 1.5.2) + ret.append(HLICLBRFunction( item, " (function)" ) ) + else: + ret.append(HLICLBRClass( item, " (class)") ) + ret.sort() + return ret + else: + return [HLIErrorItem("No Python classes%s in module." % (extra_msg,))] + finally: + win32ui.DoWaitCursor(0) + win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)) + +def MakePathSubList(path): + ret = [] + for filename in glob.glob(os.path.join(path,'*')): + if os.path.isdir(filename) and os.path.isfile(os.path.join(filename, "__init__.py")): + ret.append(HLIDirectoryItem(filename, os.path.split(filename)[1])) + else: + if os.path.splitext(filename)[1].lower() in ['.py', '.pyw']: + ret.append(HLIModuleItem(filename)) + return ret + +class HLIDirectoryItem(hierlist.HierListItem): + def __init__(self, path, displayName = None, bSubDirs = 0): + hierlist.HierListItem.__init__(self) + self.path = path + self.bSubDirs = bSubDirs + if displayName: + self.displayName = displayName + else: + self.displayName = path + def IsExpandable(self): + return 1 + def GetText(self): + return self.displayName + + def GetSubList(self): + ret = MakePathSubList(self.path) + if os.path.split(self.path)[1] == "win32com": # Complete and utter hack for win32com. 
+ try: + path = win32api.GetFullPathName(os.path.join(self.path, "..\\win32comext")) + ret = ret + MakePathSubList(path) + except win32ui.error: + pass + return ret + +class HLIProjectRoot(hierlist.HierListItem): + def __init__(self, projectName, displayName = None): + hierlist.HierListItem.__init__(self) + self.projectName = projectName + self.displayName = displayName or projectName + def GetText(self): + return self.displayName + def IsExpandable(self): + return 1 + def GetSubList(self): + paths = regutil.GetRegisteredNamedPath(self.projectName) + pathList = paths.split(";") + if len(pathList)==1: # Single dir - dont bother putting the dir in + ret = MakePathSubList(pathList[0]) + else: + ret = list(map( HLIDirectoryItem, pathList )) + return ret + +class HLIRoot(hierlist.HierListItem): + def __init__(self): + hierlist.HierListItem.__init__(self) + def IsExpandable(self): + return 1 + def GetSubList(self): + keyStr = regutil.BuildDefaultPythonKey() + "\\PythonPath" + hKey = win32api.RegOpenKey(regutil.GetRootKey(), keyStr) + try: + ret = [] + ret.append(HLIProjectRoot("", "Standard Python Library")) # The core path. + index = 0 + while 1: + try: + ret.append(HLIProjectRoot(win32api.RegEnumKey(hKey, index))) + index = index + 1 + except win32api.error: + break + return ret + finally: + win32api.RegCloseKey(hKey) + +class dynamic_browser (dialog.Dialog): + style = win32con.WS_OVERLAPPEDWINDOW | win32con.WS_VISIBLE + cs = ( + win32con.WS_CHILD | + win32con.WS_VISIBLE | + commctrl.TVS_HASLINES | + commctrl.TVS_LINESATROOT | + commctrl.TVS_HASBUTTONS + ) + + dt = [ + ["Python Projects", (0, 0, 200, 200), style, None, (8, "MS Sans Serif")], + ["SysTreeView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), cs] + ] + + def __init__ (self, hli_root): + dialog.Dialog.__init__ (self, self.dt) + self.hier_list = hierlist.HierListWithItems ( + hli_root, + win32ui.IDB_BROWSER_HIER + ) + self.HookMessage (self.on_size, win32con.WM_SIZE) + + def OnInitDialog (self): + self.hier_list.HierInit (self) + return dialog.Dialog.OnInitDialog (self) + + def on_size (self, params): + lparam = params[3] + w = win32api.LOWORD(lparam) + h = win32api.HIWORD(lparam) + self.GetDlgItem (win32ui.IDC_LIST1).MoveWindow((0,0,w,h)) + +def BrowseDialog(): + root = HLIRoot() + if not root.IsExpandable(): + raise TypeError("Browse() argument must have __dict__ attribute, or be a Browser supported type") + + dlg = dynamic_browser (root) + dlg.CreateWindow() + +def DockableBrowserCreator(parent): + root = HLIRoot() + hl = hierlist.HierListWithItems ( + root, + win32ui.IDB_BROWSER_HIER + ) + + style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS + + control = win32ui.CreateTreeCtrl() + control.CreateWindow(style, (0, 0, 150, 300), parent, win32ui.IDC_LIST1) + list = hl.HierInit (parent, control) + return control + +def DockablePathBrowser(): + import pywin.docking.DockingBar + bar = pywin.docking.DockingBar.DockingBar() + bar.CreateWindow(win32ui.GetMainFrame(), DockableBrowserCreator, "Path Browser", 0x8e0a) + bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC) + bar.EnableDocking(afxres.CBRS_ALIGN_ANY) + win32ui.GetMainFrame().DockControlBar(bar) + +# The "default" entry point +Browse = DockablePathBrowser diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/browser.py b/venv/Lib/site-packages/pythonwin/pywin/tools/browser.py new file mode 100644 index 00000000..7cfb6a79 --- /dev/null 
+++ b/venv/Lib/site-packages/pythonwin/pywin/tools/browser.py @@ -0,0 +1,434 @@ +# basic module browser. + +# usage: +# >>> import browser +# >>> browser.Browse() +# or +# >>> browser.Browse(your_module) +import sys +import types +import __main__ +import win32ui +from pywin.mfc import dialog + +from . import hierlist + +special_names = [ '__doc__', '__name__', '__self__' ] + +# +# HierList items +class HLIPythonObject(hierlist.HierListItem): + def __init__(self, myobject=None, name=None ): + hierlist.HierListItem.__init__(self) + self.myobject = myobject + self.knownExpandable = None + if name: + self.name=name + else: + try: + self.name=myobject.__name__ + except (AttributeError, TypeError): + try: + r = repr(myobject) + if len(r)>20: + r = r[:20] + "..." + self.name=r + except (AttributeError, TypeError): + self.name="???" + def __lt__(self, other): + return self.name < other.name + def __eq__(self, other): + return self.name == other.name + def __repr__(self): + try: + type = self.GetHLIType() + except: + type = "Generic" + return "HLIPythonObject("+type+") - name: "+ self.name + " object: " + repr(self.myobject) + def GetText(self): + try: + return str(self.name) + ' (' + self.GetHLIType() + ')' + except AttributeError: + return str(self.name) + ' = ' + repr(self.myobject) + def InsertDocString(self, lst): + ob = None + try: + ob = self.myobject.__doc__ + except (AttributeError, TypeError): + pass + # I don't quite grok descriptors enough to know how to + # best hook them up. Eg: + # >>> object.__getattribute__.__class__.__doc__ + # + if ob and isinstance(ob, str): + lst.insert(0, HLIDocString( ob, "Doc" )) + + def GetSubList(self): + ret = [] + try: + for (key, ob) in self.myobject.__dict__.items(): + if key not in special_names: + ret.append(MakeHLI( ob, key ) ) + except (AttributeError, TypeError): + pass + try: + for name in self.myobject.__methods__: + ret.append(HLIMethod( name )) # no MakeHLI, as cant auto detect + except (AttributeError, TypeError): + pass + try: + for member in self.myobject.__members__: + if not member in special_names: + ret.append(MakeHLI(getattr(self.myobject, member), member)) + except (AttributeError, TypeError): + pass + ret.sort() + self.InsertDocString(ret) + return ret + # if the has a dict, it is expandable. 
+ def IsExpandable(self): + if self.knownExpandable is None: + self.knownExpandable = self.CalculateIsExpandable() + return self.knownExpandable + + def CalculateIsExpandable(self): + if hasattr(self.myobject, '__doc__'): + return 1 + try: + for key in self.myobject.__dict__.keys(): + if key not in special_names: + return 1 + except (AttributeError, TypeError): + pass + try: + self.myobject.__methods__ + return 1 + except (AttributeError, TypeError): + pass + try: + for item in self.myobject.__members__: + if item not in special_names: + return 1 + except (AttributeError, TypeError): + pass + return 0 + def GetBitmapColumn(self): + if self.IsExpandable(): + return 0 + else: + return 4 + def TakeDefaultAction(self): + ShowObject(self.myobject, self.name) + + +class HLIDocString(HLIPythonObject): + def GetHLIType(self): + return "DocString" + def GetText(self): + return self.myobject.strip() + def IsExpandable(self): + return 0 + def GetBitmapColumn(self): + return 6 + +class HLIModule(HLIPythonObject): + def GetHLIType(self): + return "Module" + +class HLIFrame(HLIPythonObject): + def GetHLIType(self): + return "Stack Frame" + +class HLITraceback(HLIPythonObject): + def GetHLIType(self): + return "Traceback" + +class HLIClass(HLIPythonObject): + def GetHLIType(self): + return "Class" + def GetSubList(self): + ret = [] + for base in self.myobject.__bases__: + ret.append( MakeHLI(base, 'Base class: ' + base.__name__ ) ) + ret = ret + HLIPythonObject.GetSubList(self) + return ret + +class HLIMethod(HLIPythonObject): + # myobject is just a string for methods. + def GetHLIType(self): + return "Method" + def GetText(self): + return "Method: " + self.myobject + '()' + +class HLICode(HLIPythonObject): + def GetHLIType(self): + return "Code" + def IsExpandable(self): + return self.myobject + def GetSubList(self): + ret = [] + ret.append( MakeHLI( self.myobject.co_consts, "Constants (co_consts)" )) + ret.append( MakeHLI( self.myobject.co_names, "Names (co_names)" )) + ret.append( MakeHLI( self.myobject.co_filename, "Filename (co_filename)" )) + ret.append( MakeHLI( self.myobject.co_argcount, "Number of args (co_argcount)")) + ret.append( MakeHLI( self.myobject.co_varnames, "Param names (co_varnames)")) + + return ret + +class HLIInstance(HLIPythonObject): + def GetHLIType(self): + return "Instance" + def GetText(self): + return str(self.name) + ' (Instance of class ' + str(self.myobject.__class__.__name__) + ')' + def IsExpandable(self): + return 1 + def GetSubList(self): + ret = [] + ret.append( MakeHLI( self.myobject.__class__) ) + ret = ret + HLIPythonObject.GetSubList(self) + return ret + + +class HLIBuiltinFunction(HLIPythonObject): + def GetHLIType(self): + return "Builtin Function" + +class HLIFunction(HLIPythonObject): + def GetHLIType(self): + return "Function" + def IsExpandable(self): + return 1 + def GetSubList(self): + ret = [] +# ret.append( MakeHLI( self.myobject.func_argcount, "Arg Count" )) + try: + ret.append( MakeHLI( self.myobject.func_argdefs, "Arg Defs" )) + except AttributeError: + pass + try: + code = self.myobject.__code__ + globs = self.myobject.__globals__ + except AttributeError: + # must be py2.5 or earlier... 
+ code = self.myobject.func_code + globs = self.myobject.func_globals + ret.append(MakeHLI(code, "Code" )) + ret.append(MakeHLI(globs, "Globals" )) + self.InsertDocString(ret) + return ret + +class HLISeq(HLIPythonObject): + def GetHLIType(self): + return "Sequence (abstract!)" + def IsExpandable(self): + return len(self.myobject)>0 + def GetSubList(self): + ret = [] + pos=0 + for item in self.myobject: + ret.append(MakeHLI( item, '['+str(pos)+']' ) ) + pos=pos+1 + self.InsertDocString(ret) + return ret + +class HLIList(HLISeq): + def GetHLIType(self): + return "List" + +class HLITuple(HLISeq): + def GetHLIType(self): + return "Tuple" + +class HLIDict(HLIPythonObject): + def GetHLIType(self): + return "Dict" + def IsExpandable(self): + try: + self.myobject.__doc__ + return 1 + except (AttributeError, TypeError): + return len(self.myobject) > 0 + def GetSubList(self): + ret = [] + keys = list(self.myobject.keys()) + keys.sort() + for key in keys: + ob = self.myobject[key] + ret.append(MakeHLI( ob, str(key) ) ) + self.InsertDocString(ret) + return ret + +# In Python 1.6, strings and Unicode have builtin methods, but we dont really want to see these +class HLIString(HLIPythonObject): + def IsExpandable(self): + return 0 + +TypeMap = { type : HLIClass, + types.FunctionType: HLIFunction, + tuple: HLITuple, + dict: HLIDict, + list: HLIList, + types.ModuleType: HLIModule, + types.CodeType : HLICode, + types.BuiltinFunctionType : HLIBuiltinFunction, + types.FrameType : HLIFrame, + types.TracebackType : HLITraceback, + str : HLIString, + int: HLIPythonObject, + bool: HLIPythonObject, + float: HLIPythonObject, + } + +def MakeHLI( ob, name=None ): + try: + cls = TypeMap[type(ob)] + except KeyError: + # hrmph - this check gets more and more bogus as Python + # improves. Its possible we should just *always* use + # HLIInstance? + if hasattr(ob, '__class__'): # 'new style' class + cls = HLIInstance + else: + cls = HLIPythonObject + return cls( ob, name ) + +######################################### +# +# Dialog related. + + +class DialogShowObject(dialog.Dialog): + def __init__(self, object, title): + self.object = object + self.title = title + dialog.Dialog.__init__(self, win32ui.IDD_LARGE_EDIT) + def OnInitDialog(self): + import re + self.SetWindowText(self.title) + self.edit = self.GetDlgItem(win32ui.IDC_EDIT1) + try: + strval = str(self.object) + except: + t, v, tb = sys.exc_info() + strval = "Exception getting object value\n\n%s:%s" % (t, v) + tb = None + strval = re.sub('\n','\r\n', strval) + self.edit.ReplaceSel(strval) + +def ShowObject(object, title): + dlg = DialogShowObject(object, title) + dlg.DoModal() + +# And some mods for a sizable dialog from Sam Rushing! 
+import win32con +import win32api +import commctrl + +class dynamic_browser (dialog.Dialog): + style = win32con.WS_OVERLAPPEDWINDOW | win32con.WS_VISIBLE + cs = ( + win32con.WS_CHILD | + win32con.WS_VISIBLE | + commctrl.TVS_HASLINES | + commctrl.TVS_LINESATROOT | + commctrl.TVS_HASBUTTONS + ) + + dt = [ + ["Python Object Browser", (0, 0, 200, 200), style, None, (8, "MS Sans Serif")], + ["SysTreeView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), cs] + ] + + def __init__ (self, hli_root): + dialog.Dialog.__init__ (self, self.dt) + self.hier_list = hierlist.HierListWithItems ( + hli_root, + win32ui.IDB_BROWSER_HIER + ) + self.HookMessage (self.on_size, win32con.WM_SIZE) + + def OnInitDialog (self): + self.hier_list.HierInit (self) + return dialog.Dialog.OnInitDialog (self) + + def OnOK(self): + self.hier_list.HierTerm() + self.hier_list = None + return self._obj_.OnOK() + def OnCancel(self): + self.hier_list.HierTerm() + self.hier_list = None + return self._obj_.OnCancel() + + def on_size (self, params): + lparam = params[3] + w = win32api.LOWORD(lparam) + h = win32api.HIWORD(lparam) + self.GetDlgItem (win32ui.IDC_LIST1).MoveWindow((0,0,w,h)) + +def Browse (ob=__main__): + " Browse the argument, or the main dictionary " + root = MakeHLI (ob, 'root') + if not root.IsExpandable(): + raise TypeError("Browse() argument must have __dict__ attribute, or be a Browser supported type") + + dlg = dynamic_browser (root) + dlg.CreateWindow() + +# +# +# Classes for using the browser in an MDI window, rather than a dialog +# +from pywin.mfc import docview +class BrowserTemplate(docview.DocTemplate): + def __init__(self): + docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, BrowserDocument, None, BrowserView) + + def OpenObject(self, root): # Use this instead of OpenDocumentFile. + # Look for existing open document + for doc in self.GetDocumentList(): + if doc.root==root: + doc.GetFirstView().ActivateFrame() + return doc + # not found - new one. + doc = BrowserDocument(self, root) + frame = self.CreateNewFrame(doc) + doc.OnNewDocument() + self.InitialUpdateFrame(frame, doc, 1) + return doc + +class BrowserDocument (docview.Document): + def __init__(self, template, root): + docview.Document.__init__(self, template) + self.root = root + self.SetTitle("Browser: " + root.name) + def OnOpenDocument (self, name): + raise TypeError("This template can not open files") + return 0 + +class BrowserView(docview.TreeView): + def OnInitialUpdate(self): + import commctrl + rc = self._obj_.OnInitialUpdate() + list=hierlist.HierListWithItems( self.GetDocument().root, win32ui.IDB_BROWSER_HIER, win32ui.AFX_IDW_PANE_FIRST) + list.HierInit(self.GetParent()) + list.SetStyle(commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS) + return rc + +template = None +def MakeTemplate(): + global template + if template is None: + template = BrowserTemplate() #win32ui.IDR_PYTHONTYPE, BrowserDocument, None, BrowserView) + +def BrowseMDI(ob=__main__): + """Browse an object using an MDI window. 
+ """ + + MakeTemplate() + root = MakeHLI(ob, repr(ob)) + if not root.IsExpandable(): + raise TypeError("Browse() argument must have __dict__ attribute, or be a Browser supported type") + + template.OpenObject(root) + diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/hierlist.py b/venv/Lib/site-packages/pythonwin/pywin/tools/hierlist.py new file mode 100644 index 00000000..a819c2ee --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/tools/hierlist.py @@ -0,0 +1,321 @@ +# hierlist +# +# IMPORTANT - Please read before using. + +# This module exposes an API for a Hierarchical Tree Control. +# Previously, a custom tree control was included in Pythonwin which +# has an API very similar to this. + +# The current control used is the common "Tree Control". This module exists now +# to provide an API similar to the old control, but for the new Tree control. + +# If you need to use the Tree Control, you may still find this API a reasonable +# choice. However, you should investigate using the tree control directly +# to provide maximum flexibility (but with extra work). + +import sys +import win32ui +import win32con +import win32api +from win32api import RGB + +from pywin.mfc import object, window, docview, dialog +import commctrl + +# helper to get the text of an arbitary item +def GetItemText(item): + if type(item)==type(()) or type(item)==type([]): + use = item[0] + else: + use = item + if type(use)==type(''): + return use + else: + return repr(item) + + +class HierDialog(dialog.Dialog): + def __init__(self, title, hierList, bitmapID = win32ui.IDB_HIERFOLDERS, dlgID = win32ui.IDD_TREE, dll = None, childListBoxID = win32ui.IDC_LIST1): + dialog.Dialog.__init__(self, dlgID, dll ) # reuse this dialog. + self.hierList=hierList + self.dlgID = dlgID + self.title=title +# self.childListBoxID = childListBoxID + def OnInitDialog(self): + self.SetWindowText(self.title) + self.hierList.HierInit(self) + return dialog.Dialog.OnInitDialog(self) + +class HierList(object.Object): + def __init__(self, root, bitmapID = win32ui.IDB_HIERFOLDERS, listBoxId = None, bitmapMask = None): # used to create object. + self.listControl = None + self.bitmapID = bitmapID + self.root = root + self.listBoxId = listBoxId + self.itemHandleMap = {} + self.filledItemHandlesMap = {} + self.bitmapMask = bitmapMask + def __getattr__(self, attr): + try: + return getattr(self.listControl, attr) + except AttributeError: + return object.Object.__getattr__(self, attr) + + def ItemFromHandle(self, handle): + return self.itemHandleMap[handle] + def SetStyle(self, newStyle): + hwnd = self.listControl.GetSafeHwnd() + style = win32api.GetWindowLong(hwnd, win32con.GWL_STYLE); + win32api.SetWindowLong(hwnd, win32con.GWL_STYLE, (style | newStyle) ) + + def HierInit(self, parent, listControl = None ): # Used when window first exists. + # this also calls "Create" on the listbox. 
+ # params - id of listbbox, ID of bitmap, size of bitmaps + if self.bitmapMask is None: + bitmapMask = RGB(0,0,255) + else: + bitmapMask = self.bitmapMask + self.imageList = win32ui.CreateImageList(self.bitmapID, 16, 0, bitmapMask) + if listControl is None: + if self.listBoxId is None: self.listBoxId = win32ui.IDC_LIST1 + self.listControl = parent.GetDlgItem(self.listBoxId) + else: + self.listControl = listControl + lbid = listControl.GetDlgCtrlID() + assert self.listBoxId is None or self.listBoxId == lbid, "An invalid listbox control ID has been specified (specified as %s, but exists as %s)" % (self.listBoxId, lbid) + self.listBoxId = lbid + self.listControl.SetImageList(self.imageList, commctrl.LVSIL_NORMAL) +# self.list.AttachObject(self) + + ## ??? Need a better way to do this - either some way to detect if it's compiled with UNICODE + ## defined, and/or a way to switch the constants based on UNICODE ??? + if sys.version_info[0] < 3: + parent.HookNotify(self.OnTreeItemExpanding, commctrl.TVN_ITEMEXPANDINGA) + parent.HookNotify(self.OnTreeItemSelChanged, commctrl.TVN_SELCHANGEDA) + else: + parent.HookNotify(self.OnTreeItemExpanding, commctrl.TVN_ITEMEXPANDINGW) + parent.HookNotify(self.OnTreeItemSelChanged, commctrl.TVN_SELCHANGEDW) + parent.HookNotify(self.OnTreeItemDoubleClick, commctrl.NM_DBLCLK) + self.notify_parent = parent + + if self.root: + self.AcceptRoot(self.root) + + def DeleteAllItems(self): + self.listControl.DeleteAllItems() + self.root = None + self.itemHandleMap = {} + self.filledItemHandlesMap = {} + + def HierTerm(self): + # Dont want notifies as we kill the list. + parent = self.notify_parent # GetParentFrame() + if sys.version_info[0] < 3: + parent.HookNotify(None, commctrl.TVN_ITEMEXPANDINGA) + parent.HookNotify(None, commctrl.TVN_SELCHANGEDA) + else: + parent.HookNotify(None, commctrl.TVN_ITEMEXPANDINGW) + parent.HookNotify(None, commctrl.TVN_SELCHANGEDW) + parent.HookNotify(None, commctrl.NM_DBLCLK) + + self.DeleteAllItems() + self.listControl = None + self.notify_parent = None # Break a possible cycle + + def OnTreeItemDoubleClick(self, info, extra): + (hwndFrom, idFrom, code) = info + if idFrom != self.listBoxId: return None + item = self.itemHandleMap[self.listControl.GetSelectedItem()] + self.TakeDefaultAction(item) + return 1 + + def OnTreeItemExpanding(self, info, extra): + (hwndFrom, idFrom, code) = info + if idFrom != self.listBoxId: return None + action, itemOld, itemNew, pt = extra + itemHandle = itemNew[0] + if itemHandle not in self.filledItemHandlesMap: + item = self.itemHandleMap[itemHandle] + self.AddSubList(itemHandle, self.GetSubList(item)) + self.filledItemHandlesMap[itemHandle] = None + return 0 + + def OnTreeItemSelChanged(self, info, extra): + (hwndFrom, idFrom, code) = info + if idFrom != self.listBoxId: return None + action, itemOld, itemNew, pt = extra + itemHandle = itemNew[0] + item = self.itemHandleMap[itemHandle] + self.PerformItemSelected(item) + return 1 + + def AddSubList(self, parentHandle, subItems): + for item in subItems: + self.AddItem(parentHandle, item) + + def AddItem(self, parentHandle, item, hInsertAfter = commctrl.TVI_LAST): + text = self.GetText(item) + if self.IsExpandable(item): + cItems = 1 # Trick it !! 
+ else: + cItems = 0 + bitmapCol = self.GetBitmapColumn(item) + bitmapSel = self.GetSelectedBitmapColumn(item) + if bitmapSel is None: bitmapSel = bitmapCol + ## if type(text) is str: + ## text = text.encode("mbcs") + hitem = self.listControl.InsertItem(parentHandle, hInsertAfter, (None, None, None, text, bitmapCol, bitmapSel, cItems, 0)) + self.itemHandleMap[hitem] = item + return hitem + + def _GetChildHandles(self, handle): + ret = [] + try: + handle = self.listControl.GetChildItem(handle) + while 1: + ret.append(handle) + handle = self.listControl.GetNextItem(handle, commctrl.TVGN_NEXT) + except win32ui.error: + # out of children + pass + return ret + def ItemFromHandle(self, handle): + return self.itemHandleMap[handle] + + def Refresh(self, hparent = None): + # Attempt to refresh the given item's sub-entries, but maintain the tree state + # (ie, the selected item, expanded items, etc) + if hparent is None: hparent = commctrl.TVI_ROOT + if hparent not in self.filledItemHandlesMap: + # This item has never been expanded, so no refresh can possibly be required. + return + root_item = self.itemHandleMap[hparent] + old_handles = self._GetChildHandles(hparent) + old_items = list(map( self.ItemFromHandle, old_handles )) + new_items = self.GetSubList(root_item) + # Now an inefficient technique for synching the items. + inew = 0 + hAfter = commctrl.TVI_FIRST + for iold in range(len(old_items)): + inewlook = inew + matched = 0 + while inewlook < len(new_items): + if old_items[iold] == new_items[inewlook]: + matched = 1 + break + inewlook = inewlook + 1 + if matched: + # Insert the new items. +# print "Inserting after", old_items[iold], old_handles[iold] + for i in range(inew, inewlook): +# print "Inserting index %d (%s)" % (i, new_items[i]) + hAfter = self.AddItem(hparent, new_items[i], hAfter) + + inew = inewlook + 1 + # And recursively refresh iold + hold = old_handles[iold] + if hold in self.filledItemHandlesMap: + self.Refresh(hold) + else: + # Remove the deleted items. +# print "Deleting %d (%s)" % (iold, old_items[iold]) + hdelete = old_handles[iold] + # First recurse and remove the children from the map. + for hchild in self._GetChildHandles(hdelete): + del self.itemHandleMap[hchild] + if hchild in self.filledItemHandlesMap: + del self.filledItemHandlesMap[hchild] + self.listControl.DeleteItem(hdelete) + hAfter = old_handles[iold] + # Fill any remaining new items: + for newItem in new_items[inew:]: +# print "Inserting new item", newItem + self.AddItem(hparent, newItem) + def AcceptRoot(self, root): + self.listControl.DeleteAllItems() + self.itemHandleMap = {commctrl.TVI_ROOT : root} + self.filledItemHandlesMap = {commctrl.TVI_ROOT : root} + subItems = self.GetSubList(root) + self.AddSubList(0, subItems) + + def GetBitmapColumn(self, item): + if self.IsExpandable(item): + return 0 + else: + return 4 + def GetSelectedBitmapColumn(self, item): + return None # Use standard. + + def GetSelectedBitmapColumn(self, item): + return 0 + + def CheckChangedChildren(self): + return self.listControl.CheckChangedChildren() + def GetText(self,item): + return GetItemText(item) + def PerformItemSelected(self, item): + try: + win32ui.SetStatusText('Selected ' + self.GetText(item)) + except win32ui.error: # No status bar! + pass + def TakeDefaultAction(self, item): + win32ui.MessageBox('Got item ' + self.GetText(item)) + +########################################################################## +# +# Classes for use with seperate HierListItems. 
+# +# +class HierListWithItems(HierList): + def __init__(self, root, bitmapID = win32ui.IDB_HIERFOLDERS, listBoxID = None, bitmapMask = None): # used to create object. + HierList.__init__(self, root, bitmapID, listBoxID, bitmapMask ) + def DelegateCall( self, fn): + return fn() + def GetBitmapColumn(self, item): + rc = self.DelegateCall(item.GetBitmapColumn) + if rc is None: + rc = HierList.GetBitmapColumn(self, item) + return rc + def GetSelectedBitmapColumn(self, item): + return self.DelegateCall(item.GetSelectedBitmapColumn) + def IsExpandable(self, item): + return self.DelegateCall( item.IsExpandable) + def GetText(self, item): + return self.DelegateCall( item.GetText ) + def GetSubList(self, item): + return self.DelegateCall(item.GetSubList) + def PerformItemSelected(self, item): + func = getattr(item, "PerformItemSelected", None) + if func is None: + return HierList.PerformItemSelected( self, item ) + else: + return self.DelegateCall(func) + + def TakeDefaultAction(self, item): + func = getattr(item, "TakeDefaultAction", None) + if func is None: + return HierList.TakeDefaultAction( self, item ) + else: + return self.DelegateCall(func) + +# A hier list item - for use with a HierListWithItems +class HierListItem: + def __init__(self): + pass + def GetText(self): + pass + def GetSubList(self): + pass + def IsExpandable(self): + pass + def GetBitmapColumn(self): + return None # indicate he should do it. + def GetSelectedBitmapColumn(self): + return None # same as other + # for py3k/rich-comp sorting compatibility. + def __lt__(self, other): + # we want unrelated items to be sortable... + return id(self) < id(other) + # for py3k/rich-comp equality compatibility. + def __eq__(self, other): + return False diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/regedit.py b/venv/Lib/site-packages/pythonwin/pywin/tools/regedit.py new file mode 100644 index 00000000..cec2cf3c --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/tools/regedit.py @@ -0,0 +1,329 @@ +# Regedit - a Registry Editor for Python +import win32api, win32ui, win32con, commctrl +from pywin.mfc import window, docview, dialog +from . import hierlist +import regutil +import string + +def SafeApply( fn, args, err_desc = "" ): + try: + fn(*args) + return 1 + except win32api.error as exc: + msg = "Error " + err_desc + "\r\n\r\n" + exc.strerror + win32ui.MessageBox(msg) + return 0 + +class SplitterFrame(window.MDIChildWnd): + def __init__(self): + # call base CreateFrame + self.images = None + window.MDIChildWnd.__init__(self) + + def OnCreateClient(self, cp, context): + splitter = win32ui.CreateSplitter() + doc = context.doc + frame_rect = self.GetWindowRect() + size = ((frame_rect[2] - frame_rect[0]), + (frame_rect[3] - frame_rect[1])//2) + sub_size = (size[0]//3, size[1]) + splitter.CreateStatic (self, 1, 2) + # CTreeControl view + self.keysview = RegistryTreeView(doc) + # CListControl view + self.valuesview = RegistryValueView(doc) + + splitter.CreatePane (self.keysview, 0, 0, (sub_size)) + splitter.CreatePane (self.valuesview, 0, 1, (0,0)) # size ignored. 
+ splitter.SetRowInfo(0, size[1] ,0) + # Setup items in the imagelist + + return 1 + + def OnItemDoubleClick(self, info, extra): + (hwndFrom, idFrom, code) = info + if idFrom==win32ui.AFX_IDW_PANE_FIRST: + # Tree control + return None + elif idFrom==win32ui.AFX_IDW_PANE_FIRST + 1: + item = self.keysview.SelectedItem() + self.valuesview.EditValue(item) + return 0 + # List control + else: + return None # Pass it on + + def PerformItemSelected(self,item): + return self.valuesview.UpdateForRegItem(item) + + def OnDestroy(self, msg): + window.MDIChildWnd.OnDestroy(self, msg) + if self.images: + self.images.DeleteImageList() + self.images = None + +class RegistryTreeView(docview.TreeView): + def OnInitialUpdate(self): + rc = self._obj_.OnInitialUpdate() + self.frame = self.GetParent().GetParent() + self.hierList = hierlist.HierListWithItems( self.GetHLIRoot(), win32ui.IDB_HIERFOLDERS, win32ui.AFX_IDW_PANE_FIRST) + self.hierList.HierInit(self.frame, self.GetTreeCtrl()) + self.hierList.SetStyle(commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS) + self.hierList.PerformItemSelected = self.PerformItemSelected + + self.frame.HookNotify(self.frame.OnItemDoubleClick, commctrl.NM_DBLCLK) + self.frame.HookNotify(self.OnItemRightClick, commctrl.NM_RCLICK) +# self.HookMessage(self.OnItemRightClick, win32con.WM_RBUTTONUP) + + def GetHLIRoot(self): + doc = self.GetDocument() + regroot = doc.root + subkey = doc.subkey + return HLIRegistryKey(regroot, subkey, "Root") + + def OnItemRightClick(self, notify_data, extra): + # First select the item we right-clicked on. + pt = self.ScreenToClient(win32api.GetCursorPos()) + flags, hItem = self.HitTest(pt) + if hItem==0 or commctrl.TVHT_ONITEM & flags==0: + return None + self.Select(hItem, commctrl.TVGN_CARET) + + menu = win32ui.CreatePopupMenu() + menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1000, "Add Key") + menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1001, "Add Value") + menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1002, "Delete Key") + self.HookCommand(self.OnAddKey, 1000) + self.HookCommand(self.OnAddValue, 1001) + self.HookCommand(self.OnDeleteKey, 1002) + menu.TrackPopupMenu(win32api.GetCursorPos()) # track at mouse position. + return None + + def OnDeleteKey(self,command, code): + hitem = self.hierList.GetSelectedItem() + item = self.hierList.ItemFromHandle(hitem) + msg = "Are you sure you wish to delete the key '%s'?" % (item.keyName,) + id = win32ui.MessageBox(msg, None, win32con.MB_YESNO) + if id != win32con.IDYES: + return + if SafeApply(win32api.RegDeleteKey, (item.keyRoot, item.keyName), "deleting registry key" ): + # Get the items parent. + try: + hparent = self.GetParentItem(hitem) + except win32ui.error: + hparent = None + self.hierList.Refresh(hparent) + + def OnAddKey(self,command, code): + from pywin.mfc import dialog + val = dialog.GetSimpleInput("New key name", '', "Add new key") + if val is None: return # cancelled. + hitem = self.hierList.GetSelectedItem() + item = self.hierList.ItemFromHandle(hitem) + if SafeApply(win32api.RegCreateKey, (item.keyRoot, item.keyName + "\\" + val)): + self.hierList.Refresh(hitem) + + def OnAddValue(self,command, code): + from pywin.mfc import dialog + val = dialog.GetSimpleInput("New value", "", "Add new value") + if val is None: return # cancelled. 
+ hitem = self.hierList.GetSelectedItem() + item = self.hierList.ItemFromHandle(hitem) + if SafeApply(win32api.RegSetValue, (item.keyRoot, item.keyName, win32con.REG_SZ, val)): + # Simply re-select the current item to refresh the right spitter. + self.PerformItemSelected(item) +# self.Select(hitem, commctrl.TVGN_CARET) + + def PerformItemSelected(self, item): + return self.frame.PerformItemSelected(item) + + def SelectedItem(self): + return self.hierList.ItemFromHandle(self.hierList.GetSelectedItem()) + + def SearchSelectedItem(self): + handle = self.hierList.GetChildItem(0) + while 1: +# print "State is", self.hierList.GetItemState(handle, -1) + if self.hierList.GetItemState(handle, commctrl.TVIS_SELECTED): +# print "Item is ", self.hierList.ItemFromHandle(handle) + return self.hierList.ItemFromHandle(handle) + handle = self.hierList.GetNextSiblingItem(handle) + +class RegistryValueView(docview.ListView): + def OnInitialUpdate(self): + hwnd = self._obj_.GetSafeHwnd() + style = win32api.GetWindowLong(hwnd, win32con.GWL_STYLE); + win32api.SetWindowLong(hwnd, win32con.GWL_STYLE, (style & ~commctrl.LVS_TYPEMASK) | commctrl.LVS_REPORT); + + itemDetails = (commctrl.LVCFMT_LEFT, 100, "Name", 0) + self.InsertColumn(0, itemDetails) + itemDetails = (commctrl.LVCFMT_LEFT, 500, "Data", 0) + self.InsertColumn(1, itemDetails) + + def UpdateForRegItem(self, item): + self.DeleteAllItems() + hkey = win32api.RegOpenKey(item.keyRoot, item.keyName) + try: + valNum = 0 + ret = [] + while 1: + try: + res = win32api.RegEnumValue(hkey, valNum) + except win32api.error: + break + name = res[0] + if not name: name = "(Default)" + self.InsertItem(valNum, name) + self.SetItemText(valNum, 1, str(res[1])) + valNum = valNum + 1 + finally: + win32api.RegCloseKey(hkey) + def EditValue(self, item): + # Edit the current value + class EditDialog(dialog.Dialog): + def __init__(self, item): + self.item = item + dialog.Dialog.__init__(self, win32ui.IDD_LARGE_EDIT) + def OnInitDialog(self): + self.SetWindowText("Enter new value") + self.GetDlgItem(win32con.IDCANCEL).ShowWindow(win32con.SW_SHOW) + self.edit = self.GetDlgItem(win32ui.IDC_EDIT1) + # Modify the edit windows style + style = win32api.GetWindowLong(self.edit.GetSafeHwnd(), win32con.GWL_STYLE) + style = style & (~win32con.ES_WANTRETURN) + win32api.SetWindowLong(self.edit.GetSafeHwnd(), win32con.GWL_STYLE, style) + self.edit.SetWindowText(str(self.item)) + self.edit.SetSel(-1) + return dialog.Dialog.OnInitDialog(self) + def OnDestroy(self,msg): + self.newvalue = self.edit.GetWindowText() + + try: + index = self.GetNextItem(-1, commctrl.LVNI_SELECTED) + except win32ui.error: + return # No item selected. + + if index==0: + keyVal = "" + else: + keyVal = self.GetItemText(index,0) + # Query for a new value. 
+ try: + newVal = self.GetItemsCurrentValue(item, keyVal) + except TypeError as details: + win32ui.MessageBox(details) + return + + d = EditDialog(newVal) + if d.DoModal()==win32con.IDOK: + try: + self.SetItemsCurrentValue(item, keyVal, d.newvalue) + except win32api.error as exc: + win32ui.MessageBox("Error setting value\r\n\n%s" % exc.strerror) + self.UpdateForRegItem(item) + + def GetItemsCurrentValue(self, item, valueName): + hkey = win32api.RegOpenKey(item.keyRoot, item.keyName) + try: + val, type = win32api.RegQueryValueEx(hkey, valueName) + if type != win32con.REG_SZ: + raise TypeError("Only strings can be edited") + return val + finally: + win32api.RegCloseKey(hkey) + + def SetItemsCurrentValue(self, item, valueName, value): + # ** Assumes already checked is a string. + hkey = win32api.RegOpenKey(item.keyRoot, item.keyName , 0, win32con.KEY_SET_VALUE) + try: + win32api.RegSetValueEx(hkey, valueName, 0, win32con.REG_SZ, value) + finally: + win32api.RegCloseKey(hkey) + + +class RegTemplate(docview.DocTemplate): + def __init__(self): + docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, None, SplitterFrame, None) + +# def InitialUpdateFrame(self, frame, doc, makeVisible=1): +# self._obj_.InitialUpdateFrame(frame, doc, makeVisible) # call default handler. +# frame.InitialUpdateFrame(doc, makeVisible) + + def OpenRegistryKey(self, root = None, subkey = None): # Use this instead of OpenDocumentFile. + # Look for existing open document + if root is None: root = regutil.GetRootKey() + if subkey is None: subkey = regutil.BuildDefaultPythonKey() + for doc in self.GetDocumentList(): + if doc.root==root and doc.subkey==subkey: + doc.GetFirstView().ActivateFrame() + return doc + # not found - new one. + doc = RegDocument(self, root, subkey) + frame = self.CreateNewFrame(doc) + doc.OnNewDocument() + self.InitialUpdateFrame(frame, doc, 1) + return doc + +class RegDocument (docview.Document): + def __init__(self, template, root, subkey): + docview.Document.__init__(self, template) + self.root = root + self.subkey = subkey + self.SetTitle("Registry Editor: " + subkey) + + def OnOpenDocument (self, name): + raise TypeError("This template can not open files") + return 0 + + +class HLIRegistryKey(hierlist.HierListItem): + def __init__( self, keyRoot, keyName, userName ): + self.keyRoot = keyRoot + self.keyName = keyName + self.userName = userName + hierlist.HierListItem.__init__(self) + def __lt__(self, other): + return self.name < other.name + def __eq__(self, other): + return self.keyRoot==other.keyRoot and \ + self.keyName == other.keyName and \ + self.userName == other.userName + def __repr__(self): + return "<%s with root=%s, key=%s>" % (self.__class__.__name__, self.keyRoot, self.keyName) + def GetText(self): + return self.userName + def IsExpandable(self): + # All keys are expandable, even if they currently have zero children. 
+ return 1 +## hkey = win32api.RegOpenKey(self.keyRoot, self.keyName) +## try: +## keys, vals, dt = win32api.RegQueryInfoKey(hkey) +## return (keys>0) +## finally: +## win32api.RegCloseKey(hkey) + + def GetSubList(self): + hkey = win32api.RegOpenKey(self.keyRoot, self.keyName) + win32ui.DoWaitCursor(1) + try: + keyNum = 0 + ret = [] + while 1: + try: + key = win32api.RegEnumKey(hkey, keyNum) + except win32api.error: + break + ret.append(HLIRegistryKey(self.keyRoot, self.keyName + "\\" + key, key)) + keyNum = keyNum + 1 + finally: + win32api.RegCloseKey(hkey) + win32ui.DoWaitCursor(0) + return ret + +template = RegTemplate() + +def EditRegistry(root = None, key = None): + doc=template.OpenRegistryKey(root, key) + +if __name__=='__main__': + EditRegistry() diff --git a/venv/Lib/site-packages/pythonwin/pywin/tools/regpy.py b/venv/Lib/site-packages/pythonwin/pywin/tools/regpy.py new file mode 100644 index 00000000..f2078f9f --- /dev/null +++ b/venv/Lib/site-packages/pythonwin/pywin/tools/regpy.py @@ -0,0 +1,56 @@ +# (sort-of) Registry editor +import win32ui +import dialog +import win32con +import commctrl + +class RegistryControl: + def __init__(self, key): + self.key = key + +class RegEditPropertyPage(dialog.PropertyPage): + IDC_LISTVIEW = 1000 + def GetTemplate(self): + "Return the template used to create this dialog" + + w = 152 # Dialog width + h = 122 # Dialog height + SS_STD = win32con.WS_CHILD | win32con.WS_VISIBLE + FRAMEDLG_STD = win32con.WS_CAPTION | win32con.WS_SYSMENU + style = FRAMEDLG_STD | win32con.WS_VISIBLE | win32con.DS_SETFONT | win32con.WS_MINIMIZEBOX + template = [[self.caption, (0, 0, w, h), style, None, (8, 'Helv')], ] + lvStyle = SS_STD | commctrl.LVS_EDITLABELS | commctrl.LVS_REPORT | commctrl.LVS_AUTOARRANGE | commctrl.LVS_ALIGNLEFT | win32con.WS_BORDER | win32con.WS_TABSTOP + template.append(["SysListView32", "", self.IDC_LISTVIEW, (10, 10, 185, 100), lvStyle]) + return template + +class RegistryPage(RegEditPropertyPage): + def __init__(self): + self.caption="Path" + RegEditPropertyPage.__init__(self, self.GetTemplate()) + def OnInitDialog(self): + self.listview = self.GetDlgItem(self.IDC_LISTVIEW) + RegEditPropertyPage.OnInitDialog(self) + # Setup the listview columns + itemDetails = (commctrl.LVCFMT_LEFT, 100, "App", 0) + self.listview.InsertColumn(0, itemDetails) + itemDetails = (commctrl.LVCFMT_LEFT, 1024, "Paths", 0) + self.listview.InsertColumn(1, itemDetails) + + index = self.listview.InsertItem(0,"App") + self.listview.SetItemText(index, 1, "Path") + + +class RegistrySheet(dialog.PropertySheet): + def __init__(self, title): + dialog.PropertySheet.__init__(self, title) + self.HookMessage(self.OnActivate, win32con.WM_ACTIVATE) + def OnActivate(self, msg): + print("OnAcivate") + +def t(): + ps=RegistrySheet('Registry Settings') + ps.AddPage(RegistryPage()) + ps.DoModal() + +if __name__=='__main__': + t() diff --git a/venv/Lib/site-packages/pythonwin/scintilla.dll b/venv/Lib/site-packages/pythonwin/scintilla.dll new file mode 100644 index 00000000..f1a6aff5 Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/scintilla.dll differ diff --git a/venv/Lib/site-packages/pythonwin/win32ui.pyd b/venv/Lib/site-packages/pythonwin/win32ui.pyd new file mode 100644 index 00000000..303d7f4b Binary files /dev/null and b/venv/Lib/site-packages/pythonwin/win32ui.pyd differ diff --git a/venv/Lib/site-packages/pythonwin/win32uiole.pyd b/venv/Lib/site-packages/pythonwin/win32uiole.pyd new file mode 100644 index 00000000..c510b5b1 Binary files /dev/null and 
b/venv/Lib/site-packages/pythonwin/win32uiole.pyd differ diff --git a/venv/Lib/site-packages/pywin32-301.dist-info/DESCRIPTION.rst b/venv/Lib/site-packages/pywin32-301.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..a93deaf0 --- /dev/null +++ b/venv/Lib/site-packages/pywin32-301.dist-info/DESCRIPTION.rst @@ -0,0 +1,99 @@ +# pywin32 + +[![CI](https://github.com/mhammond/pywin32/workflows/CI/badge.svg)](https://github.com/mhammond/pywin32/actions?query=workflow%3ACI) +[![PyPI - Version](https://img.shields.io/pypi/v/pywin32.svg)](https://pypi.org/project/pywin32) +[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/pywin32.svg)](https://pypi.org/project/pywin32) +[![PyPI - Downloads](https://img.shields.io/pypi/dm/pywin32.svg)](https://pypi.org/project/pywin32) +[![License - PSF-2.0](https://img.shields.io/badge/license-PSF--2.0-9400d3.svg)](https://spdx.org/licenses/PSF-2.0.html) + +----- + +This is the readme for the Python for Win32 (pywin32) extensions, which provides access to many of the Windows APIs from Python. + +See [CHANGES.txt](https://github.com/mhammond/pywin32/blob/master/CHANGES.txt) for recent notable changes. + +Build 228 is the last build supporting Python 2, and as part of this transition, +all code in the repository is now using Python 3 syntax. +To highlight and celebrate this change, build 228 is the last numbered 2XX - the +following build numbers start at 300. + +In other words, there is no build 229 - the build numbers jump from 228 to 300. + +As of build 222, pywin32 has a new home at [github](https://github.com/mhammond/pywin32). +You can find build 221 and later on github and older versions can be found on +the old project home at [sourceforge](https://sourceforge.net/projects/pywin32/) + +A special shout-out to @xoviat who provided enormous help with the github move! + +## Support + +Feel free to [open issues](https://github.com/mhammond/pywin32/issues) for +all bugs (or suspected bugs) in pywin32. [pull-requests](https://github.com/mhammond/pywin32/pulls) +for all bugs or features are also welcome. + +However, please **do not open github issues for general support requests**, or +for problems or questions using the modules in this package - they will be +closed. For such issues, please email the +[python-win32 mailing list](http://mail.python.org/mailman/listinfo/python-win32) - +note that you must be subscribed to the list before posting. + +## Binaries +By far the easiest way to use pywin32 is to grab binaries from the [most recent release](https://github.com/mhammond/pywin32/releases) + +## Installing via PIP + +You can install pywin32 via pip: +> pip install pywin32 + +Note that if you want to use pywin32 for "system wide" features, such as +registering COM objects or implementing Windows Services, then you must run +the following command from an elevated command prompt: + +> python Scripts/pywin32_postinstall.py -install + +## Building from source + +Building from source has been simplified recently - you just need Visual Studio +and the Windows 10 SDK installed (the free compilers probably work too, but +haven't been tested - let me know your experiences!) + +`setup.py` is a standard distutils build script. You probably want: + +> python setup.py install + +or + +> python setup.py --help + +You can run `setup.py` without any arguments to see +specific information about dependencies. 
A vanilla MSVC installation should +be able to build most extensions and list any extensions that could not be +built due to missing libraries - if the build actually fails with your +configuration, please [open an issue](https://github.com/mhammond/pywin32/issues). + +## Release process + +The following steps are performed when making a new release - this is mainly +to form a checklist so mhammond doesn't forget what to do :) + +* Ensure CHANGES.txt has everything worth noting, commit it. + +* Update setup.py with the new build number. + +* Execute build.bat, wait forever, test the artifacts. + +* Commit setup.py (so the new build number is in the repo), create a new git tag + +* Update setup.py with the new build number + ".1" (eg, 123.1), to ensure + future test builds aren't mistaken for the real release. + +* Make sure everything is pushed to github, including the tag (ie, + `git push --tags`) + +* Upload the .exe installers to github (using the web UI), the .whl files to + pypi (using `py -3.5 -m twine upload dist/*XXX*.whl` where `XXX` is the build + number). + +* Send mail to python-win32 + + diff --git a/env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/INSTALLER b/venv/Lib/site-packages/pywin32-301.dist-info/INSTALLER similarity index 100% rename from env/lib/python2.7/site-packages/pyobjc_core-5.2.dist-info/INSTALLER rename to venv/Lib/site-packages/pywin32-301.dist-info/INSTALLER diff --git a/venv/Lib/site-packages/pywin32-301.dist-info/METADATA b/venv/Lib/site-packages/pywin32-301.dist-info/METADATA new file mode 100644 index 00000000..c6ebc9b5 --- /dev/null +++ b/venv/Lib/site-packages/pywin32-301.dist-info/METADATA @@ -0,0 +1,119 @@ +Metadata-Version: 2.0 +Name: pywin32 +Version: 301 +Summary: Python for Window Extensions +Home-page: https://github.com/mhammond/pywin32 +Author: Mark Hammond (et al) +Author-email: mhammond@skippinet.com.au +License: PSF +Platform: UNKNOWN +Classifier: Environment :: Win32 (MS Windows) +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython + +# pywin32 + +[![CI](https://github.com/mhammond/pywin32/workflows/CI/badge.svg)](https://github.com/mhammond/pywin32/actions?query=workflow%3ACI) +[![PyPI - Version](https://img.shields.io/pypi/v/pywin32.svg)](https://pypi.org/project/pywin32) +[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/pywin32.svg)](https://pypi.org/project/pywin32) +[![PyPI - Downloads](https://img.shields.io/pypi/dm/pywin32.svg)](https://pypi.org/project/pywin32) +[![License - PSF-2.0](https://img.shields.io/badge/license-PSF--2.0-9400d3.svg)](https://spdx.org/licenses/PSF-2.0.html) + +----- + +This is the readme for the Python for Win32 (pywin32) extensions, which provides access to many of the Windows APIs from Python. + +See [CHANGES.txt](https://github.com/mhammond/pywin32/blob/master/CHANGES.txt) for recent notable changes. + +Build 228 is the last build supporting Python 2, and as part of this transition, +all code in the repository is now using Python 3 syntax. 
+To highlight and celebrate this change, build 228 is the last numbered 2XX - the +following build numbers start at 300. + +In other words, there is no build 229 - the build numbers jump from 228 to 300. + +As of build 222, pywin32 has a new home at [github](https://github.com/mhammond/pywin32). +You can find build 221 and later on github and older versions can be found on +the old project home at [sourceforge](https://sourceforge.net/projects/pywin32/) + +A special shout-out to @xoviat who provided enormous help with the github move! + +## Support + +Feel free to [open issues](https://github.com/mhammond/pywin32/issues) for +all bugs (or suspected bugs) in pywin32. [pull-requests](https://github.com/mhammond/pywin32/pulls) +for all bugs or features are also welcome. + +However, please **do not open github issues for general support requests**, or +for problems or questions using the modules in this package - they will be +closed. For such issues, please email the +[python-win32 mailing list](http://mail.python.org/mailman/listinfo/python-win32) - +note that you must be subscribed to the list before posting. + +## Binaries +By far the easiest way to use pywin32 is to grab binaries from the [most recent release](https://github.com/mhammond/pywin32/releases) + +## Installing via PIP + +You can install pywin32 via pip: +> pip install pywin32 + +Note that if you want to use pywin32 for "system wide" features, such as +registering COM objects or implementing Windows Services, then you must run +the following command from an elevated command prompt: + +> python Scripts/pywin32_postinstall.py -install + +## Building from source + +Building from source has been simplified recently - you just need Visual Studio +and the Windows 10 SDK installed (the free compilers probably work too, but +haven't been tested - let me know your experiences!) + +`setup.py` is a standard distutils build script. You probably want: + +> python setup.py install + +or + +> python setup.py --help + +You can run `setup.py` without any arguments to see +specific information about dependencies. A vanilla MSVC installation should +be able to build most extensions and list any extensions that could not be +built due to missing libraries - if the build actually fails with your +configuration, please [open an issue](https://github.com/mhammond/pywin32/issues). + +## Release process + +The following steps are performed when making a new release - this is mainly +to form a checklist so mhammond doesn't forget what to do :) + +* Ensure CHANGES.txt has everything worth noting, commit it. + +* Update setup.py with the new build number. + +* Execute build.bat, wait forever, test the artifacts. + +* Commit setup.py (so the new build number is in the repo), create a new git tag + +* Update setup.py with the new build number + ".1" (eg, 123.1), to ensure + future test builds aren't mistaken for the real release. + +* Make sure everything is pushed to github, including the tag (ie, + `git push --tags`) + +* Upload the .exe installers to github (using the web UI), the .whl files to + pypi (using `py -3.5 -m twine upload dist/*XXX*.whl` where `XXX` is the build + number). 
+ +* Send mail to python-win32 + + diff --git a/venv/Lib/site-packages/pywin32-301.dist-info/RECORD b/venv/Lib/site-packages/pywin32-301.dist-info/RECORD new file mode 100644 index 00000000..75dfdaa1 --- /dev/null +++ b/venv/Lib/site-packages/pywin32-301.dist-info/RECORD @@ -0,0 +1,1051 @@ +../../Scripts/__pycache__/pywin32_postinstall.cpython-36.pyc,, +../../Scripts/__pycache__/pywin32_testall.cpython-36.pyc,, +../../Scripts/pywin32_postinstall.py,sha256=ia1DFvlvvlx-_g2mcGwjmpoL_5VNwAJ36-Obvks3Enk,24930 +../../Scripts/pywin32_testall.py,sha256=TDYPFvs7vJiE0fe3YN-jvF4HAL9S690d5UmTNwgGFgY,3251 +PyWin32.chm,sha256=whobL-tgDhyt3aATvTJfwLpIECCcjDL-H3rCcJSXvoE,2663992 +__pycache__/pythoncom.cpython-36.pyc,, +adodbapi/__init__.py,sha256=3ok3sVS6DQUKQOV01fe1WzzYDdIsOGbudIS_PrOY9CE,2097 +adodbapi/__pycache__/__init__.cpython-36.pyc,, +adodbapi/__pycache__/ado_consts.cpython-36.pyc,, +adodbapi/__pycache__/adodbapi.cpython-36.pyc,, +adodbapi/__pycache__/apibase.cpython-36.pyc,, +adodbapi/__pycache__/is64bit.cpython-36.pyc,, +adodbapi/__pycache__/process_connect_string.cpython-36.pyc,, +adodbapi/__pycache__/remote.cpython-36.pyc,, +adodbapi/__pycache__/schema_table.cpython-36.pyc,, +adodbapi/__pycache__/setup.cpython-36.pyc,, +adodbapi/ado_consts.py,sha256=zjUg7mATcQT1xnrWVI17PeWO0LmxAFUFDoFmMdOywmU,10615 +adodbapi/adodbapi.py,sha256=OUPQ9Vvpq7MB9obBWxURfDR2n5ueeEIOQSfRr3IZbhQ,46685 +adodbapi/apibase.py,sha256=tzup7sh-aeVqTo0n8pYa80tSmGVepkATz8o4cABT00s,28881 +adodbapi/examples/__pycache__/db_print.cpython-36.pyc,, +adodbapi/examples/__pycache__/db_table_names.cpython-36.pyc,, +adodbapi/examples/__pycache__/xls_read.cpython-36.pyc,, +adodbapi/examples/__pycache__/xls_write.cpython-36.pyc,, +adodbapi/examples/db_print.py,sha256=JhG1LnrFGOWDTpMoqJ0urpDkB_JQY0nkW4X8lLmB5JY,2236 +adodbapi/examples/db_table_names.py,sha256=DnkXT98AEyl5oZ9MwzSJZpo0ax4ZfzXxzaF2-zCNYnw,522 +adodbapi/examples/xls_read.py,sha256=f_wDZHD2PZSp8RSDKGiP_1DdFAnKQyC0zGc_8evqQRg,1126 +adodbapi/examples/xls_write.py,sha256=bcB2FWU7q54UjcpjKREn4N2kG--PJnWC3pvBNh50ukI,1381 +adodbapi/is64bit.py,sha256=hBAkVmxEv-HIE1AYbyloxvBvqSeqqkW2JDp62MvtYy0,1203 +adodbapi/license.txt,sha256=YQ4MOiSiassEcPj16wKY35Zvw4DO6OD-vaxnkbYgnWw,26423 +adodbapi/process_connect_string.py,sha256=UYnrP3Mcp8TojlvMw_7He-TovdAi16ziyxdhwkl6ZBk,5494 +adodbapi/readme.txt,sha256=Lk7i9ZFICl7T4nULsEzKBiHw8bGVyaLzIMFO0FQd290,5077 +adodbapi/remote.py,sha256=PtumtNEm5wvMcf_IsjyS6eoXQx87Srlxw3VxWWDpens,19896 +adodbapi/schema_table.py,sha256=dh7ls8U4j38nNJFygCIHE04b_OveJ1gWATc7CzyrHYk,432 +adodbapi/setup.py,sha256=t6yqYnE6JJpyTWyHyaDW-4kGch2rNtSCKLCy8uTJnQg,2982 +adodbapi/test/__pycache__/adodbapitest.cpython-36.pyc,, +adodbapi/test/__pycache__/adodbapitestconfig.cpython-36.pyc,, +adodbapi/test/__pycache__/dbapi20.cpython-36.pyc,, +adodbapi/test/__pycache__/is64bit.cpython-36.pyc,, +adodbapi/test/__pycache__/setuptestframework.cpython-36.pyc,, +adodbapi/test/__pycache__/test_adodbapi_dbapi20.cpython-36.pyc,, +adodbapi/test/__pycache__/tryconnection.cpython-36.pyc,, +adodbapi/test/adodbapitest.py,sha256=QdEwB714eQMVMo8GlAcP0huaPTSG3cI2MqVh3x-Hs_w,55811 +adodbapi/test/adodbapitestconfig.py,sha256=paSMUSbug5dI0STdL5O4PE8-ExURNfeQrgoNpfy-90c,7361 +adodbapi/test/dbapi20.py,sha256=CxKpQlsefhMUjJ4D9USDyjNeT3TTPhHDarL1hXEU8q8,34080 +adodbapi/test/is64bit.py,sha256=Zh0OeyeH6UoQULCKb1Ri07WnfIsq2Bn_jOJkllt6GkY,1191 +adodbapi/test/setuptestframework.py,sha256=HFS0aWurhIHAgOsIJdpYgLTaIJ58d_AGj7Zognmm8Zw,4159 
+adodbapi/test/test_adodbapi_dbapi20.py,sha256=lo92E-mcm-lle3GmoVOx4tc1xGmlG6wXHjxi-PgKbPg,6000 +adodbapi/test/tryconnection.py,sha256=qSDrckpkVKb6KTyGddOUWs238vYsOD-v75F1Bx2ZWNw,1079 +isapi/PyISAPI_loader.dll,sha256=TKZWIB_pL8SA2KuuuQtCGkCPXntbju_3RzH_8FPAoIw,65024 +isapi/README.txt,sha256=0ItkP01QDhdLobsX2askhZMJV8wBaPFMjQVmb7jD9VA,323 +isapi/__init__.py,sha256=vsFtJzww4nx3swoPXCjQZWAo6VbFxKP7RKWPHIn1KCA,1240 +isapi/__pycache__/__init__.cpython-36.pyc,, +isapi/__pycache__/install.cpython-36.pyc,, +isapi/__pycache__/isapicon.cpython-36.pyc,, +isapi/__pycache__/simple.cpython-36.pyc,, +isapi/__pycache__/threaded_extension.cpython-36.pyc,, +isapi/doc/isapi.html,sha256=C2fMHvBsz9iBwp2mHHdcUrY0x7yh6rWxmsKhaFsBZO4,4160 +isapi/install.py,sha256=gTqb_lpSDDQWv8ecb6AlNCcuw1rBVHSjUx1SQgENjoM,27608 +isapi/isapicon.py,sha256=wI8psXj2kZ30sTNgLTXTWCqaZ4VyNhnvWcjg8v-j8F4,5221 +isapi/samples/README.txt,sha256=xHvkVPsulzb8b-yrMWVqOZmZFCPVNO19qGtgeN_JJB4,1007 +isapi/samples/__pycache__/advanced.cpython-36.pyc,, +isapi/samples/__pycache__/redirector.cpython-36.pyc,, +isapi/samples/__pycache__/redirector_asynch.cpython-36.pyc,, +isapi/samples/__pycache__/redirector_with_filter.cpython-36.pyc,, +isapi/samples/__pycache__/test.cpython-36.pyc,, +isapi/samples/advanced.py,sha256=GdgCvWQVCZ3Rqvp11fEiZaKV7ZeqaHTrtncIIM2jqHs,8100 +isapi/samples/redirector.py,sha256=9x4RyyHL9d3DGubrN2uMcIQkhdhiLALGJ4JSfzrhVd0,4719 +isapi/samples/redirector_asynch.py,sha256=3nZWuYr9fJFEmq2l7z4MxuCyFoa5nw5JA6pq8POy6MY,2819 +isapi/samples/redirector_with_filter.py,sha256=8QlUrNjrXtoLJU1cr2aLre9yRT3vyDWOP6GP4XFUmtM,6630 +isapi/samples/test.py,sha256=_rtMCfmriYgDAomR37D1vRF61OsUvbQhJY8YIdpxRZI,6315 +isapi/simple.py,sha256=GWA81TbYFlOkiuHlPLRia7mKvAy8eKfzWPoy3pMEoDw,2490 +isapi/test/README.txt,sha256=0gZX7PtEg8dFwGzDVUqFOgAvhvo5NTjVwIeVpTvhNYc,111 +isapi/test/__pycache__/extension_simple.cpython-36.pyc,, +isapi/test/extension_simple.py,sha256=3JUAyyGRtEd_Tdv2yt2vcBo3cmSmfvpqWcIABdmH_OY,4480 +isapi/threaded_extension.py,sha256=0WJEEpNqXP6Sqo17uVgUvGBdOcVpzCKwkPLXNc91_aY,7214 +pythoncom.py,sha256=CCRsuwmh4inouqqm-GBwyyKfvPwqg7UXtR1e1BLEzoE,138 +pythonwin/Pythonwin.exe,sha256=KlacLeLO3LGjQO1r6qyETVV58lOTm8rDBc9iQ9RmREI,72704 +pythonwin/dde.pyd,sha256=cYjuQ36vmP8KadlyG-gGU-rhack07sPQt-HM57l3lKU,144384 +pythonwin/license.txt,sha256=ETzTz3hOWGiF8B-T5d9498fACzTXbMQQHgKc0v1iIRM,1520 +pythonwin/mfc140u.dll,sha256=dgMIz4vtrrxFAASWItCN3KygAkrL07a9yhYY7EipFZc,6065952 +pythonwin/mfcm140u.dll,sha256=G50Yc3dnT9R5FF3PoXJpYVwtaNMsfG6JzKjEn7Bpruk,106280 +pythonwin/pywin/Demos/__pycache__/cmdserver.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/createwin.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/demoutils.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/dibdemo.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/dlgtest.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/dyndlg.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/fontdemo.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/guidemo.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/hiertest.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/menutest.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/objdoc.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/openGLDemo.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/progressbar.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/sliderdemo.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/splittst.cpython-36.pyc,, +pythonwin/pywin/Demos/__pycache__/threadedgui.cpython-36.pyc,, 
+pythonwin/pywin/Demos/__pycache__/toolbar.cpython-36.pyc,, +pythonwin/pywin/Demos/app/__pycache__/basictimerapp.cpython-36.pyc,, +pythonwin/pywin/Demos/app/__pycache__/customprint.cpython-36.pyc,, +pythonwin/pywin/Demos/app/__pycache__/demoutils.cpython-36.pyc,, +pythonwin/pywin/Demos/app/__pycache__/dlgappdemo.cpython-36.pyc,, +pythonwin/pywin/Demos/app/__pycache__/dojobapp.cpython-36.pyc,, +pythonwin/pywin/Demos/app/__pycache__/helloapp.cpython-36.pyc,, +pythonwin/pywin/Demos/app/basictimerapp.py,sha256=TgUY9t36BPBBQ60utNk89T4-JK8rCrxIsGoSNBwbklk,6382 +pythonwin/pywin/Demos/app/customprint.py,sha256=7cxiMO65hRzLwHuoLMojVNpkJVUOAs2dlwbafcpUOL0,6403 +pythonwin/pywin/Demos/app/demoutils.py,sha256=RPMijxJoul3XuS_kCBXTXtrrr-9CsTFgUvnjb7NkXjU,1311 +pythonwin/pywin/Demos/app/dlgappdemo.py,sha256=xmm0jEPeDQbwkubrsSj3-WbmThCiCVkkM6XbvdAqtTE,1249 +pythonwin/pywin/Demos/app/dojobapp.py,sha256=ed8dlcIKynv1lw8pJZTmk2G7ZC_fkWRasxFHS0fNPAc,1394 +pythonwin/pywin/Demos/app/helloapp.py,sha256=Hlxtnc44SV0YZYQYy0z8pfOsUQatFB7LpcLU1c5BiF8,1577 +pythonwin/pywin/Demos/cmdserver.py,sha256=bdoVkq8sMUEDIKL8-yTb18LvFhTz8aD9U-TEERW33tw,2641 +pythonwin/pywin/Demos/createwin.py,sha256=MRunnlRHgSf_mXE0b4YUHqLq6DCZa2hzqseHcOZQbMw,2451 +pythonwin/pywin/Demos/demoutils.py,sha256=V6uSbTcwetplNJHU4DRHSoGwq7EiIXG4hVUAtXRMfS0,1356 +pythonwin/pywin/Demos/dibdemo.py,sha256=M_nsmbDADq8ujv61hIAKss-GDctiml2vjBHMDg7PI8c,1957 +pythonwin/pywin/Demos/dlgtest.py,sha256=sKIKDjjLN4fNGNcmf_xp5JcBdBLT7di7LrV0iNqtktI,4098 +pythonwin/pywin/Demos/dyndlg.py,sha256=1KKcS-SOQ93JdFm-LfFKzb5z3N9eNVxg478SfUeMMDg,2496 +pythonwin/pywin/Demos/fontdemo.py,sha256=Xs_WnPUmpaF4t815c_-F61DOymnObcG-mbjBHT1i-3U,2531 +pythonwin/pywin/Demos/guidemo.py,sha256=82cJsHTpYVlWmO71EKmqWDlr8u8fsKxX1WlMYneNEKw,2224 +pythonwin/pywin/Demos/hiertest.py,sha256=_jCC23x5Yr2rc3j8HO25_bEuxJxoE4jv6Zs_TNX8D1E,3277 +pythonwin/pywin/Demos/menutest.py,sha256=vQhwJkwCuOL8zEaLyTh7dzWvP797A3KbFEvSwelnQBk,442 +pythonwin/pywin/Demos/objdoc.py,sha256=t3tZij91Qy4lLY3r3vc7DeHpi5MCoTjqA31OoKREwYg,1556 +pythonwin/pywin/Demos/ocx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pythonwin/pywin/Demos/ocx/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/Demos/ocx/__pycache__/demoutils.cpython-36.pyc,, +pythonwin/pywin/Demos/ocx/__pycache__/flash.cpython-36.pyc,, +pythonwin/pywin/Demos/ocx/__pycache__/msoffice.cpython-36.pyc,, +pythonwin/pywin/Demos/ocx/__pycache__/ocxserialtest.cpython-36.pyc,, +pythonwin/pywin/Demos/ocx/__pycache__/ocxtest.cpython-36.pyc,, +pythonwin/pywin/Demos/ocx/__pycache__/webbrowser.cpython-36.pyc,, +pythonwin/pywin/Demos/ocx/demoutils.py,sha256=7GACxwAWDTsiDmDZxvqofHRs3dR9czQOlverHArkMc4,1363 +pythonwin/pywin/Demos/ocx/flash.py,sha256=uAx88X_SNeKihIdxfgU15_J5_PW_Yu8usstj0p8_vqI,2667 +pythonwin/pywin/Demos/ocx/msoffice.py,sha256=Sd6BwfE3fQB07W_ePsT6-1WJ46g2OxqdW6sgaIazAVk,4421 +pythonwin/pywin/Demos/ocx/ocxserialtest.py,sha256=ff5kXQV6BkIdXgx9SiZKLtzr9NlPEd0BlZmwFpqDPmk,3039 +pythonwin/pywin/Demos/ocx/ocxtest.py,sha256=Vj-pSdSHkzCCTH8IQu-_SunSOYmS-BOEBrFbpEwdcTI,5495 +pythonwin/pywin/Demos/ocx/webbrowser.py,sha256=IUOIorNsyxFbkrG9U8hTA6_Dk22iBP9_hZBgCVL6t7Y,2022 +pythonwin/pywin/Demos/openGLDemo.py,sha256=sVSnnyvsAdfuakkaTBTmtx6mhstAOmwIyP9rMT90kHA,8583 +pythonwin/pywin/Demos/progressbar.py,sha256=N0aP-tkWAKRFW04dfENysuZgXmbxphRHJsbqkMNYQOI,2039 +pythonwin/pywin/Demos/sliderdemo.py,sha256=vpiC60p6tFXydv7rtrM6e76KOk1pjedtr2ih2LdHAK4,2007 +pythonwin/pywin/Demos/splittst.py,sha256=6UK5APHx4XO02IUcjgSgxkhH9PEuxCEzWCPy9J3ncDA,2528 
+pythonwin/pywin/Demos/threadedgui.py,sha256=hBT0gH1YGYWmhm3zSqlf-0JwPsSx0jxTa7esA--_H3Q,5511 +pythonwin/pywin/Demos/toolbar.py,sha256=LHaN0JwVLD9UIil2KsX3PM7cbK-9MUQFa7xipcnZf9w,2745 +pythonwin/pywin/IDLE.cfg,sha256=Cc0zS72KlyM2CRPbY-HdNEu1-urNonC1dSnA2juK9z4,742 +pythonwin/pywin/__init__.py,sha256=f2n7Nv_-zuNvSWWd5aIo5la9nHpgw_3a_qyzxOXEiUU,475 +pythonwin/pywin/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/debugger/__init__.py,sha256=k39G6NFpEVZ00GaRLpq9Gdu1SqofMwqmLcnxvvTzNt4,2737 +pythonwin/pywin/debugger/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/debugger/__pycache__/configui.cpython-36.pyc,, +pythonwin/pywin/debugger/__pycache__/dbgcon.cpython-36.pyc,, +pythonwin/pywin/debugger/__pycache__/dbgpyapp.cpython-36.pyc,, +pythonwin/pywin/debugger/__pycache__/debugger.cpython-36.pyc,, +pythonwin/pywin/debugger/__pycache__/fail.cpython-36.pyc,, +pythonwin/pywin/debugger/configui.py,sha256=k-KTW-_0-pJuTYS4WeGFRKAWvMvY-0NAMAAIYS4xys4,1012 +pythonwin/pywin/debugger/dbgcon.py,sha256=VHefmXluDIQSTKsw8C6KGiSJ_Y8sGzNaWXJ2IPiUN7s,812 +pythonwin/pywin/debugger/dbgpyapp.py,sha256=0Wrz5ZKTHxjXHrzStJmN468Rxhi9vEhDlxIKTlUNl2s,1416 +pythonwin/pywin/debugger/debugger.py,sha256=b-rKI5HLg0L1zGTGAJYjbU1s4kH0kytmpLr_ezhP0PI,31176 +pythonwin/pywin/debugger/fail.py,sha256=CgP0pb4OptOxHNWyAvb2Jpe9fbzbzaSdOfawkSssvIE,824 +pythonwin/pywin/default.cfg,sha256=G56DWfOgVjBbjSUbUBfKL5Uq-0IMG-aj_31bt_X2sZ8,6785 +pythonwin/pywin/dialogs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pythonwin/pywin/dialogs/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/dialogs/__pycache__/ideoptions.cpython-36.pyc,, +pythonwin/pywin/dialogs/__pycache__/list.cpython-36.pyc,, +pythonwin/pywin/dialogs/__pycache__/login.cpython-36.pyc,, +pythonwin/pywin/dialogs/__pycache__/status.cpython-36.pyc,, +pythonwin/pywin/dialogs/ideoptions.py,sha256=HTaNdixfMNMtcYti5jfGfge6fV6-QeWAAFAAfCxrK8Q,4168 +pythonwin/pywin/dialogs/list.py,sha256=it9epzGT5nNlTCh6I2t3zvRDHW7KNvr_-brPDhQ2B8U,3803 +pythonwin/pywin/dialogs/login.py,sha256=1JeykzL1qBqh0K9AnnwsaqP5EUPvuaaPnFmQnGGZWc8,4250 +pythonwin/pywin/dialogs/status.py,sha256=a8fWhhPUtNA_SM0jy4tbxLC_EErsM2fnxLEk9NHjnto,5828 +pythonwin/pywin/docking/DockingBar.py,sha256=05Eh8fX2CSCt8KD0So7NB2QdycwAzrfi-BzJEKZ7WBM,18573 +pythonwin/pywin/docking/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pythonwin/pywin/docking/__pycache__/DockingBar.cpython-36.pyc,, +pythonwin/pywin/docking/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/framework/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pythonwin/pywin/framework/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/app.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/bitmap.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/cmdline.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/dbgcommands.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/dlgappcore.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/help.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/interact.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/intpyapp.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/intpydde.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/mdi_pychecker.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/scriptutils.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/sgrepmdi.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/startup.cpython-36.pyc,, 
+pythonwin/pywin/framework/__pycache__/stdin.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/toolmenu.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/window.cpython-36.pyc,, +pythonwin/pywin/framework/__pycache__/winout.cpython-36.pyc,, +pythonwin/pywin/framework/app.py,sha256=mBAsUOLIpa0J6b0DL8-JV0LPZcYrl-AjK7ohRxWBs4o,13984 +pythonwin/pywin/framework/bitmap.py,sha256=BVBetLYUtfEcC8_wDQyzv7z19ge-2jqnTQ84Y3BsSNA,4653 +pythonwin/pywin/framework/cmdline.py,sha256=5PtghkIRvdIsDtDwbXQE23evNEq3PKBPLE6aDCDKGtw,1165 +pythonwin/pywin/framework/dbgcommands.py,sha256=f4hOw3b-e85fYqAg2kr8X15CE7S0uAzh3wVgo7PQSyQ,5760 +pythonwin/pywin/framework/dlgappcore.py,sha256=EOUBYCLrRJWemVoSgZFRBKMja3RbydncBblv2EeHZbw,1804 +pythonwin/pywin/framework/editor/ModuleBrowser.py,sha256=dhxJDjsi3r8nXHl6nmTY2kb2oKNd0u3jQ7eRoqJesOE,6871 +pythonwin/pywin/framework/editor/__init__.py,sha256=sQiJHEa9-JoPrSLy3aAtBHnuBuYJWBmNEvCJ-0fjgMo,2838 +pythonwin/pywin/framework/editor/__pycache__/ModuleBrowser.cpython-36.pyc,, +pythonwin/pywin/framework/editor/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/framework/editor/__pycache__/configui.cpython-36.pyc,, +pythonwin/pywin/framework/editor/__pycache__/document.cpython-36.pyc,, +pythonwin/pywin/framework/editor/__pycache__/editor.cpython-36.pyc,, +pythonwin/pywin/framework/editor/__pycache__/frame.cpython-36.pyc,, +pythonwin/pywin/framework/editor/__pycache__/template.cpython-36.pyc,, +pythonwin/pywin/framework/editor/__pycache__/vss.cpython-36.pyc,, +pythonwin/pywin/framework/editor/color/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pythonwin/pywin/framework/editor/color/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/framework/editor/color/__pycache__/coloreditor.cpython-36.pyc,, +pythonwin/pywin/framework/editor/color/coloreditor.py,sha256=nN8g2GCD2o1uU4whZ1digenwS3ikZSKW1zjyP3ALXks,20730 +pythonwin/pywin/framework/editor/configui.py,sha256=4ZpR3QWM_K6hJhF9uqLWkqOypmv9Sh3j_vPOX-2GNtY,9875 +pythonwin/pywin/framework/editor/document.py,sha256=UbkF66LDQHlrKzY_JJJc1SZznrgBDVQgoX6Wc9FrWfw,11877 +pythonwin/pywin/framework/editor/editor.py,sha256=RzgxntBb8I0ibvJaM3_18AfuGYpVzW6j6PTOgHyc0Zc,15576 +pythonwin/pywin/framework/editor/frame.py,sha256=lPoixRsgPfGTDYIMqJ7i3tcVMyb0WVpR-sj-RbsA18M,3107 +pythonwin/pywin/framework/editor/template.py,sha256=OqQZN9ghPWM1hurRxwFiYWjj8q0jDJX4GrCSSv6hbrM,1792 +pythonwin/pywin/framework/editor/vss.py,sha256=xfA7Se_-3NX4itrkSw_IF7tH2L8_zZ0mgkpwnrYz35k,2989 +pythonwin/pywin/framework/help.py,sha256=gsHhHEYmdNnqWjM2fqLNSyztRImi3daz3CfKBytDVic,4823 +pythonwin/pywin/framework/interact.py,sha256=uuHNF0xzNZOnbh3Y94_rAkxMTsu-AH6kbTQv96Zn-vk,29544 +pythonwin/pywin/framework/intpyapp.py,sha256=3Otab_c6fgILKmbfF1j9Yks33bfGt7wHRaizOch1Zeo,16225 +pythonwin/pywin/framework/intpydde.py,sha256=k73onrdw-PBGgb7H-wdUwufXqVChlk9Xr3UxmAxYx1c,1335 +pythonwin/pywin/framework/mdi_pychecker.py,sha256=FzzRihE7AyRGyvpnHvx_SeqvkC8PMhsYOle_cl8S3x4,26325 +pythonwin/pywin/framework/scriptutils.py,sha256=l6fh_64rOcRTt0lX9ehoEOZ3Ssc05tmQc8BkMHwuh-A,19550 +pythonwin/pywin/framework/sgrepmdi.py,sha256=FzO4FSvGDzFUS-F2Y7oHl37ZhYp2PP55UTuCaSkiUvI,18658 +pythonwin/pywin/framework/startup.py,sha256=xBx8sCxtWW2XWgkhCV8n09vu6jt-MGN27sf7hd53llA,2749 +pythonwin/pywin/framework/stdin.py,sha256=MXG4kC1aBD5N9CY7k4_w-vq2Yi1BdOK8h5OisANX4QA,6634 +pythonwin/pywin/framework/toolmenu.py,sha256=zAPZSDMKgnHPhTkcjeLtOBrE-g47T4O_dirWCBBB-Fg,7886 +pythonwin/pywin/framework/window.py,sha256=Gin2mYYde08RxV0ZW4oDqnXCwMBgK2h56_n1zs-X8Ks,500 
+pythonwin/pywin/framework/winout.py,sha256=JSCbQyrCI4eLxtrWZ43pFhb1iiwwDVk52E_EDaib3LM,17005 +pythonwin/pywin/idle/AutoExpand.py,sha256=lsajkgYSqTvvOTb5O3id88cFgpfqhZE6PhyPJI6LFug,2677 +pythonwin/pywin/idle/AutoIndent.py,sha256=7FT_cuIWcF6o1ZNcFH1YVFl3zvUj-IkLD99IREh74P0,20636 +pythonwin/pywin/idle/CallTips.py,sha256=yXfElDVfIci9ywWdAgsqAJFKc_0JXLlvTeprwahkBYc,6309 +pythonwin/pywin/idle/FormatParagraph.py,sha256=STDTSIOx7XVN1aPbhAogMJdBAuqBtGfcqYFHg-WhHls,5677 +pythonwin/pywin/idle/IdleHistory.py,sha256=Hf0ITZvnS_8cSlx9VpRNcUj_aQH4TN43LWGgULXSI4s,3057 +pythonwin/pywin/idle/PyParse.py,sha256=P1vYFnfBsDs-aetN1ylAWXkIvbb8aucGMFGSDFk7-p0,18343 +pythonwin/pywin/idle/__init__.py,sha256=Kpc8ZrT8Zs36lqXfWPIsH7FCABCw3fSmCxCTpJ64HUg,54 +pythonwin/pywin/idle/__pycache__/AutoExpand.cpython-36.pyc,, +pythonwin/pywin/idle/__pycache__/AutoIndent.cpython-36.pyc,, +pythonwin/pywin/idle/__pycache__/CallTips.cpython-36.pyc,, +pythonwin/pywin/idle/__pycache__/FormatParagraph.cpython-36.pyc,, +pythonwin/pywin/idle/__pycache__/IdleHistory.cpython-36.pyc,, +pythonwin/pywin/idle/__pycache__/PyParse.cpython-36.pyc,, +pythonwin/pywin/idle/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/mfc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pythonwin/pywin/mfc/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/mfc/__pycache__/activex.cpython-36.pyc,, +pythonwin/pywin/mfc/__pycache__/afxres.cpython-36.pyc,, +pythonwin/pywin/mfc/__pycache__/dialog.cpython-36.pyc,, +pythonwin/pywin/mfc/__pycache__/docview.cpython-36.pyc,, +pythonwin/pywin/mfc/__pycache__/object.cpython-36.pyc,, +pythonwin/pywin/mfc/__pycache__/thread.cpython-36.pyc,, +pythonwin/pywin/mfc/__pycache__/window.cpython-36.pyc,, +pythonwin/pywin/mfc/activex.py,sha256=6yNHt4BuHch1_JLks9vMqof2c7uguWETi5-cOOLI88A,2544 +pythonwin/pywin/mfc/afxres.py,sha256=J0WiA7MbfZnIs4Hi4Zs3BlJcv4AL2i_CDTGIzQ7PR4o,15091 +pythonwin/pywin/mfc/dialog.py,sha256=Jntd2ivenbOmjKdQ9hUPPpMuHPafN7rKZJuqjqRngHc,7931 +pythonwin/pywin/mfc/docview.py,sha256=NJnZhuU0ckd72hAOZIhBN7XB6jhgStsBmz5f9kAb1_E,3568 +pythonwin/pywin/mfc/object.py,sha256=NJg5cS5eJcc14XS98ui7IgWE2nAb_03JGbB9I7Osl_M,1798 +pythonwin/pywin/mfc/thread.py,sha256=JZbEZNIXlBwgeNK_W4bxSOP1tosHD2MtvthDSFMLBCI,526 +pythonwin/pywin/mfc/window.py,sha256=uRkGStHgXjcBIc7C24TuEHO8zjqIIsF4Z0ME0H7GbxY,1377 +pythonwin/pywin/scintilla/IDLEenvironment.py,sha256=VWjyo7xlQtHFuSZUDyxAb6jUvG_g9z5N5gWfH3GRYrA,16298 +pythonwin/pywin/scintilla/__init__.py,sha256=hebUgohJaxM7nAhbTIo_JMYu3l8uty3TIPTkVxw5YQ4,15 +pythonwin/pywin/scintilla/__pycache__/IDLEenvironment.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/bindings.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/config.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/configui.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/control.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/document.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/find.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/formatter.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/keycodes.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/scintillacon.cpython-36.pyc,, +pythonwin/pywin/scintilla/__pycache__/view.cpython-36.pyc,, +pythonwin/pywin/scintilla/bindings.py,sha256=E1ZsSwwMCq2R3XaUi7yffiyA46Md3O8npDj2fMhI5uA,5052 +pythonwin/pywin/scintilla/config.py,sha256=bpnBiqZx_FQdpMJWa3H9bLzY269BEaR8CQ3IVJX6NHQ,11940 
+pythonwin/pywin/scintilla/configui.py,sha256=W_B-cyUZhXyxXKQ56a-ieuSHfseFOleYI0LpMb-42Sw,9244 +pythonwin/pywin/scintilla/control.py,sha256=I6xCRcVZxMKqxSi1OVGdlBA5lXWDPwiRZaBjFQ_ScyY,17919 +pythonwin/pywin/scintilla/document.py,sha256=vv-hzVTxXJdb4w6ouuZY3RW9NF-xCzb_CK3LLLlUyg8,9480 +pythonwin/pywin/scintilla/find.py,sha256=zkAYb2M-oHPNhqmQBQJHictOP_k8CUQCEvGiPU5GuBg,12053 +pythonwin/pywin/scintilla/formatter.py,sha256=97lo91qtTFVjaGLJUnTm4oUEv2ljc94lheZpK47IGj0,21743 +pythonwin/pywin/scintilla/keycodes.py,sha256=lTQAk_eCPZReQB72y1CPayF-wYKSqK6333TyD3mVAPY,5280 +pythonwin/pywin/scintilla/scintillacon.py,sha256=2LJgtE_EKepsYUr_7TQc7qH4KrvFsC0VcnlDNMfVe_8,45122 +pythonwin/pywin/scintilla/view.py,sha256=iZLlbG98MDnPr-8RnO_rAay0POI7d9061bMzSygNg9k,25662 +pythonwin/pywin/tools/TraceCollector.py,sha256=9MHHKAxtEW-2_XhRxxPT3eJinqEqiZYn4W8dr7T4kdE,2097 +pythonwin/pywin/tools/__init__.py,sha256=daEdpEyAJIa8b2VkCqSKcw8PaExcB6Qro80XNes_sHA,2 +pythonwin/pywin/tools/__pycache__/TraceCollector.cpython-36.pyc,, +pythonwin/pywin/tools/__pycache__/__init__.cpython-36.pyc,, +pythonwin/pywin/tools/__pycache__/browseProjects.cpython-36.pyc,, +pythonwin/pywin/tools/__pycache__/browser.cpython-36.pyc,, +pythonwin/pywin/tools/__pycache__/hierlist.cpython-36.pyc,, +pythonwin/pywin/tools/__pycache__/regedit.cpython-36.pyc,, +pythonwin/pywin/tools/__pycache__/regpy.cpython-36.pyc,, +pythonwin/pywin/tools/browseProjects.py,sha256=1sFWNPzBq1aVaML1tAGY24iB8ETwY8h8cl4VqyCE3xY,8090 +pythonwin/pywin/tools/browser.py,sha256=yHAykz06czAzG3Dm1HKSdkCea09RSzvj9oTqd9Iww0o,11726 +pythonwin/pywin/tools/hierlist.py,sha256=HA2ONipJNaazMwmnKxwqIRZ5_Q-bQb729soPbjTE-KQ,10867 +pythonwin/pywin/tools/regedit.py,sha256=JvJvzbmafhqhaBKADogrPro8uQ_pHpxAGVLk2t7d6zs,10914 +pythonwin/pywin/tools/regpy.py,sha256=fdDQbMSGnDDH2ukXgqImuCunZDoxLgIqRFbsX4CECx4,1830 +pythonwin/scintilla.dll,sha256=hIrXrKbzY6k6Oh4MfCNPggo5x8EWo2Ca6bgC6S3L0n4,627200 +pythonwin/win32ui.pyd,sha256=ohs0MFYI4nU1KCWnKigEpJugQP2kR78eqvZeDqWEhoc,1426944 +pythonwin/win32uiole.pyd,sha256=TTnciwi6f5JzdX1AEJ5hMhVvSR_a2kbbK5vA266-DGY,122880 +pywin32-301.dist-info/DESCRIPTION.rst,sha256=LIET_1_2tdYU4iZDx6Vvioouca0iE__dQBcN6Hv9l2I,4094 +pywin32-301.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pywin32-301.dist-info/METADATA,sha256=inhXKS59GsiCzuQDBlVhH-O_r2VzHLXvZMADXR1Grmg,4868 +pywin32-301.dist-info/RECORD,, +pywin32-301.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pywin32-301.dist-info/WHEEL,sha256=3o6dJ54ci-MtRFdJCPkLCK5G-vO7QSv3lIQ-NrhglGk,106 +pywin32-301.dist-info/metadata.json,sha256=lfyQVjSnQJBeyikJzKtGwtOuvzSfLDDFbK29t4_Uc6M,887 +pywin32-301.dist-info/top_level.txt,sha256=S3kBrNF1pgEH-_0lcMYRPEMGk4nszAIyA_yogPk8YuA,1303 +pywin32.pth,sha256=T2f_kq8Oo4vxisMI79l294HYTlb1ecYD7R6PDGmhf40,178 +pywin32.version.txt,sha256=gEjjGq_8VY9WND0f_dB1wVLjLlUga8LlXowVwpFpmgI,5 +pywin32_system32/pythoncom36.dll,sha256=Xcbp7NHSloqQARgmSDT5tIWMIOfKyeOTAPPd7PN6e7E,553984 +pywin32_system32/pywintypes36.dll,sha256=9HHlXv57dV6jvp7YDJpteYkhILt1fzxLv4LWefz2NDc,140800 +win32/Demos/BackupRead_BackupWrite.py,sha256=SKl4haIVQntkvCceTAJb6b4Uq7zdg3njeBYIAKZ_xjg,3558 +win32/Demos/BackupSeek_streamheaders.py,sha256=1xziPCy2azJmW2SD0hg9mv-rrho9SUr-V265SQF0c64,3458 +win32/Demos/CopyFileEx.py,sha256=2yKYBbu2hxuYwocD0HRGrcdoSt5wl3YRuDtiGz_fUxw,1074 +win32/Demos/CreateFileTransacted_MiniVersion.py,sha256=GSx7SbgBGNTr_m4l275lhgP7KKDoA1eoQtm5K1oPfyk,3319 +win32/Demos/EvtFormatMessage.py,sha256=s94O_fwOHRuKnBWdvY_fEqalCQEsmhtPivWtrWptt9s,2641 
+win32/Demos/EvtSubscribe_pull.py,sha256=ygzYSSxgIohLrfsePT5eJ-V5-EdiXMu3WUrheMDCU0g,675 +win32/Demos/EvtSubscribe_push.py,sha256=Ici5oo9f8Fl11z79l7Fm10Z6T_MA3b5rQTY1tMwC28g,623 +win32/Demos/FileSecurityTest.py,sha256=sGYVWO-zJ3nD-ebAe2K0Rrp_cCrxGYxvs3jykt-A8Ag,3715 +win32/Demos/GetSaveFileName.py,sha256=8J3kHdhmM4m-q7r8SgTiAQlNI4pQ3zQ1oLGe6G35nrc,1141 +win32/Demos/NetValidatePasswordPolicy.py,sha256=bXKSnvcfuQA1CyvhhkMdtPPF12WDmzRepDWmSLlrIWs,3613 +win32/Demos/OpenEncryptedFileRaw.py,sha256=LyMoXzRqpeLmc3JTRjfWj37e_94YqpYAJkmuW-UFv4w,1955 +win32/Demos/RegCreateKeyTransacted.py,sha256=m7ow1gKZOJMiH8MTSL9Cj3SSbd_hbjF9eCRSoRBQEd8,1819 +win32/Demos/RegRestoreKey.py,sha256=4FrpBXaNW6OObobg6KRKMXuUx1pWjMaphE6nByy7kVY,1833 +win32/Demos/SystemParametersInfo.py,sha256=aEM8TOevMYpt-e3U5_UR1B1EBcswxXyH741crjFgDtc,7607 +win32/Demos/__pycache__/BackupRead_BackupWrite.cpython-36.pyc,, +win32/Demos/__pycache__/BackupSeek_streamheaders.cpython-36.pyc,, +win32/Demos/__pycache__/CopyFileEx.cpython-36.pyc,, +win32/Demos/__pycache__/CreateFileTransacted_MiniVersion.cpython-36.pyc,, +win32/Demos/__pycache__/EvtFormatMessage.cpython-36.pyc,, +win32/Demos/__pycache__/EvtSubscribe_pull.cpython-36.pyc,, +win32/Demos/__pycache__/EvtSubscribe_push.cpython-36.pyc,, +win32/Demos/__pycache__/FileSecurityTest.cpython-36.pyc,, +win32/Demos/__pycache__/GetSaveFileName.cpython-36.pyc,, +win32/Demos/__pycache__/NetValidatePasswordPolicy.cpython-36.pyc,, +win32/Demos/__pycache__/OpenEncryptedFileRaw.cpython-36.pyc,, +win32/Demos/__pycache__/RegCreateKeyTransacted.cpython-36.pyc,, +win32/Demos/__pycache__/RegRestoreKey.cpython-36.pyc,, +win32/Demos/__pycache__/SystemParametersInfo.cpython-36.pyc,, +win32/Demos/__pycache__/cerapi.cpython-36.pyc,, +win32/Demos/__pycache__/desktopmanager.cpython-36.pyc,, +win32/Demos/__pycache__/eventLogDemo.cpython-36.pyc,, +win32/Demos/__pycache__/getfilever.cpython-36.pyc,, +win32/Demos/__pycache__/mmapfile_demo.cpython-36.pyc,, +win32/Demos/__pycache__/print_desktop.cpython-36.pyc,, +win32/Demos/__pycache__/rastest.cpython-36.pyc,, +win32/Demos/__pycache__/timer_demo.cpython-36.pyc,, +win32/Demos/__pycache__/win32clipboardDemo.cpython-36.pyc,, +win32/Demos/__pycache__/win32clipboard_bitmapdemo.cpython-36.pyc,, +win32/Demos/__pycache__/win32comport_demo.cpython-36.pyc,, +win32/Demos/__pycache__/win32console_demo.cpython-36.pyc,, +win32/Demos/__pycache__/win32cred_demo.cpython-36.pyc,, +win32/Demos/__pycache__/win32fileDemo.cpython-36.pyc,, +win32/Demos/__pycache__/win32gui_demo.cpython-36.pyc,, +win32/Demos/__pycache__/win32gui_devicenotify.cpython-36.pyc,, +win32/Demos/__pycache__/win32gui_dialog.cpython-36.pyc,, +win32/Demos/__pycache__/win32gui_menu.cpython-36.pyc,, +win32/Demos/__pycache__/win32gui_taskbar.cpython-36.pyc,, +win32/Demos/__pycache__/win32netdemo.cpython-36.pyc,, +win32/Demos/__pycache__/win32rcparser_demo.cpython-36.pyc,, +win32/Demos/__pycache__/win32servicedemo.cpython-36.pyc,, +win32/Demos/__pycache__/win32ts_logoff_disconnected.cpython-36.pyc,, +win32/Demos/__pycache__/winprocess.cpython-36.pyc,, +win32/Demos/c_extension/__pycache__/setup.cpython-36.pyc,, +win32/Demos/c_extension/setup.py,sha256=wzCAfoBu8BaNyZFZjg4VzcNHgHSBlmwnSxu37Xr9WZI,803 +win32/Demos/cerapi.py,sha256=YS7zmT1tIfDaa_ewy4fkrfdic7ORMvvULffw9oVMYE4,7581 +win32/Demos/dde/__pycache__/ddeclient.cpython-36.pyc,, +win32/Demos/dde/__pycache__/ddeserver.cpython-36.pyc,, +win32/Demos/dde/ddeclient.py,sha256=AI-sUrn-YDPb8DJX2h7W-IP_TKxqIANI5c_Ed7xs1vo,432 
+win32/Demos/dde/ddeserver.py,sha256=XXkah_zWE89TN-xd0GmisZDxeBKEJN7QwEqWEAURbmM,1015 +win32/Demos/desktopmanager.py,sha256=Rt4Tg7T_NGXZ3F0dXICmPwTFRzAbHBBmihxPzLKyYDY,7223 +win32/Demos/eventLogDemo.py,sha256=X-Kj9jG9t1rXYC58l1kHMcLUy8i2Q4JVN-poflZR6Jo,4034 +win32/Demos/getfilever.py,sha256=Stp8QhxRWJs3ysxo0oap9fSmPD268b7ZBbMK3uRTnb4,1064 +win32/Demos/images/frowny.bmp,sha256=RgGveVt053KlmV4qVGwdCtrPyRA0JT57KQvf9PNOIvU,3126 +win32/Demos/images/smiley.bmp,sha256=wi53jYCy523fFYj_FYgzG1dxQdErw-ow2__dfoX9gsA,3126 +win32/Demos/mmapfile_demo.py,sha256=9rPiixsZJt2nEa6GiZR0Gpwn7Xyi2RGHy84k3yJCTqw,2731 +win32/Demos/pipes/__pycache__/cat.cpython-36.pyc,, +win32/Demos/pipes/__pycache__/runproc.cpython-36.pyc,, +win32/Demos/pipes/cat.py,sha256=tdndccPRg6tskVBo6oei48IBdjyNVHBMpiYCseU7V3Q,334 +win32/Demos/pipes/runproc.py,sha256=03_HfQRYsQlguw-uzE1AMtIf30Dll-BUMo96-FazbEw,4144 +win32/Demos/print_desktop.py,sha256=WMqEXeGHrPyD-Gjxfa4QlkjP07GkQFKvgkoBJS9hXjU,2571 +win32/Demos/rastest.py,sha256=K3dC3DuAb7CdDxadyh3Xx1Btdb0w2L48vjXKLAwv3Fk,4837 +win32/Demos/security/GetTokenInformation.py,sha256=fNLMQSIWJY3Z6dJzFahtyMUiGtRFc1IC5eMBKR1UY8M,3431 +win32/Demos/security/__pycache__/GetTokenInformation.cpython-36.pyc,, +win32/Demos/security/__pycache__/account_rights.cpython-36.pyc,, +win32/Demos/security/__pycache__/explicit_entries.cpython-36.pyc,, +win32/Demos/security/__pycache__/get_policy_info.cpython-36.pyc,, +win32/Demos/security/__pycache__/list_rights.cpython-36.pyc,, +win32/Demos/security/__pycache__/localized_names.cpython-36.pyc,, +win32/Demos/security/__pycache__/lsaregevent.cpython-36.pyc,, +win32/Demos/security/__pycache__/lsastore.cpython-36.pyc,, +win32/Demos/security/__pycache__/query_information.cpython-36.pyc,, +win32/Demos/security/__pycache__/regsave_sa.cpython-36.pyc,, +win32/Demos/security/__pycache__/regsecurity.cpython-36.pyc,, +win32/Demos/security/__pycache__/sa_inherit.cpython-36.pyc,, +win32/Demos/security/__pycache__/security_enums.cpython-36.pyc,, +win32/Demos/security/__pycache__/set_file_audit.cpython-36.pyc,, +win32/Demos/security/__pycache__/set_file_owner.cpython-36.pyc,, +win32/Demos/security/__pycache__/set_policy_info.cpython-36.pyc,, +win32/Demos/security/__pycache__/setkernelobjectsecurity.cpython-36.pyc,, +win32/Demos/security/__pycache__/setnamedsecurityinfo.cpython-36.pyc,, +win32/Demos/security/__pycache__/setsecurityinfo.cpython-36.pyc,, +win32/Demos/security/__pycache__/setuserobjectsecurity.cpython-36.pyc,, +win32/Demos/security/account_rights.py,sha256=LlvKGFNp3522NGq22hNNwxvCPHb-w7gv0rxv01zUvBI,1474 +win32/Demos/security/explicit_entries.py,sha256=aOp7QY9NXRFVkyy2M5RxNiTKF3dN4htDOget_7FgBVw,4641 +win32/Demos/security/get_policy_info.py,sha256=OYh3udaM2pHewI_kUduXl-vyFz_HR7OQdm-I9AcntfI,1171 +win32/Demos/security/list_rights.py,sha256=CGzFjtmfDjyC_yD64Lfh9aDBaHXZ8sgyh1zZRqZLaSA,1040 +win32/Demos/security/localized_names.py,sha256=e-GY9l2PoVbEzI_oGqWYfLfKnBX3hFGSAvCH7q51uaQ,2032 +win32/Demos/security/lsaregevent.py,sha256=si3qDYp_GyzaZln_9Rj6puPQ5ENxisTwuHrUYY-fKWk,507 +win32/Demos/security/lsastore.py,sha256=OYZmAXg4hgtJofC_JKZdml5W4dZROIxeyjVgFpVVGsc,457 +win32/Demos/security/query_information.py,sha256=eUFKdTA59BmjJyClKZZNtvss0aWQNYtqGHwdZmkZ4OE,870 +win32/Demos/security/regsave_sa.py,sha256=FmlKklI68Q9NZ55kkf8X5UeEWGiblF0ezXdSJ277crs,1529 +win32/Demos/security/regsecurity.py,sha256=9B9y0claMgnQOufx6m-Oo52F9xkDGMu6wiI1KV7MN0w,1021 +win32/Demos/security/sa_inherit.py,sha256=YRo_rRLgha0C_qz0I15x0gANFysweOezillbYPNSQmE,266 
+win32/Demos/security/security_enums.py,sha256=8CkGfJz9bL5nzvTvlN_nqHvqwG7a3O5JHQmdhSBqbVw,9460 +win32/Demos/security/set_file_audit.py,sha256=slzM5TeTVVAu5v4f2xbjFJ9JNFt1s1TT1GCFgpsFri8,3112 +win32/Demos/security/set_file_owner.py,sha256=vK1emYf3u9ZGbLNYZwvTCPzA_smMB3TigzQqLD-PARM,2046 +win32/Demos/security/set_policy_info.py,sha256=PKdm4jR5t7NkQdXeyJkMSoucEly4QjhFMoWmUYX8z8U,942 +win32/Demos/security/setkernelobjectsecurity.py,sha256=bbu3n6QmbfYJexcAP_HPPZVSIbv1yiYTtyA9ExFq2hA,4164 +win32/Demos/security/setnamedsecurityinfo.py,sha256=CDg30kz9vkNZvj1DjpZftI6I7QbqJL4fAft0bBE9h6c,3986 +win32/Demos/security/setsecurityinfo.py,sha256=SRSjbsYwxZ2LoG-RjB2zq1MNSBNspO09fMpMc_R1WHE,4124 +win32/Demos/security/setuserobjectsecurity.py,sha256=fiZ5TkZjuLRAYSXidO95VVTPAYQxXLTLn7nJB2H1GuA,3016 +win32/Demos/security/sspi/__pycache__/fetch_url.cpython-36.pyc,, +win32/Demos/security/sspi/__pycache__/simple_auth.cpython-36.pyc,, +win32/Demos/security/sspi/__pycache__/socket_server.cpython-36.pyc,, +win32/Demos/security/sspi/__pycache__/validate_password.cpython-36.pyc,, +win32/Demos/security/sspi/fetch_url.py,sha256=L5xk-EQcjpP9muNjg1xv6QImphTofHVK8tDSHbqKo-w,5429 +win32/Demos/security/sspi/simple_auth.py,sha256=hwAobJb0hNAWszB3PfOLB5I4NoHlbESdz-EYzPqlQ_A,2806 +win32/Demos/security/sspi/socket_server.py,sha256=Z8M98yyuHxq2eR1w2C3rLtDU1C3wAJUYIZN6Jnjt9-A,6331 +win32/Demos/security/sspi/validate_password.py,sha256=yQnXNKQKYqk6m8BGnEWS3byKk9cr8z1t3yCUqaV1flA,1127 +win32/Demos/service/__pycache__/nativePipeTestService.cpython-36.pyc,, +win32/Demos/service/__pycache__/pipeTestService.cpython-36.pyc,, +win32/Demos/service/__pycache__/pipeTestServiceClient.cpython-36.pyc,, +win32/Demos/service/__pycache__/serviceEvents.cpython-36.pyc,, +win32/Demos/service/nativePipeTestService.py,sha256=6mQhiymp0jCIpBeqMdZ2_sWQhjZg9J0m52tXMB80HvU,2088 +win32/Demos/service/pipeTestService.py,sha256=11xc_b7Yj4sWf8rtDHpsBqxg1VkWAWdK2Mf2j-nAgyI,6646 +win32/Demos/service/pipeTestServiceClient.py,sha256=zCjKKzI4K-Xje2A5i0fEo3sKvZi9Ar7lnOfGL1QG_XY,4134 +win32/Demos/service/serviceEvents.py,sha256=i5DCeQg9Oeym47WapyuTfWyxDeNZFF6ZI2APtlD6rvE,4127 +win32/Demos/timer_demo.py,sha256=OZBkOrbTUy0ZML_wDpgqxsZ-ZTtDfn3fk4ElWVXnmSY,2204 +win32/Demos/win32clipboardDemo.py,sha256=53QEaXnewa76fONILPtALFXM00WyzWsQGi21EMPzos8,4607 +win32/Demos/win32clipboard_bitmapdemo.py,sha256=iVgHt9SuyCqnImBCmLO2ofQpZnBPL6T1XMJL0q79rFc,3405 +win32/Demos/win32comport_demo.py,sha256=_7PWuPgCbFUl6jn-WPbcvtuf_ZxwOH-HNmdUZ6dVqJA,5765 +win32/Demos/win32console_demo.py,sha256=05I7zBtKebBja5RNGnxbfgxeG1sFAdeGKIUeylZyl-U,4464 +win32/Demos/win32cred_demo.py,sha256=T6TgMuDM39A8v-aEQ2CmNRYLNuxiOEMeSfsHDtdZLvQ,2240 +win32/Demos/win32fileDemo.py,sha256=mfn2VGb6AD77yCuvkasPmrPj8pkc66UpDQFtZw6PhB4,1541 +win32/Demos/win32gui_demo.py,sha256=1DdUW4aVsnyMr-adv1XaKgMe5S69mPErCkElpHN81EU,4257 +win32/Demos/win32gui_devicenotify.py,sha256=7lr3nAzs8P457aQb-oQeh4xDAedFxfVf8jK6X-C8O8M,3909 +win32/Demos/win32gui_dialog.py,sha256=D42yTd96Z134NPZoY4_qoMuZljYCGz6SSycee6h9Kn4,14822 +win32/Demos/win32gui_menu.py,sha256=pNs7aVygu_-NnplLQ5rDKqe3p33OdQgwb-jMXCdPAjI,16050 +win32/Demos/win32gui_taskbar.py,sha256=w_8isx6PAtrU-nAiOEMLJ2pfS0wxTl7SDX9k0RlPs-8,4948 +win32/Demos/win32netdemo.py,sha256=KtSzABsSNcaJRPf5-WfQUtkUT83lzjFV4yP-D7T72MA,8585 +win32/Demos/win32rcparser_demo.py,sha256=rjSLmwVBBGZ2u4rTKnz0D3gTC_mbs4edjHOXcWu6f1c,2746 +win32/Demos/win32servicedemo.py,sha256=c_b9h92gElbasTP6o3veAaHEOJsTZpRMFxf3Mhs6B78,579 
+win32/Demos/win32ts_logoff_disconnected.py,sha256=bqSHob5NFOSKT6cSA25lcFdD3xbG8tPNnfwc_Lm0Qgw,950 +win32/Demos/win32wnet/__pycache__/testwnet.cpython-36.pyc,, +win32/Demos/win32wnet/__pycache__/winnetwk.cpython-36.pyc,, +win32/Demos/win32wnet/testwnet.py,sha256=qY2x3KeCm-_3p9n8MKb-EMMLxABIwNLisWS9UjjAGqE,3461 +win32/Demos/win32wnet/winnetwk.py,sha256=pO6M2_XGAVnFy6nPJ_iIy1s1RSChNzy_yOYaw6jz-ZA,3187 +win32/Demos/winprocess.py,sha256=ctm0AyY4Fp6HcP8BNwM4766kZO8Uuj5E_rJAbg5tkCc,7326 +win32/_win32sysloader.pyd,sha256=hp2H3bOo8XnCupOLHzMLqgmBK9UldZDaAfBAFqa6zmE,12288 +win32/_winxptheme.pyd,sha256=uvyzCaGIeZgQ14eXUFZnEh8ru2CV0s6he7zpStJIzdE,25088 +win32/include/PyWinTypes.h,sha256=pzat53qxXe-IrkuQGlg5ToWMSe3BZ2_p3PMjfl24RSs,30459 +win32/lib/__pycache__/afxres.cpython-36.pyc,, +win32/lib/__pycache__/commctrl.cpython-36.pyc,, +win32/lib/__pycache__/dbi.cpython-36.pyc,, +win32/lib/__pycache__/mmsystem.cpython-36.pyc,, +win32/lib/__pycache__/netbios.cpython-36.pyc,, +win32/lib/__pycache__/ntsecuritycon.cpython-36.pyc,, +win32/lib/__pycache__/pywin32_bootstrap.cpython-36.pyc,, +win32/lib/__pycache__/pywin32_testutil.cpython-36.pyc,, +win32/lib/__pycache__/pywintypes.cpython-36.pyc,, +win32/lib/__pycache__/rasutil.cpython-36.pyc,, +win32/lib/__pycache__/regcheck.cpython-36.pyc,, +win32/lib/__pycache__/regutil.cpython-36.pyc,, +win32/lib/__pycache__/sspi.cpython-36.pyc,, +win32/lib/__pycache__/sspicon.cpython-36.pyc,, +win32/lib/__pycache__/win32con.cpython-36.pyc,, +win32/lib/__pycache__/win32cryptcon.cpython-36.pyc,, +win32/lib/__pycache__/win32evtlogutil.cpython-36.pyc,, +win32/lib/__pycache__/win32gui_struct.cpython-36.pyc,, +win32/lib/__pycache__/win32inetcon.cpython-36.pyc,, +win32/lib/__pycache__/win32netcon.cpython-36.pyc,, +win32/lib/__pycache__/win32pdhquery.cpython-36.pyc,, +win32/lib/__pycache__/win32pdhutil.cpython-36.pyc,, +win32/lib/__pycache__/win32rcparser.cpython-36.pyc,, +win32/lib/__pycache__/win32serviceutil.cpython-36.pyc,, +win32/lib/__pycache__/win32timezone.cpython-36.pyc,, +win32/lib/__pycache__/win32traceutil.cpython-36.pyc,, +win32/lib/__pycache__/win32verstamp.cpython-36.pyc,, +win32/lib/__pycache__/winerror.cpython-36.pyc,, +win32/lib/__pycache__/winioctlcon.cpython-36.pyc,, +win32/lib/__pycache__/winnt.cpython-36.pyc,, +win32/lib/__pycache__/winperf.cpython-36.pyc,, +win32/lib/__pycache__/winxptheme.cpython-36.pyc,, +win32/lib/afxres.py,sha256=J0WiA7MbfZnIs4Hi4Zs3BlJcv4AL2i_CDTGIzQ7PR4o,15091 +win32/lib/commctrl.py,sha256=hTMfgzNm8AKgMJgQM2LvVhWC7LvkzXCqS7VRqJx8rac,47859 +win32/lib/dbi.py,sha256=1uHZYZI-bhBVfcnfqB5O-_apqBs7ALV6zUUz4Xcy24I,700 +win32/lib/mmsystem.py,sha256=ql6xrzFsGUDE6z3Jru3uI4cDhWsBniIIqH4QWvvf3do,30521 +win32/lib/netbios.py,sha256=fw0UnkS2C0FkWLkYTNhcCXaV0rCIdge-QRxXluPXH_Y,7051 +win32/lib/ntsecuritycon.py,sha256=yvs30oArI6XDIoPeNMV84r9a2DwGXUzzvcFfakXwG_c,24420 +win32/lib/pywin32_bootstrap.py,sha256=fjBBCs36aoErZVy2GEcpFyid_goH8hteM7uO5e3ImRs,1282 +win32/lib/pywin32_testutil.py,sha256=jwdkt9BL_kyOsELmZhSZXYkk8TLp6Bu-gNuKWQmzdag,10134 +win32/lib/pywintypes.py,sha256=mkadO3XHB6sPKpddYUgCRYSmOQtfzhA8zUwAolVwnHk,5694 +win32/lib/rasutil.py,sha256=xxowTgy0lIYfC8EKUGJFpFBtTkNQOyG1uxKGE78J3V4,1738 +win32/lib/regcheck.py,sha256=8L-IrUTCs31_0st4agv8pc4mzVgkUSuE8Sr7UkdsovU,3449 +win32/lib/regutil.py,sha256=z_f1Xn9vqFhSmk7njgE84FJFvnza8f36ORu-fx3OuiI,10979 +win32/lib/sspi.py,sha256=8A-4FhCqpoQSzmEOK7lP9Hpw7tKZDxrHc6hl-xJasR8,15185 +win32/lib/sspicon.py,sha256=1cHRL9ZQCsbNbf2aPogDGF1-cj992gTnTr56_m7tWR0,16070 
+win32/lib/win32con.py,sha256=zrwgW4WX5KDatzCCbvRfGBsKE8Guj3O65aklsQEFuZM,120710 +win32/lib/win32cryptcon.py,sha256=1zQjSQsduB5bPL2NZ7YCM8XrknCW9lZ8_aXbmJuKhHE,73801 +win32/lib/win32evtlogutil.py,sha256=0J9CFOKIQyI9PgiZzfHL1p7YdfcTP7kF54n9E31WsPE,6481 +win32/lib/win32gui_struct.py,sha256=M7HwyfhCFs589sJcpWywmpngGBAisW1hMaLrafdbLAE,29361 +win32/lib/win32inetcon.py,sha256=c62JSfLCInM9kgFyUcVTgxi-VDCSP9FM42kscKHj1z8,47680 +win32/lib/win32netcon.py,sha256=FSLV74GixE28-JXAj53AMtd3D0WV6UShAmU3xuQt2EU,19739 +win32/lib/win32pdhquery.py,sha256=QQIhJ6EgmPVKOH_-_24w2Kj8vw5CDPx_1gq5roWrJXE,20300 +win32/lib/win32pdhutil.py,sha256=rtoXb-7YHYiX08310-SX5af58QD5C5NLb1MrdOP7tAY,7716 +win32/lib/win32rcparser.py,sha256=ReDEhpwiPka_PdHAxtRaSLSVHUeBpMCEgR0afFPhs3s,21175 +win32/lib/win32serviceutil.py,sha256=GBWvy4gNglCsLwsYuOxT96rDKKPDDwcneHTKz61OKB8,35816 +win32/lib/win32timezone.py,sha256=H_bmBJBm_Gcb4-x6bS6bOJv9iKf8xE3oSJNzOPgEx9c,34574 +win32/lib/win32traceutil.py,sha256=yqvfTJBKT8448ZhIRXbZ_NIWZgkJlGfCb1ouZsru5RA,1476 +win32/lib/win32verstamp.py,sha256=Q0TKMH9Zjj4G7T8J6uzPxlek4g4FYTxGxeVS0zILk-Y,7013 +win32/lib/winerror.py,sha256=Xz0-awwS5_gQC30HgGMjbk4ClLsYe7akRZq5hDBpSIs,101427 +win32/lib/winioctlcon.py,sha256=n20wWAT4q7HfERoJGZ1wqSNvN5_l9M-hyRWU4Bzf1LI,34287 +win32/lib/winnt.py,sha256=gbPMLmZGp3UW8iP2EjBYZZ_y0YpxXjcXvsGKcj39EvM,39603 +win32/lib/winperf.py,sha256=ZPwCkc3UFTnFit7tisAI98OWWB8r5NK6j93R-ROF2zs,6233 +win32/lib/winxptheme.py,sha256=sixkGIq1-sxGoQ1REgnNhKJblXH62k2ttYT8B0Pxdng,742 +win32/libs/pywintypes.lib,sha256=f70Ttr5GuPr9Hqo-ay0eq4bVCNbGiqd24rCBbeFYUnU,116652 +win32/license.txt,sha256=ETzTz3hOWGiF8B-T5d9498fACzTXbMQQHgKc0v1iIRM,1520 +win32/mmapfile.pyd,sha256=1oCBKAdW_CDcfcURd_x0TEc4yvH9HeQ1exGYDqAu7Zw,20992 +win32/odbc.pyd,sha256=nhgXG6vU1YbNUB9eJVrQqydQtxW8z02LZPCaOFEJVEA,39424 +win32/perfmon.pyd,sha256=51Y_Ybnr9BA45vhMA-3ji-suaFtBDvTz1J7PllJuxsY,27136 +win32/perfmondata.dll,sha256=9qI9WzAp0sFcdq20a7zvWZ3Tnj9lCp0eRVwXSszWcJI,17920 +win32/pythonservice.exe,sha256=ZxxNEZEB46gs4aUjSsyjVwjOVe_9muzQksDeAVUfc20,18432 +win32/scripts/ControlService.py,sha256=HKdiYRr877Z4j75lcJjkQWPAZ_wx1Sa3TDjHQnkgjqA,11803 +win32/scripts/VersionStamp/BrandProject.py,sha256=84tHa1fL2CooocozEYpvpYXJrAgQX23ZgsuKZEZNDLE,2509 +win32/scripts/VersionStamp/__pycache__/BrandProject.cpython-36.pyc,, +win32/scripts/VersionStamp/__pycache__/bulkstamp.cpython-36.pyc,, +win32/scripts/VersionStamp/__pycache__/vssutil.cpython-36.pyc,, +win32/scripts/VersionStamp/bulkstamp.py,sha256=VxvQgeHZfQwJOX7XgVEuha9N2aKsatnXRp3Yg3JBTwQ,3645 +win32/scripts/VersionStamp/vssutil.py,sha256=2EIcdBzRhaJQHHtuk7P5pkPZskdkElcLVOaaIlaipJ0,4694 +win32/scripts/__pycache__/ControlService.cpython-36.pyc,, +win32/scripts/__pycache__/backupEventLog.cpython-36.pyc,, +win32/scripts/__pycache__/killProcName.cpython-36.pyc,, +win32/scripts/__pycache__/rasutil.cpython-36.pyc,, +win32/scripts/__pycache__/regsetup.cpython-36.pyc,, +win32/scripts/__pycache__/setup_d.cpython-36.pyc,, +win32/scripts/backupEventLog.py,sha256=JJU7RfCtQPYd-4pCDKGL4_hAzuniV-OJ3GuLKSg0s1g,1040 +win32/scripts/ce/__pycache__/pysynch.cpython-36.pyc,, +win32/scripts/ce/pysynch.py,sha256=PRQeMi8IM9BAeY-7Ic2YizXgKEs1w2_pFt0TBYw2pnI,7933 +win32/scripts/killProcName.py,sha256=s_4ICQ3qFWxFjMHDiIx4bH2wUL2x8F6u3om5WBmdKS4,1770 +win32/scripts/rasutil.py,sha256=zUGxXKt3Tc0SdCx9TAuC0o9dwr4rdlESp3ac-_PGaBU,2388 +win32/scripts/regsetup.py,sha256=iyjs1UaSWxWN2R6EUFpoB1UfwVDZWSL7gQ4O66Igi-0,19996 +win32/scripts/setup_d.py,sha256=pMPjMScj97mw7qRlhuLAL6vhoqMVD5JvdhfvRmlIvKg,3329 
+win32/servicemanager.pyd,sha256=2KDeNXLn4zzNCvkg84C4jZES_lGiTHQ4QCYAl7b66QU,39936 +win32/test/__pycache__/handles.cpython-36.pyc,, +win32/test/__pycache__/test_clipboard.cpython-36.pyc,, +win32/test/__pycache__/test_exceptions.cpython-36.pyc,, +win32/test/__pycache__/test_odbc.cpython-36.pyc,, +win32/test/__pycache__/test_pywintypes.cpython-36.pyc,, +win32/test/__pycache__/test_security.cpython-36.pyc,, +win32/test/__pycache__/test_sspi.cpython-36.pyc,, +win32/test/__pycache__/test_win32api.cpython-36.pyc,, +win32/test/__pycache__/test_win32crypt.cpython-36.pyc,, +win32/test/__pycache__/test_win32event.cpython-36.pyc,, +win32/test/__pycache__/test_win32file.cpython-36.pyc,, +win32/test/__pycache__/test_win32gui.cpython-36.pyc,, +win32/test/__pycache__/test_win32guistruct.cpython-36.pyc,, +win32/test/__pycache__/test_win32inet.cpython-36.pyc,, +win32/test/__pycache__/test_win32net.cpython-36.pyc,, +win32/test/__pycache__/test_win32pipe.cpython-36.pyc,, +win32/test/__pycache__/test_win32profile.cpython-36.pyc,, +win32/test/__pycache__/test_win32rcparser.cpython-36.pyc,, +win32/test/__pycache__/test_win32timezone.cpython-36.pyc,, +win32/test/__pycache__/test_win32trace.cpython-36.pyc,, +win32/test/__pycache__/test_win32wnet.cpython-36.pyc,, +win32/test/__pycache__/testall.cpython-36.pyc,, +win32/test/handles.py,sha256=FiGQ8OujQTCuMAx3upaVuRt3yi20Wnh2hqsB4r2cHqE,5513 +win32/test/test_clipboard.py,sha256=PnQsb02aKy-2C7H2uUp9X_sqXE90Ov-YBhd93pkRwhg,4232 +win32/test/test_exceptions.py,sha256=Sk3SorOYK4HgO2t73ANrD2qrX4q1Z9sNm_65XVPMhzA,8554 +win32/test/test_odbc.py,sha256=PYWsj88QSSBwwWuM5---Lo1_8UEns3ixnZdtVY3NE_E,7548 +win32/test/test_pywintypes.py,sha256=C6MajqIwDhmyCz_adyO2P6k7fiM2zzMnM4WemWfisQU,4140 +win32/test/test_security.py,sha256=vUNDurLEftjTRt8U0PD47jlaZxjMwdaohFFVPZAiR04,5909 +win32/test/test_sspi.py,sha256=Mfw1_UvzDx_epCrNDgoYhJGKrli1L1tZGmv858A8KTE,7887 +win32/test/test_win32api.py,sha256=XESmRjeOdRZRhAHqeM_7fEldiYk2FI83EBIWVgzAl-g,8832 +win32/test/test_win32crypt.py,sha256=UZiUogpvVidl7fbPebkzp3c-8prXYKCMPGZrtt0KjpU,1074 +win32/test/test_win32event.py,sha256=FCLRd81Jj7lbpq1WIC6KiZlgvxML7I9EgfrIOC_sce0,4334 +win32/test/test_win32file.py,sha256=qDeRMuwKJLKjQBoUz6KJ0-xbrW5j5ICYiIEU38O5Rxk,41488 +win32/test/test_win32gui.py,sha256=C6FtY8FpcB-wwBkWWxhC-wnxNoCGL2FvLU26Jzka8U4,2307 +win32/test/test_win32guistruct.py,sha256=qyOjpUQRNw4UEZw-g32-hO2wBRSyUPfmJtN6S2PYtVw,8721 +win32/test/test_win32inet.py,sha256=Kk5ebk5bAQaYnyYpFNA9GZy0oxaEQe35cuOVo6awvhk,2885 +win32/test/test_win32net.py,sha256=WWHn24tXyhOdprNMpH5XtZNqSibYCxvyxdoEXurWUKc,629 +win32/test/test_win32pipe.py,sha256=V3vHaeV6takobpqLGidfiJ-QIfVosuc0tlYnDcrobf4,5548 +win32/test/test_win32profile.py,sha256=J-zIeu0HHoo6LXRmuFEdCrJTyE9yz83tUioH3sQxkV4,386 +win32/test/test_win32rcparser.py,sha256=sfy_-wlNgnzFJuri5xekwG-5wEWTde_uV7E1VXAD738,2503 +win32/test/test_win32timezone.py,sha256=LW37hWtdlGQIK7Vm1JytR9i_zrYQJ7I-W5uvEJk2ywo,741 +win32/test/test_win32trace.py,sha256=9Y7Vn0D7dY4vVP_6IjlE8R73lXB1-7EPaJPE0QMw0nE,11269 +win32/test/test_win32wnet.py,sha256=8slcBiDv6X7WZkL8ewitiOe9cZ3Yf5ebwM4cMurvveQ,5943 +win32/test/testall.py,sha256=kb6T4qVUuJlnlEAI-XLBcnCicdfHxqb6Stv1xGaJcGg,6524 +win32/test/win32rcparser/python.bmp,sha256=GWIxMoFdranqXHxIgyJ_dovJBNfCxc_OQlnSGxTfHLE,778 +win32/test/win32rcparser/python.ico,sha256=LnQC7RaDp1G7UiKgN55dilDjRn41wNDTWyo8ymRTctw,766 +win32/test/win32rcparser/test.h,sha256=znpRBpUSDy67a76sDHuP0uTAJYv09jfjkr1ehf_wi8I,1169 +win32/test/win32rcparser/test.rc,sha256=i6RNZ9n5bukZdZkLqlGNswPm7pCvq77WlT9LISaM4Bs,6273 
+win32/timer.pyd,sha256=hx5ClGxg5MMGmNlsZplAR_7zhwJkTj8AtqtvWJd1NCM,15360 +win32/win2kras.pyd,sha256=1Mqzsppk16sHQ9giRjVfixKDIZBmwx-3J9w4ip6JJAk,18432 +win32/win32api.pyd,sha256=m7cE0GZzcN8ZVfs7DF8nNsdwRYQozafmT3N1Y4u_c1I,134144 +win32/win32clipboard.pyd,sha256=W3ZSH-0t0Jwxci3cm5-5HhVlQSZkI4M00Q_ztpHUl-U,26112 +win32/win32console.pyd,sha256=2GJNOdqCaks0hdd4wnJlF7M86iZx7AvCk3gi2iMKXVw,61440 +win32/win32cred.pyd,sha256=a9I6ks1QZ1aZZpuA-HUDQHZe5Q4dbZc8Nc6_qZgs5vg,35328 +win32/win32crypt.pyd,sha256=rZ2GyR7DjnY8zAlbBnPPUVOgglhdb_ptcnw_rTcTRrI,128000 +win32/win32event.pyd,sha256=dulXPzVTCBOR-6pqkALUxZMqQq8yV3jjWo5xZEOLtVA,26624 +win32/win32evtlog.pyd,sha256=ulRGCxhrZdC7c8RKelOFXSsUI-GSsSylEJBnDt90JyQ,74752 +win32/win32file.pyd,sha256=aTA6NhKmHSAHJehufoLR1xm0PTOIQ9VINn4GZAU1lAc,156672 +win32/win32gui.pyd,sha256=EM4BJB21HkPIt2bQLquyjl_eoEizshHteglvDr6fNGU,228864 +win32/win32help.pyd,sha256=xAFJhmK5PuG3ggmh_xuivMsNojCKwqeAzab030y-COc,54784 +win32/win32inet.pyd,sha256=Qe_76mm0XG8jWIshh_TFya4nHXkYg0ywf6z_cW1rGQ8,53760 +win32/win32job.pyd,sha256=N-Nq_EI3FugaCOVj1TkL1FBJekf0_m5ApcpPLpdtzdw,27136 +win32/win32lz.pyd,sha256=9nrY_elWjLA2lRF_iq4lPXaNqhxAZ2ILxGJ_xN7qNCw,15360 +win32/win32net.pyd,sha256=Xq17DXWYQTRdLb6dHe_57o5EsW3ejkFLbMDByUL1BnU,93696 +win32/win32pdh.pyd,sha256=2f6M896HpasCEI3wN3Qi1YV6w4raeb_gRuQKordNZ2c,34304 +win32/win32pipe.pyd,sha256=DjsRXeAgXtqIRgvsmFk3SdYXRYX4naZ3PMH4fOrN5YI,27136 +win32/win32print.pyd,sha256=u-YWJfcH58zIytBERu8XWJGUeMOPJor5P3GThH2A9xY,73728 +win32/win32process.pyd,sha256=mE0f_tBSsFAp6V4iCQQnsiP77zKYE77sLmHVzRIqa1Q,54784 +win32/win32profile.pyd,sha256=OvjFDZ0D79a-56KxtWPHrzZj5G30Rwk40Z7LpceG6a8,26112 +win32/win32ras.pyd,sha256=ZVgG1JQ_cls7QafqZoc5DCEkCZ4VCQsfDf-okCCTDrQ,31232 +win32/win32security.pyd,sha256=JnmC4baSIgvtT876RO8Au2RbL3WtGKjxCrO3r9c3Phk,141312 +win32/win32service.pyd,sha256=bcY_GUSrKhWSGCgwgCwQExWcjhZn537SXrJI3MA6El8,58368 +win32/win32trace.pyd,sha256=FVsd923JDkU2P063yJUm80HvvibLwLmFUet6PZXtwuY,22528 +win32/win32transaction.pyd,sha256=otxTubnNq6cSpKQ-Vh7g_PcJn8SFsNEiliVBinF7Ylc,18432 +win32/win32ts.pyd,sha256=ORdQ0j4Bw_gbhQA1Sx1-EQNvFY2YXp9wgrKigS-HQRY,32768 +win32/win32wnet.pyd,sha256=_eb-d70c44qutPmVg3H4YEWRhYkn8vzS3IH5Yx8r08U,36864 +win32/winxpgui.pyd,sha256=BJ9kh9_DiGO3L5JL2fuK63tOc33Mk4KHMGr4lgg23Ew,626176 +win32com/HTML/GeneratedSupport.html,sha256=kZ9Ocb7nmMiJu7oeXJmpIdkURovpTBN5WO9iebjT4sU,6023 +win32com/HTML/PythonCOM.html,sha256=nOFjOANTKZfr4sMFJRvDNlSeGTPWiR8iPRSNtnidVMg,8943 +win32com/HTML/QuickStartClientCom.html,sha256=DiflgPTFf6zP7us8EbMIkIlizL9BkqPhDvmBM7PTue4,7310 +win32com/HTML/QuickStartServerCom.html,sha256=Tkw5Q4HG9V6YMTbXiUbOqJoLLQUaUbAJRH3nwI-LoPQ,12940 +win32com/HTML/docindex.html,sha256=Scj_acL7n0w9Whkd7s3Xx8u0Iwt71pK34K83ypsUIDU,1295 +win32com/HTML/image/BTN_HomePage.gif,sha256=nTV7ZQiN6x1fFcWKt4jHj3WsIzjv04XjJrCbqRpSIBk,211 +win32com/HTML/image/BTN_ManualTop.gif,sha256=LwVu_ClkEDG1xhVBiCAy-OLi9-ZJ6BIINjAyi2R7jJ4,215 +win32com/HTML/image/BTN_NextPage.gif,sha256=hGnR6jZJERMUsnduVHP4AlntrkgehcFpDyfhI4xvj4k,218 +win32com/HTML/image/BTN_PrevPage.gif,sha256=wFJ1YHrsOEzBr3jDEOqBGKQmqWGBkADtnCPEMJHpm-U,216 +win32com/HTML/image/blank.gif,sha256=iJO_Up8XRXUyA8YYNoftgJlVONefdsXEFNfIuQxWFMs,864 +win32com/HTML/image/pycom_blowing.gif,sha256=s8ZSBzs8dfWsgTgbb0S43urQZcY1xjdxoIBuSHeLr6o,20926 +win32com/HTML/image/pythoncom.gif,sha256=pBINoAg9LpAFllAeRM5vHHgNcSUtWlAty7bYkjMnBho,5767 +win32com/HTML/image/www_icon.gif,sha256=TJw2huqtQFld280AhhQ39etm1ITsh4cg896hMi2Pr4c,275 
+win32com/HTML/index.html,sha256=hL3O1peZWaQv9OSS5FFUVigqXmGd07e0y4YILZvIeXI,1629 +win32com/HTML/misc.html,sha256=HLInjzAaBT90JWKVnFr53OuINhMBgMsZ-lNukSgwbds,1164 +win32com/HTML/package.html,sha256=MtR3YxZRP2iB2dRYPSMjooX5UKdXSGT_WXqz3FxODxc,3253 +win32com/HTML/variant.html,sha256=gbKY45CQuRXg_Wg7qlu-vYCH8KUiZ5Mn2GDEYJogOBk,5874 +win32com/License.txt,sha256=0YrRjNvh0WhYlo721oPOGjR1IqK4wGIM3p0rL6xCkxQ,1548 +win32com/__init__.py,sha256=NyTNKfkl-o1ivAo6-Go2t-AdztQR2XhDMhmD5vWHqWM,4306 +win32com/__pycache__/__init__.cpython-36.pyc,, +win32com/__pycache__/olectl.cpython-36.pyc,, +win32com/__pycache__/storagecon.cpython-36.pyc,, +win32com/__pycache__/universal.cpython-36.pyc,, +win32com/__pycache__/util.cpython-36.pyc,, +win32com/client/CLSIDToClass.py,sha256=iFPiZ8GYRSQpMddfbORUPIFY0MPER9O_73gOLipmqug,1697 +win32com/client/__init__.py,sha256=c67i418ecalp6QTBZ1U0heU9tsIs1z_h3suK2I5Wauc,23531 +win32com/client/__pycache__/CLSIDToClass.cpython-36.pyc,, +win32com/client/__pycache__/__init__.cpython-36.pyc,, +win32com/client/__pycache__/build.cpython-36.pyc,, +win32com/client/__pycache__/combrowse.cpython-36.pyc,, +win32com/client/__pycache__/connect.cpython-36.pyc,, +win32com/client/__pycache__/dynamic.cpython-36.pyc,, +win32com/client/__pycache__/gencache.cpython-36.pyc,, +win32com/client/__pycache__/genpy.cpython-36.pyc,, +win32com/client/__pycache__/makepy.cpython-36.pyc,, +win32com/client/__pycache__/selecttlb.cpython-36.pyc,, +win32com/client/__pycache__/tlbrowse.cpython-36.pyc,, +win32com/client/__pycache__/util.cpython-36.pyc,, +win32com/client/build.py,sha256=QtU_MlHSws7GGt76419Fh1y8Pi1avJfkdazGITgJ6yY,23772 +win32com/client/combrowse.py,sha256=-OW1r1sHyHRHkHRW2FZY1tglSDqWXbUaSqcrB3DJtD8,20342 +win32com/client/connect.py,sha256=clM0ReY9jB5Fo3QwPp9eYdIAv5O837t4rewlD_QfDYM,1308 +win32com/client/dynamic.py,sha256=7zlQdG25JZ2SYpB187__UXIpr5nSf59YQB0YoyaDqRQ,22897 +win32com/client/gencache.py,sha256=HiwImtWZkRm7imPncEQONdsB-4zUbPgmxU2zSaqr0sk,23606 +win32com/client/genpy.py,sha256=va2obYosX1-PLeBmq44rJmAgTi-dKLetB8Rsg678pdQ,48037 +win32com/client/makepy.py,sha256=ClvqB7YLZerjlwgwbvFIn4nV_0ad7ZXQvOeEhcZ1TKE,12377 +win32com/client/selecttlb.py,sha256=kJfVDdB2UftQ9lMQ7Miznc3hYD5Sza3yZxz81gdUnIo,5057 +win32com/client/tlbrowse.py,sha256=KojgfE8ZAl8OcGalH7ZcobpYHPUVTbjNmFYVCpEx-Fo,7812 +win32com/client/util.py,sha256=7BUaCBL9ZeEZ4oimqqOBmEa62Lgnu2PTEZQL4h4vXfI,2962 +win32com/demos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +win32com/demos/__pycache__/__init__.cpython-36.pyc,, +win32com/demos/__pycache__/connect.cpython-36.pyc,, +win32com/demos/__pycache__/dump_clipboard.cpython-36.pyc,, +win32com/demos/__pycache__/eventsApartmentThreaded.cpython-36.pyc,, +win32com/demos/__pycache__/eventsFreeThreaded.cpython-36.pyc,, +win32com/demos/__pycache__/excelAddin.cpython-36.pyc,, +win32com/demos/__pycache__/excelRTDServer.cpython-36.pyc,, +win32com/demos/__pycache__/iebutton.cpython-36.pyc,, +win32com/demos/__pycache__/ietoolbar.cpython-36.pyc,, +win32com/demos/__pycache__/outlookAddin.cpython-36.pyc,, +win32com/demos/__pycache__/trybag.cpython-36.pyc,, +win32com/demos/connect.py,sha256=FBwc8vX3TrUfZF6nSaoPNnS8vd5LOQRDLdysCvsJGiQ,3658 +win32com/demos/dump_clipboard.py,sha256=qQ1P9QD3JioYgDQIanJniXGl3x7F0fODqQ_7fa7jqL4,2884 +win32com/demos/eventsApartmentThreaded.py,sha256=mJCvz0DmhuKeSQ3j3lzwUuHgzv1SVVtDtwgHk6ggqcI,3752 +win32com/demos/eventsFreeThreaded.py,sha256=xdgUDRbea1PyDuwkAQjs8JZaLZ58q-0wUk-gEpfG1C4,3527 
+win32com/demos/excelAddin.py,sha256=wT5bV5zTem33QJAcQFRmu6NmqiCGbf9d8LEV0NAAQRM,5943 +win32com/demos/excelRTDServer.py,sha256=M9O_7rqO7nLEDsoQMNYk5I8iz9ZrbgpRy3XTAVn9jA0,15526 +win32com/demos/iebutton.py,sha256=vxMLmChX1sIfrTacrG2PR_OcBix81h8N_WvJe-3eUhU,7055 +win32com/demos/ietoolbar.py,sha256=33beySwAwkUGV1K7zhBP9uRxyjT3asE6muY7eex6daM,10752 +win32com/demos/outlookAddin.py,sha256=irIhX0dsmOj4F0mqzmjxDuspzwscpLWEMIM7sSCDIoA,4628 +win32com/demos/trybag.py,sha256=ecwFEiVDDn8TEN55-dawj9Glgdigks1DPY7-5e3sPu0,1988 +win32com/include/PythonCOM.h,sha256=LAUUG2LRn1cLNGltLsywn2dyqpoKtl78NBhkw0zVYjI,29959 +win32com/include/PythonCOMRegister.h,sha256=Rye8T8o018cPzAiXp437lLiNggKWaNDdAw5dvYxlT_8,4264 +win32com/include/PythonCOMServer.h,sha256=NCjQ1HuvxNCAjSTDdUxWZG_F_vSxnT5kQfnUunTx6xQ,9042 +win32com/libs/axscript.lib,sha256=eBhev-do65LmzmJ3m35ehkLhseXnP-aQD-tt_QEVB4M,72478 +win32com/libs/pythoncom.lib,sha256=LFoXrPhRlRnknCECYLbCoOVqBJeGEGiJEGka8grkHOk,158768 +win32com/makegw/__init__.py,sha256=_YglApLhpR2XszXs9oBtOgxSaAqUH53iH_trnoLJdus,30 +win32com/makegw/__pycache__/__init__.cpython-36.pyc,, +win32com/makegw/__pycache__/makegw.cpython-36.pyc,, +win32com/makegw/__pycache__/makegwenum.cpython-36.pyc,, +win32com/makegw/__pycache__/makegwparse.cpython-36.pyc,, +win32com/makegw/makegw.py,sha256=dkAWdpOgVACuHngNOp0EnHoTdD3S6nwlImlRJ7eOvV8,17010 +win32com/makegw/makegwenum.py,sha256=bsUfrtOysBDztggCsb7TMfoGPvmLaXS0Z6GNwYvsxQE,9750 +win32com/makegw/makegwparse.py,sha256=V6NysctwVPOOs37CNU5au-7nJuS7WcnIcavR8IFvM5M,30573 +win32com/olectl.py,sha256=BpMTFV51dc7FFHbTPn-BGpSRTz1PmPh63ZxkE6OvKkY,2984 +win32com/readme.htm,sha256=nPlGTK0fMOgYzsKwuKvFnbIIqDwGOmGMT-BfAnmFlRk,3757 +win32com/server/__init__.py,sha256=ZRa-wmxR9S3nwIZJquWn3eiMT20gdoVB8eQlspJ04Jo,49 +win32com/server/__pycache__/__init__.cpython-36.pyc,, +win32com/server/__pycache__/connect.cpython-36.pyc,, +win32com/server/__pycache__/dispatcher.cpython-36.pyc,, +win32com/server/__pycache__/exception.cpython-36.pyc,, +win32com/server/__pycache__/factory.cpython-36.pyc,, +win32com/server/__pycache__/localserver.cpython-36.pyc,, +win32com/server/__pycache__/policy.cpython-36.pyc,, +win32com/server/__pycache__/register.cpython-36.pyc,, +win32com/server/__pycache__/util.cpython-36.pyc,, +win32com/server/connect.py,sha256=cSVJ47kMbcrLuYuPq3vnlZrRNjPcbxJ9x0yi-3P4sf4,2426 +win32com/server/dispatcher.py,sha256=4AJBqmrVRGahKL_JDDo-uZp-EzaXyjsQ7bZzSEfo9lQ,8993 +win32com/server/exception.py,sha256=DUErfBhN06PhPMKV7jsbqtE940bNoFHC4tGO-GPRcpQ,3155 +win32com/server/factory.py,sha256=uPypJdzoUQq7MeUsvekcgC9mxPDKZVa3Qq4iuxreaNE,766 +win32com/server/localserver.py,sha256=37FFEQuwnn5CTKsvjIoB5FEFZ76VNF0YEOjYzbGIlzg,1153 +win32com/server/policy.py,sha256=_WWcnuqvyC_l8jJHEUiPd7OuSAVQJsJQq_KhJK6D8zM,30629 +win32com/server/register.py,sha256=0SMFP5B8poIJ12iJBbS72xC0hyUboGGUq1sVm9gT-ow,23042 +win32com/server/util.py,sha256=Z1hSixNtWVUkm4nLm7pOd0gKfUxMJ8hTO81tLmvjKdA,6364 +win32com/servers/PythonTools.py,sha256=hcrvQaD6wPFxqXzY8wYg6cYIIHP5DxQnB88-xWq4nAo,1086 +win32com/servers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +win32com/servers/__pycache__/PythonTools.cpython-36.pyc,, +win32com/servers/__pycache__/__init__.cpython-36.pyc,, +win32com/servers/__pycache__/dictionary.cpython-36.pyc,, +win32com/servers/__pycache__/interp.cpython-36.pyc,, +win32com/servers/__pycache__/perfmon.cpython-36.pyc,, +win32com/servers/__pycache__/test_pycomtest.cpython-36.pyc,, +win32com/servers/dictionary.py,sha256=0Rg3Cdnm-8tFzEnqF9XBbfTjprpoOos6blpWZKIE-OQ,4061 
+win32com/servers/interp.py,sha256=zHa0A0WMh4cg-hPlvPmYhDkwzs6fEZVm-V8Ofo87gic,1679 +win32com/servers/perfmon.py,sha256=VCvKKAHdbKTWJnZBl-OJcT4TkpiXR4o0-_t2E63Ta1k,1050 +win32com/servers/test_pycomtest.py,sha256=kOHNbTBpfF-yECdQbHSbMollU5YPuY3BltGsZLQ_Msk,4627 +win32com/storagecon.py,sha256=9DEUdyJkrKhrlDbfhzW4AMguRv91FTu0LKz8kn5g6k8,3082 +win32com/test/GenTestScripts.py,sha256=5IljUWBphbo_unDtKeKn2aMWkC4GX1e391PlWkDIkUk,2533 +win32com/test/Testpys.sct,sha256=2Kt_ngKxBqmxcBsBpph4DReQPT1TitBLMgPaC7gACvU,1058 +win32com/test/__init__.py,sha256=b62TU9a3IDJpL-4jCbL_cO1Sa2iqfw8Q4hMehSwegwE,43 +win32com/test/__pycache__/GenTestScripts.cpython-36.pyc,, +win32com/test/__pycache__/__init__.cpython-36.pyc,, +win32com/test/__pycache__/daodump.cpython-36.pyc,, +win32com/test/__pycache__/errorSemantics.cpython-36.pyc,, +win32com/test/__pycache__/pippo_server.cpython-36.pyc,, +win32com/test/__pycache__/policySemantics.cpython-36.pyc,, +win32com/test/__pycache__/testADOEvents.cpython-36.pyc,, +win32com/test/__pycache__/testAXScript.cpython-36.pyc,, +win32com/test/__pycache__/testAccess.cpython-36.pyc,, +win32com/test/__pycache__/testArrays.cpython-36.pyc,, +win32com/test/__pycache__/testClipboard.cpython-36.pyc,, +win32com/test/__pycache__/testCollections.cpython-36.pyc,, +win32com/test/__pycache__/testConversionErrors.cpython-36.pyc,, +win32com/test/__pycache__/testDCOM.cpython-36.pyc,, +win32com/test/__pycache__/testDates.cpython-36.pyc,, +win32com/test/__pycache__/testDictionary.cpython-36.pyc,, +win32com/test/__pycache__/testDynamic.cpython-36.pyc,, +win32com/test/__pycache__/testExchange.cpython-36.pyc,, +win32com/test/__pycache__/testExplorer.cpython-36.pyc,, +win32com/test/__pycache__/testGIT.cpython-36.pyc,, +win32com/test/__pycache__/testGatewayAddresses.cpython-36.pyc,, +win32com/test/__pycache__/testIterators.cpython-36.pyc,, +win32com/test/__pycache__/testMSOffice.cpython-36.pyc,, +win32com/test/__pycache__/testMSOfficeEvents.cpython-36.pyc,, +win32com/test/__pycache__/testMarshal.cpython-36.pyc,, +win32com/test/__pycache__/testNetscape.cpython-36.pyc,, +win32com/test/__pycache__/testPersist.cpython-36.pyc,, +win32com/test/__pycache__/testPippo.cpython-36.pyc,, +win32com/test/__pycache__/testPyComTest.cpython-36.pyc,, +win32com/test/__pycache__/testROT.cpython-36.pyc,, +win32com/test/__pycache__/testServers.cpython-36.pyc,, +win32com/test/__pycache__/testShell.cpython-36.pyc,, +win32com/test/__pycache__/testStorage.cpython-36.pyc,, +win32com/test/__pycache__/testStreams.cpython-36.pyc,, +win32com/test/__pycache__/testWMI.cpython-36.pyc,, +win32com/test/__pycache__/testall.cpython-36.pyc,, +win32com/test/__pycache__/testmakepy.cpython-36.pyc,, +win32com/test/__pycache__/testvb.cpython-36.pyc,, +win32com/test/__pycache__/testvbscript_regexp.cpython-36.pyc,, +win32com/test/__pycache__/testxslt.cpython-36.pyc,, +win32com/test/__pycache__/util.cpython-36.pyc,, +win32com/test/daodump.py,sha256=PUBRtL67RS-xlGGfcWM3HsOS3LLZelQIDJVrOk-PPLU,2040 +win32com/test/errorSemantics.py,sha256=la0srGc7dLwmH4Fr3nEO3JDMWdfRwmxY-qbi7sN9D1M,5838 +win32com/test/pippo.idl,sha256=IKfyeUEzhBcbFsLdKC0fN7B1Kf8Y2yFwe30O6m5niNU,1852 +win32com/test/pippo_server.py,sha256=Gc-yBVNDJw65xKXiLvX1NK4ivvOfxfNJl8gVjNTmp8Y,2710 +win32com/test/policySemantics.py,sha256=JyUPXlr_YF-VW84dp8veDXiAiSWxE0p5ozvwUkAk5iE,3030 +win32com/test/readme.txt,sha256=SIaK0lTI8gVwn5r2MmbM1CNaOIk5PfW3g2CYgrSntDE,718 +win32com/test/testADOEvents.py,sha256=9sK40XFW-mEQ1TyFCdRUGWF3h4F_xC5O0Xxq6ABvcaY,2677 
+win32com/test/testAXScript.py,sha256=_71BKtytL5s4t2-X7eCJf-TPZ3zAS6_rj5D_0MLYWWA,1268 +win32com/test/testAccess.py,sha256=JtMc3o5sGBBE2TpLUZYTELWIfwQfnvQCWsvyL8TZHXY,5688 +win32com/test/testArrays.py,sha256=D-GfFoWxdxnppAvt5T6KP4zIpXUiZu5rDVeYR758CYk,2505 +win32com/test/testClipboard.py,sha256=51qg-a87T_A449HCT5wOPPYqC2aCmTBjcXWQahX38TU,5924 +win32com/test/testCollections.py,sha256=cOt-4Wq11FtuIzJSOuB3NVZcAGsQ81szQM5lHoFxAjs,4239 +win32com/test/testConversionErrors.py,sha256=QNU1nVwClHnsdEGfJnPtZE5-IfNDpmkrHj44rvqzDFA,790 +win32com/test/testDCOM.py,sha256=t9NNWOx8LMNm2kiKLIzpSs-iK0fnbeChCMV2YQQZOsc,1665 +win32com/test/testDates.py,sha256=9uoj0IxEexm0qIQRKFn5MbL9nwOGII14Vwdl4U5GSvU,1392 +win32com/test/testDictionary.py,sha256=zbp9zqnIsOCsgyPJxsAkCReIY112t300acDLtXpxV5w,2858 +win32com/test/testDictionary.vbs,sha256=XuTmRxUQHvq7BMCF77shUTuECD23XaOC9tVVUOGIbd0,562 +win32com/test/testDynamic.py,sha256=ttoyIZA-HRxCe9qG5KiULWKa6B-9tF8nPXRjQEEBKkk,2657 +win32com/test/testExchange.py,sha256=T4pYuotQY_ulhGCu1X_NO7mKjK0xF5pfdYYNd0wcQps,3278 +win32com/test/testExplorer.py,sha256=Ffs-oUmHHk0fWX5eIyosb8xwYeME0kcRDRXWG9XLzeo,4635 +win32com/test/testGIT.py,sha256=IrdxRBJnleX_x4prSghgT-XIAEqLAsxLEKBisORy0NA,4581 +win32com/test/testGatewayAddresses.py,sha256=PIH_72pgo3rwIanA0mDpUVIx6w9tm9M0h8C1fM3EFiM,5146 +win32com/test/testInterp.vbs,sha256=5jsqgEH2g0kug8H9rDoMlOP2yynP-1T52X1OsGqaTgo,258 +win32com/test/testIterators.py,sha256=KRhXc22MJnbMEqXMLL_BKq7FdjmPida6Zua1mibpMMY,4609 +win32com/test/testMSOffice.py,sha256=eZJuocHVn0lUHhyvw1BGJskThJRq5mUnjCx2gmnDk7o,5909 +win32com/test/testMSOfficeEvents.py,sha256=KKRKT6AHx3ZVu1gtoEZofUb9awnIg7wdLeqbrb1gl4g,3862 +win32com/test/testMarshal.py,sha256=ekNC2_V7XVgJYyCm6xYeoNsEl3bHbL5Z-rqfbHIsplg,5781 +win32com/test/testNetscape.py,sha256=z1r3BgAbbNLfXmnywThIsHc2fJNWmLfRDhRPsHBJuFM,638 +win32com/test/testPersist.py,sha256=Uiiq03L2W34nsk-xkJ3Nd2uXAJUbQj6KrG2VCtYVluk,6284 +win32com/test/testPippo.py,sha256=v1GHfnt-PO37ShwD2lvScTFi9M2Pjg81hctVYlNOovI,2682 +win32com/test/testPyComTest.py,sha256=ahGcKJvMmlF8MdV5W9S5YasMvmkOnARGYg6hQVzxrYg,28815 +win32com/test/testPyScriptlet.js,sha256=DKTmlgw3MitKHXu9XCRl0_dJvROhj3FgmFWtgluKVz8,1087 +win32com/test/testROT.py,sha256=SixSNngN-E3X9qvWP0-uh2liof9Oa7T2Y074ac_XS3o,756 +win32com/test/testServers.py,sha256=Dbc-zFZmqHsD7_P2jXfzphHE_-_irGMVjtUK8T7eaT4,1365 +win32com/test/testShell.py,sha256=VQilOW0eZ1cfGQE26dFg84pDmrf0fo25hsxomxLHzwI,9656 +win32com/test/testStorage.py,sha256=qcwc1y0gkAusna5s5K76ILqDdfUmiszBJzO1A3Fts5E,3345 +win32com/test/testStreams.py,sha256=LSeTmZeQGVbTEPDBBXU_p7vV0Uv8mN7ODSS_UY0VJW8,4390 +win32com/test/testWMI.py,sha256=TSREx_Eg8x4JCa-XvYTAIER3UH_cHzcNPPfOUly5N88,458 +win32com/test/testall.py,sha256=FqTApylwHRm9MmpsNSWHMGCeysNkU5g7oiU5F2-QpJ0,8877 +win32com/test/testmakepy.py,sha256=n4Kkg014uSqPqjsOG9DvD1Rjosu96ABg_ZHaYOAkbfA,1931 +win32com/test/testvb.py,sha256=vUAG8MyU1bPjhNB3ZnoRm1cQd-2rO2m2Zg08GYWqTA4,20293 +win32com/test/testvbscript_regexp.py,sha256=Y6OfEfJJTg-Ac4aOIBdCARu7lvt5Exj8hNlIMBLhY8w,1108 +win32com/test/testxslt.js,sha256=dfBCany-kdVVYXD5Ommgf7Fld5DOVhWq7olM4Xo57xE,569 +win32com/test/testxslt.py,sha256=we85iJ_P8pNauihSuT77UjpCgaFjU5NVBUTcSMtK9mU,890 +win32com/test/testxslt.xsl,sha256=6-rByRIkFOQ1V68Hn0NBpzp6Ww5ch6uFvYJDC9jLjYE,2058 +win32com/test/util.py,sha256=huokTeMyMn1XmHzFVnz_mu3aYlbOb6K194ZNsXYtT9w,8123 +win32com/universal.py,sha256=yJt_byFv7eRd86wXYDRtNkOFRUcviBNRnc0v9KmEj5g,8237 +win32com/util.py,sha256=s4gFjuG0qmbBDSx4-AT2bQMyB-dAJS1ZFyEcySLnrFM,884 
+win32comext/adsi/__init__.py,sha256=GZ0HFRrZXwiNamK2QQgt5--DFjEUYNBhjoZpBSNWCz4,3323 +win32comext/adsi/__pycache__/__init__.cpython-36.pyc,, +win32comext/adsi/__pycache__/adsicon.cpython-36.pyc,, +win32comext/adsi/adsi.pyd,sha256=Yo_wdavIoQNnPQily7Hw0ZcI8yi5vRpO7EZskMnpaoU,92672 +win32comext/adsi/adsicon.py,sha256=wRMSxis_XHxnIDx8u7YvK55NZLBOyYpTM6zA65n7E4A,12541 +win32comext/adsi/demos/__pycache__/objectPicker.cpython-36.pyc,, +win32comext/adsi/demos/__pycache__/scp.cpython-36.pyc,, +win32comext/adsi/demos/__pycache__/search.cpython-36.pyc,, +win32comext/adsi/demos/__pycache__/test.cpython-36.pyc,, +win32comext/adsi/demos/objectPicker.py,sha256=FJfgewwEhIAz9u3CVO62fJt_Kp_C9jdlkGpolcx9l1g,1987 +win32comext/adsi/demos/scp.py,sha256=ZnHzdFTWtZ-JzqqclRkrri34eej3DqzQaZai7mbypMA,19531 +win32comext/adsi/demos/search.py,sha256=SKViIT0wIsZ1PRqTjEYM9sk-zLNaTLfwVpnH3uZeTlo,4265 +win32comext/adsi/demos/test.py,sha256=LfkqV0v4qq7fpV-m9dY7G9m7oZmxa_JCm8-VitiW-SI,7276 +win32comext/authorization/__init__.py,sha256=i0-OSRbsfKDEXd3rDOejksMSOaZfMi4J9suZLeTYVw0,192 +win32comext/authorization/__pycache__/__init__.cpython-36.pyc,, +win32comext/authorization/authorization.pyd,sha256=e5x60OgeOwrtnqMBWaUTS0carWyxzw1PMe5fiB6gHes,27648 +win32comext/authorization/demos/EditSecurity.py,sha256=Yuc_zho6Gb_hS0SHzxyIL3q0BZ8kDimLQsLQGquLyNs,7800 +win32comext/authorization/demos/EditServiceSecurity.py,sha256=xsCz2QHqjqbTMIsjn8z8z4aEryjY6Ysx1ggoJDdqFTc,7465 +win32comext/authorization/demos/__pycache__/EditSecurity.cpython-36.pyc,, +win32comext/authorization/demos/__pycache__/EditServiceSecurity.cpython-36.pyc,, +win32comext/axcontrol/__init__.py,sha256=_TWahnZLICQjn0v7zlMUZYJyHzOq_Yjnui4TfbWO16o,135 +win32comext/axcontrol/__pycache__/__init__.cpython-36.pyc,, +win32comext/axcontrol/axcontrol.pyd,sha256=cOMP2JYZhNsu1ZXUr2MBdflpYFALuOq9d_UxH-0e3Ck,141312 +win32comext/axdebug/__init__.py,sha256=MQmkALGNRmHzBRXzm0F0uPsbs3_8Rb_4IwD4syfqOc4,134 +win32comext/axdebug/__pycache__/__init__.cpython-36.pyc,, +win32comext/axdebug/__pycache__/adb.cpython-36.pyc,, +win32comext/axdebug/__pycache__/codecontainer.cpython-36.pyc,, +win32comext/axdebug/__pycache__/contexts.cpython-36.pyc,, +win32comext/axdebug/__pycache__/debugger.cpython-36.pyc,, +win32comext/axdebug/__pycache__/documents.cpython-36.pyc,, +win32comext/axdebug/__pycache__/dump.cpython-36.pyc,, +win32comext/axdebug/__pycache__/expressions.cpython-36.pyc,, +win32comext/axdebug/__pycache__/gateways.cpython-36.pyc,, +win32comext/axdebug/__pycache__/stackframe.cpython-36.pyc,, +win32comext/axdebug/__pycache__/util.cpython-36.pyc,, +win32comext/axdebug/adb.py,sha256=mqGb5Hysj6zg99lMVGlg_X98O0LPVdSfFXR0kFaXvHs,17305 +win32comext/axdebug/axdebug.pyd,sha256=9uvaRKhbTX35iXN8mR1pdQDLh3gKNPepWI8xkHMJL90,287232 +win32comext/axdebug/codecontainer.py,sha256=xmJWpBPHoQQ4xh90CE6FjBzU3f_R7iq04d8LK4V99ok,8899 +win32comext/axdebug/contexts.py,sha256=22q2znB7l5uOs4PWv-iwfjFI4MvBcL8W8MQzvXmlGQ4,2164 +win32comext/axdebug/debugger.py,sha256=n_QZA0TIQy5o-6H2AavXMYwAFdEQBNWM2Dpz-Q-Wt64,6941 +win32comext/axdebug/documents.py,sha256=tlGdayNTmmJheKZjYJ6eSAVje94yh1jjtpfq9WA2tGI,4426 +win32comext/axdebug/dump.py,sha256=cH8HR1hJOaHt2vMv0cK1xg7D_-A3OgJI-WRMTYo7Vyg,1814 +win32comext/axdebug/expressions.py,sha256=pNmfsnwF2slv2kzB6Co4rV3sqyEfhGS02T4DogJVlNo,6199 +win32comext/axdebug/gateways.py,sha256=VqC6sVaepe4-6sHTd_tNqYeT9w87EwXXJEtVZqJAgYU,17122 +win32comext/axdebug/stackframe.py,sha256=Ka7NbYzKEdviZS_4e9SMSo0yUrvVqiqKFG1Xykrgo_Y,5818 
+win32comext/axdebug/util.py,sha256=PCZFUAkLAfJrtxQ_KiGvEnc9urP8LVp6xhVxQ82dtd0,3672 +win32comext/axscript/Demos/client/asp/CreateObject.asp,sha256=Ab_XMgQqnGJSQPVJfC6t83_DnCbxf-i9oVEOgaL_gRM,494 +win32comext/axscript/Demos/client/asp/caps.asp,sha256=sGOLQB66zAKIYBuNzZ38BUkrgTAEcVPis9qqianVhOw,1315 +win32comext/axscript/Demos/client/asp/interrupt/test.asp,sha256=jJhHUg1VrUIFW79iITqxwq_g36QD4PCvmMNR3YWqjC4,73 +win32comext/axscript/Demos/client/asp/interrupt/test.html,sha256=JfBFiUkNMFyjI54LlmyWIjupDUtP4oyQqZyjZJlAHwc,156 +win32comext/axscript/Demos/client/asp/interrupt/test1.asp,sha256=q17ZW3dvKd0EAM2ChxFJnMEMfmkx6iD4OF_FM_0Yyfs,88 +win32comext/axscript/Demos/client/asp/interrupt/test1.html,sha256=LpebQ74Io9BaT9ECVTXLGmL9jBNUPeNJo6sjTXmnPxU,157 +win32comext/axscript/Demos/client/asp/tut1.asp,sha256=cPwYTeDSenN7vH-AkgGCQjdUMIpGOXJ0bYQC_TI-wTw,147 +win32comext/axscript/Demos/client/ie/MarqueeText1.htm,sha256=grlec9nDkqmnPBGnC-FIoxqwMZwKHGcakrXMriFE3N0,703 +win32comext/axscript/Demos/client/ie/calc.htm,sha256=oUwZ6saxEbER7tLJIf-6TGVoMzo3CeR8vnzA-Qd15Cg,4041 +win32comext/axscript/Demos/client/ie/dbgtest.htm,sha256=Cslod2hX_NdEg2_5qgJGbyldLBRA3_OwJAcpjwl1BUY,190 +win32comext/axscript/Demos/client/ie/demo.htm,sha256=v6gMz5uO9i3OMoWVOsK4YVe3uQnuEz0oLUBHx8QB20M,445 +win32comext/axscript/Demos/client/ie/demo_check.htm,sha256=3MGr54eyO4lN2o-8rLEuKSGuIs2AfZvV-fdbrFX6di4,1503 +win32comext/axscript/Demos/client/ie/demo_intro.htm,sha256=OVe1mY0N5OHuva2rAzLnbw8ivCQkugVMJU0xp39KvuI,1575 +win32comext/axscript/Demos/client/ie/demo_menu.htm,sha256=BLseA1TBxQqER5gdMXfTRKOO3sTPSIeyJDYxOlHD_ls,499 +win32comext/axscript/Demos/client/ie/docwrite.htm,sha256=OxMkqVDlqpK6kemfjIoPMu8CRFFvQV5-l0ac5CGLQpI,467 +win32comext/axscript/Demos/client/ie/foo2.htm,sha256=Z5KqLujmjhUnA_IhWWJkiil980cToEMdayeMfaZpqSg,3486 +win32comext/axscript/Demos/client/ie/form.htm,sha256=eNKi9irPf1Ig5qBJgZlVsf-GsOTXRI3JhNBN5GZGWsY,481 +win32comext/axscript/Demos/client/ie/marqueeDemo.htm,sha256=Q0HTmD73TgPrJ0Q8rcCkCxQ6vlntToHdKgK-dcHm78w,1167 +win32comext/axscript/Demos/client/ie/mousetrack.htm,sha256=FQFHkST8vtlHSlXduuSWS5eRvzyCjYnVI5mSARYPJHg,2219 +win32comext/axscript/Demos/client/ie/pycom_blowing.gif,sha256=s8ZSBzs8dfWsgTgbb0S43urQZcY1xjdxoIBuSHeLr6o,20926 +win32comext/axscript/Demos/client/wsh/blank.pys,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +win32comext/axscript/Demos/client/wsh/excel.pys,sha256=PtEytbnQb8JCVzrjLEzf6wl3S-yK1PewsfehMxc1TxQ,1049 +win32comext/axscript/Demos/client/wsh/registry.pys,sha256=NoKLD7xmsT_smd4cG4mT4kkXniaNVTKKJFf8hNUU6_k,1625 +win32comext/axscript/Demos/client/wsh/test.pys,sha256=bQo-cS0Tbay4hq7zS16Gbw7tP2jBWqVpu4gniTmpbcA,371 +win32comext/axscript/__init__.py,sha256=_TWahnZLICQjn0v7zlMUZYJyHzOq_Yjnui4TfbWO16o,135 +win32comext/axscript/__pycache__/__init__.cpython-36.pyc,, +win32comext/axscript/__pycache__/asputil.cpython-36.pyc,, +win32comext/axscript/asputil.py,sha256=vdf0yvpAJvOjEgDMBXxdITO30id57-iui4pBZrkq7VE,227 +win32comext/axscript/axscript.pyd,sha256=kTdeuz1yGAo4VPrIiob_DGO_dJwx5nKl-dLqJnm-bQE,93696 +win32comext/axscript/client/__init__.py,sha256=OGhvFgDvBsfP6lv7fbLJUtj7mgJmSytYdSjASiwDfBs,27 +win32comext/axscript/client/__pycache__/__init__.cpython-36.pyc,, +win32comext/axscript/client/__pycache__/debug.cpython-36.pyc,, +win32comext/axscript/client/__pycache__/error.cpython-36.pyc,, +win32comext/axscript/client/__pycache__/framework.cpython-36.pyc,, +win32comext/axscript/client/__pycache__/pydumper.cpython-36.pyc,, +win32comext/axscript/client/__pycache__/pyscript.cpython-36.pyc,, 
+win32comext/axscript/client/__pycache__/pyscript_rexec.cpython-36.pyc,, +win32comext/axscript/client/__pycache__/scriptdispatch.cpython-36.pyc,, +win32comext/axscript/client/debug.py,sha256=NPn8NKyiFY5VXQ0nc5xI7gIPnSK7IPrXowc05Vw2B6w,7334 +win32comext/axscript/client/error.py,sha256=6-zcjy4C_0Lvt1zJjb0AyXSgK1SRPUOyZHB_KavuU9w,7981 +win32comext/axscript/client/framework.py,sha256=ijFNJnWuDpnT03bLLHgJH4rDic75hvnPP2VXY8k8o0Y,36789 +win32comext/axscript/client/pydumper.py,sha256=Od26b35MoL9BzlZGJr3lKAdz7flGP1inDrMabbZ8cVg,2134 +win32comext/axscript/client/pyscript.py,sha256=TrN12qqAWPZAYNvhTXgUjWCSU-GgsoKsqxJ-Bnj50aA,12746 +win32comext/axscript/client/pyscript_rexec.py,sha256=EkH1RFWWqpF1RUtOep_52Wh5zK1Zy-XiBtjceOdvcH8,1986 +win32comext/axscript/client/scriptdispatch.py,sha256=CUYpzAfoN_wMz9tYYFxlVliyD4ljjDswylWhb7KOVJI,3304 +win32comext/axscript/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +win32comext/axscript/server/__pycache__/__init__.cpython-36.pyc,, +win32comext/axscript/server/__pycache__/axsite.cpython-36.pyc,, +win32comext/axscript/server/__pycache__/error.cpython-36.pyc,, +win32comext/axscript/server/axsite.py,sha256=IEqcEbawOAVTdAiADsK34kmAaoQyCXcerjjEyl511hA,3812 +win32comext/axscript/server/error.py,sha256=6Rmmk3QNnwYwWibrGOe4T43W4Ynorj1U1HeOmSXJB9E,470 +win32comext/axscript/test/__pycache__/leakTest.cpython-36.pyc,, +win32comext/axscript/test/__pycache__/testHost.cpython-36.pyc,, +win32comext/axscript/test/__pycache__/testHost4Dbg.cpython-36.pyc,, +win32comext/axscript/test/debugTest.pys,sha256=kwYIB_79cQeue5_4PnNp80vgzX5teuMxeuOAvPtcbe4,202 +win32comext/axscript/test/debugTest.vbs,sha256=O3a6y4VzTUdZ2BufrDibMeyc7lfquKNfNPTwPhe8t2s,84 +win32comext/axscript/test/leakTest.py,sha256=kZDgePTY1yoV_lX2m4wnLX-vD6EksD8yQZBQc7vbSgE,4532 +win32comext/axscript/test/test.html,sha256=0e0MYBAsr7ZzEpefq96fxmZsyTqQo2XtD7vyKQt2MEw,1414 +win32comext/axscript/test/testHost.py,sha256=FO5yS5rj-PYGFy7NySra9ANI92gH5Mumfy3aZA4FweM,7524 +win32comext/axscript/test/testHost4Dbg.py,sha256=tOownfg5KOS4L4NMToO16dMScdx5A9ClhP8oIr4jf5M,2552 +win32comext/bits/__init__.py,sha256=i0-OSRbsfKDEXd3rDOejksMSOaZfMi4J9suZLeTYVw0,192 +win32comext/bits/__pycache__/__init__.cpython-36.pyc,, +win32comext/bits/bits.pyd,sha256=Ysxu5PmTWOa7k6Yd_kL9I0bahT9lKwlrDeV45m9wIfc,60928 +win32comext/bits/test/__pycache__/show_all_jobs.cpython-36.pyc,, +win32comext/bits/test/__pycache__/test_bits.cpython-36.pyc,, +win32comext/bits/test/show_all_jobs.py,sha256=HDAABMiRZe-KLrTMrV3I3W_YyPyK5EfibVMOEK1717c,1652 +win32comext/bits/test/test_bits.py,sha256=p3YCr2kcORL6d54LymDU9ssTEFW1NWA1ZlyqBmcf2fY,3994 +win32comext/directsound/__init__.py,sha256=MQmkALGNRmHzBRXzm0F0uPsbs3_8Rb_4IwD4syfqOc4,134 +win32comext/directsound/__pycache__/__init__.cpython-36.pyc,, +win32comext/directsound/directsound.pyd,sha256=rJI8l6EyWBMCclcCw564PSm0tRmj5obv7tj8AXuUM4Q,77312 +win32comext/directsound/test/__init__.py,sha256=zXbiaII25PKM2hnu9yT8i7I7_p6gQfPMbt-euxHVn_I,65 +win32comext/directsound/test/__pycache__/__init__.cpython-36.pyc,, +win32comext/directsound/test/__pycache__/ds_record.cpython-36.pyc,, +win32comext/directsound/test/__pycache__/ds_test.cpython-36.pyc,, +win32comext/directsound/test/ds_record.py,sha256=NVgg-DmrZYfYmFnya-4emZ47sdaxmB-OVTMEfEwxl8E,1365 +win32comext/directsound/test/ds_test.py,sha256=YjCQHhddVyCxp99aN1iCOGgtKvqRc1xCO9XcbUfbfWc,12983 +win32comext/ifilter/__init__.py,sha256=PyA_K1FxiknS69_9xz7fAitw2hj27TXsGE27GzxF2VE,40 +win32comext/ifilter/__pycache__/__init__.cpython-36.pyc,, 
+win32comext/ifilter/__pycache__/ifiltercon.cpython-36.pyc,, +win32comext/ifilter/demo/__pycache__/filterDemo.cpython-36.pyc,, +win32comext/ifilter/demo/filterDemo.py,sha256=kMI4CNMah8pnvJTPIogMGfEdclQpYd6cS4iU2FUoVa0,11058 +win32comext/ifilter/ifilter.pyd,sha256=vHcx4yFm2KhcFIQcfawkVVtLjZc2T590xrNJRePiz0k,29184 +win32comext/ifilter/ifiltercon.py,sha256=F_d-wX75XBiulHxOF55VkZ2W8qLNekR1va_EIdpE5Vo,3260 +win32comext/internet/__init__.py,sha256=_TWahnZLICQjn0v7zlMUZYJyHzOq_Yjnui4TfbWO16o,135 +win32comext/internet/__pycache__/__init__.cpython-36.pyc,, +win32comext/internet/__pycache__/inetcon.cpython-36.pyc,, +win32comext/internet/inetcon.py,sha256=B_E7PvfsBKMfbe32aIjstC75NJQFEU1Hzf2o0NYIopo,11564 +win32comext/internet/internet.pyd,sha256=HeezDlnmAjd7XPmcq-YvTADQnm-Xisgq3xBOMboCtO4,90112 +win32comext/mapi/__init__.py,sha256=y653h5nzLZOEMs98wmtpTAYNH7xf031zGhpWZZrvYl0,505 +win32comext/mapi/__pycache__/__init__.cpython-36.pyc,, +win32comext/mapi/__pycache__/emsabtags.cpython-36.pyc,, +win32comext/mapi/__pycache__/mapitags.cpython-36.pyc,, +win32comext/mapi/__pycache__/mapiutil.cpython-36.pyc,, +win32comext/mapi/demos/__pycache__/mapisend.cpython-36.pyc,, +win32comext/mapi/demos/mapisend.py,sha256=q3HtvmSWpO8-nl15z1DT-PG85mtvK2axVhE4Heo8bI4,3453 +win32comext/mapi/emsabtags.py,sha256=9VbKktxqSDPr1B9EkPPUloWpn2CECEWx0TNOc4MUQJk,61422 +win32comext/mapi/exchange.pyd,sha256=_FqiNECLkbBKLpUND9rw9evIANMKk-fsGnOVgUgPqSk,80896 +win32comext/mapi/mapi.pyd,sha256=0RFZCtzAXVznUFjQoA3cuqtbUQg37KLmpO9rKQSLdmA,182272 +win32comext/mapi/mapitags.py,sha256=bFp1QvcSR1Gk38o_N5rcmnZXbjnS6ydcMD_1JY3dtZk,70790 +win32comext/mapi/mapiutil.py,sha256=uW1VW-KEu_duy56D-MDyBzm-1bb0Y-TTcmTlJngnoFs,6032 +win32comext/propsys/__init__.py,sha256=zAJTnC6qYGfhRKfJOR8-Wyrp7NspOnaeoY2FHnG4pDY,27 +win32comext/propsys/__pycache__/__init__.cpython-36.pyc,, +win32comext/propsys/__pycache__/pscon.cpython-36.pyc,, +win32comext/propsys/propsys.pyd,sha256=Uq0GVUr8ou4QluOOnyfGsvFqshoqMdtGhVx2_sd4gMY,121344 +win32comext/propsys/pscon.py,sha256=ogW-9LL03Sj0-zQtY06dPifEO8hb1vXMthoO-1R6r2Q,48201 +win32comext/propsys/test/__pycache__/testpropsys.cpython-36.pyc,, +win32comext/propsys/test/testpropsys.py,sha256=wcBoZSk4zmhK6IIJXdw9VmlPB1Atbur7OswpLghWtX0,204 +win32comext/shell/__init__.py,sha256=_TWahnZLICQjn0v7zlMUZYJyHzOq_Yjnui4TfbWO16o,135 +win32comext/shell/__pycache__/__init__.cpython-36.pyc,, +win32comext/shell/__pycache__/shellcon.cpython-36.pyc,, +win32comext/shell/demos/IActiveDesktop.py,sha256=lNVDoqiJ0TZIU4ooGFNHavLCPYf5bJjj6q6iFg2Ywr8,1892 +win32comext/shell/demos/IFileOperationProgressSink.py,sha256=EGhTU_RyeLOYcxn-TwvRfbCrGvBrfSz3rzUHnN3bLJg,4034 +win32comext/shell/demos/IShellLinkDataList.py,sha256=TOyaeSXUPAx6dVNaaiSMtOM4BZZtVgvOWv29Ro9xrZA,1666 +win32comext/shell/demos/ITransferAdviseSink.py,sha256=LGcOkRsBup07r-GLn6AHvwP2zUmnyGSHEPLZpvhSs4Y,2377 +win32comext/shell/demos/IUniformResourceLocator.py,sha256=e4jLksBQSJvBQocpKPlXoGwJP6NGHfxk1CZ4oyoU_gA,1521 +win32comext/shell/demos/__pycache__/IActiveDesktop.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/IFileOperationProgressSink.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/IShellLinkDataList.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/ITransferAdviseSink.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/IUniformResourceLocator.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/browse_for_folder.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/create_link.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/dump_link.cpython-36.pyc,, 
+win32comext/shell/demos/__pycache__/explorer_browser.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/shellexecuteex.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/viewstate.cpython-36.pyc,, +win32comext/shell/demos/__pycache__/walk_shell_folders.cpython-36.pyc,, +win32comext/shell/demos/browse_for_folder.py,sha256=WbGUElZqyLmmaq6m42cA1VP5kdiRyzAxpnYmmRpIGwQ,1661 +win32comext/shell/demos/create_link.py,sha256=JB-RGhO7oZ-SnGSLAdQNOMg5gSXNtQ7meoT_VxQsKKg,1882 +win32comext/shell/demos/dump_link.py,sha256=XMfYHfP51010RRGt6Znmu1depXGghv1xF-leP0bp7gE,1436 +win32comext/shell/demos/explorer_browser.py,sha256=--7M6nYeB6uTWFUXySNG_Z3OrGmu7Vbez_y_lIHVQkI,4758 +win32comext/shell/demos/servers/__pycache__/column_provider.cpython-36.pyc,, +win32comext/shell/demos/servers/__pycache__/context_menu.cpython-36.pyc,, +win32comext/shell/demos/servers/__pycache__/copy_hook.cpython-36.pyc,, +win32comext/shell/demos/servers/__pycache__/empty_volume_cache.cpython-36.pyc,, +win32comext/shell/demos/servers/__pycache__/folder_view.cpython-36.pyc,, +win32comext/shell/demos/servers/__pycache__/icon_handler.cpython-36.pyc,, +win32comext/shell/demos/servers/__pycache__/shell_view.cpython-36.pyc,, +win32comext/shell/demos/servers/column_provider.py,sha256=BROCcRROfCBm-7zbGE1IEHDkemG1IiuysvBhHlTISMs,3883 +win32comext/shell/demos/servers/context_menu.py,sha256=SSPjVO6V6RS87E1LPODHn9joSbtbxQR4XNH_hZJnQbk,4442 +win32comext/shell/demos/servers/copy_hook.py,sha256=K7waf_F795YB8lP7E9KPZT8gKE9vLXdRuuUSy4muHmU,2881 +win32comext/shell/demos/servers/empty_volume_cache.py,sha256=eDlAgoX9R7rE3zT0JGbhxaPdrOFYdmDGwvz8Ia7Tecs,7594 +win32comext/shell/demos/servers/folder_view.py,sha256=1qIyRWfan_PsEdLJzVAjzX3GMvtCWck_ALBpW4eUREg,28874 +win32comext/shell/demos/servers/icon_handler.py,sha256=tTO43cf37VtDvNKNUTSu32s4SIjO4aOXMoQ16RxA24I,2600 +win32comext/shell/demos/servers/shell_view.py,sha256=MnDm7mmxobxWNniq4JaZs8trab8dnlHlSU6Z6urgik4,37745 +win32comext/shell/demos/shellexecuteex.py,sha256=Sj6XwHILXPFV07SxMxdsXr2QE61-Gc4dHNIy-QUpyUg,521 +win32comext/shell/demos/viewstate.py,sha256=4ECIVPam_JTQDTUwEE5cMz4NN3XDne4LE2V1t62yTlQ,1823 +win32comext/shell/demos/walk_shell_folders.py,sha256=JQxVL1ubqGuHuhQcp-ND9G6eF9kyDb6k2IwDH4i1jy4,740 +win32comext/shell/shell.pyd,sha256=10v-aZRF7EVQ72U7OnNnlMn0LWpMv5dZCZFtmdULkF0,523776 +win32comext/shell/shellcon.py,sha256=SHK0U7i6dZo_nZf-RPzvcZf5tPJfpmySV5bXnLRgFaE,53896 +win32comext/shell/test/__pycache__/testSHFileOperation.cpython-36.pyc,, +win32comext/shell/test/__pycache__/testShellFolder.cpython-36.pyc,, +win32comext/shell/test/__pycache__/testShellItem.cpython-36.pyc,, +win32comext/shell/test/testSHFileOperation.py,sha256=zWzp6JXRwHN1gpT1ftHjzd4jkgjMjHe8ZSFjIlIkt5s,1689 +win32comext/shell/test/testShellFolder.py,sha256=jGeHv25cVvt1BfrP3fqOznTWW6gtOuJ9ba6wr23x9R4,585 +win32comext/shell/test/testShellItem.py,sha256=lq_SyfB_egxrvAHMGUbw_711egmFzFe4P9ZH-PPfMK8,2918 +win32comext/taskscheduler/__init__.py,sha256=i0-OSRbsfKDEXd3rDOejksMSOaZfMi4J9suZLeTYVw0,192 +win32comext/taskscheduler/__pycache__/__init__.cpython-36.pyc,, +win32comext/taskscheduler/taskscheduler.pyd,sha256=ku1JAqsnG1lzfAboDhvA7Nl4aQ1GbKzsMVsSNHlkADU,51712 +win32comext/taskscheduler/test/__pycache__/test_addtask.cpython-36.pyc,, +win32comext/taskscheduler/test/__pycache__/test_addtask_1.cpython-36.pyc,, +win32comext/taskscheduler/test/__pycache__/test_addtask_2.cpython-36.pyc,, +win32comext/taskscheduler/test/__pycache__/test_localsystem.cpython-36.pyc,, 
+win32comext/taskscheduler/test/test_addtask.py,sha256=xnWpUy2lgzBSw9Upja5pAY6roE2_6AV7mrWX6Iku9C8,2120 +win32comext/taskscheduler/test/test_addtask_1.py,sha256=CX48q1k8wUg2Brf87yu6esRE4bgc7BUy-pwEBr-ZrZ8,2092 +win32comext/taskscheduler/test/test_addtask_2.py,sha256=4tH4fSTG0dczU788BWv3ICn1GkmONmwXNK4m0LqPTbk,1640 +win32comext/taskscheduler/test/test_localsystem.py,sha256=KedzqTAl4sbF-HW-oZbmjhnOQIwsOA_m5qGxdQ54zuk,69 diff --git a/venv/Lib/site-packages/pywin32-301.dist-info/REQUESTED b/venv/Lib/site-packages/pywin32-301.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pywin32-301.dist-info/WHEEL b/venv/Lib/site-packages/pywin32-301.dist-info/WHEEL new file mode 100644 index 00000000..2be04293 --- /dev/null +++ b/venv/Lib/site-packages/pywin32-301.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: false +Tag: cp36-cp36m-win_amd64 + diff --git a/venv/Lib/site-packages/pywin32-301.dist-info/metadata.json b/venv/Lib/site-packages/pywin32-301.dist-info/metadata.json new file mode 100644 index 00000000..a7140084 --- /dev/null +++ b/venv/Lib/site-packages/pywin32-301.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Environment :: Win32 (MS Windows)", "Intended Audience :: Developers", "License :: OSI Approved :: Python Software Foundation License", "Operating System :: Microsoft :: Windows", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython"], "extensions": {"python.details": {"contacts": [{"email": "mhammond@skippinet.com.au", "name": "Mark Hammond (et al)", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/mhammond/pywin32"}}}, "generator": "bdist_wheel (0.30.0)", "license": "PSF", "metadata_version": "2.0", "name": "pywin32", "summary": "Python for Window Extensions", "version": "301"} \ No newline at end of file diff --git a/venv/Lib/site-packages/pywin32-301.dist-info/top_level.txt b/venv/Lib/site-packages/pywin32-301.dist-info/top_level.txt new file mode 100644 index 00000000..715a2070 --- /dev/null +++ b/venv/Lib/site-packages/pywin32-301.dist-info/top_level.txt @@ -0,0 +1,94 @@ +PyISAPI_loader +_win32sysloader +_winxptheme +adodbapi +adsi +authorization +axcontrol +axdebug +axscript +bits +dde +directsound +exchange +exchdapi +ifilter +internet +isapi +mapi +mmapfile +odbc +perfmon +perfmondata +propsys +pythoncom +pythonwin +pywintypes +servicemanager +shell +taskscheduler +timer +win2kras +win32\lib\afxres +win32\lib\commctrl +win32\lib\dbi +win32\lib\mmsystem +win32\lib\netbios +win32\lib\ntsecuritycon +win32\lib\pywin32_bootstrap +win32\lib\pywin32_testutil +win32\lib\pywintypes +win32\lib\rasutil +win32\lib\regcheck +win32\lib\regutil +win32\lib\sspi +win32\lib\sspicon +win32\lib\win32con +win32\lib\win32cryptcon +win32\lib\win32evtlogutil +win32\lib\win32gui_struct +win32\lib\win32inetcon +win32\lib\win32netcon +win32\lib\win32pdhquery +win32\lib\win32pdhutil +win32\lib\win32rcparser +win32\lib\win32serviceutil +win32\lib\win32timezone +win32\lib\win32traceutil +win32\lib\win32verstamp +win32\lib\winerror +win32\lib\winioctlcon +win32\lib\winnt +win32\lib\winperf +win32\lib\winxptheme +win32api +win32clipboard +win32com +win32comext +win32console +win32cred +win32crypt +win32event 
+win32evtlog +win32file +win32gui +win32help +win32inet +win32job +win32lz +win32net +win32pdh +win32pipe +win32print +win32process +win32profile +win32ras +win32security +win32service +win32trace +win32transaction +win32ts +win32ui +win32uiole +win32wnet +winxpgui diff --git a/venv/Lib/site-packages/pywin32.pth b/venv/Lib/site-packages/pywin32.pth new file mode 100644 index 00000000..b57c4964 --- /dev/null +++ b/venv/Lib/site-packages/pywin32.pth @@ -0,0 +1,7 @@ +# .pth file for the PyWin32 extensions +win32 +win32\lib +Pythonwin +# And some hackery to deal with environments where the post_install script +# isn't run. +import pywin32_bootstrap diff --git a/venv/Lib/site-packages/pywin32.version.txt b/venv/Lib/site-packages/pywin32.version.txt new file mode 100644 index 00000000..d8fc48a4 --- /dev/null +++ b/venv/Lib/site-packages/pywin32.version.txt @@ -0,0 +1 @@ +301 diff --git a/venv/Lib/site-packages/pywin32_system32/pythoncom36.dll b/venv/Lib/site-packages/pywin32_system32/pythoncom36.dll new file mode 100644 index 00000000..7dd3a044 Binary files /dev/null and b/venv/Lib/site-packages/pywin32_system32/pythoncom36.dll differ diff --git a/venv/Lib/site-packages/pywin32_system32/pywintypes36.dll b/venv/Lib/site-packages/pywin32_system32/pywintypes36.dll new file mode 100644 index 00000000..24f66bef Binary files /dev/null and b/venv/Lib/site-packages/pywin32_system32/pywintypes36.dll differ diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/INSTALLER b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/INSTALLER similarity index 100% rename from env/lib/python2.7/site-packages/pyobjc_framework_AVFoundation-5.2.dist-info/INSTALLER rename to venv/Lib/site-packages/setuptools-57.0.0.dist-info/INSTALLER diff --git a/venv/Lib/site-packages/setuptools-57.0.0.dist-info/LICENSE b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
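The pywin32.pth file added above leans on Python's standard .pth handling: at interpreter startup the site module appends the listed win32, win32\lib and Pythonwin directories to sys.path and executes the trailing import line, so pywin32_bootstrap can make pythoncom36.dll and pywintypes36.dll (committed under pywin32_system32 in this diff) loadable even when the post-install script was never run, exactly the "hackery" the .pth comment mentions. Below is a minimal sketch of what that buys inside the activated venv, assuming pywin32 301 as committed here; the win32gui usage is only an illustration, win32gui being one of the modules listed in top_level.txt above.

```python
# Sketch only: run inside the activated venv on Windows (pywin32 301 assumed).
import sys

# site.py has already processed pywin32.pth, so these relative entries from the
# .pth file should now appear on sys.path, resolved against site-packages.
pth_dirs = ("win32", "win32\\lib", "pythonwin")
print([p for p in sys.path if p.lower().rstrip("\\").endswith(pth_dirs)])

# The "import pywin32_bootstrap" line in the .pth made the bundled DLLs
# loadable, so the C extensions import without the post-install step.
import pywintypes  # noqa: F401  (raises ImportError if the bootstrap failed)
import win32gui    # illustrative: one of the pywin32 modules listed above

# Title of the currently focused window, i.e. the kind of string a
# foreground-window tracker would record.
print(win32gui.GetWindowText(win32gui.GetForegroundWindow()))
```

If either import fails, the DLLs under pywin32_system32 committed in this diff are the first thing to check.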
diff --git a/venv/Lib/site-packages/setuptools-57.0.0.dist-info/METADATA b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/METADATA new file mode 100644 index 00000000..5f13696e --- /dev/null +++ b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/METADATA @@ -0,0 +1,119 @@ +Metadata-Version: 2.1 +Name: setuptools +Version: 57.0.0 +Summary: Easily download, build, install, upgrade, and uninstall Python packages +Home-page: https://github.com/pypa/setuptools +Author: Python Packaging Authority +Author-email: distutils-sig@python.org +License: UNKNOWN +Project-URL: Documentation, https://setuptools.readthedocs.io/ +Keywords: CPAN PyPI distutils eggs package management +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Archiving :: Packaging +Classifier: Topic :: System :: Systems Administration +Classifier: Topic :: Utilities +Requires-Python: >=3.6 +License-File: LICENSE +Provides-Extra: certs +Requires-Dist: certifi (==2016.9.26) ; extra == 'certs' +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: pygments-github-lexers (==0.0.5) ; extra == 'docs' +Requires-Dist: sphinx-inline-tabs ; extra == 'docs' +Requires-Dist: sphinxcontrib-towncrier ; extra == 'docs' +Provides-Extra: ssl +Requires-Dist: wincertstore (==0.2) ; (sys_platform == "win32") and extra == 'ssl' +Provides-Extra: testing +Requires-Dist: pytest (>=4.6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: mock ; extra == 'testing' +Requires-Dist: flake8-2020 ; extra == 'testing' +Requires-Dist: virtualenv (>=13.0.0) ; extra == 'testing' +Requires-Dist: pytest-virtualenv (>=1.2.7) ; extra == 'testing' +Requires-Dist: wheel ; extra == 'testing' +Requires-Dist: paver ; extra == 'testing' +Requires-Dist: pip (>=19.1) ; extra == 'testing' +Requires-Dist: jaraco.envs ; extra == 'testing' +Requires-Dist: pytest-xdist ; extra == 'testing' +Requires-Dist: sphinx ; extra == 'testing' +Requires-Dist: jaraco.path (>=3.2.0) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy" and python_version < "3.10") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy" and python_version < "3.10") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/setuptools.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/setuptools + +.. image:: https://github.com/pypa/setuptools/workflows/tests/badge.svg + :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg + :target: https://setuptools.readthedocs.io + +.. 
image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + +.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white + :target: https://codecov.io/gh/pypa/setuptools + +.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat + :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme + +See the `Installation Instructions +`_ in the Python Packaging +User's Guide for instructions on installing, upgrading, and uninstalling +Setuptools. + +Questions and comments should be directed to the `distutils-sig +mailing list `_. +Bug reports and especially tested patches may be +submitted directly to the `bug tracker +`_. + + +Code of Conduct +=============== + +Everyone interacting in the setuptools project's codebases, issue trackers, +chat rooms, and mailing lists is expected to follow the +`PSF Code of Conduct `_. + + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + + +Security Contact +================ + +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. + + diff --git a/venv/Lib/site-packages/setuptools-57.0.0.dist-info/RECORD b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/RECORD new file mode 100644 index 00000000..a03340f4 --- /dev/null +++ b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/RECORD @@ -0,0 +1,300 @@ +_distutils_hack/__init__.py,sha256=X3RUiA6KBPoEmco_CjACyltyQbFRGVUpZRAbSkPGwMs,3688 +_distutils_hack/__pycache__/__init__.cpython-36.pyc,, +_distutils_hack/__pycache__/override.cpython-36.pyc,, +_distutils_hack/override.py,sha256=Eu_s-NF6VIZ4Cqd0tbbA5wtWky2IZPNd8et6GLt1mzo,44 +distutils-precedence.pth,sha256=fqf_7z_ioRfuEsaO1lU2F_DX_S8FkCV8JcSElZo7c3M,152 +pkg_resources/__init__.py,sha256=P3PNN3_m8JJrYMp-i-Sq-3rhK5vuViqqjn1UXKHfe7Q,108202 +pkg_resources/__pycache__/__init__.cpython-36.pyc,, +pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pkg_resources/_vendor/__pycache__/__init__.cpython-36.pyc,, +pkg_resources/_vendor/__pycache__/appdirs.cpython-36.pyc,, +pkg_resources/_vendor/__pycache__/pyparsing.cpython-36.pyc,, +pkg_resources/_vendor/appdirs.py,sha256=MievUEuv3l_mQISH5SF0shDk_BNhHHzYiAPrT3ITN4I,24701 +pkg_resources/_vendor/packaging/__about__.py,sha256=PNMsaZn4UcCHyubgROH1bl6CluduPjI5kFrSp_Zgklo,736 +pkg_resources/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 +pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_typing.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/markers.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/tags.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/utils.cpython-36.pyc,, 
+pkg_resources/_vendor/packaging/__pycache__/version.cpython-36.pyc,, +pkg_resources/_vendor/packaging/_compat.py,sha256=MXdsGpSE_W-ZrHoC87andI4LV2FAwU7HLL-eHe_CjhU,1128 +pkg_resources/_vendor/packaging/_structures.py,sha256=ozkCX8Q8f2qE1Eic3YiQ4buDVfgz2iYevY9e7R2y3iY,2022 +pkg_resources/_vendor/packaging/_typing.py,sha256=x59EhQ57TMT-kTRyLZV25HZvYGGwbucTo6iKh_O0tMw,1812 +pkg_resources/_vendor/packaging/markers.py,sha256=YSntQkMnKyw1_FG6oRNNnGxLL6bAxcGXOtuFE-YTS3k,9518 +pkg_resources/_vendor/packaging/requirements.py,sha256=R8K4H4xX_iD4LvpGw1U3ouuPbGN-wzsFgD7brhAM71Y,4929 +pkg_resources/_vendor/packaging/specifiers.py,sha256=uYp9l13F0LcknS6d4N60ytiBgFmIhKideOq9AnsxTco,31944 +pkg_resources/_vendor/packaging/tags.py,sha256=NKMS37Zo_nWrZxgsD6zbXsXgc9edn9m160cBiLmHJdE,24067 +pkg_resources/_vendor/packaging/utils.py,sha256=RShlvnjO2CtYSD8uri32frMMFMTmB-3ihsq1-ghzLEw,1811 +pkg_resources/_vendor/packaging/version.py,sha256=Cnbm-OO9D_qd8ZTFxzFcjSavexSYFZmyeaoPvMsjgPc,15470 +pkg_resources/_vendor/pyparsing.py,sha256=mahtkgcp3grNAD0re_9R0DLvBnvjzpeLwgJqT-3H1CE,232056 +pkg_resources/extern/__init__.py,sha256=3PixaT9Tzzd4NoyV6CVhGd7S_9Z-U5yvMWAftZKvC6k,2362 +pkg_resources/extern/__pycache__/__init__.cpython-36.pyc,, +pkg_resources/tests/data/my-test-package-source/__pycache__/setup.cpython-36.pyc,, +pkg_resources/tests/data/my-test-package-source/setup.py,sha256=Mrezl3nqxkYkjCYpIxmjhhg4AR8hgi4QZdEYmk-I7R8,104 +setuptools-57.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +setuptools-57.0.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 +setuptools-57.0.0.dist-info/METADATA,sha256=z1EsLZKHS76u3n9k2w833JiOpjdrvEZSlPXROZ4UiJE,4952 +setuptools-57.0.0.dist-info/RECORD,, +setuptools-57.0.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +setuptools-57.0.0.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239 +setuptools-57.0.0.dist-info/entry_points.txt,sha256=lkeJaK21vluS2y7MfmO_tbLYjh8vaZDgZswzU4JD9gg,2869 +setuptools-57.0.0.dist-info/top_level.txt,sha256=d9yL39v_W7qmKDDSH6sT4bE0j_Ls1M3P161OGgdsm4g,41 +setuptools/__init__.py,sha256=0c232LRyOLGdL-Ywmgk8uMubx7I21w-ixJWiT0jQK-c,7681 +setuptools/__pycache__/__init__.cpython-36.pyc,, +setuptools/__pycache__/_deprecation_warning.cpython-36.pyc,, +setuptools/__pycache__/_imp.cpython-36.pyc,, +setuptools/__pycache__/archive_util.cpython-36.pyc,, +setuptools/__pycache__/build_meta.cpython-36.pyc,, +setuptools/__pycache__/config.cpython-36.pyc,, +setuptools/__pycache__/dep_util.cpython-36.pyc,, +setuptools/__pycache__/depends.cpython-36.pyc,, +setuptools/__pycache__/dist.cpython-36.pyc,, +setuptools/__pycache__/errors.cpython-36.pyc,, +setuptools/__pycache__/extension.cpython-36.pyc,, +setuptools/__pycache__/glob.cpython-36.pyc,, +setuptools/__pycache__/installer.cpython-36.pyc,, +setuptools/__pycache__/launch.cpython-36.pyc,, +setuptools/__pycache__/lib2to3_ex.cpython-36.pyc,, +setuptools/__pycache__/monkey.cpython-36.pyc,, +setuptools/__pycache__/msvc.cpython-36.pyc,, +setuptools/__pycache__/namespaces.cpython-36.pyc,, +setuptools/__pycache__/package_index.cpython-36.pyc,, +setuptools/__pycache__/py34compat.cpython-36.pyc,, +setuptools/__pycache__/sandbox.cpython-36.pyc,, +setuptools/__pycache__/ssl_support.cpython-36.pyc,, +setuptools/__pycache__/unicode_utils.cpython-36.pyc,, +setuptools/__pycache__/version.cpython-36.pyc,, +setuptools/__pycache__/wheel.cpython-36.pyc,, +setuptools/__pycache__/windows_support.cpython-36.pyc,, 
+setuptools/_deprecation_warning.py,sha256=jU9-dtfv6cKmtQJOXN8nP1mm7gONw5kKEtiPtbwnZyI,218 +setuptools/_distutils/__init__.py,sha256=lpQAphR_7uhWC2fbSEps4Ja9W4YwezN_IX_LJEt3khU,250 +setuptools/_distutils/__pycache__/__init__.cpython-36.pyc,, +setuptools/_distutils/__pycache__/_msvccompiler.cpython-36.pyc,, +setuptools/_distutils/__pycache__/archive_util.cpython-36.pyc,, +setuptools/_distutils/__pycache__/bcppcompiler.cpython-36.pyc,, +setuptools/_distutils/__pycache__/ccompiler.cpython-36.pyc,, +setuptools/_distutils/__pycache__/cmd.cpython-36.pyc,, +setuptools/_distutils/__pycache__/config.cpython-36.pyc,, +setuptools/_distutils/__pycache__/core.cpython-36.pyc,, +setuptools/_distutils/__pycache__/cygwinccompiler.cpython-36.pyc,, +setuptools/_distutils/__pycache__/debug.cpython-36.pyc,, +setuptools/_distutils/__pycache__/dep_util.cpython-36.pyc,, +setuptools/_distutils/__pycache__/dir_util.cpython-36.pyc,, +setuptools/_distutils/__pycache__/dist.cpython-36.pyc,, +setuptools/_distutils/__pycache__/errors.cpython-36.pyc,, +setuptools/_distutils/__pycache__/extension.cpython-36.pyc,, +setuptools/_distutils/__pycache__/fancy_getopt.cpython-36.pyc,, +setuptools/_distutils/__pycache__/file_util.cpython-36.pyc,, +setuptools/_distutils/__pycache__/filelist.cpython-36.pyc,, +setuptools/_distutils/__pycache__/log.cpython-36.pyc,, +setuptools/_distutils/__pycache__/msvc9compiler.cpython-36.pyc,, +setuptools/_distutils/__pycache__/msvccompiler.cpython-36.pyc,, +setuptools/_distutils/__pycache__/py35compat.cpython-36.pyc,, +setuptools/_distutils/__pycache__/py38compat.cpython-36.pyc,, +setuptools/_distutils/__pycache__/spawn.cpython-36.pyc,, +setuptools/_distutils/__pycache__/sysconfig.cpython-36.pyc,, +setuptools/_distutils/__pycache__/text_file.cpython-36.pyc,, +setuptools/_distutils/__pycache__/unixccompiler.cpython-36.pyc,, +setuptools/_distutils/__pycache__/util.cpython-36.pyc,, +setuptools/_distutils/__pycache__/version.cpython-36.pyc,, +setuptools/_distutils/__pycache__/versionpredicate.cpython-36.pyc,, +setuptools/_distutils/_msvccompiler.py,sha256=jR0JM5A1JMnZ6xMDicQzhXWgXTVXs1lWAeUexC1z198,20813 +setuptools/_distutils/archive_util.py,sha256=qW-uiGwYexTvK5e-iSel_31Dshx-CqTanNPK6snwf98,8572 +setuptools/_distutils/bcppcompiler.py,sha256=OJDVpCUmX6H8v_7lV1zifV1fcx92Cr2dhiUh6989UJI,14894 +setuptools/_distutils/ccompiler.py,sha256=G2tn9Q3zQ0VUNfW1LM-nrnLt_6OhtiUunugCv85D1PQ,47607 +setuptools/_distutils/cmd.py,sha256=eco6LAGUtobLuPafuhmgKgkwRRL_WY8KJ4YeDCHpcls,18079 +setuptools/_distutils/command/__init__.py,sha256=2TA-rlNDlzeI-csbWHXFjGD8uOYqALMfyWOhT49nC6g,799 +setuptools/_distutils/command/__pycache__/__init__.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/bdist.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/bdist_msi.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/build.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/build_clib.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/build_ext.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/build_py.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/build_scripts.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/check.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/clean.cpython-36.pyc,, 
+setuptools/_distutils/command/__pycache__/config.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/install.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/install_data.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/install_egg_info.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/install_headers.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/install_lib.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/install_scripts.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/py37compat.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/register.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/sdist.cpython-36.pyc,, +setuptools/_distutils/command/__pycache__/upload.cpython-36.pyc,, +setuptools/_distutils/command/bdist.py,sha256=2z4eudRl_n7m3lG9leL0IYqes4bsm8c0fxfZuiafjMg,5562 +setuptools/_distutils/command/bdist_dumb.py,sha256=BTur9jcIppyP7Piavjfsk7YjElqvxeYO2npUyPPOekc,4913 +setuptools/_distutils/command/bdist_msi.py,sha256=EVFQYN_X-ExeeP8gmdV9JcINsuUGsLJUz9afMU0Rt8c,35579 +setuptools/_distutils/command/bdist_rpm.py,sha256=gjOw22GhDSbcq0bdq25cTb-n6HWWm0bShLQad_mkJ4k,21537 +setuptools/_distutils/command/bdist_wininst.py,sha256=iGlaI-VfElHOneeczKHWnSN5a10-7IMcJaXuR1mdS3c,16030 +setuptools/_distutils/command/build.py,sha256=11NyR2UAUzalrkTZ2ph0BAHFWFC2jtSsN7gIaF-NC08,5767 +setuptools/_distutils/command/build_clib.py,sha256=bgVTHh28eLQA2Gkw68amApd_j7qQBX4MTI-zTvAK_J4,8022 +setuptools/_distutils/command/build_ext.py,sha256=Y_SYbd8SHcpgNPfv3ifVniZljYs1cLAFleBSi2_O3CY,31685 +setuptools/_distutils/command/build_py.py,sha256=S_Nlw4hZE8PnIgqX5OFMdmt-GSmOhPQQ4f2jr1uBnoU,17190 +setuptools/_distutils/command/build_scripts.py,sha256=aKycJJPx3LfZ1cvZgSJaxnD2LnvRM5WJ-8xkpdgcLsI,6232 +setuptools/_distutils/command/check.py,sha256=5qDtI75ccZg3sAItQWeaIu8y3FR314O4rr9Smz4HsEo,5637 +setuptools/_distutils/command/clean.py,sha256=2TCt47ru4hZZM0RfVfUYj5bbpicpGLP4Qhw5jBtvp9k,2776 +setuptools/_distutils/command/config.py,sha256=2aTjww3PwjMB8-ZibCe4P7B-qG1hM1gn_rJXYyxRz6c,13117 +setuptools/_distutils/command/install.py,sha256=oOM2rD7l_SglARNVDmiZn8u6DAfidXRF_yE5QS328B4,27482 +setuptools/_distutils/command/install_data.py,sha256=YhGOAwh3gJPqF7em5XA0rmpR42z1bLh80ooElzDyUvk,2822 +setuptools/_distutils/command/install_egg_info.py,sha256=0kW0liVMeadkjX0ZcRfMptKFen07Gw6gyw1VHT5KIwc,2603 +setuptools/_distutils/command/install_headers.py,sha256=XQ6idkbIDfr1ljXCOznuVUMvOFpHBn6cK0Wz9gIM2b4,1298 +setuptools/_distutils/command/install_lib.py,sha256=9AofR-MO9lAtjwwuukCptepOaJEKMZW2VHiyR5hU7HA,8397 +setuptools/_distutils/command/install_scripts.py,sha256=_CLUeQwGJRcY2kik7azPMn5IdtDCrjWdUvZ1khlG6ck,2017 +setuptools/_distutils/command/py37compat.py,sha256=qzRhhvTihqx_PZZt2ZYECxh1X3Oj255VqatzelYFAKw,671 +setuptools/_distutils/command/register.py,sha256=2jaq9968rt2puRVDBx1HbNiXv27uOk8idE_4lPf_3VM,11712 +setuptools/_distutils/command/sdist.py,sha256=qotJjAOzyhJjq2-oDImjNFrOtaSneEFDJTB-sEk1wnU,19005 +setuptools/_distutils/command/upload.py,sha256=BLO1w7eSAqsCjCLXtf_CRVSjwF1WmyOByGVGNdcQ8oY,7597 +setuptools/_distutils/config.py,sha256=dtHgblx9JhfyrKx1-J7Jlxw_f7s8ZbPFQii2UWMTZpY,4827 +setuptools/_distutils/core.py,sha256=jbdOkpOK09xi-56vhhwvn3fYdhLb5DJO8q3K1fnQz0Q,8876 +setuptools/_distutils/cygwinccompiler.py,sha256=9U4JAusUzlAGJl0Y5nToPkQ3ldzseAtiye434mwJ0ow,16380 +setuptools/_distutils/debug.py,sha256=N6MrTAqK6l9SVk6tWweR108PM8Ol7qNlfyV-nHcLhsY,139 +setuptools/_distutils/dep_util.py,sha256=GuR9Iw_jzZRkyemJ5HX8rB_wRGxkIBcBm1qh54r7zhk,3491 
+setuptools/_distutils/dir_util.py,sha256=UwhBOUTcV65GTwce4SPuTXR8Z8q3LYEcmttqcGb0bYo,7778 +setuptools/_distutils/dist.py,sha256=Biuf6ca8uiFfMScRFsYUKtb5neMPtxKxRtXn50_1f3U,50421 +setuptools/_distutils/errors.py,sha256=Yr6tKZGdzBoNi53vBtiq0UJ__X05CmxSdQJqOWaw6SY,3577 +setuptools/_distutils/extension.py,sha256=bTb3Q0CoevGKYv5dX1ls--Ln8tlB0-UEOsi9BwzlZ-s,10515 +setuptools/_distutils/fancy_getopt.py,sha256=OPxp2CxHi1Yp_d1D8JxW4Ueq9fC71tegQFaafh58GGU,17784 +setuptools/_distutils/file_util.py,sha256=0hUqfItN_x2DVihR0MHdA4KCMVCOO8VoByaFp_a6MDg,8148 +setuptools/_distutils/filelist.py,sha256=8bRxhzp2FsaoHT7TuKD4Qjcuh_B9Ow_xTt_htZJvN2Q,12832 +setuptools/_distutils/log.py,sha256=hWBmdUC2K927QcVv3REMW3HMPclxccPQngxLSuUXQl0,1969 +setuptools/_distutils/msvc9compiler.py,sha256=X623B92g0v8A3BEM9qpRf396AEd_hfjkfDUVTKu0hcE,30453 +setuptools/_distutils/msvccompiler.py,sha256=qruALeGRq8-CjtjE2tLQ8W26QnchcYedWzFme8AxZ4Q,23540 +setuptools/_distutils/py35compat.py,sha256=-sk1vBIsOgH-AobjIYbK_OEjdJF_54Ul_D1EiE9XM_c,455 +setuptools/_distutils/py38compat.py,sha256=II7ddBxOijC7uNN4z_46HYUjwYTJYMNiLJoGTormZm0,212 +setuptools/_distutils/spawn.py,sha256=OmSnq3Eqditq8J5v5xXMmZSSHjLn4atbWSZbL65JulA,4691 +setuptools/_distutils/sysconfig.py,sha256=5z55MU7gXeceL_G9FK6ex-2OvdeIXJRZJafrtthJcfU,21349 +setuptools/_distutils/text_file.py,sha256=PsuAJeWdKJoLSV_6N6IpB5-0Pa84KzLUucJMFRazw3I,12483 +setuptools/_distutils/unixccompiler.py,sha256=E65edChYLoHY8wi4OxFu_wKt3hJe3GySF6v51G_ZzL0,14696 +setuptools/_distutils/util.py,sha256=Wlz9noChJjzem9mfgOu-KaN8soB4aNhRfe4VGltXd8w,20985 +setuptools/_distutils/version.py,sha256=8NogP6NPPQpp3EUMZcT9czEHia-ehqPo8spo_e7AgUU,12514 +setuptools/_distutils/versionpredicate.py,sha256=ZxpEA-TQv88mUWc6hetUO4qSqA2sa7ipjZ3QEK5evDk,5133 +setuptools/_imp.py,sha256=HmF91IbitRfsD5z-g4_wmcuH-RahyIONbPgiCOFgtzA,2392 +setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +setuptools/_vendor/__pycache__/__init__.cpython-36.pyc,, +setuptools/_vendor/__pycache__/ordered_set.cpython-36.pyc,, +setuptools/_vendor/__pycache__/pyparsing.cpython-36.pyc,, +setuptools/_vendor/more_itertools/__init__.py,sha256=C7sXffHTXM3P-iaLPPfqfmDoxOflQMJLcM7ed9p3jak,82 +setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-36.pyc,, +setuptools/_vendor/more_itertools/__pycache__/more.cpython-36.pyc,, +setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-36.pyc,, +setuptools/_vendor/more_itertools/more.py,sha256=DlZa8v6JihVwfQ5zHidOA-xDE0orcQIUyxVnCaUoDKE,117968 +setuptools/_vendor/more_itertools/recipes.py,sha256=UkNkrsZyqiwgLHANBTmvMhCvaNSvSNYhyOpz_Jc55DY,16256 +setuptools/_vendor/ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130 +setuptools/_vendor/packaging/__about__.py,sha256=PNMsaZn4UcCHyubgROH1bl6CluduPjI5kFrSp_Zgklo,736 +setuptools/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 +setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,, +setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,, +setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,, +setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,, +setuptools/_vendor/packaging/__pycache__/_typing.cpython-36.pyc,, +setuptools/_vendor/packaging/__pycache__/markers.cpython-36.pyc,, +setuptools/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,, +setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,, +setuptools/_vendor/packaging/__pycache__/tags.cpython-36.pyc,, 
+setuptools/_vendor/packaging/__pycache__/utils.cpython-36.pyc,, +setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc,, +setuptools/_vendor/packaging/_compat.py,sha256=MXdsGpSE_W-ZrHoC87andI4LV2FAwU7HLL-eHe_CjhU,1128 +setuptools/_vendor/packaging/_structures.py,sha256=ozkCX8Q8f2qE1Eic3YiQ4buDVfgz2iYevY9e7R2y3iY,2022 +setuptools/_vendor/packaging/_typing.py,sha256=x59EhQ57TMT-kTRyLZV25HZvYGGwbucTo6iKh_O0tMw,1812 +setuptools/_vendor/packaging/markers.py,sha256=BCCxZbt8xgysH8v5pqbLkdtQnRZHIGkJQqlNBGek4nQ,9509 +setuptools/_vendor/packaging/requirements.py,sha256=VHydZdk8m3qFxReomNwKr71cmpjantEV_xOhkEyyINI,4917 +setuptools/_vendor/packaging/specifiers.py,sha256=uYp9l13F0LcknS6d4N60ytiBgFmIhKideOq9AnsxTco,31944 +setuptools/_vendor/packaging/tags.py,sha256=NKMS37Zo_nWrZxgsD6zbXsXgc9edn9m160cBiLmHJdE,24067 +setuptools/_vendor/packaging/utils.py,sha256=RShlvnjO2CtYSD8uri32frMMFMTmB-3ihsq1-ghzLEw,1811 +setuptools/_vendor/packaging/version.py,sha256=Cnbm-OO9D_qd8ZTFxzFcjSavexSYFZmyeaoPvMsjgPc,15470 +setuptools/_vendor/pyparsing.py,sha256=mahtkgcp3grNAD0re_9R0DLvBnvjzpeLwgJqT-3H1CE,232056 +setuptools/archive_util.py,sha256=maJDbozRbDeSPw53VT0cb_IS3W0Ap73lJR8tX8RZDx0,7077 +setuptools/build_meta.py,sha256=x7FI1UPKCKxBBSopXocfGDnJa98rQO8atKXSwJtdid8,10280 +setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 +setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752 +setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 +setuptools/command/__init__.py,sha256=e-8TJOikUe3St0fw2b2p9u5EDdSxl5zHUBJJKifbcQ8,217 +setuptools/command/__pycache__/__init__.cpython-36.pyc,, +setuptools/command/__pycache__/alias.cpython-36.pyc,, +setuptools/command/__pycache__/bdist_egg.cpython-36.pyc,, +setuptools/command/__pycache__/bdist_rpm.cpython-36.pyc,, +setuptools/command/__pycache__/build_clib.cpython-36.pyc,, +setuptools/command/__pycache__/build_ext.cpython-36.pyc,, +setuptools/command/__pycache__/build_py.cpython-36.pyc,, +setuptools/command/__pycache__/develop.cpython-36.pyc,, +setuptools/command/__pycache__/dist_info.cpython-36.pyc,, +setuptools/command/__pycache__/easy_install.cpython-36.pyc,, +setuptools/command/__pycache__/egg_info.cpython-36.pyc,, +setuptools/command/__pycache__/install.cpython-36.pyc,, +setuptools/command/__pycache__/install_egg_info.cpython-36.pyc,, +setuptools/command/__pycache__/install_lib.cpython-36.pyc,, +setuptools/command/__pycache__/install_scripts.cpython-36.pyc,, +setuptools/command/__pycache__/py36compat.cpython-36.pyc,, +setuptools/command/__pycache__/register.cpython-36.pyc,, +setuptools/command/__pycache__/rotate.cpython-36.pyc,, +setuptools/command/__pycache__/saveopts.cpython-36.pyc,, +setuptools/command/__pycache__/sdist.cpython-36.pyc,, +setuptools/command/__pycache__/setopt.cpython-36.pyc,, +setuptools/command/__pycache__/test.cpython-36.pyc,, +setuptools/command/__pycache__/upload.cpython-36.pyc,, +setuptools/command/__pycache__/upload_docs.cpython-36.pyc,, +setuptools/command/alias.py,sha256=1sLQxZcNh6dDQpDmm4G7UGGTol83nY1NTPmNBbm2siI,2381 +setuptools/command/bdist_egg.py,sha256=-upiB6fFtm8cQSQj1LRDVpG1-T143DsXCvV0fh03u7U,16604 +setuptools/command/bdist_rpm.py,sha256=_4Y7tVAzu1zEuDc8tpRdE_sy3zST3h3LPTtzioos5Ck,900 +setuptools/command/build_clib.py,sha256=fWHSFGkk10VCddBWCszvNhowbG9Z9CZXVjQ2uSInoOs,4415 +setuptools/command/build_ext.py,sha256=aI_qnK9m8lULZDS6XMv_p2j2pIehVbSarb4PJHDA7dw,13027 +setuptools/command/build_py.py,sha256=fP7K6X8VS92D-_hHK5jM99wnqSjTQhJcx5-8Wp74MB8,8930 
+setuptools/command/develop.py,sha256=B0p5dh7VrSMdEfhdUE_AJlWk2UxAesOOY14CAV5_DEA,8045 +setuptools/command/dist_info.py,sha256=5t6kOfrdgALT-P3ogss6PF9k-Leyesueycuk3dUyZnI,960 +setuptools/command/easy_install.py,sha256=13BpU0YW0UNJY-k1OSVCPj7EnCZ0ep5fZCS0uKuZ0mY,85308 +setuptools/command/egg_info.py,sha256=se-FhYI1sZMzKd6lndV_-vNkJ31hX4HY4ZcMUu71l9k,25335 +setuptools/command/install.py,sha256=8doMxeQEDoK4Eco0mO2WlXXzzp9QnsGJQ7Z7yWkZPG8,4705 +setuptools/command/install_egg_info.py,sha256=bMgeIeRiXzQ4DAGPV1328kcjwQjHjOWU4FngAWLV78Q,2203 +setuptools/command/install_lib.py,sha256=Uz42McsyHZAjrB6cw9E7Bz0xsaTbzxnM1PI9CBhiPtE,3875 +setuptools/command/install_scripts.py,sha256=o0jN_ex7yYYk8W5clymTFOXwkFMKzW9q_zd9Npcex7M,2593 +setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628 +setuptools/command/py36compat.py,sha256=7yLWzQj179Enx3pJ8V1cDDCzeLMFMd9XJXlK-iZTq5Y,4946 +setuptools/command/register.py,sha256=kk3DxXCb5lXTvqnhfwx2g6q7iwbUmgTyXUCaBooBOUk,468 +setuptools/command/rotate.py,sha256=SvsQPasezIojPjvMnfkqzh8P0U0tCj0daczF8uc3NQM,2128 +setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 +setuptools/command/sdist.py,sha256=pEMF0GMVuaznNK6GFamK4GSXG9_qef0ic8z7jEsPmKo,5967 +setuptools/command/setopt.py,sha256=LicqlXockLqBOHYPNv1J032HxoBKD4HOHB11qm_t-Bs,5051 +setuptools/command/test.py,sha256=-O0xrH8adJKTXC8dSZL_bAh21domGM-zCB5RRNfndNc,9490 +setuptools/command/upload.py,sha256=XT3YFVfYPAmA5qhGg0euluU98ftxRUW-PzKcODMLxUs,462 +setuptools/command/upload_docs.py,sha256=ba5kOyedD_u62weinrxqqnvpuQvBIuamXehJG6tAvO0,7218 +setuptools/config.py,sha256=a_S9dqb5l18vlqe5BrU303qZ6eFoagC5PZjYwRYX_mw,22279 +setuptools/dep_util.py,sha256=BDx1BkzNQntvAB4alypHbW5UVBzjqths000PrUL4Zqc,949 +setuptools/depends.py,sha256=iHfZdLdlCu2BllSF9bRg7NU0oqbPWMH8ljm4BuwQDY0,5474 +setuptools/dist.py,sha256=QVZb2ypghTXFuzzp753K34HyKUEAWCMvAeYSBoLbDlk,42374 +setuptools/errors.py,sha256=MVOcv381HNSajDgEUWzOQ4J6B5BHCBMSjHfaWcEwA1o,524 +setuptools/extension.py,sha256=NMM46XjNdVelWemc0x8CyVKA5Ks6Zm3xTWSA2SS6xZM,1684 +setuptools/extern/__init__.py,sha256=Hhf9W73WAitw9TdRJfDIb6YFjmK56CF61afds1Mg0HY,2407 +setuptools/extern/__pycache__/__init__.cpython-36.pyc,, +setuptools/glob.py,sha256=1oZjbfjAHSXbgdhSuR6YGU8jKob9L8NtEmBYqcPTLYk,4873 +setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 +setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264 +setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 +setuptools/installer.py,sha256=jbhb7ZVkNV_bSUMgfnLcZw0IHr6REFnKF4o7_1Jqxm0,3567 +setuptools/launch.py,sha256=TyPT-Ic1T2EnYvGO26gfNRP4ysBlrhpbRjQxWsiO414,812 +setuptools/lib2to3_ex.py,sha256=YKA7CmdIJWwy0-yuZAxUgoNHbXFmT4p53iNadWdBQCk,2335 +setuptools/monkey.py,sha256=0e3HdVKXHL415O7np-AUqhEFXPPuDdJKbI47chQ_DE4,5217 +setuptools/msvc.py,sha256=3LLt938e6OR7wWPzIvCQu7LCWZSIKqoKV6w3r8jV3kY,50561 +setuptools/namespaces.py,sha256=PMqGVPXPYQgjUTvEg9bGccRAkIODrQ6NmsDg_fwErwI,3093 +setuptools/package_index.py,sha256=C_FSnKm7WZM0weypNZNTZS9i9I69wtl3BzE-qRd3jSA,40173 +setuptools/py34compat.py,sha256=KYOd6ybRxjBW8NJmYD8t_UyyVmysppFXqHpFLdslGXU,245 +setuptools/sandbox.py,sha256=IirxmeCHbl1CHT7pEPgQ6tTx9wU854n-d2p80Su8t5c,14151 +setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218 +setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 +setuptools/ssl_support.py,sha256=CPU_41S-x2V6qOuENr2wQsOSxlHvJAoXOxuAPbrxjpM,8565 
+setuptools/unicode_utils.py,sha256=aOOFo4JGwAsiBttGYDsqFS7YqWQeZ2j6DWiCuctR_00,941 +setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144 +setuptools/wheel.py,sha256=0P8tSk105uF_Ub-30N2HU2X2v7MKDSdjpeQlRRW3SkI,8288 +setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 diff --git a/venv/Lib/site-packages/setuptools-57.0.0.dist-info/WHEEL b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/WHEEL new file mode 100644 index 00000000..385faab0 --- /dev/null +++ b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/dependency_links.txt b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/dependency_links.txt similarity index 100% rename from env/lib/python2.7/site-packages/setuptools-41.1.0.dist-info/dependency_links.txt rename to venv/Lib/site-packages/setuptools-57.0.0.dist-info/dependency_links.txt diff --git a/venv/Lib/site-packages/setuptools-57.0.0.dist-info/entry_points.txt b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/entry_points.txt new file mode 100644 index 00000000..d117e2ce --- /dev/null +++ b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/entry_points.txt @@ -0,0 +1,60 @@ +[distutils.commands] +alias = setuptools.command.alias:alias +bdist_egg = setuptools.command.bdist_egg:bdist_egg +bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm +build_clib = setuptools.command.build_clib:build_clib +build_ext = setuptools.command.build_ext:build_ext +build_py = setuptools.command.build_py:build_py +develop = setuptools.command.develop:develop +dist_info = setuptools.command.dist_info:dist_info +easy_install = setuptools.command.easy_install:easy_install +egg_info = setuptools.command.egg_info:egg_info +install = setuptools.command.install:install +install_egg_info = setuptools.command.install_egg_info:install_egg_info +install_lib = setuptools.command.install_lib:install_lib +install_scripts = setuptools.command.install_scripts:install_scripts +rotate = setuptools.command.rotate:rotate +saveopts = setuptools.command.saveopts:saveopts +sdist = setuptools.command.sdist:sdist +setopt = setuptools.command.setopt:setopt +test = setuptools.command.test:test +upload_docs = setuptools.command.upload_docs:upload_docs + +[distutils.setup_keywords] +convert_2to3_doctests = setuptools.dist:assert_string_list +dependency_links = setuptools.dist:assert_string_list +eager_resources = setuptools.dist:assert_string_list +entry_points = setuptools.dist:check_entry_points +exclude_package_data = setuptools.dist:check_package_data +extras_require = setuptools.dist:check_extras +include_package_data = setuptools.dist:assert_bool +install_requires = setuptools.dist:check_requirements +namespace_packages = setuptools.dist:check_nsp +package_data = setuptools.dist:check_package_data +packages = setuptools.dist:check_packages +python_requires = setuptools.dist:check_specifier +setup_requires = setuptools.dist:check_requirements +test_loader = setuptools.dist:check_importable +test_runner = setuptools.dist:check_importable +test_suite = setuptools.dist:check_test_suite +tests_require = setuptools.dist:check_requirements +use_2to3 = setuptools.dist:assert_bool +use_2to3_exclude_fixers = setuptools.dist:assert_string_list +use_2to3_fixers = setuptools.dist:assert_string_list +zip_safe = setuptools.dist:assert_bool + +[egg_info.writers] +PKG-INFO = 
setuptools.command.egg_info:write_pkg_info +dependency_links.txt = setuptools.command.egg_info:overwrite_arg +depends.txt = setuptools.command.egg_info:warn_depends_obsolete +eager_resources.txt = setuptools.command.egg_info:overwrite_arg +entry_points.txt = setuptools.command.egg_info:write_entries +namespace_packages.txt = setuptools.command.egg_info:overwrite_arg +requires.txt = setuptools.command.egg_info:write_requirements +top_level.txt = setuptools.command.egg_info:write_toplevel_names + +[setuptools.finalize_distribution_options] +2to3_doctests = setuptools.dist:Distribution._finalize_2to3_doctests +keywords = setuptools.dist:Distribution._finalize_setup_keywords +parent_finalize = setuptools.dist:_Distribution.finalize_options + diff --git a/venv/Lib/site-packages/setuptools-57.0.0.dist-info/top_level.txt b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/top_level.txt new file mode 100644 index 00000000..b5ac1070 --- /dev/null +++ b/venv/Lib/site-packages/setuptools-57.0.0.dist-info/top_level.txt @@ -0,0 +1,3 @@ +_distutils_hack +pkg_resources +setuptools diff --git a/venv/Lib/site-packages/setuptools/__init__.py b/venv/Lib/site-packages/setuptools/__init__.py new file mode 100644 index 00000000..4d9b8357 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/__init__.py @@ -0,0 +1,241 @@ +"""Extensions to the 'distutils' for large or complex distributions""" + +from fnmatch import fnmatchcase +import functools +import os +import re + +import _distutils_hack.override # noqa: F401 + +import distutils.core +from distutils.errors import DistutilsOptionError +from distutils.util import convert_path + +from ._deprecation_warning import SetuptoolsDeprecationWarning + +import setuptools.version +from setuptools.extension import Extension +from setuptools.dist import Distribution +from setuptools.depends import Require +from . import monkey + + +__all__ = [ + 'setup', 'Distribution', 'Command', 'Extension', 'Require', + 'SetuptoolsDeprecationWarning', + 'find_packages', 'find_namespace_packages', +] + +__version__ = setuptools.version.__version__ + +bootstrap_install_from = None + +# If we run 2to3 on .py files, should we also convert docstrings? +# Default: yes; assume that we can detect doctests reliably +run_2to3_on_doctests = True +# Standard package names for fixer packages +lib2to3_fixer_packages = ['lib2to3.fixes'] + + +class PackageFinder: + """ + Generate a list of all Python packages found within a directory + """ + + @classmethod + def find(cls, where='.', exclude=(), include=('*',)): + """Return a list all Python packages found within directory 'where' + + 'where' is the root directory which will be searched for packages. It + should be supplied as a "cross-platform" (i.e. URL-style) path; it will + be converted to the appropriate local path syntax. + + 'exclude' is a sequence of package names to exclude; '*' can be used + as a wildcard in the names, such that 'foo.*' will exclude all + subpackages of 'foo' (but not 'foo' itself). + + 'include' is a sequence of package names to include. If it's + specified, only the named packages will be included. If it's not + specified, all found packages will be included. 'include' can contain + shell style wildcard patterns just like 'exclude'. 
+ """ + + return list(cls._find_packages_iter( + convert_path(where), + cls._build_filter('ez_setup', '*__pycache__', *exclude), + cls._build_filter(*include))) + + @classmethod + def _find_packages_iter(cls, where, exclude, include): + """ + All the packages found in 'where' that pass the 'include' filter, but + not the 'exclude' filter. + """ + for root, dirs, files in os.walk(where, followlinks=True): + # Copy dirs to iterate over it, then empty dirs. + all_dirs = dirs[:] + dirs[:] = [] + + for dir in all_dirs: + full_path = os.path.join(root, dir) + rel_path = os.path.relpath(full_path, where) + package = rel_path.replace(os.path.sep, '.') + + # Skip directory trees that are not valid packages + if ('.' in dir or not cls._looks_like_package(full_path)): + continue + + # Should this package be included? + if include(package) and not exclude(package): + yield package + + # Keep searching subdirectories, as there may be more packages + # down there, even if the parent was excluded. + dirs.append(dir) + + @staticmethod + def _looks_like_package(path): + """Does a directory look like a package?""" + return os.path.isfile(os.path.join(path, '__init__.py')) + + @staticmethod + def _build_filter(*patterns): + """ + Given a list of patterns, return a callable that will be true only if + the input matches at least one of the patterns. + """ + return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) + + +class PEP420PackageFinder(PackageFinder): + @staticmethod + def _looks_like_package(path): + return True + + +find_packages = PackageFinder.find +find_namespace_packages = PEP420PackageFinder.find + + +def _install_setup_requires(attrs): + # Note: do not use `setuptools.Distribution` directly, as + # our PEP 517 backend patch `distutils.core.Distribution`. + class MinimalDistribution(distutils.core.Distribution): + """ + A minimal version of a distribution for supporting the + fetch_build_eggs interface. + """ + def __init__(self, attrs): + _incl = 'dependency_links', 'setup_requires' + filtered = { + k: attrs[k] + for k in set(_incl) & set(attrs) + } + distutils.core.Distribution.__init__(self, filtered) + + def finalize_options(self): + """ + Disable finalize_options to avoid building the working set. + Ref #2158. + """ + + dist = MinimalDistribution(attrs) + + # Honor setup.cfg's options. + dist.parse_config_files(ignore_option_errors=True) + if dist.setup_requires: + dist.fetch_build_eggs(dist.setup_requires) + + +def setup(**attrs): + # Make sure we have any requirements needed to interpret 'attrs'. + _install_setup_requires(attrs) + return distutils.core.setup(**attrs) + + +setup.__doc__ = distutils.core.setup.__doc__ + + +_Command = monkey.get_unpatched(distutils.core.Command) + + +class Command(_Command): + __doc__ = _Command.__doc__ + + command_consumes_arguments = False + + def __init__(self, dist, **kw): + """ + Construct the command for dist, updating + vars(self) with any keyword parameters. + """ + _Command.__init__(self, dist) + vars(self).update(kw) + + def _ensure_stringlike(self, option, what, default=None): + val = getattr(self, option) + if val is None: + setattr(self, option, default) + return default + elif not isinstance(val, str): + raise DistutilsOptionError("'%s' must be a %s (got `%s`)" + % (option, what, val)) + return val + + def ensure_string_list(self, option): + r"""Ensure that 'option' is a list of strings. 
If 'option' is + currently a string, we split it either on /,\s*/ or /\s+/, so + "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become + ["foo", "bar", "baz"]. + """ + val = getattr(self, option) + if val is None: + return + elif isinstance(val, str): + setattr(self, option, re.split(r',\s*|\s+', val)) + else: + if isinstance(val, list): + ok = all(isinstance(v, str) for v in val) + else: + ok = False + if not ok: + raise DistutilsOptionError( + "'%s' must be a list of strings (got %r)" + % (option, val)) + + def reinitialize_command(self, command, reinit_subcommands=0, **kw): + cmd = _Command.reinitialize_command(self, command, reinit_subcommands) + vars(cmd).update(kw) + return cmd + + +def _find_all_simple(path): + """ + Find all files under 'path' + """ + results = ( + os.path.join(base, file) + for base, dirs, files in os.walk(path, followlinks=True) + for file in files + ) + return filter(os.path.isfile, results) + + +def findall(dir=os.curdir): + """ + Find all files under 'dir' and return the list of full filenames. + Unless dir is '.', return full filenames with dir prepended. + """ + files = _find_all_simple(dir) + if dir == os.curdir: + make_rel = functools.partial(os.path.relpath, start=dir) + files = map(make_rel, files) + return list(files) + + +class sic(str): + """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)""" + + +# Apply monkey patches +monkey.patch_all() diff --git a/venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..a8cf578a Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-36.pyc new file mode 100644 index 00000000..749c08d6 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-36.pyc new file mode 100644 index 00000000..120d9d75 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-36.pyc new file mode 100644 index 00000000..ac86a89b Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/build_meta.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/build_meta.cpython-36.pyc new file mode 100644 index 00000000..e56fcb37 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/build_meta.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/config.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/config.cpython-36.pyc new file mode 100644 index 00000000..34eb4d70 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/config.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/dep_util.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/dep_util.cpython-36.pyc new file mode 100644 index 00000000..f3e8e375 Binary files /dev/null and 
b/venv/Lib/site-packages/setuptools/__pycache__/dep_util.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-36.pyc new file mode 100644 index 00000000..e0a141e4 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-36.pyc new file mode 100644 index 00000000..220b9f15 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-36.pyc new file mode 100644 index 00000000..d15558d1 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-36.pyc new file mode 100644 index 00000000..4ff97d4a Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-36.pyc new file mode 100644 index 00000000..8d5f4b20 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/installer.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/installer.cpython-36.pyc new file mode 100644 index 00000000..9423adea Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/installer.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/launch.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/launch.cpython-36.pyc new file mode 100644 index 00000000..71431a4a Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/launch.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/lib2to3_ex.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/lib2to3_ex.cpython-36.pyc new file mode 100644 index 00000000..b2faa7dd Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/lib2to3_ex.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-36.pyc new file mode 100644 index 00000000..10b425d4 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-36.pyc new file mode 100644 index 00000000..b79db40d Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-36.pyc new file mode 100644 index 00000000..efe10588 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/package_index.cpython-36.pyc 
b/venv/Lib/site-packages/setuptools/__pycache__/package_index.cpython-36.pyc new file mode 100644 index 00000000..3510c97c Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/package_index.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/py34compat.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/py34compat.cpython-36.pyc new file mode 100644 index 00000000..283a36dc Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/py34compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/sandbox.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/sandbox.cpython-36.pyc new file mode 100644 index 00000000..0677e87f Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/sandbox.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/ssl_support.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/ssl_support.cpython-36.pyc new file mode 100644 index 00000000..724d8ac5 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/ssl_support.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-36.pyc new file mode 100644 index 00000000..b71c53d2 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/version.cpython-36.pyc new file mode 100644 index 00000000..8f99ccf0 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/wheel.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/wheel.cpython-36.pyc new file mode 100644 index 00000000..8eba32d1 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/wheel.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/windows_support.cpython-36.pyc b/venv/Lib/site-packages/setuptools/__pycache__/windows_support.cpython-36.pyc new file mode 100644 index 00000000..0af08c36 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/__pycache__/windows_support.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/setuptools/_deprecation_warning.py b/venv/Lib/site-packages/setuptools/_deprecation_warning.py similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/_deprecation_warning.py rename to venv/Lib/site-packages/setuptools/_deprecation_warning.py diff --git a/venv/Lib/site-packages/setuptools/_distutils/__init__.py b/venv/Lib/site-packages/setuptools/_distutils/__init__.py new file mode 100644 index 00000000..7dac55b6 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/__init__.py @@ -0,0 +1,15 @@ +"""distutils + +The main package for the Python Module Distribution Utilities. Normally +used from a setup script as + + from distutils.core import setup + + setup (...) 
+""" + +import sys + +__version__ = sys.version[:sys.version.index(' ')] + +local = True diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..80eac579 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-36.pyc new file mode 100644 index 00000000..a554920c Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-36.pyc new file mode 100644 index 00000000..578eae9f Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/bcppcompiler.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/bcppcompiler.cpython-36.pyc new file mode 100644 index 00000000..8c0cad30 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/bcppcompiler.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-36.pyc new file mode 100644 index 00000000..1c4b47a8 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-36.pyc new file mode 100644 index 00000000..f6a90549 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-36.pyc new file mode 100644 index 00000000..5c30feb5 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-36.pyc new file mode 100644 index 00000000..a9c32207 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cygwinccompiler.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cygwinccompiler.cpython-36.pyc new file mode 100644 index 00000000..975993e2 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cygwinccompiler.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-36.pyc new file mode 100644 index 00000000..d7aea51f Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-36.pyc new file mode 100644 index 00000000..40304484 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-36.pyc new file mode 100644 index 00000000..aef9dad4 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-36.pyc new file mode 100644 index 00000000..25d2e841 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-36.pyc new file mode 100644 index 00000000..4b3adb5a Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-36.pyc new file mode 100644 index 00000000..0bd9ba2b Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-36.pyc new file mode 100644 index 00000000..0bca58c6 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-36.pyc new file mode 100644 index 00000000..0a120ae4 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-36.pyc new file mode 100644 index 00000000..78e381a9 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-36.pyc new file mode 100644 index 00000000..ff4e8903 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvc9compiler.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvc9compiler.cpython-36.pyc new file mode 100644 index 00000000..fadab1fa Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvc9compiler.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-36.pyc 
b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-36.pyc new file mode 100644 index 00000000..f9907731 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py35compat.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py35compat.cpython-36.pyc new file mode 100644 index 00000000..2a6af7bb Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py35compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-36.pyc new file mode 100644 index 00000000..c6b9afa1 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-36.pyc new file mode 100644 index 00000000..ce6bb383 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-36.pyc new file mode 100644 index 00000000..e0f5088a Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-36.pyc new file mode 100644 index 00000000..252881a1 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-36.pyc new file mode 100644 index 00000000..de393e74 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-36.pyc new file mode 100644 index 00000000..5a247883 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/version.cpython-36.pyc new file mode 100644 index 00000000..60b79b9f Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/versionpredicate.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/versionpredicate.cpython-36.pyc new file mode 100644 index 00000000..5fc36fce Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/versionpredicate.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py new file mode 100644 index 00000000..b7a06082 --- /dev/null +++ 
b/venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py @@ -0,0 +1,561 @@ +"""distutils._msvccompiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for Microsoft Visual Studio 2015. + +The module is compatible with VS 2015 and later. You can find legacy support +for older versions in distutils.msvc9compiler and distutils.msvccompiler. +""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +# finding DevStudio (through the registry) +# ported to VS 2005 and VS 2008 by Christian Heimes +# ported to VS 2015 by Steve Dower + +import os +import subprocess +import contextlib +import warnings +import unittest.mock +with contextlib.suppress(ImportError): + import winreg + +from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ + CompileError, LibError, LinkError +from distutils.ccompiler import CCompiler, gen_lib_options +from distutils import log +from distutils.util import get_platform + +from itertools import count + +def _find_vc2015(): + try: + key = winreg.OpenKeyEx( + winreg.HKEY_LOCAL_MACHINE, + r"Software\Microsoft\VisualStudio\SxS\VC7", + access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY + ) + except OSError: + log.debug("Visual C++ is not registered") + return None, None + + best_version = 0 + best_dir = None + with key: + for i in count(): + try: + v, vc_dir, vt = winreg.EnumValue(key, i) + except OSError: + break + if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir): + try: + version = int(float(v)) + except (ValueError, TypeError): + continue + if version >= 14 and version > best_version: + best_version, best_dir = version, vc_dir + return best_version, best_dir + +def _find_vc2017(): + """Returns "15, path" based on the result of invoking vswhere.exe + If no install is found, returns "None, None" + + The version is returned to avoid unnecessarily changing the function + result. It may be ignored when the path is not None. + + If vswhere.exe is not available, by definition, VS 2017 is not + installed. 
+ """ + root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") + if not root: + return None, None + + try: + path = subprocess.check_output([ + os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), + "-latest", + "-prerelease", + "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + "-property", "installationPath", + "-products", "*", + ], encoding="mbcs", errors="strict").strip() + except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): + return None, None + + path = os.path.join(path, "VC", "Auxiliary", "Build") + if os.path.isdir(path): + return 15, path + + return None, None + +PLAT_SPEC_TO_RUNTIME = { + 'x86' : 'x86', + 'x86_amd64' : 'x64', + 'x86_arm' : 'arm', + 'x86_arm64' : 'arm64' +} + +def _find_vcvarsall(plat_spec): + # bpo-38597: Removed vcruntime return value + _, best_dir = _find_vc2017() + + if not best_dir: + best_version, best_dir = _find_vc2015() + + if not best_dir: + log.debug("No suitable Visual C++ version found") + return None, None + + vcvarsall = os.path.join(best_dir, "vcvarsall.bat") + if not os.path.isfile(vcvarsall): + log.debug("%s cannot be found", vcvarsall) + return None, None + + return vcvarsall, None + +def _get_vc_env(plat_spec): + if os.getenv("DISTUTILS_USE_SDK"): + return { + key.lower(): value + for key, value in os.environ.items() + } + + vcvarsall, _ = _find_vcvarsall(plat_spec) + if not vcvarsall: + raise DistutilsPlatformError("Unable to find vcvarsall.bat") + + try: + out = subprocess.check_output( + 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec), + stderr=subprocess.STDOUT, + ).decode('utf-16le', errors='replace') + except subprocess.CalledProcessError as exc: + log.error(exc.output) + raise DistutilsPlatformError("Error executing {}" + .format(exc.cmd)) + + env = { + key.lower(): value + for key, _, value in + (line.partition('=') for line in out.splitlines()) + if key and value + } + + return env + +def _find_exe(exe, paths=None): + """Return path to an MSVC executable program. + + Tries to find the program in several places: first, one of the + MSVC program search paths from the registry; next, the directories + in the PATH environment variable. If any of those work, return an + absolute path that is known to exist. If none of them work, just + return the original program name, 'exe'. + """ + if not paths: + paths = os.getenv('path').split(os.pathsep) + for p in paths: + fn = os.path.join(os.path.abspath(p), exe) + if os.path.isfile(fn): + return fn + return exe + +# A map keyed by get_platform() return values to values accepted by +# 'vcvarsall.bat'. Always cross-compile from x86 to work with the +# lighter-weight MSVC installs that do not include native 64-bit tools. +PLAT_TO_VCVARS = { + 'win32' : 'x86', + 'win-amd64' : 'x86_amd64', + 'win-arm32' : 'x86_arm', + 'win-arm64' : 'x86_arm64' +} + +class MSVCCompiler(CCompiler) : + """Concrete class that implements an interface to Microsoft Visual C++, + as defined by the CCompiler abstract class.""" + + compiler_type = 'msvc' + + # Just set this so CCompiler's constructor doesn't barf. We currently + # don't use the 'set_executables()' bureaucracy provided by CCompiler, + # as it really isn't necessary for this sort of single-compiler class. + # Would be nice to have a consistent interface with UnixCCompiler, + # though, so it's worth thinking about. 
+ executables = {} + + # Private class data (need to distinguish C from C++ source for compiler) + _c_extensions = ['.c'] + _cpp_extensions = ['.cc', '.cpp', '.cxx'] + _rc_extensions = ['.rc'] + _mc_extensions = ['.mc'] + + # Needed for the filename generation methods provided by the + # base class, CCompiler. + src_extensions = (_c_extensions + _cpp_extensions + + _rc_extensions + _mc_extensions) + res_extension = '.res' + obj_extension = '.obj' + static_lib_extension = '.lib' + shared_lib_extension = '.dll' + static_lib_format = shared_lib_format = '%s%s' + exe_extension = '.exe' + + + def __init__(self, verbose=0, dry_run=0, force=0): + CCompiler.__init__ (self, verbose, dry_run, force) + # target platform (.plat_name is consistent with 'bdist') + self.plat_name = None + self.initialized = False + + def initialize(self, plat_name=None): + # multi-init means we would need to check platform same each time... + assert not self.initialized, "don't init multiple times" + if plat_name is None: + plat_name = get_platform() + # sanity check for platforms to prevent obscure errors later. + if plat_name not in PLAT_TO_VCVARS: + raise DistutilsPlatformError("--plat-name must be one of {}" + .format(tuple(PLAT_TO_VCVARS))) + + # Get the vcvarsall.bat spec for the requested platform. + plat_spec = PLAT_TO_VCVARS[plat_name] + + vc_env = _get_vc_env(plat_spec) + if not vc_env: + raise DistutilsPlatformError("Unable to find a compatible " + "Visual Studio installation.") + + self._paths = vc_env.get('path', '') + paths = self._paths.split(os.pathsep) + self.cc = _find_exe("cl.exe", paths) + self.linker = _find_exe("link.exe", paths) + self.lib = _find_exe("lib.exe", paths) + self.rc = _find_exe("rc.exe", paths) # resource compiler + self.mc = _find_exe("mc.exe", paths) # message compiler + self.mt = _find_exe("mt.exe", paths) # message compiler + + for dir in vc_env.get('include', '').split(os.pathsep): + if dir: + self.add_include_dir(dir.rstrip(os.sep)) + + for dir in vc_env.get('lib', '').split(os.pathsep): + if dir: + self.add_library_dir(dir.rstrip(os.sep)) + + self.preprocess_options = None + # bpo-38597: Always compile with dynamic linking + # Future releases of Python 3.x will include all past + # versions of vcruntime*.dll for compatibility. 
+ self.compile_options = [ + '/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD' + ] + + self.compile_options_debug = [ + '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG' + ] + + ldflags = [ + '/nologo', '/INCREMENTAL:NO', '/LTCG' + ] + + ldflags_debug = [ + '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL' + ] + + self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1'] + self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1'] + self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] + self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] + self.ldflags_static = [*ldflags] + self.ldflags_static_debug = [*ldflags_debug] + + self._ldflags = { + (CCompiler.EXECUTABLE, None): self.ldflags_exe, + (CCompiler.EXECUTABLE, False): self.ldflags_exe, + (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug, + (CCompiler.SHARED_OBJECT, None): self.ldflags_shared, + (CCompiler.SHARED_OBJECT, False): self.ldflags_shared, + (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug, + (CCompiler.SHARED_LIBRARY, None): self.ldflags_static, + (CCompiler.SHARED_LIBRARY, False): self.ldflags_static, + (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug, + } + + self.initialized = True + + # -- Worker methods ------------------------------------------------ + + def object_filenames(self, + source_filenames, + strip_dir=0, + output_dir=''): + ext_map = { + **{ext: self.obj_extension for ext in self.src_extensions}, + **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions}, + } + + output_dir = output_dir or '' + + def make_out_path(p): + base, ext = os.path.splitext(p) + if strip_dir: + base = os.path.basename(base) + else: + _, base = os.path.splitdrive(base) + if base.startswith((os.path.sep, os.path.altsep)): + base = base[1:] + try: + # XXX: This may produce absurdly long paths. We should check + # the length of the result and trim base until we fit within + # 260 characters. 
+ return os.path.join(output_dir, base + ext_map[ext]) + except LookupError: + # Better to raise an exception instead of silently continuing + # and later complain about sources and targets having + # different lengths + raise CompileError("Don't know how to compile {}".format(p)) + + return list(map(make_out_path, source_filenames)) + + + def compile(self, sources, + output_dir=None, macros=None, include_dirs=None, debug=0, + extra_preargs=None, extra_postargs=None, depends=None): + + if not self.initialized: + self.initialize() + compile_info = self._setup_compile(output_dir, macros, include_dirs, + sources, depends, extra_postargs) + macros, objects, extra_postargs, pp_opts, build = compile_info + + compile_opts = extra_preargs or [] + compile_opts.append('/c') + if debug: + compile_opts.extend(self.compile_options_debug) + else: + compile_opts.extend(self.compile_options) + + + add_cpp_opts = False + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + if debug: + # pass the full pathname to MSVC in debug mode, + # this allows the debugger to find the source file + # without asking the user to browse for it + src = os.path.abspath(src) + + if ext in self._c_extensions: + input_opt = "/Tc" + src + elif ext in self._cpp_extensions: + input_opt = "/Tp" + src + add_cpp_opts = True + elif ext in self._rc_extensions: + # compile .RC to .RES file + input_opt = src + output_opt = "/fo" + obj + try: + self.spawn([self.rc] + pp_opts + [output_opt, input_opt]) + except DistutilsExecError as msg: + raise CompileError(msg) + continue + elif ext in self._mc_extensions: + # Compile .MC to .RC file to .RES file. + # * '-h dir' specifies the directory for the + # generated include file + # * '-r dir' specifies the target directory of the + # generated RC file and the binary message resource + # it includes + # + # For now (since there are no options to change this), + # we use the source-directory for the include file and + # the build directory for the RC file and message + # resources. This works at least for win32all. + h_dir = os.path.dirname(src) + rc_dir = os.path.dirname(obj) + try: + # first compile .MC to .RC and .H file + self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src]) + base, _ = os.path.splitext(os.path.basename (src)) + rc_file = os.path.join(rc_dir, base + '.rc') + # then compile .RC to .RES file + self.spawn([self.rc, "/fo" + obj, rc_file]) + + except DistutilsExecError as msg: + raise CompileError(msg) + continue + else: + # how to handle this file? + raise CompileError("Don't know how to compile {} to {}" + .format(src, obj)) + + args = [self.cc] + compile_opts + pp_opts + if add_cpp_opts: + args.append('/EHsc') + args.append(input_opt) + args.append("/Fo" + obj) + args.extend(extra_postargs) + + try: + self.spawn(args) + except DistutilsExecError as msg: + raise CompileError(msg) + + return objects + + + def create_static_lib(self, + objects, + output_libname, + output_dir=None, + debug=0, + target_lang=None): + + if not self.initialized: + self.initialize() + objects, output_dir = self._fix_object_args(objects, output_dir) + output_filename = self.library_filename(output_libname, + output_dir=output_dir) + + if self._need_link(objects, output_filename): + lib_args = objects + ['/OUT:' + output_filename] + if debug: + pass # XXX what goes here? 
+ try: + log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args)) + self.spawn([self.lib] + lib_args) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + + def link(self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + + if not self.initialized: + self.initialize() + objects, output_dir = self._fix_object_args(objects, output_dir) + fixed_args = self._fix_lib_args(libraries, library_dirs, + runtime_library_dirs) + libraries, library_dirs, runtime_library_dirs = fixed_args + + if runtime_library_dirs: + self.warn("I don't know what to do with 'runtime_library_dirs': " + + str(runtime_library_dirs)) + + lib_opts = gen_lib_options(self, + library_dirs, runtime_library_dirs, + libraries) + if output_dir is not None: + output_filename = os.path.join(output_dir, output_filename) + + if self._need_link(objects, output_filename): + ldflags = self._ldflags[target_desc, debug] + + export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])] + + ld_args = (ldflags + lib_opts + export_opts + + objects + ['/OUT:' + output_filename]) + + # The MSVC linker generates .lib and .exp files, which cannot be + # suppressed by any linker switches. The .lib files may even be + # needed! Make sure they are generated in the temporary build + # directory. Since they have different names for debug and release + # builds, they can go into the same directory. + build_temp = os.path.dirname(objects[0]) + if export_symbols is not None: + (dll_name, dll_ext) = os.path.splitext( + os.path.basename(output_filename)) + implib_file = os.path.join( + build_temp, + self.library_filename(dll_name)) + ld_args.append ('/IMPLIB:' + implib_file) + + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + + output_dir = os.path.dirname(os.path.abspath(output_filename)) + self.mkpath(output_dir) + try: + log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args)) + self.spawn([self.linker] + ld_args) + except DistutilsExecError as msg: + raise LinkError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + def spawn(self, cmd): + env = dict(os.environ, PATH=self._paths) + with self._fallback_spawn(cmd, env) as fallback: + return super().spawn(cmd, env=env) + return fallback.value + + @contextlib.contextmanager + def _fallback_spawn(self, cmd, env): + """ + Discovered in pypa/distutils#15, some tools monkeypatch the compiler, + so the 'env' kwarg causes a TypeError. Detect this condition and + restore the legacy, unsafe behavior. + """ + bag = type('Bag', (), {})() + try: + yield bag + except TypeError as exc: + if "unexpected keyword argument 'env'" not in str(exc): + raise + else: + return + warnings.warn( + "Fallback spawn triggered. Please update distutils monkeypatch.") + with unittest.mock.patch('os.environ', env): + bag.value = super().spawn(cmd) + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function, in + # ccompiler.py. 
+ + def library_dir_option(self, dir): + return "/LIBPATH:" + dir + + def runtime_library_dir_option(self, dir): + raise DistutilsPlatformError( + "don't know how to set runtime library search path for MSVC") + + def library_option(self, lib): + return self.library_filename(lib) + + def find_library_file(self, dirs, lib, debug=0): + # Prefer a debugging library if found (and requested), but deal + # with it if we don't have one. + if debug: + try_names = [lib + "_d", lib] + else: + try_names = [lib] + for dir in dirs: + for name in try_names: + libfile = os.path.join(dir, self.library_filename(name)) + if os.path.isfile(libfile): + return libfile + else: + # Oops, didn't find it in *any* of 'dirs' + return None diff --git a/venv/Lib/site-packages/setuptools/_distutils/archive_util.py b/venv/Lib/site-packages/setuptools/_distutils/archive_util.py new file mode 100644 index 00000000..565a3117 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/archive_util.py @@ -0,0 +1,256 @@ +"""distutils.archive_util + +Utility functions for creating archive files (tarballs, zip files, +that sort of thing).""" + +import os +from warnings import warn +import sys + +try: + import zipfile +except ImportError: + zipfile = None + + +from distutils.errors import DistutilsExecError +from distutils.spawn import spawn +from distutils.dir_util import mkpath +from distutils import log + +try: + from pwd import getpwnam +except ImportError: + getpwnam = None + +try: + from grp import getgrnam +except ImportError: + getgrnam = None + +def _get_gid(name): + """Returns a gid, given a group name.""" + if getgrnam is None or name is None: + return None + try: + result = getgrnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _get_uid(name): + """Returns an uid, given a user name.""" + if getpwnam is None or name is None: + return None + try: + result = getpwnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, + owner=None, group=None): + """Create a (possibly compressed) tar file from all the files under + 'base_dir'. + + 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or + None. ("compress" will be deprecated in Python 3.2) + + 'owner' and 'group' can be used to define an owner and a group for the + archive that is being built. If not provided, the current owner and group + will be used. + + The output tar file will be named 'base_dir' + ".tar", possibly plus + the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). + + Returns the output filename. 
+ """ + tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '', + 'compress': ''} + compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', + 'compress': '.Z'} + + # flags for compression program, each element of list will be an argument + if compress is not None and compress not in compress_ext.keys(): + raise ValueError( + "bad value for 'compress': must be None, 'gzip', 'bzip2', " + "'xz' or 'compress'") + + archive_name = base_name + '.tar' + if compress != 'compress': + archive_name += compress_ext.get(compress, '') + + mkpath(os.path.dirname(archive_name), dry_run=dry_run) + + # creating the tarball + import tarfile # late import so Python build itself doesn't break + + log.info('Creating tar archive') + + uid = _get_uid(owner) + gid = _get_gid(group) + + def _set_uid_gid(tarinfo): + if gid is not None: + tarinfo.gid = gid + tarinfo.gname = group + if uid is not None: + tarinfo.uid = uid + tarinfo.uname = owner + return tarinfo + + if not dry_run: + tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) + try: + tar.add(base_dir, filter=_set_uid_gid) + finally: + tar.close() + + # compression using `compress` + if compress == 'compress': + warn("'compress' will be deprecated.", PendingDeprecationWarning) + # the option varies depending on the platform + compressed_name = archive_name + compress_ext[compress] + if sys.platform == 'win32': + cmd = [compress, archive_name, compressed_name] + else: + cmd = [compress, '-f', archive_name] + spawn(cmd, dry_run=dry_run) + return compressed_name + + return archive_name + +def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): + """Create a zip file from all the files under 'base_dir'. + + The output zip file will be named 'base_name' + ".zip". Uses either the + "zipfile" Python module (if available) or the InfoZIP "zip" utility + (if installed and found on the default search path). If neither tool is + available, raises DistutilsExecError. Returns the name of the output zip + file. + """ + zip_filename = base_name + ".zip" + mkpath(os.path.dirname(zip_filename), dry_run=dry_run) + + # If zipfile module is not available, try spawning an external + # 'zip' command. + if zipfile is None: + if verbose: + zipoptions = "-r" + else: + zipoptions = "-rq" + + try: + spawn(["zip", zipoptions, zip_filename, base_dir], + dry_run=dry_run) + except DistutilsExecError: + # XXX really should distinguish between "couldn't find + # external 'zip' command" and "zip failed". 
+ raise DistutilsExecError(("unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility") % zip_filename) + + else: + log.info("creating '%s' and adding '%s' to it", + zip_filename, base_dir) + + if not dry_run: + try: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_DEFLATED) + except RuntimeError: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_STORED) + + with zip: + if base_dir != os.curdir: + path = os.path.normpath(os.path.join(base_dir, '')) + zip.write(path, path) + log.info("adding '%s'", path) + for dirpath, dirnames, filenames in os.walk(base_dir): + for name in dirnames: + path = os.path.normpath(os.path.join(dirpath, name, '')) + zip.write(path, path) + log.info("adding '%s'", path) + for name in filenames: + path = os.path.normpath(os.path.join(dirpath, name)) + if os.path.isfile(path): + zip.write(path, path) + log.info("adding '%s'", path) + + return zip_filename + +ARCHIVE_FORMATS = { + 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), + 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), + 'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"), + 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), + 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (make_zipfile, [],"ZIP file") + } + +def check_archive_formats(formats): + """Returns the first format from the 'format' list that is unknown. + + If all formats are known, returns None + """ + for format in formats: + if format not in ARCHIVE_FORMATS: + return format + return None + +def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, + dry_run=0, owner=None, group=None): + """Create an archive file (eg. zip or tar). + + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "gztar", + "bztar", "xztar", or "ztar". + + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. + + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. 
+ """ + save_cwd = os.getcwd() + if root_dir is not None: + log.debug("changing into '%s'", root_dir) + base_name = os.path.abspath(base_name) + if not dry_run: + os.chdir(root_dir) + + if base_dir is None: + base_dir = os.curdir + + kwargs = {'dry_run': dry_run} + + try: + format_info = ARCHIVE_FORMATS[format] + except KeyError: + raise ValueError("unknown archive format '%s'" % format) + + func = format_info[0] + for arg, val in format_info[1]: + kwargs[arg] = val + + if format != 'zip': + kwargs['owner'] = owner + kwargs['group'] = group + + try: + filename = func(base_name, base_dir, **kwargs) + finally: + if root_dir is not None: + log.debug("changing back to '%s'", save_cwd) + os.chdir(save_cwd) + + return filename diff --git a/venv/Lib/site-packages/setuptools/_distutils/bcppcompiler.py b/venv/Lib/site-packages/setuptools/_distutils/bcppcompiler.py new file mode 100644 index 00000000..071fea5d --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/bcppcompiler.py @@ -0,0 +1,393 @@ +"""distutils.bcppcompiler + +Contains BorlandCCompiler, an implementation of the abstract CCompiler class +for the Borland C++ compiler. +""" + +# This implementation by Lyle Johnson, based on the original msvccompiler.py +# module and using the directions originally published by Gordon Williams. + +# XXX looks like there's a LOT of overlap between these two classes: +# someone should sit down and factor out the common code as +# WindowsCCompiler! --GPW + + +import os +from distutils.errors import \ + DistutilsExecError, \ + CompileError, LibError, LinkError, UnknownFileError +from distutils.ccompiler import \ + CCompiler, gen_preprocess_options +from distutils.file_util import write_file +from distutils.dep_util import newer +from distutils import log + +class BCPPCompiler(CCompiler) : + """Concrete class that implements an interface to the Borland C/C++ + compiler, as defined by the CCompiler abstract class. + """ + + compiler_type = 'bcpp' + + # Just set this so CCompiler's constructor doesn't barf. We currently + # don't use the 'set_executables()' bureaucracy provided by CCompiler, + # as it really isn't necessary for this sort of single-compiler class. + # Would be nice to have a consistent interface with UnixCCompiler, + # though, so it's worth thinking about. + executables = {} + + # Private class data (need to distinguish C from C++ source for compiler) + _c_extensions = ['.c'] + _cpp_extensions = ['.cc', '.cpp', '.cxx'] + + # Needed for the filename generation methods provided by the + # base class, CCompiler. + src_extensions = _c_extensions + _cpp_extensions + obj_extension = '.obj' + static_lib_extension = '.lib' + shared_lib_extension = '.dll' + static_lib_format = shared_lib_format = '%s%s' + exe_extension = '.exe' + + + def __init__ (self, + verbose=0, + dry_run=0, + force=0): + + CCompiler.__init__ (self, verbose, dry_run, force) + + # These executables are assumed to all be in the path. + # Borland doesn't seem to use any special registry settings to + # indicate their installation locations. 
+ + self.cc = "bcc32.exe" + self.linker = "ilink32.exe" + self.lib = "tlib.exe" + + self.preprocess_options = None + self.compile_options = ['/tWM', '/O2', '/q', '/g0'] + self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0'] + + self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x'] + self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] + self.ldflags_static = [] + self.ldflags_exe = ['/Gn', '/q', '/x'] + self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r'] + + + # -- Worker methods ------------------------------------------------ + + def compile(self, sources, + output_dir=None, macros=None, include_dirs=None, debug=0, + extra_preargs=None, extra_postargs=None, depends=None): + + macros, objects, extra_postargs, pp_opts, build = \ + self._setup_compile(output_dir, macros, include_dirs, sources, + depends, extra_postargs) + compile_opts = extra_preargs or [] + compile_opts.append ('-c') + if debug: + compile_opts.extend (self.compile_options_debug) + else: + compile_opts.extend (self.compile_options) + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + # XXX why do the normpath here? + src = os.path.normpath(src) + obj = os.path.normpath(obj) + # XXX _setup_compile() did a mkpath() too but before the normpath. + # Is it possible to skip the normpath? + self.mkpath(os.path.dirname(obj)) + + if ext == '.res': + # This is already a binary file -- skip it. + continue # the 'for' loop + if ext == '.rc': + # This needs to be compiled to a .res file -- do it now. + try: + self.spawn (["brcc32", "-fo", obj, src]) + except DistutilsExecError as msg: + raise CompileError(msg) + continue # the 'for' loop + + # The next two are both for the real compiler. + if ext in self._c_extensions: + input_opt = "" + elif ext in self._cpp_extensions: + input_opt = "-P" + else: + # Unknown file type -- no extra options. The compiler + # will probably fail, but let it just in case this is a + # file the compiler recognizes even if we don't. + input_opt = "" + + output_opt = "-o" + obj + + # Compiler command line syntax is: "bcc32 [options] file(s)". + # Note that the source file names must appear at the end of + # the command line. + try: + self.spawn ([self.cc] + compile_opts + pp_opts + + [input_opt, output_opt] + + extra_postargs + [src]) + except DistutilsExecError as msg: + raise CompileError(msg) + + return objects + + # compile () + + + def create_static_lib (self, + objects, + output_libname, + output_dir=None, + debug=0, + target_lang=None): + + (objects, output_dir) = self._fix_object_args (objects, output_dir) + output_filename = \ + self.library_filename (output_libname, output_dir=output_dir) + + if self._need_link (objects, output_filename): + lib_args = [output_filename, '/u'] + objects + if debug: + pass # XXX what goes here? + try: + self.spawn ([self.lib] + lib_args) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + # create_static_lib () + + + def link (self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + + # XXX this ignores 'build_temp'! 
should follow the lead of + # msvccompiler.py + + (objects, output_dir) = self._fix_object_args (objects, output_dir) + (libraries, library_dirs, runtime_library_dirs) = \ + self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) + + if runtime_library_dirs: + log.warn("I don't know what to do with 'runtime_library_dirs': %s", + str(runtime_library_dirs)) + + if output_dir is not None: + output_filename = os.path.join (output_dir, output_filename) + + if self._need_link (objects, output_filename): + + # Figure out linker args based on type of target. + if target_desc == CCompiler.EXECUTABLE: + startup_obj = 'c0w32' + if debug: + ld_args = self.ldflags_exe_debug[:] + else: + ld_args = self.ldflags_exe[:] + else: + startup_obj = 'c0d32' + if debug: + ld_args = self.ldflags_shared_debug[:] + else: + ld_args = self.ldflags_shared[:] + + + # Create a temporary exports file for use by the linker + if export_symbols is None: + def_file = '' + else: + head, tail = os.path.split (output_filename) + modname, ext = os.path.splitext (tail) + temp_dir = os.path.dirname(objects[0]) # preserve tree structure + def_file = os.path.join (temp_dir, '%s.def' % modname) + contents = ['EXPORTS'] + for sym in (export_symbols or []): + contents.append(' %s=_%s' % (sym, sym)) + self.execute(write_file, (def_file, contents), + "writing %s" % def_file) + + # Borland C++ has problems with '/' in paths + objects2 = map(os.path.normpath, objects) + # split objects in .obj and .res files + # Borland C++ needs them at different positions in the command line + objects = [startup_obj] + resources = [] + for file in objects2: + (base, ext) = os.path.splitext(os.path.normcase(file)) + if ext == '.res': + resources.append(file) + else: + objects.append(file) + + + for l in library_dirs: + ld_args.append("/L%s" % os.path.normpath(l)) + ld_args.append("/L.") # we sometimes use relative paths + + # list of object files + ld_args.extend(objects) + + # XXX the command-line syntax for Borland C++ is a bit wonky; + # certain filenames are jammed together in one big string, but + # comma-delimited. This doesn't mesh too well with the + # Unix-centric attitude (with a DOS/Windows quoting hack) of + # 'spawn()', so constructing the argument list is a bit + # awkward. Note that doing the obvious thing and jamming all + # the filenames and commas into one argument would be wrong, + # because 'spawn()' would quote any filenames with spaces in + # them. Arghghh!. Apparently it works fine as coded... 
+ + # name of dll/exe file + ld_args.extend([',',output_filename]) + # no map file and start libraries + ld_args.append(',,') + + for lib in libraries: + # see if we find it and if there is a bcpp specific lib + # (xxx_bcpp.lib) + libfile = self.find_library_file(library_dirs, lib, debug) + if libfile is None: + ld_args.append(lib) + # probably a BCPP internal library -- don't warn + else: + # full name which prefers bcpp_xxx.lib over xxx.lib + ld_args.append(libfile) + + # some default libraries + ld_args.append ('import32') + ld_args.append ('cw32mt') + + # def file for export symbols + ld_args.extend([',',def_file]) + # add resource files + ld_args.append(',') + ld_args.extend(resources) + + + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + + self.mkpath (os.path.dirname (output_filename)) + try: + self.spawn ([self.linker] + ld_args) + except DistutilsExecError as msg: + raise LinkError(msg) + + else: + log.debug("skipping %s (up-to-date)", output_filename) + + # link () + + # -- Miscellaneous methods ----------------------------------------- + + + def find_library_file (self, dirs, lib, debug=0): + # List of effective library names to try, in order of preference: + # xxx_bcpp.lib is better than xxx.lib + # and xxx_d.lib is better than xxx.lib if debug is set + # + # The "_bcpp" suffix is to handle a Python installation for people + # with multiple compilers (primarily Distutils hackers, I suspect + # ;-). The idea is they'd have one static library for each + # compiler they care about, since (almost?) every Windows compiler + # seems to have a different format for static libraries. + if debug: + dlib = (lib + "_d") + try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) + else: + try_names = (lib + "_bcpp", lib) + + for dir in dirs: + for name in try_names: + libfile = os.path.join(dir, self.library_filename(name)) + if os.path.exists(libfile): + return libfile + else: + # Oops, didn't find it in *any* of 'dirs' + return None + + # overwrite the one from CCompiler to support rc and res-files + def object_filenames (self, + source_filenames, + strip_dir=0, + output_dir=''): + if output_dir is None: output_dir = '' + obj_names = [] + for src_name in source_filenames: + # use normcase to make sure '.rc' is really '.rc' and not '.RC' + (base, ext) = os.path.splitext (os.path.normcase(src_name)) + if ext not in (self.src_extensions + ['.rc','.res']): + raise UnknownFileError("unknown file type '%s' (from '%s')" % \ + (ext, src_name)) + if strip_dir: + base = os.path.basename (base) + if ext == '.res': + # these can go unchanged + obj_names.append (os.path.join (output_dir, base + ext)) + elif ext == '.rc': + # these need to be compiled to .res-files + obj_names.append (os.path.join (output_dir, base + '.res')) + else: + obj_names.append (os.path.join (output_dir, + base + self.obj_extension)) + return obj_names + + # object_filenames () + + def preprocess (self, + source, + output_file=None, + macros=None, + include_dirs=None, + extra_preargs=None, + extra_postargs=None): + + (_, macros, include_dirs) = \ + self._fix_compile_args(None, macros, include_dirs) + pp_opts = gen_preprocess_options(macros, include_dirs) + pp_args = ['cpp32.exe'] + pp_opts + if output_file is not None: + pp_args.append('-o' + output_file) + if extra_preargs: + pp_args[:0] = extra_preargs + if extra_postargs: + pp_args.extend(extra_postargs) + pp_args.append(source) + + # We need to preprocess: either we're being forced to, or the + # source file is newer 
than the target (or the target doesn't + # exist). + if self.force or output_file is None or newer(source, output_file): + if output_file: + self.mkpath(os.path.dirname(output_file)) + try: + self.spawn(pp_args) + except DistutilsExecError as msg: + print(msg) + raise CompileError(msg) + + # preprocess() diff --git a/venv/Lib/site-packages/setuptools/_distutils/ccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/ccompiler.py new file mode 100644 index 00000000..48d160d2 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/ccompiler.py @@ -0,0 +1,1123 @@ +"""distutils.ccompiler + +Contains CCompiler, an abstract base class that defines the interface +for the Distutils compiler abstraction model.""" + +import sys, os, re +from distutils.errors import * +from distutils.spawn import spawn +from distutils.file_util import move_file +from distutils.dir_util import mkpath +from distutils.dep_util import newer_group +from distutils.util import split_quoted, execute +from distutils import log + +class CCompiler: + """Abstract base class to define the interface that must be implemented + by real compiler classes. Also has some utility methods used by + several compiler classes. + + The basic idea behind a compiler abstraction class is that each + instance can be used for all the compile/link steps in building a + single project. Thus, attributes common to all of those compile and + link steps -- include directories, macros to define, libraries to link + against, etc. -- are attributes of the compiler instance. To allow for + variability in how individual files are treated, most of those + attributes may be varied on a per-compilation or per-link basis. + """ + + # 'compiler_type' is a class attribute that identifies this class. It + # keeps code that wants to know what kind of compiler it's dealing with + # from having to import all possible compiler classes just to do an + # 'isinstance'. In concrete CCompiler subclasses, 'compiler_type' + # should really, really be one of the keys of the 'compiler_class' + # dictionary (see below -- used by the 'new_compiler()' factory + # function) -- authors of new compiler interface classes are + # responsible for updating 'compiler_class'! + compiler_type = None + + # XXX things not handled by this compiler abstraction model: + # * client can't provide additional options for a compiler, + # e.g. warning, optimization, debugging flags. Perhaps this + # should be the domain of concrete compiler abstraction classes + # (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base + # class should have methods for the common ones. + # * can't completely override the include or library searchg + # path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2". + # I'm not sure how widely supported this is even by Unix + # compilers, much less on other platforms. And I'm even less + # sure how useful it is; maybe for cross-compiling, but + # support for that is a ways off. (And anyways, cross + # compilers probably have a dedicated binary with the + # right paths compiled in. I hope.) + # * can't do really freaky things with the library list/library + # dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against + # different versions of libfoo.a in different locations. I + # think this is useless without the ability to null out the + # library search path anyways. + + + # Subclasses that rely on the standard filename generation methods + # implemented below should override these; see the comment near + # those methods ('object_filenames()' et. al.) 
for details: + src_extensions = None # list of strings + obj_extension = None # string + static_lib_extension = None + shared_lib_extension = None # string + static_lib_format = None # format string + shared_lib_format = None # prob. same as static_lib_format + exe_extension = None # string + + # Default language settings. language_map is used to detect a source + # file or Extension target language, checking source filenames. + # language_order is used to detect the language precedence, when deciding + # what language to use when mixing source types. For example, if some + # extension has two files with ".c" extension, and one with ".cpp", it + # is still linked as c++. + language_map = {".c" : "c", + ".cc" : "c++", + ".cpp" : "c++", + ".cxx" : "c++", + ".m" : "objc", + } + language_order = ["c++", "objc", "c"] + + def __init__(self, verbose=0, dry_run=0, force=0): + self.dry_run = dry_run + self.force = force + self.verbose = verbose + + # 'output_dir': a common output directory for object, library, + # shared object, and shared library files + self.output_dir = None + + # 'macros': a list of macro definitions (or undefinitions). A + # macro definition is a 2-tuple (name, value), where the value is + # either a string or None (no explicit value). A macro + # undefinition is a 1-tuple (name,). + self.macros = [] + + # 'include_dirs': a list of directories to search for include files + self.include_dirs = [] + + # 'libraries': a list of libraries to include in any link + # (library names, not filenames: eg. "foo" not "libfoo.a") + self.libraries = [] + + # 'library_dirs': a list of directories to search for libraries + self.library_dirs = [] + + # 'runtime_library_dirs': a list of directories to search for + # shared libraries/objects at runtime + self.runtime_library_dirs = [] + + # 'objects': a list of object files (or similar, such as explicitly + # named library files) to include on any link + self.objects = [] + + for key in self.executables.keys(): + self.set_executable(key, self.executables[key]) + + def set_executables(self, **kwargs): + """Define the executables (and options for them) that will be run + to perform the various stages of compilation. The exact set of + executables that may be specified here depends on the compiler + class (via the 'executables' class attribute), but most will have: + compiler the C/C++ compiler + linker_so linker used to create shared objects and libraries + linker_exe linker used to create binary executables + archiver static library creator + + On platforms with a command-line (Unix, DOS/Windows), each of these + is a string that will be split into executable name and (optional) + list of arguments. (Splitting the string is done similarly to how + Unix shells operate: words are delimited by spaces, but quotes and + backslashes can override this. See + 'distutils.util.split_quoted()'.) + """ + + # Note that some CCompiler implementation classes will define class + # attributes 'cpp', 'cc', etc. with hard-coded executable names; + # this is appropriate when a compiler class is for exactly one + # compiler/OS combination (eg. MSVCCompiler). Other compiler + # classes (UnixCCompiler, in particular) are driven by information + # discovered at run-time, since there are many different ways to do + # basically the same things with Unix C compilers. 
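+        # Usage sketch (hypothetical values): callers override executables
+        # at run time, and string values are split into argv-style lists by
+        # split_quoted(), e.g.
+        #     compiler.set_executables(compiler='gcc -O2 -Wall',
+        #                              linker_so='gcc -shared')
+        # leaves self.compiler == ['gcc', '-O2', '-Wall'].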
+ + for key in kwargs: + if key not in self.executables: + raise ValueError("unknown executable '%s' for class %s" % + (key, self.__class__.__name__)) + self.set_executable(key, kwargs[key]) + + def set_executable(self, key, value): + if isinstance(value, str): + setattr(self, key, split_quoted(value)) + else: + setattr(self, key, value) + + def _find_macro(self, name): + i = 0 + for defn in self.macros: + if defn[0] == name: + return i + i += 1 + return None + + def _check_macro_definitions(self, definitions): + """Ensures that every element of 'definitions' is a valid macro + definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do + nothing if all definitions are OK, raise TypeError otherwise. + """ + for defn in definitions: + if not (isinstance(defn, tuple) and + (len(defn) in (1, 2) and + (isinstance (defn[1], str) or defn[1] is None)) and + isinstance (defn[0], str)): + raise TypeError(("invalid macro definition '%s': " % defn) + \ + "must be tuple (string,), (string, string), or " + \ + "(string, None)") + + + # -- Bookkeeping methods ------------------------------------------- + + def define_macro(self, name, value=None): + """Define a preprocessor macro for all compilations driven by this + compiler object. The optional parameter 'value' should be a + string; if it is not supplied, then the macro will be defined + without an explicit value and the exact outcome depends on the + compiler used (XXX true? does ANSI say anything about this?) + """ + # Delete from the list of macro definitions/undefinitions if + # already there (so that this one will take precedence). + i = self._find_macro (name) + if i is not None: + del self.macros[i] + + self.macros.append((name, value)) + + def undefine_macro(self, name): + """Undefine a preprocessor macro for all compilations driven by + this compiler object. If the same macro is defined by + 'define_macro()' and undefined by 'undefine_macro()' the last call + takes precedence (including multiple redefinitions or + undefinitions). If the macro is redefined/undefined on a + per-compilation basis (ie. in the call to 'compile()'), then that + takes precedence. + """ + # Delete from the list of macro definitions/undefinitions if + # already there (so that this one will take precedence). + i = self._find_macro (name) + if i is not None: + del self.macros[i] + + undefn = (name,) + self.macros.append(undefn) + + def add_include_dir(self, dir): + """Add 'dir' to the list of directories that will be searched for + header files. The compiler is instructed to search directories in + the order in which they are supplied by successive calls to + 'add_include_dir()'. + """ + self.include_dirs.append(dir) + + def set_include_dirs(self, dirs): + """Set the list of directories that will be searched to 'dirs' (a + list of strings). Overrides any preceding calls to + 'add_include_dir()'; subsequence calls to 'add_include_dir()' add + to the list passed to 'set_include_dirs()'. This does not affect + any list of standard include directories that the compiler may + search by default. + """ + self.include_dirs = dirs[:] + + def add_library(self, libname): + """Add 'libname' to the list of libraries that will be included in + all links driven by this compiler object. Note that 'libname' + should *not* be the name of a file containing a library, but the + name of the library itself: the actual filename will be inferred by + the linker, the compiler, or the compiler class (depending on the + platform). 
+ + The linker will be instructed to link against libraries in the + order they were supplied to 'add_library()' and/or + 'set_libraries()'. It is perfectly valid to duplicate library + names; the linker will be instructed to link against libraries as + many times as they are mentioned. + """ + self.libraries.append(libname) + + def set_libraries(self, libnames): + """Set the list of libraries to be included in all links driven by + this compiler object to 'libnames' (a list of strings). This does + not affect any standard system libraries that the linker may + include by default. + """ + self.libraries = libnames[:] + + def add_library_dir(self, dir): + """Add 'dir' to the list of directories that will be searched for + libraries specified to 'add_library()' and 'set_libraries()'. The + linker will be instructed to search for libraries in the order they + are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. + """ + self.library_dirs.append(dir) + + def set_library_dirs(self, dirs): + """Set the list of library search directories to 'dirs' (a list of + strings). This does not affect any standard library search path + that the linker may search by default. + """ + self.library_dirs = dirs[:] + + def add_runtime_library_dir(self, dir): + """Add 'dir' to the list of directories that will be searched for + shared libraries at runtime. + """ + self.runtime_library_dirs.append(dir) + + def set_runtime_library_dirs(self, dirs): + """Set the list of directories to search for shared libraries at + runtime to 'dirs' (a list of strings). This does not affect any + standard search path that the runtime linker may search by + default. + """ + self.runtime_library_dirs = dirs[:] + + def add_link_object(self, object): + """Add 'object' to the list of object files (or analogues, such as + explicitly named library files or the output of "resource + compilers") to be included in every link driven by this compiler + object. + """ + self.objects.append(object) + + def set_link_objects(self, objects): + """Set the list of object files (or analogues) to be included in + every link to 'objects'. This does not affect any standard object + files that the linker may include by default (such as system + libraries). 
+ """ + self.objects = objects[:] + + + # -- Private utility methods -------------------------------------- + # (here for the convenience of subclasses) + + # Helper method to prep compiler in subclass compile() methods + + def _setup_compile(self, outdir, macros, incdirs, sources, depends, + extra): + """Process arguments and decide which source files to compile.""" + if outdir is None: + outdir = self.output_dir + elif not isinstance(outdir, str): + raise TypeError("'output_dir' must be a string or None") + + if macros is None: + macros = self.macros + elif isinstance(macros, list): + macros = macros + (self.macros or []) + else: + raise TypeError("'macros' (if supplied) must be a list of tuples") + + if incdirs is None: + incdirs = self.include_dirs + elif isinstance(incdirs, (list, tuple)): + incdirs = list(incdirs) + (self.include_dirs or []) + else: + raise TypeError( + "'include_dirs' (if supplied) must be a list of strings") + + if extra is None: + extra = [] + + # Get the list of expected output (object) files + objects = self.object_filenames(sources, strip_dir=0, + output_dir=outdir) + assert len(objects) == len(sources) + + pp_opts = gen_preprocess_options(macros, incdirs) + + build = {} + for i in range(len(sources)): + src = sources[i] + obj = objects[i] + ext = os.path.splitext(src)[1] + self.mkpath(os.path.dirname(obj)) + build[obj] = (src, ext) + + return macros, objects, extra, pp_opts, build + + def _get_cc_args(self, pp_opts, debug, before): + # works for unixccompiler, cygwinccompiler + cc_args = pp_opts + ['-c'] + if debug: + cc_args[:0] = ['-g'] + if before: + cc_args[:0] = before + return cc_args + + def _fix_compile_args(self, output_dir, macros, include_dirs): + """Typecheck and fix-up some of the arguments to the 'compile()' + method, and return fixed-up values. Specifically: if 'output_dir' + is None, replaces it with 'self.output_dir'; ensures that 'macros' + is a list, and augments it with 'self.macros'; ensures that + 'include_dirs' is a list, and augments it with 'self.include_dirs'. + Guarantees that the returned values are of the correct type, + i.e. for 'output_dir' either string or None, and for 'macros' and + 'include_dirs' either list or None. + """ + if output_dir is None: + output_dir = self.output_dir + elif not isinstance(output_dir, str): + raise TypeError("'output_dir' must be a string or None") + + if macros is None: + macros = self.macros + elif isinstance(macros, list): + macros = macros + (self.macros or []) + else: + raise TypeError("'macros' (if supplied) must be a list of tuples") + + if include_dirs is None: + include_dirs = self.include_dirs + elif isinstance(include_dirs, (list, tuple)): + include_dirs = list(include_dirs) + (self.include_dirs or []) + else: + raise TypeError( + "'include_dirs' (if supplied) must be a list of strings") + + return output_dir, macros, include_dirs + + def _prep_compile(self, sources, output_dir, depends=None): + """Decide which source files must be recompiled. + + Determine the list of object files corresponding to 'sources', + and figure out which ones really need to be recompiled. + Return a list of all object files and a dictionary telling + which source files can be skipped. + """ + # Get the list of expected output (object) files + objects = self.object_filenames(sources, output_dir=output_dir) + assert len(objects) == len(sources) + + # Return an empty dict for the "which source files can be skipped" + # return value to preserve API compatibility. 
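+        # Note that the base class does no skipping here any more: the
+        # compile() loop below compiles every source it is handed, so any
+        # timestamp-based skipping is left to concrete compiler classes.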
+ return objects, {} + + def _fix_object_args(self, objects, output_dir): + """Typecheck and fix up some arguments supplied to various methods. + Specifically: ensure that 'objects' is a list; if output_dir is + None, replace with self.output_dir. Return fixed versions of + 'objects' and 'output_dir'. + """ + if not isinstance(objects, (list, tuple)): + raise TypeError("'objects' must be a list or tuple of strings") + objects = list(objects) + + if output_dir is None: + output_dir = self.output_dir + elif not isinstance(output_dir, str): + raise TypeError("'output_dir' must be a string or None") + + return (objects, output_dir) + + def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): + """Typecheck and fix up some of the arguments supplied to the + 'link_*' methods. Specifically: ensure that all arguments are + lists, and augment them with their permanent versions + (eg. 'self.libraries' augments 'libraries'). Return a tuple with + fixed versions of all arguments. + """ + if libraries is None: + libraries = self.libraries + elif isinstance(libraries, (list, tuple)): + libraries = list (libraries) + (self.libraries or []) + else: + raise TypeError( + "'libraries' (if supplied) must be a list of strings") + + if library_dirs is None: + library_dirs = self.library_dirs + elif isinstance(library_dirs, (list, tuple)): + library_dirs = list (library_dirs) + (self.library_dirs or []) + else: + raise TypeError( + "'library_dirs' (if supplied) must be a list of strings") + + if runtime_library_dirs is None: + runtime_library_dirs = self.runtime_library_dirs + elif isinstance(runtime_library_dirs, (list, tuple)): + runtime_library_dirs = (list(runtime_library_dirs) + + (self.runtime_library_dirs or [])) + else: + raise TypeError("'runtime_library_dirs' (if supplied) " + "must be a list of strings") + + return (libraries, library_dirs, runtime_library_dirs) + + def _need_link(self, objects, output_file): + """Return true if we need to relink the files listed in 'objects' + to recreate 'output_file'. + """ + if self.force: + return True + else: + if self.dry_run: + newer = newer_group (objects, output_file, missing='newer') + else: + newer = newer_group (objects, output_file) + return newer + + def detect_language(self, sources): + """Detect the language of a given file, or list of files. Uses + language_map, and language_order to do the job. + """ + if not isinstance(sources, list): + sources = [sources] + lang = None + index = len(self.language_order) + for source in sources: + base, ext = os.path.splitext(source) + extlang = self.language_map.get(ext) + try: + extindex = self.language_order.index(extlang) + if extindex < index: + lang = extlang + index = extindex + except ValueError: + pass + return lang + + + # -- Worker methods ------------------------------------------------ + # (must be implemented by subclasses) + + def preprocess(self, source, output_file=None, macros=None, + include_dirs=None, extra_preargs=None, extra_postargs=None): + """Preprocess a single C/C++ source file, named in 'source'. + Output will be written to file named 'output_file', or stdout if + 'output_file' not supplied. 'macros' is a list of macro + definitions as for 'compile()', which will augment the macros set + with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a + list of directory names that will be added to the default list. + + Raises PreprocessError on failure. 
+ """ + pass + + def compile(self, sources, output_dir=None, macros=None, + include_dirs=None, debug=0, extra_preargs=None, + extra_postargs=None, depends=None): + """Compile one or more source files. + + 'sources' must be a list of filenames, most likely C/C++ + files, but in reality anything that can be handled by a + particular compiler and compiler class (eg. MSVCCompiler can + handle resource files in 'sources'). Return a list of object + filenames, one per source filename in 'sources'. Depending on + the implementation, not all source files will necessarily be + compiled, but all corresponding object filenames will be + returned. + + If 'output_dir' is given, object files will be put under it, while + retaining their original path component. That is, "foo/bar.c" + normally compiles to "foo/bar.o" (for a Unix implementation); if + 'output_dir' is "build", then it would compile to + "build/foo/bar.o". + + 'macros', if given, must be a list of macro definitions. A macro + definition is either a (name, value) 2-tuple or a (name,) 1-tuple. + The former defines a macro; if the value is None, the macro is + defined without an explicit value. The 1-tuple case undefines a + macro. Later definitions/redefinitions/ undefinitions take + precedence. + + 'include_dirs', if given, must be a list of strings, the + directories to add to the default include file search path for this + compilation only. + + 'debug' is a boolean; if true, the compiler will be instructed to + output debug symbols in (or alongside) the object file(s). + + 'extra_preargs' and 'extra_postargs' are implementation- dependent. + On platforms that have the notion of a command-line (e.g. Unix, + DOS/Windows), they are most likely lists of strings: extra + command-line arguments to prepend/append to the compiler command + line. On other platforms, consult the implementation class + documentation. In any event, they are intended as an escape hatch + for those occasions when the abstract compiler framework doesn't + cut the mustard. + + 'depends', if given, is a list of filenames that all targets + depend on. If a source file is older than any file in + depends, then the source file will be recompiled. This + supports dependency tracking, but only at a coarse + granularity. + + Raises CompileError on failure. + """ + # A concrete compiler class can either override this method + # entirely or implement _compile(). + macros, objects, extra_postargs, pp_opts, build = \ + self._setup_compile(output_dir, macros, include_dirs, sources, + depends, extra_postargs) + cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) + + # Return *all* object filenames, not just the ones we just built. + return objects + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + """Compile 'src' to product 'obj'.""" + # A concrete compiler class that does not override compile() + # should implement _compile(). + pass + + def create_static_lib(self, objects, output_libname, output_dir=None, + debug=0, target_lang=None): + """Link a bunch of stuff together to create a static library file. + The "bunch of stuff" consists of the list of object files supplied + as 'objects', the extra object files supplied to + 'add_link_object()' and/or 'set_link_objects()', the libraries + supplied to 'add_library()' and/or 'set_libraries()', and the + libraries supplied as 'libraries' (if any). 
+ + 'output_libname' should be a library name, not a filename; the + filename will be inferred from the library name. 'output_dir' is + the directory where the library file will be put. + + 'debug' is a boolean; if true, debugging information will be + included in the library (note that on most platforms, it is the + compile step where this matters: the 'debug' flag is included here + just for consistency). + + 'target_lang' is the target language for which the given objects + are being compiled. This allows specific linkage time treatment of + certain languages. + + Raises LibError on failure. + """ + pass + + + # values for target_desc parameter in link() + SHARED_OBJECT = "shared_object" + SHARED_LIBRARY = "shared_library" + EXECUTABLE = "executable" + + def link(self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + """Link a bunch of stuff together to create an executable or + shared library file. + + The "bunch of stuff" consists of the list of object files supplied + as 'objects'. 'output_filename' should be a filename. If + 'output_dir' is supplied, 'output_filename' is relative to it + (i.e. 'output_filename' can provide directory components if + needed). + + 'libraries' is a list of libraries to link against. These are + library names, not filenames, since they're translated into + filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" + on Unix and "foo.lib" on DOS/Windows). However, they can include a + directory component, which means the linker will look in that + specific directory rather than searching all the normal locations. + + 'library_dirs', if supplied, should be a list of directories to + search for libraries that were specified as bare library names + (ie. no directory component). These are on top of the system + default and those supplied to 'add_library_dir()' and/or + 'set_library_dirs()'. 'runtime_library_dirs' is a list of + directories that will be embedded into the shared library and used + to search for other shared libraries that *it* depends on at + run-time. (This may only be relevant on Unix.) + + 'export_symbols' is a list of symbols that the shared library will + export. (This appears to be relevant only on Windows.) + + 'debug' is as for 'compile()' and 'create_static_lib()', with the + slight distinction that it actually matters on most platforms (as + opposed to 'create_static_lib()', which includes a 'debug' flag + mostly for form's sake). + + 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except + of course that they supply command-line arguments for the + particular linker being used). + + 'target_lang' is the target language for which the given objects + are being compiled. This allows specific linkage time treatment of + certain languages. + + Raises LinkError on failure. + """ + raise NotImplementedError + + + # Old 'link_*()' methods, rewritten to use the new 'link()' method. 
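+    # Each wrapper below fixes the 'target_desc' argument and, where
+    # needed, derives the output filename (link_shared_lib() passes its
+    # 'output_libname' through library_filename(..., lib_type='shared'),
+    # link_executable() uses executable_filename()) before delegating to
+    # link().
+    #
+    # Usage sketch (hypothetical filenames): a typical shared-object build
+    # with a concrete compiler looks like
+    #     cc = new_compiler()
+    #     objs = cc.compile(['spam.c'], output_dir='build')
+    #     cc.link_shared_object(objs, 'spam.so', output_dir='build')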
+ + def link_shared_lib(self, + objects, + output_libname, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + self.link(CCompiler.SHARED_LIBRARY, objects, + self.library_filename(output_libname, lib_type='shared'), + output_dir, + libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, + extra_preargs, extra_postargs, build_temp, target_lang) + + + def link_shared_object(self, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + self.link(CCompiler.SHARED_OBJECT, objects, + output_filename, output_dir, + libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, + extra_preargs, extra_postargs, build_temp, target_lang) + + + def link_executable(self, + objects, + output_progname, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + target_lang=None): + self.link(CCompiler.EXECUTABLE, objects, + self.executable_filename(output_progname), output_dir, + libraries, library_dirs, runtime_library_dirs, None, + debug, extra_preargs, extra_postargs, None, target_lang) + + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function; there is + # no appropriate default implementation so subclasses should + # implement all of these. + + def library_dir_option(self, dir): + """Return the compiler option to add 'dir' to the list of + directories searched for libraries. + """ + raise NotImplementedError + + def runtime_library_dir_option(self, dir): + """Return the compiler option to add 'dir' to the list of + directories searched for runtime libraries. + """ + raise NotImplementedError + + def library_option(self, lib): + """Return the compiler option to add 'lib' to the list of libraries + linked into the shared library or executable. + """ + raise NotImplementedError + + def has_function(self, funcname, includes=None, include_dirs=None, + libraries=None, library_dirs=None): + """Return a boolean indicating whether funcname is supported on + the current platform. The optional arguments can be used to + augment the compilation environment. + """ + # this can't be included at module scope because it tries to + # import math which might not be available at that point - maybe + # the necessary logic should just be inlined? 
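+        # Overall flow: write a throwaway C source whose main() calls
+        # 'funcname', try to compile and then link it, and report False on
+        # the first CompileError or LinkError; the temporary source and
+        # object files are removed again afterwards.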
+ import tempfile + if includes is None: + includes = [] + if include_dirs is None: + include_dirs = [] + if libraries is None: + libraries = [] + if library_dirs is None: + library_dirs = [] + fd, fname = tempfile.mkstemp(".c", funcname, text=True) + f = os.fdopen(fd, "w") + try: + for incl in includes: + f.write("""#include "%s"\n""" % incl) + f.write("""\ +int main (int argc, char **argv) { + %s(); + return 0; +} +""" % funcname) + finally: + f.close() + try: + objects = self.compile([fname], include_dirs=include_dirs) + except CompileError: + return False + finally: + os.remove(fname) + + try: + self.link_executable(objects, "a.out", + libraries=libraries, + library_dirs=library_dirs) + except (LinkError, TypeError): + return False + else: + os.remove("a.out") + finally: + for fn in objects: + os.remove(fn) + return True + + def find_library_file (self, dirs, lib, debug=0): + """Search the specified list of directories for a static or shared + library file 'lib' and return the full path to that file. If + 'debug' true, look for a debugging version (if that makes sense on + the current platform). Return None if 'lib' wasn't found in any of + the specified directories. + """ + raise NotImplementedError + + # -- Filename generation methods ----------------------------------- + + # The default implementation of the filename generating methods are + # prejudiced towards the Unix/DOS/Windows view of the world: + # * object files are named by replacing the source file extension + # (eg. .c/.cpp -> .o/.obj) + # * library files (shared or static) are named by plugging the + # library name and extension into a format string, eg. + # "lib%s.%s" % (lib_name, ".a") for Unix static libraries + # * executables are named by appending an extension (possibly + # empty) to the program name: eg. progname + ".exe" for + # Windows + # + # To reduce redundant code, these methods expect to find + # several attributes in the current object (presumably defined + # as class attributes): + # * src_extensions - + # list of C/C++ source file extensions, eg. ['.c', '.cpp'] + # * obj_extension - + # object file extension, eg. '.o' or '.obj' + # * static_lib_extension - + # extension for static library files, eg. '.a' or '.lib' + # * shared_lib_extension - + # extension for shared library/object files, eg. '.so', '.dll' + # * static_lib_format - + # format string for generating static library filenames, + # eg. 'lib%s.%s' or '%s.%s' + # * shared_lib_format + # format string for generating shared library filenames + # (probably same as static_lib_format, since the extension + # is one of the intended parameters to the format string) + # * exe_extension - + # extension for executable files, eg. 
'' or '.exe' + + def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): + if output_dir is None: + output_dir = '' + obj_names = [] + for src_name in source_filenames: + base, ext = os.path.splitext(src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base):] # If abs, chop off leading / + if ext not in self.src_extensions: + raise UnknownFileError( + "unknown file type '%s' (from '%s')" % (ext, src_name)) + if strip_dir: + base = os.path.basename(base) + obj_names.append(os.path.join(output_dir, + base + self.obj_extension)) + return obj_names + + def shared_object_filename(self, basename, strip_dir=0, output_dir=''): + assert output_dir is not None + if strip_dir: + basename = os.path.basename(basename) + return os.path.join(output_dir, basename + self.shared_lib_extension) + + def executable_filename(self, basename, strip_dir=0, output_dir=''): + assert output_dir is not None + if strip_dir: + basename = os.path.basename(basename) + return os.path.join(output_dir, basename + (self.exe_extension or '')) + + def library_filename(self, libname, lib_type='static', # or 'shared' + strip_dir=0, output_dir=''): + assert output_dir is not None + if lib_type not in ("static", "shared", "dylib", "xcode_stub"): + raise ValueError( + "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"") + fmt = getattr(self, lib_type + "_lib_format") + ext = getattr(self, lib_type + "_lib_extension") + + dir, base = os.path.split(libname) + filename = fmt % (base, ext) + if strip_dir: + dir = '' + + return os.path.join(output_dir, dir, filename) + + + # -- Utility methods ----------------------------------------------- + + def announce(self, msg, level=1): + log.debug(msg) + + def debug_print(self, msg): + from distutils.debug import DEBUG + if DEBUG: + print(msg) + + def warn(self, msg): + sys.stderr.write("warning: %s\n" % msg) + + def execute(self, func, args, msg=None, level=1): + execute(func, args, msg, self.dry_run) + + def spawn(self, cmd, **kwargs): + spawn(cmd, dry_run=self.dry_run, **kwargs) + + def move_file(self, src, dst): + return move_file(src, dst, dry_run=self.dry_run) + + def mkpath (self, name, mode=0o777): + mkpath(name, mode, dry_run=self.dry_run) + + +# Map a sys.platform/os.name ('posix', 'nt') to the default compiler +# type for that platform. Keys are interpreted as re match +# patterns. Order is important; platform mappings are preferred over +# OS names. +_default_compilers = ( + + # Platform string mappings + + # on a cygwin built python we can use gcc like an ordinary UNIXish + # compiler + ('cygwin.*', 'unix'), + + # OS name mappings + ('posix', 'unix'), + ('nt', 'msvc'), + + ) + +def get_default_compiler(osname=None, platform=None): + """Determine the default compiler to use for the given platform. + + osname should be one of the standard Python OS names (i.e. the + ones returned by os.name) and platform the common value + returned by sys.platform for the platform in question. + + The default values are os.name and sys.platform in case the + parameters are not given. + """ + if osname is None: + osname = os.name + if platform is None: + platform = sys.platform + for pattern, compiler in _default_compilers: + if re.match(pattern, platform) is not None or \ + re.match(pattern, osname) is not None: + return compiler + # Default to Unix compiler + return 'unix' + +# Map compiler types to (module_name, class_name) pairs -- ie. where to +# find the code that implements an interface to this compiler. 
(The module +# is assumed to be in the 'distutils' package.) +compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler', + "standard UNIX-style compiler"), + 'msvc': ('_msvccompiler', 'MSVCCompiler', + "Microsoft Visual C++"), + 'cygwin': ('cygwinccompiler', 'CygwinCCompiler', + "Cygwin port of GNU C Compiler for Win32"), + 'mingw32': ('cygwinccompiler', 'Mingw32CCompiler', + "Mingw32 port of GNU C Compiler for Win32"), + 'bcpp': ('bcppcompiler', 'BCPPCompiler', + "Borland C++ Compiler"), + } + +def show_compilers(): + """Print list of available compilers (used by the "--help-compiler" + options to "build", "build_ext", "build_clib"). + """ + # XXX this "knows" that the compiler option it's describing is + # "--compiler", which just happens to be the case for the three + # commands that use it. + from distutils.fancy_getopt import FancyGetopt + compilers = [] + for compiler in compiler_class.keys(): + compilers.append(("compiler="+compiler, None, + compiler_class[compiler][2])) + compilers.sort() + pretty_printer = FancyGetopt(compilers) + pretty_printer.print_help("List of available compilers:") + + +def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0): + """Generate an instance of some CCompiler subclass for the supplied + platform/compiler combination. 'plat' defaults to 'os.name' + (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler + for that platform. Currently only 'posix' and 'nt' are supported, and + the default compilers are "traditional Unix interface" (UnixCCompiler + class) and Visual C++ (MSVCCompiler class). Note that it's perfectly + possible to ask for a Unix compiler object under Windows, and a + Microsoft compiler object under Unix -- if you supply a value for + 'compiler', 'plat' is ignored. + """ + if plat is None: + plat = os.name + + try: + if compiler is None: + compiler = get_default_compiler(plat) + + (module_name, class_name, long_description) = compiler_class[compiler] + except KeyError: + msg = "don't know how to compile C/C++ code on platform '%s'" % plat + if compiler is not None: + msg = msg + " with '%s' compiler" % compiler + raise DistutilsPlatformError(msg) + + try: + module_name = "distutils." + module_name + __import__ (module_name) + module = sys.modules[module_name] + klass = vars(module)[class_name] + except ImportError: + raise DistutilsModuleError( + "can't compile C/C++ code: unable to load module '%s'" % \ + module_name) + except KeyError: + raise DistutilsModuleError( + "can't compile C/C++ code: unable to find class '%s' " + "in module '%s'" % (class_name, module_name)) + + # XXX The None is necessary to preserve backwards compatibility + # with classes that expect verbose to be the first positional + # argument. + return klass(None, dry_run, force) + + +def gen_preprocess_options(macros, include_dirs): + """Generate C pre-processor options (-D, -U, -I) as used by at least + two types of compilers: the typical Unix compiler and Visual C++. + 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) + means undefine (-U) macro 'name', and (name,value) means define (-D) + macro 'name' to 'value'. 'include_dirs' is just a list of directory + names to be added to the header file search path (-I). Returns a list + of command-line options suitable for either Unix compilers or Visual + C++. + """ + # XXX it would be nice (mainly aesthetic, and so we don't generate + # stupid-looking command lines) to go over 'macros' and eliminate + # redundant definitions/undefinitions (ie. 
ensure that only the + # latest mention of a particular macro winds up on the command + # line). I don't think it's essential, though, since most (all?) + # Unix C compilers only pay attention to the latest -D or -U + # mention of a macro on their command line. Similar situation for + # 'include_dirs'. I'm punting on both for now. Anyways, weeding out + # redundancies like this should probably be the province of + # CCompiler, since the data structures used are inherited from it + # and therefore common to all CCompiler classes. + pp_opts = [] + for macro in macros: + if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2): + raise TypeError( + "bad macro definition '%s': " + "each element of 'macros' list must be a 1- or 2-tuple" + % macro) + + if len(macro) == 1: # undefine this macro + pp_opts.append("-U%s" % macro[0]) + elif len(macro) == 2: + if macro[1] is None: # define with no explicit value + pp_opts.append("-D%s" % macro[0]) + else: + # XXX *don't* need to be clever about quoting the + # macro value here, because we're going to avoid the + # shell at all costs when we spawn the command! + pp_opts.append("-D%s=%s" % macro) + + for dir in include_dirs: + pp_opts.append("-I%s" % dir) + return pp_opts + + +def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): + """Generate linker options for searching library directories and + linking with specific libraries. 'libraries' and 'library_dirs' are, + respectively, lists of library names (not filenames!) and search + directories. Returns a list of command-line options suitable for use + with some compiler (depending on the two format strings passed in). + """ + lib_opts = [] + + for dir in library_dirs: + lib_opts.append(compiler.library_dir_option(dir)) + + for dir in runtime_library_dirs: + opt = compiler.runtime_library_dir_option(dir) + if isinstance(opt, list): + lib_opts = lib_opts + opt + else: + lib_opts.append(opt) + + # XXX it's important that we *not* remove redundant library mentions! + # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to + # resolve all symbols. I just hope we never have to say "-lfoo obj.o + # -lbar" to get things to work -- that's certainly a possibility, but a + # pretty nasty way to arrange your C code. + + for lib in libraries: + (lib_dir, lib_name) = os.path.split(lib) + if lib_dir: + lib_file = compiler.find_library_file([lib_dir], lib_name) + if lib_file: + lib_opts.append(lib_file) + else: + compiler.warn("no library file corresponding to " + "'%s' found (skipping)" % lib) + else: + lib_opts.append(compiler.library_option (lib)) + return lib_opts diff --git a/venv/Lib/site-packages/setuptools/_distutils/cmd.py b/venv/Lib/site-packages/setuptools/_distutils/cmd.py new file mode 100644 index 00000000..dba3191e --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/cmd.py @@ -0,0 +1,403 @@ +"""distutils.cmd + +Provides the Command class, the base class for the command classes +in the distutils.command package. +""" + +import sys, os, re +from distutils.errors import DistutilsOptionError +from distutils import util, dir_util, file_util, archive_util, dep_util +from distutils import log + +class Command: + """Abstract base class for defining command classes, the "worker bees" + of the Distutils. A useful analogy for command classes is to think of + them as subroutines with local variables called "options". 
The options + are "declared" in 'initialize_options()' and "defined" (given their + final values, aka "finalized") in 'finalize_options()', both of which + must be defined by every command class. The distinction between the + two is necessary because option values might come from the outside + world (command line, config file, ...), and any options dependent on + other options must be computed *after* these outside influences have + been processed -- hence 'finalize_options()'. The "body" of the + subroutine, where it does all its work based on the values of its + options, is the 'run()' method, which must also be implemented by every + command class. + """ + + # 'sub_commands' formalizes the notion of a "family" of commands, + # eg. "install" as the parent with sub-commands "install_lib", + # "install_headers", etc. The parent of a family of commands + # defines 'sub_commands' as a class attribute; it's a list of + # (command_name : string, predicate : unbound_method | string | None) + # tuples, where 'predicate' is a method of the parent command that + # determines whether the corresponding command is applicable in the + # current situation. (Eg. we "install_headers" is only applicable if + # we have any C header files to install.) If 'predicate' is None, + # that command is always applicable. + # + # 'sub_commands' is usually defined at the *end* of a class, because + # predicates can be unbound methods, so they must already have been + # defined. The canonical example is the "install" command. + sub_commands = [] + + + # -- Creation/initialization methods ------------------------------- + + def __init__(self, dist): + """Create and initialize a new Command object. Most importantly, + invokes the 'initialize_options()' method, which is the real + initializer and depends on the actual command being + instantiated. + """ + # late import because of mutual dependence between these classes + from distutils.dist import Distribution + + if not isinstance(dist, Distribution): + raise TypeError("dist must be a Distribution instance") + if self.__class__ is Command: + raise RuntimeError("Command is an abstract class") + + self.distribution = dist + self.initialize_options() + + # Per-command versions of the global flags, so that the user can + # customize Distutils' behaviour command-by-command and let some + # commands fall back on the Distribution's behaviour. None means + # "not defined, check self.distribution's copy", while 0 or 1 mean + # false and true (duh). Note that this means figuring out the real + # value of each flag is a touch complicated -- hence "self._dry_run" + # will be handled by __getattr__, below. + # XXX This needs to be fixed. + self._dry_run = None + + # verbose is largely ignored, but needs to be set for + # backwards compatibility (I think)? + self.verbose = dist.verbose + + # Some commands define a 'self.force' option to ignore file + # timestamps, but methods defined *here* assume that + # 'self.force' exists for all commands. So define it here + # just to be safe. + self.force = None + + # The 'help' flag is just used for command-line parsing, so + # none of that complicated bureaucracy is needed. + self.help = 0 + + # 'finalized' records whether or not 'finalize_options()' has been + # called. 'finalize_options()' itself should not pay attention to + # this flag: it is the business of 'ensure_finalized()', which + # always calls 'finalize_options()', to respect/update it. + self.finalized = 0 + + # XXX A more explicit way to customize dry_run would be better. 
+ def __getattr__(self, attr): + if attr == 'dry_run': + myval = getattr(self, "_" + attr) + if myval is None: + return getattr(self.distribution, attr) + else: + return myval + else: + raise AttributeError(attr) + + def ensure_finalized(self): + if not self.finalized: + self.finalize_options() + self.finalized = 1 + + # Subclasses must define: + # initialize_options() + # provide default values for all options; may be customized by + # setup script, by options from config file(s), or by command-line + # options + # finalize_options() + # decide on the final values for all options; this is called + # after all possible intervention from the outside world + # (command-line, option file, etc.) has been processed + # run() + # run the command: do whatever it is we're here to do, + # controlled by the command's various option values + + def initialize_options(self): + """Set default values for all the options that this command + supports. Note that these defaults may be overridden by other + commands, by the setup script, by config files, or by the + command-line. Thus, this is not the place to code dependencies + between options; generally, 'initialize_options()' implementations + are just a bunch of "self.foo = None" assignments. + + This method must be implemented by all command classes. + """ + raise RuntimeError("abstract method -- subclass %s must override" + % self.__class__) + + def finalize_options(self): + """Set final values for all the options that this command supports. + This is always called as late as possible, ie. after any option + assignments from the command-line or from other commands have been + done. Thus, this is the place to code option dependencies: if + 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as + long as 'foo' still has the same value it was assigned in + 'initialize_options()'. + + This method must be implemented by all command classes. + """ + raise RuntimeError("abstract method -- subclass %s must override" + % self.__class__) + + + def dump_options(self, header=None, indent=""): + from distutils.fancy_getopt import longopt_xlate + if header is None: + header = "command options for '%s':" % self.get_command_name() + self.announce(indent + header, level=log.INFO) + indent = indent + " " + for (option, _, _) in self.user_options: + option = option.translate(longopt_xlate) + if option[-1] == "=": + option = option[:-1] + value = getattr(self, option) + self.announce(indent + "%s = %s" % (option, value), + level=log.INFO) + + def run(self): + """A command's raison d'etre: carry out the action it exists to + perform, controlled by the options initialized in + 'initialize_options()', customized by other commands, the setup + script, the command-line, and config files, and finalized in + 'finalize_options()'. All terminal output and filesystem + interaction should be done by 'run()'. + + This method must be implemented by all command classes. + """ + raise RuntimeError("abstract method -- subclass %s must override" + % self.__class__) + + def announce(self, msg, level=1): + """If the current verbosity level is of greater than or equal to + 'level' print 'msg' to stdout. + """ + log.log(level, msg) + + def debug_print(self, msg): + """Print 'msg' to stdout if the global DEBUG (taken from the + DISTUTILS_DEBUG environment variable) flag is true. 
+ """ + from distutils.debug import DEBUG + if DEBUG: + print(msg) + sys.stdout.flush() + + + # -- Option validation methods ------------------------------------- + # (these are very handy in writing the 'finalize_options()' method) + # + # NB. the general philosophy here is to ensure that a particular option + # value meets certain type and value constraints. If not, we try to + # force it into conformance (eg. if we expect a list but have a string, + # split the string on comma and/or whitespace). If we can't force the + # option into conformance, raise DistutilsOptionError. Thus, command + # classes need do nothing more than (eg.) + # self.ensure_string_list('foo') + # and they can be guaranteed that thereafter, self.foo will be + # a list of strings. + + def _ensure_stringlike(self, option, what, default=None): + val = getattr(self, option) + if val is None: + setattr(self, option, default) + return default + elif not isinstance(val, str): + raise DistutilsOptionError("'%s' must be a %s (got `%s`)" + % (option, what, val)) + return val + + def ensure_string(self, option, default=None): + """Ensure that 'option' is a string; if not defined, set it to + 'default'. + """ + self._ensure_stringlike(option, "string", default) + + def ensure_string_list(self, option): + r"""Ensure that 'option' is a list of strings. If 'option' is + currently a string, we split it either on /,\s*/ or /\s+/, so + "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become + ["foo", "bar", "baz"]. + """ + val = getattr(self, option) + if val is None: + return + elif isinstance(val, str): + setattr(self, option, re.split(r',\s*|\s+', val)) + else: + if isinstance(val, list): + ok = all(isinstance(v, str) for v in val) + else: + ok = False + if not ok: + raise DistutilsOptionError( + "'%s' must be a list of strings (got %r)" + % (option, val)) + + def _ensure_tested_string(self, option, tester, what, error_fmt, + default=None): + val = self._ensure_stringlike(option, what, default) + if val is not None and not tester(val): + raise DistutilsOptionError(("error in '%s' option: " + error_fmt) + % (option, val)) + + def ensure_filename(self, option): + """Ensure that 'option' is the name of an existing file.""" + self._ensure_tested_string(option, os.path.isfile, + "filename", + "'%s' does not exist or is not a file") + + def ensure_dirname(self, option): + self._ensure_tested_string(option, os.path.isdir, + "directory name", + "'%s' does not exist or is not a directory") + + + # -- Convenience methods for commands ------------------------------ + + def get_command_name(self): + if hasattr(self, 'command_name'): + return self.command_name + else: + return self.__class__.__name__ + + def set_undefined_options(self, src_cmd, *option_pairs): + """Set the values of any "undefined" options from corresponding + option values in some other command object. "Undefined" here means + "is None", which is the convention used to indicate that an option + has not been changed between 'initialize_options()' and + 'finalize_options()'. Usually called from 'finalize_options()' for + options that depend on some other command rather than another + option of the same command. 'src_cmd' is the other command from + which option values will be taken (a command object will be created + for it if necessary); the remaining arguments are + '(src_option,dst_option)' tuples which mean "take the value of + 'src_option' in the 'src_cmd' command object, and copy it to + 'dst_option' in the current command object". 
+ """ + # Option_pairs: list of (src_option, dst_option) tuples + src_cmd_obj = self.distribution.get_command_obj(src_cmd) + src_cmd_obj.ensure_finalized() + for (src_option, dst_option) in option_pairs: + if getattr(self, dst_option) is None: + setattr(self, dst_option, getattr(src_cmd_obj, src_option)) + + def get_finalized_command(self, command, create=1): + """Wrapper around Distribution's 'get_command_obj()' method: find + (create if necessary and 'create' is true) the command object for + 'command', call its 'ensure_finalized()' method, and return the + finalized command object. + """ + cmd_obj = self.distribution.get_command_obj(command, create) + cmd_obj.ensure_finalized() + return cmd_obj + + # XXX rename to 'get_reinitialized_command()'? (should do the + # same in dist.py, if so) + def reinitialize_command(self, command, reinit_subcommands=0): + return self.distribution.reinitialize_command(command, + reinit_subcommands) + + def run_command(self, command): + """Run some other command: uses the 'run_command()' method of + Distribution, which creates and finalizes the command object if + necessary and then invokes its 'run()' method. + """ + self.distribution.run_command(command) + + def get_sub_commands(self): + """Determine the sub-commands that are relevant in the current + distribution (ie., that need to be run). This is based on the + 'sub_commands' class attribute: each tuple in that list may include + a method that we call to determine if the subcommand needs to be + run for the current distribution. Return a list of command names. + """ + commands = [] + for (cmd_name, method) in self.sub_commands: + if method is None or method(self): + commands.append(cmd_name) + return commands + + + # -- External world manipulation ----------------------------------- + + def warn(self, msg): + log.warn("warning: %s: %s\n", self.get_command_name(), msg) + + def execute(self, func, args, msg=None, level=1): + util.execute(func, args, msg, dry_run=self.dry_run) + + def mkpath(self, name, mode=0o777): + dir_util.mkpath(name, mode, dry_run=self.dry_run) + + def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1, + link=None, level=1): + """Copy a file respecting verbose, dry-run and force flags. (The + former two default to whatever is in the Distribution object, and + the latter defaults to false for commands that don't define it.)""" + return file_util.copy_file(infile, outfile, preserve_mode, + preserve_times, not self.force, link, + dry_run=self.dry_run) + + def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1, + preserve_symlinks=0, level=1): + """Copy an entire directory tree respecting verbose, dry-run, + and force flags. 
+ """ + return dir_util.copy_tree(infile, outfile, preserve_mode, + preserve_times, preserve_symlinks, + not self.force, dry_run=self.dry_run) + + def move_file (self, src, dst, level=1): + """Move a file respecting dry-run flag.""" + return file_util.move_file(src, dst, dry_run=self.dry_run) + + def spawn(self, cmd, search_path=1, level=1): + """Spawn an external command respecting dry-run flag.""" + from distutils.spawn import spawn + spawn(cmd, search_path, dry_run=self.dry_run) + + def make_archive(self, base_name, format, root_dir=None, base_dir=None, + owner=None, group=None): + return archive_util.make_archive(base_name, format, root_dir, base_dir, + dry_run=self.dry_run, + owner=owner, group=group) + + def make_file(self, infiles, outfile, func, args, + exec_msg=None, skip_msg=None, level=1): + """Special case of 'execute()' for operations that process one or + more input files and generate one output file. Works just like + 'execute()', except the operation is skipped and a different + message printed if 'outfile' already exists and is newer than all + files listed in 'infiles'. If the command defined 'self.force', + and it is true, then the command is unconditionally run -- does no + timestamp checks. + """ + if skip_msg is None: + skip_msg = "skipping %s (inputs unchanged)" % outfile + + # Allow 'infiles' to be a single string + if isinstance(infiles, str): + infiles = (infiles,) + elif not isinstance(infiles, (list, tuple)): + raise TypeError( + "'infiles' must be a string, or a list or tuple of strings") + + if exec_msg is None: + exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles)) + + # If 'outfile' must be regenerated (either because it doesn't + # exist, is out-of-date, or the 'force' flag is true) then + # perform the action that presumably regenerates it + if self.force or dep_util.newer_group(infiles, outfile): + self.execute(func, args, exec_msg, level) + # Otherwise, print the "skip" message + else: + log.debug(skip_msg) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__init__.py b/venv/Lib/site-packages/setuptools/_distutils/command/__init__.py new file mode 100644 index 00000000..481eea9f --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/__init__.py @@ -0,0 +1,31 @@ +"""distutils.command + +Package containing implementation of all the standard Distutils +commands.""" + +__all__ = ['build', + 'build_py', + 'build_ext', + 'build_clib', + 'build_scripts', + 'clean', + 'install', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + 'sdist', + 'register', + 'bdist', + 'bdist_dumb', + 'bdist_rpm', + 'bdist_wininst', + 'check', + 'upload', + # These two are reserved for future use: + #'bdist_sdux', + #'bdist_pkgtool', + # Note: + # bdist_packager is not included because it only provides + # an abstract base class + ] diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..0b1ff52e Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-36.pyc new file mode 100644 index 00000000..607d5882 Binary files /dev/null and 
b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-36.pyc new file mode 100644 index 00000000..f82770ba Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_msi.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_msi.cpython-36.pyc new file mode 100644 index 00000000..870b43a3 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_msi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-36.pyc new file mode 100644 index 00000000..c0e44a87 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-36.pyc new file mode 100644 index 00000000..709d86b8 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-36.pyc new file mode 100644 index 00000000..e7ee2ac3 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_clib.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_clib.cpython-36.pyc new file mode 100644 index 00000000..0fbc8afd Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_clib.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-36.pyc new file mode 100644 index 00000000..c0faa277 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-36.pyc new file mode 100644 index 00000000..9d4d0892 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-36.pyc new file mode 100644 index 00000000..2bfdcc5d Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-36.pyc 
b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-36.pyc new file mode 100644 index 00000000..8a1a4152 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/clean.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/clean.cpython-36.pyc new file mode 100644 index 00000000..d638892d Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/clean.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-36.pyc new file mode 100644 index 00000000..87a3a592 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-36.pyc new file mode 100644 index 00000000..b7a4f95c Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-36.pyc new file mode 100644 index 00000000..aeecbdf5 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-36.pyc new file mode 100644 index 00000000..7b3afb25 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_headers.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_headers.cpython-36.pyc new file mode 100644 index 00000000..3119396e Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_headers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-36.pyc new file mode 100644 index 00000000..7120074c Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-36.pyc new file mode 100644 index 00000000..7c85e288 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-36.pyc new file mode 100644 index 00000000..f3897a9b Binary files /dev/null and 
b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-36.pyc new file mode 100644 index 00000000..0fce1f1f Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-36.pyc new file mode 100644 index 00000000..85c940bb Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-36.pyc new file mode 100644 index 00000000..e1a2efe5 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist.py new file mode 100644 index 00000000..014871d2 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/bdist.py @@ -0,0 +1,143 @@ +"""distutils.command.bdist + +Implements the Distutils 'bdist' command (create a built [binary] +distribution).""" + +import os +from distutils.core import Command +from distutils.errors import * +from distutils.util import get_platform + + +def show_formats(): + """Print list of available formats (arguments to "--format" option). + """ + from distutils.fancy_getopt import FancyGetopt + formats = [] + for format in bdist.format_commands: + formats.append(("formats=" + format, None, + bdist.format_command[format][1])) + pretty_printer = FancyGetopt(formats) + pretty_printer.print_help("List of available distribution formats:") + + +class bdist(Command): + + description = "create a built (binary) distribution" + + user_options = [('bdist-base=', 'b', + "temporary directory for creating built distributions"), + ('plat-name=', 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform()), + ('formats=', None, + "formats for distribution (comma-separated list)"), + ('dist-dir=', 'd', + "directory to put final built distributions in " + "[default: dist]"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ('owner=', 'u', + "Owner name used when creating a tar file" + " [default: current user]"), + ('group=', 'g', + "Group name used when creating a tar file" + " [default: current group]"), + ] + + boolean_options = ['skip-build'] + + help_options = [ + ('help-formats', None, + "lists available distribution formats", show_formats), + ] + + # The following commands do not take a format option from bdist + no_format_option = ('bdist_rpm',) + + # This won't do in reality: will need to distinguish RPM-ish Linux, + # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. + default_format = {'posix': 'gztar', + 'nt': 'zip'} + + # Establish the preferred order (for the --help-formats option). + format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar', + 'wininst', 'zip', 'msi'] + + # And the real information. 
+ format_command = {'rpm': ('bdist_rpm', "RPM distribution"), + 'gztar': ('bdist_dumb', "gzip'ed tar file"), + 'bztar': ('bdist_dumb', "bzip2'ed tar file"), + 'xztar': ('bdist_dumb', "xz'ed tar file"), + 'ztar': ('bdist_dumb', "compressed tar file"), + 'tar': ('bdist_dumb', "tar file"), + 'wininst': ('bdist_wininst', + "Windows executable installer"), + 'zip': ('bdist_dumb', "ZIP file"), + 'msi': ('bdist_msi', "Microsoft Installer") + } + + + def initialize_options(self): + self.bdist_base = None + self.plat_name = None + self.formats = None + self.dist_dir = None + self.skip_build = 0 + self.group = None + self.owner = None + + def finalize_options(self): + # have to finalize 'plat_name' before 'bdist_base' + if self.plat_name is None: + if self.skip_build: + self.plat_name = get_platform() + else: + self.plat_name = self.get_finalized_command('build').plat_name + + # 'bdist_base' -- parent of per-built-distribution-format + # temporary directories (eg. we'll probably have + # "build/bdist./dumb", "build/bdist./rpm", etc.) + if self.bdist_base is None: + build_base = self.get_finalized_command('build').build_base + self.bdist_base = os.path.join(build_base, + 'bdist.' + self.plat_name) + + self.ensure_string_list('formats') + if self.formats is None: + try: + self.formats = [self.default_format[os.name]] + except KeyError: + raise DistutilsPlatformError( + "don't know how to create built distributions " + "on platform %s" % os.name) + + if self.dist_dir is None: + self.dist_dir = "dist" + + def run(self): + # Figure out which sub-commands we need to run. + commands = [] + for format in self.formats: + try: + commands.append(self.format_command[format][0]) + except KeyError: + raise DistutilsOptionError("invalid format '%s'" % format) + + # Reinitialize and run each command. + for i in range(len(self.formats)): + cmd_name = commands[i] + sub_cmd = self.reinitialize_command(cmd_name) + if cmd_name not in self.no_format_option: + sub_cmd.format = self.formats[i] + + # passing the owner and group names for tar archiving + if cmd_name == 'bdist_dumb': + sub_cmd.owner = self.owner + sub_cmd.group = self.group + + # If we're going to need to run this command again, tell it to + # keep its temporary files around so subsequent runs go faster. 
+ if cmd_name in commands[i+1:]: + sub_cmd.keep_temp = 1 + self.run_command(cmd_name) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py new file mode 100644 index 00000000..f0d6b5b8 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py @@ -0,0 +1,123 @@ +"""distutils.command.bdist_dumb + +Implements the Distutils 'bdist_dumb' command (create a "dumb" built +distribution -- i.e., just an archive to be unpacked under $prefix or +$exec_prefix).""" + +import os +from distutils.core import Command +from distutils.util import get_platform +from distutils.dir_util import remove_tree, ensure_relative +from distutils.errors import * +from distutils.sysconfig import get_python_version +from distutils import log + +class bdist_dumb(Command): + + description = "create a \"dumb\" built distribution" + + user_options = [('bdist-dir=', 'd', + "temporary directory for creating the distribution"), + ('plat-name=', 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform()), + ('format=', 'f', + "archive format to create (tar, gztar, bztar, xztar, " + "ztar, zip)"), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ('relative', None, + "build the archive using relative paths " + "(default: false)"), + ('owner=', 'u', + "Owner name used when creating a tar file" + " [default: current user]"), + ('group=', 'g', + "Group name used when creating a tar file" + " [default: current group]"), + ] + + boolean_options = ['keep-temp', 'skip-build', 'relative'] + + default_format = { 'posix': 'gztar', + 'nt': 'zip' } + + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None + self.format = None + self.keep_temp = 0 + self.dist_dir = None + self.skip_build = None + self.relative = 0 + self.owner = None + self.group = None + + def finalize_options(self): + if self.bdist_dir is None: + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'dumb') + + if self.format is None: + try: + self.format = self.default_format[os.name] + except KeyError: + raise DistutilsPlatformError( + "don't know how to create dumb built distributions " + "on platform %s" % os.name) + + self.set_undefined_options('bdist', + ('dist_dir', 'dist_dir'), + ('plat_name', 'plat_name'), + ('skip_build', 'skip_build')) + + def run(self): + if not self.skip_build: + self.run_command('build') + + install = self.reinitialize_command('install', reinit_subcommands=1) + install.root = self.bdist_dir + install.skip_build = self.skip_build + install.warn_dir = 0 + + log.info("installing to %s", self.bdist_dir) + self.run_command('install') + + # And make an archive relative to the root of the + # pseudo-installation tree. 
+ archive_basename = "%s.%s" % (self.distribution.get_fullname(), + self.plat_name) + + pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) + if not self.relative: + archive_root = self.bdist_dir + else: + if (self.distribution.has_ext_modules() and + (install.install_base != install.install_platbase)): + raise DistutilsPlatformError( + "can't make a dumb built distribution where " + "base and platbase are different (%s, %s)" + % (repr(install.install_base), + repr(install.install_platbase))) + else: + archive_root = os.path.join(self.bdist_dir, + ensure_relative(install.install_base)) + + # Make the archive + filename = self.make_archive(pseudoinstall_root, + self.format, root_dir=archive_root, + owner=self.owner, group=self.group) + if self.distribution.has_ext_modules(): + pyversion = get_python_version() + else: + pyversion = 'any' + self.distribution.dist_files.append(('bdist_dumb', pyversion, + filename)) + + if not self.keep_temp: + remove_tree(self.bdist_dir, dry_run=self.dry_run) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_msi.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_msi.py new file mode 100644 index 00000000..0863a188 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_msi.py @@ -0,0 +1,749 @@ +# Copyright (C) 2005, 2006 Martin von Löwis +# Licensed to PSF under a Contributor Agreement. +# The bdist_wininst command proper +# based on bdist_wininst +""" +Implements the bdist_msi command. +""" + +import os +import sys +import warnings +from distutils.core import Command +from distutils.dir_util import remove_tree +from distutils.sysconfig import get_python_version +from distutils.version import StrictVersion +from distutils.errors import DistutilsOptionError +from distutils.util import get_platform +from distutils import log +import msilib +from msilib import schema, sequence, text +from msilib import Directory, Feature, Dialog, add_data + +class PyDialog(Dialog): + """Dialog class with a fixed layout: controls at the top, then a ruler, + then a list of buttons: back, next, cancel. Optionally a bitmap at the + left.""" + def __init__(self, *args, **kw): + """Dialog(database, name, x, y, w, h, attributes, title, first, + default, cancel, bitmap=true)""" + Dialog.__init__(self, *args) + ruler = self.h - 36 + bmwidth = 152*ruler/328 + #if kw.get("bitmap", True): + # self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin") + self.line("BottomLine", 0, ruler, self.w, 0) + + def title(self, title): + "Set the title text of the dialog at the top." + # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix, + # text, in VerdanaBold10 + self.text("Title", 15, 10, 320, 60, 0x30003, + r"{\VerdanaBold10}%s" % title) + + def back(self, title, next, name = "Back", active = 1): + """Add a back button with a given title, the tab-next button, + its name in the Control table, possibly initially disabled. + + Return the button, so that events can be associated""" + if active: + flags = 3 # Visible|Enabled + else: + flags = 1 # Visible + return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next) + + def cancel(self, title, next, name = "Cancel", active = 1): + """Add a cancel button with a given title, the tab-next button, + its name in the Control table, possibly initially disabled. 
+ + Return the button, so that events can be associated""" + if active: + flags = 3 # Visible|Enabled + else: + flags = 1 # Visible + return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next) + + def next(self, title, next, name = "Next", active = 1): + """Add a Next button with a given title, the tab-next button, + its name in the Control table, possibly initially disabled. + + Return the button, so that events can be associated""" + if active: + flags = 3 # Visible|Enabled + else: + flags = 1 # Visible + return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next) + + def xbutton(self, name, title, next, xpos): + """Add a button with a given title, the tab-next button, + its name in the Control table, giving its x position; the + y-position is aligned with the other buttons. + + Return the button, so that events can be associated""" + return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next) + +class bdist_msi(Command): + + description = "create a Microsoft Installer (.msi) binary distribution" + + user_options = [('bdist-dir=', None, + "temporary directory for creating the distribution"), + ('plat-name=', 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform()), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('target-version=', None, + "require a specific python version" + + " on the target system"), + ('no-target-compile', 'c', + "do not compile .py to .pyc on the target system"), + ('no-target-optimize', 'o', + "do not compile .py to .pyo (optimized) " + "on the target system"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ('install-script=', None, + "basename of installation script to be run after " + "installation or before deinstallation"), + ('pre-install-script=', None, + "Fully qualified filename of a script to be run before " + "any files are installed. 
This script need not be in the " + "distribution"), + ] + + boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', + 'skip-build'] + + all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4', + '2.5', '2.6', '2.7', '2.8', '2.9', + '3.0', '3.1', '3.2', '3.3', '3.4', + '3.5', '3.6', '3.7', '3.8', '3.9'] + other_version = 'X' + + def __init__(self, *args, **kw): + super().__init__(*args, **kw) + warnings.warn("bdist_msi command is deprecated since Python 3.9, " + "use bdist_wheel (wheel packages) instead", + DeprecationWarning, 2) + + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None + self.keep_temp = 0 + self.no_target_compile = 0 + self.no_target_optimize = 0 + self.target_version = None + self.dist_dir = None + self.skip_build = None + self.install_script = None + self.pre_install_script = None + self.versions = None + + def finalize_options(self): + self.set_undefined_options('bdist', ('skip_build', 'skip_build')) + + if self.bdist_dir is None: + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'msi') + + short_version = get_python_version() + if (not self.target_version) and self.distribution.has_ext_modules(): + self.target_version = short_version + + if self.target_version: + self.versions = [self.target_version] + if not self.skip_build and self.distribution.has_ext_modules()\ + and self.target_version != short_version: + raise DistutilsOptionError( + "target version can only be %s, or the '--skip-build'" + " option must be specified" % (short_version,)) + else: + self.versions = list(self.all_versions) + + self.set_undefined_options('bdist', + ('dist_dir', 'dist_dir'), + ('plat_name', 'plat_name'), + ) + + if self.pre_install_script: + raise DistutilsOptionError( + "the pre-install-script feature is not yet implemented") + + if self.install_script: + for script in self.distribution.scripts: + if self.install_script == os.path.basename(script): + break + else: + raise DistutilsOptionError( + "install_script '%s' not found in scripts" + % self.install_script) + self.install_script_key = None + + def run(self): + if not self.skip_build: + self.run_command('build') + + install = self.reinitialize_command('install', reinit_subcommands=1) + install.prefix = self.bdist_dir + install.skip_build = self.skip_build + install.warn_dir = 0 + + install_lib = self.reinitialize_command('install_lib') + # we do not want to include pyc or pyo files + install_lib.compile = 0 + install_lib.optimize = 0 + + if self.distribution.has_ext_modules(): + # If we are building an installer for a Python version other + # than the one we are currently running, then we need to ensure + # our build_lib reflects the other Python version rather than ours. + # Note that for target_version!=sys.version, we must have skipped the + # build step, so there is no issue with enforcing the build of this + # version. 
+ target_version = self.target_version + if not target_version: + assert self.skip_build, "Should have already checked this" + target_version = '%d.%d' % sys.version_info[:2] + plat_specifier = ".%s-%s" % (self.plat_name, target_version) + build = self.get_finalized_command('build') + build.build_lib = os.path.join(build.build_base, + 'lib' + plat_specifier) + + log.info("installing to %s", self.bdist_dir) + install.ensure_finalized() + + # avoid warning of 'install_lib' about installing + # into a directory not in sys.path + sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB')) + + install.run() + + del sys.path[0] + + self.mkpath(self.dist_dir) + fullname = self.distribution.get_fullname() + installer_name = self.get_installer_filename(fullname) + installer_name = os.path.abspath(installer_name) + if os.path.exists(installer_name): os.unlink(installer_name) + + metadata = self.distribution.metadata + author = metadata.author + if not author: + author = metadata.maintainer + if not author: + author = "UNKNOWN" + version = metadata.get_version() + # ProductVersion must be strictly numeric + # XXX need to deal with prerelease versions + sversion = "%d.%d.%d" % StrictVersion(version).version + # Prefix ProductName with Python x.y, so that + # it sorts together with the other Python packages + # in Add-Remove-Programs (APR) + fullname = self.distribution.get_fullname() + if self.target_version: + product_name = "Python %s %s" % (self.target_version, fullname) + else: + product_name = "Python %s" % (fullname) + self.db = msilib.init_database(installer_name, schema, + product_name, msilib.gen_uuid(), + sversion, author) + msilib.add_tables(self.db, sequence) + props = [('DistVersion', version)] + email = metadata.author_email or metadata.maintainer_email + if email: + props.append(("ARPCONTACT", email)) + if metadata.url: + props.append(("ARPURLINFOABOUT", metadata.url)) + if props: + add_data(self.db, 'Property', props) + + self.add_find_python() + self.add_files() + self.add_scripts() + self.add_ui() + self.db.Commit() + + if hasattr(self.distribution, 'dist_files'): + tup = 'bdist_msi', self.target_version or 'any', fullname + self.distribution.dist_files.append(tup) + + if not self.keep_temp: + remove_tree(self.bdist_dir, dry_run=self.dry_run) + + def add_files(self): + db = self.db + cab = msilib.CAB("distfiles") + rootdir = os.path.abspath(self.bdist_dir) + + root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir") + f = Feature(db, "Python", "Python", "Everything", + 0, 1, directory="TARGETDIR") + + items = [(f, root, '')] + for version in self.versions + [self.other_version]: + target = "TARGETDIR" + version + name = default = "Python" + version + desc = "Everything" + if version is self.other_version: + title = "Python from another location" + level = 2 + else: + title = "Python %s from registry" % version + level = 1 + f = Feature(db, name, title, desc, 1, level, directory=target) + dir = Directory(db, cab, root, rootdir, target, default) + items.append((f, dir, version)) + db.Commit() + + seen = {} + for feature, dir, version in items: + todo = [dir] + while todo: + dir = todo.pop() + for file in os.listdir(dir.absolute): + afile = os.path.join(dir.absolute, file) + if os.path.isdir(afile): + short = "%s|%s" % (dir.make_short(file), file) + default = file + version + newdir = Directory(db, cab, dir, file, default, short) + todo.append(newdir) + else: + if not dir.component: + dir.start_component(dir.logical, feature, 0) + if afile not in seen: + key = seen[afile] = 
dir.add_file(file) + if file==self.install_script: + if self.install_script_key: + raise DistutilsOptionError( + "Multiple files with name %s" % file) + self.install_script_key = '[#%s]' % key + else: + key = seen[afile] + add_data(self.db, "DuplicateFile", + [(key + version, dir.component, key, None, dir.logical)]) + db.Commit() + cab.commit(db) + + def add_find_python(self): + """Adds code to the installer to compute the location of Python. + + Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the + registry for each version of Python. + + Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined, + else from PYTHON.MACHINE.X.Y. + + Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe""" + + start = 402 + for ver in self.versions: + install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver + machine_reg = "python.machine." + ver + user_reg = "python.user." + ver + machine_prop = "PYTHON.MACHINE." + ver + user_prop = "PYTHON.USER." + ver + machine_action = "PythonFromMachine" + ver + user_action = "PythonFromUser" + ver + exe_action = "PythonExe" + ver + target_dir_prop = "TARGETDIR" + ver + exe_prop = "PYTHON" + ver + if msilib.Win64: + # type: msidbLocatorTypeRawValue + msidbLocatorType64bit + Type = 2+16 + else: + Type = 2 + add_data(self.db, "RegLocator", + [(machine_reg, 2, install_path, None, Type), + (user_reg, 1, install_path, None, Type)]) + add_data(self.db, "AppSearch", + [(machine_prop, machine_reg), + (user_prop, user_reg)]) + add_data(self.db, "CustomAction", + [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"), + (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"), + (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"), + ]) + add_data(self.db, "InstallExecuteSequence", + [(machine_action, machine_prop, start), + (user_action, user_prop, start + 1), + (exe_action, None, start + 2), + ]) + add_data(self.db, "InstallUISequence", + [(machine_action, machine_prop, start), + (user_action, user_prop, start + 1), + (exe_action, None, start + 2), + ]) + add_data(self.db, "Condition", + [("Python" + ver, 0, "NOT TARGETDIR" + ver)]) + start += 4 + assert start < 500 + + def add_scripts(self): + if self.install_script: + start = 6800 + for ver in self.versions + [self.other_version]: + install_action = "install_script." + ver + exe_prop = "PYTHON" + ver + add_data(self.db, "CustomAction", + [(install_action, 50, exe_prop, self.install_script_key)]) + add_data(self.db, "InstallExecuteSequence", + [(install_action, "&Python%s=3" % ver, start)]) + start += 1 + # XXX pre-install scripts are currently refused in finalize_options() + # but if this feature is completed, it will also need to add + # entries for each version as the above code does + if self.pre_install_script: + scriptfn = os.path.join(self.bdist_dir, "preinstall.bat") + with open(scriptfn, "w") as f: + # The batch file will be executed with [PYTHON], so that %1 + # is the path to the Python interpreter; %0 will be the path + # of the batch file. 
+ # rem =""" + # %1 %0 + # exit + # """ + # + f.write('rem ="""\n%1 %0\nexit\n"""\n') + with open(self.pre_install_script) as fin: + f.write(fin.read()) + add_data(self.db, "Binary", + [("PreInstall", msilib.Binary(scriptfn)) + ]) + add_data(self.db, "CustomAction", + [("PreInstall", 2, "PreInstall", None) + ]) + add_data(self.db, "InstallExecuteSequence", + [("PreInstall", "NOT Installed", 450)]) + + + def add_ui(self): + db = self.db + x = y = 50 + w = 370 + h = 300 + title = "[ProductName] Setup" + + # see "Dialog Style Bits" + modal = 3 # visible | modal + modeless = 1 # visible + track_disk_space = 32 + + # UI customization properties + add_data(db, "Property", + # See "DefaultUIFont Property" + [("DefaultUIFont", "DlgFont8"), + # See "ErrorDialog Style Bit" + ("ErrorDialog", "ErrorDlg"), + ("Progress1", "Install"), # modified in maintenance type dlg + ("Progress2", "installs"), + ("MaintenanceForm_Action", "Repair"), + # possible values: ALL, JUSTME + ("WhichUsers", "ALL") + ]) + + # Fonts, see "TextStyle Table" + add_data(db, "TextStyle", + [("DlgFont8", "Tahoma", 9, None, 0), + ("DlgFontBold8", "Tahoma", 8, None, 1), #bold + ("VerdanaBold10", "Verdana", 10, None, 1), + ("VerdanaRed9", "Verdana", 9, 255, 0), + ]) + + # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table" + # Numbers indicate sequence; see sequence.py for how these action integrate + add_data(db, "InstallUISequence", + [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140), + ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141), + # In the user interface, assume all-users installation if privileged. + ("SelectFeaturesDlg", "Not Installed", 1230), + # XXX no support for resume installations yet + #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240), + ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250), + ("ProgressDlg", None, 1280)]) + + add_data(db, 'ActionText', text.ActionText) + add_data(db, 'UIText', text.UIText) + ##################################################################### + # Standard dialogs: FatalError, UserExit, ExitDialog + fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title, + "Finish", "Finish", "Finish") + fatal.title("[ProductName] Installer ended prematurely") + fatal.back("< Back", "Finish", active = 0) + fatal.cancel("Cancel", "Back", active = 0) + fatal.text("Description1", 15, 70, 320, 80, 0x30003, + "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.") + fatal.text("Description2", 15, 155, 320, 20, 0x30003, + "Click the Finish button to exit the Installer.") + c=fatal.next("Finish", "Cancel", name="Finish") + c.event("EndDialog", "Exit") + + user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title, + "Finish", "Finish", "Finish") + user_exit.title("[ProductName] Installer was interrupted") + user_exit.back("< Back", "Finish", active = 0) + user_exit.cancel("Cancel", "Back", active = 0) + user_exit.text("Description1", 15, 70, 320, 80, 0x30003, + "[ProductName] setup was interrupted. Your system has not been modified. 
" + "To install this program at a later time, please run the installation again.") + user_exit.text("Description2", 15, 155, 320, 20, 0x30003, + "Click the Finish button to exit the Installer.") + c = user_exit.next("Finish", "Cancel", name="Finish") + c.event("EndDialog", "Exit") + + exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title, + "Finish", "Finish", "Finish") + exit_dialog.title("Completing the [ProductName] Installer") + exit_dialog.back("< Back", "Finish", active = 0) + exit_dialog.cancel("Cancel", "Back", active = 0) + exit_dialog.text("Description", 15, 235, 320, 20, 0x30003, + "Click the Finish button to exit the Installer.") + c = exit_dialog.next("Finish", "Cancel", name="Finish") + c.event("EndDialog", "Return") + + ##################################################################### + # Required dialog: FilesInUse, ErrorDlg + inuse = PyDialog(db, "FilesInUse", + x, y, w, h, + 19, # KeepModeless|Modal|Visible + title, + "Retry", "Retry", "Retry", bitmap=False) + inuse.text("Title", 15, 6, 200, 15, 0x30003, + r"{\DlgFontBold8}Files in Use") + inuse.text("Description", 20, 23, 280, 20, 0x30003, + "Some files that need to be updated are currently in use.") + inuse.text("Text", 20, 55, 330, 50, 3, + "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.") + inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess", + None, None, None) + c=inuse.back("Exit", "Ignore", name="Exit") + c.event("EndDialog", "Exit") + c=inuse.next("Ignore", "Retry", name="Ignore") + c.event("EndDialog", "Ignore") + c=inuse.cancel("Retry", "Exit", name="Retry") + c.event("EndDialog","Retry") + + # See "Error Dialog". See "ICE20" for the required names of the controls. 
+ error = Dialog(db, "ErrorDlg", + 50, 10, 330, 101, + 65543, # Error|Minimize|Modal|Visible + title, + "ErrorText", None, None) + error.text("ErrorText", 50,9,280,48,3, "") + #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None) + error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo") + error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes") + error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort") + error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel") + error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore") + error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk") + error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry") + + ##################################################################### + # Global "Query Cancel" dialog + cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title, + "No", "No", "No") + cancel.text("Text", 48, 15, 194, 30, 3, + "Are you sure you want to cancel [ProductName] installation?") + #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None, + # "py.ico", None, None) + c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No") + c.event("EndDialog", "Exit") + + c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes") + c.event("EndDialog", "Return") + + ##################################################################### + # Global "Wait for costing" dialog + costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title, + "Return", "Return", "Return") + costing.text("Text", 48, 15, 194, 30, 3, + "Please wait while the installer finishes determining your disk space requirements.") + c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None) + c.event("EndDialog", "Exit") + + ##################################################################### + # Preparation dialog: no user input except cancellation + prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title, + "Cancel", "Cancel", "Cancel") + prep.text("Description", 15, 70, 320, 40, 0x30003, + "Please wait while the Installer prepares to guide you through the installation.") + prep.title("Welcome to the [ProductName] Installer") + c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...") + c.mapping("ActionText", "Text") + c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None) + c.mapping("ActionData", "Text") + prep.back("Back", None, active=0) + prep.next("Next", None, active=0) + c=prep.cancel("Cancel", None) + c.event("SpawnDialog", "CancelDlg") + + ##################################################################### + # Feature (Python directory) selection + seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title, + "Next", "Next", "Cancel") + seldlg.title("Select Python Installations") + + seldlg.text("Hint", 15, 30, 300, 20, 3, + "Select the Python locations where %s should be installed." 
+ % self.distribution.get_fullname()) + + seldlg.back("< Back", None, active=0) + c = seldlg.next("Next >", "Cancel") + order = 1 + c.event("[TARGETDIR]", "[SourceDir]", ordering=order) + for version in self.versions + [self.other_version]: + order += 1 + c.event("[TARGETDIR]", "[TARGETDIR%s]" % version, + "FEATURE_SELECTED AND &Python%s=3" % version, + ordering=order) + c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1) + c.event("EndDialog", "Return", ordering=order + 2) + c = seldlg.cancel("Cancel", "Features") + c.event("SpawnDialog", "CancelDlg") + + c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3, + "FEATURE", None, "PathEdit", None) + c.event("[FEATURE_SELECTED]", "1") + ver = self.other_version + install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver + dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver + + c = seldlg.text("Other", 15, 200, 300, 15, 3, + "Provide an alternate Python location") + c.condition("Enable", install_other_cond) + c.condition("Show", install_other_cond) + c.condition("Disable", dont_install_other_cond) + c.condition("Hide", dont_install_other_cond) + + c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1, + "TARGETDIR" + ver, None, "Next", None) + c.condition("Enable", install_other_cond) + c.condition("Show", install_other_cond) + c.condition("Disable", dont_install_other_cond) + c.condition("Hide", dont_install_other_cond) + + ##################################################################### + # Disk cost + cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title, + "OK", "OK", "OK", bitmap=False) + cost.text("Title", 15, 6, 200, 15, 0x30003, + r"{\DlgFontBold8}Disk Space Requirements") + cost.text("Description", 20, 20, 280, 20, 0x30003, + "The disk space required for the installation of the selected features.") + cost.text("Text", 20, 53, 330, 60, 3, + "The highlighted volumes (if any) do not have enough disk space " + "available for the currently selected features. You can either " + "remove some files from the highlighted volumes, or choose to " + "install less features onto local drive(s), or select different " + "destination drive(s).") + cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223, + None, "{120}{70}{70}{70}{70}", None, None) + cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return") + + ##################################################################### + # WhichUsers Dialog. Only available on NT, and for privileged users. + # This must be run before FindRelatedProducts, because that will + # take into account whether the previous installation was per-user + # or per-machine. We currently don't support going back to this + # dialog after "Next" was selected; to support this, we would need to + # find how to reset the ALLUSERS property, and how to re-run + # FindRelatedProducts. + # On Windows9x, the ALLUSERS property is ignored on the command line + # and in the Property table, but installer fails according to the documentation + # if a dialog attempts to set ALLUSERS. 
+ whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title, + "AdminInstall", "Next", "Cancel") + whichusers.title("Select whether to install [ProductName] for all users of this computer.") + # A radio group with two options: allusers, justme + g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3, + "WhichUsers", "", "Next") + g.add("ALL", 0, 5, 150, 20, "Install for all users") + g.add("JUSTME", 0, 25, 150, 20, "Install just for me") + + whichusers.back("Back", None, active=0) + + c = whichusers.next("Next >", "Cancel") + c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1) + c.event("EndDialog", "Return", ordering = 2) + + c = whichusers.cancel("Cancel", "AdminInstall") + c.event("SpawnDialog", "CancelDlg") + + ##################################################################### + # Installation Progress dialog (modeless) + progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title, + "Cancel", "Cancel", "Cancel", bitmap=False) + progress.text("Title", 20, 15, 200, 15, 0x30003, + r"{\DlgFontBold8}[Progress1] [ProductName]") + progress.text("Text", 35, 65, 300, 30, 3, + "Please wait while the Installer [Progress2] [ProductName]. " + "This may take several minutes.") + progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:") + + c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...") + c.mapping("ActionText", "Text") + + #c=progress.text("ActionData", 35, 140, 300, 20, 3, None) + #c.mapping("ActionData", "Text") + + c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537, + None, "Progress done", None, None) + c.mapping("SetProgress", "Progress") + + progress.back("< Back", "Next", active=False) + progress.next("Next >", "Cancel", active=False) + progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg") + + ################################################################### + # Maintenance type: repair/uninstall + maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title, + "Next", "Next", "Cancel") + maint.title("Welcome to the [ProductName] Setup Wizard") + maint.text("BodyText", 15, 63, 330, 42, 3, + "Select whether you want to repair or remove [ProductName].") + g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3, + "MaintenanceForm_Action", "", "Next") + #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]") + g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]") + g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]") + + maint.back("< Back", None, active=False) + c=maint.next("Finish", "Cancel") + # Change installation: Change progress dialog to "Change", then ask + # for feature selection + #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1) + #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2) + + # Reinstall: Change progress dialog to "Repair", then invoke reinstall + # Also set list of reinstalled features to "ALL" + c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5) + c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6) + c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7) + c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8) + + # Uninstall: Change progress to "Remove", then invoke uninstall + # Also set list of removed features to "ALL" + c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11) + c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12) + c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13) + c.event("Remove", 
"ALL", 'MaintenanceForm_Action="Remove"', 14) + + # Close dialog when maintenance action scheduled + c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20) + #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21) + + maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg") + + def get_installer_filename(self, fullname): + # Factored out to allow overriding in subclasses + if self.target_version: + base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name, + self.target_version) + else: + base_name = "%s.%s.msi" % (fullname, self.plat_name) + installer_name = os.path.join(self.dist_dir, base_name) + return installer_name diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py new file mode 100644 index 00000000..550cbfa1 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py @@ -0,0 +1,579 @@ +"""distutils.command.bdist_rpm + +Implements the Distutils 'bdist_rpm' command (create RPM source and binary +distributions).""" + +import subprocess, sys, os +from distutils.core import Command +from distutils.debug import DEBUG +from distutils.file_util import write_file +from distutils.errors import * +from distutils.sysconfig import get_python_version +from distutils import log + +class bdist_rpm(Command): + + description = "create an RPM distribution" + + user_options = [ + ('bdist-base=', None, + "base directory for creating built distributions"), + ('rpm-base=', None, + "base directory for creating RPMs (defaults to \"rpm\" under " + "--bdist-base; must be specified for RPM 2)"), + ('dist-dir=', 'd', + "directory to put final RPM files in " + "(and .spec files if --spec-only)"), + ('python=', None, + "path to Python interpreter to hard-code in the .spec file " + "(default: \"python\")"), + ('fix-python', None, + "hard-code the exact path to the current Python interpreter in " + "the .spec file"), + ('spec-only', None, + "only regenerate spec file"), + ('source-only', None, + "only generate source RPM"), + ('binary-only', None, + "only generate binary RPM"), + ('use-bzip2', None, + "use bzip2 instead of gzip to create source distribution"), + + # More meta-data: too RPM-specific to put in the setup script, + # but needs to go in the .spec file -- so we make these options + # to "bdist_rpm". The idea is that packagers would put this + # info in setup.cfg, although they are of course free to + # supply it on the command line. + ('distribution-name=', None, + "name of the (Linux) distribution to which this " + "RPM applies (*not* the name of the module distribution!)"), + ('group=', None, + "package classification [default: \"Development/Libraries\"]"), + ('release=', None, + "RPM release number"), + ('serial=', None, + "RPM serial number"), + ('vendor=', None, + "RPM \"vendor\" (eg. \"Joe Blow \") " + "[default: maintainer or author from setup script]"), + ('packager=', None, + "RPM packager (eg. 
\"Jane Doe \") " + "[default: vendor]"), + ('doc-files=', None, + "list of documentation files (space or comma-separated)"), + ('changelog=', None, + "RPM changelog"), + ('icon=', None, + "name of icon file"), + ('provides=', None, + "capabilities provided by this package"), + ('requires=', None, + "capabilities required by this package"), + ('conflicts=', None, + "capabilities which conflict with this package"), + ('build-requires=', None, + "capabilities required to build this package"), + ('obsoletes=', None, + "capabilities made obsolete by this package"), + ('no-autoreq', None, + "do not automatically calculate dependencies"), + + # Actions to take when building RPM + ('keep-temp', 'k', + "don't clean up RPM build directory"), + ('no-keep-temp', None, + "clean up RPM build directory [default]"), + ('use-rpm-opt-flags', None, + "compile with RPM_OPT_FLAGS when building from source RPM"), + ('no-rpm-opt-flags', None, + "do not pass any RPM CFLAGS to compiler"), + ('rpm3-mode', None, + "RPM 3 compatibility mode (default)"), + ('rpm2-mode', None, + "RPM 2 compatibility mode"), + + # Add the hooks necessary for specifying custom scripts + ('prep-script=', None, + "Specify a script for the PREP phase of RPM building"), + ('build-script=', None, + "Specify a script for the BUILD phase of RPM building"), + + ('pre-install=', None, + "Specify a script for the pre-INSTALL phase of RPM building"), + ('install-script=', None, + "Specify a script for the INSTALL phase of RPM building"), + ('post-install=', None, + "Specify a script for the post-INSTALL phase of RPM building"), + + ('pre-uninstall=', None, + "Specify a script for the pre-UNINSTALL phase of RPM building"), + ('post-uninstall=', None, + "Specify a script for the post-UNINSTALL phase of RPM building"), + + ('clean-script=', None, + "Specify a script for the CLEAN phase of RPM building"), + + ('verify-script=', None, + "Specify a script for the VERIFY phase of the RPM build"), + + # Allow a packager to explicitly force an architecture + ('force-arch=', None, + "Force an architecture onto the RPM build process"), + + ('quiet', 'q', + "Run the INSTALL phase of RPM building in quiet mode"), + ] + + boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode', + 'no-autoreq', 'quiet'] + + negative_opt = {'no-keep-temp': 'keep-temp', + 'no-rpm-opt-flags': 'use-rpm-opt-flags', + 'rpm2-mode': 'rpm3-mode'} + + + def initialize_options(self): + self.bdist_base = None + self.rpm_base = None + self.dist_dir = None + self.python = None + self.fix_python = None + self.spec_only = None + self.binary_only = None + self.source_only = None + self.use_bzip2 = None + + self.distribution_name = None + self.group = None + self.release = None + self.serial = None + self.vendor = None + self.packager = None + self.doc_files = None + self.changelog = None + self.icon = None + + self.prep_script = None + self.build_script = None + self.install_script = None + self.clean_script = None + self.verify_script = None + self.pre_install = None + self.post_install = None + self.pre_uninstall = None + self.post_uninstall = None + self.prep = None + self.provides = None + self.requires = None + self.conflicts = None + self.build_requires = None + self.obsoletes = None + + self.keep_temp = 0 + self.use_rpm_opt_flags = 1 + self.rpm3_mode = 1 + self.no_autoreq = 0 + + self.force_arch = None + self.quiet = 0 + + def finalize_options(self): + self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) + if self.rpm_base is None: + if not self.rpm3_mode: + raise 
DistutilsOptionError( + "you must specify --rpm-base in RPM 2 mode") + self.rpm_base = os.path.join(self.bdist_base, "rpm") + + if self.python is None: + if self.fix_python: + self.python = sys.executable + else: + self.python = "python3" + elif self.fix_python: + raise DistutilsOptionError( + "--python and --fix-python are mutually exclusive options") + + if os.name != 'posix': + raise DistutilsPlatformError("don't know how to create RPM " + "distributions on platform %s" % os.name) + if self.binary_only and self.source_only: + raise DistutilsOptionError( + "cannot supply both '--source-only' and '--binary-only'") + + # don't pass CFLAGS to pure python distributions + if not self.distribution.has_ext_modules(): + self.use_rpm_opt_flags = 0 + + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + self.finalize_package_data() + + def finalize_package_data(self): + self.ensure_string('group', "Development/Libraries") + self.ensure_string('vendor', + "%s <%s>" % (self.distribution.get_contact(), + self.distribution.get_contact_email())) + self.ensure_string('packager') + self.ensure_string_list('doc_files') + if isinstance(self.doc_files, list): + for readme in ('README', 'README.txt'): + if os.path.exists(readme) and readme not in self.doc_files: + self.doc_files.append(readme) + + self.ensure_string('release', "1") + self.ensure_string('serial') # should it be an int? + + self.ensure_string('distribution_name') + + self.ensure_string('changelog') + # Format changelog correctly + self.changelog = self._format_changelog(self.changelog) + + self.ensure_filename('icon') + + self.ensure_filename('prep_script') + self.ensure_filename('build_script') + self.ensure_filename('install_script') + self.ensure_filename('clean_script') + self.ensure_filename('verify_script') + self.ensure_filename('pre_install') + self.ensure_filename('post_install') + self.ensure_filename('pre_uninstall') + self.ensure_filename('post_uninstall') + + # XXX don't forget we punted on summaries and descriptions -- they + # should be handled here eventually! + + # Now *this* is some meta-data that belongs in the setup script... + self.ensure_string_list('provides') + self.ensure_string_list('requires') + self.ensure_string_list('conflicts') + self.ensure_string_list('build_requires') + self.ensure_string_list('obsoletes') + + self.ensure_string('force_arch') + + def run(self): + if DEBUG: + print("before _get_package_data():") + print("vendor =", self.vendor) + print("packager =", self.packager) + print("doc_files =", self.doc_files) + print("changelog =", self.changelog) + + # make directories + if self.spec_only: + spec_dir = self.dist_dir + self.mkpath(spec_dir) + else: + rpm_dir = {} + for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'): + rpm_dir[d] = os.path.join(self.rpm_base, d) + self.mkpath(rpm_dir[d]) + spec_dir = rpm_dir['SPECS'] + + # Spec file goes into 'dist_dir' if '--spec-only specified', + # build/rpm. otherwise. + spec_path = os.path.join(spec_dir, + "%s.spec" % self.distribution.get_name()) + self.execute(write_file, + (spec_path, + self._make_spec_file()), + "writing '%s'" % spec_path) + + if self.spec_only: # stop if requested + return + + # Make a source distribution and copy to SOURCES directory with + # optional icon. 
+ saved_dist_files = self.distribution.dist_files[:] + sdist = self.reinitialize_command('sdist') + if self.use_bzip2: + sdist.formats = ['bztar'] + else: + sdist.formats = ['gztar'] + self.run_command('sdist') + self.distribution.dist_files = saved_dist_files + + source = sdist.get_archive_files()[0] + source_dir = rpm_dir['SOURCES'] + self.copy_file(source, source_dir) + + if self.icon: + if os.path.exists(self.icon): + self.copy_file(self.icon, source_dir) + else: + raise DistutilsFileError( + "icon file '%s' does not exist" % self.icon) + + # build package + log.info("building RPMs") + rpm_cmd = ['rpmbuild'] + + if self.source_only: # what kind of RPMs? + rpm_cmd.append('-bs') + elif self.binary_only: + rpm_cmd.append('-bb') + else: + rpm_cmd.append('-ba') + rpm_cmd.extend(['--define', '__python %s' % self.python]) + if self.rpm3_mode: + rpm_cmd.extend(['--define', + '_topdir %s' % os.path.abspath(self.rpm_base)]) + if not self.keep_temp: + rpm_cmd.append('--clean') + + if self.quiet: + rpm_cmd.append('--quiet') + + rpm_cmd.append(spec_path) + # Determine the binary rpm names that should be built out of this spec + # file + # Note that some of these may not be really built (if the file + # list is empty) + nvr_string = "%{name}-%{version}-%{release}" + src_rpm = nvr_string + ".src.rpm" + non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm" + q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % ( + src_rpm, non_src_rpm, spec_path) + + out = os.popen(q_cmd) + try: + binary_rpms = [] + source_rpm = None + while True: + line = out.readline() + if not line: + break + l = line.strip().split() + assert(len(l) == 2) + binary_rpms.append(l[1]) + # The source rpm is named after the first entry in the spec file + if source_rpm is None: + source_rpm = l[0] + + status = out.close() + if status: + raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd)) + + finally: + out.close() + + self.spawn(rpm_cmd) + + if not self.dry_run: + if self.distribution.has_ext_modules(): + pyversion = get_python_version() + else: + pyversion = 'any' + + if not self.binary_only: + srpm = os.path.join(rpm_dir['SRPMS'], source_rpm) + assert(os.path.exists(srpm)) + self.move_file(srpm, self.dist_dir) + filename = os.path.join(self.dist_dir, source_rpm) + self.distribution.dist_files.append( + ('bdist_rpm', pyversion, filename)) + + if not self.source_only: + for rpm in binary_rpms: + rpm = os.path.join(rpm_dir['RPMS'], rpm) + if os.path.exists(rpm): + self.move_file(rpm, self.dist_dir) + filename = os.path.join(self.dist_dir, + os.path.basename(rpm)) + self.distribution.dist_files.append( + ('bdist_rpm', pyversion, filename)) + + def _dist_path(self, path): + return os.path.join(self.dist_dir, os.path.basename(path)) + + def _make_spec_file(self): + """Generate the text of an RPM spec file and return it as a + list of strings (one per line). 
+ """ + # definitions and headers + spec_file = [ + '%define name ' + self.distribution.get_name(), + '%define version ' + self.distribution.get_version().replace('-','_'), + '%define unmangled_version ' + self.distribution.get_version(), + '%define release ' + self.release.replace('-','_'), + '', + 'Summary: ' + self.distribution.get_description(), + ] + + # Workaround for #14443 which affects some RPM based systems such as + # RHEL6 (and probably derivatives) + vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}') + # Generate a potential replacement value for __os_install_post (whilst + # normalizing the whitespace to simplify the test for whether the + # invocation of brp-python-bytecompile passes in __python): + vendor_hook = '\n'.join([' %s \\' % line.strip() + for line in vendor_hook.splitlines()]) + problem = "brp-python-bytecompile \\\n" + fixed = "brp-python-bytecompile %{__python} \\\n" + fixed_hook = vendor_hook.replace(problem, fixed) + if fixed_hook != vendor_hook: + spec_file.append('# Workaround for http://bugs.python.org/issue14443') + spec_file.append('%define __os_install_post ' + fixed_hook + '\n') + + # put locale summaries into spec file + # XXX not supported for now (hard to put a dictionary + # in a config file -- arg!) + #for locale in self.summaries.keys(): + # spec_file.append('Summary(%s): %s' % (locale, + # self.summaries[locale])) + + spec_file.extend([ + 'Name: %{name}', + 'Version: %{version}', + 'Release: %{release}',]) + + # XXX yuck! this filename is available from the "sdist" command, + # but only after it has run: and we create the spec file before + # running "sdist", in case of --spec-only. + if self.use_bzip2: + spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2') + else: + spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz') + + spec_file.extend([ + 'License: ' + self.distribution.get_license(), + 'Group: ' + self.group, + 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', + 'Prefix: %{_prefix}', ]) + + if not self.force_arch: + # noarch if no extension modules + if not self.distribution.has_ext_modules(): + spec_file.append('BuildArch: noarch') + else: + spec_file.append( 'BuildArch: %s' % self.force_arch ) + + for field in ('Vendor', + 'Packager', + 'Provides', + 'Requires', + 'Conflicts', + 'Obsoletes', + ): + val = getattr(self, field.lower()) + if isinstance(val, list): + spec_file.append('%s: %s' % (field, ' '.join(val))) + elif val is not None: + spec_file.append('%s: %s' % (field, val)) + + + if self.distribution.get_url() != 'UNKNOWN': + spec_file.append('Url: ' + self.distribution.get_url()) + + if self.distribution_name: + spec_file.append('Distribution: ' + self.distribution_name) + + if self.build_requires: + spec_file.append('BuildRequires: ' + + ' '.join(self.build_requires)) + + if self.icon: + spec_file.append('Icon: ' + os.path.basename(self.icon)) + + if self.no_autoreq: + spec_file.append('AutoReq: 0') + + spec_file.extend([ + '', + '%description', + self.distribution.get_long_description() + ]) + + # put locale descriptions into spec file + # XXX again, suppressed because config file syntax doesn't + # easily support this ;-( + #for locale in self.descriptions.keys(): + # spec_file.extend([ + # '', + # '%description -l ' + locale, + # self.descriptions[locale], + # ]) + + # rpm scripts + # figure out default build script + def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0])) + def_build = "%s build" % def_setup_call + if self.use_rpm_opt_flags: + def_build 
= 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build + + # insert contents of files + + # XXX this is kind of misleading: user-supplied options are files + # that we open and interpolate into the spec file, but the defaults + # are just text that we drop in as-is. Hmmm. + + install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT ' + '--record=INSTALLED_FILES') % def_setup_call + + script_options = [ + ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"), + ('build', 'build_script', def_build), + ('install', 'install_script', install_cmd), + ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"), + ('verifyscript', 'verify_script', None), + ('pre', 'pre_install', None), + ('post', 'post_install', None), + ('preun', 'pre_uninstall', None), + ('postun', 'post_uninstall', None), + ] + + for (rpm_opt, attr, default) in script_options: + # Insert contents of file referred to, if no file is referred to + # use 'default' as contents of script + val = getattr(self, attr) + if val or default: + spec_file.extend([ + '', + '%' + rpm_opt,]) + if val: + with open(val) as f: + spec_file.extend(f.read().split('\n')) + else: + spec_file.append(default) + + + # files section + spec_file.extend([ + '', + '%files -f INSTALLED_FILES', + '%defattr(-,root,root)', + ]) + + if self.doc_files: + spec_file.append('%doc ' + ' '.join(self.doc_files)) + + if self.changelog: + spec_file.extend([ + '', + '%changelog',]) + spec_file.extend(self.changelog) + + return spec_file + + def _format_changelog(self, changelog): + """Format the changelog correctly and convert it to a list of strings + """ + if not changelog: + return changelog + new_changelog = [] + for line in changelog.strip().split('\n'): + line = line.strip() + if line[0] == '*': + new_changelog.extend(['', line]) + elif line[0] == '-': + new_changelog.append(line) + else: + new_changelog.append(' ' + line) + + # strip trailing newline inserted by first changelog entry + if not new_changelog[0]: + del new_changelog[0] + + return new_changelog diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_wininst.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_wininst.py new file mode 100644 index 00000000..0e9ddaa2 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_wininst.py @@ -0,0 +1,377 @@ +"""distutils.command.bdist_wininst + +Implements the Distutils 'bdist_wininst' command: create a windows installer +exe-program.""" + +import os +import sys +import warnings +from distutils.core import Command +from distutils.util import get_platform +from distutils.dir_util import remove_tree +from distutils.errors import * +from distutils.sysconfig import get_python_version +from distutils import log + +class bdist_wininst(Command): + + description = "create an executable installer for MS Windows" + + user_options = [('bdist-dir=', None, + "temporary directory for creating the distribution"), + ('plat-name=', 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform()), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('target-version=', None, + "require a specific python version" + + " on the target system"), + ('no-target-compile', 'c', + "do not compile .py to .pyc on the target system"), + ('no-target-optimize', 'o', + "do not compile .py to .pyo (optimized) " + "on the target system"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('bitmap=', 'b', + "bitmap to use for the installer 
instead of python-powered logo"), + ('title=', 't', + "title to display on the installer background instead of default"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ('install-script=', None, + "basename of installation script to be run after " + "installation or before deinstallation"), + ('pre-install-script=', None, + "Fully qualified filename of a script to be run before " + "any files are installed. This script need not be in the " + "distribution"), + ('user-access-control=', None, + "specify Vista's UAC handling - 'none'/default=no " + "handling, 'auto'=use UAC if target Python installed for " + "all users, 'force'=always use UAC"), + ] + + boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', + 'skip-build'] + + # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows + _unsupported = (sys.platform != "win32") + + def __init__(self, *args, **kw): + super().__init__(*args, **kw) + warnings.warn("bdist_wininst command is deprecated since Python 3.8, " + "use bdist_wheel (wheel packages) instead", + DeprecationWarning, 2) + + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None + self.keep_temp = 0 + self.no_target_compile = 0 + self.no_target_optimize = 0 + self.target_version = None + self.dist_dir = None + self.bitmap = None + self.title = None + self.skip_build = None + self.install_script = None + self.pre_install_script = None + self.user_access_control = None + + + def finalize_options(self): + self.set_undefined_options('bdist', ('skip_build', 'skip_build')) + + if self.bdist_dir is None: + if self.skip_build and self.plat_name: + # If build is skipped and plat_name is overridden, bdist will + # not see the correct 'plat_name' - so set that up manually. 
+ bdist = self.distribution.get_command_obj('bdist') + bdist.plat_name = self.plat_name + # next the command will be initialized using that name + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'wininst') + + if not self.target_version: + self.target_version = "" + + if not self.skip_build and self.distribution.has_ext_modules(): + short_version = get_python_version() + if self.target_version and self.target_version != short_version: + raise DistutilsOptionError( + "target version can only be %s, or the '--skip-build'" \ + " option must be specified" % (short_version,)) + self.target_version = short_version + + self.set_undefined_options('bdist', + ('dist_dir', 'dist_dir'), + ('plat_name', 'plat_name'), + ) + + if self.install_script: + for script in self.distribution.scripts: + if self.install_script == os.path.basename(script): + break + else: + raise DistutilsOptionError( + "install_script '%s' not found in scripts" + % self.install_script) + + def run(self): + if (sys.platform != "win32" and + (self.distribution.has_ext_modules() or + self.distribution.has_c_libraries())): + raise DistutilsPlatformError \ + ("distribution contains extensions and/or C libraries; " + "must be compiled on a Windows 32 platform") + + if not self.skip_build: + self.run_command('build') + + install = self.reinitialize_command('install', reinit_subcommands=1) + install.root = self.bdist_dir + install.skip_build = self.skip_build + install.warn_dir = 0 + install.plat_name = self.plat_name + + install_lib = self.reinitialize_command('install_lib') + # we do not want to include pyc or pyo files + install_lib.compile = 0 + install_lib.optimize = 0 + + if self.distribution.has_ext_modules(): + # If we are building an installer for a Python version other + # than the one we are currently running, then we need to ensure + # our build_lib reflects the other Python version rather than ours. + # Note that for target_version!=sys.version, we must have skipped the + # build step, so there is no issue with enforcing the build of this + # version. + target_version = self.target_version + if not target_version: + assert self.skip_build, "Should have already checked this" + target_version = '%d.%d' % sys.version_info[:2] + plat_specifier = ".%s-%s" % (self.plat_name, target_version) + build = self.get_finalized_command('build') + build.build_lib = os.path.join(build.build_base, + 'lib' + plat_specifier) + + # Use a custom scheme for the zip-file, because we have to decide + # at installation time which scheme to use. + for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'): + value = key.upper() + if key == 'headers': + value = value + '/Include/$dist_name' + setattr(install, + 'install_' + key, + value) + + log.info("installing to %s", self.bdist_dir) + install.ensure_finalized() + + # avoid warning of 'install_lib' about installing + # into a directory not in sys.path + sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB')) + + install.run() + + del sys.path[0] + + # And make an archive relative to the root of the + # pseudo-installation tree. 
+ from tempfile import mktemp + archive_basename = mktemp() + fullname = self.distribution.get_fullname() + arcname = self.make_archive(archive_basename, "zip", + root_dir=self.bdist_dir) + # create an exe containing the zip-file + self.create_exe(arcname, fullname, self.bitmap) + if self.distribution.has_ext_modules(): + pyversion = get_python_version() + else: + pyversion = 'any' + self.distribution.dist_files.append(('bdist_wininst', pyversion, + self.get_installer_filename(fullname))) + # remove the zip-file again + log.debug("removing temporary file '%s'", arcname) + os.remove(arcname) + + if not self.keep_temp: + remove_tree(self.bdist_dir, dry_run=self.dry_run) + + def get_inidata(self): + # Return data describing the installation. + lines = [] + metadata = self.distribution.metadata + + # Write the [metadata] section. + lines.append("[metadata]") + + # 'info' will be displayed in the installer's dialog box, + # describing the items to be installed. + info = (metadata.long_description or '') + '\n' + + # Escape newline characters + def escape(s): + return s.replace("\n", "\\n") + + for name in ["author", "author_email", "description", "maintainer", + "maintainer_email", "name", "url", "version"]: + data = getattr(metadata, name, "") + if data: + info = info + ("\n %s: %s" % \ + (name.capitalize(), escape(data))) + lines.append("%s=%s" % (name, escape(data))) + + # The [setup] section contains entries controlling + # the installer runtime. + lines.append("\n[Setup]") + if self.install_script: + lines.append("install_script=%s" % self.install_script) + lines.append("info=%s" % escape(info)) + lines.append("target_compile=%d" % (not self.no_target_compile)) + lines.append("target_optimize=%d" % (not self.no_target_optimize)) + if self.target_version: + lines.append("target_version=%s" % self.target_version) + if self.user_access_control: + lines.append("user_access_control=%s" % self.user_access_control) + + title = self.title or self.distribution.get_fullname() + lines.append("title=%s" % escape(title)) + import time + import distutils + build_info = "Built %s with distutils-%s" % \ + (time.ctime(time.time()), distutils.__version__) + lines.append("build_info=%s" % build_info) + return "\n".join(lines) + + def create_exe(self, arcname, fullname, bitmap=None): + import struct + + self.mkpath(self.dist_dir) + + cfgdata = self.get_inidata() + + installer_name = self.get_installer_filename(fullname) + self.announce("creating %s" % installer_name) + + if bitmap: + with open(bitmap, "rb") as f: + bitmapdata = f.read() + bitmaplen = len(bitmapdata) + else: + bitmaplen = 0 + + with open(installer_name, "wb") as file: + file.write(self.get_exe_bytes()) + if bitmap: + file.write(bitmapdata) + + # Convert cfgdata from unicode to ascii, mbcs encoded + if isinstance(cfgdata, str): + cfgdata = cfgdata.encode("mbcs") + + # Append the pre-install script + cfgdata = cfgdata + b"\0" + if self.pre_install_script: + # We need to normalize newlines, so we open in text mode and + # convert back to bytes. "latin-1" simply avoids any possible + # failures. + with open(self.pre_install_script, "r", + encoding="latin-1") as script: + script_data = script.read().encode("latin-1") + cfgdata = cfgdata + script_data + b"\n\0" + else: + # empty pre-install script + cfgdata = cfgdata + b"\0" + file.write(cfgdata) + + # The 'magic number' 0x1234567B is used to make sure that the + # binary layout of 'cfgdata' is what the wininst.exe binary + # expects. 
If the layout changes, increment that number, make + # the corresponding changes to the wininst.exe sources, and + # recompile them. + header = struct.pack("' under the base build directory. We only use one of + # them for a given distribution, though -- + if self.build_purelib is None: + self.build_purelib = os.path.join(self.build_base, 'lib') + if self.build_platlib is None: + self.build_platlib = os.path.join(self.build_base, + 'lib' + plat_specifier) + + # 'build_lib' is the actual directory that we will use for this + # particular module distribution -- if user didn't supply it, pick + # one of 'build_purelib' or 'build_platlib'. + if self.build_lib is None: + if self.distribution.ext_modules: + self.build_lib = self.build_platlib + else: + self.build_lib = self.build_purelib + + # 'build_temp' -- temporary directory for compiler turds, + # "build/temp." + if self.build_temp is None: + self.build_temp = os.path.join(self.build_base, + 'temp' + plat_specifier) + if self.build_scripts is None: + self.build_scripts = os.path.join(self.build_base, + 'scripts-%d.%d' % sys.version_info[:2]) + + if self.executable is None and sys.executable: + self.executable = os.path.normpath(sys.executable) + + if isinstance(self.parallel, str): + try: + self.parallel = int(self.parallel) + except ValueError: + raise DistutilsOptionError("parallel should be an integer") + + def run(self): + # Run all relevant sub-commands. This will be some subset of: + # - build_py - pure Python modules + # - build_clib - standalone C libraries + # - build_ext - Python extensions + # - build_scripts - (Python) scripts + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + + # -- Predicates for the sub-command list --------------------------- + + def has_pure_modules(self): + return self.distribution.has_pure_modules() + + def has_c_libraries(self): + return self.distribution.has_c_libraries() + + def has_ext_modules(self): + return self.distribution.has_ext_modules() + + def has_scripts(self): + return self.distribution.has_scripts() + + + sub_commands = [('build_py', has_pure_modules), + ('build_clib', has_c_libraries), + ('build_ext', has_ext_modules), + ('build_scripts', has_scripts), + ] diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/build_clib.py b/venv/Lib/site-packages/setuptools/_distutils/command/build_clib.py new file mode 100644 index 00000000..3e20ef23 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/build_clib.py @@ -0,0 +1,209 @@ +"""distutils.command.build_clib + +Implements the Distutils 'build_clib' command, to build a C/C++ library +that is included in the module distribution and needed by an extension +module.""" + + +# XXX this module has *lots* of code ripped-off quite transparently from +# build_ext.py -- not surprisingly really, as the work required to build +# a static library from a collection of C source files is not really all +# that different from what's required to build a shared object file from +# a collection of C source files. Nevertheless, I haven't done the +# necessary refactoring to account for the overlap in code between the +# two modules, mainly because a number of subtle details changed in the +# cut 'n paste. Sigh. 
+ +import os +from distutils.core import Command +from distutils.errors import * +from distutils.sysconfig import customize_compiler +from distutils import log + +def show_compilers(): + from distutils.ccompiler import show_compilers + show_compilers() + + +class build_clib(Command): + + description = "build C/C++ libraries used by Python extensions" + + user_options = [ + ('build-clib=', 'b', + "directory to build C/C++ libraries to"), + ('build-temp=', 't', + "directory to put temporary build by-products"), + ('debug', 'g', + "compile with debugging information"), + ('force', 'f', + "forcibly build everything (ignore file timestamps)"), + ('compiler=', 'c', + "specify the compiler type"), + ] + + boolean_options = ['debug', 'force'] + + help_options = [ + ('help-compiler', None, + "list available compilers", show_compilers), + ] + + def initialize_options(self): + self.build_clib = None + self.build_temp = None + + # List of libraries to build + self.libraries = None + + # Compilation options for all libraries + self.include_dirs = None + self.define = None + self.undef = None + self.debug = None + self.force = 0 + self.compiler = None + + + def finalize_options(self): + # This might be confusing: both build-clib and build-temp default + # to build-temp as defined by the "build" command. This is because + # I think that C libraries are really just temporary build + # by-products, at least from the point of view of building Python + # extensions -- but I want to keep my options open. + self.set_undefined_options('build', + ('build_temp', 'build_clib'), + ('build_temp', 'build_temp'), + ('compiler', 'compiler'), + ('debug', 'debug'), + ('force', 'force')) + + self.libraries = self.distribution.libraries + if self.libraries: + self.check_library_list(self.libraries) + + if self.include_dirs is None: + self.include_dirs = self.distribution.include_dirs or [] + if isinstance(self.include_dirs, str): + self.include_dirs = self.include_dirs.split(os.pathsep) + + # XXX same as for build_ext -- what about 'self.define' and + # 'self.undef' ? + + + def run(self): + if not self.libraries: + return + + # Yech -- this is cut 'n pasted from build_ext.py! + from distutils.ccompiler import new_compiler + self.compiler = new_compiler(compiler=self.compiler, + dry_run=self.dry_run, + force=self.force) + customize_compiler(self.compiler) + + if self.include_dirs is not None: + self.compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name,value) in self.define: + self.compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + self.compiler.undefine_macro(macro) + + self.build_libraries(self.libraries) + + + def check_library_list(self, libraries): + """Ensure that the list of libraries is valid. + + `library` is presumably provided as a command option 'libraries'. + This method checks that it is a list of 2-tuples, where the tuples + are (library_name, build_info_dict). + + Raise DistutilsSetupError if the structure is invalid anywhere; + just returns otherwise. 
+ """ + if not isinstance(libraries, list): + raise DistutilsSetupError( + "'libraries' option must be a list of tuples") + + for lib in libraries: + if not isinstance(lib, tuple) and len(lib) != 2: + raise DistutilsSetupError( + "each element of 'libraries' must a 2-tuple") + + name, build_info = lib + + if not isinstance(name, str): + raise DistutilsSetupError( + "first element of each tuple in 'libraries' " + "must be a string (the library name)") + + if '/' in name or (os.sep != '/' and os.sep in name): + raise DistutilsSetupError("bad library name '%s': " + "may not contain directory separators" % lib[0]) + + if not isinstance(build_info, dict): + raise DistutilsSetupError( + "second element of each tuple in 'libraries' " + "must be a dictionary (build info)") + + + def get_library_names(self): + # Assume the library list is valid -- 'check_library_list()' is + # called from 'finalize_options()', so it should be! + if not self.libraries: + return None + + lib_names = [] + for (lib_name, build_info) in self.libraries: + lib_names.append(lib_name) + return lib_names + + + def get_source_files(self): + self.check_library_list(self.libraries) + filenames = [] + for (lib_name, build_info) in self.libraries: + sources = build_info.get('sources') + if sources is None or not isinstance(sources, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name) + + filenames.extend(sources) + return filenames + + + def build_libraries(self, libraries): + for (lib_name, build_info) in libraries: + sources = build_info.get('sources') + if sources is None or not isinstance(sources, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name) + sources = list(sources) + + log.info("building '%s' library", lib_name) + + # First, compile the source code to object files in the library + # directory. (This should probably change to putting object + # files in a temporary build directory.) + macros = build_info.get('macros') + include_dirs = build_info.get('include_dirs') + objects = self.compiler.compile(sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=include_dirs, + debug=self.debug) + + # Now "link" the object files together into a static library. + # (On Unix at least, this isn't really linking -- it just + # builds an archive. Whatever.) + self.compiler.create_static_lib(objects, lib_name, + output_dir=self.build_clib, + debug=self.debug) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/build_ext.py b/venv/Lib/site-packages/setuptools/_distutils/command/build_ext.py new file mode 100644 index 00000000..bbb34833 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/build_ext.py @@ -0,0 +1,755 @@ +"""distutils.command.build_ext + +Implements the Distutils 'build_ext' command, for building extension +modules (currently limited to C extensions, should accommodate C++ +extensions ASAP).""" + +import contextlib +import os +import re +import sys +from distutils.core import Command +from distutils.errors import * +from distutils.sysconfig import customize_compiler, get_python_version +from distutils.sysconfig import get_config_h_filename +from distutils.dep_util import newer_group +from distutils.extension import Extension +from distutils.util import get_platform +from distutils import log +from . 
import py37compat + +from site import USER_BASE + +# An extension name is just a dot-separated list of Python NAMEs (ie. +# the same as a fully-qualified module name). +extension_name_re = re.compile \ + (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') + + +def show_compilers (): + from distutils.ccompiler import show_compilers + show_compilers() + + +class build_ext(Command): + + description = "build C/C++ extensions (compile/link to build directory)" + + # XXX thoughts on how to deal with complex command-line options like + # these, i.e. how to make it so fancy_getopt can suck them off the + # command line and make it look like setup.py defined the appropriate + # lists of tuples of what-have-you. + # - each command needs a callback to process its command-line options + # - Command.__init__() needs access to its share of the whole + # command line (must ultimately come from + # Distribution.parse_command_line()) + # - it then calls the current command class' option-parsing + # callback to deal with weird options like -D, which have to + # parse the option text and churn out some custom data + # structure + # - that data structure (in this case, a list of 2-tuples) + # will then be present in the command object by the time + # we get to finalize_options() (i.e. the constructor + # takes care of both command-line and client options + # in between initialize_options() and finalize_options()) + + sep_by = " (separated by '%s')" % os.pathsep + user_options = [ + ('build-lib=', 'b', + "directory for compiled extension modules"), + ('build-temp=', 't', + "directory for temporary files (build by-products)"), + ('plat-name=', 'p', + "platform name to cross-compile for, if supported " + "(default: %s)" % get_platform()), + ('inplace', 'i', + "ignore build-lib and put compiled extensions into the source " + + "directory alongside your pure Python modules"), + ('include-dirs=', 'I', + "list of directories to search for header files" + sep_by), + ('define=', 'D', + "C preprocessor macros to define"), + ('undef=', 'U', + "C preprocessor macros to undefine"), + ('libraries=', 'l', + "external C libraries to link with"), + ('library-dirs=', 'L', + "directories to search for external C libraries" + sep_by), + ('rpath=', 'R', + "directories to search for shared C libraries at runtime"), + ('link-objects=', 'O', + "extra explicit link objects to include in the link"), + ('debug', 'g', + "compile/link with debugging information"), + ('force', 'f', + "forcibly build everything (ignore file timestamps)"), + ('compiler=', 'c', + "specify the compiler type"), + ('parallel=', 'j', + "number of parallel build jobs"), + ('swig-cpp', None, + "make SWIG create C++ files (default is C)"), + ('swig-opts=', None, + "list of SWIG command line options"), + ('swig=', None, + "path to the SWIG executable"), + ('user', None, + "add user include, library and rpath") + ] + + boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user'] + + help_options = [ + ('help-compiler', None, + "list available compilers", show_compilers), + ] + + def initialize_options(self): + self.extensions = None + self.build_lib = None + self.plat_name = None + self.build_temp = None + self.inplace = 0 + self.package = None + + self.include_dirs = None + self.define = None + self.undef = None + self.libraries = None + self.library_dirs = None + self.rpath = None + self.link_objects = None + self.debug = None + self.force = None + self.compiler = None + self.swig = None + self.swig_cpp = None + self.swig_opts = None + self.user = None 
+ self.parallel = None + + def finalize_options(self): + from distutils import sysconfig + + self.set_undefined_options('build', + ('build_lib', 'build_lib'), + ('build_temp', 'build_temp'), + ('compiler', 'compiler'), + ('debug', 'debug'), + ('force', 'force'), + ('parallel', 'parallel'), + ('plat_name', 'plat_name'), + ) + + if self.package is None: + self.package = self.distribution.ext_package + + self.extensions = self.distribution.ext_modules + + # Make sure Python's include directories (for Python.h, pyconfig.h, + # etc.) are in the include search path. + py_include = sysconfig.get_python_inc() + plat_py_include = sysconfig.get_python_inc(plat_specific=1) + if self.include_dirs is None: + self.include_dirs = self.distribution.include_dirs or [] + if isinstance(self.include_dirs, str): + self.include_dirs = self.include_dirs.split(os.pathsep) + + # If in a virtualenv, add its include directory + # Issue 16116 + if sys.exec_prefix != sys.base_exec_prefix: + self.include_dirs.append(os.path.join(sys.exec_prefix, 'include')) + + # Put the Python "system" include dir at the end, so that + # any local include dirs take precedence. + self.include_dirs.extend(py_include.split(os.path.pathsep)) + if plat_py_include != py_include: + self.include_dirs.extend( + plat_py_include.split(os.path.pathsep)) + + self.ensure_string_list('libraries') + self.ensure_string_list('link_objects') + + # Life is easier if we're not forever checking for None, so + # simplify these options to empty lists if unset + if self.libraries is None: + self.libraries = [] + if self.library_dirs is None: + self.library_dirs = [] + elif isinstance(self.library_dirs, str): + self.library_dirs = self.library_dirs.split(os.pathsep) + + if self.rpath is None: + self.rpath = [] + elif isinstance(self.rpath, str): + self.rpath = self.rpath.split(os.pathsep) + + # for extensions under windows use different directories + # for Release and Debug builds. + # also Python's library directory must be appended to library_dirs + if os.name == 'nt': + # the 'libs' directory is for binary installs - we assume that + # must be the *native* platform. But we don't really support + # cross-compiling via a binary install anyway, so we let it go. 
+ self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) + if sys.base_exec_prefix != sys.prefix: # Issue 16116 + self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs')) + if self.debug: + self.build_temp = os.path.join(self.build_temp, "Debug") + else: + self.build_temp = os.path.join(self.build_temp, "Release") + + # Append the source distribution include and library directories, + # this allows distutils on windows to work in the source tree + self.include_dirs.append(os.path.dirname(get_config_h_filename())) + _sys_home = getattr(sys, '_home', None) + if _sys_home: + self.library_dirs.append(_sys_home) + + # Use the .lib files for the correct architecture + if self.plat_name == 'win32': + suffix = 'win32' + else: + # win-amd64 + suffix = self.plat_name[4:] + new_lib = os.path.join(sys.exec_prefix, 'PCbuild') + if suffix: + new_lib = os.path.join(new_lib, suffix) + self.library_dirs.append(new_lib) + + # For extensions under Cygwin, Python's library directory must be + # appended to library_dirs + if sys.platform[:6] == 'cygwin': + if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")): + # building third party extensions + self.library_dirs.append(os.path.join(sys.prefix, "lib", + "python" + get_python_version(), + "config")) + else: + # building python standard extensions + self.library_dirs.append('.') + + # For building extensions with a shared Python library, + # Python's library directory must be appended to library_dirs + # See Issues: #1600860, #4366 + if (sysconfig.get_config_var('Py_ENABLE_SHARED')): + if not sysconfig.python_build: + # building third party extensions + self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) + else: + # building python standard extensions + self.library_dirs.append('.') + + # The argument parsing will result in self.define being a string, but + # it has to be a list of 2-tuples. All the preprocessor symbols + # specified by the 'define' option will be set to '1'. Multiple + # symbols can be separated with commas. + + if self.define: + defines = self.define.split(',') + self.define = [(symbol, '1') for symbol in defines] + + # The option for macros to undefine is also a string from the + # option parsing, but has to be a list. Multiple symbols can also + # be separated with commas here. + if self.undef: + self.undef = self.undef.split(',') + + if self.swig_opts is None: + self.swig_opts = [] + else: + self.swig_opts = self.swig_opts.split(' ') + + # Finally add the user include and library directories if requested + if self.user: + user_include = os.path.join(USER_BASE, "include") + user_lib = os.path.join(USER_BASE, "lib") + if os.path.isdir(user_include): + self.include_dirs.append(user_include) + if os.path.isdir(user_lib): + self.library_dirs.append(user_lib) + self.rpath.append(user_lib) + + if isinstance(self.parallel, str): + try: + self.parallel = int(self.parallel) + except ValueError: + raise DistutilsOptionError("parallel should be an integer") + + def run(self): + from distutils.ccompiler import new_compiler + + # 'self.extensions', as supplied by setup.py, is a list of + # Extension instances. See the documentation for Extension (in + # distutils.extension) for details. + # + # For backwards compatibility with Distutils 0.8.2 and earlier, we + # also allow the 'extensions' list to be a list of tuples: + # (ext_name, build_info) + # where build_info is a dictionary containing everything that + # Extension instances do except the name, with a few things being + # differently named. 
We convert these 2-tuples to Extension + # instances as needed. + + if not self.extensions: + return + + # If we were asked to build any C/C++ libraries, make sure that the + # directory where we put them is in the library search path for + # linking extensions. + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.libraries.extend(build_clib.get_library_names() or []) + self.library_dirs.append(build_clib.build_clib) + + # Setup the CCompiler object that we'll use to do all the + # compiling and linking + self.compiler = new_compiler(compiler=self.compiler, + verbose=self.verbose, + dry_run=self.dry_run, + force=self.force) + customize_compiler(self.compiler) + # If we are cross-compiling, init the compiler now (if we are not + # cross-compiling, init would not hurt, but people may rely on + # late initialization of compiler even if they shouldn't...) + if os.name == 'nt' and self.plat_name != get_platform(): + self.compiler.initialize(self.plat_name) + + # And make sure that any compile/link-related options (which might + # come from the command-line or from the setup script) are set in + # that CCompiler object -- that way, they automatically apply to + # all compiling and linking done here. + if self.include_dirs is not None: + self.compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name, value) in self.define: + self.compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + self.compiler.undefine_macro(macro) + if self.libraries is not None: + self.compiler.set_libraries(self.libraries) + if self.library_dirs is not None: + self.compiler.set_library_dirs(self.library_dirs) + if self.rpath is not None: + self.compiler.set_runtime_library_dirs(self.rpath) + if self.link_objects is not None: + self.compiler.set_link_objects(self.link_objects) + + # Now actually compile and link everything. + self.build_extensions() + + def check_extensions_list(self, extensions): + """Ensure that the list of extensions (presumably provided as a + command option 'extensions') is valid, i.e. it is a list of + Extension objects. We also support the old-style list of 2-tuples, + where the tuples are (ext_name, build_info), which are converted to + Extension instances here. + + Raise DistutilsSetupError if the structure is invalid anywhere; + just returns otherwise. + """ + if not isinstance(extensions, list): + raise DistutilsSetupError( + "'ext_modules' option must be a list of Extension instances") + + for i, ext in enumerate(extensions): + if isinstance(ext, Extension): + continue # OK! 
(assume type-checking done + # by Extension constructor) + + if not isinstance(ext, tuple) or len(ext) != 2: + raise DistutilsSetupError( + "each element of 'ext_modules' option must be an " + "Extension instance or 2-tuple") + + ext_name, build_info = ext + + log.warn("old-style (ext_name, build_info) tuple found in " + "ext_modules for extension '%s' " + "-- please convert to Extension instance", ext_name) + + if not (isinstance(ext_name, str) and + extension_name_re.match(ext_name)): + raise DistutilsSetupError( + "first element of each tuple in 'ext_modules' " + "must be the extension name (a string)") + + if not isinstance(build_info, dict): + raise DistutilsSetupError( + "second element of each tuple in 'ext_modules' " + "must be a dictionary (build info)") + + # OK, the (ext_name, build_info) dict is type-safe: convert it + # to an Extension instance. + ext = Extension(ext_name, build_info['sources']) + + # Easy stuff: one-to-one mapping from dict elements to + # instance attributes. + for key in ('include_dirs', 'library_dirs', 'libraries', + 'extra_objects', 'extra_compile_args', + 'extra_link_args'): + val = build_info.get(key) + if val is not None: + setattr(ext, key, val) + + # Medium-easy stuff: same syntax/semantics, different names. + ext.runtime_library_dirs = build_info.get('rpath') + if 'def_file' in build_info: + log.warn("'def_file' element of build info dict " + "no longer supported") + + # Non-trivial stuff: 'macros' split into 'define_macros' + # and 'undef_macros'. + macros = build_info.get('macros') + if macros: + ext.define_macros = [] + ext.undef_macros = [] + for macro in macros: + if not (isinstance(macro, tuple) and len(macro) in (1, 2)): + raise DistutilsSetupError( + "'macros' element of build info dict " + "must be 1- or 2-tuple") + if len(macro) == 1: + ext.undef_macros.append(macro[0]) + elif len(macro) == 2: + ext.define_macros.append(macro) + + extensions[i] = ext + + def get_source_files(self): + self.check_extensions_list(self.extensions) + filenames = [] + + # Wouldn't it be neat if we knew the names of header files too... + for ext in self.extensions: + filenames.extend(ext.sources) + return filenames + + def get_outputs(self): + # Sanity check the 'extensions' list -- can't assume this is being + # done in the same run as a 'build_extensions()' call (in fact, we + # can probably assume that it *isn't*!). + self.check_extensions_list(self.extensions) + + # And build the list of output (built) filenames. Note that this + # ignores the 'inplace' flag, and assumes everything goes in the + # "build" tree. 
+ outputs = [] + for ext in self.extensions: + outputs.append(self.get_ext_fullpath(ext.name)) + return outputs + + def build_extensions(self): + # First, sanity-check the 'extensions' list + self.check_extensions_list(self.extensions) + if self.parallel: + self._build_extensions_parallel() + else: + self._build_extensions_serial() + + def _build_extensions_parallel(self): + workers = self.parallel + if self.parallel is True: + workers = os.cpu_count() # may return None + try: + from concurrent.futures import ThreadPoolExecutor + except ImportError: + workers = None + + if workers is None: + self._build_extensions_serial() + return + + with ThreadPoolExecutor(max_workers=workers) as executor: + futures = [executor.submit(self.build_extension, ext) + for ext in self.extensions] + for ext, fut in zip(self.extensions, futures): + with self._filter_build_errors(ext): + fut.result() + + def _build_extensions_serial(self): + for ext in self.extensions: + with self._filter_build_errors(ext): + self.build_extension(ext) + + @contextlib.contextmanager + def _filter_build_errors(self, ext): + try: + yield + except (CCompilerError, DistutilsError, CompileError) as e: + if not ext.optional: + raise + self.warn('building extension "%s" failed: %s' % + (ext.name, e)) + + def build_extension(self, ext): + sources = ext.sources + if sources is None or not isinstance(sources, (list, tuple)): + raise DistutilsSetupError( + "in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % ext.name) + # sort to make the resulting .so file build reproducible + sources = sorted(sources) + + ext_path = self.get_ext_fullpath(ext.name) + depends = sources + ext.depends + if not (self.force or newer_group(depends, ext_path, 'newer')): + log.debug("skipping '%s' extension (up-to-date)", ext.name) + return + else: + log.info("building '%s' extension", ext.name) + + # First, scan the sources for SWIG definition files (.i), run + # SWIG on 'em to create .c files, and modify the sources list + # accordingly. + sources = self.swig_sources(sources, ext) + + # Next, compile the source code to object files. + + # XXX not honouring 'define_macros' or 'undef_macros' -- the + # CCompiler API needs to change to accommodate this, and I + # want to do one thing at a time! + + # Two possible sources for extra compiler arguments: + # - 'extra_compile_args' in Extension object + # - CFLAGS environment variable (not particularly + # elegant, but people seem to expect it and I + # guess it's useful) + # The environment variable should take precedence, and + # any sensible compiler will give precedence to later + # command line args. Hence we combine them in order: + extra_args = ext.extra_compile_args or [] + + macros = ext.define_macros[:] + for undef in ext.undef_macros: + macros.append((undef,)) + + objects = self.compiler.compile(sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=ext.include_dirs, + debug=self.debug, + extra_postargs=extra_args, + depends=ext.depends) + + # XXX outdated variable, kept here in case third-part code + # needs it. + self._built_objects = objects[:] + + # Now link the object files together into a "shared object" -- + # of course, first we have to figure out all the other things + # that go into the mix. 
+ if ext.extra_objects: + objects.extend(ext.extra_objects) + extra_args = ext.extra_link_args or [] + + # Detect target language, if not provided + language = ext.language or self.compiler.detect_language(sources) + + self.compiler.link_shared_object( + objects, ext_path, + libraries=self.get_libraries(ext), + library_dirs=ext.library_dirs, + runtime_library_dirs=ext.runtime_library_dirs, + extra_postargs=extra_args, + export_symbols=self.get_export_symbols(ext), + debug=self.debug, + build_temp=self.build_temp, + target_lang=language) + + def swig_sources(self, sources, extension): + """Walk the list of source files in 'sources', looking for SWIG + interface (.i) files. Run SWIG on all that are found, and + return a modified 'sources' list with SWIG source files replaced + by the generated C (or C++) files. + """ + new_sources = [] + swig_sources = [] + swig_targets = {} + + # XXX this drops generated C/C++ files into the source tree, which + # is fine for developers who want to distribute the generated + # source -- but there should be an option to put SWIG output in + # the temp dir. + + if self.swig_cpp: + log.warn("--swig-cpp is deprecated - use --swig-opts=-c++") + + if self.swig_cpp or ('-c++' in self.swig_opts) or \ + ('-c++' in extension.swig_opts): + target_ext = '.cpp' + else: + target_ext = '.c' + + for source in sources: + (base, ext) = os.path.splitext(source) + if ext == ".i": # SWIG interface file + new_sources.append(base + '_wrap' + target_ext) + swig_sources.append(source) + swig_targets[source] = new_sources[-1] + else: + new_sources.append(source) + + if not swig_sources: + return new_sources + + swig = self.swig or self.find_swig() + swig_cmd = [swig, "-python"] + swig_cmd.extend(self.swig_opts) + if self.swig_cpp: + swig_cmd.append("-c++") + + # Do not override commandline arguments + if not self.swig_opts: + for o in extension.swig_opts: + swig_cmd.append(o) + + for source in swig_sources: + target = swig_targets[source] + log.info("swigging %s to %s", source, target) + self.spawn(swig_cmd + ["-o", target, source]) + + return new_sources + + def find_swig(self): + """Return the name of the SWIG executable. On Unix, this is + just "swig" -- it should be in the PATH. Tries a bit harder on + Windows. + """ + if os.name == "posix": + return "swig" + elif os.name == "nt": + # Look for SWIG in its standard installation directory on + # Windows (or so I presume!). If we find it there, great; + # if not, act like Unix and assume it's in the PATH. + for vers in ("1.3", "1.2", "1.1"): + fn = os.path.join("c:\\swig%s" % vers, "swig.exe") + if os.path.isfile(fn): + return fn + else: + return "swig.exe" + else: + raise DistutilsPlatformError( + "I don't know how to find (much less run) SWIG " + "on platform '%s'" % os.name) + + # -- Name generators ----------------------------------------------- + # (extension names, filenames, whatever) + def get_ext_fullpath(self, ext_name): + """Returns the path of the filename for a given extension. + + The file is located in `build_lib` or directly in the package + (inplace option). 
+ """ + fullname = self.get_ext_fullname(ext_name) + modpath = fullname.split('.') + filename = self.get_ext_filename(modpath[-1]) + + if not self.inplace: + # no further work needed + # returning : + # build_dir/package/path/filename + filename = os.path.join(*modpath[:-1]+[filename]) + return os.path.join(self.build_lib, filename) + + # the inplace option requires to find the package directory + # using the build_py command for that + package = '.'.join(modpath[0:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = os.path.abspath(build_py.get_package_dir(package)) + + # returning + # package_dir/filename + return os.path.join(package_dir, filename) + + def get_ext_fullname(self, ext_name): + """Returns the fullname of a given extension name. + + Adds the `package.` prefix""" + if self.package is None: + return ext_name + else: + return self.package + '.' + ext_name + + def get_ext_filename(self, ext_name): + r"""Convert the name of an extension (eg. "foo.bar") into the name + of the file from which it will be loaded (eg. "foo/bar.so", or + "foo\bar.pyd"). + """ + from distutils.sysconfig import get_config_var + ext_path = ext_name.split('.') + ext_suffix = get_config_var('EXT_SUFFIX') + return os.path.join(*ext_path) + ext_suffix + + def get_export_symbols(self, ext): + """Return the list of symbols that a shared extension has to + export. This either uses 'ext.export_symbols' or, if it's not + provided, "PyInit_" + module_name. Only relevant on Windows, where + the .pyd file (DLL) must export the module "PyInit_" function. + """ + suffix = '_' + ext.name.split('.')[-1] + try: + # Unicode module name support as defined in PEP-489 + # https://www.python.org/dev/peps/pep-0489/#export-hook-name + suffix.encode('ascii') + except UnicodeEncodeError: + suffix = 'U' + suffix.encode('punycode').replace(b'-', b'_').decode('ascii') + + initfunc_name = "PyInit" + suffix + if initfunc_name not in ext.export_symbols: + ext.export_symbols.append(initfunc_name) + return ext.export_symbols + + def get_libraries(self, ext): + """Return the list of libraries to link against when building a + shared extension. On most platforms, this is just 'ext.libraries'; + on Windows, we add the Python library (eg. python20.dll). + """ + # The python library is always needed on Windows. For MSVC, this + # is redundant, since the library is mentioned in a pragma in + # pyconfig.h that MSVC groks. The other Windows compilers all seem + # to need it mentioned explicitly, though, so that's what we do. + # Append '_d' to the python import library on debug builds. + if sys.platform == "win32": + from distutils._msvccompiler import MSVCCompiler + if not isinstance(self.compiler, MSVCCompiler): + template = "python%d%d" + if self.debug: + template = template + '_d' + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + # don't extend ext.libraries, it may be shared with other + # extensions, it is a reference to the original list + return ext.libraries + [pythonlib] + else: + # On Android only the main executable and LD_PRELOADs are considered + # to be RTLD_GLOBAL, all the dependencies of the main executable + # remain RTLD_LOCAL and so the shared libraries must be linked with + # libpython when python is built with a shared python library (issue + # bpo-21536). + # On Cygwin (and if required, other POSIX-like platforms based on + # Windows like MinGW) it is simply necessary that all symbols in + # shared libraries are resolved at link time. 
+ from distutils.sysconfig import get_config_var + link_libpython = False + if get_config_var('Py_ENABLE_SHARED'): + # A native build on an Android device or on Cygwin + if hasattr(sys, 'getandroidapilevel'): + link_libpython = True + elif sys.platform == 'cygwin': + link_libpython = True + elif '_PYTHON_HOST_PLATFORM' in os.environ: + # We are cross-compiling for one of the relevant platforms + if get_config_var('ANDROID_API_LEVEL') != 0: + link_libpython = True + elif get_config_var('MACHDEP') == 'cygwin': + link_libpython = True + + if link_libpython: + ldversion = get_config_var('LDVERSION') + return ext.libraries + ['python' + ldversion] + + return ext.libraries + py37compat.pythonlib() diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/build_py.py b/venv/Lib/site-packages/setuptools/_distutils/command/build_py.py new file mode 100644 index 00000000..edc2171c --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/build_py.py @@ -0,0 +1,416 @@ +"""distutils.command.build_py + +Implements the Distutils 'build_py' command.""" + +import os +import importlib.util +import sys +import glob + +from distutils.core import Command +from distutils.errors import * +from distutils.util import convert_path, Mixin2to3 +from distutils import log + +class build_py (Command): + + description = "\"build\" pure Python modules (copy to build directory)" + + user_options = [ + ('build-lib=', 'd', "directory to \"build\" (copy) to"), + ('compile', 'c', "compile .py to .pyc"), + ('no-compile', None, "don't compile .py files [default]"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('force', 'f', "forcibly build everything (ignore file timestamps)"), + ] + + boolean_options = ['compile', 'force'] + negative_opt = {'no-compile' : 'compile'} + + def initialize_options(self): + self.build_lib = None + self.py_modules = None + self.package = None + self.package_data = None + self.package_dir = None + self.compile = 0 + self.optimize = 0 + self.force = None + + def finalize_options(self): + self.set_undefined_options('build', + ('build_lib', 'build_lib'), + ('force', 'force')) + + # Get the distribution options that are aliases for build_py + # options -- list of packages and list of modules. + self.packages = self.distribution.packages + self.py_modules = self.distribution.py_modules + self.package_data = self.distribution.package_data + self.package_dir = {} + if self.distribution.package_dir: + for name, path in self.distribution.package_dir.items(): + self.package_dir[name] = convert_path(path) + self.data_files = self.get_data_files() + + # Ick, copied straight from install_lib.py (fancy_getopt needs a + # type system! Hell, *everything* needs a type system!!!) + if not isinstance(self.optimize, int): + try: + self.optimize = int(self.optimize) + assert 0 <= self.optimize <= 2 + except (ValueError, AssertionError): + raise DistutilsOptionError("optimize must be 0, 1, or 2") + + def run(self): + # XXX copy_file by default preserves atime and mtime. IMHO this is + # the right thing to do, but perhaps it should be an option -- in + # particular, a site administrator might want installed files to + # reflect the time of installation rather than the last + # modification time before the installed release. 
+ + # XXX copy_file by default preserves mode, which appears to be the + # wrong thing to do: if a file is read-only in the working + # directory, we want it to be installed read/write so that the next + # installation of the same module distribution can overwrite it + # without problems. (This might be a Unix-specific issue.) Thus + # we turn off 'preserve_mode' when copying to the build directory, + # since the build directory is supposed to be exactly what the + # installation will look like (ie. we preserve mode when + # installing). + + # Two options control which modules will be installed: 'packages' + # and 'py_modules'. The former lets us work with whole packages, not + # specifying individual modules at all; the latter is for + # specifying modules one-at-a-time. + + if self.py_modules: + self.build_modules() + if self.packages: + self.build_packages() + self.build_package_data() + + self.byte_compile(self.get_outputs(include_bytecode=0)) + + def get_data_files(self): + """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" + data = [] + if not self.packages: + return data + for package in self.packages: + # Locate package source directory + src_dir = self.get_package_dir(package) + + # Compute package build directory + build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + + # Length of path to strip from found files + plen = 0 + if src_dir: + plen = len(src_dir)+1 + + # Strip directory from globbed filenames + filenames = [ + file[plen:] for file in self.find_data_files(package, src_dir) + ] + data.append((package, src_dir, build_dir, filenames)) + return data + + def find_data_files(self, package, src_dir): + """Return filenames for package's data files in 'src_dir'""" + globs = (self.package_data.get('', []) + + self.package_data.get(package, [])) + files = [] + for pattern in globs: + # Each pattern has to be converted to a platform-specific path + filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern))) + # Files that match more than one pattern are only added once + files.extend([fn for fn in filelist if fn not in files + and os.path.isfile(fn)]) + return files + + def build_package_data(self): + """Copy data files into build directory""" + lastdir = None + for package, src_dir, build_dir, filenames in self.data_files: + for filename in filenames: + target = os.path.join(build_dir, filename) + self.mkpath(os.path.dirname(target)) + self.copy_file(os.path.join(src_dir, filename), target, + preserve_mode=False) + + def get_package_dir(self, package): + """Return the directory, relative to the top of the source + distribution, where package 'package' should be found + (at least according to the 'package_dir' option, if any).""" + path = package.split('.') + + if not self.package_dir: + if path: + return os.path.join(*path) + else: + return '' + else: + tail = [] + while path: + try: + pdir = self.package_dir['.'.join(path)] + except KeyError: + tail.insert(0, path[-1]) + del path[-1] + else: + tail.insert(0, pdir) + return os.path.join(*tail) + else: + # Oops, got all the way through 'path' without finding a + # match in package_dir. If package_dir defines a directory + # for the root (nameless) package, then fallback on it; + # otherwise, we might as well have not consulted + # package_dir at all, as we just use the directory implied + # by 'tail' (which should be the same as the original value + # of 'path' at this point). 
+ pdir = self.package_dir.get('') + if pdir is not None: + tail.insert(0, pdir) + + if tail: + return os.path.join(*tail) + else: + return '' + + def check_package(self, package, package_dir): + # Empty dir name means current directory, which we can probably + # assume exists. Also, os.path.exists and isdir don't know about + # my "empty string means current dir" convention, so we have to + # circumvent them. + if package_dir != "": + if not os.path.exists(package_dir): + raise DistutilsFileError( + "package directory '%s' does not exist" % package_dir) + if not os.path.isdir(package_dir): + raise DistutilsFileError( + "supposed package directory '%s' exists, " + "but is not a directory" % package_dir) + + # Require __init__.py for all but the "root package" + if package: + init_py = os.path.join(package_dir, "__init__.py") + if os.path.isfile(init_py): + return init_py + else: + log.warn(("package init file '%s' not found " + + "(or not a regular file)"), init_py) + + # Either not in a package at all (__init__.py not expected), or + # __init__.py doesn't exist -- so don't return the filename. + return None + + def check_module(self, module, module_file): + if not os.path.isfile(module_file): + log.warn("file %s (for module %s) not found", module_file, module) + return False + else: + return True + + def find_package_modules(self, package, package_dir): + self.check_package(package, package_dir) + module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py")) + modules = [] + setup_script = os.path.abspath(self.distribution.script_name) + + for f in module_files: + abs_f = os.path.abspath(f) + if abs_f != setup_script: + module = os.path.splitext(os.path.basename(f))[0] + modules.append((package, module, f)) + else: + self.debug_print("excluding %s" % setup_script) + return modules + + def find_modules(self): + """Finds individually-specified Python modules, ie. those listed by + module name in 'self.py_modules'. Returns a list of tuples (package, + module_base, filename): 'package' is a tuple of the path through + package-space to the module; 'module_base' is the bare (no + packages, no dots) module name, and 'filename' is the path to the + ".py" file (relative to the distribution root) that implements the + module. + """ + # Map package names to tuples of useful info about the package: + # (package_dir, checked) + # package_dir - the directory where we'll find source files for + # this package + # checked - true if we have checked that the package directory + # is valid (exists, contains __init__.py, ... ?) + packages = {} + + # List of (package, module, filename) tuples to return + modules = [] + + # We treat modules-in-packages almost the same as toplevel modules, + # just the "package" for a toplevel is empty (either an empty + # string or empty list, depending on context). 
Differences: + # - don't check for __init__.py in directory for empty package + for module in self.py_modules: + path = module.split('.') + package = '.'.join(path[0:-1]) + module_base = path[-1] + + try: + (package_dir, checked) = packages[package] + except KeyError: + package_dir = self.get_package_dir(package) + checked = 0 + + if not checked: + init_py = self.check_package(package, package_dir) + packages[package] = (package_dir, 1) + if init_py: + modules.append((package, "__init__", init_py)) + + # XXX perhaps we should also check for just .pyc files + # (so greedy closed-source bastards can distribute Python + # modules too) + module_file = os.path.join(package_dir, module_base + ".py") + if not self.check_module(module, module_file): + continue + + modules.append((package, module_base, module_file)) + + return modules + + def find_all_modules(self): + """Compute the list of all modules that will be built, whether + they are specified one-module-at-a-time ('self.py_modules') or + by whole packages ('self.packages'). Return a list of tuples + (package, module, module_file), just like 'find_modules()' and + 'find_package_modules()' do.""" + modules = [] + if self.py_modules: + modules.extend(self.find_modules()) + if self.packages: + for package in self.packages: + package_dir = self.get_package_dir(package) + m = self.find_package_modules(package, package_dir) + modules.extend(m) + return modules + + def get_source_files(self): + return [module[-1] for module in self.find_all_modules()] + + def get_module_outfile(self, build_dir, package, module): + outfile_path = [build_dir] + list(package) + [module + ".py"] + return os.path.join(*outfile_path) + + def get_outputs(self, include_bytecode=1): + modules = self.find_all_modules() + outputs = [] + for (package, module, module_file) in modules: + package = package.split('.') + filename = self.get_module_outfile(self.build_lib, package, module) + outputs.append(filename) + if include_bytecode: + if self.compile: + outputs.append(importlib.util.cache_from_source( + filename, optimization='')) + if self.optimize > 0: + outputs.append(importlib.util.cache_from_source( + filename, optimization=self.optimize)) + + outputs += [ + os.path.join(build_dir, filename) + for package, src_dir, build_dir, filenames in self.data_files + for filename in filenames + ] + + return outputs + + def build_module(self, module, module_file, package): + if isinstance(package, str): + package = package.split('.') + elif not isinstance(package, (list, tuple)): + raise TypeError( + "'package' must be a string (dot-separated), list, or tuple") + + # Now put the module source file into the "build" area -- this is + # easy, we just copy it somewhere under self.build_lib (the build + # directory for Python source). + outfile = self.get_module_outfile(self.build_lib, package, module) + dir = os.path.dirname(outfile) + self.mkpath(dir) + return self.copy_file(module_file, outfile, preserve_mode=0) + + def build_modules(self): + modules = self.find_modules() + for (package, module, module_file) in modules: + # Now "build" the module -- ie. copy the source file to + # self.build_lib (the build directory for Python source). + # (Actually, it gets copied to the directory for this package + # under self.build_lib.) + self.build_module(module, module_file, package) + + def build_packages(self): + for package in self.packages: + # Get list of (package, module, module_file) tuples based on + # scanning the package directory. 
'package' is only included + # in the tuple so that 'find_modules()' and + # 'find_package_tuples()' have a consistent interface; it's + # ignored here (apart from a sanity check). Also, 'module' is + # the *unqualified* module name (ie. no dots, no package -- we + # already know its package!), and 'module_file' is the path to + # the .py file, relative to the current directory + # (ie. including 'package_dir'). + package_dir = self.get_package_dir(package) + modules = self.find_package_modules(package, package_dir) + + # Now loop over the modules we found, "building" each one (just + # copy it to self.build_lib). + for (package_, module, module_file) in modules: + assert package == package_ + self.build_module(module, module_file, package) + + def byte_compile(self, files): + if sys.dont_write_bytecode: + self.warn('byte-compiling is disabled, skipping.') + return + + from distutils.util import byte_compile + prefix = self.build_lib + if prefix[-1] != os.sep: + prefix = prefix + os.sep + + # XXX this code is essentially the same as the 'byte_compile() + # method of the "install_lib" command, except for the determination + # of the 'prefix' string. Hmmm. + if self.compile: + byte_compile(files, optimize=0, + force=self.force, prefix=prefix, dry_run=self.dry_run) + if self.optimize > 0: + byte_compile(files, optimize=self.optimize, + force=self.force, prefix=prefix, dry_run=self.dry_run) + +class build_py_2to3(build_py, Mixin2to3): + def run(self): + self.updated_files = [] + + # Base class code + if self.py_modules: + self.build_modules() + if self.packages: + self.build_packages() + self.build_package_data() + + # 2to3 + self.run_2to3(self.updated_files) + + # Remaining base class code + self.byte_compile(self.get_outputs(include_bytecode=0)) + + def build_module(self, module, module_file, package): + res = build_py.build_module(self, module, module_file, package) + if res[1]: + # file was copied + self.updated_files.append(res[0]) + return res diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/build_scripts.py b/venv/Lib/site-packages/setuptools/_distutils/command/build_scripts.py new file mode 100644 index 00000000..ccc70e64 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/build_scripts.py @@ -0,0 +1,160 @@ +"""distutils.command.build_scripts + +Implements the Distutils 'build_scripts' command.""" + +import os, re +from stat import ST_MODE +from distutils import sysconfig +from distutils.core import Command +from distutils.dep_util import newer +from distutils.util import convert_path, Mixin2to3 +from distutils import log +import tokenize + +# check if Python is called on the first line with this expression +first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') + +class build_scripts(Command): + + description = "\"build\" scripts (copy and fixup #! 
line)" + + user_options = [ + ('build-dir=', 'd', "directory to \"build\" (copy) to"), + ('force', 'f', "forcibly build everything (ignore file timestamps"), + ('executable=', 'e', "specify final destination interpreter path"), + ] + + boolean_options = ['force'] + + + def initialize_options(self): + self.build_dir = None + self.scripts = None + self.force = None + self.executable = None + self.outfiles = None + + def finalize_options(self): + self.set_undefined_options('build', + ('build_scripts', 'build_dir'), + ('force', 'force'), + ('executable', 'executable')) + self.scripts = self.distribution.scripts + + def get_source_files(self): + return self.scripts + + def run(self): + if not self.scripts: + return + self.copy_scripts() + + + def copy_scripts(self): + r"""Copy each script listed in 'self.scripts'; if it's marked as a + Python script in the Unix way (first line matches 'first_line_re', + ie. starts with "\#!" and contains "python"), then adjust the first + line to refer to the current Python interpreter as we copy. + """ + self.mkpath(self.build_dir) + outfiles = [] + updated_files = [] + for script in self.scripts: + adjust = False + script = convert_path(script) + outfile = os.path.join(self.build_dir, os.path.basename(script)) + outfiles.append(outfile) + + if not self.force and not newer(script, outfile): + log.debug("not copying %s (up-to-date)", script) + continue + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, "rb") + except OSError: + if not self.dry_run: + raise + f = None + else: + encoding, lines = tokenize.detect_encoding(f.readline) + f.seek(0) + first_line = f.readline() + if not first_line: + self.warn("%s is an empty file (skipping)" % script) + continue + + match = first_line_re.match(first_line) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if adjust: + log.info("copying and adjusting %s -> %s", script, + self.build_dir) + updated_files.append(outfile) + if not self.dry_run: + if not sysconfig.python_build: + executable = self.executable + else: + executable = os.path.join( + sysconfig.get_config_var("BINDIR"), + "python%s%s" % (sysconfig.get_config_var("VERSION"), + sysconfig.get_config_var("EXE"))) + executable = os.fsencode(executable) + shebang = b"#!" + executable + post_interp + b"\n" + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: + raise ValueError( + "The shebang ({!r}) is not decodable " + "from utf-8".format(shebang)) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. 
+ try: + shebang.decode(encoding) + except UnicodeDecodeError: + raise ValueError( + "The shebang ({!r}) is not decodable " + "from the script encoding ({})" + .format(shebang, encoding)) + with open(outfile, "wb") as outf: + outf.write(shebang) + outf.writelines(f.readlines()) + if f: + f.close() + else: + if f: + f.close() + updated_files.append(outfile) + self.copy_file(script, outfile) + + if os.name == 'posix': + for file in outfiles: + if self.dry_run: + log.info("changing mode of %s", file) + else: + oldmode = os.stat(file)[ST_MODE] & 0o7777 + newmode = (oldmode | 0o555) & 0o7777 + if newmode != oldmode: + log.info("changing mode of %s from %o to %o", + file, oldmode, newmode) + os.chmod(file, newmode) + # XXX should we modify self.outfiles? + return outfiles, updated_files + +class build_scripts_2to3(build_scripts, Mixin2to3): + + def copy_scripts(self): + outfiles, updated_files = build_scripts.copy_scripts(self) + if not self.dry_run: + self.run_2to3(updated_files) + return outfiles, updated_files diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/check.py b/venv/Lib/site-packages/setuptools/_distutils/command/check.py new file mode 100644 index 00000000..ada25006 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/check.py @@ -0,0 +1,148 @@ +"""distutils.command.check + +Implements the Distutils 'check' command. +""" +from distutils.core import Command +from distutils.errors import DistutilsSetupError + +try: + # docutils is installed + from docutils.utils import Reporter + from docutils.parsers.rst import Parser + from docutils import frontend + from docutils import nodes + + class SilentReporter(Reporter): + + def __init__(self, source, report_level, halt_level, stream=None, + debug=0, encoding='ascii', error_handler='replace'): + self.messages = [] + Reporter.__init__(self, source, report_level, halt_level, stream, + debug, encoding, error_handler) + + def system_message(self, level, message, *children, **kwargs): + self.messages.append((level, message, children, kwargs)) + return nodes.system_message(message, level=level, + type=self.levels[level], + *children, **kwargs) + + HAS_DOCUTILS = True +except Exception: + # Catch all exceptions because exceptions besides ImportError probably + # indicate that docutils is not ported to Py3k. + HAS_DOCUTILS = False + +class check(Command): + """This command checks the meta-data of the package. 
+ """ + description = ("perform some checks on the package") + user_options = [('metadata', 'm', 'Verify meta-data'), + ('restructuredtext', 'r', + ('Checks if long string meta-data syntax ' + 'are reStructuredText-compliant')), + ('strict', 's', + 'Will exit with an error if a check fails')] + + boolean_options = ['metadata', 'restructuredtext', 'strict'] + + def initialize_options(self): + """Sets default values for options.""" + self.restructuredtext = 0 + self.metadata = 1 + self.strict = 0 + self._warnings = 0 + + def finalize_options(self): + pass + + def warn(self, msg): + """Counts the number of warnings that occurs.""" + self._warnings += 1 + return Command.warn(self, msg) + + def run(self): + """Runs the command.""" + # perform the various tests + if self.metadata: + self.check_metadata() + if self.restructuredtext: + if HAS_DOCUTILS: + self.check_restructuredtext() + elif self.strict: + raise DistutilsSetupError('The docutils package is needed.') + + # let's raise an error in strict mode, if we have at least + # one warning + if self.strict and self._warnings > 0: + raise DistutilsSetupError('Please correct your package.') + + def check_metadata(self): + """Ensures that all required elements of meta-data are supplied. + + Required fields: + name, version, URL + + Recommended fields: + (author and author_email) or (maintainer and maintainer_email)) + + Warns if any are missing. + """ + metadata = self.distribution.metadata + + missing = [] + for attr in ('name', 'version', 'url'): + if not (hasattr(metadata, attr) and getattr(metadata, attr)): + missing.append(attr) + + if missing: + self.warn("missing required meta-data: %s" % ', '.join(missing)) + if metadata.author: + if not metadata.author_email: + self.warn("missing meta-data: if 'author' supplied, " + + "'author_email' should be supplied too") + elif metadata.maintainer: + if not metadata.maintainer_email: + self.warn("missing meta-data: if 'maintainer' supplied, " + + "'maintainer_email' should be supplied too") + else: + self.warn("missing meta-data: either (author and author_email) " + + "or (maintainer and maintainer_email) " + + "should be supplied") + + def check_restructuredtext(self): + """Checks if the long string fields are reST-compliant.""" + data = self.distribution.get_long_description() + for warning in self._check_rst_data(data): + line = warning[-1].get('line') + if line is None: + warning = warning[1] + else: + warning = '%s (line %s)' % (warning[1], line) + self.warn(warning) + + def _check_rst_data(self, data): + """Returns warnings when the provided data doesn't compile.""" + # the include and csv_table directives need this to be a path + source_path = self.distribution.script_name or 'setup.py' + parser = Parser() + settings = frontend.OptionParser(components=(Parser,)).get_default_values() + settings.tab_width = 4 + settings.pep_references = None + settings.rfc_references = None + reporter = SilentReporter(source_path, + settings.report_level, + settings.halt_level, + stream=settings.warning_stream, + debug=settings.debug, + encoding=settings.error_encoding, + error_handler=settings.error_encoding_error_handler) + + document = nodes.document(settings, reporter, source=source_path) + document.note_source(source_path, -1) + try: + parser.parse(data, document) + except AttributeError as e: + reporter.messages.append( + (-1, 'Could not finish the parsing: %s.' 
% e, '', {})) + + return reporter.messages diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/clean.py b/venv/Lib/site-packages/setuptools/_distutils/command/clean.py new file mode 100644 index 00000000..0cb27016 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/clean.py @@ -0,0 +1,76 @@ +"""distutils.command.clean + +Implements the Distutils 'clean' command.""" + +# contributed by Bastian Kleineidam , added 2000-03-18 + +import os +from distutils.core import Command +from distutils.dir_util import remove_tree +from distutils import log + +class clean(Command): + + description = "clean up temporary files from 'build' command" + user_options = [ + ('build-base=', 'b', + "base build directory (default: 'build.build-base')"), + ('build-lib=', None, + "build directory for all modules (default: 'build.build-lib')"), + ('build-temp=', 't', + "temporary build directory (default: 'build.build-temp')"), + ('build-scripts=', None, + "build directory for scripts (default: 'build.build-scripts')"), + ('bdist-base=', None, + "temporary directory for built distributions"), + ('all', 'a', + "remove all build output, not just temporary by-products") + ] + + boolean_options = ['all'] + + def initialize_options(self): + self.build_base = None + self.build_lib = None + self.build_temp = None + self.build_scripts = None + self.bdist_base = None + self.all = None + + def finalize_options(self): + self.set_undefined_options('build', + ('build_base', 'build_base'), + ('build_lib', 'build_lib'), + ('build_scripts', 'build_scripts'), + ('build_temp', 'build_temp')) + self.set_undefined_options('bdist', + ('bdist_base', 'bdist_base')) + + def run(self): + # remove the build/temp. directory (unless it's already + # gone) + if os.path.exists(self.build_temp): + remove_tree(self.build_temp, dry_run=self.dry_run) + else: + log.debug("'%s' does not exist -- can't clean it", + self.build_temp) + + if self.all: + # remove build directories + for directory in (self.build_lib, + self.bdist_base, + self.build_scripts): + if os.path.exists(directory): + remove_tree(directory, dry_run=self.dry_run) + else: + log.warn("'%s' does not exist -- can't clean it", + directory) + + # just for the heck of it, try to remove the base build directory: + # we might have emptied it right now, but if not we don't care + if not self.dry_run: + try: + os.rmdir(self.build_base) + log.info("removing '%s'", self.build_base) + except OSError: + pass diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/config.py b/venv/Lib/site-packages/setuptools/_distutils/command/config.py new file mode 100644 index 00000000..aeda408e --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/config.py @@ -0,0 +1,344 @@ +"""distutils.command.config + +Implements the Distutils 'config' command, a (mostly) empty command class +that exists mainly to be sub-classed by specific module distributions and +applications. The idea is that while every "config" command is different, +at least they're all named the same, and users always see "config" in the +list of standard commands. Also, this is a good place to put common +configure-like tasks: "try to compile this C code", or "figure out where +this header file lives". 
+""" + +import os, re + +from distutils.core import Command +from distutils.errors import DistutilsExecError +from distutils.sysconfig import customize_compiler +from distutils import log + +LANG_EXT = {"c": ".c", "c++": ".cxx"} + +class config(Command): + + description = "prepare to build" + + user_options = [ + ('compiler=', None, + "specify the compiler type"), + ('cc=', None, + "specify the compiler executable"), + ('include-dirs=', 'I', + "list of directories to search for header files"), + ('define=', 'D', + "C preprocessor macros to define"), + ('undef=', 'U', + "C preprocessor macros to undefine"), + ('libraries=', 'l', + "external C libraries to link with"), + ('library-dirs=', 'L', + "directories to search for external C libraries"), + + ('noisy', None, + "show every action (compile, link, run, ...) taken"), + ('dump-source', None, + "dump generated source files before attempting to compile them"), + ] + + + # The three standard command methods: since the "config" command + # does nothing by default, these are empty. + + def initialize_options(self): + self.compiler = None + self.cc = None + self.include_dirs = None + self.libraries = None + self.library_dirs = None + + # maximal output for now + self.noisy = 1 + self.dump_source = 1 + + # list of temporary files generated along-the-way that we have + # to clean at some point + self.temp_files = [] + + def finalize_options(self): + if self.include_dirs is None: + self.include_dirs = self.distribution.include_dirs or [] + elif isinstance(self.include_dirs, str): + self.include_dirs = self.include_dirs.split(os.pathsep) + + if self.libraries is None: + self.libraries = [] + elif isinstance(self.libraries, str): + self.libraries = [self.libraries] + + if self.library_dirs is None: + self.library_dirs = [] + elif isinstance(self.library_dirs, str): + self.library_dirs = self.library_dirs.split(os.pathsep) + + def run(self): + pass + + # Utility methods for actual "config" commands. The interfaces are + # loosely based on Autoconf macros of similar names. Sub-classes + # may use these freely. + + def _check_compiler(self): + """Check that 'self.compiler' really is a CCompiler object; + if not, make it one. + """ + # We do this late, and only on-demand, because this is an expensive + # import. 
+ from distutils.ccompiler import CCompiler, new_compiler + if not isinstance(self.compiler, CCompiler): + self.compiler = new_compiler(compiler=self.compiler, + dry_run=self.dry_run, force=1) + customize_compiler(self.compiler) + if self.include_dirs: + self.compiler.set_include_dirs(self.include_dirs) + if self.libraries: + self.compiler.set_libraries(self.libraries) + if self.library_dirs: + self.compiler.set_library_dirs(self.library_dirs) + + def _gen_temp_sourcefile(self, body, headers, lang): + filename = "_configtest" + LANG_EXT[lang] + with open(filename, "w") as file: + if headers: + for header in headers: + file.write("#include <%s>\n" % header) + file.write("\n") + file.write(body) + if body[-1] != "\n": + file.write("\n") + return filename + + def _preprocess(self, body, headers, include_dirs, lang): + src = self._gen_temp_sourcefile(body, headers, lang) + out = "_configtest.i" + self.temp_files.extend([src, out]) + self.compiler.preprocess(src, out, include_dirs=include_dirs) + return (src, out) + + def _compile(self, body, headers, include_dirs, lang): + src = self._gen_temp_sourcefile(body, headers, lang) + if self.dump_source: + dump_file(src, "compiling '%s':" % src) + (obj,) = self.compiler.object_filenames([src]) + self.temp_files.extend([src, obj]) + self.compiler.compile([src], include_dirs=include_dirs) + return (src, obj) + + def _link(self, body, headers, include_dirs, libraries, library_dirs, + lang): + (src, obj) = self._compile(body, headers, include_dirs, lang) + prog = os.path.splitext(os.path.basename(src))[0] + self.compiler.link_executable([obj], prog, + libraries=libraries, + library_dirs=library_dirs, + target_lang=lang) + + if self.compiler.exe_extension is not None: + prog = prog + self.compiler.exe_extension + self.temp_files.append(prog) + + return (src, obj, prog) + + def _clean(self, *filenames): + if not filenames: + filenames = self.temp_files + self.temp_files = [] + log.info("removing: %s", ' '.join(filenames)) + for filename in filenames: + try: + os.remove(filename) + except OSError: + pass + + + # XXX these ignore the dry-run flag: what to do, what to do? even if + # you want a dry-run build, you still need some sort of configuration + # info. My inclination is to make it up to the real config command to + # consult 'dry_run', and assume a default (minimal) configuration if + # true. The problem with trying to do it here is that you'd have to + # return either true or false from all the 'try' methods, neither of + # which is correct. + + # XXX need access to the header search path and maybe default macros. + + def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"): + """Construct a source file from 'body' (a string containing lines + of C/C++ code) and 'headers' (a list of header files to include) + and run it through the preprocessor. Return true if the + preprocessor succeeded, false if there were any errors. + ('body' probably isn't of much use, but what the heck.) + """ + from distutils.ccompiler import CompileError + self._check_compiler() + ok = True + try: + self._preprocess(body, headers, include_dirs, lang) + except CompileError: + ok = False + + self._clean() + return ok + + def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, + lang="c"): + """Construct a source file (just like 'try_cpp()'), run it through + the preprocessor, and return true if any line of the output matches + 'pattern'. 'pattern' should either be a compiled regex object or a + string containing a regex. 
If both 'body' and 'headers' are None, + preprocesses an empty file -- which can be useful to determine the + symbols the preprocessor and compiler set by default. + """ + self._check_compiler() + src, out = self._preprocess(body, headers, include_dirs, lang) + + if isinstance(pattern, str): + pattern = re.compile(pattern) + + with open(out) as file: + match = False + while True: + line = file.readline() + if line == '': + break + if pattern.search(line): + match = True + break + + self._clean() + return match + + def try_compile(self, body, headers=None, include_dirs=None, lang="c"): + """Try to compile a source file built from 'body' and 'headers'. + Return true on success, false otherwise. + """ + from distutils.ccompiler import CompileError + self._check_compiler() + try: + self._compile(body, headers, include_dirs, lang) + ok = True + except CompileError: + ok = False + + log.info(ok and "success!" or "failure.") + self._clean() + return ok + + def try_link(self, body, headers=None, include_dirs=None, libraries=None, + library_dirs=None, lang="c"): + """Try to compile and link a source file, built from 'body' and + 'headers', to executable form. Return true on success, false + otherwise. + """ + from distutils.ccompiler import CompileError, LinkError + self._check_compiler() + try: + self._link(body, headers, include_dirs, + libraries, library_dirs, lang) + ok = True + except (CompileError, LinkError): + ok = False + + log.info(ok and "success!" or "failure.") + self._clean() + return ok + + def try_run(self, body, headers=None, include_dirs=None, libraries=None, + library_dirs=None, lang="c"): + """Try to compile, link to an executable, and run a program + built from 'body' and 'headers'. Return true on success, false + otherwise. + """ + from distutils.ccompiler import CompileError, LinkError + self._check_compiler() + try: + src, obj, exe = self._link(body, headers, include_dirs, + libraries, library_dirs, lang) + self.spawn([exe]) + ok = True + except (CompileError, LinkError, DistutilsExecError): + ok = False + + log.info(ok and "success!" or "failure.") + self._clean() + return ok + + + # -- High-level methods -------------------------------------------- + # (these are the ones that are actually likely to be useful + # when implementing a real-world config command!) + + def check_func(self, func, headers=None, include_dirs=None, + libraries=None, library_dirs=None, decl=0, call=0): + """Determine if function 'func' is available by constructing a + source file that refers to 'func', and compiles and links it. + If everything succeeds, returns true; otherwise returns false. + + The constructed source file starts out by including the header + files listed in 'headers'. If 'decl' is true, it then declares + 'func' (as "int func()"); you probably shouldn't supply 'headers' + and set 'decl' true in the same call, or you might get errors about + a conflicting declarations for 'func'. Finally, the constructed + 'main()' function either references 'func' or (if 'call' is true) + calls it. 'libraries' and 'library_dirs' are used when + linking. 
+ """ + self._check_compiler() + body = [] + if decl: + body.append("int %s ();" % func) + body.append("int main () {") + if call: + body.append(" %s();" % func) + else: + body.append(" %s;" % func) + body.append("}") + body = "\n".join(body) + "\n" + + return self.try_link(body, headers, include_dirs, + libraries, library_dirs) + + def check_lib(self, library, library_dirs=None, headers=None, + include_dirs=None, other_libraries=[]): + """Determine if 'library' is available to be linked against, + without actually checking that any particular symbols are provided + by it. 'headers' will be used in constructing the source file to + be compiled, but the only effect of this is to check if all the + header files listed are available. Any libraries listed in + 'other_libraries' will be included in the link, in case 'library' + has symbols that depend on other libraries. + """ + self._check_compiler() + return self.try_link("int main (void) { }", headers, include_dirs, + [library] + other_libraries, library_dirs) + + def check_header(self, header, include_dirs=None, library_dirs=None, + lang="c"): + """Determine if the system header file named by 'header_file' + exists and can be found by the preprocessor; return true if so, + false otherwise. + """ + return self.try_cpp(body="/* No body */", headers=[header], + include_dirs=include_dirs) + +def dump_file(filename, head=None): + """Dumps a file content into log.info. + + If head is not None, will be dumped before the file content. + """ + if head is None: + log.info('%s', filename) + else: + log.info(head) + file = open(filename) + try: + log.info(file.read()) + finally: + file.close() diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install.py b/venv/Lib/site-packages/setuptools/_distutils/command/install.py new file mode 100644 index 00000000..13feeb89 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/install.py @@ -0,0 +1,677 @@ +"""distutils.command.install + +Implements the Distutils 'install' command.""" + +import sys +import os + +from distutils import log +from distutils.core import Command +from distutils.debug import DEBUG +from distutils.sysconfig import get_config_vars +from distutils.errors import DistutilsPlatformError +from distutils.file_util import write_file +from distutils.util import convert_path, subst_vars, change_root +from distutils.util import get_platform +from distutils.errors import DistutilsOptionError + +from site import USER_BASE +from site import USER_SITE +HAS_USER_SITE = True + +WINDOWS_SCHEME = { + 'purelib': '$base/Lib/site-packages', + 'platlib': '$base/Lib/site-packages', + 'headers': '$base/Include/$dist_name', + 'scripts': '$base/Scripts', + 'data' : '$base', +} + +INSTALL_SCHEMES = { + 'unix_prefix': { + 'purelib': '$base/lib/python$py_version_short/site-packages', + 'platlib': '$platbase/$platlibdir/python$py_version_short/site-packages', + 'headers': '$base/include/python$py_version_short$abiflags/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', + }, + 'unix_home': { + 'purelib': '$base/lib/python', + 'platlib': '$base/$platlibdir/python', + 'headers': '$base/include/python/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', + }, + 'nt': WINDOWS_SCHEME, + 'pypy': { + 'purelib': '$base/site-packages', + 'platlib': '$base/site-packages', + 'headers': '$base/include/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', + }, + 'pypy_nt': { + 'purelib': '$base/site-packages', + 'platlib': '$base/site-packages', + 'headers': 
'$base/include/$dist_name', + 'scripts': '$base/Scripts', + 'data' : '$base', + }, + } + +# user site schemes +if HAS_USER_SITE: + INSTALL_SCHEMES['nt_user'] = { + 'purelib': '$usersite', + 'platlib': '$usersite', + 'headers': '$userbase/Python$py_version_nodot/Include/$dist_name', + 'scripts': '$userbase/Python$py_version_nodot/Scripts', + 'data' : '$userbase', + } + + INSTALL_SCHEMES['unix_user'] = { + 'purelib': '$usersite', + 'platlib': '$usersite', + 'headers': + '$userbase/include/python$py_version_short$abiflags/$dist_name', + 'scripts': '$userbase/bin', + 'data' : '$userbase', + } + +# The keys to an installation scheme; if any new types of files are to be +# installed, be sure to add an entry to every installation scheme above, +# and to SCHEME_KEYS here. +SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data') + + +class install(Command): + + description = "install everything from build directory" + + user_options = [ + # Select installation scheme and set base director(y|ies) + ('prefix=', None, + "installation prefix"), + ('exec-prefix=', None, + "(Unix only) prefix for platform-specific files"), + ('home=', None, + "(Unix only) home directory to install under"), + + # Or, just set the base director(y|ies) + ('install-base=', None, + "base installation directory (instead of --prefix or --home)"), + ('install-platbase=', None, + "base installation directory for platform-specific files " + + "(instead of --exec-prefix or --home)"), + ('root=', None, + "install everything relative to this alternate root directory"), + + # Or, explicitly set the installation scheme + ('install-purelib=', None, + "installation directory for pure Python module distributions"), + ('install-platlib=', None, + "installation directory for non-pure module distributions"), + ('install-lib=', None, + "installation directory for all module distributions " + + "(overrides --install-purelib and --install-platlib)"), + + ('install-headers=', None, + "installation directory for C/C++ headers"), + ('install-scripts=', None, + "installation directory for Python scripts"), + ('install-data=', None, + "installation directory for data files"), + + # Byte-compilation options -- see install_lib.py for details, as + # these are duplicated from there (but only install_lib does + # anything with them). + ('compile', 'c', "compile .py to .pyc [default]"), + ('no-compile', None, "don't compile .py files"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + + # Miscellaneous control options + ('force', 'f', + "force installation (overwrite any existing files)"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + + # Where to install documentation (eventually!) + #('doc-format=', None, "format of documentation to generate"), + #('install-man=', None, "directory for Unix man pages"), + #('install-html=', None, "directory for HTML documentation"), + #('install-info=', None, "directory for GNU info files"), + + ('record=', None, + "filename in which to record list of installed files"), + ] + + boolean_options = ['compile', 'force', 'skip-build'] + + if HAS_USER_SITE: + user_options.append(('user', None, + "install in user site-package '%s'" % USER_SITE)) + boolean_options.append('user') + + negative_opt = {'no-compile' : 'compile'} + + + def initialize_options(self): + """Initializes options.""" + # High-level options: these select both an installation base + # and scheme. 
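+        # Editor's illustrative note (not part of distutils): for example
+        #     python setup.py install --prefix=/opt/foo
+        # on a posix build picks the 'unix_prefix' scheme above, so pure
+        # modules land in /opt/foo/lib/python<X.Y>/site-packages once
+        # '$base' and friends are expanded by subst_vars().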
+ self.prefix = None + self.exec_prefix = None + self.home = None + self.user = 0 + + # These select only the installation base; it's up to the user to + # specify the installation scheme (currently, that means supplying + # the --install-{platlib,purelib,scripts,data} options). + self.install_base = None + self.install_platbase = None + self.root = None + + # These options are the actual installation directories; if not + # supplied by the user, they are filled in using the installation + # scheme implied by prefix/exec-prefix/home and the contents of + # that installation scheme. + self.install_purelib = None # for pure module distributions + self.install_platlib = None # non-pure (dists w/ extensions) + self.install_headers = None # for C/C++ headers + self.install_lib = None # set to either purelib or platlib + self.install_scripts = None + self.install_data = None + self.install_userbase = USER_BASE + self.install_usersite = USER_SITE + + self.compile = None + self.optimize = None + + # Deprecated + # These two are for putting non-packagized distributions into their + # own directory and creating a .pth file if it makes sense. + # 'extra_path' comes from the setup file; 'install_path_file' can + # be turned off if it makes no sense to install a .pth file. (But + # better to install it uselessly than to guess wrong and not + # install it when it's necessary and would be used!) Currently, + # 'install_path_file' is always true unless some outsider meddles + # with it. + self.extra_path = None + self.install_path_file = 1 + + # 'force' forces installation, even if target files are not + # out-of-date. 'skip_build' skips running the "build" command, + # handy if you know it's not necessary. 'warn_dir' (which is *not* + # a user option, it's just there so the bdist_* commands can turn + # it off) determines whether we warn about installing to a + # directory not in sys.path. + self.force = 0 + self.skip_build = 0 + self.warn_dir = 1 + + # These are only here as a conduit from the 'build' command to the + # 'install_*' commands that do the real work. ('build_base' isn't + # actually used anywhere, but it might be useful in future.) They + # are not user options, because if the user told the install + # command where the build directory is, that wouldn't affect the + # build command. + self.build_base = None + self.build_lib = None + + # Not defined yet because we don't know anything about + # documentation yet. + #self.install_man = None + #self.install_html = None + #self.install_info = None + + self.record = None + + + # -- Option finalizing methods ------------------------------------- + # (This is rather more involved than for most commands, + # because this is where the policy for installing third- + # party Python modules on various platforms given a wide + # array of user input is decided. Yes, it's quite complex!) + + def finalize_options(self): + """Finalizes options.""" + # This method (and its helpers, like 'finalize_unix()', + # 'finalize_other()', and 'select_scheme()') is where the default + # installation directories for modules, extension modules, and + # anything else we care to install from a Python module + # distribution. Thus, this code makes a pretty important policy + # statement about how third-party stuff is added to a Python + # installation! Note that the actual work of installation is done + # by the relatively simple 'install_*' commands; they just take + # their orders from the installation directory options determined + # here. 
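+        # Editor's illustrative note (not part of distutils): the checks
+        # below make contradictory combinations such as
+        #     python setup.py install --prefix=/opt/foo --home=~/py
+        # fail early with DistutilsOptionError rather than silently
+        # preferring one base over the other.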
+ + # Check for errors/inconsistencies in the options; first, stuff + # that's wrong on any platform. + + if ((self.prefix or self.exec_prefix or self.home) and + (self.install_base or self.install_platbase)): + raise DistutilsOptionError( + "must supply either prefix/exec-prefix/home or " + + "install-base/install-platbase -- not both") + + if self.home and (self.prefix or self.exec_prefix): + raise DistutilsOptionError( + "must supply either home or prefix/exec-prefix -- not both") + + if self.user and (self.prefix or self.exec_prefix or self.home or + self.install_base or self.install_platbase): + raise DistutilsOptionError("can't combine user with prefix, " + "exec_prefix/home, or install_(plat)base") + + # Next, stuff that's wrong (or dubious) only on certain platforms. + if os.name != "posix": + if self.exec_prefix: + self.warn("exec-prefix option ignored on this platform") + self.exec_prefix = None + + # Now the interesting logic -- so interesting that we farm it out + # to other methods. The goal of these methods is to set the final + # values for the install_{lib,scripts,data,...} options, using as + # input a heady brew of prefix, exec_prefix, home, install_base, + # install_platbase, user-supplied versions of + # install_{purelib,platlib,lib,scripts,data,...}, and the + # INSTALL_SCHEME dictionary above. Phew! + + self.dump_dirs("pre-finalize_{unix,other}") + + if os.name == 'posix': + self.finalize_unix() + else: + self.finalize_other() + + self.dump_dirs("post-finalize_{unix,other}()") + + # Expand configuration variables, tilde, etc. in self.install_base + # and self.install_platbase -- that way, we can use $base or + # $platbase in the other installation directories and not worry + # about needing recursive variable expansion (shudder). + + py_version = sys.version.split()[0] + (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix') + try: + abiflags = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. + abiflags = '' + self.config_vars = {'dist_name': self.distribution.get_name(), + 'dist_version': self.distribution.get_version(), + 'dist_fullname': self.distribution.get_fullname(), + 'py_version': py_version, + 'py_version_short': '%d.%d' % sys.version_info[:2], + 'py_version_nodot': '%d%d' % sys.version_info[:2], + 'sys_prefix': prefix, + 'prefix': prefix, + 'sys_exec_prefix': exec_prefix, + 'exec_prefix': exec_prefix, + 'abiflags': abiflags, + 'platlibdir': getattr(sys, 'platlibdir', 'lib'), + } + + if HAS_USER_SITE: + self.config_vars['userbase'] = self.install_userbase + self.config_vars['usersite'] = self.install_usersite + + self.expand_basedirs() + + self.dump_dirs("post-expand_basedirs()") + + # Now define config vars for the base directories so we can expand + # everything else. + self.config_vars['base'] = self.install_base + self.config_vars['platbase'] = self.install_platbase + + if DEBUG: + from pprint import pprint + print("config vars:") + pprint(self.config_vars) + + # Expand "~" and configuration variables in the installation + # directories. + self.expand_dirs() + + self.dump_dirs("post-expand_dirs()") + + # Create directories in the home dir: + if self.user: + self.create_home_path() + + # Pick the actual directory to install all modules to: either + # install_purelib or install_platlib, depending on whether this + # module distribution is pure or not. Of course, if the user + # already specified install_lib, use their selection. 
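+        # Editor's illustrative sketch (not part of distutils): a setup
+        # script with ext_modules=[Extension('fast', ['fast.c'])] yields a
+        # non-pure distribution, so install_lib defaults to install_platlib;
+        # a pure-Python distribution defaults to install_purelib instead.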
+ if self.install_lib is None: + if self.distribution.ext_modules: # has extensions: non-pure + self.install_lib = self.install_platlib + else: + self.install_lib = self.install_purelib + + + # Convert directories from Unix /-separated syntax to the local + # convention. + self.convert_paths('lib', 'purelib', 'platlib', + 'scripts', 'data', 'headers', + 'userbase', 'usersite') + + # Deprecated + # Well, we're not actually fully completely finalized yet: we still + # have to deal with 'extra_path', which is the hack for allowing + # non-packagized module distributions (hello, Numerical Python!) to + # get their own directories. + self.handle_extra_path() + self.install_libbase = self.install_lib # needed for .pth file + self.install_lib = os.path.join(self.install_lib, self.extra_dirs) + + # If a new root directory was supplied, make all the installation + # dirs relative to it. + if self.root is not None: + self.change_roots('libbase', 'lib', 'purelib', 'platlib', + 'scripts', 'data', 'headers') + + self.dump_dirs("after prepending root") + + # Find out the build directories, ie. where to install from. + self.set_undefined_options('build', + ('build_base', 'build_base'), + ('build_lib', 'build_lib')) + + # Punt on doc directories for now -- after all, we're punting on + # documentation completely! + + def dump_dirs(self, msg): + """Dumps the list of user options.""" + if not DEBUG: + return + from distutils.fancy_getopt import longopt_xlate + log.debug(msg + ":") + for opt in self.user_options: + opt_name = opt[0] + if opt_name[-1] == "=": + opt_name = opt_name[0:-1] + if opt_name in self.negative_opt: + opt_name = self.negative_opt[opt_name] + opt_name = opt_name.translate(longopt_xlate) + val = not getattr(self, opt_name) + else: + opt_name = opt_name.translate(longopt_xlate) + val = getattr(self, opt_name) + log.debug(" %s: %s", opt_name, val) + + def finalize_unix(self): + """Finalizes options for posix platforms.""" + if self.install_base is not None or self.install_platbase is not None: + if ((self.install_lib is None and + self.install_purelib is None and + self.install_platlib is None) or + self.install_headers is None or + self.install_scripts is None or + self.install_data is None): + raise DistutilsOptionError( + "install-base or install-platbase supplied, but " + "installation scheme is incomplete") + return + + if self.user: + if self.install_userbase is None: + raise DistutilsPlatformError( + "User base directory is not specified") + self.install_base = self.install_platbase = self.install_userbase + self.select_scheme("unix_user") + elif self.home is not None: + self.install_base = self.install_platbase = self.home + self.select_scheme("unix_home") + else: + if self.prefix is None: + if self.exec_prefix is not None: + raise DistutilsOptionError( + "must not supply exec-prefix without prefix") + + self.prefix = os.path.normpath(sys.prefix) + self.exec_prefix = os.path.normpath(sys.exec_prefix) + + else: + if self.exec_prefix is None: + self.exec_prefix = self.prefix + + self.install_base = self.prefix + self.install_platbase = self.exec_prefix + self.select_scheme("unix_prefix") + + def finalize_other(self): + """Finalizes options for non-posix platforms""" + if self.user: + if self.install_userbase is None: + raise DistutilsPlatformError( + "User base directory is not specified") + self.install_base = self.install_platbase = self.install_userbase + self.select_scheme(os.name + "_user") + elif self.home is not None: + self.install_base = self.install_platbase = self.home + 
self.select_scheme("unix_home") + else: + if self.prefix is None: + self.prefix = os.path.normpath(sys.prefix) + + self.install_base = self.install_platbase = self.prefix + try: + self.select_scheme(os.name) + except KeyError: + raise DistutilsPlatformError( + "I don't know how to install stuff on '%s'" % os.name) + + def select_scheme(self, name): + """Sets the install directories by applying the install schemes.""" + # it's the caller's problem if they supply a bad name! + if (hasattr(sys, 'pypy_version_info') and + not name.endswith(('_user', '_home'))): + if os.name == 'nt': + name = 'pypy_nt' + else: + name = 'pypy' + scheme = INSTALL_SCHEMES[name] + for key in SCHEME_KEYS: + attrname = 'install_' + key + if getattr(self, attrname) is None: + setattr(self, attrname, scheme[key]) + + def _expand_attrs(self, attrs): + for attr in attrs: + val = getattr(self, attr) + if val is not None: + if os.name == 'posix' or os.name == 'nt': + val = os.path.expanduser(val) + val = subst_vars(val, self.config_vars) + setattr(self, attr, val) + + def expand_basedirs(self): + """Calls `os.path.expanduser` on install_base, install_platbase and + root.""" + self._expand_attrs(['install_base', 'install_platbase', 'root']) + + def expand_dirs(self): + """Calls `os.path.expanduser` on install dirs.""" + self._expand_attrs(['install_purelib', 'install_platlib', + 'install_lib', 'install_headers', + 'install_scripts', 'install_data',]) + + def convert_paths(self, *names): + """Call `convert_path` over `names`.""" + for name in names: + attr = "install_" + name + setattr(self, attr, convert_path(getattr(self, attr))) + + def handle_extra_path(self): + """Set `path_file` and `extra_dirs` using `extra_path`.""" + if self.extra_path is None: + self.extra_path = self.distribution.extra_path + + if self.extra_path is not None: + log.warn( + "Distribution option extra_path is deprecated. " + "See issue27919 for details." + ) + if isinstance(self.extra_path, str): + self.extra_path = self.extra_path.split(',') + + if len(self.extra_path) == 1: + path_file = extra_dirs = self.extra_path[0] + elif len(self.extra_path) == 2: + path_file, extra_dirs = self.extra_path + else: + raise DistutilsOptionError( + "'extra_path' option must be a list, tuple, or " + "comma-separated string with 1 or 2 elements") + + # convert to local form in case Unix notation used (as it + # should be in setup scripts) + extra_dirs = convert_path(extra_dirs) + else: + path_file = None + extra_dirs = '' + + # XXX should we warn if path_file and not extra_dirs? (in which + # case the path file would be harmless but pointless) + self.path_file = path_file + self.extra_dirs = extra_dirs + + def change_roots(self, *names): + """Change the install directories pointed by name using root.""" + for name in names: + attr = "install_" + name + setattr(self, attr, change_root(self.root, getattr(self, attr))) + + def create_home_path(self): + """Create directories under ~.""" + if not self.user: + return + home = convert_path(os.path.expanduser("~")) + for name, path in self.config_vars.items(): + if path.startswith(home) and not os.path.isdir(path): + self.debug_print("os.makedirs('%s', 0o700)" % path) + os.makedirs(path, 0o700) + + # -- Command execution methods ------------------------------------- + + def run(self): + """Runs the command.""" + # Obviously have to build before we can install + if not self.skip_build: + self.run_command('build') + # If we built for any other platform, we can't install. 
+ build_plat = self.distribution.get_command_obj('build').plat_name + # check warn_dir - it is a clue that the 'install' is happening + # internally, and not to sys.path, so we don't check the platform + # matches what we are running. + if self.warn_dir and build_plat != get_platform(): + raise DistutilsPlatformError("Can't install when " + "cross-compiling") + + # Run all sub-commands (at least those that need to be run) + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + if self.path_file: + self.create_path_file() + + # write list of installed files, if requested. + if self.record: + outputs = self.get_outputs() + if self.root: # strip any package prefix + root_len = len(self.root) + for counter in range(len(outputs)): + outputs[counter] = outputs[counter][root_len:] + self.execute(write_file, + (self.record, outputs), + "writing list of installed files to '%s'" % + self.record) + + sys_path = map(os.path.normpath, sys.path) + sys_path = map(os.path.normcase, sys_path) + install_lib = os.path.normcase(os.path.normpath(self.install_lib)) + if (self.warn_dir and + not (self.path_file and self.install_path_file) and + install_lib not in sys_path): + log.debug(("modules installed to '%s', which is not in " + "Python's module search path (sys.path) -- " + "you'll have to change the search path yourself"), + self.install_lib) + + def create_path_file(self): + """Creates the .pth file""" + filename = os.path.join(self.install_libbase, + self.path_file + ".pth") + if self.install_path_file: + self.execute(write_file, + (filename, [self.extra_dirs]), + "creating %s" % filename) + else: + self.warn("path file '%s' not created" % filename) + + + # -- Reporting methods --------------------------------------------- + + def get_outputs(self): + """Assembles the outputs of all the sub-commands.""" + outputs = [] + for cmd_name in self.get_sub_commands(): + cmd = self.get_finalized_command(cmd_name) + # Add the contents of cmd.get_outputs(), ensuring + # that outputs doesn't contain duplicate entries + for filename in cmd.get_outputs(): + if filename not in outputs: + outputs.append(filename) + + if self.path_file and self.install_path_file: + outputs.append(os.path.join(self.install_libbase, + self.path_file + ".pth")) + + return outputs + + def get_inputs(self): + """Returns the inputs of all the sub-commands""" + # XXX gee, this looks familiar ;-( + inputs = [] + for cmd_name in self.get_sub_commands(): + cmd = self.get_finalized_command(cmd_name) + inputs.extend(cmd.get_inputs()) + + return inputs + + # -- Predicates for sub-command list ------------------------------- + + def has_lib(self): + """Returns true if the current distribution has any Python + modules to install.""" + return (self.distribution.has_pure_modules() or + self.distribution.has_ext_modules()) + + def has_headers(self): + """Returns true if the current distribution has any headers to + install.""" + return self.distribution.has_headers() + + def has_scripts(self): + """Returns true if the current distribution has any scripts to. + install.""" + return self.distribution.has_scripts() + + def has_data(self): + """Returns true if the current distribution has any data to. + install.""" + return self.distribution.has_data_files() + + # 'sub_commands': a list of commands this command might have to run to + # get its work done. See cmd.py for more info. 
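+    # Editor's illustrative note (not part of distutils): get_sub_commands()
+    # evaluates each predicate below against this command instance and keeps
+    # only the names whose predicate returns true (or that have none), so
+    # e.g. 'install_headers' is skipped for a distribution without headers.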
+ sub_commands = [('install_lib', has_lib), + ('install_headers', has_headers), + ('install_scripts', has_scripts), + ('install_data', has_data), + ('install_egg_info', lambda self:True), + ] diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_data.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_data.py new file mode 100644 index 00000000..947cd76a --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/install_data.py @@ -0,0 +1,79 @@ +"""distutils.command.install_data + +Implements the Distutils 'install_data' command, for installing +platform-independent data files.""" + +# contributed by Bastian Kleineidam + +import os +from distutils.core import Command +from distutils.util import change_root, convert_path + +class install_data(Command): + + description = "install data files" + + user_options = [ + ('install-dir=', 'd', + "base directory for installing data files " + "(default: installation base dir)"), + ('root=', None, + "install everything relative to this alternate root directory"), + ('force', 'f', "force installation (overwrite existing files)"), + ] + + boolean_options = ['force'] + + def initialize_options(self): + self.install_dir = None + self.outfiles = [] + self.root = None + self.force = 0 + self.data_files = self.distribution.data_files + self.warn_dir = 1 + + def finalize_options(self): + self.set_undefined_options('install', + ('install_data', 'install_dir'), + ('root', 'root'), + ('force', 'force'), + ) + + def run(self): + self.mkpath(self.install_dir) + for f in self.data_files: + if isinstance(f, str): + # it's a simple file, so copy it + f = convert_path(f) + if self.warn_dir: + self.warn("setup script did not provide a directory for " + "'%s' -- installing right in '%s'" % + (f, self.install_dir)) + (out, _) = self.copy_file(f, self.install_dir) + self.outfiles.append(out) + else: + # it's a tuple with path to install to and a list of files + dir = convert_path(f[0]) + if not os.path.isabs(dir): + dir = os.path.join(self.install_dir, dir) + elif self.root: + dir = change_root(self.root, dir) + self.mkpath(dir) + + if f[1] == []: + # If there are no files listed, the user must be + # trying to create an empty directory, so add the + # directory to the list of output files. + self.outfiles.append(dir) + else: + # Copy files, adding them to the list of output files. 
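+                # Editor's illustrative sketch (not part of distutils):
+                #     data_files=[('share/doc/pkg', ['README', 'docs/usage.txt'])]
+                # copies both files into '<install_dir>/share/doc/pkg' and
+                # records the copied paths in self.outfiles.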
+ for data in f[1]: + data = convert_path(data) + (out, _) = self.copy_file(data, dir) + self.outfiles.append(out) + + def get_inputs(self): + return self.data_files or [] + + def get_outputs(self): + return self.outfiles diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py new file mode 100644 index 00000000..0ddc7367 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py @@ -0,0 +1,77 @@ +"""distutils.command.install_egg_info + +Implements the Distutils 'install_egg_info' command, for installing +a package's PKG-INFO metadata.""" + + +from distutils.cmd import Command +from distutils import log, dir_util +import os, sys, re + +class install_egg_info(Command): + """Install an .egg-info file for the package""" + + description = "Install package's PKG-INFO metadata as an .egg-info file" + user_options = [ + ('install-dir=', 'd', "directory to install to"), + ] + + def initialize_options(self): + self.install_dir = None + + def finalize_options(self): + self.set_undefined_options('install_lib',('install_dir','install_dir')) + basename = "%s-%s-py%d.%d.egg-info" % ( + to_filename(safe_name(self.distribution.get_name())), + to_filename(safe_version(self.distribution.get_version())), + *sys.version_info[:2] + ) + self.target = os.path.join(self.install_dir, basename) + self.outputs = [self.target] + + def run(self): + target = self.target + if os.path.isdir(target) and not os.path.islink(target): + dir_util.remove_tree(target, dry_run=self.dry_run) + elif os.path.exists(target): + self.execute(os.unlink,(self.target,),"Removing "+target) + elif not os.path.isdir(self.install_dir): + self.execute(os.makedirs, (self.install_dir,), + "Creating "+self.install_dir) + log.info("Writing %s", target) + if not self.dry_run: + with open(target, 'w', encoding='UTF-8') as f: + self.distribution.metadata.write_pkg_file(f) + + def get_outputs(self): + return self.outputs + + +# The following routines are taken from setuptools' pkg_resources module and +# can be replaced by importing them from pkg_resources once it is included +# in the stdlib. + +def safe_name(name): + """Convert an arbitrary string to a standard distribution name + + Any runs of non-alphanumeric/. characters are replaced with a single '-'. + """ + return re.sub('[^A-Za-z0-9.]+', '-', name) + + +def safe_version(version): + """Convert an arbitrary string to a standard version string + + Spaces become dots, and all other non-alphanumeric characters become + dashes, with runs of multiple dashes condensed to a single dash. + """ + version = version.replace(' ','.') + return re.sub('[^A-Za-z0-9.]+', '-', version) + + +def to_filename(name): + """Convert a project or version name to its filename-escaped form + + Any '-' characters are currently replaced with '_'. 
+ """ + return name.replace('-','_') diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_headers.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_headers.py new file mode 100644 index 00000000..9bb0b18d --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/install_headers.py @@ -0,0 +1,47 @@ +"""distutils.command.install_headers + +Implements the Distutils 'install_headers' command, to install C/C++ header +files to the Python include directory.""" + +from distutils.core import Command + + +# XXX force is never used +class install_headers(Command): + + description = "install C/C++ header files" + + user_options = [('install-dir=', 'd', + "directory to install header files to"), + ('force', 'f', + "force installation (overwrite existing files)"), + ] + + boolean_options = ['force'] + + def initialize_options(self): + self.install_dir = None + self.force = 0 + self.outfiles = [] + + def finalize_options(self): + self.set_undefined_options('install', + ('install_headers', 'install_dir'), + ('force', 'force')) + + + def run(self): + headers = self.distribution.headers + if not headers: + return + + self.mkpath(self.install_dir) + for header in headers: + (out, _) = self.copy_file(header, self.install_dir) + self.outfiles.append(out) + + def get_inputs(self): + return self.distribution.headers or [] + + def get_outputs(self): + return self.outfiles diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_lib.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_lib.py new file mode 100644 index 00000000..6154cf09 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/install_lib.py @@ -0,0 +1,217 @@ +"""distutils.command.install_lib + +Implements the Distutils 'install_lib' command +(install all Python modules).""" + +import os +import importlib.util +import sys + +from distutils.core import Command +from distutils.errors import DistutilsOptionError + + +# Extension for Python source files. +PYTHON_SOURCE_EXTENSION = ".py" + +class install_lib(Command): + + description = "install all Python modules (extensions and pure Python)" + + # The byte-compilation options are a tad confusing. Here are the + # possible scenarios: + # 1) no compilation at all (--no-compile --no-optimize) + # 2) compile .pyc only (--compile --no-optimize; default) + # 3) compile .pyc and "opt-1" .pyc (--compile --optimize) + # 4) compile "opt-1" .pyc only (--no-compile --optimize) + # 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more) + # 6) compile "opt-2" .pyc only (--no-compile --optimize-more) + # + # The UI for this is two options, 'compile' and 'optimize'. + # 'compile' is strictly boolean, and only decides whether to + # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and + # decides both whether to generate .pyc files and what level of + # optimization to use. 
+ + user_options = [ + ('install-dir=', 'd', "directory to install to"), + ('build-dir=','b', "build directory (where to install from)"), + ('force', 'f', "force installation (overwrite existing files)"), + ('compile', 'c', "compile .py to .pyc [default]"), + ('no-compile', None, "don't compile .py files"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('skip-build', None, "skip the build steps"), + ] + + boolean_options = ['force', 'compile', 'skip-build'] + negative_opt = {'no-compile' : 'compile'} + + def initialize_options(self): + # let the 'install' command dictate our installation directory + self.install_dir = None + self.build_dir = None + self.force = 0 + self.compile = None + self.optimize = None + self.skip_build = None + + def finalize_options(self): + # Get all the information we need to install pure Python modules + # from the umbrella 'install' command -- build (source) directory, + # install (target) directory, and whether to compile .py files. + self.set_undefined_options('install', + ('build_lib', 'build_dir'), + ('install_lib', 'install_dir'), + ('force', 'force'), + ('compile', 'compile'), + ('optimize', 'optimize'), + ('skip_build', 'skip_build'), + ) + + if self.compile is None: + self.compile = True + if self.optimize is None: + self.optimize = False + + if not isinstance(self.optimize, int): + try: + self.optimize = int(self.optimize) + if self.optimize not in (0, 1, 2): + raise AssertionError + except (ValueError, AssertionError): + raise DistutilsOptionError("optimize must be 0, 1, or 2") + + def run(self): + # Make sure we have built everything we need first + self.build() + + # Install everything: simply dump the entire contents of the build + # directory to the installation directory (that's the beauty of + # having a build directory!) + outfiles = self.install() + + # (Optionally) compile .py to .pyc + if outfiles is not None and self.distribution.has_pure_modules(): + self.byte_compile(outfiles) + + # -- Top-level worker functions ------------------------------------ + # (called from 'run()') + + def build(self): + if not self.skip_build: + if self.distribution.has_pure_modules(): + self.run_command('build_py') + if self.distribution.has_ext_modules(): + self.run_command('build_ext') + + def install(self): + if os.path.isdir(self.build_dir): + outfiles = self.copy_tree(self.build_dir, self.install_dir) + else: + self.warn("'%s' does not exist -- no Python modules to install" % + self.build_dir) + return + return outfiles + + def byte_compile(self, files): + if sys.dont_write_bytecode: + self.warn('byte-compiling is disabled, skipping.') + return + + from distutils.util import byte_compile + + # Get the "--root" directory supplied to the "install" command, + # and use it as a prefix to strip off the purported filename + # encoded in bytecode files. This is far from complete, but it + # should at least generate usable bytecode in RPM distributions. 
+ install_root = self.get_finalized_command('install').root + + if self.compile: + byte_compile(files, optimize=0, + force=self.force, prefix=install_root, + dry_run=self.dry_run) + if self.optimize > 0: + byte_compile(files, optimize=self.optimize, + force=self.force, prefix=install_root, + verbose=self.verbose, dry_run=self.dry_run) + + + # -- Utility methods ----------------------------------------------- + + def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir): + if not has_any: + return [] + + build_cmd = self.get_finalized_command(build_cmd) + build_files = build_cmd.get_outputs() + build_dir = getattr(build_cmd, cmd_option) + + prefix_len = len(build_dir) + len(os.sep) + outputs = [] + for file in build_files: + outputs.append(os.path.join(output_dir, file[prefix_len:])) + + return outputs + + def _bytecode_filenames(self, py_filenames): + bytecode_files = [] + for py_file in py_filenames: + # Since build_py handles package data installation, the + # list of outputs can contain more than just .py files. + # Make sure we only report bytecode for the .py files. + ext = os.path.splitext(os.path.normcase(py_file))[1] + if ext != PYTHON_SOURCE_EXTENSION: + continue + if self.compile: + bytecode_files.append(importlib.util.cache_from_source( + py_file, optimization='')) + if self.optimize > 0: + bytecode_files.append(importlib.util.cache_from_source( + py_file, optimization=self.optimize)) + + return bytecode_files + + + # -- External interface -------------------------------------------- + # (called by outsiders) + + def get_outputs(self): + """Return the list of files that would be installed if this command + were actually run. Not affected by the "dry-run" flag or whether + modules have actually been built yet. + """ + pure_outputs = \ + self._mutate_outputs(self.distribution.has_pure_modules(), + 'build_py', 'build_lib', + self.install_dir) + if self.compile: + bytecode_outputs = self._bytecode_filenames(pure_outputs) + else: + bytecode_outputs = [] + + ext_outputs = \ + self._mutate_outputs(self.distribution.has_ext_modules(), + 'build_ext', 'build_lib', + self.install_dir) + + return pure_outputs + bytecode_outputs + ext_outputs + + def get_inputs(self): + """Get the list of files that are input to this command, ie. the + files that get installed as they are named in the build tree. + The files in this list correspond one-to-one to the output + filenames returned by 'get_outputs()'. 
+ """ + inputs = [] + + if self.distribution.has_pure_modules(): + build_py = self.get_finalized_command('build_py') + inputs.extend(build_py.get_outputs()) + + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + inputs.extend(build_ext.get_outputs()) + + return inputs diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_scripts.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_scripts.py new file mode 100644 index 00000000..31a1130e --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/install_scripts.py @@ -0,0 +1,60 @@ +"""distutils.command.install_scripts + +Implements the Distutils 'install_scripts' command, for installing +Python scripts.""" + +# contributed by Bastian Kleineidam + +import os +from distutils.core import Command +from distutils import log +from stat import ST_MODE + + +class install_scripts(Command): + + description = "install scripts (Python or otherwise)" + + user_options = [ + ('install-dir=', 'd', "directory to install scripts to"), + ('build-dir=','b', "build directory (where to install from)"), + ('force', 'f', "force installation (overwrite existing files)"), + ('skip-build', None, "skip the build steps"), + ] + + boolean_options = ['force', 'skip-build'] + + def initialize_options(self): + self.install_dir = None + self.force = 0 + self.build_dir = None + self.skip_build = None + + def finalize_options(self): + self.set_undefined_options('build', ('build_scripts', 'build_dir')) + self.set_undefined_options('install', + ('install_scripts', 'install_dir'), + ('force', 'force'), + ('skip_build', 'skip_build'), + ) + + def run(self): + if not self.skip_build: + self.run_command('build_scripts') + self.outfiles = self.copy_tree(self.build_dir, self.install_dir) + if os.name == 'posix': + # Set the executable bits (owner, group, and world) on + # all the scripts we just installed. + for file in self.get_outputs(): + if self.dry_run: + log.info("changing mode of %s", file) + else: + mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777 + log.info("changing mode of %s to %o", file, mode) + os.chmod(file, mode) + + def get_inputs(self): + return self.distribution.scripts or [] + + def get_outputs(self): + return self.outfiles or [] diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/py37compat.py b/venv/Lib/site-packages/setuptools/_distutils/command/py37compat.py new file mode 100644 index 00000000..754715a5 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/py37compat.py @@ -0,0 +1,30 @@ +import sys + + +def _pythonlib_compat(): + """ + On Python 3.7 and earlier, distutils would include the Python + library. See pypa/distutils#9. 
+ """ + from distutils import sysconfig + if not sysconfig.get_config_var('Py_ENABLED_SHARED'): + return + + yield 'python{}.{}{}'.format( + sys.hexversion >> 24, + (sys.hexversion >> 16) & 0xff, + sysconfig.get_config_var('ABIFLAGS'), + ) + + +def compose(f1, f2): + return lambda *args, **kwargs: f1(f2(*args, **kwargs)) + + +pythonlib = ( + compose(list, _pythonlib_compat) + if sys.version_info < (3, 8) + and sys.platform != 'darwin' + and sys.platform[:3] != 'aix' + else list +) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/register.py b/venv/Lib/site-packages/setuptools/_distutils/command/register.py new file mode 100644 index 00000000..0fac94e9 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/register.py @@ -0,0 +1,304 @@ +"""distutils.command.register + +Implements the Distutils 'register' command (register with the repository). +""" + +# created 2002/10/21, Richard Jones + +import getpass +import io +import urllib.parse, urllib.request +from warnings import warn + +from distutils.core import PyPIRCCommand +from distutils.errors import * +from distutils import log + +class register(PyPIRCCommand): + + description = ("register the distribution with the Python package index") + user_options = PyPIRCCommand.user_options + [ + ('list-classifiers', None, + 'list the valid Trove classifiers'), + ('strict', None , + 'Will stop the registering if the meta-data are not fully compliant') + ] + boolean_options = PyPIRCCommand.boolean_options + [ + 'verify', 'list-classifiers', 'strict'] + + sub_commands = [('check', lambda self: True)] + + def initialize_options(self): + PyPIRCCommand.initialize_options(self) + self.list_classifiers = 0 + self.strict = 0 + + def finalize_options(self): + PyPIRCCommand.finalize_options(self) + # setting options for the `check` subcommand + check_options = {'strict': ('register', self.strict), + 'restructuredtext': ('register', 1)} + self.distribution.command_options['check'] = check_options + + def run(self): + self.finalize_options() + self._set_config() + + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + if self.dry_run: + self.verify_metadata() + elif self.list_classifiers: + self.classifiers() + else: + self.send_metadata() + + def check_metadata(self): + """Deprecated API.""" + warn("distutils.command.register.check_metadata is deprecated, \ + use the check command instead", PendingDeprecationWarning) + check = self.distribution.get_command_obj('check') + check.ensure_finalized() + check.strict = self.strict + check.restructuredtext = 1 + check.run() + + def _set_config(self): + ''' Reads the configuration file and set attributes. + ''' + config = self._read_pypirc() + if config != {}: + self.username = config['username'] + self.password = config['password'] + self.repository = config['repository'] + self.realm = config['realm'] + self.has_config = True + else: + if self.repository not in ('pypi', self.DEFAULT_REPOSITORY): + raise ValueError('%s not found in .pypirc' % self.repository) + if self.repository == 'pypi': + self.repository = self.DEFAULT_REPOSITORY + self.has_config = False + + def classifiers(self): + ''' Fetch the list of classifiers from the server. + ''' + url = self.repository+'?:action=list_classifiers' + response = urllib.request.urlopen(url) + log.info(self._read_pypi_response(response)) + + def verify_metadata(self): + ''' Send the metadata to the package index server to be checked. 
+ ''' + # send the info to the server and report the result + (code, result) = self.post_to_server(self.build_post_data('verify')) + log.info('Server response (%s): %s', code, result) + + def send_metadata(self): + ''' Send the metadata to the package index server. + + Well, do the following: + 1. figure who the user is, and then + 2. send the data as a Basic auth'ed POST. + + First we try to read the username/password from $HOME/.pypirc, + which is a ConfigParser-formatted file with a section + [distutils] containing username and password entries (both + in clear text). Eg: + + [distutils] + index-servers = + pypi + + [pypi] + username: fred + password: sekrit + + Otherwise, to figure who the user is, we offer the user three + choices: + + 1. use existing login, + 2. register as a new user, or + 3. set the password to a random string and email the user. + + ''' + # see if we can short-cut and get the username/password from the + # config + if self.has_config: + choice = '1' + username = self.username + password = self.password + else: + choice = 'x' + username = password = '' + + # get the user's login info + choices = '1 2 3 4'.split() + while choice not in choices: + self.announce('''\ +We need to know who you are, so please choose either: + 1. use your existing login, + 2. register as a new user, + 3. have the server generate a new password for you (and email it to you), or + 4. quit +Your selection [default 1]: ''', log.INFO) + choice = input() + if not choice: + choice = '1' + elif choice not in choices: + print('Please choose one of the four options!') + + if choice == '1': + # get the username and password + while not username: + username = input('Username: ') + while not password: + password = getpass.getpass('Password: ') + + # set up the authentication + auth = urllib.request.HTTPPasswordMgr() + host = urllib.parse.urlparse(self.repository)[1] + auth.add_password(self.realm, host, username, password) + # send the info to the server and report the result + code, result = self.post_to_server(self.build_post_data('submit'), + auth) + self.announce('Server response (%s): %s' % (code, result), + log.INFO) + + # possibly save the login + if code == 200: + if self.has_config: + # sharing the password in the distribution instance + # so the upload command can reuse it + self.distribution.password = password + else: + self.announce(('I can store your PyPI login so future ' + 'submissions will be faster.'), log.INFO) + self.announce('(the login will be stored in %s)' % \ + self._get_rc_file(), log.INFO) + choice = 'X' + while choice.lower() not in 'yn': + choice = input('Save your login (y/N)?') + if not choice: + choice = 'n' + if choice.lower() == 'y': + self._store_pypirc(username, password) + + elif choice == '2': + data = {':action': 'user'} + data['name'] = data['password'] = data['email'] = '' + data['confirm'] = None + while not data['name']: + data['name'] = input('Username: ') + while data['password'] != data['confirm']: + while not data['password']: + data['password'] = getpass.getpass('Password: ') + while not data['confirm']: + data['confirm'] = getpass.getpass(' Confirm: ') + if data['password'] != data['confirm']: + data['password'] = '' + data['confirm'] = None + print("Password and confirm don't match!") + while not data['email']: + data['email'] = input(' EMail: ') + code, result = self.post_to_server(data) + if code != 200: + log.info('Server response (%s): %s', code, result) + else: + log.info('You will receive an email shortly.') + log.info(('Follow the instructions 
in it to ' + 'complete registration.')) + elif choice == '3': + data = {':action': 'password_reset'} + data['email'] = '' + while not data['email']: + data['email'] = input('Your email address: ') + code, result = self.post_to_server(data) + log.info('Server response (%s): %s', code, result) + + def build_post_data(self, action): + # figure the data to send - the metadata plus some additional + # information used by the package server + meta = self.distribution.metadata + data = { + ':action': action, + 'metadata_version' : '1.0', + 'name': meta.get_name(), + 'version': meta.get_version(), + 'summary': meta.get_description(), + 'home_page': meta.get_url(), + 'author': meta.get_contact(), + 'author_email': meta.get_contact_email(), + 'license': meta.get_licence(), + 'description': meta.get_long_description(), + 'keywords': meta.get_keywords(), + 'platform': meta.get_platforms(), + 'classifiers': meta.get_classifiers(), + 'download_url': meta.get_download_url(), + # PEP 314 + 'provides': meta.get_provides(), + 'requires': meta.get_requires(), + 'obsoletes': meta.get_obsoletes(), + } + if data['provides'] or data['requires'] or data['obsoletes']: + data['metadata_version'] = '1.1' + return data + + def post_to_server(self, data, auth=None): + ''' Post a query to the server, and return a string response. + ''' + if 'name' in data: + self.announce('Registering %s to %s' % (data['name'], + self.repository), + log.INFO) + # Build up the MIME payload for the urllib2 POST data + boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = '\n--' + boundary + end_boundary = sep_boundary + '--' + body = io.StringIO() + for key, value in data.items(): + # handle multiple entries for the same name + if type(value) not in (type([]), type( () )): + value = [value] + for value in value: + value = str(value) + body.write(sep_boundary) + body.write('\nContent-Disposition: form-data; name="%s"'%key) + body.write("\n\n") + body.write(value) + if value and value[-1] == '\r': + body.write('\n') # write an extra newline (lurve Macs) + body.write(end_boundary) + body.write("\n") + body = body.getvalue().encode("utf-8") + + # build the Request + headers = { + 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary, + 'Content-length': str(len(body)) + } + req = urllib.request.Request(self.repository, body, headers) + + # handle HTTP and include the Basic Auth handler + opener = urllib.request.build_opener( + urllib.request.HTTPBasicAuthHandler(password_mgr=auth) + ) + data = '' + try: + result = opener.open(req) + except urllib.error.HTTPError as e: + if self.show_response: + data = e.fp.read() + result = e.code, e.msg + except urllib.error.URLError as e: + result = 500, str(e) + else: + if self.show_response: + data = self._read_pypi_response(result) + result = 200, 'OK' + if self.show_response: + msg = '\n'.join(('-' * 75, data, '-' * 75)) + self.announce(msg, log.INFO) + return result diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/sdist.py b/venv/Lib/site-packages/setuptools/_distutils/command/sdist.py new file mode 100644 index 00000000..b4996fcb --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/sdist.py @@ -0,0 +1,494 @@ +"""distutils.command.sdist + +Implements the Distutils 'sdist' command (create a source distribution).""" + +import os +import sys +from glob import glob +from warnings import warn + +from distutils.core import Command +from distutils import dir_util +from distutils import file_util +from distutils import 
archive_util +from distutils.text_file import TextFile +from distutils.filelist import FileList +from distutils import log +from distutils.util import convert_path +from distutils.errors import DistutilsTemplateError, DistutilsOptionError + + +def show_formats(): + """Print all possible values for the 'formats' option (used by + the "--help-formats" command-line option). + """ + from distutils.fancy_getopt import FancyGetopt + from distutils.archive_util import ARCHIVE_FORMATS + formats = [] + for format in ARCHIVE_FORMATS.keys(): + formats.append(("formats=" + format, None, + ARCHIVE_FORMATS[format][2])) + formats.sort() + FancyGetopt(formats).print_help( + "List of available source distribution formats:") + + +class sdist(Command): + + description = "create a source distribution (tarball, zip file, etc.)" + + def checking_metadata(self): + """Callable used for the check sub-command. + + Placed here so user_options can view it""" + return self.metadata_check + + user_options = [ + ('template=', 't', + "name of manifest template file [default: MANIFEST.in]"), + ('manifest=', 'm', + "name of manifest file [default: MANIFEST]"), + ('use-defaults', None, + "include the default file set in the manifest " + "[default; disable with --no-defaults]"), + ('no-defaults', None, + "don't include the default file set"), + ('prune', None, + "specifically exclude files/directories that should not be " + "distributed (build tree, RCS/CVS dirs, etc.) " + "[default; disable with --no-prune]"), + ('no-prune', None, + "don't automatically exclude anything"), + ('manifest-only', 'o', + "just regenerate the manifest and then stop " + "(implies --force-manifest)"), + ('force-manifest', 'f', + "forcibly regenerate the manifest and carry on as usual. " + "Deprecated: now the manifest is always regenerated."), + ('formats=', None, + "formats for source distribution (comma-separated list)"), + ('keep-temp', 'k', + "keep the distribution tree around after creating " + + "archive file(s)"), + ('dist-dir=', 'd', + "directory to put the source distribution archive(s) in " + "[default: dist]"), + ('metadata-check', None, + "Ensure that all required elements of meta-data " + "are supplied. Warn if any missing. [default]"), + ('owner=', 'u', + "Owner name used when creating a tar file [default: current user]"), + ('group=', 'g', + "Group name used when creating a tar file [default: current group]"), + ] + + boolean_options = ['use-defaults', 'prune', + 'manifest-only', 'force-manifest', + 'keep-temp', 'metadata-check'] + + help_options = [ + ('help-formats', None, + "list available distribution formats", show_formats), + ] + + negative_opt = {'no-defaults': 'use-defaults', + 'no-prune': 'prune' } + + sub_commands = [('check', checking_metadata)] + + READMES = ('README', 'README.txt', 'README.rst') + + def initialize_options(self): + # 'template' and 'manifest' are, respectively, the names of + # the manifest template and manifest file. 
+ self.template = None + self.manifest = None + + # 'use_defaults': if true, we will include the default file set + # in the manifest + self.use_defaults = 1 + self.prune = 1 + + self.manifest_only = 0 + self.force_manifest = 0 + + self.formats = ['gztar'] + self.keep_temp = 0 + self.dist_dir = None + + self.archive_files = None + self.metadata_check = 1 + self.owner = None + self.group = None + + def finalize_options(self): + if self.manifest is None: + self.manifest = "MANIFEST" + if self.template is None: + self.template = "MANIFEST.in" + + self.ensure_string_list('formats') + + bad_format = archive_util.check_archive_formats(self.formats) + if bad_format: + raise DistutilsOptionError( + "unknown archive format '%s'" % bad_format) + + if self.dist_dir is None: + self.dist_dir = "dist" + + def run(self): + # 'filelist' contains the list of files that will make up the + # manifest + self.filelist = FileList() + + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + # Do whatever it takes to get the list of files to process + # (process the manifest template, read an existing manifest, + # whatever). File list is accumulated in 'self.filelist'. + self.get_file_list() + + # If user just wanted us to regenerate the manifest, stop now. + if self.manifest_only: + return + + # Otherwise, go ahead and create the source distribution tarball, + # or zipfile, or whatever. + self.make_distribution() + + def check_metadata(self): + """Deprecated API.""" + warn("distutils.command.sdist.check_metadata is deprecated, \ + use the check command instead", PendingDeprecationWarning) + check = self.distribution.get_command_obj('check') + check.ensure_finalized() + check.run() + + def get_file_list(self): + """Figure out the list of files to include in the source + distribution, and put it in 'self.filelist'. This might involve + reading the manifest template (and writing the manifest), or just + reading the manifest, or just using the default file set -- it all + depends on the user's options. + """ + # new behavior when using a template: + # the file list is recalculated every time because + # even if MANIFEST.in or setup.py are not changed + # the user might have added some files in the tree that + # need to be included. + # + # This makes --force the default and only behavior with templates. + template_exists = os.path.isfile(self.template) + if not template_exists and self._manifest_is_not_generated(): + self.read_manifest() + self.filelist.sort() + self.filelist.remove_duplicates() + return + + if not template_exists: + self.warn(("manifest template '%s' does not exist " + + "(using default file list)") % + self.template) + self.filelist.findall() + + if self.use_defaults: + self.add_defaults() + + if template_exists: + self.read_template() + + if self.prune: + self.prune_file_list() + + self.filelist.sort() + self.filelist.remove_duplicates() + self.write_manifest() + + def add_defaults(self): + """Add all the default files to self.filelist: + - README or README.txt + - setup.py + - test/test*.py + - all pure Python modules mentioned in setup script + - all files pointed by package_data (build_py) + - all files defined in data_files. + - all files defined as scripts. + - all C sources listed as part of extensions or C libraries + in the setup script (doesn't catch C headers!) + Warns if (README or README.txt) or setup.py are missing; everything + else is optional. 
+ """ + self._add_defaults_standards() + self._add_defaults_optional() + self._add_defaults_python() + self._add_defaults_data_files() + self._add_defaults_ext() + self._add_defaults_c_libs() + self._add_defaults_scripts() + + @staticmethod + def _cs_path_exists(fspath): + """ + Case-sensitive path existence check + + >>> sdist._cs_path_exists(__file__) + True + >>> sdist._cs_path_exists(__file__.upper()) + False + """ + if not os.path.exists(fspath): + return False + # make absolute so we always have a directory + abspath = os.path.abspath(fspath) + directory, filename = os.path.split(abspath) + return filename in os.listdir(directory) + + def _add_defaults_standards(self): + standards = [self.READMES, self.distribution.script_name] + for fn in standards: + if isinstance(fn, tuple): + alts = fn + got_it = False + for fn in alts: + if self._cs_path_exists(fn): + got_it = True + self.filelist.append(fn) + break + + if not got_it: + self.warn("standard file not found: should have one of " + + ', '.join(alts)) + else: + if self._cs_path_exists(fn): + self.filelist.append(fn) + else: + self.warn("standard file '%s' not found" % fn) + + def _add_defaults_optional(self): + optional = ['test/test*.py', 'setup.cfg'] + for pattern in optional: + files = filter(os.path.isfile, glob(pattern)) + self.filelist.extend(files) + + def _add_defaults_python(self): + # build_py is used to get: + # - python modules + # - files defined in package_data + build_py = self.get_finalized_command('build_py') + + # getting python files + if self.distribution.has_pure_modules(): + self.filelist.extend(build_py.get_source_files()) + + # getting package_data files + # (computed in build_py.data_files by build_py.finalize_options) + for pkg, src_dir, build_dir, filenames in build_py.data_files: + for filename in filenames: + self.filelist.append(os.path.join(src_dir, filename)) + + def _add_defaults_data_files(self): + # getting distribution.data_files + if self.distribution.has_data_files(): + for item in self.distribution.data_files: + if isinstance(item, str): + # plain file + item = convert_path(item) + if os.path.isfile(item): + self.filelist.append(item) + else: + # a (dirname, filenames) tuple + dirname, filenames = item + for f in filenames: + f = convert_path(f) + if os.path.isfile(f): + self.filelist.append(f) + + def _add_defaults_ext(self): + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + self.filelist.extend(build_ext.get_source_files()) + + def _add_defaults_c_libs(self): + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.filelist.extend(build_clib.get_source_files()) + + def _add_defaults_scripts(self): + if self.distribution.has_scripts(): + build_scripts = self.get_finalized_command('build_scripts') + self.filelist.extend(build_scripts.get_source_files()) + + def read_template(self): + """Read and parse manifest template file named by self.template. + + (usually "MANIFEST.in") The parsing and processing is done by + 'self.filelist', which updates itself accordingly. 
+ """ + log.info("reading manifest template '%s'", self.template) + template = TextFile(self.template, strip_comments=1, skip_blanks=1, + join_lines=1, lstrip_ws=1, rstrip_ws=1, + collapse_join=1) + + try: + while True: + line = template.readline() + if line is None: # end of file + break + + try: + self.filelist.process_template_line(line) + # the call above can raise a DistutilsTemplateError for + # malformed lines, or a ValueError from the lower-level + # convert_path function + except (DistutilsTemplateError, ValueError) as msg: + self.warn("%s, line %d: %s" % (template.filename, + template.current_line, + msg)) + finally: + template.close() + + def prune_file_list(self): + """Prune off branches that might slip into the file list as created + by 'read_template()', but really don't belong there: + * the build tree (typically "build") + * the release tree itself (only an issue if we ran "sdist" + previously with --keep-temp, or it aborted) + * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories + """ + build = self.get_finalized_command('build') + base_dir = self.distribution.get_fullname() + + self.filelist.exclude_pattern(None, prefix=build.build_base) + self.filelist.exclude_pattern(None, prefix=base_dir) + + if sys.platform == 'win32': + seps = r'/|\\' + else: + seps = '/' + + vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', + '_darcs'] + vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps) + self.filelist.exclude_pattern(vcs_ptrn, is_regex=1) + + def write_manifest(self): + """Write the file list in 'self.filelist' (presumably as filled in + by 'add_defaults()' and 'read_template()') to the manifest file + named by 'self.manifest'. + """ + if self._manifest_is_not_generated(): + log.info("not writing to manually maintained " + "manifest file '%s'" % self.manifest) + return + + content = self.filelist.files[:] + content.insert(0, '# file GENERATED by distutils, do NOT edit') + self.execute(file_util.write_file, (self.manifest, content), + "writing manifest file '%s'" % self.manifest) + + def _manifest_is_not_generated(self): + # check for special comment used in 3.1.3 and higher + if not os.path.isfile(self.manifest): + return False + + fp = open(self.manifest) + try: + first_line = fp.readline() + finally: + fp.close() + return first_line != '# file GENERATED by distutils, do NOT edit\n' + + def read_manifest(self): + """Read the manifest file (named by 'self.manifest') and use it to + fill in 'self.filelist', the list of files to include in the source + distribution. + """ + log.info("reading manifest file '%s'", self.manifest) + with open(self.manifest) as manifest: + for line in manifest: + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue + self.filelist.append(line) + + def make_release_tree(self, base_dir, files): + """Create the directory tree that will become the source + distribution archive. All directories implied by the filenames in + 'files' are created under 'base_dir', and then we hard link or copy + (if hard linking is unavailable) those files into place. + Essentially, this duplicates the developer's source tree, but in a + directory named after the distribution, containing only the files + to be distributed. + """ + # Create all the directories under 'base_dir' necessary to + # put 'files' there; the 'mkpath()' is just so we don't die + # if the manifest happens to be empty. 
+ self.mkpath(base_dir) + dir_util.create_tree(base_dir, files, dry_run=self.dry_run) + + # And walk over the list of files, either making a hard link (if + # os.link exists) to each one that doesn't already exist in its + # corresponding location under 'base_dir', or copying each file + # that's out-of-date in 'base_dir'. (Usually, all files will be + # out-of-date, because by default we blow away 'base_dir' when + # we're done making the distribution archives.) + + if hasattr(os, 'link'): # can make hard links on this system + link = 'hard' + msg = "making hard links in %s..." % base_dir + else: # nope, have to copy + link = None + msg = "copying files to %s..." % base_dir + + if not files: + log.warn("no files to distribute -- empty manifest?") + else: + log.info(msg) + for file in files: + if not os.path.isfile(file): + log.warn("'%s' not a regular file -- skipping", file) + else: + dest = os.path.join(base_dir, file) + self.copy_file(file, dest, link=link) + + self.distribution.metadata.write_pkg_info(base_dir) + + def make_distribution(self): + """Create the source distribution(s). First, we create the release + tree with 'make_release_tree()'; then, we create all required + archive files (according to 'self.formats') from the release tree. + Finally, we clean up by blowing away the release tree (unless + 'self.keep_temp' is true). The list of archive files created is + stored so it can be retrieved later by 'get_archive_files()'. + """ + # Don't warn about missing meta-data here -- should be (and is!) + # done elsewhere. + base_dir = self.distribution.get_fullname() + base_name = os.path.join(self.dist_dir, base_dir) + + self.make_release_tree(base_dir, self.filelist.files) + archive_files = [] # remember names of files we create + # tar archive must be created last to avoid overwrite and remove + if 'tar' in self.formats: + self.formats.append(self.formats.pop(self.formats.index('tar'))) + + for fmt in self.formats: + file = self.make_archive(base_name, fmt, base_dir=base_dir, + owner=self.owner, group=self.group) + archive_files.append(file) + self.distribution.dist_files.append(('sdist', '', file)) + + self.archive_files = archive_files + + if not self.keep_temp: + dir_util.remove_tree(base_dir, dry_run=self.dry_run) + + def get_archive_files(self): + """Return the list of archive files created when the command + was run, or None if the command hasn't run yet. + """ + return self.archive_files diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/upload.py b/venv/Lib/site-packages/setuptools/_distutils/command/upload.py new file mode 100644 index 00000000..95e9fda1 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/command/upload.py @@ -0,0 +1,214 @@ +""" +distutils.command.upload + +Implements the Distutils 'upload' subcommand (upload package to a package +index). 
+""" + +import os +import io +import hashlib +from base64 import standard_b64encode +from urllib.request import urlopen, Request, HTTPError +from urllib.parse import urlparse +from distutils.errors import DistutilsError, DistutilsOptionError +from distutils.core import PyPIRCCommand +from distutils.spawn import spawn +from distutils import log + + +# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256) +# https://bugs.python.org/issue40698 +_FILE_CONTENT_DIGESTS = { + "md5_digest": getattr(hashlib, "md5", None), + "sha256_digest": getattr(hashlib, "sha256", None), + "blake2_256_digest": getattr(hashlib, "blake2b", None), +} + + +class upload(PyPIRCCommand): + + description = "upload binary package to PyPI" + + user_options = PyPIRCCommand.user_options + [ + ('sign', 's', + 'sign files to upload using gpg'), + ('identity=', 'i', 'GPG identity used to sign files'), + ] + + boolean_options = PyPIRCCommand.boolean_options + ['sign'] + + def initialize_options(self): + PyPIRCCommand.initialize_options(self) + self.username = '' + self.password = '' + self.show_response = 0 + self.sign = False + self.identity = None + + def finalize_options(self): + PyPIRCCommand.finalize_options(self) + if self.identity and not self.sign: + raise DistutilsOptionError( + "Must use --sign for --identity to have meaning" + ) + config = self._read_pypirc() + if config != {}: + self.username = config['username'] + self.password = config['password'] + self.repository = config['repository'] + self.realm = config['realm'] + + # getting the password from the distribution + # if previously set by the register command + if not self.password and self.distribution.password: + self.password = self.distribution.password + + def run(self): + if not self.distribution.dist_files: + msg = ("Must create and upload files in one command " + "(e.g. 
setup.py sdist upload)") + raise DistutilsOptionError(msg) + for command, pyversion, filename in self.distribution.dist_files: + self.upload_file(command, pyversion, filename) + + def upload_file(self, command, pyversion, filename): + # Makes sure the repository URL is compliant + schema, netloc, url, params, query, fragments = \ + urlparse(self.repository) + if params or query or fragments: + raise AssertionError("Incompatible url %s" % self.repository) + + if schema not in ('http', 'https'): + raise AssertionError("unsupported schema " + schema) + + # Sign if requested + if self.sign: + gpg_args = ["gpg", "--detach-sign", "-a", filename] + if self.identity: + gpg_args[2:2] = ["--local-user", self.identity] + spawn(gpg_args, + dry_run=self.dry_run) + + # Fill in the data - send all the meta-data in case we need to + # register a new release + f = open(filename,'rb') + try: + content = f.read() + finally: + f.close() + + meta = self.distribution.metadata + data = { + # action + ':action': 'file_upload', + 'protocol_version': '1', + + # identify release + 'name': meta.get_name(), + 'version': meta.get_version(), + + # file content + 'content': (os.path.basename(filename),content), + 'filetype': command, + 'pyversion': pyversion, + + # additional meta-data + 'metadata_version': '1.0', + 'summary': meta.get_description(), + 'home_page': meta.get_url(), + 'author': meta.get_contact(), + 'author_email': meta.get_contact_email(), + 'license': meta.get_licence(), + 'description': meta.get_long_description(), + 'keywords': meta.get_keywords(), + 'platform': meta.get_platforms(), + 'classifiers': meta.get_classifiers(), + 'download_url': meta.get_download_url(), + # PEP 314 + 'provides': meta.get_provides(), + 'requires': meta.get_requires(), + 'obsoletes': meta.get_obsoletes(), + } + + data['comment'] = '' + + # file content digests + for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items(): + if digest_cons is None: + continue + try: + data[digest_name] = digest_cons(content).hexdigest() + except ValueError: + # hash digest not available or blocked by security policy + pass + + if self.sign: + with open(filename + ".asc", "rb") as f: + data['gpg_signature'] = (os.path.basename(filename) + ".asc", + f.read()) + + # set up the authentication + user_pass = (self.username + ":" + self.password).encode('ascii') + # The exact encoding of the authentication string is debated. + # Anyway PyPI only accepts ascii for both username or password. 
+ auth = "Basic " + standard_b64encode(user_pass).decode('ascii') + + # Build up the MIME payload for the POST data + boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = b'\r\n--' + boundary.encode('ascii') + end_boundary = sep_boundary + b'--\r\n' + body = io.BytesIO() + for key, value in data.items(): + title = '\r\nContent-Disposition: form-data; name="%s"' % key + # handle multiple entries for the same name + if not isinstance(value, list): + value = [value] + for value in value: + if type(value) is tuple: + title += '; filename="%s"' % value[0] + value = value[1] + else: + value = str(value).encode('utf-8') + body.write(sep_boundary) + body.write(title.encode('utf-8')) + body.write(b"\r\n\r\n") + body.write(value) + body.write(end_boundary) + body = body.getvalue() + + msg = "Submitting %s to %s" % (filename, self.repository) + self.announce(msg, log.INFO) + + # build the Request + headers = { + 'Content-type': 'multipart/form-data; boundary=%s' % boundary, + 'Content-length': str(len(body)), + 'Authorization': auth, + } + + request = Request(self.repository, data=body, + headers=headers) + # send the data + try: + result = urlopen(request) + status = result.getcode() + reason = result.msg + except HTTPError as e: + status = e.code + reason = e.msg + except OSError as e: + self.announce(str(e), log.ERROR) + raise + + if status == 200: + self.announce('Server response (%s): %s' % (status, reason), + log.INFO) + if self.show_response: + text = self._read_pypi_response(result) + msg = '\n'.join(('-' * 75, text, '-' * 75)) + self.announce(msg, log.INFO) + else: + msg = 'Upload failed (%s): %s' % (status, reason) + self.announce(msg, log.ERROR) + raise DistutilsError(msg) diff --git a/venv/Lib/site-packages/setuptools/_distutils/config.py b/venv/Lib/site-packages/setuptools/_distutils/config.py new file mode 100644 index 00000000..2171abd6 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/config.py @@ -0,0 +1,130 @@ +"""distutils.pypirc + +Provides the PyPIRCCommand class, the base class for the command classes +that uses .pypirc in the distutils.command package. 
+""" +import os +from configparser import RawConfigParser + +from distutils.cmd import Command + +DEFAULT_PYPIRC = """\ +[distutils] +index-servers = + pypi + +[pypi] +username:%s +password:%s +""" + +class PyPIRCCommand(Command): + """Base command that knows how to handle the .pypirc file + """ + DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' + DEFAULT_REALM = 'pypi' + repository = None + realm = None + + user_options = [ + ('repository=', 'r', + "url of repository [default: %s]" % \ + DEFAULT_REPOSITORY), + ('show-response', None, + 'display full response text from server')] + + boolean_options = ['show-response'] + + def _get_rc_file(self): + """Returns rc file path.""" + return os.path.join(os.path.expanduser('~'), '.pypirc') + + def _store_pypirc(self, username, password): + """Creates a default .pypirc file.""" + rc = self._get_rc_file() + with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f: + f.write(DEFAULT_PYPIRC % (username, password)) + + def _read_pypirc(self): + """Reads the .pypirc file.""" + rc = self._get_rc_file() + if os.path.exists(rc): + self.announce('Using PyPI login from %s' % rc) + repository = self.repository or self.DEFAULT_REPOSITORY + + config = RawConfigParser() + config.read(rc) + sections = config.sections() + if 'distutils' in sections: + # let's get the list of servers + index_servers = config.get('distutils', 'index-servers') + _servers = [server.strip() for server in + index_servers.split('\n') + if server.strip() != ''] + if _servers == []: + # nothing set, let's try to get the default pypi + if 'pypi' in sections: + _servers = ['pypi'] + else: + # the file is not properly defined, returning + # an empty dict + return {} + for server in _servers: + current = {'server': server} + current['username'] = config.get(server, 'username') + + # optional params + for key, default in (('repository', + self.DEFAULT_REPOSITORY), + ('realm', self.DEFAULT_REALM), + ('password', None)): + if config.has_option(server, key): + current[key] = config.get(server, key) + else: + current[key] = default + + # work around people having "repository" for the "pypi" + # section of their config set to the HTTP (rather than + # HTTPS) URL + if (server == 'pypi' and + repository in (self.DEFAULT_REPOSITORY, 'pypi')): + current['repository'] = self.DEFAULT_REPOSITORY + return current + + if (current['server'] == repository or + current['repository'] == repository): + return current + elif 'server-login' in sections: + # old format + server = 'server-login' + if config.has_option(server, 'repository'): + repository = config.get(server, 'repository') + else: + repository = self.DEFAULT_REPOSITORY + return {'username': config.get(server, 'username'), + 'password': config.get(server, 'password'), + 'repository': repository, + 'server': server, + 'realm': self.DEFAULT_REALM} + + return {} + + def _read_pypi_response(self, response): + """Read and decode a PyPI HTTP response.""" + import cgi + content_type = response.getheader('content-type', 'text/plain') + encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii') + return response.read().decode(encoding) + + def initialize_options(self): + """Initialize options.""" + self.repository = None + self.realm = None + self.show_response = 0 + + def finalize_options(self): + """Finalizes options.""" + if self.repository is None: + self.repository = self.DEFAULT_REPOSITORY + if self.realm is None: + self.realm = self.DEFAULT_REALM diff --git a/venv/Lib/site-packages/setuptools/_distutils/core.py 
b/venv/Lib/site-packages/setuptools/_distutils/core.py new file mode 100644 index 00000000..d603d4a4 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/core.py @@ -0,0 +1,234 @@ +"""distutils.core + +The only module that needs to be imported to use the Distutils; provides +the 'setup' function (which is to be called from the setup script). Also +indirectly provides the Distribution and Command classes, although they are +really defined in distutils.dist and distutils.cmd. +""" + +import os +import sys + +from distutils.debug import DEBUG +from distutils.errors import * + +# Mainly import these so setup scripts can "from distutils.core import" them. +from distutils.dist import Distribution +from distutils.cmd import Command +from distutils.config import PyPIRCCommand +from distutils.extension import Extension + +# This is a barebones help message generated displayed when the user +# runs the setup script with no arguments at all. More useful help +# is generated with various --help options: global help, list commands, +# and per-command help. +USAGE = """\ +usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] + or: %(script)s --help [cmd1 cmd2 ...] + or: %(script)s --help-commands + or: %(script)s cmd --help +""" + +def gen_usage (script_name): + script = os.path.basename(script_name) + return USAGE % vars() + + +# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'. +_setup_stop_after = None +_setup_distribution = None + +# Legal keyword arguments for the setup() function +setup_keywords = ('distclass', 'script_name', 'script_args', 'options', + 'name', 'version', 'author', 'author_email', + 'maintainer', 'maintainer_email', 'url', 'license', + 'description', 'long_description', 'keywords', + 'platforms', 'classifiers', 'download_url', + 'requires', 'provides', 'obsoletes', + ) + +# Legal keyword arguments for the Extension constructor +extension_keywords = ('name', 'sources', 'include_dirs', + 'define_macros', 'undef_macros', + 'library_dirs', 'libraries', 'runtime_library_dirs', + 'extra_objects', 'extra_compile_args', 'extra_link_args', + 'swig_opts', 'export_symbols', 'depends', 'language') + +def setup (**attrs): + """The gateway to the Distutils: do everything your setup script needs + to do, in a highly flexible and user-driven way. Briefly: create a + Distribution instance; find and parse config files; parse the command + line; run each Distutils command found there, customized by the options + supplied to 'setup()' (as keyword arguments), in config files, and on + the command line. + + The Distribution instance might be an instance of a class supplied via + the 'distclass' keyword argument to 'setup'; if no such class is + supplied, then the Distribution class (in dist.py) is instantiated. + All other arguments to 'setup' (except for 'cmdclass') are used to set + attributes of the Distribution instance. + + The 'cmdclass' argument, if supplied, is a dictionary mapping command + names to command classes. Each command encountered on the command line + will be turned into a command class, which is in turn instantiated; any + class found in 'cmdclass' is used in place of the default, which is + (for command 'foo_bar') class 'foo_bar' in module + 'distutils.command.foo_bar'. The command class must provide a + 'user_options' attribute which is a list of option specifiers for + 'distutils.fancy_getopt'. Any command-line options between the current + and the next command are used to set attributes of the current command + object. 
+ + When the entire command-line has been successfully parsed, calls the + 'run()' method on each command object in turn. This method will be + driven entirely by the Distribution object (which each command object + has a reference to, thanks to its constructor), and the + command-specific options that became attributes of each command + object. + """ + + global _setup_stop_after, _setup_distribution + + # Determine the distribution class -- either caller-supplied or + # our Distribution (see below). + klass = attrs.get('distclass') + if klass: + del attrs['distclass'] + else: + klass = Distribution + + if 'script_name' not in attrs: + attrs['script_name'] = os.path.basename(sys.argv[0]) + if 'script_args' not in attrs: + attrs['script_args'] = sys.argv[1:] + + # Create the Distribution instance, using the remaining arguments + # (ie. everything except distclass) to initialize it + try: + _setup_distribution = dist = klass(attrs) + except DistutilsSetupError as msg: + if 'name' not in attrs: + raise SystemExit("error in setup command: %s" % msg) + else: + raise SystemExit("error in %s setup command: %s" % \ + (attrs['name'], msg)) + + if _setup_stop_after == "init": + return dist + + # Find and parse the config file(s): they will override options from + # the setup script, but be overridden by the command line. + dist.parse_config_files() + + if DEBUG: + print("options (after parsing config files):") + dist.dump_option_dicts() + + if _setup_stop_after == "config": + return dist + + # Parse the command line and override config files; any + # command-line errors are the end user's fault, so turn them into + # SystemExit to suppress tracebacks. + try: + ok = dist.parse_command_line() + except DistutilsArgError as msg: + raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg) + + if DEBUG: + print("options (after parsing command line):") + dist.dump_option_dicts() + + if _setup_stop_after == "commandline": + return dist + + # And finally, run all the commands found on the command line. + if ok: + try: + dist.run_commands() + except KeyboardInterrupt: + raise SystemExit("interrupted") + except OSError as exc: + if DEBUG: + sys.stderr.write("error: %s\n" % (exc,)) + raise + else: + raise SystemExit("error: %s" % (exc,)) + + except (DistutilsError, + CCompilerError) as msg: + if DEBUG: + raise + else: + raise SystemExit("error: " + str(msg)) + + return dist + +# setup () + + +def run_setup (script_name, script_args=None, stop_after="run"): + """Run a setup script in a somewhat controlled environment, and + return the Distribution instance that drives things. This is useful + if you need to find out the distribution meta-data (passed as + keyword args from 'script' to 'setup()', or the contents of the + config files or command-line. + + 'script_name' is a file that will be read and run with 'exec()'; + 'sys.argv[0]' will be replaced with 'script' for the duration of the + call. 'script_args' is a list of strings; if supplied, + 'sys.argv[1:]' will be replaced by 'script_args' for the duration of + the call. 
+ + 'stop_after' tells 'setup()' when to stop processing; possible + values: + init + stop after the Distribution instance has been created and + populated with the keyword arguments to 'setup()' + config + stop after config files have been parsed (and their data + stored in the Distribution instance) + commandline + stop after the command-line ('sys.argv[1:]' or 'script_args') + have been parsed (and the data stored in the Distribution) + run [default] + stop after all commands have been run (the same as if 'setup()' + had been called in the usual way + + Returns the Distribution instance, which provides all information + used to drive the Distutils. + """ + if stop_after not in ('init', 'config', 'commandline', 'run'): + raise ValueError("invalid value for 'stop_after': %r" % (stop_after,)) + + global _setup_stop_after, _setup_distribution + _setup_stop_after = stop_after + + save_argv = sys.argv.copy() + g = {'__file__': script_name} + try: + try: + sys.argv[0] = script_name + if script_args is not None: + sys.argv[1:] = script_args + with open(script_name, 'rb') as f: + exec(f.read(), g) + finally: + sys.argv = save_argv + _setup_stop_after = None + except SystemExit: + # Hmm, should we do something if exiting with a non-zero code + # (ie. error)? + pass + + if _setup_distribution is None: + raise RuntimeError(("'distutils.core.setup()' was never called -- " + "perhaps '%s' is not a Distutils setup script?") % \ + script_name) + + # I wonder if the setup script's namespace -- g and l -- would be of + # any interest to callers? + #print "_setup_distribution:", _setup_distribution + return _setup_distribution + +# run_setup () diff --git a/venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py new file mode 100644 index 00000000..66c12dd3 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py @@ -0,0 +1,403 @@ +"""distutils.cygwinccompiler + +Provides the CygwinCCompiler class, a subclass of UnixCCompiler that +handles the Cygwin port of the GNU C compiler to Windows. It also contains +the Mingw32CCompiler class which handles the mingw32 port of GCC (same as +cygwin in no-cygwin mode). +""" + +# problems: +# +# * if you use a msvc compiled python version (1.5.2) +# 1. you have to insert a __GNUC__ section in its config.h +# 2. you have to generate an import library for its dll +# - create a def-file for python??.dll +# - create an import library using +# dlltool --dllname python15.dll --def python15.def \ +# --output-lib libpython15.a +# +# see also http://starship.python.net/crew/kernr/mingw32/Notes.html +# +# * We put export_symbols in a def-file, and don't use +# --export-all-symbols because it doesn't worked reliable in some +# tested configurations. And because other windows compilers also +# need their symbols specified this no serious problem. 
+# +# tested configurations: +# +# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works +# (after patching python's config.h and for C++ some other include files) +# see also http://starship.python.net/crew/kernr/mingw32/Notes.html +# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works +# (ld doesn't support -shared, so we use dllwrap) +# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now +# - its dllwrap doesn't work, there is a bug in binutils 2.10.90 +# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html +# - using gcc -mdll instead dllwrap doesn't work without -static because +# it tries to link against dlls instead their import libraries. (If +# it finds the dll first.) +# By specifying -static we force ld to link against the import libraries, +# this is windows standard and there are normally not the necessary symbols +# in the dlls. +# *** only the version of June 2000 shows these problems +# * cygwin gcc 3.2/ld 2.13.90 works +# (ld supports -shared) +# * mingw gcc 3.2/ld 2.13 works +# (ld supports -shared) + +import os +import sys +import copy +from subprocess import Popen, PIPE, check_output +import re + +from distutils.unixccompiler import UnixCCompiler +from distutils.file_util import write_file +from distutils.errors import (DistutilsExecError, CCompilerError, + CompileError, UnknownFileError) +from distutils.version import LooseVersion +from distutils.spawn import find_executable + +def get_msvcr(): + """Include the appropriate MSVC runtime library if Python was built + with MSVC 7.0 or later. + """ + msc_pos = sys.version.find('MSC v.') + if msc_pos != -1: + msc_ver = sys.version[msc_pos+6:msc_pos+10] + if msc_ver == '1300': + # MSVC 7.0 + return ['msvcr70'] + elif msc_ver == '1310': + # MSVC 7.1 + return ['msvcr71'] + elif msc_ver == '1400': + # VS2005 / MSVC 8.0 + return ['msvcr80'] + elif msc_ver == '1500': + # VS2008 / MSVC 9.0 + return ['msvcr90'] + elif msc_ver == '1600': + # VS2010 / MSVC 10.0 + return ['msvcr100'] + else: + raise ValueError("Unknown MS Compiler version %s " % msc_ver) + + +class CygwinCCompiler(UnixCCompiler): + """ Handles the Cygwin port of the GNU C compiler to Windows. + """ + compiler_type = 'cygwin' + obj_extension = ".o" + static_lib_extension = ".a" + shared_lib_extension = ".dll" + static_lib_format = "lib%s%s" + shared_lib_format = "%s%s" + exe_extension = ".exe" + + def __init__(self, verbose=0, dry_run=0, force=0): + + UnixCCompiler.__init__(self, verbose, dry_run, force) + + status, details = check_config_h() + self.debug_print("Python's GCC status: %s (details: %s)" % + (status, details)) + if status is not CONFIG_H_OK: + self.warn( + "Python's pyconfig.h doesn't seem to support your compiler. " + "Reason: %s. " + "Compiling may fail because of undefined preprocessor macros." 
+ % details) + + self.gcc_version, self.ld_version, self.dllwrap_version = \ + get_versions() + self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" % + (self.gcc_version, + self.ld_version, + self.dllwrap_version) ) + + # ld_version >= "2.10.90" and < "2.13" should also be able to use + # gcc -mdll instead of dllwrap + # Older dllwraps had own version numbers, newer ones use the + # same as the rest of binutils ( also ld ) + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" + else: + self.linker_dll = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static + if self.ld_version >= "2.13": + shared_option = "-shared" + else: + shared_option = "-mdll -static" + + # Hard-code GCC because that's what this is all about. + # XXX optimization, warnings etc. should be customizable. + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % + (self.linker_dll, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": + # cygwin shouldn't need msvcrt, but without the dlls will crash + # (gcc version 2.91.57) -- perhaps something about initialization + self.dll_libraries=["msvcrt"] + self.warn( + "Consider upgrading to a newer version of gcc") + else: + # Include the appropriate MSVC runtime library if Python was built + # with MSVC 7.0 or later. + self.dll_libraries = get_msvcr() + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + """Compiles the source by spawning GCC and windres if needed.""" + if ext == '.rc' or ext == '.res': + # gcc needs '.res' and '.rc' compiled to object files !!! + try: + self.spawn(["windres", "-i", src, "-o", obj]) + except DistutilsExecError as msg: + raise CompileError(msg) + else: # for other files use the C-compiler + try: + self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + + extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + + def link(self, target_desc, objects, output_filename, output_dir=None, + libraries=None, library_dirs=None, runtime_library_dirs=None, + export_symbols=None, debug=0, extra_preargs=None, + extra_postargs=None, build_temp=None, target_lang=None): + """Link the objects.""" + # use separate copies, so we can modify the lists + extra_preargs = copy.copy(extra_preargs or []) + libraries = copy.copy(libraries or []) + objects = copy.copy(objects or []) + + # Additional libraries + libraries.extend(self.dll_libraries) + + # handle export symbols by creating a def-file + # with executables this only works with gcc/ld as linker + if ((export_symbols is not None) and + (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): + # (The linker doesn't do anything if output is up-to-date. + # So it would probably better to check if we really need this, + # but for this we had to insert some unchanged parts of + # UnixCCompiler, and this is not what we want.) 
+ + # we want to put some files in the same directory as the + # object files are, build_temp doesn't help much + # where are the object files + temp_dir = os.path.dirname(objects[0]) + # name of dll to give the helper files the same base name + (dll_name, dll_extension) = os.path.splitext( + os.path.basename(output_filename)) + + # generate the filenames for these files + def_file = os.path.join(temp_dir, dll_name + ".def") + lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a") + + # Generate .def file + contents = [ + "LIBRARY %s" % os.path.basename(output_filename), + "EXPORTS"] + for sym in export_symbols: + contents.append(sym) + self.execute(write_file, (def_file, contents), + "writing %s" % def_file) + + # next add options for def-file and to creating import libraries + + # dllwrap uses different options than gcc/ld + if self.linker_dll == "dllwrap": + extra_preargs.extend(["--output-lib", lib_file]) + # for dllwrap we have to use a special option + extra_preargs.extend(["--def", def_file]) + # we use gcc/ld here and can be sure ld is >= 2.9.10 + else: + # doesn't work: bfd_close build\...\libfoo.a: Invalid operation + #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) + # for gcc/ld the def-file is specified as any object files + objects.append(def_file) + + #end: if ((export_symbols is not None) and + # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): + + # who wants symbols and a many times larger output file + # should explicitly switch the debug mode on + # otherwise we let dllwrap/ld strip the output file + # (On my machine: 10KiB < stripped_file < ??100KiB + # unstripped_file = stripped_file + XXX KiB + # ( XXX=254 for a typical python extension)) + if not debug: + extra_preargs.append("-s") + + UnixCCompiler.link(self, target_desc, objects, output_filename, + output_dir, libraries, library_dirs, + runtime_library_dirs, + None, # export_symbols, we do this in our def-file + debug, extra_preargs, extra_postargs, build_temp, + target_lang) + + # -- Miscellaneous methods ----------------------------------------- + + def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): + """Adds supports for rc and res files.""" + if output_dir is None: + output_dir = '' + obj_names = [] + for src_name in source_filenames: + # use normcase to make sure '.rc' is really '.rc' and not '.RC' + base, ext = os.path.splitext(os.path.normcase(src_name)) + if ext not in (self.src_extensions + ['.rc','.res']): + raise UnknownFileError("unknown file type '%s' (from '%s')" % \ + (ext, src_name)) + if strip_dir: + base = os.path.basename (base) + if ext in ('.res', '.rc'): + # these need to be compiled to object files + obj_names.append (os.path.join(output_dir, + base + ext + self.obj_extension)) + else: + obj_names.append (os.path.join(output_dir, + base + self.obj_extension)) + return obj_names + +# the same as cygwin plus some additional parameters +class Mingw32CCompiler(CygwinCCompiler): + """ Handles the Mingw32 port of the GNU C compiler to Windows. + """ + compiler_type = 'mingw32' + + def __init__(self, verbose=0, dry_run=0, force=0): + + CygwinCCompiler.__init__ (self, verbose, dry_run, force) + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static + if self.ld_version >= "2.13": + shared_option = "-shared" + else: + shared_option = "-mdll -static" + + # A real mingw32 doesn't need to specify a different entry point, + # but cygwin 2.91.57 in no-cygwin-mode needs it. 
+ if self.gcc_version <= "2.91.57": + entry_point = '--entry _DllMain@12' + else: + entry_point = '' + + if is_cygwingcc(): + raise CCompilerError( + 'Cygwin gcc cannot be used with --compiler=mingw32') + + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', + linker_exe='gcc', + linker_so='%s %s %s' + % (self.linker_dll, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) + # (-mthreads: Support thread-safe exception handling on `Mingw32') + + # no additional libraries needed + self.dll_libraries=[] + + # Include the appropriate MSVC runtime library if Python was built + # with MSVC 7.0 or later. + self.dll_libraries = get_msvcr() + +# Because these compilers aren't configured in Python's pyconfig.h file by +# default, we should at least warn the user if he is using an unmodified +# version. + +CONFIG_H_OK = "ok" +CONFIG_H_NOTOK = "not ok" +CONFIG_H_UNCERTAIN = "uncertain" + +def check_config_h(): + """Check if the current Python installation appears amenable to building + extensions with GCC. + + Returns a tuple (status, details), where 'status' is one of the following + constants: + + - CONFIG_H_OK: all is well, go ahead and compile + - CONFIG_H_NOTOK: doesn't look good + - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h + + 'details' is a human-readable string explaining the situation. + + Note there are two ways to conclude "OK": either 'sys.version' contains + the string "GCC" (implying that this Python was built with GCC), or the + installed "pyconfig.h" contains the string "__GNUC__". + """ + + # XXX since this function also checks sys.version, it's not strictly a + # "pyconfig.h" check -- should probably be renamed... + + from distutils import sysconfig + + # if sys.version contains GCC then python was compiled with GCC, and the + # pyconfig.h file should be OK + if "GCC" in sys.version: + return CONFIG_H_OK, "sys.version mentions 'GCC'" + + # let's see if __GNUC__ is mentioned in python.h + fn = sysconfig.get_config_h_filename() + try: + config_h = open(fn) + try: + if "__GNUC__" in config_h.read(): + return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn + else: + return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn + finally: + config_h.close() + except OSError as exc: + return (CONFIG_H_UNCERTAIN, + "couldn't read '%s': %s" % (fn, exc.strerror)) + +RE_VERSION = re.compile(br'(\d+\.\d+(\.\d+)*)') + +def _find_exe_version(cmd): + """Find the version of an executable by running `cmd` in the shell. + + If the command is not found, or the output does not match + `RE_VERSION`, returns None. + """ + executable = cmd.split()[0] + if find_executable(executable) is None: + return None + out = Popen(cmd, shell=True, stdout=PIPE).stdout + try: + out_string = out.read() + finally: + out.close() + result = RE_VERSION.search(out_string) + if result is None: + return None + # LooseVersion works with strings + # so we need to decode our bytes + return LooseVersion(result.group(1).decode()) + +def get_versions(): + """ Try to find out the versions of gcc, ld and dllwrap. + + If not possible it returns None for it. 
+ """ + commands = ['gcc -dumpversion', 'ld -v', 'dllwrap --version'] + return tuple([_find_exe_version(cmd) for cmd in commands]) + +def is_cygwingcc(): + '''Try to determine if the gcc that would be used is from cygwin.''' + out_string = check_output(['gcc', '-dumpmachine']) + return out_string.strip().endswith(b'cygwin') diff --git a/venv/Lib/site-packages/setuptools/_distutils/debug.py b/venv/Lib/site-packages/setuptools/_distutils/debug.py new file mode 100644 index 00000000..daf1660f --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/debug.py @@ -0,0 +1,5 @@ +import os + +# If DISTUTILS_DEBUG is anything other than the empty string, we run in +# debug mode. +DEBUG = os.environ.get('DISTUTILS_DEBUG') diff --git a/venv/Lib/site-packages/setuptools/_distutils/dep_util.py b/venv/Lib/site-packages/setuptools/_distutils/dep_util.py new file mode 100644 index 00000000..d74f5e4e --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/dep_util.py @@ -0,0 +1,92 @@ +"""distutils.dep_util + +Utility functions for simple, timestamp-based dependency of files +and groups of files; also, function based entirely on such +timestamp dependency analysis.""" + +import os +from distutils.errors import DistutilsFileError + + +def newer (source, target): + """Return true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. Return false if + both exist and 'target' is the same age or younger than 'source'. + Raise DistutilsFileError if 'source' does not exist. + """ + if not os.path.exists(source): + raise DistutilsFileError("file '%s' does not exist" % + os.path.abspath(source)) + if not os.path.exists(target): + return 1 + + from stat import ST_MTIME + mtime1 = os.stat(source)[ST_MTIME] + mtime2 = os.stat(target)[ST_MTIME] + + return mtime1 > mtime2 + +# newer () + + +def newer_pairwise (sources, targets): + """Walk two filename lists in parallel, testing if each source is newer + than its corresponding target. Return a pair of lists (sources, + targets) where source is newer than target, according to the semantics + of 'newer()'. + """ + if len(sources) != len(targets): + raise ValueError("'sources' and 'targets' must be same length") + + # build a pair of lists (sources, targets) where source is newer + n_sources = [] + n_targets = [] + for i in range(len(sources)): + if newer(sources[i], targets[i]): + n_sources.append(sources[i]) + n_targets.append(targets[i]) + + return (n_sources, n_targets) + +# newer_pairwise () + + +def newer_group (sources, target, missing='error'): + """Return true if 'target' is out-of-date with respect to any file + listed in 'sources'. In other words, if 'target' exists and is newer + than every file in 'sources', return false; otherwise return true. + 'missing' controls what we do when a source file is missing; the + default ("error") is to blow up with an OSError from inside 'stat()'; + if it is "ignore", we silently drop any missing source files; if it is + "newer", any missing source files make us assume that 'target' is + out-of-date (this is handy in "dry-run" mode: it'll make you pretend to + carry out commands that wouldn't work because inputs are missing, but + that doesn't matter because you're not actually going to run the + commands). + """ + # If the target doesn't even exist, then it's definitely out-of-date. 
+ if not os.path.exists(target): + return 1 + + # Otherwise we have to find out the hard way: if *any* source file + # is more recent than 'target', then 'target' is out-of-date and + # we can immediately return true. If we fall through to the end + # of the loop, then 'target' is up-to-date and we return false. + from stat import ST_MTIME + target_mtime = os.stat(target)[ST_MTIME] + for source in sources: + if not os.path.exists(source): + if missing == 'error': # blow up when we stat() the file + pass + elif missing == 'ignore': # missing source dropped from + continue # target's dependency list + elif missing == 'newer': # missing source means target is + return 1 # out-of-date + + source_mtime = os.stat(source)[ST_MTIME] + if source_mtime > target_mtime: + return 1 + else: + return 0 + +# newer_group () diff --git a/venv/Lib/site-packages/setuptools/_distutils/dir_util.py b/venv/Lib/site-packages/setuptools/_distutils/dir_util.py new file mode 100644 index 00000000..d5cd8e3e --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/dir_util.py @@ -0,0 +1,210 @@ +"""distutils.dir_util + +Utility functions for manipulating directories and directory trees.""" + +import os +import errno +from distutils.errors import DistutilsFileError, DistutilsInternalError +from distutils import log + +# cache for by mkpath() -- in addition to cheapening redundant calls, +# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode +_path_created = {} + +# I don't use os.makedirs because a) it's new to Python 1.5.2, and +# b) it blows up if the directory already exists (I want to silently +# succeed in that case). +def mkpath(name, mode=0o777, verbose=1, dry_run=0): + """Create a directory and any missing ancestor directories. + + If the directory already exists (or if 'name' is the empty string, which + means the current directory, which of course exists), then do nothing. + Raise DistutilsFileError if unable to create some directory along the way + (eg. some sub-path exists, but is a file rather than a directory). + If 'verbose' is true, print a one-line summary of each mkdir to stdout. + Return the list of directories actually created. + """ + + global _path_created + + # Detect a common bug -- name is None + if not isinstance(name, str): + raise DistutilsInternalError( + "mkpath: 'name' must be a string (got %r)" % (name,)) + + # XXX what's the better way to handle verbosity? print as we create + # each directory in the path (the current behaviour), or only announce + # the creation of the whole path? 
(quite easy to do the latter since + # we're not using a recursive algorithm) + + name = os.path.normpath(name) + created_dirs = [] + if os.path.isdir(name) or name == '': + return created_dirs + if _path_created.get(os.path.abspath(name)): + return created_dirs + + (head, tail) = os.path.split(name) + tails = [tail] # stack of lone dirs to create + + while head and tail and not os.path.isdir(head): + (head, tail) = os.path.split(head) + tails.insert(0, tail) # push next higher dir onto stack + + # now 'head' contains the deepest directory that already exists + # (that is, the child of 'head' in 'name' is the highest directory + # that does *not* exist) + for d in tails: + #print "head = %s, d = %s: " % (head, d), + head = os.path.join(head, d) + abs_head = os.path.abspath(head) + + if _path_created.get(abs_head): + continue + + if verbose >= 1: + log.info("creating %s", head) + + if not dry_run: + try: + os.mkdir(head, mode) + except OSError as exc: + if not (exc.errno == errno.EEXIST and os.path.isdir(head)): + raise DistutilsFileError( + "could not create '%s': %s" % (head, exc.args[-1])) + created_dirs.append(head) + + _path_created[abs_head] = 1 + return created_dirs + +def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): + """Create all the empty directories under 'base_dir' needed to put 'files' + there. + + 'base_dir' is just the name of a directory which doesn't necessarily + exist yet; 'files' is a list of filenames to be interpreted relative to + 'base_dir'. 'base_dir' + the directory portion of every file in 'files' + will be created if it doesn't already exist. 'mode', 'verbose' and + 'dry_run' flags are as for 'mkpath()'. + """ + # First get the list of directories to create + need_dir = set() + for file in files: + need_dir.add(os.path.join(base_dir, os.path.dirname(file))) + + # Now create them + for dir in sorted(need_dir): + mkpath(dir, mode, verbose=verbose, dry_run=dry_run) + +def copy_tree(src, dst, preserve_mode=1, preserve_times=1, + preserve_symlinks=0, update=0, verbose=1, dry_run=0): + """Copy an entire directory tree 'src' to a new location 'dst'. + + Both 'src' and 'dst' must be directory names. If 'src' is not a + directory, raise DistutilsFileError. If 'dst' does not exist, it is + created with 'mkpath()'. The end result of the copy is that every + file in 'src' is copied to 'dst', and directories under 'src' are + recursively copied to 'dst'. Return the list of files that were + copied or might have been copied, using their output name. The + return value is unaffected by 'update' or 'dry_run': it is simply + the list of all files under 'src', with the names changed to be + under 'dst'. + + 'preserve_mode' and 'preserve_times' are the same as for + 'copy_file'; note that they only apply to regular files, not to + directories. If 'preserve_symlinks' is true, symlinks will be + copied as symlinks (on platforms that support them!); otherwise + (the default), the destination of the symlink will be copied. + 'update' and 'verbose' are the same as for 'copy_file'. 
+ """ + from distutils.file_util import copy_file + + if not dry_run and not os.path.isdir(src): + raise DistutilsFileError( + "cannot copy tree '%s': not a directory" % src) + try: + names = os.listdir(src) + except OSError as e: + if dry_run: + names = [] + else: + raise DistutilsFileError( + "error listing files in '%s': %s" % (src, e.strerror)) + + if not dry_run: + mkpath(dst, verbose=verbose) + + outputs = [] + + for n in names: + src_name = os.path.join(src, n) + dst_name = os.path.join(dst, n) + + if n.startswith('.nfs'): + # skip NFS rename files + continue + + if preserve_symlinks and os.path.islink(src_name): + link_dest = os.readlink(src_name) + if verbose >= 1: + log.info("linking %s -> %s", dst_name, link_dest) + if not dry_run: + os.symlink(link_dest, dst_name) + outputs.append(dst_name) + + elif os.path.isdir(src_name): + outputs.extend( + copy_tree(src_name, dst_name, preserve_mode, + preserve_times, preserve_symlinks, update, + verbose=verbose, dry_run=dry_run)) + else: + copy_file(src_name, dst_name, preserve_mode, + preserve_times, update, verbose=verbose, + dry_run=dry_run) + outputs.append(dst_name) + + return outputs + +def _build_cmdtuple(path, cmdtuples): + """Helper for remove_tree().""" + for f in os.listdir(path): + real_f = os.path.join(path,f) + if os.path.isdir(real_f) and not os.path.islink(real_f): + _build_cmdtuple(real_f, cmdtuples) + else: + cmdtuples.append((os.remove, real_f)) + cmdtuples.append((os.rmdir, path)) + +def remove_tree(directory, verbose=1, dry_run=0): + """Recursively remove an entire directory tree. + + Any errors are ignored (apart from being reported to stdout if 'verbose' + is true). + """ + global _path_created + + if verbose >= 1: + log.info("removing '%s' (and everything under it)", directory) + if dry_run: + return + cmdtuples = [] + _build_cmdtuple(directory, cmdtuples) + for cmd in cmdtuples: + try: + cmd[0](cmd[1]) + # remove dir from cache if it's already there + abspath = os.path.abspath(cmd[1]) + if abspath in _path_created: + del _path_created[abspath] + except OSError as exc: + log.warn("error removing %s: %s", directory, exc) + +def ensure_relative(path): + """Take the full path 'path', and make it a relative path. + + This is useful to make 'path' the second argument to os.path.join(). + """ + drive, path = os.path.splitdrive(path) + if path[0:1] == os.sep: + path = drive + path[1:] + return path diff --git a/venv/Lib/site-packages/setuptools/_distutils/dist.py b/venv/Lib/site-packages/setuptools/_distutils/dist.py new file mode 100644 index 00000000..37db4d6c --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/dist.py @@ -0,0 +1,1257 @@ +"""distutils.dist + +Provides the Distribution class, which represents the module distribution +being built/installed/distributed. +""" + +import sys +import os +import re +from email import message_from_file + +try: + import warnings +except ImportError: + warnings = None + +from distutils.errors import * +from distutils.fancy_getopt import FancyGetopt, translate_longopt +from distutils.util import check_environ, strtobool, rfc822_escape +from distutils import log +from distutils.debug import DEBUG + +# Regex to define acceptable Distutils command names. This is not *quite* +# the same as a Python NAME -- I don't allow leading underscores. The fact +# that they're very similar is no coincidence; the default naming scheme is +# to look for a Python module named after the command. 
+command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') + + +def _ensure_list(value, fieldname): + if isinstance(value, str): + # a string containing comma separated values is okay. It will + # be converted to a list by Distribution.finalize_options(). + pass + elif not isinstance(value, list): + # passing a tuple or an iterator perhaps, warn and convert + typename = type(value).__name__ + msg = "Warning: '{fieldname}' should be a list, got type '{typename}'" + msg = msg.format(**locals()) + log.log(log.WARN, msg) + value = list(value) + return value + + +class Distribution: + """The core of the Distutils. Most of the work hiding behind 'setup' + is really done within a Distribution instance, which farms the work out + to the Distutils commands specified on the command line. + + Setup scripts will almost never instantiate Distribution directly, + unless the 'setup()' function is totally inadequate to their needs. + However, it is conceivable that a setup script might wish to subclass + Distribution for some specialized purpose, and then pass the subclass + to 'setup()' as the 'distclass' keyword argument. If so, it is + necessary to respect the expectations that 'setup' has of Distribution. + See the code for 'setup()', in core.py, for details. + """ + + # 'global_options' describes the command-line options that may be + # supplied to the setup script prior to any actual commands. + # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of + # these global options. This list should be kept to a bare minimum, + # since every global option is also valid as a command option -- and we + # don't want to pollute the commands with too many options that they + # have minimal control over. + # The fourth entry for verbose means that it can be repeated. + global_options = [ + ('verbose', 'v', "run verbosely (default)", 1), + ('quiet', 'q', "run quietly (turns verbosity off)"), + ('dry-run', 'n', "don't actually do anything"), + ('help', 'h', "show detailed help message"), + ('no-user-cfg', None, + 'ignore pydistutils.cfg in your home directory'), + ] + + # 'common_usage' is a short (2-3 line) string describing the common + # usage of the setup script. 
+ common_usage = """\ +Common commands: (see '--help-commands' for more) + + setup.py build will build the package underneath 'build/' + setup.py install will install the package +""" + + # options that are not propagated to the commands + display_options = [ + ('help-commands', None, + "list all available commands"), + ('name', None, + "print package name"), + ('version', 'V', + "print package version"), + ('fullname', None, + "print -"), + ('author', None, + "print the author's name"), + ('author-email', None, + "print the author's email address"), + ('maintainer', None, + "print the maintainer's name"), + ('maintainer-email', None, + "print the maintainer's email address"), + ('contact', None, + "print the maintainer's name if known, else the author's"), + ('contact-email', None, + "print the maintainer's email address if known, else the author's"), + ('url', None, + "print the URL for this package"), + ('license', None, + "print the license of the package"), + ('licence', None, + "alias for --license"), + ('description', None, + "print the package description"), + ('long-description', None, + "print the long package description"), + ('platforms', None, + "print the list of platforms"), + ('classifiers', None, + "print the list of classifiers"), + ('keywords', None, + "print the list of keywords"), + ('provides', None, + "print the list of packages/modules provided"), + ('requires', None, + "print the list of packages/modules required"), + ('obsoletes', None, + "print the list of packages/modules made obsolete") + ] + display_option_names = [translate_longopt(x[0]) for x in display_options] + + # negative options are options that exclude other options + negative_opt = {'quiet': 'verbose'} + + # -- Creation/initialization methods ------------------------------- + + def __init__(self, attrs=None): + """Construct a new Distribution instance: initialize all the + attributes of a Distribution, and then use 'attrs' (a dictionary + mapping attribute names to values) to assign some of those + attributes their "real" values. (Any attributes not mentioned in + 'attrs' will be assigned to some null value: 0, None, an empty list + or dictionary, etc.) Most importantly, initialize the + 'command_obj' attribute to the empty dictionary; this will be + filled in with real command objects by 'parse_command_line()'. + """ + + # Default values for our command-line options + self.verbose = 1 + self.dry_run = 0 + self.help = 0 + for attr in self.display_option_names: + setattr(self, attr, 0) + + # Store the distribution meta-data (name, version, author, and so + # forth) in a separate object -- we're getting to have enough + # information here (and enough command-line options) that it's + # worth it. Also delegate 'get_XXX()' methods to the 'metadata' + # object in a sneaky and underhanded (but efficient!) way. + self.metadata = DistributionMetadata() + for basename in self.metadata._METHOD_BASENAMES: + method_name = "get_" + basename + setattr(self, method_name, getattr(self.metadata, method_name)) + + # 'cmdclass' maps command names to class objects, so we + # can 1) quickly figure out which class to instantiate when + # we need to create a new command object, and 2) have a way + # for the setup script to override command classes + self.cmdclass = {} + + # 'command_packages' is a list of packages in which commands + # are searched for. The factory for command 'foo' is expected + # to be named 'foo' in the module 'foo' in one of the packages + # named here. 
This list is searched from the left; an error + # is raised if no named package provides the command being + # searched for. (Always access using get_command_packages().) + self.command_packages = None + + # 'script_name' and 'script_args' are usually set to sys.argv[0] + # and sys.argv[1:], but they can be overridden when the caller is + # not necessarily a setup script run from the command-line. + self.script_name = None + self.script_args = None + + # 'command_options' is where we store command options between + # parsing them (from config files, the command-line, etc.) and when + # they are actually needed -- ie. when the command in question is + # instantiated. It is a dictionary of dictionaries of 2-tuples: + # command_options = { command_name : { option : (source, value) } } + self.command_options = {} + + # 'dist_files' is the list of (command, pyversion, file) that + # have been created by any dist commands run so far. This is + # filled regardless of whether the run is dry or not. pyversion + # gives sysconfig.get_python_version() if the dist file is + # specific to a Python version, 'any' if it is good for all + # Python versions on the target platform, and '' for a source + # file. pyversion should not be used to specify minimum or + # maximum required Python versions; use the metainfo for that + # instead. + self.dist_files = [] + + # These options are really the business of various commands, rather + # than of the Distribution itself. We provide aliases for them in + # Distribution as a convenience to the developer. + self.packages = None + self.package_data = {} + self.package_dir = None + self.py_modules = None + self.libraries = None + self.headers = None + self.ext_modules = None + self.ext_package = None + self.include_dirs = None + self.extra_path = None + self.scripts = None + self.data_files = None + self.password = '' + + # And now initialize bookkeeping stuff that can't be supplied by + # the caller at all. 'command_obj' maps command names to + # Command instances -- that's how we enforce that every command + # class is a singleton. + self.command_obj = {} + + # 'have_run' maps command names to boolean values; it keeps track + # of whether we have actually run a particular command, to make it + # cheap to "run" a command whenever we think we might need to -- if + # it's already been done, no need for expensive filesystem + # operations, we just check the 'have_run' dictionary and carry on. + # It's only safe to query 'have_run' for a command class that has + # been instantiated -- a false value will be inserted when the + # command object is created, and replaced with a true value when + # the command is successfully run. Thus it's probably best to use + # '.get()' rather than a straight lookup. + self.have_run = {} + + # Now we'll use the attrs dictionary (ultimately, keyword args from + # the setup script) to possibly override any or all of these + # distribution options. + + if attrs: + # Pull out the set of command options and work on them + # specifically. Note that this order guarantees that aliased + # command options will override any supplied redundantly + # through the general options dictionary. 
+ options = attrs.get('options') + if options is not None: + del attrs['options'] + for (command, cmd_options) in options.items(): + opt_dict = self.get_option_dict(command) + for (opt, val) in cmd_options.items(): + opt_dict[opt] = ("setup script", val) + + if 'licence' in attrs: + attrs['license'] = attrs['licence'] + del attrs['licence'] + msg = "'licence' distribution option is deprecated; use 'license'" + if warnings is not None: + warnings.warn(msg) + else: + sys.stderr.write(msg + "\n") + + # Now work on the rest of the attributes. Any attribute that's + # not already defined is invalid! + for (key, val) in attrs.items(): + if hasattr(self.metadata, "set_" + key): + getattr(self.metadata, "set_" + key)(val) + elif hasattr(self.metadata, key): + setattr(self.metadata, key, val) + elif hasattr(self, key): + setattr(self, key, val) + else: + msg = "Unknown distribution option: %s" % repr(key) + warnings.warn(msg) + + # no-user-cfg is handled before other command line args + # because other args override the config files, and this + # one is needed before we can load the config files. + # If attrs['script_args'] wasn't passed, assume false. + # + # This also make sure we just look at the global options + self.want_user_cfg = True + + if self.script_args is not None: + for arg in self.script_args: + if not arg.startswith('-'): + break + if arg == '--no-user-cfg': + self.want_user_cfg = False + break + + self.finalize_options() + + def get_option_dict(self, command): + """Get the option dictionary for a given command. If that + command's option dictionary hasn't been created yet, then create it + and return the new dictionary; otherwise, return the existing + option dictionary. + """ + dict = self.command_options.get(command) + if dict is None: + dict = self.command_options[command] = {} + return dict + + def dump_option_dicts(self, header=None, commands=None, indent=""): + from pprint import pformat + + if commands is None: # dump all command option dicts + commands = sorted(self.command_options.keys()) + + if header is not None: + self.announce(indent + header) + indent = indent + " " + + if not commands: + self.announce(indent + "no commands known yet") + return + + for cmd_name in commands: + opt_dict = self.command_options.get(cmd_name) + if opt_dict is None: + self.announce(indent + + "no option dict for '%s' command" % cmd_name) + else: + self.announce(indent + + "option dict for '%s' command:" % cmd_name) + out = pformat(opt_dict) + for line in out.split('\n'): + self.announce(indent + " " + line) + + # -- Config file finding/parsing methods --------------------------- + + def find_config_files(self): + """Find as many configuration files as should be processed for this + platform, and return a list of filenames in the order in which they + should be parsed. The filenames returned are guaranteed to exist + (modulo nasty race conditions). + + There are three possible config files: distutils.cfg in the + Distutils installation directory (ie. where the top-level + Distutils __inst__.py file lives), a file in the user's home + directory named .pydistutils.cfg on Unix and pydistutils.cfg + on Windows/Mac; and setup.cfg in the current directory. + + The file in the user's home directory can be disabled with the + --no-user-cfg option. 
+ """ + files = [] + check_environ() + + # Where to look for the system-wide Distutils config file + sys_dir = os.path.dirname(sys.modules['distutils'].__file__) + + # Look for the system config file + sys_file = os.path.join(sys_dir, "distutils.cfg") + if os.path.isfile(sys_file): + files.append(sys_file) + + # What to call the per-user config file + if os.name == 'posix': + user_filename = ".pydistutils.cfg" + else: + user_filename = "pydistutils.cfg" + + # And look for the user config file + if self.want_user_cfg: + user_file = os.path.join(os.path.expanduser('~'), user_filename) + if os.path.isfile(user_file): + files.append(user_file) + + # All platforms support local setup.cfg + local_file = "setup.cfg" + if os.path.isfile(local_file): + files.append(local_file) + + if DEBUG: + self.announce("using config files: %s" % ', '.join(files)) + + return files + + def parse_config_files(self, filenames=None): + from configparser import ConfigParser + + # Ignore install directory options if we have a venv + if sys.prefix != sys.base_prefix: + ignore_options = [ + 'install-base', 'install-platbase', 'install-lib', + 'install-platlib', 'install-purelib', 'install-headers', + 'install-scripts', 'install-data', 'prefix', 'exec-prefix', + 'home', 'user', 'root'] + else: + ignore_options = [] + + ignore_options = frozenset(ignore_options) + + if filenames is None: + filenames = self.find_config_files() + + if DEBUG: + self.announce("Distribution.parse_config_files():") + + parser = ConfigParser() + for filename in filenames: + if DEBUG: + self.announce(" reading %s" % filename) + parser.read(filename) + for section in parser.sections(): + options = parser.options(section) + opt_dict = self.get_option_dict(section) + + for opt in options: + if opt != '__name__' and opt not in ignore_options: + val = parser.get(section,opt) + opt = opt.replace('-', '_') + opt_dict[opt] = (filename, val) + + # Make the ConfigParser forget everything (so we retain + # the original filenames that options come from) + parser.__init__() + + # If there was a "global" section in the config file, use it + # to set Distribution options. + + if 'global' in self.command_options: + for (opt, (src, val)) in self.command_options['global'].items(): + alias = self.negative_opt.get(opt) + try: + if alias: + setattr(self, alias, not strtobool(val)) + elif opt in ('verbose', 'dry_run'): # ugh! + setattr(self, opt, strtobool(val)) + else: + setattr(self, opt, val) + except ValueError as msg: + raise DistutilsOptionError(msg) + + # -- Command-line parsing methods ---------------------------------- + + def parse_command_line(self): + """Parse the setup script's command line, taken from the + 'script_args' instance attribute (which defaults to 'sys.argv[1:]' + -- see 'setup()' in core.py). This list is first processed for + "global options" -- options that set attributes of the Distribution + instance. Then, it is alternately scanned for Distutils commands + and options for that command. Each new command terminates the + options for the previous command. The allowed options for a + command are determined by the 'user_options' attribute of the + command class -- thus, we have to be able to load command classes + in order to parse the command line. Any error in that 'options' + attribute raises DistutilsGetoptError; any error on the + command-line raises DistutilsArgError. If no Distutils commands + were found on the command line, raises DistutilsArgError. 
Return + true if command-line was successfully parsed and we should carry + on with executing commands; false if no errors but we shouldn't + execute commands (currently, this only happens if user asks for + help). + """ + # + # We now have enough information to show the Macintosh dialog + # that allows the user to interactively specify the "command line". + # + toplevel_options = self._get_toplevel_options() + + # We have to parse the command line a bit at a time -- global + # options, then the first command, then its options, and so on -- + # because each command will be handled by a different class, and + # the options that are valid for a particular class aren't known + # until we have loaded the command class, which doesn't happen + # until we know what the command is. + + self.commands = [] + parser = FancyGetopt(toplevel_options + self.display_options) + parser.set_negative_aliases(self.negative_opt) + parser.set_aliases({'licence': 'license'}) + args = parser.getopt(args=self.script_args, object=self) + option_order = parser.get_option_order() + log.set_verbosity(self.verbose) + + # for display options we return immediately + if self.handle_display_options(option_order): + return + while args: + args = self._parse_command_opts(parser, args) + if args is None: # user asked for help (and got it) + return + + # Handle the cases of --help as a "global" option, ie. + # "setup.py --help" and "setup.py --help command ...". For the + # former, we show global options (--verbose, --dry-run, etc.) + # and display-only options (--name, --version, etc.); for the + # latter, we omit the display-only options and show help for + # each command listed on the command line. + if self.help: + self._show_help(parser, + display_options=len(self.commands) == 0, + commands=self.commands) + return + + # Oops, no commands found -- an end-user error + if not self.commands: + raise DistutilsArgError("no commands supplied") + + # All is well: return true + return True + + def _get_toplevel_options(self): + """Return the non-display options recognized at the top level. + + This includes options that are recognized *only* at the top + level as well as options recognized for commands. + """ + return self.global_options + [ + ("command-packages=", None, + "list of packages that provide distutils commands"), + ] + + def _parse_command_opts(self, parser, args): + """Parse the command-line options for a single command. + 'parser' must be a FancyGetopt instance; 'args' must be the list + of arguments, starting with the current command (whose options + we are about to parse). Returns a new version of 'args' with + the next command at the front of the list; will be the empty + list if there are no more commands on the command line. Returns + None if the user asked for help on this command. + """ + # late import because of mutual dependence between these modules + from distutils.cmd import Command + + # Pull the current command from the head of the command line + command = args[0] + if not command_re.match(command): + raise SystemExit("invalid command name '%s'" % command) + self.commands.append(command) + + # Dig up the command class that implements this command, so we + # 1) know that it's a valid command, and 2) know which options + # it takes. + try: + cmd_class = self.get_command_class(command) + except DistutilsModuleError as msg: + raise DistutilsArgError(msg) + + # Require that the command class be derived from Command -- want + # to be sure that the basic "command" interface is implemented. 
+ if not issubclass(cmd_class, Command): + raise DistutilsClassError( + "command class %s must subclass Command" % cmd_class) + + # Also make sure that the command object provides a list of its + # known options. + if not (hasattr(cmd_class, 'user_options') and + isinstance(cmd_class.user_options, list)): + msg = ("command class %s must provide " + "'user_options' attribute (a list of tuples)") + raise DistutilsClassError(msg % cmd_class) + + # If the command class has a list of negative alias options, + # merge it in with the global negative aliases. + negative_opt = self.negative_opt + if hasattr(cmd_class, 'negative_opt'): + negative_opt = negative_opt.copy() + negative_opt.update(cmd_class.negative_opt) + + # Check for help_options in command class. They have a different + # format (tuple of four) so we need to preprocess them here. + if (hasattr(cmd_class, 'help_options') and + isinstance(cmd_class.help_options, list)): + help_options = fix_help_options(cmd_class.help_options) + else: + help_options = [] + + # All commands support the global options too, just by adding + # in 'global_options'. + parser.set_option_table(self.global_options + + cmd_class.user_options + + help_options) + parser.set_negative_aliases(negative_opt) + (args, opts) = parser.getopt(args[1:]) + if hasattr(opts, 'help') and opts.help: + self._show_help(parser, display_options=0, commands=[cmd_class]) + return + + if (hasattr(cmd_class, 'help_options') and + isinstance(cmd_class.help_options, list)): + help_option_found=0 + for (help_option, short, desc, func) in cmd_class.help_options: + if hasattr(opts, parser.get_attr_name(help_option)): + help_option_found=1 + if callable(func): + func() + else: + raise DistutilsClassError( + "invalid help function %r for help option '%s': " + "must be a callable object (function, etc.)" + % (func, help_option)) + + if help_option_found: + return + + # Put the options from the command-line into their official + # holding pen, the 'command_options' dictionary. + opt_dict = self.get_option_dict(command) + for (name, value) in vars(opts).items(): + opt_dict[name] = ("command line", value) + + return args + + def finalize_options(self): + """Set final values for all the options on the Distribution + instance, analogous to the .finalize_options() method of Command + objects. + """ + for attr in ('keywords', 'platforms'): + value = getattr(self.metadata, attr) + if value is None: + continue + if isinstance(value, str): + value = [elm.strip() for elm in value.split(',')] + setattr(self.metadata, attr, value) + + def _show_help(self, parser, global_options=1, display_options=1, + commands=[]): + """Show help for the setup script command-line in the form of + several lists of command-line options. 'parser' should be a + FancyGetopt instance; do not expect it to be returned in the + same state, as its option table will be reset to make it + generate the correct help text. + + If 'global_options' is true, lists the global options: + --verbose, --dry-run, etc. If 'display_options' is true, lists + the "display-only" options: --name, --version, etc. Finally, + lists per-command help for every command name or command class + in 'commands'. 
+ """ + # late import because of mutual dependence between these modules + from distutils.core import gen_usage + from distutils.cmd import Command + + if global_options: + if display_options: + options = self._get_toplevel_options() + else: + options = self.global_options + parser.set_option_table(options) + parser.print_help(self.common_usage + "\nGlobal options:") + print('') + + if display_options: + parser.set_option_table(self.display_options) + parser.print_help( + "Information display options (just display " + + "information, ignore any commands)") + print('') + + for command in self.commands: + if isinstance(command, type) and issubclass(command, Command): + klass = command + else: + klass = self.get_command_class(command) + if (hasattr(klass, 'help_options') and + isinstance(klass.help_options, list)): + parser.set_option_table(klass.user_options + + fix_help_options(klass.help_options)) + else: + parser.set_option_table(klass.user_options) + parser.print_help("Options for '%s' command:" % klass.__name__) + print('') + + print(gen_usage(self.script_name)) + + def handle_display_options(self, option_order): + """If there were any non-global "display-only" options + (--help-commands or the metadata display options) on the command + line, display the requested info and return true; else return + false. + """ + from distutils.core import gen_usage + + # User just wants a list of commands -- we'll print it out and stop + # processing now (ie. if they ran "setup --help-commands foo bar", + # we ignore "foo bar"). + if self.help_commands: + self.print_commands() + print('') + print(gen_usage(self.script_name)) + return 1 + + # If user supplied any of the "display metadata" options, then + # display that metadata in the order in which the user supplied the + # metadata options. + any_display_options = 0 + is_display_option = {} + for option in self.display_options: + is_display_option[option[0]] = 1 + + for (opt, val) in option_order: + if val and is_display_option.get(opt): + opt = translate_longopt(opt) + value = getattr(self.metadata, "get_"+opt)() + if opt in ['keywords', 'platforms']: + print(','.join(value)) + elif opt in ('classifiers', 'provides', 'requires', + 'obsoletes'): + print('\n'.join(value)) + else: + print(value) + any_display_options = 1 + + return any_display_options + + def print_command_list(self, commands, header, max_length): + """Print a subset of the list of all commands -- used by + 'print_commands()'. + """ + print(header + ":") + + for cmd in commands: + klass = self.cmdclass.get(cmd) + if not klass: + klass = self.get_command_class(cmd) + try: + description = klass.description + except AttributeError: + description = "(no description available)" + + print(" %-*s %s" % (max_length, cmd, description)) + + def print_commands(self): + """Print out a help message listing all available commands with a + description of each. The list is divided into "standard commands" + (listed in distutils.command.__all__) and "extra commands" + (mentioned in self.cmdclass, but not a standard command). The + descriptions come from the command class attribute + 'description'. 
+ """ + import distutils.command + std_commands = distutils.command.__all__ + is_std = {} + for cmd in std_commands: + is_std[cmd] = 1 + + extra_commands = [] + for cmd in self.cmdclass.keys(): + if not is_std.get(cmd): + extra_commands.append(cmd) + + max_length = 0 + for cmd in (std_commands + extra_commands): + if len(cmd) > max_length: + max_length = len(cmd) + + self.print_command_list(std_commands, + "Standard commands", + max_length) + if extra_commands: + print() + self.print_command_list(extra_commands, + "Extra commands", + max_length) + + def get_command_list(self): + """Get a list of (command, description) tuples. + The list is divided into "standard commands" (listed in + distutils.command.__all__) and "extra commands" (mentioned in + self.cmdclass, but not a standard command). The descriptions come + from the command class attribute 'description'. + """ + # Currently this is only used on Mac OS, for the Mac-only GUI + # Distutils interface (by Jack Jansen) + import distutils.command + std_commands = distutils.command.__all__ + is_std = {} + for cmd in std_commands: + is_std[cmd] = 1 + + extra_commands = [] + for cmd in self.cmdclass.keys(): + if not is_std.get(cmd): + extra_commands.append(cmd) + + rv = [] + for cmd in (std_commands + extra_commands): + klass = self.cmdclass.get(cmd) + if not klass: + klass = self.get_command_class(cmd) + try: + description = klass.description + except AttributeError: + description = "(no description available)" + rv.append((cmd, description)) + return rv + + # -- Command class/object methods ---------------------------------- + + def get_command_packages(self): + """Return a list of packages from which commands are loaded.""" + pkgs = self.command_packages + if not isinstance(pkgs, list): + if pkgs is None: + pkgs = '' + pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != ''] + if "distutils.command" not in pkgs: + pkgs.insert(0, "distutils.command") + self.command_packages = pkgs + return pkgs + + def get_command_class(self, command): + """Return the class that implements the Distutils command named by + 'command'. First we check the 'cmdclass' dictionary; if the + command is mentioned there, we fetch the class object from the + dictionary and return it. Otherwise we load the command module + ("distutils.command." + command) and fetch the command class from + the module. The loaded class is also stored in 'cmdclass' + to speed future calls to 'get_command_class()'. + + Raises DistutilsModuleError if the expected module could not be + found, or if that module does not define the expected class. + """ + klass = self.cmdclass.get(command) + if klass: + return klass + + for pkgname in self.get_command_packages(): + module_name = "%s.%s" % (pkgname, command) + klass_name = command + + try: + __import__(module_name) + module = sys.modules[module_name] + except ImportError: + continue + + try: + klass = getattr(module, klass_name) + except AttributeError: + raise DistutilsModuleError( + "invalid command '%s' (no class '%s' in module '%s')" + % (command, klass_name, module_name)) + + self.cmdclass[command] = klass + return klass + + raise DistutilsModuleError("invalid command '%s'" % command) + + def get_command_obj(self, command, create=1): + """Return the command object for 'command'. Normally this object + is cached on a previous call to 'get_command_obj()'; if no command + object for 'command' is in the cache, then we either create and + return it (if 'create' is true) or return None. 
+ """ + cmd_obj = self.command_obj.get(command) + if not cmd_obj and create: + if DEBUG: + self.announce("Distribution.get_command_obj(): " + "creating '%s' command object" % command) + + klass = self.get_command_class(command) + cmd_obj = self.command_obj[command] = klass(self) + self.have_run[command] = 0 + + # Set any options that were supplied in config files + # or on the command line. (NB. support for error + # reporting is lame here: any errors aren't reported + # until 'finalize_options()' is called, which means + # we won't report the source of the error.) + options = self.command_options.get(command) + if options: + self._set_command_options(cmd_obj, options) + + return cmd_obj + + def _set_command_options(self, command_obj, option_dict=None): + """Set the options for 'command_obj' from 'option_dict'. Basically + this means copying elements of a dictionary ('option_dict') to + attributes of an instance ('command'). + + 'command_obj' must be a Command instance. If 'option_dict' is not + supplied, uses the standard option dictionary for this command + (from 'self.command_options'). + """ + command_name = command_obj.get_command_name() + if option_dict is None: + option_dict = self.get_option_dict(command_name) + + if DEBUG: + self.announce(" setting options for '%s' command:" % command_name) + for (option, (source, value)) in option_dict.items(): + if DEBUG: + self.announce(" %s = %s (from %s)" % (option, value, + source)) + try: + bool_opts = [translate_longopt(o) + for o in command_obj.boolean_options] + except AttributeError: + bool_opts = [] + try: + neg_opt = command_obj.negative_opt + except AttributeError: + neg_opt = {} + + try: + is_string = isinstance(value, str) + if option in neg_opt and is_string: + setattr(command_obj, neg_opt[option], not strtobool(value)) + elif option in bool_opts and is_string: + setattr(command_obj, option, strtobool(value)) + elif hasattr(command_obj, option): + setattr(command_obj, option, value) + else: + raise DistutilsOptionError( + "error in %s: command '%s' has no such option '%s'" + % (source, command_name, option)) + except ValueError as msg: + raise DistutilsOptionError(msg) + + def reinitialize_command(self, command, reinit_subcommands=0): + """Reinitializes a command to the state it was in when first + returned by 'get_command_obj()': ie., initialized but not yet + finalized. This provides the opportunity to sneak option + values in programmatically, overriding or supplementing + user-supplied values from the config files and command line. + You'll have to re-finalize the command object (by calling + 'finalize_options()' or 'ensure_finalized()') before using it for + real. + + 'command' should be a command name (string) or command object. If + 'reinit_subcommands' is true, also reinitializes the command's + sub-commands, as declared by the 'sub_commands' class attribute (if + it has one). See the "install" command for an example. Only + reinitializes the sub-commands that actually matter, ie. those + whose test predicates return true. + + Returns the reinitialized command object. 
+ """ + from distutils.cmd import Command + if not isinstance(command, Command): + command_name = command + command = self.get_command_obj(command_name) + else: + command_name = command.get_command_name() + + if not command.finalized: + return command + command.initialize_options() + command.finalized = 0 + self.have_run[command_name] = 0 + self._set_command_options(command) + + if reinit_subcommands: + for sub in command.get_sub_commands(): + self.reinitialize_command(sub, reinit_subcommands) + + return command + + # -- Methods that operate on the Distribution ---------------------- + + def announce(self, msg, level=log.INFO): + log.log(level, msg) + + def run_commands(self): + """Run each command that was seen on the setup script command line. + Uses the list of commands found and cache of command objects + created by 'get_command_obj()'. + """ + for cmd in self.commands: + self.run_command(cmd) + + # -- Methods that operate on its Commands -------------------------- + + def run_command(self, command): + """Do whatever it takes to run a command (including nothing at all, + if the command has already been run). Specifically: if we have + already created and run the command named by 'command', return + silently without doing anything. If the command named by 'command' + doesn't even have a command object yet, create one. Then invoke + 'run()' on that command object (or an existing one). + """ + # Already been here, done that? then return silently. + if self.have_run.get(command): + return + + log.info("running %s", command) + cmd_obj = self.get_command_obj(command) + cmd_obj.ensure_finalized() + cmd_obj.run() + self.have_run[command] = 1 + + # -- Distribution query methods ------------------------------------ + + def has_pure_modules(self): + return len(self.packages or self.py_modules or []) > 0 + + def has_ext_modules(self): + return self.ext_modules and len(self.ext_modules) > 0 + + def has_c_libraries(self): + return self.libraries and len(self.libraries) > 0 + + def has_modules(self): + return self.has_pure_modules() or self.has_ext_modules() + + def has_headers(self): + return self.headers and len(self.headers) > 0 + + def has_scripts(self): + return self.scripts and len(self.scripts) > 0 + + def has_data_files(self): + return self.data_files and len(self.data_files) > 0 + + def is_pure(self): + return (self.has_pure_modules() and + not self.has_ext_modules() and + not self.has_c_libraries()) + + # -- Metadata query methods ---------------------------------------- + + # If you're looking for 'get_name()', 'get_version()', and so forth, + # they are defined in a sneaky way: the constructor binds self.get_XXX + # to self.metadata.get_XXX. The actual code is in the + # DistributionMetadata class, below. + +class DistributionMetadata: + """Dummy class to hold the distribution meta-data: name, version, + author, and so forth. 
+ """ + + _METHOD_BASENAMES = ("name", "version", "author", "author_email", + "maintainer", "maintainer_email", "url", + "license", "description", "long_description", + "keywords", "platforms", "fullname", "contact", + "contact_email", "classifiers", "download_url", + # PEP 314 + "provides", "requires", "obsoletes", + ) + + def __init__(self, path=None): + if path is not None: + self.read_pkg_file(open(path)) + else: + self.name = None + self.version = None + self.author = None + self.author_email = None + self.maintainer = None + self.maintainer_email = None + self.url = None + self.license = None + self.description = None + self.long_description = None + self.keywords = None + self.platforms = None + self.classifiers = None + self.download_url = None + # PEP 314 + self.provides = None + self.requires = None + self.obsoletes = None + + def read_pkg_file(self, file): + """Reads the metadata values from a file object.""" + msg = message_from_file(file) + + def _read_field(name): + value = msg[name] + if value == 'UNKNOWN': + return None + return value + + def _read_list(name): + values = msg.get_all(name, None) + if values == []: + return None + return values + + metadata_version = msg['metadata-version'] + self.name = _read_field('name') + self.version = _read_field('version') + self.description = _read_field('summary') + # we are filling author only. + self.author = _read_field('author') + self.maintainer = None + self.author_email = _read_field('author-email') + self.maintainer_email = None + self.url = _read_field('home-page') + self.license = _read_field('license') + + if 'download-url' in msg: + self.download_url = _read_field('download-url') + else: + self.download_url = None + + self.long_description = _read_field('description') + self.description = _read_field('summary') + + if 'keywords' in msg: + self.keywords = _read_field('keywords').split(',') + + self.platforms = _read_list('platform') + self.classifiers = _read_list('classifier') + + # PEP 314 - these fields only exist in 1.1 + if metadata_version == '1.1': + self.requires = _read_list('requires') + self.provides = _read_list('provides') + self.obsoletes = _read_list('obsoletes') + else: + self.requires = None + self.provides = None + self.obsoletes = None + + def write_pkg_info(self, base_dir): + """Write the PKG-INFO file into the release tree. + """ + with open(os.path.join(base_dir, 'PKG-INFO'), 'w', + encoding='UTF-8') as pkg_info: + self.write_pkg_file(pkg_info) + + def write_pkg_file(self, file): + """Write the PKG-INFO format data to a file object. 
+ """ + version = '1.0' + if (self.provides or self.requires or self.obsoletes or + self.classifiers or self.download_url): + version = '1.1' + + file.write('Metadata-Version: %s\n' % version) + file.write('Name: %s\n' % self.get_name()) + file.write('Version: %s\n' % self.get_version()) + file.write('Summary: %s\n' % self.get_description()) + file.write('Home-page: %s\n' % self.get_url()) + file.write('Author: %s\n' % self.get_contact()) + file.write('Author-email: %s\n' % self.get_contact_email()) + file.write('License: %s\n' % self.get_license()) + if self.download_url: + file.write('Download-URL: %s\n' % self.download_url) + + long_desc = rfc822_escape(self.get_long_description()) + file.write('Description: %s\n' % long_desc) + + keywords = ','.join(self.get_keywords()) + if keywords: + file.write('Keywords: %s\n' % keywords) + + self._write_list(file, 'Platform', self.get_platforms()) + self._write_list(file, 'Classifier', self.get_classifiers()) + + # PEP 314 + self._write_list(file, 'Requires', self.get_requires()) + self._write_list(file, 'Provides', self.get_provides()) + self._write_list(file, 'Obsoletes', self.get_obsoletes()) + + def _write_list(self, file, name, values): + for value in values: + file.write('%s: %s\n' % (name, value)) + + # -- Metadata query methods ---------------------------------------- + + def get_name(self): + return self.name or "UNKNOWN" + + def get_version(self): + return self.version or "0.0.0" + + def get_fullname(self): + return "%s-%s" % (self.get_name(), self.get_version()) + + def get_author(self): + return self.author or "UNKNOWN" + + def get_author_email(self): + return self.author_email or "UNKNOWN" + + def get_maintainer(self): + return self.maintainer or "UNKNOWN" + + def get_maintainer_email(self): + return self.maintainer_email or "UNKNOWN" + + def get_contact(self): + return self.maintainer or self.author or "UNKNOWN" + + def get_contact_email(self): + return self.maintainer_email or self.author_email or "UNKNOWN" + + def get_url(self): + return self.url or "UNKNOWN" + + def get_license(self): + return self.license or "UNKNOWN" + get_licence = get_license + + def get_description(self): + return self.description or "UNKNOWN" + + def get_long_description(self): + return self.long_description or "UNKNOWN" + + def get_keywords(self): + return self.keywords or [] + + def set_keywords(self, value): + self.keywords = _ensure_list(value, 'keywords') + + def get_platforms(self): + return self.platforms or ["UNKNOWN"] + + def set_platforms(self, value): + self.platforms = _ensure_list(value, 'platforms') + + def get_classifiers(self): + return self.classifiers or [] + + def set_classifiers(self, value): + self.classifiers = _ensure_list(value, 'classifiers') + + def get_download_url(self): + return self.download_url or "UNKNOWN" + + # PEP 314 + def get_requires(self): + return self.requires or [] + + def set_requires(self, value): + import distutils.versionpredicate + for v in value: + distutils.versionpredicate.VersionPredicate(v) + self.requires = list(value) + + def get_provides(self): + return self.provides or [] + + def set_provides(self, value): + value = [v.strip() for v in value] + for v in value: + import distutils.versionpredicate + distutils.versionpredicate.split_provision(v) + self.provides = value + + def get_obsoletes(self): + return self.obsoletes or [] + + def set_obsoletes(self, value): + import distutils.versionpredicate + for v in value: + distutils.versionpredicate.VersionPredicate(v) + self.obsoletes = list(value) + +def 
fix_help_options(options): + """Convert a 4-tuple 'help_options' list as found in various command + classes to the 3-tuple form required by FancyGetopt. + """ + new_options = [] + for help_tuple in options: + new_options.append(help_tuple[0:3]) + return new_options diff --git a/venv/Lib/site-packages/setuptools/_distutils/errors.py b/venv/Lib/site-packages/setuptools/_distutils/errors.py new file mode 100644 index 00000000..8b93059e --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/errors.py @@ -0,0 +1,97 @@ +"""distutils.errors + +Provides exceptions used by the Distutils modules. Note that Distutils +modules may raise standard exceptions; in particular, SystemExit is +usually raised for errors that are obviously the end-user's fault +(eg. bad command-line arguments). + +This module is safe to use in "from ... import *" mode; it only exports +symbols whose names start with "Distutils" and end with "Error".""" + +class DistutilsError (Exception): + """The root of all Distutils evil.""" + pass + +class DistutilsModuleError (DistutilsError): + """Unable to load an expected module, or to find an expected class + within some module (in particular, command modules and classes).""" + pass + +class DistutilsClassError (DistutilsError): + """Some command class (or possibly distribution class, if anyone + feels a need to subclass Distribution) is found not to be holding + up its end of the bargain, ie. implementing some part of the + "command "interface.""" + pass + +class DistutilsGetoptError (DistutilsError): + """The option table provided to 'fancy_getopt()' is bogus.""" + pass + +class DistutilsArgError (DistutilsError): + """Raised by fancy_getopt in response to getopt.error -- ie. an + error in the command line usage.""" + pass + +class DistutilsFileError (DistutilsError): + """Any problems in the filesystem: expected file not found, etc. + Typically this is for problems that we detect before OSError + could be raised.""" + pass + +class DistutilsOptionError (DistutilsError): + """Syntactic/semantic errors in command options, such as use of + mutually conflicting options, or inconsistent options, + badly-spelled values, etc. No distinction is made between option + values originating in the setup script, the command line, config + files, or what-have-you -- but if we *know* something originated in + the setup script, we'll raise DistutilsSetupError instead.""" + pass + +class DistutilsSetupError (DistutilsError): + """For errors that can be definitely blamed on the setup script, + such as invalid keyword arguments to 'setup()'.""" + pass + +class DistutilsPlatformError (DistutilsError): + """We don't know how to do something on the current platform (but + we do know how to do it on some platform) -- eg. 
trying to compile + C files on a platform not supported by a CCompiler subclass.""" + pass + +class DistutilsExecError (DistutilsError): + """Any problems executing an external program (such as the C + compiler, when compiling C files).""" + pass + +class DistutilsInternalError (DistutilsError): + """Internal inconsistencies or impossibilities (obviously, this + should never be seen if the code is working!).""" + pass + +class DistutilsTemplateError (DistutilsError): + """Syntax error in a file list template.""" + +class DistutilsByteCompileError(DistutilsError): + """Byte compile error.""" + +# Exception classes used by the CCompiler implementation classes +class CCompilerError (Exception): + """Some compile/link operation failed.""" + +class PreprocessError (CCompilerError): + """Failure to preprocess one or more C/C++ files.""" + +class CompileError (CCompilerError): + """Failure to compile one or more C/C++ source files.""" + +class LibError (CCompilerError): + """Failure to create a static library from one or more C/C++ object + files.""" + +class LinkError (CCompilerError): + """Failure to link one or more C/C++ object files into an executable + or shared library file.""" + +class UnknownFileError (CCompilerError): + """Attempt to process an unknown file type.""" diff --git a/venv/Lib/site-packages/setuptools/_distutils/extension.py b/venv/Lib/site-packages/setuptools/_distutils/extension.py new file mode 100644 index 00000000..c507da36 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/extension.py @@ -0,0 +1,240 @@ +"""distutils.extension + +Provides the Extension class, used to describe C/C++ extension +modules in setup scripts.""" + +import os +import warnings + +# This class is really only used by the "build_ext" command, so it might +# make sense to put it in distutils.command.build_ext. However, that +# module is already big enough, and I want to make this class a bit more +# complex to simplify some common cases ("foo" module in "foo.c") and do +# better error-checking ("foo.c" actually exists). +# +# Also, putting this in build_ext.py means every setup script would have to +# import that large-ish module (indirectly, through distutils.core) in +# order to do anything. + +class Extension: + """Just a collection of attributes that describes an extension + module and everything needed to build it (hopefully in a portable + way, but there are hooks that let you be as unportable as you need). + + Instance attributes: + name : string + the full name of the extension, including any packages -- ie. + *not* a filename or pathname, but Python dotted name + sources : [string] + list of source filenames, relative to the distribution root + (where the setup script lives), in Unix form (slash-separated) + for portability. Source files may be C, C++, SWIG (.i), + platform-specific resource files, or whatever else is recognized + by the "build_ext" command as source for a Python extension. 
+ include_dirs : [string] + list of directories to search for C/C++ header files (in Unix + form for portability) + define_macros : [(name : string, value : string|None)] + list of macros to define; each macro is defined using a 2-tuple, + where 'value' is either the string to define it to or None to + define it without a particular value (equivalent of "#define + FOO" in source or -DFOO on Unix C compiler command line) + undef_macros : [string] + list of macros to undefine explicitly + library_dirs : [string] + list of directories to search for C/C++ libraries at link time + libraries : [string] + list of library names (not filenames or paths) to link against + runtime_library_dirs : [string] + list of directories to search for C/C++ libraries at run time + (for shared extensions, this is when the extension is loaded) + extra_objects : [string] + list of extra files to link with (eg. object files not implied + by 'sources', static library that must be explicitly specified, + binary resource files, etc.) + extra_compile_args : [string] + any extra platform- and compiler-specific information to use + when compiling the source files in 'sources'. For platforms and + compilers where "command line" makes sense, this is typically a + list of command-line arguments, but for other platforms it could + be anything. + extra_link_args : [string] + any extra platform- and compiler-specific information to use + when linking object files together to create the extension (or + to create a new static Python interpreter). Similar + interpretation as for 'extra_compile_args'. + export_symbols : [string] + list of symbols to be exported from a shared extension. Not + used on all platforms, and not generally necessary for Python + extensions, which typically export exactly one symbol: "init" + + extension_name. + swig_opts : [string] + any extra options to pass to SWIG if a source file has the .i + extension. + depends : [string] + list of files that the extension depends on + language : string + extension language (i.e. "c", "c++", "objc"). Will be detected + from the source extensions if not provided. + optional : boolean + specifies that a build failure in the extension should not abort the + build process, but simply not install the failing extension. + """ + + # When adding arguments to this constructor, be sure to update + # setup_keywords in core.py. 
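The attribute list above maps directly onto the constructor arguments that follow. A hedged sketch of how an extension is declared in a setup script; the module name, source paths, and macro are invented for illustration:

```python
from distutils.core import setup, Extension

speedups = Extension(
    "autotimer._speedups",                     # dotted module name, not a file path
    sources=["src/speedups.c"],                # relative to the setup script, Unix slashes
    include_dirs=["src/include"],
    define_macros=[("WITH_FAST_PATH", None)],  # equivalent of -DWITH_FAST_PATH
    optional=True,                             # a build failure skips the extension
)

setup(name="autotimer-speedups", version="0.1", ext_modules=[speedups])
```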
+ def __init__(self, name, sources, + include_dirs=None, + define_macros=None, + undef_macros=None, + library_dirs=None, + libraries=None, + runtime_library_dirs=None, + extra_objects=None, + extra_compile_args=None, + extra_link_args=None, + export_symbols=None, + swig_opts = None, + depends=None, + language=None, + optional=None, + **kw # To catch unknown keywords + ): + if not isinstance(name, str): + raise AssertionError("'name' must be a string") + if not (isinstance(sources, list) and + all(isinstance(v, str) for v in sources)): + raise AssertionError("'sources' must be a list of strings") + + self.name = name + self.sources = sources + self.include_dirs = include_dirs or [] + self.define_macros = define_macros or [] + self.undef_macros = undef_macros or [] + self.library_dirs = library_dirs or [] + self.libraries = libraries or [] + self.runtime_library_dirs = runtime_library_dirs or [] + self.extra_objects = extra_objects or [] + self.extra_compile_args = extra_compile_args or [] + self.extra_link_args = extra_link_args or [] + self.export_symbols = export_symbols or [] + self.swig_opts = swig_opts or [] + self.depends = depends or [] + self.language = language + self.optional = optional + + # If there are unknown keyword options, warn about them + if len(kw) > 0: + options = [repr(option) for option in kw] + options = ', '.join(sorted(options)) + msg = "Unknown Extension options: %s" % options + warnings.warn(msg) + + def __repr__(self): + return '<%s.%s(%r) at %#x>' % ( + self.__class__.__module__, + self.__class__.__qualname__, + self.name, + id(self)) + + +def read_setup_file(filename): + """Reads a Setup file and returns Extension instances.""" + from distutils.sysconfig import (parse_makefile, expand_makefile_vars, + _variable_rx) + + from distutils.text_file import TextFile + from distutils.util import split_quoted + + # First pass over the file to gather "VAR = VALUE" assignments. + vars = parse_makefile(filename) + + # Second pass to gobble up the real content: lines of the form + # ... [ ...] [ ...] [ ...] + file = TextFile(filename, + strip_comments=1, skip_blanks=1, join_lines=1, + lstrip_ws=1, rstrip_ws=1) + try: + extensions = [] + + while True: + line = file.readline() + if line is None: # eof + break + if _variable_rx.match(line): # VAR=VALUE, handled in first pass + continue + + if line[0] == line[-1] == "*": + file.warn("'%s' lines not handled yet" % line) + continue + + line = expand_makefile_vars(line, vars) + words = split_quoted(line) + + # NB. this parses a slightly different syntax than the old + # makesetup script: here, there must be exactly one extension per + # line, and it must be the first word of the line. I have no idea + # why the old syntax supported multiple extensions per line, as + # they all wind up being the same. + + module = words[0] + ext = Extension(module, []) + append_next_word = None + + for word in words[1:]: + if append_next_word is not None: + append_next_word.append(word) + append_next_word = None + continue + + suffix = os.path.splitext(word)[1] + switch = word[0:2] ; value = word[2:] + + if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): + # hmm, should we do something about C vs. C++ sources? + # or leave it up to the CCompiler implementation to + # worry about? 
+ ext.sources.append(word) + elif switch == "-I": + ext.include_dirs.append(value) + elif switch == "-D": + equals = value.find("=") + if equals == -1: # bare "-DFOO" -- no value + ext.define_macros.append((value, None)) + else: # "-DFOO=blah" + ext.define_macros.append((value[0:equals], + value[equals+2:])) + elif switch == "-U": + ext.undef_macros.append(value) + elif switch == "-C": # only here 'cause makesetup has it! + ext.extra_compile_args.append(word) + elif switch == "-l": + ext.libraries.append(value) + elif switch == "-L": + ext.library_dirs.append(value) + elif switch == "-R": + ext.runtime_library_dirs.append(value) + elif word == "-rpath": + append_next_word = ext.runtime_library_dirs + elif word == "-Xlinker": + append_next_word = ext.extra_link_args + elif word == "-Xcompiler": + append_next_word = ext.extra_compile_args + elif switch == "-u": + ext.extra_link_args.append(word) + if not value: + append_next_word = ext.extra_link_args + elif suffix in (".a", ".so", ".sl", ".o", ".dylib"): + # NB. a really faithful emulation of makesetup would + # append a .o file to extra_objects only if it + # had a slash in it; otherwise, it would s/.o/.c/ + # and append it to sources. Hmmmm. + ext.extra_objects.append(word) + else: + file.warn("unrecognized argument '%s'" % word) + + extensions.append(ext) + finally: + file.close() + + return extensions diff --git a/venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py b/venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py new file mode 100644 index 00000000..7d170dd2 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py @@ -0,0 +1,457 @@ +"""distutils.fancy_getopt + +Wrapper around the standard getopt module that provides the following +additional features: + * short and long options are tied together + * options have help strings, so fancy_getopt could potentially + create a complete usage summary + * options set attributes of a passed-in object +""" + +import sys, string, re +import getopt +from distutils.errors import * + +# Much like command_re in distutils.core, this is close to but not quite +# the same as a Python NAME -- except, in the spirit of most GNU +# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!) +# The similarities to NAME are again not a coincidence... +longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)' +longopt_re = re.compile(r'^%s$' % longopt_pat) + +# For recognizing "negative alias" options, eg. "quiet=!verbose" +neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat)) + +# This is used to translate long options to legitimate Python identifiers +# (for use as attributes of some object). +longopt_xlate = str.maketrans('-', '_') + +class FancyGetopt: + """Wrapper around the standard 'getopt()' module that provides some + handy extra functionality: + * short and long options are tied together + * options have help strings, and help text can be assembled + from them + * options set attributes of a passed-in object + * boolean options can have "negative aliases" -- eg. if + --quiet is the "negative alias" of --verbose, then "--quiet" + on the command line sets 'verbose' to false + """ + + def __init__(self, option_table=None): + # The option table is (currently) a list of tuples. The + # tuples may have 3 or four values: + # (long_option, short_option, help_string [, repeatable]) + # if an option takes an argument, its long_option should have '=' + # appended; short_option should just be a single character, no ':' + # in any case. 
If a long_option doesn't have a corresponding + # short_option, short_option should be None. All option tuples + # must have long options. + self.option_table = option_table + + # 'option_index' maps long option names to entries in the option + # table (ie. those 3-tuples). + self.option_index = {} + if self.option_table: + self._build_index() + + # 'alias' records (duh) alias options; {'foo': 'bar'} means + # --foo is an alias for --bar + self.alias = {} + + # 'negative_alias' keeps track of options that are the boolean + # opposite of some other option + self.negative_alias = {} + + # These keep track of the information in the option table. We + # don't actually populate these structures until we're ready to + # parse the command-line, since the 'option_table' passed in here + # isn't necessarily the final word. + self.short_opts = [] + self.long_opts = [] + self.short2long = {} + self.attr_name = {} + self.takes_arg = {} + + # And 'option_order' is filled up in 'getopt()'; it records the + # original order of options (and their values) on the command-line, + # but expands short options, converts aliases, etc. + self.option_order = [] + + def _build_index(self): + self.option_index.clear() + for option in self.option_table: + self.option_index[option[0]] = option + + def set_option_table(self, option_table): + self.option_table = option_table + self._build_index() + + def add_option(self, long_option, short_option=None, help_string=None): + if long_option in self.option_index: + raise DistutilsGetoptError( + "option conflict: already an option '%s'" % long_option) + else: + option = (long_option, short_option, help_string) + self.option_table.append(option) + self.option_index[long_option] = option + + def has_option(self, long_option): + """Return true if the option table for this parser has an + option with long name 'long_option'.""" + return long_option in self.option_index + + def get_attr_name(self, long_option): + """Translate long option name 'long_option' to the form it + has as an attribute of some object: ie., translate hyphens + to underscores.""" + return long_option.translate(longopt_xlate) + + def _check_alias_dict(self, aliases, what): + assert isinstance(aliases, dict) + for (alias, opt) in aliases.items(): + if alias not in self.option_index: + raise DistutilsGetoptError(("invalid %s '%s': " + "option '%s' not defined") % (what, alias, alias)) + if opt not in self.option_index: + raise DistutilsGetoptError(("invalid %s '%s': " + "aliased option '%s' not defined") % (what, alias, opt)) + + def set_aliases(self, alias): + """Set the aliases for this option parser.""" + self._check_alias_dict(alias, "alias") + self.alias = alias + + def set_negative_aliases(self, negative_alias): + """Set the negative aliases for this option parser. + 'negative_alias' should be a dictionary mapping option names to + option names, both the key and value must already be defined + in the option table.""" + self._check_alias_dict(negative_alias, "negative alias") + self.negative_alias = negative_alias + + def _grok_option_table(self): + """Populate the various data structures that keep tabs on the + option table. Called by 'getopt()' before it can do anything + worthwhile. 
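The option-table conventions described above (tied short/long options, `=` for arguments, negative aliases, attribute-style results) are clearer with a tiny driver. A sketch, not part of the patched file, with an invented option table:

```python
from distutils.fancy_getopt import FancyGetopt

options = [("verbose", "v", "run verbosely"),
           ("quiet", "q", "run quietly"),
           ("build-dir=", "b", "directory to build in")]

parser = FancyGetopt(options)
parser.set_negative_aliases({"quiet": "verbose"})  # --quiet/-q clears 'verbose'

args, opts = parser.getopt(["-q", "--build-dir=out", "prog.c"])
print(args)            # ['prog.c']
print(opts.verbose)    # 0   (set via the negative alias)
print(opts.build_dir)  # 'out' (hyphen translated to underscore)
```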
+ """ + self.long_opts = [] + self.short_opts = [] + self.short2long.clear() + self.repeat = {} + + for option in self.option_table: + if len(option) == 3: + long, short, help = option + repeat = 0 + elif len(option) == 4: + long, short, help, repeat = option + else: + # the option table is part of the code, so simply + # assert that it is correct + raise ValueError("invalid option tuple: %r" % (option,)) + + # Type- and value-check the option names + if not isinstance(long, str) or len(long) < 2: + raise DistutilsGetoptError(("invalid long option '%s': " + "must be a string of length >= 2") % long) + + if (not ((short is None) or + (isinstance(short, str) and len(short) == 1))): + raise DistutilsGetoptError("invalid short option '%s': " + "must a single character or None" % short) + + self.repeat[long] = repeat + self.long_opts.append(long) + + if long[-1] == '=': # option takes an argument? + if short: short = short + ':' + long = long[0:-1] + self.takes_arg[long] = 1 + else: + # Is option is a "negative alias" for some other option (eg. + # "quiet" == "!verbose")? + alias_to = self.negative_alias.get(long) + if alias_to is not None: + if self.takes_arg[alias_to]: + raise DistutilsGetoptError( + "invalid negative alias '%s': " + "aliased option '%s' takes a value" + % (long, alias_to)) + + self.long_opts[-1] = long # XXX redundant?! + self.takes_arg[long] = 0 + + # If this is an alias option, make sure its "takes arg" flag is + # the same as the option it's aliased to. + alias_to = self.alias.get(long) + if alias_to is not None: + if self.takes_arg[long] != self.takes_arg[alias_to]: + raise DistutilsGetoptError( + "invalid alias '%s': inconsistent with " + "aliased option '%s' (one of them takes a value, " + "the other doesn't" + % (long, alias_to)) + + # Now enforce some bondage on the long option name, so we can + # later translate it to an attribute name on some object. Have + # to do this a bit late to make sure we've removed any trailing + # '='. + if not longopt_re.match(long): + raise DistutilsGetoptError( + "invalid long option name '%s' " + "(must be letters, numbers, hyphens only" % long) + + self.attr_name[long] = self.get_attr_name(long) + if short: + self.short_opts.append(short) + self.short2long[short[0]] = long + + def getopt(self, args=None, object=None): + """Parse command-line options in args. Store as attributes on object. + + If 'args' is None or not supplied, uses 'sys.argv[1:]'. If + 'object' is None or not supplied, creates a new OptionDummy + object, stores option values there, and returns a tuple (args, + object). If 'object' is supplied, it is modified in place and + 'getopt()' just returns 'args'; in both cases, the returned + 'args' is a modified copy of the passed-in 'args' list, which + is left untouched. + """ + if args is None: + args = sys.argv[1:] + if object is None: + object = OptionDummy() + created_object = True + else: + created_object = False + + self._grok_option_table() + + short_opts = ' '.join(self.short_opts) + try: + opts, args = getopt.getopt(args, short_opts, self.long_opts) + except getopt.error as msg: + raise DistutilsArgError(msg) + + for opt, val in opts: + if len(opt) == 2 and opt[0] == '-': # it's a short option + opt = self.short2long[opt[1]] + else: + assert len(opt) > 2 and opt[:2] == '--' + opt = opt[2:] + + alias = self.alias.get(opt) + if alias: + opt = alias + + if not self.takes_arg[opt]: # boolean option? 
+ assert val == '', "boolean option can't have value" + alias = self.negative_alias.get(opt) + if alias: + opt = alias + val = 0 + else: + val = 1 + + attr = self.attr_name[opt] + # The only repeating option at the moment is 'verbose'. + # It has a negative option -q quiet, which should set verbose = 0. + if val and self.repeat.get(attr) is not None: + val = getattr(object, attr, 0) + 1 + setattr(object, attr, val) + self.option_order.append((opt, val)) + + # for opts + if created_object: + return args, object + else: + return args + + def get_option_order(self): + """Returns the list of (option, value) tuples processed by the + previous run of 'getopt()'. Raises RuntimeError if + 'getopt()' hasn't been called yet. + """ + if self.option_order is None: + raise RuntimeError("'getopt()' hasn't been called yet") + else: + return self.option_order + + def generate_help(self, header=None): + """Generate help text (a list of strings, one per suggested line of + output) from the option table for this FancyGetopt object. + """ + # Blithely assume the option table is good: probably wouldn't call + # 'generate_help()' unless you've already called 'getopt()'. + + # First pass: determine maximum length of long option names + max_opt = 0 + for option in self.option_table: + long = option[0] + short = option[1] + l = len(long) + if long[-1] == '=': + l = l - 1 + if short is not None: + l = l + 5 # " (-x)" where short == 'x' + if l > max_opt: + max_opt = l + + opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter + + # Typical help block looks like this: + # --foo controls foonabulation + # Help block for longest option looks like this: + # --flimflam set the flim-flam level + # and with wrapped text: + # --flimflam set the flim-flam level (must be between + # 0 and 100, except on Tuesdays) + # Options with short names will have the short name shown (but + # it doesn't contribute to max_opt): + # --foo (-f) controls foonabulation + # If adding the short option would make the left column too wide, + # we push the explanation off to the next line + # --flimflam (-l) + # set the flim-flam level + # Important parameters: + # - 2 spaces before option block start lines + # - 2 dashes for each long option name + # - min. 2 spaces between option and explanation (gutter) + # - 5 characters (incl. space) for short option name + + # Now generate lines of help text. (If 80 columns were good enough + # for Jesus, then 78 columns are good enough for me!) 
+ line_width = 78 + text_width = line_width - opt_width + big_indent = ' ' * opt_width + if header: + lines = [header] + else: + lines = ['Option summary:'] + + for option in self.option_table: + long, short, help = option[:3] + text = wrap_text(help, text_width) + if long[-1] == '=': + long = long[0:-1] + + # Case 1: no short option at all (makes life easy) + if short is None: + if text: + lines.append(" --%-*s %s" % (max_opt, long, text[0])) + else: + lines.append(" --%-*s " % (max_opt, long)) + + # Case 2: we have a short option, so we have to include it + # just after the long option + else: + opt_names = "%s (-%s)" % (long, short) + if text: + lines.append(" --%-*s %s" % + (max_opt, opt_names, text[0])) + else: + lines.append(" --%-*s" % opt_names) + + for l in text[1:]: + lines.append(big_indent + l) + return lines + + def print_help(self, header=None, file=None): + if file is None: + file = sys.stdout + for line in self.generate_help(header): + file.write(line + "\n") + + +def fancy_getopt(options, negative_opt, object, args): + parser = FancyGetopt(options) + parser.set_negative_aliases(negative_opt) + return parser.getopt(args, object) + + +WS_TRANS = {ord(_wschar) : ' ' for _wschar in string.whitespace} + +def wrap_text(text, width): + """wrap_text(text : string, width : int) -> [string] + + Split 'text' into multiple lines of no more than 'width' characters + each, and return the list of strings that results. + """ + if text is None: + return [] + if len(text) <= width: + return [text] + + text = text.expandtabs() + text = text.translate(WS_TRANS) + chunks = re.split(r'( +|-+)', text) + chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings + lines = [] + + while chunks: + cur_line = [] # list of chunks (to-be-joined) + cur_len = 0 # length of current line + + while chunks: + l = len(chunks[0]) + if cur_len + l <= width: # can squeeze (at least) this chunk in + cur_line.append(chunks[0]) + del chunks[0] + cur_len = cur_len + l + else: # this line is full + # drop last chunk if all space + if cur_line and cur_line[-1][0] == ' ': + del cur_line[-1] + break + + if chunks: # any chunks left to process? + # if the current line is still empty, then we had a single + # chunk that's too big too fit on a line -- so we break + # down and break it up at the line width + if cur_len == 0: + cur_line.append(chunks[0][0:width]) + chunks[0] = chunks[0][width:] + + # all-whitespace chunks at the end of a line can be discarded + # (and we know from the re.split above that if a chunk has + # *any* whitespace, it is *all* whitespace) + if chunks[0][0] == ' ': + del chunks[0] + + # and store this line in the list-of-all-lines -- as a single + # string, of course! + lines.append(''.join(cur_line)) + + return lines + + +def translate_longopt(opt): + """Convert a long option name to a valid Python identifier by + changing "-" to "_". + """ + return opt.translate(longopt_xlate) + + +class OptionDummy: + """Dummy class just used as a place to hold command-line option + values as instance attributes.""" + + def __init__(self, options=[]): + """Create a new OptionDummy instance. The attributes listed in + 'options' will be initialized to None.""" + for opt in options: + setattr(self, opt, None) + + +if __name__ == "__main__": + text = """\ +Tra-la-la, supercalifragilisticexpialidocious. +How *do* you spell that odd word, anyways? 
+(Someone ask Mary -- she'll know [or she'll +say, "How should I know?"].)""" + + for w in (10, 20, 30, 40): + print("width: %d" % w) + print("\n".join(wrap_text(text, w))) + print() diff --git a/venv/Lib/site-packages/setuptools/_distutils/file_util.py b/venv/Lib/site-packages/setuptools/_distutils/file_util.py new file mode 100644 index 00000000..b3fee35a --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/file_util.py @@ -0,0 +1,238 @@ +"""distutils.file_util + +Utility functions for operating on single files. +""" + +import os +from distutils.errors import DistutilsFileError +from distutils import log + +# for generating verbose output in 'copy_file()' +_copy_action = { None: 'copying', + 'hard': 'hard linking', + 'sym': 'symbolically linking' } + + +def _copy_file_contents(src, dst, buffer_size=16*1024): + """Copy the file 'src' to 'dst'; both must be filenames. Any error + opening either file, reading from 'src', or writing to 'dst', raises + DistutilsFileError. Data is read/written in chunks of 'buffer_size' + bytes (default 16k). No attempt is made to handle anything apart from + regular files. + """ + # Stolen from shutil module in the standard library, but with + # custom error-handling added. + fsrc = None + fdst = None + try: + try: + fsrc = open(src, 'rb') + except OSError as e: + raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror)) + + if os.path.exists(dst): + try: + os.unlink(dst) + except OSError as e: + raise DistutilsFileError( + "could not delete '%s': %s" % (dst, e.strerror)) + + try: + fdst = open(dst, 'wb') + except OSError as e: + raise DistutilsFileError( + "could not create '%s': %s" % (dst, e.strerror)) + + while True: + try: + buf = fsrc.read(buffer_size) + except OSError as e: + raise DistutilsFileError( + "could not read from '%s': %s" % (src, e.strerror)) + + if not buf: + break + + try: + fdst.write(buf) + except OSError as e: + raise DistutilsFileError( + "could not write to '%s': %s" % (dst, e.strerror)) + finally: + if fdst: + fdst.close() + if fsrc: + fsrc.close() + +def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, + link=None, verbose=1, dry_run=0): + """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is + copied there with the same name; otherwise, it must be a filename. (If + the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' + is true (the default), the file's mode (type and permission bits, or + whatever is analogous on the current platform) is copied. If + 'preserve_times' is true (the default), the last-modified and + last-access times are copied as well. If 'update' is true, 'src' will + only be copied if 'dst' does not exist, or if 'dst' does exist but is + older than 'src'. + + 'link' allows you to make hard links (os.link) or symbolic links + (os.symlink) instead of copying: set it to "hard" or "sym"; if it is + None (the default), files are copied. Don't set 'link' on systems that + don't support it: 'copy_file()' doesn't check if hard or symbolic + linking is available. If hardlink fails, falls back to + _copy_file_contents(). + + Under Mac OS, uses the native file copy function in macostools; on + other systems, uses '_copy_file_contents()' to copy file contents. + + Return a tuple (dest_name, copied): 'dest_name' is the actual name of + the output file, and 'copied' is true if the file was copied (or would + have been copied, if 'dry_run' true). + """ + # XXX if the destination file already exists, we clobber it if + # copying, but blow up if linking. 
Hmmm. And I don't know what + # macostools.copyfile() does. Should definitely be consistent, and + # should probably blow up if destination exists and we would be + # changing it (ie. it's not already a hard/soft link to src OR + # (not update) and (src newer than dst). + + from distutils.dep_util import newer + from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE + + if not os.path.isfile(src): + raise DistutilsFileError( + "can't copy '%s': doesn't exist or not a regular file" % src) + + if os.path.isdir(dst): + dir = dst + dst = os.path.join(dst, os.path.basename(src)) + else: + dir = os.path.dirname(dst) + + if update and not newer(src, dst): + if verbose >= 1: + log.debug("not copying %s (output up-to-date)", src) + return (dst, 0) + + try: + action = _copy_action[link] + except KeyError: + raise ValueError("invalid value '%s' for 'link' argument" % link) + + if verbose >= 1: + if os.path.basename(dst) == os.path.basename(src): + log.info("%s %s -> %s", action, src, dir) + else: + log.info("%s %s -> %s", action, src, dst) + + if dry_run: + return (dst, 1) + + # If linking (hard or symbolic), use the appropriate system call + # (Unix only, of course, but that's the caller's responsibility) + elif link == 'hard': + if not (os.path.exists(dst) and os.path.samefile(src, dst)): + try: + os.link(src, dst) + return (dst, 1) + except OSError: + # If hard linking fails, fall back on copying file + # (some special filesystems don't support hard linking + # even under Unix, see issue #8876). + pass + elif link == 'sym': + if not (os.path.exists(dst) and os.path.samefile(src, dst)): + os.symlink(src, dst) + return (dst, 1) + + # Otherwise (non-Mac, not linking), copy the file contents and + # (optionally) copy the times and mode. + _copy_file_contents(src, dst) + if preserve_mode or preserve_times: + st = os.stat(src) + + # According to David Ascher , utime() should be done + # before chmod() (at least under NT). + if preserve_times: + os.utime(dst, (st[ST_ATIME], st[ST_MTIME])) + if preserve_mode: + os.chmod(dst, S_IMODE(st[ST_MODE])) + + return (dst, 1) + + +# XXX I suspect this is Unix-specific -- need porting help! +def move_file (src, dst, + verbose=1, + dry_run=0): + + """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will + be moved into it with the same name; otherwise, 'src' is just renamed + to 'dst'. Return the new full name of the file. + + Handles cross-device moves on Unix using 'copy_file()'. What about + other systems??? 
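As the docstring above notes, `copy_file()` returns a `(destination, copied)` pair and honours `update`, `dry_run`, and `link`. A short sketch, not part of the patched file, assuming a scratch directory where `src.txt` may be created (`write_file()` is the helper defined further down in this module):

```python
from distutils.file_util import copy_file, write_file

write_file("src.txt", ["hello"])
dest, copied = copy_file("src.txt", "dst.txt", verbose=0)
print(dest, copied)   # dst.txt 1

# With update=1 the copy is skipped while dst.txt is not older than src.txt.
dest, copied = copy_file("src.txt", "dst.txt", update=1, verbose=0)
print(copied)         # 0
```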
+ """ + from os.path import exists, isfile, isdir, basename, dirname + import errno + + if verbose >= 1: + log.info("moving %s -> %s", src, dst) + + if dry_run: + return dst + + if not isfile(src): + raise DistutilsFileError("can't move '%s': not a regular file" % src) + + if isdir(dst): + dst = os.path.join(dst, basename(src)) + elif exists(dst): + raise DistutilsFileError( + "can't move '%s': destination '%s' already exists" % + (src, dst)) + + if not isdir(dirname(dst)): + raise DistutilsFileError( + "can't move '%s': destination '%s' not a valid path" % + (src, dst)) + + copy_it = False + try: + os.rename(src, dst) + except OSError as e: + (num, msg) = e.args + if num == errno.EXDEV: + copy_it = True + else: + raise DistutilsFileError( + "couldn't move '%s' to '%s': %s" % (src, dst, msg)) + + if copy_it: + copy_file(src, dst, verbose=verbose) + try: + os.unlink(src) + except OSError as e: + (num, msg) = e.args + try: + os.unlink(dst) + except OSError: + pass + raise DistutilsFileError( + "couldn't move '%s' to '%s' by copy/delete: " + "delete '%s' failed: %s" + % (src, dst, src, msg)) + return dst + + +def write_file (filename, contents): + """Create a file with the specified name and write 'contents' (a + sequence of strings without line terminators) to it. + """ + f = open(filename, "w") + try: + for line in contents: + f.write(line + "\n") + finally: + f.close() diff --git a/venv/Lib/site-packages/setuptools/_distutils/filelist.py b/venv/Lib/site-packages/setuptools/_distutils/filelist.py new file mode 100644 index 00000000..c92d5fdb --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/filelist.py @@ -0,0 +1,327 @@ +"""distutils.filelist + +Provides the FileList class, used for poking about the filesystem +and building lists of files. +""" + +import os, re +import fnmatch +import functools +from distutils.util import convert_path +from distutils.errors import DistutilsTemplateError, DistutilsInternalError +from distutils import log + +class FileList: + """A list of files built by on exploring the filesystem and filtered by + applying various patterns to what we find there. + + Instance attributes: + dir + directory from which files will be taken -- only used if + 'allfiles' not supplied to constructor + files + list of filenames currently being built/filtered/manipulated + allfiles + complete list of files under consideration (ie. without any + filtering applied) + """ + + def __init__(self, warn=None, debug_print=None): + # ignore argument to FileList, but keep them for backwards + # compatibility + self.allfiles = None + self.files = [] + + def set_allfiles(self, allfiles): + self.allfiles = allfiles + + def findall(self, dir=os.curdir): + self.allfiles = findall(dir) + + def debug_print(self, msg): + """Print 'msg' to stdout if the global DEBUG (taken from the + DISTUTILS_DEBUG environment variable) flag is true. + """ + from distutils.debug import DEBUG + if DEBUG: + print(msg) + + # -- List-like methods --------------------------------------------- + + def append(self, item): + self.files.append(item) + + def extend(self, items): + self.files.extend(items) + + def sort(self): + # Not a strict lexical sort! + sortable_files = sorted(map(os.path.split, self.files)) + self.files = [] + for sort_tuple in sortable_files: + self.files.append(os.path.join(*sort_tuple)) + + + # -- Other miscellaneous utility methods --------------------------- + + def remove_duplicates(self): + # Assumes list has been sorted! 
+ for i in range(len(self.files) - 1, 0, -1): + if self.files[i] == self.files[i - 1]: + del self.files[i] + + + # -- "File template" methods --------------------------------------- + + def _parse_template_line(self, line): + words = line.split() + action = words[0] + + patterns = dir = dir_pattern = None + + if action in ('include', 'exclude', + 'global-include', 'global-exclude'): + if len(words) < 2: + raise DistutilsTemplateError( + "'%s' expects ..." % action) + patterns = [convert_path(w) for w in words[1:]] + elif action in ('recursive-include', 'recursive-exclude'): + if len(words) < 3: + raise DistutilsTemplateError( + "'%s' expects ..." % action) + dir = convert_path(words[1]) + patterns = [convert_path(w) for w in words[2:]] + elif action in ('graft', 'prune'): + if len(words) != 2: + raise DistutilsTemplateError( + "'%s' expects a single " % action) + dir_pattern = convert_path(words[1]) + else: + raise DistutilsTemplateError("unknown action '%s'" % action) + + return (action, patterns, dir, dir_pattern) + + def process_template_line(self, line): + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dir_pattern). + (action, patterns, dir, dir_pattern) = self._parse_template_line(line) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. + if action == 'include': + self.debug_print("include " + ' '.join(patterns)) + for pattern in patterns: + if not self.include_pattern(pattern, anchor=1): + log.warn("warning: no files found matching '%s'", + pattern) + + elif action == 'exclude': + self.debug_print("exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.exclude_pattern(pattern, anchor=1): + log.warn(("warning: no previously-included files " + "found matching '%s'"), pattern) + + elif action == 'global-include': + self.debug_print("global-include " + ' '.join(patterns)) + for pattern in patterns: + if not self.include_pattern(pattern, anchor=0): + log.warn(("warning: no files found matching '%s' " + "anywhere in distribution"), pattern) + + elif action == 'global-exclude': + self.debug_print("global-exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.exclude_pattern(pattern, anchor=0): + log.warn(("warning: no previously-included files matching " + "'%s' found anywhere in distribution"), + pattern) + + elif action == 'recursive-include': + self.debug_print("recursive-include %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.include_pattern(pattern, prefix=dir): + log.warn(("warning: no files found matching '%s' " + "under directory '%s'"), + pattern, dir) + + elif action == 'recursive-exclude': + self.debug_print("recursive-exclude %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.exclude_pattern(pattern, prefix=dir): + log.warn(("warning: no previously-included files matching " + "'%s' found under directory '%s'"), + pattern, dir) + + elif action == 'graft': + self.debug_print("graft " + dir_pattern) + if not self.include_pattern(None, prefix=dir_pattern): + log.warn("warning: no directories found matching '%s'", + dir_pattern) + + elif action == 'prune': + self.debug_print("prune " + dir_pattern) + if not 
self.exclude_pattern(None, prefix=dir_pattern): + log.warn(("no previously-included directories found " + "matching '%s'"), dir_pattern) + else: + raise DistutilsInternalError( + "this cannot happen: invalid action '%s'" % action) + + + # -- Filtering/selection methods ----------------------------------- + + def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): + """Select strings (presumably filenames) from 'self.files' that + match 'pattern', a Unix-style wildcard (glob) pattern. Patterns + are not quite the same as implemented by the 'fnmatch' module: '*' + and '?' match non-special characters, where "special" is platform- + dependent: slash on Unix; colon, slash, and backslash on + DOS/Windows; and colon on Mac OS. + + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. + + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found, False otherwise. + """ + # XXX docstring lying about what the special chars are? + files_found = False + pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) + self.debug_print("include_pattern: applying regex r'%s'" % + pattern_re.pattern) + + # delayed loading of allfiles list + if self.allfiles is None: + self.findall() + + for name in self.allfiles: + if pattern_re.search(name): + self.debug_print(" adding " + name) + self.files.append(name) + files_found = True + return files_found + + + def exclude_pattern (self, pattern, + anchor=1, prefix=None, is_regex=0): + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. Other parameters are the same as for + 'include_pattern()', above. + The list 'self.files' is modified in place. + Return True if files are found, False otherwise. + """ + files_found = False + pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) + self.debug_print("exclude_pattern: applying regex r'%s'" % + pattern_re.pattern) + for i in range(len(self.files)-1, -1, -1): + if pattern_re.search(self.files[i]): + self.debug_print(" removing " + self.files[i]) + del self.files[i] + files_found = True + return files_found + + +# ---------------------------------------------------------------------- +# Utility functions + +def _find_all_simple(path): + """ + Find all files under 'path' + """ + results = ( + os.path.join(base, file) + for base, dirs, files in os.walk(path, followlinks=True) + for file in files + ) + return filter(os.path.isfile, results) + + +def findall(dir=os.curdir): + """ + Find all files under 'dir' and return the list of full filenames. + Unless dir is '.', return full filenames with dir prepended. + """ + files = _find_all_simple(dir) + if dir == os.curdir: + make_rel = functools.partial(os.path.relpath, start=dir) + files = map(make_rel, files) + return list(files) + + +def glob_to_re(pattern): + """Translate a shell-like glob pattern to a regular expression; return + a string containing the regex. 
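`process_template_line()` above dispatches the MANIFEST-template actions (`include`, `global-include`, `recursive-exclude`, `graft`, `prune`, ...). A hedged sketch, not part of the patched file, using an in-memory file list so nothing touches the filesystem; the file names are invented and POSIX-style separators are assumed:

```python
from distutils.filelist import FileList

fl = FileList()
fl.set_allfiles(["README.md", "pkg/a.py", "pkg/data/x.json", "tests/test_a.py"])

fl.process_template_line("global-include *.py")           # match *.py anywhere
fl.process_template_line("recursive-exclude tests *.py")  # drop matches under tests/
fl.sort()
fl.remove_duplicates()
print(fl.files)   # expected (POSIX separators): ['pkg/a.py']
```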
Differs from 'fnmatch.translate()' in + that '*' does not match "special characters" (which are + platform-specific). + """ + pattern_re = fnmatch.translate(pattern) + + # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which + # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, + # and by extension they shouldn't match such "special characters" under + # any OS. So change all non-escaped dots in the RE to match any + # character except the special characters (currently: just os.sep). + sep = os.sep + if os.sep == '\\': + # we're using a regex to manipulate a regex, so we need + # to escape the backslash twice + sep = r'\\\\' + escaped = r'\1[^%s]' % sep + pattern_re = re.sub(r'((?= self.threshold: + if args: + msg = msg % args + if level in (WARN, ERROR, FATAL): + stream = sys.stderr + else: + stream = sys.stdout + try: + stream.write('%s\n' % msg) + except UnicodeEncodeError: + # emulate backslashreplace error handler + encoding = stream.encoding + msg = msg.encode(encoding, "backslashreplace").decode(encoding) + stream.write('%s\n' % msg) + stream.flush() + + def log(self, level, msg, *args): + self._log(level, msg, args) + + def debug(self, msg, *args): + self._log(DEBUG, msg, args) + + def info(self, msg, *args): + self._log(INFO, msg, args) + + def warn(self, msg, *args): + self._log(WARN, msg, args) + + def error(self, msg, *args): + self._log(ERROR, msg, args) + + def fatal(self, msg, *args): + self._log(FATAL, msg, args) + +_global_log = Log() +log = _global_log.log +debug = _global_log.debug +info = _global_log.info +warn = _global_log.warn +error = _global_log.error +fatal = _global_log.fatal + +def set_threshold(level): + # return the old threshold for use from tests + old = _global_log.threshold + _global_log.threshold = level + return old + +def set_verbosity(v): + if v <= 0: + set_threshold(WARN) + elif v == 1: + set_threshold(INFO) + elif v >= 2: + set_threshold(DEBUG) diff --git a/venv/Lib/site-packages/setuptools/_distutils/msvc9compiler.py b/venv/Lib/site-packages/setuptools/_distutils/msvc9compiler.py new file mode 100644 index 00000000..a1b3b02f --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/msvc9compiler.py @@ -0,0 +1,788 @@ +"""distutils.msvc9compiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for the Microsoft Visual Studio 2008. + +The module is compatible with VS 2005 and VS 2008. You can find legacy support +for older versions of VS in distutils.msvccompiler. 
+""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +# finding DevStudio (through the registry) +# ported to VS2005 and VS 2008 by Christian Heimes + +import os +import subprocess +import sys +import re + +from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ + CompileError, LibError, LinkError +from distutils.ccompiler import CCompiler, gen_lib_options +from distutils import log +from distutils.util import get_platform + +import winreg + +RegOpenKeyEx = winreg.OpenKeyEx +RegEnumKey = winreg.EnumKey +RegEnumValue = winreg.EnumValue +RegError = winreg.error + +HKEYS = (winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT) + +NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32) +if NATIVE_WIN64: + # Visual C++ is a 32-bit application, so we need to look in + # the corresponding registry branch, if we're running a + # 64-bit Python on Win64 + VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f" + WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows" + NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework" +else: + VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" + WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" + NET_BASE = r"Software\Microsoft\.NETFramework" + +# A map keyed by get_platform() return values to values accepted by +# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is +# the param to cross-compile on x86 targeting amd64.) +PLAT_TO_VCVARS = { + 'win32' : 'x86', + 'win-amd64' : 'amd64', +} + +class Reg: + """Helper class to read values from the registry + """ + + def get_value(cls, path, key): + for base in HKEYS: + d = cls.read_values(base, path) + if d and key in d: + return d[key] + raise KeyError(key) + get_value = classmethod(get_value) + + def read_keys(cls, base, key): + """Return list of registry keys.""" + try: + handle = RegOpenKeyEx(base, key) + except RegError: + return None + L = [] + i = 0 + while True: + try: + k = RegEnumKey(handle, i) + except RegError: + break + L.append(k) + i += 1 + return L + read_keys = classmethod(read_keys) + + def read_values(cls, base, key): + """Return dict of registry keys and values. + + All names are converted to lowercase. 
+ """ + try: + handle = RegOpenKeyEx(base, key) + except RegError: + return None + d = {} + i = 0 + while True: + try: + name, value, type = RegEnumValue(handle, i) + except RegError: + break + name = name.lower() + d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) + i += 1 + return d + read_values = classmethod(read_values) + + def convert_mbcs(s): + dec = getattr(s, "decode", None) + if dec is not None: + try: + s = dec("mbcs") + except UnicodeError: + pass + return s + convert_mbcs = staticmethod(convert_mbcs) + +class MacroExpander: + + def __init__(self, version): + self.macros = {} + self.vsbase = VS_BASE % version + self.load_macros(version) + + def set_macro(self, macro, path, key): + self.macros["$(%s)" % macro] = Reg.get_value(path, key) + + def load_macros(self, version): + self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir") + self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir") + self.set_macro("FrameworkDir", NET_BASE, "installroot") + try: + if version >= 8.0: + self.set_macro("FrameworkSDKDir", NET_BASE, + "sdkinstallrootv2.0") + else: + raise KeyError("sdkinstallrootv2.0") + except KeyError: + raise DistutilsPlatformError( + """Python was built with Visual Studio 2008; +extensions must be built with a compiler than can generate compatible binaries. +Visual Studio 2008 was not found on this system. If you have Cygwin installed, +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") + + if version >= 9.0: + self.set_macro("FrameworkVersion", self.vsbase, "clr version") + self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder") + else: + p = r"Software\Microsoft\NET Framework Setup\Product" + for base in HKEYS: + try: + h = RegOpenKeyEx(base, p) + except RegError: + continue + key = RegEnumKey(h, 0) + d = Reg.get_value(base, r"%s\%s" % (p, key)) + self.macros["$(FrameworkVersion)"] = d["version"] + + def sub(self, s): + for k, v in self.macros.items(): + s = s.replace(k, v) + return s + +def get_build_version(): + """Return the version of MSVC that was used to build Python. + + For Python 2.3 and up, the version number is included in + sys.version. For earlier versions, assume the compiler is MSVC 6. + """ + prefix = "MSC v." + i = sys.version.find(prefix) + if i == -1: + return 6 + i = i + len(prefix) + s, rest = sys.version[i:].split(" ", 1) + majorVersion = int(s[:-2]) - 6 + if majorVersion >= 13: + # v13 was skipped and should be v14 + majorVersion += 1 + minorVersion = int(s[2:3]) / 10.0 + # I don't think paths are affected by minor version in version 6 + if majorVersion == 6: + minorVersion = 0 + if majorVersion >= 6: + return majorVersion + minorVersion + # else we don't know what version of the compiler this is + return None + +def normalize_and_reduce_paths(paths): + """Return a list of normalized paths with duplicates removed. + + The current order of paths is maintained. + """ + # Paths are normalized so things like: /a and /a/ aren't both preserved. + reduced_paths = [] + for p in paths: + np = os.path.normpath(p) + # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. + if np not in reduced_paths: + reduced_paths.append(np) + return reduced_paths + +def removeDuplicates(variable): + """Remove duplicate values of an environment variable. 
+ """ + oldList = variable.split(os.pathsep) + newList = [] + for i in oldList: + if i not in newList: + newList.append(i) + newVariable = os.pathsep.join(newList) + return newVariable + +def find_vcvarsall(version): + """Find the vcvarsall.bat file + + At first it tries to find the productdir of VS 2008 in the registry. If + that fails it falls back to the VS90COMNTOOLS env var. + """ + vsbase = VS_BASE % version + try: + productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, + "productdir") + except KeyError: + log.debug("Unable to find productdir in registry") + productdir = None + + if not productdir or not os.path.isdir(productdir): + toolskey = "VS%0.f0COMNTOOLS" % version + toolsdir = os.environ.get(toolskey, None) + + if toolsdir and os.path.isdir(toolsdir): + productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") + productdir = os.path.abspath(productdir) + if not os.path.isdir(productdir): + log.debug("%s is not a valid directory" % productdir) + return None + else: + log.debug("Env var %s is not set or invalid" % toolskey) + if not productdir: + log.debug("No productdir found") + return None + vcvarsall = os.path.join(productdir, "vcvarsall.bat") + if os.path.isfile(vcvarsall): + return vcvarsall + log.debug("Unable to find vcvarsall.bat") + return None + +def query_vcvarsall(version, arch="x86"): + """Launch vcvarsall.bat and read the settings from its environment + """ + vcvarsall = find_vcvarsall(version) + interesting = {"include", "lib", "libpath", "path"} + result = {} + + if vcvarsall is None: + raise DistutilsPlatformError("Unable to find vcvarsall.bat") + log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) + popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + try: + stdout, stderr = popen.communicate() + if popen.wait() != 0: + raise DistutilsPlatformError(stderr.decode("mbcs")) + + stdout = stdout.decode("mbcs") + for line in stdout.split("\n"): + line = Reg.convert_mbcs(line) + if '=' not in line: + continue + line = line.strip() + key, value = line.split('=', 1) + key = key.lower() + if key in interesting: + if value.endswith(os.pathsep): + value = value[:-1] + result[key] = removeDuplicates(value) + + finally: + popen.stdout.close() + popen.stderr.close() + + if len(result) != len(interesting): + raise ValueError(str(list(result.keys()))) + + return result + +# More globals +VERSION = get_build_version() +if VERSION < 8.0: + raise DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION) +# MACROS = MacroExpander(VERSION) + +class MSVCCompiler(CCompiler) : + """Concrete class that implements an interface to Microsoft Visual C++, + as defined by the CCompiler abstract class.""" + + compiler_type = 'msvc' + + # Just set this so CCompiler's constructor doesn't barf. We currently + # don't use the 'set_executables()' bureaucracy provided by CCompiler, + # as it really isn't necessary for this sort of single-compiler class. + # Would be nice to have a consistent interface with UnixCCompiler, + # though, so it's worth thinking about. + executables = {} + + # Private class data (need to distinguish C from C++ source for compiler) + _c_extensions = ['.c'] + _cpp_extensions = ['.cc', '.cpp', '.cxx'] + _rc_extensions = ['.rc'] + _mc_extensions = ['.mc'] + + # Needed for the filename generation methods provided by the + # base class, CCompiler. 
+ src_extensions = (_c_extensions + _cpp_extensions + + _rc_extensions + _mc_extensions) + res_extension = '.res' + obj_extension = '.obj' + static_lib_extension = '.lib' + shared_lib_extension = '.dll' + static_lib_format = shared_lib_format = '%s%s' + exe_extension = '.exe' + + def __init__(self, verbose=0, dry_run=0, force=0): + CCompiler.__init__ (self, verbose, dry_run, force) + self.__version = VERSION + self.__root = r"Software\Microsoft\VisualStudio" + # self.__macros = MACROS + self.__paths = [] + # target platform (.plat_name is consistent with 'bdist') + self.plat_name = None + self.__arch = None # deprecated name + self.initialized = False + + def initialize(self, plat_name=None): + # multi-init means we would need to check platform same each time... + assert not self.initialized, "don't init multiple times" + if plat_name is None: + plat_name = get_platform() + # sanity check for platforms to prevent obscure errors later. + ok_plats = 'win32', 'win-amd64' + if plat_name not in ok_plats: + raise DistutilsPlatformError("--plat-name must be one of %s" % + (ok_plats,)) + + if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): + # Assume that the SDK set up everything alright; don't try to be + # smarter + self.cc = "cl.exe" + self.linker = "link.exe" + self.lib = "lib.exe" + self.rc = "rc.exe" + self.mc = "mc.exe" + else: + # On x86, 'vcvars32.bat amd64' creates an env that doesn't work; + # to cross compile, you use 'x86_amd64'. + # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross + # compile use 'x86' (ie, it runs the x86 compiler directly) + if plat_name == get_platform() or plat_name == 'win32': + # native build or cross-compile to win32 + plat_spec = PLAT_TO_VCVARS[plat_name] + else: + # cross compile from win32 -> some 64bit + plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \ + PLAT_TO_VCVARS[plat_name] + + vc_env = query_vcvarsall(VERSION, plat_spec) + + self.__paths = vc_env['path'].split(os.pathsep) + os.environ['lib'] = vc_env['lib'] + os.environ['include'] = vc_env['include'] + + if len(self.__paths) == 0: + raise DistutilsPlatformError("Python was built with %s, " + "and extensions need to be built with the same " + "version of the compiler, but it isn't installed." 
+ % self.__product) + + self.cc = self.find_exe("cl.exe") + self.linker = self.find_exe("link.exe") + self.lib = self.find_exe("lib.exe") + self.rc = self.find_exe("rc.exe") # resource compiler + self.mc = self.find_exe("mc.exe") # message compiler + #self.set_path_env_var('lib') + #self.set_path_env_var('include') + + # extend the MSVC path with the current path + try: + for p in os.environ['path'].split(';'): + self.__paths.append(p) + except KeyError: + pass + self.__paths = normalize_and_reduce_paths(self.__paths) + os.environ['path'] = ";".join(self.__paths) + + self.preprocess_options = None + if self.__arch == "x86": + self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', + '/DNDEBUG'] + self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', + '/Z7', '/D_DEBUG'] + else: + # Win64 + self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' , + '/DNDEBUG'] + self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', + '/Z7', '/D_DEBUG'] + + self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] + if self.__version >= 7: + self.ldflags_shared_debug = [ + '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' + ] + self.ldflags_static = [ '/nologo'] + + self.initialized = True + + # -- Worker methods ------------------------------------------------ + + def object_filenames(self, + source_filenames, + strip_dir=0, + output_dir=''): + # Copied from ccompiler.py, extended to return .res as 'object'-file + # for .rc input file + if output_dir is None: output_dir = '' + obj_names = [] + for src_name in source_filenames: + (base, ext) = os.path.splitext (src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base):] # If abs, chop off leading / + if ext not in self.src_extensions: + # Better to raise an exception instead of silently continuing + # and later complain about sources and targets having + # different lengths + raise CompileError ("Don't know how to compile %s" % src_name) + if strip_dir: + base = os.path.basename (base) + if ext in self._rc_extensions: + obj_names.append (os.path.join (output_dir, + base + self.res_extension)) + elif ext in self._mc_extensions: + obj_names.append (os.path.join (output_dir, + base + self.res_extension)) + else: + obj_names.append (os.path.join (output_dir, + base + self.obj_extension)) + return obj_names + + + def compile(self, sources, + output_dir=None, macros=None, include_dirs=None, debug=0, + extra_preargs=None, extra_postargs=None, depends=None): + + if not self.initialized: + self.initialize() + compile_info = self._setup_compile(output_dir, macros, include_dirs, + sources, depends, extra_postargs) + macros, objects, extra_postargs, pp_opts, build = compile_info + + compile_opts = extra_preargs or [] + compile_opts.append ('/c') + if debug: + compile_opts.extend(self.compile_options_debug) + else: + compile_opts.extend(self.compile_options) + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + if debug: + # pass the full pathname to MSVC in debug mode, + # this allows the debugger to find the source file + # without asking the user to browse for it + src = os.path.abspath(src) + + if ext in self._c_extensions: + input_opt = "/Tc" + src + elif ext in self._cpp_extensions: + input_opt = "/Tp" + src + elif ext in self._rc_extensions: + # compile .RC to .RES file + input_opt = src + output_opt = "/fo" + obj + try: + self.spawn([self.rc] + pp_opts + + [output_opt] + [input_opt]) + except DistutilsExecError as msg: + raise CompileError(msg) + 
continue + elif ext in self._mc_extensions: + # Compile .MC to .RC file to .RES file. + # * '-h dir' specifies the directory for the + # generated include file + # * '-r dir' specifies the target directory of the + # generated RC file and the binary message resource + # it includes + # + # For now (since there are no options to change this), + # we use the source-directory for the include file and + # the build directory for the RC file and message + # resources. This works at least for win32all. + h_dir = os.path.dirname(src) + rc_dir = os.path.dirname(obj) + try: + # first compile .MC to .RC and .H file + self.spawn([self.mc] + + ['-h', h_dir, '-r', rc_dir] + [src]) + base, _ = os.path.splitext (os.path.basename (src)) + rc_file = os.path.join (rc_dir, base + '.rc') + # then compile .RC to .RES file + self.spawn([self.rc] + + ["/fo" + obj] + [rc_file]) + + except DistutilsExecError as msg: + raise CompileError(msg) + continue + else: + # how to handle this file? + raise CompileError("Don't know how to compile %s to %s" + % (src, obj)) + + output_opt = "/Fo" + obj + try: + self.spawn([self.cc] + compile_opts + pp_opts + + [input_opt, output_opt] + + extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + + return objects + + + def create_static_lib(self, + objects, + output_libname, + output_dir=None, + debug=0, + target_lang=None): + + if not self.initialized: + self.initialize() + (objects, output_dir) = self._fix_object_args(objects, output_dir) + output_filename = self.library_filename(output_libname, + output_dir=output_dir) + + if self._need_link(objects, output_filename): + lib_args = objects + ['/OUT:' + output_filename] + if debug: + pass # XXX what goes here? + try: + self.spawn([self.lib] + lib_args) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + + def link(self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + + if not self.initialized: + self.initialize() + (objects, output_dir) = self._fix_object_args(objects, output_dir) + fixed_args = self._fix_lib_args(libraries, library_dirs, + runtime_library_dirs) + (libraries, library_dirs, runtime_library_dirs) = fixed_args + + if runtime_library_dirs: + self.warn ("I don't know what to do with 'runtime_library_dirs': " + + str (runtime_library_dirs)) + + lib_opts = gen_lib_options(self, + library_dirs, runtime_library_dirs, + libraries) + if output_dir is not None: + output_filename = os.path.join(output_dir, output_filename) + + if self._need_link(objects, output_filename): + if target_desc == CCompiler.EXECUTABLE: + if debug: + ldflags = self.ldflags_shared_debug[1:] + else: + ldflags = self.ldflags_shared[1:] + else: + if debug: + ldflags = self.ldflags_shared_debug + else: + ldflags = self.ldflags_shared + + export_opts = [] + for sym in (export_symbols or []): + export_opts.append("/EXPORT:" + sym) + + ld_args = (ldflags + lib_opts + export_opts + + objects + ['/OUT:' + output_filename]) + + # The MSVC linker generates .lib and .exp files, which cannot be + # suppressed by any linker switches. The .lib files may even be + # needed! Make sure they are generated in the temporary build + # directory. Since they have different names for debug and release + # builds, they can go into the same directory. 
+ build_temp = os.path.dirname(objects[0]) + if export_symbols is not None: + (dll_name, dll_ext) = os.path.splitext( + os.path.basename(output_filename)) + implib_file = os.path.join( + build_temp, + self.library_filename(dll_name)) + ld_args.append ('/IMPLIB:' + implib_file) + + self.manifest_setup_ldargs(output_filename, build_temp, ld_args) + + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + + self.mkpath(os.path.dirname(output_filename)) + try: + self.spawn([self.linker] + ld_args) + except DistutilsExecError as msg: + raise LinkError(msg) + + # embed the manifest + # XXX - this is somewhat fragile - if mt.exe fails, distutils + # will still consider the DLL up-to-date, but it will not have a + # manifest. Maybe we should link to a temp file? OTOH, that + # implies a build environment error that shouldn't go undetected. + mfinfo = self.manifest_get_embed_info(target_desc, ld_args) + if mfinfo is not None: + mffilename, mfid = mfinfo + out_arg = '-outputresource:%s;%s' % (output_filename, mfid) + try: + self.spawn(['mt.exe', '-nologo', '-manifest', + mffilename, out_arg]) + except DistutilsExecError as msg: + raise LinkError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): + # If we need a manifest at all, an embedded manifest is recommended. + # See MSDN article titled + # "How to: Embed a Manifest Inside a C/C++ Application" + # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx) + # Ask the linker to generate the manifest in the temp dir, so + # we can check it, and possibly embed it, later. + temp_manifest = os.path.join( + build_temp, + os.path.basename(output_filename) + ".manifest") + ld_args.append('/MANIFESTFILE:' + temp_manifest) + + def manifest_get_embed_info(self, target_desc, ld_args): + # If a manifest should be embedded, return a tuple of + # (manifest_filename, resource_id). Returns None if no manifest + # should be embedded. See http://bugs.python.org/issue7833 for why + # we want to avoid any manifest for extension modules if we can) + for arg in ld_args: + if arg.startswith("/MANIFESTFILE:"): + temp_manifest = arg.split(":", 1)[1] + break + else: + # no /MANIFESTFILE so nothing to do. + return None + if target_desc == CCompiler.EXECUTABLE: + # by default, executables always get the manifest with the + # CRT referenced. + mfid = 1 + else: + # Extension modules try and avoid any manifest if possible. + mfid = 2 + temp_manifest = self._remove_visual_c_ref(temp_manifest) + if temp_manifest is None: + return None + return temp_manifest, mfid + + def _remove_visual_c_ref(self, manifest_file): + try: + # Remove references to the Visual C runtime, so they will + # fall through to the Visual C dependency of Python.exe. + # This way, when installed for a restricted user (e.g. + # runtimes are not in WinSxS folder, but in Python's own + # folder), the runtimes do not need to be in every folder + # with .pyd's. + # Returns either the filename of the modified manifest or + # None if no manifest should be embedded. + manifest_f = open(manifest_file) + try: + manifest_buf = manifest_f.read() + finally: + manifest_f.close() + pattern = re.compile( + r"""|)""", + re.DOTALL) + manifest_buf = re.sub(pattern, "", manifest_buf) + pattern = r"\s*" + manifest_buf = re.sub(pattern, "", manifest_buf) + # Now see if any other assemblies are referenced - if not, we + # don't want a manifest embedded. 
+ pattern = re.compile( + r"""|)""", re.DOTALL) + if re.search(pattern, manifest_buf) is None: + return None + + manifest_f = open(manifest_file, 'w') + try: + manifest_f.write(manifest_buf) + return manifest_file + finally: + manifest_f.close() + except OSError: + pass + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function, in + # ccompiler.py. + + def library_dir_option(self, dir): + return "/LIBPATH:" + dir + + def runtime_library_dir_option(self, dir): + raise DistutilsPlatformError( + "don't know how to set runtime library search path for MSVC++") + + def library_option(self, lib): + return self.library_filename(lib) + + + def find_library_file(self, dirs, lib, debug=0): + # Prefer a debugging library if found (and requested), but deal + # with it if we don't have one. + if debug: + try_names = [lib + "_d", lib] + else: + try_names = [lib] + for dir in dirs: + for name in try_names: + libfile = os.path.join(dir, self.library_filename (name)) + if os.path.exists(libfile): + return libfile + else: + # Oops, didn't find it in *any* of 'dirs' + return None + + # Helper methods for using the MSVC registry settings + + def find_exe(self, exe): + """Return path to an MSVC executable program. + + Tries to find the program in several places: first, one of the + MSVC program search paths from the registry; next, the directories + in the PATH environment variable. If any of those work, return an + absolute path that is known to exist. If none of them work, just + return the original program name, 'exe'. + """ + for p in self.__paths: + fn = os.path.join(os.path.abspath(p), exe) + if os.path.isfile(fn): + return fn + + # didn't find it; try existing path + for p in os.environ['Path'].split(';'): + fn = os.path.join(os.path.abspath(p),exe) + if os.path.isfile(fn): + return fn + + return exe diff --git a/venv/Lib/site-packages/setuptools/_distutils/msvccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/msvccompiler.py new file mode 100644 index 00000000..2d447b85 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/msvccompiler.py @@ -0,0 +1,643 @@ +"""distutils.msvccompiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for the Microsoft Visual Studio. 
+""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +# finding DevStudio (through the registry) + +import sys, os +from distutils.errors import \ + DistutilsExecError, DistutilsPlatformError, \ + CompileError, LibError, LinkError +from distutils.ccompiler import \ + CCompiler, gen_lib_options +from distutils import log + +_can_read_reg = False +try: + import winreg + + _can_read_reg = True + hkey_mod = winreg + + RegOpenKeyEx = winreg.OpenKeyEx + RegEnumKey = winreg.EnumKey + RegEnumValue = winreg.EnumValue + RegError = winreg.error + +except ImportError: + try: + import win32api + import win32con + _can_read_reg = True + hkey_mod = win32con + + RegOpenKeyEx = win32api.RegOpenKeyEx + RegEnumKey = win32api.RegEnumKey + RegEnumValue = win32api.RegEnumValue + RegError = win32api.error + except ImportError: + log.info("Warning: Can't read registry to find the " + "necessary compiler setting\n" + "Make sure that Python modules winreg, " + "win32api or win32con are installed.") + pass + +if _can_read_reg: + HKEYS = (hkey_mod.HKEY_USERS, + hkey_mod.HKEY_CURRENT_USER, + hkey_mod.HKEY_LOCAL_MACHINE, + hkey_mod.HKEY_CLASSES_ROOT) + +def read_keys(base, key): + """Return list of registry keys.""" + try: + handle = RegOpenKeyEx(base, key) + except RegError: + return None + L = [] + i = 0 + while True: + try: + k = RegEnumKey(handle, i) + except RegError: + break + L.append(k) + i += 1 + return L + +def read_values(base, key): + """Return dict of registry keys and values. + + All names are converted to lowercase. + """ + try: + handle = RegOpenKeyEx(base, key) + except RegError: + return None + d = {} + i = 0 + while True: + try: + name, value, type = RegEnumValue(handle, i) + except RegError: + break + name = name.lower() + d[convert_mbcs(name)] = convert_mbcs(value) + i += 1 + return d + +def convert_mbcs(s): + dec = getattr(s, "decode", None) + if dec is not None: + try: + s = dec("mbcs") + except UnicodeError: + pass + return s + +class MacroExpander: + def __init__(self, version): + self.macros = {} + self.load_macros(version) + + def set_macro(self, macro, path, key): + for base in HKEYS: + d = read_values(base, path) + if d: + self.macros["$(%s)" % macro] = d[key] + break + + def load_macros(self, version): + vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version + self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir") + self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir") + net = r"Software\Microsoft\.NETFramework" + self.set_macro("FrameworkDir", net, "installroot") + try: + if version > 7.0: + self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") + else: + self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") + except KeyError as exc: # + raise DistutilsPlatformError( + """Python was built with Visual Studio 2003; +extensions must be built with a compiler than can generate compatible binaries. +Visual Studio 2003 was not found on this system. If you have Cygwin installed, +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") + + p = r"Software\Microsoft\NET Framework Setup\Product" + for base in HKEYS: + try: + h = RegOpenKeyEx(base, p) + except RegError: + continue + key = RegEnumKey(h, 0) + d = read_values(base, r"%s\%s" % (p, key)) + self.macros["$(FrameworkVersion)"] = d["version"] + + def sub(self, s): + for k, v in self.macros.items(): + s = s.replace(k, v) + return s + +def get_build_version(): + """Return the version of MSVC that was used to build Python. 
+ + For Python 2.3 and up, the version number is included in + sys.version. For earlier versions, assume the compiler is MSVC 6. + """ + prefix = "MSC v." + i = sys.version.find(prefix) + if i == -1: + return 6 + i = i + len(prefix) + s, rest = sys.version[i:].split(" ", 1) + majorVersion = int(s[:-2]) - 6 + if majorVersion >= 13: + # v13 was skipped and should be v14 + majorVersion += 1 + minorVersion = int(s[2:3]) / 10.0 + # I don't think paths are affected by minor version in version 6 + if majorVersion == 6: + minorVersion = 0 + if majorVersion >= 6: + return majorVersion + minorVersion + # else we don't know what version of the compiler this is + return None + +def get_build_architecture(): + """Return the processor architecture. + + Possible results are "Intel" or "AMD64". + """ + + prefix = " bit (" + i = sys.version.find(prefix) + if i == -1: + return "Intel" + j = sys.version.find(")", i) + return sys.version[i+len(prefix):j] + +def normalize_and_reduce_paths(paths): + """Return a list of normalized paths with duplicates removed. + + The current order of paths is maintained. + """ + # Paths are normalized so things like: /a and /a/ aren't both preserved. + reduced_paths = [] + for p in paths: + np = os.path.normpath(p) + # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. + if np not in reduced_paths: + reduced_paths.append(np) + return reduced_paths + + +class MSVCCompiler(CCompiler) : + """Concrete class that implements an interface to Microsoft Visual C++, + as defined by the CCompiler abstract class.""" + + compiler_type = 'msvc' + + # Just set this so CCompiler's constructor doesn't barf. We currently + # don't use the 'set_executables()' bureaucracy provided by CCompiler, + # as it really isn't necessary for this sort of single-compiler class. + # Would be nice to have a consistent interface with UnixCCompiler, + # though, so it's worth thinking about. + executables = {} + + # Private class data (need to distinguish C from C++ source for compiler) + _c_extensions = ['.c'] + _cpp_extensions = ['.cc', '.cpp', '.cxx'] + _rc_extensions = ['.rc'] + _mc_extensions = ['.mc'] + + # Needed for the filename generation methods provided by the + # base class, CCompiler. + src_extensions = (_c_extensions + _cpp_extensions + + _rc_extensions + _mc_extensions) + res_extension = '.res' + obj_extension = '.obj' + static_lib_extension = '.lib' + shared_lib_extension = '.dll' + static_lib_format = shared_lib_format = '%s%s' + exe_extension = '.exe' + + def __init__(self, verbose=0, dry_run=0, force=0): + CCompiler.__init__ (self, verbose, dry_run, force) + self.__version = get_build_version() + self.__arch = get_build_architecture() + if self.__arch == "Intel": + # x86 + if self.__version >= 7: + self.__root = r"Software\Microsoft\VisualStudio" + self.__macros = MacroExpander(self.__version) + else: + self.__root = r"Software\Microsoft\Devstudio" + self.__product = "Visual Studio version %s" % self.__version + else: + # Win64. 
Assume this was built with the platform SDK + self.__product = "Microsoft SDK compiler %s" % (self.__version + 6) + + self.initialized = False + + def initialize(self): + self.__paths = [] + if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): + # Assume that the SDK set up everything alright; don't try to be + # smarter + self.cc = "cl.exe" + self.linker = "link.exe" + self.lib = "lib.exe" + self.rc = "rc.exe" + self.mc = "mc.exe" + else: + self.__paths = self.get_msvc_paths("path") + + if len(self.__paths) == 0: + raise DistutilsPlatformError("Python was built with %s, " + "and extensions need to be built with the same " + "version of the compiler, but it isn't installed." + % self.__product) + + self.cc = self.find_exe("cl.exe") + self.linker = self.find_exe("link.exe") + self.lib = self.find_exe("lib.exe") + self.rc = self.find_exe("rc.exe") # resource compiler + self.mc = self.find_exe("mc.exe") # message compiler + self.set_path_env_var('lib') + self.set_path_env_var('include') + + # extend the MSVC path with the current path + try: + for p in os.environ['path'].split(';'): + self.__paths.append(p) + except KeyError: + pass + self.__paths = normalize_and_reduce_paths(self.__paths) + os.environ['path'] = ";".join(self.__paths) + + self.preprocess_options = None + if self.__arch == "Intel": + self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GX' , + '/DNDEBUG'] + self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX', + '/Z7', '/D_DEBUG'] + else: + # Win64 + self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' , + '/DNDEBUG'] + self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', + '/Z7', '/D_DEBUG'] + + self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] + if self.__version >= 7: + self.ldflags_shared_debug = [ + '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' + ] + else: + self.ldflags_shared_debug = [ + '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG' + ] + self.ldflags_static = [ '/nologo'] + + self.initialized = True + + # -- Worker methods ------------------------------------------------ + + def object_filenames(self, + source_filenames, + strip_dir=0, + output_dir=''): + # Copied from ccompiler.py, extended to return .res as 'object'-file + # for .rc input file + if output_dir is None: output_dir = '' + obj_names = [] + for src_name in source_filenames: + (base, ext) = os.path.splitext (src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base):] # If abs, chop off leading / + if ext not in self.src_extensions: + # Better to raise an exception instead of silently continuing + # and later complain about sources and targets having + # different lengths + raise CompileError ("Don't know how to compile %s" % src_name) + if strip_dir: + base = os.path.basename (base) + if ext in self._rc_extensions: + obj_names.append (os.path.join (output_dir, + base + self.res_extension)) + elif ext in self._mc_extensions: + obj_names.append (os.path.join (output_dir, + base + self.res_extension)) + else: + obj_names.append (os.path.join (output_dir, + base + self.obj_extension)) + return obj_names + + + def compile(self, sources, + output_dir=None, macros=None, include_dirs=None, debug=0, + extra_preargs=None, extra_postargs=None, depends=None): + + if not self.initialized: + self.initialize() + compile_info = self._setup_compile(output_dir, macros, include_dirs, + sources, depends, extra_postargs) + macros, objects, extra_postargs, pp_opts, build 
= compile_info + + compile_opts = extra_preargs or [] + compile_opts.append ('/c') + if debug: + compile_opts.extend(self.compile_options_debug) + else: + compile_opts.extend(self.compile_options) + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + if debug: + # pass the full pathname to MSVC in debug mode, + # this allows the debugger to find the source file + # without asking the user to browse for it + src = os.path.abspath(src) + + if ext in self._c_extensions: + input_opt = "/Tc" + src + elif ext in self._cpp_extensions: + input_opt = "/Tp" + src + elif ext in self._rc_extensions: + # compile .RC to .RES file + input_opt = src + output_opt = "/fo" + obj + try: + self.spawn([self.rc] + pp_opts + + [output_opt] + [input_opt]) + except DistutilsExecError as msg: + raise CompileError(msg) + continue + elif ext in self._mc_extensions: + # Compile .MC to .RC file to .RES file. + # * '-h dir' specifies the directory for the + # generated include file + # * '-r dir' specifies the target directory of the + # generated RC file and the binary message resource + # it includes + # + # For now (since there are no options to change this), + # we use the source-directory for the include file and + # the build directory for the RC file and message + # resources. This works at least for win32all. + h_dir = os.path.dirname(src) + rc_dir = os.path.dirname(obj) + try: + # first compile .MC to .RC and .H file + self.spawn([self.mc] + + ['-h', h_dir, '-r', rc_dir] + [src]) + base, _ = os.path.splitext (os.path.basename (src)) + rc_file = os.path.join (rc_dir, base + '.rc') + # then compile .RC to .RES file + self.spawn([self.rc] + + ["/fo" + obj] + [rc_file]) + + except DistutilsExecError as msg: + raise CompileError(msg) + continue + else: + # how to handle this file? + raise CompileError("Don't know how to compile %s to %s" + % (src, obj)) + + output_opt = "/Fo" + obj + try: + self.spawn([self.cc] + compile_opts + pp_opts + + [input_opt, output_opt] + + extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + + return objects + + + def create_static_lib(self, + objects, + output_libname, + output_dir=None, + debug=0, + target_lang=None): + + if not self.initialized: + self.initialize() + (objects, output_dir) = self._fix_object_args(objects, output_dir) + output_filename = self.library_filename(output_libname, + output_dir=output_dir) + + if self._need_link(objects, output_filename): + lib_args = objects + ['/OUT:' + output_filename] + if debug: + pass # XXX what goes here? 
+ try: + self.spawn([self.lib] + lib_args) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + + def link(self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + + if not self.initialized: + self.initialize() + (objects, output_dir) = self._fix_object_args(objects, output_dir) + fixed_args = self._fix_lib_args(libraries, library_dirs, + runtime_library_dirs) + (libraries, library_dirs, runtime_library_dirs) = fixed_args + + if runtime_library_dirs: + self.warn ("I don't know what to do with 'runtime_library_dirs': " + + str (runtime_library_dirs)) + + lib_opts = gen_lib_options(self, + library_dirs, runtime_library_dirs, + libraries) + if output_dir is not None: + output_filename = os.path.join(output_dir, output_filename) + + if self._need_link(objects, output_filename): + if target_desc == CCompiler.EXECUTABLE: + if debug: + ldflags = self.ldflags_shared_debug[1:] + else: + ldflags = self.ldflags_shared[1:] + else: + if debug: + ldflags = self.ldflags_shared_debug + else: + ldflags = self.ldflags_shared + + export_opts = [] + for sym in (export_symbols or []): + export_opts.append("/EXPORT:" + sym) + + ld_args = (ldflags + lib_opts + export_opts + + objects + ['/OUT:' + output_filename]) + + # The MSVC linker generates .lib and .exp files, which cannot be + # suppressed by any linker switches. The .lib files may even be + # needed! Make sure they are generated in the temporary build + # directory. Since they have different names for debug and release + # builds, they can go into the same directory. + if export_symbols is not None: + (dll_name, dll_ext) = os.path.splitext( + os.path.basename(output_filename)) + implib_file = os.path.join( + os.path.dirname(objects[0]), + self.library_filename(dll_name)) + ld_args.append ('/IMPLIB:' + implib_file) + + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + + self.mkpath(os.path.dirname(output_filename)) + try: + self.spawn([self.linker] + ld_args) + except DistutilsExecError as msg: + raise LinkError(msg) + + else: + log.debug("skipping %s (up-to-date)", output_filename) + + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function, in + # ccompiler.py. + + def library_dir_option(self, dir): + return "/LIBPATH:" + dir + + def runtime_library_dir_option(self, dir): + raise DistutilsPlatformError( + "don't know how to set runtime library search path for MSVC++") + + def library_option(self, lib): + return self.library_filename(lib) + + + def find_library_file(self, dirs, lib, debug=0): + # Prefer a debugging library if found (and requested), but deal + # with it if we don't have one. + if debug: + try_names = [lib + "_d", lib] + else: + try_names = [lib] + for dir in dirs: + for name in try_names: + libfile = os.path.join(dir, self.library_filename (name)) + if os.path.exists(libfile): + return libfile + else: + # Oops, didn't find it in *any* of 'dirs' + return None + + # Helper methods for using the MSVC registry settings + + def find_exe(self, exe): + """Return path to an MSVC executable program. 
+ + Tries to find the program in several places: first, one of the + MSVC program search paths from the registry; next, the directories + in the PATH environment variable. If any of those work, return an + absolute path that is known to exist. If none of them work, just + return the original program name, 'exe'. + """ + for p in self.__paths: + fn = os.path.join(os.path.abspath(p), exe) + if os.path.isfile(fn): + return fn + + # didn't find it; try existing path + for p in os.environ['Path'].split(';'): + fn = os.path.join(os.path.abspath(p),exe) + if os.path.isfile(fn): + return fn + + return exe + + def get_msvc_paths(self, path, platform='x86'): + """Get a list of devstudio directories (include, lib or path). + + Return a list of strings. The list will be empty if unable to + access the registry or appropriate registry keys not found. + """ + if not _can_read_reg: + return [] + + path = path + " dirs" + if self.__version >= 7: + key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" + % (self.__root, self.__version)) + else: + key = (r"%s\6.0\Build System\Components\Platforms" + r"\Win32 (%s)\Directories" % (self.__root, platform)) + + for base in HKEYS: + d = read_values(base, key) + if d: + if self.__version >= 7: + return self.__macros.sub(d[path]).split(";") + else: + return d[path].split(";") + # MSVC 6 seems to create the registry entries we need only when + # the GUI is run. + if self.__version == 6: + for base in HKEYS: + if read_values(base, r"%s\6.0" % self.__root) is not None: + self.warn("It seems you have Visual Studio 6 installed, " + "but the expected registry settings are not present.\n" + "You must at least run the Visual Studio GUI once " + "so that these entries are created.") + break + return [] + + def set_path_env_var(self, name): + """Set environment variable 'name' to an MSVC path type value. + + This is equivalent to a SET command prior to execution of spawned + commands. 
+ """ + + if name == "lib": + p = self.get_msvc_paths("library") + else: + p = self.get_msvc_paths(name) + if p: + os.environ[name] = ';'.join(p) + + +if get_build_version() >= 8.0: + log.debug("Importing new compiler from distutils.msvc9compiler") + OldMSVCCompiler = MSVCCompiler + from distutils.msvc9compiler import MSVCCompiler + # get_build_architecture not really relevant now we support cross-compile + from distutils.msvc9compiler import MacroExpander diff --git a/venv/Lib/site-packages/setuptools/_distutils/py35compat.py b/venv/Lib/site-packages/setuptools/_distutils/py35compat.py new file mode 100644 index 00000000..79b2e7f3 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/py35compat.py @@ -0,0 +1,19 @@ +import sys +import subprocess + + +def __optim_args_from_interpreter_flags(): + """Return a list of command-line arguments reproducing the current + optimization settings in sys.flags.""" + args = [] + value = sys.flags.optimize + if value > 0: + args.append("-" + "O" * value) + return args + + +_optim_args_from_interpreter_flags = getattr( + subprocess, + "_optim_args_from_interpreter_flags", + __optim_args_from_interpreter_flags, +) diff --git a/venv/Lib/site-packages/setuptools/_distutils/py38compat.py b/venv/Lib/site-packages/setuptools/_distutils/py38compat.py new file mode 100644 index 00000000..7dbe8cef --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/py38compat.py @@ -0,0 +1,7 @@ +def aix_platform(osname, version, release): + try: + import _aix_support + return _aix_support.aix_platform() + except ImportError: + pass + return "%s-%s.%s" % (osname, version, release) diff --git a/venv/Lib/site-packages/setuptools/_distutils/spawn.py b/venv/Lib/site-packages/setuptools/_distutils/spawn.py new file mode 100644 index 00000000..b012d00d --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/spawn.py @@ -0,0 +1,129 @@ +"""distutils.spawn + +Provides the 'spawn()' function, a front-end to various platform- +specific functions for launching another program in a sub-process. +Also provides the 'find_executable()' to search the path for a given +executable name. +""" + +import sys +import os +import subprocess + +from distutils.errors import DistutilsPlatformError, DistutilsExecError +from distutils.debug import DEBUG +from distutils import log + + +if sys.platform == 'darwin': + _cfg_target = None + _cfg_target_split = None + + +def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): + """Run another program, specified as a command list 'cmd', in a new process. + + 'cmd' is just the argument list for the new process, ie. + cmd[0] is the program to run and cmd[1:] are the rest of its arguments. + There is no way to run a program with a name different from that of its + executable. + + If 'search_path' is true (the default), the system's executable + search path will be used to find the program; otherwise, cmd[0] + must be the exact path to the executable. If 'dry_run' is true, + the command will not actually be run. + + Raise DistutilsExecError if running the program fails in any way; just + return on success. 
+ """ + # cmd is documented as a list, but just in case some code passes a tuple + # in, protect our %-formatting code against horrible death + cmd = list(cmd) + + log.info(subprocess.list2cmdline(cmd)) + if dry_run: + return + + if search_path: + executable = find_executable(cmd[0]) + if executable is not None: + cmd[0] = executable + + env = env if env is not None else dict(os.environ) + + if sys.platform == 'darwin': + global _cfg_target, _cfg_target_split + if _cfg_target is None: + from distutils import sysconfig + _cfg_target = sysconfig.get_config_var( + 'MACOSX_DEPLOYMENT_TARGET') or '' + if _cfg_target: + _cfg_target_split = [int(x) for x in _cfg_target.split('.')] + if _cfg_target: + # Ensure that the deployment target of the build process is not + # less than 10.3 if the interpreter was built for 10.3 or later. + # This ensures extension modules are built with correct + # compatibility values, specifically LDSHARED which can use + # '-undefined dynamic_lookup' which only works on >= 10.3. + cur_target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', _cfg_target) + cur_target_split = [int(x) for x in cur_target.split('.')] + if _cfg_target_split[:2] >= [10, 3] and cur_target_split[:2] < [10, 3]: + my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: ' + 'now "%s" but "%s" during configure;' + 'must use 10.3 or later' + % (cur_target, _cfg_target)) + raise DistutilsPlatformError(my_msg) + env.update(MACOSX_DEPLOYMENT_TARGET=cur_target) + + try: + proc = subprocess.Popen(cmd, env=env) + proc.wait() + exitcode = proc.returncode + except OSError as exc: + if not DEBUG: + cmd = cmd[0] + raise DistutilsExecError( + "command %r failed: %s" % (cmd, exc.args[-1])) from exc + + if exitcode: + if not DEBUG: + cmd = cmd[0] + raise DistutilsExecError( + "command %r failed with exit code %s" % (cmd, exitcode)) + + +def find_executable(executable, path=None): + """Tries to find 'executable' in the directories listed in 'path'. + + A string listing directories separated by 'os.pathsep'; defaults to + os.environ['PATH']. Returns the complete filename or None if not found. + """ + _, ext = os.path.splitext(executable) + if (sys.platform == 'win32') and (ext != '.exe'): + executable = executable + '.exe' + + if os.path.isfile(executable): + return executable + + if path is None: + path = os.environ.get('PATH', None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable is + # set to an empty string + + # PATH='' doesn't match, whereas PATH=':' looks in the current directory + if not path: + return None + + paths = path.split(os.pathsep) + for p in paths: + f = os.path.join(p, executable) + if os.path.isfile(f): + # the file exists, we have a shot at spawn working + return f + return None diff --git a/venv/Lib/site-packages/setuptools/_distutils/sysconfig.py b/venv/Lib/site-packages/setuptools/_distutils/sysconfig.py new file mode 100644 index 00000000..879b6981 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/sysconfig.py @@ -0,0 +1,573 @@ +"""Provide access to Python's configuration information. The specific +configuration variables available depend heavily on the platform and +configuration. The values may be retrieved using +get_config_var(name), and the list of variables is available via +get_config_vars().keys(). Additional convenience functions are also +available. + +Written by: Fred L. Drake, Jr. 
+Email: +""" + +import _imp +import os +import re +import sys + +from .errors import DistutilsPlatformError + +IS_PYPY = '__pypy__' in sys.builtin_module_names + +# These are needed in a couple of spots, so just compute them once. +PREFIX = os.path.normpath(sys.prefix) +EXEC_PREFIX = os.path.normpath(sys.exec_prefix) +BASE_PREFIX = os.path.normpath(sys.base_prefix) +BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix) + +# Path to the base directory of the project. On Windows the binary may +# live in project/PCbuild/win32 or project/PCbuild/amd64. +# set for cross builds +if "_PYTHON_PROJECT_BASE" in os.environ: + project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"]) +else: + if sys.executable: + project_base = os.path.dirname(os.path.abspath(sys.executable)) + else: + # sys.executable can be empty if argv[0] has been changed and Python is + # unable to retrieve the real program name + project_base = os.getcwd() + + +# python_build: (Boolean) if true, we're either building Python or +# building an extension with an un-installed Python, so we use +# different (hard-wired) directories. +def _is_python_source_dir(d): + for fn in ("Setup", "Setup.local"): + if os.path.isfile(os.path.join(d, "Modules", fn)): + return True + return False + +_sys_home = getattr(sys, '_home', None) + +if os.name == 'nt': + def _fix_pcbuild(d): + if d and os.path.normcase(d).startswith( + os.path.normcase(os.path.join(PREFIX, "PCbuild"))): + return PREFIX + return d + project_base = _fix_pcbuild(project_base) + _sys_home = _fix_pcbuild(_sys_home) + +def _python_build(): + if _sys_home: + return _is_python_source_dir(_sys_home) + return _is_python_source_dir(project_base) + +python_build = _python_build() + + +# Calculate the build qualifier flags if they are defined. Adding the flags +# to the include and lib directories only makes sense for an installation, not +# an in-source build. +build_flags = '' +try: + if not python_build: + build_flags = sys.abiflags +except AttributeError: + # It's not a configure-based build, so the sys module doesn't have + # this attribute, which is fine. + pass + +def get_python_version(): + """Return a string containing the major and minor Python version, + leaving off the patchlevel. Sample return values could be '1.5' + or '2.2'. + """ + return '%d.%d' % sys.version_info[:2] + + +def get_python_inc(plat_specific=0, prefix=None): + """Return the directory containing installed Python header files. + + If 'plat_specific' is false (the default), this is the path to the + non-platform-specific header files, i.e. Python.h and so on; + otherwise, this is the path to platform-specific header files + (namely pyconfig.h). + + If 'prefix' is supplied, use it instead of sys.base_prefix or + sys.base_exec_prefix -- i.e., ignore 'plat_specific'. + """ + if prefix is None: + prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX + if IS_PYPY: + return os.path.join(prefix, 'include') + elif os.name == "posix": + if python_build: + # Assume the executable is in the build directory. The + # pyconfig.h file should be in the same directory. Since + # the build directory may not be the source directory, we + # must use "srcdir" from the makefile to find the "Include" + # directory. 
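
The lookups documented above are the ones build scripts usually call directly; a short usage check (these helpers are also importable from the standard `distutils.sysconfig` module, and the printed paths will differ per machine):

```python
from distutils.sysconfig import get_python_inc, get_python_version

print(get_python_version())              # e.g. '3.6'
print(get_python_inc())                  # directory that holds Python.h
print(get_python_inc(plat_specific=1))   # directory that holds pyconfig.h
```
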
+ if plat_specific: + return _sys_home or project_base + else: + incdir = os.path.join(get_config_var('srcdir'), 'Include') + return os.path.normpath(incdir) + python_dir = 'python' + get_python_version() + build_flags + return os.path.join(prefix, "include", python_dir) + elif os.name == "nt": + if python_build: + # Include both the include and PC dir to ensure we can find + # pyconfig.h + return (os.path.join(prefix, "include") + os.path.pathsep + + os.path.join(prefix, "PC")) + return os.path.join(prefix, "include") + else: + raise DistutilsPlatformError( + "I don't know where Python installs its C header files " + "on platform '%s'" % os.name) + + +def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): + """Return the directory containing the Python library (standard or + site additions). + + If 'plat_specific' is true, return the directory containing + platform-specific modules, i.e. any module from a non-pure-Python + module distribution; otherwise, return the platform-shared library + directory. If 'standard_lib' is true, return the directory + containing standard Python library modules; otherwise, return the + directory for site-specific modules. + + If 'prefix' is supplied, use it instead of sys.base_prefix or + sys.base_exec_prefix -- i.e., ignore 'plat_specific'. + """ + if IS_PYPY: + # PyPy-specific schema + if prefix is None: + prefix = PREFIX + if standard_lib: + return os.path.join(prefix, "lib-python", sys.version[0]) + return os.path.join(prefix, 'site-packages') + + if prefix is None: + if standard_lib: + prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX + else: + prefix = plat_specific and EXEC_PREFIX or PREFIX + + if os.name == "posix": + if plat_specific or standard_lib: + # Platform-specific modules (any module from a non-pure-Python + # module distribution) or standard Python library modules. + libdir = getattr(sys, "platlibdir", "lib") + else: + # Pure Python + libdir = "lib" + libpython = os.path.join(prefix, libdir, + "python" + get_python_version()) + if standard_lib: + return libpython + else: + return os.path.join(libpython, "site-packages") + elif os.name == "nt": + if standard_lib: + return os.path.join(prefix, "Lib") + else: + return os.path.join(prefix, "Lib", "site-packages") + else: + raise DistutilsPlatformError( + "I don't know where Python installs its library " + "on platform '%s'" % os.name) + + + +def customize_compiler(compiler): + """Do any platform-specific customization of a CCompiler instance. + + Mainly needed on Unix, so we can plug in the information that + varies across Unices and is stored in Python's Makefile. + """ + if compiler.compiler_type == "unix": + if sys.platform == "darwin": + # Perform first-time customization of compiler-related + # config vars on OS X now that we know we need a compiler. + # This is primarily to support Pythons from binary + # installers. The kind and paths to build tools on + # the user system may vary significantly from the system + # that Python itself was built on. Also the user OS + # version and build tools may not support the same set + # of CPU architectures for universal builds. + global _config_vars + # Use get_config_var() to ensure _config_vars is initialized. 
+ if not get_config_var('CUSTOMIZED_OSX_COMPILER'): + import _osx_support + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + + (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ + get_config_vars('CC', 'CXX', 'CFLAGS', + 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') + + if 'CC' in os.environ: + newcc = os.environ['CC'] + if (sys.platform == 'darwin' + and 'LDSHARED' not in os.environ + and ldshared.startswith(cc)): + # On OS X, if CC is overridden, use that as the default + # command for LDSHARED as well + ldshared = newcc + ldshared[len(cc):] + cc = newcc + if 'CXX' in os.environ: + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: + cflags = cflags + ' ' + os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: + archiver = ar + ' ' + os.environ['ARFLAGS'] + else: + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, + compiler_cxx=cxx, + linker_so=ldshared, + linker_exe=cc, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix + + +def get_config_h_filename(): + """Return full pathname of installed pyconfig.h file.""" + if python_build: + if os.name == "nt": + inc_dir = os.path.join(_sys_home or project_base, "PC") + else: + inc_dir = _sys_home or project_base + else: + inc_dir = get_python_inc(plat_specific=1) + + return os.path.join(inc_dir, 'pyconfig.h') + + +def get_makefile_filename(): + """Return full pathname of installed Makefile from the Python build.""" + if python_build: + return os.path.join(_sys_home or project_base, "Makefile") + lib_dir = get_python_lib(plat_specific=0, standard_lib=1) + config_file = 'config-{}{}'.format(get_python_version(), build_flags) + if hasattr(sys.implementation, '_multiarch'): + config_file += '-%s' % sys.implementation._multiarch + return os.path.join(lib_dir, config_file, 'Makefile') + + +def parse_config_h(fp, g=None): + """Parse a config.h-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + if g is None: + g = {} + define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") + undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") + # + while True: + line = fp.readline() + if not line: + break + m = define_rx.match(line) + if m: + n, v = m.group(1, 2) + try: v = int(v) + except ValueError: pass + g[n] = v + else: + m = undef_rx.match(line) + if m: + g[m.group(1)] = 0 + return g + + +# Regexes needed for parsing Makefile (and similar syntaxes, +# like old-style Setup files). +_variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") +_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") +_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + +def parse_makefile(fn, g=None): + """Parse a Makefile-style file. + + A dictionary containing name/value pairs is returned. 
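
`parse_config_h()` above only needs a file-like object and two regular expressions, so it can be exercised on a small in-memory pyconfig.h-style snippet; the macro names below are sample input only:

```python
import io
from distutils.sysconfig import parse_config_h

sample = io.StringIO(
    "#define HAVE_UNISTD_H 1\n"
    '#define PLATFORM "win32"\n'
    "/* #undef HAVE_FORK */\n"
)
print(parse_config_h(sample))
# -> {'HAVE_UNISTD_H': 1, 'PLATFORM': '"win32"', 'HAVE_FORK': 0}
```
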
If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + from distutils.text_file import TextFile + fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape") + + if g is None: + g = {} + done = {} + notdone = {} + + while True: + line = fp.readline() + if line is None: # eof + break + m = _variable_rx.match(line) + if m: + n, v = m.group(1, 2) + v = v.strip() + # `$$' is a literal `$' in make + tmpv = v.replace('$$', '') + + if "$" in tmpv: + notdone[n] = v + else: + try: + v = int(v) + except ValueError: + # insert literal `$' + done[n] = v.replace('$$', '$') + else: + done[n] = v + + # Variables with a 'PY_' prefix in the makefile. These need to + # be made available without that prefix through sysconfig. + # Special care is needed to ensure that variable expansion works, even + # if the expansion uses the name without a prefix. + renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + + # do variable interpolation here + while notdone: + for name in list(notdone): + value = notdone[name] + m = _findvar1_rx.search(value) or _findvar2_rx.search(value) + if m: + n = m.group(1) + found = True + if n in done: + item = str(done[n]) + elif n in notdone: + # get it on a subsequent round + found = False + elif n in os.environ: + # do it like make: fall back to environment + item = os.environ[n] + + elif n in renamed_variables: + if name.startswith('PY_') and name[3:] in renamed_variables: + item = "" + + elif 'PY_' + n in notdone: + found = False + + else: + item = str(done['PY_' + n]) + else: + done[n] = item = "" + if found: + after = value[m.end():] + value = value[:m.start()] + item + after + if "$" in after: + notdone[name] = value + else: + try: value = int(value) + except ValueError: + done[name] = value.strip() + else: + done[name] = value + del notdone[name] + + if name.startswith('PY_') \ + and name[3:] in renamed_variables: + + name = name[3:] + if name not in done: + done[name] = value + else: + # bogus variable reference; just drop it since we can't deal + del notdone[name] + + fp.close() + + # strip spurious spaces + for k, v in done.items(): + if isinstance(v, str): + done[k] = v.strip() + + # save the results in the global dictionary + g.update(done) + return g + + +def expand_makefile_vars(s, vars): + """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in + 'string' according to 'vars' (a dictionary mapping variable names to + values). Variables not present in 'vars' are silently expanded to the + empty string. The variable values in 'vars' should not contain further + variable expansions; if 'vars' is the output of 'parse_makefile()', + you're fine. Returns a variable-expanded version of 's'. + """ + + # This algorithm does multiple expansion, so if vars['foo'] contains + # "${bar}", it will expand ${foo} to ${bar}, and then expand + # ${bar}... and so forth. This is fine as long as 'vars' comes from + # 'parse_makefile()', which takes care of such expansions eagerly, + # according to make's variable expansion semantics. 
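
A short check of `expand_makefile_vars()` as described above: both the `$(NAME)` and `${NAME}` forms are rewritten, and expansion repeats until no references remain. The variable names and values here are sample data only:

```python
from distutils.sysconfig import expand_makefile_vars

vars = {"CC": "cl.exe", "CFLAGS": "/nologo /O2", "COMPILE": "$(CC) ${CFLAGS}"}
print(expand_makefile_vars("$(COMPILE) /c hello.c", vars))
# -> cl.exe /nologo /O2 /c hello.c
```
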
+ + while True: + m = _findvar1_rx.search(s) or _findvar2_rx.search(s) + if m: + (beg, end) = m.span() + s = s[0:beg] + vars.get(m.group(1)) + s[end:] + else: + break + return s + + +_config_vars = None + +def _init_posix(): + """Initialize the module as appropriate for POSIX systems.""" + # _sysconfigdata is generated at build time, see the sysconfig module + name = os.environ.get('_PYTHON_SYSCONFIGDATA_NAME', + '_sysconfigdata_{abi}_{platform}_{multiarch}'.format( + abi=sys.abiflags, + platform=sys.platform, + multiarch=getattr(sys.implementation, '_multiarch', ''), + )) + try: + _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0) + except ImportError: + # Python 3.5 and pypy 7.3.1 + _temp = __import__( + '_sysconfigdata', globals(), locals(), ['build_time_vars'], 0) + build_time_vars = _temp.build_time_vars + global _config_vars + _config_vars = {} + _config_vars.update(build_time_vars) + + +def _init_nt(): + """Initialize the module as appropriate for NT""" + g = {} + # set basic install directories + g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1) + g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1) + + # XXX hmmm.. a normal install puts include files here + g['INCLUDEPY'] = get_python_inc(plat_specific=0) + + g['EXT_SUFFIX'] = _imp.extension_suffixes()[0] + g['EXE'] = ".exe" + g['VERSION'] = get_python_version().replace(".", "") + g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable)) + + global _config_vars + _config_vars = g + + +def get_config_vars(*args): + """With no arguments, return a dictionary of all configuration + variables relevant for the current platform. Generally this includes + everything needed to build extensions and install both pure modules and + extensions. On Unix, this means every variable defined in Python's + installed Makefile; on Windows it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ + global _config_vars + if _config_vars is None: + func = globals().get("_init_" + os.name) + if func: + func() + else: + _config_vars = {} + + # Normalized versions of prefix and exec_prefix are handy to have; + # in fact, these are the standard versions used most places in the + # Distutils. + _config_vars['prefix'] = PREFIX + _config_vars['exec_prefix'] = EXEC_PREFIX + + if not IS_PYPY: + # For backward compatibility, see issue19555 + SO = _config_vars.get('EXT_SUFFIX') + if SO is not None: + _config_vars['SO'] = SO + + # Always convert srcdir to an absolute path + srcdir = _config_vars.get('srcdir', project_base) + if os.name == 'posix': + if python_build: + # If srcdir is a relative path (typically '.' or '..') + # then it should be interpreted relative to the directory + # containing Makefile. + base = os.path.dirname(get_makefile_filename()) + srcdir = os.path.join(base, srcdir) + else: + # srcdir is not meaningful since the installation is + # spread about the filesystem. We choose the + # directory containing the Makefile since we know it + # exists. + srcdir = os.path.dirname(get_makefile_filename()) + _config_vars['srcdir'] = os.path.abspath(os.path.normpath(srcdir)) + + # Convert srcdir into an absolute path if it appears necessary. + # Normally it is relative to the build directory. However, during + # testing, for example, we might be running a non-installed python + # from a different directory. 
+ if python_build and os.name == "posix": + base = project_base + if (not os.path.isabs(_config_vars['srcdir']) and + base != os.getcwd()): + # srcdir is relative and we are not in the same directory + # as the executable. Assume executable is in the build + # directory and make srcdir absolute. + srcdir = os.path.join(base, _config_vars['srcdir']) + _config_vars['srcdir'] = os.path.normpath(srcdir) + + # OS X platforms require special customization to handle + # multi-architecture, multi-os-version installers + if sys.platform == 'darwin': + import _osx_support + _osx_support.customize_config_vars(_config_vars) + + if args: + vals = [] + for name in args: + vals.append(_config_vars.get(name)) + return vals + else: + return _config_vars + +def get_config_var(name): + """Return the value of a single variable using the dictionary + returned by 'get_config_vars()'. Equivalent to + get_config_vars().get(name) + """ + if name == 'SO': + import warnings + warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2) + return get_config_vars().get(name) diff --git a/venv/Lib/site-packages/setuptools/_distutils/text_file.py b/venv/Lib/site-packages/setuptools/_distutils/text_file.py new file mode 100644 index 00000000..93abad38 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/text_file.py @@ -0,0 +1,286 @@ +"""text_file + +provides the TextFile class, which gives an interface to text files +that (optionally) takes care of stripping comments, ignoring blank +lines, and joining lines with backslashes.""" + +import sys, io + + +class TextFile: + """Provides a file-like object that takes care of all the things you + commonly want to do when processing a text file that has some + line-by-line syntax: strip comments (as long as "#" is your + comment character), skip blank lines, join adjacent lines by + escaping the newline (ie. backslash at end of line), strip + leading and/or trailing whitespace. All of these are optional + and independently controllable. + + Provides a 'warn()' method so you can generate warning messages that + report physical line number, even if the logical line in question + spans multiple physical lines. Also provides 'unreadline()' for + implementing line-at-a-time lookahead. + + Constructor is called as: + + TextFile (filename=None, file=None, **options) + + It bombs (RuntimeError) if both 'filename' and 'file' are None; + 'filename' should be a string, and 'file' a file object (or + something that provides 'readline()' and 'close()' methods). It is + recommended that you supply at least 'filename', so that TextFile + can include it in warning messages. If 'file' is not supplied, + TextFile creates its own using 'io.open()'. + + The options are all boolean, and affect the value returned by + 'readline()': + strip_comments [default: true] + strip from "#" to end-of-line, as well as any whitespace + leading up to the "#" -- unless it is escaped by a backslash + lstrip_ws [default: false] + strip leading whitespace from each line before returning it + rstrip_ws [default: true] + strip trailing whitespace (including line terminator!) from + each line before returning it + skip_blanks [default: true} + skip lines that are empty *after* stripping comments and + whitespace. (If both lstrip_ws and rstrip_ws are false, + then some lines may consist of solely whitespace: these will + *not* be skipped, even if 'skip_blanks' is true.) 
+ join_lines [default: false] + if a backslash is the last non-newline character on a line + after stripping comments and whitespace, join the following line + to it to form one "logical line"; if N consecutive lines end + with a backslash, then N+1 physical lines will be joined to + form one logical line. + collapse_join [default: false] + strip leading whitespace from lines that are joined to their + predecessor; only matters if (join_lines and not lstrip_ws) + errors [default: 'strict'] + error handler used to decode the file content + + Note that since 'rstrip_ws' can strip the trailing newline, the + semantics of 'readline()' must differ from those of the builtin file + object's 'readline()' method! In particular, 'readline()' returns + None for end-of-file: an empty string might just be a blank line (or + an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is + not.""" + + default_options = { 'strip_comments': 1, + 'skip_blanks': 1, + 'lstrip_ws': 0, + 'rstrip_ws': 1, + 'join_lines': 0, + 'collapse_join': 0, + 'errors': 'strict', + } + + def __init__(self, filename=None, file=None, **options): + """Construct a new TextFile object. At least one of 'filename' + (a string) and 'file' (a file-like object) must be supplied. + They keyword argument options are described above and affect + the values returned by 'readline()'.""" + if filename is None and file is None: + raise RuntimeError("you must supply either or both of 'filename' and 'file'") + + # set values for all options -- either from client option hash + # or fallback to default_options + for opt in self.default_options.keys(): + if opt in options: + setattr(self, opt, options[opt]) + else: + setattr(self, opt, self.default_options[opt]) + + # sanity check client option hash + for opt in options.keys(): + if opt not in self.default_options: + raise KeyError("invalid TextFile option '%s'" % opt) + + if file is None: + self.open(filename) + else: + self.filename = filename + self.file = file + self.current_line = 0 # assuming that file is at BOF! + + # 'linebuf' is a stack of lines that will be emptied before we + # actually read from the file; it's only populated by an + # 'unreadline()' operation + self.linebuf = [] + + def open(self, filename): + """Open a new file named 'filename'. This overrides both the + 'filename' and 'file' arguments to the constructor.""" + self.filename = filename + self.file = io.open(self.filename, 'r', errors=self.errors) + self.current_line = 0 + + def close(self): + """Close the current file and forget everything we know about it + (filename, current line number).""" + file = self.file + self.file = None + self.filename = None + self.current_line = None + file.close() + + def gen_error(self, msg, line=None): + outmsg = [] + if line is None: + line = self.current_line + outmsg.append(self.filename + ", ") + if isinstance(line, (list, tuple)): + outmsg.append("lines %d-%d: " % tuple(line)) + else: + outmsg.append("line %d: " % line) + outmsg.append(str(msg)) + return "".join(outmsg) + + def error(self, msg, line=None): + raise ValueError("error: " + self.gen_error(msg, line)) + + def warn(self, msg, line=None): + """Print (to stderr) a warning message tied to the current logical + line in the current file. If the current logical line in the + file spans multiple physical lines, the warning refers to the + whole range, eg. "lines 3-5". 
If 'line' supplied, it overrides + the current line number; it may be a list or tuple to indicate a + range of physical lines, or an integer for a single physical + line.""" + sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n") + + def readline(self): + """Read and return a single logical line from the current file (or + from an internal buffer if lines have previously been "unread" + with 'unreadline()'). If the 'join_lines' option is true, this + may involve reading multiple physical lines concatenated into a + single string. Updates the current line number, so calling + 'warn()' after 'readline()' emits a warning about the physical + line(s) just read. Returns None on end-of-file, since the empty + string can occur if 'rstrip_ws' is true but 'strip_blanks' is + not.""" + # If any "unread" lines waiting in 'linebuf', return the top + # one. (We don't actually buffer read-ahead data -- lines only + # get put in 'linebuf' if the client explicitly does an + # 'unreadline()'. + if self.linebuf: + line = self.linebuf[-1] + del self.linebuf[-1] + return line + + buildup_line = '' + + while True: + # read the line, make it None if EOF + line = self.file.readline() + if line == '': + line = None + + if self.strip_comments and line: + + # Look for the first "#" in the line. If none, never + # mind. If we find one and it's the first character, or + # is not preceded by "\", then it starts a comment -- + # strip the comment, strip whitespace before it, and + # carry on. Otherwise, it's just an escaped "#", so + # unescape it (and any other escaped "#"'s that might be + # lurking in there) and otherwise leave the line alone. + + pos = line.find("#") + if pos == -1: # no "#" -- no comments + pass + + # It's definitely a comment -- either "#" is the first + # character, or it's elsewhere and unescaped. + elif pos == 0 or line[pos-1] != "\\": + # Have to preserve the trailing newline, because it's + # the job of a later step (rstrip_ws) to remove it -- + # and if rstrip_ws is false, we'd better preserve it! + # (NB. this means that if the final line is all comment + # and has no trailing newline, we will think that it's + # EOF; I think that's OK.) + eol = (line[-1] == '\n') and '\n' or '' + line = line[0:pos] + eol + + # If all that's left is whitespace, then skip line + # *now*, before we try to join it to 'buildup_line' -- + # that way constructs like + # hello \\ + # # comment that should be ignored + # there + # result in "hello there". + if line.strip() == "": + continue + else: # it's an escaped "#" + line = line.replace("\\#", "#") + + # did previous line end with a backslash? then accumulate + if self.join_lines and buildup_line: + # oops: end of file + if line is None: + self.warn("continuation line immediately precedes " + "end-of-file") + return buildup_line + + if self.collapse_join: + line = line.lstrip() + line = buildup_line + line + + # careful: pay attention to line number when incrementing it + if isinstance(self.current_line, list): + self.current_line[1] = self.current_line[1] + 1 + else: + self.current_line = [self.current_line, + self.current_line + 1] + # just an ordinary line, read it as usual + else: + if line is None: # eof + return None + + # still have to be careful about incrementing the line number! 
+ if isinstance(self.current_line, list): + self.current_line = self.current_line[1] + 1 + else: + self.current_line = self.current_line + 1 + + # strip whitespace however the client wants (leading and + # trailing, or one or the other, or neither) + if self.lstrip_ws and self.rstrip_ws: + line = line.strip() + elif self.lstrip_ws: + line = line.lstrip() + elif self.rstrip_ws: + line = line.rstrip() + + # blank line (whether we rstrip'ed or not)? skip to next line + # if appropriate + if (line == '' or line == '\n') and self.skip_blanks: + continue + + if self.join_lines: + if line[-1] == '\\': + buildup_line = line[:-1] + continue + + if line[-2:] == '\\\n': + buildup_line = line[0:-2] + '\n' + continue + + # well, I guess there's some actual content there: return it + return line + + def readlines(self): + """Read and return the list of all logical lines remaining in the + current file.""" + lines = [] + while True: + line = self.readline() + if line is None: + return lines + lines.append(line) + + def unreadline(self, line): + """Push 'line' (a string) onto an internal buffer that will be + checked by future 'readline()' calls. Handy for implementing + a parser with line-at-a-time lookahead.""" + self.linebuf.append(line) diff --git a/venv/Lib/site-packages/setuptools/_distutils/unixccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/unixccompiler.py new file mode 100644 index 00000000..4d7a6de7 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/unixccompiler.py @@ -0,0 +1,328 @@ +"""distutils.unixccompiler + +Contains the UnixCCompiler class, a subclass of CCompiler that handles +the "typical" Unix-style command-line C compiler: + * macros defined with -Dname[=value] + * macros undefined with -Uname + * include search directories specified with -Idir + * libraries specified with -lllib + * library search directories specified with -Ldir + * compile handled by 'cc' (or similar) executable with -c option: + compiles .c to .o + * link static library handled by 'ar' command (possibly with 'ranlib') + * link shared library handled by 'cc -shared' +""" + +import os, sys, re + +from distutils import sysconfig +from distutils.dep_util import newer +from distutils.ccompiler import \ + CCompiler, gen_preprocess_options, gen_lib_options +from distutils.errors import \ + DistutilsExecError, CompileError, LibError, LinkError +from distutils import log + +if sys.platform == 'darwin': + import _osx_support + +# XXX Things not currently handled: +# * optimization/debug/warning flags; we just use whatever's in Python's +# Makefile and live with it. Is this adequate? If not, we might +# have to have a bunch of subclasses GNUCCompiler, SGICCompiler, +# SunCCompiler, and I suspect down that road lies madness. +# * even if we don't know a warning flag from an optimization flag, +# we need some way for outsiders to feed preprocessor/compiler/linker +# flags in to us -- eg. a sysadmin might want to mandate certain flags +# via a site config file, or a user might want to set something for +# compiling this module distribution only via the setup.py command +# line, whatever. As long as these options come from something on the +# current system, they can be as system-dependent as they like, and we +# should just happily stuff them into the preprocessor/compiler/linker +# options and carry on. + + +class UnixCCompiler(CCompiler): + + compiler_type = 'unix' + + # These are used by CCompiler in two places: the constructor sets + # instance attributes 'preprocessor', 'compiler', etc. 
from them, and + # 'set_executable()' allows any of these to be set. The defaults here + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). + executables = {'preprocessor' : None, + 'compiler' : ["cc"], + 'compiler_so' : ["cc"], + 'compiler_cxx' : ["cc"], + 'linker_so' : ["cc", "-shared"], + 'linker_exe' : ["cc"], + 'archiver' : ["ar", "-cr"], + 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": + executables['ranlib'] = ["ranlib"] + + # Needed for the filename generation methods provided by the base + # class, CCompiler. NB. whoever instantiates/uses a particular + # UnixCCompiler instance should set 'shared_lib_ext' -- we set a + # reasonable common default here, but it's not necessarily used on all + # Unices! + + src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"] + obj_extension = ".o" + static_lib_extension = ".a" + shared_lib_extension = ".so" + dylib_lib_extension = ".dylib" + xcode_stub_lib_extension = ".tbd" + static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s" + xcode_stub_lib_format = dylib_lib_format + if sys.platform == "cygwin": + exe_extension = ".exe" + + def preprocess(self, source, output_file=None, macros=None, + include_dirs=None, extra_preargs=None, extra_postargs=None): + fixed_args = self._fix_compile_args(None, macros, include_dirs) + ignore, macros, include_dirs = fixed_args + pp_opts = gen_preprocess_options(macros, include_dirs) + pp_args = self.preprocessor + pp_opts + if output_file: + pp_args.extend(['-o', output_file]) + if extra_preargs: + pp_args[:0] = extra_preargs + if extra_postargs: + pp_args.extend(extra_postargs) + pp_args.append(source) + + # We need to preprocess: either we're being forced to, or we're + # generating output to stdout, or there's a target output file and + # the source file is newer than the target (or the target doesn't + # exist). + if self.force or output_file is None or newer(source, output_file): + if output_file: + self.mkpath(os.path.dirname(output_file)) + try: + self.spawn(pp_args) + except DistutilsExecError as msg: + raise CompileError(msg) + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) + try: + self.spawn(compiler_so + cc_args + [src, '-o', obj] + + extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + + def create_static_lib(self, objects, output_libname, + output_dir=None, debug=0, target_lang=None): + objects, output_dir = self._fix_object_args(objects, output_dir) + + output_filename = \ + self.library_filename(output_libname, output_dir=output_dir) + + if self._need_link(objects, output_filename): + self.mkpath(os.path.dirname(output_filename)) + self.spawn(self.archiver + + [output_filename] + + objects + self.objects) + + # Not many Unices required ranlib anymore -- SunOS 4.x is, I + # think the only major Unix that does. Maybe we need some + # platform intelligence here to skip ranlib if it's not + # needed -- or maybe Python's configure script took care of + # it for us, hence the check for leading colon. 
+ if self.ranlib: + try: + self.spawn(self.ranlib + [output_filename]) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + def link(self, target_desc, objects, + output_filename, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, + export_symbols=None, debug=0, extra_preargs=None, + extra_postargs=None, build_temp=None, target_lang=None): + objects, output_dir = self._fix_object_args(objects, output_dir) + fixed_args = self._fix_lib_args(libraries, library_dirs, + runtime_library_dirs) + libraries, library_dirs, runtime_library_dirs = fixed_args + + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, + libraries) + if not isinstance(output_dir, (str, type(None))): + raise TypeError("'output_dir' must be a string or None") + if output_dir is not None: + output_filename = os.path.join(output_dir, output_filename) + + if self._need_link(objects, output_filename): + ld_args = (objects + self.objects + + lib_opts + ['-o', output_filename]) + if debug: + ld_args[:0] = ['-g'] + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: + if target_desc == CCompiler.EXECUTABLE: + linker = self.linker_exe[:] + else: + linker = self.linker_so[:] + if target_lang == "c++" and self.compiler_cxx: + # skip over environment variable settings if /usr/bin/env + # is used to set up the linker's environment. + # This is needed on OSX. Note: this assumes that the + # normal and C++ compiler have the same environment + # settings. + i = 0 + if os.path.basename(linker[0]) == "env": + i = 1 + while '=' in linker[i]: + i += 1 + + if os.path.basename(linker[i]) == 'ld_so_aix': + # AIX platforms prefix the compiler with the ld_so_aix + # script, so we need to adjust our linker index + offset = 1 + else: + offset = 0 + + linker[i+offset] = self.compiler_cxx[i] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) + + self.spawn(linker + ld_args) + except DistutilsExecError as msg: + raise LinkError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function, in + # ccompiler.py. + + def library_dir_option(self, dir): + return "-L" + dir + + def _is_gcc(self, compiler_name): + return "gcc" in compiler_name or "g++" in compiler_name + + def runtime_library_dir_option(self, dir): + # XXX Hackish, at the very least. See Python bug #445902: + # http://sourceforge.net/tracker/index.php + # ?func=detail&aid=445902&group_id=5470&atid=105470 + # Linkers on different platforms need different options to + # specify that directories need to be added to the list of + # directories searched for dependencies when a dynamic library + # is sought. GCC on GNU systems (Linux, FreeBSD, ...) has to + # be told to pass the -R option through to the linker, whereas + # other compilers and gcc on other systems just know this. + # Other compilers may need something slightly different. At + # this time, there's no way to determine this information from + # the configuration data stored in the Python installation, so + # we use this hack. 
+ compiler = os.path.basename(sysconfig.get_config_var("CC")) + if sys.platform[:6] == "darwin": + # MacOSX's linker doesn't understand the -R flag at all + return "-L" + dir + elif sys.platform[:7] == "freebsd": + return "-Wl,-rpath=" + dir + elif sys.platform[:5] == "hp-ux": + if self._is_gcc(compiler): + return ["-Wl,+s", "-L" + dir] + return ["+s", "-L" + dir] + else: + if self._is_gcc(compiler): + # gcc on non-GNU systems does not need -Wl, but can + # use it anyway. Since distutils has always passed in + # -Wl whenever gcc was used in the past it is probably + # safest to keep doing so. + if sysconfig.get_config_var("GNULD") == "yes": + # GNU ld needs an extra option to get a RUNPATH + # instead of just an RPATH. + return "-Wl,--enable-new-dtags,-R" + dir + else: + return "-Wl,-R" + dir + else: + # No idea how --enable-new-dtags would be passed on to + # ld if this system was using GNU ld. Don't know if a + # system like this even exists. + return "-R" + dir + + def library_option(self, lib): + return "-l" + lib + + def find_library_file(self, dirs, lib, debug=0): + shared_f = self.library_filename(lib, lib_type='shared') + dylib_f = self.library_filename(lib, lib_type='dylib') + xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub') + static_f = self.library_filename(lib, lib_type='static') + + if sys.platform == 'darwin': + # On OSX users can specify an alternate SDK using + # '-isysroot', calculate the SDK root if it is specified + # (and use it further on) + # + # Note that, as of Xcode 7, Apple SDKs may contain textual stub + # libraries with .tbd extensions rather than the normal .dylib + # shared libraries installed in /. The Apple compiler tool + # chain handles this transparently but it can cause problems + # for programs that are being built with an SDK and searching + # for specific libraries. Callers of find_library_file need to + # keep in mind that the base filename of the returned SDK library + # file might have a different extension from that of the library + # file installed on the running system, for example: + # /Applications/Xcode.app/Contents/Developer/Platforms/ + # MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/ + # usr/lib/libedit.tbd + # vs + # /usr/lib/libedit.dylib + cflags = sysconfig.get_config_var('CFLAGS') + m = re.search(r'-isysroot\s*(\S+)', cflags) + if m is None: + sysroot = '/' + else: + sysroot = m.group(1) + + + + for dir in dirs: + shared = os.path.join(dir, shared_f) + dylib = os.path.join(dir, dylib_f) + static = os.path.join(dir, static_f) + xcode_stub = os.path.join(dir, xcode_stub_f) + + if sys.platform == 'darwin' and ( + dir.startswith('/System/') or ( + dir.startswith('/usr/') and not dir.startswith('/usr/local/'))): + + shared = os.path.join(sysroot, dir[1:], shared_f) + dylib = os.path.join(sysroot, dir[1:], dylib_f) + static = os.path.join(sysroot, dir[1:], static_f) + xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f) + + # We're second-guessing the linker here, with not much hard + # data to go on: GCC seems to prefer the shared library, so I'm + # assuming that *all* Unix C compilers do. And of course I'm + # ignoring even GCC's "-static" option. So sue me. 
+ if os.path.exists(dylib): + return dylib + elif os.path.exists(xcode_stub): + return xcode_stub + elif os.path.exists(shared): + return shared + elif os.path.exists(static): + return static + + # Oops, didn't find it in *any* of 'dirs' + return None diff --git a/venv/Lib/site-packages/setuptools/_distutils/util.py b/venv/Lib/site-packages/setuptools/_distutils/util.py new file mode 100644 index 00000000..f5aca794 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/util.py @@ -0,0 +1,561 @@ +"""distutils.util + +Miscellaneous utility functions -- anything that doesn't fit into +one of the other *util.py modules. +""" + +import os +import re +import importlib.util +import string +import sys +from distutils.errors import DistutilsPlatformError +from distutils.dep_util import newer +from distutils.spawn import spawn +from distutils import log +from distutils.errors import DistutilsByteCompileError +from .py35compat import _optim_args_from_interpreter_flags + + +def get_host_platform(): + """Return a string that identifies the current platform. This is used mainly to + distinguish platform-specific build directories and platform-specific built + distributions. Typically includes the OS name and version and the + architecture (as supplied by 'os.uname()'), although the exact information + included depends on the OS; eg. on Linux, the kernel version isn't + particularly important. + + Examples of returned values: + linux-i586 + linux-alpha (?) + solaris-2.6-sun4u + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + + """ + if os.name == 'nt': + if 'amd64' in sys.version.lower(): + return 'win-amd64' + if '(arm)' in sys.version.lower(): + return 'win-arm32' + if '(arm64)' in sys.version.lower(): + return 'win-arm64' + return sys.platform + + # Set for cross builds explicitly + if "_PYTHON_HOST_PLATFORM" in os.environ: + return os.environ["_PYTHON_HOST_PLATFORM"] + + if os.name != "posix" or not hasattr(os, 'uname'): + # XXX what about the architecture? NT is Intel or Alpha, + # Mac OS is M68k or PPC, etc. + return sys.platform + + # Try to distinguish various flavours of Unix + + (osname, host, release, version, machine) = os.uname() + + # Convert the OS name to lowercase, remove '/' characters, and translate + # spaces (for "Power Macintosh") + osname = osname.lower().replace('/', '') + machine = machine.replace(' ', '_') + machine = machine.replace('/', '-') + + if osname[:5] == "linux": + # At least on Linux/Intel, 'machine' is the processor -- + # i386, etc. + # XXX what about Alpha, SPARC, etc? + return "%s-%s" % (osname, machine) + elif osname[:5] == "sunos": + if release[0] >= "5": # SunOS 5 == Solaris 2 + osname = "solaris" + release = "%d.%s" % (int(release[0]) - 3, release[2:]) + # We can't use "platform.architecture()[0]" because a + # bootstrap problem. We use a dict to get an error + # if some suspicious happens. 
+ bitness = {2147483647:"32bit", 9223372036854775807:"64bit"} + machine += ".%s" % bitness[sys.maxsize] + # fall through to standard osname-release-machine representation + elif osname[:3] == "aix": + from .py38compat import aix_platform + return aix_platform(osname, version, release) + elif osname[:6] == "cygwin": + osname = "cygwin" + rel_re = re.compile (r'[\d.]+', re.ASCII) + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == "darwin": + import _osx_support, distutils.sysconfig + osname, release, machine = _osx_support.get_platform_osx( + distutils.sysconfig.get_config_vars(), + osname, release, machine) + + return "%s-%s-%s" % (osname, release, machine) + +def get_platform(): + if os.name == 'nt': + TARGET_TO_PLAT = { + 'x86' : 'win32', + 'x64' : 'win-amd64', + 'arm' : 'win-arm32', + } + return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform() + else: + return get_host_platform() + +def convert_path (pathname): + """Return 'pathname' as a name that will work on the native filesystem, + i.e. split it on '/' and put it back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while '.' in paths: + paths.remove('.') + if not paths: + return os.curdir + return os.path.join(*paths) + +# convert_path () + + +def change_root (new_root, pathname): + """Return 'pathname' with 'new_root' prepended. If 'pathname' is + relative, this is equivalent to "os.path.join(new_root,pathname)". + Otherwise, it requires making 'pathname' relative and then joining the + two, which is tricky on DOS/Windows and Mac OS. + """ + if os.name == 'posix': + if not os.path.isabs(pathname): + return os.path.join(new_root, pathname) + else: + return os.path.join(new_root, pathname[1:]) + + elif os.name == 'nt': + (drive, path) = os.path.splitdrive(pathname) + if path[0] == '\\': + path = path[1:] + return os.path.join(new_root, path) + + else: + raise DistutilsPlatformError("nothing known about platform '%s'" % os.name) + + +_environ_checked = 0 +def check_environ (): + """Ensure that 'os.environ' has all the environment variables we + guarantee that users can use in config files, command-line options, + etc. Currently this includes: + HOME - user's home directory (Unix only) + PLAT - description of the current platform, including hardware + and OS (see 'get_platform()') + """ + global _environ_checked + if _environ_checked: + return + + if os.name == 'posix' and 'HOME' not in os.environ: + try: + import pwd + os.environ['HOME'] = pwd.getpwuid(os.getuid())[5] + except (ImportError, KeyError): + # bpo-10496: if the current user identifier doesn't exist in the + # password database, do nothing + pass + + if 'PLAT' not in os.environ: + os.environ['PLAT'] = get_platform() + + _environ_checked = 1 + + +def subst_vars (s, local_vars): + """Perform shell/Perl-style variable substitution on 'string'. 
Every + occurrence of '$' followed by a name is considered a variable, and + variable is substituted by the value found in the 'local_vars' + dictionary, or in 'os.environ' if it's not in 'local_vars'. + 'os.environ' is first checked/augmented to guarantee that it contains + certain values: see 'check_environ()'. Raise ValueError for any + variables not found in either 'local_vars' or 'os.environ'. + """ + check_environ() + def _subst (match, local_vars=local_vars): + var_name = match.group(1) + if var_name in local_vars: + return str(local_vars[var_name]) + else: + return os.environ[var_name] + + try: + return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s) + except KeyError as var: + raise ValueError("invalid variable '$%s'" % var) + +# subst_vars () + + +def grok_environment_error (exc, prefix="error: "): + # Function kept for backward compatibility. + # Used to try clever things with EnvironmentErrors, + # but nowadays str(exception) produces good messages. + return prefix + str(exc) + + +# Needed by 'split_quoted()' +_wordchars_re = _squote_re = _dquote_re = None +def _init_regex(): + global _wordchars_re, _squote_re, _dquote_re + _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace) + _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'") + _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"') + +def split_quoted (s): + """Split a string up according to Unix shell-like rules for quotes and + backslashes. In short: words are delimited by spaces, as long as those + spaces are not escaped by a backslash, or inside a quoted string. + Single and double quotes are equivalent, and the quote characters can + be backslash-escaped. The backslash is stripped from any two-character + escape sequence, leaving only the escaped character. The quote + characters are stripped from any quoted string. Returns a list of + words. + """ + + # This is a nice algorithm for splitting up a single string, since it + # doesn't require character-by-character examination. It was a little + # bit of a brain-bender to get it working right, though... + if _wordchars_re is None: _init_regex() + + s = s.strip() + words = [] + pos = 0 + + while s: + m = _wordchars_re.match(s, pos) + end = m.end() + if end == len(s): + words.append(s[:end]) + break + + if s[end] in string.whitespace: # unescaped, unquoted whitespace: now + words.append(s[:end]) # we definitely have a word delimiter + s = s[end:].lstrip() + pos = 0 + + elif s[end] == '\\': # preserve whatever is being escaped; + # will become part of the current word + s = s[:end] + s[end+1:] + pos = end+1 + + else: + if s[end] == "'": # slurp singly-quoted string + m = _squote_re.match(s, end) + elif s[end] == '"': # slurp doubly-quoted string + m = _dquote_re.match(s, end) + else: + raise RuntimeError("this can't happen (bad char '%c')" % s[end]) + + if m is None: + raise ValueError("bad string (mismatched %s quotes?)" % s[end]) + + (beg, end) = m.span() + s = s[:beg] + s[beg+1:end-1] + s[end:] + pos = m.end() - 2 + + if pos >= len(s): + words.append(s) + break + + return words + +# split_quoted () + + +def execute (func, args, msg=None, verbose=0, dry_run=0): + """Perform some action that affects the outside world (eg. by + writing to the filesystem). Such actions are special because they + are disabled by the 'dry_run' flag. This method takes care of all + that bureaucracy for you; all you have to do is supply the + function to call and an argument tuple for it (to embody the + "external action" being performed), and an optional message to + print. 
+ """ + if msg is None: + msg = "%s%r" % (func.__name__, args) + if msg[-2:] == ',)': # correct for singleton tuple + msg = msg[0:-2] + ')' + + log.info(msg) + if not dry_run: + func(*args) + + +def strtobool (val): + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + """ + val = val.lower() + if val in ('y', 'yes', 't', 'true', 'on', '1'): + return 1 + elif val in ('n', 'no', 'f', 'false', 'off', '0'): + return 0 + else: + raise ValueError("invalid truth value %r" % (val,)) + + +def byte_compile (py_files, + optimize=0, force=0, + prefix=None, base_dir=None, + verbose=1, dry_run=0, + direct=None): + """Byte-compile a collection of Python source files to .pyc + files in a __pycache__ subdirectory. 'py_files' is a list + of files to compile; any files that don't end in ".py" are silently + skipped. 'optimize' must be one of the following: + 0 - don't optimize + 1 - normal optimization (like "python -O") + 2 - extra optimization (like "python -OO") + If 'force' is true, all files are recompiled regardless of + timestamps. + + The source filename encoded in each bytecode file defaults to the + filenames listed in 'py_files'; you can modify these with 'prefix' and + 'basedir'. 'prefix' is a string that will be stripped off of each + source filename, and 'base_dir' is a directory name that will be + prepended (after 'prefix' is stripped). You can supply either or both + (or neither) of 'prefix' and 'base_dir', as you wish. + + If 'dry_run' is true, doesn't actually do anything that would + affect the filesystem. + + Byte-compilation is either done directly in this interpreter process + with the standard py_compile module, or indirectly by writing a + temporary script and executing it. Normally, you should let + 'byte_compile()' figure out to use direct compilation or not (see + the source for details). The 'direct' flag is used by the script + generated in indirect mode; unless you know what you're doing, leave + it set to None. + """ + + # Late import to fix a bootstrap issue: _posixsubprocess is built by + # setup.py, but setup.py uses distutils. + import subprocess + + # nothing is done if sys.dont_write_bytecode is True + if sys.dont_write_bytecode: + raise DistutilsByteCompileError('byte-compiling is disabled.') + + # First, if the caller didn't force us into direct or indirect mode, + # figure out which mode we should be in. We take a conservative + # approach: choose direct mode *only* if the current interpreter is + # in debug mode and optimize is 0. If we're not in debug mode (-O + # or -OO), we don't know which level of optimization this + # interpreter is running with, so we can't do direct + # byte-compilation and be certain that it's the right thing. Thus, + # always compile indirectly if the current interpreter is in either + # optimize mode, or if either optimization level was requested by + # the caller. + if direct is None: + direct = (__debug__ and optimize == 0) + + # "Indirect" byte-compilation: write a temporary script and then + # run it with the appropriate flags. 
+ if not direct: + try: + from tempfile import mkstemp + (script_fd, script_name) = mkstemp(".py") + except ImportError: + from tempfile import mktemp + (script_fd, script_name) = None, mktemp(".py") + log.info("writing byte-compilation script '%s'", script_name) + if not dry_run: + if script_fd is not None: + script = os.fdopen(script_fd, "w") + else: + script = open(script_name, "w") + + with script: + script.write("""\ +from distutils.util import byte_compile +files = [ +""") + + # XXX would be nice to write absolute filenames, just for + # safety's sake (script should be more robust in the face of + # chdir'ing before running it). But this requires abspath'ing + # 'prefix' as well, and that breaks the hack in build_lib's + # 'byte_compile()' method that carefully tacks on a trailing + # slash (os.sep really) to make sure the prefix here is "just + # right". This whole prefix business is rather delicate -- the + # problem is that it's really a directory, but I'm treating it + # as a dumb string, so trailing slashes and so forth matter. + + #py_files = map(os.path.abspath, py_files) + #if prefix: + # prefix = os.path.abspath(prefix) + + script.write(",\n".join(map(repr, py_files)) + "]\n") + script.write(""" +byte_compile(files, optimize=%r, force=%r, + prefix=%r, base_dir=%r, + verbose=%r, dry_run=0, + direct=1) +""" % (optimize, force, prefix, base_dir, verbose)) + + cmd = [sys.executable] + cmd.extend(_optim_args_from_interpreter_flags()) + cmd.append(script_name) + spawn(cmd, dry_run=dry_run) + execute(os.remove, (script_name,), "removing %s" % script_name, + dry_run=dry_run) + + # "Direct" byte-compilation: use the py_compile module to compile + # right here, right now. Note that the script generated in indirect + # mode simply calls 'byte_compile()' in direct mode, a weird sort of + # cross-process recursion. Hey, it works! + else: + from py_compile import compile + + for file in py_files: + if file[-3:] != ".py": + # This lets us be lazy and not filter filenames in + # the "install_lib" command. + continue + + # Terminology from the py_compile module: + # cfile - byte-compiled file + # dfile - purported source filename (same as 'file' by default) + if optimize >= 0: + opt = '' if optimize == 0 else optimize + cfile = importlib.util.cache_from_source( + file, optimization=opt) + else: + cfile = importlib.util.cache_from_source(file) + dfile = file + if prefix: + if file[:len(prefix)] != prefix: + raise ValueError("invalid prefix: filename %r doesn't start with %r" + % (file, prefix)) + dfile = dfile[len(prefix):] + if base_dir: + dfile = os.path.join(base_dir, dfile) + + cfile_base = os.path.basename(cfile) + if direct: + if force or newer(file, cfile): + log.info("byte-compiling %s to %s", file, cfile_base) + if not dry_run: + compile(file, cfile, dfile) + else: + log.debug("skipping byte-compilation of %s to %s", + file, cfile_base) + +# byte_compile () + +def rfc822_escape (header): + """Return a version of the string escaped for inclusion in an + RFC-822 header, by ensuring there are 8 spaces space after each newline. + """ + lines = header.split('\n') + sep = '\n' + 8 * ' ' + return sep.join(lines) + +# 2to3 support + +def run_2to3(files, fixer_names=None, options=None, explicit=None): + """Invoke 2to3 on a list of Python files. + The files should all come from the build area, as the + modification is done in-place. 
To reduce the build time, + only files modified since the last invocation of this + function should be passed in the files argument.""" + + if not files: + return + + # Make this class local, to delay import of 2to3 + from lib2to3.refactor import RefactoringTool, get_fixers_from_package + class DistutilsRefactoringTool(RefactoringTool): + def log_error(self, msg, *args, **kw): + log.error(msg, *args) + + def log_message(self, msg, *args): + log.info(msg, *args) + + def log_debug(self, msg, *args): + log.debug(msg, *args) + + if fixer_names is None: + fixer_names = get_fixers_from_package('lib2to3.fixes') + r = DistutilsRefactoringTool(fixer_names, options=options) + r.refactor(files, write=True) + +def copydir_run_2to3(src, dest, template=None, fixer_names=None, + options=None, explicit=None): + """Recursively copy a directory, only copying new and changed files, + running run_2to3 over all newly copied Python modules afterward. + + If you give a template string, it's parsed like a MANIFEST.in. + """ + from distutils.dir_util import mkpath + from distutils.file_util import copy_file + from distutils.filelist import FileList + filelist = FileList() + curdir = os.getcwd() + os.chdir(src) + try: + filelist.findall() + finally: + os.chdir(curdir) + filelist.files[:] = filelist.allfiles + if template: + for line in template.splitlines(): + line = line.strip() + if not line: continue + filelist.process_template_line(line) + copied = [] + for filename in filelist.files: + outname = os.path.join(dest, filename) + mkpath(os.path.dirname(outname)) + res = copy_file(os.path.join(src, filename), outname, update=1) + if res[1]: copied.append(outname) + run_2to3([fn for fn in copied if fn.lower().endswith('.py')], + fixer_names=fixer_names, options=options, explicit=explicit) + return copied + +class Mixin2to3: + '''Mixin class for commands that run 2to3. + To configure 2to3, setup scripts may either change + the class variables, or inherit from individual commands + to override how 2to3 is invoked.''' + + # provide list of fixers to run; + # defaults to all from lib2to3.fixers + fixer_names = None + + # options dictionary + options = None + + # list of fixers to invoke even though they are marked as explicit + explicit = None + + def run_2to3(self, files): + return run_2to3(files, self.fixer_names, self.options, self.explicit) diff --git a/venv/Lib/site-packages/setuptools/_distutils/version.py b/venv/Lib/site-packages/setuptools/_distutils/version.py new file mode 100644 index 00000000..c33bebae --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/version.py @@ -0,0 +1,347 @@ +# +# distutils/version.py +# +# Implements multiple version numbering conventions for the +# Python Module Distribution Utilities. +# +# $Id$ +# + +"""Provides classes to represent module version numbers (one class for +each style of version numbering). There are currently two such classes +implemented: StrictVersion and LooseVersion. + +Every version number class implements the following interface: + * the 'parse' method takes a string and parses it to some internal + representation; if the string is an invalid version number, + 'parse' raises a ValueError exception + * the class constructor takes an optional string argument which, + if supplied, is passed to 'parse' + * __str__ reconstructs the string that was passed to 'parse' (or + an equivalent string -- ie. 
one that will generate an equivalent + version number instance) + * __repr__ generates Python code to recreate the version number instance + * _cmp compares the current instance with either another instance + of the same class or a string (which will be parsed to an instance + of the same class, thus must follow the same rules) +""" + +import re + +class Version: + """Abstract base class for version numbering classes. Just provides + constructor (__init__) and reproducer (__repr__), because those + seem to be the same for all version numbering classes; and route + rich comparisons to _cmp. + """ + + def __init__ (self, vstring=None): + if vstring: + self.parse(vstring) + + def __repr__ (self): + return "%s ('%s')" % (self.__class__.__name__, str(self)) + + def __eq__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c == 0 + + def __lt__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c < 0 + + def __le__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c <= 0 + + def __gt__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c > 0 + + def __ge__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c >= 0 + + +# Interface for version-number classes -- must be implemented +# by the following classes (the concrete ones -- Version should +# be treated as an abstract class). +# __init__ (string) - create and take same action as 'parse' +# (string parameter is optional) +# parse (string) - convert a string representation to whatever +# internal representation is appropriate for +# this style of version numbering +# __str__ (self) - convert back to a string; should be very similar +# (if not identical to) the string supplied to parse +# __repr__ (self) - generate Python code to recreate +# the instance +# _cmp (self, other) - compare two version numbers ('other' may +# be an unparsed version string, or another +# instance of your version class) + + +class StrictVersion (Version): + + """Version numbering for anal retentives and software idealists. + Implements the standard interface for version number classes as + described above. A version number consists of two or three + dot-separated numeric components, with an optional "pre-release" tag + on the end. The pre-release tag consists of the letter 'a' or 'b' + followed by a number. If the numeric components of two version + numbers are equal, then one with a pre-release tag will always + be deemed earlier (lesser) than one without. + + The following are valid version numbers (shown in the order that + would be obtained by sorting according to the supplied cmp function): + + 0.4 0.4.0 (these two are equivalent) + 0.4.1 + 0.5a1 + 0.5b3 + 0.5 + 0.9.6 + 1.0 + 1.0.4a3 + 1.0.4b1 + 1.0.4 + + The following are examples of invalid version numbers: + + 1 + 2.7.2.2 + 1.3.a4 + 1.3pl1 + 1.3c4 + + The rationale for this version numbering system will be explained + in the distutils documentation. + """ + + version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? 
([ab](\d+))?$', + re.VERBOSE | re.ASCII) + + + def parse (self, vstring): + match = self.version_re.match(vstring) + if not match: + raise ValueError("invalid version number '%s'" % vstring) + + (major, minor, patch, prerelease, prerelease_num) = \ + match.group(1, 2, 4, 5, 6) + + if patch: + self.version = tuple(map(int, [major, minor, patch])) + else: + self.version = tuple(map(int, [major, minor])) + (0,) + + if prerelease: + self.prerelease = (prerelease[0], int(prerelease_num)) + else: + self.prerelease = None + + + def __str__ (self): + + if self.version[2] == 0: + vstring = '.'.join(map(str, self.version[0:2])) + else: + vstring = '.'.join(map(str, self.version)) + + if self.prerelease: + vstring = vstring + self.prerelease[0] + str(self.prerelease[1]) + + return vstring + + + def _cmp (self, other): + if isinstance(other, str): + other = StrictVersion(other) + elif not isinstance(other, StrictVersion): + return NotImplemented + + if self.version != other.version: + # numeric versions don't match + # prerelease stuff doesn't matter + if self.version < other.version: + return -1 + else: + return 1 + + # have to compare prerelease + # case 1: neither has prerelease; they're equal + # case 2: self has prerelease, other doesn't; other is greater + # case 3: self doesn't have prerelease, other does: self is greater + # case 4: both have prerelease: must compare them! + + if (not self.prerelease and not other.prerelease): + return 0 + elif (self.prerelease and not other.prerelease): + return -1 + elif (not self.prerelease and other.prerelease): + return 1 + elif (self.prerelease and other.prerelease): + if self.prerelease == other.prerelease: + return 0 + elif self.prerelease < other.prerelease: + return -1 + else: + return 1 + else: + assert False, "never get here" + +# end class StrictVersion + + +# The rules according to Greg Stein: +# 1) a version number has 1 or more numbers separated by a period or by +# sequences of letters. If only periods, then these are compared +# left-to-right to determine an ordering. +# 2) sequences of letters are part of the tuple for comparison and are +# compared lexicographically +# 3) recognize the numeric components may have leading zeroes +# +# The LooseVersion class below implements these rules: a version number +# string is split up into a tuple of integer and string components, and +# comparison is a simple tuple comparison. This means that version +# numbers behave in a predictable and obvious way, but a way that might +# not necessarily be how people *want* version numbers to behave. There +# wouldn't be a problem if people could stick to purely numeric version +# numbers: just split on period and compare the numbers as tuples. +# However, people insist on putting letters into their version numbers; +# the most common purpose seems to be: +# - indicating a "pre-release" version +# ('alpha', 'beta', 'a', 'b', 'pre', 'p') +# - indicating a post-release patch ('p', 'pl', 'patch') +# but of course this can't cover all version number schemes, and there's +# no way to know what a programmer means without asking him. +# +# The problem is what to do with letters (and other non-numeric +# characters) in a version number. The current implementation does the +# obvious and predictable thing: keep them as strings and compare +# lexically within a tuple comparison. This has the desired effect if +# an appended letter sequence implies something "post-release": +# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002". 
+# +# However, if letters in a version number imply a pre-release version, +# the "obvious" thing isn't correct. Eg. you would expect that +# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison +# implemented here, this just isn't so. +# +# Two possible solutions come to mind. The first is to tie the +# comparison algorithm to a particular set of semantic rules, as has +# been done in the StrictVersion class above. This works great as long +# as everyone can go along with bondage and discipline. Hopefully a +# (large) subset of Python module programmers will agree that the +# particular flavour of bondage and discipline provided by StrictVersion +# provides enough benefit to be worth using, and will submit their +# version numbering scheme to its domination. The free-thinking +# anarchists in the lot will never give in, though, and something needs +# to be done to accommodate them. +# +# Perhaps a "moderately strict" version class could be implemented that +# lets almost anything slide (syntactically), and makes some heuristic +# assumptions about non-digits in version number strings. This could +# sink into special-case-hell, though; if I was as talented and +# idiosyncratic as Larry Wall, I'd go ahead and implement a class that +# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is +# just as happy dealing with things like "2g6" and "1.13++". I don't +# think I'm smart enough to do it right though. +# +# In any case, I've coded the test suite for this module (see +# ../test/test_version.py) specifically to fail on things like comparing +# "1.2a2" and "1.2". That's not because the *code* is doing anything +# wrong, it's because the simple, obvious design doesn't match my +# complicated, hairy expectations for real-world version numbers. It +# would be a snap to fix the test suite to say, "Yep, LooseVersion does +# the Right Thing" (ie. the code matches the conception). But I'd rather +# have a conception that matches common notions about version numbers. + +class LooseVersion (Version): + + """Version numbering for anarchists and software realists. + Implements the standard interface for version number classes as + described above. A version number consists of a series of numbers, + separated by either periods or strings of letters. When comparing + version numbers, the numeric components will be compared + numerically, and the alphabetic components lexically. The following + are all valid version numbers, in no particular order: + + 1.5.1 + 1.5.2b2 + 161 + 3.10a + 8.02 + 3.4j + 1996.07.12 + 3.2.pl0 + 3.1.1.6 + 2g6 + 11g + 0.960923 + 2.2beta29 + 1.13++ + 5.5.kw + 2.0b1pl0 + + In fact, there is no such thing as an invalid version number under + this scheme; the rules for comparison are simple and predictable, + but may not always give the results you want (for some definition + of "want"). 
+ """ + + component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) + + def __init__ (self, vstring=None): + if vstring: + self.parse(vstring) + + + def parse (self, vstring): + # I've given up on thinking I can reconstruct the version string + # from the parsed tuple -- so I just store the string here for + # use by __str__ + self.vstring = vstring + components = [x for x in self.component_re.split(vstring) + if x and x != '.'] + for i, obj in enumerate(components): + try: + components[i] = int(obj) + except ValueError: + pass + + self.version = components + + + def __str__ (self): + return self.vstring + + + def __repr__ (self): + return "LooseVersion ('%s')" % str(self) + + + def _cmp (self, other): + if isinstance(other, str): + other = LooseVersion(other) + elif not isinstance(other, LooseVersion): + return NotImplemented + + if self.version == other.version: + return 0 + if self.version < other.version: + return -1 + if self.version > other.version: + return 1 + + +# end class LooseVersion diff --git a/venv/Lib/site-packages/setuptools/_distutils/versionpredicate.py b/venv/Lib/site-packages/setuptools/_distutils/versionpredicate.py new file mode 100644 index 00000000..062c98f2 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_distutils/versionpredicate.py @@ -0,0 +1,166 @@ +"""Module for parsing and testing package version predicate strings. +""" +import re +import distutils.version +import operator + + +re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", + re.ASCII) +# (package) (rest) + +re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses +re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$") +# (comp) (version) + + +def splitUp(pred): + """Parse a single version comparison. + + Return (comparison string, StrictVersion) + """ + res = re_splitComparison.match(pred) + if not res: + raise ValueError("bad package restriction syntax: %r" % pred) + comp, verStr = res.groups() + return (comp, distutils.version.StrictVersion(verStr)) + +compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq, + ">": operator.gt, ">=": operator.ge, "!=": operator.ne} + +class VersionPredicate: + """Parse and test package version predicates. + + >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)') + + The `name` attribute provides the full dotted name that is given:: + + >>> v.name + 'pyepat.abc' + + The str() of a `VersionPredicate` provides a normalized + human-readable version of the expression:: + + >>> print(v) + pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3) + + The `satisfied_by()` method can be used to determine with a given + version number is included in the set described by the version + restrictions:: + + >>> v.satisfied_by('1.1') + True + >>> v.satisfied_by('1.4') + True + >>> v.satisfied_by('1.0') + False + >>> v.satisfied_by('4444.4') + False + >>> v.satisfied_by('1555.1b3') + False + + `VersionPredicate` is flexible in accepting extra whitespace:: + + >>> v = VersionPredicate(' pat( == 0.1 ) ') + >>> v.name + 'pat' + >>> v.satisfied_by('0.1') + True + >>> v.satisfied_by('0.2') + False + + If any version numbers passed in do not conform to the + restrictions of `StrictVersion`, a `ValueError` is raised:: + + >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)') + Traceback (most recent call last): + ... 
+ ValueError: invalid version number '1.2zb3' + + It the module or package name given does not conform to what's + allowed as a legal module or package name, `ValueError` is + raised:: + + >>> v = VersionPredicate('foo-bar') + Traceback (most recent call last): + ... + ValueError: expected parenthesized list: '-bar' + + >>> v = VersionPredicate('foo bar (12.21)') + Traceback (most recent call last): + ... + ValueError: expected parenthesized list: 'bar (12.21)' + + """ + + def __init__(self, versionPredicateStr): + """Parse a version predicate string. + """ + # Fields: + # name: package name + # pred: list of (comparison string, StrictVersion) + + versionPredicateStr = versionPredicateStr.strip() + if not versionPredicateStr: + raise ValueError("empty package restriction") + match = re_validPackage.match(versionPredicateStr) + if not match: + raise ValueError("bad package name in %r" % versionPredicateStr) + self.name, paren = match.groups() + paren = paren.strip() + if paren: + match = re_paren.match(paren) + if not match: + raise ValueError("expected parenthesized list: %r" % paren) + str = match.groups()[0] + self.pred = [splitUp(aPred) for aPred in str.split(",")] + if not self.pred: + raise ValueError("empty parenthesized list in %r" + % versionPredicateStr) + else: + self.pred = [] + + def __str__(self): + if self.pred: + seq = [cond + " " + str(ver) for cond, ver in self.pred] + return self.name + " (" + ", ".join(seq) + ")" + else: + return self.name + + def satisfied_by(self, version): + """True if version is compatible with all the predicates in self. + The parameter version must be acceptable to the StrictVersion + constructor. It may be either a string or StrictVersion. + """ + for cond, ver in self.pred: + if not compmap[cond](version, ver): + return False + return True + + +_provision_rx = None + +def split_provision(value): + """Return the name and optional version number of a provision. + + The version number, if given, will be returned as a `StrictVersion` + instance, otherwise it will be `None`. + + >>> split_provision('mypkg') + ('mypkg', None) + >>> split_provision(' mypkg( 1.2 ) ') + ('mypkg', StrictVersion ('1.2')) + """ + global _provision_rx + if _provision_rx is None: + _provision_rx = re.compile( + r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", + re.ASCII) + value = value.strip() + m = _provision_rx.match(value) + if not m: + raise ValueError("illegal provides specification: %r" % value) + ver = m.group(2) or None + if ver: + ver = distutils.version.StrictVersion(ver) + return m.group(1), ver diff --git a/venv/Lib/site-packages/setuptools/_imp.py b/venv/Lib/site-packages/setuptools/_imp.py new file mode 100644 index 00000000..47efd792 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_imp.py @@ -0,0 +1,82 @@ +""" +Re-implementation of find_module and get_frozen_object +from the deprecated imp module. 
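Before the body of `_imp.py` continues below, one note on `versionpredicate.py` above: its doctests exercise single checks, and screening several candidate versions against one predicate works the same way. A small sketch with a made-up package name and versions:

```python
# Illustrative only: the predicate string and versions below are invented for the example.
from distutils.versionpredicate import VersionPredicate

pred = VersionPredicate("mypkg (>=1.2, <2.0)")
for candidate in ("1.1", "1.4", "2.0"):
    print(candidate, pred.satisfied_by(candidate))  # 1.1 False, 1.4 True, 2.0 False
```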
+""" + +import os +import importlib.util +import importlib.machinery + +from .py34compat import module_from_spec + + +PY_SOURCE = 1 +PY_COMPILED = 2 +C_EXTENSION = 3 +C_BUILTIN = 6 +PY_FROZEN = 7 + + +def find_spec(module, paths): + finder = ( + importlib.machinery.PathFinder().find_spec + if isinstance(paths, list) else + importlib.util.find_spec + ) + return finder(module, paths) + + +def find_module(module, paths=None): + """Just like 'imp.find_module()', but with package support""" + spec = find_spec(module, paths) + if spec is None: + raise ImportError("Can't find %s" % module) + if not spec.has_location and hasattr(spec, 'submodule_search_locations'): + spec = importlib.util.spec_from_loader('__init__.py', spec.loader) + + kind = -1 + file = None + static = isinstance(spec.loader, type) + if spec.origin == 'frozen' or static and issubclass( + spec.loader, importlib.machinery.FrozenImporter): + kind = PY_FROZEN + path = None # imp compabilty + suffix = mode = '' # imp compatibility + elif spec.origin == 'built-in' or static and issubclass( + spec.loader, importlib.machinery.BuiltinImporter): + kind = C_BUILTIN + path = None # imp compabilty + suffix = mode = '' # imp compatibility + elif spec.has_location: + path = spec.origin + suffix = os.path.splitext(path)[1] + mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb' + + if suffix in importlib.machinery.SOURCE_SUFFIXES: + kind = PY_SOURCE + elif suffix in importlib.machinery.BYTECODE_SUFFIXES: + kind = PY_COMPILED + elif suffix in importlib.machinery.EXTENSION_SUFFIXES: + kind = C_EXTENSION + + if kind in {PY_SOURCE, PY_COMPILED}: + file = open(path, mode) + else: + path = None + suffix = mode = '' + + return file, path, (suffix, mode, kind) + + +def get_frozen_object(module, paths=None): + spec = find_spec(module, paths) + if not spec: + raise ImportError("Can't find %s" % module) + return spec.loader.get_code(module) + + +def get_module(module, paths, info): + spec = find_spec(module, paths) + if not spec: + raise ImportError("Can't find %s" % module) + return module_from_spec(spec) diff --git a/venv/Lib/site-packages/setuptools/_vendor/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..d672a114 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-36.pyc new file mode 100644 index 00000000..20acc0da Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-36.pyc new file mode 100644 index 00000000..d16fa3fb Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__init__.py new file mode 100644 index 00000000..19a169fc --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__init__.py 
@@ -0,0 +1,4 @@ +from .more import * # noqa +from .recipes import * # noqa + +__version__ = '8.8.0' diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..c9e3efb9 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-36.pyc new file mode 100644 index 00000000..7bbe8ec2 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-36.pyc new file mode 100644 index 00000000..775be8a0 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/more.py b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/more.py new file mode 100644 index 00000000..0f7d282a --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/more.py @@ -0,0 +1,3825 @@ +import warnings + +from collections import Counter, defaultdict, deque, abc +from collections.abc import Sequence +from concurrent.futures import ThreadPoolExecutor +from functools import partial, reduce, wraps +from heapq import merge, heapify, heapreplace, heappop +from itertools import ( + chain, + compress, + count, + cycle, + dropwhile, + groupby, + islice, + repeat, + starmap, + takewhile, + tee, + zip_longest, +) +from math import exp, factorial, floor, log +from queue import Empty, Queue +from random import random, randrange, uniform +from operator import itemgetter, mul, sub, gt, lt +from sys import hexversion, maxsize +from time import monotonic + +from .recipes import ( + consume, + flatten, + pairwise, + powerset, + take, + unique_everseen, +) + +__all__ = [ + 'AbortThread', + 'adjacent', + 'always_iterable', + 'always_reversible', + 'bucket', + 'callback_iter', + 'chunked', + 'circular_shifts', + 'collapse', + 'collate', + 'consecutive_groups', + 'consumer', + 'countable', + 'count_cycle', + 'mark_ends', + 'difference', + 'distinct_combinations', + 'distinct_permutations', + 'distribute', + 'divide', + 'exactly_n', + 'filter_except', + 'first', + 'groupby_transform', + 'ilen', + 'interleave_longest', + 'interleave', + 'intersperse', + 'islice_extended', + 'iterate', + 'ichunked', + 'is_sorted', + 'last', + 'locate', + 'lstrip', + 'make_decorator', + 'map_except', + 'map_reduce', + 'nth_or_last', + 'nth_permutation', + 'nth_product', + 'numeric_range', + 'one', + 'only', + 'padded', + 'partitions', + 'set_partitions', + 'peekable', + 'repeat_last', + 'replace', + 'rlocate', + 'rstrip', + 'run_length', + 'sample', + 'seekable', + 'SequenceView', + 'side_effect', + 'sliced', + 'sort_together', + 'split_at', + 'split_after', + 'split_before', + 'split_when', + 'split_into', + 'spy', + 'stagger', + 'strip', + 'substrings', + 'substrings_indexes', + 'time_limited', + 'unique_to_each', + 'unzip', + 'windowed', + 'with_iter', + 'UnequalIterablesError', + 'zip_equal', + 'zip_offset', + 'windowed_complete', 
+ 'all_unique', + 'value_chain', + 'product_index', + 'combination_index', + 'permutation_index', +] + +_marker = object() + + +def chunked(iterable, n, strict=False): + """Break *iterable* into lists of length *n*: + + >>> list(chunked([1, 2, 3, 4, 5, 6], 3)) + [[1, 2, 3], [4, 5, 6]] + + By the default, the last yielded list will have fewer than *n* elements + if the length of *iterable* is not divisible by *n*: + + >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3)) + [[1, 2, 3], [4, 5, 6], [7, 8]] + + To use a fill-in value instead, see the :func:`grouper` recipe. + + If the length of *iterable* is not divisible by *n* and *strict* is + ``True``, then ``ValueError`` will be raised before the last + list is yielded. + + """ + iterator = iter(partial(take, n, iter(iterable)), []) + if strict: + + def ret(): + for chunk in iterator: + if len(chunk) != n: + raise ValueError('iterable is not divisible by n.') + yield chunk + + return iter(ret()) + else: + return iterator + + +def first(iterable, default=_marker): + """Return the first item of *iterable*, or *default* if *iterable* is + empty. + + >>> first([0, 1, 2, 3]) + 0 + >>> first([], 'some default') + 'some default' + + If *default* is not provided and there are no items in the iterable, + raise ``ValueError``. + + :func:`first` is useful when you have a generator of expensive-to-retrieve + values and want any arbitrary one. It is marginally shorter than + ``next(iter(iterable), default)``. + + """ + try: + return next(iter(iterable)) + except StopIteration as e: + if default is _marker: + raise ValueError( + 'first() was called on an empty iterable, and no ' + 'default value was provided.' + ) from e + return default + + +def last(iterable, default=_marker): + """Return the last item of *iterable*, or *default* if *iterable* is + empty. + + >>> last([0, 1, 2, 3]) + 3 + >>> last([], 'some default') + 'some default' + + If *default* is not provided and there are no items in the iterable, + raise ``ValueError``. + """ + try: + if isinstance(iterable, Sequence): + return iterable[-1] + # Work around https://bugs.python.org/issue38525 + elif hasattr(iterable, '__reversed__') and (hexversion != 0x030800F0): + return next(reversed(iterable)) + else: + return deque(iterable, maxlen=1)[-1] + except (IndexError, TypeError, StopIteration): + if default is _marker: + raise ValueError( + 'last() was called on an empty iterable, and no default was ' + 'provided.' + ) + return default + + +def nth_or_last(iterable, n, default=_marker): + """Return the nth or the last item of *iterable*, + or *default* if *iterable* is empty. + + >>> nth_or_last([0, 1, 2, 3], 2) + 2 + >>> nth_or_last([0, 1], 2) + 1 + >>> nth_or_last([], 0, 'some default') + 'some default' + + If *default* is not provided and there are no items in the iterable, + raise ``ValueError``. + """ + return last(islice(iterable, n + 1), default=default) + + +class peekable: + """Wrap an iterator to allow lookahead and prepending elements. + + Call :meth:`peek` on the result to get the value that will be returned + by :func:`next`. This won't advance the iterator: + + >>> p = peekable(['a', 'b']) + >>> p.peek() + 'a' + >>> next(p) + 'a' + + Pass :meth:`peek` a default value to return that instead of raising + ``StopIteration`` when the iterator is exhausted. 
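Looking back at `chunked()` above: the docstring describes the *strict* flag but carries no doctest for it. A quick sketch of the failure mode (the `peekable` docstring resumes below):

```python
# Sketch: with strict=True, chunked() raises once an undersized final chunk appears.
from more_itertools import chunked

print(list(chunked([1, 2, 3, 4, 5, 6], 3, strict=True)))  # [[1, 2, 3], [4, 5, 6]]
try:
    list(chunked([1, 2, 3, 4, 5], 3, strict=True))
except ValueError as exc:
    print(exc)  # iterable is not divisible by n.
```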
+ + >>> p = peekable([]) + >>> p.peek('hi') + 'hi' + + peekables also offer a :meth:`prepend` method, which "inserts" items + at the head of the iterable: + + >>> p = peekable([1, 2, 3]) + >>> p.prepend(10, 11, 12) + >>> next(p) + 10 + >>> p.peek() + 11 + >>> list(p) + [11, 12, 1, 2, 3] + + peekables can be indexed. Index 0 is the item that will be returned by + :func:`next`, index 1 is the item after that, and so on: + The values up to the given index will be cached. + + >>> p = peekable(['a', 'b', 'c', 'd']) + >>> p[0] + 'a' + >>> p[1] + 'b' + >>> next(p) + 'a' + + Negative indexes are supported, but be aware that they will cache the + remaining items in the source iterator, which may require significant + storage. + + To check whether a peekable is exhausted, check its truth value: + + >>> p = peekable(['a', 'b']) + >>> if p: # peekable has items + ... list(p) + ['a', 'b'] + >>> if not p: # peekable is exhausted + ... list(p) + [] + + """ + + def __init__(self, iterable): + self._it = iter(iterable) + self._cache = deque() + + def __iter__(self): + return self + + def __bool__(self): + try: + self.peek() + except StopIteration: + return False + return True + + def peek(self, default=_marker): + """Return the item that will be next returned from ``next()``. + + Return ``default`` if there are no items left. If ``default`` is not + provided, raise ``StopIteration``. + + """ + if not self._cache: + try: + self._cache.append(next(self._it)) + except StopIteration: + if default is _marker: + raise + return default + return self._cache[0] + + def prepend(self, *items): + """Stack up items to be the next ones returned from ``next()`` or + ``self.peek()``. The items will be returned in + first in, first out order:: + + >>> p = peekable([1, 2, 3]) + >>> p.prepend(10, 11, 12) + >>> next(p) + 10 + >>> list(p) + [11, 12, 1, 2, 3] + + It is possible, by prepending items, to "resurrect" a peekable that + previously raised ``StopIteration``. + + >>> p = peekable([]) + >>> next(p) + Traceback (most recent call last): + ... + StopIteration + >>> p.prepend(1) + >>> next(p) + 1 + >>> next(p) + Traceback (most recent call last): + ... + StopIteration + + """ + self._cache.extendleft(reversed(items)) + + def __next__(self): + if self._cache: + return self._cache.popleft() + + return next(self._it) + + def _get_slice(self, index): + # Normalize the slice's arguments + step = 1 if (index.step is None) else index.step + if step > 0: + start = 0 if (index.start is None) else index.start + stop = maxsize if (index.stop is None) else index.stop + elif step < 0: + start = -1 if (index.start is None) else index.start + stop = (-maxsize - 1) if (index.stop is None) else index.stop + else: + raise ValueError('slice step cannot be zero') + + # If either the start or stop index is negative, we'll need to cache + # the rest of the iterable in order to slice from the right side. + if (start < 0) or (stop < 0): + self._cache.extend(self._it) + # Otherwise we'll need to find the rightmost index and cache to that + # point. 
+ else: + n = min(max(start, stop) + 1, maxsize) + cache_len = len(self._cache) + if n >= cache_len: + self._cache.extend(islice(self._it, n - cache_len)) + + return list(self._cache)[index] + + def __getitem__(self, index): + if isinstance(index, slice): + return self._get_slice(index) + + cache_len = len(self._cache) + if index < 0: + self._cache.extend(self._it) + elif index >= cache_len: + self._cache.extend(islice(self._it, index + 1 - cache_len)) + + return self._cache[index] + + +def collate(*iterables, **kwargs): + """Return a sorted merge of the items from each of several already-sorted + *iterables*. + + >>> list(collate('ACDZ', 'AZ', 'JKL')) + ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z'] + + Works lazily, keeping only the next value from each iterable in memory. Use + :func:`collate` to, for example, perform a n-way mergesort of items that + don't fit in memory. + + If a *key* function is specified, the iterables will be sorted according + to its result: + + >>> key = lambda s: int(s) # Sort by numeric value, not by string + >>> list(collate(['1', '10'], ['2', '11'], key=key)) + ['1', '2', '10', '11'] + + + If the *iterables* are sorted in descending order, set *reverse* to + ``True``: + + >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True)) + [5, 4, 3, 2, 1, 0] + + If the elements of the passed-in iterables are out of order, you might get + unexpected results. + + On Python 3.5+, this function is an alias for :func:`heapq.merge`. + + """ + warnings.warn( + "collate is no longer part of more_itertools, use heapq.merge", + DeprecationWarning, + ) + return merge(*iterables, **kwargs) + + +def consumer(func): + """Decorator that automatically advances a PEP-342-style "reverse iterator" + to its first yield point so you don't have to call ``next()`` on it + manually. + + >>> @consumer + ... def tally(): + ... i = 0 + ... while True: + ... print('Thing number %s is %s.' % (i, (yield))) + ... i += 1 + ... + >>> t = tally() + >>> t.send('red') + Thing number 0 is red. + >>> t.send('fish') + Thing number 1 is fish. + + Without the decorator, you would have to call ``next(t)`` before + ``t.send()`` could be used. + + """ + + @wraps(func) + def wrapper(*args, **kwargs): + gen = func(*args, **kwargs) + next(gen) + return gen + + return wrapper + + +def ilen(iterable): + """Return the number of items in *iterable*. + + >>> ilen(x for x in range(1000000) if x % 3 == 0) + 333334 + + This consumes the iterable, so handle with care. + + """ + # This approach was selected because benchmarks showed it's likely the + # fastest of the known implementations at the time of writing. + # See GitHub tracker: #236, #230. + counter = count() + deque(zip(iterable, counter), maxlen=0) + return next(counter) + + +def iterate(func, start): + """Return ``start``, ``func(start)``, ``func(func(start))``, ... + + >>> from itertools import islice + >>> list(islice(iterate(lambda x: 2*x, 1), 10)) + [1, 2, 4, 8, 16, 32, 64, 128, 256, 512] + + """ + while True: + yield start + start = func(start) + + +def with_iter(context_manager): + """Wrap an iterable in a ``with`` statement, so it closes once exhausted. + + For example, this will close the file when the iterator is exhausted:: + + upper_lines = (line.upper() for line in with_iter(open('foo'))) + + Any context manager which returns an iterable is a candidate for + ``with_iter``. 
+ + """ + with context_manager as iterable: + yield from iterable + + +def one(iterable, too_short=None, too_long=None): + """Return the first item from *iterable*, which is expected to contain only + that item. Raise an exception if *iterable* is empty or has more than one + item. + + :func:`one` is useful for ensuring that an iterable contains only one item. + For example, it can be used to retrieve the result of a database query + that is expected to return a single row. + + If *iterable* is empty, ``ValueError`` will be raised. You may specify a + different exception with the *too_short* keyword: + + >>> it = [] + >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: too many items in iterable (expected 1)' + >>> too_short = IndexError('too few items') + >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + IndexError: too few items + + Similarly, if *iterable* contains more than one item, ``ValueError`` will + be raised. You may specify a different exception with the *too_long* + keyword: + + >>> it = ['too', 'many'] + >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: Expected exactly one item in iterable, but got 'too', + 'many', and perhaps more. + >>> too_long = RuntimeError + >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + RuntimeError + + Note that :func:`one` attempts to advance *iterable* twice to ensure there + is only one item. See :func:`spy` or :func:`peekable` to check iterable + contents less destructively. + + """ + it = iter(iterable) + + try: + first_value = next(it) + except StopIteration as e: + raise ( + too_short or ValueError('too few items in iterable (expected 1)') + ) from e + + try: + second_value = next(it) + except StopIteration: + pass + else: + msg = ( + 'Expected exactly one item in iterable, but got {!r}, {!r}, ' + 'and perhaps more.'.format(first_value, second_value) + ) + raise too_long or ValueError(msg) + + return first_value + + +def distinct_permutations(iterable, r=None): + """Yield successive distinct permutations of the elements in *iterable*. + + >>> sorted(distinct_permutations([1, 0, 1])) + [(0, 1, 1), (1, 0, 1), (1, 1, 0)] + + Equivalent to ``set(permutations(iterable))``, except duplicates are not + generated and thrown away. For larger input sequences this is much more + efficient. + + Duplicate permutations arise when there are duplicated elements in the + input iterable. The number of items returned is + `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of + items input, and each `x_i` is the count of a distinct item in the input + sequence. + + If *r* is given, only the *r*-length permutations are yielded. 
+ + >>> sorted(distinct_permutations([1, 0, 1], r=2)) + [(0, 1), (1, 0), (1, 1)] + >>> sorted(distinct_permutations(range(3), r=2)) + [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + + """ + # Algorithm: https://w.wiki/Qai + def _full(A): + while True: + # Yield the permutation we have + yield tuple(A) + + # Find the largest index i such that A[i] < A[i + 1] + for i in range(size - 2, -1, -1): + if A[i] < A[i + 1]: + break + # If no such index exists, this permutation is the last one + else: + return + + # Find the largest index j greater than j such that A[i] < A[j] + for j in range(size - 1, i, -1): + if A[i] < A[j]: + break + + # Swap the value of A[i] with that of A[j], then reverse the + # sequence from A[i + 1] to form the new permutation + A[i], A[j] = A[j], A[i] + A[i + 1 :] = A[: i - size : -1] # A[i + 1:][::-1] + + # Algorithm: modified from the above + def _partial(A, r): + # Split A into the first r items and the last r items + head, tail = A[:r], A[r:] + right_head_indexes = range(r - 1, -1, -1) + left_tail_indexes = range(len(tail)) + + while True: + # Yield the permutation we have + yield tuple(head) + + # Starting from the right, find the first index of the head with + # value smaller than the maximum value of the tail - call it i. + pivot = tail[-1] + for i in right_head_indexes: + if head[i] < pivot: + break + pivot = head[i] + else: + return + + # Starting from the left, find the first value of the tail + # with a value greater than head[i] and swap. + for j in left_tail_indexes: + if tail[j] > head[i]: + head[i], tail[j] = tail[j], head[i] + break + # If we didn't find one, start from the right and find the first + # index of the head with a value greater than head[i] and swap. + else: + for j in right_head_indexes: + if head[j] > head[i]: + head[i], head[j] = head[j], head[i] + break + + # Reverse head[i + 1:] and swap it with tail[:r - (i + 1)] + tail += head[: i - r : -1] # head[i + 1:][::-1] + i += 1 + head[i:], tail[:] = tail[: r - i], tail[r - i :] + + items = sorted(iterable) + + size = len(items) + if r is None: + r = size + + if 0 < r <= size: + return _full(items) if (r == size) else _partial(items, r) + + return iter(() if r else ((),)) + + +def intersperse(e, iterable, n=1): + """Intersperse filler element *e* among the items in *iterable*, leaving + *n* items between each filler element. + + >>> list(intersperse('!', [1, 2, 3, 4, 5])) + [1, '!', 2, '!', 3, '!', 4, '!', 5] + + >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2)) + [1, 2, None, 3, 4, None, 5] + + """ + if n == 0: + raise ValueError('n must be > 0') + elif n == 1: + # interleave(repeat(e), iterable) -> e, x_0, e, e, x_1, e, x_2... + # islice(..., 1, None) -> x_0, e, e, x_1, e, x_2... + return islice(interleave(repeat(e), iterable), 1, None) + else: + # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]... + # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]... + # flatten(...) -> x_0, x_1, e, x_2, x_3... + filler = repeat([e]) + chunks = chunked(iterable, n) + return flatten(islice(interleave(filler, chunks), 1, None)) + + +def unique_to_each(*iterables): + """Return the elements from each of the input iterables that aren't in the + other input iterables. + + For example, suppose you have a set of packages, each with a set of + dependencies:: + + {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}} + + If you remove one package, which dependencies can also be removed? 
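Stepping back to `distinct_permutations()` above: the count formula `n! / (x_1! * x_2! * ... * x_n!)` from its docstring is easy to check numerically. A small sketch:

```python
# Sketch: 3! / (2! * 1!) = 3 distinct permutations of [1, 0, 1], matching the doctest above.
from math import factorial
from more_itertools import distinct_permutations

items = [1, 0, 1]
expected = factorial(3) // (factorial(2) * factorial(1))
print(expected, sum(1 for _ in distinct_permutations(items)))  # 3 3
```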
+ + If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not + associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for + ``pkg_2``, and ``D`` is only needed for ``pkg_3``:: + + >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'}) + [['A'], ['C'], ['D']] + + If there are duplicates in one input iterable that aren't in the others + they will be duplicated in the output. Input order is preserved:: + + >>> unique_to_each("mississippi", "missouri") + [['p', 'p'], ['o', 'u', 'r']] + + It is assumed that the elements of each iterable are hashable. + + """ + pool = [list(it) for it in iterables] + counts = Counter(chain.from_iterable(map(set, pool))) + uniques = {element for element in counts if counts[element] == 1} + return [list(filter(uniques.__contains__, it)) for it in pool] + + +def windowed(seq, n, fillvalue=None, step=1): + """Return a sliding window of width *n* over the given iterable. + + >>> all_windows = windowed([1, 2, 3, 4, 5], 3) + >>> list(all_windows) + [(1, 2, 3), (2, 3, 4), (3, 4, 5)] + + When the window is larger than the iterable, *fillvalue* is used in place + of missing values: + + >>> list(windowed([1, 2, 3], 4)) + [(1, 2, 3, None)] + + Each window will advance in increments of *step*: + + >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2)) + [(1, 2, 3), (3, 4, 5), (5, 6, '!')] + + To slide into the iterable's items, use :func:`chain` to add filler items + to the left: + + >>> iterable = [1, 2, 3, 4] + >>> n = 3 + >>> padding = [None] * (n - 1) + >>> list(windowed(chain(padding, iterable), 3)) + [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)] + """ + if n < 0: + raise ValueError('n must be >= 0') + if n == 0: + yield tuple() + return + if step < 1: + raise ValueError('step must be >= 1') + + window = deque(maxlen=n) + i = n + for _ in map(window.append, seq): + i -= 1 + if not i: + i = step + yield tuple(window) + + size = len(window) + if size < n: + yield tuple(chain(window, repeat(fillvalue, n - size))) + elif 0 < i < min(step, n): + window += (fillvalue,) * i + yield tuple(window) + + +def substrings(iterable): + """Yield all of the substrings of *iterable*. + + >>> [''.join(s) for s in substrings('more')] + ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more'] + + Note that non-string iterables can also be subdivided. + + >>> list(substrings([0, 1, 2])) + [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)] + + """ + # The length-1 substrings + seq = [] + for item in iter(iterable): + seq.append(item) + yield (item,) + seq = tuple(seq) + item_count = len(seq) + + # And the rest + for n in range(2, item_count + 1): + for i in range(item_count - n + 1): + yield seq[i : i + n] + + +def substrings_indexes(seq, reverse=False): + """Yield all substrings and their positions in *seq* + + The items yielded will be a tuple of the form ``(substr, i, j)``, where + ``substr == seq[i:j]``. + + This function only works for iterables that support slicing, such as + ``str`` objects. + + >>> for item in substrings_indexes('more'): + ... print(item) + ('m', 0, 1) + ('o', 1, 2) + ('r', 2, 3) + ('e', 3, 4) + ('mo', 0, 2) + ('or', 1, 3) + ('re', 2, 4) + ('mor', 0, 3) + ('ore', 1, 4) + ('more', 0, 4) + + Set *reverse* to ``True`` to yield the same items in the opposite order. 
+ + + """ + r = range(1, len(seq) + 1) + if reverse: + r = reversed(r) + return ( + (seq[i : i + L], i, i + L) for L in r for i in range(len(seq) - L + 1) + ) + + +class bucket: + """Wrap *iterable* and return an object that buckets it iterable into + child iterables based on a *key* function. + + >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3'] + >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character + >>> sorted(list(s)) # Get the keys + ['a', 'b', 'c'] + >>> a_iterable = s['a'] + >>> next(a_iterable) + 'a1' + >>> next(a_iterable) + 'a2' + >>> list(s['b']) + ['b1', 'b2', 'b3'] + + The original iterable will be advanced and its items will be cached until + they are used by the child iterables. This may require significant storage. + + By default, attempting to select a bucket to which no items belong will + exhaust the iterable and cache all values. + If you specify a *validator* function, selected buckets will instead be + checked against it. + + >>> from itertools import count + >>> it = count(1, 2) # Infinite sequence of odd numbers + >>> key = lambda x: x % 10 # Bucket by last digit + >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only + >>> s = bucket(it, key=key, validator=validator) + >>> 2 in s + False + >>> list(s[2]) + [] + + """ + + def __init__(self, iterable, key, validator=None): + self._it = iter(iterable) + self._key = key + self._cache = defaultdict(deque) + self._validator = validator or (lambda x: True) + + def __contains__(self, value): + if not self._validator(value): + return False + + try: + item = next(self[value]) + except StopIteration: + return False + else: + self._cache[value].appendleft(item) + + return True + + def _get_values(self, value): + """ + Helper to yield items from the parent iterator that match *value*. + Items that don't match are stored in the local cache as they + are encountered. + """ + while True: + # If we've cached some items that match the target value, emit + # the first one and evict it from the cache. + if self._cache[value]: + yield self._cache[value].popleft() + # Otherwise we need to advance the parent iterator to search for + # a matching item, caching the rest. + else: + while True: + try: + item = next(self._it) + except StopIteration: + return + item_value = self._key(item) + if item_value == value: + yield item + break + elif self._validator(item_value): + self._cache[item_value].append(item) + + def __iter__(self): + for item in self._it: + item_value = self._key(item) + if self._validator(item_value): + self._cache[item_value].append(item) + + yield from self._cache.keys() + + def __getitem__(self, value): + if not self._validator(value): + return iter(()) + + return self._get_values(value) + + +def spy(iterable, n=1): + """Return a 2-tuple with a list containing the first *n* elements of + *iterable*, and an iterator with the same items as *iterable*. + This allows you to "look ahead" at the items in the iterable without + advancing it. 
+ + There is one item in the list by default: + + >>> iterable = 'abcdefg' + >>> head, iterable = spy(iterable) + >>> head + ['a'] + >>> list(iterable) + ['a', 'b', 'c', 'd', 'e', 'f', 'g'] + + You may use unpacking to retrieve items instead of lists: + + >>> (head,), iterable = spy('abcdefg') + >>> head + 'a' + >>> (first, second), iterable = spy('abcdefg', 2) + >>> first + 'a' + >>> second + 'b' + + The number of items requested can be larger than the number of items in + the iterable: + + >>> iterable = [1, 2, 3, 4, 5] + >>> head, iterable = spy(iterable, 10) + >>> head + [1, 2, 3, 4, 5] + >>> list(iterable) + [1, 2, 3, 4, 5] + + """ + it = iter(iterable) + head = take(n, it) + + return head.copy(), chain(head, it) + + +def interleave(*iterables): + """Return a new iterable yielding from each iterable in turn, + until the shortest is exhausted. + + >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8])) + [1, 4, 6, 2, 5, 7] + + For a version that doesn't terminate after the shortest iterable is + exhausted, see :func:`interleave_longest`. + + """ + return chain.from_iterable(zip(*iterables)) + + +def interleave_longest(*iterables): + """Return a new iterable yielding from each iterable in turn, + skipping any that are exhausted. + + >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8])) + [1, 4, 6, 2, 5, 7, 3, 8] + + This function produces the same output as :func:`roundrobin`, but may + perform better for some inputs (in particular when the number of iterables + is large). + + """ + i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker)) + return (x for x in i if x is not _marker) + + +def collapse(iterable, base_type=None, levels=None): + """Flatten an iterable with multiple levels of nesting (e.g., a list of + lists of tuples) into non-iterable types. + + >>> iterable = [(1, 2), ([3, 4], [[5], [6]])] + >>> list(collapse(iterable)) + [1, 2, 3, 4, 5, 6] + + Binary and text strings are not considered iterable and + will not be collapsed. + + To avoid collapsing other types, specify *base_type*: + + >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']] + >>> list(collapse(iterable, base_type=tuple)) + ['ab', ('cd', 'ef'), 'gh', 'ij'] + + Specify *levels* to stop flattening after a certain level: + + >>> iterable = [('a', ['b']), ('c', ['d'])] + >>> list(collapse(iterable)) # Fully flattened + ['a', 'b', 'c', 'd'] + >>> list(collapse(iterable, levels=1)) # Only one level flattened + ['a', ['b'], 'c', ['d']] + + """ + + def walk(node, level): + if ( + ((levels is not None) and (level > levels)) + or isinstance(node, (str, bytes)) + or ((base_type is not None) and isinstance(node, base_type)) + ): + yield node + return + + try: + tree = iter(node) + except TypeError: + yield node + return + else: + for child in tree: + yield from walk(child, level + 1) + + yield from walk(iterable, 0) + + +def side_effect(func, iterable, chunk_size=None, before=None, after=None): + """Invoke *func* on each item in *iterable* (or on each *chunk_size* group + of items) before yielding the item. + + `func` must be a function that takes a single argument. Its return value + will be discarded. + + *before* and *after* are optional functions that take no arguments. They + will be executed before iteration starts and after it ends, respectively. + + `side_effect` can be used for logging, updating progress bars, or anything + that is not functionally "pure." 
+ + Emitting a status message: + + >>> from more_itertools import consume + >>> func = lambda item: print('Received {}'.format(item)) + >>> consume(side_effect(func, range(2))) + Received 0 + Received 1 + + Operating on chunks of items: + + >>> pair_sums = [] + >>> func = lambda chunk: pair_sums.append(sum(chunk)) + >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2)) + [0, 1, 2, 3, 4, 5] + >>> list(pair_sums) + [1, 5, 9] + + Writing to a file-like object: + + >>> from io import StringIO + >>> from more_itertools import consume + >>> f = StringIO() + >>> func = lambda x: print(x, file=f) + >>> before = lambda: print(u'HEADER', file=f) + >>> after = f.close + >>> it = [u'a', u'b', u'c'] + >>> consume(side_effect(func, it, before=before, after=after)) + >>> f.closed + True + + """ + try: + if before is not None: + before() + + if chunk_size is None: + for item in iterable: + func(item) + yield item + else: + for chunk in chunked(iterable, chunk_size): + func(chunk) + yield from chunk + finally: + if after is not None: + after() + + +def sliced(seq, n, strict=False): + """Yield slices of length *n* from the sequence *seq*. + + >>> list(sliced((1, 2, 3, 4, 5, 6), 3)) + [(1, 2, 3), (4, 5, 6)] + + By the default, the last yielded slice will have fewer than *n* elements + if the length of *seq* is not divisible by *n*: + + >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3)) + [(1, 2, 3), (4, 5, 6), (7, 8)] + + If the length of *seq* is not divisible by *n* and *strict* is + ``True``, then ``ValueError`` will be raised before the last + slice is yielded. + + This function will only work for iterables that support slicing. + For non-sliceable iterables, see :func:`chunked`. + + """ + iterator = takewhile(len, (seq[i : i + n] for i in count(0, n))) + if strict: + + def ret(): + for _slice in iterator: + if len(_slice) != n: + raise ValueError("seq is not divisible by n.") + yield _slice + + return iter(ret()) + else: + return iterator + + +def split_at(iterable, pred, maxsplit=-1, keep_separator=False): + """Yield lists of items from *iterable*, where each list is delimited by + an item where callable *pred* returns ``True``. + + >>> list(split_at('abcdcba', lambda x: x == 'b')) + [['a'], ['c', 'd', 'c'], ['a']] + + >>> list(split_at(range(10), lambda n: n % 2 == 1)) + [[0], [2], [4], [6], [8], []] + + At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, + then there is no limit on the number of splits: + + >>> list(split_at(range(10), lambda n: n % 2 == 1, maxsplit=2)) + [[0], [2], [4, 5, 6, 7, 8, 9]] + + By default, the delimiting items are not included in the output. + The include them, set *keep_separator* to ``True``. + + >>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True)) + [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']] + + """ + if maxsplit == 0: + yield list(iterable) + return + + buf = [] + it = iter(iterable) + for item in it: + if pred(item): + yield buf + if keep_separator: + yield [item] + if maxsplit == 1: + yield list(it) + return + buf = [] + maxsplit -= 1 + else: + buf.append(item) + yield buf + + +def split_before(iterable, pred, maxsplit=-1): + """Yield lists of items from *iterable*, where each list ends just before + an item for which callable *pred* returns ``True``: + + >>> list(split_before('OneTwo', lambda s: s.isupper())) + [['O', 'n', 'e'], ['T', 'w', 'o']] + + >>> list(split_before(range(10), lambda n: n % 3 == 0)) + [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] + + At most *maxsplit* splits are done. 
If *maxsplit* is not specified or -1, + then there is no limit on the number of splits: + + >>> list(split_before(range(10), lambda n: n % 3 == 0, maxsplit=2)) + [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]] + """ + if maxsplit == 0: + yield list(iterable) + return + + buf = [] + it = iter(iterable) + for item in it: + if pred(item) and buf: + yield buf + if maxsplit == 1: + yield [item] + list(it) + return + buf = [] + maxsplit -= 1 + buf.append(item) + if buf: + yield buf + + +def split_after(iterable, pred, maxsplit=-1): + """Yield lists of items from *iterable*, where each list ends with an + item where callable *pred* returns ``True``: + + >>> list(split_after('one1two2', lambda s: s.isdigit())) + [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']] + + >>> list(split_after(range(10), lambda n: n % 3 == 0)) + [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]] + + At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, + then there is no limit on the number of splits: + + >>> list(split_after(range(10), lambda n: n % 3 == 0, maxsplit=2)) + [[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]] + + """ + if maxsplit == 0: + yield list(iterable) + return + + buf = [] + it = iter(iterable) + for item in it: + buf.append(item) + if pred(item) and buf: + yield buf + if maxsplit == 1: + yield list(it) + return + buf = [] + maxsplit -= 1 + if buf: + yield buf + + +def split_when(iterable, pred, maxsplit=-1): + """Split *iterable* into pieces based on the output of *pred*. + *pred* should be a function that takes successive pairs of items and + returns ``True`` if the iterable should be split in between them. + + For example, to find runs of increasing numbers, split the iterable when + element ``i`` is larger than element ``i + 1``: + + >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], lambda x, y: x > y)) + [[1, 2, 3, 3], [2, 5], [2, 4], [2]] + + At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, + then there is no limit on the number of splits: + + >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], + ... lambda x, y: x > y, maxsplit=2)) + [[1, 2, 3, 3], [2, 5], [2, 4, 2]] + + """ + if maxsplit == 0: + yield list(iterable) + return + + it = iter(iterable) + try: + cur_item = next(it) + except StopIteration: + return + + buf = [cur_item] + for next_item in it: + if pred(cur_item, next_item): + yield buf + if maxsplit == 1: + yield [next_item] + list(it) + return + buf = [] + maxsplit -= 1 + + buf.append(next_item) + cur_item = next_item + + yield buf + + +def split_into(iterable, sizes): + """Yield a list of sequential items from *iterable* of length 'n' for each + integer 'n' in *sizes*. + + >>> list(split_into([1,2,3,4,5,6], [1,2,3])) + [[1], [2, 3], [4, 5, 6]] + + If the sum of *sizes* is smaller than the length of *iterable*, then the + remaining items of *iterable* will not be returned. + + >>> list(split_into([1,2,3,4,5,6], [2,3])) + [[1, 2], [3, 4, 5]] + + If the sum of *sizes* is larger than the length of *iterable*, fewer items + will be returned in the iteration that overruns *iterable* and further + lists will be empty: + + >>> list(split_into([1,2,3,4], [1,2,3,4])) + [[1], [2, 3], [4], []] + + When a ``None`` object is encountered in *sizes*, the returned list will + contain items up to the end of *iterable* the same way that itertools.slice + does: + + >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None])) + [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]] + + :func:`split_into` can be useful for grouping a series of items where the + sizes of the groups are not uniform. 
An example would be where in a row + from a table, multiple columns represent elements of the same feature + (e.g. a point represented by x,y,z) but, the format is not the same for + all columns. + """ + # convert the iterable argument into an iterator so its contents can + # be consumed by islice in case it is a generator + it = iter(iterable) + + for size in sizes: + if size is None: + yield list(it) + return + else: + yield list(islice(it, size)) + + +def padded(iterable, fillvalue=None, n=None, next_multiple=False): + """Yield the elements from *iterable*, followed by *fillvalue*, such that + at least *n* items are emitted. + + >>> list(padded([1, 2, 3], '?', 5)) + [1, 2, 3, '?', '?'] + + If *next_multiple* is ``True``, *fillvalue* will be emitted until the + number of items emitted is a multiple of *n*:: + + >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True)) + [1, 2, 3, 4, None, None] + + If *n* is ``None``, *fillvalue* will be emitted indefinitely. + + """ + it = iter(iterable) + if n is None: + yield from chain(it, repeat(fillvalue)) + elif n < 1: + raise ValueError('n must be at least 1') + else: + item_count = 0 + for item in it: + yield item + item_count += 1 + + remaining = (n - item_count) % n if next_multiple else n - item_count + for _ in range(remaining): + yield fillvalue + + +def repeat_last(iterable, default=None): + """After the *iterable* is exhausted, keep yielding its last element. + + >>> list(islice(repeat_last(range(3)), 5)) + [0, 1, 2, 2, 2] + + If the iterable is empty, yield *default* forever:: + + >>> list(islice(repeat_last(range(0), 42), 5)) + [42, 42, 42, 42, 42] + + """ + item = _marker + for item in iterable: + yield item + final = default if item is _marker else item + yield from repeat(final) + + +def distribute(n, iterable): + """Distribute the items from *iterable* among *n* smaller iterables. + + >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6]) + >>> list(group_1) + [1, 3, 5] + >>> list(group_2) + [2, 4, 6] + + If the length of *iterable* is not evenly divisible by *n*, then the + length of the returned iterables will not be identical: + + >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7]) + >>> [list(c) for c in children] + [[1, 4, 7], [2, 5], [3, 6]] + + If the length of *iterable* is smaller than *n*, then the last returned + iterables will be empty: + + >>> children = distribute(5, [1, 2, 3]) + >>> [list(c) for c in children] + [[1], [2], [3], [], []] + + This function uses :func:`itertools.tee` and may require significant + storage. If you need the order items in the smaller iterables to match the + original iterable, see :func:`divide`. + + """ + if n < 1: + raise ValueError('n must be at least 1') + + children = tee(iterable, n) + return [islice(it, index, None, n) for index, it in enumerate(children)] + + +def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None): + """Yield tuples whose elements are offset from *iterable*. + The amount by which the `i`-th item in each tuple is offset is given by + the `i`-th item in *offsets*. + + >>> list(stagger([0, 1, 2, 3])) + [(None, 0, 1), (0, 1, 2), (1, 2, 3)] + >>> list(stagger(range(8), offsets=(0, 2, 4))) + [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)] + + By default, the sequence will end when the final element of a tuple is the + last item in the iterable. 
To continue until the first element of a tuple + is the last item in the iterable, set *longest* to ``True``:: + + >>> list(stagger([0, 1, 2, 3], longest=True)) + [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)] + + By default, ``None`` will be used to replace offsets beyond the end of the + sequence. Specify *fillvalue* to use some other value. + + """ + children = tee(iterable, len(offsets)) + + return zip_offset( + *children, offsets=offsets, longest=longest, fillvalue=fillvalue + ) + + +class UnequalIterablesError(ValueError): + def __init__(self, details=None): + msg = 'Iterables have different lengths' + if details is not None: + msg += (': index 0 has length {}; index {} has length {}').format( + *details + ) + + super().__init__(msg) + + +def _zip_equal_generator(iterables): + for combo in zip_longest(*iterables, fillvalue=_marker): + for val in combo: + if val is _marker: + raise UnequalIterablesError() + yield combo + + +def zip_equal(*iterables): + """``zip`` the input *iterables* together, but raise + ``UnequalIterablesError`` if they aren't all the same length. + + >>> it_1 = range(3) + >>> it_2 = iter('abc') + >>> list(zip_equal(it_1, it_2)) + [(0, 'a'), (1, 'b'), (2, 'c')] + + >>> it_1 = range(3) + >>> it_2 = iter('abcd') + >>> list(zip_equal(it_1, it_2)) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + more_itertools.more.UnequalIterablesError: Iterables have different + lengths + + """ + if hexversion >= 0x30A00A6: + warnings.warn( + ( + 'zip_equal will be removed in a future version of ' + 'more-itertools. Use the builtin zip function with ' + 'strict=True instead.' + ), + DeprecationWarning, + ) + # Check whether the iterables are all the same size. + try: + first_size = len(iterables[0]) + for i, it in enumerate(iterables[1:], 1): + size = len(it) + if size != first_size: + break + else: + # If we didn't break out, we can use the built-in zip. + return zip(*iterables) + + # If we did break out, there was a mismatch. + raise UnequalIterablesError(details=(first_size, i, size)) + # If any one of the iterables didn't have a length, start reading + # them until one runs out. + except TypeError: + return _zip_equal_generator(iterables) + + +def zip_offset(*iterables, offsets, longest=False, fillvalue=None): + """``zip`` the input *iterables* together, but offset the `i`-th iterable + by the `i`-th item in *offsets*. + + >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1))) + [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')] + + This can be used as a lightweight alternative to SciPy or pandas to analyze + data sets in which some series have a lead or lag relationship. + + By default, the sequence will end when the shortest iterable is exhausted. + To continue until the longest iterable is exhausted, set *longest* to + ``True``. + + >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True)) + [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')] + + By default, ``None`` will be used to replace offsets beyond the end of the + sequence. Specify *fillvalue* to use some other value. 
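One note on `zip_equal()` above: its deprecation warning (emitted on Python 3.10 and later) points to the builtin replacement. A sketch of that replacement with made-up data; note that the keyword is rejected on this project's Python 3.6:

```python
# Sketch: on Python 3.10+, zip(..., strict=True) covers zip_equal()'s job.
names = ["a", "b", "c"]
scores = [1, 2]
try:
    pairs = list(zip(names, scores, strict=True))  # TypeError on Python < 3.10
except (ValueError, TypeError) as exc:
    print(exc)  # e.g. "zip() argument 2 is shorter than argument 1"
```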
+ + """ + if len(iterables) != len(offsets): + raise ValueError("Number of iterables and offsets didn't match") + + staggered = [] + for it, n in zip(iterables, offsets): + if n < 0: + staggered.append(chain(repeat(fillvalue, -n), it)) + elif n > 0: + staggered.append(islice(it, n, None)) + else: + staggered.append(it) + + if longest: + return zip_longest(*staggered, fillvalue=fillvalue) + + return zip(*staggered) + + +def sort_together(iterables, key_list=(0,), key=None, reverse=False): + """Return the input iterables sorted together, with *key_list* as the + priority for sorting. All iterables are trimmed to the length of the + shortest one. + + This can be used like the sorting function in a spreadsheet. If each + iterable represents a column of data, the key list determines which + columns are used for sorting. + + By default, all iterables are sorted using the ``0``-th iterable:: + + >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')] + >>> sort_together(iterables) + [(1, 2, 3, 4), ('d', 'c', 'b', 'a')] + + Set a different key list to sort according to another iterable. + Specifying multiple keys dictates how ties are broken:: + + >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')] + >>> sort_together(iterables, key_list=(1, 2)) + [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')] + + To sort by a function of the elements of the iterable, pass a *key* + function. Its arguments are the elements of the iterables corresponding to + the key list:: + + >>> names = ('a', 'b', 'c') + >>> lengths = (1, 2, 3) + >>> widths = (5, 2, 1) + >>> def area(length, width): + ... return length * width + >>> sort_together([names, lengths, widths], key_list=(1, 2), key=area) + [('c', 'b', 'a'), (3, 2, 1), (1, 2, 5)] + + Set *reverse* to ``True`` to sort in descending order. + + >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True) + [(3, 2, 1), ('a', 'b', 'c')] + + """ + if key is None: + # if there is no key function, the key argument to sorted is an + # itemgetter + key_argument = itemgetter(*key_list) + else: + # if there is a key function, call it with the items at the offsets + # specified by the key function as arguments + key_list = list(key_list) + if len(key_list) == 1: + # if key_list contains a single item, pass the item at that offset + # as the only argument to the key function + key_offset = key_list[0] + key_argument = lambda zipped_items: key(zipped_items[key_offset]) + else: + # if key_list contains multiple items, use itemgetter to return a + # tuple of items, which we pass as *args to the key function + get_key_items = itemgetter(*key_list) + key_argument = lambda zipped_items: key( + *get_key_items(zipped_items) + ) + + return list( + zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse)) + ) + + +def unzip(iterable): + """The inverse of :func:`zip`, this function disaggregates the elements + of the zipped *iterable*. + + The ``i``-th iterable contains the ``i``-th element from each element + of the zipped iterable. The first element is used to to determine the + length of the remaining elements. + + >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] + >>> letters, numbers = unzip(iterable) + >>> list(letters) + ['a', 'b', 'c', 'd'] + >>> list(numbers) + [1, 2, 3, 4] + + This is similar to using ``zip(*iterable)``, but it avoids reading + *iterable* into memory. Note, however, that this function uses + :func:`itertools.tee` and thus may require significant storage. + + """ + head, iterable = spy(iter(iterable)) + if not head: + # empty iterable, e.g. 
zip([], [], []) + return () + # spy returns a one-length iterable as head + head = head[0] + iterables = tee(iterable, len(head)) + + def itemgetter(i): + def getter(obj): + try: + return obj[i] + except IndexError: + # basically if we have an iterable like + # iter([(1, 2, 3), (4, 5), (6,)]) + # the second unzipped iterable would fail at the third tuple + # since it would try to access tup[1] + # same with the third unzipped iterable and the second tuple + # to support these "improperly zipped" iterables, + # we create a custom itemgetter + # which just stops the unzipped iterables + # at first length mismatch + raise StopIteration + + return getter + + return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables)) + + +def divide(n, iterable): + """Divide the elements from *iterable* into *n* parts, maintaining + order. + + >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6]) + >>> list(group_1) + [1, 2, 3] + >>> list(group_2) + [4, 5, 6] + + If the length of *iterable* is not evenly divisible by *n*, then the + length of the returned iterables will not be identical: + + >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7]) + >>> [list(c) for c in children] + [[1, 2, 3], [4, 5], [6, 7]] + + If the length of the iterable is smaller than n, then the last returned + iterables will be empty: + + >>> children = divide(5, [1, 2, 3]) + >>> [list(c) for c in children] + [[1], [2], [3], [], []] + + This function will exhaust the iterable before returning and may require + significant storage. If order is not important, see :func:`distribute`, + which does not first pull the iterable into memory. + + """ + if n < 1: + raise ValueError('n must be at least 1') + + try: + iterable[:0] + except TypeError: + seq = tuple(iterable) + else: + seq = iterable + + q, r = divmod(len(seq), n) + + ret = [] + stop = 0 + for i in range(1, n + 1): + start = stop + stop += q + 1 if i <= r else q + ret.append(iter(seq[start:stop])) + + return ret + + +def always_iterable(obj, base_type=(str, bytes)): + """If *obj* is iterable, return an iterator over its items:: + + >>> obj = (1, 2, 3) + >>> list(always_iterable(obj)) + [1, 2, 3] + + If *obj* is not iterable, return a one-item iterable containing *obj*:: + + >>> obj = 1 + >>> list(always_iterable(obj)) + [1] + + If *obj* is ``None``, return an empty iterable: + + >>> obj = None + >>> list(always_iterable(None)) + [] + + By default, binary and text strings are not considered iterable:: + + >>> obj = 'foo' + >>> list(always_iterable(obj)) + ['foo'] + + If *base_type* is set, objects for which ``isinstance(obj, base_type)`` + returns ``True`` won't be considered iterable. + + >>> obj = {'a': 1} + >>> list(always_iterable(obj)) # Iterate over the dict's keys + ['a'] + >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit + [{'a': 1}] + + Set *base_type* to ``None`` to avoid any special handling and treat objects + Python considers iterable as iterable: + + >>> obj = 'foo' + >>> list(always_iterable(obj, base_type=None)) + ['f', 'o', 'o'] + """ + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) + + +def adjacent(predicate, iterable, distance=1): + """Return an iterable over `(bool, item)` tuples where the `item` is + drawn from *iterable* and the `bool` indicates whether + that item satisfies the *predicate* or is adjacent to an item that does. 
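The hand-rolled `itemgetter` inside `unzip()` above exists to stop short on "improperly zipped" input, a case none of the doctests cover. A small sketch (the `adjacent` docstring resumes below):

```python
# Sketch: unzip() quietly truncates the shorter "columns" of ragged input.
from more_itertools import unzip

first, second, third = unzip(iter([(1, 2, 3), (4, 5), (6,)]))
print(list(first))   # [1, 4, 6]
print(list(second))  # [2, 5] -- stops at the first missing element
print(list(third))   # [3]
```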
+ + For example, to find whether items are adjacent to a ``3``:: + + >>> list(adjacent(lambda x: x == 3, range(6))) + [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)] + + Set *distance* to change what counts as adjacent. For example, to find + whether items are two places away from a ``3``: + + >>> list(adjacent(lambda x: x == 3, range(6), distance=2)) + [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)] + + This is useful for contextualizing the results of a search function. + For example, a code comparison tool might want to identify lines that + have changed, but also surrounding lines to give the viewer of the diff + context. + + The predicate function will only be called once for each item in the + iterable. + + See also :func:`groupby_transform`, which can be used with this function + to group ranges of items with the same `bool` value. + + """ + # Allow distance=0 mainly for testing that it reproduces results with map() + if distance < 0: + raise ValueError('distance must be at least 0') + + i1, i2 = tee(iterable) + padding = [False] * distance + selected = chain(padding, map(predicate, i1), padding) + adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1)) + return zip(adjacent_to_selected, i2) + + +def groupby_transform(iterable, keyfunc=None, valuefunc=None, reducefunc=None): + """An extension of :func:`itertools.groupby` that can apply transformations + to the grouped data. + + * *keyfunc* is a function computing a key value for each item in *iterable* + * *valuefunc* is a function that transforms the individual items from + *iterable* after grouping + * *reducefunc* is a function that transforms each group of items + + >>> iterable = 'aAAbBBcCC' + >>> keyfunc = lambda k: k.upper() + >>> valuefunc = lambda v: v.lower() + >>> reducefunc = lambda g: ''.join(g) + >>> list(groupby_transform(iterable, keyfunc, valuefunc, reducefunc)) + [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')] + + Each optional argument defaults to an identity function if not specified. + + :func:`groupby_transform` is useful when grouping elements of an iterable + using a separate iterable as the key. To do this, :func:`zip` the iterables + and pass a *keyfunc* that extracts the first element and a *valuefunc* + that extracts the second element:: + + >>> from operator import itemgetter + >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3] + >>> values = 'abcdefghi' + >>> iterable = zip(keys, values) + >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1)) + >>> [(k, ''.join(g)) for k, g in grouper] + [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')] + + Note that the order of items in the iterable is significant. + Only adjacent items are grouped together, so if you don't want any + duplicate groups, you should sort the iterable by the key function. + + """ + ret = groupby(iterable, keyfunc) + if valuefunc: + ret = ((k, map(valuefunc, g)) for k, g in ret) + if reducefunc: + ret = ((k, reducefunc(g)) for k, g in ret) + + return ret + + +class numeric_range(abc.Sequence, abc.Hashable): + """An extension of the built-in ``range()`` function whose arguments can + be any orderable numeric type. + + With only *stop* specified, *start* defaults to ``0`` and *step* + defaults to ``1``. The output items will match the type of *stop*: + + >>> list(numeric_range(3.5)) + [0.0, 1.0, 2.0, 3.0] + + With only *start* and *stop* specified, *step* defaults to ``1``. 
The + output items will match the type of *start*: + + >>> from decimal import Decimal + >>> start = Decimal('2.1') + >>> stop = Decimal('5.1') + >>> list(numeric_range(start, stop)) + [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')] + + With *start*, *stop*, and *step* specified the output items will match + the type of ``start + step``: + + >>> from fractions import Fraction + >>> start = Fraction(1, 2) # Start at 1/2 + >>> stop = Fraction(5, 2) # End at 5/2 + >>> step = Fraction(1, 2) # Count by 1/2 + >>> list(numeric_range(start, stop, step)) + [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)] + + If *step* is zero, ``ValueError`` is raised. Negative steps are supported: + + >>> list(numeric_range(3, -1, -1.0)) + [3.0, 2.0, 1.0, 0.0] + + Be aware of the limitations of floating point numbers; the representation + of the yielded numbers may be surprising. + + ``datetime.datetime`` objects can be used for *start* and *stop*, if *step* + is a ``datetime.timedelta`` object: + + >>> import datetime + >>> start = datetime.datetime(2019, 1, 1) + >>> stop = datetime.datetime(2019, 1, 3) + >>> step = datetime.timedelta(days=1) + >>> items = iter(numeric_range(start, stop, step)) + >>> next(items) + datetime.datetime(2019, 1, 1, 0, 0) + >>> next(items) + datetime.datetime(2019, 1, 2, 0, 0) + + """ + + _EMPTY_HASH = hash(range(0, 0)) + + def __init__(self, *args): + argc = len(args) + if argc == 1: + (self._stop,) = args + self._start = type(self._stop)(0) + self._step = type(self._stop - self._start)(1) + elif argc == 2: + self._start, self._stop = args + self._step = type(self._stop - self._start)(1) + elif argc == 3: + self._start, self._stop, self._step = args + elif argc == 0: + raise TypeError( + 'numeric_range expected at least ' + '1 argument, got {}'.format(argc) + ) + else: + raise TypeError( + 'numeric_range expected at most ' + '3 arguments, got {}'.format(argc) + ) + + self._zero = type(self._step)(0) + if self._step == self._zero: + raise ValueError('numeric_range() arg 3 must not be zero') + self._growing = self._step > self._zero + self._init_len() + + def __bool__(self): + if self._growing: + return self._start < self._stop + else: + return self._start > self._stop + + def __contains__(self, elem): + if self._growing: + if self._start <= elem < self._stop: + return (elem - self._start) % self._step == self._zero + else: + if self._start >= elem > self._stop: + return (self._start - elem) % (-self._step) == self._zero + + return False + + def __eq__(self, other): + if isinstance(other, numeric_range): + empty_self = not bool(self) + empty_other = not bool(other) + if empty_self or empty_other: + return empty_self and empty_other # True if both empty + else: + return ( + self._start == other._start + and self._step == other._step + and self._get_by_index(-1) == other._get_by_index(-1) + ) + else: + return False + + def __getitem__(self, key): + if isinstance(key, int): + return self._get_by_index(key) + elif isinstance(key, slice): + step = self._step if key.step is None else key.step * self._step + + if key.start is None or key.start <= -self._len: + start = self._start + elif key.start >= self._len: + start = self._stop + else: # -self._len < key.start < self._len + start = self._get_by_index(key.start) + + if key.stop is None or key.stop >= self._len: + stop = self._stop + elif key.stop <= -self._len: + stop = self._start + else: # -self._len < key.stop < self._len + stop = self._get_by_index(key.stop) + + return numeric_range(start, stop, step) + else: + raise 
TypeError( + 'numeric range indices must be ' + 'integers or slices, not {}'.format(type(key).__name__) + ) + + def __hash__(self): + if self: + return hash((self._start, self._get_by_index(-1), self._step)) + else: + return self._EMPTY_HASH + + def __iter__(self): + values = (self._start + (n * self._step) for n in count()) + if self._growing: + return takewhile(partial(gt, self._stop), values) + else: + return takewhile(partial(lt, self._stop), values) + + def __len__(self): + return self._len + + def _init_len(self): + if self._growing: + start = self._start + stop = self._stop + step = self._step + else: + start = self._stop + stop = self._start + step = -self._step + distance = stop - start + if distance <= self._zero: + self._len = 0 + else: # distance > 0 and step > 0: regular euclidean division + q, r = divmod(distance, step) + self._len = int(q) + int(r != self._zero) + + def __reduce__(self): + return numeric_range, (self._start, self._stop, self._step) + + def __repr__(self): + if self._step == 1: + return "numeric_range({}, {})".format( + repr(self._start), repr(self._stop) + ) + else: + return "numeric_range({}, {}, {})".format( + repr(self._start), repr(self._stop), repr(self._step) + ) + + def __reversed__(self): + return iter( + numeric_range( + self._get_by_index(-1), self._start - self._step, -self._step + ) + ) + + def count(self, value): + return int(value in self) + + def index(self, value): + if self._growing: + if self._start <= value < self._stop: + q, r = divmod(value - self._start, self._step) + if r == self._zero: + return int(q) + else: + if self._start >= value > self._stop: + q, r = divmod(self._start - value, -self._step) + if r == self._zero: + return int(q) + + raise ValueError("{} is not in numeric range".format(value)) + + def _get_by_index(self, i): + if i < 0: + i += self._len + if i < 0 or i >= self._len: + raise IndexError("numeric range object index out of range") + return self._start + i * self._step + + +def count_cycle(iterable, n=None): + """Cycle through the items from *iterable* up to *n* times, yielding + the number of completed cycles along with each item. If *n* is omitted the + process repeats indefinitely. + + >>> list(count_cycle('AB', 3)) + [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')] + + """ + iterable = tuple(iterable) + if not iterable: + return iter(()) + counter = count() if n is None else range(n) + return ((i, item) for i in counter for item in iterable) + + +def mark_ends(iterable): + """Yield 3-tuples of the form ``(is_first, is_last, item)``. + + >>> list(mark_ends('ABC')) + [(True, False, 'A'), (False, False, 'B'), (False, True, 'C')] + + Use this when looping over an iterable to take special action on its first + and/or last items: + + >>> iterable = ['Header', 100, 200, 'Footer'] + >>> total = 0 + >>> for is_first, is_last, item in mark_ends(iterable): + ... if is_first: + ... continue # Skip the header + ... if is_last: + ... continue # Skip the footer + ... total += item + >>> print(total) + 300 + """ + it = iter(iterable) + + try: + b = next(it) + except StopIteration: + return + + try: + for i in count(): + a = b + b = next(it) + yield i == 0, False, a + + except StopIteration: + yield i == 0, True, a + + +def locate(iterable, pred=bool, window_size=None): + """Yield the index of each item in *iterable* for which *pred* returns + ``True``. 
+ + *pred* defaults to :func:`bool`, which will select truthy items: + + >>> list(locate([0, 1, 1, 0, 1, 0, 0])) + [1, 2, 4] + + Set *pred* to a custom function to, e.g., find the indexes for a particular + item. + + >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b')) + [1, 3] + + If *window_size* is given, then the *pred* function will be called with + that many items. This enables searching for sub-sequences: + + >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3] + >>> pred = lambda *args: args == (1, 2, 3) + >>> list(locate(iterable, pred=pred, window_size=3)) + [1, 5, 9] + + Use with :func:`seekable` to find indexes and then retrieve the associated + items: + + >>> from itertools import count + >>> from more_itertools import seekable + >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count()) + >>> it = seekable(source) + >>> pred = lambda x: x > 100 + >>> indexes = locate(it, pred=pred) + >>> i = next(indexes) + >>> it.seek(i) + >>> next(it) + 106 + + """ + if window_size is None: + return compress(count(), map(pred, iterable)) + + if window_size < 1: + raise ValueError('window size must be at least 1') + + it = windowed(iterable, window_size, fillvalue=_marker) + return compress(count(), starmap(pred, it)) + + +def lstrip(iterable, pred): + """Yield the items from *iterable*, but strip any from the beginning + for which *pred* returns ``True``. + + For example, to remove a set of items from the start of an iterable: + + >>> iterable = (None, False, None, 1, 2, None, 3, False, None) + >>> pred = lambda x: x in {None, False, ''} + >>> list(lstrip(iterable, pred)) + [1, 2, None, 3, False, None] + + This function is analogous to to :func:`str.lstrip`, and is essentially + an wrapper for :func:`itertools.dropwhile`. + + """ + return dropwhile(pred, iterable) + + +def rstrip(iterable, pred): + """Yield the items from *iterable*, but strip any from the end + for which *pred* returns ``True``. + + For example, to remove a set of items from the end of an iterable: + + >>> iterable = (None, False, None, 1, 2, None, 3, False, None) + >>> pred = lambda x: x in {None, False, ''} + >>> list(rstrip(iterable, pred)) + [None, False, None, 1, 2, None, 3] + + This function is analogous to :func:`str.rstrip`. + + """ + cache = [] + cache_append = cache.append + cache_clear = cache.clear + for x in iterable: + if pred(x): + cache_append(x) + else: + yield from cache + cache_clear() + yield x + + +def strip(iterable, pred): + """Yield the items from *iterable*, but strip any from the + beginning and end for which *pred* returns ``True``. + + For example, to remove a set of items from both ends of an iterable: + + >>> iterable = (None, False, None, 1, 2, None, 3, False, None) + >>> pred = lambda x: x in {None, False, ''} + >>> list(strip(iterable, pred)) + [1, 2, None, 3] + + This function is analogous to :func:`str.strip`. + + """ + return rstrip(lstrip(iterable, pred), pred) + + +class islice_extended: + """An extension of :func:`itertools.islice` that supports negative values + for *stop*, *start*, and *step*. + + >>> iterable = iter('abcdefgh') + >>> list(islice_extended(iterable, -4, -1)) + ['e', 'f', 'g'] + + Slices with negative values require some caching of *iterable*, but this + function takes care to minimize the amount of memory required. 
+ + For example, you can use a negative step with an infinite iterator: + + >>> from itertools import count + >>> list(islice_extended(count(), 110, 99, -2)) + [110, 108, 106, 104, 102, 100] + + You can also use slice notation directly: + + >>> iterable = map(str, count()) + >>> it = islice_extended(iterable)[10:20:2] + >>> list(it) + ['10', '12', '14', '16', '18'] + + """ + + def __init__(self, iterable, *args): + it = iter(iterable) + if args: + self._iterable = _islice_helper(it, slice(*args)) + else: + self._iterable = it + + def __iter__(self): + return self + + def __next__(self): + return next(self._iterable) + + def __getitem__(self, key): + if isinstance(key, slice): + return islice_extended(_islice_helper(self._iterable, key)) + + raise TypeError('islice_extended.__getitem__ argument must be a slice') + + +def _islice_helper(it, s): + start = s.start + stop = s.stop + if s.step == 0: + raise ValueError('step argument must be a non-zero integer or None.') + step = s.step or 1 + + if step > 0: + start = 0 if (start is None) else start + + if start < 0: + # Consume all but the last -start items + cache = deque(enumerate(it, 1), maxlen=-start) + len_iter = cache[-1][0] if cache else 0 + + # Adjust start to be positive + i = max(len_iter + start, 0) + + # Adjust stop to be positive + if stop is None: + j = len_iter + elif stop >= 0: + j = min(stop, len_iter) + else: + j = max(len_iter + stop, 0) + + # Slice the cache + n = j - i + if n <= 0: + return + + for index, item in islice(cache, 0, n, step): + yield item + elif (stop is not None) and (stop < 0): + # Advance to the start position + next(islice(it, start, start), None) + + # When stop is negative, we have to carry -stop items while + # iterating + cache = deque(islice(it, -stop), maxlen=-stop) + + for index, item in enumerate(it): + cached_item = cache.popleft() + if index % step == 0: + yield cached_item + cache.append(item) + else: + # When both start and stop are positive we have the normal case + yield from islice(it, start, stop, step) + else: + start = -1 if (start is None) else start + + if (stop is not None) and (stop < 0): + # Consume all but the last items + n = -stop - 1 + cache = deque(enumerate(it, 1), maxlen=n) + len_iter = cache[-1][0] if cache else 0 + + # If start and stop are both negative they are comparable and + # we can just slice. Otherwise we can adjust start to be negative + # and then slice. + if start < 0: + i, j = start, stop + else: + i, j = min(start - len_iter, -1), None + + for index, item in list(cache)[i:j:step]: + yield item + else: + # Advance to the stop position + if stop is not None: + m = stop + 1 + next(islice(it, m, m), None) + + # stop is positive, so if start is negative they are not comparable + # and we need the rest of the items. + if start < 0: + i = start + n = None + # stop is None and start is positive, so we just need items up to + # the start index. + elif stop is None: + i = None + n = start + 1 + # Both stop and start are positive, so they are comparable. + else: + i = None + n = start - stop + if n <= 0: + return + + cache = list(islice(it, n)) + + yield from cache[i::step] + + +def always_reversible(iterable): + """An extension of :func:`reversed` that supports all iterables, not + just those which implement the ``Reversible`` or ``Sequence`` protocols. + + >>> print(*always_reversible(x for x in range(3))) + 2 1 0 + + If the iterable is already reversible, this function returns the + result of :func:`reversed()`. 
If the iterable is not reversible, + this function will cache the remaining items in the iterable and + yield them in reverse order, which may require significant storage. + """ + try: + return reversed(iterable) + except TypeError: + return reversed(list(iterable)) + + +def consecutive_groups(iterable, ordering=lambda x: x): + """Yield groups of consecutive items using :func:`itertools.groupby`. + The *ordering* function determines whether two items are adjacent by + returning their position. + + By default, the ordering function is the identity function. This is + suitable for finding runs of numbers: + + >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40] + >>> for group in consecutive_groups(iterable): + ... print(list(group)) + [1] + [10, 11, 12] + [20] + [30, 31, 32, 33] + [40] + + For finding runs of adjacent letters, try using the :meth:`index` method + of a string of letters: + + >>> from string import ascii_lowercase + >>> iterable = 'abcdfgilmnop' + >>> ordering = ascii_lowercase.index + >>> for group in consecutive_groups(iterable, ordering): + ... print(list(group)) + ['a', 'b', 'c', 'd'] + ['f', 'g'] + ['i'] + ['l', 'm', 'n', 'o', 'p'] + + Each group of consecutive items is an iterator that shares it source with + *iterable*. When an an output group is advanced, the previous group is + no longer available unless its elements are copied (e.g., into a ``list``). + + >>> iterable = [1, 2, 11, 12, 21, 22] + >>> saved_groups = [] + >>> for group in consecutive_groups(iterable): + ... saved_groups.append(list(group)) # Copy group elements + >>> saved_groups + [[1, 2], [11, 12], [21, 22]] + + """ + for k, g in groupby( + enumerate(iterable), key=lambda x: x[0] - ordering(x[1]) + ): + yield map(itemgetter(1), g) + + +def difference(iterable, func=sub, *, initial=None): + """This function is the inverse of :func:`itertools.accumulate`. By default + it will compute the first difference of *iterable* using + :func:`operator.sub`: + + >>> from itertools import accumulate + >>> iterable = accumulate([0, 1, 2, 3, 4]) # produces 0, 1, 3, 6, 10 + >>> list(difference(iterable)) + [0, 1, 2, 3, 4] + + *func* defaults to :func:`operator.sub`, but other functions can be + specified. They will be applied as follows:: + + A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ... + + For example, to do progressive division: + + >>> iterable = [1, 2, 6, 24, 120] + >>> func = lambda x, y: x // y + >>> list(difference(iterable, func)) + [1, 2, 3, 4, 5] + + If the *initial* keyword is set, the first element will be skipped when + computing successive differences. + + >>> it = [10, 11, 13, 16] # from accumulate([1, 2, 3], initial=10) + >>> list(difference(it, initial=10)) + [1, 2, 3] + + """ + a, b = tee(iterable) + try: + first = [next(b)] + except StopIteration: + return iter([]) + + if initial is not None: + first = [] + + return chain(first, starmap(func, zip(b, a))) + + +class SequenceView(Sequence): + """Return a read-only view of the sequence object *target*. + + :class:`SequenceView` objects are analogous to Python's built-in + "dictionary view" types. They provide a dynamic view of a sequence's items, + meaning that when the sequence updates, so does the view. + + >>> seq = ['0', '1', '2'] + >>> view = SequenceView(seq) + >>> view + SequenceView(['0', '1', '2']) + >>> seq.append('3') + >>> view + SequenceView(['0', '1', '2', '3']) + + Sequence views support indexing, slicing, and length queries. 
They act + like the underlying sequence, except they don't allow assignment: + + >>> view[1] + '1' + >>> view[1:-1] + ['1', '2'] + >>> len(view) + 4 + + Sequence views are useful as an alternative to copying, as they don't + require (much) extra storage. + + """ + + def __init__(self, target): + if not isinstance(target, Sequence): + raise TypeError + self._target = target + + def __getitem__(self, index): + return self._target[index] + + def __len__(self): + return len(self._target) + + def __repr__(self): + return '{}({})'.format(self.__class__.__name__, repr(self._target)) + + +class seekable: + """Wrap an iterator to allow for seeking backward and forward. This + progressively caches the items in the source iterable so they can be + re-visited. + + Call :meth:`seek` with an index to seek to that position in the source + iterable. + + To "reset" an iterator, seek to ``0``: + + >>> from itertools import count + >>> it = seekable((str(n) for n in count())) + >>> next(it), next(it), next(it) + ('0', '1', '2') + >>> it.seek(0) + >>> next(it), next(it), next(it) + ('0', '1', '2') + >>> next(it) + '3' + + You can also seek forward: + + >>> it = seekable((str(n) for n in range(20))) + >>> it.seek(10) + >>> next(it) + '10' + >>> it.seek(20) # Seeking past the end of the source isn't a problem + >>> list(it) + [] + >>> it.seek(0) # Resetting works even after hitting the end + >>> next(it), next(it), next(it) + ('0', '1', '2') + + Call :meth:`peek` to look ahead one item without advancing the iterator: + + >>> it = seekable('1234') + >>> it.peek() + '1' + >>> list(it) + ['1', '2', '3', '4'] + >>> it.peek(default='empty') + 'empty' + + Before the iterator is at its end, calling :func:`bool` on it will return + ``True``. After it will return ``False``: + + >>> it = seekable('5678') + >>> bool(it) + True + >>> list(it) + ['5', '6', '7', '8'] + >>> bool(it) + False + + You may view the contents of the cache with the :meth:`elements` method. + That returns a :class:`SequenceView`, a view that updates automatically: + + >>> it = seekable((str(n) for n in range(10))) + >>> next(it), next(it), next(it) + ('0', '1', '2') + >>> elements = it.elements() + >>> elements + SequenceView(['0', '1', '2']) + >>> next(it) + '3' + >>> elements + SequenceView(['0', '1', '2', '3']) + + By default, the cache grows as the source iterable progresses, so beware of + wrapping very large or infinite iterables. Supply *maxlen* to limit the + size of the cache (this of course limits how far back you can seek). 
+ + >>> from itertools import count + >>> it = seekable((str(n) for n in count()), maxlen=2) + >>> next(it), next(it), next(it), next(it) + ('0', '1', '2', '3') + >>> list(it.elements()) + ['2', '3'] + >>> it.seek(0) + >>> next(it), next(it), next(it), next(it) + ('2', '3', '4', '5') + >>> next(it) + '6' + + """ + + def __init__(self, iterable, maxlen=None): + self._source = iter(iterable) + if maxlen is None: + self._cache = [] + else: + self._cache = deque([], maxlen) + self._index = None + + def __iter__(self): + return self + + def __next__(self): + if self._index is not None: + try: + item = self._cache[self._index] + except IndexError: + self._index = None + else: + self._index += 1 + return item + + item = next(self._source) + self._cache.append(item) + return item + + def __bool__(self): + try: + self.peek() + except StopIteration: + return False + return True + + def peek(self, default=_marker): + try: + peeked = next(self) + except StopIteration: + if default is _marker: + raise + return default + if self._index is None: + self._index = len(self._cache) + self._index -= 1 + return peeked + + def elements(self): + return SequenceView(self._cache) + + def seek(self, index): + self._index = index + remainder = index - len(self._cache) + if remainder > 0: + consume(self, remainder) + + +class run_length: + """ + :func:`run_length.encode` compresses an iterable with run-length encoding. + It yields groups of repeated items with the count of how many times they + were repeated: + + >>> uncompressed = 'abbcccdddd' + >>> list(run_length.encode(uncompressed)) + [('a', 1), ('b', 2), ('c', 3), ('d', 4)] + + :func:`run_length.decode` decompresses an iterable that was previously + compressed with run-length encoding. It yields the items of the + decompressed iterable: + + >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] + >>> list(run_length.decode(compressed)) + ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd'] + + """ + + @staticmethod + def encode(iterable): + return ((k, ilen(g)) for k, g in groupby(iterable)) + + @staticmethod + def decode(iterable): + return chain.from_iterable(repeat(k, n) for k, n in iterable) + + +def exactly_n(iterable, n, predicate=bool): + """Return ``True`` if exactly ``n`` items in the iterable are ``True`` + according to the *predicate* function. + + >>> exactly_n([True, True, False], 2) + True + >>> exactly_n([True, True, False], 1) + False + >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3) + True + + The iterable will be advanced until ``n + 1`` truthy items are encountered, + so avoid calling it on infinite iterables. + + """ + return len(take(n + 1, filter(predicate, iterable))) == n + + +def circular_shifts(iterable): + """Return a list of circular shifts of *iterable*. + + >>> circular_shifts(range(4)) + [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)] + """ + lst = list(iterable) + return take(len(lst), windowed(cycle(lst), len(lst))) + + +def make_decorator(wrapping_func, result_index=0): + """Return a decorator version of *wrapping_func*, which is a function that + modifies an iterable. *result_index* is the position in that function's + signature where the iterable goes. + + This lets you use itertools on the "production end," i.e. at function + definition. This can augment what the function returns without changing the + function's code. + + For example, to produce a decorator version of :func:`chunked`: + + >>> from more_itertools import chunked + >>> chunker = make_decorator(chunked, result_index=0) + >>> @chunker(3) + ... 
def iter_range(n): + ... return iter(range(n)) + ... + >>> list(iter_range(9)) + [[0, 1, 2], [3, 4, 5], [6, 7, 8]] + + To only allow truthy items to be returned: + + >>> truth_serum = make_decorator(filter, result_index=1) + >>> @truth_serum(bool) + ... def boolean_test(): + ... return [0, 1, '', ' ', False, True] + ... + >>> list(boolean_test()) + [1, ' ', True] + + The :func:`peekable` and :func:`seekable` wrappers make for practical + decorators: + + >>> from more_itertools import peekable + >>> peekable_function = make_decorator(peekable) + >>> @peekable_function() + ... def str_range(*args): + ... return (str(x) for x in range(*args)) + ... + >>> it = str_range(1, 20, 2) + >>> next(it), next(it), next(it) + ('1', '3', '5') + >>> it.peek() + '7' + >>> next(it) + '7' + + """ + # See https://sites.google.com/site/bbayles/index/decorator_factory for + # notes on how this works. + def decorator(*wrapping_args, **wrapping_kwargs): + def outer_wrapper(f): + def inner_wrapper(*args, **kwargs): + result = f(*args, **kwargs) + wrapping_args_ = list(wrapping_args) + wrapping_args_.insert(result_index, result) + return wrapping_func(*wrapping_args_, **wrapping_kwargs) + + return inner_wrapper + + return outer_wrapper + + return decorator + + +def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None): + """Return a dictionary that maps the items in *iterable* to categories + defined by *keyfunc*, transforms them with *valuefunc*, and + then summarizes them by category with *reducefunc*. + + *valuefunc* defaults to the identity function if it is unspecified. + If *reducefunc* is unspecified, no summarization takes place: + + >>> keyfunc = lambda x: x.upper() + >>> result = map_reduce('abbccc', keyfunc) + >>> sorted(result.items()) + [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])] + + Specifying *valuefunc* transforms the categorized items: + + >>> keyfunc = lambda x: x.upper() + >>> valuefunc = lambda x: 1 + >>> result = map_reduce('abbccc', keyfunc, valuefunc) + >>> sorted(result.items()) + [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])] + + Specifying *reducefunc* summarizes the categorized items: + + >>> keyfunc = lambda x: x.upper() + >>> valuefunc = lambda x: 1 + >>> reducefunc = sum + >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc) + >>> sorted(result.items()) + [('A', 1), ('B', 2), ('C', 3)] + + You may want to filter the input iterable before applying the map/reduce + procedure: + + >>> all_items = range(30) + >>> items = [x for x in all_items if 10 <= x <= 20] # Filter + >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1 + >>> categories = map_reduce(items, keyfunc=keyfunc) + >>> sorted(categories.items()) + [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])] + >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum) + >>> sorted(summaries.items()) + [(0, 90), (1, 75)] + + Note that all items in the iterable are gathered into a list before the + summarization step, which may require significant storage. + + The returned object is a :obj:`collections.defaultdict` with the + ``default_factory`` set to ``None``, such that it behaves like a normal + dictionary. 
+ + """ + valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc + + ret = defaultdict(list) + for item in iterable: + key = keyfunc(item) + value = valuefunc(item) + ret[key].append(value) + + if reducefunc is not None: + for key, value_list in ret.items(): + ret[key] = reducefunc(value_list) + + ret.default_factory = None + return ret + + +def rlocate(iterable, pred=bool, window_size=None): + """Yield the index of each item in *iterable* for which *pred* returns + ``True``, starting from the right and moving left. + + *pred* defaults to :func:`bool`, which will select truthy items: + + >>> list(rlocate([0, 1, 1, 0, 1, 0, 0])) # Truthy at 1, 2, and 4 + [4, 2, 1] + + Set *pred* to a custom function to, e.g., find the indexes for a particular + item: + + >>> iterable = iter('abcb') + >>> pred = lambda x: x == 'b' + >>> list(rlocate(iterable, pred)) + [3, 1] + + If *window_size* is given, then the *pred* function will be called with + that many items. This enables searching for sub-sequences: + + >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3] + >>> pred = lambda *args: args == (1, 2, 3) + >>> list(rlocate(iterable, pred=pred, window_size=3)) + [9, 5, 1] + + Beware, this function won't return anything for infinite iterables. + If *iterable* is reversible, ``rlocate`` will reverse it and search from + the right. Otherwise, it will search from the left and return the results + in reverse order. + + See :func:`locate` to for other example applications. + + """ + if window_size is None: + try: + len_iter = len(iterable) + return (len_iter - i - 1 for i in locate(reversed(iterable), pred)) + except TypeError: + pass + + return reversed(list(locate(iterable, pred, window_size))) + + +def replace(iterable, pred, substitutes, count=None, window_size=1): + """Yield the items from *iterable*, replacing the items for which *pred* + returns ``True`` with the items from the iterable *substitutes*. + + >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1] + >>> pred = lambda x: x == 0 + >>> substitutes = (2, 3) + >>> list(replace(iterable, pred, substitutes)) + [1, 1, 2, 3, 1, 1, 2, 3, 1, 1] + + If *count* is given, the number of replacements will be limited: + + >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0] + >>> pred = lambda x: x == 0 + >>> substitutes = [None] + >>> list(replace(iterable, pred, substitutes, count=2)) + [1, 1, None, 1, 1, None, 1, 1, 0] + + Use *window_size* to control the number of items passed as arguments to + *pred*. This allows for locating and replacing subsequences. + + >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5] + >>> window_size = 3 + >>> pred = lambda *args: args == (0, 1, 2) # 3 items passed to pred + >>> substitutes = [3, 4] # Splice in these items + >>> list(replace(iterable, pred, substitutes, window_size=window_size)) + [3, 4, 5, 3, 4, 5] + + """ + if window_size < 1: + raise ValueError('window_size must be at least 1') + + # Save the substitutes iterable, since it's used more than once + substitutes = tuple(substitutes) + + # Add padding such that the number of windows matches the length of the + # iterable + it = chain(iterable, [_marker] * (window_size - 1)) + windows = windowed(it, window_size) + + n = 0 + for w in windows: + # If the current window matches our predicate (and we haven't hit + # our maximum number of replacements), splice in the substitutes + # and then consume the following windows that overlap with this one. + # For example, if the iterable is (0, 1, 2, 3, 4...) + # and the window size is 2, we have (0, 1), (1, 2), (2, 3)... 
+ # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2) + if pred(*w): + if (count is None) or (n < count): + n += 1 + yield from substitutes + consume(windows, window_size - 1) + continue + + # If there was no match (or we've reached the replacement limit), + # yield the first item from the window. + if w and (w[0] is not _marker): + yield w[0] + + +def partitions(iterable): + """Yield all possible order-preserving partitions of *iterable*. + + >>> iterable = 'abc' + >>> for part in partitions(iterable): + ... print([''.join(p) for p in part]) + ['abc'] + ['a', 'bc'] + ['ab', 'c'] + ['a', 'b', 'c'] + + This is unrelated to :func:`partition`. + + """ + sequence = list(iterable) + n = len(sequence) + for i in powerset(range(1, n)): + yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))] + + +def set_partitions(iterable, k=None): + """ + Yield the set partitions of *iterable* into *k* parts. Set partitions are + not order-preserving. + + >>> iterable = 'abc' + >>> for part in set_partitions(iterable, 2): + ... print([''.join(p) for p in part]) + ['a', 'bc'] + ['ab', 'c'] + ['b', 'ac'] + + + If *k* is not given, every set partition is generated. + + >>> iterable = 'abc' + >>> for part in set_partitions(iterable): + ... print([''.join(p) for p in part]) + ['abc'] + ['a', 'bc'] + ['ab', 'c'] + ['b', 'ac'] + ['a', 'b', 'c'] + + """ + L = list(iterable) + n = len(L) + if k is not None: + if k < 1: + raise ValueError( + "Can't partition in a negative or zero number of groups" + ) + elif k > n: + return + + def set_partitions_helper(L, k): + n = len(L) + if k == 1: + yield [L] + elif n == k: + yield [[s] for s in L] + else: + e, *M = L + for p in set_partitions_helper(M, k - 1): + yield [[e], *p] + for p in set_partitions_helper(M, k): + for i in range(len(p)): + yield p[:i] + [[e] + p[i]] + p[i + 1 :] + + if k is None: + for k in range(1, n + 1): + yield from set_partitions_helper(L, k) + else: + yield from set_partitions_helper(L, k) + + +class time_limited: + """ + Yield items from *iterable* until *limit_seconds* have passed. + If the time limit expires before all items have been yielded, the + ``timed_out`` parameter will be set to ``True``. + + >>> from time import sleep + >>> def generator(): + ... yield 1 + ... yield 2 + ... sleep(0.2) + ... yield 3 + >>> iterable = time_limited(0.1, generator()) + >>> list(iterable) + [1, 2] + >>> iterable.timed_out + True + + Note that the time is checked before each item is yielded, and iteration + stops if the time elapsed is greater than *limit_seconds*. If your time + limit is 1 second, but it takes 2 seconds to generate the first item from + the iterable, the function will run for 2 seconds and not yield anything. + + """ + + def __init__(self, limit_seconds, iterable): + if limit_seconds < 0: + raise ValueError('limit_seconds must be positive') + self.limit_seconds = limit_seconds + self._iterable = iter(iterable) + self._start_time = monotonic() + self.timed_out = False + + def __iter__(self): + return self + + def __next__(self): + item = next(self._iterable) + if monotonic() - self._start_time > self.limit_seconds: + self.timed_out = True + raise StopIteration + + return item + + +def only(iterable, default=None, too_long=None): + """If *iterable* has only one item, return it. + If it has zero items, return *default*. + If it has more than one item, raise the exception given by *too_long*, + which is ``ValueError`` by default. 
+ + >>> only([], default='missing') + 'missing' + >>> only([1]) + 1 + >>> only([1, 2]) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: Expected exactly one item in iterable, but got 1, 2, + and perhaps more.' + >>> only([1, 2], too_long=TypeError) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError + + Note that :func:`only` attempts to advance *iterable* twice to ensure there + is only one item. See :func:`spy` or :func:`peekable` to check + iterable contents less destructively. + """ + it = iter(iterable) + first_value = next(it, default) + + try: + second_value = next(it) + except StopIteration: + pass + else: + msg = ( + 'Expected exactly one item in iterable, but got {!r}, {!r}, ' + 'and perhaps more.'.format(first_value, second_value) + ) + raise too_long or ValueError(msg) + + return first_value + + +def ichunked(iterable, n): + """Break *iterable* into sub-iterables with *n* elements each. + :func:`ichunked` is like :func:`chunked`, but it yields iterables + instead of lists. + + If the sub-iterables are read in order, the elements of *iterable* + won't be stored in memory. + If they are read out of order, :func:`itertools.tee` is used to cache + elements as necessary. + + >>> from itertools import count + >>> all_chunks = ichunked(count(), 4) + >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks) + >>> list(c_2) # c_1's elements have been cached; c_3's haven't been + [4, 5, 6, 7] + >>> list(c_1) + [0, 1, 2, 3] + >>> list(c_3) + [8, 9, 10, 11] + + """ + source = iter(iterable) + + while True: + # Check to see whether we're at the end of the source iterable + item = next(source, _marker) + if item is _marker: + return + + # Clone the source and yield an n-length slice + source, it = tee(chain([item], source)) + yield islice(it, n) + + # Advance the source iterable + consume(source, n) + + +def distinct_combinations(iterable, r): + """Yield the distinct combinations of *r* items taken from *iterable*. + + >>> list(distinct_combinations([0, 0, 1], 2)) + [(0, 0), (0, 1)] + + Equivalent to ``set(combinations(iterable))``, except duplicates are not + generated and thrown away. For larger input sequences this is much more + efficient. + + """ + if r < 0: + raise ValueError('r must be non-negative') + elif r == 0: + yield () + return + pool = tuple(iterable) + generators = [unique_everseen(enumerate(pool), key=itemgetter(1))] + current_combo = [None] * r + level = 0 + while generators: + try: + cur_idx, p = next(generators[-1]) + except StopIteration: + generators.pop() + level -= 1 + continue + current_combo[level] = p + if level + 1 == r: + yield tuple(current_combo) + else: + generators.append( + unique_everseen( + enumerate(pool[cur_idx + 1 :], cur_idx + 1), + key=itemgetter(1), + ) + ) + level += 1 + + +def filter_except(validator, iterable, *exceptions): + """Yield the items from *iterable* for which the *validator* function does + not raise one of the specified *exceptions*. + + *validator* is called for each item in *iterable*. + It should be a function that accepts one argument and raises an exception + if that item is not valid. + + >>> iterable = ['1', '2', 'three', '4', None] + >>> list(filter_except(int, iterable, ValueError, TypeError)) + ['1', '2', '4'] + + If an exception other than one given by *exceptions* is raised by + *validator*, it is raised like normal. 
+ """ + for item in iterable: + try: + validator(item) + except exceptions: + pass + else: + yield item + + +def map_except(function, iterable, *exceptions): + """Transform each item from *iterable* with *function* and yield the + result, unless *function* raises one of the specified *exceptions*. + + *function* is called to transform each item in *iterable*. + It should be a accept one argument. + + >>> iterable = ['1', '2', 'three', '4', None] + >>> list(map_except(int, iterable, ValueError, TypeError)) + [1, 2, 4] + + If an exception other than one given by *exceptions* is raised by + *function*, it is raised like normal. + """ + for item in iterable: + try: + yield function(item) + except exceptions: + pass + + +def _sample_unweighted(iterable, k): + # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li: + # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))". + + # Fill up the reservoir (collection of samples) with the first `k` samples + reservoir = take(k, iterable) + + # Generate random number that's the largest in a sample of k U(0,1) numbers + # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic + W = exp(log(random()) / k) + + # The number of elements to skip before changing the reservoir is a random + # number with a geometric distribution. Sample it using random() and logs. + next_index = k + floor(log(random()) / log(1 - W)) + + for index, element in enumerate(iterable, k): + + if index == next_index: + reservoir[randrange(k)] = element + # The new W is the largest in a sample of k U(0, `old_W`) numbers + W *= exp(log(random()) / k) + next_index += floor(log(random()) / log(1 - W)) + 1 + + return reservoir + + +def _sample_weighted(iterable, k, weights): + # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. : + # "Weighted random sampling with a reservoir". + + # Log-transform for numerical stability for weights that are small/large + weight_keys = (log(random()) / weight for weight in weights) + + # Fill up the reservoir (collection of samples) with the first `k` + # weight-keys and elements, then heapify the list. + reservoir = take(k, zip(weight_keys, iterable)) + heapify(reservoir) + + # The number of jumps before changing the reservoir is a random variable + # with an exponential distribution. Sample it using random() and logs. + smallest_weight_key, _ = reservoir[0] + weights_to_skip = log(random()) / smallest_weight_key + + for weight, element in zip(weights, iterable): + if weight >= weights_to_skip: + # The notation here is consistent with the paper, but we store + # the weight-keys in log-space for better numerical stability. + smallest_weight_key, _ = reservoir[0] + t_w = exp(weight * smallest_weight_key) + r_2 = uniform(t_w, 1) # generate U(t_w, 1) + weight_key = log(r_2) / weight + heapreplace(reservoir, (weight_key, element)) + smallest_weight_key, _ = reservoir[0] + weights_to_skip = log(random()) / smallest_weight_key + else: + weights_to_skip -= weight + + # Equivalent to [element for weight_key, element in sorted(reservoir)] + return [heappop(reservoir)[1] for _ in range(k)] + + +def sample(iterable, k, weights=None): + """Return a *k*-length list of elements chosen (without replacement) + from the *iterable*. Like :func:`random.sample`, but works on iterables + of unknown length. 
+ + >>> iterable = range(100) + >>> sample(iterable, 5) # doctest: +SKIP + [81, 60, 96, 16, 4] + + An iterable with *weights* may also be given: + + >>> iterable = range(100) + >>> weights = (i * i + 1 for i in range(100)) + >>> sampled = sample(iterable, 5, weights=weights) # doctest: +SKIP + [79, 67, 74, 66, 78] + + The algorithm can also be used to generate weighted random permutations. + The relative weight of each item determines the probability that it + appears late in the permutation. + + >>> data = "abcdefgh" + >>> weights = range(1, len(data) + 1) + >>> sample(data, k=len(data), weights=weights) # doctest: +SKIP + ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f'] + """ + if k == 0: + return [] + + iterable = iter(iterable) + if weights is None: + return _sample_unweighted(iterable, k) + else: + weights = iter(weights) + return _sample_weighted(iterable, k, weights) + + +def is_sorted(iterable, key=None, reverse=False): + """Returns ``True`` if the items of iterable are in sorted order, and + ``False`` otherwise. *key* and *reverse* have the same meaning that they do + in the built-in :func:`sorted` function. + + >>> is_sorted(['1', '2', '3', '4', '5'], key=int) + True + >>> is_sorted([5, 4, 3, 1, 2], reverse=True) + False + + The function returns ``False`` after encountering the first out-of-order + item. If there are no out-of-order items, the iterable is exhausted. + """ + + compare = lt if reverse else gt + it = iterable if (key is None) else map(key, iterable) + return not any(starmap(compare, pairwise(it))) + + +class AbortThread(BaseException): + pass + + +class callback_iter: + """Convert a function that uses callbacks to an iterator. + + Let *func* be a function that takes a `callback` keyword argument. + For example: + + >>> def func(callback=None): + ... for i, c in [(1, 'a'), (2, 'b'), (3, 'c')]: + ... if callback: + ... callback(i, c) + ... return 4 + + + Use ``with callback_iter(func)`` to get an iterator over the parameters + that are delivered to the callback. + + >>> with callback_iter(func) as it: + ... for args, kwargs in it: + ... print(args) + (1, 'a') + (2, 'b') + (3, 'c') + + The function will be called in a background thread. The ``done`` property + indicates whether it has completed execution. + + >>> it.done + True + + If it completes successfully, its return value will be available + in the ``result`` property. + + >>> it.result + 4 + + Notes: + + * If the function uses some keyword argument besides ``callback``, supply + *callback_kwd*. + * If it finished executing, but raised an exception, accessing the + ``result`` property will raise the same exception. + * If it hasn't finished executing, accessing the ``result`` + property from within the ``with`` block will raise ``RuntimeError``. + * If it hasn't finished executing, accessing the ``result`` property from + outside the ``with`` block will raise a + ``more_itertools.AbortThread`` exception. + * Provide *wait_seconds* to adjust how frequently the it is polled for + output. 
+ + """ + + def __init__(self, func, callback_kwd='callback', wait_seconds=0.1): + self._func = func + self._callback_kwd = callback_kwd + self._aborted = False + self._future = None + self._wait_seconds = wait_seconds + self._executor = ThreadPoolExecutor(max_workers=1) + self._iterator = self._reader() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._aborted = True + self._executor.shutdown() + + def __iter__(self): + return self + + def __next__(self): + return next(self._iterator) + + @property + def done(self): + if self._future is None: + return False + return self._future.done() + + @property + def result(self): + if not self.done: + raise RuntimeError('Function has not yet completed') + + return self._future.result() + + def _reader(self): + q = Queue() + + def callback(*args, **kwargs): + if self._aborted: + raise AbortThread('canceled by user') + + q.put((args, kwargs)) + + self._future = self._executor.submit( + self._func, **{self._callback_kwd: callback} + ) + + while True: + try: + item = q.get(timeout=self._wait_seconds) + except Empty: + pass + else: + q.task_done() + yield item + + if self._future.done(): + break + + remaining = [] + while True: + try: + item = q.get_nowait() + except Empty: + break + else: + q.task_done() + remaining.append(item) + q.join() + yield from remaining + + +def windowed_complete(iterable, n): + """ + Yield ``(beginning, middle, end)`` tuples, where: + + * Each ``middle`` has *n* items from *iterable* + * Each ``beginning`` has the items before the ones in ``middle`` + * Each ``end`` has the items after the ones in ``middle`` + + >>> iterable = range(7) + >>> n = 3 + >>> for beginning, middle, end in windowed_complete(iterable, n): + ... print(beginning, middle, end) + () (0, 1, 2) (3, 4, 5, 6) + (0,) (1, 2, 3) (4, 5, 6) + (0, 1) (2, 3, 4) (5, 6) + (0, 1, 2) (3, 4, 5) (6,) + (0, 1, 2, 3) (4, 5, 6) () + + Note that *n* must be at least 0 and most equal to the length of + *iterable*. + + This function will exhaust the iterable and may require significant + storage. + """ + if n < 0: + raise ValueError('n must be >= 0') + + seq = tuple(iterable) + size = len(seq) + + if n > size: + raise ValueError('n must be <= len(seq)') + + for i in range(size - n + 1): + beginning = seq[:i] + middle = seq[i : i + n] + end = seq[i + n :] + yield beginning, middle, end + + +def all_unique(iterable, key=None): + """ + Returns ``True`` if all the elements of *iterable* are unique (no two + elements are equal). + + >>> all_unique('ABCB') + False + + If a *key* function is specified, it will be used to make comparisons. + + >>> all_unique('ABCb') + True + >>> all_unique('ABCb', str.lower) + False + + The function returns as soon as the first non-unique element is + encountered. Iterables with a mix of hashable and unhashable items can + be used, but the function will be slower for unhashable items. + """ + seenset = set() + seenset_add = seenset.add + seenlist = [] + seenlist_add = seenlist.append + for element in map(key, iterable) if key else iterable: + try: + if element in seenset: + return False + seenset_add(element) + except TypeError: + if element in seenlist: + return False + seenlist_add(element) + return True + + +def nth_product(index, *args): + """Equivalent to ``list(product(*args))[index]``. + + The products of *args* can be ordered lexicographically. + :func:`nth_product` computes the product at sort position *index* without + computing the previous products. 
+ + >>> nth_product(8, range(2), range(2), range(2), range(2)) + (1, 0, 0, 0) + + ``IndexError`` will be raised if the given *index* is invalid. + """ + pools = list(map(tuple, reversed(args))) + ns = list(map(len, pools)) + + c = reduce(mul, ns) + + if index < 0: + index += c + + if not 0 <= index < c: + raise IndexError + + result = [] + for pool, n in zip(pools, ns): + result.append(pool[index % n]) + index //= n + + return tuple(reversed(result)) + + +def nth_permutation(iterable, r, index): + """Equivalent to ``list(permutations(iterable, r))[index]``` + + The subsequences of *iterable* that are of length *r* where order is + important can be ordered lexicographically. :func:`nth_permutation` + computes the subsequence at sort position *index* directly, without + computing the previous subsequences. + + >>> nth_permutation('ghijk', 2, 5) + ('h', 'i') + + ``ValueError`` will be raised If *r* is negative or greater than the length + of *iterable*. + ``IndexError`` will be raised if the given *index* is invalid. + """ + pool = list(iterable) + n = len(pool) + + if r is None or r == n: + r, c = n, factorial(n) + elif not 0 <= r < n: + raise ValueError + else: + c = factorial(n) // factorial(n - r) + + if index < 0: + index += c + + if not 0 <= index < c: + raise IndexError + + if c == 0: + return tuple() + + result = [0] * r + q = index * factorial(n) // c if r < n else index + for d in range(1, n + 1): + q, i = divmod(q, d) + if 0 <= n - d < r: + result[n - d] = i + if q == 0: + break + + return tuple(map(pool.pop, result)) + + +def value_chain(*args): + """Yield all arguments passed to the function in the same order in which + they were passed. If an argument itself is iterable then iterate over its + values. + + >>> list(value_chain(1, 2, 3, [4, 5, 6])) + [1, 2, 3, 4, 5, 6] + + Binary and text strings are not considered iterable and are emitted + as-is: + + >>> list(value_chain('12', '34', ['56', '78'])) + ['12', '34', '56', '78'] + + + Multiple levels of nesting are not flattened. + + """ + for value in args: + if isinstance(value, (str, bytes)): + yield value + continue + try: + yield from value + except TypeError: + yield value + + +def product_index(element, *args): + """Equivalent to ``list(product(*args)).index(element)`` + + The products of *args* can be ordered lexicographically. + :func:`product_index` computes the first index of *element* without + computing the previous products. + + >>> product_index([8, 2], range(10), range(5)) + 42 + + ``ValueError`` will be raised if the given *element* isn't in the product + of *args*. + """ + index = 0 + + for x, pool in zip_longest(element, args, fillvalue=_marker): + if x is _marker or pool is _marker: + raise ValueError('element is not a product of args') + + pool = tuple(pool) + index = index * len(pool) + pool.index(x) + + return index + + +def combination_index(element, iterable): + """Equivalent to ``list(combinations(iterable, r)).index(element)`` + + The subsequences of *iterable* that are of length *r* can be ordered + lexicographically. :func:`combination_index` computes the index of the + first *element*, without computing the previous combinations. + + >>> combination_index('adf', 'abcdefg') + 10 + + ``ValueError`` will be raised if the given *element* isn't one of the + combinations of *iterable*. 
+ """ + element = enumerate(element) + k, y = next(element, (None, None)) + if k is None: + return 0 + + indexes = [] + pool = enumerate(iterable) + for n, x in pool: + if x == y: + indexes.append(n) + tmp, y = next(element, (None, None)) + if tmp is None: + break + else: + k = tmp + else: + raise ValueError('element is not a combination of iterable') + + n, _ = last(pool, default=(n, None)) + + # Python versiosn below 3.8 don't have math.comb + index = 1 + for i, j in enumerate(reversed(indexes), start=1): + j = n - j + if i <= j: + index += factorial(j) // (factorial(i) * factorial(j - i)) + + return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index + + +def permutation_index(element, iterable): + """Equivalent to ``list(permutations(iterable, r)).index(element)``` + + The subsequences of *iterable* that are of length *r* where order is + important can be ordered lexicographically. :func:`permutation_index` + computes the index of the first *element* directly, without computing + the previous permutations. + + >>> permutation_index([1, 3, 2], range(5)) + 19 + + ``ValueError`` will be raised if the given *element* isn't one of the + permutations of *iterable*. + """ + index = 0 + pool = list(iterable) + for i, x in zip(range(len(pool), -1, -1), element): + r = pool.index(x) + index = index * i + r + del pool[r] + + return index + + +class countable: + """Wrap *iterable* and keep a count of how many items have been consumed. + + The ``items_seen`` attribute starts at ``0`` and increments as the iterable + is consumed: + + >>> iterable = map(str, range(10)) + >>> it = countable(iterable) + >>> it.items_seen + 0 + >>> next(it), next(it) + ('0', '1') + >>> list(it) + ['2', '3', '4', '5', '6', '7', '8', '9'] + >>> it.items_seen + 10 + """ + + def __init__(self, iterable): + self._it = iter(iterable) + self.items_seen = 0 + + def __iter__(self): + return self + + def __next__(self): + item = next(self._it) + self.items_seen += 1 + + return item diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/recipes.py b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/recipes.py new file mode 100644 index 00000000..521abd7c --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/recipes.py @@ -0,0 +1,620 @@ +"""Imported from the recipes section of the itertools documentation. + +All functions taken from the recipes section of the itertools library docs +[1]_. +Some backward-compatible usability improvements have been made. + +.. [1] http://docs.python.org/library/itertools.html#recipes + +""" +import warnings +from collections import deque +from itertools import ( + chain, + combinations, + count, + cycle, + groupby, + islice, + repeat, + starmap, + tee, + zip_longest, +) +import operator +from random import randrange, sample, choice + +__all__ = [ + 'all_equal', + 'consume', + 'convolve', + 'dotproduct', + 'first_true', + 'flatten', + 'grouper', + 'iter_except', + 'ncycles', + 'nth', + 'nth_combination', + 'padnone', + 'pad_none', + 'pairwise', + 'partition', + 'powerset', + 'prepend', + 'quantify', + 'random_combination_with_replacement', + 'random_combination', + 'random_permutation', + 'random_product', + 'repeatfunc', + 'roundrobin', + 'tabulate', + 'tail', + 'take', + 'unique_everseen', + 'unique_justseen', +] + + +def take(n, iterable): + """Return first *n* items of the iterable as a list. + + >>> take(3, range(10)) + [0, 1, 2] + + If there are fewer than *n* items in the iterable, all of them are + returned. 
+ + >>> take(10, range(3)) + [0, 1, 2] + + """ + return list(islice(iterable, n)) + + +def tabulate(function, start=0): + """Return an iterator over the results of ``func(start)``, + ``func(start + 1)``, ``func(start + 2)``... + + *func* should be a function that accepts one integer argument. + + If *start* is not specified it defaults to 0. It will be incremented each + time the iterator is advanced. + + >>> square = lambda x: x ** 2 + >>> iterator = tabulate(square, -3) + >>> take(4, iterator) + [9, 4, 1, 0] + + """ + return map(function, count(start)) + + +def tail(n, iterable): + """Return an iterator over the last *n* items of *iterable*. + + >>> t = tail(3, 'ABCDEFG') + >>> list(t) + ['E', 'F', 'G'] + + """ + return iter(deque(iterable, maxlen=n)) + + +def consume(iterator, n=None): + """Advance *iterable* by *n* steps. If *n* is ``None``, consume it + entirely. + + Efficiently exhausts an iterator without returning values. Defaults to + consuming the whole iterator, but an optional second argument may be + provided to limit consumption. + + >>> i = (x for x in range(10)) + >>> next(i) + 0 + >>> consume(i, 3) + >>> next(i) + 4 + >>> consume(i) + >>> next(i) + Traceback (most recent call last): + File "", line 1, in + StopIteration + + If the iterator has fewer items remaining than the provided limit, the + whole iterator will be consumed. + + >>> i = (x for x in range(3)) + >>> consume(i, 5) + >>> next(i) + Traceback (most recent call last): + File "", line 1, in + StopIteration + + """ + # Use functions that consume iterators at C speed. + if n is None: + # feed the entire iterator into a zero-length deque + deque(iterator, maxlen=0) + else: + # advance to the empty slice starting at position n + next(islice(iterator, n, n), None) + + +def nth(iterable, n, default=None): + """Returns the nth item or a default value. + + >>> l = range(10) + >>> nth(l, 3) + 3 + >>> nth(l, 20, "zebra") + 'zebra' + + """ + return next(islice(iterable, n, None), default) + + +def all_equal(iterable): + """ + Returns ``True`` if all the elements are equal to each other. + + >>> all_equal('aaaa') + True + >>> all_equal('aaab') + False + + """ + g = groupby(iterable) + return next(g, True) and not next(g, False) + + +def quantify(iterable, pred=bool): + """Return the how many times the predicate is true. + + >>> quantify([True, False, True]) + 2 + + """ + return sum(map(pred, iterable)) + + +def pad_none(iterable): + """Returns the sequence of elements and then returns ``None`` indefinitely. + + >>> take(5, pad_none(range(3))) + [0, 1, 2, None, None] + + Useful for emulating the behavior of the built-in :func:`map` function. + + See also :func:`padded`. + + """ + return chain(iterable, repeat(None)) + + +padnone = pad_none + + +def ncycles(iterable, n): + """Returns the sequence elements *n* times + + >>> list(ncycles(["a", "b"], 3)) + ['a', 'b', 'a', 'b', 'a', 'b'] + + """ + return chain.from_iterable(repeat(tuple(iterable), n)) + + +def dotproduct(vec1, vec2): + """Returns the dot product of the two iterables. + + >>> dotproduct([10, 10], [20, 20]) + 400 + + """ + return sum(map(operator.mul, vec1, vec2)) + + +def flatten(listOfLists): + """Return an iterator flattening one level of nesting in a list of lists. + + >>> list(flatten([[0, 1], [2, 3]])) + [0, 1, 2, 3] + + See also :func:`collapse`, which can flatten multiple levels of nesting. 
+ + """ + return chain.from_iterable(listOfLists) + + +def repeatfunc(func, times=None, *args): + """Call *func* with *args* repeatedly, returning an iterable over the + results. + + If *times* is specified, the iterable will terminate after that many + repetitions: + + >>> from operator import add + >>> times = 4 + >>> args = 3, 5 + >>> list(repeatfunc(add, times, *args)) + [8, 8, 8, 8] + + If *times* is ``None`` the iterable will not terminate: + + >>> from random import randrange + >>> times = None + >>> args = 1, 11 + >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP + [2, 4, 8, 1, 8, 4] + + """ + if times is None: + return starmap(func, repeat(args)) + return starmap(func, repeat(args, times)) + + +def _pairwise(iterable): + """Returns an iterator of paired items, overlapping, from the original + + >>> take(4, pairwise(count())) + [(0, 1), (1, 2), (2, 3), (3, 4)] + + On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`. + + """ + a, b = tee(iterable) + next(b, None) + yield from zip(a, b) + + +try: + from itertools import pairwise as itertools_pairwise +except ImportError: + pairwise = _pairwise +else: + + def pairwise(iterable): + yield from itertools_pairwise(iterable) + + pairwise.__doc__ = _pairwise.__doc__ + + +def grouper(iterable, n, fillvalue=None): + """Collect data into fixed-length chunks or blocks. + + >>> list(grouper('ABCDEFG', 3, 'x')) + [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')] + + """ + if isinstance(iterable, int): + warnings.warn( + "grouper expects iterable as first parameter", DeprecationWarning + ) + n, iterable = iterable, n + args = [iter(iterable)] * n + return zip_longest(fillvalue=fillvalue, *args) + + +def roundrobin(*iterables): + """Yields an item from each iterable, alternating between them. + + >>> list(roundrobin('ABC', 'D', 'EF')) + ['A', 'D', 'E', 'B', 'F', 'C'] + + This function produces the same output as :func:`interleave_longest`, but + may perform better for some inputs (in particular when the number of + iterables is small). + + """ + # Recipe credited to George Sakkis + pending = len(iterables) + nexts = cycle(iter(it).__next__ for it in iterables) + while pending: + try: + for next in nexts: + yield next() + except StopIteration: + pending -= 1 + nexts = cycle(islice(nexts, pending)) + + +def partition(pred, iterable): + """ + Returns a 2-tuple of iterables derived from the input iterable. + The first yields the items that have ``pred(item) == False``. + The second yields the items that have ``pred(item) == True``. + + >>> is_odd = lambda x: x % 2 != 0 + >>> iterable = range(10) + >>> even_items, odd_items = partition(is_odd, iterable) + >>> list(even_items), list(odd_items) + ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9]) + + If *pred* is None, :func:`bool` is used. + + >>> iterable = [0, 1, False, True, '', ' '] + >>> false_items, true_items = partition(None, iterable) + >>> list(false_items), list(true_items) + ([0, False, ''], [1, True, ' ']) + + """ + if pred is None: + pred = bool + + evaluations = ((pred(x), x) for x in iterable) + t1, t2 = tee(evaluations) + return ( + (x for (cond, x) in t1 if not cond), + (x for (cond, x) in t2 if cond), + ) + + +def powerset(iterable): + """Yields all possible subsets of the iterable. + + >>> list(powerset([1, 2, 3])) + [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)] + + :func:`powerset` will operate on iterables that aren't :class:`set` + instances, so repeated elements in the input will produce repeated elements + in the output. 
Use :func:`unique_everseen` on the input to avoid generating + duplicates: + + >>> seq = [1, 1, 0] + >>> list(powerset(seq)) + [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)] + >>> from more_itertools import unique_everseen + >>> list(powerset(unique_everseen(seq))) + [(), (1,), (0,), (1, 0)] + + """ + s = list(iterable) + return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1)) + + +def unique_everseen(iterable, key=None): + """ + Yield unique elements, preserving order. + + >>> list(unique_everseen('AAAABBBCCDAABBB')) + ['A', 'B', 'C', 'D'] + >>> list(unique_everseen('ABBCcAD', str.lower)) + ['A', 'B', 'C', 'D'] + + Sequences with a mix of hashable and unhashable items can be used. + The function will be slower (i.e., `O(n^2)`) for unhashable items. + + Remember that ``list`` objects are unhashable - you can use the *key* + parameter to transform the list to a tuple (which is hashable) to + avoid a slowdown. + + >>> iterable = ([1, 2], [2, 3], [1, 2]) + >>> list(unique_everseen(iterable)) # Slow + [[1, 2], [2, 3]] + >>> list(unique_everseen(iterable, key=tuple)) # Faster + [[1, 2], [2, 3]] + + Similary, you may want to convert unhashable ``set`` objects with + ``key=frozenset``. For ``dict`` objects, + ``key=lambda x: frozenset(x.items())`` can be used. + + """ + seenset = set() + seenset_add = seenset.add + seenlist = [] + seenlist_add = seenlist.append + use_key = key is not None + + for element in iterable: + k = key(element) if use_key else element + try: + if k not in seenset: + seenset_add(k) + yield element + except TypeError: + if k not in seenlist: + seenlist_add(k) + yield element + + +def unique_justseen(iterable, key=None): + """Yields elements in order, ignoring serial duplicates + + >>> list(unique_justseen('AAAABBBCCDAABBB')) + ['A', 'B', 'C', 'D', 'A', 'B'] + >>> list(unique_justseen('ABBCcAD', str.lower)) + ['A', 'B', 'C', 'A', 'D'] + + """ + return map(next, map(operator.itemgetter(1), groupby(iterable, key))) + + +def iter_except(func, exception, first=None): + """Yields results from a function repeatedly until an exception is raised. + + Converts a call-until-exception interface to an iterator interface. + Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel + to end the loop. + + >>> l = [0, 1, 2] + >>> list(iter_except(l.pop, IndexError)) + [2, 1, 0] + + """ + try: + if first is not None: + yield first() + while 1: + yield func() + except exception: + pass + + +def first_true(iterable, default=None, pred=None): + """ + Returns the first true value in the iterable. + + If no true value is found, returns *default* + + If *pred* is not None, returns the first item for which + ``pred(item) == True`` . + + >>> first_true(range(10)) + 1 + >>> first_true(range(10), pred=lambda x: x > 5) + 6 + >>> first_true(range(10), default='missing', pred=lambda x: x > 9) + 'missing' + + """ + return next(filter(pred, iterable), default) + + +def random_product(*args, repeat=1): + """Draw an item at random from each of the input iterables. + + >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP + ('c', 3, 'Z') + + If *repeat* is provided as a keyword argument, that many items will be + drawn from each iterable. + + >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP + ('a', 2, 'd', 3) + + This equivalent to taking a random selection from + ``itertools.product(*args, **kwarg)``. 
+ + """ + pools = [tuple(pool) for pool in args] * repeat + return tuple(choice(pool) for pool in pools) + + +def random_permutation(iterable, r=None): + """Return a random *r* length permutation of the elements in *iterable*. + + If *r* is not specified or is ``None``, then *r* defaults to the length of + *iterable*. + + >>> random_permutation(range(5)) # doctest:+SKIP + (3, 4, 0, 1, 2) + + This equivalent to taking a random selection from + ``itertools.permutations(iterable, r)``. + + """ + pool = tuple(iterable) + r = len(pool) if r is None else r + return tuple(sample(pool, r)) + + +def random_combination(iterable, r): + """Return a random *r* length subsequence of the elements in *iterable*. + + >>> random_combination(range(5), 3) # doctest:+SKIP + (2, 3, 4) + + This equivalent to taking a random selection from + ``itertools.combinations(iterable, r)``. + + """ + pool = tuple(iterable) + n = len(pool) + indices = sorted(sample(range(n), r)) + return tuple(pool[i] for i in indices) + + +def random_combination_with_replacement(iterable, r): + """Return a random *r* length subsequence of elements in *iterable*, + allowing individual elements to be repeated. + + >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP + (0, 0, 1, 2, 2) + + This equivalent to taking a random selection from + ``itertools.combinations_with_replacement(iterable, r)``. + + """ + pool = tuple(iterable) + n = len(pool) + indices = sorted(randrange(n) for i in range(r)) + return tuple(pool[i] for i in indices) + + +def nth_combination(iterable, r, index): + """Equivalent to ``list(combinations(iterable, r))[index]``. + + The subsequences of *iterable* that are of length *r* can be ordered + lexicographically. :func:`nth_combination` computes the subsequence at + sort position *index* directly, without computing the previous + subsequences. + + >>> nth_combination(range(5), 3, 5) + (0, 3, 4) + + ``ValueError`` will be raised If *r* is negative or greater than the length + of *iterable*. + ``IndexError`` will be raised if the given *index* is invalid. + """ + pool = tuple(iterable) + n = len(pool) + if (r < 0) or (r > n): + raise ValueError + + c = 1 + k = min(r, n - r) + for i in range(1, k + 1): + c = c * (n - k + i) // i + + if index < 0: + index += c + + if (index < 0) or (index >= c): + raise IndexError + + result = [] + while r: + c, n, r = c * r // n, n - 1, r - 1 + while index >= c: + index -= c + c, n = c * (n - r) // n, n - 1 + result.append(pool[-1 - n]) + + return tuple(result) + + +def prepend(value, iterator): + """Yield *value*, followed by the elements in *iterator*. + + >>> value = '0' + >>> iterator = ['1', '2', '3'] + >>> list(prepend(value, iterator)) + ['0', '1', '2', '3'] + + To prepend multiple values, see :func:`itertools.chain` + or :func:`value_chain`. + + """ + return chain([value], iterator) + + +def convolve(signal, kernel): + """Convolve the iterable *signal* with the iterable *kernel*. + + >>> signal = (1, 2, 3, 4, 5) + >>> kernel = [3, 2, 1] + >>> list(convolve(signal, kernel)) + [3, 8, 14, 20, 26, 14, 5] + + Note: the input arguments are not interchangeable, as the *kernel* + is immediately consumed and stored. 
+ + """ + kernel = tuple(kernel)[::-1] + n = len(kernel) + window = deque([0], maxlen=n) * n + for x in chain(signal, repeat(0, n - 1)): + window.append(x) + yield sum(map(operator.mul, kernel, window)) diff --git a/venv/Lib/site-packages/setuptools/_vendor/ordered_set.py b/venv/Lib/site-packages/setuptools/_vendor/ordered_set.py new file mode 100644 index 00000000..14876000 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/ordered_set.py @@ -0,0 +1,488 @@ +""" +An OrderedSet is a custom MutableSet that remembers its order, so that every +entry has an index that can be looked up. + +Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger, +and released under the MIT license. +""" +import itertools as it +from collections import deque + +try: + # Python 3 + from collections.abc import MutableSet, Sequence +except ImportError: + # Python 2.7 + from collections import MutableSet, Sequence + +SLICE_ALL = slice(None) +__version__ = "3.1" + + +def is_iterable(obj): + """ + Are we being asked to look up a list of things, instead of a single thing? + We check for the `__iter__` attribute so that this can cover types that + don't have to be known by this module, such as NumPy arrays. + + Strings, however, should be considered as atomic values to look up, not + iterables. The same goes for tuples, since they are immutable and therefore + valid entries. + + We don't need to check for the Python 2 `unicode` type, because it doesn't + have an `__iter__` attribute anyway. + """ + return ( + hasattr(obj, "__iter__") + and not isinstance(obj, str) + and not isinstance(obj, tuple) + ) + + +class OrderedSet(MutableSet, Sequence): + """ + An OrderedSet is a custom MutableSet that remembers its order, so that + every entry has an index that can be looked up. + + Example: + >>> OrderedSet([1, 1, 2, 3, 2]) + OrderedSet([1, 2, 3]) + """ + + def __init__(self, iterable=None): + self.items = [] + self.map = {} + if iterable is not None: + self |= iterable + + def __len__(self): + """ + Returns the number of unique elements in the ordered set + + Example: + >>> len(OrderedSet([])) + 0 + >>> len(OrderedSet([1, 2])) + 2 + """ + return len(self.items) + + def __getitem__(self, index): + """ + Get the item at a given index. + + If `index` is a slice, you will get back that slice of items, as a + new OrderedSet. + + If `index` is a list or a similar iterable, you'll get a list of + items corresponding to those indices. This is similar to NumPy's + "fancy indexing". The result is not an OrderedSet because you may ask + for duplicate indices, and the number of elements returned should be + the number of elements asked for. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset[1] + 2 + """ + if isinstance(index, slice) and index == SLICE_ALL: + return self.copy() + elif is_iterable(index): + return [self.items[i] for i in index] + elif hasattr(index, "__index__") or isinstance(index, slice): + result = self.items[index] + if isinstance(result, list): + return self.__class__(result) + else: + return result + else: + raise TypeError("Don't know how to index an OrderedSet by %r" % index) + + def copy(self): + """ + Return a shallow copy of this object. + + Example: + >>> this = OrderedSet([1, 2, 3]) + >>> other = this.copy() + >>> this == other + True + >>> this is other + False + """ + return self.__class__(self) + + def __getstate__(self): + if len(self) == 0: + # The state can't be an empty list. + # We need to return a truthy value, or else __setstate__ won't be run. 
+ # + # This could have been done more gracefully by always putting the state + # in a tuple, but this way is backwards- and forwards- compatible with + # previous versions of OrderedSet. + return (None,) + else: + return list(self) + + def __setstate__(self, state): + if state == (None,): + self.__init__([]) + else: + self.__init__(state) + + def __contains__(self, key): + """ + Test if the item is in this ordered set + + Example: + >>> 1 in OrderedSet([1, 3, 2]) + True + >>> 5 in OrderedSet([1, 3, 2]) + False + """ + return key in self.map + + def add(self, key): + """ + Add `key` as an item to this OrderedSet, then return its index. + + If `key` is already in the OrderedSet, return the index it already + had. + + Example: + >>> oset = OrderedSet() + >>> oset.append(3) + 0 + >>> print(oset) + OrderedSet([3]) + """ + if key not in self.map: + self.map[key] = len(self.items) + self.items.append(key) + return self.map[key] + + append = add + + def update(self, sequence): + """ + Update the set with the given iterable sequence, then return the index + of the last element inserted. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.update([3, 1, 5, 1, 4]) + 4 + >>> print(oset) + OrderedSet([1, 2, 3, 5, 4]) + """ + item_index = None + try: + for item in sequence: + item_index = self.add(item) + except TypeError: + raise ValueError( + "Argument needs to be an iterable, got %s" % type(sequence) + ) + return item_index + + def index(self, key): + """ + Get the index of a given entry, raising an IndexError if it's not + present. + + `key` can be an iterable of entries that is not a string, in which case + this returns a list of indices. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.index(2) + 1 + """ + if is_iterable(key): + return [self.index(subkey) for subkey in key] + return self.map[key] + + # Provide some compatibility with pd.Index + get_loc = index + get_indexer = index + + def pop(self): + """ + Remove and return the last element from the set. + + Raises KeyError if the set is empty. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.pop() + 3 + """ + if not self.items: + raise KeyError("Set is empty") + + elem = self.items[-1] + del self.items[-1] + del self.map[elem] + return elem + + def discard(self, key): + """ + Remove an element. Do not raise an exception if absent. + + The MutableSet mixin uses this to implement the .remove() method, which + *does* raise an error when asked to remove a non-existent item. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.discard(2) + >>> print(oset) + OrderedSet([1, 3]) + >>> oset.discard(2) + >>> print(oset) + OrderedSet([1, 3]) + """ + if key in self: + i = self.map[key] + del self.items[i] + del self.map[key] + for k, v in self.map.items(): + if v >= i: + self.map[k] = v - 1 + + def clear(self): + """ + Remove all items from this OrderedSet. + """ + del self.items[:] + self.map.clear() + + def __iter__(self): + """ + Example: + >>> list(iter(OrderedSet([1, 2, 3]))) + [1, 2, 3] + """ + return iter(self.items) + + def __reversed__(self): + """ + Example: + >>> list(reversed(OrderedSet([1, 2, 3]))) + [3, 2, 1] + """ + return reversed(self.items) + + def __repr__(self): + if not self: + return "%s()" % (self.__class__.__name__,) + return "%s(%r)" % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + """ + Returns true if the containers have the same items. If `other` is a + Sequence, then order is checked, otherwise it is ignored. 
+ + Example: + >>> oset = OrderedSet([1, 3, 2]) + >>> oset == [1, 3, 2] + True + >>> oset == [1, 2, 3] + False + >>> oset == [2, 3] + False + >>> oset == OrderedSet([3, 2, 1]) + False + """ + # In Python 2 deque is not a Sequence, so treat it as one for + # consistent behavior with Python 3. + if isinstance(other, (Sequence, deque)): + # Check that this OrderedSet contains the same elements, in the + # same order, as the other object. + return list(self) == list(other) + try: + other_as_set = set(other) + except TypeError: + # If `other` can't be converted into a set, it's not equal. + return False + else: + return set(self) == other_as_set + + def union(self, *sets): + """ + Combines all unique items. + Each items order is defined by its first appearance. + + Example: + >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0]) + >>> print(oset) + OrderedSet([3, 1, 4, 5, 2, 0]) + >>> oset.union([8, 9]) + OrderedSet([3, 1, 4, 5, 2, 0, 8, 9]) + >>> oset | {10} + OrderedSet([3, 1, 4, 5, 2, 0, 10]) + """ + cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet + containers = map(list, it.chain([self], sets)) + items = it.chain.from_iterable(containers) + return cls(items) + + def __and__(self, other): + # the parent implementation of this is backwards + return self.intersection(other) + + def intersection(self, *sets): + """ + Returns elements in common between all sets. Order is defined only + by the first set. + + Example: + >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3]) + >>> print(oset) + OrderedSet([1, 2, 3]) + >>> oset.intersection([2, 4, 5], [1, 2, 3, 4]) + OrderedSet([2]) + >>> oset.intersection() + OrderedSet([1, 2, 3]) + """ + cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet + if sets: + common = set.intersection(*map(set, sets)) + items = (item for item in self if item in common) + else: + items = self + return cls(items) + + def difference(self, *sets): + """ + Returns all elements that are in this set but not the others. + + Example: + >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2])) + OrderedSet([1, 3]) + >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3])) + OrderedSet([1]) + >>> OrderedSet([1, 2, 3]) - OrderedSet([2]) + OrderedSet([1, 3]) + >>> OrderedSet([1, 2, 3]).difference() + OrderedSet([1, 2, 3]) + """ + cls = self.__class__ + if sets: + other = set.union(*map(set, sets)) + items = (item for item in self if item not in other) + else: + items = self + return cls(items) + + def issubset(self, other): + """ + Report whether another set contains this set. + + Example: + >>> OrderedSet([1, 2, 3]).issubset({1, 2}) + False + >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4}) + True + >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5}) + False + """ + if len(self) > len(other): # Fast check for obvious cases + return False + return all(item in other for item in self) + + def issuperset(self, other): + """ + Report whether this set contains another set. + + Example: + >>> OrderedSet([1, 2]).issuperset([1, 2, 3]) + False + >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3}) + True + >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3}) + False + """ + if len(self) < len(other): # Fast check for obvious cases + return False + return all(item in self for item in other) + + def symmetric_difference(self, other): + """ + Return the symmetric difference of two OrderedSets as a new set. + That is, the new set will contain all elements that are in exactly + one of the sets. 
+ + Their order will be preserved, with elements from `self` preceding + elements from `other`. + + Example: + >>> this = OrderedSet([1, 4, 3, 5, 7]) + >>> other = OrderedSet([9, 7, 1, 3, 2]) + >>> this.symmetric_difference(other) + OrderedSet([4, 5, 9, 2]) + """ + cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet + diff1 = cls(self).difference(other) + diff2 = cls(other).difference(self) + return diff1.union(diff2) + + def _update_items(self, items): + """ + Replace the 'items' list of this OrderedSet with a new one, updating + self.map accordingly. + """ + self.items = items + self.map = {item: idx for (idx, item) in enumerate(items)} + + def difference_update(self, *sets): + """ + Update this OrderedSet to remove items from one or more other sets. + + Example: + >>> this = OrderedSet([1, 2, 3]) + >>> this.difference_update(OrderedSet([2, 4])) + >>> print(this) + OrderedSet([1, 3]) + + >>> this = OrderedSet([1, 2, 3, 4, 5]) + >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6])) + >>> print(this) + OrderedSet([3, 5]) + """ + items_to_remove = set() + for other in sets: + items_to_remove |= set(other) + self._update_items([item for item in self.items if item not in items_to_remove]) + + def intersection_update(self, other): + """ + Update this OrderedSet to keep only items in another set, preserving + their order in this set. + + Example: + >>> this = OrderedSet([1, 4, 3, 5, 7]) + >>> other = OrderedSet([9, 7, 1, 3, 2]) + >>> this.intersection_update(other) + >>> print(this) + OrderedSet([1, 3, 7]) + """ + other = set(other) + self._update_items([item for item in self.items if item in other]) + + def symmetric_difference_update(self, other): + """ + Update this OrderedSet to remove items from another set, then + add items from the other set that were not present in this set. + + Example: + >>> this = OrderedSet([1, 4, 3, 5, 7]) + >>> other = OrderedSet([9, 7, 1, 3, 2]) + >>> this.symmetric_difference_update(other) + >>> print(this) + OrderedSet([4, 5, 9, 2]) + """ + items_to_add = [item for item in other if item not in self] + items_to_remove = set(other) + self._update_items( + [item for item in self.items if item not in items_to_remove] + items_to_add + ) diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__about__.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/__about__.py new file mode 100644 index 00000000..4d998578 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/__about__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "20.4" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = "Copyright 2014-2019 %s" % __author__ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/__init__.py new file mode 100644 index 00000000..a0cf67df --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +from .__about__ import ( + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + __version__, +) + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc new file mode 100644 index 00000000..f286d2bf Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..2d5eacc4 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc new file mode 100644 index 00000000..663fa35b Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc new file mode 100644 index 00000000..b4cae413 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_typing.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_typing.cpython-36.pyc new file mode 100644 index 00000000..1ca5ab92 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_typing.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-36.pyc new file mode 100644 index 00000000..249706fd Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-36.pyc differ 
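The vendored `packaging` modules added further down in this diff (markers.py, requirements.py, specifiers.py) are what setuptools uses to evaluate PEP 508 environment markers and PEP 440 version specifiers. As a rough usage sketch of the `Marker` class defined in markers.py below: the import path here assumes the standalone `packaging` distribution from PyPI, since the vendored copy under `setuptools/_vendor` is normally reached through `setuptools.extern` rather than imported directly.

```python
# Usage sketch for packaging.markers.Marker (source appears below in this diff).
# Assumes the standalone `packaging` distribution is installed; the vendored
# copy shown here is normally imported via `setuptools.extern` instead.
from packaging.markers import Marker

marker = Marker('python_version >= "3.6" and os_name == "nt"')

# Evaluate against the current interpreter's environment...
print(marker.evaluate())
# ...or override part of the environment with an explicit dict.
print(marker.evaluate({"os_name": "posix", "python_version": "3.6"}))
```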
diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-36.pyc new file mode 100644 index 00000000..1b27bc3e Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc new file mode 100644 index 00000000..5d92e4ba Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-36.pyc new file mode 100644 index 00000000..506b715b Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-36.pyc new file mode 100644 index 00000000..de6a0144 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc new file mode 100644 index 00000000..269bcf3d Binary files /dev/null and b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/_compat.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/_compat.py new file mode 100644 index 00000000..e54bd4ed --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/_compat.py @@ -0,0 +1,38 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import sys + +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Dict, Tuple, Type + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = (str,) +else: + string_types = (basestring,) + + +def with_metaclass(meta, *bases): + # type: (Type[Any], Tuple[Type[Any], ...]) -> Any + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(meta): # type: ignore + def __new__(cls, name, this_bases, d): + # type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any + return meta(name, bases, d) + + return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/_structures.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/_structures.py new file mode 100644 index 00000000..800d5c55 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/_structures.py @@ -0,0 +1,86 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + + +class InfinityType(object): + def __repr__(self): + # type: () -> str + return "Infinity" + + def __hash__(self): + # type: () -> int + return hash(repr(self)) + + def __lt__(self, other): + # type: (object) -> bool + return False + + def __le__(self, other): + # type: (object) -> bool + return False + + def __eq__(self, other): + # type: (object) -> bool + return isinstance(other, self.__class__) + + def __ne__(self, other): + # type: (object) -> bool + return not isinstance(other, self.__class__) + + def __gt__(self, other): + # type: (object) -> bool + return True + + def __ge__(self, other): + # type: (object) -> bool + return True + + def __neg__(self): + # type: (object) -> NegativeInfinityType + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType(object): + def __repr__(self): + # type: () -> str + return "-Infinity" + + def __hash__(self): + # type: () -> int + return hash(repr(self)) + + def __lt__(self, other): + # type: (object) -> bool + return True + + def __le__(self, other): + # type: (object) -> bool + return True + + def __eq__(self, other): + # type: (object) -> bool + return isinstance(other, self.__class__) + + def __ne__(self, other): + # type: (object) -> bool + return not isinstance(other, self.__class__) + + def __gt__(self, other): + # type: (object) -> bool + return False + + def __ge__(self, other): + # type: (object) -> bool + return False + + def __neg__(self): + # type: (object) -> InfinityType + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/_typing.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/_typing.py new file mode 100644 index 00000000..77a8b918 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/_typing.py @@ -0,0 +1,48 @@ +"""For neatly implementing static typing in packaging. + +`mypy` - the static type analysis tool we use - uses the `typing` module, which +provides core functionality fundamental to mypy's functioning. + +Generally, `typing` would be imported at runtime and used in that fashion - +it acts as a no-op at runtime and does not have any run-time overhead by +design. + +As it turns out, `typing` is not vendorable - it uses separate sources for +Python 2/Python 3. Thus, this codebase can not expect it to be present. +To work around this, mypy allows the typing import to be behind a False-y +optional to prevent it from running at runtime and type-comments can be used +to remove the need for the types to be accessible directly during runtime. + +This module provides the False-y guard in a nicely named fashion so that a +curious maintainer can reach here to read this. 
+ +In packaging, all static-typing related imports should be guarded as follows: + + from packaging._typing import TYPE_CHECKING + + if TYPE_CHECKING: + from typing import ... + +Ref: https://github.com/python/mypy/issues/3216 +""" + +__all__ = ["TYPE_CHECKING", "cast"] + +# The TYPE_CHECKING constant defined by the typing module is False at runtime +# but True while type checking. +if False: # pragma: no cover + from typing import TYPE_CHECKING +else: + TYPE_CHECKING = False + +# typing's cast syntax requires calling typing.cast at runtime, but we don't +# want to import typing at runtime. Here, we inform the type checkers that +# we're importing `typing.cast` as `cast` and re-implement typing.cast's +# runtime behavior in a block that is ignored by type checkers. +if TYPE_CHECKING: # pragma: no cover + # not executed at runtime + from typing import cast +else: + # executed at runtime + def cast(type_, value): # noqa + return value diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/markers.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/markers.py new file mode 100644 index 00000000..03fbdfcc --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/markers.py @@ -0,0 +1,328 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import operator +import os +import platform +import sys + +from setuptools.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd +from setuptools.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString +from setuptools.extern.pyparsing import Literal as L # noqa + +from ._compat import string_types +from ._typing import TYPE_CHECKING +from .specifiers import Specifier, InvalidSpecifier + +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Callable, Dict, List, Optional, Tuple, Union + + Operator = Callable[[str, str], bool] + + +__all__ = [ + "InvalidMarker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "Marker", + "default_environment", +] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. 
+ """ + + +class Node(object): + def __init__(self, value): + # type: (Any) -> None + self.value = value + + def __str__(self): + # type: () -> str + return str(self.value) + + def __repr__(self): + # type: () -> str + return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + + def serialize(self): + # type: () -> str + raise NotImplementedError + + +class Variable(Node): + def serialize(self): + # type: () -> str + return str(self) + + +class Value(Node): + def serialize(self): + # type: () -> str + return '"{0}"'.format(self) + + +class Op(Node): + def serialize(self): + # type: () -> str + return str(self) + + +VARIABLE = ( + L("implementation_version") + | L("platform_python_implementation") + | L("implementation_name") + | L("python_full_version") + | L("platform_release") + | L("platform_version") + | L("platform_machine") + | L("platform_system") + | L("python_version") + | L("sys_platform") + | L("os_name") + | L("os.name") # PEP-345 + | L("sys.platform") # PEP-345 + | L("platform.version") # PEP-345 + | L("platform.machine") # PEP-345 + | L("platform.python_implementation") # PEP-345 + | L("python_implementation") # undocumented setuptools legacy + | L("extra") # PEP-508 +) +ALIASES = { + "os.name": "os_name", + "sys.platform": "sys_platform", + "platform.version": "platform_version", + "platform.machine": "platform_machine", + "platform.python_implementation": "platform_python_implementation", + "python_implementation": "platform_python_implementation", +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( + L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results): + # type: (Union[ParseResults, List[Any]]) -> List[Any] + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker(marker, first=True): + # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str + + assert isinstance(marker, (list, tuple, string_types)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+ if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} # type: Dict[str, Operator] + + +def _eval_op(lhs, op, rhs): + # type: (str, Op, str) -> bool + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + + oper = _operators.get(op.serialize()) # type: Optional[Operator] + if oper is None: + raise UndefinedComparison( + "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) + ) + + return oper(lhs, rhs) + + +class Undefined(object): + pass + + +_undefined = Undefined() + + +def _get_env(environment, name): + # type: (Dict[str, str], str) -> str + value = environment.get(name, _undefined) # type: Union[str, Undefined] + + if isinstance(value, Undefined): + raise UndefinedEnvironmentName( + "{0!r} does not exist in evaluation environment.".format(name) + ) + + return value + + +def _evaluate_markers(markers, environment): + # type: (List[Any], Dict[str, str]) -> bool + groups = [[]] # type: List[List[bool]] + + for marker in markers: + assert isinstance(marker, (list, tuple, string_types)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info): + # type: (sys._version_info) -> str + version = "{0.major}.{0.minor}.{0.micro}".format(info) + kind = info.releaselevel + if kind != "final": + version += kind[0] + str(info.serial) + return version + + +def default_environment(): + # type: () -> Dict[str, str] + if hasattr(sys, "implementation"): + # Ignoring the `sys.implementation` reference for type checking due to + # mypy not liking that the attribute doesn't exist in Python 2.7 when + # run with the `--py27` flag. 
+ iver = format_full_version(sys.implementation.version) # type: ignore + implementation_name = sys.implementation.name # type: ignore + else: + iver = "0" + implementation_name = "" + + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": ".".join(platform.python_version_tuple()[:2]), + "sys_platform": sys.platform, + } + + +class Marker(object): + def __init__(self, marker): + # type: (str) -> None + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( + marker, marker[e.loc : e.loc + 8] + ) + raise InvalidMarker(err_str) + + def __str__(self): + # type: () -> str + return _format_marker(self._markers) + + def __repr__(self): + # type: () -> str + return "<Marker({0!r})>".format(str(self)) + + def evaluate(self, environment=None): + # type: (Optional[Dict[str, str]]) -> bool + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. + + The environment is determined from the current Python process. + """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/requirements.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/requirements.py new file mode 100644 index 00000000..5d50c7d7 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/requirements.py @@ -0,0 +1,145 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import string +import re + +from setuptools.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException +from setuptools.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine +from setuptools.extern.pyparsing import Literal as L # noqa +from urllib import parse as urlparse + +from ._typing import TYPE_CHECKING +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + +if TYPE_CHECKING: # pragma: no cover + from typing import List + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508.
+ """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r"[^ ]+")("url") +URL = AT + URI + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine( + VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False +)("_raw_spec") +_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start : t._original_end]) +) +MARKER_SEPARATOR = SEMICOLON +MARKER = MARKER_SEPARATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd +# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see +# issue #104 +REQUIREMENT.parseString("x[]") + + +class Requirement(object): + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
+ def __init__(self, requirement_string): + # type: (str) -> None + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + raise InvalidRequirement( + 'Parse error at "{0!r}": {1}'.format( + requirement_string[e.loc : e.loc + 8], e.msg + ) + ) + + self.name = req.name + if req.url: + parsed_url = urlparse.urlparse(req.url) + if parsed_url.scheme == "file": + if urlparse.urlunparse(parsed_url) != req.url: + raise InvalidRequirement("Invalid URL given") + elif not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc + ): + raise InvalidRequirement("Invalid URL: {0}".format(req.url)) + self.url = req.url + else: + self.url = None + self.extras = set(req.extras.asList() if req.extras else []) + self.specifier = SpecifierSet(req.specifier) + self.marker = req.marker if req.marker else None + + def __str__(self): + # type: () -> str + parts = [self.name] # type: List[str] + + if self.extras: + parts.append("[{0}]".format(",".join(sorted(self.extras)))) + + if self.specifier: + parts.append(str(self.specifier)) + + if self.url: + parts.append("@ {0}".format(self.url)) + if self.marker: + parts.append(" ") + + if self.marker: + parts.append("; {0}".format(self.marker)) + + return "".join(parts) + + def __repr__(self): + # type: () -> str + return "<Requirement({0!r})>".format(str(self)) diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py new file mode 100644 index 00000000..fe09bb1d --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py @@ -0,0 +1,863 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from ._typing import TYPE_CHECKING +from .utils import canonicalize_version +from .version import Version, LegacyVersion, parse + +if TYPE_CHECKING: # pragma: no cover + from typing import ( + List, + Dict, + Union, + Iterable, + Iterator, + Optional, + Callable, + Tuple, + FrozenSet, + ) + + ParsedVersion = Union[Version, LegacyVersion] + UnparsedVersion = Union[Version, LegacyVersion, str] + CallableOperator = Callable[[ParsedVersion, str], bool] + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): # type: ignore + @abc.abstractmethod + def __str__(self): + # type: () -> str + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + # type: () -> int + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + # type: (object) -> bool + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + # type: (object) -> bool + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + # type: () -> Optional[bool] + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier.
+ """ + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + # type: (str, Optional[bool]) -> bool + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} # type: Dict[str, str] + + def __init__(self, spec="", prereleases=None): + # type: (str, Optional[bool]) -> None + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) # type: Tuple[str, str] + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + # type: () -> str + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) + + def __str__(self): + # type: () -> str + return "{0}{1}".format(*self._spec) + + @property + def _canonical_spec(self): + # type: () -> Tuple[str, Union[Version, str]] + return self._spec[0], canonicalize_version(self._spec[1]) + + def __hash__(self): + # type: () -> int + return hash(self._canonical_spec) + + def __eq__(self, other): + # type: (object) -> bool + if isinstance(other, string_types): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def __ne__(self, other): + # type: (object) -> bool + if isinstance(other, string_types): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + # type: (str) -> CallableOperator + operator_callable = getattr( + self, "_compare_{0}".format(self._operators[op]) + ) # type: CallableOperator + return operator_callable + + def _coerce_version(self, version): + # type: (UnparsedVersion) -> ParsedVersion + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + # type: () -> str + return self._spec[0] + + @property + def version(self): + # type: () -> str + return self._spec[1] + + @property + def prereleases(self): + # type: () -> Optional[bool] + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + self._prereleases = value + + def __contains__(self, item): + # type: (str) -> bool + return self.contains(item) + + def contains(self, item, prereleases=None): + # type: (UnparsedVersion, Optional[bool]) -> bool + + # Determine if prereleases are to be allowed or not. 
+ if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2")`` + normalized_item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases then we can short circuit + # logic if this version is a prerelease. + if normalized_item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + operator_callable = self._get_operator(self.operator) # type: CallableOperator + return operator_callable(normalized_item, self.version) + + def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later in case nothing + # else matches this specifier. + if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning. + else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex_str = r""" + (?P<operator>(==|!=|<=|>=|<|>)) + \s* + (?P<version> + [^,;\s)]* # Since this is a "legacy" specifier, and the version + # string can be just about anything, we match everything + # except for whitespace, a semi-colon for marker support, + # a closing paren since versions can be enclosed in + # them, and a comma since it's a version separator.
+ ) + """ + + _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + # type: (Union[ParsedVersion, str]) -> LegacyVersion + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool + return prospective > self._coerce_version(spec) + + +def _require_version_compare( + fn # type: (Callable[[Specifier, ParsedVersion, str], bool]) +): + # type: (...) -> Callable[[Specifier, ParsedVersion, str], bool] + @functools.wraps(fn) + def wrapped(self, prospective, spec): + # type: (Specifier, ParsedVersion, str) -> bool + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex_str = r""" + (?P(~=|==|!=|<=|>=|<|>|===)) + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? 
# dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore post and dev releases and we want to treat the pre-release as + # it's own separate segment. + prefix = ".".join( + list( + itertools.takewhile( + lambda x: (not x.startswith("post") and not x.startswith("dev")), + _version_split(spec), + ) + )[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + @_require_version_compare + def _compare_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + prospective = Version(prospective.public) + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. + split_spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + split_prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = split_prospective[: len(split_spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + padded_spec, padded_prospective = _pad_version( + split_spec, shortened_prospective + ) + + return padded_prospective == padded_spec + else: + # Convert our spec string into a Version + spec_version = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec_version.local: + prospective = Version(prospective.public) + + return prospective == spec_version + + @_require_version_compare + def _compare_not_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. 
+ return Version(prospective.public) <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + # type: (Version, str) -> bool + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # type: () -> bool + + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. 
+ if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + # type: (str) -> List[str] + result = [] # type: List[str] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + # type: (List[str], List[str]) -> Tuple[List[str], List[str]] + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) + + +class SpecifierSet(BaseSpecifier): + def __init__(self, specifiers="", prereleases=None): + # type: (str, Optional[bool]) -> None + + # Split on , to break each individual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in split_specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + # type: () -> str + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "".format(str(self), pre) + + def __str__(self): + # type: () -> str + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + # type: () -> int + return hash(self._specs) + + def __and__(self, other): + # type: (Union[SpecifierSet, str]) -> SpecifierSet + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+ ) + + return specifier + + def __eq__(self, other): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + # type: () -> int + return len(self._specs) + + def __iter__(self): + # type: () -> Iterator[FrozenSet[_IndividualSpecifier]] + return iter(self._specs) + + @property + def prereleases(self): + # type: () -> Optional[bool] + + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + # type: (bool) -> None + self._prereleases = value + + def __contains__(self, item): + # type: (Union[ParsedVersion, str]) -> bool + return self.contains(item) + + def contains(self, item, prereleases=None): + # type: (Union[ParsedVersion, str], Optional[bool]) -> bool + + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all(s.contains(item, prereleases=prereleases) for s in self._specs) + + def filter( + self, + iterable, # type: Iterable[Union[ParsedVersion, str]] + prereleases=None, # type: Optional[bool] + ): + # type: (...) -> Iterable[Union[ParsedVersion, str]] + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. 
+ if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. + else: + filtered = [] # type: List[Union[ParsedVersion, str]] + found_prereleases = [] # type: List[Union[ParsedVersion, str]] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/tags.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/tags.py new file mode 100644 index 00000000..9064910b --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/tags.py @@ -0,0 +1,751 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import + +import distutils.util + +try: + from importlib.machinery import EXTENSION_SUFFIXES +except ImportError: # pragma: no cover + import imp + + EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] + del imp +import logging +import os +import platform +import re +import struct +import sys +import sysconfig +import warnings + +from ._typing import TYPE_CHECKING, cast + +if TYPE_CHECKING: # pragma: no cover + from typing import ( + Dict, + FrozenSet, + IO, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + ) + + PythonVersion = Sequence[int] + MacVersion = Tuple[int, int] + GlibcVersion = Tuple[int, int] + + +logger = logging.getLogger(__name__) + +INTERPRETER_SHORT_NAMES = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} # type: Dict[str, str] + + +_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + + +class Tag(object): + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. 
+ """ + + __slots__ = ["_interpreter", "_abi", "_platform"] + + def __init__(self, interpreter, abi, platform): + # type: (str, str, str) -> None + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + + @property + def interpreter(self): + # type: () -> str + return self._interpreter + + @property + def abi(self): + # type: () -> str + return self._abi + + @property + def platform(self): + # type: () -> str + return self._platform + + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self.platform == other.platform) + and (self.abi == other.abi) + and (self.interpreter == other.interpreter) + ) + + def __hash__(self): + # type: () -> int + return hash((self._interpreter, self._abi, self._platform)) + + def __str__(self): + # type: () -> str + return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) + + def __repr__(self): + # type: () -> str + return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) + + +def parse_tag(tag): + # type: (str) -> FrozenSet[Tag] + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _warn_keyword_parameter(func_name, kwargs): + # type: (str, Dict[str, bool]) -> bool + """ + Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only. + """ + if not kwargs: + return False + elif len(kwargs) > 1 or "warn" not in kwargs: + kwargs.pop("warn", None) + arg = next(iter(kwargs.keys())) + raise TypeError( + "{}() got an unexpected keyword argument {!r}".format(func_name, arg) + ) + return kwargs["warn"] + + +def _get_config_var(name, warn=False): + # type: (str, bool) -> Union[int, str, None] + value = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string): + # type: (str) -> str + return string.replace(".", "_").replace("-", "_") + + +def _abi3_applies(python_version): + # type: (PythonVersion) -> bool + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) + + +def _cpython_abis(py_version, warn=False): + # type: (PythonVersion, bool) -> List[str] + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. 
+ # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append("cp{version}".format(version=version)) + abis.insert( + 0, + "cp{version}{debug}{pymalloc}{ucs4}".format( + version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 + ), + ) + return abis + + +def cpython_tags( + python_version=None, # type: Optional[PythonVersion] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + warn = _warn_keyword_parameter("cpython_tags", kwargs) + if not python_version: + python_version = sys.version_info[:2] + + interpreter = "cp{}".format(_version_nodot(python_version[:2])) + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. + for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or _platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + if _abi3_applies(python_version): + for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): + yield tag + for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): + yield tag + + if _abi3_applies(python_version): + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{version}".format( + version=_version_nodot((python_version[0], minor_version)) + ) + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi(): + # type: () -> Iterator[str] + abi = sysconfig.get_config_var("SOABI") + if abi: + yield _normalize_string(abi) + + +def generic_tags( + interpreter=None, # type: Optional[str] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. 
+ """ + warn = _warn_keyword_parameter("generic_tags", kwargs) + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + platforms = list(platforms or _platform_tags()) + abis = list(abis) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version): + # type: (PythonVersion) -> Iterator[str] + """ + Yields Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. + """ + if len(py_version) > 1: + yield "py{version}".format(version=_version_nodot(py_version[:2])) + yield "py{major}".format(major=py_version[0]) + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield "py{version}".format(version=_version_nodot((py_version[0], minor))) + + +def compatible_tags( + python_version=None, # type: Optional[PythonVersion] + interpreter=None, # type: Optional[str] + platforms=None, # type: Optional[Iterable[str]] +): + # type: (...) -> Iterator[Tag] + """ + Yields the sequence of tags that are compatible with a specific version of Python. + + The tags consist of: + - py*-none- + - -none-any # ... if `interpreter` is provided. + - py*-none-any + """ + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or _platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): + # type: (str, bool) -> str + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version, cpu_arch): + # type: (MacVersion, str) -> List[str] + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? + if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + formats.append("universal") + return formats + + +def mac_platforms(version=None, arch=None): + # type: (Optional[MacVersion], Optional[str]) -> Iterator[str] + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. 
+ """ + version_str, _, cpu_arch = platform.mac_ver() # type: ignore + if version is None: + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + else: + version = version + if arch is None: + arch = _mac_arch(cpu_arch) + else: + arch = arch + for minor_version in range(version[1], -1, -1): + compat_version = version[0], minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + + +# From PEP 513. +def _is_manylinux_compatible(name, glibc_version): + # type: (str, GlibcVersion) -> bool + # Check for presence of _manylinux module. + try: + import _manylinux # noqa + + return bool(getattr(_manylinux, name + "_compatible")) + except (ImportError, AttributeError): + # Fall through to heuristic check below. + pass + + return _have_compatible_glibc(*glibc_version) + + +def _glibc_version_string(): + # type: () -> Optional[str] + # Returns glibc version string, or None if not using glibc. + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _glibc_version_string_confstr(): + # type: () -> Optional[str] + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". + version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821 + "CS_GNU_LIBC_VERSION" + ) + assert version_string is not None + _, version = version_string.split() # type: Tuple[str, str] + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes(): + # type: () -> Optional[str] + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # Note: typeshed is wrong here so we are ignoring this line. + process_namespace = ctypes.CDLL(None) # type: ignore + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() # type: str + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# Separated out from have_compatible_glibc for easier unit testing. +def _check_glibc_version(version_str, required_major, minimum_minor): + # type: (str, int, int) -> bool + # Parse string and check against requested version. 
+ # + # We use a regexp instead of str.split because we want to discard any + # random junk that might come after the minor version -- this might happen + # in patched/forked versions of glibc (e.g. Linaro's version of glibc + # uses version strings like "2.20-2014.11"). See gh-3588. + m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) + if not m: + warnings.warn( + "Expected glibc version with 2 components major.minor," + " got: %s" % version_str, + RuntimeWarning, + ) + return False + return ( + int(m.group("major")) == required_major + and int(m.group("minor")) >= minimum_minor + ) + + +def _have_compatible_glibc(required_major, minimum_minor): + # type: (int, int) -> bool + version_str = _glibc_version_string() + if version_str is None: + return False + return _check_glibc_version(version_str, required_major, minimum_minor) + + +# Python does not provide platform information at sufficient granularity to +# identify the architecture of the running executable in some cases, so we +# determine it dynamically by reading the information from the running +# process. This only applies on Linux, which uses the ELF format. +class _ELFFileHeader(object): + # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header + class _InvalidELFFileHeader(ValueError): + """ + An invalid ELF file header was found. + """ + + ELF_MAGIC_NUMBER = 0x7F454C46 + ELFCLASS32 = 1 + ELFCLASS64 = 2 + ELFDATA2LSB = 1 + ELFDATA2MSB = 2 + EM_386 = 3 + EM_S390 = 22 + EM_ARM = 40 + EM_X86_64 = 62 + EF_ARM_ABIMASK = 0xFF000000 + EF_ARM_ABI_VER5 = 0x05000000 + EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + def __init__(self, file): + # type: (IO[bytes]) -> None + def unpack(fmt): + # type: (str) -> int + try: + (result,) = struct.unpack( + fmt, file.read(struct.calcsize(fmt)) + ) # type: (int, ) + except struct.error: + raise _ELFFileHeader._InvalidELFFileHeader() + return result + + self.e_ident_magic = unpack(">I") + if self.e_ident_magic != self.ELF_MAGIC_NUMBER: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_class = unpack("B") + if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_data = unpack("B") + if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_version = unpack("B") + self.e_ident_osabi = unpack("B") + self.e_ident_abiversion = unpack("B") + self.e_ident_pad = file.read(7) + format_h = "H" + format_i = "I" + format_q = "Q" + format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q + self.e_type = unpack(format_h) + self.e_machine = unpack(format_h) + self.e_version = unpack(format_i) + self.e_entry = unpack(format_p) + self.e_phoff = unpack(format_p) + self.e_shoff = unpack(format_p) + self.e_flags = unpack(format_i) + self.e_ehsize = unpack(format_h) + self.e_phentsize = unpack(format_h) + self.e_phnum = unpack(format_h) + self.e_shentsize = unpack(format_h) + self.e_shnum = unpack(format_h) + self.e_shstrndx = unpack(format_h) + + +def _get_elf_header(): + # type: () -> Optional[_ELFFileHeader] + try: + with open(sys.executable, "rb") as f: + elf_header = _ELFFileHeader(f) + except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): + return None + return elf_header + + +def _is_linux_armhf(): + # type: () -> bool + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + elf_header = _get_elf_header() + if elf_header is None: + 
return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_ARM + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABIMASK + ) == elf_header.EF_ARM_ABI_VER5 + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD + ) == elf_header.EF_ARM_ABI_FLOAT_HARD + return result + + +def _is_linux_i686(): + # type: () -> bool + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_386 + return result + + +def _have_compatible_manylinux_abi(arch): + # type: (str) -> bool + if arch == "armv7l": + return _is_linux_armhf() + if arch == "i686": + return _is_linux_i686() + return True + + +def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): + # type: (bool) -> Iterator[str] + linux = _normalize_string(distutils.util.get_platform()) + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv7l" + manylinux_support = [] + _, arch = linux.split("_", 1) + if _have_compatible_manylinux_abi(arch): + if arch in {"x86_64", "i686", "aarch64", "armv7l", "ppc64", "ppc64le", "s390x"}: + manylinux_support.append( + ("manylinux2014", (2, 17)) + ) # CentOS 7 w/ glibc 2.17 (PEP 599) + if arch in {"x86_64", "i686"}: + manylinux_support.append( + ("manylinux2010", (2, 12)) + ) # CentOS 6 w/ glibc 2.12 (PEP 571) + manylinux_support.append( + ("manylinux1", (2, 5)) + ) # CentOS 5 w/ glibc 2.5 (PEP 513) + manylinux_support_iter = iter(manylinux_support) + for name, glibc_version in manylinux_support_iter: + if _is_manylinux_compatible(name, glibc_version): + yield linux.replace("linux", name) + break + # Support for a later manylinux implies support for an earlier version. + for name, _ in manylinux_support_iter: + yield linux.replace("linux", name) + yield linux + + +def _generic_platforms(): + # type: () -> Iterator[str] + yield _normalize_string(distutils.util.get_platform()) + + +def _platform_tags(): + # type: () -> Iterator[str] + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name(): + # type: () -> str + """ + Returns the name of the running interpreter. + """ + try: + name = sys.implementation.name # type: ignore + except AttributeError: # pragma: no cover + # Python 2.7 compatibility. + name = platform.python_implementation().lower() + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(**kwargs): + # type: (bool) -> str + """ + Returns the version of the running interpreter. + """ + warn = _warn_keyword_parameter("interpreter_version", kwargs) + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = _version_nodot(sys.version_info[:2]) + return version + + +def _version_nodot(version): + # type: (PythonVersion) -> str + if any(v >= 10 for v in version): + sep = "_" + else: + sep = "" + return sep.join(map(str, version)) + + +def sys_tags(**kwargs): + # type: (bool) -> Iterator[Tag] + """ + Returns the sequence of tag triples for the running interpreter. 
+ + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ + warn = _warn_keyword_parameter("sys_tags", kwargs) + + interp_name = interpreter_name() + if interp_name == "cp": + for tag in cpython_tags(warn=warn): + yield tag + else: + for tag in generic_tags(): + yield tag + + for tag in compatible_tags(): + yield tag diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/utils.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/utils.py new file mode 100644 index 00000000..19579c1a --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/utils.py @@ -0,0 +1,65 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import re + +from ._typing import TYPE_CHECKING, cast +from .version import InvalidVersion, Version + +if TYPE_CHECKING: # pragma: no cover + from typing import NewType, Union + + NormalizedName = NewType("NormalizedName", str) + +_canonicalize_regex = re.compile(r"[-_.]+") + + +def canonicalize_name(name): + # type: (str) -> NormalizedName + # This is taken from PEP 503. + value = _canonicalize_regex.sub("-", name).lower() + return cast("NormalizedName", value) + + +def canonicalize_version(_version): + # type: (str) -> Union[Version, str] + """ + This is very similar to Version.__str__, but has one subtle difference + with the way it handles the release segment. + """ + + try: + version = Version(_version) + except InvalidVersion: + # Legacy versions cannot be normalized + return _version + + parts = [] + + # Epoch + if version.epoch != 0: + parts.append("{0}!".format(version.epoch)) + + # Release segment + # NB: This strips trailing '.0's to normalize + parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release))) + + # Pre-release + if version.pre is not None: + parts.append("".join(str(x) for x in version.pre)) + + # Post-release + if version.post is not None: + parts.append(".post{0}".format(version.post)) + + # Development release + if version.dev is not None: + parts.append(".dev{0}".format(version.dev)) + + # Local version segment + if version.local is not None: + parts.append("+{0}".format(version.local)) + + return "".join(parts) diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/version.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/version.py new file mode 100644 index 00000000..00371e86 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/_vendor/packaging/version.py @@ -0,0 +1,535 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+from __future__ import absolute_import, division, print_function + +import collections +import itertools +import re + +from ._structures import Infinity, NegativeInfinity +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union + + from ._structures import InfinityType, NegativeInfinityType + + InfiniteTypes = Union[InfinityType, NegativeInfinityType] + PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] + SubLocalType = Union[InfiniteTypes, int, str] + LocalType = Union[ + NegativeInfinityType, + Tuple[ + Union[ + SubLocalType, + Tuple[SubLocalType, str], + Tuple[NegativeInfinityType, SubLocalType], + ], + ..., + ], + ] + CmpKey = Tuple[ + int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType + ] + LegacyCmpKey = Tuple[int, Tuple[str, ...]] + VersionComparisonMethod = Callable[ + [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool + ] + +__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] + + +_Version = collections.namedtuple( + "_Version", ["epoch", "release", "dev", "pre", "post", "local"] +) + + +def parse(version): + # type: (str) -> Union[LegacyVersion, Version] + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. + """ + + +class _BaseVersion(object): + _key = None # type: Union[CmpKey, LegacyCmpKey] + + def __hash__(self): + # type: () -> int + return hash(self._key) + + def __lt__(self, other): + # type: (_BaseVersion) -> bool + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + # type: (_BaseVersion) -> bool + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + # type: (object) -> bool + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + # type: (_BaseVersion) -> bool + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + # type: (_BaseVersion) -> bool + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + # type: (object) -> bool + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + # type: (object, VersionComparisonMethod) -> Union[bool, NotImplemented] + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + def __init__(self, version): + # type: (str) -> None + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + # type: () -> str + return self._version + + def __repr__(self): + # type: () -> str + return "".format(repr(str(self))) + + @property + def public(self): + # type: () -> str + return self._version + + @property + def base_version(self): + # type: () -> str + return self._version + + @property + def epoch(self): + # type: () -> int + return -1 + + @property + def release(self): + # type: () -> None + return None + + @property + def pre(self): + # type: () -> None + return None + + @property + def post(self): + # type: () -> None + return None + + @property + def dev(self): + # type: () -> None + return None + + @property + def 
local(self): + # type: () -> None + return None + + @property + def is_prerelease(self): + # type: () -> bool + return False + + @property + def is_postrelease(self): + # type: () -> bool + return False + + @property + def is_devrelease(self): + # type: () -> bool + return False + + +_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) + +_legacy_version_replacement_map = { + "pre": "c", + "preview": "c", + "-": "final-", + "rc": "c", + "dev": "@", +} + + +def _parse_version_parts(s): + # type: (str) -> Iterator[str] + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # type: (str) -> LegacyCmpKey + + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] # type: List[str] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + + return epoch, tuple(parts) + + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    def __init__(self, version):
+        # type: (str) -> None
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self):
+        # type: () -> str
+        return "".format(repr(str(self)))
+
+    def __str__(self):
+        # type: () -> str
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(".post{0}".format(self.post))
+
+        # Development release
+        if self.dev is not None:
+            parts.append(".dev{0}".format(self.dev))
+
+        # Local version segment
+        if self.local is not None:
+            parts.append("+{0}".format(self.local))
+
+        return "".join(parts)
+
+    @property
+    def epoch(self):
+        # type: () -> int
+        _epoch = self._version.epoch  # type: int
+        return _epoch
+
+    @property
+    def release(self):
+        # type: () -> Tuple[int, ...]
+        _release = self._version.release  # type: Tuple[int, ...]
+        return _release
+
+    @property
+    def pre(self):
+        # type: () -> Optional[Tuple[str, int]]
+        _pre = self._version.pre  # type: Optional[Tuple[str, int]]
+        return _pre
+
+    @property
+    def post(self):
+        # type: () -> Optional[int]
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self):
+        # type: () -> Optional[int]
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self):
+        # type: () -> Optional[str]
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self):
+        # type: () -> str
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self):
+        # type: () -> str
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self):
+        # type: () -> bool
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self):
+        # type: () -> bool
+        return self.post is not None
+
+    @property
+    def is_devrelease(self):
+        # type: () -> bool
+        return self.dev is not None
+
+    @property
+    def major(self):
+        # type: () -> int
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self):
+        # type: () -> int
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self):
+        # type: () -> int
+        return self.release[2] if len(self.release) >= 3 else 0
+
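+# A short, illustrative sketch of the Version API defined above (PEP 440
+# ordering and the derived properties); the inputs are arbitrary examples:
+#
+#     >>> v = Version("1.2.3.post1+local.7")
+#     >>> (v.major, v.minor, v.micro)
+#     (1, 2, 3)
+#     >>> v.public, v.base_version, v.local
+#     ('1.2.3.post1', '1.2.3', 'local.7')
+#     >>> Version("1.0") == Version("1.0.0")      # trailing zeros are ignored
+#     True
+#     >>> Version("1.3.0.dev1").is_prerelease
+#     True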
+
+def _parse_letter_version(
+    letter,  # type: str
+    number,  # type: Union[str, bytes, SupportsInt]
+):
+    # type: (...) -> Optional[Tuple[str, int]]
+
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
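+# A brief sketch of the normalization performed above, observable through the
+# Version properties (the example version strings are arbitrary):
+#
+#     >>> Version("1.0alpha1").pre        # "alpha" is normalized to "a"
+#     ('a', 1)
+#     >>> Version("1.0-2").post           # implicit post release syntax
+#     2
+#     >>> _parse_letter_version("rev", None)
+#     ('post', 0)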
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+    # type: (str) -> Optional[LocalType]
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
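+# Small sketch: local segments are split on ".", "_" and "-", numeric parts
+# become integers, and the Version.local property re-joins them with "."
+# (arbitrary example input):
+#
+#     >>> _parse_local_version("Ubuntu-1")
+#     ('ubuntu', 1)
+#     >>> Version("1.0+ubuntu-1").local
+#     'ubuntu.1'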
+
+
+def _cmpkey(
+    epoch,  # type: int
+    release,  # type: Tuple[int, ...]
+    pre,  # type: Optional[Tuple[str, int]]
+    post,  # type: Optional[Tuple[str, int]]
+    dev,  # type: Optional[Tuple[str, int]]
+    local,  # type: Optional[Tuple[SubLocalType]]
+):
+    # type: (...) -> CmpKey
+
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we reverse the list, drop all of the now
+    # leading zeros until we come to something non-zero, then re-reverse the
+    # rest back into the correct order and make it a tuple to use as our
+    # sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre = NegativeInfinity  # type: PrePostDevType
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post = NegativeInfinity  # type: PrePostDevType
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev = Infinity  # type: PrePostDevType
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local = NegativeInfinity  # type: LocalType
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
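+# An illustrative ordering check for the key constructed above (inputs are
+# arbitrary example versions): dev releases sort before pre-releases of the
+# same version, final releases sit between pre- and post-releases, and a
+# local label sorts last.
+#
+#     >>> sorted(
+#     ...     ["1.0.post1+local", "1.0.dev0", "1.0", "1.0a1", "1.0.post1"],
+#     ...     key=Version,
+#     ... )
+#     ['1.0.dev0', '1.0a1', '1.0', '1.0.post1', '1.0.post1+local']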
diff --git a/venv/Lib/site-packages/setuptools/_vendor/pyparsing.py b/venv/Lib/site-packages/setuptools/_vendor/pyparsing.py
new file mode 100644
index 00000000..1333c00e
--- /dev/null
+++ b/venv/Lib/site-packages/setuptools/_vendor/pyparsing.py
@@ -0,0 +1,5742 @@
+# module pyparsing.py
+#
+# Copyright (c) 2003-2018  Paul T. McGuire
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = \
+"""
+pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
+
+The pyparsing module is an alternative approach to creating and executing simple grammars,
+vs. the traditional lex/yacc approach, or the use of regular expressions.  With pyparsing, you
+don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
+provides a library of classes that you use to construct the grammar directly in Python.
+
+Here is a program to parse "Hello, World!" (or any greeting of the form 
+C{", !"}), built up using L{Word}, L{Literal}, and L{And} elements 
+(L{'+'} operator gives L{And} expressions, strings are auto-converted to
+L{Literal} expressions)::
+
+    from pyparsing import Word, alphas
+
+    # define grammar of a greeting
+    greet = Word(alphas) + "," + Word(alphas) + "!"
+
+    hello = "Hello, World!"
+    print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+    Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the self-explanatory
+class names, and the use of '+', '|' and '^' operators.
+
+The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an
+object with named attributes.
+
+The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
+ - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello  ,  World  !", etc.)
+ - quoted strings
+ - embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+ - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
+ - construct character word-group expressions using the L{Word} class
+ - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
+ - use L{'+'}, L{'|'}, L{'^'}, and L{'&'} operators to combine simple expressions into more complex ones
+ - associate names with your parsed results using L{ParserElement.setResultsName}
+ - find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
+ - find more useful common expressions in the L{pyparsing_common} namespace class
+"""
+
+__version__ = "2.2.1"
+__versionTime__ = "18 Sep 2018 00:49 UTC"
+__author__ = "Paul McGuire "
+
+import string
+from weakref import ref as wkref
+import copy
+import sys
+import warnings
+import re
+import sre_constants
+import collections
+import pprint
+import traceback
+import types
+from datetime import datetime
+
+try:
+    from _thread import RLock
+except ImportError:
+    from threading import RLock
+
+try:
+    # Python 3
+    from collections.abc import Iterable
+    from collections.abc import MutableMapping
+except ImportError:
+    # Python 2.7
+    from collections import Iterable
+    from collections import MutableMapping
+
+try:
+    from collections import OrderedDict as _OrderedDict
+except ImportError:
+    try:
+        from ordereddict import OrderedDict as _OrderedDict
+    except ImportError:
+        _OrderedDict = None
+
+#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
+
+__all__ = [
+'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
+'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
+'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
+'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
+'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
+'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 
+'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
+'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
+'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
+'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
+'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
+'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
+'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
+'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', 
+'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
+'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
+'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
+'CloseMatch', 'tokenMap', 'pyparsing_common',
+]
+
+system_version = tuple(sys.version_info)[:3]
+PY_3 = system_version[0] == 3
+if PY_3:
+    _MAX_INT = sys.maxsize
+    basestring = str
+    unichr = chr
+    _ustr = str
+
+    # build list of single arg builtins, that can be used as parse actions
+    singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
+
+else:
+    _MAX_INT = sys.maxint
+    range = xrange
+
+    def _ustr(obj):
+        """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
+           str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
+           then < returns the unicode object | encodes it with the default encoding | ... >.
+        """
+        if isinstance(obj,unicode):
+            return obj
+
+        try:
+            # If this works, then _ustr(obj) has the same behaviour as str(obj), so
+            # it won't break any existing code.
+            return str(obj)
+
+        except UnicodeEncodeError:
+            # Else encode it
+            ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
+            xmlcharref = Regex(r'&#\d+;')
+            xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
+            return xmlcharref.transformString(ret)
+
+    # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
+    singleArgBuiltins = []
+    import __builtin__
+    for fname in "sum len sorted reversed list tuple set any all min max".split():
+        try:
+            singleArgBuiltins.append(getattr(__builtin__,fname))
+        except AttributeError:
+            continue
+            
+_generatorType = type((y for y in range(1)))
+ 
+def _xml_escape(data):
+    """Escape &, <, >, ", ', etc. in a string of data."""
+
+    # ampersand must be replaced first
+    from_symbols = '&><"\''
+    to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
+    for from_,to_ in zip(from_symbols, to_symbols):
+        data = data.replace(from_, to_)
+    return data
+
+class _Constants(object):
+    pass
+
+alphas     = string.ascii_uppercase + string.ascii_lowercase
+nums       = "0123456789"
+hexnums    = nums + "ABCDEFabcdef"
+alphanums  = alphas + nums
+_bslash    = chr(92)
+printables = "".join(c for c in string.printable if c not in string.whitespace)
+
+class ParseBaseException(Exception):
+    """base exception class for all parsing runtime exceptions"""
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__( self, pstr, loc=0, msg=None, elem=None ):
+        self.loc = loc
+        if msg is None:
+            self.msg = pstr
+            self.pstr = ""
+        else:
+            self.msg = msg
+            self.pstr = pstr
+        self.parserElement = elem
+        self.args = (pstr, loc, msg)
+
+    @classmethod
+    def _from_exception(cls, pe):
+        """
+        internal factory method to simplify creating one type of ParseException 
+        from another - avoids having __init__ signature conflicts among subclasses
+        """
+        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
+
+    def __getattr__( self, aname ):
+        """supported attributes by name are:
+            - lineno - returns the line number of the exception text
+            - col - returns the column number of the exception text
+            - line - returns the line containing the exception text
+        """
+        if( aname == "lineno" ):
+            return lineno( self.loc, self.pstr )
+        elif( aname in ("col", "column") ):
+            return col( self.loc, self.pstr )
+        elif( aname == "line" ):
+            return line( self.loc, self.pstr )
+        else:
+            raise AttributeError(aname)
+
+    def __str__( self ):
+        return "%s (at char %d), (line:%d, col:%d)" % \
+                ( self.msg, self.loc, self.lineno, self.column )
+    def __repr__( self ):
+        return _ustr(self)
+    def markInputline( self, markerString = ">!<" ):
+        """Extracts the exception line from the input string, and marks
+           the location of the exception with a special symbol.
+        """
+        line_str = self.line
+        line_column = self.column - 1
+        if markerString:
+            line_str = "".join((line_str[:line_column],
+                                markerString, line_str[line_column:]))
+        return line_str.strip()
+    def __dir__(self):
+        return "lineno col line".split() + dir(type(self))
+
+class ParseException(ParseBaseException):
+    """
+    Exception thrown when parse expressions don't match class;
+    supported attributes by name are:
+     - lineno - returns the line number of the exception text
+     - col - returns the column number of the exception text
+     - line - returns the line containing the exception text
+        
+    Example::
+        try:
+            Word(nums).setName("integer").parseString("ABC")
+        except ParseException as pe:
+            print(pe)
+            print("column: {}".format(pe.col))
+            
+    prints::
+       Expected integer (at char 0), (line:1, col:1)
+        column: 1
+    """
+    pass
+
+class ParseFatalException(ParseBaseException):
+    """user-throwable exception thrown when inconsistent parse content
+       is found; stops all parsing immediately"""
+    pass
+
+class ParseSyntaxException(ParseFatalException):
+    """just like L{ParseFatalException}, but thrown internally when an
+       L{ErrorStop} ('-' operator) indicates that parsing is to stop 
+       immediately because an unbacktrackable syntax error has been found"""
+    pass
+
+#~ class ReparseException(ParseBaseException):
+    #~ """Experimental class - parse actions can raise this exception to cause
+       #~ pyparsing to reparse the input string:
+        #~ - with a modified input string, and/or
+        #~ - with a modified start location
+       #~ Set the values of the ReparseException in the constructor, and raise the
+       #~ exception in a parse action to cause pyparsing to use the new string/location.
+       #~ Setting the values as None causes no change to be made.
+       #~ """
+    #~ def __init_( self, newstring, restartLoc ):
+        #~ self.newParseText = newstring
+        #~ self.reparseLoc = restartLoc
+
+class RecursiveGrammarException(Exception):
+    """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive"""
+    def __init__( self, parseElementList ):
+        self.parseElementTrace = parseElementList
+
+    def __str__( self ):
+        return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+class _ParseResultsWithOffset(object):
+    def __init__(self,p1,p2):
+        self.tup = (p1,p2)
+    def __getitem__(self,i):
+        return self.tup[i]
+    def __repr__(self):
+        return repr(self.tup[0])
+    def setOffset(self,i):
+        self.tup = (self.tup[0],i)
+
+class ParseResults(object):
+    """
+    Structured parse results, to provide multiple means of access to the parsed data:
+       - as a list (C{len(results)})
+       - by list index (C{results[0], results[1]}, etc.)
+       - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})
+
+    Example::
+        integer = Word(nums)
+        date_str = (integer.setResultsName("year") + '/' 
+                        + integer.setResultsName("month") + '/' 
+                        + integer.setResultsName("day"))
+        # equivalent form:
+        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+        # parseString returns a ParseResults object
+        result = date_str.parseString("1999/12/31")
+
+        def test(s, fn=repr):
+            print("%s -> %s" % (s, fn(eval(s))))
+        test("list(result)")
+        test("result[0]")
+        test("result['month']")
+        test("result.day")
+        test("'month' in result")
+        test("'minutes' in result")
+        test("result.dump()", str)
+    prints::
+        list(result) -> ['1999', '/', '12', '/', '31']
+        result[0] -> '1999'
+        result['month'] -> '12'
+        result.day -> '31'
+        'month' in result -> True
+        'minutes' in result -> False
+        result.dump() -> ['1999', '/', '12', '/', '31']
+        - day: 31
+        - month: 12
+        - year: 1999
+    """
+    def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
+        if isinstance(toklist, cls):
+            return toklist
+        retobj = object.__new__(cls)
+        retobj.__doinit = True
+        return retobj
+
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
+        if self.__doinit:
+            self.__doinit = False
+            self.__name = None
+            self.__parent = None
+            self.__accumNames = {}
+            self.__asList = asList
+            self.__modal = modal
+            if toklist is None:
+                toklist = []
+            if isinstance(toklist, list):
+                self.__toklist = toklist[:]
+            elif isinstance(toklist, _generatorType):
+                self.__toklist = list(toklist)
+            else:
+                self.__toklist = [toklist]
+            self.__tokdict = dict()
+
+        if name is not None and name:
+            if not modal:
+                self.__accumNames[name] = 0
+            if isinstance(name,int):
+                name = _ustr(name) # will always return a str, but use _ustr for consistency
+            self.__name = name
+            if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
+                if isinstance(toklist,basestring):
+                    toklist = [ toklist ]
+                if asList:
+                    if isinstance(toklist,ParseResults):
+                        self[name] = _ParseResultsWithOffset(toklist.copy(),0)
+                    else:
+                        self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
+                    self[name].__name = name
+                else:
+                    try:
+                        self[name] = toklist[0]
+                    except (KeyError,TypeError,IndexError):
+                        self[name] = toklist
+
+    def __getitem__( self, i ):
+        if isinstance( i, (int,slice) ):
+            return self.__toklist[i]
+        else:
+            if i not in self.__accumNames:
+                return self.__tokdict[i][-1][0]
+            else:
+                return ParseResults([ v[0] for v in self.__tokdict[i] ])
+
+    def __setitem__( self, k, v, isinstance=isinstance ):
+        if isinstance(v,_ParseResultsWithOffset):
+            self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
+            sub = v[0]
+        elif isinstance(k,(int,slice)):
+            self.__toklist[k] = v
+            sub = v
+        else:
+            self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
+            sub = v
+        if isinstance(sub,ParseResults):
+            sub.__parent = wkref(self)
+
+    def __delitem__( self, i ):
+        if isinstance(i,(int,slice)):
+            mylen = len( self.__toklist )
+            del self.__toklist[i]
+
+            # convert int to slice
+            if isinstance(i, int):
+                if i < 0:
+                    i += mylen
+                i = slice(i, i+1)
+            # get removed indices
+            removed = list(range(*i.indices(mylen)))
+            removed.reverse()
+            # fixup indices in token dictionary
+            for name,occurrences in self.__tokdict.items():
+                for j in removed:
+                    for k, (value, position) in enumerate(occurrences):
+                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
+        else:
+            del self.__tokdict[i]
+
+    def __contains__( self, k ):
+        return k in self.__tokdict
+
+    def __len__( self ): return len( self.__toklist )
+    def __bool__(self): return ( not not self.__toklist )
+    __nonzero__ = __bool__
+    def __iter__( self ): return iter( self.__toklist )
+    def __reversed__( self ): return iter( self.__toklist[::-1] )
+    def _iterkeys( self ):
+        if hasattr(self.__tokdict, "iterkeys"):
+            return self.__tokdict.iterkeys()
+        else:
+            return iter(self.__tokdict)
+
+    def _itervalues( self ):
+        return (self[k] for k in self._iterkeys())
+            
+    def _iteritems( self ):
+        return ((k, self[k]) for k in self._iterkeys())
+
+    if PY_3:
+        keys = _iterkeys       
+        """Returns an iterator of all named result keys (Python 3.x only)."""
+
+        values = _itervalues
+        """Returns an iterator of all named result values (Python 3.x only)."""
+
+        items = _iteritems
+        """Returns an iterator of all named result key-value tuples (Python 3.x only)."""
+
+    else:
+        iterkeys = _iterkeys
+        """Returns an iterator of all named result keys (Python 2.x only)."""
+
+        itervalues = _itervalues
+        """Returns an iterator of all named result values (Python 2.x only)."""
+
+        iteritems = _iteritems
+        """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
+
+        def keys( self ):
+            """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iterkeys())
+
+        def values( self ):
+            """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.itervalues())
+                
+        def items( self ):
+            """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iteritems())
+
+    def haskeys( self ):
+        """Since keys() returns an iterator, this method is helpful in bypassing
+           code that looks for the existence of any defined results names."""
+        return bool(self.__tokdict)
+        
+    def pop( self, *args, **kwargs):
+        """
+        Removes and returns item at specified index (default=C{last}).
+        Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
+        argument or an integer argument, it will use C{list} semantics
+        and pop tokens from the list of parsed tokens. If passed a 
+        non-integer argument (most likely a string), it will use C{dict}
+        semantics and pop the corresponding value from any defined 
+        results names. A second default return value argument is 
+        supported, just as in C{dict.pop()}.
+
+        Example::
+            def remove_first(tokens):
+                tokens.pop(0)
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
+
+            label = Word(alphas)
+            patt = label("LABEL") + OneOrMore(Word(nums))
+            print(patt.parseString("AAB 123 321").dump())
+
+            # Use pop() in a parse action to remove named result (note that corresponding value is not
+            # removed from list form of results)
+            def remove_LABEL(tokens):
+                tokens.pop("LABEL")
+                return tokens
+            patt.addParseAction(remove_LABEL)
+            print(patt.parseString("AAB 123 321").dump())
+        prints::
+            ['AAB', '123', '321']
+            - LABEL: AAB
+
+            ['AAB', '123', '321']
+        """
+        if not args:
+            args = [-1]
+        for k,v in kwargs.items():
+            if k == 'default':
+                args = (args[0], v)
+            else:
+                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
+        if (isinstance(args[0], int) or 
+                        len(args) == 1 or 
+                        args[0] in self):
+            index = args[0]
+            ret = self[index]
+            del self[index]
+            return ret
+        else:
+            defaultvalue = args[1]
+            return defaultvalue
+
+    def get(self, key, defaultValue=None):
+        """
+        Returns named result matching the given key, or if there is no
+        such name, then returns the given C{defaultValue} or C{None} if no
+        C{defaultValue} is specified.
+
+        Similar to C{dict.get()}.
+        
+        Example::
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
+
+            result = date_str.parseString("1999/12/31")
+            print(result.get("year")) # -> '1999'
+            print(result.get("hour", "not specified")) # -> 'not specified'
+            print(result.get("hour")) # -> None
+        """
+        if key in self:
+            return self[key]
+        else:
+            return defaultValue
+
+    def insert( self, index, insStr ):
+        """
+        Inserts new element at location index in the list of parsed tokens.
+        
+        Similar to C{list.insert()}.
+
+        Example::
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to insert the parse location in the front of the parsed results
+            def insert_locn(locn, tokens):
+                tokens.insert(0, locn)
+            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+        """
+        self.__toklist.insert(index, insStr)
+        # fixup indices in token dictionary
+        for name,occurrences in self.__tokdict.items():
+            for k, (value, position) in enumerate(occurrences):
+                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+    def append( self, item ):
+        """
+        Add single element to end of ParseResults list of elements.
+
+        Example::
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+            
+            # use a parse action to compute the sum of the parsed integers, and add it to the end
+            def append_sum(tokens):
+                tokens.append(sum(map(int, tokens)))
+            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+        """
+        self.__toklist.append(item)
+
+    def extend( self, itemseq ):
+        """
+        Add sequence of elements to end of ParseResults list of elements.
+
+        Example::
+            patt = OneOrMore(Word(alphas))
+            
+            # use a parse action to append the reverse of the matched strings, to make a palindrome
+            def make_palindrome(tokens):
+                tokens.extend(reversed([t[::-1] for t in tokens]))
+                return ''.join(tokens)
+            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+        """
+        if isinstance(itemseq, ParseResults):
+            self += itemseq
+        else:
+            self.__toklist.extend(itemseq)
+
+    def clear( self ):
+        """
+        Clear all elements and results names.
+        """
+        del self.__toklist[:]
+        self.__tokdict.clear()
+
+    def __getattr__( self, name ):
+        try:
+            return self[name]
+        except KeyError:
+            return ""
+            
+        if name in self.__tokdict:
+            if name not in self.__accumNames:
+                return self.__tokdict[name][-1][0]
+            else:
+                return ParseResults([ v[0] for v in self.__tokdict[name] ])
+        else:
+            return ""
+
+    def __add__( self, other ):
+        ret = self.copy()
+        ret += other
+        return ret
+
+    def __iadd__( self, other ):
+        if other.__tokdict:
+            offset = len(self.__toklist)
+            addoffset = lambda a: offset if a<0 else a+offset
+            otheritems = other.__tokdict.items()
+            otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
+                                for (k,vlist) in otheritems for v in vlist]
+            for k,v in otherdictitems:
+                self[k] = v
+                if isinstance(v[0],ParseResults):
+                    v[0].__parent = wkref(self)
+            
+        self.__toklist += other.__toklist
+        self.__accumNames.update( other.__accumNames )
+        return self
+
+    def __radd__(self, other):
+        if isinstance(other,int) and other == 0:
+            # useful for merging many ParseResults using sum() builtin
+            return self.copy()
+        else:
+            # this may raise a TypeError - so be it
+            return other + self
+        
+    def __repr__( self ):
+        return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
+
+    def __str__( self ):
+        return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+    def _asStringList( self, sep='' ):
+        out = []
+        for item in self.__toklist:
+            if out and sep:
+                out.append(sep)
+            if isinstance( item, ParseResults ):
+                out += item._asStringList()
+            else:
+                out.append( _ustr(item) )
+        return out
+
+    def asList( self ):
+        """
+        Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+        Example::
+            patt = OneOrMore(Word(alphas))
+            result = patt.parseString("sldkj lsdkj sldkj")
+            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+            
+            # Use asList() to create an actual list
+            result_list = result.asList()
+            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+        """
+        return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
+
+    def asDict( self ):
+        """
+        Returns the named parse results as a nested dictionary.
+
+        Example::
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+            
+            result = date_str.parseString('12/31/1999')
+            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+            
+            result_dict = result.asDict()
+            print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+            # even though a ParseResults supports dict-like access, sometime you just need to have a dict
+            import json
+            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+        """
+        if PY_3:
+            item_fn = self.items
+        else:
+            item_fn = self.iteritems
+            
+        def toItem(obj):
+            if isinstance(obj, ParseResults):
+                if obj.haskeys():
+                    return obj.asDict()
+                else:
+                    return [toItem(v) for v in obj]
+            else:
+                return obj
+                
+        return dict((k,toItem(v)) for k,v in item_fn())
+
+    def copy( self ):
+        """
+        Returns a new copy of a C{ParseResults} object.
+        """
+        ret = ParseResults( self.__toklist )
+        ret.__tokdict = self.__tokdict.copy()
+        ret.__parent = self.__parent
+        ret.__accumNames.update( self.__accumNames )
+        ret.__name = self.__name
+        return ret
+
+    def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
+        """
+        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
+        """
+        nl = "\n"
+        out = []
+        namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
+                                                            for v in vlist)
+        nextLevelIndent = indent + "  "
+
+        # collapse out indents if formatting is not desired
+        if not formatted:
+            indent = ""
+            nextLevelIndent = ""
+            nl = ""
+
+        selfTag = None
+        if doctag is not None:
+            selfTag = doctag
+        else:
+            if self.__name:
+                selfTag = self.__name
+
+        if not selfTag:
+            if namedItemsOnly:
+                return ""
+            else:
+                selfTag = "ITEM"
+
+        out += [ nl, indent, "<", selfTag, ">" ]
+
+        for i,res in enumerate(self.__toklist):
+            if isinstance(res,ParseResults):
+                if i in namedItems:
+                    out += [ res.asXML(namedItems[i],
+                                        namedItemsOnly and doctag is None,
+                                        nextLevelIndent,
+                                        formatted)]
+                else:
+                    out += [ res.asXML(None,
+                                        namedItemsOnly and doctag is None,
+                                        nextLevelIndent,
+                                        formatted)]
+            else:
+                # individual token, see if there is a name for it
+                resTag = None
+                if i in namedItems:
+                    resTag = namedItems[i]
+                if not resTag:
+                    if namedItemsOnly:
+                        continue
+                    else:
+                        resTag = "ITEM"
+                xmlBodyText = _xml_escape(_ustr(res))
+                out += [ nl, nextLevelIndent, "<", resTag, ">",
+                                                xmlBodyText,
+                                                "</", resTag, ">" ]
+
+        out += [ nl, indent, "</", selfTag, ">" ]
+        return "".join(out)
+
+    def __lookup(self,sub):
+        for k,vlist in self.__tokdict.items():
+            for v,loc in vlist:
+                if sub is v:
+                    return k
+        return None
+
+    def getName(self):
+        r"""
+        Returns the results name for this token expression. Useful when several 
+        different expressions might match at a particular location.
+
+        Example::
+            integer = Word(nums)
+            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+            house_number_expr = Suppress('#') + Word(nums, alphanums)
+            user_data = (Group(house_number_expr)("house_number") 
+                        | Group(ssn_expr)("ssn")
+                        | Group(integer)("age"))
+            user_info = OneOrMore(user_data)
+            
+            result = user_info.parseString("22 111-22-3333 #221B")
+            for item in result:
+                print(item.getName(), ':', item[0])
+        prints::
+            age : 22
+            ssn : 111-22-3333
+            house_number : 221B
+        """
+        if self.__name:
+            return self.__name
+        elif self.__parent:
+            par = self.__parent()
+            if par:
+                return par.__lookup(self)
+            else:
+                return None
+        elif (len(self) == 1 and
+               len(self.__tokdict) == 1 and
+               next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
+            return next(iter(self.__tokdict.keys()))
+        else:
+            return None
+
+    def dump(self, indent='', depth=0, full=True):
+        """
+        Diagnostic method for listing out the contents of a C{ParseResults}.
+        Accepts an optional C{indent} argument so that this string can be embedded
+        in a nested display of other data.
+
+        Example::
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+            
+            result = date_str.parseString('12/31/1999')
+            print(result.dump())
+        prints::
+            ['12', '/', '31', '/', '1999']
+            - day: 1999
+            - month: 31
+            - year: 12
+        """
+        out = []
+        NL = '\n'
+        out.append( indent+_ustr(self.asList()) )
+        if full:
+            if self.haskeys():
+                items = sorted((str(k), v) for k,v in self.items())
+                for k,v in items:
+                    if out:
+                        out.append(NL)
+                    out.append( "%s%s- %s: " % (indent,('  '*depth), k) )
+                    if isinstance(v,ParseResults):
+                        if v:
+                            out.append( v.dump(indent,depth+1) )
+                        else:
+                            out.append(_ustr(v))
+                    else:
+                        out.append(repr(v))
+            elif any(isinstance(vv,ParseResults) for vv in self):
+                v = self
+                for i,vv in enumerate(v):
+                    if isinstance(vv,ParseResults):
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),vv.dump(indent,depth+1) ))
+                    else:
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),_ustr(vv)))
+            
+        return "".join(out)
+
+    def pprint(self, *args, **kwargs):
+        """
+        Pretty-printer for parsed results as a list, using the C{pprint} module.
+        Accepts additional positional or keyword args as defined for the 
+        C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})
+
+        Example::
+            ident = Word(alphas, alphanums)
+            num = Word(nums)
+            func = Forward()
+            term = ident | num | Group('(' + func + ')')
+            func <<= ident + Group(Optional(delimitedList(term)))
+            result = func.parseString("fna a,b,(fnb c,d,200),100")
+            result.pprint(width=40)
+        prints::
+            ['fna',
+             ['a',
+              'b',
+              ['(', 'fnb', ['c', 'd', '200'], ')'],
+              '100']]
+        """
+        pprint.pprint(self.asList(), *args, **kwargs)
+
+    # add support for pickle protocol
+    def __getstate__(self):
+        return ( self.__toklist,
+                 ( self.__tokdict.copy(),
+                   self.__parent is not None and self.__parent() or None,
+                   self.__accumNames,
+                   self.__name ) )
+
+    def __setstate__(self,state):
+        self.__toklist = state[0]
+        (self.__tokdict,
+         par,
+         inAccumNames,
+         self.__name) = state[1]
+        self.__accumNames = {}
+        self.__accumNames.update(inAccumNames)
+        if par is not None:
+            self.__parent = wkref(par)
+        else:
+            self.__parent = None
+
+    def __getnewargs__(self):
+        return self.__toklist, self.__name, self.__asList, self.__modal
+
+    def __dir__(self):
+        return (dir(type(self)) + list(self.keys()))
+
+MutableMapping.register(ParseResults)
+
+def col (loc,strg):
+    """Returns current column within a string, counting newlines as line separators.
+   The first column is number 1.
+
+   Note: the default parsing behavior is to expand tabs in the input string
+   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+   consistent view of the parsed string, the parse location, and line and column
+   positions within the parsed string.
+   """
+    s = strg
+    return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
+
+def lineno(loc,strg):
+    """Returns current line number within a string, counting newlines as line separators.
+   The first line is number 1.
+
+   Note: the default parsing behavior is to expand tabs in the input string
+   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+   consistent view of the parsed string, the parse location, and line and column
+   positions within the parsed string.
+   """
+    return strg.count("\n",0,loc) + 1
+
+def line( loc, strg ):
+    """Returns the line of text containing loc within a string, counting newlines as line separators.
+       """
+    lastCR = strg.rfind("\n", 0, loc)
+    nextCR = strg.find("\n", loc)
+    if nextCR >= 0:
+        return strg[lastCR+1:nextCR]
+    else:
+        return strg[lastCR+1:]
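+
+# Editor's illustrative note (not upstream): a quick worked example of the three
+# helpers above, using data = "ab\ncd" - locations are 0-based, rows/cols 1-based:
+#     col(4, data)    -> 2      ('d' is the 2nd column of its line)
+#     lineno(4, data) -> 2      (one newline precedes location 4)
+#     line(4, data)   -> 'cd'   (text of the line containing location 4)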
+
+def _defaultStartDebugAction( instring, loc, expr ):
+    print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))
+
+def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
+    print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
+
+def _defaultExceptionDebugAction( instring, loc, expr, exc ):
+    print ("Exception raised:" + _ustr(exc))
+
+def nullDebugAction(*args):
+    """'Do-nothing' debug action, to suppress debugging output during parsing."""
+    pass
+
+# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
+#~ 'decorator to trim function calls to match the arity of the target'
+#~ def _trim_arity(func, maxargs=3):
+    #~ if func in singleArgBuiltins:
+        #~ return lambda s,l,t: func(t)
+    #~ limit = 0
+    #~ foundArity = False
+    #~ def wrapper(*args):
+        #~ nonlocal limit,foundArity
+        #~ while 1:
+            #~ try:
+                #~ ret = func(*args[limit:])
+                #~ foundArity = True
+                #~ return ret
+            #~ except TypeError:
+                #~ if limit == maxargs or foundArity:
+                    #~ raise
+                #~ limit += 1
+                #~ continue
+    #~ return wrapper
+
+# this version is Python 2.x-3.x cross-compatible
+'decorator to trim function calls to match the arity of the target'
+def _trim_arity(func, maxargs=2):
+    if func in singleArgBuiltins:
+        return lambda s,l,t: func(t)
+    limit = [0]
+    foundArity = [False]
+    
+    # traceback return data structure changed in Py3.5 - normalize back to plain tuples
+    if system_version[:2] >= (3,5):
+        def extract_stack(limit=0):
+            # special handling for Python 3.5.0 - extra deep call stack by 1
+            offset = -3 if system_version == (3,5,0) else -2
+            frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
+            return [frame_summary[:2]]
+        def extract_tb(tb, limit=0):
+            frames = traceback.extract_tb(tb, limit=limit)
+            frame_summary = frames[-1]
+            return [frame_summary[:2]]
+    else:
+        extract_stack = traceback.extract_stack
+        extract_tb = traceback.extract_tb
+    
+    # synthesize what would be returned by traceback.extract_stack at the call to 
+    # user's parse action 'func', so that we don't incur call penalty at parse time
+    
+    LINE_DIFF = 6
+    # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND 
+    # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
+    this_line = extract_stack(limit=2)[-1]
+    pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)
+
+    def wrapper(*args):
+        while 1:
+            try:
+                ret = func(*args[limit[0]:])
+                foundArity[0] = True
+                return ret
+            except TypeError:
+                # re-raise TypeErrors if they did not come from our arity testing
+                if foundArity[0]:
+                    raise
+                else:
+                    try:
+                        tb = sys.exc_info()[-1]
+                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
+                            raise
+                    finally:
+                        del tb
+
+                if limit[0] <= maxargs:
+                    limit[0] += 1
+                    continue
+                raise
+
+    # copy func name to wrapper for sensible debug output
+    func_name = ""
+    try:
+        func_name = getattr(func, '__name__', 
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    wrapper.__name__ = func_name
+
+    return wrapper
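+
+# Editor's note (not upstream): the wrapper above simply retries the call with fewer
+# leading arguments until it stops raising TypeError, which is what lets parse
+# actions be written as fn(s,loc,toks), fn(loc,toks), fn(toks) or fn(); e.g.
+#     _trim_arity(lambda t: int(t[0]))(instring, loc, tokens)
+# ends up invoking the lambda with just the tokens argument.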
+
+class ParserElement(object):
+    """Abstract base level parser element class."""
+    DEFAULT_WHITE_CHARS = " \n\t\r"
+    verbose_stacktrace = False
+
+    @staticmethod
+    def setDefaultWhitespaceChars( chars ):
+        r"""
+        Overrides the default whitespace chars
+
+        Example::
+            # default whitespace chars are space, <TAB> and newline
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
+            
+            # change to just treat newline as significant
+            ParserElement.setDefaultWhitespaceChars(" \t")
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
+        """
+        ParserElement.DEFAULT_WHITE_CHARS = chars
+
+    @staticmethod
+    def inlineLiteralsUsing(cls):
+        """
+        Set class to be used for inclusion of string literals into a parser.
+        
+        Example::
+            # default literal class used is Literal
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+
+            # change to Suppress
+            ParserElement.inlineLiteralsUsing(Suppress)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
+        """
+        ParserElement._literalStringClass = cls
+
+    def __init__( self, savelist=False ):
+        self.parseAction = list()
+        self.failAction = None
+        #~ self.name = ""  # don't define self.name, let subclasses try/except upcall
+        self.strRepr = None
+        self.resultsName = None
+        self.saveAsList = savelist
+        self.skipWhitespace = True
+        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+        self.copyDefaultWhiteChars = True
+        self.mayReturnEmpty = False # used when checking for left-recursion
+        self.keepTabs = False
+        self.ignoreExprs = list()
+        self.debug = False
+        self.streamlined = False
+        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
+        self.errmsg = ""
+        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
+        self.debugActions = ( None, None, None ) #custom debug actions
+        self.re = None
+        self.callPreparse = True # used to avoid redundant calls to preParse
+        self.callDuringTry = False
+
+    def copy( self ):
+        """
+        Make a copy of this C{ParserElement}.  Useful for defining different parse actions
+        for the same parsing pattern, using copies of the original parse element.
+        
+        Example::
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
+            integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+            
+            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+        prints::
+            [5120, 100, 655360, 268435456]
+        Equivalent form of C{expr.copy()} is just C{expr()}::
+            integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+        """
+        cpy = copy.copy( self )
+        cpy.parseAction = self.parseAction[:]
+        cpy.ignoreExprs = self.ignoreExprs[:]
+        if self.copyDefaultWhiteChars:
+            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+        return cpy
+
+    def setName( self, name ):
+        """
+        Define name for this expression, makes debugging and exception messages clearer.
+        
+        Example::
+            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
+        """
+        self.name = name
+        self.errmsg = "Expected " + self.name
+        if hasattr(self,"exception"):
+            self.exception.msg = self.errmsg
+        return self
+
+    def setResultsName( self, name, listAllMatches=False ):
+        """
+        Define name for referencing matching tokens as a nested attribute
+        of the returned parse results.
+        NOTE: this returns a *copy* of the original C{ParserElement} object;
+        this is so that the client can define a basic element, such as an
+        integer, and reference it in multiple places with different names.
+
+        You can also set results names using the abbreviated syntax,
+        C{expr("name")} in place of C{expr.setResultsName("name")} - 
+        see L{I{__call__}<__call__>}.
+
+        Example::
+            date_str = (integer.setResultsName("year") + '/' 
+                        + integer.setResultsName("month") + '/' 
+                        + integer.setResultsName("day"))
+
+            # equivalent form:
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+        """
+        newself = self.copy()
+        if name.endswith("*"):
+            name = name[:-1]
+            listAllMatches=True
+        newself.resultsName = name
+        newself.modalResults = not listAllMatches
+        return newself
+
+    def setBreak(self,breakFlag = True):
+        """Method to invoke the Python pdb debugger when this element is
+           about to be parsed. Set C{breakFlag} to True to enable, False to
+           disable.
+        """
+        if breakFlag:
+            _parseMethod = self._parse
+            def breaker(instring, loc, doActions=True, callPreParse=True):
+                import pdb
+                pdb.set_trace()
+                return _parseMethod( instring, loc, doActions, callPreParse )
+            breaker._originalParseMethod = _parseMethod
+            self._parse = breaker
+        else:
+            if hasattr(self._parse,"_originalParseMethod"):
+                self._parse = self._parse._originalParseMethod
+        return self
+
+    def setParseAction( self, *fns, **kwargs ):
+        """
+        Define one or more actions to perform when successfully matching parse element definition.
+        Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
+        C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
+         - s   = the original string being parsed (see note below)
+         - loc = the location of the matching substring
+         - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
+        If the functions in fns modify the tokens, they can return them as the return
+        value from fn, and the modified list of tokens will replace the original.
+        Otherwise, fn does not need to return any value.
+
+        Optional keyword arguments:
+         - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
+
+        Note: the default parsing behavior is to expand tabs in the input string
+        before starting the parsing process.  See L{I{parseString}<parseString>} for more information
+        on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+        consistent view of the parsed string, the parse location, and line and column
+        positions within the parsed string.
+        
+        Example::
+            integer = Word(nums)
+            date_str = integer + '/' + integer + '/' + integer
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+            # use parse action to convert to ints at parse time
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            date_str = integer + '/' + integer + '/' + integer
+
+            # note that integer fields are now ints, not strings
+            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
+        """
+        self.parseAction = list(map(_trim_arity, list(fns)))
+        self.callDuringTry = kwargs.get("callDuringTry", False)
+        return self
+
+    def addParseAction( self, *fns, **kwargs ):
+        """
+        Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}}.
+        
+        See examples in L{I{copy}}.
+        """
+        self.parseAction += list(map(_trim_arity, list(fns)))
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def addCondition(self, *fns, **kwargs):
+        """Add a boolean predicate function to expression's list of parse actions. See 
+        L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, 
+        functions passed to C{addCondition} need to return boolean success/fail of the condition.
+
+        Optional keyword arguments:
+         - message = define a custom message to be used in the raised exception
+         - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+         
+        Example::
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            year_int = integer.copy()
+            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+            date_str = year_int + '/' + integer + '/' + integer
+
+            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+        """
+        msg = kwargs.get("message", "failed user-defined condition")
+        exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
+        for fn in fns:
+            def pa(s,l,t):
+                if not bool(_trim_arity(fn)(s,l,t)):
+                    raise exc_type(s,l,msg)
+            self.parseAction.append(pa)
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def setFailAction( self, fn ):
+        """Define action to perform if parsing fails at this expression.
+           Fail acton fn is a callable function that takes the arguments
+           C{fn(s,loc,expr,err)} where:
+            - s = string being parsed
+            - loc = location where expression match was attempted and failed
+            - expr = the parse expression that failed
+            - err = the exception thrown
+           The function returns no value.  It may throw C{L{ParseFatalException}}
+           if it is desired to stop parsing immediately."""
+        self.failAction = fn
+        return self
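+
+    # Editor's illustrative sketch (not upstream): a fail action only observes the
+    # failure before the exception propagates, e.g.
+    #     def log_failure(s, loc, expr, err):
+    #         print("no %s at char %d" % (expr, loc))
+    #     integer = Word(nums).setFailAction(log_failure)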
+
+    def _skipIgnorables( self, instring, loc ):
+        exprsFound = True
+        while exprsFound:
+            exprsFound = False
+            for e in self.ignoreExprs:
+                try:
+                    while 1:
+                        loc,dummy = e._parse( instring, loc )
+                        exprsFound = True
+                except ParseException:
+                    pass
+        return loc
+
+    def preParse( self, instring, loc ):
+        if self.ignoreExprs:
+            loc = self._skipIgnorables( instring, loc )
+
+        if self.skipWhitespace:
+            wt = self.whiteChars
+            instrlen = len(instring)
+            while loc < instrlen and instring[loc] in wt:
+                loc += 1
+
+        return loc
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        return loc, []
+
+    def postParse( self, instring, loc, tokenlist ):
+        return tokenlist
+
+    #~ @profile
+    def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
+        debugging = ( self.debug ) #and doActions )
+
+        if debugging or self.failAction:
+            #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
+            if (self.debugActions[0] ):
+                self.debugActions[0]( instring, loc, self )
+            if callPreParse and self.callPreparse:
+                preloc = self.preParse( instring, loc )
+            else:
+                preloc = loc
+            tokensStart = preloc
+            try:
+                try:
+                    loc,tokens = self.parseImpl( instring, preloc, doActions )
+                except IndexError:
+                    raise ParseException( instring, len(instring), self.errmsg, self )
+            except ParseBaseException as err:
+                #~ print ("Exception raised:", err)
+                if self.debugActions[2]:
+                    self.debugActions[2]( instring, tokensStart, self, err )
+                if self.failAction:
+                    self.failAction( instring, tokensStart, self, err )
+                raise
+        else:
+            if callPreParse and self.callPreparse:
+                preloc = self.preParse( instring, loc )
+            else:
+                preloc = loc
+            tokensStart = preloc
+            if self.mayIndexError or preloc >= len(instring):
+                try:
+                    loc,tokens = self.parseImpl( instring, preloc, doActions )
+                except IndexError:
+                    raise ParseException( instring, len(instring), self.errmsg, self )
+            else:
+                loc,tokens = self.parseImpl( instring, preloc, doActions )
+
+        tokens = self.postParse( instring, loc, tokens )
+
+        retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
+        if self.parseAction and (doActions or self.callDuringTry):
+            if debugging:
+                try:
+                    for fn in self.parseAction:
+                        tokens = fn( instring, tokensStart, retTokens )
+                        if tokens is not None:
+                            retTokens = ParseResults( tokens,
+                                                      self.resultsName,
+                                                      asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+                                                      modal=self.modalResults )
+                except ParseBaseException as err:
+                    #~ print "Exception raised in user parse action:", err
+                    if (self.debugActions[2] ):
+                        self.debugActions[2]( instring, tokensStart, self, err )
+                    raise
+            else:
+                for fn in self.parseAction:
+                    tokens = fn( instring, tokensStart, retTokens )
+                    if tokens is not None:
+                        retTokens = ParseResults( tokens,
+                                                  self.resultsName,
+                                                  asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+                                                  modal=self.modalResults )
+        if debugging:
+            #~ print ("Matched",self,"->",retTokens.asList())
+            if (self.debugActions[1] ):
+                self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
+
+        return loc, retTokens
+
+    def tryParse( self, instring, loc ):
+        try:
+            return self._parse( instring, loc, doActions=False )[0]
+        except ParseFatalException:
+            raise ParseException( instring, loc, self.errmsg, self)
+    
+    def canParseNext(self, instring, loc):
+        try:
+            self.tryParse(instring, loc)
+        except (ParseException, IndexError):
+            return False
+        else:
+            return True
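+
+    # Editor's illustrative note (not upstream): canParseNext is a non-consuming
+    # probe - it reports whether this expression would match at loc without
+    # advancing or raising, e.g.
+    #     Literal("abc").canParseNext("xabc", 1)  # -> True
+    #     Literal("abc").canParseNext("xabc", 0)  # -> False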
+
+    class _UnboundedCache(object):
+        def __init__(self):
+            cache = {}
+            self.not_in_cache = not_in_cache = object()
+
+            def get(self, key):
+                return cache.get(key, not_in_cache)
+
+            def set(self, key, value):
+                cache[key] = value
+
+            def clear(self):
+                cache.clear()
+                
+            def cache_len(self):
+                return len(cache)
+
+            self.get = types.MethodType(get, self)
+            self.set = types.MethodType(set, self)
+            self.clear = types.MethodType(clear, self)
+            self.__len__ = types.MethodType(cache_len, self)
+
+    if _OrderedDict is not None:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = _OrderedDict()
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(cache) > size:
+                        try:
+                            cache.popitem(False)
+                        except KeyError:
+                            pass
+
+                def clear(self):
+                    cache.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
+    else:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = {}
+                key_fifo = collections.deque([], size)
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(key_fifo) > size:
+                        cache.pop(key_fifo.popleft(), None)
+                    key_fifo.append(key)
+
+                def clear(self):
+                    cache.clear()
+                    key_fifo.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
+    # argument cache for optimizing repeated calls when backtracking through recursive expressions
+    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
+    packrat_cache_lock = RLock()
+    packrat_cache_stats = [0, 0]
+
+    # this method gets repeatedly called during backtracking with the same arguments -
+    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
+    def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
+        HIT, MISS = 0, 1
+        lookup = (self, instring, loc, callPreParse, doActions)
+        with ParserElement.packrat_cache_lock:
+            cache = ParserElement.packrat_cache
+            value = cache.get(lookup)
+            if value is cache.not_in_cache:
+                ParserElement.packrat_cache_stats[MISS] += 1
+                try:
+                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
+                except ParseBaseException as pe:
+                    # cache a copy of the exception, without the traceback
+                    cache.set(lookup, pe.__class__(*pe.args))
+                    raise
+                else:
+                    cache.set(lookup, (value[0], value[1].copy()))
+                    return value
+            else:
+                ParserElement.packrat_cache_stats[HIT] += 1
+                if isinstance(value, Exception):
+                    raise value
+                return (value[0], value[1].copy())
+
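+    # expressions parse without memoization by default; enablePackrat() rebinds
+    # _parse to _parseCache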
+    _parse = _parseNoCache
+
+    @staticmethod
+    def resetCache():
+        ParserElement.packrat_cache.clear()
+        ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
+    _packratEnabled = False
+    @staticmethod
+    def enablePackrat(cache_size_limit=128):
+        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
+           Repeated parse attempts at the same string location (which happens
+           often in many complex grammars) can immediately return a cached value,
+           instead of re-executing parsing/validating code.  Memoizing is done for
+           both valid results and parsing exceptions.
+           
+           Parameters:
+            - cache_size_limit - (default=C{128}) - if an integer value is provided
+              will limit the size of the packrat cache; if None is passed, then
+              the cache size will be unbounded; if 0 is passed, the cache will
+              be effectively disabled.
+            
+           This speedup may break existing programs that use parse actions that
+           have side-effects.  For this reason, packrat parsing is disabled when
+           you first import pyparsing.  To activate the packrat feature, your
+           program must call the class method C{ParserElement.enablePackrat()}.  If
+           your program uses C{psyco} to "compile as you go", you must call
+           C{enablePackrat} before calling C{psyco.full()}.  If you do not do this,
+           Python will crash.  For best results, call C{enablePackrat()} immediately
+           after importing pyparsing.
+           
+           Example::
+               import pyparsing
+               pyparsing.ParserElement.enablePackrat()
+        """
+        if not ParserElement._packratEnabled:
+            ParserElement._packratEnabled = True
+            if cache_size_limit is None:
+                ParserElement.packrat_cache = ParserElement._UnboundedCache()
+            else:
+                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+            ParserElement._parse = ParserElement._parseCache
+
+    def parseString( self, instring, parseAll=False ):
+        """
+        Execute the parse expression with the given string.
+        This is the main interface to the client code, once the complete
+        expression has been built.
+
+        If you want the grammar to require that the entire input string be
+        successfully parsed, then set C{parseAll} to True (equivalent to ending
+        the grammar with C{L{StringEnd()}}).
+
+        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
+        in order to report proper column numbers in parse actions.
+        If the input string contains tabs and
+        the grammar uses parse actions that use the C{loc} argument to index into the
+        string being parsed, you can ensure you have a consistent view of the input
+        string by:
+         - calling C{parseWithTabs} on your grammar before calling C{parseString}
+           (see L{I{parseWithTabs}})
+         - define your parse action using the full C{(s,loc,toks)} signature, and
+           reference the input string using the parse action's C{s} argument
+         - explicitly expand the tabs in your input string before calling
+           C{parseString}
+        
+        Example::
+            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
+            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
+        """
+        ParserElement.resetCache()
+        if not self.streamlined:
+            self.streamline()
+            #~ self.saveAsList = True
+        for e in self.ignoreExprs:
+            e.streamline()
+        if not self.keepTabs:
+            instring = instring.expandtabs()
+        try:
+            loc, tokens = self._parse( instring, 0 )
+            if parseAll:
+                loc = self.preParse( instring, loc )
+                se = Empty() + StringEnd()
+                se._parse( instring, loc )
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+        else:
+            return tokens
+
+    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
+        """
+        Scan the input string for expression matches.  Each match will return the
+        matching tokens, start location, and end location.  May be called with optional
+        C{maxMatches} argument, to clip scanning after 'n' matches are found.  If
+        C{overlap} is specified, then overlapping matches will be reported.
+
+        Note that the start and end locations are reported relative to the string
+        being parsed.  See L{I{parseString}} for more information on parsing
+        strings with embedded tabs.
+
+        Example::
+            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
+            print(source)
+            for tokens,start,end in Word(alphas).scanString(source):
+                print(' '*start + '^'*(end-start))
+                print(' '*start + tokens[0])
+        
+        prints::
+        
+            sldjf123lsdjjkf345sldkjf879lkjsfd987
+            ^^^^^
+            sldjf
+                    ^^^^^^^
+                    lsdjjkf
+                              ^^^^^^
+                              sldkjf
+                                       ^^^^^^
+                                       lkjsfd
+        """
+        if not self.streamlined:
+            self.streamline()
+        for e in self.ignoreExprs:
+            e.streamline()
+
+        if not self.keepTabs:
+            instring = _ustr(instring).expandtabs()
+        instrlen = len(instring)
+        loc = 0
+        preparseFn = self.preParse
+        parseFn = self._parse
+        ParserElement.resetCache()
+        matches = 0
+        try:
+            while loc <= instrlen and matches < maxMatches:
+                try:
+                    preloc = preparseFn( instring, loc )
+                    nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
+                except ParseException:
+                    loc = preloc+1
+                else:
+                    if nextLoc > loc:
+                        matches += 1
+                        yield tokens, preloc, nextLoc
+                        if overlap:
+                            nextloc = preparseFn( instring, loc )
+                            if nextloc > loc:
+                                loc = nextloc
+                            else:
+                                loc += 1
+                        else:
+                            loc = nextLoc
+                    else:
+                        loc = preloc+1
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def transformString( self, instring ):
+        """
+        Extension to C{L{scanString}}, to modify matching text with modified tokens that may
+        be returned from a parse action.  To use C{transformString}, define a grammar and
+        attach a parse action to it that modifies the returned token list.
+        Invoking C{transformString()} on a target string will then scan for matches,
+        and replace the matched text patterns according to the logic in the parse
+        action.  C{transformString()} returns the resulting transformed string.
+        
+        Example::
+            wd = Word(alphas)
+            wd.setParseAction(lambda toks: toks[0].title())
+            
+            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
+        Prints::
+            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
+        """
+        out = []
+        lastE = 0
+        # force preservation of <TAB>s, to minimize unwanted transformation of string, and to
+        # keep string locs straight between transformString and scanString
+        self.keepTabs = True
+        try:
+            for t,s,e in self.scanString( instring ):
+                out.append( instring[lastE:s] )
+                if t:
+                    if isinstance(t,ParseResults):
+                        out += t.asList()
+                    elif isinstance(t,list):
+                        out += t
+                    else:
+                        out.append(t)
+                lastE = e
+            out.append(instring[lastE:])
+            out = [o for o in out if o]
+            return "".join(map(_ustr,_flatten(out)))
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def searchString( self, instring, maxMatches=_MAX_INT ):
+        """
+        Another extension to C{L{scanString}}, simplifying the access to the tokens found
+        to match the given parse expression.  May be called with optional
+        C{maxMatches} argument, to clip searching after 'n' matches are found.
+        
+        Example::
+            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
+            cap_word = Word(alphas.upper(), alphas.lower())
+            
+            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+
+            # the sum() builtin can be used to merge results into a single ParseResults object
+            print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
+        prints::
+            [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
+            ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
+        """
+        try:
+            return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+        """
+        Generator method to split a string using the given expression as a separator.
+        May be called with optional C{maxsplit} argument, to limit the number of splits;
+        and the optional C{includeSeparators} argument (default=C{False}), if the separating
+        matching text should be included in the split results.
+        
+        Example::        
+            punc = oneOf(list(".,;:/-!?"))
+            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+        prints::
+            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+        """
+        splits = 0
+        last = 0
+        for t,s,e in self.scanString(instring, maxMatches=maxsplit):
+            yield instring[last:s]
+            if includeSeparators:
+                yield t[0]
+            last = e
+        yield instring[last:]
+
+    def __add__(self, other ):
+        """
+        Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
+        converts them to L{Literal}s by default.
+        
+        Example::
+            greet = Word(alphas) + "," + Word(alphas) + "!"
+            hello = "Hello, World!"
+            print (hello, "->", greet.parseString(hello))
+        Prints::
+            Hello, World! -> ['Hello', ',', 'World', '!']
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return And( [ self, other ] )
+
+    def __radd__(self, other ):
+        """
+        Implementation of + operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other + self
+
+    def __sub__(self, other):
+        """
+        Implementation of - operator, returns C{L{And}} with error stop
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return self + And._ErrorStop() + other
+
+    def __rsub__(self, other ):
+        """
+        Implementation of - operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other - self
+
+    def __mul__(self,other):
+        """
+        Implementation of * operator, allows use of C{expr * 3} in place of
+        C{expr + expr + expr}.  Expressions may also be multiplied by a 2-integer
+        tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
+        may also include C{None} as in:
+         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
+              to C{expr*n + L{ZeroOrMore}(expr)}
+              (read as "at least n instances of C{expr}")
+         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
+              (read as "0 to n instances of C{expr}")
+         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
+         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
+
+        Note that C{expr*(None,n)} does not raise an exception if
+        more than n exprs exist in the input stream; that is,
+        C{expr*(None,n)} does not enforce a maximum number of expr
+        occurrences.  If this behavior is desired, then write
+        C{expr*(None,n) + ~expr}
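+
+        For illustration (a small sketch, assuming C{nums}, pyparsing's digit
+        character set, is in scope)::
+            Word(nums) * 3        # same as Word(nums) + Word(nums) + Word(nums)
+            Word(nums) * (2, 4)   # matches two, three, or four integers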
+        """
+        if isinstance(other,int):
+            minElements, optElements = other,0
+        elif isinstance(other,tuple):
+            other = (other + (None, None))[:2]
+            if other[0] is None:
+                other = (0, other[1])
+            if isinstance(other[0],int) and other[1] is None:
+                if other[0] == 0:
+                    return ZeroOrMore(self)
+                if other[0] == 1:
+                    return OneOrMore(self)
+                else:
+                    return self*other[0] + ZeroOrMore(self)
+            elif isinstance(other[0],int) and isinstance(other[1],int):
+                minElements, optElements = other
+                optElements -= minElements
+            else:
+                raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))
+        else:
+            raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))
+
+        if minElements < 0:
+            raise ValueError("cannot multiply ParserElement by negative value")
+        if optElements < 0:
+            raise ValueError("second tuple value must be greater or equal to first tuple value")
+        if minElements == optElements == 0:
+            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+        if (optElements):
+            def makeOptionalList(n):
+                if n>1:
+                    return Optional(self + makeOptionalList(n-1))
+                else:
+                    return Optional(self)
+            if minElements:
+                if minElements == 1:
+                    ret = self + makeOptionalList(optElements)
+                else:
+                    ret = And([self]*minElements) + makeOptionalList(optElements)
+            else:
+                ret = makeOptionalList(optElements)
+        else:
+            if minElements == 1:
+                ret = self
+            else:
+                ret = And([self]*minElements)
+        return ret
+
+    def __rmul__(self, other):
+        return self.__mul__(other)
+
+    def __or__(self, other ):
+        """
+        Implementation of | operator - returns C{L{MatchFirst}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return MatchFirst( [ self, other ] )
+
+    def __ror__(self, other ):
+        """
+        Implementation of | operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other | self
+
+    def __xor__(self, other ):
+        """
+        Implementation of ^ operator - returns C{L{Or}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return Or( [ self, other ] )
+
+    def __rxor__(self, other ):
+        """
+        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other ^ self
+
+    def __and__(self, other ):
+        """
+        Implementation of & operator - returns C{L{Each}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return Each( [ self, other ] )
+
+    def __rand__(self, other ):
+        """
+        Implementation of & operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other & self
+
+    def __invert__( self ):
+        """
+        Implementation of ~ operator - returns C{L{NotAny}}
+        """
+        return NotAny( self )
+
+    def __call__(self, name=None):
+        """
+        Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
+        
+        If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
+        passed as C{True}.
+           
+        If C{name} is omitted, same as calling C{L{copy}}.
+
+        Example::
+            # these are equivalent
+            userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
+            userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")             
+        """
+        if name is not None:
+            return self.setResultsName(name)
+        else:
+            return self.copy()
+
+    def suppress( self ):
+        """
+        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
+        cluttering up returned output.
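+
+        Example (a minimal sketch)::
+            wd = Word(alphas)
+            patt = wd + Literal(",").suppress() + wd
+            patt.parseString("Hello, World")  # -> ['Hello', 'World']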
+        """
+        return Suppress( self )
+
+    def leaveWhitespace( self ):
+        """
+        Disables the skipping of whitespace before matching the characters in the
+        C{ParserElement}'s defined pattern.  This is normally only used internally by
+        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
+        """
+        self.skipWhitespace = False
+        return self
+
+    def setWhitespaceChars( self, chars ):
+        """
+        Overrides the default whitespace chars
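+
+        Example (illustrative)::
+            # skip only spaces and tabs, so newlines become significant
+            Word(alphas).setWhitespaceChars(" \t")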
+        """
+        self.skipWhitespace = True
+        self.whiteChars = chars
+        self.copyDefaultWhiteChars = False
+        return self
+
+    def parseWithTabs( self ):
+        """
+        Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
+        Must be called before C{parseString} when the input grammar contains elements that
+        match C{<TAB>} characters.
+        """
+        self.keepTabs = True
+        return self
+
+    def ignore( self, other ):
+        """
+        Define expression to be ignored (e.g., comments) while doing pattern
+        matching; may be called repeatedly, to define multiple comment or other
+        ignorable patterns.
+        
+        Example::
+            patt = OneOrMore(Word(alphas))
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
+            
+            patt.ignore(cStyleComment)
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
+        """
+        if isinstance(other, basestring):
+            other = Suppress(other)
+
+        if isinstance( other, Suppress ):
+            if other not in self.ignoreExprs:
+                self.ignoreExprs.append(other)
+        else:
+            self.ignoreExprs.append( Suppress( other.copy() ) )
+        return self
+
+    def setDebugActions( self, startAction, successAction, exceptionAction ):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        """
+        self.debugActions = (startAction or _defaultStartDebugAction,
+                             successAction or _defaultSuccessDebugAction,
+                             exceptionAction or _defaultExceptionDebugAction)
+        self.debug = True
+        return self
+
+    def setDebug( self, flag=True ):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        Set C{flag} to True to enable, False to disable.
+
+        Example::
+            wd = Word(alphas).setName("alphaword")
+            integer = Word(nums).setName("numword")
+            term = wd | integer
+            
+            # turn on debugging for wd
+            wd.setDebug()
+
+            OneOrMore(term).parseString("abc 123 xyz 890")
+        
+        prints::
+            Match alphaword at loc 0(1,1)
+            Matched alphaword -> ['abc']
+            Match alphaword at loc 3(1,4)
+            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
+            Match alphaword at loc 7(1,8)
+            Matched alphaword -> ['xyz']
+            Match alphaword at loc 11(1,12)
+            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
+            Match alphaword at loc 15(1,16)
+            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
+
+        The output shown is that produced by the default debug actions - custom debug actions can be
+        specified using L{setDebugActions}. Prior to attempting
+        to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
+        is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
+        message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
+        which makes debugging and exception messages easier to understand - for instance, the default
+        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
+        """
+        if flag:
+            self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
+        else:
+            self.debug = False
+        return self
+
+    def __str__( self ):
+        return self.name
+
+    def __repr__( self ):
+        return _ustr(self)
+
+    def streamline( self ):
+        self.streamlined = True
+        self.strRepr = None
+        return self
+
+    def checkRecursion( self, parseElementList ):
+        pass
+
+    def validate( self, validateTrace=[] ):
+        """
+        Check defined expressions for valid structure, check for infinite recursive definitions.
+        """
+        self.checkRecursion( [] )
+
+    def parseFile( self, file_or_filename, parseAll=False ):
+        """
+        Execute the parse expression on the given file or filename.
+        If a filename is specified (instead of a file object),
+        the entire file is opened, read, and closed before parsing.
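+
+        Example (a sketch; "data.txt" is a hypothetical file)::
+            results = OneOrMore(Word(alphas)).parseFile("data.txt")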
+        """
+        try:
+            file_contents = file_or_filename.read()
+        except AttributeError:
+            with open(file_or_filename, "r") as f:
+                file_contents = f.read()
+        try:
+            return self.parseString(file_contents, parseAll)
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def __eq__(self,other):
+        if isinstance(other, ParserElement):
+            return self is other or vars(self) == vars(other)
+        elif isinstance(other, basestring):
+            return self.matches(other)
+        else:
+            return super(ParserElement,self)==other
+
+    def __ne__(self,other):
+        return not (self == other)
+
+    def __hash__(self):
+        return hash(id(self))
+
+    def __req__(self,other):
+        return self == other
+
+    def __rne__(self,other):
+        return not (self == other)
+
+    def matches(self, testString, parseAll=True):
+        """
+        Method for quick testing of a parser against a test string. Good for simple 
+        inline microtests of sub expressions while building up a larger parser.
+           
+        Parameters:
+         - testString - to test against this expression for a match
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+            
+        Example::
+            expr = Word(nums)
+            assert expr.matches("100")
+        """
+        try:
+            self.parseString(_ustr(testString), parseAll=parseAll)
+            return True
+        except ParseBaseException:
+            return False
+                
+    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
+        """
+        Execute the parse expression on a series of test strings, showing each
+        test, the parsed results or where the parse failed. Quick and easy way to
+        run a parse expression against a list of sample strings.
+           
+        Parameters:
+         - tests - a list of separate test strings, or a multiline string of test strings
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests           
+         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test 
+              string; pass None to disable comment filtering
+         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
+              if False, only dump nested list
+         - printResults - (default=C{True}) prints test output to stdout
+         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing
+
+        Returns: a (success, results) tuple, where success indicates that all tests succeeded
+        (or failed if C{failureTests} is True), and the results contain a list of lines of each 
+        test's output
+        
+        Example::
+            number_expr = pyparsing_common.number.copy()
+
+            result = number_expr.runTests('''
+                # unsigned integer
+                100
+                # negative integer
+                -100
+                # float with scientific notation
+                6.02e23
+                # integer with scientific notation
+                1e-12
+                ''')
+            print("Success" if result[0] else "Failed!")
+
+            result = number_expr.runTests('''
+                # stray character
+                100Z
+                # missing leading digit before '.'
+                -.100
+                # too many '.'
+                3.14.159
+                ''', failureTests=True)
+            print("Success" if result[0] else "Failed!")
+        prints::
+            # unsigned integer
+            100
+            [100]
+
+            # negative integer
+            -100
+            [-100]
+
+            # float with scientific notation
+            6.02e23
+            [6.02e+23]
+
+            # integer with scientific notation
+            1e-12
+            [1e-12]
+
+            Success
+            
+            # stray character
+            100Z
+               ^
+            FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+            # missing leading digit before '.'
+            -.100
+            ^
+            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+            # too many '.'
+            3.14.159
+                ^
+            FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+            Success
+
+        Each test string must be on a single line. If you want to test a string that spans multiple
+        lines, create a test like this::
+
+            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+        
+        (Note that this is a raw string literal, you must include the leading 'r'.)
+        """
+        if isinstance(tests, basestring):
+            tests = list(map(str.strip, tests.rstrip().splitlines()))
+        if isinstance(comment, basestring):
+            comment = Literal(comment)
+        allResults = []
+        comments = []
+        success = True
+        for t in tests:
+            if comment is not None and comment.matches(t, False) or comments and not t:
+                comments.append(t)
+                continue
+            if not t:
+                continue
+            out = ['\n'.join(comments), t]
+            comments = []
+            try:
+                t = t.replace(r'\n','\n')
+                result = self.parseString(t, parseAll=parseAll)
+                out.append(result.dump(full=fullDump))
+                success = success and not failureTests
+            except ParseBaseException as pe:
+                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
+                if '\n' in t:
+                    out.append(line(pe.loc, t))
+                    out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
+                else:
+                    out.append(' '*pe.loc + '^' + fatal)
+                out.append("FAIL: " + str(pe))
+                success = success and failureTests
+                result = pe
+            except Exception as exc:
+                out.append("FAIL-EXCEPTION: " + str(exc))
+                success = success and failureTests
+                result = exc
+
+            if printResults:
+                if fullDump:
+                    out.append('')
+                print('\n'.join(out))
+
+            allResults.append((t, result))
+        
+        return success, allResults
+
+        
+class Token(ParserElement):
+    """
+    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
+    """
+    def __init__( self ):
+        super(Token,self).__init__( savelist=False )
+
+
+class Empty(Token):
+    """
+    An empty token, will always match.
+    """
+    def __init__( self ):
+        super(Empty,self).__init__()
+        self.name = "Empty"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+
+class NoMatch(Token):
+    """
+    A token that will never match.
+    """
+    def __init__( self ):
+        super(NoMatch,self).__init__()
+        self.name = "NoMatch"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.errmsg = "Unmatchable token"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Literal(Token):
+    """
+    Token to exactly match a specified string.
+    
+    Example::
+        Literal('blah').parseString('blah')  # -> ['blah']
+        Literal('blah').parseString('blahfooblah')  # -> ['blah']
+        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
+    
+    For case-insensitive matching, use L{CaselessLiteral}.
+    
+    For keyword matching (force word break before and after the matched string),
+    use L{Keyword} or L{CaselessKeyword}.
+    """
+    def __init__( self, matchString ):
+        super(Literal,self).__init__()
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Literal; use Empty() instead",
+                            SyntaxWarning, stacklevel=2)
+            self.__class__ = Empty
+        self.name = '"%s"' % _ustr(self.match)
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+
+    # Performance tuning: this routine gets called a *lot*
+    # if this is a single character match string  and the first character matches,
+    # short-circuit as quickly as possible, and avoid calling startswith
+    #~ @profile
+    def parseImpl( self, instring, loc, doActions=True ):
+        if (instring[loc] == self.firstMatchChar and
+            (self.matchLen==1 or instring.startswith(self.match,loc)) ):
+            return loc+self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+_L = Literal
+ParserElement._literalStringClass = Literal
+
+class Keyword(Token):
+    """
+    Token to exactly match a specified string as a keyword, that is, it must be
+    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
+     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
+     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
+    Accepts two optional constructor arguments in addition to the keyword string:
+     - C{identChars} is a string of characters that would be valid identifier characters,
+          defaulting to all alphanumerics + "_" and "$"
+     - C{caseless} allows case-insensitive matching, default is C{False}.
+       
+    Example::
+        Keyword("start").parseString("start")  # -> ['start']
+        Keyword("start").parseString("starting")  # -> Exception
+
+    For case-insensitive matching, use L{CaselessKeyword}.
+    """
+    DEFAULT_KEYWORD_CHARS = alphanums+"_$"
+
+    def __init__( self, matchString, identChars=None, caseless=False ):
+        super(Keyword,self).__init__()
+        if identChars is None:
+            identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Keyword; use Empty() instead",
+                            SyntaxWarning, stacklevel=2)
+        self.name = '"%s"' % self.match
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+        self.caseless = caseless
+        if caseless:
+            self.caselessmatch = matchString.upper()
+            identChars = identChars.upper()
+        self.identChars = set(identChars)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.caseless:
+            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
+                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
+                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
+                return loc+self.matchLen, self.match
+        else:
+            if (instring[loc] == self.firstMatchChar and
+                (self.matchLen==1 or instring.startswith(self.match,loc)) and
+                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
+                (loc == 0 or instring[loc-1] not in self.identChars) ):
+                return loc+self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
+    def copy(self):
+        c = super(Keyword,self).copy()
+        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        return c
+
+    @staticmethod
+    def setDefaultKeywordChars( chars ):
+        """Overrides the default Keyword chars
+        """
+        Keyword.DEFAULT_KEYWORD_CHARS = chars
+
+class CaselessLiteral(Literal):
+    """
+    Token to match a specified string, ignoring case of letters.
+    Note: the matched results will always be in the case of the given
+    match string, NOT the case of the input text.
+
+    Example::
+        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
+        
+    (Contrast with example for L{CaselessKeyword}.)
+    """
+    def __init__( self, matchString ):
+        super(CaselessLiteral,self).__init__( matchString.upper() )
+        # Preserve the defining literal.
+        self.returnString = matchString
+        self.name = "'%s'" % self.returnString
+        self.errmsg = "Expected " + self.name
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if instring[ loc:loc+self.matchLen ].upper() == self.match:
+            return loc+self.matchLen, self.returnString
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class CaselessKeyword(Keyword):
+    """
+    Caseless version of L{Keyword}.
+
+    Example::
+        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
+        
+    (Contrast with example for L{CaselessLiteral}.)
+    """
+    def __init__( self, matchString, identChars=None ):
+        super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
+             (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ):
+            return loc+self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class CloseMatch(Token):
+    """
+    A variation on L{Literal} which matches "close" matches, that is, 
+    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
+     - C{match_string} - string to be matched
+     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
+    
+    The results from a successful parse will contain the matched text from the input string and the following named results:
+     - C{mismatches} - a list of the positions within the match_string where mismatches were found
+     - C{original} - the original match_string used to compare against the input string
+    
+    If C{mismatches} is an empty list, then the match was an exact match.
+    
+    Example::
+        patt = CloseMatch("ATCATCGAATGGA")
+        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
+        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
+
+        # exact match
+        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
+
+        # close match allowing up to 2 mismatches
+        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
+        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
+    """
+    def __init__(self, match_string, maxMismatches=1):
+        super(CloseMatch,self).__init__()
+        self.name = match_string
+        self.match_string = match_string
+        self.maxMismatches = maxMismatches
+        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
+        self.mayIndexError = False
+        self.mayReturnEmpty = False
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        start = loc
+        instrlen = len(instring)
+        maxloc = start + len(self.match_string)
+
+        if maxloc <= instrlen:
+            match_string = self.match_string
+            match_stringloc = 0
+            mismatches = []
+            maxMismatches = self.maxMismatches
+
+            for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)):
+                src,mat = s_m
+                if src != mat:
+                    mismatches.append(match_stringloc)
+                    if len(mismatches) > maxMismatches:
+                        break
+            else:
+                loc = match_stringloc + 1
+                results = ParseResults([instring[start:loc]])
+                results['original'] = self.match_string
+                results['mismatches'] = mismatches
+                return loc, results
+
+        raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Word(Token):
+    """
+    Token for matching words composed of allowed character sets.
+    Defined with string containing all allowed initial characters,
+    an optional string containing allowed body characters (if omitted,
+    defaults to the initial character set), and an optional minimum,
+    maximum, and/or exact length.  The default value for C{min} is 1 (a
+    minimum value < 1 is not valid); the default values for C{max} and C{exact}
+    are 0, meaning no maximum or exact length restriction. An optional
+    C{excludeChars} parameter can list characters that might be found in 
+    the input C{bodyChars} string; useful to define a word of all printables
+    except for one or two characters, for instance.
+    
+    L{srange} is useful for defining custom character set strings for defining 
+    C{Word} expressions, using range notation from regular expression character sets.
+    
+    A common mistake is to use C{Word} to match a specific literal string, as in 
+    C{Word("Address")}. Remember that C{Word} uses the string argument to define
+    I{sets} of matchable characters. This expression would match "Add", "AAA",
+    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
+    To match an exact literal string, use L{Literal} or L{Keyword}.
+
+    pyparsing includes helper strings for building Words:
+     - L{alphas}
+     - L{nums}
+     - L{alphanums}
+     - L{hexnums}
+     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
+     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
+     - L{printables} (any non-whitespace character)
+
+    Example::
+        # a word composed of digits
+        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
+        
+        # a word with a leading capital, and zero or more lowercase
+        capital_word = Word(alphas.upper(), alphas.lower())
+
+        # hostnames are alphanumeric, with leading alpha, and '-'
+        hostname = Word(alphas, alphanums+'-')
+        
+        # roman numeral (not a strict parser, accepts invalid mix of characters)
+        roman = Word("IVXLCDM")
+        
+        # any string of non-whitespace characters, except for ','
+        csv_value = Word(printables, excludeChars=",")
+    """
+    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
+        super(Word,self).__init__()
+        if excludeChars:
+            initChars = ''.join(c for c in initChars if c not in excludeChars)
+            if bodyChars:
+                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
+        self.initCharsOrig = initChars
+        self.initChars = set(initChars)
+        if bodyChars :
+            self.bodyCharsOrig = bodyChars
+            self.bodyChars = set(bodyChars)
+        else:
+            self.bodyCharsOrig = initChars
+            self.bodyChars = set(initChars)
+
+        self.maxSpecified = max > 0
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.asKeyword = asKeyword
+
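+        # fast path: when the character sets contain no literal space and no length
+        # limits were given, precompile an equivalent regular expression for
+        # parseImpl to use in a single match call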
+        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
+            if self.bodyCharsOrig == self.initCharsOrig:
+                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
+            elif len(self.initCharsOrig) == 1:
+                self.reString = "%s[%s]*" % \
+                                      (re.escape(self.initCharsOrig),
+                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
+            else:
+                self.reString = "[%s][%s]*" % \
+                                      (_escapeRegexRangeChars(self.initCharsOrig),
+                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
+            if self.asKeyword:
+                self.reString = r"\b"+self.reString+r"\b"
+            try:
+                self.re = re.compile( self.reString )
+            except Exception:
+                self.re = None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.re:
+            result = self.re.match(instring,loc)
+            if not result:
+                raise ParseException(instring, loc, self.errmsg, self)
+
+            loc = result.end()
+            return loc, result.group()
+
+        if not(instring[ loc ] in self.initChars):
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        instrlen = len(instring)
+        bodychars = self.bodyChars
+        maxloc = start + self.maxLen
+        maxloc = min( maxloc, instrlen )
+        while loc < maxloc and instring[loc] in bodychars:
+            loc += 1
+
+        throwException = False
+        if loc - start < self.minLen:
+            throwException = True
+        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
+            throwException = True
+        if self.asKeyword:
+            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
+                throwException = True
+
+        if throwException:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__( self ):
+        try:
+            return super(Word,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+
+            def charsAsStr(s):
+                if len(s)>4:
+                    return s[:4]+"..."
+                else:
+                    return s
+
+            if ( self.initCharsOrig != self.bodyCharsOrig ):
+                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
+            else:
+                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
+
+        return self.strRepr
+
+
+class Regex(Token):
+    r"""
+    Token for matching strings that match a given regular expression.
+    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
+    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as 
+    named parse results.
+
+    Example::
+        realnum = Regex(r"[+-]?\d+\.\d*")
+        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
+        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
+        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
+    """
+    compiledREtype = type(re.compile("[A-Z]"))
+    def __init__( self, pattern, flags=0):
+        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
+        super(Regex,self).__init__()
+
+        if isinstance(pattern, basestring):
+            if not pattern:
+                warnings.warn("null string passed to Regex; use Empty() instead",
+                        SyntaxWarning, stacklevel=2)
+
+            self.pattern = pattern
+            self.flags = flags
+
+            try:
+                self.re = re.compile(self.pattern, self.flags)
+                self.reString = self.pattern
+            except sre_constants.error:
+                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
+                    SyntaxWarning, stacklevel=2)
+                raise
+
+        elif isinstance(pattern, Regex.compiledREtype):
+            self.re = pattern
+            self.pattern = \
+            self.reString = str(pattern)
+            self.flags = flags
+            
+        else:
+            raise ValueError("Regex may only be constructed with a string or a compiled RE object")
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        result = self.re.match(instring,loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        d = result.groupdict()
+        ret = ParseResults(result.group())
+        if d:
+            for k in d:
+                ret[k] = d[k]
+        return loc,ret
+
+    def __str__( self ):
+        try:
+            return super(Regex,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "Re:(%s)" % repr(self.pattern)
+
+        return self.strRepr
+
+
+class QuotedString(Token):
+    r"""
+    Token for matching strings that are delimited by quoting characters.
+    
+    Defined with the following parameters:
+        - quoteChar - string of one or more characters defining the quote delimiting string
+        - escChar - character to escape quotes, typically backslash (default=C{None})
+        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
+        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
+        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
+        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
+        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})
+
+    Example::
+        qs = QuotedString('"')
+        print(qs.searchString('lsjdf "This is the quote" sldjf'))
+        complex_qs = QuotedString('{{', endQuoteChar='}}')
+        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
+        sql_qs = QuotedString('"', escQuote='""')
+        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
+    prints::
+        [['This is the quote']]
+        [['This is the "quote"']]
+        [['This is the quote with "embedded" quotes']]
+    """
+    def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
+        super(QuotedString,self).__init__()
+
+        # remove white space from quote chars - won't work anyway
+        quoteChar = quoteChar.strip()
+        if not quoteChar:
+            warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+            raise SyntaxError()
+
+        if endQuoteChar is None:
+            endQuoteChar = quoteChar
+        else:
+            endQuoteChar = endQuoteChar.strip()
+            if not endQuoteChar:
+                warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+                raise SyntaxError()
+
+        self.quoteChar = quoteChar
+        self.quoteCharLen = len(quoteChar)
+        self.firstQuoteChar = quoteChar[0]
+        self.endQuoteChar = endQuoteChar
+        self.endQuoteCharLen = len(endQuoteChar)
+        self.escChar = escChar
+        self.escQuote = escQuote
+        self.unquoteResults = unquoteResults
+        self.convertWhitespaceEscapes = convertWhitespaceEscapes
+
+        if multiline:
+            self.flags = re.MULTILINE | re.DOTALL
+            self.pattern = r'%s(?:[^%s%s]' % \
+                ( re.escape(self.quoteChar),
+                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+        else:
+            self.flags = 0
+            self.pattern = r'%s(?:[^%s\n\r%s]' % \
+                ( re.escape(self.quoteChar),
+                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+        if len(self.endQuoteChar) > 1:
+            self.pattern += (
+                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
+                                               _escapeRegexRangeChars(self.endQuoteChar[i]))
+                                    for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
+                )
+        if escQuote:
+            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
+        if escChar:
+            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
+            self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
+        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
+
+        try:
+            self.re = re.compile(self.pattern, self.flags)
+            self.reString = self.pattern
+        except sre_constants.error:
+            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
+                SyntaxWarning, stacklevel=2)
+            raise
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result.group()
+
+        if self.unquoteResults:
+
+            # strip off quotes
+            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
+
+            if isinstance(ret,basestring):
+                # replace escaped whitespace
+                if '\\' in ret and self.convertWhitespaceEscapes:
+                    ws_map = {
+                        r'\t' : '\t',
+                        r'\n' : '\n',
+                        r'\f' : '\f',
+                        r'\r' : '\r',
+                    }
+                    for wslit,wschar in ws_map.items():
+                        ret = ret.replace(wslit, wschar)
+
+                # replace escaped characters
+                if self.escChar:
+                    ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
+
+                # replace escaped quotes
+                if self.escQuote:
+                    ret = ret.replace(self.escQuote, self.endQuoteChar)
+
+        return loc, ret
+
+    def __str__( self ):
+        try:
+            return super(QuotedString,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
+
+        return self.strRepr
+
+
+class CharsNotIn(Token):
+    """
+    Token for matching words composed of characters I{not} in a given set (will
+    include whitespace in matched characters if not listed in the provided exclusion set - see example).
+    Defined with string containing all disallowed characters, and an optional
+    minimum, maximum, and/or exact length.  The default value for C{min} is 1 (a
+    minimum value < 1 is not valid); the default values for C{max} and C{exact}
+    are 0, meaning no maximum or exact length restriction.
+
+    Example::
+        # define a comma-separated-value as anything that is not a ','
+        csv_value = CharsNotIn(',')
+        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
+    prints::
+        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
+    """
+    def __init__( self, notChars, min=1, max=0, exact=0 ):
+        super(CharsNotIn,self).__init__()
+        self.skipWhitespace = False
+        self.notChars = notChars
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = ( self.minLen == 0 )
+        self.mayIndexError = False
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if instring[loc] in self.notChars:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        notchars = self.notChars
+        maxlen = min( start+self.maxLen, len(instring) )
+        while loc < maxlen and \
+              (instring[loc] not in notchars):
+            loc += 1
+
+        if loc - start < self.minLen:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__( self ):
+        try:
+            return super(CharsNotIn, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            if len(self.notChars) > 4:
+                self.strRepr = "!W:(%s...)" % self.notChars[:4]
+            else:
+                self.strRepr = "!W:(%s)" % self.notChars
+
+        return self.strRepr
+
+class White(Token):
+    """
+    Special matching class for matching whitespace.  Normally, whitespace is ignored
+    by pyparsing grammars.  This class is included when some whitespace structures
+    are significant.  Define with a string containing the whitespace characters to be
+    matched; default is C{" \\t\\r\\n"}.  Also takes optional C{min}, C{max}, and C{exact} arguments,
+    as defined for the C{L{Word}} class.
+    """
+    whiteStrs = {
+        " " : "",
+        "\t": "",
+        "\n": "",
+        "\r": "",
+        "\f": "",
+        }
+    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
+        super(White,self).__init__()
+        self.matchWhite = ws
+        self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
+        #~ self.leaveWhitespace()
+        self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
+        self.mayReturnEmpty = True
+        self.errmsg = "Expected " + self.name
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if not(instring[ loc ] in self.matchWhite):
+            raise ParseException(instring, loc, self.errmsg, self)
+        start = loc
+        loc += 1
+        maxloc = start + self.maxLen
+        maxloc = min( maxloc, len(instring) )
+        while loc < maxloc and instring[loc] in self.matchWhite:
+            loc += 1
+
+        if loc - start < self.minLen:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
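+# Illustrative usage sketch (added comment, not from the original pyparsing sources):
+# White is only needed when whitespace itself is significant, since pyparsing skips
+# it by default.  The names below are example-only:
+#
+#     spaced = Word(alphas) + White(" ") + Word(alphas)
+#     print(spaced.parseString("hello   world"))   # -> ['hello', '   ', 'world']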
+
+class _PositionToken(Token):
+    def __init__( self ):
+        super(_PositionToken,self).__init__()
+        self.name=self.__class__.__name__
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+class GoToColumn(_PositionToken):
+    """
+    Token to advance to a specific column of input text; useful for tabular report scraping.
+    """
+    def __init__( self, colno ):
+        super(GoToColumn,self).__init__()
+        self.col = colno
+
+    def preParse( self, instring, loc ):
+        if col(loc,instring) != self.col:
+            instrlen = len(instring)
+            if self.ignoreExprs:
+                loc = self._skipIgnorables( instring, loc )
+            while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
+                loc += 1
+        return loc
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        thiscol = col( loc, instring )
+        if thiscol > self.col:
+            raise ParseException( instring, loc, "Text not in expected column", self )
+        newloc = loc + self.col - thiscol
+        ret = instring[ loc: newloc ]
+        return newloc, ret
+
+
+class LineStart(_PositionToken):
+    """
+    Matches if current position is at the beginning of a line within the parse string
+    
+    Example::
+    
+        test = '''\
+        AAA this line
+        AAA and this line
+          AAA but not this one
+        B AAA and definitely not this one
+        '''
+
+        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
+            print(t)
+    
+    Prints::
+        ['AAA', ' this line']
+        ['AAA', ' and this line']    
+
+    """
+    def __init__( self ):
+        super(LineStart,self).__init__()
+        self.errmsg = "Expected start of line"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if col(loc, instring) == 1:
+            return loc, []
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class LineEnd(_PositionToken):
+    """
+    Matches if current position is at the end of a line within the parse string
+    """
+    def __init__( self ):
+        super(LineEnd,self).__init__()
+        self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
+        self.errmsg = "Expected end of line"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if loc<len(instring):
+            if instring[loc] == "\n":
+                return loc+1, "\n"
+            else:
+                raise ParseException(instring, loc, self.errmsg, self)
+        elif loc == len(instring):
+            return loc+1, []
+        else:
+            raise ParseException(instring, loc, self.errmsg, self)
+
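+# Illustrative usage sketch (added comment, not from the original pyparsing sources):
+# LineEnd removes "\n" from its skippable whitespace, so it can anchor a match to
+# the end of each line:
+#
+#     item = Word(alphas) + LineEnd().suppress()
+#     print(OneOrMore(item).parseString("one\ntwo\nthree\n"))   # -> ['one', 'two', 'three']
+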
+class WordStart(_PositionToken):
+    """
+    Matches if the current position is at the beginning of a Word, and
+    is not preceded by any character in a given set of C{wordChars}
+    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
+    the string being parsed, or at the beginning of a line.
+    """
+    def __init__(self, wordChars = printables):
+        super(WordStart,self).__init__()
+        self.wordChars = set(wordChars)
+        self.errmsg = "Not at the start of a word"
+
+    def parseImpl(self, instring, loc, doActions=True ):
+        if loc != 0:
+            if (instring[loc-1] in self.wordChars or
+                instring[loc] not in self.wordChars):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
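+# Illustrative usage sketch (added comment, not from the original pyparsing sources):
+# WordStart rejects matches that begin in the middle of a word, similar to a regex
+# word boundary:
+#
+#     ball = WordStart(alphanums) + Literal("ball")
+#     print(ball.searchString("football ball baseball"))   # -> [['ball']]
+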
+class WordEnd(_PositionToken):
+    """
+    Matches if the current position is at the end of a Word, and
+    is not followed by any character in a given set of C{wordChars}
+    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
+    the string being parsed, or at the end of a line.
+    """
+    def __init__(self, wordChars = printables):
+        super(WordEnd,self).__init__()
+        self.wordChars = set(wordChars)
+        self.skipWhitespace = False
+        self.errmsg = "Not at the end of a word"
+
+    def parseImpl(self, instring, loc, doActions=True ):
+        instrlen = len(instring)
+        if instrlen>0 and loc<instrlen:
+            if (instring[loc] in self.wordChars or
+                instring[loc-1] not in self.wordChars):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+
+class Or(ParseExpression):
+    """
+    Requires that at least one C{ParseExpression} is found.
+    If two expressions match, the expression that matches the longest string will be used.
+    May be constructed using the C{'^'} operator.
+
+    Example::
+        # construct Or using '^' operator
+        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
+        print(number.searchString("123 3.1416 789"))
+    prints::
+        [['123'], ['3.1416'], ['789']]
+    """
+    def __init__( self, exprs, savelist = False ):
+        super(Or,self).__init__(exprs, savelist)
+        if self.exprs:
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        maxExcLoc = -1
+        maxException = None
+        matches = []
+        for e in self.exprs:
+            try:
+                loc2 = e.tryParse( instring, loc )
+            except ParseException as err:
+                err.__traceback__ = None
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring,len(instring),e.errmsg,self)
+                    maxExcLoc = len(instring)
+            else:
+                # save match among all matches, to retry longest to shortest
+                matches.append((loc2, e))
+
+        if matches:
+            matches.sort(key=lambda x: -x[0])
+            for _,e in matches:
+                try:
+                    return e._parse( instring, loc, doActions )
+                except ParseException as err:
+                    err.__traceback__ = None
+                    if err.loc > maxExcLoc:
+                        maxException = err
+                        maxExcLoc = err.loc
+
+        if maxException is not None:
+            maxException.msg = self.errmsg
+            raise maxException
+        else:
+            raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+
+    def __ixor__(self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        return self.append( other ) #Or( [ self, other ] )
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class MatchFirst(ParseExpression):
+    """
+    Requires that at least one C{ParseExpression} is found.
+    If two expressions match, the first one listed is the one that will match.
+    May be constructed using the C{'|'} operator.
+
+    Example::
+        # construct MatchFirst using '|' operator
+        
+        # watch the order of expressions to match
+        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
+        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]
+
+        # put more selective expression first
+        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
+        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
+    """
+    def __init__( self, exprs, savelist = False ):
+        super(MatchFirst,self).__init__(exprs, savelist)
+        if self.exprs:
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        maxExcLoc = -1
+        maxException = None
+        for e in self.exprs:
+            try:
+                ret = e._parse( instring, loc, doActions )
+                return ret
+            except ParseException as err:
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring,len(instring),e.errmsg,self)
+                    maxExcLoc = len(instring)
+
+        # only got here if no expression matched, raise exception for match that made it the furthest
+        else:
+            if maxException is not None:
+                maxException.msg = self.errmsg
+                raise maxException
+            else:
+                raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+    def __ior__(self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        return self.append( other ) #MatchFirst( [ self, other ] )
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class Each(ParseExpression):
+    """
+    Requires all given C{ParseExpression}s to be found, but in any order.
+    Expressions may be separated by whitespace.
+    May be constructed using the C{'&'} operator.
+
+    Example::
+        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
+        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
+        integer = Word(nums)
+        shape_attr = "shape:" + shape_type("shape")
+        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
+        color_attr = "color:" + color("color")
+        size_attr = "size:" + integer("size")
+
+        # use Each (using operator '&') to accept attributes in any order 
+        # (shape and posn are required, color and size are optional)
+        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)
+
+        shape_spec.runTests('''
+            shape: SQUARE color: BLACK posn: 100, 120
+            shape: CIRCLE size: 50 color: BLUE posn: 50,80
+            color:GREEN size:20 shape:TRIANGLE posn:20,40
+            '''
+            )
+    prints::
+        shape: SQUARE color: BLACK posn: 100, 120
+        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
+        - color: BLACK
+        - posn: ['100', ',', '120']
+          - x: 100
+          - y: 120
+        - shape: SQUARE
+
+
+        shape: CIRCLE size: 50 color: BLUE posn: 50,80
+        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
+        - color: BLUE
+        - posn: ['50', ',', '80']
+          - x: 50
+          - y: 80
+        - shape: CIRCLE
+        - size: 50
+
+
+        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
+        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
+        - color: GREEN
+        - posn: ['20', ',', '40']
+          - x: 20
+          - y: 40
+        - shape: TRIANGLE
+        - size: 20
+    """
+    def __init__( self, exprs, savelist = True ):
+        super(Each,self).__init__(exprs, savelist)
+        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+        self.skipWhitespace = True
+        self.initExprGroups = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.initExprGroups:
+            self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
+            opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
+            opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
+            self.optionals = opt1 + opt2
+            self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
+            self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
+            self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
+            self.required += self.multirequired
+            self.initExprGroups = False
+        tmpLoc = loc
+        tmpReqd = self.required[:]
+        tmpOpt  = self.optionals[:]
+        matchOrder = []
+
+        keepMatching = True
+        while keepMatching:
+            tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
+            failed = []
+            for e in tmpExprs:
+                try:
+                    tmpLoc = e.tryParse( instring, tmpLoc )
+                except ParseException:
+                    failed.append(e)
+                else:
+                    matchOrder.append(self.opt1map.get(id(e),e))
+                    if e in tmpReqd:
+                        tmpReqd.remove(e)
+                    elif e in tmpOpt:
+                        tmpOpt.remove(e)
+            if len(failed) == len(tmpExprs):
+                keepMatching = False
+
+        if tmpReqd:
+            missing = ", ".join(_ustr(e) for e in tmpReqd)
+            raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
+
+        # add any unmatched Optionals, in case they have default values defined
+        matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
+
+        resultlist = []
+        for e in matchOrder:
+            loc,results = e._parse(instring,loc,doActions)
+            resultlist.append(results)
+
+        finalResults = sum(resultlist, ParseResults([]))
+        return loc, finalResults
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class ParseElementEnhance(ParserElement):
+    """
+    Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
+    """
+    def __init__( self, expr, savelist=False ):
+        super(ParseElementEnhance,self).__init__(savelist)
+        if isinstance( expr, basestring ):
+            if issubclass(ParserElement._literalStringClass, Token):
+                expr = ParserElement._literalStringClass(expr)
+            else:
+                expr = ParserElement._literalStringClass(Literal(expr))
+        self.expr = expr
+        self.strRepr = None
+        if expr is not None:
+            self.mayIndexError = expr.mayIndexError
+            self.mayReturnEmpty = expr.mayReturnEmpty
+            self.setWhitespaceChars( expr.whiteChars )
+            self.skipWhitespace = expr.skipWhitespace
+            self.saveAsList = expr.saveAsList
+            self.callPreparse = expr.callPreparse
+            self.ignoreExprs.extend(expr.ignoreExprs)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.expr is not None:
+            return self.expr._parse( instring, loc, doActions, callPreParse=False )
+        else:
+            raise ParseException("",loc,self.errmsg,self)
+
+    def leaveWhitespace( self ):
+        self.skipWhitespace = False
+        self.expr = self.expr.copy()
+        if self.expr is not None:
+            self.expr.leaveWhitespace()
+        return self
+
+    def ignore( self, other ):
+        if isinstance( other, Suppress ):
+            if other not in self.ignoreExprs:
+                super( ParseElementEnhance, self).ignore( other )
+                if self.expr is not None:
+                    self.expr.ignore( self.ignoreExprs[-1] )
+        else:
+            super( ParseElementEnhance, self).ignore( other )
+            if self.expr is not None:
+                self.expr.ignore( self.ignoreExprs[-1] )
+        return self
+
+    def streamline( self ):
+        super(ParseElementEnhance,self).streamline()
+        if self.expr is not None:
+            self.expr.streamline()
+        return self
+
+    def checkRecursion( self, parseElementList ):
+        if self in parseElementList:
+            raise RecursiveGrammarException( parseElementList+[self] )
+        subRecCheckList = parseElementList[:] + [ self ]
+        if self.expr is not None:
+            self.expr.checkRecursion( subRecCheckList )
+
+    def validate( self, validateTrace=[] ):
+        tmp = validateTrace[:]+[self]
+        if self.expr is not None:
+            self.expr.validate(tmp)
+        self.checkRecursion( [] )
+
+    def __str__( self ):
+        try:
+            return super(ParseElementEnhance,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None and self.expr is not None:
+            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
+        return self.strRepr
+
+
+class FollowedBy(ParseElementEnhance):
+    """
+    Lookahead matching of the given parse expression.  C{FollowedBy}
+    does I{not} advance the parsing position within the input string, it only
+    verifies that the specified parse expression matches at the current
+    position.  C{FollowedBy} always returns a null token list.
+
+    Example::
+        # use FollowedBy to match a label only if it is followed by a ':'
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        
+        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+    prints::
+        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+    """
+    def __init__( self, expr ):
+        super(FollowedBy,self).__init__(expr)
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self.expr.tryParse( instring, loc )
+        return loc, []
+
+
+class NotAny(ParseElementEnhance):
+    """
+    Lookahead to disallow matching with the given parse expression.  C{NotAny}
+    does I{not} advance the parsing position within the input string, it only
+    verifies that the specified parse expression does I{not} match at the current
+    position.  Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
+    always returns a null token list.  May be constructed using the '~' operator.
+
+    Example::
+        
+    """
+    def __init__( self, expr ):
+        super(NotAny,self).__init__(expr)
+        #~ self.leaveWhitespace()
+        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+        self.mayReturnEmpty = True
+        self.errmsg = "Found unwanted token, "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.expr.canParseNext(instring, loc):
+            raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+        return self.strRepr
+
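+# Illustrative usage sketch (added comment, not from the original pyparsing sources):
+# NotAny (the '~' operator) is a negative lookahead - here it stops a repetition at
+# the sentinel word "END" without consuming any input:
+#
+#     body_word = NotAny(Literal("END")) + Word(alphas)
+#     print(OneOrMore(body_word).parseString("one two END three"))   # -> ['one', 'two']
+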
+class _MultipleMatch(ParseElementEnhance):
+    def __init__( self, expr, stopOn=None):
+        super(_MultipleMatch, self).__init__(expr)
+        self.saveAsList = True
+        ender = stopOn
+        if isinstance(ender, basestring):
+            ender = ParserElement._literalStringClass(ender)
+        self.not_ender = ~ender if ender is not None else None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self_expr_parse = self.expr._parse
+        self_skip_ignorables = self._skipIgnorables
+        check_ender = self.not_ender is not None
+        if check_ender:
+            try_not_ender = self.not_ender.tryParse
+        
+        # must be at least one (but first see if we are the stopOn sentinel;
+        # if so, fail)
+        if check_ender:
+            try_not_ender(instring, loc)
+        loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
+        try:
+            hasIgnoreExprs = (not not self.ignoreExprs)
+            while 1:
+                if check_ender:
+                    try_not_ender(instring, loc)
+                if hasIgnoreExprs:
+                    preloc = self_skip_ignorables( instring, loc )
+                else:
+                    preloc = loc
+                loc, tmptokens = self_expr_parse( instring, preloc, doActions )
+                if tmptokens or tmptokens.haskeys():
+                    tokens += tmptokens
+        except (ParseException,IndexError):
+            pass
+
+        return loc, tokens
+        
+class OneOrMore(_MultipleMatch):
+    """
+    Repetition of one or more of the given expression.
+    
+    Parameters:
+     - expr - expression that must match one or more times
+     - stopOn - (default=C{None}) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition 
+          expression)          
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: BLACK"
+        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]
+
+        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
+        
+        # could also be written as
+        (attr_expr * (1,)).parseString(text).pprint()
+    """
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + _ustr(self.expr) + "}..."
+
+        return self.strRepr
+
+class ZeroOrMore(_MultipleMatch):
+    """
+    Optional repetition of zero or more of the given expression.
+    
+    Parameters:
+     - expr - expression that must match zero or more times
+     - stopOn - (default=C{None}) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition 
+          expression)          
+
+    Example: similar to L{OneOrMore}
+    """
+    def __init__( self, expr, stopOn=None):
+        super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
+        self.mayReturnEmpty = True
+        
+    def parseImpl( self, instring, loc, doActions=True ):
+        try:
+            return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
+        except (ParseException,IndexError):
+            return loc, []
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]..."
+
+        return self.strRepr
+
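+# Illustrative usage sketch (added comment, not from the original pyparsing sources):
+# unlike OneOrMore, ZeroOrMore succeeds (returning no tokens) when the repeated
+# expression is absent:
+#
+#     greeting = Literal("hello") + ZeroOrMore(Word(alphas))
+#     print(greeting.parseString("hello"))            # -> ['hello']
+#     print(greeting.parseString("hello big world"))  # -> ['hello', 'big', 'world']
+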
+class _NullToken(object):
+    def __bool__(self):
+        return False
+    __nonzero__ = __bool__
+    def __str__(self):
+        return ""
+
+_optionalNotMatched = _NullToken()
+class Optional(ParseElementEnhance):
+    """
+    Optional matching of the given expression.
+
+    Parameters:
+     - expr - expression that must match zero or more times
+     - default (optional) - value to be returned if the optional expression is not found.
+
+    Example::
+        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
+        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
+        zip.runTests('''
+            # traditional ZIP code
+            12345
+            
+            # ZIP+4 form
+            12101-0001
+            
+            # invalid ZIP
+            98765-
+            ''')
+    prints::
+        # traditional ZIP code
+        12345
+        ['12345']
+
+        # ZIP+4 form
+        12101-0001
+        ['12101-0001']
+
+        # invalid ZIP
+        98765-
+             ^
+        FAIL: Expected end of text (at char 5), (line:1, col:6)
+    """
+    def __init__( self, expr, default=_optionalNotMatched ):
+        super(Optional,self).__init__( expr, savelist=False )
+        self.saveAsList = self.expr.saveAsList
+        self.defaultValue = default
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        try:
+            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
+        except (ParseException,IndexError):
+            if self.defaultValue is not _optionalNotMatched:
+                if self.expr.resultsName:
+                    tokens = ParseResults([ self.defaultValue ])
+                    tokens[self.expr.resultsName] = self.defaultValue
+                else:
+                    tokens = [ self.defaultValue ]
+            else:
+                tokens = []
+        return loc, tokens
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]"
+
+        return self.strRepr
+
+class SkipTo(ParseElementEnhance):
+    """
+    Token for skipping over all undefined text until the matched expression is found.
+
+    Parameters:
+     - expr - target expression marking the end of the data to be skipped
+     - include - (default=C{False}) if True, the target expression is also parsed 
+          (the skipped text and target expression are returned as a 2-element list).
+     - ignore - (default=C{None}) used to define grammars (typically quoted strings and 
+          comments) that might contain false matches to the target expression
+     - failOn - (default=C{None}) define expressions that are not allowed to be 
+          included in the skipped test; if found before the target expression is found, 
+          the SkipTo is not a match
+
+    Example::
+        report = '''
+            Outstanding Issues Report - 1 Jan 2000
+
+               # | Severity | Description                               |  Days Open
+            -----+----------+-------------------------------------------+-----------
+             101 | Critical | Intermittent system crash                 |          6
+              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
+              79 | Minor    | System slow when running too many reports |         47
+            '''
+        integer = Word(nums)
+        SEP = Suppress('|')
+        # use SkipTo to simply match everything up until the next SEP
+        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+        # - parse action will call token.strip() for each matched token, i.e., the description body
+        string_data = SkipTo(SEP, ignore=quotedString)
+        string_data.setParseAction(tokenMap(str.strip))
+        ticket_expr = (integer("issue_num") + SEP 
+                      + string_data("sev") + SEP 
+                      + string_data("desc") + SEP 
+                      + integer("days_open"))
+        
+        for tkt in ticket_expr.searchString(report):
+            print(tkt.dump())
+    prints::
+        ['101', 'Critical', 'Intermittent system crash', '6']
+        - days_open: 6
+        - desc: Intermittent system crash
+        - issue_num: 101
+        - sev: Critical
+        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+        - days_open: 14
+        - desc: Spelling error on Login ('log|n')
+        - issue_num: 94
+        - sev: Cosmetic
+        ['79', 'Minor', 'System slow when running too many reports', '47']
+        - days_open: 47
+        - desc: System slow when running too many reports
+        - issue_num: 79
+        - sev: Minor
+    """
+    def __init__( self, other, include=False, ignore=None, failOn=None ):
+        super( SkipTo, self ).__init__( other )
+        self.ignoreExpr = ignore
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.includeMatch = include
+        self.asList = False
+        if isinstance(failOn, basestring):
+            self.failOn = ParserElement._literalStringClass(failOn)
+        else:
+            self.failOn = failOn
+        self.errmsg = "No match found for "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        startloc = loc
+        instrlen = len(instring)
+        expr = self.expr
+        expr_parse = self.expr._parse
+        self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
+        self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+        
+        tmploc = loc
+        while tmploc <= instrlen:
+            if self_failOn_canParseNext is not None:
+                # break if failOn expression matches
+                if self_failOn_canParseNext(instring, tmploc):
+                    break
+                    
+            if self_ignoreExpr_tryParse is not None:
+                # advance past ignore expressions
+                while 1:
+                    try:
+                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+                    except ParseBaseException:
+                        break
+            
+            try:
+                expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+            except (ParseException, IndexError):
+                # no match, advance loc in string
+                tmploc += 1
+            else:
+                # matched skipto expr, done
+                break
+
+        else:
+            # ran off the end of the input string without matching skipto expr, fail
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        # build up return values
+        loc = tmploc
+        skiptext = instring[startloc:loc]
+        skipresult = ParseResults(skiptext)
+        
+        if self.includeMatch:
+            loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
+            skipresult += mat
+
+        return loc, skipresult
+
+class Forward(ParseElementEnhance):
+    """
+    Forward declaration of an expression to be defined later -
+    used for recursive grammars, such as algebraic infix notation.
+    When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.
+
+    Note: take care when assigning to C{Forward} not to overlook precedence of operators.
+    Specifically, '|' has a lower precedence than '<<', so that::
+        fwdExpr << a | b | c
+    will actually be evaluated as::
+        (fwdExpr << a) | b | c
+    thereby leaving b and c out as parseable alternatives.  It is recommended that you
+    explicitly group the values inserted into the C{Forward}::
+        fwdExpr << (a | b | c)
+    Converting to use the '<<=' operator instead will avoid this problem.
+
+    See L{ParseResults.pprint} for an example of a recursive parser created using
+    C{Forward}.
+    """
+    def __init__( self, other=None ):
+        super(Forward,self).__init__( other, savelist=False )
+
+    def __lshift__( self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass(other)
+        self.expr = other
+        self.strRepr = None
+        self.mayIndexError = self.expr.mayIndexError
+        self.mayReturnEmpty = self.expr.mayReturnEmpty
+        self.setWhitespaceChars( self.expr.whiteChars )
+        self.skipWhitespace = self.expr.skipWhitespace
+        self.saveAsList = self.expr.saveAsList
+        self.ignoreExprs.extend(self.expr.ignoreExprs)
+        return self
+        
+    def __ilshift__(self, other):
+        return self << other
+    
+    def leaveWhitespace( self ):
+        self.skipWhitespace = False
+        return self
+
+    def streamline( self ):
+        if not self.streamlined:
+            self.streamlined = True
+            if self.expr is not None:
+                self.expr.streamline()
+        return self
+
+    def validate( self, validateTrace=[] ):
+        if self not in validateTrace:
+            tmp = validateTrace[:]+[self]
+            if self.expr is not None:
+                self.expr.validate(tmp)
+        self.checkRecursion([])
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+        return self.__class__.__name__ + ": ..."
+
+        # stubbed out for now - creates awful memory and perf issues
+        self._revertClass = self.__class__
+        self.__class__ = _ForwardNoRecurse
+        try:
+            if self.expr is not None:
+                retString = _ustr(self.expr)
+            else:
+                retString = "None"
+        finally:
+            self.__class__ = self._revertClass
+        return self.__class__.__name__ + ": " + retString
+
+    def copy(self):
+        if self.expr is not None:
+            return super(Forward,self).copy()
+        else:
+            ret = Forward()
+            ret <<= self
+            return ret
+
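+# Illustrative usage sketch (added comment, not from the original pyparsing sources):
+# a Forward lets a grammar refer to itself; here nested parenthesized groups are
+# parsed recursively.  All names are example-only:
+#
+#     LPAR, RPAR = map(Suppress, "()")
+#     group = Forward()
+#     group <<= Group(LPAR + ZeroOrMore(Word(alphas) | group) + RPAR)
+#     print(group.parseString("(a (b c) d)"))   # -> [['a', ['b', 'c'], 'd']]
+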
+class _ForwardNoRecurse(Forward):
+    def __str__( self ):
+        return "..."
+
+class TokenConverter(ParseElementEnhance):
+    """
+    Abstract subclass of C{ParseExpression}, for converting parsed results.
+    """
+    def __init__( self, expr, savelist=False ):
+        super(TokenConverter,self).__init__( expr )#, savelist )
+        self.saveAsList = False
+
+class Combine(TokenConverter):
+    """
+    Converter to concatenate all matching tokens to a single string.
+    By default, the matching patterns must also be contiguous in the input string;
+    this can be disabled by specifying C{'adjacent=False'} in the constructor.
+
+    Example::
+        real = Word(nums) + '.' + Word(nums)
+        print(real.parseString('3.1416')) # -> ['3', '.', '1416']
+        # will also erroneously match the following
+        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']
+
+        real = Combine(Word(nums) + '.' + Word(nums))
+        print(real.parseString('3.1416')) # -> ['3.1416']
+        # no match when there are internal spaces
+        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
+    """
+    def __init__( self, expr, joinString="", adjacent=True ):
+        super(Combine,self).__init__( expr )
+        # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
+        if adjacent:
+            self.leaveWhitespace()
+        self.adjacent = adjacent
+        self.skipWhitespace = True
+        self.joinString = joinString
+        self.callPreparse = True
+
+    def ignore( self, other ):
+        if self.adjacent:
+            ParserElement.ignore(self, other)
+        else:
+            super( Combine, self).ignore( other )
+        return self
+
+    def postParse( self, instring, loc, tokenlist ):
+        retToks = tokenlist.copy()
+        del retToks[:]
+        retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
+
+        if self.resultsName and retToks.haskeys():
+            return [ retToks ]
+        else:
+            return retToks
+
+class Group(TokenConverter):
+    """
+    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.
+
+    Example::
+        ident = Word(alphas)
+        num = Word(nums)
+        term = ident | num
+        func = ident + Optional(delimitedList(term))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']
+
+        func = ident + Group(Optional(delimitedList(term)))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
+    """
+    def __init__( self, expr ):
+        super(Group,self).__init__( expr )
+        self.saveAsList = True
+
+    def postParse( self, instring, loc, tokenlist ):
+        return [ tokenlist ]
+
+class Dict(TokenConverter):
+    """
+    Converter to return a repetitive expression as a list, but also as a dictionary.
+    Each element can also be referenced using the first token in the expression as its key.
+    Useful for tabular report scraping when the first column can be used as an item key.
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        
+        # print attributes as plain groups
+        print(OneOrMore(attr_expr).parseString(text).dump())
+        
+        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
+        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
+        print(result.dump())
+        
+        # access named fields as dict entries, or output as dict
+        print(result['shape'])        
+        print(result.asDict())
+    prints::
+        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+    See more examples at L{ParseResults} of accessing fields by results name.
+    """
+    def __init__( self, expr ):
+        super(Dict,self).__init__( expr )
+        self.saveAsList = True
+
+    def postParse( self, instring, loc, tokenlist ):
+        for i,tok in enumerate(tokenlist):
+            if len(tok) == 0:
+                continue
+            ikey = tok[0]
+            if isinstance(ikey,int):
+                ikey = _ustr(tok[0]).strip()
+            if len(tok)==1:
+                tokenlist[ikey] = _ParseResultsWithOffset("",i)
+            elif len(tok)==2 and not isinstance(tok[1],ParseResults):
+                tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
+            else:
+                dictvalue = tok.copy() #ParseResults(i)
+                del dictvalue[0]
+                if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
+                else:
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
+
+        if self.resultsName:
+            return [ tokenlist ]
+        else:
+            return tokenlist
+
+
+class Suppress(TokenConverter):
+    """
+    Converter for ignoring the results of a parsed expression.
+
+    Example::
+        source = "a, b, c,d"
+        wd = Word(alphas)
+        wd_list1 = wd + ZeroOrMore(',' + wd)
+        print(wd_list1.parseString(source))
+
+        # often, delimiters that are useful during parsing are just in the
+        # way afterward - use Suppress to keep them out of the parsed output
+        wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
+        print(wd_list2.parseString(source))
+    prints::
+        ['a', ',', 'b', ',', 'c', ',', 'd']
+        ['a', 'b', 'c', 'd']
+    (See also L{delimitedList}.)
+    """
+    def postParse( self, instring, loc, tokenlist ):
+        return []
+
+    def suppress( self ):
+        return self
+
+
+class OnlyOnce(object):
+    """
+    Wrapper for parse actions, to ensure they are only called once.
+    """
+    def __init__(self, methodCall):
+        self.callable = _trim_arity(methodCall)
+        self.called = False
+    def __call__(self,s,l,t):
+        if not self.called:
+            results = self.callable(s,l,t)
+            self.called = True
+            return results
+        raise ParseException(s,l,"")
+    def reset(self):
+        self.called = False
+
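+# Illustrative usage sketch (added comment, not from the original pyparsing sources):
+# OnlyOnce wraps a parse action so that a second invocation raises a ParseException
+# until reset() is called.  Names below are example-only:
+#
+#     def mark_seen(s, l, t):
+#         print("saw:", t[0])
+#     seen_once = OnlyOnce(mark_seen)
+#     wd = Word(alphas).setParseAction(seen_once)
+#     wd.parseString("first")    # prints "saw: first"
+#     wd.parseString("second")   # raises ParseException; seen_once.reset() re-arms it
+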
+def traceParseAction(f):
+    """
+    Decorator for debugging parse actions. 
+    
+    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
+    When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.
+
+    Example::
+        wd = Word(alphas)
+
+        @traceParseAction
+        def remove_duplicate_chars(tokens):
+            return ''.join(sorted(set(''.join(tokens))))
+
+        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
+        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
+    prints::
+        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
+        <<leaving remove_duplicate_chars (ret: 'dfjkls')
+        ['dfjkls']
+    """
+    f = _trim_arity(f)
+    def z(*paArgs):
+        thisFunc = f.__name__
+        s,l,t = paArgs[-3:]
+        if len(paArgs)>3:
+            thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
+        sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )
+        try:
+            ret = f(*paArgs)
+        except Exception as exc:
+            sys.stderr.write( "< ['aa', 'bb', 'cc']
+        delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
+    """
+    dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
+    if combine:
+        return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
+    else:
+        return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
+
+def countedArray( expr, intExpr=None ):
+    """
+    Helper to define a counted list of expressions.
+    This helper defines a pattern of the form::
+        integer expr expr expr...
+    where the leading integer tells how many expr expressions follow.
+    The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
+    
+    If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.
+
+    Example::
+        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']
+
+        # in this parser, the leading integer value is given in binary,
+        # '10' indicating that 2 values are in the array
+        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
+        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
+    """
+    arrayExpr = Forward()
+    def countFieldParseAction(s,l,t):
+        n = t[0]
+        arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
+        return []
+    if intExpr is None:
+        intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
+    else:
+        intExpr = intExpr.copy()
+    intExpr.setName("arrayLen")
+    intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
+    return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')
+
+def _flatten(L):
+    ret = []
+    for i in L:
+        if isinstance(i,list):
+            ret.extend(_flatten(i))
+        else:
+            ret.append(i)
+    return ret
+
+def matchPreviousLiteral(expr):
+    """
+    Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks
+    for a 'repeat' of a previous expression.  For example::
+        first = Word(nums)
+        second = matchPreviousLiteral(first)
+        matchExpr = first + ":" + second
+    will match C{"1:1"}, but not C{"1:2"}.  Because this matches a
+    previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
+    If this is not desired, use C{matchPreviousExpr}.
+    Do I{not} use with packrat parsing enabled.
+    """
+    rep = Forward()
+    def copyTokenToRepeater(s,l,t):
+        if t:
+            if len(t) == 1:
+                rep << t[0]
+            else:
+                # flatten t tokens
+                tflat = _flatten(t.asList())
+                rep << And(Literal(tt) for tt in tflat)
+        else:
+            rep << Empty()
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def matchPreviousExpr(expr):
+    """
+    Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks
+    for a 'repeat' of a previous expression.  For example::
+        first = Word(nums)
+        second = matchPreviousExpr(first)
+        matchExpr = first + ":" + second
+    will match C{"1:1"}, but not C{"1:2"}.  Because this matches by
+    expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
+    the expressions are evaluated first, and then compared, so
+    C{"1"} is compared with C{"10"}.
+    Do I{not} use with packrat parsing enabled.
+    """
+    rep = Forward()
+    e2 = expr.copy()
+    rep <<= e2
+    def copyTokenToRepeater(s,l,t):
+        matchTokens = _flatten(t.asList())
+        def mustMatchTheseTokens(s,l,t):
+            theseTokens = _flatten(t.asList())
+            if  theseTokens != matchTokens:
+                raise ParseException("",0,"")
+        rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def _escapeRegexRangeChars(s):
+    #~  escape these chars: ^-]
+    for c in r"\^-]":
+        s = s.replace(c,_bslash+c)
+    s = s.replace("\n",r"\n")
+    s = s.replace("\t",r"\t")
+    return _ustr(s)
+
+def oneOf( strs, caseless=False, useRegex=True ):
+    """
+    Helper to quickly define a set of alternative Literals, and makes sure to do
+    longest-first testing when there is a conflict, regardless of the input order,
+    but returns a C{L{MatchFirst}} for best performance.
+
+    Parameters:
+     - strs - a string of space-delimited literals, or a collection of string literals
+     - caseless - (default=C{False}) - treat all literals as caseless
+     - useRegex - (default=C{True}) - as an optimization, will generate a Regex
+          object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
+          if creating a C{Regex} raises an exception)
+
+    Example::
+        comp_oper = oneOf("< = > <= >= !=")
+        var = Word(alphas)
+        number = Word(nums)
+        term = var | number
+        comparison_expr = term + comp_oper + term
+        print(comparison_expr.searchString("B = 12  AA=23 B<=AA AA>12"))
+    prints::
+        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
+    """
+    if caseless:
+        isequal = ( lambda a,b: a.upper() == b.upper() )
+        masks = ( lambda a,b: b.upper().startswith(a.upper()) )
+        parseElementClass = CaselessLiteral
+    else:
+        isequal = ( lambda a,b: a == b )
+        masks = ( lambda a,b: b.startswith(a) )
+        parseElementClass = Literal
+
+    symbols = []
+    if isinstance(strs,basestring):
+        symbols = strs.split()
+    elif isinstance(strs, Iterable):
+        symbols = list(strs)
+    else:
+        warnings.warn("Invalid argument to oneOf, expected string or iterable",
+                SyntaxWarning, stacklevel=2)
+    if not symbols:
+        return NoMatch()
+
+    i = 0
+    while i < len(symbols)-1:
+        cur = symbols[i]
+        for j,other in enumerate(symbols[i+1:]):
+            if ( isequal(other, cur) ):
+                del symbols[i+j+1]
+                break
+            elif ( masks(cur, other) ):
+                del symbols[i+j+1]
+                symbols.insert(i,other)
+                cur = other
+                break
+        else:
+            i += 1
+
+    if not caseless and useRegex:
+        #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
+        try:
+            if len(symbols)==len("".join(symbols)):
+                return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
+            else:
+                return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
+        except Exception:
+            warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
+                    SyntaxWarning, stacklevel=2)
+
+
+    # last resort, just use MatchFirst
+    return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
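+
+# Editor's note (illustrative, not part of the original source): the longest-first
+# reordering performed above means the order of operators in the call does not matter:
+#
+#     oneOf("< > <= >=").parseString("<=")               # -> ['<='] ("<=" is tried before "<")
+#     oneOf("abc ABC", caseless=True).parseString("aBc")  # -> ['abc'] (caseless duplicates are
+#                                                          #    merged; the first spelling is kept)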
+
+def dictOf( key, value ):
+    """
+    Helper to easily and clearly define a dictionary by specifying the respective patterns
+    for the key and value.  Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
+    in the proper order.  The key pattern can include delimiting markers or punctuation,
+    as long as they are suppressed, thereby leaving the significant key text.  The value
+    pattern can include named results, so that the C{Dict} results can include named token
+    fields.
+
+    Example::
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        print(OneOrMore(attr_expr).parseString(text).dump())
+        
+        attr_label = label
+        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
+
+        # similar to Dict, but simpler call format
+        result = dictOf(attr_label, attr_value).parseString(text)
+        print(result.dump())
+        print(result['shape'])
+        print(result.shape)  # object attribute access works too
+        print(result.asDict())
+    prints::
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        SQUARE
+        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
+    """
+    return Dict( ZeroOrMore( Group ( key + value ) ) )
+
+def originalTextFor(expr, asString=True):
+    """
+    Helper to return the original, untokenized text for a given expression.  Useful to
+    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
+    revert separate tokens with intervening whitespace back to the original matching
+    input text. By default, returns a string containing the original parsed text.
+
+    If the optional C{asString} argument is passed as C{False}, then the return value is a 
+    C{L{ParseResults}} containing any results names that were originally matched, and a 
+    single token containing the original matched text from the input string.  So if 
+    the expression passed to C{L{originalTextFor}} contains expressions with defined
+    results names, you must set C{asString} to C{False} if you want to preserve those
+    results name values.
+
+    Example::
+        src = "this is test  bold text  normal text "
+        for tag in ("b","i"):
+            opener,closer = makeHTMLTags(tag)
+            patt = originalTextFor(opener + SkipTo(closer) + closer)
+            print(patt.searchString(src)[0])
+    prints::
+        ['<b> bold <i>text</i> </b>']
+        ['<i>text</i>']
+    """
+    locMarker = Empty().setParseAction(lambda s,loc,t: loc)
+    endlocMarker = locMarker.copy()
+    endlocMarker.callPreparse = False
+    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
+    if asString:
+        extractText = lambda s,l,t: s[t._original_start:t._original_end]
+    else:
+        def extractText(s,l,t):
+            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
+    matchExpr.setParseAction(extractText)
+    matchExpr.ignoreExprs = expr.ignoreExprs
+    return matchExpr
+
+def ungroup(expr): 
+    """
+    Helper to undo pyparsing's default grouping of And expressions, even
+    if all but one are non-empty.
+    """
+    return TokenConverter(expr).setParseAction(lambda t:t[0])
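+
+# Editor's note (illustrative, not part of the original source): ungroup strips one
+# level of grouping from the results, e.g.
+#
+#     grouped = Group(Word(alphas) + Word(nums))
+#     grouped.parseString("abc 100")           # -> [['abc', '100']]
+#     ungroup(grouped).parseString("abc 100")  # -> ['abc', '100']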
+
+def locatedExpr(expr):
+    """
+    Helper to decorate a returned token with its starting and ending locations in the input string.
+    This helper adds the following results names:
+     - locn_start = location where matched expression begins
+     - locn_end = location where matched expression ends
+     - value = the actual parsed results
+
+    Be careful if the input text contains C{<TAB>} characters; you may want to call
+    C{L{ParserElement.parseWithTabs}}
+
+    Example::
+        wd = Word(alphas)
+        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
+            print(match)
+    prints::
+        [[0, 'ljsdf', 5]]
+        [[8, 'lksdjjf', 15]]
+        [[18, 'lkkjj', 23]]
+    """
+    locator = Empty().setParseAction(lambda s,l,t: l)
+    return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
+
+
+# convenience constants for positional expressions
+empty       = Empty().setName("empty")
+lineStart   = LineStart().setName("lineStart")
+lineEnd     = LineEnd().setName("lineEnd")
+stringStart = StringStart().setName("stringStart")
+stringEnd   = StringEnd().setName("stringEnd")
+
+_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
+_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
+_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
+_charRange = Group(_singleChar + Suppress("-") + _singleChar)
+_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
+
+def srange(s):
+    r"""
+    Helper to easily define string ranges for use in Word construction.  Borrows
+    syntax from regexp '[]' string range definitions::
+        srange("[0-9]")   -> "0123456789"
+        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
+        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
+    The input string must be enclosed in []'s, and the returned string is the expanded
+    character set joined into a single string.
+    The values enclosed in the []'s may be:
+     - a single character
+     - an escaped character with a leading backslash (such as C{\-} or C{\]})
+     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) 
+         (C{\0x##} is also supported for backwards compatibility) 
+     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
+     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
+     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
+    """
+    _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
+    try:
+        return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
+    except Exception:
+        return ""
+
+def matchOnlyAtCol(n):
+    """
+    Helper method for defining parse actions that require matching at a specific
+    column in the input text.
+    """
+    def verifyCol(strg,locn,toks):
+        if col(locn,strg) != n:
+            raise ParseException(strg,locn,"matched token not at column %d" % n)
+    return verifyCol
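+
+# Editor's note (illustrative, not part of the original source): matchOnlyAtCol
+# returns a parse action; attach it with addParseAction, e.g.
+#
+#     col1_word = Word(alphas).addParseAction(matchOnlyAtCol(1))
+#     # col1_word now only matches words that begin in column 1 of their line.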
+
+def replaceWith(replStr):
+    """
+    Helper method for common parse actions that simply return a literal value.  Especially
+    useful when used with C{L{transformString}()}.
+
+    Example::
+        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
+        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
+        term = na | num
+        
+        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
+    """
+    return lambda s,l,t: [replStr]
+
+def removeQuotes(s,l,t):
+    """
+    Helper parse action for removing quotation marks from parsed quoted strings.
+
+    Example::
+        # by default, quotation marks are included in parsed results
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
+
+        # use removeQuotes to strip quotation marks from parsed results
+        quotedString.setParseAction(removeQuotes)
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
+    """
+    return t[0][1:-1]
+
+def tokenMap(func, *args):
+    """
+    Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional
+    args are passed, they are forwarded to the given function as additional arguments after
+    the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
+    parsed data to an integer using base 16.
+
+    Example (compare the last example to the one in L{ParserElement.transformString})::
+        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
+        hex_ints.runTests('''
+            00 11 22 aa FF 0a 0d 1a
+            ''')
+        
+        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
+        OneOrMore(upperword).runTests('''
+            my kingdom for a horse
+            ''')
+
+        wd = Word(alphas).setParseAction(tokenMap(str.title))
+        OneOrMore(wd).setParseAction(' '.join).runTests('''
+            now is the winter of our discontent made glorious summer by this sun of york
+            ''')
+    prints::
+        00 11 22 aa FF 0a 0d 1a
+        [0, 17, 34, 170, 255, 10, 13, 26]
+
+        my kingdom for a horse
+        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
+
+        now is the winter of our discontent made glorious summer by this sun of york
+        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
+    """
+    def pa(s,l,t):
+        return [func(tokn, *args) for tokn in t]
+
+    try:
+        func_name = getattr(func, '__name__', 
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    pa.__name__ = func_name
+
+    return pa
+
+upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
+"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""
+
+downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
+"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
+    
+def _makeTags(tagStr, xml):
+    """Internal helper to construct opening and closing tag expressions, given a tag name"""
+    if isinstance(tagStr,basestring):
+        resname = tagStr
+        tagStr = Keyword(tagStr, caseless=not xml)
+    else:
+        resname = tagStr.name
+
+    tagAttrName = Word(alphas,alphanums+"_-:")
+    if (xml):
+        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
+        openTag = Suppress("<") + tagStr("tag") + \
+                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
+                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+    else:
+        printablesLessRAbrack = "".join(c for c in printables if c not in ">")
+        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
+        openTag = Suppress("<") + tagStr("tag") + \
+                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
+                Optional( Suppress("=") + tagAttrValue ) ))) + \
+                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+    closeTag = Combine(_L("</") + tagStr + ">")
+
+    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
+    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
+    openTag.tag = resname
+    closeTag.tag = resname
+    return openTag, closeTag
+
+def makeHTMLTags(tagStr):
+    """
+    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
+    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.
+
+    Example::
+        text = 'More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page'
+        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
+        a,a_end = makeHTMLTags("A")
+        link_expr = a + SkipTo(a_end)("link_text") + a_end
+        
+        for link in link_expr.searchString(text):
+            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
+            print(link.link_text, '->', link.href)
+    prints::
+        pyparsing -> http://pyparsing.wikispaces.com
+    """
+    return _makeTags( tagStr, False )
+
+def makeXMLTags(tagStr):
+    """
+    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
+    tags only in the given upper/lower case.
+
+    Example: similar to L{makeHTMLTags}
+    """
+    return _makeTags( tagStr, True )
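+
+# Editor's note (illustrative, not part of the original source): makeXMLTags works
+# like the makeHTMLTags example above, but is case-sensitive and requires quoted
+# attribute values, e.g.
+#
+#     body, body_end = makeXMLTags("body")
+#     text = '<body lang="en">hello</body>'
+#     print((body + SkipTo(body_end)("content") + body_end).parseString(text).content)
+#     # prints: hello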
+
+def withAttribute(*args,**attrDict):
+    """
+    Helper to create a validating parse action to be used with start tags created
+    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
+    with a required attribute value, to avoid false matches on common tags such as
+    C{<TD>} or C{<DIV>
}. + + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python + reserved word, as in C{**{"class":"Customer", "align":"right"}} + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. + + If just testing for C{class} (with or without a namespace), use C{L{withClass}}. + + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + + Example:: + html = ''' +
+ Some text +
1 4 0 1 0
+
1,3 2,3 1,1
+
this has no type
+
+ + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k,v) for k,v in attrs] + def pa(s,l,tokens): + for attrName,attrValue in attrs: + if attrName not in tokens: + raise ParseException(s,l,"no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +def withClass(classname, namespace=''): + """ + Simplified version of C{L{withAttribute}} when matching on a div class - made + difficult because C{class} is a reserved word in Python. + + Example:: + html = ''' +
+ Some text +
1 4 0 1 0
+
1,3 2,3 1,1
+
this <div> has no class
+
+ + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + classattr = "%s:class" % namespace if namespace else "class" + return withAttribute(**{classattr : classname}) + +opAssoc = _Constants() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): + """ + Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. The generated parser will also recognize the use + of parentheses to override operator precedences (see example below). + + Note: if you define a deep operator list, you may see performance issues + when using infixNotation. See L{ParserElement.enablePackrat} for a + mechanism to potentially improve your parser performance. + + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
+ - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted); if the parse action + is passed a tuple or list of functions, this is equivalent to + calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) + - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) + - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) + + Example:: + # simple example of four-function arithmetic with ints and variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + prints:: + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + ret = Forward() + lastExpr = baseExpr | ( lpar + ret + rpar ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward().setName(termName) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + if isinstance(pa, (tuple, list)): + matchExpr.setParseAction(*pa) + else: + matchExpr.setParseAction(pa) + thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) + lastExpr = thisExpr + ret <<= lastExpr + return ret + +operatorPrecedence = infixNotation +"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") 
+quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| + Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """ + Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression + - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression + - content - expression for items within the nested lists (default=C{None}) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) + + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. + + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. + + Example:: + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR,RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + prints:: + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener,basestring) and isinstance(closer,basestring): + if len(opener) == 1 and len(closer)==1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t:t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = 
Forward() + if ignoreExpr is not None: + ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + else: + ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + ret.setName('nested %s%s expression' % (opener,closer)) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """ + Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. + + Parameters: + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single grammar + should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond the + the current level; set to False for block of left-most statements + (default=C{True}) + + A valid block must contain at least one C{blockStatement}. + + Example:: + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group( funcDecl + func_body ) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << ( funcDef | assignment | identifier ) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + prints:: + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName('indented block') + +alphas8bit = 
srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) +commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form C{/* ... */}" + +htmlComment = Regex(r"").setName("HTML comment") +"Comment of the form C{}" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form C{// ... (to end of line)}" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" + +javaStyleComment = cppStyleComment +"Same as C{L{cppStyleComment}}" + +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form C{# ... (to end of line)}" + +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional( Word(" \t") + + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") +commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. 
+ This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """ + Here are some common low-level expressions that may be useful in jump-starting parser development: + - numeric forms (L{integers}, L{reals}, L{scientific notation}) + - common L{programming identifiers} + - network addresses (L{MAC}, L{IPv4}, L{IPv6}) + - ISO8601 L{dates} and L{datetime} + - L{UUID} + - L{comma-separated list} + Parse actions: + - C{L{convertToInteger}} + - C{L{convertToFloat}} + - C{L{convertToDate}} + - C{L{convertToDatetime}} + - C{L{stripHTMLTags}} + - C{L{upcaseTokens}} + - C{L{downcaseTokens}} + + Example:: + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + prints:: + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + + -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + """expression that parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating point number and returns a 
float""" + + sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (C{0.0.0.0 - 255.255.255.255})" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) + + Example:: + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + prints:: + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """ + Helper to create a parse action for converting parsed datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) + + Example:: + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + prints:: + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (C{yyyy-mm-dd})" + + iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """ + Parse action to remove HTML tags from web page HTML source + + Example:: + # strip HTML links from normal text + text = 'More info at the
pyparsing wiki page' + td,td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + + print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + + Optional( White(" \t") ) ) ).streamline().setName("commaItem") + comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" + + +if __name__ == "__main__": + + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + + ident = Word(alphas, alphanums + "_$") + + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec = ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" + 12345678-1234-5678-1234-567812345678 + """) diff --git a/venv/Lib/site-packages/setuptools/archive_util.py b/venv/Lib/site-packages/setuptools/archive_util.py new file mode 100644 index 00000000..0f702848 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/archive_util.py @@ -0,0 +1,205 @@ +"""Utilities for extracting common archive formats""" + +import zipfile +import tarfile +import os +import shutil +import posixpath +import contextlib +from distutils.errors import DistutilsError + +from pkg_resources import ensure_directory + +__all__ = [ + "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", + "UnrecognizedFormat", "extraction_drivers", "unpack_directory", +] + + +class UnrecognizedFormat(DistutilsError): + """Couldn't recognize the archive type""" + + +def default_filter(src, dst): + """The default progress/filter callback; returns True for all files""" + return dst + + +def unpack_archive( + filename, extract_dir, 
progress_filter=default_filter, + drivers=None): + """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` + + `progress_filter` is a function taking two arguments: a source path + internal to the archive ('/'-separated), and a filesystem path where it + will be extracted. The callback must return the desired extract path + (which may be the same as the one passed in), or else ``None`` to skip + that file or directory. The callback can thus be used to report on the + progress of the extraction, as well as to filter the items extracted or + alter their extraction paths. + + `drivers`, if supplied, must be a non-empty sequence of functions with the + same signature as this function (minus the `drivers` argument), that raise + ``UnrecognizedFormat`` if they do not support extracting the designated + archive type. The `drivers` are tried in sequence until one is found that + does not raise an error, or until all are exhausted (in which case + ``UnrecognizedFormat`` is raised). If you do not supply a sequence of + drivers, the module's ``extraction_drivers`` constant will be used, which + means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that + order. + """ + for driver in drivers or extraction_drivers: + try: + driver(filename, extract_dir, progress_filter) + except UnrecognizedFormat: + continue + else: + return + else: + raise UnrecognizedFormat( + "Not a recognized archive type: %s" % filename + ) + + +def unpack_directory(filename, extract_dir, progress_filter=default_filter): + """"Unpack" a directory, using the same interface as for archives + + Raises ``UnrecognizedFormat`` if `filename` is not a directory + """ + if not os.path.isdir(filename): + raise UnrecognizedFormat("%s is not a directory" % filename) + + paths = { + filename: ('', extract_dir), + } + for base, dirs, files in os.walk(filename): + src, dst = paths[base] + for d in dirs: + paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) + for f in files: + target = os.path.join(dst, f) + target = progress_filter(src + f, target) + if not target: + # skip non-files + continue + ensure_directory(target) + f = os.path.join(base, f) + shutil.copyfile(f, target) + shutil.copystat(f, target) + + +def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): + """Unpack zip `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined + by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + + if not zipfile.is_zipfile(filename): + raise UnrecognizedFormat("%s is not a zip file" % (filename,)) + + with zipfile.ZipFile(filename) as z: + for info in z.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' 
in name.split('/'): + continue + + target = os.path.join(extract_dir, *name.split('/')) + target = progress_filter(name, target) + if not target: + continue + if name.endswith('/'): + # directory + ensure_directory(target) + else: + # file + ensure_directory(target) + data = z.read(info.filename) + with open(target, 'wb') as f: + f.write(data) + unix_attributes = info.external_attr >> 16 + if unix_attributes: + os.chmod(target, unix_attributes) + + +def _resolve_tar_file_or_dir(tar_obj, tar_member_obj): + """Resolve any links and extract link targets as normal files.""" + while tar_member_obj is not None and ( + tar_member_obj.islnk() or tar_member_obj.issym()): + linkpath = tar_member_obj.linkname + if tar_member_obj.issym(): + base = posixpath.dirname(tar_member_obj.name) + linkpath = posixpath.join(base, linkpath) + linkpath = posixpath.normpath(linkpath) + tar_member_obj = tar_obj._getmember(linkpath) + + is_file_or_dir = ( + tar_member_obj is not None and + (tar_member_obj.isfile() or tar_member_obj.isdir()) + ) + if is_file_or_dir: + return tar_member_obj + + raise LookupError('Got unknown file type') + + +def _iter_open_tar(tar_obj, extract_dir, progress_filter): + """Emit member-destination pairs from a tar archive.""" + # don't do any chowning! + tar_obj.chown = lambda *args: None + + with contextlib.closing(tar_obj): + for member in tar_obj: + name = member.name + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' in name.split('/'): + continue + + prelim_dst = os.path.join(extract_dir, *name.split('/')) + + try: + member = _resolve_tar_file_or_dir(tar_obj, member) + except LookupError: + continue + + final_dst = progress_filter(name, prelim_dst) + if not final_dst: + continue + + if final_dst.endswith(os.sep): + final_dst = final_dst[:-1] + + yield member, final_dst + + +def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined + by ``tarfile.open()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. 
+ """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError as e: + raise UnrecognizedFormat( + "%s is not a compressed or uncompressed tar file" % (filename,) + ) from e + + for member, final_dst in _iter_open_tar( + tarobj, extract_dir, progress_filter, + ): + try: + # XXX Ugh + tarobj._extract_member(member, final_dst) + except tarfile.ExtractError: + # chown/chmod/mkfifo/mknode/makedev failed + pass + + return True + + +extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/env/lib/python2.7/site-packages/setuptools/build_meta.py b/venv/Lib/site-packages/setuptools/build_meta.py similarity index 81% rename from env/lib/python2.7/site-packages/setuptools/build_meta.py rename to venv/Lib/site-packages/setuptools/build_meta.py index 10c4b528..9dfb2f24 100644 --- a/env/lib/python2.7/site-packages/setuptools/build_meta.py +++ b/venv/Lib/site-packages/setuptools/build_meta.py @@ -32,13 +32,12 @@ import tokenize import shutil import contextlib +import tempfile import setuptools import distutils -from setuptools.py31compat import TemporaryDirectory from pkg_resources import parse_requirements -from pkg_resources.py31compat import makedirs __all__ = ['get_requires_for_build_sdist', 'get_requires_for_build_wheel', @@ -48,6 +47,7 @@ '__legacy__', 'SetupRequirementsError'] + class SetupRequirementsError(BaseException): def __init__(self, specifiers): self.specifiers = specifiers @@ -75,17 +75,20 @@ def patch(cls): distutils.core.Distribution = orig -def _to_str(s): - """ - Convert a filename to a string (on Python 2, explicitly - a byte string, not Unicode) as distutils checks for the - exact type str. +@contextlib.contextmanager +def no_install_setup_requires(): + """Temporarily disable installing setup_requires + + Under PEP 517, the backend reports build dependencies to the frontend, + and the frontend is responsible for ensuring they're installed. + So setuptools (acting as a backend) should not try to install them. """ - if sys.version_info[0] == 2 and not isinstance(s, str): - # Assume it's Unicode, as that's what the PEP says - # should be provided. - return s.encode(sys.getfilesystemencoding()) - return s + orig = setuptools._install_setup_requires + setuptools._install_setup_requires = lambda attrs: None + try: + yield + finally: + setuptools._install_setup_requires = orig def _get_immediate_subdirectories(a_dir): @@ -98,7 +101,12 @@ def _file_with_extension(directory, extension): f for f in os.listdir(directory) if f.endswith(extension) ) - file, = matching + try: + file, = matching + except ValueError: + raise ValueError( + 'No distribution was found. 
Ensure that `setup.py` ' + 'is not empty and that it calls `setup()`.') return file @@ -143,7 +151,8 @@ def run_setup(self, setup_script='setup.py'): def get_requires_for_build_wheel(self, config_settings=None): config_settings = self._fix_config(config_settings) - return self._get_build_requires(config_settings, requirements=['wheel']) + return self._get_build_requires( + config_settings, requirements=['wheel']) def get_requires_for_build_sdist(self, config_settings=None): config_settings = self._fix_config(config_settings) @@ -151,17 +160,20 @@ def get_requires_for_build_sdist(self, config_settings=None): def prepare_metadata_for_build_wheel(self, metadata_directory, config_settings=None): - sys.argv = sys.argv[:1] + ['dist_info', '--egg-base', - _to_str(metadata_directory)] - self.run_setup() + sys.argv = sys.argv[:1] + [ + 'dist_info', '--egg-base', metadata_directory] + with no_install_setup_requires(): + self.run_setup() dist_info_directory = metadata_directory while True: dist_infos = [f for f in os.listdir(dist_info_directory) if f.endswith('.dist-info')] - if (len(dist_infos) == 0 and - len(_get_immediate_subdirectories(dist_info_directory)) == 1): + if ( + len(dist_infos) == 0 and + len(_get_immediate_subdirectories(dist_info_directory)) == 1 + ): dist_info_directory = os.path.join( dist_info_directory, os.listdir(dist_info_directory)[0]) @@ -186,14 +198,16 @@ def _build_with_temp_dir(self, setup_command, result_extension, result_directory = os.path.abspath(result_directory) # Build in a temporary directory, then copy to the target. - makedirs(result_directory, exist_ok=True) - with TemporaryDirectory(dir=result_directory) as tmp_dist_dir: + os.makedirs(result_directory, exist_ok=True) + with tempfile.TemporaryDirectory(dir=result_directory) as tmp_dist_dir: sys.argv = (sys.argv[:1] + setup_command + ['--dist-dir', tmp_dist_dir] + config_settings["--global-option"]) - self.run_setup() + with no_install_setup_requires(): + self.run_setup() - result_basename = _file_with_extension(tmp_dist_dir, result_extension) + result_basename = _file_with_extension( + tmp_dist_dir, result_extension) result_path = os.path.join(result_directory, result_basename) if os.path.exists(result_path): # os.rename will fail overwriting on non-Unix. @@ -202,7 +216,6 @@ def _build_with_temp_dir(self, setup_command, result_extension, return result_basename - def build_wheel(self, wheel_directory, config_settings=None, metadata_directory=None): return self._build_with_temp_dir(['bdist_wheel'], '.whl', @@ -217,9 +230,12 @@ def build_sdist(self, sdist_directory, config_settings=None): class _BuildMetaLegacyBackend(_BuildMetaBackend): """Compatibility backend for setuptools - This is a version of setuptools.build_meta that endeavors to maintain backwards - compatibility with pre-PEP 517 modes of invocation. It exists as a temporary - bridge between the old packaging mechanism and the new packaging mechanism, + This is a version of setuptools.build_meta that endeavors + to maintain backwards + compatibility with pre-PEP 517 modes of invocation. It + exists as a temporary + bridge between the old packaging mechanism and the new + packaging mechanism, and will eventually be removed. """ def run_setup(self, setup_script='setup.py'): @@ -232,6 +248,12 @@ def run_setup(self, setup_script='setup.py'): if script_dir not in sys.path: sys.path.insert(0, script_dir) + # Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to + # get the directory of the source code. 
They expect it to refer to the + # setup.py script. + sys_argv_0 = sys.argv[0] + sys.argv[0] = setup_script + try: super(_BuildMetaLegacyBackend, self).run_setup(setup_script=setup_script) @@ -242,6 +264,8 @@ def run_setup(self, setup_script='setup.py'): # the original path so that the path manipulation does not persist # within the hook after run_setup is called. sys.path[:] = sys_path + sys.argv[0] = sys_argv_0 + # The primary backend _BACKEND = _BuildMetaBackend() diff --git a/env/lib/python2.7/site-packages/setuptools/cli-32.exe b/venv/Lib/site-packages/setuptools/cli-32.exe similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/cli-32.exe rename to venv/Lib/site-packages/setuptools/cli-32.exe diff --git a/env/lib/python2.7/site-packages/setuptools/cli-64.exe b/venv/Lib/site-packages/setuptools/cli-64.exe similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/cli-64.exe rename to venv/Lib/site-packages/setuptools/cli-64.exe diff --git a/env/lib/python2.7/site-packages/setuptools/cli.exe b/venv/Lib/site-packages/setuptools/cli.exe similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/cli.exe rename to venv/Lib/site-packages/setuptools/cli.exe diff --git a/venv/Lib/site-packages/setuptools/command/__init__.py b/venv/Lib/site-packages/setuptools/command/__init__.py new file mode 100644 index 00000000..b966dcea --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/__init__.py @@ -0,0 +1,8 @@ +from distutils.command.bdist import bdist +import sys + +if 'egg' not in bdist.format_commands: + bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") + bdist.format_commands.append('egg') + +del bdist, sys diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..507c1d62 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/alias.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/alias.cpython-36.pyc new file mode 100644 index 00000000..190b5331 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/alias.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-36.pyc new file mode 100644 index 00000000..05a54fcb Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-36.pyc new file mode 100644 index 00000000..294513bb Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/build_clib.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/build_clib.cpython-36.pyc new file mode 100644 index 00000000..8bc89f11 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/build_clib.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-36.pyc new file mode 
100644 index 00000000..f48a5995 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-36.pyc new file mode 100644 index 00000000..b79d4167 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/develop.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/develop.cpython-36.pyc new file mode 100644 index 00000000..5186d43a Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/develop.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/dist_info.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/dist_info.cpython-36.pyc new file mode 100644 index 00000000..ad0e0ccd Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/dist_info.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/easy_install.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/easy_install.cpython-36.pyc new file mode 100644 index 00000000..da500f38 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/easy_install.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-36.pyc new file mode 100644 index 00000000..82de1bc5 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-36.pyc new file mode 100644 index 00000000..8aa5b07f Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-36.pyc new file mode 100644 index 00000000..93c8c147 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-36.pyc new file mode 100644 index 00000000..4decd1cc Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-36.pyc new file mode 100644 index 00000000..a2f80fa6 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/py36compat.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/py36compat.cpython-36.pyc new file mode 100644 index 00000000..7c625fef Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/py36compat.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/setuptools/command/__pycache__/register.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/register.cpython-36.pyc new file mode 100644 index 00000000..f9a60a82 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/register.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/rotate.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/rotate.cpython-36.pyc new file mode 100644 index 00000000..72b44c07 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/rotate.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/saveopts.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/saveopts.cpython-36.pyc new file mode 100644 index 00000000..fc1aa014 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/saveopts.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-36.pyc new file mode 100644 index 00000000..8b3ea272 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-36.pyc new file mode 100644 index 00000000..9766ffe5 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-36.pyc new file mode 100644 index 00000000..304d5a78 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/upload.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/upload.cpython-36.pyc new file mode 100644 index 00000000..33ce88ca Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/upload.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/upload_docs.cpython-36.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/upload_docs.cpython-36.pyc new file mode 100644 index 00000000..9e300897 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/command/__pycache__/upload_docs.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/setuptools/command/alias.py b/venv/Lib/site-packages/setuptools/command/alias.py similarity index 98% rename from env/lib/python2.7/site-packages/setuptools/command/alias.py rename to venv/Lib/site-packages/setuptools/command/alias.py index 4532b1cc..452a9244 100644 --- a/env/lib/python2.7/site-packages/setuptools/command/alias.py +++ b/venv/Lib/site-packages/setuptools/command/alias.py @@ -1,7 +1,5 @@ from distutils.errors import DistutilsOptionError -from setuptools.extern.six.moves import map - from setuptools.command.setopt import edit_config, option_base, config_file diff --git a/env/lib/python2.7/site-packages/setuptools/command/bdist_egg.py b/venv/Lib/site-packages/setuptools/command/bdist_egg.py similarity index 88% rename from env/lib/python2.7/site-packages/setuptools/command/bdist_egg.py rename to venv/Lib/site-packages/setuptools/command/bdist_egg.py index 9f8df917..e6b1609f 
100644 --- a/env/lib/python2.7/site-packages/setuptools/command/bdist_egg.py +++ b/venv/Lib/site-packages/setuptools/command/bdist_egg.py @@ -2,7 +2,6 @@ Build .egg distributions""" -from distutils.errors import DistutilsSetupError from distutils.dir_util import remove_tree, mkpath from distutils import log from types import CodeType @@ -12,24 +11,15 @@ import textwrap import marshal -from setuptools.extern import six - from pkg_resources import get_build_platform, Distribution, ensure_directory -from pkg_resources import EntryPoint from setuptools.extension import Library from setuptools import Command -try: - # Python 2.7 or >=3.2 - from sysconfig import get_path, get_python_version +from sysconfig import get_path, get_python_version - def _get_purelib(): - return get_path("purelib") -except ImportError: - from distutils.sysconfig import get_python_lib, get_python_version - def _get_purelib(): - return get_python_lib(False) +def _get_purelib(): + return get_path("purelib") def strip_module(filename): @@ -54,10 +44,12 @@ def write_stub(resource, pyfile): _stub_template = textwrap.dedent(""" def __bootstrap__(): global __bootstrap__, __loader__, __file__ - import sys, pkg_resources, imp + import sys, pkg_resources, importlib.util __file__ = pkg_resources.resource_filename(__name__, %r) __loader__ = None; del __bootstrap__, __loader__ - imp.load_dynamic(__name__,__file__) + spec = importlib.util.spec_from_file_location(__name__,__file__) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) __bootstrap__() """).lstrip() with open(pyfile, 'w') as f: @@ -158,7 +150,7 @@ def call_command(self, cmdname, **kw): self.run_command(cmdname) return cmd - def run(self): + def run(self): # noqa: C901 # is too complex (14) # FIXME # Generate metadata first self.run_command("egg_info") # We run install_lib before install_data, because some data hacks @@ -273,43 +265,7 @@ def zip_safe(self): return analyze_egg(self.bdist_dir, self.stubs) def gen_header(self): - epm = EntryPoint.parse_map(self.distribution.entry_points or '') - ep = epm.get('setuptools.installation', {}).get('eggsecutable') - if ep is None: - return 'w' # not an eggsecutable, do it the usual way. - - if not ep.attrs or ep.extras: - raise DistutilsSetupError( - "eggsecutable entry point (%r) cannot have 'extras' " - "or refer to a module" % (ep,) - ) - - pyver = sys.version[:3] - pkg = ep.module_name - full = '.'.join(ep.attrs) - base = ep.attrs[0] - basename = os.path.basename(self.egg_output) - - header = ( - "#!/bin/sh\n" - 'if [ `basename $0` = "%(basename)s" ]\n' - 'then exec python%(pyver)s -c "' - "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " - "from %(pkg)s import %(base)s; sys.exit(%(full)s())" - '" "$@"\n' - 'else\n' - ' echo $0 is not the correct name for this egg file.\n' - ' echo Please rename it back to %(basename)s and try again.\n' - ' exec false\n' - 'fi\n' - ) % locals() - - if not self.dry_run: - mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) - f = open(self.egg_output, 'w') - f.write(header) - f.close() - return 'a' + return 'w' def copy_metadata_to(self, target_dir): "Copy metadata (egg info) to the target_dir" @@ -411,9 +367,7 @@ def scan_module(egg_dir, base, name, stubs): return True # Extension module pkg = base[len(egg_dir) + 1:].replace(os.sep, '.') module = pkg + (pkg and '.' 
or '') + os.path.splitext(name)[0] - if six.PY2: - skip = 8 # skip magic & date - elif sys.version_info < (3, 7): + if sys.version_info < (3, 7): skip = 12 # skip magic & date & file size else: skip = 16 # skip magic & reserved? & date & file size @@ -444,7 +398,7 @@ def iter_symbols(code): for name in code.co_names: yield name for const in code.co_consts: - if isinstance(const, six.string_types): + if isinstance(const, str): yield const elif isinstance(const, CodeType): for name in iter_symbols(const): diff --git a/venv/Lib/site-packages/setuptools/command/bdist_rpm.py b/venv/Lib/site-packages/setuptools/command/bdist_rpm.py new file mode 100644 index 00000000..0eb1b9c2 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/bdist_rpm.py @@ -0,0 +1,31 @@ +import distutils.command.bdist_rpm as orig + + +class bdist_rpm(orig.bdist_rpm): + """ + Override the default bdist_rpm behavior to do the following: + + 1. Run egg_info to ensure the name and version are properly calculated. + 2. Always run 'install' using --single-version-externally-managed to + disable eggs in RPM distributions. + """ + + def run(self): + # ensure distro name is up-to-date + self.run_command('egg_info') + + orig.bdist_rpm.run(self) + + def _make_spec_file(self): + spec = orig.bdist_rpm._make_spec_file(self) + spec = [ + line.replace( + "setup.py install ", + "setup.py install --single-version-externally-managed " + ).replace( + "%setup", + "%setup -n %{name}-%{unmangled_version}" + ) + for line in spec + ] + return spec diff --git a/venv/Lib/site-packages/setuptools/command/build_clib.py b/venv/Lib/site-packages/setuptools/command/build_clib.py new file mode 100644 index 00000000..67ce2444 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/build_clib.py @@ -0,0 +1,101 @@ +import distutils.command.build_clib as orig +from distutils.errors import DistutilsSetupError +from distutils import log +from setuptools.dep_util import newer_pairwise_group + + +class build_clib(orig.build_clib): + """ + Override the default build_clib behaviour to do the following: + + 1. Implement a rudimentary timestamp-based dependency system + so 'compile()' doesn't run every time. + 2. Add more keys to the 'build_info' dictionary: + * obj_deps - specify dependencies for each object compiled. + this should be a dictionary mapping a key + with the source filename to a list of + dependencies. Use an empty string for global + dependencies. + * cflags - specify a list of additional flags to pass to + the compiler. + """ + + def build_libraries(self, libraries): + for (lib_name, build_info) in libraries: + sources = build_info.get('sources') + if sources is None or not isinstance(sources, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name) + sources = list(sources) + + log.info("building '%s' library", lib_name) + + # Make sure everything is the correct type. + # obj_deps should be a dictionary of keys as sources + # and a list/tuple of files that are its dependencies. + obj_deps = build_info.get('obj_deps', dict()) + if not isinstance(obj_deps, dict): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'obj_deps' must be a dictionary of " + "type 'source: list'" % lib_name) + dependencies = [] + + # Get the global dependencies that are specified by the '' key. + # These will go into every source's dependency list. 
+ global_deps = obj_deps.get('', list()) + if not isinstance(global_deps, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'obj_deps' must be a dictionary of " + "type 'source: list'" % lib_name) + + # Build the list to be used by newer_pairwise_group + # each source will be auto-added to its dependencies. + for source in sources: + src_deps = [source] + src_deps.extend(global_deps) + extra_deps = obj_deps.get(source, list()) + if not isinstance(extra_deps, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'obj_deps' must be a dictionary of " + "type 'source: list'" % lib_name) + src_deps.extend(extra_deps) + dependencies.append(src_deps) + + expected_objects = self.compiler.object_filenames( + sources, + output_dir=self.build_temp, + ) + + if ( + newer_pairwise_group(dependencies, expected_objects) + != ([], []) + ): + # First, compile the source code to object files in the library + # directory. (This should probably change to putting object + # files in a temporary build directory.) + macros = build_info.get('macros') + include_dirs = build_info.get('include_dirs') + cflags = build_info.get('cflags') + self.compiler.compile( + sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=include_dirs, + extra_postargs=cflags, + debug=self.debug + ) + + # Now "link" the object files together into a static library. + # (On Unix at least, this isn't really linking -- it just + # builds an archive. Whatever.) + self.compiler.create_static_lib( + expected_objects, + lib_name, + output_dir=self.build_clib, + debug=self.debug + ) diff --git a/venv/Lib/site-packages/setuptools/command/build_ext.py b/venv/Lib/site-packages/setuptools/command/build_ext.py new file mode 100644 index 00000000..03a72b4f --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/build_ext.py @@ -0,0 +1,322 @@ +import os +import sys +import itertools +from importlib.machinery import EXTENSION_SUFFIXES +from distutils.command.build_ext import build_ext as _du_build_ext +from distutils.file_util import copy_file +from distutils.ccompiler import new_compiler +from distutils.sysconfig import customize_compiler, get_config_var +from distutils.errors import DistutilsError +from distutils import log + +from setuptools.extension import Library + +try: + # Attempt to use Cython for building extensions, if available + from Cython.Distutils.build_ext import build_ext as _build_ext + # Additionally, assert that the compiler module will load + # also. Ref #1229. + __import__('Cython.Compiler.Main') +except ImportError: + _build_ext = _du_build_ext + +# make sure _config_vars is initialized +get_config_var("LDSHARED") +from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa + + +def _customize_compiler_for_shlib(compiler): + if sys.platform == "darwin": + # building .dylib requires additional compiler flags on OSX; here we + # temporarily substitute the pyconfig.h variables so that distutils' + # 'customize_compiler' uses them before we build the shared libraries. + tmp = _CONFIG_VARS.copy() + try: + # XXX Help! I don't have any idea whether these are right... 
+ _CONFIG_VARS['LDSHARED'] = ( + "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") + _CONFIG_VARS['CCSHARED'] = " -dynamiclib" + _CONFIG_VARS['SO'] = ".dylib" + customize_compiler(compiler) + finally: + _CONFIG_VARS.clear() + _CONFIG_VARS.update(tmp) + else: + customize_compiler(compiler) + + +have_rtld = False +use_stubs = False +libtype = 'shared' + +if sys.platform == "darwin": + use_stubs = True +elif os.name != 'nt': + try: + import dl + use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') + except ImportError: + pass + + +def if_dl(s): + return s if have_rtld else '' + + +def get_abi3_suffix(): + """Return the file extension for an abi3-compliant Extension()""" + for suffix in EXTENSION_SUFFIXES: + if '.abi3' in suffix: # Unix + return suffix + elif suffix == '.pyd': # Windows + return suffix + + +class build_ext(_build_ext): + def run(self): + """Build extensions in build directory, then copy if --inplace""" + old_inplace, self.inplace = self.inplace, 0 + _build_ext.run(self) + self.inplace = old_inplace + if old_inplace: + self.copy_extensions_to_source() + + def copy_extensions_to_source(self): + build_py = self.get_finalized_command('build_py') + for ext in self.extensions: + fullname = self.get_ext_fullname(ext.name) + filename = self.get_ext_filename(fullname) + modpath = fullname.split('.') + package = '.'.join(modpath[:-1]) + package_dir = build_py.get_package_dir(package) + dest_filename = os.path.join(package_dir, + os.path.basename(filename)) + src_filename = os.path.join(self.build_lib, filename) + + # Always copy, even if source is older than destination, to ensure + # that the right extensions for the current Python/platform are + # used. + copy_file( + src_filename, dest_filename, verbose=self.verbose, + dry_run=self.dry_run + ) + if ext._needs_stub: + self.write_stub(package_dir or os.curdir, ext, True) + + def get_ext_filename(self, fullname): + filename = _build_ext.get_ext_filename(self, fullname) + if fullname in self.ext_map: + ext = self.ext_map[fullname] + use_abi3 = getattr(ext, 'py_limited_api') and get_abi3_suffix() + if use_abi3: + so_ext = get_config_var('EXT_SUFFIX') + filename = filename[:-len(so_ext)] + filename = filename + get_abi3_suffix() + if isinstance(ext, Library): + fn, ext = os.path.splitext(filename) + return self.shlib_compiler.library_filename(fn, libtype) + elif use_stubs and ext._links_to_dynamic: + d, fn = os.path.split(filename) + return os.path.join(d, 'dl-' + fn) + return filename + + def initialize_options(self): + _build_ext.initialize_options(self) + self.shlib_compiler = None + self.shlibs = [] + self.ext_map = {} + + def finalize_options(self): + _build_ext.finalize_options(self) + self.extensions = self.extensions or [] + self.check_extensions_list(self.extensions) + self.shlibs = [ext for ext in self.extensions + if isinstance(ext, Library)] + if self.shlibs: + self.setup_shlib_compiler() + for ext in self.extensions: + ext._full_name = self.get_ext_fullname(ext.name) + for ext in self.extensions: + fullname = ext._full_name + self.ext_map[fullname] = ext + + # distutils 3.1 will also ask for module names + # XXX what to do with conflicts? 
+ self.ext_map[fullname.split('.')[-1]] = ext + + ltd = self.shlibs and self.links_to_dynamic(ext) or False + ns = ltd and use_stubs and not isinstance(ext, Library) + ext._links_to_dynamic = ltd + ext._needs_stub = ns + filename = ext._file_name = self.get_ext_filename(fullname) + libdir = os.path.dirname(os.path.join(self.build_lib, filename)) + if ltd and libdir not in ext.library_dirs: + ext.library_dirs.append(libdir) + if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: + ext.runtime_library_dirs.append(os.curdir) + + def setup_shlib_compiler(self): + compiler = self.shlib_compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=self.force + ) + _customize_compiler_for_shlib(compiler) + + if self.include_dirs is not None: + compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name, value) in self.define: + compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + compiler.undefine_macro(macro) + if self.libraries is not None: + compiler.set_libraries(self.libraries) + if self.library_dirs is not None: + compiler.set_library_dirs(self.library_dirs) + if self.rpath is not None: + compiler.set_runtime_library_dirs(self.rpath) + if self.link_objects is not None: + compiler.set_link_objects(self.link_objects) + + # hack so distutils' build_extension() builds a library instead + compiler.link_shared_object = link_shared_object.__get__(compiler) + + def get_export_symbols(self, ext): + if isinstance(ext, Library): + return ext.export_symbols + return _build_ext.get_export_symbols(self, ext) + + def build_extension(self, ext): + ext._convert_pyx_sources_to_lang() + _compiler = self.compiler + try: + if isinstance(ext, Library): + self.compiler = self.shlib_compiler + _build_ext.build_extension(self, ext) + if ext._needs_stub: + cmd = self.get_finalized_command('build_py').build_lib + self.write_stub(cmd, ext) + finally: + self.compiler = _compiler + + def links_to_dynamic(self, ext): + """Return true if 'ext' links to a dynamic lib in the same package""" + # XXX this should check to ensure the lib is actually being built + # XXX as dynamic, and not just using a locally-found version or a + # XXX static-compiled version + libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) + pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) + return any(pkg + libname in libnames for libname in ext.libraries) + + def get_outputs(self): + return _build_ext.get_outputs(self) + self.__get_stubs_outputs() + + def __get_stubs_outputs(self): + # assemble the base name for each extension that needs a stub + ns_ext_bases = ( + os.path.join(self.build_lib, *ext._full_name.split('.')) + for ext in self.extensions + if ext._needs_stub + ) + # pair each base with the extension + pairs = itertools.product(ns_ext_bases, self.__get_output_extensions()) + return list(base + fnext for base, fnext in pairs) + + def __get_output_extensions(self): + yield '.py' + yield '.pyc' + if self.get_finalized_command('build_py').optimize: + yield '.pyo' + + def write_stub(self, output_dir, ext, compile=False): + log.info("writing stub loader for %s to %s", ext._full_name, + output_dir) + stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + + '.py') + if compile and os.path.exists(stub_file): + raise DistutilsError(stub_file + " already exists! 
Please delete.") + if not self.dry_run: + f = open(stub_file, 'w') + f.write( + '\n'.join([ + "def __bootstrap__():", + " global __bootstrap__, __file__, __loader__", + " import sys, os, pkg_resources, importlib.util" + + if_dl(", dl"), + " __file__ = pkg_resources.resource_filename" + "(__name__,%r)" + % os.path.basename(ext._file_name), + " del __bootstrap__", + " if '__loader__' in globals():", + " del __loader__", + if_dl(" old_flags = sys.getdlopenflags()"), + " old_dir = os.getcwd()", + " try:", + " os.chdir(os.path.dirname(__file__))", + if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), + " spec = importlib.util.spec_from_file_location(", + " __name__, __file__)", + " mod = importlib.util.module_from_spec(spec)", + " spec.loader.exec_module(mod)", + " finally:", + if_dl(" sys.setdlopenflags(old_flags)"), + " os.chdir(old_dir)", + "__bootstrap__()", + "" # terminal \n + ]) + ) + f.close() + if compile: + from distutils.util import byte_compile + + byte_compile([stub_file], optimize=0, + force=True, dry_run=self.dry_run) + optimize = self.get_finalized_command('install_lib').optimize + if optimize > 0: + byte_compile([stub_file], optimize=optimize, + force=True, dry_run=self.dry_run) + if os.path.exists(stub_file) and not self.dry_run: + os.unlink(stub_file) + + +if use_stubs or os.name == 'nt': + # Build shared libraries + # + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + self.link( + self.SHARED_LIBRARY, objects, output_libname, + output_dir, libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, extra_preargs, extra_postargs, + build_temp, target_lang + ) +else: + # Build static libraries everywhere else + libtype = 'static' + + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + # XXX we need to either disallow these attrs on Library instances, + # or warn/abort here if set, or something... 
+ # libraries=None, library_dirs=None, runtime_library_dirs=None, + # export_symbols=None, extra_preargs=None, extra_postargs=None, + # build_temp=None + + assert output_dir is None # distutils build_ext doesn't pass this + output_dir, filename = os.path.split(output_libname) + basename, ext = os.path.splitext(filename) + if self.library_filename("x").startswith('lib'): + # strip 'lib' prefix; this is kludgy if some platform uses + # a different prefix + basename = basename[3:] + + self.create_static_lib( + objects, basename, output_dir, debug, target_lang + ) diff --git a/venv/Lib/site-packages/setuptools/command/build_py.py b/venv/Lib/site-packages/setuptools/command/build_py.py new file mode 100644 index 00000000..df6fd323 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/build_py.py @@ -0,0 +1,252 @@ +from glob import glob +from distutils.util import convert_path +import distutils.command.build_py as orig +import os +import fnmatch +import textwrap +import io +import distutils.errors +import itertools +import stat +from setuptools.extern.more_itertools import unique_everseen + +try: + from setuptools.lib2to3_ex import Mixin2to3 +except Exception: + + class Mixin2to3: + def run_2to3(self, files, doctests=True): + "do nothing" + + +def make_writable(target): + os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE) + + +class build_py(orig.build_py, Mixin2to3): + """Enhanced 'build_py' command that includes data files with packages + + The data files are specified via a 'package_data' argument to 'setup()'. + See 'setuptools.dist.Distribution' for more details. + + Also, this version of the 'build_py' command allows you to specify both + 'py_modules' and 'packages' in the same setup operation. + """ + + def finalize_options(self): + orig.build_py.finalize_options(self) + self.package_data = self.distribution.package_data + self.exclude_package_data = (self.distribution.exclude_package_data or + {}) + if 'data_files' in self.__dict__: + del self.__dict__['data_files'] + self.__updated_files = [] + self.__doctests_2to3 = [] + + def run(self): + """Build modules, packages, and copy data files to build directory""" + if not self.py_modules and not self.packages: + return + + if self.py_modules: + self.build_modules() + + if self.packages: + self.build_packages() + self.build_package_data() + + self.run_2to3(self.__updated_files, False) + self.run_2to3(self.__updated_files, True) + self.run_2to3(self.__doctests_2to3, True) + + # Only compile actual .py files, using our base class' idea of what our + # output files are. 
+ self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) + + def __getattr__(self, attr): + "lazily compute data files" + if attr == 'data_files': + self.data_files = self._get_data_files() + return self.data_files + return orig.build_py.__getattr__(self, attr) + + def build_module(self, module, module_file, package): + outfile, copied = orig.build_py.build_module(self, module, module_file, + package) + if copied: + self.__updated_files.append(outfile) + return outfile, copied + + def _get_data_files(self): + """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" + self.analyze_manifest() + return list(map(self._get_pkg_data_files, self.packages or ())) + + def _get_pkg_data_files(self, package): + # Locate package source directory + src_dir = self.get_package_dir(package) + + # Compute package build directory + build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + + # Strip directory from globbed filenames + filenames = [ + os.path.relpath(file, src_dir) + for file in self.find_data_files(package, src_dir) + ] + return package, src_dir, build_dir, filenames + + def find_data_files(self, package, src_dir): + """Return filenames for package's data files in 'src_dir'""" + patterns = self._get_platform_patterns( + self.package_data, + package, + src_dir, + ) + globs_expanded = map(glob, patterns) + # flatten the expanded globs into an iterable of matches + globs_matches = itertools.chain.from_iterable(globs_expanded) + glob_files = filter(os.path.isfile, globs_matches) + files = itertools.chain( + self.manifest_files.get(package, []), + glob_files, + ) + return self.exclude_data_files(package, src_dir, files) + + def build_package_data(self): + """Copy data files into build directory""" + for package, src_dir, build_dir, filenames in self.data_files: + for filename in filenames: + target = os.path.join(build_dir, filename) + self.mkpath(os.path.dirname(target)) + srcfile = os.path.join(src_dir, filename) + outf, copied = self.copy_file(srcfile, target) + make_writable(target) + srcfile = os.path.abspath(srcfile) + if (copied and + srcfile in self.distribution.convert_2to3_doctests): + self.__doctests_2to3.append(outf) + + def analyze_manifest(self): + self.manifest_files = mf = {} + if not self.distribution.include_package_data: + return + src_dirs = {} + for package in self.packages or (): + # Locate package source directory + src_dirs[assert_relative(self.get_package_dir(package))] = package + + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + for path in ei_cmd.filelist.files: + d, f = os.path.split(assert_relative(path)) + prev = None + oldf = f + while d and d != prev and d not in src_dirs: + prev = d + d, df = os.path.split(d) + f = os.path.join(df, f) + if d in src_dirs: + if path.endswith('.py') and f == oldf: + continue # it's a module, not data + mf.setdefault(src_dirs[d], []).append(path) + + def get_data_files(self): + pass # Lazily compute data files in _get_data_files() function. 
+ + def check_package(self, package, package_dir): + """Check namespace packages' __init__ for declare_namespace""" + try: + return self.packages_checked[package] + except KeyError: + pass + + init_py = orig.build_py.check_package(self, package, package_dir) + self.packages_checked[package] = init_py + + if not init_py or not self.distribution.namespace_packages: + return init_py + + for pkg in self.distribution.namespace_packages: + if pkg == package or pkg.startswith(package + '.'): + break + else: + return init_py + + with io.open(init_py, 'rb') as f: + contents = f.read() + if b'declare_namespace' not in contents: + raise distutils.errors.DistutilsError( + "Namespace package problem: %s is a namespace package, but " + "its\n__init__.py does not call declare_namespace()! Please " + 'fix it.\n(See the setuptools manual under ' + '"Namespace Packages" for details.)\n"' % (package,) + ) + return init_py + + def initialize_options(self): + self.packages_checked = {} + orig.build_py.initialize_options(self) + + def get_package_dir(self, package): + res = orig.build_py.get_package_dir(self, package) + if self.distribution.src_root is not None: + return os.path.join(self.distribution.src_root, res) + return res + + def exclude_data_files(self, package, src_dir, files): + """Filter filenames for package's data files in 'src_dir'""" + files = list(files) + patterns = self._get_platform_patterns( + self.exclude_package_data, + package, + src_dir, + ) + match_groups = ( + fnmatch.filter(files, pattern) + for pattern in patterns + ) + # flatten the groups of matches into an iterable of matches + matches = itertools.chain.from_iterable(match_groups) + bad = set(matches) + keepers = ( + fn + for fn in files + if fn not in bad + ) + # ditch dupes + return list(unique_everseen(keepers)) + + @staticmethod + def _get_platform_patterns(spec, package, src_dir): + """ + yield platform-specific path patterns (suitable for glob + or fn_match) from a glob-based spec (such as + self.package_data or self.exclude_package_data) + matching package in src_dir. + """ + raw_patterns = itertools.chain( + spec.get('', []), + spec.get(package, []), + ) + return ( + # Each pattern has to be converted to a platform-specific path + os.path.join(src_dir, convert_path(pattern)) + for pattern in raw_patterns + ) + + +def assert_relative(path): + if not os.path.isabs(path): + return path + from distutils.errors import DistutilsSetupError + + msg = textwrap.dedent(""" + Error: setup script specifies an absolute path: + + %s + + setup() arguments must *always* be /-separated paths relative to the + setup.py directory, *never* absolute paths. 
+ """).lstrip() % path + raise DistutilsSetupError(msg) diff --git a/env/lib/python2.7/site-packages/setuptools/command/develop.py b/venv/Lib/site-packages/setuptools/command/develop.py similarity index 97% rename from env/lib/python2.7/site-packages/setuptools/command/develop.py rename to venv/Lib/site-packages/setuptools/command/develop.py index 009e4f93..faf8c988 100644 --- a/env/lib/python2.7/site-packages/setuptools/command/develop.py +++ b/venv/Lib/site-packages/setuptools/command/develop.py @@ -5,15 +5,11 @@ import glob import io -from setuptools.extern import six - import pkg_resources from setuptools.command.easy_install import easy_install from setuptools import namespaces import setuptools -__metaclass__ = type - class develop(namespaces.DevelopInstaller, easy_install): """Set up package for development""" @@ -108,7 +104,7 @@ def _resolve_setup_path(egg_base, install_dir, egg_path): return path_to_setup def install_for_development(self): - if six.PY3 and getattr(self.distribution, 'use_2to3', False): + if getattr(self.distribution, 'use_2to3', False): # If we run 2to3 we can not do this inplace: # Ensure metadata is up-to-date @@ -139,7 +135,6 @@ def install_for_development(self): self.reinitialize_command('build_ext', inplace=1) self.run_command('build_ext') - self.install_site_py() # ensure that target dir is site-safe if setuptools.bootstrap_install_from: self.easy_install(setuptools.bootstrap_install_from) setuptools.bootstrap_install_from = None diff --git a/env/lib/python2.7/site-packages/setuptools/command/dist_info.py b/venv/Lib/site-packages/setuptools/command/dist_info.py similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/command/dist_info.py rename to venv/Lib/site-packages/setuptools/command/dist_info.py diff --git a/venv/Lib/site-packages/setuptools/command/easy_install.py b/venv/Lib/site-packages/setuptools/command/easy_install.py new file mode 100644 index 00000000..45adb6a1 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/easy_install.py @@ -0,0 +1,2290 @@ +""" +Easy Install +------------ + +A tool for doing automatic download/extract/build of distutils-based Python +packages. For detailed documentation, see the accompanying EasyInstall.txt +file, or visit the `EasyInstall home page`__. 
+ +__ https://setuptools.readthedocs.io/en/latest/easy_install.html + +""" + +from glob import glob +from distutils.util import get_platform +from distutils.util import convert_path, subst_vars +from distutils.errors import ( + DistutilsArgError, DistutilsOptionError, + DistutilsError, DistutilsPlatformError, +) +from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS +from distutils import log, dir_util +from distutils.command.build_scripts import first_line_re +from distutils.spawn import find_executable +import sys +import os +import zipimport +import shutil +import tempfile +import zipfile +import re +import stat +import random +import textwrap +import warnings +import site +import struct +import contextlib +import subprocess +import shlex +import io +import configparser + + +from sysconfig import get_config_vars, get_path + +from setuptools import SetuptoolsDeprecationWarning + +from setuptools import Command +from setuptools.sandbox import run_setup +from setuptools.command import setopt +from setuptools.archive_util import unpack_archive +from setuptools.package_index import ( + PackageIndex, parse_requirement_arg, URL_SCHEME, +) +from setuptools.command import bdist_egg, egg_info +from setuptools.wheel import Wheel +from pkg_resources import ( + yield_lines, normalize_path, resource_string, ensure_directory, + get_distribution, find_distributions, Environment, Requirement, + Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, + VersionConflict, DEVELOP_DIST, +) +import pkg_resources + +# Turn on PEP440Warnings +warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) + +__all__ = [ + 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', + 'get_exe_prefixes', +] + + +def is_64bit(): + return struct.calcsize("P") == 8 + + +def samefile(p1, p2): + """ + Determine if two paths reference the same file. + + Augments os.path.samefile to work on Windows and + suppresses errors if the path doesn't exist. 
+ """ + both_exist = os.path.exists(p1) and os.path.exists(p2) + use_samefile = hasattr(os.path, 'samefile') and both_exist + if use_samefile: + return os.path.samefile(p1, p2) + norm_p1 = os.path.normpath(os.path.normcase(p1)) + norm_p2 = os.path.normpath(os.path.normcase(p2)) + return norm_p1 == norm_p2 + + +def _to_bytes(s): + return s.encode('utf8') + + +def isascii(s): + try: + s.encode('ascii') + return True + except UnicodeError: + return False + + +def _one_liner(text): + return textwrap.dedent(text).strip().replace('\n', '; ') + + +class easy_install(Command): + """Manage a download/build/install process""" + description = "Find/get/install Python packages" + command_consumes_arguments = True + + user_options = [ + ('prefix=', None, "installation prefix"), + ("zip-ok", "z", "install package as a zipfile"), + ("multi-version", "m", "make apps have to require() a version"), + ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), + ("install-dir=", "d", "install package to DIR"), + ("script-dir=", "s", "install scripts to DIR"), + ("exclude-scripts", "x", "Don't install scripts"), + ("always-copy", "a", "Copy all needed packages to install dir"), + ("index-url=", "i", "base URL of Python Package Index"), + ("find-links=", "f", "additional URL(s) to search for packages"), + ("build-directory=", "b", + "download/extract/build in DIR; keep the results"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('record=', None, + "filename in which to record list of installed files"), + ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), + ('site-dirs=', 'S', "list of directories where .pth files work"), + ('editable', 'e', "Install specified packages in editable form"), + ('no-deps', 'N', "don't install dependencies"), + ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), + ('local-snapshots-ok', 'l', + "allow building eggs from local checkouts"), + ('version', None, "print version information and exit"), + ('no-find-links', None, + "Don't load find-links defined in packages being installed"), + ('user', None, "install in user site-package '%s'" % site.USER_SITE) + ] + boolean_options = [ + 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', + 'editable', + 'no-deps', 'local-snapshots-ok', 'version', + 'user' + ] + + negative_opt = {'always-unzip': 'zip-ok'} + create_index = PackageIndex + + def initialize_options(self): + # the --user option seems to be an opt-in one, + # so the default should be False. 
+ self.user = 0 + self.zip_ok = self.local_snapshots_ok = None + self.install_dir = self.script_dir = self.exclude_scripts = None + self.index_url = None + self.find_links = None + self.build_directory = None + self.args = None + self.optimize = self.record = None + self.upgrade = self.always_copy = self.multi_version = None + self.editable = self.no_deps = self.allow_hosts = None + self.root = self.prefix = self.no_report = None + self.version = None + self.install_purelib = None # for pure module distributions + self.install_platlib = None # non-pure (dists w/ extensions) + self.install_headers = None # for C/C++ headers + self.install_lib = None # set to either purelib or platlib + self.install_scripts = None + self.install_data = None + self.install_base = None + self.install_platbase = None + if site.ENABLE_USER_SITE: + self.install_userbase = site.USER_BASE + self.install_usersite = site.USER_SITE + else: + self.install_userbase = None + self.install_usersite = None + self.no_find_links = None + + # Options not specifiable via command line + self.package_index = None + self.pth_file = self.always_copy_from = None + self.site_dirs = None + self.installed_projects = {} + # Always read easy_install options, even if we are subclassed, or have + # an independent instance created. This ensures that defaults will + # always come from the standard configuration file(s)' "easy_install" + # section, even if this is a "develop" or "install" command, or some + # other embedding. + self._dry_run = None + self.verbose = self.distribution.verbose + self.distribution._set_command_options( + self, self.distribution.get_option_dict('easy_install') + ) + + def delete_blockers(self, blockers): + extant_blockers = ( + filename for filename in blockers + if os.path.exists(filename) or os.path.islink(filename) + ) + list(map(self._delete_path, extant_blockers)) + + def _delete_path(self, path): + log.info("Deleting %s", path) + if self.dry_run: + return + + is_tree = os.path.isdir(path) and not os.path.islink(path) + remover = rmtree if is_tree else os.unlink + remover(path) + + @staticmethod + def _render_version(): + """ + Render the Setuptools version and installation details, then exit. 
+ """ + ver = '{}.{}'.format(*sys.version_info) + dist = get_distribution('setuptools') + tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})' + print(tmpl.format(**locals())) + raise SystemExit() + + def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME + self.version and self._render_version() + + py_version = sys.version.split()[0] + prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') + + self.config_vars = { + 'dist_name': self.distribution.get_name(), + 'dist_version': self.distribution.get_version(), + 'dist_fullname': self.distribution.get_fullname(), + 'py_version': py_version, + 'py_version_short': py_version[0:3], + 'py_version_nodot': py_version[0] + py_version[2], + 'sys_prefix': prefix, + 'prefix': prefix, + 'sys_exec_prefix': exec_prefix, + 'exec_prefix': exec_prefix, + # Only python 3.2+ has abiflags + 'abiflags': getattr(sys, 'abiflags', ''), + } + + if site.ENABLE_USER_SITE: + self.config_vars['userbase'] = self.install_userbase + self.config_vars['usersite'] = self.install_usersite + + elif self.user: + log.warn("WARNING: The user site-packages directory is disabled.") + + self._fix_install_dir_for_user_site() + + self.expand_basedirs() + self.expand_dirs() + + self._expand( + 'install_dir', 'script_dir', 'build_directory', + 'site_dirs', + ) + # If a non-default installation directory was specified, default the + # script directory to match it. + if self.script_dir is None: + self.script_dir = self.install_dir + + if self.no_find_links is None: + self.no_find_links = False + + # Let install_dir get set by install_lib command, which in turn + # gets its info from the install command, and takes into account + # --prefix and --home and all that other crud. + self.set_undefined_options( + 'install_lib', ('install_dir', 'install_dir') + ) + # Likewise, set default script_dir from 'install_scripts.install_dir' + self.set_undefined_options( + 'install_scripts', ('install_dir', 'script_dir') + ) + + if self.user and self.install_purelib: + self.install_dir = self.install_purelib + self.script_dir = self.install_scripts + # default --record from the install command + self.set_undefined_options('install', ('record', 'record')) + # Should this be moved to the if statement below? 
It's not used + # elsewhere + normpath = map(normalize_path, sys.path) + self.all_site_dirs = get_site_dirs() + if self.site_dirs is not None: + site_dirs = [ + os.path.expanduser(s.strip()) for s in + self.site_dirs.split(',') + ] + for d in site_dirs: + if not os.path.isdir(d): + log.warn("%s (in --site-dirs) does not exist", d) + elif normalize_path(d) not in normpath: + raise DistutilsOptionError( + d + " (in --site-dirs) is not on sys.path" + ) + else: + self.all_site_dirs.append(normalize_path(d)) + if not self.editable: + self.check_site_dir() + self.index_url = self.index_url or "https://pypi.org/simple/" + self.shadow_path = self.all_site_dirs[:] + for path_item in self.install_dir, normalize_path(self.script_dir): + if path_item not in self.shadow_path: + self.shadow_path.insert(0, path_item) + + if self.allow_hosts is not None: + hosts = [s.strip() for s in self.allow_hosts.split(',')] + else: + hosts = ['*'] + if self.package_index is None: + self.package_index = self.create_index( + self.index_url, search_path=self.shadow_path, hosts=hosts, + ) + self.local_index = Environment(self.shadow_path + sys.path) + + if self.find_links is not None: + if isinstance(self.find_links, str): + self.find_links = self.find_links.split() + else: + self.find_links = [] + if self.local_snapshots_ok: + self.package_index.scan_egg_links(self.shadow_path + sys.path) + if not self.no_find_links: + self.package_index.add_find_links(self.find_links) + self.set_undefined_options('install_lib', ('optimize', 'optimize')) + if not isinstance(self.optimize, int): + try: + self.optimize = int(self.optimize) + if not (0 <= self.optimize <= 2): + raise ValueError + except ValueError as e: + raise DistutilsOptionError( + "--optimize must be 0, 1, or 2" + ) from e + + if self.editable and not self.build_directory: + raise DistutilsArgError( + "Must specify a build directory (-b) when using --editable" + ) + if not self.args: + raise DistutilsArgError( + "No urls, filenames, or requirements specified (see --help)") + + self.outputs = [] + + def _fix_install_dir_for_user_site(self): + """ + Fix the install_dir if "--user" was used. 
+ """ + if not self.user or not site.ENABLE_USER_SITE: + return + + self.create_home_path() + if self.install_userbase is None: + msg = "User base directory is not specified" + raise DistutilsPlatformError(msg) + self.install_base = self.install_platbase = self.install_userbase + scheme_name = os.name.replace('posix', 'unix') + '_user' + self.select_scheme(scheme_name) + + def _expand_attrs(self, attrs): + for attr in attrs: + val = getattr(self, attr) + if val is not None: + if os.name == 'posix' or os.name == 'nt': + val = os.path.expanduser(val) + val = subst_vars(val, self.config_vars) + setattr(self, attr, val) + + def expand_basedirs(self): + """Calls `os.path.expanduser` on install_base, install_platbase and + root.""" + self._expand_attrs(['install_base', 'install_platbase', 'root']) + + def expand_dirs(self): + """Calls `os.path.expanduser` on install dirs.""" + dirs = [ + 'install_purelib', + 'install_platlib', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + ] + self._expand_attrs(dirs) + + def run(self, show_deprecation=True): + if show_deprecation: + self.announce( + "WARNING: The easy_install command is deprecated " + "and will be removed in a future version.", + log.WARN, + ) + if self.verbose != self.distribution.verbose: + log.set_verbosity(self.verbose) + try: + for spec in self.args: + self.easy_install(spec, not self.no_deps) + if self.record: + outputs = self.outputs + if self.root: # strip any package prefix + root_len = len(self.root) + for counter in range(len(outputs)): + outputs[counter] = outputs[counter][root_len:] + from distutils import file_util + + self.execute( + file_util.write_file, (self.record, outputs), + "writing list of installed files to '%s'" % + self.record + ) + self.warn_deprecated_options() + finally: + log.set_verbosity(self.distribution.verbose) + + def pseudo_tempname(self): + """Return a pseudo-tempname base in the install directory. + This code is intentionally naive; if a malicious party can write to + the target directory you're already in deep doodoo. + """ + try: + pid = os.getpid() + except Exception: + pid = random.randint(0, sys.maxsize) + return os.path.join(self.install_dir, "test-easy-install-%s" % pid) + + def warn_deprecated_options(self): + pass + + def check_site_dir(self): # noqa: C901 # is too complex (12) # FIXME + """Verify that self.install_dir is .pth-capable dir, if needed""" + + instdir = normalize_path(self.install_dir) + pth_file = os.path.join(instdir, 'easy-install.pth') + + if not os.path.exists(instdir): + try: + os.makedirs(instdir) + except (OSError, IOError): + self.cant_write_to_target() + + # Is it a configured, PYTHONPATH, implicit, or explicit site dir? + is_site_dir = instdir in self.all_site_dirs + + if not is_site_dir and not self.multi_version: + # No? 
Then directly test whether it does .pth file processing + is_site_dir = self.check_pth_processing() + else: + # make sure we can write to target dir + testfile = self.pseudo_tempname() + '.write-test' + test_exists = os.path.exists(testfile) + try: + if test_exists: + os.unlink(testfile) + open(testfile, 'w').close() + os.unlink(testfile) + except (OSError, IOError): + self.cant_write_to_target() + + if not is_site_dir and not self.multi_version: + # Can't install non-multi to non-site dir with easy_install + pythonpath = os.environ.get('PYTHONPATH', '') + log.warn(self.__no_default_msg, self.install_dir, pythonpath) + + if is_site_dir: + if self.pth_file is None: + self.pth_file = PthDistributions(pth_file, self.all_site_dirs) + else: + self.pth_file = None + + if self.multi_version and not os.path.exists(pth_file): + self.pth_file = None # don't create a .pth file + self.install_dir = instdir + + __cant_write_msg = textwrap.dedent(""" + can't create or remove files in install directory + + The following error occurred while trying to add or remove files in the + installation directory: + + %s + + The installation directory you specified (via --install-dir, --prefix, or + the distutils default setting) was: + + %s + """).lstrip() # noqa + + __not_exists_id = textwrap.dedent(""" + This directory does not currently exist. Please create it and try again, or + choose a different installation directory (using the -d or --install-dir + option). + """).lstrip() # noqa + + __access_msg = textwrap.dedent(""" + Perhaps your account does not have write access to this directory? If the + installation directory is a system-owned directory, you may need to sign in + as the administrator or "root" account. If you do not have administrative + access to this machine, you may wish to choose a different installation + directory, preferably one that is listed in your PYTHONPATH environment + variable. + + For information on other options, you may wish to consult the + documentation at: + + https://setuptools.readthedocs.io/en/latest/easy_install.html + + Please make the appropriate changes for your system and try again. + """).lstrip() # noqa + + def cant_write_to_target(self): + msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,) + + if not os.path.exists(self.install_dir): + msg += '\n' + self.__not_exists_id + else: + msg += '\n' + self.__access_msg + raise DistutilsError(msg) + + def check_pth_processing(self): + """Empirically verify whether .pth files are supported in inst. 
dir""" + instdir = self.install_dir + log.info("Checking .pth file support in %s", instdir) + pth_file = self.pseudo_tempname() + ".pth" + ok_file = pth_file + '.ok' + ok_exists = os.path.exists(ok_file) + tmpl = _one_liner(""" + import os + f = open({ok_file!r}, 'w') + f.write('OK') + f.close() + """) + '\n' + try: + if ok_exists: + os.unlink(ok_file) + dirname = os.path.dirname(ok_file) + os.makedirs(dirname, exist_ok=True) + f = open(pth_file, 'w') + except (OSError, IOError): + self.cant_write_to_target() + else: + try: + f.write(tmpl.format(**locals())) + f.close() + f = None + executable = sys.executable + if os.name == 'nt': + dirname, basename = os.path.split(executable) + alt = os.path.join(dirname, 'pythonw.exe') + use_alt = ( + basename.lower() == 'python.exe' and + os.path.exists(alt) + ) + if use_alt: + # use pythonw.exe to avoid opening a console window + executable = alt + + from distutils.spawn import spawn + + spawn([executable, '-E', '-c', 'pass'], 0) + + if os.path.exists(ok_file): + log.info( + "TEST PASSED: %s appears to support .pth files", + instdir + ) + return True + finally: + if f: + f.close() + if os.path.exists(ok_file): + os.unlink(ok_file) + if os.path.exists(pth_file): + os.unlink(pth_file) + if not self.multi_version: + log.warn("TEST FAILED: %s does NOT support .pth files", instdir) + return False + + def install_egg_scripts(self, dist): + """Write all the scripts for `dist`, unless scripts are excluded""" + if not self.exclude_scripts and dist.metadata_isdir('scripts'): + for script_name in dist.metadata_listdir('scripts'): + if dist.metadata_isdir('scripts/' + script_name): + # The "script" is a directory, likely a Python 3 + # __pycache__ directory, so skip it. + continue + self.install_script( + dist, script_name, + dist.get_metadata('scripts/' + script_name) + ) + self.install_wrapper_scripts(dist) + + def add_output(self, path): + if os.path.isdir(path): + for base, dirs, files in os.walk(path): + for filename in files: + self.outputs.append(os.path.join(base, filename)) + else: + self.outputs.append(path) + + def not_editable(self, spec): + if self.editable: + raise DistutilsArgError( + "Invalid argument %r: you can't use filenames or URLs " + "with --editable (except via the --find-links option)." 
+ % (spec,) + ) + + def check_editable(self, spec): + if not self.editable: + return + + if os.path.exists(os.path.join(self.build_directory, spec.key)): + raise DistutilsArgError( + "%r already exists in %s; can't do a checkout there" % + (spec.key, self.build_directory) + ) + + @contextlib.contextmanager + def _tmpdir(self): + tmpdir = tempfile.mkdtemp(prefix=u"easy_install-") + try: + # cast to str as workaround for #709 and #710 and #712 + yield str(tmpdir) + finally: + os.path.exists(tmpdir) and rmtree(tmpdir) + + def easy_install(self, spec, deps=False): + with self._tmpdir() as tmpdir: + if not isinstance(spec, Requirement): + if URL_SCHEME(spec): + # It's a url, download it to tmpdir and process + self.not_editable(spec) + dl = self.package_index.download(spec, tmpdir) + return self.install_item(None, dl, tmpdir, deps, True) + + elif os.path.exists(spec): + # Existing file or directory, just process it directly + self.not_editable(spec) + return self.install_item(None, spec, tmpdir, deps, True) + else: + spec = parse_requirement_arg(spec) + + self.check_editable(spec) + dist = self.package_index.fetch_distribution( + spec, tmpdir, self.upgrade, self.editable, + not self.always_copy, self.local_index + ) + if dist is None: + msg = "Could not find suitable distribution for %r" % spec + if self.always_copy: + msg += " (--always-copy skips system and development eggs)" + raise DistutilsError(msg) + elif dist.precedence == DEVELOP_DIST: + # .egg-info dists don't need installing, just process deps + self.process_distribution(spec, dist, deps, "Using") + return dist + else: + return self.install_item(spec, dist.location, tmpdir, deps) + + def install_item(self, spec, download, tmpdir, deps, install_needed=False): + + # Installation is also needed if file in tmpdir or is not an egg + install_needed = install_needed or self.always_copy + install_needed = install_needed or os.path.dirname(download) == tmpdir + install_needed = install_needed or not download.endswith('.egg') + install_needed = install_needed or ( + self.always_copy_from is not None and + os.path.dirname(normalize_path(download)) == + normalize_path(self.always_copy_from) + ) + + if spec and not install_needed: + # at this point, we know it's a local .egg, we just don't know if + # it's already installed. + for dist in self.local_index[spec.project_name]: + if dist.location == download: + break + else: + install_needed = True # it's not in the local index + + log.info("Processing %s", os.path.basename(download)) + + if install_needed: + dists = self.install_eggs(spec, download, tmpdir) + for dist in dists: + self.process_distribution(spec, dist, deps) + else: + dists = [self.egg_distribution(download)] + self.process_distribution(spec, dists[0], deps, "Using") + + if spec is not None: + for dist in dists: + if dist in spec: + return dist + + def select_scheme(self, name): + """Sets the install directories by applying the install schemes.""" + # it's the caller's problem if they supply a bad name! 
+ scheme = INSTALL_SCHEMES[name] + for key in SCHEME_KEYS: + attrname = 'install_' + key + if getattr(self, attrname) is None: + setattr(self, attrname, scheme[key]) + + # FIXME: 'easy_install.process_distribution' is too complex (12) + def process_distribution( # noqa: C901 + self, requirement, dist, deps=True, *info, + ): + self.update_pth(dist) + self.package_index.add(dist) + if dist in self.local_index[dist.key]: + self.local_index.remove(dist) + self.local_index.add(dist) + self.install_egg_scripts(dist) + self.installed_projects[dist.key] = dist + log.info(self.installation_report(requirement, dist, *info)) + if (dist.has_metadata('dependency_links.txt') and + not self.no_find_links): + self.package_index.add_find_links( + dist.get_metadata_lines('dependency_links.txt') + ) + if not deps and not self.always_copy: + return + elif requirement is not None and dist.key != requirement.key: + log.warn("Skipping dependencies for %s", dist) + return # XXX this is not the distribution we were looking for + elif requirement is None or dist not in requirement: + # if we wound up with a different version, resolve what we've got + distreq = dist.as_requirement() + requirement = Requirement(str(distreq)) + log.info("Processing dependencies for %s", requirement) + try: + distros = WorkingSet([]).resolve( + [requirement], self.local_index, self.easy_install + ) + except DistributionNotFound as e: + raise DistutilsError(str(e)) from e + except VersionConflict as e: + raise DistutilsError(e.report()) from e + if self.always_copy or self.always_copy_from: + # Force all the relevant distros to be copied or activated + for dist in distros: + if dist.key not in self.installed_projects: + self.easy_install(dist.as_requirement()) + log.info("Finished processing dependencies for %s", requirement) + + def should_unzip(self, dist): + if self.zip_ok is not None: + return not self.zip_ok + if dist.has_metadata('not-zip-safe'): + return True + if not dist.has_metadata('zip-safe'): + return True + return False + + def maybe_move(self, spec, dist_filename, setup_base): + dst = os.path.join(self.build_directory, spec.key) + if os.path.exists(dst): + msg = ( + "%r already exists in %s; build directory %s will not be kept" + ) + log.warn(msg, spec.key, self.build_directory, setup_base) + return setup_base + if os.path.isdir(dist_filename): + setup_base = dist_filename + else: + if os.path.dirname(dist_filename) == setup_base: + os.unlink(dist_filename) # get it out of the tmp dir + contents = os.listdir(setup_base) + if len(contents) == 1: + dist_filename = os.path.join(setup_base, contents[0]) + if os.path.isdir(dist_filename): + # if the only thing there is a directory, move it instead + setup_base = dist_filename + ensure_directory(dst) + shutil.move(setup_base, dst) + return dst + + def install_wrapper_scripts(self, dist): + if self.exclude_scripts: + return + for args in ScriptWriter.best().get_args(dist): + self.write_script(*args) + + def install_script(self, dist, script_name, script_text, dev_path=None): + """Generate a legacy script wrapper and install it""" + spec = str(dist.as_requirement()) + is_script = is_python_script(script_text, script_name) + + if is_script: + body = self._load_template(dev_path) % locals() + script_text = ScriptWriter.get_header(script_text) + body + self.write_script(script_name, _to_bytes(script_text), 'b') + + @staticmethod + def _load_template(dev_path): + """ + There are a couple of template scripts in the package. This + function loads one of them and prepares it for use. 
+ """ + # See https://github.com/pypa/setuptools/issues/134 for info + # on script file naming and downstream issues with SVR4 + name = 'script.tmpl' + if dev_path: + name = name.replace('.tmpl', ' (dev).tmpl') + + raw_bytes = resource_string('setuptools', name) + return raw_bytes.decode('utf-8') + + def write_script(self, script_name, contents, mode="t", blockers=()): + """Write an executable file to the scripts directory""" + self.delete_blockers( # clean up old .py/.pyw w/o a script + [os.path.join(self.script_dir, x) for x in blockers] + ) + log.info("Installing %s script to %s", script_name, self.script_dir) + target = os.path.join(self.script_dir, script_name) + self.add_output(target) + + if self.dry_run: + return + + mask = current_umask() + ensure_directory(target) + if os.path.exists(target): + os.unlink(target) + with open(target, "w" + mode) as f: + f.write(contents) + chmod(target, 0o777 - mask) + + def install_eggs(self, spec, dist_filename, tmpdir): + # .egg dirs or files are already built, so just return them + installer_map = { + '.egg': self.install_egg, + '.exe': self.install_exe, + '.whl': self.install_wheel, + } + try: + install_dist = installer_map[ + dist_filename.lower()[-4:] + ] + except KeyError: + pass + else: + return [install_dist(dist_filename, tmpdir)] + + # Anything else, try to extract and build + setup_base = tmpdir + if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): + unpack_archive(dist_filename, tmpdir, self.unpack_progress) + elif os.path.isdir(dist_filename): + setup_base = os.path.abspath(dist_filename) + + if (setup_base.startswith(tmpdir) # something we downloaded + and self.build_directory and spec is not None): + setup_base = self.maybe_move(spec, dist_filename, setup_base) + + # Find the setup.py file + setup_script = os.path.join(setup_base, 'setup.py') + + if not os.path.exists(setup_script): + setups = glob(os.path.join(setup_base, '*', 'setup.py')) + if not setups: + raise DistutilsError( + "Couldn't find a setup script in %s" % + os.path.abspath(dist_filename) + ) + if len(setups) > 1: + raise DistutilsError( + "Multiple setup scripts in %s" % + os.path.abspath(dist_filename) + ) + setup_script = setups[0] + + # Now run it, and return the result + if self.editable: + log.info(self.report_editable(spec, setup_script)) + return [] + else: + return self.build_and_install(setup_script, setup_base) + + def egg_distribution(self, egg_path): + if os.path.isdir(egg_path): + metadata = PathMetadata(egg_path, os.path.join(egg_path, + 'EGG-INFO')) + else: + metadata = EggMetadata(zipimport.zipimporter(egg_path)) + return Distribution.from_filename(egg_path, metadata=metadata) + + # FIXME: 'easy_install.install_egg' is too complex (11) + def install_egg(self, egg_path, tmpdir): # noqa: C901 + destination = os.path.join( + self.install_dir, + os.path.basename(egg_path), + ) + destination = os.path.abspath(destination) + if not self.dry_run: + ensure_directory(destination) + + dist = self.egg_distribution(egg_path) + if not samefile(egg_path, destination): + if os.path.isdir(destination) and not os.path.islink(destination): + dir_util.remove_tree(destination, dry_run=self.dry_run) + elif os.path.exists(destination): + self.execute( + os.unlink, + (destination,), + "Removing " + destination, + ) + try: + new_dist_is_zipped = False + if os.path.isdir(egg_path): + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copytree, "Copying" + elif self.should_unzip(dist): + self.mkpath(destination) + f, m = 
self.unpack_and_compile, "Extracting" + else: + new_dist_is_zipped = True + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copy2, "Copying" + self.execute( + f, + (egg_path, destination), + (m + " %s to %s") % ( + os.path.basename(egg_path), + os.path.dirname(destination) + ), + ) + update_dist_caches( + destination, + fix_zipimporter_caches=new_dist_is_zipped, + ) + except Exception: + update_dist_caches(destination, fix_zipimporter_caches=False) + raise + + self.add_output(destination) + return self.egg_distribution(destination) + + def install_exe(self, dist_filename, tmpdir): + # See if it's valid, get data + cfg = extract_wininst_cfg(dist_filename) + if cfg is None: + raise DistutilsError( + "%s is not a valid distutils Windows .exe" % dist_filename + ) + # Create a dummy distribution object until we build the real distro + dist = Distribution( + None, + project_name=cfg.get('metadata', 'name'), + version=cfg.get('metadata', 'version'), platform=get_platform(), + ) + + # Convert the .exe to an unpacked egg + egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg') + dist.location = egg_path + egg_tmp = egg_path + '.tmp' + _egg_info = os.path.join(egg_tmp, 'EGG-INFO') + pkg_inf = os.path.join(_egg_info, 'PKG-INFO') + ensure_directory(pkg_inf) # make sure EGG-INFO dir exists + dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX + self.exe_to_egg(dist_filename, egg_tmp) + + # Write EGG-INFO/PKG-INFO + if not os.path.exists(pkg_inf): + f = open(pkg_inf, 'w') + f.write('Metadata-Version: 1.0\n') + for k, v in cfg.items('metadata'): + if k != 'target_version': + f.write('%s: %s\n' % (k.replace('_', '-').title(), v)) + f.close() + script_dir = os.path.join(_egg_info, 'scripts') + # delete entry-point scripts to avoid duping + self.delete_blockers([ + os.path.join(script_dir, args[0]) + for args in ScriptWriter.get_args(dist) + ]) + # Build .egg file from tmpdir + bdist_egg.make_zipfile( + egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run, + ) + # install the .egg + return self.install_egg(egg_path, tmpdir) + + # FIXME: 'easy_install.exe_to_egg' is too complex (12) + def exe_to_egg(self, dist_filename, egg_tmp): # noqa: C901 + """Extract a bdist_wininst to the directories an egg would use""" + # Check for .pth file and set up prefix translations + prefixes = get_exe_prefixes(dist_filename) + to_compile = [] + native_libs = [] + top_level = {} + + def process(src, dst): + s = src.lower() + for old, new in prefixes: + if s.startswith(old): + src = new + src[len(old):] + parts = src.split('/') + dst = os.path.join(egg_tmp, *parts) + dl = dst.lower() + if dl.endswith('.pyd') or dl.endswith('.dll'): + parts[-1] = bdist_egg.strip_module(parts[-1]) + top_level[os.path.splitext(parts[0])[0]] = 1 + native_libs.append(src) + elif dl.endswith('.py') and old != 'SCRIPTS/': + top_level[os.path.splitext(parts[0])[0]] = 1 + to_compile.append(dst) + return dst + if not src.endswith('.pth'): + log.warn("WARNING: can't process %s", src) + return None + + # extract, tracking .pyd/.dll->native_libs and .py -> to_compile + unpack_archive(dist_filename, egg_tmp, process) + stubs = [] + for res in native_libs: + if res.lower().endswith('.pyd'): # create stubs for .pyd's + parts = res.split('/') + resource = parts[-1] + parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py' + pyfile = os.path.join(egg_tmp, *parts) + to_compile.append(pyfile) + stubs.append(pyfile) + bdist_egg.write_stub(resource, pyfile) + self.byte_compile(to_compile) # compile .py's + 
bdist_egg.write_safety_flag( + os.path.join(egg_tmp, 'EGG-INFO'), + bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag + + for name in 'top_level', 'native_libs': + if locals()[name]: + txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') + if not os.path.exists(txt): + f = open(txt, 'w') + f.write('\n'.join(locals()[name]) + '\n') + f.close() + + def install_wheel(self, wheel_path, tmpdir): + wheel = Wheel(wheel_path) + assert wheel.is_compatible() + destination = os.path.join(self.install_dir, wheel.egg_name()) + destination = os.path.abspath(destination) + if not self.dry_run: + ensure_directory(destination) + if os.path.isdir(destination) and not os.path.islink(destination): + dir_util.remove_tree(destination, dry_run=self.dry_run) + elif os.path.exists(destination): + self.execute( + os.unlink, + (destination,), + "Removing " + destination, + ) + try: + self.execute( + wheel.install_as_egg, + (destination,), + ("Installing %s to %s") % ( + os.path.basename(wheel_path), + os.path.dirname(destination) + ), + ) + finally: + update_dist_caches(destination, fix_zipimporter_caches=False) + self.add_output(destination) + return self.egg_distribution(destination) + + __mv_warning = textwrap.dedent(""" + Because this distribution was installed --multi-version, before you can + import modules from this package in an application, you will need to + 'import pkg_resources' and then use a 'require()' call similar to one of + these examples, in order to select the desired version: + + pkg_resources.require("%(name)s") # latest installed version + pkg_resources.require("%(name)s==%(version)s") # this exact version + pkg_resources.require("%(name)s>=%(version)s") # this version or higher + """).lstrip() # noqa + + __id_warning = textwrap.dedent(""" + Note also that the installation directory must be on sys.path at runtime for + this to work. (e.g. by being the application's script directory, by being on + PYTHONPATH, or by being added to sys.path by your code.) + """) # noqa + + def installation_report(self, req, dist, what="Installed"): + """Helpful installation message for display to package users""" + msg = "\n%(what)s %(eggloc)s%(extras)s" + if self.multi_version and not self.no_report: + msg += '\n' + self.__mv_warning + if self.install_dir not in map(normalize_path, sys.path): + msg += '\n' + self.__id_warning + + eggloc = dist.location + name = dist.project_name + version = dist.version + extras = '' # TODO: self.report_extras(req, dist) + return msg % locals() + + __editable_msg = textwrap.dedent(""" + Extracted editable version of %(spec)s to %(dirname)s + + If it uses setuptools in its setup script, you can activate it in + "development" mode by going to that directory and running:: + + %(python)s setup.py develop + + See the setuptools documentation for the "develop" command for more info. 
+ """).lstrip() # noqa + + def report_editable(self, spec, setup_script): + dirname = os.path.dirname(setup_script) + python = sys.executable + return '\n' + self.__editable_msg % locals() + + def run_setup(self, setup_script, setup_base, args): + sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) + sys.modules.setdefault('distutils.command.egg_info', egg_info) + + args = list(args) + if self.verbose > 2: + v = 'v' * (self.verbose - 1) + args.insert(0, '-' + v) + elif self.verbose < 2: + args.insert(0, '-q') + if self.dry_run: + args.insert(0, '-n') + log.info( + "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) + ) + try: + run_setup(setup_script, args) + except SystemExit as v: + raise DistutilsError( + "Setup script exited with %s" % (v.args[0],) + ) from v + + def build_and_install(self, setup_script, setup_base): + args = ['bdist_egg', '--dist-dir'] + + dist_dir = tempfile.mkdtemp( + prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) + ) + try: + self._set_fetcher_options(os.path.dirname(setup_script)) + args.append(dist_dir) + + self.run_setup(setup_script, setup_base, args) + all_eggs = Environment([dist_dir]) + eggs = [] + for key in all_eggs: + for dist in all_eggs[key]: + eggs.append(self.install_egg(dist.location, setup_base)) + if not eggs and not self.dry_run: + log.warn("No eggs found in %s (setup script problem?)", + dist_dir) + return eggs + finally: + rmtree(dist_dir) + log.set_verbosity(self.verbose) # restore our log verbosity + + def _set_fetcher_options(self, base): + """ + When easy_install is about to run bdist_egg on a source dist, that + source dist might have 'setup_requires' directives, requiring + additional fetching. Ensure the fetcher options given to easy_install + are available to that command as well. + """ + # find the fetch options from easy_install and write them out + # to the setup.cfg file. + ei_opts = self.distribution.get_option_dict('easy_install').copy() + fetch_directives = ( + 'find_links', 'site_dirs', 'index_url', 'optimize', 'allow_hosts', + ) + fetch_options = {} + for key, val in ei_opts.items(): + if key not in fetch_directives: + continue + fetch_options[key] = val[1] + # create a settings dictionary suitable for `edit_config` + settings = dict(easy_install=fetch_options) + cfg_filename = os.path.join(base, 'setup.cfg') + setopt.edit_config(cfg_filename, settings) + + def update_pth(self, dist): # noqa: C901 # is too complex (11) # FIXME + if self.pth_file is None: + return + + for d in self.pth_file[dist.key]: # drop old entries + if not self.multi_version and d.location == dist.location: + continue + + log.info("Removing %s from easy-install.pth file", d) + self.pth_file.remove(d) + if d.location in self.shadow_path: + self.shadow_path.remove(d.location) + + if not self.multi_version: + if dist.location in self.pth_file.paths: + log.info( + "%s is already the active version in easy-install.pth", + dist, + ) + else: + log.info("Adding %s to easy-install.pth file", dist) + self.pth_file.add(dist) # add new entry + if dist.location not in self.shadow_path: + self.shadow_path.append(dist.location) + + if self.dry_run: + return + + self.pth_file.save() + + if dist.key != 'setuptools': + return + + # Ensure that setuptools itself never becomes unavailable! + # XXX should this check for latest version? 
+ filename = os.path.join(self.install_dir, 'setuptools.pth') + if os.path.islink(filename): + os.unlink(filename) + with open(filename, 'wt') as f: + f.write(self.pth_file.make_relative(dist.location) + '\n') + + def unpack_progress(self, src, dst): + # Progress filter for unpacking + log.debug("Unpacking %s to %s", src, dst) + return dst # only unpack-and-compile skips files for dry run + + def unpack_and_compile(self, egg_path, destination): + to_compile = [] + to_chmod = [] + + def pf(src, dst): + if dst.endswith('.py') and not src.startswith('EGG-INFO/'): + to_compile.append(dst) + elif dst.endswith('.dll') or dst.endswith('.so'): + to_chmod.append(dst) + self.unpack_progress(src, dst) + return not self.dry_run and dst or None + + unpack_archive(egg_path, destination, pf) + self.byte_compile(to_compile) + if not self.dry_run: + for f in to_chmod: + mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755 + chmod(f, mode) + + def byte_compile(self, to_compile): + if sys.dont_write_bytecode: + return + + from distutils.util import byte_compile + + try: + # try to make the byte compile messages quieter + log.set_verbosity(self.verbose - 1) + + byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) + if self.optimize: + byte_compile( + to_compile, optimize=self.optimize, force=1, + dry_run=self.dry_run, + ) + finally: + log.set_verbosity(self.verbose) # restore original verbosity + + __no_default_msg = textwrap.dedent(""" + bad install directory or PYTHONPATH + + You are attempting to install a package to a directory that is not + on PYTHONPATH and which Python does not read ".pth" files from. The + installation directory you specified (via --install-dir, --prefix, or + the distutils default setting) was: + + %s + + and your PYTHONPATH environment variable currently contains: + + %r + + Here are some of your options for correcting the problem: + + * You can choose a different installation directory, i.e., one that is + on PYTHONPATH or supports .pth files + + * You can add the installation directory to the PYTHONPATH environment + variable. (It must then also be on PYTHONPATH whenever you run + Python and want to use the package(s) you are installing.) + + * You can set up the installation directory to support ".pth" files by + using one of the approaches described here: + + https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations + + + Please make the appropriate changes for your system and try again. 
+ """).strip() + + def create_home_path(self): + """Create directories under ~.""" + if not self.user: + return + home = convert_path(os.path.expanduser("~")) + for name, path in self.config_vars.items(): + if path.startswith(home) and not os.path.isdir(path): + self.debug_print("os.makedirs('%s', 0o700)" % path) + os.makedirs(path, 0o700) + + INSTALL_SCHEMES = dict( + posix=dict( + install_dir='$base/lib/python$py_version_short/site-packages', + script_dir='$base/bin', + ), + ) + + DEFAULT_SCHEME = dict( + install_dir='$base/Lib/site-packages', + script_dir='$base/Scripts', + ) + + def _expand(self, *attrs): + config_vars = self.get_finalized_command('install').config_vars + + if self.prefix: + # Set default install_dir/scripts from --prefix + config_vars = config_vars.copy() + config_vars['base'] = self.prefix + scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME) + for attr, val in scheme.items(): + if getattr(self, attr, None) is None: + setattr(self, attr, val) + + from distutils.util import subst_vars + + for attr in attrs: + val = getattr(self, attr) + if val is not None: + val = subst_vars(val, config_vars) + if os.name == 'posix': + val = os.path.expanduser(val) + setattr(self, attr, val) + + +def _pythonpath(): + items = os.environ.get('PYTHONPATH', '').split(os.pathsep) + return filter(None, items) + + +def get_site_dirs(): + """ + Return a list of 'site' dirs + """ + + sitedirs = [] + + # start with PYTHONPATH + sitedirs.extend(_pythonpath()) + + prefixes = [sys.prefix] + if sys.exec_prefix != sys.prefix: + prefixes.append(sys.exec_prefix) + for prefix in prefixes: + if not prefix: + continue + + if sys.platform in ('os2emx', 'riscos'): + sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) + elif os.sep == '/': + sitedirs.extend([ + os.path.join( + prefix, + "lib", + "python{}.{}".format(*sys.version_info), + "site-packages", + ), + os.path.join(prefix, "lib", "site-python"), + ]) + else: + sitedirs.extend([ + prefix, + os.path.join(prefix, "lib", "site-packages"), + ]) + if sys.platform != 'darwin': + continue + + # for framework builds *only* we add the standard Apple + # locations. 
Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' not in prefix:
+ continue
+
+ home = os.environ.get('HOME')
+ if not home:
+ continue
+
+ home_sp = os.path.join(
+ home,
+ 'Library',
+ 'Python',
+ '{}.{}'.format(*sys.version_info),
+ 'site-packages',
+ )
+ sitedirs.append(home_sp)
+ lib_paths = get_path('purelib'), get_path('platlib')
+
+ sitedirs.extend(s for s in lib_paths if s not in sitedirs)
+
+ if site.ENABLE_USER_SITE:
+ sitedirs.append(site.USER_SITE)
+
+ with contextlib.suppress(AttributeError):
+ sitedirs.extend(site.getsitepackages())
+
+ sitedirs = list(map(normalize_path, sitedirs))
+
+ return sitedirs
+
+
+def expand_paths(inputs): # noqa: C901 # is too complex (11) # FIXME
+ """Yield sys.path directories that might contain "old-style" packages"""
+
+ seen = {}
+
+ for dirname in inputs:
+ dirname = normalize_path(dirname)
+ if dirname in seen:
+ continue
+
+ seen[dirname] = 1
+ if not os.path.isdir(dirname):
+ continue
+
+ files = os.listdir(dirname)
+ yield dirname, files
+
+ for name in files:
+ if not name.endswith('.pth'):
+ # We only care about the .pth files
+ continue
+ if name in ('easy-install.pth', 'setuptools.pth'):
+ # Ignore .pth files that we control
+ continue
+
+ # Read the .pth file
+ f = open(os.path.join(dirname, name))
+ lines = list(yield_lines(f))
+ f.close()
+
+ # Yield existing non-dupe, non-import directory lines from it
+ for line in lines:
+ if line.startswith("import"):
+ continue
+
+ line = normalize_path(line.rstrip())
+ if line in seen:
+ continue
+
+ seen[line] = 1
+ if not os.path.isdir(line):
+ continue
+
+ yield line, os.listdir(line)
+
+
+def extract_wininst_cfg(dist_filename):
+ """Extract configuration data from a bdist_wininst .exe
+
+ Returns a configparser.RawConfigParser, or None
+ """
+ f = open(dist_filename, 'rb')
+ try:
+ endrec = zipfile._EndRecData(f)
+ if endrec is None:
+ return None
+
+ prepended = (endrec[9] - endrec[5]) - endrec[6]
+ if prepended < 12: # no wininst data here
+ return None
+ f.seek(prepended - 12)
+
+ tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
+ if tag not in (0x1234567A, 0x1234567B):
+ return None # not a valid tag
+
+ f.seek(prepended - (12 + cfglen))
+ init = {'version': '', 'target_version': ''}
+ cfg = configparser.RawConfigParser(init)
+ try:
+ part = f.read(cfglen)
+ # Read up to the first null byte.
+ config = part.split(b'\0', 1)[0]
+ # Now the config is in unicode, but latin1 is not the right encoding
+ config = config.decode(sys.getfilesystemencoding())
+ cfg.read_string(config)
+ except configparser.Error:
+ return None
+ if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
+ return None
+ return cfg
+ finally:
+ f.close()
+
+
+def get_exe_prefixes(exe_filename):
+ """Get exe->egg path translations for a given .exe file"""
+
+ prefixes = [
+ ('PURELIB/', ''),
+ ('PLATLIB/pywin32_system32', ''),
+ ('PLATLIB/', ''),
+ ('SCRIPTS/', 'EGG-INFO/scripts/'),
+ ('DATA/lib/site-packages', ''),
+ ]
+ z = zipfile.ZipFile(exe_filename)
+ try:
+ for info in z.infolist():
+ name = info.filename
+ parts = name.split('/')
+ if len(parts) == 3 and parts[2] == 'PKG-INFO':
+ if parts[1].endswith('.egg-info'):
+ prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
+ break
+ if len(parts) != 2 or not name.endswith('.pth'):
+ continue
+ if name.endswith('-nspkg.pth'):
+ continue
+ if parts[0].upper() in ('PURELIB', 'PLATLIB'):
+ contents = z.read(name).decode()
+ for pth in yield_lines(contents):
+ pth = pth.strip().replace('\\', '/')
+ if not pth.startswith('import'):
+ prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
+ finally:
+ z.close()
+ prefixes = [(x.lower(), y) for x, y in prefixes]
+ prefixes.sort()
+ prefixes.reverse()
+ return prefixes
+
+
+class PthDistributions(Environment):
+ """A .pth file with Distribution paths in it"""
+
+ dirty = False
+
+ def __init__(self, filename, sitedirs=()):
+ self.filename = filename
+ self.sitedirs = list(map(normalize_path, sitedirs))
+ self.basedir = normalize_path(os.path.dirname(self.filename))
+ self._load()
+ Environment.__init__(self, [], None, None)
+ for path in yield_lines(self.paths):
+ list(map(self.add, find_distributions(path, True)))
+
+ def
_load(self): + self.paths = [] + saw_import = False + seen = dict.fromkeys(self.sitedirs) + if os.path.isfile(self.filename): + f = open(self.filename, 'rt') + for line in f: + if line.startswith('import'): + saw_import = True + continue + path = line.rstrip() + self.paths.append(path) + if not path.strip() or path.strip().startswith('#'): + continue + # skip non-existent paths, in case somebody deleted a package + # manually, and duplicate paths as well + path = self.paths[-1] = normalize_path( + os.path.join(self.basedir, path) + ) + if not os.path.exists(path) or path in seen: + self.paths.pop() # skip it + self.dirty = True # we cleaned up, so we're dirty now :) + continue + seen[path] = 1 + f.close() + + if self.paths and not saw_import: + self.dirty = True # ensure anything we touch has import wrappers + while self.paths and not self.paths[-1].strip(): + self.paths.pop() + + def save(self): + """Write changed .pth file back to disk""" + if not self.dirty: + return + + rel_paths = list(map(self.make_relative, self.paths)) + if rel_paths: + log.debug("Saving %s", self.filename) + lines = self._wrap_lines(rel_paths) + data = '\n'.join(lines) + '\n' + + if os.path.islink(self.filename): + os.unlink(self.filename) + with open(self.filename, 'wt') as f: + f.write(data) + + elif os.path.exists(self.filename): + log.debug("Deleting empty %s", self.filename) + os.unlink(self.filename) + + self.dirty = False + + @staticmethod + def _wrap_lines(lines): + return lines + + def add(self, dist): + """Add `dist` to the distribution map""" + new_path = ( + dist.location not in self.paths and ( + dist.location not in self.sitedirs or + # account for '.' being in PYTHONPATH + dist.location == os.getcwd() + ) + ) + if new_path: + self.paths.append(dist.location) + self.dirty = True + Environment.add(self, dist) + + def remove(self, dist): + """Remove `dist` from the distribution map""" + while dist.location in self.paths: + self.paths.remove(dist.location) + self.dirty = True + Environment.remove(self, dist) + + def make_relative(self, path): + npath, last = os.path.split(normalize_path(path)) + baselen = len(self.basedir) + parts = [last] + sep = os.altsep == '/' and '/' or os.sep + while len(npath) >= baselen: + if npath == self.basedir: + parts.append(os.curdir) + parts.reverse() + return sep.join(parts) + npath, last = os.path.split(npath) + parts.append(last) + else: + return path + + +class RewritePthDistributions(PthDistributions): + @classmethod + def _wrap_lines(cls, lines): + yield cls.prelude + for line in lines: + yield line + yield cls.postlude + + prelude = _one_liner(""" + import sys + sys.__plen = len(sys.path) + """) + postlude = _one_liner(""" + import sys + new = sys.path[sys.__plen:] + del sys.path[sys.__plen:] + p = getattr(sys, '__egginsert', 0) + sys.path[p:p] = new + sys.__egginsert = p + len(new) + """) + + +if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite': + PthDistributions = RewritePthDistributions + + +def _first_line_re(): + """ + Return a regular expression based on first_line_re suitable for matching + strings. + """ + if isinstance(first_line_re.pattern, str): + return first_line_re + + # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. + return re.compile(first_line_re.pattern.decode()) + + +def auto_chmod(func, arg, exc): + if func in [os.unlink, os.remove] and os.name == 'nt': + chmod(arg, stat.S_IWRITE) + return func(arg) + et, ev, _ = sys.exc_info() + # TODO: This code doesn't make sense. What is it trying to do? 
+ raise (ev[0], ev[1] + (" %s %s" % (func, arg))) + + +def update_dist_caches(dist_path, fix_zipimporter_caches): + """ + Fix any globally cached `dist_path` related data + + `dist_path` should be a path of a newly installed egg distribution (zipped + or unzipped). + + sys.path_importer_cache contains finder objects that have been cached when + importing data from the original distribution. Any such finders need to be + cleared since the replacement distribution might be packaged differently, + e.g. a zipped egg distribution might get replaced with an unzipped egg + folder or vice versa. Having the old finders cached may then cause Python + to attempt loading modules from the replacement distribution using an + incorrect loader. + + zipimport.zipimporter objects are Python loaders charged with importing + data packaged inside zip archives. If stale loaders referencing the + original distribution, are left behind, they can fail to load modules from + the replacement distribution. E.g. if an old zipimport.zipimporter instance + is used to load data from a new zipped egg archive, it may cause the + operation to attempt to locate the requested data in the wrong location - + one indicated by the original distribution's zip archive directory + information. Such an operation may then fail outright, e.g. report having + read a 'bad local file header', or even worse, it may fail silently & + return invalid data. + + zipimport._zip_directory_cache contains cached zip archive directory + information for all existing zipimport.zipimporter instances and all such + instances connected to the same archive share the same cached directory + information. + + If asked, and the underlying Python implementation allows it, we can fix + all existing zipimport.zipimporter instances instead of having to track + them down and remove them one by one, by updating their shared cached zip + archive directory information. This, of course, assumes that the + replacement distribution is packaged as a zipped egg. + + If not asked to fix existing zipimport.zipimporter instances, we still do + our best to clear any remaining zipimport.zipimporter related cached data + that might somehow later get used when attempting to load data from the new + distribution and thus cause such load operations to fail. Note that when + tracking down such remaining stale data, we can not catch every conceivable + usage from here, and we clear only those that we know of and have found to + cause problems if left alive. Any remaining caches should be updated by + whomever is in charge of maintaining them, i.e. they should be ready to + handle us replacing their zip archives with new distributions at runtime. + + """ + # There are several other known sources of stale zipimport.zipimporter + # instances that we do not clear here, but might if ever given a reason to + # do so: + # * Global setuptools pkg_resources.working_set (a.k.a. 'master working + # set') may contain distributions which may in turn contain their + # zipimport.zipimporter loaders. + # * Several zipimport.zipimporter loaders held by local variables further + # up the function call stack when running the setuptools installation. + # * Already loaded modules may have their __loader__ attribute set to the + # exact loader instance used when importing them. Python 3.4 docs state + # that this information is intended mostly for introspection and so is + # not expected to cause us problems. 
+ normalized_path = normalize_path(dist_path) + _uncache(normalized_path, sys.path_importer_cache) + if fix_zipimporter_caches: + _replace_zip_directory_cache_data(normalized_path) + else: + # Here, even though we do not want to fix existing and now stale + # zipimporter cache information, we still want to remove it. Related to + # Python's zip archive directory information cache, we clear each of + # its stale entries in two phases: + # 1. Clear the entry so attempting to access zip archive information + # via any existing stale zipimport.zipimporter instances fails. + # 2. Remove the entry from the cache so any newly constructed + # zipimport.zipimporter instances do not end up using old stale + # zip archive directory information. + # This whole stale data removal step does not seem strictly necessary, + # but has been left in because it was done before we started replacing + # the zip archive directory information cache content if possible, and + # there are no relevant unit tests that we can depend on to tell us if + # this is really needed. + _remove_and_clear_zip_directory_cache_data(normalized_path) + + +def _collect_zipimporter_cache_entries(normalized_path, cache): + """ + Return zipimporter cache entry keys related to a given normalized path. + + Alternative path spellings (e.g. those using different character case or + those using alternative path separators) related to the same path are + included. Any sub-path entries are included as well, i.e. those + corresponding to zip archives embedded in other zip archives. + + """ + result = [] + prefix_len = len(normalized_path) + for p in cache: + np = normalize_path(p) + if (np.startswith(normalized_path) and + np[prefix_len:prefix_len + 1] in (os.sep, '')): + result.append(p) + return result + + +def _update_zipimporter_cache(normalized_path, cache, updater=None): + """ + Update zipimporter cache data for a given normalized path. + + Any sub-path entries are processed as well, i.e. those corresponding to zip + archives embedded in other zip archives. + + Given updater is a callable taking a cache entry key and the original entry + (after already removing the entry from the cache), and expected to update + the entry and possibly return a new one to be inserted in its place. + Returning None indicates that the entry should not be replaced with a new + one. If no updater is given, the cache entries are simply removed without + any additional processing, the same as if the updater simply returned None. + + """ + for p in _collect_zipimporter_cache_entries(normalized_path, cache): + # N.B. pypy's custom zipimport._zip_directory_cache implementation does + # not support the complete dict interface: + # * Does not support item assignment, thus not allowing this function + # to be used only for removing existing cache entries. + # * Does not support the dict.pop() method, forcing us to use the + # get/del patterns instead. 
For more detailed information see the + # following links: + # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420 + # http://bit.ly/2h9itJX + old_entry = cache[p] + del cache[p] + new_entry = updater and updater(p, old_entry) + if new_entry is not None: + cache[p] = new_entry + + +def _uncache(normalized_path, cache): + _update_zipimporter_cache(normalized_path, cache) + + +def _remove_and_clear_zip_directory_cache_data(normalized_path): + def clear_and_remove_cached_zip_archive_directory_data(path, old_entry): + old_entry.clear() + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=clear_and_remove_cached_zip_archive_directory_data) + + +# PyPy Python implementation does not allow directly writing to the +# zipimport._zip_directory_cache and so prevents us from attempting to correct +# its content. The best we can do there is clear the problematic cache content +# and have PyPy repopulate it as needed. The downside is that if there are any +# stale zipimport.zipimporter instances laying around, attempting to use them +# will fail due to not having its zip archive directory information available +# instead of being automatically corrected to use the new correct zip archive +# directory information. +if '__pypy__' in sys.builtin_module_names: + _replace_zip_directory_cache_data = \ + _remove_and_clear_zip_directory_cache_data +else: + + def _replace_zip_directory_cache_data(normalized_path): + def replace_cached_zip_archive_directory_data(path, old_entry): + # N.B. In theory, we could load the zip directory information just + # once for all updated path spellings, and then copy it locally and + # update its contained path strings to contain the correct + # spelling, but that seems like a way too invasive move (this cache + # structure is not officially documented anywhere and could in + # theory change with new Python releases) for no significant + # benefit. + old_entry.clear() + zipimport.zipimporter(path) + old_entry.update(zipimport._zip_directory_cache[path]) + return old_entry + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=replace_cached_zip_archive_directory_data) + + +def is_python(text, filename=''): + "Is this string a valid Python script?" + try: + compile(text, filename, 'exec') + except (SyntaxError, TypeError): + return False + else: + return True + + +def is_sh(executable): + """Determine if the specified executable is a .sh (contains a #! line)""" + try: + with io.open(executable, encoding='latin-1') as fp: + magic = fp.read(2) + except (OSError, IOError): + return executable + return magic == '#!' + + +def nt_quote_arg(arg): + """Quote a command line argument according to Windows parsing rules""" + return subprocess.list2cmdline([arg]) + + +def is_python_script(script_text, filename): + """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. + """ + if filename.endswith('.py') or filename.endswith('.pyw'): + return True # extension says it's Python + if is_python(script_text, filename): + return True # it's syntactically valid Python + if script_text.startswith('#!'): + # It begins with a '#!' 
line, so check if 'python' is in it somewhere + return 'python' in script_text.splitlines()[0].lower() + + return False # Not any Python I can recognize + + +try: + from os import chmod as _chmod +except ImportError: + # Jython compatibility + def _chmod(*args): + pass + + +def chmod(path, mode): + log.debug("changing mode of %s to %o", path, mode) + try: + _chmod(path, mode) + except os.error as e: + log.debug("chmod failed: %s", e) + + +class CommandSpec(list): + """ + A command spec for a #! header, specified as a list of arguments akin to + those passed to Popen. + """ + + options = [] + split_args = dict() + + @classmethod + def best(cls): + """ + Choose the best CommandSpec class based on environmental conditions. + """ + return cls + + @classmethod + def _sys_executable(cls): + _default = os.path.normpath(sys.executable) + return os.environ.get('__PYVENV_LAUNCHER__', _default) + + @classmethod + def from_param(cls, param): + """ + Construct a CommandSpec from a parameter to build_scripts, which may + be None. + """ + if isinstance(param, cls): + return param + if isinstance(param, list): + return cls(param) + if param is None: + return cls.from_environment() + # otherwise, assume it's a string. + return cls.from_string(param) + + @classmethod + def from_environment(cls): + return cls([cls._sys_executable()]) + + @classmethod + def from_string(cls, string): + """ + Construct a command spec from a simple string representing a command + line parseable by shlex.split. + """ + items = shlex.split(string, **cls.split_args) + return cls(items) + + def install_options(self, script_text): + self.options = shlex.split(self._extract_options(script_text)) + cmdline = subprocess.list2cmdline(self) + if not isascii(cmdline): + self.options[:0] = ['-x'] + + @staticmethod + def _extract_options(orig_script): + """ + Extract any options from the first line of the script. + """ + first = (orig_script + '\n').splitlines()[0] + match = _first_line_re().match(first) + options = match.group(1) or '' if match else '' + return options.strip() + + def as_header(self): + return self._render(self + list(self.options)) + + @staticmethod + def _strip_quotes(item): + _QUOTES = '"\'' + for q in _QUOTES: + if item.startswith(q) and item.endswith(q): + return item[1:-1] + return item + + @staticmethod + def _render(items): + cmdline = subprocess.list2cmdline( + CommandSpec._strip_quotes(item.strip()) for item in items) + return '#!' + cmdline + '\n' + + +# For pbr compat; will be removed in a future version. +sys_executable = CommandSpec._sys_executable() + + +class WindowsCommandSpec(CommandSpec): + split_args = dict(posix=False) + + +class ScriptWriter: + """ + Encapsulates behavior around writing entry point scripts for console and + gui apps. 
+ """ + + template = textwrap.dedent(r""" + # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r + import re + import sys + + # for compatibility with easy_install; see #2198 + __requires__ = %(spec)r + + try: + from importlib.metadata import distribution + except ImportError: + try: + from importlib_metadata import distribution + except ImportError: + from pkg_resources import load_entry_point + + + def importlib_load_entry_point(spec, group, name): + dist_name, _, _ = spec.partition('==') + matches = ( + entry_point + for entry_point in distribution(dist_name).entry_points + if entry_point.group == group and entry_point.name == name + ) + return next(matches).load() + + + globals().setdefault('load_entry_point', importlib_load_entry_point) + + + if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(load_entry_point(%(spec)r, %(group)r, %(name)r)()) + """).lstrip() + + command_spec_class = CommandSpec + + @classmethod + def get_script_args(cls, dist, executable=None, wininst=False): + # for backward compatibility + warnings.warn("Use get_args", EasyInstallDeprecationWarning) + writer = (WindowsScriptWriter if wininst else ScriptWriter).best() + header = cls.get_script_header("", executable, wininst) + return writer.get_args(dist, header) + + @classmethod + def get_script_header(cls, script_text, executable=None, wininst=False): + # for backward compatibility + warnings.warn( + "Use get_header", EasyInstallDeprecationWarning, stacklevel=2) + if wininst: + executable = "python.exe" + return cls.get_header(script_text, executable) + + @classmethod + def get_args(cls, dist, header=None): + """ + Yield write_script() argument tuples for a distribution's + console_scripts and gui_scripts entry points. + """ + if header is None: + header = cls.get_header() + spec = str(dist.as_requirement()) + for type_ in 'console', 'gui': + group = type_ + '_scripts' + for name, ep in dist.get_entry_map(group).items(): + cls._ensure_safe_name(name) + script_text = cls.template % locals() + args = cls._get_script_args(type_, name, header, script_text) + for res in args: + yield res + + @staticmethod + def _ensure_safe_name(name): + """ + Prevent paths in *_scripts entry point names. + """ + has_path_sep = re.search(r'[\\/]', name) + if has_path_sep: + raise ValueError("Path separators not allowed in script names") + + @classmethod + def get_writer(cls, force_windows): + # for backward compatibility + warnings.warn("Use best", EasyInstallDeprecationWarning) + return WindowsScriptWriter.best() if force_windows else cls.best() + + @classmethod + def best(cls): + """ + Select the best ScriptWriter for this environment. + """ + if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'): + return WindowsScriptWriter.best() + else: + return cls + + @classmethod + def _get_script_args(cls, type_, name, header, script_text): + # Simply write the stub with no extension. + yield (name, header + script_text) + + @classmethod + def get_header(cls, script_text="", executable=None): + """Create a #! 
line, getting options (if any) from script_text""" + cmd = cls.command_spec_class.best().from_param(executable) + cmd.install_options(script_text) + return cmd.as_header() + + +class WindowsScriptWriter(ScriptWriter): + command_spec_class = WindowsCommandSpec + + @classmethod + def get_writer(cls): + # for backward compatibility + warnings.warn("Use best", EasyInstallDeprecationWarning) + return cls.best() + + @classmethod + def best(cls): + """ + Select the best ScriptWriter suitable for Windows + """ + writer_lookup = dict( + executable=WindowsExecutableLauncherWriter, + natural=cls, + ) + # for compatibility, use the executable launcher by default + launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable') + return writer_lookup[launcher] + + @classmethod + def _get_script_args(cls, type_, name, header, script_text): + "For Windows, add a .py extension" + ext = dict(console='.pya', gui='.pyw')[type_] + if ext not in os.environ['PATHEXT'].lower().split(';'): + msg = ( + "{ext} not listed in PATHEXT; scripts will not be " + "recognized as executables." + ).format(**locals()) + warnings.warn(msg, UserWarning) + old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] + old.remove(ext) + header = cls._adjust_header(type_, header) + blockers = [name + x for x in old] + yield name + ext, header + script_text, 't', blockers + + @classmethod + def _adjust_header(cls, type_, orig_header): + """ + Make sure 'pythonw' is used for gui and 'python' is used for + console (regardless of what sys.executable is). + """ + pattern = 'pythonw.exe' + repl = 'python.exe' + if type_ == 'gui': + pattern, repl = repl, pattern + pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE) + new_header = pattern_ob.sub(string=orig_header, repl=repl) + return new_header if cls._use_header(new_header) else orig_header + + @staticmethod + def _use_header(new_header): + """ + Should _adjust_header use the replaced header? + + On non-windows systems, always use. On + Windows systems, only use the replaced header if it resolves + to an executable on the system. + """ + clean_header = new_header[2:-1].strip('"') + return sys.platform != 'win32' or find_executable(clean_header) + + +class WindowsExecutableLauncherWriter(WindowsScriptWriter): + @classmethod + def _get_script_args(cls, type_, name, header, script_text): + """ + For Windows, add a .py extension and an .exe launcher + """ + if type_ == 'gui': + launcher_type = 'gui' + ext = '-script.pyw' + old = ['.pyw'] + else: + launcher_type = 'cli' + ext = '-script.py' + old = ['.py', '.pyc', '.pyo'] + hdr = cls._adjust_header(type_, header) + blockers = [name + x for x in old] + yield (name + ext, hdr + script_text, 't', blockers) + yield ( + name + '.exe', get_win_launcher(launcher_type), + 'b' # write in binary mode + ) + if not is_64bit(): + # install a manifest for the launcher to prevent Windows + # from detecting it as an installer (which it will for + # launchers like easy_install.exe). Consider only + # adding a manifest for launchers detected as installers. + # See Distribute #143 for details. + m_name = name + '.exe.manifest' + yield (m_name, load_launcher_manifest(name), 't') + + +# for backward-compatibility +get_script_args = ScriptWriter.get_script_args +get_script_header = ScriptWriter.get_script_header + + +def get_win_launcher(type): + """ + Load the Windows launcher (executable) suitable for launching a script. + + `type` should be either 'cli' or 'gui' + + Returns the executable as a byte string. 
+ """ + launcher_fn = '%s.exe' % type + if is_64bit(): + launcher_fn = launcher_fn.replace(".", "-64.") + else: + launcher_fn = launcher_fn.replace(".", "-32.") + return resource_string('setuptools', launcher_fn) + + +def load_launcher_manifest(name): + manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') + return manifest.decode('utf-8') % vars() + + +def rmtree(path, ignore_errors=False, onerror=auto_chmod): + return shutil.rmtree(path, ignore_errors, onerror) + + +def current_umask(): + tmp = os.umask(0o022) + os.umask(tmp) + return tmp + + +class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): + """ + Warning for EasyInstall deprecations, bypassing suppression. + """ diff --git a/env/lib/python2.7/site-packages/setuptools/command/egg_info.py b/venv/Lib/site-packages/setuptools/command/egg_info.py similarity index 85% rename from env/lib/python2.7/site-packages/setuptools/command/egg_info.py rename to venv/Lib/site-packages/setuptools/command/egg_info.py index 5d8f451e..18b81340 100644 --- a/env/lib/python2.7/site-packages/setuptools/command/egg_info.py +++ b/venv/Lib/site-packages/setuptools/command/egg_info.py @@ -8,6 +8,7 @@ from distutils import log import distutils.errors import distutils.filelist +import functools import os import re import sys @@ -16,9 +17,6 @@ import time import collections -from setuptools.extern import six -from setuptools.extern.six.moves import map - from setuptools import Command from setuptools.command.sdist import sdist from setuptools.command.sdist import walk_revctrl @@ -33,7 +31,8 @@ from setuptools.extern import packaging from setuptools import SetuptoolsDeprecationWarning -def translate_pattern(glob): + +def translate_pattern(glob): # noqa: C901 # is too complex (14) # FIXME """ Translate a file path glob like '*.txt' in to a regular expression. This differs from fnmatch.translate which allows wildcards to match @@ -113,7 +112,7 @@ def translate_pattern(glob): pat += sep pat += r'\Z' - return re.compile(pat, flags=re.MULTILINE|re.DOTALL) + return re.compile(pat, flags=re.MULTILINE | re.DOTALL) class InfoCommon: @@ -125,12 +124,17 @@ def name(self): return safe_name(self.distribution.get_name()) def tagged_version(self): - version = self.distribution.get_version() - # egg_info may be called more than once for a distribution, - # in which case the version string already contains all tags. - if self.vtags and version.endswith(self.vtags): - return safe_version(version) - return safe_version(version + self.vtags) + return safe_version(self._maybe_tag(self.distribution.get_version())) + + def _maybe_tag(self, version): + """ + egg_info may be called more than once for a distribution, + in which case the version string already contains all tags. + """ + return ( + version if self.vtags and version.endswith(self.vtags) + else version + self.vtags + ) def tags(self): version = '' @@ -207,11 +211,11 @@ def finalize_options(self): list( parse_requirements(spec % (self.egg_name, self.egg_version)) ) - except ValueError: + except ValueError as e: raise distutils.errors.DistutilsOptionError( "Invalid distribution name or version syntax: %s-%s" % (self.egg_name, self.egg_version) - ) + ) from e if self.egg_base is None: dirs = self.distribution.package_dir @@ -266,8 +270,7 @@ def write_file(self, what, filename, data): to the file. 
""" log.info("writing %s to %s", what, filename) - if six.PY3: - data = data.encode("utf-8") + data = data.encode("utf-8") if not self.dry_run: f = open(filename, 'wb') f.write(data) @@ -330,70 +333,74 @@ def process_template_line(self, line): # patterns, (dir and patterns), or (dir_pattern). (action, patterns, dir, dir_pattern) = self._parse_template_line(line) + action_map = { + 'include': self.include, + 'exclude': self.exclude, + 'global-include': self.global_include, + 'global-exclude': self.global_exclude, + 'recursive-include': functools.partial( + self.recursive_include, dir, + ), + 'recursive-exclude': functools.partial( + self.recursive_exclude, dir, + ), + 'graft': self.graft, + 'prune': self.prune, + } + log_map = { + 'include': "warning: no files found matching '%s'", + 'exclude': ( + "warning: no previously-included files found " + "matching '%s'" + ), + 'global-include': ( + "warning: no files found matching '%s' " + "anywhere in distribution" + ), + 'global-exclude': ( + "warning: no previously-included files matching " + "'%s' found anywhere in distribution" + ), + 'recursive-include': ( + "warning: no files found matching '%s' " + "under directory '%s'" + ), + 'recursive-exclude': ( + "warning: no previously-included files matching " + "'%s' found under directory '%s'" + ), + 'graft': "warning: no directories found matching '%s'", + 'prune': "no previously-included directories found matching '%s'", + } + + try: + process_action = action_map[action] + except KeyError: + raise DistutilsInternalError( + "this cannot happen: invalid action '{action!s}'". + format(action=action), + ) + # OK, now we know that the action is valid and we have the # right number of words on the line for that action -- so we # can proceed with minimal error-checking. 
- if action == 'include': - self.debug_print("include " + ' '.join(patterns)) - for pattern in patterns: - if not self.include(pattern): - log.warn("warning: no files found matching '%s'", pattern) - - elif action == 'exclude': - self.debug_print("exclude " + ' '.join(patterns)) - for pattern in patterns: - if not self.exclude(pattern): - log.warn(("warning: no previously-included files " - "found matching '%s'"), pattern) - - elif action == 'global-include': - self.debug_print("global-include " + ' '.join(patterns)) - for pattern in patterns: - if not self.global_include(pattern): - log.warn(("warning: no files found matching '%s' " - "anywhere in distribution"), pattern) - - elif action == 'global-exclude': - self.debug_print("global-exclude " + ' '.join(patterns)) - for pattern in patterns: - if not self.global_exclude(pattern): - log.warn(("warning: no previously-included files matching " - "'%s' found anywhere in distribution"), - pattern) - - elif action == 'recursive-include': - self.debug_print("recursive-include %s %s" % - (dir, ' '.join(patterns))) - for pattern in patterns: - if not self.recursive_include(dir, pattern): - log.warn(("warning: no files found matching '%s' " - "under directory '%s'"), - pattern, dir) - - elif action == 'recursive-exclude': - self.debug_print("recursive-exclude %s %s" % - (dir, ' '.join(patterns))) - for pattern in patterns: - if not self.recursive_exclude(dir, pattern): - log.warn(("warning: no previously-included files matching " - "'%s' found under directory '%s'"), - pattern, dir) - - elif action == 'graft': - self.debug_print("graft " + dir_pattern) - if not self.graft(dir_pattern): - log.warn("warning: no directories found matching '%s'", - dir_pattern) - - elif action == 'prune': - self.debug_print("prune " + dir_pattern) - if not self.prune(dir_pattern): - log.warn(("no previously-included directories found " - "matching '%s'"), dir_pattern) - - else: - raise DistutilsInternalError( - "this cannot happen: invalid action '%s'" % action) + + action_is_recursive = action.startswith('recursive-') + if action in {'graft', 'prune'}: + patterns = [dir_pattern] + extra_log_args = (dir, ) if action_is_recursive else () + log_tmpl = log_map[action] + + self.debug_print( + ' '.join( + [action] + + ([dir] if action_is_recursive else []) + + patterns, + ) + ) + for pattern in patterns: + if not process_action(pattern): + log.warn(log_tmpl, pattern, *extra_log_args) def _remove_files(self, predicate): """ @@ -534,6 +541,7 @@ def run(self): self.add_defaults() if os.path.exists(self.template): self.read_template() + self.add_license_files() self.prune_file_list() self.filelist.sort() self.filelist.remove_duplicates() @@ -568,7 +576,6 @@ def _should_suppress_warning(msg): def add_defaults(self): sdist.add_defaults(self) - self.check_license() self.filelist.append(self.template) self.filelist.append(self.manifest) rcfiles = list(walk_revctrl()) @@ -585,6 +592,13 @@ def add_defaults(self): ei_cmd = self.get_finalized_command('egg_info') self.filelist.graft(ei_cmd.egg_info) + def add_license_files(self): + license_files = self.distribution.metadata.license_files or [] + for lf in license_files: + log.info("adding license file '%s'", lf) + pass + self.filelist.extend(license_files) + def prune_file_list(self): build = self.get_finalized_command('build') base_dir = self.distribution.get_fullname() @@ -637,14 +651,16 @@ def warn_depends_obsolete(cmd, basename, filename): def _write_requirements(stream, reqs): lines = yield_lines(reqs or ()) - append_cr = lambda 
line: line + '\n' + + def append_cr(line): + return line + '\n' lines = map(append_cr, lines) stream.writelines(lines) def write_requirements(cmd, basename, filename): dist = cmd.distribution - data = six.StringIO() + data = io.StringIO() _write_requirements(data, dist.install_requires) extras_require = dist.extras_require or {} for extra in sorted(extras_require): @@ -684,12 +700,12 @@ def write_arg(cmd, basename, filename, force=False): def write_entries(cmd, basename, filename): ep = cmd.distribution.entry_points - if isinstance(ep, six.string_types) or ep is None: + if isinstance(ep, str) or ep is None: data = ep elif ep is not None: data = [] for section, contents in sorted(ep.items()): - if not isinstance(contents, six.string_types): + if not isinstance(contents, str): contents = EntryPoint.parse_group(section, contents) contents = '\n'.join(sorted(map(str, contents.values()))) data.append('[%s]\n%s\n\n' % (section, contents)) @@ -703,7 +719,8 @@ def get_pkg_info_revision(): Get a -r### off of PKG-INFO Version in case this is an sdist of a subversion revision. """ - warnings.warn("get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning) + warnings.warn( + "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning) if os.path.exists('PKG-INFO'): with io.open('PKG-INFO') as f: for line in f: @@ -714,4 +731,4 @@ def get_pkg_info_revision(): class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning): - """Class for warning about deprecations in eggInfo in setupTools. Not ignored by default, unlike DeprecationWarning.""" + """Deprecated behavior warning for EggInfo, bypassing suppression.""" diff --git a/venv/Lib/site-packages/setuptools/command/install.py b/venv/Lib/site-packages/setuptools/command/install.py new file mode 100644 index 00000000..72b9a3e4 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/install.py @@ -0,0 +1,125 @@ +from distutils.errors import DistutilsArgError +import inspect +import glob +import warnings +import platform +import distutils.command.install as orig + +import setuptools + +# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for +# now. 
See https://github.com/pypa/setuptools/issues/199/ +_install = orig.install + + +class install(orig.install): + """Use easy_install to install the package, w/dependencies""" + + user_options = orig.install.user_options + [ + ('old-and-unmanageable', None, "Try not to use this!"), + ('single-version-externally-managed', None, + "used by system package builders to create 'flat' eggs"), + ] + boolean_options = orig.install.boolean_options + [ + 'old-and-unmanageable', 'single-version-externally-managed', + ] + new_commands = [ + ('install_egg_info', lambda self: True), + ('install_scripts', lambda self: True), + ] + _nc = dict(new_commands) + + def initialize_options(self): + orig.install.initialize_options(self) + self.old_and_unmanageable = None + self.single_version_externally_managed = None + + def finalize_options(self): + orig.install.finalize_options(self) + if self.root: + self.single_version_externally_managed = True + elif self.single_version_externally_managed: + if not self.root and not self.record: + raise DistutilsArgError( + "You must specify --record or --root when building system" + " packages" + ) + + def handle_extra_path(self): + if self.root or self.single_version_externally_managed: + # explicit backward-compatibility mode, allow extra_path to work + return orig.install.handle_extra_path(self) + + # Ignore extra_path when installing an egg (or being run by another + # command without --root or --single-version-externally-managed + self.path_file = None + self.extra_dirs = '' + + def run(self): + # Explicit request for old-style install? Just do it + if self.old_and_unmanageable or self.single_version_externally_managed: + return orig.install.run(self) + + if not self._called_from_setup(inspect.currentframe()): + # Run in backward-compatibility mode to support bdist_* commands. + orig.install.run(self) + else: + self.do_egg_install() + + @staticmethod + def _called_from_setup(run_frame): + """ + Attempt to detect whether run() was called from setup() or by another + command. If called by setup(), the parent caller will be the + 'run_command' method in 'distutils.dist', and *its* caller will be + the 'run_commands' method. If called any other way, the + immediate caller *might* be 'run_command', but it won't have been + called by 'run_commands'. Return True in that case or if a call stack + is unavailable. Return False otherwise. + """ + if run_frame is None: + msg = "Call stack not available. bdist_* commands may fail." + warnings.warn(msg) + if platform.python_implementation() == 'IronPython': + msg = "For best results, pass -X:Frames to enable call stack." + warnings.warn(msg) + return True + res = inspect.getouterframes(run_frame)[2] + caller, = res[:1] + info = inspect.getframeinfo(caller) + caller_module = caller.f_globals.get('__name__', '') + return ( + caller_module == 'distutils.dist' + and info.function == 'run_commands' + ) + + def do_egg_install(self): + + easy_install = self.distribution.get_command_class('easy_install') + + cmd = easy_install( + self.distribution, args="x", root=self.root, record=self.record, + ) + cmd.ensure_finalized() # finalize before bdist_egg munges install cmd + cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed + + # pick up setup-dir .egg files only: no .egg-info + cmd.package_index.scan(glob.glob('*.egg')) + + self.run_command('bdist_egg') + args = [self.distribution.get_command_obj('bdist_egg').egg_output] + + if setuptools.bootstrap_install_from: + # Bootstrap self-installation of setuptools + args.insert(0, setuptools.bootstrap_install_from) + + cmd.args = args + cmd.run(show_deprecation=False) + setuptools.bootstrap_install_from = None + + +# XXX Python 3.1 doesn't see _nc if this is inside the class +install.sub_commands = ( + [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] + + install.new_commands +) diff --git a/env/lib/python2.7/site-packages/setuptools/command/install_egg_info.py b/venv/Lib/site-packages/setuptools/command/install_egg_info.py similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/command/install_egg_info.py rename to venv/Lib/site-packages/setuptools/command/install_egg_info.py diff --git a/venv/Lib/site-packages/setuptools/command/install_lib.py b/venv/Lib/site-packages/setuptools/command/install_lib.py new file mode 100644 index 00000000..2e9d8757 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/install_lib.py @@ -0,0 +1,122 @@ +import os +import sys +from itertools import product, starmap +import distutils.command.install_lib as orig + + +class install_lib(orig.install_lib): + """Don't add compiled flags to filenames of non-Python files""" + + def run(self): + self.build() + outfiles = self.install() + if outfiles is not None: + # always compile, in case we have any extension stubs to deal with + self.byte_compile(outfiles) + + def get_exclusions(self): + """ + Return a collections.Sized collections.Container of paths to be + excluded for single_version_externally_managed installations. + """ + all_packages = ( + pkg + for ns_pkg in self._get_SVEM_NSPs() + for pkg in self._all_packages(ns_pkg) + ) + + excl_specs = product(all_packages, self._gen_exclusion_paths()) + return set(starmap(self._exclude_pkg_path, excl_specs)) + + def _exclude_pkg_path(self, pkg, exclusion_path): + """ + Given a package name and exclusion path within that package, + compute the full exclusion path. + """ + parts = pkg.split('.') + [exclusion_path] + return os.path.join(self.install_dir, *parts) + + @staticmethod + def _all_packages(pkg_name): + """ + >>> list(install_lib._all_packages('foo.bar.baz')) + ['foo.bar.baz', 'foo.bar', 'foo'] + """ + while pkg_name: + yield pkg_name + pkg_name, sep, child = pkg_name.rpartition('.') + + def _get_SVEM_NSPs(self): + """ + Get namespace packages (list) but only for + single_version_externally_managed installations and empty otherwise. + """ + # TODO: is it necessary to short-circuit here? i.e. what's the cost + # if get_finalized_command is called even when namespace_packages is + # False? + if not self.distribution.namespace_packages: + return [] + + install_cmd = self.get_finalized_command('install') + svem = install_cmd.single_version_externally_managed + + return self.distribution.namespace_packages if svem else [] + + @staticmethod + def _gen_exclusion_paths(): + """ + Generate file paths to be excluded for namespace packages (bytecode + cache files). + """ + # always exclude the package module itself + yield '__init__.py' + + yield '__init__.pyc' + yield '__init__.pyo' + + if not hasattr(sys, 'implementation'): + return + + base = os.path.join( + '__pycache__', '__init__.' 
+ sys.implementation.cache_tag) + yield base + '.pyc' + yield base + '.pyo' + yield base + '.opt-1.pyc' + yield base + '.opt-2.pyc' + + def copy_tree( + self, infile, outfile, + preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 + ): + assert preserve_mode and preserve_times and not preserve_symlinks + exclude = self.get_exclusions() + + if not exclude: + return orig.install_lib.copy_tree(self, infile, outfile) + + # Exclude namespace package __init__.py* files from the output + + from setuptools.archive_util import unpack_directory + from distutils import log + + outfiles = [] + + def pf(src, dst): + if dst in exclude: + log.warn("Skipping installation of %s (namespace package)", + dst) + return False + + log.info("copying %s -> %s", src, os.path.dirname(dst)) + outfiles.append(dst) + return dst + + unpack_directory(infile, outfile, pf) + return outfiles + + def get_outputs(self): + outputs = orig.install_lib.get_outputs(self) + exclude = self.get_exclusions() + if exclude: + return [f for f in outputs if f not in exclude] + return outputs diff --git a/venv/Lib/site-packages/setuptools/command/install_scripts.py b/venv/Lib/site-packages/setuptools/command/install_scripts.py new file mode 100644 index 00000000..9cd8eb06 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/install_scripts.py @@ -0,0 +1,69 @@ +from distutils import log +import distutils.command.install_scripts as orig +from distutils.errors import DistutilsModuleError +import os +import sys + +from pkg_resources import Distribution, PathMetadata, ensure_directory + + +class install_scripts(orig.install_scripts): + """Do normal script install, plus any egg_info wrapper scripts""" + + def initialize_options(self): + orig.install_scripts.initialize_options(self) + self.no_ep = False + + def run(self): + import setuptools.command.easy_install as ei + + self.run_command("egg_info") + if self.distribution.scripts: + orig.install_scripts.run(self) # run first to set up self.outfiles + else: + self.outfiles = [] + if self.no_ep: + # don't install entry point scripts into .egg file! + return + + ei_cmd = self.get_finalized_command("egg_info") + dist = Distribution( + ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), + ei_cmd.egg_name, ei_cmd.egg_version, + ) + bs_cmd = self.get_finalized_command('build_scripts') + exec_param = getattr(bs_cmd, 'executable', None) + try: + bw_cmd = self.get_finalized_command("bdist_wininst") + is_wininst = getattr(bw_cmd, '_is_running', False) + except (ImportError, DistutilsModuleError): + is_wininst = False + writer = ei.ScriptWriter + if is_wininst: + exec_param = "python.exe" + writer = ei.WindowsScriptWriter + if exec_param == sys.executable: + # In case the path to the Python executable contains a space, wrap + # it so it's not split up. 
+ exec_param = [exec_param] + # resolve the writer to the environment + writer = writer.best() + cmd = writer.command_spec_class.best().from_param(exec_param) + for args in writer.get_args(dist, cmd.as_header()): + self.write_script(*args) + + def write_script(self, script_name, contents, mode="t", *ignored): + """Write an executable file to the scripts directory""" + from setuptools.command.easy_install import chmod, current_umask + + log.info("Installing %s script to %s", script_name, self.install_dir) + target = os.path.join(self.install_dir, script_name) + self.outfiles.append(target) + + mask = current_umask() + if not self.dry_run: + ensure_directory(target) + f = open(target, "w" + mode) + f.write(contents) + f.close() + chmod(target, 0o777 - mask) diff --git a/env/lib/python2.7/site-packages/setuptools/command/launcher manifest.xml b/venv/Lib/site-packages/setuptools/command/launcher manifest.xml similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/command/launcher manifest.xml rename to venv/Lib/site-packages/setuptools/command/launcher manifest.xml diff --git a/env/lib/python2.7/site-packages/setuptools/command/py36compat.py b/venv/Lib/site-packages/setuptools/command/py36compat.py similarity index 98% rename from env/lib/python2.7/site-packages/setuptools/command/py36compat.py rename to venv/Lib/site-packages/setuptools/command/py36compat.py index 61063e75..343547a4 100644 --- a/env/lib/python2.7/site-packages/setuptools/command/py36compat.py +++ b/venv/Lib/site-packages/setuptools/command/py36compat.py @@ -3,8 +3,6 @@ from distutils.util import convert_path from distutils.command import sdist -from setuptools.extern.six.moves import filter - class sdist_add_defaults: """ @@ -132,5 +130,5 @@ def _add_defaults_scripts(self): if hasattr(sdist.sdist, '_add_defaults_standards'): # disable the functionality already available upstream - class sdist_add_defaults: + class sdist_add_defaults: # noqa pass diff --git a/venv/Lib/site-packages/setuptools/command/register.py b/venv/Lib/site-packages/setuptools/command/register.py new file mode 100644 index 00000000..b8266b9a --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/register.py @@ -0,0 +1,18 @@ +from distutils import log +import distutils.command.register as orig + +from setuptools.errors import RemovedCommandError + + +class register(orig.register): + """Formerly used to register packages on PyPI.""" + + def run(self): + msg = ( + "The register command has been removed, use twine to upload " + + "instead (https://pypi.org/p/twine)" + ) + + self.announce("ERROR: " + msg, log.ERROR) + + raise RemovedCommandError(msg) diff --git a/env/lib/python2.7/site-packages/setuptools/command/rotate.py b/venv/Lib/site-packages/setuptools/command/rotate.py similarity index 94% rename from env/lib/python2.7/site-packages/setuptools/command/rotate.py rename to venv/Lib/site-packages/setuptools/command/rotate.py index b89353f5..74795ba9 100644 --- a/env/lib/python2.7/site-packages/setuptools/command/rotate.py +++ b/venv/Lib/site-packages/setuptools/command/rotate.py @@ -4,8 +4,6 @@ import os import shutil -from setuptools.extern import six - from setuptools import Command @@ -36,9 +34,9 @@ def finalize_options(self): raise DistutilsOptionError("Must specify number of files to keep") try: self.keep = int(self.keep) - except ValueError: - raise DistutilsOptionError("--keep must be an integer") - if isinstance(self.match, six.string_types): + except ValueError as e: + raise DistutilsOptionError("--keep must be an 
integer") from e + if isinstance(self.match, str): self.match = [ convert_path(p.strip()) for p in self.match.split(',') ] diff --git a/env/lib/python2.7/site-packages/setuptools/command/saveopts.py b/venv/Lib/site-packages/setuptools/command/saveopts.py similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/command/saveopts.py rename to venv/Lib/site-packages/setuptools/command/saveopts.py diff --git a/venv/Lib/site-packages/setuptools/command/sdist.py b/venv/Lib/site-packages/setuptools/command/sdist.py new file mode 100644 index 00000000..4a014283 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/sdist.py @@ -0,0 +1,189 @@ +from distutils import log +import distutils.command.sdist as orig +import os +import sys +import io +import contextlib + +from .py36compat import sdist_add_defaults + +import pkg_resources + +_default_revctrl = list + + +def walk_revctrl(dirname=''): + """Find all files under revision control""" + for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): + for item in ep.load()(dirname): + yield item + + +class sdist(sdist_add_defaults, orig.sdist): + """Smart sdist that finds anything supported by revision control""" + + user_options = [ + ('formats=', None, + "formats for source distribution (comma-separated list)"), + ('keep-temp', 'k', + "keep the distribution tree around after creating " + + "archive file(s)"), + ('dist-dir=', 'd', + "directory to put the source distribution archive(s) in " + "[default: dist]"), + ] + + negative_opt = {} + + README_EXTENSIONS = ['', '.rst', '.txt', '.md'] + READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS) + + def run(self): + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + self.filelist = ei_cmd.filelist + self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt')) + self.check_readme() + + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + self.make_distribution() + + dist_files = getattr(self.distribution, 'dist_files', []) + for file in self.archive_files: + data = ('sdist', '', file) + if data not in dist_files: + dist_files.append(data) + + def initialize_options(self): + orig.sdist.initialize_options(self) + + self._default_to_gztar() + + def _default_to_gztar(self): + # only needed on Python prior to 3.6. + if sys.version_info >= (3, 6, 0, 'beta', 1): + return + self.formats = ['gztar'] + + def make_distribution(self): + """ + Workaround for #516 + """ + with self._remove_os_link(): + orig.sdist.make_distribution(self) + + @staticmethod + @contextlib.contextmanager + def _remove_os_link(): + """ + In a context, remove and restore os.link if it exists + """ + + class NoValue: + pass + + orig_val = getattr(os, 'link', NoValue) + try: + del os.link + except Exception: + pass + try: + yield + finally: + if orig_val is not NoValue: + setattr(os, 'link', orig_val) + + def _add_defaults_optional(self): + super()._add_defaults_optional() + if os.path.isfile('pyproject.toml'): + self.filelist.append('pyproject.toml') + + def _add_defaults_python(self): + """getting python files""" + if self.distribution.has_pure_modules(): + build_py = self.get_finalized_command('build_py') + self.filelist.extend(build_py.get_source_files()) + self._add_data_files(self._safe_data_files(build_py)) + + def _safe_data_files(self, build_py): + """ + Extracting data_files from build_py is known to cause + infinite recursion errors when `include_package_data` + is enabled, so suppress it in that case. 
+ """ + if self.distribution.include_package_data: + return () + return build_py.data_files + + def _add_data_files(self, data_files): + """ + Add data files as found in build_py.data_files. + """ + self.filelist.extend( + os.path.join(src_dir, name) + for _, src_dir, _, filenames in data_files + for name in filenames + ) + + def _add_defaults_data_files(self): + try: + super()._add_defaults_data_files() + except TypeError: + log.warn("data_files contains unexpected objects") + + def check_readme(self): + for f in self.READMES: + if os.path.exists(f): + return + else: + self.warn( + "standard file not found: should have one of " + + ', '.join(self.READMES) + ) + + def make_release_tree(self, base_dir, files): + orig.sdist.make_release_tree(self, base_dir, files) + + # Save any egg_info command line options used to create this sdist + dest = os.path.join(base_dir, 'setup.cfg') + if hasattr(os, 'link') and os.path.exists(dest): + # unlink and re-copy, since it might be hard-linked, and + # we don't want to change the source version + os.unlink(dest) + self.copy_file('setup.cfg', dest) + + self.get_finalized_command('egg_info').save_version_info(dest) + + def _manifest_is_not_generated(self): + # check for special comment used in 2.7.1 and higher + if not os.path.isfile(self.manifest): + return False + + with io.open(self.manifest, 'rb') as fp: + first_line = fp.readline() + return (first_line != + '# file GENERATED by distutils, do NOT edit\n'.encode()) + + def read_manifest(self): + """Read the manifest file (named by 'self.manifest') and use it to + fill in 'self.filelist', the list of files to include in the source + distribution. + """ + log.info("reading manifest file '%s'", self.manifest) + manifest = open(self.manifest, 'rb') + for line in manifest: + # The manifest must contain UTF-8. See #303. 
+ try: + line = line.decode('UTF-8') + except UnicodeDecodeError: + log.warn("%r not UTF-8 decodable -- skipping" % line) + continue + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue + self.filelist.append(line) + manifest.close() diff --git a/env/lib/python2.7/site-packages/setuptools/command/setopt.py b/venv/Lib/site-packages/setuptools/command/setopt.py similarity index 98% rename from env/lib/python2.7/site-packages/setuptools/command/setopt.py rename to venv/Lib/site-packages/setuptools/command/setopt.py index 7e57cc02..e18057c8 100644 --- a/env/lib/python2.7/site-packages/setuptools/command/setopt.py +++ b/venv/Lib/site-packages/setuptools/command/setopt.py @@ -3,8 +3,7 @@ from distutils.errors import DistutilsOptionError import distutils import os - -from setuptools.extern.six.moves import configparser +import configparser from setuptools import Command diff --git a/venv/Lib/site-packages/setuptools/command/test.py b/venv/Lib/site-packages/setuptools/command/test.py new file mode 100644 index 00000000..de4f3d11 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/test.py @@ -0,0 +1,274 @@ +import os +import operator +import sys +import contextlib +import itertools +import unittest +from distutils.errors import DistutilsError, DistutilsOptionError +from distutils import log +from unittest import TestLoader + +from pkg_resources import (resource_listdir, resource_exists, normalize_path, + working_set, _namespace_packages, evaluate_marker, + add_activation_listener, require, EntryPoint) +from setuptools import Command +from setuptools.extern.more_itertools import unique_everseen + + +class ScanningLoader(TestLoader): + + def __init__(self): + TestLoader.__init__(self) + self._visited = set() + + def loadTestsFromModule(self, module, pattern=None): + """Return a suite of all tests cases contained in the given module + + If the module is a package, load tests from all the modules in it. + If the module has an ``additional_tests`` function, call it and add + the return value to the tests. + """ + if module in self._visited: + return None + self._visited.add(module) + + tests = [] + tests.append(TestLoader.loadTestsFromModule(self, module)) + + if hasattr(module, "additional_tests"): + tests.append(module.additional_tests()) + + if hasattr(module, '__path__'): + for file in resource_listdir(module.__name__, ''): + if file.endswith('.py') and file != '__init__.py': + submodule = module.__name__ + '.' + file[:-3] + else: + if resource_exists(module.__name__, file + '/__init__.py'): + submodule = module.__name__ + '.' + file + else: + continue + tests.append(self.loadTestsFromName(submodule)) + + if len(tests) != 1: + return self.suiteClass(tests) + else: + return tests[0] # don't create a nested suite for only one return + + +# adapted from jaraco.classes.properties:NonDataProperty +class NonDataProperty: + def __init__(self, fget): + self.fget = fget + + def __get__(self, obj, objtype=None): + if obj is None: + return self + return self.fget(obj) + + +class test(Command): + """Command to run unit tests after in-place build""" + + description = "run unit tests after in-place build (deprecated)" + + user_options = [ + ('test-module=', 'm', "Run 'test_suite' in specified module"), + ('test-suite=', 's', + "Run single test, case or suite (e.g. 
'module.test_suite')"), + ('test-runner=', 'r', "Test runner to use"), + ] + + def initialize_options(self): + self.test_suite = None + self.test_module = None + self.test_loader = None + self.test_runner = None + + def finalize_options(self): + + if self.test_suite and self.test_module: + msg = "You may specify a module or a suite, but not both" + raise DistutilsOptionError(msg) + + if self.test_suite is None: + if self.test_module is None: + self.test_suite = self.distribution.test_suite + else: + self.test_suite = self.test_module + ".test_suite" + + if self.test_loader is None: + self.test_loader = getattr(self.distribution, 'test_loader', None) + if self.test_loader is None: + self.test_loader = "setuptools.command.test:ScanningLoader" + if self.test_runner is None: + self.test_runner = getattr(self.distribution, 'test_runner', None) + + @NonDataProperty + def test_args(self): + return list(self._test_args()) + + def _test_args(self): + if not self.test_suite and sys.version_info >= (2, 7): + yield 'discover' + if self.verbose: + yield '--verbose' + if self.test_suite: + yield self.test_suite + + def with_project_on_sys_path(self, func): + """ + Backward compatibility for project_on_sys_path context. + """ + with self.project_on_sys_path(): + func() + + @contextlib.contextmanager + def project_on_sys_path(self, include_dists=[]): + with_2to3 = getattr(self.distribution, 'use_2to3', False) + + if with_2to3: + # If we run 2to3 we can not do this inplace: + + # Ensure metadata is up-to-date + self.reinitialize_command('build_py', inplace=0) + self.run_command('build_py') + bpy_cmd = self.get_finalized_command("build_py") + build_path = normalize_path(bpy_cmd.build_lib) + + # Build extensions + self.reinitialize_command('egg_info', egg_base=build_path) + self.run_command('egg_info') + + self.reinitialize_command('build_ext', inplace=0) + self.run_command('build_ext') + else: + # Without 2to3 inplace works fine: + self.run_command('egg_info') + + # Build extensions in-place + self.reinitialize_command('build_ext', inplace=1) + self.run_command('build_ext') + + ei_cmd = self.get_finalized_command("egg_info") + + old_path = sys.path[:] + old_modules = sys.modules.copy() + + try: + project_path = normalize_path(ei_cmd.egg_base) + sys.path.insert(0, project_path) + working_set.__init__() + add_activation_listener(lambda dist: dist.activate()) + require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) + with self.paths_on_pythonpath([project_path]): + yield + finally: + sys.path[:] = old_path + sys.modules.clear() + sys.modules.update(old_modules) + working_set.__init__() + + @staticmethod + @contextlib.contextmanager + def paths_on_pythonpath(paths): + """ + Add the indicated paths to the head of the PYTHONPATH environment + variable so that subprocesses will also see the packages at + these paths. + + Do this in a context that restores the value on exit. 
+ """ + nothing = object() + orig_pythonpath = os.environ.get('PYTHONPATH', nothing) + current_pythonpath = os.environ.get('PYTHONPATH', '') + try: + prefix = os.pathsep.join(unique_everseen(paths)) + to_join = filter(None, [prefix, current_pythonpath]) + new_path = os.pathsep.join(to_join) + if new_path: + os.environ['PYTHONPATH'] = new_path + yield + finally: + if orig_pythonpath is nothing: + os.environ.pop('PYTHONPATH', None) + else: + os.environ['PYTHONPATH'] = orig_pythonpath + + @staticmethod + def install_dists(dist): + """ + Install the requirements indicated by self.distribution and + return an iterable of the dists that were built. + """ + ir_d = dist.fetch_build_eggs(dist.install_requires) + tr_d = dist.fetch_build_eggs(dist.tests_require or []) + er_d = dist.fetch_build_eggs( + v for k, v in dist.extras_require.items() + if k.startswith(':') and evaluate_marker(k[1:]) + ) + return itertools.chain(ir_d, tr_d, er_d) + + def run(self): + self.announce( + "WARNING: Testing via this command is deprecated and will be " + "removed in a future version. Users looking for a generic test " + "entry point independent of test runner are encouraged to use " + "tox.", + log.WARN, + ) + + installed_dists = self.install_dists(self.distribution) + + cmd = ' '.join(self._argv) + if self.dry_run: + self.announce('skipping "%s" (dry run)' % cmd) + return + + self.announce('running "%s"' % cmd) + + paths = map(operator.attrgetter('location'), installed_dists) + with self.paths_on_pythonpath(paths): + with self.project_on_sys_path(): + self.run_tests() + + def run_tests(self): + # Purge modules under test from sys.modules. The test loader will + # re-import them from the build location. Required when 2to3 is used + # with namespace packages. + if getattr(self.distribution, 'use_2to3', False): + module = self.test_suite.split('.')[0] + if module in _namespace_packages: + del_modules = [] + if module in sys.modules: + del_modules.append(module) + module += '.' + for name in sys.modules: + if name.startswith(module): + del_modules.append(name) + list(map(sys.modules.__delitem__, del_modules)) + + test = unittest.main( + None, None, self._argv, + testLoader=self._resolve_as_ep(self.test_loader), + testRunner=self._resolve_as_ep(self.test_runner), + exit=False, + ) + if not test.result.wasSuccessful(): + msg = 'Test failed: %s' % test.result + self.announce(msg, log.ERROR) + raise DistutilsError(msg) + + @property + def _argv(self): + return ['unittest'] + self.test_args + + @staticmethod + def _resolve_as_ep(val): + """ + Load the indicated attribute value, called, as a as if it were + specified as an entry point. 
+ """ + if val is None: + return + parsed = EntryPoint.parse("x=" + val) + return parsed.resolve()() diff --git a/venv/Lib/site-packages/setuptools/command/upload.py b/venv/Lib/site-packages/setuptools/command/upload.py new file mode 100644 index 00000000..ec7f81e2 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/command/upload.py @@ -0,0 +1,17 @@ +from distutils import log +from distutils.command import upload as orig + +from setuptools.errors import RemovedCommandError + + +class upload(orig.upload): + """Formerly used to upload packages to PyPI.""" + + def run(self): + msg = ( + "The upload command has been removed, use twine to upload " + + "instead (https://pypi.org/p/twine)" + ) + + self.announce("ERROR: " + msg, log.ERROR) + raise RemovedCommandError(msg) diff --git a/env/lib/python2.7/site-packages/setuptools/command/upload_docs.py b/venv/Lib/site-packages/setuptools/command/upload_docs.py similarity index 89% rename from env/lib/python2.7/site-packages/setuptools/command/upload_docs.py rename to venv/Lib/site-packages/setuptools/command/upload_docs.py index 07aa564a..845bff44 100644 --- a/env/lib/python2.7/site-packages/setuptools/command/upload_docs.py +++ b/venv/Lib/site-packages/setuptools/command/upload_docs.py @@ -2,7 +2,7 @@ """upload_docs Implements a Distutils 'upload_docs' subcommand (upload documentation to -PyPI's pythonhosted.org). +sites other than PyPi such as devpi). """ from base64 import standard_b64encode @@ -15,17 +15,15 @@ import shutil import itertools import functools - -from setuptools.extern import six -from setuptools.extern.six.moves import http_client, urllib +import http.client +import urllib.parse from pkg_resources import iter_entry_points from .upload import upload def _encode(s): - errors = 'surrogateescape' if six.PY3 else 'strict' - return s.encode('utf-8', errors) + return s.encode('utf-8', 'surrogateescape') class upload_docs(upload): @@ -33,7 +31,7 @@ class upload_docs(upload): # supported by Warehouse (and won't be). DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/' - description = 'Upload documentation to PyPI' + description = 'Upload documentation to sites other than PyPi such as devpi' user_options = [ ('repository=', 'r', @@ -61,7 +59,7 @@ def finalize_options(self): if self.upload_dir is None: if self.has_sphinx(): build_sphinx = self.get_finalized_command('build_sphinx') - self.target_dir = build_sphinx.builder_target_dir + self.target_dir = dict(build_sphinx.builder_target_dirs)['html'] else: build = self.get_finalized_command('build') self.target_dir = os.path.join(build.build_base, 'docs') @@ -69,7 +67,7 @@ def finalize_options(self): self.ensure_dirname('upload_dir') self.target_dir = self.upload_dir if 'pypi.python.org' in self.repository: - log.warn("Upload_docs command is deprecated. Use RTD instead.") + log.warn("Upload_docs command is deprecated for PyPi. 
Use RTD instead.") self.announce('Using upload directory %s' % self.target_dir) def create_zipfile(self, filename): @@ -127,8 +125,8 @@ def _build_multipart(cls, data): """ Build up the MIME payload for the POST data """ - boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b'\n--' + boundary + boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = b'\n--' + boundary.encode('ascii') end_boundary = sep_boundary + b'--' end_items = end_boundary, b"\n", builder = functools.partial( @@ -138,7 +136,7 @@ def _build_multipart(cls, data): part_groups = map(builder, data.items()) parts = itertools.chain.from_iterable(part_groups) body_items = itertools.chain(parts, end_items) - content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii') + content_type = 'multipart/form-data; boundary=%s' % boundary return b''.join(body_items), content_type def upload_file(self, filename): @@ -152,9 +150,7 @@ def upload_file(self, filename): } # set up the authentication credentials = _encode(self.username + ':' + self.password) - credentials = standard_b64encode(credentials) - if six.PY3: - credentials = credentials.decode('ascii') + credentials = standard_b64encode(credentials).decode('ascii') auth = "Basic " + credentials body, ct = self._build_multipart(data) @@ -169,9 +165,9 @@ def upload_file(self, filename): urllib.parse.urlparse(self.repository) assert not params and not query and not fragments if schema == 'http': - conn = http_client.HTTPConnection(netloc) + conn = http.client.HTTPConnection(netloc) elif schema == 'https': - conn = http_client.HTTPSConnection(netloc) + conn = http.client.HTTPSConnection(netloc) else: raise AssertionError("unsupported schema " + schema) diff --git a/venv/Lib/site-packages/setuptools/config.py b/venv/Lib/site-packages/setuptools/config.py new file mode 100644 index 00000000..44de7cf5 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/config.py @@ -0,0 +1,715 @@ +import ast +import io +import os +import sys + +import warnings +import functools +import importlib +from collections import defaultdict +from functools import partial +from functools import wraps +import contextlib + +from distutils.errors import DistutilsOptionError, DistutilsFileError +from setuptools.extern.packaging.version import LegacyVersion, parse +from setuptools.extern.packaging.specifiers import SpecifierSet + + +class StaticModule: + """ + Attempt to load the module by the name + """ + def __init__(self, name): + spec = importlib.util.find_spec(name) + with open(spec.origin) as strm: + src = strm.read() + module = ast.parse(src) + vars(self).update(locals()) + del self.self + + def __getattr__(self, attr): + try: + return next( + ast.literal_eval(statement.value) + for statement in self.module.body + if isinstance(statement, ast.Assign) + for target in statement.targets + if isinstance(target, ast.Name) and target.id == attr + ) + except Exception as e: + raise AttributeError( + "{self.name} has no attribute {attr}".format(**locals()) + ) from e + + +@contextlib.contextmanager +def patch_path(path): + """ + Add path to front of sys.path for the duration of the context. + """ + try: + sys.path.insert(0, path) + yield + finally: + sys.path.remove(path) + + +def read_configuration( + filepath, find_others=False, ignore_option_errors=False): + """Read given configuration file and returns options from it as a dict. + + :param str|unicode filepath: Path to configuration file + to get options from. 
+ + :param bool find_others: Whether to search for other configuration files + which could be on in various places. + + :param bool ignore_option_errors: Whether to silently ignore + options, values of which could not be resolved (e.g. due to exceptions + in directives such as file:, attr:, etc.). + If False exceptions are propagated as expected. + + :rtype: dict + """ + from setuptools.dist import Distribution, _Distribution + + filepath = os.path.abspath(filepath) + + if not os.path.isfile(filepath): + raise DistutilsFileError( + 'Configuration file %s does not exist.' % filepath) + + current_directory = os.getcwd() + os.chdir(os.path.dirname(filepath)) + + try: + dist = Distribution() + + filenames = dist.find_config_files() if find_others else [] + if filepath not in filenames: + filenames.append(filepath) + + _Distribution.parse_config_files(dist, filenames=filenames) + + handlers = parse_configuration( + dist, dist.command_options, + ignore_option_errors=ignore_option_errors) + + finally: + os.chdir(current_directory) + + return configuration_to_dict(handlers) + + +def _get_option(target_obj, key): + """ + Given a target object and option key, get that option from + the target object, either through a get_{key} method or + from an attribute directly. + """ + getter_name = 'get_{key}'.format(**locals()) + by_attribute = functools.partial(getattr, target_obj, key) + getter = getattr(target_obj, getter_name, by_attribute) + return getter() + + +def configuration_to_dict(handlers): + """Returns configuration data gathered by given handlers as a dict. + + :param list[ConfigHandler] handlers: Handlers list, + usually from parse_configuration() + + :rtype: dict + """ + config_dict = defaultdict(dict) + + for handler in handlers: + for option in handler.set_options: + value = _get_option(handler.target_obj, option) + config_dict[handler.section_prefix][option] = value + + return config_dict + + +def parse_configuration( + distribution, command_options, ignore_option_errors=False): + """Performs additional parsing of configuration options + for a distribution. + + Returns a list of used option handlers. + + :param Distribution distribution: + :param dict command_options: + :param bool ignore_option_errors: Whether to silently ignore + options, values of which could not be resolved (e.g. due to exceptions + in directives such as file:, attr:, etc.). + If False exceptions are propagated as expected. + :rtype: list + """ + options = ConfigOptionsHandler( + distribution, command_options, ignore_option_errors) + options.parse() + + meta = ConfigMetadataHandler( + distribution.metadata, command_options, ignore_option_errors, + distribution.package_dir) + meta.parse() + + return meta, options + + +class ConfigHandler: + """Handles metadata supplied in configuration files.""" + + section_prefix = None + """Prefix for config sections handled by this handler. + Must be provided by class heirs. + + """ + + aliases = {} + """Options aliases. + For compatibility with various packages. E.g.: d2to1 and pbr. + Note: `-` in keys is replaced with `_` by config parser. 
+ + """ + + def __init__(self, target_obj, options, ignore_option_errors=False): + sections = {} + + section_prefix = self.section_prefix + for section_name, section_options in options.items(): + if not section_name.startswith(section_prefix): + continue + + section_name = section_name.replace(section_prefix, '').strip('.') + sections[section_name] = section_options + + self.ignore_option_errors = ignore_option_errors + self.target_obj = target_obj + self.sections = sections + self.set_options = [] + + @property + def parsers(self): + """Metadata item name to parser function mapping.""" + raise NotImplementedError( + '%s must provide .parsers property' % self.__class__.__name__) + + def __setitem__(self, option_name, value): + unknown = tuple() + target_obj = self.target_obj + + # Translate alias into real name. + option_name = self.aliases.get(option_name, option_name) + + current_value = getattr(target_obj, option_name, unknown) + + if current_value is unknown: + raise KeyError(option_name) + + if current_value: + # Already inhabited. Skipping. + return + + skip_option = False + parser = self.parsers.get(option_name) + if parser: + try: + value = parser(value) + + except Exception: + skip_option = True + if not self.ignore_option_errors: + raise + + if skip_option: + return + + setter = getattr(target_obj, 'set_%s' % option_name, None) + if setter is None: + setattr(target_obj, option_name, value) + else: + setter(value) + + self.set_options.append(option_name) + + @classmethod + def _parse_list(cls, value, separator=','): + """Represents value as a list. + + Value is split either by separator (defaults to comma) or by lines. + + :param value: + :param separator: List items separator character. + :rtype: list + """ + if isinstance(value, list): # _get_parser_compound case + return value + + if '\n' in value: + value = value.splitlines() + else: + value = value.split(separator) + + return [chunk.strip() for chunk in value if chunk.strip()] + + @classmethod + def _parse_dict(cls, value): + """Represents value as a dict. + + :param value: + :rtype: dict + """ + separator = '=' + result = {} + for line in cls._parse_list(value): + key, sep, val = line.partition(separator) + if sep != separator: + raise DistutilsOptionError( + 'Unable to parse option value to dict: %s' % value) + result[key.strip()] = val.strip() + + return result + + @classmethod + def _parse_bool(cls, value): + """Represents value as boolean. + + :param value: + :rtype: bool + """ + value = value.lower() + return value in ('1', 'true', 'yes') + + @classmethod + def _exclude_files_parser(cls, key): + """Returns a parser function to make sure field inputs + are not files. + + Parses a value after getting the key so error messages are + more informative. + + :param key: + :rtype: callable + """ + def parser(value): + exclude_directive = 'file:' + if value.startswith(exclude_directive): + raise ValueError( + 'Only strings are accepted for the {0} field, ' + 'files are not accepted'.format(key)) + return value + return parser + + @classmethod + def _parse_file(cls, value): + """Represents value as a string, allowing including text + from nearest files using `file:` directive. + + Directive is sandboxed and won't reach anything outside + directory with setup.py. 
+ + Examples: + file: README.rst, CHANGELOG.md, src/file.txt + + :param str value: + :rtype: str + """ + include_directive = 'file:' + + if not isinstance(value, str): + return value + + if not value.startswith(include_directive): + return value + + spec = value[len(include_directive):] + filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) + return '\n'.join( + cls._read_file(path) + for path in filepaths + if (cls._assert_local(path) or True) + and os.path.isfile(path) + ) + + @staticmethod + def _assert_local(filepath): + if not filepath.startswith(os.getcwd()): + raise DistutilsOptionError( + '`file:` directive can not access %s' % filepath) + + @staticmethod + def _read_file(filepath): + with io.open(filepath, encoding='utf-8') as f: + return f.read() + + @classmethod + def _parse_attr(cls, value, package_dir=None): + """Represents value as a module attribute. + + Examples: + attr: package.attr + attr: package.module.attr + + :param str value: + :rtype: str + """ + attr_directive = 'attr:' + if not value.startswith(attr_directive): + return value + + attrs_path = value.replace(attr_directive, '').strip().split('.') + attr_name = attrs_path.pop() + + module_name = '.'.join(attrs_path) + module_name = module_name or '__init__' + + parent_path = os.getcwd() + if package_dir: + if attrs_path[0] in package_dir: + # A custom path was specified for the module we want to import + custom_path = package_dir[attrs_path[0]] + parts = custom_path.rsplit('/', 1) + if len(parts) > 1: + parent_path = os.path.join(os.getcwd(), parts[0]) + module_name = parts[1] + else: + module_name = custom_path + elif '' in package_dir: + # A custom parent directory was specified for all root modules + parent_path = os.path.join(os.getcwd(), package_dir['']) + + with patch_path(parent_path): + try: + # attempt to load value statically + return getattr(StaticModule(module_name), attr_name) + except Exception: + # fallback to simple import + module = importlib.import_module(module_name) + + return getattr(module, attr_name) + + @classmethod + def _get_parser_compound(cls, *parse_methods): + """Returns parser function to represents value as a list. + + Parses a value applying given methods one after another. + + :param parse_methods: + :rtype: callable + """ + def parse(value): + parsed = value + + for method in parse_methods: + parsed = method(parsed) + + return parsed + + return parse + + @classmethod + def _parse_section_to_dict(cls, section_options, values_parser=None): + """Parses section options into a dictionary. + + Optionally applies a given parser to values. + + :param dict section_options: + :param callable values_parser: + :rtype: dict + """ + value = {} + values_parser = values_parser or (lambda val: val) + for key, (_, val) in section_options.items(): + value[key] = values_parser(val) + return value + + def parse_section(self, section_options): + """Parses configuration file section. + + :param dict section_options: + """ + for (name, (_, value)) in section_options.items(): + try: + self[name] = value + + except KeyError: + pass # Keep silent for a new option may appear anytime. + + def parse(self): + """Parses configuration file items from one + or more related sections. + + """ + for section_name, section_options in self.sections.items(): + + method_postfix = '' + if section_name: # [section.option] variant + method_postfix = '_%s' % section_name + + section_parser_method = getattr( + self, + # Dots in section names are translated into dunderscores. 
+ ('parse_section%s' % method_postfix).replace('.', '__'), + None) + + if section_parser_method is None: + raise DistutilsOptionError( + 'Unsupported distribution option section: [%s.%s]' % ( + self.section_prefix, section_name)) + + section_parser_method(section_options) + + def _deprecated_config_handler(self, func, msg, warning_class): + """ this function will wrap around parameters that are deprecated + + :param msg: deprecation message + :param warning_class: class of warning exception to be raised + :param func: function to be wrapped around + """ + @wraps(func) + def config_handler(*args, **kwargs): + warnings.warn(msg, warning_class) + return func(*args, **kwargs) + + return config_handler + + +class ConfigMetadataHandler(ConfigHandler): + + section_prefix = 'metadata' + + aliases = { + 'home_page': 'url', + 'summary': 'description', + 'classifier': 'classifiers', + 'platform': 'platforms', + } + + strict_mode = False + """We need to keep it loose, to be partially compatible with + `pbr` and `d2to1` packages which also uses `metadata` section. + + """ + + def __init__(self, target_obj, options, ignore_option_errors=False, + package_dir=None): + super(ConfigMetadataHandler, self).__init__(target_obj, options, + ignore_option_errors) + self.package_dir = package_dir + + @property + def parsers(self): + """Metadata item name to parser function mapping.""" + parse_list = self._parse_list + parse_file = self._parse_file + parse_dict = self._parse_dict + exclude_files_parser = self._exclude_files_parser + + return { + 'platforms': parse_list, + 'keywords': parse_list, + 'provides': parse_list, + 'requires': self._deprecated_config_handler( + parse_list, + "The requires parameter is deprecated, please use " + "install_requires for runtime dependencies.", + DeprecationWarning), + 'obsoletes': parse_list, + 'classifiers': self._get_parser_compound(parse_file, parse_list), + 'license': exclude_files_parser('license'), + 'license_file': self._deprecated_config_handler( + exclude_files_parser('license_file'), + "The license_file parameter is deprecated, " + "use license_files instead.", + DeprecationWarning), + 'license_files': parse_list, + 'description': parse_file, + 'long_description': parse_file, + 'version': self._parse_version, + 'project_urls': parse_dict, + } + + def _parse_version(self, value): + """Parses `version` option value. 
+ + :param value: + :rtype: str + + """ + version = self._parse_file(value) + + if version != value: + version = version.strip() + # Be strict about versions loaded from file because it's easy to + # accidentally include newlines and other unintended content + if isinstance(parse(version), LegacyVersion): + tmpl = ( + 'Version loaded from {value} does not ' + 'comply with PEP 440: {version}' + ) + raise DistutilsOptionError(tmpl.format(**locals())) + + return version + + version = self._parse_attr(value, self.package_dir) + + if callable(version): + version = version() + + if not isinstance(version, str): + if hasattr(version, '__iter__'): + version = '.'.join(map(str, version)) + else: + version = '%s' % version + + return version + + +class ConfigOptionsHandler(ConfigHandler): + + section_prefix = 'options' + + @property + def parsers(self): + """Metadata item name to parser function mapping.""" + parse_list = self._parse_list + parse_list_semicolon = partial(self._parse_list, separator=';') + parse_bool = self._parse_bool + parse_dict = self._parse_dict + parse_cmdclass = self._parse_cmdclass + + return { + 'zip_safe': parse_bool, + 'use_2to3': parse_bool, + 'include_package_data': parse_bool, + 'package_dir': parse_dict, + 'use_2to3_fixers': parse_list, + 'use_2to3_exclude_fixers': parse_list, + 'convert_2to3_doctests': parse_list, + 'scripts': parse_list, + 'eager_resources': parse_list, + 'dependency_links': parse_list, + 'namespace_packages': parse_list, + 'install_requires': parse_list_semicolon, + 'setup_requires': parse_list_semicolon, + 'tests_require': parse_list_semicolon, + 'packages': self._parse_packages, + 'entry_points': self._parse_file, + 'py_modules': parse_list, + 'python_requires': SpecifierSet, + 'cmdclass': parse_cmdclass, + } + + def _parse_cmdclass(self, value): + def resolve_class(qualified_class_name): + idx = qualified_class_name.rfind('.') + class_name = qualified_class_name[idx+1:] + pkg_name = qualified_class_name[:idx] + + module = __import__(pkg_name) + + return getattr(module, class_name) + + return { + k: resolve_class(v) + for k, v in self._parse_dict(value).items() + } + + def _parse_packages(self, value): + """Parses `packages` option value. + + :param value: + :rtype: list + """ + find_directives = ['find:', 'find_namespace:'] + trimmed_value = value.strip() + + if trimmed_value not in find_directives: + return self._parse_list(value) + + findns = trimmed_value == find_directives[1] + + # Read function arguments from a dedicated section. + find_kwargs = self.parse_section_packages__find( + self.sections.get('packages.find', {})) + + if findns: + from setuptools import find_namespace_packages as find_packages + else: + from setuptools import find_packages + + return find_packages(**find_kwargs) + + def parse_section_packages__find(self, section_options): + """Parses `packages.find` configuration file section. + + To be used in conjunction with _parse_packages(). + + :param dict section_options: + """ + section_data = self._parse_section_to_dict( + section_options, self._parse_list) + + valid_keys = ['where', 'include', 'exclude'] + + find_kwargs = dict( + [(k, v) for k, v in section_data.items() if k in valid_keys and v]) + + where = find_kwargs.get('where') + if where is not None: + find_kwargs['where'] = where[0] # cast list to single val + + return find_kwargs + + def parse_section_entry_points(self, section_options): + """Parses `entry_points` configuration file section. 
+ + :param dict section_options: + """ + parsed = self._parse_section_to_dict(section_options, self._parse_list) + self['entry_points'] = parsed + + def _parse_package_data(self, section_options): + parsed = self._parse_section_to_dict(section_options, self._parse_list) + + root = parsed.get('*') + if root: + parsed[''] = root + del parsed['*'] + + return parsed + + def parse_section_package_data(self, section_options): + """Parses `package_data` configuration file section. + + :param dict section_options: + """ + self['package_data'] = self._parse_package_data(section_options) + + def parse_section_exclude_package_data(self, section_options): + """Parses `exclude_package_data` configuration file section. + + :param dict section_options: + """ + self['exclude_package_data'] = self._parse_package_data( + section_options) + + def parse_section_extras_require(self, section_options): + """Parses `extras_require` configuration file section. + + :param dict section_options: + """ + parse_list = partial(self._parse_list, separator=';') + self['extras_require'] = self._parse_section_to_dict( + section_options, parse_list) + + def parse_section_data_files(self, section_options): + """Parses `data_files` configuration file section. + + :param dict section_options: + """ + parsed = self._parse_section_to_dict(section_options, self._parse_list) + self['data_files'] = [(k, v) for k, v in parsed.items()] diff --git a/venv/Lib/site-packages/setuptools/dep_util.py b/venv/Lib/site-packages/setuptools/dep_util.py new file mode 100644 index 00000000..521eb716 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/dep_util.py @@ -0,0 +1,25 @@ +from distutils.dep_util import newer_group + + +# yes, this is was almost entirely copy-pasted from +# 'newer_pairwise()', this is just another convenience +# function. +def newer_pairwise_group(sources_groups, targets): + """Walk both arguments in parallel, testing if each source group is newer + than its corresponding target. Returns a pair of lists (sources_groups, + targets) where sources is newer than target, according to the semantics + of 'newer_group()'. + """ + if len(sources_groups) != len(targets): + raise ValueError( + "'sources_group' and 'targets' must be the same length") + + # build a pair of lists (sources_groups, targets) where source is newer + n_sources = [] + n_targets = [] + for i in range(len(sources_groups)): + if newer_group(sources_groups[i], targets[i]): + n_sources.append(sources_groups[i]) + n_targets.append(targets[i]) + + return n_sources, n_targets diff --git a/env/lib/python2.7/site-packages/setuptools/depends.py b/venv/Lib/site-packages/setuptools/depends.py similarity index 82% rename from env/lib/python2.7/site-packages/setuptools/depends.py rename to venv/Lib/site-packages/setuptools/depends.py index 45e7052d..8be6928a 100644 --- a/env/lib/python2.7/site-packages/setuptools/depends.py +++ b/venv/Lib/site-packages/setuptools/depends.py @@ -1,10 +1,11 @@ import sys -import imp import marshal +import contextlib +import dis from distutils.version import StrictVersion -from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN -from .py33compat import Bytecode +from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE +from . 
import _imp __all__ = [ @@ -15,7 +16,8 @@ class Require: """A prerequisite to building or installing a distribution""" - def __init__(self, name, requested_version, module, homepage='', + def __init__( + self, name, requested_version, module, homepage='', attribute=None, format=None): if format is None and requested_version is not None: @@ -79,23 +81,15 @@ def is_current(self, paths=None): return self.version_ok(version) -def find_module(module, paths=None): - """Just like 'imp.find_module()', but with package support""" - - parts = module.split('.') - - while parts: - part = parts.pop(0) - f, path, (suffix, mode, kind) = info = imp.find_module(part, paths) - - if kind == PKG_DIRECTORY: - parts = parts or ['__init__'] - paths = [path] - - elif parts: - raise ImportError("Can't find %r in %s" % (parts, module)) +def maybe_close(f): + @contextlib.contextmanager + def empty(): + yield + return + if not f: + return empty() - return info + return contextlib.closing(f) def get_module_constant(module, symbol, default=-1, paths=None): @@ -106,28 +100,23 @@ def get_module_constant(module, symbol, default=-1, paths=None): constant. Otherwise, return 'default'.""" try: - f, path, (suffix, mode, kind) = find_module(module, paths) + f, path, (suffix, mode, kind) = info = find_module(module, paths) except ImportError: # Module doesn't exist return None - try: + with maybe_close(f): if kind == PY_COMPILED: f.read(8) # skip magic & date code = marshal.load(f) elif kind == PY_FROZEN: - code = imp.get_frozen_object(module) + code = _imp.get_frozen_object(module, paths) elif kind == PY_SOURCE: code = compile(f.read(), path, 'exec') else: # Not something we can parse; we'll have to import it. :( - if module not in sys.modules: - imp.load_module(module, f, path, (suffix, mode, kind)) - return getattr(sys.modules[module], symbol, None) - - finally: - if f: - f.close() + imported = _imp.get_module(module, paths, info) + return getattr(imported, symbol, None) return extract_constant(code, symbol, default) @@ -156,7 +145,7 @@ def extract_constant(code, symbol, default=-1): const = default - for byte_code in Bytecode(code): + for byte_code in dis.Bytecode(code): op = byte_code.opcode arg = byte_code.arg diff --git a/venv/Lib/site-packages/setuptools/dist.py b/venv/Lib/site-packages/setuptools/dist.py new file mode 100644 index 00000000..6e3f25f9 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/dist.py @@ -0,0 +1,1115 @@ +# -*- coding: utf-8 -*- +__all__ = ['Distribution'] + +import io +import sys +import re +import os +import warnings +import numbers +import distutils.log +import distutils.core +import distutils.cmd +import distutils.dist +import distutils.command +from distutils.util import strtobool +from distutils.debug import DEBUG +from distutils.fancy_getopt import translate_longopt +from glob import iglob +import itertools +import textwrap +from typing import List, Optional, TYPE_CHECKING + +from collections import defaultdict +from email import message_from_file + +from distutils.errors import DistutilsOptionError, DistutilsSetupError +from distutils.util import rfc822_escape +from distutils.version import StrictVersion + +from setuptools.extern import packaging +from setuptools.extern import ordered_set +from setuptools.extern.more_itertools import unique_everseen + +from . 
import SetuptoolsDeprecationWarning + +import setuptools +import setuptools.command +from setuptools import windows_support +from setuptools.monkey import get_unpatched +from setuptools.config import parse_configuration +import pkg_resources + +if TYPE_CHECKING: + from email.message import Message + +__import__('setuptools.extern.packaging.specifiers') +__import__('setuptools.extern.packaging.version') + + +def _get_unpatched(cls): + warnings.warn("Do not call this function", DistDeprecationWarning) + return get_unpatched(cls) + + +def get_metadata_version(self): + mv = getattr(self, 'metadata_version', None) + if mv is None: + mv = StrictVersion('2.1') + self.metadata_version = mv + return mv + + +def rfc822_unescape(content: str) -> str: + """Reverse RFC-822 escaping by removing leading whitespaces from content.""" + lines = content.splitlines() + if len(lines) == 1: + return lines[0].lstrip() + return '\n'.join( + (lines[0].lstrip(), + textwrap.dedent('\n'.join(lines[1:])))) + + +def _read_field_from_msg(msg: "Message", field: str) -> Optional[str]: + """Read Message header field.""" + value = msg[field] + if value == 'UNKNOWN': + return None + return value + + +def _read_field_unescaped_from_msg(msg: "Message", field: str) -> Optional[str]: + """Read Message header field and apply rfc822_unescape.""" + value = _read_field_from_msg(msg, field) + if value is None: + return value + return rfc822_unescape(value) + + +def _read_list_from_msg(msg: "Message", field: str) -> Optional[List[str]]: + """Read Message header field and return all results as list.""" + values = msg.get_all(field, None) + if values == []: + return None + return values + + +def _read_payload_from_msg(msg: "Message") -> Optional[str]: + value = msg.get_payload().strip() + if value == 'UNKNOWN': + return None + return value + + +def read_pkg_file(self, file): + """Reads the metadata values from a file object.""" + msg = message_from_file(file) + + self.metadata_version = StrictVersion(msg['metadata-version']) + self.name = _read_field_from_msg(msg, 'name') + self.version = _read_field_from_msg(msg, 'version') + self.description = _read_field_from_msg(msg, 'summary') + # we are filling author only. 
+ self.author = _read_field_from_msg(msg, 'author') + self.maintainer = None + self.author_email = _read_field_from_msg(msg, 'author-email') + self.maintainer_email = None + self.url = _read_field_from_msg(msg, 'home-page') + self.license = _read_field_unescaped_from_msg(msg, 'license') + + if 'download-url' in msg: + self.download_url = _read_field_from_msg(msg, 'download-url') + else: + self.download_url = None + + self.long_description = _read_field_unescaped_from_msg(msg, 'description') + if self.long_description is None and self.metadata_version >= StrictVersion('2.1'): + self.long_description = _read_payload_from_msg(msg) + self.description = _read_field_from_msg(msg, 'summary') + + if 'keywords' in msg: + self.keywords = _read_field_from_msg(msg, 'keywords').split(',') + + self.platforms = _read_list_from_msg(msg, 'platform') + self.classifiers = _read_list_from_msg(msg, 'classifier') + + # PEP 314 - these fields only exist in 1.1 + if self.metadata_version == StrictVersion('1.1'): + self.requires = _read_list_from_msg(msg, 'requires') + self.provides = _read_list_from_msg(msg, 'provides') + self.obsoletes = _read_list_from_msg(msg, 'obsoletes') + else: + self.requires = None + self.provides = None + self.obsoletes = None + + self.license_files = _read_list_from_msg(msg, 'license-file') + + +def single_line(val): + # quick and dirty validation for description pypa/setuptools#1390 + if '\n' in val: + # TODO after 2021-07-31: Replace with `raise ValueError("newlines not allowed")` + warnings.warn("newlines not allowed and will break in the future") + val = val.replace('\n', ' ') + return val + + +# Based on Python 3.5 version +def write_pkg_file(self, file): # noqa: C901 # is too complex (14) # FIXME + """Write the PKG-INFO format data to a file object. 
+ """ + version = self.get_metadata_version() + + def write_field(key, value): + file.write("%s: %s\n" % (key, value)) + + write_field('Metadata-Version', str(version)) + write_field('Name', self.get_name()) + write_field('Version', self.get_version()) + write_field('Summary', single_line(self.get_description())) + write_field('Home-page', self.get_url()) + + optional_fields = ( + ('Author', 'author'), + ('Author-email', 'author_email'), + ('Maintainer', 'maintainer'), + ('Maintainer-email', 'maintainer_email'), + ) + + for field, attr in optional_fields: + attr_val = getattr(self, attr, None) + if attr_val is not None: + write_field(field, attr_val) + + license = rfc822_escape(self.get_license()) + write_field('License', license) + if self.download_url: + write_field('Download-URL', self.download_url) + for project_url in self.project_urls.items(): + write_field('Project-URL', '%s, %s' % project_url) + + keywords = ','.join(self.get_keywords()) + if keywords: + write_field('Keywords', keywords) + + for platform in self.get_platforms(): + write_field('Platform', platform) + + self._write_list(file, 'Classifier', self.get_classifiers()) + + # PEP 314 + self._write_list(file, 'Requires', self.get_requires()) + self._write_list(file, 'Provides', self.get_provides()) + self._write_list(file, 'Obsoletes', self.get_obsoletes()) + + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + write_field('Requires-Python', self.python_requires) + + # PEP 566 + if self.long_description_content_type: + write_field( + 'Description-Content-Type', + self.long_description_content_type + ) + if self.provides_extras: + for extra in self.provides_extras: + write_field('Provides-Extra', extra) + + self._write_list(file, 'License-File', self.license_files or []) + + file.write("\n%s\n\n" % self.get_long_description()) + + +sequence = tuple, list + + +def check_importable(dist, attr, value): + try: + ep = pkg_resources.EntryPoint.parse('x=' + value) + assert not ep.extras + except (TypeError, ValueError, AttributeError, AssertionError) as e: + raise DistutilsSetupError( + "%r must be importable 'module:attrs' string (got %r)" + % (attr, value) + ) from e + + +def assert_string_list(dist, attr, value): + """Verify that value is a string list""" + try: + # verify that value is a list or tuple to exclude unordered + # or single-use iterables + assert isinstance(value, (list, tuple)) + # verify that elements of value are strings + assert ''.join(value) != value + except (TypeError, ValueError, AttributeError, AssertionError) as e: + raise DistutilsSetupError( + "%r must be a list of strings (got %r)" % (attr, value) + ) from e + + +def check_nsp(dist, attr, value): + """Verify that namespace packages are valid""" + ns_packages = value + assert_string_list(dist, attr, ns_packages) + for nsp in ns_packages: + if not dist.has_contents_for(nsp): + raise DistutilsSetupError( + "Distribution contains no modules or packages for " + + "namespace package %r" % nsp + ) + parent, sep, child = nsp.rpartition('.') + if parent and parent not in ns_packages: + distutils.log.warn( + "WARNING: %r is declared as a package namespace, but %r" + " is not: please correct this in setup.py", nsp, parent + ) + + +def check_extras(dist, attr, value): + """Verify that extras_require mapping is valid""" + try: + list(itertools.starmap(_check_extra, value.items())) + except (TypeError, ValueError, AttributeError) as e: + raise DistutilsSetupError( + "'extras_require' must be a dictionary whose values are " + "strings or lists 
of strings containing valid project/version " + "requirement specifiers." + ) from e + + +def _check_extra(extra, reqs): + name, sep, marker = extra.partition(':') + if marker and pkg_resources.invalid_marker(marker): + raise DistutilsSetupError("Invalid environment marker: " + marker) + list(pkg_resources.parse_requirements(reqs)) + + +def assert_bool(dist, attr, value): + """Verify that value is True, False, 0, or 1""" + if bool(value) != value: + tmpl = "{attr!r} must be a boolean value (got {value!r})" + raise DistutilsSetupError(tmpl.format(attr=attr, value=value)) + + +def check_requirements(dist, attr, value): + """Verify that install_requires is a valid requirements list""" + try: + list(pkg_resources.parse_requirements(value)) + if isinstance(value, (dict, set)): + raise TypeError("Unordered types are not allowed") + except (TypeError, ValueError) as error: + tmpl = ( + "{attr!r} must be a string or list of strings " + "containing valid project/version requirement specifiers; {error}" + ) + raise DistutilsSetupError( + tmpl.format(attr=attr, error=error) + ) from error + + +def check_specifier(dist, attr, value): + """Verify that value is a valid version specifier""" + try: + packaging.specifiers.SpecifierSet(value) + except (packaging.specifiers.InvalidSpecifier, AttributeError) as error: + tmpl = ( + "{attr!r} must be a string " + "containing valid version specifiers; {error}" + ) + raise DistutilsSetupError( + tmpl.format(attr=attr, error=error) + ) from error + + +def check_entry_points(dist, attr, value): + """Verify that entry_points map is parseable""" + try: + pkg_resources.EntryPoint.parse_map(value) + except ValueError as e: + raise DistutilsSetupError(e) from e + + +def check_test_suite(dist, attr, value): + if not isinstance(value, str): + raise DistutilsSetupError("test_suite must be a string") + + +def check_package_data(dist, attr, value): + """Verify that value is a dictionary of package names to glob lists""" + if not isinstance(value, dict): + raise DistutilsSetupError( + "{!r} must be a dictionary mapping package names to lists of " + "string wildcard patterns".format(attr)) + for k, v in value.items(): + if not isinstance(k, str): + raise DistutilsSetupError( + "keys of {!r} dict must be strings (got {!r})" + .format(attr, k) + ) + assert_string_list(dist, 'values of {!r} dict'.format(attr), v) + + +def check_packages(dist, attr, value): + for pkgname in value: + if not re.match(r'\w+(\.\w+)*', pkgname): + distutils.log.warn( + "WARNING: %r not a valid package name; please use only " + ".-separated package names in setup.py", pkgname + ) + + +_Distribution = get_unpatched(distutils.core.Distribution) + + +class Distribution(_Distribution): + """Distribution with support for tests and package data + + This is an enhanced version of 'distutils.dist.Distribution' that + effectively adds the following new optional keyword arguments to 'setup()': + + 'install_requires' -- a string or sequence of strings specifying project + versions that the distribution requires when installed, in the format + used by 'pkg_resources.require()'. They will be installed + automatically when the package is installed. If you wish to use + packages that are not available in PyPI, or want to give your users an + alternate download location, you can add a 'find_links' option to the + '[easy_install]' section of your project's 'setup.cfg' file, and then + setuptools will scan the listed web pages for links that satisfy the + requirements. 
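# For illustration only (not part of the vendored module): the check_* hooks
# above are what setuptools runs against setup() keywords. They can be called
# directly; the dist argument is unused by these two validators, so None is
# passed here. The requirement strings are examples only.
from distutils.errors import DistutilsSetupError
from setuptools.dist import check_requirements, check_specifier

check_specifier(None, "python_requires", ">=3.6")
check_requirements(None, "install_requires",
                   ["selenium>=3.0", "pywin32; sys_platform == 'win32'"])

try:
    check_specifier(None, "python_requires", "not a specifier")
except DistutilsSetupError as exc:
    print(exc)  # reports that a valid version specifier string is required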
+ + 'extras_require' -- a dictionary mapping names of optional "extras" to the + additional requirement(s) that using those extras incurs. For example, + this:: + + extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) + + indicates that the distribution can optionally provide an extra + capability called "reST", but it can only be used if docutils and + reSTedit are installed. If the user installs your package using + EasyInstall and requests one of your extras, the corresponding + additional requirements will be installed if needed. + + 'test_suite' -- the name of a test suite to run for the 'test' command. + If the user runs 'python setup.py test', the package will be installed, + and the named test suite will be run. The format is the same as + would be used on a 'unittest.py' command line. That is, it is the + dotted name of an object to import and call to generate a test suite. + + 'package_data' -- a dictionary mapping package names to lists of filenames + or globs to use to find data files contained in the named packages. + If the dictionary has filenames or globs listed under '""' (the empty + string), those names will be searched for in every package, in addition + to any names for the specific package. Data files found using these + names/globs will be installed along with the package, in the same + location as the package. Note that globs are allowed to reference + the contents of non-package subdirectories, as long as you use '/' as + a path separator. (Globs are automatically converted to + platform-specific paths at runtime.) + + In addition to these new keywords, this class also has several new methods + for manipulating the distribution's contents. For example, the 'include()' + and 'exclude()' methods can be thought of as in-place add and subtract + commands that add or remove packages, modules, extensions, and so on from + the distribution. + """ + + _DISTUTILS_UNSUPPORTED_METADATA = { + 'long_description_content_type': lambda: None, + 'project_urls': dict, + 'provides_extras': ordered_set.OrderedSet, + 'license_file': lambda: None, + 'license_files': lambda: None, + } + + _patched_dist = None + + def patch_missing_pkg_info(self, attrs): + # Fake up a replacement for the data that would normally come from + # PKG-INFO, but which might not yet be built if this is a fresh + # checkout. + # + if not attrs or 'name' not in attrs or 'version' not in attrs: + return + key = pkg_resources.safe_name(str(attrs['name'])).lower() + dist = pkg_resources.working_set.by_key.get(key) + if dist is not None and not dist.has_metadata('PKG-INFO'): + dist._version = pkg_resources.safe_version(str(attrs['version'])) + self._patched_dist = dist + + def __init__(self, attrs=None): + have_package_data = hasattr(self, "package_data") + if not have_package_data: + self.package_data = {} + attrs = attrs or {} + self.dist_files = [] + # Filter-out setuptools' specific options. 
+ self.src_root = attrs.pop("src_root", None) + self.patch_missing_pkg_info(attrs) + self.dependency_links = attrs.pop('dependency_links', []) + self.setup_requires = attrs.pop('setup_requires', []) + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + vars(self).setdefault(ep.name, None) + _Distribution.__init__(self, { + k: v for k, v in attrs.items() + if k not in self._DISTUTILS_UNSUPPORTED_METADATA + }) + + self._set_metadata_defaults(attrs) + + self.metadata.version = self._normalize_version( + self._validate_version(self.metadata.version)) + self._finalize_requires() + + def _set_metadata_defaults(self, attrs): + """ + Fill-in missing metadata fields not supported by distutils. + Some fields may have been set by other tools (e.g. pbr). + Those fields (vars(self.metadata)) take precedence to + supplied attrs. + """ + for option, default in self._DISTUTILS_UNSUPPORTED_METADATA.items(): + vars(self.metadata).setdefault(option, attrs.get(option, default())) + + @staticmethod + def _normalize_version(version): + if isinstance(version, setuptools.sic) or version is None: + return version + + normalized = str(packaging.version.Version(version)) + if version != normalized: + tmpl = "Normalizing '{version}' to '{normalized}'" + warnings.warn(tmpl.format(**locals())) + return normalized + return version + + @staticmethod + def _validate_version(version): + if isinstance(version, numbers.Number): + # Some people apparently take "version number" too literally :) + version = str(version) + + if version is not None: + try: + packaging.version.Version(version) + except (packaging.version.InvalidVersion, TypeError): + warnings.warn( + "The version specified (%r) is an invalid version, this " + "may not work as expected with newer versions of " + "setuptools, pip, and PyPI. Please see PEP 440 for more " + "details." % version + ) + return setuptools.sic(version) + return version + + def _finalize_requires(self): + """ + Set `metadata.python_requires` and fix environment markers + in `install_requires` and `extras_require`. + """ + if getattr(self, 'python_requires', None): + self.metadata.python_requires = self.python_requires + + if getattr(self, 'extras_require', None): + for extra in self.extras_require.keys(): + # Since this gets called multiple times at points where the + # keys have become 'converted' extras, ensure that we are only + # truly adding extras we haven't seen before here. + extra = extra.split(':')[0] + if extra: + self.metadata.provides_extras.add(extra) + + self._convert_extras_requirements() + self._move_install_requirements_markers() + + def _convert_extras_requirements(self): + """ + Convert requirements in `extras_require` of the form + `"extra": ["barbazquux; {marker}"]` to + `"extra:{marker}": ["barbazquux"]`. + """ + spec_ext_reqs = getattr(self, 'extras_require', None) or {} + self._tmp_extras_require = defaultdict(list) + for section, v in spec_ext_reqs.items(): + # Do not strip empty sections. + self._tmp_extras_require[section] + for r in pkg_resources.parse_requirements(v): + suffix = self._suffix_for(r) + self._tmp_extras_require[section + suffix].append(r) + + @staticmethod + def _suffix_for(req): + """ + For a requirement, return the 'extras_require' suffix for + that requirement. + """ + return ':' + str(req.marker) if req.marker else '' + + def _move_install_requirements_markers(self): + """ + Move requirements in `install_requires` that are using environment + markers `extras_require`. 
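# For illustration only (not part of the vendored module): the conversion above
# moves an environment marker out of the requirement string and into a
# ":marker" suffix on the extras_require key. The same step in isolation, with
# an example requirement:
import pkg_resources

(req,) = pkg_resources.parse_requirements('uiautomation; sys_platform == "win32"')
suffix = ':' + str(req.marker) if req.marker else ''
print(suffix)      # :sys_platform == "win32"
req.marker = None  # what _clean_req does before the value is written back
print(str(req))    # uiautomation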
+ """ + + # divide the install_requires into two sets, simple ones still + # handled by install_requires and more complex ones handled + # by extras_require. + + def is_simple_req(req): + return not req.marker + + spec_inst_reqs = getattr(self, 'install_requires', None) or () + inst_reqs = list(pkg_resources.parse_requirements(spec_inst_reqs)) + simple_reqs = filter(is_simple_req, inst_reqs) + complex_reqs = itertools.filterfalse(is_simple_req, inst_reqs) + self.install_requires = list(map(str, simple_reqs)) + + for r in complex_reqs: + self._tmp_extras_require[':' + str(r.marker)].append(r) + self.extras_require = dict( + (k, [str(r) for r in map(self._clean_req, v)]) + for k, v in self._tmp_extras_require.items() + ) + + def _clean_req(self, req): + """ + Given a Requirement, remove environment markers and return it. + """ + req.marker = None + return req + + def _finalize_license_files(self): + """Compute names of all license files which should be included.""" + license_files: Optional[List[str]] = self.metadata.license_files + patterns: List[str] = license_files if license_files else [] + + license_file: Optional[str] = self.metadata.license_file + if license_file and license_file not in patterns: + patterns.append(license_file) + + if license_files is None and license_file is None: + # Default patterns match the ones wheel uses + # See https://wheel.readthedocs.io/en/stable/user_guide.html + # -> 'Including license files in the generated wheel file' + patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*') + + self.metadata.license_files = list( + unique_everseen(self._expand_patterns(patterns))) + + @staticmethod + def _expand_patterns(patterns): + return ( + path + for pattern in patterns + for path in iglob(pattern) + if not path.endswith('~') + and os.path.isfile(path) + ) + + # FIXME: 'Distribution._parse_config_files' is too complex (14) + def _parse_config_files(self, filenames=None): # noqa: C901 + """ + Adapted from distutils.dist.Distribution.parse_config_files, + this method provides the same functionality in subtly-improved + ways. + """ + from configparser import ConfigParser + + # Ignore install directory options if we have a venv + ignore_options = [] if sys.prefix == sys.base_prefix else [ + 'install-base', 'install-platbase', 'install-lib', + 'install-platlib', 'install-purelib', 'install-headers', + 'install-scripts', 'install-data', 'prefix', 'exec-prefix', + 'home', 'user', 'root', + ] + + ignore_options = frozenset(ignore_options) + + if filenames is None: + filenames = self.find_config_files() + + if DEBUG: + self.announce("Distribution.parse_config_files():") + + parser = ConfigParser() + parser.optionxform = str + for filename in filenames: + with io.open(filename, encoding='utf-8') as reader: + if DEBUG: + self.announce(" reading {filename}".format(**locals())) + parser.read_file(reader) + for section in parser.sections(): + options = parser.options(section) + opt_dict = self.get_option_dict(section) + + for opt in options: + if opt == '__name__' or opt in ignore_options: + continue + + val = parser.get(section, opt) + opt = self.warn_dash_deprecation(opt, section) + opt = self.make_option_lowercase(opt, section) + opt_dict[opt] = (filename, val) + + # Make the ConfigParser forget everything (so we retain + # the original filenames that options come from) + parser.__init__() + + if 'global' not in self.command_options: + return + + # If there was a "global" section in the config file, use it + # to set Distribution options. 
+ + for (opt, (src, val)) in self.command_options['global'].items(): + alias = self.negative_opt.get(opt) + if alias: + val = not strtobool(val) + elif opt in ('verbose', 'dry_run'): # ugh! + val = strtobool(val) + + try: + setattr(self, alias or opt, val) + except ValueError as e: + raise DistutilsOptionError(e) from e + + def warn_dash_deprecation(self, opt, section): + if section in ( + 'options.extras_require', 'options.data_files', + ): + return opt + + underscore_opt = opt.replace('-', '_') + commands = distutils.command.__all__ + self._setuptools_commands() + if (not section.startswith('options') and section != 'metadata' + and section not in commands): + return underscore_opt + + if '-' in opt: + warnings.warn( + "Usage of dash-separated '%s' will not be supported in future " + "versions. Please use the underscore name '%s' instead" + % (opt, underscore_opt)) + return underscore_opt + + def _setuptools_commands(self): + try: + dist = pkg_resources.get_distribution('setuptools') + return list(dist.get_entry_map('distutils.commands')) + except pkg_resources.DistributionNotFound: + # during bootstrapping, distribution doesn't exist + return [] + + def make_option_lowercase(self, opt, section): + if section != 'metadata' or opt.islower(): + return opt + + lowercase_opt = opt.lower() + warnings.warn( + "Usage of uppercase key '%s' in '%s' will be deprecated in future " + "versions. Please use lowercase '%s' instead" + % (opt, section, lowercase_opt) + ) + return lowercase_opt + + # FIXME: 'Distribution._set_command_options' is too complex (14) + def _set_command_options(self, command_obj, option_dict=None): # noqa: C901 + """ + Set the options for 'command_obj' from 'option_dict'. Basically + this means copying elements of a dictionary ('option_dict') to + attributes of an instance ('command'). + + 'command_obj' must be a Command instance. If 'option_dict' is not + supplied, uses the standard option dictionary for this command + (from 'self.command_options'). + + (Adopted from distutils.dist.Distribution._set_command_options) + """ + command_name = command_obj.get_command_name() + if option_dict is None: + option_dict = self.get_option_dict(command_name) + + if DEBUG: + self.announce(" setting options for '%s' command:" % command_name) + for (option, (source, value)) in option_dict.items(): + if DEBUG: + self.announce(" %s = %s (from %s)" % (option, value, + source)) + try: + bool_opts = [translate_longopt(o) + for o in command_obj.boolean_options] + except AttributeError: + bool_opts = [] + try: + neg_opt = command_obj.negative_opt + except AttributeError: + neg_opt = {} + + try: + is_string = isinstance(value, str) + if option in neg_opt and is_string: + setattr(command_obj, neg_opt[option], not strtobool(value)) + elif option in bool_opts and is_string: + setattr(command_obj, option, strtobool(value)) + elif hasattr(command_obj, option): + setattr(command_obj, option, value) + else: + raise DistutilsOptionError( + "error in %s: command '%s' has no such option '%s'" + % (source, command_name, option)) + except ValueError as e: + raise DistutilsOptionError(e) from e + + def parse_config_files(self, filenames=None, ignore_option_errors=False): + """Parses configuration files from various levels + and loads configuration. 
+ + """ + self._parse_config_files(filenames=filenames) + + parse_configuration(self, self.command_options, + ignore_option_errors=ignore_option_errors) + self._finalize_requires() + self._finalize_license_files() + + def fetch_build_eggs(self, requires): + """Resolve pre-setup requirements""" + resolved_dists = pkg_resources.working_set.resolve( + pkg_resources.parse_requirements(requires), + installer=self.fetch_build_egg, + replace_conflicting=True, + ) + for dist in resolved_dists: + pkg_resources.working_set.add(dist, replace=True) + return resolved_dists + + def finalize_options(self): + """ + Allow plugins to apply arbitrary operations to the + distribution. Each hook may optionally define a 'order' + to influence the order of execution. Smaller numbers + go first and the default is 0. + """ + group = 'setuptools.finalize_distribution_options' + + def by_order(hook): + return getattr(hook, 'order', 0) + eps = map(lambda e: e.load(), pkg_resources.iter_entry_points(group)) + for ep in sorted(eps, key=by_order): + ep(self) + + def _finalize_setup_keywords(self): + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + value = getattr(self, ep.name, None) + if value is not None: + ep.require(installer=self.fetch_build_egg) + ep.load()(self, ep.name, value) + + def _finalize_2to3_doctests(self): + if getattr(self, 'convert_2to3_doctests', None): + # XXX may convert to set here when we can rely on set being builtin + self.convert_2to3_doctests = [ + os.path.abspath(p) + for p in self.convert_2to3_doctests + ] + else: + self.convert_2to3_doctests = [] + + def get_egg_cache_dir(self): + egg_cache_dir = os.path.join(os.curdir, '.eggs') + if not os.path.exists(egg_cache_dir): + os.mkdir(egg_cache_dir) + windows_support.hide_file(egg_cache_dir) + readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt') + with open(readme_txt_filename, 'w') as f: + f.write('This directory contains eggs that were downloaded ' + 'by setuptools to build, test, and run plug-ins.\n\n') + f.write('This directory caches those eggs to prevent ' + 'repeated downloads.\n\n') + f.write('However, it is safe to delete this directory.\n\n') + + return egg_cache_dir + + def fetch_build_egg(self, req): + """Fetch an egg needed for building""" + from setuptools.installer import fetch_build_egg + return fetch_build_egg(self, req) + + def get_command_class(self, command): + """Pluggable version of get_command_class()""" + if command in self.cmdclass: + return self.cmdclass[command] + + eps = pkg_resources.iter_entry_points('distutils.commands', command) + for ep in eps: + ep.require(installer=self.fetch_build_egg) + self.cmdclass[command] = cmdclass = ep.load() + return cmdclass + else: + return _Distribution.get_command_class(self, command) + + def print_commands(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() + self.cmdclass[ep.name] = cmdclass + return _Distribution.print_commands(self) + + def get_command_list(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() + self.cmdclass[ep.name] = cmdclass + return _Distribution.get_command_list(self) + + def include(self, **attrs): + """Add items to distribution that are named in keyword arguments + + For example, 'dist.include(py_modules=["x"])' would add 'x' to + the 
distribution's 'py_modules' attribute, if it was not already + there. + + Currently, this method only supports inclusion for attributes that are + lists or tuples. If you need to add support for adding to other + attributes in this or a subclass, you can add an '_include_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' + will try to call 'dist._include_foo({"bar":"baz"})', which can then + handle whatever special inclusion logic is needed. + """ + for k, v in attrs.items(): + include = getattr(self, '_include_' + k, None) + if include: + include(v) + else: + self._include_misc(k, v) + + def exclude_package(self, package): + """Remove packages, modules, and extensions in named package""" + + pfx = package + '.' + if self.packages: + self.packages = [ + p for p in self.packages + if p != package and not p.startswith(pfx) + ] + + if self.py_modules: + self.py_modules = [ + p for p in self.py_modules + if p != package and not p.startswith(pfx) + ] + + if self.ext_modules: + self.ext_modules = [ + p for p in self.ext_modules + if p.name != package and not p.name.startswith(pfx) + ] + + def has_contents_for(self, package): + """Return true if 'exclude_package(package)' would do something""" + + pfx = package + '.' + + for p in self.iter_distribution_names(): + if p == package or p.startswith(pfx): + return True + + def _exclude_misc(self, name, value): + """Handle 'exclude()' for list/tuple attrs without a special handler""" + if not isinstance(value, sequence): + raise DistutilsSetupError( + "%s: setting must be a list or tuple (%r)" % (name, value) + ) + try: + old = getattr(self, name) + except AttributeError as e: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) from e + if old is not None and not isinstance(old, sequence): + raise DistutilsSetupError( + name + ": this setting cannot be changed via include/exclude" + ) + elif old: + setattr(self, name, [item for item in old if item not in value]) + + def _include_misc(self, name, value): + """Handle 'include()' for list/tuple attrs without a special handler""" + + if not isinstance(value, sequence): + raise DistutilsSetupError( + "%s: setting must be a list (%r)" % (name, value) + ) + try: + old = getattr(self, name) + except AttributeError as e: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) from e + if old is None: + setattr(self, name, value) + elif not isinstance(old, sequence): + raise DistutilsSetupError( + name + ": this setting cannot be changed via include/exclude" + ) + else: + new = [item for item in value if item not in old] + setattr(self, name, old + new) + + def exclude(self, **attrs): + """Remove items from distribution that are named in keyword arguments + + For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from + the distribution's 'py_modules' attribute. Excluding packages uses + the 'exclude_package()' method, so all of the package's contained + packages, modules, and extensions are also excluded. + + Currently, this method only supports exclusion from attributes that are + lists or tuples. If you need to add support for excluding from other + attributes in this or a subclass, you can add an '_exclude_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'exclude()'. 
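# For illustration only (not part of the vendored module): include()/exclude()
# behave as in-place add/subtract on list-valued attributes. The module names
# below are examples.
from setuptools.dist import Distribution

dist = Distribution()
dist.include(py_modules=["autotimer", "activity"])
dist.exclude(py_modules=["activity"])
print(dist.py_modules)  # ['autotimer']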
So, 'dist.exclude(foo={"bar":"baz"})' + will try to call 'dist._exclude_foo({"bar":"baz"})', which can then + handle whatever special exclusion logic is needed. + """ + for k, v in attrs.items(): + exclude = getattr(self, '_exclude_' + k, None) + if exclude: + exclude(v) + else: + self._exclude_misc(k, v) + + def _exclude_packages(self, packages): + if not isinstance(packages, sequence): + raise DistutilsSetupError( + "packages: setting must be a list or tuple (%r)" % (packages,) + ) + list(map(self.exclude_package, packages)) + + def _parse_command_opts(self, parser, args): + # Remove --with-X/--without-X options when processing command args + self.global_options = self.__class__.global_options + self.negative_opt = self.__class__.negative_opt + + # First, expand any aliases + command = args[0] + aliases = self.get_option_dict('aliases') + while command in aliases: + src, alias = aliases[command] + del aliases[command] # ensure each alias can expand only once! + import shlex + args[:1] = shlex.split(alias, True) + command = args[0] + + nargs = _Distribution._parse_command_opts(self, parser, args) + + # Handle commands that want to consume all remaining arguments + cmd_class = self.get_command_class(command) + if getattr(cmd_class, 'command_consumes_arguments', None): + self.get_option_dict(command)['args'] = ("command line", nargs) + if nargs is not None: + return [] + + return nargs + + def get_cmdline_options(self): + """Return a '{cmd: {opt:val}}' map of all command-line options + + Option names are all long, but do not include the leading '--', and + contain dashes rather than underscores. If the option doesn't take + an argument (e.g. '--quiet'), the 'val' is 'None'. + + Note that options provided by config files are intentionally excluded. + """ + + d = {} + + for cmd, opts in self.command_options.items(): + + for opt, (src, val) in opts.items(): + + if src != "command line": + continue + + opt = opt.replace('_', '-') + + if val == 0: + cmdobj = self.get_command_obj(cmd) + neg_opt = self.negative_opt.copy() + neg_opt.update(getattr(cmdobj, 'negative_opt', {})) + for neg, pos in neg_opt.items(): + if pos == opt: + opt = neg + val = None + break + else: + raise AssertionError("Shouldn't be able to get here") + + elif val == 1: + val = None + + d.setdefault(cmd, {})[opt] = val + + return d + + def iter_distribution_names(self): + """Yield all packages, modules, and extension names in distribution""" + + for pkg in self.packages or (): + yield pkg + + for module in self.py_modules or (): + yield module + + for ext in self.ext_modules or (): + if isinstance(ext, tuple): + name, buildinfo = ext + else: + name = ext.name + if name.endswith('module'): + name = name[:-6] + yield name + + def handle_display_options(self, option_order): + """If there were any non-global "display-only" options + (--help-commands or the metadata display options) on the command + line, display the requested info and return true; else return + false. + """ + import sys + + if self.help_commands: + return _Distribution.handle_display_options(self, option_order) + + # Stdout may be StringIO (e.g. in tests) + if not isinstance(sys.stdout, io.TextIOWrapper): + return _Distribution.handle_display_options(self, option_order) + + # Don't wrap stdout if utf-8 is already the encoding. Provides + # workaround for #334. 
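# For illustration only (not part of the vendored module): the workaround below
# swaps sys.stdout for a UTF-8 TextIOWrapper so metadata containing non-ASCII
# text prints the same way regardless of the console encoding. The same idea on
# its own:
import io
import sys

original_encoding = sys.stdout.encoding
sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding="utf-8", line_buffering=True)
print("café")  # avoids UnicodeEncodeError on consoles that cannot encode it
sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding=original_encoding, line_buffering=True)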
+ if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): + return _Distribution.handle_display_options(self, option_order) + + # Print metadata in UTF-8 no matter the platform + encoding = sys.stdout.encoding + errors = sys.stdout.errors + newline = sys.platform != 'win32' and '\n' or None + line_buffering = sys.stdout.line_buffering + + sys.stdout = io.TextIOWrapper( + sys.stdout.detach(), 'utf-8', errors, newline, line_buffering) + try: + return _Distribution.handle_display_options(self, option_order) + finally: + sys.stdout = io.TextIOWrapper( + sys.stdout.detach(), encoding, errors, newline, line_buffering) + + +class DistDeprecationWarning(SetuptoolsDeprecationWarning): + """Class for warning about deprecations in dist in + setuptools. Not ignored by default, unlike DeprecationWarning.""" diff --git a/venv/Lib/site-packages/setuptools/errors.py b/venv/Lib/site-packages/setuptools/errors.py new file mode 100644 index 00000000..2701747f --- /dev/null +++ b/venv/Lib/site-packages/setuptools/errors.py @@ -0,0 +1,16 @@ +"""setuptools.errors + +Provides exceptions used by setuptools modules. +""" + +from distutils.errors import DistutilsError + + +class RemovedCommandError(DistutilsError, RuntimeError): + """Error used for commands that have been removed in setuptools. + + Since ``setuptools`` is built on ``distutils``, simply removing a command + from ``setuptools`` will make the behavior fall back to ``distutils``; this + error is raised if a command exists in ``distutils`` but has been actively + removed in ``setuptools``. + """ diff --git a/venv/Lib/site-packages/setuptools/extension.py b/venv/Lib/site-packages/setuptools/extension.py new file mode 100644 index 00000000..1820722a --- /dev/null +++ b/venv/Lib/site-packages/setuptools/extension.py @@ -0,0 +1,55 @@ +import re +import functools +import distutils.core +import distutils.errors +import distutils.extension + +from .monkey import get_unpatched + + +def _have_cython(): + """ + Return True if Cython can be imported. + """ + cython_impl = 'Cython.Distutils.build_ext' + try: + # from (cython_impl) import build_ext + __import__(cython_impl, fromlist=['build_ext']).build_ext + return True + except Exception: + pass + return False + + +# for compatibility +have_pyrex = _have_cython + +_Extension = get_unpatched(distutils.core.Extension) + + +class Extension(_Extension): + """Extension that uses '.c' files in place of '.pyx' files""" + + def __init__(self, name, sources, *args, **kw): + # The *args is needed for compatibility as calls may use positional + # arguments. py_limited_api may be set only via keyword. + self.py_limited_api = kw.pop("py_limited_api", False) + _Extension.__init__(self, name, sources, *args, **kw) + + def _convert_pyx_sources_to_lang(self): + """ + Replace sources with .pyx extensions to sources with the target + language extension. This mechanism allows language authors to supply + pre-converted sources but to prefer the .pyx sources. 
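# For illustration only (not part of the vendored module): when Cython is not
# available, the method below rewrites .pyx source names to the target language
# extension. The substitution on its own, reusing the same pattern:
import functools
import re

sources = ["fast_path.pyx", "helpers.c"]  # example file names
target_ext = ".c"                         # would be '.cpp' for language 'c++'
sub = functools.partial(re.sub, ".pyx$", target_ext)
print(list(map(sub, sources)))            # ['fast_path.c', 'helpers.c']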
+ """ + if _have_cython(): + # the build has Cython, so allow it to compile the .pyx files + return + lang = self.language or '' + target_ext = '.cpp' if lang.lower() == 'c++' else '.c' + sub = functools.partial(re.sub, '.pyx$', target_ext) + self.sources = list(map(sub, self.sources)) + + +class Library(Extension): + """Just like a regular Extension, but built as a library instead""" diff --git a/venv/Lib/site-packages/setuptools/extern/__init__.py b/venv/Lib/site-packages/setuptools/extern/__init__.py new file mode 100644 index 00000000..baca1afa --- /dev/null +++ b/venv/Lib/site-packages/setuptools/extern/__init__.py @@ -0,0 +1,73 @@ +import importlib.util +import sys + + +class VendorImporter: + """ + A PEP 302 meta path importer for finding optionally-vendored + or otherwise naturally-installed packages from root_name. + """ + + def __init__(self, root_name, vendored_names=(), vendor_pkg=None): + self.root_name = root_name + self.vendored_names = set(vendored_names) + self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') + + @property + def search_path(self): + """ + Search first the vendor package then as a natural package. + """ + yield self.vendor_pkg + '.' + yield '' + + def _module_matches_namespace(self, fullname): + """Figure out if the target module is vendored.""" + root, base, target = fullname.partition(self.root_name + '.') + return not root and any(map(target.startswith, self.vendored_names)) + + def load_module(self, fullname): + """ + Iterate over the search path to locate and load fullname. + """ + root, base, target = fullname.partition(self.root_name + '.') + for prefix in self.search_path: + try: + extant = prefix + target + __import__(extant) + mod = sys.modules[extant] + sys.modules[fullname] = mod + return mod + except ImportError: + pass + else: + raise ImportError( + "The '{target}' package is required; " + "normally this is bundled with this package so if you get " + "this warning, consult the packager of your " + "distribution.".format(**locals()) + ) + + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + + def find_spec(self, fullname, path=None, target=None): + """Return a module spec for vendored names.""" + return ( + importlib.util.spec_from_loader(fullname, self) + if self._module_matches_namespace(fullname) else None + ) + + def install(self): + """ + Install this importer into sys.meta_path if not already present. 
+ """ + if self not in sys.meta_path: + sys.meta_path.append(self) + + +names = 'packaging', 'pyparsing', 'ordered_set', 'more_itertools', +VendorImporter(__name__, names, 'setuptools._vendor').install() diff --git a/venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..f40f6ed3 Binary files /dev/null and b/venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-36.pyc differ diff --git a/env/lib/python2.7/site-packages/setuptools/glob.py b/venv/Lib/site-packages/setuptools/glob.py similarity index 92% rename from env/lib/python2.7/site-packages/setuptools/glob.py rename to venv/Lib/site-packages/setuptools/glob.py index 9d7cbc5d..87062b81 100644 --- a/env/lib/python2.7/site-packages/setuptools/glob.py +++ b/venv/Lib/site-packages/setuptools/glob.py @@ -47,6 +47,8 @@ def iglob(pathname, recursive=False): def _iglob(pathname, recursive): dirname, basename = os.path.split(pathname) + glob_in_dir = glob2 if recursive and _isrecursive(basename) else glob1 + if not has_magic(pathname): if basename: if os.path.lexists(pathname): @@ -56,13 +58,9 @@ def _iglob(pathname, recursive): if os.path.isdir(dirname): yield pathname return + if not dirname: - if recursive and _isrecursive(basename): - for x in glob2(dirname, basename): - yield x - else: - for x in glob1(dirname, basename): - yield x + yield from glob_in_dir(dirname, basename) return # `os.path.split()` returns the argument itself as a dirname if it is a # drive or UNC path. Prevent an infinite recursion if a drive or UNC path @@ -71,12 +69,7 @@ def _iglob(pathname, recursive): dirs = _iglob(dirname, recursive) else: dirs = [dirname] - if has_magic(basename): - if recursive and _isrecursive(basename): - glob_in_dir = glob2 - else: - glob_in_dir = glob1 - else: + if not has_magic(basename): glob_in_dir = glob0 for dirname in dirs: for name in glob_in_dir(dirname, basename): diff --git a/env/lib/python2.7/site-packages/setuptools/gui-32.exe b/venv/Lib/site-packages/setuptools/gui-32.exe similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/gui-32.exe rename to venv/Lib/site-packages/setuptools/gui-32.exe diff --git a/env/lib/python2.7/site-packages/setuptools/gui-64.exe b/venv/Lib/site-packages/setuptools/gui-64.exe similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/gui-64.exe rename to venv/Lib/site-packages/setuptools/gui-64.exe diff --git a/env/lib/python2.7/site-packages/setuptools/gui.exe b/venv/Lib/site-packages/setuptools/gui.exe similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/gui.exe rename to venv/Lib/site-packages/setuptools/gui.exe diff --git a/venv/Lib/site-packages/setuptools/installer.py b/venv/Lib/site-packages/setuptools/installer.py new file mode 100644 index 00000000..57e2b587 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/installer.py @@ -0,0 +1,97 @@ +import glob +import os +import subprocess +import sys +import tempfile +from distutils import log +from distutils.errors import DistutilsError + +import pkg_resources +from setuptools.wheel import Wheel + + +def _fixup_find_links(find_links): + """Ensure find-links option end-up being a list of strings.""" + if isinstance(find_links, str): + return find_links.split() + assert isinstance(find_links, (tuple, list)) + return find_links + + +def fetch_build_egg(dist, req): # noqa: C901 # is too complex (16) # FIXME + """Fetch an egg needed for 
building. + + Use pip/wheel to fetch/build a wheel.""" + # Warn if wheel is not available + try: + pkg_resources.get_distribution('wheel') + except pkg_resources.DistributionNotFound: + dist.announce('WARNING: The wheel package is not available.', log.WARN) + # Ignore environment markers; if supplied, it is required. + req = strip_marker(req) + # Take easy_install options into account, but do not override relevant + # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll + # take precedence. + opts = dist.get_option_dict('easy_install') + if 'allow_hosts' in opts: + raise DistutilsError('the `allow-hosts` option is not supported ' + 'when using pip to install requirements.') + quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ + if 'PIP_INDEX_URL' in os.environ: + index_url = None + elif 'index_url' in opts: + index_url = opts['index_url'][1] + else: + index_url = None + find_links = ( + _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts + else [] + ) + if dist.dependency_links: + find_links.extend(dist.dependency_links) + eggs_dir = os.path.realpath(dist.get_egg_cache_dir()) + environment = pkg_resources.Environment() + for egg_dist in pkg_resources.find_distributions(eggs_dir): + if egg_dist in req and environment.can_add(egg_dist): + return egg_dist + with tempfile.TemporaryDirectory() as tmpdir: + cmd = [ + sys.executable, '-m', 'pip', + '--disable-pip-version-check', + 'wheel', '--no-deps', + '-w', tmpdir, + ] + if quiet: + cmd.append('--quiet') + if index_url is not None: + cmd.extend(('--index-url', index_url)) + for link in find_links or []: + cmd.extend(('--find-links', link)) + # If requirement is a PEP 508 direct URL, directly pass + # the URL to pip, as `req @ url` does not work on the + # command line. + cmd.append(req.url or str(req)) + try: + subprocess.check_call(cmd) + except subprocess.CalledProcessError as e: + raise DistutilsError(str(e)) from e + wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0]) + dist_location = os.path.join(eggs_dir, wheel.egg_name()) + wheel.install_as_egg(dist_location) + dist_metadata = pkg_resources.PathMetadata( + dist_location, os.path.join(dist_location, 'EGG-INFO')) + dist = pkg_resources.Distribution.from_filename( + dist_location, metadata=dist_metadata) + return dist + + +def strip_marker(req): + """ + Return a new requirement without the environment marker to avoid + calling pip with something like `babel; extra == "i18n"`, which + would always be ignored. 
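# For illustration only (not part of the vendored module): strip_marker removes
# the environment marker so pip is never handed something like
# 'babel; extra == "i18n"', which it would silently ignore. The same
# transformation in isolation:
import pkg_resources

req = pkg_resources.Requirement.parse('babel; extra == "i18n"')
copy = pkg_resources.Requirement.parse(str(req))  # copy first, as strip_marker does
copy.marker = None
print(str(copy))  # babel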
+ """ + # create a copy to avoid mutating the input + req = pkg_resources.Requirement.parse(str(req)) + req.marker = None + return req diff --git a/env/lib/python2.7/site-packages/setuptools/launch.py b/venv/Lib/site-packages/setuptools/launch.py similarity index 92% rename from env/lib/python2.7/site-packages/setuptools/launch.py rename to venv/Lib/site-packages/setuptools/launch.py index 308283ea..0208fdf3 100644 --- a/env/lib/python2.7/site-packages/setuptools/launch.py +++ b/venv/Lib/site-packages/setuptools/launch.py @@ -25,7 +25,8 @@ def run(): sys.argv[:] = sys.argv[1:] open_ = getattr(tokenize, 'open', open) - script = open_(script_name).read() + with open_(script_name) as fid: + script = fid.read() norm_script = script.replace('\\r\\n', '\\n') code = compile(norm_script, script_name, 'exec') exec(code, namespace) diff --git a/env/lib/python2.7/site-packages/setuptools/lib2to3_ex.py b/venv/Lib/site-packages/setuptools/lib2to3_ex.py similarity index 84% rename from env/lib/python2.7/site-packages/setuptools/lib2to3_ex.py rename to venv/Lib/site-packages/setuptools/lib2to3_ex.py index 4b1a73fe..c176abf6 100644 --- a/env/lib/python2.7/site-packages/setuptools/lib2to3_ex.py +++ b/venv/Lib/site-packages/setuptools/lib2to3_ex.py @@ -2,16 +2,15 @@ Customized Mixin2to3 support: - adds support for converting doctests - - -This module raises an ImportError on Python 2. """ +import warnings from distutils.util import Mixin2to3 as _Mixin2to3 from distutils import log from lib2to3.refactor import RefactoringTool, get_fixers_from_package import setuptools +from ._deprecation_warning import SetuptoolsDeprecationWarning class DistutilsRefactoringTool(RefactoringTool): @@ -33,6 +32,13 @@ def run_2to3(self, files, doctests=False): return if not files: return + + warnings.warn( + "2to3 support is deprecated. If the project still " + "requires Python 2 support, please migrate to " + "a single-codebase solution or employ an " + "independent conversion process.", + SetuptoolsDeprecationWarning) log.info("Fixing " + " ".join(files)) self.__build_fixer_names() self.__exclude_fixers() diff --git a/env/lib/python2.7/site-packages/setuptools/monkey.py b/venv/Lib/site-packages/setuptools/monkey.py similarity index 96% rename from env/lib/python2.7/site-packages/setuptools/monkey.py rename to venv/Lib/site-packages/setuptools/monkey.py index 3c77f8cf..fb36dc1a 100644 --- a/env/lib/python2.7/site-packages/setuptools/monkey.py +++ b/venv/Lib/site-packages/setuptools/monkey.py @@ -10,8 +10,6 @@ from importlib import import_module import inspect -from setuptools.extern import six - import setuptools __all__ = [] @@ -37,7 +35,7 @@ def _get_mro(cls): def get_unpatched(item): lookup = ( - get_unpatched_class if isinstance(item, six.class_types) else + get_unpatched_class if isinstance(item, type) else get_unpatched_function if isinstance(item, types.FunctionType) else lambda item: None ) @@ -138,7 +136,7 @@ def patch_for_msvc_specialized_compiler(): msvc = import_module('setuptools.msvc') if platform.system() != 'Windows': - # Compilers only availables on Microsoft Windows + # Compilers only available on Microsoft Windows return def patch_params(mod_name, func_name): diff --git a/venv/Lib/site-packages/setuptools/msvc.py b/venv/Lib/site-packages/setuptools/msvc.py new file mode 100644 index 00000000..281ea1c2 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/msvc.py @@ -0,0 +1,1805 @@ +""" +Improved support for Microsoft Visual C++ compilers. 
+ +Known supported compilers: +-------------------------- +Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) + Microsoft Windows SDK 6.1 (x86, x64, ia64) + Microsoft Windows SDK 7.0 (x86, x64, ia64) + +Microsoft Visual C++ 10.0: + Microsoft Windows SDK 7.1 (x86, x64, ia64) + +Microsoft Visual C++ 14.X: + Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) + Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) + Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64) + +This may also support compilers shipped with compatible Visual Studio versions. +""" + +import json +from io import open +from os import listdir, pathsep +from os.path import join, isfile, isdir, dirname +import sys +import contextlib +import platform +import itertools +import subprocess +import distutils.errors +from setuptools.extern.packaging.version import LegacyVersion +from setuptools.extern.more_itertools import unique_everseen + +from .monkey import get_unpatched + +if platform.system() == 'Windows': + import winreg + from os import environ +else: + # Mock winreg and environ so the module can be imported on this platform. + + class winreg: + HKEY_USERS = None + HKEY_CURRENT_USER = None + HKEY_LOCAL_MACHINE = None + HKEY_CLASSES_ROOT = None + + environ = dict() + +_msvc9_suppress_errors = ( + # msvc9compiler isn't available on some platforms + ImportError, + + # msvc9compiler raises DistutilsPlatformError in some + # environments. See #1118. + distutils.errors.DistutilsPlatformError, +) + +try: + from distutils.msvc9compiler import Reg +except _msvc9_suppress_errors: + pass + + +def msvc9_find_vcvarsall(version): + """ + Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone + compiler build for Python + (VCForPython / Microsoft Visual C++ Compiler for Python 2.7). + + Fall back to original behavior when the standalone compiler is not + available. + + Redirect the path of "vcvarsall.bat". + + Parameters + ---------- + version: float + Required Microsoft Visual C++ version. + + Return + ------ + str + vcvarsall.bat path + """ + vc_base = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' + key = vc_base % ('', version) + try: + # Per-user installs register the compiler path here + productdir = Reg.get_value(key, "installdir") + except KeyError: + try: + # All-user installs on a 64-bit system register here + key = vc_base % ('Wow6432Node\\', version) + productdir = Reg.get_value(key, "installdir") + except KeyError: + productdir = None + + if productdir: + vcvarsall = join(productdir, "vcvarsall.bat") + if isfile(vcvarsall): + return vcvarsall + + return get_unpatched(msvc9_find_vcvarsall)(version) + + +def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): + """ + Patched "distutils.msvc9compiler.query_vcvarsall" for support extra + Microsoft Visual C++ 9.0 and 10.0 compilers. + + Set environment without use of "vcvarsall.bat". + + Parameters + ---------- + ver: float + Required Microsoft Visual C++ version. + arch: str + Target architecture. 
+ + Return + ------ + dict + environment + """ + # Try to get environment from vcvarsall.bat (Classical way) + try: + orig = get_unpatched(msvc9_query_vcvarsall) + return orig(ver, arch, *args, **kwargs) + except distutils.errors.DistutilsPlatformError: + # Pass error if Vcvarsall.bat is missing + pass + except ValueError: + # Pass error if environment not set after executing vcvarsall.bat + pass + + # If error, try to set environment directly + try: + return EnvironmentInfo(arch, ver).return_env() + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, ver, arch) + raise + + +def _msvc14_find_vc2015(): + """Python 3.8 "distutils/_msvccompiler.py" backport""" + try: + key = winreg.OpenKey( + winreg.HKEY_LOCAL_MACHINE, + r"Software\Microsoft\VisualStudio\SxS\VC7", + 0, + winreg.KEY_READ | winreg.KEY_WOW64_32KEY + ) + except OSError: + return None, None + + best_version = 0 + best_dir = None + with key: + for i in itertools.count(): + try: + v, vc_dir, vt = winreg.EnumValue(key, i) + except OSError: + break + if v and vt == winreg.REG_SZ and isdir(vc_dir): + try: + version = int(float(v)) + except (ValueError, TypeError): + continue + if version >= 14 and version > best_version: + best_version, best_dir = version, vc_dir + return best_version, best_dir + + +def _msvc14_find_vc2017(): + """Python 3.8 "distutils/_msvccompiler.py" backport + + Returns "15, path" based on the result of invoking vswhere.exe + If no install is found, returns "None, None" + + The version is returned to avoid unnecessarily changing the function + result. It may be ignored when the path is not None. + + If vswhere.exe is not available, by definition, VS 2017 is not + installed. + """ + root = environ.get("ProgramFiles(x86)") or environ.get("ProgramFiles") + if not root: + return None, None + + try: + path = subprocess.check_output([ + join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), + "-latest", + "-prerelease", + "-requiresAny", + "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + "-requires", "Microsoft.VisualStudio.Workload.WDExpress", + "-property", "installationPath", + "-products", "*", + ]).decode(encoding="mbcs", errors="strict").strip() + except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): + return None, None + + path = join(path, "VC", "Auxiliary", "Build") + if isdir(path): + return 15, path + + return None, None + + +PLAT_SPEC_TO_RUNTIME = { + 'x86': 'x86', + 'x86_amd64': 'x64', + 'x86_arm': 'arm', + 'x86_arm64': 'arm64' +} + + +def _msvc14_find_vcvarsall(plat_spec): + """Python 3.8 "distutils/_msvccompiler.py" backport""" + _, best_dir = _msvc14_find_vc2017() + vcruntime = None + + if plat_spec in PLAT_SPEC_TO_RUNTIME: + vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec] + else: + vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86' + + if best_dir: + vcredist = join(best_dir, "..", "..", "redist", "MSVC", "**", + vcruntime_plat, "Microsoft.VC14*.CRT", + "vcruntime140.dll") + try: + import glob + vcruntime = glob.glob(vcredist, recursive=True)[-1] + except (ImportError, OSError, LookupError): + vcruntime = None + + if not best_dir: + best_version, best_dir = _msvc14_find_vc2015() + if best_version: + vcruntime = join(best_dir, 'redist', vcruntime_plat, + "Microsoft.VC140.CRT", "vcruntime140.dll") + + if not best_dir: + return None, None + + vcvarsall = join(best_dir, "vcvarsall.bat") + if not isfile(vcvarsall): + return None, None + + if not vcruntime or not isfile(vcruntime): + vcruntime = None + + return vcvarsall, 
vcruntime + + +def _msvc14_get_vc_env(plat_spec): + """Python 3.8 "distutils/_msvccompiler.py" backport""" + if "DISTUTILS_USE_SDK" in environ: + return { + key.lower(): value + for key, value in environ.items() + } + + vcvarsall, vcruntime = _msvc14_find_vcvarsall(plat_spec) + if not vcvarsall: + raise distutils.errors.DistutilsPlatformError( + "Unable to find vcvarsall.bat" + ) + + try: + out = subprocess.check_output( + 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec), + stderr=subprocess.STDOUT, + ).decode('utf-16le', errors='replace') + except subprocess.CalledProcessError as exc: + raise distutils.errors.DistutilsPlatformError( + "Error executing {}".format(exc.cmd) + ) from exc + + env = { + key.lower(): value + for key, _, value in + (line.partition('=') for line in out.splitlines()) + if key and value + } + + if vcruntime: + env['py_vcruntime_redist'] = vcruntime + return env + + +def msvc14_get_vc_env(plat_spec): + """ + Patched "distutils._msvccompiler._get_vc_env" for support extra + Microsoft Visual C++ 14.X compilers. + + Set environment without use of "vcvarsall.bat". + + Parameters + ---------- + plat_spec: str + Target architecture. + + Return + ------ + dict + environment + """ + + # Always use backport from CPython 3.8 + try: + return _msvc14_get_vc_env(plat_spec) + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, 14.0) + raise + + +def msvc14_gen_lib_options(*args, **kwargs): + """ + Patched "distutils._msvccompiler.gen_lib_options" for fix + compatibility between "numpy.distutils" and "distutils._msvccompiler" + (for Numpy < 1.11.2) + """ + if "numpy.distutils" in sys.modules: + import numpy as np + if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'): + return np.distutils.ccompiler.gen_lib_options(*args, **kwargs) + return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs) + + +def _augment_exception(exc, version, arch=''): + """ + Add details to the exception message to help guide the user + as to what action will resolve it. + """ + # Error if MSVC++ directory not found or environment not set + message = exc.args[0] + + if "vcvarsall" in message.lower() or "visual c" in message.lower(): + # Special error message if MSVC++ not installed + tmpl = 'Microsoft Visual C++ {version:0.1f} or greater is required.' + message = tmpl.format(**locals()) + msdownload = 'www.microsoft.com/download/details.aspx?id=%d' + if version == 9.0: + if arch.lower().find('ia64') > -1: + # For VC++ 9.0, if IA64 support is needed, redirect user + # to Windows SDK 7.0. + # Note: No download link available from Microsoft. + message += ' Get it with "Microsoft Windows SDK 7.0"' + else: + # For VC++ 9.0 redirect user to Vc++ for Python 2.7 : + # This redirection link is maintained by Microsoft. + # Contact vspython@microsoft.com if it needs updating. + message += ' Get it from http://aka.ms/vcpython27' + elif version == 10.0: + # For VC++ 10.0 Redirect user to Windows SDK 7.1 + message += ' Get it with "Microsoft Windows SDK 7.1": ' + message += msdownload % 8279 + elif version >= 14.0: + # For VC++ 14.X Redirect user to latest Visual C++ Build Tools + message += (' Get it with "Microsoft C++ Build Tools": ' + r'https://visualstudio.microsoft.com' + r'/visual-cpp-build-tools/') + + exc.args = (message, ) + + +class PlatformInfo: + """ + Current and Target Architectures information. + + Parameters + ---------- + arch: str + Target architecture. 
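# For illustration only (not part of the vendored module): _msvc14_get_vc_env
# builds its environment dict by splitting each line of "vcvarsall.bat ... && set"
# output on the first '='. The same parsing on a captured string, with made-up
# paths and no Windows required:
out = "PATH=C:\\Tools\\bin\nINCLUDE=C:\\Include\nNOT A VARIABLE\n"
env = {
    key.lower(): value
    for key, _, value in (line.partition("=") for line in out.splitlines())
    if key and value
}
print(env)  # {'path': 'C:\\Tools\\bin', 'include': 'C:\\Include'}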
+ """ + current_cpu = environ.get('processor_architecture', '').lower() + + def __init__(self, arch): + self.arch = arch.lower().replace('x64', 'amd64') + + @property + def target_cpu(self): + """ + Return Target CPU architecture. + + Return + ------ + str + Target CPU + """ + return self.arch[self.arch.find('_') + 1:] + + def target_is_x86(self): + """ + Return True if target CPU is x86 32 bits.. + + Return + ------ + bool + CPU is x86 32 bits + """ + return self.target_cpu == 'x86' + + def current_is_x86(self): + """ + Return True if current CPU is x86 32 bits.. + + Return + ------ + bool + CPU is x86 32 bits + """ + return self.current_cpu == 'x86' + + def current_dir(self, hidex86=False, x64=False): + """ + Current platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. + x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + str + subfolder: '\target', or '' (see hidex86 parameter) + """ + return ( + '' if (self.current_cpu == 'x86' and hidex86) else + r'\x64' if (self.current_cpu == 'amd64' and x64) else + r'\%s' % self.current_cpu + ) + + def target_dir(self, hidex86=False, x64=False): + r""" + Target platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. + x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + str + subfolder: '\current', or '' (see hidex86 parameter) + """ + return ( + '' if (self.target_cpu == 'x86' and hidex86) else + r'\x64' if (self.target_cpu == 'amd64' and x64) else + r'\%s' % self.target_cpu + ) + + def cross_dir(self, forcex86=False): + r""" + Cross platform specific subfolder. + + Parameters + ---------- + forcex86: bool + Use 'x86' as current architecture even if current architecture is + not x86. + + Return + ------ + str + subfolder: '' if target architecture is current architecture, + '\current_target' if not. + """ + current = 'x86' if forcex86 else self.current_cpu + return ( + '' if self.target_cpu == current else + self.target_dir().replace('\\', '\\%s_' % current) + ) + + +class RegistryInfo: + """ + Microsoft Visual Studio related registry information. + + Parameters + ---------- + platform_info: PlatformInfo + "PlatformInfo" instance. + """ + HKEYS = (winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT) + + def __init__(self, platform_info): + self.pi = platform_info + + @property + def visualstudio(self): + """ + Microsoft Visual Studio root registry key. + + Return + ------ + str + Registry key + """ + return 'VisualStudio' + + @property + def sxs(self): + """ + Microsoft Visual Studio SxS registry key. + + Return + ------ + str + Registry key + """ + return join(self.visualstudio, 'SxS') + + @property + def vc(self): + """ + Microsoft Visual C++ VC7 registry key. + + Return + ------ + str + Registry key + """ + return join(self.sxs, 'VC7') + + @property + def vs(self): + """ + Microsoft Visual Studio VS7 registry key. + + Return + ------ + str + Registry key + """ + return join(self.sxs, 'VS7') + + @property + def vc_for_python(self): + """ + Microsoft Visual C++ for Python registry key. + + Return + ------ + str + Registry key + """ + return r'DevDiv\VCForPython' + + @property + def microsoft_sdk(self): + """ + Microsoft SDK registry key. 
+ + Return + ------ + str + Registry key + """ + return 'Microsoft SDKs' + + @property + def windows_sdk(self): + """ + Microsoft Windows/Platform SDK registry key. + + Return + ------ + str + Registry key + """ + return join(self.microsoft_sdk, 'Windows') + + @property + def netfx_sdk(self): + """ + Microsoft .NET Framework SDK registry key. + + Return + ------ + str + Registry key + """ + return join(self.microsoft_sdk, 'NETFXSDK') + + @property + def windows_kits_roots(self): + """ + Microsoft Windows Kits Roots registry key. + + Return + ------ + str + Registry key + """ + return r'Windows Kits\Installed Roots' + + def microsoft(self, key, x86=False): + """ + Return key in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + x86: str + Force x86 software registry. + + Return + ------ + str + Registry key + """ + node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node' + return join('Software', node64, 'Microsoft', key) + + def lookup(self, key, name): + """ + Look for values in registry in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + name: str + Value name to find. + + Return + ------ + str + value + """ + key_read = winreg.KEY_READ + openkey = winreg.OpenKey + closekey = winreg.CloseKey + ms = self.microsoft + for hkey in self.HKEYS: + bkey = None + try: + bkey = openkey(hkey, ms(key), 0, key_read) + except (OSError, IOError): + if not self.pi.current_is_x86(): + try: + bkey = openkey(hkey, ms(key, True), 0, key_read) + except (OSError, IOError): + continue + else: + continue + try: + return winreg.QueryValueEx(bkey, name)[0] + except (OSError, IOError): + pass + finally: + if bkey: + closekey(bkey) + + +class SystemInfo: + """ + Microsoft Windows and Visual Studio related system information. + + Parameters + ---------- + registry_info: RegistryInfo + "RegistryInfo" instance. + vc_ver: float + Required Microsoft Visual C++ version. + """ + + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparison. + WinDir = environ.get('WinDir', '') + ProgramFiles = environ.get('ProgramFiles', '') + ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles) + + def __init__(self, registry_info, vc_ver=None): + self.ri = registry_info + self.pi = self.ri.pi + + self.known_vs_paths = self.find_programdata_vs_vers() + + # Except for VS15+, VC version is aligned with VS version + self.vs_ver = self.vc_ver = ( + vc_ver or self._find_latest_available_vs_ver()) + + def _find_latest_available_vs_ver(self): + """ + Find the latest VC version + + Return + ------ + float + version + """ + reg_vc_vers = self.find_reg_vs_vers() + + if not (reg_vc_vers or self.known_vs_paths): + raise distutils.errors.DistutilsPlatformError( + 'No Microsoft Visual C++ version found') + + vc_vers = set(reg_vc_vers) + vc_vers.update(self.known_vs_paths) + return sorted(vc_vers)[-1] + + def find_reg_vs_vers(self): + """ + Find Microsoft Visual Studio versions available in registry. 
+ + Return + ------ + list of float + Versions + """ + ms = self.ri.microsoft + vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs) + vs_vers = [] + for hkey, key in itertools.product(self.ri.HKEYS, vckeys): + try: + bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ) + except (OSError, IOError): + continue + with bkey: + subkeys, values, _ = winreg.QueryInfoKey(bkey) + for i in range(values): + with contextlib.suppress(ValueError): + ver = float(winreg.EnumValue(bkey, i)[0]) + if ver not in vs_vers: + vs_vers.append(ver) + for i in range(subkeys): + with contextlib.suppress(ValueError): + ver = float(winreg.EnumKey(bkey, i)) + if ver not in vs_vers: + vs_vers.append(ver) + return sorted(vs_vers) + + def find_programdata_vs_vers(self): + r""" + Find Visual studio 2017+ versions from information in + "C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances". + + Return + ------ + dict + float version as key, path as value. + """ + vs_versions = {} + instances_dir = \ + r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances' + + try: + hashed_names = listdir(instances_dir) + + except (OSError, IOError): + # Directory not exists with all Visual Studio versions + return vs_versions + + for name in hashed_names: + try: + # Get VS installation path from "state.json" file + state_path = join(instances_dir, name, 'state.json') + with open(state_path, 'rt', encoding='utf-8') as state_file: + state = json.load(state_file) + vs_path = state['installationPath'] + + # Raises OSError if this VS installation does not contain VC + listdir(join(vs_path, r'VC\Tools\MSVC')) + + # Store version and path + vs_versions[self._as_float_version( + state['installationVersion'])] = vs_path + + except (OSError, IOError, KeyError): + # Skip if "state.json" file is missing or bad format + continue + + return vs_versions + + @staticmethod + def _as_float_version(version): + """ + Return a string version as a simplified float version (major.minor) + + Parameters + ---------- + version: str + Version. + + Return + ------ + float + version + """ + return float('.'.join(version.split('.')[:2])) + + @property + def VSInstallDir(self): + """ + Microsoft Visual Studio directory. + + Return + ------ + str + path + """ + # Default path + default = join(self.ProgramFilesx86, + 'Microsoft Visual Studio %0.1f' % self.vs_ver) + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default + + @property + def VCInstallDir(self): + """ + Microsoft Visual C++ directory. + + Return + ------ + str + path + """ + path = self._guess_vc() or self._guess_vc_legacy() + + if not isdir(path): + msg = 'Microsoft Visual C++ directory not found' + raise distutils.errors.DistutilsPlatformError(msg) + + return path + + def _guess_vc(self): + """ + Locate Visual C++ for VS2017+. + + Return + ------ + str + path + """ + if self.vs_ver <= 14.0: + return '' + + try: + # First search in known VS paths + vs_dir = self.known_vs_paths[self.vs_ver] + except KeyError: + # Else, search with path from registry + vs_dir = self.VSInstallDir + + guess_vc = join(vs_dir, r'VC\Tools\MSVC') + + # Subdir with VC exact version as name + try: + # Update the VC version with real one instead of VS version + vc_ver = listdir(guess_vc)[-1] + self.vc_ver = self._as_float_version(vc_ver) + return join(guess_vc, vc_ver) + except (OSError, IOError, IndexError): + return '' + + def _guess_vc_legacy(self): + """ + Locate Visual C++ for versions prior to 2017. 
+ + Return + ------ + str + path + """ + default = join(self.ProgramFilesx86, + r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver) + + # Try to get "VC++ for Python" path from registry as default path + reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver) + python_vc = self.ri.lookup(reg_path, 'installdir') + default_vc = join(python_vc, 'VC') if python_vc else default + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc + + @property + def WindowsSdkVersion(self): + """ + Microsoft Windows SDK versions for specified MSVC++ version. + + Return + ------ + tuple of str + versions + """ + if self.vs_ver <= 9.0: + return '7.0', '6.1', '6.0a' + elif self.vs_ver == 10.0: + return '7.1', '7.0a' + elif self.vs_ver == 11.0: + return '8.0', '8.0a' + elif self.vs_ver == 12.0: + return '8.1', '8.1a' + elif self.vs_ver >= 14.0: + return '10.0', '8.1' + + @property + def WindowsSdkLastVersion(self): + """ + Microsoft Windows SDK last version. + + Return + ------ + str + version + """ + return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib')) + + @property # noqa: C901 + def WindowsSdkDir(self): # noqa: C901 # is too complex (12) # FIXME + """ + Microsoft Windows SDK directory. + + Return + ------ + str + path + """ + sdkdir = '' + for ver in self.WindowsSdkVersion: + # Try to get it from registry + loc = join(self.ri.windows_sdk, 'v%s' % ver) + sdkdir = self.ri.lookup(loc, 'installationfolder') + if sdkdir: + break + if not sdkdir or not isdir(sdkdir): + # Try to get "VC++ for Python" version from registry + path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) + install_base = self.ri.lookup(path, 'installdir') + if install_base: + sdkdir = join(install_base, 'WinSDK') + if not sdkdir or not isdir(sdkdir): + # If fail, use default new path + for ver in self.WindowsSdkVersion: + intver = ver[:ver.rfind('.')] + path = r'Microsoft SDKs\Windows Kits\%s' % intver + d = join(self.ProgramFiles, path) + if isdir(d): + sdkdir = d + if not sdkdir or not isdir(sdkdir): + # If fail, use default old path + for ver in self.WindowsSdkVersion: + path = r'Microsoft SDKs\Windows\v%s' % ver + d = join(self.ProgramFiles, path) + if isdir(d): + sdkdir = d + if not sdkdir: + # If fail, use Platform SDK + sdkdir = join(self.VCInstallDir, 'PlatformSDK') + return sdkdir + + @property + def WindowsSDKExecutablePath(self): + """ + Microsoft Windows SDK executable directory. + + Return + ------ + str + path + """ + # Find WinSDK NetFx Tools registry dir name + if self.vs_ver <= 11.0: + netfxver = 35 + arch = '' + else: + netfxver = 40 + hidex86 = True if self.vs_ver <= 12.0 else False + arch = self.pi.current_dir(x64=True, hidex86=hidex86) + fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-')) + + # list all possibles registry paths + regpaths = [] + if self.vs_ver >= 14.0: + for ver in self.NetFxSdkVersion: + regpaths += [join(self.ri.netfx_sdk, ver, fx)] + + for ver in self.WindowsSdkVersion: + regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)] + + # Return installation folder from the more recent path + for path in regpaths: + execpath = self.ri.lookup(path, 'installationfolder') + if execpath: + return execpath + + @property + def FSharpInstallDir(self): + """ + Microsoft Visual F# directory. 
+ + Return + ------ + str + path + """ + path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver) + return self.ri.lookup(path, 'productdir') or '' + + @property + def UniversalCRTSdkDir(self): + """ + Microsoft Universal CRT SDK directory. + + Return + ------ + str + path + """ + # Set Kit Roots versions for specified MSVC++ version + vers = ('10', '81') if self.vs_ver >= 14.0 else () + + # Find path of the more recent Kit + for ver in vers: + sdkdir = self.ri.lookup(self.ri.windows_kits_roots, + 'kitsroot%s' % ver) + if sdkdir: + return sdkdir or '' + + @property + def UniversalCRTSdkLastVersion(self): + """ + Microsoft Universal C Runtime SDK last version. + + Return + ------ + str + version + """ + return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib')) + + @property + def NetFxSdkVersion(self): + """ + Microsoft .NET Framework SDK versions. + + Return + ------ + tuple of str + versions + """ + # Set FxSdk versions for specified VS version + return (('4.7.2', '4.7.1', '4.7', + '4.6.2', '4.6.1', '4.6', + '4.5.2', '4.5.1', '4.5') + if self.vs_ver >= 14.0 else ()) + + @property + def NetFxSdkDir(self): + """ + Microsoft .NET Framework SDK directory. + + Return + ------ + str + path + """ + sdkdir = '' + for ver in self.NetFxSdkVersion: + loc = join(self.ri.netfx_sdk, ver) + sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') + if sdkdir: + break + return sdkdir + + @property + def FrameworkDir32(self): + """ + Microsoft .NET Framework 32bit directory. + + Return + ------ + str + path + """ + # Default path + guess_fw = join(self.WinDir, r'Microsoft.NET\Framework') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw + + @property + def FrameworkDir64(self): + """ + Microsoft .NET Framework 64bit directory. + + Return + ------ + str + path + """ + # Default path + guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw + + @property + def FrameworkVersion32(self): + """ + Microsoft .NET Framework 32bit versions. + + Return + ------ + tuple of str + versions + """ + return self._find_dot_net_versions(32) + + @property + def FrameworkVersion64(self): + """ + Microsoft .NET Framework 64bit versions. + + Return + ------ + tuple of str + versions + """ + return self._find_dot_net_versions(64) + + def _find_dot_net_versions(self, bits): + """ + Find Microsoft .NET Framework versions. + + Parameters + ---------- + bits: int + Platform number of bits: 32 or 64. + + Return + ------ + tuple of str + versions + """ + # Find actual .NET version in registry + reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits) + dot_net_dir = getattr(self, 'FrameworkDir%d' % bits) + ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or '' + + # Set .NET versions for specified MSVC++ version + if self.vs_ver >= 12.0: + return ver, 'v4.0' + elif self.vs_ver >= 10.0: + return 'v4.0.30319' if ver.lower()[:2] != 'v4' else ver, 'v3.5' + elif self.vs_ver == 9.0: + return 'v3.5', 'v2.0.50727' + elif self.vs_ver == 8.0: + return 'v3.0', 'v2.0.50727' + + @staticmethod + def _use_last_dir_name(path, prefix=''): + """ + Return name of the last dir in path or '' if no dir found. 
+ + Parameters + ---------- + path: str + Use dirs in this path + prefix: str + Use only dirs starting by this prefix + + Return + ------ + str + name + """ + matching_dirs = ( + dir_name + for dir_name in reversed(listdir(path)) + if isdir(join(path, dir_name)) and + dir_name.startswith(prefix) + ) + return next(matching_dirs, None) or '' + + +class EnvironmentInfo: + """ + Return environment variables for specified Microsoft Visual C++ version + and platform : Lib, Include, Path and libpath. + + This function is compatible with Microsoft Visual C++ 9.0 to 14.X. + + Script created by analysing Microsoft environment configuration files like + "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ... + + Parameters + ---------- + arch: str + Target architecture. + vc_ver: float + Required Microsoft Visual C++ version. If not set, autodetect the last + version. + vc_min_ver: float + Minimum Microsoft Visual C++ version. + """ + + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparison. + + def __init__(self, arch, vc_ver=None, vc_min_ver=0): + self.pi = PlatformInfo(arch) + self.ri = RegistryInfo(self.pi) + self.si = SystemInfo(self.ri, vc_ver) + + if self.vc_ver < vc_min_ver: + err = 'No suitable Microsoft Visual C++ version found' + raise distutils.errors.DistutilsPlatformError(err) + + @property + def vs_ver(self): + """ + Microsoft Visual Studio. + + Return + ------ + float + version + """ + return self.si.vs_ver + + @property + def vc_ver(self): + """ + Microsoft Visual C++ version. + + Return + ------ + float + version + """ + return self.si.vc_ver + + @property + def VSTools(self): + """ + Microsoft Visual Studio Tools. + + Return + ------ + list of str + paths + """ + paths = [r'Common7\IDE', r'Common7\Tools'] + + if self.vs_ver >= 14.0: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow'] + paths += [r'Team Tools\Performance Tools'] + paths += [r'Team Tools\Performance Tools%s' % arch_subdir] + + return [join(self.si.VSInstallDir, path) for path in paths] + + @property + def VCIncludes(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Includes. + + Return + ------ + list of str + paths + """ + return [join(self.si.VCInstallDir, 'Include'), + join(self.si.VCInstallDir, r'ATLMFC\Include')] + + @property + def VCLibraries(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Libraries. + + Return + ------ + list of str + paths + """ + if self.vs_ver >= 15.0: + arch_subdir = self.pi.target_dir(x64=True) + else: + arch_subdir = self.pi.target_dir(hidex86=True) + paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir] + + if self.vs_ver >= 14.0: + paths += [r'Lib\store%s' % arch_subdir] + + return [join(self.si.VCInstallDir, path) for path in paths] + + @property + def VCStoreRefs(self): + """ + Microsoft Visual C++ store references Libraries. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 14.0: + return [] + return [join(self.si.VCInstallDir, r'Lib\store\references')] + + @property + def VCTools(self): + """ + Microsoft Visual C++ Tools. 
+ + Return + ------ + list of str + paths + """ + si = self.si + tools = [join(si.VCInstallDir, 'VCPackages')] + + forcex86 = True if self.vs_ver <= 10.0 else False + arch_subdir = self.pi.cross_dir(forcex86) + if arch_subdir: + tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)] + + if self.vs_ver == 14.0: + path = 'Bin%s' % self.pi.current_dir(hidex86=True) + tools += [join(si.VCInstallDir, path)] + + elif self.vs_ver >= 15.0: + host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else + r'bin\HostX64%s') + tools += [join( + si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))] + + if self.pi.current_cpu != self.pi.target_cpu: + tools += [join( + si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))] + + else: + tools += [join(si.VCInstallDir, 'Bin')] + + return tools + + @property + def OSLibraries(self): + """ + Microsoft Windows SDK Libraries. + + Return + ------ + list of str + paths + """ + if self.vs_ver <= 10.0: + arch_subdir = self.pi.target_dir(hidex86=True, x64=True) + return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] + + else: + arch_subdir = self.pi.target_dir(x64=True) + lib = join(self.si.WindowsSdkDir, 'lib') + libver = self._sdk_subdir + return [join(lib, '%sum%s' % (libver, arch_subdir))] + + @property + def OSIncludes(self): + """ + Microsoft Windows SDK Include. + + Return + ------ + list of str + paths + """ + include = join(self.si.WindowsSdkDir, 'include') + + if self.vs_ver <= 10.0: + return [include, join(include, 'gl')] + + else: + if self.vs_ver >= 14.0: + sdkver = self._sdk_subdir + else: + sdkver = '' + return [join(include, '%sshared' % sdkver), + join(include, '%sum' % sdkver), + join(include, '%swinrt' % sdkver)] + + @property + def OSLibpath(self): + """ + Microsoft Windows SDK Libraries Paths. + + Return + ------ + list of str + paths + """ + ref = join(self.si.WindowsSdkDir, 'References') + libpath = [] + + if self.vs_ver <= 9.0: + libpath += self.OSLibraries + + if self.vs_ver >= 11.0: + libpath += [join(ref, r'CommonConfiguration\Neutral')] + + if self.vs_ver >= 14.0: + libpath += [ + ref, + join(self.si.WindowsSdkDir, 'UnionMetadata'), + join( + ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'), + join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'), + join( + ref, 'Windows.Networking.Connectivity.WwanContract', + '1.0.0.0'), + join( + self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs', + '%0.1f' % self.vs_ver, 'References', 'CommonConfiguration', + 'neutral'), + ] + return libpath + + @property + def SdkTools(self): + """ + Microsoft Windows SDK Tools. + + Return + ------ + list of str + paths + """ + return list(self._sdk_tools()) + + def _sdk_tools(self): + """ + Microsoft Windows SDK Tools paths generator. 
+ + Return + ------ + generator of str + paths + """ + if self.vs_ver < 15.0: + bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86' + yield join(self.si.WindowsSdkDir, bin_dir) + + if not self.pi.current_is_x86(): + arch_subdir = self.pi.current_dir(x64=True) + path = 'Bin%s' % arch_subdir + yield join(self.si.WindowsSdkDir, path) + + if self.vs_ver in (10.0, 11.0): + if self.pi.target_is_x86(): + arch_subdir = '' + else: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir + yield join(self.si.WindowsSdkDir, path) + + elif self.vs_ver >= 15.0: + path = join(self.si.WindowsSdkDir, 'Bin') + arch_subdir = self.pi.current_dir(x64=True) + sdkver = self.si.WindowsSdkLastVersion + yield join(path, '%s%s' % (sdkver, arch_subdir)) + + if self.si.WindowsSDKExecutablePath: + yield self.si.WindowsSDKExecutablePath + + @property + def _sdk_subdir(self): + """ + Microsoft Windows SDK version subdir. + + Return + ------ + str + subdir + """ + ucrtver = self.si.WindowsSdkLastVersion + return ('%s\\' % ucrtver) if ucrtver else '' + + @property + def SdkSetup(self): + """ + Microsoft Windows SDK Setup. + + Return + ------ + list of str + paths + """ + if self.vs_ver > 9.0: + return [] + + return [join(self.si.WindowsSdkDir, 'Setup')] + + @property + def FxTools(self): + """ + Microsoft .NET Framework Tools. + + Return + ------ + list of str + paths + """ + pi = self.pi + si = self.si + + if self.vs_ver <= 10.0: + include32 = True + include64 = not pi.target_is_x86() and not pi.current_is_x86() + else: + include32 = pi.target_is_x86() or pi.current_is_x86() + include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64' + + tools = [] + if include32: + tools += [join(si.FrameworkDir32, ver) + for ver in si.FrameworkVersion32] + if include64: + tools += [join(si.FrameworkDir64, ver) + for ver in si.FrameworkVersion64] + return tools + + @property + def NetFxSDKLibraries(self): + """ + Microsoft .Net Framework SDK Libraries. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 14.0 or not self.si.NetFxSdkDir: + return [] + + arch_subdir = self.pi.target_dir(x64=True) + return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)] + + @property + def NetFxSDKIncludes(self): + """ + Microsoft .Net Framework SDK Includes. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 14.0 or not self.si.NetFxSdkDir: + return [] + + return [join(self.si.NetFxSdkDir, r'include\um')] + + @property + def VsTDb(self): + """ + Microsoft Visual Studio Team System Database. + + Return + ------ + list of str + paths + """ + return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')] + + @property + def MSBuild(self): + """ + Microsoft Build Engine. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 12.0: + return [] + elif self.vs_ver < 15.0: + base_path = self.si.ProgramFilesx86 + arch_subdir = self.pi.current_dir(hidex86=True) + else: + base_path = self.si.VSInstallDir + arch_subdir = '' + + path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir) + build = [join(base_path, path)] + + if self.vs_ver >= 15.0: + # Add Roslyn C# & Visual Basic Compiler + build += [join(base_path, path, 'Roslyn')] + + return build + + @property + def HTMLHelpWorkshop(self): + """ + Microsoft HTML Help Workshop. 
+ + Return + ------ + list of str + paths + """ + if self.vs_ver < 11.0: + return [] + + return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')] + + @property + def UCRTLibraries(self): + """ + Microsoft Universal C Runtime SDK Libraries. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 14.0: + return [] + + arch_subdir = self.pi.target_dir(x64=True) + lib = join(self.si.UniversalCRTSdkDir, 'lib') + ucrtver = self._ucrt_subdir + return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))] + + @property + def UCRTIncludes(self): + """ + Microsoft Universal C Runtime SDK Include. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 14.0: + return [] + + include = join(self.si.UniversalCRTSdkDir, 'include') + return [join(include, '%sucrt' % self._ucrt_subdir)] + + @property + def _ucrt_subdir(self): + """ + Microsoft Universal C Runtime SDK version subdir. + + Return + ------ + str + subdir + """ + ucrtver = self.si.UniversalCRTSdkLastVersion + return ('%s\\' % ucrtver) if ucrtver else '' + + @property + def FSharp(self): + """ + Microsoft Visual F#. + + Return + ------ + list of str + paths + """ + if 11.0 > self.vs_ver > 12.0: + return [] + + return [self.si.FSharpInstallDir] + + @property + def VCRuntimeRedist(self): + """ + Microsoft Visual C++ runtime redistributable dll. + + Return + ------ + str + path + """ + vcruntime = 'vcruntime%d0.dll' % self.vc_ver + arch_subdir = self.pi.target_dir(x64=True).strip('\\') + + # Installation prefixes candidates + prefixes = [] + tools_path = self.si.VCInstallDir + redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist')) + if isdir(redist_path): + # Redist version may not be exactly the same as tools + redist_path = join(redist_path, listdir(redist_path)[-1]) + prefixes += [redist_path, join(redist_path, 'onecore')] + + prefixes += [join(tools_path, 'redist')] # VS14 legacy path + + # CRT directory + crt_dirs = ('Microsoft.VC%d.CRT' % (self.vc_ver * 10), + # Sometime store in directory with VS version instead of VC + 'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10)) + + # vcruntime path + for prefix, crt_dir in itertools.product(prefixes, crt_dirs): + path = join(prefix, arch_subdir, crt_dir, vcruntime) + if isfile(path): + return path + + def return_env(self, exists=True): + """ + Return environment dict. + + Parameters + ---------- + exists: bool + It True, only return existing paths. + + Return + ------ + dict + environment + """ + env = dict( + include=self._build_paths('include', + [self.VCIncludes, + self.OSIncludes, + self.UCRTIncludes, + self.NetFxSDKIncludes], + exists), + lib=self._build_paths('lib', + [self.VCLibraries, + self.OSLibraries, + self.FxTools, + self.UCRTLibraries, + self.NetFxSDKLibraries], + exists), + libpath=self._build_paths('libpath', + [self.VCLibraries, + self.FxTools, + self.VCStoreRefs, + self.OSLibpath], + exists), + path=self._build_paths('path', + [self.VCTools, + self.VSTools, + self.VsTDb, + self.SdkTools, + self.SdkSetup, + self.FxTools, + self.MSBuild, + self.HTMLHelpWorkshop, + self.FSharp], + exists), + ) + if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist): + env['py_vcruntime_redist'] = self.VCRuntimeRedist + return env + + def _build_paths(self, name, spec_path_lists, exists): + """ + Given an environment variable name and specified paths, + return a pathsep-separated string of paths containing + unique, extant, directories from those paths and from + the environment variable. Raise an error if no paths + are resolved. 
+ + Parameters + ---------- + name: str + Environment variable name + spec_path_lists: list of str + Paths + exists: bool + It True, only return existing paths. + + Return + ------ + str + Pathsep-separated paths + """ + # flatten spec_path_lists + spec_paths = itertools.chain.from_iterable(spec_path_lists) + env_paths = environ.get(name, '').split(pathsep) + paths = itertools.chain(spec_paths, env_paths) + extant_paths = list(filter(isdir, paths)) if exists else paths + if not extant_paths: + msg = "%s environment variable is empty" % name.upper() + raise distutils.errors.DistutilsPlatformError(msg) + unique_paths = unique_everseen(extant_paths) + return pathsep.join(unique_paths) diff --git a/env/lib/python2.7/site-packages/setuptools/namespaces.py b/venv/Lib/site-packages/setuptools/namespaces.py similarity index 88% rename from env/lib/python2.7/site-packages/setuptools/namespaces.py rename to venv/Lib/site-packages/setuptools/namespaces.py index dc16106d..44939e1c 100644 --- a/env/lib/python2.7/site-packages/setuptools/namespaces.py +++ b/venv/Lib/site-packages/setuptools/namespaces.py @@ -2,8 +2,6 @@ from distutils import log import itertools -from setuptools.extern.six.moves import map - flatten = itertools.chain.from_iterable @@ -47,13 +45,17 @@ def _get_target(self): "p = os.path.join(%(root)s, *%(pth)r)", "importlib = has_mfs and __import__('importlib.util')", "has_mfs and __import__('importlib.machinery')", - "m = has_mfs and " + ( + "m = has_mfs and " "sys.modules.setdefault(%(pkg)r, " - "importlib.util.module_from_spec(" - "importlib.machinery.PathFinder.find_spec(%(pkg)r, " - "[os.path.dirname(p)])))", - "m = m or " - "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))", + "importlib.util.module_from_spec(" + "importlib.machinery.PathFinder.find_spec(%(pkg)r, " + "[os.path.dirname(p)])))" + ), + ( + "m = m or " + "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))" + ), "mp = (m or []) and m.__dict__.setdefault('__path__',[])", "(p not in mp) and mp.append(p)", ) @@ -68,8 +70,6 @@ def _get_root(self): return "sys._getframe(1).f_locals['sitedir']" def _gen_nspkg_line(self, pkg): - # ensure pkg is not a unicode string under Python 2.7 - pkg = str(pkg) pth = tuple(pkg.split('.')) root = self._get_root() tmpl_lines = self._nspkg_tmpl diff --git a/env/lib/python2.7/site-packages/setuptools/package_index.py b/venv/Lib/site-packages/setuptools/package_index.py similarity index 91% rename from env/lib/python2.7/site-packages/setuptools/package_index.py rename to venv/Lib/site-packages/setuptools/package_index.py index 6b06f2ca..d1f13378 100644 --- a/env/lib/python2.7/site-packages/setuptools/package_index.py +++ b/venv/Lib/site-packages/setuptools/package_index.py @@ -2,17 +2,21 @@ import sys import os import re +import io import shutil import socket import base64 import hashlib import itertools import warnings +import configparser +import html +import http.client +import urllib.parse +import urllib.request +import urllib.error from functools import wraps -from setuptools.extern import six -from setuptools.extern.six.moves import urllib, http_client, configparser, map - import setuptools from pkg_resources import ( CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, @@ -23,11 +27,9 @@ from distutils import log from distutils.errors import DistutilsError from fnmatch import translate -from setuptools.py27compat import get_all_headers -from setuptools.py33compat import unescape from setuptools.wheel import Wheel +from setuptools.extern.more_itertools 
import unique_everseen -__metaclass__ = type EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$') HREF = re.compile(r"""href\s*=\s*['"]?([^'"> ]+)""", re.I) @@ -46,16 +48,17 @@ _SOCKET_TIMEOUT = 15 _tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}" -user_agent = _tmpl.format(py_major=sys.version[:3], setuptools=setuptools) +user_agent = _tmpl.format( + py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools) def parse_requirement_arg(spec): try: return Requirement.parse(spec) - except ValueError: + except ValueError as e: raise DistutilsError( "Not a URL, existing file, or requirement spec: %r" % (spec,) - ) + ) from e def parse_bdist_wininst(name): @@ -160,7 +163,7 @@ def interpret_distro_name( # Generate alternative interpretations of a source distro name # Because some packages are ambiguous as to name/versions split # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. - # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" + # So, we generate each possible interpretation (e.g. "adns, python-1.1.0" # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, # the spurious interpretations should be ignored, because in the event # there's also an "adns" package, the spurious "python-1.1.0" version will @@ -182,25 +185,6 @@ def interpret_distro_name( ) -# From Python 2.7 docs -def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in six.moves.filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element - - def unique_values(func): """ Wrap a function returning an iterable such that the resulting iterable @@ -319,7 +303,8 @@ def __init__( else: self.opener = urllib.request.urlopen - def process_url(self, url, retrieve=False): + # FIXME: 'PackageIndex.process_url' is too complex (14) + def process_url(self, url, retrieve=False): # noqa: C901 """Evaluate a URL as a possible download, and maybe retrieve it""" if url in self.scanned_urls and not retrieve: return @@ -348,6 +333,8 @@ def process_url(self, url, retrieve=False): f = self.open_url(url, tmpl % url) if f is None: return + if isinstance(f, urllib.error.HTTPError) and f.code == 401: + self.info("Authentication error: %s" % f.msg) self.fetched_urls[f.url] = True if 'html' not in f.headers.get('content-type', '').lower(): f.close() # not html, we can't process it @@ -425,49 +412,53 @@ def scan_egg_link(self, path, entry): dist.precedence = SOURCE_DIST self.add(dist) + def _scan(self, link): + # Process a URL to see if it's for a package page + NO_MATCH_SENTINEL = None, None + if not link.startswith(self.index_url): + return NO_MATCH_SENTINEL + + parts = list(map( + urllib.parse.unquote, link[len(self.index_url):].split('/') + )) + if len(parts) != 2 or '#' in parts[1]: + return NO_MATCH_SENTINEL + + # it's a package page, sanitize and index it + pkg = safe_name(parts[0]) + ver = safe_version(parts[1]) + self.package_pages.setdefault(pkg.lower(), {})[link] = True + return to_filename(pkg), to_filename(ver) + def process_index(self, url, page): """Process the contents of a PyPI page""" - def scan(link): - # Process a URL to see if it's for a package page - if link.startswith(self.index_url): - parts = list(map( - 
urllib.parse.unquote, link[len(self.index_url):].split('/') - )) - if len(parts) == 2 and '#' not in parts[1]: - # it's a package page, sanitize and index it - pkg = safe_name(parts[0]) - ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(), {})[link] = True - return to_filename(pkg), to_filename(ver) - return None, None - # process an index page into the package-page index for match in HREF.finditer(page): try: - scan(urllib.parse.urljoin(url, htmldecode(match.group(1)))) + self._scan(urllib.parse.urljoin(url, htmldecode(match.group(1)))) except ValueError: pass - pkg, ver = scan(url) # ensure this page is in the page index - if pkg: - # process individual package page - for new_url in find_external_links(url, page): - # Process the found URL - base, frag = egg_info_for_url(new_url) - if base.endswith('.py') and not frag: - if ver: - new_url += '#egg=%s-%s' % (pkg, ver) - else: - self.need_version_info(url) - self.scan_url(new_url) - - return PYPI_MD5.sub( - lambda m: '%s' % m.group(1, 3, 2), page - ) - else: + pkg, ver = self._scan(url) # ensure this page is in the page index + if not pkg: return "" # no sense double-scanning non-package pages + # process individual package page + for new_url in find_external_links(url, page): + # Process the found URL + base, frag = egg_info_for_url(new_url) + if base.endswith('.py') and not frag: + if ver: + new_url += '#egg=%s-%s' % (pkg, ver) + else: + self.need_version_info(url) + self.scan_url(new_url) + + return PYPI_MD5.sub( + lambda m: '%s' % m.group(1, 3, 2), page + ) + def need_version_info(self, url): self.scan_all( "Page at %s links to .py file(s) without version info; an index " @@ -588,7 +579,7 @@ def download(self, spec, tmpdir): spec = parse_requirement_arg(spec) return getattr(self.fetch_distribution(spec, tmpdir), 'location', None) - def fetch_distribution( + def fetch_distribution( # noqa: C901 # is too complex (14) # FIXME self, requirement, tmpdir, force_scan=False, source=False, develop_ok=False, local_index=None): """Obtain a distribution suitable for fulfilling `requirement` @@ -737,7 +728,7 @@ def _download_to(self, url, filename): size = -1 if "content-length" in headers: # Some servers return multiple Content-Length headers :( - sizes = get_all_headers(headers, 'Content-Length') + sizes = headers.get_all('Content-Length') size = max(map(int, sizes)) self.reporthook(url, filename, blocknum, bs, size) with open(filename, 'wb') as tfp: @@ -759,17 +750,18 @@ def _download_to(self, url, filename): def reporthook(self, url, filename, blocknum, blksize, size): pass # no-op - def open_url(self, url, warning=None): + # FIXME: + def open_url(self, url, warning=None): # noqa: C901 # is too complex (12) if url.startswith('file:'): return local_open(url) try: return open_with_auth(url, self.opener) - except (ValueError, http_client.InvalidURL) as v: + except (ValueError, http.client.InvalidURL) as v: msg = ' '.join([str(arg) for arg in v.args]) if warning: self.warn(warning, msg) else: - raise DistutilsError('%s %s' % (url, msg)) + raise DistutilsError('%s %s' % (url, msg)) from v except urllib.error.HTTPError as v: return v except urllib.error.URLError as v: @@ -777,8 +769,8 @@ def open_url(self, url, warning=None): self.warn(warning, v.reason) else: raise DistutilsError("Download error for %s: %s" - % (url, v.reason)) - except http_client.BadStatusLine as v: + % (url, v.reason)) from v + except http.client.BadStatusLine as v: if warning: self.warn(warning, v.line) else: @@ -786,13 +778,13 @@ def open_url(self, url, 
warning=None): '%s returned a bad status line. The server might be ' 'down, %s' % (url, v.line) - ) - except (http_client.HTTPException, socket.error) as v: + ) from v + except (http.client.HTTPException, socket.error) as v: if warning: self.warn(warning, v) else: raise DistutilsError("Download error for %s: %s" - % (url, v)) + % (url, v)) from v def _download_url(self, scheme, url, tmpdir): # Determine download filename @@ -937,7 +929,7 @@ def warn(self, msg, *args): def decode_entity(match): what = match.group(0) - return unescape(what) + return html.unescape(what) def htmldecode(text): @@ -969,8 +961,7 @@ def _socket_timeout(*args, **kwargs): def _encode_auth(auth): """ - A function compatible with Python 2.3-3.3 that will encode - auth from a URL suitable for an HTTP header. + Encode auth from a URL suitable for an HTTP header. >>> str(_encode_auth('username%3Apassword')) 'dXNlcm5hbWU6cGFzc3dvcmQ=' @@ -1050,10 +1041,10 @@ def open_with_auth(url, opener=urllib.request.urlopen): parsed = urllib.parse.urlparse(url) scheme, netloc, path, params, query, frag = parsed - # Double scheme does not raise on Mac OS X as revealed by a + # Double scheme does not raise on macOS as revealed by a # failing test. We would expect "nonnumeric port". Refs #20. if netloc.endswith(':'): - raise http_client.InvalidURL("nonnumeric port: ''") + raise http.client.InvalidURL("nonnumeric port: ''") if scheme in ('http', 'https'): auth, address = _splituser(netloc) @@ -1092,7 +1083,8 @@ def open_with_auth(url, opener=urllib.request.urlopen): # copy of urllib.parse._splituser from Python 3.8 def _splituser(host): - """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + """splituser('user[:passwd]@host[:port]') + --> 'user[:passwd]', 'host[:port]'.""" user, delim, host = host.rpartition('@') return (user if delim else None), host @@ -1132,5 +1124,5 @@ def local_open(url): status, message, body = 404, "Path not found", "Not found" headers = {'content-type': 'text/html'} - body_stream = six.StringIO(body) + body_stream = io.StringIO(body) return urllib.error.HTTPError(url, status, message, headers, body_stream) diff --git a/venv/Lib/site-packages/setuptools/py34compat.py b/venv/Lib/site-packages/setuptools/py34compat.py new file mode 100644 index 00000000..3ad91722 --- /dev/null +++ b/venv/Lib/site-packages/setuptools/py34compat.py @@ -0,0 +1,13 @@ +import importlib + +try: + import importlib.util +except ImportError: + pass + + +try: + module_from_spec = importlib.util.module_from_spec +except AttributeError: + def module_from_spec(spec): + return spec.loader.load_module(spec.name) diff --git a/env/lib/python2.7/site-packages/setuptools/sandbox.py b/venv/Lib/site-packages/setuptools/sandbox.py similarity index 94% rename from env/lib/python2.7/site-packages/setuptools/sandbox.py rename to venv/Lib/site-packages/setuptools/sandbox.py index 685f3f72..91b960d8 100644 --- a/env/lib/python2.7/site-packages/setuptools/sandbox.py +++ b/venv/Lib/site-packages/setuptools/sandbox.py @@ -8,11 +8,11 @@ import contextlib import pickle import textwrap +import builtins -from setuptools.extern import six -from setuptools.extern.six.moves import builtins, map - -import pkg_resources.py31compat +import pkg_resources +from distutils.errors import DistutilsError +from pkg_resources import working_set if sys.platform.startswith('java'): import org.python.modules.posix.PosixModule as _os @@ -23,8 +23,6 @@ except NameError: _file = None _open = open -from distutils.errors import DistutilsError -from 
pkg_resources import working_set __all__ = [ @@ -70,7 +68,7 @@ def override_temp(replacement): """ Monkey-patch tempfile.tempdir with replacement, ensuring it exists """ - pkg_resources.py31compat.makedirs(replacement, exist_ok=True) + os.makedirs(replacement, exist_ok=True) saved = tempfile.tempdir @@ -138,7 +136,7 @@ def resume(self): return type, exc = map(pickle.loads, self._saved) - six.reraise(type, exc, self._tb) + raise exc.with_traceback(self._tb) @contextlib.contextmanager @@ -185,8 +183,8 @@ def setup_context(setup_dir): temp_dir = os.path.join(setup_dir, 'temp') with save_pkg_resources_state(): with save_modules(): - hide_setuptools() with save_path(): + hide_setuptools() with save_argv(): with override_temp(temp_dir): with pushd(setup_dir): @@ -195,6 +193,15 @@ def setup_context(setup_dir): yield +_MODULES_TO_HIDE = { + 'setuptools', + 'distutils', + 'pkg_resources', + 'Cython', + '_distutils_hack', +} + + def _needs_hiding(mod_name): """ >>> _needs_hiding('setuptools') @@ -212,8 +219,8 @@ def _needs_hiding(mod_name): >>> _needs_hiding('Cython') True """ - pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)') - return bool(pattern.match(mod_name)) + base_module = mod_name.split('.', 1)[0] + return base_module in _MODULES_TO_HIDE def hide_setuptools(): @@ -223,6 +230,10 @@ def hide_setuptools(): necessary to avoid issues such as #315 where setuptools upgrading itself would fail to find a function declared in the metadata. """ + _distutils_hack = sys.modules.get('_distutils_hack', None) + if _distutils_hack is not None: + _distutils_hack.remove_shim() + modules = filter(_needs_hiding, sys.modules) _clear_modules(modules) @@ -238,15 +249,8 @@ def run_setup(setup_script, args): working_set.__init__() working_set.callbacks.append(lambda dist: dist.activate()) - # __file__ should be a byte string on Python 2 (#712) - dunder_file = ( - setup_script - if isinstance(setup_script, str) else - setup_script.encode(sys.getfilesystemencoding()) - ) - with DirectorySandbox(setup_dir): - ns = dict(__file__=dunder_file, __name__='__main__') + ns = dict(__file__=setup_script, __name__='__main__') _execfile(setup_script, ns) except SystemExit as v: if v.args and v.args[0]: @@ -374,7 +378,7 @@ def _remap_pair(self, operation, src, dst, *args, **kw): if hasattr(os, 'devnull'): - _EXCEPTIONS = [os.devnull,] + _EXCEPTIONS = [os.devnull] else: _EXCEPTIONS = [] @@ -466,7 +470,8 @@ def open(self, file, flags, mode=0o777, *args, **kw): WRITE_FLAGS = functools.reduce( - operator.or_, [getattr(_os, a, 0) for a in + operator.or_, [ + getattr(_os, a, 0) for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] ) diff --git a/env/lib/python2.7/site-packages/setuptools/script (dev).tmpl b/venv/Lib/site-packages/setuptools/script (dev).tmpl similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/script (dev).tmpl rename to venv/Lib/site-packages/setuptools/script (dev).tmpl diff --git a/env/lib/python2.7/site-packages/setuptools/script.tmpl b/venv/Lib/site-packages/setuptools/script.tmpl similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/script.tmpl rename to venv/Lib/site-packages/setuptools/script.tmpl diff --git a/env/lib/python2.7/site-packages/setuptools/ssl_support.py b/venv/Lib/site-packages/setuptools/ssl_support.py similarity index 91% rename from env/lib/python2.7/site-packages/setuptools/ssl_support.py rename to venv/Lib/site-packages/setuptools/ssl_support.py index 226db694..b58cca37 100644 --- 
a/env/lib/python2.7/site-packages/setuptools/ssl_support.py +++ b/venv/Lib/site-packages/setuptools/ssl_support.py @@ -3,8 +3,9 @@ import atexit import re import functools +import urllib.request +import http.client -from setuptools.extern.six.moves import urllib, http_client, map, filter from pkg_resources import ResolutionError, ExtractionError @@ -31,11 +32,12 @@ try: HTTPSHandler = urllib.request.HTTPSHandler - HTTPSConnection = http_client.HTTPSConnection + HTTPSConnection = http.client.HTTPSConnection except AttributeError: HTTPSHandler = HTTPSConnection = object -is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) +is_available = ssl is not None and object not in ( + HTTPSHandler, HTTPSConnection) try: @@ -54,7 +56,7 @@ class CertificateError(ValueError): pass -if not match_hostname: +if not match_hostname: # noqa: C901 # 'If 59' is too complex (21) # FIXME def _dnsname_match(dn, hostname, max_wildcards=1): """Matching according to RFC 6125, section 6.4.3 @@ -85,8 +87,10 @@ def _dnsname_match(dn, hostname, max_wildcards=1): return dn.lower() == hostname.lower() # RFC 6125, section 6.4.3, subitem 1. - # The client SHOULD NOT attempt to match a presented identifier in which - # the wildcard character comprises a label other than the left-most label. + # The client SHOULD NOT attempt to match a + # presented identifier in which the wildcard + # character comprises a label other than the + # left-most label. if leftmost == '*': # When '*' is a fragment by itself, it matches a non-empty dotless # fragment. @@ -137,15 +141,16 @@ def match_hostname(cert, hostname): return dnsnames.append(value) if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" + raise CertificateError( + "hostname %r doesn't match either of %s" % (hostname, ', '.join(map(repr, dnsnames)))) elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" + raise CertificateError( + "hostname %r doesn't match %r" % (hostname, dnsnames[0])) else: - raise CertificateError("no appropriate commonName or " + raise CertificateError( + "no appropriate commonName or " "subjectAltName fields were found") @@ -158,7 +163,8 @@ def __init__(self, ca_bundle): def https_open(self, req): return self.do_open( - lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req + lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), + req ) diff --git a/env/lib/python2.7/site-packages/setuptools/unicode_utils.py b/venv/Lib/site-packages/setuptools/unicode_utils.py similarity index 88% rename from env/lib/python2.7/site-packages/setuptools/unicode_utils.py rename to venv/Lib/site-packages/setuptools/unicode_utils.py index 7c63efd2..e84e65e3 100644 --- a/env/lib/python2.7/site-packages/setuptools/unicode_utils.py +++ b/venv/Lib/site-packages/setuptools/unicode_utils.py @@ -1,12 +1,10 @@ import unicodedata import sys -from setuptools.extern import six - # HFS Plus uses decomposed UTF-8 def decompose(path): - if isinstance(path, six.text_type): + if isinstance(path, str): return unicodedata.normalize('NFD', path) try: path = path.decode('utf-8') @@ -23,7 +21,7 @@ def filesys_decode(path): NONE when no expected encoding works """ - if isinstance(path, six.text_type): + if isinstance(path, str): return path fs_enc = sys.getfilesystemencoding() or 'utf-8' diff --git a/env/lib/python2.7/site-packages/setuptools/version.py b/venv/Lib/site-packages/setuptools/version.py similarity index 100% rename from 
env/lib/python2.7/site-packages/setuptools/version.py rename to venv/Lib/site-packages/setuptools/version.py diff --git a/venv/Lib/site-packages/setuptools/wheel.py b/venv/Lib/site-packages/setuptools/wheel.py new file mode 100644 index 00000000..0be811af --- /dev/null +++ b/venv/Lib/site-packages/setuptools/wheel.py @@ -0,0 +1,213 @@ +"""Wheels support.""" + +from distutils.util import get_platform +from distutils import log +import email +import itertools +import os +import posixpath +import re +import zipfile + +import pkg_resources +import setuptools +from pkg_resources import parse_version +from setuptools.extern.packaging.tags import sys_tags +from setuptools.extern.packaging.utils import canonicalize_name +from setuptools.command.egg_info import write_requirements + + +WHEEL_NAME = re.compile( + r"""^(?P.+?)-(?P\d.*?) + ((-(?P\d.*?))?-(?P.+?)-(?P.+?)-(?P.+?) + )\.whl$""", + re.VERBOSE).match + +NAMESPACE_PACKAGE_INIT = \ + "__import__('pkg_resources').declare_namespace(__name__)\n" + + +def unpack(src_dir, dst_dir): + '''Move everything under `src_dir` to `dst_dir`, and delete the former.''' + for dirpath, dirnames, filenames in os.walk(src_dir): + subdir = os.path.relpath(dirpath, src_dir) + for f in filenames: + src = os.path.join(dirpath, f) + dst = os.path.join(dst_dir, subdir, f) + os.renames(src, dst) + for n, d in reversed(list(enumerate(dirnames))): + src = os.path.join(dirpath, d) + dst = os.path.join(dst_dir, subdir, d) + if not os.path.exists(dst): + # Directory does not exist in destination, + # rename it and prune it from os.walk list. + os.renames(src, dst) + del dirnames[n] + # Cleanup. + for dirpath, dirnames, filenames in os.walk(src_dir, topdown=True): + assert not filenames + os.rmdir(dirpath) + + +class Wheel: + + def __init__(self, filename): + match = WHEEL_NAME(os.path.basename(filename)) + if match is None: + raise ValueError('invalid wheel name: %r' % filename) + self.filename = filename + for k, v in match.groupdict().items(): + setattr(self, k, v) + + def tags(self): + '''List tags (py_version, abi, platform) supported by this wheel.''' + return itertools.product( + self.py_version.split('.'), + self.abi.split('.'), + self.platform.split('.'), + ) + + def is_compatible(self): + '''Is the wheel is compatible with the current platform?''' + supported_tags = set( + (t.interpreter, t.abi, t.platform) for t in sys_tags()) + return next((True for t in self.tags() if t in supported_tags), False) + + def egg_name(self): + return pkg_resources.Distribution( + project_name=self.project_name, version=self.version, + platform=(None if self.platform == 'any' else get_platform()), + ).egg_name() + '.egg' + + def get_dist_info(self, zf): + # find the correct name of the .dist-info dir in the wheel file + for member in zf.namelist(): + dirname = posixpath.dirname(member) + if (dirname.endswith('.dist-info') and + canonicalize_name(dirname).startswith( + canonicalize_name(self.project_name))): + return dirname + raise ValueError("unsupported wheel format. 
.dist-info not found") + + def install_as_egg(self, destination_eggdir): + '''Install wheel as an egg directory.''' + with zipfile.ZipFile(self.filename) as zf: + self._install_as_egg(destination_eggdir, zf) + + def _install_as_egg(self, destination_eggdir, zf): + dist_basename = '%s-%s' % (self.project_name, self.version) + dist_info = self.get_dist_info(zf) + dist_data = '%s.data' % dist_basename + egg_info = os.path.join(destination_eggdir, 'EGG-INFO') + + self._convert_metadata(zf, destination_eggdir, dist_info, egg_info) + self._move_data_entries(destination_eggdir, dist_data) + self._fix_namespace_packages(egg_info, destination_eggdir) + + @staticmethod + def _convert_metadata(zf, destination_eggdir, dist_info, egg_info): + def get_metadata(name): + with zf.open(posixpath.join(dist_info, name)) as fp: + value = fp.read().decode('utf-8') + return email.parser.Parser().parsestr(value) + + wheel_metadata = get_metadata('WHEEL') + # Check wheel format version is supported. + wheel_version = parse_version(wheel_metadata.get('Wheel-Version')) + wheel_v1 = ( + parse_version('1.0') <= wheel_version < parse_version('2.0dev0') + ) + if not wheel_v1: + raise ValueError( + 'unsupported wheel format version: %s' % wheel_version) + # Extract to target directory. + os.mkdir(destination_eggdir) + zf.extractall(destination_eggdir) + # Convert metadata. + dist_info = os.path.join(destination_eggdir, dist_info) + dist = pkg_resources.Distribution.from_location( + destination_eggdir, dist_info, + metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info), + ) + + # Note: Evaluate and strip markers now, + # as it's difficult to convert back from the syntax: + # foobar; "linux" in sys_platform and extra == 'test' + def raw_req(req): + req.marker = None + return str(req) + install_requires = list(sorted(map(raw_req, dist.requires()))) + extras_require = { + extra: sorted( + req + for req in map(raw_req, dist.requires((extra,))) + if req not in install_requires + ) + for extra in dist.extras + } + os.rename(dist_info, egg_info) + os.rename( + os.path.join(egg_info, 'METADATA'), + os.path.join(egg_info, 'PKG-INFO'), + ) + setup_dist = setuptools.Distribution( + attrs=dict( + install_requires=install_requires, + extras_require=extras_require, + ), + ) + # Temporarily disable info traces. + log_threshold = log._global_log.threshold + log.set_threshold(log.WARN) + try: + write_requirements( + setup_dist.get_command_obj('egg_info'), + None, + os.path.join(egg_info, 'requires.txt'), + ) + finally: + log.set_threshold(log_threshold) + + @staticmethod + def _move_data_entries(destination_eggdir, dist_data): + """Move data entries to their correct location.""" + dist_data = os.path.join(destination_eggdir, dist_data) + dist_data_scripts = os.path.join(dist_data, 'scripts') + if os.path.exists(dist_data_scripts): + egg_info_scripts = os.path.join( + destination_eggdir, 'EGG-INFO', 'scripts') + os.mkdir(egg_info_scripts) + for entry in os.listdir(dist_data_scripts): + # Remove bytecode, as it's not properly handled + # during easy_install scripts install phase. 
+ if entry.endswith('.pyc'): + os.unlink(os.path.join(dist_data_scripts, entry)) + else: + os.rename( + os.path.join(dist_data_scripts, entry), + os.path.join(egg_info_scripts, entry), + ) + os.rmdir(dist_data_scripts) + for subdir in filter(os.path.exists, ( + os.path.join(dist_data, d) + for d in ('data', 'headers', 'purelib', 'platlib') + )): + unpack(subdir, destination_eggdir) + if os.path.exists(dist_data): + os.rmdir(dist_data) + + @staticmethod + def _fix_namespace_packages(egg_info, destination_eggdir): + namespace_packages = os.path.join( + egg_info, 'namespace_packages.txt') + if os.path.exists(namespace_packages): + with open(namespace_packages) as fp: + namespace_packages = fp.read().split() + for mod in namespace_packages: + mod_dir = os.path.join(destination_eggdir, *mod.split('.')) + mod_init = os.path.join(mod_dir, '__init__.py') + if not os.path.exists(mod_dir): + os.mkdir(mod_dir) + if not os.path.exists(mod_init): + with open(mod_init, 'w') as fp: + fp.write(NAMESPACE_PACKAGE_INIT) diff --git a/env/lib/python2.7/site-packages/setuptools/windows_support.py b/venv/Lib/site-packages/setuptools/windows_support.py similarity index 100% rename from env/lib/python2.7/site-packages/setuptools/windows_support.py rename to venv/Lib/site-packages/setuptools/windows_support.py diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/INSTALLER b/venv/Lib/site-packages/six-1.16.0.dist-info/INSTALLER similarity index 100% rename from env/lib/python2.7/site-packages/pyobjc_framework_AVKit-5.2.dist-info/INSTALLER rename to venv/Lib/site-packages/six-1.16.0.dist-info/INSTALLER diff --git a/venv/Lib/site-packages/six-1.16.0.dist-info/LICENSE b/venv/Lib/site-packages/six-1.16.0.dist-info/LICENSE new file mode 100644 index 00000000..de663311 --- /dev/null +++ b/venv/Lib/site-packages/six-1.16.0.dist-info/LICENSE @@ -0,0 +1,18 @@ +Copyright (c) 2010-2020 Benjamin Peterson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/venv/Lib/site-packages/six-1.16.0.dist-info/METADATA b/venv/Lib/site-packages/six-1.16.0.dist-info/METADATA new file mode 100644 index 00000000..6d7525c2 --- /dev/null +++ b/venv/Lib/site-packages/six-1.16.0.dist-info/METADATA @@ -0,0 +1,49 @@ +Metadata-Version: 2.1 +Name: six +Version: 1.16.0 +Summary: Python 2 and 3 compatibility utilities +Home-page: https://github.com/benjaminp/six +Author: Benjamin Peterson +Author-email: benjamin@python.org +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.* + +.. image:: https://img.shields.io/pypi/v/six.svg + :target: https://pypi.org/project/six/ + :alt: six on PyPI + +.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master + :target: https://travis-ci.org/benjaminp/six + :alt: six on TravisCI + +.. image:: https://readthedocs.org/projects/six/badge/?version=latest + :target: https://six.readthedocs.io/ + :alt: six's documentation on Read the Docs + +.. image:: https://img.shields.io/badge/license-MIT-green.svg + :target: https://github.com/benjaminp/six/blob/master/LICENSE + :alt: MIT License badge + +Six is a Python 2 and 3 compatibility library. It provides utility functions +for smoothing over the differences between the Python versions with the goal of +writing Python code that is compatible on both Python versions. See the +documentation for more information on what is provided. + +Six supports Python 2.7 and 3.3+. It is contained in only one Python +file, so it can be easily copied into your project. (The copyright and license +notice must be retained.) + +Online documentation is at https://six.readthedocs.io/. + +Bugs can be reported to https://github.com/benjaminp/six. The code can also +be found there. 
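For context, the vendored six library described above is used roughly as follows. This is an illustrative sketch only, not part of the diff; `six.PY2`, `six.PY3`, `six.string_types` and `six.moves` are the public helpers defined in the vendored six.py included later in this change set.

import six

# Branch on interpreter major version (six.PY2 / six.PY3 are plain booleans).
if six.PY3:
    text = "already unicode on Python 3"
else:
    text = u"unicode literal needed on Python 2"

# six.string_types is (str,) on Python 3 and (basestring,) on Python 2,
# so isinstance checks work unchanged on both interpreters.
assert isinstance(text, six.string_types)

# six.moves exposes renamed stdlib modules under one import path,
# e.g. urllib.parse on Python 3 vs. urlparse on Python 2.
from six.moves.urllib.parse import urlparse
print(urlparse("https://pypi.org/project/six/").netloc)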
+ + diff --git a/venv/Lib/site-packages/six-1.16.0.dist-info/RECORD b/venv/Lib/site-packages/six-1.16.0.dist-info/RECORD new file mode 100644 index 00000000..59b5184d --- /dev/null +++ b/venv/Lib/site-packages/six-1.16.0.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/six.cpython-36.pyc,, +six-1.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +six-1.16.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066 +six-1.16.0.dist-info/METADATA,sha256=VQcGIFCAEmfZcl77E5riPCN4v2TIsc_qtacnjxKHJoI,1795 +six-1.16.0.dist-info/RECORD,, +six-1.16.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 +six-1.16.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 +six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549 diff --git a/venv/Lib/site-packages/six-1.16.0.dist-info/WHEEL b/venv/Lib/site-packages/six-1.16.0.dist-info/WHEEL new file mode 100644 index 00000000..01b8fc7d --- /dev/null +++ b/venv/Lib/site-packages/six-1.16.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/six-1.16.0.dist-info/top_level.txt b/venv/Lib/site-packages/six-1.16.0.dist-info/top_level.txt new file mode 100644 index 00000000..ffe2fce4 --- /dev/null +++ b/venv/Lib/site-packages/six-1.16.0.dist-info/top_level.txt @@ -0,0 +1 @@ +six diff --git a/venv/Lib/site-packages/six.py b/venv/Lib/site-packages/six.py new file mode 100644 index 00000000..4e15675d --- /dev/null +++ b/venv/Lib/site-packages/six.py @@ -0,0 +1,998 @@ +# Copyright (c) 2010-2020 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.16.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. 
+ MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + +if PY34: + from importlib.util import spec_from_loader +else: + spec_from_loader = None + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def find_spec(self, fullname, path, target=None): + if fullname in self.known_modules: + return spec_from_loader(fullname, self) + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", 
"http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", 
"urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class 
Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over 
the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] > (3,): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. + def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, binary_type): + return s + if isinstance(s, text_type): + return s.encode(encoding, errors) + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + # Optimization: Fast return for the common case. + if type(s) is str: + return s + if PY2 and isinstance(s, text_type): + return s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) 
+if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/INSTALLER b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/INSTALLER similarity index 100% rename from env/lib/python2.7/site-packages/pyobjc_framework_Accounts-5.2.dist-info/INSTALLER rename to venv/Lib/site-packages/uiautomation-2.0.13.dist-info/INSTALLER diff --git a/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/LICENSE b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/LICENSE new file mode 100644 index 00000000..39254fae --- /dev/null +++ b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright Yinkaisheng + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/METADATA b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/METADATA new file mode 100644 index 00000000..9515f115 --- /dev/null +++ b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/METADATA @@ -0,0 +1,20 @@ +Metadata-Version: 2.1 +Name: uiautomation +Version: 2.0.13 +Summary: Python UIAutomation for Windows +Home-page: https://github.com/yinkaisheng/Python-UIAutomation-for-Windows +Author: yinkaisheng +Author-email: yinkaisheng@live.com +License: Apache 2.0 +Keywords: windows ui automation uiautomation inspect +Platform: Windows Only +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +License-File: LICENSE +Requires-Dist: comtypes (>=1.1.10) + +Python UIAutomation for Windows. 
Supports Python3.4+, x86, x64 + diff --git a/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/RECORD b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/RECORD new file mode 100644 index 00000000..cc26b4d5 --- /dev/null +++ b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/RECORD @@ -0,0 +1,17 @@ +../../Scripts/__pycache__/automation.cpython-36.pyc,, +../../Scripts/automation.py,sha256=-Gw_o_c-lnQhKdYNN7UNRqfoRutET77c_ssCT4U3-3k,4746 +uiautomation-2.0.13.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +uiautomation-2.0.13.dist-info/LICENSE,sha256=fDYXKwn6sXZyAjK-hThMsdXMZO5A3Q9Q0zW6BKo9cfs,11537 +uiautomation-2.0.13.dist-info/METADATA,sha256=Od28F4yyS_TMgStAxFHjFkxhOmAeSbwq9pOZqAiruHA,695 +uiautomation-2.0.13.dist-info/RECORD,, +uiautomation-2.0.13.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +uiautomation-2.0.13.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 +uiautomation-2.0.13.dist-info/top_level.txt,sha256=XAA4nCZEKrHlEduVpO-0h_Zbp45jT9A0-T-392SBn24,13 +uiautomation/__init__.py,sha256=CGK0nDGraSGVTlMmV34QSpoIyq9U16vxS0lXeGLwoaM,101 +uiautomation/__pycache__/__init__.cpython-36.pyc,, +uiautomation/__pycache__/uiautomation.cpython-36.pyc,, +uiautomation/__pycache__/version.cpython-36.pyc,, +uiautomation/bin/UIAutomationClient_VC140_X64.dll,sha256=7nt9QbL94ADQK7BM3EBXcvn8dagXzEZ7mI0ydAv6Dzk,101888 +uiautomation/bin/UIAutomationClient_VC140_X86.dll,sha256=qunExbVLetZVHeLk2_B0AnO2BgZ5s2RVBMOXkpOUcno,84480 +uiautomation/uiautomation.py,sha256=YeJecpMJ65RWnw5NqL0H-pWQljyiA67oAmYFCUhO_lI,392649 +uiautomation/version.py,sha256=vjpsmIiQEONDEuptM7Qd857SAWHMkzzX1DNCtBIOC0g,20 diff --git a/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/REQUESTED b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/WHEEL b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/WHEEL new file mode 100644 index 00000000..01b8fc7d --- /dev/null +++ b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/top_level.txt b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/top_level.txt new file mode 100644 index 00000000..9202cecb --- /dev/null +++ b/venv/Lib/site-packages/uiautomation-2.0.13.dist-info/top_level.txt @@ -0,0 +1 @@ +uiautomation diff --git a/venv/Lib/site-packages/uiautomation/__init__.py b/venv/Lib/site-packages/uiautomation/__init__.py new file mode 100644 index 00000000..447c7ec2 --- /dev/null +++ b/venv/Lib/site-packages/uiautomation/__init__.py @@ -0,0 +1,4 @@ +from __future__ import absolute_import + +from .version import VERSION +from .uiautomation import * diff --git a/venv/Lib/site-packages/uiautomation/__pycache__/__init__.cpython-36.pyc b/venv/Lib/site-packages/uiautomation/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 00000000..cd002154 Binary files /dev/null and b/venv/Lib/site-packages/uiautomation/__pycache__/__init__.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/uiautomation/__pycache__/uiautomation.cpython-36.pyc b/venv/Lib/site-packages/uiautomation/__pycache__/uiautomation.cpython-36.pyc new file mode 100644 index 00000000..4fc5ec60 Binary files /dev/null and b/venv/Lib/site-packages/uiautomation/__pycache__/uiautomation.cpython-36.pyc differ 
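For context on the uiautomation package added here: a hedged sketch of how it is typically used to inspect the active application on Windows. GetForegroundControl and the Name/ControlTypeName attributes are public uiautomation APIs, but this snippet is illustrative and is not code shipped by this project:

    import uiautomation as auto

    # Control backing the current foreground window; Name is what a
    # time tracker would record as the active application/window title.
    control = auto.GetForegroundControl()
    print(control.Name, control.ControlTypeName)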
diff --git a/venv/Lib/site-packages/uiautomation/__pycache__/version.cpython-36.pyc b/venv/Lib/site-packages/uiautomation/__pycache__/version.cpython-36.pyc new file mode 100644 index 00000000..1c728ae2 Binary files /dev/null and b/venv/Lib/site-packages/uiautomation/__pycache__/version.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/uiautomation/bin/UIAutomationClient_VC140_X64.dll b/venv/Lib/site-packages/uiautomation/bin/UIAutomationClient_VC140_X64.dll new file mode 100644 index 00000000..a643bd22 Binary files /dev/null and b/venv/Lib/site-packages/uiautomation/bin/UIAutomationClient_VC140_X64.dll differ diff --git a/venv/Lib/site-packages/uiautomation/bin/UIAutomationClient_VC140_X86.dll b/venv/Lib/site-packages/uiautomation/bin/UIAutomationClient_VC140_X86.dll new file mode 100644 index 00000000..7617a64f Binary files /dev/null and b/venv/Lib/site-packages/uiautomation/bin/UIAutomationClient_VC140_X86.dll differ diff --git a/venv/Lib/site-packages/uiautomation/uiautomation.py b/venv/Lib/site-packages/uiautomation/uiautomation.py new file mode 100644 index 00000000..d33e24bb --- /dev/null +++ b/venv/Lib/site-packages/uiautomation/uiautomation.py @@ -0,0 +1,8522 @@ +#!python3 +# -*- coding: utf-8 -*- +""" +uiautomation for Python 3. +Author: yinkaisheng@live.com +Source: https://github.com/yinkaisheng/Python-UIAutomation-for-Windows + +This module is for UIAutomation on Windows(Windows XP with SP3, Windows Vista and Windows 7/8/8.1/10). +It supports UIAutomation for the applications which implmented IUIAutomation, such as MFC, Windows Form, WPF, Modern UI(Metro UI), Qt, Firefox and Chrome. +Run 'automation.py -h' for help. + +uiautomation is shared under the Apache Licene 2.0. +This means that the code can be freely copied and distributed, and costs nothing to use. +""" +import os +import sys +import time +import datetime +import re +import threading +import ctypes +import ctypes.wintypes +import comtypes # need 'pip install comtypes' +import comtypes.client +from typing import (Any, Callable, Dict, List, Iterable, Tuple) # need 'pip install typing' for Python3.4 or lower +TreeNode = Any + + +AUTHOR_MAIL = 'yinkaisheng@live.com' +METRO_WINDOW_CLASS_NAME = 'Windows.UI.Core.CoreWindow' # for Windows 8 and 8.1 +SEARCH_INTERVAL = 0.5 # search control interval seconds +MAX_MOVE_SECOND = 1 # simulate mouse move or drag max seconds +TIME_OUT_SECOND = 10 +OPERATION_WAIT_TIME = 0.5 +MAX_PATH = 260 +DEBUG_SEARCH_TIME = False +DEBUG_EXIST_DISAPPEAR = False +S_OK = 0 + +IsNT6orHigher = os.sys.getwindowsversion().major >= 6 +ProcessTime = time.perf_counter # this returns nearly 0 when first call it if python version <= 3.6 +ProcessTime() # need to call it once if python version <= 3.6 + + +class _AutomationClient: + _instance = None + + @classmethod + def instance(cls) -> '_AutomationClient': + """Singleton instance (this prevents com creation on import).""" + if cls._instance is None: + cls._instance = cls() + return cls._instance + + def __init__(self): + tryCount = 3 + for retry in range(tryCount): + try: + self.UIAutomationCore = comtypes.client.GetModule("UIAutomationCore.dll") + self.IUIAutomation = comtypes.client.CreateObject("{ff48dba4-60ef-4201-aa87-54103eef594e}", interface=self.UIAutomationCore.IUIAutomation) + self.ViewWalker = self.IUIAutomation.RawViewWalker + #self.ViewWalker = self.IUIAutomation.ControlViewWalker + break + except Exception as ex: + if retry + 1 == tryCount: + Logger.WriteLine('''Can not load UIAutomationCore.dll. 
+1, You may need to install Windows Update KB971513 if your OS is Windows XP, see https://github.com/yinkaisheng/WindowsUpdateKB971513ForIUIAutomation +2, you need to use an UIAutomationInitializerInThread object in a thread, see demos/uiautomation_in_thread.py''', ConsoleColor.Yellow) + raise ex + # Windows dll + ctypes.windll.user32.GetAncestor.restype = ctypes.c_void_p + ctypes.windll.user32.GetClipboardData.restype = ctypes.c_void_p + ctypes.windll.user32.GetDC.restype = ctypes.c_void_p + ctypes.windll.user32.GetForegroundWindow.restype = ctypes.c_void_p + ctypes.windll.user32.GetWindowDC.restype = ctypes.c_void_p + ctypes.windll.user32.GetWindowLongW.restype = ctypes.wintypes.LONG + ctypes.windll.user32.OpenDesktopW.restype = ctypes.c_void_p + ctypes.windll.user32.SendMessageW.restype = ctypes.wintypes.LONG + ctypes.windll.user32.WindowFromPoint.restype = ctypes.c_void_p + ctypes.windll.gdi32.CreateBitmap.restype = ctypes.c_void_p + ctypes.windll.gdi32.CreateCompatibleDC.restype = ctypes.c_void_p + ctypes.windll.gdi32.SelectObject.restype = ctypes.c_void_p + ctypes.windll.kernel32.GetConsoleWindow.restype = ctypes.c_void_p + ctypes.windll.kernel32.GetStdHandle.restype = ctypes.c_void_p + ctypes.windll.kernel32.GlobalAlloc.restype = ctypes.c_void_p + ctypes.windll.kernel32.GlobalLock.restype = ctypes.c_void_p + ctypes.windll.kernel32.OpenProcess.restype = ctypes.c_void_p + + +class _DllClient: + _instance = None + + @classmethod + def instance(cls) -> '_DllClient': + """Singleton instance (this prevents com creation on import).""" + if cls._instance is None: + cls._instance = cls() + return cls._instance + + def __init__(self): + binPath = os.path.join(os.path.dirname(os.path.abspath(__file__)), "bin") + os.environ["PATH"] = binPath + os.pathsep + os.environ["PATH"] + load = False + if sys.version >= '3.8': + os.add_dll_directory(binPath) + if sys.maxsize > 0xFFFFFFFF: + try: + self.dll = ctypes.cdll.UIAutomationClient_VC140_X64 + load = True + except Exception as ex: + print(ex) + else: + try: + self.dll = ctypes.cdll.UIAutomationClient_VC140_X86 + load = True + except Exception as ex: + print(ex) + if load: + self.dll.BitmapCreate.restype = ctypes.c_size_t + self.dll.BitmapFromWindow.restype = ctypes.c_size_t + self.dll.BitmapFromHBITMAP.restype = ctypes.c_size_t + self.dll.BitmapToHBITMAP.restype = ctypes.c_size_t + self.dll.BitmapFromFile.restype = ctypes.c_size_t + self.dll.BitmapResizedFrom.restype = ctypes.c_size_t + self.dll.BitmapRotatedFrom.restype = ctypes.c_size_t + + self.dll.Initialize() + else: + self.dll = None + Logger.WriteLine('Can not load dll.\nFunctionalities related to Bitmap are not available.\nYou may need to install Microsoft Visual C++ 2015 Redistributable Package.', ConsoleColor.Yellow) + + def __del__(self): + if self.dll: + self.dll.Uninitialize() + + +class ControlType: + """ + ControlType from IUIAutomation. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/winauto/uiauto-controltype-ids + """ + AppBarControl = 50040 + ButtonControl = 50000 + CalendarControl = 50001 + CheckBoxControl = 50002 + ComboBoxControl = 50003 + CustomControl = 50025 + DataGridControl = 50028 + DataItemControl = 50029 + DocumentControl = 50030 + EditControl = 50004 + GroupControl = 50026 + HeaderControl = 50034 + HeaderItemControl = 50035 + HyperlinkControl = 50005 + ImageControl = 50006 + ListControl = 50008 + ListItemControl = 50007 + MenuBarControl = 50010 + MenuControl = 50009 + MenuItemControl = 50011 + PaneControl = 50033 + ProgressBarControl = 50012 + RadioButtonControl = 50013 + ScrollBarControl = 50014 + SemanticZoomControl = 50039 + SeparatorControl = 50038 + SliderControl = 50015 + SpinnerControl = 50016 + SplitButtonControl = 50031 + StatusBarControl = 50017 + TabControl = 50018 + TabItemControl = 50019 + TableControl = 50036 + TextControl = 50020 + ThumbControl = 50027 + TitleBarControl = 50037 + ToolBarControl = 50021 + ToolTipControl = 50022 + TreeControl = 50023 + TreeItemControl = 50024 + WindowControl = 50032 + + +ControlTypeNames = { + ControlType.AppBarControl: 'AppBarControl', + ControlType.ButtonControl: 'ButtonControl', + ControlType.CalendarControl: 'CalendarControl', + ControlType.CheckBoxControl: 'CheckBoxControl', + ControlType.ComboBoxControl: 'ComboBoxControl', + ControlType.CustomControl: 'CustomControl', + ControlType.DataGridControl: 'DataGridControl', + ControlType.DataItemControl: 'DataItemControl', + ControlType.DocumentControl: 'DocumentControl', + ControlType.EditControl: 'EditControl', + ControlType.GroupControl: 'GroupControl', + ControlType.HeaderControl: 'HeaderControl', + ControlType.HeaderItemControl: 'HeaderItemControl', + ControlType.HyperlinkControl: 'HyperlinkControl', + ControlType.ImageControl: 'ImageControl', + ControlType.ListControl: 'ListControl', + ControlType.ListItemControl: 'ListItemControl', + ControlType.MenuBarControl: 'MenuBarControl', + ControlType.MenuControl: 'MenuControl', + ControlType.MenuItemControl: 'MenuItemControl', + ControlType.PaneControl: 'PaneControl', + ControlType.ProgressBarControl: 'ProgressBarControl', + ControlType.RadioButtonControl: 'RadioButtonControl', + ControlType.ScrollBarControl: 'ScrollBarControl', + ControlType.SemanticZoomControl: 'SemanticZoomControl', + ControlType.SeparatorControl: 'SeparatorControl', + ControlType.SliderControl: 'SliderControl', + ControlType.SpinnerControl: 'SpinnerControl', + ControlType.SplitButtonControl: 'SplitButtonControl', + ControlType.StatusBarControl: 'StatusBarControl', + ControlType.TabControl: 'TabControl', + ControlType.TabItemControl: 'TabItemControl', + ControlType.TableControl: 'TableControl', + ControlType.TextControl: 'TextControl', + ControlType.ThumbControl: 'ThumbControl', + ControlType.TitleBarControl: 'TitleBarControl', + ControlType.ToolBarControl: 'ToolBarControl', + ControlType.ToolTipControl: 'ToolTipControl', + ControlType.TreeControl: 'TreeControl', + ControlType.TreeItemControl: 'TreeItemControl', + ControlType.WindowControl: 'WindowControl', +} + + +class PatternId: + """ + PatternId from IUIAutomation. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/winauto/uiauto-controlpattern-ids + """ + AnnotationPattern = 10023 + CustomNavigationPattern = 10033 + DockPattern = 10011 + DragPattern = 10030 + DropTargetPattern = 10031 + ExpandCollapsePattern = 10005 + GridItemPattern = 10007 + GridPattern = 10006 + InvokePattern = 10000 + ItemContainerPattern = 10019 + LegacyIAccessiblePattern = 10018 + MultipleViewPattern = 10008 + ObjectModelPattern = 10022 + RangeValuePattern = 10003 + ScrollItemPattern = 10017 + ScrollPattern = 10004 + SelectionItemPattern = 10010 + SelectionPattern = 10001 + SpreadsheetItemPattern = 10027 + SpreadsheetPattern = 10026 + StylesPattern = 10025 + SynchronizedInputPattern = 10021 + TableItemPattern = 10013 + TablePattern = 10012 + TextChildPattern = 10029 + TextEditPattern = 10032 + TextPattern = 10014 + TextPattern2 = 10024 + TogglePattern = 10015 + TransformPattern = 10016 + TransformPattern2 = 10028 + ValuePattern = 10002 + VirtualizedItemPattern = 10020 + WindowPattern = 10009 + + +PatternIdNames = { + PatternId.AnnotationPattern: 'AnnotationPattern', + PatternId.CustomNavigationPattern: 'CustomNavigationPattern', + PatternId.DockPattern: 'DockPattern', + PatternId.DragPattern: 'DragPattern', + PatternId.DropTargetPattern: 'DropTargetPattern', + PatternId.ExpandCollapsePattern: 'ExpandCollapsePattern', + PatternId.GridItemPattern: 'GridItemPattern', + PatternId.GridPattern: 'GridPattern', + PatternId.InvokePattern: 'InvokePattern', + PatternId.ItemContainerPattern: 'ItemContainerPattern', + PatternId.LegacyIAccessiblePattern: 'LegacyIAccessiblePattern', + PatternId.MultipleViewPattern: 'MultipleViewPattern', + PatternId.ObjectModelPattern: 'ObjectModelPattern', + PatternId.RangeValuePattern: 'RangeValuePattern', + PatternId.ScrollItemPattern: 'ScrollItemPattern', + PatternId.ScrollPattern: 'ScrollPattern', + PatternId.SelectionItemPattern: 'SelectionItemPattern', + PatternId.SelectionPattern: 'SelectionPattern', + PatternId.SpreadsheetItemPattern: 'SpreadsheetItemPattern', + PatternId.SpreadsheetPattern: 'SpreadsheetPattern', + PatternId.StylesPattern: 'StylesPattern', + PatternId.SynchronizedInputPattern: 'SynchronizedInputPattern', + PatternId.TableItemPattern: 'TableItemPattern', + PatternId.TablePattern: 'TablePattern', + PatternId.TextChildPattern: 'TextChildPattern', + PatternId.TextEditPattern: 'TextEditPattern', + PatternId.TextPattern: 'TextPattern', + PatternId.TextPattern2: 'TextPattern2', + PatternId.TogglePattern: 'TogglePattern', + PatternId.TransformPattern: 'TransformPattern', + PatternId.TransformPattern2: 'TransformPattern2', + PatternId.ValuePattern: 'ValuePattern', + PatternId.VirtualizedItemPattern: 'VirtualizedItemPattern', + PatternId.WindowPattern: 'WindowPattern', +} + + +class PropertyId: + """ + PropertyId from IUIAutomation. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/winauto/uiauto-automation-element-propids + Refer https://docs.microsoft.com/en-us/windows/win32/winauto/uiauto-control-pattern-propids + """ + AcceleratorKeyProperty = 30006 + AccessKeyProperty = 30007 + AnnotationAnnotationTypeIdProperty = 30113 + AnnotationAnnotationTypeNameProperty = 30114 + AnnotationAuthorProperty = 30115 + AnnotationDateTimeProperty = 30116 + AnnotationObjectsProperty = 30156 + AnnotationTargetProperty = 30117 + AnnotationTypesProperty = 30155 + AriaPropertiesProperty = 30102 + AriaRoleProperty = 30101 + AutomationIdProperty = 30011 + BoundingRectangleProperty = 30001 + CenterPointProperty = 30165 + ClassNameProperty = 30012 + ClickablePointProperty = 30014 + ControlTypeProperty = 30003 + ControllerForProperty = 30104 + CultureProperty = 30015 + DescribedByProperty = 30105 + DockDockPositionProperty = 30069 + DragDropEffectProperty = 30139 + DragDropEffectsProperty = 30140 + DragGrabbedItemsProperty = 30144 + DragIsGrabbedProperty = 30138 + DropTargetDropTargetEffectProperty = 30142 + DropTargetDropTargetEffectsProperty = 30143 + ExpandCollapseExpandCollapseStateProperty = 30070 + FillColorProperty = 30160 + FillTypeProperty = 30162 + FlowsFromProperty = 30148 + FlowsToProperty = 30106 + FrameworkIdProperty = 30024 + FullDescriptionProperty = 30159 + GridColumnCountProperty = 30063 + GridItemColumnProperty = 30065 + GridItemColumnSpanProperty = 30067 + GridItemContainingGridProperty = 30068 + GridItemRowProperty = 30064 + GridItemRowSpanProperty = 30066 + GridRowCountProperty = 30062 + HasKeyboardFocusProperty = 30008 + HelpTextProperty = 30013 + IsAnnotationPatternAvailableProperty = 30118 + IsContentElementProperty = 30017 + IsControlElementProperty = 30016 + IsCustomNavigationPatternAvailableProperty = 30151 + IsDataValidForFormProperty = 30103 + IsDockPatternAvailableProperty = 30027 + IsDragPatternAvailableProperty = 30137 + IsDropTargetPatternAvailableProperty = 30141 + IsEnabledProperty = 30010 + IsExpandCollapsePatternAvailableProperty = 30028 + IsGridItemPatternAvailableProperty = 30029 + IsGridPatternAvailableProperty = 30030 + IsInvokePatternAvailableProperty = 30031 + IsItemContainerPatternAvailableProperty = 30108 + IsKeyboardFocusableProperty = 30009 + IsLegacyIAccessiblePatternAvailableProperty = 30090 + IsMultipleViewPatternAvailableProperty = 30032 + IsObjectModelPatternAvailableProperty = 30112 + IsOffscreenProperty = 30022 + IsPasswordProperty = 30019 + IsPeripheralProperty = 30150 + IsRangeValuePatternAvailableProperty = 30033 + IsRequiredForFormProperty = 30025 + IsScrollItemPatternAvailableProperty = 30035 + IsScrollPatternAvailableProperty = 30034 + IsSelectionItemPatternAvailableProperty = 30036 + IsSelectionPattern2AvailableProperty = 30168 + IsSelectionPatternAvailableProperty = 30037 + IsSpreadsheetItemPatternAvailableProperty = 30132 + IsSpreadsheetPatternAvailableProperty = 30128 + IsStylesPatternAvailableProperty = 30127 + IsSynchronizedInputPatternAvailableProperty = 30110 + IsTableItemPatternAvailableProperty = 30039 + IsTablePatternAvailableProperty = 30038 + IsTextChildPatternAvailableProperty = 30136 + IsTextEditPatternAvailableProperty = 30149 + IsTextPattern2AvailableProperty = 30119 + IsTextPatternAvailableProperty = 30040 + IsTogglePatternAvailableProperty = 30041 + IsTransformPattern2AvailableProperty = 30134 + IsTransformPatternAvailableProperty = 30042 + IsValuePatternAvailableProperty = 30043 + IsVirtualizedItemPatternAvailableProperty = 30109 + 
IsWindowPatternAvailableProperty = 30044 + ItemStatusProperty = 30026 + ItemTypeProperty = 30021 + LabeledByProperty = 30018 + LandmarkTypeProperty = 30157 + LegacyIAccessibleChildIdProperty = 30091 + LegacyIAccessibleDefaultActionProperty = 30100 + LegacyIAccessibleDescriptionProperty = 30094 + LegacyIAccessibleHelpProperty = 30097 + LegacyIAccessibleKeyboardShortcutProperty = 30098 + LegacyIAccessibleNameProperty = 30092 + LegacyIAccessibleRoleProperty = 30095 + LegacyIAccessibleSelectionProperty = 30099 + LegacyIAccessibleStateProperty = 30096 + LegacyIAccessibleValueProperty = 30093 + LevelProperty = 30154 + LiveSettingProperty = 30135 + LocalizedControlTypeProperty = 30004 + LocalizedLandmarkTypeProperty = 30158 + MultipleViewCurrentViewProperty = 30071 + MultipleViewSupportedViewsProperty = 30072 + NameProperty = 30005 + NativeWindowHandleProperty = 30020 + OptimizeForVisualContentProperty = 30111 + OrientationProperty = 30023 + OutlineColorProperty = 30161 + OutlineThicknessProperty = 30164 + PositionInSetProperty = 30152 + ProcessIdProperty = 30002 + ProviderDescriptionProperty = 30107 + RangeValueIsReadOnlyProperty = 30048 + RangeValueLargeChangeProperty = 30051 + RangeValueMaximumProperty = 30050 + RangeValueMinimumProperty = 30049 + RangeValueSmallChangeProperty = 30052 + RangeValueValueProperty = 30047 + RotationProperty = 30166 + RuntimeIdProperty = 30000 + ScrollHorizontalScrollPercentProperty = 30053 + ScrollHorizontalViewSizeProperty = 30054 + ScrollHorizontallyScrollableProperty = 30057 + ScrollVerticalScrollPercentProperty = 30055 + ScrollVerticalViewSizeProperty = 30056 + ScrollVerticallyScrollableProperty = 30058 + Selection2CurrentSelectedItemProperty = 30171 + Selection2FirstSelectedItemProperty = 30169 + Selection2ItemCountProperty = 30172 + Selection2LastSelectedItemProperty = 30170 + SelectionCanSelectMultipleProperty = 30060 + SelectionIsSelectionRequiredProperty = 30061 + SelectionItemIsSelectedProperty = 30079 + SelectionItemSelectionContainerProperty = 30080 + SelectionSelectionProperty = 30059 + SizeOfSetProperty = 30153 + SizeProperty = 30167 + SpreadsheetItemAnnotationObjectsProperty = 30130 + SpreadsheetItemAnnotationTypesProperty = 30131 + SpreadsheetItemFormulaProperty = 30129 + StylesExtendedPropertiesProperty = 30126 + StylesFillColorProperty = 30122 + StylesFillPatternColorProperty = 30125 + StylesFillPatternStyleProperty = 30123 + StylesShapeProperty = 30124 + StylesStyleIdProperty = 30120 + StylesStyleNameProperty = 30121 + TableColumnHeadersProperty = 30082 + TableItemColumnHeaderItemsProperty = 30085 + TableItemRowHeaderItemsProperty = 30084 + TableRowHeadersProperty = 30081 + TableRowOrColumnMajorProperty = 30083 + ToggleToggleStateProperty = 30086 + Transform2CanZoomProperty = 30133 + Transform2ZoomLevelProperty = 30145 + Transform2ZoomMaximumProperty = 30147 + Transform2ZoomMinimumProperty = 30146 + TransformCanMoveProperty = 30087 + TransformCanResizeProperty = 30088 + TransformCanRotateProperty = 30089 + ValueIsReadOnlyProperty = 30046 + ValueValueProperty = 30045 + VisualEffectsProperty = 30163 + WindowCanMaximizeProperty = 30073 + WindowCanMinimizeProperty = 30074 + WindowIsModalProperty = 30077 + WindowIsTopmostProperty = 30078 + WindowWindowInteractionStateProperty = 30076 + WindowWindowVisualStateProperty = 30075 + + +PropertyIdNames = { + PropertyId.AcceleratorKeyProperty: 'AcceleratorKeyProperty', + PropertyId.AccessKeyProperty: 'AccessKeyProperty', + PropertyId.AnnotationAnnotationTypeIdProperty: 'AnnotationAnnotationTypeIdProperty', + 
PropertyId.AnnotationAnnotationTypeNameProperty: 'AnnotationAnnotationTypeNameProperty', + PropertyId.AnnotationAuthorProperty: 'AnnotationAuthorProperty', + PropertyId.AnnotationDateTimeProperty: 'AnnotationDateTimeProperty', + PropertyId.AnnotationObjectsProperty: 'AnnotationObjectsProperty', + PropertyId.AnnotationTargetProperty: 'AnnotationTargetProperty', + PropertyId.AnnotationTypesProperty: 'AnnotationTypesProperty', + PropertyId.AriaPropertiesProperty: 'AriaPropertiesProperty', + PropertyId.AriaRoleProperty: 'AriaRoleProperty', + PropertyId.AutomationIdProperty: 'AutomationIdProperty', + PropertyId.BoundingRectangleProperty: 'BoundingRectangleProperty', + PropertyId.CenterPointProperty: 'CenterPointProperty', + PropertyId.ClassNameProperty: 'ClassNameProperty', + PropertyId.ClickablePointProperty: 'ClickablePointProperty', + PropertyId.ControlTypeProperty: 'ControlTypeProperty', + PropertyId.ControllerForProperty: 'ControllerForProperty', + PropertyId.CultureProperty: 'CultureProperty', + PropertyId.DescribedByProperty: 'DescribedByProperty', + PropertyId.DockDockPositionProperty: 'DockDockPositionProperty', + PropertyId.DragDropEffectProperty: 'DragDropEffectProperty', + PropertyId.DragDropEffectsProperty: 'DragDropEffectsProperty', + PropertyId.DragGrabbedItemsProperty: 'DragGrabbedItemsProperty', + PropertyId.DragIsGrabbedProperty: 'DragIsGrabbedProperty', + PropertyId.DropTargetDropTargetEffectProperty: 'DropTargetDropTargetEffectProperty', + PropertyId.DropTargetDropTargetEffectsProperty: 'DropTargetDropTargetEffectsProperty', + PropertyId.ExpandCollapseExpandCollapseStateProperty: 'ExpandCollapseExpandCollapseStateProperty', + PropertyId.FillColorProperty: 'FillColorProperty', + PropertyId.FillTypeProperty: 'FillTypeProperty', + PropertyId.FlowsFromProperty: 'FlowsFromProperty', + PropertyId.FlowsToProperty: 'FlowsToProperty', + PropertyId.FrameworkIdProperty: 'FrameworkIdProperty', + PropertyId.FullDescriptionProperty: 'FullDescriptionProperty', + PropertyId.GridColumnCountProperty: 'GridColumnCountProperty', + PropertyId.GridItemColumnProperty: 'GridItemColumnProperty', + PropertyId.GridItemColumnSpanProperty: 'GridItemColumnSpanProperty', + PropertyId.GridItemContainingGridProperty: 'GridItemContainingGridProperty', + PropertyId.GridItemRowProperty: 'GridItemRowProperty', + PropertyId.GridItemRowSpanProperty: 'GridItemRowSpanProperty', + PropertyId.GridRowCountProperty: 'GridRowCountProperty', + PropertyId.HasKeyboardFocusProperty: 'HasKeyboardFocusProperty', + PropertyId.HelpTextProperty: 'HelpTextProperty', + PropertyId.IsAnnotationPatternAvailableProperty: 'IsAnnotationPatternAvailableProperty', + PropertyId.IsContentElementProperty: 'IsContentElementProperty', + PropertyId.IsControlElementProperty: 'IsControlElementProperty', + PropertyId.IsCustomNavigationPatternAvailableProperty: 'IsCustomNavigationPatternAvailableProperty', + PropertyId.IsDataValidForFormProperty: 'IsDataValidForFormProperty', + PropertyId.IsDockPatternAvailableProperty: 'IsDockPatternAvailableProperty', + PropertyId.IsDragPatternAvailableProperty: 'IsDragPatternAvailableProperty', + PropertyId.IsDropTargetPatternAvailableProperty: 'IsDropTargetPatternAvailableProperty', + PropertyId.IsEnabledProperty: 'IsEnabledProperty', + PropertyId.IsExpandCollapsePatternAvailableProperty: 'IsExpandCollapsePatternAvailableProperty', + PropertyId.IsGridItemPatternAvailableProperty: 'IsGridItemPatternAvailableProperty', + PropertyId.IsGridPatternAvailableProperty: 'IsGridPatternAvailableProperty', + 
PropertyId.IsInvokePatternAvailableProperty: 'IsInvokePatternAvailableProperty', + PropertyId.IsItemContainerPatternAvailableProperty: 'IsItemContainerPatternAvailableProperty', + PropertyId.IsKeyboardFocusableProperty: 'IsKeyboardFocusableProperty', + PropertyId.IsLegacyIAccessiblePatternAvailableProperty: 'IsLegacyIAccessiblePatternAvailableProperty', + PropertyId.IsMultipleViewPatternAvailableProperty: 'IsMultipleViewPatternAvailableProperty', + PropertyId.IsObjectModelPatternAvailableProperty: 'IsObjectModelPatternAvailableProperty', + PropertyId.IsOffscreenProperty: 'IsOffscreenProperty', + PropertyId.IsPasswordProperty: 'IsPasswordProperty', + PropertyId.IsPeripheralProperty: 'IsPeripheralProperty', + PropertyId.IsRangeValuePatternAvailableProperty: 'IsRangeValuePatternAvailableProperty', + PropertyId.IsRequiredForFormProperty: 'IsRequiredForFormProperty', + PropertyId.IsScrollItemPatternAvailableProperty: 'IsScrollItemPatternAvailableProperty', + PropertyId.IsScrollPatternAvailableProperty: 'IsScrollPatternAvailableProperty', + PropertyId.IsSelectionItemPatternAvailableProperty: 'IsSelectionItemPatternAvailableProperty', + PropertyId.IsSelectionPattern2AvailableProperty: 'IsSelectionPattern2AvailableProperty', + PropertyId.IsSelectionPatternAvailableProperty: 'IsSelectionPatternAvailableProperty', + PropertyId.IsSpreadsheetItemPatternAvailableProperty: 'IsSpreadsheetItemPatternAvailableProperty', + PropertyId.IsSpreadsheetPatternAvailableProperty: 'IsSpreadsheetPatternAvailableProperty', + PropertyId.IsStylesPatternAvailableProperty: 'IsStylesPatternAvailableProperty', + PropertyId.IsSynchronizedInputPatternAvailableProperty: 'IsSynchronizedInputPatternAvailableProperty', + PropertyId.IsTableItemPatternAvailableProperty: 'IsTableItemPatternAvailableProperty', + PropertyId.IsTablePatternAvailableProperty: 'IsTablePatternAvailableProperty', + PropertyId.IsTextChildPatternAvailableProperty: 'IsTextChildPatternAvailableProperty', + PropertyId.IsTextEditPatternAvailableProperty: 'IsTextEditPatternAvailableProperty', + PropertyId.IsTextPattern2AvailableProperty: 'IsTextPattern2AvailableProperty', + PropertyId.IsTextPatternAvailableProperty: 'IsTextPatternAvailableProperty', + PropertyId.IsTogglePatternAvailableProperty: 'IsTogglePatternAvailableProperty', + PropertyId.IsTransformPattern2AvailableProperty: 'IsTransformPattern2AvailableProperty', + PropertyId.IsTransformPatternAvailableProperty: 'IsTransformPatternAvailableProperty', + PropertyId.IsValuePatternAvailableProperty: 'IsValuePatternAvailableProperty', + PropertyId.IsVirtualizedItemPatternAvailableProperty: 'IsVirtualizedItemPatternAvailableProperty', + PropertyId.IsWindowPatternAvailableProperty: 'IsWindowPatternAvailableProperty', + PropertyId.ItemStatusProperty: 'ItemStatusProperty', + PropertyId.ItemTypeProperty: 'ItemTypeProperty', + PropertyId.LabeledByProperty: 'LabeledByProperty', + PropertyId.LandmarkTypeProperty: 'LandmarkTypeProperty', + PropertyId.LegacyIAccessibleChildIdProperty: 'LegacyIAccessibleChildIdProperty', + PropertyId.LegacyIAccessibleDefaultActionProperty: 'LegacyIAccessibleDefaultActionProperty', + PropertyId.LegacyIAccessibleDescriptionProperty: 'LegacyIAccessibleDescriptionProperty', + PropertyId.LegacyIAccessibleHelpProperty: 'LegacyIAccessibleHelpProperty', + PropertyId.LegacyIAccessibleKeyboardShortcutProperty: 'LegacyIAccessibleKeyboardShortcutProperty', + PropertyId.LegacyIAccessibleNameProperty: 'LegacyIAccessibleNameProperty', + PropertyId.LegacyIAccessibleRoleProperty: 
'LegacyIAccessibleRoleProperty', + PropertyId.LegacyIAccessibleSelectionProperty: 'LegacyIAccessibleSelectionProperty', + PropertyId.LegacyIAccessibleStateProperty: 'LegacyIAccessibleStateProperty', + PropertyId.LegacyIAccessibleValueProperty: 'LegacyIAccessibleValueProperty', + PropertyId.LevelProperty: 'LevelProperty', + PropertyId.LiveSettingProperty: 'LiveSettingProperty', + PropertyId.LocalizedControlTypeProperty: 'LocalizedControlTypeProperty', + PropertyId.LocalizedLandmarkTypeProperty: 'LocalizedLandmarkTypeProperty', + PropertyId.MultipleViewCurrentViewProperty: 'MultipleViewCurrentViewProperty', + PropertyId.MultipleViewSupportedViewsProperty: 'MultipleViewSupportedViewsProperty', + PropertyId.NameProperty: 'NameProperty', + PropertyId.NativeWindowHandleProperty: 'NativeWindowHandleProperty', + PropertyId.OptimizeForVisualContentProperty: 'OptimizeForVisualContentProperty', + PropertyId.OrientationProperty: 'OrientationProperty', + PropertyId.OutlineColorProperty: 'OutlineColorProperty', + PropertyId.OutlineThicknessProperty: 'OutlineThicknessProperty', + PropertyId.PositionInSetProperty: 'PositionInSetProperty', + PropertyId.ProcessIdProperty: 'ProcessIdProperty', + PropertyId.ProviderDescriptionProperty: 'ProviderDescriptionProperty', + PropertyId.RangeValueIsReadOnlyProperty: 'RangeValueIsReadOnlyProperty', + PropertyId.RangeValueLargeChangeProperty: 'RangeValueLargeChangeProperty', + PropertyId.RangeValueMaximumProperty: 'RangeValueMaximumProperty', + PropertyId.RangeValueMinimumProperty: 'RangeValueMinimumProperty', + PropertyId.RangeValueSmallChangeProperty: 'RangeValueSmallChangeProperty', + PropertyId.RangeValueValueProperty: 'RangeValueValueProperty', + PropertyId.RotationProperty: 'RotationProperty', + PropertyId.RuntimeIdProperty: 'RuntimeIdProperty', + PropertyId.ScrollHorizontalScrollPercentProperty: 'ScrollHorizontalScrollPercentProperty', + PropertyId.ScrollHorizontalViewSizeProperty: 'ScrollHorizontalViewSizeProperty', + PropertyId.ScrollHorizontallyScrollableProperty: 'ScrollHorizontallyScrollableProperty', + PropertyId.ScrollVerticalScrollPercentProperty: 'ScrollVerticalScrollPercentProperty', + PropertyId.ScrollVerticalViewSizeProperty: 'ScrollVerticalViewSizeProperty', + PropertyId.ScrollVerticallyScrollableProperty: 'ScrollVerticallyScrollableProperty', + PropertyId.Selection2CurrentSelectedItemProperty: 'Selection2CurrentSelectedItemProperty', + PropertyId.Selection2FirstSelectedItemProperty: 'Selection2FirstSelectedItemProperty', + PropertyId.Selection2ItemCountProperty: 'Selection2ItemCountProperty', + PropertyId.Selection2LastSelectedItemProperty: 'Selection2LastSelectedItemProperty', + PropertyId.SelectionCanSelectMultipleProperty: 'SelectionCanSelectMultipleProperty', + PropertyId.SelectionIsSelectionRequiredProperty: 'SelectionIsSelectionRequiredProperty', + PropertyId.SelectionItemIsSelectedProperty: 'SelectionItemIsSelectedProperty', + PropertyId.SelectionItemSelectionContainerProperty: 'SelectionItemSelectionContainerProperty', + PropertyId.SelectionSelectionProperty: 'SelectionSelectionProperty', + PropertyId.SizeOfSetProperty: 'SizeOfSetProperty', + PropertyId.SizeProperty: 'SizeProperty', + PropertyId.SpreadsheetItemAnnotationObjectsProperty: 'SpreadsheetItemAnnotationObjectsProperty', + PropertyId.SpreadsheetItemAnnotationTypesProperty: 'SpreadsheetItemAnnotationTypesProperty', + PropertyId.SpreadsheetItemFormulaProperty: 'SpreadsheetItemFormulaProperty', + PropertyId.StylesExtendedPropertiesProperty: 'StylesExtendedPropertiesProperty', + 
PropertyId.StylesFillColorProperty: 'StylesFillColorProperty', + PropertyId.StylesFillPatternColorProperty: 'StylesFillPatternColorProperty', + PropertyId.StylesFillPatternStyleProperty: 'StylesFillPatternStyleProperty', + PropertyId.StylesShapeProperty: 'StylesShapeProperty', + PropertyId.StylesStyleIdProperty: 'StylesStyleIdProperty', + PropertyId.StylesStyleNameProperty: 'StylesStyleNameProperty', + PropertyId.TableColumnHeadersProperty: 'TableColumnHeadersProperty', + PropertyId.TableItemColumnHeaderItemsProperty: 'TableItemColumnHeaderItemsProperty', + PropertyId.TableItemRowHeaderItemsProperty: 'TableItemRowHeaderItemsProperty', + PropertyId.TableRowHeadersProperty: 'TableRowHeadersProperty', + PropertyId.TableRowOrColumnMajorProperty: 'TableRowOrColumnMajorProperty', + PropertyId.ToggleToggleStateProperty: 'ToggleToggleStateProperty', + PropertyId.Transform2CanZoomProperty: 'Transform2CanZoomProperty', + PropertyId.Transform2ZoomLevelProperty: 'Transform2ZoomLevelProperty', + PropertyId.Transform2ZoomMaximumProperty: 'Transform2ZoomMaximumProperty', + PropertyId.Transform2ZoomMinimumProperty: 'Transform2ZoomMinimumProperty', + PropertyId.TransformCanMoveProperty: 'TransformCanMoveProperty', + PropertyId.TransformCanResizeProperty: 'TransformCanResizeProperty', + PropertyId.TransformCanRotateProperty: 'TransformCanRotateProperty', + PropertyId.ValueIsReadOnlyProperty: 'ValueIsReadOnlyProperty', + PropertyId.ValueValueProperty: 'ValueValueProperty', + PropertyId.VisualEffectsProperty: 'VisualEffectsProperty', + PropertyId.WindowCanMaximizeProperty: 'WindowCanMaximizeProperty', + PropertyId.WindowCanMinimizeProperty: 'WindowCanMinimizeProperty', + PropertyId.WindowIsModalProperty: 'WindowIsModalProperty', + PropertyId.WindowIsTopmostProperty: 'WindowIsTopmostProperty', + PropertyId.WindowWindowInteractionStateProperty: 'WindowWindowInteractionStateProperty', + PropertyId.WindowWindowVisualStateProperty: 'WindowWindowVisualStateProperty', +} + + +class AccessibleRole: + """ + AccessibleRole from IUIAutomation. + Refer https://docs.microsoft.com/en-us/dotnet/api/system.windows.forms.accessiblerole?view=netframework-4.8 + """ + TitleBar = 0x1 + MenuBar = 0x2 + ScrollBar = 0x3 + Grip = 0x4 + Sound = 0x5 + Cursor = 0x6 + Caret = 0x7 + Alert = 0x8 + Window = 0x9 + Client = 0xa + MenuPopup = 0xb + MenuItem = 0xc + ToolTip = 0xd + Application = 0xe + Document = 0xf + Pane = 0x10 + Chart = 0x11 + Dialog = 0x12 + Border = 0x13 + Grouping = 0x14 + Separator = 0x15 + Toolbar = 0x16 + StatusBar = 0x17 + Table = 0x18 + ColumnHeader = 0x19 + RowHeader = 0x1a + Column = 0x1b + Row = 0x1c + Cell = 0x1d + Link = 0x1e + HelpBalloon = 0x1f + Character = 0x20 + List = 0x21 + ListItem = 0x22 + Outline = 0x23 + OutlineItem = 0x24 + PageTab = 0x25 + PropertyPage = 0x26 + Indicator = 0x27 + Graphic = 0x28 + StaticText = 0x29 + Text = 0x2a + PushButton = 0x2b + CheckButton = 0x2c + RadioButton = 0x2d + ComboBox = 0x2e + DropList = 0x2f + ProgressBar = 0x30 + Dial = 0x31 + HotkeyField = 0x32 + Slider = 0x33 + SpinButton = 0x34 + Diagram = 0x35 + Animation = 0x36 + Equation = 0x37 + ButtonDropDown = 0x38 + ButtonMenu = 0x39 + ButtonDropDownGrid = 0x3a + WhiteSpace = 0x3b + PageTabList = 0x3c + Clock = 0x3d + SplitButton = 0x3e + IpAddress = 0x3f + OutlineButton = 0x40 + + +class AccessibleState(): + """ + AccessibleState from IUIAutomation. 
+ Refer https://docs.microsoft.com/en-us/dotnet/api/system.windows.forms.accessiblestates?view=netframework-4.8 + """ + Normal = 0 + Unavailable = 0x1 + Selected = 0x2 + Focused = 0x4 + Pressed = 0x8 + Checked = 0x10 + Mixed = 0x20 + Indeterminate = 0x20 + ReadOnly = 0x40 + HotTracked = 0x80 + Default = 0x100 + Expanded = 0x200 + Collapsed = 0x400 + Busy = 0x800 + Floating = 0x1000 + Marqueed = 0x2000 + Animated = 0x4000 + Invisible = 0x8000 + Offscreen = 0x10000 + Sizeable = 0x20000 + Moveable = 0x40000 + SelfVoicing = 0x80000 + Focusable = 0x100000 + Selectable = 0x200000 + Linked = 0x400000 + Traversed = 0x800000 + MultiSelectable = 0x1000000 + ExtSelectable = 0x2000000 + AlertLow = 0x4000000 + AlertMedium = 0x8000000 + AlertHigh = 0x10000000 + Protected = 0x20000000 + Valid = 0x7fffffff + HasPopup = 0x40000000 + + +class AccessibleSelection: + """ + AccessibleSelection from IUIAutomation. + Refer https://docs.microsoft.com/en-us/dotnet/api/system.windows.forms.accessibleselection?view=netframework-4.8 + """ + None_ = 0 + TakeFocus = 0x1 + TakeSelection = 0x2 + ExtendSelection = 0x4 + AddSelection = 0x8 + RemoveSelection = 0x10 + + +class AnnotationType: + """ + AnnotationType from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/winauto/uiauto-annotation-type-identifiers + """ + AdvancedProofingIssue = 60020 + Author = 60019 + CircularReferenceError = 60022 + Comment = 60003 + ConflictingChange = 60018 + DataValidationError = 60021 + DeletionChange = 60012 + EditingLockedChange = 60016 + Endnote = 60009 + ExternalChange = 60017 + Footer = 60007 + Footnote = 60010 + FormatChange = 60014 + FormulaError = 60004 + GrammarError = 60002 + Header = 60006 + Highlighted = 60008 + InsertionChange = 60011 + Mathematics = 60023 + MoveChange = 60013 + SpellingError = 60001 + TrackChanges = 60005 + Unknown = 60000 + UnsyncedChange = 60015 + + +class NavigateDirection: + """ + NavigateDirection from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-navigatedirection + """ + Parent = 0 + NextSibling = 1 + PreviousSibling = 2 + FirstChild = 3 + LastChild = 4 + + +class DockPosition: + """ + DockPosition from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-dockposition + """ + Top = 0 + Left = 1 + Bottom = 2 + Right = 3 + Fill = 4 + None_ = 5 + + +class ScrollAmount: + """ + ScrollAmount from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-scrollamount + """ + LargeDecrement = 0 + SmallDecrement = 1 + NoAmount = 2 + LargeIncrement = 3 + SmallIncrement = 4 + + +class StyleId: + """ + StyleId from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/winauto/uiauto-style-identifiers + """ + Custom = 70000 + Heading1 = 70001 + Heading2 = 70002 + Heading3 = 70003 + Heading4 = 70004 + Heading5 = 70005 + Heading6 = 70006 + Heading7 = 70007 + Heading8 = 70008 + Heading9 = 70009 + Title = 70010 + Subtitle = 70011 + Normal = 70012 + Emphasis = 70013 + Quote = 70014 + BulletedList = 70015 + NumberedList = 70016 + + +class RowOrColumnMajor: + """ + RowOrColumnMajor from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-roworcolumnmajor + """ + RowMajor = 0 + ColumnMajor = 1 + Indeterminate = 2 + + +class ExpandCollapseState: + """ + ExpandCollapseState from IUIAutomation. 
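The AccessibleState values above are bit flags rather than an enumeration, so a raw MSAA state value is decoded by testing individual bits; a small sketch follows (an illustration only, assuming the constants are importable from this module as uiautomation — it is not part of the patched file):

# Sketch: decode an MSAA state bitmask into readable flag names.
# Assumption: the constants are importable from this module as `uiautomation`.
from uiautomation import AccessibleState

def accessible_state_names(state: int):
    names = []
    for name, value in vars(AccessibleState).items():
        if name.startswith('_') or not isinstance(value, int):
            continue
        if value in (AccessibleState.Normal, AccessibleState.Valid):
            continue  # skip the zero value and the catch-all Valid mask
        if state & value == value:
            names.append(name)
    return names

print(accessible_state_names(AccessibleState.Focused | AccessibleState.Selected))
# ['Selected', 'Focused']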
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-expandcollapsestate + """ + Collapsed = 0 + Expanded = 1 + PartiallyExpanded = 2 + LeafNode = 3 + + +class OrientationType: + """ + OrientationType from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-orientationtype + """ + None_ = 0 + Horizontal = 1 + Vertical = 2 + + +class ToggleState: + """ + ToggleState from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-togglestate + """ + Off = 0 + On = 1 + Indeterminate = 2 + + +class TextPatternRangeEndpoint: + """ + TextPatternRangeEndpoint from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-textpatternrangeendpoint + """ + Start = 0 + End = 1 + + +class TextAttributeId: + """ + TextAttributeId from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/winauto/uiauto-textattribute-ids + """ + AfterParagraphSpacingAttribute = 40042 + AnimationStyleAttribute = 40000 + AnnotationObjectsAttribute = 40032 + AnnotationTypesAttribute = 40031 + BackgroundColorAttribute = 40001 + BeforeParagraphSpacingAttribute = 40041 + BulletStyleAttribute = 40002 + CapStyleAttribute = 40003 + CaretBidiModeAttribute = 40039 + CaretPositionAttribute = 40038 + CultureAttribute = 40004 + FontNameAttribute = 40005 + FontSizeAttribute = 40006 + FontWeightAttribute = 40007 + ForegroundColorAttribute = 40008 + HorizontalTextAlignmentAttribute = 40009 + IndentationFirstLineAttribute = 40010 + IndentationLeadingAttribute = 40011 + IndentationTrailingAttribute = 40012 + IsActiveAttribute = 40036 + IsHiddenAttribute = 40013 + IsItalicAttribute = 40014 + IsReadOnlyAttribute = 40015 + IsSubscriptAttribute = 40016 + IsSuperscriptAttribute = 40017 + LineSpacingAttribute = 40040 + LinkAttribute = 40035 + MarginBottomAttribute = 40018 + MarginLeadingAttribute = 40019 + MarginTopAttribute = 40020 + MarginTrailingAttribute = 40021 + OutlineStylesAttribute = 40022 + OverlineColorAttribute = 40023 + OverlineStyleAttribute = 40024 + SayAsInterpretAsAttribute = 40043 + SelectionActiveEndAttribute = 40037 + StrikethroughColorAttribute = 40025 + StrikethroughStyleAttribute = 40026 + StyleIdAttribute = 40034 + StyleNameAttribute = 40033 + TabsAttribute = 40027 + TextFlowDirectionsAttribute = 40028 + UnderlineColorAttribute = 40029 + UnderlineStyleAttribute = 40030 + + +class TextUnit: + """ + TextUnit from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-textunit + """ + Character = 0 + Format = 1 + Word = 2 + Line = 3 + Paragraph = 4 + Page = 5 + Document = 6 + + +class ZoomUnit: + """ + ZoomUnit from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-zoomunit + """ + NoAmount = 0 + LargeDecrement = 1 + SmallDecrement = 2 + LargeIncrement = 3 + SmallIncrement = 4 + + +class WindowInteractionState: + """ + WindowInteractionState from IUIAutomation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-windowinteractionstate + """ + Running = 0 + Closing = 1 + ReadyForUserInteraction = 2 + BlockedByModalWindow = 3 + NotResponding = 4 + + +class WindowVisualState: + """ + WindowVisualState from IUIAutomation. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationcore/ne-uiautomationcore-windowvisualstate + """ + Normal = 0 + Maximized = 1 + Minimized = 2 + + +class ConsoleColor: + """ConsoleColor from Win32.""" + Default = -1 + Black = 0 + DarkBlue = 1 + DarkGreen = 2 + DarkCyan = 3 + DarkRed = 4 + DarkMagenta = 5 + DarkYellow = 6 + Gray = 7 + DarkGray = 8 + Blue = 9 + Green = 10 + Cyan = 11 + Red = 12 + Magenta = 13 + Yellow = 14 + White = 15 + + +class GAFlag: + """GAFlag from Win32.""" + Parent = 1 + Root = 2 + RootOwner = 3 + + +class MouseEventFlag: + """MouseEventFlag from Win32.""" + Move = 0x0001 + LeftDown = 0x0002 + LeftUp = 0x0004 + RightDown = 0x0008 + RightUp = 0x0010 + MiddleDown = 0x0020 + MiddleUp = 0x0040 + XDown = 0x0080 + XUp = 0x0100 + Wheel = 0x0800 + HWheel = 0x1000 + MoveNoCoalesce = 0x2000 + VirtualDesk = 0x4000 + Absolute = 0x8000 + + +class KeyboardEventFlag: + """KeyboardEventFlag from Win32.""" + KeyDown = 0x0000 + ExtendedKey = 0x0001 + KeyUp = 0x0002 + KeyUnicode = 0x0004 + KeyScanCode = 0x0008 + + +class InputType: + """InputType from Win32""" + Mouse = 0 + Keyboard = 1 + Hardware = 2 + + +class ModifierKey: + """ModifierKey from Win32.""" + Alt = 0x0001 + Control = 0x0002 + Shift = 0x0004 + Win = 0x0008 + NoRepeat = 0x4000 + + +class SW: + """ShowWindow params from Win32.""" + Hide = 0 + ShowNormal = 1 + Normal = 1 + ShowMinimized = 2 + ShowMaximized = 3 + Maximize = 3 + ShowNoActivate = 4 + Show = 5 + Minimize = 6 + ShowMinNoActive = 7 + ShowNA = 8 + Restore = 9 + ShowDefault = 10 + ForceMinimize = 11 + Max = 11 + + +class SWP: + """SetWindowPos params from Win32.""" + HWND_Top = 0 + HWND_Bottom = 1 + HWND_Topmost = -1 + HWND_NoTopmost = -2 + SWP_NoSize = 0x0001 + SWP_NoMove = 0x0002 + SWP_NoZOrder = 0x0004 + SWP_NoRedraw = 0x0008 + SWP_NoActivate = 0x0010 + SWP_FrameChanged = 0x0020 # The frame changed: send WM_NCCALCSIZE + SWP_ShowWindow = 0x0040 + SWP_HideWindow = 0x0080 + SWP_NoCopyBits = 0x0100 + SWP_NoOwnerZOrder = 0x0200 # Don't do owner Z ordering + SWP_NoSendChanging = 0x0400 # Don't send WM_WINDOWPOSCHANGING + SWP_DrawFrame = SWP_FrameChanged + SWP_NoReposition = SWP_NoOwnerZOrder + SWP_DeferErase = 0x2000 + SWP_AsyncWindowPos = 0x4000 + + +class MB: + """MessageBox flags from Win32.""" + Ok = 0x00000000 + OkCancel = 0x00000001 + AbortRetryIgnore = 0x00000002 + YesNoCancel = 0x00000003 + YesNo = 0x00000004 + RetryCancel = 0x00000005 + CancelTryContinue = 0x00000006 + IconHand = 0x00000010 + IconQuestion = 0x00000020 + IconExclamation = 0x00000030 + IconAsterisk = 0x00000040 + UserIcon = 0x00000080 + IconWarning = 0x00000030 + IconError = 0x00000010 + IconInformation = 0x00000040 + IconStop = 0x00000010 + DefButton1 = 0x00000000 + DefButton2 = 0x00000100 + DefButton3 = 0x00000200 + DefButton4 = 0x00000300 + ApplModal = 0x00000000 + SystemModal = 0x00001000 + TaskModal = 0x00002000 + Help = 0x00004000 # help button + NoFocus = 0x00008000 + SetForeground = 0x00010000 + DefaultDesktopOnly = 0x00020000 + Topmost = 0x00040000 + Right = 0x00080000 + RtlReading = 0x00100000 + ServiceNotification = 0x00200000 + ServiceNotificationNT3X = 0x00040000 + + TypeMask = 0x0000000f + IconMask = 0x000000f0 + DefMask = 0x00000f00 + ModeMask = 0x00003000 + MiscMask = 0x0000c000 + + IdOk = 1 + IdCancel = 2 + IdAbort = 3 + IdRetry = 4 + IdIgnore = 5 + IdYes = 6 + IdNo = 7 + IdClose = 8 + IdHelp = 9 + IdTryAgain = 10 + IdContinue = 11 + IdTimeout = 32000 + + +class GWL: + ExStyle = -20 + HInstance = -6 + HwndParent = -8 + ID = -12 + Style = -16 + UserData = 
-21 + WndProc = -4 + + +class ProcessDpiAwareness: + DpiUnaware = 0 + SystemDpiAware = 1 + PerMonitorDpiAware = 2 + + +class DpiAwarenessContext: + Unaware = -1 + SystemAware = -2 + PerMonitorAware = -3 + PerMonitorAwareV2 = -4 + UnawareGdiScaled = -5 + + +class Keys: + """Key codes from Win32.""" + VK_LBUTTON = 0x01 #Left mouse button + VK_RBUTTON = 0x02 #Right mouse button + VK_CANCEL = 0x03 #Control-break processing + VK_MBUTTON = 0x04 #Middle mouse button (three-button mouse) + VK_XBUTTON1 = 0x05 #X1 mouse button + VK_XBUTTON2 = 0x06 #X2 mouse button + VK_BACK = 0x08 #BACKSPACE key + VK_TAB = 0x09 #TAB key + VK_CLEAR = 0x0C #CLEAR key + VK_RETURN = 0x0D #ENTER key + VK_ENTER = 0x0D + VK_SHIFT = 0x10 #SHIFT key + VK_CONTROL = 0x11 #CTRL key + VK_MENU = 0x12 #ALT key + VK_PAUSE = 0x13 #PAUSE key + VK_CAPITAL = 0x14 #CAPS LOCK key + VK_KANA = 0x15 #IME Kana mode + VK_HANGUEL = 0x15 #IME Hanguel mode (maintained for compatibility; use VK_HANGUL) + VK_HANGUL = 0x15 #IME Hangul mode + VK_JUNJA = 0x17 #IME Junja mode + VK_FINAL = 0x18 #IME final mode + VK_HANJA = 0x19 #IME Hanja mode + VK_KANJI = 0x19 #IME Kanji mode + VK_ESCAPE = 0x1B #ESC key + VK_CONVERT = 0x1C #IME convert + VK_NONCONVERT = 0x1D #IME nonconvert + VK_ACCEPT = 0x1E #IME accept + VK_MODECHANGE = 0x1F #IME mode change request + VK_SPACE = 0x20 #SPACEBAR + VK_PRIOR = 0x21 #PAGE UP key + VK_PAGEUP = 0x21 + VK_NEXT = 0x22 #PAGE DOWN key + VK_PAGEDOWN = 0x22 + VK_END = 0x23 #END key + VK_HOME = 0x24 #HOME key + VK_LEFT = 0x25 #LEFT ARROW key + VK_UP = 0x26 #UP ARROW key + VK_RIGHT = 0x27 #RIGHT ARROW key + VK_DOWN = 0x28 #DOWN ARROW key + VK_SELECT = 0x29 #SELECT key + VK_PRINT = 0x2A #PRINT key + VK_EXECUTE = 0x2B #EXECUTE key + VK_SNAPSHOT = 0x2C #PRINT SCREEN key + VK_INSERT = 0x2D #INS key + VK_DELETE = 0x2E #DEL key + VK_HELP = 0x2F #HELP key + VK_0 = 0x30 #0 key + VK_1 = 0x31 #1 key + VK_2 = 0x32 #2 key + VK_3 = 0x33 #3 key + VK_4 = 0x34 #4 key + VK_5 = 0x35 #5 key + VK_6 = 0x36 #6 key + VK_7 = 0x37 #7 key + VK_8 = 0x38 #8 key + VK_9 = 0x39 #9 key + VK_A = 0x41 #A key + VK_B = 0x42 #B key + VK_C = 0x43 #C key + VK_D = 0x44 #D key + VK_E = 0x45 #E key + VK_F = 0x46 #F key + VK_G = 0x47 #G key + VK_H = 0x48 #H key + VK_I = 0x49 #I key + VK_J = 0x4A #J key + VK_K = 0x4B #K key + VK_L = 0x4C #L key + VK_M = 0x4D #M key + VK_N = 0x4E #N key + VK_O = 0x4F #O key + VK_P = 0x50 #P key + VK_Q = 0x51 #Q key + VK_R = 0x52 #R key + VK_S = 0x53 #S key + VK_T = 0x54 #T key + VK_U = 0x55 #U key + VK_V = 0x56 #V key + VK_W = 0x57 #W key + VK_X = 0x58 #X key + VK_Y = 0x59 #Y key + VK_Z = 0x5A #Z key + VK_LWIN = 0x5B #Left Windows key (Natural keyboard) + VK_RWIN = 0x5C #Right Windows key (Natural keyboard) + VK_APPS = 0x5D #Applications key (Natural keyboard) + VK_SLEEP = 0x5F #Computer Sleep key + VK_NUMPAD0 = 0x60 #Numeric keypad 0 key + VK_NUMPAD1 = 0x61 #Numeric keypad 1 key + VK_NUMPAD2 = 0x62 #Numeric keypad 2 key + VK_NUMPAD3 = 0x63 #Numeric keypad 3 key + VK_NUMPAD4 = 0x64 #Numeric keypad 4 key + VK_NUMPAD5 = 0x65 #Numeric keypad 5 key + VK_NUMPAD6 = 0x66 #Numeric keypad 6 key + VK_NUMPAD7 = 0x67 #Numeric keypad 7 key + VK_NUMPAD8 = 0x68 #Numeric keypad 8 key + VK_NUMPAD9 = 0x69 #Numeric keypad 9 key + VK_MULTIPLY = 0x6A #Multiply key + VK_ADD = 0x6B #Add key + VK_SEPARATOR = 0x6C #Separator key + VK_SUBTRACT = 0x6D #Subtract key + VK_DECIMAL = 0x6E #Decimal key + VK_DIVIDE = 0x6F #Divide key + VK_F1 = 0x70 #F1 key + VK_F2 = 0x71 #F2 key + VK_F3 = 0x72 #F3 key + VK_F4 = 0x73 #F4 key + VK_F5 = 0x74 #F5 key + VK_F6 = 0x75 #F6 key + 
VK_F7 = 0x76 #F7 key + VK_F8 = 0x77 #F8 key + VK_F9 = 0x78 #F9 key + VK_F10 = 0x79 #F10 key + VK_F11 = 0x7A #F11 key + VK_F12 = 0x7B #F12 key + VK_F13 = 0x7C #F13 key + VK_F14 = 0x7D #F14 key + VK_F15 = 0x7E #F15 key + VK_F16 = 0x7F #F16 key + VK_F17 = 0x80 #F17 key + VK_F18 = 0x81 #F18 key + VK_F19 = 0x82 #F19 key + VK_F20 = 0x83 #F20 key + VK_F21 = 0x84 #F21 key + VK_F22 = 0x85 #F22 key + VK_F23 = 0x86 #F23 key + VK_F24 = 0x87 #F24 key + VK_NUMLOCK = 0x90 #NUM LOCK key + VK_SCROLL = 0x91 #SCROLL LOCK key + VK_LSHIFT = 0xA0 #Left SHIFT key + VK_RSHIFT = 0xA1 #Right SHIFT key + VK_LCONTROL = 0xA2 #Left CONTROL key + VK_RCONTROL = 0xA3 #Right CONTROL key + VK_LMENU = 0xA4 #Left MENU key + VK_RMENU = 0xA5 #Right MENU key + VK_BROWSER_BACK = 0xA6 #Browser Back key + VK_BROWSER_FORWARD = 0xA7 #Browser Forward key + VK_BROWSER_REFRESH = 0xA8 #Browser Refresh key + VK_BROWSER_STOP = 0xA9 #Browser Stop key + VK_BROWSER_SEARCH = 0xAA #Browser Search key + VK_BROWSER_FAVORITES = 0xAB #Browser Favorites key + VK_BROWSER_HOME = 0xAC #Browser Start and Home key + VK_VOLUME_MUTE = 0xAD #Volume Mute key + VK_VOLUME_DOWN = 0xAE #Volume Down key + VK_VOLUME_UP = 0xAF #Volume Up key + VK_MEDIA_NEXT_TRACK = 0xB0 #Next Track key + VK_MEDIA_PREV_TRACK = 0xB1 #Previous Track key + VK_MEDIA_STOP = 0xB2 #Stop Media key + VK_MEDIA_PLAY_PAUSE = 0xB3 #Play/Pause Media key + VK_LAUNCH_MAIL = 0xB4 #Start Mail key + VK_LAUNCH_MEDIA_SELECT = 0xB5 #Select Media key + VK_LAUNCH_APP1 = 0xB6 #Start Application 1 key + VK_LAUNCH_APP2 = 0xB7 #Start Application 2 key + VK_OEM_1 = 0xBA #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the ';:' key + VK_OEM_PLUS = 0xBB #For any country/region, the '+' key + VK_OEM_COMMA = 0xBC #For any country/region, the ',' key + VK_OEM_MINUS = 0xBD #For any country/region, the '-' key + VK_OEM_PERIOD = 0xBE #For any country/region, the '.' key + VK_OEM_2 = 0xBF #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the '/?' key + VK_OEM_3 = 0xC0 #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the '`~' key + VK_OEM_4 = 0xDB #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the '[{' key + VK_OEM_5 = 0xDC #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the '\|' key + VK_OEM_6 = 0xDD #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the ']}' key + VK_OEM_7 = 0xDE #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the 'single-quote/double-quote' key + VK_OEM_8 = 0xDF #Used for miscellaneous characters; it can vary by keyboard. + VK_OEM_102 = 0xE2 #Either the angle bracket key or the backslash key on the RT 102-key keyboard + VK_PROCESSKEY = 0xE5 #IME PROCESS key + VK_PACKET = 0xE7 #Used to pass Unicode characters as if they were keystrokes. The VK_PACKET key is the low word of a 32-bit Virtual Key value used for non-keyboard input methods. 
For more information, see Remark in KEYBDINPUT, SendInput, WM_KEYDOWN, and WM_KeyUp + VK_ATTN = 0xF6 #Attn key + VK_CRSEL = 0xF7 #CrSel key + VK_EXSEL = 0xF8 #ExSel key + VK_EREOF = 0xF9 #Erase EOF key + VK_PLAY = 0xFA #Play key + VK_ZOOM = 0xFB #Zoom key + VK_NONAME = 0xFC #Reserved + VK_PA1 = 0xFD #PA1 key + VK_OEM_CLEAR = 0xFE #Clear key + + +SpecialKeyNames = { + 'LBUTTON': Keys.VK_LBUTTON, #Left mouse button + 'RBUTTON': Keys.VK_RBUTTON, #Right mouse button + 'CANCEL': Keys.VK_CANCEL, #Control-break processing + 'MBUTTON': Keys.VK_MBUTTON, #Middle mouse button (three-button mouse) + 'XBUTTON1': Keys.VK_XBUTTON1, #X1 mouse button + 'XBUTTON2': Keys.VK_XBUTTON2, #X2 mouse button + 'BACK': Keys.VK_BACK, #BACKSPACE key + 'TAB': Keys.VK_TAB, #TAB key + 'CLEAR': Keys.VK_CLEAR, #CLEAR key + 'RETURN': Keys.VK_RETURN, #ENTER key + 'ENTER': Keys.VK_RETURN, #ENTER key + 'SHIFT': Keys.VK_SHIFT, #SHIFT key + 'CTRL': Keys.VK_CONTROL, #CTRL key + 'CONTROL': Keys.VK_CONTROL, #CTRL key + 'ALT': Keys.VK_MENU, #ALT key + 'PAUSE': Keys.VK_PAUSE, #PAUSE key + 'CAPITAL': Keys.VK_CAPITAL, #CAPS LOCK key + 'KANA': Keys.VK_KANA, #IME Kana mode + 'HANGUEL': Keys.VK_HANGUEL, #IME Hanguel mode (maintained for compatibility; use VK_HANGUL) + 'HANGUL': Keys.VK_HANGUL, #IME Hangul mode + 'JUNJA': Keys.VK_JUNJA, #IME Junja mode + 'FINAL': Keys.VK_FINAL, #IME final mode + 'HANJA': Keys.VK_HANJA, #IME Hanja mode + 'KANJI': Keys.VK_KANJI, #IME Kanji mode + 'ESC': Keys.VK_ESCAPE, #ESC key + 'ESCAPE': Keys.VK_ESCAPE, #ESC key + 'CONVERT': Keys.VK_CONVERT, #IME convert + 'NONCONVERT': Keys.VK_NONCONVERT, #IME nonconvert + 'ACCEPT': Keys.VK_ACCEPT, #IME accept + 'MODECHANGE': Keys.VK_MODECHANGE, #IME mode change request + 'SPACE': Keys.VK_SPACE, #SPACEBAR + 'PRIOR': Keys.VK_PRIOR, #PAGE UP key + 'PAGEUP': Keys.VK_PRIOR, #PAGE UP key + 'NEXT': Keys.VK_NEXT, #PAGE DOWN key + 'PAGEDOWN': Keys.VK_NEXT, #PAGE DOWN key + 'END': Keys.VK_END, #END key + 'HOME': Keys.VK_HOME, #HOME key + 'LEFT': Keys.VK_LEFT, #LEFT ARROW key + 'UP': Keys.VK_UP, #UP ARROW key + 'RIGHT': Keys.VK_RIGHT, #RIGHT ARROW key + 'DOWN': Keys.VK_DOWN, #DOWN ARROW key + 'SELECT': Keys.VK_SELECT, #SELECT key + 'PRINT': Keys.VK_PRINT, #PRINT key + 'EXECUTE': Keys.VK_EXECUTE, #EXECUTE key + 'SNAPSHOT': Keys.VK_SNAPSHOT, #PRINT SCREEN key + 'PRINTSCREEN': Keys.VK_SNAPSHOT, #PRINT SCREEN key + 'INSERT': Keys.VK_INSERT, #INS key + 'INS': Keys.VK_INSERT, #INS key + 'DELETE': Keys.VK_DELETE, #DEL key + 'DEL': Keys.VK_DELETE, #DEL key + 'HELP': Keys.VK_HELP, #HELP key + 'WIN': Keys.VK_LWIN, #Left Windows key (Natural keyboard) + 'LWIN': Keys.VK_LWIN, #Left Windows key (Natural keyboard) + 'RWIN': Keys.VK_RWIN, #Right Windows key (Natural keyboard) + 'APPS': Keys.VK_APPS, #Applications key (Natural keyboard) + 'SLEEP': Keys.VK_SLEEP, #Computer Sleep key + 'NUMPAD0': Keys.VK_NUMPAD0, #Numeric keypad 0 key + 'NUMPAD1': Keys.VK_NUMPAD1, #Numeric keypad 1 key + 'NUMPAD2': Keys.VK_NUMPAD2, #Numeric keypad 2 key + 'NUMPAD3': Keys.VK_NUMPAD3, #Numeric keypad 3 key + 'NUMPAD4': Keys.VK_NUMPAD4, #Numeric keypad 4 key + 'NUMPAD5': Keys.VK_NUMPAD5, #Numeric keypad 5 key + 'NUMPAD6': Keys.VK_NUMPAD6, #Numeric keypad 6 key + 'NUMPAD7': Keys.VK_NUMPAD7, #Numeric keypad 7 key + 'NUMPAD8': Keys.VK_NUMPAD8, #Numeric keypad 8 key + 'NUMPAD9': Keys.VK_NUMPAD9, #Numeric keypad 9 key + 'MULTIPLY': Keys.VK_MULTIPLY, #Multiply key + 'ADD': Keys.VK_ADD, #Add key + 'SEPARATOR': Keys.VK_SEPARATOR, #Separator key + 'SUBTRACT': Keys.VK_SUBTRACT, #Subtract key + 'DECIMAL': Keys.VK_DECIMAL, #Decimal 
key + 'DIVIDE': Keys.VK_DIVIDE, #Divide key + 'F1': Keys.VK_F1, #F1 key + 'F2': Keys.VK_F2, #F2 key + 'F3': Keys.VK_F3, #F3 key + 'F4': Keys.VK_F4, #F4 key + 'F5': Keys.VK_F5, #F5 key + 'F6': Keys.VK_F6, #F6 key + 'F7': Keys.VK_F7, #F7 key + 'F8': Keys.VK_F8, #F8 key + 'F9': Keys.VK_F9, #F9 key + 'F10': Keys.VK_F10, #F10 key + 'F11': Keys.VK_F11, #F11 key + 'F12': Keys.VK_F12, #F12 key + 'F13': Keys.VK_F13, #F13 key + 'F14': Keys.VK_F14, #F14 key + 'F15': Keys.VK_F15, #F15 key + 'F16': Keys.VK_F16, #F16 key + 'F17': Keys.VK_F17, #F17 key + 'F18': Keys.VK_F18, #F18 key + 'F19': Keys.VK_F19, #F19 key + 'F20': Keys.VK_F20, #F20 key + 'F21': Keys.VK_F21, #F21 key + 'F22': Keys.VK_F22, #F22 key + 'F23': Keys.VK_F23, #F23 key + 'F24': Keys.VK_F24, #F24 key + 'NUMLOCK': Keys.VK_NUMLOCK, #NUM LOCK key + 'SCROLL': Keys.VK_SCROLL, #SCROLL LOCK key + 'LSHIFT': Keys.VK_LSHIFT, #Left SHIFT key + 'RSHIFT': Keys.VK_RSHIFT, #Right SHIFT key + 'LCONTROL': Keys.VK_LCONTROL, #Left CONTROL key + 'LCTRL': Keys.VK_LCONTROL, #Left CONTROL key + 'RCONTROL': Keys.VK_RCONTROL, #Right CONTROL key + 'RCTRL': Keys.VK_RCONTROL, #Right CONTROL key + 'LALT': Keys.VK_LMENU, #Left MENU key + 'RALT': Keys.VK_RMENU, #Right MENU key + 'BROWSER_BACK': Keys.VK_BROWSER_BACK, #Browser Back key + 'BROWSER_FORWARD': Keys.VK_BROWSER_FORWARD, #Browser Forward key + 'BROWSER_REFRESH': Keys.VK_BROWSER_REFRESH, #Browser Refresh key + 'BROWSER_STOP': Keys.VK_BROWSER_STOP, #Browser Stop key + 'BROWSER_SEARCH': Keys.VK_BROWSER_SEARCH, #Browser Search key + 'BROWSER_FAVORITES': Keys.VK_BROWSER_FAVORITES, #Browser Favorites key + 'BROWSER_HOME': Keys.VK_BROWSER_HOME, #Browser Start and Home key + 'VOLUME_MUTE': Keys.VK_VOLUME_MUTE, #Volume Mute key + 'VOLUME_DOWN': Keys.VK_VOLUME_DOWN, #Volume Down key + 'VOLUME_UP': Keys.VK_VOLUME_UP, #Volume Up key + 'MEDIA_NEXT_TRACK': Keys.VK_MEDIA_NEXT_TRACK, #Next Track key + 'MEDIA_PREV_TRACK': Keys.VK_MEDIA_PREV_TRACK, #Previous Track key + 'MEDIA_STOP': Keys.VK_MEDIA_STOP, #Stop Media key + 'MEDIA_PLAY_PAUSE': Keys.VK_MEDIA_PLAY_PAUSE, #Play/Pause Media key + 'LAUNCH_MAIL': Keys.VK_LAUNCH_MAIL, #Start Mail key + 'LAUNCH_MEDIA_SELECT': Keys.VK_LAUNCH_MEDIA_SELECT,#Select Media key + 'LAUNCH_APP1': Keys.VK_LAUNCH_APP1, #Start Application 1 key + 'LAUNCH_APP2': Keys.VK_LAUNCH_APP2, #Start Application 2 key + 'OEM_1': Keys.VK_OEM_1, #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the ';:' key + 'OEM_PLUS': Keys.VK_OEM_PLUS, #For any country/region, the '+' key + 'OEM_COMMA': Keys.VK_OEM_COMMA, #For any country/region, the ',' key + 'OEM_MINUS': Keys.VK_OEM_MINUS, #For any country/region, the '-' key + 'OEM_PERIOD': Keys.VK_OEM_PERIOD, #For any country/region, the '.' key + 'OEM_2': Keys.VK_OEM_2, #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the '/?' 
key + 'OEM_3': Keys.VK_OEM_3, #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the '`~' key + 'OEM_4': Keys.VK_OEM_4, #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the '[{' key + 'OEM_5': Keys.VK_OEM_5, #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the '\|' key + 'OEM_6': Keys.VK_OEM_6, #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the ']}' key + 'OEM_7': Keys.VK_OEM_7, #Used for miscellaneous characters; it can vary by keyboard.For the US standard keyboard, the 'single-quote/double-quote' key + 'OEM_8': Keys.VK_OEM_8, #Used for miscellaneous characters; it can vary by keyboard. + 'OEM_102': Keys.VK_OEM_102, #Either the angle bracket key or the backslash key on the RT 102-key keyboard + 'PROCESSKEY': Keys.VK_PROCESSKEY, #IME PROCESS key + 'PACKET': Keys.VK_PACKET, #Used to pass Unicode characters as if they were keystrokes. The VK_PACKET key is the low word of a 32-bit Virtual Key value used for non-keyboard input methods. For more information, see Remark in KEYBDINPUT, SendInput, WM_KEYDOWN, and WM_KeyUp + 'ATTN': Keys.VK_ATTN, #Attn key + 'CRSEL': Keys.VK_CRSEL, #CrSel key + 'EXSEL': Keys.VK_EXSEL, #ExSel key + 'EREOF': Keys.VK_EREOF, #Erase EOF key + 'PLAY': Keys.VK_PLAY, #Play key + 'ZOOM': Keys.VK_ZOOM, #Zoom key + 'NONAME': Keys.VK_NONAME, #Reserved + 'PA1': Keys.VK_PA1, #PA1 key + 'OEM_CLEAR': Keys.VK_OEM_CLEAR, #Clear key +} + + +CharacterCodes = { + '0': Keys.VK_0, #0 key + '1': Keys.VK_1, #1 key + '2': Keys.VK_2, #2 key + '3': Keys.VK_3, #3 key + '4': Keys.VK_4, #4 key + '5': Keys.VK_5, #5 key + '6': Keys.VK_6, #6 key + '7': Keys.VK_7, #7 key + '8': Keys.VK_8, #8 key + '9': Keys.VK_9, #9 key + 'a': Keys.VK_A, #A key + 'A': Keys.VK_A, #A key + 'b': Keys.VK_B, #B key + 'B': Keys.VK_B, #B key + 'c': Keys.VK_C, #C key + 'C': Keys.VK_C, #C key + 'd': Keys.VK_D, #D key + 'D': Keys.VK_D, #D key + 'e': Keys.VK_E, #E key + 'E': Keys.VK_E, #E key + 'f': Keys.VK_F, #F key + 'F': Keys.VK_F, #F key + 'g': Keys.VK_G, #G key + 'G': Keys.VK_G, #G key + 'h': Keys.VK_H, #H key + 'H': Keys.VK_H, #H key + 'i': Keys.VK_I, #I key + 'I': Keys.VK_I, #I key + 'j': Keys.VK_J, #J key + 'J': Keys.VK_J, #J key + 'k': Keys.VK_K, #K key + 'K': Keys.VK_K, #K key + 'l': Keys.VK_L, #L key + 'L': Keys.VK_L, #L key + 'm': Keys.VK_M, #M key + 'M': Keys.VK_M, #M key + 'n': Keys.VK_N, #N key + 'N': Keys.VK_N, #N key + 'o': Keys.VK_O, #O key + 'O': Keys.VK_O, #O key + 'p': Keys.VK_P, #P key + 'P': Keys.VK_P, #P key + 'q': Keys.VK_Q, #Q key + 'Q': Keys.VK_Q, #Q key + 'r': Keys.VK_R, #R key + 'R': Keys.VK_R, #R key + 's': Keys.VK_S, #S key + 'S': Keys.VK_S, #S key + 't': Keys.VK_T, #T key + 'T': Keys.VK_T, #T key + 'u': Keys.VK_U, #U key + 'U': Keys.VK_U, #U key + 'v': Keys.VK_V, #V key + 'V': Keys.VK_V, #V key + 'w': Keys.VK_W, #W key + 'W': Keys.VK_W, #W key + 'x': Keys.VK_X, #X key + 'X': Keys.VK_X, #X key + 'y': Keys.VK_Y, #Y key + 'Y': Keys.VK_Y, #Y key + 'z': Keys.VK_Z, #Z key + 'Z': Keys.VK_Z, #Z key + ' ': Keys.VK_SPACE, #Space key + '`': Keys.VK_OEM_3, #` key + #'~' : Keys.VK_OEM_3, #~ key + '-': Keys.VK_OEM_MINUS, #- key + #'_' : Keys.VK_OEM_MINUS, #_ key + '=': Keys.VK_OEM_PLUS, #= key + #'+' : Keys.VK_OEM_PLUS, #+ key + '[': Keys.VK_OEM_4, #[ key + #'{' : Keys.VK_OEM_4, #{ key + ']': Keys.VK_OEM_6, #] key + #'}' : Keys.VK_OEM_6, #} key + '\\': Keys.VK_OEM_5, #\ key + #'|' : Keys.VK_OEM_5, #| key + ';': Keys.VK_OEM_1, #; key + #':' : 
Keys.VK_OEM_1, #: key + '\'': Keys.VK_OEM_7, #' key + #'"' : Keys.VK_OEM_7, #" key + ',': Keys.VK_OEM_COMMA, #, key + #'<' : Keys.VK_OEM_COMMA, #< key + '.': Keys.VK_OEM_PERIOD, #. key + #'>' : Keys.VK_OEM_PERIOD, #> key + '/': Keys.VK_OEM_2, #/ key + #'?' : Keys.VK_OEM_2, #? key +} + + +class ConsoleScreenBufferInfo(ctypes.Structure): + _fields_ = [ + ('dwSize', ctypes.wintypes._COORD), + ('dwCursorPosition', ctypes.wintypes._COORD), + ('wAttributes', ctypes.c_uint), + ('srWindow', ctypes.wintypes.SMALL_RECT), + ('dwMaximumWindowSize', ctypes.wintypes._COORD), + ] + + +class MOUSEINPUT(ctypes.Structure): + _fields_ = (('dx', ctypes.wintypes.LONG), + ('dy', ctypes.wintypes.LONG), + ('mouseData', ctypes.wintypes.DWORD), + ('dwFlags', ctypes.wintypes.DWORD), + ('time', ctypes.wintypes.DWORD), + ('dwExtraInfo', ctypes.wintypes.PULONG)) + + +class KEYBDINPUT(ctypes.Structure): + _fields_ = (('wVk', ctypes.wintypes.WORD), + ('wScan', ctypes.wintypes.WORD), + ('dwFlags', ctypes.wintypes.DWORD), + ('time', ctypes.wintypes.DWORD), + ('dwExtraInfo', ctypes.wintypes.PULONG)) + + +class HARDWAREINPUT(ctypes.Structure): + _fields_ = (('uMsg', ctypes.wintypes.DWORD), + ('wParamL', ctypes.wintypes.WORD), + ('wParamH', ctypes.wintypes.WORD)) + + +class _INPUTUnion(ctypes.Union): + _fields_ = (('mi', MOUSEINPUT), + ('ki', KEYBDINPUT), + ('hi', HARDWAREINPUT)) + + +class INPUT(ctypes.Structure): + _fields_ = (('type', ctypes.wintypes.DWORD), + ('union', _INPUTUnion)) + + +class Rect(): + """ + class Rect, like `ctypes.wintypes.RECT`. + """ + + def __init__(self, left: int = 0, top: int = 0, right: int = 0, bottom: int = 0): + self.left = left + self.top = top + self.right = right + self.bottom = bottom + + def width(self) -> int: + return self.right - self.left + + def height(self) -> int: + return self.bottom - self.top + + def xcenter(self) -> int: + return self.left + self.width() // 2 + + def ycenter(self) -> int: + return self.top + self.height() // 2 + + def isempty(self) -> int: + return self.width() == 0 or self.height() == 0 + + def contains(self, x: int, y: int) -> bool: + return self.left <= x < self.right and self.top <= y < self.bottom + + def intersect(self, rect: 'Rect') -> 'Rect': + left, top, right, bottom = max(self.left, rect.left), max(self.top, rect.top), min(self.right, rect.right), min(self.bottom, rect.bottom) + return Rect(left, top, right, bottom) + + def offset(self, x: int, y: int) -> None: + self.left += x + self.right += x + self.top += y + self.bottom += y + + def __eq__(self, rect): + return self.left == rect.left and self.top == rect.top and self.right == rect.right and self.bottom == rect.bottom + + def __str__(self) -> str: + return '({},{},{},{})[{}x{}]'.format(self.left, self.top, self.right, self.bottom, self.width(), self.height()) + + def __repr__(self) -> str: + return '{}({},{},{},{})[{}x{}]'.format(self.__class__.__name__, self.left, self.top, self.right, self.bottom, self.width(), self.height()) + + +class ClipboardFormat: + CF_TEXT = 1 + CF_BITMAP = 2 + CF_METAFILEPICT = 3 + CF_SYLK = 4 + CF_DIF = 5 + CF_TIFF = 6 + CF_OEMTEXT = 7 + CF_DIB = 8 + CF_PALETTE = 9 + CF_PENDATA = 10 + CF_RIFF = 11 + CF_WAVE = 12 + CF_UNICODETEXT = 13 + CF_ENHMETAFILE = 14 + CF_HDROP = 15 + CF_LOCALE = 16 + CF_DIBV5 = 17 + CF_MAX = 18 + CF_HTML = ctypes.windll.user32.RegisterClipboardFormatW("HTML Format") + + +def _GetDictKeyName(theDict: Dict, theValue: Any, start: str = None) -> str: + for key, value in theDict.items(): + if theValue == value and ((start and key.startswith(start)) 
or True): + return key + return '' + + +_StdOutputHandle = -11 +_ConsoleOutputHandle = ctypes.c_void_p(0) +_DefaultConsoleColor = None + + +def SetConsoleColor(color: int) -> bool: + """ + Change the text color on console window. + color: int, a value in class `ConsoleColor`. + Return bool, True if succeed otherwise False. + """ + global _ConsoleOutputHandle + global _DefaultConsoleColor + if not _DefaultConsoleColor: + if not _ConsoleOutputHandle: + _ConsoleOutputHandle = ctypes.c_void_p(ctypes.windll.kernel32.GetStdHandle(_StdOutputHandle)) + bufferInfo = ConsoleScreenBufferInfo() + ctypes.windll.kernel32.GetConsoleScreenBufferInfo(_ConsoleOutputHandle, ctypes.byref(bufferInfo)) + _DefaultConsoleColor = int(bufferInfo.wAttributes & 0xFF) + if sys.stdout: + sys.stdout.flush() + return bool(ctypes.windll.kernel32.SetConsoleTextAttribute(_ConsoleOutputHandle, ctypes.c_ushort(color))) + + +def ResetConsoleColor() -> bool: + """ + Reset to the default text color on console window. + Return bool, True if succeed otherwise False. + """ + if sys.stdout: + sys.stdout.flush() + return bool(ctypes.windll.kernel32.SetConsoleTextAttribute(_ConsoleOutputHandle, ctypes.c_ushort(_DefaultConsoleColor))) + + +def WindowFromPoint(x: int, y: int) -> int: + """ + WindowFromPoint from Win32. + Return int, a native window handle. + """ + return ctypes.windll.user32.WindowFromPoint(ctypes.wintypes.POINT(x, y)) # or ctypes.windll.user32.WindowFromPoint(x, y) + + +def GetCursorPos() -> Tuple[int, int]: + """ + GetCursorPos from Win32. + Get current mouse cursor positon. + Return Tuple[int, int], two ints tuple (x, y). + """ + point = ctypes.wintypes.POINT(0, 0) + ctypes.windll.user32.GetCursorPos(ctypes.byref(point)) + return point.x, point.y + + +def GetPhysicalCursorPos() -> Tuple[int, int]: + """ + GetPhysicalCursorPos from Win32. + Get current mouse cursor positon. + Return Tuple[int, int], two ints tuple (x, y). + """ + point = ctypes.wintypes.POINT(0, 0) + ctypes.windll.user32.GetPhysicalCursorPos(ctypes.byref(point)) + return point.x, point.y + + +def SetCursorPos(x: int, y: int) -> bool: + """ + SetCursorPos from Win32. + Set mouse cursor to point x, y. + x: int. + y: int. + Return bool, True if succeed otherwise False. + """ + return bool(ctypes.windll.user32.SetCursorPos(x, y)) + + +def GetDoubleClickTime() -> int: + """ + GetDoubleClickTime from Win32. + Return int, in milliseconds. + """ + return ctypes.windll.user32.GetDoubleClickTime() + + +def mouse_event(dwFlags: int, dx: int, dy: int, dwData: int, dwExtraInfo: int) -> None: + """mouse_event from Win32.""" + ctypes.windll.user32.mouse_event(dwFlags, dx, dy, dwData, dwExtraInfo) + + +def keybd_event(bVk: int, bScan: int, dwFlags: int, dwExtraInfo: int) -> None: + """keybd_event from Win32.""" + ctypes.windll.user32.keybd_event(bVk, bScan, dwFlags, dwExtraInfo) + + +def PostMessage(handle: int, msg: int, wParam: int, lParam: int) -> bool: + """ + PostMessage from Win32. + Return bool, True if succeed otherwise False. + """ + return bool(ctypes.windll.user32.PostMessageW(ctypes.c_void_p(handle), ctypes.c_uint(msg), ctypes.wintypes.WPARAM(wParam), ctypes.wintypes.LPARAM(lParam))) + + +def SendMessage(handle: int, msg: int, wParam: int, lParam: int) -> int: + """ + SendMessage from Win32. + Return int, the return value specifies the result of the message processing; + it depends on the message sent. 
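SetConsoleColor and ResetConsoleColor above take a value from the ConsoleColor class; a minimal sketch of wrapping a print call with them follows (an illustration only, assuming the module is importable as uiautomation — it is not part of the patched file):

# Sketch: print one line in yellow on the Windows console, then restore
# the default color. Assumption: the module is importable as `uiautomation`.
import uiautomation as auto

auto.SetConsoleColor(auto.ConsoleColor.Yellow)
print('this line is highlighted in yellow')
auto.ResetConsoleColor()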
+ """ + return ctypes.windll.user32.SendMessageW(ctypes.c_void_p(handle), ctypes.c_uint(msg), ctypes.wintypes.WPARAM(wParam), ctypes.wintypes.LPARAM(lParam)) + + +def Click(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate mouse click at point x, y. + x: int. + y: int. + waitTime: float. + """ + SetCursorPos(x, y) + screenWidth, screenHeight = GetScreenSize() + mouse_event(MouseEventFlag.LeftDown | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(0.05) + mouse_event(MouseEventFlag.LeftUp | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(waitTime) + + +def MiddleClick(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate mouse middle click at point x, y. + x: int. + y: int. + waitTime: float. + """ + SetCursorPos(x, y) + screenWidth, screenHeight = GetScreenSize() + mouse_event(MouseEventFlag.MiddleDown | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(0.05) + mouse_event(MouseEventFlag.MiddleUp | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(waitTime) + + +def RightClick(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate mouse right click at point x, y. + x: int. + y: int. + waitTime: float. + """ + SetCursorPos(x, y) + screenWidth, screenHeight = GetScreenSize() + mouse_event(MouseEventFlag.RightDown | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(0.05) + mouse_event(MouseEventFlag.RightUp | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(waitTime) + + +def PressMouse(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Press left mouse. + x: int. + y: int. + waitTime: float. + """ + SetCursorPos(x, y) + screenWidth, screenHeight = GetScreenSize() + mouse_event(MouseEventFlag.LeftDown | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(waitTime) + + +def ReleaseMouse(waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Release left mouse. + waitTime: float. + """ + x, y = GetCursorPos() + screenWidth, screenHeight = GetScreenSize() + mouse_event(MouseEventFlag.LeftUp | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(waitTime) + + +def RightPressMouse(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Press right mouse. + x: int. + y: int. + waitTime: float. + """ + SetCursorPos(x, y) + screenWidth, screenHeight = GetScreenSize() + mouse_event(MouseEventFlag.RightDown | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(waitTime) + + +def RightReleaseMouse(waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Release right mouse. + waitTime: float. + """ + x, y = GetCursorPos() + screenWidth, screenHeight = GetScreenSize() + mouse_event(MouseEventFlag.RightUp | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(waitTime) + + +def MiddlePressMouse(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Press middle mouse. + x: int. + y: int. + waitTime: float. 
+ """ + SetCursorPos(x, y) + screenWidth, screenHeight = GetScreenSize() + mouse_event(MouseEventFlag.MiddleDown | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(waitTime) + + +def MiddleReleaseMouse(waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Release middle mouse. + waitTime: float. + """ + x, y = GetCursorPos() + screenWidth, screenHeight = GetScreenSize() + mouse_event(MouseEventFlag.MiddleUp | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) + time.sleep(waitTime) + + +def MoveTo(x: int, y: int, moveSpeed: float = 1, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate mouse move to point x, y from current cursor. + x: int. + y: int. + moveSpeed: float, 1 normal speed, < 1 move slower, > 1 move faster. + waitTime: float. + """ + if moveSpeed <= 0: + moveTime = 0 + else: + moveTime = MAX_MOVE_SECOND / moveSpeed + curX, curY = GetCursorPos() + xCount = abs(x - curX) + yCount = abs(y - curY) + maxPoint = max(xCount, yCount) + screenWidth, screenHeight = GetScreenSize() + maxSide = max(screenWidth, screenHeight) + minSide = min(screenWidth, screenHeight) + if maxPoint > minSide: + maxPoint = minSide + if maxPoint < maxSide: + maxPoint = 100 + int((maxSide - 100) / maxSide * maxPoint) + moveTime = moveTime * maxPoint * 1.0 / maxSide + stepCount = maxPoint // 20 + if stepCount > 1: + xStep = (x - curX) * 1.0 / stepCount + yStep = (y - curY) * 1.0 / stepCount + interval = moveTime / stepCount + for i in range(stepCount): + cx = curX + int(xStep * i) + cy = curY + int(yStep * i) + # upper-left(0,0), lower-right(65536,65536) + # mouse_event(MouseEventFlag.Move | MouseEventFlag.Absolute, cx*65536//screenWidth, cy*65536//screenHeight, 0, 0) + SetCursorPos(cx, cy) + time.sleep(interval) + SetCursorPos(x, y) + time.sleep(waitTime) + + +def DragDrop(x1: int, y1: int, x2: int, y2: int, moveSpeed: float = 1, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate mouse left button drag from point x1, y1 drop to point x2, y2. + x1: int. + y1: int. + x2: int. + y2: int. + moveSpeed: float, 1 normal speed, < 1 move slower, > 1 move faster. + waitTime: float. + """ + PressMouse(x1, y1, 0.05) + MoveTo(x2, y2, moveSpeed, 0.05) + ReleaseMouse(waitTime) + + +def RightDragDrop(x1: int, y1: int, x2: int, y2: int, moveSpeed: float = 1, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate mouse right button drag from point x1, y1 drop to point x2, y2. + x1: int. + y1: int. + x2: int. + y2: int. + moveSpeed: float, 1 normal speed, < 1 move slower, > 1 move faster. + waitTime: float. + """ + RightPressMouse(x1, y1, 0.05) + MoveTo(x2, y2, moveSpeed, 0.05) + RightReleaseMouse(waitTime) + + +def MiddleDragDrop(x1: int, y1: int, x2: int, y2: int, moveSpeed: float = 1, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate mouse middle button drag from point x1, y1 drop to point x2, y2. + x1: int. + y1: int. + x2: int. + y2: int. + moveSpeed: float, 1 normal speed, < 1 move slower, > 1 move faster. + waitTime: float. + """ + MiddlePressMouse(x1, y1, 0.05) + MoveTo(x2, y2, moveSpeed, 0.05) + MiddleReleaseMouse(waitTime) + + +def WheelDown(wheelTimes: int = 1, interval: float = 0.05, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate mouse wheel down. + wheelTimes: int. + interval: float. + waitTime: float. 
+ """ + for i in range(wheelTimes): + mouse_event(MouseEventFlag.Wheel, 0, 0, -120, 0) # WHEEL_DELTA=120 + time.sleep(interval) + time.sleep(waitTime) + + +def WheelUp(wheelTimes: int = 1, interval: float = 0.05, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate mouse wheel up. + wheelTimes: int. + interval: float. + waitTime: float. + """ + for i in range(wheelTimes): + mouse_event(MouseEventFlag.Wheel, 0, 0, 120, 0) # WHEEL_DELTA=120 + time.sleep(interval) + time.sleep(waitTime) + + +def GetScreenSize() -> Tuple[int, int]: + """ + Return Tuple[int, int], two ints tuple (width, height). + """ + SM_CXSCREEN = 0 + SM_CYSCREEN = 1 + w = ctypes.windll.user32.GetSystemMetrics(SM_CXSCREEN) + h = ctypes.windll.user32.GetSystemMetrics(SM_CYSCREEN) + return w, h + + +def GetVirtualScreenSize() -> Tuple[int, int]: + """ + Return Tuple[int, int], two ints tuple (width, height). + """ + SM_CXVIRTUALSCREEN = 78 + SM_CYVIRTUALSCREEN = 79 + w = ctypes.windll.user32.GetSystemMetrics(SM_CXVIRTUALSCREEN) + h = ctypes.windll.user32.GetSystemMetrics(SM_CYVIRTUALSCREEN) + return w, h + + +def GetMonitorsRect() -> List[Rect]: + """ + Get monitors' rect. + Return List[Rect]. + """ + MonitorEnumProc = ctypes.WINFUNCTYPE(ctypes.c_int, ctypes.c_size_t, ctypes.c_size_t, ctypes.POINTER(ctypes.wintypes.RECT), ctypes.c_size_t) + rects = [] + + def MonitorCallback(hMonitor: int, hdcMonitor: int, lprcMonitor: ctypes.POINTER(ctypes.wintypes.RECT), dwData: int): + rect = Rect(lprcMonitor.contents.left, lprcMonitor.contents.top, lprcMonitor.contents.right, lprcMonitor.contents.bottom) + rects.append(rect) + return 1 + ret = ctypes.windll.user32.EnumDisplayMonitors(ctypes.c_void_p(0), ctypes.c_void_p(0), MonitorEnumProc(MonitorCallback), 0) + return rects + + +def GetPixelColor(x: int, y: int, handle: int = 0) -> int: + """ + Get pixel color of a native window. + x: int. + y: int. + handle: int, the handle of a native window. + Return int, the bgr value of point (x,y). + r = bgr & 0x0000FF + g = (bgr & 0x00FF00) >> 8 + b = (bgr & 0xFF0000) >> 16 + If handle is 0, get pixel from Desktop window(root control). + Note: + Not all devices support GetPixel. + An application should call GetDeviceCaps to determine whether a specified device supports this function. + For example, console window doesn't support. + """ + hdc = ctypes.windll.user32.GetWindowDC(ctypes.c_void_p(handle)) + bgr = ctypes.windll.gdi32.GetPixel(hdc, x, y) + ctypes.windll.user32.ReleaseDC(ctypes.c_void_p(handle), ctypes.c_void_p(hdc)) + return bgr + + +def MessageBox(content: str, title: str, flags: int = MB.Ok) -> int: + """ + MessageBox from Win32. + content: str. + title: str. + flags: int, a value or some combined values in class `MB`. + Return int, a value in MB whose name starts with Id, such as MB.IdOk + """ + return ctypes.windll.user32.MessageBoxW(ctypes.c_void_p(0), ctypes.c_wchar_p(content), ctypes.c_wchar_p(title), ctypes.c_uint(flags)) + + +def SetForegroundWindow(handle: int) -> bool: + """ + SetForegroundWindow from Win32. + handle: int, the handle of a native window. + Return bool, True if succeed otherwise False. + """ + return bool(ctypes.windll.user32.SetForegroundWindow(ctypes.c_void_p(handle))) + + +def BringWindowToTop(handle: int) -> bool: + """ + BringWindowToTop from Win32. + handle: int, the handle of a native window. + Return bool, True if succeed otherwise False. 
+ """ + return bool(ctypes.windll.user32.BringWindowToTop(ctypes.c_void_p(handle))) + + +def SwitchToThisWindow(handle: int) -> None: + """ + SwitchToThisWindow from Win32. + handle: int, the handle of a native window. + """ + ctypes.windll.user32.SwitchToThisWindow(ctypes.c_void_p(handle), ctypes.c_int(1)) # void function, no return + + +def GetAncestor(handle: int, flag: int) -> int: + """ + GetAncestor from Win32. + handle: int, the handle of a native window. + index: int, a value in class `GAFlag`. + Return int, a native window handle. + """ + return ctypes.windll.user32.GetAncestor(ctypes.c_void_p(handle), ctypes.c_int(flag)) + + +def IsTopLevelWindow(handle: int) -> bool: + """ + IsTopLevelWindow from Win32. + handle: int, the handle of a native window. + Return bool. + Only available on Windows 7 or Higher. + """ + return bool(ctypes.windll.user32.IsTopLevelWindow(ctypes.c_void_p(handle))) + + +def GetWindowLong(handle: int, index: int) -> int: + """ + GetWindowLong from Win32. + handle: int, the handle of a native window. + index: int. + """ + return ctypes.windll.user32.GetWindowLongW(ctypes.c_void_p(handle), ctypes.c_int(index)) + + +def SetWindowLong(handle: int, index: int, value: int) -> int: + """ + SetWindowLong from Win32. + handle: int, the handle of a native window. + index: int. + value: int. + Return int, the previous value before set. + """ + return ctypes.windll.user32.SetWindowLongW(ctypes.c_void_p(handle), index, value) + + +def IsIconic(handle: int) -> bool: + """ + IsIconic from Win32. + Determine whether a native window is minimized. + handle: int, the handle of a native window. + Return bool. + """ + return bool(ctypes.windll.user32.IsIconic(ctypes.c_void_p(handle))) + + +def IsZoomed(handle: int) -> bool: + """ + IsZoomed from Win32. + Determine whether a native window is maximized. + handle: int, the handle of a native window. + Return bool. + """ + return bool(ctypes.windll.user32.IsZoomed(ctypes.c_void_p(handle))) + + +def IsWindowVisible(handle: int) -> bool: + """ + IsWindowVisible from Win32. + handle: int, the handle of a native window. + Return bool. + """ + return bool(ctypes.windll.user32.IsWindowVisible(ctypes.c_void_p(handle))) + + +def ShowWindow(handle: int, cmdShow: int) -> bool: + """ + ShowWindow from Win32. + handle: int, the handle of a native window. + cmdShow: int, a value in clas `SW`. + Return bool, True if succeed otherwise False. + """ + return ctypes.windll.user32.ShowWindow(ctypes.c_void_p(handle), ctypes.c_int(cmdShow)) + + +def MoveWindow(handle: int, x: int, y: int, width: int, height: int, repaint: int = 1) -> bool: + """ + MoveWindow from Win32. + handle: int, the handle of a native window. + x: int. + y: int. + width: int. + height: int. + repaint: int, use 1 or 0. + Return bool, True if succeed otherwise False. + """ + return bool(ctypes.windll.user32.MoveWindow(ctypes.c_void_p(handle), ctypes.c_int(x), ctypes.c_int(y), ctypes.c_int(width), ctypes.c_int(height), ctypes.c_int(repaint))) + + +def SetWindowPos(handle: int, hWndInsertAfter: int, x: int, y: int, width: int, height: int, flags: int) -> bool: + """ + SetWindowPos from Win32. + handle: int, the handle of a native window. + hWndInsertAfter: int, a value whose name starts with 'HWND' in class SWP. + x: int. + y: int. + width: int. + height: int. + flags: int, values whose name starts with 'SWP' in class `SWP`. + Return bool, True if succeed otherwise False. 
+ """ + return ctypes.windll.user32.SetWindowPos(ctypes.c_void_p(handle), ctypes.c_void_p(hWndInsertAfter), ctypes.c_int(x), ctypes.c_int(y), ctypes.c_int(width), ctypes.c_int(height), ctypes.c_uint(flags)) + + +def SetWindowTopmost(handle: int, isTopmost: bool) -> bool: + """ + handle: int, the handle of a native window. + isTopmost: bool + Return bool, True if succeed otherwise False. + """ + topValue = SWP.HWND_Topmost if isTopmost else SWP.HWND_NoTopmost + return bool(SetWindowPos(handle, topValue, 0, 0, 0, 0, SWP.SWP_NoSize | SWP.SWP_NoMove)) + + +def GetWindowText(handle: int) -> str: + """ + GetWindowText from Win32. + handle: int, the handle of a native window. + Return str. + """ + arrayType = ctypes.c_wchar * MAX_PATH + values = arrayType() + ctypes.windll.user32.GetWindowTextW(ctypes.c_void_p(handle), values, ctypes.c_int(MAX_PATH)) + return values.value + + +def SetWindowText(handle: int, text: str) -> bool: + """ + SetWindowText from Win32. + handle: int, the handle of a native window. + text: str. + Return bool, True if succeed otherwise False. + """ + return bool(ctypes.windll.user32.SetWindowTextW(ctypes.c_void_p(handle), ctypes.c_wchar_p(text))) + + +def GetEditText(handle: int) -> str: + """ + Get text of a native Win32 Edit. + handle: int, the handle of a native window. + Return str. + """ + textLen = SendMessage(handle, 0x000E, 0, 0) + 1 # WM_GETTEXTLENGTH + arrayType = ctypes.c_wchar * textLen + values = arrayType() + SendMessage(handle, 0x000D, textLen, values) # WM_GETTEXT + return values.value + + +def GetConsoleOriginalTitle() -> str: + """ + GetConsoleOriginalTitle from Win32. + Return str. + Only available on Windows Vista or higher. + """ + if IsNT6orHigher: + arrayType = ctypes.c_wchar * MAX_PATH + values = arrayType() + ctypes.windll.kernel32.GetConsoleOriginalTitleW(values, ctypes.c_uint(MAX_PATH)) + return values.value + else: + raise RuntimeError('GetConsoleOriginalTitle is not supported on Windows XP or lower.') + + +def GetConsoleTitle() -> str: + """ + GetConsoleTitle from Win32. + Return str. + """ + arrayType = ctypes.c_wchar * MAX_PATH + values = arrayType() + ctypes.windll.kernel32.GetConsoleTitleW(values, ctypes.c_uint(MAX_PATH)) + return values.value + + +def SetConsoleTitle(text: str) -> bool: + """ + SetConsoleTitle from Win32. + text: str. + Return bool, True if succeed otherwise False. + """ + return bool(ctypes.windll.kernel32.SetConsoleTitleW(ctypes.c_wchar_p(text))) + + +def GetForegroundWindow() -> int: + """ + GetForegroundWindow from Win32. + Return int, the native handle of the foreground window. + """ + return ctypes.windll.user32.GetForegroundWindow() + + +def IsDesktopLocked() -> bool: + """ + Check if desktop is locked. + Return bool. + Desktop is locked if press Win+L, Ctrl+Alt+Del or in remote desktop mode. + """ + isLocked = False + desk = ctypes.windll.user32.OpenDesktopW(ctypes.c_wchar_p('Default'), ctypes.c_uint(0), ctypes.c_int(0), ctypes.c_uint(0x0100)) # DESKTOP_SWITCHDESKTOP = 0x0100 + if desk: + isLocked = not ctypes.windll.user32.SwitchDesktop(ctypes.c_void_p(desk)) + ctypes.windll.user32.CloseDesktop(ctypes.c_void_p(desk)) + return isLocked + + +def PlayWaveFile(filePath: str = r'C:\Windows\Media\notify.wav', isAsync: bool = False, isLoop: bool = False) -> bool: + """ + Call PlaySound from Win32. + filePath: str, if emtpy, stop playing the current sound. + isAsync: bool, if True, the sound is played asynchronously and returns immediately. 
+ isLoop: bool, if True, the sound plays repeatedly until PlayWaveFile(None) is called again, must also set isAsync to True. + Return bool, True if succeed otherwise False. + """ + if filePath: + SND_ASYNC = 0x0001 + SND_NODEFAULT = 0x0002 + SND_LOOP = 0x0008 + SND_FILENAME = 0x20000 + flags = SND_NODEFAULT | SND_FILENAME + if isAsync: + flags |= SND_ASYNC + if isLoop: + flags |= SND_LOOP + flags |= SND_ASYNC + return bool(ctypes.windll.winmm.PlaySoundW(ctypes.c_wchar_p(filePath), ctypes.c_void_p(0), ctypes.c_uint(flags))) + else: + return bool(ctypes.windll.winmm.PlaySoundW(ctypes.c_wchar_p(0), ctypes.c_void_p(0), ctypes.c_uint(0))) + + +def IsProcess64Bit(processId: int) -> bool: + """ + Return True if process is 64 bit. + Return False if process is 32 bit. + Return None if unknown, maybe caused by having no acess right to the process. + """ + try: + func = ctypes.windll.ntdll.ZwWow64ReadVirtualMemory64 # only 64 bit OS has this function + except Exception as ex: + return False + try: + IsWow64Process = ctypes.windll.kernel32.IsWow64Process + except Exception as ex: + return False + hProcess = ctypes.windll.kernel32.OpenProcess(0x1000, 0, processId) # PROCESS_QUERY_INFORMATION=0x0400,PROCESS_QUERY_LIMITED_INFORMATION=0x1000 + if hProcess: + is64Bit = ctypes.c_int32() + if IsWow64Process(ctypes.c_void_p(hProcess), ctypes.byref(is64Bit)): + ctypes.windll.kernel32.CloseHandle(ctypes.c_void_p(hProcess)) + return False if is64Bit.value else True + else: + ctypes.windll.kernel32.CloseHandle(ctypes.c_void_p(hProcess)) + + +def IsUserAnAdmin() -> bool: + """ + IsUserAnAdmin from Win32. + Return bool. + Minimum supported OS: Windows XP, Windows Server 2003 + """ + return bool(ctypes.windll.shell32.IsUserAnAdmin()) + + +def RunScriptAsAdmin(argv: List[str], workingDirectory: str = None, showFlag: int = SW.ShowNormal) -> bool: + """ + Run a python script as administrator. + System will show a popup dialog askes you whether to elevate as administrator if UAC is enabled. + argv: List[str], a str list like sys.argv, argv[0] is the script file, argv[1:] are other arguments. + workingDirectory: str, the working directory for the script file. + showFlag: int, a value in class `SW`. + Return bool, True if succeed. + """ + args = ' '.join('"{}"'.format(arg) for arg in argv) + return ctypes.windll.shell32.ShellExecuteW(None, "runas", sys.executable, args, workingDirectory, showFlag) > 32 + + +def SendKey(key: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate typing a key. + key: int, a value in class `Keys`. + """ + keybd_event(key, 0, KeyboardEventFlag.KeyDown | KeyboardEventFlag.ExtendedKey, 0) + keybd_event(key, 0, KeyboardEventFlag.KeyUp | KeyboardEventFlag.ExtendedKey, 0) + time.sleep(waitTime) + + +def PressKey(key: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate a key down for key. + key: int, a value in class `Keys`. + waitTime: float. + """ + keybd_event(key, 0, KeyboardEventFlag.KeyDown | KeyboardEventFlag.ExtendedKey, 0) + time.sleep(waitTime) + + +def ReleaseKey(key: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Simulate a key up for key. + key: int, a value in class `Keys`. + waitTime: float. + """ + keybd_event(key, 0, KeyboardEventFlag.KeyUp | KeyboardEventFlag.ExtendedKey, 0) + time.sleep(waitTime) + + +def IsKeyPressed(key: int) -> bool: + """ + key: int, a value in class `Keys`. + Return bool. 
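+ Minimal usage sketch (illustrative only):
+     if IsKeyPressed(Keys.VK_CONTROL):  # Keys.VK_CONTROL is the virtual-key code of Ctrl
+         print('Ctrl is currently held down')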
+ """ + state = ctypes.windll.user32.GetAsyncKeyState(key) + return bool(state & 0x8000) + + +def _CreateInput(structure) -> INPUT: + """ + Create Win32 struct `INPUT` for `SendInput`. + Return `INPUT`. + """ + if isinstance(structure, MOUSEINPUT): + return INPUT(InputType.Mouse, _INPUTUnion(mi=structure)) + if isinstance(structure, KEYBDINPUT): + return INPUT(InputType.Keyboard, _INPUTUnion(ki=structure)) + if isinstance(structure, HARDWAREINPUT): + return INPUT(InputType.Hardware, _INPUTUnion(hi=structure)) + raise TypeError('Cannot create INPUT structure!') + + +def MouseInput(dx: int, dy: int, mouseData: int = 0, dwFlags: int = MouseEventFlag.LeftDown, time_: int = 0) -> INPUT: + """ + Create Win32 struct `MOUSEINPUT` for `SendInput`. + Return `INPUT`. + """ + return _CreateInput(MOUSEINPUT(dx, dy, mouseData, dwFlags, time_, None)) + + +def KeyboardInput(wVk: int, wScan: int, dwFlags: int = KeyboardEventFlag.KeyDown, time_: int = 0) -> INPUT: + """Create Win32 struct `KEYBDINPUT` for `SendInput`.""" + return _CreateInput(KEYBDINPUT(wVk, wScan, dwFlags, time_, None)) + + +def HardwareInput(uMsg: int, param: int = 0) -> INPUT: + """Create Win32 struct `HARDWAREINPUT` for `SendInput`.""" + return _CreateInput(HARDWAREINPUT(uMsg, param & 0xFFFF, param >> 16 & 0xFFFF)) + + +def SendInput(*inputs) -> int: + """ + SendInput from Win32. + input: `INPUT`. + Return int, the number of events that it successfully inserted into the keyboard or mouse input stream. + If the function returns zero, the input was already blocked by another thread. + """ + cbSize = ctypes.c_int(ctypes.sizeof(INPUT)) + for ip in inputs: + ret = ctypes.windll.user32.SendInput(1, ctypes.byref(ip), cbSize) + return ret + # or one call + #nInputs = len(inputs) + #LPINPUT = INPUT * nInputs + #pInputs = LPINPUT(*inputs) + #cbSize = ctypes.c_int(ctypes.sizeof(INPUT)) + # return ctypes.windll.user32.SendInput(nInputs, ctypes.byref(pInputs), cbSize) + + +def SendUnicodeChar(char: str, charMode: bool = True) -> int: + """ + Type a single unicode char. + char: str, len(char) must equal to 1. + charMode: bool, if False, the char typied is depend on the input method if a input method is on. + Return int, the number of events that it successfully inserted into the keyboard or mouse input stream. + If the function returns zero, the input was already blocked by another thread. + """ + if charMode: + vk = 0 + scan = ord(char) + flag = KeyboardEventFlag.KeyUnicode + else: + res = ctypes.windll.user32.VkKeyScanW(ctypes.wintypes.WCHAR(char)) + if (res >> 8) & 0xFF == 0: + vk = res & 0xFF + scan = 0 + flag = 0 + else: + vk = 0 + scan = ord(char) + flag = KeyboardEventFlag.KeyUnicode + return SendInput(KeyboardInput(vk, scan, flag | KeyboardEventFlag.KeyDown), + KeyboardInput(vk, scan, flag | KeyboardEventFlag.KeyUp)) + + +_SCKeys = { + Keys.VK_LSHIFT: 0x02A, + Keys.VK_RSHIFT: 0x136, + Keys.VK_LCONTROL: 0x01D, + Keys.VK_RCONTROL: 0x11D, + Keys.VK_LMENU: 0x038, + Keys.VK_RMENU: 0x138, + Keys.VK_LWIN: 0x15B, + Keys.VK_RWIN: 0x15C, + Keys.VK_NUMPAD0: 0x52, + Keys.VK_NUMPAD1: 0x4F, + Keys.VK_NUMPAD2: 0x50, + Keys.VK_NUMPAD3: 0x51, + Keys.VK_NUMPAD4: 0x4B, + Keys.VK_NUMPAD5: 0x4C, + Keys.VK_NUMPAD6: 0x4D, + Keys.VK_NUMPAD7: 0x47, + Keys.VK_NUMPAD8: 0x48, + Keys.VK_NUMPAD9: 0x49, + Keys.VK_DECIMAL: 0x53, + Keys.VK_NUMLOCK: 0x145, + Keys.VK_DIVIDE: 0x135, + Keys.VK_MULTIPLY: 0x037, + Keys.VK_SUBTRACT: 0x04A, + Keys.VK_ADD: 0x04E, +} + + +def _VKtoSC(key: int) -> int: + """ + This function is only for internal use in SendKeys. 
+ key: int, a value in class `Keys`. + Return int. + """ + if key in _SCKeys: + return _SCKeys[key] + scanCode = ctypes.windll.user32.MapVirtualKeyA(key, 0) + if not scanCode: + return 0 + keyList = [Keys.VK_APPS, Keys.VK_CANCEL, Keys.VK_SNAPSHOT, Keys.VK_DIVIDE, Keys.VK_NUMLOCK] + if key in keyList: + scanCode |= 0x0100 + return scanCode + + +def SendKeys(text: str, interval: float = 0.01, waitTime: float = OPERATION_WAIT_TIME, charMode: bool = True, debug: bool = False) -> None: + """ + Simulate typing keys on keyboard. + text: str, keys to type. + interval: float, seconds between keys. + waitTime: float. + charMode: bool, if False, the text typed is depend on the input method if a input method is on. + debug: bool, if True, print the keys. + Examples: + {Ctrl}, {Delete} ... are special keys' name in SpecialKeyNames. + SendKeys('{Ctrl}a{Delete}{Ctrl}v{Ctrl}s{Ctrl}{Shift}s{Win}e{PageDown}') #press Ctrl+a, Delete, Ctrl+v, Ctrl+s, Ctrl+Shift+s, Win+e, PageDown + SendKeys('{Ctrl}(AB)({Shift}(123))') #press Ctrl+A+B, type '(', press Shift+1+2+3, type ')', if '()' follows a hold key, hold key won't release util ')' + SendKeys('{Ctrl}{a 3}') #press Ctrl+a at the same time, release Ctrl+a, then type 'a' 2 times + SendKeys('{a 3}{B 5}') #type 'a' 3 times, type 'B' 5 times + SendKeys('{{}Hello{}}abc {a}{b}{c} test{} 3}{!}{a} (){(}{)}') #type: '{Hello}abc abc test}}}!a ()()' + SendKeys('0123456789{Enter}') + SendKeys('ABCDEFGHIJKLMNOPQRSTUVWXYZ{Enter}') + SendKeys('abcdefghijklmnopqrstuvwxyz{Enter}') + SendKeys('`~!@#$%^&*()-_=+{Enter}') + SendKeys('[]{{}{}}\\|;:\'\",<.>/?{Enter}') + """ + holdKeys = ('WIN', 'LWIN', 'RWIN', 'SHIFT', 'LSHIFT', 'RSHIFT', 'CTRL', 'CONTROL', 'LCTRL', 'RCTRL', 'LCONTROL', 'LCONTROL', 'ALT', 'LALT', 'RALT') + keys = [] + printKeys = [] + i = 0 + insertIndex = 0 + length = len(text) + hold = False + include = False + lastKeyValue = None + while True: + if text[i] == '{': + rindex = text.find('}', i) + if rindex == i + 1: # {}} + rindex = text.find('}', i + 2) + if rindex == -1: + raise ValueError('"{" or "{}" is not valid, use "{{}" for "{", use "{}}" for "}"') + key = text[i + 1:rindex] + key = [it for it in key.split(' ') if it] + if not key: + raise ValueError('"{}" is not valid, use "{{Space}}" or " " for " "'.format(text[i:rindex + 1])) + if (len(key) == 2 and not key[1].isdigit()) or len(key) > 2: + raise ValueError('"{}" is not valid'.format(text[i:rindex + 1])) + upperKey = key[0].upper() + count = 1 + if len(key) > 1: + count = int(key[1]) + for j in range(count): + if hold: + if upperKey in SpecialKeyNames: + keyValue = SpecialKeyNames[upperKey] + if type(lastKeyValue) == type(keyValue) and lastKeyValue == keyValue: + insertIndex += 1 + printKeys.insert(insertIndex, (key[0], 'KeyDown | ExtendedKey')) + printKeys.insert(insertIndex + 1, (key[0], 'KeyUp | ExtendedKey')) + keys.insert(insertIndex, (keyValue, KeyboardEventFlag.KeyDown | KeyboardEventFlag.ExtendedKey)) + keys.insert(insertIndex + 1, (keyValue, KeyboardEventFlag.KeyUp | KeyboardEventFlag.ExtendedKey)) + lastKeyValue = keyValue + elif key[0] in CharacterCodes: + keyValue = CharacterCodes[key[0]] + if type(lastKeyValue) == type(keyValue) and lastKeyValue == keyValue: + insertIndex += 1 + printKeys.insert(insertIndex, (key[0], 'KeyDown | ExtendedKey')) + printKeys.insert(insertIndex + 1, (key[0], 'KeyUp | ExtendedKey')) + keys.insert(insertIndex, (keyValue, KeyboardEventFlag.KeyDown | KeyboardEventFlag.ExtendedKey)) + keys.insert(insertIndex + 1, (keyValue, KeyboardEventFlag.KeyUp | 
KeyboardEventFlag.ExtendedKey)) + lastKeyValue = keyValue + else: + printKeys.insert(insertIndex, (key[0], 'UnicodeChar')) + keys.insert(insertIndex, (key[0], 'UnicodeChar')) + lastKeyValue = key[0] + if include: + insertIndex += 1 + else: + if upperKey in holdKeys: + insertIndex += 1 + else: + hold = False + else: + if upperKey in SpecialKeyNames: + keyValue = SpecialKeyNames[upperKey] + printKeys.append((key[0], 'KeyDown | ExtendedKey')) + printKeys.append((key[0], 'KeyUp | ExtendedKey')) + keys.append((keyValue, KeyboardEventFlag.KeyDown | KeyboardEventFlag.ExtendedKey)) + keys.append((keyValue, KeyboardEventFlag.KeyUp | KeyboardEventFlag.ExtendedKey)) + lastKeyValue = keyValue + if upperKey in holdKeys: + hold = True + insertIndex = len(keys) - 1 + else: + hold = False + else: + printKeys.append((key[0], 'UnicodeChar')) + keys.append((key[0], 'UnicodeChar')) + lastKeyValue = key[0] + i = rindex + 1 + elif text[i] == '(': + if hold: + include = True + else: + printKeys.append((text[i], 'UnicodeChar')) + keys.append((text[i], 'UnicodeChar')) + lastKeyValue = text[i] + i += 1 + elif text[i] == ')': + if hold: + include = False + hold = False + else: + printKeys.append((text[i], 'UnicodeChar')) + keys.append((text[i], 'UnicodeChar')) + lastKeyValue = text[i] + i += 1 + else: + if hold: + if text[i] in CharacterCodes: + keyValue = CharacterCodes[text[i]] + if include and type(lastKeyValue) == type(keyValue) and lastKeyValue == keyValue: + insertIndex += 1 + printKeys.insert(insertIndex, (text[i], 'KeyDown | ExtendedKey')) + printKeys.insert(insertIndex + 1, (text[i], 'KeyUp | ExtendedKey')) + keys.insert(insertIndex, (keyValue, KeyboardEventFlag.KeyDown | KeyboardEventFlag.ExtendedKey)) + keys.insert(insertIndex + 1, (keyValue, KeyboardEventFlag.KeyUp | KeyboardEventFlag.ExtendedKey)) + lastKeyValue = keyValue + else: + printKeys.append((text[i], 'UnicodeChar')) + keys.append((text[i], 'UnicodeChar')) + lastKeyValue = text[i] + if include: + insertIndex += 1 + else: + hold = False + else: + printKeys.append((text[i], 'UnicodeChar')) + keys.append((text[i], 'UnicodeChar')) + lastKeyValue = text[i] + i += 1 + if i >= length: + break + hotkeyInterval = 0.01 + for i, key in enumerate(keys): + if key[1] == 'UnicodeChar': + SendUnicodeChar(key[0], charMode) + time.sleep(interval) + if debug: + Logger.ColorfullyWrite('{}, sleep({})\n'.format(printKeys[i], interval), writeToFile=False) + else: + scanCode = _VKtoSC(key[0]) + keybd_event(key[0], scanCode, key[1], 0) + if debug: + Logger.Write(printKeys[i], ConsoleColor.DarkGreen, writeToFile=False) + if i + 1 == len(keys): + time.sleep(interval) + if debug: + Logger.Write(', sleep({})\n'.format(interval), writeToFile=False) + else: + if key[1] & KeyboardEventFlag.KeyUp: + if keys[i + 1][1] == 'UnicodeChar' or keys[i + 1][1] & KeyboardEventFlag.KeyUp == 0: + time.sleep(interval) + if debug: + Logger.Write(', sleep({})\n'.format(interval), writeToFile=False) + else: + time.sleep(hotkeyInterval) # must sleep for a while, otherwise combined keys may not be caught + if debug: + Logger.Write(', sleep({})\n'.format(hotkeyInterval), writeToFile=False) + else: # KeyboardEventFlag.KeyDown + time.sleep(hotkeyInterval) + if debug: + Logger.Write(', sleep({})\n'.format(hotkeyInterval), writeToFile=False) + # make sure hold keys are not pressed + #win = ctypes.windll.user32.GetAsyncKeyState(Keys.VK_LWIN) + #ctrl = ctypes.windll.user32.GetAsyncKeyState(Keys.VK_CONTROL) + #alt = ctypes.windll.user32.GetAsyncKeyState(Keys.VK_MENU) + #shift = 
ctypes.windll.user32.GetAsyncKeyState(Keys.VK_SHIFT) + # if win & 0x8000: + #Logger.WriteLine('ERROR: WIN is pressed, it should not be pressed!', ConsoleColor.Red) + #keybd_event(Keys.VK_LWIN, 0, KeyboardEventFlag.KeyUp | KeyboardEventFlag.ExtendedKey, 0) + # if ctrl & 0x8000: + #Logger.WriteLine('ERROR: CTRL is pressed, it should not be pressed!', ConsoleColor.Red) + #keybd_event(Keys.VK_CONTROL, 0, KeyboardEventFlag.KeyUp | KeyboardEventFlag.ExtendedKey, 0) + # if alt & 0x8000: + #Logger.WriteLine('ERROR: ALT is pressed, it should not be pressed!', ConsoleColor.Red) + #keybd_event(Keys.VK_MENU, 0, KeyboardEventFlag.KeyUp | KeyboardEventFlag.ExtendedKey, 0) + # if shift & 0x8000: + #Logger.WriteLine('ERROR: SHIFT is pressed, it should not be pressed!', ConsoleColor.Red) + #keybd_event(Keys.VK_SHIFT, 0, KeyboardEventFlag.KeyUp | KeyboardEventFlag.ExtendedKey, 0) + time.sleep(waitTime) + + +def SetThreadDpiAwarenessContext(dpiAwarenessContext: int): + """ + SetThreadDpiAwarenessContext from Win32. + dpiAwarenessContext: int, a value in class `DpiAwarenessContext` + """ + try: + # https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-setthreaddpiawarenesscontext + # Windows 10 1607+ + ctypes.windll.user32.SetThreadDpiAwarenessContext.restype = ctypes.c_void_p + oldContext = ctypes.windll.user32.SetThreadDpiAwarenessContext(ctypes.c_void_p(dpiAwarenessContext)) + return oldContext + except Exception as ex: + pass + + +def SetProcessDpiAwareness(dpiAwareness: int): + """ + ProcessDpiAwareness from Win32. + dpiAwareness: int, a value in class `ProcessDpiAwareness` + """ + try: + # https://docs.microsoft.com/en-us/windows/win32/api/shellscalingapi/nf-shellscalingapi-setprocessdpiawareness + # Once SetProcessDpiAwareness is set for an app, any future calls to SetProcessDpiAwareness will fail. + # Windows 8.1+ + return ctypes.windll.shcore.SetProcessDpiAwareness(dpiAwareness) + except Exception as ex: + pass + + +SetProcessDpiAwareness(ProcessDpiAwareness.PerMonitorDpiAware) + + +class Logger: + """ + Logger for print and log. Support for printing log with different colors on console. + """ + FileName = '@AutomationLog.txt' + _SelfFileName = os.path.split(__file__)[1] + ColorNames = { + "Black": ConsoleColor.Black, + "DarkBlue": ConsoleColor.DarkBlue, + "DarkGreen": ConsoleColor.DarkGreen, + "DarkCyan": ConsoleColor.DarkCyan, + "DarkRed": ConsoleColor.DarkRed, + "DarkMagenta": ConsoleColor.DarkMagenta, + "DarkYellow": ConsoleColor.DarkYellow, + "Gray": ConsoleColor.Gray, + "DarkGray": ConsoleColor.DarkGray, + "Blue": ConsoleColor.Blue, + "Green": ConsoleColor.Green, + "Cyan": ConsoleColor.Cyan, + "Red": ConsoleColor.Red, + "Magenta": ConsoleColor.Magenta, + "Yellow": ConsoleColor.Yellow, + "White": ConsoleColor.White, + } + + @staticmethod + def SetLogFile(path: str) -> None: + Logger.FileName = path + + @staticmethod + def Write(log: Any, consoleColor: int = ConsoleColor.Default, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None, printTruncateLen: int = 0) -> None: + """ + log: any type. + consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`. + writeToFile: bool. + printToStdout: bool. + logFile: str, log file path. + printTruncateLen: int, if <= 0, log is not truncated when print. 
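+ Minimal usage sketch (illustrative only):
+     Logger.Write('tracking started\n', ConsoleColor.DarkGreen, writeToFile=False)  # green console text, skip the log file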
+ """ + if not isinstance(log, str): + log = str(log) + if printToStdout and sys.stdout: + isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White) + if isValidColor: + SetConsoleColor(consoleColor) + try: + if printTruncateLen > 0 and len(log) > printTruncateLen: + sys.stdout.write(log[:printTruncateLen] + '...') + else: + sys.stdout.write(log) + except Exception as ex: + SetConsoleColor(ConsoleColor.Red) + isValidColor = True + sys.stdout.write(ex.__class__.__name__ + ': can\'t print the log!') + if log.endswith('\n'): + sys.stdout.write('\n') + if isValidColor: + ResetConsoleColor() + sys.stdout.flush() + if not writeToFile: + return + fileName = logFile if logFile else Logger.FileName + fout = None + try: + fout = open(fileName, 'a+', encoding='utf-8') + fout.write(log) + except Exception as ex: + if sys.stdout: + sys.stdout.write(ex.__class__.__name__ + ': can\'t write the log!') + finally: + if fout: + fout.close() + + @staticmethod + def WriteLine(log: Any, consoleColor: int = -1, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None) -> None: + """ + log: any type. + consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`. + writeToFile: bool. + printToStdout: bool. + logFile: str, log file path. + """ + Logger.Write('{}\n'.format(log), consoleColor, writeToFile, printToStdout, logFile) + + @staticmethod + def ColorfullyWrite(log: str, consoleColor: int = -1, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None) -> None: + """ + log: str. + consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`. + writeToFile: bool. + printToStdout: bool. + logFile: str, log file path. + ColorfullyWrite('Hello Green Text !!!'), Color name must be in `Logger.ColorNames` and can't be nested. + """ + text = [] + start = 0 + while True: + index1 = log.find(' start: + text.append((log[start:index1], consoleColor)) + start = index1 + index2 = log.find('>', index1 + 7) + if index2 < 0: + text.append((log[start:], consoleColor)) + break + colorName = log[index1 + 7:index2] + if colorName not in Logger.ColorNames: + text.append((log[start:index1 + 7], consoleColor)) + start = index1 + 7 + continue + index3 = log.find('', index2 + 1) + if index3 < 0: + text.append((log[start:], consoleColor)) + break + text.append((log[index2 + 1:index3], Logger.ColorNames[colorName])) + start = index3 + 8 + for t, c in text: + Logger.Write(t, c, writeToFile, printToStdout, logFile) + + @staticmethod + def ColorfullyWriteLine(log: str, consoleColor: int = -1, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None) -> None: + """ + log: str. + consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`. + writeToFile: bool. + printToStdout: bool. + logFile: str, log file path. + + ColorfullyWriteLine('Hello Green Text !!!'), Color name must be in `Logger.ColorNames` and can't be nested. + """ + Logger.ColorfullyWrite(log + '\n', consoleColor, writeToFile, printToStdout, logFile) + + @staticmethod + def Log(log: Any = '', consoleColor: int = -1, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None) -> None: + """ + log: any type. + consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`. + writeToFile: bool. + printToStdout: bool. + logFile: str, log file path. 
+ """ + frameCount = 1 + while True: + frame = sys._getframe(frameCount) + _, scriptFileName = os.path.split(frame.f_code.co_filename) + if scriptFileName != Logger._SelfFileName: + break + frameCount += 1 + + t = datetime.datetime.now() + log = '{}-{:02}-{:02} {:02}:{:02}:{:02}.{:03} {}[{}] {} -> {}\n'.format(t.year, t.month, t.day, + t.hour, t.minute, t.second, t.microsecond // 1000, scriptFileName, frame.f_lineno, frame.f_code.co_name, log) + Logger.Write(log, consoleColor, writeToFile, printToStdout, logFile) + + @staticmethod + def ColorfullyLog(log: str = '', consoleColor: int = -1, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None) -> None: + """ + log: any type. + consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`. + writeToFile: bool. + printToStdout: bool. + logFile: str, log file path. + + ColorfullyLog('Hello Green Text !!!'), Color name must be in `Logger.ColorNames` and can't be nested. + """ + frameCount = 1 + while True: + frame = sys._getframe(frameCount) + _, scriptFileName = os.path.split(frame.f_code.co_filename) + if scriptFileName != Logger._SelfFileName: + break + frameCount += 1 + + t = datetime.datetime.now() + log = '{}-{:02}-{:02} {:02}:{:02}:{:02}.{:03} {}[{}] {} -> {}\n'.format(t.year, t.month, t.day, + t.hour, t.minute, t.second, t.microsecond // 1000, scriptFileName, frame.f_lineno, frame.f_code.co_name, log) + Logger.ColorfullyWrite(log, consoleColor, writeToFile, printToStdout, logFile) + + @staticmethod + def DeleteLog() -> None: + """Delete log file.""" + if os.path.exists(Logger.FileName): + os.remove(Logger.FileName) + + LogColorfully = ColorfullyLog + WriteColorfully = ColorfullyWrite + WriteLineColorfully = ColorfullyWriteLine + + +class RotateFlipType: + RotateNoneFlipNone = 0 + Rotate90FlipNone = 1 + Rotate180FlipNone = 2 + Rotate270FlipNone = 3 + RotateNoneFlipX = 4 + Rotate90FlipX = 5 + Rotate180FlipX = 6 + Rotate270FlipX = 7 + RotateNoneFlipY = Rotate180FlipX + Rotate90FlipY = Rotate270FlipX + Rotate180FlipY = RotateNoneFlipX + Rotate270FlipY = Rotate90FlipX + RotateNoneFlipXY = Rotate180FlipNone + Rotate90FlipXY = Rotate270FlipNone + Rotate180FlipXY = RotateNoneFlipNone + Rotate270FlipXY = Rotate90FlipNone + + +class Bitmap: + """ + A simple Bitmap class wraps Windows GDI+ Gdiplus::Bitmap, but may not have high efficiency. + The color format is ARGB. + """ + + def __init__(self, width: int = 0, height: int = 0): + """ + Create a black transparent(ARGB=0x00000000) bimap of size(width, height). + """ + self._width = width + self._height = height + self._bitmap = 0 + if width > 0 and height > 0: + self._bitmap = _DllClient.instance().dll.BitmapCreate(width, height) + + def __del__(self): + self.Close() + + def __enter__(self): + return self + + def __exit__(self, exceptionType, exceptionValue, exceptionTraceback): + self.Close() + + def __bool__(self): + return self._bitmap > 0 + + def _getsize(self) -> None: + size = _DllClient.instance().dll.BitmapGetWidthAndHeight(ctypes.c_size_t(self._bitmap)) + self._width = size & 0xFFFF + self._height = size >> 16 + + def Close(self) -> None: + """Close the underlying Gdiplus::Bitmap object.""" + if self._bitmap: + _DllClient.instance().dll.BitmapRelease(ctypes.c_size_t(self._bitmap)) + self._bitmap = 0 + self._width = 0 + self._height = 0 + + Release = Close + + @property + def Width(self) -> int: + """ + Property Width. + Return int. + """ + return self._width + + @property + def Height(self) -> int: + """ + Property Height. + Return int. 
+ """ + return self._height + + @staticmethod + def FromHandle(hwnd: int, left: int = 0, top: int = 0, right: int = 0, bottom: int = 0) -> 'Bitmap': + """ + Create a `Bitmap` from a native window handle. + hwnd: int, the handle of a native window. + left: int. + top: int. + right: int. + bottom: int. + left, top, right and bottom are control's internal postion(from 0,0). + Return `Bitmap` or None. + """ + rect = ctypes.wintypes.RECT() + if ctypes.windll.user32.GetWindowRect(hwnd, ctypes.byref(rect)): + root = GetRootControl() + left, top, right, bottom = left + rect.left, top + rect.top, right + rect.left, bottom + rect.top + bitmap = Bitmap() + bitmap._bitmap = _DllClient.instance().dll.BitmapFromWindow(ctypes.c_size_t(root.NativeWindowHandle), left, top, right, bottom) + if bitmap._bitmap: + bitmap._getsize() + return bitmap + + @staticmethod + def FromControl(control: 'Control', x: int = 0, y: int = 0, width: int = 0, height: int = 0) -> 'Bitmap': + """ + Create a `Bitmap` from a `Control`. + control: `Control` or its subclass. + x: int. + y: int. + width: int. + height: int. + x, y: the point in control's internal position(from 0,0) + width, height: image's width and height from x, y, use 0 for entire area, + If width(or height) < 0, image size will be control's width(or height) - width(or height). + Return `Bitmap` or None. + """ + rect = control.BoundingRectangle + while rect.width() == 0 or rect.height() == 0: + # some controls maybe visible but their BoundingRectangle are all 0, capture its parent util valid + control = control.GetParentControl() + if not control: + return + rect = control.BoundingRectangle + if width <= 0: + width = rect.width() + width + if height <= 0: + height = rect.height() + height + handle = control.NativeWindowHandle + if handle: + left = x + top = y + right = left + width + bottom = top + height + else: + while True: + control = control.GetParentControl() + handle = control.NativeWindowHandle + if handle: + pRect = control.BoundingRectangle + left = rect.left - pRect.left + x + top = rect.top - pRect.top + y + right = left + width + bottom = top + height + break + return Bitmap.FromHandle(handle, left, top, right, bottom) + + @staticmethod + def FromFile(filePath: str) -> 'Bitmap': + """ + Create a `Bitmap` from a file path. + filePath: str. + Return `Bitmap` or None. + """ + bitmap = Bitmap() + bitmap._bitmap = _DllClient.instance().dll.BitmapFromFile(ctypes.c_wchar_p(filePath)) + if bitmap._bitmap: + bitmap._getsize() + return bitmap + + def ToFile(self, savePath: str) -> bool: + """ + Save to a file. + savePath: str, should end with .bmp, .jpg, .jpeg, .png, .gif, .tif, .tiff. + Return bool, True if succeed otherwise False. + """ + name, ext = os.path.splitext(savePath) + extMap = {'.bmp': 'image/bmp', + '.jpg': 'image/jpeg', + '.jpeg': 'image/jpeg', + '.gif': 'image/gif', + '.tif': 'image/tiff', + '.tiff': 'image/tiff', + '.png': 'image/png', + } + gdiplusImageFormat = extMap.get(ext.lower(), 'image/png') + return bool(_DllClient.instance().dll.BitmapToFile(ctypes.c_size_t(self._bitmap), ctypes.c_wchar_p(savePath), ctypes.c_wchar_p(gdiplusImageFormat))) + + def GetPixelColor(self, x: int, y: int) -> int: + """ + Get color value of a pixel. + x: int. + y: int. + Return int, ARGB color format. 
+ b = argb & 0x0000FF + g = (argb & 0x00FF00) >> 8 + r = (argb & 0xFF0000) >> 16 + a = (argb & 0xFF0000) >> 24 + """ + return _DllClient.instance().dll.BitmapGetPixel(ctypes.c_size_t(self._bitmap), x, y) + + def SetPixelColor(self, x: int, y: int, argb: int) -> bool: + """ + Set color value of a pixel. + x: int. + y: int. + argb: int, ARGB color format. + Return bool, True if succeed otherwise False. + """ + return _DllClient.instance().dll.BitmapSetPixel(ctypes.c_size_t(self._bitmap), x, y, argb) + + def GetPixelColorsHorizontally(self, x: int, y: int, count: int) -> ctypes.Array: + """ + x: int. + y: int. + count: int. + Return `ctypes.Array`, an iterable array of int values in ARGB color format form point x,y horizontally. + """ + #assert count <= self.Width * (self.Height - y) - x, 'count > max available from x,y' + arrayType = ctypes.c_uint32 * count + values = arrayType() + _DllClient.instance().dll.BitmapGetPixelsHorizontally(ctypes.c_size_t(self._bitmap), x, y, values, count) + return values + + def SetPixelColorsHorizontally(self, x: int, y: int, colors: Iterable[int]) -> bool: + """ + Set pixel colors form x,y horizontally. + x: int. + y: int. + colors: Iterable[int], an iterable list of int color values in ARGB color format, + use ctypes.Array for better performance, such as `ctypes.c_uint32 * length`. + Return bool, True if succeed otherwise False. + """ + count = len(colors) + #assert count <= self.Width * (self.Height - y) - x, 'len(colors) > max available from x,y' + if not isinstance(colors, ctypes.Array): + arrayType = ctypes.c_uint32 * count + colors = arrayType(*colors) + return _DllClient.instance().dll.BitmapSetPixelsHorizontally(ctypes.c_size_t(self._bitmap), x, y, colors, count) + + def GetPixelColorsVertically(self, x: int, y: int, count: int) -> ctypes.Array: + """ + x: int. + y: int. + count: int. + Return `ctypes.Array`, an iterable array of int values in ARGB color format form point x,y vertically. + """ + #assert count <= self.Height * (self.Width - x) - y, 'count > max available from x,y' + arrayType = ctypes.c_uint32 * count + values = arrayType() + _DllClient.instance().dll.BitmapGetPixelsVertically(ctypes.c_size_t(self._bitmap), x, y, values, count) + return values + + def SetPixelColorsVertically(self, x: int, y: int, colors: Iterable[int]) -> bool: + """ + Set pixel colors form x,y vertically. + x: int. + y: int. + colors: Iterable[int], an iterable list of int color values in ARGB color format, + use ctypes.Array for better performance, such as `ctypes.c_uint32 * length`. + Return bool, True if succeed otherwise False. + """ + count = len(colors) + #assert count <= self.Height * (self.Width - x) - y, 'len(colors) > max available from x,y' + if not isinstance(colors, ctypes.Array): + arrayType = ctypes.c_uint32 * count + colors = arrayType(*colors) + return _DllClient.instance().dll.BitmapSetPixelsVertically(ctypes.c_size_t(self._bitmap), x, y, colors, count) + + def GetPixelColorsOfRow(self, y: int) -> ctypes.Array: + """ + y: int, row index. + Return `ctypes.Array`, an iterable array of int values in ARGB color format of y row. + """ + return self.GetPixelColorsOfRect(0, y, self.Width, 1) + + def GetPixelColorsOfColumn(self, x: int) -> ctypes.Array: + """ + x: int, column index. + Return `ctypes.Array`, an iterable array of int values in ARGB color format of x column. + """ + return self.GetPixelColorsOfRect(x, 0, 1, self.Height) + + def GetPixelColorsOfRect(self, x: int, y: int, width: int, height: int) -> ctypes.Array: + """ + x: int. + y: int. 
+ width: int. + height: int. + Return `ctypes.Array`, an iterable array of int values in ARGB color format of the input rect. + """ + arrayType = ctypes.c_uint32 * (width * height) + values = arrayType() + _DllClient.instance().dll.BitmapGetPixelsOfRect(ctypes.c_size_t(self._bitmap), x, y, width, height, values) + return values + + def SetPixelColorsOfRect(self, x: int, y: int, width: int, height: int, colors: Iterable[int]) -> bool: + """ + x: int. + y: int. + width: int. + height: int. + colors: Iterable[int], an iterable list of int values in ARGB color format, it's length must equal to width*height, + use ctypes.Array for better performance, such as `ctypes.c_uint32 * (width*height)`. + Return bool. + """ + #assert len(colors) == width * height, 'len(colors) != width * height' + if not isinstance(colors, ctypes.Array): + arrayType = ctypes.c_uint32 * (width * height) + colors = arrayType(*colors) + return bool(_DllClient.instance().dll.BitmapSetPixelsOfRect(ctypes.c_size_t(self._bitmap), x, y, width, height, colors)) + + def GetPixelColorsOfRects(self, rects: List[Tuple[int, int, int, int]]) -> List[ctypes.Array]: + """ + rects: List[Tuple[int, int, int, int]], such as [(0,0,10,10), (10,10,20,20), (x,y,width,height)]. + Return List[ctypes.Array], a list whose elements are ctypes.Array which is an iterable array of int values in ARGB color format. + """ + return [self.GetPixelColorsOfRect(x, y, width, height) for x, y, width, height in rects] + + def GetAllPixelColors(self) -> ctypes.Array: + """ + Return `ctypes.Array`, an iterable array of int values in ARGB color format. + """ + return self.GetPixelColorsOfRect(0, 0, self.Width, self.Height) + + def SetAllPixelColors(self, colors: Iterable[int]) -> bool: + """ + colors: Iterable[int], an iterable list of int values in ARGB color format, it's length must equal to width*height, + use ctypes.Array for better performance, such as `ctypes.c_uint32 * (width*height)`. + Return bool. + """ + return self.SetPixelColorsOfRect(0, 0, self.Width, self.Height, colors) + + def Clear(self, color: int = 0xFFFFFFFF, x: int = 0, y: int = 0, width: int = 0, height: int = 0) -> bool: + """ + Set the color of rect(x,y,width,height). + color: int, ARGB color format. + x: int. + y: int. + width: int, if == 0, the width will be self.Width-x + height: int, if == 0, the height will be self.Height-y + Return bool. + """ + if width == 0: + width = self.Width - x + if height == 0: + height = self.Height - y + arrayType = ctypes.c_uint * (width * height) + nativeArray = arrayType() + for i in range(len(nativeArray)): + nativeArray[i] = color + return self.SetPixelColorsOfRect(x, y, width, height, nativeArray) + + def Copy(self, x: int = 0, y: int = 0, width: int = 0, height: int = 0) -> 'Bitmap': + """ + x: int, must >= 0. + y: int, must >= 0. + width: int, must <= self.Width-x. + height: int, must <= self.Height-y. + Return `Bitmap`, a new Bitmap copied from (x,y,width,height). + """ + if width == 0: + width = self.Width - x + if height == 0: + height = self.Height - y + nativeArray = self.GetPixelColorsOfRect(x, y, width, height) + bitmap = Bitmap(width, height) + bitmap.SetPixelColorsOfRect(0, 0, width, height, nativeArray) + return bitmap + + def Paste(self, x: int, y: int, bitmap: 'Bitmap') -> bool: + """ + Paste bitmap to (x,y) of self, modify the original Bitmap, + if x < 0 or x+bitmap.Width > self.Width, only the intersection part of bitmap is pasted, + if y < 0 or y+bitmap.Height > self.Height, only the intersection part of bitmap is pasted. 
+ x: int, can < 0. + y: int, can < 0. + bitmap: `Bitmap`. + Return bool, True if bitmap or a part of bitmap is pasted. + """ + left, top, right, bottom = max(0, x), max(0, y), min(self.Width, x + bitmap.Width), min(self.Height, y + bitmap.Height) + width, height = right - left, bottom - top + if width <= 0 or height <= 0: + return False + srcX = 0 if x >= 0 else -x + srcY = 0 if y >= 0 else -y + nativeArray = bitmap.GetPixelColorsOfRect(srcX, srcY, width, height) + return self.SetPixelColorsOfRect(left, top, width, height, nativeArray) + + def PastePart(self, dstX: int, dstY: int, srcBitmap: 'Bitmap', srcX: int = 0, srcY: int = 0, srcWidth: int = 0, srcHeight: int = 0) -> bool: + """ + Paste (srcX, srcY, srcWidth, srcHeight) of bitmap to (dstX, dstY) of self, modify the original Bitmap, + only the intersection part of the bitmap is pasted. + dstX: int, must >= 0. + dstY: int, must >= 0. + srcBitmap: `Bitmap`. + srcX: int, must >= 0. + srcY: int, must >= 0. + srcWidth: int, must >= 0 and <= srcBitmap.Width - srcX. + srcHeight: int, must >= 0 and <= srcBitmap.Height - srcY. + Return bool, True if a part of srcBitmap is pasted. + """ + if srcWidth == 0: + srcWidth = srcBitmap.Width - srcX + if srcHeight == 0: + srcHeight = srcBitmap.Height - srcY + left, top, right, bottom = max(0, dstX), max(0, dstY), min(self.Width, dstX + srcWidth), min(self.Height, dstY + srcHeight) + width, height = right - left, bottom - top + if width <= 0 or height <= 0: + return False + nativeArray = srcBitmap.GetPixelColorsOfRect(srcX, srcY, width, height) + return self.SetPixelColorsOfRect(dstX, dstY, width, height, nativeArray) + + def Resize(self, width: int, height: int) -> 'Bitmap': + """ + Resize a copy of the original to size (width, height), the original Bitmap is not modified. + width: int. + height: int. + Return a new `Bitmap`, the original is not modified. + """ + bitmap = Bitmap() + bitmap._bitmap = _DllClient.instance().dll.BitmapResizedFrom(ctypes.c_size_t(self._bitmap), width, height) + bitmap._getsize() + return bitmap + + def Rotate(self, angle: int, backgroundColor: int = 0xFFFFFFFF) -> 'Bitmap': + """ + Rotate a copy of the original with angle, the original Bitmap is not modified. + angle: int. + backgroundColor: int, ARGB color format. + Return a new `Bitmap`, the original is not modified. + """ + angle %= 360 + if angle == 0: + return self.Copy() + elif angle == 90: + return self.RotateFlip(RotateFlipType.Rotate90FlipNone) + elif angle == 180: + return self.RotateFlip(RotateFlipType.Rotate180FlipNone) + elif angle == 270: + return self.RotateFlip(RotateFlipType.Rotate270FlipNone) + else: + bitmap = Bitmap() + bitmap._bitmap = _DllClient.instance().dll.BitmapRotatedFrom(ctypes.c_size_t(self._bitmap), angle, backgroundColor) + bitmap._getsize() + return bitmap + + def RotateFlip(self, rotateFlip: int) -> 'Bitmap': + """ + Rotate 90*n or Filp a copy of the original, the original Bitmap is not modified. + rotateFlip: int, a value in class `RotateFlipType`. + Return a new `Bitmap`, the original is not modified. 
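+ Minimal usage sketch ('shot.png' is a hypothetical image path):
+     bmp = Bitmap.FromFile('shot.png')
+     rotated = bmp.RotateFlip(RotateFlipType.Rotate90FlipNone)  # a new Bitmap rotated 90 degrees; bmp itself is unchanged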
+ """ + bitmap = self.Copy() + _DllClient.instance().dll.BitmapRotateFlip(ctypes.c_size_t(bitmap._bitmap), rotateFlip) + bitmap._getsize() + return bitmap + + def __str__(self) -> str: + return '{}(Width={}, Height={})'.format(self.__class__.__name__, self.Width, self.Height) + + def __repr__(self) -> str: + return '<{}(Width={}, Height={}) at 0x{:x}>'.format(self.__class__.__name__, self.Width, self.Height, id(self)) + + +_ClipboardLock = threading.Lock() + + +def _OpenClipboard(value): + end = ProcessTime() + 0.2 + while ProcessTime() < end: + ret = ctypes.windll.user32.OpenClipboard(value) + if ret: + return ret + time.sleep(0.005) + + +def GetClipboardFormats() -> Dict[int, str]: + ''' + Get clipboard formats that system clipboard has currently. + Return Dict[int, str]. + The key is a int value in class `ClipboardFormat` or othes values that apps registered by ctypes.windll.user32.RegisterClipboardFormatW + ''' + formats = {} + with _ClipboardLock: + if _OpenClipboard(0): + formatType = 0 + arrayType = ctypes.c_wchar * 64 + while True: + formatType = ctypes.windll.user32.EnumClipboardFormats(formatType) + if formatType == 0: + break + values = arrayType() + ctypes.windll.user32.GetClipboardFormatNameW(formatType, values, len(values)) + formatName = values.value + if not formatName: + formatName = _GetDictKeyName(ClipboardFormat.__dict__, formatType, 'CF_') + formats[formatType] = formatName + ctypes.windll.user32.CloseClipboard() + return formats + + +def GetClipboardText() -> str: + with _ClipboardLock: + if _OpenClipboard(0): + if ctypes.windll.user32.IsClipboardFormatAvailable(ClipboardFormat.CF_UNICODETEXT): + hClipboardData = ctypes.windll.user32.GetClipboardData(ClipboardFormat.CF_UNICODETEXT) + hText = ctypes.windll.kernel32.GlobalLock(ctypes.c_void_p(hClipboardData)) + text = ctypes.c_wchar_p(hText).value + ctypes.windll.kernel32.GlobalUnlock(ctypes.c_void_p(hClipboardData)) + ctypes.windll.user32.CloseClipboard() + return text + return '' + + +def SetClipboardText(text: str) -> bool: + """ + Return bool, True if succeed otherwise False. + """ + ret = False + with _ClipboardLock: + if _OpenClipboard(0): + ctypes.windll.user32.EmptyClipboard() + textByteLen = (len(text) + 1) * 2 + hClipboardData = ctypes.windll.kernel32.GlobalAlloc(0x2, textByteLen) # GMEM_MOVEABLE + hDestText = ctypes.windll.kernel32.GlobalLock(ctypes.c_void_p(hClipboardData)) + ctypes.cdll.msvcrt.wcsncpy(ctypes.c_wchar_p(hDestText), ctypes.c_wchar_p(text), ctypes.c_size_t(textByteLen // 2)) + ctypes.windll.kernel32.GlobalUnlock(ctypes.c_void_p(hClipboardData)) + # system owns hClipboardData after calling SetClipboardData, + # application can not write to or free the data once ownership has been transferred to the system + if ctypes.windll.user32.SetClipboardData(ctypes.c_uint(ClipboardFormat.CF_UNICODETEXT), ctypes.c_void_p(hClipboardData)): + ret = True + else: + ctypes.windll.kernel32.GlobalFree(ctypes.c_void_p(hClipboardData)) + ctypes.windll.user32.CloseClipboard() + return ret + + +def GetClipboardHtml() -> str: + """ + Return str. + Note: the positions(StartHTML, EndHTML ...) are valid for utf-8 encoding html text, + when the utf-8 encoding html text is decoded to Python unicode str, + the positions may not correspond to the actual positions in the returned str. 
+ """ + with _ClipboardLock: + if _OpenClipboard(0): + if ctypes.windll.user32.IsClipboardFormatAvailable(ClipboardFormat.CF_HTML): + hClipboardData = ctypes.windll.user32.GetClipboardData(ClipboardFormat.CF_HTML) + hText = ctypes.windll.kernel32.GlobalLock(ctypes.c_void_p(hClipboardData)) + text = ctypes.c_char_p(hText).value.decode('utf-8') + ctypes.windll.kernel32.GlobalUnlock(ctypes.c_void_p(hClipboardData)) + ctypes.windll.user32.CloseClipboard() + return text + return '' + + +def SetClipboardHtml(htmlText: str) -> bool: + """ + htmlText: str, such as '
<html><head><title>Title</title></head><body><h1>Hello</h1><p>hello world</p></body></html>
' + Return bool, True if succeed otherwise False. + Refer: https://docs.microsoft.com/en-us/troubleshoot/cpp/add-html-code-clipboard + """ + u8Html = htmlText.encode('utf-8') + formatBytes = b'Version:0.9\r\nStartHTML:00000000\r\nEndHTML:00000000\r\nStartFragment:00000000\r\nEndFragment:00000000\r\n<html>\r\n<body>\r\n<!--StartFragment-->{}<!--EndFragment-->\r\n</body>\r\n</html>' + startHtml = formatBytes.find(b'<html>') + endHtml = len(formatBytes) + len(u8Html) - 2 + startFragment = formatBytes.find(b'{}') + endFragment = formatBytes.find(b'<!--EndFragment-->') + len(u8Html) - 2 + formatBytes = formatBytes.replace(b'StartHTML:00000000', 'StartHTML:{:08}'.format(startHtml).encode('utf-8')) + formatBytes = formatBytes.replace(b'EndHTML:00000000', 'EndHTML:{:08}'.format(endHtml).encode('utf-8')) + formatBytes = formatBytes.replace(b'StartFragment:00000000', 'StartFragment:{:08}'.format(startFragment).encode('utf-8')) + formatBytes = formatBytes.replace(b'EndFragment:00000000', 'EndFragment:{:08}'.format(endFragment).encode('utf-8')) + u8Result = formatBytes.replace(b'{}', u8Html) + ret = False + with _ClipboardLock: + if _OpenClipboard(0): + ctypes.windll.user32.EmptyClipboard() + hClipboardData = ctypes.windll.kernel32.GlobalAlloc(0x2002, len(u8Result) + 4) # GMEM_MOVEABLE | GMEM_DDESHARE + hDestText = ctypes.windll.kernel32.GlobalLock(ctypes.c_void_p(hClipboardData)) + ctypes.cdll.msvcrt.strncpy(ctypes.c_char_p(hDestText), ctypes.c_char_p(u8Result), len(u8Result)) + ctypes.windll.kernel32.GlobalUnlock(ctypes.c_void_p(hClipboardData)) + # system owns hClipboardData after calling SetClipboardData, + # application can not write to or free the data once ownership has been transferred to the system + if ctypes.windll.user32.SetClipboardData(ctypes.c_uint(ClipboardFormat.CF_HTML), ctypes.c_void_p(hClipboardData)): + ret = True + else: + ctypes.windll.kernel32.GlobalFree(ctypes.c_void_p(hClipboardData)) + ctypes.windll.user32.CloseClipboard() + return ret + + + def GetClipboardBitmap() -> Bitmap: + with _ClipboardLock: + if _OpenClipboard(0): + if ctypes.windll.user32.IsClipboardFormatAvailable(ClipboardFormat.CF_BITMAP): + hClipboardData = ctypes.windll.user32.GetClipboardData(ClipboardFormat.CF_BITMAP) + bitmap = Bitmap() + bitmap._bitmap = _DllClient.instance().dll.BitmapFromHBITMAP(ctypes.c_size_t(hClipboardData), 0, 0, 0, 0) + bitmap._getsize() + ctypes.windll.user32.CloseClipboard() + return bitmap + + + def SetClipboardBitmap(bitmap: Bitmap) -> bool: + """ + Return bool, True if succeed otherwise False.
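The CF_HTML header embeds byte offsets (StartHTML, EndFragment, ...) measured against the UTF-8 payload, which is why the code above patches the `00000000` placeholders only after encoding the fragment; those offsets may therefore not match character positions in the decoded Python str. A usage sketch, assuming the module is importable as `uiautomation` (whether a target application honours CF_HTML on paste is up to that application):

```python
import uiautomation as auto

html = '<html><body><h1>Hello</h1><p>hello world</p></body></html>'
if auto.SetClipboardHtml(html):
    # The raw CF_HTML block, headers included; offsets refer to the UTF-8 bytes.
    print(auto.GetClipboardHtml())
```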
+ """ + ret = False + with _ClipboardLock: + if bitmap._bitmap and _OpenClipboard(0): + ctypes.windll.user32.EmptyClipboard() + hBitmap = _DllClient.instance().dll.BitmapToHBITMAP(ctypes.c_size_t(bitmap._bitmap), 0xFFFFFFFF) + hBitmap2 = ctypes.windll.gdi32.CreateBitmap(bitmap.Width, bitmap.Height, 1, 32, 0) + hdc = ctypes.windll.user32.GetDC(0) + hdc1 = ctypes.windll.gdi32.CreateCompatibleDC(ctypes.c_void_p(hdc)) + hdc2 = ctypes.windll.gdi32.CreateCompatibleDC(ctypes.c_void_p(hdc)) + ctypes.windll.user32.ReleaseDC(0, ctypes.c_void_p(hdc)) + hOldBmp1 = ctypes.windll.gdi32.SelectObject(ctypes.c_void_p(hdc1), ctypes.c_void_p(hBitmap)) + hOldBmp2 = ctypes.windll.gdi32.SelectObject(ctypes.c_void_p(hdc2), ctypes.c_void_p(hBitmap2)) + ctypes.windll.gdi32.BitBlt(ctypes.c_void_p(hdc2), 0, 0, bitmap.Width, bitmap.Height, ctypes.c_void_p(hdc1), 0, 0, 0x00CC0020) # SRCCOPY + ctypes.windll.gdi32.SelectObject(ctypes.c_void_p(hdc1), ctypes.c_void_p(hOldBmp1)) + ctypes.windll.gdi32.SelectObject(ctypes.c_void_p(hdc2), ctypes.c_void_p(hOldBmp2)) + ctypes.windll.gdi32.DeleteDC(ctypes.c_void_p(hdc1)) + ctypes.windll.gdi32.DeleteDC(ctypes.c_void_p(hdc2)) + ctypes.windll.gdi32.DeleteObject(ctypes.c_void_p(hBitmap)) + # system owns hClipboardData after calling SetClipboardData, + # application can not write to or free the data once ownership has been transferred to the system + if ctypes.windll.user32.SetClipboardData(ctypes.c_uint(ClipboardFormat.CF_BITMAP), ctypes.c_void_p(hBitmap2)): + ret = True + else: + ctypes.windll.gdi32.DeleteObject(ctypes.c_void_p(hBitmap2)) + ctypes.windll.user32.CloseClipboard() + return ret + + +def Input(prompt: str, consoleColor: int = ConsoleColor.Default) -> str: + Logger.Write(prompt, consoleColor, writeToFile=False) + return input() + + +def InputColorfully(prompt: str, consoleColor: int = ConsoleColor.Default) -> str: + Logger.ColorfullyWrite(prompt, consoleColor, writeToFile=False) + return input() + + +_PatternIdInterfaces = None + + +def GetPatternIdInterface(patternId: int): + """ + Get pattern COM interface by pattern id. + patternId: int, a value in class `PatternId`. + Return comtypes._cominterface_meta. 
+ """ + global _PatternIdInterfaces + if not _PatternIdInterfaces: + _PatternIdInterfaces = { + # PatternId.AnnotationPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationAnnotationPattern, + # PatternId.CustomNavigationPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationCustomNavigationPattern, + PatternId.DockPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationDockPattern, + # PatternId.DragPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationDragPattern, + # PatternId.DropTargetPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationDropTargetPattern, + PatternId.ExpandCollapsePattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationExpandCollapsePattern, + PatternId.GridItemPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationGridItemPattern, + PatternId.GridPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationGridPattern, + PatternId.InvokePattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationInvokePattern, + PatternId.ItemContainerPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationItemContainerPattern, + PatternId.LegacyIAccessiblePattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationLegacyIAccessiblePattern, + PatternId.MultipleViewPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationMultipleViewPattern, + # PatternId.ObjectModelPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationObjectModelPattern, + PatternId.RangeValuePattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationRangeValuePattern, + PatternId.ScrollItemPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationScrollItemPattern, + PatternId.ScrollPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationScrollPattern, + PatternId.SelectionItemPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationSelectionItemPattern, + PatternId.SelectionPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationSelectionPattern, + # PatternId.SpreadsheetItemPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationSpreadsheetItemPattern, + # PatternId.SpreadsheetPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationSpreadsheetPattern, + # PatternId.StylesPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationStylesPattern, + PatternId.SynchronizedInputPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationSynchronizedInputPattern, + PatternId.TableItemPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationTableItemPattern, + PatternId.TablePattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationTablePattern, + # PatternId.TextChildPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationTextChildPattern, + # PatternId.TextEditPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationTextEditPattern, + PatternId.TextPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationTextPattern, + # PatternId.TextPattern2: _AutomationClient.instance().UIAutomationCore.IUIAutomationTextPattern2, + PatternId.TogglePattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationTogglePattern, + PatternId.TransformPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationTransformPattern, + # PatternId.TransformPattern2: _AutomationClient.instance().UIAutomationCore.IUIAutomationTransformPattern2, + PatternId.ValuePattern: 
_AutomationClient.instance().UIAutomationCore.IUIAutomationValuePattern, + PatternId.VirtualizedItemPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationVirtualizedItemPattern, + PatternId.WindowPattern: _AutomationClient.instance().UIAutomationCore.IUIAutomationWindowPattern, + } + debug = False + # the following patterns doesn't exist on Windows 7 or lower + try: + _PatternIdInterfaces[PatternId.AnnotationPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationAnnotationPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have AnnotationPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.CustomNavigationPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationCustomNavigationPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have CustomNavigationPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.DragPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationDragPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have DragPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.DropTargetPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationDropTargetPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have DropTargetPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.ObjectModelPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationObjectModelPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have ObjectModelPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.SpreadsheetItemPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationSpreadsheetItemPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have SpreadsheetItemPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.SpreadsheetPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationSpreadsheetPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have SpreadsheetPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.StylesPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationStylesPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have StylesPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.TextChildPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationTextChildPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have TextChildPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.TextEditPattern] = _AutomationClient.instance().UIAutomationCore.IUIAutomationTextEditPattern + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have TextEditPattern.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.TextPattern2] = _AutomationClient.instance().UIAutomationCore.IUIAutomationTextPattern2 + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have TextPattern2.', ConsoleColor.Yellow) + try: + _PatternIdInterfaces[PatternId.TransformPattern2] = _AutomationClient.instance().UIAutomationCore.IUIAutomationTransformPattern2 + except: + if debug: + Logger.WriteLine('UIAutomationCore does not have TransformPattern2.', ConsoleColor.Yellow) + return _PatternIdInterfaces[patternId] + + +""" +Control Pattern Mapping for UI Automation Clients. 
+Refer https://docs.microsoft.com/en-us/previous-versions//dd319586(v=vs.85) +""" + + +class AnnotationPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationannotationpattern""" + self.pattern = pattern + + @property + def AnnotationTypeId(self) -> int: + """ + Property AnnotationTypeId. + Call IUIAutomationAnnotationPattern::get_CurrentAnnotationTypeId. + Return int, a value in class `AnnotationType`. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationannotationpattern-get_currentannotationtypeid + """ + return self.pattern.CurrentAnnotationTypeId + + @property + def AnnotationTypeName(self) -> str: + """ + Property AnnotationTypeName. + Call IUIAutomationAnnotationPattern::get_CurrentAnnotationTypeName. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationannotationpattern-get_currentannotationtypename + """ + return self.pattern.CurrentAnnotationTypeName + + @property + def Author(self) -> str: + """ + Property Author. + Call IUIAutomationAnnotationPattern::get_CurrentAuthor. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationannotationpattern-get_currentauthor + """ + return self.pattern.CurrentAuthor + + @property + def DateTime(self) -> str: + """ + Property DateTime. + Call IUIAutomationAnnotationPattern::get_CurrentDateTime. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationannotationpattern-get_currentdatetime + """ + return self.pattern.CurrentDateTime + + @property + def Target(self) -> 'Control': + """ + Property Target. + Call IUIAutomationAnnotationPattern::get_CurrentTarget. + Return `Control` subclass, the element that is being annotated. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationannotationpattern-get_currenttarget + """ + ele = self.pattern.CurrentTarget + return Control.CreateControlFromElement(ele) + + +class CustomNavigationPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationcustomnavigationpattern""" + self.pattern = pattern + + def Navigate(self, direction: int) -> 'Control': + """ + Call IUIAutomationCustomNavigationPattern::Navigate. + Get the next control in the specified direction within the logical UI tree. + direction: int, a value in class `NavigateDirection`. + Return `Control` subclass or None. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationcustomnavigationpattern-navigate + """ + ele = self.pattern.Navigate(direction) + return Control.CreateControlFromElement(ele) + + +class DockPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationdockpattern""" + self.pattern = pattern + + @property + def DockPosition(self) -> int: + """ + Property DockPosition. + Call IUIAutomationDockPattern::get_CurrentDockPosition. + Return int, a value in class `DockPosition`. 
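Each wrapper class in this section only stores the raw COM pattern in `self.pattern`; how a caller obtains one is defined elsewhere in the module. The sketch below assumes a `GetPattern(patternId)` helper on `Control` and a `WindowControl` search class, both outside this hunk, and uses a placeholder window:

```python
import uiautomation as auto

window = auto.WindowControl(searchDepth=1, ClassName='Notepad')   # placeholder target window
pattern = window.GetPattern(auto.PatternId.WindowPattern)         # a wrapper instance, or None
if pattern:
    print(type(pattern).__name__)   # -> 'WindowPattern'
```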
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationdockpattern-get_currentdockposition + """ + return self.pattern.CurrentDockPosition + + def SetDockPosition(self, dockPosition: int, waitTime: float = OPERATION_WAIT_TIME) -> int: + """ + Call IUIAutomationDockPattern::SetDockPosition. + dockPosition: int, a value in class `DockPosition`. + waitTime: float. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationdockpattern-setdockposition + """ + ret = self.pattern.SetDockPosition(dockPosition) + time.sleep(waitTime) + return ret + + +class DragPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationdragpattern""" + self.pattern = pattern + + @property + def DropEffect(self) -> str: + """ + Property DropEffect. + Call IUIAutomationDragPattern::get_CurrentDropEffect. + Return str, a localized string that indicates what happens + when the user drops this element as part of a drag-drop operation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationdragpattern-get_currentdropeffect + """ + return self.pattern.CurrentDropEffect + + @property + def DropEffects(self) -> List[str]: + """ + Property DropEffects. + Call IUIAutomationDragPattern::get_CurrentDropEffects, todo SAFEARRAY. + Return List[str], a list of localized strings that enumerate the full set of effects + that can happen when this element as part of a drag-and-drop operation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationdragpattern-get_currentdropeffects + """ + return self.pattern.CurrentDropEffects + + @property + def IsGrabbed(self) -> bool: + """ + Property IsGrabbed. + Call IUIAutomationDragPattern::get_CurrentIsGrabbed. + Return bool, indicates whether the user has grabbed this element as part of a drag-and-drop operation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationdragpattern-get_currentisgrabbed + """ + return bool(self.pattern.CurrentIsGrabbed) + + def GetGrabbedItems(self) -> List['Control']: + """ + Call IUIAutomationDragPattern::GetCurrentGrabbedItems. + Return List[Control], a list of `Control` subclasses that represent the full set of items + that the user is dragging as part of a drag operation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationdragpattern-getcurrentgrabbeditems + """ + eleArray = self.pattern.GetCurrentGrabbedItems() + if eleArray: + controls = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + con = Control.CreateControlFromElement(element=ele) + if con: + controls.append(con) + return controls + return [] + + +class DropTargetPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationdroptargetpattern""" + self.pattern = pattern + + @property + def DropTargetEffect(self) -> str: + """ + Property DropTargetEffect. + Call IUIAutomationDropTargetPattern::get_CurrentDropTargetEffect. + Return str, a localized string that describes what happens + when the user drops the grabbed element on this drop target. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationdragpattern-get_currentdroptargeteffect + """ + return self.pattern.CurrentDropTargetEffect + + @property + def DropTargetEffects(self) -> List[str]: + """ + Property DropTargetEffects. + Call IUIAutomationDropTargetPattern::get_CurrentDropTargetEffects, todo SAFEARRAY. + Return List[str], a list of localized strings that enumerate the full set of effects + that can happen when the user drops a grabbed element on this drop target + as part of a drag-and-drop operation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationdragpattern-get_currentdroptargeteffects + """ + return self.pattern.CurrentDropTargetEffects + + +class ExpandCollapsePattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationexpandcollapsepattern""" + self.pattern = pattern + + @property + def ExpandCollapseState(self) -> int: + """ + Property ExpandCollapseState. + Call IUIAutomationExpandCollapsePattern::get_CurrentExpandCollapseState. + Return int, a value in class ExpandCollapseState. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationexpandcollapsepattern-get_currentexpandcollapsestate + """ + return self.pattern.CurrentExpandCollapseState + + def Collapse(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationExpandCollapsePattern::Collapse. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationexpandcollapsepattern-collapse + """ + try: + ret = self.pattern.Collapse() == S_OK + time.sleep(waitTime) + return ret + except: + pass + return False + + def Expand(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationExpandCollapsePattern::Expand. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationexpandcollapsepattern-expand + """ + try: + ret = self.pattern.Expand() == S_OK + time.sleep(waitTime) + return ret + except: + pass + return False + + +class GridItemPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationgriditempattern""" + self.pattern = pattern + + @property + def Column(self) -> int: + """ + Property Column. + Call IUIAutomationGridItemPattern::get_CurrentColumn. + Return int, the zero-based index of the column that contains the item. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationgriditempattern-get_currentcolumn + """ + return self.pattern.CurrentColumn + + @property + def ColumnSpan(self) -> int: + """ + Property ColumnSpan. + Call IUIAutomationGridItemPattern::get_CurrentColumnSpan. + Return int, the number of columns spanned by the grid item. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationgriditempattern-get_currentcolumnspan + """ + return self.pattern.CurrentColumnSpan + + @property + def ContainingGrid(self) -> 'Control': + """ + Property ContainingGrid. + Call IUIAutomationGridItemPattern::get_CurrentContainingGrid. 
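`Expand`/`Collapse` swallow COM errors and just report success as a bool, so callers typically re-check `ExpandCollapseState` afterwards. A sketch with a placeholder combo box (control classes and `GetPattern` assumed from elsewhere in the module, as above):

```python
import uiautomation as auto

combo = auto.ComboBoxControl(searchDepth=10, Name='Font')   # placeholder combo box
ecp = combo.GetPattern(auto.PatternId.ExpandCollapsePattern)
if ecp and ecp.Expand():
    print(ecp.ExpandCollapseState == auto.ExpandCollapseState.Expanded)
    ecp.Collapse()
```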
+ Return `Control` subclass, the element that contains the grid item. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationgriditempattern-get_currentcontaininggrid + """ + return Control.CreateControlFromElement(self.pattern.CurrentContainingGrid) + + @property + def Row(self) -> int: + """ + Property Row. + Call IUIAutomationGridItemPattern::get_CurrentRow. + Return int, the zero-based index of the row that contains the grid item. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationgriditempattern-get_currentrow + """ + return self.pattern.CurrentRow + + @property + def RowSpan(self) -> int: + """ + Property RowSpan. + Call IUIAutomationGridItemPattern::get_CurrentRowSpan. + Return int, the number of rows spanned by the grid item. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationgriditempattern-get_currentrowspan + """ + return self.pattern.CurrentRowSpan + + +class GridPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationgridpattern""" + self.pattern = pattern + + @property + def ColumnCount(self) -> int: + """ + Property ColumnCount. + Call IUIAutomationGridPattern::get_CurrentColumnCount. + Return int, the number of columns in the grid. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationgridpattern-get_currentcolumncount + """ + return self.pattern.CurrentColumnCount + + @property + def RowCount(self) -> int: + """ + Property RowCount. + Call IUIAutomationGridPattern::get_CurrentRowCount. + Return int, the number of rows in the grid. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationgridpattern-get_currentrowcount + """ + return self.pattern.CurrentRowCount + + def GetItem(self) -> 'Control': + """ + Call IUIAutomationGridPattern::GetItem. + Return `Control` subclass, a control representing an item in the grid. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationgridpattern-getitem + """ + return Control.CreateControlFromElement(self.pattern.GetItem()) + + +class InvokePattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationinvokepattern""" + self.pattern = pattern + + def Invoke(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationInvokePattern::Invoke. + Invoke the action of a control, such as a button click. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationinvokepattern-invoke + """ + ret = self.pattern.Invoke() == S_OK + time.sleep(waitTime) + return ret + + +class ItemContainerPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationitemcontainerpattern""" + self.pattern = pattern + + def FindItemByProperty(control: 'Control', propertyId: int, propertyValue) -> 'Control': + """ + Call IUIAutomationItemContainerPattern::FindItemByProperty. + control: `Control` or its subclass. + propertyValue: COM VARIANT according to propertyId? todo. 
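`InvokePattern.Invoke` is the programmatic equivalent of clicking a button; like the other action methods here it sleeps `waitTime` after the call so the UI can react. A minimal sketch with placeholder window and button names:

```python
import uiautomation as auto

window = auto.WindowControl(searchDepth=1, Name='Calculator')   # placeholder window
button = window.ButtonControl(Name='Equals')                    # placeholder button
invoke = button.GetPattern(auto.PatternId.InvokePattern)
if invoke:
    invoke.Invoke(waitTime=0.5)   # returns True on S_OK, then sleeps 0.5 s
```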
+ propertyId: int, a value in class `PropertyId`. + Return `Control` subclass, a control within a containing element, based on a specified property value. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationitemcontainerpattern-finditembyproperty + """ + ele = self.pattern.FindItemByProperty(control.Element, propertyId, propertyValue) + return Control.CreateControlFromElement(ele) + + +class LegacyIAccessiblePattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationlegacyiaccessiblepattern""" + self.pattern = pattern + + @property + def ChildId(self) -> int: + """ + Property ChildId. + Call IUIAutomationLegacyIAccessiblePattern::get_CurrentChildId. + Return int, the Microsoft Active Accessibility child identifier for the element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentchildid + """ + return self.pattern.CurrentChildId + + @property + def DefaultAction(self) -> str: + """ + Property DefaultAction. + Call IUIAutomationLegacyIAccessiblePattern::get_CurrentDefaultAction. + Return str, the Microsoft Active Accessibility current default action for the element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentdefaultaction + """ + return self.pattern.CurrentDefaultAction + + @property + def Description(self) -> str: + """ + Property Description. + Call IUIAutomationLegacyIAccessiblePattern::get_CurrentDescription. + Return str, the Microsoft Active Accessibility description of the element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentdescription + """ + return self.pattern.CurrentDescription + + @property + def Help(self) -> str: + """ + Property Help. + Call IUIAutomationLegacyIAccessiblePattern::get_CurrentHelp. + Return str, the Microsoft Active Accessibility help string for the element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currenthelp + """ + return self.pattern.CurrentHelp + + @property + def KeyboardShortcut(self) -> str: + """ + Property KeyboardShortcut. + Call IUIAutomationLegacyIAccessiblePattern::get_CurrentKeyboardShortcut. + Return str, the Microsoft Active Accessibility keyboard shortcut property for the element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentkeyboardshortcut + """ + return self.pattern.CurrentKeyboardShortcut + + @property + def Name(self) -> str: + """ + Property Name. + Call IUIAutomationLegacyIAccessiblePattern::get_CurrentName. + Return str, the Microsoft Active Accessibility name property of the element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentname + """ + return self.pattern.CurrentName or '' # CurrentName may be None + + @property + def Role(self) -> int: + """ + Property Role. + Call IUIAutomationLegacyIAccessiblePattern::get_CurrentRole. + Return int, a value in calss `AccessibleRole`, the Microsoft Active Accessibility role identifier. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentrole + """ + return self.pattern.CurrentRole + + @property + def State(self) -> int: + """ + Property State. + Call IUIAutomationLegacyIAccessiblePattern::get_CurrentState. + Return int, a value in calss `AccessibleState`, the Microsoft Active Accessibility state identifier. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentstate + """ + return self.pattern.CurrentState + + @property + def Value(self) -> str: + """ + Property Value. + Call IUIAutomationLegacyIAccessiblePattern::get_CurrentValue. + Return str, the Microsoft Active Accessibility value property. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentvalue + """ + return self.pattern.CurrentValue + + def DoDefaultAction(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationLegacyIAccessiblePattern::DoDefaultAction. + Perform the Microsoft Active Accessibility default action for the element. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-dodefaultaction + """ + ret = self.pattern.DoDefaultAction() == S_OK + time.sleep(waitTime) + return ret + + def GetSelection(self) -> List['Control']: + """ + Call IUIAutomationLegacyIAccessiblePattern::GetCurrentSelection. + Return List[Control], a list of `Control` subclasses, + the Microsoft Active Accessibility property that identifies the selected children of this element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-getcurrentselection + """ + eleArray = self.pattern.GetCurrentSelection() + if eleArray: + controls = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + con = Control.CreateControlFromElement(element=ele) + if con: + controls.append(con) + return controls + return [] + + def GetIAccessible(self): + """ + Call IUIAutomationLegacyIAccessiblePattern::GetIAccessible, todo. + Return an IAccessible object that corresponds to the Microsoft UI Automation element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-getiaccessible + Refer https://docs.microsoft.com/en-us/windows/win32/api/oleacc/nn-oleacc-iaccessible + """ + return self.pattern.GetIAccessible() + + def Select(self, flagsSelect: int, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationLegacyIAccessiblePattern::Select. + Perform a Microsoft Active Accessibility selection. + flagsSelect: int, a value in `AccessibleSelection`. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-select + """ + ret = self.pattern.Select(flagsSelect) == S_OK + time.sleep(waitTime) + return ret + + def SetValue(self, value: str, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationLegacyIAccessiblePattern::SetValue. + Set the Microsoft Active Accessibility value property for the element. + value: str. + waitTime: float. 
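`LegacyIAccessiblePattern` exposes the old MSAA view of a control (name, role, state, value), which is often the only usable data on legacy applications. A read-only sketch with a placeholder edit control:

```python
import uiautomation as auto

edit = auto.EditControl(searchDepth=10, Name='Address')   # placeholder control
acc = edit.GetPattern(auto.PatternId.LegacyIAccessiblePattern)
if acc:
    print(acc.Name, acc.Role, acc.State)   # MSAA name / AccessibleRole / AccessibleState values
    print(acc.Value)                       # MSAA value, e.g. the text of an edit control
```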
+ Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-setvalue + """ + ret = self.pattern.SetValue(value) == S_OK + time.sleep(waitTime) + return ret + + +class MultipleViewPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationmultipleviewpattern""" + self.pattern = pattern + + @property + def CurrentView(self) -> int: + """ + Property CurrentView. + Call IUIAutomationMultipleViewPattern::get_CurrentCurrentView. + Return int, the control-specific identifier of the current view of the control. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationmultipleviewpattern-get_currentcurrentview + """ + return self.pattern.CurrentCurrentView + + def GetSupportedViews(self) -> List[int]: + """ + Call IUIAutomationMultipleViewPattern::GetCurrentSupportedViews, todo. + Return List[int], a list of int, control-specific view identifiers. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationmultipleviewpattern-getcurrentsupportedviews + """ + return self.pattern.GetCurrentSupportedViews() + + def GetViewName(self, view: int) -> str: + """ + Call IUIAutomationMultipleViewPattern::GetViewName. + view: int, the control-specific view identifier. + Return str, the name of a control-specific view. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationmultipleviewpattern-getviewname + """ + return self.pattern.GetViewName(view) + + def SetView(self, view: int) -> bool: + """ + Call IUIAutomationMultipleViewPattern::SetCurrentView. + Set the view of the control. + view: int, the control-specific view identifier. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationmultipleviewpattern-setcurrentview + """ + return self.pattern.SetCurrentView(view) == S_OK + + +class ObjectModelPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationobjectmodelpattern""" + self.pattern = pattern + + def GetUnderlyingObjectModel(self) -> ctypes.POINTER(comtypes.IUnknown): + """ + Call IUIAutomationObjectModelPattern::GetUnderlyingObjectModel, todo. + Return `ctypes.POINTER(comtypes.IUnknown)`, an interface used to access the underlying object model of the provider. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationobjectmodelpattern-getunderlyingobjectmodel + """ + return self.pattern.GetUnderlyingObjectModel() + + +class RangeValuePattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationrangevaluepattern""" + self.pattern = pattern + + @property + def IsReadOnly(self) -> bool: + """ + Property IsReadOnly. + Call IUIAutomationRangeValuePattern::get_CurrentIsReadOnly. + Return bool, indicates whether the value of the element can be changed. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentisreadonly + """ + return self.pattern.CurrentIsReadOnly + + @property + def LargeChange(self) -> float: + """ + Property LargeChange. + Call IUIAutomationRangeValuePattern::get_CurrentLargeChange. + Return float, the value that is added to or subtracted from the value of the control + when a large change is made, such as when the PAGE DOWN key is pressed. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentlargechange + """ + return self.pattern.CurrentLargeChange + + @property + def Maximum(self) -> float: + """ + Property Maximum. + Call IUIAutomationRangeValuePattern::get_CurrentMaximum. + Return float, the maximum value of the control. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentmaximum + """ + return self.pattern.CurrentMaximum + + @property + def Minimum(self) -> float: + """ + Property Minimum. + Call IUIAutomationRangeValuePattern::get_CurrentMinimum. + Return float, the minimum value of the control. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentminimum + """ + return self.pattern.CurrentMinimum + + @property + def SmallChange(self) -> float: + """ + Property SmallChange. + Call IUIAutomationRangeValuePattern::get_CurrentSmallChange. + Return float, the value that is added to or subtracted from the value of the control + when a small change is made, such as when an arrow key is pressed. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentsmallchange + """ + return self.pattern.CurrentSmallChange + + @property + def Value(self) -> float: + """ + Property Value. + Call IUIAutomationRangeValuePattern::get_CurrentValue. + Return float, the value of the control. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentvalue + """ + return self.pattern.CurrentValue + + def SetValue(self, value: float, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationRangeValuePattern::SetValue. + Set the value of the control. + value: int. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-setvalue + """ + ret = self.pattern.SetValue(value) == S_OK + time.sleep(waitTime) + return ret + + +class ScrollItemPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationscrollitempattern""" + self.pattern = pattern + + def ScrollIntoView(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationScrollItemPattern::ScrollIntoView. + waitTime: float. + Return bool, True if succeed otherwise False. 
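`RangeValuePattern` fits sliders, spin boxes and progress bars: read `Minimum`/`Maximum` first, then `SetValue` within that range; it returns False when the control is read-only or rejects the value. A sketch with a placeholder slider:

```python
import uiautomation as auto

slider = auto.SliderControl(searchDepth=10, Name='Volume')   # placeholder slider
rv = slider.GetPattern(auto.PatternId.RangeValuePattern)
if rv and not rv.IsReadOnly:
    rv.SetValue((rv.Minimum + rv.Maximum) / 2)   # stay inside the reported range
    print(rv.Value)
```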
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollitempattern-scrollintoview + """ + ret = self.pattern.ScrollIntoView() == S_OK + time.sleep(waitTime) + return ret + + +class ScrollPattern(): + NoScrollValue = -1 + + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationscrollpattern""" + self.pattern = pattern + + @property + def HorizontallyScrollable(self) -> bool: + """ + Property HorizontallyScrollable. + Call IUIAutomationScrollPattern::get_CurrentHorizontallyScrollable. + Return bool, indicates whether the element can scroll horizontally. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currenthorizontallyscrollable + """ + return bool(self.pattern.CurrentHorizontallyScrollable) + + @property + def HorizontalScrollPercent(self) -> float: + """ + Property HorizontalScrollPercent. + Call IUIAutomationScrollPattern::get_CurrentHorizontalScrollPercent. + Return float, the horizontal scroll position. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currenthorizontalscrollpercent + """ + return self.pattern.CurrentHorizontalScrollPercent + + @property + def HorizontalViewSize(self) -> float: + """ + Property HorizontalViewSize. + Call IUIAutomationScrollPattern::get_CurrentHorizontalViewSize. + Return float, the horizontal size of the viewable region of a scrollable element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currenthorizontalviewsize + """ + return self.pattern.CurrentHorizontalViewSize + + @property + def VerticallyScrollable(self) -> bool: + """ + Property VerticallyScrollable. + Call IUIAutomationScrollPattern::get_CurrentVerticallyScrollable. + Return bool, indicates whether the element can scroll vertically. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currentverticallyscrollable + """ + return bool(self.pattern.CurrentVerticallyScrollable) + + @property + def VerticalScrollPercent(self) -> float: + """ + Property VerticalScrollPercent. + Call IUIAutomationScrollPattern::get_CurrentVerticalScrollPercent. + Return float, the vertical scroll position. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currentverticalscrollpercent + """ + return self.pattern.CurrentVerticalScrollPercent + + @property + def VerticalViewSize(self) -> float: + """ + Property VerticalViewSize. + Call IUIAutomationScrollPattern::get_CurrentVerticalViewSize. + Return float, the vertical size of the viewable region of a scrollable element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currentverticalviewsize + """ + return self.pattern.CurrentVerticalViewSize + + def Scroll(self, horizontalAmount: int, verticalAmount: int, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationScrollPattern::Scroll. + Scroll the visible region of the content area horizontally and vertically. + horizontalAmount: int, a value in ScrollAmount. + verticalAmount: int, a value in ScrollAmount. + waitTime: float. 
+ Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-scroll + """ + ret = self.pattern.Scroll(horizontalAmount, verticalAmount) == S_OK + time.sleep(waitTime) + return ret + + def SetScrollPercent(self, horizontalPercent: float, verticalPercent: float, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationScrollPattern::SetScrollPercent. + Set the horizontal and vertical scroll positions as a percentage of the total content area within the UI Automation element. + horizontalPercent: float or int, a value in [0, 100] or ScrollPattern.NoScrollValue(-1) if no scroll. + verticalPercent: float or int, a value in [0, 100] or ScrollPattern.NoScrollValue(-1) if no scroll. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-setscrollpercent + """ + ret = self.pattern.SetScrollPercent(horizontalPercent, verticalPercent) == S_OK + time.sleep(waitTime) + return ret + + +class SelectionItemPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationselectionitempattern""" + self.pattern = pattern + + def AddToSelection(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationSelectionItemPattern::AddToSelection. + Add the current element to the collection of selected items. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionitempattern-addtoselection + """ + ret = self.pattern.AddToSelection() == S_OK + time.sleep(waitTime) + return ret + + @property + def IsSelected(self) -> bool: + """ + Property IsSelected. + Call IUIAutomationScrollPattern::get_CurrentIsSelected. + Return bool, indicates whether this item is selected. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currentisselected + """ + return bool(self.pattern.CurrentIsSelected) + + @property + def SelectionContainer(self) -> 'Control': + """ + Property SelectionContainer. + Call IUIAutomationScrollPattern::get_CurrentSelectionContainer. + Return `Control` subclass, the element that supports IUIAutomationSelectionPattern and acts as the container for this item. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currentselectioncontainer + """ + ele = self.pattern.CurrentSelectionContainer + return Control.CreateControlFromElement(ele) + + def RemoveFromSelection(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationSelectionItemPattern::RemoveFromSelection. + Remove this element from the selection. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionitempattern-removefromselection + """ + ret = self.pattern.RemoveFromSelection() == S_OK + time.sleep(waitTime) + return ret + + def Select(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationSelectionItemPattern::Select. + Clear any selected items and then select the current element. + waitTime: float. 
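`Scroll` moves in `ScrollAmount` steps, while `SetScrollPercent` jumps to an absolute position; `ScrollPattern.NoScrollValue` (-1) means "leave this axis unchanged". A sketch that scrolls a placeholder list to the bottom:

```python
import uiautomation as auto

listControl = auto.ListControl(searchDepth=10, Name='Messages')   # placeholder list
sp = listControl.GetPattern(auto.PatternId.ScrollPattern)
if sp and sp.VerticallyScrollable:
    # NoScrollValue keeps the horizontal position, 100.0 scrolls fully down.
    sp.SetScrollPercent(auto.ScrollPattern.NoScrollValue, 100.0)
    print(sp.VerticalScrollPercent)
```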
+ Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionitempattern-select + """ + ret = self.pattern.Select() == S_OK + time.sleep(waitTime) + return ret + + +class SelectionPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationselectionpattern""" + self.pattern = pattern + + @property + def CanSelectMultiple(self) -> bool: + """ + Property CanSelectMultiple. + Call IUIAutomationSelectionPattern::get_CurrentCanSelectMultiple. + Return bool, indicates whether more than one item in the container can be selected at one time. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionpattern-get_currentcanselectmultiple + """ + return bool(self.pattern.CurrentCanSelectMultiple) + + @property + def IsSelectionRequired(self) -> bool: + """ + Property IsSelectionRequired. + Call IUIAutomationSelectionPattern::get_CurrentIsSelectionRequired. + Return bool, indicates whether at least one item must be selected at all times. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionpattern-get_currentisselectionrequired + """ + return bool(self.pattern.CurrentIsSelectionRequired) + + def GetSelection(self) -> List['Control']: + """ + Call IUIAutomationSelectionPattern::GetCurrentSelection. + Return List[Control], a list of `Control` subclasses, the selected elements in the container.. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionpattern-getcurrentselection + """ + eleArray = self.pattern.GetCurrentSelection() + if eleArray: + controls = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + con = Control.CreateControlFromElement(element=ele) + if con: + controls.append(con) + return controls + return [] + + +class SpreadsheetItemPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationspreadsheetitempattern""" + self.pattern = pattern + + @property + def Formula(self) -> str: + """ + Property Formula. + Call IUIAutomationSpreadsheetItemPattern::get_CurrentFormula. + Return str, the formula for this cell. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationspreadsheetitempattern-get_currentformula + """ + return self.pattern.CurrentFormula + + def GetAnnotationObjects(self) -> List['Control']: + """ + Call IUIAutomationSelectionPattern::GetCurrentAnnotationObjects. + Return List[Control], a list of `Control` subclasses representing the annotations associated with this spreadsheet cell. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationspreadsheetitempattern-getcurrentannotationobjects + """ + eleArray = self.pattern.GetCurrentAnnotationObjects() + if eleArray: + controls = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + con = Control.CreateControlFromElement(element=ele) + if con: + controls.append(con) + return controls + return [] + + def GetAnnotationTypes(self) -> List[int]: + """ + Call IUIAutomationSelectionPattern::GetCurrentAnnotationTypes. 
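`SelectionItemPattern` acts on a single item (`Select`, `AddToSelection`, `RemoveFromSelection`), while `SelectionPattern` on the container reports what is currently selected. A sketch with placeholder list and item names:

```python
import uiautomation as auto

listControl = auto.ListControl(searchDepth=10, Name='Files')   # placeholder container
item = listControl.ListItemControl(Name='report.txt')          # placeholder item
sel = item.GetPattern(auto.PatternId.SelectionItemPattern)
if sel:
    sel.Select()   # clears any existing selection, then selects this item
    container = listControl.GetPattern(auto.PatternId.SelectionPattern)
    if container:
        print([c.Name for c in container.GetSelection()])
```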
+ Return List[int], a list of int values in class `AnnotationType`, + indicating the types of annotations that are associated with this spreadsheet cell. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionpattern-getcurrentannotationtypes + """ + return self.pattern.GetCurrentAnnotationTypes() + + +class SpreadsheetPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationspreadsheetpattern""" + self.pattern = pattern + + def GetItemByName(self, name: str) -> 'Control': + """ + Call IUIAutomationSpreadsheetPattern::GetItemByName. + name: str. + Return `Control` subclass or None, represents the spreadsheet cell that has the specified name.. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationspreadsheetpattern-getitembyname + """ + ele = self.pattern.GetItemByName(name) + return Control.CreateControlFromElement(element=ele) + + +class StylesPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationstylespattern""" + self.pattern = pattern + + @property + def ExtendedProperties(self) -> str: + """ + Property ExtendedProperties. + Call IUIAutomationStylesPattern::get_CurrentExtendedProperties. + Return str, a localized string that contains the list of extended properties for an element in a document. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationstylespattern-get_currentextendedproperties + """ + return self.pattern.CurrentExtendedProperties + + @property + def FillColor(self) -> int: + """ + Property FillColor. + Call IUIAutomationStylesPattern::get_CurrentFillColor. + Return int, the fill color of an element in a document. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationstylespattern-get_currentfillcolor + """ + return self.pattern.CurrentFillColor + + @property + def FillPatternColor(self) -> int: + """ + Property FillPatternColor. + Call IUIAutomationStylesPattern::get_CurrentFillPatternColor. + Return int, the color of the pattern used to fill an element in a document. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationstylespattern-get_currentfillpatterncolor + """ + return self.pattern.CurrentFillPatternColor + + @property + def Shape(self) -> str: + """ + Property Shape. + Call IUIAutomationStylesPattern::get_CurrentShape. + Return str, the shape of an element in a document. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationstylespattern-get_currentshape + """ + return self.pattern.CurrentShape + + @property + def StyleId(self) -> int: + """ + Property StyleId. + Call IUIAutomationStylesPattern::get_CurrentStyleId. + Return int, a value in class `StyleId`, the visual style associated with an element in a document. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationstylespattern-get_currentstyleid + """ + return self.pattern.CurrentStyleId + + @property + def StyleName(self) -> str: + """ + Property StyleName. + Call IUIAutomationStylesPattern::get_CurrentStyleName. 
+ Return str, the name of the visual style associated with an element in a document. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationstylespattern-get_currentstylename + """ + return self.pattern.CurrentStyleName + + +class SynchronizedInputPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationsynchronizedinputpattern""" + self.pattern = pattern + + def Cancel(self) -> bool: + """ + Call IUIAutomationSynchronizedInputPattern::Cancel. + Cause the Microsoft UI Automation provider to stop listening for mouse or keyboard input. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationsynchronizedinputpattern-cancel + """ + return self.pattern.Cancel() == S_OK + + def StartListening(self) -> bool: + """ + Call IUIAutomationSynchronizedInputPattern::StartListening. + Cause the Microsoft UI Automation provider to start listening for mouse or keyboard input. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationsynchronizedinputpattern-startlistening + """ + return self.pattern.StartListening() == S_OK + + +class TableItemPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtableitempattern""" + self.pattern = pattern + + def GetColumnHeaderItems(self) -> List['Control']: + """ + Call IUIAutomationTableItemPattern::GetCurrentColumnHeaderItems. + Return List[Control], a list of `Control` subclasses, the column headers associated with a table item or cell. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtableitempattern-getcurrentcolumnheaderitems + """ + eleArray = self.pattern.GetCurrentColumnHeaderItems() + if eleArray: + controls = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + con = Control.CreateControlFromElement(element=ele) + if con: + controls.append(con) + return controls + return [] + + def GetRowHeaderItems(self) -> List['Control']: + """ + Call IUIAutomationTableItemPattern::GetCurrentRowHeaderItems. + Return List[Control], a list of `Control` subclasses, the row headers associated with a table item or cell. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtableitempattern-getcurrentrowheaderitems + """ + eleArray = self.pattern.GetCurrentRowHeaderItems() + if eleArray: + controls = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + con = Control.CreateControlFromElement(element=ele) + if con: + controls.append(con) + return controls + return [] + + +class TablePattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtablepattern""" + self.pattern = pattern + + @property + def RowOrColumnMajor(self) -> int: + """ + Property RowOrColumnMajor. + Call IUIAutomationTablePattern::get_CurrentRowOrColumnMajor. + Return int, a value in class `RowOrColumnMajor`, the primary direction of traversal for the table. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtablepattern-get_currentroworcolumnmajor + """ + return self.pattern.CurrentRowOrColumnMajor + + def GetColumnHeaders(self) -> List['Control']: + """ + Call IUIAutomationTablePattern::GetCurrentColumnHeaders. + Return List[Control], a list of `Control` subclasses, representing all the column headers in a table.. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtablepattern-getcurrentcolumnheaders + """ + eleArray = self.pattern.GetCurrentColumnHeaders() + if eleArray: + controls = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + con = Control.CreateControlFromElement(element=ele) + if con: + controls.append(con) + return controls + return [] + + def GetRowHeaders(self) -> List['Control']: + """ + Call IUIAutomationTablePattern::GetCurrentRowHeaders. + Return List[Control], a list of `Control` subclasses, representing all the row headers in a table. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtablepattern-getcurrentrowheaders + """ + eleArray = self.pattern.GetCurrentRowHeaders() + if eleArray: + controls = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + con = Control.CreateControlFromElement(element=ele) + if con: + controls.append(con) + return controls + return [] + + +class TextRange(): + def __init__(self, textRange=None): + """ + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtextrange + """ + self.textRange = textRange + + def AddToSelection(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationTextRange::AddToSelection. + Add the text range to the collection of selected text ranges in a control that supports multiple, disjoint spans of selected text. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-addtoselection + """ + ret = self.textRange.AddToSelection() == S_OK + time.sleep(waitTime) + return ret + + def Clone(self) -> 'TextRange': + """ + Call IUIAutomationTextRange::Clone. + return `TextRange`, identical to the original and inheriting all properties of the original. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-clone + """ + return TextRange(textRange=self.textRange.Clone()) + + def Compare(self, textRange: 'TextRange') -> bool: + """ + Call IUIAutomationTextRange::Compare. + textRange: `TextRange`. + Return bool, specifies whether this text range has the same endpoints as another text range. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-compare + """ + return bool(self.textRange.Compare(textRange.textRange)) + + def CompareEndpoints(self, srcEndPoint: int, textRange: 'TextRange', targetEndPoint: int) -> int: + """ + Call IUIAutomationTextRange::CompareEndpoints. + srcEndPoint: int, a value in class `TextPatternRangeEndpoint`. + textRange: `TextRange`. + targetEndPoint: int, a value in class `TextPatternRangeEndpoint`. 
+        Return int, a negative value if the caller's endpoint occurs earlier in the text than the target endpoint;
+            0 if the caller's endpoint is at the same location as the target endpoint;
+            or a positive value if the caller's endpoint occurs later in the text than the target endpoint.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-compareendpoints
+        """
+        return self.textRange.CompareEndpoints(srcEndPoint, textRange.textRange, targetEndPoint)
+
+    def ExpandToEnclosingUnit(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:
+        """
+        Call IUIAutomationTextRange::ExpandToEnclosingUnit.
+        Normalize the text range by the specified text unit.
+        The range is expanded if it is smaller than the specified unit,
+            or shortened if it is longer than the specified unit.
+        waitTime: float.
+        Return bool, True if succeed otherwise False.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-expandtoenclosingunit
+        """
+        ret = self.textRange.ExpandToEnclosingUnit() == S_OK
+        time.sleep(waitTime)
+        return ret
+
+    def FindAttribute(self, textAttributeId: int, val, backward: bool) -> 'TextRange':
+        """
+        Call IUIAutomationTextRange::FindAttribute.
+        textAttributeId: int, a value in class `TextAttributeId`.
+        val: COM VARIANT according to textAttributeId? todo.
+        backward: bool, True if the last occurring text range should be returned instead of the first; otherwise False.
+        Return `TextRange` or None, a text range subset that has the specified text attribute value.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-findattribute
+        """
+        textRange = self.textRange.FindAttribute(textAttributeId, val, int(backward))
+        if textRange:
+            return TextRange(textRange=textRange)
+
+    def FindText(self, text: str, backward: bool, ignoreCase: bool) -> 'TextRange':
+        """
+        Call IUIAutomationTextRange::FindText.
+        text: str.
+        backward: bool, True if the last occurring text range should be returned instead of the first; otherwise False.
+        ignoreCase: bool, True if case should be ignored; otherwise False.
+        Return `TextRange` or None, a text range subset that contains the specified text.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-findtext
+        """
+        textRange = self.textRange.FindText(text, int(backward), int(ignoreCase))
+        if textRange:
+            return TextRange(textRange=textRange)
+
+    def GetAttributeValue(self, textAttributeId: int) -> ctypes.POINTER(comtypes.IUnknown):
+        """
+        Call IUIAutomationTextRange::GetAttributeValue.
+        textAttributeId: int, a value in class `TextAttributeId`.
+        Return `ctypes.POINTER(comtypes.IUnknown)` or None, the value of the specified text attribute across the entire text range, todo.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-getattributevalue
+        """
+        return self.textRange.GetAttributeValue(textAttributeId)
+
+    def GetBoundingRectangles(self) -> List[Rect]:
+        """
+        Call IUIAutomationTextRange::GetBoundingRectangles.
+        Return List[Rect], a list of `Rect`,
+            bounding rectangles for each fully or partially visible line of text in a text range.
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-getboundingrectangles + + for rect in textRange.GetBoundingRectangles(): + print(rect.left, rect.top, rect.right, rect.bottom, rect.width(), rect.height(), rect.xcenter(), rect.ycenter()) + """ + floats = self.textRange.GetBoundingRectangles() + rects = [] + for i in range(len(floats) // 4): + rect = Rect(int(floats[i * 4]), int(floats[i * 4 + 1]), + int(floats[i * 4]) + int(floats[i * 4 + 2]), int(floats[i * 4 + 1]) + int(floats[i * 4 + 3])) + rects.append(rect) + return rects + + def GetChildren(self) -> List['Control']: + """ + Call IUIAutomationTextRange::GetChildren. + textAttributeId: int, a value in class `TextAttributeId`. + Return List[Control], a list of `Control` subclasses, embedded objects that fall within the text range.. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-getchildren + """ + eleArray = self.textRange.GetChildren() + if eleArray: + controls = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + con = Control.CreateControlFromElement(element=ele) + if con: + controls.append(con) + return controls + return [] + + def GetEnclosingControl(self) -> 'Control': + """ + Call IUIAutomationTextRange::GetEnclosingElement. + Return `Control` subclass, the innermost UI Automation element that encloses the text range. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-getenclosingelement + """ + return Control.CreateControlFromElement(self.textRange.GetEnclosingElement()) + + def GetText(self, maxLength: int = -1) -> str: + """ + Call IUIAutomationTextRange::GetText. + maxLength: int, the maximum length of the string to return, or -1 if no limit is required. + Return str, the plain text of the text range. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-gettext + """ + return self.textRange.GetText(maxLength) + + def Move(self, unit: int, count: int, waitTime: float = OPERATION_WAIT_TIME) -> int: + """ + Call IUIAutomationTextRange::Move. + Move the text range forward or backward by the specified number of text units. + unit: int, a value in class `TextUnit`. + count: int, the number of text units to move. + A positive value moves the text range forward. + A negative value moves the text range backward. Zero has no effect. + waitTime: float. + Return: int, the number of text units actually moved. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-move + """ + ret = self.textRange.Move(unit, count) + time.sleep(waitTime) + return ret + + def MoveEndpointByRange(self, srcEndPoint: int, textRange: 'TextRange', targetEndPoint: int, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationTextRange::MoveEndpointByRange. + Move one endpoint of the current text range to the specified endpoint of a second text range. + srcEndPoint: int, a value in class `TextPatternRangeEndpoint`. + textRange: `TextRange`. + targetEndPoint: int, a value in class `TextPatternRangeEndpoint`. + waitTime: float. + Return bool, True if succeed otherwise False. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-moveendpointbyrange + """ + ret = self.textRange.MoveEndpointByRange(srcEndPoint, textRange.textRange, targetEndPoint) == S_OK + time.sleep(waitTime) + return ret + + def MoveEndpointByUnit(self, endPoint: int, unit: int, count: int, waitTime: float = OPERATION_WAIT_TIME) -> int: + """ + Call IUIAutomationTextRange::MoveEndpointByUnit. + Move one endpoint of the text range the specified number of text units within the document range. + endPoint: int, a value in class `TextPatternRangeEndpoint`. + unit: int, a value in class `TextUnit`. + count: int, the number of units to move. + A positive count moves the endpoint forward. + A negative count moves backward. + A count of 0 has no effect. + waitTime: float. + Return int, the count of units actually moved. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-moveendpointbyunit + """ + ret = self.textRange.MoveEndpointByUnit(endPoint, unit, count) + time.sleep(waitTime) + return ret + + def RemoveFromSelection(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationTextRange::RemoveFromSelection. + Remove the text range from an existing collection of selected text in a text container that supports multiple, disjoint selections. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-removefromselection + """ + ret = self.textRange.RemoveFromSelection() == S_OK + time.sleep(waitTime) + return ret + + def ScrollIntoView(self, alignTop: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationTextRange::ScrollIntoView. + Cause the text control to scroll until the text range is visible in the viewport. + alignTop: bool, True if the text control should be scrolled so that the text range is flush with the top of the viewport; + False if it should be flush with the bottom of the viewport. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-scrollintoview + """ + ret = self.textRange.ScrollIntoView(int(alignTop)) == S_OK + time.sleep(waitTime) + return ret + + def Select(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationTextRange::Select. + Select the span of text that corresponds to this text range, and remove any previous selection. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-select + """ + ret = self.textRange.Select() == S_OK + time.sleep(waitTime) + return ret + + +class TextChildPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtextchildpattern""" + self.pattern = pattern + + @property + def TextContainer(self) -> 'Control': + """ + Property TextContainer. + Call IUIAutomationSelectionContainer::get_TextContainer. + Return `Control` subclass, the nearest ancestor element that supports the Text control pattern. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextchildpattern-get_textcontainer + """ + return Control.CreateControlFromElement(self.pattern.TextContainer) + + @property + def TextRange(self) -> TextRange: + """ + Property TextRange. + Call IUIAutomationSelectionContainer::get_TextRange. + Return `TextRange`, a text range that encloses this child element. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextchildpattern-get_textrange + """ + return TextRange(self.pattern.TextRange) + + +class TextEditPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtexteditpattern""" + self.pattern = pattern + + def GetActiveComposition(self) -> TextRange: + """ + Call IUIAutomationTextEditPattern::GetActiveComposition. + Return `TextRange` or None, the active composition. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtexteditpattern-getactivecomposition + """ + textRange = self.pattern.GetActiveComposition() + if textRange: + return TextRange(textRange=textRange) + + def GetConversionTarget(self) -> TextRange: + """ + Call IUIAutomationTextEditPattern::GetConversionTarget. + Return `TextRange` or None, the current conversion target range.. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtexteditpattern-getconversiontarget + """ + textRange = self.pattern.GetConversionTarget() + if textRange: + return TextRange(textRange=textRange) + + +class TextPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtextpattern""" + self.pattern = pattern + + @property + def DocumentRange(self) -> TextRange: + """ + Property DocumentRange. + Call IUIAutomationTextPattern::get_DocumentRange. + Return `TextRange`, a text range that encloses the main text of a document. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-get_documentrange + """ + return TextRange(self.pattern.DocumentRange) + + @property + def SupportedTextSelection(self) -> bool: + """ + Property SupportedTextSelection. + Call IUIAutomationTextPattern::get_SupportedTextSelection. + Return bool, specifies the type of text selection that is supported by the control. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-get_supportedtextselection + """ + return bool(self.pattern.SupportedTextSelection) + + def GetSelection(self) -> List[TextRange]: + """ + Call IUIAutomationTextPattern::GetSelection. + Return List[TextRange], a list of `TextRange`, represents the currently selected text in a text-based control. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-getselection + """ + eleArray = self.pattern.GetSelection() + if eleArray: + textRanges = [] + for i in range(eleArray.Length): + ele = eleArray.GetElement(i) + textRanges.append(TextRange(textRange=ele)) + return textRanges + return [] + + def GetVisibleRanges(self) -> List[TextRange]: + """ + Call IUIAutomationTextPattern::GetVisibleRanges. 
+        Return List[TextRange], a list of `TextRange`, disjoint text ranges from a text-based control
+            where each text range represents a contiguous span of visible text.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-getvisibleranges
+        """
+        eleArray = self.pattern.GetVisibleRanges()
+        if eleArray:
+            textRanges = []
+            for i in range(eleArray.Length):
+                ele = eleArray.GetElement(i)
+                textRanges.append(TextRange(textRange=ele))
+            return textRanges
+        return []
+
+    def RangeFromChild(self, child) -> TextRange:
+        """
+        Call IUIAutomationTextPattern::RangeFromChild.
+        child: `Control` or its subclass.
+        Return `TextRange` or None, a text range enclosing a child element such as an image,
+            hyperlink, Microsoft Excel spreadsheet, or other embedded object.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-rangefromchild
+        """
+        textRange = self.pattern.RangeFromChild(child.Element)
+        if textRange:
+            return TextRange(textRange=textRange)
+
+    def RangeFromPoint(self, x: int, y: int) -> TextRange:
+        """
+        Call IUIAutomationTextPattern::RangeFromPoint.
+        x: int, in screen coordinates.
+        y: int, in screen coordinates.
+        Return `TextRange` or None, the degenerate (empty) text range nearest to the specified screen coordinates.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-rangefrompoint
+        """
+        textRange = self.pattern.RangeFromPoint(ctypes.wintypes.POINT(x, y))
+        if textRange:
+            return TextRange(textRange=textRange)
+
+
+class TextPattern2():
+    def __init__(self, pattern=None):
+        """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtextpattern2"""
+        self.pattern = pattern
+
+
+class TogglePattern():
+    def __init__(self, pattern=None):
+        """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtogglepattern"""
+        self.pattern = pattern
+
+    @property
+    def ToggleState(self) -> int:
+        """
+        Property ToggleState.
+        Call IUIAutomationTogglePattern::get_CurrentToggleState.
+        Return int, a value in class `ToggleState`, the state of the control.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtogglepattern-get_currenttogglestate
+        """
+        return self.pattern.CurrentToggleState
+
+    def Toggle(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:
+        """
+        Call IUIAutomationTogglePattern::Toggle.
+        Cycle through the toggle states of the control.
+        waitTime: float.
+        Return bool, True if succeed otherwise False.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtogglepattern-toggle
+        """
+        ret = self.pattern.Toggle() == S_OK
+        time.sleep(waitTime)
+        return ret
+
+
+class TransformPattern():
+    def __init__(self, pattern=None):
+        """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtransformpattern"""
+        self.pattern = pattern
+
+    @property
+    def CanMove(self) -> bool:
+        """
+        Property CanMove.
+        Call IUIAutomationTransformPattern::get_CurrentCanMove.
+        Return bool, indicates whether the element can be moved.
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-get_currentcanmove + """ + return bool(self.pattern.CurrentCanMove) + + @property + def CanResize(self) -> bool: + """ + Property CanResize. + Call IUIAutomationTransformPattern::get_CurrentCanResize. + Return bool, indicates whether the element can be resized. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-get_currentcanresize + """ + return bool(self.pattern.CurrentCanResize) + + @property + def CanRotate(self) -> bool: + """ + Property CanRotate. + Call IUIAutomationTransformPattern::get_CurrentCanRotate. + Return bool, indicates whether the element can be rotated. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-get_currentcanrotate + """ + return bool(self.pattern.CurrentCanRotate) + + def Move(self, x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationTransformPattern::Move. + Move the UI Automation element. + x: int. + y: int. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-move + """ + ret = self.pattern.Move(x, y) == S_OK + time.sleep(waitTime) + return ret + + def Resize(self, width: int, height: int, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationTransformPattern::Resize. + Resize the UI Automation element. + width: int. + height: int. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-resize + """ + ret = self.pattern.Resize(width, height) == S_OK + time.sleep(waitTime) + return ret + + def Rotate(self, degrees: int, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationTransformPattern::Rotate. + Rotates the UI Automation element. + degrees: int. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-rotate + """ + ret = self.pattern.Rotate(degrees) == S_OK + time.sleep(waitTime) + return ret + + +class TransformPattern2(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationtransformpattern2""" + self.pattern = pattern + + @property + def CanZoom(self) -> bool: + """ + Property CanZoom. + Call IUIAutomationTransformPattern2::get_CurrentCanZoom. + Return bool, indicates whether the control supports zooming of its viewport. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern2-get_CurrentCanZoom + """ + return bool(self.pattern.CurrentCanZoom) + + @property + def ZoomLevel(self) -> float: + """ + Property ZoomLevel. + Call IUIAutomationTransformPattern2::get_CurrentZoomLevel. + Return float, the zoom level of the control's viewport. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern2-get_currentzoomlevel + """ + return self.pattern.CurrentZoomLevel + + @property + def ZoomMaximum(self) -> float: + """ + Property ZoomMaximum. 
+        Call IUIAutomationTransformPattern2::get_CurrentZoomMaximum.
+        Return float, the maximum zoom level of the control's viewport.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern2-get_currentzoommaximum
+        """
+        return self.pattern.CurrentZoomMaximum
+
+    @property
+    def ZoomMinimum(self) -> float:
+        """
+        Property ZoomMinimum.
+        Call IUIAutomationTransformPattern2::get_CurrentZoomMinimum.
+        Return float, the minimum zoom level of the control's viewport.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern2-get_currentzoomminimum
+        """
+        return self.pattern.CurrentZoomMinimum
+
+    def Zoom(self, zoomLevel: float, waitTime: float = OPERATION_WAIT_TIME) -> bool:
+        """
+        Call IUIAutomationTransformPattern2::Zoom.
+        Zoom the viewport of the control.
+        zoomLevel: float or int.
+        waitTime: float.
+        Return bool, True if succeed otherwise False.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern2-zoom
+        """
+        ret = self.pattern.Zoom(zoomLevel) == S_OK
+        time.sleep(waitTime)
+        return ret
+
+    def ZoomByUnit(self, zoomUnit: int, waitTime: float = OPERATION_WAIT_TIME) -> bool:
+        """
+        Call IUIAutomationTransformPattern2::ZoomByUnit.
+        Zoom the viewport of the control by the specified unit.
+        zoomUnit: int, a value in class `ZoomUnit`.
+        waitTime: float.
+        Return bool, True if succeed otherwise False.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern2-zoombyunit
+        """
+        ret = self.pattern.ZoomByUnit(zoomUnit) == S_OK
+        time.sleep(waitTime)
+        return ret
+
+
+class ValuePattern():
+    def __init__(self, pattern=None):
+        """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationvaluepattern"""
+        self.pattern = pattern
+
+    @property
+    def IsReadOnly(self) -> bool:
+        """
+        Property IsReadOnly.
+        Call IUIAutomationValuePattern::get_CurrentIsReadOnly.
+        Return bool, indicates whether the value of the element is read-only.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationvaluepattern-get_currentisreadonly
+        """
+        return self.pattern.CurrentIsReadOnly
+
+    @property
+    def Value(self) -> str:
+        """
+        Property Value.
+        Call IUIAutomationValuePattern::get_CurrentValue.
+        Return str, the value of the element.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationvaluepattern-get_currentvalue
+        """
+        return self.pattern.CurrentValue
+
+    def SetValue(self, value: str, waitTime: float = OPERATION_WAIT_TIME) -> bool:
+        """
+        Call IUIAutomationValuePattern::SetValue.
+        Set the value of the element.
+        value: str.
+        waitTime: float.
+        Return bool, True if succeed otherwise False.
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationvaluepattern-setvalue + """ + ret = self.pattern.SetValue(value) == S_OK + time.sleep(waitTime) + return ret + + +class VirtualizedItemPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationvirtualizeditempattern""" + self.pattern = pattern + + def Realize(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationVirtualizedItemPattern::Realize. + Create a full UI Automation element for a virtualized item. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationvirtualizeditempattern-realize + """ + ret = self.pattern.Realize() == S_OK + time.sleep(waitTime) + return ret + + +class WindowPattern(): + def __init__(self, pattern=None): + """Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationwindowpattern""" + self.pattern = pattern + + def Close(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationWindowPattern::Close. + Close the window. + waitTime: float. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-close + """ + ret = self.pattern.Close() == S_OK + time.sleep(waitTime) + return ret + + @property + def CanMaximize(self) -> bool: + """ + Property CanMaximize. + Call IUIAutomationWindowPattern::get_CurrentCanMaximize. + Return bool, indicates whether the window can be maximized. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentcanmaximize + """ + return bool(self.pattern.CurrentCanMaximize) + + @property + def CanMinimize(self) -> bool: + """ + Property CanMinimize. + Call IUIAutomationWindowPattern::get_CurrentCanMinimize. + Return bool, indicates whether the window can be minimized. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentcanminimize + """ + return bool(self.pattern.CurrentCanMinimize) + + @property + def IsModal(self) -> bool: + """ + Property IsModal. + Call IUIAutomationWindowPattern::get_CurrentIsModal. + Return bool, indicates whether the window is modal. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentismodal + """ + return bool(self.pattern.CurrentIsModal) + + @property + def IsTopmost(self) -> bool: + """ + Property IsTopmost. + Call IUIAutomationWindowPattern::get_CurrentIsTopmost. + Return bool, indicates whether the window is the topmost element in the z-order. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentistopmost + """ + return bool(self.pattern.CurrentIsTopmost) + + @property + def WindowInteractionState(self) -> int: + """ + Property WindowInteractionState. + Call IUIAutomationWindowPattern::get_CurrentWindowInteractionState. + Return int, a value in class `WindowInteractionState`, + the current state of the window for the purposes of user interaction. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentwindowinteractionstate + """ + return self.pattern.CurrentWindowInteractionState + + @property + def WindowVisualState(self) -> int: + """ + Property WindowVisualState. + Call IUIAutomationWindowPattern::get_CurrentWindowVisualState. + Return int, a value in class `WindowVisualState`, + the visual state of the window; that is, whether it is in the normal, maximized, or minimized state. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentwindowvisualstate + """ + return self.pattern.CurrentWindowVisualState + + def SetWindowVisualState(self, state: int, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call IUIAutomationWindowPattern::SetWindowVisualState. + Minimize, maximize, or restore the window. + state: int, a value in class `WindowVisualState`. + waitTime: float. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-setwindowvisualstate + """ + ret = self.pattern.SetWindowVisualState(state) == S_OK + time.sleep(waitTime) + return ret + + def WaitForInputIdle(self, milliseconds: int) -> bool: + ''' + Call IUIAutomationWindowPattern::WaitForInputIdle. + Cause the calling code to block for the specified time or + until the associated process enters an idle state, whichever completes first. + milliseconds: int. + Return bool, True if succeed otherwise False. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-waitforinputidle + ''' + return self.pattern.WaitForInputIdle(milliseconds) == S_OK + + +PatternConstructors = { + PatternId.AnnotationPattern: AnnotationPattern, + PatternId.CustomNavigationPattern: CustomNavigationPattern, + PatternId.DockPattern: DockPattern, + PatternId.DragPattern: DragPattern, + PatternId.DropTargetPattern: DropTargetPattern, + PatternId.ExpandCollapsePattern: ExpandCollapsePattern, + PatternId.GridItemPattern: GridItemPattern, + PatternId.GridPattern: GridPattern, + PatternId.InvokePattern: InvokePattern, + PatternId.ItemContainerPattern: ItemContainerPattern, + PatternId.LegacyIAccessiblePattern: LegacyIAccessiblePattern, + PatternId.MultipleViewPattern: MultipleViewPattern, + PatternId.ObjectModelPattern: ObjectModelPattern, + PatternId.RangeValuePattern: RangeValuePattern, + PatternId.ScrollItemPattern: ScrollItemPattern, + PatternId.ScrollPattern: ScrollPattern, + PatternId.SelectionItemPattern: SelectionItemPattern, + PatternId.SelectionPattern: SelectionPattern, + PatternId.SpreadsheetItemPattern: SpreadsheetItemPattern, + PatternId.SpreadsheetPattern: SpreadsheetPattern, + PatternId.StylesPattern: StylesPattern, + PatternId.SynchronizedInputPattern: SynchronizedInputPattern, + PatternId.TableItemPattern: TableItemPattern, + PatternId.TablePattern: TablePattern, + PatternId.TextChildPattern: TextChildPattern, + PatternId.TextEditPattern: TextEditPattern, + PatternId.TextPattern: TextPattern, + PatternId.TextPattern2: TextPattern2, + PatternId.TogglePattern: TogglePattern, + PatternId.TransformPattern: TransformPattern, + PatternId.TransformPattern2: TransformPattern2, + PatternId.ValuePattern: ValuePattern, + PatternId.VirtualizedItemPattern: VirtualizedItemPattern, + PatternId.WindowPattern: WindowPattern, +} + + +def 
CreatePattern(patternId: int, pattern: ctypes.POINTER(comtypes.IUnknown)):
+    """Create a concrete pattern by pattern id and pattern(POINTER(IUnknown))."""
+    subPattern = pattern.QueryInterface(GetPatternIdInterface(patternId))
+    if subPattern:
+        return PatternConstructors[patternId](pattern=subPattern)
+
+
+class Control():
+    ValidKeys = set(['ControlType', 'ClassName', 'AutomationId', 'Name', 'SubName', 'RegexName', 'Depth', 'Compare'])
+
+    def __init__(self, searchFromControl: 'Control' = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties):
+        """
+        searchFromControl: `Control` or its subclass, if it is None, search from root control(Desktop).
+        searchDepth: int, max search depth from searchFromControl.
+        foundIndex: int, starts with 1, >= 1.
+        searchInterval: float, wait searchInterval after every search in self.Refind and self.Exists, the global timeout is TIME_OUT_SECOND.
+        element: `ctypes.POINTER(IUIAutomationElement)`, internal use only.
+        searchProperties: defines how to search, the following keys can be used:
+            ControlType: int, a value in class `ControlType`.
+            ClassName: str.
+            AutomationId: str.
+            Name: str.
+            SubName: str, a part str in Name.
+            RegexName: str, supports regex using re.match.
+                You can only use one of Name, SubName, RegexName in searchProperties.
+            Depth: int, only search controls in relative depth from searchFromControl, ignore controls in depth(0~Depth-1),
+                if set, searchDepth will be set to Depth too.
+            Compare: Callable[[Control, int], bool], custom compare function(control: Control, depth: int) -> bool.
+
+        `Control` wraps IUIAutomationElement.
+        Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationelement
+        """
+        self._element = element
+        self._elementDirectAssign = True if element else False
+        self.searchFromControl = searchFromControl
+        self.searchDepth = searchProperties.get('Depth', searchDepth)
+        self.searchInterval = searchInterval
+        self.foundIndex = foundIndex
+        self.searchProperties = searchProperties
+        regName = searchProperties.get('RegexName', '')
+        self.regexName = re.compile(regName) if regName else None
+        self._supportedPatterns = {}
+
+    def __str__(self) -> str:
+        rect = self.BoundingRectangle
+        return 'ControlType: {0} ClassName: {1} AutomationId: {2} Rect: {3} Name: {4} Handle: 0x{5:X}({5})'.format(
+            self.ControlTypeName, self.ClassName, self.AutomationId, rect, self.Name, self.NativeWindowHandle)
+
+    @staticmethod
+    def CreateControlFromElement(element) -> 'Control':
+        """
+        Create a concrete `Control` from a com type `IUIAutomationElement`.
+        element: `ctypes.POINTER(IUIAutomationElement)`.
+        Return a subclass of `Control`, an instance of the control's real type.
+        """
+        if element:
+            controlType = element.CurrentControlType
+            if controlType in ControlConstructors:
+                return ControlConstructors[controlType](element=element)
+            else:
+                Logger.WriteLine("element.CurrentControlType returns {}, invalid ControlType!".format(controlType), ConsoleColor.Red)  # rarely happens
+
+    @staticmethod
+    def CreateControlFromControl(control: 'Control') -> 'Control':
+        """
+        Create a concrete `Control` from a control instance, copy it.
+        control: `Control` or its subclass.
+        Return a subclass of `Control`, an instance of the control's real type.
+        For example: if control's ControlType is EditControl, return an EditControl.
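+
+        A minimal usage sketch, following this module's own API; which control you copy is arbitrary and only illustrative:
+            first = GetRootControl().GetFirstChildControl()
+            copied = Control.CreateControlFromControl(first)  # same underlying element, returned as its concrete subclass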
+ """ + newControl = Control.CreateControlFromElement(control.Element) + return newControl + + def SetSearchFromControl(self, searchFromControl: 'Control') -> None: + """searchFromControl: `Control` or its subclass""" + self.searchFromControl = searchFromControl + + def SetSearchDepth(self, searchDepth: int) -> None: + self.searchDepth = searchDepth + + def AddSearchProperties(self, **searchProperties) -> None: + """ + Add search properties using `dict.update`. + searchProperties: dict, same as searchProperties in `Control.__init__`. + """ + self.searchProperties.update(searchProperties) + if 'Depth' in searchProperties: + self.searchDepth = searchProperties['Depth'] + if 'RegexName' in searchProperties: + regName = searchProperties['RegexName'] + self.regexName = re.compile(regName) if regName else None + + def RemoveSearchProperties(self, **searchProperties) -> None: + """ + searchProperties: dict, same as searchProperties in `Control.__init__`. + """ + for key in searchProperties: + del self.searchProperties[key] + if key == 'RegexName': + self.regexName = None + + def GetSearchPropertiesStr(self) -> str: + strs = ['{}: {}'.format(k, ControlTypeNames[v] if k == 'ControlType' else repr(v)) for k, v in self.searchProperties.items()] + return '{' + ', '.join(strs) + '}' + + def GetColorfulSearchPropertiesStr(self, keyColor='DarkGreen', valueColor='DarkCyan') -> str: + """keyColor, valueColor: str, color name in class ConsoleColor""" + strs = ['{}: {}'.format(keyColor if k in Control.ValidKeys else 'DarkYellow', k, valueColor, + ControlTypeNames[v] if k == 'ControlType' else repr(v)) for k, v in self.searchProperties.items()] + return '{' + ', '.join(strs) + '}' + + # BuildUpdatedCache + # CachedAcceleratorKey + # CachedAccessKey + # CachedAriaProperties + # CachedAriaRole + # CachedAutomationId + # CachedBoundingRectangle + # CachedClassName + # CachedControlType + # CachedControllerFor + # CachedCulture + # CachedDescribedBy + # CachedFlowsTo + # CachedFrameworkId + # CachedHasKeyboardFocus + # CachedHelpText + # CachedIsContentElement + # CachedIsControlElement + # CachedIsDataValidForForm + # CachedIsEnabled + # CachedIsKeyboardFocusable + # CachedIsOffscreen + # CachedIsPassword + # CachedIsRequiredForForm + # CachedItemStatus + # CachedItemType + # CachedLabeledBy + # CachedLocalizedControlType + # CachedName + # CachedNativeWindowHandle + # CachedOrientation + # CachedProcessId + # CachedProviderDescription + + @property + def AcceleratorKey(self) -> str: + """ + Property AcceleratorKey. + Call IUIAutomationElement::get_CurrentAcceleratorKey. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentacceleratorkey + """ + return self.Element.CurrentAcceleratorKey + + @property + def AccessKey(self) -> str: + """ + Property AccessKey. + Call IUIAutomationElement::get_CurrentAccessKey. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentaccesskey + """ + return self.Element.CurrentAccessKey + + @property + def AriaProperties(self) -> str: + """ + Property AriaProperties. + Call IUIAutomationElement::get_CurrentAriaProperties. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentariaproperties + """ + return self.Element.CurrentAriaProperties + + @property + def AriaRole(self) -> str: + """ + Property AriaRole. 
+ Call IUIAutomationElement::get_CurrentAriaRole. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentariarole + """ + return self.Element.CurrentAriaRole + + @property + def AutomationId(self) -> str: + """ + Property AutomationId. + Call IUIAutomationElement::get_CurrentAutomationId. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentautomationid + """ + return self.Element.CurrentAutomationId + + @property + def BoundingRectangle(self) -> Rect: + """ + Property BoundingRectangle. + Call IUIAutomationElement::get_CurrentBoundingRectangle. + Return `Rect`. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentboundingrectangle + + rect = control.BoundingRectangle + print(rect.left, rect.top, rect.right, rect.bottom, rect.width(), rect.height(), rect.xcenter(), rect.ycenter()) + """ + rect = self.Element.CurrentBoundingRectangle + return Rect(rect.left, rect.top, rect.right, rect.bottom) + + @property + def ClassName(self) -> str: + """ + Property ClassName. + Call IUIAutomationElement::get_CurrentClassName. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentclassname + """ + return self.Element.CurrentClassName + + @property + def ControlType(self) -> int: + """ + Property ControlType. + Return int, a value in class `ControlType`. + Call IUIAutomationElement::get_CurrentControlType. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentcontroltype + """ + return self.Element.CurrentControlType + + # @property + # def ControllerFor(self): + # return self.Element.CurrentControllerFor + + @property + def Culture(self) -> int: + """ + Property Culture. + Call IUIAutomationElement::get_CurrentCulture. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentculture + """ + return self.Element.CurrentCulture + + # @property + # def DescribedBy(self): + # return self.Element.CurrentDescribedBy + + # @property + # def FlowsTo(self): + # return self.Element.CurrentFlowsTo + + @property + def FrameworkId(self) -> str: + """ + Property FrameworkId. + Call IUIAutomationElement::get_CurrentFrameworkId. + Return str, such as Win32, WPF... + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentframeworkid + """ + return self.Element.CurrentFrameworkId + + @property + def HasKeyboardFocus(self) -> bool: + """ + Property HasKeyboardFocus. + Call IUIAutomationElement::get_CurrentHasKeyboardFocus. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currenthaskeyboardfocus + """ + return bool(self.Element.CurrentHasKeyboardFocus) + + @property + def HelpText(self) -> str: + """ + Property HelpText. + Call IUIAutomationElement::get_CurrentHelpText. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currenthelptext + """ + return self.Element.CurrentHelpText + + @property + def IsContentElement(self) -> bool: + """ + Property IsContentElement. + Call IUIAutomationElement::get_CurrentIsContentElement. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentiscontentelement + """ + return bool(self.Element.CurrentIsContentElement) + + @property + def IsControlElement(self) -> bool: + """ + Property IsControlElement. + Call IUIAutomationElement::get_CurrentIsControlElement. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentiscontrolelement + """ + return bool(self.Element.CurrentIsControlElement) + + @property + def IsDataValidForForm(self) -> bool: + """ + Property IsDataValidForForm. + Call IUIAutomationElement::get_CurrentIsDataValidForForm. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentisdatavalidforform + """ + return bool(self.Element.CurrentIsDataValidForForm) + + @property + def IsEnabled(self) -> bool: + """ + Property IsEnabled. + Call IUIAutomationElement::get_CurrentIsEnabled. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentisenabled + """ + return self.Element.CurrentIsEnabled + + @property + def IsKeyboardFocusable(self) -> bool: + """ + Property IsKeyboardFocusable. + Call IUIAutomationElement::get_CurrentIsKeyboardFocusable. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentiskeyboardfocusable + """ + return self.Element.CurrentIsKeyboardFocusable + + @property + def IsOffscreen(self) -> bool: + """ + Property IsOffscreen. + Call IUIAutomationElement::get_CurrentIsOffscreen. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentisoffscreen + """ + return self.Element.CurrentIsOffscreen + + @property + def IsPassword(self) -> bool: + """ + Property IsPassword. + Call IUIAutomationElement::get_CurrentIsPassword. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentispassword + """ + return self.Element.CurrentIsPassword + + @property + def IsRequiredForForm(self) -> bool: + """ + Property IsRequiredForForm. + Call IUIAutomationElement::get_CurrentIsRequiredForForm. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentisrequiredforform + """ + return self.Element.CurrentIsRequiredForForm + + @property + def ItemStatus(self) -> str: + """ + Property ItemStatus. + Call IUIAutomationElement::get_CurrentItemStatus. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentitemstatus + """ + return self.Element.CurrentItemStatus + + @property + def ItemType(self) -> str: + """ + Property ItemType. + Call IUIAutomationElement::get_CurrentItemType. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentitemtype + """ + return self.Element.CurrentItemType + + # @property + # def LabeledBy(self): + # return self.Element.CurrentLabeledBy + + @property + def LocalizedControlType(self) -> str: + """ + Property LocalizedControlType. + Call IUIAutomationElement::get_CurrentLocalizedControlType. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentlocalizedcontroltype + """ + return self.Element.CurrentLocalizedControlType + + @property + def Name(self) -> str: + """ + Property Name. + Call IUIAutomationElement::get_CurrentName. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentname + """ + return self.Element.CurrentName or '' # CurrentName may be None + + @property + def NativeWindowHandle(self) -> str: + """ + Property NativeWindowHandle. + Call IUIAutomationElement::get_CurrentNativeWindowHandle. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentnativewindowhandle + """ + handle = self.Element.CurrentNativeWindowHandle + return 0 if handle is None else handle + + @property + def Orientation(self) -> int: + """ + Property Orientation. + Return int, a value in class `OrientationType`. + Call IUIAutomationElement::get_CurrentOrientation. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentorientation + """ + return self.Element.CurrentOrientation + + @property + def ProcessId(self) -> int: + """ + Property ProcessId. + Call IUIAutomationElement::get_CurrentProcessId. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentprocessid + """ + return self.Element.CurrentProcessId + + @property + def ProviderDescription(self) -> str: + """ + Property ProviderDescription. + Call IUIAutomationElement::get_CurrentProviderDescription. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentproviderdescription + """ + return self.Element.CurrentProviderDescription + + # FindAll + # FindAllBuildCache + # FindFirst + # FindFirstBuildCache + # GetCachedChildren + # GetCachedParent + # GetCachedPattern + # GetCachedPatternAs + # GetCachedPropertyValue + # GetCachedPropertyValueEx + + def GetClickablePoint(self) -> Tuple[int, int, bool]: + """ + Call IUIAutomationElement::GetClickablePoint. + Return Tuple[int, int, bool], three items tuple (x, y, gotClickable), such as (20, 10, True) + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-getclickablepoint + """ + point, gotClickable = self.Element.GetClickablePoint() + return (point.x, point.y, bool(gotClickable)) + + def GetPattern(self, patternId: int): + """ + Call IUIAutomationElement::GetCurrentPattern. + Get a new pattern by pattern id if it supports the pattern. + patternId: int, a value in class `PatternId`. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-getcurrentpattern + """ + try: + pattern = self.Element.GetCurrentPattern(patternId) + if pattern: + subPattern = CreatePattern(patternId, pattern) + self._supportedPatterns[patternId] = subPattern + return subPattern + except comtypes.COMError as ex: + pass + + def GetPatternAs(self, patternId: int, riid): + """ + Call IUIAutomationElement::GetCurrentPatternAs. + Get a new pattern by pattern id if it supports the pattern, todo. + patternId: int, a value in class `PatternId`. + riid: GUID. 
+ Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-getcurrentpatternas + """ + return self.Element.GetCurrentPatternAs(patternId, riid) + + def GetPropertyValue(self, propertyId: int) -> Any: + """ + Call IUIAutomationElement::GetCurrentPropertyValue. + propertyId: int, a value in class `PropertyId`. + Return Any, corresponding type according to propertyId. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-getcurrentpropertyvalue + """ + return self.Element.GetCurrentPropertyValue(propertyId) + + def GetPropertyValueEx(self, propertyId: int, ignoreDefaultValue: int) -> Any: + """ + Call IUIAutomationElement::GetCurrentPropertyValueEx. + propertyId: int, a value in class `PropertyId`. + ignoreDefaultValue: int, 0 or 1. + Return Any, corresponding type according to propertyId. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-getcurrentpropertyvalueex + """ + return self.Element.GetCurrentPropertyValueEx(propertyId, ignoreDefaultValue) + + def GetRuntimeId(self) -> List[int]: + """ + Call IUIAutomationElement::GetRuntimeId. + Return List[int], a list of int. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-getruntimeid + """ + return self.Element.GetRuntimeId() + + # QueryInterface + # Release + + def SetFocus(self) -> bool: + """ + Call IUIAutomationElement::SetFocus. + Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-setfocus + """ + try: + return self.Element.SetFocus() == S_OK + except comtypes.COMError as ex: + return False + + @property + def Element(self): + """ + Property Element. + Return `ctypes.POINTER(IUIAutomationElement)`. + """ + if not self._element: + self.Refind(maxSearchSeconds=TIME_OUT_SECOND, searchIntervalSeconds=self.searchInterval) + return self._element + + @property + def ControlTypeName(self) -> str: + """ + Property ControlTypeName. + """ + return ControlTypeNames[self.ControlType] + + def GetCachedPattern(self, patternId: int, cache: bool): + """ + Get a pattern by patternId. + patternId: int, a value in class `PatternId`. + Return a pattern if it supports the pattern else None. + cache: bool, if True, store the pattern for later use, if False, get a new pattern by `self.GetPattern`. + """ + if cache: + pattern = self._supportedPatterns.get(patternId, None) + if pattern: + return pattern + else: + pattern = self.GetPattern(patternId) + if pattern: + self._supportedPatterns[patternId] = pattern + return pattern + else: + pattern = self.GetPattern(patternId) + if pattern: + self._supportedPatterns[patternId] = pattern + return pattern + + def GetLegacyIAccessiblePattern(self) -> LegacyIAccessiblePattern: + """ + Return `LegacyIAccessiblePattern` if it supports the pattern else None. + """ + return self.GetPattern(PatternId.LegacyIAccessiblePattern) + + def GetAncestorControl(self, condition: Callable[['Control', int], bool]) -> 'Control': + """ + Get an ancestor control that matches the condition. + condition: Callable[[Control, int], bool], function(control: Control, depth: int) -> bool, + depth starts with -1 and decreses when search goes up. + Return `Control` subclass or None. 
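+
+        A minimal sketch of a condition; it assumes `ControlType.WindowControl` from this module, and the starting control is only illustrative:
+            window = control.GetAncestorControl(lambda c, depth: c.ControlType == ControlType.WindowControl)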
+ """ + ancestor = self + depth = 0 + while True: + ancestor = ancestor.GetParentControl() + depth -= 1 + if ancestor: + if condition(ancestor, depth): + return ancestor + else: + break + + def GetParentControl(self) -> 'Control': + """ + Return `Control` subclass or None. + """ + ele = _AutomationClient.instance().ViewWalker.GetParentElement(self.Element) + return Control.CreateControlFromElement(ele) + + def GetFirstChildControl(self) -> 'Control': + """ + Return `Control` subclass or None. + """ + ele = _AutomationClient.instance().ViewWalker.GetFirstChildElement(self.Element) + return Control.CreateControlFromElement(ele) + + def GetLastChildControl(self) -> 'Control': + """ + Return `Control` subclass or None. + """ + ele = _AutomationClient.instance().ViewWalker.GetLastChildElement(self.Element) + return Control.CreateControlFromElement(ele) + + def GetNextSiblingControl(self) -> 'Control': + """ + Return `Control` subclass or None. + """ + ele = _AutomationClient.instance().ViewWalker.GetNextSiblingElement(self.Element) + return Control.CreateControlFromElement(ele) + + def GetPreviousSiblingControl(self) -> 'Control': + """ + Return `Control` subclass or None. + """ + ele = _AutomationClient.instance().ViewWalker.GetPreviousSiblingElement(self.Element) + return Control.CreateControlFromElement(ele) + + def GetSiblingControl(self, condition: Callable[['Control'], bool], forward: bool = True) -> 'Control': + """ + Get a sibling control that matches the condition. + forward: bool, if True, only search next siblings, if False, search pervious siblings first, then search next siblings. + condition: Callable[[Control], bool], function(control: Control) -> bool. + Return `Control` subclass or None. + """ + if not forward: + prev = self + while True: + prev = prev.GetPreviousSiblingControl() + if prev: + if condition(prev): + return prev + else: + break + next_ = self + while True: + next_ = next_.GetNextSiblingControl() + if next_: + if condition(next_): + return next_ + else: + break + + def GetChildren(self) -> List['Control']: + """ + Return List[Control], a list of `Control` subclasses. + """ + children = [] + child = self.GetFirstChildControl() + while child: + children.append(child) + child = child.GetNextSiblingControl() + return children + + def _CompareFunction(self, control: 'Control', depth: int) -> bool: + """ + Define how to search. + control: `Control` or its subclass. + depth: int, tree depth from searchFromControl. + Return bool. + """ + compareFunc = None + for key, value in self.searchProperties.items(): + if 'ControlType' == key: + if value != control.ControlType: + return False + elif 'ClassName' == key: + if value != control.ClassName: + return False + elif 'AutomationId' == key: + if value != control.AutomationId: + return False + elif 'Depth' == key: + if value != depth: + return False + elif 'Name' == key: + if value != control.Name: + return False + elif 'SubName' == key: + if value not in control.Name: + return False + elif 'RegexName' == key: + if not self.regexName.match(control.Name): + return False + elif 'Compare' == key: + compareFunc = value + # use Compare at last + if compareFunc and not compareFunc(control, depth): + return False + return True + + def Exists(self, maxSearchSeconds: float = 5, searchIntervalSeconds: float = SEARCH_INTERVAL, printIfNotExist: bool = False) -> bool: + """ + maxSearchSeconds: float + searchIntervalSeconds: float + Find control every searchIntervalSeconds seconds in maxSearchSeconds seconds. 
+ Return bool, True if find + """ + if self._element and self._elementDirectAssign: + # if element is directly assigned, not by searching, just check whether self._element is valid + # but I can't find an API in UIAutomation that can directly check + rootElement = GetRootControl().Element + if self._element == rootElement: + return True + else: + parentElement = _AutomationClient.instance().ViewWalker.GetParentElement(self._element) + if parentElement: + return True + else: + return False + # find the element + if len(self.searchProperties) == 0: + raise LookupError("control's searchProperties must not be empty!") + self._element = None + startTime = ProcessTime() + # Use same timeout(s) parameters for resolve all parents + prev = self.searchFromControl + if prev and not prev._element and not prev.Exists(maxSearchSeconds, searchIntervalSeconds): + if printIfNotExist or DEBUG_EXIST_DISAPPEAR: + Logger.ColorfullyLog(self.GetColorfulSearchPropertiesStr() + ' does not exist.') + return False + startTime2 = ProcessTime() + if DEBUG_SEARCH_TIME: + startDateTime = datetime.datetime.now() + while True: + control = FindControl(self.searchFromControl, self._CompareFunction, self.searchDepth, False, self.foundIndex) + if control: + self._element = control.Element + control._element = 0 # control will be destroyed, but the element needs to be stroed in self._element + if DEBUG_SEARCH_TIME: + Logger.ColorfullyLog('{} TraverseControls: {}, SearchTime: {:.3f}s[{} - {}]'.format( + self.GetColorfulSearchPropertiesStr(), control.traverseCount, ProcessTime() - startTime2, + startDateTime.time(), datetime.datetime.now().time())) + return True + else: + remain = startTime + maxSearchSeconds - ProcessTime() + if remain > 0: + time.sleep(min(remain, searchIntervalSeconds)) + else: + if printIfNotExist or DEBUG_EXIST_DISAPPEAR: + Logger.ColorfullyLog(self.GetColorfulSearchPropertiesStr() + ' does not exist.') + return False + + def Disappears(self, maxSearchSeconds: float = 5, searchIntervalSeconds: float = SEARCH_INTERVAL, printIfNotDisappear: bool = False) -> bool: + """ + maxSearchSeconds: float + searchIntervalSeconds: float + Check if control disappears every searchIntervalSeconds seconds in maxSearchSeconds seconds. + Return bool, True if control disappears. + """ + global DEBUG_EXIST_DISAPPEAR + start = ProcessTime() + while True: + temp = DEBUG_EXIST_DISAPPEAR + DEBUG_EXIST_DISAPPEAR = False # do not print for Exists + if not self.Exists(0, 0, False): + DEBUG_EXIST_DISAPPEAR = temp + return True + DEBUG_EXIST_DISAPPEAR = temp + remain = start + maxSearchSeconds - ProcessTime() + if remain > 0: + time.sleep(min(remain, searchIntervalSeconds)) + else: + if printIfNotDisappear or DEBUG_EXIST_DISAPPEAR: + Logger.ColorfullyLog(self.GetColorfulSearchPropertiesStr() + ' does not disappear.') + return False + + def Refind(self, maxSearchSeconds: float = TIME_OUT_SECOND, searchIntervalSeconds: float = SEARCH_INTERVAL, raiseException: bool = True) -> bool: + """ + Refind the control every searchIntervalSeconds seconds in maxSearchSeconds seconds. + maxSearchSeconds: float. + searchIntervalSeconds: float. + raiseException: bool, if True, raise a LookupError if timeout. + Return bool, True if find. 
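+ Example (a sketch; the window title 'Untitled - Notepad' is only an assumption for illustration):
+ edit = WindowControl(searchDepth=1, Name='Untitled - Notepad').EditControl()
+ if not edit.Exists(maxSearchSeconds=3):
+ edit.Refind(maxSearchSeconds=10) # raises LookupError if the control is still not found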
+ """ + if not self.Exists(maxSearchSeconds, searchIntervalSeconds, False if raiseException else DEBUG_EXIST_DISAPPEAR): + if raiseException: + Logger.ColorfullyLog('Find Control Timeout: ' + self.GetColorfulSearchPropertiesStr()) + raise LookupError('Find Control Timeout: ' + self.GetSearchPropertiesStr()) + else: + return False + return True + + def MoveCursorToInnerPos(self, x: int = None, y: int = None, ratioX: float = 0.5, ratioY: float = 0.5, simulateMove: bool = True) -> Tuple[int, int]: + """ + Move cursor to control's internal position, default to center. + x: int, if < 0, move to self.BoundingRectangle.right + x, if not None, ignore ratioX. + y: int, if < 0, move to self.BoundingRectangle.bottom + y, if not None, ignore ratioY. + ratioX: float. + ratioY: float. + simulateMove: bool. + Return Tuple[int, int], two ints tuple (x, y), the cursor positon relative to screen(0, 0) + after moving or None if control's width or height is 0. + """ + rect = self.BoundingRectangle + if rect.width() == 0 or rect.height() == 0: + Logger.ColorfullyLog('Can not move cursor. {}\'s BoundingRectangle is {}. SearchProperties: {}'.format( + self.ControlTypeName, rect, self.GetColorfulSearchPropertiesStr())) + return + if x is None: + x = rect.left + int(rect.width() * ratioX) + else: + x = (rect.left if x >= 0 else rect.right) + x + if y is None: + y = rect.top + int(rect.height() * ratioY) + else: + y = (rect.top if y >= 0 else rect.bottom) + y + if simulateMove and MAX_MOVE_SECOND > 0: + MoveTo(x, y, waitTime=0) + else: + SetCursorPos(x, y) + return x, y + + def MoveCursorToMyCenter(self, simulateMove: bool = True) -> Tuple[int, int]: + """ + Move cursor to control's center. + Return Tuple[int, int], two ints tuple (x, y), the cursor positon relative to screen(0, 0) after moving. + """ + return self.MoveCursorToInnerPos(simulateMove=simulateMove) + + def Click(self, x: int = None, y: int = None, ratioX: float = 0.5, ratioY: float = 0.5, simulateMove: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + x: int, if < 0, click self.BoundingRectangle.right + x, if not None, ignore ratioX. + y: int, if < 0, click self.BoundingRectangle.bottom + y, if not None, ignore ratioY. + ratioX: float. + ratioY: float. + simulateMove: bool, if True, first move cursor to control smoothly. + waitTime: float. + + Click(), Click(ratioX=0.5, ratioY=0.5): click center. + Click(10, 10): click left+10, top+10. + Click(-10, -10): click right-10, bottom-10. + """ + point = self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove) + if point: + Click(point[0], point[1], waitTime) + + def MiddleClick(self, x: int = None, y: int = None, ratioX: float = 0.5, ratioY: float = 0.5, simulateMove: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + x: int, if < 0, middle click self.BoundingRectangle.right + x, if not None, ignore ratioX. + y: int, if < 0, middle click self.BoundingRectangle.bottom + y, if not None, ignore ratioY. + ratioX: float. + ratioY: float. + simulateMove: bool, if True, first move cursor to control smoothly. + waitTime: float. + + MiddleClick(), MiddleClick(ratioX=0.5, ratioY=0.5): middle click center. + MiddleClick(10, 10): middle click left+10, top+10. + MiddleClick(-10, -10): middle click right-10, bottom-10. 
+ """ + point = self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove) + if point: + MiddleClick(point[0], point[1], waitTime) + + def RightClick(self, x: int = None, y: int = None, ratioX: float = 0.5, ratioY: float = 0.5, simulateMove: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + x: int, if < 0, right click self.BoundingRectangle.right + x, if not None, ignore ratioX. + y: int, if < 0, right click self.BoundingRectangle.bottom + y, if not None, ignore ratioY. + ratioX: float. + ratioY: float. + simulateMove: bool, if True, first move cursor to control smoothly. + waitTime: float. + + RightClick(), RightClick(ratioX=0.5, ratioY=0.5): right click center. + RightClick(10, 10): right click left+10, top+10. + RightClick(-10, -10): right click right-10, bottom-10. + """ + point = self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove) + if point: + RightClick(point[0], point[1], waitTime) + + def DoubleClick(self, x: int = None, y: int = None, ratioX: float = 0.5, ratioY: float = 0.5, simulateMove: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + x: int, if < 0, right click self.BoundingRectangle.right + x, if not None, ignore ratioX. + y: int, if < 0, right click self.BoundingRectangle.bottom + y, if not None, ignore ratioY. + ratioX: float. + ratioY: float. + simulateMove: bool, if True, first move cursor to control smoothly. + waitTime: float. + + DoubleClick(), DoubleClick(ratioX=0.5, ratioY=0.5): double click center. + DoubleClick(10, 10): double click left+10, top+10. + DoubleClick(-10, -10): double click right-10, bottom-10. + """ + x, y = self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove) + Click(x, y, GetDoubleClickTime() * 1.0 / 2000) + Click(x, y, waitTime) + + def DragDrop(self, x1: int, y1: int, x2: int, y2: int, moveSpeed: float = 1, waitTime: float = OPERATION_WAIT_TIME) -> None: + rect = self.BoundingRectangle + if rect.width() == 0 or rect.height() == 0: + Logger.ColorfullyLog('Can not move cursor. {}\'s BoundingRectangle is {}. SearchProperties: {}'.format( + self.ControlTypeName, rect, self.GetColorfulSearchPropertiesStr())) + return + x1 = (rect.left if x1 >= 0 else rect.right) + x1 + y1 = (rect.top if y1 >= 0 else rect.bottom) + y1 + x2 = (rect.left if x2 >= 0 else rect.right) + x2 + y2 = (rect.top if y2 >= 0 else rect.bottom) + y2 + DragDrop(x1, y1, x2, y2, moveSpeed, waitTime) + + def RightDragDrop(self, x1: int, y1: int, x2: int, y2: int, moveSpeed: float = 1, waitTime: float = OPERATION_WAIT_TIME) -> None: + rect = self.BoundingRectangle + if rect.width() == 0 or rect.height() == 0: + Logger.ColorfullyLog('Can not move cursor. {}\'s BoundingRectangle is {}. SearchProperties: {}'.format( + self.ControlTypeName, rect, self.GetColorfulSearchPropertiesStr())) + return + x1 = (rect.left if x1 >= 0 else rect.right) + x1 + y1 = (rect.top if y1 >= 0 else rect.bottom) + y1 + x2 = (rect.left if x2 >= 0 else rect.right) + x2 + y2 = (rect.top if y2 >= 0 else rect.bottom) + y2 + RightDragDrop(x1, y1, x2, y2, moveSpeed, waitTime) + + def WheelDown(self, x: int = None, y: int = None, ratioX: float = 0.5, ratioY: float = 0.5, wheelTimes: int = 1, interval: float = 0.05, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Make control have focus first, move cursor to the specified position and mouse wheel down. + x: int, if < 0, move x cursor to self.BoundingRectangle.right + x, if not None, ignore ratioX. + y: int, if < 0, move y cursor to self.BoundingRectangle.bottom + y, if not None, ignore ratioY. 
+ ratioX: float. + ratioY: float. + wheelTimes: int. + interval: float. + waitTime: float. + """ + cursorX, cursorY = GetCursorPos() + self.SetFocus() + self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove=False) + WheelDown(wheelTimes, interval, waitTime) + SetCursorPos(cursorX, cursorY) + + def WheelUp(self, x: int = None, y: int = None, ratioX: float = 0.5, ratioY: float = 0.5, wheelTimes: int = 1, interval: float = 0.05, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Make control have focus first, move cursor to the specified position and mouse wheel up. + x: int, if < 0, move x cursor to self.BoundingRectangle.right + x, if not None, ignore ratioX. + y: int, if < 0, move y cursor to self.BoundingRectangle.bottom + y, if not None, ignore ratioY. + ratioX: float. + ratioY: float. + wheelTimes: int. + interval: float. + waitTime: float. + """ + cursorX, cursorY = GetCursorPos() + self.SetFocus() + self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove=False) + WheelUp(wheelTimes, interval, waitTime) + SetCursorPos(cursorX, cursorY) + + def ShowWindow(self, cmdShow: int, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Get a native handle from self or ancestors until valid and call native `ShowWindow` with cmdShow. + cmdShow: int, a value in in class `SW`. + waitTime: float. + Return bool, True if succeed otherwise False. + """ + handle = self.NativeWindowHandle + if not handle: + control = self + while not handle: + control = control.GetParentControl() + handle = control.NativeWindowHandle + if handle: + ret = ShowWindow(handle, cmdShow) + time.sleep(waitTime) + return ret + + def Show(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call native `ShowWindow(SW.Show)`. + Return bool, True if succeed otherwise False. + """ + return self.ShowWindow(SW.Show, waitTime) + + def Hide(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Call native `ShowWindow(SW.Hide)`. + waitTime: float + Return bool, True if succeed otherwise False. + """ + return self.ShowWindow(SW.Hide, waitTime) + + def MoveWindow(self, x: int, y: int, width: int, height: int, repaint: bool = True) -> bool: + """ + Call native MoveWindow if control has a valid native handle. + x: int. + y: int. + width: int. + height: int. + repaint: bool. + Return bool, True if succeed otherwise False. + """ + handle = self.NativeWindowHandle + if handle: + return MoveWindow(handle, x, y, width, height, int(repaint)) + return False + + def GetWindowText(self) -> str: + """ + Call native GetWindowText if control has a valid native handle. + """ + handle = self.NativeWindowHandle + if handle: + return GetWindowText(handle) + + def SetWindowText(self, text: str) -> bool: + """ + Call native SetWindowText if control has a valid native handle. + """ + handle = self.NativeWindowHandle + if handle: + return SetWindowText(handle, text) + return False + + def SendKey(self, key: int, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Make control have focus first and type a key. + `self.SetFocus` may not work for some controls, you may need to click it to make it have focus. + key: int, a key code value in class Keys. + waitTime: float. + """ + self.SetFocus() + SendKey(key, waitTime) + + def SendKeys(self, text: str, interval: float = 0.01, waitTime: float = OPERATION_WAIT_TIME, charMode: bool = True) -> None: + """ + Make control have focus first and type keys. + `self.SetFocus` may not work for some controls, you may need to click it to make it have focus. 
+ text: str, keys to type, see the docstring of `SendKeys`.
+ interval: float, seconds between keys.
+ waitTime: float.
+ charMode: bool, if False, the typed text depends on the input method if an input method is active.
+ """
+ self.SetFocus()
+ SendKeys(text, interval, waitTime, charMode)
+
+ def GetPixelColor(self, x: int, y: int) -> int:
+ """
+ Call native `GetPixelColor` if control has a valid native handle.
+ Use `self.ToBitmap` if control doesn't have a valid native handle or if you need to read many pixels.
+ x: int, internal x position.
+ y: int, internal y position.
+ Return int, a color value in bgr.
+ r = bgr & 0x0000FF
+ g = (bgr & 0x00FF00) >> 8
+ b = (bgr & 0xFF0000) >> 16
+ """
+ handle = self.NativeWindowHandle
+ if handle:
+ return GetPixelColor(x, y, handle)
+
+ def ToBitmap(self, x: int = 0, y: int = 0, width: int = 0, height: int = 0) -> Bitmap:
+ """
+ Capture control to a `Bitmap` object.
+ x, y: int, the point in control's internal position(from 0,0).
+ width, height: int, image's width and height from x, y, use 0 for entire area.
+ If width(or height) < 0, image size will be control's width(or height) - width(or height).
+ """
+ return Bitmap.FromControl(self, x, y, width, height)
+
+ def CaptureToImage(self, savePath: str, x: int = 0, y: int = 0, width: int = 0, height: int = 0) -> bool:
+ """
+ Capture control to an image file.
+ savePath: str, should end with .bmp, .jpg, .jpeg, .png, .gif, .tif, .tiff.
+ x, y: int, the point in control's internal position(from 0,0).
+ width, height: int, image's width and height from x, y, use 0 for entire area.
+ If width(or height) < 0, image size will be control's width(or height) - width(or height).
+ Return bool, True if succeed otherwise False.
+ """
+ bitmap = Bitmap.FromControl(self, x, y, width, height)
+ if bitmap:
+ with bitmap:
+ return bitmap.ToFile(savePath)
+ return False
+
+ def IsTopLevel(self) -> bool:
+ """Determine whether current control is top level."""
+ handle = self.NativeWindowHandle
+ if handle:
+ return GetAncestor(handle, GAFlag.Root) == handle
+ return False
+
+ def GetTopLevelControl(self) -> 'Control':
+ """
+ Get the top level control that the current control belongs to.
+ If current control is top level, return self.
+ If current control is root control, return None.
+ Return `PaneControl` or `WindowControl` or None.
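+ Example (a sketch; `ctrl` stands for any control found inside an application window):
+ top = ctrl.GetTopLevelControl()
+ if top:
+ top.SetFocus()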
+ """ + handle = self.NativeWindowHandle + if handle: + topHandle = GetAncestor(handle, GAFlag.Root) + if topHandle: + if topHandle == handle: + return self + else: + return ControlFromHandle(topHandle) + else: + # self is root control + pass + else: + control = self + while True: + control = control.GetParentControl() + handle = control.NativeWindowHandle + if handle: + topHandle = GetAncestor(handle, GAFlag.Root) + return ControlFromHandle(topHandle) + + def Control(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'Control': + return Control(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ButtonControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ButtonControl': + return ButtonControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def CalendarControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'CalendarControl': + return CalendarControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def CheckBoxControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'CheckBoxControl': + return CheckBoxControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ComboBoxControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ComboBoxControl': + return ComboBoxControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def CustomControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'CustomControl': + return CustomControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def DataGridControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'DataGridControl': + return DataGridControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def DataItemControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'DataItemControl': + return DataItemControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def DocumentControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'DocumentControl': + return DocumentControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def EditControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'EditControl': + return EditControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, 
searchFromControl=self, **searchProperties) + + def GroupControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'GroupControl': + return GroupControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def HeaderControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'HeaderControl': + return HeaderControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def HeaderItemControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'HeaderItemControl': + return HeaderItemControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def HyperlinkControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'HyperlinkControl': + return HyperlinkControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ImageControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ImageControl': + return ImageControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ListControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ListControl': + return ListControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ListItemControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ListItemControl': + return ListItemControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def MenuControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'MenuControl': + return MenuControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def MenuBarControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'MenuBarControl': + return MenuBarControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def MenuItemControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'MenuItemControl': + return MenuItemControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def PaneControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'PaneControl': + return PaneControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ProgressBarControl(self, searchDepth=0xFFFFFFFF, 
searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ProgressBarControl': + return ProgressBarControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def RadioButtonControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'RadioButtonControl': + return RadioButtonControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ScrollBarControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ScrollBarControl': + return ScrollBarControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def SemanticZoomControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'SemanticZoomControl': + return SemanticZoomControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def SeparatorControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'SeparatorControl': + return SeparatorControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def SliderControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'SliderControl': + return SliderControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def SpinnerControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'SpinnerControl': + return SpinnerControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def SplitButtonControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'SplitButtonControl': + return SplitButtonControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def StatusBarControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'StatusBarControl': + return StatusBarControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def TabControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'TabControl': + return TabControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def TabItemControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'TabItemControl': + return TabItemControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def TableControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, 
foundIndex=1, element=0, **searchProperties) -> 'TableControl': + return TableControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def TextControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'TextControl': + return TextControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ThumbControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ThumbControl': + return ThumbControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def TitleBarControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'TitleBarControl': + return TitleBarControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ToolBarControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ToolBarControl': + return ToolBarControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def ToolTipControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'ToolTipControl': + return ToolTipControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def TreeControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'TreeControl': + return TreeControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def TreeItemControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'TreeItemControl': + return TreeItemControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + def WindowControl(self, searchDepth=0xFFFFFFFF, searchInterval=SEARCH_INTERVAL, foundIndex=1, element=0, **searchProperties) -> 'WindowControl': + return WindowControl(searchDepth=searchDepth, searchInterval=searchInterval, foundIndex=foundIndex, element=element, searchFromControl=self, **searchProperties) + + +class AppBarControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.AppBarControl) + + +class ButtonControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ButtonControl) + + def 
GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + def GetInvokePattern(self) -> InvokePattern: + """ + Return `InvokePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.InvokePattern) + + def GetTogglePattern(self) -> TogglePattern: + """ + Return `TogglePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TogglePattern) + + +class CalendarControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.CalendarControl) + + def GetGridPattern(self) -> GridPattern: + """ + Return `GridPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.GridPattern) + + def GetTablePattern(self) -> TablePattern: + """ + Return `TablePattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.TablePattern) + + def GetScrollPattern(self) -> ScrollPattern: + """ + Return `ScrollPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ScrollPattern) + + def GetSelectionPattern(self) -> SelectionPattern: + """ + Return `SelectionPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.SelectionPattern) + + +class CheckBoxControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.CheckBoxControl) + + def GetTogglePattern(self) -> TogglePattern: + """ + Return `TogglePattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.TogglePattern) + + +class ComboBoxControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ComboBoxControl) + + def GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + def GetSelectionPattern(self) -> SelectionPattern: + """ + Return `SelectionPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.SelectionPattern) + + def GetValuePattern(self) -> ValuePattern: + """ + Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN). 
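+ Example (a sketch; `comboBox` stands for a previously found ComboBoxControl, and the `Value` property is assumed from the ValuePattern wrapper):
+ vp = comboBox.GetValuePattern()
+ if vp:
+ print(vp.Value) # the currently selected text, if the pattern is supported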
+ """ + return self.GetPattern(PatternId.ValuePattern) + + def Select(self, itemName: str = '', condition: Callable[[str], bool] = None, simulateMove: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Show combobox's popup menu and select a item by name. + itemName: str. + condition: Callable[[str], bool], function(comboBoxItemName: str) -> bool, if condition is valid, ignore itemName. + waitTime: float. + Some comboboxs doesn't support SelectionPattern, here is a workaround. + This method tries to add selection support. + It may not work for some comboboxes, such as comboboxes in older Qt version. + If it doesn't work, you should write your own version Select, or it doesn't support selection at all. + """ + expandCollapsePattern = self.GetExpandCollapsePattern() + if expandCollapsePattern: + expandCollapsePattern.Expand() + else: + # Windows Form's ComboBoxControl doesn't support ExpandCollapsePattern + self.Click(x=-10, ratioY=0.5, simulateMove=simulateMove) + find = False + if condition: + listItemControl = self.ListItemControl(Compare=lambda c, d: condition(c.Name)) + else: + listItemControl = self.ListItemControl(Name=itemName) + if listItemControl.Exists(1): + scrollItemPattern = listItemControl.GetScrollItemPattern() + if scrollItemPattern: + scrollItemPattern.ScrollIntoView(waitTime=0.1) + listItemControl.Click(simulateMove=simulateMove, waitTime=waitTime) + find = True + else: + # some ComboBox's popup window is a child of root control + listControl = ListControl(searchDepth=1) + if listControl.Exists(1): + if condition: + listItemControl = listControl.ListItemControl(Compare=lambda c, d: condition(c.Name)) + else: + listItemControl = listControl.ListItemControl(Name=itemName) + if listItemControl.Exists(0, 0): + scrollItemPattern = listItemControl.GetScrollItemPattern() + if scrollItemPattern: + scrollItemPattern.ScrollIntoView(waitTime=0.1) + listItemControl.Click(simulateMove=simulateMove, waitTime=waitTime) + find = True + if not find: + Logger.ColorfullyLog('Can\'t find {} in ComboBoxControl or it does not support selection.'.format(itemName), ConsoleColor.Yellow) + if expandCollapsePattern: + expandCollapsePattern.Collapse(waitTime) + else: + self.Click(x=-10, ratioY=0.5, simulateMove=simulateMove, waitTime=waitTime) + return find + + +class CustomControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.CustomControl) + + +class DataGridControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.DataGridControl) + + def GetGridPattern(self) -> GridPattern: + """ + Return `GridPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.GridPattern) + + def GetScrollPattern(self) -> ScrollPattern: + """ + Return `ScrollPattern` if it supports the pattern else None(Conditional support according to MSDN). 
+ """ + return self.GetPattern(PatternId.ScrollPattern) + + def GetSelectionPattern(self) -> SelectionPattern: + """ + Return `SelectionPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.SelectionPattern) + + def GetTablePattern(self) -> TablePattern: + """ + Return `TablePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TablePattern) + + +class DataItemControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.DataItemControl) + + def GetSelectionItemPattern(self) -> SelectionItemPattern: + """ + Return `SelectionItemPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.SelectionItemPattern) + + def GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + def GetGridItemPattern(self) -> GridItemPattern: + """ + Return `GridItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.GridItemPattern) + + def GetScrollItemPattern(self) -> ScrollItemPattern: + """ + Return `ScrollItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ScrollItemPattern) + + def GetTableItemPattern(self) -> TableItemPattern: + """ + Return `TableItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TableItemPattern) + + def GetTogglePattern(self) -> TogglePattern: + """ + Return `TogglePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TogglePattern) + + def GetValuePattern(self) -> ValuePattern: + """ + Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ValuePattern) + + +class DocumentControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.DocumentControl) + + def GetTextPattern(self) -> TextPattern: + """ + Return `TextPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.TextPattern) + + def GetScrollPattern(self) -> ScrollPattern: + """ + Return `ScrollPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ScrollPattern) + + def GetValuePattern(self) -> ValuePattern: + """ + Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN). 
+ """ + return self.GetPattern(PatternId.ValuePattern) + + +class EditControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.EditControl) + + def GetRangeValuePattern(self) -> RangeValuePattern: + """ + Return `RangeValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.RangeValuePattern) + + def GetTextPattern(self) -> TextPattern: + """ + Return `TextPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TextPattern) + + def GetValuePattern(self) -> ValuePattern: + """ + Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ValuePattern) + + +class GroupControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.GroupControl) + + def GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + +class HeaderControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.HeaderControl) + + def GetTransformPattern(self) -> TransformPattern: + """ + Return `TransformPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TransformPattern) + + +class HeaderItemControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.HeaderItemControl) + + def GetInvokePattern(self) -> InvokePattern: + """ + Return `InvokePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.InvokePattern) + + def GetTransformPattern(self) -> TransformPattern: + """ + Return `TransformPattern` if it supports the pattern else None(Conditional support according to MSDN). 
+ """ + return self.GetPattern(PatternId.TransformPattern) + + +class HyperlinkControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.HyperlinkControl) + + def GetInvokePattern(self) -> InvokePattern: + """ + Return `InvokePattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.InvokePattern) + + def GetValuePattern(self) -> ValuePattern: + """ + Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ValuePattern) + + +class ImageControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ImageControl) + + def GetGridItemPattern(self) -> GridItemPattern: + """ + Return `GridItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.GridItemPattern) + + def GetTableItemPattern(self) -> TableItemPattern: + """ + Return `TableItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TableItemPattern) + + +class ListControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ListControl) + + def GetGridPattern(self) -> GridPattern: + """ + Return `GridPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.GridPattern) + + def GetMultipleViewPattern(self) -> MultipleViewPattern: + """ + Return `MultipleViewPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.MultipleViewPattern) + + def GetScrollPattern(self) -> ScrollPattern: + """ + Return `ScrollPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ScrollPattern) + + def GetSelectionPattern(self) -> SelectionPattern: + """ + Return `SelectionPattern` if it supports the pattern else None(Conditional support according to MSDN). 
+ """ + return self.GetPattern(PatternId.SelectionPattern) + + +class ListItemControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ListItemControl) + + def GetSelectionItemPattern(self) -> SelectionItemPattern: + """ + Return `SelectionItemPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.SelectionItemPattern) + + def GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + def GetGridItemPattern(self) -> GridItemPattern: + """ + Return `GridItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.GridItemPattern) + + def GetInvokePattern(self) -> InvokePattern: + """ + Return `InvokePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.InvokePattern) + + def GetScrollItemPattern(self) -> ScrollItemPattern: + """ + Return `ScrollItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ScrollItemPattern) + + def GetTogglePattern(self) -> TogglePattern: + """ + Return `TogglePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TogglePattern) + + def GetValuePattern(self) -> ValuePattern: + """ + Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ValuePattern) + + +class MenuControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.MenuControl) + + +class MenuBarControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.MenuBarControl) + + def GetDockPattern(self) -> DockPattern: + """ + Return `DockPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.DockPattern) + + def GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + def GetTransformPattern(self) -> TransformPattern: + """ + Return `TransformPattern` if it supports the pattern else None(Conditional support according to MSDN). 
+ """ + return self.GetPattern(PatternId.TransformPattern) + + +class MenuItemControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.MenuItemControl) + + def GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + def GetInvokePattern(self) -> InvokePattern: + """ + Return `InvokePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.InvokePattern) + + def GetSelectionItemPattern(self) -> SelectionItemPattern: + """ + Return `SelectionItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.SelectionItemPattern) + + def GetTogglePattern(self) -> TogglePattern: + """ + Return `TogglePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TogglePattern) + + +class TopLevel(): + """Class TopLevel""" + + def SetTopmost(self, isTopmost: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Set top level window topmost. + isTopmost: bool. + waitTime: float. + """ + if self.IsTopLevel(): + ret = SetWindowTopmost(self.NativeWindowHandle, isTopmost) + time.sleep(waitTime) + return ret + return False + + def IsTopmost(self) -> bool: + if self.IsTopLevel(): + WS_EX_TOPMOST = 0x00000008 + return bool(GetWindowLong(self.NativeWindowHandle, GWL.ExStyle) & WS_EX_TOPMOST) + return False + + def SwitchToThisWindow(self, waitTime: float = OPERATION_WAIT_TIME) -> None: + if self.IsTopLevel(): + SwitchToThisWindow(self.NativeWindowHandle) + time.sleep(waitTime) + + def Maximize(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Set top level window maximize. + """ + if self.IsTopLevel(): + return self.ShowWindow(SW.ShowMaximized, waitTime) + return False + + def IsMaximize(self) -> bool: + if self.IsTopLevel(): + return bool(IsZoomed(self.NativeWindowHandle)) + return False + + def Minimize(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + if self.IsTopLevel(): + return self.ShowWindow(SW.Minimize, waitTime) + return False + + def IsMinimize(self) -> bool: + if self.IsTopLevel(): + return bool(IsIconic(self.NativeWindowHandle)) + return False + + def Restore(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """ + Restore window to normal state. + Similar to SwitchToThisWindow. + """ + if self.IsTopLevel(): + return self.ShowWindow(SW.Restore, waitTime) + return False + + def MoveToCenter(self) -> bool: + """ + Move window to screen center. 
+ """ + if self.IsTopLevel(): + rect = self.BoundingRectangle + screenWidth, screenHeight = GetScreenSize() + x, y = (screenWidth - rect.width()) // 2, (screenHeight - rect.height()) // 2 + if x < 0: + x = 0 + if y < 0: + y = 0 + return SetWindowPos(self.NativeWindowHandle, SWP.HWND_Top, x, y, 0, 0, SWP.SWP_NoSize) + return False + + def SetActive(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: + """Set top level window active.""" + if self.IsTopLevel(): + handle = self.NativeWindowHandle + if IsIconic(handle): + ret = ShowWindow(handle, SW.Restore) + elif not IsWindowVisible(handle): + ret = ShowWindow(handle, SW.Show) + ret = SetForegroundWindow(handle) # may fail if foreground windows's process is not python + time.sleep(waitTime) + return ret + return False + + +class PaneControl(Control, TopLevel): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.PaneControl) + + def GetDockPattern(self) -> DockPattern: + """ + Return `DockPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.DockPattern) + + def GetScrollPattern(self) -> ScrollPattern: + """ + Return `ScrollPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ScrollPattern) + + def GetTransformPattern(self) -> TransformPattern: + """ + Return `TransformPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TransformPattern) + + +class ProgressBarControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ProgressBarControl) + + def GetRangeValuePattern(self) -> RangeValuePattern: + """ + Return `RangeValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.RangeValuePattern) + + def GetValuePattern(self) -> ValuePattern: + """ + Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ValuePattern) + + +class RadioButtonControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.RadioButtonControl) + + def GetSelectionItemPattern(self) -> SelectionItemPattern: + """ + Return `SelectionItemPattern` if it supports the pattern else None(Must support according to MSDN). 
+ """ + return self.GetPattern(PatternId.SelectionItemPattern) + + +class ScrollBarControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ScrollBarControl) + + def GetRangeValuePattern(self) -> RangeValuePattern: + """ + Return `RangeValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.RangeValuePattern) + + +class SemanticZoomControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.SemanticZoomControl) + + +class SeparatorControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.SeparatorControl) + + +class SliderControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.SliderControl) + + def GetRangeValuePattern(self) -> RangeValuePattern: + """ + Return `RangeValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.RangeValuePattern) + + def GetSelectionPattern(self) -> SelectionPattern: + """ + Return `SelectionPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.SelectionPattern) + + def GetValuePattern(self) -> ValuePattern: + """ + Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ValuePattern) + + +class SpinnerControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.SpinnerControl) + + def GetRangeValuePattern(self) -> RangeValuePattern: + """ + Return `RangeValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.RangeValuePattern) + + def GetSelectionPattern(self) -> SelectionPattern: + """ + Return `SelectionPattern` if it supports the pattern else None(Conditional support according to MSDN). 
+ """ + return self.GetPattern(PatternId.SelectionPattern) + + def GetValuePattern(self) -> ValuePattern: + """ + Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ValuePattern) + + +class SplitButtonControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.SplitButtonControl) + + def GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + def GetInvokePattern(self) -> InvokePattern: + """ + Return `InvokePattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.InvokePattern) + + +class StatusBarControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.StatusBarControl) + + def GetGridPattern(self) -> GridPattern: + """ + Return `GridPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.GridPattern) + + +class TabControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.TabControl) + + def GetSelectionPattern(self) -> SelectionPattern: + """ + Return `SelectionPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.SelectionPattern) + + def GetScrollPattern(self) -> ScrollPattern: + """ + Return `ScrollPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ScrollPattern) + + +class TabItemControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.TabItemControl) + + def GetSelectionItemPattern(self) -> SelectionItemPattern: + """ + Return `SelectionItemPattern` if it supports the pattern else None(Must support according to MSDN). 
+ """ + return self.GetPattern(PatternId.SelectionItemPattern) + + +class TableControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.TableControl) + + def GetGridPattern(self) -> GridPattern: + """ + Return `GridPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.GridPattern) + + def GetGridItemPattern(self) -> GridItemPattern: + """ + Return `GridItemPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.GridItemPattern) + + def GetTablePattern(self) -> TablePattern: + """ + Return `TablePattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.TablePattern) + + def GetTableItemPattern(self) -> TableItemPattern: + """ + Return `TableItemPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.TableItemPattern) + + +class TextControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.TextControl) + + def GetGridItemPattern(self) -> GridItemPattern: + """ + Return `GridItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.GridItemPattern) + + def GetTableItemPattern(self) -> TableItemPattern: + """ + Return `TableItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TableItemPattern) + + def GetTextPattern(self) -> TextPattern: + """ + Return `TextPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TextPattern) + + +class ThumbControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ThumbControl) + + def GetTransformPattern(self) -> TransformPattern: + """ + Return `TransformPattern` if it supports the pattern else None(Must support according to MSDN). 
+ """ + return self.GetPattern(PatternId.TransformPattern) + + +class TitleBarControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.TitleBarControl) + + +class ToolBarControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ToolBarControl) + + def GetDockPattern(self) -> DockPattern: + """ + Return `DockPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.DockPattern) + + def GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + def GetTransformPattern(self) -> TransformPattern: + """ + Return `TransformPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TransformPattern) + + +class ToolTipControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.ToolTipControl) + + def GetTextPattern(self) -> TextPattern: + """ + Return `TextPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TextPattern) + + def GetWindowPattern(self) -> WindowPattern: + """ + Return `WindowPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.WindowPattern) + + +class TreeControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.TreeControl) + + def GetScrollPattern(self) -> ScrollPattern: + """ + Return `ScrollPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ScrollPattern) + + def GetSelectionPattern(self) -> SelectionPattern: + """ + Return `SelectionPattern` if it supports the pattern else None(Conditional support according to MSDN). 
+ """ + return self.GetPattern(PatternId.SelectionPattern) + + +class TreeItemControl(Control): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.TreeItemControl) + + def GetExpandCollapsePattern(self) -> ExpandCollapsePattern: + """ + Return `ExpandCollapsePattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.ExpandCollapsePattern) + + def GetInvokePattern(self) -> InvokePattern: + """ + Return `InvokePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.InvokePattern) + + def GetScrollItemPattern(self) -> ScrollItemPattern: + """ + Return `ScrollItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.ScrollItemPattern) + + def GetSelectionItemPattern(self) -> SelectionItemPattern: + """ + Return `SelectionItemPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.SelectionItemPattern) + + def GetTogglePattern(self) -> TogglePattern: + """ + Return `TogglePattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.TogglePattern) + + +class WindowControl(Control, TopLevel): + def __init__(self, searchFromControl: Control = None, searchDepth: int = 0xFFFFFFFF, searchInterval: float = SEARCH_INTERVAL, foundIndex: int = 1, element=None, **searchProperties): + Control.__init__(self, searchFromControl, searchDepth, searchInterval, foundIndex, element, **searchProperties) + self.AddSearchProperties(ControlType=ControlType.WindowControl) + self._DockPattern = None + self._TransformPattern = None + + def GetTransformPattern(self) -> TransformPattern: + """ + Return `TransformPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.TransformPattern) + + def GetWindowPattern(self) -> WindowPattern: + """ + Return `WindowPattern` if it supports the pattern else None(Must support according to MSDN). + """ + return self.GetPattern(PatternId.WindowPattern) + + def GetDockPattern(self) -> DockPattern: + """ + Return `DockPattern` if it supports the pattern else None(Conditional support according to MSDN). + """ + return self.GetPattern(PatternId.DockPattern) + + def MetroClose(self, waitTime: float = OPERATION_WAIT_TIME) -> None: + """ + Only work on Windows 8/8.1, if current window is Metro UI. + waitTime: float. 
+ """ + if self.ClassName == METRO_WINDOW_CLASS_NAME: + screenWidth, screenHeight = GetScreenSize() + MoveTo(screenWidth // 2, 0, waitTime=0) + DragDrop(screenWidth // 2, 0, screenWidth // 2, screenHeight, waitTime=waitTime) + else: + Logger.WriteLine('Window is not Metro!', ConsoleColor.Yellow) + + +ControlConstructors = { + ControlType.AppBarControl: AppBarControl, + ControlType.ButtonControl: ButtonControl, + ControlType.CalendarControl: CalendarControl, + ControlType.CheckBoxControl: CheckBoxControl, + ControlType.ComboBoxControl: ComboBoxControl, + ControlType.CustomControl: CustomControl, + ControlType.DataGridControl: DataGridControl, + ControlType.DataItemControl: DataItemControl, + ControlType.DocumentControl: DocumentControl, + ControlType.EditControl: EditControl, + ControlType.GroupControl: GroupControl, + ControlType.HeaderControl: HeaderControl, + ControlType.HeaderItemControl: HeaderItemControl, + ControlType.HyperlinkControl: HyperlinkControl, + ControlType.ImageControl: ImageControl, + ControlType.ListControl: ListControl, + ControlType.ListItemControl: ListItemControl, + ControlType.MenuBarControl: MenuBarControl, + ControlType.MenuControl: MenuControl, + ControlType.MenuItemControl: MenuItemControl, + ControlType.PaneControl: PaneControl, + ControlType.ProgressBarControl: ProgressBarControl, + ControlType.RadioButtonControl: RadioButtonControl, + ControlType.ScrollBarControl: ScrollBarControl, + ControlType.SemanticZoomControl: SemanticZoomControl, + ControlType.SeparatorControl: SeparatorControl, + ControlType.SliderControl: SliderControl, + ControlType.SpinnerControl: SpinnerControl, + ControlType.SplitButtonControl: SplitButtonControl, + ControlType.StatusBarControl: StatusBarControl, + ControlType.TabControl: TabControl, + ControlType.TabItemControl: TabItemControl, + ControlType.TableControl: TableControl, + ControlType.TextControl: TextControl, + ControlType.ThumbControl: ThumbControl, + ControlType.TitleBarControl: TitleBarControl, + ControlType.ToolBarControl: ToolBarControl, + ControlType.ToolTipControl: ToolTipControl, + ControlType.TreeControl: TreeControl, + ControlType.TreeItemControl: TreeItemControl, + ControlType.WindowControl: WindowControl, +} + + +class UIAutomationInitializerInThread: + def __init__(self, debug: bool = False): + self.debug = debug + InitializeUIAutomationInCurrentThread() + self.inited = True + if self.debug: + th = threading.currentThread() + print('\ncall InitializeUIAutomationInCurrentThread in {}, inited {}'.format(th, self.inited)) + + def __del__(self): + self.Uninitialize() + + def __enter__(self): + return self + + def __exit__(self, exceptionType, exceptionValue, exceptionTraceback): + self.Uninitialize() + + def Uninitialize(self): + if self.inited: + UninitializeUIAutomationInCurrentThread() + self.inited = False + if self.debug: + th = threading.currentThread() + print('\ncall UninitializeUIAutomationInCurrentThread in {}'.format(th)) + + +def InitializeUIAutomationInCurrentThread() -> None: + """ + Initialize UIAutomation in a new thread. + If you want to use functionalities related to Controls and Patterns in a new thread. + You must call this function first in the new thread. + But you can't use use a Control or a Pattern created in a different thread. + So you can't create a Control or a Pattern in main thread and then pass it to a new thread and use it. 
+ """ + comtypes.CoInitializeEx() + + +def UninitializeUIAutomationInCurrentThread() -> None: + """ + Uninitialize UIAutomation in a new thread after calling InitializeUIAutomationInCurrentThread. + You must call this function when the new thread exits if you have called InitializeUIAutomationInCurrentThread in the same thread. + """ + comtypes.CoUninitialize() + + +def SetGlobalSearchTimeout(seconds: float) -> None: + """ + seconds: float. + To make this available, you need explicitly import uiautomation: + from uiautomation import uiautomation as auto + auto.SetGlobalSearchTimeout(10) + """ + global TIME_OUT_SECOND + TIME_OUT_SECOND = seconds + + +def WaitForExist(control: Control, timeout: float) -> bool: + """ + Check if control exists in timeout seconds. + control: `Control` or its subclass. + timeout: float. + Return bool. + """ + return control.Exists(timeout, 1) + + +def WaitForDisappear(control: Control, timeout: float) -> bool: + """ + Check if control disappears in timeout seconds. + control: `Control` or its subclass. + timeout: float. + Return bool. + """ + return control.Disappears(timeout, 1) + + +def WalkTree(top, getChildren: Callable[[TreeNode], List[TreeNode]] = None, + getFirstChild: Callable[[TreeNode], TreeNode] = None, getNextSibling: Callable[[TreeNode], TreeNode] = None, + yieldCondition: Callable[[TreeNode, int], bool] = None, includeTop: bool = False, maxDepth: int = 0xFFFFFFFF): + """ + Walk a tree not using recursive algorithm. + top: a tree node. + getChildren: Callable[[TreeNode], List[TreeNode]], function(treeNode: TreeNode) -> List[TreeNode]. + getNextSibling: Callable[[TreeNode], TreeNode], function(treeNode: TreeNode) -> TreeNode. + getNextSibling: Callable[[TreeNode], TreeNode], function(treeNode: TreeNode) -> TreeNode. + yieldCondition: Callable[[TreeNode, int], bool], function(treeNode: TreeNode, depth: int) -> bool. + includeTop: bool, if True yield top first. + maxDepth: int, enum depth. + + If getChildren is valid, ignore getFirstChild and getNextSibling, + yield 3 items tuple: (treeNode, depth, remain children count in current depth). + If getChildren is not valid, using getFirstChild and getNextSibling, + yield 2 items tuple: (treeNode, depth). + If yieldCondition is not None, only yield tree nodes that yieldCondition(treeNode: TreeNode, depth: int)->bool returns True. 
+ + For example: + def GetDirChildren(dir_): + if os.path.isdir(dir_): + return [os.path.join(dir_, it) for it in os.listdir(dir_)] + for it, depth, leftCount in WalkTree('D:\\', getChildren= GetDirChildren): + print(it, depth, leftCount) + """ + if maxDepth <= 0: + return + depth = 0 + if getChildren: + if includeTop: + if not yieldCondition or yieldCondition(top, 0): + yield top, 0, 0 + children = getChildren(top) + childList = [children] + while depth >= 0: # or while childList: + lastItems = childList[-1] + if lastItems: + if not yieldCondition or yieldCondition(lastItems[0], depth + 1): + yield lastItems[0], depth + 1, len(lastItems) - 1 + if depth + 1 < maxDepth: + children = getChildren(lastItems[0]) + if children: + depth += 1 + childList.append(children) + del lastItems[0] + else: + del childList[depth] + depth -= 1 + elif getFirstChild and getNextSibling: + if includeTop: + if not yieldCondition or yieldCondition(top, 0): + yield top, 0 + child = getFirstChild(top) + childList = [child] + while depth >= 0: # or while childList: + lastItem = childList[-1] + if lastItem: + if not yieldCondition or yieldCondition(lastItem, depth + 1): + yield lastItem, depth + 1 + child = getNextSibling(lastItem) + childList[depth] = child + if depth + 1 < maxDepth: + child = getFirstChild(lastItem) + if child: + depth += 1 + childList.append(child) + else: + del childList[depth] + depth -= 1 + + +def GetRootControl() -> PaneControl: + """ + Get root control, the Desktop window. + Return `PaneControl`. + """ + return Control.CreateControlFromElement(_AutomationClient.instance().IUIAutomation.GetRootElement()) + + +def GetFocusedControl() -> Control: + """Return `Control` subclass.""" + return Control.CreateControlFromElement(_AutomationClient.instance().IUIAutomation.GetFocusedElement()) + + +def GetForegroundControl() -> Control: + """Return `Control` subclass.""" + return ControlFromHandle(GetForegroundWindow()) + # another implement + #focusedControl = GetFocusedControl() + #parentControl = focusedControl + #controlList = [] + # while parentControl: + #controlList.insert(0, parentControl) + #parentControl = parentControl.GetParentControl() + # if len(controlList) == 1: + #parentControl = controlList[0] + # else: + #parentControl = controlList[1] + # return parentControl + + +def GetConsoleWindow() -> WindowControl: + """Return `WindowControl` or None, a console window that runs python.""" + return ControlFromHandle(ctypes.windll.kernel32.GetConsoleWindow()) + + +def ControlFromPoint(x: int, y: int) -> Control: + """ + Call IUIAutomation ElementFromPoint x,y. May return None if mouse is over cmd's title bar icon. + Return `Control` subclass or None. + """ + element = _AutomationClient.instance().IUIAutomation.ElementFromPoint(ctypes.wintypes.POINT(x, y)) + return Control.CreateControlFromElement(element) + + +def ControlFromPoint2(x: int, y: int) -> Control: + """ + Get a native handle from point x,y and call IUIAutomation.ElementFromHandle. + Return `Control` subclass. + """ + return Control.CreateControlFromElement(_AutomationClient.instance().IUIAutomation.ElementFromHandle(WindowFromPoint(x, y))) + + +def ControlFromCursor() -> Control: + """ + Call ControlFromPoint with current cursor point. + Return `Control` subclass. + """ + x, y = GetCursorPos() + return ControlFromPoint(x, y) + + +def ControlFromCursor2() -> Control: + """ + Call ControlFromPoint2 with current cursor point. + Return `Control` subclass. 
+ """ + x, y = GetCursorPos() + return ControlFromPoint2(x, y) + + +def ControlFromHandle(handle: int) -> Control: + """ + Call IUIAutomation.ElementFromHandle with a native handle. + handle: int, a native window handle. + Return `Control` subclass or None. + """ + if handle: + return Control.CreateControlFromElement(_AutomationClient.instance().IUIAutomation.ElementFromHandle(handle)) + + +def ControlsAreSame(control1: Control, control2: Control) -> bool: + """ + control1: `Control` or its subclass. + control2: `Control` or its subclass. + Return bool, True if control1 and control2 represent the same control otherwise False. + """ + return bool(_AutomationClient.instance().IUIAutomation.CompareElements(control1.Element, control2.Element)) + + +def WalkControl(control: Control, includeTop: bool = False, maxDepth: int = 0xFFFFFFFF): + """ + control: `Control` or its subclass. + includeTop: bool, if True, yield (control, 0) first. + maxDepth: int, enum depth. + Yield 2 items tuple (control: Control, depth: int). + """ + if includeTop: + yield control, 0 + if maxDepth <= 0: + return + depth = 0 + child = control.GetFirstChildControl() + controlList = [child] + while depth >= 0: + lastControl = controlList[-1] + if lastControl: + yield lastControl, depth + 1 + child = lastControl.GetNextSiblingControl() + controlList[depth] = child + if depth + 1 < maxDepth: + child = lastControl.GetFirstChildControl() + if child: + depth += 1 + controlList.append(child) + else: + del controlList[depth] + depth -= 1 + + +def LogControl(control: Control, depth: int = 0, showAllName: bool = True, showPid: bool = False) -> None: + """ + Print and log control's properties. + control: `Control` or its subclass. + depth: int, current depth. + showAllName: bool, if False, print the first 30 characters of control.Name. + """ + indent = ' ' * depth * 4 + Logger.Write('{0}ControlType: '.format(indent)) + Logger.Write(control.ControlTypeName, ConsoleColor.DarkGreen) + Logger.Write(' ClassName: ') + Logger.Write(control.ClassName, ConsoleColor.DarkGreen) + Logger.Write(' AutomationId: ') + Logger.Write(control.AutomationId, ConsoleColor.DarkGreen) + Logger.Write(' Rect: ') + Logger.Write(control.BoundingRectangle, ConsoleColor.DarkGreen) + Logger.Write(' Name: ') + Logger.Write(control.Name, ConsoleColor.DarkGreen, printTruncateLen=0 if showAllName else 30) + Logger.Write(' Handle: ') + Logger.Write('0x{0:X}({0})'.format(control.NativeWindowHandle), ConsoleColor.DarkGreen) + Logger.Write(' Depth: ') + Logger.Write(depth, ConsoleColor.DarkGreen) + if showPid: + Logger.Write(' ProcessId: ') + Logger.Write(control.ProcessId, ConsoleColor.DarkGreen) + supportedPatterns = list(filter(lambda t: t[0], ((control.GetPattern(id_), name) for id_, name in PatternIdNames.items()))) + for pt, name in supportedPatterns: + if isinstance(pt, ValuePattern): + Logger.Write(' ValuePattern.Value: ') + Logger.Write(pt.Value, ConsoleColor.DarkGreen, printTruncateLen=0 if showAllName else 30) + elif isinstance(pt, RangeValuePattern): + Logger.Write(' RangeValuePattern.Value: ') + Logger.Write(pt.Value, ConsoleColor.DarkGreen) + elif isinstance(pt, TogglePattern): + Logger.Write(' TogglePattern.ToggleState: ') + Logger.Write('ToggleState.' 
+ _GetDictKeyName(ToggleState.__dict__, pt.ToggleState), ConsoleColor.DarkGreen) + elif isinstance(pt, SelectionItemPattern): + Logger.Write(' SelectionItemPattern.IsSelected: ') + Logger.Write(pt.IsSelected, ConsoleColor.DarkGreen) + elif isinstance(pt, ExpandCollapsePattern): + Logger.Write(' ExpandCollapsePattern.ExpandCollapseState: ') + Logger.Write('ExpandCollapseState.' + _GetDictKeyName(ExpandCollapseState.__dict__, pt.ExpandCollapseState), ConsoleColor.DarkGreen) + elif isinstance(pt, ScrollPattern): + Logger.Write(' ScrollPattern.HorizontalScrollPercent: ') + Logger.Write(pt.HorizontalScrollPercent, ConsoleColor.DarkGreen) + Logger.Write(' ScrollPattern.VerticalScrollPercent: ') + Logger.Write(pt.VerticalScrollPercent, ConsoleColor.DarkGreen) + elif isinstance(pt, GridPattern): + Logger.Write(' GridPattern.RowCount: ') + Logger.Write(pt.RowCount, ConsoleColor.DarkGreen) + Logger.Write(' GridPattern.ColumnCount: ') + Logger.Write(pt.ColumnCount, ConsoleColor.DarkGreen) + elif isinstance(pt, GridItemPattern): + Logger.Write(' GridItemPattern.Row: ') + Logger.Write(pt.Row, ConsoleColor.DarkGreen) + Logger.Write(' GridItemPattern.Column: ') + Logger.Write(pt.Column, ConsoleColor.DarkGreen) + elif isinstance(pt, TextPattern): + # issue 49: CEF Controls exposed as DocumentControl have no "TextPattern.Text" property, so skip logging this part. + # https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-get_documentrange + try: + Logger.Write(' TextPattern.Text: ') + Logger.Write(pt.DocumentRange.GetText(30), ConsoleColor.DarkGreen) + except comtypes.COMError as ex: + pass + Logger.Write(' SupportedPattern:') + for pt, name in supportedPatterns: + Logger.Write(' ' + name, ConsoleColor.DarkGreen) + Logger.Write('\n') + + + def EnumAndLogControl(control: Control, maxDepth: int = 0xFFFFFFFF, showAllName: bool = True, showPid: bool = False, startDepth: int = 0) -> None: + """ + Print and log control and its descendants' properties. + control: `Control` or its subclass. + maxDepth: int, enum depth. + showAllName: bool, if False, print the first 30 characters of control.Name. + startDepth: int, control's current depth. + """ + for c, d in WalkControl(control, True, maxDepth): + LogControl(c, d + startDepth, showAllName, showPid) + + + def EnumAndLogControlAncestors(control: Control, showAllName: bool = True, showPid: bool = False) -> None: + """ + Print and log control and its ancestors' properties. + control: `Control` or its subclass. + showAllName: bool, if False, print the first 30 characters of control.Name. + """ + lists = [] + while control: + lists.insert(0, control) + control = control.GetParentControl() + for i, control in enumerate(lists): + LogControl(control, i, showAllName, showPid) + + + def FindControl(control: Control, compare: Callable[[Control, int], bool], maxDepth: int = 0xFFFFFFFF, findFromSelf: bool = False, foundIndex: int = 1) -> Control: + """ + control: `Control` or its subclass. + compare: Callable[[Control, int], bool], function(control: Control, depth: int) -> bool. + maxDepth: int, enum depth. + findFromSelf: bool, if False, do not compare self. + foundIndex: int, starts with 1, >= 1. + Return `Control` subclass or None if not found.
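+ For example (a minimal sketch; the control Name is illustrative): + c = FindControl(GetRootControl(), lambda ctrl, depth: isinstance(ctrl, ButtonControl) and ctrl.Name == 'OK') + if c: + pattern = c.GetInvokePattern() + if pattern: + pattern.Invoke()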
+ """ + foundCount = 0 + if not control: + control = GetRootControl() + traverseCount = 0 + for child, depth in WalkControl(control, findFromSelf, maxDepth): + traverseCount += 1 + if compare(child, depth): + foundCount += 1 + if foundCount == foundIndex: + child.traverseCount = traverseCount + return child + + +def ShowDesktop(waitTime: float = 1) -> None: + """Show Desktop by pressing win + d""" + SendKeys('{Win}d', waitTime=waitTime) + # another implement + #paneTray = PaneControl(searchDepth = 1, ClassName = 'Shell_TrayWnd') + # if paneTray.Exists(): + #WM_COMMAND = 0x111 + #MIN_ALL = 419 + #MIN_ALL_UNDO = 416 + #PostMessage(paneTray.NativeWindowHandle, WM_COMMAND, MIN_ALL, 0) + # time.sleep(1) + + +def WaitHotKeyReleased(hotkey: Tuple[int, int]) -> None: + """hotkey: Tuple[int, int], two ints tuple (modifierKey, key)""" + mod = {ModifierKey.Alt: Keys.VK_MENU, + ModifierKey.Control: Keys.VK_CONTROL, + ModifierKey.Shift: Keys.VK_SHIFT, + ModifierKey.Win: Keys.VK_LWIN + } + while True: + time.sleep(0.05) + if IsKeyPressed(hotkey[1]): + continue + for k, v in mod.items(): + if k & hotkey[0]: + if IsKeyPressed(v): + break + else: + break + + +def RunByHotKey(keyFunctions: Dict[Tuple[int, int], Callable], stopHotKey: Tuple[int, int] = None, exitHotKey: Tuple[int, int] = (ModifierKey.Control, Keys.VK_D), waitHotKeyReleased: bool = True) -> None: + """ + Bind functions with hotkeys, the function will be run or stopped in another thread when the hotkey is pressed. + keyFunctions: Dict[Tuple[int, int], Callable], such as {(uiautomation.ModifierKey.Control, uiautomation.Keys.VK_1) : function} + stopHotKey: hotkey tuple + exitHotKey: hotkey tuple + waitHotKeyReleased: bool, if True, hotkey function will be triggered after the hotkey is released + + def main(stopEvent): + while True: + if stopEvent.is_set(): # must check stopEvent.is_set() if you want to stop when stop hotkey is pressed + break + print(n) + n += 1 + stopEvent.wait(1) + print('main exit') + + uiautomation.RunByHotKey({(uiautomation.ModifierKey.Control, uiautomation.Keys.VK_1) : main} + , (uiautomation.ModifierKey.Control | uiautomation.ModifierKey.Shift, uiautomation.Keys.VK_2)) + """ + import traceback + + def getModName(theDict, theValue): + name = '' + for key in theDict: + if isinstance(theDict[key], int) and theValue & theDict[key]: + if name: + name += '|' + name += key + return name + + def releaseAllKeys(): + for key, value in Keys.__dict__.items(): + if isinstance(value, int) and key.startswith('VK'): + if IsKeyPressed(value): + ReleaseKey(value) + + def threadFunc(function, stopEvent, hotkey, hotkeyName): + if waitHotKeyReleased: + WaitHotKeyReleased(hotkey) + try: + function(stopEvent) + except Exception as ex: + Logger.ColorfullyWrite('Catch an exception {} in thread for hotkey {}\n'.format( + ex.__class__.__name__, hotkeyName), writeToFile=False) + print(traceback.format_exc()) + finally: + releaseAllKeys() # need to release keys if some keys were pressed + Logger.ColorfullyWrite('{} for function {} exits, hotkey {}\n'.format( + threading.currentThread(), function.__name__, hotkeyName), ConsoleColor.DarkYellow, writeToFile=False) + + stopHotKeyId = 1 + exitHotKeyId = 2 + hotKeyId = 3 + registed = True + id2HotKey = {} + id2Function = {} + id2Thread = {} + id2Name = {} + for hotkey in keyFunctions: + id2HotKey[hotKeyId] = hotkey + id2Function[hotKeyId] = keyFunctions[hotkey] + id2Thread[hotKeyId] = None + modName = getModName(ModifierKey.__dict__, hotkey[0]) + keyName = _GetDictKeyName(Keys.__dict__, hotkey[1]) + 
id2Name[hotKeyId] = str((modName, keyName)) + if ctypes.windll.user32.RegisterHotKey(0, hotKeyId, hotkey[0], hotkey[1]): + Logger.ColorfullyWrite('Register hotkey {} successfully\n'.format((modName, keyName)), writeToFile=False) + else: + registed = False + Logger.ColorfullyWrite('Register hotkey {} unsuccessfully, maybe it was allready registered by another program\n'.format((modName, keyName)), writeToFile=False) + hotKeyId += 1 + if stopHotKey and len(stopHotKey) == 2: + modName = getModName(ModifierKey.__dict__, stopHotKey[0]) + keyName = _GetDictKeyName(Keys.__dict__, stopHotKey[1]) + if ctypes.windll.user32.RegisterHotKey(0, stopHotKeyId, stopHotKey[0], stopHotKey[1]): + Logger.ColorfullyWrite('Register stop hotkey {} successfully\n'.format((modName, keyName)), writeToFile=False) + else: + registed = False + Logger.ColorfullyWrite('Register stop hotkey {} unsuccessfully, maybe it was allready registered by another program\n'.format((modName, keyName)), writeToFile=False) + if not registed: + return + if exitHotKey and len(exitHotKey) == 2: + modName = getModName(ModifierKey.__dict__, exitHotKey[0]) + keyName = _GetDictKeyName(Keys.__dict__, exitHotKey[1]) + if ctypes.windll.user32.RegisterHotKey(0, exitHotKeyId, exitHotKey[0], exitHotKey[1]): + Logger.ColorfullyWrite('Register exit hotkey {} successfully\n'.format((modName, keyName)), writeToFile=False) + else: + Logger.ColorfullyWrite('Register exit hotkey {} unsuccessfully\n'.format((modName, keyName)), writeToFile=False) + funcThread = None + livingThreads = [] + stopEvent = threading.Event() + msg = ctypes.wintypes.MSG() + while ctypes.windll.user32.GetMessageW(ctypes.byref(msg), ctypes.c_void_p(0), ctypes.c_uint(0), ctypes.c_uint(0)) != 0: + if msg.message == 0x0312: # WM_HOTKEY=0x0312 + if msg.wParam in id2HotKey: + if msg.lParam & 0x0000FFFF == id2HotKey[msg.wParam][0] and msg.lParam >> 16 & 0x0000FFFF == id2HotKey[msg.wParam][1]: + Logger.ColorfullyWrite('----------hotkey {} pressed----------\n'.format(id2Name[msg.wParam]), writeToFile=False) + if not id2Thread[msg.wParam]: + stopEvent.clear() + funcThread = threading.Thread(None, threadFunc, args=(id2Function[msg.wParam], stopEvent, id2HotKey[msg.wParam], id2Name[msg.wParam])) + funcThread.start() + id2Thread[msg.wParam] = funcThread + else: + if id2Thread[msg.wParam].is_alive(): + Logger.WriteLine('There is a {} that is already running for hotkey {}'.format(id2Thread[msg.wParam], id2Name[msg.wParam]), ConsoleColor.Yellow, writeToFile=False) + else: + stopEvent.clear() + funcThread = threading.Thread(None, threadFunc, args=(id2Function[msg.wParam], stopEvent, id2HotKey[msg.wParam], id2Name[msg.wParam])) + funcThread.start() + id2Thread[msg.wParam] = funcThread + elif stopHotKeyId == msg.wParam: + if msg.lParam & 0x0000FFFF == stopHotKey[0] and msg.lParam >> 16 & 0x0000FFFF == stopHotKey[1]: + Logger.Write('----------stop hotkey pressed----------\n', ConsoleColor.DarkYellow, writeToFile=False) + stopEvent.set() + for id_ in id2Thread: + if id2Thread[id_]: + if id2Thread[id_].is_alive(): + livingThreads.append((id2Thread[id_], id2Name[id_])) + id2Thread[id_] = None + elif exitHotKeyId == msg.wParam: + if msg.lParam & 0x0000FFFF == exitHotKey[0] and msg.lParam >> 16 & 0x0000FFFF == exitHotKey[1]: + Logger.Write('Exit hotkey pressed. 
Exit\n', ConsoleColor.DarkYellow, writeToFile=False) + stopEvent.set() + for id_ in id2Thread: + if id2Thread[id_]: + if id2Thread[id_].is_alive(): + livingThreads.append((id2Thread[id_], id2Name[id_])) + id2Thread[id_] = None + break + for thread, hotkeyName in livingThreads: + if thread.is_alive(): + Logger.Write('join {} triggered by hotkey {}\n'.format(thread, hotkeyName), ConsoleColor.DarkYellow, writeToFile=False) + thread.join(2) + os._exit(0) + + +if __name__ == '__main__': + + print('\nUIAutomationCore:----') + for i in sorted([it for it in dir(_AutomationClient.instance().UIAutomationCore) if not it.startswith('_')]): + print(i) + + print('\nIUIAutomation:----') + for i in sorted([it for it in dir(_AutomationClient.instance().IUIAutomation) if not it.startswith('_')]): + print(i) + + print('\nViewWalker:----') + for i in sorted([it for it in dir(_AutomationClient.instance().ViewWalker) if not it.startswith('_')]): + print(i) + + print() + for ct, ctor in ControlConstructors.items(): + c = ctor() + print(type(c)) + + notepad = WindowControl(searchDepth=1, ClassName='Notepad') + if not notepad.Exists(0, 0): + import subprocess + subprocess.Popen('notepad.exe') + notepad.Refind() + + print('\n', notepad) + print('Control:----') + for i in sorted([it for it in dir(notepad) if not it.startswith('_')]): + print(i) + + print('\n', notepad.Element) + print('Control.Element:----') + for i in sorted([it for it in dir(notepad.Element) if not it.startswith('_')]): + print(i) + + lp = notepad.GetLegacyIAccessiblePattern() + print('\n', lp) + print('Control.LegacyIAccessiblePattern:----') + for i in sorted([it for it in dir(lp.pattern) if not it.startswith('_')]): + print(i) + + print('\nControl.Properties:----') + for k, v in PropertyIdNames.items(): + try: + value = notepad.GetPropertyValue(k) + print('GetPropertyValue, {} = {}, type: {}'.format(v, value, type(value))) + except (KeyError, comtypes.COMError) as ex: + print('GetPropertyValue, {}, error'.format(v)) + + children = notepad.GetChildren() + print('\n notepad children:----', len(children)) + for c in notepad.GetChildren(): + print(c) + + del lp + del notepad + + hello = '{Ctrl}{End}{Enter}Hello World!' 
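+ # named keys are wrapped in braces: '{Ctrl}{End}' moves the caret to the end of the document, '{Enter}' starts a new line, and the remaining text is typed literally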
+ SendKeys(hello) diff --git a/venv/Lib/site-packages/uiautomation/version.py b/venv/Lib/site-packages/uiautomation/version.py new file mode 100644 index 00000000..394ef15e --- /dev/null +++ b/venv/Lib/site-packages/uiautomation/version.py @@ -0,0 +1 @@ +VERSION = "2.0.13" diff --git a/venv/Lib/site-packages/win32/Demos/BackupRead_BackupWrite.py b/venv/Lib/site-packages/win32/Demos/BackupRead_BackupWrite.py new file mode 100644 index 00000000..ebbf0237 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/BackupRead_BackupWrite.py @@ -0,0 +1,81 @@ +## demonstrates using BackupRead and BackupWrite to copy all of a file's data streams + +import win32file, win32api, win32con, win32security, ntsecuritycon +from win32com import storagecon +import pythoncom, pywintypes +import struct, traceback +from pywin32_testutil import str2bytes, ob2memory + +all_sd_info=win32security.DACL_SECURITY_INFORMATION|win32security.DACL_SECURITY_INFORMATION| \ + win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION + +tempdir=win32api.GetTempPath() +tempfile=win32api.GetTempFileName(tempdir,'bkr')[0] +outfile=win32api.GetTempFileName(tempdir,'out')[0] +print('Filename:',tempfile,'Output file:',outfile) + +f=open(tempfile,'w') +f.write('some random junk'+'x'*100) +f.close() + +## add a couple of alternate data streams +f=open(tempfile+':streamdata','w') +f.write('data written to alternate stream'+'y'*100) +f.close() + +f=open(tempfile+':anotherstream','w') +f.write('z'*100) +f.close() + +## add Summary Information, which is stored as a separate stream +m=storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE |storagecon.STGM_DIRECT +pss=pythoncom.StgOpenStorageEx(tempfile, m, storagecon.STGFMT_FILE, 0 , pythoncom.IID_IPropertySetStorage,None) +ps=pss.Create(pythoncom.FMTID_SummaryInformation,pythoncom.IID_IPropertyStorage,0,storagecon.STGM_READWRITE|storagecon.STGM_SHARE_EXCLUSIVE) +ps.WriteMultiple((storagecon.PIDSI_KEYWORDS,storagecon.PIDSI_COMMENTS),('keywords','comments')) +ps=None +pss=None + +## add a custom security descriptor to make sure we don't +## get a default that would always be the same for both files in temp dir +new_sd=pywintypes.SECURITY_DESCRIPTOR() +sid=win32security.LookupAccountName('','EveryOne')[0] +acl=pywintypes.ACL() +acl.AddAccessAllowedAce(1, win32con.GENERIC_READ, sid) +acl.AddAccessAllowedAce(1, ntsecuritycon.FILE_APPEND_DATA, sid) +acl.AddAccessAllowedAce(1, win32con.GENERIC_WRITE, sid) +acl.AddAccessAllowedAce(1, ntsecuritycon.FILE_ALL_ACCESS, sid) + +new_sd.SetSecurityDescriptorDacl(True, acl, False) +win32security.SetFileSecurity(tempfile,win32security.DACL_SECURITY_INFORMATION,new_sd) + + +sa=pywintypes.SECURITY_ATTRIBUTES() +sa.bInheritHandle=True +h=win32file.CreateFile(tempfile, win32con.GENERIC_ALL ,win32con.FILE_SHARE_READ, + sa, win32con.OPEN_EXISTING, win32file.FILE_FLAG_BACKUP_SEMANTICS , None) + +outh=win32file.CreateFile(outfile, win32con.GENERIC_ALL ,win32con.FILE_SHARE_READ|win32con.FILE_SHARE_WRITE, + sa, win32con.OPEN_EXISTING, win32file.FILE_FLAG_BACKUP_SEMANTICS , None) + +ctxt=0 +outctxt=0 +buf=None +readsize=100 + +while 1: + bytes_read, buf, ctxt=win32file.BackupRead(h, readsize, buf, False, True, ctxt) + if bytes_read==0: + break + bytes_written, outctxt=win32file.BackupWrite(outh, bytes_read, buf, False, True, outctxt) + print('Written:',bytes_written,'Context:',outctxt) +win32file.BackupRead(h, 0, buf, True, True, ctxt) +win32file.BackupWrite(outh, 0, str2bytes(''), True, True, outctxt) +win32file.CloseHandle(h) 
+win32file.CloseHandle(outh) + +assert open(tempfile).read()==open(outfile).read(),"File contents differ !" +assert open(tempfile+':streamdata').read()==open(outfile+':streamdata').read(),"streamdata contents differ !" +assert open(tempfile+':anotherstream').read()==open(outfile+':anotherstream').read(),"anotherstream contents differ !" +assert ob2memory(win32security.GetFileSecurity(tempfile,all_sd_info))[:]== \ + ob2memory(win32security.GetFileSecurity(outfile, all_sd_info))[:], "Security descriptors are different !" +## also should check Summary Info programatically diff --git a/venv/Lib/site-packages/win32/Demos/BackupSeek_streamheaders.py b/venv/Lib/site-packages/win32/Demos/BackupSeek_streamheaders.py new file mode 100644 index 00000000..051ced5c --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/BackupSeek_streamheaders.py @@ -0,0 +1,85 @@ +## demonstrates using BackupSeek to enumerate data streams for a file +import win32file, win32api, win32con +from win32com import storagecon +import pythoncom, pywintypes +import struct, traceback + +stream_types={ + win32con.BACKUP_DATA:"Standard data", + win32con.BACKUP_EA_DATA:"Extended attribute data", + win32con.BACKUP_SECURITY_DATA:"Security descriptor data", + win32con.BACKUP_ALTERNATE_DATA:"Alternative data streams", + win32con.BACKUP_LINK:"Hard link information", + win32con.BACKUP_PROPERTY_DATA:"Property data", + win32con.BACKUP_OBJECT_ID:"Objects identifiers", + win32con.BACKUP_REPARSE_DATA:"Reparse points", + win32con.BACKUP_SPARSE_BLOCK:"Sparse file" +} + +tempdir=win32api.GetTempPath() +tempfile=win32api.GetTempFileName(tempdir,'bkr')[0] +print('Filename:',tempfile) + +f=open(tempfile,'w') +f.write('some random junk'+'x'*100) +f.close() + +f=open(tempfile+':streamdata','w') +f.write('data written to alternate stream'+'y'*100) +f.close() + +f=open(tempfile+':anotherstream','w') +f.write('z'*200) +f.close() + +## add Summary Information, which is stored as a separate stream +m=storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE |storagecon.STGM_DIRECT +pss=pythoncom.StgOpenStorageEx(tempfile, m, storagecon.STGFMT_FILE, 0 , pythoncom.IID_IPropertySetStorage,None) +ps=pss.Create(pythoncom.FMTID_SummaryInformation,pythoncom.IID_IPropertyStorage,0,storagecon.STGM_READWRITE|storagecon.STGM_SHARE_EXCLUSIVE) +ps.WriteMultiple((storagecon.PIDSI_KEYWORDS,storagecon.PIDSI_COMMENTS),('keywords','comments')) +ps=None +pss=None + +sa=pywintypes.SECURITY_ATTRIBUTES() +sa.bInheritHandle=False +h=win32file.CreateFile(tempfile, win32con.GENERIC_ALL ,win32con.FILE_SHARE_READ, + sa, win32con.OPEN_EXISTING, win32file.FILE_FLAG_BACKUP_SEMANTICS , None) + + +""" stream header: +typedef struct _WIN32_STREAM_ID { + DWORD dwStreamId; DWORD dwStreamAttributes; LARGE_INTEGER Size; + DWORD dwStreamNameSize; WCHAR cStreamName[ANYSIZE_ARRAY]; +} +""" + +win32_stream_id_format="LLQL" +win32_stream_id_size=struct.calcsize(win32_stream_id_format) + +def parse_stream_header(h,ctxt,data): + stream_type, stream_attributes, stream_size, stream_name_size=struct.unpack(win32_stream_id_format,data) + print('\nType:',stream_type,stream_types[stream_type], 'Attributes:', stream_attributes, 'Size:', stream_size, 'Name len:',stream_name_size) + if stream_name_size>0: + ## ??? sdk says this size is in characters, but it appears to be number of bytes ??? 
+ bytes_read, stream_name_buf, ctxt=win32file.BackupRead(h, stream_name_size, None, False, True, ctxt) + stream_name=pywintypes.UnicodeFromRaw(stream_name_buf[:]) + else: + stream_name='Unnamed' + print('Name:'+stream_name) + return ctxt, stream_type, stream_attributes, stream_size, stream_name_size, stream_name + +ctxt=0 +win32_stream_id_buf=None ## gets rebound to a writable buffer on first call and reused +while 1: + bytes_read, win32_stream_id_buf, ctxt=win32file.BackupRead(h, win32_stream_id_size, win32_stream_id_buf, False, True, ctxt) + if bytes_read==0: + break + ctxt, stream_type, stream_attributes, stream_size, stream_name_size, stream_name=\ + parse_stream_header(h, ctxt, win32_stream_id_buf[:]) + if stream_size>0: + bytes_moved=win32file.BackupSeek(h, stream_size, ctxt) + print('Moved: ',bytes_moved) + +win32file.BackupRead(h, win32_stream_id_size, win32_stream_id_buf, True, True, ctxt) +win32file.CloseHandle(h) + diff --git a/venv/Lib/site-packages/win32/Demos/CopyFileEx.py b/venv/Lib/site-packages/win32/Demos/CopyFileEx.py new file mode 100644 index 00000000..0b71294e --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/CopyFileEx.py @@ -0,0 +1,30 @@ +import win32file, win32api +import os + + +def ProgressRoutine(TotalFileSize, TotalBytesTransferred, StreamSize, StreamBytesTransferred, + StreamNumber, CallbackReason, SourceFile, DestinationFile, Data): + print(Data) + print(TotalFileSize, TotalBytesTransferred, StreamSize, StreamBytesTransferred, StreamNumber, CallbackReason, SourceFile, DestinationFile) + ##if TotalBytesTransferred > 100000: + ## return win32file.PROGRESS_STOP + return win32file.PROGRESS_CONTINUE + +temp_dir=win32api.GetTempPath() +fsrc=win32api.GetTempFileName(temp_dir,'cfe')[0] +fdst=win32api.GetTempFileName(temp_dir,'cfe')[0] +print(fsrc, fdst) + +f=open(fsrc,'w') +f.write('xxxxxxxxxxxxxxxx\n'*32768) +f.close() +## add a couple of extra data streams +f=open(fsrc+':stream_y','w') +f.write('yyyyyyyyyyyyyyyy\n'*32768) +f.close() +f=open(fsrc+':stream_z','w') +f.write('zzzzzzzzzzzzzzzz\n'*32768) +f.close() + +operation_desc='Copying '+fsrc+' to '+fdst +win32file.CopyFileEx(fsrc, fdst, ProgressRoutine, Data=operation_desc, Cancel=False, CopyFlags=win32file.COPY_FILE_RESTARTABLE, Transaction=None) diff --git a/venv/Lib/site-packages/win32/Demos/CreateFileTransacted_MiniVersion.py b/venv/Lib/site-packages/win32/Demos/CreateFileTransacted_MiniVersion.py new file mode 100644 index 00000000..bfb49946 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/CreateFileTransacted_MiniVersion.py @@ -0,0 +1,81 @@ +""" +This demonstrates the creation of miniversions of a file during a transaction. +The FSCTL_TXFS_CREATE_MINIVERSION control code saves any changes to a new +miniversion (effectively a savepoint within a transaction). 
+""" + +import win32file, win32api, win32transaction, winerror +import win32con, winioctlcon +import struct +import os +from pywin32_testutil import str2bytes # py3k-friendly helper + +def demo(): + """ + Definition of buffer used with FSCTL_TXFS_CREATE_MINIVERSION: + typedef struct _TXFS_CREATE_MINIVERSION_INFO{ + USHORT StructureVersion; + USHORT StructureLength; + ULONG BaseVersion; + USHORT MiniVersion;} + """ + buf_fmt='HHLH0L' ## buffer size must include struct padding + buf_size=struct.calcsize(buf_fmt) + + tempdir=win32api.GetTempPath() + tempfile=win32api.GetTempFileName(tempdir,'cft')[0] + print("Demonstrating transactions on tempfile", tempfile) + f=open(tempfile,'w') + f.write('This is original file.\n') + f.close() + + trans=win32transaction.CreateTransaction(Description='Test creating miniversions of a file') + hfile=win32file.CreateFileW(tempfile, win32con.GENERIC_READ|win32con.GENERIC_WRITE, + win32con.FILE_SHARE_READ|win32con.FILE_SHARE_WRITE, + None, win32con.OPEN_EXISTING, 0 , None, Transaction=trans) + + win32file.WriteFile(hfile, str2bytes('This is first miniversion.\n')) + buf=win32file.DeviceIoControl(hfile, winioctlcon.FSCTL_TXFS_CREATE_MINIVERSION,None,buf_size,None) + struct_ver, struct_len, base_ver, ver_1=struct.unpack(buf_fmt, buf) + + win32file.SetFilePointer(hfile, 0, win32con.FILE_BEGIN) + win32file.WriteFile(hfile, str2bytes('This is second miniversion!\n')) + buf=win32file.DeviceIoControl(hfile, winioctlcon.FSCTL_TXFS_CREATE_MINIVERSION,None,buf_size,None) + struct_ver, struct_len, base_ver, ver_2=struct.unpack(buf_fmt, buf) + hfile.Close() + + ## miniversions can't be opened with write access + hfile_0=win32file.CreateFileW(tempfile, win32con.GENERIC_READ, + win32con.FILE_SHARE_READ|win32con.FILE_SHARE_WRITE, + None, win32con.OPEN_EXISTING, 0 , None, Transaction=trans, MiniVersion=base_ver) + print('version:',base_ver,win32file.ReadFile(hfile_0, 100)) + hfile_0.Close() + + hfile_1=win32file.CreateFileW(tempfile, win32con.GENERIC_READ, + win32con.FILE_SHARE_READ|win32con.FILE_SHARE_WRITE, + None, win32con.OPEN_EXISTING, 0 , None, Transaction=trans, MiniVersion=ver_1) + print('version:',ver_1,win32file.ReadFile(hfile_1, 100)) + hfile_1.Close() + + hfile_2=win32file.CreateFileW(tempfile, win32con.GENERIC_READ, + win32con.FILE_SHARE_READ|win32con.FILE_SHARE_WRITE, + None, win32con.OPEN_EXISTING, 0 , None, Transaction=trans, MiniVersion=ver_2) + print('version:',ver_2,win32file.ReadFile(hfile_2, 100)) + hfile_2.Close() + + ## MiniVersions are destroyed when transaction is committed or rolled back + win32transaction.CommitTransaction(trans) + + os.unlink(tempfile) + +if __name__ == "__main__": + # When run on CI, this fails with NOT_SUPPORTED, so don't have that cause "failure" + try: + demo() + except win32file.error as e: + if e.winerror == winerror.ERROR_NOT_SUPPORTED: + print("These features are not supported by this filesystem.") + else: + raise + + diff --git a/venv/Lib/site-packages/win32/Demos/EvtFormatMessage.py b/venv/Lib/site-packages/win32/Demos/EvtFormatMessage.py new file mode 100644 index 00000000..b8e2285a --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/EvtFormatMessage.py @@ -0,0 +1,67 @@ +import sys + +import win32evtlog + + +def main(): + path = 'System' + num_events = 5 + if len(sys.argv) > 2: + path = sys.argv[1] + num_events = int(sys.argv[2]) + elif len(sys.argv) > 1: + path = sys.argv[1] + + query = win32evtlog.EvtQuery(path, win32evtlog.EvtQueryForwardDirection) + events = win32evtlog.EvtNext(query, num_events) + context = 
win32evtlog.EvtCreateRenderContext(win32evtlog.EvtRenderContextSystem) + + for i, event in enumerate(events, 1): + result = win32evtlog.EvtRender(event, win32evtlog.EvtRenderEventValues, Context=context) + + print('Event {}'.format(i)) + + level_value, level_variant = result[win32evtlog.EvtSystemLevel] + if level_variant != win32evtlog.EvtVarTypeNull: + if level_value == 1: + print(' Level: CRITICAL') + elif level_value == 2: + print(' Level: ERROR') + elif level_value == 3: + print(' Level: WARNING') + elif level_value == 4: + print(' Level: INFO') + elif level_value == 5: + print(' Level: VERBOSE') + else: + print(' Level: UNKNOWN') + + time_created_value, time_created_variant = result[win32evtlog.EvtSystemTimeCreated] + if time_created_variant != win32evtlog.EvtVarTypeNull: + print(' Timestamp: {}'.format(time_created_value.isoformat())) + + computer_value, computer_variant = result[win32evtlog.EvtSystemComputer] + if computer_variant != win32evtlog.EvtVarTypeNull: + print(' FQDN: {}'.format(computer_value)) + + provider_name_value, provider_name_variant = result[win32evtlog.EvtSystemProviderName] + if provider_name_variant != win32evtlog.EvtVarTypeNull: + print(' Provider: {}'.format(provider_name_value)) + + try: + metadata = win32evtlog.EvtOpenPublisherMetadata(provider_name_value) + # pywintypes.error: (2, 'EvtOpenPublisherMetadata', 'The system cannot find the file specified.') + except Exception: + pass + else: + try: + message = win32evtlog.EvtFormatMessage(metadata, event, win32evtlog.EvtFormatMessageEvent) + # pywintypes.error: (15027, 'EvtFormatMessage: allocated 0, need buffer of size 0', 'The message resource is present but the message was not found in the message table.') + except Exception: + pass + else: + print(' Message: {}'.format(message)) + + +if __name__=='__main__': + main() diff --git a/venv/Lib/site-packages/win32/Demos/EvtSubscribe_pull.py b/venv/Lib/site-packages/win32/Demos/EvtSubscribe_pull.py new file mode 100644 index 00000000..7540a12f --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/EvtSubscribe_pull.py @@ -0,0 +1,21 @@ +## Demonstrates how to create a "pull" subscription +import win32evtlog, win32event, win32con +query_text='*[System[Provider[@Name="Microsoft-Windows-Winlogon"]]]' + +h=win32event.CreateEvent(None, 0, 0, None) +s=win32evtlog.EvtSubscribe('System', win32evtlog.EvtSubscribeStartAtOldestRecord, SignalEvent=h, Query=query_text) + +while 1: + while 1: + events=win32evtlog.EvtNext(s, 10) + if len(events)==0: + break + ##for event in events: + ## print(win32evtlog.EvtRender(event, win32evtlog.EvtRenderEventXml)) + print('retrieved %s events' %len(events)) + while 1: + print ('waiting...') + w=win32event.WaitForSingleObjectEx(h, 2000, True) + if w==win32con.WAIT_OBJECT_0: + break + diff --git a/venv/Lib/site-packages/win32/Demos/EvtSubscribe_push.py b/venv/Lib/site-packages/win32/Demos/EvtSubscribe_push.py new file mode 100644 index 00000000..2e8db383 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/EvtSubscribe_push.py @@ -0,0 +1,16 @@ +## Demonstrates a "push" subscription with a callback function +import win32evtlog +query_text='*[System[Provider[@Name="Microsoft-Windows-Winlogon"]]]' + +def c(reason, context, evt): + if reason==win32evtlog.EvtSubscribeActionError: + print('EvtSubscribeActionError') + elif reason==win32evtlog.EvtSubscribeActionDeliver: + print('EvtSubscribeActionDeliver') + else: + print('??? 
Unknown action ???', reason) + context.append(win32evtlog.EvtRender(evt, win32evtlog.EvtRenderEventXml)) + return 0 + +evttext=[] +s=win32evtlog.EvtSubscribe('System', win32evtlog.EvtSubscribeStartAtOldestRecord, Query='*', Callback=c, Context=evttext) diff --git a/venv/Lib/site-packages/win32/Demos/FileSecurityTest.py b/venv/Lib/site-packages/win32/Demos/FileSecurityTest.py new file mode 100644 index 00000000..6388e5f8 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/FileSecurityTest.py @@ -0,0 +1,80 @@ +# Contributed by Kelly Kranabetter. +import os, sys +import win32security, ntsecuritycon, pywintypes, winerror + +# get security information +#name=r"c:\autoexec.bat" +#name= r"g:\!workgrp\lim" +name=sys.argv[0] + +if not os.path.exists(name): + print(name, "does not exist!") + sys.exit() + +print("On file " , name, "\n") + +# get owner SID +print("OWNER") +try: + sd= win32security.GetFileSecurity(name, win32security.OWNER_SECURITY_INFORMATION) + sid= sd.GetSecurityDescriptorOwner() + print(" ", win32security.LookupAccountSid(None, sid)) +except pywintypes.error as exc: + # in automation and network shares we see: + # pywintypes.error: (1332, 'LookupAccountName', 'No mapping between account names and security IDs was done.') + if exc.winerror != winerror.ERROR_NONE_MAPPED: + raise + print("No owner information is available") + +# get group SID +try: + print("GROUP") + sd= win32security.GetFileSecurity(name, win32security.GROUP_SECURITY_INFORMATION) + sid= sd.GetSecurityDescriptorGroup() + print(" ", win32security.LookupAccountSid(None, sid)) +except pywintypes.error as exc: + if exc.winerror != winerror.ERROR_NONE_MAPPED: + raise + print("No group information is available") + +# get ACEs +sd= win32security.GetFileSecurity(name, win32security.DACL_SECURITY_INFORMATION) +dacl= sd.GetSecurityDescriptorDacl() +if dacl == None: + print("No Discretionary ACL") +else: + for ace_no in range(0, dacl.GetAceCount()): + ace= dacl.GetAce(ace_no) + print("ACE", ace_no) + + print(" -Type") + for i in ("ACCESS_ALLOWED_ACE_TYPE", "ACCESS_DENIED_ACE_TYPE", "SYSTEM_AUDIT_ACE_TYPE", "SYSTEM_ALARM_ACE_TYPE"): + if getattr(ntsecuritycon, i) == ace[0][0]: + print(" ", i) + + print(" -Flags", hex(ace[0][1])) + for i in ("OBJECT_INHERIT_ACE", "CONTAINER_INHERIT_ACE", "NO_PROPAGATE_INHERIT_ACE", "INHERIT_ONLY_ACE", "SUCCESSFUL_ACCESS_ACE_FLAG", "FAILED_ACCESS_ACE_FLAG"): + if getattr(ntsecuritycon, i) & ace[0][1] == getattr(ntsecuritycon, i): + print(" ", i) + + print(" -mask", hex(ace[1])) + + # files and directories do permissions differently + permissions_file= ("DELETE", "READ_CONTROL", "WRITE_DAC", "WRITE_OWNER", "SYNCHRONIZE", "FILE_GENERIC_READ", "FILE_GENERIC_WRITE", "FILE_GENERIC_EXECUTE", "FILE_DELETE_CHILD") + permissions_dir= ("DELETE", "READ_CONTROL", "WRITE_DAC", "WRITE_OWNER", "SYNCHRONIZE", "FILE_ADD_SUBDIRECTORY", "FILE_ADD_FILE", "FILE_DELETE_CHILD", "FILE_LIST_DIRECTORY", "FILE_TRAVERSE", "FILE_READ_ATTRIBUTES", "FILE_WRITE_ATTRIBUTES", "FILE_READ_EA", "FILE_WRITE_EA") + permissions_dir_inherit= ("DELETE", "READ_CONTROL", "WRITE_DAC", "WRITE_OWNER", "SYNCHRONIZE", "GENERIC_READ", "GENERIC_WRITE", "GENERIC_EXECUTE", "GENERIC_ALL") + if os.path.isfile(name): + permissions= permissions_file + else: + permissions= permissions_dir + # directories also contain an ACE that is inherited by children (files) within them + if ace[0][1] & ntsecuritycon.OBJECT_INHERIT_ACE == ntsecuritycon.OBJECT_INHERIT_ACE and ace[0][1] & ntsecuritycon.INHERIT_ONLY_ACE == ntsecuritycon.INHERIT_ONLY_ACE: + permissions= 
permissions_dir_inherit + + calc_mask= 0 # calculate the mask so we can see if we are printing all of the permissions + for i in permissions: + if getattr(ntsecuritycon, i) & ace[1] == getattr(ntsecuritycon, i): + calc_mask= calc_mask | getattr(ntsecuritycon, i) + print(" ", i) + print(" ", "Calculated Check Mask=", hex(calc_mask)) + print(" -SID\n ", win32security.LookupAccountSid(None, ace[2])) diff --git a/venv/Lib/site-packages/win32/Demos/GetSaveFileName.py b/venv/Lib/site-packages/win32/Demos/GetSaveFileName.py new file mode 100644 index 00000000..6fcc6879 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/GetSaveFileName.py @@ -0,0 +1,36 @@ +import win32gui, win32con, os + +filter='Python Scripts\0*.py;*.pyw;*.pys\0Text files\0*.txt\0' +customfilter='Other file types\0*.*\0' + +fname, customfilter, flags=win32gui.GetSaveFileNameW( + InitialDir=os.environ['temp'], + Flags=win32con.OFN_ALLOWMULTISELECT|win32con.OFN_EXPLORER, + File='somefilename', DefExt='py', + Title='GetSaveFileNameW', + Filter=filter, + CustomFilter=customfilter, + FilterIndex=1) + +print('save file names:', repr(fname)) +print('filter used:', repr(customfilter)) +print('Flags:', flags) +for k,v in list(win32con.__dict__.items()): + if k.startswith('OFN_') and flags & v: + print('\t'+k) + +fname, customfilter, flags=win32gui.GetOpenFileNameW( + InitialDir=os.environ['temp'], + Flags=win32con.OFN_ALLOWMULTISELECT|win32con.OFN_EXPLORER, + File='somefilename', DefExt='py', + Title='GetOpenFileNameW', + Filter=filter, + CustomFilter=customfilter, + FilterIndex=0) + +print('open file names:', repr(fname)) +print('filter used:', repr(customfilter)) +print('Flags:', flags) +for k,v in list(win32con.__dict__.items()): + if k.startswith('OFN_') and flags & v: + print('\t'+k) diff --git a/venv/Lib/site-packages/win32/Demos/NetValidatePasswordPolicy.py b/venv/Lib/site-packages/win32/Demos/NetValidatePasswordPolicy.py new file mode 100644 index 00000000..16ae6e0f --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/NetValidatePasswordPolicy.py @@ -0,0 +1,101 @@ +"""A demo of using win32net.NetValidatePasswordPolicy. + +Example usage: + +% NetValidatePasswordPolicy.py --password=foo change +which might return: + +> Result of 'change' validation is 0: The operation completed successfully. + +or depending on the policy: + +> Result of 'change' validation is 2245: The password does not meet the +> password policy requirements. Check the minimum password length, +> password complexity and password history requirements. + +Adding --user doesn't seem to change the output (even the PasswordLastSet seen +when '-f' is used doesn't depend on the username), but theoretically it will +also check the password history for the specified user. 
+ +% NetValidatePasswordPolicy.py auth + +which always (with and without '-m') seems to return: + +> Result of 'auth' validation is 2701: Password must change at next logon +""" + +import sys +import win32api +import win32net, win32netcon + +import optparse +from pprint import pprint + +def main(): + parser = optparse.OptionParser("%prog [options] auth|change ...", + description="A win32net.NetValidatePasswordPolicy demo.") + + parser.add_option("-u", "--username", + action="store", + help="The username to pass to the function (only for the " + "change command") + + parser.add_option("-p", "--password", + action="store", + help="The clear-text password to pass to the function " + "(only for the 'change' command)") + + parser.add_option("-m", "--password-matched", + action="store_false", default=True, + help="Used to specify the password does NOT match (ie, " + "uses False for the PasswordMatch/PasswordMatched " + "arg, both 'auth' and 'change' commands)") + + parser.add_option("-s", "--server", + action="store", + help="The name of the server to execute the command on") + + parser.add_option("-f", "--show_fields", + action="store_true", default=False, + help="Print the NET_VALIDATE_PERSISTED_FIELDS returned") + + options, args = parser.parse_args() + + if not args: + args = ["auth"] + + for arg in args: + if arg == "auth": + input = {"PasswordMatched": options.password_matched, + } + val_type = win32netcon.NetValidateAuthentication + elif arg == "change": + input = {"ClearPassword": options.password, + "PasswordMatch": options.password_matched, + "UserAccountName": options.username, + } + val_type = win32netcon.NetValidatePasswordChange + else: + parser.error("Invalid arg - must be 'auth' or 'change'") + + try: + fields, status = win32net.NetValidatePasswordPolicy(options.server, + None, val_type, input) + except NotImplementedError: + print("NetValidatePasswordPolicy not implemented on this platform.") + return 1 + except win32net.error as exc: + print("NetValidatePasswordPolicy failed: ", exc) + return 1 + + if options.show_fields: + print("NET_VALIDATE_PERSISTED_FIELDS fields:") + pprint(fields) + + print("Result of %r validation is %d: %s" % \ + (arg, status, win32api.FormatMessage(status).strip())) + + return 0 + +if __name__=='__main__': + sys.exit(main()) diff --git a/venv/Lib/site-packages/win32/Demos/OpenEncryptedFileRaw.py b/venv/Lib/site-packages/win32/Demos/OpenEncryptedFileRaw.py new file mode 100644 index 00000000..8bd5a7b2 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/OpenEncryptedFileRaw.py @@ -0,0 +1,64 @@ +import win32file, win32api, winerror +import os + +def ReadCallback(input_buffer, data, buflen): + fnamein, fnameout, f = data + ## print fnamein, fnameout, buflen + f.write(input_buffer) + ## python 2.3 throws an error if return value is a plain int + return winerror.ERROR_SUCCESS + +def WriteCallback(output_buffer, data, buflen): + fnamebackup, fnameout, f = data + file_data=f.read(buflen) + ## returning 0 as len terminates WriteEncryptedFileRaw + output_len=len(file_data) + output_buffer[:output_len]=file_data + return winerror.ERROR_SUCCESS, output_len + + +tmp_dir=win32api.GetTempPath() +dst_dir=win32api.GetTempFileName(tmp_dir,'oef')[0] +os.remove(dst_dir) +os.mkdir(dst_dir) +print('Destination dir:', dst_dir) + +## create an encrypted file +fname=win32api.GetTempFileName(dst_dir,'ref')[0] +print('orig file:',fname) +f=open(fname,'w') +f.write('xxxxxxxxxxxxxxxx\n'*32768) +f.close() +## add a couple of extra data streams +f=open(fname+':stream_y','w') 
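+## 'fname:stream_y' (and ':stream_z' below) opens an NTFS alternate data stream on the
+## same file; the extra streams give the raw encrypted backup/restore further down more
+## than just the default data stream to carry across.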
+f.write('yyyyyyyyyyyyyyyy\n'*32768) +f.close() +f=open(fname+':stream_z','w') +f.write('zzzzzzzzzzzzzzzz\n'*32768) +f.close() +win32file.EncryptFile(fname) + +## backup raw data of encrypted file +bkup_fname=win32api.GetTempFileName(dst_dir,'bef')[0] +print('backup file:', bkup_fname) +f=open(bkup_fname,'wb') +ctxt=win32file.OpenEncryptedFileRaw(fname,0) +try: + win32file.ReadEncryptedFileRaw(ReadCallback, (fname,bkup_fname,f), ctxt) +finally: + ## if context is not closed, file remains locked even if calling process is killed + win32file.CloseEncryptedFileRaw(ctxt) + f.close() + +## restore data from backup to new encrypted file +dst_fname=win32api.GetTempFileName(dst_dir,'wef')[0] +print('restored file:', dst_fname) +f=open(bkup_fname,'rb') +ctxtout=win32file.OpenEncryptedFileRaw(dst_fname, win32file.CREATE_FOR_IMPORT) +try: + win32file.WriteEncryptedFileRaw(WriteCallback, (bkup_fname,dst_fname,f), ctxtout) +finally: + win32file.CloseEncryptedFileRaw(ctxtout) + f.close() + + diff --git a/venv/Lib/site-packages/win32/Demos/RegCreateKeyTransacted.py b/venv/Lib/site-packages/win32/Demos/RegCreateKeyTransacted.py new file mode 100644 index 00000000..d883cf2c --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/RegCreateKeyTransacted.py @@ -0,0 +1,43 @@ +import win32api, win32con, win32transaction + +keyname='Pywin32 test transacted registry functions' +subkeyname='test transacted subkey' +classname='Transacted Class' + +trans=win32transaction.CreateTransaction(Description='test RegCreateKeyTransacted') +key, disp=win32api.RegCreateKeyEx(win32con.HKEY_CURRENT_USER, keyname, + samDesired=win32con.KEY_ALL_ACCESS, Class=classname) +## clean up any existing keys +for subk in win32api.RegEnumKeyExW(key): + win32api.RegDeleteKey(key, subk[0]) + +## reopen key in transacted mode +transacted_key=win32api.RegOpenKeyTransacted(Key=win32con.HKEY_CURRENT_USER, SubKey=keyname, + Transaction=trans, samDesired=win32con.KEY_ALL_ACCESS) +subkey, disp=win32api.RegCreateKeyEx(transacted_key, subkeyname, Transaction=trans, + samDesired=win32con.KEY_ALL_ACCESS, Class=classname) + +## Newly created key should not be visible from non-transacted handle +subkeys=[s[0] for s in win32api.RegEnumKeyExW(key)] +assert subkeyname not in subkeys + +transacted_subkeys=[s[0] for s in win32api.RegEnumKeyExW(transacted_key)] +assert subkeyname in transacted_subkeys + +## Key should be visible to non-transacted handle after commit +win32transaction.CommitTransaction(trans) +subkeys=[s[0] for s in win32api.RegEnumKeyExW(key)] +assert subkeyname in subkeys + +## test transacted delete +del_trans=win32transaction.CreateTransaction(Description='test RegDeleteKeyTransacted') +win32api.RegDeleteKeyEx(key, subkeyname, Transaction=del_trans) +## subkey should still show up for non-transacted handle +subkeys=[s[0] for s in win32api.RegEnumKeyExW(key)] +assert subkeyname in subkeys +## ... 
and should be gone after commit +win32transaction.CommitTransaction(del_trans) +subkeys=[s[0] for s in win32api.RegEnumKeyExW(key)] +assert subkeyname not in subkeys + +win32api.RegDeleteKey(win32con.HKEY_CURRENT_USER, keyname) diff --git a/venv/Lib/site-packages/win32/Demos/RegRestoreKey.py b/venv/Lib/site-packages/win32/Demos/RegRestoreKey.py new file mode 100644 index 00000000..f93e0731 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/RegRestoreKey.py @@ -0,0 +1,38 @@ +import win32api, win32security +import win32con, ntsecuritycon, winnt +import os + +temp_dir=win32api.GetTempPath() +fname=win32api.GetTempFileName(temp_dir,'rsk')[0] +print(fname) +## file can't exist +os.remove(fname) + +## enable backup and restore privs +required_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_BACKUP_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED) + ) +ph = win32api.GetCurrentProcess() +th = win32security.OpenProcessToken(ph, win32con.TOKEN_READ|win32con.TOKEN_ADJUST_PRIVILEGES) +adjusted_privs=win32security.AdjustTokenPrivileges(th,0,required_privs) + +try: + sa=win32security.SECURITY_ATTRIBUTES() + my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0] + sa.SECURITY_DESCRIPTOR.SetSecurityDescriptorOwner(my_sid,0) + + k, disp=win32api.RegCreateKeyEx(win32con.HKEY_CURRENT_USER, 'Python test key', SecurityAttributes=sa, + samDesired=win32con.KEY_ALL_ACCESS, Class='some class', Options=0) + win32api.RegSetValue(k, None, win32con.REG_SZ, 'Default value for python test key') + + subk, disp=win32api.RegCreateKeyEx(k, 'python test subkey', SecurityAttributes=sa, + samDesired=win32con.KEY_ALL_ACCESS, Class='some other class', Options=0) + win32api.RegSetValue(subk, None, win32con.REG_SZ, 'Default value for subkey') + + win32api.RegSaveKeyEx(k, fname, Flags=winnt.REG_STANDARD_FORMAT, SecurityAttributes=sa) + + restored_key, disp=win32api.RegCreateKeyEx(win32con.HKEY_CURRENT_USER, 'Python test key(restored)', SecurityAttributes=sa, + samDesired=win32con.KEY_ALL_ACCESS, Class='restored class', Options=0) + win32api.RegRestoreKey(restored_key, fname) +finally: + win32security.AdjustTokenPrivileges(th, 0, adjusted_privs) \ No newline at end of file diff --git a/venv/Lib/site-packages/win32/Demos/SystemParametersInfo.py b/venv/Lib/site-packages/win32/Demos/SystemParametersInfo.py new file mode 100644 index 00000000..9db52544 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/SystemParametersInfo.py @@ -0,0 +1,180 @@ +import win32gui, win32con, win32api, time, os, glob +## some of these tests will fail for systems prior to XP + +for pname in( + ## Set actions all take an unsigned int in pvParam + "SPI_GETMOUSESPEED", "SPI_GETACTIVEWNDTRKTIMEOUT", "SPI_GETCARETWIDTH", + "SPI_GETFOREGROUNDFLASHCOUNT", "SPI_GETFOREGROUNDLOCKTIMEOUT", + ## Set actions all take an unsigned int in uiParam + "SPI_GETWHEELSCROLLLINES", "SPI_GETKEYBOARDDELAY", + "SPI_GETKEYBOARDSPEED", + "SPI_GETMOUSEHOVERHEIGHT", "SPI_GETMOUSEHOVERWIDTH", + "SPI_GETMOUSEHOVERTIME", "SPI_GETSCREENSAVETIMEOUT", "SPI_GETMENUSHOWDELAY", + "SPI_GETLOWPOWERTIMEOUT", "SPI_GETPOWEROFFTIMEOUT", "SPI_GETBORDER", + ## below are winxp only: + "SPI_GETFONTSMOOTHINGCONTRAST", "SPI_GETFONTSMOOTHINGTYPE", "SPI_GETFOCUSBORDERHEIGHT", + "SPI_GETFOCUSBORDERWIDTH", "SPI_GETMOUSECLICKLOCKTIME"): + print(pname) + cget=getattr(win32con,pname) + cset=getattr(win32con,pname.replace('_GET','_SET')) + 
orig_value=win32gui.SystemParametersInfo(cget) + print('\toriginal setting:',orig_value) + win32gui.SystemParametersInfo(cset, orig_value+1) + new_value=win32gui.SystemParametersInfo(cget) + print('\tnew value:',new_value) + # On Vista, some of these values seem to be ignored. So only "fail" if + # the new value isn't what we set or the original + if new_value!=orig_value+1: + assert new_value == orig_value + print("Strange - setting %s seems to have been ignored" % (pname,)) + win32gui.SystemParametersInfo(cset, orig_value) + assert win32gui.SystemParametersInfo(cget)==orig_value + + + +# these take a boolean value in pvParam +# change to opposite, check that it was changed and change back +for pname in ("SPI_GETFLATMENU","SPI_GETDROPSHADOW","SPI_GETKEYBOARDCUES","SPI_GETMENUFADE", + "SPI_GETCOMBOBOXANIMATION", "SPI_GETCURSORSHADOW", "SPI_GETGRADIENTCAPTIONS", "SPI_GETHOTTRACKING", + "SPI_GETLISTBOXSMOOTHSCROLLING", "SPI_GETMENUANIMATION", "SPI_GETSELECTIONFADE", + "SPI_GETTOOLTIPANIMATION", "SPI_GETTOOLTIPFADE", "SPI_GETUIEFFECTS", "SPI_GETACTIVEWINDOWTRACKING", + "SPI_GETACTIVEWNDTRKZORDER"): + print(pname) + cget=getattr(win32con,pname) + cset=getattr(win32con,pname.replace('_GET','_SET')) + orig_value=win32gui.SystemParametersInfo(cget) + print(orig_value) + win32gui.SystemParametersInfo(cset, not orig_value) + new_value=win32gui.SystemParametersInfo(cget) + print(new_value) + assert orig_value!=new_value + win32gui.SystemParametersInfo(cset, orig_value) + assert win32gui.SystemParametersInfo(cget)==orig_value + + + +# these take a boolean in uiParam +# could combine with above section now that SystemParametersInfo only takes a single parameter +for pname in ("SPI_GETFONTSMOOTHING","SPI_GETICONTITLEWRAP","SPI_GETBEEP","SPI_GETBLOCKSENDINPUTRESETS", + "SPI_GETKEYBOARDPREF","SPI_GETSCREENSAVEACTIVE","SPI_GETMENUDROPALIGNMENT", + "SPI_GETDRAGFULLWINDOWS", "SPI_GETSHOWIMEUI"): + cget=getattr(win32con,pname) + cset=getattr(win32con,pname.replace('_GET','_SET')) + orig_value=win32gui.SystemParametersInfo(cget) + win32gui.SystemParametersInfo(cset, not orig_value) + new_value=win32gui.SystemParametersInfo(cget) + # Some of these also can't be changed (eg, SPI_GETSCREENSAVEACTIVE) so + # don't actually get upset. 
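+    # Each SPI_GET* constant has a matching SPI_SET* constant (derived above by string
+    # replacement), so the pattern is: read the current value, toggle it, report whether
+    # the platform honoured the change, then restore the original so the demo leaves the
+    # user's settings untouched.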
+ if orig_value!=new_value: + print("successfully toggled", pname, "from", orig_value, "to", new_value) + else: + print("couldn't toggle", pname, "from", orig_value) + win32gui.SystemParametersInfo(cset, orig_value) + assert win32gui.SystemParametersInfo(cget)==orig_value + + + +print("SPI_GETICONTITLELOGFONT") +lf=win32gui.SystemParametersInfo(win32con.SPI_GETICONTITLELOGFONT) +orig_height=lf.lfHeight +orig_italic=lf.lfItalic +print('Height:', orig_height, 'Italic:',orig_italic) +lf.lfHeight+=2 +lf.lfItalic=not lf.lfItalic +win32gui.SystemParametersInfo(win32con.SPI_SETICONTITLELOGFONT, lf) +new_lf=win32gui.SystemParametersInfo(win32con.SPI_GETICONTITLELOGFONT) +print('New Height:', new_lf.lfHeight, 'New Italic:',new_lf.lfItalic) +assert new_lf.lfHeight==orig_height+2 +assert new_lf.lfItalic!=orig_italic + +lf.lfHeight=orig_height +lf.lfItalic=orig_italic +win32gui.SystemParametersInfo(win32con.SPI_SETICONTITLELOGFONT, lf) +new_lf=win32gui.SystemParametersInfo(win32con.SPI_GETICONTITLELOGFONT) +assert new_lf.lfHeight==orig_height +assert new_lf.lfItalic==orig_italic + + + +print("SPI_GETMOUSEHOVERWIDTH, SPI_GETMOUSEHOVERHEIGHT, SPI_GETMOUSEHOVERTIME") +w=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERWIDTH) +h=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERHEIGHT) +t=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME) +print('w,h,t:', w,h,t) + +win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERWIDTH,w+1) +win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERHEIGHT,h+2) +win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERTIME,t+3) +new_w=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERWIDTH) +new_h=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERHEIGHT) +new_t=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME) +print('new w,h,t:', new_w, new_h, new_t) +assert new_w==w+1 +assert new_h==h+2 +assert new_t==t+3 + +win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERWIDTH,w) +win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERHEIGHT,h) +win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERTIME,t) +new_w=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERWIDTH) +new_h=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERHEIGHT) +new_t=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME) +assert new_w==w +assert new_h==h +assert new_t==t + + + +print("SPI_SETDOUBLECLKWIDTH, SPI_SETDOUBLECLKHEIGHT") +x=win32api.GetSystemMetrics(win32con.SM_CXDOUBLECLK) +y=win32api.GetSystemMetrics(win32con.SM_CYDOUBLECLK) +print('x,y:', x, y) +win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKWIDTH, x+1) +win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKHEIGHT, y+2) +new_x=win32api.GetSystemMetrics(win32con.SM_CXDOUBLECLK) +new_y=win32api.GetSystemMetrics(win32con.SM_CYDOUBLECLK) +print('new x,y:', new_x, new_y) +assert new_x==x+1 +assert new_y==y+2 +win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKWIDTH, x) +win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKHEIGHT, y) +new_x=win32api.GetSystemMetrics(win32con.SM_CXDOUBLECLK) +new_y=win32api.GetSystemMetrics(win32con.SM_CYDOUBLECLK) +assert new_x==x +assert new_y==y + + + +print("SPI_SETDRAGWIDTH, SPI_SETDRAGHEIGHT") +dw=win32api.GetSystemMetrics(win32con.SM_CXDRAG) +dh=win32api.GetSystemMetrics(win32con.SM_CYDRAG) +print('dw,dh:', dw, dh) +win32gui.SystemParametersInfo(win32con.SPI_SETDRAGWIDTH,dw+1) +win32gui.SystemParametersInfo(win32con.SPI_SETDRAGHEIGHT,dh+2) +new_dw=win32api.GetSystemMetrics(win32con.SM_CXDRAG) 
+new_dh=win32api.GetSystemMetrics(win32con.SM_CYDRAG) +print('new dw,dh:', new_dw, new_dh) +assert new_dw==dw+1 +assert new_dh==dh+2 +win32gui.SystemParametersInfo(win32con.SPI_SETDRAGWIDTH,dw) +win32gui.SystemParametersInfo(win32con.SPI_SETDRAGHEIGHT,dh) +new_dw=win32api.GetSystemMetrics(win32con.SM_CXDRAG) +new_dh=win32api.GetSystemMetrics(win32con.SM_CYDRAG) +assert new_dw==dw +assert new_dh==dh + + + +orig_wallpaper=win32gui.SystemParametersInfo(Action=win32con.SPI_GETDESKWALLPAPER) +print('Original: ',orig_wallpaper) +for bmp in glob.glob(os.path.join(os.environ['windir'],'*.bmp')): + print(bmp) + win32gui.SystemParametersInfo(win32con.SPI_SETDESKWALLPAPER, Param=bmp) + print(win32gui.SystemParametersInfo(Action=win32con.SPI_GETDESKWALLPAPER)) + time.sleep(1) + +win32gui.SystemParametersInfo(win32con.SPI_SETDESKWALLPAPER, Param=orig_wallpaper) + + + + diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/BackupRead_BackupWrite.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/BackupRead_BackupWrite.cpython-36.pyc new file mode 100644 index 00000000..9e4dba25 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/BackupRead_BackupWrite.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/BackupSeek_streamheaders.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/BackupSeek_streamheaders.cpython-36.pyc new file mode 100644 index 00000000..2e458e61 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/BackupSeek_streamheaders.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/CopyFileEx.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/CopyFileEx.cpython-36.pyc new file mode 100644 index 00000000..21e238c7 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/CopyFileEx.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/CreateFileTransacted_MiniVersion.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/CreateFileTransacted_MiniVersion.cpython-36.pyc new file mode 100644 index 00000000..f87857c9 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/CreateFileTransacted_MiniVersion.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/EvtFormatMessage.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/EvtFormatMessage.cpython-36.pyc new file mode 100644 index 00000000..2d7ced78 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/EvtFormatMessage.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/EvtSubscribe_pull.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/EvtSubscribe_pull.cpython-36.pyc new file mode 100644 index 00000000..482465bc Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/EvtSubscribe_pull.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/EvtSubscribe_push.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/EvtSubscribe_push.cpython-36.pyc new file mode 100644 index 00000000..8d7d351e Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/EvtSubscribe_push.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/FileSecurityTest.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/FileSecurityTest.cpython-36.pyc new file mode 100644 index 00000000..e8a4545a Binary files /dev/null and 
b/venv/Lib/site-packages/win32/Demos/__pycache__/FileSecurityTest.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/GetSaveFileName.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/GetSaveFileName.cpython-36.pyc new file mode 100644 index 00000000..7c3f3568 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/GetSaveFileName.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/NetValidatePasswordPolicy.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/NetValidatePasswordPolicy.cpython-36.pyc new file mode 100644 index 00000000..b3e348bb Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/NetValidatePasswordPolicy.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/OpenEncryptedFileRaw.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/OpenEncryptedFileRaw.cpython-36.pyc new file mode 100644 index 00000000..6e83471d Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/OpenEncryptedFileRaw.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/RegCreateKeyTransacted.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/RegCreateKeyTransacted.cpython-36.pyc new file mode 100644 index 00000000..4f9089ca Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/RegCreateKeyTransacted.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/RegRestoreKey.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/RegRestoreKey.cpython-36.pyc new file mode 100644 index 00000000..82b303ee Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/RegRestoreKey.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/SystemParametersInfo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/SystemParametersInfo.cpython-36.pyc new file mode 100644 index 00000000..86962b0f Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/SystemParametersInfo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/cerapi.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/cerapi.cpython-36.pyc new file mode 100644 index 00000000..4a79ffb8 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/cerapi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/desktopmanager.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/desktopmanager.cpython-36.pyc new file mode 100644 index 00000000..37ebf855 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/desktopmanager.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/eventLogDemo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/eventLogDemo.cpython-36.pyc new file mode 100644 index 00000000..34273e70 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/eventLogDemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/getfilever.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/getfilever.cpython-36.pyc new file mode 100644 index 00000000..01839357 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/getfilever.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/mmapfile_demo.cpython-36.pyc 
b/venv/Lib/site-packages/win32/Demos/__pycache__/mmapfile_demo.cpython-36.pyc new file mode 100644 index 00000000..60acdf33 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/mmapfile_demo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/print_desktop.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/print_desktop.cpython-36.pyc new file mode 100644 index 00000000..178ebbf2 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/print_desktop.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/rastest.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/rastest.cpython-36.pyc new file mode 100644 index 00000000..245b5da9 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/rastest.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/timer_demo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/timer_demo.cpython-36.pyc new file mode 100644 index 00000000..23c00347 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/timer_demo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32clipboardDemo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32clipboardDemo.cpython-36.pyc new file mode 100644 index 00000000..8e68e9fd Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32clipboardDemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32clipboard_bitmapdemo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32clipboard_bitmapdemo.cpython-36.pyc new file mode 100644 index 00000000..d0432b23 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32clipboard_bitmapdemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32comport_demo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32comport_demo.cpython-36.pyc new file mode 100644 index 00000000..78a9447f Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32comport_demo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32console_demo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32console_demo.cpython-36.pyc new file mode 100644 index 00000000..9b480ddc Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32console_demo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32cred_demo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32cred_demo.cpython-36.pyc new file mode 100644 index 00000000..2f5cd652 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32cred_demo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32fileDemo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32fileDemo.cpython-36.pyc new file mode 100644 index 00000000..78ed9ca2 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32fileDemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_demo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_demo.cpython-36.pyc new file mode 100644 index 00000000..2f94f861 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_demo.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_devicenotify.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_devicenotify.cpython-36.pyc new file mode 100644 index 00000000..b7f4259c Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_devicenotify.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_dialog.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_dialog.cpython-36.pyc new file mode 100644 index 00000000..28376a23 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_dialog.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_menu.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_menu.cpython-36.pyc new file mode 100644 index 00000000..4ffb0c30 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_menu.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_taskbar.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_taskbar.cpython-36.pyc new file mode 100644 index 00000000..e4986146 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32gui_taskbar.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32netdemo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32netdemo.cpython-36.pyc new file mode 100644 index 00000000..878379ac Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32netdemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32rcparser_demo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32rcparser_demo.cpython-36.pyc new file mode 100644 index 00000000..05866c62 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32rcparser_demo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32servicedemo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32servicedemo.cpython-36.pyc new file mode 100644 index 00000000..03f9ce20 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32servicedemo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/win32ts_logoff_disconnected.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/win32ts_logoff_disconnected.cpython-36.pyc new file mode 100644 index 00000000..dedbf63f Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/win32ts_logoff_disconnected.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/__pycache__/winprocess.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/__pycache__/winprocess.cpython-36.pyc new file mode 100644 index 00000000..b9efa3ff Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/__pycache__/winprocess.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/c_extension/__pycache__/setup.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/c_extension/__pycache__/setup.cpython-36.pyc new file mode 100644 index 00000000..67dc4b1e Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/c_extension/__pycache__/setup.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/c_extension/setup.py b/venv/Lib/site-packages/win32/Demos/c_extension/setup.py new file mode 100644 index 00000000..be59df34 --- /dev/null +++ 
b/venv/Lib/site-packages/win32/Demos/c_extension/setup.py @@ -0,0 +1,23 @@ +# A sample distutils script to show to build your own +# extension module which extends pywintypes or pythoncom. +# +# Use 'python setup.py build' to build this extension. +import os +from distutils.core import setup, Extension +from distutils.sysconfig import get_python_lib + +sources = ["win32_extension.cpp"] + +# Specify the directory where the PyWin32 .h and .lib files are installed. +# If you are doing a win32com extension, you will also need to add +# win32com\Include and win32com\Libs. +ext = Extension("win32_extension", sources, + include_dirs = [os.path.join(get_python_lib(), "win32", "Include")], + library_dirs = [os.path.join(get_python_lib(), "win32", "Libs")], + ) + +setup( + name="win32 extension sample", + version="0.1", + ext_modules=[ext], +) diff --git a/venv/Lib/site-packages/win32/Demos/cerapi.py b/venv/Lib/site-packages/win32/Demos/cerapi.py new file mode 100644 index 00000000..5bcff0be --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/cerapi.py @@ -0,0 +1,213 @@ +# A demo of the Windows CE Remote API +# +# This connects to a CE device, and interacts with it. + +import wincerapi +import win32event +import win32api +import win32con +import os +import sys +import getopt + + +def DumpPythonRegistry(): + try: + h = wincerapi.CeRegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE, "Software\\Python\\PythonCore\\%s\\PythonPath" % sys.winver) + except win32api.error: + print("The remote device does not appear to have Python installed") + return 0 + path, typ = wincerapi.CeRegQueryValueEx(h, None) + print("The remote PythonPath is '%s'" % (str(path), )) + h.Close() + return 1 + +def DumpRegistry(root, level=0): + # A recursive dump of the remote registry to test most functions. + h = wincerapi.CeRegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE, None) + level_prefix = " " * level + index = 0 + # Enumerate values. + while 1: + try: + name, data, typ = wincerapi.CeRegEnumValue(root, index) + except win32api.error: + break + print("%s%s=%s" % (level_prefix, name, repr(str(data)))) + index = index+1 + # Now enumerate all keys. + index=0 + while 1: + try: + name, klass = wincerapi.CeRegEnumKeyEx(root, index) + except win32api.error: + break + print("%s%s\\" % (level_prefix, name)) + subkey = wincerapi.CeRegOpenKeyEx(root, name) + DumpRegistry(subkey, level+1) + index = index+1 + +def DemoCopyFile(): + # Create a file on the device, and write a string. + cefile = wincerapi.CeCreateFile("TestPython", win32con.GENERIC_WRITE, 0, None, win32con.OPEN_ALWAYS, 0, None) + wincerapi.CeWriteFile(cefile, "Hello from Python") + cefile.Close() + # reopen the file and check the data. 
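+    # CeCreateFile/CeReadFile are the RAPI counterparts of the desktop file APIs and
+    # operate on the connected CE device's object store, not the local filesystem.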
+ cefile = wincerapi.CeCreateFile("TestPython", win32con.GENERIC_READ, 0, None, win32con.OPEN_EXISTING, 0, None) + if wincerapi.CeReadFile(cefile, 100) != "Hello from Python": + print("Couldnt read the data from the device!") + cefile.Close() + # Delete the test file + wincerapi.CeDeleteFile("TestPython") + print("Created, wrote to, read from and deleted a test file!") + +def DemoCreateProcess(): + try: + hp, ht, pid, tid = wincerapi.CeCreateProcess("Windows\\Python.exe", "", None, None, 0, 0, None, "", None) + + # Not necessary, except to see if handle closing raises an exception + # (if auto-closed, the error is suppressed) + hp.Close() + ht.Close() + print("Python is running on the remote device!") + except win32api.error as xxx_todo_changeme1: + (hr, fn, msg) = xxx_todo_changeme1.args + print("Couldnt execute remote process -", msg) + +def DumpRemoteMachineStatus(): + ACLineStatus, BatteryFlag, BatteryLifePercent, BatteryLifeTime, BatteryFullLifeTime, BackupBatteryFlag, BackupBatteryLifePercent, BackupBatteryLifeTime, BackupBatteryLifeTime = \ + wincerapi.CeGetSystemPowerStatusEx() + if ACLineStatus: + power = "AC" + else: + power = "battery" + if BatteryLifePercent==255: + batPerc = "unknown" + else: + batPerc = BatteryLifePercent + print("The batteries are at %s%%, and is currently being powered by %s" % (batPerc, power)) + + memLoad, totalPhys, availPhys, totalPage, availPage, totalVirt, availVirt = \ + wincerapi.CeGlobalMemoryStatus() + + print("The memory is %d%% utilized." % (memLoad)) + print("%-20s%-10s%-10s" % ("", "Total", "Avail")) + print("%-20s%-10s%-10s" % ("Physical Memory", totalPhys, availPhys)) + print("%-20s%-10s%-10s" % ("Virtual Memory", totalVirt, availVirt)) + print("%-20s%-10s%-10s" % ("Paging file", totalPage, availPage)) + + + storeSize, freeSize = wincerapi.CeGetStoreInformation() + print("%-20s%-10s%-10s" % ("File store", storeSize, freeSize)) + + print("The CE temp path is", wincerapi.CeGetTempPath()) + print("The system info for the device is", wincerapi.CeGetSystemInfo()) + +def DumpRemoteFolders(): + # Dump all special folders possible. 
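+    # wincerapi exposes the CSIDL_* special-folder ids as module-level constants, so
+    # scanning its __dict__ covers them all; ids the device does not support raise
+    # win32api.error and are skipped by the handler below.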
+ for name, val in list(wincerapi.__dict__.items()): + if name[:6]=="CSIDL_": + try: + loc = str(wincerapi.CeGetSpecialFolderPath(val)) + print("Folder %s is at %s" % (name, loc)) + except win32api.error as details: + pass + + # Get the shortcut targets for the "Start Menu" + print("Dumping start menu shortcuts...") + try: + startMenu = str(wincerapi.CeGetSpecialFolderPath(wincerapi.CSIDL_STARTMENU)) + except win32api.error as details: + print("This device has no start menu!", details) + startMenu = None + + if startMenu: + for fileAttr in wincerapi.CeFindFiles(os.path.join(startMenu, "*")): + fileName = fileAttr[8] + fullPath = os.path.join(startMenu, str(fileName)) + try: + resolved = wincerapi.CeSHGetShortcutTarget(fullPath) + except win32api.error as xxx_todo_changeme: + (rc, fn, msg) = xxx_todo_changeme.args + resolved = "#Error - %s" % msg + print("%s->%s" % (fileName, resolved)) + + # print "The start menu is at", + # print wincerapi.CeSHGetShortcutTarget("\\Windows\\Start Menu\\Shortcut to Python.exe.lnk") + +def usage(): + print("Options:") + print("-a - Execute all demos") + print("-p - Execute Python process on remote device") + print("-r - Dump the remote registry") + print("-f - Dump all remote special folder locations") + print("-s - Dont dump machine status") + print("-y - Perform asynch init of CE connection") + +def main(): + async_init = bStartPython = bDumpRegistry = bDumpFolders = 0 + bDumpStatus = 1 + try: + opts, args = getopt.getopt(sys.argv[1:], "apr") + except getopt.error as why: + print("Invalid usage:", why) + usage() + return + + for o, v in opts: + if o=="-a": + bStartPython = bDumpRegistry = bDumpStatus = bDumpFolders = asynch_init = 1 + if o=="-p": + bStartPython=1 + if o=="-r": + bDumpRegistry=1 + if o=="-s": + bDumpStatus=0 + if o=="-f": + bDumpFolders = 1 + if o=="-y": + print("Doing asynch init of CE connection") + async_init = 1 + + if async_init: + event, rc = wincerapi.CeRapiInitEx() + while 1: + rc = win32event.WaitForSingleObject(event, 500) + if rc==win32event.WAIT_OBJECT_0: + # We connected. 
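+                # The event handle returned by CeRapiInitEx is signalled once the RAPI
+                # connection is established, so polling can stop here.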
+ break + else: + print("Waiting for Initialize to complete (picture a Cancel button here :)") + else: + wincerapi.CeRapiInit() + print("Connected to remote CE device.") + try: + verinfo = wincerapi.CeGetVersionEx() + print("The device is running windows CE version %d.%d - %s" % (verinfo[0], verinfo[1], verinfo[4])) + + if bDumpStatus: + print("Dumping remote machine status") + DumpRemoteMachineStatus() + + if bDumpRegistry: + print("Dumping remote registry...") + DumpRegistry(win32con.HKEY_LOCAL_MACHINE) + + if bDumpFolders: + print("Dumping remote folder information") + DumpRemoteFolders() + + DemoCopyFile() + if bStartPython: + print("Starting remote Python process") + if DumpPythonRegistry(): + DemoCreateProcess() + else: + print("Not trying to start Python, as it's not installed") + + finally: + wincerapi.CeRapiUninit() + print("Disconnected") + +if __name__=='__main__': + main() diff --git a/venv/Lib/site-packages/win32/Demos/dde/__pycache__/ddeclient.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/dde/__pycache__/ddeclient.cpython-36.pyc new file mode 100644 index 00000000..1d380792 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/dde/__pycache__/ddeclient.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/dde/__pycache__/ddeserver.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/dde/__pycache__/ddeserver.cpython-36.pyc new file mode 100644 index 00000000..d7b7b99f Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/dde/__pycache__/ddeserver.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/dde/ddeclient.py b/venv/Lib/site-packages/win32/Demos/dde/ddeclient.py new file mode 100644 index 00000000..8cd8fbdd --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/dde/ddeclient.py @@ -0,0 +1,19 @@ +# 'Request' example added jjk 11/20/98 + +import win32ui +import dde + +server = dde.CreateServer() +server.Create("TestClient") + +conversation = dde.CreateConversation(server) + +conversation.ConnectTo("RunAny", "RunAnyCommand") +conversation.Exec("DoSomething") +conversation.Exec("DoSomethingElse") + +conversation.ConnectTo("RunAny", "ComputeStringLength") +s = 'abcdefghi' +sl = conversation.Request(s) +print('length of "%s" is %s'%(s,sl)) + diff --git a/venv/Lib/site-packages/win32/Demos/dde/ddeserver.py b/venv/Lib/site-packages/win32/Demos/dde/ddeserver.py new file mode 100644 index 00000000..46d3dfa2 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/dde/ddeserver.py @@ -0,0 +1,38 @@ +# 'Request' example added jjk 11/20/98 + +import win32ui +from pywin.mfc import object +import dde + +class MySystemTopic(object.Object): + def __init__(self): + object.Object.__init__(self, dde.CreateServerSystemTopic()) + + def Exec(self, cmd): + print("System Topic asked to exec", cmd) + +class MyOtherTopic(object.Object): + def __init__(self, topicName): + object.Object.__init__(self, dde.CreateTopic(topicName)) + + def Exec(self, cmd): + print("Other Topic asked to exec", cmd) + +class MyRequestTopic(object.Object): + def __init__(self, topicName): + topic = dde.CreateTopic(topicName) + topic.AddItem(dde.CreateStringItem("")) + object.Object.__init__(self, topic) + + def Request(self, aString): + print("Request Topic asked to compute length of:", aString) + return(str(len(aString))) + +server = dde.CreateServer() +server.AddTopic(MySystemTopic()) +server.AddTopic(MyOtherTopic("RunAnyCommand")) +server.AddTopic(MyRequestTopic("ComputeStringLength")) +server.Create('RunAny') + +while 1: + win32ui.PumpWaitingMessages(0, -1) diff 
--git a/venv/Lib/site-packages/win32/Demos/desktopmanager.py b/venv/Lib/site-packages/win32/Demos/desktopmanager.py new file mode 100644 index 00000000..fc9964e7 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/desktopmanager.py @@ -0,0 +1,166 @@ +# Demonstrates using a taskbar icon to create and navigate between desktops + +import win32api, win32con, win32gui, win32service, win32process +import pywintypes +import traceback, _thread, time +import io + +## "Shell_TrayWnd" is class of system tray window, broadcasts "TaskbarCreated" when initialized + +def desktop_name_dlgproc(hwnd,msg,wparam,lparam): + """ Handles messages from the desktop name dialog box """ + if msg in (win32con.WM_CLOSE,win32con.WM_DESTROY): + win32gui.DestroyWindow(hwnd) + elif msg == win32con.WM_COMMAND: + if wparam == win32con.IDOK: + desktop_name=win32gui.GetDlgItemText(hwnd, 72) + print('new desktop name: ',desktop_name) + win32gui.DestroyWindow(hwnd) + create_desktop(desktop_name) + + elif wparam == win32con.IDCANCEL: + win32gui.DestroyWindow(hwnd) + +def get_new_desktop_name(parent_hwnd): + """ Create a dialog box to ask the user for name of desktop to be created """ + msgs={win32con.WM_COMMAND:desktop_name_dlgproc, + win32con.WM_CLOSE:desktop_name_dlgproc, + win32con.WM_DESTROY:desktop_name_dlgproc} + # dlg item [type, caption, id, (x,y,cx,cy), style, ex style + style=win32con.WS_BORDER|win32con.WS_VISIBLE|win32con.WS_CAPTION|win32con.WS_SYSMENU ## |win32con.DS_SYSMODAL + h=win32gui.CreateDialogIndirect( + win32api.GetModuleHandle(None), + [['One ugly dialog box !',(100,100,200,100),style,0], + ['Button','Create', win32con.IDOK, (10,10,30,20),win32con.WS_VISIBLE|win32con.WS_TABSTOP|win32con.BS_HOLLOW|win32con.BS_DEFPUSHBUTTON], + ['Button','Never mind', win32con.IDCANCEL, (45,10,50,20),win32con.WS_VISIBLE|win32con.WS_TABSTOP|win32con.BS_HOLLOW], + ['Static','Desktop name:',71,(10,40,70,10),win32con.WS_VISIBLE], + ['Edit','',72,(75,40,90,10),win32con.WS_VISIBLE]], + parent_hwnd, msgs) ## parent_hwnd, msgs) + + win32gui.EnableWindow(h,True) + hcontrol=win32gui.GetDlgItem(h,72) + win32gui.EnableWindow(hcontrol,True) + win32gui.SetFocus(hcontrol) + +def new_icon(hdesk,desktop_name): + """ Runs as a thread on each desktop to create a new tray icon and handle its messages """ + global id + id=id+1 + hdesk.SetThreadDesktop() + ## apparently the threads can't use same hinst, so each needs its own window class + windowclassname='PythonDesktopManager'+desktop_name + wc = win32gui.WNDCLASS() + wc.hInstance = win32api.GetModuleHandle(None) + wc.lpszClassName = windowclassname + wc.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW | win32con.CS_GLOBALCLASS + wc.hCursor = win32gui.LoadCursor( 0, win32con.IDC_ARROW ) + wc.hbrBackground = win32con.COLOR_WINDOW + wc.lpfnWndProc = icon_wndproc + windowclass = win32gui.RegisterClass(wc) + style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU + hwnd = win32gui.CreateWindow(windowclass, 'dm_'+desktop_name, win32con.WS_SYSMENU, + 0, 0, win32con.CW_USEDEFAULT, win32con.CW_USEDEFAULT, + 0, 0, wc.hInstance, None) + win32gui.UpdateWindow(hwnd) + flags = win32gui.NIF_ICON | win32gui.NIF_MESSAGE | win32gui.NIF_TIP + notify_info = (hwnd, id, flags, win32con.WM_USER+20, hicon, 'Desktop Manager (%s)' %desktop_name) + window_info[hwnd]=notify_info + ## wait for explorer to initialize system tray for new desktop + tray_found=0 + while not tray_found: + try: + tray_found=win32gui.FindWindow("Shell_TrayWnd",None) + except win32gui.error: + traceback.print_exc + time.sleep(.5) + 
win32gui.Shell_NotifyIcon(win32gui.NIM_ADD, notify_info) + win32gui.PumpMessages() + +def create_desktop(desktop_name, start_explorer=1): + """ Creates a new desktop and spawns a thread running on it + Will also start a new icon thread on an existing desktop + """ + sa=pywintypes.SECURITY_ATTRIBUTES() + sa.bInheritHandle=1 + + try: + hdesk=win32service.CreateDesktop(desktop_name, 0, win32con.MAXIMUM_ALLOWED, sa) + except win32service.error: + traceback.print_exc() + errbuf=io.StringIO() + traceback.print_exc(None,errbuf) + win32api.MessageBox(0, errbuf.getvalue(), 'Desktop creation failed') + return + if start_explorer: + s=win32process.STARTUPINFO() + s.lpDesktop=desktop_name + prc_info=win32process.CreateProcess(None, "Explorer.exe",None,None,True,win32con.CREATE_NEW_CONSOLE,None,'c:\\',s) + + th=_thread.start_new_thread(new_icon,(hdesk,desktop_name)) + hdesk.SwitchDesktop() + +def icon_wndproc(hwnd, msg, wp, lp): + """ Window proc for the tray icons """ + if lp==win32con.WM_LBUTTONDOWN: + ## popup menu won't disappear if you don't do this + win32gui.SetForegroundWindow(hwnd) + + curr_desktop=win32service.OpenInputDesktop(0,True,win32con.MAXIMUM_ALLOWED) + curr_desktop_name=win32service.GetUserObjectInformation(curr_desktop,win32con.UOI_NAME) + winsta=win32service.GetProcessWindowStation() + desktops=winsta.EnumDesktops() + m=win32gui.CreatePopupMenu() + desktop_cnt=len(desktops) + ## *don't* create an item 0 + for d in range(1, desktop_cnt+1): + mf_flags=win32con.MF_STRING + ## if you switch to winlogon yourself, there's nothing there and you're stuck + if desktops[d-1].lower() in ('winlogon','disconnect'): + mf_flags=mf_flags|win32con.MF_GRAYED|win32con.MF_DISABLED + if desktops[d-1]==curr_desktop_name: + mf_flags=mf_flags|win32con.MF_CHECKED + win32gui.AppendMenu(m, mf_flags, d, desktops[d-1]) + win32gui.AppendMenu(m, win32con.MF_STRING, desktop_cnt+1, 'Create new ...') + win32gui.AppendMenu(m, win32con.MF_STRING, desktop_cnt+2, 'Exit') + + x,y=win32gui.GetCursorPos() + d=win32gui.TrackPopupMenu(m,win32con.TPM_LEFTBUTTON|win32con.TPM_RETURNCMD|win32con.TPM_NONOTIFY, + x,y, 0, hwnd, None) + win32gui.PumpWaitingMessages() + win32gui.DestroyMenu(m) + if d==desktop_cnt+1: ## Create new + get_new_desktop_name(hwnd) + elif d==desktop_cnt+2: ## Exit + win32gui.PostQuitMessage(0) + win32gui.Shell_NotifyIcon(win32gui.NIM_DELETE, window_info[hwnd]) + del window_info[hwnd] + origin_desktop.SwitchDesktop() + elif d>0: + hdesk=win32service.OpenDesktop(desktops[d-1],0,0,win32con.MAXIMUM_ALLOWED) + hdesk.SwitchDesktop() + return 0 + else: + return win32gui.DefWindowProc(hwnd, msg, wp, lp) + +window_info={} +origin_desktop=win32service.OpenInputDesktop(0, True, win32con.MAXIMUM_ALLOWED) +origin_desktop_name=win32service.GetUserObjectInformation(origin_desktop, win32service.UOI_NAME) + +hinst=win32api.GetModuleHandle(None) +try: + hicon=win32gui.LoadIcon(hinst, 1) ## python.exe and pythonw.exe +except win32gui.error: + hicon=win32gui.LoadIcon(hinst, 135) ## pythonwin's icon +id=0 + +create_desktop(str(origin_desktop_name),0) + +## wait for first thread to initialize its icon +while not window_info: + time.sleep(1) + +## exit when last tray icon goes away +while window_info: + win32gui.PumpWaitingMessages() + time.sleep(3) + diff --git a/venv/Lib/site-packages/win32/Demos/eventLogDemo.py b/venv/Lib/site-packages/win32/Demos/eventLogDemo.py new file mode 100644 index 00000000..53332dfd --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/eventLogDemo.py @@ -0,0 +1,111 @@ +import win32evtlog +import 
win32api +import win32con +import win32security # To translate NT Sids to account names. + +import win32evtlogutil + +def ReadLog(computer, logType="Application", dumpEachRecord = 0): + # read the entire log back. + h=win32evtlog.OpenEventLog(computer, logType) + numRecords = win32evtlog.GetNumberOfEventLogRecords(h) +# print "There are %d records" % numRecords + + num=0 + while 1: + objects = win32evtlog.ReadEventLog(h, win32evtlog.EVENTLOG_BACKWARDS_READ|win32evtlog.EVENTLOG_SEQUENTIAL_READ, 0) + if not objects: + break + for object in objects: + # get it for testing purposes, but dont print it. + msg = win32evtlogutil.SafeFormatMessage(object, logType) + if object.Sid is not None: + try: + domain, user, typ = win32security.LookupAccountSid(computer, object.Sid) + sidDesc = "%s/%s" % (domain, user) + except win32security.error: + sidDesc = str(object.Sid) + user_desc = "Event associated with user %s" % (sidDesc,) + else: + user_desc = None + if dumpEachRecord: + print("Event record from %r generated at %s" % (object.SourceName, object.TimeGenerated.Format())) + if user_desc: + print(user_desc) + try: + print(msg) + except UnicodeError: + print("(unicode error printing message: repr() follows...)") + print(repr(msg)) + + num = num + len(objects) + + if numRecords == num: + print("Successfully read all", numRecords, "records") + else: + print("Couldn't get all records - reported %d, but found %d" % (numRecords, num)) + print("(Note that some other app may have written records while we were running!)") + win32evtlog.CloseEventLog(h) + +def usage(): + print("Writes an event to the event log.") + print("-w : Dont write any test records.") + print("-r : Dont read the event log") + print("-c : computerName : Process the log on the specified computer") + print("-v : Verbose") + print("-t : LogType - Use the specified log - default = 'Application'") + + +def test(): + # check if running on Windows NT, if not, display notice and terminate + if win32api.GetVersion() & 0x80000000: + print("This sample only runs on NT") + return + + import sys, getopt + opts, args = getopt.getopt(sys.argv[1:], "rwh?c:t:v") + computer = None + do_read = do_write = 1 + + logType = "Application" + verbose = 0 + + if len(args)>0: + print("Invalid args") + usage() + return 1 + for opt, val in opts: + if opt == '-t': + logType = val + if opt == '-c': + computer = val + if opt in ['-h', '-?']: + usage() + return + if opt=='-r': + do_read = 0 + if opt=='-w': + do_write = 0 + if opt=='-v': + verbose = verbose + 1 + if do_write: + ph=win32api.GetCurrentProcess() + th = win32security.OpenProcessToken(ph,win32con.TOKEN_READ) + my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0] + + win32evtlogutil.ReportEvent(logType, 2, + strings=["The message text for event 2","Another insert"], + data = "Raw\0Data".encode("ascii"), sid = my_sid) + win32evtlogutil.ReportEvent(logType, 1, eventType=win32evtlog.EVENTLOG_WARNING_TYPE, + strings=["A warning","An even more dire warning"], + data = "Raw\0Data".encode("ascii"), sid = my_sid) + win32evtlogutil.ReportEvent(logType, 1, eventType=win32evtlog.EVENTLOG_INFORMATION_TYPE, + strings=["An info","Too much info"], + data = "Raw\0Data".encode("ascii"), sid = my_sid) + print("Successfully wrote 3 records to the log") + + if do_read: + ReadLog(computer, logType, verbose > 0) + +if __name__=='__main__': + test() diff --git a/venv/Lib/site-packages/win32/Demos/getfilever.py b/venv/Lib/site-packages/win32/Demos/getfilever.py new file mode 100644 index 00000000..34026664 --- 
/dev/null +++ b/venv/Lib/site-packages/win32/Demos/getfilever.py @@ -0,0 +1,21 @@ +import os, win32api + +ver_strings=('Comments','InternalName','ProductName', + 'CompanyName','LegalCopyright','ProductVersion', + 'FileDescription','LegalTrademarks','PrivateBuild', + 'FileVersion','OriginalFilename','SpecialBuild') +fname = os.environ["comspec"] +d=win32api.GetFileVersionInfo(fname, '\\') +## backslash as parm returns dictionary of numeric info corresponding to VS_FIXEDFILEINFO struc +for n, v in d.items(): + print(n, v) + +pairs=win32api.GetFileVersionInfo(fname, '\\VarFileInfo\\Translation') +## \VarFileInfo\Translation returns list of available (language, codepage) pairs that can be used to retreive string info +## any other must be of the form \StringfileInfo\%04X%04X\parm_name, middle two are language/codepage pair returned from above +for lang, codepage in pairs: + print('lang: ', lang, 'codepage:', codepage) + for ver_string in ver_strings: + str_info='\\StringFileInfo\\%04X%04X\\%s' %(lang,codepage,ver_string) + ## print str_info + print(ver_string, repr(win32api.GetFileVersionInfo(fname, str_info))) diff --git a/venv/Lib/site-packages/win32/Demos/images/frowny.bmp b/venv/Lib/site-packages/win32/Demos/images/frowny.bmp new file mode 100644 index 00000000..43e7621e Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/images/frowny.bmp differ diff --git a/venv/Lib/site-packages/win32/Demos/images/smiley.bmp b/venv/Lib/site-packages/win32/Demos/images/smiley.bmp new file mode 100644 index 00000000..12ed5dee Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/images/smiley.bmp differ diff --git a/venv/Lib/site-packages/win32/Demos/mmapfile_demo.py b/venv/Lib/site-packages/win32/Demos/mmapfile_demo.py new file mode 100644 index 00000000..9e991f07 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/mmapfile_demo.py @@ -0,0 +1,95 @@ +import win32api, mmapfile +import winerror +import tempfile, os +from pywin32_testutil import str2bytes + +system_info=win32api.GetSystemInfo() +page_size=system_info[1] +alloc_size=system_info[7] + +fname=tempfile.mktemp() +mapping_name=os.path.split(fname)[1] +fsize=8*page_size +print(fname, fsize, mapping_name) + +m1=mmapfile.mmapfile(File=fname, Name=mapping_name, MaximumSize=fsize) +m1.seek(100) +m1.write_byte(str2bytes('?')) +m1.seek(-1,1) +assert m1.read_byte()==str2bytes('?') + +## A reopened named mapping should have exact same size as original mapping +m2=mmapfile.mmapfile(Name=mapping_name, File=None, MaximumSize=fsize*2) +assert m2.size()==m1.size() +m1.seek(0,0) +m1.write(fsize*str2bytes('s')) +assert m2.read(fsize)==fsize*str2bytes('s') + +move_src=100 +move_dest=500 +move_size=150 + +m2.seek(move_src,0) +assert m2.tell()==move_src +m2.write(str2bytes('m')*move_size) +m2.move(move_dest, move_src, move_size) +m2.seek(move_dest, 0) +assert m2.read(move_size) == str2bytes('m') * move_size +## m2.write('x'* (fsize+1)) + +m2.close() +m1.resize(fsize*2) +assert m1.size()==fsize * 2 +m1.seek(fsize) +m1.write(str2bytes('w') * fsize) +m1.flush() +m1.close() +os.remove(fname) + + + +## Test a file with size larger than 32 bits +## need 10 GB free on drive where your temp folder lives +fname_large=tempfile.mktemp() +mapping_name='Pywin32_large_mmap' +offsetdata=str2bytes('This is start of offset') + +## Deliberately use odd numbers to test rounding logic +fsize = (1024*1024*1024*10) + 333 +offset = (1024*1024*32) + 42 +view_size = (1024*1024*16) + 111 + +## round mapping size and view size up to multiple of system page size +if 
fsize%page_size: + fsize += page_size - (fsize%page_size) +if view_size%page_size: + view_size += page_size - (view_size%page_size) +## round offset down to multiple of allocation granularity +offset -= offset%alloc_size + +m1=None +m2=None +try: + try: + m1=mmapfile.mmapfile(fname_large, mapping_name, fsize, 0, offset*2) + except mmapfile.error as exc: + # if we don't have enough disk-space, that's OK. + if exc.winerror!=winerror.ERROR_DISK_FULL: + raise + print("skipping large file test - need", fsize, "available bytes.") + else: + m1.seek(offset) + m1.write(offsetdata) + + ## When reopening an existing mapping without passing a file handle, you have + ## to specify a positive size even though it's ignored + m2=mmapfile.mmapfile(File=None, Name=mapping_name, MaximumSize=1, + FileOffset=offset, NumberOfBytesToMap=view_size) + assert m2.read(len(offsetdata))==offsetdata +finally: + if m1 is not None: + m1.close() + if m2 is not None: + m2.close() + if os.path.exists(fname_large): + os.remove(fname_large) diff --git a/venv/Lib/site-packages/win32/Demos/pipes/__pycache__/cat.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/pipes/__pycache__/cat.cpython-36.pyc new file mode 100644 index 00000000..81c389ac Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/pipes/__pycache__/cat.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/pipes/__pycache__/runproc.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/pipes/__pycache__/runproc.cpython-36.pyc new file mode 100644 index 00000000..af268ae1 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/pipes/__pycache__/runproc.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/pipes/cat.py b/venv/Lib/site-packages/win32/Demos/pipes/cat.py new file mode 100644 index 00000000..be6f3ed0 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/pipes/cat.py @@ -0,0 +1,16 @@ +'''cat.py +a version of unix cat, tweaked to show off runproc.py +''' + +import sys +data = sys.stdin.read(1) +sys.stdout.write(data) +sys.stdout.flush() +while data: + data = sys.stdin.read(1) + sys.stdout.write(data) + sys.stdout.flush() +# Just here to have something to read from stderr. +sys.stderr.write("Blah...") + +# end of cat.py diff --git a/venv/Lib/site-packages/win32/Demos/pipes/runproc.py b/venv/Lib/site-packages/win32/Demos/pipes/runproc.py new file mode 100644 index 00000000..cdb30b48 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/pipes/runproc.py @@ -0,0 +1,110 @@ +'''runproc.py + +start a process with three inherited pipes. +Try to write to and read from those. +''' + +import win32api +import win32pipe +import win32file +import win32process +import win32security +import win32con +import msvcrt +import os + +class Process: + def run(self, cmdline): + # security attributes for pipes + sAttrs = win32security.SECURITY_ATTRIBUTES() + sAttrs.bInheritHandle = 1 + + # create pipes + hStdin_r, self.hStdin_w = win32pipe.CreatePipe(sAttrs, 0) + self.hStdout_r, hStdout_w = win32pipe.CreatePipe(sAttrs, 0) + self.hStderr_r, hStderr_w = win32pipe.CreatePipe(sAttrs, 0) + + # set the info structure for the new process. + StartupInfo = win32process.STARTUPINFO() + StartupInfo.hStdInput = hStdin_r + StartupInfo.hStdOutput = hStdout_w + StartupInfo.hStdError = hStderr_w + StartupInfo.dwFlags = win32process.STARTF_USESTDHANDLES + # Mark doesn't support wShowWindow yet. 
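+        # STARTF_USESTDHANDLES makes CreateProcess use the hStdInput/hStdOutput/hStdError
+        # assigned above as the child's standard handles; the pipe ends passed there are the
+        # inheritable ones the child needs. The commented-out lines below would additionally
+        # hide the child's window via STARTF_USESHOWWINDOW.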
+ # StartupInfo.dwFlags = StartupInfo.dwFlags | win32process.STARTF_USESHOWWINDOW + # StartupInfo.wShowWindow = win32con.SW_HIDE + + # Create new output read handles and the input write handle. Set + # the inheritance properties to FALSE. Otherwise, the child inherits + # the these handles; resulting in non-closeable handles to the pipes + # being created. + pid = win32api.GetCurrentProcess() + + tmp = win32api.DuplicateHandle( + pid, + self.hStdin_w, + pid, + 0, + 0, # non-inheritable!! + win32con.DUPLICATE_SAME_ACCESS) + # Close the inhertible version of the handle + win32file.CloseHandle(self.hStdin_w) + self.hStdin_w = tmp + tmp = win32api.DuplicateHandle( + pid, + self.hStdout_r, + pid, + 0, + 0, # non-inheritable! + win32con.DUPLICATE_SAME_ACCESS) + # Close the inhertible version of the handle + win32file.CloseHandle(self.hStdout_r) + self.hStdout_r = tmp + + # start the process. + hProcess, hThread, dwPid, dwTid = win32process.CreateProcess( + None, # program + cmdline,# command line + None, # process security attributes + None, # thread attributes + 1, # inherit handles, or USESTDHANDLES won't work. + # creation flags. Don't access the console. + 0, # Don't need anything here. + # If you're in a GUI app, you should use + # CREATE_NEW_CONSOLE here, or any subprocesses + # might fall victim to the problem described in: + # KB article: Q156755, cmd.exe requires + # an NT console in order to perform redirection.. + None, # no new environment + None, # current directory (stay where we are) + StartupInfo) + # normally, we would save the pid etc. here... + + # Child is launched. Close the parents copy of those pipe handles + # that only the child should have open. + # You need to make sure that no handles to the write end of the + # output pipe are maintained in this process or else the pipe will + # not close when the child process exits and the ReadFile will hang. 
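+        # Note: further down, the parent closes its stdin write handle before reading stdout
+        # and stderr, so the child (cat.py) sees EOF, flushes its remaining output and exits;
+        # otherwise the blocking read() calls there would never return.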
+ win32file.CloseHandle(hStderr_w) + win32file.CloseHandle(hStdout_w) + win32file.CloseHandle(hStdin_r) + + self.stdin = os.fdopen(msvcrt.open_osfhandle(self.hStdin_w, 0), "wb") + self.stdin.write('hmmmmm\r\n') + self.stdin.flush() + self.stdin.close() + + self.stdout = os.fdopen(msvcrt.open_osfhandle(self.hStdout_r, 0), "rb") + print("Read on stdout: ", repr(self.stdout.read())) + + self.stderr = os.fdopen(msvcrt.open_osfhandle(self.hStderr_r, 0), "rb") + print("Read on stderr: ", repr(self.stderr.read())) + + +if __name__ == '__main__': + p = Process() + exe = win32api.GetModuleFileName(0) + p.run(exe + ' cat.py') + +# end of runproc.py + diff --git a/venv/Lib/site-packages/win32/Demos/print_desktop.py b/venv/Lib/site-packages/win32/Demos/print_desktop.py new file mode 100644 index 00000000..6be37f7e --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/print_desktop.py @@ -0,0 +1,67 @@ +import win32print, pywintypes, win32con, win32gui, win32api + +pname=win32print.GetDefaultPrinter() +print(pname) +p=win32print.OpenPrinter(pname) +print('Printer handle: ',p) +print_processor=win32print.GetPrinter(p,2)['pPrintProcessor'] +## call with last parm set to 0 to get total size needed for printer's DEVMODE +dmsize=win32print.DocumentProperties(0, p, pname, None, None, 0) +## dmDriverExtra should be total size - fixed size +driverextra=dmsize - pywintypes.DEVMODEType().Size ## need a better way to get DEVMODE.dmSize +dm=pywintypes.DEVMODEType(driverextra) +dm.Fields=dm.Fields|win32con.DM_ORIENTATION|win32con.DM_COPIES +dm.Orientation=win32con.DMORIENT_LANDSCAPE +dm.Copies=2 +win32print.DocumentProperties(0, p, pname, dm, dm, win32con.DM_IN_BUFFER|win32con.DM_OUT_BUFFER) + +pDC=win32gui.CreateDC(print_processor,pname,dm) +printerwidth=win32print.GetDeviceCaps(pDC, win32con.PHYSICALWIDTH) +printerheight=win32print.GetDeviceCaps(pDC, win32con.PHYSICALHEIGHT) + +hwnd=win32gui.GetDesktopWindow() +l,t,r,b=win32gui.GetWindowRect(hwnd) +desktopheight=b-t +desktopwidth=r-l +dDC = win32gui.GetWindowDC(hwnd) + +dcDC=win32gui.CreateCompatibleDC(dDC) +dcBM = win32gui.CreateCompatibleBitmap(dDC, desktopwidth, desktopheight); +win32gui.SelectObject(dcDC, dcBM) +win32gui.StretchBlt(dcDC, 0, 0, desktopwidth, desktopheight, dDC, 0, 0, desktopwidth, desktopheight, win32con.SRCCOPY) + +pcDC=win32gui.CreateCompatibleDC(pDC) +pcBM=win32gui.CreateCompatibleBitmap(pDC, printerwidth, printerheight) +win32gui.SelectObject(pcDC, pcBM) +win32gui.StretchBlt(pcDC, 0, 0, printerwidth, printerheight, dcDC, 0, 0, desktopwidth, desktopheight, win32con.SRCCOPY) + +win32print.StartDoc(pDC,('desktop.bmp',None,None,0)) +win32print.StartPage(pDC) +win32gui.StretchBlt(pDC, 0, 0, int(printerwidth*.9), int(printerheight*.9), pcDC, 0, 0, printerwidth, printerheight, win32con.SRCCOPY) + +font=win32gui.LOGFONT() +font.lfHeight=int(printerheight/20) +font.lfWidth=font.lfHeight +font.lfWeight=150 +font.lfItalic=1 +font.lfUnderline=1 +hf=win32gui.CreateFontIndirect(font) +win32gui.SelectObject(pDC,hf) +win32gui.SetBkMode(pDC, win32con.TRANSPARENT) +win32gui.SetTextColor(pDC,win32api.RGB(0,255,0)) +win32gui.DrawText(pDC,'Printed by Python!', -1, + (0,0, int(printerwidth*.9), int(printerheight*.9)), + win32con.DT_RIGHT|win32con.DT_BOTTOM|win32con.DT_SINGLELINE) +win32print.EndPage(pDC) +win32print.EndDoc(pDC) + +win32print.ClosePrinter(p) +win32gui.DeleteObject(dcBM) +win32gui.DeleteObject(pcBM) +win32gui.DeleteObject(hf) +win32gui.DeleteDC(dDC) +win32gui.DeleteDC(dcDC) +win32gui.DeleteDC(pDC) +win32gui.DeleteDC(pcDC) + + diff --git 
a/venv/Lib/site-packages/win32/Demos/rastest.py b/venv/Lib/site-packages/win32/Demos/rastest.py new file mode 100644 index 00000000..f0e24bfc --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/rastest.py @@ -0,0 +1,136 @@ +# rastest.py - test/demonstrate the win32ras module. +# Much of the code here contributed by Jethro Wright. + +import sys +import os +import win32ras + +# Build a little dictionary of RAS states to decent strings. +# eg win32ras.RASCS_OpenPort -> "OpenPort" +stateMap = {} +for name, val in list(win32ras.__dict__.items()): + if name[:6]=="RASCS_": + stateMap[val] = name[6:] + +# Use a lock so the callback can tell the main thread when it is finished. +import win32event +callbackEvent = win32event.CreateEvent(None, 0, 0, None) + +def Callback( hras, msg, state, error, exterror): +# print "Callback called with ", hras, msg, state, error, exterror + stateName = stateMap.get(state, "Unknown state?") + print("Status is %s (%04lx), error code is %d" % (stateName, state, error)) + finished = state in [win32ras.RASCS_Connected] + if finished: + win32event.SetEvent(callbackEvent) + if error != 0 or int( state ) == win32ras.RASCS_Disconnected: + # we know for sure this is a good place to hangup.... + print("Detected call failure: %s" % win32ras.GetErrorString( error )) + HangUp( hras ) + win32event.SetEvent(callbackEvent) + +def ShowConnections(): + print("All phone-book entries:") + for (name,) in win32ras.EnumEntries(): + print(" ", name) + print("Current Connections:") + for con in win32ras.EnumConnections(): + print(" ", con) + +def EditEntry(entryName): + try: + win32ras.EditPhonebookEntry(0,None,entryName) + except win32ras.error as xxx_todo_changeme: + (rc, function, msg) = xxx_todo_changeme.args + print("Can not edit/find the RAS entry -", msg) + +def HangUp( hras ): + # trap potential, irrelevant errors from win32ras.... + try: + win32ras.HangUp( hras ) + except: + print("Tried to hang up gracefully on error, but didn't work....") + return None + +def Connect(entryName, bUseCallback): + if bUseCallback: + theCallback = Callback + win32event.ResetEvent(callbackEvent) + else: + theCallback = None + # in order to *use* the username/password of a particular dun entry, one must + # explicitly get those params under win95.... + try: + dp, b = win32ras.GetEntryDialParams( None, entryName ) + except: + print("Couldn't find DUN entry: %s" % entryName) + else: + hras, rc = win32ras.Dial(None, None, (entryName, "", "", dp[ 3 ], dp[ 4 ], ""),theCallback) + # hras, rc = win32ras.Dial(None, None, (entryName, ),theCallback) + # print hras, rc + if not bUseCallback and rc != 0: + print("Could not dial the RAS connection:", win32ras.GetErrorString(rc)) + hras = HangUp( hras ) + # don't wait here if there's no need to.... + elif bUseCallback and win32event.WaitForSingleObject(callbackEvent, 60000)!=win32event.WAIT_OBJECT_0: + print("Gave up waiting for the process to complete!") + # sdk docs state one must explcitly hangup, even if there's an error.... + try: + cs = win32ras.GetConnectStatus( hras ) + except: + # on error, attempt a hang up anyway.... 
+ hras = HangUp( hras ) + else: + if int( cs[ 0 ] ) == win32ras.RASCS_Disconnected: + hras = HangUp( hras ) + return hras, rc + +def Disconnect( rasEntry ): + # Need to find the entry + name = rasEntry.lower() + for hcon, entryName, devName, devType in win32ras.EnumConnections(): + if entryName.lower() == name: + win32ras.HangUp( hcon ) + print("Disconnected from", rasEntry) + break + else: + print("Could not find an open connection to", entryName) + +usage = """ +Usage: %s [-s] [-l] [-c connection] [-d connection] +-l : List phone-book entries and current connections. +-s : Show status while connecting/disconnecting (uses callbacks) +-c : Connect to the specified phonebook name. +-d : Disconnect from the specified phonebook name. +-e : Edit the specified phonebook entry. +""" + +def main(): + import getopt + try: + opts, args = getopt.getopt(sys.argv[1:], "slc:d:e:") + except getopt.error as why: + print(why) + print(usage % (os.path.basename(sys.argv[0],))) + return + + bCallback = 0 + if args or not opts: + print(usage % (os.path.basename(sys.argv[0],))) + return + for opt, val in opts: + if opt=="-s": + bCallback = 1 + if opt=="-l": + ShowConnections() + if opt=="-c": + hras, rc = Connect(val, bCallback) + if hras != None: + print("hras: 0x%8lx, rc: 0x%04x" % ( hras, rc )) + if opt=="-d": + Disconnect(val) + if opt=="-e": + EditEntry(val) + +if __name__=='__main__': + main() diff --git a/venv/Lib/site-packages/win32/Demos/security/GetTokenInformation.py b/venv/Lib/site-packages/win32/Demos/security/GetTokenInformation.py new file mode 100644 index 00000000..82f60563 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/GetTokenInformation.py @@ -0,0 +1,73 @@ +""" Lists various types of information about current user's access token, + including UAC status on Vista +""" + +import pywintypes, win32api, win32security +import win32con, winerror +from security_enums import TOKEN_GROUP_ATTRIBUTES, TOKEN_PRIVILEGE_ATTRIBUTES, \ + SECURITY_IMPERSONATION_LEVEL, TOKEN_TYPE, TOKEN_ELEVATION_TYPE + + +def dump_token(th): + token_type=win32security.GetTokenInformation(th, win32security.TokenType) + print('TokenType:', token_type, TOKEN_TYPE.lookup_name(token_type)) + if token_type==win32security.TokenImpersonation: + imp_lvl=win32security.GetTokenInformation(th, win32security.TokenImpersonationLevel) + print('TokenImpersonationLevel:', imp_lvl, SECURITY_IMPERSONATION_LEVEL.lookup_name(imp_lvl)) + + print('TokenSessionId:', win32security.GetTokenInformation(th, win32security.TokenSessionId)) + + privs=win32security.GetTokenInformation(th,win32security.TokenPrivileges) + print('TokenPrivileges:') + for priv_luid, priv_flags in privs: + flag_names, unk=TOKEN_PRIVILEGE_ATTRIBUTES.lookup_flags(priv_flags) + flag_desc = ' '.join(flag_names) + if (unk): + flag_desc += '(' + str(unk) + ')' + + priv_name=win32security.LookupPrivilegeName('',priv_luid) + priv_desc=win32security.LookupPrivilegeDisplayName('',priv_name) + print('\t', priv_name, priv_desc, priv_flags, flag_desc) + + print('TokenGroups:') + groups=win32security.GetTokenInformation(th,win32security.TokenGroups) + for group_sid, group_attr in groups: + flag_names, unk=TOKEN_GROUP_ATTRIBUTES.lookup_flags(group_attr) + flag_desc = ' '.join(flag_names) + if (unk): + flag_desc += '(' + str(unk) + ')' + if group_attr & TOKEN_GROUP_ATTRIBUTES.SE_GROUP_LOGON_ID: + sid_desc = 'Logon sid' + else: + sid_desc=win32security.LookupAccountSid('',group_sid) + print('\t',group_sid, sid_desc, group_attr, flag_desc) + + ## Vista token information types, 
will throw (87, 'GetTokenInformation', 'The parameter is incorrect.') on earier OS + try: + is_elevated=win32security.GetTokenInformation(th, win32security.TokenElevation) + print('TokenElevation:', is_elevated) + except pywintypes.error as details: + if details.winerror != winerror.ERROR_INVALID_PARAMETER: + raise + return None + print('TokenHasRestrictions:', win32security.GetTokenInformation(th, win32security.TokenHasRestrictions)) + print('TokenMandatoryPolicy', win32security.GetTokenInformation(th, win32security.TokenMandatoryPolicy)) + print('TokenVirtualizationAllowed:', win32security.GetTokenInformation(th, win32security.TokenVirtualizationAllowed)) + print('TokenVirtualizationEnabled:', win32security.GetTokenInformation(th, win32security.TokenVirtualizationEnabled)) + + elevation_type = win32security.GetTokenInformation(th, win32security.TokenElevationType) + print('TokenElevationType:', elevation_type, TOKEN_ELEVATION_TYPE.lookup_name(elevation_type)) + if elevation_type!=win32security.TokenElevationTypeDefault: + lt=win32security.GetTokenInformation(th, win32security.TokenLinkedToken) + print('TokenLinkedToken:', lt) + else: + lt=None + return lt + + +ph = win32api.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32con.MAXIMUM_ALLOWED) +lt = dump_token(th) +if lt: + print('\n\nlinked token info:') + dump_token(lt) diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/GetTokenInformation.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/GetTokenInformation.cpython-36.pyc new file mode 100644 index 00000000..1b247c06 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/GetTokenInformation.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/account_rights.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/account_rights.cpython-36.pyc new file mode 100644 index 00000000..99bed9ad Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/account_rights.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/explicit_entries.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/explicit_entries.cpython-36.pyc new file mode 100644 index 00000000..fe636ba3 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/explicit_entries.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/get_policy_info.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/get_policy_info.cpython-36.pyc new file mode 100644 index 00000000..f4414f93 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/get_policy_info.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/list_rights.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/list_rights.cpython-36.pyc new file mode 100644 index 00000000..4fd42dfd Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/list_rights.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/localized_names.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/localized_names.cpython-36.pyc new file mode 100644 index 00000000..9ed873ac Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/localized_names.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/win32/Demos/security/__pycache__/lsaregevent.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/lsaregevent.cpython-36.pyc new file mode 100644 index 00000000..ea933bab Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/lsaregevent.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/lsastore.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/lsastore.cpython-36.pyc new file mode 100644 index 00000000..fb5216b8 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/lsastore.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/query_information.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/query_information.cpython-36.pyc new file mode 100644 index 00000000..35d8e344 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/query_information.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/regsave_sa.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/regsave_sa.cpython-36.pyc new file mode 100644 index 00000000..bb75861a Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/regsave_sa.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/regsecurity.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/regsecurity.cpython-36.pyc new file mode 100644 index 00000000..690e96c9 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/regsecurity.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/sa_inherit.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/sa_inherit.cpython-36.pyc new file mode 100644 index 00000000..fa85abf0 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/sa_inherit.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/security_enums.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/security_enums.cpython-36.pyc new file mode 100644 index 00000000..95235ed5 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/security_enums.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/set_file_audit.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/set_file_audit.cpython-36.pyc new file mode 100644 index 00000000..3b11820c Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/set_file_audit.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/set_file_owner.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/set_file_owner.cpython-36.pyc new file mode 100644 index 00000000..89258b5a Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/set_file_owner.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/set_policy_info.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/set_policy_info.cpython-36.pyc new file mode 100644 index 00000000..00237467 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/set_policy_info.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/win32/Demos/security/__pycache__/setkernelobjectsecurity.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/setkernelobjectsecurity.cpython-36.pyc new file mode 100644 index 00000000..771f48c8 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/setkernelobjectsecurity.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/setnamedsecurityinfo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/setnamedsecurityinfo.cpython-36.pyc new file mode 100644 index 00000000..72cf224f Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/setnamedsecurityinfo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/setsecurityinfo.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/setsecurityinfo.cpython-36.pyc new file mode 100644 index 00000000..f247ced6 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/setsecurityinfo.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/__pycache__/setuserobjectsecurity.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/__pycache__/setuserobjectsecurity.cpython-36.pyc new file mode 100644 index 00000000..156edd01 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/__pycache__/setuserobjectsecurity.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/account_rights.py b/venv/Lib/site-packages/win32/Demos/security/account_rights.py new file mode 100644 index 00000000..e20c080b --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/account_rights.py @@ -0,0 +1,31 @@ +import win32security,win32file,win32api,ntsecuritycon,win32con +from security_enums import TRUSTEE_TYPE,TRUSTEE_FORM,ACE_FLAGS,ACCESS_MODE + +new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ? 
+ ) + +ph = win32api.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES) +win32security.AdjustTokenPrivileges(th,0,new_privs) + +policy_handle = win32security.GetPolicyHandle('',win32security.POLICY_ALL_ACCESS) +tmp_sid = win32security.LookupAccountName('','tmp')[0] + +privs=[ntsecuritycon.SE_DEBUG_NAME,ntsecuritycon.SE_TCB_NAME,ntsecuritycon.SE_RESTORE_NAME,ntsecuritycon.SE_REMOTE_SHUTDOWN_NAME] +win32security.LsaAddAccountRights(policy_handle,tmp_sid,privs) + +privlist=win32security.LsaEnumerateAccountRights(policy_handle,tmp_sid) +for priv in privlist: + print(priv) + +privs=[ntsecuritycon.SE_DEBUG_NAME,ntsecuritycon.SE_TCB_NAME] +win32security.LsaRemoveAccountRights(policy_handle,tmp_sid,0,privs) + +privlist=win32security.LsaEnumerateAccountRights(policy_handle,tmp_sid) +for priv in privlist: + print(priv) + +win32security.LsaClose(policy_handle) + diff --git a/venv/Lib/site-packages/win32/Demos/security/explicit_entries.py b/venv/Lib/site-packages/win32/Demos/security/explicit_entries.py new file mode 100644 index 00000000..e0dea0e8 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/explicit_entries.py @@ -0,0 +1,144 @@ +import os +import win32security,win32file,win32api,ntsecuritycon,win32con +from security_enums import TRUSTEE_TYPE,TRUSTEE_FORM,ACE_FLAGS,ACCESS_MODE + +fname = os.path.join(win32api.GetTempPath(), "win32security_test.txt") +f=open(fname, "w") +f.write("Hello from Python\n"); +f.close() +print("Testing on file", fname) + +new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ? 
+ ) + +ph = win32api.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES) +win32security.AdjustTokenPrivileges(th,0,new_privs) + +all_security_info = \ + win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \ + win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION + +sd=win32security.GetFileSecurity(fname,all_security_info) + +old_sacl=sd.GetSecurityDescriptorSacl() +if old_sacl==None: + old_sacl=win32security.ACL() +old_dacl=sd.GetSecurityDescriptorDacl() +if old_dacl==None: + old_dacl=win32security.ACL() + +my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0] +tmp_sid = win32security.LookupAccountName('','tmp')[0] +pwr_sid = win32security.LookupAccountName('','Power Users')[0] + + +## MultipleTrustee,MultipleTrusteeOperation,TrusteeForm,TrusteeType,Identifier +## first two are ignored +my_trustee = {} +my_trustee['MultipleTrustee']=None +my_trustee['MultipleTrusteeOperation']=0 +my_trustee['TrusteeForm']=TRUSTEE_FORM.TRUSTEE_IS_SID +my_trustee['TrusteeType']=TRUSTEE_TYPE.TRUSTEE_IS_USER +my_trustee['Identifier']=my_sid + +tmp_trustee = {} +tmp_trustee['MultipleTrustee']=None +tmp_trustee['MultipleTrusteeOperation']=0 +tmp_trustee['TrusteeForm']=TRUSTEE_FORM.TRUSTEE_IS_NAME +tmp_trustee['TrusteeType']=TRUSTEE_TYPE.TRUSTEE_IS_USER +tmp_trustee['Identifier']='rupole\\tmp' + +pwr_trustee = {} +pwr_trustee['MultipleTrustee']=None +pwr_trustee['MultipleTrusteeOperation']=0 +pwr_trustee['TrusteeForm']=TRUSTEE_FORM.TRUSTEE_IS_SID +pwr_trustee['TrusteeType']=TRUSTEE_TYPE.TRUSTEE_IS_USER +pwr_trustee['Identifier']=pwr_sid + +expl_list=[] +expl_list.append( + { + 'Trustee':my_trustee, + 'Inheritance':ACE_FLAGS.NO_INHERITANCE, + 'AccessMode':ACCESS_MODE.SET_AUDIT_SUCCESS, ##|ACCESS_MODE.SET_AUDIT_FAILURE, + 'AccessPermissions':win32con.GENERIC_ALL + } + ) + +expl_list.append( + { + 'Trustee':my_trustee, + 'Inheritance':ACE_FLAGS.NO_INHERITANCE, + 'AccessMode':ACCESS_MODE.SET_AUDIT_FAILURE, + 'AccessPermissions':win32con.GENERIC_ALL + } + ) + +expl_list.append( + { + 'Trustee':tmp_trustee, + 'Inheritance':ACE_FLAGS.NO_INHERITANCE, + 'AccessMode':ACCESS_MODE.SET_AUDIT_SUCCESS, + 'AccessPermissions':win32con.GENERIC_ALL + } + ) + +expl_list.append( + { + 'Trustee':tmp_trustee, + 'Inheritance':ACE_FLAGS.NO_INHERITANCE, + 'AccessMode':ACCESS_MODE.SET_AUDIT_FAILURE, + 'AccessPermissions':win32con.GENERIC_ALL + } + ) +old_sacl.SetEntriesInAcl(expl_list) + +expl_list=[] +expl_list.append( + { + 'Trustee':tmp_trustee, + 'Inheritance':ACE_FLAGS.NO_INHERITANCE, + 'AccessMode':ACCESS_MODE.DENY_ACCESS, + 'AccessPermissions':win32con.DELETE + } + ) + +expl_list.append( + { + 'Trustee':tmp_trustee, + 'Inheritance':ACE_FLAGS.NO_INHERITANCE, + 'AccessMode':ACCESS_MODE.GRANT_ACCESS, + 'AccessPermissions':win32con.WRITE_OWNER + } + ) +expl_list.append( + { + 'Trustee':pwr_trustee, + 'Inheritance':ACE_FLAGS.NO_INHERITANCE, + 'AccessMode':ACCESS_MODE.GRANT_ACCESS, + 'AccessPermissions':win32con.GENERIC_READ + } + ) +expl_list.append( + { + 'Trustee':my_trustee, + 'Inheritance':ACE_FLAGS.NO_INHERITANCE, + 'AccessMode':ACCESS_MODE.GRANT_ACCESS, + 'AccessPermissions':win32con.GENERIC_ALL + } + ) + +old_dacl.SetEntriesInAcl(expl_list) +sd.SetSecurityDescriptorSacl(1,old_sacl,1) +sd.SetSecurityDescriptorDacl(1,old_dacl,1) +sd.SetSecurityDescriptorOwner(pwr_sid,1) + +win32security.SetFileSecurity(fname, + all_security_info, + sd) diff --git 
a/venv/Lib/site-packages/win32/Demos/security/get_policy_info.py b/venv/Lib/site-packages/win32/Demos/security/get_policy_info.py new file mode 100644 index 00000000..3b05e5b0 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/get_policy_info.py @@ -0,0 +1,23 @@ +import win32security,win32file,win32api,ntsecuritycon,win32con +policy_handle = win32security.GetPolicyHandle('rupole',win32security.POLICY_ALL_ACCESS) + +## mod_nbr, mod_time = win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyModificationInformation) +## print mod_nbr, mod_time + +domain_name,dns_domain_name, dns_forest_name, domain_guid, domain_sid = \ + win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyDnsDomainInformation) +print(domain_name, dns_domain_name, dns_forest_name, domain_guid, domain_sid) + +event_audit_info=win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyAuditEventsInformation) +print(event_audit_info) + +domain_name,sid =win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyPrimaryDomainInformation) +print(domain_name, sid) + +domain_name,sid =win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyAccountDomainInformation) +print(domain_name, sid) + +server_role = win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyLsaServerRoleInformation) +print('server role: ',server_role) + +win32security.LsaClose(policy_handle) diff --git a/venv/Lib/site-packages/win32/Demos/security/list_rights.py b/venv/Lib/site-packages/win32/Demos/security/list_rights.py new file mode 100644 index 00000000..d3789e5f --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/list_rights.py @@ -0,0 +1,20 @@ +import win32security,win32file,win32api,ntsecuritycon,win32con +from security_enums import TRUSTEE_TYPE,TRUSTEE_FORM,ACE_FLAGS,ACCESS_MODE + +new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ? + ) + +ph = win32api.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES) +win32security.AdjustTokenPrivileges(th,0,new_privs) + +policy_handle = win32security.GetPolicyHandle('',win32security.POLICY_ALL_ACCESS) + +sidlist=win32security.LsaEnumerateAccountsWithUserRight(policy_handle,ntsecuritycon.SE_RESTORE_NAME) +for sid in sidlist: + print(win32security.LookupAccountSid('',sid)) + +win32security.LsaClose(policy_handle) + diff --git a/venv/Lib/site-packages/win32/Demos/security/localized_names.py b/venv/Lib/site-packages/win32/Demos/security/localized_names.py new file mode 100644 index 00000000..3119e6d4 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/localized_names.py @@ -0,0 +1,61 @@ +# A Python port of the MS knowledge base article Q157234 +# "How to deal with localized and renamed user and group names" +# http://support.microsoft.com/default.aspx?kbid=157234 + +import sys +from win32net import NetUserModalsGet +from win32security import LookupAccountSid +import pywintypes +from ntsecuritycon import * + +def LookupAliasFromRid(TargetComputer, Rid): + # Sid is the same regardless of machine, since the well-known + # BUILTIN domain is referenced. 
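+    # The SID built here is S-1-5-32-<Rid>: identifier authority SECURITY_NT_AUTHORITY (5),
+    # first sub-authority SECURITY_BUILTIN_DOMAIN_RID (32), then the alias RID, e.g.
+    # DOMAIN_ALIAS_RID_ADMINS (544) yields S-1-5-32-544, the built-in Administrators alias.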
+ sid = pywintypes.SID() + sid.Initialize(SECURITY_NT_AUTHORITY, 2) + + for i, r in enumerate((SECURITY_BUILTIN_DOMAIN_RID, Rid)): + sid.SetSubAuthority(i, r) + + name, domain, typ = LookupAccountSid(TargetComputer, sid) + return name + +def LookupUserGroupFromRid(TargetComputer, Rid): + # get the account domain Sid on the target machine + # note: if you were looking up multiple sids based on the same + # account domain, only need to call this once. + umi2 = NetUserModalsGet(TargetComputer, 2) + domain_sid = umi2['domain_id'] + + SubAuthorityCount = domain_sid.GetSubAuthorityCount() + + # create and init new sid with acct domain Sid + acct Rid + sid = pywintypes.SID() + sid.Initialize(domain_sid.GetSidIdentifierAuthority(), + SubAuthorityCount+1) + + # copy existing subauthorities from account domain Sid into + # new Sid + for i in range(SubAuthorityCount): + sid.SetSubAuthority(i, domain_sid.GetSubAuthority(i)) + + # append Rid to new Sid + sid.SetSubAuthority(SubAuthorityCount, Rid) + + name, domain, typ = LookupAccountSid(TargetComputer, sid) + return name + +def main(): + if len(sys.argv) == 2: + targetComputer = sys.argv[1] + else: + targetComputer = None + + name = LookupUserGroupFromRid(targetComputer, DOMAIN_USER_RID_ADMIN) + print("'Administrator' user name = %s" % (name,)) + + name = LookupAliasFromRid(targetComputer, DOMAIN_ALIAS_RID_ADMINS) + print("'Administrators' local group/alias name = %s" % (name,)) + +if __name__=='__main__': + main() diff --git a/venv/Lib/site-packages/win32/Demos/security/lsaregevent.py b/venv/Lib/site-packages/win32/Demos/security/lsaregevent.py new file mode 100644 index 00000000..7c31ac91 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/lsaregevent.py @@ -0,0 +1,8 @@ +import win32security, win32event +evt = win32event.CreateEvent(None,0,0,None) +win32security.LsaRegisterPolicyChangeNotification(win32security.PolicyNotifyAuditEventsInformation, evt) +print("Waiting for you change Audit policy in Management console ...") +ret_code=win32event.WaitForSingleObject(evt,1000000000) +## should come back when you change Audit policy in Management console ... 
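+## ret_code is win32event.WAIT_OBJECT_0 (0) if the notification event was signalled, or
+## win32event.WAIT_TIMEOUT (258) if the wait expired first.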
+print(ret_code) +win32security.LsaUnregisterPolicyChangeNotification(win32security.PolicyNotifyAuditEventsInformation, evt) diff --git a/venv/Lib/site-packages/win32/Demos/security/lsastore.py b/venv/Lib/site-packages/win32/Demos/security/lsastore.py new file mode 100644 index 00000000..4c53c470 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/lsastore.py @@ -0,0 +1,11 @@ +import win32security +policy_handle = win32security.GetPolicyHandle('',win32security.POLICY_ALL_ACCESS) +privatedata='some sensitive data' +keyname='tmp' +win32security.LsaStorePrivateData(policy_handle,keyname,privatedata) +retrieveddata=win32security.LsaRetrievePrivateData(policy_handle,keyname) +assert retrieveddata==privatedata + +## passing None deletes key +win32security.LsaStorePrivateData(policy_handle,keyname,None) +win32security.LsaClose(policy_handle) diff --git a/venv/Lib/site-packages/win32/Demos/security/query_information.py b/venv/Lib/site-packages/win32/Demos/security/query_information.py new file mode 100644 index 00000000..50a1a947 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/query_information.py @@ -0,0 +1,25 @@ +from ntsecuritycon import * +import win32api, win32security, winerror + +# This is a Python implementation of win32api.GetDomainName() +def GetDomainName(): + try: + tok = win32security.OpenThreadToken(win32api.GetCurrentThread(), + TOKEN_QUERY, 1) + except win32api.error as details: + if details[0] != winerror.ERROR_NO_TOKEN: + raise + # attempt to open the process token, since no thread token + # exists + tok = win32security.OpenProcessToken(win32api.GetCurrentProcess(), + TOKEN_QUERY) + sid, attr = win32security.GetTokenInformation(tok, TokenUser) + win32api.CloseHandle(tok) + + name, dom, typ = win32security.LookupAccountSid(None, sid) + return dom + +if __name__=='__main__': + print("Domain name is", GetDomainName()) + + diff --git a/venv/Lib/site-packages/win32/Demos/security/regsave_sa.py b/venv/Lib/site-packages/win32/Demos/security/regsave_sa.py new file mode 100644 index 00000000..4b29c1f1 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/regsave_sa.py @@ -0,0 +1,37 @@ +fname='h:\\tmp.reg' + +import win32api, win32con, win32security, ntsecuritycon, pywintypes,os +## regsave will not overwrite a file +if os.path.isfile(fname): + os.remove(fname) + +new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_BACKUP_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED) + + ) +ph = win32api.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS|win32con.TOKEN_ADJUST_PRIVILEGES) +win32security.AdjustTokenPrivileges(th,0,new_privs) +my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0] + +hklm=win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE,None,0,win32con.KEY_ALL_ACCESS) +skey=win32api.RegOpenKey(hklm,'SYSTEM',0,win32con.KEY_ALL_ACCESS) + +sa=pywintypes.SECURITY_ATTRIBUTES() +sd=pywintypes.SECURITY_DESCRIPTOR() +sa.SECURITY_DESCRIPTOR=sd +acl=pywintypes.ACL() + +pwr_sid = win32security.LookupAccountName('','Power Users')[0] +acl.AddAccessAllowedAce(win32con.ACL_REVISION,win32con.GENERIC_READ|win32con.ACCESS_SYSTEM_SECURITY,my_sid) +sd.SetSecurityDescriptorDacl(1,acl,0) 
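+## SetSecurityDescriptorDacl args are (bDaclPresent, acl, bDaclDefaulted): present=1 attaches
+## the DACL built above, defaulted=0 marks it as explicitly supplied rather than defaulted.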
+sd.SetSecurityDescriptorOwner(pwr_sid,0) +sa.bInheritHandle=1 +assert sa.SECURITY_DESCRIPTOR is sd + +win32api.RegSaveKey(skey,fname,sa) + + + diff --git a/venv/Lib/site-packages/win32/Demos/security/regsecurity.py b/venv/Lib/site-packages/win32/Demos/security/regsecurity.py new file mode 100644 index 00000000..8e1d3470 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/regsecurity.py @@ -0,0 +1,20 @@ +import win32api, win32con, win32security, ntsecuritycon + +new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED) + ) +ph = win32api.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS|win32con.TOKEN_ADJUST_PRIVILEGES) + +win32security.AdjustTokenPrivileges(th,0,new_privs) +hkey=win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE,None,0,win32con.KEY_ALL_ACCESS) +win32api.RegCreateKey(hkey,'SYSTEM\\NOTMP') +notmpkey=win32api.RegOpenKey(hkey,'SYSTEM\\notmp',0,win32con.ACCESS_SYSTEM_SECURITY) + +tmp_sid = win32security.LookupAccountName('','tmp')[0] +sacl=win32security.ACL() +sacl.AddAuditAccessAce(win32security.ACL_REVISION,win32con.GENERIC_ALL,tmp_sid,1,1) + +sd=win32security.SECURITY_DESCRIPTOR() +sd.SetSecurityDescriptorSacl(1,sacl,1) +win32api.RegSetKeySecurity(notmpkey,win32con.SACL_SECURITY_INFORMATION,sd) diff --git a/venv/Lib/site-packages/win32/Demos/security/sa_inherit.py b/venv/Lib/site-packages/win32/Demos/security/sa_inherit.py new file mode 100644 index 00000000..621aa6c1 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/sa_inherit.py @@ -0,0 +1,7 @@ +import pywintypes, win32security +sa=pywintypes.SECURITY_ATTRIBUTES() +tmp_sid=win32security.LookupAccountName('','tmp')[0] +sa.SetSecurityDescriptorOwner(tmp_sid,0) +sid=sa.SECURITY_DESCRIPTOR.GetSecurityDescriptorOwner() +print(win32security.LookupAccountSid('',sid)) + diff --git a/venv/Lib/site-packages/win32/Demos/security/security_enums.py b/venv/Lib/site-packages/win32/Demos/security/security_enums.py new file mode 100644 index 00000000..c0b7aff5 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/security_enums.py @@ -0,0 +1,316 @@ +import win32security, ntsecuritycon, winnt + +class Enum: + def __init__(self, *const_names): + """Accepts variable number of constant names that can be found in either + win32security, ntsecuritycon, or winnt.""" + for const_name in const_names: + try: + const_val=getattr(win32security,const_name) + except AttributeError: + try: + const_val=getattr(ntsecuritycon, const_name) + except AttributeError: + try: + const_val=getattr(winnt, const_name) + except AttributeError: + raise AttributeError('Constant "%s" not found in win32security, ntsecuritycon, or winnt.' 
%const_name) + setattr(self, const_name, const_val) + + def lookup_name(self, const_val): + """Looks up the name of a particular value.""" + for k,v in self.__dict__.items(): + if v==const_val: + return k + raise AttributeError('Value %s not found in enum' %const_val) + + def lookup_flags(self, flags): + """Returns the names of all recognized flags in input, and any flags not found in the enum.""" + flag_names=[] + unknown_flags=flags + for k,v in self.__dict__.items(): + if flags & v == v: + flag_names.append(k) + unknown_flags = unknown_flags & ~v + return flag_names, unknown_flags + +TOKEN_INFORMATION_CLASS = Enum( + 'TokenUser', + 'TokenGroups', + 'TokenPrivileges', + 'TokenOwner', + 'TokenPrimaryGroup', + 'TokenDefaultDacl', + 'TokenSource', + 'TokenType', + 'TokenImpersonationLevel', + 'TokenStatistics', + 'TokenRestrictedSids', + 'TokenSessionId', + 'TokenGroupsAndPrivileges', + 'TokenSessionReference', + 'TokenSandBoxInert', + 'TokenAuditPolicy', + 'TokenOrigin', + 'TokenElevationType', + 'TokenLinkedToken', + 'TokenElevation', + 'TokenHasRestrictions', + 'TokenAccessInformation', + 'TokenVirtualizationAllowed', + 'TokenVirtualizationEnabled', + 'TokenIntegrityLevel', + 'TokenUIAccess', + 'TokenMandatoryPolicy', + 'TokenLogonSid') + +TOKEN_TYPE = Enum( + 'TokenPrimary', + 'TokenImpersonation') + +TOKEN_ELEVATION_TYPE = Enum( + 'TokenElevationTypeDefault', + 'TokenElevationTypeFull', + 'TokenElevationTypeLimited') + +POLICY_AUDIT_EVENT_TYPE = Enum( + 'AuditCategorySystem', + 'AuditCategoryLogon', + 'AuditCategoryObjectAccess', + 'AuditCategoryPrivilegeUse', + 'AuditCategoryDetailedTracking', + 'AuditCategoryPolicyChange', + 'AuditCategoryAccountManagement', + 'AuditCategoryDirectoryServiceAccess', + 'AuditCategoryAccountLogon') + +POLICY_INFORMATION_CLASS = Enum( + 'PolicyAuditLogInformation', + 'PolicyAuditEventsInformation', + 'PolicyPrimaryDomainInformation', + 'PolicyPdAccountInformation', + 'PolicyAccountDomainInformation', + 'PolicyLsaServerRoleInformation', + 'PolicyReplicaSourceInformation', + 'PolicyDefaultQuotaInformation', + 'PolicyModificationInformation', + 'PolicyAuditFullSetInformation', + 'PolicyAuditFullQueryInformation', + 'PolicyDnsDomainInformation') + +POLICY_LSA_SERVER_ROLE = Enum( + 'PolicyServerRoleBackup', + 'PolicyServerRolePrimary') + +## access modes for opening a policy handle - this is not a real enum +POLICY_ACCESS_MODES = Enum( + 'POLICY_VIEW_LOCAL_INFORMATION', + 'POLICY_VIEW_AUDIT_INFORMATION', + 'POLICY_GET_PRIVATE_INFORMATION', + 'POLICY_TRUST_ADMIN', + 'POLICY_CREATE_ACCOUNT', + 'POLICY_CREATE_SECRET', + 'POLICY_CREATE_PRIVILEGE', + 'POLICY_SET_DEFAULT_QUOTA_LIMITS', + 'POLICY_SET_AUDIT_REQUIREMENTS', + 'POLICY_AUDIT_LOG_ADMIN', + 'POLICY_SERVER_ADMIN', + 'POLICY_LOOKUP_NAMES', + 'POLICY_NOTIFICATION', + 'POLICY_ALL_ACCESS', + 'POLICY_READ', + 'POLICY_WRITE', + 'POLICY_EXECUTE') + +## EventAuditingOptions flags - not a real enum +POLICY_AUDIT_EVENT_OPTIONS_FLAGS = Enum( + 'POLICY_AUDIT_EVENT_UNCHANGED', + 'POLICY_AUDIT_EVENT_SUCCESS', + 'POLICY_AUDIT_EVENT_FAILURE', + 'POLICY_AUDIT_EVENT_NONE') + +# AceType in ACE_HEADER - not a real enum +ACE_TYPE = Enum( + 'ACCESS_MIN_MS_ACE_TYPE', + 'ACCESS_ALLOWED_ACE_TYPE', + 'ACCESS_DENIED_ACE_TYPE', + 'SYSTEM_AUDIT_ACE_TYPE', + 'SYSTEM_ALARM_ACE_TYPE', + 'ACCESS_MAX_MS_V2_ACE_TYPE', + 'ACCESS_ALLOWED_COMPOUND_ACE_TYPE', + 'ACCESS_MAX_MS_V3_ACE_TYPE', + 'ACCESS_MIN_MS_OBJECT_ACE_TYPE', + 'ACCESS_ALLOWED_OBJECT_ACE_TYPE', + 'ACCESS_DENIED_OBJECT_ACE_TYPE', + 'SYSTEM_AUDIT_OBJECT_ACE_TYPE', + 
'SYSTEM_ALARM_OBJECT_ACE_TYPE', + 'ACCESS_MAX_MS_OBJECT_ACE_TYPE', + 'ACCESS_MAX_MS_V4_ACE_TYPE', + 'ACCESS_MAX_MS_ACE_TYPE', + 'ACCESS_ALLOWED_CALLBACK_ACE_TYPE', + 'ACCESS_DENIED_CALLBACK_ACE_TYPE', + 'ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE', + 'ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE', + 'SYSTEM_AUDIT_CALLBACK_ACE_TYPE', + 'SYSTEM_ALARM_CALLBACK_ACE_TYPE', + 'SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE', + 'SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE', + 'SYSTEM_MANDATORY_LABEL_ACE_TYPE', + 'ACCESS_MAX_MS_V5_ACE_TYPE') + +#bit flags for AceFlags - not a real enum +ACE_FLAGS = Enum( + 'CONTAINER_INHERIT_ACE', + 'FAILED_ACCESS_ACE_FLAG', + 'INHERIT_ONLY_ACE', + 'INHERITED_ACE', + 'NO_PROPAGATE_INHERIT_ACE', + 'OBJECT_INHERIT_ACE', + 'SUCCESSFUL_ACCESS_ACE_FLAG', + 'NO_INHERITANCE', + 'SUB_CONTAINERS_AND_OBJECTS_INHERIT', + 'SUB_CONTAINERS_ONLY_INHERIT', + 'SUB_OBJECTS_ONLY_INHERIT') + +# used in SetEntriesInAcl - very similar to ACE_TYPE +ACCESS_MODE = Enum( + 'NOT_USED_ACCESS', + 'GRANT_ACCESS', + 'SET_ACCESS', + 'DENY_ACCESS', + 'REVOKE_ACCESS', + 'SET_AUDIT_SUCCESS', + 'SET_AUDIT_FAILURE') + +# Bit flags in PSECURITY_DESCRIPTOR->Control - not a real enum +SECURITY_DESCRIPTOR_CONTROL_FLAGS = Enum( + 'SE_DACL_AUTO_INHERITED', ## win2k and up + 'SE_SACL_AUTO_INHERITED', ## win2k and up + 'SE_DACL_PROTECTED', ## win2k and up + 'SE_SACL_PROTECTED', ## win2k and up + 'SE_DACL_DEFAULTED', + 'SE_DACL_PRESENT', + 'SE_GROUP_DEFAULTED', + 'SE_OWNER_DEFAULTED', + 'SE_SACL_PRESENT', + 'SE_SELF_RELATIVE', + 'SE_SACL_DEFAULTED') + +# types of SID +SID_NAME_USE = Enum( + 'SidTypeUser', + 'SidTypeGroup', + 'SidTypeDomain', + 'SidTypeAlias', + 'SidTypeWellKnownGroup', + 'SidTypeDeletedAccount', + 'SidTypeInvalid', + 'SidTypeUnknown', + 'SidTypeComputer', + 'SidTypeLabel') + +## bit flags, not a real enum +TOKEN_ACCESS_PRIVILEGES = Enum( + 'TOKEN_ADJUST_DEFAULT', + 'TOKEN_ADJUST_GROUPS', + 'TOKEN_ADJUST_PRIVILEGES', + 'TOKEN_ALL_ACCESS', + 'TOKEN_ASSIGN_PRIMARY', + 'TOKEN_DUPLICATE', + 'TOKEN_EXECUTE', + 'TOKEN_IMPERSONATE', + 'TOKEN_QUERY', + 'TOKEN_QUERY_SOURCE', + 'TOKEN_READ', + 'TOKEN_WRITE') + +SECURITY_IMPERSONATION_LEVEL = Enum( + 'SecurityAnonymous', + 'SecurityIdentification', + 'SecurityImpersonation', + 'SecurityDelegation') + +POLICY_SERVER_ENABLE_STATE = Enum( + 'PolicyServerEnabled', + 'PolicyServerDisabled') + +POLICY_NOTIFICATION_INFORMATION_CLASS = Enum( + 'PolicyNotifyAuditEventsInformation', + 'PolicyNotifyAccountDomainInformation', + 'PolicyNotifyServerRoleInformation', + 'PolicyNotifyDnsDomainInformation', + 'PolicyNotifyDomainEfsInformation', + 'PolicyNotifyDomainKerberosTicketInformation', + 'PolicyNotifyMachineAccountPasswordInformation') + +TRUSTED_INFORMATION_CLASS = Enum( + 'TrustedDomainNameInformation', + 'TrustedControllersInformation', + 'TrustedPosixOffsetInformation', + 'TrustedPasswordInformation', + 'TrustedDomainInformationBasic', + 'TrustedDomainInformationEx', + 'TrustedDomainAuthInformation', + 'TrustedDomainFullInformation', + 'TrustedDomainAuthInformationInternal', + 'TrustedDomainFullInformationInternal', + 'TrustedDomainInformationEx2Internal', + 'TrustedDomainFullInformation2Internal') + +TRUSTEE_FORM = Enum( + 'TRUSTEE_IS_SID', + 'TRUSTEE_IS_NAME', + 'TRUSTEE_BAD_FORM', + 'TRUSTEE_IS_OBJECTS_AND_SID', + 'TRUSTEE_IS_OBJECTS_AND_NAME') + +TRUSTEE_TYPE = Enum( + 'TRUSTEE_IS_UNKNOWN', + 'TRUSTEE_IS_USER', + 'TRUSTEE_IS_GROUP', + 'TRUSTEE_IS_DOMAIN', + 'TRUSTEE_IS_ALIAS', + 'TRUSTEE_IS_WELL_KNOWN_GROUP', + 'TRUSTEE_IS_DELETED', + 'TRUSTEE_IS_INVALID', + 'TRUSTEE_IS_COMPUTER') 
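+## Usage sketch for these Enum helpers: TRUSTEE_TYPE.lookup_name(TRUSTEE_TYPE.TRUSTEE_IS_USER)
+## returns 'TRUSTEE_IS_USER', and ACE_FLAGS.lookup_flags(mask) splits a bitmask into the
+## recognized flag names plus any leftover, unrecognized bits.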
+ +## SE_OBJECT_TYPE - securable objects +SE_OBJECT_TYPE = Enum( + 'SE_UNKNOWN_OBJECT_TYPE', + 'SE_FILE_OBJECT', + 'SE_SERVICE', + 'SE_PRINTER', + 'SE_REGISTRY_KEY', + 'SE_LMSHARE', + 'SE_KERNEL_OBJECT', + 'SE_WINDOW_OBJECT', + 'SE_DS_OBJECT', + 'SE_DS_OBJECT_ALL', + 'SE_PROVIDER_DEFINED_OBJECT', + 'SE_WMIGUID_OBJECT', + 'SE_REGISTRY_WOW64_32KEY') + +PRIVILEGE_FLAGS = Enum( + 'SE_PRIVILEGE_ENABLED_BY_DEFAULT', + 'SE_PRIVILEGE_ENABLED', + 'SE_PRIVILEGE_USED_FOR_ACCESS') + +# Group flags used with TokenGroups +TOKEN_GROUP_ATTRIBUTES = Enum( + 'SE_GROUP_MANDATORY', + 'SE_GROUP_ENABLED_BY_DEFAULT', + 'SE_GROUP_ENABLED', + 'SE_GROUP_OWNER', + 'SE_GROUP_USE_FOR_DENY_ONLY', + 'SE_GROUP_INTEGRITY', + 'SE_GROUP_INTEGRITY_ENABLED', + 'SE_GROUP_LOGON_ID', + 'SE_GROUP_RESOURCE') + +# Privilege flags returned by TokenPrivileges +TOKEN_PRIVILEGE_ATTRIBUTES = Enum( + 'SE_PRIVILEGE_ENABLED_BY_DEFAULT', + 'SE_PRIVILEGE_ENABLED', + 'SE_PRIVILEGE_REMOVED', + 'SE_PRIVILEGE_USED_FOR_ACCESS') diff --git a/venv/Lib/site-packages/win32/Demos/security/set_file_audit.py b/venv/Lib/site-packages/win32/Demos/security/set_file_audit.py new file mode 100644 index 00000000..604db3bf --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/set_file_audit.py @@ -0,0 +1,58 @@ +import win32security,win32file,win32api,ntsecuritycon,win32con, os +from win32security import ACL_REVISION_DS, CONTAINER_INHERIT_ACE, OBJECT_INHERIT_ACE, \ + PROTECTED_DACL_SECURITY_INFORMATION, DACL_SECURITY_INFORMATION, SACL_SECURITY_INFORMATION, \ + OWNER_SECURITY_INFORMATION, GROUP_SECURITY_INFORMATION, SE_FILE_OBJECT + +## SE_SECURITY_NAME needed to access SACL, SE_RESTORE_NAME needed to change owner to someone other than yourself +new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED), + ) +ph = win32api.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS|win32con.TOKEN_ADJUST_PRIVILEGES) +modified_privs=win32security.AdjustTokenPrivileges(th,0,new_privs) + +## look up a few sids that should be available on most systems +my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0] +pwr_sid = win32security.LookupAccountName('','Power Users')[0] +admin_sid = win32security.LookupAccountName('','Administrators')[0] +everyone_sid=win32security.LookupAccountName('','EveryOne')[0] + +## create a dir and set security so Everyone has read permissions, and all files and subdirs inherit its ACLs +temp_dir=win32api.GetTempPath() +dir_name=win32api.GetTempFileName(temp_dir,'sfa')[0] +os.remove(dir_name) +os.mkdir(dir_name) +dir_dacl=win32security.ACL() +dir_dacl.AddAccessAllowedAceEx(ACL_REVISION_DS, CONTAINER_INHERIT_ACE|OBJECT_INHERIT_ACE, win32con.GENERIC_READ, everyone_sid) +## make sure current user has permissions on dir +dir_dacl.AddAccessAllowedAceEx(ACL_REVISION_DS, CONTAINER_INHERIT_ACE|OBJECT_INHERIT_ACE, win32con.GENERIC_ALL, my_sid) +## keep dir from inheriting any permissions so it only has ACEs explicitely set here +win32security.SetNamedSecurityInfo(dir_name, SE_FILE_OBJECT, + OWNER_SECURITY_INFORMATION|GROUP_SECURITY_INFORMATION|DACL_SECURITY_INFORMATION|PROTECTED_DACL_SECURITY_INFORMATION, + pwr_sid, pwr_sid, dir_dacl, None) + +## Create a file in the dir and add some specific permissions to it +fname=win32api.GetTempFileName(dir_name,'sfa')[0] +print(fname) +file_sd=win32security.GetNamedSecurityInfo(fname, 
SE_FILE_OBJECT, DACL_SECURITY_INFORMATION|SACL_SECURITY_INFORMATION) +file_dacl=file_sd.GetSecurityDescriptorDacl() +file_sacl=file_sd.GetSecurityDescriptorSacl() + +if file_dacl is None: + file_dacl=win32security.ACL() +if file_sacl is None: + file_sacl=win32security.ACL() + +file_dacl.AddAccessDeniedAce(file_dacl.GetAclRevision(),win32con.DELETE,admin_sid) +file_dacl.AddAccessDeniedAce(file_dacl.GetAclRevision(),win32con.DELETE,my_sid) +file_dacl.AddAccessAllowedAce(file_dacl.GetAclRevision(),win32con.GENERIC_ALL,pwr_sid) +file_sacl.AddAuditAccessAce(file_dacl.GetAclRevision(),win32con.GENERIC_ALL,my_sid,True,True) + +win32security.SetNamedSecurityInfo(fname, SE_FILE_OBJECT, + DACL_SECURITY_INFORMATION|SACL_SECURITY_INFORMATION, + None, None, file_dacl, file_sacl) + +win32security.AdjustTokenPrivileges(th, 0, modified_privs) + + + diff --git a/venv/Lib/site-packages/win32/Demos/security/set_file_owner.py b/venv/Lib/site-packages/win32/Demos/security/set_file_owner.py new file mode 100644 index 00000000..d913c1b4 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/set_file_owner.py @@ -0,0 +1,38 @@ +fname=r'h:\tmp.txt' + +import win32security,win32file,win32api,ntsecuritycon,win32con + +new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ? 
+ ) + +ph = win32api.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS|win32con.TOKEN_ADJUST_PRIVILEGES) +win32security.AdjustTokenPrivileges(th,0,new_privs) + +all_security_info = \ + win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \ + win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION + +sd=win32security.GetFileSecurity(fname,all_security_info) +old_dacl=sd.GetSecurityDescriptorDacl() +old_sacl=sd.GetSecurityDescriptorSacl() +old_group=sd.GetSecurityDescriptorGroup() + +new_sd=win32security.SECURITY_DESCRIPTOR() +print("relative, valid, size: ",new_sd.IsSelfRelative(), new_sd.IsValid(), new_sd.GetLength()) + +my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0] +tmp_sid = win32security.LookupAccountName('','tmp')[0] + +new_sd.SetSecurityDescriptorSacl(1,old_sacl,1) +new_sd.SetSecurityDescriptorDacl(1,old_dacl,1) +new_sd.SetSecurityDescriptorOwner(tmp_sid,0) +new_sd.SetSecurityDescriptorGroup(old_group,0) + +win32security.SetFileSecurity(fname,all_security_info,new_sd) diff --git a/venv/Lib/site-packages/win32/Demos/security/set_policy_info.py b/venv/Lib/site-packages/win32/Demos/security/set_policy_info.py new file mode 100644 index 00000000..3a778315 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/set_policy_info.py @@ -0,0 +1,17 @@ +import win32security,win32file,win32api,ntsecuritycon,win32con +policy_handle = win32security.GetPolicyHandle('rupole',win32security.POLICY_ALL_ACCESS) + +event_audit_info=win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyAuditEventsInformation) +print(event_audit_info) + +new_audit_info=list(event_audit_info[1]) +new_audit_info[win32security.AuditCategoryPolicyChange]= \ + win32security.POLICY_AUDIT_EVENT_SUCCESS|win32security.POLICY_AUDIT_EVENT_FAILURE +new_audit_info[win32security.AuditCategoryAccountLogon]= \ + win32security.POLICY_AUDIT_EVENT_SUCCESS|win32security.POLICY_AUDIT_EVENT_FAILURE +new_audit_info[win32security.AuditCategoryLogon]= \ + win32security.POLICY_AUDIT_EVENT_SUCCESS|win32security.POLICY_AUDIT_EVENT_FAILURE + +win32security.LsaSetInformationPolicy(policy_handle, win32security.PolicyAuditEventsInformation, (1,new_audit_info)) + +win32security.LsaClose(policy_handle) diff --git a/venv/Lib/site-packages/win32/Demos/security/setkernelobjectsecurity.py b/venv/Lib/site-packages/win32/Demos/security/setkernelobjectsecurity.py new file mode 100644 index 00000000..9cfa01a4 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/setkernelobjectsecurity.py @@ -0,0 +1,67 @@ +import win32security,win32api,win32con, win32process +## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody +## other than yourself or your primary group. 
Most admin logins don't have it by default, so +## enabling it may fail +new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED) + ) + +all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \ + win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION + +pid=win32api.GetCurrentProcessId() +ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS,0,pid) +## PROCESS_ALL_ACCESS does not contain ACCESS_SYSTEM_SECURITY (neccessy to do SACLs) +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES) +old_privs=win32security.AdjustTokenPrivileges(th,0,new_privs) +my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0] +pwr_sid=win32security.LookupAccountName('','Power Users')[0] +## reopen process with ACCESS_SYSTEM_SECURITY now that sufficent privs are enabled +ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS|win32con.ACCESS_SYSTEM_SECURITY,0,pid) + +sd=win32security.GetKernelObjectSecurity(ph,all_info) +dacl=sd.GetSecurityDescriptorDacl() +if dacl is None: + dacl=win32security.ACL() +sacl=sd.GetSecurityDescriptorSacl() +if sacl is None: + sacl=win32security.ACL() + +dacl_ace_cnt=dacl.GetAceCount() +sacl_ace_cnt=sacl.GetAceCount() + +dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid) +sacl.AddAuditAccessAce(sacl.GetAclRevision(),win32con.GENERIC_ALL,my_sid,1,1) +sd.SetSecurityDescriptorDacl(1,dacl,0) +sd.SetSecurityDescriptorSacl(1,sacl,0) +sd.SetSecurityDescriptorGroup(pwr_sid,0) +sd.SetSecurityDescriptorOwner(pwr_sid,0) + +win32security.SetKernelObjectSecurity(ph,all_info,sd) +new_sd=win32security.GetKernelObjectSecurity(ph,all_info) + +if new_sd.GetSecurityDescriptorDacl().GetAceCount()!=dacl_ace_cnt+1: + print('New dacl doesn''t contain extra ace ????') +if new_sd.GetSecurityDescriptorSacl().GetAceCount()!=sacl_ace_cnt+1: + print('New Sacl doesn''t contain extra ace ????') +if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]!='Power Users': + print('Owner not successfully set to Power Users !!!!!') +if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]!='Power Users': + print('Group not successfully set to Power Users !!!!!') + 
+sd.SetSecurityDescriptorSacl(0,None,0) +win32security.SetKernelObjectSecurity(ph, win32security.SACL_SECURITY_INFORMATION, sd) +new_sd_1=win32security.GetKernelObjectSecurity(ph,win32security.SACL_SECURITY_INFORMATION) +if new_sd_1.GetSecurityDescriptorSacl() is not None: + print('Unable to set Sacl to NULL !!!!!!!!') + diff --git a/venv/Lib/site-packages/win32/Demos/security/setnamedsecurityinfo.py b/venv/Lib/site-packages/win32/Demos/security/setnamedsecurityinfo.py new file mode 100644 index 00000000..333eceaa --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/setnamedsecurityinfo.py @@ -0,0 +1,60 @@ +import win32security,win32api,win32con, win32process +fname, tmp = win32api.GetTempFileName(win32api.GetTempPath(),'tmp') +print(fname) +## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody +## other than yourself or your primary group. Most admin logins don't have it by default, so +## enabling it may fail +new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED) + ) + +all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \ + win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION + +ph=win32process.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES) +win32security.AdjustTokenPrivileges(th,0,new_privs) +my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0] +pwr_sid=win32security.LookupAccountName('','Power Users')[0] + +sd=win32security.GetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,all_info) +dacl=sd.GetSecurityDescriptorDacl() +if dacl is None: + dacl=win32security.ACL() +sacl=sd.GetSecurityDescriptorSacl() +if sacl is None: + sacl=win32security.ACL() + +dacl_ace_cnt=dacl.GetAceCount() +sacl_ace_cnt=sacl.GetAceCount() + +dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid) +sacl.AddAuditAccessAce(sacl.GetAclRevision(),win32con.GENERIC_ALL,my_sid,1,1) + +win32security.SetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,all_info,pwr_sid, pwr_sid, dacl, sacl) +new_sd=win32security.GetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,all_info) + +## could do additional checking to make 
sure added ACE contains expected info +if new_sd.GetSecurityDescriptorDacl().GetAceCount()!=dacl_ace_cnt+1: + print('New dacl doesn''t contain extra ace ????') +if new_sd.GetSecurityDescriptorSacl().GetAceCount()!=sacl_ace_cnt+1: + print('New Sacl doesn''t contain extra ace ????') +if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]!='Power Users': + print('Owner not successfully set to Power Users !!!!!') +if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]!='Power Users': + print('Group not successfully set to Power Users !!!!!') + +win32security.SetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,win32security.SACL_SECURITY_INFORMATION, None, None, None, None) +new_sd_1=win32security.GetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,win32security.SACL_SECURITY_INFORMATION) +if new_sd_1.GetSecurityDescriptorSacl() is not None: + print('Unable to set Sacl to NULL !!!!!!!!') diff --git a/venv/Lib/site-packages/win32/Demos/security/setsecurityinfo.py b/venv/Lib/site-packages/win32/Demos/security/setsecurityinfo.py new file mode 100644 index 00000000..02b00280 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/setsecurityinfo.py @@ -0,0 +1,61 @@ +import win32security,win32api,win32con, win32process +## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody +## other than yourself or your primary group. Most admin logins don't have it by default, so +## enabling it may fail +new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED) + ) + +all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \ + win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION + +pid=win32api.GetCurrentProcessId() +ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS,0,pid) +## PROCESS_ALL_ACCESS does not contain ACCESS_SYSTEM_SECURITY (neccessy to do SACLs) +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES) +old_privs=win32security.AdjustTokenPrivileges(th,0,new_privs) +my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0] +pwr_sid=win32security.LookupAccountName('','Power Users')[0] +## reopen process with ACCESS_SYSTEM_SECURITY now that sufficent 
privs are enabled +ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS|win32con.ACCESS_SYSTEM_SECURITY,0,pid) + +sd=win32security.GetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,all_info) +dacl=sd.GetSecurityDescriptorDacl() +if dacl is None: + dacl=win32security.ACL() +sacl=sd.GetSecurityDescriptorSacl() +if sacl is None: + sacl=win32security.ACL() + +dacl_ace_cnt=dacl.GetAceCount() +sacl_ace_cnt=sacl.GetAceCount() + +dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid) +sacl.AddAuditAccessAce(sacl.GetAclRevision(),win32con.GENERIC_ALL,my_sid,1,1) + +win32security.SetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,all_info,pwr_sid, pwr_sid, dacl, sacl) +new_sd=win32security.GetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,all_info) + +if new_sd.GetSecurityDescriptorDacl().GetAceCount()!=dacl_ace_cnt+1: + print('New dacl doesn''t contain extra ace ????') +if new_sd.GetSecurityDescriptorSacl().GetAceCount()!=sacl_ace_cnt+1: + print('New Sacl doesn''t contain extra ace ????') +if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]!='Power Users': + print('Owner not successfully set to Power Users !!!!!') +if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]!='Power Users': + print('Group not successfully set to Power Users !!!!!') + +win32security.SetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,win32security.SACL_SECURITY_INFORMATION, None, None, None, None) +new_sd_1=win32security.GetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,win32security.SACL_SECURITY_INFORMATION) +if new_sd_1.GetSecurityDescriptorSacl() is not None: + print('Unable to set Sacl to NULL !!!!!!!!') diff --git a/venv/Lib/site-packages/win32/Demos/security/setuserobjectsecurity.py b/venv/Lib/site-packages/win32/Demos/security/setuserobjectsecurity.py new file mode 100644 index 00000000..5535541d --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/setuserobjectsecurity.py @@ -0,0 +1,42 @@ +import win32security,win32api,win32con, win32process +new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED), + (win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED) + ) + +all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \ + win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION 
+info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION|win32security.DACL_SECURITY_INFORMATION + +ph=win32process.GetCurrentProcess() +th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES) +win32security.AdjustTokenPrivileges(th,0,new_privs) +my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0] +pwr_sid=win32security.LookupAccountName('','Power Users')[0] + +h=win32process.GetProcessWindowStation() +sd=win32security.GetUserObjectSecurity(h,info) +dacl=sd.GetSecurityDescriptorDacl() +ace_cnt=dacl.GetAceCount() + +dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid) +sd.SetSecurityDescriptorDacl(1,dacl,0) +sd.SetSecurityDescriptorGroup(pwr_sid,0) +sd.SetSecurityDescriptorOwner(pwr_sid,0) + +win32security.SetUserObjectSecurity(h,info,sd) +new_sd=win32security.GetUserObjectSecurity(h,info) +assert new_sd.GetSecurityDescriptorDacl().GetAceCount()==ace_cnt+1,'Did not add an ace to the Dacl !!!!!!' +assert win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]=='Power Users','Owner not successfully set to Power Users !!!!!' +assert win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]=='Power Users','Group not successfully set to Power Users !!!!!' + + diff --git a/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/fetch_url.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/fetch_url.cpython-36.pyc new file mode 100644 index 00000000..a4b93549 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/fetch_url.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/simple_auth.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/simple_auth.cpython-36.pyc new file mode 100644 index 00000000..78264b51 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/simple_auth.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/socket_server.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/socket_server.cpython-36.pyc new file mode 100644 index 00000000..ca041975 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/socket_server.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/validate_password.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/validate_password.cpython-36.pyc new file mode 100644 index 00000000..7257be52 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/security/sspi/__pycache__/validate_password.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/security/sspi/fetch_url.py b/venv/Lib/site-packages/win32/Demos/security/sspi/fetch_url.py new file mode 100644 index 00000000..2dce13e6 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/sspi/fetch_url.py @@ -0,0 +1,146 @@ +""" +Fetches a URL from a web-server supporting NTLM authentication +eg, IIS. + +If no arguments are specified, a default of http://localhost/localstart.asp +is used. This script does follow simple 302 redirections, so pointing at the +root of an IIS server is should work. 
+""" + +import sys +import urllib.request, urllib.parse, urllib.error +import http.client +import urllib.parse +from base64 import encodestring, decodestring + +from sspi import ClientAuth + +import optparse # sorry, this demo needs 2.3+ + +options = None # set to optparse options object + +def open_url(host, url): + h = http.client.HTTPConnection(host) +# h.set_debuglevel(9) + h.putrequest('GET', url) + h.endheaders() + resp = h.getresponse() + print("Initial response is", resp.status, resp.reason) + body = resp.read() + if resp.status == 302: # object moved + url = "/" + resp.msg["location"] + resp.close() + h.putrequest('GET', url) + h.endheaders() + resp = h.getresponse() + print("After redirect response is", resp.status, resp.reason) + if options.show_headers: + print("Initial response headers:") + for name, val in list(resp.msg.items()): + print(" %s: %s" % (name, val)) + if options.show_body: + print(body) + if resp.status == 401: + # 401: Unauthorized - here is where the real work starts + auth_info = None + if options.user or options.domain or options.password: + auth_info = options.user, options.domain, options.password + ca = ClientAuth("NTLM", auth_info=auth_info) + auth_scheme = ca.pkg_info['Name'] + data = None + while 1: + err, out_buf = ca.authorize(data) + data = out_buf[0].Buffer + # Encode it as base64 as required by HTTP + auth = encodestring(data).replace("\012", "") + h.putrequest('GET', url) + h.putheader('Authorization', auth_scheme + ' ' + auth) + h.putheader('Content-Length', '0') + h.endheaders() + resp = h.getresponse() + if options.show_headers: + print("Token dance headers:") + for name, val in list(resp.msg.items()): + print(" %s: %s" % (name, val)) + + if err==0: + break + else: + if resp.status != 401: + print("Eeek - got response", resp.status) + cl = resp.msg.get("content-length") + if cl: + print(repr(resp.read(int(cl)))) + else: + print("no content!") + + assert resp.status == 401, resp.status + + assert not resp.will_close, "NTLM is per-connection - must not close" + schemes = [s.strip() for s in resp.msg.get("WWW-Authenticate", "").split(",")] + for scheme in schemes: + if scheme.startswith(auth_scheme): + data = decodestring(scheme[len(auth_scheme)+1:]) + break + else: + print("Could not find scheme '%s' in schemes %r" % (auth_scheme, schemes)) + break + + resp.read() + print("Final response status is", resp.status, resp.reason) + if resp.status == 200: + # Worked! + # Check we can read it again without re-authenticating. 
+ if resp.will_close: + print("EEEK - response will close, but NTLM is per connection - it must stay open") + body = resp.read() + if options.show_body: + print("Final response body:") + print(body) + h.putrequest('GET', url) + h.endheaders() + resp = h.getresponse() + print("Second fetch response is", resp.status, resp.reason) + if options.show_headers: + print("Second response headers:") + for name, val in list(resp.msg.items()): + print(" %s: %s" % (name, val)) + + resp.read(int(resp.msg.get("content-length", 0))) + elif resp.status == 500: + print("Error text") + print(resp.read()) + else: + if options.show_body: + cl = resp.msg.get("content-length") + print(resp.read(int(cl))) + +if __name__=='__main__': + parser = optparse.OptionParser(description=__doc__) + + parser.add_option("", "--show-body", action="store_true", + help="print the body of each response as it is received") + + parser.add_option("", "--show-headers", action="store_true", + help="print the headers of each response as it is received") + + parser.add_option("", "--user", action="store", + help="The username to login with") + + parser.add_option("", "--password", action="store", + help="The password to login with") + + parser.add_option("", "--domain", action="store", + help="The domain to login to") + + options, args = parser.parse_args() + if not args: + print("Run with --help for usage details") + args = ["http://localhost/localstart.asp"] + for url in args: + scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(url) + if (scheme != "http") or params or query or fragment: + parser.error("Scheme must be http, URL must be simple") + + print("Opening '%s' from '%s'" % (path, netloc)) + r = open_url(netloc, path) diff --git a/venv/Lib/site-packages/win32/Demos/security/sspi/simple_auth.py b/venv/Lib/site-packages/win32/Demos/security/sspi/simple_auth.py new file mode 100644 index 00000000..65b91fab --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/sspi/simple_auth.py @@ -0,0 +1,71 @@ +# A demo of basic SSPI authentication. +# There is a 'client' context and a 'server' context - typically these will +# be on different machines (here they are in the same process, but the same +# concepts apply) +import sspi +import win32security, sspicon, win32api + +def lookup_ret_code(err): + for k,v in list(sspicon.__dict__.items()): + if k[0:6] in ('SEC_I_','SEC_E_') and v==err: + return k + + +""" +pkg_name='Kerberos' +sspiclient=SSPIClient(pkg_name, win32api.GetUserName(), ## target spn is ourself + None, None, ## use none for client name and authentication information for current context + ## u'username', (u'username',u'domain.com',u'passwd'), + sspicon.ISC_REQ_INTEGRITY|sspicon.ISC_REQ_SEQUENCE_DETECT|sspicon.ISC_REQ_REPLAY_DETECT| \ + sspicon.ISC_REQ_DELEGATE|sspicon.ISC_REQ_CONFIDENTIALITY|sspicon.ISC_REQ_USE_SESSION_KEY) +sspiserver=SSPIServer(pkg_name, None, + sspicon.ASC_REQ_INTEGRITY|sspicon.ASC_REQ_SEQUENCE_DETECT|sspicon.ASC_REQ_REPLAY_DETECT| \ + sspicon.ASC_REQ_DELEGATE|sspicon.ASC_REQ_CONFIDENTIALITY|sspicon.ASC_REQ_STREAM|sspicon.ASC_REQ_USE_SESSION_KEY) +""" + +pkg_name='NTLM' + +# Setup the 2 contexts. +sspiclient=sspi.ClientAuth(pkg_name) +sspiserver=sspi.ServerAuth(pkg_name) + +# Perform the authentication dance, each loop exchanging more information +# on the way to completing authentication. 
+sec_buffer=None +while 1: + err, sec_buffer = sspiclient.authorize(sec_buffer) + err, sec_buffer = sspiserver.authorize(sec_buffer) + if err==0: + break + +# The server can now impersonate the client. In this demo the 2 users will +# always be the same. +sspiserver.ctxt.ImpersonateSecurityContext() +print('Impersonated user: ',win32api.GetUserNameEx(win32api.NameSamCompatible)) +sspiserver.ctxt.RevertSecurityContext() +print('Reverted to self: ',win32api.GetUserName()) + +pkg_size_info=sspiclient.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES) +# Now sign some data +msg='some data to be encrypted ......' + +sigsize=pkg_size_info['MaxSignature'] +sigbuf=win32security.PySecBufferDescType() +sigbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA)) +sigbuf.append(win32security.PySecBufferType(sigsize, sspicon.SECBUFFER_TOKEN)) +sigbuf[0].Buffer=msg +sspiclient.ctxt.MakeSignature(0,sigbuf,1) +sspiserver.ctxt.VerifySignature(sigbuf,1) + +# And finally encrypt some. +trailersize=pkg_size_info['SecurityTrailer'] +encbuf=win32security.PySecBufferDescType() +encbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA)) +encbuf.append(win32security.PySecBufferType(trailersize, sspicon.SECBUFFER_TOKEN)) +encbuf[0].Buffer=msg +sspiclient.ctxt.EncryptMessage(0,encbuf,1) +print('Encrypted data:',repr(encbuf[0].Buffer)) +sspiserver.ctxt.DecryptMessage(encbuf,1) +print('Unencrypted data:',encbuf[0].Buffer) + + diff --git a/venv/Lib/site-packages/win32/Demos/security/sspi/socket_server.py b/venv/Lib/site-packages/win32/Demos/security/sspi/socket_server.py new file mode 100644 index 00000000..a823fea8 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/sspi/socket_server.py @@ -0,0 +1,178 @@ +"""A sample socket server and client using SSPI authentication and encryption. + +You must run with either 'client' or 'server' as arguments. A server must be +running before a client can connect. + +To use with Kerberos you should include in the client options +--target-spn=username, where 'username' is the user under which the server is +being run. + +Running either the client or server as a different user can be informative. +A command-line such as the following may be useful: +`runas /user:{user} {fqp}\python.exe {fqp}\socket_server.py --wait client|server` + +{fqp} should specify the relevant fully-qualified path names. + +To use 'runas' with Kerberos, the client program will need to +specify --target-spn with the username under which the *server* is running. + +See the SSPI documentation for more details. +""" + + +import sys +import struct +import socketserver +import win32api +import http.client +import traceback + +import win32security +import sspi, sspicon + +import optparse # sorry, this demo needs 2.3+ + +options = None # set to optparse object. + +def GetUserName(): + try: + return win32api.GetUserName() + except win32api.error as details: + # Seeing 'access denied' errors here for non-local users (presumably + # without permission to login locally). Get the fully-qualified + # username, although a side-effect of these permission-denied errors + # is a lack of Python codecs - so printing the Unicode value fails. + # So just return the repr(), and avoid codecs completely. + return repr(win32api.GetUserNameEx(win32api.NameSamCompatible)) + +# Send a simple "message" over a socket - send the number of bytes first, +# then the string. Ditto for receive. 
+def _send_msg(s, m): + s.send(struct.pack("i", len(m))) + s.send(m) + +def _get_msg(s): + size_data = s.recv(struct.calcsize("i")) + if not size_data: + return None + cb = struct.unpack("i", size_data)[0] + return s.recv(cb) + +class SSPISocketServer(socketserver.TCPServer): + def __init__(self, *args, **kw): + socketserver.TCPServer.__init__(self, *args, **kw) + self.sa = sspi.ServerAuth(options.package) + + def verify_request(self, sock, ca): + # Do the sspi auth dance + self.sa.reset() + while 1: + data = _get_msg(sock) + if data is None: + return False + try: + err, sec_buffer = self.sa.authorize(data) + except sspi.error as details: + print("FAILED to authorize client:", details) + return False + + if err==0: + break + _send_msg(sock, sec_buffer[0].Buffer) + return True + + def process_request(self, request, client_address): + # An example using the connection once it is established. + print("The server is running as user", GetUserName()) + self.sa.ctxt.ImpersonateSecurityContext() + try: + print("Having conversation with client as user", GetUserName()) + while 1: + # we need to grab 2 bits of data - the encrypted data, and the + # 'key' + data = _get_msg(request) + key = _get_msg(request) + if data is None or key is None: + break + data = self.sa.decrypt(data, key) + print("Client sent:", repr(data)) + finally: + self.sa.ctxt.RevertSecurityContext() + self.close_request(request) + print("The server is back to user", GetUserName()) + +def serve(): + s = SSPISocketServer(("localhost", options.port), None) + print("Running test server...") + s.serve_forever() + +def sspi_client(): + c = http.client.HTTPConnection("localhost", options.port) + c.connect() + # Do the auth dance. + ca = sspi.ClientAuth(options.package, targetspn=options.target_spn) + data = None + while 1: + err, out_buf = ca.authorize(data) + _send_msg(c.sock, out_buf[0].Buffer) + if err==0: + break + data = _get_msg(c.sock) + print("Auth dance complete - sending a few encryted messages") + # Assume out data is sensitive - encrypt the message. + for data in "Hello from the client".split(): + blob, key = ca.encrypt(data) + _send_msg(c.sock, blob) + _send_msg(c.sock, key) + c.sock.close() + print("Client completed.") + +if __name__=='__main__': + parser = optparse.OptionParser("%prog [options] client|server", + description=__doc__) + + parser.add_option("", "--package", action="store", default="NTLM", + help="The SSPI package to use (eg, Kerberos) - default is NTLM") + + parser.add_option("", "--target-spn", action="store", + help="""The target security provider name to use. The + string contents are security-package specific. For + example, 'Kerberos' or 'Negotiate' require the server + principal name (SPN) (ie, the username) of the remote + process. For NTLM this must be blank.""") + + parser.add_option("", "--port", action="store", default="8181", + help="The port number to use (default=8181)") + + parser.add_option("", "--wait", action="store_true", + help="""Cause the program to wait for input just before + terminating. Useful when using via runas to see + any error messages before termination. 
+ """) + + options, args = parser.parse_args() + try: + options.port = int(options.port) + except (ValueError, TypeError): + parser.error("--port must be an integer") + + try: + try: + if not args: + args = [''] + if args[0]=="client": + sspi_client() + elif args[0]=="server": + serve() + else: + parser.error("You must supply 'client' or 'server' - " \ + "use --help for details") + except KeyboardInterrupt: + pass + except SystemExit: + pass + except: + traceback.print_exc() + finally: + if options.wait: + input("Press enter to continue") diff --git a/venv/Lib/site-packages/win32/Demos/security/sspi/validate_password.py b/venv/Lib/site-packages/win32/Demos/security/sspi/validate_password.py new file mode 100644 index 00000000..5a967047 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/security/sspi/validate_password.py @@ -0,0 +1,38 @@ +# Demonstrates how to validate a password. +# See also MSKB article Q180548 +# +# To use with Kerberos you need to jump through the 'targetspn' hoops. + +import win32security +import sys +from sspi import ClientAuth, ServerAuth + +def validate(username, password, domain = ""): + auth_info = username, domain, password + ca = ClientAuth("NTLM", auth_info = auth_info) + sa = ServerAuth("NTLM") + + data = err = None + while err != 0: + err, data = ca.authorize(data) + err, data = sa.authorize(data) + # If we get here without exception, we worked! + +if __name__=='__main__': + if len(sys.argv) not in [2,3,4]: + print("Usage: %s username [password [domain]]" % (__file__,)) + sys.exit(1) + + # password and domain are optional! + password = None + if len(sys.argv)>=3: + password = sys.argv[2] + domain = "" + if len(sys.argv)>=4: + domain = sys.argv[3] + try: + validate(sys.argv[1], password, domain) + print("Validated OK") + except win32security.error as details: + hr, func, msg = details + print("Validation failed: %s (%d)" % (msg, hr)) diff --git a/venv/Lib/site-packages/win32/Demos/service/__pycache__/nativePipeTestService.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/service/__pycache__/nativePipeTestService.cpython-36.pyc new file mode 100644 index 00000000..366a56b2 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/service/__pycache__/nativePipeTestService.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/service/__pycache__/pipeTestService.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/service/__pycache__/pipeTestService.cpython-36.pyc new file mode 100644 index 00000000..db0587c4 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/service/__pycache__/pipeTestService.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/service/__pycache__/pipeTestServiceClient.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/service/__pycache__/pipeTestServiceClient.cpython-36.pyc new file mode 100644 index 00000000..7ae721f1 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/service/__pycache__/pipeTestServiceClient.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/service/__pycache__/serviceEvents.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/service/__pycache__/serviceEvents.cpython-36.pyc new file mode 100644 index 00000000..9b06d267 Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/service/__pycache__/serviceEvents.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/service/nativePipeTestService.py b/venv/Lib/site-packages/win32/Demos/service/nativePipeTestService.py new file mode 100644 index 00000000..ce41ab2e --- 
/dev/null +++ b/venv/Lib/site-packages/win32/Demos/service/nativePipeTestService.py @@ -0,0 +1,56 @@ +# This is an example of a service hosted by python.exe rather than +# pythonservice.exe. + +# Note that it is very rare that using python.exe is a better option +# than the default pythonservice.exe - the latter has better error handling +# so that if Python itself can't be initialized or there are very early +# import errors, you will get error details written to the event log. When +# using python.exe instead, you are forced to wait for the interpreter startup +# and imports to succeed before you are able to effectively setup your own +# error handling. + +# So in short, please make sure you *really* want to do this, otherwise just +# stick with the default. + +import sys +import os +import win32serviceutil +import servicemanager + +from pipeTestService import TestPipeService + +class NativeTestPipeService(TestPipeService): + _svc_name_ = "PyNativePipeTestService" + _svc_display_name_ = "Python Native Pipe Test Service" + _svc_description_ = "Tests Python.exe hosted services" + # tell win32serviceutil we have a custom executable and custom args + # so registration does the right thing. + _exe_name_ = sys.executable + _exe_args_ = '"' + os.path.abspath(sys.argv[0]) + '"' + +def main(): + if len(sys.argv)==1: + # service must be starting... + # for the sake of debugging etc, we use win32traceutil to see + # any unhandled exceptions and print statements. + import win32traceutil + print("service is starting...") + print("(execute this script with '--help' if that isn't what you want)") + + servicemanager.Initialize() + servicemanager.PrepareToHostSingle(NativeTestPipeService) + # Now ask the service manager to fire things up for us... + servicemanager.StartServiceCtrlDispatcher() + print("service done!") + else: + win32serviceutil.HandleCommandLine(NativeTestPipeService) + +if __name__=='__main__': + try: + main() + except (SystemExit, KeyboardInterrupt): + raise + except: + print("Something went bad!") + import traceback + traceback.print_exc() diff --git a/venv/Lib/site-packages/win32/Demos/service/pipeTestService.py b/venv/Lib/site-packages/win32/Demos/service/pipeTestService.py new file mode 100644 index 00000000..15066898 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/service/pipeTestService.py @@ -0,0 +1,163 @@ +# A Demo of services and named pipes. + +# A multi-threaded service that simply echos back its input. + +# * Install as a service using "pipeTestService.py install" +# * Use Control Panel to change the user name of the service +# to a real user name (ie, NOT the SystemAccount) +# * Start the service. +# * Run the "pipeTestServiceClient.py" program as the client pipe side. + +import win32serviceutil, win32service +import pywintypes, win32con, winerror +# Use "import *" to keep this looking as much as a "normal" service +# as possible. Real code shouldn't do this. +from win32event import * +from win32file import * +from win32pipe import * +from win32api import * +from ntsecuritycon import * + +# Old versions of the service framework would not let you import this +# module at the top-level. Now you can, and can check 'Debugging()' and +# 'RunningAsService()' to check your context. +import servicemanager + +import traceback +import _thread + +def ApplyIgnoreError(fn, args): + try: + return fn(*args) + except error: # Ignore win32api errors. 
+ return None + +class TestPipeService(win32serviceutil.ServiceFramework): + _svc_name_ = "PyPipeTestService" + _svc_display_name_ = "Python Pipe Test Service" + _svc_description_ = "Tests Python service framework by receiving and echoing messages over a named pipe" + + def __init__(self, args): + win32serviceutil.ServiceFramework.__init__(self, args) + self.hWaitStop = CreateEvent(None, 0, 0, None) + self.overlapped = pywintypes.OVERLAPPED() + self.overlapped.hEvent = CreateEvent(None,0,0,None) + self.thread_handles = [] + + def CreatePipeSecurityObject(self): + # Create a security object giving World read/write access, + # but only "Owner" modify access. + sa = pywintypes.SECURITY_ATTRIBUTES() + sidEveryone = pywintypes.SID() + sidEveryone.Initialize(SECURITY_WORLD_SID_AUTHORITY,1) + sidEveryone.SetSubAuthority(0, SECURITY_WORLD_RID) + sidCreator = pywintypes.SID() + sidCreator.Initialize(SECURITY_CREATOR_SID_AUTHORITY,1) + sidCreator.SetSubAuthority(0, SECURITY_CREATOR_OWNER_RID) + + acl = pywintypes.ACL() + acl.AddAccessAllowedAce(FILE_GENERIC_READ|FILE_GENERIC_WRITE, sidEveryone) + acl.AddAccessAllowedAce(FILE_ALL_ACCESS, sidCreator) + + sa.SetSecurityDescriptorDacl(1, acl, 0) + return sa + + # The functions executed in their own thread to process a client request. + def DoProcessClient(self, pipeHandle, tid): + try: + try: + # Create a loop, reading large data. If we knew the data stream was + # was small, a simple ReadFile would do. + d = ''.encode('ascii') # ensure bytes on py2k and py3k... + hr = winerror.ERROR_MORE_DATA + while hr==winerror.ERROR_MORE_DATA: + hr, thisd = ReadFile(pipeHandle, 256) + d = d + thisd + print("Read", d) + ok = 1 + except error: + # Client disconnection - do nothing + ok = 0 + + # A secure service would handle (and ignore!) errors writing to the + # pipe, but for the sake of this demo we dont (if only to see what errors + # we can get when our clients break at strange times :-) + if ok: + msg = ("%s (on thread %d) sent me %s" % (GetNamedPipeHandleState(pipeHandle)[4],tid, d)).encode('ascii') + WriteFile(pipeHandle, msg) + finally: + ApplyIgnoreError( DisconnectNamedPipe, (pipeHandle,) ) + ApplyIgnoreError( CloseHandle, (pipeHandle,) ) + + def ProcessClient(self, pipeHandle): + try: + procHandle = GetCurrentProcess() + th = DuplicateHandle(procHandle, GetCurrentThread(), procHandle, 0, 0, win32con.DUPLICATE_SAME_ACCESS) + try: + self.thread_handles.append(th) + try: + return self.DoProcessClient(pipeHandle, th) + except: + traceback.print_exc() + finally: + self.thread_handles.remove(th) + except: + traceback.print_exc() + + def SvcStop(self): + self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) + SetEvent(self.hWaitStop) + + def SvcDoRun(self): + # Write an event log record - in debug mode we will also + # see this message printed. 
+ servicemanager.LogMsg( + servicemanager.EVENTLOG_INFORMATION_TYPE, + servicemanager.PYS_SERVICE_STARTED, + (self._svc_name_, '') + ) + + num_connections = 0 + while 1: + pipeHandle = CreateNamedPipe("\\\\.\\pipe\\PyPipeTest", + PIPE_ACCESS_DUPLEX| FILE_FLAG_OVERLAPPED, + PIPE_TYPE_MESSAGE | PIPE_READMODE_BYTE, + PIPE_UNLIMITED_INSTANCES, # max instances + 0, 0, 6000, + self.CreatePipeSecurityObject()) + try: + hr = ConnectNamedPipe(pipeHandle, self.overlapped) + except error as details: + print("Error connecting pipe!", details) + CloseHandle(pipeHandle) + break + if hr==winerror.ERROR_PIPE_CONNECTED: + # Client is already connected - signal event + SetEvent(self.overlapped.hEvent) + rc = WaitForMultipleObjects((self.hWaitStop, self.overlapped.hEvent), 0, INFINITE) + if rc==WAIT_OBJECT_0: + # Stop event + break + else: + # Pipe event - spawn thread to deal with it. + _thread.start_new_thread(self.ProcessClient, (pipeHandle,)) + num_connections = num_connections + 1 + + # Sleep to ensure that any new threads are in the list, and then + # wait for all current threads to finish. + # What is a better way? + Sleep(500) + while self.thread_handles: + self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING, 5000) + print("Waiting for %d threads to finish..." % (len(self.thread_handles))) + WaitForMultipleObjects(self.thread_handles, 1, 3000) + # Write another event log record. + servicemanager.LogMsg( + servicemanager.EVENTLOG_INFORMATION_TYPE, + servicemanager.PYS_SERVICE_STOPPED, + (self._svc_name_, " after processing %d connections" % (num_connections,)) + ) + + +if __name__=='__main__': + win32serviceutil.HandleCommandLine(TestPipeService) diff --git a/venv/Lib/site-packages/win32/Demos/service/pipeTestServiceClient.py b/venv/Lib/site-packages/win32/Demos/service/pipeTestServiceClient.py new file mode 100644 index 00000000..61710e9b --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/service/pipeTestServiceClient.py @@ -0,0 +1,121 @@ +# A Test Program for pipeTestService.py +# +# Install and start the Pipe Test service, then run this test +# either from the same machine, or from another using the "-s" param. +# +# Eg: pipeTestServiceClient.py -s server_name Hi There +# Should work. + +from win32pipe import * +from win32file import * +from win32event import * +import pywintypes +import win32api +import winerror +import sys, os, traceback + +verbose = 0 + +#def ReadFromPipe(pipeName): + # Could (Should?) use CallNamedPipe, but this technique allows variable size + # messages (whereas you must supply a buffer size for CallNamedPipe! +# hPipe = CreateFile(pipeName, GENERIC_WRITE, 0, None, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, 0) +# more = 1 +# while more: +# hr = ReadFile(hPipe, 256) +# if hr==0: +# more = 0 +# except win32api.error (hr, fn, desc): +# if hr==winerror.ERROR_MORE_DATA: +# data = dat +# + +def CallPipe(fn, args): + ret = None + retryCount = 0 + while retryCount < 8: # Keep looping until user cancels. 
+ retryCount = retryCount + 1 + try: + return fn(*args) + except win32api.error as exc: + if exc.winerror==winerror.ERROR_PIPE_BUSY: + win32api.Sleep(5000) + continue + else: + raise + + raise RuntimeError("Could not make a connection to the server") + +def testClient(server,msg): + if verbose: + print("Sending", msg) + data = CallPipe(CallNamedPipe, ("\\\\%s\\pipe\\PyPipeTest" % server, msg, 256, NMPWAIT_WAIT_FOREVER)) + if verbose: + print("Server sent back '%s'" % data) + print("Sent and received a message!") + +def testLargeMessage(server, size = 4096): + if verbose: + print("Sending message of size %d" % (size)) + msg = "*" * size + data = CallPipe(CallNamedPipe, ("\\\\%s\\pipe\\PyPipeTest" % server, msg, 512, NMPWAIT_WAIT_FOREVER)) + if len(data)-size: + print("Sizes are all wrong - send %d, got back %d" % (size, len(data))) + +def stressThread(server, numMessages, wait): + try: + try: + for i in range(numMessages): + r = CallPipe(CallNamedPipe, ("\\\\%s\\pipe\\PyPipeTest" % server, "#" * 512, 1024, NMPWAIT_WAIT_FOREVER)) + except: + traceback.print_exc() + print("Failed after %d messages" % i) + finally: + SetEvent(wait) + +def stressTestClient(server, numThreads, numMessages): + import _thread + thread_waits = [] + for t_num in range(numThreads): + # Note I could just wait on thread handles (after calling DuplicateHandle) + # See the service itself for an example of waiting for the clients... + wait = CreateEvent(None, 0, 0, None) + thread_waits.append(wait) + _thread.start_new_thread(stressThread, (server,numMessages, wait)) + # Wait for all threads to finish. + WaitForMultipleObjects(thread_waits, 1, INFINITE) + +def main(): + import sys, getopt + server = "." + thread_count = 0 + msg_count = 500 + try: + opts, args = getopt.getopt(sys.argv[1:], 's:t:m:vl') + for o,a in opts: + if o=='-s': + server = a + if o=='-m': + msg_count = int(a) + if o=='-t': + thread_count = int(a) + if o=='-v': + global verbose + verbose = 1 + if o=='-l': + testLargeMessage(server) + msg = " ".join(args).encode("mbcs") + except getopt.error as msg: + print(msg) + my_name = os.path.split(sys.argv[0])[1] + print("Usage: %s [-v] [-s server] [-t thread_count=0] [-m msg_count=500] msg ..." % my_name) + print(" -v = verbose") + print(" Specifying a value for -t will stress test using that many threads.") + return + testClient(server, msg) + if thread_count > 0: + print("Spawning %d threads each sending %d messages..." % (thread_count, msg_count)) + stressTestClient(server, thread_count, msg_count) + +if __name__=='__main__': + main() diff --git a/venv/Lib/site-packages/win32/Demos/service/serviceEvents.py b/venv/Lib/site-packages/win32/Demos/service/serviceEvents.py new file mode 100644 index 00000000..b6012ebe --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/service/serviceEvents.py @@ -0,0 +1,88 @@ +# A Demo of a service that takes advantage of the additional notifications +# available in later Windows versions. + +# Note that all output is written as event log entries - so you must install +# and start the service, then look at the event log for messages as events +# are generated. 
+ +# Events are generated for USB device insertion and removal, power state +# changes and hardware profile events - so try putting your computer to +# sleep and waking it, inserting a memory stick, etc then check the event log + +import win32serviceutil, win32service +import win32event +import servicemanager + +# Most event notification support lives around win32gui +import win32gui, win32gui_struct, win32con +GUID_DEVINTERFACE_USB_DEVICE = "{A5DCBF10-6530-11D2-901F-00C04FB951ED}" + +class EventDemoService(win32serviceutil.ServiceFramework): + _svc_name_ = "PyServiceEventDemo" + _svc_display_name_ = "Python Service Event Demo" + _svc_description_ = "Demonstrates a Python service which takes advantage of the extra notifications" + + def __init__(self, args): + win32serviceutil.ServiceFramework.__init__(self, args) + self.hWaitStop = win32event.CreateEvent(None, 0, 0, None) + # register for a device notification - we pass our service handle + # instead of a window handle. + filter = win32gui_struct.PackDEV_BROADCAST_DEVICEINTERFACE( + GUID_DEVINTERFACE_USB_DEVICE) + self.hdn = win32gui.RegisterDeviceNotification(self.ssh, filter, + win32con.DEVICE_NOTIFY_SERVICE_HANDLE) + + # Override the base class so we can accept additional events. + def GetAcceptedControls(self): + # say we accept them all. + rc = win32serviceutil.ServiceFramework.GetAcceptedControls(self) + rc |= win32service.SERVICE_ACCEPT_PARAMCHANGE \ + | win32service.SERVICE_ACCEPT_NETBINDCHANGE \ + | win32service.SERVICE_CONTROL_DEVICEEVENT \ + | win32service.SERVICE_ACCEPT_HARDWAREPROFILECHANGE \ + | win32service.SERVICE_ACCEPT_POWEREVENT \ + | win32service.SERVICE_ACCEPT_SESSIONCHANGE + return rc + + # All extra events are sent via SvcOtherEx (SvcOther remains as a + # function taking only the first args for backwards compat) + def SvcOtherEx(self, control, event_type, data): + # This is only showing a few of the extra events - see the MSDN + # docs for "HandlerEx callback" for more info. + if control == win32service.SERVICE_CONTROL_DEVICEEVENT: + info = win32gui_struct.UnpackDEV_BROADCAST(data) + msg = "A device event occurred: %x - %s" % (event_type, info) + elif control == win32service.SERVICE_CONTROL_HARDWAREPROFILECHANGE: + msg = "A hardware profile changed: type=%s, data=%s" % (event_type, data) + elif control == win32service.SERVICE_CONTROL_POWEREVENT: + msg = "A power event: setting %s" % data + elif control == win32service.SERVICE_CONTROL_SESSIONCHANGE: + # data is a single elt tuple, but this could potentially grow + # in the future if the win32 struct does + msg = "Session event: type=%s, data=%s" % (event_type, data) + else: + msg = "Other event: code=%d, type=%s, data=%s" \ + % (control, event_type, data) + + servicemanager.LogMsg( + servicemanager.EVENTLOG_INFORMATION_TYPE, + 0xF000, # generic message + (msg, '') + ) + + def SvcStop(self): + self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) + win32event.SetEvent(self.hWaitStop) + + def SvcDoRun(self): + # do nothing at all - just wait to be stopped + win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE) + # Write a stop message. 
+ servicemanager.LogMsg( + servicemanager.EVENTLOG_INFORMATION_TYPE, + servicemanager.PYS_SERVICE_STOPPED, + (self._svc_name_, '') + ) + +if __name__=='__main__': + win32serviceutil.HandleCommandLine(EventDemoService) diff --git a/venv/Lib/site-packages/win32/Demos/timer_demo.py b/venv/Lib/site-packages/win32/Demos/timer_demo.py new file mode 100644 index 00000000..58a9a81a --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/timer_demo.py @@ -0,0 +1,68 @@ +# -*- Mode: Python; tab-width: 4 -*- +# + +# This module, and the timer.pyd core timer support, were written by +# Sam Rushing (rushing@nightmare.com) + +import timer +import time + +# Timers are based on Windows messages. So we need +# to do the event-loop thing! +import win32event, win32gui + +# glork holds a simple counter for us. + +class glork: + + def __init__ (self, delay=1000, max=10): + self.x = 0 + self.max = max + self.id = timer.set_timer (delay, self.increment) + # Could use the threading module, but this is + # a win32 extension test after all! :-) + self.event = win32event.CreateEvent(None, 0, 0, None) + + def increment (self, id, time): + print('x = %d' % self.x) + self.x = self.x + 1 + # if we've reached the max count, + # kill off the timer. + if self.x > self.max: + # we could have used 'self.id' here, too + timer.kill_timer (id) + win32event.SetEvent(self.event) + +# create a counter that will count from '1' thru '10', incrementing +# once a second, and then stop. + +def demo (delay=1000, stop=10): + g = glork(delay, stop) + # Timers are message based - so we need + # To run a message loop while waiting for our timers + # to expire. + start_time = time.time() + while 1: + # We can't simply give a timeout of 30 seconds, as + # we may continouusly be recieving other input messages, + # and therefore never expire. + rc = win32event.MsgWaitForMultipleObjects( + (g.event,), # list of objects + 0, # wait all + 500, # timeout + win32event.QS_ALLEVENTS, # type of input + ) + if rc == win32event.WAIT_OBJECT_0: + # Event signalled. + break + elif rc == win32event.WAIT_OBJECT_0+1: + # Message waiting. + if win32gui.PumpWaitingMessages(): + raise RuntimeError("We got an unexpected WM_QUIT message!") + else: + # This wait timed-out. + if time.time()-start_time > 30: + raise RuntimeError("We timed out waiting for the timers to expire!") + +if __name__=='__main__': + demo() diff --git a/venv/Lib/site-packages/win32/Demos/win32clipboardDemo.py b/venv/Lib/site-packages/win32/Demos/win32clipboardDemo.py new file mode 100644 index 00000000..c57b9605 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32clipboardDemo.py @@ -0,0 +1,134 @@ +# win32clipboardDemo.py +# +# Demo/test of the win32clipboard module. +from win32clipboard import * +from pywin32_testutil import str2bytes # py3k-friendly helper +import win32con +import types + +if not __debug__: + print("WARNING: The test code in this module uses assert") + print("This instance of Python has asserts disabled, so many tests will be skipped") + +cf_names = {} +# Build map of CF_* constants to names. +for name, val in list(win32con.__dict__.items()): + if name[:3]=="CF_" and name != "CF_SCREENFONTS": # CF_SCREEN_FONTS==CF_TEXT!?!? + cf_names[val] = name + +def TestEmptyClipboard(): + OpenClipboard() + try: + EmptyClipboard() + assert EnumClipboardFormats(0)==0, "Clipboard formats were available after emptying it!" 
+ finally: + CloseClipboard() + +def TestText(): + OpenClipboard() + try: + text = "Hello from Python" + text_bytes = str2bytes(text) + SetClipboardText(text) + got = GetClipboardData(win32con.CF_TEXT) + # CF_TEXT always gives us 'bytes' back . + assert got == text_bytes, "Didnt get the correct result back - '%r'." % (got,) + finally: + CloseClipboard() + + OpenClipboard() + try: + # CF_UNICODE text always gives unicode objects back. + got = GetClipboardData(win32con.CF_UNICODETEXT) + assert got == text, "Didnt get the correct result back - '%r'." % (got,) + assert type(got)==str, "Didnt get the correct result back - '%r'." % (got,) + + # CF_OEMTEXT is a bytes-based format. + got = GetClipboardData(win32con.CF_OEMTEXT) + assert got == text_bytes, "Didnt get the correct result back - '%r'." % (got,) + + # Unicode tests + EmptyClipboard() + text = "Hello from Python unicode" + text_bytes = str2bytes(text) + # Now set the Unicode value + SetClipboardData(win32con.CF_UNICODETEXT, text) + # Get it in Unicode. + got = GetClipboardData(win32con.CF_UNICODETEXT) + assert got == text, "Didnt get the correct result back - '%r'." % (got,) + assert type(got)==str, "Didnt get the correct result back - '%r'." % (got,) + + # Close and open the clipboard to ensure auto-conversions take place. + finally: + CloseClipboard() + + OpenClipboard() + try: + + # Make sure I can still get the text as bytes + got = GetClipboardData(win32con.CF_TEXT) + assert got == text_bytes, "Didnt get the correct result back - '%r'." % (got,) + # Make sure we get back the correct types. + got = GetClipboardData(win32con.CF_UNICODETEXT) + assert type(got)==str, "Didnt get the correct result back - '%r'." % (got,) + got = GetClipboardData(win32con.CF_OEMTEXT) + assert got == text_bytes, "Didnt get the correct result back - '%r'." % (got,) + print("Clipboard text tests worked correctly") + finally: + CloseClipboard() + +def TestClipboardEnum(): + OpenClipboard() + try: + # Enumerate over the clipboard types + enum = 0 + while 1: + enum = EnumClipboardFormats(enum) + if enum==0: + break + assert IsClipboardFormatAvailable(enum), "Have format, but clipboard says it is not available!" + n = cf_names.get(enum,"") + if not n: + try: + n = GetClipboardFormatName(enum) + except error: + n = "unknown (%s)" % (enum,) + + print("Have format", n) + print("Clipboard enumerator tests worked correctly") + finally: + CloseClipboard() + +class Foo: + def __init__(self, **kw): + self.__dict__.update(kw) + def __cmp__(self, other): + return cmp(self.__dict__, other.__dict__) + def __eq__(self, other): + return self.__dict__==other.__dict__ + +def TestCustomFormat(): + OpenClipboard() + try: + # Just for the fun of it pickle Python objects through the clipboard + fmt = RegisterClipboardFormat("Python Pickle Format") + import pickle + pickled_object = Foo(a=1, b=2, Hi=3) + SetClipboardData(fmt, pickle.dumps( pickled_object ) ) + # Now read it back. + data = GetClipboardData(fmt) + loaded_object = pickle.loads(data) + assert pickle.loads(data) == pickled_object, "Didnt get the correct data!" + + print("Clipboard custom format tests worked correctly") + finally: + CloseClipboard() + + +if __name__=='__main__': + TestEmptyClipboard() + TestText() + TestCustomFormat() + TestClipboardEnum() + # And leave it empty at the end! 
+ TestEmptyClipboard() diff --git a/venv/Lib/site-packages/win32/Demos/win32clipboard_bitmapdemo.py b/venv/Lib/site-packages/win32/Demos/win32clipboard_bitmapdemo.py new file mode 100644 index 00000000..d7eeb3d7 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32clipboard_bitmapdemo.py @@ -0,0 +1,85 @@ +import win32gui +import win32api +import win32clipboard +import win32con +import time + +class ViewerWindow: + def __init__(self): + self.hwndNextViewer = None + + def OnPaint(self, hwnd, msg, wp, lp): + dc, ps=win32gui.BeginPaint(hwnd) + wndrect = win32gui.GetClientRect(hwnd) + wndwidth = wndrect[2]-wndrect[0] + wndheight = wndrect[3]-wndrect[1] + win32clipboard.OpenClipboard() + try: + try: + hbitmap = win32clipboard.GetClipboardData(win32clipboard.CF_BITMAP) + except TypeError: + font=win32gui.LOGFONT() + font.lfHeight=15 #int(wndheight/20) + font.lfWidth=15 #font.lfHeight + # font.lfWeight=150 + hf=win32gui.CreateFontIndirect(font) + win32gui.SelectObject(dc,hf) + win32gui.SetBkMode(dc, win32con.TRANSPARENT) + win32gui.SetTextColor(dc,win32api.RGB(0,0,0)) + win32gui.DrawText(dc,'No bitmaps are in the clipboard\n(try pressing the PrtScn button)', -1, + (0,0, wndwidth, wndheight), + win32con.DT_CENTER) + else: + bminfo = win32gui.GetObject(hbitmap) + dcDC = win32gui.CreateCompatibleDC(None) + win32gui.SelectObject(dcDC, hbitmap) + win32gui.StretchBlt(dc, 0, 0, wndwidth, wndheight, dcDC, 0, 0, bminfo.bmWidth, bminfo.bmHeight, win32con.SRCCOPY) + win32gui.DeleteDC(dcDC) + win32gui.EndPaint(hwnd, ps) + finally: + win32clipboard.CloseClipboard() + return 0 + + def OnDrawClipboard(self, hwnd, msg, wp, lp): + win32gui.InvalidateRect(hwnd,None,True) + + def OnChangeCBChain(self, hwnd, msg, wp, lp): + # If the next window is closing, repair the chain. + if wp == self.hwndNextViewer: + self.hwndNextViewer = lp + # Otherwise, pass the message to the next link. + elif self.hwndNextViewer: + win32gui.SendMessage(self.hwndNextViewer, msg, wp, lp) + + def OnCreate(self, hwnd, msg, wp, lp): + self.hwndNextViewer = win32gui.SetClipboardViewer(hwnd); + + def OnClose(self, hwnd, msg, wp, lp): + win32clipboard.ChangeClipboardChain(hwnd, self.hwndNextViewer) + win32gui.DestroyWindow(hwnd) + win32gui.PostQuitMessage(0) + + def go(self): + wndproc={win32con.WM_PAINT: self.OnPaint, + win32con.WM_CLOSE: self.OnClose, + win32con.WM_CREATE: self.OnCreate, + win32con.WM_DRAWCLIPBOARD: self.OnDrawClipboard, + win32con.WM_CHANGECBCHAIN: self.OnChangeCBChain, + } + + wc = win32gui.WNDCLASS() + wc.lpszClassName = 'test_win32clipboard_bmp' + wc.style = win32con.CS_GLOBALCLASS|win32con.CS_VREDRAW | win32con.CS_HREDRAW + wc.hbrBackground = win32con.COLOR_WINDOW+1 + wc.lpfnWndProc=wndproc + class_atom=win32gui.RegisterClass(wc) + hwnd = win32gui.CreateWindowEx(0, class_atom,'ClipboardViewer', + win32con.WS_CAPTION|win32con.WS_VISIBLE|win32con.WS_THICKFRAME|win32con.WS_SYSMENU, + 100,100,900,900, 0, 0, 0, None) + win32clipboard.SetClipboardViewer(hwnd) + win32gui.PumpMessages() + win32gui.UnregisterClass(class_atom,None) + +if __name__=='__main__': + w = ViewerWindow() + w.go() diff --git a/venv/Lib/site-packages/win32/Demos/win32comport_demo.py b/venv/Lib/site-packages/win32/Demos/win32comport_demo.py new file mode 100644 index 00000000..2326d5ac --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32comport_demo.py @@ -0,0 +1,133 @@ +# This is a simple serial port terminal demo. +# +# Its primary purpose is to demonstrate the native serial port access offered via +# win32file. 
+ +# It uses 3 threads: +# - The main thread, which cranks up the other 2 threads, then simply waits for them to exit. +# - The user-input thread - blocks waiting for a keyboard character, and when found sends it +# out the COM port. If the character is Ctrl+C, it stops, signalling the COM port thread to stop. +# - The COM port thread is simply listening for input on the COM port, and prints it to the screen. + +# This demo uses userlapped IO, so that none of the read or write operations actually block (however, +# in this sample, the very next thing we do _is_ block - so it shows off the concepts even though it +# doesnt exploit them. + +from win32file import * # The base COM port and file IO functions. +from win32event import * # We use events and the WaitFor[Multiple]Objects functions. +import win32con # constants. +import msvcrt # For the getch() function. + +import threading +import sys + +def FindModem(): + # Snoop over the comports, seeing if it is likely we have a modem. + for i in range(1,5): + port = "COM%d" % (i,) + try: + handle = CreateFile(port, + win32con.GENERIC_READ | win32con.GENERIC_WRITE, + 0, # exclusive access + None, # no security + win32con.OPEN_EXISTING, + win32con.FILE_ATTRIBUTE_NORMAL, + None) + # It appears that an available COM port will always success here, + # just return 0 for the status flags. We only care that it has _any_ status + # flags (and therefore probably a real modem) + if GetCommModemStatus(handle) != 0: + return port + except error: + pass # No port, or modem status failed. + return None + +# A basic synchronous COM port file-like object +class SerialTTY: + def __init__(self, port): + if type(port)==type(0): + port = "COM%d" % (port,) + self.handle = CreateFile(port, + win32con.GENERIC_READ | win32con.GENERIC_WRITE, + 0, # exclusive access + None, # no security + win32con.OPEN_EXISTING, + win32con.FILE_ATTRIBUTE_NORMAL | win32con.FILE_FLAG_OVERLAPPED, + None) + # Tell the port we want a notification on each char. + SetCommMask(self.handle, EV_RXCHAR) + # Setup a 4k buffer + SetupComm(self.handle, 4096, 4096) + # Remove anything that was there + PurgeComm(self.handle, PURGE_TXABORT | PURGE_RXABORT | PURGE_TXCLEAR | PURGE_RXCLEAR ) + # Setup for overlapped IO. + timeouts = 0xFFFFFFFF, 0, 1000, 0, 1000 + SetCommTimeouts(self.handle, timeouts) + # Setup the connection info. + dcb = GetCommState( self.handle ) + dcb.BaudRate = CBR_115200 + dcb.ByteSize = 8 + dcb.Parity = NOPARITY + dcb.StopBits = ONESTOPBIT + SetCommState(self.handle, dcb) + print("Connected to %s at %s baud" % (port, dcb.BaudRate)) + + def _UserInputReaderThread(self): + overlapped = OVERLAPPED() + overlapped.hEvent = CreateEvent(None, 1, 0, None) + try: + while 1: + ch = msvcrt.getch() + if ord(ch)==3: + break + WriteFile(self.handle, ch, overlapped) + # Wait for the write to complete. + WaitForSingleObject(overlapped.hEvent, INFINITE) + finally: + SetEvent(self.eventStop) + + def _ComPortThread(self): + overlapped = OVERLAPPED() + overlapped.hEvent = CreateEvent(None, 1, 0, None) + while 1: + # XXX - note we could _probably_ just use overlapped IO on the win32file.ReadFile() statement + # XXX but this tests the COM stuff! + rc, mask = WaitCommEvent(self.handle, overlapped) + if rc == 0: # Character already ready! 
+ SetEvent(overlapped.hEvent) + rc = WaitForMultipleObjects([overlapped.hEvent, self.eventStop], 0, INFINITE) + if rc == WAIT_OBJECT_0: + # Some input - read and print it + flags, comstat = ClearCommError( self.handle ) + rc, data = ReadFile(self.handle, comstat.cbInQue, overlapped) + WaitForSingleObject(overlapped.hEvent, INFINITE) + sys.stdout.write(data) + else: + # Stop the thread! + # Just incase the user input thread uis still going, close it + sys.stdout.close() + break + + def Run(self): + self.eventStop = CreateEvent(None, 0, 0, None) + # Start the reader and writer threads. + user_thread = threading.Thread(target = self._UserInputReaderThread) + user_thread.start() + com_thread = threading.Thread(target = self._ComPortThread) + com_thread.start() + user_thread.join() + com_thread.join() + +if __name__=='__main__': + print("Serial port terminal demo - press Ctrl+C to exit") + if len(sys.argv)<=1: + port = FindModem() + if port is None: + print("No COM port specified, and no modem could be found") + print("Please re-run this script with the name of a COM port (eg COM3)") + sys.exit(1) + else: + port = sys.argv[1] + + tty = SerialTTY(port) + tty.Run() diff --git a/venv/Lib/site-packages/win32/Demos/win32console_demo.py b/venv/Lib/site-packages/win32/Demos/win32console_demo.py new file mode 100644 index 00000000..5bb50adc --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32console_demo.py @@ -0,0 +1,103 @@ +import win32console, win32con +import traceback, time + +virtual_keys={} +for k,v in list(win32con.__dict__.items()): + if k.startswith('VK_'): + virtual_keys[v]=k + +free_console=True +try: + win32console.AllocConsole() +except win32console.error as exc: + if exc.winerror!=5: + raise + ## only free console if one was created successfully + free_console=False + +stdout=win32console.GetStdHandle(win32console.STD_OUTPUT_HANDLE) +stdin=win32console.GetStdHandle(win32console.STD_INPUT_HANDLE) +newbuffer=win32console.CreateConsoleScreenBuffer() +newbuffer.SetConsoleActiveScreenBuffer() +newbuffer.SetConsoleTextAttribute(win32console.FOREGROUND_RED|win32console.FOREGROUND_INTENSITY + |win32console.BACKGROUND_GREEN|win32console.BACKGROUND_INTENSITY) +newbuffer.WriteConsole('This is a new screen buffer\n') + +## test setting screen buffer and window size +## screen buffer size cannot be smaller than window size +window_size=newbuffer.GetConsoleScreenBufferInfo()['Window'] +coord=win32console.PyCOORDType(X=window_size.Right+20, Y=window_size.Bottom+20) +newbuffer.SetConsoleScreenBufferSize(coord) + +window_size.Right+=10 +window_size.Bottom+=10 +newbuffer.SetConsoleWindowInfo(Absolute=True,ConsoleWindow=window_size) + +## write some records to the input queue +x=win32console.PyINPUT_RECORDType(win32console.KEY_EVENT) +x.Char='X' +x.KeyDown=True +x.RepeatCount=1 +x.VirtualKeyCode=0x58 +x.ControlKeyState=win32con.SHIFT_PRESSED + +z=win32console.PyINPUT_RECORDType(win32console.KEY_EVENT) +z.Char='Z' +z.KeyDown=True +z.RepeatCount=1 +z.VirtualKeyCode=0x5a +z.ControlKeyState=win32con.SHIFT_PRESSED + +stdin.WriteConsoleInput([x,z,x]) + +newbuffer.SetConsoleTextAttribute(win32console.FOREGROUND_RED|win32console.FOREGROUND_INTENSITY + |win32console.BACKGROUND_GREEN|win32console.BACKGROUND_INTENSITY) +newbuffer.WriteConsole('Press some keys, click some characters with the mouse\n') + +newbuffer.SetConsoleTextAttribute(win32console.FOREGROUND_BLUE|win32console.FOREGROUND_INTENSITY + |win32console.BACKGROUND_RED|win32console.BACKGROUND_INTENSITY) +newbuffer.WriteConsole('Hit "End" key to 
quit\n') + +breakout=False +while not breakout: + input_records=stdin.ReadConsoleInput(10) + for input_record in input_records: + if input_record.EventType==win32console.KEY_EVENT: + if input_record.KeyDown: + if input_record.Char=='\0': + newbuffer.WriteConsole(virtual_keys.get(input_record.VirtualKeyCode, 'VirtualKeyCode: %s' %input_record.VirtualKeyCode)) + else: + newbuffer.WriteConsole(input_record.Char) + if input_record.VirtualKeyCode==win32con.VK_END: + breakout=True + break + elif input_record.EventType==win32console.MOUSE_EVENT: + if input_record.EventFlags==0: ## 0 indicates a button event + if input_record.ButtonState!=0: ## exclude button releases + pos=input_record.MousePosition + # switch the foreground and background colors of the character that was clicked + attr=newbuffer.ReadConsoleOutputAttribute(Length=1, ReadCoord=pos)[0] + new_attr=attr + if attr&win32console.FOREGROUND_BLUE: + new_attr=(new_attr&~win32console.FOREGROUND_BLUE)|win32console.BACKGROUND_BLUE + if attr&win32console.FOREGROUND_RED: + new_attr=(new_attr&~win32console.FOREGROUND_RED)|win32console.BACKGROUND_RED + if attr&win32console.FOREGROUND_GREEN: + new_attr=(new_attr&~win32console.FOREGROUND_GREEN)|win32console.BACKGROUND_GREEN + + if attr&win32console.BACKGROUND_BLUE: + new_attr=(new_attr&~win32console.BACKGROUND_BLUE)|win32console.FOREGROUND_BLUE + if attr&win32console.BACKGROUND_RED: + new_attr=(new_attr&~win32console.BACKGROUND_RED)|win32console.FOREGROUND_RED + if attr&win32console.BACKGROUND_GREEN: + new_attr=(new_attr&~win32console.BACKGROUND_GREEN)|win32console.FOREGROUND_GREEN + newbuffer.WriteConsoleOutputAttribute((new_attr,),pos) + else: + newbuffer.WriteConsole(str(input_record)) + time.sleep(0.1) + +stdout.SetConsoleActiveScreenBuffer() +newbuffer.Close() +if free_console: + win32console.FreeConsole() + diff --git a/venv/Lib/site-packages/win32/Demos/win32cred_demo.py b/venv/Lib/site-packages/win32/Demos/win32cred_demo.py new file mode 100644 index 00000000..90e9c5f8 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32cred_demo.py @@ -0,0 +1,52 @@ +""" +Demonstrates prompting for credentials, saving, and loggging on with marshalled credential. 
+Also shows how to load user's profile +""" + +import win32net, win32security, win32api, win32con +import win32profile, win32cred + +## Prompt for a username/pwd for local computer +uiinfo={'MessageText':'Enter credentials for local machine','CaptionText':'win32cred_demo.py'} +target, pwd, save=win32cred.CredUIPromptForCredentials(TargetName=win32api.GetComputerName(), AuthError=0, + Flags=win32cred.CREDUI_FLAGS_DO_NOT_PERSIST|win32cred.CREDUI_FLAGS_SHOW_SAVE_CHECK_BOX, + Save=False, UiInfo=uiinfo) + +attrs=[ + {'Keyword':'attr1', 'Flags':0, 'Value':'unicode data'}, + {'Keyword':'attr2', 'Flags':0, 'Value':'character data'} + ] +cred={'Comment':'Created by win32cred_demo.py', 'UserName':target, 'TargetAlias': None, + 'TargetName':target,'CredentialBlob':pwd, 'Flags':win32cred.CRED_FLAGS_USERNAME_TARGET, + 'Persist':win32cred.CRED_PERSIST_ENTERPRISE,'Type':win32cred.CRED_TYPE_DOMAIN_PASSWORD, + 'Attributes':attrs} +win32cred.CredWrite(cred) +pwd=None +print(win32cred.CredRead(target, win32cred.CRED_TYPE_DOMAIN_PASSWORD)) + +## Marshal saved credential and use it to log on +mc=win32cred.CredMarshalCredential(win32cred.UsernameTargetCredential, target) +th=win32security.LogonUser(mc,None,'',win32con.LOGON32_LOGON_INTERACTIVE, win32con.LOGON32_PROVIDER_DEFAULT) +win32security.ImpersonateLoggedOnUser(th) +print('GetUserName:',win32api.GetUserName()) +win32security.RevertToSelf() + +## Load user's profile. (first check if user has a roaming profile) +username, domain=win32cred.CredUIParseUserName(target) +user_info_4=win32net.NetUserGetInfo(None, username, 4) +profilepath=user_info_4['profile'] +## LoadUserProfile apparently doesn't like an empty string +if not profilepath: + profilepath=None + +## leave Flags in since 2.3 still chokes on some types of optional keyword args +hk=win32profile.LoadUserProfile(th, {'UserName':username, 'Flags':0, 'ProfilePath':profilepath}) +## Get user's environment variables in a form that can be passed to win32process.CreateProcessAsUser +env=win32profile.CreateEnvironmentBlock(th,False) + + +## Cleanup should probably be in a finally block +win32profile.UnloadUserProfile(th, hk) +th.Close() + + diff --git a/venv/Lib/site-packages/win32/Demos/win32fileDemo.py b/venv/Lib/site-packages/win32/Demos/win32fileDemo.py new file mode 100644 index 00000000..f490f3e2 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32fileDemo.py @@ -0,0 +1,37 @@ +# This is a "demo" of win32file - it used to be more a test case than a +# demo, so has been moved to the test directory. + +# Please contribute your favourite simple little demo. +import win32file, win32api, win32con +import os + +# A very simple demo - note that this does no more than you can do with +# builtin Python file objects, so for something as simple as this, you +# generally *should* use builtin Python objects. Only use win32file etc +# when you need win32 specific features not available in Python. +def SimpleFileDemo(): + testName = os.path.join( win32api.GetTempPath(), "win32file_demo_test_file") + if os.path.exists(testName): os.unlink(testName) + # Open the file for writing. + handle = win32file.CreateFile(testName, + win32file.GENERIC_WRITE, + 0, + None, + win32con.CREATE_NEW, + 0, + None) + test_data = "Hello\0there".encode("ascii") + win32file.WriteFile(handle, test_data) + handle.Close() + # Open it for reading. 
+ handle = win32file.CreateFile(testName, win32file.GENERIC_READ, 0, None, win32con.OPEN_EXISTING, 0, None) + rc, data = win32file.ReadFile(handle, 1024) + handle.Close() + if data == test_data: + print("Successfully wrote and read a file") + else: + raise Exception("Got different data back???") + os.unlink(testName) + +if __name__=='__main__': + SimpleFileDemo() diff --git a/venv/Lib/site-packages/win32/Demos/win32gui_demo.py b/venv/Lib/site-packages/win32/Demos/win32gui_demo.py new file mode 100644 index 00000000..bcc8d833 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32gui_demo.py @@ -0,0 +1,106 @@ +# The start of a win32gui generic demo. +# Feel free to contribute more demos back ;-) + +import win32gui, win32con, win32api +import time, math, random + +def _MyCallback( hwnd, extra ): + hwnds, classes = extra + hwnds.append(hwnd) + classes[win32gui.GetClassName(hwnd)] = 1 + +def TestEnumWindows(): + windows = [] + classes = {} + win32gui.EnumWindows(_MyCallback, (windows, classes)) + print("Enumerated a total of %d windows with %d classes" % (len(windows),len(classes))) + if "tooltips_class32" not in classes: + print("Hrmmmm - I'm very surprised to not find a 'tooltips_class32' class.") + + +def OnPaint_1(hwnd, msg, wp, lp): + dc, ps=win32gui.BeginPaint(hwnd) + win32gui.SetGraphicsMode(dc, win32con.GM_ADVANCED) + br=win32gui.CreateSolidBrush(win32api.RGB(255,0,0)) + win32gui.SelectObject(dc, br) + angle=win32gui.GetWindowLong(hwnd, win32con.GWL_USERDATA) + win32gui.SetWindowLong(hwnd, win32con.GWL_USERDATA, angle+2) + r_angle=angle*(math.pi/180) + win32gui.SetWorldTransform(dc, + {'M11':math.cos(r_angle), 'M12':math.sin(r_angle), 'M21':math.sin(r_angle)*-1, 'M22':math.cos(r_angle),'Dx':250,'Dy':250}) + win32gui.MoveToEx(dc,250,250) + win32gui.BeginPath(dc) + win32gui.Pie(dc, 10, 70, 200, 200, 350, 350, 75, 10) + win32gui.Chord(dc, 200, 200, 850, 0, 350, 350, 75, 10) + win32gui.LineTo(dc, 300,300) + win32gui.LineTo(dc, 100, 20) + win32gui.LineTo(dc, 20, 100) + win32gui.LineTo(dc, 400, 0) + win32gui.LineTo(dc, 0, 400) + win32gui.EndPath(dc) + win32gui.StrokeAndFillPath(dc) + win32gui.EndPaint(hwnd, ps) + return 0 +wndproc_1={win32con.WM_PAINT:OnPaint_1} + +def OnPaint_2(hwnd, msg, wp, lp): + dc, ps=win32gui.BeginPaint(hwnd) + win32gui.SetGraphicsMode(dc, win32con.GM_ADVANCED) + l,t,r,b=win32gui.GetClientRect(hwnd) + + for x in range(25): + vertices=( + {'x':int(random.random()*r), 'y':int(random.random()*b), 'Red':int(random.random()*0xff00), 'Green':0, 'Blue':0, 'Alpha':0}, + {'x':int(random.random()*r), 'y':int(random.random()*b), 'Red':0, 'Green':int(random.random()*0xff00), 'Blue':0, 'Alpha':0}, + {'x':int(random.random()*r), 'y':int(random.random()*b), 'Red':0, 'Green':0, 'Blue':int(random.random()*0xff00), 'Alpha':0}, + ) + mesh=((0,1,2),) + win32gui.GradientFill(dc,vertices, mesh, win32con.GRADIENT_FILL_TRIANGLE) + win32gui.EndPaint(hwnd, ps) + return 0 +wndproc_2={win32con.WM_PAINT:OnPaint_2} + +def TestSetWorldTransform(): + wc = win32gui.WNDCLASS() + wc.lpszClassName = 'test_win32gui_1' + wc.style = win32con.CS_GLOBALCLASS|win32con.CS_VREDRAW | win32con.CS_HREDRAW + wc.hbrBackground = win32con.COLOR_WINDOW+1 + wc.lpfnWndProc=wndproc_1 + class_atom=win32gui.RegisterClass(wc) + hwnd = win32gui.CreateWindow(wc.lpszClassName, + 'Spin the Lobster!', + win32con.WS_CAPTION|win32con.WS_VISIBLE, + 100,100,900,900, 0, 0, 0, None) + for x in range(500): + win32gui.InvalidateRect(hwnd,None,True) + win32gui.PumpWaitingMessages() + time.sleep(0.01) + win32gui.DestroyWindow(hwnd) + 
win32gui.UnregisterClass(wc.lpszClassName, None) + +def TestGradientFill(): + wc = win32gui.WNDCLASS() + wc.lpszClassName = 'test_win32gui_2' + wc.style = win32con.CS_GLOBALCLASS|win32con.CS_VREDRAW | win32con.CS_HREDRAW + wc.hbrBackground = win32con.COLOR_WINDOW+1 + wc.lpfnWndProc=wndproc_2 + class_atom=win32gui.RegisterClass(wc) + hwnd = win32gui.CreateWindowEx(0, class_atom,'Kaleidoscope', + win32con.WS_CAPTION|win32con.WS_VISIBLE|win32con.WS_THICKFRAME|win32con.WS_SYSMENU, + 100,100,900,900, 0, 0, 0, None) + s=win32gui.GetWindowLong(hwnd,win32con.GWL_EXSTYLE) + win32gui.SetWindowLong(hwnd, win32con.GWL_EXSTYLE, s|win32con.WS_EX_LAYERED) + win32gui.SetLayeredWindowAttributes(hwnd, 0, 175, win32con.LWA_ALPHA) + for x in range(30): + win32gui.InvalidateRect(hwnd,None,True) + win32gui.PumpWaitingMessages() + time.sleep(0.3) + win32gui.DestroyWindow(hwnd) + win32gui.UnregisterClass(class_atom,None) + +print("Enumerating all windows...") +TestEnumWindows() +print("Testing drawing functions ...") +TestSetWorldTransform() +TestGradientFill() +print("All tests done!") diff --git a/venv/Lib/site-packages/win32/Demos/win32gui_devicenotify.py b/venv/Lib/site-packages/win32/Demos/win32gui_devicenotify.py new file mode 100644 index 00000000..39bcdacb --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32gui_devicenotify.py @@ -0,0 +1,82 @@ +# Demo RegisterDeviceNotification etc. Creates a hidden window to receive +# notifications. See serviceEvents.py for an example of a service doing +# that. +import sys, time +import win32gui, win32con, win32api, win32file +import win32gui_struct, winnt + +# These device GUIDs are from Ioevent.h in the Windows SDK. Ideally they +# could be collected somewhere for pywin32... +GUID_DEVINTERFACE_USB_DEVICE = "{A5DCBF10-6530-11D2-901F-00C04FB951ED}" + +# WM_DEVICECHANGE message handler. +def OnDeviceChange(hwnd, msg, wp, lp): + # Unpack the 'lp' into the appropriate DEV_BROADCAST_* structure, + # using the self-identifying data inside the DEV_BROADCAST_HDR. + info = win32gui_struct.UnpackDEV_BROADCAST(lp) + print("Device change notification:", wp, str(info)) + if wp==win32con.DBT_DEVICEQUERYREMOVE and info.devicetype==win32con.DBT_DEVTYP_HANDLE: + # Our handle is stored away in the structure - just close it + print("Device being removed - closing handle") + win32file.CloseHandle(info.handle) + # and cancel our notifications - if it gets plugged back in we get + # the same notification and try and close the same handle... + win32gui.UnregisterDeviceNotification(info.hdevnotify) + return True + + +def TestDeviceNotifications(dir_names): + wc = win32gui.WNDCLASS() + wc.lpszClassName = 'test_devicenotify' + wc.style = win32con.CS_GLOBALCLASS|win32con.CS_VREDRAW | win32con.CS_HREDRAW + wc.hbrBackground = win32con.COLOR_WINDOW+1 + wc.lpfnWndProc={win32con.WM_DEVICECHANGE:OnDeviceChange} + class_atom=win32gui.RegisterClass(wc) + hwnd = win32gui.CreateWindow(wc.lpszClassName, + 'Testing some devices', + # no need for it to be visible. 
+ win32con.WS_CAPTION, + 100,100,900,900, 0, 0, 0, None) + + hdevs = [] + # Watch for all USB device notifications + filter = win32gui_struct.PackDEV_BROADCAST_DEVICEINTERFACE( + GUID_DEVINTERFACE_USB_DEVICE) + hdev = win32gui.RegisterDeviceNotification(hwnd, filter, + win32con.DEVICE_NOTIFY_WINDOW_HANDLE) + hdevs.append(hdev) + # and create handles for all specified directories + for d in dir_names: + hdir = win32file.CreateFile(d, + winnt.FILE_LIST_DIRECTORY, + winnt.FILE_SHARE_READ | winnt.FILE_SHARE_WRITE | winnt.FILE_SHARE_DELETE, + None, # security attributes + win32con.OPEN_EXISTING, + win32con.FILE_FLAG_BACKUP_SEMANTICS | # required privileges: SE_BACKUP_NAME and SE_RESTORE_NAME. + win32con.FILE_FLAG_OVERLAPPED, + None) + + filter = win32gui_struct.PackDEV_BROADCAST_HANDLE(hdir) + hdev = win32gui.RegisterDeviceNotification(hwnd, filter, + win32con.DEVICE_NOTIFY_WINDOW_HANDLE) + hdevs.append(hdev) + + # now start a message pump and wait for messages to be delivered. + print("Watching", len(hdevs), "handles - press Ctrl+C to terminate, or") + print("add and remove some USB devices...") + if not dir_names: + print("(Note you can also pass paths to watch on the command-line - eg,") + print("pass the root of an inserted USB stick to see events specific to") + print("that volume)") + while 1: + win32gui.PumpWaitingMessages() + time.sleep(0.01) + win32gui.DestroyWindow(hwnd) + win32gui.UnregisterClass(wc.lpszClassName, None) + +if __name__=='__main__': + # optionally pass device/directory names to watch for notifications. + # Eg, plug in a USB device - assume it connects as E: - then execute: + # % win32gui_devicenotify.py E: + # Then remove and insert the device. + TestDeviceNotifications(sys.argv[1:]) diff --git a/venv/Lib/site-packages/win32/Demos/win32gui_dialog.py b/venv/Lib/site-packages/win32/Demos/win32gui_dialog.py new file mode 100644 index 00000000..3abd2272 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32gui_dialog.py @@ -0,0 +1,373 @@ +# A demo of a fairly complex dialog. +# +# Features: +# * Uses a "dynamic dialog resource" to build the dialog. +# * Uses a ListView control. +# * Dynamically resizes content. +# * Uses a second worker thread to fill the list. +# * Demostrates support for windows XP themes. + +# If you are on Windows XP, and specify a '--noxp' argument, you will see: +# * alpha-blend issues with icons +# * The buttons are "old" style, rather than based on the XP theme. +# Hence, using: +# import winxpgui as win32gui +# is recommened. +# Please report any problems. 
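+# (A note on the "dynamic dialog resource": the dialog is described by a plain
+# Python list - the first entry holds the caption, position/size, style and
+# font, and each following entry describes one control - which is then handed
+# to CreateDialogIndirect / DialogBoxIndirect; see _GetDialogTemplate below.)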
+import sys +if "--noxp" in sys.argv: + import win32gui +else: + import winxpgui as win32gui +import win32gui_struct +import win32api +import win32con, winerror +import struct, array +import commctrl +import queue +import os + +IDC_SEARCHTEXT = 1024 +IDC_BUTTON_SEARCH = 1025 +IDC_BUTTON_DISPLAY = 1026 +IDC_LISTBOX = 1027 + +WM_SEARCH_RESULT = win32con.WM_USER + 512 +WM_SEARCH_FINISHED = win32con.WM_USER + 513 + +class _WIN32MASKEDSTRUCT: + def __init__(self, **kw): + full_fmt = "" + for name, fmt, default, mask in self._struct_items_: + self.__dict__[name] = None + if fmt == "z": + full_fmt += "pi" + else: + full_fmt += fmt + for name, val in kw.items(): + if name not in self.__dict__: + raise ValueError("LVITEM structures do not have an item '%s'" % (name,)) + self.__dict__[name] = val + + def __setattr__(self, attr, val): + if not attr.startswith("_") and attr not in self.__dict__: + raise AttributeError(attr) + self.__dict__[attr] = val + + def toparam(self): + self._buffs = [] + full_fmt = "" + vals = [] + mask = 0 + # calc the mask + for name, fmt, default, this_mask in self._struct_items_: + if this_mask is not None and self.__dict__.get(name) is not None: + mask |= this_mask + self.mask = mask + for name, fmt, default, this_mask in self._struct_items_: + val = self.__dict__[name] + if fmt == "z": + fmt = "Pi" + if val is None: + vals.append(0) + vals.append(0) + else: + # Note this demo still works with byte strings. An + # alternate strategy would be to use unicode natively + # and use the 'W' version of the messages - eg, + # LVM_SETITEMW etc. + val = val + "\0" + if isinstance(val, str): + val = val.encode("mbcs") + str_buf = array.array("b", val) + vals.append(str_buf.buffer_info()[0]) + vals.append(len(val)) + self._buffs.append(str_buf) # keep alive during the call. + else: + if val is None: + val = default + vals.append(val) + full_fmt += fmt + return struct.pack(*(full_fmt,) + tuple(vals)) + + +# NOTE: See the win32gui_struct module for an alternative way of dealing +# with these structures +class LVITEM(_WIN32MASKEDSTRUCT): + _struct_items_ = [ + ("mask", "I", 0, None), + ("iItem", "i", 0, None), + ("iSubItem", "i", 0, None), + ("state", "I", 0, commctrl.LVIF_STATE), + ("stateMask", "I", 0, None), + ("text", "z", None, commctrl.LVIF_TEXT), + ("iImage", "i", 0, commctrl.LVIF_IMAGE), + ("lParam", "i", 0, commctrl.LVIF_PARAM), + ("iIdent", "i", 0, None), + ] + +class LVCOLUMN(_WIN32MASKEDSTRUCT): + _struct_items_ = [ + ("mask", "I", 0, None), + ("fmt", "i", 0, commctrl.LVCF_FMT), + ("cx", "i", 0, commctrl.LVCF_WIDTH), + ("text", "z", None, commctrl.LVCF_TEXT), + ("iSubItem", "i", 0, commctrl.LVCF_SUBITEM), + ("iImage", "i", 0, commctrl.LVCF_IMAGE), + ("iOrder", "i", 0, commctrl.LVCF_ORDER), + ] + +class DemoWindowBase: + def __init__(self): + win32gui.InitCommonControls() + self.hinst = win32gui.dllhandle + self.list_data = {} + + def _RegisterWndClass(self): + className = "PythonDocSearch" + message_map = {} + wc = win32gui.WNDCLASS() + wc.SetDialogProc() # Make it a dialog class. + wc.hInstance = self.hinst + wc.lpszClassName = className + wc.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW + wc.hCursor = win32gui.LoadCursor( 0, win32con.IDC_ARROW ) + wc.hbrBackground = win32con.COLOR_WINDOW + 1 + wc.lpfnWndProc = message_map # could also specify a wndproc. 
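+        # (message_map here is an empty dict - the real message handlers are
+        #  built in _DoCreate below and passed straight to
+        #  CreateDialogIndirect/DialogBoxIndirect rather than to the class.)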
+ # C code: wc.cbWndExtra = DLGWINDOWEXTRA + sizeof(HBRUSH) + (sizeof(COLORREF)); + wc.cbWndExtra = win32con.DLGWINDOWEXTRA + struct.calcsize("Pi") + icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE + + ## py.ico went away in python 2.5, load from executable instead + this_app=win32api.GetModuleHandle(None) + try: + wc.hIcon=win32gui.LoadIcon(this_app, 1) ## python.exe and pythonw.exe + except win32gui.error: + wc.hIcon=win32gui.LoadIcon(this_app, 135) ## pythonwin's icon + try: + classAtom = win32gui.RegisterClass(wc) + except win32gui.error as err_info: + if err_info.winerror!=winerror.ERROR_CLASS_ALREADY_EXISTS: + raise + return className + + def _GetDialogTemplate(self, dlgClassName): + style = win32con.WS_THICKFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT | win32con.WS_MINIMIZEBOX + cs = win32con.WS_CHILD | win32con.WS_VISIBLE + title = "Dynamic Dialog Demo" + + # Window frame and title + dlg = [ [title, (0, 0, 210, 250), style, None, (8, "MS Sans Serif"), None, dlgClassName], ] + + # ID label and text box + dlg.append([130, "Enter something", -1, (5, 5, 200, 9), cs | win32con.SS_LEFT]) + s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER + dlg.append(['EDIT', None, IDC_SEARCHTEXT, (5, 15, 200, 12), s]) + + # Search/Display Buttons + # (x positions don't matter here) + s = cs | win32con.WS_TABSTOP + dlg.append([128, "Fill List", IDC_BUTTON_SEARCH, (5, 35, 50, 14), s | win32con.BS_DEFPUSHBUTTON]) + s = win32con.BS_PUSHBUTTON | s + dlg.append([128, "Display", IDC_BUTTON_DISPLAY, (100, 35, 50, 14), s]) + + # List control. + # Can't make this work :( +## s = cs | win32con.WS_TABSTOP +## dlg.append(['SysListView32', "Title", IDC_LISTBOX, (5, 505, 200, 200), s]) + + return dlg + + def _DoCreate(self, fn): + message_map = { + win32con.WM_SIZE: self.OnSize, + win32con.WM_COMMAND: self.OnCommand, + win32con.WM_NOTIFY: self.OnNotify, + win32con.WM_INITDIALOG: self.OnInitDialog, + win32con.WM_CLOSE: self.OnClose, + win32con.WM_DESTROY: self.OnDestroy, + WM_SEARCH_RESULT: self.OnSearchResult, + WM_SEARCH_FINISHED: self.OnSearchFinished, + } + dlgClassName = self._RegisterWndClass() + template = self._GetDialogTemplate(dlgClassName) + return fn(self.hinst, template, 0, message_map) + + def _SetupList(self): + child_style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | win32con.WS_HSCROLL | win32con.WS_VSCROLL + child_style |= commctrl.LVS_SINGLESEL | commctrl.LVS_SHOWSELALWAYS | commctrl.LVS_REPORT + self.hwndList = win32gui.CreateWindow("SysListView32", None, child_style, 0, 0, 100, 100, self.hwnd, IDC_LISTBOX, self.hinst, None) + + child_ex_style = win32gui.SendMessage(self.hwndList, commctrl.LVM_GETEXTENDEDLISTVIEWSTYLE, 0, 0) + child_ex_style |= commctrl.LVS_EX_FULLROWSELECT + win32gui.SendMessage(self.hwndList, commctrl.LVM_SETEXTENDEDLISTVIEWSTYLE, 0, child_ex_style) + + # Add an image list - use the builtin shell folder icon - this + # demonstrates the problem with alpha-blending of icons on XP if + # winxpgui is not used in place of win32gui. 
+ il = win32gui.ImageList_Create( + win32api.GetSystemMetrics(win32con.SM_CXSMICON), + win32api.GetSystemMetrics(win32con.SM_CYSMICON), + commctrl.ILC_COLOR32 | commctrl.ILC_MASK, + 1, # initial size + 0) # cGrow + + shell_dll = os.path.join(win32api.GetSystemDirectory(), "shell32.dll") + large, small = win32gui.ExtractIconEx(shell_dll, 4, 1) + win32gui.ImageList_ReplaceIcon(il, -1, small[0]) + win32gui.DestroyIcon(small[0]) + win32gui.DestroyIcon(large[0]) + win32gui.SendMessage(self.hwndList, commctrl.LVM_SETIMAGELIST, + commctrl.LVSIL_SMALL, il) + + # Setup the list control columns. + lvc = LVCOLUMN(mask = commctrl.LVCF_FMT | commctrl.LVCF_WIDTH | commctrl.LVCF_TEXT | commctrl.LVCF_SUBITEM) + lvc.fmt = commctrl.LVCFMT_LEFT + lvc.iSubItem = 1 + lvc.text = "Title" + lvc.cx = 200 + win32gui.SendMessage(self.hwndList, commctrl.LVM_INSERTCOLUMN, 0, lvc.toparam()) + lvc.iSubItem = 0 + lvc.text = "Order" + lvc.cx = 50 + win32gui.SendMessage(self.hwndList, commctrl.LVM_INSERTCOLUMN, 0, lvc.toparam()) + + win32gui.UpdateWindow(self.hwnd) + + def ClearListItems(self): + win32gui.SendMessage(self.hwndList, commctrl.LVM_DELETEALLITEMS) + self.list_data = {} + + def AddListItem(self, data, *columns): + num_items = win32gui.SendMessage(self.hwndList, commctrl.LVM_GETITEMCOUNT) + item = LVITEM(text=columns[0], iItem = num_items) + new_index = win32gui.SendMessage(self.hwndList, commctrl.LVM_INSERTITEM, 0, item.toparam()) + col_no = 1 + for col in columns[1:]: + item = LVITEM(text=col, iItem = new_index, iSubItem = col_no) + win32gui.SendMessage(self.hwndList, commctrl.LVM_SETITEM, 0, item.toparam()) + col_no += 1 + self.list_data[new_index] = data + + def OnInitDialog(self, hwnd, msg, wparam, lparam): + self.hwnd = hwnd + # centre the dialog + desktop = win32gui.GetDesktopWindow() + l,t,r,b = win32gui.GetWindowRect(self.hwnd) + dt_l, dt_t, dt_r, dt_b = win32gui.GetWindowRect(desktop) + centre_x, centre_y = win32gui.ClientToScreen( desktop, ( (dt_r-dt_l)//2, (dt_b-dt_t)//2) ) + win32gui.MoveWindow(hwnd, centre_x-(r//2), centre_y-(b//2), r-l, b-t, 0) + self._SetupList() + l,t,r,b = win32gui.GetClientRect(self.hwnd) + self._DoSize(r-l,b-t, 1) + + def _DoSize(self, cx, cy, repaint = 1): + # right-justify the textbox. + ctrl = win32gui.GetDlgItem(self.hwnd, IDC_SEARCHTEXT) + l, t, r, b = win32gui.GetWindowRect(ctrl) + l, t = win32gui.ScreenToClient(self.hwnd, (l,t) ) + r, b = win32gui.ScreenToClient(self.hwnd, (r,b) ) + win32gui.MoveWindow(ctrl, l, t, cx-l-5, b-t, repaint) + # The button. + ctrl = win32gui.GetDlgItem(self.hwnd, IDC_BUTTON_DISPLAY) + l, t, r, b = win32gui.GetWindowRect(ctrl) + l, t = win32gui.ScreenToClient(self.hwnd, (l,t) ) + r, b = win32gui.ScreenToClient(self.hwnd, (r,b) ) + list_y = b + 10 + w = r - l + win32gui.MoveWindow(ctrl, cx - 5 - w, t, w, b-t, repaint) + + # The list control + win32gui.MoveWindow(self.hwndList, 0, list_y, cx, cy-list_y, repaint) + # The last column of the list control. 
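+        # (Stretch column 1 so that, together with column 0, the two columns
+        #  exactly fill the list control's new client width.)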
+ new_width = cx - win32gui.SendMessage(self.hwndList, commctrl.LVM_GETCOLUMNWIDTH, 0) + win32gui.SendMessage(self.hwndList, commctrl.LVM_SETCOLUMNWIDTH, 1, new_width) + + def OnSize(self, hwnd, msg, wparam, lparam): + x = win32api.LOWORD(lparam) + y = win32api.HIWORD(lparam) + self._DoSize(x,y) + return 1 + + def OnSearchResult(self, hwnd, msg, wparam, lparam): + try: + while 1: + params = self.result_queue.get(0) + self.AddListItem(*params) + except queue.Empty: + pass + + def OnSearchFinished(self, hwnd, msg, wparam, lparam): + print("OnSearchFinished") + + def OnNotify(self, hwnd, msg, wparam, lparam): + info = win32gui_struct.UnpackNMITEMACTIVATE(lparam) + if info.code == commctrl.NM_DBLCLK: + print("Double click on item", info.iItem+1) + return 1 + + def OnCommand(self, hwnd, msg, wparam, lparam): + id = win32api.LOWORD(wparam) + if id == IDC_BUTTON_SEARCH: + self.ClearListItems() + def fill_slowly(q, hwnd): + import time + for i in range(20): + q.put(("whatever", str(i+1), "Search result " + str(i) )) + win32gui.PostMessage(hwnd, WM_SEARCH_RESULT, 0, 0) + time.sleep(.25) + win32gui.PostMessage(hwnd, WM_SEARCH_FINISHED, 0, 0) + + import threading + self.result_queue = queue.Queue() + thread = threading.Thread(target = fill_slowly, args=(self.result_queue, self.hwnd) ) + thread.start() + elif id == IDC_BUTTON_DISPLAY: + print("Display button selected") + sel = win32gui.SendMessage(self.hwndList, commctrl.LVM_GETNEXTITEM, -1, commctrl.LVNI_SELECTED) + print("The selected item is", sel+1) + + # These function differ based on how the window is used, so may be overridden + def OnClose(self, hwnd, msg, wparam, lparam): + raise NotImplementedError + + def OnDestroy(self, hwnd, msg, wparam, lparam): + pass + +# An implementation suitable for use with the Win32 Window functions (ie, not +# a true dialog) +class DemoWindow(DemoWindowBase): + def CreateWindow(self): + # Create the window via CreateDialogBoxIndirect - it can then + # work as a "normal" window, once a message loop is established. + self._DoCreate(win32gui.CreateDialogIndirect) + + def OnClose(self, hwnd, msg, wparam, lparam): + win32gui.DestroyWindow(hwnd) + + # We need to arrange to a WM_QUIT message to be sent to our + # PumpMessages() loop. + def OnDestroy(self, hwnd, msg, wparam, lparam): + win32gui.PostQuitMessage(0) # Terminate the app. + +# An implementation suitable for use with the Win32 Dialog functions. +class DemoDialog(DemoWindowBase): + def DoModal(self): + return self._DoCreate(win32gui.DialogBoxIndirect) + + def OnClose(self, hwnd, msg, wparam, lparam): + win32gui.EndDialog(hwnd, 0) + +def DemoModal(): + w=DemoDialog() + w.DoModal() + +def DemoCreateWindow(): + w=DemoWindow() + w.CreateWindow() + # PumpMessages runs until PostQuitMessage() is called by someone. + win32gui.PumpMessages() + +if __name__=='__main__': + DemoModal() + DemoCreateWindow() diff --git a/venv/Lib/site-packages/win32/Demos/win32gui_menu.py b/venv/Lib/site-packages/win32/Demos/win32gui_menu.py new file mode 100644 index 00000000..9adcb157 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32gui_menu.py @@ -0,0 +1,349 @@ +# Demonstrates some advanced menu concepts using win32gui. +# This creates a taskbar icon which has some fancy menus (but note that +# selecting the menu items does nothing useful - see win32gui_taskbar.py +# for examples of this. + +# NOTE: This is a work in progress. Todo: +# * The "Checked" menu items don't work correctly - I'm not sure why. +# * No support for GetMenuItemInfo. + +# Based on Andy McKay's demo code. 
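+# (Run this script directly, then right-click the Python icon that appears in
+# the taskbar notification area to try the menus built in createMenu() below.)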
+from win32api import * +# Try and use XP features, so we get alpha-blending etc. +try: + from winxpgui import * +except ImportError: + from win32gui import * +from win32gui_struct import * +import win32con +import sys, os +import struct +import array + +this_dir = os.path.split(sys.argv[0])[0] + +class MainWindow: + def __init__(self): + message_map = { + win32con.WM_DESTROY: self.OnDestroy, + win32con.WM_COMMAND: self.OnCommand, + win32con.WM_USER+20 : self.OnTaskbarNotify, + # owner-draw related handlers. + win32con.WM_MEASUREITEM: self.OnMeasureItem, + win32con.WM_DRAWITEM: self.OnDrawItem, + } + # Register the Window class. + wc = WNDCLASS() + hinst = wc.hInstance = GetModuleHandle(None) + wc.lpszClassName = "PythonTaskbarDemo" + wc.lpfnWndProc = message_map # could also specify a wndproc. + classAtom = RegisterClass(wc) + # Create the Window. + style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU + self.hwnd = CreateWindow( classAtom, "Taskbar Demo", style, \ + 0, 0, win32con.CW_USEDEFAULT, win32con.CW_USEDEFAULT, \ + 0, 0, hinst, None) + UpdateWindow(self.hwnd) + iconPathName = os.path.abspath(os.path.join( sys.prefix, "pyc.ico" )) + # py2.5 includes the .ico files in the DLLs dir for some reason. + if not os.path.isfile(iconPathName): + iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "DLLs", "pyc.ico" )) + if not os.path.isfile(iconPathName): + # Look in the source tree. + iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "..\\PC\\pyc.ico" )) + if os.path.isfile(iconPathName): + icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE + hicon = LoadImage(hinst, iconPathName, win32con.IMAGE_ICON, 0, 0, icon_flags) + else: + iconPathName = None + print("Can't find a Python icon file - using default") + hicon = LoadIcon(0, win32con.IDI_APPLICATION) + self.iconPathName = iconPathName + + # Load up some information about menus needed by our owner-draw code. + # The font to use on the menu. + ncm = SystemParametersInfo(win32con.SPI_GETNONCLIENTMETRICS) + self.font_menu = CreateFontIndirect(ncm['lfMenuFont']) + # spacing for our ownerdraw menus - not sure exactly what constants + # should be used (and if you owner-draw all items on the menu, it + # doesn't matter!) + self.menu_icon_height = GetSystemMetrics(win32con.SM_CYMENU) - 4 + self.menu_icon_width = self.menu_icon_height + self.icon_x_pad = 8 # space from end of icon to start of text. + # A map we use to stash away data we need for ownerdraw. Keyed + # by integer ID - that ID will be set in dwTypeData of the menu item. + self.menu_item_map = {} + + # Finally, create the menu + self.createMenu() + + flags = NIF_ICON | NIF_MESSAGE | NIF_TIP + nid = (self.hwnd, 0, flags, win32con.WM_USER+20, hicon, "Python Demo") + Shell_NotifyIcon(NIM_ADD, nid) + print("Please right-click on the Python icon in the taskbar") + + def createMenu(self): + self.hmenu = menu = CreatePopupMenu() + # Create our 'Exit' item with the standard, ugly 'close' icon. + item, extras = PackMENUITEMINFO(text = "Exit", + hbmpItem=win32con.HBMMENU_MBAR_CLOSE, + wID=1000) + InsertMenuItem(menu, 0, 1, item) + # Create a 'text only' menu via InsertMenuItem rather then + # AppendMenu, just to prove we can! + item, extras = PackMENUITEMINFO(text = "Text only item", + wID=1001) + InsertMenuItem(menu, 0, 1, item) + + load_bmp_flags=win32con.LR_LOADFROMFILE | \ + win32con.LR_LOADTRANSPARENT + # These images are "over sized", so we load them scaled. 
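+        # (Passing an explicit 20x20 size asks LoadImage to scale the bitmap
+        #  down so it fits comfortably on a menu row.)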
+ hbmp = LoadImage(0, os.path.join(this_dir, "images/smiley.bmp"), + win32con.IMAGE_BITMAP, 20, 20, load_bmp_flags) + + # Create a top-level menu with a bitmap + item, extras = PackMENUITEMINFO(text="Menu with bitmap", + hbmpItem=hbmp, + wID=1002) + InsertMenuItem(menu, 0, 1, item) + + # Owner-draw menus mainly from: + # http://windowssdk.msdn.microsoft.com/en-us/library/ms647558.aspx + # and: + # http://www.codeguru.com/cpp/controls/menu/bitmappedmenus/article.php/c165 + + # Create one with an icon - this is *lots* more work - we do it + # owner-draw! The primary reason is to handle transparency better - + # converting to a bitmap causes the background to be incorrect when + # the menu item is selected. I can't see a simpler way. + # First, load the icon we want to use. + ico_x = GetSystemMetrics(win32con.SM_CXSMICON) + ico_y = GetSystemMetrics(win32con.SM_CYSMICON) + if self.iconPathName: + hicon = LoadImage(0, self.iconPathName, win32con.IMAGE_ICON, ico_x, ico_y, win32con.LR_LOADFROMFILE) + else: + shell_dll = os.path.join(GetSystemDirectory(), "shell32.dll") + large, small = win32gui.ExtractIconEx(shell_dll, 4, 1) + hicon = small[0] + DestroyIcon(large[0]) + + # Stash away the text and hicon in our map, and add the owner-draw + # item to the menu. + index = 0 + self.menu_item_map[index] = (hicon, "Menu with owner-draw icon") + item, extras = PackMENUITEMINFO(fType=win32con.MFT_OWNERDRAW, + dwItemData=index, + wID=1009) + InsertMenuItem(menu, 0, 1, item) + + # Add another icon-based icon - but this time using HBMMENU_CALLBACK + # in the hbmpItem elt, so we only need to draw the icon (ie, not the + # text or checkmark) + index = 1 + self.menu_item_map[index] = (hicon, None) + item, extras = PackMENUITEMINFO(text="Menu with o-d icon 2", + dwItemData=index, + hbmpItem=win32con.HBMMENU_CALLBACK, + wID=1010) + InsertMenuItem(menu, 0, 1, item) + + # Add another icon-based icon - this time by converting + # via bitmap. Note the icon background when selected is ugly :( + hdcBitmap = CreateCompatibleDC(0) + hdcScreen = GetDC(0) + hbm = CreateCompatibleBitmap(hdcScreen, ico_x, ico_y) + hbmOld = SelectObject(hdcBitmap, hbm) + SetBkMode(hdcBitmap, win32con.TRANSPARENT) + # Fill the background. + brush = GetSysColorBrush(win32con.COLOR_MENU) + FillRect(hdcBitmap, (0, 0, 16, 16), brush) + # unclear if brush needs to be freed. Best clue I can find is: + # "GetSysColorBrush returns a cached brush instead of allocating a new + # one." - implies no DeleteObject. + # draw the icon + DrawIconEx(hdcBitmap, 0, 0, hicon, ico_x, ico_y, 0, 0, win32con.DI_NORMAL) + SelectObject(hdcBitmap, hbmOld) + DeleteDC(hdcBitmap) + item, extras = PackMENUITEMINFO(text="Menu with icon", + hbmpItem=hbm.Detach(), + wID=1011) + InsertMenuItem(menu, 0, 1, item) + + # Create a sub-menu, and put a few funky ones there. + self.sub_menu = sub_menu = CreatePopupMenu() + # A 'checkbox' menu. + item, extras = PackMENUITEMINFO(fState=win32con.MFS_CHECKED, + text="Checkbox menu", + hbmpItem=hbmp, + wID=1003) + InsertMenuItem(sub_menu, 0, 1, item) + # A 'radio' menu. 
+ InsertMenu(sub_menu, 0, win32con.MF_BYPOSITION, win32con.MF_SEPARATOR, None) + item, extras = PackMENUITEMINFO(fType=win32con.MFT_RADIOCHECK, + fState=win32con.MFS_CHECKED, + text="Checkbox menu - bullet 1", + hbmpItem=hbmp, + wID=1004) + InsertMenuItem(sub_menu, 0, 1, item) + item, extras = PackMENUITEMINFO(fType=win32con.MFT_RADIOCHECK, + fState=win32con.MFS_UNCHECKED, + text="Checkbox menu - bullet 2", + hbmpItem=hbmp, + wID=1005) + InsertMenuItem(sub_menu, 0, 1, item) + # And add the sub-menu to the top-level menu. + item, extras = PackMENUITEMINFO(text="Sub-Menu", + hSubMenu=sub_menu) + InsertMenuItem(menu, 0, 1, item) + + # Set 'Exit' as the default option. + SetMenuDefaultItem(menu, 1000, 0) + + + def OnDestroy(self, hwnd, msg, wparam, lparam): + nid = (self.hwnd, 0) + Shell_NotifyIcon(NIM_DELETE, nid) + PostQuitMessage(0) # Terminate the app. + + def OnTaskbarNotify(self, hwnd, msg, wparam, lparam): + if lparam==win32con.WM_RBUTTONUP: + print("You right clicked me.") + # display the menu at the cursor pos. + pos = GetCursorPos() + SetForegroundWindow(self.hwnd) + TrackPopupMenu(self.hmenu, win32con.TPM_LEFTALIGN, pos[0], pos[1], 0, self.hwnd, None) + PostMessage(self.hwnd, win32con.WM_NULL, 0, 0) + elif lparam==win32con.WM_LBUTTONDBLCLK: + print("You double-clicked me") + # find the default menu item and fire it. + cmd = GetMenuDefaultItem(self.hmenu, False, 0) + if cmd == -1: + print("Can't find a default!") + # and just pretend it came from the menu + self.OnCommand(hwnd, win32con.WM_COMMAND, cmd, 0) + return 1 + + def OnCommand(self, hwnd, msg, wparam, lparam): + id = LOWORD(wparam) + if id == 1000: + print("Goodbye") + DestroyWindow(self.hwnd) + elif id in (1003, 1004, 1005): + # Our 'checkbox' and 'radio' items + state = GetMenuState(self.sub_menu, id, win32con.MF_BYCOMMAND) + if state==-1: + raise RuntimeError("No item found") + if state & win32con.MF_CHECKED: + check_flags = win32con.MF_UNCHECKED + print("Menu was checked - unchecking") + else: + check_flags = win32con.MF_CHECKED + print("Menu was unchecked - checking") + + if id == 1003: + # simple checkbox + rc = CheckMenuItem(self.sub_menu, id, + win32con.MF_BYCOMMAND | check_flags) + else: + # radio button - must pass the first and last IDs in the + # "group", and the ID in the group that is to be selected. + rc = CheckMenuRadioItem(self.sub_menu, 1004, 1005, id, + win32con.MF_BYCOMMAND) + # Get and check the new state - first the simple way... + new_state = GetMenuState(self.sub_menu, id, win32con.MF_BYCOMMAND) + if new_state & win32con.MF_CHECKED != check_flags: + raise RuntimeError("The new item didn't get the new checked state!") + # Now the long-winded way via GetMenuItemInfo... + buf, extras = EmptyMENUITEMINFO() + win32gui.GetMenuItemInfo(self.sub_menu, id, False, buf) + fType, fState, wID, hSubMenu, hbmpChecked, hbmpUnchecked, \ + dwItemData, text, hbmpItem = UnpackMENUITEMINFO(buf) + + if fState & win32con.MF_CHECKED != check_flags: + raise RuntimeError("The new item didn't get the new checked state!") + else: + print("OnCommand for ID", id) + + # Owner-draw related functions. 
We only have 1 owner-draw item, but + # we pretend we have more than that :) + def OnMeasureItem(self, hwnd, msg, wparam, lparam): + ## Last item of MEASUREITEMSTRUCT is a ULONG_PTR + fmt = "5iP" + buf = PyMakeBuffer(struct.calcsize(fmt), lparam) + data = struct.unpack(fmt, buf) + ctlType, ctlID, itemID, itemWidth, itemHeight, itemData = data + + hicon, text = self.menu_item_map[itemData] + if text is None: + # Only drawing icon due to HBMMENU_CALLBACK + cx = self.menu_icon_width + cy = self.menu_icon_height + else: + # drawing the lot! + dc = GetDC(hwnd) + oldFont = SelectObject(dc, self.font_menu) + cx, cy = GetTextExtentPoint32(dc, text) + SelectObject(dc, oldFont) + ReleaseDC(hwnd, dc) + + cx += GetSystemMetrics(win32con.SM_CXMENUCHECK) + cx += self.menu_icon_width + self.icon_x_pad + + cy = GetSystemMetrics(win32con.SM_CYMENU) + + new_data = struct.pack(fmt, ctlType, ctlID, itemID, cx, cy, itemData) + PySetMemory(lparam, new_data) + return True + + def OnDrawItem(self, hwnd, msg, wparam, lparam): + ## lparam is a DRAWITEMSTRUCT + fmt = "5i2P4iP" + data = struct.unpack(fmt, PyGetMemory(lparam, struct.calcsize(fmt))) + ctlType, ctlID, itemID, itemAction, itemState, hwndItem, \ + hDC, left, top, right, bot, itemData = data + + rect = left, top, right, bot + hicon, text = self.menu_item_map[itemData] + + if text is None: + # This means the menu-item had HBMMENU_CALLBACK - so all we + # draw is the icon. rect is the entire area we should use. + DrawIconEx(hDC, left, top, hicon, right-left, bot-top, + 0, 0, win32con.DI_NORMAL) + else: + # If the user has selected the item, use the selected + # text and background colors to display the item. + selected = itemState & win32con.ODS_SELECTED + if selected: + crText = SetTextColor(hDC, GetSysColor(win32con.COLOR_HIGHLIGHTTEXT)) + crBkgnd = SetBkColor(hDC, GetSysColor(win32con.COLOR_HIGHLIGHT)) + + each_pad = self.icon_x_pad // 2 + x_icon = left + GetSystemMetrics(win32con.SM_CXMENUCHECK) + each_pad + x_text = x_icon + self.menu_icon_width + each_pad + + # Draw text first, specifying a complete rect to fill - this sets + # up the background (but overwrites anything else already there!) + # Select the font, draw it, and restore the previous font. + hfontOld = SelectObject(hDC, self.font_menu) + ExtTextOut(hDC, x_text, top+2, win32con.ETO_OPAQUE, rect, text) + SelectObject(hDC, hfontOld) + + # Icon image next. Icons are transparent - no need to handle + # selection specially. + DrawIconEx(hDC, x_icon, top+2, hicon, + self.menu_icon_width, self.menu_icon_height, + 0, 0, win32con.DI_NORMAL) + + # Return the text and background colors to their + # normal state (not selected). + if selected: + SetTextColor(hDC, crText) + SetBkColor(hDC, crBkgnd) + +def main(): + w=MainWindow() + PumpMessages() + +if __name__=='__main__': + main() diff --git a/venv/Lib/site-packages/win32/Demos/win32gui_taskbar.py b/venv/Lib/site-packages/win32/Demos/win32gui_taskbar.py new file mode 100644 index 00000000..bfc8d5c2 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32gui_taskbar.py @@ -0,0 +1,112 @@ +# Creates a task-bar icon. Run from Python.exe to see the +# messages printed. 
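+# (Left-clicking the icon just prints a message, right-clicking pops up a small
+# menu, and double-clicking removes the icon and exits the demo.)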
+import win32api, win32gui +import win32con, winerror +import sys, os + +class MainWindow: + def __init__(self): + msg_TaskbarRestart = win32gui.RegisterWindowMessage("TaskbarCreated"); + message_map = { + msg_TaskbarRestart: self.OnRestart, + win32con.WM_DESTROY: self.OnDestroy, + win32con.WM_COMMAND: self.OnCommand, + win32con.WM_USER+20 : self.OnTaskbarNotify, + } + # Register the Window class. + wc = win32gui.WNDCLASS() + hinst = wc.hInstance = win32api.GetModuleHandle(None) + wc.lpszClassName = "PythonTaskbarDemo" + wc.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW; + wc.hCursor = win32api.LoadCursor( 0, win32con.IDC_ARROW ) + wc.hbrBackground = win32con.COLOR_WINDOW + wc.lpfnWndProc = message_map # could also specify a wndproc. + + # Don't blow up if class already registered to make testing easier + try: + classAtom = win32gui.RegisterClass(wc) + except win32gui.error as err_info: + if err_info.winerror!=winerror.ERROR_CLASS_ALREADY_EXISTS: + raise + + # Create the Window. + style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU + self.hwnd = win32gui.CreateWindow( wc.lpszClassName, "Taskbar Demo", style, \ + 0, 0, win32con.CW_USEDEFAULT, win32con.CW_USEDEFAULT, \ + 0, 0, hinst, None) + win32gui.UpdateWindow(self.hwnd) + self._DoCreateIcons() + def _DoCreateIcons(self): + # Try and find a custom icon + hinst = win32api.GetModuleHandle(None) + iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "pyc.ico" )) + if not os.path.isfile(iconPathName): + # Look in DLLs dir, a-la py 2.5 + iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "DLLs", "pyc.ico" )) + if not os.path.isfile(iconPathName): + # Look in the source tree. + iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "..\\PC\\pyc.ico" )) + if os.path.isfile(iconPathName): + icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE + hicon = win32gui.LoadImage(hinst, iconPathName, win32con.IMAGE_ICON, 0, 0, icon_flags) + else: + print("Can't find a Python icon file - using default") + hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION) + + flags = win32gui.NIF_ICON | win32gui.NIF_MESSAGE | win32gui.NIF_TIP + nid = (self.hwnd, 0, flags, win32con.WM_USER+20, hicon, "Python Demo") + try: + win32gui.Shell_NotifyIcon(win32gui.NIM_ADD, nid) + except win32gui.error: + # This is common when windows is starting, and this code is hit + # before the taskbar has been created. + print("Failed to add the taskbar icon - is explorer running?") + # but keep running anyway - when explorer starts, we get the + # TaskbarCreated message. + + def OnRestart(self, hwnd, msg, wparam, lparam): + self._DoCreateIcons() + + def OnDestroy(self, hwnd, msg, wparam, lparam): + nid = (self.hwnd, 0) + win32gui.Shell_NotifyIcon(win32gui.NIM_DELETE, nid) + win32gui.PostQuitMessage(0) # Terminate the app. 
+ + def OnTaskbarNotify(self, hwnd, msg, wparam, lparam): + if lparam==win32con.WM_LBUTTONUP: + print("You clicked me.") + elif lparam==win32con.WM_LBUTTONDBLCLK: + print("You double-clicked me - goodbye") + win32gui.DestroyWindow(self.hwnd) + elif lparam==win32con.WM_RBUTTONUP: + print("You right clicked me.") + menu = win32gui.CreatePopupMenu() + win32gui.AppendMenu( menu, win32con.MF_STRING, 1023, "Display Dialog") + win32gui.AppendMenu( menu, win32con.MF_STRING, 1024, "Say Hello") + win32gui.AppendMenu( menu, win32con.MF_STRING, 1025, "Exit program" ) + pos = win32gui.GetCursorPos() + # See http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/menus_0hdi.asp + win32gui.SetForegroundWindow(self.hwnd) + win32gui.TrackPopupMenu(menu, win32con.TPM_LEFTALIGN, pos[0], pos[1], 0, self.hwnd, None) + win32gui.PostMessage(self.hwnd, win32con.WM_NULL, 0, 0) + return 1 + + def OnCommand(self, hwnd, msg, wparam, lparam): + id = win32api.LOWORD(wparam) + if id == 1023: + import win32gui_dialog + win32gui_dialog.DemoModal() + elif id == 1024: + print("Hello") + elif id == 1025: + print("Goodbye") + win32gui.DestroyWindow(self.hwnd) + else: + print("Unknown command -", id) + +def main(): + w=MainWindow() + win32gui.PumpMessages() + +if __name__=='__main__': + main() diff --git a/venv/Lib/site-packages/win32/Demos/win32netdemo.py b/venv/Lib/site-packages/win32/Demos/win32netdemo.py new file mode 100644 index 00000000..b1c890ab --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32netdemo.py @@ -0,0 +1,237 @@ +import sys +import win32api +import win32net +import win32netcon +import win32security +import getopt +import traceback + +verbose_level = 0 + +server = None # Run on local machine. + +def verbose(msg): + if verbose_level: + print(msg) + +def CreateUser(): + "Creates a new test user, then deletes the user" + testName = "PyNetTestUser" + try: + win32net.NetUserDel(server, testName) + print("Warning - deleted user before creating it!") + except win32net.error: + pass + + d = {} + d['name'] = testName + d['password'] = 'deleteme' + d['priv'] = win32netcon.USER_PRIV_USER + d['comment'] = "Delete me - created by Python test code" + d['flags'] = win32netcon.UF_NORMAL_ACCOUNT | win32netcon.UF_SCRIPT + win32net.NetUserAdd(server, 1, d) + try: + try: + win32net.NetUserChangePassword(server, testName, "wrong", "new") + print("ERROR: NetUserChangePassword worked with a wrong password!") + except win32net.error: + pass + win32net.NetUserChangePassword(server, testName, "deleteme", "new") + finally: + win32net.NetUserDel(server, testName) + print("Created a user, changed their password, and deleted them!") + +def UserEnum(): + "Enumerates all the local users" + resume = 0 + nuser = 0 + while 1: + data, total, resume = win32net.NetUserEnum(server, 3, win32netcon.FILTER_NORMAL_ACCOUNT, resume) + verbose("Call to NetUserEnum obtained %d entries of %d total" % (len(data), total)) + for user in data: + verbose("Found user %s" % user['name']) + nuser = nuser + 1 + if not resume: + break + assert nuser, "Could not find any users!" 
+ print("Enumerated all the local users") + +def GroupEnum(): + "Enumerates all the domain groups" + nmembers = 0 + resume = 0 + while 1: + data, total, resume = win32net.NetGroupEnum(server, 1, resume) +# print "Call to NetGroupEnum obtained %d entries of %d total" % (len(data), total) + for group in data: + verbose("Found group %(name)s:%(comment)s " % group) + memberresume = 0 + while 1: + memberdata, total, memberresume = win32net.NetGroupGetUsers(server, group['name'], 0, resume) + for member in memberdata: + verbose(" Member %(name)s" % member) + nmembers = nmembers + 1 + if memberresume==0: + break + if not resume: + break + assert nmembers, "Couldnt find a single member in a single group!" + print("Enumerated all the groups") + +def LocalGroupEnum(): + "Enumerates all the local groups" + resume = 0 + nmembers = 0 + while 1: + data, total, resume = win32net.NetLocalGroupEnum(server, 1, resume) + for group in data: + verbose("Found group %(name)s:%(comment)s " % group) + memberresume = 0 + while 1: + memberdata, total, memberresume = win32net.NetLocalGroupGetMembers(server, group['name'], 2, resume) + for member in memberdata: + # Just for the sake of it, we convert the SID to a username + username, domain, type = win32security.LookupAccountSid(server, member['sid']) + nmembers = nmembers + 1 + verbose(" Member %s (%s)" % (username, member['domainandname'])) + if memberresume==0: + break + if not resume: + break + assert nmembers, "Couldnt find a single member in a single group!" + print("Enumerated all the local groups") + +def ServerEnum(): + "Enumerates all servers on the network" + resume = 0 + while 1: + data, total, resume = win32net.NetServerEnum(server, 100, win32netcon.SV_TYPE_ALL, None, resume) + for s in data: + verbose("Found server %s" % s['name']) + # Now loop over the shares. + shareresume=0 + while 1: + sharedata, total, shareresume = win32net.NetShareEnum(server, 2, shareresume) + for share in sharedata: + verbose(" %(netname)s (%(path)s):%(remark)s - in use by %(current_uses)d users" % share) + if not shareresume: + break + if not resume: + break + print("Enumerated all the servers on the network") + +def LocalGroup(uname=None): + "Creates a local group, adds some members, deletes them, then removes the group" + level = 3 + if uname is None: uname=win32api.GetUserName() + if uname.find("\\")<0: + uname = win32api.GetDomainName() + "\\" + uname + group = 'python_test_group' + # delete the group if it already exists + try: + win32net.NetLocalGroupDel(server, group) + print("WARNING: existing local group '%s' has been deleted.") + except win32net.error: + pass + group_data = {'name': group} + win32net.NetLocalGroupAdd(server, 1, group_data) + try: + u={'domainandname': uname} + win32net.NetLocalGroupAddMembers(server, group, level, [u]) + mem, tot, res = win32net.NetLocalGroupGetMembers(server, group, level) + print("members are", mem) + if mem[0]['domainandname'] != uname: + print("ERROR: LocalGroup just added %s, but members are %r" % (uname, mem)) + # Convert the list of dicts to a list of strings. 
+ win32net.NetLocalGroupDelMembers(server, group, [m['domainandname'] for m in mem]) + finally: + win32net.NetLocalGroupDel(server, group) + print("Created a local group, added and removed members, then deleted the group") + +def GetInfo(userName=None): + "Dumps level 3 information about the current user" + if userName is None: userName=win32api.GetUserName() + print("Dumping level 3 information about user") + info = win32net.NetUserGetInfo(server, userName, 3) + for key, val in list(info.items()): + verbose("%s=%s" % (key,val)) + +def SetInfo(userName=None): + "Attempts to change the current users comment, then set it back" + if userName is None: userName=win32api.GetUserName() + oldData = win32net.NetUserGetInfo(server, userName, 3) + try: + d = oldData.copy() + d["usr_comment"] = "Test comment" + win32net.NetUserSetInfo(server, userName, 3, d) + new = win32net.NetUserGetInfo(server, userName, 3)['usr_comment'] + if str(new) != "Test comment": + raise RuntimeError("Could not read the same comment back - got %s" % new) + print("Changed the data for the user") + finally: + win32net.NetUserSetInfo(server, userName, 3, oldData) + +def SetComputerInfo(): + "Doesnt actually change anything, just make sure we could ;-)" + info = win32net.NetWkstaGetInfo(None, 502) + # *sob* - but we can't! Why not!!! + # win32net.NetWkstaSetInfo(None, 502, info) + +def usage(tests): + import os + print("Usage: %s [-s server ] [-v] [Test ...]" % os.path.basename(sys.argv[0])) + print(" -v : Verbose - print more information") + print(" -s : server - execute the tests against the named server") + print(" -c : include the CreateUser test by default") + print("where Test is one of:") + for t in tests: + print(t.__name__,":", t.__doc__) + print() + print("If not tests are specified, all tests are run") + sys.exit(1) + +def main(): + tests = [] + for ob in list(globals().values()): + if type(ob)==type(main) and ob.__doc__: + tests.append(ob) + opts, args = getopt.getopt(sys.argv[1:], "s:hvc") + create_user = False + for opt, val in opts: + if opt=="-s": + global server + server = val + if opt=="-h": + usage(tests) + if opt=="-v": + global verbose_level + verbose_level = verbose_level + 1 + if opt=="-c": + create_user = True + + if len(args)==0: + print("Running all tests - use '-h' to see command-line options...") + dotests = tests + if not create_user: + dotests.remove(CreateUser) + else: + dotests = [] + for arg in args: + for t in tests: + if t.__name__==arg: + dotests.append(t) + break + else: + print("Test '%s' unknown - skipping" % arg) + if not len(dotests): + print("Nothing to do!") + usage(tests) + for test in dotests: + try: + test() + except: + print("Test %s failed" % test.__name__) + traceback.print_exc() + +if __name__=='__main__': + main() diff --git a/venv/Lib/site-packages/win32/Demos/win32rcparser_demo.py b/venv/Lib/site-packages/win32/Demos/win32rcparser_demo.py new file mode 100644 index 00000000..62acecc7 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32rcparser_demo.py @@ -0,0 +1,77 @@ +# A demo of the win32rcparser module and using win32gui + +import win32gui +import win32api +import win32con +import win32rcparser +import commctrl +import sys, os + +this_dir = os.path.abspath(os.path.dirname(__file__)) +g_rcname = os.path.abspath( + os.path.join( this_dir, "..", "test", "win32rcparser", "test.rc")) + +if not os.path.isfile(g_rcname): + raise RuntimeError("Can't locate test.rc (should be at '%s')" % (g_rcname,)) + +class DemoWindow: + def __init__(self, dlg_template): + 
self.dlg_template = dlg_template + + def CreateWindow(self): + self._DoCreate(win32gui.CreateDialogIndirect) + + def DoModal(self): + return self._DoCreate(win32gui.DialogBoxIndirect) + + def _DoCreate(self, fn): + message_map = { + win32con.WM_INITDIALOG: self.OnInitDialog, + win32con.WM_CLOSE: self.OnClose, + win32con.WM_DESTROY: self.OnDestroy, + win32con.WM_COMMAND: self.OnCommand, + } + return fn(0, self.dlg_template, 0, message_map) + + def OnInitDialog(self, hwnd, msg, wparam, lparam): + self.hwnd = hwnd + # centre the dialog + desktop = win32gui.GetDesktopWindow() + l,t,r,b = win32gui.GetWindowRect(self.hwnd) + dt_l, dt_t, dt_r, dt_b = win32gui.GetWindowRect(desktop) + centre_x, centre_y = win32gui.ClientToScreen( desktop, ( (dt_r-dt_l)//2, (dt_b-dt_t)//2) ) + win32gui.MoveWindow(hwnd, centre_x-(r//2), centre_y-(b//2), r-l, b-t, 0) + + def OnCommand(self, hwnd, msg, wparam, lparam): + # Needed to make OK/Cancel work - no other controls are handled. + id = win32api.LOWORD(wparam) + if id in [win32con.IDOK, win32con.IDCANCEL]: + win32gui.EndDialog(hwnd, id) + + def OnClose(self, hwnd, msg, wparam, lparam): + win32gui.EndDialog(hwnd, 0) + + def OnDestroy(self, hwnd, msg, wparam, lparam): + pass + +def DemoModal(): + # Load the .rc file. + resources = win32rcparser.Parse(g_rcname) + for id, ddef in resources.dialogs.items(): + print("Displaying dialog", id) + w=DemoWindow(ddef) + w.DoModal() + +if __name__=='__main__': + flags = 0 + for flag in """ICC_DATE_CLASSES ICC_ANIMATE_CLASS ICC_ANIMATE_CLASS + ICC_BAR_CLASSES ICC_COOL_CLASSES ICC_DATE_CLASSES + ICC_HOTKEY_CLASS ICC_INTERNET_CLASSES ICC_LISTVIEW_CLASSES + ICC_PAGESCROLLER_CLASS ICC_PROGRESS_CLASS ICC_TAB_CLASSES + ICC_TREEVIEW_CLASSES ICC_UPDOWN_CLASS ICC_USEREX_CLASSES + ICC_WIN95_CLASSES """.split(): + flags |= getattr(commctrl, flag) + win32gui.InitCommonControlsEx(flags) + # Need to do this go get rich-edit working. 
+ win32api.LoadLibrary("riched20.dll") + DemoModal() diff --git a/venv/Lib/site-packages/win32/Demos/win32servicedemo.py b/venv/Lib/site-packages/win32/Demos/win32servicedemo.py new file mode 100644 index 00000000..a95456f4 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32servicedemo.py @@ -0,0 +1,23 @@ +import win32service +import win32con + + +def EnumServices(): + resume = 0 + accessSCM = win32con.GENERIC_READ + accessSrv = win32service.SC_MANAGER_ALL_ACCESS + + #Open Service Control Manager + hscm = win32service.OpenSCManager(None, None, accessSCM) + + #Enumerate Service Control Manager DB + + typeFilter = win32service.SERVICE_WIN32 + stateFilter = win32service.SERVICE_STATE_ALL + + statuses = win32service.EnumServicesStatus(hscm, typeFilter, stateFilter) + for (short_name, desc, status) in statuses: + print(short_name, desc, status) + + +EnumServices() diff --git a/venv/Lib/site-packages/win32/Demos/win32ts_logoff_disconnected.py b/venv/Lib/site-packages/win32/Demos/win32ts_logoff_disconnected.py new file mode 100644 index 00000000..70e178fc --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32ts_logoff_disconnected.py @@ -0,0 +1,21 @@ +""" Finds any disconnected terminal service sessions and logs them off""" +import win32ts +import pywintypes +import winerror +sessions=win32ts.WTSEnumerateSessions(win32ts.WTS_CURRENT_SERVER_HANDLE) +for session in sessions: + """ + WTS_CONNECTSTATE_CLASS: WTSActive,WTSConnected,WTSConnectQuery,WTSShadow,WTSDisconnected, + WTSIdle,WTSListen,WTSReset,WTSDown,WTSInit + """ + if session['State']==win32ts.WTSDisconnected: + sessionid=session['SessionId'] + username=win32ts.WTSQuerySessionInformation(win32ts.WTS_CURRENT_SERVER_HANDLE, sessionid, win32ts.WTSUserName) + print('Logging off disconnected user:',username) + try: + win32ts.WTSLogoffSession(win32ts.WTS_CURRENT_SERVER_HANDLE, sessionid, True) + except pywintypes.error as e: + if e.winerror == winerror.ERROR_ACCESS_DENIED: + print("Can't kill that session:", e.strerror) + else: + raise diff --git a/venv/Lib/site-packages/win32/Demos/win32wnet/__pycache__/testwnet.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/win32wnet/__pycache__/testwnet.cpython-36.pyc new file mode 100644 index 00000000..882308bc Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/win32wnet/__pycache__/testwnet.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/win32wnet/__pycache__/winnetwk.cpython-36.pyc b/venv/Lib/site-packages/win32/Demos/win32wnet/__pycache__/winnetwk.cpython-36.pyc new file mode 100644 index 00000000..e41c688e Binary files /dev/null and b/venv/Lib/site-packages/win32/Demos/win32wnet/__pycache__/winnetwk.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/Demos/win32wnet/testwnet.py b/venv/Lib/site-packages/win32/Demos/win32wnet/testwnet.py new file mode 100644 index 00000000..9190a815 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32wnet/testwnet.py @@ -0,0 +1,105 @@ +import win32api +import win32wnet +import sys +from winnetwk import * +import os + +possible_shares = [] + +def _doDumpHandle(handle, level = 0): + indent = " " * level + while 1: + items = win32wnet.WNetEnumResource(handle, 0) + if len(items)==0: + break + for item in items: + try: + if item.dwDisplayType == RESOURCEDISPLAYTYPE_SHARE: + print(indent + "Have share with name:", item.lpRemoteName) + possible_shares.append(item) + elif item.dwDisplayType == RESOURCEDISPLAYTYPE_GENERIC: + print(indent + "Have generic resource with name:", item.lpRemoteName) + else: + # 
Try generic! + print(indent + "Enumerating " + item.lpRemoteName, end=' ') + k = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET, RESOURCETYPE_ANY,0,item) + print() + _doDumpHandle(k, level + 1) + win32wnet.WNetCloseEnum(k) # could do k.Close(), but this is a good test! + except win32wnet.error as details: + print(indent + "Couldn't enumerate this resource: " + details.strerror) + +def TestOpenEnum(): + print("Enumerating all resources on the network - this may take some time...") + handle = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET,RESOURCETYPE_ANY,0,None) + + try: + _doDumpHandle(handle) + finally: + handle.Close() + print("Finished dumping all resources.") + +def findUnusedDriveLetter(): + existing = [x[0].lower() for x in win32api.GetLogicalDriveStrings().split('\0') if x] + handle = win32wnet.WNetOpenEnum(RESOURCE_REMEMBERED,RESOURCETYPE_DISK,0,None) + try: + while 1: + items = win32wnet.WNetEnumResource(handle, 0) + if len(items)==0: + break + xtra = [i.lpLocalName[0].lower() for i in items if i.lpLocalName] + existing.extend(xtra) + finally: + handle.Close() + for maybe in 'defghijklmnopqrstuvwxyz': + if maybe not in existing: + return maybe + raise RuntimeError("All drive mappings are taken?") + +def TestConnection(): + if len(possible_shares)==0: + print("Couldn't find any potential shares to connect to") + return + localName = findUnusedDriveLetter() + ':' + for share in possible_shares: + print("Attempting connection of", localName, "to", share.lpRemoteName) + try: + win32wnet.WNetAddConnection2(share.dwType, localName, share.lpRemoteName) + except win32wnet.error as details: + print("Couldn't connect: " + details.strerror) + continue + # Have a connection. + try: + fname = os.path.join(localName + "\\", os.listdir(localName + "\\")[0]) + try: + print("Universal name of '%s' is '%s'" % (fname, win32wnet.WNetGetUniversalName(fname))) + except win32wnet.error as details: + print("Couldn't get universal name of '%s': %s" % (fname, details.strerror)) + print("User name for this connection is", win32wnet.WNetGetUser(localName)) + finally: + win32wnet.WNetCancelConnection2(localName, 0, 0) + # and do it again, but this time by using the more modern + # NETRESOURCE way. + nr = win32wnet.NETRESOURCE() + nr.dwType = share.dwType + nr.lpLocalName = localName + nr.lpRemoteName = share.lpRemoteName + win32wnet.WNetAddConnection2(nr) + win32wnet.WNetCancelConnection2(localName, 0, 0) + + # and one more time using WNetAddConnection3 + win32wnet.WNetAddConnection3(0, nr) + win32wnet.WNetCancelConnection2(localName, 0, 0) + + # Only do the first share that succeeds. 
+ break + +def TestGetUser(): + u = win32wnet.WNetGetUser() + print("Current global user is", repr(u)) + if u != win32wnet.WNetGetUser(None): + raise RuntimeError("Default value didnt seem to work!") + +TestGetUser() +TestOpenEnum() +TestConnection() diff --git a/venv/Lib/site-packages/win32/Demos/win32wnet/winnetwk.py b/venv/Lib/site-packages/win32/Demos/win32wnet/winnetwk.py new file mode 100644 index 00000000..192538b8 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/win32wnet/winnetwk.py @@ -0,0 +1,98 @@ +# Generated by h2py from d:\mssdk\include\winnetwk.h +WNNC_NET_MSNET = 0x00010000 +WNNC_NET_LANMAN = 0x00020000 +WNNC_NET_NETWARE = 0x00030000 +WNNC_NET_VINES = 0x00040000 +WNNC_NET_10NET = 0x00050000 +WNNC_NET_LOCUS = 0x00060000 +WNNC_NET_SUN_PC_NFS = 0x00070000 +WNNC_NET_LANSTEP = 0x00080000 +WNNC_NET_9TILES = 0x00090000 +WNNC_NET_LANTASTIC = 0x000A0000 +WNNC_NET_AS400 = 0x000B0000 +WNNC_NET_FTP_NFS = 0x000C0000 +WNNC_NET_PATHWORKS = 0x000D0000 +WNNC_NET_LIFENET = 0x000E0000 +WNNC_NET_POWERLAN = 0x000F0000 +WNNC_NET_BWNFS = 0x00100000 +WNNC_NET_COGENT = 0x00110000 +WNNC_NET_FARALLON = 0x00120000 +WNNC_NET_APPLETALK = 0x00130000 +WNNC_NET_INTERGRAPH = 0x00140000 +WNNC_NET_SYMFONET = 0x00150000 +WNNC_NET_CLEARCASE = 0x00160000 +WNNC_NET_FRONTIER = 0x00170000 +WNNC_NET_BMC = 0x00180000 +WNNC_NET_DCE = 0x00190000 +WNNC_NET_DECORB = 0x00200000 +WNNC_NET_PROTSTOR = 0x00210000 +WNNC_NET_FJ_REDIR = 0x00220000 +WNNC_NET_DISTINCT = 0x00230000 +WNNC_NET_TWINS = 0x00240000 +WNNC_NET_RDR2SAMPLE = 0x00250000 +RESOURCE_CONNECTED = 0x00000001 +RESOURCE_GLOBALNET = 0x00000002 +RESOURCE_REMEMBERED = 0x00000003 +RESOURCE_RECENT = 0x00000004 +RESOURCE_CONTEXT = 0x00000005 +RESOURCETYPE_ANY = 0x00000000 +RESOURCETYPE_DISK = 0x00000001 +RESOURCETYPE_PRINT = 0x00000002 +RESOURCETYPE_RESERVED = 0x00000008 +RESOURCETYPE_UNKNOWN = 0xFFFFFFFF +RESOURCEUSAGE_CONNECTABLE = 0x00000001 +RESOURCEUSAGE_CONTAINER = 0x00000002 +RESOURCEUSAGE_NOLOCALDEVICE = 0x00000004 +RESOURCEUSAGE_SIBLING = 0x00000008 +RESOURCEUSAGE_ATTACHED = 0x00000010 +RESOURCEUSAGE_ALL = (RESOURCEUSAGE_CONNECTABLE | RESOURCEUSAGE_CONTAINER | RESOURCEUSAGE_ATTACHED) +RESOURCEUSAGE_RESERVED = 0x80000000 +RESOURCEDISPLAYTYPE_GENERIC = 0x00000000 +RESOURCEDISPLAYTYPE_DOMAIN = 0x00000001 +RESOURCEDISPLAYTYPE_SERVER = 0x00000002 +RESOURCEDISPLAYTYPE_SHARE = 0x00000003 +RESOURCEDISPLAYTYPE_FILE = 0x00000004 +RESOURCEDISPLAYTYPE_GROUP = 0x00000005 +RESOURCEDISPLAYTYPE_NETWORK = 0x00000006 +RESOURCEDISPLAYTYPE_ROOT = 0x00000007 +RESOURCEDISPLAYTYPE_SHAREADMIN = 0x00000008 +RESOURCEDISPLAYTYPE_DIRECTORY = 0x00000009 +RESOURCEDISPLAYTYPE_TREE = 0x0000000A +RESOURCEDISPLAYTYPE_NDSCONTAINER = 0x0000000B +NETPROPERTY_PERSISTENT = 1 +CONNECT_UPDATE_PROFILE = 0x00000001 +CONNECT_UPDATE_RECENT = 0x00000002 +CONNECT_TEMPORARY = 0x00000004 +CONNECT_INTERACTIVE = 0x00000008 +CONNECT_PROMPT = 0x00000010 +CONNECT_NEED_DRIVE = 0x00000020 +CONNECT_REFCOUNT = 0x00000040 +CONNECT_REDIRECT = 0x00000080 +CONNECT_LOCALDRIVE = 0x00000100 +CONNECT_CURRENT_MEDIA = 0x00000200 +CONNECT_DEFERRED = 0x00000400 +CONNECT_RESERVED = 0xFF000000 +CONNDLG_RO_PATH = 0x00000001 +CONNDLG_CONN_POINT = 0x00000002 +CONNDLG_USE_MRU = 0x00000004 +CONNDLG_HIDE_BOX = 0x00000008 +CONNDLG_PERSIST = 0x00000010 +CONNDLG_NOT_PERSIST = 0x00000020 +DISC_UPDATE_PROFILE = 0x00000001 +DISC_NO_FORCE = 0x00000040 +UNIVERSAL_NAME_INFO_LEVEL = 0x00000001 +REMOTE_NAME_INFO_LEVEL = 0x00000002 +WNFMT_MULTILINE = 0x01 +WNFMT_ABBREVIATED = 0x02 +WNFMT_INENUM = 0x10 +WNFMT_CONNECTION = 0x20 +NETINFO_DLL16 = 
0x00000001 +NETINFO_DISKRED = 0x00000004 +NETINFO_PRINTERRED = 0x00000008 +RP_LOGON = 0x01 +RP_INIFILE = 0x02 +PP_DISPLAYERRORS = 0x01 +WNCON_FORNETCARD = 0x00000001 +WNCON_NOTROUTED = 0x00000002 +WNCON_SLOWLINK = 0x00000004 +WNCON_DYNAMIC = 0x00000008 diff --git a/venv/Lib/site-packages/win32/Demos/winprocess.py b/venv/Lib/site-packages/win32/Demos/winprocess.py new file mode 100644 index 00000000..fe9d6ab0 --- /dev/null +++ b/venv/Lib/site-packages/win32/Demos/winprocess.py @@ -0,0 +1,206 @@ +""" +Windows Process Control + +winprocess.run launches a child process and returns the exit code. +Optionally, it can: + redirect stdin, stdout & stderr to files + run the command as another user + limit the process's running time + control the process window (location, size, window state, desktop) +Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32 +extensions. + +This code is free for any purpose, with no warranty of any kind. +-- John B. Dell'Aquila +""" + +import win32api, win32process, win32security +import win32event, win32con, msvcrt, win32gui +import os + + +def logonUser(loginString): + """ + Login as specified user and return handle. + loginString: 'Domain\nUser\nPassword'; for local + login use . or empty string as domain + e.g. '.\nadministrator\nsecret_password' + """ + domain, user, passwd = loginString.split('\n') + return win32security.LogonUser( + user, + domain, + passwd, + win32con.LOGON32_LOGON_INTERACTIVE, + win32con.LOGON32_PROVIDER_DEFAULT + ) + + +class Process: + """ + A Windows process. + """ + + def __init__(self, cmd, login=None, + hStdin=None, hStdout=None, hStderr=None, + show=1, xy=None, xySize=None, + desktop=None): + """ + Create a Windows process. + cmd: command to run + login: run as user 'Domain\nUser\nPassword' + hStdin, hStdout, hStderr: + handles for process I/O; default is caller's stdin, + stdout & stderr + show: wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...) + xy: window offset (x, y) of upper left corner in pixels + xySize: window size (width, height) in pixels + desktop: lpDesktop - name of desktop e.g. 'winsta0\\default' + None = inherit current desktop + '' = create new desktop if necessary + + User calling login requires additional privileges: + Act as part of the operating system [not needed on Windows XP] + Increase quotas + Replace a process level token + Login string must EITHER be an administrator's account + (ordinary user can't access current desktop - see Microsoft + Q165194) OR use desktop='' to run another desktop invisibly + (may be very slow to startup & finalize). 
+ """ + si = win32process.STARTUPINFO() + si.dwFlags = (win32con.STARTF_USESTDHANDLES ^ + win32con.STARTF_USESHOWWINDOW) + if hStdin is None: + si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE) + else: + si.hStdInput = hStdin + if hStdout is None: + si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE) + else: + si.hStdOutput = hStdout + if hStderr is None: + si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE) + else: + si.hStdError = hStderr + si.wShowWindow = show + if xy is not None: + si.dwX, si.dwY = xy + si.dwFlags ^= win32con.STARTF_USEPOSITION + if xySize is not None: + si.dwXSize, si.dwYSize = xySize + si.dwFlags ^= win32con.STARTF_USESIZE + if desktop is not None: + si.lpDesktop = desktop + procArgs = (None, # appName + cmd, # commandLine + None, # processAttributes + None, # threadAttributes + 1, # bInheritHandles + win32process.CREATE_NEW_CONSOLE, # dwCreationFlags + None, # newEnvironment + None, # currentDirectory + si) # startupinfo + if login is not None: + hUser = logonUser(login) + win32security.ImpersonateLoggedOnUser(hUser) + procHandles = win32process.CreateProcessAsUser(hUser, *procArgs) + win32security.RevertToSelf() + else: + procHandles = win32process.CreateProcess(*procArgs) + self.hProcess, self.hThread, self.PId, self.TId = procHandles + + def wait(self, mSec=None): + """ + Wait for process to finish or for specified number of + milliseconds to elapse. + """ + if mSec is None: + mSec = win32event.INFINITE + return win32event.WaitForSingleObject(self.hProcess, mSec) + + def kill(self, gracePeriod=5000): + """ + Kill process. Try for an orderly shutdown via WM_CLOSE. If + still running after gracePeriod (5 sec. default), terminate. + """ + win32gui.EnumWindows(self.__close__, 0) + if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0: + win32process.TerminateProcess(self.hProcess, 0) + win32api.Sleep(100) # wait for resources to be released + + def __close__(self, hwnd, dummy): + """ + EnumWindows callback - sends WM_CLOSE to any window + owned by this process. + """ + TId, PId = win32process.GetWindowThreadProcessId(hwnd) + if PId == self.PId: + win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) + + def exitCode(self): + """ + Return process exit code. + """ + return win32process.GetExitCodeProcess(self.hProcess) + + +def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw): + """ + Run cmd as a child process and return exit code. + mSec: terminate cmd after specified number of milliseconds + stdin, stdout, stderr: + file objects for child I/O (use hStdin etc. 
to attach + handles instead of files); default is caller's stdin, + stdout & stderr; + kw: see Process.__init__ for more keyword options + """ + if stdin is not None: + kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno()) + if stdout is not None: + kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno()) + if stderr is not None: + kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno()) + child = Process(cmd, **kw) + if child.wait(mSec) != win32event.WAIT_OBJECT_0: + child.kill() + raise WindowsError('process timeout exceeded') + return child.exitCode() + + +if __name__ == '__main__': + + # Pipe commands to a shell and display the output in notepad + print('Testing winprocess.py...') + + import tempfile + + timeoutSeconds = 15 + cmdString = """\ +REM Test of winprocess.py piping commands to a shell.\r +REM This 'notepad' process will terminate in %d seconds.\r +vol\r +net user\r +_this_is_a_test_of_stderr_\r +""" % timeoutSeconds + + cmd_name = tempfile.mktemp() + out_name = cmd_name + '.txt' + try: + cmd = open(cmd_name, "w+b") + out = open(out_name, "w+b") + cmd.write(cmdString.encode('mbcs')) + cmd.seek(0) + print('CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd, + stdout=out, stderr=out)) + cmd.close() + print('NOTEPAD exit code:', run('notepad.exe %s' % out.name, + show=win32con.SW_MAXIMIZE, + mSec=timeoutSeconds*1000)) + out.close() + finally: + for n in (cmd_name, out_name): + try: + os.unlink(cmd_name) + except os.error: + pass diff --git a/venv/Lib/site-packages/win32/_win32sysloader.pyd b/venv/Lib/site-packages/win32/_win32sysloader.pyd new file mode 100644 index 00000000..cdb69bf7 Binary files /dev/null and b/venv/Lib/site-packages/win32/_win32sysloader.pyd differ diff --git a/venv/Lib/site-packages/win32/_winxptheme.pyd b/venv/Lib/site-packages/win32/_winxptheme.pyd new file mode 100644 index 00000000..ca00eeed Binary files /dev/null and b/venv/Lib/site-packages/win32/_winxptheme.pyd differ diff --git a/venv/Lib/site-packages/win32/include/PyWinTypes.h b/venv/Lib/site-packages/win32/include/PyWinTypes.h new file mode 100644 index 00000000..45c2b287 --- /dev/null +++ b/venv/Lib/site-packages/win32/include/PyWinTypes.h @@ -0,0 +1,704 @@ + +#ifndef __PYWINTYPES_H__ +#define __PYWINTYPES_H__ + +// If building under a GCC, tweak what we need. +#if defined(__GNUC__) && defined(_POSIX_C_SOURCE) +// python.h complains if _POSIX_C_SOURCE is already defined +#undef _POSIX_C_SOURCE +#endif + +// windows rpc.h defines "small" as "char" which breaks Python's accu.h, +// so we undefine it before including python. +#ifdef small +#undef small +#endif + +#include "Python.h" +// many many files need python's structmember.h, and its possible people +// #included windows.h before including us... +#ifdef WRITE_RESTRICTED +#undef WRITE_RESTRICTED +#endif +#include "structmember.h" +// and python's structmember.h #defines this, conflicting with windows.h +#ifdef WRITE_RESTRICTED +#undef WRITE_RESTRICTED +#endif +#include "windows.h" +#undef WRITE_RESTRICTED // stop anyone using the wrong one accidently... + +// Helpers for our modules. +// Some macros to help the pywin32 modules co-exist in py2x and py3k. +// Creates and initializes local variables called 'module' and 'dict'. + +// Maybe these should all be removed - they existed to help in the py2->3 +// transition. +// On one hand: the code would be cleaner if they were all just re-inlined? +// On the other: high confidence everything uses the exact same patterns? +// (Regardless, *some*, eg, PYWIN_MODULE_INIT_RETURN_* should be re-inlined!) 
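The comment block above introduces the module-initialisation helper macros that are defined just below (PYWIN_MODULE_INIT_FUNC, PYWIN_MODULE_INIT_PREPARE and the RETURN_* pair). The following sketch is added here only as an illustration of how a hypothetical pywin32-style extension module would typically combine them; the module name `examplemod`, its method table and the VERSION constant are invented and are not part of this header.

```cpp
// Illustrative only - not part of PyWinTypes.h or pywin32.
#include "PyWinTypes.h"

static PyObject *examplemod_ping(PyObject *self, PyObject *args)
{
    if (!PyArg_ParseTuple(args, ":ping"))      // no arguments expected
        return NULL;
    Py_RETURN_NONE;
}

static PyMethodDef examplemod_functions[] = {
    {"ping", examplemod_ping, METH_VARARGS, "Do nothing, successfully."},
    {NULL, NULL},
};

// Expands to: extern "C" __declspec(dllexport) PyObject *PyInit_examplemod(void)
PYWIN_MODULE_INIT_FUNC(examplemod)
{
    // Declares 'module' and 'dict', calls PyWinGlobals_Ensure() and creates the module.
    PYWIN_MODULE_INIT_PREPARE(examplemod, examplemod_functions,
                              "A do-nothing demo module.");
    PyObject *ver = PyLong_FromLong(1);
    if (ver == NULL || PyDict_SetItemString(dict, "VERSION", ver) == -1) {
        Py_XDECREF(ver);
        PYWIN_MODULE_INIT_RETURN_ERROR;        // early exit -> return NULL
    }
    Py_DECREF(ver);
    PYWIN_MODULE_INIT_RETURN_SUCCESS;          // return the module object
}
```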
+ +// Use to define the function itself (ie, its name, linkage, params) +#define PYWIN_MODULE_INIT_FUNC(module_name) extern "C" __declspec(dllexport) PyObject *PyInit_##module_name(void) + +// If the module needs to early-exit on an error condition. +#define PYWIN_MODULE_INIT_RETURN_ERROR return NULL; + +// When the module has successfully initialized. +#define PYWIN_MODULE_INIT_RETURN_SUCCESS return module; + +// To setup the module object itself and the module's dictionary. +#define PYWIN_MODULE_INIT_PREPARE(module_name, functions, docstring) \ + PyObject *dict, *module; \ + static PyModuleDef module_name##_def = {PyModuleDef_HEAD_INIT, #module_name, docstring, -1, functions}; \ + if (PyWinGlobals_Ensure() == -1) \ + return NULL; \ + if (!(module = PyModule_Create(&module_name##_def))) \ + return NULL; \ + if (!(dict = PyModule_GetDict(module))) \ + return NULL; + +// Helpers for our types. +// Macro to handle PyObject layout changes in Py3k +#define PYWIN_OBJECT_HEAD PyVarObject_HEAD_INIT(NULL, 0) + +/* Attribute names are passed as Unicode in Py3k, so use a macro to + switch between string and unicode conversion. This function is not + documented, but is used extensively in the Python codebase itself, + so it's reasonable to assume it won't disappear anytime soon. +*/ +#define PYWIN_ATTR_CONVERT (char *)_PyUnicode_AsString + +/* Some API functions changed/removed in python 3.0 + Definitions for the string functions are in stringobject.h, + but comments indicate that this header is likely to go away in 3.1. +*/ +#define PyString_Check PyBytes_Check +#define PyString_Size PyBytes_Size +#define PyString_AsString PyBytes_AsString +#define PyString_AsStringAndSize PyBytes_AsStringAndSize +#define PyString_FromString PyBytes_FromString +#define PyString_FromStringAndSize PyBytes_FromStringAndSize +#define _PyString_Resize _PyBytes_Resize +#define PyString_AS_STRING PyBytes_AS_STRING +#define PyString_GET_SIZE PyBytes_GET_SIZE +#define PyString_Concat PyBytes_Concat +#define PyInt_Check PyLong_Check +#define PyInt_FromLong PyLong_FromLong +#define PyInt_AsLong PyLong_AsLong +#define PyInt_AS_LONG PyLong_AS_LONG +#define PyInt_FromSsize_t PyLong_FromSsize_t +#define PyInt_AsSsize_t PyLong_AsSsize_t +#define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask +#define PyNumber_Int PyNumber_Long + +typedef Py_ssize_t Py_hash_t; + +// This only enables runtime checks in debug builds - so we use +// our own so we can enable it always should we desire... +#define PyWin_SAFE_DOWNCAST Py_SAFE_DOWNCAST + +// Lars: for WAVEFORMATEX +#include "mmsystem.h" + +#ifdef BUILD_PYWINTYPES +/* We are building pywintypesxx.dll */ +#define PYWINTYPES_EXPORT __declspec(dllexport) +#else +/* This module uses pywintypesxx.dll */ +#define PYWINTYPES_EXPORT __declspec(dllimport) +#if defined(_MSC_VER) +#if defined(DEBUG) || defined(_DEBUG) +#pragma comment(lib, "pywintypes_d.lib") +#else +#pragma comment(lib, "pywintypes.lib") +#endif // DEBUG/_DEBUG +#endif // _MSC_VER +#endif // BUILD_PYWINTYPES + +// Py3k uses memoryview object in place of buffer, and we don't yet. 
+extern PYWINTYPES_EXPORT PyObject *PyBuffer_New(Py_ssize_t size); +extern PYWINTYPES_EXPORT PyObject *PyBuffer_FromMemory(void *buf, Py_ssize_t size); + +// Formats a python traceback into a character string - result must be free()ed +PYWINTYPES_EXPORT char *GetPythonTraceback(PyObject *exc_type, PyObject *exc_value, PyObject *exc_tb); + +#include +/* +** Error/Exception handling +*/ +extern PYWINTYPES_EXPORT PyObject *PyWinExc_ApiError; +// Register a Windows DLL that contains the messages in the specified range. +extern PYWINTYPES_EXPORT BOOL PyWin_RegisterErrorMessageModule(DWORD first, DWORD last, HINSTANCE hmod); +// Get the previously registered hmodule for an error code. +extern PYWINTYPES_EXPORT HINSTANCE PyWin_GetErrorMessageModule(DWORD err); + +/* A global function that sets an API style error (ie, (code, fn, errTest)) */ +PYWINTYPES_EXPORT PyObject *PyWin_SetAPIError(char *fnName, long err = 0); + +/* Basic COM Exception handling. The main COM exception object + is actually defined here. However, the most useful functions + for raising the exception are still in the COM package. Therefore, + you can use the fn below to raise a basic COM exception - no fancy error + messages available, just the HRESULT. It will, however, _be_ a COM + exception, and therefore trappable like any other COM exception +*/ +extern PYWINTYPES_EXPORT PyObject *PyWinExc_COMError; +PYWINTYPES_EXPORT PyObject *PyWin_SetBasicCOMError(HRESULT hr); + +// Given a PyObject (string, Unicode, etc) create a "BSTR" with the value +PYWINTYPES_EXPORT BOOL PyWinObject_AsBstr(PyObject *stringObject, BSTR *pResult, BOOL bNoneOK = FALSE, + DWORD *pResultLen = NULL); +// And free it when finished. +PYWINTYPES_EXPORT void PyWinObject_FreeBstr(BSTR pResult); + +PYWINTYPES_EXPORT PyObject *PyWinObject_FromBstr(const BSTR bstr, BOOL takeOwnership = FALSE); + +// Given a string or Unicode object, get WCHAR characters. +PYWINTYPES_EXPORT BOOL PyWinObject_AsWCHAR(PyObject *stringObject, WCHAR **pResult, BOOL bNoneOK = FALSE, + DWORD *pResultLen = NULL); +// And free it when finished. +PYWINTYPES_EXPORT void PyWinObject_FreeWCHAR(WCHAR *pResult); + +inline BOOL PyWinObject_AsWCHAR(PyObject *stringObject, unsigned short **pResult, BOOL bNoneOK = FALSE, + DWORD *pResultLen = NULL) +{ + return PyWinObject_AsWCHAR(stringObject, (WCHAR **)pResult, bNoneOK, pResultLen); +} +inline void PyWinObject_FreeWCHAR(unsigned short *pResult) { PyWinObject_FreeWCHAR((WCHAR *)pResult); } + +// Given a PyObject (string, Unicode, etc) create a "char *" with the value +// if pResultLen != NULL, it will be set to the result size NOT INCLUDING +// TERMINATOR (to be in line with SysStringLen, PyString_*, etc) +PYWINTYPES_EXPORT BOOL PyWinObject_AsString(PyObject *stringObject, char **pResult, BOOL bNoneOK = FALSE, + DWORD *pResultLen = NULL); +// And free it when finished. 
+PYWINTYPES_EXPORT void PyWinObject_FreeString(char *pResult); +PYWINTYPES_EXPORT void PyWinObject_FreeString(WCHAR *pResult); + +// Automatically freed WCHAR that can be used anywhere WCHAR * is required +class TmpWCHAR { + public: + WCHAR *tmp; + TmpWCHAR() { tmp = NULL; } + TmpWCHAR(WCHAR *t) { tmp = t; } + WCHAR *operator=(WCHAR *t) + { + PyWinObject_FreeWCHAR(tmp); + tmp = t; + return t; + } + WCHAR **operator&() { return &tmp; } + boolean operator==(WCHAR *t) { return tmp == t; } + operator WCHAR *() { return tmp; } + ~TmpWCHAR() { PyWinObject_FreeWCHAR(tmp); } +}; + +// For 64-bit python compatibility, convert sequence to tuple and check length fits in a DWORD +PYWINTYPES_EXPORT PyObject *PyWinSequence_Tuple(PyObject *obseq, DWORD *len); + +// replacement for PyWinObject_AsReadBuffer and PyWinObject_AsWriteBuffer +class PYWINTYPES_EXPORT PyWinBufferView +{ +public: + PyWinBufferView(); + PyWinBufferView(PyObject *ob, bool bWrite = false, bool bNoneOk = false); + ~PyWinBufferView(); + bool init(PyObject *ob, bool bWrite = false, bool bNoneOk = false); + void release(); + bool ok(); + void* ptr(); + DWORD len(); +private: + Py_buffer m_view; + + // don't copy objects and don't use C++ >= 11 -> not implemented private + // copy ctor and assignment operator + PyWinBufferView(const PyWinBufferView& src); + PyWinBufferView& operator=(PyWinBufferView const &); +}; + +/* ANSI/Unicode Support */ +/* If UNICODE defined, will be a BSTR - otherwise a char * + Either way - PyWinObject_FreeTCHAR() must be called +*/ + +// Helpers with py3k in mind: the result object is always a "core string" +// object; ie, a string in py2k and unicode in py3k. Mainly to be used for +// objects that *must* be that type - tp_str slots, __dict__ items, etc. If +// Python doesn't *insist* the result be this type, consider using a function +// that always returns a unicode object (ie, most of the "PyWinObject_From*CHAR" +// functions) +PYWINTYPES_EXPORT PyObject *PyWinCoreString_FromString(const char *str, Py_ssize_t len = (Py_ssize_t)-1); +PYWINTYPES_EXPORT PyObject *PyWinCoreString_FromString(const WCHAR *str, Py_ssize_t len = (Py_ssize_t)-1); + +#define PyWinObject_FromWCHAR PyWinObject_FromOLECHAR + +// Converts a series of consecutive null terminated strings into a list +PYWINTYPES_EXPORT PyObject *PyWinObject_FromMultipleString(WCHAR *multistring); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromMultipleString(char *multistring); +// Converts a sequence of str/unicode objects into a series of consecutive null-terminated +// wide character strings with extra terminating null +PYWINTYPES_EXPORT BOOL PyWinObject_AsMultipleString(PyObject *ob, WCHAR **pmultistring, BOOL bNoneOK = TRUE, + DWORD *chars_returned = NULL); +PYWINTYPES_EXPORT void PyWinObject_FreeMultipleString(WCHAR *pmultistring); + +// Converts a sequence of str/unicode objects into a series of consecutive character strings +// terminated by double null +PYWINTYPES_EXPORT BOOL PyWinObject_AsMultipleString(PyObject *ob, char **pmultistring, BOOL bNoneOK = TRUE, + DWORD *chars_returned = NULL); +PYWINTYPES_EXPORT void PyWinObject_FreeMultipleString(char *pmultistring); + +// Convert a sequence of strings to an array of WCHAR pointers +PYWINTYPES_EXPORT void PyWinObject_FreeWCHARArray(LPWSTR *wchars, DWORD str_cnt); +PYWINTYPES_EXPORT BOOL PyWinObject_AsWCHARArray(PyObject *str_seq, LPWSTR **wchars, DWORD *str_cnt, + BOOL bNoneOK = FALSE); + +// Convert a sequence of string or unicode objects to an array of char * +PYWINTYPES_EXPORT void 
PyWinObject_FreeCharArray(char **pchars, DWORD str_cnt); +PYWINTYPES_EXPORT BOOL PyWinObject_AsCharArray(PyObject *str_seq, char ***pchars, DWORD *str_cnt, BOOL bNoneOK = FALSE); + +PYWINTYPES_EXPORT PyObject *PyWinObject_FromOLECHAR(const OLECHAR *str); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromOLECHAR(const OLECHAR *str, int numChars); + +// String support for buffers allocated via a function of your choice. +PYWINTYPES_EXPORT BOOL PyWinObject_AsPfnAllocatedWCHAR(PyObject *stringObject, void *(*pfnAllocator)(ULONG), + WCHAR **ppResult, BOOL bNoneOK = FALSE, + DWORD *pResultLen = NULL); + +#ifdef UNICODE +// XXX - "AsTCHAR" functions should all die - the type of the Python object +// being returned should not depend on UNICODE or not. +#define PyWinObject_AsTCHAR PyWinObject_AsWCHAR +#define PyWinObject_FreeTCHAR PyWinObject_FreeWCHAR +#define PyWinObject_FromTCHAR PyWinObject_FromOLECHAR +#else /* not UNICODE */ +#define PyWinObject_AsTCHAR PyWinObject_AsString +#define PyWinObject_FreeTCHAR PyWinObject_FreeString + +// PyWinObject_FromTCHAR in a non-unicode build still depends on py3k or not: +// py2x a string object is returned (no conversions). py3x a unicode object +// is returned (ie, the string is decoded) +PYWINTYPES_EXPORT PyObject *PyWinObject_FromTCHAR(const char *str, Py_ssize_t len = (Py_ssize_t)-1); + +#endif // UNICODE + +// String support for buffers allocated via CoTaskMemAlloc and CoTaskMemFree +PYWINTYPES_EXPORT BOOL PyWinObject_AsTaskAllocatedWCHAR(PyObject *stringObject, WCHAR **ppResult, BOOL bNoneOK = FALSE, + DWORD *pResultLen = NULL); +PYWINTYPES_EXPORT void PyWinObject_FreeTaskAllocatedWCHAR(WCHAR *str); + +PYWINTYPES_EXPORT void PyWinObject_FreeString(char *str); +PYWINTYPES_EXPORT void PyWinObject_FreeString(WCHAR *str); + +// Copy null terminated string with same allocator as PyWinObject_AsWCHAR, etc +PYWINTYPES_EXPORT WCHAR *PyWin_CopyString(const WCHAR *input); +PYWINTYPES_EXPORT char *PyWin_CopyString(const char *input); + +// Pointers. 
+// Substitute for Python's inconsistent PyLong_AsVoidPtr +PYWINTYPES_EXPORT BOOL PyWinLong_AsVoidPtr(PyObject *ob, void **pptr); +PYWINTYPES_EXPORT PyObject *PyWinLong_FromVoidPtr(const void *ptr); + +/* +** LARGE_INTEGER objects +*/ +// AsLARGE_INTEGER takes either int or long +PYWINTYPES_EXPORT BOOL PyWinObject_AsLARGE_INTEGER(PyObject *ob, LARGE_INTEGER *pResult); +PYWINTYPES_EXPORT BOOL PyWinObject_AsULARGE_INTEGER(PyObject *ob, ULARGE_INTEGER *pResult); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromLARGE_INTEGER(const LARGE_INTEGER &val); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromULARGE_INTEGER(const ULARGE_INTEGER &val); +// Helpers that take a Py_LONG_LONG, but (a) have pywin32 consistent signatures +// and (b) handle int *and* long (where Python only starts doing that in the +// PyLong_* APIs post 2.4) +// We also happen to know a LARGE_INTEGER is an __int64, so do it the easy way +#define PyWinObject_AsPY_LONG_LONG(ob, pResult) PyWinObject_AsLARGE_INTEGER((ob), (LARGE_INTEGER *)(pResult)) +#define PyWinObject_AsUPY_LONG_LONG(ob, pResult) PyWinObject_AsULARGE_INTEGER((ob), (ULARGE_INTEGER *)(pResult)) +#define PyWinObject_FromPY_LONG_LONG(val) PyWinObject_FromLARGE_INTEGER((LARGE_INTEGER)val) +#define PyWinObject_FromUPY_LONG_LONG(val) PyWinObject_FromULARGE_INTEGER((ULARGE_INTEGER)val) + +// A DWORD_PTR and ULONG_PTR appear to mean "integer long enough to hold a pointer" +// It is *not* actually a pointer (but is the same size as a pointer) +inline PyObject *PyWinObject_FromULONG_PTR(ULONG_PTR v) { return PyWinLong_FromVoidPtr((void *)v); } +inline BOOL PyWinLong_AsULONG_PTR(PyObject *ob, ULONG_PTR *r) { return PyWinLong_AsVoidPtr(ob, (void **)r); } + +inline PyObject *PyWinObject_FromDWORD_PTR(DWORD_PTR v) { return PyLong_FromVoidPtr((void *)v); } +inline BOOL PyWinLong_AsDWORD_PTR(PyObject *ob, DWORD_PTR *r) { return PyWinLong_AsVoidPtr(ob, (void **)r); } + +/* +** OVERLAPPED Object and API +*/ +class PyOVERLAPPED; // forward declare +extern PYWINTYPES_EXPORT PyTypeObject PyOVERLAPPEDType; // the Type for PyOVERLAPPED +#define PyOVERLAPPED_Check(ob) ((ob)->ob_type == &PyOVERLAPPEDType) +PYWINTYPES_EXPORT BOOL PyWinObject_AsOVERLAPPED(PyObject *ob, OVERLAPPED **ppOverlapped, BOOL bNoneOK = TRUE); +PYWINTYPES_EXPORT BOOL PyWinObject_AsPyOVERLAPPED(PyObject *ob, PyOVERLAPPED **ppOverlapped, BOOL bNoneOK = TRUE); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromOVERLAPPED(const OVERLAPPED *pOverlapped); + +// A global function that can work as a module method for making an OVERLAPPED object. +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewOVERLAPPED(PyObject *self, PyObject *args); + +#ifndef NO_PYWINTYPES_IID +/* +** IID/GUID support +*/ + +extern PYWINTYPES_EXPORT PyTypeObject PyIIDType; // the Type for PyIID +#define PyIID_Check(ob) ((ob)->ob_type == &PyIIDType) + +// Given an object repring a CLSID (either PyIID or string), fill the CLSID. +PYWINTYPES_EXPORT BOOL PyWinObject_AsIID(PyObject *obCLSID, CLSID *clsid); + +// return a native PyIID object representing an IID +PYWINTYPES_EXPORT PyObject *PyWinObject_FromIID(const IID &riid); + +// return a string/Unicode object representing an IID +PYWINTYPES_EXPORT PyObject *PyWinCoreString_FromIID(const IID &riid); + +// A global function that can work as a module method for making an IID object. +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewIID(PyObject *self, PyObject *args); +#endif /*NO_PYWINTYPES_IID */ + +/* +** TIME support +** +** We use a subclass of the builtin datetime. 
+*/ + +PYWINTYPES_EXPORT PyObject *PyWinObject_FromSYSTEMTIME(const SYSTEMTIME &t); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromFILETIME(const FILETIME &t); + +// Converts a TimeStamp, which is in 100 nanosecond units like a FILETIME +// TimeStamp is actually defined as a LARGE_INTEGER, so this function will also +// accept Windows security "TimeStamp" objects directly - however, we use a +// LARGE_INTEGER prototype to avoid pulling in the windows security headers. +PYWINTYPES_EXPORT PyObject *PyWinObject_FromTimeStamp(const LARGE_INTEGER &t); +PYWINTYPES_EXPORT PyObject *PyWinTimeObject_Fromtime_t(time_t t); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromDATE(DATE t); + +PYWINTYPES_EXPORT BOOL PyWinObject_AsDATE(PyObject *ob, DATE *pDate); +PYWINTYPES_EXPORT BOOL PyWinObject_AsFILETIME(PyObject *ob, FILETIME *pDate); +PYWINTYPES_EXPORT BOOL PyWinObject_AsSYSTEMTIME(PyObject *ob, SYSTEMTIME *pDate); + +// A global function that can work as a module method for making a time object. +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewTime(PyObject *self, PyObject *args); +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewTimeStamp(PyObject *self, PyObject *args); + +PYWINTYPES_EXPORT BOOL PyWinTime_Check(PyObject *ob); + +// functions to return WIN32_FIND_DATA tuples, used in shell, win32api, and win32file +PYWINTYPES_EXPORT PyObject *PyObject_FromWIN32_FIND_DATAA(WIN32_FIND_DATAA *pData); +PYWINTYPES_EXPORT PyObject *PyObject_FromWIN32_FIND_DATAW(WIN32_FIND_DATAW *pData); +#ifdef UNICODE +#define PyObject_FromWIN32_FIND_DATA PyObject_FromWIN32_FIND_DATAW +#else +#define PyObject_FromWIN32_FIND_DATA PyObject_FromWIN32_FIND_DATAA +#endif + +// POINT tuple, used in win32api_display.cpp and win32gui.i +PYWINTYPES_EXPORT BOOL PyWinObject_AsPOINT(PyObject *obpoint, LPPOINT ppoint); + +// IO_COUNTERS dict, used in win32process and win32job +PYWINTYPES_EXPORT PyObject *PyWinObject_FromIO_COUNTERS(PIO_COUNTERS pioc); + +// Make an array of DWORD's from a sequence of Python ints +PYWINTYPES_EXPORT BOOL PyWinObject_AsDWORDArray(PyObject *obdwords, DWORD **pdwords, DWORD *item_cnt, + BOOL bNoneOk = TRUE); + +// Conversion for resource id/name and class atom +PYWINTYPES_EXPORT BOOL PyWinObject_AsResourceIdA(PyObject *ob, char **presource_id, BOOL bNoneOK = FALSE); +PYWINTYPES_EXPORT BOOL PyWinObject_AsResourceIdW(PyObject *ob, WCHAR **presource_id, BOOL bNoneOK = FALSE); +PYWINTYPES_EXPORT void PyWinObject_FreeResourceId(char *resource_id); +PYWINTYPES_EXPORT void PyWinObject_FreeResourceId(WCHAR *resource_id); +#ifdef UNICODE +#define PyWinObject_AsResourceId PyWinObject_AsResourceIdW +#else +#define PyWinObject_AsResourceId PyWinObject_AsResourceIdA +#endif + +// WPARAM and LPARAM conversion +PYWINTYPES_EXPORT BOOL PyWinObject_AsPARAM(PyObject *ob, WPARAM *pparam); +inline PyObject *PyWinObject_FromPARAM(WPARAM param) { return PyWinObject_FromULONG_PTR(param); } +inline BOOL PyWinObject_AsPARAM(PyObject *ob, LPARAM *pparam) { return PyWinObject_AsPARAM(ob, (WPARAM *)pparam); } +inline PyObject *PyWinObject_FromPARAM(LPARAM param) { return PyWinObject_FromULONG_PTR(param); } + +// RECT conversions +// @object PyRECT|Tuple of 4 ints defining a rectangle: (left, top, right, bottom) +PYWINTYPES_EXPORT BOOL PyWinObject_AsRECT(PyObject *obrect, LPRECT prect); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromRECT(LPRECT prect); + +/* +** SECURITY_ATTRIBUTES support +*/ +extern PYWINTYPES_EXPORT PyTypeObject PySECURITY_ATTRIBUTESType; +#define PySECURITY_ATTRIBUTES_Check(ob) ((ob)->ob_type == &PySECURITY_ATTRIBUTESType) +extern 
PYWINTYPES_EXPORT PyTypeObject PyDEVMODEAType; +extern PYWINTYPES_EXPORT PyTypeObject PyDEVMODEWType; + +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewSECURITY_ATTRIBUTES(PyObject *self, PyObject *args); +PYWINTYPES_EXPORT BOOL PyWinObject_AsSECURITY_ATTRIBUTES(PyObject *ob, SECURITY_ATTRIBUTES **ppSECURITY_ATTRIBUTES, + BOOL bNoneOK = TRUE); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromSECURITY_ATTRIBUTES(const SECURITY_ATTRIBUTES &sa); +PYWINTYPES_EXPORT BOOL PyWinObject_AsDEVMODE(PyObject *ob, PDEVMODEA *ppDEVMODE, BOOL bNoneOK = TRUE); +PYWINTYPES_EXPORT BOOL PyWinObject_AsDEVMODE(PyObject *ob, PDEVMODEW *ppDEVMODE, BOOL bNoneOK); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromDEVMODE(PDEVMODEA); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromDEVMODE(PDEVMODEW); + +/* +** WAVEFORMATEX support +*/ + +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewWAVEFORMATEX(PyObject *self, PyObject *args); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromWAVEFROMATEX(const WAVEFORMATEX &wfx); +PYWINTYPES_EXPORT BOOL PyWinObject_AsWAVEFORMATEX(PyObject *ob, WAVEFORMATEX **ppWAVEFORMATEX, BOOL bNoneOK = TRUE); +extern PYWINTYPES_EXPORT PyTypeObject PyWAVEFORMATEXType; +#define PyWAVEFORMATEX_Check(ob) ((ob)->ob_type == &PyWAVEFORMATEXType) + +/* +** SECURITY_DESCRIPTOR support +*/ +extern PYWINTYPES_EXPORT PyTypeObject PySECURITY_DESCRIPTORType; +#define PySECURITY_DESCRIPTOR_Check(ob) ((ob)->ob_type == &PySECURITY_DESCRIPTORType) + +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewSECURITY_DESCRIPTOR(PyObject *self, PyObject *args); +PYWINTYPES_EXPORT BOOL PyWinObject_AsSECURITY_DESCRIPTOR(PyObject *ob, PSECURITY_DESCRIPTOR *ppSECURITY_DESCRIPTOR, + BOOL bNoneOK = TRUE); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromSECURITY_DESCRIPTOR(PSECURITY_DESCRIPTOR psd); + +PYWINTYPES_EXPORT BOOL _MakeAbsoluteSD(PSECURITY_DESCRIPTOR psd_relative, PSECURITY_DESCRIPTOR *ppsd_absolute); +PYWINTYPES_EXPORT void FreeAbsoluteSD(PSECURITY_DESCRIPTOR psd); + +/* +** SID support +*/ +extern PYWINTYPES_EXPORT PyTypeObject PySIDType; +#define PySID_Check(ob) ((ob)->ob_type == &PySIDType) + +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewSID(PyObject *self, PyObject *args); +PYWINTYPES_EXPORT BOOL PyWinObject_AsSID(PyObject *ob, PSID *ppSID, BOOL bNoneOK = FALSE); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromSID(PSID pSID); + +/* +** ACL support +*/ +extern PYWINTYPES_EXPORT PyTypeObject PyACLType; +#define PyACL_Check(ob) ((ob)->ob_type == &PyACLType) + +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewACL(PyObject *self, PyObject *args); +PYWINTYPES_EXPORT BOOL PyWinObject_AsACL(PyObject *ob, PACL *ppACL, BOOL bNoneOK = FALSE); + +/* +** Win32 HANDLE wrapper - any handle closable by "CloseHandle()" +*/ +extern PYWINTYPES_EXPORT PyTypeObject PyHANDLEType; // the Type for PyHANDLE +#define PyHANDLE_Check(ob) ((ob)->ob_type == &PyHANDLEType) + +// Convert an object to a HANDLE - None is always OK, as are ints, etc. +PYWINTYPES_EXPORT BOOL PyWinObject_AsHANDLE(PyObject *ob, HANDLE *pRes); +// For handles that use PyHANDLE. +PYWINTYPES_EXPORT PyObject *PyWinObject_FromHANDLE(HANDLE h); +// For handles that aren't returned as PyHANDLE or a subclass thereof (HDC, HWND, etc). +// Return as python ints or longs +PYWINTYPES_EXPORT PyObject *PyWinLong_FromHANDLE(HANDLE h); + +// A global function that can work as a module method for making a HANDLE object. +PYWINTYPES_EXPORT PyObject *PyWinMethod_NewHANDLE(PyObject *self, PyObject *args); + +// A global function that does the right thing wrt closing a "handle". 
+// The object can be either a PyHANDLE or an integer. +// If result is FALSE, a Python error is all setup (cf PyHANDLE::Close(), which doesnt set the Python error) +PYWINTYPES_EXPORT BOOL PyWinObject_CloseHANDLE(PyObject *obHandle); + +PYWINTYPES_EXPORT BOOL PyWinObject_AsHKEY(PyObject *ob, HKEY *pRes); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromHKEY(HKEY h); +PYWINTYPES_EXPORT BOOL PyWinObject_CloseHKEY(PyObject *obHandle); + +// MSG structure keeps coming up... +PYWINTYPES_EXPORT BOOL PyWinObject_AsMSG(PyObject *ob, MSG *pMsg); +PYWINTYPES_EXPORT PyObject *PyWinObject_FromMSG(const MSG *pMsg); + +#include "winsock.h" +/* +** SOCKET support. +*/ +PYWINTYPES_EXPORT +BOOL PySocket_AsSOCKET + //------------------------------------------------------------------------- + // Helper function for dealing with socket arguments. + (PyObject *obSocket, + // [in] Python object being converted into a SOCKET handle. + SOCKET *ps + // [out] Returned socket handle + ); + +/* +** Other Utilities +*/ +// ---------------------------------------------------------------------- +// WARNING - NEVER EVER USE new() ON THIS CLASS +// This class can be used as a local variable, typically in a Python/C +// function, and can be passed whereever a TCHAR/WCHAR is expected. +// Typical Usage: +// PyWin_AutoFreeBstr arg; +// PyArg_ParseTuple("O", &obStr); +// PyWinObject_AsAutoFreeBstr(obStr, &arg); +// CallTheFunction(arg); // Will correctly pass BSTR/OLECHAR +// -- when the function goes out of scope, the string owned by "arg" will +// -- automatically be freed. +// ---------------------------------------------------------------------- +class PYWINTYPES_EXPORT PyWin_AutoFreeBstr { + public: + PyWin_AutoFreeBstr(BSTR bstr = NULL); + ~PyWin_AutoFreeBstr(); + void SetBstr(BSTR bstr); + operator BSTR() { return m_bstr; } + + private: + BSTR m_bstr; +}; + +inline BOOL PyWinObject_AsAutoFreeBstr(PyObject *stringObject, PyWin_AutoFreeBstr *pResult, BOOL bNoneOK = FALSE) +{ + if (bNoneOK && stringObject == Py_None) { + pResult->SetBstr(NULL); + return TRUE; + } + BSTR bs; + if (!PyWinObject_AsBstr(stringObject, &bs, bNoneOK)) + return FALSE; + pResult->SetBstr(bs); + return TRUE; +} + +// ---------------------------------------------------------------------- +// +// THREAD MANAGEMENT +// + +// ### need to rename the PYCOM_ stuff soon... + +// We have 2 discrete locks in use (when no free-threaded is used, anyway). +// The first type of lock is the global Python lock. This is the standard lock +// in use by Python, and must be used as documented by Python. Specifically, no +// 2 threads may _ever_ call _any_ Python code (including INCREF/DECREF) without +// first having this thread lock. +// +// The second type of lock is a "global framework lock". This lock is simply a +// critical section, and used whenever 2 threads of C code need access to global +// data. This is different than the Python lock - this lock is used when no Python +// code can ever be called by the threads, but the C code still needs thread-safety. + +// We also supply helper classes which make the usage of these locks a one-liner. + +// The "framework" lock, implemented as a critical section. +PYWINTYPES_EXPORT void PyWin_AcquireGlobalLock(void); +PYWINTYPES_EXPORT void PyWin_ReleaseGlobalLock(void); + +// Helper class for the DLL global lock. +// +// This class magically waits for the Win32/COM framework global lock, and releases it +// when finished. +// NEVER new one of these objects - only use on the stack! 
+class CEnterLeaveFramework { + public: + CEnterLeaveFramework() { PyWin_AcquireGlobalLock(); } + ~CEnterLeaveFramework() { PyWin_ReleaseGlobalLock(); } +}; + +// Python thread-lock stuff. Free-threading patches use different semantics, but +// these are abstracted away here... +#ifndef FORCE_NO_FREE_THREAD +#ifdef WITH_FREE_THREAD +#define PYCOM_USE_FREE_THREAD +#endif +#endif +#ifdef PYCOM_USE_FREE_THREAD +#include +#else +#include +#endif + +// Helper class for Enter/Leave Python +// +// This class magically waits for the Python global lock, and releases it +// when finished. + +// Nested invocations will deadlock, so be careful. + +// NEVER new one of these objects - only use on the stack! +#ifndef PYCOM_USE_FREE_THREAD +extern PYWINTYPES_EXPORT PyInterpreterState *PyWin_InterpreterState; +extern PYWINTYPES_EXPORT BOOL PyWinThreadState_Ensure(); +extern PYWINTYPES_EXPORT void PyWinThreadState_Free(); +extern PYWINTYPES_EXPORT void PyWinThreadState_Clear(); +extern PYWINTYPES_EXPORT void PyWinInterpreterLock_Acquire(); +extern PYWINTYPES_EXPORT void PyWinInterpreterLock_Release(); + +extern PYWINTYPES_EXPORT int PyWinGlobals_Ensure(); +extern PYWINTYPES_EXPORT void PyWinGlobals_Free(); +#else +#define PyWinThreadState_Ensure PyThreadState_Ensure +#define PyWinThreadState_Free PyThreadState_Free +#define PyWinThreadState_Clear PyThreadState_ClearExc + +#endif + +extern PYWINTYPES_EXPORT void PyWin_MakePendingCalls(); + +class CEnterLeavePython { + public: + CEnterLeavePython() { acquire(); } + void acquire(void) + { + state = PyGILState_Ensure(); + released = FALSE; + } + ~CEnterLeavePython() { release(); } + void release(void) + { + if (!released) { + PyGILState_Release(state); + released = TRUE; + } + } + + private: + PyGILState_STATE state; + BOOL released; +}; + +// A helper for simple exception handling. +// try/__try +#if defined(__MINGW32__) || defined(MAINWIN) +#define PYWINTYPES_TRY try +#else +#define PYWINTYPES_TRY __try +#endif /* MAINWIN */ + +// catch/__except +#if defined(__MINGW32__) || defined(MAINWIN) +#define PYWINTYPES_EXCEPT catch (...) +#else +#define PYWINTYPES_EXCEPT __except (EXCEPTION_EXECUTE_HANDLER) +#endif +// End of exception helper macros. 
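The CEnterLeavePython helper defined above acquires the GIL via PyGILState_Ensure() in its constructor and releases it when the object goes out of scope. The sketch below is included purely as an illustration and is not taken from pywin32; it shows how a Windows callback arriving on an arbitrary thread might use the helper before touching Python objects. The callback name and the convention of passing an owned reference to a callable through the ULONG_PTR parameter are assumptions made for the example.

```cpp
// Illustrative only - not part of PyWinTypes.h or pywin32.
// Assumes 'param' carries an owned reference to a Python callable that other code
// queued earlier (e.g. via QueueUserAPC) while it still held the GIL.
#include "PyWinTypes.h"

static VOID CALLBACK DemoInvokePython(ULONG_PTR param)
{
    PyObject *callable = (PyObject *)param;
    CEnterLeavePython _celp;            // PyGILState_Ensure() now, released on scope exit
    PyObject *result = PyObject_CallObject(callable, NULL);
    if (result == NULL)
        PyErr_Print();                  // a C callback has nowhere to propagate the exception
    Py_XDECREF(result);
    Py_DECREF(callable);
}                                       // ~CEnterLeavePython releases the GIL here
```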
+ +// Class to hold a temporary reference that decrements itself +class TmpPyObject { + public: + PyObject *tmp; + TmpPyObject() { tmp = NULL; } + TmpPyObject(PyObject *ob) { tmp = ob; } + PyObject *operator=(PyObject *ob) + { + Py_XDECREF(tmp); + tmp = ob; + return tmp; + } + + boolean operator==(PyObject *ob) { return tmp == ob; } + operator PyObject *() { return tmp; } + ~TmpPyObject() { Py_XDECREF(tmp); } +}; + +#endif // __PYWINTYPES_H__ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/afxres.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/afxres.cpython-36.pyc new file mode 100644 index 00000000..d68180c0 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/afxres.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/commctrl.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/commctrl.cpython-36.pyc new file mode 100644 index 00000000..7864ff0b Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/commctrl.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/dbi.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/dbi.cpython-36.pyc new file mode 100644 index 00000000..ed9588a1 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/dbi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/mmsystem.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/mmsystem.cpython-36.pyc new file mode 100644 index 00000000..ab657f1f Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/mmsystem.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/netbios.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/netbios.cpython-36.pyc new file mode 100644 index 00000000..cc1d61a3 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/netbios.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/ntsecuritycon.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/ntsecuritycon.cpython-36.pyc new file mode 100644 index 00000000..c1fa376f Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/ntsecuritycon.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/pywin32_bootstrap.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/pywin32_bootstrap.cpython-36.pyc new file mode 100644 index 00000000..71ad177f Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/pywin32_bootstrap.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/pywin32_testutil.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/pywin32_testutil.cpython-36.pyc new file mode 100644 index 00000000..aa453ab5 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/pywin32_testutil.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/pywintypes.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/pywintypes.cpython-36.pyc new file mode 100644 index 00000000..83c233db Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/pywintypes.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/rasutil.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/rasutil.cpython-36.pyc new file mode 100644 index 00000000..4b50d919 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/rasutil.cpython-36.pyc differ diff --git 
a/venv/Lib/site-packages/win32/lib/__pycache__/regcheck.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/regcheck.cpython-36.pyc new file mode 100644 index 00000000..51290063 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/regcheck.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/regutil.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/regutil.cpython-36.pyc new file mode 100644 index 00000000..deb6d1f9 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/regutil.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/sspi.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/sspi.cpython-36.pyc new file mode 100644 index 00000000..afd7970e Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/sspi.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/sspicon.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/sspicon.cpython-36.pyc new file mode 100644 index 00000000..7a250a47 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/sspicon.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32con.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32con.cpython-36.pyc new file mode 100644 index 00000000..ea5d5c1e Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32con.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32cryptcon.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32cryptcon.cpython-36.pyc new file mode 100644 index 00000000..ed9f63bc Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32cryptcon.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32evtlogutil.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32evtlogutil.cpython-36.pyc new file mode 100644 index 00000000..2fe7cc63 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32evtlogutil.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32gui_struct.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32gui_struct.cpython-36.pyc new file mode 100644 index 00000000..a3d488ac Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32gui_struct.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32inetcon.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32inetcon.cpython-36.pyc new file mode 100644 index 00000000..f3ced943 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32inetcon.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32netcon.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32netcon.cpython-36.pyc new file mode 100644 index 00000000..44dbb5e8 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32netcon.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32pdhquery.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32pdhquery.cpython-36.pyc new file mode 100644 index 00000000..bfce9ea1 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32pdhquery.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32pdhutil.cpython-36.pyc 
b/venv/Lib/site-packages/win32/lib/__pycache__/win32pdhutil.cpython-36.pyc new file mode 100644 index 00000000..f2e0ea25 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32pdhutil.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32rcparser.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32rcparser.cpython-36.pyc new file mode 100644 index 00000000..f39fd772 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32rcparser.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32serviceutil.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32serviceutil.cpython-36.pyc new file mode 100644 index 00000000..92b98b12 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32serviceutil.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32timezone.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32timezone.cpython-36.pyc new file mode 100644 index 00000000..90d674eb Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32timezone.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32traceutil.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32traceutil.cpython-36.pyc new file mode 100644 index 00000000..55f3631b Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32traceutil.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/win32verstamp.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/win32verstamp.cpython-36.pyc new file mode 100644 index 00000000..486a19d4 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/win32verstamp.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/winerror.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/winerror.cpython-36.pyc new file mode 100644 index 00000000..39071ba6 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/winerror.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/winioctlcon.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/winioctlcon.cpython-36.pyc new file mode 100644 index 00000000..91cbc2ef Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/winioctlcon.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/winnt.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/winnt.cpython-36.pyc new file mode 100644 index 00000000..5d775753 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/winnt.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/winperf.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/winperf.cpython-36.pyc new file mode 100644 index 00000000..dec86b69 Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/winperf.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/__pycache__/winxptheme.cpython-36.pyc b/venv/Lib/site-packages/win32/lib/__pycache__/winxptheme.cpython-36.pyc new file mode 100644 index 00000000..f310e1cd Binary files /dev/null and b/venv/Lib/site-packages/win32/lib/__pycache__/winxptheme.cpython-36.pyc differ diff --git a/venv/Lib/site-packages/win32/lib/afxres.py b/venv/Lib/site-packages/win32/lib/afxres.py new file mode 100644 index 00000000..d9fb1f2a --- /dev/null +++ 
b/venv/Lib/site-packages/win32/lib/afxres.py @@ -0,0 +1,497 @@ +# Generated by h2py from stdin +TCS_MULTILINE = 0x0200 +CBRS_ALIGN_LEFT = 0x1000 +CBRS_ALIGN_TOP = 0x2000 +CBRS_ALIGN_RIGHT = 0x4000 +CBRS_ALIGN_BOTTOM = 0x8000 +CBRS_ALIGN_ANY = 0xF000 +CBRS_BORDER_LEFT = 0x0100 +CBRS_BORDER_TOP = 0x0200 +CBRS_BORDER_RIGHT = 0x0400 +CBRS_BORDER_BOTTOM = 0x0800 +CBRS_BORDER_ANY = 0x0F00 +CBRS_TOOLTIPS = 0x0010 +CBRS_FLYBY = 0x0020 +CBRS_FLOAT_MULTI = 0x0040 +CBRS_BORDER_3D = 0x0080 +CBRS_HIDE_INPLACE = 0x0008 +CBRS_SIZE_DYNAMIC = 0x0004 +CBRS_SIZE_FIXED = 0x0002 +CBRS_FLOATING = 0x0001 +CBRS_GRIPPER = 0x00400000 +CBRS_ORIENT_HORZ = (CBRS_ALIGN_TOP|CBRS_ALIGN_BOTTOM) +CBRS_ORIENT_VERT = (CBRS_ALIGN_LEFT|CBRS_ALIGN_RIGHT) +CBRS_ORIENT_ANY = (CBRS_ORIENT_HORZ|CBRS_ORIENT_VERT) +CBRS_ALL = 0xFFFF +CBRS_NOALIGN = 0x00000000 +CBRS_LEFT = (CBRS_ALIGN_LEFT|CBRS_BORDER_RIGHT) +CBRS_TOP = (CBRS_ALIGN_TOP|CBRS_BORDER_BOTTOM) +CBRS_RIGHT = (CBRS_ALIGN_RIGHT|CBRS_BORDER_LEFT) +CBRS_BOTTOM = (CBRS_ALIGN_BOTTOM|CBRS_BORDER_TOP) +SBPS_NORMAL = 0x0000 +SBPS_NOBORDERS = 0x0100 +SBPS_POPOUT = 0x0200 +SBPS_OWNERDRAW = 0x1000 +SBPS_DISABLED = 0x04000000 +SBPS_STRETCH = 0x08000000 +ID_INDICATOR_EXT = 0xE700 +ID_INDICATOR_CAPS = 0xE701 +ID_INDICATOR_NUM = 0xE702 +ID_INDICATOR_SCRL = 0xE703 +ID_INDICATOR_OVR = 0xE704 +ID_INDICATOR_REC = 0xE705 +ID_INDICATOR_KANA = 0xE706 +ID_SEPARATOR = 0 +AFX_IDW_CONTROLBAR_FIRST = 0xE800 +AFX_IDW_CONTROLBAR_LAST = 0xE8FF +AFX_IDW_TOOLBAR = 0xE800 +AFX_IDW_STATUS_BAR = 0xE801 +AFX_IDW_PREVIEW_BAR = 0xE802 +AFX_IDW_RESIZE_BAR = 0xE803 +AFX_IDW_DOCKBAR_TOP = 0xE81B +AFX_IDW_DOCKBAR_LEFT = 0xE81C +AFX_IDW_DOCKBAR_RIGHT = 0xE81D +AFX_IDW_DOCKBAR_BOTTOM = 0xE81E +AFX_IDW_DOCKBAR_FLOAT = 0xE81F +def AFX_CONTROLBAR_MASK(nIDC): return (1 << (nIDC - AFX_IDW_CONTROLBAR_FIRST)) + +AFX_IDW_PANE_FIRST = 0xE900 +AFX_IDW_PANE_LAST = 0xE9ff +AFX_IDW_HSCROLL_FIRST = 0xEA00 +AFX_IDW_VSCROLL_FIRST = 0xEA10 +AFX_IDW_SIZE_BOX = 0xEA20 +AFX_IDW_PANE_SAVE = 0xEA21 +AFX_IDS_APP_TITLE = 0xE000 +AFX_IDS_IDLEMESSAGE = 0xE001 +AFX_IDS_HELPMODEMESSAGE = 0xE002 +AFX_IDS_APP_TITLE_EMBEDDING = 0xE003 +AFX_IDS_COMPANY_NAME = 0xE004 +AFX_IDS_OBJ_TITLE_INPLACE = 0xE005 +ID_FILE_NEW = 0xE100 +ID_FILE_OPEN = 0xE101 +ID_FILE_CLOSE = 0xE102 +ID_FILE_SAVE = 0xE103 +ID_FILE_SAVE_AS = 0xE104 +ID_FILE_PAGE_SETUP = 0xE105 +ID_FILE_PRINT_SETUP = 0xE106 +ID_FILE_PRINT = 0xE107 +ID_FILE_PRINT_DIRECT = 0xE108 +ID_FILE_PRINT_PREVIEW = 0xE109 +ID_FILE_UPDATE = 0xE10A +ID_FILE_SAVE_COPY_AS = 0xE10B +ID_FILE_SEND_MAIL = 0xE10C +ID_FILE_MRU_FIRST = 0xE110 +ID_FILE_MRU_FILE1 = 0xE110 +ID_FILE_MRU_FILE2 = 0xE111 +ID_FILE_MRU_FILE3 = 0xE112 +ID_FILE_MRU_FILE4 = 0xE113 +ID_FILE_MRU_FILE5 = 0xE114 +ID_FILE_MRU_FILE6 = 0xE115 +ID_FILE_MRU_FILE7 = 0xE116 +ID_FILE_MRU_FILE8 = 0xE117 +ID_FILE_MRU_FILE9 = 0xE118 +ID_FILE_MRU_FILE10 = 0xE119 +ID_FILE_MRU_FILE11 = 0xE11A +ID_FILE_MRU_FILE12 = 0xE11B +ID_FILE_MRU_FILE13 = 0xE11C +ID_FILE_MRU_FILE14 = 0xE11D +ID_FILE_MRU_FILE15 = 0xE11E +ID_FILE_MRU_FILE16 = 0xE11F +ID_FILE_MRU_LAST = 0xE11F +ID_EDIT_CLEAR = 0xE120 +ID_EDIT_CLEAR_ALL = 0xE121 +ID_EDIT_COPY = 0xE122 +ID_EDIT_CUT = 0xE123 +ID_EDIT_FIND = 0xE124 +ID_EDIT_PASTE = 0xE125 +ID_EDIT_PASTE_LINK = 0xE126 +ID_EDIT_PASTE_SPECIAL = 0xE127 +ID_EDIT_REPEAT = 0xE128 +ID_EDIT_REPLACE = 0xE129 +ID_EDIT_SELECT_ALL = 0xE12A +ID_EDIT_UNDO = 0xE12B +ID_EDIT_REDO = 0xE12C +ID_WINDOW_NEW = 0xE130 +ID_WINDOW_ARRANGE = 0xE131 +ID_WINDOW_CASCADE = 0xE132 +ID_WINDOW_TILE_HORZ = 0xE133 +ID_WINDOW_TILE_VERT = 0xE134 +ID_WINDOW_SPLIT = 0xE135 
+AFX_IDM_WINDOW_FIRST = 0xE130 +AFX_IDM_WINDOW_LAST = 0xE13F +AFX_IDM_FIRST_MDICHILD = 0xFF00 +ID_APP_ABOUT = 0xE140 +ID_APP_EXIT = 0xE141 +ID_HELP_INDEX = 0xE142 +ID_HELP_FINDER = 0xE143 +ID_HELP_USING = 0xE144 +ID_CONTEXT_HELP = 0xE145 +ID_HELP = 0xE146 +ID_DEFAULT_HELP = 0xE147 +ID_NEXT_PANE = 0xE150 +ID_PREV_PANE = 0xE151 +ID_FORMAT_FONT = 0xE160 +ID_OLE_INSERT_NEW = 0xE200 +ID_OLE_EDIT_LINKS = 0xE201 +ID_OLE_EDIT_CONVERT = 0xE202 +ID_OLE_EDIT_CHANGE_ICON = 0xE203 +ID_OLE_EDIT_PROPERTIES = 0xE204 +ID_OLE_VERB_FIRST = 0xE210 +ID_OLE_VERB_LAST = 0xE21F +AFX_ID_PREVIEW_CLOSE = 0xE300 +AFX_ID_PREVIEW_NUMPAGE = 0xE301 +AFX_ID_PREVIEW_NEXT = 0xE302 +AFX_ID_PREVIEW_PREV = 0xE303 +AFX_ID_PREVIEW_PRINT = 0xE304 +AFX_ID_PREVIEW_ZOOMIN = 0xE305 +AFX_ID_PREVIEW_ZOOMOUT = 0xE306 +ID_VIEW_TOOLBAR = 0xE800 +ID_VIEW_STATUS_BAR = 0xE801 +ID_RECORD_FIRST = 0xE900 +ID_RECORD_LAST = 0xE901 +ID_RECORD_NEXT = 0xE902 +ID_RECORD_PREV = 0xE903 +IDC_STATIC = (-1) +AFX_IDS_SCFIRST = 0xEF00 +AFX_IDS_SCSIZE = 0xEF00 +AFX_IDS_SCMOVE = 0xEF01 +AFX_IDS_SCMINIMIZE = 0xEF02 +AFX_IDS_SCMAXIMIZE = 0xEF03 +AFX_IDS_SCNEXTWINDOW = 0xEF04 +AFX_IDS_SCPREVWINDOW = 0xEF05 +AFX_IDS_SCCLOSE = 0xEF06 +AFX_IDS_SCRESTORE = 0xEF12 +AFX_IDS_SCTASKLIST = 0xEF13 +AFX_IDS_MDICHILD = 0xEF1F +AFX_IDS_DESKACCESSORY = 0xEFDA +AFX_IDS_OPENFILE = 0xF000 +AFX_IDS_SAVEFILE = 0xF001 +AFX_IDS_ALLFILTER = 0xF002 +AFX_IDS_UNTITLED = 0xF003 +AFX_IDS_SAVEFILECOPY = 0xF004 +AFX_IDS_PREVIEW_CLOSE = 0xF005 +AFX_IDS_UNNAMED_FILE = 0xF006 +AFX_IDS_ABOUT = 0xF010 +AFX_IDS_HIDE = 0xF011 +AFX_IDP_NO_ERROR_AVAILABLE = 0xF020 +AFX_IDS_NOT_SUPPORTED_EXCEPTION = 0xF021 +AFX_IDS_RESOURCE_EXCEPTION = 0xF022 +AFX_IDS_MEMORY_EXCEPTION = 0xF023 +AFX_IDS_USER_EXCEPTION = 0xF024 +AFX_IDS_PRINTONPORT = 0xF040 +AFX_IDS_ONEPAGE = 0xF041 +AFX_IDS_TWOPAGE = 0xF042 +AFX_IDS_PRINTPAGENUM = 0xF043 +AFX_IDS_PREVIEWPAGEDESC = 0xF044 +AFX_IDS_PRINTDEFAULTEXT = 0xF045 +AFX_IDS_PRINTDEFAULT = 0xF046 +AFX_IDS_PRINTFILTER = 0xF047 +AFX_IDS_PRINTCAPTION = 0xF048 +AFX_IDS_PRINTTOFILE = 0xF049 +AFX_IDS_OBJECT_MENUITEM = 0xF080 +AFX_IDS_EDIT_VERB = 0xF081 +AFX_IDS_ACTIVATE_VERB = 0xF082 +AFX_IDS_CHANGE_LINK = 0xF083 +AFX_IDS_AUTO = 0xF084 +AFX_IDS_MANUAL = 0xF085 +AFX_IDS_FROZEN = 0xF086 +AFX_IDS_ALL_FILES = 0xF087 +AFX_IDS_SAVE_MENU = 0xF088 +AFX_IDS_UPDATE_MENU = 0xF089 +AFX_IDS_SAVE_AS_MENU = 0xF08A +AFX_IDS_SAVE_COPY_AS_MENU = 0xF08B +AFX_IDS_EXIT_MENU = 0xF08C +AFX_IDS_UPDATING_ITEMS = 0xF08D +AFX_IDS_METAFILE_FORMAT = 0xF08E +AFX_IDS_DIB_FORMAT = 0xF08F +AFX_IDS_BITMAP_FORMAT = 0xF090 +AFX_IDS_LINKSOURCE_FORMAT = 0xF091 +AFX_IDS_EMBED_FORMAT = 0xF092 +AFX_IDS_PASTELINKEDTYPE = 0xF094 +AFX_IDS_UNKNOWNTYPE = 0xF095 +AFX_IDS_RTF_FORMAT = 0xF096 +AFX_IDS_TEXT_FORMAT = 0xF097 +AFX_IDS_INVALID_CURRENCY = 0xF098 +AFX_IDS_INVALID_DATETIME = 0xF099 +AFX_IDS_INVALID_DATETIMESPAN = 0xF09A +AFX_IDP_INVALID_FILENAME = 0xF100 +AFX_IDP_FAILED_TO_OPEN_DOC = 0xF101 +AFX_IDP_FAILED_TO_SAVE_DOC = 0xF102 +AFX_IDP_ASK_TO_SAVE = 0xF103 +AFX_IDP_FAILED_TO_CREATE_DOC = 0xF104 +AFX_IDP_FILE_TOO_LARGE = 0xF105 +AFX_IDP_FAILED_TO_START_PRINT = 0xF106 +AFX_IDP_FAILED_TO_LAUNCH_HELP = 0xF107 +AFX_IDP_INTERNAL_FAILURE = 0xF108 +AFX_IDP_COMMAND_FAILURE = 0xF109 +AFX_IDP_FAILED_MEMORY_ALLOC = 0xF10A +AFX_IDP_PARSE_INT = 0xF110 +AFX_IDP_PARSE_REAL = 0xF111 +AFX_IDP_PARSE_INT_RANGE = 0xF112 +AFX_IDP_PARSE_REAL_RANGE = 0xF113 +AFX_IDP_PARSE_STRING_SIZE = 0xF114 +AFX_IDP_PARSE_RADIO_BUTTON = 0xF115 +AFX_IDP_PARSE_BYTE = 0xF116 +AFX_IDP_PARSE_UINT = 0xF117 +AFX_IDP_PARSE_DATETIME = 0xF118 +AFX_IDP_PARSE_CURRENCY = 
0xF119 +AFX_IDP_FAILED_INVALID_FORMAT = 0xF120 +AFX_IDP_FAILED_INVALID_PATH = 0xF121 +AFX_IDP_FAILED_DISK_FULL = 0xF122 +AFX_IDP_FAILED_ACCESS_READ = 0xF123 +AFX_IDP_FAILED_ACCESS_WRITE = 0xF124 +AFX_IDP_FAILED_IO_ERROR_READ = 0xF125 +AFX_IDP_FAILED_IO_ERROR_WRITE = 0xF126 +AFX_IDP_STATIC_OBJECT = 0xF180 +AFX_IDP_FAILED_TO_CONNECT = 0xF181 +AFX_IDP_SERVER_BUSY = 0xF182 +AFX_IDP_BAD_VERB = 0xF183 +AFX_IDP_FAILED_TO_NOTIFY = 0xF185 +AFX_IDP_FAILED_TO_LAUNCH = 0xF186 +AFX_IDP_ASK_TO_UPDATE = 0xF187 +AFX_IDP_FAILED_TO_UPDATE = 0xF188 +AFX_IDP_FAILED_TO_REGISTER = 0xF189 +AFX_IDP_FAILED_TO_AUTO_REGISTER = 0xF18A +AFX_IDP_FAILED_TO_CONVERT = 0xF18B +AFX_IDP_GET_NOT_SUPPORTED = 0xF18C +AFX_IDP_SET_NOT_SUPPORTED = 0xF18D +AFX_IDP_ASK_TO_DISCARD = 0xF18E +AFX_IDP_FAILED_TO_CREATE = 0xF18F +AFX_IDP_FAILED_MAPI_LOAD = 0xF190 +AFX_IDP_INVALID_MAPI_DLL = 0xF191 +AFX_IDP_FAILED_MAPI_SEND = 0xF192 +AFX_IDP_FILE_NONE = 0xF1A0 +AFX_IDP_FILE_GENERIC = 0xF1A1 +AFX_IDP_FILE_NOT_FOUND = 0xF1A2 +AFX_IDP_FILE_BAD_PATH = 0xF1A3 +AFX_IDP_FILE_TOO_MANY_OPEN = 0xF1A4 +AFX_IDP_FILE_ACCESS_DENIED = 0xF1A5 +AFX_IDP_FILE_INVALID_FILE = 0xF1A6 +AFX_IDP_FILE_REMOVE_CURRENT = 0xF1A7 +AFX_IDP_FILE_DIR_FULL = 0xF1A8 +AFX_IDP_FILE_BAD_SEEK = 0xF1A9 +AFX_IDP_FILE_HARD_IO = 0xF1AA +AFX_IDP_FILE_SHARING = 0xF1AB +AFX_IDP_FILE_LOCKING = 0xF1AC +AFX_IDP_FILE_DISKFULL = 0xF1AD +AFX_IDP_FILE_EOF = 0xF1AE +AFX_IDP_ARCH_NONE = 0xF1B0 +AFX_IDP_ARCH_GENERIC = 0xF1B1 +AFX_IDP_ARCH_READONLY = 0xF1B2 +AFX_IDP_ARCH_ENDOFFILE = 0xF1B3 +AFX_IDP_ARCH_WRITEONLY = 0xF1B4 +AFX_IDP_ARCH_BADINDEX = 0xF1B5 +AFX_IDP_ARCH_BADCLASS = 0xF1B6 +AFX_IDP_ARCH_BADSCHEMA = 0xF1B7 +AFX_IDS_OCC_SCALEUNITS_PIXELS = 0xF1C0 +AFX_IDS_STATUS_FONT = 0xF230 +AFX_IDS_TOOLTIP_FONT = 0xF231 +AFX_IDS_UNICODE_FONT = 0xF232 +AFX_IDS_MINI_FONT = 0xF233 +AFX_IDP_SQL_FIRST = 0xF280 +AFX_IDP_SQL_CONNECT_FAIL = 0xF281 +AFX_IDP_SQL_RECORDSET_FORWARD_ONLY = 0xF282 +AFX_IDP_SQL_EMPTY_COLUMN_LIST = 0xF283 +AFX_IDP_SQL_FIELD_SCHEMA_MISMATCH = 0xF284 +AFX_IDP_SQL_ILLEGAL_MODE = 0xF285 +AFX_IDP_SQL_MULTIPLE_ROWS_AFFECTED = 0xF286 +AFX_IDP_SQL_NO_CURRENT_RECORD = 0xF287 +AFX_IDP_SQL_NO_ROWS_AFFECTED = 0xF288 +AFX_IDP_SQL_RECORDSET_READONLY = 0xF289 +AFX_IDP_SQL_SQL_NO_TOTAL = 0xF28A +AFX_IDP_SQL_ODBC_LOAD_FAILED = 0xF28B +AFX_IDP_SQL_DYNASET_NOT_SUPPORTED = 0xF28C +AFX_IDP_SQL_SNAPSHOT_NOT_SUPPORTED = 0xF28D +AFX_IDP_SQL_API_CONFORMANCE = 0xF28E +AFX_IDP_SQL_SQL_CONFORMANCE = 0xF28F +AFX_IDP_SQL_NO_DATA_FOUND = 0xF290 +AFX_IDP_SQL_ROW_UPDATE_NOT_SUPPORTED = 0xF291 +AFX_IDP_SQL_ODBC_V2_REQUIRED = 0xF292 +AFX_IDP_SQL_NO_POSITIONED_UPDATES = 0xF293 +AFX_IDP_SQL_LOCK_MODE_NOT_SUPPORTED = 0xF294 +AFX_IDP_SQL_DATA_TRUNCATED = 0xF295 +AFX_IDP_SQL_ROW_FETCH = 0xF296 +AFX_IDP_SQL_INCORRECT_ODBC = 0xF297 +AFX_IDP_SQL_UPDATE_DELETE_FAILED = 0xF298 +AFX_IDP_SQL_DYNAMIC_CURSOR_NOT_SUPPORTED = 0xF299 +AFX_IDP_DAO_FIRST = 0xF2A0 +AFX_IDP_DAO_ENGINE_INITIALIZATION = 0xF2A0 +AFX_IDP_DAO_DFX_BIND = 0xF2A1 +AFX_IDP_DAO_OBJECT_NOT_OPEN = 0xF2A2 +AFX_IDP_DAO_ROWTOOSHORT = 0xF2A3 +AFX_IDP_DAO_BADBINDINFO = 0xF2A4 +AFX_IDP_DAO_COLUMNUNAVAILABLE = 0xF2A5 +AFX_IDC_LISTBOX = 100 +AFX_IDC_CHANGE = 101 +AFX_IDC_PRINT_DOCNAME = 201 +AFX_IDC_PRINT_PRINTERNAME = 202 +AFX_IDC_PRINT_PORTNAME = 203 +AFX_IDC_PRINT_PAGENUM = 204 +ID_APPLY_NOW = 0x3021 +ID_WIZBACK = 0x3023 +ID_WIZNEXT = 0x3024 +ID_WIZFINISH = 0x3025 +AFX_IDC_TAB_CONTROL = 0x3020 +AFX_IDD_FILEOPEN = 28676 +AFX_IDD_FILESAVE = 28677 +AFX_IDD_FONT = 28678 +AFX_IDD_COLOR = 28679 +AFX_IDD_PRINT = 28680 +AFX_IDD_PRINTSETUP = 28681 +AFX_IDD_FIND = 28682 
+AFX_IDD_REPLACE = 28683 +AFX_IDD_NEWTYPEDLG = 30721 +AFX_IDD_PRINTDLG = 30722 +AFX_IDD_PREVIEW_TOOLBAR = 30723 +AFX_IDD_PREVIEW_SHORTTOOLBAR = 30731 +AFX_IDD_INSERTOBJECT = 30724 +AFX_IDD_CHANGEICON = 30725 +AFX_IDD_CONVERT = 30726 +AFX_IDD_PASTESPECIAL = 30727 +AFX_IDD_EDITLINKS = 30728 +AFX_IDD_FILEBROWSE = 30729 +AFX_IDD_BUSY = 30730 +AFX_IDD_OBJECTPROPERTIES = 30732 +AFX_IDD_CHANGESOURCE = 30733 +AFX_IDC_CONTEXTHELP = 30977 +AFX_IDC_MAGNIFY = 30978 +AFX_IDC_SMALLARROWS = 30979 +AFX_IDC_HSPLITBAR = 30980 +AFX_IDC_VSPLITBAR = 30981 +AFX_IDC_NODROPCRSR = 30982 +AFX_IDC_TRACKNWSE = 30983 +AFX_IDC_TRACKNESW = 30984 +AFX_IDC_TRACKNS = 30985 +AFX_IDC_TRACKWE = 30986 +AFX_IDC_TRACK4WAY = 30987 +AFX_IDC_MOVE4WAY = 30988 +AFX_IDB_MINIFRAME_MENU = 30994 +AFX_IDB_CHECKLISTBOX_NT = 30995 +AFX_IDB_CHECKLISTBOX_95 = 30996 +AFX_IDR_PREVIEW_ACCEL = 30997 +AFX_IDI_STD_MDIFRAME = 31233 +AFX_IDI_STD_FRAME = 31234 +AFX_IDC_FONTPROP = 1000 +AFX_IDC_FONTNAMES = 1001 +AFX_IDC_FONTSTYLES = 1002 +AFX_IDC_FONTSIZES = 1003 +AFX_IDC_STRIKEOUT = 1004 +AFX_IDC_UNDERLINE = 1005 +AFX_IDC_SAMPLEBOX = 1006 +AFX_IDC_COLOR_BLACK = 1100 +AFX_IDC_COLOR_WHITE = 1101 +AFX_IDC_COLOR_RED = 1102 +AFX_IDC_COLOR_GREEN = 1103 +AFX_IDC_COLOR_BLUE = 1104 +AFX_IDC_COLOR_YELLOW = 1105 +AFX_IDC_COLOR_MAGENTA = 1106 +AFX_IDC_COLOR_CYAN = 1107 +AFX_IDC_COLOR_GRAY = 1108 +AFX_IDC_COLOR_LIGHTGRAY = 1109 +AFX_IDC_COLOR_DARKRED = 1110 +AFX_IDC_COLOR_DARKGREEN = 1111 +AFX_IDC_COLOR_DARKBLUE = 1112 +AFX_IDC_COLOR_LIGHTBROWN = 1113 +AFX_IDC_COLOR_DARKMAGENTA = 1114 +AFX_IDC_COLOR_DARKCYAN = 1115 +AFX_IDC_COLORPROP = 1116 +AFX_IDC_SYSTEMCOLORS = 1117 +AFX_IDC_PROPNAME = 1201 +AFX_IDC_PICTURE = 1202 +AFX_IDC_BROWSE = 1203 +AFX_IDC_CLEAR = 1204 +AFX_IDD_PROPPAGE_COLOR = 32257 +AFX_IDD_PROPPAGE_FONT = 32258 +AFX_IDD_PROPPAGE_PICTURE = 32259 +AFX_IDB_TRUETYPE = 32384 +AFX_IDS_PROPPAGE_UNKNOWN = 0xFE01 +AFX_IDS_COLOR_DESKTOP = 0xFE04 +AFX_IDS_COLOR_APPWORKSPACE = 0xFE05 +AFX_IDS_COLOR_WNDBACKGND = 0xFE06 +AFX_IDS_COLOR_WNDTEXT = 0xFE07 +AFX_IDS_COLOR_MENUBAR = 0xFE08 +AFX_IDS_COLOR_MENUTEXT = 0xFE09 +AFX_IDS_COLOR_ACTIVEBAR = 0xFE0A +AFX_IDS_COLOR_INACTIVEBAR = 0xFE0B +AFX_IDS_COLOR_ACTIVETEXT = 0xFE0C +AFX_IDS_COLOR_INACTIVETEXT = 0xFE0D +AFX_IDS_COLOR_ACTIVEBORDER = 0xFE0E +AFX_IDS_COLOR_INACTIVEBORDER = 0xFE0F +AFX_IDS_COLOR_WNDFRAME = 0xFE10 +AFX_IDS_COLOR_SCROLLBARS = 0xFE11 +AFX_IDS_COLOR_BTNFACE = 0xFE12 +AFX_IDS_COLOR_BTNSHADOW = 0xFE13 +AFX_IDS_COLOR_BTNTEXT = 0xFE14 +AFX_IDS_COLOR_BTNHIGHLIGHT = 0xFE15 +AFX_IDS_COLOR_DISABLEDTEXT = 0xFE16 +AFX_IDS_COLOR_HIGHLIGHT = 0xFE17 +AFX_IDS_COLOR_HIGHLIGHTTEXT = 0xFE18 +AFX_IDS_REGULAR = 0xFE19 +AFX_IDS_BOLD = 0xFE1A +AFX_IDS_ITALIC = 0xFE1B +AFX_IDS_BOLDITALIC = 0xFE1C +AFX_IDS_SAMPLETEXT = 0xFE1D +AFX_IDS_DISPLAYSTRING_FONT = 0xFE1E +AFX_IDS_DISPLAYSTRING_COLOR = 0xFE1F +AFX_IDS_DISPLAYSTRING_PICTURE = 0xFE20 +AFX_IDS_PICTUREFILTER = 0xFE21 +AFX_IDS_PICTYPE_UNKNOWN = 0xFE22 +AFX_IDS_PICTYPE_NONE = 0xFE23 +AFX_IDS_PICTYPE_BITMAP = 0xFE24 +AFX_IDS_PICTYPE_METAFILE = 0xFE25 +AFX_IDS_PICTYPE_ICON = 0xFE26 +AFX_IDS_COLOR_PPG = 0xFE28 +AFX_IDS_COLOR_PPG_CAPTION = 0xFE29 +AFX_IDS_FONT_PPG = 0xFE2A +AFX_IDS_FONT_PPG_CAPTION = 0xFE2B +AFX_IDS_PICTURE_PPG = 0xFE2C +AFX_IDS_PICTURE_PPG_CAPTION = 0xFE2D +AFX_IDS_PICTUREBROWSETITLE = 0xFE30 +AFX_IDS_BORDERSTYLE_0 = 0xFE31 +AFX_IDS_BORDERSTYLE_1 = 0xFE32 +AFX_IDS_VERB_EDIT = 0xFE40 +AFX_IDS_VERB_PROPERTIES = 0xFE41 +AFX_IDP_PICTURECANTOPEN = 0xFE83 +AFX_IDP_PICTURECANTLOAD = 0xFE84 +AFX_IDP_PICTURETOOLARGE = 0xFE85 +AFX_IDP_PICTUREREADFAILED = 0xFE86 
+AFX_IDP_E_ILLEGALFUNCTIONCALL = 0xFEA0 +AFX_IDP_E_OVERFLOW = 0xFEA1 +AFX_IDP_E_OUTOFMEMORY = 0xFEA2 +AFX_IDP_E_DIVISIONBYZERO = 0xFEA3 +AFX_IDP_E_OUTOFSTRINGSPACE = 0xFEA4 +AFX_IDP_E_OUTOFSTACKSPACE = 0xFEA5 +AFX_IDP_E_BADFILENAMEORNUMBER = 0xFEA6 +AFX_IDP_E_FILENOTFOUND = 0xFEA7 +AFX_IDP_E_BADFILEMODE = 0xFEA8 +AFX_IDP_E_FILEALREADYOPEN = 0xFEA9 +AFX_IDP_E_DEVICEIOERROR = 0xFEAA +AFX_IDP_E_FILEALREADYEXISTS = 0xFEAB +AFX_IDP_E_BADRECORDLENGTH = 0xFEAC +AFX_IDP_E_DISKFULL = 0xFEAD +AFX_IDP_E_BADRECORDNUMBER = 0xFEAE +AFX_IDP_E_BADFILENAME = 0xFEAF +AFX_IDP_E_TOOMANYFILES = 0xFEB0 +AFX_IDP_E_DEVICEUNAVAILABLE = 0xFEB1 +AFX_IDP_E_PERMISSIONDENIED = 0xFEB2 +AFX_IDP_E_DISKNOTREADY = 0xFEB3 +AFX_IDP_E_PATHFILEACCESSERROR = 0xFEB4 +AFX_IDP_E_PATHNOTFOUND = 0xFEB5 +AFX_IDP_E_INVALIDPATTERNSTRING = 0xFEB6 +AFX_IDP_E_INVALIDUSEOFNULL = 0xFEB7 +AFX_IDP_E_INVALIDFILEFORMAT = 0xFEB8 +AFX_IDP_E_INVALIDPROPERTYVALUE = 0xFEB9 +AFX_IDP_E_INVALIDPROPERTYARRAYINDEX = 0xFEBA +AFX_IDP_E_SETNOTSUPPORTEDATRUNTIME = 0xFEBB +AFX_IDP_E_SETNOTSUPPORTED = 0xFEBC +AFX_IDP_E_NEEDPROPERTYARRAYINDEX = 0xFEBD +AFX_IDP_E_SETNOTPERMITTED = 0xFEBE +AFX_IDP_E_GETNOTSUPPORTEDATRUNTIME = 0xFEBF +AFX_IDP_E_GETNOTSUPPORTED = 0xFEC0 +AFX_IDP_E_PROPERTYNOTFOUND = 0xFEC1 +AFX_IDP_E_INVALIDCLIPBOARDFORMAT = 0xFEC2 +AFX_IDP_E_INVALIDPICTURE = 0xFEC3 +AFX_IDP_E_PRINTERERROR = 0xFEC4 +AFX_IDP_E_CANTSAVEFILETOTEMP = 0xFEC5 +AFX_IDP_E_SEARCHTEXTNOTFOUND = 0xFEC6 +AFX_IDP_E_REPLACEMENTSTOOLONG = 0xFEC7 diff --git a/venv/Lib/site-packages/win32/lib/commctrl.py b/venv/Lib/site-packages/win32/lib/commctrl.py new file mode 100644 index 00000000..f436e2e0 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/commctrl.py @@ -0,0 +1,1549 @@ +# Generated by h2py from COMMCTRL.H +WM_USER = 1024 +ICC_LISTVIEW_CLASSES = 1 # listview, header +ICC_TREEVIEW_CLASSES = 2 # treeview, tooltips +ICC_BAR_CLASSES = 4 # toolbar, statusbar, trackbar, tooltips +ICC_TAB_CLASSES = 8 # tab, tooltips +ICC_UPDOWN_CLASS = 16 # updown +ICC_PROGRESS_CLASS = 32 # progress +ICC_HOTKEY_CLASS = 64 # hotkey +ICC_ANIMATE_CLASS = 128 # animate +ICC_WIN95_CLASSES = 255 +ICC_DATE_CLASSES = 256 # month picker, date picker, time picker, updown +ICC_USEREX_CLASSES = 512 # comboex +ICC_COOL_CLASSES = 1024 # rebar (coolbar) control +ICC_INTERNET_CLASSES = 2048 +ICC_PAGESCROLLER_CLASS = 4096 # page scroller +ICC_NATIVEFNTCTL_CLASS = 8192 # native font control +ODT_HEADER = 100 +ODT_TAB = 101 +ODT_LISTVIEW = 102 +PY_0U = 0 +NM_FIRST = (PY_0U) # generic to all controls +NM_LAST = (PY_0U- 99) +LVN_FIRST = (PY_0U-100) # listview +LVN_LAST = (PY_0U-199) +HDN_FIRST = (PY_0U-300) # header +HDN_LAST = (PY_0U-399) +TVN_FIRST = (PY_0U-400) # treeview +TVN_LAST = (PY_0U-499) +TTN_FIRST = (PY_0U-520) # tooltips +TTN_LAST = (PY_0U-549) +TCN_FIRST = (PY_0U-550) # tab control +TCN_LAST = (PY_0U-580) +CDN_FIRST = (PY_0U-601) # common dialog (new) +CDN_LAST = (PY_0U-699) +TBN_FIRST = (PY_0U-700) # toolbar +TBN_LAST = (PY_0U-720) +UDN_FIRST = (PY_0U-721) # updown +UDN_LAST = (PY_0U-740) +MCN_FIRST = (PY_0U-750) # monthcal +MCN_LAST = (PY_0U-759) +DTN_FIRST = (PY_0U-760) # datetimepick +DTN_LAST = (PY_0U-799) +CBEN_FIRST = (PY_0U-800) # combo box ex +CBEN_LAST = (PY_0U-830) +RBN_FIRST = (PY_0U-831) # rebar +RBN_LAST = (PY_0U-859) +IPN_FIRST = (PY_0U-860) # internet address +IPN_LAST = (PY_0U-879) # internet address +SBN_FIRST = (PY_0U-880) # status bar +SBN_LAST = (PY_0U-899) +PGN_FIRST = (PY_0U-900) # Pager Control +PGN_LAST = (PY_0U-950) +LVM_FIRST = 4096 # ListView messages +TV_FIRST = 4352 # 
TreeView messages +HDM_FIRST = 4608 # Header messages +TCM_FIRST = 4864 # Tab control messages +PGM_FIRST = 5120 # Pager control messages +CCM_FIRST = 8192 # Common control shared messages +CCM_SETBKCOLOR = (CCM_FIRST + 1) # lParam is bkColor +CCM_SETCOLORSCHEME = (CCM_FIRST + 2) # lParam is color scheme +CCM_GETCOLORSCHEME = (CCM_FIRST + 3) # fills in COLORSCHEME pointed to by lParam +CCM_GETDROPTARGET = (CCM_FIRST + 4) +CCM_SETUNICODEFORMAT = (CCM_FIRST + 5) +CCM_GETUNICODEFORMAT = (CCM_FIRST + 6) +INFOTIPSIZE = 1024 +NM_OUTOFMEMORY = (NM_FIRST-1) +NM_CLICK = (NM_FIRST-2) # uses NMCLICK struct +NM_DBLCLK = (NM_FIRST-3) +NM_RETURN = (NM_FIRST-4) +NM_RCLICK = (NM_FIRST-5) # uses NMCLICK struct +NM_RDBLCLK = (NM_FIRST-6) +NM_SETFOCUS = (NM_FIRST-7) +NM_KILLFOCUS = (NM_FIRST-8) +NM_CUSTOMDRAW = (NM_FIRST-12) +NM_HOVER = (NM_FIRST-13) +NM_NCHITTEST = (NM_FIRST-14) # uses NMMOUSE struct +NM_KEYDOWN = (NM_FIRST-15) # uses NMKEY struct +NM_RELEASEDCAPTURE = (NM_FIRST-16) +NM_SETCURSOR = (NM_FIRST-17) # uses NMMOUSE struct +NM_CHAR = (NM_FIRST-18) # uses NMCHAR struct +MSGF_COMMCTRL_BEGINDRAG = 16896 +MSGF_COMMCTRL_SIZEHEADER = 16897 +MSGF_COMMCTRL_DRAGSELECT = 16898 +MSGF_COMMCTRL_TOOLBARCUST = 16899 +CDRF_DODEFAULT = 0 +CDRF_NEWFONT = 2 +CDRF_SKIPDEFAULT = 4 +CDRF_NOTIFYPOSTPAINT = 16 +CDRF_NOTIFYITEMDRAW = 32 +CDRF_NOTIFYSUBITEMDRAW = 32 # flags are the same, we can distinguish by context +CDRF_NOTIFYPOSTERASE = 64 +CDDS_PREPAINT = 1 +CDDS_POSTPAINT = 2 +CDDS_PREERASE = 3 +CDDS_POSTERASE = 4 +CDDS_ITEM = 65536 +CDDS_ITEMPREPAINT = (CDDS_ITEM | CDDS_PREPAINT) +CDDS_ITEMPOSTPAINT = (CDDS_ITEM | CDDS_POSTPAINT) +CDDS_ITEMPREERASE = (CDDS_ITEM | CDDS_PREERASE) +CDDS_ITEMPOSTERASE = (CDDS_ITEM | CDDS_POSTERASE) +CDDS_SUBITEM = 131072 +CDIS_SELECTED = 1 +CDIS_GRAYED = 2 +CDIS_DISABLED = 4 +CDIS_CHECKED = 8 +CDIS_FOCUS = 16 +CDIS_DEFAULT = 32 +CDIS_HOT = 64 +CDIS_MARKED = 128 +CDIS_INDETERMINATE = 256 +CLR_NONE = -1 # 0xFFFFFFFFL +CLR_DEFAULT = -16777216 # 0xFF000000L +ILC_MASK = 1 +ILC_COLOR = 0 +ILC_COLORDDB = 254 +ILC_COLOR4 = 4 +ILC_COLOR8 = 8 +ILC_COLOR16 = 16 +ILC_COLOR24 = 24 +ILC_COLOR32 = 32 +ILC_PALETTE = 2048 # (not implemented) +ILD_NORMAL = 0 +ILD_TRANSPARENT = 1 +ILD_MASK = 16 +ILD_IMAGE = 32 +ILD_ROP = 64 +ILD_BLEND25 = 2 +ILD_BLEND50 = 4 +ILD_OVERLAYMASK = 3840 +ILD_SELECTED = ILD_BLEND50 +ILD_FOCUS = ILD_BLEND25 +ILD_BLEND = ILD_BLEND50 +CLR_HILIGHT = CLR_DEFAULT +ILCF_MOVE = (0) +ILCF_SWAP = (1) +WC_HEADERA = "SysHeader32" +WC_HEADER = WC_HEADERA +HDS_HORZ = 0 +HDS_BUTTONS = 2 +HDS_HOTTRACK = 4 +HDS_HIDDEN = 8 +HDS_DRAGDROP = 64 +HDS_FULLDRAG = 128 +HDI_WIDTH = 1 +HDI_HEIGHT = HDI_WIDTH +HDI_TEXT = 2 +HDI_FORMAT = 4 +HDI_LPARAM = 8 +HDI_BITMAP = 16 +HDI_IMAGE = 32 +HDI_DI_SETITEM = 64 +HDI_ORDER = 128 +HDF_LEFT = 0 +HDF_RIGHT = 1 +HDF_CENTER = 2 +HDF_JUSTIFYMASK = 3 +HDF_RTLREADING = 4 +HDF_OWNERDRAW = 32768 +HDF_STRING = 16384 +HDF_BITMAP = 8192 +HDF_BITMAP_ON_RIGHT = 4096 +HDF_IMAGE = 2048 +HDM_GETITEMCOUNT = (HDM_FIRST + 0) +HDM_INSERTITEMA = (HDM_FIRST + 1) +HDM_INSERTITEMW = (HDM_FIRST + 10) +HDM_INSERTITEM = HDM_INSERTITEMA +HDM_DELETEITEM = (HDM_FIRST + 2) +HDM_GETITEMA = (HDM_FIRST + 3) +HDM_GETITEMW = (HDM_FIRST + 11) +HDM_GETITEM = HDM_GETITEMA +HDM_SETITEMA = (HDM_FIRST + 4) +HDM_SETITEMW = (HDM_FIRST + 12) +HDM_SETITEM = HDM_SETITEMA +HDM_LAYOUT = (HDM_FIRST + 5) +HHT_NOWHERE = 1 +HHT_ONHEADER = 2 +HHT_ONDIVIDER = 4 +HHT_ONDIVOPEN = 8 +HHT_ABOVE = 256 +HHT_BELOW = 512 +HHT_TORIGHT = 1024 +HHT_TOLEFT = 2048 +HDM_HITTEST = (HDM_FIRST + 6) +HDM_GETITEMRECT = (HDM_FIRST + 7) 
+HDM_SETIMAGELIST = (HDM_FIRST + 8) +HDM_GETIMAGELIST = (HDM_FIRST + 9) +HDM_ORDERTOINDEX = (HDM_FIRST + 15) +HDM_CREATEDRAGIMAGE = (HDM_FIRST + 16) # wparam = which item (by index) +HDM_GETORDERARRAY = (HDM_FIRST + 17) +HDM_SETORDERARRAY = (HDM_FIRST + 18) +HDM_SETHOTDIVIDER = (HDM_FIRST + 19) +HDM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +HDM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +HDN_ITEMCHANGINGA = (HDN_FIRST-0) +HDN_ITEMCHANGINGW = (HDN_FIRST-20) +HDN_ITEMCHANGEDA = (HDN_FIRST-1) +HDN_ITEMCHANGEDW = (HDN_FIRST-21) +HDN_ITEMCLICKA = (HDN_FIRST-2) +HDN_ITEMCLICKW = (HDN_FIRST-22) +HDN_ITEMDBLCLICKA = (HDN_FIRST-3) +HDN_ITEMDBLCLICKW = (HDN_FIRST-23) +HDN_DIVIDERDBLCLICKA = (HDN_FIRST-5) +HDN_DIVIDERDBLCLICKW = (HDN_FIRST-25) +HDN_BEGINTRACKA = (HDN_FIRST-6) +HDN_BEGINTRACKW = (HDN_FIRST-26) +HDN_ENDTRACKA = (HDN_FIRST-7) +HDN_ENDTRACKW = (HDN_FIRST-27) +HDN_TRACKA = (HDN_FIRST-8) +HDN_TRACKW = (HDN_FIRST-28) +HDN_GETDISPINFOA = (HDN_FIRST-9) +HDN_GETDISPINFOW = (HDN_FIRST-29) +HDN_BEGINDRAG = (HDN_FIRST-10) +HDN_ENDDRAG = (HDN_FIRST-11) +HDN_ITEMCHANGING = HDN_ITEMCHANGINGA +HDN_ITEMCHANGED = HDN_ITEMCHANGEDA +HDN_ITEMCLICK = HDN_ITEMCLICKA +HDN_ITEMDBLCLICK = HDN_ITEMDBLCLICKA +HDN_DIVIDERDBLCLICK = HDN_DIVIDERDBLCLICKA +HDN_BEGINTRACK = HDN_BEGINTRACKA +HDN_ENDTRACK = HDN_ENDTRACKA +HDN_TRACK = HDN_TRACKA +HDN_GETDISPINFO = HDN_GETDISPINFOA +TOOLBARCLASSNAMEA = "ToolbarWindow32" +TOOLBARCLASSNAME = TOOLBARCLASSNAMEA +CMB_MASKED = 2 +TBSTATE_CHECKED = 1 +TBSTATE_PRESSED = 2 +TBSTATE_ENABLED = 4 +TBSTATE_HIDDEN = 8 +TBSTATE_INDETERMINATE = 16 +TBSTATE_WRAP = 32 +TBSTATE_ELLIPSES = 64 +TBSTATE_MARKED = 128 +TBSTYLE_BUTTON = 0 +TBSTYLE_SEP = 1 +TBSTYLE_CHECK = 2 +TBSTYLE_GROUP = 4 +TBSTYLE_CHECKGROUP = (TBSTYLE_GROUP | TBSTYLE_CHECK) +TBSTYLE_DROPDOWN = 8 +TBSTYLE_AUTOSIZE = 16 # automatically calculate the cx of the button +TBSTYLE_NOPREFIX = 32 # if this button should not have accel prefix +TBSTYLE_TOOLTIPS = 256 +TBSTYLE_WRAPABLE = 512 +TBSTYLE_ALTDRAG = 1024 +TBSTYLE_FLAT = 2048 +TBSTYLE_LIST = 4096 +TBSTYLE_CUSTOMERASE = 8192 +TBSTYLE_REGISTERDROP = 16384 +TBSTYLE_TRANSPARENT = 32768 +TBSTYLE_EX_DRAWDDARROWS = 1 +BTNS_BUTTON = TBSTYLE_BUTTON +BTNS_SEP = TBSTYLE_SEP # 0x0001 +BTNS_CHECK = TBSTYLE_CHECK # 0x0002 +BTNS_GROUP = TBSTYLE_GROUP # 0x0004 +BTNS_CHECKGROUP = TBSTYLE_CHECKGROUP # (TBSTYLE_GROUP | TBSTYLE_CHECK) +BTNS_DROPDOWN = TBSTYLE_DROPDOWN # 0x0008 +BTNS_AUTOSIZE = TBSTYLE_AUTOSIZE # 0x0010; automatically calculate the cx of the button +BTNS_NOPREFIX = TBSTYLE_NOPREFIX # 0x0020; this button should not have accel prefix +BTNS_SHOWTEXT = 64 # 0x0040 // ignored unless TBSTYLE_EX_MIXEDBUTTONS is set +BTNS_WHOLEDROPDOWN = 128 # 0x0080 // draw drop-down arrow, but without split arrow section +TBCDRF_NOEDGES = 65536 # Don't draw button edges +TBCDRF_HILITEHOTTRACK = 131072 # Use color of the button bk when hottracked +TBCDRF_NOOFFSET = 262144 # Don't offset button if pressed +TBCDRF_NOMARK = 524288 # Don't draw default highlight of image/text for TBSTATE_MARKED +TBCDRF_NOETCHEDEFFECT = 1048576 # Don't draw etched effect for disabled items +TB_ENABLEBUTTON = (WM_USER + 1) +TB_CHECKBUTTON = (WM_USER + 2) +TB_PRESSBUTTON = (WM_USER + 3) +TB_HIDEBUTTON = (WM_USER + 4) +TB_INDETERMINATE = (WM_USER + 5) +TB_MARKBUTTON = (WM_USER + 6) +TB_ISBUTTONENABLED = (WM_USER + 9) +TB_ISBUTTONCHECKED = (WM_USER + 10) +TB_ISBUTTONPRESSED = (WM_USER + 11) +TB_ISBUTTONHIDDEN = (WM_USER + 12) +TB_ISBUTTONINDETERMINATE = (WM_USER + 13) +TB_ISBUTTONHIGHLIGHTED = (WM_USER + 14) +TB_SETSTATE = (WM_USER + 
17) +TB_GETSTATE = (WM_USER + 18) +TB_ADDBITMAP = (WM_USER + 19) +HINST_COMMCTRL = -1 +IDB_STD_SMALL_COLOR = 0 +IDB_STD_LARGE_COLOR = 1 +IDB_VIEW_SMALL_COLOR = 4 +IDB_VIEW_LARGE_COLOR = 5 +IDB_HIST_SMALL_COLOR = 8 +IDB_HIST_LARGE_COLOR = 9 +STD_CUT = 0 +STD_COPY = 1 +STD_PASTE = 2 +STD_UNDO = 3 +STD_REDOW = 4 +STD_DELETE = 5 +STD_FILENEW = 6 +STD_FILEOPEN = 7 +STD_FILESAVE = 8 +STD_PRINTPRE = 9 +STD_PROPERTIES = 10 +STD_HELP = 11 +STD_FIND = 12 +STD_REPLACE = 13 +STD_PRINT = 14 +VIEW_LARGEICONS = 0 +VIEW_SMALLICONS = 1 +VIEW_LIST = 2 +VIEW_DETAILS = 3 +VIEW_SORTNAME = 4 +VIEW_SORTSIZE = 5 +VIEW_SORTDATE = 6 +VIEW_SORTTYPE = 7 +VIEW_PARENTFOLDER = 8 +VIEW_NETCONNECT = 9 +VIEW_NETDISCONNECT = 10 +VIEW_NEWFOLDER = 11 +VIEW_VIEWMENU = 12 +HIST_BACK = 0 +HIST_FORWARD = 1 +HIST_FAVORITES = 2 +HIST_ADDTOFAVORITES = 3 +HIST_VIEWTREE = 4 +TB_ADDBUTTONSA = (WM_USER + 20) +TB_INSERTBUTTONA = (WM_USER + 21) +TB_ADDBUTTONS = (WM_USER + 20) +TB_INSERTBUTTON = (WM_USER + 21) +TB_DELETEBUTTON = (WM_USER + 22) +TB_GETBUTTON = (WM_USER + 23) +TB_BUTTONCOUNT = (WM_USER + 24) +TB_COMMANDTOINDEX = (WM_USER + 25) +TB_SAVERESTOREA = (WM_USER + 26) +TB_SAVERESTOREW = (WM_USER + 76) +TB_CUSTOMIZE = (WM_USER + 27) +TB_ADDSTRINGA = (WM_USER + 28) +TB_ADDSTRINGW = (WM_USER + 77) +TB_GETITEMRECT = (WM_USER + 29) +TB_BUTTONSTRUCTSIZE = (WM_USER + 30) +TB_SETBUTTONSIZE = (WM_USER + 31) +TB_SETBITMAPSIZE = (WM_USER + 32) +TB_AUTOSIZE = (WM_USER + 33) +TB_GETTOOLTIPS = (WM_USER + 35) +TB_SETTOOLTIPS = (WM_USER + 36) +TB_SETPARENT = (WM_USER + 37) +TB_SETROWS = (WM_USER + 39) +TB_GETROWS = (WM_USER + 40) +TB_SETCMDID = (WM_USER + 42) +TB_CHANGEBITMAP = (WM_USER + 43) +TB_GETBITMAP = (WM_USER + 44) +TB_GETBUTTONTEXTA = (WM_USER + 45) +TB_GETBUTTONTEXTW = (WM_USER + 75) +TB_REPLACEBITMAP = (WM_USER + 46) +TB_SETINDENT = (WM_USER + 47) +TB_SETIMAGELIST = (WM_USER + 48) +TB_GETIMAGELIST = (WM_USER + 49) +TB_LOADIMAGES = (WM_USER + 50) +TB_GETRECT = (WM_USER + 51) # wParam is the Cmd instead of index +TB_SETHOTIMAGELIST = (WM_USER + 52) +TB_GETHOTIMAGELIST = (WM_USER + 53) +TB_SETDISABLEDIMAGELIST = (WM_USER + 54) +TB_GETDISABLEDIMAGELIST = (WM_USER + 55) +TB_SETSTYLE = (WM_USER + 56) +TB_GETSTYLE = (WM_USER + 57) +TB_GETBUTTONSIZE = (WM_USER + 58) +TB_SETBUTTONWIDTH = (WM_USER + 59) +TB_SETMAXTEXTROWS = (WM_USER + 60) +TB_GETTEXTROWS = (WM_USER + 61) +TB_GETBUTTONTEXT = TB_GETBUTTONTEXTW +TB_SAVERESTORE = TB_SAVERESTOREW +TB_ADDSTRING = TB_ADDSTRINGW +TB_GETBUTTONTEXT = TB_GETBUTTONTEXTA +TB_SAVERESTORE = TB_SAVERESTOREA +TB_ADDSTRING = TB_ADDSTRINGA +TB_GETOBJECT = (WM_USER + 62) # wParam == IID, lParam void **ppv +TB_GETHOTITEM = (WM_USER + 71) +TB_SETHOTITEM = (WM_USER + 72) # wParam == iHotItem +TB_SETANCHORHIGHLIGHT = (WM_USER + 73) # wParam == TRUE/FALSE +TB_GETANCHORHIGHLIGHT = (WM_USER + 74) +TB_MAPACCELERATORA = (WM_USER + 78) # wParam == ch, lParam int * pidBtn +TBIMHT_AFTER = 1 # TRUE = insert After iButton, otherwise before +TBIMHT_BACKGROUND = 2 # TRUE iff missed buttons completely +TB_GETINSERTMARK = (WM_USER + 79) # lParam == LPTBINSERTMARK +TB_SETINSERTMARK = (WM_USER + 80) # lParam == LPTBINSERTMARK +TB_INSERTMARKHITTEST = (WM_USER + 81) # wParam == LPPOINT lParam == LPTBINSERTMARK +TB_MOVEBUTTON = (WM_USER + 82) +TB_GETMAXSIZE = (WM_USER + 83) # lParam == LPSIZE +TB_SETEXTENDEDSTYLE = (WM_USER + 84) # For TBSTYLE_EX_* +TB_GETEXTENDEDSTYLE = (WM_USER + 85) # For TBSTYLE_EX_* +TB_GETPADDING = (WM_USER + 86) +TB_SETPADDING = (WM_USER + 87) +TB_SETINSERTMARKCOLOR = (WM_USER + 88) +TB_GETINSERTMARKCOLOR = (WM_USER 
+ 89) +TB_SETCOLORSCHEME = CCM_SETCOLORSCHEME # lParam is color scheme +TB_GETCOLORSCHEME = CCM_GETCOLORSCHEME # fills in COLORSCHEME pointed to by lParam +TB_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +TB_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +TB_MAPACCELERATORW = (WM_USER + 90) # wParam == ch, lParam int * pidBtn +TB_MAPACCELERATOR = TB_MAPACCELERATORW +TB_MAPACCELERATOR = TB_MAPACCELERATORA +TBBF_LARGE = 1 +TB_GETBITMAPFLAGS = (WM_USER + 41) +TBIF_IMAGE = 1 +TBIF_TEXT = 2 +TBIF_STATE = 4 +TBIF_STYLE = 8 +TBIF_LPARAM = 16 +TBIF_COMMAND = 32 +TBIF_SIZE = 64 +TB_GETBUTTONINFOW = (WM_USER + 63) +TB_SETBUTTONINFOW = (WM_USER + 64) +TB_GETBUTTONINFOA = (WM_USER + 65) +TB_SETBUTTONINFOA = (WM_USER + 66) +TB_INSERTBUTTONW = (WM_USER + 67) +TB_ADDBUTTONSW = (WM_USER + 68) +TB_HITTEST = (WM_USER + 69) +TB_SETDRAWTEXTFLAGS = (WM_USER + 70) # wParam == mask lParam == bit values +TBN_GETBUTTONINFOA = (TBN_FIRST-0) +TBN_GETBUTTONINFOW = (TBN_FIRST-20) +TBN_BEGINDRAG = (TBN_FIRST-1) +TBN_ENDDRAG = (TBN_FIRST-2) +TBN_BEGINADJUST = (TBN_FIRST-3) +TBN_ENDADJUST = (TBN_FIRST-4) +TBN_RESET = (TBN_FIRST-5) +TBN_QUERYINSERT = (TBN_FIRST-6) +TBN_QUERYDELETE = (TBN_FIRST-7) +TBN_TOOLBARCHANGE = (TBN_FIRST-8) +TBN_CUSTHELP = (TBN_FIRST-9) +TBN_DROPDOWN = (TBN_FIRST - 10) +TBN_GETOBJECT = (TBN_FIRST - 12) +HICF_OTHER = 0 +HICF_MOUSE = 1 # Triggered by mouse +HICF_ARROWKEYS = 2 # Triggered by arrow keys +HICF_ACCELERATOR = 4 # Triggered by accelerator +HICF_DUPACCEL = 8 # This accelerator is not unique +HICF_ENTERING = 16 # idOld is invalid +HICF_LEAVING = 32 # idNew is invalid +HICF_RESELECT = 64 # hot item reselected +TBN_HOTITEMCHANGE = (TBN_FIRST - 13) +TBN_DRAGOUT = (TBN_FIRST - 14) # this is sent when the user clicks down on a button then drags off the button +TBN_DELETINGBUTTON = (TBN_FIRST - 15) # uses TBNOTIFY +TBN_GETDISPINFOA = (TBN_FIRST - 16) # This is sent when the toolbar needs some display information +TBN_GETDISPINFOW = (TBN_FIRST - 17) # This is sent when the toolbar needs some display information +TBN_GETINFOTIPA = (TBN_FIRST - 18) +TBN_GETINFOTIPW = (TBN_FIRST - 19) +TBN_GETINFOTIP = TBN_GETINFOTIPA +TBNF_IMAGE = 1 +TBNF_TEXT = 2 +TBNF_DI_SETITEM = 268435456 +TBN_GETDISPINFO = TBN_GETDISPINFOA +TBDDRET_DEFAULT = 0 +TBDDRET_NODEFAULT = 1 +TBDDRET_TREATPRESSED = 2 # Treat as a standard press button +TBN_GETBUTTONINFO = TBN_GETBUTTONINFOA +REBARCLASSNAMEA = "ReBarWindow32" +REBARCLASSNAME = REBARCLASSNAMEA +RBIM_IMAGELIST = 1 +RBS_TOOLTIPS = 256 +RBS_VARHEIGHT = 512 +RBS_BANDBORDERS = 1024 +RBS_FIXEDORDER = 2048 +RBS_REGISTERDROP = 4096 +RBS_AUTOSIZE = 8192 +RBS_VERTICALGRIPPER = 16384 # this always has the vertical gripper (default for horizontal mode) +RBS_DBLCLKTOGGLE = 32768 +RBS_TOOLTIPS = 256 +RBS_VARHEIGHT = 512 +RBS_BANDBORDERS = 1024 +RBS_FIXEDORDER = 2048 +RBBS_BREAK = 1 # break to new line +RBBS_FIXEDSIZE = 2 # band can't be sized +RBBS_CHILDEDGE = 4 # edge around top & bottom of child window +RBBS_HIDDEN = 8 # don't show +RBBS_NOVERT = 16 # don't show when vertical +RBBS_FIXEDBMP = 32 # bitmap doesn't move during band resize +RBBS_VARIABLEHEIGHT = 64 # allow autosizing of this child vertically +RBBS_GRIPPERALWAYS = 128 # always show the gripper +RBBS_NOGRIPPER = 256 # never show the gripper +RBBIM_STYLE = 1 +RBBIM_COLORS = 2 +RBBIM_TEXT = 4 +RBBIM_IMAGE = 8 +RBBIM_CHILD = 16 +RBBIM_CHILDSIZE = 32 +RBBIM_SIZE = 64 +RBBIM_BACKGROUND = 128 +RBBIM_ID = 256 +RBBIM_IDEALSIZE = 512 +RBBIM_LPARAM = 1024 +RB_INSERTBANDA = (WM_USER + 1) +RB_DELETEBAND = (WM_USER + 2) +RB_GETBARINFO = (WM_USER + 
3) +RB_SETBARINFO = (WM_USER + 4) +RB_GETBANDINFO = (WM_USER + 5) +RB_SETBANDINFOA = (WM_USER + 6) +RB_SETPARENT = (WM_USER + 7) +RB_HITTEST = (WM_USER + 8) +RB_GETRECT = (WM_USER + 9) +RB_INSERTBANDW = (WM_USER + 10) +RB_SETBANDINFOW = (WM_USER + 11) +RB_GETBANDCOUNT = (WM_USER + 12) +RB_GETROWCOUNT = (WM_USER + 13) +RB_GETROWHEIGHT = (WM_USER + 14) +RB_IDTOINDEX = (WM_USER + 16) # wParam == id +RB_GETTOOLTIPS = (WM_USER + 17) +RB_SETTOOLTIPS = (WM_USER + 18) +RB_SETBKCOLOR = (WM_USER + 19) # sets the default BK color +RB_GETBKCOLOR = (WM_USER + 20) # defaults to CLR_NONE +RB_SETTEXTCOLOR = (WM_USER + 21) +RB_GETTEXTCOLOR = (WM_USER + 22) # defaults to 0x00000000 +RB_SIZETORECT = (WM_USER + 23) # resize the rebar/break bands and such to this rect (lparam) +RB_SETCOLORSCHEME = CCM_SETCOLORSCHEME # lParam is color scheme +RB_GETCOLORSCHEME = CCM_GETCOLORSCHEME # fills in COLORSCHEME pointed to by lParam +RB_INSERTBAND = RB_INSERTBANDA +RB_SETBANDINFO = RB_SETBANDINFOA +RB_BEGINDRAG = (WM_USER + 24) +RB_ENDDRAG = (WM_USER + 25) +RB_DRAGMOVE = (WM_USER + 26) +RB_GETBARHEIGHT = (WM_USER + 27) +RB_GETBANDINFOW = (WM_USER + 28) +RB_GETBANDINFOA = (WM_USER + 29) +RB_GETBANDINFO = RB_GETBANDINFOA +RB_MINIMIZEBAND = (WM_USER + 30) +RB_MAXIMIZEBAND = (WM_USER + 31) +RB_GETDROPTARGET = (CCM_GETDROPTARGET) +RB_GETBANDBORDERS = (WM_USER + 34) # returns in lparam = lprc the amount of edges added to band wparam +RB_SHOWBAND = (WM_USER + 35) # show/hide band +RB_SETPALETTE = (WM_USER + 37) +RB_GETPALETTE = (WM_USER + 38) +RB_MOVEBAND = (WM_USER + 39) +RB_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +RB_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +RBN_HEIGHTCHANGE = (RBN_FIRST - 0) +RBN_GETOBJECT = (RBN_FIRST - 1) +RBN_LAYOUTCHANGED = (RBN_FIRST - 2) +RBN_AUTOSIZE = (RBN_FIRST - 3) +RBN_BEGINDRAG = (RBN_FIRST - 4) +RBN_ENDDRAG = (RBN_FIRST - 5) +RBN_DELETINGBAND = (RBN_FIRST - 6) # Uses NMREBAR +RBN_DELETEDBAND = (RBN_FIRST - 7) # Uses NMREBAR +RBN_CHILDSIZE = (RBN_FIRST - 8) +RBNM_ID = 1 +RBNM_STYLE = 2 +RBNM_LPARAM = 4 +RBHT_NOWHERE = 1 +RBHT_CAPTION = 2 +RBHT_CLIENT = 3 +RBHT_GRABBER = 4 +TOOLTIPS_CLASSA = "tooltips_class32" +TOOLTIPS_CLASS = TOOLTIPS_CLASSA +TTS_ALWAYSTIP = 1 +TTS_NOPREFIX = 2 +TTF_IDISHWND = 1 +TTF_CENTERTIP = 2 +TTF_RTLREADING = 4 +TTF_SUBCLASS = 16 +TTF_TRACK = 32 +TTF_ABSOLUTE = 128 +TTF_TRANSPARENT = 256 +TTF_DI_SETITEM = 32768 # valid only on the TTN_NEEDTEXT callback +TTDT_AUTOMATIC = 0 +TTDT_RESHOW = 1 +TTDT_AUTOPOP = 2 +TTDT_INITIAL = 3 +TTM_ACTIVATE = (WM_USER + 1) +TTM_SETDELAYTIME = (WM_USER + 3) +TTM_ADDTOOLA = (WM_USER + 4) +TTM_ADDTOOLW = (WM_USER + 50) +TTM_DELTOOLA = (WM_USER + 5) +TTM_DELTOOLW = (WM_USER + 51) +TTM_NEWTOOLRECTA = (WM_USER + 6) +TTM_NEWTOOLRECTW = (WM_USER + 52) +TTM_RELAYEVENT = (WM_USER + 7) +TTM_GETTOOLINFOA = (WM_USER + 8) +TTM_GETTOOLINFOW = (WM_USER + 53) +TTM_SETTOOLINFOA = (WM_USER + 9) +TTM_SETTOOLINFOW = (WM_USER + 54) +TTM_HITTESTA = (WM_USER +10) +TTM_HITTESTW = (WM_USER +55) +TTM_GETTEXTA = (WM_USER +11) +TTM_GETTEXTW = (WM_USER +56) +TTM_UPDATETIPTEXTA = (WM_USER +12) +TTM_UPDATETIPTEXTW = (WM_USER +57) +TTM_GETTOOLCOUNT = (WM_USER +13) +TTM_ENUMTOOLSA = (WM_USER +14) +TTM_ENUMTOOLSW = (WM_USER +58) +TTM_GETCURRENTTOOLA = (WM_USER + 15) +TTM_GETCURRENTTOOLW = (WM_USER + 59) +TTM_WINDOWFROMPOINT = (WM_USER + 16) +TTM_TRACKACTIVATE = (WM_USER + 17) # wParam = TRUE/FALSE start end lparam = LPTOOLINFO +TTM_TRACKPOSITION = (WM_USER + 18) # lParam = dwPos +TTM_SETTIPBKCOLOR = (WM_USER + 19) +TTM_SETTIPTEXTCOLOR = (WM_USER + 20) +TTM_GETDELAYTIME = 
(WM_USER + 21) +TTM_GETTIPBKCOLOR = (WM_USER + 22) +TTM_GETTIPTEXTCOLOR = (WM_USER + 23) +TTM_SETMAXTIPWIDTH = (WM_USER + 24) +TTM_GETMAXTIPWIDTH = (WM_USER + 25) +TTM_SETMARGIN = (WM_USER + 26) # lParam = lprc +TTM_GETMARGIN = (WM_USER + 27) # lParam = lprc +TTM_POP = (WM_USER + 28) +TTM_UPDATE = (WM_USER + 29) +TTM_ADDTOOL = TTM_ADDTOOLA +TTM_DELTOOL = TTM_DELTOOLA +TTM_NEWTOOLRECT = TTM_NEWTOOLRECTA +TTM_GETTOOLINFO = TTM_GETTOOLINFOA +TTM_SETTOOLINFO = TTM_SETTOOLINFOA +TTM_HITTEST = TTM_HITTESTA +TTM_GETTEXT = TTM_GETTEXTA +TTM_UPDATETIPTEXT = TTM_UPDATETIPTEXTA +TTM_ENUMTOOLS = TTM_ENUMTOOLSA +TTM_GETCURRENTTOOL = TTM_GETCURRENTTOOLA +TTN_GETDISPINFOA = (TTN_FIRST - 0) +TTN_GETDISPINFOW = (TTN_FIRST - 10) +TTN_SHOW = (TTN_FIRST - 1) +TTN_POP = (TTN_FIRST - 2) +TTN_GETDISPINFO = TTN_GETDISPINFOA +TTN_NEEDTEXT = TTN_GETDISPINFO +TTN_NEEDTEXTA = TTN_GETDISPINFOA +TTN_NEEDTEXTW = TTN_GETDISPINFOW +SBARS_SIZEGRIP = 256 +SBARS_TOOLTIPS = 2048 +STATUSCLASSNAMEA = "msctls_statusbar32" +STATUSCLASSNAME = STATUSCLASSNAMEA +SB_SETTEXTA = (WM_USER+1) +SB_SETTEXTW = (WM_USER+11) +SB_GETTEXTA = (WM_USER+2) +SB_GETTEXTW = (WM_USER+13) +SB_GETTEXTLENGTHA = (WM_USER+3) +SB_GETTEXTLENGTHW = (WM_USER+12) +SB_GETTEXT = SB_GETTEXTA +SB_SETTEXT = SB_SETTEXTA +SB_GETTEXTLENGTH = SB_GETTEXTLENGTHA +SB_SETPARTS = (WM_USER+4) +SB_GETPARTS = (WM_USER+6) +SB_GETBORDERS = (WM_USER+7) +SB_SETMINHEIGHT = (WM_USER+8) +SB_SIMPLE = (WM_USER+9) +SB_GETRECT = (WM_USER+10) +SB_ISSIMPLE = (WM_USER+14) +SB_SETICON = (WM_USER+15) +SB_SETTIPTEXTA = (WM_USER+16) +SB_SETTIPTEXTW = (WM_USER+17) +SB_GETTIPTEXTA = (WM_USER+18) +SB_GETTIPTEXTW = (WM_USER+19) +SB_GETICON = (WM_USER+20) +SB_SETTIPTEXT = SB_SETTIPTEXTA +SB_GETTIPTEXT = SB_GETTIPTEXTA +SB_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +SB_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +SBT_OWNERDRAW = 4096 +SBT_NOBORDERS = 256 +SBT_POPOUT = 512 +SBT_RTLREADING = 1024 +SBT_NOTABPARSING = 2048 +SBT_TOOLTIPS = 2048 +SB_SETBKCOLOR = CCM_SETBKCOLOR # lParam = bkColor +SBN_SIMPLEMODECHANGE = (SBN_FIRST - 0) +TRACKBAR_CLASSA = "msctls_trackbar32" +TRACKBAR_CLASS = TRACKBAR_CLASSA +TBS_AUTOTICKS = 1 +TBS_VERT = 2 +TBS_HORZ = 0 +TBS_TOP = 4 +TBS_BOTTOM = 0 +TBS_LEFT = 4 +TBS_RIGHT = 0 +TBS_BOTH = 8 +TBS_NOTICKS = 16 +TBS_ENABLESELRANGE = 32 +TBS_FIXEDLENGTH = 64 +TBS_NOTHUMB = 128 +TBS_TOOLTIPS = 256 +TBM_GETPOS = (WM_USER) +TBM_GETRANGEMIN = (WM_USER+1) +TBM_GETRANGEMAX = (WM_USER+2) +TBM_GETTIC = (WM_USER+3) +TBM_SETTIC = (WM_USER+4) +TBM_SETPOS = (WM_USER+5) +TBM_SETRANGE = (WM_USER+6) +TBM_SETRANGEMIN = (WM_USER+7) +TBM_SETRANGEMAX = (WM_USER+8) +TBM_CLEARTICS = (WM_USER+9) +TBM_SETSEL = (WM_USER+10) +TBM_SETSELSTART = (WM_USER+11) +TBM_SETSELEND = (WM_USER+12) +TBM_GETPTICS = (WM_USER+14) +TBM_GETTICPOS = (WM_USER+15) +TBM_GETNUMTICS = (WM_USER+16) +TBM_GETSELSTART = (WM_USER+17) +TBM_GETSELEND = (WM_USER+18) +TBM_CLEARSEL = (WM_USER+19) +TBM_SETTICFREQ = (WM_USER+20) +TBM_SETPAGESIZE = (WM_USER+21) +TBM_GETPAGESIZE = (WM_USER+22) +TBM_SETLINESIZE = (WM_USER+23) +TBM_GETLINESIZE = (WM_USER+24) +TBM_GETTHUMBRECT = (WM_USER+25) +TBM_GETCHANNELRECT = (WM_USER+26) +TBM_SETTHUMBLENGTH = (WM_USER+27) +TBM_GETTHUMBLENGTH = (WM_USER+28) +TBM_SETTOOLTIPS = (WM_USER+29) +TBM_GETTOOLTIPS = (WM_USER+30) +TBM_SETTIPSIDE = (WM_USER+31) +TBTS_TOP = 0 +TBTS_LEFT = 1 +TBTS_BOTTOM = 2 +TBTS_RIGHT = 3 +TBM_SETBUDDY = (WM_USER+32) # wparam = BOOL fLeft; (or right) +TBM_GETBUDDY = (WM_USER+33) # wparam = BOOL fLeft; (or right) +TBM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +TBM_GETUNICODEFORMAT = 
CCM_GETUNICODEFORMAT +TB_LINEUP = 0 +TB_LINEDOWN = 1 +TB_PAGEUP = 2 +TB_PAGEDOWN = 3 +TB_THUMBPOSITION = 4 +TB_THUMBTRACK = 5 +TB_TOP = 6 +TB_BOTTOM = 7 +TB_ENDTRACK = 8 +TBCD_TICS = 1 +TBCD_THUMB = 2 +TBCD_CHANNEL = 3 +DL_BEGINDRAG = (WM_USER+133) +DL_DRAGGING = (WM_USER+134) +DL_DROPPED = (WM_USER+135) +DL_CANCELDRAG = (WM_USER+136) +DL_CURSORSET = 0 +DL_STOPCURSOR = 1 +DL_COPYCURSOR = 2 +DL_MOVECURSOR = 3 +DRAGLISTMSGSTRING = "commctrl_DragListMsg" +UPDOWN_CLASSA = "msctls_updown32" +UPDOWN_CLASS = UPDOWN_CLASSA +UD_MAXVAL = 32767 +UD_MINVAL = (-UD_MAXVAL) +UDS_WRAP = 1 +UDS_SETBUDDYINT = 2 +UDS_ALIGNRIGHT = 4 +UDS_ALIGNLEFT = 8 +UDS_AUTOBUDDY = 16 +UDS_ARROWKEYS = 32 +UDS_HORZ = 64 +UDS_NOTHOUSANDS = 128 +UDS_HOTTRACK = 256 +UDM_SETRANGE = (WM_USER+101) +UDM_GETRANGE = (WM_USER+102) +UDM_SETPOS = (WM_USER+103) +UDM_GETPOS = (WM_USER+104) +UDM_SETBUDDY = (WM_USER+105) +UDM_GETBUDDY = (WM_USER+106) +UDM_SETACCEL = (WM_USER+107) +UDM_GETACCEL = (WM_USER+108) +UDM_SETBASE = (WM_USER+109) +UDM_GETBASE = (WM_USER+110) +UDM_SETRANGE32 = (WM_USER+111) +UDM_GETRANGE32 = (WM_USER+112) # wParam & lParam are LPINT +UDM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +UDM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +UDN_DELTAPOS = (UDN_FIRST - 1) +PROGRESS_CLASSA = "msctls_progress32" +PROGRESS_CLASS = PROGRESS_CLASSA +PBS_SMOOTH = 1 +PBS_VERTICAL = 4 +PBM_SETRANGE = (WM_USER+1) +PBM_SETPOS = (WM_USER+2) +PBM_DELTAPOS = (WM_USER+3) +PBM_SETSTEP = (WM_USER+4) +PBM_STEPIT = (WM_USER+5) +PBM_SETRANGE32 = (WM_USER+6) # lParam = high, wParam = low +PBM_GETRANGE = (WM_USER+7) # wParam = return (TRUE ? low : high). lParam = PPBRANGE or NULL +PBM_GETPOS = (WM_USER+8) +PBM_SETBARCOLOR = (WM_USER+9) # lParam = bar color +PBM_SETBKCOLOR = CCM_SETBKCOLOR # lParam = bkColor +HOTKEYF_SHIFT = 1 +HOTKEYF_CONTROL = 2 +HOTKEYF_ALT = 4 +HOTKEYF_EXT = 128 +HOTKEYF_EXT = 8 +HKCOMB_NONE = 1 +HKCOMB_S = 2 +HKCOMB_C = 4 +HKCOMB_A = 8 +HKCOMB_SC = 16 +HKCOMB_SA = 32 +HKCOMB_CA = 64 +HKCOMB_SCA = 128 +HKM_SETHOTKEY = (WM_USER+1) +HKM_GETHOTKEY = (WM_USER+2) +HKM_SETRULES = (WM_USER+3) +HOTKEY_CLASSA = "msctls_hotkey32" +HOTKEY_CLASS = HOTKEY_CLASSA +CCS_TOP = 0x00000001 +CCS_NOMOVEY = 0x00000002 +CCS_BOTTOM = 0x00000003 +CCS_NORESIZE = 0x00000004 +CCS_NOPARENTALIGN = 0x00000008 +CCS_ADJUSTABLE = 0x00000020 +CCS_NODIVIDER = 0x00000040 +CCS_VERT = 0x00000080 +CCS_LEFT = (CCS_VERT | CCS_TOP) +CCS_RIGHT = (CCS_VERT | CCS_BOTTOM) +CCS_NOMOVEX = (CCS_VERT | CCS_NOMOVEY) +WC_LISTVIEWA = "SysListView32" +WC_LISTVIEW = WC_LISTVIEWA +LVS_ICON = 0 +LVS_REPORT = 1 +LVS_SMALLICON = 2 +LVS_LIST = 3 +LVS_TYPEMASK = 3 +LVS_SINGLESEL = 4 +LVS_SHOWSELALWAYS = 8 +LVS_SORTASCENDING = 16 +LVS_SORTDESCENDING = 32 +LVS_SHAREIMAGELISTS = 64 +LVS_NOLABELWRAP = 128 +LVS_AUTOARRANGE = 256 +LVS_EDITLABELS = 512 +LVS_OWNERDATA = 4096 +LVS_NOSCROLL = 8192 +LVS_TYPESTYLEMASK = 64512 +LVS_ALIGNTOP = 0 +LVS_ALIGNLEFT = 2048 +LVS_ALIGNMASK = 3072 +LVS_OWNERDRAWFIXED = 1024 +LVS_NOCOLUMNHEADER = 16384 +LVS_NOSORTHEADER = 32768 +LVM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +LVM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +LVM_GETBKCOLOR = (LVM_FIRST + 0) +LVM_SETBKCOLOR = (LVM_FIRST + 1) +LVM_GETIMAGELIST = (LVM_FIRST + 2) +LVSIL_NORMAL = 0 +LVSIL_SMALL = 1 +LVSIL_STATE = 2 +LVM_SETIMAGELIST = (LVM_FIRST + 3) +LVM_GETITEMCOUNT = (LVM_FIRST + 4) +LVIF_TEXT = 1 +LVIF_IMAGE = 2 +LVIF_PARAM = 4 +LVIF_STATE = 8 +LVIF_INDENT = 16 +LVIF_NORECOMPUTE = 2048 +LVIS_FOCUSED = 1 +LVIS_SELECTED = 2 +LVIS_CUT = 4 +LVIS_DROPHILITED = 8 +LVIS_ACTIVATING = 32 +LVIS_OVERLAYMASK = 3840 
+LVIS_STATEIMAGEMASK = 61440 +I_INDENTCALLBACK = (-1) +LPSTR_TEXTCALLBACKA = -1 +LPSTR_TEXTCALLBACK = LPSTR_TEXTCALLBACKA +I_IMAGECALLBACK = (-1) +LVM_GETITEMA = (LVM_FIRST + 5) +LVM_GETITEMW = (LVM_FIRST + 75) +LVM_GETITEM = LVM_GETITEMW +LVM_GETITEM = LVM_GETITEMA +LVM_SETITEMA = (LVM_FIRST + 6) +LVM_SETITEMW = (LVM_FIRST + 76) +LVM_SETITEM = LVM_SETITEMW +LVM_SETITEM = LVM_SETITEMA +LVM_INSERTITEMA = (LVM_FIRST + 7) +LVM_INSERTITEMW = (LVM_FIRST + 77) +LVM_INSERTITEM = LVM_INSERTITEMA +LVM_DELETEITEM = (LVM_FIRST + 8) +LVM_DELETEALLITEMS = (LVM_FIRST + 9) +LVM_GETCALLBACKMASK = (LVM_FIRST + 10) +LVM_SETCALLBACKMASK = (LVM_FIRST + 11) +LVNI_ALL = 0 +LVNI_FOCUSED = 1 +LVNI_SELECTED = 2 +LVNI_CUT = 4 +LVNI_DROPHILITED = 8 +LVNI_ABOVE = 256 +LVNI_BELOW = 512 +LVNI_TOLEFT = 1024 +LVNI_TORIGHT = 2048 +LVM_GETNEXTITEM = (LVM_FIRST + 12) +LVFI_PARAM = 1 +LVFI_STRING = 2 +LVFI_PARTIAL = 8 +LVFI_WRAP = 32 +LVFI_NEARESTXY = 64 +LVM_FINDITEMA = (LVM_FIRST + 13) +LVM_FINDITEMW = (LVM_FIRST + 83) +LVM_FINDITEM = LVM_FINDITEMA +LVIR_BOUNDS = 0 +LVIR_ICON = 1 +LVIR_LABEL = 2 +LVIR_SELECTBOUNDS = 3 +LVM_GETITEMRECT = (LVM_FIRST + 14) +LVM_SETITEMPOSITION = (LVM_FIRST + 15) +LVM_GETITEMPOSITION = (LVM_FIRST + 16) +LVM_GETSTRINGWIDTHA = (LVM_FIRST + 17) +LVM_GETSTRINGWIDTHW = (LVM_FIRST + 87) +LVM_GETSTRINGWIDTH = LVM_GETSTRINGWIDTHA +LVHT_NOWHERE = 1 +LVHT_ONITEMICON = 2 +LVHT_ONITEMLABEL = 4 +LVHT_ONITEMSTATEICON = 8 +LVHT_ONITEM = (LVHT_ONITEMICON | LVHT_ONITEMLABEL | LVHT_ONITEMSTATEICON) +LVHT_ABOVE = 8 +LVHT_BELOW = 16 +LVHT_TORIGHT = 32 +LVHT_TOLEFT = 64 +LVM_HITTEST = (LVM_FIRST + 18) +LVM_ENSUREVISIBLE = (LVM_FIRST + 19) +LVM_SCROLL = (LVM_FIRST + 20) +LVM_REDRAWITEMS = (LVM_FIRST + 21) +LVA_DEFAULT = 0 +LVA_ALIGNLEFT = 1 +LVA_ALIGNTOP = 2 +LVA_SNAPTOGRID = 5 +LVM_ARRANGE = (LVM_FIRST + 22) +LVM_EDITLABELA = (LVM_FIRST + 23) +LVM_EDITLABELW = (LVM_FIRST + 118) +LVM_EDITLABEL = LVM_EDITLABELW +LVM_EDITLABEL = LVM_EDITLABELA +LVM_GETEDITCONTROL = (LVM_FIRST + 24) +LVCF_FMT = 1 +LVCF_WIDTH = 2 +LVCF_TEXT = 4 +LVCF_SUBITEM = 8 +LVCF_IMAGE = 16 +LVCF_ORDER = 32 +LVCFMT_LEFT = 0 +LVCFMT_RIGHT = 1 +LVCFMT_CENTER = 2 +LVCFMT_JUSTIFYMASK = 3 +LVCFMT_IMAGE = 2048 +LVCFMT_BITMAP_ON_RIGHT = 4096 +LVCFMT_COL_HAS_IMAGES = 32768 +LVM_GETCOLUMNA = (LVM_FIRST + 25) +LVM_GETCOLUMNW = (LVM_FIRST + 95) +LVM_GETCOLUMN = LVM_GETCOLUMNA +LVM_SETCOLUMNA = (LVM_FIRST + 26) +LVM_SETCOLUMNW = (LVM_FIRST + 96) +LVM_SETCOLUMN = LVM_SETCOLUMNA +LVM_INSERTCOLUMNA = (LVM_FIRST + 27) +LVM_INSERTCOLUMNW = (LVM_FIRST + 97) +LVM_INSERTCOLUMN = LVM_INSERTCOLUMNA +LVM_DELETECOLUMN = (LVM_FIRST + 28) +LVM_GETCOLUMNWIDTH = (LVM_FIRST + 29) +LVSCW_AUTOSIZE = -1 +LVSCW_AUTOSIZE_USEHEADER = -2 +LVM_SETCOLUMNWIDTH = (LVM_FIRST + 30) +LVM_GETHEADER = (LVM_FIRST + 31) +LVM_CREATEDRAGIMAGE = (LVM_FIRST + 33) +LVM_GETVIEWRECT = (LVM_FIRST + 34) +LVM_GETTEXTCOLOR = (LVM_FIRST + 35) +LVM_SETTEXTCOLOR = (LVM_FIRST + 36) +LVM_GETTEXTBKCOLOR = (LVM_FIRST + 37) +LVM_SETTEXTBKCOLOR = (LVM_FIRST + 38) +LVM_GETTOPINDEX = (LVM_FIRST + 39) +LVM_GETCOUNTPERPAGE = (LVM_FIRST + 40) +LVM_GETORIGIN = (LVM_FIRST + 41) +LVM_UPDATE = (LVM_FIRST + 42) +LVM_SETITEMSTATE = (LVM_FIRST + 43) +LVM_GETITEMSTATE = (LVM_FIRST + 44) +LVM_GETITEMTEXTA = (LVM_FIRST + 45) +LVM_GETITEMTEXTW = (LVM_FIRST + 115) +LVM_GETITEMTEXT = LVM_GETITEMTEXTW +LVM_GETITEMTEXT = LVM_GETITEMTEXTA +LVM_SETITEMTEXTA = (LVM_FIRST + 46) +LVM_SETITEMTEXTW = (LVM_FIRST + 116) +LVM_SETITEMTEXT = LVM_SETITEMTEXTW +LVM_SETITEMTEXT = LVM_SETITEMTEXTA +LVSICF_NOINVALIDATEALL = 1 +LVSICF_NOSCROLL = 2 
+LVM_SETITEMCOUNT = (LVM_FIRST + 47) +LVM_SORTITEMS = (LVM_FIRST + 48) +LVM_SETITEMPOSITION32 = (LVM_FIRST + 49) +LVM_GETSELECTEDCOUNT = (LVM_FIRST + 50) +LVM_GETITEMSPACING = (LVM_FIRST + 51) +LVM_GETISEARCHSTRINGA = (LVM_FIRST + 52) +LVM_GETISEARCHSTRINGW = (LVM_FIRST + 117) +LVM_GETISEARCHSTRING = LVM_GETISEARCHSTRINGA +LVM_SETICONSPACING = (LVM_FIRST + 53) +LVM_SETEXTENDEDLISTVIEWSTYLE = (LVM_FIRST + 54) # optional wParam == mask +LVM_GETEXTENDEDLISTVIEWSTYLE = (LVM_FIRST + 55) +LVS_EX_GRIDLINES = 1 +LVS_EX_SUBITEMIMAGES = 2 +LVS_EX_CHECKBOXES = 4 +LVS_EX_TRACKSELECT = 8 +LVS_EX_HEADERDRAGDROP = 16 +LVS_EX_FULLROWSELECT = 32 # applies to report mode only +LVS_EX_ONECLICKACTIVATE = 64 +LVS_EX_TWOCLICKACTIVATE = 128 +LVS_EX_FLATSB = 256 +LVS_EX_REGIONAL = 512 +LVS_EX_INFOTIP = 1024 # listview does InfoTips for you +LVS_EX_UNDERLINEHOT = 2048 +LVS_EX_UNDERLINECOLD = 4096 +LVS_EX_MULTIWORKAREAS = 8192 +LVM_GETSUBITEMRECT = (LVM_FIRST + 56) +LVM_SUBITEMHITTEST = (LVM_FIRST + 57) +LVM_SETCOLUMNORDERARRAY = (LVM_FIRST + 58) +LVM_GETCOLUMNORDERARRAY = (LVM_FIRST + 59) +LVM_SETHOTITEM = (LVM_FIRST + 60) +LVM_GETHOTITEM = (LVM_FIRST + 61) +LVM_SETHOTCURSOR = (LVM_FIRST + 62) +LVM_GETHOTCURSOR = (LVM_FIRST + 63) +LVM_APPROXIMATEVIEWRECT = (LVM_FIRST + 64) +LV_MAX_WORKAREAS = 16 +LVM_SETWORKAREAS = (LVM_FIRST + 65) +LVM_GETWORKAREAS = (LVM_FIRST + 70) +LVM_GETNUMBEROFWORKAREAS = (LVM_FIRST + 73) +LVM_GETSELECTIONMARK = (LVM_FIRST + 66) +LVM_SETSELECTIONMARK = (LVM_FIRST + 67) +LVM_SETHOVERTIME = (LVM_FIRST + 71) +LVM_GETHOVERTIME = (LVM_FIRST + 72) +LVM_SETTOOLTIPS = (LVM_FIRST + 74) +LVM_GETTOOLTIPS = (LVM_FIRST + 78) +LVBKIF_SOURCE_NONE = 0 +LVBKIF_SOURCE_HBITMAP = 1 +LVBKIF_SOURCE_URL = 2 +LVBKIF_SOURCE_MASK = 3 +LVBKIF_STYLE_NORMAL = 0 +LVBKIF_STYLE_TILE = 16 +LVBKIF_STYLE_MASK = 16 +LVM_SETBKIMAGEA = (LVM_FIRST + 68) +LVM_SETBKIMAGEW = (LVM_FIRST + 138) +LVM_GETBKIMAGEA = (LVM_FIRST + 69) +LVM_GETBKIMAGEW = (LVM_FIRST + 139) +LVKF_ALT = 1 +LVKF_CONTROL = 2 +LVKF_SHIFT = 4 +LVN_ITEMCHANGING = (LVN_FIRST-0) +LVN_ITEMCHANGED = (LVN_FIRST-1) +LVN_INSERTITEM = (LVN_FIRST-2) +LVN_DELETEITEM = (LVN_FIRST-3) +LVN_DELETEALLITEMS = (LVN_FIRST-4) +LVN_BEGINLABELEDITA = (LVN_FIRST-5) +LVN_BEGINLABELEDITW = (LVN_FIRST-75) +LVN_ENDLABELEDITA = (LVN_FIRST-6) +LVN_ENDLABELEDITW = (LVN_FIRST-76) +LVN_COLUMNCLICK = (LVN_FIRST-8) +LVN_BEGINDRAG = (LVN_FIRST-9) +LVN_BEGINRDRAG = (LVN_FIRST-11) +LVN_ODCACHEHINT = (LVN_FIRST-13) +LVN_ODFINDITEMA = (LVN_FIRST-52) +LVN_ODFINDITEMW = (LVN_FIRST-79) +LVN_ITEMACTIVATE = (LVN_FIRST-14) +LVN_ODSTATECHANGED = (LVN_FIRST-15) +LVN_ODFINDITEM = LVN_ODFINDITEMA +LVN_HOTTRACK = (LVN_FIRST-21) +LVN_GETDISPINFOA = (LVN_FIRST-50) +LVN_GETDISPINFOW = (LVN_FIRST-77) +LVN_SETDISPINFOA = (LVN_FIRST-51) +LVN_SETDISPINFOW = (LVN_FIRST-78) +LVN_BEGINLABELEDIT = LVN_BEGINLABELEDITA +LVN_ENDLABELEDIT = LVN_ENDLABELEDITA +LVN_GETDISPINFO = LVN_GETDISPINFOA +LVN_SETDISPINFO = LVN_SETDISPINFOA +LVIF_DI_SETITEM = 4096 +LVN_KEYDOWN = (LVN_FIRST-55) +LVN_MARQUEEBEGIN = (LVN_FIRST-56) +LVGIT_UNFOLDED = 1 +LVN_GETINFOTIPA = (LVN_FIRST-57) +LVN_GETINFOTIPW = (LVN_FIRST-58) +LVN_GETINFOTIP = LVN_GETINFOTIPA +WC_TREEVIEWA = "SysTreeView32" +WC_TREEVIEW = WC_TREEVIEWA +TVS_HASBUTTONS = 1 +TVS_HASLINES = 2 +TVS_LINESATROOT = 4 +TVS_EDITLABELS = 8 +TVS_DISABLEDRAGDROP = 16 +TVS_SHOWSELALWAYS = 32 +TVS_RTLREADING = 64 +TVS_NOTOOLTIPS = 128 +TVS_CHECKBOXES = 256 +TVS_TRACKSELECT = 512 +TVS_SINGLEEXPAND = 1024 +TVS_INFOTIP = 2048 +TVS_FULLROWSELECT = 4096 +TVS_NOSCROLL = 8192 +TVS_NONEVENHEIGHT = 16384 
+TVIF_TEXT = 1 +TVIF_IMAGE = 2 +TVIF_PARAM = 4 +TVIF_STATE = 8 +TVIF_HANDLE = 16 +TVIF_SELECTEDIMAGE = 32 +TVIF_CHILDREN = 64 +TVIF_INTEGRAL = 128 +TVIS_SELECTED = 2 +TVIS_CUT = 4 +TVIS_DROPHILITED = 8 +TVIS_BOLD = 16 +TVIS_EXPANDED = 32 +TVIS_EXPANDEDONCE = 64 +TVIS_EXPANDPARTIAL = 128 +TVIS_OVERLAYMASK = 3840 +TVIS_STATEIMAGEMASK = 61440 +TVIS_USERMASK = 61440 +I_CHILDRENCALLBACK = (-1) +TVI_ROOT = -65536 +TVI_FIRST = -65535 +TVI_LAST = -65534 +TVI_SORT = -65533 +TVM_INSERTITEMA = (TV_FIRST + 0) +TVM_INSERTITEMW = (TV_FIRST + 50) +TVM_INSERTITEM = TVM_INSERTITEMW +TVM_INSERTITEM = TVM_INSERTITEMA +TVM_DELETEITEM = (TV_FIRST + 1) +TVM_EXPAND = (TV_FIRST + 2) +TVE_COLLAPSE = 1 +TVE_EXPAND = 2 +TVE_TOGGLE = 3 +TVE_EXPANDPARTIAL = 16384 +TVE_COLLAPSERESET = 32768 +TVM_GETITEMRECT = (TV_FIRST + 4) +TVM_GETCOUNT = (TV_FIRST + 5) +TVM_GETINDENT = (TV_FIRST + 6) +TVM_SETINDENT = (TV_FIRST + 7) +TVM_GETIMAGELIST = (TV_FIRST + 8) +TVSIL_NORMAL = 0 +TVSIL_STATE = 2 +TVM_SETIMAGELIST = (TV_FIRST + 9) +TVM_GETNEXTITEM = (TV_FIRST + 10) +TVGN_ROOT = 0 +TVGN_NEXT = 1 +TVGN_PREVIOUS = 2 +TVGN_PARENT = 3 +TVGN_CHILD = 4 +TVGN_FIRSTVISIBLE = 5 +TVGN_NEXTVISIBLE = 6 +TVGN_PREVIOUSVISIBLE = 7 +TVGN_DROPHILITE = 8 +TVGN_CARET = 9 +TVGN_LASTVISIBLE = 10 +TVM_SELECTITEM = (TV_FIRST + 11) +TVM_GETITEMA = (TV_FIRST + 12) +TVM_GETITEMW = (TV_FIRST + 62) +TVM_GETITEM = TVM_GETITEMW +TVM_GETITEM = TVM_GETITEMA +TVM_SETITEMA = (TV_FIRST + 13) +TVM_SETITEMW = (TV_FIRST + 63) +TVM_SETITEM = TVM_SETITEMW +TVM_SETITEM = TVM_SETITEMA +TVM_EDITLABELA = (TV_FIRST + 14) +TVM_EDITLABELW = (TV_FIRST + 65) +TVM_EDITLABEL = TVM_EDITLABELW +TVM_EDITLABEL = TVM_EDITLABELA +TVM_GETEDITCONTROL = (TV_FIRST + 15) +TVM_GETVISIBLECOUNT = (TV_FIRST + 16) +TVM_HITTEST = (TV_FIRST + 17) +TVHT_NOWHERE = 1 +TVHT_ONITEMICON = 2 +TVHT_ONITEMLABEL = 4 +TVHT_ONITEMINDENT = 8 +TVHT_ONITEMBUTTON = 16 +TVHT_ONITEMRIGHT = 32 +TVHT_ONITEMSTATEICON = 64 +TVHT_ABOVE = 256 +TVHT_BELOW = 512 +TVHT_TORIGHT = 1024 +TVHT_TOLEFT = 2048 +TVHT_ONITEM = (TVHT_ONITEMICON | TVHT_ONITEMLABEL | TVHT_ONITEMSTATEICON) +TVM_CREATEDRAGIMAGE = (TV_FIRST + 18) +TVM_SORTCHILDREN = (TV_FIRST + 19) +TVM_ENSUREVISIBLE = (TV_FIRST + 20) +TVM_SORTCHILDRENCB = (TV_FIRST + 21) +TVM_ENDEDITLABELNOW = (TV_FIRST + 22) +TVM_GETISEARCHSTRINGA = (TV_FIRST + 23) +TVM_GETISEARCHSTRINGW = (TV_FIRST + 64) +TVM_GETISEARCHSTRING = TVM_GETISEARCHSTRINGA +TVM_SETTOOLTIPS = (TV_FIRST + 24) +TVM_GETTOOLTIPS = (TV_FIRST + 25) +TVM_SETINSERTMARK = (TV_FIRST + 26) +TVM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +TVM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +TVM_SETITEMHEIGHT = (TV_FIRST + 27) +TVM_GETITEMHEIGHT = (TV_FIRST + 28) +TVM_SETBKCOLOR = (TV_FIRST + 29) +TVM_SETTEXTCOLOR = (TV_FIRST + 30) +TVM_GETBKCOLOR = (TV_FIRST + 31) +TVM_GETTEXTCOLOR = (TV_FIRST + 32) +TVM_SETSCROLLTIME = (TV_FIRST + 33) +TVM_GETSCROLLTIME = (TV_FIRST + 34) +TVM_SETINSERTMARKCOLOR = (TV_FIRST + 37) +TVM_GETINSERTMARKCOLOR = (TV_FIRST + 38) +TVN_SELCHANGINGA = (TVN_FIRST-1) +TVN_SELCHANGINGW = (TVN_FIRST-50) +TVN_SELCHANGEDA = (TVN_FIRST-2) +TVN_SELCHANGEDW = (TVN_FIRST-51) +TVC_UNKNOWN = 0 +TVC_BYMOUSE = 1 +TVC_BYKEYBOARD = 2 +TVN_GETDISPINFOA = (TVN_FIRST-3) +TVN_GETDISPINFOW = (TVN_FIRST-52) +TVN_SETDISPINFOA = (TVN_FIRST-4) +TVN_SETDISPINFOW = (TVN_FIRST-53) +TVIF_DI_SETITEM = 4096 +TVN_ITEMEXPANDINGA = (TVN_FIRST-5) +TVN_ITEMEXPANDINGW = (TVN_FIRST-54) +TVN_ITEMEXPANDEDA = (TVN_FIRST-6) +TVN_ITEMEXPANDEDW = (TVN_FIRST-55) +TVN_BEGINDRAGA = (TVN_FIRST-7) +TVN_BEGINDRAGW = (TVN_FIRST-56) +TVN_BEGINRDRAGA = 
(TVN_FIRST-8) +TVN_BEGINRDRAGW = (TVN_FIRST-57) +TVN_DELETEITEMA = (TVN_FIRST-9) +TVN_DELETEITEMW = (TVN_FIRST-58) +TVN_BEGINLABELEDITA = (TVN_FIRST-10) +TVN_BEGINLABELEDITW = (TVN_FIRST-59) +TVN_ENDLABELEDITA = (TVN_FIRST-11) +TVN_ENDLABELEDITW = (TVN_FIRST-60) +TVN_KEYDOWN = (TVN_FIRST-12) +TVN_GETINFOTIPA = (TVN_FIRST-13) +TVN_GETINFOTIPW = (TVN_FIRST-14) +TVN_SINGLEEXPAND = (TVN_FIRST-15) +TVN_SELCHANGING = TVN_SELCHANGINGA +TVN_SELCHANGED = TVN_SELCHANGEDA +TVN_GETDISPINFO = TVN_GETDISPINFOA +TVN_SETDISPINFO = TVN_SETDISPINFOA +TVN_ITEMEXPANDING = TVN_ITEMEXPANDINGA +TVN_ITEMEXPANDED = TVN_ITEMEXPANDEDA +TVN_BEGINDRAG = TVN_BEGINDRAGA +TVN_BEGINRDRAG = TVN_BEGINRDRAGA +TVN_DELETEITEM = TVN_DELETEITEMA +TVN_BEGINLABELEDIT = TVN_BEGINLABELEDITA +TVN_ENDLABELEDIT = TVN_ENDLABELEDITA +TVN_GETINFOTIP = TVN_GETINFOTIPA +TVCDRF_NOIMAGES = 65536 +WC_COMBOBOXEXA = "ComboBoxEx32" +WC_COMBOBOXEX = WC_COMBOBOXEXA +CBEIF_TEXT = 1 +CBEIF_IMAGE = 2 +CBEIF_SELECTEDIMAGE = 4 +CBEIF_OVERLAY = 8 +CBEIF_INDENT = 16 +CBEIF_LPARAM = 32 +CBEIF_DI_SETITEM = 268435456 +CBEM_INSERTITEMA = (WM_USER + 1) +CBEM_SETIMAGELIST = (WM_USER + 2) +CBEM_GETIMAGELIST = (WM_USER + 3) +CBEM_GETITEMA = (WM_USER + 4) +CBEM_SETITEMA = (WM_USER + 5) +#CBEM_DELETEITEM = CB_DELETESTRING +CBEM_GETCOMBOCONTROL = (WM_USER + 6) +CBEM_GETEDITCONTROL = (WM_USER + 7) +CBEM_SETEXSTYLE = (WM_USER + 8) # use SETEXTENDEDSTYLE instead +CBEM_SETEXTENDEDSTYLE = (WM_USER + 14) # lparam == new style, wParam (optional) == mask +CBEM_GETEXSTYLE = (WM_USER + 9) # use GETEXTENDEDSTYLE instead +CBEM_GETEXTENDEDSTYLE = (WM_USER + 9) +CBEM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +CBEM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +CBEM_SETEXSTYLE = (WM_USER + 8) +CBEM_GETEXSTYLE = (WM_USER + 9) +CBEM_HASEDITCHANGED = (WM_USER + 10) +CBEM_INSERTITEMW = (WM_USER + 11) +CBEM_SETITEMW = (WM_USER + 12) +CBEM_GETITEMW = (WM_USER + 13) +CBEM_INSERTITEM = CBEM_INSERTITEMA +CBEM_SETITEM = CBEM_SETITEMA +CBEM_GETITEM = CBEM_GETITEMA +CBES_EX_NOEDITIMAGE = 1 +CBES_EX_NOEDITIMAGEINDENT = 2 +CBES_EX_PATHWORDBREAKPROC = 4 +CBES_EX_NOSIZELIMIT = 8 +CBES_EX_CASESENSITIVE = 16 +CBEN_GETDISPINFO = (CBEN_FIRST - 0) +CBEN_GETDISPINFOA = (CBEN_FIRST - 0) +CBEN_INSERTITEM = (CBEN_FIRST - 1) +CBEN_DELETEITEM = (CBEN_FIRST - 2) +CBEN_BEGINEDIT = (CBEN_FIRST - 4) +CBEN_ENDEDITA = (CBEN_FIRST - 5) +CBEN_ENDEDITW = (CBEN_FIRST - 6) +CBEN_GETDISPINFOW = (CBEN_FIRST - 7) +CBEN_DRAGBEGINA = (CBEN_FIRST - 8) +CBEN_DRAGBEGINW = (CBEN_FIRST - 9) +CBEN_DRAGBEGIN = CBEN_DRAGBEGINA +CBEN_ENDEDIT = CBEN_ENDEDITA +CBENF_KILLFOCUS = 1 +CBENF_RETURN = 2 +CBENF_ESCAPE = 3 +CBENF_DROPDOWN = 4 +CBEMAXSTRLEN = 260 +WC_TABCONTROLA = "SysTabControl32" +WC_TABCONTROL = WC_TABCONTROLA +TCS_SCROLLOPPOSITE = 1 # assumes multiline tab +TCS_BOTTOM = 2 +TCS_RIGHT = 2 +TCS_MULTISELECT = 4 # allow multi-select in button mode +TCS_FLATBUTTONS = 8 +TCS_FORCEICONLEFT = 16 +TCS_FORCELABELLEFT = 32 +TCS_HOTTRACK = 64 +TCS_VERTICAL = 128 +TCS_TABS = 0 +TCS_BUTTONS = 256 +TCS_SINGLELINE = 0 +TCS_MULTILINE = 512 +TCS_RIGHTJUSTIFY = 0 +TCS_FIXEDWIDTH = 1024 +TCS_RAGGEDRIGHT = 2048 +TCS_FOCUSONBUTTONDOWN = 4096 +TCS_OWNERDRAWFIXED = 8192 +TCS_TOOLTIPS = 16384 +TCS_FOCUSNEVER = 32768 +TCS_EX_FLATSEPARATORS = 1 +TCS_EX_REGISTERDROP = 2 +TCM_GETIMAGELIST = (TCM_FIRST + 2) +TCM_SETIMAGELIST = (TCM_FIRST + 3) +TCM_GETITEMCOUNT = (TCM_FIRST + 4) +TCIF_TEXT = 1 +TCIF_IMAGE = 2 +TCIF_RTLREADING = 4 +TCIF_PARAM = 8 +TCIF_STATE = 16 +TCIS_BUTTONPRESSED = 1 +TCIS_HIGHLIGHTED = 2 +TCM_GETITEMA = (TCM_FIRST + 5) +TCM_GETITEMW = 
(TCM_FIRST + 60) +TCM_GETITEM = TCM_GETITEMA +TCM_SETITEMA = (TCM_FIRST + 6) +TCM_SETITEMW = (TCM_FIRST + 61) +TCM_SETITEM = TCM_SETITEMA +TCM_INSERTITEMA = (TCM_FIRST + 7) +TCM_INSERTITEMW = (TCM_FIRST + 62) +TCM_INSERTITEM = TCM_INSERTITEMA +TCM_DELETEITEM = (TCM_FIRST + 8) +TCM_DELETEALLITEMS = (TCM_FIRST + 9) +TCM_GETITEMRECT = (TCM_FIRST + 10) +TCM_GETCURSEL = (TCM_FIRST + 11) +TCM_SETCURSEL = (TCM_FIRST + 12) +TCHT_NOWHERE = 1 +TCHT_ONITEMICON = 2 +TCHT_ONITEMLABEL = 4 +TCHT_ONITEM = (TCHT_ONITEMICON | TCHT_ONITEMLABEL) +TCM_HITTEST = (TCM_FIRST + 13) +TCM_SETITEMEXTRA = (TCM_FIRST + 14) +TCM_ADJUSTRECT = (TCM_FIRST + 40) +TCM_SETITEMSIZE = (TCM_FIRST + 41) +TCM_REMOVEIMAGE = (TCM_FIRST + 42) +TCM_SETPADDING = (TCM_FIRST + 43) +TCM_GETROWCOUNT = (TCM_FIRST + 44) +TCM_GETTOOLTIPS = (TCM_FIRST + 45) +TCM_SETTOOLTIPS = (TCM_FIRST + 46) +TCM_GETCURFOCUS = (TCM_FIRST + 47) +TCM_SETCURFOCUS = (TCM_FIRST + 48) +TCM_SETMINTABWIDTH = (TCM_FIRST + 49) +TCM_DESELECTALL = (TCM_FIRST + 50) +TCM_HIGHLIGHTITEM = (TCM_FIRST + 51) +TCM_SETEXTENDEDSTYLE = (TCM_FIRST + 52) # optional wParam == mask +TCM_GETEXTENDEDSTYLE = (TCM_FIRST + 53) +TCM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +TCM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +TCN_KEYDOWN = (TCN_FIRST - 0) +ANIMATE_CLASSA = "SysAnimate32" +ANIMATE_CLASS = ANIMATE_CLASSA +ACS_CENTER = 1 +ACS_TRANSPARENT = 2 +ACS_AUTOPLAY = 4 +ACS_TIMER = 8 # don't use threads... use timers +ACM_OPENA = (WM_USER+100) +ACM_OPENW = (WM_USER+103) +ACM_OPEN = ACM_OPENW +ACM_OPEN = ACM_OPENA +ACM_PLAY = (WM_USER+101) +ACM_STOP = (WM_USER+102) +ACN_START = 1 +ACN_STOP = 2 +MONTHCAL_CLASSA = "SysMonthCal32" +MONTHCAL_CLASS = MONTHCAL_CLASSA +MCM_FIRST = 4096 +MCM_GETCURSEL = (MCM_FIRST + 1) +MCM_SETCURSEL = (MCM_FIRST + 2) +MCM_GETMAXSELCOUNT = (MCM_FIRST + 3) +MCM_SETMAXSELCOUNT = (MCM_FIRST + 4) +MCM_GETSELRANGE = (MCM_FIRST + 5) +MCM_SETSELRANGE = (MCM_FIRST + 6) +MCM_GETMONTHRANGE = (MCM_FIRST + 7) +MCM_SETDAYSTATE = (MCM_FIRST + 8) +MCM_GETMINREQRECT = (MCM_FIRST + 9) +MCM_SETCOLOR = (MCM_FIRST + 10) +MCM_GETCOLOR = (MCM_FIRST + 11) +MCSC_BACKGROUND = 0 # the background color (between months) +MCSC_TEXT = 1 # the dates +MCSC_TITLEBK = 2 # background of the title +MCSC_TITLETEXT = 3 +MCSC_MONTHBK = 4 # background within the month cal +MCSC_TRAILINGTEXT = 5 # the text color of header & trailing days +MCM_SETTODAY = (MCM_FIRST + 12) +MCM_GETTODAY = (MCM_FIRST + 13) +MCM_HITTEST = (MCM_FIRST + 14) +MCHT_TITLE = 65536 +MCHT_CALENDAR = 131072 +MCHT_TODAYLINK = 196608 +MCHT_NEXT = 16777216 # these indicate that hitting +MCHT_PREV = 33554432 # here will go to the next/prev month +MCHT_NOWHERE = 0 +MCHT_TITLEBK = (MCHT_TITLE) +MCHT_TITLEMONTH = (MCHT_TITLE | 1) +MCHT_TITLEYEAR = (MCHT_TITLE | 2) +MCHT_TITLEBTNNEXT = (MCHT_TITLE | MCHT_NEXT | 3) +MCHT_TITLEBTNPREV = (MCHT_TITLE | MCHT_PREV | 3) +MCHT_CALENDARBK = (MCHT_CALENDAR) +MCHT_CALENDARDATE = (MCHT_CALENDAR | 1) +MCHT_CALENDARDATENEXT = (MCHT_CALENDARDATE | MCHT_NEXT) +MCHT_CALENDARDATEPREV = (MCHT_CALENDARDATE | MCHT_PREV) +MCHT_CALENDARDAY = (MCHT_CALENDAR | 2) +MCHT_CALENDARWEEKNUM = (MCHT_CALENDAR | 3) +MCM_SETFIRSTDAYOFWEEK = (MCM_FIRST + 15) +MCM_GETFIRSTDAYOFWEEK = (MCM_FIRST + 16) +MCM_GETRANGE = (MCM_FIRST + 17) +MCM_SETRANGE = (MCM_FIRST + 18) +MCM_GETMONTHDELTA = (MCM_FIRST + 19) +MCM_SETMONTHDELTA = (MCM_FIRST + 20) +MCM_GETMAXTODAYWIDTH = (MCM_FIRST + 21) +MCM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT +MCM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT +MCN_SELCHANGE = (MCN_FIRST + 1) +MCN_GETDAYSTATE = (MCN_FIRST + 3) 
+MCN_SELECT = (MCN_FIRST + 4) +MCS_DAYSTATE = 1 +MCS_MULTISELECT = 2 +MCS_WEEKNUMBERS = 4 +MCS_NOTODAYCIRCLE = 8 +MCS_NOTODAY = 16 +MCS_NOTODAY = 8 +GMR_VISIBLE = 0 # visible portion of display +GMR_DAYSTATE = 1 # above plus the grayed out parts of +DATETIMEPICK_CLASSA = "SysDateTimePick32" +DATETIMEPICK_CLASS = DATETIMEPICK_CLASSA +DTM_FIRST = 4096 +DTM_GETSYSTEMTIME = (DTM_FIRST + 1) +DTM_SETSYSTEMTIME = (DTM_FIRST + 2) +DTM_GETRANGE = (DTM_FIRST + 3) +DTM_SETRANGE = (DTM_FIRST + 4) +DTM_SETFORMATA = (DTM_FIRST + 5) +DTM_SETFORMATW = (DTM_FIRST + 50) +DTM_SETFORMAT = DTM_SETFORMATW +DTM_SETFORMAT = DTM_SETFORMATA +DTM_SETMCCOLOR = (DTM_FIRST + 6) +DTM_GETMCCOLOR = (DTM_FIRST + 7) +DTM_GETMONTHCAL = (DTM_FIRST + 8) +DTM_SETMCFONT = (DTM_FIRST + 9) +DTM_GETMCFONT = (DTM_FIRST + 10) +DTS_UPDOWN = 1 # use UPDOWN instead of MONTHCAL +DTS_SHOWNONE = 2 # allow a NONE selection +DTS_SHORTDATEFORMAT = 0 # use the short date format (app must forward WM_WININICHANGE messages) +DTS_LONGDATEFORMAT = 4 # use the long date format (app must forward WM_WININICHANGE messages) +DTS_TIMEFORMAT = 9 # use the time format (app must forward WM_WININICHANGE messages) +DTS_APPCANPARSE = 16 # allow user entered strings (app MUST respond to DTN_USERSTRING) +DTS_RIGHTALIGN = 32 # right-align popup instead of left-align it +DTN_DATETIMECHANGE = (DTN_FIRST + 1) # the systemtime has changed +DTN_USERSTRINGA = (DTN_FIRST + 2) # the user has entered a string +DTN_USERSTRINGW = (DTN_FIRST + 15) +DTN_USERSTRING = DTN_USERSTRINGW +DTN_WMKEYDOWNA = (DTN_FIRST + 3) # modify keydown on app format field (X) +DTN_WMKEYDOWNW = (DTN_FIRST + 16) +DTN_WMKEYDOWN = DTN_WMKEYDOWNA +DTN_FORMATA = (DTN_FIRST + 4) # query display for app format field (X) +DTN_FORMATW = (DTN_FIRST + 17) +DTN_FORMAT = DTN_FORMATA +DTN_FORMATQUERYA = (DTN_FIRST + 5) # query formatting info for app format field (X) +DTN_FORMATQUERYW = (DTN_FIRST + 18) +DTN_FORMATQUERY = DTN_FORMATQUERYA +DTN_DROPDOWN = (DTN_FIRST + 6) # MonthCal has dropped down +DTN_CLOSEUP = (DTN_FIRST + 7) # MonthCal is popping up +GDTR_MIN = 1 +GDTR_MAX = 2 +GDT_ERROR = -1 +GDT_VALID = 0 +GDT_NONE = 1 +IPM_CLEARADDRESS = (WM_USER+100) # no parameters +IPM_SETADDRESS = (WM_USER+101) # lparam = TCP/IP address +IPM_GETADDRESS = (WM_USER+102) # lresult = # of non black fields. 
lparam = LPDWORD for TCP/IP address +IPM_SETRANGE = (WM_USER+103) # wparam = field, lparam = range +IPM_SETFOCUS = (WM_USER+104) # wparam = field +IPM_ISBLANK = (WM_USER+105) # no parameters +WC_IPADDRESSA = "SysIPAddress32" +WC_IPADDRESS = WC_IPADDRESSA +IPN_FIELDCHANGED = (IPN_FIRST - 0) +WC_PAGESCROLLERA = "SysPager" +WC_PAGESCROLLER = WC_PAGESCROLLERA +PGS_VERT = 0 +PGS_HORZ = 1 +PGS_AUTOSCROLL = 2 +PGS_DRAGNDROP = 4 +PGF_INVISIBLE = 0 # Scroll button is not visible +PGF_NORMAL = 1 # Scroll button is in normal state +PGF_GRAYED = 2 # Scroll button is in grayed state +PGF_DEPRESSED = 4 # Scroll button is in depressed state +PGF_HOT = 8 # Scroll button is in hot state +PGB_TOPORLEFT = 0 +PGB_BOTTOMORRIGHT = 1 +PGM_SETCHILD = (PGM_FIRST + 1) # lParam == hwnd +PGM_RECALCSIZE = (PGM_FIRST + 2) +PGM_FORWARDMOUSE = (PGM_FIRST + 3) +PGM_SETBKCOLOR = (PGM_FIRST + 4) +PGM_GETBKCOLOR = (PGM_FIRST + 5) +PGM_SETBORDER = (PGM_FIRST + 6) +PGM_GETBORDER = (PGM_FIRST + 7) +PGM_SETPOS = (PGM_FIRST + 8) +PGM_GETPOS = (PGM_FIRST + 9) +PGM_SETBUTTONSIZE = (PGM_FIRST + 10) +PGM_GETBUTTONSIZE = (PGM_FIRST + 11) +PGM_GETBUTTONSTATE = (PGM_FIRST + 12) +PGM_GETDROPTARGET = CCM_GETDROPTARGET +PGN_SCROLL = (PGN_FIRST-1) +PGF_SCROLLUP = 1 +PGF_SCROLLDOWN = 2 +PGF_SCROLLLEFT = 4 +PGF_SCROLLRIGHT = 8 +PGK_SHIFT = 1 +PGK_CONTROL = 2 +PGK_MENU = 4 +PGN_CALCSIZE = (PGN_FIRST-2) +PGF_CALCWIDTH = 1 +PGF_CALCHEIGHT = 2 +WC_NATIVEFONTCTLA = "NativeFontCtl" +WC_NATIVEFONTCTL = WC_NATIVEFONTCTLA +NFS_EDIT = 1 +NFS_STATIC = 2 +NFS_LISTCOMBO = 4 +NFS_BUTTON = 8 +NFS_ALL = 16 +WM_MOUSEHOVER = 673 +WM_MOUSELEAVE = 675 +TME_HOVER = 1 +TME_LEAVE = 2 +TME_QUERY = 1073741824 +TME_CANCEL = -2147483648 +HOVER_DEFAULT = -1 +WSB_PROP_CYVSCROLL = 0x00000001 +WSB_PROP_CXHSCROLL = 0x00000002 +WSB_PROP_CYHSCROLL = 0x00000004 +WSB_PROP_CXVSCROLL = 0x00000008 +WSB_PROP_CXHTHUMB = 0x00000010 +WSB_PROP_CYVTHUMB = 0x00000020 +WSB_PROP_VBKGCOLOR = 0x00000040 +WSB_PROP_HBKGCOLOR = 0x00000080 +WSB_PROP_VSTYLE = 0x00000100 +WSB_PROP_HSTYLE = 0x00000200 +WSB_PROP_WINSTYLE = 0x00000400 +WSB_PROP_PALETTE = 0x00000800 +WSB_PROP_MASK = 0x00000FFF +FSB_FLAT_MODE = 2 +FSB_ENCARTA_MODE = 1 +FSB_REGULAR_MODE = 0 + +def INDEXTOOVERLAYMASK(i): + return i << 8 + +def INDEXTOSTATEIMAGEMASK(i): + return i << 12 diff --git a/venv/Lib/site-packages/win32/lib/dbi.py b/venv/Lib/site-packages/win32/lib/dbi.py new file mode 100644 index 00000000..2c462750 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/dbi.py @@ -0,0 +1,24 @@ +""" +Skeleton replacement for removed dbi module. +Use of objects created by this module should be replaced with native Python objects. +Dates are now returned as datetime.datetime objects, but will still accept PyTime +objects also. +Raw data for binary fields should be passed as buffer objects for Python 2.x, +and memoryview objects in Py3k. 
+""" + +import warnings +warnings.warn( + "dbi module is obsolete, code should now use native python datetime and buffer/memoryview objects", + DeprecationWarning) + +import datetime +dbDate = dbiDate = datetime.datetime + +try: + dbRaw = dbiRaw = buffer +except NameError: + dbRaw = dbiRaw = memoryview + +# type names are still exported by odbc module +from odbc import * diff --git a/venv/Lib/site-packages/win32/lib/mmsystem.py b/venv/Lib/site-packages/win32/lib/mmsystem.py new file mode 100644 index 00000000..54c7fcfb --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/mmsystem.py @@ -0,0 +1,867 @@ +# Generated by h2py from d:/msdev/include/mmsystem.h +MAXPNAMELEN = 32 +MAXERRORLENGTH = 256 +MAX_JOYSTICKOEMVXDNAME = 260 +MM_MICROSOFT = 1 +MM_MIDI_MAPPER = 1 +MM_WAVE_MAPPER = 2 +MM_SNDBLST_MIDIOUT = 3 +MM_SNDBLST_MIDIIN = 4 +MM_SNDBLST_SYNTH = 5 +MM_SNDBLST_WAVEOUT = 6 +MM_SNDBLST_WAVEIN = 7 +MM_ADLIB = 9 +MM_MPU401_MIDIOUT = 10 +MM_MPU401_MIDIIN = 11 +MM_PC_JOYSTICK = 12 +TIME_MS = 0x0001 +TIME_SAMPLES = 0x0002 +TIME_BYTES = 0x0004 +TIME_SMPTE = 0x0008 +TIME_MIDI = 0x0010 +TIME_TICKS = 0x0020 +MM_JOY1MOVE = 0x3A0 +MM_JOY2MOVE = 0x3A1 +MM_JOY1ZMOVE = 0x3A2 +MM_JOY2ZMOVE = 0x3A3 +MM_JOY1BUTTONDOWN = 0x3B5 +MM_JOY2BUTTONDOWN = 0x3B6 +MM_JOY1BUTTONUP = 0x3B7 +MM_JOY2BUTTONUP = 0x3B8 +MM_MCINOTIFY = 0x3B9 +MM_WOM_OPEN = 0x3BB +MM_WOM_CLOSE = 0x3BC +MM_WOM_DONE = 0x3BD +MM_WIM_OPEN = 0x3BE +MM_WIM_CLOSE = 0x3BF +MM_WIM_DATA = 0x3C0 +MM_MIM_OPEN = 0x3C1 +MM_MIM_CLOSE = 0x3C2 +MM_MIM_DATA = 0x3C3 +MM_MIM_LONGDATA = 0x3C4 +MM_MIM_ERROR = 0x3C5 +MM_MIM_LONGERROR = 0x3C6 +MM_MOM_OPEN = 0x3C7 +MM_MOM_CLOSE = 0x3C8 +MM_MOM_DONE = 0x3C9 +MM_STREAM_OPEN = 0x3D4 +MM_STREAM_CLOSE = 0x3D5 +MM_STREAM_DONE = 0x3D6 +MM_STREAM_ERROR = 0x3D7 +MM_MOM_POSITIONCB = 0x3CA +MM_MIM_MOREDATA = 0x3CC +MM_MIXM_LINE_CHANGE = 0x3D0 +MM_MIXM_CONTROL_CHANGE = 0x3D1 +MMSYSERR_BASE = 0 +WAVERR_BASE = 32 +MIDIERR_BASE = 64 +TIMERR_BASE = 96 +JOYERR_BASE = 160 +MCIERR_BASE = 256 +MIXERR_BASE = 1024 +MCI_STRING_OFFSET = 512 +MCI_VD_OFFSET = 1024 +MCI_CD_OFFSET = 1088 +MCI_WAVE_OFFSET = 1152 +MCI_SEQ_OFFSET = 1216 +MMSYSERR_NOERROR = 0 +MMSYSERR_ERROR = (MMSYSERR_BASE + 1) +MMSYSERR_BADDEVICEID = (MMSYSERR_BASE + 2) +MMSYSERR_NOTENABLED = (MMSYSERR_BASE + 3) +MMSYSERR_ALLOCATED = (MMSYSERR_BASE + 4) +MMSYSERR_INVALHANDLE = (MMSYSERR_BASE + 5) +MMSYSERR_NODRIVER = (MMSYSERR_BASE + 6) +MMSYSERR_NOMEM = (MMSYSERR_BASE + 7) +MMSYSERR_NOTSUPPORTED = (MMSYSERR_BASE + 8) +MMSYSERR_BADERRNUM = (MMSYSERR_BASE + 9) +MMSYSERR_INVALFLAG = (MMSYSERR_BASE + 10) +MMSYSERR_INVALPARAM = (MMSYSERR_BASE + 11) +MMSYSERR_HANDLEBUSY = (MMSYSERR_BASE + 12) +MMSYSERR_INVALIDALIAS = (MMSYSERR_BASE + 13) +MMSYSERR_BADDB = (MMSYSERR_BASE + 14) +MMSYSERR_KEYNOTFOUND = (MMSYSERR_BASE + 15) +MMSYSERR_READERROR = (MMSYSERR_BASE + 16) +MMSYSERR_WRITEERROR = (MMSYSERR_BASE + 17) +MMSYSERR_DELETEERROR = (MMSYSERR_BASE + 18) +MMSYSERR_VALNOTFOUND = (MMSYSERR_BASE + 19) +MMSYSERR_NODRIVERCB = (MMSYSERR_BASE + 20) +MMSYSERR_LASTERROR = (MMSYSERR_BASE + 20) +DRV_LOAD = 0x0001 +DRV_ENABLE = 0x0002 +DRV_OPEN = 0x0003 +DRV_CLOSE = 0x0004 +DRV_DISABLE = 0x0005 +DRV_FREE = 0x0006 +DRV_CONFIGURE = 0x0007 +DRV_QUERYCONFIGURE = 0x0008 +DRV_INSTALL = 0x0009 +DRV_REMOVE = 0x000A +DRV_EXITSESSION = 0x000B +DRV_POWER = 0x000F +DRV_RESERVED = 0x0800 +DRV_USER = 0x4000 +DRVCNF_CANCEL = 0x0000 +DRVCNF_OK = 0x0001 +DRVCNF_RESTART = 0x0002 +DRV_CANCEL = DRVCNF_CANCEL +DRV_OK = DRVCNF_OK +DRV_RESTART = DRVCNF_RESTART +DRV_MCI_FIRST = DRV_RESERVED +DRV_MCI_LAST = (DRV_RESERVED + 
0xFFF) +CALLBACK_TYPEMASK = 0x00070000 +CALLBACK_NULL = 0x00000000 +CALLBACK_WINDOW = 0x00010000 +CALLBACK_TASK = 0x00020000 +CALLBACK_FUNCTION = 0x00030000 +CALLBACK_THREAD = (CALLBACK_TASK) +CALLBACK_EVENT = 0x00050000 +SND_SYNC = 0x0000 +SND_ASYNC = 0x0001 +SND_NODEFAULT = 0x0002 +SND_MEMORY = 0x0004 +SND_LOOP = 0x0008 +SND_NOSTOP = 0x0010 +SND_NOWAIT = 0x00002000 +SND_ALIAS = 0x00010000 +SND_ALIAS_ID = 0x00110000 +SND_FILENAME = 0x00020000 +SND_RESOURCE = 0x00040004 +SND_PURGE = 0x0040 +SND_APPLICATION = 0x0080 +SND_ALIAS_START = 0 +WAVERR_BADFORMAT = (WAVERR_BASE + 0) +WAVERR_STILLPLAYING = (WAVERR_BASE + 1) +WAVERR_UNPREPARED = (WAVERR_BASE + 2) +WAVERR_SYNC = (WAVERR_BASE + 3) +WAVERR_LASTERROR = (WAVERR_BASE + 3) +WOM_OPEN = MM_WOM_OPEN +WOM_CLOSE = MM_WOM_CLOSE +WOM_DONE = MM_WOM_DONE +WIM_OPEN = MM_WIM_OPEN +WIM_CLOSE = MM_WIM_CLOSE +WIM_DATA = MM_WIM_DATA +WAVE_MAPPER = -1 # 0xFFFFFFFF +WAVE_FORMAT_QUERY = 0x0001 +WAVE_ALLOWSYNC = 0x0002 +WAVE_MAPPED = 0x0004 +WAVE_FORMAT_DIRECT = 0x0008 +WAVE_FORMAT_DIRECT_QUERY = (WAVE_FORMAT_QUERY | WAVE_FORMAT_DIRECT) +WHDR_DONE = 0x00000001 +WHDR_PREPARED = 0x00000002 +WHDR_BEGINLOOP = 0x00000004 +WHDR_ENDLOOP = 0x00000008 +WHDR_INQUEUE = 0x00000010 +WAVECAPS_PITCH = 0x0001 +WAVECAPS_PLAYBACKRATE = 0x0002 +WAVECAPS_VOLUME = 0x0004 +WAVECAPS_LRVOLUME = 0x0008 +WAVECAPS_SYNC = 0x0010 +WAVECAPS_SAMPLEACCURATE = 0x0020 +WAVECAPS_DIRECTSOUND = 0x0040 +WAVE_INVALIDFORMAT = 0x00000000 +WAVE_FORMAT_1M08 = 0x00000001 +WAVE_FORMAT_1S08 = 0x00000002 +WAVE_FORMAT_1M16 = 0x00000004 +WAVE_FORMAT_1S16 = 0x00000008 +WAVE_FORMAT_2M08 = 0x00000010 +WAVE_FORMAT_2S08 = 0x00000020 +WAVE_FORMAT_2M16 = 0x00000040 +WAVE_FORMAT_2S16 = 0x00000080 +WAVE_FORMAT_4M08 = 0x00000100 +WAVE_FORMAT_4S08 = 0x00000200 +WAVE_FORMAT_4M16 = 0x00000400 +WAVE_FORMAT_4S16 = 0x00000800 +WAVE_FORMAT_PCM = 1 +WAVE_FORMAT_IEEE_FLOAT = 3 +MIDIERR_UNPREPARED = (MIDIERR_BASE + 0) +MIDIERR_STILLPLAYING = (MIDIERR_BASE + 1) +MIDIERR_NOMAP = (MIDIERR_BASE + 2) +MIDIERR_NOTREADY = (MIDIERR_BASE + 3) +MIDIERR_NODEVICE = (MIDIERR_BASE + 4) +MIDIERR_INVALIDSETUP = (MIDIERR_BASE + 5) +MIDIERR_BADOPENMODE = (MIDIERR_BASE + 6) +MIDIERR_DONT_CONTINUE = (MIDIERR_BASE + 7) +MIDIERR_LASTERROR = (MIDIERR_BASE + 7) +MIDIPATCHSIZE = 128 +MIM_OPEN = MM_MIM_OPEN +MIM_CLOSE = MM_MIM_CLOSE +MIM_DATA = MM_MIM_DATA +MIM_LONGDATA = MM_MIM_LONGDATA +MIM_ERROR = MM_MIM_ERROR +MIM_LONGERROR = MM_MIM_LONGERROR +MOM_OPEN = MM_MOM_OPEN +MOM_CLOSE = MM_MOM_CLOSE +MOM_DONE = MM_MOM_DONE +MIM_MOREDATA = MM_MIM_MOREDATA +MOM_POSITIONCB = MM_MOM_POSITIONCB +MIDI_IO_STATUS = 0x00000020 +MIDI_CACHE_ALL = 1 +MIDI_CACHE_BESTFIT = 2 +MIDI_CACHE_QUERY = 3 +MIDI_UNCACHE = 4 +MOD_MIDIPORT = 1 +MOD_SYNTH = 2 +MOD_SQSYNTH = 3 +MOD_FMSYNTH = 4 +MOD_MAPPER = 5 +MIDICAPS_VOLUME = 0x0001 +MIDICAPS_LRVOLUME = 0x0002 +MIDICAPS_CACHE = 0x0004 +MIDICAPS_STREAM = 0x0008 +MHDR_DONE = 0x00000001 +MHDR_PREPARED = 0x00000002 +MHDR_INQUEUE = 0x00000004 +MHDR_ISSTRM = 0x00000008 +MEVT_F_SHORT = 0x00000000 +MEVT_F_LONG = -2147483648 # 0x80000000 +MEVT_F_CALLBACK = 0x40000000 +def MEVT_EVENTTYPE(x): return ((BYTE)(((x)>>24)&0xFF)) + +def MEVT_EVENTPARM(x): return ((DWORD)((x)&0x00FFFFFF)) + +MIDISTRM_ERROR = (-2) +MIDIPROP_SET = -2147483648 # 0x80000000 +MIDIPROP_GET = 0x40000000 +MIDIPROP_TIMEDIV = 0x00000001 +MIDIPROP_TEMPO = 0x00000002 +AUXCAPS_CDAUDIO = 1 +AUXCAPS_AUXIN = 2 +AUXCAPS_VOLUME = 0x0001 +AUXCAPS_LRVOLUME = 0x0002 +MIXER_SHORT_NAME_CHARS = 16 +MIXER_LONG_NAME_CHARS = 64 +MIXERR_INVALLINE = (MIXERR_BASE + 0) +MIXERR_INVALCONTROL = 
(MIXERR_BASE + 1) +MIXERR_INVALVALUE = (MIXERR_BASE + 2) +MIXERR_LASTERROR = (MIXERR_BASE + 2) +MIXER_OBJECTF_HANDLE = -2147483648 # 0x80000000 +MIXER_OBJECTF_MIXER = 0x00000000 +MIXER_OBJECTF_HMIXER = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_MIXER) +MIXER_OBJECTF_WAVEOUT = 0x10000000 +MIXER_OBJECTF_HWAVEOUT = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_WAVEOUT) +MIXER_OBJECTF_WAVEIN = 0x20000000 +MIXER_OBJECTF_HWAVEIN = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_WAVEIN) +MIXER_OBJECTF_MIDIOUT = 0x30000000 +MIXER_OBJECTF_HMIDIOUT = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_MIDIOUT) +MIXER_OBJECTF_MIDIIN = 0x40000000 +MIXER_OBJECTF_HMIDIIN = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_MIDIIN) +MIXER_OBJECTF_AUX = 0x50000000 +MIXERLINE_LINEF_ACTIVE = 0x00000001 +MIXERLINE_LINEF_DISCONNECTED = 0x00008000 +MIXERLINE_LINEF_SOURCE = -2147483648 # 0x80000000 +MIXERLINE_COMPONENTTYPE_DST_FIRST = 0x00000000 +MIXERLINE_COMPONENTTYPE_DST_UNDEFINED = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 0) +MIXERLINE_COMPONENTTYPE_DST_DIGITAL = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 1) +MIXERLINE_COMPONENTTYPE_DST_LINE = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 2) +MIXERLINE_COMPONENTTYPE_DST_MONITOR = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 3) +MIXERLINE_COMPONENTTYPE_DST_SPEAKERS = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 4) +MIXERLINE_COMPONENTTYPE_DST_HEADPHONES = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 5) +MIXERLINE_COMPONENTTYPE_DST_TELEPHONE = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 6) +MIXERLINE_COMPONENTTYPE_DST_WAVEIN = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 7) +MIXERLINE_COMPONENTTYPE_DST_VOICEIN = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 8) +MIXERLINE_COMPONENTTYPE_DST_LAST = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 8) +MIXERLINE_COMPONENTTYPE_SRC_FIRST = 0x00001000 +MIXERLINE_COMPONENTTYPE_SRC_UNDEFINED = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 0) +MIXERLINE_COMPONENTTYPE_SRC_DIGITAL = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 1) +MIXERLINE_COMPONENTTYPE_SRC_LINE = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 2) +MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 3) +MIXERLINE_COMPONENTTYPE_SRC_SYNTHESIZER = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 4) +MIXERLINE_COMPONENTTYPE_SRC_COMPACTDISC = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 5) +MIXERLINE_COMPONENTTYPE_SRC_TELEPHONE = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 6) +MIXERLINE_COMPONENTTYPE_SRC_PCSPEAKER = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 7) +MIXERLINE_COMPONENTTYPE_SRC_WAVEOUT = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 8) +MIXERLINE_COMPONENTTYPE_SRC_AUXILIARY = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 9) +MIXERLINE_COMPONENTTYPE_SRC_ANALOG = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 10) +MIXERLINE_COMPONENTTYPE_SRC_LAST = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 10) +MIXERLINE_TARGETTYPE_UNDEFINED = 0 +MIXERLINE_TARGETTYPE_WAVEOUT = 1 +MIXERLINE_TARGETTYPE_WAVEIN = 2 +MIXERLINE_TARGETTYPE_MIDIOUT = 3 +MIXERLINE_TARGETTYPE_MIDIIN = 4 +MIXERLINE_TARGETTYPE_AUX = 5 +MIXER_GETLINEINFOF_DESTINATION = 0x00000000 +MIXER_GETLINEINFOF_SOURCE = 0x00000001 +MIXER_GETLINEINFOF_LINEID = 0x00000002 +MIXER_GETLINEINFOF_COMPONENTTYPE = 0x00000003 +MIXER_GETLINEINFOF_TARGETTYPE = 0x00000004 +MIXER_GETLINEINFOF_QUERYMASK = 0x0000000F +MIXERCONTROL_CONTROLF_UNIFORM = 0x00000001 +MIXERCONTROL_CONTROLF_MULTIPLE = 0x00000002 +MIXERCONTROL_CONTROLF_DISABLED = -2147483648 # 0x80000000 +MIXERCONTROL_CT_CLASS_MASK = -268435456 # 0xF0000000 +MIXERCONTROL_CT_CLASS_CUSTOM = 0x00000000 +MIXERCONTROL_CT_CLASS_METER = 0x10000000 +MIXERCONTROL_CT_CLASS_SWITCH = 0x20000000 +MIXERCONTROL_CT_CLASS_NUMBER = 0x30000000 +MIXERCONTROL_CT_CLASS_SLIDER = 0x40000000 
+MIXERCONTROL_CT_CLASS_FADER = 0x50000000 +MIXERCONTROL_CT_CLASS_TIME = 0x60000000 +MIXERCONTROL_CT_CLASS_LIST = 0x70000000 +MIXERCONTROL_CT_SUBCLASS_MASK = 0x0F000000 +MIXERCONTROL_CT_SC_SWITCH_BOOLEAN = 0x00000000 +MIXERCONTROL_CT_SC_SWITCH_BUTTON = 0x01000000 +MIXERCONTROL_CT_SC_METER_POLLED = 0x00000000 +MIXERCONTROL_CT_SC_TIME_MICROSECS = 0x00000000 +MIXERCONTROL_CT_SC_TIME_MILLISECS = 0x01000000 +MIXERCONTROL_CT_SC_LIST_SINGLE = 0x00000000 +MIXERCONTROL_CT_SC_LIST_MULTIPLE = 0x01000000 +MIXERCONTROL_CT_UNITS_MASK = 0x00FF0000 +MIXERCONTROL_CT_UNITS_CUSTOM = 0x00000000 +MIXERCONTROL_CT_UNITS_BOOLEAN = 0x00010000 +MIXERCONTROL_CT_UNITS_SIGNED = 0x00020000 +MIXERCONTROL_CT_UNITS_UNSIGNED = 0x00030000 +MIXERCONTROL_CT_UNITS_DECIBELS = 0x00040000 +MIXERCONTROL_CT_UNITS_PERCENT = 0x00050000 +MIXERCONTROL_CONTROLTYPE_CUSTOM = (MIXERCONTROL_CT_CLASS_CUSTOM | MIXERCONTROL_CT_UNITS_CUSTOM) +MIXERCONTROL_CONTROLTYPE_BOOLEANMETER = (MIXERCONTROL_CT_CLASS_METER | MIXERCONTROL_CT_SC_METER_POLLED | MIXERCONTROL_CT_UNITS_BOOLEAN) +MIXERCONTROL_CONTROLTYPE_SIGNEDMETER = (MIXERCONTROL_CT_CLASS_METER | MIXERCONTROL_CT_SC_METER_POLLED | MIXERCONTROL_CT_UNITS_SIGNED) +MIXERCONTROL_CONTROLTYPE_PEAKMETER = (MIXERCONTROL_CONTROLTYPE_SIGNEDMETER + 1) +MIXERCONTROL_CONTROLTYPE_UNSIGNEDMETER = (MIXERCONTROL_CT_CLASS_METER | MIXERCONTROL_CT_SC_METER_POLLED | MIXERCONTROL_CT_UNITS_UNSIGNED) +MIXERCONTROL_CONTROLTYPE_BOOLEAN = (MIXERCONTROL_CT_CLASS_SWITCH | MIXERCONTROL_CT_SC_SWITCH_BOOLEAN | MIXERCONTROL_CT_UNITS_BOOLEAN) +MIXERCONTROL_CONTROLTYPE_ONOFF = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 1) +MIXERCONTROL_CONTROLTYPE_MUTE = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 2) +MIXERCONTROL_CONTROLTYPE_MONO = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 3) +MIXERCONTROL_CONTROLTYPE_LOUDNESS = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 4) +MIXERCONTROL_CONTROLTYPE_STEREOENH = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 5) +MIXERCONTROL_CONTROLTYPE_BUTTON = (MIXERCONTROL_CT_CLASS_SWITCH | MIXERCONTROL_CT_SC_SWITCH_BUTTON | MIXERCONTROL_CT_UNITS_BOOLEAN) +MIXERCONTROL_CONTROLTYPE_DECIBELS = (MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_DECIBELS) +MIXERCONTROL_CONTROLTYPE_SIGNED = (MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_SIGNED) +MIXERCONTROL_CONTROLTYPE_UNSIGNED = (MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_UNSIGNED) +MIXERCONTROL_CONTROLTYPE_PERCENT = (MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_PERCENT) +MIXERCONTROL_CONTROLTYPE_SLIDER = (MIXERCONTROL_CT_CLASS_SLIDER | MIXERCONTROL_CT_UNITS_SIGNED) +MIXERCONTROL_CONTROLTYPE_PAN = (MIXERCONTROL_CONTROLTYPE_SLIDER + 1) +MIXERCONTROL_CONTROLTYPE_QSOUNDPAN = (MIXERCONTROL_CONTROLTYPE_SLIDER + 2) +MIXERCONTROL_CONTROLTYPE_FADER = (MIXERCONTROL_CT_CLASS_FADER | MIXERCONTROL_CT_UNITS_UNSIGNED) +MIXERCONTROL_CONTROLTYPE_VOLUME = (MIXERCONTROL_CONTROLTYPE_FADER + 1) +MIXERCONTROL_CONTROLTYPE_BASS = (MIXERCONTROL_CONTROLTYPE_FADER + 2) +MIXERCONTROL_CONTROLTYPE_TREBLE = (MIXERCONTROL_CONTROLTYPE_FADER + 3) +MIXERCONTROL_CONTROLTYPE_EQUALIZER = (MIXERCONTROL_CONTROLTYPE_FADER + 4) +MIXERCONTROL_CONTROLTYPE_SINGLESELECT = (MIXERCONTROL_CT_CLASS_LIST | MIXERCONTROL_CT_SC_LIST_SINGLE | MIXERCONTROL_CT_UNITS_BOOLEAN) +MIXERCONTROL_CONTROLTYPE_MUX = (MIXERCONTROL_CONTROLTYPE_SINGLESELECT + 1) +MIXERCONTROL_CONTROLTYPE_MULTIPLESELECT = (MIXERCONTROL_CT_CLASS_LIST | MIXERCONTROL_CT_SC_LIST_MULTIPLE | MIXERCONTROL_CT_UNITS_BOOLEAN) +MIXERCONTROL_CONTROLTYPE_MIXER = (MIXERCONTROL_CONTROLTYPE_MULTIPLESELECT + 1) +MIXERCONTROL_CONTROLTYPE_MICROTIME = (MIXERCONTROL_CT_CLASS_TIME | 
MIXERCONTROL_CT_SC_TIME_MICROSECS | MIXERCONTROL_CT_UNITS_UNSIGNED) +MIXERCONTROL_CONTROLTYPE_MILLITIME = (MIXERCONTROL_CT_CLASS_TIME | MIXERCONTROL_CT_SC_TIME_MILLISECS | MIXERCONTROL_CT_UNITS_UNSIGNED) +MIXER_GETLINECONTROLSF_ALL = 0x00000000 +MIXER_GETLINECONTROLSF_ONEBYID = 0x00000001 +MIXER_GETLINECONTROLSF_ONEBYTYPE = 0x00000002 +MIXER_GETLINECONTROLSF_QUERYMASK = 0x0000000F +MIXER_GETCONTROLDETAILSF_VALUE = 0x00000000 +MIXER_GETCONTROLDETAILSF_LISTTEXT = 0x00000001 +MIXER_GETCONTROLDETAILSF_QUERYMASK = 0x0000000F +MIXER_SETCONTROLDETAILSF_VALUE = 0x00000000 +MIXER_SETCONTROLDETAILSF_CUSTOM = 0x00000001 +MIXER_SETCONTROLDETAILSF_QUERYMASK = 0x0000000F +TIMERR_NOERROR = (0) +TIMERR_NOCANDO = (TIMERR_BASE+1) +TIMERR_STRUCT = (TIMERR_BASE+33) +TIME_ONESHOT = 0x0000 +TIME_PERIODIC = 0x0001 +TIME_CALLBACK_FUNCTION = 0x0000 +TIME_CALLBACK_EVENT_SET = 0x0010 +TIME_CALLBACK_EVENT_PULSE = 0x0020 +JOYERR_NOERROR = (0) +JOYERR_PARMS = (JOYERR_BASE+5) +JOYERR_NOCANDO = (JOYERR_BASE+6) +JOYERR_UNPLUGGED = (JOYERR_BASE+7) +JOY_BUTTON1 = 0x0001 +JOY_BUTTON2 = 0x0002 +JOY_BUTTON3 = 0x0004 +JOY_BUTTON4 = 0x0008 +JOY_BUTTON1CHG = 0x0100 +JOY_BUTTON2CHG = 0x0200 +JOY_BUTTON3CHG = 0x0400 +JOY_BUTTON4CHG = 0x0800 +JOY_BUTTON5 = 0x00000010 +JOY_BUTTON6 = 0x00000020 +JOY_BUTTON7 = 0x00000040 +JOY_BUTTON8 = 0x00000080 +JOY_BUTTON9 = 0x00000100 +JOY_BUTTON10 = 0x00000200 +JOY_BUTTON11 = 0x00000400 +JOY_BUTTON12 = 0x00000800 +JOY_BUTTON13 = 0x00001000 +JOY_BUTTON14 = 0x00002000 +JOY_BUTTON15 = 0x00004000 +JOY_BUTTON16 = 0x00008000 +JOY_BUTTON17 = 0x00010000 +JOY_BUTTON18 = 0x00020000 +JOY_BUTTON19 = 0x00040000 +JOY_BUTTON20 = 0x00080000 +JOY_BUTTON21 = 0x00100000 +JOY_BUTTON22 = 0x00200000 +JOY_BUTTON23 = 0x00400000 +JOY_BUTTON24 = 0x00800000 +JOY_BUTTON25 = 0x01000000 +JOY_BUTTON26 = 0x02000000 +JOY_BUTTON27 = 0x04000000 +JOY_BUTTON28 = 0x08000000 +JOY_BUTTON29 = 0x10000000 +JOY_BUTTON30 = 0x20000000 +JOY_BUTTON31 = 0x40000000 +JOY_BUTTON32 = -2147483648 # 0x80000000 +JOY_POVFORWARD = 0 +JOY_POVRIGHT = 9000 +JOY_POVBACKWARD = 18000 +JOY_POVLEFT = 27000 +JOY_RETURNX = 0x00000001 +JOY_RETURNY = 0x00000002 +JOY_RETURNZ = 0x00000004 +JOY_RETURNR = 0x00000008 +JOY_RETURNU = 0x00000010 +JOY_RETURNV = 0x00000020 +JOY_RETURNPOV = 0x00000040 +JOY_RETURNBUTTONS = 0x00000080 +JOY_RETURNRAWDATA = 0x00000100 +JOY_RETURNPOVCTS = 0x00000200 +JOY_RETURNCENTERED = 0x00000400 +JOY_USEDEADZONE = 0x00000800 +JOY_RETURNALL = (JOY_RETURNX | JOY_RETURNY | JOY_RETURNZ | \ + JOY_RETURNR | JOY_RETURNU | JOY_RETURNV | \ + JOY_RETURNPOV | JOY_RETURNBUTTONS) +JOY_CAL_READALWAYS = 0x00010000 +JOY_CAL_READXYONLY = 0x00020000 +JOY_CAL_READ3 = 0x00040000 +JOY_CAL_READ4 = 0x00080000 +JOY_CAL_READXONLY = 0x00100000 +JOY_CAL_READYONLY = 0x00200000 +JOY_CAL_READ5 = 0x00400000 +JOY_CAL_READ6 = 0x00800000 +JOY_CAL_READZONLY = 0x01000000 +JOY_CAL_READRONLY = 0x02000000 +JOY_CAL_READUONLY = 0x04000000 +JOY_CAL_READVONLY = 0x08000000 +JOYSTICKID1 = 0 +JOYSTICKID2 = 1 +JOYCAPS_HASZ = 0x0001 +JOYCAPS_HASR = 0x0002 +JOYCAPS_HASU = 0x0004 +JOYCAPS_HASV = 0x0008 +JOYCAPS_HASPOV = 0x0010 +JOYCAPS_POV4DIR = 0x0020 +JOYCAPS_POVCTS = 0x0040 +MMIOERR_BASE = 256 +MMIOERR_FILENOTFOUND = (MMIOERR_BASE + 1) +MMIOERR_OUTOFMEMORY = (MMIOERR_BASE + 2) +MMIOERR_CANNOTOPEN = (MMIOERR_BASE + 3) +MMIOERR_CANNOTCLOSE = (MMIOERR_BASE + 4) +MMIOERR_CANNOTREAD = (MMIOERR_BASE + 5) +MMIOERR_CANNOTWRITE = (MMIOERR_BASE + 6) +MMIOERR_CANNOTSEEK = (MMIOERR_BASE + 7) +MMIOERR_CANNOTEXPAND = (MMIOERR_BASE + 8) +MMIOERR_CHUNKNOTFOUND = (MMIOERR_BASE + 9) +MMIOERR_UNBUFFERED = 
(MMIOERR_BASE + 10) +MMIOERR_PATHNOTFOUND = (MMIOERR_BASE + 11) +MMIOERR_ACCESSDENIED = (MMIOERR_BASE + 12) +MMIOERR_SHARINGVIOLATION = (MMIOERR_BASE + 13) +MMIOERR_NETWORKERROR = (MMIOERR_BASE + 14) +MMIOERR_TOOMANYOPENFILES = (MMIOERR_BASE + 15) +MMIOERR_INVALIDFILE = (MMIOERR_BASE + 16) +CFSEPCHAR = ord('+') +MMIO_RWMODE = 0x00000003 +MMIO_SHAREMODE = 0x00000070 +MMIO_CREATE = 0x00001000 +MMIO_PARSE = 0x00000100 +MMIO_DELETE = 0x00000200 +MMIO_EXIST = 0x00004000 +MMIO_ALLOCBUF = 0x00010000 +MMIO_GETTEMP = 0x00020000 +MMIO_DIRTY = 0x10000000 +MMIO_READ = 0x00000000 +MMIO_WRITE = 0x00000001 +MMIO_READWRITE = 0x00000002 +MMIO_COMPAT = 0x00000000 +MMIO_EXCLUSIVE = 0x00000010 +MMIO_DENYWRITE = 0x00000020 +MMIO_DENYREAD = 0x00000030 +MMIO_DENYNONE = 0x00000040 +MMIO_FHOPEN = 0x0010 +MMIO_EMPTYBUF = 0x0010 +MMIO_TOUPPER = 0x0010 +MMIO_INSTALLPROC = 0x00010000 +MMIO_GLOBALPROC = 0x10000000 +MMIO_REMOVEPROC = 0x00020000 +MMIO_UNICODEPROC = 0x01000000 +MMIO_FINDPROC = 0x00040000 +MMIO_FINDCHUNK = 0x0010 +MMIO_FINDRIFF = 0x0020 +MMIO_FINDLIST = 0x0040 +MMIO_CREATERIFF = 0x0020 +MMIO_CREATELIST = 0x0040 +MMIOM_READ = MMIO_READ +MMIOM_WRITE = MMIO_WRITE +MMIOM_SEEK = 2 +MMIOM_OPEN = 3 +MMIOM_CLOSE = 4 +MMIOM_WRITEFLUSH = 5 +MMIOM_RENAME = 6 +MMIOM_USER = 0x8000 +SEEK_SET = 0 +SEEK_CUR = 1 +SEEK_END = 2 +MMIO_DEFAULTBUFFER = 8192 +MCIERR_INVALID_DEVICE_ID = (MCIERR_BASE + 1) +MCIERR_UNRECOGNIZED_KEYWORD = (MCIERR_BASE + 3) +MCIERR_UNRECOGNIZED_COMMAND = (MCIERR_BASE + 5) +MCIERR_HARDWARE = (MCIERR_BASE + 6) +MCIERR_INVALID_DEVICE_NAME = (MCIERR_BASE + 7) +MCIERR_OUT_OF_MEMORY = (MCIERR_BASE + 8) +MCIERR_DEVICE_OPEN = (MCIERR_BASE + 9) +MCIERR_CANNOT_LOAD_DRIVER = (MCIERR_BASE + 10) +MCIERR_MISSING_COMMAND_STRING = (MCIERR_BASE + 11) +MCIERR_PARAM_OVERFLOW = (MCIERR_BASE + 12) +MCIERR_MISSING_STRING_ARGUMENT = (MCIERR_BASE + 13) +MCIERR_BAD_INTEGER = (MCIERR_BASE + 14) +MCIERR_PARSER_INTERNAL = (MCIERR_BASE + 15) +MCIERR_DRIVER_INTERNAL = (MCIERR_BASE + 16) +MCIERR_MISSING_PARAMETER = (MCIERR_BASE + 17) +MCIERR_UNSUPPORTED_FUNCTION = (MCIERR_BASE + 18) +MCIERR_FILE_NOT_FOUND = (MCIERR_BASE + 19) +MCIERR_DEVICE_NOT_READY = (MCIERR_BASE + 20) +MCIERR_INTERNAL = (MCIERR_BASE + 21) +MCIERR_DRIVER = (MCIERR_BASE + 22) +MCIERR_CANNOT_USE_ALL = (MCIERR_BASE + 23) +MCIERR_MULTIPLE = (MCIERR_BASE + 24) +MCIERR_EXTENSION_NOT_FOUND = (MCIERR_BASE + 25) +MCIERR_OUTOFRANGE = (MCIERR_BASE + 26) +MCIERR_FLAGS_NOT_COMPATIBLE = (MCIERR_BASE + 28) +MCIERR_FILE_NOT_SAVED = (MCIERR_BASE + 30) +MCIERR_DEVICE_TYPE_REQUIRED = (MCIERR_BASE + 31) +MCIERR_DEVICE_LOCKED = (MCIERR_BASE + 32) +MCIERR_DUPLICATE_ALIAS = (MCIERR_BASE + 33) +MCIERR_BAD_CONSTANT = (MCIERR_BASE + 34) +MCIERR_MUST_USE_SHAREABLE = (MCIERR_BASE + 35) +MCIERR_MISSING_DEVICE_NAME = (MCIERR_BASE + 36) +MCIERR_BAD_TIME_FORMAT = (MCIERR_BASE + 37) +MCIERR_NO_CLOSING_QUOTE = (MCIERR_BASE + 38) +MCIERR_DUPLICATE_FLAGS = (MCIERR_BASE + 39) +MCIERR_INVALID_FILE = (MCIERR_BASE + 40) +MCIERR_NULL_PARAMETER_BLOCK = (MCIERR_BASE + 41) +MCIERR_UNNAMED_RESOURCE = (MCIERR_BASE + 42) +MCIERR_NEW_REQUIRES_ALIAS = (MCIERR_BASE + 43) +MCIERR_NOTIFY_ON_AUTO_OPEN = (MCIERR_BASE + 44) +MCIERR_NO_ELEMENT_ALLOWED = (MCIERR_BASE + 45) +MCIERR_NONAPPLICABLE_FUNCTION = (MCIERR_BASE + 46) +MCIERR_ILLEGAL_FOR_AUTO_OPEN = (MCIERR_BASE + 47) +MCIERR_FILENAME_REQUIRED = (MCIERR_BASE + 48) +MCIERR_EXTRA_CHARACTERS = (MCIERR_BASE + 49) +MCIERR_DEVICE_NOT_INSTALLED = (MCIERR_BASE + 50) +MCIERR_GET_CD = (MCIERR_BASE + 51) +MCIERR_SET_CD = (MCIERR_BASE + 52) +MCIERR_SET_DRIVE = (MCIERR_BASE + 
53) +MCIERR_DEVICE_LENGTH = (MCIERR_BASE + 54) +MCIERR_DEVICE_ORD_LENGTH = (MCIERR_BASE + 55) +MCIERR_NO_INTEGER = (MCIERR_BASE + 56) +MCIERR_WAVE_OUTPUTSINUSE = (MCIERR_BASE + 64) +MCIERR_WAVE_SETOUTPUTINUSE = (MCIERR_BASE + 65) +MCIERR_WAVE_INPUTSINUSE = (MCIERR_BASE + 66) +MCIERR_WAVE_SETINPUTINUSE = (MCIERR_BASE + 67) +MCIERR_WAVE_OUTPUTUNSPECIFIED = (MCIERR_BASE + 68) +MCIERR_WAVE_INPUTUNSPECIFIED = (MCIERR_BASE + 69) +MCIERR_WAVE_OUTPUTSUNSUITABLE = (MCIERR_BASE + 70) +MCIERR_WAVE_SETOUTPUTUNSUITABLE = (MCIERR_BASE + 71) +MCIERR_WAVE_INPUTSUNSUITABLE = (MCIERR_BASE + 72) +MCIERR_WAVE_SETINPUTUNSUITABLE = (MCIERR_BASE + 73) +MCIERR_SEQ_DIV_INCOMPATIBLE = (MCIERR_BASE + 80) +MCIERR_SEQ_PORT_INUSE = (MCIERR_BASE + 81) +MCIERR_SEQ_PORT_NONEXISTENT = (MCIERR_BASE + 82) +MCIERR_SEQ_PORT_MAPNODEVICE = (MCIERR_BASE + 83) +MCIERR_SEQ_PORT_MISCERROR = (MCIERR_BASE + 84) +MCIERR_SEQ_TIMER = (MCIERR_BASE + 85) +MCIERR_SEQ_PORTUNSPECIFIED = (MCIERR_BASE + 86) +MCIERR_SEQ_NOMIDIPRESENT = (MCIERR_BASE + 87) +MCIERR_NO_WINDOW = (MCIERR_BASE + 90) +MCIERR_CREATEWINDOW = (MCIERR_BASE + 91) +MCIERR_FILE_READ = (MCIERR_BASE + 92) +MCIERR_FILE_WRITE = (MCIERR_BASE + 93) +MCIERR_NO_IDENTITY = (MCIERR_BASE + 94) +MCIERR_CUSTOM_DRIVER_BASE = (MCIERR_BASE + 256) +MCI_FIRST = DRV_MCI_FIRST +MCI_OPEN = 0x0803 +MCI_CLOSE = 0x0804 +MCI_ESCAPE = 0x0805 +MCI_PLAY = 0x0806 +MCI_SEEK = 0x0807 +MCI_STOP = 0x0808 +MCI_PAUSE = 0x0809 +MCI_INFO = 0x080A +MCI_GETDEVCAPS = 0x080B +MCI_SPIN = 0x080C +MCI_SET = 0x080D +MCI_STEP = 0x080E +MCI_RECORD = 0x080F +MCI_SYSINFO = 0x0810 +MCI_BREAK = 0x0811 +MCI_SAVE = 0x0813 +MCI_STATUS = 0x0814 +MCI_CUE = 0x0830 +MCI_REALIZE = 0x0840 +MCI_WINDOW = 0x0841 +MCI_PUT = 0x0842 +MCI_WHERE = 0x0843 +MCI_FREEZE = 0x0844 +MCI_UNFREEZE = 0x0845 +MCI_LOAD = 0x0850 +MCI_CUT = 0x0851 +MCI_COPY = 0x0852 +MCI_PASTE = 0x0853 +MCI_UPDATE = 0x0854 +MCI_RESUME = 0x0855 +MCI_DELETE = 0x0856 +MCI_USER_MESSAGES = (DRV_MCI_FIRST + 0x400) +MCI_LAST = 0x0FFF +MCI_DEVTYPE_VCR = 513 +MCI_DEVTYPE_VIDEODISC = 514 +MCI_DEVTYPE_OVERLAY = 515 +MCI_DEVTYPE_CD_AUDIO = 516 +MCI_DEVTYPE_DAT = 517 +MCI_DEVTYPE_SCANNER = 518 +MCI_DEVTYPE_ANIMATION = 519 +MCI_DEVTYPE_DIGITAL_VIDEO = 520 +MCI_DEVTYPE_OTHER = 521 +MCI_DEVTYPE_WAVEFORM_AUDIO = 522 +MCI_DEVTYPE_SEQUENCER = 523 +MCI_DEVTYPE_FIRST = MCI_DEVTYPE_VCR +MCI_DEVTYPE_LAST = MCI_DEVTYPE_SEQUENCER +MCI_DEVTYPE_FIRST_USER = 0x1000 +MCI_MODE_NOT_READY = (MCI_STRING_OFFSET + 12) +MCI_MODE_STOP = (MCI_STRING_OFFSET + 13) +MCI_MODE_PLAY = (MCI_STRING_OFFSET + 14) +MCI_MODE_RECORD = (MCI_STRING_OFFSET + 15) +MCI_MODE_SEEK = (MCI_STRING_OFFSET + 16) +MCI_MODE_PAUSE = (MCI_STRING_OFFSET + 17) +MCI_MODE_OPEN = (MCI_STRING_OFFSET + 18) +MCI_FORMAT_MILLISECONDS = 0 +MCI_FORMAT_HMS = 1 +MCI_FORMAT_MSF = 2 +MCI_FORMAT_FRAMES = 3 +MCI_FORMAT_SMPTE_24 = 4 +MCI_FORMAT_SMPTE_25 = 5 +MCI_FORMAT_SMPTE_30 = 6 +MCI_FORMAT_SMPTE_30DROP = 7 +MCI_FORMAT_BYTES = 8 +MCI_FORMAT_SAMPLES = 9 +MCI_FORMAT_TMSF = 10 +def MCI_MSF_MINUTE(msf): return ((BYTE)(msf)) + +def MCI_MSF_SECOND(msf): return ((BYTE)(((WORD)(msf)) >> 8)) + +def MCI_MSF_FRAME(msf): return ((BYTE)((msf)>>16)) + +def MCI_TMSF_TRACK(tmsf): return ((BYTE)(tmsf)) + +def MCI_TMSF_MINUTE(tmsf): return ((BYTE)(((WORD)(tmsf)) >> 8)) + +def MCI_TMSF_SECOND(tmsf): return ((BYTE)((tmsf)>>16)) + +def MCI_TMSF_FRAME(tmsf): return ((BYTE)((tmsf)>>24)) + +def MCI_HMS_HOUR(hms): return ((BYTE)(hms)) + +def MCI_HMS_MINUTE(hms): return ((BYTE)(((WORD)(hms)) >> 8)) + +def MCI_HMS_SECOND(hms): return ((BYTE)((hms)>>16)) + +MCI_NOTIFY_SUCCESSFUL = 
0x0001 +MCI_NOTIFY_SUPERSEDED = 0x0002 +MCI_NOTIFY_ABORTED = 0x0004 +MCI_NOTIFY_FAILURE = 0x0008 +MCI_NOTIFY = 0x00000001 +MCI_WAIT = 0x00000002 +MCI_FROM = 0x00000004 +MCI_TO = 0x00000008 +MCI_TRACK = 0x00000010 +MCI_OPEN_SHAREABLE = 0x00000100 +MCI_OPEN_ELEMENT = 0x00000200 +MCI_OPEN_ALIAS = 0x00000400 +MCI_OPEN_ELEMENT_ID = 0x00000800 +MCI_OPEN_TYPE_ID = 0x00001000 +MCI_OPEN_TYPE = 0x00002000 +MCI_SEEK_TO_START = 0x00000100 +MCI_SEEK_TO_END = 0x00000200 +MCI_STATUS_ITEM = 0x00000100 +MCI_STATUS_START = 0x00000200 +MCI_STATUS_LENGTH = 0x00000001 +MCI_STATUS_POSITION = 0x00000002 +MCI_STATUS_NUMBER_OF_TRACKS = 0x00000003 +MCI_STATUS_MODE = 0x00000004 +MCI_STATUS_MEDIA_PRESENT = 0x00000005 +MCI_STATUS_TIME_FORMAT = 0x00000006 +MCI_STATUS_READY = 0x00000007 +MCI_STATUS_CURRENT_TRACK = 0x00000008 +MCI_INFO_PRODUCT = 0x00000100 +MCI_INFO_FILE = 0x00000200 +MCI_INFO_MEDIA_UPC = 0x00000400 +MCI_INFO_MEDIA_IDENTITY = 0x00000800 +MCI_INFO_NAME = 0x00001000 +MCI_INFO_COPYRIGHT = 0x00002000 +MCI_GETDEVCAPS_ITEM = 0x00000100 +MCI_GETDEVCAPS_CAN_RECORD = 0x00000001 +MCI_GETDEVCAPS_HAS_AUDIO = 0x00000002 +MCI_GETDEVCAPS_HAS_VIDEO = 0x00000003 +MCI_GETDEVCAPS_DEVICE_TYPE = 0x00000004 +MCI_GETDEVCAPS_USES_FILES = 0x00000005 +MCI_GETDEVCAPS_COMPOUND_DEVICE = 0x00000006 +MCI_GETDEVCAPS_CAN_EJECT = 0x00000007 +MCI_GETDEVCAPS_CAN_PLAY = 0x00000008 +MCI_GETDEVCAPS_CAN_SAVE = 0x00000009 +MCI_SYSINFO_QUANTITY = 0x00000100 +MCI_SYSINFO_OPEN = 0x00000200 +MCI_SYSINFO_NAME = 0x00000400 +MCI_SYSINFO_INSTALLNAME = 0x00000800 +MCI_SET_DOOR_OPEN = 0x00000100 +MCI_SET_DOOR_CLOSED = 0x00000200 +MCI_SET_TIME_FORMAT = 0x00000400 +MCI_SET_AUDIO = 0x00000800 +MCI_SET_VIDEO = 0x00001000 +MCI_SET_ON = 0x00002000 +MCI_SET_OFF = 0x00004000 +MCI_SET_AUDIO_ALL = 0x00000000 +MCI_SET_AUDIO_LEFT = 0x00000001 +MCI_SET_AUDIO_RIGHT = 0x00000002 +MCI_BREAK_KEY = 0x00000100 +MCI_BREAK_HWND = 0x00000200 +MCI_BREAK_OFF = 0x00000400 +MCI_RECORD_INSERT = 0x00000100 +MCI_RECORD_OVERWRITE = 0x00000200 +MCI_SAVE_FILE = 0x00000100 +MCI_LOAD_FILE = 0x00000100 +MCI_VD_MODE_PARK = (MCI_VD_OFFSET + 1) +MCI_VD_MEDIA_CLV = (MCI_VD_OFFSET + 2) +MCI_VD_MEDIA_CAV = (MCI_VD_OFFSET + 3) +MCI_VD_MEDIA_OTHER = (MCI_VD_OFFSET + 4) +MCI_VD_FORMAT_TRACK = 0x4001 +MCI_VD_PLAY_REVERSE = 0x00010000 +MCI_VD_PLAY_FAST = 0x00020000 +MCI_VD_PLAY_SPEED = 0x00040000 +MCI_VD_PLAY_SCAN = 0x00080000 +MCI_VD_PLAY_SLOW = 0x00100000 +MCI_VD_SEEK_REVERSE = 0x00010000 +MCI_VD_STATUS_SPEED = 0x00004002 +MCI_VD_STATUS_FORWARD = 0x00004003 +MCI_VD_STATUS_MEDIA_TYPE = 0x00004004 +MCI_VD_STATUS_SIDE = 0x00004005 +MCI_VD_STATUS_DISC_SIZE = 0x00004006 +MCI_VD_GETDEVCAPS_CLV = 0x00010000 +MCI_VD_GETDEVCAPS_CAV = 0x00020000 +MCI_VD_SPIN_UP = 0x00010000 +MCI_VD_SPIN_DOWN = 0x00020000 +MCI_VD_GETDEVCAPS_CAN_REVERSE = 0x00004002 +MCI_VD_GETDEVCAPS_FAST_RATE = 0x00004003 +MCI_VD_GETDEVCAPS_SLOW_RATE = 0x00004004 +MCI_VD_GETDEVCAPS_NORMAL_RATE = 0x00004005 +MCI_VD_STEP_FRAMES = 0x00010000 +MCI_VD_STEP_REVERSE = 0x00020000 +MCI_VD_ESCAPE_STRING = 0x00000100 +MCI_CDA_STATUS_TYPE_TRACK = 0x00004001 +MCI_CDA_TRACK_AUDIO = (MCI_CD_OFFSET + 0) +MCI_CDA_TRACK_OTHER = (MCI_CD_OFFSET + 1) +MCI_WAVE_PCM = (MCI_WAVE_OFFSET + 0) +MCI_WAVE_MAPPER = (MCI_WAVE_OFFSET + 1) +MCI_WAVE_OPEN_BUFFER = 0x00010000 +MCI_WAVE_SET_FORMATTAG = 0x00010000 +MCI_WAVE_SET_CHANNELS = 0x00020000 +MCI_WAVE_SET_SAMPLESPERSEC = 0x00040000 +MCI_WAVE_SET_AVGBYTESPERSEC = 0x00080000 +MCI_WAVE_SET_BLOCKALIGN = 0x00100000 +MCI_WAVE_SET_BITSPERSAMPLE = 0x00200000 +MCI_WAVE_INPUT = 0x00400000 +MCI_WAVE_OUTPUT = 0x00800000 
+MCI_WAVE_STATUS_FORMATTAG = 0x00004001 +MCI_WAVE_STATUS_CHANNELS = 0x00004002 +MCI_WAVE_STATUS_SAMPLESPERSEC = 0x00004003 +MCI_WAVE_STATUS_AVGBYTESPERSEC = 0x00004004 +MCI_WAVE_STATUS_BLOCKALIGN = 0x00004005 +MCI_WAVE_STATUS_BITSPERSAMPLE = 0x00004006 +MCI_WAVE_STATUS_LEVEL = 0x00004007 +MCI_WAVE_SET_ANYINPUT = 0x04000000 +MCI_WAVE_SET_ANYOUTPUT = 0x08000000 +MCI_WAVE_GETDEVCAPS_INPUTS = 0x00004001 +MCI_WAVE_GETDEVCAPS_OUTPUTS = 0x00004002 +MCI_SEQ_DIV_PPQN = (0 + MCI_SEQ_OFFSET) +MCI_SEQ_DIV_SMPTE_24 = (1 + MCI_SEQ_OFFSET) +MCI_SEQ_DIV_SMPTE_25 = (2 + MCI_SEQ_OFFSET) +MCI_SEQ_DIV_SMPTE_30DROP = (3 + MCI_SEQ_OFFSET) +MCI_SEQ_DIV_SMPTE_30 = (4 + MCI_SEQ_OFFSET) +MCI_SEQ_FORMAT_SONGPTR = 0x4001 +MCI_SEQ_FILE = 0x4002 +MCI_SEQ_MIDI = 0x4003 +MCI_SEQ_SMPTE = 0x4004 +MCI_SEQ_NONE = 65533 +MCI_SEQ_MAPPER = 65535 +MCI_SEQ_STATUS_TEMPO = 0x00004002 +MCI_SEQ_STATUS_PORT = 0x00004003 +MCI_SEQ_STATUS_SLAVE = 0x00004007 +MCI_SEQ_STATUS_MASTER = 0x00004008 +MCI_SEQ_STATUS_OFFSET = 0x00004009 +MCI_SEQ_STATUS_DIVTYPE = 0x0000400A +MCI_SEQ_STATUS_NAME = 0x0000400B +MCI_SEQ_STATUS_COPYRIGHT = 0x0000400C +MCI_SEQ_SET_TEMPO = 0x00010000 +MCI_SEQ_SET_PORT = 0x00020000 +MCI_SEQ_SET_SLAVE = 0x00040000 +MCI_SEQ_SET_MASTER = 0x00080000 +MCI_SEQ_SET_OFFSET = 0x01000000 +MCI_ANIM_OPEN_WS = 0x00010000 +MCI_ANIM_OPEN_PARENT = 0x00020000 +MCI_ANIM_OPEN_NOSTATIC = 0x00040000 +MCI_ANIM_PLAY_SPEED = 0x00010000 +MCI_ANIM_PLAY_REVERSE = 0x00020000 +MCI_ANIM_PLAY_FAST = 0x00040000 +MCI_ANIM_PLAY_SLOW = 0x00080000 +MCI_ANIM_PLAY_SCAN = 0x00100000 +MCI_ANIM_STEP_REVERSE = 0x00010000 +MCI_ANIM_STEP_FRAMES = 0x00020000 +MCI_ANIM_STATUS_SPEED = 0x00004001 +MCI_ANIM_STATUS_FORWARD = 0x00004002 +MCI_ANIM_STATUS_HWND = 0x00004003 +MCI_ANIM_STATUS_HPAL = 0x00004004 +MCI_ANIM_STATUS_STRETCH = 0x00004005 +MCI_ANIM_INFO_TEXT = 0x00010000 +MCI_ANIM_GETDEVCAPS_CAN_REVERSE = 0x00004001 +MCI_ANIM_GETDEVCAPS_FAST_RATE = 0x00004002 +MCI_ANIM_GETDEVCAPS_SLOW_RATE = 0x00004003 +MCI_ANIM_GETDEVCAPS_NORMAL_RATE = 0x00004004 +MCI_ANIM_GETDEVCAPS_PALETTES = 0x00004006 +MCI_ANIM_GETDEVCAPS_CAN_STRETCH = 0x00004007 +MCI_ANIM_GETDEVCAPS_MAX_WINDOWS = 0x00004008 +MCI_ANIM_REALIZE_NORM = 0x00010000 +MCI_ANIM_REALIZE_BKGD = 0x00020000 +MCI_ANIM_WINDOW_HWND = 0x00010000 +MCI_ANIM_WINDOW_STATE = 0x00040000 +MCI_ANIM_WINDOW_TEXT = 0x00080000 +MCI_ANIM_WINDOW_ENABLE_STRETCH = 0x00100000 +MCI_ANIM_WINDOW_DISABLE_STRETCH = 0x00200000 +MCI_ANIM_WINDOW_DEFAULT = 0x00000000 +MCI_ANIM_RECT = 0x00010000 +MCI_ANIM_PUT_SOURCE = 0x00020000 +MCI_ANIM_PUT_DESTINATION = 0x00040000 +MCI_ANIM_WHERE_SOURCE = 0x00020000 +MCI_ANIM_WHERE_DESTINATION = 0x00040000 +MCI_ANIM_UPDATE_HDC = 0x00020000 +MCI_OVLY_OPEN_WS = 0x00010000 +MCI_OVLY_OPEN_PARENT = 0x00020000 +MCI_OVLY_STATUS_HWND = 0x00004001 +MCI_OVLY_STATUS_STRETCH = 0x00004002 +MCI_OVLY_INFO_TEXT = 0x00010000 +MCI_OVLY_GETDEVCAPS_CAN_STRETCH = 0x00004001 +MCI_OVLY_GETDEVCAPS_CAN_FREEZE = 0x00004002 +MCI_OVLY_GETDEVCAPS_MAX_WINDOWS = 0x00004003 +MCI_OVLY_WINDOW_HWND = 0x00010000 +MCI_OVLY_WINDOW_STATE = 0x00040000 +MCI_OVLY_WINDOW_TEXT = 0x00080000 +MCI_OVLY_WINDOW_ENABLE_STRETCH = 0x00100000 +MCI_OVLY_WINDOW_DISABLE_STRETCH = 0x00200000 +MCI_OVLY_WINDOW_DEFAULT = 0x00000000 +MCI_OVLY_RECT = 0x00010000 +MCI_OVLY_PUT_SOURCE = 0x00020000 +MCI_OVLY_PUT_DESTINATION = 0x00040000 +MCI_OVLY_PUT_FRAME = 0x00080000 +MCI_OVLY_PUT_VIDEO = 0x00100000 +MCI_OVLY_WHERE_SOURCE = 0x00020000 +MCI_OVLY_WHERE_DESTINATION = 0x00040000 +MCI_OVLY_WHERE_FRAME = 0x00080000 +MCI_OVLY_WHERE_VIDEO = 0x00100000 +SELECTDIB = 41 +def DIBINDEX(n): 
return MAKELONG((n),0x10FF) + diff --git a/venv/Lib/site-packages/win32/lib/netbios.py b/venv/Lib/site-packages/win32/lib/netbios.py new file mode 100644 index 00000000..43c64def --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/netbios.py @@ -0,0 +1,292 @@ +import sys +import win32wnet +import struct + +# Constants generated by h2py from nb30.h +NCBNAMSZ = 16 +MAX_LANA = 254 +NAME_FLAGS_MASK = 0x87 +GROUP_NAME = 0x80 +UNIQUE_NAME = 0x00 +REGISTERING = 0x00 +REGISTERED = 0x04 +DEREGISTERED = 0x05 +DUPLICATE = 0x06 +DUPLICATE_DEREG = 0x07 +LISTEN_OUTSTANDING = 0x01 +CALL_PENDING = 0x02 +SESSION_ESTABLISHED = 0x03 +HANGUP_PENDING = 0x04 +HANGUP_COMPLETE = 0x05 +SESSION_ABORTED = 0x06 +ALL_TRANSPORTS = "M\0\0\0" +MS_NBF = "MNBF" +NCBCALL = 0x10 +NCBLISTEN = 0x11 +NCBHANGUP = 0x12 +NCBSEND = 0x14 +NCBRECV = 0x15 +NCBRECVANY = 0x16 +NCBCHAINSEND = 0x17 +NCBDGSEND = 0x20 +NCBDGRECV = 0x21 +NCBDGSENDBC = 0x22 +NCBDGRECVBC = 0x23 +NCBADDNAME = 0x30 +NCBDELNAME = 0x31 +NCBRESET = 0x32 +NCBASTAT = 0x33 +NCBSSTAT = 0x34 +NCBCANCEL = 0x35 +NCBADDGRNAME = 0x36 +NCBENUM = 0x37 +NCBUNLINK = 0x70 +NCBSENDNA = 0x71 +NCBCHAINSENDNA = 0x72 +NCBLANSTALERT = 0x73 +NCBACTION = 0x77 +NCBFINDNAME = 0x78 +NCBTRACE = 0x79 +ASYNCH = 0x80 +NRC_GOODRET = 0x00 +NRC_BUFLEN = 0x01 +NRC_ILLCMD = 0x03 +NRC_CMDTMO = 0x05 +NRC_INCOMP = 0x06 +NRC_BADDR = 0x07 +NRC_SNUMOUT = 0x08 +NRC_NORES = 0x09 +NRC_SCLOSED = 0x0a +NRC_CMDCAN = 0x0b +NRC_DUPNAME = 0x0d +NRC_NAMTFUL = 0x0e +NRC_ACTSES = 0x0f +NRC_LOCTFUL = 0x11 +NRC_REMTFUL = 0x12 +NRC_ILLNN = 0x13 +NRC_NOCALL = 0x14 +NRC_NOWILD = 0x15 +NRC_INUSE = 0x16 +NRC_NAMERR = 0x17 +NRC_SABORT = 0x18 +NRC_NAMCONF = 0x19 +NRC_IFBUSY = 0x21 +NRC_TOOMANY = 0x22 +NRC_BRIDGE = 0x23 +NRC_CANOCCR = 0x24 +NRC_CANCEL = 0x26 +NRC_DUPENV = 0x30 +NRC_ENVNOTDEF = 0x34 +NRC_OSRESNOTAV = 0x35 +NRC_MAXAPPS = 0x36 +NRC_NOSAPS = 0x37 +NRC_NORESOURCES = 0x38 +NRC_INVADDRESS = 0x39 +NRC_INVDDID = 0x3B +NRC_LOCKFAIL = 0x3C +NRC_OPENERR = 0x3f +NRC_SYSTEM = 0x40 +NRC_PENDING = 0xff + + +UCHAR = "B" +WORD = "H" +DWORD = "I" +USHORT = "H" +ULONG = "I" + +ADAPTER_STATUS_ITEMS = [ + ("6s", "adapter_address"), + (UCHAR, "rev_major"), + (UCHAR, "reserved0"), + (UCHAR, "adapter_type"), + (UCHAR, "rev_minor"), + (WORD, "duration"), + (WORD, "frmr_recv"), + (WORD, "frmr_xmit"), + + (WORD, "iframe_recv_err"), + + (WORD, "xmit_aborts"), + (DWORD, "xmit_success"), + (DWORD, "recv_success"), + + (WORD, "iframe_xmit_err"), + + (WORD, "recv_buff_unavail"), + (WORD, "t1_timeouts"), + (WORD, "ti_timeouts"), + (DWORD, "reserved1"), + (WORD, "free_ncbs"), + (WORD, "max_cfg_ncbs"), + (WORD, "max_ncbs"), + (WORD, "xmit_buf_unavail"), + (WORD, "max_dgram_size"), + (WORD, "pending_sess"), + (WORD, "max_cfg_sess"), + (WORD, "max_sess"), + (WORD, "max_sess_pkt_size"), + (WORD, "name_count"), +] + +NAME_BUFFER_ITEMS = [ + (str(NCBNAMSZ) + "s", "name"), + (UCHAR, "name_num"), + (UCHAR, "name_flags"), +] + +SESSION_HEADER_ITEMS = [ + (UCHAR, "sess_name"), + (UCHAR, "num_sess"), + (UCHAR, "rcv_dg_outstanding"), + (UCHAR, "rcv_any_outstanding"), +] + +SESSION_BUFFER_ITEMS = [ + (UCHAR, "lsn"), + (UCHAR, "state"), + (str(NCBNAMSZ)+"s", "local_name"), + (str(NCBNAMSZ)+"s", "remote_name"), + (UCHAR, "rcvs_outstanding"), + (UCHAR, "sends_outstanding"), +] + +LANA_ENUM_ITEMS = [ + ("B", "length"), # Number of valid entries in lana[] + (str(MAX_LANA+1) + "s", "lana"), +] + +FIND_NAME_HEADER_ITEMS = [ + (WORD, "node_count"), + (UCHAR, "reserved"), + (UCHAR, "unique_group"), +] + +FIND_NAME_BUFFER_ITEMS = [ + (UCHAR, "length"), + (UCHAR, 
"access_control"), + (UCHAR, "frame_control"), + ("6s", "destination_addr"), + ("6s", "source_addr"), + ("18s", "routing_info"), +] + +ACTION_HEADER_ITEMS = [ + (ULONG, "transport_id"), + (USHORT, "action_code"), + (USHORT, "reserved"), +] + +del UCHAR, WORD, DWORD, USHORT, ULONG + +NCB = win32wnet.NCB +def Netbios(ncb): + ob = ncb.Buffer + is_ours = hasattr(ob, "_pack") + if is_ours: + ob._pack() + try: + return win32wnet.Netbios(ncb) + finally: + if is_ours: + ob._unpack() + +class NCBStruct: + def __init__(self, items): + self._format = "".join([item[0] for item in items]) + self._items = items + self._buffer_ = win32wnet.NCBBuffer(struct.calcsize(self._format)) + + for format, name in self._items: + if len(format)==1: + if format == 'c': + val = '\0' + else: + val = 0 + else: + l = int(format[:-1]) + val = '\0' * l + self.__dict__[name] = val + + def _pack(self): + vals = [] + for format, name in self._items: + try: + vals.append(self.__dict__[name]) + except KeyError: + vals.append(None) + + self._buffer_[:] = struct.pack(*(self._format,) + tuple(vals)) + + def _unpack(self): + items = struct.unpack(self._format, self._buffer_) + assert len(items)==len(self._items), "unexpected number of items to unpack!" + for (format, name), val in zip(self._items, items): + self.__dict__[name] = val + + def __setattr__(self, attr, val): + if attr not in self.__dict__ and attr[0]!='_': + for format, attr_name in self._items: + if attr==attr_name: + break + else: + raise AttributeError(attr) + self.__dict__[attr] = val + +def ADAPTER_STATUS(): + return NCBStruct(ADAPTER_STATUS_ITEMS) + +def NAME_BUFFER(): + return NCBStruct(NAME_BUFFER_ITEMS) + +def SESSION_HEADER(): + return NCBStruct(SESSION_HEADER_ITEMS) + +def SESSION_BUFFER(): + return NCBStruct(SESSION_BUFFER_ITEMS) + +def LANA_ENUM(): + return NCBStruct(LANA_ENUM_ITEMS) + +def FIND_NAME_HEADER(): + return NCBStruct(FIND_NAME_HEADER_ITEMS) + +def FIND_NAME_BUFFER(): + return NCBStruct(FIND_NAME_BUFFER_ITEMS) + +def ACTION_HEADER(): + return NCBStruct(ACTION_HEADER_ITEMS) + +def byte_to_int(b): + """Given an element in a binary buffer, return its integer value""" + if sys.version_info >= (3,0): + # a byte is already an int in py3k + return b + return ord(b) # its a char from a string in py2k. 
+ +if __name__=='__main__': + # code ported from "HOWTO: Get the MAC Address for an Ethernet Adapter" + # MS KB ID: Q118623 + ncb = NCB() + ncb.Command = NCBENUM + la_enum = LANA_ENUM() + ncb.Buffer = la_enum + rc = Netbios(ncb) + if rc != 0: raise RuntimeError("Unexpected result %d" % (rc,)) + for i in range(la_enum.length): + ncb.Reset() + ncb.Command = NCBRESET + ncb.Lana_num = byte_to_int(la_enum.lana[i]) + rc = Netbios(ncb) + if rc != 0: raise RuntimeError("Unexpected result %d" % (rc,)) + ncb.Reset() + ncb.Command = NCBASTAT + ncb.Lana_num = byte_to_int(la_enum.lana[i]) + ncb.Callname = "* ".encode("ascii") # ensure bytes on py2x and 3k + adapter = ADAPTER_STATUS() + ncb.Buffer = adapter + Netbios(ncb) + print("Adapter address:", end=' ') + for ch in adapter.adapter_address: + print("%02x" % (byte_to_int(ch),), end=' ') + print() diff --git a/venv/Lib/site-packages/win32/lib/ntsecuritycon.py b/venv/Lib/site-packages/win32/lib/ntsecuritycon.py new file mode 100644 index 00000000..716d811d --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/ntsecuritycon.py @@ -0,0 +1,689 @@ +# Hacked from winnt.h + +DELETE = (65536) +READ_CONTROL = (131072) +WRITE_DAC = (262144) +WRITE_OWNER = (524288) +SYNCHRONIZE = (1048576) +STANDARD_RIGHTS_REQUIRED = (983040) +STANDARD_RIGHTS_READ = (READ_CONTROL) +STANDARD_RIGHTS_WRITE = (READ_CONTROL) +STANDARD_RIGHTS_EXECUTE = (READ_CONTROL) +STANDARD_RIGHTS_ALL = (2031616) +SPECIFIC_RIGHTS_ALL = (65535) +ACCESS_SYSTEM_SECURITY = (16777216) +MAXIMUM_ALLOWED = (33554432) +GENERIC_READ = (-2147483648) +GENERIC_WRITE = (1073741824) +GENERIC_EXECUTE = (536870912) +GENERIC_ALL = (268435456) + +# file security permissions +FILE_READ_DATA= ( 1 ) +FILE_LIST_DIRECTORY= ( 1 ) +FILE_WRITE_DATA= ( 2 ) +FILE_ADD_FILE= ( 2 ) +FILE_APPEND_DATA= ( 4 ) +FILE_ADD_SUBDIRECTORY= ( 4 ) +FILE_CREATE_PIPE_INSTANCE= ( 4 ) +FILE_READ_EA= ( 8 ) +FILE_WRITE_EA= ( 16 ) +FILE_EXECUTE= ( 32 ) +FILE_TRAVERSE= ( 32 ) +FILE_DELETE_CHILD= ( 64 ) +FILE_READ_ATTRIBUTES= ( 128 ) +FILE_WRITE_ATTRIBUTES= ( 256 ) +FILE_ALL_ACCESS= (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 511) +FILE_GENERIC_READ= (STANDARD_RIGHTS_READ | FILE_READ_DATA | FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE) +FILE_GENERIC_WRITE= (STANDARD_RIGHTS_WRITE | FILE_WRITE_DATA | FILE_WRITE_ATTRIBUTES | FILE_WRITE_EA | FILE_APPEND_DATA | SYNCHRONIZE) +FILE_GENERIC_EXECUTE= (STANDARD_RIGHTS_EXECUTE | FILE_READ_ATTRIBUTES | FILE_EXECUTE | SYNCHRONIZE) + + +SECURITY_NULL_SID_AUTHORITY = (0,0,0,0,0,0) +SECURITY_WORLD_SID_AUTHORITY = (0,0,0,0,0,1) +SECURITY_LOCAL_SID_AUTHORITY = (0,0,0,0,0,2) +SECURITY_CREATOR_SID_AUTHORITY = (0,0,0,0,0,3) +SECURITY_NON_UNIQUE_AUTHORITY = (0,0,0,0,0,4) +SECURITY_RESOURCE_MANAGER_AUTHORITY = (0,0,0,0,0,9) + +SECURITY_NULL_RID = 0 +SECURITY_WORLD_RID = 0 +SECURITY_LOCAL_RID = 0X00000000 + +SECURITY_CREATOR_OWNER_RID = 0 +SECURITY_CREATOR_GROUP_RID = 1 + +SECURITY_CREATOR_OWNER_SERVER_RID = 2 +SECURITY_CREATOR_GROUP_SERVER_RID = 3 +SECURITY_CREATOR_OWNER_RIGHTS_RID = 4 + +# NT well-known SIDs +SECURITY_NT_AUTHORITY = (0,0,0,0,0,5) + +SECURITY_DIALUP_RID = 1 +SECURITY_NETWORK_RID = 2 +SECURITY_BATCH_RID = 3 +SECURITY_INTERACTIVE_RID = 4 +SECURITY_SERVICE_RID = 6 +SECURITY_ANONYMOUS_LOGON_RID = 7 +SECURITY_PROXY_RID = 8 +SECURITY_SERVER_LOGON_RID = 9 + +SECURITY_LOGON_IDS_RID = 5 +SECURITY_LOGON_IDS_RID_COUNT = 3 + +SECURITY_LOCAL_SYSTEM_RID = 18 + +SECURITY_NT_NON_UNIQUE = 21 + +SECURITY_BUILTIN_DOMAIN_RID = 32 + +# well-known domain relative sub-authority values (RIDs)... 
+DOMAIN_USER_RID_ADMIN = 500 +DOMAIN_USER_RID_GUEST = 501 +DOMAIN_USER_RID_KRBTGT = 502 +DOMAIN_USER_RID_MAX = 999 + +# well-known groups ... +DOMAIN_GROUP_RID_ADMINS = 512 +DOMAIN_GROUP_RID_USERS = 513 +DOMAIN_GROUP_RID_GUESTS = 514 +DOMAIN_GROUP_RID_COMPUTERS = 515 +DOMAIN_GROUP_RID_CONTROLLERS = 516 +DOMAIN_GROUP_RID_CERT_ADMINS = 517 +DOMAIN_GROUP_RID_SCHEMA_ADMINS = 518 +DOMAIN_GROUP_RID_ENTERPRISE_ADMINS = 519 +DOMAIN_GROUP_RID_POLICY_ADMINS = 520 +DOMAIN_GROUP_RID_READONLY_CONTROLLERS = 521 + +# well-known aliases ... +DOMAIN_ALIAS_RID_ADMINS = 544 +DOMAIN_ALIAS_RID_USERS = 545 +DOMAIN_ALIAS_RID_GUESTS = 546 +DOMAIN_ALIAS_RID_POWER_USERS = 547 +DOMAIN_ALIAS_RID_ACCOUNT_OPS = 548 +DOMAIN_ALIAS_RID_SYSTEM_OPS = 549 +DOMAIN_ALIAS_RID_PRINT_OPS = 550 +DOMAIN_ALIAS_RID_BACKUP_OPS = 551 +DOMAIN_ALIAS_RID_REPLICATOR = 552 +DOMAIN_ALIAS_RID_RAS_SERVERS = 553 +DOMAIN_ALIAS_RID_PREW2KCOMPACCESS = 554 +DOMAIN_ALIAS_RID_REMOTE_DESKTOP_USERS = 555 +DOMAIN_ALIAS_RID_NETWORK_CONFIGURATION_OPS = 556 +DOMAIN_ALIAS_RID_INCOMING_FOREST_TRUST_BUILDERS = 557 +DOMAIN_ALIAS_RID_MONITORING_USERS = 558 +DOMAIN_ALIAS_RID_LOGGING_USERS = 559 +DOMAIN_ALIAS_RID_AUTHORIZATIONACCESS = 560 +DOMAIN_ALIAS_RID_TS_LICENSE_SERVERS = 561 +DOMAIN_ALIAS_RID_DCOM_USERS = 562 +DOMAIN_ALIAS_RID_IUSERS = 568 +DOMAIN_ALIAS_RID_CRYPTO_OPERATORS = 569 +DOMAIN_ALIAS_RID_CACHEABLE_PRINCIPALS_GROUP = 571 +DOMAIN_ALIAS_RID_NON_CACHEABLE_PRINCIPALS_GROUP = 572 +DOMAIN_ALIAS_RID_EVENT_LOG_READERS_GROUP = 573 + +SECURITY_MANDATORY_LABEL_AUTHORITY = (0,0,0,0,0,16) +SECURITY_MANDATORY_UNTRUSTED_RID = 0x00000000 +SECURITY_MANDATORY_LOW_RID = 0x00001000 +SECURITY_MANDATORY_MEDIUM_RID = 0x00002000 +SECURITY_MANDATORY_HIGH_RID = 0x00003000 +SECURITY_MANDATORY_SYSTEM_RID = 0x00004000 +SECURITY_MANDATORY_PROTECTED_PROCESS_RID = 0x00005000 +SECURITY_MANDATORY_MAXIMUM_USER_RID = SECURITY_MANDATORY_SYSTEM_RID + +SYSTEM_LUID = (999, 0) +ANONYMOUS_LOGON_LUID = (998, 0) +LOCALSERVICE_LUID = (997, 0) +NETWORKSERVICE_LUID = (996, 0) +IUSER_LUID = (995, 0) + +# Group attributes + +SE_GROUP_MANDATORY = 1 +SE_GROUP_ENABLED_BY_DEFAULT = 2 +SE_GROUP_ENABLED = 4 +SE_GROUP_OWNER = 8 +SE_GROUP_USE_FOR_DENY_ONLY = 16 +SE_GROUP_INTEGRITY = 32 +SE_GROUP_INTEGRITY_ENABLED = 64 +SE_GROUP_RESOURCE = 536870912 +SE_GROUP_LOGON_ID = -1073741824 + + +# User attributes +# (None yet defined.) + +# ACE types +ACCESS_MIN_MS_ACE_TYPE = (0) +ACCESS_ALLOWED_ACE_TYPE = (0) +ACCESS_DENIED_ACE_TYPE = (1) +SYSTEM_AUDIT_ACE_TYPE = (2) +SYSTEM_ALARM_ACE_TYPE = (3) +ACCESS_MAX_MS_V2_ACE_TYPE = (3) +ACCESS_ALLOWED_COMPOUND_ACE_TYPE = (4) +ACCESS_MAX_MS_V3_ACE_TYPE = (4) +ACCESS_MIN_MS_OBJECT_ACE_TYPE = (5) +ACCESS_ALLOWED_OBJECT_ACE_TYPE = (5) +ACCESS_DENIED_OBJECT_ACE_TYPE = (6) +SYSTEM_AUDIT_OBJECT_ACE_TYPE = (7) +SYSTEM_ALARM_OBJECT_ACE_TYPE = (8) +ACCESS_MAX_MS_OBJECT_ACE_TYPE = (8) +ACCESS_MAX_MS_V4_ACE_TYPE = (8) +ACCESS_MAX_MS_ACE_TYPE = (8) +ACCESS_ALLOWED_CALLBACK_ACE_TYPE = 9 +ACCESS_DENIED_CALLBACK_ACE_TYPE = 10 +ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE = 11 +ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE = 12 +SYSTEM_AUDIT_CALLBACK_ACE_TYPE = 13 +SYSTEM_ALARM_CALLBACK_ACE_TYPE = 14 +SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE = 15 +SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE = 16 +SYSTEM_MANDATORY_LABEL_ACE_TYPE = 17 +ACCESS_MAX_MS_V5_ACE_TYPE = 17 + +# The following are the inherit flags that go into the AceFlags field +# of an Ace header. 
+ +OBJECT_INHERIT_ACE = 1 +CONTAINER_INHERIT_ACE = 2 +NO_PROPAGATE_INHERIT_ACE = 4 +INHERIT_ONLY_ACE = 8 +VALID_INHERIT_FLAGS = 15 + + +SUCCESSFUL_ACCESS_ACE_FLAG = 64 +FAILED_ACCESS_ACE_FLAG = 128 + +SE_OWNER_DEFAULTED = 1 +SE_GROUP_DEFAULTED = 2 +SE_DACL_PRESENT = 4 +SE_DACL_DEFAULTED = 8 +SE_SACL_PRESENT = 16 +SE_SACL_DEFAULTED = 32 +SE_SELF_RELATIVE = 32768 + + +SE_PRIVILEGE_ENABLED_BY_DEFAULT = 1 +SE_PRIVILEGE_ENABLED = 2 +SE_PRIVILEGE_USED_FOR_ACCESS = -2147483648 + +PRIVILEGE_SET_ALL_NECESSARY = 1 + +# NT Defined Privileges + +SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege" +SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege" +SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege" +SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege" +SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege" +SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege" +SE_TCB_NAME = "SeTcbPrivilege" +SE_SECURITY_NAME = "SeSecurityPrivilege" +SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege" +SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege" +SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege" +SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege" +SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege" +SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege" +SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege" +SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege" +SE_BACKUP_NAME = "SeBackupPrivilege" +SE_RESTORE_NAME = "SeRestorePrivilege" +SE_SHUTDOWN_NAME = "SeShutdownPrivilege" +SE_DEBUG_NAME = "SeDebugPrivilege" +SE_AUDIT_NAME = "SeAuditPrivilege" +SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege" +SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege" +SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege" + + +# Enum SECURITY_IMPERSONATION_LEVEL: +SecurityAnonymous = 0 +SecurityIdentification = 1 +SecurityImpersonation = 2 +SecurityDelegation = 3 + +SECURITY_MAX_IMPERSONATION_LEVEL = SecurityDelegation + +DEFAULT_IMPERSONATION_LEVEL = SecurityImpersonation + +TOKEN_ASSIGN_PRIMARY = 1 +TOKEN_DUPLICATE = 2 +TOKEN_IMPERSONATE = 4 +TOKEN_QUERY = 8 +TOKEN_QUERY_SOURCE = 16 +TOKEN_ADJUST_PRIVILEGES = 32 +TOKEN_ADJUST_GROUPS = 64 +TOKEN_ADJUST_DEFAULT = 128 + +TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED |\ + TOKEN_ASSIGN_PRIMARY |\ + TOKEN_DUPLICATE |\ + TOKEN_IMPERSONATE |\ + TOKEN_QUERY |\ + TOKEN_QUERY_SOURCE |\ + TOKEN_ADJUST_PRIVILEGES |\ + TOKEN_ADJUST_GROUPS |\ + TOKEN_ADJUST_DEFAULT) + + +TOKEN_READ = (STANDARD_RIGHTS_READ |\ + TOKEN_QUERY) + + +TOKEN_WRITE = (STANDARD_RIGHTS_WRITE |\ + TOKEN_ADJUST_PRIVILEGES |\ + TOKEN_ADJUST_GROUPS |\ + TOKEN_ADJUST_DEFAULT) + +TOKEN_EXECUTE = (STANDARD_RIGHTS_EXECUTE) + +SidTypeUser = 1 +SidTypeGroup = 2 +SidTypeDomain =3 +SidTypeAlias = 4 +SidTypeWellKnownGroup = 5 +SidTypeDeletedAccount = 6 +SidTypeInvalid = 7 +SidTypeUnknown = 8 +SidTypeComputer = 9 +SidTypeLabel = 10 + +# Token types +TokenPrimary = 1 +TokenImpersonation = 2 + +# TOKEN_INFORMATION_CLASS, used with Get/SetTokenInformation +TokenUser = 1 +TokenGroups = 2 +TokenPrivileges = 3 +TokenOwner = 4 +TokenPrimaryGroup = 5 +TokenDefaultDacl = 6 +TokenSource = 7 +TokenType = 8 +TokenImpersonationLevel = 9 +TokenStatistics = 10 +TokenRestrictedSids = 11 +TokenSessionId = 12 +TokenGroupsAndPrivileges = 13 +TokenSessionReference = 14 +TokenSandBoxInert = 15 +TokenAuditPolicy = 16 +TokenOrigin = 17 +TokenElevationType = 18 +TokenLinkedToken = 19 +TokenElevation = 20 +TokenHasRestrictions = 21 +TokenAccessInformation = 22 +TokenVirtualizationAllowed = 23 +TokenVirtualizationEnabled = 24 
+TokenIntegrityLevel = 25 +TokenUIAccess = 26 +TokenMandatoryPolicy = 27 +TokenLogonSid = 28 + +# DirectoryService related constants. +# Generated by h2py from NtDsAPI.h +DS_BEHAVIOR_WIN2000 = 0 +DS_BEHAVIOR_WIN2003_WITH_MIXED_DOMAINS = 1 +DS_BEHAVIOR_WIN2003 = 2 +DS_SYNCED_EVENT_NAME = "NTDSInitialSyncsCompleted" +ACTRL_DS_OPEN = 0x00000000 +ACTRL_DS_CREATE_CHILD = 0x00000001 +ACTRL_DS_DELETE_CHILD = 0x00000002 +ACTRL_DS_LIST = 0x00000004 +ACTRL_DS_SELF = 0x00000008 +ACTRL_DS_READ_PROP = 0x00000010 +ACTRL_DS_WRITE_PROP = 0x00000020 +ACTRL_DS_DELETE_TREE = 0x00000040 +ACTRL_DS_LIST_OBJECT = 0x00000080 +ACTRL_DS_CONTROL_ACCESS = 0x00000100 +NTDSAPI_BIND_ALLOW_DELEGATION = (0x00000001) +DS_REPSYNC_ASYNCHRONOUS_OPERATION = 0x00000001 +DS_REPSYNC_WRITEABLE = 0x00000002 +DS_REPSYNC_PERIODIC = 0x00000004 +DS_REPSYNC_INTERSITE_MESSAGING = 0x00000008 +DS_REPSYNC_ALL_SOURCES = 0x00000010 +DS_REPSYNC_FULL = 0x00000020 +DS_REPSYNC_URGENT = 0x00000040 +DS_REPSYNC_NO_DISCARD = 0x00000080 +DS_REPSYNC_FORCE = 0x00000100 +DS_REPSYNC_ADD_REFERENCE = 0x00000200 +DS_REPSYNC_NEVER_COMPLETED = 0x00000400 +DS_REPSYNC_TWO_WAY = 0x00000800 +DS_REPSYNC_NEVER_NOTIFY = 0x00001000 +DS_REPSYNC_INITIAL = 0x00002000 +DS_REPSYNC_USE_COMPRESSION = 0x00004000 +DS_REPSYNC_ABANDONED = 0x00008000 +DS_REPSYNC_INITIAL_IN_PROGRESS = 0x00010000 +DS_REPSYNC_PARTIAL_ATTRIBUTE_SET = 0x00020000 +DS_REPSYNC_REQUEUE = 0x00040000 +DS_REPSYNC_NOTIFICATION = 0x00080000 +DS_REPSYNC_ASYNCHRONOUS_REPLICA = 0x00100000 +DS_REPSYNC_CRITICAL = 0x00200000 +DS_REPSYNC_FULL_IN_PROGRESS = 0x00400000 +DS_REPSYNC_PREEMPTED = 0x00800000 +DS_REPADD_ASYNCHRONOUS_OPERATION = 0x00000001 +DS_REPADD_WRITEABLE = 0x00000002 +DS_REPADD_INITIAL = 0x00000004 +DS_REPADD_PERIODIC = 0x00000008 +DS_REPADD_INTERSITE_MESSAGING = 0x00000010 +DS_REPADD_ASYNCHRONOUS_REPLICA = 0x00000020 +DS_REPADD_DISABLE_NOTIFICATION = 0x00000040 +DS_REPADD_DISABLE_PERIODIC = 0x00000080 +DS_REPADD_USE_COMPRESSION = 0x00000100 +DS_REPADD_NEVER_NOTIFY = 0x00000200 +DS_REPADD_TWO_WAY = 0x00000400 +DS_REPADD_CRITICAL = 0x00000800 +DS_REPDEL_ASYNCHRONOUS_OPERATION = 0x00000001 +DS_REPDEL_WRITEABLE = 0x00000002 +DS_REPDEL_INTERSITE_MESSAGING = 0x00000004 +DS_REPDEL_IGNORE_ERRORS = 0x00000008 +DS_REPDEL_LOCAL_ONLY = 0x00000010 +DS_REPDEL_NO_SOURCE = 0x00000020 +DS_REPDEL_REF_OK = 0x00000040 +DS_REPMOD_ASYNCHRONOUS_OPERATION = 0x00000001 +DS_REPMOD_WRITEABLE = 0x00000002 +DS_REPMOD_UPDATE_FLAGS = 0x00000001 +DS_REPMOD_UPDATE_ADDRESS = 0x00000002 +DS_REPMOD_UPDATE_SCHEDULE = 0x00000004 +DS_REPMOD_UPDATE_RESULT = 0x00000008 +DS_REPMOD_UPDATE_TRANSPORT = 0x00000010 +DS_REPUPD_ASYNCHRONOUS_OPERATION = 0x00000001 +DS_REPUPD_WRITEABLE = 0x00000002 +DS_REPUPD_ADD_REFERENCE = 0x00000004 +DS_REPUPD_DELETE_REFERENCE = 0x00000008 +DS_INSTANCETYPE_IS_NC_HEAD = 0x00000001 +DS_INSTANCETYPE_NC_IS_WRITEABLE = 0x00000004 +DS_INSTANCETYPE_NC_COMING = 0x00000010 +DS_INSTANCETYPE_NC_GOING = 0x00000020 +NTDSDSA_OPT_IS_GC = ( 1 << 0 ) +NTDSDSA_OPT_DISABLE_INBOUND_REPL = ( 1 << 1 ) +NTDSDSA_OPT_DISABLE_OUTBOUND_REPL = ( 1 << 2 ) +NTDSDSA_OPT_DISABLE_NTDSCONN_XLATE = ( 1 << 3 ) +NTDSCONN_OPT_IS_GENERATED = ( 1 << 0 ) +NTDSCONN_OPT_TWOWAY_SYNC = ( 1 << 1 ) +NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT = (1 << 2 ) +NTDSCONN_OPT_USE_NOTIFY = (1 << 3) +NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION = (1 << 4) +NTDSCONN_OPT_USER_OWNED_SCHEDULE = (1 << 5) +NTDSCONN_KCC_NO_REASON = ( 0 ) +NTDSCONN_KCC_GC_TOPOLOGY = ( 1 << 0 ) +NTDSCONN_KCC_RING_TOPOLOGY = ( 1 << 1 ) +NTDSCONN_KCC_MINIMIZE_HOPS_TOPOLOGY = ( 1 << 2 ) 
+NTDSCONN_KCC_STALE_SERVERS_TOPOLOGY = ( 1 << 3 ) +NTDSCONN_KCC_OSCILLATING_CONNECTION_TOPOLOGY = ( 1 << 4 ) +NTDSCONN_KCC_INTERSITE_GC_TOPOLOGY = (1 << 5) +NTDSCONN_KCC_INTERSITE_TOPOLOGY = (1 << 6) +NTDSCONN_KCC_SERVER_FAILOVER_TOPOLOGY = (1 << 7) +NTDSCONN_KCC_SITE_FAILOVER_TOPOLOGY = (1 << 8) +NTDSCONN_KCC_REDUNDANT_SERVER_TOPOLOGY = (1 << 9) +FRSCONN_PRIORITY_MASK = 0x70000000 +FRSCONN_MAX_PRIORITY = 0x8 +NTDSCONN_OPT_IGNORE_SCHEDULE_MASK = (-2147483648) + +NTDSSETTINGS_OPT_IS_AUTO_TOPOLOGY_DISABLED = ( 1 << 0 ) +NTDSSETTINGS_OPT_IS_TOPL_CLEANUP_DISABLED = ( 1 << 1 ) +NTDSSETTINGS_OPT_IS_TOPL_MIN_HOPS_DISABLED = ( 1 << 2 ) +NTDSSETTINGS_OPT_IS_TOPL_DETECT_STALE_DISABLED = ( 1 << 3 ) +NTDSSETTINGS_OPT_IS_INTER_SITE_AUTO_TOPOLOGY_DISABLED = ( 1 << 4 ) +NTDSSETTINGS_OPT_IS_GROUP_CACHING_ENABLED = ( 1 << 5 ) +NTDSSETTINGS_OPT_FORCE_KCC_WHISTLER_BEHAVIOR = ( 1 << 6 ) +NTDSSETTINGS_OPT_FORCE_KCC_W2K_ELECTION = ( 1 << 7 ) +NTDSSETTINGS_OPT_IS_RAND_BH_SELECTION_DISABLED = ( 1 << 8 ) +NTDSSETTINGS_OPT_IS_SCHEDULE_HASHING_ENABLED = ( 1 << 9 ) +NTDSSETTINGS_OPT_IS_REDUNDANT_SERVER_TOPOLOGY_ENABLED = ( 1 << 10 ) +NTDSSETTINGS_DEFAULT_SERVER_REDUNDANCY = 2 +NTDSTRANSPORT_OPT_IGNORE_SCHEDULES = ( 1 << 0 ) +NTDSTRANSPORT_OPT_BRIDGES_REQUIRED = (1 << 1 ) +NTDSSITECONN_OPT_USE_NOTIFY = ( 1 << 0 ) +NTDSSITECONN_OPT_TWOWAY_SYNC = ( 1 << 1 ) +NTDSSITECONN_OPT_DISABLE_COMPRESSION = ( 1 << 2 ) +NTDSSITELINK_OPT_USE_NOTIFY = ( 1 << 0 ) +NTDSSITELINK_OPT_TWOWAY_SYNC = ( 1 << 1 ) +NTDSSITELINK_OPT_DISABLE_COMPRESSION = ( 1 << 2 ) +GUID_USERS_CONTAINER_A = "a9d1ca15768811d1aded00c04fd8d5cd" +GUID_COMPUTRS_CONTAINER_A = "aa312825768811d1aded00c04fd8d5cd" +GUID_SYSTEMS_CONTAINER_A = "ab1d30f3768811d1aded00c04fd8d5cd" +GUID_DOMAIN_CONTROLLERS_CONTAINER_A = "a361b2ffffd211d1aa4b00c04fd7d83a" +GUID_INFRASTRUCTURE_CONTAINER_A = "2fbac1870ade11d297c400c04fd8d5cd" +GUID_DELETED_OBJECTS_CONTAINER_A = "18e2ea80684f11d2b9aa00c04f79f805" +GUID_LOSTANDFOUND_CONTAINER_A = "ab8153b7768811d1aded00c04fd8d5cd" +GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_A = "22b70c67d56e4efb91e9300fca3dc1aa" +GUID_PROGRAM_DATA_CONTAINER_A = "09460c08ae1e4a4ea0f64aee7daa1e5a" +GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_A = "f4be92a4c777485e878e9421d53087db" +GUID_NTDS_QUOTAS_CONTAINER_A = "6227f0af1fc2410d8e3bb10615bb5b0f" +GUID_USERS_CONTAINER_BYTE = "\xa9\xd1\xca\x15\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd" +GUID_COMPUTRS_CONTAINER_BYTE = "\xaa\x31\x28\x25\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd" +GUID_SYSTEMS_CONTAINER_BYTE = "\xab\x1d\x30\xf3\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd" +GUID_DOMAIN_CONTROLLERS_CONTAINER_BYTE = "\xa3\x61\xb2\xff\xff\xd2\x11\xd1\xaa\x4b\x00\xc0\x4f\xd7\xd8\x3a" +GUID_INFRASTRUCTURE_CONTAINER_BYTE = "\x2f\xba\xc1\x87\x0a\xde\x11\xd2\x97\xc4\x00\xc0\x4f\xd8\xd5\xcd" +GUID_DELETED_OBJECTS_CONTAINER_BYTE = "\x18\xe2\xea\x80\x68\x4f\x11\xd2\xb9\xaa\x00\xc0\x4f\x79\xf8\x05" +GUID_LOSTANDFOUND_CONTAINER_BYTE = "\xab\x81\x53\xb7\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd" +GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_BYTE = "\x22\xb7\x0c\x67\xd5\x6e\x4e\xfb\x91\xe9\x30\x0f\xca\x3d\xc1\xaa" +GUID_PROGRAM_DATA_CONTAINER_BYTE = "\x09\x46\x0c\x08\xae\x1e\x4a\x4e\xa0\xf6\x4a\xee\x7d\xaa\x1e\x5a" +GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_BYTE = "\xf4\xbe\x92\xa4\xc7\x77\x48\x5e\x87\x8e\x94\x21\xd5\x30\x87\xdb" +GUID_NTDS_QUOTAS_CONTAINER_BYTE = "\x62\x27\xf0\xaf\x1f\xc2\x41\x0d\x8e\x3b\xb1\x06\x15\xbb\x5b\x0f" +DS_REPSYNCALL_NO_OPTIONS = 0x00000000 +DS_REPSYNCALL_ABORT_IF_SERVER_UNAVAILABLE = 0x00000001 
+DS_REPSYNCALL_SYNC_ADJACENT_SERVERS_ONLY = 0x00000002 +DS_REPSYNCALL_ID_SERVERS_BY_DN = 0x00000004 +DS_REPSYNCALL_DO_NOT_SYNC = 0x00000008 +DS_REPSYNCALL_SKIP_INITIAL_CHECK = 0x00000010 +DS_REPSYNCALL_PUSH_CHANGES_OUTWARD = 0x00000020 +DS_REPSYNCALL_CROSS_SITE_BOUNDARIES = 0x00000040 +DS_LIST_DSA_OBJECT_FOR_SERVER = 0 +DS_LIST_DNS_HOST_NAME_FOR_SERVER = 1 +DS_LIST_ACCOUNT_OBJECT_FOR_SERVER = 2 +DS_ROLE_SCHEMA_OWNER = 0 +DS_ROLE_DOMAIN_OWNER = 1 +DS_ROLE_PDC_OWNER = 2 +DS_ROLE_RID_OWNER = 3 +DS_ROLE_INFRASTRUCTURE_OWNER = 4 +DS_SCHEMA_GUID_NOT_FOUND = 0 +DS_SCHEMA_GUID_ATTR = 1 +DS_SCHEMA_GUID_ATTR_SET = 2 +DS_SCHEMA_GUID_CLASS = 3 +DS_SCHEMA_GUID_CONTROL_RIGHT = 4 +DS_KCC_FLAG_ASYNC_OP = (1 << 0) +DS_KCC_FLAG_DAMPED = (1 << 1) +DS_EXIST_ADVISORY_MODE = (0x1) +DS_REPL_INFO_FLAG_IMPROVE_LINKED_ATTRS = (0x00000001) +DS_REPL_NBR_WRITEABLE = (0x00000010) +DS_REPL_NBR_SYNC_ON_STARTUP = (0x00000020) +DS_REPL_NBR_DO_SCHEDULED_SYNCS = (0x00000040) +DS_REPL_NBR_USE_ASYNC_INTERSITE_TRANSPORT = (0x00000080) +DS_REPL_NBR_TWO_WAY_SYNC = (0x00000200) +DS_REPL_NBR_RETURN_OBJECT_PARENTS = (0x00000800) +DS_REPL_NBR_FULL_SYNC_IN_PROGRESS = (0x00010000) +DS_REPL_NBR_FULL_SYNC_NEXT_PACKET = (0x00020000) +DS_REPL_NBR_NEVER_SYNCED = (0x00200000) +DS_REPL_NBR_PREEMPTED = (0x01000000) +DS_REPL_NBR_IGNORE_CHANGE_NOTIFICATIONS = (0x04000000) +DS_REPL_NBR_DISABLE_SCHEDULED_SYNC = (0x08000000) +DS_REPL_NBR_COMPRESS_CHANGES = (0x10000000) +DS_REPL_NBR_NO_CHANGE_NOTIFICATIONS = (0x20000000) +DS_REPL_NBR_PARTIAL_ATTRIBUTE_SET = (0x40000000) +DS_REPL_NBR_MODIFIABLE_MASK = \ + ( \ + DS_REPL_NBR_SYNC_ON_STARTUP | \ + DS_REPL_NBR_DO_SCHEDULED_SYNCS | \ + DS_REPL_NBR_TWO_WAY_SYNC | \ + DS_REPL_NBR_IGNORE_CHANGE_NOTIFICATIONS | \ + DS_REPL_NBR_DISABLE_SCHEDULED_SYNC | \ + DS_REPL_NBR_COMPRESS_CHANGES | \ + DS_REPL_NBR_NO_CHANGE_NOTIFICATIONS \ + ) + +# from enum DS_NAME_FORMAT +DS_UNKNOWN_NAME = 0 +DS_FQDN_1779_NAME = 1 +DS_NT4_ACCOUNT_NAME = 2 +DS_DISPLAY_NAME = 3 +DS_UNIQUE_ID_NAME = 6 +DS_CANONICAL_NAME = 7 +DS_USER_PRINCIPAL_NAME = 8 +DS_CANONICAL_NAME_EX = 9 +DS_SERVICE_PRINCIPAL_NAME = 10 +DS_SID_OR_SID_HISTORY_NAME = 11 +DS_DNS_DOMAIN_NAME = 12 + +DS_DOMAIN_SIMPLE_NAME = DS_USER_PRINCIPAL_NAME +DS_ENTERPRISE_SIMPLE_NAME = DS_USER_PRINCIPAL_NAME + +# from enum DS_NAME_FLAGS +DS_NAME_NO_FLAGS = 0x0 +DS_NAME_FLAG_SYNTACTICAL_ONLY = 0x1 +DS_NAME_FLAG_EVAL_AT_DC = 0x2 +DS_NAME_FLAG_GCVERIFY = 0x4 +DS_NAME_FLAG_TRUST_REFERRAL = 0x8 + +# from enum DS_NAME_ERROR +DS_NAME_NO_ERROR = 0 +DS_NAME_ERROR_RESOLVING = 1 +DS_NAME_ERROR_NOT_FOUND = 2 +DS_NAME_ERROR_NOT_UNIQUE = 3 +DS_NAME_ERROR_NO_MAPPING = 4 +DS_NAME_ERROR_DOMAIN_ONLY = 5 +DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING = 6 +DS_NAME_ERROR_TRUST_REFERRAL = 7 + + +# from enum DS_SPN_NAME_TYPE +DS_SPN_DNS_HOST = 0 +DS_SPN_DN_HOST = 1 +DS_SPN_NB_HOST = 2 +DS_SPN_DOMAIN = 3 +DS_SPN_NB_DOMAIN = 4 +DS_SPN_SERVICE = 5 + +# from enum DS_SPN_WRITE_OP +DS_SPN_ADD_SPN_OP = 0 +DS_SPN_REPLACE_SPN_OP = 1 +DS_SPN_DELETE_SPN_OP = 2 + +# Generated by h2py from DsGetDC.h +DS_FORCE_REDISCOVERY = 0x00000001 +DS_DIRECTORY_SERVICE_REQUIRED = 0x00000010 +DS_DIRECTORY_SERVICE_PREFERRED = 0x00000020 +DS_GC_SERVER_REQUIRED = 0x00000040 +DS_PDC_REQUIRED = 0x00000080 +DS_BACKGROUND_ONLY = 0x00000100 +DS_IP_REQUIRED = 0x00000200 +DS_KDC_REQUIRED = 0x00000400 +DS_TIMESERV_REQUIRED = 0x00000800 +DS_WRITABLE_REQUIRED = 0x00001000 +DS_GOOD_TIMESERV_PREFERRED = 0x00002000 +DS_AVOID_SELF = 0x00004000 +DS_ONLY_LDAP_NEEDED = 0x00008000 +DS_IS_FLAT_NAME = 0x00010000 +DS_IS_DNS_NAME = 0x00020000 +DS_RETURN_DNS_NAME = 
0x40000000 +DS_RETURN_FLAT_NAME = (-2147483648) +DSGETDC_VALID_FLAGS = ( \ + DS_FORCE_REDISCOVERY | \ + DS_DIRECTORY_SERVICE_REQUIRED | \ + DS_DIRECTORY_SERVICE_PREFERRED | \ + DS_GC_SERVER_REQUIRED | \ + DS_PDC_REQUIRED | \ + DS_BACKGROUND_ONLY | \ + DS_IP_REQUIRED | \ + DS_KDC_REQUIRED | \ + DS_TIMESERV_REQUIRED | \ + DS_WRITABLE_REQUIRED | \ + DS_GOOD_TIMESERV_PREFERRED | \ + DS_AVOID_SELF | \ + DS_ONLY_LDAP_NEEDED | \ + DS_IS_FLAT_NAME | \ + DS_IS_DNS_NAME | \ + DS_RETURN_FLAT_NAME | \ + DS_RETURN_DNS_NAME ) +DS_INET_ADDRESS = 1 +DS_NETBIOS_ADDRESS = 2 +DS_PDC_FLAG = 0x00000001 +DS_GC_FLAG = 0x00000004 +DS_LDAP_FLAG = 0x00000008 +DS_DS_FLAG = 0x00000010 +DS_KDC_FLAG = 0x00000020 +DS_TIMESERV_FLAG = 0x00000040 +DS_CLOSEST_FLAG = 0x00000080 +DS_WRITABLE_FLAG = 0x00000100 +DS_GOOD_TIMESERV_FLAG = 0x00000200 +DS_NDNC_FLAG = 0x00000400 +DS_PING_FLAGS = 0x0000FFFF +DS_DNS_CONTROLLER_FLAG = 0x20000000 +DS_DNS_DOMAIN_FLAG = 0x40000000 +DS_DNS_FOREST_FLAG = (-2147483648) +DS_DOMAIN_IN_FOREST = 0x0001 +DS_DOMAIN_DIRECT_OUTBOUND = 0x0002 +DS_DOMAIN_TREE_ROOT = 0x0004 +DS_DOMAIN_PRIMARY = 0x0008 +DS_DOMAIN_NATIVE_MODE = 0x0010 +DS_DOMAIN_DIRECT_INBOUND = 0x0020 +DS_DOMAIN_VALID_FLAGS = ( \ + DS_DOMAIN_IN_FOREST | \ + DS_DOMAIN_DIRECT_OUTBOUND | \ + DS_DOMAIN_TREE_ROOT | \ + DS_DOMAIN_PRIMARY | \ + DS_DOMAIN_NATIVE_MODE | \ + DS_DOMAIN_DIRECT_INBOUND ) +DS_GFTI_UPDATE_TDO = 0x1 +DS_GFTI_VALID_FLAGS = 0x1 +DS_ONLY_DO_SITE_NAME = 0x01 +DS_NOTIFY_AFTER_SITE_RECORDS = 0x02 +DS_OPEN_VALID_OPTION_FLAGS = ( DS_ONLY_DO_SITE_NAME | DS_NOTIFY_AFTER_SITE_RECORDS ) +DS_OPEN_VALID_FLAGS = ( \ + DS_FORCE_REDISCOVERY | \ + DS_ONLY_LDAP_NEEDED | \ + DS_KDC_REQUIRED | \ + DS_PDC_REQUIRED | \ + DS_GC_SERVER_REQUIRED | \ + DS_WRITABLE_REQUIRED ) + +## from aclui.h +# SI_OBJECT_INFO.dwFlags +SI_EDIT_PERMS = 0x00000000 +SI_EDIT_OWNER = 0x00000001 +SI_EDIT_AUDITS = 0x00000002 +SI_CONTAINER = 0x00000004 +SI_READONLY = 0x00000008 +SI_ADVANCED = 0x00000010 +SI_RESET = 0x00000020 +SI_OWNER_READONLY = 0x00000040 +SI_EDIT_PROPERTIES = 0x00000080 +SI_OWNER_RECURSE = 0x00000100 +SI_NO_ACL_PROTECT = 0x00000200 +SI_NO_TREE_APPLY = 0x00000400 +SI_PAGE_TITLE = 0x00000800 +SI_SERVER_IS_DC = 0x00001000 +SI_RESET_DACL_TREE = 0x00004000 +SI_RESET_SACL_TREE = 0x00008000 +SI_OBJECT_GUID = 0x00010000 +SI_EDIT_EFFECTIVE = 0x00020000 +SI_RESET_DACL = 0x00040000 +SI_RESET_SACL = 0x00080000 +SI_RESET_OWNER = 0x00100000 +SI_NO_ADDITIONAL_PERMISSION = 0x00200000 +SI_MAY_WRITE = 0x10000000 +SI_EDIT_ALL = (SI_EDIT_PERMS | SI_EDIT_OWNER | SI_EDIT_AUDITS) +SI_AUDITS_ELEVATION_REQUIRED = 0x02000000 +SI_VIEW_ONLY = 0x00400000 +SI_OWNER_ELEVATION_REQUIRED = 0x04000000 +SI_PERMS_ELEVATION_REQUIRED = 0x01000000 + +# SI_ACCESS.dwFlags +SI_ACCESS_SPECIFIC = 0x00010000 +SI_ACCESS_GENERAL = 0x00020000 +SI_ACCESS_CONTAINER = 0x00040000 +SI_ACCESS_PROPERTY = 0x00080000 + +# SI_PAGE_TYPE enum +SI_PAGE_PERM = 0 +SI_PAGE_ADVPERM = 1 +SI_PAGE_AUDIT = 2 +SI_PAGE_OWNER = 3 +SI_PAGE_EFFECTIVE =4 + +CFSTR_ACLUI_SID_INFO_LIST = "CFSTR_ACLUI_SID_INFO_LIST" +PSPCB_SI_INITDIALOG = 1025 ## WM_USER+1 diff --git a/venv/Lib/site-packages/win32/lib/pywin32_bootstrap.py b/venv/Lib/site-packages/win32/lib/pywin32_bootstrap.py new file mode 100644 index 00000000..686a59a8 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/pywin32_bootstrap.py @@ -0,0 +1,28 @@ +# Imported by pywin32.pth to bootstrap the pywin32 environment in "portable" +# environments or any other case where the post-install script isn't run. 
+# +# In short, there's a directory installed by pywin32 named 'pywin32_system32' +# with some important DLLs which need to be found by Python when some pywin32 +# modules are imported. +# If Python has `os.add_dll_directory()`, we need to call it with this path. +# Otherwise, we add this path to PATH. + + +try: + import pywin32_system32 +except ImportError: # Python ≥3.6: replace ImportError with ModuleNotFoundError + pass +else: + import os + # We're guaranteed only that __path__: Iterable[str] + # https://docs.python.org/3/reference/import.html#__path__ + for path in pywin32_system32.__path__: + if os.path.isdir(path): + if hasattr(os, "add_dll_directory"): + os.add_dll_directory(path) + # This is to ensure the pywin32 path is in the beginning to find the + # pywin32 DLLs first and prevent other PATH entries to shadow them + elif not os.environ["PATH"].startswith(path): + os.environ["PATH"] = os.environ["PATH"].replace(os.pathsep + path, "") + os.environ["PATH"] = path + os.pathsep + os.environ["PATH"] + break diff --git a/venv/Lib/site-packages/win32/lib/pywin32_testutil.py b/venv/Lib/site-packages/win32/lib/pywin32_testutil.py new file mode 100644 index 00000000..9b7433b8 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/pywin32_testutil.py @@ -0,0 +1,270 @@ +# Utilities for the pywin32 tests +import sys +import unittest +import gc +import winerror + +## +## General purpose utilities for the test suite. +## + +def int2long(val): + """return a long on py2k""" + return val + 0x100000000 - 0x100000000 + +# The test suite has lots of string constants containing binary data, but +# the strings are used in various "bytes" contexts. +def str2bytes(sval): + if sys.version_info < (3,0) and isinstance(sval, str): + sval = sval.decode("latin1") + return sval.encode("latin1") + + +# Sometimes we want to pass a string that should explicitly be treated as +# a memory blob. +def str2memory(sval): + if sys.version_info < (3,0): + return buffer(sval) + # py3k. + return memoryview(sval.encode("latin1")) + + +# Sometimes we want to pass an object that exposes its memory +def ob2memory(ob): + if sys.version_info < (3,0): + return buffer(ob) + # py3k. + return memoryview(ob) + + +# Note: no str2unicode: we use u'' literals or unicode() function, and 2to3 +# + +## +## unittest related stuff +## + +# This is a specialized TestCase adaptor which wraps a real test. +class LeakTestCase(unittest.TestCase): + """An 'adaptor' which takes another test. In debug builds we execute the + test once to remove one-off side-effects, then capture the total + reference count, then execute the test a few times. If the total + refcount at the end is greater than we first captured, we have a leak! + + In release builds the test is executed just once, as normal. + + Generally used automatically by the test runner - you can safely + ignore this. + """ + def __init__(self, real_test): + unittest.TestCase.__init__(self) + self.real_test = real_test + self.num_test_cases = 1 + self.num_leak_iters = 2 # seems to be enough! 
+ if hasattr(sys, "gettotalrefcount"): + self.num_test_cases = self.num_test_cases + self.num_leak_iters + + def countTestCases(self): + return self.num_test_cases + + def __call__(self, result = None): + # For the COM suite's sake, always ensure we don't leak + # gateways/interfaces + from pythoncom import _GetInterfaceCount, _GetGatewayCount + gc.collect() + ni = _GetInterfaceCount() + ng = _GetGatewayCount() + self.real_test(result) + # Failed - no point checking anything else + if result.shouldStop or not result.wasSuccessful(): + return + self._do_leak_tests(result) + gc.collect() + lost_i = _GetInterfaceCount() - ni + lost_g = _GetGatewayCount() - ng + if lost_i or lost_g: + msg = "%d interface objects and %d gateway objects leaked" \ + % (lost_i, lost_g) + exc = AssertionError(msg) + result.addFailure(self.real_test, (exc.__class__, exc, None)) + + def runTest(self): + assert 0, "not used" + + def _do_leak_tests(self, result = None): + try: + gtrc = sys.gettotalrefcount + except AttributeError: + return # can't do leak tests in this build + # Assume already called once, to prime any caches etc + gc.collect() + trc = gtrc() + for i in range(self.num_leak_iters): + self.real_test(result) + if result.shouldStop: + break + del i # created after we remembered the refcount! + # int division here means one or 2 stray references won't force + # failure, but one per loop + gc.collect() + lost = (gtrc() - trc) // self.num_leak_iters + if lost < 0: + msg = "LeakTest: %s appeared to gain %d references!!" % (self.real_test, -lost) + result.addFailure(self.real_test, (AssertionError, msg, None)) + if lost > 0: + msg = "LeakTest: %s lost %d references" % (self.real_test, lost) + exc = AssertionError(msg) + result.addFailure(self.real_test, (exc.__class__, exc, None)) + + +class TestLoader(unittest.TestLoader): + def loadTestsFromTestCase(self, testCaseClass): + """Return a suite of all tests cases contained in testCaseClass""" + leak_tests = [] + for name in self.getTestCaseNames(testCaseClass): + real_test = testCaseClass(name) + leak_test = self._getTestWrapper(real_test) + leak_tests.append(leak_test) + return self.suiteClass(leak_tests) + + def fixupTestsForLeakTests(self, test): + if isinstance(test, unittest.TestSuite): + test._tests = [self.fixupTestsForLeakTests(t) for t in test._tests] + return test + else: + # just a normal test case. + return self._getTestWrapper(test) + + def _getTestWrapper(self, test): + # one or 2 tests in the COM test suite set this... + no_leak_tests = getattr(test, "no_leak_tests", False) + if no_leak_tests: + print("Test says it doesn't want leak tests!") + return test + return LeakTestCase(test) + + def loadTestsFromModule(self, mod): + if hasattr(mod, "suite"): + tests = mod.suite() + else: + tests = unittest.TestLoader.loadTestsFromModule(self, mod) + return self.fixupTestsForLeakTests(tests) + + def loadTestsFromName(self, name, module=None): + test = unittest.TestLoader.loadTestsFromName(self, name, module) + if isinstance(test, unittest.TestSuite): + pass # hmmm? print "Don't wrap suites yet!", test._tests + elif isinstance(test, unittest.TestCase): + test = self._getTestWrapper(test) + else: + print("XXX - what is", test) + return test + +# Lots of classes necessary to support one simple feature: we want a 3rd +# test result state - "SKIPPED" - to indicate that the test wasn't able +# to be executed for various reasons. Inspired by bzr's tests, but it +# has other concepts, such as "Expected Failure", which we don't bother +# with. 
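[Editor's note - illustrative sketch, not part of the pywin32 diff.] The LeakTestCase adaptor above boils down to one idea: on a debug build of CPython (where sys.gettotalrefcount exists), run the test once to prime any caches, snapshot the total reference count, run it a few more times, and treat per-iteration growth as a leak. A minimal, hypothetical stand-alone version of that check (run_with_leak_check and its iters parameter are invented names, not pywin32 API) might look like:

    import gc
    import sys
    import unittest

    def run_with_leak_check(test: unittest.TestCase, iters: int = 2) -> None:
        result = unittest.TestResult()
        test(result)                        # priming run, as in LeakTestCase.__call__
        gtrc = getattr(sys, "gettotalrefcount", None)
        if gtrc is None:
            return                          # release build: refcount totals unavailable
        gc.collect()
        before = gtrc()
        for _ in range(iters):
            test(result)
        gc.collect()
        # integer division mirrors _do_leak_tests: a stray reference or two is tolerated
        lost = (gtrc() - before) // iters
        if lost > 0:
            raise AssertionError("%s leaked %d references per run" % (test, lost))

The real class additionally tracks pythoncom interface/gateway counts and reports failures through the unittest result object, as shown in the hunk above.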
+ +# win32 error codes that probably mean we need to be elevated (ie, if we +# aren't elevated, we treat these error codes as 'skipped') +non_admin_error_codes = [winerror.ERROR_ACCESS_DENIED, + winerror.ERROR_PRIVILEGE_NOT_HELD] + +_is_admin = None +def check_is_admin(): + global _is_admin + if _is_admin is None: + from win32com.shell.shell import IsUserAnAdmin + import pythoncom + try: + _is_admin = IsUserAnAdmin() + except pythoncom.com_error as exc: + if exc.hresult != winerror.E_NOTIMPL: + raise + # not impl on this platform - must be old - assume is admin + _is_admin = True + return _is_admin + + +# If this exception is raised by a test, the test is reported as a 'skip' +class TestSkipped(Exception): + pass + + +# The 'TestResult' subclass that records the failures and has the special +# handling for the TestSkipped exception. +class TestResult(unittest._TextTestResult): + def __init__(self, *args, **kw): + super(TestResult, self).__init__(*args, **kw) + self.skips = {} # count of skips for each reason. + + def addError(self, test, err): + """Called when an error has occurred. 'err' is a tuple of values as + returned by sys.exc_info(). + """ + # translate a couple of 'well-known' exceptions into 'skipped' + import pywintypes + exc_val = err[1] + # translate ERROR_ACCESS_DENIED for non-admin users to be skipped. + # (access denied errors for an admin user aren't expected.) + if isinstance(exc_val, pywintypes.error) \ + and exc_val.winerror in non_admin_error_codes \ + and not check_is_admin(): + exc_val = TestSkipped(exc_val) + # and COM errors due to objects not being registered (the com test + # suite will attempt to catch this and handle it itself if the user + # is admin) + elif isinstance(exc_val, pywintypes.com_error) and \ + exc_val.hresult in [winerror.CO_E_CLASSSTRING, + winerror.REGDB_E_CLASSNOTREG, + winerror.TYPE_E_LIBNOTREGISTERED]: + exc_val = TestSkipped(exc_val) + # NotImplemented generally means the platform doesn't support the + # functionality. + elif isinstance(exc_val, NotImplementedError): + exc_val = TestSkipped(NotImplementedError) + + if isinstance(exc_val, TestSkipped): + reason = exc_val.args[0] + # if the reason itself is another exception, get its args. + try: + reason = tuple(reason.args) + except (AttributeError, TypeError): + pass + self.skips.setdefault(reason, 0) + self.skips[reason] += 1 + if self.showAll: + self.stream.writeln("SKIP (%s)" % (reason,)) + elif self.dots: + self.stream.write('S') + self.stream.flush() + return + super(TestResult, self).addError(test, err) + + def printErrors(self): + super(TestResult, self).printErrors() + for reason, num_skipped in self.skips.items(): + self.stream.writeln("SKIPPED: %d tests - %s" % (num_skipped, reason)) + +# TestRunner subclass necessary just to get our TestResult hooked up. +class TestRunner(unittest.TextTestRunner): + def _makeResult(self): + return TestResult(self.stream, self.descriptions, self.verbosity) + + +# TestProgream subclass necessary just to get our TestRunner hooked up, +# which is necessary to get our TestResult hooked up *sob* +class TestProgram(unittest.TestProgram): + def runTests(self): + # clobber existing runner - *sob* - it shouldn't be this hard + self.testRunner = TestRunner(verbosity=self.verbosity) + unittest.TestProgram.runTests(self) + +# A convenient entry-point - if used, 'SKIPPED' exceptions will be supressed. 
+def testmain(*args, **kw): + new_kw = kw.copy() + if 'testLoader' not in new_kw: + new_kw['testLoader'] = TestLoader() + program_class = new_kw.get('testProgram', TestProgram) + program_class(*args, **new_kw) diff --git a/venv/Lib/site-packages/win32/lib/pywintypes.py b/venv/Lib/site-packages/win32/lib/pywintypes.py new file mode 100644 index 00000000..5fcced31 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/pywintypes.py @@ -0,0 +1,105 @@ +# Magic utility that "redirects" to pywintypesxx.dll +import importlib.util, importlib.machinery, sys, os +def __import_pywin32_system_module__(modname, globs): + # This has been through a number of iterations. The problem: how to + # locate pywintypesXX.dll when it may be in a number of places, and how + # to avoid ever loading it twice. This problem is compounded by the + # fact that the "right" way to do this requires win32api, but this + # itself requires pywintypesXX. + # And the killer problem is that someone may have done 'import win32api' + # before this code is called. In that case Windows will have already + # loaded pywintypesXX as part of loading win32api - but by the time + # we get here, we may locate a different one. This appears to work, but + # then starts raising bizarre TypeErrors complaining that something + # is not a pywintypes type when it clearly is! + + # So in what we hope is the last major iteration of this, we now + # rely on a _win32sysloader module, implemented in C but not relying + # on pywintypesXX.dll. It then can check if the DLL we are looking for + # lib is already loaded. + # See if this is a debug build. + suffix = '_d' if '_d.pyd' in importlib.machinery.EXTENSION_SUFFIXES else '' + filename = "%s%d%d%s.dll" % \ + (modname, sys.version_info[0], sys.version_info[1], suffix) + if hasattr(sys, "frozen"): + # If we are running from a frozen program (py2exe, McMillan, freeze) + # then we try and load the DLL from our sys.path + # XXX - This path may also benefit from _win32sysloader? However, + # MarkH has never seen the DLL load problem with py2exe programs... + for look in sys.path: + # If the sys.path entry is a (presumably) .zip file, use the + # directory + if os.path.isfile(look): + look = os.path.dirname(look) + found = os.path.join(look, filename) + if os.path.isfile(found): + break + else: + raise ImportError("Module '%s' isn't in frozen sys.path %s" % (modname, sys.path)) + else: + # First see if it already in our process - if so, we must use that. + import _win32sysloader + found = _win32sysloader.GetModuleFilename(filename) + if found is None: + # We ask Windows to load it next. This is in an attempt to + # get the exact same module loaded should pywintypes be imported + # first (which is how we are here) or if, eg, win32api was imported + # first thereby implicitly loading the DLL. + + # Sadly though, it doesn't quite work - if pywintypesxx.dll + # is in system32 *and* the executable's directory, on XP SP2, an + # import of win32api will cause Windows to load pywintypes + # from system32, where LoadLibrary for that name will + # load the one in the exe's dir. + # That shouldn't really matter though, so long as we only ever + # get one loaded. + found = _win32sysloader.LoadModule(filename) + if found is None: + # Windows can't find it - which although isn't relevent here, + # means that we *must* be the first win32 import, as an attempt + # to import win32api etc would fail when Windows attempts to + # locate the DLL. 
+ # This is most likely to happen for "non-admin" installs, where + # we can't put the files anywhere else on the global path. + + # If there is a version in our Python directory, use that + if os.path.isfile(os.path.join(sys.prefix, filename)): + found = os.path.join(sys.prefix, filename) + if found is None: + # Not in the Python directory? Maybe we were installed via + # easy_install... + if os.path.isfile(os.path.join(os.path.dirname(__file__), filename)): + found = os.path.join(os.path.dirname(__file__), filename) + if found is None: + # We might have been installed via PIP and without the post-install + # script having been run, so they might be in the + # lib/site-packages/pywin32_system32 directory. + # This isn't ideal as it means, say 'python -c "import win32api"' + # will not work but 'python -c "import pywintypes, win32api"' will, + # but it's better than nothing... + import distutils.sysconfig + maybe = os.path.join(distutils.sysconfig.get_python_lib(plat_specific=1), + "pywin32_system32", filename) + if os.path.isfile(maybe): + found = maybe + if found is None: + # give up in disgust. + raise ImportError("No system module '%s' (%s)" % (modname, filename)) + # After importing the module, sys.modules is updated to the DLL we just + # loaded - which isn't what we want. So we update sys.modules to refer to + # this module, and update our globals from it. + old_mod = sys.modules[modname] + # Load the DLL. + loader = importlib.machinery.ExtensionFileLoader(modname, found) + spec = importlib.machinery.ModuleSpec(name=modname, loader=loader, origin=found) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + + # Check the sys.modules[] behaviour we describe above is true... + assert sys.modules[modname] is not old_mod + assert sys.modules[modname] is mod + # as above - re-reset to the *old* module object then update globs. 
+ sys.modules[modname] = old_mod + globs.update(mod.__dict__) + +__import_pywin32_system_module__("pywintypes", globals()) diff --git a/venv/Lib/site-packages/win32/lib/rasutil.py b/venv/Lib/site-packages/win32/lib/rasutil.py new file mode 100644 index 00000000..50d86d42 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/rasutil.py @@ -0,0 +1,38 @@ +import win32ras + +stateStrings = { + win32ras.RASCS_OpenPort : "OpenPort", + win32ras.RASCS_PortOpened : "PortOpened", + win32ras.RASCS_ConnectDevice : "ConnectDevice", + win32ras.RASCS_DeviceConnected : "DeviceConnected", + win32ras.RASCS_AllDevicesConnected : "AllDevicesConnected", + win32ras.RASCS_Authenticate : "Authenticate", + win32ras.RASCS_AuthNotify : "AuthNotify", + win32ras.RASCS_AuthRetry : "AuthRetry", + win32ras.RASCS_AuthCallback : "AuthCallback", + win32ras.RASCS_AuthChangePassword : "AuthChangePassword", + win32ras.RASCS_AuthProject : "AuthProject", + win32ras.RASCS_AuthLinkSpeed : "AuthLinkSpeed", + win32ras.RASCS_AuthAck : "AuthAck", + win32ras.RASCS_ReAuthenticate : "ReAuthenticate", + win32ras.RASCS_Authenticated : "Authenticated", + win32ras.RASCS_PrepareForCallback : "PrepareForCallback", + win32ras.RASCS_WaitForModemReset : "WaitForModemReset", + win32ras.RASCS_WaitForCallback : "WaitForCallback", + win32ras.RASCS_Projected : "Projected", + win32ras.RASCS_StartAuthentication : "StartAuthentication", + win32ras.RASCS_CallbackComplete : "CallbackComplete", + win32ras.RASCS_LogonNetwork : "LogonNetwork", + win32ras.RASCS_Interactive : "Interactive", + win32ras.RASCS_RetryAuthentication : "RetryAuthentication", + win32ras.RASCS_CallbackSetByCaller : "CallbackSetByCaller", + win32ras.RASCS_PasswordExpired : "PasswordExpired", + win32ras.RASCS_Connected : "Connected", + win32ras.RASCS_Disconnected : "Disconnected" +} + +def TestCallback( hras, msg, state, error, exterror): + print("Callback called with ", hras, msg, stateStrings[state], error, exterror) + +def test(rasName = "_ Divert Off"): + return win32ras.Dial(None, None, (rasName,),TestCallback) \ No newline at end of file diff --git a/venv/Lib/site-packages/win32/lib/regcheck.py b/venv/Lib/site-packages/win32/lib/regcheck.py new file mode 100644 index 00000000..f767fa16 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/regcheck.py @@ -0,0 +1,124 @@ +# This module is very old and useless in this day and age! It will be +# removed in a few years (ie, 2009 or so...) + +import warnings +warnings.warn("The regcheck module has been pending deprecation since build 210", + category=PendingDeprecationWarning) + +import win32con +import regutil +import win32api +import os +import sys + + +def CheckRegisteredExe(exename): + try: + os.stat(win32api.RegQueryValue(regutil.GetRootKey() , regutil.GetAppPathsKey() + "\\" + exename)) +# except SystemError: + except (os.error,win32api.error): + print("Registration of %s - Not registered correctly" % exename) + +def CheckPathString(pathString): + for path in pathString.split(";"): + if not os.path.isdir(path): + return "'%s' is not a valid directory!" 
% path + return None + +def CheckPythonPaths(verbose): + if verbose: print("Python Paths:") + # Check the core path + if verbose: print("\tCore Path:", end=' ') + try: + appPath = win32api.RegQueryValue(regutil.GetRootKey(), regutil.BuildDefaultPythonKey() + "\\PythonPath") + except win32api.error as exc: + print("** does not exist - ", exc.strerror) + problem = CheckPathString(appPath) + if problem: + print(problem) + else: + if verbose: print(appPath) + + key = win32api.RegOpenKey(regutil.GetRootKey(), regutil.BuildDefaultPythonKey() + "\\PythonPath", 0, win32con.KEY_READ) + try: + keyNo = 0 + while 1: + try: + appName = win32api.RegEnumKey(key, keyNo) + appPath = win32api.RegQueryValue(key, appName) + if verbose: print("\t"+appName+":", end=' ') + if appPath: + problem = CheckPathString(appPath) + if problem: + print(problem) + else: + if verbose: print(appPath) + else: + if verbose: print("(empty)") + keyNo = keyNo + 1 + except win32api.error: + break + finally: + win32api.RegCloseKey(key) + +def CheckHelpFiles(verbose): + if verbose: print("Help Files:") + try: + key = win32api.RegOpenKey(regutil.GetRootKey(), regutil.BuildDefaultPythonKey() + "\\Help", 0, win32con.KEY_READ) + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND: + raise + return + + try: + keyNo = 0 + while 1: + try: + helpDesc = win32api.RegEnumKey(key, keyNo) + helpFile = win32api.RegQueryValue(key, helpDesc) + if verbose: print("\t"+helpDesc+":", end=' ') + # query the os section. + try: + os.stat(helpFile ) + if verbose: print(helpFile) + except os.error: + print("** Help file %s does not exist" % helpFile) + keyNo = keyNo + 1 + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_NO_MORE_ITEMS: + raise + break + finally: + win32api.RegCloseKey(key) + +def CheckRegisteredModules(verbose): + # Check out all registered modules. + k=regutil.BuildDefaultPythonKey() + "\\Modules" + try: + keyhandle = win32api.RegOpenKey(regutil.GetRootKey(), k) + print("WARNING: 'Modules' registry entry is deprectated and evil!") + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND: + raise + return + +def CheckRegistry(verbose=0): + # check the registered modules + if verbose and 'pythonpath' in os.environ: + print("Warning - PythonPath in environment - please check it!") + # Check out all paths on sys.path + + CheckPythonPaths(verbose) + CheckHelpFiles(verbose) + CheckRegisteredModules(verbose) + CheckRegisteredExe("Python.exe") + +if __name__=='__main__': + if len(sys.argv)>1 and sys.argv[1]=='-q': + verbose = 0 + else: + verbose = 1 + CheckRegistry(verbose) diff --git a/venv/Lib/site-packages/win32/lib/regutil.py b/venv/Lib/site-packages/win32/lib/regutil.py new file mode 100644 index 00000000..087f69b4 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/regutil.py @@ -0,0 +1,284 @@ +# Some registry helpers. +import win32api +import win32con +import sys +import os + +error = "Registry utility error" + +# A .py file has a CLSID associated with it (why? - dunno!) +CLSIDPyFile = "{b51df050-06ae-11cf-ad3b-524153480001}" + +RegistryIDPyFile = "Python.File" # The registry "file type" of a .py file +RegistryIDPycFile = "Python.CompiledFile" # The registry "file type" of a .pyc file + +def BuildDefaultPythonKey(): + """Builds a string containing the path to the current registry key. + + The Python registry key contains the Python version. 
This function + uses the version of the DLL used by the current process to get the + registry key currently in use. + """ + return "Software\\Python\\PythonCore\\" + sys.winver + +def GetRootKey(): + """Retrieves the Registry root in use by Python. + """ + keyname = BuildDefaultPythonKey() + try: + k = win32api.RegOpenKey(win32con.HKEY_CURRENT_USER, keyname) + k.close() + return win32con.HKEY_CURRENT_USER + except win32api.error: + return win32con.HKEY_LOCAL_MACHINE + +def GetRegistryDefaultValue(subkey, rootkey = None): + """A helper to return the default value for a key in the registry. + """ + if rootkey is None: rootkey = GetRootKey() + return win32api.RegQueryValue(rootkey, subkey) + +def SetRegistryDefaultValue(subKey, value, rootkey = None): + """A helper to set the default value for a key in the registry + """ + if rootkey is None: rootkey = GetRootKey() + if type(value)==str: + typeId = win32con.REG_SZ + elif type(value)==int: + typeId = win32con.REG_DWORD + else: + raise TypeError("Value must be string or integer - was passed " + repr(value)) + + win32api.RegSetValue(rootkey, subKey, typeId ,value) + +def GetAppPathsKey(): + return "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths" + +def RegisterPythonExe(exeFullPath, exeAlias = None, exeAppPath = None): + """Register a .exe file that uses Python. + + Registers the .exe with the OS. This allows the specified .exe to + be run from the command-line or start button without using the full path, + and also to setup application specific path (ie, os.environ['PATH']). + + Currently the exeAppPath is not supported, so this function is general + purpose, and not specific to Python at all. Later, exeAppPath may provide + a reasonable default that is used. + + exeFullPath -- The full path to the .exe + exeAlias = None -- An alias for the exe - if none, the base portion + of the filename is used. + exeAppPath -- Not supported. + """ + # Note - Dont work on win32s (but we dont care anymore!) + if exeAppPath: + raise error("Do not support exeAppPath argument currently") + if exeAlias is None: + exeAlias = os.path.basename(exeFullPath) + win32api.RegSetValue(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias, win32con.REG_SZ, exeFullPath) + +def GetRegisteredExe(exeAlias): + """Get a registered .exe + """ + return win32api.RegQueryValue(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias) + +def UnregisterPythonExe(exeAlias): + """Unregister a .exe file that uses Python. + """ + try: + win32api.RegDeleteKey(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias) + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND: + raise + return + +def RegisterNamedPath(name, path): + """Register a named path - ie, a named PythonPath entry. + """ + keyStr = BuildDefaultPythonKey() + "\\PythonPath" + if name: keyStr = keyStr + "\\" + name + win32api.RegSetValue(GetRootKey(), keyStr, win32con.REG_SZ, path) + +def UnregisterNamedPath(name): + """Unregister a named path - ie, a named PythonPath entry. + """ + keyStr = BuildDefaultPythonKey() + "\\PythonPath\\" + name + try: + win32api.RegDeleteKey(GetRootKey(), keyStr) + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND: + raise + return + +def GetRegisteredNamedPath(name): + """Get a registered named path, or None if it doesnt exist. 
+ """ + keyStr = BuildDefaultPythonKey() + "\\PythonPath" + if name: keyStr = keyStr + "\\" + name + try: + return win32api.RegQueryValue(GetRootKey(), keyStr) + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND: + raise + return None + + +def RegisterModule(modName, modPath): + """Register an explicit module in the registry. This forces the Python import + mechanism to locate this module directly, without a sys.path search. Thus + a registered module need not appear in sys.path at all. + + modName -- The name of the module, as used by import. + modPath -- The full path and file name of the module. + """ + try: + import os + os.stat(modPath) + except os.error: + print("Warning: Registering non-existant module %s" % modPath) + win32api.RegSetValue(GetRootKey(), + BuildDefaultPythonKey() + "\\Modules\\%s" % modName, + win32con.REG_SZ, modPath) + +def UnregisterModule(modName): + """Unregister an explicit module in the registry. + + modName -- The name of the module, as used by import. + """ + try: + win32api.RegDeleteKey(GetRootKey(), + BuildDefaultPythonKey() + "\\Modules\\%s" % modName) + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND: + raise + +def GetRegisteredHelpFile(helpDesc): + """Given a description, return the registered entry. + """ + try: + return GetRegistryDefaultValue(BuildDefaultPythonKey() + "\\Help\\" + helpDesc) + except win32api.error: + try: + return GetRegistryDefaultValue(BuildDefaultPythonKey() + "\\Help\\" + helpDesc, win32con.HKEY_CURRENT_USER) + except win32api.error: + pass + return None + +def RegisterHelpFile(helpFile, helpPath, helpDesc = None, bCheckFile = 1): + """Register a help file in the registry. + + Note that this used to support writing to the Windows Help + key, however this is no longer done, as it seems to be incompatible. + + helpFile -- the base name of the help file. + helpPath -- the path to the help file + helpDesc -- A description for the help file. If None, the helpFile param is used. + bCheckFile -- A flag indicating if the file existence should be checked. + """ + if helpDesc is None: helpDesc = helpFile + fullHelpFile = os.path.join(helpPath, helpFile) + try: + if bCheckFile: os.stat(fullHelpFile) + except os.error: + raise ValueError("Help file does not exist") + # Now register with Python itself. + win32api.RegSetValue(GetRootKey(), + BuildDefaultPythonKey() + "\\Help\\%s" % helpDesc, win32con.REG_SZ, fullHelpFile) + +def UnregisterHelpFile(helpFile, helpDesc = None): + """Unregister a help file in the registry. + + helpFile -- the base name of the help file. + helpDesc -- A description for the help file. If None, the helpFile param is used. + """ + key = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "Software\\Microsoft\\Windows\\Help", 0, win32con.KEY_ALL_ACCESS) + try: + try: + win32api.RegDeleteValue(key, helpFile) + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND: + raise + finally: + win32api.RegCloseKey(key) + + # Now de-register with Python itself. + if helpDesc is None: helpDesc = helpFile + try: + win32api.RegDeleteKey(GetRootKey(), + BuildDefaultPythonKey() + "\\Help\\%s" % helpDesc) + except win32api.error as exc: + import winerror + if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND: + raise + +def RegisterCoreDLL(coredllName = None): + """Registers the core DLL in the registry. 
+ + If no params are passed, the name of the Python DLL used in + the current process is used and registered. + """ + if coredllName is None: + coredllName = win32api.GetModuleFileName(sys.dllhandle) + # must exist! + else: + try: + os.stat(coredllName) + except os.error: + print("Warning: Registering non-existant core DLL %s" % coredllName) + + hKey = win32api.RegCreateKey(GetRootKey() , BuildDefaultPythonKey()) + try: + win32api.RegSetValue(hKey, "Dll", win32con.REG_SZ, coredllName) + finally: + win32api.RegCloseKey(hKey) + # Lastly, setup the current version to point to me. + win32api.RegSetValue(GetRootKey(), "Software\\Python\\PythonCore\\CurrentVersion", win32con.REG_SZ, sys.winver) + +def RegisterFileExtensions(defPyIcon, defPycIcon, runCommand): + """Register the core Python file extensions. + + defPyIcon -- The default icon to use for .py files, in 'fname,offset' format. + defPycIcon -- The default icon to use for .pyc files, in 'fname,offset' format. + runCommand -- The command line to use for running .py files + """ + # Register the file extensions. + pythonFileId = RegistryIDPyFile + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , ".py", win32con.REG_SZ, pythonFileId) + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , pythonFileId , win32con.REG_SZ, "Python File") + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , "%s\\CLSID" % pythonFileId , win32con.REG_SZ, CLSIDPyFile) + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , "%s\\DefaultIcon" % pythonFileId, win32con.REG_SZ, defPyIcon) + base = "%s\\Shell" % RegistryIDPyFile + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open", win32con.REG_SZ, "Run") + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open\\Command", win32con.REG_SZ, runCommand) + + # Register the .PYC. + pythonFileId = RegistryIDPycFile + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , ".pyc", win32con.REG_SZ, pythonFileId) + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , pythonFileId , win32con.REG_SZ, "Compiled Python File") + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , "%s\\DefaultIcon" % pythonFileId, win32con.REG_SZ, defPycIcon) + base = "%s\\Shell" % pythonFileId + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open", win32con.REG_SZ, "Run") + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open\\Command", win32con.REG_SZ, runCommand) + +def RegisterShellCommand(shellCommand, exeCommand, shellUserCommand = None): + # Last param for "Open" - for a .py file to be executed by the command line + # or shell execute (eg, just entering "foo.py"), the Command must be "Open", + # but you may associate a different name for the right-click menu. 
+ # In our case, normally we have "Open=Run" + base = "%s\\Shell" % RegistryIDPyFile + if shellUserCommand: + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s" % (shellCommand), win32con.REG_SZ, shellUserCommand) + + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\Command" % (shellCommand), win32con.REG_SZ, exeCommand) + +def RegisterDDECommand(shellCommand, ddeApp, ddeTopic, ddeCommand): + base = "%s\\Shell" % RegistryIDPyFile + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\ddeexec" % (shellCommand), win32con.REG_SZ, ddeCommand) + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\ddeexec\\Application" % (shellCommand), win32con.REG_SZ, ddeApp) + win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\ddeexec\\Topic" % (shellCommand), win32con.REG_SZ, ddeTopic) + diff --git a/venv/Lib/site-packages/win32/lib/sspi.py b/venv/Lib/site-packages/win32/lib/sspi.py new file mode 100644 index 00000000..0bcc0777 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/sspi.py @@ -0,0 +1,362 @@ +""" +Helper classes for SSPI authentication via the win32security module. + +SSPI authentication involves a token-exchange "dance", the exact details +of which depends on the authentication provider used. There are also +a number of complex flags and constants that need to be used - in most +cases, there are reasonable defaults. + +These classes attempt to hide these details from you until you really need +to know. They are not designed to handle all cases, just the common ones. +If you need finer control than offered here, just use the win32security +functions directly. +""" +# Based on Roger Upole's sspi demos. +# $Id$ +import win32security, sspicon + +error = win32security.error + +class _BaseAuth(object): + def __init__(self): + self.reset() + + def reset(self): + """Reset everything to an unauthorized state""" + self.ctxt = None + self.authenticated = False + self.initiator_name = None + self.service_name = None + + # The next seq_num for an encrypt/sign operation + self.next_seq_num = 0 + + def _get_next_seq_num(self): + """Get the next sequence number for a transmission. Default + implementation is to increment a counter + """ + ret = self.next_seq_num + self.next_seq_num = self.next_seq_num + 1 + return ret + + def encrypt(self, data): + """Encrypt a string, returning a tuple of (encrypted_data, trailer). + These can be passed to decrypt to get back the original string. + """ + pkg_size_info=self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES) + trailersize=pkg_size_info['SecurityTrailer'] + + encbuf=win32security.PySecBufferDescType() + encbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA)) + encbuf.append(win32security.PySecBufferType(trailersize, sspicon.SECBUFFER_TOKEN)) + encbuf[0].Buffer=data + self.ctxt.EncryptMessage(0,encbuf,self._get_next_seq_num()) + return encbuf[0].Buffer, encbuf[1].Buffer + + def decrypt(self, data, trailer): + """Decrypt a previously encrypted string, returning the orignal data""" + encbuf=win32security.PySecBufferDescType() + encbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA)) + encbuf.append(win32security.PySecBufferType(len(trailer), sspicon.SECBUFFER_TOKEN)) + encbuf[0].Buffer=data + encbuf[1].Buffer=trailer + self.ctxt.DecryptMessage(encbuf,self._get_next_seq_num()) + return encbuf[0].Buffer + + def sign(self, data): + """sign a string suitable for transmission, returning the signature. 
+ Passing the data and signature to verify will determine if the data + is unchanged. + """ + pkg_size_info=self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES) + sigsize=pkg_size_info['MaxSignature'] + sigbuf=win32security.PySecBufferDescType() + sigbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA)) + sigbuf.append(win32security.PySecBufferType(sigsize, sspicon.SECBUFFER_TOKEN)) + sigbuf[0].Buffer=data + + self.ctxt.MakeSignature(0,sigbuf,self._get_next_seq_num()) + return sigbuf[1].Buffer + + def verify(self, data, sig): + """Verifies data and its signature. If verification fails, an sspi.error + will be raised. + """ + sigbuf=win32security.PySecBufferDescType() + sigbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA)) + sigbuf.append(win32security.PySecBufferType(len(sig), sspicon.SECBUFFER_TOKEN)) + + sigbuf[0].Buffer=data + sigbuf[1].Buffer=sig + self.ctxt.VerifySignature(sigbuf,self._get_next_seq_num()) + + def unwrap(self, token): + """ + GSSAPI's unwrap with SSPI. + https://docs.microsoft.com/en-us/windows/win32/secauthn/sspi-kerberos-interoperability-with-gssapi + + Usable mainly with Kerberos SSPI package, but this is not enforced. + + Return the clear text, and a boolean that is True if the token was encrypted. + """ + buffer = win32security.PySecBufferDescType() + # This buffer will contain a "stream", which is the token coming from the other side + buffer.append(win32security.PySecBufferType(len(token), sspicon.SECBUFFER_STREAM)) + buffer[0].Buffer = token + + # This buffer will receive the clear, or just unwrapped text if no encryption was used. + # Will be resized by the lib. + buffer.append(win32security.PySecBufferType(0, sspicon.SECBUFFER_DATA)) + + pfQOP = self.ctxt.DecryptMessage(buffer, self._get_next_seq_num()) + + r = buffer[1].Buffer + return r, not (pfQOP == sspicon.SECQOP_WRAP_NO_ENCRYPT) + + def wrap(self, msg, encrypt=False): + """ + GSSAPI's wrap with SSPI. + https://docs.microsoft.com/en-us/windows/win32/secauthn/sspi-kerberos-interoperability-with-gssapi + + Usable mainly with Kerberos SSPI package, but this is not enforced. + + Wrap a message to be sent to the other side. Encrypted if encrypt is True. + """ + + size_info = self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES) + trailer_size = size_info['SecurityTrailer'] + block_size = size_info['BlockSize'] + + buffer = win32security.PySecBufferDescType() + + # This buffer will contain unencrypted data to wrap, and maybe encrypt. + buffer.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA)) + buffer[0].Buffer = msg + + # Will receive the token that forms the beginning of the msg + buffer.append(win32security.PySecBufferType(trailer_size, sspicon.SECBUFFER_TOKEN)) + + # The trailer is needed in case of block encryption + buffer.append(win32security.PySecBufferType(block_size, sspicon.SECBUFFER_PADDING)) + + fQOP = 0 if encrypt else sspicon.SECQOP_WRAP_NO_ENCRYPT + self.ctxt.EncryptMessage(fQOP, buffer, self._get_next_seq_num()) + + # Sec token, then data, then padding + r = buffer[1].Buffer + buffer[0].Buffer + buffer[2].Buffer + return r + + def _amend_ctx_name(self): + """Adds initiator and service names in the security context for ease of use""" + if not self.authenticated: + raise ValueError("Sec context is not completely authenticated") + + try: + names = self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_NATIVE_NAMES) + except error: + # The SSP doesn't provide these attributes. 
+ pass + else: + self.initiator_name, self.service_name = names + + +class ClientAuth(_BaseAuth): + """Manages the client side of an SSPI authentication handshake + """ + def __init__(self, + pkg_name, # Name of the package to used. + client_name = None, # User for whom credentials are used. + auth_info = None, # or a tuple of (username, domain, password) + targetspn = None, # Target security context provider name. + scflags=None, # security context flags + datarep=sspicon.SECURITY_NETWORK_DREP): + if scflags is None: + scflags = sspicon.ISC_REQ_INTEGRITY|sspicon.ISC_REQ_SEQUENCE_DETECT|\ + sspicon.ISC_REQ_REPLAY_DETECT|sspicon.ISC_REQ_CONFIDENTIALITY + self.scflags=scflags + self.datarep=datarep + self.targetspn=targetspn + self.pkg_info=win32security.QuerySecurityPackageInfo(pkg_name) + self.credentials, \ + self.credentials_expiry=win32security.AcquireCredentialsHandle( + client_name, self.pkg_info['Name'], + sspicon.SECPKG_CRED_OUTBOUND, + None, auth_info) + _BaseAuth.__init__(self) + + + def authorize(self, sec_buffer_in): + """Perform *one* step of the client authentication process. Pass None for the first round""" + if sec_buffer_in is not None and type(sec_buffer_in) != win32security.PySecBufferDescType: + # User passed us the raw data - wrap it into a SecBufferDesc + sec_buffer_new=win32security.PySecBufferDescType() + tokenbuf=win32security.PySecBufferType(self.pkg_info['MaxToken'], + sspicon.SECBUFFER_TOKEN) + tokenbuf.Buffer=sec_buffer_in + sec_buffer_new.append(tokenbuf) + sec_buffer_in = sec_buffer_new + sec_buffer_out=win32security.PySecBufferDescType() + tokenbuf=win32security.PySecBufferType(self.pkg_info['MaxToken'], sspicon.SECBUFFER_TOKEN) + sec_buffer_out.append(tokenbuf) + ## input context handle should be NULL on first call + ctxtin=self.ctxt + if self.ctxt is None: + self.ctxt=win32security.PyCtxtHandleType() + err, attr, exp=win32security.InitializeSecurityContext( + self.credentials, + ctxtin, + self.targetspn, + self.scflags, + self.datarep, + sec_buffer_in, + self.ctxt, + sec_buffer_out) + # Stash these away incase someone needs to know the state from the + # final call. + self.ctxt_attr = attr + self.ctxt_expiry = exp + + if err in (sspicon.SEC_I_COMPLETE_NEEDED,sspicon.SEC_I_COMPLETE_AND_CONTINUE): + self.ctxt.CompleteAuthToken(sec_buffer_out) + + self.authenticated = err == 0 + if self.authenticated: + self._amend_ctx_name() + + return err, sec_buffer_out + +class ServerAuth(_BaseAuth): + """Manages the server side of an SSPI authentication handshake + """ + def __init__(self, + pkg_name, + spn = None, + scflags=None, + datarep=sspicon.SECURITY_NETWORK_DREP): + self.spn=spn + self.datarep=datarep + + if scflags is None: + scflags = sspicon.ASC_REQ_INTEGRITY|sspicon.ASC_REQ_SEQUENCE_DETECT|\ + sspicon.ASC_REQ_REPLAY_DETECT|sspicon.ASC_REQ_CONFIDENTIALITY + # Should we default to sspicon.KerbAddExtraCredentialsMessage + # if pkg_name=='Kerberos'? 
+ self.scflags=scflags + + self.pkg_info=win32security.QuerySecurityPackageInfo(pkg_name) + + self.credentials, \ + self.credentials_expiry=win32security.AcquireCredentialsHandle(spn, + self.pkg_info['Name'], sspicon.SECPKG_CRED_INBOUND, None, None) + _BaseAuth.__init__(self) + + def authorize(self, sec_buffer_in): + """Perform *one* step of the server authentication process.""" + if sec_buffer_in is not None and type(sec_buffer_in) != win32security.PySecBufferDescType: + # User passed us the raw data - wrap it into a SecBufferDesc + sec_buffer_new=win32security.PySecBufferDescType() + tokenbuf=win32security.PySecBufferType(self.pkg_info['MaxToken'], + sspicon.SECBUFFER_TOKEN) + tokenbuf.Buffer=sec_buffer_in + sec_buffer_new.append(tokenbuf) + sec_buffer_in = sec_buffer_new + + sec_buffer_out=win32security.PySecBufferDescType() + tokenbuf=win32security.PySecBufferType(self.pkg_info['MaxToken'], sspicon.SECBUFFER_TOKEN) + sec_buffer_out.append(tokenbuf) + ## input context handle is None initially, then handle returned from last call thereafter + ctxtin=self.ctxt + if self.ctxt is None: + self.ctxt=win32security.PyCtxtHandleType() + err, attr, exp = win32security.AcceptSecurityContext(self.credentials, ctxtin, + sec_buffer_in, self.scflags, + self.datarep, self.ctxt, sec_buffer_out) + + # Stash these away incase someone needs to know the state from the + # final call. + self.ctxt_attr = attr + self.ctxt_expiry = exp + + if err in (sspicon.SEC_I_COMPLETE_NEEDED,sspicon.SEC_I_COMPLETE_AND_CONTINUE): + self.ctxt.CompleteAuthToken(sec_buffer_out) + + self.authenticated = err == 0 + if self.authenticated: + self._amend_ctx_name() + + return err, sec_buffer_out + +if __name__=='__main__': + # This is the security package (the security support provider / the security backend) + # we want to use for this example. + ssp = "Kerberos" # or "NTLM" or "Negotiate" which enable negotiation between + # Kerberos (prefered) and NTLM (if not supported on the other side). + + flags = ( + sspicon.ISC_REQ_MUTUAL_AUTH | # mutual authentication + sspicon.ISC_REQ_INTEGRITY | # check for integrity + sspicon.ISC_REQ_SEQUENCE_DETECT | # enable out-of-order messages + sspicon.ISC_REQ_CONFIDENTIALITY | # request confidentiality + sspicon.ISC_REQ_REPLAY_DETECT # request replay detection + ) + + # Get our identity, mandatory for the Kerberos case *for this example* + # Kerberos cannot be used if we don't tell it the target we want + # to authenticate to. + cred_handle, exp = win32security.AcquireCredentialsHandle( + None, ssp, sspicon.SECPKG_CRED_INBOUND, None, None + ) + cred = cred_handle.QueryCredentialsAttributes(sspicon.SECPKG_CRED_ATTR_NAMES) + print("We are:", cred) + + # Setup the 2 contexts. In real life, only one is needed: the other one is + # created in the process we want to communicate with. + sspiclient=ClientAuth(ssp, scflags=flags, targetspn=cred) + sspiserver=ServerAuth(ssp, scflags=flags) + + print("SSP : %s (%s)" % (sspiclient.pkg_info["Name"], sspiclient.pkg_info["Comment"])) + + # Perform the authentication dance, each loop exchanging more information + # on the way to completing authentication. 
+ sec_buffer=None + client_step = 0 + server_step = 0 + while not(sspiclient.authenticated) or len(sec_buffer[0].Buffer): + client_step += 1 + err, sec_buffer = sspiclient.authorize(sec_buffer) + print("Client step %s" % client_step) + if sspiserver.authenticated and len(sec_buffer[0].Buffer) == 0: + break + + server_step += 1 + err, sec_buffer = sspiserver.authorize(sec_buffer) + print("Server step %s" % server_step) + + # Authentication process is finished. + print("Initiator name from the service side:", sspiserver.initiator_name) + print("Service name from the client side: ", sspiclient.service_name) + + data = "hello".encode("ascii") # py3k-friendly + + # Simple signature, not compatible with GSSAPI. + sig = sspiclient.sign(data) + sspiserver.verify(data, sig) + + # Encryption + encrypted, sig = sspiclient.encrypt(data) + decrypted = sspiserver.decrypt(encrypted, sig) + assert decrypted == data + + # GSSAPI wrapping, no encryption (NTLM always encrypts) + wrapped = sspiclient.wrap(data) + unwrapped, was_encrypted = sspiserver.unwrap(wrapped) + print("encrypted ?", was_encrypted) + assert data == unwrapped + + # GSSAPI wrapping, with encryption + wrapped = sspiserver.wrap(data, encrypt=True) + unwrapped, was_encrypted = sspiclient.unwrap(wrapped) + print("encrypted ?", was_encrypted) + assert data == unwrapped + + print("cool!") diff --git a/venv/Lib/site-packages/win32/lib/sspicon.py b/venv/Lib/site-packages/win32/lib/sspicon.py new file mode 100644 index 00000000..1e61d498 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/sspicon.py @@ -0,0 +1,479 @@ +# Generated by h2py from c:\microsoft sdk\include\sspi.h +ISSP_LEVEL = 32 +ISSP_MODE = 1 +ISSP_LEVEL = 32 +ISSP_MODE = 0 +ISSP_LEVEL = 32 +ISSP_MODE = 1 +def SEC_SUCCESS(Status): return ((Status) >= 0) + +SECPKG_FLAG_INTEGRITY = 1 +SECPKG_FLAG_PRIVACY = 2 +SECPKG_FLAG_TOKEN_ONLY = 4 +SECPKG_FLAG_DATAGRAM = 8 +SECPKG_FLAG_CONNECTION = 16 +SECPKG_FLAG_MULTI_REQUIRED = 32 +SECPKG_FLAG_CLIENT_ONLY = 64 +SECPKG_FLAG_EXTENDED_ERROR = 128 +SECPKG_FLAG_IMPERSONATION = 256 +SECPKG_FLAG_ACCEPT_WIN32_NAME = 512 +SECPKG_FLAG_STREAM = 1024 +SECPKG_FLAG_NEGOTIABLE = 2048 +SECPKG_FLAG_GSS_COMPATIBLE = 4096 +SECPKG_FLAG_LOGON = 8192 +SECPKG_FLAG_ASCII_BUFFERS = 16384 +SECPKG_FLAG_FRAGMENT = 32768 +SECPKG_FLAG_MUTUAL_AUTH = 65536 +SECPKG_FLAG_DELEGATION = 131072 +SECPKG_FLAG_READONLY_WITH_CHECKSUM = 262144 +SECPKG_ID_NONE = 65535 + +SECBUFFER_VERSION = 0 +SECBUFFER_EMPTY = 0 +SECBUFFER_DATA = 1 +SECBUFFER_TOKEN = 2 +SECBUFFER_PKG_PARAMS = 3 +SECBUFFER_MISSING = 4 +SECBUFFER_EXTRA = 5 +SECBUFFER_STREAM_TRAILER = 6 +SECBUFFER_STREAM_HEADER = 7 +SECBUFFER_NEGOTIATION_INFO = 8 +SECBUFFER_PADDING = 9 +SECBUFFER_STREAM = 10 +SECBUFFER_MECHLIST = 11 +SECBUFFER_MECHLIST_SIGNATURE = 12 +SECBUFFER_TARGET = 13 +SECBUFFER_CHANNEL_BINDINGS = 14 +SECBUFFER_ATTRMASK = (-268435456) +SECBUFFER_READONLY = (-2147483648) +SECBUFFER_READONLY_WITH_CHECKSUM = 268435456 +SECBUFFER_RESERVED = 1610612736 + +SECURITY_NATIVE_DREP = 16 +SECURITY_NETWORK_DREP = 0 + +SECPKG_CRED_INBOUND = 1 +SECPKG_CRED_OUTBOUND = 2 +SECPKG_CRED_BOTH = 3 +SECPKG_CRED_DEFAULT = 4 +SECPKG_CRED_RESERVED = -268435456 + +ISC_REQ_DELEGATE = 1 +ISC_REQ_MUTUAL_AUTH = 2 +ISC_REQ_REPLAY_DETECT = 4 +ISC_REQ_SEQUENCE_DETECT = 8 +ISC_REQ_CONFIDENTIALITY = 16 +ISC_REQ_USE_SESSION_KEY = 32 +ISC_REQ_PROMPT_FOR_CREDS = 64 +ISC_REQ_USE_SUPPLIED_CREDS = 128 +ISC_REQ_ALLOCATE_MEMORY = 256 +ISC_REQ_USE_DCE_STYLE = 512 +ISC_REQ_DATAGRAM = 1024 +ISC_REQ_CONNECTION = 2048 +ISC_REQ_CALL_LEVEL = 4096 
+ISC_REQ_FRAGMENT_SUPPLIED = 8192 +ISC_REQ_EXTENDED_ERROR = 16384 +ISC_REQ_STREAM = 32768 +ISC_REQ_INTEGRITY = 65536 +ISC_REQ_IDENTIFY = 131072 +ISC_REQ_NULL_SESSION = 262144 +ISC_REQ_MANUAL_CRED_VALIDATION = 524288 +ISC_REQ_RESERVED1 = 1048576 +ISC_REQ_FRAGMENT_TO_FIT = 2097152 +ISC_REQ_HTTP = 0x10000000 +ISC_RET_DELEGATE = 1 +ISC_RET_MUTUAL_AUTH = 2 +ISC_RET_REPLAY_DETECT = 4 +ISC_RET_SEQUENCE_DETECT = 8 +ISC_RET_CONFIDENTIALITY = 16 +ISC_RET_USE_SESSION_KEY = 32 +ISC_RET_USED_COLLECTED_CREDS = 64 +ISC_RET_USED_SUPPLIED_CREDS = 128 +ISC_RET_ALLOCATED_MEMORY = 256 +ISC_RET_USED_DCE_STYLE = 512 +ISC_RET_DATAGRAM = 1024 +ISC_RET_CONNECTION = 2048 +ISC_RET_INTERMEDIATE_RETURN = 4096 +ISC_RET_CALL_LEVEL = 8192 +ISC_RET_EXTENDED_ERROR = 16384 +ISC_RET_STREAM = 32768 +ISC_RET_INTEGRITY = 65536 +ISC_RET_IDENTIFY = 131072 +ISC_RET_NULL_SESSION = 262144 +ISC_RET_MANUAL_CRED_VALIDATION = 524288 +ISC_RET_RESERVED1 = 1048576 +ISC_RET_FRAGMENT_ONLY = 2097152 + +ASC_REQ_DELEGATE = 1 +ASC_REQ_MUTUAL_AUTH = 2 +ASC_REQ_REPLAY_DETECT = 4 +ASC_REQ_SEQUENCE_DETECT = 8 +ASC_REQ_CONFIDENTIALITY = 16 +ASC_REQ_USE_SESSION_KEY = 32 +ASC_REQ_ALLOCATE_MEMORY = 256 +ASC_REQ_USE_DCE_STYLE = 512 +ASC_REQ_DATAGRAM = 1024 +ASC_REQ_CONNECTION = 2048 +ASC_REQ_CALL_LEVEL = 4096 +ASC_REQ_EXTENDED_ERROR = 32768 +ASC_REQ_STREAM = 65536 +ASC_REQ_INTEGRITY = 131072 +ASC_REQ_LICENSING = 262144 +ASC_REQ_IDENTIFY = 524288 +ASC_REQ_ALLOW_NULL_SESSION = 1048576 +ASC_REQ_ALLOW_NON_USER_LOGONS = 2097152 +ASC_REQ_ALLOW_CONTEXT_REPLAY = 4194304 +ASC_REQ_FRAGMENT_TO_FIT = 8388608 +ASC_REQ_FRAGMENT_SUPPLIED = 8192 +ASC_REQ_NO_TOKEN = 16777216 +ASC_RET_DELEGATE = 1 +ASC_RET_MUTUAL_AUTH = 2 +ASC_RET_REPLAY_DETECT = 4 +ASC_RET_SEQUENCE_DETECT = 8 +ASC_RET_CONFIDENTIALITY = 16 +ASC_RET_USE_SESSION_KEY = 32 +ASC_RET_ALLOCATED_MEMORY = 256 +ASC_RET_USED_DCE_STYLE = 512 +ASC_RET_DATAGRAM = 1024 +ASC_RET_CONNECTION = 2048 +ASC_RET_CALL_LEVEL = 8192 +ASC_RET_THIRD_LEG_FAILED = 16384 +ASC_RET_EXTENDED_ERROR = 32768 +ASC_RET_STREAM = 65536 +ASC_RET_INTEGRITY = 131072 +ASC_RET_LICENSING = 262144 +ASC_RET_IDENTIFY = 524288 +ASC_RET_NULL_SESSION = 1048576 +ASC_RET_ALLOW_NON_USER_LOGONS = 2097152 +ASC_RET_ALLOW_CONTEXT_REPLAY = 4194304 +ASC_RET_FRAGMENT_ONLY = 8388608 + +SECPKG_CRED_ATTR_NAMES = 1 +SECPKG_ATTR_SIZES = 0 +SECPKG_ATTR_NAMES = 1 +SECPKG_ATTR_LIFESPAN = 2 +SECPKG_ATTR_DCE_INFO = 3 +SECPKG_ATTR_STREAM_SIZES = 4 +SECPKG_ATTR_KEY_INFO = 5 +SECPKG_ATTR_AUTHORITY = 6 +SECPKG_ATTR_PROTO_INFO = 7 +SECPKG_ATTR_PASSWORD_EXPIRY = 8 +SECPKG_ATTR_SESSION_KEY = 9 +SECPKG_ATTR_PACKAGE_INFO = 10 +SECPKG_ATTR_USER_FLAGS = 11 +SECPKG_ATTR_NEGOTIATION_INFO = 12 +SECPKG_ATTR_NATIVE_NAMES = 13 +SECPKG_ATTR_FLAGS = 14 +SECPKG_ATTR_USE_VALIDATED = 15 +SECPKG_ATTR_CREDENTIAL_NAME = 16 +SECPKG_ATTR_TARGET_INFORMATION = 17 +SECPKG_ATTR_ACCESS_TOKEN = 18 +SECPKG_ATTR_TARGET = 19 +SECPKG_ATTR_AUTHENTICATION_ID = 20 + +## attributes from schannel.h +SECPKG_ATTR_REMOTE_CERT_CONTEXT = 83 +SECPKG_ATTR_LOCAL_CERT_CONTEXT = 84 +SECPKG_ATTR_ROOT_STORE = 85 +SECPKG_ATTR_SUPPORTED_ALGS = 86 +SECPKG_ATTR_CIPHER_STRENGTHS = 87 +SECPKG_ATTR_SUPPORTED_PROTOCOLS = 88 +SECPKG_ATTR_ISSUER_LIST_EX = 89 +SECPKG_ATTR_CONNECTION_INFO = 90 +SECPKG_ATTR_EAP_KEY_BLOCK = 91 +SECPKG_ATTR_MAPPED_CRED_ATTR = 92 +SECPKG_ATTR_SESSION_INFO = 93 +SECPKG_ATTR_APP_DATA = 94 + +SECPKG_NEGOTIATION_COMPLETE = 0 +SECPKG_NEGOTIATION_OPTIMISTIC = 1 +SECPKG_NEGOTIATION_IN_PROGRESS = 2 +SECPKG_NEGOTIATION_DIRECT = 3 +SECPKG_NEGOTIATION_TRY_MULTICRED = 4 +SECPKG_CONTEXT_EXPORT_RESET_NEW = 1 
+SECPKG_CONTEXT_EXPORT_DELETE_OLD = 2 +SECQOP_WRAP_NO_ENCRYPT = (-2147483647) +SECURITY_ENTRYPOINT_ANSIW = "InitSecurityInterfaceW" +SECURITY_ENTRYPOINT_ANSIA = "InitSecurityInterfaceA" +SECURITY_ENTRYPOINT16 = "INITSECURITYINTERFACEA" +SECURITY_ENTRYPOINT_ANSI = SECURITY_ENTRYPOINT_ANSIW +SECURITY_ENTRYPOINT_ANSI = SECURITY_ENTRYPOINT_ANSIA +SECURITY_ENTRYPOINT = SECURITY_ENTRYPOINT16 +SECURITY_ENTRYPOINT_ANSI = SECURITY_ENTRYPOINT16 +SECURITY_SUPPORT_PROVIDER_INTERFACE_VERSION = 1 +SECURITY_SUPPORT_PROVIDER_INTERFACE_VERSION_2 = 2 +SASL_OPTION_SEND_SIZE = 1 +SASL_OPTION_RECV_SIZE = 2 +SASL_OPTION_AUTHZ_STRING = 3 +SASL_OPTION_AUTHZ_PROCESSING = 4 +SEC_WINNT_AUTH_IDENTITY_ANSI = 1 +SEC_WINNT_AUTH_IDENTITY_UNICODE = 2 +SEC_WINNT_AUTH_IDENTITY_VERSION = 512 +SEC_WINNT_AUTH_IDENTITY_MARSHALLED = 4 +SEC_WINNT_AUTH_IDENTITY_ONLY = 8 +SECPKG_OPTIONS_TYPE_UNKNOWN = 0 +SECPKG_OPTIONS_TYPE_LSA = 1 +SECPKG_OPTIONS_TYPE_SSPI = 2 +SECPKG_OPTIONS_PERMANENT = 1 + +SEC_E_INSUFFICIENT_MEMORY = -2146893056 +SEC_E_INVALID_HANDLE = -2146893055 +SEC_E_UNSUPPORTED_FUNCTION = -2146893054 +SEC_E_TARGET_UNKNOWN = -2146893053 +SEC_E_INTERNAL_ERROR = -2146893052 +SEC_E_SECPKG_NOT_FOUND = -2146893051 +SEC_E_NOT_OWNER = -2146893050 +SEC_E_CANNOT_INSTALL = -2146893049 +SEC_E_INVALID_TOKEN = -2146893048 +SEC_E_CANNOT_PACK = -2146893047 +SEC_E_QOP_NOT_SUPPORTED = -2146893046 +SEC_E_NO_IMPERSONATION = -2146893045 +SEC_E_LOGON_DENIED = -2146893044 +SEC_E_UNKNOWN_CREDENTIALS = -2146893043 +SEC_E_NO_CREDENTIALS = -2146893042 +SEC_E_MESSAGE_ALTERED = -2146893041 +SEC_E_OUT_OF_SEQUENCE = -2146893040 +SEC_E_NO_AUTHENTICATING_AUTHORITY = -2146893039 +SEC_I_CONTINUE_NEEDED = 590610 +SEC_I_COMPLETE_NEEDED = 590611 +SEC_I_COMPLETE_AND_CONTINUE = 590612 +SEC_I_LOCAL_LOGON = 590613 +SEC_E_BAD_PKGID = -2146893034 +SEC_E_CONTEXT_EXPIRED = -2146893033 +SEC_I_CONTEXT_EXPIRED = 590615 +SEC_E_INCOMPLETE_MESSAGE = -2146893032 +SEC_E_INCOMPLETE_CREDENTIALS = -2146893024 +SEC_E_BUFFER_TOO_SMALL = -2146893023 +SEC_I_INCOMPLETE_CREDENTIALS = 590624 +SEC_I_RENEGOTIATE = 590625 +SEC_E_WRONG_PRINCIPAL = -2146893022 +SEC_I_NO_LSA_CONTEXT = 590627 +SEC_E_TIME_SKEW = -2146893020 +SEC_E_UNTRUSTED_ROOT = -2146893019 +SEC_E_ILLEGAL_MESSAGE = -2146893018 +SEC_E_CERT_UNKNOWN = -2146893017 +SEC_E_CERT_EXPIRED = -2146893016 +SEC_E_ENCRYPT_FAILURE = -2146893015 +SEC_E_DECRYPT_FAILURE = -2146893008 +SEC_E_ALGORITHM_MISMATCH = -2146893007 +SEC_E_SECURITY_QOS_FAILED = -2146893006 +SEC_E_UNFINISHED_CONTEXT_DELETED = -2146893005 +SEC_E_NO_TGT_REPLY = -2146893004 +SEC_E_NO_IP_ADDRESSES = -2146893003 +SEC_E_WRONG_CREDENTIAL_HANDLE = -2146893002 +SEC_E_CRYPTO_SYSTEM_INVALID = -2146893001 +SEC_E_MAX_REFERRALS_EXCEEDED = -2146893000 +SEC_E_MUST_BE_KDC = -2146892999 +SEC_E_STRONG_CRYPTO_NOT_SUPPORTED = -2146892998 +SEC_E_TOO_MANY_PRINCIPALS = -2146892997 +SEC_E_NO_PA_DATA = -2146892996 +SEC_E_PKINIT_NAME_MISMATCH = -2146892995 +SEC_E_SMARTCARD_LOGON_REQUIRED = -2146892994 +SEC_E_SHUTDOWN_IN_PROGRESS = -2146892993 +SEC_E_KDC_INVALID_REQUEST = -2146892992 +SEC_E_KDC_UNABLE_TO_REFER = -2146892991 +SEC_E_KDC_UNKNOWN_ETYPE = -2146892990 +SEC_E_UNSUPPORTED_PREAUTH = -2146892989 +SEC_E_DELEGATION_REQUIRED = -2146892987 +SEC_E_BAD_BINDINGS = -2146892986 +SEC_E_MULTIPLE_ACCOUNTS = -2146892985 +SEC_E_NO_KERB_KEY = -2146892984 + +ERROR_IPSEC_QM_POLICY_EXISTS = 13000 +ERROR_IPSEC_QM_POLICY_NOT_FOUND = 13001 +ERROR_IPSEC_QM_POLICY_IN_USE = 13002 +ERROR_IPSEC_MM_POLICY_EXISTS = 13003 +ERROR_IPSEC_MM_POLICY_NOT_FOUND = 13004 +ERROR_IPSEC_MM_POLICY_IN_USE = 13005 
+ERROR_IPSEC_MM_FILTER_EXISTS = 13006 +ERROR_IPSEC_MM_FILTER_NOT_FOUND = 13007 +ERROR_IPSEC_TRANSPORT_FILTER_EXISTS = 13008 +ERROR_IPSEC_TRANSPORT_FILTER_NOT_FOUND = 13009 +ERROR_IPSEC_MM_AUTH_EXISTS = 13010 +ERROR_IPSEC_MM_AUTH_NOT_FOUND = 13011 +ERROR_IPSEC_MM_AUTH_IN_USE = 13012 +ERROR_IPSEC_DEFAULT_MM_POLICY_NOT_FOUND = 13013 +ERROR_IPSEC_DEFAULT_MM_AUTH_NOT_FOUND = 13014 +ERROR_IPSEC_DEFAULT_QM_POLICY_NOT_FOUND = 13015 +ERROR_IPSEC_TUNNEL_FILTER_EXISTS = 13016 +ERROR_IPSEC_TUNNEL_FILTER_NOT_FOUND = 13017 +ERROR_IPSEC_MM_FILTER_PENDING_DELETION = 13018 +ERROR_IPSEC_TRANSPORT_FILTER_PENDING_DELETION = 13019 +ERROR_IPSEC_TUNNEL_FILTER_PENDING_DELETION = 13020 +ERROR_IPSEC_MM_POLICY_PENDING_DELETION = 13021 +ERROR_IPSEC_MM_AUTH_PENDING_DELETION = 13022 +ERROR_IPSEC_QM_POLICY_PENDING_DELETION = 13023 +WARNING_IPSEC_MM_POLICY_PRUNED = 13024 +WARNING_IPSEC_QM_POLICY_PRUNED = 13025 +ERROR_IPSEC_IKE_NEG_STATUS_BEGIN = 13800 +ERROR_IPSEC_IKE_AUTH_FAIL = 13801 +ERROR_IPSEC_IKE_ATTRIB_FAIL = 13802 +ERROR_IPSEC_IKE_NEGOTIATION_PENDING = 13803 +ERROR_IPSEC_IKE_GENERAL_PROCESSING_ERROR = 13804 +ERROR_IPSEC_IKE_TIMED_OUT = 13805 +ERROR_IPSEC_IKE_NO_CERT = 13806 +ERROR_IPSEC_IKE_SA_DELETED = 13807 +ERROR_IPSEC_IKE_SA_REAPED = 13808 +ERROR_IPSEC_IKE_MM_ACQUIRE_DROP = 13809 +ERROR_IPSEC_IKE_QM_ACQUIRE_DROP = 13810 +ERROR_IPSEC_IKE_QUEUE_DROP_MM = 13811 +ERROR_IPSEC_IKE_QUEUE_DROP_NO_MM = 13812 +ERROR_IPSEC_IKE_DROP_NO_RESPONSE = 13813 +ERROR_IPSEC_IKE_MM_DELAY_DROP = 13814 +ERROR_IPSEC_IKE_QM_DELAY_DROP = 13815 +ERROR_IPSEC_IKE_ERROR = 13816 +ERROR_IPSEC_IKE_CRL_FAILED = 13817 +ERROR_IPSEC_IKE_INVALID_KEY_USAGE = 13818 +ERROR_IPSEC_IKE_INVALID_CERT_TYPE = 13819 +ERROR_IPSEC_IKE_NO_PRIVATE_KEY = 13820 +ERROR_IPSEC_IKE_DH_FAIL = 13822 +ERROR_IPSEC_IKE_INVALID_HEADER = 13824 +ERROR_IPSEC_IKE_NO_POLICY = 13825 +ERROR_IPSEC_IKE_INVALID_SIGNATURE = 13826 +ERROR_IPSEC_IKE_KERBEROS_ERROR = 13827 +ERROR_IPSEC_IKE_NO_PUBLIC_KEY = 13828 +ERROR_IPSEC_IKE_PROCESS_ERR = 13829 +ERROR_IPSEC_IKE_PROCESS_ERR_SA = 13830 +ERROR_IPSEC_IKE_PROCESS_ERR_PROP = 13831 +ERROR_IPSEC_IKE_PROCESS_ERR_TRANS = 13832 +ERROR_IPSEC_IKE_PROCESS_ERR_KE = 13833 +ERROR_IPSEC_IKE_PROCESS_ERR_ID = 13834 +ERROR_IPSEC_IKE_PROCESS_ERR_CERT = 13835 +ERROR_IPSEC_IKE_PROCESS_ERR_CERT_REQ = 13836 +ERROR_IPSEC_IKE_PROCESS_ERR_HASH = 13837 +ERROR_IPSEC_IKE_PROCESS_ERR_SIG = 13838 +ERROR_IPSEC_IKE_PROCESS_ERR_NONCE = 13839 +ERROR_IPSEC_IKE_PROCESS_ERR_NOTIFY = 13840 +ERROR_IPSEC_IKE_PROCESS_ERR_DELETE = 13841 +ERROR_IPSEC_IKE_PROCESS_ERR_VENDOR = 13842 +ERROR_IPSEC_IKE_INVALID_PAYLOAD = 13843 +ERROR_IPSEC_IKE_LOAD_SOFT_SA = 13844 +ERROR_IPSEC_IKE_SOFT_SA_TORN_DOWN = 13845 +ERROR_IPSEC_IKE_INVALID_COOKIE = 13846 +ERROR_IPSEC_IKE_NO_PEER_CERT = 13847 +ERROR_IPSEC_IKE_PEER_CRL_FAILED = 13848 +ERROR_IPSEC_IKE_POLICY_CHANGE = 13849 +ERROR_IPSEC_IKE_NO_MM_POLICY = 13850 +ERROR_IPSEC_IKE_NOTCBPRIV = 13851 +ERROR_IPSEC_IKE_SECLOADFAIL = 13852 +ERROR_IPSEC_IKE_FAILSSPINIT = 13853 +ERROR_IPSEC_IKE_FAILQUERYSSP = 13854 +ERROR_IPSEC_IKE_SRVACQFAIL = 13855 +ERROR_IPSEC_IKE_SRVQUERYCRED = 13856 +ERROR_IPSEC_IKE_GETSPIFAIL = 13857 +ERROR_IPSEC_IKE_INVALID_FILTER = 13858 +ERROR_IPSEC_IKE_OUT_OF_MEMORY = 13859 +ERROR_IPSEC_IKE_ADD_UPDATE_KEY_FAILED = 13860 +ERROR_IPSEC_IKE_INVALID_POLICY = 13861 +ERROR_IPSEC_IKE_UNKNOWN_DOI = 13862 +ERROR_IPSEC_IKE_INVALID_SITUATION = 13863 +ERROR_IPSEC_IKE_DH_FAILURE = 13864 +ERROR_IPSEC_IKE_INVALID_GROUP = 13865 +ERROR_IPSEC_IKE_ENCRYPT = 13866 +ERROR_IPSEC_IKE_DECRYPT = 13867 +ERROR_IPSEC_IKE_POLICY_MATCH = 13868 
+ERROR_IPSEC_IKE_UNSUPPORTED_ID = 13869 +ERROR_IPSEC_IKE_INVALID_HASH = 13870 +ERROR_IPSEC_IKE_INVALID_HASH_ALG = 13871 +ERROR_IPSEC_IKE_INVALID_HASH_SIZE = 13872 +ERROR_IPSEC_IKE_INVALID_ENCRYPT_ALG = 13873 +ERROR_IPSEC_IKE_INVALID_AUTH_ALG = 13874 +ERROR_IPSEC_IKE_INVALID_SIG = 13875 +ERROR_IPSEC_IKE_LOAD_FAILED = 13876 +ERROR_IPSEC_IKE_RPC_DELETE = 13877 +ERROR_IPSEC_IKE_BENIGN_REINIT = 13878 +ERROR_IPSEC_IKE_INVALID_RESPONDER_LIFETIME_NOTIFY = 13879 +ERROR_IPSEC_IKE_INVALID_CERT_KEYLEN = 13881 +ERROR_IPSEC_IKE_MM_LIMIT = 13882 +ERROR_IPSEC_IKE_NEGOTIATION_DISABLED = 13883 +ERROR_IPSEC_IKE_NEG_STATUS_END = 13884 +CRYPT_E_MSG_ERROR = ((-2146889727)) +CRYPT_E_UNKNOWN_ALGO = ((-2146889726)) +CRYPT_E_OID_FORMAT = ((-2146889725)) +CRYPT_E_INVALID_MSG_TYPE = ((-2146889724)) +CRYPT_E_UNEXPECTED_ENCODING = ((-2146889723)) +CRYPT_E_AUTH_ATTR_MISSING = ((-2146889722)) +CRYPT_E_HASH_VALUE = ((-2146889721)) +CRYPT_E_INVALID_INDEX = ((-2146889720)) +CRYPT_E_ALREADY_DECRYPTED = ((-2146889719)) +CRYPT_E_NOT_DECRYPTED = ((-2146889718)) +CRYPT_E_RECIPIENT_NOT_FOUND = ((-2146889717)) +CRYPT_E_CONTROL_TYPE = ((-2146889716)) +CRYPT_E_ISSUER_SERIALNUMBER = ((-2146889715)) +CRYPT_E_SIGNER_NOT_FOUND = ((-2146889714)) +CRYPT_E_ATTRIBUTES_MISSING = ((-2146889713)) +CRYPT_E_STREAM_MSG_NOT_READY = ((-2146889712)) +CRYPT_E_STREAM_INSUFFICIENT_DATA = ((-2146889711)) +CRYPT_I_NEW_PROTECTION_REQUIRED = (593938) +CRYPT_E_BAD_LEN = ((-2146885631)) +CRYPT_E_BAD_ENCODE = ((-2146885630)) +CRYPT_E_FILE_ERROR = ((-2146885629)) +CRYPT_E_NOT_FOUND = ((-2146885628)) +CRYPT_E_EXISTS = ((-2146885627)) +CRYPT_E_NO_PROVIDER = ((-2146885626)) +CRYPT_E_SELF_SIGNED = ((-2146885625)) +CRYPT_E_DELETED_PREV = ((-2146885624)) +CRYPT_E_NO_MATCH = ((-2146885623)) +CRYPT_E_UNEXPECTED_MSG_TYPE = ((-2146885622)) +CRYPT_E_NO_KEY_PROPERTY = ((-2146885621)) +CRYPT_E_NO_DECRYPT_CERT = ((-2146885620)) +CRYPT_E_BAD_MSG = ((-2146885619)) +CRYPT_E_NO_SIGNER = ((-2146885618)) +CRYPT_E_PENDING_CLOSE = ((-2146885617)) +CRYPT_E_REVOKED = ((-2146885616)) +CRYPT_E_NO_REVOCATION_DLL = ((-2146885615)) +CRYPT_E_NO_REVOCATION_CHECK = ((-2146885614)) +CRYPT_E_REVOCATION_OFFLINE = ((-2146885613)) +CRYPT_E_NOT_IN_REVOCATION_DATABASE = ((-2146885612)) +CRYPT_E_INVALID_NUMERIC_STRING = ((-2146885600)) +CRYPT_E_INVALID_PRINTABLE_STRING = ((-2146885599)) +CRYPT_E_INVALID_IA5_STRING = ((-2146885598)) +CRYPT_E_INVALID_X500_STRING = ((-2146885597)) +CRYPT_E_NOT_CHAR_STRING = ((-2146885596)) +CRYPT_E_FILERESIZED = ((-2146885595)) +CRYPT_E_SECURITY_SETTINGS = ((-2146885594)) +CRYPT_E_NO_VERIFY_USAGE_DLL = ((-2146885593)) +CRYPT_E_NO_VERIFY_USAGE_CHECK = ((-2146885592)) +CRYPT_E_VERIFY_USAGE_OFFLINE = ((-2146885591)) +CRYPT_E_NOT_IN_CTL = ((-2146885590)) +CRYPT_E_NO_TRUSTED_SIGNER = ((-2146885589)) +CRYPT_E_MISSING_PUBKEY_PARA = ((-2146885588)) +CRYPT_E_OSS_ERROR = ((-2146881536)) + +## Kerberos message types for LsaCallAuthenticationPackage (from ntsecapi.h) +KerbDebugRequestMessage = 0 +KerbQueryTicketCacheMessage = 1 +KerbChangeMachinePasswordMessage = 2 +KerbVerifyPacMessage = 3 +KerbRetrieveTicketMessage = 4 +KerbUpdateAddressesMessage = 5 +KerbPurgeTicketCacheMessage = 6 +KerbChangePasswordMessage = 7 +KerbRetrieveEncodedTicketMessage = 8 +KerbDecryptDataMessage = 9 +KerbAddBindingCacheEntryMessage = 10 +KerbSetPasswordMessage = 11 +KerbSetPasswordExMessage = 12 +KerbVerifyCredentialsMessage = 13 +KerbQueryTicketCacheExMessage = 14 +KerbPurgeTicketCacheExMessage = 15 +KerbRefreshSmartcardCredentialsMessage = 16 +KerbAddExtraCredentialsMessage = 17 
+KerbQuerySupplementalCredentialsMessage = 18 + +## messages used with msv1_0 from ntsecapi.h +MsV1_0Lm20ChallengeRequest = 0 +MsV1_0Lm20GetChallengeResponse = 1 +MsV1_0EnumerateUsers = 2 +MsV1_0GetUserInfo = 3 +MsV1_0ReLogonUsers = 4 +MsV1_0ChangePassword = 5 +MsV1_0ChangeCachedPassword = 6 +MsV1_0GenericPassthrough = 7 +MsV1_0CacheLogon = 8 +MsV1_0SubAuth = 9 +MsV1_0DeriveCredential = 10 +MsV1_0CacheLookup = 11 +MsV1_0SetProcessOption = 12 + +SEC_E_OK = 0 diff --git a/venv/Lib/site-packages/win32/lib/win32con.py b/venv/Lib/site-packages/win32/lib/win32con.py new file mode 100644 index 00000000..70cf62f4 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32con.py @@ -0,0 +1,5071 @@ +# Generated by h2py from commdlg.h (plus modifications 4jan98) +WINVER = 1280 +WM_USER = 1024 +PY_0U = 0 +OFN_READONLY = 1 +OFN_OVERWRITEPROMPT = 2 +OFN_HIDEREADONLY = 4 +OFN_NOCHANGEDIR = 8 +OFN_SHOWHELP = 16 +OFN_ENABLEHOOK = 32 +OFN_ENABLETEMPLATE = 64 +OFN_ENABLETEMPLATEHANDLE = 128 +OFN_NOVALIDATE = 256 +OFN_ALLOWMULTISELECT = 512 +OFN_EXTENSIONDIFFERENT = 1024 +OFN_PATHMUSTEXIST = 2048 +OFN_FILEMUSTEXIST = 4096 +OFN_CREATEPROMPT = 8192 +OFN_SHAREAWARE = 16384 +OFN_NOREADONLYRETURN = 32768 +OFN_NOTESTFILECREATE = 65536 +OFN_NONETWORKBUTTON = 131072 +OFN_NOLONGNAMES = 262144 +OFN_EXPLORER = 524288 # new look commdlg +OFN_NODEREFERENCELINKS = 1048576 +OFN_LONGNAMES = 2097152 # force long names for 3.x modules +OFN_ENABLEINCLUDENOTIFY = 4194304 # send include message to callback +OFN_ENABLESIZING = 8388608 +OFN_DONTADDTORECENT = 33554432 +OFN_FORCESHOWHIDDEN = 268435456 # Show All files including System and hidden files +OFN_EX_NOPLACESBAR = 1 +OFN_SHAREFALLTHROUGH = 2 +OFN_SHARENOWARN = 1 +OFN_SHAREWARN = 0 +CDN_FIRST = (PY_0U-601) +CDN_LAST = (PY_0U-699) +CDN_INITDONE = (CDN_FIRST - 0) +CDN_SELCHANGE = (CDN_FIRST - 1) +CDN_FOLDERCHANGE = (CDN_FIRST - 2) +CDN_SHAREVIOLATION = (CDN_FIRST - 3) +CDN_HELP = (CDN_FIRST - 4) +CDN_FILEOK = (CDN_FIRST - 5) +CDN_TYPECHANGE = (CDN_FIRST - 6) +CDN_INCLUDEITEM = (CDN_FIRST - 7) +CDM_FIRST = (WM_USER + 100) +CDM_LAST = (WM_USER + 200) +CDM_GETSPEC = (CDM_FIRST + 0) +CDM_GETFILEPATH = (CDM_FIRST + 1) +CDM_GETFOLDERPATH = (CDM_FIRST + 2) +CDM_GETFOLDERIDLIST = (CDM_FIRST + 3) +CDM_SETCONTROLTEXT = (CDM_FIRST + 4) +CDM_HIDECONTROL = (CDM_FIRST + 5) +CDM_SETDEFEXT = (CDM_FIRST + 6) +CC_RGBINIT = 1 +CC_FULLOPEN = 2 +CC_PREVENTFULLOPEN = 4 +CC_SHOWHELP = 8 +CC_ENABLEHOOK = 16 +CC_ENABLETEMPLATE = 32 +CC_ENABLETEMPLATEHANDLE = 64 +CC_SOLIDCOLOR = 128 +CC_ANYCOLOR = 256 +FR_DOWN = 1 +FR_WHOLEWORD = 2 +FR_MATCHCASE = 4 +FR_FINDNEXT = 8 +FR_REPLACE = 16 +FR_REPLACEALL = 32 +FR_DIALOGTERM = 64 +FR_SHOWHELP = 128 +FR_ENABLEHOOK = 256 +FR_ENABLETEMPLATE = 512 +FR_NOUPDOWN = 1024 +FR_NOMATCHCASE = 2048 +FR_NOWHOLEWORD = 4096 +FR_ENABLETEMPLATEHANDLE = 8192 +FR_HIDEUPDOWN = 16384 +FR_HIDEMATCHCASE = 32768 +FR_HIDEWHOLEWORD = 65536 +CF_SCREENFONTS = 1 +CF_PRINTERFONTS = 2 +CF_BOTH = (CF_SCREENFONTS | CF_PRINTERFONTS) +CF_SHOWHELP = 4 +CF_ENABLEHOOK = 8 +CF_ENABLETEMPLATE = 16 +CF_ENABLETEMPLATEHANDLE = 32 +CF_INITTOLOGFONTSTRUCT = 64 +CF_USESTYLE = 128 +CF_EFFECTS = 256 +CF_APPLY = 512 +CF_ANSIONLY = 1024 +CF_SCRIPTSONLY = CF_ANSIONLY +CF_NOVECTORFONTS = 2048 +CF_NOOEMFONTS = CF_NOVECTORFONTS +CF_NOSIMULATIONS = 4096 +CF_LIMITSIZE = 8192 +CF_FIXEDPITCHONLY = 16384 +CF_WYSIWYG = 32768 # must also have CF_SCREENFONTS & CF_PRINTERFONTS +CF_FORCEFONTEXIST = 65536 +CF_SCALABLEONLY = 131072 +CF_TTONLY = 262144 +CF_NOFACESEL = 524288 +CF_NOSTYLESEL = 1048576 +CF_NOSIZESEL = 2097152 
+CF_SELECTSCRIPT = 4194304 +CF_NOSCRIPTSEL = 8388608 +CF_NOVERTFONTS = 16777216 +SIMULATED_FONTTYPE = 32768 +PRINTER_FONTTYPE = 16384 +SCREEN_FONTTYPE = 8192 +BOLD_FONTTYPE = 256 +ITALIC_FONTTYPE = 512 +REGULAR_FONTTYPE = 1024 +OPENTYPE_FONTTYPE = 65536 +TYPE1_FONTTYPE = 131072 +DSIG_FONTTYPE = 262144 +WM_CHOOSEFONT_GETLOGFONT = (WM_USER + 1) +WM_CHOOSEFONT_SETLOGFONT = (WM_USER + 101) +WM_CHOOSEFONT_SETFLAGS = (WM_USER + 102) +LBSELCHSTRINGA = "commdlg_LBSelChangedNotify" +SHAREVISTRINGA = "commdlg_ShareViolation" +FILEOKSTRINGA = "commdlg_FileNameOK" +COLOROKSTRINGA = "commdlg_ColorOK" +SETRGBSTRINGA = "commdlg_SetRGBColor" +HELPMSGSTRINGA = "commdlg_help" +FINDMSGSTRINGA = "commdlg_FindReplace" +LBSELCHSTRING = LBSELCHSTRINGA +SHAREVISTRING = SHAREVISTRINGA +FILEOKSTRING = FILEOKSTRINGA +COLOROKSTRING = COLOROKSTRINGA +SETRGBSTRING = SETRGBSTRINGA +HELPMSGSTRING = HELPMSGSTRINGA +FINDMSGSTRING = FINDMSGSTRINGA +CD_LBSELNOITEMS = -1 +CD_LBSELCHANGE = 0 +CD_LBSELSUB = 1 +CD_LBSELADD = 2 +PD_ALLPAGES = 0 +PD_SELECTION = 1 +PD_PAGENUMS = 2 +PD_NOSELECTION = 4 +PD_NOPAGENUMS = 8 +PD_COLLATE = 16 +PD_PRINTTOFILE = 32 +PD_PRINTSETUP = 64 +PD_NOWARNING = 128 +PD_RETURNDC = 256 +PD_RETURNIC = 512 +PD_RETURNDEFAULT = 1024 +PD_SHOWHELP = 2048 +PD_ENABLEPRINTHOOK = 4096 +PD_ENABLESETUPHOOK = 8192 +PD_ENABLEPRINTTEMPLATE = 16384 +PD_ENABLESETUPTEMPLATE = 32768 +PD_ENABLEPRINTTEMPLATEHANDLE = 65536 +PD_ENABLESETUPTEMPLATEHANDLE = 131072 +PD_USEDEVMODECOPIES = 262144 +PD_DISABLEPRINTTOFILE = 524288 +PD_HIDEPRINTTOFILE = 1048576 +PD_NONETWORKBUTTON = 2097152 +DN_DEFAULTPRN = 1 +WM_PSD_PAGESETUPDLG = (WM_USER ) +WM_PSD_FULLPAGERECT = (WM_USER+1) +WM_PSD_MINMARGINRECT = (WM_USER+2) +WM_PSD_MARGINRECT = (WM_USER+3) +WM_PSD_GREEKTEXTRECT = (WM_USER+4) +WM_PSD_ENVSTAMPRECT = (WM_USER+5) +WM_PSD_YAFULLPAGERECT = (WM_USER+6) +PSD_DEFAULTMINMARGINS = 0 # default (printer's) +PSD_INWININIINTLMEASURE = 0 # 1st of 4 possible +PSD_MINMARGINS = 1 # use caller's +PSD_MARGINS = 2 # use caller's +PSD_INTHOUSANDTHSOFINCHES = 4 # 2nd of 4 possible +PSD_INHUNDREDTHSOFMILLIMETERS = 8 # 3rd of 4 possible +PSD_DISABLEMARGINS = 16 +PSD_DISABLEPRINTER = 32 +PSD_NOWARNING = 128 # must be same as PD_* +PSD_DISABLEORIENTATION = 256 +PSD_RETURNDEFAULT = 1024 # must be same as PD_* +PSD_DISABLEPAPER = 512 +PSD_SHOWHELP = 2048 # must be same as PD_* +PSD_ENABLEPAGESETUPHOOK = 8192 # must be same as PD_* +PSD_ENABLEPAGESETUPTEMPLATE = 32768 # must be same as PD_* +PSD_ENABLEPAGESETUPTEMPLATEHANDLE = 131072 # must be same as PD_* +PSD_ENABLEPAGEPAINTHOOK = 262144 +PSD_DISABLEPAGEPAINTING = 524288 +PSD_NONETWORKBUTTON = 2097152 # must be same as PD_* + +# Generated by h2py from winreg.h +HKEY_CLASSES_ROOT = -2147483648 +HKEY_CURRENT_USER = -2147483647 +HKEY_LOCAL_MACHINE = -2147483646 +HKEY_USERS = -2147483645 +HKEY_PERFORMANCE_DATA = -2147483644 +HKEY_CURRENT_CONFIG = -2147483643 +HKEY_DYN_DATA = -2147483642 +HKEY_PERFORMANCE_TEXT = -2147483568 # ?? 4Jan98 +HKEY_PERFORMANCE_NLSTEXT = -2147483552 # ?? 
4Jan98 + +# Generated by h2py from winuser.h +HWND_BROADCAST = 65535 +HWND_DESKTOP = 0 +HWND_TOP = 0 +HWND_BOTTOM = 1 +HWND_TOPMOST = -1 +HWND_NOTOPMOST = -2 +HWND_MESSAGE = -3 + +# winuser.h line 4601 +SM_CXSCREEN = 0 +SM_CYSCREEN = 1 +SM_CXVSCROLL = 2 +SM_CYHSCROLL = 3 +SM_CYCAPTION = 4 +SM_CXBORDER = 5 +SM_CYBORDER = 6 +SM_CXDLGFRAME = 7 +SM_CYDLGFRAME = 8 +SM_CYVTHUMB = 9 +SM_CXHTHUMB = 10 +SM_CXICON = 11 +SM_CYICON = 12 +SM_CXCURSOR = 13 +SM_CYCURSOR = 14 +SM_CYMENU = 15 +SM_CXFULLSCREEN = 16 +SM_CYFULLSCREEN = 17 +SM_CYKANJIWINDOW = 18 +SM_MOUSEPRESENT = 19 +SM_CYVSCROLL = 20 +SM_CXHSCROLL = 21 +SM_DEBUG = 22 +SM_SWAPBUTTON = 23 +SM_RESERVED1 = 24 +SM_RESERVED2 = 25 +SM_RESERVED3 = 26 +SM_RESERVED4 = 27 +SM_CXMIN = 28 +SM_CYMIN = 29 +SM_CXSIZE = 30 +SM_CYSIZE = 31 +SM_CXFRAME = 32 +SM_CYFRAME = 33 +SM_CXMINTRACK = 34 +SM_CYMINTRACK = 35 +SM_CXDOUBLECLK = 36 +SM_CYDOUBLECLK = 37 +SM_CXICONSPACING = 38 +SM_CYICONSPACING = 39 +SM_MENUDROPALIGNMENT = 40 +SM_PENWINDOWS = 41 +SM_DBCSENABLED = 42 +SM_CMOUSEBUTTONS = 43 +SM_CXFIXEDFRAME = SM_CXDLGFRAME +SM_CYFIXEDFRAME = SM_CYDLGFRAME +SM_CXSIZEFRAME = SM_CXFRAME +SM_CYSIZEFRAME = SM_CYFRAME +SM_SECURE = 44 +SM_CXEDGE = 45 +SM_CYEDGE = 46 +SM_CXMINSPACING = 47 +SM_CYMINSPACING = 48 +SM_CXSMICON = 49 +SM_CYSMICON = 50 +SM_CYSMCAPTION = 51 +SM_CXSMSIZE = 52 +SM_CYSMSIZE = 53 +SM_CXMENUSIZE = 54 +SM_CYMENUSIZE = 55 +SM_ARRANGE = 56 +SM_CXMINIMIZED = 57 +SM_CYMINIMIZED = 58 +SM_CXMAXTRACK = 59 +SM_CYMAXTRACK = 60 +SM_CXMAXIMIZED = 61 +SM_CYMAXIMIZED = 62 +SM_NETWORK = 63 +SM_CLEANBOOT = 67 +SM_CXDRAG = 68 +SM_CYDRAG = 69 +SM_SHOWSOUNDS = 70 +SM_CXMENUCHECK = 71 +SM_CYMENUCHECK = 72 +SM_SLOWMACHINE = 73 +SM_MIDEASTENABLED = 74 +SM_MOUSEWHEELPRESENT = 75 +SM_XVIRTUALSCREEN = 76 +SM_YVIRTUALSCREEN = 77 +SM_CXVIRTUALSCREEN = 78 +SM_CYVIRTUALSCREEN = 79 +SM_CMONITORS = 80 +SM_SAMEDISPLAYFORMAT = 81 +SM_CMETRICS = 83 +MNC_IGNORE = 0 +MNC_CLOSE = 1 +MNC_EXECUTE = 2 +MNC_SELECT = 3 +MNS_NOCHECK = -2147483648 +MNS_MODELESS = 1073741824 +MNS_DRAGDROP = 536870912 +MNS_AUTODISMISS = 268435456 +MNS_NOTIFYBYPOS = 134217728 +MNS_CHECKORBMP = 67108864 +MIM_MAXHEIGHT = 1 +MIM_BACKGROUND = 2 +MIM_HELPID = 4 +MIM_MENUDATA = 8 +MIM_STYLE = 16 +MIM_APPLYTOSUBMENUS = -2147483648 +MND_CONTINUE = 0 +MND_ENDMENU = 1 +MNGOF_GAP = 3 +MNGO_NOINTERFACE = 0 +MNGO_NOERROR = 1 +MIIM_STATE = 1 +MIIM_ID = 2 +MIIM_SUBMENU = 4 +MIIM_CHECKMARKS = 8 +MIIM_TYPE = 16 +MIIM_DATA = 32 +MIIM_STRING = 64 +MIIM_BITMAP = 128 +MIIM_FTYPE = 256 +HBMMENU_CALLBACK = -1 +HBMMENU_SYSTEM = 1 +HBMMENU_MBAR_RESTORE = 2 +HBMMENU_MBAR_MINIMIZE = 3 +HBMMENU_MBAR_CLOSE = 5 +HBMMENU_MBAR_CLOSE_D = 6 +HBMMENU_MBAR_MINIMIZE_D = 7 +HBMMENU_POPUP_CLOSE = 8 +HBMMENU_POPUP_RESTORE = 9 +HBMMENU_POPUP_MAXIMIZE = 10 +HBMMENU_POPUP_MINIMIZE = 11 +GMDI_USEDISABLED = 1 +GMDI_GOINTOPOPUPS = 2 +TPM_LEFTBUTTON = 0 +TPM_RIGHTBUTTON = 2 +TPM_LEFTALIGN = 0 +TPM_CENTERALIGN = 4 +TPM_RIGHTALIGN = 8 +TPM_TOPALIGN = 0 +TPM_VCENTERALIGN = 16 +TPM_BOTTOMALIGN = 32 +TPM_HORIZONTAL = 0 +TPM_VERTICAL = 64 +TPM_NONOTIFY = 128 +TPM_RETURNCMD = 256 +TPM_RECURSE = 1 +DOF_EXECUTABLE = 32769 +DOF_DOCUMENT = 32770 +DOF_DIRECTORY = 32771 +DOF_MULTIPLE = 32772 +DOF_PROGMAN = 1 +DOF_SHELLDATA = 2 +DO_DROPFILE = 1162627398 +DO_PRINTFILE = 1414419024 +DT_TOP = 0 +DT_LEFT = 0 +DT_CENTER = 1 +DT_RIGHT = 2 +DT_VCENTER = 4 +DT_BOTTOM = 8 +DT_WORDBREAK = 16 +DT_SINGLELINE = 32 +DT_EXPANDTABS = 64 +DT_TABSTOP = 128 +DT_NOCLIP = 256 +DT_EXTERNALLEADING = 512 +DT_CALCRECT = 1024 +DT_NOPREFIX = 2048 +DT_INTERNAL = 4096 +DT_EDITCONTROL = 8192 
+DT_PATH_ELLIPSIS = 16384 +DT_END_ELLIPSIS = 32768 +DT_MODIFYSTRING = 65536 +DT_RTLREADING = 131072 +DT_WORD_ELLIPSIS = 262144 +DST_COMPLEX = 0 +DST_TEXT = 1 +DST_PREFIXTEXT = 2 +DST_ICON = 3 +DST_BITMAP = 4 +DSS_NORMAL = 0 +DSS_UNION = 16 +DSS_DISABLED = 32 +DSS_MONO = 128 +DSS_RIGHT = 32768 +DCX_WINDOW = 1 +DCX_CACHE = 2 +DCX_NORESETATTRS = 4 +DCX_CLIPCHILDREN = 8 +DCX_CLIPSIBLINGS = 16 +DCX_PARENTCLIP = 32 +DCX_EXCLUDERGN = 64 +DCX_INTERSECTRGN = 128 +DCX_EXCLUDEUPDATE = 256 +DCX_INTERSECTUPDATE = 512 +DCX_LOCKWINDOWUPDATE = 1024 +DCX_VALIDATE = 2097152 +CUDR_NORMAL = 0 +CUDR_NOSNAPTOGRID = 1 +CUDR_NORESOLVEPOSITIONS = 2 +CUDR_NOCLOSEGAPS = 4 +CUDR_NEGATIVECOORDS = 8 +CUDR_NOPRIMARY = 16 +RDW_INVALIDATE = 1 +RDW_INTERNALPAINT = 2 +RDW_ERASE = 4 +RDW_VALIDATE = 8 +RDW_NOINTERNALPAINT = 16 +RDW_NOERASE = 32 +RDW_NOCHILDREN = 64 +RDW_ALLCHILDREN = 128 +RDW_UPDATENOW = 256 +RDW_ERASENOW = 512 +RDW_FRAME = 1024 +RDW_NOFRAME = 2048 +SW_SCROLLCHILDREN = 1 +SW_INVALIDATE = 2 +SW_ERASE = 4 +SW_SMOOTHSCROLL = 16 # Use smooth scrolling +ESB_ENABLE_BOTH = 0 +ESB_DISABLE_BOTH = 3 +ESB_DISABLE_LEFT = 1 +ESB_DISABLE_RIGHT = 2 +ESB_DISABLE_UP = 1 +ESB_DISABLE_DOWN = 2 +ESB_DISABLE_LTUP = ESB_DISABLE_LEFT +ESB_DISABLE_RTDN = ESB_DISABLE_RIGHT +HELPINFO_WINDOW = 1 +HELPINFO_MENUITEM = 2 +MB_OK = 0 +MB_OKCANCEL = 1 +MB_ABORTRETRYIGNORE = 2 +MB_YESNOCANCEL = 3 +MB_YESNO = 4 +MB_RETRYCANCEL = 5 +MB_ICONHAND = 16 +MB_ICONQUESTION = 32 +MB_ICONEXCLAMATION = 48 +MB_ICONASTERISK = 64 +MB_ICONWARNING = MB_ICONEXCLAMATION +MB_ICONERROR = MB_ICONHAND +MB_ICONINFORMATION = MB_ICONASTERISK +MB_ICONSTOP = MB_ICONHAND +MB_DEFBUTTON1 = 0 +MB_DEFBUTTON2 = 256 +MB_DEFBUTTON3 = 512 +MB_DEFBUTTON4 = 768 +MB_APPLMODAL = 0 +MB_SYSTEMMODAL = 4096 +MB_TASKMODAL = 8192 +MB_HELP = 16384 +MB_NOFOCUS = 32768 +MB_SETFOREGROUND = 65536 +MB_DEFAULT_DESKTOP_ONLY = 131072 +MB_TOPMOST = 262144 +MB_RIGHT = 524288 +MB_RTLREADING = 1048576 +MB_SERVICE_NOTIFICATION = 2097152 +MB_TYPEMASK = 15 +MB_USERICON = 128 +MB_ICONMASK = 240 +MB_DEFMASK = 3840 +MB_MODEMASK = 12288 +MB_MISCMASK = 49152 +# winuser.h line 6373 +CWP_ALL = 0 +CWP_SKIPINVISIBLE = 1 +CWP_SKIPDISABLED = 2 +CWP_SKIPTRANSPARENT = 4 +CTLCOLOR_MSGBOX = 0 +CTLCOLOR_EDIT = 1 +CTLCOLOR_LISTBOX = 2 +CTLCOLOR_BTN = 3 +CTLCOLOR_DLG = 4 +CTLCOLOR_SCROLLBAR = 5 +CTLCOLOR_STATIC = 6 +CTLCOLOR_MAX = 7 +COLOR_SCROLLBAR = 0 +COLOR_BACKGROUND = 1 +COLOR_ACTIVECAPTION = 2 +COLOR_INACTIVECAPTION = 3 +COLOR_MENU = 4 +COLOR_WINDOW = 5 +COLOR_WINDOWFRAME = 6 +COLOR_MENUTEXT = 7 +COLOR_WINDOWTEXT = 8 +COLOR_CAPTIONTEXT = 9 +COLOR_ACTIVEBORDER = 10 +COLOR_INACTIVEBORDER = 11 +COLOR_APPWORKSPACE = 12 +COLOR_HIGHLIGHT = 13 +COLOR_HIGHLIGHTTEXT = 14 +COLOR_BTNFACE = 15 +COLOR_BTNSHADOW = 16 +COLOR_GRAYTEXT = 17 +COLOR_BTNTEXT = 18 +COLOR_INACTIVECAPTIONTEXT = 19 +COLOR_BTNHIGHLIGHT = 20 +COLOR_3DDKSHADOW = 21 +COLOR_3DLIGHT = 22 +COLOR_INFOTEXT = 23 +COLOR_INFOBK = 24 +COLOR_HOTLIGHT = 26 +COLOR_GRADIENTACTIVECAPTION = 27 +COLOR_GRADIENTINACTIVECAPTION = 28 +COLOR_DESKTOP = COLOR_BACKGROUND +COLOR_3DFACE = COLOR_BTNFACE +COLOR_3DSHADOW = COLOR_BTNSHADOW +COLOR_3DHIGHLIGHT = COLOR_BTNHIGHLIGHT +COLOR_3DHILIGHT = COLOR_BTNHIGHLIGHT +COLOR_BTNHILIGHT = COLOR_BTNHIGHLIGHT +GW_HWNDFIRST = 0 +GW_HWNDLAST = 1 +GW_HWNDNEXT = 2 +GW_HWNDPREV = 3 +GW_OWNER = 4 +GW_CHILD = 5 +GW_ENABLEDPOPUP = 6 +GW_MAX = 6 +MF_INSERT = 0 +MF_CHANGE = 128 +MF_APPEND = 256 +MF_DELETE = 512 +MF_REMOVE = 4096 +MF_BYCOMMAND = 0 +MF_BYPOSITION = 1024 +MF_SEPARATOR = 2048 +MF_ENABLED = 0 +MF_GRAYED = 1 +MF_DISABLED = 2 +MF_UNCHECKED = 0 
+MF_CHECKED = 8 +MF_USECHECKBITMAPS = 512 +MF_STRING = 0 +MF_BITMAP = 4 +MF_OWNERDRAW = 256 +MF_POPUP = 16 +MF_MENUBARBREAK = 32 +MF_MENUBREAK = 64 +MF_UNHILITE = 0 +MF_HILITE = 128 +MF_DEFAULT = 4096 +MF_SYSMENU = 8192 +MF_HELP = 16384 +MF_RIGHTJUSTIFY = 16384 +MF_MOUSESELECT = 32768 +MF_END = 128 +MFT_STRING = MF_STRING +MFT_BITMAP = MF_BITMAP +MFT_MENUBARBREAK = MF_MENUBARBREAK +MFT_MENUBREAK = MF_MENUBREAK +MFT_OWNERDRAW = MF_OWNERDRAW +MFT_RADIOCHECK = 512 +MFT_SEPARATOR = MF_SEPARATOR +MFT_RIGHTORDER = 8192 +MFT_RIGHTJUSTIFY = MF_RIGHTJUSTIFY +MFS_GRAYED = 3 +MFS_DISABLED = MFS_GRAYED +MFS_CHECKED = MF_CHECKED +MFS_HILITE = MF_HILITE +MFS_ENABLED = MF_ENABLED +MFS_UNCHECKED = MF_UNCHECKED +MFS_UNHILITE = MF_UNHILITE +MFS_DEFAULT = MF_DEFAULT +MFS_MASK = 4235 +MFS_HOTTRACKDRAWN = 268435456 +MFS_CACHEDBMP = 536870912 +MFS_BOTTOMGAPDROP = 1073741824 +MFS_TOPGAPDROP = -2147483648 +MFS_GAPDROP = -1073741824 +SC_SIZE = 61440 +SC_MOVE = 61456 +SC_MINIMIZE = 61472 +SC_MAXIMIZE = 61488 +SC_NEXTWINDOW = 61504 +SC_PREVWINDOW = 61520 +SC_CLOSE = 61536 +SC_VSCROLL = 61552 +SC_HSCROLL = 61568 +SC_MOUSEMENU = 61584 +SC_KEYMENU = 61696 +SC_ARRANGE = 61712 +SC_RESTORE = 61728 +SC_TASKLIST = 61744 +SC_SCREENSAVE = 61760 +SC_HOTKEY = 61776 +SC_DEFAULT = 61792 +SC_MONITORPOWER = 61808 +SC_CONTEXTHELP = 61824 +SC_SEPARATOR = 61455 +SC_ICON = SC_MINIMIZE +SC_ZOOM = SC_MAXIMIZE +IDC_ARROW = 32512 +IDC_IBEAM = 32513 +IDC_WAIT = 32514 +IDC_CROSS = 32515 +IDC_UPARROW = 32516 +IDC_SIZE = 32640 # OBSOLETE: use IDC_SIZEALL +IDC_ICON = 32641 # OBSOLETE: use IDC_ARROW +IDC_SIZENWSE = 32642 +IDC_SIZENESW = 32643 +IDC_SIZEWE = 32644 +IDC_SIZENS = 32645 +IDC_SIZEALL = 32646 +IDC_NO = 32648 +IDC_HAND = 32649 +IDC_APPSTARTING = 32650 +IDC_HELP = 32651 +IMAGE_BITMAP = 0 +IMAGE_ICON = 1 +IMAGE_CURSOR = 2 +IMAGE_ENHMETAFILE = 3 +LR_DEFAULTCOLOR = 0 +LR_MONOCHROME = 1 +LR_COLOR = 2 +LR_COPYRETURNORG = 4 +LR_COPYDELETEORG = 8 +LR_LOADFROMFILE = 16 +LR_LOADTRANSPARENT = 32 +LR_DEFAULTSIZE = 64 +LR_LOADREALSIZE = 128 +LR_LOADMAP3DCOLORS = 4096 +LR_CREATEDIBSECTION = 8192 +LR_COPYFROMRESOURCE = 16384 +LR_SHARED = 32768 +DI_MASK = 1 +DI_IMAGE = 2 +DI_NORMAL = 3 +DI_COMPAT = 4 +DI_DEFAULTSIZE = 8 +RES_ICON = 1 +RES_CURSOR = 2 +OBM_CLOSE = 32754 +OBM_UPARROW = 32753 +OBM_DNARROW = 32752 +OBM_RGARROW = 32751 +OBM_LFARROW = 32750 +OBM_REDUCE = 32749 +OBM_ZOOM = 32748 +OBM_RESTORE = 32747 +OBM_REDUCED = 32746 +OBM_ZOOMD = 32745 +OBM_RESTORED = 32744 +OBM_UPARROWD = 32743 +OBM_DNARROWD = 32742 +OBM_RGARROWD = 32741 +OBM_LFARROWD = 32740 +OBM_MNARROW = 32739 +OBM_COMBO = 32738 +OBM_UPARROWI = 32737 +OBM_DNARROWI = 32736 +OBM_RGARROWI = 32735 +OBM_LFARROWI = 32734 +OBM_OLD_CLOSE = 32767 +OBM_SIZE = 32766 +OBM_OLD_UPARROW = 32765 +OBM_OLD_DNARROW = 32764 +OBM_OLD_RGARROW = 32763 +OBM_OLD_LFARROW = 32762 +OBM_BTSIZE = 32761 +OBM_CHECK = 32760 +OBM_CHECKBOXES = 32759 +OBM_BTNCORNERS = 32758 +OBM_OLD_REDUCE = 32757 +OBM_OLD_ZOOM = 32756 +OBM_OLD_RESTORE = 32755 +OCR_NORMAL = 32512 +OCR_IBEAM = 32513 +OCR_WAIT = 32514 +OCR_CROSS = 32515 +OCR_UP = 32516 +OCR_SIZE = 32640 +OCR_ICON = 32641 +OCR_SIZENWSE = 32642 +OCR_SIZENESW = 32643 +OCR_SIZEWE = 32644 +OCR_SIZENS = 32645 +OCR_SIZEALL = 32646 +OCR_ICOCUR = 32647 +OCR_NO = 32648 +OCR_HAND = 32649 +OCR_APPSTARTING = 32650 +# winuser.h line 7455 +OIC_SAMPLE = 32512 +OIC_HAND = 32513 +OIC_QUES = 32514 +OIC_BANG = 32515 +OIC_NOTE = 32516 +OIC_WINLOGO = 32517 +OIC_WARNING = OIC_BANG +OIC_ERROR = OIC_HAND +OIC_INFORMATION = OIC_NOTE +ORD_LANGDRIVER = 1 +IDI_APPLICATION = 32512 +IDI_HAND = 32513 
+IDI_QUESTION = 32514 +IDI_EXCLAMATION = 32515 +IDI_ASTERISK = 32516 +IDI_WINLOGO = 32517 +IDI_WARNING = IDI_EXCLAMATION +IDI_ERROR = IDI_HAND +IDI_INFORMATION = IDI_ASTERISK +IDOK = 1 +IDCANCEL = 2 +IDABORT = 3 +IDRETRY = 4 +IDIGNORE = 5 +IDYES = 6 +IDNO = 7 +IDCLOSE = 8 +IDHELP = 9 +ES_LEFT = 0 +ES_CENTER = 1 +ES_RIGHT = 2 +ES_MULTILINE = 4 +ES_UPPERCASE = 8 +ES_LOWERCASE = 16 +ES_PASSWORD = 32 +ES_AUTOVSCROLL = 64 +ES_AUTOHSCROLL = 128 +ES_NOHIDESEL = 256 +ES_OEMCONVERT = 1024 +ES_READONLY = 2048 +ES_WANTRETURN = 4096 +ES_NUMBER = 8192 +EN_SETFOCUS = 256 +EN_KILLFOCUS = 512 +EN_CHANGE = 768 +EN_UPDATE = 1024 +EN_ERRSPACE = 1280 +EN_MAXTEXT = 1281 +EN_HSCROLL = 1537 +EN_VSCROLL = 1538 +EC_LEFTMARGIN = 1 +EC_RIGHTMARGIN = 2 +EC_USEFONTINFO = 65535 +EMSIS_COMPOSITIONSTRING = 1 +EIMES_GETCOMPSTRATONCE = 1 +EIMES_CANCELCOMPSTRINFOCUS = 2 +EIMES_COMPLETECOMPSTRKILLFOCUS = 4 +EM_GETSEL = 176 +EM_SETSEL = 177 +EM_GETRECT = 178 +EM_SETRECT = 179 +EM_SETRECTNP = 180 +EM_SCROLL = 181 +EM_LINESCROLL = 182 +EM_SCROLLCARET = 183 +EM_GETMODIFY = 184 +EM_SETMODIFY = 185 +EM_GETLINECOUNT = 186 +EM_LINEINDEX = 187 +EM_SETHANDLE = 188 +EM_GETHANDLE = 189 +EM_GETTHUMB = 190 +EM_LINELENGTH = 193 +EM_REPLACESEL = 194 +EM_GETLINE = 196 +EM_LIMITTEXT = 197 +EM_CANUNDO = 198 +EM_UNDO = 199 +EM_FMTLINES = 200 +EM_LINEFROMCHAR = 201 +EM_SETTABSTOPS = 203 +EM_SETPASSWORDCHAR = 204 +EM_EMPTYUNDOBUFFER = 205 +EM_GETFIRSTVISIBLELINE = 206 +EM_SETREADONLY = 207 +EM_SETWORDBREAKPROC = 208 +EM_GETWORDBREAKPROC = 209 +EM_GETPASSWORDCHAR = 210 +EM_SETMARGINS = 211 +EM_GETMARGINS = 212 +EM_SETLIMITTEXT = EM_LIMITTEXT +EM_GETLIMITTEXT = 213 +EM_POSFROMCHAR = 214 +EM_CHARFROMPOS = 215 +EM_SETIMESTATUS = 216 +EM_GETIMESTATUS = 217 +WB_LEFT = 0 +WB_RIGHT = 1 +WB_ISDELIMITER = 2 +BS_PUSHBUTTON = 0 +BS_DEFPUSHBUTTON = 1 +BS_CHECKBOX = 2 +BS_AUTOCHECKBOX = 3 +BS_RADIOBUTTON = 4 +BS_3STATE = 5 +BS_AUTO3STATE = 6 +BS_GROUPBOX = 7 +BS_USERBUTTON = 8 +BS_AUTORADIOBUTTON = 9 +BS_OWNERDRAW = 11 +BS_LEFTTEXT = 32 +BS_TEXT = 0 +BS_ICON = 64 +BS_BITMAP = 128 +BS_LEFT = 256 +BS_RIGHT = 512 +BS_CENTER = 768 +BS_TOP = 1024 +BS_BOTTOM = 2048 +BS_VCENTER = 3072 +BS_PUSHLIKE = 4096 +BS_MULTILINE = 8192 +BS_NOTIFY = 16384 +BS_FLAT = 32768 +BS_RIGHTBUTTON = BS_LEFTTEXT +BN_CLICKED = 0 +BN_PAINT = 1 +BN_HILITE = 2 +BN_UNHILITE = 3 +BN_DISABLE = 4 +BN_DOUBLECLICKED = 5 +BN_PUSHED = BN_HILITE +BN_UNPUSHED = BN_UNHILITE +BN_DBLCLK = BN_DOUBLECLICKED +BN_SETFOCUS = 6 +BN_KILLFOCUS = 7 +BM_GETCHECK = 240 +BM_SETCHECK = 241 +BM_GETSTATE = 242 +BM_SETSTATE = 243 +BM_SETSTYLE = 244 +BM_CLICK = 245 +BM_GETIMAGE = 246 +BM_SETIMAGE = 247 +BST_UNCHECKED = 0 +BST_CHECKED = 1 +BST_INDETERMINATE = 2 +BST_PUSHED = 4 +BST_FOCUS = 8 +SS_LEFT = 0 +SS_CENTER = 1 +SS_RIGHT = 2 +SS_ICON = 3 +SS_BLACKRECT = 4 +SS_GRAYRECT = 5 +SS_WHITERECT = 6 +SS_BLACKFRAME = 7 +SS_GRAYFRAME = 8 +SS_WHITEFRAME = 9 +SS_USERITEM = 10 +SS_SIMPLE = 11 +SS_LEFTNOWORDWRAP = 12 +SS_BITMAP = 14 +SS_OWNERDRAW = 13 +SS_ENHMETAFILE = 15 +SS_ETCHEDHORZ = 16 +SS_ETCHEDVERT = 17 +SS_ETCHEDFRAME = 18 +SS_TYPEMASK = 31 +SS_NOPREFIX = 128 +SS_NOTIFY = 256 +SS_CENTERIMAGE = 512 +SS_RIGHTJUST = 1024 +SS_REALSIZEIMAGE = 2048 +SS_SUNKEN = 4096 +SS_ENDELLIPSIS = 16384 +SS_PATHELLIPSIS = 32768 +SS_WORDELLIPSIS = 49152 +SS_ELLIPSISMASK = 49152 +STM_SETICON = 368 +STM_GETICON = 369 +STM_SETIMAGE = 370 +STM_GETIMAGE = 371 +STN_CLICKED = 0 +STN_DBLCLK = 1 +STN_ENABLE = 2 +STN_DISABLE = 3 +STM_MSGMAX = 372 +DWL_MSGRESULT = 0 +DWL_DLGPROC = 4 +DWL_USER = 8 +DDL_READWRITE = 0 +DDL_READONLY = 1 +DDL_HIDDEN = 2 
+DDL_SYSTEM = 4 +DDL_DIRECTORY = 16 +DDL_ARCHIVE = 32 +DDL_POSTMSGS = 8192 +DDL_DRIVES = 16384 +DDL_EXCLUSIVE = 32768 + +#from winuser.h line 153 +RT_CURSOR = 1 +RT_BITMAP = 2 +RT_ICON = 3 +RT_MENU = 4 +RT_DIALOG = 5 +RT_STRING = 6 +RT_FONTDIR = 7 +RT_FONT = 8 +RT_ACCELERATOR = 9 +RT_RCDATA = 10 +RT_MESSAGETABLE = 11 +DIFFERENCE = 11 +RT_GROUP_CURSOR = (RT_CURSOR + DIFFERENCE) +RT_GROUP_ICON = (RT_ICON + DIFFERENCE) +RT_VERSION = 16 +RT_DLGINCLUDE = 17 +RT_PLUGPLAY = 19 +RT_VXD = 20 +RT_ANICURSOR = 21 +RT_ANIICON = 22 +RT_HTML = 23 +# from winuser.h line 218 +SB_HORZ = 0 +SB_VERT = 1 +SB_CTL = 2 +SB_BOTH = 3 +SB_LINEUP = 0 +SB_LINELEFT = 0 +SB_LINEDOWN = 1 +SB_LINERIGHT = 1 +SB_PAGEUP = 2 +SB_PAGELEFT = 2 +SB_PAGEDOWN = 3 +SB_PAGERIGHT = 3 +SB_THUMBPOSITION = 4 +SB_THUMBTRACK = 5 +SB_TOP = 6 +SB_LEFT = 6 +SB_BOTTOM = 7 +SB_RIGHT = 7 +SB_ENDSCROLL = 8 +SW_HIDE = 0 +SW_SHOWNORMAL = 1 +SW_NORMAL = 1 +SW_SHOWMINIMIZED = 2 +SW_SHOWMAXIMIZED = 3 +SW_MAXIMIZE = 3 +SW_SHOWNOACTIVATE = 4 +SW_SHOW = 5 +SW_MINIMIZE = 6 +SW_SHOWMINNOACTIVE = 7 +SW_SHOWNA = 8 +SW_RESTORE = 9 +SW_SHOWDEFAULT = 10 +SW_FORCEMINIMIZE = 11 +SW_MAX = 11 +HIDE_WINDOW = 0 +SHOW_OPENWINDOW = 1 +SHOW_ICONWINDOW = 2 +SHOW_FULLSCREEN = 3 +SHOW_OPENNOACTIVATE = 4 +SW_PARENTCLOSING = 1 +SW_OTHERZOOM = 2 +SW_PARENTOPENING = 3 +SW_OTHERUNZOOM = 4 +AW_HOR_POSITIVE = 1 +AW_HOR_NEGATIVE = 2 +AW_VER_POSITIVE = 4 +AW_VER_NEGATIVE = 8 +AW_CENTER = 16 +AW_HIDE = 65536 +AW_ACTIVATE = 131072 +AW_SLIDE = 262144 +AW_BLEND = 524288 +KF_EXTENDED = 256 +KF_DLGMODE = 2048 +KF_MENUMODE = 4096 +KF_ALTDOWN = 8192 +KF_REPEAT = 16384 +KF_UP = 32768 +VK_LBUTTON = 1 +VK_RBUTTON = 2 +VK_CANCEL = 3 +VK_MBUTTON = 4 +VK_BACK = 8 +VK_TAB = 9 +VK_CLEAR = 12 +VK_RETURN = 13 +VK_SHIFT = 16 +VK_CONTROL = 17 +VK_MENU = 18 +VK_PAUSE = 19 +VK_CAPITAL = 20 +VK_KANA = 21 +VK_HANGEUL = 21 # old name - should be here for compatibility +VK_HANGUL = 21 +VK_JUNJA = 23 +VK_FINAL = 24 +VK_HANJA = 25 +VK_KANJI = 25 +VK_ESCAPE = 27 +VK_CONVERT = 28 +VK_NONCONVERT = 29 +VK_ACCEPT = 30 +VK_MODECHANGE = 31 +VK_SPACE = 32 +VK_PRIOR = 33 +VK_NEXT = 34 +VK_END = 35 +VK_HOME = 36 +VK_LEFT = 37 +VK_UP = 38 +VK_RIGHT = 39 +VK_DOWN = 40 +VK_SELECT = 41 +VK_PRINT = 42 +VK_EXECUTE = 43 +VK_SNAPSHOT = 44 +VK_INSERT = 45 +VK_DELETE = 46 +VK_HELP = 47 +VK_LWIN = 91 +VK_RWIN = 92 +VK_APPS = 93 +VK_NUMPAD0 = 96 +VK_NUMPAD1 = 97 +VK_NUMPAD2 = 98 +VK_NUMPAD3 = 99 +VK_NUMPAD4 = 100 +VK_NUMPAD5 = 101 +VK_NUMPAD6 = 102 +VK_NUMPAD7 = 103 +VK_NUMPAD8 = 104 +VK_NUMPAD9 = 105 +VK_MULTIPLY = 106 +VK_ADD = 107 +VK_SEPARATOR = 108 +VK_SUBTRACT = 109 +VK_DECIMAL = 110 +VK_DIVIDE = 111 +VK_F1 = 112 +VK_F2 = 113 +VK_F3 = 114 +VK_F4 = 115 +VK_F5 = 116 +VK_F6 = 117 +VK_F7 = 118 +VK_F8 = 119 +VK_F9 = 120 +VK_F10 = 121 +VK_F11 = 122 +VK_F12 = 123 +VK_F13 = 124 +VK_F14 = 125 +VK_F15 = 126 +VK_F16 = 127 +VK_F17 = 128 +VK_F18 = 129 +VK_F19 = 130 +VK_F20 = 131 +VK_F21 = 132 +VK_F22 = 133 +VK_F23 = 134 +VK_F24 = 135 +VK_NUMLOCK = 144 +VK_SCROLL = 145 +VK_LSHIFT = 160 +VK_RSHIFT = 161 +VK_LCONTROL = 162 +VK_RCONTROL = 163 +VK_LMENU = 164 +VK_RMENU = 165 +VK_PROCESSKEY = 229 +VK_ATTN = 246 +VK_CRSEL = 247 +VK_EXSEL = 248 +VK_EREOF = 249 +VK_PLAY = 250 +VK_ZOOM = 251 +VK_NONAME = 252 +VK_PA1 = 253 +VK_OEM_CLEAR = 254 +# multi-media related "keys" +MOUSEEVENTF_XDOWN = 0x0080 +MOUSEEVENTF_XUP = 0x0100 +MOUSEEVENTF_WHEEL = 0x0800 +VK_XBUTTON1 = 0x05 +VK_XBUTTON2 = 0x06 +VK_VOLUME_MUTE = 0xAD +VK_VOLUME_DOWN = 0xAE +VK_VOLUME_UP = 0xAF +VK_MEDIA_NEXT_TRACK = 0xB0 +VK_MEDIA_PREV_TRACK = 0xB1 +VK_MEDIA_PLAY_PAUSE = 0xB3 
+VK_BROWSER_BACK = 0xA6 +VK_BROWSER_FORWARD = 0xA7 +WH_MIN = (-1) +WH_MSGFILTER = (-1) +WH_JOURNALRECORD = 0 +WH_JOURNALPLAYBACK = 1 +WH_KEYBOARD = 2 +WH_GETMESSAGE = 3 +WH_CALLWNDPROC = 4 +WH_CBT = 5 +WH_SYSMSGFILTER = 6 +WH_MOUSE = 7 +WH_HARDWARE = 8 +WH_DEBUG = 9 +WH_SHELL = 10 +WH_FOREGROUNDIDLE = 11 +WH_CALLWNDPROCRET = 12 +WH_KEYBOARD_LL = 13 +WH_MOUSE_LL = 14 +WH_MAX = 14 +WH_MINHOOK = WH_MIN +WH_MAXHOOK = WH_MAX +HC_ACTION = 0 +HC_GETNEXT = 1 +HC_SKIP = 2 +HC_NOREMOVE = 3 +HC_NOREM = HC_NOREMOVE +HC_SYSMODALON = 4 +HC_SYSMODALOFF = 5 +HCBT_MOVESIZE = 0 +HCBT_MINMAX = 1 +HCBT_QS = 2 +HCBT_CREATEWND = 3 +HCBT_DESTROYWND = 4 +HCBT_ACTIVATE = 5 +HCBT_CLICKSKIPPED = 6 +HCBT_KEYSKIPPED = 7 +HCBT_SYSCOMMAND = 8 +HCBT_SETFOCUS = 9 +MSGF_DIALOGBOX = 0 +MSGF_MESSAGEBOX = 1 +MSGF_MENU = 2 +#MSGF_MOVE = 3 +#MSGF_SIZE = 4 +MSGF_SCROLLBAR = 5 +MSGF_NEXTWINDOW = 6 +#MSGF_MAINLOOP = 8 +MSGF_MAX = 8 +MSGF_USER = 4096 +HSHELL_WINDOWCREATED = 1 +HSHELL_WINDOWDESTROYED = 2 +HSHELL_ACTIVATESHELLWINDOW = 3 +HSHELL_WINDOWACTIVATED = 4 +HSHELL_GETMINRECT = 5 +HSHELL_REDRAW = 6 +HSHELL_TASKMAN = 7 +HSHELL_LANGUAGE = 8 +HSHELL_ACCESSIBILITYSTATE = 11 +ACCESS_STICKYKEYS = 1 +ACCESS_FILTERKEYS = 2 +ACCESS_MOUSEKEYS = 3 +# winuser.h line 624 +LLKHF_EXTENDED = 1 +LLKHF_INJECTED = 16 +LLKHF_ALTDOWN = 32 +LLKHF_UP = 128 +LLKHF_LOWER_IL_INJECTED = 2 +LLMHF_INJECTED = 1 +LLMHF_LOWER_IL_INJECTED = 2 +# line 692 +HKL_PREV = 0 +HKL_NEXT = 1 +KLF_ACTIVATE = 1 +KLF_SUBSTITUTE_OK = 2 +KLF_UNLOADPREVIOUS = 4 +KLF_REORDER = 8 +KLF_REPLACELANG = 16 +KLF_NOTELLSHELL = 128 +KLF_SETFORPROCESS = 256 +KL_NAMELENGTH = 9 +DESKTOP_READOBJECTS = 1 +DESKTOP_CREATEWINDOW = 2 +DESKTOP_CREATEMENU = 4 +DESKTOP_HOOKCONTROL = 8 +DESKTOP_JOURNALRECORD = 16 +DESKTOP_JOURNALPLAYBACK = 32 +DESKTOP_ENUMERATE = 64 +DESKTOP_WRITEOBJECTS = 128 +DESKTOP_SWITCHDESKTOP = 256 +DF_ALLOWOTHERACCOUNTHOOK = 1 +WINSTA_ENUMDESKTOPS = 1 +WINSTA_READATTRIBUTES = 2 +WINSTA_ACCESSCLIPBOARD = 4 +WINSTA_CREATEDESKTOP = 8 +WINSTA_WRITEATTRIBUTES = 16 +WINSTA_ACCESSGLOBALATOMS = 32 +WINSTA_EXITWINDOWS = 64 +WINSTA_ENUMERATE = 256 +WINSTA_READSCREEN = 512 +WSF_VISIBLE = 1 +UOI_FLAGS = 1 +UOI_NAME = 2 +UOI_TYPE = 3 +UOI_USER_SID = 4 +GWL_WNDPROC = (-4) +GWL_HINSTANCE = (-6) +GWL_HWNDPARENT = (-8) +GWL_STYLE = (-16) +GWL_EXSTYLE = (-20) +GWL_USERDATA = (-21) +GWL_ID = (-12) +GCL_MENUNAME = (-8) +GCL_HBRBACKGROUND = (-10) +GCL_HCURSOR = (-12) +GCL_HICON = (-14) +GCL_HMODULE = (-16) +GCL_CBWNDEXTRA = (-18) +GCL_CBCLSEXTRA = (-20) +GCL_WNDPROC = (-24) +GCL_STYLE = (-26) +GCW_ATOM = (-32) +GCL_HICONSM = (-34) +# line 1291 +WM_NULL = 0 +WM_CREATE = 1 +WM_DESTROY = 2 +WM_MOVE = 3 +WM_SIZE = 5 +WM_ACTIVATE = 6 +WA_INACTIVE = 0 +WA_ACTIVE = 1 +WA_CLICKACTIVE = 2 +WM_SETFOCUS = 7 +WM_KILLFOCUS = 8 +WM_ENABLE = 10 +WM_SETREDRAW = 11 +WM_SETTEXT = 12 +WM_GETTEXT = 13 +WM_GETTEXTLENGTH = 14 +WM_PAINT = 15 +WM_CLOSE = 16 +WM_QUERYENDSESSION = 17 +WM_QUIT = 18 +WM_QUERYOPEN = 19 +WM_ERASEBKGND = 20 +WM_SYSCOLORCHANGE = 21 +WM_ENDSESSION = 22 +WM_SHOWWINDOW = 24 +WM_WININICHANGE = 26 +WM_SETTINGCHANGE = WM_WININICHANGE +WM_DEVMODECHANGE = 27 +WM_ACTIVATEAPP = 28 +WM_FONTCHANGE = 29 +WM_TIMECHANGE = 30 +WM_CANCELMODE = 31 +WM_SETCURSOR = 32 +WM_MOUSEACTIVATE = 33 +WM_CHILDACTIVATE = 34 +WM_QUEUESYNC = 35 +WM_GETMINMAXINFO = 36 +WM_PAINTICON = 38 +WM_ICONERASEBKGND = 39 +WM_NEXTDLGCTL = 40 +WM_SPOOLERSTATUS = 42 +WM_DRAWITEM = 43 +WM_MEASUREITEM = 44 +WM_DELETEITEM = 45 +WM_VKEYTOITEM = 46 +WM_CHARTOITEM = 47 +WM_SETFONT = 48 +WM_GETFONT = 49 +WM_SETHOTKEY = 50 +WM_GETHOTKEY = 51 
+WM_QUERYDRAGICON = 55 +WM_COMPAREITEM = 57 +WM_GETOBJECT = 61 +WM_COMPACTING = 65 +WM_COMMNOTIFY = 68 +WM_WINDOWPOSCHANGING = 70 +WM_WINDOWPOSCHANGED = 71 +WM_POWER = 72 +PWR_OK = 1 +PWR_FAIL = (-1) +PWR_SUSPENDREQUEST = 1 +PWR_SUSPENDRESUME = 2 +PWR_CRITICALRESUME = 3 +WM_COPYDATA = 74 +WM_CANCELJOURNAL = 75 +WM_NOTIFY = 78 +WM_INPUTLANGCHANGEREQUEST = 80 +WM_INPUTLANGCHANGE = 81 +WM_TCARD = 82 +WM_HELP = 83 +WM_USERCHANGED = 84 +WM_NOTIFYFORMAT = 85 +NFR_ANSI = 1 +NFR_UNICODE = 2 +NF_QUERY = 3 +NF_REQUERY = 4 +WM_CONTEXTMENU = 123 +WM_STYLECHANGING = 124 +WM_STYLECHANGED = 125 +WM_DISPLAYCHANGE = 126 +WM_GETICON = 127 +WM_SETICON = 128 +WM_NCCREATE = 129 +WM_NCDESTROY = 130 +WM_NCCALCSIZE = 131 +WM_NCHITTEST = 132 +WM_NCPAINT = 133 +WM_NCACTIVATE = 134 +WM_GETDLGCODE = 135 +WM_SYNCPAINT = 136 +WM_NCMOUSEMOVE = 160 +WM_NCLBUTTONDOWN = 161 +WM_NCLBUTTONUP = 162 +WM_NCLBUTTONDBLCLK = 163 +WM_NCRBUTTONDOWN = 164 +WM_NCRBUTTONUP = 165 +WM_NCRBUTTONDBLCLK = 166 +WM_NCMBUTTONDOWN = 167 +WM_NCMBUTTONUP = 168 +WM_NCMBUTTONDBLCLK = 169 +WM_KEYFIRST = 256 +WM_KEYDOWN = 256 +WM_KEYUP = 257 +WM_CHAR = 258 +WM_DEADCHAR = 259 +WM_SYSKEYDOWN = 260 +WM_SYSKEYUP = 261 +WM_SYSCHAR = 262 +WM_SYSDEADCHAR = 263 +WM_KEYLAST = 264 +WM_IME_STARTCOMPOSITION = 269 +WM_IME_ENDCOMPOSITION = 270 +WM_IME_COMPOSITION = 271 +WM_IME_KEYLAST = 271 +WM_INITDIALOG = 272 +WM_COMMAND = 273 +WM_SYSCOMMAND = 274 +WM_TIMER = 275 +WM_HSCROLL = 276 +WM_VSCROLL = 277 +WM_INITMENU = 278 +WM_INITMENUPOPUP = 279 +WM_MENUSELECT = 287 +WM_MENUCHAR = 288 +WM_ENTERIDLE = 289 +WM_MENURBUTTONUP = 290 +WM_MENUDRAG = 291 +WM_MENUGETOBJECT = 292 +WM_UNINITMENUPOPUP = 293 +WM_MENUCOMMAND = 294 +WM_CTLCOLORMSGBOX = 306 +WM_CTLCOLOREDIT = 307 +WM_CTLCOLORLISTBOX = 308 +WM_CTLCOLORBTN = 309 +WM_CTLCOLORDLG = 310 +WM_CTLCOLORSCROLLBAR = 311 +WM_CTLCOLORSTATIC = 312 +WM_MOUSEFIRST = 512 +WM_MOUSEMOVE = 512 +WM_LBUTTONDOWN = 513 +WM_LBUTTONUP = 514 +WM_LBUTTONDBLCLK = 515 +WM_RBUTTONDOWN = 516 +WM_RBUTTONUP = 517 +WM_RBUTTONDBLCLK = 518 +WM_MBUTTONDOWN = 519 +WM_MBUTTONUP = 520 +WM_MBUTTONDBLCLK = 521 +WM_MOUSEWHEEL = 522 +WM_MOUSELAST = 522 +WHEEL_DELTA = 120 # Value for rolling one detent +WHEEL_PAGESCROLL = -1 # Scroll one page +WM_PARENTNOTIFY = 528 +MENULOOP_WINDOW = 0 +MENULOOP_POPUP = 1 +WM_ENTERMENULOOP = 529 +WM_EXITMENULOOP = 530 +WM_NEXTMENU = 531 +WM_SIZING = 532 +WM_CAPTURECHANGED = 533 +WM_MOVING = 534 +WM_POWERBROADCAST = 536 +PBT_APMQUERYSUSPEND = 0 +PBT_APMQUERYSTANDBY = 1 +PBT_APMQUERYSUSPENDFAILED = 2 +PBT_APMQUERYSTANDBYFAILED = 3 +PBT_APMSUSPEND = 4 +PBT_APMSTANDBY = 5 +PBT_APMRESUMECRITICAL = 6 +PBT_APMRESUMESUSPEND = 7 +PBT_APMRESUMESTANDBY = 8 +PBTF_APMRESUMEFROMFAILURE = 1 +PBT_APMBATTERYLOW = 9 +PBT_APMPOWERSTATUSCHANGE = 10 +PBT_APMOEMEVENT = 11 +PBT_APMRESUMEAUTOMATIC = 18 +WM_DEVICECHANGE = 537 +WM_MDICREATE = 544 +WM_MDIDESTROY = 545 +WM_MDIACTIVATE = 546 +WM_MDIRESTORE = 547 +WM_MDINEXT = 548 +WM_MDIMAXIMIZE = 549 +WM_MDITILE = 550 +WM_MDICASCADE = 551 +WM_MDIICONARRANGE = 552 +WM_MDIGETACTIVE = 553 +WM_MDISETMENU = 560 +WM_ENTERSIZEMOVE = 561 +WM_EXITSIZEMOVE = 562 +WM_DROPFILES = 563 +WM_MDIREFRESHMENU = 564 +WM_IME_SETCONTEXT = 641 +WM_IME_NOTIFY = 642 +WM_IME_CONTROL = 643 +WM_IME_COMPOSITIONFULL = 644 +WM_IME_SELECT = 645 +WM_IME_CHAR = 646 +WM_IME_REQUEST = 648 +WM_IME_KEYDOWN = 656 +WM_IME_KEYUP = 657 +WM_MOUSEHOVER = 673 +WM_MOUSELEAVE = 675 +WM_CUT = 768 +WM_COPY = 769 +WM_PASTE = 770 +WM_CLEAR = 771 +WM_UNDO = 772 +WM_RENDERFORMAT = 773 +WM_RENDERALLFORMATS = 774 +WM_DESTROYCLIPBOARD = 775 +WM_DRAWCLIPBOARD = 
776 +WM_PAINTCLIPBOARD = 777 +WM_VSCROLLCLIPBOARD = 778 +WM_SIZECLIPBOARD = 779 +WM_ASKCBFORMATNAME = 780 +WM_CHANGECBCHAIN = 781 +WM_HSCROLLCLIPBOARD = 782 +WM_QUERYNEWPALETTE = 783 +WM_PALETTEISCHANGING = 784 +WM_PALETTECHANGED = 785 +WM_HOTKEY = 786 +WM_PRINT = 791 +WM_PRINTCLIENT = 792 +WM_HANDHELDFIRST = 856 +WM_HANDHELDLAST = 863 +WM_AFXFIRST = 864 +WM_AFXLAST = 895 +WM_PENWINFIRST = 896 +WM_PENWINLAST = 911 +WM_APP = 32768 +WMSZ_LEFT = 1 +WMSZ_RIGHT = 2 +WMSZ_TOP = 3 +WMSZ_TOPLEFT = 4 +WMSZ_TOPRIGHT = 5 +WMSZ_BOTTOM = 6 +WMSZ_BOTTOMLEFT = 7 +WMSZ_BOTTOMRIGHT = 8 +#ST_BEGINSWP = 0 +#ST_ENDSWP = 1 +HTERROR = (-2) +HTTRANSPARENT = (-1) +HTNOWHERE = 0 +HTCLIENT = 1 +HTCAPTION = 2 +HTSYSMENU = 3 +HTGROWBOX = 4 +HTSIZE = HTGROWBOX +HTMENU = 5 +HTHSCROLL = 6 +HTVSCROLL = 7 +HTMINBUTTON = 8 +HTMAXBUTTON = 9 +HTLEFT = 10 +HTRIGHT = 11 +HTTOP = 12 +HTTOPLEFT = 13 +HTTOPRIGHT = 14 +HTBOTTOM = 15 +HTBOTTOMLEFT = 16 +HTBOTTOMRIGHT = 17 +HTBORDER = 18 +HTREDUCE = HTMINBUTTON +HTZOOM = HTMAXBUTTON +HTSIZEFIRST = HTLEFT +HTSIZELAST = HTBOTTOMRIGHT +HTOBJECT = 19 +HTCLOSE = 20 +HTHELP = 21 +SMTO_NORMAL = 0 +SMTO_BLOCK = 1 +SMTO_ABORTIFHUNG = 2 +SMTO_NOTIMEOUTIFNOTHUNG = 8 +MA_ACTIVATE = 1 +MA_ACTIVATEANDEAT = 2 +MA_NOACTIVATE = 3 +MA_NOACTIVATEANDEAT = 4 +ICON_SMALL = 0 +ICON_BIG = 1 +SIZE_RESTORED = 0 +SIZE_MINIMIZED = 1 +SIZE_MAXIMIZED = 2 +SIZE_MAXSHOW = 3 +SIZE_MAXHIDE = 4 +SIZENORMAL = SIZE_RESTORED +SIZEICONIC = SIZE_MINIMIZED +SIZEFULLSCREEN = SIZE_MAXIMIZED +SIZEZOOMSHOW = SIZE_MAXSHOW +SIZEZOOMHIDE = SIZE_MAXHIDE +WVR_ALIGNTOP = 16 +WVR_ALIGNLEFT = 32 +WVR_ALIGNBOTTOM = 64 +WVR_ALIGNRIGHT = 128 +WVR_HREDRAW = 256 +WVR_VREDRAW = 512 +WVR_REDRAW = (WVR_HREDRAW | WVR_VREDRAW) +WVR_VALIDRECTS = 1024 +MK_LBUTTON = 1 +MK_RBUTTON = 2 +MK_SHIFT = 4 +MK_CONTROL = 8 +MK_MBUTTON = 16 +TME_HOVER = 1 +TME_LEAVE = 2 +TME_QUERY = 1073741824 +TME_CANCEL = -2147483648 +HOVER_DEFAULT = -1 +WS_OVERLAPPED = 0 +WS_POPUP = -2147483648 +WS_CHILD = 1073741824 +WS_MINIMIZE = 536870912 +WS_VISIBLE = 268435456 +WS_DISABLED = 134217728 +WS_CLIPSIBLINGS = 67108864 +WS_CLIPCHILDREN = 33554432 +WS_MAXIMIZE = 16777216 +WS_CAPTION = 12582912 +WS_BORDER = 8388608 +WS_DLGFRAME = 4194304 +WS_VSCROLL = 2097152 +WS_HSCROLL = 1048576 +WS_SYSMENU = 524288 +WS_THICKFRAME = 262144 +WS_GROUP = 131072 +WS_TABSTOP = 65536 +WS_MINIMIZEBOX = 131072 +WS_MAXIMIZEBOX = 65536 +WS_TILED = WS_OVERLAPPED +WS_ICONIC = WS_MINIMIZE +WS_SIZEBOX = WS_THICKFRAME +WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED | \ + WS_CAPTION | \ + WS_SYSMENU | \ + WS_THICKFRAME | \ + WS_MINIMIZEBOX | \ + WS_MAXIMIZEBOX) +WS_POPUPWINDOW = (WS_POPUP | \ + WS_BORDER | \ + WS_SYSMENU) +WS_CHILDWINDOW = (WS_CHILD) +WS_TILEDWINDOW = WS_OVERLAPPEDWINDOW +WS_EX_DLGMODALFRAME = 1 +WS_EX_NOPARENTNOTIFY = 4 +WS_EX_TOPMOST = 8 +WS_EX_ACCEPTFILES = 16 +WS_EX_TRANSPARENT = 32 +WS_EX_MDICHILD = 64 +WS_EX_TOOLWINDOW = 128 +WS_EX_WINDOWEDGE = 256 +WS_EX_CLIENTEDGE = 512 +WS_EX_CONTEXTHELP = 1024 +WS_EX_RIGHT = 4096 +WS_EX_LEFT = 0 +WS_EX_RTLREADING = 8192 +WS_EX_LTRREADING = 0 +WS_EX_LEFTSCROLLBAR = 16384 +WS_EX_RIGHTSCROLLBAR = 0 +WS_EX_CONTROLPARENT = 65536 +WS_EX_STATICEDGE = 131072 +WS_EX_APPWINDOW = 262144 +WS_EX_OVERLAPPEDWINDOW = (WS_EX_WINDOWEDGE | WS_EX_CLIENTEDGE) +WS_EX_PALETTEWINDOW = (WS_EX_WINDOWEDGE | WS_EX_TOOLWINDOW | WS_EX_TOPMOST) +WS_EX_LAYERED = 0x00080000 +WS_EX_NOINHERITLAYOUT = 0x00100000 +WS_EX_LAYOUTRTL = 0x00400000 +WS_EX_COMPOSITED = 0x02000000 +WS_EX_NOACTIVATE = 0x08000000 + +CS_VREDRAW = 1 +CS_HREDRAW = 2 +#CS_KEYCVTWINDOW = 0x0004 +CS_DBLCLKS = 8 +CS_OWNDC = 
32 +CS_CLASSDC = 64 +CS_PARENTDC = 128 +#CS_NOKEYCVT = 0x0100 +CS_NOCLOSE = 512 +CS_SAVEBITS = 2048 +CS_BYTEALIGNCLIENT = 4096 +CS_BYTEALIGNWINDOW = 8192 +CS_GLOBALCLASS = 16384 +CS_IME = 65536 +PRF_CHECKVISIBLE = 1 +PRF_NONCLIENT = 2 +PRF_CLIENT = 4 +PRF_ERASEBKGND = 8 +PRF_CHILDREN = 16 +PRF_OWNED = 32 +BDR_RAISEDOUTER = 1 +BDR_SUNKENOUTER = 2 +BDR_RAISEDINNER = 4 +BDR_SUNKENINNER = 8 +BDR_OUTER = 3 +BDR_INNER = 12 +#BDR_RAISED = 0x0005 +#BDR_SUNKEN = 0x000a +EDGE_RAISED = (BDR_RAISEDOUTER | BDR_RAISEDINNER) +EDGE_SUNKEN = (BDR_SUNKENOUTER | BDR_SUNKENINNER) +EDGE_ETCHED = (BDR_SUNKENOUTER | BDR_RAISEDINNER) +EDGE_BUMP = (BDR_RAISEDOUTER | BDR_SUNKENINNER) + +# winuser.h line 2879 +ISMEX_NOSEND = 0 +ISMEX_SEND = 1 +ISMEX_NOTIFY = 2 +ISMEX_CALLBACK = 4 +ISMEX_REPLIED = 8 +CW_USEDEFAULT = -2147483648 +FLASHW_STOP = 0 +FLASHW_CAPTION = 1 +FLASHW_TRAY = 2 +FLASHW_ALL = (FLASHW_CAPTION | FLASHW_TRAY) +FLASHW_TIMER = 4 +FLASHW_TIMERNOFG = 12 + +# winuser.h line 7963 +DS_ABSALIGN = 1 +DS_SYSMODAL = 2 +DS_LOCALEDIT = 32 +DS_SETFONT = 64 +DS_MODALFRAME = 128 +DS_NOIDLEMSG = 256 +DS_SETFOREGROUND = 512 +DS_3DLOOK = 4 +DS_FIXEDSYS = 8 +DS_NOFAILCREATE = 16 +DS_CONTROL = 1024 +DS_CENTER = 2048 +DS_CENTERMOUSE = 4096 +DS_CONTEXTHELP = 8192 +DM_GETDEFID = (WM_USER+0) +DM_SETDEFID = (WM_USER+1) +DM_REPOSITION = (WM_USER+2) +#PSM_PAGEINFO = (WM_USER+100) +#PSM_SHEETINFO = (WM_USER+101) +#PSI_SETACTIVE = 0x0001 +#PSI_KILLACTIVE = 0x0002 +#PSI_APPLY = 0x0003 +#PSI_RESET = 0x0004 +#PSI_HASHELP = 0x0005 +#PSI_HELP = 0x0006 +#PSI_CHANGED = 0x0001 +#PSI_GUISTART = 0x0002 +#PSI_REBOOT = 0x0003 +#PSI_GETSIBLINGS = 0x0004 +DC_HASDEFID = 21323 +DLGC_WANTARROWS = 1 +DLGC_WANTTAB = 2 +DLGC_WANTALLKEYS = 4 +DLGC_WANTMESSAGE = 4 +DLGC_HASSETSEL = 8 +DLGC_DEFPUSHBUTTON = 16 +DLGC_UNDEFPUSHBUTTON = 32 +DLGC_RADIOBUTTON = 64 +DLGC_WANTCHARS = 128 +DLGC_STATIC = 256 +DLGC_BUTTON = 8192 +LB_CTLCODE = 0 +LB_OKAY = 0 +LB_ERR = (-1) +LB_ERRSPACE = (-2) +LBN_ERRSPACE = (-2) +LBN_SELCHANGE = 1 +LBN_DBLCLK = 2 +LBN_SELCANCEL = 3 +LBN_SETFOCUS = 4 +LBN_KILLFOCUS = 5 +LB_ADDSTRING = 384 +LB_INSERTSTRING = 385 +LB_DELETESTRING = 386 +LB_SELITEMRANGEEX = 387 +LB_RESETCONTENT = 388 +LB_SETSEL = 389 +LB_SETCURSEL = 390 +LB_GETSEL = 391 +LB_GETCURSEL = 392 +LB_GETTEXT = 393 +LB_GETTEXTLEN = 394 +LB_GETCOUNT = 395 +LB_SELECTSTRING = 396 +LB_DIR = 397 +LB_GETTOPINDEX = 398 +LB_FINDSTRING = 399 +LB_GETSELCOUNT = 400 +LB_GETSELITEMS = 401 +LB_SETTABSTOPS = 402 +LB_GETHORIZONTALEXTENT = 403 +LB_SETHORIZONTALEXTENT = 404 +LB_SETCOLUMNWIDTH = 405 +LB_ADDFILE = 406 +LB_SETTOPINDEX = 407 +LB_GETITEMRECT = 408 +LB_GETITEMDATA = 409 +LB_SETITEMDATA = 410 +LB_SELITEMRANGE = 411 +LB_SETANCHORINDEX = 412 +LB_GETANCHORINDEX = 413 +LB_SETCARETINDEX = 414 +LB_GETCARETINDEX = 415 +LB_SETITEMHEIGHT = 416 +LB_GETITEMHEIGHT = 417 +LB_FINDSTRINGEXACT = 418 +LB_SETLOCALE = 421 +LB_GETLOCALE = 422 +LB_SETCOUNT = 423 +LB_INITSTORAGE = 424 +LB_ITEMFROMPOINT = 425 +LB_MSGMAX = 432 +LBS_NOTIFY = 1 +LBS_SORT = 2 +LBS_NOREDRAW = 4 +LBS_MULTIPLESEL = 8 +LBS_OWNERDRAWFIXED = 16 +LBS_OWNERDRAWVARIABLE = 32 +LBS_HASSTRINGS = 64 +LBS_USETABSTOPS = 128 +LBS_NOINTEGRALHEIGHT = 256 +LBS_MULTICOLUMN = 512 +LBS_WANTKEYBOARDINPUT = 1024 +LBS_EXTENDEDSEL = 2048 +LBS_DISABLENOSCROLL = 4096 +LBS_NODATA = 8192 +LBS_NOSEL = 16384 +LBS_STANDARD = (LBS_NOTIFY | LBS_SORT | WS_VSCROLL | WS_BORDER) +CB_OKAY = 0 +CB_ERR = (-1) +CB_ERRSPACE = (-2) +CBN_ERRSPACE = (-1) +CBN_SELCHANGE = 1 +CBN_DBLCLK = 2 +CBN_SETFOCUS = 3 +CBN_KILLFOCUS = 4 +CBN_EDITCHANGE = 5 +CBN_EDITUPDATE = 6 
+CBN_DROPDOWN = 7 +CBN_CLOSEUP = 8 +CBN_SELENDOK = 9 +CBN_SELENDCANCEL = 10 +CBS_SIMPLE = 1 +CBS_DROPDOWN = 2 +CBS_DROPDOWNLIST = 3 +CBS_OWNERDRAWFIXED = 16 +CBS_OWNERDRAWVARIABLE = 32 +CBS_AUTOHSCROLL = 64 +CBS_OEMCONVERT = 128 +CBS_SORT = 256 +CBS_HASSTRINGS = 512 +CBS_NOINTEGRALHEIGHT = 1024 +CBS_DISABLENOSCROLL = 2048 +CBS_UPPERCASE = 8192 +CBS_LOWERCASE = 16384 +CB_GETEDITSEL = 320 +CB_LIMITTEXT = 321 +CB_SETEDITSEL = 322 +CB_ADDSTRING = 323 +CB_DELETESTRING = 324 +CB_DIR = 325 +CB_GETCOUNT = 326 +CB_GETCURSEL = 327 +CB_GETLBTEXT = 328 +CB_GETLBTEXTLEN = 329 +CB_INSERTSTRING = 330 +CB_RESETCONTENT = 331 +CB_FINDSTRING = 332 +CB_SELECTSTRING = 333 +CB_SETCURSEL = 334 +CB_SHOWDROPDOWN = 335 +CB_GETITEMDATA = 336 +CB_SETITEMDATA = 337 +CB_GETDROPPEDCONTROLRECT = 338 +CB_SETITEMHEIGHT = 339 +CB_GETITEMHEIGHT = 340 +CB_SETEXTENDEDUI = 341 +CB_GETEXTENDEDUI = 342 +CB_GETDROPPEDSTATE = 343 +CB_FINDSTRINGEXACT = 344 +CB_SETLOCALE = 345 +CB_GETLOCALE = 346 +CB_GETTOPINDEX = 347 +CB_SETTOPINDEX = 348 +CB_GETHORIZONTALEXTENT = 349 +CB_SETHORIZONTALEXTENT = 350 +CB_GETDROPPEDWIDTH = 351 +CB_SETDROPPEDWIDTH = 352 +CB_INITSTORAGE = 353 +CB_MSGMAX = 354 +SBS_HORZ = 0 +SBS_VERT = 1 +SBS_TOPALIGN = 2 +SBS_LEFTALIGN = 2 +SBS_BOTTOMALIGN = 4 +SBS_RIGHTALIGN = 4 +SBS_SIZEBOXTOPLEFTALIGN = 2 +SBS_SIZEBOXBOTTOMRIGHTALIGN = 4 +SBS_SIZEBOX = 8 +SBS_SIZEGRIP = 16 +SBM_SETPOS = 224 +SBM_GETPOS = 225 +SBM_SETRANGE = 226 +SBM_SETRANGEREDRAW = 230 +SBM_GETRANGE = 227 +SBM_ENABLE_ARROWS = 228 +SBM_SETSCROLLINFO = 233 +SBM_GETSCROLLINFO = 234 +SIF_RANGE = 1 +SIF_PAGE = 2 +SIF_POS = 4 +SIF_DISABLENOSCROLL = 8 +SIF_TRACKPOS = 16 +SIF_ALL = (SIF_RANGE | SIF_PAGE | SIF_POS | SIF_TRACKPOS) +MDIS_ALLCHILDSTYLES = 1 +MDITILE_VERTICAL = 0 +MDITILE_HORIZONTAL = 1 +MDITILE_SKIPDISABLED = 2 + +IMC_GETCANDIDATEPOS = 7 +IMC_SETCANDIDATEPOS = 8 +IMC_GETCOMPOSITIONFONT = 9 +IMC_SETCOMPOSITIONFONT = 10 +IMC_GETCOMPOSITIONWINDOW = 11 +IMC_SETCOMPOSITIONWINDOW = 12 +IMC_GETSTATUSWINDOWPOS = 15 +IMC_SETSTATUSWINDOWPOS = 16 +IMC_CLOSESTATUSWINDOW = 33 +IMC_OPENSTATUSWINDOW = 34 +# Generated by h2py from \msvc20\include\winnt.h +# hacked and split by mhammond. 
+DELETE = (65536) +READ_CONTROL = (131072) +WRITE_DAC = (262144) +WRITE_OWNER = (524288) +SYNCHRONIZE = (1048576) +STANDARD_RIGHTS_REQUIRED = (983040) +STANDARD_RIGHTS_READ = (READ_CONTROL) +STANDARD_RIGHTS_WRITE = (READ_CONTROL) +STANDARD_RIGHTS_EXECUTE = (READ_CONTROL) +STANDARD_RIGHTS_ALL = (2031616) +SPECIFIC_RIGHTS_ALL = (65535) +ACCESS_SYSTEM_SECURITY = (16777216) +MAXIMUM_ALLOWED = (33554432) +GENERIC_READ = (-2147483648) +GENERIC_WRITE = (1073741824) +GENERIC_EXECUTE = (536870912) +GENERIC_ALL = (268435456) + +SERVICE_KERNEL_DRIVER = 1 +SERVICE_FILE_SYSTEM_DRIVER = 2 +SERVICE_ADAPTER = 4 +SERVICE_RECOGNIZER_DRIVER = 8 +SERVICE_DRIVER = (SERVICE_KERNEL_DRIVER | \ + SERVICE_FILE_SYSTEM_DRIVER | \ + SERVICE_RECOGNIZER_DRIVER) +SERVICE_WIN32_OWN_PROCESS = 16 +SERVICE_WIN32_SHARE_PROCESS = 32 +SERVICE_WIN32 = (SERVICE_WIN32_OWN_PROCESS | \ + SERVICE_WIN32_SHARE_PROCESS) +SERVICE_INTERACTIVE_PROCESS = 256 +SERVICE_TYPE_ALL = (SERVICE_WIN32 | \ + SERVICE_ADAPTER | \ + SERVICE_DRIVER | \ + SERVICE_INTERACTIVE_PROCESS) +SERVICE_BOOT_START = 0 +SERVICE_SYSTEM_START = 1 +SERVICE_AUTO_START = 2 +SERVICE_DEMAND_START = 3 +SERVICE_DISABLED = 4 +SERVICE_ERROR_IGNORE = 0 +SERVICE_ERROR_NORMAL = 1 +SERVICE_ERROR_SEVERE = 2 +SERVICE_ERROR_CRITICAL = 3 +TAPE_ERASE_SHORT = 0 +TAPE_ERASE_LONG = 1 +TAPE_LOAD = 0 +TAPE_UNLOAD = 1 +TAPE_TENSION = 2 +TAPE_LOCK = 3 +TAPE_UNLOCK = 4 +TAPE_FORMAT = 5 +TAPE_SETMARKS = 0 +TAPE_FILEMARKS = 1 +TAPE_SHORT_FILEMARKS = 2 +TAPE_LONG_FILEMARKS = 3 +TAPE_ABSOLUTE_POSITION = 0 +TAPE_LOGICAL_POSITION = 1 +TAPE_PSEUDO_LOGICAL_POSITION = 2 +TAPE_REWIND = 0 +TAPE_ABSOLUTE_BLOCK = 1 +TAPE_LOGICAL_BLOCK = 2 +TAPE_PSEUDO_LOGICAL_BLOCK = 3 +TAPE_SPACE_END_OF_DATA = 4 +TAPE_SPACE_RELATIVE_BLOCKS = 5 +TAPE_SPACE_FILEMARKS = 6 +TAPE_SPACE_SEQUENTIAL_FMKS = 7 +TAPE_SPACE_SETMARKS = 8 +TAPE_SPACE_SEQUENTIAL_SMKS = 9 +TAPE_DRIVE_FIXED = 1 +TAPE_DRIVE_SELECT = 2 +TAPE_DRIVE_INITIATOR = 4 +TAPE_DRIVE_ERASE_SHORT = 16 +TAPE_DRIVE_ERASE_LONG = 32 +TAPE_DRIVE_ERASE_BOP_ONLY = 64 +TAPE_DRIVE_ERASE_IMMEDIATE = 128 +TAPE_DRIVE_TAPE_CAPACITY = 256 +TAPE_DRIVE_TAPE_REMAINING = 512 +TAPE_DRIVE_FIXED_BLOCK = 1024 +TAPE_DRIVE_VARIABLE_BLOCK = 2048 +TAPE_DRIVE_WRITE_PROTECT = 4096 +TAPE_DRIVE_EOT_WZ_SIZE = 8192 +TAPE_DRIVE_ECC = 65536 +TAPE_DRIVE_COMPRESSION = 131072 +TAPE_DRIVE_PADDING = 262144 +TAPE_DRIVE_REPORT_SMKS = 524288 +TAPE_DRIVE_GET_ABSOLUTE_BLK = 1048576 +TAPE_DRIVE_GET_LOGICAL_BLK = 2097152 +TAPE_DRIVE_SET_EOT_WZ_SIZE = 4194304 +TAPE_DRIVE_LOAD_UNLOAD = -2147483647 +TAPE_DRIVE_TENSION = -2147483646 +TAPE_DRIVE_LOCK_UNLOCK = -2147483644 +TAPE_DRIVE_REWIND_IMMEDIATE = -2147483640 +TAPE_DRIVE_SET_BLOCK_SIZE = -2147483632 +TAPE_DRIVE_LOAD_UNLD_IMMED = -2147483616 +TAPE_DRIVE_TENSION_IMMED = -2147483584 +TAPE_DRIVE_LOCK_UNLK_IMMED = -2147483520 +TAPE_DRIVE_SET_ECC = -2147483392 +TAPE_DRIVE_SET_COMPRESSION = -2147483136 +TAPE_DRIVE_SET_PADDING = -2147482624 +TAPE_DRIVE_SET_REPORT_SMKS = -2147481600 +TAPE_DRIVE_ABSOLUTE_BLK = -2147479552 +TAPE_DRIVE_ABS_BLK_IMMED = -2147475456 +TAPE_DRIVE_LOGICAL_BLK = -2147467264 +TAPE_DRIVE_LOG_BLK_IMMED = -2147450880 +TAPE_DRIVE_END_OF_DATA = -2147418112 +TAPE_DRIVE_RELATIVE_BLKS = -2147352576 +TAPE_DRIVE_FILEMARKS = -2147221504 +TAPE_DRIVE_SEQUENTIAL_FMKS = -2146959360 +TAPE_DRIVE_SETMARKS = -2146435072 +TAPE_DRIVE_SEQUENTIAL_SMKS = -2145386496 +TAPE_DRIVE_REVERSE_POSITION = -2143289344 +TAPE_DRIVE_SPACE_IMMEDIATE = -2139095040 +TAPE_DRIVE_WRITE_SETMARKS = -2130706432 +TAPE_DRIVE_WRITE_FILEMARKS = -2113929216 +TAPE_DRIVE_WRITE_SHORT_FMKS = -2080374784 
+TAPE_DRIVE_WRITE_LONG_FMKS = -2013265920 +TAPE_DRIVE_WRITE_MARK_IMMED = -1879048192 +TAPE_DRIVE_FORMAT = -1610612736 +TAPE_DRIVE_FORMAT_IMMEDIATE = -1073741824 +TAPE_FIXED_PARTITIONS = 0 +TAPE_SELECT_PARTITIONS = 1 +TAPE_INITIATOR_PARTITIONS = 2 +# Generated by h2py from \msvc20\include\winnt.h +# hacked and split by mhammond. + +APPLICATION_ERROR_MASK = 536870912 +ERROR_SEVERITY_SUCCESS = 0 +ERROR_SEVERITY_INFORMATIONAL = 1073741824 +ERROR_SEVERITY_WARNING = -2147483648 +ERROR_SEVERITY_ERROR = -1073741824 +MINCHAR = 128 +MAXCHAR = 127 +MINSHORT = 32768 +MAXSHORT = 32767 +MINLONG = -2147483648 +MAXLONG = 2147483647 +MAXBYTE = 255 +MAXWORD = 65535 +MAXDWORD = -1 +LANG_NEUTRAL = 0 +LANG_BULGARIAN = 2 +LANG_CHINESE = 4 +LANG_CROATIAN = 26 +LANG_CZECH = 5 +LANG_DANISH = 6 +LANG_DUTCH = 19 +LANG_ENGLISH = 9 +LANG_FINNISH = 11 +LANG_FRENCH = 12 +LANG_GERMAN = 7 +LANG_GREEK = 8 +LANG_HUNGARIAN = 14 +LANG_ICELANDIC = 15 +LANG_ITALIAN = 16 +LANG_JAPANESE = 17 +LANG_KOREAN = 18 +LANG_NORWEGIAN = 20 +LANG_POLISH = 21 +LANG_PORTUGUESE = 22 +LANG_ROMANIAN = 24 +LANG_RUSSIAN = 25 +LANG_SLOVAK = 27 +LANG_SLOVENIAN = 36 +LANG_SPANISH = 10 +LANG_SWEDISH = 29 +LANG_TURKISH = 31 +SUBLANG_NEUTRAL = 0 +SUBLANG_DEFAULT = 1 +SUBLANG_SYS_DEFAULT = 2 +SUBLANG_CHINESE_TRADITIONAL = 1 +SUBLANG_CHINESE_SIMPLIFIED = 2 +SUBLANG_CHINESE_HONGKONG = 3 +SUBLANG_CHINESE_SINGAPORE = 4 +SUBLANG_DUTCH = 1 +SUBLANG_DUTCH_BELGIAN = 2 +SUBLANG_ENGLISH_US = 1 +SUBLANG_ENGLISH_UK = 2 +SUBLANG_ENGLISH_AUS = 3 +SUBLANG_ENGLISH_CAN = 4 +SUBLANG_ENGLISH_NZ = 5 +SUBLANG_ENGLISH_EIRE = 6 +SUBLANG_FRENCH = 1 +SUBLANG_FRENCH_BELGIAN = 2 +SUBLANG_FRENCH_CANADIAN = 3 +SUBLANG_FRENCH_SWISS = 4 +SUBLANG_GERMAN = 1 +SUBLANG_GERMAN_SWISS = 2 +SUBLANG_GERMAN_AUSTRIAN = 3 +SUBLANG_ITALIAN = 1 +SUBLANG_ITALIAN_SWISS = 2 +SUBLANG_NORWEGIAN_BOKMAL = 1 +SUBLANG_NORWEGIAN_NYNORSK = 2 +SUBLANG_PORTUGUESE = 2 +SUBLANG_PORTUGUESE_BRAZILIAN = 1 +SUBLANG_SPANISH = 1 +SUBLANG_SPANISH_MEXICAN = 2 +SUBLANG_SPANISH_MODERN = 3 +SORT_DEFAULT = 0 +SORT_JAPANESE_XJIS = 0 +SORT_JAPANESE_UNICODE = 1 +SORT_CHINESE_BIG5 = 0 +SORT_CHINESE_UNICODE = 1 +SORT_KOREAN_KSC = 0 +SORT_KOREAN_UNICODE = 1 +def PRIMARYLANGID(lgid): return ((lgid) & 1023) + +def SUBLANGID(lgid): return ((lgid) >> 10) + +NLS_VALID_LOCALE_MASK = 1048575 +CONTEXT_PORTABLE_32BIT = 1048576 +CONTEXT_ALPHA = 131072 +CONTEXT_CONTROL = (CONTEXT_ALPHA | 1) +CONTEXT_FLOATING_POINT = (CONTEXT_ALPHA | 2) +CONTEXT_INTEGER = (CONTEXT_ALPHA | 4) +CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER) +SIZE_OF_80387_REGISTERS = 80 +CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER) +CONTEXT_CONTROL = 1 +CONTEXT_FLOATING_POINT = 2 +CONTEXT_INTEGER = 4 +CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER) +PROCESS_TERMINATE = (1) +PROCESS_CREATE_THREAD = (2) +PROCESS_VM_OPERATION = (8) +PROCESS_VM_READ = (16) +PROCESS_VM_WRITE = (32) +PROCESS_DUP_HANDLE = (64) +PROCESS_CREATE_PROCESS = (128) +PROCESS_SET_QUOTA = (256) +PROCESS_SET_INFORMATION = (512) +PROCESS_QUERY_INFORMATION = (1024) +PROCESS_SUSPEND_RESUME = (2048) +PROCESS_QUERY_LIMITED_INFORMATION = (4096) +PROCESS_SET_LIMITED_INFORMATION = (8192) +PROCESS_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 4095) +THREAD_TERMINATE = (1) +THREAD_SUSPEND_RESUME = (2) +THREAD_GET_CONTEXT = (8) +THREAD_SET_CONTEXT = (16) +THREAD_SET_INFORMATION = (32) +THREAD_QUERY_INFORMATION = (64) +THREAD_SET_THREAD_TOKEN = (128) +THREAD_IMPERSONATE = (256) +THREAD_DIRECT_IMPERSONATION = (512) 
+THREAD_SET_LIMITED_INFORMATION = (1024) +THREAD_QUERY_LIMITED_INFORMATION = (2048) +THREAD_RESUME = (4096) +TLS_MINIMUM_AVAILABLE = 64 +EVENT_MODIFY_STATE = 2 +MUTANT_QUERY_STATE = 1 +SEMAPHORE_MODIFY_STATE = 2 +TIME_ZONE_ID_UNKNOWN = 0 +TIME_ZONE_ID_STANDARD = 1 +TIME_ZONE_ID_DAYLIGHT = 2 +PROCESSOR_INTEL_386 = 386 +PROCESSOR_INTEL_486 = 486 +PROCESSOR_INTEL_PENTIUM = 586 +PROCESSOR_INTEL_860 = 860 +PROCESSOR_MIPS_R2000 = 2000 +PROCESSOR_MIPS_R3000 = 3000 +PROCESSOR_MIPS_R4000 = 4000 +PROCESSOR_ALPHA_21064 = 21064 +PROCESSOR_PPC_601 = 601 +PROCESSOR_PPC_603 = 603 +PROCESSOR_PPC_604 = 604 +PROCESSOR_PPC_620 = 620 +SECTION_QUERY = 1 +SECTION_MAP_WRITE = 2 +SECTION_MAP_READ = 4 +SECTION_MAP_EXECUTE = 8 +SECTION_EXTEND_SIZE = 16 +PAGE_NOACCESS = 1 +PAGE_READONLY = 2 +PAGE_READWRITE = 4 +PAGE_WRITECOPY = 8 +PAGE_EXECUTE = 16 +PAGE_EXECUTE_READ = 32 +PAGE_EXECUTE_READWRITE = 64 +PAGE_EXECUTE_WRITECOPY = 128 +PAGE_GUARD = 256 +PAGE_NOCACHE = 512 +MEM_COMMIT = 4096 +MEM_RESERVE = 8192 +MEM_DECOMMIT = 16384 +MEM_RELEASE = 32768 +MEM_FREE = 65536 +MEM_PRIVATE = 131072 +MEM_MAPPED = 262144 +MEM_TOP_DOWN = 1048576 + +# Generated by h2py from \msvc20\include\winnt.h +# hacked and split by mhammond. +SEC_FILE = 8388608 +SEC_IMAGE = 16777216 +SEC_RESERVE = 67108864 +SEC_COMMIT = 134217728 +SEC_NOCACHE = 268435456 +MEM_IMAGE = SEC_IMAGE +FILE_SHARE_READ = 1 +FILE_SHARE_WRITE = 2 +FILE_SHARE_DELETE = 4 +FILE_ATTRIBUTE_READONLY = 1 +FILE_ATTRIBUTE_HIDDEN = 2 +FILE_ATTRIBUTE_SYSTEM = 4 +FILE_ATTRIBUTE_DIRECTORY = 16 +FILE_ATTRIBUTE_ARCHIVE = 32 +FILE_ATTRIBUTE_DEVICE = 64 +FILE_ATTRIBUTE_NORMAL = 128 +FILE_ATTRIBUTE_TEMPORARY = 256 +FILE_ATTRIBUTE_SPARSE_FILE = 512 +FILE_ATTRIBUTE_REPARSE_POINT = 1024 +FILE_ATTRIBUTE_COMPRESSED = 2048 +FILE_ATTRIBUTE_OFFLINE = 4096 +FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192 +FILE_ATTRIBUTE_ENCRYPTED = 16384 +FILE_ATTRIBUTE_VIRTUAL = 65536 +# These FILE_ATTRIBUTE_* flags are apparently old definitions from Windows 95 +# and conflict with current values above - but they live on for b/w compat... 
+FILE_ATTRIBUTE_ATOMIC_WRITE = 512 +FILE_ATTRIBUTE_XACTION_WRITE = 1024 + +FILE_NOTIFY_CHANGE_FILE_NAME = 1 +FILE_NOTIFY_CHANGE_DIR_NAME = 2 +FILE_NOTIFY_CHANGE_ATTRIBUTES = 4 +FILE_NOTIFY_CHANGE_SIZE = 8 +FILE_NOTIFY_CHANGE_LAST_WRITE = 16 +FILE_NOTIFY_CHANGE_SECURITY = 256 +FILE_CASE_SENSITIVE_SEARCH = 1 +FILE_CASE_PRESERVED_NAMES = 2 +FILE_UNICODE_ON_DISK = 4 +FILE_PERSISTENT_ACLS = 8 +FILE_FILE_COMPRESSION = 16 +FILE_NAMED_STREAMS = 262144 +FILE_PERSISTENT_ACLS = 0x00000008 +FILE_READ_ONLY_VOLUME = 0x00080000 +FILE_SEQUENTIAL_WRITE_ONCE = 0x00100000 +FILE_SUPPORTS_ENCRYPTION = 0x00020000 +FILE_SUPPORTS_EXTENDED_ATTRIBUTES = 0x00800000 +FILE_SUPPORTS_HARD_LINKS = 0x00400000 +FILE_SUPPORTS_OBJECT_IDS = 0x00010000 +FILE_SUPPORTS_OPEN_BY_FILE_ID = 0x01000000 +FILE_SUPPORTS_REPARSE_POINTS = 0x00000080 +FILE_SUPPORTS_SPARSE_FILES = 0x00000040 +FILE_SUPPORTS_TRANSACTIONS = 0x00200000 +FILE_SUPPORTS_USN_JOURNAL = 0x02000000 +FILE_UNICODE_ON_DISK = 0x00000004 +FILE_VOLUME_IS_COMPRESSED = 0x00008000 +FILE_VOLUME_QUOTAS = 0x00000020 +FILE_VOLUME_IS_COMPRESSED = 32768 +IO_COMPLETION_MODIFY_STATE = 2 +DUPLICATE_CLOSE_SOURCE = 1 +DUPLICATE_SAME_ACCESS = 2 +SID_MAX_SUB_AUTHORITIES = (15) +SECURITY_NULL_RID = (0) +SECURITY_WORLD_RID = (0) +SECURITY_LOCAL_RID = (0X00000000) +SECURITY_CREATOR_OWNER_RID = (0) +SECURITY_CREATOR_GROUP_RID = (1) +SECURITY_DIALUP_RID = (1) +SECURITY_NETWORK_RID = (2) +SECURITY_BATCH_RID = (3) +SECURITY_INTERACTIVE_RID = (4) +SECURITY_SERVICE_RID = (6) +SECURITY_ANONYMOUS_LOGON_RID = (7) +SECURITY_LOGON_IDS_RID = (5) +SECURITY_LOGON_IDS_RID_COUNT = (3) +SECURITY_LOCAL_SYSTEM_RID = (18) +SECURITY_NT_NON_UNIQUE = (21) +SECURITY_BUILTIN_DOMAIN_RID = (32) +DOMAIN_USER_RID_ADMIN = (500) +DOMAIN_USER_RID_GUEST = (501) +DOMAIN_GROUP_RID_ADMINS = (512) +DOMAIN_GROUP_RID_USERS = (513) +DOMAIN_GROUP_RID_GUESTS = (514) +DOMAIN_ALIAS_RID_ADMINS = (544) +DOMAIN_ALIAS_RID_USERS = (545) +DOMAIN_ALIAS_RID_GUESTS = (546) +DOMAIN_ALIAS_RID_POWER_USERS = (547) +DOMAIN_ALIAS_RID_ACCOUNT_OPS = (548) +DOMAIN_ALIAS_RID_SYSTEM_OPS = (549) +DOMAIN_ALIAS_RID_PRINT_OPS = (550) +DOMAIN_ALIAS_RID_BACKUP_OPS = (551) +DOMAIN_ALIAS_RID_REPLICATOR = (552) +SE_GROUP_MANDATORY = (1) +SE_GROUP_ENABLED_BY_DEFAULT = (2) +SE_GROUP_ENABLED = (4) +SE_GROUP_OWNER = (8) +SE_GROUP_LOGON_ID = (-1073741824) +ACL_REVISION = (2) +ACL_REVISION1 = (1) +ACL_REVISION2 = (2) +ACCESS_ALLOWED_ACE_TYPE = (0) +ACCESS_DENIED_ACE_TYPE = (1) +SYSTEM_AUDIT_ACE_TYPE = (2) +SYSTEM_ALARM_ACE_TYPE = (3) +OBJECT_INHERIT_ACE = (1) +CONTAINER_INHERIT_ACE = (2) +NO_PROPAGATE_INHERIT_ACE = (4) +INHERIT_ONLY_ACE = (8) +VALID_INHERIT_FLAGS = (15) +SUCCESSFUL_ACCESS_ACE_FLAG = (64) +FAILED_ACCESS_ACE_FLAG = (128) +SECURITY_DESCRIPTOR_REVISION = (1) +SECURITY_DESCRIPTOR_REVISION1 = (1) +SECURITY_DESCRIPTOR_MIN_LENGTH = (20) +SE_OWNER_DEFAULTED = (1) +SE_GROUP_DEFAULTED = (2) +SE_DACL_PRESENT = (4) +SE_DACL_DEFAULTED = (8) +SE_SACL_PRESENT = (16) +SE_SACL_DEFAULTED = (32) +SE_SELF_RELATIVE = (32768) +SE_PRIVILEGE_ENABLED_BY_DEFAULT = (1) +SE_PRIVILEGE_ENABLED = (2) +SE_PRIVILEGE_USED_FOR_ACCESS = (-2147483648) +PRIVILEGE_SET_ALL_NECESSARY = (1) +SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege" +SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege" +SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege" +SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege" +SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege" +SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege" +SE_TCB_NAME = "SeTcbPrivilege" +SE_SECURITY_NAME = "SeSecurityPrivilege" 
+SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege" +SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege" +SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege" +SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege" +SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege" +SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege" +SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege" +SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege" +SE_BACKUP_NAME = "SeBackupPrivilege" +SE_RESTORE_NAME = "SeRestorePrivilege" +SE_SHUTDOWN_NAME = "SeShutdownPrivilege" +SE_DEBUG_NAME = "SeDebugPrivilege" +SE_AUDIT_NAME = "SeAuditPrivilege" +SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege" +SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege" +SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege" + +TOKEN_ASSIGN_PRIMARY = (1) +TOKEN_DUPLICATE = (2) +TOKEN_IMPERSONATE = (4) +TOKEN_QUERY = (8) +TOKEN_QUERY_SOURCE = (16) +TOKEN_ADJUST_PRIVILEGES = (32) +TOKEN_ADJUST_GROUPS = (64) +TOKEN_ADJUST_DEFAULT = (128) +TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED |\ + TOKEN_ASSIGN_PRIMARY |\ + TOKEN_DUPLICATE |\ + TOKEN_IMPERSONATE |\ + TOKEN_QUERY |\ + TOKEN_QUERY_SOURCE |\ + TOKEN_ADJUST_PRIVILEGES |\ + TOKEN_ADJUST_GROUPS |\ + TOKEN_ADJUST_DEFAULT) +TOKEN_READ = (STANDARD_RIGHTS_READ |\ + TOKEN_QUERY) +TOKEN_WRITE = (STANDARD_RIGHTS_WRITE |\ + TOKEN_ADJUST_PRIVILEGES |\ + TOKEN_ADJUST_GROUPS |\ + TOKEN_ADJUST_DEFAULT) +TOKEN_EXECUTE = (STANDARD_RIGHTS_EXECUTE) +TOKEN_SOURCE_LENGTH = 8 + +KEY_QUERY_VALUE = (1) +KEY_SET_VALUE = (2) +KEY_CREATE_SUB_KEY = (4) +KEY_ENUMERATE_SUB_KEYS = (8) +KEY_NOTIFY = (16) +KEY_CREATE_LINK = (32) +KEY_WOW64_32KEY = 512 +KEY_WOW64_64KEY = 256 +KEY_WOW64_RES = 768 +KEY_READ = ((STANDARD_RIGHTS_READ |\ + KEY_QUERY_VALUE |\ + KEY_ENUMERATE_SUB_KEYS |\ + KEY_NOTIFY) \ + & \ + (~SYNCHRONIZE)) +KEY_WRITE = ((STANDARD_RIGHTS_WRITE |\ + KEY_SET_VALUE |\ + KEY_CREATE_SUB_KEY) \ + & \ + (~SYNCHRONIZE)) +KEY_EXECUTE = ((KEY_READ) \ + & \ + (~SYNCHRONIZE)) +KEY_ALL_ACCESS = ((STANDARD_RIGHTS_ALL |\ + KEY_QUERY_VALUE |\ + KEY_SET_VALUE |\ + KEY_CREATE_SUB_KEY |\ + KEY_ENUMERATE_SUB_KEYS |\ + KEY_NOTIFY |\ + KEY_CREATE_LINK) \ + & \ + (~SYNCHRONIZE)) +REG_NOTIFY_CHANGE_ATTRIBUTES = (2) +REG_NOTIFY_CHANGE_SECURITY = (8) +REG_RESOURCE_REQUIREMENTS_LIST = ( 10 ) +REG_NONE = ( 0 ) # No value type +REG_SZ = ( 1 ) # Unicode nul terminated string +REG_EXPAND_SZ = ( 2 ) # Unicode nul terminated string + # (with environment variable references) +REG_BINARY = ( 3 ) # Free form binary +REG_DWORD = ( 4 ) # 32-bit number +REG_DWORD_LITTLE_ENDIAN = ( 4 ) # 32-bit number (same as REG_DWORD) +REG_DWORD_BIG_ENDIAN = ( 5 ) # 32-bit number +REG_LINK = ( 6 ) # Symbolic Link (unicode) +REG_MULTI_SZ = ( 7 ) # Multiple Unicode strings +REG_RESOURCE_LIST = ( 8 ) # Resource list in the resource map +REG_FULL_RESOURCE_DESCRIPTOR =( 9 ) # Resource list in the hardware description +REG_RESOURCE_REQUIREMENTS_LIST = ( 10 ) +REG_QWORD = ( 11 ) # 64-bit number +REG_QWORD_LITTLE_ENDIAN = ( 11 ) # 64-bit number (same as REG_QWORD) + + +# Generated by h2py from \msvc20\include\winnt.h +# hacked and split by mhammond. 
+# Included from string.h +_NLSCMPERROR = 2147483647 +NULL = 0 +HEAP_NO_SERIALIZE = 1 +HEAP_GROWABLE = 2 +HEAP_GENERATE_EXCEPTIONS = 4 +HEAP_ZERO_MEMORY = 8 +HEAP_REALLOC_IN_PLACE_ONLY = 16 +HEAP_TAIL_CHECKING_ENABLED = 32 +HEAP_FREE_CHECKING_ENABLED = 64 +HEAP_DISABLE_COALESCE_ON_FREE = 128 +IS_TEXT_UNICODE_ASCII16 = 1 +IS_TEXT_UNICODE_REVERSE_ASCII16 = 16 +IS_TEXT_UNICODE_STATISTICS = 2 +IS_TEXT_UNICODE_REVERSE_STATISTICS = 32 +IS_TEXT_UNICODE_CONTROLS = 4 +IS_TEXT_UNICODE_REVERSE_CONTROLS = 64 +IS_TEXT_UNICODE_SIGNATURE = 8 +IS_TEXT_UNICODE_REVERSE_SIGNATURE = 128 +IS_TEXT_UNICODE_ILLEGAL_CHARS = 256 +IS_TEXT_UNICODE_ODD_LENGTH = 512 +IS_TEXT_UNICODE_DBCS_LEADBYTE = 1024 +IS_TEXT_UNICODE_NULL_BYTES = 4096 +IS_TEXT_UNICODE_UNICODE_MASK = 15 +IS_TEXT_UNICODE_REVERSE_MASK = 240 +IS_TEXT_UNICODE_NOT_UNICODE_MASK = 3840 +IS_TEXT_UNICODE_NOT_ASCII_MASK = 61440 +COMPRESSION_FORMAT_NONE = (0) +COMPRESSION_FORMAT_DEFAULT = (1) +COMPRESSION_FORMAT_LZNT1 = (2) +COMPRESSION_ENGINE_STANDARD = (0) +COMPRESSION_ENGINE_MAXIMUM = (256) +MESSAGE_RESOURCE_UNICODE = 1 +RTL_CRITSECT_TYPE = 0 +RTL_RESOURCE_TYPE = 1 +DLL_PROCESS_ATTACH = 1 +DLL_THREAD_ATTACH = 2 +DLL_THREAD_DETACH = 3 +DLL_PROCESS_DETACH = 0 +EVENTLOG_SEQUENTIAL_READ = 0X0001 +EVENTLOG_SEEK_READ = 0X0002 +EVENTLOG_FORWARDS_READ = 0X0004 +EVENTLOG_BACKWARDS_READ = 0X0008 +EVENTLOG_SUCCESS = 0X0000 +EVENTLOG_ERROR_TYPE = 1 +EVENTLOG_WARNING_TYPE = 2 +EVENTLOG_INFORMATION_TYPE = 4 +EVENTLOG_AUDIT_SUCCESS = 8 +EVENTLOG_AUDIT_FAILURE = 16 +EVENTLOG_START_PAIRED_EVENT = 1 +EVENTLOG_END_PAIRED_EVENT = 2 +EVENTLOG_END_ALL_PAIRED_EVENTS = 4 +EVENTLOG_PAIRED_EVENT_ACTIVE = 8 +EVENTLOG_PAIRED_EVENT_INACTIVE = 16 +# Generated by h2py from \msvc20\include\winnt.h +# hacked and split by mhammond. +OWNER_SECURITY_INFORMATION = (0X00000001) +GROUP_SECURITY_INFORMATION = (0X00000002) +DACL_SECURITY_INFORMATION = (0X00000004) +SACL_SECURITY_INFORMATION = (0X00000008) +IMAGE_SIZEOF_FILE_HEADER = 20 +IMAGE_FILE_MACHINE_UNKNOWN = 0 +IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16 +IMAGE_SIZEOF_ROM_OPTIONAL_HEADER = 56 +IMAGE_SIZEOF_STD_OPTIONAL_HEADER = 28 +IMAGE_SIZEOF_NT_OPTIONAL_HEADER = 224 +IMAGE_NT_OPTIONAL_HDR_MAGIC = 267 +IMAGE_ROM_OPTIONAL_HDR_MAGIC = 263 +IMAGE_SIZEOF_SHORT_NAME = 8 +IMAGE_SIZEOF_SECTION_HEADER = 40 +IMAGE_SIZEOF_SYMBOL = 18 +IMAGE_SYM_CLASS_NULL = 0 +IMAGE_SYM_CLASS_AUTOMATIC = 1 +IMAGE_SYM_CLASS_EXTERNAL = 2 +IMAGE_SYM_CLASS_STATIC = 3 +IMAGE_SYM_CLASS_REGISTER = 4 +IMAGE_SYM_CLASS_EXTERNAL_DEF = 5 +IMAGE_SYM_CLASS_LABEL = 6 +IMAGE_SYM_CLASS_UNDEFINED_LABEL = 7 +IMAGE_SYM_CLASS_MEMBER_OF_STRUCT = 8 +IMAGE_SYM_CLASS_ARGUMENT = 9 +IMAGE_SYM_CLASS_STRUCT_TAG = 10 +IMAGE_SYM_CLASS_MEMBER_OF_UNION = 11 +IMAGE_SYM_CLASS_UNION_TAG = 12 +IMAGE_SYM_CLASS_TYPE_DEFINITION = 13 +IMAGE_SYM_CLASS_UNDEFINED_STATIC = 14 +IMAGE_SYM_CLASS_ENUM_TAG = 15 +IMAGE_SYM_CLASS_MEMBER_OF_ENUM = 16 +IMAGE_SYM_CLASS_REGISTER_PARAM = 17 +IMAGE_SYM_CLASS_BIT_FIELD = 18 +IMAGE_SYM_CLASS_BLOCK = 100 +IMAGE_SYM_CLASS_FUNCTION = 101 +IMAGE_SYM_CLASS_END_OF_STRUCT = 102 +IMAGE_SYM_CLASS_FILE = 103 +IMAGE_SYM_CLASS_SECTION = 104 +IMAGE_SYM_CLASS_WEAK_EXTERNAL = 105 +N_BTMASK = 15 +N_TMASK = 48 +N_TMASK1 = 192 +N_TMASK2 = 240 +N_BTSHFT = 4 +N_TSHIFT = 2 +IMAGE_SIZEOF_AUX_SYMBOL = 18 +IMAGE_COMDAT_SELECT_NODUPLICATES = 1 +IMAGE_COMDAT_SELECT_ANY = 2 +IMAGE_COMDAT_SELECT_SAME_SIZE = 3 +IMAGE_COMDAT_SELECT_EXACT_MATCH = 4 +IMAGE_COMDAT_SELECT_ASSOCIATIVE = 5 +IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY = 1 +IMAGE_WEAK_EXTERN_SEARCH_LIBRARY = 2 +IMAGE_WEAK_EXTERN_SEARCH_ALIAS = 3 
+IMAGE_SIZEOF_RELOCATION = 10 +IMAGE_REL_I386_SECTION = 10 +IMAGE_REL_I386_SECREL = 11 +IMAGE_REL_MIPS_REFHALF = 1 +IMAGE_REL_MIPS_REFWORD = 2 +IMAGE_REL_MIPS_JMPADDR = 3 +IMAGE_REL_MIPS_REFHI = 4 +IMAGE_REL_MIPS_REFLO = 5 +IMAGE_REL_MIPS_GPREL = 6 +IMAGE_REL_MIPS_LITERAL = 7 +IMAGE_REL_MIPS_SECTION = 10 +IMAGE_REL_MIPS_SECREL = 11 +IMAGE_REL_MIPS_REFWORDNB = 34 +IMAGE_REL_MIPS_PAIR = 37 +IMAGE_REL_ALPHA_ABSOLUTE = 0 +IMAGE_REL_ALPHA_REFLONG = 1 +IMAGE_REL_ALPHA_REFQUAD = 2 +IMAGE_REL_ALPHA_GPREL32 = 3 +IMAGE_REL_ALPHA_LITERAL = 4 +IMAGE_REL_ALPHA_LITUSE = 5 +IMAGE_REL_ALPHA_GPDISP = 6 +IMAGE_REL_ALPHA_BRADDR = 7 +IMAGE_REL_ALPHA_HINT = 8 +IMAGE_REL_ALPHA_INLINE_REFLONG = 9 +IMAGE_REL_ALPHA_REFHI = 10 +IMAGE_REL_ALPHA_REFLO = 11 +IMAGE_REL_ALPHA_PAIR = 12 +IMAGE_REL_ALPHA_MATCH = 13 +IMAGE_REL_ALPHA_SECTION = 14 +IMAGE_REL_ALPHA_SECREL = 15 +IMAGE_REL_ALPHA_REFLONGNB = 16 +IMAGE_SIZEOF_BASE_RELOCATION = 8 +IMAGE_REL_BASED_ABSOLUTE = 0 +IMAGE_REL_BASED_HIGH = 1 +IMAGE_REL_BASED_LOW = 2 +IMAGE_REL_BASED_HIGHLOW = 3 +IMAGE_REL_BASED_HIGHADJ = 4 +IMAGE_REL_BASED_MIPS_JMPADDR = 5 +IMAGE_SIZEOF_LINENUMBER = 6 +IMAGE_ARCHIVE_START_SIZE = 8 +IMAGE_ARCHIVE_START = "!\n" +IMAGE_ARCHIVE_END = "`\n" +IMAGE_ARCHIVE_PAD = "\n" +IMAGE_ARCHIVE_LINKER_MEMBER = "/ " +IMAGE_ARCHIVE_LONGNAMES_MEMBER = "// " +IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR = 60 +IMAGE_ORDINAL_FLAG = -2147483648 +def IMAGE_SNAP_BY_ORDINAL(Ordinal): return ((Ordinal & IMAGE_ORDINAL_FLAG) != 0) + +def IMAGE_ORDINAL(Ordinal): return (Ordinal & 65535) + +IMAGE_RESOURCE_NAME_IS_STRING = -2147483648 +IMAGE_RESOURCE_DATA_IS_DIRECTORY = -2147483648 +IMAGE_DEBUG_TYPE_UNKNOWN = 0 +IMAGE_DEBUG_TYPE_COFF = 1 +IMAGE_DEBUG_TYPE_CODEVIEW = 2 +IMAGE_DEBUG_TYPE_FPO = 3 +IMAGE_DEBUG_TYPE_MISC = 4 +IMAGE_DEBUG_TYPE_EXCEPTION = 5 +IMAGE_DEBUG_TYPE_FIXUP = 6 +IMAGE_DEBUG_TYPE_OMAP_TO_SRC = 7 +IMAGE_DEBUG_TYPE_OMAP_FROM_SRC = 8 +FRAME_FPO = 0 +FRAME_TRAP = 1 +FRAME_TSS = 2 +SIZEOF_RFPO_DATA = 16 +IMAGE_DEBUG_MISC_EXENAME = 1 +IMAGE_SEPARATE_DEBUG_SIGNATURE = 18756 +# Generated by h2py from \msvcnt\include\wingdi.h +# hacked and split manually by mhammond. 
+NEWFRAME = 1 +ABORTDOC = 2 +NEXTBAND = 3 +SETCOLORTABLE = 4 +GETCOLORTABLE = 5 +FLUSHOUTPUT = 6 +DRAFTMODE = 7 +QUERYESCSUPPORT = 8 +SETABORTPROC = 9 +STARTDOC = 10 +ENDDOC = 11 +GETPHYSPAGESIZE = 12 +GETPRINTINGOFFSET = 13 +GETSCALINGFACTOR = 14 +MFCOMMENT = 15 +GETPENWIDTH = 16 +SETCOPYCOUNT = 17 +SELECTPAPERSOURCE = 18 +DEVICEDATA = 19 +PASSTHROUGH = 19 +GETTECHNOLGY = 20 +GETTECHNOLOGY = 20 +SETLINECAP = 21 +SETLINEJOIN = 22 +SETMITERLIMIT = 23 +BANDINFO = 24 +DRAWPATTERNRECT = 25 +GETVECTORPENSIZE = 26 +GETVECTORBRUSHSIZE = 27 +ENABLEDUPLEX = 28 +GETSETPAPERBINS = 29 +GETSETPRINTORIENT = 30 +ENUMPAPERBINS = 31 +SETDIBSCALING = 32 +EPSPRINTING = 33 +ENUMPAPERMETRICS = 34 +GETSETPAPERMETRICS = 35 +POSTSCRIPT_DATA = 37 +POSTSCRIPT_IGNORE = 38 +MOUSETRAILS = 39 +GETDEVICEUNITS = 42 +GETEXTENDEDTEXTMETRICS = 256 +GETEXTENTTABLE = 257 +GETPAIRKERNTABLE = 258 +GETTRACKKERNTABLE = 259 +EXTTEXTOUT = 512 +GETFACENAME = 513 +DOWNLOADFACE = 514 +ENABLERELATIVEWIDTHS = 768 +ENABLEPAIRKERNING = 769 +SETKERNTRACK = 770 +SETALLJUSTVALUES = 771 +SETCHARSET = 772 +STRETCHBLT = 2048 +GETSETSCREENPARAMS = 3072 +BEGIN_PATH = 4096 +CLIP_TO_PATH = 4097 +END_PATH = 4098 +EXT_DEVICE_CAPS = 4099 +RESTORE_CTM = 4100 +SAVE_CTM = 4101 +SET_ARC_DIRECTION = 4102 +SET_BACKGROUND_COLOR = 4103 +SET_POLY_MODE = 4104 +SET_SCREEN_ANGLE = 4105 +SET_SPREAD = 4106 +TRANSFORM_CTM = 4107 +SET_CLIP_BOX = 4108 +SET_BOUNDS = 4109 +SET_MIRROR_MODE = 4110 +OPENCHANNEL = 4110 +DOWNLOADHEADER = 4111 +CLOSECHANNEL = 4112 +POSTSCRIPT_PASSTHROUGH = 4115 +ENCAPSULATED_POSTSCRIPT = 4116 +SP_NOTREPORTED = 16384 +SP_ERROR = (-1) +SP_APPABORT = (-2) +SP_USERABORT = (-3) +SP_OUTOFDISK = (-4) +SP_OUTOFMEMORY = (-5) +PR_JOBSTATUS = 0 + +## GDI object types +OBJ_PEN = 1 +OBJ_BRUSH = 2 +OBJ_DC = 3 +OBJ_METADC = 4 +OBJ_PAL = 5 +OBJ_FONT = 6 +OBJ_BITMAP = 7 +OBJ_REGION = 8 +OBJ_METAFILE = 9 +OBJ_MEMDC = 10 +OBJ_EXTPEN = 11 +OBJ_ENHMETADC = 12 +OBJ_ENHMETAFILE = 13 +OBJ_COLORSPACE = 14 + +MWT_IDENTITY = 1 +MWT_LEFTMULTIPLY = 2 +MWT_RIGHTMULTIPLY = 3 +MWT_MIN = MWT_IDENTITY +MWT_MAX = MWT_RIGHTMULTIPLY +BI_RGB = 0 +BI_RLE8 = 1 +BI_RLE4 = 2 +BI_BITFIELDS = 3 +TMPF_FIXED_PITCH = 1 +TMPF_VECTOR = 2 +TMPF_DEVICE = 8 +TMPF_TRUETYPE = 4 +NTM_REGULAR = 64 +NTM_BOLD = 32 +NTM_ITALIC = 1 +LF_FACESIZE = 32 +LF_FULLFACESIZE = 64 +OUT_DEFAULT_PRECIS = 0 +OUT_STRING_PRECIS = 1 +OUT_CHARACTER_PRECIS = 2 +OUT_STROKE_PRECIS = 3 +OUT_TT_PRECIS = 4 +OUT_DEVICE_PRECIS = 5 +OUT_RASTER_PRECIS = 6 +OUT_TT_ONLY_PRECIS = 7 +OUT_OUTLINE_PRECIS = 8 +CLIP_DEFAULT_PRECIS = 0 +CLIP_CHARACTER_PRECIS = 1 +CLIP_STROKE_PRECIS = 2 +CLIP_MASK = 15 +CLIP_LH_ANGLES = (1<<4) +CLIP_TT_ALWAYS = (2<<4) +CLIP_EMBEDDED = (8<<4) +DEFAULT_QUALITY = 0 +DRAFT_QUALITY = 1 +PROOF_QUALITY = 2 +NONANTIALIASED_QUALITY = 3 +ANTIALIASED_QUALITY = 4 +CLEARTYPE_QUALITY = 5 +CLEARTYPE_NATURAL_QUALITY = 6 +DEFAULT_PITCH = 0 +FIXED_PITCH = 1 +VARIABLE_PITCH = 2 +ANSI_CHARSET = 0 +DEFAULT_CHARSET = 1 +SYMBOL_CHARSET = 2 +SHIFTJIS_CHARSET = 128 +HANGEUL_CHARSET = 129 +CHINESEBIG5_CHARSET = 136 +OEM_CHARSET = 255 +JOHAB_CHARSET = 130 +HEBREW_CHARSET = 177 +ARABIC_CHARSET = 178 +GREEK_CHARSET = 161 +TURKISH_CHARSET = 162 +VIETNAMESE_CHARSET = 163 +THAI_CHARSET = 222 +EASTEUROPE_CHARSET = 238 +RUSSIAN_CHARSET = 204 +MAC_CHARSET = 77 +BALTIC_CHARSET = 186 +FF_DONTCARE = (0<<4) +FF_ROMAN = (1<<4) +FF_SWISS = (2<<4) +FF_MODERN = (3<<4) +FF_SCRIPT = (4<<4) +FF_DECORATIVE = (5<<4) +FW_DONTCARE = 0 +FW_THIN = 100 +FW_EXTRALIGHT = 200 +FW_LIGHT = 300 +FW_NORMAL = 400 +FW_MEDIUM = 500 +FW_SEMIBOLD = 600 +FW_BOLD = 700 
+FW_EXTRABOLD = 800 +FW_HEAVY = 900 +FW_ULTRALIGHT = FW_EXTRALIGHT +FW_REGULAR = FW_NORMAL +FW_DEMIBOLD = FW_SEMIBOLD +FW_ULTRABOLD = FW_EXTRABOLD +FW_BLACK = FW_HEAVY +# Generated by h2py from \msvcnt\include\wingdi.h +# hacked and split manually by mhammond. +BS_SOLID = 0 +BS_NULL = 1 +BS_HOLLOW = BS_NULL +BS_HATCHED = 2 +BS_PATTERN = 3 +BS_INDEXED = 4 +BS_DIBPATTERN = 5 +BS_DIBPATTERNPT = 6 +BS_PATTERN8X8 = 7 +BS_DIBPATTERN8X8 = 8 +HS_HORIZONTAL = 0 +HS_VERTICAL = 1 +HS_FDIAGONAL = 2 +HS_BDIAGONAL = 3 +HS_CROSS = 4 +HS_DIAGCROSS = 5 +HS_FDIAGONAL1 = 6 +HS_BDIAGONAL1 = 7 +HS_SOLID = 8 +HS_DENSE1 = 9 +HS_DENSE2 = 10 +HS_DENSE3 = 11 +HS_DENSE4 = 12 +HS_DENSE5 = 13 +HS_DENSE6 = 14 +HS_DENSE7 = 15 +HS_DENSE8 = 16 +HS_NOSHADE = 17 +HS_HALFTONE = 18 +HS_SOLIDCLR = 19 +HS_DITHEREDCLR = 20 +HS_SOLIDTEXTCLR = 21 +HS_DITHEREDTEXTCLR = 22 +HS_SOLIDBKCLR = 23 +HS_DITHEREDBKCLR = 24 +HS_API_MAX = 25 +PS_SOLID = 0 +PS_DASH = 1 +PS_DOT = 2 +PS_DASHDOT = 3 +PS_DASHDOTDOT = 4 +PS_NULL = 5 +PS_INSIDEFRAME = 6 +PS_USERSTYLE = 7 +PS_ALTERNATE = 8 +PS_STYLE_MASK = 15 +PS_ENDCAP_ROUND = 0 +PS_ENDCAP_SQUARE = 256 +PS_ENDCAP_FLAT = 512 +PS_ENDCAP_MASK = 3840 +PS_JOIN_ROUND = 0 +PS_JOIN_BEVEL = 4096 +PS_JOIN_MITER = 8192 +PS_JOIN_MASK = 61440 +PS_COSMETIC = 0 +PS_GEOMETRIC = 65536 +PS_TYPE_MASK = 983040 +AD_COUNTERCLOCKWISE = 1 +AD_CLOCKWISE = 2 +DRIVERVERSION = 0 +TECHNOLOGY = 2 +HORZSIZE = 4 +VERTSIZE = 6 +HORZRES = 8 +VERTRES = 10 +BITSPIXEL = 12 +PLANES = 14 +NUMBRUSHES = 16 +NUMPENS = 18 +NUMMARKERS = 20 +NUMFONTS = 22 +NUMCOLORS = 24 +PDEVICESIZE = 26 +CURVECAPS = 28 +LINECAPS = 30 +POLYGONALCAPS = 32 +TEXTCAPS = 34 +CLIPCAPS = 36 +RASTERCAPS = 38 +ASPECTX = 40 +ASPECTY = 42 +ASPECTXY = 44 +LOGPIXELSX = 88 +LOGPIXELSY = 90 +SIZEPALETTE = 104 +NUMRESERVED = 106 +COLORRES = 108 + +PHYSICALWIDTH = 110 +PHYSICALHEIGHT = 111 +PHYSICALOFFSETX = 112 +PHYSICALOFFSETY = 113 +SCALINGFACTORX = 114 +SCALINGFACTORY = 115 +VREFRESH = 116 +DESKTOPVERTRES = 117 +DESKTOPHORZRES = 118 +BLTALIGNMENT = 119 +SHADEBLENDCAPS = 120 +COLORMGMTCAPS = 121 + +DT_PLOTTER = 0 +DT_RASDISPLAY = 1 +DT_RASPRINTER = 2 +DT_RASCAMERA = 3 +DT_CHARSTREAM = 4 +DT_METAFILE = 5 +DT_DISPFILE = 6 +CC_NONE = 0 +CC_CIRCLES = 1 +CC_PIE = 2 +CC_CHORD = 4 +CC_ELLIPSES = 8 +CC_WIDE = 16 +CC_STYLED = 32 +CC_WIDESTYLED = 64 +CC_INTERIORS = 128 +CC_ROUNDRECT = 256 +LC_NONE = 0 +LC_POLYLINE = 2 +LC_MARKER = 4 +LC_POLYMARKER = 8 +LC_WIDE = 16 +LC_STYLED = 32 +LC_WIDESTYLED = 64 +LC_INTERIORS = 128 +PC_NONE = 0 +PC_POLYGON = 1 +PC_RECTANGLE = 2 +PC_WINDPOLYGON = 4 +PC_TRAPEZOID = 4 +PC_SCANLINE = 8 +PC_WIDE = 16 +PC_STYLED = 32 +PC_WIDESTYLED = 64 +PC_INTERIORS = 128 +CP_NONE = 0 +CP_RECTANGLE = 1 +CP_REGION = 2 +TC_OP_CHARACTER = 1 +TC_OP_STROKE = 2 +TC_CP_STROKE = 4 +TC_CR_90 = 8 +TC_CR_ANY = 16 +TC_SF_X_YINDEP = 32 +TC_SA_DOUBLE = 64 +TC_SA_INTEGER = 128 +TC_SA_CONTIN = 256 +TC_EA_DOUBLE = 512 +TC_IA_ABLE = 1024 +TC_UA_ABLE = 2048 +TC_SO_ABLE = 4096 +TC_RA_ABLE = 8192 +TC_VA_ABLE = 16384 +TC_RESERVED = 32768 +TC_SCROLLBLT = 65536 +RC_BITBLT = 1 +RC_BANDING = 2 +RC_SCALING = 4 +RC_BITMAP64 = 8 +RC_GDI20_OUTPUT = 16 +RC_GDI20_STATE = 32 +RC_SAVEBITMAP = 64 +RC_DI_BITMAP = 128 +RC_PALETTE = 256 +RC_DIBTODEV = 512 +RC_BIGFONT = 1024 +RC_STRETCHBLT = 2048 +RC_FLOODFILL = 4096 +RC_STRETCHDIB = 8192 +RC_OP_DX_OUTPUT = 16384 +RC_DEVBITS = 32768 +DIB_RGB_COLORS = 0 +DIB_PAL_COLORS = 1 +DIB_PAL_INDICES = 2 +DIB_PAL_PHYSINDICES = 2 +DIB_PAL_LOGINDICES = 4 +SYSPAL_ERROR = 0 +SYSPAL_STATIC = 1 +SYSPAL_NOSTATIC = 2 +CBM_CREATEDIB = 2 +CBM_INIT = 4 +FLOODFILLBORDER = 0 
+FLOODFILLSURFACE = 1 +CCHDEVICENAME = 32 +CCHFORMNAME = 32 +# Generated by h2py from \msvcnt\include\wingdi.h +# hacked and split manually by mhammond. + +# DEVMODE.dmFields +DM_SPECVERSION = 800 +DM_ORIENTATION = 1 +DM_PAPERSIZE = 2 +DM_PAPERLENGTH = 4 +DM_PAPERWIDTH = 8 +DM_SCALE = 16 +DM_POSITION = 32 +DM_NUP = 64 +DM_DISPLAYORIENTATION = 128 +DM_COPIES = 256 +DM_DEFAULTSOURCE = 512 +DM_PRINTQUALITY = 1024 +DM_COLOR = 2048 +DM_DUPLEX = 4096 +DM_YRESOLUTION = 8192 +DM_TTOPTION = 16384 +DM_COLLATE = 32768 +DM_FORMNAME = 65536 +DM_LOGPIXELS = 131072 +DM_BITSPERPEL = 262144 +DM_PELSWIDTH = 524288 +DM_PELSHEIGHT = 1048576 +DM_DISPLAYFLAGS = 2097152 +DM_DISPLAYFREQUENCY = 4194304 +DM_ICMMETHOD = 8388608 +DM_ICMINTENT = 16777216 +DM_MEDIATYPE = 33554432 +DM_DITHERTYPE = 67108864 +DM_PANNINGWIDTH = 134217728 +DM_PANNINGHEIGHT = 268435456 +DM_DISPLAYFIXEDOUTPUT = 536870912 + +# DEVMODE.dmOrientation +DMORIENT_PORTRAIT = 1 +DMORIENT_LANDSCAPE = 2 + +# DEVMODE.dmDisplayOrientation +DMDO_DEFAULT = 0 +DMDO_90 = 1 +DMDO_180 = 2 +DMDO_270 = 3 + +# DEVMODE.dmDisplayFixedOutput +DMDFO_DEFAULT = 0 +DMDFO_STRETCH = 1 +DMDFO_CENTER = 2 + +# DEVMODE.dmPaperSize +DMPAPER_LETTER = 1 +DMPAPER_LETTERSMALL = 2 +DMPAPER_TABLOID = 3 +DMPAPER_LEDGER = 4 +DMPAPER_LEGAL = 5 +DMPAPER_STATEMENT = 6 +DMPAPER_EXECUTIVE = 7 +DMPAPER_A3 = 8 +DMPAPER_A4 = 9 +DMPAPER_A4SMALL = 10 +DMPAPER_A5 = 11 +DMPAPER_B4 = 12 +DMPAPER_B5 = 13 +DMPAPER_FOLIO = 14 +DMPAPER_QUARTO = 15 +DMPAPER_10X14 = 16 +DMPAPER_11X17 = 17 +DMPAPER_NOTE = 18 +DMPAPER_ENV_9 = 19 +DMPAPER_ENV_10 = 20 +DMPAPER_ENV_11 = 21 +DMPAPER_ENV_12 = 22 +DMPAPER_ENV_14 = 23 +DMPAPER_CSHEET = 24 +DMPAPER_DSHEET = 25 +DMPAPER_ESHEET = 26 +DMPAPER_ENV_DL = 27 +DMPAPER_ENV_C5 = 28 +DMPAPER_ENV_C3 = 29 +DMPAPER_ENV_C4 = 30 +DMPAPER_ENV_C6 = 31 +DMPAPER_ENV_C65 = 32 +DMPAPER_ENV_B4 = 33 +DMPAPER_ENV_B5 = 34 +DMPAPER_ENV_B6 = 35 +DMPAPER_ENV_ITALY = 36 +DMPAPER_ENV_MONARCH = 37 +DMPAPER_ENV_PERSONAL = 38 +DMPAPER_FANFOLD_US = 39 +DMPAPER_FANFOLD_STD_GERMAN = 40 +DMPAPER_FANFOLD_LGL_GERMAN = 41 +DMPAPER_ISO_B4 = 42 +DMPAPER_JAPANESE_POSTCARD = 43 +DMPAPER_9X11 = 44 +DMPAPER_10X11 = 45 +DMPAPER_15X11 = 46 +DMPAPER_ENV_INVITE = 47 +DMPAPER_RESERVED_48 = 48 +DMPAPER_RESERVED_49 = 49 +DMPAPER_LETTER_EXTRA = 50 +DMPAPER_LEGAL_EXTRA = 51 +DMPAPER_TABLOID_EXTRA = 52 +DMPAPER_A4_EXTRA = 53 +DMPAPER_LETTER_TRANSVERSE = 54 +DMPAPER_A4_TRANSVERSE = 55 +DMPAPER_LETTER_EXTRA_TRANSVERSE = 56 +DMPAPER_A_PLUS = 57 +DMPAPER_B_PLUS = 58 +DMPAPER_LETTER_PLUS = 59 +DMPAPER_A4_PLUS = 60 +DMPAPER_A5_TRANSVERSE = 61 +DMPAPER_B5_TRANSVERSE = 62 +DMPAPER_A3_EXTRA = 63 +DMPAPER_A5_EXTRA = 64 +DMPAPER_B5_EXTRA = 65 +DMPAPER_A2 = 66 +DMPAPER_A3_TRANSVERSE = 67 +DMPAPER_A3_EXTRA_TRANSVERSE = 68 +DMPAPER_DBL_JAPANESE_POSTCARD = 69 +DMPAPER_A6 = 70 +DMPAPER_JENV_KAKU2 = 71 +DMPAPER_JENV_KAKU3 = 72 +DMPAPER_JENV_CHOU3 = 73 +DMPAPER_JENV_CHOU4 = 74 +DMPAPER_LETTER_ROTATED = 75 +DMPAPER_A3_ROTATED = 76 +DMPAPER_A4_ROTATED = 77 +DMPAPER_A5_ROTATED = 78 +DMPAPER_B4_JIS_ROTATED = 79 +DMPAPER_B5_JIS_ROTATED = 80 +DMPAPER_JAPANESE_POSTCARD_ROTATED = 81 +DMPAPER_DBL_JAPANESE_POSTCARD_ROTATED = 82 +DMPAPER_A6_ROTATED = 83 +DMPAPER_JENV_KAKU2_ROTATED = 84 +DMPAPER_JENV_KAKU3_ROTATED = 85 +DMPAPER_JENV_CHOU3_ROTATED = 86 +DMPAPER_JENV_CHOU4_ROTATED = 87 +DMPAPER_B6_JIS = 88 +DMPAPER_B6_JIS_ROTATED = 89 +DMPAPER_12X11 = 90 +DMPAPER_JENV_YOU4 = 91 +DMPAPER_JENV_YOU4_ROTATED = 92 +DMPAPER_P16K = 93 +DMPAPER_P32K = 94 +DMPAPER_P32KBIG = 95 +DMPAPER_PENV_1 = 96 +DMPAPER_PENV_2 = 97 +DMPAPER_PENV_3 = 98 +DMPAPER_PENV_4 = 99 
+DMPAPER_PENV_5 = 100 +DMPAPER_PENV_6 = 101 +DMPAPER_PENV_7 = 102 +DMPAPER_PENV_8 = 103 +DMPAPER_PENV_9 = 104 +DMPAPER_PENV_10 = 105 +DMPAPER_P16K_ROTATED = 106 +DMPAPER_P32K_ROTATED = 107 +DMPAPER_P32KBIG_ROTATED = 108 +DMPAPER_PENV_1_ROTATED = 109 +DMPAPER_PENV_2_ROTATED = 110 +DMPAPER_PENV_3_ROTATED = 111 +DMPAPER_PENV_4_ROTATED = 112 +DMPAPER_PENV_5_ROTATED = 113 +DMPAPER_PENV_6_ROTATED = 114 +DMPAPER_PENV_7_ROTATED = 115 +DMPAPER_PENV_8_ROTATED = 116 +DMPAPER_PENV_9_ROTATED = 117 +DMPAPER_PENV_10_ROTATED = 118 +DMPAPER_LAST = DMPAPER_PENV_10_ROTATED +DMPAPER_USER = 256 + +# DEVMODE.dmDefaultSource +DMBIN_UPPER = 1 +DMBIN_ONLYONE = 1 +DMBIN_LOWER = 2 +DMBIN_MIDDLE = 3 +DMBIN_MANUAL = 4 +DMBIN_ENVELOPE = 5 +DMBIN_ENVMANUAL = 6 +DMBIN_AUTO = 7 +DMBIN_TRACTOR = 8 +DMBIN_SMALLFMT = 9 +DMBIN_LARGEFMT = 10 +DMBIN_LARGECAPACITY = 11 +DMBIN_CASSETTE = 14 +DMBIN_FORMSOURCE = 15 +DMBIN_LAST = DMBIN_FORMSOURCE +DMBIN_USER = 256 + +# DEVMODE.dmPrintQuality +DMRES_DRAFT = (-1) +DMRES_LOW = (-2) +DMRES_MEDIUM = (-3) +DMRES_HIGH = (-4) + +# DEVMODE.dmColor +DMCOLOR_MONOCHROME = 1 +DMCOLOR_COLOR = 2 + +# DEVMODE.dmDuplex +DMDUP_SIMPLEX = 1 +DMDUP_VERTICAL = 2 +DMDUP_HORIZONTAL = 3 + +# DEVMODE.dmTTOption +DMTT_BITMAP = 1 +DMTT_DOWNLOAD = 2 +DMTT_SUBDEV = 3 +DMTT_DOWNLOAD_OUTLINE = 4 + +# DEVMODE.dmCollate +DMCOLLATE_FALSE = 0 +DMCOLLATE_TRUE = 1 + +# DEVMODE.dmDisplayFlags +DM_GRAYSCALE = 1 +DM_INTERLACED = 2 + +# DEVMODE.dmICMMethod +DMICMMETHOD_NONE = 1 +DMICMMETHOD_SYSTEM = 2 +DMICMMETHOD_DRIVER = 3 +DMICMMETHOD_DEVICE = 4 +DMICMMETHOD_USER = 256 + +# DEVMODE.dmICMIntent +DMICM_SATURATE = 1 +DMICM_CONTRAST = 2 +DMICM_COLORIMETRIC = 3 +DMICM_ABS_COLORIMETRIC = 4 +DMICM_USER = 256 + +# DEVMODE.dmMediaType +DMMEDIA_STANDARD = 1 +DMMEDIA_TRANSPARENCY = 2 +DMMEDIA_GLOSSY = 3 +DMMEDIA_USER = 256 + +# DEVMODE.dmDitherType +DMDITHER_NONE = 1 +DMDITHER_COARSE = 2 +DMDITHER_FINE = 3 +DMDITHER_LINEART = 4 +DMDITHER_ERRORDIFFUSION = 5 +DMDITHER_RESERVED6 = 6 +DMDITHER_RESERVED7 = 7 +DMDITHER_RESERVED8 = 8 +DMDITHER_RESERVED9 = 9 +DMDITHER_GRAYSCALE = 10 +DMDITHER_USER = 256 + +# DEVMODE.dmNup +DMNUP_SYSTEM = 1 +DMNUP_ONEUP = 2 + +# used with ExtEscape +FEATURESETTING_NUP = 0 +FEATURESETTING_OUTPUT = 1 +FEATURESETTING_PSLEVEL = 2 +FEATURESETTING_CUSTPAPER = 3 +FEATURESETTING_MIRROR = 4 +FEATURESETTING_NEGATIVE = 5 +FEATURESETTING_PROTOCOL = 6 +FEATURESETTING_PRIVATE_BEGIN = 0x1000 +FEATURESETTING_PRIVATE_END = 0x1FFF + +RDH_RECTANGLES = 1 +GGO_METRICS = 0 +GGO_BITMAP = 1 +GGO_NATIVE = 2 +TT_POLYGON_TYPE = 24 +TT_PRIM_LINE = 1 +TT_PRIM_QSPLINE = 2 +TT_AVAILABLE = 1 +TT_ENABLED = 2 +DM_UPDATE = 1 +DM_COPY = 2 +DM_PROMPT = 4 +DM_MODIFY = 8 +DM_IN_BUFFER = DM_MODIFY +DM_IN_PROMPT = DM_PROMPT +DM_OUT_BUFFER = DM_COPY +DM_OUT_DEFAULT = DM_UPDATE + +# DISPLAY_DEVICE.StateFlags +DISPLAY_DEVICE_ATTACHED_TO_DESKTOP = 1 +DISPLAY_DEVICE_MULTI_DRIVER = 2 +DISPLAY_DEVICE_PRIMARY_DEVICE = 4 +DISPLAY_DEVICE_MIRRORING_DRIVER = 8 +DISPLAY_DEVICE_VGA_COMPATIBLE = 16 +DISPLAY_DEVICE_REMOVABLE = 32 +DISPLAY_DEVICE_MODESPRUNED = 134217728 +DISPLAY_DEVICE_REMOTE = 67108864 +DISPLAY_DEVICE_DISCONNECT = 33554432 + +# DeviceCapabilities types +DC_FIELDS = 1 +DC_PAPERS = 2 +DC_PAPERSIZE = 3 +DC_MINEXTENT = 4 +DC_MAXEXTENT = 5 +DC_BINS = 6 +DC_DUPLEX = 7 +DC_SIZE = 8 +DC_EXTRA = 9 +DC_VERSION = 10 +DC_DRIVER = 11 +DC_BINNAMES = 12 +DC_ENUMRESOLUTIONS = 13 +DC_FILEDEPENDENCIES = 14 +DC_TRUETYPE = 15 +DC_PAPERNAMES = 16 +DC_ORIENTATION = 17 +DC_COPIES = 18 +DC_BINADJUST = 19 +DC_EMF_COMPLIANT = 20 +DC_DATATYPE_PRODUCED = 21 +DC_COLLATE = 22 
+DC_MANUFACTURER = 23 +DC_MODEL = 24 +DC_PERSONALITY = 25 +DC_PRINTRATE = 26 +DC_PRINTRATEUNIT = 27 +DC_PRINTERMEM = 28 +DC_MEDIAREADY = 29 +DC_STAPLE = 30 +DC_PRINTRATEPPM = 31 +DC_COLORDEVICE = 32 +DC_NUP = 33 +DC_MEDIATYPENAMES = 34 +DC_MEDIATYPES = 35 + +PRINTRATEUNIT_PPM = 1 +PRINTRATEUNIT_CPS = 2 +PRINTRATEUNIT_LPM = 3 +PRINTRATEUNIT_IPM = 4 + +# TrueType constants +DCTT_BITMAP = 1 +DCTT_DOWNLOAD = 2 +DCTT_SUBDEV = 4 +DCTT_DOWNLOAD_OUTLINE = 8 + +DCBA_FACEUPNONE = 0 +DCBA_FACEUPCENTER = 1 +DCBA_FACEUPLEFT = 2 +DCBA_FACEUPRIGHT = 3 +DCBA_FACEDOWNNONE = 256 +DCBA_FACEDOWNCENTER = 257 +DCBA_FACEDOWNLEFT = 258 +DCBA_FACEDOWNRIGHT = 259 + +CA_NEGATIVE = 1 +CA_LOG_FILTER = 2 +ILLUMINANT_DEVICE_DEFAULT = 0 +ILLUMINANT_A = 1 +ILLUMINANT_B = 2 +ILLUMINANT_C = 3 +ILLUMINANT_D50 = 4 +ILLUMINANT_D55 = 5 +ILLUMINANT_D65 = 6 +ILLUMINANT_D75 = 7 +ILLUMINANT_F2 = 8 +ILLUMINANT_MAX_INDEX = ILLUMINANT_F2 +ILLUMINANT_TUNGSTEN = ILLUMINANT_A +ILLUMINANT_DAYLIGHT = ILLUMINANT_C +ILLUMINANT_FLUORESCENT = ILLUMINANT_F2 +ILLUMINANT_NTSC = ILLUMINANT_C + +# Generated by h2py from \msvcnt\include\wingdi.h +# hacked and split manually by mhammond. +FONTMAPPER_MAX = 10 +ENHMETA_SIGNATURE = 1179469088 +ENHMETA_STOCK_OBJECT = -2147483648 +EMR_HEADER = 1 +EMR_POLYBEZIER = 2 +EMR_POLYGON = 3 +EMR_POLYLINE = 4 +EMR_POLYBEZIERTO = 5 +EMR_POLYLINETO = 6 +EMR_POLYPOLYLINE = 7 +EMR_POLYPOLYGON = 8 +EMR_SETWINDOWEXTEX = 9 +EMR_SETWINDOWORGEX = 10 +EMR_SETVIEWPORTEXTEX = 11 +EMR_SETVIEWPORTORGEX = 12 +EMR_SETBRUSHORGEX = 13 +EMR_EOF = 14 +EMR_SETPIXELV = 15 +EMR_SETMAPPERFLAGS = 16 +EMR_SETMAPMODE = 17 +EMR_SETBKMODE = 18 +EMR_SETPOLYFILLMODE = 19 +EMR_SETROP2 = 20 +EMR_SETSTRETCHBLTMODE = 21 +EMR_SETTEXTALIGN = 22 +EMR_SETCOLORADJUSTMENT = 23 +EMR_SETTEXTCOLOR = 24 +EMR_SETBKCOLOR = 25 +EMR_OFFSETCLIPRGN = 26 +EMR_MOVETOEX = 27 +EMR_SETMETARGN = 28 +EMR_EXCLUDECLIPRECT = 29 +EMR_INTERSECTCLIPRECT = 30 +EMR_SCALEVIEWPORTEXTEX = 31 +EMR_SCALEWINDOWEXTEX = 32 +EMR_SAVEDC = 33 +EMR_RESTOREDC = 34 +EMR_SETWORLDTRANSFORM = 35 +EMR_MODIFYWORLDTRANSFORM = 36 +EMR_SELECTOBJECT = 37 +EMR_CREATEPEN = 38 +EMR_CREATEBRUSHINDIRECT = 39 +EMR_DELETEOBJECT = 40 +EMR_ANGLEARC = 41 +EMR_ELLIPSE = 42 +EMR_RECTANGLE = 43 +EMR_ROUNDRECT = 44 +EMR_ARC = 45 +EMR_CHORD = 46 +EMR_PIE = 47 +EMR_SELECTPALETTE = 48 +EMR_CREATEPALETTE = 49 +EMR_SETPALETTEENTRIES = 50 +EMR_RESIZEPALETTE = 51 +EMR_REALIZEPALETTE = 52 +EMR_EXTFLOODFILL = 53 +EMR_LINETO = 54 +EMR_ARCTO = 55 +EMR_POLYDRAW = 56 +EMR_SETARCDIRECTION = 57 +EMR_SETMITERLIMIT = 58 +EMR_BEGINPATH = 59 +EMR_ENDPATH = 60 +EMR_CLOSEFIGURE = 61 +EMR_FILLPATH = 62 +EMR_STROKEANDFILLPATH = 63 +EMR_STROKEPATH = 64 +EMR_FLATTENPATH = 65 +EMR_WIDENPATH = 66 +EMR_SELECTCLIPPATH = 67 +EMR_ABORTPATH = 68 +EMR_GDICOMMENT = 70 +EMR_FILLRGN = 71 +EMR_FRAMERGN = 72 +EMR_INVERTRGN = 73 +EMR_PAINTRGN = 74 +EMR_EXTSELECTCLIPRGN = 75 +EMR_BITBLT = 76 +EMR_STRETCHBLT = 77 +EMR_MASKBLT = 78 +EMR_PLGBLT = 79 +EMR_SETDIBITSTODEVICE = 80 +EMR_STRETCHDIBITS = 81 +EMR_EXTCREATEFONTINDIRECTW = 82 +EMR_EXTTEXTOUTA = 83 +EMR_EXTTEXTOUTW = 84 +EMR_POLYBEZIER16 = 85 +EMR_POLYGON16 = 86 +EMR_POLYLINE16 = 87 +EMR_POLYBEZIERTO16 = 88 +EMR_POLYLINETO16 = 89 +EMR_POLYPOLYLINE16 = 90 +EMR_POLYPOLYGON16 = 91 +EMR_POLYDRAW16 = 92 +EMR_CREATEMONOBRUSH = 93 +EMR_CREATEDIBPATTERNBRUSHPT = 94 +EMR_EXTCREATEPEN = 95 +EMR_POLYTEXTOUTA = 96 +EMR_POLYTEXTOUTW = 97 +EMR_MIN = 1 +EMR_MAX = 97 +# Generated by h2py from \msvcnt\include\wingdi.h +# hacked and split manually by mhammond. 
+PANOSE_COUNT = 10 +PAN_FAMILYTYPE_INDEX = 0 +PAN_SERIFSTYLE_INDEX = 1 +PAN_WEIGHT_INDEX = 2 +PAN_PROPORTION_INDEX = 3 +PAN_CONTRAST_INDEX = 4 +PAN_STROKEVARIATION_INDEX = 5 +PAN_ARMSTYLE_INDEX = 6 +PAN_LETTERFORM_INDEX = 7 +PAN_MIDLINE_INDEX = 8 +PAN_XHEIGHT_INDEX = 9 +PAN_CULTURE_LATIN = 0 +PAN_ANY = 0 +PAN_NO_FIT = 1 +PAN_FAMILY_TEXT_DISPLAY = 2 +PAN_FAMILY_SCRIPT = 3 +PAN_FAMILY_DECORATIVE = 4 +PAN_FAMILY_PICTORIAL = 5 +PAN_SERIF_COVE = 2 +PAN_SERIF_OBTUSE_COVE = 3 +PAN_SERIF_SQUARE_COVE = 4 +PAN_SERIF_OBTUSE_SQUARE_COVE = 5 +PAN_SERIF_SQUARE = 6 +PAN_SERIF_THIN = 7 +PAN_SERIF_BONE = 8 +PAN_SERIF_EXAGGERATED = 9 +PAN_SERIF_TRIANGLE = 10 +PAN_SERIF_NORMAL_SANS = 11 +PAN_SERIF_OBTUSE_SANS = 12 +PAN_SERIF_PERP_SANS = 13 +PAN_SERIF_FLARED = 14 +PAN_SERIF_ROUNDED = 15 +PAN_WEIGHT_VERY_LIGHT = 2 +PAN_WEIGHT_LIGHT = 3 +PAN_WEIGHT_THIN = 4 +PAN_WEIGHT_BOOK = 5 +PAN_WEIGHT_MEDIUM = 6 +PAN_WEIGHT_DEMI = 7 +PAN_WEIGHT_BOLD = 8 +PAN_WEIGHT_HEAVY = 9 +PAN_WEIGHT_BLACK = 10 +PAN_WEIGHT_NORD = 11 +PAN_PROP_OLD_STYLE = 2 +PAN_PROP_MODERN = 3 +PAN_PROP_EVEN_WIDTH = 4 +PAN_PROP_EXPANDED = 5 +PAN_PROP_CONDENSED = 6 +PAN_PROP_VERY_EXPANDED = 7 +PAN_PROP_VERY_CONDENSED = 8 +PAN_PROP_MONOSPACED = 9 +PAN_CONTRAST_NONE = 2 +PAN_CONTRAST_VERY_LOW = 3 +PAN_CONTRAST_LOW = 4 +PAN_CONTRAST_MEDIUM_LOW = 5 +PAN_CONTRAST_MEDIUM = 6 +PAN_CONTRAST_MEDIUM_HIGH = 7 +PAN_CONTRAST_HIGH = 8 +PAN_CONTRAST_VERY_HIGH = 9 +PAN_STROKE_GRADUAL_DIAG = 2 +PAN_STROKE_GRADUAL_TRAN = 3 +PAN_STROKE_GRADUAL_VERT = 4 +PAN_STROKE_GRADUAL_HORZ = 5 +PAN_STROKE_RAPID_VERT = 6 +PAN_STROKE_RAPID_HORZ = 7 +PAN_STROKE_INSTANT_VERT = 8 +PAN_STRAIGHT_ARMS_HORZ = 2 +PAN_STRAIGHT_ARMS_WEDGE = 3 +PAN_STRAIGHT_ARMS_VERT = 4 +PAN_STRAIGHT_ARMS_SINGLE_SERIF = 5 +PAN_STRAIGHT_ARMS_DOUBLE_SERIF = 6 +PAN_BENT_ARMS_HORZ = 7 +PAN_BENT_ARMS_WEDGE = 8 +PAN_BENT_ARMS_VERT = 9 +PAN_BENT_ARMS_SINGLE_SERIF = 10 +PAN_BENT_ARMS_DOUBLE_SERIF = 11 +PAN_LETT_NORMAL_CONTACT = 2 +PAN_LETT_NORMAL_WEIGHTED = 3 +PAN_LETT_NORMAL_BOXED = 4 +PAN_LETT_NORMAL_FLATTENED = 5 +PAN_LETT_NORMAL_ROUNDED = 6 +PAN_LETT_NORMAL_OFF_CENTER = 7 +PAN_LETT_NORMAL_SQUARE = 8 +PAN_LETT_OBLIQUE_CONTACT = 9 +PAN_LETT_OBLIQUE_WEIGHTED = 10 +PAN_LETT_OBLIQUE_BOXED = 11 +PAN_LETT_OBLIQUE_FLATTENED = 12 +PAN_LETT_OBLIQUE_ROUNDED = 13 +PAN_LETT_OBLIQUE_OFF_CENTER = 14 +PAN_LETT_OBLIQUE_SQUARE = 15 +PAN_MIDLINE_STANDARD_TRIMMED = 2 +PAN_MIDLINE_STANDARD_POINTED = 3 +PAN_MIDLINE_STANDARD_SERIFED = 4 +PAN_MIDLINE_HIGH_TRIMMED = 5 +PAN_MIDLINE_HIGH_POINTED = 6 +PAN_MIDLINE_HIGH_SERIFED = 7 +PAN_MIDLINE_CONSTANT_TRIMMED = 8 +PAN_MIDLINE_CONSTANT_POINTED = 9 +PAN_MIDLINE_CONSTANT_SERIFED = 10 +PAN_MIDLINE_LOW_TRIMMED = 11 +PAN_MIDLINE_LOW_POINTED = 12 +PAN_MIDLINE_LOW_SERIFED = 13 +PAN_XHEIGHT_CONSTANT_SMALL = 2 +PAN_XHEIGHT_CONSTANT_STD = 3 +PAN_XHEIGHT_CONSTANT_LARGE = 4 +PAN_XHEIGHT_DUCKING_SMALL = 5 +PAN_XHEIGHT_DUCKING_STD = 6 +PAN_XHEIGHT_DUCKING_LARGE = 7 +ELF_VENDOR_SIZE = 4 +ELF_VERSION = 0 +ELF_CULTURE_LATIN = 0 +RASTER_FONTTYPE = 1 +DEVICE_FONTTYPE = 2 +TRUETYPE_FONTTYPE = 4 +def PALETTEINDEX(i): return ((16777216 | (i))) + +PC_RESERVED = 1 +PC_EXPLICIT = 2 +PC_NOCOLLAPSE = 4 +def GetRValue(rgb): return rgb & 0xff + +def GetGValue(rgb): return (rgb >> 8) & 0xff + +def GetBValue(rgb): return (rgb >> 16) & 0xff + +TRANSPARENT = 1 +OPAQUE = 2 +BKMODE_LAST = 2 +GM_COMPATIBLE = 1 +GM_ADVANCED = 2 +GM_LAST = 2 +PT_CLOSEFIGURE = 1 +PT_LINETO = 2 +PT_BEZIERTO = 4 +PT_MOVETO = 6 +MM_TEXT = 1 +MM_LOMETRIC = 2 +MM_HIMETRIC = 3 +MM_LOENGLISH = 4 +MM_HIENGLISH = 5 +MM_TWIPS = 6 +MM_ISOTROPIC = 7 
+MM_ANISOTROPIC = 8 +MM_MIN = MM_TEXT +MM_MAX = MM_ANISOTROPIC +MM_MAX_FIXEDSCALE = MM_TWIPS +ABSOLUTE = 1 +RELATIVE = 2 +WHITE_BRUSH = 0 +LTGRAY_BRUSH = 1 +GRAY_BRUSH = 2 +DKGRAY_BRUSH = 3 +BLACK_BRUSH = 4 +NULL_BRUSH = 5 +HOLLOW_BRUSH = NULL_BRUSH +WHITE_PEN = 6 +BLACK_PEN = 7 +NULL_PEN = 8 +OEM_FIXED_FONT = 10 +ANSI_FIXED_FONT = 11 +ANSI_VAR_FONT = 12 +SYSTEM_FONT = 13 +DEVICE_DEFAULT_FONT = 14 +DEFAULT_PALETTE = 15 +SYSTEM_FIXED_FONT = 16 +STOCK_LAST = 16 +CLR_INVALID = -1 + +DC_BRUSH = 18 +DC_PEN = 19 + +# Exception/Status codes from winuser.h and winnt.h +STATUS_WAIT_0 = 0 +STATUS_ABANDONED_WAIT_0 = 128 +STATUS_USER_APC = 192 +STATUS_TIMEOUT = 258 +STATUS_PENDING = 259 +STATUS_SEGMENT_NOTIFICATION = 1073741829 +STATUS_GUARD_PAGE_VIOLATION = -2147483647 +STATUS_DATATYPE_MISALIGNMENT = -2147483646 +STATUS_BREAKPOINT = -2147483645 +STATUS_SINGLE_STEP = -2147483644 +STATUS_ACCESS_VIOLATION = -1073741819 +STATUS_IN_PAGE_ERROR = -1073741818 +STATUS_INVALID_HANDLE = -1073741816 +STATUS_NO_MEMORY = -1073741801 +STATUS_ILLEGAL_INSTRUCTION = -1073741795 +STATUS_NONCONTINUABLE_EXCEPTION = -1073741787 +STATUS_INVALID_DISPOSITION = -1073741786 +STATUS_ARRAY_BOUNDS_EXCEEDED = -1073741684 +STATUS_FLOAT_DENORMAL_OPERAND = -1073741683 +STATUS_FLOAT_DIVIDE_BY_ZERO = -1073741682 +STATUS_FLOAT_INEXACT_RESULT = -1073741681 +STATUS_FLOAT_INVALID_OPERATION = -1073741680 +STATUS_FLOAT_OVERFLOW = -1073741679 +STATUS_FLOAT_STACK_CHECK = -1073741678 +STATUS_FLOAT_UNDERFLOW = -1073741677 +STATUS_INTEGER_DIVIDE_BY_ZERO = -1073741676 +STATUS_INTEGER_OVERFLOW = -1073741675 +STATUS_PRIVILEGED_INSTRUCTION = -1073741674 +STATUS_STACK_OVERFLOW = -1073741571 +STATUS_CONTROL_C_EXIT = -1073741510 + + +WAIT_FAILED = -1 +WAIT_OBJECT_0 = STATUS_WAIT_0 + 0 + +WAIT_ABANDONED = STATUS_ABANDONED_WAIT_0 + 0 +WAIT_ABANDONED_0 = STATUS_ABANDONED_WAIT_0 + 0 + +WAIT_TIMEOUT = STATUS_TIMEOUT +WAIT_IO_COMPLETION = STATUS_USER_APC +STILL_ACTIVE = STATUS_PENDING +EXCEPTION_ACCESS_VIOLATION = STATUS_ACCESS_VIOLATION +EXCEPTION_DATATYPE_MISALIGNMENT = STATUS_DATATYPE_MISALIGNMENT +EXCEPTION_BREAKPOINT = STATUS_BREAKPOINT +EXCEPTION_SINGLE_STEP = STATUS_SINGLE_STEP +EXCEPTION_ARRAY_BOUNDS_EXCEEDED = STATUS_ARRAY_BOUNDS_EXCEEDED +EXCEPTION_FLT_DENORMAL_OPERAND = STATUS_FLOAT_DENORMAL_OPERAND +EXCEPTION_FLT_DIVIDE_BY_ZERO = STATUS_FLOAT_DIVIDE_BY_ZERO +EXCEPTION_FLT_INEXACT_RESULT = STATUS_FLOAT_INEXACT_RESULT +EXCEPTION_FLT_INVALID_OPERATION = STATUS_FLOAT_INVALID_OPERATION +EXCEPTION_FLT_OVERFLOW = STATUS_FLOAT_OVERFLOW +EXCEPTION_FLT_STACK_CHECK = STATUS_FLOAT_STACK_CHECK +EXCEPTION_FLT_UNDERFLOW = STATUS_FLOAT_UNDERFLOW +EXCEPTION_INT_DIVIDE_BY_ZERO = STATUS_INTEGER_DIVIDE_BY_ZERO +EXCEPTION_INT_OVERFLOW = STATUS_INTEGER_OVERFLOW +EXCEPTION_PRIV_INSTRUCTION = STATUS_PRIVILEGED_INSTRUCTION +EXCEPTION_IN_PAGE_ERROR = STATUS_IN_PAGE_ERROR +EXCEPTION_ILLEGAL_INSTRUCTION = STATUS_ILLEGAL_INSTRUCTION +EXCEPTION_NONCONTINUABLE_EXCEPTION = STATUS_NONCONTINUABLE_EXCEPTION +EXCEPTION_STACK_OVERFLOW = STATUS_STACK_OVERFLOW +EXCEPTION_INVALID_DISPOSITION = STATUS_INVALID_DISPOSITION +EXCEPTION_GUARD_PAGE = STATUS_GUARD_PAGE_VIOLATION +EXCEPTION_INVALID_HANDLE = STATUS_INVALID_HANDLE +CONTROL_C_EXIT = STATUS_CONTROL_C_EXIT + +# winuser.h line 8594 +# constants used with SystemParametersInfo +SPI_GETBEEP = 1 +SPI_SETBEEP = 2 +SPI_GETMOUSE = 3 +SPI_SETMOUSE = 4 +SPI_GETBORDER = 5 +SPI_SETBORDER = 6 +SPI_GETKEYBOARDSPEED = 10 +SPI_SETKEYBOARDSPEED = 11 +SPI_LANGDRIVER = 12 +SPI_ICONHORIZONTALSPACING = 13 +SPI_GETSCREENSAVETIMEOUT = 14 
+SPI_SETSCREENSAVETIMEOUT = 15 +SPI_GETSCREENSAVEACTIVE = 16 +SPI_SETSCREENSAVEACTIVE = 17 +SPI_GETGRIDGRANULARITY = 18 +SPI_SETGRIDGRANULARITY = 19 +SPI_SETDESKWALLPAPER = 20 +SPI_SETDESKPATTERN = 21 +SPI_GETKEYBOARDDELAY = 22 +SPI_SETKEYBOARDDELAY = 23 +SPI_ICONVERTICALSPACING = 24 +SPI_GETICONTITLEWRAP = 25 +SPI_SETICONTITLEWRAP = 26 +SPI_GETMENUDROPALIGNMENT = 27 +SPI_SETMENUDROPALIGNMENT = 28 +SPI_SETDOUBLECLKWIDTH = 29 +SPI_SETDOUBLECLKHEIGHT = 30 +SPI_GETICONTITLELOGFONT = 31 +SPI_SETDOUBLECLICKTIME = 32 +SPI_SETMOUSEBUTTONSWAP = 33 +SPI_SETICONTITLELOGFONT = 34 +SPI_GETFASTTASKSWITCH = 35 +SPI_SETFASTTASKSWITCH = 36 +SPI_SETDRAGFULLWINDOWS = 37 +SPI_GETDRAGFULLWINDOWS = 38 +SPI_GETNONCLIENTMETRICS = 41 +SPI_SETNONCLIENTMETRICS = 42 +SPI_GETMINIMIZEDMETRICS = 43 +SPI_SETMINIMIZEDMETRICS = 44 +SPI_GETICONMETRICS = 45 +SPI_SETICONMETRICS = 46 +SPI_SETWORKAREA = 47 +SPI_GETWORKAREA = 48 +SPI_SETPENWINDOWS = 49 +SPI_GETFILTERKEYS = 50 +SPI_SETFILTERKEYS = 51 +SPI_GETTOGGLEKEYS = 52 +SPI_SETTOGGLEKEYS = 53 +SPI_GETMOUSEKEYS = 54 +SPI_SETMOUSEKEYS = 55 +SPI_GETSHOWSOUNDS = 56 +SPI_SETSHOWSOUNDS = 57 +SPI_GETSTICKYKEYS = 58 +SPI_SETSTICKYKEYS = 59 +SPI_GETACCESSTIMEOUT = 60 +SPI_SETACCESSTIMEOUT = 61 +SPI_GETSERIALKEYS = 62 +SPI_SETSERIALKEYS = 63 +SPI_GETSOUNDSENTRY = 64 +SPI_SETSOUNDSENTRY = 65 +SPI_GETHIGHCONTRAST = 66 +SPI_SETHIGHCONTRAST = 67 +SPI_GETKEYBOARDPREF = 68 +SPI_SETKEYBOARDPREF = 69 +SPI_GETSCREENREADER = 70 +SPI_SETSCREENREADER = 71 +SPI_GETANIMATION = 72 +SPI_SETANIMATION = 73 +SPI_GETFONTSMOOTHING = 74 +SPI_SETFONTSMOOTHING = 75 +SPI_SETDRAGWIDTH = 76 +SPI_SETDRAGHEIGHT = 77 +SPI_SETHANDHELD = 78 +SPI_GETLOWPOWERTIMEOUT = 79 +SPI_GETPOWEROFFTIMEOUT = 80 +SPI_SETLOWPOWERTIMEOUT = 81 +SPI_SETPOWEROFFTIMEOUT = 82 +SPI_GETLOWPOWERACTIVE = 83 +SPI_GETPOWEROFFACTIVE = 84 +SPI_SETLOWPOWERACTIVE = 85 +SPI_SETPOWEROFFACTIVE = 86 +SPI_SETCURSORS = 87 +SPI_SETICONS = 88 +SPI_GETDEFAULTINPUTLANG = 89 +SPI_SETDEFAULTINPUTLANG = 90 +SPI_SETLANGTOGGLE = 91 +SPI_GETWINDOWSEXTENSION = 92 +SPI_SETMOUSETRAILS = 93 +SPI_GETMOUSETRAILS = 94 +SPI_GETSNAPTODEFBUTTON = 95 +SPI_SETSNAPTODEFBUTTON = 96 +SPI_SETSCREENSAVERRUNNING = 97 +SPI_SCREENSAVERRUNNING = SPI_SETSCREENSAVERRUNNING +SPI_GETMOUSEHOVERWIDTH = 98 +SPI_SETMOUSEHOVERWIDTH = 99 +SPI_GETMOUSEHOVERHEIGHT = 100 +SPI_SETMOUSEHOVERHEIGHT = 101 +SPI_GETMOUSEHOVERTIME = 102 +SPI_SETMOUSEHOVERTIME = 103 +SPI_GETWHEELSCROLLLINES = 104 +SPI_SETWHEELSCROLLLINES = 105 +SPI_GETMENUSHOWDELAY = 106 +SPI_SETMENUSHOWDELAY = 107 + +SPI_GETSHOWIMEUI = 110 +SPI_SETSHOWIMEUI = 111 +SPI_GETMOUSESPEED = 112 +SPI_SETMOUSESPEED = 113 +SPI_GETSCREENSAVERRUNNING = 114 +SPI_GETDESKWALLPAPER = 115 + +SPI_GETACTIVEWINDOWTRACKING = 4096 +SPI_SETACTIVEWINDOWTRACKING = 4097 +SPI_GETMENUANIMATION = 4098 +SPI_SETMENUANIMATION = 4099 +SPI_GETCOMBOBOXANIMATION = 4100 +SPI_SETCOMBOBOXANIMATION = 4101 +SPI_GETLISTBOXSMOOTHSCROLLING = 4102 +SPI_SETLISTBOXSMOOTHSCROLLING = 4103 +SPI_GETGRADIENTCAPTIONS = 4104 +SPI_SETGRADIENTCAPTIONS = 4105 +SPI_GETKEYBOARDCUES = 4106 +SPI_SETKEYBOARDCUES = 4107 +SPI_GETMENUUNDERLINES = 4106 +SPI_SETMENUUNDERLINES = 4107 +SPI_GETACTIVEWNDTRKZORDER = 4108 +SPI_SETACTIVEWNDTRKZORDER = 4109 +SPI_GETHOTTRACKING = 4110 +SPI_SETHOTTRACKING = 4111 + +SPI_GETMENUFADE = 4114 +SPI_SETMENUFADE = 4115 +SPI_GETSELECTIONFADE = 4116 +SPI_SETSELECTIONFADE = 4117 +SPI_GETTOOLTIPANIMATION = 4118 +SPI_SETTOOLTIPANIMATION = 4119 +SPI_GETTOOLTIPFADE = 4120 +SPI_SETTOOLTIPFADE = 4121 +SPI_GETCURSORSHADOW = 4122 +SPI_SETCURSORSHADOW = 4123 +SPI_GETMOUSESONAR = 
4124 +SPI_SETMOUSESONAR = 4125 +SPI_GETMOUSECLICKLOCK = 4126 +SPI_SETMOUSECLICKLOCK = 4127 +SPI_GETMOUSEVANISH = 4128 +SPI_SETMOUSEVANISH = 4129 +SPI_GETFLATMENU = 4130 +SPI_SETFLATMENU = 4131 +SPI_GETDROPSHADOW = 4132 +SPI_SETDROPSHADOW = 4133 +SPI_GETBLOCKSENDINPUTRESETS = 4134 +SPI_SETBLOCKSENDINPUTRESETS = 4135 +SPI_GETUIEFFECTS = 4158 +SPI_SETUIEFFECTS = 4159 + +SPI_GETFOREGROUNDLOCKTIMEOUT = 8192 +SPI_SETFOREGROUNDLOCKTIMEOUT = 8193 +SPI_GETACTIVEWNDTRKTIMEOUT = 8194 +SPI_SETACTIVEWNDTRKTIMEOUT = 8195 +SPI_GETFOREGROUNDFLASHCOUNT = 8196 +SPI_SETFOREGROUNDFLASHCOUNT = 8197 +SPI_GETCARETWIDTH = 8198 +SPI_SETCARETWIDTH = 8199 +SPI_GETMOUSECLICKLOCKTIME = 8200 +SPI_SETMOUSECLICKLOCKTIME = 8201 +SPI_GETFONTSMOOTHINGTYPE = 8202 +SPI_SETFONTSMOOTHINGTYPE = 8203 +SPI_GETFONTSMOOTHINGCONTRAST = 8204 +SPI_SETFONTSMOOTHINGCONTRAST = 8205 +SPI_GETFOCUSBORDERWIDTH = 8206 +SPI_SETFOCUSBORDERWIDTH = 8207 +SPI_GETFOCUSBORDERHEIGHT = 8208 +SPI_SETFOCUSBORDERHEIGHT = 8209 +SPI_GETFONTSMOOTHINGORIENTATION = 8210 +SPI_SETFONTSMOOTHINGORIENTATION = 8211 + +# fWinIni flags for SystemParametersInfo +SPIF_UPDATEINIFILE = 1 +SPIF_SENDWININICHANGE = 2 +SPIF_SENDCHANGE = SPIF_SENDWININICHANGE + +# used with SystemParametersInfo and SPI_GETFONTSMOOTHINGTYPE/SPI_SETFONTSMOOTHINGTYPE +FE_FONTSMOOTHINGSTANDARD = 1 +FE_FONTSMOOTHINGCLEARTYPE = 2 +FE_FONTSMOOTHINGDOCKING = 32768 + +METRICS_USEDEFAULT = -1 +ARW_BOTTOMLEFT = 0 +ARW_BOTTOMRIGHT = 1 +ARW_TOPLEFT = 2 +ARW_TOPRIGHT = 3 +ARW_STARTMASK = 3 +ARW_STARTRIGHT = 1 +ARW_STARTTOP = 2 +ARW_LEFT = 0 +ARW_RIGHT = 0 +ARW_UP = 4 +ARW_DOWN = 4 +ARW_HIDE = 8 +#ARW_VALID = 0x000F +SERKF_SERIALKEYSON = 1 +SERKF_AVAILABLE = 2 +SERKF_INDICATOR = 4 +HCF_HIGHCONTRASTON = 1 +HCF_AVAILABLE = 2 +HCF_HOTKEYACTIVE = 4 +HCF_CONFIRMHOTKEY = 8 +HCF_HOTKEYSOUND = 16 +HCF_INDICATOR = 32 +HCF_HOTKEYAVAILABLE = 64 +CDS_UPDATEREGISTRY = 1 +CDS_TEST = 2 +CDS_FULLSCREEN = 4 +CDS_GLOBAL = 8 +CDS_SET_PRIMARY = 16 +CDS_RESET = 1073741824 +CDS_SETRECT = 536870912 +CDS_NORESET = 268435456 + +# return values from ChangeDisplaySettings and ChangeDisplaySettingsEx +DISP_CHANGE_SUCCESSFUL = 0 +DISP_CHANGE_RESTART = 1 +DISP_CHANGE_FAILED = -1 +DISP_CHANGE_BADMODE = -2 +DISP_CHANGE_NOTUPDATED = -3 +DISP_CHANGE_BADFLAGS = -4 +DISP_CHANGE_BADPARAM = -5 +DISP_CHANGE_BADDUALVIEW = -6 + +ENUM_CURRENT_SETTINGS = -1 +ENUM_REGISTRY_SETTINGS = -2 +FKF_FILTERKEYSON = 1 +FKF_AVAILABLE = 2 +FKF_HOTKEYACTIVE = 4 +FKF_CONFIRMHOTKEY = 8 +FKF_HOTKEYSOUND = 16 +FKF_INDICATOR = 32 +FKF_CLICKON = 64 +SKF_STICKYKEYSON = 1 +SKF_AVAILABLE = 2 +SKF_HOTKEYACTIVE = 4 +SKF_CONFIRMHOTKEY = 8 +SKF_HOTKEYSOUND = 16 +SKF_INDICATOR = 32 +SKF_AUDIBLEFEEDBACK = 64 +SKF_TRISTATE = 128 +SKF_TWOKEYSOFF = 256 +SKF_LALTLATCHED = 268435456 +SKF_LCTLLATCHED = 67108864 +SKF_LSHIFTLATCHED = 16777216 +SKF_RALTLATCHED = 536870912 +SKF_RCTLLATCHED = 134217728 +SKF_RSHIFTLATCHED = 33554432 +SKF_LWINLATCHED = 1073741824 +SKF_RWINLATCHED = -2147483648 +SKF_LALTLOCKED = 1048576 +SKF_LCTLLOCKED = 262144 +SKF_LSHIFTLOCKED = 65536 +SKF_RALTLOCKED = 2097152 +SKF_RCTLLOCKED = 524288 +SKF_RSHIFTLOCKED = 131072 +SKF_LWINLOCKED = 4194304 +SKF_RWINLOCKED = 8388608 +MKF_MOUSEKEYSON = 1 +MKF_AVAILABLE = 2 +MKF_HOTKEYACTIVE = 4 +MKF_CONFIRMHOTKEY = 8 +MKF_HOTKEYSOUND = 16 +MKF_INDICATOR = 32 +MKF_MODIFIERS = 64 +MKF_REPLACENUMBERS = 128 +MKF_LEFTBUTTONSEL = 268435456 +MKF_RIGHTBUTTONSEL = 536870912 +MKF_LEFTBUTTONDOWN = 16777216 +MKF_RIGHTBUTTONDOWN = 33554432 +MKF_MOUSEMODE = -2147483648 +ATF_TIMEOUTON = 1 +ATF_ONOFFFEEDBACK = 2 +SSGF_NONE = 0 +SSGF_DISPLAY = 3 
+SSTF_NONE = 0 +SSTF_CHARS = 1 +SSTF_BORDER = 2 +SSTF_DISPLAY = 3 +SSWF_NONE = 0 +SSWF_TITLE = 1 +SSWF_WINDOW = 2 +SSWF_DISPLAY = 3 +SSWF_CUSTOM = 4 +SSF_SOUNDSENTRYON = 1 +SSF_AVAILABLE = 2 +SSF_INDICATOR = 4 +TKF_TOGGLEKEYSON = 1 +TKF_AVAILABLE = 2 +TKF_HOTKEYACTIVE = 4 +TKF_CONFIRMHOTKEY = 8 +TKF_HOTKEYSOUND = 16 +TKF_INDICATOR = 32 +SLE_ERROR = 1 +SLE_MINORERROR = 2 +SLE_WARNING = 3 +MONITOR_DEFAULTTONULL = 0 +MONITOR_DEFAULTTOPRIMARY = 1 +MONITOR_DEFAULTTONEAREST = 2 +MONITORINFOF_PRIMARY = 1 +CCHDEVICENAME = 32 +CHILDID_SELF = 0 +INDEXID_OBJECT = 0 +INDEXID_CONTAINER = 0 +OBJID_WINDOW = 0 +OBJID_SYSMENU = -1 +OBJID_TITLEBAR = -2 +OBJID_MENU = -3 +OBJID_CLIENT = -4 +OBJID_VSCROLL = -5 +OBJID_HSCROLL = -6 +OBJID_SIZEGRIP = -7 +OBJID_CARET = -8 +OBJID_CURSOR = -9 +OBJID_ALERT = -10 +OBJID_SOUND = -11 +EVENT_MIN = 1 +EVENT_MAX = 2147483647 +EVENT_SYSTEM_SOUND = 1 +EVENT_SYSTEM_ALERT = 2 +EVENT_SYSTEM_FOREGROUND = 3 +EVENT_SYSTEM_MENUSTART = 4 +EVENT_SYSTEM_MENUEND = 5 +EVENT_SYSTEM_MENUPOPUPSTART = 6 +EVENT_SYSTEM_MENUPOPUPEND = 7 +EVENT_SYSTEM_CAPTURESTART = 8 +EVENT_SYSTEM_CAPTUREEND = 9 +EVENT_SYSTEM_MOVESIZESTART = 10 +EVENT_SYSTEM_MOVESIZEEND = 11 +EVENT_SYSTEM_CONTEXTHELPSTART = 12 +EVENT_SYSTEM_CONTEXTHELPEND = 13 +EVENT_SYSTEM_DRAGDROPSTART = 14 +EVENT_SYSTEM_DRAGDROPEND = 15 +EVENT_SYSTEM_DIALOGSTART = 16 +EVENT_SYSTEM_DIALOGEND = 17 +EVENT_SYSTEM_SCROLLINGSTART = 18 +EVENT_SYSTEM_SCROLLINGEND = 19 +EVENT_SYSTEM_SWITCHSTART = 20 +EVENT_SYSTEM_SWITCHEND = 21 +EVENT_SYSTEM_MINIMIZESTART = 22 +EVENT_SYSTEM_MINIMIZEEND = 23 +EVENT_OBJECT_CREATE = 32768 +EVENT_OBJECT_DESTROY = 32769 +EVENT_OBJECT_SHOW = 32770 +EVENT_OBJECT_HIDE = 32771 +EVENT_OBJECT_REORDER = 32772 +EVENT_OBJECT_FOCUS = 32773 +EVENT_OBJECT_SELECTION = 32774 +EVENT_OBJECT_SELECTIONADD = 32775 +EVENT_OBJECT_SELECTIONREMOVE = 32776 +EVENT_OBJECT_SELECTIONWITHIN = 32777 +EVENT_OBJECT_STATECHANGE = 32778 +EVENT_OBJECT_LOCATIONCHANGE = 32779 +EVENT_OBJECT_NAMECHANGE = 32780 +EVENT_OBJECT_DESCRIPTIONCHANGE = 32781 +EVENT_OBJECT_VALUECHANGE = 32782 +EVENT_OBJECT_PARENTCHANGE = 32783 +EVENT_OBJECT_HELPCHANGE = 32784 +EVENT_OBJECT_DEFACTIONCHANGE = 32785 +EVENT_OBJECT_ACCELERATORCHANGE = 32786 +SOUND_SYSTEM_STARTUP = 1 +SOUND_SYSTEM_SHUTDOWN = 2 +SOUND_SYSTEM_BEEP = 3 +SOUND_SYSTEM_ERROR = 4 +SOUND_SYSTEM_QUESTION = 5 +SOUND_SYSTEM_WARNING = 6 +SOUND_SYSTEM_INFORMATION = 7 +SOUND_SYSTEM_MAXIMIZE = 8 +SOUND_SYSTEM_MINIMIZE = 9 +SOUND_SYSTEM_RESTOREUP = 10 +SOUND_SYSTEM_RESTOREDOWN = 11 +SOUND_SYSTEM_APPSTART = 12 +SOUND_SYSTEM_FAULT = 13 +SOUND_SYSTEM_APPEND = 14 +SOUND_SYSTEM_MENUCOMMAND = 15 +SOUND_SYSTEM_MENUPOPUP = 16 +CSOUND_SYSTEM = 16 +ALERT_SYSTEM_INFORMATIONAL = 1 +ALERT_SYSTEM_WARNING = 2 +ALERT_SYSTEM_ERROR = 3 +ALERT_SYSTEM_QUERY = 4 +ALERT_SYSTEM_CRITICAL = 5 +CALERT_SYSTEM = 6 +WINEVENT_OUTOFCONTEXT = 0 +WINEVENT_SKIPOWNTHREAD = 1 +WINEVENT_SKIPOWNPROCESS = 2 +WINEVENT_INCONTEXT = 4 +GUI_CARETBLINKING = 1 +GUI_INMOVESIZE = 2 +GUI_INMENUMODE = 4 +GUI_SYSTEMMENUMODE = 8 +GUI_POPUPMENUMODE = 16 +STATE_SYSTEM_UNAVAILABLE = 1 +STATE_SYSTEM_SELECTED = 2 +STATE_SYSTEM_FOCUSED = 4 +STATE_SYSTEM_PRESSED = 8 +STATE_SYSTEM_CHECKED = 16 +STATE_SYSTEM_MIXED = 32 +STATE_SYSTEM_READONLY = 64 +STATE_SYSTEM_HOTTRACKED = 128 +STATE_SYSTEM_DEFAULT = 256 +STATE_SYSTEM_EXPANDED = 512 +STATE_SYSTEM_COLLAPSED = 1024 +STATE_SYSTEM_BUSY = 2048 +STATE_SYSTEM_FLOATING = 4096 +STATE_SYSTEM_MARQUEED = 8192 +STATE_SYSTEM_ANIMATED = 16384 +STATE_SYSTEM_INVISIBLE = 32768 +STATE_SYSTEM_OFFSCREEN = 65536 +STATE_SYSTEM_SIZEABLE = 131072 
+STATE_SYSTEM_MOVEABLE = 262144 +STATE_SYSTEM_SELFVOICING = 524288 +STATE_SYSTEM_FOCUSABLE = 1048576 +STATE_SYSTEM_SELECTABLE = 2097152 +STATE_SYSTEM_LINKED = 4194304 +STATE_SYSTEM_TRAVERSED = 8388608 +STATE_SYSTEM_MULTISELECTABLE = 16777216 +STATE_SYSTEM_EXTSELECTABLE = 33554432 +STATE_SYSTEM_ALERT_LOW = 67108864 +STATE_SYSTEM_ALERT_MEDIUM = 134217728 +STATE_SYSTEM_ALERT_HIGH = 268435456 +STATE_SYSTEM_VALID = 536870911 +CCHILDREN_TITLEBAR = 5 +CCHILDREN_SCROLLBAR = 5 +CURSOR_SHOWING = 1 +WS_ACTIVECAPTION = 1 +GA_MIC = 1 +GA_PARENT = 1 +GA_ROOT = 2 +GA_ROOTOWNER = 3 +GA_MAC = 4 + +# winuser.h line 1979 +BF_LEFT = 1 +BF_TOP = 2 +BF_RIGHT = 4 +BF_BOTTOM = 8 +BF_TOPLEFT = (BF_TOP | BF_LEFT) +BF_TOPRIGHT = (BF_TOP | BF_RIGHT) +BF_BOTTOMLEFT = (BF_BOTTOM | BF_LEFT) +BF_BOTTOMRIGHT = (BF_BOTTOM | BF_RIGHT) +BF_RECT = (BF_LEFT | BF_TOP | BF_RIGHT | BF_BOTTOM) +BF_DIAGONAL = 16 +BF_DIAGONAL_ENDTOPRIGHT = (BF_DIAGONAL | BF_TOP | BF_RIGHT) +BF_DIAGONAL_ENDTOPLEFT = (BF_DIAGONAL | BF_TOP | BF_LEFT) +BF_DIAGONAL_ENDBOTTOMLEFT = (BF_DIAGONAL | BF_BOTTOM | BF_LEFT) +BF_DIAGONAL_ENDBOTTOMRIGHT = (BF_DIAGONAL | BF_BOTTOM | BF_RIGHT) +BF_MIDDLE = 2048 +BF_SOFT = 4096 +BF_ADJUST = 8192 +BF_FLAT = 16384 +BF_MONO = 32768 +DFC_CAPTION = 1 +DFC_MENU = 2 +DFC_SCROLL = 3 +DFC_BUTTON = 4 +DFC_POPUPMENU = 5 +DFCS_CAPTIONCLOSE = 0 +DFCS_CAPTIONMIN = 1 +DFCS_CAPTIONMAX = 2 +DFCS_CAPTIONRESTORE = 3 +DFCS_CAPTIONHELP = 4 +DFCS_MENUARROW = 0 +DFCS_MENUCHECK = 1 +DFCS_MENUBULLET = 2 +DFCS_MENUARROWRIGHT = 4 +DFCS_SCROLLUP = 0 +DFCS_SCROLLDOWN = 1 +DFCS_SCROLLLEFT = 2 +DFCS_SCROLLRIGHT = 3 +DFCS_SCROLLCOMBOBOX = 5 +DFCS_SCROLLSIZEGRIP = 8 +DFCS_SCROLLSIZEGRIPRIGHT = 16 +DFCS_BUTTONCHECK = 0 +DFCS_BUTTONRADIOIMAGE = 1 +DFCS_BUTTONRADIOMASK = 2 +DFCS_BUTTONRADIO = 4 +DFCS_BUTTON3STATE = 8 +DFCS_BUTTONPUSH = 16 +DFCS_INACTIVE = 256 +DFCS_PUSHED = 512 +DFCS_CHECKED = 1024 +DFCS_TRANSPARENT = 2048 +DFCS_HOT = 4096 +DFCS_ADJUSTRECT = 8192 +DFCS_FLAT = 16384 +DFCS_MONO = 32768 +DC_ACTIVE = 1 +DC_SMALLCAP = 2 +DC_ICON = 4 +DC_TEXT = 8 +DC_INBUTTON = 16 +DC_GRADIENT = 32 +IDANI_OPEN = 1 +IDANI_CLOSE = 2 +IDANI_CAPTION = 3 +CF_TEXT = 1 +CF_BITMAP = 2 +CF_METAFILEPICT = 3 +CF_SYLK = 4 +CF_DIF = 5 +CF_TIFF = 6 +CF_OEMTEXT = 7 +CF_DIB = 8 +CF_PALETTE = 9 +CF_PENDATA = 10 +CF_RIFF = 11 +CF_WAVE = 12 +CF_UNICODETEXT = 13 +CF_ENHMETAFILE = 14 +CF_HDROP = 15 +CF_LOCALE = 16 +CF_DIBV5 = 17 +CF_MAX = 18 +CF_OWNERDISPLAY = 128 +CF_DSPTEXT = 129 +CF_DSPBITMAP = 130 +CF_DSPMETAFILEPICT = 131 +CF_DSPENHMETAFILE = 142 +CF_PRIVATEFIRST = 512 +CF_PRIVATELAST = 767 +CF_GDIOBJFIRST = 768 +CF_GDIOBJLAST = 1023 +FVIRTKEY =1 +FNOINVERT = 2 +FSHIFT = 4 +FCONTROL = 8 +FALT = 16 +WPF_SETMINPOSITION = 1 +WPF_RESTORETOMAXIMIZED = 2 +ODT_MENU = 1 +ODT_LISTBOX = 2 +ODT_COMBOBOX = 3 +ODT_BUTTON = 4 +ODT_STATIC = 5 +ODA_DRAWENTIRE = 1 +ODA_SELECT = 2 +ODA_FOCUS = 4 +ODS_SELECTED = 1 +ODS_GRAYED = 2 +ODS_DISABLED = 4 +ODS_CHECKED = 8 +ODS_FOCUS = 16 +ODS_DEFAULT = 32 +ODS_COMBOBOXEDIT = 4096 +ODS_HOTLIGHT = 64 +ODS_INACTIVE = 128 +PM_NOREMOVE = 0 +PM_REMOVE = 1 +PM_NOYIELD = 2 +MOD_ALT = 1 +MOD_CONTROL = 2 +MOD_SHIFT = 4 +MOD_WIN = 8 +IDHOT_SNAPWINDOW = (-1) +IDHOT_SNAPDESKTOP = (-2) +#EW_RESTARTWINDOWS = 0x0042 +#EW_REBOOTSYSTEM = 0x0043 +#EW_EXITANDEXECAPP = 0x0044 +ENDSESSION_LOGOFF = -2147483648 +EWX_LOGOFF = 0 +EWX_SHUTDOWN = 1 +EWX_REBOOT = 2 +EWX_FORCE = 4 +EWX_POWEROFF = 8 +EWX_FORCEIFHUNG = 16 +BSM_ALLCOMPONENTS = 0 +BSM_VXDS = 1 +BSM_NETDRIVER = 2 +BSM_INSTALLABLEDRIVERS = 4 +BSM_APPLICATIONS = 8 +BSM_ALLDESKTOPS = 16 +BSF_QUERY = 1 
+BSF_IGNORECURRENTTASK = 2 +BSF_FLUSHDISK = 4 +BSF_NOHANG = 8 +BSF_POSTMESSAGE = 16 +BSF_FORCEIFHUNG = 32 +BSF_NOTIMEOUTIFNOTHUNG = 64 +BROADCAST_QUERY_DENY = 1112363332 # Return this value to deny a query. + +DBWF_LPARAMPOINTER = 32768 + +# winuser.h line 3232 +SWP_NOSIZE = 1 +SWP_NOMOVE = 2 +SWP_NOZORDER = 4 +SWP_NOREDRAW = 8 +SWP_NOACTIVATE = 16 +SWP_FRAMECHANGED = 32 +SWP_SHOWWINDOW = 64 +SWP_HIDEWINDOW = 128 +SWP_NOCOPYBITS = 256 +SWP_NOOWNERZORDER = 512 +SWP_NOSENDCHANGING = 1024 +SWP_DRAWFRAME = SWP_FRAMECHANGED +SWP_NOREPOSITION = SWP_NOOWNERZORDER +SWP_DEFERERASE = 8192 +SWP_ASYNCWINDOWPOS = 16384 + +DLGWINDOWEXTRA = 30 +# winuser.h line 4249 +KEYEVENTF_EXTENDEDKEY = 1 +KEYEVENTF_KEYUP = 2 +# if(_WIN32_WINNT >= 0x0500) +KEYEVENTF_UNICODE = 4 +KEYEVENTF_SCANCODE = 8 +# endif /* _WIN32_WINNT >= 0x0500 */ +MOUSEEVENTF_MOVE = 1 +MOUSEEVENTF_LEFTDOWN = 2 +MOUSEEVENTF_LEFTUP = 4 +MOUSEEVENTF_RIGHTDOWN = 8 +MOUSEEVENTF_RIGHTUP = 16 +MOUSEEVENTF_MIDDLEDOWN = 32 +MOUSEEVENTF_MIDDLEUP = 64 +MOUSEEVENTF_XDOWN = 128 +MOUSEEVENTF_XUP = 256 +MOUSEEVENTF_WHEEL = 2048 +# if (_WIN32_WINNT >= 0x0600) +MOUSEEVENTF_HWHEEL = 4096 +# endif +# if(WINVER >= 0x0600) +MOUSEEVENTF_MOVE_NOCOALESCE = 8192 +# endif /* WINVER >= 0x0600 */ +MOUSEEVENTF_VIRTUALDESK = 16384 +MOUSEEVENTF_ABSOLUTE = 32768 +INPUT_MOUSE = 0 +INPUT_KEYBOARD = 1 +INPUT_HARDWARE = 2 +MWMO_WAITALL = 1 +MWMO_ALERTABLE = 2 +MWMO_INPUTAVAILABLE = 4 +QS_KEY = 1 +QS_MOUSEMOVE = 2 +QS_MOUSEBUTTON = 4 +QS_POSTMESSAGE = 8 +QS_TIMER = 16 +QS_PAINT = 32 +QS_SENDMESSAGE = 64 +QS_HOTKEY = 128 +QS_MOUSE = (QS_MOUSEMOVE | \ + QS_MOUSEBUTTON) +QS_INPUT = (QS_MOUSE | \ + QS_KEY) +QS_ALLEVENTS = (QS_INPUT | \ + QS_POSTMESSAGE | \ + QS_TIMER | \ + QS_PAINT | \ + QS_HOTKEY) +QS_ALLINPUT = (QS_INPUT | \ + QS_POSTMESSAGE | \ + QS_TIMER | \ + QS_PAINT | \ + QS_HOTKEY | \ + QS_SENDMESSAGE) + + +IMN_CLOSESTATUSWINDOW = 1 +IMN_OPENSTATUSWINDOW = 2 +IMN_CHANGECANDIDATE = 3 +IMN_CLOSECANDIDATE = 4 +IMN_OPENCANDIDATE = 5 +IMN_SETCONVERSIONMODE = 6 +IMN_SETSENTENCEMODE = 7 +IMN_SETOPENSTATUS = 8 +IMN_SETCANDIDATEPOS = 9 +IMN_SETCOMPOSITIONFONT = 10 +IMN_SETCOMPOSITIONWINDOW = 11 +IMN_SETSTATUSWINDOWPOS = 12 +IMN_GUIDELINE = 13 +IMN_PRIVATE = 14 + +# winuser.h line 8518 +HELP_CONTEXT = 1 +HELP_QUIT = 2 +HELP_INDEX = 3 +HELP_CONTENTS = 3 +HELP_HELPONHELP = 4 +HELP_SETINDEX = 5 +HELP_SETCONTENTS = 5 +HELP_CONTEXTPOPUP = 8 +HELP_FORCEFILE = 9 +HELP_KEY = 257 +HELP_COMMAND = 258 +HELP_PARTIALKEY = 261 +HELP_MULTIKEY = 513 +HELP_SETWINPOS = 515 +HELP_CONTEXTMENU = 10 +HELP_FINDER = 11 +HELP_WM_HELP = 12 +HELP_SETPOPUP_POS = 13 +HELP_TCARD = 32768 +HELP_TCARD_DATA = 16 +HELP_TCARD_OTHER_CALLER = 17 +IDH_NO_HELP = 28440 +IDH_MISSING_CONTEXT = 28441 # Control doesn't have matching help context +IDH_GENERIC_HELP_BUTTON = 28442 # Property sheet help button +IDH_OK = 28443 +IDH_CANCEL = 28444 +IDH_HELP = 28445 +GR_GDIOBJECTS = 0 # Count of GDI objects +GR_USEROBJECTS = 1 # Count of USER objects +# Generated by h2py from \msvcnt\include\wingdi.h +# manually added (missed by generation some how! 
+SRCCOPY = 13369376 # dest = source +SRCPAINT = 15597702 # dest = source OR dest +SRCAND = 8913094 # dest = source AND dest +SRCINVERT = 6684742 # dest = source XOR dest +SRCERASE = 4457256 # dest = source AND (NOT dest ) +NOTSRCCOPY = 3342344 # dest = (NOT source) +NOTSRCERASE = 1114278 # dest = (NOT src) AND (NOT dest) +MERGECOPY = 12583114 # dest = (source AND pattern) +MERGEPAINT = 12255782 # dest = (NOT source) OR dest +PATCOPY = 15728673 # dest = pattern +PATPAINT = 16452105 # dest = DPSnoo +PATINVERT = 5898313 # dest = pattern XOR dest +DSTINVERT = 5570569 # dest = (NOT dest) +BLACKNESS = 66 # dest = BLACK +WHITENESS = 16711778 # dest = WHITE + +# hacked and split manually by mhammond. +R2_BLACK = 1 +R2_NOTMERGEPEN = 2 +R2_MASKNOTPEN = 3 +R2_NOTCOPYPEN = 4 +R2_MASKPENNOT = 5 +R2_NOT = 6 +R2_XORPEN = 7 +R2_NOTMASKPEN = 8 +R2_MASKPEN = 9 +R2_NOTXORPEN = 10 +R2_NOP = 11 +R2_MERGENOTPEN = 12 +R2_COPYPEN = 13 +R2_MERGEPENNOT = 14 +R2_MERGEPEN = 15 +R2_WHITE = 16 +R2_LAST = 16 +GDI_ERROR = (-1) +ERROR = 0 +NULLREGION = 1 +SIMPLEREGION = 2 +COMPLEXREGION = 3 +RGN_ERROR = ERROR +RGN_AND = 1 +RGN_OR = 2 +RGN_XOR = 3 +RGN_DIFF = 4 +RGN_COPY = 5 +RGN_MIN = RGN_AND +RGN_MAX = RGN_COPY + +## Stretching modes used with Get/SetStretchBltMode +BLACKONWHITE = 1 +WHITEONBLACK = 2 +COLORONCOLOR = 3 +HALFTONE = 4 +MAXSTRETCHBLTMODE = 4 +STRETCH_ANDSCANS = BLACKONWHITE +STRETCH_ORSCANS = WHITEONBLACK +STRETCH_DELETESCANS = COLORONCOLOR +STRETCH_HALFTONE = HALFTONE + +ALTERNATE = 1 +WINDING = 2 +POLYFILL_LAST = 2 + +## flags used with SetLayout +LAYOUT_RTL = 1 +LAYOUT_BTT = 2 +LAYOUT_VBH = 4 +LAYOUT_ORIENTATIONMASK = LAYOUT_RTL|LAYOUT_BTT|LAYOUT_VBH +LAYOUT_BITMAPORIENTATIONPRESERVED = 8 + +TA_NOUPDATECP = 0 +TA_UPDATECP = 1 +TA_LEFT = 0 +TA_RIGHT = 2 +TA_CENTER = 6 +TA_TOP = 0 +TA_BOTTOM = 8 +TA_BASELINE = 24 +TA_MASK = (TA_BASELINE+TA_CENTER+TA_UPDATECP) +VTA_BASELINE = TA_BASELINE +VTA_LEFT = TA_BOTTOM +VTA_RIGHT = TA_TOP +VTA_CENTER = TA_CENTER +VTA_BOTTOM = TA_RIGHT +VTA_TOP = TA_LEFT +ETO_GRAYED = 1 +ETO_OPAQUE = 2 +ETO_CLIPPED = 4 +ASPECT_FILTERING = 1 +DCB_RESET = 1 +DCB_ACCUMULATE = 2 +DCB_DIRTY = DCB_ACCUMULATE +DCB_SET = (DCB_RESET | DCB_ACCUMULATE) +DCB_ENABLE = 4 +DCB_DISABLE = 8 +META_SETBKCOLOR = 513 +META_SETBKMODE = 258 +META_SETMAPMODE = 259 +META_SETROP2 = 260 +META_SETRELABS = 261 +META_SETPOLYFILLMODE = 262 +META_SETSTRETCHBLTMODE = 263 +META_SETTEXTCHAREXTRA = 264 +META_SETTEXTCOLOR = 521 +META_SETTEXTJUSTIFICATION = 522 +META_SETWINDOWORG = 523 +META_SETWINDOWEXT = 524 +META_SETVIEWPORTORG = 525 +META_SETVIEWPORTEXT = 526 +META_OFFSETWINDOWORG = 527 +META_SCALEWINDOWEXT = 1040 +META_OFFSETVIEWPORTORG = 529 +META_SCALEVIEWPORTEXT = 1042 +META_LINETO = 531 +META_MOVETO = 532 +META_EXCLUDECLIPRECT = 1045 +META_INTERSECTCLIPRECT = 1046 +META_ARC = 2071 +META_ELLIPSE = 1048 +META_FLOODFILL = 1049 +META_PIE = 2074 +META_RECTANGLE = 1051 +META_ROUNDRECT = 1564 +META_PATBLT = 1565 +META_SAVEDC = 30 +META_SETPIXEL = 1055 +META_OFFSETCLIPRGN = 544 +META_TEXTOUT = 1313 +META_BITBLT = 2338 +META_STRETCHBLT = 2851 +META_POLYGON = 804 +META_POLYLINE = 805 +META_ESCAPE = 1574 +META_RESTOREDC = 295 +META_FILLREGION = 552 +META_FRAMEREGION = 1065 +META_INVERTREGION = 298 +META_PAINTREGION = 299 +META_SELECTCLIPREGION = 300 +META_SELECTOBJECT = 301 +META_SETTEXTALIGN = 302 +META_CHORD = 2096 +META_SETMAPPERFLAGS = 561 +META_EXTTEXTOUT = 2610 +META_SETDIBTODEV = 3379 +META_SELECTPALETTE = 564 +META_REALIZEPALETTE = 53 +META_ANIMATEPALETTE = 1078 +META_SETPALENTRIES = 55 +META_POLYPOLYGON = 1336 
+META_RESIZEPALETTE = 313 +META_DIBBITBLT = 2368 +META_DIBSTRETCHBLT = 2881 +META_DIBCREATEPATTERNBRUSH = 322 +META_STRETCHDIB = 3907 +META_EXTFLOODFILL = 1352 +META_DELETEOBJECT = 496 +META_CREATEPALETTE = 247 +META_CREATEPATTERNBRUSH = 505 +META_CREATEPENINDIRECT = 762 +META_CREATEFONTINDIRECT = 763 +META_CREATEBRUSHINDIRECT = 764 +META_CREATEREGION = 1791 +FILE_BEGIN = 0 +FILE_CURRENT = 1 +FILE_END = 2 +FILE_FLAG_WRITE_THROUGH = -2147483648 +FILE_FLAG_OVERLAPPED = 1073741824 +FILE_FLAG_NO_BUFFERING = 536870912 +FILE_FLAG_RANDOM_ACCESS = 268435456 +FILE_FLAG_SEQUENTIAL_SCAN = 134217728 +FILE_FLAG_DELETE_ON_CLOSE = 67108864 +FILE_FLAG_BACKUP_SEMANTICS = 33554432 +FILE_FLAG_POSIX_SEMANTICS = 16777216 +CREATE_NEW = 1 +CREATE_ALWAYS = 2 +OPEN_EXISTING = 3 +OPEN_ALWAYS = 4 +TRUNCATE_EXISTING = 5 +PIPE_ACCESS_INBOUND = 1 +PIPE_ACCESS_OUTBOUND = 2 +PIPE_ACCESS_DUPLEX = 3 +PIPE_CLIENT_END = 0 +PIPE_SERVER_END = 1 +PIPE_WAIT = 0 +PIPE_NOWAIT = 1 +PIPE_READMODE_BYTE = 0 +PIPE_READMODE_MESSAGE = 2 +PIPE_TYPE_BYTE = 0 +PIPE_TYPE_MESSAGE = 4 +PIPE_UNLIMITED_INSTANCES = 255 +SECURITY_CONTEXT_TRACKING = 262144 +SECURITY_EFFECTIVE_ONLY = 524288 +SECURITY_SQOS_PRESENT = 1048576 +SECURITY_VALID_SQOS_FLAGS = 2031616 +DTR_CONTROL_DISABLE = 0 +DTR_CONTROL_ENABLE = 1 +DTR_CONTROL_HANDSHAKE = 2 +RTS_CONTROL_DISABLE = 0 +RTS_CONTROL_ENABLE = 1 +RTS_CONTROL_HANDSHAKE = 2 +RTS_CONTROL_TOGGLE = 3 +GMEM_FIXED = 0 +GMEM_MOVEABLE = 2 +GMEM_NOCOMPACT = 16 +GMEM_NODISCARD = 32 +GMEM_ZEROINIT = 64 +GMEM_MODIFY = 128 +GMEM_DISCARDABLE = 256 +GMEM_NOT_BANKED = 4096 +GMEM_SHARE = 8192 +GMEM_DDESHARE = 8192 +GMEM_NOTIFY = 16384 +GMEM_LOWER = GMEM_NOT_BANKED +GMEM_VALID_FLAGS = 32626 +GMEM_INVALID_HANDLE = 32768 +GHND = (GMEM_MOVEABLE | GMEM_ZEROINIT) +GPTR = (GMEM_FIXED | GMEM_ZEROINIT) +GMEM_DISCARDED = 16384 +GMEM_LOCKCOUNT = 255 +LMEM_FIXED = 0 +LMEM_MOVEABLE = 2 +LMEM_NOCOMPACT = 16 +LMEM_NODISCARD = 32 +LMEM_ZEROINIT = 64 +LMEM_MODIFY = 128 +LMEM_DISCARDABLE = 3840 +LMEM_VALID_FLAGS = 3954 +LMEM_INVALID_HANDLE = 32768 +LHND = (LMEM_MOVEABLE | LMEM_ZEROINIT) +LPTR = (LMEM_FIXED | LMEM_ZEROINIT) +NONZEROLHND = (LMEM_MOVEABLE) +NONZEROLPTR = (LMEM_FIXED) +LMEM_DISCARDED = 16384 +LMEM_LOCKCOUNT = 255 +DEBUG_PROCESS = 1 +DEBUG_ONLY_THIS_PROCESS = 2 +CREATE_SUSPENDED = 4 +DETACHED_PROCESS = 8 +CREATE_NEW_CONSOLE = 16 +NORMAL_PRIORITY_CLASS = 32 +IDLE_PRIORITY_CLASS = 64 +HIGH_PRIORITY_CLASS = 128 +REALTIME_PRIORITY_CLASS = 256 +CREATE_NEW_PROCESS_GROUP = 512 +CREATE_UNICODE_ENVIRONMENT = 1024 +CREATE_SEPARATE_WOW_VDM = 2048 +CREATE_SHARED_WOW_VDM = 4096 +CREATE_DEFAULT_ERROR_MODE = 67108864 +CREATE_NO_WINDOW = 134217728 +PROFILE_USER = 268435456 +PROFILE_KERNEL = 536870912 +PROFILE_SERVER = 1073741824 +THREAD_BASE_PRIORITY_LOWRT = 15 +THREAD_BASE_PRIORITY_MAX = 2 +THREAD_BASE_PRIORITY_MIN = -2 +THREAD_BASE_PRIORITY_IDLE = -15 +THREAD_PRIORITY_LOWEST = THREAD_BASE_PRIORITY_MIN +THREAD_PRIORITY_BELOW_NORMAL = THREAD_PRIORITY_LOWEST+1 +THREAD_PRIORITY_HIGHEST = THREAD_BASE_PRIORITY_MAX +THREAD_PRIORITY_ABOVE_NORMAL = THREAD_PRIORITY_HIGHEST-1 +THREAD_PRIORITY_ERROR_RETURN = MAXLONG +THREAD_PRIORITY_TIME_CRITICAL = THREAD_BASE_PRIORITY_LOWRT +THREAD_PRIORITY_IDLE = THREAD_BASE_PRIORITY_IDLE +THREAD_PRIORITY_NORMAL = 0 +THREAD_MODE_BACKGROUND_BEGIN = 0x00010000 +THREAD_MODE_BACKGROUND_END = 0x00020000 + +EXCEPTION_DEBUG_EVENT = 1 +CREATE_THREAD_DEBUG_EVENT = 2 +CREATE_PROCESS_DEBUG_EVENT = 3 +EXIT_THREAD_DEBUG_EVENT = 4 +EXIT_PROCESS_DEBUG_EVENT = 5 +LOAD_DLL_DEBUG_EVENT = 6 +UNLOAD_DLL_DEBUG_EVENT = 7 +OUTPUT_DEBUG_STRING_EVENT 
= 8 +RIP_EVENT = 9 +DRIVE_UNKNOWN = 0 +DRIVE_NO_ROOT_DIR = 1 +DRIVE_REMOVABLE = 2 +DRIVE_FIXED = 3 +DRIVE_REMOTE = 4 +DRIVE_CDROM = 5 +DRIVE_RAMDISK = 6 +FILE_TYPE_UNKNOWN = 0 +FILE_TYPE_DISK = 1 +FILE_TYPE_CHAR = 2 +FILE_TYPE_PIPE = 3 +FILE_TYPE_REMOTE = 32768 +NOPARITY = 0 +ODDPARITY = 1 +EVENPARITY = 2 +MARKPARITY = 3 +SPACEPARITY = 4 +ONESTOPBIT = 0 +ONE5STOPBITS = 1 +TWOSTOPBITS = 2 +CBR_110 = 110 +CBR_300 = 300 +CBR_600 = 600 +CBR_1200 = 1200 +CBR_2400 = 2400 +CBR_4800 = 4800 +CBR_9600 = 9600 +CBR_14400 = 14400 +CBR_19200 = 19200 +CBR_38400 = 38400 +CBR_56000 = 56000 +CBR_57600 = 57600 +CBR_115200 = 115200 +CBR_128000 = 128000 +CBR_256000 = 256000 +S_QUEUEEMPTY = 0 +S_THRESHOLD = 1 +S_ALLTHRESHOLD = 2 +S_NORMAL = 0 +S_LEGATO = 1 +S_STACCATO = 2 +NMPWAIT_WAIT_FOREVER = -1 +NMPWAIT_NOWAIT = 1 +NMPWAIT_USE_DEFAULT_WAIT = 0 +OF_READ = 0 +OF_WRITE = 1 +OF_READWRITE = 2 +OF_SHARE_COMPAT = 0 +OF_SHARE_EXCLUSIVE = 16 +OF_SHARE_DENY_WRITE = 32 +OF_SHARE_DENY_READ = 48 +OF_SHARE_DENY_NONE = 64 +OF_PARSE = 256 +OF_DELETE = 512 +OF_VERIFY = 1024 +OF_CANCEL = 2048 +OF_CREATE = 4096 +OF_PROMPT = 8192 +OF_EXIST = 16384 +OF_REOPEN = 32768 +OFS_MAXPATHNAME = 128 +MAXINTATOM = 49152 + +# winbase.h +PROCESS_HEAP_REGION = 1 +PROCESS_HEAP_UNCOMMITTED_RANGE = 2 +PROCESS_HEAP_ENTRY_BUSY = 4 +PROCESS_HEAP_ENTRY_MOVEABLE = 16 +PROCESS_HEAP_ENTRY_DDESHARE = 32 +SCS_32BIT_BINARY = 0 +SCS_DOS_BINARY = 1 +SCS_WOW_BINARY = 2 +SCS_PIF_BINARY = 3 +SCS_POSIX_BINARY = 4 +SCS_OS216_BINARY = 5 +SEM_FAILCRITICALERRORS = 1 +SEM_NOGPFAULTERRORBOX = 2 +SEM_NOALIGNMENTFAULTEXCEPT = 4 +SEM_NOOPENFILEERRORBOX = 32768 +LOCKFILE_FAIL_IMMEDIATELY = 1 +LOCKFILE_EXCLUSIVE_LOCK = 2 +HANDLE_FLAG_INHERIT = 1 +HANDLE_FLAG_PROTECT_FROM_CLOSE = 2 +HINSTANCE_ERROR = 32 +GET_TAPE_MEDIA_INFORMATION = 0 +GET_TAPE_DRIVE_INFORMATION = 1 +SET_TAPE_MEDIA_INFORMATION = 0 +SET_TAPE_DRIVE_INFORMATION = 1 +FORMAT_MESSAGE_ALLOCATE_BUFFER = 256 +FORMAT_MESSAGE_IGNORE_INSERTS = 512 +FORMAT_MESSAGE_FROM_STRING = 1024 +FORMAT_MESSAGE_FROM_HMODULE = 2048 +FORMAT_MESSAGE_FROM_SYSTEM = 4096 +FORMAT_MESSAGE_ARGUMENT_ARRAY = 8192 +FORMAT_MESSAGE_MAX_WIDTH_MASK = 255 +BACKUP_INVALID = 0 +BACKUP_DATA = 1 +BACKUP_EA_DATA = 2 +BACKUP_SECURITY_DATA = 3 +BACKUP_ALTERNATE_DATA = 4 +BACKUP_LINK = 5 +BACKUP_PROPERTY_DATA = 6 +BACKUP_OBJECT_ID = 7 +BACKUP_REPARSE_DATA = 8 +BACKUP_SPARSE_BLOCK = 9 + +STREAM_NORMAL_ATTRIBUTE = 0 +STREAM_MODIFIED_WHEN_READ = 1 +STREAM_CONTAINS_SECURITY = 2 +STREAM_CONTAINS_PROPERTIES = 4 +STARTF_USESHOWWINDOW = 1 +STARTF_USESIZE = 2 +STARTF_USEPOSITION = 4 +STARTF_USECOUNTCHARS = 8 +STARTF_USEFILLATTRIBUTE = 16 +STARTF_FORCEONFEEDBACK = 64 +STARTF_FORCEOFFFEEDBACK = 128 +STARTF_USESTDHANDLES = 256 +STARTF_USEHOTKEY = 512 +SHUTDOWN_NORETRY = 1 +DONT_RESOLVE_DLL_REFERENCES = 1 +LOAD_LIBRARY_AS_DATAFILE = 2 +LOAD_WITH_ALTERED_SEARCH_PATH = 8 +DDD_RAW_TARGET_PATH = 1 +DDD_REMOVE_DEFINITION = 2 +DDD_EXACT_MATCH_ON_REMOVE = 4 +MOVEFILE_REPLACE_EXISTING = 1 +MOVEFILE_COPY_ALLOWED = 2 +MOVEFILE_DELAY_UNTIL_REBOOT = 4 +MAX_COMPUTERNAME_LENGTH = 15 +LOGON32_LOGON_INTERACTIVE = 2 +LOGON32_LOGON_NETWORK = 3 +LOGON32_LOGON_BATCH = 4 +LOGON32_LOGON_SERVICE = 5 +LOGON32_LOGON_UNLOCK = 7 +LOGON32_LOGON_NETWORK_CLEARTEXT = 8 +LOGON32_LOGON_NEW_CREDENTIALS = 9 +LOGON32_PROVIDER_DEFAULT = 0 +LOGON32_PROVIDER_WINNT35 = 1 +LOGON32_PROVIDER_WINNT40 = 2 +LOGON32_PROVIDER_WINNT50 = 3 +VER_PLATFORM_WIN32s = 0 +VER_PLATFORM_WIN32_WINDOWS = 1 +VER_PLATFORM_WIN32_NT = 2 +TC_NORMAL = 0 +TC_HARDERR = 1 +TC_GP_TRAP = 2 +TC_SIGNAL = 3 +AC_LINE_OFFLINE = 0 
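The DRIVE_* codes just defined are the return values of GetDriveType. A minimal sketch, assuming pywin32's win32file module is available:

import win32file

DRIVE_FIXED = 3  # mirrors the constant defined above
# True on a typical machine where C: is a local hard disk
print(win32file.GetDriveType("C:\\") == DRIVE_FIXED)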
+AC_LINE_ONLINE = 1 +AC_LINE_BACKUP_POWER = 2 +AC_LINE_UNKNOWN = 255 +BATTERY_FLAG_HIGH = 1 +BATTERY_FLAG_LOW = 2 +BATTERY_FLAG_CRITICAL = 4 +BATTERY_FLAG_CHARGING = 8 +BATTERY_FLAG_NO_BATTERY = 128 +BATTERY_FLAG_UNKNOWN = 255 +BATTERY_PERCENTAGE_UNKNOWN = 255 +BATTERY_LIFE_UNKNOWN = -1 + +# Generated by h2py from d:\msdev\include\richedit.h +cchTextLimitDefault = 32767 +WM_CONTEXTMENU = 123 +WM_PRINTCLIENT = 792 +EN_MSGFILTER = 1792 +EN_REQUESTRESIZE = 1793 +EN_SELCHANGE = 1794 +EN_DROPFILES = 1795 +EN_PROTECTED = 1796 +EN_CORRECTTEXT = 1797 +EN_STOPNOUNDO = 1798 +EN_IMECHANGE = 1799 +EN_SAVECLIPBOARD = 1800 +EN_OLEOPFAILED = 1801 +ENM_NONE = 0 +ENM_CHANGE = 1 +ENM_UPDATE = 2 +ENM_SCROLL = 4 +ENM_KEYEVENTS = 65536 +ENM_MOUSEEVENTS = 131072 +ENM_REQUESTRESIZE = 262144 +ENM_SELCHANGE = 524288 +ENM_DROPFILES = 1048576 +ENM_PROTECTED = 2097152 +ENM_CORRECTTEXT = 4194304 +ENM_IMECHANGE = 8388608 +ES_SAVESEL = 32768 +ES_SUNKEN = 16384 +ES_DISABLENOSCROLL = 8192 +ES_SELECTIONBAR = 16777216 +ES_EX_NOCALLOLEINIT = 16777216 +ES_VERTICAL = 4194304 +ES_NOIME = 524288 +ES_SELFIME = 262144 +ECO_AUTOWORDSELECTION = 1 +ECO_AUTOVSCROLL = 64 +ECO_AUTOHSCROLL = 128 +ECO_NOHIDESEL = 256 +ECO_READONLY = 2048 +ECO_WANTRETURN = 4096 +ECO_SAVESEL = 32768 +ECO_SELECTIONBAR = 16777216 +ECO_VERTICAL = 4194304 +ECOOP_SET = 1 +ECOOP_OR = 2 +ECOOP_AND = 3 +ECOOP_XOR = 4 +WB_CLASSIFY = 3 +WB_MOVEWORDLEFT = 4 +WB_MOVEWORDRIGHT = 5 +WB_LEFTBREAK = 6 +WB_RIGHTBREAK = 7 +WB_MOVEWORDPREV = 4 +WB_MOVEWORDNEXT = 5 +WB_PREVBREAK = 6 +WB_NEXTBREAK = 7 +PC_FOLLOWING = 1 +PC_LEADING = 2 +PC_OVERFLOW = 3 +PC_DELIMITER = 4 +WBF_WORDWRAP = 16 +WBF_WORDBREAK = 32 +WBF_OVERFLOW = 64 +WBF_LEVEL1 = 128 +WBF_LEVEL2 = 256 +WBF_CUSTOM = 512 +CFM_BOLD = 1 +CFM_ITALIC = 2 +CFM_UNDERLINE = 4 +CFM_STRIKEOUT = 8 +CFM_PROTECTED = 16 +CFM_SIZE = -2147483648 +CFM_COLOR = 1073741824 +CFM_FACE = 536870912 +CFM_OFFSET = 268435456 +CFM_CHARSET = 134217728 +CFE_BOLD = 1 +CFE_ITALIC = 2 +CFE_UNDERLINE = 4 +CFE_STRIKEOUT = 8 +CFE_PROTECTED = 16 +CFE_AUTOCOLOR = 1073741824 +yHeightCharPtsMost = 1638 +SCF_SELECTION = 1 +SCF_WORD = 2 +SF_TEXT = 1 +SF_RTF = 2 +SF_RTFNOOBJS = 3 +SF_TEXTIZED = 4 +SFF_SELECTION = 32768 +SFF_PLAINRTF = 16384 +MAX_TAB_STOPS = 32 +lDefaultTab = 720 +PFM_STARTINDENT = 1 +PFM_RIGHTINDENT = 2 +PFM_OFFSET = 4 +PFM_ALIGNMENT = 8 +PFM_TABSTOPS = 16 +PFM_NUMBERING = 32 +PFM_OFFSETINDENT = -2147483648 +PFN_BULLET = 1 +PFA_LEFT = 1 +PFA_RIGHT = 2 +PFA_CENTER = 3 +WM_NOTIFY = 78 +SEL_EMPTY = 0 +SEL_TEXT = 1 +SEL_OBJECT = 2 +SEL_MULTICHAR = 4 +SEL_MULTIOBJECT = 8 +OLEOP_DOVERB = 1 +CF_RTF = "Rich Text Format" +CF_RTFNOOBJS = "Rich Text Format Without Objects" +CF_RETEXTOBJ = "RichEdit Text and Objects" + +# From wincon.h +RIGHT_ALT_PRESSED = 1 # the right alt key is pressed. +LEFT_ALT_PRESSED = 2 # the left alt key is pressed. +RIGHT_CTRL_PRESSED = 4 # the right ctrl key is pressed. +LEFT_CTRL_PRESSED = 8 # the left ctrl key is pressed. +SHIFT_PRESSED = 16 # the shift key is pressed. +NUMLOCK_ON = 32 # the numlock light is on. +SCROLLLOCK_ON = 64 # the scrolllock light is on. +CAPSLOCK_ON = 128 # the capslock light is on. +ENHANCED_KEY = 256 # the key is enhanced. +NLS_DBCSCHAR = 65536 # DBCS for JPN: SBCS/DBCS mode. +NLS_ALPHANUMERIC = 0 # DBCS for JPN: Alphanumeric mode. +NLS_KATAKANA = 131072 # DBCS for JPN: Katakana mode. +NLS_HIRAGANA = 262144 # DBCS for JPN: Hiragana mode. +NLS_ROMAN = 4194304 # DBCS for JPN: Roman/Noroman mode. +NLS_IME_CONVERSION = 8388608 # DBCS for JPN: IME conversion. 
+NLS_IME_DISABLE = 536870912 # DBCS for JPN: IME enable/disable. + +FROM_LEFT_1ST_BUTTON_PRESSED = 1 +RIGHTMOST_BUTTON_PRESSED = 2 +FROM_LEFT_2ND_BUTTON_PRESSED = 4 +FROM_LEFT_3RD_BUTTON_PRESSED = 8 +FROM_LEFT_4TH_BUTTON_PRESSED = 16 + +CTRL_C_EVENT = 0 +CTRL_BREAK_EVENT = 1 +CTRL_CLOSE_EVENT = 2 +CTRL_LOGOFF_EVENT = 5 +CTRL_SHUTDOWN_EVENT = 6 + +MOUSE_MOVED = 1 +DOUBLE_CLICK = 2 +MOUSE_WHEELED = 4 + +#property sheet window messages from prsht.h +PSM_SETCURSEL = (WM_USER + 101) +PSM_REMOVEPAGE = (WM_USER + 102) +PSM_ADDPAGE = (WM_USER + 103) +PSM_CHANGED = (WM_USER + 104) +PSM_RESTARTWINDOWS = (WM_USER + 105) +PSM_REBOOTSYSTEM = (WM_USER + 106) +PSM_CANCELTOCLOSE = (WM_USER + 107) +PSM_QUERYSIBLINGS = (WM_USER + 108) +PSM_UNCHANGED = (WM_USER + 109) +PSM_APPLY = (WM_USER + 110) +PSM_SETTITLEA = (WM_USER + 111) +PSM_SETTITLEW = (WM_USER + 120) +PSM_SETWIZBUTTONS = (WM_USER + 112) +PSM_PRESSBUTTON = (WM_USER + 113) +PSM_SETCURSELID = (WM_USER + 114) +PSM_SETFINISHTEXTA = (WM_USER + 115) +PSM_SETFINISHTEXTW = (WM_USER + 121) +PSM_GETTABCONTROL = (WM_USER + 116) +PSM_ISDIALOGMESSAGE = (WM_USER + 117) +PSM_GETCURRENTPAGEHWND = (WM_USER + 118) +PSM_INSERTPAGE = (WM_USER + 119) +PSM_SETHEADERTITLEA = (WM_USER + 125) +PSM_SETHEADERTITLEW = (WM_USER + 126) +PSM_SETHEADERSUBTITLEA = (WM_USER + 127) +PSM_SETHEADERSUBTITLEW = (WM_USER + 128) +PSM_HWNDTOINDEX = (WM_USER + 129) +PSM_INDEXTOHWND = (WM_USER + 130) +PSM_PAGETOINDEX = (WM_USER + 131) +PSM_INDEXTOPAGE = (WM_USER + 132) +PSM_IDTOINDEX = (WM_USER + 133) +PSM_INDEXTOID = (WM_USER + 134) +PSM_GETRESULT = (WM_USER + 135) +PSM_RECALCPAGESIZES = (WM_USER + 136) + +# GetUserNameEx/GetComputerNameEx +NameUnknown = 0 +NameFullyQualifiedDN = 1 +NameSamCompatible = 2 +NameDisplay = 3 +NameUniqueId = 6 +NameCanonical = 7 +NameUserPrincipal = 8 +NameCanonicalEx = 9 +NameServicePrincipal = 10 +NameDnsDomain = 12 + +ComputerNameNetBIOS = 0 +ComputerNameDnsHostname = 1 +ComputerNameDnsDomain = 2 +ComputerNameDnsFullyQualified = 3 +ComputerNamePhysicalNetBIOS = 4 +ComputerNamePhysicalDnsHostname = 5 +ComputerNamePhysicalDnsDomain = 6 +ComputerNamePhysicalDnsFullyQualified = 7 + +LWA_COLORKEY = 0x00000001 +LWA_ALPHA = 0x00000002 +ULW_COLORKEY = 0x00000001 +ULW_ALPHA = 0x00000002 +ULW_OPAQUE = 0x00000004 + +# WinDef.h +TRUE = 1 +FALSE = 0 +MAX_PATH = 260 +# WinGDI.h +AC_SRC_OVER = 0 +AC_SRC_ALPHA = 1 +GRADIENT_FILL_RECT_H = 0 +GRADIENT_FILL_RECT_V = 1 +GRADIENT_FILL_TRIANGLE = 2 +GRADIENT_FILL_OP_FLAG = 255 + +## flags used with Get/SetSystemFileCacheSize +MM_WORKING_SET_MAX_HARD_ENABLE = 1 +MM_WORKING_SET_MAX_HARD_DISABLE = 2 +MM_WORKING_SET_MIN_HARD_ENABLE = 4 +MM_WORKING_SET_MIN_HARD_DISABLE = 8 + +## Flags for GetFinalPathNameByHandle +VOLUME_NAME_DOS = 0 +VOLUME_NAME_GUID = 1 +VOLUME_NAME_NT = 2 +VOLUME_NAME_NONE = 4 +FILE_NAME_NORMALIZED = 0 +FILE_NAME_OPENED = 8 + +DEVICE_NOTIFY_WINDOW_HANDLE = 0x00000000 +DEVICE_NOTIFY_SERVICE_HANDLE = 0x00000001 + +# From Dbt.h +# Generated by h2py from Dbt.h +WM_DEVICECHANGE = 0x0219 +BSF_QUERY = 0x00000001 +BSF_IGNORECURRENTTASK = 0x00000002 +BSF_FLUSHDISK = 0x00000004 +BSF_NOHANG = 0x00000008 +BSF_POSTMESSAGE = 0x00000010 +BSF_FORCEIFHUNG = 0x00000020 +BSF_NOTIMEOUTIFNOTHUNG = 0x00000040 +BSF_MSGSRV32ISOK = (-2147483648) +BSF_MSGSRV32ISOK_BIT = 31 +BSM_ALLCOMPONENTS = 0x00000000 +BSM_VXDS = 0x00000001 +BSM_NETDRIVER = 0x00000002 +BSM_INSTALLABLEDRIVERS = 0x00000004 +BSM_APPLICATIONS = 0x00000008 +DBT_APPYBEGIN = 0x0000 +DBT_APPYEND = 0x0001 +DBT_DEVNODES_CHANGED = 0x0007 +DBT_QUERYCHANGECONFIG = 0x0017 
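The Name*/ComputerName* values above correspond to the EXTENDED_NAME_FORMAT and COMPUTER_NAME_FORMAT enumerations expected by GetUserNameEx and GetComputerNameEx. A minimal sketch, assuming pywin32's win32api module:

import win32api

NameSamCompatible = 2  # mirrors the constant defined above
# Prints the current user as DOMAIN\username (or MACHINE\username off-domain)
print(win32api.GetUserNameEx(NameSamCompatible))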
+DBT_CONFIGCHANGED = 0x0018 +DBT_CONFIGCHANGECANCELED = 0x0019 +DBT_MONITORCHANGE = 0x001B +DBT_SHELLLOGGEDON = 0x0020 +DBT_CONFIGMGAPI32 = 0x0022 +DBT_VXDINITCOMPLETE = 0x0023 +DBT_VOLLOCKQUERYLOCK = 0x8041 +DBT_VOLLOCKLOCKTAKEN = 0x8042 +DBT_VOLLOCKLOCKFAILED = 0x8043 +DBT_VOLLOCKQUERYUNLOCK = 0x8044 +DBT_VOLLOCKLOCKRELEASED = 0x8045 +DBT_VOLLOCKUNLOCKFAILED = 0x8046 +LOCKP_ALLOW_WRITES = 0x01 +LOCKP_FAIL_WRITES = 0x00 +LOCKP_FAIL_MEM_MAPPING = 0x02 +LOCKP_ALLOW_MEM_MAPPING = 0x00 +LOCKP_USER_MASK = 0x03 +LOCKP_LOCK_FOR_FORMAT = 0x04 +LOCKF_LOGICAL_LOCK = 0x00 +LOCKF_PHYSICAL_LOCK = 0x01 +DBT_NO_DISK_SPACE = 0x0047 +DBT_LOW_DISK_SPACE = 0x0048 +DBT_CONFIGMGPRIVATE = 0x7FFF +DBT_DEVICEARRIVAL = 0x8000 +DBT_DEVICEQUERYREMOVE = 0x8001 +DBT_DEVICEQUERYREMOVEFAILED = 0x8002 +DBT_DEVICEREMOVEPENDING = 0x8003 +DBT_DEVICEREMOVECOMPLETE = 0x8004 +DBT_DEVICETYPESPECIFIC = 0x8005 +DBT_CUSTOMEVENT = 0x8006 +DBT_DEVTYP_OEM = 0x00000000 +DBT_DEVTYP_DEVNODE = 0x00000001 +DBT_DEVTYP_VOLUME = 0x00000002 +DBT_DEVTYP_PORT = 0x00000003 +DBT_DEVTYP_NET = 0x00000004 +DBT_DEVTYP_DEVICEINTERFACE = 0x00000005 +DBT_DEVTYP_HANDLE = 0x00000006 +DBTF_MEDIA = 0x0001 +DBTF_NET = 0x0002 +DBTF_RESOURCE = 0x00000001 +DBTF_XPORT = 0x00000002 +DBTF_SLOWNET = 0x00000004 +DBT_VPOWERDAPI = 0x8100 +DBT_USERDEFINED = 0xFFFF diff --git a/venv/Lib/site-packages/win32/lib/win32cryptcon.py b/venv/Lib/site-packages/win32/lib/win32cryptcon.py new file mode 100644 index 00000000..7ad7ed81 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32cryptcon.py @@ -0,0 +1,1900 @@ +# Generated by h2py from WinCrypt.h +def GET_ALG_CLASS(x): return (x & (7 << 13)) + +def GET_ALG_TYPE(x): return (x & (15 << 9)) + +def GET_ALG_SID(x): return (x & (511)) + +ALG_CLASS_ANY = (0) +ALG_CLASS_SIGNATURE = (1 << 13) +ALG_CLASS_MSG_ENCRYPT = (2 << 13) +ALG_CLASS_DATA_ENCRYPT = (3 << 13) +ALG_CLASS_HASH = (4 << 13) +ALG_CLASS_KEY_EXCHANGE = (5 << 13) +ALG_CLASS_ALL = (7 << 13) +ALG_TYPE_ANY = (0) +ALG_TYPE_DSS = (1 << 9) +ALG_TYPE_RSA = (2 << 9) +ALG_TYPE_BLOCK = (3 << 9) +ALG_TYPE_STREAM = (4 << 9) +ALG_TYPE_DH = (5 << 9) +ALG_TYPE_SECURECHANNEL = (6 << 9) +ALG_SID_ANY = (0) +ALG_SID_RSA_ANY = 0 +ALG_SID_RSA_PKCS = 1 +ALG_SID_RSA_MSATWORK = 2 +ALG_SID_RSA_ENTRUST = 3 +ALG_SID_RSA_PGP = 4 +ALG_SID_DSS_ANY = 0 +ALG_SID_DSS_PKCS = 1 +ALG_SID_DSS_DMS = 2 +ALG_SID_DES = 1 +ALG_SID_3DES = 3 +ALG_SID_DESX = 4 +ALG_SID_IDEA = 5 +ALG_SID_CAST = 6 +ALG_SID_SAFERSK64 = 7 +ALG_SID_SAFERSK128 = 8 +ALG_SID_3DES_112 = 9 +ALG_SID_CYLINK_MEK = 12 +ALG_SID_RC5 = 13 +ALG_SID_AES_128 = 14 +ALG_SID_AES_192 = 15 +ALG_SID_AES_256 = 16 +ALG_SID_AES = 17 +ALG_SID_SKIPJACK = 10 +ALG_SID_TEK = 11 +CRYPT_MODE_CBCI = 6 +CRYPT_MODE_CFBP = 7 +CRYPT_MODE_OFBP = 8 +CRYPT_MODE_CBCOFM = 9 +CRYPT_MODE_CBCOFMI = 10 +ALG_SID_RC2 = 2 +ALG_SID_RC4 = 1 +ALG_SID_SEAL = 2 +ALG_SID_DH_SANDF = 1 +ALG_SID_DH_EPHEM = 2 +ALG_SID_AGREED_KEY_ANY = 3 +ALG_SID_KEA = 4 +ALG_SID_MD2 = 1 +ALG_SID_MD4 = 2 +ALG_SID_MD5 = 3 +ALG_SID_SHA = 4 +ALG_SID_SHA1 = 4 +ALG_SID_MAC = 5 +ALG_SID_RIPEMD = 6 +ALG_SID_RIPEMD160 = 7 +ALG_SID_SSL3SHAMD5 = 8 +ALG_SID_HMAC = 9 +ALG_SID_TLS1PRF = 10 +ALG_SID_HASH_REPLACE_OWF = 11 +ALG_SID_SHA_256 = 12 +ALG_SID_SHA_384 = 13 +ALG_SID_SHA_512 = 14 +ALG_SID_SSL3_MASTER = 1 +ALG_SID_SCHANNEL_MASTER_HASH = 2 +ALG_SID_SCHANNEL_MAC_KEY = 3 +ALG_SID_PCT1_MASTER = 4 +ALG_SID_SSL2_MASTER = 5 +ALG_SID_TLS1_MASTER = 6 +ALG_SID_SCHANNEL_ENC_KEY = 7 +ALG_SID_EXAMPLE = 80 +CALG_MD2 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD2) +CALG_MD4 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD4) 
+CALG_MD5 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD5) +CALG_SHA = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA) +CALG_SHA1 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA1) +CALG_MAC = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MAC) +CALG_RSA_SIGN = (ALG_CLASS_SIGNATURE | ALG_TYPE_RSA | ALG_SID_RSA_ANY) +CALG_DSS_SIGN = (ALG_CLASS_SIGNATURE | ALG_TYPE_DSS | ALG_SID_DSS_ANY) +CALG_NO_SIGN = (ALG_CLASS_SIGNATURE | ALG_TYPE_ANY | ALG_SID_ANY) +CALG_RSA_KEYX = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_RSA|ALG_SID_RSA_ANY) +CALG_DES = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_DES) +CALG_3DES_112 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_3DES_112) +CALG_3DES = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_3DES) +CALG_DESX = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_DESX) +CALG_RC2 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_RC2) +CALG_RC4 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_STREAM|ALG_SID_RC4) +CALG_SEAL = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_STREAM|ALG_SID_SEAL) +CALG_DH_SF = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_DH_SANDF) +CALG_DH_EPHEM = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_DH_EPHEM) +CALG_AGREEDKEY_ANY = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_AGREED_KEY_ANY) +CALG_KEA_KEYX = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_KEA) +CALG_HUGHES_MD5 = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_ANY|ALG_SID_MD5) +CALG_SKIPJACK = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_SKIPJACK) +CALG_TEK = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_TEK) +CALG_CYLINK_MEK = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_CYLINK_MEK) +CALG_SSL3_SHAMD5 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SSL3SHAMD5) +CALG_SSL3_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SSL3_MASTER) +CALG_SCHANNEL_MASTER_HASH = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SCHANNEL_MASTER_HASH) +CALG_SCHANNEL_MAC_KEY = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SCHANNEL_MAC_KEY) +CALG_SCHANNEL_ENC_KEY = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SCHANNEL_ENC_KEY) +CALG_PCT1_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_PCT1_MASTER) +CALG_SSL2_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SSL2_MASTER) +CALG_TLS1_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_TLS1_MASTER) +CALG_RC5 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_RC5) +CALG_HMAC = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_HMAC) +CALG_TLS1PRF = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_TLS1PRF) +CALG_HASH_REPLACE_OWF = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_HASH_REPLACE_OWF) +CALG_AES_128 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES_128) +CALG_AES_192 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES_192) +CALG_AES_256 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES_256) +CALG_AES = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES) +CALG_SHA_256 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_256) +CALG_SHA_384 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_384) +CALG_SHA_512 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_512) +CRYPT_VERIFYCONTEXT = (-268435456) +CRYPT_NEWKEYSET = 0x00000008 +CRYPT_DELETEKEYSET = 0x00000010 +CRYPT_MACHINE_KEYSET = 0x00000020 +CRYPT_SILENT = 0x00000040 +CRYPT_EXPORTABLE = 0x00000001 +CRYPT_USER_PROTECTED = 0x00000002 +CRYPT_CREATE_SALT = 0x00000004 +CRYPT_UPDATE_KEY = 0x00000008 +CRYPT_NO_SALT = 0x00000010 +CRYPT_PREGEN = 0x00000040 +CRYPT_RECIPIENT = 0x00000010 +CRYPT_INITIATOR = 0x00000040 +CRYPT_ONLINE = 0x00000080 +CRYPT_SF = 0x00000100 +CRYPT_CREATE_IV = 0x00000200 +CRYPT_KEK = 0x00000400 +CRYPT_DATA_KEY = 
0x00000800 +CRYPT_VOLATILE = 0x00001000 +CRYPT_SGCKEY = 0x00002000 +CRYPT_ARCHIVABLE = 0x00004000 +RSA1024BIT_KEY = 0x04000000 +CRYPT_SERVER = 0x00000400 +KEY_LENGTH_MASK = (-65536) +CRYPT_Y_ONLY = 0x00000001 +CRYPT_SSL2_FALLBACK = 0x00000002 +CRYPT_DESTROYKEY = 0x00000004 +CRYPT_OAEP = 0x00000040 +CRYPT_BLOB_VER3 = 0x00000080 +CRYPT_IPSEC_HMAC_KEY = 0x00000100 +CRYPT_DECRYPT_RSA_NO_PADDING_CHECK = 0x00000020 +CRYPT_SECRETDIGEST = 0x00000001 +CRYPT_OWF_REPL_LM_HASH = 0x00000001 +CRYPT_LITTLE_ENDIAN = 0x00000001 +CRYPT_NOHASHOID = 0x00000001 +CRYPT_TYPE2_FORMAT = 0x00000002 +CRYPT_X931_FORMAT = 0x00000004 +CRYPT_MACHINE_DEFAULT = 0x00000001 +CRYPT_USER_DEFAULT = 0x00000002 +CRYPT_DELETE_DEFAULT = 0x00000004 +SIMPLEBLOB = 0x1 +PUBLICKEYBLOB = 0x6 +PRIVATEKEYBLOB = 0x7 +PLAINTEXTKEYBLOB = 0x8 +OPAQUEKEYBLOB = 0x9 +PUBLICKEYBLOBEX = 0xA +SYMMETRICWRAPKEYBLOB = 0xB +AT_KEYEXCHANGE = 1 +AT_SIGNATURE = 2 +CRYPT_USERDATA = 1 +KP_IV = 1 +KP_SALT = 2 +KP_PADDING = 3 +KP_MODE = 4 +KP_MODE_BITS = 5 +KP_PERMISSIONS = 6 +KP_ALGID = 7 +KP_BLOCKLEN = 8 +KP_KEYLEN = 9 +KP_SALT_EX = 10 +KP_P = 11 +KP_G = 12 +KP_Q = 13 +KP_X = 14 +KP_Y = 15 +KP_RA = 16 +KP_RB = 17 +KP_INFO = 18 +KP_EFFECTIVE_KEYLEN = 19 +KP_SCHANNEL_ALG = 20 +KP_CLIENT_RANDOM = 21 +KP_SERVER_RANDOM = 22 +KP_RP = 23 +KP_PRECOMP_MD5 = 24 +KP_PRECOMP_SHA = 25 +KP_CERTIFICATE = 26 +KP_CLEAR_KEY = 27 +KP_PUB_EX_LEN = 28 +KP_PUB_EX_VAL = 29 +KP_KEYVAL = 30 +KP_ADMIN_PIN = 31 +KP_KEYEXCHANGE_PIN = 32 +KP_SIGNATURE_PIN = 33 +KP_PREHASH = 34 +KP_ROUNDS = 35 +KP_OAEP_PARAMS = 36 +KP_CMS_KEY_INFO = 37 +KP_CMS_DH_KEY_INFO = 38 +KP_PUB_PARAMS = 39 +KP_VERIFY_PARAMS = 40 +KP_HIGHEST_VERSION = 41 +KP_GET_USE_COUNT = 42 +PKCS5_PADDING = 1 +RANDOM_PADDING = 2 +ZERO_PADDING = 3 +CRYPT_MODE_CBC = 1 +CRYPT_MODE_ECB = 2 +CRYPT_MODE_OFB = 3 +CRYPT_MODE_CFB = 4 +CRYPT_MODE_CTS = 5 +CRYPT_ENCRYPT = 0x0001 +CRYPT_DECRYPT = 0x0002 +CRYPT_EXPORT = 0x0004 +CRYPT_READ = 0x0008 +CRYPT_WRITE = 0x0010 +CRYPT_MAC = 0x0020 +CRYPT_EXPORT_KEY = 0x0040 +CRYPT_IMPORT_KEY = 0x0080 +CRYPT_ARCHIVE = 0x0100 +HP_ALGID = 0x0001 +HP_HASHVAL = 0x0002 +HP_HASHSIZE = 0x0004 +HP_HMAC_INFO = 0x0005 +HP_TLS1PRF_LABEL = 0x0006 +HP_TLS1PRF_SEED = 0x0007 + +CRYPT_FAILED = 0 +CRYPT_SUCCEED = 1 +def RCRYPT_SUCCEEDED(rt): return ((rt) == CRYPT_SUCCEED) +def RCRYPT_FAILED(rt): return ((rt) == CRYPT_FAILED) + +PP_ENUMALGS = 1 +PP_ENUMCONTAINERS = 2 +PP_IMPTYPE = 3 +PP_NAME = 4 +PP_VERSION = 5 +PP_CONTAINER = 6 +PP_CHANGE_PASSWORD = 7 +PP_KEYSET_SEC_DESCR = 8 +PP_CERTCHAIN = 9 +PP_KEY_TYPE_SUBTYPE = 10 +PP_PROVTYPE = 16 +PP_KEYSTORAGE = 17 +PP_APPLI_CERT = 18 +PP_SYM_KEYSIZE = 19 +PP_SESSION_KEYSIZE = 20 +PP_UI_PROMPT = 21 +PP_ENUMALGS_EX = 22 +PP_ENUMMANDROOTS = 25 +PP_ENUMELECTROOTS = 26 +PP_KEYSET_TYPE = 27 +PP_ADMIN_PIN = 31 +PP_KEYEXCHANGE_PIN = 32 +PP_SIGNATURE_PIN = 33 +PP_SIG_KEYSIZE_INC = 34 +PP_KEYX_KEYSIZE_INC = 35 +PP_UNIQUE_CONTAINER = 36 +PP_SGC_INFO = 37 +PP_USE_HARDWARE_RNG = 38 +PP_KEYSPEC = 39 +PP_ENUMEX_SIGNING_PROT = 40 +PP_CRYPT_COUNT_KEY_USE = 41 +CRYPT_FIRST = 1 +CRYPT_NEXT = 2 +CRYPT_SGC_ENUM = 4 +CRYPT_IMPL_HARDWARE = 1 +CRYPT_IMPL_SOFTWARE = 2 +CRYPT_IMPL_MIXED = 3 +CRYPT_IMPL_UNKNOWN = 4 +CRYPT_IMPL_REMOVABLE = 8 +CRYPT_SEC_DESCR = 0x00000001 +CRYPT_PSTORE = 0x00000002 +CRYPT_UI_PROMPT = 0x00000004 +CRYPT_FLAG_PCT1 = 0x0001 +CRYPT_FLAG_SSL2 = 0x0002 +CRYPT_FLAG_SSL3 = 0x0004 +CRYPT_FLAG_TLS1 = 0x0008 +CRYPT_FLAG_IPSEC = 0x0010 +CRYPT_FLAG_SIGNING = 0x0020 +CRYPT_SGC = 0x0001 +CRYPT_FASTSGC = 0x0002 +PP_CLIENT_HWND = 1 +PP_CONTEXT_INFO = 11 +PP_KEYEXCHANGE_KEYSIZE = 12 
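An ALG_ID packs an algorithm class, type and SID into a single integer, which is why the CALG_* names above are OR-compositions and why GET_ALG_CLASS/GET_ALG_TYPE/GET_ALG_SID are plain bit masks. A self-contained check that mirrors the definitions above:

ALG_CLASS_DATA_ENCRYPT = 3 << 13
ALG_TYPE_BLOCK = 3 << 9
ALG_SID_AES_256 = 16

CALG_AES_256 = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_AES_256  # 0x6610
assert CALG_AES_256 & (7 << 13) == ALG_CLASS_DATA_ENCRYPT  # GET_ALG_CLASS
assert CALG_AES_256 & (15 << 9) == ALG_TYPE_BLOCK          # GET_ALG_TYPE
assert CALG_AES_256 & 511 == ALG_SID_AES_256               # GET_ALG_SID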
+PP_SIGNATURE_KEYSIZE = 13 +PP_KEYEXCHANGE_ALG = 14 +PP_SIGNATURE_ALG = 15 +PP_DELETEKEY = 24 +PROV_RSA_FULL = 1 +PROV_RSA_SIG = 2 +PROV_DSS = 3 +PROV_FORTEZZA = 4 +PROV_MS_EXCHANGE = 5 +PROV_SSL = 6 +PROV_RSA_SCHANNEL = 12 +PROV_DSS_DH = 13 +PROV_EC_ECDSA_SIG = 14 +PROV_EC_ECNRA_SIG = 15 +PROV_EC_ECDSA_FULL = 16 +PROV_EC_ECNRA_FULL = 17 +PROV_DH_SCHANNEL = 18 +PROV_SPYRUS_LYNKS = 20 +PROV_RNG = 21 +PROV_INTEL_SEC = 22 +PROV_REPLACE_OWF = 23 +PROV_RSA_AES = 24 +MS_DEF_PROV_A = "Microsoft Base Cryptographic Provider v1.0" +MS_DEF_PROV = MS_DEF_PROV_A +MS_ENHANCED_PROV_A = "Microsoft Enhanced Cryptographic Provider v1.0" +MS_ENHANCED_PROV = MS_ENHANCED_PROV_A +MS_STRONG_PROV_A = "Microsoft Strong Cryptographic Provider" +MS_STRONG_PROV = MS_STRONG_PROV_A +MS_DEF_RSA_SIG_PROV_A = "Microsoft RSA Signature Cryptographic Provider" +MS_DEF_RSA_SIG_PROV = MS_DEF_RSA_SIG_PROV_A +MS_DEF_RSA_SCHANNEL_PROV_A = "Microsoft RSA SChannel Cryptographic Provider" +MS_DEF_RSA_SCHANNEL_PROV = MS_DEF_RSA_SCHANNEL_PROV_A +MS_DEF_DSS_PROV_A = "Microsoft Base DSS Cryptographic Provider" +MS_DEF_DSS_PROV = MS_DEF_DSS_PROV_A +MS_DEF_DSS_DH_PROV_A = "Microsoft Base DSS and Diffie-Hellman Cryptographic Provider" +MS_DEF_DSS_DH_PROV = MS_DEF_DSS_DH_PROV_A +MS_ENH_DSS_DH_PROV_A = "Microsoft Enhanced DSS and Diffie-Hellman Cryptographic Provider" +MS_ENH_DSS_DH_PROV = MS_ENH_DSS_DH_PROV_A +MS_DEF_DH_SCHANNEL_PROV_A = "Microsoft DH SChannel Cryptographic Provider" +MS_DEF_DH_SCHANNEL_PROV = MS_DEF_DH_SCHANNEL_PROV_A +MS_SCARD_PROV_A = "Microsoft Base Smart Card Crypto Provider" +MS_SCARD_PROV = MS_SCARD_PROV_A +MS_ENH_RSA_AES_PROV_A = "Microsoft Enhanced RSA and AES Cryptographic Provider" +MS_ENH_RSA_AES_PROV = MS_ENH_RSA_AES_PROV_A +MAXUIDLEN = 64 +EXPO_OFFLOAD_REG_VALUE = "ExpoOffload" +EXPO_OFFLOAD_FUNC_NAME = "OffloadModExpo" +szKEY_CRYPTOAPI_PRIVATE_KEY_OPTIONS = \ + "Software\\Policies\\Microsoft\\Cryptography" +szFORCE_KEY_PROTECTION = "ForceKeyProtection" +dwFORCE_KEY_PROTECTION_DISABLED = 0x0 +dwFORCE_KEY_PROTECTION_USER_SELECT = 0x1 +dwFORCE_KEY_PROTECTION_HIGH = 0x2 +szKEY_CACHE_ENABLED = "CachePrivateKeys" +szKEY_CACHE_SECONDS = "PrivateKeyLifetimeSeconds" +CUR_BLOB_VERSION = 2 +SCHANNEL_MAC_KEY = 0x00000000 +SCHANNEL_ENC_KEY = 0x00000001 +INTERNATIONAL_USAGE = 0x00000001 +szOID_RSA = "1.2.840.113549" +szOID_PKCS = "1.2.840.113549.1" +szOID_RSA_HASH = "1.2.840.113549.2" +szOID_RSA_ENCRYPT = "1.2.840.113549.3" +szOID_PKCS_1 = "1.2.840.113549.1.1" +szOID_PKCS_2 = "1.2.840.113549.1.2" +szOID_PKCS_3 = "1.2.840.113549.1.3" +szOID_PKCS_4 = "1.2.840.113549.1.4" +szOID_PKCS_5 = "1.2.840.113549.1.5" +szOID_PKCS_6 = "1.2.840.113549.1.6" +szOID_PKCS_7 = "1.2.840.113549.1.7" +szOID_PKCS_8 = "1.2.840.113549.1.8" +szOID_PKCS_9 = "1.2.840.113549.1.9" +szOID_PKCS_10 = "1.2.840.113549.1.10" +szOID_PKCS_12 = "1.2.840.113549.1.12" +szOID_RSA_RSA = "1.2.840.113549.1.1.1" +szOID_RSA_MD2RSA = "1.2.840.113549.1.1.2" +szOID_RSA_MD4RSA = "1.2.840.113549.1.1.3" +szOID_RSA_MD5RSA = "1.2.840.113549.1.1.4" +szOID_RSA_SHA1RSA = "1.2.840.113549.1.1.5" +szOID_RSA_SETOAEP_RSA = "1.2.840.113549.1.1.6" +szOID_RSA_DH = "1.2.840.113549.1.3.1" +szOID_RSA_data = "1.2.840.113549.1.7.1" +szOID_RSA_signedData = "1.2.840.113549.1.7.2" +szOID_RSA_envelopedData = "1.2.840.113549.1.7.3" +szOID_RSA_signEnvData = "1.2.840.113549.1.7.4" +szOID_RSA_digestedData = "1.2.840.113549.1.7.5" +szOID_RSA_hashedData = "1.2.840.113549.1.7.5" +szOID_RSA_encryptedData = "1.2.840.113549.1.7.6" +szOID_RSA_emailAddr = "1.2.840.113549.1.9.1" +szOID_RSA_unstructName = 
"1.2.840.113549.1.9.2" +szOID_RSA_contentType = "1.2.840.113549.1.9.3" +szOID_RSA_messageDigest = "1.2.840.113549.1.9.4" +szOID_RSA_signingTime = "1.2.840.113549.1.9.5" +szOID_RSA_counterSign = "1.2.840.113549.1.9.6" +szOID_RSA_challengePwd = "1.2.840.113549.1.9.7" +szOID_RSA_unstructAddr = "1.2.840.113549.1.9.8" +szOID_RSA_extCertAttrs = "1.2.840.113549.1.9.9" +szOID_RSA_certExtensions = "1.2.840.113549.1.9.14" +szOID_RSA_SMIMECapabilities = "1.2.840.113549.1.9.15" +szOID_RSA_preferSignedData = "1.2.840.113549.1.9.15.1" +szOID_RSA_SMIMEalg = "1.2.840.113549.1.9.16.3" +szOID_RSA_SMIMEalgESDH = "1.2.840.113549.1.9.16.3.5" +szOID_RSA_SMIMEalgCMS3DESwrap = "1.2.840.113549.1.9.16.3.6" +szOID_RSA_SMIMEalgCMSRC2wrap = "1.2.840.113549.1.9.16.3.7" +szOID_RSA_MD2 = "1.2.840.113549.2.2" +szOID_RSA_MD4 = "1.2.840.113549.2.4" +szOID_RSA_MD5 = "1.2.840.113549.2.5" +szOID_RSA_RC2CBC = "1.2.840.113549.3.2" +szOID_RSA_RC4 = "1.2.840.113549.3.4" +szOID_RSA_DES_EDE3_CBC = "1.2.840.113549.3.7" +szOID_RSA_RC5_CBCPad = "1.2.840.113549.3.9" +szOID_ANSI_X942 = "1.2.840.10046" +szOID_ANSI_X942_DH = "1.2.840.10046.2.1" +szOID_X957 = "1.2.840.10040" +szOID_X957_DSA = "1.2.840.10040.4.1" +szOID_X957_SHA1DSA = "1.2.840.10040.4.3" +szOID_DS = "2.5" +szOID_DSALG = "2.5.8" +szOID_DSALG_CRPT = "2.5.8.1" +szOID_DSALG_HASH = "2.5.8.2" +szOID_DSALG_SIGN = "2.5.8.3" +szOID_DSALG_RSA = "2.5.8.1.1" +szOID_OIW = "1.3.14" +szOID_OIWSEC = "1.3.14.3.2" +szOID_OIWSEC_md4RSA = "1.3.14.3.2.2" +szOID_OIWSEC_md5RSA = "1.3.14.3.2.3" +szOID_OIWSEC_md4RSA2 = "1.3.14.3.2.4" +szOID_OIWSEC_desECB = "1.3.14.3.2.6" +szOID_OIWSEC_desCBC = "1.3.14.3.2.7" +szOID_OIWSEC_desOFB = "1.3.14.3.2.8" +szOID_OIWSEC_desCFB = "1.3.14.3.2.9" +szOID_OIWSEC_desMAC = "1.3.14.3.2.10" +szOID_OIWSEC_rsaSign = "1.3.14.3.2.11" +szOID_OIWSEC_dsa = "1.3.14.3.2.12" +szOID_OIWSEC_shaDSA = "1.3.14.3.2.13" +szOID_OIWSEC_mdc2RSA = "1.3.14.3.2.14" +szOID_OIWSEC_shaRSA = "1.3.14.3.2.15" +szOID_OIWSEC_dhCommMod = "1.3.14.3.2.16" +szOID_OIWSEC_desEDE = "1.3.14.3.2.17" +szOID_OIWSEC_sha = "1.3.14.3.2.18" +szOID_OIWSEC_mdc2 = "1.3.14.3.2.19" +szOID_OIWSEC_dsaComm = "1.3.14.3.2.20" +szOID_OIWSEC_dsaCommSHA = "1.3.14.3.2.21" +szOID_OIWSEC_rsaXchg = "1.3.14.3.2.22" +szOID_OIWSEC_keyHashSeal = "1.3.14.3.2.23" +szOID_OIWSEC_md2RSASign = "1.3.14.3.2.24" +szOID_OIWSEC_md5RSASign = "1.3.14.3.2.25" +szOID_OIWSEC_sha1 = "1.3.14.3.2.26" +szOID_OIWSEC_dsaSHA1 = "1.3.14.3.2.27" +szOID_OIWSEC_dsaCommSHA1 = "1.3.14.3.2.28" +szOID_OIWSEC_sha1RSASign = "1.3.14.3.2.29" +szOID_OIWDIR = "1.3.14.7.2" +szOID_OIWDIR_CRPT = "1.3.14.7.2.1" +szOID_OIWDIR_HASH = "1.3.14.7.2.2" +szOID_OIWDIR_SIGN = "1.3.14.7.2.3" +szOID_OIWDIR_md2 = "1.3.14.7.2.2.1" +szOID_OIWDIR_md2RSA = "1.3.14.7.2.3.1" +szOID_INFOSEC = "2.16.840.1.101.2.1" +szOID_INFOSEC_sdnsSignature = "2.16.840.1.101.2.1.1.1" +szOID_INFOSEC_mosaicSignature = "2.16.840.1.101.2.1.1.2" +szOID_INFOSEC_sdnsConfidentiality = "2.16.840.1.101.2.1.1.3" +szOID_INFOSEC_mosaicConfidentiality = "2.16.840.1.101.2.1.1.4" +szOID_INFOSEC_sdnsIntegrity = "2.16.840.1.101.2.1.1.5" +szOID_INFOSEC_mosaicIntegrity = "2.16.840.1.101.2.1.1.6" +szOID_INFOSEC_sdnsTokenProtection = "2.16.840.1.101.2.1.1.7" +szOID_INFOSEC_mosaicTokenProtection = "2.16.840.1.101.2.1.1.8" +szOID_INFOSEC_sdnsKeyManagement = "2.16.840.1.101.2.1.1.9" +szOID_INFOSEC_mosaicKeyManagement = "2.16.840.1.101.2.1.1.10" +szOID_INFOSEC_sdnsKMandSig = "2.16.840.1.101.2.1.1.11" +szOID_INFOSEC_mosaicKMandSig = "2.16.840.1.101.2.1.1.12" +szOID_INFOSEC_SuiteASignature = "2.16.840.1.101.2.1.1.13" 
+szOID_INFOSEC_SuiteAConfidentiality = "2.16.840.1.101.2.1.1.14" +szOID_INFOSEC_SuiteAIntegrity = "2.16.840.1.101.2.1.1.15" +szOID_INFOSEC_SuiteATokenProtection = "2.16.840.1.101.2.1.1.16" +szOID_INFOSEC_SuiteAKeyManagement = "2.16.840.1.101.2.1.1.17" +szOID_INFOSEC_SuiteAKMandSig = "2.16.840.1.101.2.1.1.18" +szOID_INFOSEC_mosaicUpdatedSig = "2.16.840.1.101.2.1.1.19" +szOID_INFOSEC_mosaicKMandUpdSig = "2.16.840.1.101.2.1.1.20" +szOID_INFOSEC_mosaicUpdatedInteg = "2.16.840.1.101.2.1.1.21" +szOID_COMMON_NAME = "2.5.4.3" +szOID_SUR_NAME = "2.5.4.4" +szOID_DEVICE_SERIAL_NUMBER = "2.5.4.5" +szOID_COUNTRY_NAME = "2.5.4.6" +szOID_LOCALITY_NAME = "2.5.4.7" +szOID_STATE_OR_PROVINCE_NAME = "2.5.4.8" +szOID_STREET_ADDRESS = "2.5.4.9" +szOID_ORGANIZATION_NAME = "2.5.4.10" +szOID_ORGANIZATIONAL_UNIT_NAME = "2.5.4.11" +szOID_TITLE = "2.5.4.12" +szOID_DESCRIPTION = "2.5.4.13" +szOID_SEARCH_GUIDE = "2.5.4.14" +szOID_BUSINESS_CATEGORY = "2.5.4.15" +szOID_POSTAL_ADDRESS = "2.5.4.16" +szOID_POSTAL_CODE = "2.5.4.17" +szOID_POST_OFFICE_BOX = "2.5.4.18" +szOID_PHYSICAL_DELIVERY_OFFICE_NAME = "2.5.4.19" +szOID_TELEPHONE_NUMBER = "2.5.4.20" +szOID_TELEX_NUMBER = "2.5.4.21" +szOID_TELETEXT_TERMINAL_IDENTIFIER = "2.5.4.22" +szOID_FACSIMILE_TELEPHONE_NUMBER = "2.5.4.23" +szOID_X21_ADDRESS = "2.5.4.24" +szOID_INTERNATIONAL_ISDN_NUMBER = "2.5.4.25" +szOID_REGISTERED_ADDRESS = "2.5.4.26" +szOID_DESTINATION_INDICATOR = "2.5.4.27" +szOID_PREFERRED_DELIVERY_METHOD = "2.5.4.28" +szOID_PRESENTATION_ADDRESS = "2.5.4.29" +szOID_SUPPORTED_APPLICATION_CONTEXT = "2.5.4.30" +szOID_MEMBER = "2.5.4.31" +szOID_OWNER = "2.5.4.32" +szOID_ROLE_OCCUPANT = "2.5.4.33" +szOID_SEE_ALSO = "2.5.4.34" +szOID_USER_PASSWORD = "2.5.4.35" +szOID_USER_CERTIFICATE = "2.5.4.36" +szOID_CA_CERTIFICATE = "2.5.4.37" +szOID_AUTHORITY_REVOCATION_LIST = "2.5.4.38" +szOID_CERTIFICATE_REVOCATION_LIST = "2.5.4.39" +szOID_CROSS_CERTIFICATE_PAIR = "2.5.4.40" +szOID_GIVEN_NAME = "2.5.4.42" +szOID_INITIALS = "2.5.4.43" +szOID_DN_QUALIFIER = "2.5.4.46" +szOID_DOMAIN_COMPONENT = "0.9.2342.19200300.100.1.25" +szOID_PKCS_12_FRIENDLY_NAME_ATTR = "1.2.840.113549.1.9.20" +szOID_PKCS_12_LOCAL_KEY_ID = "1.2.840.113549.1.9.21" +szOID_PKCS_12_KEY_PROVIDER_NAME_ATTR = "1.3.6.1.4.1.311.17.1" +szOID_LOCAL_MACHINE_KEYSET = "1.3.6.1.4.1.311.17.2" +szOID_KEYID_RDN = "1.3.6.1.4.1.311.10.7.1" +CERT_RDN_ANY_TYPE = 0 +CERT_RDN_ENCODED_BLOB = 1 +CERT_RDN_OCTET_STRING = 2 +CERT_RDN_NUMERIC_STRING = 3 +CERT_RDN_PRINTABLE_STRING = 4 +CERT_RDN_TELETEX_STRING = 5 +CERT_RDN_T61_STRING = 5 +CERT_RDN_VIDEOTEX_STRING = 6 +CERT_RDN_IA5_STRING = 7 +CERT_RDN_GRAPHIC_STRING = 8 +CERT_RDN_VISIBLE_STRING = 9 +CERT_RDN_ISO646_STRING = 9 +CERT_RDN_GENERAL_STRING = 10 +CERT_RDN_UNIVERSAL_STRING = 11 +CERT_RDN_INT4_STRING = 11 +CERT_RDN_BMP_STRING = 12 +CERT_RDN_UNICODE_STRING = 12 +CERT_RDN_UTF8_STRING = 13 +CERT_RDN_TYPE_MASK = 0x000000FF +CERT_RDN_FLAGS_MASK = (-16777216) +CERT_RDN_ENABLE_T61_UNICODE_FLAG = (-2147483648) +CERT_RDN_ENABLE_UTF8_UNICODE_FLAG = 0x20000000 +CERT_RDN_DISABLE_CHECK_TYPE_FLAG = 0x40000000 +CERT_RDN_DISABLE_IE4_UTF8_FLAG = 0x01000000 +CERT_RSA_PUBLIC_KEY_OBJID = szOID_RSA_RSA +CERT_DEFAULT_OID_PUBLIC_KEY_SIGN = szOID_RSA_RSA +CERT_DEFAULT_OID_PUBLIC_KEY_XCHG = szOID_RSA_RSA +CERT_V1 = 0 +CERT_V2 = 1 +CERT_V3 = 2 +CERT_INFO_VERSION_FLAG = 1 +CERT_INFO_SERIAL_NUMBER_FLAG = 2 +CERT_INFO_SIGNATURE_ALGORITHM_FLAG = 3 +CERT_INFO_ISSUER_FLAG = 4 +CERT_INFO_NOT_BEFORE_FLAG = 5 +CERT_INFO_NOT_AFTER_FLAG = 6 +CERT_INFO_SUBJECT_FLAG = 7 +CERT_INFO_SUBJECT_PUBLIC_KEY_INFO_FLAG = 8 
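The szOID_* constants are plain dotted-OID strings, so a reverse lookup can translate an OID found in a certificate back to a symbolic name. A hedged sketch, assuming the vendored win32cryptcon module imports cleanly (a few names share one OID value, in which case the last definition wins):

import win32cryptcon

oid_names = {v: k for k, v in vars(win32cryptcon).items()
             if k.startswith("szOID_") and isinstance(v, str)}
print(oid_names.get("2.5.4.3"))  # szOID_COMMON_NAME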
+CERT_INFO_ISSUER_UNIQUE_ID_FLAG = 9 +CERT_INFO_SUBJECT_UNIQUE_ID_FLAG = 10 +CERT_INFO_EXTENSION_FLAG = 11 +CRL_V1 = 0 +CRL_V2 = 1 +CERT_REQUEST_V1 = 0 +CERT_KEYGEN_REQUEST_V1 = 0 +CTL_V1 = 0 +CERT_ENCODING_TYPE_MASK = 0x0000FFFF +CMSG_ENCODING_TYPE_MASK = (-65536) +def GET_CERT_ENCODING_TYPE(X): return (X & CERT_ENCODING_TYPE_MASK) + +def GET_CMSG_ENCODING_TYPE(X): return (X & CMSG_ENCODING_TYPE_MASK) + +CRYPT_ASN_ENCODING = 0x00000001 +CRYPT_NDR_ENCODING = 0x00000002 +X509_ASN_ENCODING = 0x00000001 +X509_NDR_ENCODING = 0x00000002 +PKCS_7_ASN_ENCODING = 0x00010000 +PKCS_7_NDR_ENCODING = 0x00020000 +CRYPT_FORMAT_STR_MULTI_LINE = 0x0001 +CRYPT_FORMAT_STR_NO_HEX = 0x0010 +CRYPT_FORMAT_SIMPLE = 0x0001 +CRYPT_FORMAT_X509 = 0x0002 +CRYPT_FORMAT_OID = 0x0004 +CRYPT_FORMAT_RDN_SEMICOLON = 0x0100 +CRYPT_FORMAT_RDN_CRLF = 0x0200 +CRYPT_FORMAT_RDN_UNQUOTE = 0x0400 +CRYPT_FORMAT_RDN_REVERSE = 0x0800 +CRYPT_FORMAT_COMMA = 0x1000 +CRYPT_FORMAT_SEMICOLON = CRYPT_FORMAT_RDN_SEMICOLON +CRYPT_FORMAT_CRLF = CRYPT_FORMAT_RDN_CRLF +CRYPT_ENCODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG = 0x8 +CRYPT_ENCODE_ALLOC_FLAG = 0x8000 +CRYPT_UNICODE_NAME_ENCODE_ENABLE_T61_UNICODE_FLAG = \ + CERT_RDN_ENABLE_T61_UNICODE_FLAG +CRYPT_UNICODE_NAME_ENCODE_ENABLE_UTF8_UNICODE_FLAG = \ + CERT_RDN_ENABLE_UTF8_UNICODE_FLAG +CRYPT_UNICODE_NAME_ENCODE_DISABLE_CHECK_TYPE_FLAG = \ + CERT_RDN_DISABLE_CHECK_TYPE_FLAG +CRYPT_SORTED_CTL_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x10000 +CRYPT_DECODE_NOCOPY_FLAG = 0x1 +CRYPT_DECODE_TO_BE_SIGNED_FLAG = 0x2 +CRYPT_DECODE_SHARE_OID_STRING_FLAG = 0x4 +CRYPT_DECODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG = 0x8 +CRYPT_DECODE_ALLOC_FLAG = 0x8000 +CRYPT_UNICODE_NAME_DECODE_DISABLE_IE4_UTF8_FLAG = \ + CERT_RDN_DISABLE_IE4_UTF8_FLAG + +CRYPT_ENCODE_DECODE_NONE = 0 +X509_CERT = 1 +X509_CERT_TO_BE_SIGNED = 2 +X509_CERT_CRL_TO_BE_SIGNED = 3 +X509_CERT_REQUEST_TO_BE_SIGNED = 4 +X509_EXTENSIONS = 5 +X509_NAME_VALUE = 6 +X509_NAME = 7 +X509_PUBLIC_KEY_INFO = 8 +X509_AUTHORITY_KEY_ID = 9 +X509_KEY_ATTRIBUTES = 10 +X509_KEY_USAGE_RESTRICTION = 11 +X509_ALTERNATE_NAME = 12 +X509_BASIC_CONSTRAINTS = 13 +X509_KEY_USAGE = 14 +X509_BASIC_CONSTRAINTS2 = 15 +X509_CERT_POLICIES = 16 +PKCS_UTC_TIME = 17 +PKCS_TIME_REQUEST = 18 +RSA_CSP_PUBLICKEYBLOB = 19 +X509_UNICODE_NAME = 20 +X509_KEYGEN_REQUEST_TO_BE_SIGNED = 21 +PKCS_ATTRIBUTE = 22 +PKCS_CONTENT_INFO_SEQUENCE_OF_ANY = 23 +X509_UNICODE_NAME_VALUE = 24 +X509_ANY_STRING = X509_NAME_VALUE +X509_UNICODE_ANY_STRING = X509_UNICODE_NAME_VALUE +X509_OCTET_STRING = 25 +X509_BITS = 26 +X509_INTEGER = 27 +X509_MULTI_BYTE_INTEGER = 28 +X509_ENUMERATED = 29 +X509_CHOICE_OF_TIME = 30 +X509_AUTHORITY_KEY_ID2 = 31 +X509_AUTHORITY_INFO_ACCESS = 32 +X509_SUBJECT_INFO_ACCESS = X509_AUTHORITY_INFO_ACCESS +X509_CRL_REASON_CODE = X509_ENUMERATED +PKCS_CONTENT_INFO = 33 +X509_SEQUENCE_OF_ANY = 34 +X509_CRL_DIST_POINTS = 35 +X509_ENHANCED_KEY_USAGE = 36 +PKCS_CTL = 37 +X509_MULTI_BYTE_UINT = 38 +X509_DSS_PUBLICKEY = X509_MULTI_BYTE_UINT +X509_DSS_PARAMETERS = 39 +X509_DSS_SIGNATURE = 40 +PKCS_RC2_CBC_PARAMETERS = 41 +PKCS_SMIME_CAPABILITIES = 42 +X509_QC_STATEMENTS_EXT = 42 +PKCS_RSA_PRIVATE_KEY = 43 +PKCS_PRIVATE_KEY_INFO = 44 +PKCS_ENCRYPTED_PRIVATE_KEY_INFO = 45 +X509_PKIX_POLICY_QUALIFIER_USERNOTICE = 46 +X509_DH_PUBLICKEY = X509_MULTI_BYTE_UINT +X509_DH_PARAMETERS = 47 +PKCS_ATTRIBUTES = 48 +PKCS_SORTED_CTL = 49 +X509_ECC_SIGNATURE = 47 +X942_DH_PARAMETERS = 50 +X509_BITS_WITHOUT_TRAILING_ZEROES = 51 +X942_OTHER_INFO = 52 +X509_CERT_PAIR = 53 +X509_ISSUING_DIST_POINT = 54 +X509_NAME_CONSTRAINTS = 
55 +X509_POLICY_MAPPINGS = 56 +X509_POLICY_CONSTRAINTS = 57 +X509_CROSS_CERT_DIST_POINTS = 58 +CMC_DATA = 59 +CMC_RESPONSE = 60 +CMC_STATUS = 61 +CMC_ADD_EXTENSIONS = 62 +CMC_ADD_ATTRIBUTES = 63 +X509_CERTIFICATE_TEMPLATE = 64 +OCSP_SIGNED_REQUEST = 65 +OCSP_REQUEST = 66 +OCSP_RESPONSE = 67 +OCSP_BASIC_SIGNED_RESPONSE = 68 +OCSP_BASIC_RESPONSE = 69 +X509_LOGOTYPE_EXT = 70 +X509_BIOMETRIC_EXT = 71 +CNG_RSA_PUBLIC_KEY_BLOB = 72 +X509_OBJECT_IDENTIFIER = 73 +X509_ALGORITHM_IDENTIFIER = 74 +PKCS_RSA_SSA_PSS_PARAMETERS = 75 +PKCS_RSAES_OAEP_PARAMETERS = 76 +ECC_CMS_SHARED_INFO = 77 +TIMESTAMP_REQUEST = 78 +TIMESTAMP_RESPONSE = 79 +TIMESTAMP_INFO = 80 +X509_CERT_BUNDLE = 81 +PKCS7_SIGNER_INFO = 500 +CMS_SIGNER_INFO = 501 + +szOID_AUTHORITY_KEY_IDENTIFIER = "2.5.29.1" +szOID_KEY_ATTRIBUTES = "2.5.29.2" +szOID_CERT_POLICIES_95 = "2.5.29.3" +szOID_KEY_USAGE_RESTRICTION = "2.5.29.4" +szOID_SUBJECT_ALT_NAME = "2.5.29.7" +szOID_ISSUER_ALT_NAME = "2.5.29.8" +szOID_BASIC_CONSTRAINTS = "2.5.29.10" +szOID_KEY_USAGE = "2.5.29.15" +szOID_PRIVATEKEY_USAGE_PERIOD = "2.5.29.16" +szOID_BASIC_CONSTRAINTS2 = "2.5.29.19" +szOID_CERT_POLICIES = "2.5.29.32" +szOID_ANY_CERT_POLICY = "2.5.29.32.0" +szOID_AUTHORITY_KEY_IDENTIFIER2 = "2.5.29.35" +szOID_SUBJECT_KEY_IDENTIFIER = "2.5.29.14" +szOID_SUBJECT_ALT_NAME2 = "2.5.29.17" +szOID_ISSUER_ALT_NAME2 = "2.5.29.18" +szOID_CRL_REASON_CODE = "2.5.29.21" +szOID_REASON_CODE_HOLD = "2.5.29.23" +szOID_CRL_DIST_POINTS = "2.5.29.31" +szOID_ENHANCED_KEY_USAGE = "2.5.29.37" +szOID_CRL_NUMBER = "2.5.29.20" +szOID_DELTA_CRL_INDICATOR = "2.5.29.27" +szOID_ISSUING_DIST_POINT = "2.5.29.28" +szOID_FRESHEST_CRL = "2.5.29.46" +szOID_NAME_CONSTRAINTS = "2.5.29.30" +szOID_POLICY_MAPPINGS = "2.5.29.33" +szOID_LEGACY_POLICY_MAPPINGS = "2.5.29.5" +szOID_POLICY_CONSTRAINTS = "2.5.29.36" +szOID_RENEWAL_CERTIFICATE = "1.3.6.1.4.1.311.13.1" +szOID_ENROLLMENT_NAME_VALUE_PAIR = "1.3.6.1.4.1.311.13.2.1" +szOID_ENROLLMENT_CSP_PROVIDER = "1.3.6.1.4.1.311.13.2.2" +szOID_OS_VERSION = "1.3.6.1.4.1.311.13.2.3" +szOID_ENROLLMENT_AGENT = "1.3.6.1.4.1.311.20.2.1" +szOID_PKIX = "1.3.6.1.5.5.7" +szOID_PKIX_PE = "1.3.6.1.5.5.7.1" +szOID_AUTHORITY_INFO_ACCESS = "1.3.6.1.5.5.7.1.1" +szOID_CERT_EXTENSIONS = "1.3.6.1.4.1.311.2.1.14" +szOID_NEXT_UPDATE_LOCATION = "1.3.6.1.4.1.311.10.2" +szOID_REMOVE_CERTIFICATE = "1.3.6.1.4.1.311.10.8.1" +szOID_CROSS_CERT_DIST_POINTS = "1.3.6.1.4.1.311.10.9.1" +szOID_CTL = "1.3.6.1.4.1.311.10.1" +szOID_SORTED_CTL = "1.3.6.1.4.1.311.10.1.1" +szOID_SERIALIZED = "1.3.6.1.4.1.311.10.3.3.1" +szOID_NT_PRINCIPAL_NAME = "1.3.6.1.4.1.311.20.2.3" +szOID_PRODUCT_UPDATE = "1.3.6.1.4.1.311.31.1" +szOID_ANY_APPLICATION_POLICY = "1.3.6.1.4.1.311.10.12.1" +szOID_AUTO_ENROLL_CTL_USAGE = "1.3.6.1.4.1.311.20.1" +szOID_ENROLL_CERTTYPE_EXTENSION = "1.3.6.1.4.1.311.20.2" +szOID_CERT_MANIFOLD = "1.3.6.1.4.1.311.20.3" +szOID_CERTSRV_CA_VERSION = "1.3.6.1.4.1.311.21.1" +szOID_CERTSRV_PREVIOUS_CERT_HASH = "1.3.6.1.4.1.311.21.2" +szOID_CRL_VIRTUAL_BASE = "1.3.6.1.4.1.311.21.3" +szOID_CRL_NEXT_PUBLISH = "1.3.6.1.4.1.311.21.4" +szOID_KP_CA_EXCHANGE = "1.3.6.1.4.1.311.21.5" +szOID_KP_KEY_RECOVERY_AGENT = "1.3.6.1.4.1.311.21.6" +szOID_CERTIFICATE_TEMPLATE = "1.3.6.1.4.1.311.21.7" +szOID_ENTERPRISE_OID_ROOT = "1.3.6.1.4.1.311.21.8" +szOID_RDN_DUMMY_SIGNER = "1.3.6.1.4.1.311.21.9" +szOID_APPLICATION_CERT_POLICIES = "1.3.6.1.4.1.311.21.10" +szOID_APPLICATION_POLICY_MAPPINGS = "1.3.6.1.4.1.311.21.11" +szOID_APPLICATION_POLICY_CONSTRAINTS = "1.3.6.1.4.1.311.21.12" +szOID_ARCHIVED_KEY_ATTR = "1.3.6.1.4.1.311.21.13" 
+szOID_CRL_SELF_CDP = "1.3.6.1.4.1.311.21.14" +szOID_REQUIRE_CERT_CHAIN_POLICY = "1.3.6.1.4.1.311.21.15" +szOID_ARCHIVED_KEY_CERT_HASH = "1.3.6.1.4.1.311.21.16" +szOID_ISSUED_CERT_HASH = "1.3.6.1.4.1.311.21.17" +szOID_DS_EMAIL_REPLICATION = "1.3.6.1.4.1.311.21.19" +szOID_REQUEST_CLIENT_INFO = "1.3.6.1.4.1.311.21.20" +szOID_ENCRYPTED_KEY_HASH = "1.3.6.1.4.1.311.21.21" +szOID_CERTSRV_CROSSCA_VERSION = "1.3.6.1.4.1.311.21.22" +szOID_NTDS_REPLICATION = "1.3.6.1.4.1.311.25.1" +szOID_SUBJECT_DIR_ATTRS = "2.5.29.9" +szOID_PKIX_KP = "1.3.6.1.5.5.7.3" +szOID_PKIX_KP_SERVER_AUTH = "1.3.6.1.5.5.7.3.1" +szOID_PKIX_KP_CLIENT_AUTH = "1.3.6.1.5.5.7.3.2" +szOID_PKIX_KP_CODE_SIGNING = "1.3.6.1.5.5.7.3.3" +szOID_PKIX_KP_EMAIL_PROTECTION = "1.3.6.1.5.5.7.3.4" +szOID_PKIX_KP_IPSEC_END_SYSTEM = "1.3.6.1.5.5.7.3.5" +szOID_PKIX_KP_IPSEC_TUNNEL = "1.3.6.1.5.5.7.3.6" +szOID_PKIX_KP_IPSEC_USER = "1.3.6.1.5.5.7.3.7" +szOID_PKIX_KP_TIMESTAMP_SIGNING = "1.3.6.1.5.5.7.3.8" +szOID_IPSEC_KP_IKE_INTERMEDIATE = "1.3.6.1.5.5.8.2.2" +szOID_KP_CTL_USAGE_SIGNING = "1.3.6.1.4.1.311.10.3.1" +szOID_KP_TIME_STAMP_SIGNING = "1.3.6.1.4.1.311.10.3.2" +szOID_SERVER_GATED_CRYPTO = "1.3.6.1.4.1.311.10.3.3" +szOID_SGC_NETSCAPE = "2.16.840.1.113730.4.1" +szOID_KP_EFS = "1.3.6.1.4.1.311.10.3.4" +szOID_EFS_RECOVERY = "1.3.6.1.4.1.311.10.3.4.1" +szOID_WHQL_CRYPTO = "1.3.6.1.4.1.311.10.3.5" +szOID_NT5_CRYPTO = "1.3.6.1.4.1.311.10.3.6" +szOID_OEM_WHQL_CRYPTO = "1.3.6.1.4.1.311.10.3.7" +szOID_EMBEDDED_NT_CRYPTO = "1.3.6.1.4.1.311.10.3.8" +szOID_ROOT_LIST_SIGNER = "1.3.6.1.4.1.311.10.3.9" +szOID_KP_QUALIFIED_SUBORDINATION = "1.3.6.1.4.1.311.10.3.10" +szOID_KP_KEY_RECOVERY = "1.3.6.1.4.1.311.10.3.11" +szOID_KP_DOCUMENT_SIGNING = "1.3.6.1.4.1.311.10.3.12" +szOID_KP_LIFETIME_SIGNING = "1.3.6.1.4.1.311.10.3.13" +szOID_KP_MOBILE_DEVICE_SOFTWARE = "1.3.6.1.4.1.311.10.3.14" +szOID_DRM = "1.3.6.1.4.1.311.10.5.1" +szOID_DRM_INDIVIDUALIZATION = "1.3.6.1.4.1.311.10.5.2" +szOID_LICENSES = "1.3.6.1.4.1.311.10.6.1" +szOID_LICENSE_SERVER = "1.3.6.1.4.1.311.10.6.2" +szOID_KP_SMARTCARD_LOGON = "1.3.6.1.4.1.311.20.2.2" +szOID_YESNO_TRUST_ATTR = "1.3.6.1.4.1.311.10.4.1" +szOID_PKIX_POLICY_QUALIFIER_CPS = "1.3.6.1.5.5.7.2.1" +szOID_PKIX_POLICY_QUALIFIER_USERNOTICE = "1.3.6.1.5.5.7.2.2" +szOID_CERT_POLICIES_95_QUALIFIER1 = "2.16.840.1.113733.1.7.1.1" +CERT_UNICODE_RDN_ERR_INDEX_MASK = 0x3FF +CERT_UNICODE_RDN_ERR_INDEX_SHIFT = 22 +CERT_UNICODE_ATTR_ERR_INDEX_MASK = 0x003F +CERT_UNICODE_ATTR_ERR_INDEX_SHIFT = 16 +CERT_UNICODE_VALUE_ERR_INDEX_MASK = 0x0000FFFF +CERT_UNICODE_VALUE_ERR_INDEX_SHIFT = 0 +CERT_DIGITAL_SIGNATURE_KEY_USAGE = 0x80 +CERT_NON_REPUDIATION_KEY_USAGE = 0x40 +CERT_KEY_ENCIPHERMENT_KEY_USAGE = 0x20 +CERT_DATA_ENCIPHERMENT_KEY_USAGE = 0x10 +CERT_KEY_AGREEMENT_KEY_USAGE = 0x08 +CERT_KEY_CERT_SIGN_KEY_USAGE = 0x04 +CERT_OFFLINE_CRL_SIGN_KEY_USAGE = 0x02 +CERT_CRL_SIGN_KEY_USAGE = 0x02 +CERT_ENCIPHER_ONLY_KEY_USAGE = 0x01 +CERT_DECIPHER_ONLY_KEY_USAGE = 0x80 +CERT_ALT_NAME_OTHER_NAME = 1 +CERT_ALT_NAME_RFC822_NAME = 2 +CERT_ALT_NAME_DNS_NAME = 3 +CERT_ALT_NAME_X400_ADDRESS = 4 +CERT_ALT_NAME_DIRECTORY_NAME = 5 +CERT_ALT_NAME_EDI_PARTY_NAME = 6 +CERT_ALT_NAME_URL = 7 +CERT_ALT_NAME_IP_ADDRESS = 8 +CERT_ALT_NAME_REGISTERED_ID = 9 +CERT_ALT_NAME_ENTRY_ERR_INDEX_MASK = 0xFF +CERT_ALT_NAME_ENTRY_ERR_INDEX_SHIFT = 16 +CERT_ALT_NAME_VALUE_ERR_INDEX_MASK = 0x0000FFFF +CERT_ALT_NAME_VALUE_ERR_INDEX_SHIFT = 0 +CERT_CA_SUBJECT_FLAG = 0x80 +CERT_END_ENTITY_SUBJECT_FLAG = 0x40 +szOID_PKIX_ACC_DESCR = "1.3.6.1.5.5.7.48" +szOID_PKIX_OCSP = "1.3.6.1.5.5.7.48.1" 
+szOID_PKIX_CA_ISSUERS = "1.3.6.1.5.5.7.48.2" +CRL_REASON_UNSPECIFIED = 0 +CRL_REASON_KEY_COMPROMISE = 1 +CRL_REASON_CA_COMPROMISE = 2 +CRL_REASON_AFFILIATION_CHANGED = 3 +CRL_REASON_SUPERSEDED = 4 +CRL_REASON_CESSATION_OF_OPERATION = 5 +CRL_REASON_CERTIFICATE_HOLD = 6 +CRL_REASON_REMOVE_FROM_CRL = 8 +CRL_DIST_POINT_NO_NAME = 0 +CRL_DIST_POINT_FULL_NAME = 1 +CRL_DIST_POINT_ISSUER_RDN_NAME = 2 +CRL_REASON_UNUSED_FLAG = 0x80 +CRL_REASON_KEY_COMPROMISE_FLAG = 0x40 +CRL_REASON_CA_COMPROMISE_FLAG = 0x20 +CRL_REASON_AFFILIATION_CHANGED_FLAG = 0x10 +CRL_REASON_SUPERSEDED_FLAG = 0x08 +CRL_REASON_CESSATION_OF_OPERATION_FLAG = 0x04 +CRL_REASON_CERTIFICATE_HOLD_FLAG = 0x02 +CRL_DIST_POINT_ERR_INDEX_MASK = 0x7F +CRL_DIST_POINT_ERR_INDEX_SHIFT = 24 + +CRL_DIST_POINT_ERR_CRL_ISSUER_BIT = (-2147483648) + +CROSS_CERT_DIST_POINT_ERR_INDEX_MASK = 0xFF +CROSS_CERT_DIST_POINT_ERR_INDEX_SHIFT = 24 + +CERT_EXCLUDED_SUBTREE_BIT = (-2147483648) + +SORTED_CTL_EXT_FLAGS_OFFSET = (0*4) +SORTED_CTL_EXT_COUNT_OFFSET = (1*4) +SORTED_CTL_EXT_MAX_COLLISION_OFFSET = (2*4) +SORTED_CTL_EXT_HASH_BUCKET_OFFSET = (3*4) +SORTED_CTL_EXT_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x1 +CERT_DSS_R_LEN = 20 +CERT_DSS_S_LEN = 20 +CERT_DSS_SIGNATURE_LEN = (CERT_DSS_R_LEN + CERT_DSS_S_LEN) +CERT_MAX_ASN_ENCODED_DSS_SIGNATURE_LEN = (2 + 2*(2 + 20 +1)) +CRYPT_X942_COUNTER_BYTE_LENGTH = 4 +CRYPT_X942_KEY_LENGTH_BYTE_LENGTH = 4 +CRYPT_X942_PUB_INFO_BYTE_LENGTH = (512/8) +CRYPT_RC2_40BIT_VERSION = 160 +CRYPT_RC2_56BIT_VERSION = 52 +CRYPT_RC2_64BIT_VERSION = 120 +CRYPT_RC2_128BIT_VERSION = 58 +szOID_VERISIGN_PRIVATE_6_9 = "2.16.840.1.113733.1.6.9" +szOID_VERISIGN_ONSITE_JURISDICTION_HASH = "2.16.840.1.113733.1.6.11" +szOID_VERISIGN_BITSTRING_6_13 = "2.16.840.1.113733.1.6.13" +szOID_VERISIGN_ISS_STRONG_CRYPTO = "2.16.840.1.113733.1.8.1" +szOID_NETSCAPE = "2.16.840.1.113730" +szOID_NETSCAPE_CERT_EXTENSION = "2.16.840.1.113730.1" +szOID_NETSCAPE_CERT_TYPE = "2.16.840.1.113730.1.1" +szOID_NETSCAPE_BASE_URL = "2.16.840.1.113730.1.2" +szOID_NETSCAPE_REVOCATION_URL = "2.16.840.1.113730.1.3" +szOID_NETSCAPE_CA_REVOCATION_URL = "2.16.840.1.113730.1.4" +szOID_NETSCAPE_CERT_RENEWAL_URL = "2.16.840.1.113730.1.7" +szOID_NETSCAPE_CA_POLICY_URL = "2.16.840.1.113730.1.8" +szOID_NETSCAPE_SSL_SERVER_NAME = "2.16.840.1.113730.1.12" +szOID_NETSCAPE_COMMENT = "2.16.840.1.113730.1.13" +szOID_NETSCAPE_DATA_TYPE = "2.16.840.1.113730.2" +szOID_NETSCAPE_CERT_SEQUENCE = "2.16.840.1.113730.2.5" +NETSCAPE_SSL_CLIENT_AUTH_CERT_TYPE = 0x80 +NETSCAPE_SSL_SERVER_AUTH_CERT_TYPE = 0x40 +NETSCAPE_SMIME_CERT_TYPE = 0x20 +NETSCAPE_SIGN_CERT_TYPE = 0x10 +NETSCAPE_SSL_CA_CERT_TYPE = 0x04 +NETSCAPE_SMIME_CA_CERT_TYPE = 0x02 +NETSCAPE_SIGN_CA_CERT_TYPE = 0x01 +szOID_CT_PKI_DATA = "1.3.6.1.5.5.7.12.2" +szOID_CT_PKI_RESPONSE = "1.3.6.1.5.5.7.12.3" +szOID_PKIX_NO_SIGNATURE = "1.3.6.1.5.5.7.6.2" +szOID_CMC = "1.3.6.1.5.5.7.7" +szOID_CMC_STATUS_INFO = "1.3.6.1.5.5.7.7.1" +szOID_CMC_IDENTIFICATION = "1.3.6.1.5.5.7.7.2" +szOID_CMC_IDENTITY_PROOF = "1.3.6.1.5.5.7.7.3" +szOID_CMC_DATA_RETURN = "1.3.6.1.5.5.7.7.4" +szOID_CMC_TRANSACTION_ID = "1.3.6.1.5.5.7.7.5" +szOID_CMC_SENDER_NONCE = "1.3.6.1.5.5.7.7.6" +szOID_CMC_RECIPIENT_NONCE = "1.3.6.1.5.5.7.7.7" +szOID_CMC_ADD_EXTENSIONS = "1.3.6.1.5.5.7.7.8" +szOID_CMC_ENCRYPTED_POP = "1.3.6.1.5.5.7.7.9" +szOID_CMC_DECRYPTED_POP = "1.3.6.1.5.5.7.7.10" +szOID_CMC_LRA_POP_WITNESS = "1.3.6.1.5.5.7.7.11" +szOID_CMC_GET_CERT = "1.3.6.1.5.5.7.7.15" +szOID_CMC_GET_CRL = "1.3.6.1.5.5.7.7.16" +szOID_CMC_REVOKE_REQUEST = "1.3.6.1.5.5.7.7.17" +szOID_CMC_REG_INFO = 
"1.3.6.1.5.5.7.7.18" +szOID_CMC_RESPONSE_INFO = "1.3.6.1.5.5.7.7.19" +szOID_CMC_QUERY_PENDING = "1.3.6.1.5.5.7.7.21" +szOID_CMC_ID_POP_LINK_RANDOM = "1.3.6.1.5.5.7.7.22" +szOID_CMC_ID_POP_LINK_WITNESS = "1.3.6.1.5.5.7.7.23" +szOID_CMC_ID_CONFIRM_CERT_ACCEPTANCE = "1.3.6.1.5.5.7.7.24" +szOID_CMC_ADD_ATTRIBUTES = "1.3.6.1.4.1.311.10.10.1" +CMC_TAGGED_CERT_REQUEST_CHOICE = 1 +CMC_OTHER_INFO_NO_CHOICE = 0 +CMC_OTHER_INFO_FAIL_CHOICE = 1 +CMC_OTHER_INFO_PEND_CHOICE = 2 +CMC_STATUS_SUCCESS = 0 +CMC_STATUS_FAILED = 2 +CMC_STATUS_PENDING = 3 +CMC_STATUS_NO_SUPPORT = 4 +CMC_STATUS_CONFIRM_REQUIRED = 5 +CMC_FAIL_BAD_ALG = 0 +CMC_FAIL_BAD_MESSAGE_CHECK = 1 +CMC_FAIL_BAD_REQUEST = 2 +CMC_FAIL_BAD_TIME = 3 +CMC_FAIL_BAD_CERT_ID = 4 +CMC_FAIL_UNSUPORTED_EXT = 5 +CMC_FAIL_MUST_ARCHIVE_KEYS = 6 +CMC_FAIL_BAD_IDENTITY = 7 +CMC_FAIL_POP_REQUIRED = 8 +CMC_FAIL_POP_FAILED = 9 +CMC_FAIL_NO_KEY_REUSE = 10 +CMC_FAIL_INTERNAL_CA_ERROR = 11 +CMC_FAIL_TRY_LATER = 12 +CRYPT_OID_ENCODE_OBJECT_FUNC = "CryptDllEncodeObject" +CRYPT_OID_DECODE_OBJECT_FUNC = "CryptDllDecodeObject" +CRYPT_OID_ENCODE_OBJECT_EX_FUNC = "CryptDllEncodeObjectEx" +CRYPT_OID_DECODE_OBJECT_EX_FUNC = "CryptDllDecodeObjectEx" +CRYPT_OID_CREATE_COM_OBJECT_FUNC = "CryptDllCreateCOMObject" +CRYPT_OID_VERIFY_REVOCATION_FUNC = "CertDllVerifyRevocation" +CRYPT_OID_VERIFY_CTL_USAGE_FUNC = "CertDllVerifyCTLUsage" +CRYPT_OID_FORMAT_OBJECT_FUNC = "CryptDllFormatObject" +CRYPT_OID_FIND_OID_INFO_FUNC = "CryptDllFindOIDInfo" +CRYPT_OID_FIND_LOCALIZED_NAME_FUNC = "CryptDllFindLocalizedName" + +CRYPT_OID_REGPATH = "Software\\Microsoft\\Cryptography\\OID" +CRYPT_OID_REG_ENCODING_TYPE_PREFIX = "EncodingType " +CRYPT_OID_REG_DLL_VALUE_NAME = "Dll" +CRYPT_OID_REG_FUNC_NAME_VALUE_NAME = "FuncName" +CRYPT_OID_REG_FUNC_NAME_VALUE_NAME_A = "FuncName" +CRYPT_OID_REG_FLAGS_VALUE_NAME = "CryptFlags" +CRYPT_DEFAULT_OID = "DEFAULT" +CRYPT_INSTALL_OID_FUNC_BEFORE_FLAG = 1 +CRYPT_GET_INSTALLED_OID_FUNC_FLAG = 0x1 +CRYPT_REGISTER_FIRST_INDEX = 0 +CRYPT_REGISTER_LAST_INDEX = (-1) +CRYPT_MATCH_ANY_ENCODING_TYPE = (-1) +CRYPT_HASH_ALG_OID_GROUP_ID = 1 +CRYPT_ENCRYPT_ALG_OID_GROUP_ID = 2 +CRYPT_PUBKEY_ALG_OID_GROUP_ID = 3 +CRYPT_SIGN_ALG_OID_GROUP_ID = 4 +CRYPT_RDN_ATTR_OID_GROUP_ID = 5 +CRYPT_EXT_OR_ATTR_OID_GROUP_ID = 6 +CRYPT_ENHKEY_USAGE_OID_GROUP_ID = 7 +CRYPT_POLICY_OID_GROUP_ID = 8 +CRYPT_TEMPLATE_OID_GROUP_ID = 9 +CRYPT_LAST_OID_GROUP_ID = 9 +CRYPT_FIRST_ALG_OID_GROUP_ID = CRYPT_HASH_ALG_OID_GROUP_ID +CRYPT_LAST_ALG_OID_GROUP_ID = CRYPT_SIGN_ALG_OID_GROUP_ID +CRYPT_OID_INHIBIT_SIGNATURE_FORMAT_FLAG = 0x1 +CRYPT_OID_USE_PUBKEY_PARA_FOR_PKCS7_FLAG = 0x2 +CRYPT_OID_NO_NULL_ALGORITHM_PARA_FLAG = 0x4 +CRYPT_OID_INFO_OID_KEY = 1 +CRYPT_OID_INFO_NAME_KEY = 2 +CRYPT_OID_INFO_ALGID_KEY = 3 +CRYPT_OID_INFO_SIGN_KEY = 4 +CRYPT_INSTALL_OID_INFO_BEFORE_FLAG = 1 +CRYPT_LOCALIZED_NAME_ENCODING_TYPE = 0 +CRYPT_LOCALIZED_NAME_OID = "LocalizedNames" +szOID_PKCS_7_DATA = "1.2.840.113549.1.7.1" +szOID_PKCS_7_SIGNED = "1.2.840.113549.1.7.2" +szOID_PKCS_7_ENVELOPED = "1.2.840.113549.1.7.3" +szOID_PKCS_7_SIGNEDANDENVELOPED = "1.2.840.113549.1.7.4" +szOID_PKCS_7_DIGESTED = "1.2.840.113549.1.7.5" +szOID_PKCS_7_ENCRYPTED = "1.2.840.113549.1.7.6" +szOID_PKCS_9_CONTENT_TYPE = "1.2.840.113549.1.9.3" +szOID_PKCS_9_MESSAGE_DIGEST = "1.2.840.113549.1.9.4" +CMSG_DATA = 1 +CMSG_SIGNED = 2 +CMSG_ENVELOPED = 3 +CMSG_SIGNED_AND_ENVELOPED = 4 +CMSG_HASHED = 5 +CMSG_ENCRYPTED = 6 + +CMSG_ALL_FLAGS = -1 +CMSG_DATA_FLAG = (1 << CMSG_DATA) +CMSG_SIGNED_FLAG = (1 << CMSG_SIGNED) +CMSG_ENVELOPED_FLAG = (1 << 
CMSG_ENVELOPED) +CMSG_SIGNED_AND_ENVELOPED_FLAG = (1 << CMSG_SIGNED_AND_ENVELOPED) +CMSG_HASHED_FLAG = (1 << CMSG_HASHED) +CMSG_ENCRYPTED_FLAG = (1 << CMSG_ENCRYPTED) +CERT_ID_ISSUER_SERIAL_NUMBER = 1 +CERT_ID_KEY_IDENTIFIER = 2 +CERT_ID_SHA1_HASH = 3 +CMSG_KEY_AGREE_EPHEMERAL_KEY_CHOICE = 1 +CMSG_KEY_AGREE_STATIC_KEY_CHOICE = 2 +CMSG_MAIL_LIST_HANDLE_KEY_CHOICE = 1 +CMSG_KEY_TRANS_RECIPIENT = 1 +CMSG_KEY_AGREE_RECIPIENT = 2 +CMSG_MAIL_LIST_RECIPIENT = 3 +CMSG_SP3_COMPATIBLE_ENCRYPT_FLAG = (-2147483648) +CMSG_RC4_NO_SALT_FLAG = 0x40000000 +CMSG_INDEFINITE_LENGTH = ((-1)) +CMSG_BARE_CONTENT_FLAG = 0x00000001 +CMSG_LENGTH_ONLY_FLAG = 0x00000002 +CMSG_DETACHED_FLAG = 0x00000004 +CMSG_AUTHENTICATED_ATTRIBUTES_FLAG = 0x00000008 +CMSG_CONTENTS_OCTETS_FLAG = 0x00000010 +CMSG_MAX_LENGTH_FLAG = 0x00000020 +CMSG_CMS_ENCAPSULATED_CONTENT_FLAG = 0x00000040 +CMSG_CRYPT_RELEASE_CONTEXT_FLAG = 0x00008000 +CMSG_TYPE_PARAM = 1 +CMSG_CONTENT_PARAM = 2 +CMSG_BARE_CONTENT_PARAM = 3 +CMSG_INNER_CONTENT_TYPE_PARAM = 4 +CMSG_SIGNER_COUNT_PARAM = 5 +CMSG_SIGNER_INFO_PARAM = 6 +CMSG_SIGNER_CERT_INFO_PARAM = 7 +CMSG_SIGNER_HASH_ALGORITHM_PARAM = 8 +CMSG_SIGNER_AUTH_ATTR_PARAM = 9 +CMSG_SIGNER_UNAUTH_ATTR_PARAM = 10 +CMSG_CERT_COUNT_PARAM = 11 +CMSG_CERT_PARAM = 12 +CMSG_CRL_COUNT_PARAM = 13 +CMSG_CRL_PARAM = 14 +CMSG_ENVELOPE_ALGORITHM_PARAM = 15 +CMSG_RECIPIENT_COUNT_PARAM = 17 +CMSG_RECIPIENT_INDEX_PARAM = 18 +CMSG_RECIPIENT_INFO_PARAM = 19 +CMSG_HASH_ALGORITHM_PARAM = 20 +CMSG_HASH_DATA_PARAM = 21 +CMSG_COMPUTED_HASH_PARAM = 22 +CMSG_ENCRYPT_PARAM = 26 +CMSG_ENCRYPTED_DIGEST = 27 +CMSG_ENCODED_SIGNER = 28 +CMSG_ENCODED_MESSAGE = 29 +CMSG_VERSION_PARAM = 30 +CMSG_ATTR_CERT_COUNT_PARAM = 31 +CMSG_ATTR_CERT_PARAM = 32 +CMSG_CMS_RECIPIENT_COUNT_PARAM = 33 +CMSG_CMS_RECIPIENT_INDEX_PARAM = 34 +CMSG_CMS_RECIPIENT_ENCRYPTED_KEY_INDEX_PARAM = 35 +CMSG_CMS_RECIPIENT_INFO_PARAM = 36 +CMSG_UNPROTECTED_ATTR_PARAM = 37 +CMSG_SIGNER_CERT_ID_PARAM = 38 +CMSG_CMS_SIGNER_INFO_PARAM = 39 +CMSG_SIGNED_DATA_V1 = 1 +CMSG_SIGNED_DATA_V3 = 3 +CMSG_SIGNED_DATA_PKCS_1_5_VERSION = CMSG_SIGNED_DATA_V1 +CMSG_SIGNED_DATA_CMS_VERSION = CMSG_SIGNED_DATA_V3 +CMSG_SIGNER_INFO_V1 = 1 +CMSG_SIGNER_INFO_V3 = 3 +CMSG_SIGNER_INFO_PKCS_1_5_VERSION = CMSG_SIGNER_INFO_V1 +CMSG_SIGNER_INFO_CMS_VERSION = CMSG_SIGNER_INFO_V3 +CMSG_HASHED_DATA_V0 = 0 +CMSG_HASHED_DATA_V2 = 2 +CMSG_HASHED_DATA_PKCS_1_5_VERSION = CMSG_HASHED_DATA_V0 +CMSG_HASHED_DATA_CMS_VERSION = CMSG_HASHED_DATA_V2 +CMSG_ENVELOPED_DATA_V0 = 0 +CMSG_ENVELOPED_DATA_V2 = 2 +CMSG_ENVELOPED_DATA_PKCS_1_5_VERSION = CMSG_ENVELOPED_DATA_V0 +CMSG_ENVELOPED_DATA_CMS_VERSION = CMSG_ENVELOPED_DATA_V2 +CMSG_KEY_AGREE_ORIGINATOR_CERT = 1 +CMSG_KEY_AGREE_ORIGINATOR_PUBLIC_KEY = 2 +CMSG_ENVELOPED_RECIPIENT_V0 = 0 +CMSG_ENVELOPED_RECIPIENT_V2 = 2 +CMSG_ENVELOPED_RECIPIENT_V3 = 3 +CMSG_ENVELOPED_RECIPIENT_V4 = 4 +CMSG_KEY_TRANS_PKCS_1_5_VERSION = CMSG_ENVELOPED_RECIPIENT_V0 +CMSG_KEY_TRANS_CMS_VERSION = CMSG_ENVELOPED_RECIPIENT_V2 +CMSG_KEY_AGREE_VERSION = CMSG_ENVELOPED_RECIPIENT_V3 +CMSG_MAIL_LIST_VERSION = CMSG_ENVELOPED_RECIPIENT_V4 +CMSG_CTRL_VERIFY_SIGNATURE = 1 +CMSG_CTRL_DECRYPT = 2 +CMSG_CTRL_VERIFY_HASH = 5 +CMSG_CTRL_ADD_SIGNER = 6 +CMSG_CTRL_DEL_SIGNER = 7 +CMSG_CTRL_ADD_SIGNER_UNAUTH_ATTR = 8 +CMSG_CTRL_DEL_SIGNER_UNAUTH_ATTR = 9 +CMSG_CTRL_ADD_CERT = 10 +CMSG_CTRL_DEL_CERT = 11 +CMSG_CTRL_ADD_CRL = 12 +CMSG_CTRL_DEL_CRL = 13 +CMSG_CTRL_ADD_ATTR_CERT = 14 +CMSG_CTRL_DEL_ATTR_CERT = 15 +CMSG_CTRL_KEY_TRANS_DECRYPT = 16 +CMSG_CTRL_KEY_AGREE_DECRYPT = 17 +CMSG_CTRL_MAIL_LIST_DECRYPT = 18 
+CMSG_CTRL_VERIFY_SIGNATURE_EX = 19 +CMSG_CTRL_ADD_CMS_SIGNER_INFO = 20 +CMSG_VERIFY_SIGNER_PUBKEY = 1 +CMSG_VERIFY_SIGNER_CERT = 2 +CMSG_VERIFY_SIGNER_CHAIN = 3 +CMSG_VERIFY_SIGNER_NULL = 4 +CMSG_OID_GEN_ENCRYPT_KEY_FUNC = "CryptMsgDllGenEncryptKey" +CMSG_OID_EXPORT_ENCRYPT_KEY_FUNC = "CryptMsgDllExportEncryptKey" +CMSG_OID_IMPORT_ENCRYPT_KEY_FUNC = "CryptMsgDllImportEncryptKey" +CMSG_CONTENT_ENCRYPT_PAD_ENCODED_LEN_FLAG = 0x00000001 +CMSG_DEFAULT_INSTALLABLE_FUNC_OID = 1 +CMSG_CONTENT_ENCRYPT_FREE_PARA_FLAG = 0x00000001 +CMSG_CONTENT_ENCRYPT_RELEASE_CONTEXT_FLAG = 0x00008000 +CMSG_OID_GEN_CONTENT_ENCRYPT_KEY_FUNC = "CryptMsgDllGenContentEncryptKey" +CMSG_KEY_TRANS_ENCRYPT_FREE_PARA_FLAG = 0x00000001 +CMSG_OID_EXPORT_KEY_TRANS_FUNC = "CryptMsgDllExportKeyTrans" +CMSG_KEY_AGREE_ENCRYPT_FREE_PARA_FLAG = 0x00000001 +CMSG_KEY_AGREE_ENCRYPT_FREE_MATERIAL_FLAG = 0x00000002 +CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_ALG_FLAG = 0x00000004 +CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_PARA_FLAG = 0x00000008 +CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_BITS_FLAG = 0x00000010 +CMSG_OID_EXPORT_KEY_AGREE_FUNC = "CryptMsgDllExportKeyAgree" +CMSG_MAIL_LIST_ENCRYPT_FREE_PARA_FLAG = 0x00000001 +CMSG_OID_EXPORT_MAIL_LIST_FUNC = "CryptMsgDllExportMailList" +CMSG_OID_IMPORT_KEY_TRANS_FUNC = "CryptMsgDllImportKeyTrans" +CMSG_OID_IMPORT_KEY_AGREE_FUNC = "CryptMsgDllImportKeyAgree" +CMSG_OID_IMPORT_MAIL_LIST_FUNC = "CryptMsgDllImportMailList" + +# Certificate property id's used with CertGetCertificateContextProperty +CERT_KEY_PROV_HANDLE_PROP_ID = 1 +CERT_KEY_PROV_INFO_PROP_ID = 2 +CERT_SHA1_HASH_PROP_ID = 3 +CERT_MD5_HASH_PROP_ID = 4 +CERT_HASH_PROP_ID = CERT_SHA1_HASH_PROP_ID +CERT_KEY_CONTEXT_PROP_ID = 5 +CERT_KEY_SPEC_PROP_ID = 6 +CERT_IE30_RESERVED_PROP_ID = 7 +CERT_PUBKEY_HASH_RESERVED_PROP_ID = 8 +CERT_ENHKEY_USAGE_PROP_ID = 9 +CERT_CTL_USAGE_PROP_ID = CERT_ENHKEY_USAGE_PROP_ID +CERT_NEXT_UPDATE_LOCATION_PROP_ID = 10 +CERT_FRIENDLY_NAME_PROP_ID = 11 +CERT_PVK_FILE_PROP_ID = 12 +CERT_DESCRIPTION_PROP_ID = 13 +CERT_ACCESS_STATE_PROP_ID = 14 +CERT_SIGNATURE_HASH_PROP_ID = 15 +CERT_SMART_CARD_DATA_PROP_ID = 16 +CERT_EFS_PROP_ID = 17 +CERT_FORTEZZA_DATA_PROP_ID = 18 +CERT_ARCHIVED_PROP_ID = 19 +CERT_KEY_IDENTIFIER_PROP_ID = 20 +CERT_AUTO_ENROLL_PROP_ID = 21 +CERT_PUBKEY_ALG_PARA_PROP_ID = 22 +CERT_CROSS_CERT_DIST_POINTS_PROP_ID = 23 +CERT_ISSUER_PUBLIC_KEY_MD5_HASH_PROP_ID = 24 +CERT_SUBJECT_PUBLIC_KEY_MD5_HASH_PROP_ID = 25 +CERT_ENROLLMENT_PROP_ID = 26 +CERT_DATE_STAMP_PROP_ID = 27 +CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID = 28 +CERT_SUBJECT_NAME_MD5_HASH_PROP_ID = 29 +CERT_EXTENDED_ERROR_INFO_PROP_ID = 30 +CERT_RENEWAL_PROP_ID = 64 +CERT_ARCHIVED_KEY_HASH_PROP_ID = 65 +CERT_AUTO_ENROLL_RETRY_PROP_ID = 66 +CERT_AIA_URL_RETRIEVED_PROP_ID = 67 +CERT_AUTHORITY_INFO_ACCESS_PROP_ID = 68 +CERT_BACKED_UP_PROP_ID = 69 +CERT_OCSP_RESPONSE_PROP_ID = 70 +CERT_REQUEST_ORIGINATOR_PROP_ID = 71 +CERT_SOURCE_LOCATION_PROP_ID = 72 +CERT_SOURCE_URL_PROP_ID = 73 +CERT_NEW_KEY_PROP_ID = 74 +CERT_OCSP_CACHE_PREFIX_PROP_ID = 75 +CERT_SMART_CARD_ROOT_INFO_PROP_ID = 76 +CERT_NO_AUTO_EXPIRE_CHECK_PROP_ID = 77 +CERT_NCRYPT_KEY_HANDLE_PROP_ID = 78 +CERT_HCRYPTPROV_OR_NCRYPT_KEY_HANDLE_PROP_ID = 79 +CERT_SUBJECT_INFO_ACCESS_PROP_ID = 80 +CERT_CA_OCSP_AUTHORITY_INFO_ACCESS_PROP_ID = 81 +CERT_CA_DISABLE_CRL_PROP_ID = 82 +CERT_ROOT_PROGRAM_CERT_POLICIES_PROP_ID = 83 +CERT_ROOT_PROGRAM_NAME_CONSTRAINTS_PROP_ID = 84 +CERT_SUBJECT_OCSP_AUTHORITY_INFO_ACCESS_PROP_ID = 85 +CERT_SUBJECT_DISABLE_CRL_PROP_ID = 86 +CERT_CEP_PROP_ID = 87 +CERT_SIGN_HASH_CNG_ALG_PROP_ID = 
89 +CERT_SCARD_PIN_ID_PROP_ID = 90 +CERT_SCARD_PIN_INFO_PROP_ID = 91 +CERT_FIRST_RESERVED_PROP_ID = 92 +CERT_LAST_RESERVED_PROP_ID = 0x00007FFF +CERT_FIRST_USER_PROP_ID = 0x00008000 +CERT_LAST_USER_PROP_ID = 0x0000FFFF + +szOID_CERT_PROP_ID_PREFIX = "1.3.6.1.4.1.311.10.11." +szOID_CERT_KEY_IDENTIFIER_PROP_ID = "1.3.6.1.4.1.311.10.11.20" +szOID_CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID = \ + "1.3.6.1.4.1.311.10.11.28" +szOID_CERT_SUBJECT_NAME_MD5_HASH_PROP_ID = \ + "1.3.6.1.4.1.311.10.11.29" +CERT_ACCESS_STATE_WRITE_PERSIST_FLAG = 0x1 +CERT_ACCESS_STATE_SYSTEM_STORE_FLAG = 0x2 +CERT_ACCESS_STATE_LM_SYSTEM_STORE_FLAG = 0x4 +CERT_SET_KEY_PROV_HANDLE_PROP_ID = 0x00000001 +CERT_SET_KEY_CONTEXT_PROP_ID = 0x00000001 +sz_CERT_STORE_PROV_MEMORY = "Memory" +sz_CERT_STORE_PROV_FILENAME_W = "File" +sz_CERT_STORE_PROV_FILENAME = sz_CERT_STORE_PROV_FILENAME_W +sz_CERT_STORE_PROV_SYSTEM_W = "System" +sz_CERT_STORE_PROV_SYSTEM = sz_CERT_STORE_PROV_SYSTEM_W +sz_CERT_STORE_PROV_PKCS7 = "PKCS7" +sz_CERT_STORE_PROV_SERIALIZED = "Serialized" +sz_CERT_STORE_PROV_COLLECTION = "Collection" +sz_CERT_STORE_PROV_SYSTEM_REGISTRY_W = "SystemRegistry" +sz_CERT_STORE_PROV_SYSTEM_REGISTRY = sz_CERT_STORE_PROV_SYSTEM_REGISTRY_W +sz_CERT_STORE_PROV_PHYSICAL_W = "Physical" +sz_CERT_STORE_PROV_PHYSICAL = sz_CERT_STORE_PROV_PHYSICAL_W +sz_CERT_STORE_PROV_SMART_CARD_W = "SmartCard" +sz_CERT_STORE_PROV_SMART_CARD = sz_CERT_STORE_PROV_SMART_CARD_W +sz_CERT_STORE_PROV_LDAP_W = "Ldap" +sz_CERT_STORE_PROV_LDAP = sz_CERT_STORE_PROV_LDAP_W +CERT_STORE_SIGNATURE_FLAG = 0x00000001 +CERT_STORE_TIME_VALIDITY_FLAG = 0x00000002 +CERT_STORE_REVOCATION_FLAG = 0x00000004 +CERT_STORE_NO_CRL_FLAG = 0x00010000 +CERT_STORE_NO_ISSUER_FLAG = 0x00020000 +CERT_STORE_BASE_CRL_FLAG = 0x00000100 +CERT_STORE_DELTA_CRL_FLAG = 0x00000200 +CERT_STORE_NO_CRYPT_RELEASE_FLAG = 0x00000001 +CERT_STORE_SET_LOCALIZED_NAME_FLAG = 0x00000002 +CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG = 0x00000004 +CERT_STORE_DELETE_FLAG = 0x00000010 +CERT_STORE_UNSAFE_PHYSICAL_FLAG = 0x00000020 +CERT_STORE_SHARE_STORE_FLAG = 0x00000040 +CERT_STORE_SHARE_CONTEXT_FLAG = 0x00000080 +CERT_STORE_MANIFOLD_FLAG = 0x00000100 +CERT_STORE_ENUM_ARCHIVED_FLAG = 0x00000200 +CERT_STORE_UPDATE_KEYID_FLAG = 0x00000400 +CERT_STORE_BACKUP_RESTORE_FLAG = 0x00000800 +CERT_STORE_READONLY_FLAG = 0x00008000 +CERT_STORE_OPEN_EXISTING_FLAG = 0x00004000 +CERT_STORE_CREATE_NEW_FLAG = 0x00002000 +CERT_STORE_MAXIMUM_ALLOWED_FLAG = 0x00001000 +CERT_SYSTEM_STORE_MASK = (-65536) +CERT_SYSTEM_STORE_RELOCATE_FLAG = (-2147483648) +CERT_SYSTEM_STORE_UNPROTECTED_FLAG = 0x40000000 +CERT_SYSTEM_STORE_LOCATION_MASK = 0x00FF0000 +CERT_SYSTEM_STORE_LOCATION_SHIFT = 16 +CERT_SYSTEM_STORE_CURRENT_USER_ID = 1 +CERT_SYSTEM_STORE_LOCAL_MACHINE_ID = 2 +CERT_SYSTEM_STORE_CURRENT_SERVICE_ID = 4 +CERT_SYSTEM_STORE_SERVICES_ID = 5 +CERT_SYSTEM_STORE_USERS_ID = 6 +CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID = 7 +CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID = 8 +CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID = 9 +CERT_SYSTEM_STORE_CURRENT_USER = \ + (CERT_SYSTEM_STORE_CURRENT_USER_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT) +CERT_SYSTEM_STORE_LOCAL_MACHINE = \ + (CERT_SYSTEM_STORE_LOCAL_MACHINE_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT) +CERT_SYSTEM_STORE_CURRENT_SERVICE = \ + (CERT_SYSTEM_STORE_CURRENT_SERVICE_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT) +CERT_SYSTEM_STORE_SERVICES = \ + (CERT_SYSTEM_STORE_SERVICES_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT) +CERT_SYSTEM_STORE_USERS = \ + (CERT_SYSTEM_STORE_USERS_ID << 
CERT_SYSTEM_STORE_LOCATION_SHIFT) +CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY = \ + (CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID << \ + CERT_SYSTEM_STORE_LOCATION_SHIFT) +CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY = \ + (CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID << \ + CERT_SYSTEM_STORE_LOCATION_SHIFT) +CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE = \ + (CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID << \ + CERT_SYSTEM_STORE_LOCATION_SHIFT) +CERT_PROT_ROOT_DISABLE_CURRENT_USER_FLAG = 0x1 +CERT_PROT_ROOT_INHIBIT_ADD_AT_INIT_FLAG = 0x2 +CERT_PROT_ROOT_INHIBIT_PURGE_LM_FLAG = 0x4 +CERT_PROT_ROOT_DISABLE_LM_AUTH_FLAG = 0x8 +CERT_PROT_ROOT_ONLY_LM_GPT_FLAG = 0x8 +CERT_PROT_ROOT_DISABLE_NT_AUTH_REQUIRED_FLAG = 0x10 +CERT_PROT_ROOT_DISABLE_NOT_DEFINED_NAME_CONSTRAINT_FLAG = 0x20 +CERT_TRUST_PUB_ALLOW_TRUST_MASK = 0x00000003 +CERT_TRUST_PUB_ALLOW_END_USER_TRUST = 0x00000000 +CERT_TRUST_PUB_ALLOW_MACHINE_ADMIN_TRUST = 0x00000001 +CERT_TRUST_PUB_ALLOW_ENTERPRISE_ADMIN_TRUST = 0x00000002 +CERT_TRUST_PUB_CHECK_PUBLISHER_REV_FLAG = 0x00000100 +CERT_TRUST_PUB_CHECK_TIMESTAMP_REV_FLAG = 0x00000200 + +CERT_AUTH_ROOT_AUTO_UPDATE_LOCAL_MACHINE_REGPATH = r"Software\Microsoft\SystemCertificates\AuthRoot\AutoUpdate" +CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_UNTRUSTED_ROOT_LOGGING_FLAG = 0x1 +CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_PARTIAL_CHAIN_LOGGING_FLAG = 0x2 +CERT_AUTH_ROOT_AUTO_UPDATE_ROOT_DIR_URL_VALUE_NAME = "RootDirUrl" +CERT_AUTH_ROOT_AUTO_UPDATE_SYNC_DELTA_TIME_VALUE_NAME = "SyncDeltaTime" +CERT_AUTH_ROOT_AUTO_UPDATE_FLAGS_VALUE_NAME = "Flags" +CERT_AUTH_ROOT_CTL_FILENAME = "authroot.stl" +CERT_AUTH_ROOT_CTL_FILENAME_A = "authroot.stl" +CERT_AUTH_ROOT_CAB_FILENAME = "authrootstl.cab" +CERT_AUTH_ROOT_SEQ_FILENAME = "authrootseq.txt" +CERT_AUTH_ROOT_CERT_EXT = ".crt" + +CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH = r"Software\Policies\Microsoft\SystemCertificates" +CERT_EFSBLOB_REGPATH = CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + r"\EFS" +CERT_EFSBLOB_VALUE_NAME = "EFSBlob" +CERT_PROT_ROOT_FLAGS_REGPATH = CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH +r"\Root\ProtectedRoots" +CERT_PROT_ROOT_FLAGS_VALUE_NAME = "Flags" +CERT_TRUST_PUB_SAFER_GROUP_POLICY_REGPATH = CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + r"\TrustedPublisher\Safer" +CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH = r"Software\Microsoft\SystemCertificates" +CERT_TRUST_PUB_SAFER_LOCAL_MACHINE_REGPATH = CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH + r"\TrustedPublisher\Safer" +CERT_TRUST_PUB_AUTHENTICODE_FLAGS_VALUE_NAME = "AuthenticodeFlags" +CERT_OCM_SUBCOMPONENTS_LOCAL_MACHINE_REGPATH = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Setup\OC Manager\Subcomponents" +CERT_OCM_SUBCOMPONENTS_ROOT_AUTO_UPDATE_VALUE_NAME = r"RootAutoUpdate" +CERT_DISABLE_ROOT_AUTO_UPDATE_REGPATH = CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + r"\AuthRoot" +CERT_DISABLE_ROOT_AUTO_UPDATE_VALUE_NAME = "DisableRootAutoUpdate" +CERT_AUTH_ROOT_AUTO_UPDATE_LOCAL_MACHINE_REGPATH = CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH + r"\AuthRoot\AutoUpdate" + +CERT_REGISTRY_STORE_REMOTE_FLAG = 0x10000 +CERT_REGISTRY_STORE_SERIALIZED_FLAG = 0x20000 +CERT_REGISTRY_STORE_CLIENT_GPT_FLAG = (-2147483648) +CERT_REGISTRY_STORE_LM_GPT_FLAG = 0x01000000 +CERT_REGISTRY_STORE_ROAMING_FLAG = 0x40000 +CERT_REGISTRY_STORE_MY_IE_DIRTY_FLAG = 0x80000 +CERT_IE_DIRTY_FLAGS_REGPATH = r"Software\Microsoft\Cryptography\IEDirtyFlags" + +CERT_FILE_STORE_COMMIT_ENABLE_FLAG = 0x10000 +CERT_LDAP_STORE_SIGN_FLAG = 0x10000 +CERT_LDAP_STORE_AREC_EXCLUSIVE_FLAG = 0x20000 +CERT_LDAP_STORE_OPENED_FLAG = 0x40000 +CERT_LDAP_STORE_UNBIND_FLAG = 
0x80000 +CRYPT_OID_OPEN_STORE_PROV_FUNC = "CertDllOpenStoreProv" + +CERT_STORE_PROV_EXTERNAL_FLAG = 0x1 +CERT_STORE_PROV_DELETED_FLAG = 0x2 +CERT_STORE_PROV_NO_PERSIST_FLAG = 0x4 +CERT_STORE_PROV_SYSTEM_STORE_FLAG = 0x8 +CERT_STORE_PROV_LM_SYSTEM_STORE_FLAG = 0x10 +CERT_STORE_PROV_CLOSE_FUNC = 0 +CERT_STORE_PROV_READ_CERT_FUNC = 1 +CERT_STORE_PROV_WRITE_CERT_FUNC = 2 +CERT_STORE_PROV_DELETE_CERT_FUNC = 3 +CERT_STORE_PROV_SET_CERT_PROPERTY_FUNC = 4 +CERT_STORE_PROV_READ_CRL_FUNC = 5 +CERT_STORE_PROV_WRITE_CRL_FUNC = 6 +CERT_STORE_PROV_DELETE_CRL_FUNC = 7 +CERT_STORE_PROV_SET_CRL_PROPERTY_FUNC = 8 +CERT_STORE_PROV_READ_CTL_FUNC = 9 +CERT_STORE_PROV_WRITE_CTL_FUNC = 10 +CERT_STORE_PROV_DELETE_CTL_FUNC = 11 +CERT_STORE_PROV_SET_CTL_PROPERTY_FUNC = 12 +CERT_STORE_PROV_CONTROL_FUNC = 13 +CERT_STORE_PROV_FIND_CERT_FUNC = 14 +CERT_STORE_PROV_FREE_FIND_CERT_FUNC = 15 +CERT_STORE_PROV_GET_CERT_PROPERTY_FUNC = 16 +CERT_STORE_PROV_FIND_CRL_FUNC = 17 +CERT_STORE_PROV_FREE_FIND_CRL_FUNC = 18 +CERT_STORE_PROV_GET_CRL_PROPERTY_FUNC = 19 +CERT_STORE_PROV_FIND_CTL_FUNC = 20 +CERT_STORE_PROV_FREE_FIND_CTL_FUNC = 21 +CERT_STORE_PROV_GET_CTL_PROPERTY_FUNC = 22 +CERT_STORE_PROV_WRITE_ADD_FLAG = 0x1 +CERT_STORE_SAVE_AS_STORE = 1 +CERT_STORE_SAVE_AS_PKCS7 = 2 +CERT_STORE_SAVE_TO_FILE = 1 +CERT_STORE_SAVE_TO_MEMORY = 2 +CERT_STORE_SAVE_TO_FILENAME_A = 3 +CERT_STORE_SAVE_TO_FILENAME_W = 4 +CERT_STORE_SAVE_TO_FILENAME = CERT_STORE_SAVE_TO_FILENAME_W +CERT_CLOSE_STORE_FORCE_FLAG = 0x00000001 +CERT_CLOSE_STORE_CHECK_FLAG = 0x00000002 +CERT_COMPARE_MASK = 0xFFFF +CERT_COMPARE_SHIFT = 16 +CERT_COMPARE_ANY = 0 +CERT_COMPARE_SHA1_HASH = 1 +CERT_COMPARE_NAME = 2 +CERT_COMPARE_ATTR = 3 +CERT_COMPARE_MD5_HASH = 4 +CERT_COMPARE_PROPERTY = 5 +CERT_COMPARE_PUBLIC_KEY = 6 +CERT_COMPARE_HASH = CERT_COMPARE_SHA1_HASH +CERT_COMPARE_NAME_STR_A = 7 +CERT_COMPARE_NAME_STR_W = 8 +CERT_COMPARE_KEY_SPEC = 9 +CERT_COMPARE_ENHKEY_USAGE = 10 +CERT_COMPARE_CTL_USAGE = CERT_COMPARE_ENHKEY_USAGE +CERT_COMPARE_SUBJECT_CERT = 11 +CERT_COMPARE_ISSUER_OF = 12 +CERT_COMPARE_EXISTING = 13 +CERT_COMPARE_SIGNATURE_HASH = 14 +CERT_COMPARE_KEY_IDENTIFIER = 15 +CERT_COMPARE_CERT_ID = 16 +CERT_COMPARE_CROSS_CERT_DIST_POINTS = 17 +CERT_COMPARE_PUBKEY_MD5_HASH = 18 +CERT_FIND_ANY = (CERT_COMPARE_ANY << CERT_COMPARE_SHIFT) +CERT_FIND_SHA1_HASH = (CERT_COMPARE_SHA1_HASH << CERT_COMPARE_SHIFT) +CERT_FIND_MD5_HASH = (CERT_COMPARE_MD5_HASH << CERT_COMPARE_SHIFT) +CERT_FIND_SIGNATURE_HASH = (CERT_COMPARE_SIGNATURE_HASH << CERT_COMPARE_SHIFT) +CERT_FIND_KEY_IDENTIFIER = (CERT_COMPARE_KEY_IDENTIFIER << CERT_COMPARE_SHIFT) +CERT_FIND_HASH = CERT_FIND_SHA1_HASH +CERT_FIND_PROPERTY = (CERT_COMPARE_PROPERTY << CERT_COMPARE_SHIFT) +CERT_FIND_PUBLIC_KEY = (CERT_COMPARE_PUBLIC_KEY << CERT_COMPARE_SHIFT) +CERT_FIND_SUBJECT_NAME = (CERT_COMPARE_NAME << CERT_COMPARE_SHIFT | \ + CERT_INFO_SUBJECT_FLAG) +CERT_FIND_SUBJECT_ATTR = (CERT_COMPARE_ATTR << CERT_COMPARE_SHIFT | \ + CERT_INFO_SUBJECT_FLAG) +CERT_FIND_ISSUER_NAME = (CERT_COMPARE_NAME << CERT_COMPARE_SHIFT | \ + CERT_INFO_ISSUER_FLAG) +CERT_FIND_ISSUER_ATTR = (CERT_COMPARE_ATTR << CERT_COMPARE_SHIFT | \ + CERT_INFO_ISSUER_FLAG) +CERT_FIND_SUBJECT_STR_A = (CERT_COMPARE_NAME_STR_A << CERT_COMPARE_SHIFT | \ + CERT_INFO_SUBJECT_FLAG) +CERT_FIND_SUBJECT_STR_W = (CERT_COMPARE_NAME_STR_W << CERT_COMPARE_SHIFT | \ + CERT_INFO_SUBJECT_FLAG) +CERT_FIND_SUBJECT_STR = CERT_FIND_SUBJECT_STR_W +CERT_FIND_ISSUER_STR_A = (CERT_COMPARE_NAME_STR_A << CERT_COMPARE_SHIFT | \ + CERT_INFO_ISSUER_FLAG) +CERT_FIND_ISSUER_STR_W = 
(CERT_COMPARE_NAME_STR_W << CERT_COMPARE_SHIFT | \ + CERT_INFO_ISSUER_FLAG) +CERT_FIND_ISSUER_STR = CERT_FIND_ISSUER_STR_W +CERT_FIND_KEY_SPEC = (CERT_COMPARE_KEY_SPEC << CERT_COMPARE_SHIFT) +CERT_FIND_ENHKEY_USAGE = (CERT_COMPARE_ENHKEY_USAGE << CERT_COMPARE_SHIFT) +CERT_FIND_CTL_USAGE = CERT_FIND_ENHKEY_USAGE +CERT_FIND_SUBJECT_CERT = (CERT_COMPARE_SUBJECT_CERT << CERT_COMPARE_SHIFT) +CERT_FIND_ISSUER_OF = (CERT_COMPARE_ISSUER_OF << CERT_COMPARE_SHIFT) +CERT_FIND_EXISTING = (CERT_COMPARE_EXISTING << CERT_COMPARE_SHIFT) +CERT_FIND_CERT_ID = (CERT_COMPARE_CERT_ID << CERT_COMPARE_SHIFT) +CERT_FIND_CROSS_CERT_DIST_POINTS = \ + (CERT_COMPARE_CROSS_CERT_DIST_POINTS << CERT_COMPARE_SHIFT) +CERT_FIND_PUBKEY_MD5_HASH = \ + (CERT_COMPARE_PUBKEY_MD5_HASH << CERT_COMPARE_SHIFT) +CERT_FIND_OPTIONAL_ENHKEY_USAGE_FLAG = 0x1 +CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG = 0x2 +CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG = 0x4 +CERT_FIND_NO_ENHKEY_USAGE_FLAG = 0x8 +CERT_FIND_OR_ENHKEY_USAGE_FLAG = 0x10 +CERT_FIND_VALID_ENHKEY_USAGE_FLAG = 0x20 +CERT_FIND_OPTIONAL_CTL_USAGE_FLAG = CERT_FIND_OPTIONAL_ENHKEY_USAGE_FLAG +CERT_FIND_EXT_ONLY_CTL_USAGE_FLAG = \ + CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG +CERT_FIND_PROP_ONLY_CTL_USAGE_FLAG = \ + CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG +CERT_FIND_NO_CTL_USAGE_FLAG = CERT_FIND_NO_ENHKEY_USAGE_FLAG +CERT_FIND_OR_CTL_USAGE_FLAG = CERT_FIND_OR_ENHKEY_USAGE_FLAG +CERT_FIND_VALID_CTL_USAGE_FLAG = CERT_FIND_VALID_ENHKEY_USAGE_FLAG +CERT_SET_PROPERTY_IGNORE_PERSIST_ERROR_FLAG = (-2147483648) +CERT_SET_PROPERTY_INHIBIT_PERSIST_FLAG = 0x40000000 +CTL_ENTRY_FROM_PROP_CHAIN_FLAG = 0x1 +CRL_FIND_ANY = 0 +CRL_FIND_ISSUED_BY = 1 +CRL_FIND_EXISTING = 2 +CRL_FIND_ISSUED_FOR = 3 +CRL_FIND_ISSUED_BY_AKI_FLAG = 0x1 +CRL_FIND_ISSUED_BY_SIGNATURE_FLAG = 0x2 +CRL_FIND_ISSUED_BY_DELTA_FLAG = 0x4 +CRL_FIND_ISSUED_BY_BASE_FLAG = 0x8 +CERT_STORE_ADD_NEW = 1 +CERT_STORE_ADD_USE_EXISTING = 2 +CERT_STORE_ADD_REPLACE_EXISTING = 3 +CERT_STORE_ADD_ALWAYS = 4 +CERT_STORE_ADD_REPLACE_EXISTING_INHERIT_PROPERTIES = 5 +CERT_STORE_ADD_NEWER = 6 +CERT_STORE_ADD_NEWER_INHERIT_PROPERTIES = 7 +CERT_STORE_CERTIFICATE_CONTEXT = 1 +CERT_STORE_CRL_CONTEXT = 2 +CERT_STORE_CTL_CONTEXT = 3 + +CERT_STORE_ALL_CONTEXT_FLAG = -1 +CERT_STORE_CERTIFICATE_CONTEXT_FLAG = \ + (1 << CERT_STORE_CERTIFICATE_CONTEXT) +CERT_STORE_CRL_CONTEXT_FLAG = \ + (1 << CERT_STORE_CRL_CONTEXT) +CERT_STORE_CTL_CONTEXT_FLAG = \ + (1 << CERT_STORE_CTL_CONTEXT) +CTL_ANY_SUBJECT_TYPE = 1 +CTL_CERT_SUBJECT_TYPE = 2 +CTL_FIND_ANY = 0 +CTL_FIND_SHA1_HASH = 1 +CTL_FIND_MD5_HASH = 2 +CTL_FIND_USAGE = 3 +CTL_FIND_SUBJECT = 4 +CTL_FIND_EXISTING = 5 +CTL_FIND_NO_LIST_ID_CBDATA = (-1) +CTL_FIND_SAME_USAGE_FLAG = 0x1 +CERT_STORE_CTRL_RESYNC = 1 +CERT_STORE_CTRL_NOTIFY_CHANGE = 2 +CERT_STORE_CTRL_COMMIT = 3 +CERT_STORE_CTRL_AUTO_RESYNC = 4 +CERT_STORE_CTRL_CANCEL_NOTIFY = 5 +CERT_STORE_CTRL_INHIBIT_DUPLICATE_HANDLE_FLAG = 0x1 +CERT_STORE_CTRL_COMMIT_FORCE_FLAG = 0x1 +CERT_STORE_CTRL_COMMIT_CLEAR_FLAG = 0x2 +CERT_STORE_LOCALIZED_NAME_PROP_ID = 0x1000 +CERT_CREATE_CONTEXT_NOCOPY_FLAG = 0x1 +CERT_CREATE_CONTEXT_SORTED_FLAG = 0x2 +CERT_CREATE_CONTEXT_NO_HCRYPTMSG_FLAG = 0x4 +CERT_CREATE_CONTEXT_NO_ENTRY_FLAG = 0x8 + +CERT_PHYSICAL_STORE_ADD_ENABLE_FLAG = 0x1 +CERT_PHYSICAL_STORE_OPEN_DISABLE_FLAG = 0x2 +CERT_PHYSICAL_STORE_REMOTE_OPEN_DISABLE_FLAG = 0x4 +CERT_PHYSICAL_STORE_INSERT_COMPUTER_NAME_ENABLE_FLAG = 0x8 +CERT_PHYSICAL_STORE_PREDEFINED_ENUM_FLAG = 0x1 + +# Names of physical cert stores +CERT_PHYSICAL_STORE_DEFAULT_NAME = ".Default" 
+CERT_PHYSICAL_STORE_GROUP_POLICY_NAME = ".GroupPolicy" +CERT_PHYSICAL_STORE_LOCAL_MACHINE_NAME = ".LocalMachine" +CERT_PHYSICAL_STORE_DS_USER_CERTIFICATE_NAME = ".UserCertificate" +CERT_PHYSICAL_STORE_LOCAL_MACHINE_GROUP_POLICY_NAME = ".LocalMachineGroupPolicy" +CERT_PHYSICAL_STORE_ENTERPRISE_NAME = ".Enterprise" +CERT_PHYSICAL_STORE_AUTH_ROOT_NAME = ".AuthRoot" +CERT_PHYSICAL_STORE_SMART_CARD_NAME = ".SmartCard" + +CRYPT_OID_OPEN_SYSTEM_STORE_PROV_FUNC = "CertDllOpenSystemStoreProv" +CRYPT_OID_REGISTER_SYSTEM_STORE_FUNC = "CertDllRegisterSystemStore" +CRYPT_OID_UNREGISTER_SYSTEM_STORE_FUNC = "CertDllUnregisterSystemStore" +CRYPT_OID_ENUM_SYSTEM_STORE_FUNC = "CertDllEnumSystemStore" +CRYPT_OID_REGISTER_PHYSICAL_STORE_FUNC = "CertDllRegisterPhysicalStore" +CRYPT_OID_UNREGISTER_PHYSICAL_STORE_FUNC = "CertDllUnregisterPhysicalStore" +CRYPT_OID_ENUM_PHYSICAL_STORE_FUNC = "CertDllEnumPhysicalStore" +CRYPT_OID_SYSTEM_STORE_LOCATION_VALUE_NAME = "SystemStoreLocation" + +CMSG_TRUSTED_SIGNER_FLAG = 0x1 +CMSG_SIGNER_ONLY_FLAG = 0x2 +CMSG_USE_SIGNER_INDEX_FLAG = 0x4 +CMSG_CMS_ENCAPSULATED_CTL_FLAG = 0x00008000 +CMSG_ENCODE_SORTED_CTL_FLAG = 0x1 +CMSG_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x2 +CERT_VERIFY_INHIBIT_CTL_UPDATE_FLAG = 0x1 +CERT_VERIFY_TRUSTED_SIGNERS_FLAG = 0x2 +CERT_VERIFY_NO_TIME_CHECK_FLAG = 0x4 +CERT_VERIFY_ALLOW_MORE_USAGE_FLAG = 0x8 +CERT_VERIFY_UPDATED_CTL_FLAG = 0x1 +CERT_CONTEXT_REVOCATION_TYPE = 1 +CERT_VERIFY_REV_CHAIN_FLAG = 0x00000001 +CERT_VERIFY_CACHE_ONLY_BASED_REVOCATION = 0x00000002 +CERT_VERIFY_REV_ACCUMULATIVE_TIMEOUT_FLAG = 0x00000004 +CERT_UNICODE_IS_RDN_ATTRS_FLAG = 0x1 +CERT_CASE_INSENSITIVE_IS_RDN_ATTRS_FLAG = 0x2 +CRYPT_VERIFY_CERT_SIGN_SUBJECT_BLOB = 1 +CRYPT_VERIFY_CERT_SIGN_SUBJECT_CERT = 2 +CRYPT_VERIFY_CERT_SIGN_SUBJECT_CRL = 3 +CRYPT_VERIFY_CERT_SIGN_ISSUER_PUBKEY = 1 +CRYPT_VERIFY_CERT_SIGN_ISSUER_CERT = 2 +CRYPT_VERIFY_CERT_SIGN_ISSUER_CHAIN = 3 +CRYPT_VERIFY_CERT_SIGN_ISSUER_NULL = 4 +CRYPT_DEFAULT_CONTEXT_AUTO_RELEASE_FLAG = 0x00000001 +CRYPT_DEFAULT_CONTEXT_PROCESS_FLAG = 0x00000002 +CRYPT_DEFAULT_CONTEXT_CERT_SIGN_OID = 1 +CRYPT_DEFAULT_CONTEXT_MULTI_CERT_SIGN_OID = 2 +CRYPT_OID_EXPORT_PUBLIC_KEY_INFO_FUNC = "CryptDllExportPublicKeyInfoEx" +CRYPT_OID_IMPORT_PUBLIC_KEY_INFO_FUNC = "CryptDllImportPublicKeyInfoEx" +CRYPT_ACQUIRE_CACHE_FLAG = 0x00000001 +CRYPT_ACQUIRE_USE_PROV_INFO_FLAG = 0x00000002 +CRYPT_ACQUIRE_COMPARE_KEY_FLAG = 0x00000004 +CRYPT_ACQUIRE_SILENT_FLAG = 0x00000040 +CRYPT_FIND_USER_KEYSET_FLAG = 0x00000001 +CRYPT_FIND_MACHINE_KEYSET_FLAG = 0x00000002 +CRYPT_FIND_SILENT_KEYSET_FLAG = 0x00000040 +CRYPT_OID_IMPORT_PRIVATE_KEY_INFO_FUNC = "CryptDllImportPrivateKeyInfoEx" +CRYPT_OID_EXPORT_PRIVATE_KEY_INFO_FUNC = "CryptDllExportPrivateKeyInfoEx" +CRYPT_DELETE_KEYSET = CRYPT_DELETEKEYSET +CERT_SIMPLE_NAME_STR = 1 +CERT_OID_NAME_STR = 2 +CERT_X500_NAME_STR = 3 +CERT_NAME_STR_SEMICOLON_FLAG = 0x40000000 +CERT_NAME_STR_NO_PLUS_FLAG = 0x20000000 +CERT_NAME_STR_NO_QUOTING_FLAG = 0x10000000 +CERT_NAME_STR_CRLF_FLAG = 0x08000000 +CERT_NAME_STR_COMMA_FLAG = 0x04000000 +CERT_NAME_STR_REVERSE_FLAG = 0x02000000 +CERT_NAME_STR_DISABLE_IE4_UTF8_FLAG = 0x00010000 +CERT_NAME_STR_ENABLE_T61_UNICODE_FLAG = 0x00020000 +CERT_NAME_STR_ENABLE_UTF8_UNICODE_FLAG = 0x00040000 +CERT_NAME_EMAIL_TYPE = 1 +CERT_NAME_RDN_TYPE = 2 +CERT_NAME_ATTR_TYPE = 3 +CERT_NAME_SIMPLE_DISPLAY_TYPE = 4 +CERT_NAME_FRIENDLY_DISPLAY_TYPE = 5 +CERT_NAME_DNS_TYPE = 6 +CERT_NAME_URL_TYPE = 7 +CERT_NAME_UPN_TYPE = 8 +CERT_NAME_ISSUER_FLAG = 0x1 +CERT_NAME_DISABLE_IE4_UTF8_FLAG = 0x00010000 
+CRYPT_MESSAGE_BARE_CONTENT_OUT_FLAG = 0x00000001 +CRYPT_MESSAGE_ENCAPSULATED_CONTENT_OUT_FLAG = 0x00000002 +CRYPT_MESSAGE_KEYID_SIGNER_FLAG = 0x00000004 +CRYPT_MESSAGE_SILENT_KEYSET_FLAG = 0x00000040 +CRYPT_MESSAGE_KEYID_RECIPIENT_FLAG = 0x4 +CERT_QUERY_OBJECT_FILE = 0x00000001 +CERT_QUERY_OBJECT_BLOB = 0x00000002 +CERT_QUERY_CONTENT_CERT = 1 +CERT_QUERY_CONTENT_CTL = 2 +CERT_QUERY_CONTENT_CRL = 3 +CERT_QUERY_CONTENT_SERIALIZED_STORE = 4 +CERT_QUERY_CONTENT_SERIALIZED_CERT = 5 +CERT_QUERY_CONTENT_SERIALIZED_CTL = 6 +CERT_QUERY_CONTENT_SERIALIZED_CRL = 7 +CERT_QUERY_CONTENT_PKCS7_SIGNED = 8 +CERT_QUERY_CONTENT_PKCS7_UNSIGNED = 9 +CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED = 10 +CERT_QUERY_CONTENT_PKCS10 = 11 +CERT_QUERY_CONTENT_PFX = 12 +CERT_QUERY_CONTENT_CERT_PAIR = 13 +CERT_QUERY_CONTENT_FLAG_CERT = \ + ( 1 << CERT_QUERY_CONTENT_CERT) +CERT_QUERY_CONTENT_FLAG_CTL = \ + ( 1 << CERT_QUERY_CONTENT_CTL) +CERT_QUERY_CONTENT_FLAG_CRL = \ + ( 1 << CERT_QUERY_CONTENT_CRL) +CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE = \ + ( 1 << CERT_QUERY_CONTENT_SERIALIZED_STORE) +CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT = \ + ( 1 << CERT_QUERY_CONTENT_SERIALIZED_CERT) +CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL = \ + ( 1 << CERT_QUERY_CONTENT_SERIALIZED_CTL) +CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL = \ + ( 1 << CERT_QUERY_CONTENT_SERIALIZED_CRL) +CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED = \ + ( 1 << CERT_QUERY_CONTENT_PKCS7_SIGNED) +CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED = \ + ( 1 << CERT_QUERY_CONTENT_PKCS7_UNSIGNED) +CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED = \ + ( 1 << CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED) +CERT_QUERY_CONTENT_FLAG_PKCS10 = \ + ( 1 << CERT_QUERY_CONTENT_PKCS10) +CERT_QUERY_CONTENT_FLAG_PFX = \ + ( 1 << CERT_QUERY_CONTENT_PFX) +CERT_QUERY_CONTENT_FLAG_CERT_PAIR = \ + ( 1 << CERT_QUERY_CONTENT_CERT_PAIR) +CERT_QUERY_CONTENT_FLAG_ALL = \ + CERT_QUERY_CONTENT_FLAG_CERT | \ + CERT_QUERY_CONTENT_FLAG_CTL | \ + CERT_QUERY_CONTENT_FLAG_CRL | \ + CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE | \ + CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT | \ + CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL | \ + CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL | \ + CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED | \ + CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED | \ + CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED | \ + CERT_QUERY_CONTENT_FLAG_PKCS10 | \ + CERT_QUERY_CONTENT_FLAG_PFX | \ + CERT_QUERY_CONTENT_FLAG_CERT_PAIR +CERT_QUERY_FORMAT_BINARY = 1 +CERT_QUERY_FORMAT_BASE64_ENCODED = 2 +CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED = 3 +CERT_QUERY_FORMAT_FLAG_BINARY = \ + ( 1 << CERT_QUERY_FORMAT_BINARY) +CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED = \ + ( 1 << CERT_QUERY_FORMAT_BASE64_ENCODED) +CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED = \ + ( 1 << CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED) +CERT_QUERY_FORMAT_FLAG_ALL = \ + CERT_QUERY_FORMAT_FLAG_BINARY | \ + CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED | \ + CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED + +CREDENTIAL_OID_PASSWORD_CREDENTIALS_A = 1 +CREDENTIAL_OID_PASSWORD_CREDENTIALS_W = 2 +CREDENTIAL_OID_PASSWORD_CREDENTIALS = CREDENTIAL_OID_PASSWORD_CREDENTIALS_W + +SCHEME_OID_RETRIEVE_ENCODED_OBJECT_FUNC = "SchemeDllRetrieveEncodedObject" +SCHEME_OID_RETRIEVE_ENCODED_OBJECTW_FUNC = "SchemeDllRetrieveEncodedObjectW" +CONTEXT_OID_CREATE_OBJECT_CONTEXT_FUNC = "ContextDllCreateObjectContext" +CONTEXT_OID_CERTIFICATE = 1 +CONTEXT_OID_CRL = 2 +CONTEXT_OID_CTL = 3 +CONTEXT_OID_PKCS7 = 4 +CONTEXT_OID_CAPI2_ANY = 5 +CONTEXT_OID_OCSP_RESP = 6 + +CRYPT_RETRIEVE_MULTIPLE_OBJECTS = 0x00000001 +CRYPT_CACHE_ONLY_RETRIEVAL = 0x00000002 
+CRYPT_WIRE_ONLY_RETRIEVAL = 0x00000004 +CRYPT_DONT_CACHE_RESULT = 0x00000008 +CRYPT_ASYNC_RETRIEVAL = 0x00000010 +CRYPT_STICKY_CACHE_RETRIEVAL = 0x00001000 +CRYPT_LDAP_SCOPE_BASE_ONLY_RETRIEVAL = 0x00002000 +CRYPT_OFFLINE_CHECK_RETRIEVAL = 0x00004000 +CRYPT_LDAP_INSERT_ENTRY_ATTRIBUTE = 0x00008000 +CRYPT_LDAP_SIGN_RETRIEVAL = 0x00010000 +CRYPT_NO_AUTH_RETRIEVAL = 0x00020000 +CRYPT_LDAP_AREC_EXCLUSIVE_RETRIEVAL = 0x00040000 +CRYPT_AIA_RETRIEVAL = 0x00080000 +CRYPT_VERIFY_CONTEXT_SIGNATURE = 0x00000020 +CRYPT_VERIFY_DATA_HASH = 0x00000040 +CRYPT_KEEP_TIME_VALID = 0x00000080 +CRYPT_DONT_VERIFY_SIGNATURE = 0x00000100 +CRYPT_DONT_CHECK_TIME_VALIDITY = 0x00000200 +CRYPT_CHECK_FRESHNESS_TIME_VALIDITY = 0x00000400 +CRYPT_ACCUMULATIVE_TIMEOUT = 0x00000800 +CRYPT_PARAM_ASYNC_RETRIEVAL_COMPLETION = 1 +CRYPT_PARAM_CANCEL_ASYNC_RETRIEVAL = 2 +CRYPT_GET_URL_FROM_PROPERTY = 0x00000001 +CRYPT_GET_URL_FROM_EXTENSION = 0x00000002 +CRYPT_GET_URL_FROM_UNAUTH_ATTRIBUTE = 0x00000004 +CRYPT_GET_URL_FROM_AUTH_ATTRIBUTE = 0x00000008 +URL_OID_GET_OBJECT_URL_FUNC = "UrlDllGetObjectUrl" +TIME_VALID_OID_GET_OBJECT_FUNC = "TimeValidDllGetObject" +TIME_VALID_OID_FLUSH_OBJECT_FUNC = "TimeValidDllFlushObject" + +TIME_VALID_OID_GET_CTL = 1 +TIME_VALID_OID_GET_CRL = 2 +TIME_VALID_OID_GET_CRL_FROM_CERT = 3 +TIME_VALID_OID_GET_FRESHEST_CRL_FROM_CERT = 4 +TIME_VALID_OID_GET_FRESHEST_CRL_FROM_CRL = 5 + +TIME_VALID_OID_FLUSH_CTL = 1 +TIME_VALID_OID_FLUSH_CRL = 2 +TIME_VALID_OID_FLUSH_CRL_FROM_CERT = 3 +TIME_VALID_OID_FLUSH_FRESHEST_CRL_FROM_CERT = 4 +TIME_VALID_OID_FLUSH_FRESHEST_CRL_FROM_CRL = 5 + +CRYPTPROTECT_PROMPT_ON_UNPROTECT = 0x1 +CRYPTPROTECT_PROMPT_ON_PROTECT = 0x2 +CRYPTPROTECT_PROMPT_RESERVED = 0x04 +CRYPTPROTECT_PROMPT_STRONG = 0x08 +CRYPTPROTECT_PROMPT_REQUIRE_STRONG = 0x10 +CRYPTPROTECT_UI_FORBIDDEN = 0x1 +CRYPTPROTECT_LOCAL_MACHINE = 0x4 +CRYPTPROTECT_CRED_SYNC = 0x8 +CRYPTPROTECT_AUDIT = 0x10 +CRYPTPROTECT_NO_RECOVERY = 0x20 +CRYPTPROTECT_VERIFY_PROTECTION = 0x40 +CRYPTPROTECT_CRED_REGENERATE = 0x80 +CRYPTPROTECT_FIRST_RESERVED_FLAGVAL = 0x0FFFFFFF +CRYPTPROTECT_LAST_RESERVED_FLAGVAL = (-1) +CRYPTPROTECTMEMORY_BLOCK_SIZE = 16 +CRYPTPROTECTMEMORY_SAME_PROCESS = 0x00 +CRYPTPROTECTMEMORY_CROSS_PROCESS = 0x01 +CRYPTPROTECTMEMORY_SAME_LOGON = 0x02 +CERT_CREATE_SELFSIGN_NO_SIGN = 1 +CERT_CREATE_SELFSIGN_NO_KEY_INFO = 2 +CRYPT_KEYID_MACHINE_FLAG = 0x00000020 +CRYPT_KEYID_ALLOC_FLAG = 0x00008000 +CRYPT_KEYID_DELETE_FLAG = 0x00000010 +CRYPT_KEYID_SET_NEW_FLAG = 0x00002000 +CERT_CHAIN_MAX_AIA_URL_COUNT_IN_CERT_DEFAULT = 5 +CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_COUNT_PER_CHAIN_DEFAULT = 10 +CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_BYTE_COUNT_DEFAULT = 100000 +CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_CERT_COUNT_DEFAULT = 10 +CERT_CHAIN_CACHE_END_CERT = 0x00000001 +CERT_CHAIN_THREAD_STORE_SYNC = 0x00000002 +CERT_CHAIN_CACHE_ONLY_URL_RETRIEVAL = 0x00000004 +CERT_CHAIN_USE_LOCAL_MACHINE_STORE = 0x00000008 +CERT_CHAIN_ENABLE_CACHE_AUTO_UPDATE = 0x00000010 +CERT_CHAIN_ENABLE_SHARE_STORE = 0x00000020 +CERT_TRUST_NO_ERROR = 0x00000000 +CERT_TRUST_IS_NOT_TIME_VALID = 0x00000001 +CERT_TRUST_IS_NOT_TIME_NESTED = 0x00000002 +CERT_TRUST_IS_REVOKED = 0x00000004 +CERT_TRUST_IS_NOT_SIGNATURE_VALID = 0x00000008 +CERT_TRUST_IS_NOT_VALID_FOR_USAGE = 0x00000010 +CERT_TRUST_IS_UNTRUSTED_ROOT = 0x00000020 +CERT_TRUST_REVOCATION_STATUS_UNKNOWN = 0x00000040 +CERT_TRUST_IS_CYCLIC = 0x00000080 +CERT_TRUST_INVALID_EXTENSION = 0x00000100 +CERT_TRUST_INVALID_POLICY_CONSTRAINTS = 0x00000200 +CERT_TRUST_INVALID_BASIC_CONSTRAINTS = 0x00000400 
+CERT_TRUST_INVALID_NAME_CONSTRAINTS = 0x00000800 +CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT = 0x00001000 +CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT = 0x00002000 +CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT = 0x00004000 +CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT = 0x00008000 +CERT_TRUST_IS_OFFLINE_REVOCATION = 0x01000000 +CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY = 0x02000000 +CERT_TRUST_IS_PARTIAL_CHAIN = 0x00010000 +CERT_TRUST_CTL_IS_NOT_TIME_VALID = 0x00020000 +CERT_TRUST_CTL_IS_NOT_SIGNATURE_VALID = 0x00040000 +CERT_TRUST_CTL_IS_NOT_VALID_FOR_USAGE = 0x00080000 +CERT_TRUST_HAS_EXACT_MATCH_ISSUER = 0x00000001 +CERT_TRUST_HAS_KEY_MATCH_ISSUER = 0x00000002 +CERT_TRUST_HAS_NAME_MATCH_ISSUER = 0x00000004 +CERT_TRUST_IS_SELF_SIGNED = 0x00000008 +CERT_TRUST_HAS_PREFERRED_ISSUER = 0x00000100 +CERT_TRUST_HAS_ISSUANCE_CHAIN_POLICY = 0x00000200 +CERT_TRUST_HAS_VALID_NAME_CONSTRAINTS = 0x00000400 +CERT_TRUST_IS_COMPLEX_CHAIN = 0x00010000 +USAGE_MATCH_TYPE_AND = 0x00000000 +USAGE_MATCH_TYPE_OR = 0x00000001 +CERT_CHAIN_REVOCATION_CHECK_END_CERT = 0x10000000 +CERT_CHAIN_REVOCATION_CHECK_CHAIN = 0x20000000 +CERT_CHAIN_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT = 0x40000000 +CERT_CHAIN_REVOCATION_CHECK_CACHE_ONLY = (-2147483648) +CERT_CHAIN_REVOCATION_ACCUMULATIVE_TIMEOUT = 0x08000000 +CERT_CHAIN_DISABLE_PASS1_QUALITY_FILTERING = 0x00000040 +CERT_CHAIN_RETURN_LOWER_QUALITY_CONTEXTS = 0x00000080 +CERT_CHAIN_DISABLE_AUTH_ROOT_AUTO_UPDATE = 0x00000100 +CERT_CHAIN_TIMESTAMP_TIME = 0x00000200 +REVOCATION_OID_CRL_REVOCATION = 1 +CERT_CHAIN_FIND_BY_ISSUER = 1 +CERT_CHAIN_FIND_BY_ISSUER_COMPARE_KEY_FLAG = 0x0001 +CERT_CHAIN_FIND_BY_ISSUER_COMPLEX_CHAIN_FLAG = 0x0002 +CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_URL_FLAG = 0x0004 +CERT_CHAIN_FIND_BY_ISSUER_LOCAL_MACHINE_FLAG = 0x0008 +CERT_CHAIN_FIND_BY_ISSUER_NO_KEY_FLAG = 0x4000 +CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_FLAG = 0x8000 +CERT_CHAIN_POLICY_IGNORE_NOT_TIME_VALID_FLAG = 0x00000001 +CERT_CHAIN_POLICY_IGNORE_CTL_NOT_TIME_VALID_FLAG = 0x00000002 +CERT_CHAIN_POLICY_IGNORE_NOT_TIME_NESTED_FLAG = 0x00000004 +CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG = 0x00000008 +CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS = ( \ + CERT_CHAIN_POLICY_IGNORE_NOT_TIME_VALID_FLAG | \ + CERT_CHAIN_POLICY_IGNORE_CTL_NOT_TIME_VALID_FLAG | \ + CERT_CHAIN_POLICY_IGNORE_NOT_TIME_NESTED_FLAG \ + ) +CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG = 0x00000010 +CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG = 0x00000020 +CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG = 0x00000040 +CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG = 0x00000080 +CERT_CHAIN_POLICY_IGNORE_END_REV_UNKNOWN_FLAG = 0x00000100 +CERT_CHAIN_POLICY_IGNORE_CTL_SIGNER_REV_UNKNOWN_FLAG = 0x00000200 +CERT_CHAIN_POLICY_IGNORE_CA_REV_UNKNOWN_FLAG = 0x00000400 +CERT_CHAIN_POLICY_IGNORE_ROOT_REV_UNKNOWN_FLAG = 0x00000800 +CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS = ( \ + CERT_CHAIN_POLICY_IGNORE_END_REV_UNKNOWN_FLAG | \ + CERT_CHAIN_POLICY_IGNORE_CTL_SIGNER_REV_UNKNOWN_FLAG | \ + CERT_CHAIN_POLICY_IGNORE_CA_REV_UNKNOWN_FLAG | \ + CERT_CHAIN_POLICY_IGNORE_ROOT_REV_UNKNOWN_FLAG \ + ) +CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG = 0x00008000 +CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG = 0x00004000 +CRYPT_OID_VERIFY_CERTIFICATE_CHAIN_POLICY_FUNC = \ + "CertDllVerifyCertificateChainPolicy" +AUTHTYPE_CLIENT = 1 +AUTHTYPE_SERVER = 2 +BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_CA_FLAG = (-2147483648) +BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_END_ENTITY_FLAG = 0x40000000 +MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG = 0x00010000 +CRYPT_STRING_BASE64HEADER = 
0x00000000 +CRYPT_STRING_BASE64 = 0x00000001 +CRYPT_STRING_BINARY = 0x00000002 +CRYPT_STRING_BASE64REQUESTHEADER = 0x00000003 +CRYPT_STRING_HEX = 0x00000004 +CRYPT_STRING_HEXASCII = 0x00000005 +CRYPT_STRING_BASE64_ANY = 0x00000006 +CRYPT_STRING_ANY = 0x00000007 +CRYPT_STRING_HEX_ANY = 0x00000008 +CRYPT_STRING_BASE64X509CRLHEADER = 0x00000009 +CRYPT_STRING_HEXADDR = 0x0000000a +CRYPT_STRING_HEXASCIIADDR = 0x0000000b +CRYPT_STRING_NOCR = (-2147483648) +CRYPT_USER_KEYSET = 0x00001000 +PKCS12_IMPORT_RESERVED_MASK = (-65536) +REPORT_NO_PRIVATE_KEY = 0x0001 +REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY = 0x0002 +EXPORT_PRIVATE_KEYS = 0x0004 +PKCS12_EXPORT_RESERVED_MASK = (-65536) + +# Certificate store provider types used with CertOpenStore +CERT_STORE_PROV_MSG = 1 +CERT_STORE_PROV_MEMORY = 2 +CERT_STORE_PROV_FILE = 3 +CERT_STORE_PROV_REG = 4 +CERT_STORE_PROV_PKCS7 = 5 +CERT_STORE_PROV_SERIALIZED = 6 +CERT_STORE_PROV_FILENAME = 8 +CERT_STORE_PROV_SYSTEM = 10 +CERT_STORE_PROV_COLLECTION = 11 +CERT_STORE_PROV_SYSTEM_REGISTRY = 13 +CERT_STORE_PROV_PHYSICAL = 14 +CERT_STORE_PROV_SMART_CARD = 15 +CERT_STORE_PROV_LDAP = 16 + +URL_OID_CERTIFICATE_ISSUER = 1 +URL_OID_CERTIFICATE_CRL_DIST_POINT = 2 +URL_OID_CTL_ISSUER = 3 +URL_OID_CTL_NEXT_UPDATE = 4 +URL_OID_CRL_ISSUER = 5 +URL_OID_CERTIFICATE_FRESHEST_CRL = 6 +URL_OID_CRL_FRESHEST_CRL = 7 +URL_OID_CROSS_CERT_DIST_POINT = 8 +URL_OID_CERTIFICATE_OCSP = 9 +URL_OID_CERTIFICATE_OCSP_AND_CRL_DIST_POINT = 10 +URL_OID_CERTIFICATE_CRL_DIST_POINT_AND_OCSP = 11 +URL_OID_CROSS_CERT_SUBJECT_INFO_ACCESS = 12 +URL_OID_CERTIFICATE_ONLY_OCSP = 13 diff --git a/venv/Lib/site-packages/win32/lib/win32evtlogutil.py b/venv/Lib/site-packages/win32/lib/win32evtlogutil.py new file mode 100644 index 00000000..5f9cb551 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32evtlogutil.py @@ -0,0 +1,152 @@ +"""Event Log Utilities - helper for win32evtlog.pyd +""" + +import win32api, win32con, winerror, win32evtlog + +error = win32api.error # The error the evtlog module raises. + +langid = win32api.MAKELANGID(win32con.LANG_NEUTRAL, win32con.SUBLANG_NEUTRAL) + +def AddSourceToRegistry(appName, msgDLL = None, eventLogType = "Application", eventLogFlags = None): + """Add a source of messages to the event log. + + Allows Python program to register a custom source of messages in the + registry. You must also provide the DLL name that has the message table, so the + full message text appears in the event log. + + Note that the win32evtlog.pyd file has a number of string entries with just "%1" + built in, so many Python programs can simply use this DLL. Disadvantages are that + you do not get language translation, and the full text is stored in the event log, + blowing the size of the log up. + """ + + # When an application uses the RegisterEventSource or OpenEventLog + # function to get a handle of an event log, the event loggging service + # searches for the specified source name in the registry. You can add a + # new source name to the registry by opening a new registry subkey + # under the Application key and adding registry values to the new + # subkey. + + if msgDLL is None: + msgDLL = win32evtlog.__file__ + + # Create a new key for our application + hkey = win32api.RegCreateKey(win32con.HKEY_LOCAL_MACHINE, \ + "SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s" % (eventLogType, appName)) + + # Add the Event-ID message-file name to the subkey. 
+ win32api.RegSetValueEx(hkey, + "EventMessageFile", # value name \ + 0, # reserved \ + win32con.REG_EXPAND_SZ,# value type \ + msgDLL) + + # Set the supported types flags and add it to the subkey. + if eventLogFlags is None: + eventLogFlags = win32evtlog.EVENTLOG_ERROR_TYPE | win32evtlog.EVENTLOG_WARNING_TYPE | win32evtlog.EVENTLOG_INFORMATION_TYPE + win32api.RegSetValueEx(hkey, # subkey handle \ + "TypesSupported", # value name \ + 0, # reserved \ + win32con.REG_DWORD, # value type \ + eventLogFlags) + win32api.RegCloseKey(hkey) + +def RemoveSourceFromRegistry(appName, eventLogType = "Application"): + """Removes a source of messages from the event log. + """ + + # Delete our key + try: + win32api.RegDeleteKey(win32con.HKEY_LOCAL_MACHINE, \ + "SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s" % (eventLogType, appName)) + except win32api.error as exc: + if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: + raise + + +def ReportEvent(appName, eventID, eventCategory = 0, eventType=win32evtlog.EVENTLOG_ERROR_TYPE, strings = None, data = None, sid=None): + """Report an event for a previously added event source. + """ + # Get a handle to the Application event log + hAppLog = win32evtlog.RegisterEventSource(None, appName) + + # Now report the event, which will add this event to the event log */ + win32evtlog.ReportEvent(hAppLog, # event-log handle \ + eventType, + eventCategory, + eventID, + sid, + strings, + data) + + win32evtlog.DeregisterEventSource(hAppLog); + +def FormatMessage( eventLogRecord, logType="Application" ): + """Given a tuple from ReadEventLog, and optionally where the event + record came from, load the message, and process message inserts. + + Note that this function may raise win32api.error. See also the + function SafeFormatMessage which will return None if the message can + not be processed. + """ + + # From the event log source name, we know the name of the registry + # key to look under for the name of the message DLL that contains + # the messages we need to extract with FormatMessage. So first get + # the event log source name... + keyName = "SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s" % (logType, eventLogRecord.SourceName) + + # Now open this key and get the EventMessageFile value, which is + # the name of the message DLL. + handle = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, keyName) + try: + dllNames = win32api.RegQueryValueEx(handle, "EventMessageFile")[0].split(";") + # Win2k etc appear to allow multiple DLL names + data = None + for dllName in dllNames: + try: + # Expand environment variable strings in the message DLL path name, + # in case any are there. + dllName = win32api.ExpandEnvironmentStrings(dllName) + + dllHandle = win32api.LoadLibraryEx(dllName, 0, win32con.LOAD_LIBRARY_AS_DATAFILE) + try: + data = win32api.FormatMessageW(win32con.FORMAT_MESSAGE_FROM_HMODULE, + dllHandle, eventLogRecord.EventID, langid, eventLogRecord.StringInserts) + finally: + win32api.FreeLibrary(dllHandle) + except win32api.error: + pass # Not in this DLL - try the next + if data is not None: + break + finally: + win32api.RegCloseKey(handle) + return data or '' # Don't want "None" ever being returned. + +def SafeFormatMessage( eventLogRecord, logType=None ): + """As for FormatMessage, except returns an error message if + the message can not be processed. 
+    """
+    if logType is None: logType = "Application"
+    try:
+        return FormatMessage(eventLogRecord, logType)
+    except win32api.error:
+        if eventLogRecord.StringInserts is None:
+            desc = ""
+        else:
+            desc = ", ".join(eventLogRecord.StringInserts)
+        return "<The description for Event ID ( %d ) in Source ( %r ) could not be found. It contains the following insertion string(s):%r.>" % (winerror.HRESULT_CODE(eventLogRecord.EventID), eventLogRecord.SourceName, desc)
+
+def FeedEventLogRecords(feeder, machineName = None, logName = "Application", readFlags = None):
+    if readFlags is None:
+        readFlags = win32evtlog.EVENTLOG_BACKWARDS_READ|win32evtlog.EVENTLOG_SEQUENTIAL_READ
+
+    h=win32evtlog.OpenEventLog(machineName, logName)
+    try:
+        while 1:
+            objects = win32evtlog.ReadEventLog(h, readFlags, 0)
+            if not objects:
+                break
+            map(lambda item, feeder = feeder: feeder(*(item,)), objects)
+    finally:
+        win32evtlog.CloseEventLog(h)
diff --git a/venv/Lib/site-packages/win32/lib/win32gui_struct.py b/venv/Lib/site-packages/win32/lib/win32gui_struct.py
new file mode 100644
index 00000000..a4963112
--- /dev/null
+++ b/venv/Lib/site-packages/win32/lib/win32gui_struct.py
@@ -0,0 +1,729 @@
+# This is a work in progress - see Demos/win32gui_menu.py
+
+# win32gui_struct.py - helpers for working with various win32gui structures.
+# As win32gui is "light-weight", it does not define objects for all possible
+# win32 structures - in general, "buffer" objects are passed around - it is
+# the callers responsibility to pack the buffer in the correct format.
+#
+# This module defines some helpers for the commonly used structures.
+#
+# In general, each structure has 3 functions:
+#
+# buffer, extras = PackSTRUCTURE(items, ...)
+# item, ... = UnpackSTRUCTURE(buffer)
+# buffer, extras = EmtpySTRUCTURE(...)
+#
+# 'extras' is always items that must be held along with the buffer, as the
+# buffer refers to these object's memory.
+# For structures that support a 'mask', this mask is hidden from the user - if
+# 'None' is passed, the mask flag will not be set, or on return, None will
+# be returned for the value if the mask is not set.
+#
+# NOTE: I considered making these structures look like real classes, and
+# support 'attributes' etc - however, ctypes already has a good structure
+# mechanism - I think it makes more sense to support ctype structures
+# at the win32gui level, then there will be no need for this module at all.
+# XXX - the above makes sense in terms of what is built and passed to
+# win32gui (ie, the Pack* functions) - but doesn't make as much sense for
+# the Unpack* functions, where the aim is user convenience.
+
+import sys
+import win32gui
+import win32con
+import struct
+import array
+import commctrl
+import pywintypes
+
+is64bit = "64 bit" in sys.version
+
+try:
+    from collections import namedtuple
+    def _MakeResult(names_str, values):
+        names = names_str.split()
+        nt = namedtuple(names[0], names[1:])
+        return nt(*values)
+except ImportError:
+    # no namedtuple support - just return the values as a normal tuple.
+    def _MakeResult(names_str, values):
+        return values
+
+_nmhdr_fmt = "PPi"
+if is64bit:
+    # When the item past the NMHDR gets aligned (eg, when it is a struct)
+    # we need this many bytes padding.
+    _nmhdr_align_padding = "xxxx"
+else:
+    _nmhdr_align_padding = ""
+
+# Encode a string suitable for passing in a win32gui related structure
+# If win32gui is built with UNICODE defined (ie, py3k), then functions
+# like InsertMenuItem are actually calling InsertMenuItemW etc, so all
+# strings will need to be unicode.
+if win32gui.UNICODE: + def _make_text_buffer(text): + # XXX - at this stage win32gui.UNICODE is only True in py3k, + # and in py3k is makes sense to reject bytes. + if not isinstance(text, str): + raise TypeError('MENUITEMINFO text must be unicode') + data = (text+'\0').encode("utf-16le") + return array.array("b", data) + +else: + def _make_text_buffer(text): + if isinstance(text, str): + text = text.encode("mbcs") + return array.array("b", text+'\0') + +# make an 'empty' buffer, ready for filling with cch characters. +def _make_empty_text_buffer(cch): + return _make_text_buffer("\0" * cch) + +if sys.version_info < (3,0): + def _make_memory(ob): + return str(buffer(ob)) + + def _make_bytes(sval): + return sval +else: + def _make_memory(ob): + return bytes(memoryview(ob)) + + def _make_bytes(sval): + return sval.encode('ascii') + +# Generic WM_NOTIFY unpacking +def UnpackWMNOTIFY(lparam): + format = "PPi" + buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) + return _MakeResult("WMNOTIFY hwndFrom idFrom code", struct.unpack(format, buf)) + +def UnpackNMITEMACTIVATE(lparam): + format = _nmhdr_fmt + _nmhdr_align_padding + if is64bit: + # the struct module doesn't handle this correctly as some of the items + # are actually structs in structs, which get individually aligned. + format = format + "iiiiiiixxxxP" + else: + format = format + "iiiiiiiP" + buf = win32gui.PyMakeBuffer(struct.calcsize(format), lparam) + return _MakeResult("NMITEMACTIVATE hwndFrom idFrom code iItem iSubItem uNewState uOldState uChanged actionx actiony lParam", + struct.unpack(format, buf)) + +# MENUITEMINFO struct +# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/WinUI/WindowsUserInterface/Resources/Menus/MenuReference/MenuStructures/MENUITEMINFO.asp +# We use the struct module to pack and unpack strings as MENUITEMINFO +# structures. We also have special handling for the 'fMask' item in that +# structure to avoid the caller needing to explicitly check validity +# (None is used if the mask excludes/should exclude the value) +_menuiteminfo_fmt = '5i5PiP' + +def PackMENUITEMINFO(fType=None, fState=None, wID=None, hSubMenu=None, + hbmpChecked=None, hbmpUnchecked=None, dwItemData=None, + text=None, hbmpItem=None, dwTypeData=None): + # 'extras' are objects the caller must keep a reference to (as their + # memory is used) for the lifetime of the INFO item. + extras = [] + # ack - dwItemData and dwTypeData were confused for a while... + assert dwItemData is None or dwTypeData is None, \ + "sorry - these were confused - you probably want dwItemData" + # if we are a long way past 209, then we can nuke the above... 
+ if dwTypeData is not None: + import warnings + warnings.warn("PackMENUITEMINFO: please use dwItemData instead of dwTypeData") + if dwItemData is None: + dwItemData = dwTypeData or 0 + + fMask = 0 + if fType is None: fType = 0 + else: fMask |= win32con.MIIM_FTYPE + if fState is None: fState = 0 + else: fMask |= win32con.MIIM_STATE + if wID is None: wID = 0 + else: fMask |= win32con.MIIM_ID + if hSubMenu is None: hSubMenu = 0 + else: fMask |= win32con.MIIM_SUBMENU + if hbmpChecked is None: + assert hbmpUnchecked is None, \ + "neither or both checkmark bmps must be given" + hbmpChecked = hbmpUnchecked = 0 + else: + assert hbmpUnchecked is not None, \ + "neither or both checkmark bmps must be given" + fMask |= win32con.MIIM_CHECKMARKS + if dwItemData is None: dwItemData = 0 + else: fMask |= win32con.MIIM_DATA + if hbmpItem is None: hbmpItem = 0 + else: fMask |= win32con.MIIM_BITMAP + if text is not None: + fMask |= win32con.MIIM_STRING + str_buf = _make_text_buffer(text) + cch = len(text) + # We are taking address of strbuf - it must not die until windows + # has finished with our structure. + lptext = str_buf.buffer_info()[0] + extras.append(str_buf) + else: + lptext = 0 + cch = 0 + # Create the struct. + # 'P' format does not accept PyHANDLE's ! + item = struct.pack( + _menuiteminfo_fmt, + struct.calcsize(_menuiteminfo_fmt), # cbSize + fMask, + fType, + fState, + wID, + int(hSubMenu), + int(hbmpChecked), + int(hbmpUnchecked), + dwItemData, + lptext, + cch, + int(hbmpItem) + ) + # Now copy the string to a writable buffer, so that the result + # could be passed to a 'Get' function + return array.array("b", item), extras + +def UnpackMENUITEMINFO(s): + (cb, + fMask, + fType, + fState, + wID, + hSubMenu, + hbmpChecked, + hbmpUnchecked, + dwItemData, + lptext, + cch, + hbmpItem) = struct.unpack(_menuiteminfo_fmt, s) + assert cb==len(s) + if fMask & win32con.MIIM_FTYPE==0: fType = None + if fMask & win32con.MIIM_STATE==0: fState = None + if fMask & win32con.MIIM_ID==0: wID = None + if fMask & win32con.MIIM_SUBMENU==0: hSubMenu = None + if fMask & win32con.MIIM_CHECKMARKS==0: hbmpChecked = hbmpUnchecked = None + if fMask & win32con.MIIM_DATA==0: dwItemData = None + if fMask & win32con.MIIM_BITMAP==0: hbmpItem = None + if fMask & win32con.MIIM_STRING: + text = win32gui.PyGetString(lptext, cch) + else: + text = None + return _MakeResult("MENUITEMINFO fType fState wID hSubMenu hbmpChecked " + "hbmpUnchecked dwItemData text hbmpItem", + (fType, fState, wID, hSubMenu, hbmpChecked, hbmpUnchecked, \ + dwItemData, text, hbmpItem)) + +def EmptyMENUITEMINFO(mask = None, text_buf_size=512): + # text_buf_size is number of *characters* - not necessarily no of bytes. + extra = [] + if mask is None: + mask = win32con.MIIM_BITMAP | win32con.MIIM_CHECKMARKS | \ + win32con.MIIM_DATA | win32con.MIIM_FTYPE | \ + win32con.MIIM_ID | win32con.MIIM_STATE | \ + win32con.MIIM_STRING | win32con.MIIM_SUBMENU + # Note: No MIIM_TYPE - this screws win2k/98. 
+ + if mask & win32con.MIIM_STRING: + text_buffer = _make_empty_text_buffer(text_buf_size) + extra.append(text_buffer) + text_addr, _ = text_buffer.buffer_info() + else: + text_addr = text_buf_size = 0 + + # Now copy the string to a writable buffer, so that the result + # could be passed to a 'Get' function + buf = struct.pack( + _menuiteminfo_fmt, + struct.calcsize(_menuiteminfo_fmt), # cbSize + mask, + 0, #fType, + 0, #fState, + 0, #wID, + 0, #hSubMenu, + 0, #hbmpChecked, + 0, #hbmpUnchecked, + 0, #dwItemData, + text_addr, + text_buf_size, + 0, #hbmpItem + ) + return array.array("b", buf), extra + +# MENUINFO struct +_menuinfo_fmt = 'iiiiPiP' + +def PackMENUINFO(dwStyle = None, cyMax = None, + hbrBack = None, dwContextHelpID = None, dwMenuData = None, + fMask = 0): + if dwStyle is None: dwStyle = 0 + else: fMask |= win32con.MIM_STYLE + if cyMax is None: cyMax = 0 + else: fMask |= win32con.MIM_MAXHEIGHT + if hbrBack is None: hbrBack = 0 + else: fMask |= win32con.MIM_BACKGROUND + if dwContextHelpID is None: dwContextHelpID = 0 + else: fMask |= win32con.MIM_HELPID + if dwMenuData is None: dwMenuData = 0 + else: fMask |= win32con.MIM_MENUDATA + # Create the struct. + item = struct.pack( + _menuinfo_fmt, + struct.calcsize(_menuinfo_fmt), # cbSize + fMask, + dwStyle, + cyMax, + hbrBack, + dwContextHelpID, + dwMenuData) + return array.array("b", item) + +def UnpackMENUINFO(s): + (cb, + fMask, + dwStyle, + cyMax, + hbrBack, + dwContextHelpID, + dwMenuData) = struct.unpack(_menuinfo_fmt, s) + assert cb==len(s) + if fMask & win32con.MIM_STYLE==0: dwStyle = None + if fMask & win32con.MIM_MAXHEIGHT==0: cyMax = None + if fMask & win32con.MIM_BACKGROUND==0: hbrBack = None + if fMask & win32con.MIM_HELPID==0: dwContextHelpID = None + if fMask & win32con.MIM_MENUDATA==0: dwMenuData = None + return _MakeResult("MENUINFO dwStyle cyMax hbrBack dwContextHelpID dwMenuData", + (dwStyle, cyMax, hbrBack, dwContextHelpID, dwMenuData)) + +def EmptyMENUINFO(mask = None): + if mask is None: + mask = win32con.MIM_STYLE | win32con.MIM_MAXHEIGHT| \ + win32con.MIM_BACKGROUND | win32con.MIM_HELPID | \ + win32con.MIM_MENUDATA + + buf = struct.pack( + _menuinfo_fmt, + struct.calcsize(_menuinfo_fmt), # cbSize + mask, + 0, #dwStyle + 0, #cyMax + 0, #hbrBack, + 0, #dwContextHelpID, + 0, #dwMenuData, + ) + return array.array("b", buf) + +########################################################################## +# +# Tree View structure support - TVITEM, TVINSERTSTRUCT and TVDISPINFO +# +########################################################################## + +# XXX - Note that the following implementation of TreeView structures is ripped +# XXX - from the SpamBayes project. It may not quite work correctly yet - I +# XXX - intend checking them later - but having them is better than not at all! + +_tvitem_fmt = "iPiiPiiiiP" +# Helpers for the ugly win32 structure packing/unpacking +# XXX - Note that functions using _GetMaskAndVal run 3x faster if they are +# 'inlined' into the function - see PackLVITEM. 
If the profiler points at +# _GetMaskAndVal(), you should nuke it (patches welcome once they have been +# tested) +def _GetMaskAndVal(val, default, mask, flag): + if val is None: + return mask, default + else: + if flag is not None: + mask |= flag + return mask, val + +def PackTVINSERTSTRUCT(parent, insertAfter, tvitem): + tvitem_buf, extra = PackTVITEM(*tvitem) + tvitem_buf = tvitem_buf.tobytes() + format = "PP%ds" % len(tvitem_buf) + return struct.pack(format, parent, insertAfter, tvitem_buf), extra + +def PackTVITEM(hitem, state, stateMask, text, image, selimage, citems, param): + extra = [] # objects we must keep references to + mask = 0 + mask, hitem = _GetMaskAndVal(hitem, 0, mask, commctrl.TVIF_HANDLE) + mask, state = _GetMaskAndVal(state, 0, mask, commctrl.TVIF_STATE) + if not mask & commctrl.TVIF_STATE: + stateMask = 0 + mask, text = _GetMaskAndVal(text, None, mask, commctrl.TVIF_TEXT) + mask, image = _GetMaskAndVal(image, 0, mask, commctrl.TVIF_IMAGE) + mask, selimage = _GetMaskAndVal(selimage, 0, mask, commctrl.TVIF_SELECTEDIMAGE) + mask, citems = _GetMaskAndVal(citems, 0, mask, commctrl.TVIF_CHILDREN) + mask, param = _GetMaskAndVal(param, 0, mask, commctrl.TVIF_PARAM) + if text is None: + text_addr = text_len = 0 + else: + text_buffer = _make_text_buffer(text) + text_len = len(text) + extra.append(text_buffer) + text_addr, _ = text_buffer.buffer_info() + buf = struct.pack(_tvitem_fmt, + mask, hitem, + state, stateMask, + text_addr, text_len, # text + image, selimage, + citems, param) + return array.array("b", buf), extra + +# Make a new buffer suitable for querying hitem's attributes. +def EmptyTVITEM(hitem, mask = None, text_buf_size=512): + extra = [] # objects we must keep references to + if mask is None: + mask = commctrl.TVIF_HANDLE | commctrl.TVIF_STATE | commctrl.TVIF_TEXT | \ + commctrl.TVIF_IMAGE | commctrl.TVIF_SELECTEDIMAGE | \ + commctrl.TVIF_CHILDREN | commctrl.TVIF_PARAM + if mask & commctrl.TVIF_TEXT: + text_buffer = _make_empty_text_buffer(text_buf_size) + extra.append(text_buffer) + text_addr, _ = text_buffer.buffer_info() + else: + text_addr = text_buf_size = 0 + buf = struct.pack(_tvitem_fmt, + mask, hitem, + 0, 0, + text_addr, text_buf_size, # text + 0, 0, + 0, 0) + return array.array("b", buf), extra + +def UnpackTVITEM(buffer): + item_mask, item_hItem, item_state, item_stateMask, \ + item_textptr, item_cchText, item_image, item_selimage, \ + item_cChildren, item_param = struct.unpack(_tvitem_fmt, buffer) + # ensure only items listed by the mask are valid (except we assume the + # handle is always valid - some notifications (eg, TVN_ENDLABELEDIT) set a + # mask that doesn't include the handle, but the docs explicity say it is.) 
+ if not (item_mask & commctrl.TVIF_TEXT): item_textptr = item_cchText = None + if not (item_mask & commctrl.TVIF_CHILDREN): item_cChildren = None + if not (item_mask & commctrl.TVIF_IMAGE): item_image = None + if not (item_mask & commctrl.TVIF_PARAM): item_param = None + if not (item_mask & commctrl.TVIF_SELECTEDIMAGE): item_selimage = None + if not (item_mask & commctrl.TVIF_STATE): item_state = item_stateMask = None + + if item_textptr: + text = win32gui.PyGetString(item_textptr) + else: + text = None + return _MakeResult("TVITEM item_hItem item_state item_stateMask " + "text item_image item_selimage item_cChildren item_param", + (item_hItem, item_state, item_stateMask, text, + item_image, item_selimage, item_cChildren, item_param)) + +# Unpack the lparm from a "TVNOTIFY" message +def UnpackTVNOTIFY(lparam): + item_size = struct.calcsize(_tvitem_fmt) + format = _nmhdr_fmt + _nmhdr_align_padding + if is64bit: + format = format + "ixxxx" + else: + format = format + "i" + format = format + "%ds%ds" % (item_size, item_size) + buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) + hwndFrom, id, code, action, buf_old, buf_new \ + = struct.unpack(format, buf) + item_old = UnpackTVITEM(buf_old) + item_new = UnpackTVITEM(buf_new) + return _MakeResult("TVNOTIFY hwndFrom id code action item_old item_new", + (hwndFrom, id, code, action, item_old, item_new)) + +def UnpackTVDISPINFO(lparam): + item_size = struct.calcsize(_tvitem_fmt) + format = "PPi%ds" % (item_size,) + buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) + hwndFrom, id, code, buf_item = struct.unpack(format, buf) + item = UnpackTVITEM(buf_item) + return _MakeResult("TVDISPINFO hwndFrom id code item", + (hwndFrom, id, code, item)) + +# +# List view items +_lvitem_fmt = "iiiiiPiiPi" + +def PackLVITEM(item=None, subItem=None, state=None, stateMask=None, text=None, image=None, param=None, indent=None): + extra = [] # objects we must keep references to + mask = 0 + # _GetMaskAndVal adds quite a bit of overhead to this function. 
+ if item is None: item = 0 # No mask for item + if subItem is None: subItem = 0 # No mask for sibItem + if state is None: + state = 0 + stateMask = 0 + else: + mask |= commctrl.LVIF_STATE + if stateMask is None: stateMask = state + + if image is None: image = 0 + else: mask |= commctrl.LVIF_IMAGE + if param is None: param = 0 + else: mask |= commctrl.LVIF_PARAM + if indent is None: indent = 0 + else: mask |= commctrl.LVIF_INDENT + + if text is None: + text_addr = text_len = 0 + else: + mask |= commctrl.LVIF_TEXT + text_buffer = _make_text_buffer(text) + text_len = len(text) + extra.append(text_buffer) + text_addr, _ = text_buffer.buffer_info() + buf = struct.pack(_lvitem_fmt, + mask, item, subItem, + state, stateMask, + text_addr, text_len, # text + image, param, indent) + return array.array("b", buf), extra + +def UnpackLVITEM(buffer): + item_mask, item_item, item_subItem, \ + item_state, item_stateMask, \ + item_textptr, item_cchText, item_image, \ + item_param, item_indent = struct.unpack(_lvitem_fmt, buffer) + # ensure only items listed by the mask are valid + if not (item_mask & commctrl.LVIF_TEXT): item_textptr = item_cchText = None + if not (item_mask & commctrl.LVIF_IMAGE): item_image = None + if not (item_mask & commctrl.LVIF_PARAM): item_param = None + if not (item_mask & commctrl.LVIF_INDENT): item_indent = None + if not (item_mask & commctrl.LVIF_STATE): item_state = item_stateMask = None + + if item_textptr: + text = win32gui.PyGetString(item_textptr) + else: + text = None + return _MakeResult("LVITEM item_item item_subItem item_state " + "item_stateMask text item_image item_param item_indent", + (item_item, item_subItem, item_state, item_stateMask, + text, item_image, item_param, item_indent)) + +# Unpack an "LVNOTIFY" message +def UnpackLVDISPINFO(lparam): + item_size = struct.calcsize(_lvitem_fmt) + format = _nmhdr_fmt + _nmhdr_align_padding + ("%ds" % (item_size,)) + buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) + hwndFrom, id, code, buf_item = struct.unpack(format, buf) + item = UnpackLVITEM(buf_item) + return _MakeResult("LVDISPINFO hwndFrom id code item", + (hwndFrom, id, code, item)) + +def UnpackLVNOTIFY(lparam): + format = _nmhdr_fmt + _nmhdr_align_padding + "7i" + if is64bit: + format = format + "xxxx" # point needs padding. + format = format + "P" + buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) + hwndFrom, id, code, item, subitem, newstate, oldstate, \ + changed, pt_x, pt_y, lparam = struct.unpack(format, buf) + return _MakeResult("UnpackLVNOTIFY hwndFrom id code item subitem " + "newstate oldstate changed pt lparam", + (hwndFrom, id, code, item, subitem, newstate, oldstate, + changed, (pt_x, pt_y), lparam)) + + +# Make a new buffer suitable for querying an items attributes. 
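(An illustrative aside, not part of the vendored pywin32 file: the list-view helpers above are normally used in pairs, and because PackLVITEM and UnpackLVITEM both live in this module, a round trip can be sketched with nothing but the module itself, assuming pywin32 is installed. The EmptyLVITEM / EmptyLVCOLUMN query helpers continue below.)

    import win32gui_struct

    # Pack an LVITEM for row 0, column 0 carrying some text. 'extra' owns the
    # text buffer, so it must stay referenced while the packed item is in use.
    buf, extra = win32gui_struct.PackLVITEM(item=0, subItem=0, text="hello")

    # Decode the same bytes again; UnpackLVITEM follows the text pointer, which
    # is still valid because 'extra' is still alive.
    decoded = win32gui_struct.UnpackLVITEM(buf.tobytes())
    print(decoded.text)  # -> hello

    # In real use, buf.buffer_info()[0] is what gets passed as the LPARAM of an
    # LVM_INSERTITEM / LVM_SETITEM SendMessage call against a list-view HWND.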
+def EmptyLVITEM(item, subitem, mask = None, text_buf_size=512): + extra = [] # objects we must keep references to + if mask is None: + mask = commctrl.LVIF_IMAGE | commctrl.LVIF_INDENT | commctrl.LVIF_TEXT | \ + commctrl.LVIF_PARAM | commctrl.LVIF_STATE + if mask & commctrl.LVIF_TEXT: + text_buffer = _make_empty_text_buffer(text_buf_size) + extra.append(text_buffer) + text_addr, _ = text_buffer.buffer_info() + else: + text_addr = text_buf_size = 0 + buf = struct.pack(_lvitem_fmt, + mask, item, subitem, + 0, 0, + text_addr, text_buf_size, # text + 0, 0, 0) + return array.array("b", buf), extra + + +# List view column structure +_lvcolumn_fmt = "iiiPiiii" +def PackLVCOLUMN(fmt=None, cx=None, text=None, subItem=None, image=None, order=None): + extra = [] # objects we must keep references to + mask = 0 + mask, fmt = _GetMaskAndVal(fmt, 0, mask, commctrl.LVCF_FMT) + mask, cx = _GetMaskAndVal(cx, 0, mask, commctrl.LVCF_WIDTH) + mask, text = _GetMaskAndVal(text, None, mask, commctrl.LVCF_TEXT) + mask, subItem = _GetMaskAndVal(subItem, 0, mask, commctrl.LVCF_SUBITEM) + mask, image = _GetMaskAndVal(image, 0, mask, commctrl.LVCF_IMAGE) + mask, order= _GetMaskAndVal(order, 0, mask, commctrl.LVCF_ORDER) + if text is None: + text_addr = text_len = 0 + else: + text_buffer = _make_text_buffer(text) + extra.append(text_buffer) + text_addr, _ = text_buffer.buffer_info() + text_len = len(text) + buf = struct.pack(_lvcolumn_fmt, + mask, fmt, cx, + text_addr, text_len, # text + subItem, image, order) + return array.array("b", buf), extra + +def UnpackLVCOLUMN(lparam): + mask, fmt, cx, text_addr, text_size, subItem, image, order = \ + struct.unpack(_lvcolumn_fmt, lparam) + # ensure only items listed by the mask are valid + if not (mask & commctrl.LVCF_FMT): fmt = None + if not (mask & commctrl.LVCF_WIDTH): cx = None + if not (mask & commctrl.LVCF_TEXT): text_addr = text_size = None + if not (mask & commctrl.LVCF_SUBITEM): subItem = None + if not (mask & commctrl.LVCF_IMAGE): image = None + if not (mask & commctrl.LVCF_ORDER): order = None + if text_addr: + text = win32gui.PyGetString(text_addr) + else: + text = None + return _MakeResult("LVCOLUMN fmt cx text subItem image order", + (fmt, cx, text, subItem, image, order)) + + +# Make a new buffer suitable for querying an items attributes. +def EmptyLVCOLUMN(mask = None, text_buf_size=512): + extra = [] # objects we must keep references to + if mask is None: + mask = commctrl.LVCF_FMT | commctrl.LVCF_WIDTH | commctrl.LVCF_TEXT | \ + commctrl.LVCF_SUBITEM | commctrl.LVCF_IMAGE | commctrl.LVCF_ORDER + if mask & commctrl.LVCF_TEXT: + text_buffer = _make_empty_text_buffer(text_buf_size) + extra.append(text_buffer) + text_addr, _ = text_buffer.buffer_info() + else: + text_addr = text_buf_size = 0 + buf = struct.pack(_lvcolumn_fmt, + mask, 0, 0, + text_addr, text_buf_size, # text + 0, 0, 0) + return array.array("b", buf), extra + +# List view hit-test. 
+def PackLVHITTEST(pt): + format = "iiiii" + buf = struct.pack(format, + pt[0], pt[1], + 0, 0, 0) + return array.array("b", buf), None + +def UnpackLVHITTEST(buf): + format = "iiiii" + x, y, flags, item, subitem = struct.unpack(format, buf) + return _MakeResult("LVHITTEST pt flags item subitem", + ((x,y), flags, item, subitem)) + +def PackHDITEM(cxy = None, text = None, hbm = None, fmt = None, + param = None, image = None, order = None): + extra = [] # objects we must keep references to + mask = 0 + mask, cxy = _GetMaskAndVal(cxy, 0, mask, commctrl.HDI_HEIGHT) + mask, text = _GetMaskAndVal(text, None, mask, commctrl.LVCF_TEXT) + mask, hbm = _GetMaskAndVal(hbm, 0, mask, commctrl.HDI_BITMAP) + mask, fmt = _GetMaskAndVal(fmt, 0, mask, commctrl.HDI_FORMAT) + mask, param = _GetMaskAndVal(param, 0, mask, commctrl.HDI_LPARAM) + mask, image = _GetMaskAndVal(image, 0, mask, commctrl.HDI_IMAGE) + mask, order = _GetMaskAndVal(order, 0, mask, commctrl.HDI_ORDER) + + if text is None: + text_addr = text_len = 0 + else: + text_buffer = _make_text_buffer(text) + extra.append(text_buffer) + text_addr, _ = text_buffer.buffer_info() + text_len = len(text) + + format = "iiPPiiPiiii" + buf = struct.pack(format, + mask, cxy, text_addr, hbm, text_len, + fmt, param, image, order, 0, 0) + return array.array("b", buf), extra + +# Device notification stuff + +# Generic function for packing a DEV_BROADCAST_* structure - generally used +# by the other PackDEV_BROADCAST_* functions in this module. +def PackDEV_BROADCAST(devicetype, rest_fmt, rest_data, extra_data=_make_bytes('')): + # It seems a requirement is 4 byte alignment, even for the 'BYTE data[1]' + # field (eg, that would make DEV_BROADCAST_HANDLE 41 bytes, but we must + # be 44. + extra_data += _make_bytes('\0' * (4-len(extra_data)%4)) + format = "iii" + rest_fmt + full_size = struct.calcsize(format) + len(extra_data) + data = (full_size, devicetype, 0) + rest_data + return struct.pack(format, *data) + extra_data + +def PackDEV_BROADCAST_HANDLE(handle, hdevnotify=0, guid=_make_bytes("\0"*16), name_offset=0, data=_make_bytes("\0")): + return PackDEV_BROADCAST(win32con.DBT_DEVTYP_HANDLE, "PP16sl", + (int(handle), int(hdevnotify), _make_memory(guid), name_offset), + data) + +def PackDEV_BROADCAST_VOLUME(unitmask, flags): + return PackDEV_BROADCAST(win32con.DBT_DEVTYP_VOLUME, "II", + (unitmask, flags)) + +def PackDEV_BROADCAST_DEVICEINTERFACE(classguid, name=""): + if win32gui.UNICODE: + # This really means "is py3k?" - so not accepting bytes is OK + if not isinstance(name, str): + raise TypeError("Must provide unicode for the name") + name = name.encode('utf-16le') + else: + # py2k was passed a unicode object - encode as mbcs. + if isinstance(name, str): + name = name.encode('mbcs') + + # 16 bytes for the IID followed by \0 term'd string. + rest_fmt = "16s%ds" % len(name) + # _make_memory(iid) hoops necessary to get the raw IID bytes. + rest_data = (_make_memory(pywintypes.IID(classguid)), name) + return PackDEV_BROADCAST(win32con.DBT_DEVTYP_DEVICEINTERFACE, rest_fmt, rest_data) + +# An object returned by UnpackDEV_BROADCAST. 
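(The DEV_BROADCAST_INFO result class and UnpackDEV_BROADCAST follow just below. As an illustrative sketch, assuming pywin32 is installed, the pack/unpack pair can be exercised without a real WM_DEVICECHANGE message by parking the packed bytes at a fixed address:)

    import array
    import win32gui_struct

    # Build a DBT_DEVTYP_VOLUME broadcast; unit-mask bit 2 corresponds to drive C:.
    raw = win32gui_struct.PackDEV_BROADCAST_VOLUME(0x4, 0)

    # UnpackDEV_BROADCAST takes a memory address (normally the LPARAM of a
    # WM_DEVICECHANGE message), so keep the bytes in an array and use its address.
    buf = array.array("b", raw)
    addr, _ = buf.buffer_info()
    info = win32gui_struct.UnpackDEV_BROADCAST(addr)
    print(info)  # DEV_BROADCAST_INFO with devicetype=DBT_DEVTYP_VOLUME, unitmask=4, flags=0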
+class DEV_BROADCAST_INFO: + def __init__(self, devicetype, **kw): + self.devicetype = devicetype + self.__dict__.update(kw) + def __str__(self): + return "DEV_BROADCAST_INFO:" + str(self.__dict__) + +# Support for unpacking the 'lparam' +def UnpackDEV_BROADCAST(lparam): + if lparam == 0: + return None + hdr_format = "iii" + hdr_size = struct.calcsize(hdr_format) + hdr_buf = win32gui.PyGetMemory(lparam, hdr_size) + size, devtype, reserved = struct.unpack("iii", hdr_buf) + # Due to x64 alignment issues, we need to use the full format string over + # the entire buffer. ie, on x64: + # calcsize('iiiP') != calcsize('iii')+calcsize('P') + buf = win32gui.PyGetMemory(lparam, size) + + extra = x = {} + if devtype == win32con.DBT_DEVTYP_HANDLE: + # 2 handles, a GUID, a LONG and possibly an array following... + fmt = hdr_format + "PP16sl" + _, _, _, x['handle'], x['hdevnotify'], guid_bytes, x['nameoffset'] = \ + struct.unpack(fmt, buf[:struct.calcsize(fmt)]) + x['eventguid'] = pywintypes.IID(guid_bytes, True) + elif devtype == win32con.DBT_DEVTYP_DEVICEINTERFACE: + fmt = hdr_format + "16s" + _, _, _, guid_bytes = struct.unpack(fmt, buf[:struct.calcsize(fmt)]) + x['classguid'] = pywintypes.IID(guid_bytes, True) + x['name'] = win32gui.PyGetString(lparam + struct.calcsize(fmt)) + elif devtype == win32con.DBT_DEVTYP_VOLUME: + # int mask and flags + fmt = hdr_format + "II" + _, _, _, x['unitmask'], x['flags'] = struct.unpack(fmt, buf[:struct.calcsize(fmt)]) + else: + raise NotImplementedError("unknown device type %d" % (devtype,)) + return DEV_BROADCAST_INFO(devtype, **extra) diff --git a/venv/Lib/site-packages/win32/lib/win32inetcon.py b/venv/Lib/site-packages/win32/lib/win32inetcon.py new file mode 100644 index 00000000..3338fe25 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32inetcon.py @@ -0,0 +1,1103 @@ +# Generated by h2py from \mssdk\include\WinInet.h + +INTERNET_INVALID_PORT_NUMBER = 0 +INTERNET_DEFAULT_FTP_PORT = 21 +INTERNET_DEFAULT_GOPHER_PORT = 70 +INTERNET_DEFAULT_HTTP_PORT = 80 +INTERNET_DEFAULT_HTTPS_PORT = 443 +INTERNET_DEFAULT_SOCKS_PORT = 1080 +INTERNET_MAX_HOST_NAME_LENGTH = 256 +INTERNET_MAX_USER_NAME_LENGTH = 128 +INTERNET_MAX_PASSWORD_LENGTH = 128 +INTERNET_MAX_PORT_NUMBER_LENGTH = 5 +INTERNET_MAX_PORT_NUMBER_VALUE = 65535 +INTERNET_MAX_PATH_LENGTH = 2048 +INTERNET_MAX_SCHEME_LENGTH = 32 +INTERNET_KEEP_ALIVE_ENABLED = 1 +INTERNET_KEEP_ALIVE_DISABLED = 0 +INTERNET_REQFLAG_FROM_CACHE = 0x00000001 +INTERNET_REQFLAG_ASYNC = 0x00000002 +INTERNET_REQFLAG_VIA_PROXY = 0x00000004 +INTERNET_REQFLAG_NO_HEADERS = 0x00000008 +INTERNET_REQFLAG_PASSIVE = 0x00000010 +INTERNET_REQFLAG_CACHE_WRITE_DISABLED = 0x00000040 +INTERNET_REQFLAG_NET_TIMEOUT = 0x00000080 +INTERNET_FLAG_RELOAD = (-2147483648) +INTERNET_FLAG_RAW_DATA = 0x40000000 +INTERNET_FLAG_EXISTING_CONNECT = 0x20000000 +INTERNET_FLAG_ASYNC = 0x10000000 +INTERNET_FLAG_PASSIVE = 0x08000000 +INTERNET_FLAG_NO_CACHE_WRITE = 0x04000000 +INTERNET_FLAG_DONT_CACHE = INTERNET_FLAG_NO_CACHE_WRITE +INTERNET_FLAG_MAKE_PERSISTENT = 0x02000000 +INTERNET_FLAG_FROM_CACHE = 0x01000000 +INTERNET_FLAG_OFFLINE = INTERNET_FLAG_FROM_CACHE +INTERNET_FLAG_SECURE = 0x00800000 +INTERNET_FLAG_KEEP_CONNECTION = 0x00400000 +INTERNET_FLAG_NO_AUTO_REDIRECT = 0x00200000 +INTERNET_FLAG_READ_PREFETCH = 0x00100000 +INTERNET_FLAG_NO_COOKIES = 0x00080000 +INTERNET_FLAG_NO_AUTH = 0x00040000 +INTERNET_FLAG_RESTRICTED_ZONE = 0x00020000 +INTERNET_FLAG_CACHE_IF_NET_FAIL = 0x00010000 +INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP = 0x00008000 +INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS 
= 0x00004000 +INTERNET_FLAG_IGNORE_CERT_DATE_INVALID = 0x00002000 +INTERNET_FLAG_IGNORE_CERT_CN_INVALID = 0x00001000 +INTERNET_FLAG_RESYNCHRONIZE = 0x00000800 +INTERNET_FLAG_HYPERLINK = 0x00000400 +INTERNET_FLAG_NO_UI = 0x00000200 +INTERNET_FLAG_PRAGMA_NOCACHE = 0x00000100 +INTERNET_FLAG_CACHE_ASYNC = 0x00000080 +INTERNET_FLAG_FORMS_SUBMIT = 0x00000040 +INTERNET_FLAG_FWD_BACK = 0x00000020 +INTERNET_FLAG_NEED_FILE = 0x00000010 +INTERNET_FLAG_MUST_CACHE_REQUEST = INTERNET_FLAG_NEED_FILE +SECURITY_INTERNET_MASK = (INTERNET_FLAG_IGNORE_CERT_CN_INVALID | \ + INTERNET_FLAG_IGNORE_CERT_DATE_INVALID | \ + INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS | \ + INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP ) +INTERNET_ERROR_MASK_INSERT_CDROM = 0x1 +INTERNET_ERROR_MASK_COMBINED_SEC_CERT = 0x2 +INTERNET_ERROR_MASK_NEED_MSN_SSPI_PKG = 0X4 +INTERNET_ERROR_MASK_LOGIN_FAILURE_DISPLAY_ENTITY_BODY = 0x8 +WININET_API_FLAG_ASYNC = 0x00000001 +WININET_API_FLAG_SYNC = 0x00000004 +WININET_API_FLAG_USE_CONTEXT = 0x00000008 +INTERNET_NO_CALLBACK = 0 +IDSI_FLAG_KEEP_ALIVE = 0x00000001 +IDSI_FLAG_SECURE = 0x00000002 +IDSI_FLAG_PROXY = 0x00000004 +IDSI_FLAG_TUNNEL = 0x00000008 +INTERNET_PER_CONN_FLAGS = 1 +INTERNET_PER_CONN_PROXY_SERVER = 2 +INTERNET_PER_CONN_PROXY_BYPASS = 3 +INTERNET_PER_CONN_AUTOCONFIG_URL = 4 +INTERNET_PER_CONN_AUTODISCOVERY_FLAGS = 5 +INTERNET_PER_CONN_AUTOCONFIG_SECONDARY_URL = 6 +INTERNET_PER_CONN_AUTOCONFIG_RELOAD_DELAY_MINS = 7 +INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_TIME = 8 +INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_URL = 9 +PROXY_TYPE_DIRECT = 0x00000001 +PROXY_TYPE_PROXY = 0x00000002 +PROXY_TYPE_AUTO_PROXY_URL = 0x00000004 +PROXY_TYPE_AUTO_DETECT = 0x00000008 +AUTO_PROXY_FLAG_USER_SET = 0x00000001 +AUTO_PROXY_FLAG_ALWAYS_DETECT = 0x00000002 +AUTO_PROXY_FLAG_DETECTION_RUN = 0x00000004 +AUTO_PROXY_FLAG_MIGRATED = 0x00000008 +AUTO_PROXY_FLAG_DONT_CACHE_PROXY_RESULT = 0x00000010 +AUTO_PROXY_FLAG_CACHE_INIT_RUN = 0x00000020 +AUTO_PROXY_FLAG_DETECTION_SUSPECT = 0x00000040 +ISO_FORCE_DISCONNECTED = 0x00000001 +INTERNET_RFC1123_FORMAT = 0 +INTERNET_RFC1123_BUFSIZE = 30 +ICU_ESCAPE = (-2147483648) +ICU_USERNAME = 0x40000000 +ICU_NO_ENCODE = 0x20000000 +ICU_DECODE = 0x10000000 +ICU_NO_META = 0x08000000 +ICU_ENCODE_SPACES_ONLY = 0x04000000 +ICU_BROWSER_MODE = 0x02000000 +ICU_ENCODE_PERCENT = 0x00001000 +INTERNET_OPEN_TYPE_PRECONFIG = 0 +INTERNET_OPEN_TYPE_DIRECT = 1 +INTERNET_OPEN_TYPE_PROXY = 3 +INTERNET_OPEN_TYPE_PRECONFIG_WITH_NO_AUTOPROXY = 4 +PRE_CONFIG_INTERNET_ACCESS = INTERNET_OPEN_TYPE_PRECONFIG +LOCAL_INTERNET_ACCESS = INTERNET_OPEN_TYPE_DIRECT +CERN_PROXY_INTERNET_ACCESS = INTERNET_OPEN_TYPE_PROXY +INTERNET_SERVICE_FTP = 1 +INTERNET_SERVICE_GOPHER = 2 +INTERNET_SERVICE_HTTP = 3 +IRF_ASYNC = WININET_API_FLAG_ASYNC +IRF_SYNC = WININET_API_FLAG_SYNC +IRF_USE_CONTEXT = WININET_API_FLAG_USE_CONTEXT +IRF_NO_WAIT = 0x00000008 +ISO_GLOBAL = 0x00000001 +ISO_REGISTRY = 0x00000002 +ISO_VALID_FLAGS = (ISO_GLOBAL | ISO_REGISTRY) +INTERNET_OPTION_CALLBACK = 1 +INTERNET_OPTION_CONNECT_TIMEOUT = 2 +INTERNET_OPTION_CONNECT_RETRIES = 3 +INTERNET_OPTION_CONNECT_BACKOFF = 4 +INTERNET_OPTION_SEND_TIMEOUT = 5 +INTERNET_OPTION_CONTROL_SEND_TIMEOUT = INTERNET_OPTION_SEND_TIMEOUT +INTERNET_OPTION_RECEIVE_TIMEOUT = 6 +INTERNET_OPTION_CONTROL_RECEIVE_TIMEOUT = INTERNET_OPTION_RECEIVE_TIMEOUT +INTERNET_OPTION_DATA_SEND_TIMEOUT = 7 +INTERNET_OPTION_DATA_RECEIVE_TIMEOUT = 8 +INTERNET_OPTION_HANDLE_TYPE = 9 +INTERNET_OPTION_LISTEN_TIMEOUT = 11 +INTERNET_OPTION_READ_BUFFER_SIZE = 12 +INTERNET_OPTION_WRITE_BUFFER_SIZE = 13 
+INTERNET_OPTION_ASYNC_ID = 15 +INTERNET_OPTION_ASYNC_PRIORITY = 16 +INTERNET_OPTION_PARENT_HANDLE = 21 +INTERNET_OPTION_KEEP_CONNECTION = 22 +INTERNET_OPTION_REQUEST_FLAGS = 23 +INTERNET_OPTION_EXTENDED_ERROR = 24 +INTERNET_OPTION_OFFLINE_MODE = 26 +INTERNET_OPTION_CACHE_STREAM_HANDLE = 27 +INTERNET_OPTION_USERNAME = 28 +INTERNET_OPTION_PASSWORD = 29 +INTERNET_OPTION_ASYNC = 30 +INTERNET_OPTION_SECURITY_FLAGS = 31 +INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT = 32 +INTERNET_OPTION_DATAFILE_NAME = 33 +INTERNET_OPTION_URL = 34 +INTERNET_OPTION_SECURITY_CERTIFICATE = 35 +INTERNET_OPTION_SECURITY_KEY_BITNESS = 36 +INTERNET_OPTION_REFRESH = 37 +INTERNET_OPTION_PROXY = 38 +INTERNET_OPTION_SETTINGS_CHANGED = 39 +INTERNET_OPTION_VERSION = 40 +INTERNET_OPTION_USER_AGENT = 41 +INTERNET_OPTION_END_BROWSER_SESSION = 42 +INTERNET_OPTION_PROXY_USERNAME = 43 +INTERNET_OPTION_PROXY_PASSWORD = 44 +INTERNET_OPTION_CONTEXT_VALUE = 45 +INTERNET_OPTION_CONNECT_LIMIT = 46 +INTERNET_OPTION_SECURITY_SELECT_CLIENT_CERT = 47 +INTERNET_OPTION_POLICY = 48 +INTERNET_OPTION_DISCONNECTED_TIMEOUT = 49 +INTERNET_OPTION_CONNECTED_STATE = 50 +INTERNET_OPTION_IDLE_STATE = 51 +INTERNET_OPTION_OFFLINE_SEMANTICS = 52 +INTERNET_OPTION_SECONDARY_CACHE_KEY = 53 +INTERNET_OPTION_CALLBACK_FILTER = 54 +INTERNET_OPTION_CONNECT_TIME = 55 +INTERNET_OPTION_SEND_THROUGHPUT = 56 +INTERNET_OPTION_RECEIVE_THROUGHPUT = 57 +INTERNET_OPTION_REQUEST_PRIORITY = 58 +INTERNET_OPTION_HTTP_VERSION = 59 +INTERNET_OPTION_RESET_URLCACHE_SESSION = 60 +INTERNET_OPTION_ERROR_MASK = 62 +INTERNET_OPTION_FROM_CACHE_TIMEOUT = 63 +INTERNET_OPTION_BYPASS_EDITED_ENTRY = 64 +INTERNET_OPTION_DIAGNOSTIC_SOCKET_INFO = 67 +INTERNET_OPTION_CODEPAGE = 68 +INTERNET_OPTION_CACHE_TIMESTAMPS = 69 +INTERNET_OPTION_DISABLE_AUTODIAL = 70 +INTERNET_OPTION_MAX_CONNS_PER_SERVER = 73 +INTERNET_OPTION_MAX_CONNS_PER_1_0_SERVER = 74 +INTERNET_OPTION_PER_CONNECTION_OPTION = 75 +INTERNET_OPTION_DIGEST_AUTH_UNLOAD = 76 +INTERNET_OPTION_IGNORE_OFFLINE = 77 +INTERNET_OPTION_IDENTITY = 78 +INTERNET_OPTION_REMOVE_IDENTITY = 79 +INTERNET_OPTION_ALTER_IDENTITY = 80 +INTERNET_OPTION_SUPPRESS_BEHAVIOR = 81 +INTERNET_OPTION_AUTODIAL_MODE = 82 +INTERNET_OPTION_AUTODIAL_CONNECTION = 83 +INTERNET_OPTION_CLIENT_CERT_CONTEXT = 84 +INTERNET_OPTION_AUTH_FLAGS = 85 +INTERNET_OPTION_COOKIES_3RD_PARTY = 86 +INTERNET_OPTION_DISABLE_PASSPORT_AUTH = 87 +INTERNET_OPTION_SEND_UTF8_SERVERNAME_TO_PROXY = 88 +INTERNET_OPTION_EXEMPT_CONNECTION_LIMIT = 89 +INTERNET_OPTION_ENABLE_PASSPORT_AUTH = 90 +INTERNET_OPTION_HIBERNATE_INACTIVE_WORKER_THREADS = 91 +INTERNET_OPTION_ACTIVATE_WORKER_THREADS = 92 +INTERNET_OPTION_RESTORE_WORKER_THREAD_DEFAULTS = 93 +INTERNET_OPTION_SOCKET_SEND_BUFFER_LENGTH = 94 +INTERNET_OPTION_PROXY_SETTINGS_CHANGED = 95 +INTERNET_FIRST_OPTION = INTERNET_OPTION_CALLBACK +INTERNET_LAST_OPTION = INTERNET_OPTION_PROXY_SETTINGS_CHANGED +INTERNET_PRIORITY_FOREGROUND = 1000 +INTERNET_HANDLE_TYPE_INTERNET = 1 +INTERNET_HANDLE_TYPE_CONNECT_FTP = 2 +INTERNET_HANDLE_TYPE_CONNECT_GOPHER = 3 +INTERNET_HANDLE_TYPE_CONNECT_HTTP = 4 +INTERNET_HANDLE_TYPE_FTP_FIND = 5 +INTERNET_HANDLE_TYPE_FTP_FIND_HTML = 6 +INTERNET_HANDLE_TYPE_FTP_FILE = 7 +INTERNET_HANDLE_TYPE_FTP_FILE_HTML = 8 +INTERNET_HANDLE_TYPE_GOPHER_FIND = 9 +INTERNET_HANDLE_TYPE_GOPHER_FIND_HTML = 10 +INTERNET_HANDLE_TYPE_GOPHER_FILE = 11 +INTERNET_HANDLE_TYPE_GOPHER_FILE_HTML = 12 +INTERNET_HANDLE_TYPE_HTTP_REQUEST = 13 +INTERNET_HANDLE_TYPE_FILE_REQUEST = 14 +AUTH_FLAG_DISABLE_NEGOTIATE = 0x00000001 +AUTH_FLAG_ENABLE_NEGOTIATE = 0x00000002 
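(The generated WinInet constants continue below. They are plain Python ints straight from h2py, so they can be combined and inspected with ordinary bit arithmetic; a small illustrative sketch, assuming pywin32's win32inetcon module is importable, that names the INTERNET_FLAG_* bits set in a combined flags value such as one passed to an InternetOpenUrl-style call:)

    import win32inetcon

    # Force a refetch and keep the response out of the WinInet cache.
    flags = win32inetcon.INTERNET_FLAG_RELOAD | win32inetcon.INTERNET_FLAG_NO_CACHE_WRITE

    def flag_names(value, prefix="INTERNET_FLAG_"):
        """List every constant with the given prefix whose bits are all set in value."""
        names = []
        for name in dir(win32inetcon):
            if not name.startswith(prefix):
                continue
            bit = getattr(win32inetcon, name)
            if isinstance(bit, int) and bit != 0 and (value & bit) == bit:
                names.append(name)
        return sorted(names)

    # INTERNET_FLAG_DONT_CACHE is reported as well because the header defines it
    # as an alias of INTERNET_FLAG_NO_CACHE_WRITE.
    print(flag_names(flags))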
+SECURITY_FLAG_SECURE = 0x00000001 +SECURITY_FLAG_STRENGTH_WEAK = 0x10000000 +SECURITY_FLAG_STRENGTH_MEDIUM = 0x40000000 +SECURITY_FLAG_STRENGTH_STRONG = 0x20000000 +SECURITY_FLAG_UNKNOWNBIT = (-2147483648) +SECURITY_FLAG_FORTEZZA = 0x08000000 +SECURITY_FLAG_NORMALBITNESS = SECURITY_FLAG_STRENGTH_WEAK +SECURITY_FLAG_SSL = 0x00000002 +SECURITY_FLAG_SSL3 = 0x00000004 +SECURITY_FLAG_PCT = 0x00000008 +SECURITY_FLAG_PCT4 = 0x00000010 +SECURITY_FLAG_IETFSSL4 = 0x00000020 +SECURITY_FLAG_40BIT = SECURITY_FLAG_STRENGTH_WEAK +SECURITY_FLAG_128BIT = SECURITY_FLAG_STRENGTH_STRONG +SECURITY_FLAG_56BIT = SECURITY_FLAG_STRENGTH_MEDIUM +SECURITY_FLAG_IGNORE_REVOCATION = 0x00000080 +SECURITY_FLAG_IGNORE_UNKNOWN_CA = 0x00000100 +SECURITY_FLAG_IGNORE_WRONG_USAGE = 0x00000200 +SECURITY_FLAG_IGNORE_CERT_CN_INVALID = INTERNET_FLAG_IGNORE_CERT_CN_INVALID +SECURITY_FLAG_IGNORE_CERT_DATE_INVALID = INTERNET_FLAG_IGNORE_CERT_DATE_INVALID +SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTPS = INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS +SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTP = INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP +SECURITY_SET_MASK = (SECURITY_FLAG_IGNORE_REVOCATION |\ + SECURITY_FLAG_IGNORE_UNKNOWN_CA |\ + SECURITY_FLAG_IGNORE_CERT_CN_INVALID |\ + SECURITY_FLAG_IGNORE_CERT_DATE_INVALID |\ + SECURITY_FLAG_IGNORE_WRONG_USAGE) +AUTODIAL_MODE_NEVER = 1 +AUTODIAL_MODE_ALWAYS = 2 +AUTODIAL_MODE_NO_NETWORK_PRESENT = 4 +INTERNET_STATUS_RESOLVING_NAME = 10 +INTERNET_STATUS_NAME_RESOLVED = 11 +INTERNET_STATUS_CONNECTING_TO_SERVER = 20 +INTERNET_STATUS_CONNECTED_TO_SERVER = 21 +INTERNET_STATUS_SENDING_REQUEST = 30 +INTERNET_STATUS_REQUEST_SENT = 31 +INTERNET_STATUS_RECEIVING_RESPONSE = 40 +INTERNET_STATUS_RESPONSE_RECEIVED = 41 +INTERNET_STATUS_CTL_RESPONSE_RECEIVED = 42 +INTERNET_STATUS_PREFETCH = 43 +INTERNET_STATUS_CLOSING_CONNECTION = 50 +INTERNET_STATUS_CONNECTION_CLOSED = 51 +INTERNET_STATUS_HANDLE_CREATED = 60 +INTERNET_STATUS_HANDLE_CLOSING = 70 +INTERNET_STATUS_DETECTING_PROXY = 80 +INTERNET_STATUS_REQUEST_COMPLETE = 100 +INTERNET_STATUS_REDIRECT = 110 +INTERNET_STATUS_INTERMEDIATE_RESPONSE = 120 +INTERNET_STATUS_USER_INPUT_REQUIRED = 140 +INTERNET_STATUS_STATE_CHANGE = 200 +INTERNET_STATUS_COOKIE_SENT = 320 +INTERNET_STATUS_COOKIE_RECEIVED = 321 +INTERNET_STATUS_PRIVACY_IMPACTED = 324 +INTERNET_STATUS_P3P_HEADER = 325 +INTERNET_STATUS_P3P_POLICYREF = 326 +INTERNET_STATUS_COOKIE_HISTORY = 327 +INTERNET_STATE_CONNECTED = 0x00000001 +INTERNET_STATE_DISCONNECTED = 0x00000002 +INTERNET_STATE_DISCONNECTED_BY_USER = 0x00000010 +INTERNET_STATE_IDLE = 0x00000100 +INTERNET_STATE_BUSY = 0x00000200 +FTP_TRANSFER_TYPE_UNKNOWN = 0x00000000 +FTP_TRANSFER_TYPE_ASCII = 0x00000001 +FTP_TRANSFER_TYPE_BINARY = 0x00000002 +FTP_TRANSFER_TYPE_MASK = (FTP_TRANSFER_TYPE_ASCII | FTP_TRANSFER_TYPE_BINARY) +MAX_GOPHER_DISPLAY_TEXT = 128 +MAX_GOPHER_SELECTOR_TEXT = 256 +MAX_GOPHER_HOST_NAME = INTERNET_MAX_HOST_NAME_LENGTH +MAX_GOPHER_LOCATOR_LENGTH = (1 \ + + MAX_GOPHER_DISPLAY_TEXT \ + + 1 \ + + MAX_GOPHER_SELECTOR_TEXT \ + + 1 \ + + MAX_GOPHER_HOST_NAME \ + + 1 \ + + INTERNET_MAX_PORT_NUMBER_LENGTH \ + + 1 \ + + 1 \ + + 2 \ + ) +GOPHER_TYPE_TEXT_FILE = 0x00000001 +GOPHER_TYPE_DIRECTORY = 0x00000002 +GOPHER_TYPE_CSO = 0x00000004 +GOPHER_TYPE_ERROR = 0x00000008 +GOPHER_TYPE_MAC_BINHEX = 0x00000010 +GOPHER_TYPE_DOS_ARCHIVE = 0x00000020 +GOPHER_TYPE_UNIX_UUENCODED = 0x00000040 +GOPHER_TYPE_INDEX_SERVER = 0x00000080 +GOPHER_TYPE_TELNET = 0x00000100 +GOPHER_TYPE_BINARY = 0x00000200 +GOPHER_TYPE_REDUNDANT = 0x00000400 +GOPHER_TYPE_TN3270 = 0x00000800 +GOPHER_TYPE_GIF 
= 0x00001000 +GOPHER_TYPE_IMAGE = 0x00002000 +GOPHER_TYPE_BITMAP = 0x00004000 +GOPHER_TYPE_MOVIE = 0x00008000 +GOPHER_TYPE_SOUND = 0x00010000 +GOPHER_TYPE_HTML = 0x00020000 +GOPHER_TYPE_PDF = 0x00040000 +GOPHER_TYPE_CALENDAR = 0x00080000 +GOPHER_TYPE_INLINE = 0x00100000 +GOPHER_TYPE_UNKNOWN = 0x20000000 +GOPHER_TYPE_ASK = 0x40000000 +GOPHER_TYPE_GOPHER_PLUS = (-2147483648) +GOPHER_TYPE_FILE_MASK = (GOPHER_TYPE_TEXT_FILE \ + | GOPHER_TYPE_MAC_BINHEX \ + | GOPHER_TYPE_DOS_ARCHIVE \ + | GOPHER_TYPE_UNIX_UUENCODED \ + | GOPHER_TYPE_BINARY \ + | GOPHER_TYPE_GIF \ + | GOPHER_TYPE_IMAGE \ + | GOPHER_TYPE_BITMAP \ + | GOPHER_TYPE_MOVIE \ + | GOPHER_TYPE_SOUND \ + | GOPHER_TYPE_HTML \ + | GOPHER_TYPE_PDF \ + | GOPHER_TYPE_CALENDAR \ + | GOPHER_TYPE_INLINE \ + ) +MAX_GOPHER_CATEGORY_NAME = 128 +MAX_GOPHER_ATTRIBUTE_NAME = 128 +MIN_GOPHER_ATTRIBUTE_LENGTH = 256 +GOPHER_ATTRIBUTE_ID_BASE = (-1412641792) +GOPHER_CATEGORY_ID_ALL = (GOPHER_ATTRIBUTE_ID_BASE + 1) +GOPHER_CATEGORY_ID_INFO = (GOPHER_ATTRIBUTE_ID_BASE + 2) +GOPHER_CATEGORY_ID_ADMIN = (GOPHER_ATTRIBUTE_ID_BASE + 3) +GOPHER_CATEGORY_ID_VIEWS = (GOPHER_ATTRIBUTE_ID_BASE + 4) +GOPHER_CATEGORY_ID_ABSTRACT = (GOPHER_ATTRIBUTE_ID_BASE + 5) +GOPHER_CATEGORY_ID_VERONICA = (GOPHER_ATTRIBUTE_ID_BASE + 6) +GOPHER_CATEGORY_ID_ASK = (GOPHER_ATTRIBUTE_ID_BASE + 7) +GOPHER_CATEGORY_ID_UNKNOWN = (GOPHER_ATTRIBUTE_ID_BASE + 8) +GOPHER_ATTRIBUTE_ID_ALL = (GOPHER_ATTRIBUTE_ID_BASE + 9) +GOPHER_ATTRIBUTE_ID_ADMIN = (GOPHER_ATTRIBUTE_ID_BASE + 10) +GOPHER_ATTRIBUTE_ID_MOD_DATE = (GOPHER_ATTRIBUTE_ID_BASE + 11) +GOPHER_ATTRIBUTE_ID_TTL = (GOPHER_ATTRIBUTE_ID_BASE + 12) +GOPHER_ATTRIBUTE_ID_SCORE = (GOPHER_ATTRIBUTE_ID_BASE + 13) +GOPHER_ATTRIBUTE_ID_RANGE = (GOPHER_ATTRIBUTE_ID_BASE + 14) +GOPHER_ATTRIBUTE_ID_SITE = (GOPHER_ATTRIBUTE_ID_BASE + 15) +GOPHER_ATTRIBUTE_ID_ORG = (GOPHER_ATTRIBUTE_ID_BASE + 16) +GOPHER_ATTRIBUTE_ID_LOCATION = (GOPHER_ATTRIBUTE_ID_BASE + 17) +GOPHER_ATTRIBUTE_ID_GEOG = (GOPHER_ATTRIBUTE_ID_BASE + 18) +GOPHER_ATTRIBUTE_ID_TIMEZONE = (GOPHER_ATTRIBUTE_ID_BASE + 19) +GOPHER_ATTRIBUTE_ID_PROVIDER = (GOPHER_ATTRIBUTE_ID_BASE + 20) +GOPHER_ATTRIBUTE_ID_VERSION = (GOPHER_ATTRIBUTE_ID_BASE + 21) +GOPHER_ATTRIBUTE_ID_ABSTRACT = (GOPHER_ATTRIBUTE_ID_BASE + 22) +GOPHER_ATTRIBUTE_ID_VIEW = (GOPHER_ATTRIBUTE_ID_BASE + 23) +GOPHER_ATTRIBUTE_ID_TREEWALK = (GOPHER_ATTRIBUTE_ID_BASE + 24) +GOPHER_ATTRIBUTE_ID_UNKNOWN = (GOPHER_ATTRIBUTE_ID_BASE + 25) +HTTP_MAJOR_VERSION = 1 +HTTP_MINOR_VERSION = 0 +HTTP_VERSIONA = "HTTP/1.0" +HTTP_VERSION = HTTP_VERSIONA +HTTP_QUERY_MIME_VERSION = 0 +HTTP_QUERY_CONTENT_TYPE = 1 +HTTP_QUERY_CONTENT_TRANSFER_ENCODING = 2 +HTTP_QUERY_CONTENT_ID = 3 +HTTP_QUERY_CONTENT_DESCRIPTION = 4 +HTTP_QUERY_CONTENT_LENGTH = 5 +HTTP_QUERY_CONTENT_LANGUAGE = 6 +HTTP_QUERY_ALLOW = 7 +HTTP_QUERY_PUBLIC = 8 +HTTP_QUERY_DATE = 9 +HTTP_QUERY_EXPIRES = 10 +HTTP_QUERY_LAST_MODIFIED = 11 +HTTP_QUERY_MESSAGE_ID = 12 +HTTP_QUERY_URI = 13 +HTTP_QUERY_DERIVED_FROM = 14 +HTTP_QUERY_COST = 15 +HTTP_QUERY_LINK = 16 +HTTP_QUERY_PRAGMA = 17 +HTTP_QUERY_VERSION = 18 +HTTP_QUERY_STATUS_CODE = 19 +HTTP_QUERY_STATUS_TEXT = 20 +HTTP_QUERY_RAW_HEADERS = 21 +HTTP_QUERY_RAW_HEADERS_CRLF = 22 +HTTP_QUERY_CONNECTION = 23 +HTTP_QUERY_ACCEPT = 24 +HTTP_QUERY_ACCEPT_CHARSET = 25 +HTTP_QUERY_ACCEPT_ENCODING = 26 +HTTP_QUERY_ACCEPT_LANGUAGE = 27 +HTTP_QUERY_AUTHORIZATION = 28 +HTTP_QUERY_CONTENT_ENCODING = 29 +HTTP_QUERY_FORWARDED = 30 +HTTP_QUERY_FROM = 31 +HTTP_QUERY_IF_MODIFIED_SINCE = 32 +HTTP_QUERY_LOCATION = 33 +HTTP_QUERY_ORIG_URI = 34 +HTTP_QUERY_REFERER = 35 
+HTTP_QUERY_RETRY_AFTER = 36 +HTTP_QUERY_SERVER = 37 +HTTP_QUERY_TITLE = 38 +HTTP_QUERY_USER_AGENT = 39 +HTTP_QUERY_WWW_AUTHENTICATE = 40 +HTTP_QUERY_PROXY_AUTHENTICATE = 41 +HTTP_QUERY_ACCEPT_RANGES = 42 +HTTP_QUERY_SET_COOKIE = 43 +HTTP_QUERY_COOKIE = 44 +HTTP_QUERY_REQUEST_METHOD = 45 +HTTP_QUERY_REFRESH = 46 +HTTP_QUERY_CONTENT_DISPOSITION = 47 +HTTP_QUERY_AGE = 48 +HTTP_QUERY_CACHE_CONTROL = 49 +HTTP_QUERY_CONTENT_BASE = 50 +HTTP_QUERY_CONTENT_LOCATION = 51 +HTTP_QUERY_CONTENT_MD5 = 52 +HTTP_QUERY_CONTENT_RANGE = 53 +HTTP_QUERY_ETAG = 54 +HTTP_QUERY_HOST = 55 +HTTP_QUERY_IF_MATCH = 56 +HTTP_QUERY_IF_NONE_MATCH = 57 +HTTP_QUERY_IF_RANGE = 58 +HTTP_QUERY_IF_UNMODIFIED_SINCE = 59 +HTTP_QUERY_MAX_FORWARDS = 60 +HTTP_QUERY_PROXY_AUTHORIZATION = 61 +HTTP_QUERY_RANGE = 62 +HTTP_QUERY_TRANSFER_ENCODING = 63 +HTTP_QUERY_UPGRADE = 64 +HTTP_QUERY_VARY = 65 +HTTP_QUERY_VIA = 66 +HTTP_QUERY_WARNING = 67 +HTTP_QUERY_EXPECT = 68 +HTTP_QUERY_PROXY_CONNECTION = 69 +HTTP_QUERY_UNLESS_MODIFIED_SINCE = 70 +HTTP_QUERY_ECHO_REQUEST = 71 +HTTP_QUERY_ECHO_REPLY = 72 +HTTP_QUERY_ECHO_HEADERS = 73 +HTTP_QUERY_ECHO_HEADERS_CRLF = 74 +HTTP_QUERY_PROXY_SUPPORT = 75 +HTTP_QUERY_AUTHENTICATION_INFO = 76 +HTTP_QUERY_PASSPORT_URLS = 77 +HTTP_QUERY_PASSPORT_CONFIG = 78 +HTTP_QUERY_MAX = 78 +HTTP_QUERY_CUSTOM = 65535 +HTTP_QUERY_FLAG_REQUEST_HEADERS = (-2147483648) +HTTP_QUERY_FLAG_SYSTEMTIME = 0x40000000 +HTTP_QUERY_FLAG_NUMBER = 0x20000000 +HTTP_QUERY_FLAG_COALESCE = 0x10000000 +HTTP_QUERY_MODIFIER_FLAGS_MASK = (HTTP_QUERY_FLAG_REQUEST_HEADERS \ + | HTTP_QUERY_FLAG_SYSTEMTIME \ + | HTTP_QUERY_FLAG_NUMBER \ + | HTTP_QUERY_FLAG_COALESCE \ + ) +HTTP_QUERY_HEADER_MASK = (~HTTP_QUERY_MODIFIER_FLAGS_MASK) +HTTP_STATUS_CONTINUE = 100 +HTTP_STATUS_SWITCH_PROTOCOLS = 101 +HTTP_STATUS_OK = 200 +HTTP_STATUS_CREATED = 201 +HTTP_STATUS_ACCEPTED = 202 +HTTP_STATUS_PARTIAL = 203 +HTTP_STATUS_NO_CONTENT = 204 +HTTP_STATUS_RESET_CONTENT = 205 +HTTP_STATUS_PARTIAL_CONTENT = 206 +HTTP_STATUS_AMBIGUOUS = 300 +HTTP_STATUS_MOVED = 301 +HTTP_STATUS_REDIRECT = 302 +HTTP_STATUS_REDIRECT_METHOD = 303 +HTTP_STATUS_NOT_MODIFIED = 304 +HTTP_STATUS_USE_PROXY = 305 +HTTP_STATUS_REDIRECT_KEEP_VERB = 307 +HTTP_STATUS_BAD_REQUEST = 400 +HTTP_STATUS_DENIED = 401 +HTTP_STATUS_PAYMENT_REQ = 402 +HTTP_STATUS_FORBIDDEN = 403 +HTTP_STATUS_NOT_FOUND = 404 +HTTP_STATUS_BAD_METHOD = 405 +HTTP_STATUS_NONE_ACCEPTABLE = 406 +HTTP_STATUS_PROXY_AUTH_REQ = 407 +HTTP_STATUS_REQUEST_TIMEOUT = 408 +HTTP_STATUS_CONFLICT = 409 +HTTP_STATUS_GONE = 410 +HTTP_STATUS_LENGTH_REQUIRED = 411 +HTTP_STATUS_PRECOND_FAILED = 412 +HTTP_STATUS_REQUEST_TOO_LARGE = 413 +HTTP_STATUS_URI_TOO_LONG = 414 +HTTP_STATUS_UNSUPPORTED_MEDIA = 415 +HTTP_STATUS_RETRY_WITH = 449 +HTTP_STATUS_SERVER_ERROR = 500 +HTTP_STATUS_NOT_SUPPORTED = 501 +HTTP_STATUS_BAD_GATEWAY = 502 +HTTP_STATUS_SERVICE_UNAVAIL = 503 +HTTP_STATUS_GATEWAY_TIMEOUT = 504 +HTTP_STATUS_VERSION_NOT_SUP = 505 +HTTP_STATUS_FIRST = HTTP_STATUS_CONTINUE +HTTP_STATUS_LAST = HTTP_STATUS_VERSION_NOT_SUP +HTTP_ADDREQ_INDEX_MASK = 0x0000FFFF +HTTP_ADDREQ_FLAGS_MASK = (-65536) +HTTP_ADDREQ_FLAG_ADD_IF_NEW = 0x10000000 +HTTP_ADDREQ_FLAG_ADD = 0x20000000 +HTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA = 0x40000000 +HTTP_ADDREQ_FLAG_COALESCE_WITH_SEMICOLON = 0x01000000 +HTTP_ADDREQ_FLAG_COALESCE = HTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA +HTTP_ADDREQ_FLAG_REPLACE = (-2147483648) +HSR_ASYNC = WININET_API_FLAG_ASYNC +HSR_SYNC = WININET_API_FLAG_SYNC +HSR_USE_CONTEXT = WININET_API_FLAG_USE_CONTEXT +HSR_INITIATE = 0x00000008 +HSR_DOWNLOAD = 0x00000010 
+HSR_CHUNKED = 0x00000020 +INTERNET_COOKIE_IS_SECURE = 0x01 +INTERNET_COOKIE_IS_SESSION = 0x02 +INTERNET_COOKIE_THIRD_PARTY = 0x10 +INTERNET_COOKIE_PROMPT_REQUIRED = 0x20 +INTERNET_COOKIE_EVALUATE_P3P = 0x40 +INTERNET_COOKIE_APPLY_P3P = 0x80 +INTERNET_COOKIE_P3P_ENABLED = 0x100 +INTERNET_COOKIE_IS_RESTRICTED = 0x200 +INTERNET_COOKIE_IE6 = 0x400 +INTERNET_COOKIE_IS_LEGACY = 0x800 +FLAG_ICC_FORCE_CONNECTION = 0x00000001 +FLAGS_ERROR_UI_FILTER_FOR_ERRORS = 0x01 +FLAGS_ERROR_UI_FLAGS_CHANGE_OPTIONS = 0x02 +FLAGS_ERROR_UI_FLAGS_GENERATE_DATA = 0x04 +FLAGS_ERROR_UI_FLAGS_NO_UI = 0x08 +FLAGS_ERROR_UI_SERIALIZE_DIALOGS = 0x10 +INTERNET_ERROR_BASE = 12000 +ERROR_INTERNET_OUT_OF_HANDLES = (INTERNET_ERROR_BASE + 1) +ERROR_INTERNET_TIMEOUT = (INTERNET_ERROR_BASE + 2) +ERROR_INTERNET_EXTENDED_ERROR = (INTERNET_ERROR_BASE + 3) +ERROR_INTERNET_INTERNAL_ERROR = (INTERNET_ERROR_BASE + 4) +ERROR_INTERNET_INVALID_URL = (INTERNET_ERROR_BASE + 5) +ERROR_INTERNET_UNRECOGNIZED_SCHEME = (INTERNET_ERROR_BASE + 6) +ERROR_INTERNET_NAME_NOT_RESOLVED = (INTERNET_ERROR_BASE + 7) +ERROR_INTERNET_PROTOCOL_NOT_FOUND = (INTERNET_ERROR_BASE + 8) +ERROR_INTERNET_INVALID_OPTION = (INTERNET_ERROR_BASE + 9) +ERROR_INTERNET_BAD_OPTION_LENGTH = (INTERNET_ERROR_BASE + 10) +ERROR_INTERNET_OPTION_NOT_SETTABLE = (INTERNET_ERROR_BASE + 11) +ERROR_INTERNET_SHUTDOWN = (INTERNET_ERROR_BASE + 12) +ERROR_INTERNET_INCORRECT_USER_NAME = (INTERNET_ERROR_BASE + 13) +ERROR_INTERNET_INCORRECT_PASSWORD = (INTERNET_ERROR_BASE + 14) +ERROR_INTERNET_LOGIN_FAILURE = (INTERNET_ERROR_BASE + 15) +ERROR_INTERNET_INVALID_OPERATION = (INTERNET_ERROR_BASE + 16) +ERROR_INTERNET_OPERATION_CANCELLED = (INTERNET_ERROR_BASE + 17) +ERROR_INTERNET_INCORRECT_HANDLE_TYPE = (INTERNET_ERROR_BASE + 18) +ERROR_INTERNET_INCORRECT_HANDLE_STATE = (INTERNET_ERROR_BASE + 19) +ERROR_INTERNET_NOT_PROXY_REQUEST = (INTERNET_ERROR_BASE + 20) +ERROR_INTERNET_REGISTRY_VALUE_NOT_FOUND = (INTERNET_ERROR_BASE + 21) +ERROR_INTERNET_BAD_REGISTRY_PARAMETER = (INTERNET_ERROR_BASE + 22) +ERROR_INTERNET_NO_DIRECT_ACCESS = (INTERNET_ERROR_BASE + 23) +ERROR_INTERNET_NO_CONTEXT = (INTERNET_ERROR_BASE + 24) +ERROR_INTERNET_NO_CALLBACK = (INTERNET_ERROR_BASE + 25) +ERROR_INTERNET_REQUEST_PENDING = (INTERNET_ERROR_BASE + 26) +ERROR_INTERNET_INCORRECT_FORMAT = (INTERNET_ERROR_BASE + 27) +ERROR_INTERNET_ITEM_NOT_FOUND = (INTERNET_ERROR_BASE + 28) +ERROR_INTERNET_CANNOT_CONNECT = (INTERNET_ERROR_BASE + 29) +ERROR_INTERNET_CONNECTION_ABORTED = (INTERNET_ERROR_BASE + 30) +ERROR_INTERNET_CONNECTION_RESET = (INTERNET_ERROR_BASE + 31) +ERROR_INTERNET_FORCE_RETRY = (INTERNET_ERROR_BASE + 32) +ERROR_INTERNET_INVALID_PROXY_REQUEST = (INTERNET_ERROR_BASE + 33) +ERROR_INTERNET_NEED_UI = (INTERNET_ERROR_BASE + 34) +ERROR_INTERNET_HANDLE_EXISTS = (INTERNET_ERROR_BASE + 36) +ERROR_INTERNET_SEC_CERT_DATE_INVALID = (INTERNET_ERROR_BASE + 37) +ERROR_INTERNET_SEC_CERT_CN_INVALID = (INTERNET_ERROR_BASE + 38) +ERROR_INTERNET_HTTP_TO_HTTPS_ON_REDIR = (INTERNET_ERROR_BASE + 39) +ERROR_INTERNET_HTTPS_TO_HTTP_ON_REDIR = (INTERNET_ERROR_BASE + 40) +ERROR_INTERNET_MIXED_SECURITY = (INTERNET_ERROR_BASE + 41) +ERROR_INTERNET_CHG_POST_IS_NON_SECURE = (INTERNET_ERROR_BASE + 42) +ERROR_INTERNET_POST_IS_NON_SECURE = (INTERNET_ERROR_BASE + 43) +ERROR_INTERNET_CLIENT_AUTH_CERT_NEEDED = (INTERNET_ERROR_BASE + 44) +ERROR_INTERNET_INVALID_CA = (INTERNET_ERROR_BASE + 45) +ERROR_INTERNET_CLIENT_AUTH_NOT_SETUP = (INTERNET_ERROR_BASE + 46) +ERROR_INTERNET_ASYNC_THREAD_FAILED = (INTERNET_ERROR_BASE + 47) 
+ERROR_INTERNET_REDIRECT_SCHEME_CHANGE = (INTERNET_ERROR_BASE + 48) +ERROR_INTERNET_DIALOG_PENDING = (INTERNET_ERROR_BASE + 49) +ERROR_INTERNET_RETRY_DIALOG = (INTERNET_ERROR_BASE + 50) +ERROR_INTERNET_HTTPS_HTTP_SUBMIT_REDIR = (INTERNET_ERROR_BASE + 52) +ERROR_INTERNET_INSERT_CDROM = (INTERNET_ERROR_BASE + 53) +ERROR_INTERNET_FORTEZZA_LOGIN_NEEDED = (INTERNET_ERROR_BASE + 54) +ERROR_INTERNET_SEC_CERT_ERRORS = (INTERNET_ERROR_BASE + 55) +ERROR_INTERNET_SEC_CERT_NO_REV = (INTERNET_ERROR_BASE + 56) +ERROR_INTERNET_SEC_CERT_REV_FAILED = (INTERNET_ERROR_BASE + 57) +ERROR_FTP_TRANSFER_IN_PROGRESS = (INTERNET_ERROR_BASE + 110) +ERROR_FTP_DROPPED = (INTERNET_ERROR_BASE + 111) +ERROR_FTP_NO_PASSIVE_MODE = (INTERNET_ERROR_BASE + 112) +ERROR_GOPHER_PROTOCOL_ERROR = (INTERNET_ERROR_BASE + 130) +ERROR_GOPHER_NOT_FILE = (INTERNET_ERROR_BASE + 131) +ERROR_GOPHER_DATA_ERROR = (INTERNET_ERROR_BASE + 132) +ERROR_GOPHER_END_OF_DATA = (INTERNET_ERROR_BASE + 133) +ERROR_GOPHER_INVALID_LOCATOR = (INTERNET_ERROR_BASE + 134) +ERROR_GOPHER_INCORRECT_LOCATOR_TYPE = (INTERNET_ERROR_BASE + 135) +ERROR_GOPHER_NOT_GOPHER_PLUS = (INTERNET_ERROR_BASE + 136) +ERROR_GOPHER_ATTRIBUTE_NOT_FOUND = (INTERNET_ERROR_BASE + 137) +ERROR_GOPHER_UNKNOWN_LOCATOR = (INTERNET_ERROR_BASE + 138) +ERROR_HTTP_HEADER_NOT_FOUND = (INTERNET_ERROR_BASE + 150) +ERROR_HTTP_DOWNLEVEL_SERVER = (INTERNET_ERROR_BASE + 151) +ERROR_HTTP_INVALID_SERVER_RESPONSE = (INTERNET_ERROR_BASE + 152) +ERROR_HTTP_INVALID_HEADER = (INTERNET_ERROR_BASE + 153) +ERROR_HTTP_INVALID_QUERY_REQUEST = (INTERNET_ERROR_BASE + 154) +ERROR_HTTP_HEADER_ALREADY_EXISTS = (INTERNET_ERROR_BASE + 155) +ERROR_HTTP_REDIRECT_FAILED = (INTERNET_ERROR_BASE + 156) +ERROR_HTTP_NOT_REDIRECTED = (INTERNET_ERROR_BASE + 160) +ERROR_HTTP_COOKIE_NEEDS_CONFIRMATION = (INTERNET_ERROR_BASE + 161) +ERROR_HTTP_COOKIE_DECLINED = (INTERNET_ERROR_BASE + 162) +ERROR_HTTP_REDIRECT_NEEDS_CONFIRMATION = (INTERNET_ERROR_BASE + 168) +ERROR_INTERNET_SECURITY_CHANNEL_ERROR = (INTERNET_ERROR_BASE + 157) +ERROR_INTERNET_UNABLE_TO_CACHE_FILE = (INTERNET_ERROR_BASE + 158) +ERROR_INTERNET_TCPIP_NOT_INSTALLED = (INTERNET_ERROR_BASE + 159) +ERROR_INTERNET_DISCONNECTED = (INTERNET_ERROR_BASE + 163) +ERROR_INTERNET_SERVER_UNREACHABLE = (INTERNET_ERROR_BASE + 164) +ERROR_INTERNET_PROXY_SERVER_UNREACHABLE = (INTERNET_ERROR_BASE + 165) +ERROR_INTERNET_BAD_AUTO_PROXY_SCRIPT = (INTERNET_ERROR_BASE + 166) +ERROR_INTERNET_UNABLE_TO_DOWNLOAD_SCRIPT = (INTERNET_ERROR_BASE + 167) +ERROR_INTERNET_SEC_INVALID_CERT = (INTERNET_ERROR_BASE + 169) +ERROR_INTERNET_SEC_CERT_REVOKED = (INTERNET_ERROR_BASE + 170) +ERROR_INTERNET_FAILED_DUETOSECURITYCHECK = (INTERNET_ERROR_BASE + 171) +ERROR_INTERNET_NOT_INITIALIZED = (INTERNET_ERROR_BASE + 172) +ERROR_INTERNET_NEED_MSN_SSPI_PKG = (INTERNET_ERROR_BASE + 173) +ERROR_INTERNET_LOGIN_FAILURE_DISPLAY_ENTITY_BODY = (INTERNET_ERROR_BASE + 174) +INTERNET_ERROR_LAST = ERROR_INTERNET_LOGIN_FAILURE_DISPLAY_ENTITY_BODY +NORMAL_CACHE_ENTRY = 0x00000001 +STICKY_CACHE_ENTRY = 0x00000004 +EDITED_CACHE_ENTRY = 0x00000008 +TRACK_OFFLINE_CACHE_ENTRY = 0x00000010 +TRACK_ONLINE_CACHE_ENTRY = 0x00000020 +SPARSE_CACHE_ENTRY = 0x00010000 +COOKIE_CACHE_ENTRY = 0x00100000 +URLHISTORY_CACHE_ENTRY = 0x00200000 +URLCACHE_FIND_DEFAULT_FILTER = NORMAL_CACHE_ENTRY \ + | COOKIE_CACHE_ENTRY \ + | URLHISTORY_CACHE_ENTRY \ + | TRACK_OFFLINE_CACHE_ENTRY \ + | TRACK_ONLINE_CACHE_ENTRY \ + | STICKY_CACHE_ENTRY +CACHEGROUP_ATTRIBUTE_GET_ALL = (-1) +CACHEGROUP_ATTRIBUTE_BASIC = 0x00000001 +CACHEGROUP_ATTRIBUTE_FLAG = 0x00000002 
+CACHEGROUP_ATTRIBUTE_TYPE = 0x00000004 +CACHEGROUP_ATTRIBUTE_QUOTA = 0x00000008 +CACHEGROUP_ATTRIBUTE_GROUPNAME = 0x00000010 +CACHEGROUP_ATTRIBUTE_STORAGE = 0x00000020 +CACHEGROUP_FLAG_NONPURGEABLE = 0x00000001 +CACHEGROUP_FLAG_GIDONLY = 0x00000004 +CACHEGROUP_FLAG_FLUSHURL_ONDELETE = 0x00000002 +CACHEGROUP_SEARCH_ALL = 0x00000000 +CACHEGROUP_SEARCH_BYURL = 0x00000001 +CACHEGROUP_TYPE_INVALID = 0x00000001 +CACHEGROUP_READWRITE_MASK = \ + CACHEGROUP_ATTRIBUTE_TYPE \ + | CACHEGROUP_ATTRIBUTE_QUOTA \ + | CACHEGROUP_ATTRIBUTE_GROUPNAME \ + | CACHEGROUP_ATTRIBUTE_STORAGE +GROUPNAME_MAX_LENGTH = 120 +GROUP_OWNER_STORAGE_SIZE = 4 +CACHE_ENTRY_ATTRIBUTE_FC = 0x00000004 +CACHE_ENTRY_HITRATE_FC = 0x00000010 +CACHE_ENTRY_MODTIME_FC = 0x00000040 +CACHE_ENTRY_EXPTIME_FC = 0x00000080 +CACHE_ENTRY_ACCTIME_FC = 0x00000100 +CACHE_ENTRY_SYNCTIME_FC = 0x00000200 +CACHE_ENTRY_HEADERINFO_FC = 0x00000400 +CACHE_ENTRY_EXEMPT_DELTA_FC = 0x00000800 +INTERNET_CACHE_GROUP_ADD = 0 +INTERNET_CACHE_GROUP_REMOVE = 1 +INTERNET_DIAL_FORCE_PROMPT = 0x2000 +INTERNET_DIAL_SHOW_OFFLINE = 0x4000 +INTERNET_DIAL_UNATTENDED = 0x8000 +INTERENT_GOONLINE_REFRESH = 0x00000001 +INTERENT_GOONLINE_MASK = 0x00000001 +INTERNET_AUTODIAL_FORCE_ONLINE = 1 +INTERNET_AUTODIAL_FORCE_UNATTENDED = 2 +INTERNET_AUTODIAL_FAILIFSECURITYCHECK = 4 +INTERNET_AUTODIAL_OVERRIDE_NET_PRESENT = 8 +INTERNET_AUTODIAL_FLAGS_MASK = (INTERNET_AUTODIAL_FORCE_ONLINE | INTERNET_AUTODIAL_FORCE_UNATTENDED | INTERNET_AUTODIAL_FAILIFSECURITYCHECK | INTERNET_AUTODIAL_OVERRIDE_NET_PRESENT) +PROXY_AUTO_DETECT_TYPE_DHCP = 1 +PROXY_AUTO_DETECT_TYPE_DNS_A = 2 +INTERNET_CONNECTION_MODEM = 0x01 +INTERNET_CONNECTION_LAN = 0x02 +INTERNET_CONNECTION_PROXY = 0x04 +INTERNET_CONNECTION_MODEM_BUSY = 0x08 +INTERNET_RAS_INSTALLED = 0x10 +INTERNET_CONNECTION_OFFLINE = 0x20 +INTERNET_CONNECTION_CONFIGURED = 0x40 +INTERNET_CUSTOMDIAL_CONNECT = 0 +INTERNET_CUSTOMDIAL_UNATTENDED = 1 +INTERNET_CUSTOMDIAL_DISCONNECT = 2 +INTERNET_CUSTOMDIAL_SHOWOFFLINE = 4 +INTERNET_CUSTOMDIAL_SAFE_FOR_UNATTENDED = 1 +INTERNET_CUSTOMDIAL_WILL_SUPPLY_STATE = 2 +INTERNET_CUSTOMDIAL_CAN_HANGUP = 4 +INTERNET_DIALSTATE_DISCONNECTED = 1 +INTERNET_IDENTITY_FLAG_PRIVATE_CACHE = 0x01 +INTERNET_IDENTITY_FLAG_SHARED_CACHE = 0x02 +INTERNET_IDENTITY_FLAG_CLEAR_DATA = 0x04 +INTERNET_IDENTITY_FLAG_CLEAR_COOKIES = 0x08 +INTERNET_IDENTITY_FLAG_CLEAR_HISTORY = 0x10 +INTERNET_IDENTITY_FLAG_CLEAR_CONTENT = 0x20 +INTERNET_SUPPRESS_RESET_ALL = 0x00 +INTERNET_SUPPRESS_COOKIE_POLICY = 0x01 +INTERNET_SUPPRESS_COOKIE_POLICY_RESET = 0x02 +PRIVACY_TEMPLATE_NO_COOKIES = 0 +PRIVACY_TEMPLATE_HIGH = 1 +PRIVACY_TEMPLATE_MEDIUM_HIGH = 2 +PRIVACY_TEMPLATE_MEDIUM = 3 +PRIVACY_TEMPLATE_MEDIUM_LOW = 4 +PRIVACY_TEMPLATE_LOW = 5 +PRIVACY_TEMPLATE_CUSTOM = 100 +PRIVACY_TEMPLATE_ADVANCED = 101 +PRIVACY_TEMPLATE_MAX = PRIVACY_TEMPLATE_LOW +PRIVACY_TYPE_FIRST_PARTY = 0 +PRIVACY_TYPE_THIRD_PARTY = 1 + +# Generated by h2py from winhttp.h +INTERNET_DEFAULT_PORT = 0 +INTERNET_DEFAULT_HTTP_PORT = 80 +INTERNET_DEFAULT_HTTPS_PORT = 443 +WINHTTP_FLAG_ASYNC = 0x10000000 +WINHTTP_FLAG_SECURE = 0x00800000 +WINHTTP_FLAG_ESCAPE_PERCENT = 0x00000004 +WINHTTP_FLAG_NULL_CODEPAGE = 0x00000008 +WINHTTP_FLAG_BYPASS_PROXY_CACHE = 0x00000100 +WINHTTP_FLAG_REFRESH = WINHTTP_FLAG_BYPASS_PROXY_CACHE +WINHTTP_FLAG_ESCAPE_DISABLE = 0x00000040 +WINHTTP_FLAG_ESCAPE_DISABLE_QUERY = 0x00000080 +SECURITY_FLAG_IGNORE_UNKNOWN_CA = 0x00000100 +SECURITY_FLAG_IGNORE_CERT_DATE_INVALID = 0x00002000 +SECURITY_FLAG_IGNORE_CERT_CN_INVALID = 0x00001000 +SECURITY_FLAG_IGNORE_CERT_WRONG_USAGE = 
0x00000200 +INTERNET_SCHEME_HTTP = (1) +INTERNET_SCHEME_HTTPS = (2) +WINHTTP_AUTOPROXY_AUTO_DETECT = 0x00000001 +WINHTTP_AUTOPROXY_CONFIG_URL = 0x00000002 +WINHTTP_AUTOPROXY_RUN_INPROCESS = 0x00010000 +WINHTTP_AUTOPROXY_RUN_OUTPROCESS_ONLY = 0x00020000 +WINHTTP_AUTO_DETECT_TYPE_DHCP = 0x00000001 +WINHTTP_AUTO_DETECT_TYPE_DNS_A = 0x00000002 +WINHTTP_TIME_FORMAT_BUFSIZE = 62 +ICU_NO_ENCODE = 0x20000000 +ICU_DECODE = 0x10000000 +ICU_NO_META = 0x08000000 +ICU_ENCODE_SPACES_ONLY = 0x04000000 +ICU_BROWSER_MODE = 0x02000000 +ICU_ENCODE_PERCENT = 0x00001000 +ICU_ESCAPE = (-2147483648) +ICU_ESCAPE_AUTHORITY = 0x00002000 +ICU_REJECT_USERPWD = 0x00004000 +WINHTTP_ACCESS_TYPE_DEFAULT_PROXY = 0 +WINHTTP_ACCESS_TYPE_NO_PROXY = 1 +WINHTTP_ACCESS_TYPE_NAMED_PROXY = 3 +WINHTTP_OPTION_CALLBACK = 1 +WINHTTP_OPTION_RESOLVE_TIMEOUT = 2 +WINHTTP_OPTION_CONNECT_TIMEOUT = 3 +WINHTTP_OPTION_CONNECT_RETRIES = 4 +WINHTTP_OPTION_SEND_TIMEOUT = 5 +WINHTTP_OPTION_RECEIVE_TIMEOUT = 6 +WINHTTP_OPTION_RECEIVE_RESPONSE_TIMEOUT = 7 +WINHTTP_OPTION_HANDLE_TYPE = 9 +WINHTTP_OPTION_READ_BUFFER_SIZE = 12 +WINHTTP_OPTION_WRITE_BUFFER_SIZE = 13 +WINHTTP_OPTION_PARENT_HANDLE = 21 +WINHTTP_OPTION_EXTENDED_ERROR = 24 +WINHTTP_OPTION_SECURITY_FLAGS = 31 +WINHTTP_OPTION_SECURITY_CERTIFICATE_STRUCT = 32 +WINHTTP_OPTION_URL = 34 +WINHTTP_OPTION_SECURITY_KEY_BITNESS = 36 +WINHTTP_OPTION_PROXY = 38 +WINHTTP_OPTION_USER_AGENT = 41 +WINHTTP_OPTION_CONTEXT_VALUE = 45 +WINHTTP_OPTION_CLIENT_CERT_CONTEXT = 47 +WINHTTP_OPTION_REQUEST_PRIORITY = 58 +WINHTTP_OPTION_HTTP_VERSION = 59 +WINHTTP_OPTION_DISABLE_FEATURE = 63 +WINHTTP_OPTION_CODEPAGE = 68 +WINHTTP_OPTION_MAX_CONNS_PER_SERVER = 73 +WINHTTP_OPTION_MAX_CONNS_PER_1_0_SERVER = 74 +WINHTTP_OPTION_AUTOLOGON_POLICY = 77 +WINHTTP_OPTION_SERVER_CERT_CONTEXT = 78 +WINHTTP_OPTION_ENABLE_FEATURE = 79 +WINHTTP_OPTION_WORKER_THREAD_COUNT = 80 +WINHTTP_OPTION_PASSPORT_COBRANDING_TEXT = 81 +WINHTTP_OPTION_PASSPORT_COBRANDING_URL = 82 +WINHTTP_OPTION_CONFIGURE_PASSPORT_AUTH = 83 +WINHTTP_OPTION_SECURE_PROTOCOLS = 84 +WINHTTP_OPTION_ENABLETRACING = 85 +WINHTTP_OPTION_PASSPORT_SIGN_OUT = 86 +WINHTTP_OPTION_PASSPORT_RETURN_URL = 87 +WINHTTP_OPTION_REDIRECT_POLICY = 88 +WINHTTP_OPTION_MAX_HTTP_AUTOMATIC_REDIRECTS = 89 +WINHTTP_OPTION_MAX_HTTP_STATUS_CONTINUE = 90 +WINHTTP_OPTION_MAX_RESPONSE_HEADER_SIZE = 91 +WINHTTP_OPTION_MAX_RESPONSE_DRAIN_SIZE = 92 +WINHTTP_OPTION_CONNECTION_INFO = 93 +WINHTTP_OPTION_CLIENT_CERT_ISSUER_LIST = 94 +WINHTTP_OPTION_SPN = 96 +WINHTTP_OPTION_GLOBAL_PROXY_CREDS = 97 +WINHTTP_OPTION_GLOBAL_SERVER_CREDS = 98 +WINHTTP_OPTION_UNLOAD_NOTIFY_EVENT = 99 +WINHTTP_OPTION_REJECT_USERPWD_IN_URL = 100 +WINHTTP_OPTION_USE_GLOBAL_SERVER_CREDENTIALS = 101 +WINHTTP_LAST_OPTION = WINHTTP_OPTION_USE_GLOBAL_SERVER_CREDENTIALS +WINHTTP_OPTION_USERNAME = 0x1000 +WINHTTP_OPTION_PASSWORD = 0x1001 +WINHTTP_OPTION_PROXY_USERNAME = 0x1002 +WINHTTP_OPTION_PROXY_PASSWORD = 0x1003 +WINHTTP_CONNS_PER_SERVER_UNLIMITED = (-1) +WINHTTP_AUTOLOGON_SECURITY_LEVEL_MEDIUM = 0 +WINHTTP_AUTOLOGON_SECURITY_LEVEL_LOW = 1 +WINHTTP_AUTOLOGON_SECURITY_LEVEL_HIGH = 2 +WINHTTP_AUTOLOGON_SECURITY_LEVEL_DEFAULT = WINHTTP_AUTOLOGON_SECURITY_LEVEL_MEDIUM +WINHTTP_OPTION_REDIRECT_POLICY_NEVER = 0 +WINHTTP_OPTION_REDIRECT_POLICY_DISALLOW_HTTPS_TO_HTTP = 1 +WINHTTP_OPTION_REDIRECT_POLICY_ALWAYS = 2 +WINHTTP_OPTION_REDIRECT_POLICY_LAST = WINHTTP_OPTION_REDIRECT_POLICY_ALWAYS +WINHTTP_OPTION_REDIRECT_POLICY_DEFAULT = WINHTTP_OPTION_REDIRECT_POLICY_DISALLOW_HTTPS_TO_HTTP +WINHTTP_DISABLE_PASSPORT_AUTH = 0x00000000 
+WINHTTP_ENABLE_PASSPORT_AUTH = 0x10000000 +WINHTTP_DISABLE_PASSPORT_KEYRING = 0x20000000 +WINHTTP_ENABLE_PASSPORT_KEYRING = 0x40000000 +WINHTTP_DISABLE_COOKIES = 0x00000001 +WINHTTP_DISABLE_REDIRECTS = 0x00000002 +WINHTTP_DISABLE_AUTHENTICATION = 0x00000004 +WINHTTP_DISABLE_KEEP_ALIVE = 0x00000008 +WINHTTP_ENABLE_SSL_REVOCATION = 0x00000001 +WINHTTP_ENABLE_SSL_REVERT_IMPERSONATION = 0x00000002 +WINHTTP_DISABLE_SPN_SERVER_PORT = 0x00000000 +WINHTTP_ENABLE_SPN_SERVER_PORT = 0x00000001 +WINHTTP_OPTION_SPN_MASK = WINHTTP_ENABLE_SPN_SERVER_PORT +WINHTTP_HANDLE_TYPE_SESSION = 1 +WINHTTP_HANDLE_TYPE_CONNECT = 2 +WINHTTP_HANDLE_TYPE_REQUEST = 3 +WINHTTP_AUTH_SCHEME_BASIC = 0x00000001 +WINHTTP_AUTH_SCHEME_NTLM = 0x00000002 +WINHTTP_AUTH_SCHEME_PASSPORT = 0x00000004 +WINHTTP_AUTH_SCHEME_DIGEST = 0x00000008 +WINHTTP_AUTH_SCHEME_NEGOTIATE = 0x00000010 +WINHTTP_AUTH_TARGET_SERVER = 0x00000000 +WINHTTP_AUTH_TARGET_PROXY = 0x00000001 +SECURITY_FLAG_SECURE = 0x00000001 +SECURITY_FLAG_STRENGTH_WEAK = 0x10000000 +SECURITY_FLAG_STRENGTH_MEDIUM = 0x40000000 +SECURITY_FLAG_STRENGTH_STRONG = 0x20000000 +WINHTTP_CALLBACK_STATUS_FLAG_CERT_REV_FAILED = 0x00000001 +WINHTTP_CALLBACK_STATUS_FLAG_INVALID_CERT = 0x00000002 +WINHTTP_CALLBACK_STATUS_FLAG_CERT_REVOKED = 0x00000004 +WINHTTP_CALLBACK_STATUS_FLAG_INVALID_CA = 0x00000008 +WINHTTP_CALLBACK_STATUS_FLAG_CERT_CN_INVALID = 0x00000010 +WINHTTP_CALLBACK_STATUS_FLAG_CERT_DATE_INVALID = 0x00000020 +WINHTTP_CALLBACK_STATUS_FLAG_CERT_WRONG_USAGE = 0x00000040 +WINHTTP_CALLBACK_STATUS_FLAG_SECURITY_CHANNEL_ERROR = (-2147483648) +WINHTTP_FLAG_SECURE_PROTOCOL_SSL2 = 0x00000008 +WINHTTP_FLAG_SECURE_PROTOCOL_SSL3 = 0x00000020 +WINHTTP_FLAG_SECURE_PROTOCOL_TLS1 = 0x00000080 +WINHTTP_FLAG_SECURE_PROTOCOL_ALL = (WINHTTP_FLAG_SECURE_PROTOCOL_SSL2 | \ + WINHTTP_FLAG_SECURE_PROTOCOL_SSL3 | \ + WINHTTP_FLAG_SECURE_PROTOCOL_TLS1) +WINHTTP_CALLBACK_STATUS_RESOLVING_NAME = 0x00000001 +WINHTTP_CALLBACK_STATUS_NAME_RESOLVED = 0x00000002 +WINHTTP_CALLBACK_STATUS_CONNECTING_TO_SERVER = 0x00000004 +WINHTTP_CALLBACK_STATUS_CONNECTED_TO_SERVER = 0x00000008 +WINHTTP_CALLBACK_STATUS_SENDING_REQUEST = 0x00000010 +WINHTTP_CALLBACK_STATUS_REQUEST_SENT = 0x00000020 +WINHTTP_CALLBACK_STATUS_RECEIVING_RESPONSE = 0x00000040 +WINHTTP_CALLBACK_STATUS_RESPONSE_RECEIVED = 0x00000080 +WINHTTP_CALLBACK_STATUS_CLOSING_CONNECTION = 0x00000100 +WINHTTP_CALLBACK_STATUS_CONNECTION_CLOSED = 0x00000200 +WINHTTP_CALLBACK_STATUS_HANDLE_CREATED = 0x00000400 +WINHTTP_CALLBACK_STATUS_HANDLE_CLOSING = 0x00000800 +WINHTTP_CALLBACK_STATUS_DETECTING_PROXY = 0x00001000 +WINHTTP_CALLBACK_STATUS_REDIRECT = 0x00004000 +WINHTTP_CALLBACK_STATUS_INTERMEDIATE_RESPONSE = 0x00008000 +WINHTTP_CALLBACK_STATUS_SECURE_FAILURE = 0x00010000 +WINHTTP_CALLBACK_STATUS_HEADERS_AVAILABLE = 0x00020000 +WINHTTP_CALLBACK_STATUS_DATA_AVAILABLE = 0x00040000 +WINHTTP_CALLBACK_STATUS_READ_COMPLETE = 0x00080000 +WINHTTP_CALLBACK_STATUS_WRITE_COMPLETE = 0x00100000 +WINHTTP_CALLBACK_STATUS_REQUEST_ERROR = 0x00200000 +WINHTTP_CALLBACK_STATUS_SENDREQUEST_COMPLETE = 0x00400000 +API_RECEIVE_RESPONSE = (1) +API_QUERY_DATA_AVAILABLE = (2) +API_READ_DATA = (3) +API_WRITE_DATA = (4) +API_SEND_REQUEST = (5) +WINHTTP_CALLBACK_FLAG_RESOLVE_NAME = (WINHTTP_CALLBACK_STATUS_RESOLVING_NAME | WINHTTP_CALLBACK_STATUS_NAME_RESOLVED) +WINHTTP_CALLBACK_FLAG_CONNECT_TO_SERVER = (WINHTTP_CALLBACK_STATUS_CONNECTING_TO_SERVER | WINHTTP_CALLBACK_STATUS_CONNECTED_TO_SERVER) +WINHTTP_CALLBACK_FLAG_SEND_REQUEST = (WINHTTP_CALLBACK_STATUS_SENDING_REQUEST | 
WINHTTP_CALLBACK_STATUS_REQUEST_SENT) +WINHTTP_CALLBACK_FLAG_RECEIVE_RESPONSE = (WINHTTP_CALLBACK_STATUS_RECEIVING_RESPONSE | WINHTTP_CALLBACK_STATUS_RESPONSE_RECEIVED) +WINHTTP_CALLBACK_FLAG_CLOSE_CONNECTION = (WINHTTP_CALLBACK_STATUS_CLOSING_CONNECTION | WINHTTP_CALLBACK_STATUS_CONNECTION_CLOSED) +WINHTTP_CALLBACK_FLAG_HANDLES = (WINHTTP_CALLBACK_STATUS_HANDLE_CREATED | WINHTTP_CALLBACK_STATUS_HANDLE_CLOSING) +WINHTTP_CALLBACK_FLAG_DETECTING_PROXY = WINHTTP_CALLBACK_STATUS_DETECTING_PROXY +WINHTTP_CALLBACK_FLAG_REDIRECT = WINHTTP_CALLBACK_STATUS_REDIRECT +WINHTTP_CALLBACK_FLAG_INTERMEDIATE_RESPONSE = WINHTTP_CALLBACK_STATUS_INTERMEDIATE_RESPONSE +WINHTTP_CALLBACK_FLAG_SECURE_FAILURE = WINHTTP_CALLBACK_STATUS_SECURE_FAILURE +WINHTTP_CALLBACK_FLAG_SENDREQUEST_COMPLETE = WINHTTP_CALLBACK_STATUS_SENDREQUEST_COMPLETE +WINHTTP_CALLBACK_FLAG_HEADERS_AVAILABLE = WINHTTP_CALLBACK_STATUS_HEADERS_AVAILABLE +WINHTTP_CALLBACK_FLAG_DATA_AVAILABLE = WINHTTP_CALLBACK_STATUS_DATA_AVAILABLE +WINHTTP_CALLBACK_FLAG_READ_COMPLETE = WINHTTP_CALLBACK_STATUS_READ_COMPLETE +WINHTTP_CALLBACK_FLAG_WRITE_COMPLETE = WINHTTP_CALLBACK_STATUS_WRITE_COMPLETE +WINHTTP_CALLBACK_FLAG_REQUEST_ERROR = WINHTTP_CALLBACK_STATUS_REQUEST_ERROR +WINHTTP_CALLBACK_FLAG_ALL_COMPLETIONS = (WINHTTP_CALLBACK_STATUS_SENDREQUEST_COMPLETE \ + | WINHTTP_CALLBACK_STATUS_HEADERS_AVAILABLE \ + | WINHTTP_CALLBACK_STATUS_DATA_AVAILABLE \ + | WINHTTP_CALLBACK_STATUS_READ_COMPLETE \ + | WINHTTP_CALLBACK_STATUS_WRITE_COMPLETE \ + | WINHTTP_CALLBACK_STATUS_REQUEST_ERROR) +WINHTTP_CALLBACK_FLAG_ALL_NOTIFICATIONS = (-1) +WINHTTP_QUERY_MIME_VERSION = 0 +WINHTTP_QUERY_CONTENT_TYPE = 1 +WINHTTP_QUERY_CONTENT_TRANSFER_ENCODING = 2 +WINHTTP_QUERY_CONTENT_ID = 3 +WINHTTP_QUERY_CONTENT_DESCRIPTION = 4 +WINHTTP_QUERY_CONTENT_LENGTH = 5 +WINHTTP_QUERY_CONTENT_LANGUAGE = 6 +WINHTTP_QUERY_ALLOW = 7 +WINHTTP_QUERY_PUBLIC = 8 +WINHTTP_QUERY_DATE = 9 +WINHTTP_QUERY_EXPIRES = 10 +WINHTTP_QUERY_LAST_MODIFIED = 11 +WINHTTP_QUERY_MESSAGE_ID = 12 +WINHTTP_QUERY_URI = 13 +WINHTTP_QUERY_DERIVED_FROM = 14 +WINHTTP_QUERY_COST = 15 +WINHTTP_QUERY_LINK = 16 +WINHTTP_QUERY_PRAGMA = 17 +WINHTTP_QUERY_VERSION = 18 +WINHTTP_QUERY_STATUS_CODE = 19 +WINHTTP_QUERY_STATUS_TEXT = 20 +WINHTTP_QUERY_RAW_HEADERS = 21 +WINHTTP_QUERY_RAW_HEADERS_CRLF = 22 +WINHTTP_QUERY_CONNECTION = 23 +WINHTTP_QUERY_ACCEPT = 24 +WINHTTP_QUERY_ACCEPT_CHARSET = 25 +WINHTTP_QUERY_ACCEPT_ENCODING = 26 +WINHTTP_QUERY_ACCEPT_LANGUAGE = 27 +WINHTTP_QUERY_AUTHORIZATION = 28 +WINHTTP_QUERY_CONTENT_ENCODING = 29 +WINHTTP_QUERY_FORWARDED = 30 +WINHTTP_QUERY_FROM = 31 +WINHTTP_QUERY_IF_MODIFIED_SINCE = 32 +WINHTTP_QUERY_LOCATION = 33 +WINHTTP_QUERY_ORIG_URI = 34 +WINHTTP_QUERY_REFERER = 35 +WINHTTP_QUERY_RETRY_AFTER = 36 +WINHTTP_QUERY_SERVER = 37 +WINHTTP_QUERY_TITLE = 38 +WINHTTP_QUERY_USER_AGENT = 39 +WINHTTP_QUERY_WWW_AUTHENTICATE = 40 +WINHTTP_QUERY_PROXY_AUTHENTICATE = 41 +WINHTTP_QUERY_ACCEPT_RANGES = 42 +WINHTTP_QUERY_SET_COOKIE = 43 +WINHTTP_QUERY_COOKIE = 44 +WINHTTP_QUERY_REQUEST_METHOD = 45 +WINHTTP_QUERY_REFRESH = 46 +WINHTTP_QUERY_CONTENT_DISPOSITION = 47 +WINHTTP_QUERY_AGE = 48 +WINHTTP_QUERY_CACHE_CONTROL = 49 +WINHTTP_QUERY_CONTENT_BASE = 50 +WINHTTP_QUERY_CONTENT_LOCATION = 51 +WINHTTP_QUERY_CONTENT_MD5 = 52 +WINHTTP_QUERY_CONTENT_RANGE = 53 +WINHTTP_QUERY_ETAG = 54 +WINHTTP_QUERY_HOST = 55 +WINHTTP_QUERY_IF_MATCH = 56 +WINHTTP_QUERY_IF_NONE_MATCH = 57 +WINHTTP_QUERY_IF_RANGE = 58 +WINHTTP_QUERY_IF_UNMODIFIED_SINCE = 59 +WINHTTP_QUERY_MAX_FORWARDS = 60 +WINHTTP_QUERY_PROXY_AUTHORIZATION = 61 
+WINHTTP_QUERY_RANGE = 62 +WINHTTP_QUERY_TRANSFER_ENCODING = 63 +WINHTTP_QUERY_UPGRADE = 64 +WINHTTP_QUERY_VARY = 65 +WINHTTP_QUERY_VIA = 66 +WINHTTP_QUERY_WARNING = 67 +WINHTTP_QUERY_EXPECT = 68 +WINHTTP_QUERY_PROXY_CONNECTION = 69 +WINHTTP_QUERY_UNLESS_MODIFIED_SINCE = 70 +WINHTTP_QUERY_PROXY_SUPPORT = 75 +WINHTTP_QUERY_AUTHENTICATION_INFO = 76 +WINHTTP_QUERY_PASSPORT_URLS = 77 +WINHTTP_QUERY_PASSPORT_CONFIG = 78 +WINHTTP_QUERY_MAX = 78 +WINHTTP_QUERY_CUSTOM = 65535 +WINHTTP_QUERY_FLAG_REQUEST_HEADERS = (-2147483648) +WINHTTP_QUERY_FLAG_SYSTEMTIME = 0x40000000 +WINHTTP_QUERY_FLAG_NUMBER = 0x20000000 +HTTP_STATUS_CONTINUE = 100 +HTTP_STATUS_SWITCH_PROTOCOLS = 101 +HTTP_STATUS_OK = 200 +HTTP_STATUS_CREATED = 201 +HTTP_STATUS_ACCEPTED = 202 +HTTP_STATUS_PARTIAL = 203 +HTTP_STATUS_NO_CONTENT = 204 +HTTP_STATUS_RESET_CONTENT = 205 +HTTP_STATUS_PARTIAL_CONTENT = 206 +HTTP_STATUS_WEBDAV_MULTI_STATUS = 207 +HTTP_STATUS_AMBIGUOUS = 300 +HTTP_STATUS_MOVED = 301 +HTTP_STATUS_REDIRECT = 302 +HTTP_STATUS_REDIRECT_METHOD = 303 +HTTP_STATUS_NOT_MODIFIED = 304 +HTTP_STATUS_USE_PROXY = 305 +HTTP_STATUS_REDIRECT_KEEP_VERB = 307 +HTTP_STATUS_BAD_REQUEST = 400 +HTTP_STATUS_DENIED = 401 +HTTP_STATUS_PAYMENT_REQ = 402 +HTTP_STATUS_FORBIDDEN = 403 +HTTP_STATUS_NOT_FOUND = 404 +HTTP_STATUS_BAD_METHOD = 405 +HTTP_STATUS_NONE_ACCEPTABLE = 406 +HTTP_STATUS_PROXY_AUTH_REQ = 407 +HTTP_STATUS_REQUEST_TIMEOUT = 408 +HTTP_STATUS_CONFLICT = 409 +HTTP_STATUS_GONE = 410 +HTTP_STATUS_LENGTH_REQUIRED = 411 +HTTP_STATUS_PRECOND_FAILED = 412 +HTTP_STATUS_REQUEST_TOO_LARGE = 413 +HTTP_STATUS_URI_TOO_LONG = 414 +HTTP_STATUS_UNSUPPORTED_MEDIA = 415 +HTTP_STATUS_RETRY_WITH = 449 +HTTP_STATUS_SERVER_ERROR = 500 +HTTP_STATUS_NOT_SUPPORTED = 501 +HTTP_STATUS_BAD_GATEWAY = 502 +HTTP_STATUS_SERVICE_UNAVAIL = 503 +HTTP_STATUS_GATEWAY_TIMEOUT = 504 +HTTP_STATUS_VERSION_NOT_SUP = 505 +HTTP_STATUS_FIRST = HTTP_STATUS_CONTINUE +HTTP_STATUS_LAST = HTTP_STATUS_VERSION_NOT_SUP +WINHTTP_ADDREQ_INDEX_MASK = 0x0000FFFF +WINHTTP_ADDREQ_FLAGS_MASK = (-65536) +WINHTTP_ADDREQ_FLAG_ADD_IF_NEW = 0x10000000 +WINHTTP_ADDREQ_FLAG_ADD = 0x20000000 +WINHTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA = 0x40000000 +WINHTTP_ADDREQ_FLAG_COALESCE_WITH_SEMICOLON = 0x01000000 +WINHTTP_ADDREQ_FLAG_COALESCE = WINHTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA +WINHTTP_ADDREQ_FLAG_REPLACE = (-2147483648) +WINHTTP_IGNORE_REQUEST_TOTAL_LENGTH = 0 +WINHTTP_ERROR_BASE = 12000 +ERROR_WINHTTP_OUT_OF_HANDLES = (WINHTTP_ERROR_BASE + 1) +ERROR_WINHTTP_TIMEOUT = (WINHTTP_ERROR_BASE + 2) +ERROR_WINHTTP_INTERNAL_ERROR = (WINHTTP_ERROR_BASE + 4) +ERROR_WINHTTP_INVALID_URL = (WINHTTP_ERROR_BASE + 5) +ERROR_WINHTTP_UNRECOGNIZED_SCHEME = (WINHTTP_ERROR_BASE + 6) +ERROR_WINHTTP_NAME_NOT_RESOLVED = (WINHTTP_ERROR_BASE + 7) +ERROR_WINHTTP_INVALID_OPTION = (WINHTTP_ERROR_BASE + 9) +ERROR_WINHTTP_OPTION_NOT_SETTABLE = (WINHTTP_ERROR_BASE + 11) +ERROR_WINHTTP_SHUTDOWN = (WINHTTP_ERROR_BASE + 12) +ERROR_WINHTTP_LOGIN_FAILURE = (WINHTTP_ERROR_BASE + 15) +ERROR_WINHTTP_OPERATION_CANCELLED = (WINHTTP_ERROR_BASE + 17) +ERROR_WINHTTP_INCORRECT_HANDLE_TYPE = (WINHTTP_ERROR_BASE + 18) +ERROR_WINHTTP_INCORRECT_HANDLE_STATE = (WINHTTP_ERROR_BASE + 19) +ERROR_WINHTTP_CANNOT_CONNECT = (WINHTTP_ERROR_BASE + 29) +ERROR_WINHTTP_CONNECTION_ERROR = (WINHTTP_ERROR_BASE + 30) +ERROR_WINHTTP_RESEND_REQUEST = (WINHTTP_ERROR_BASE + 32) +ERROR_WINHTTP_CLIENT_AUTH_CERT_NEEDED = (WINHTTP_ERROR_BASE + 44) +ERROR_WINHTTP_CANNOT_CALL_BEFORE_OPEN = (WINHTTP_ERROR_BASE + 100) +ERROR_WINHTTP_CANNOT_CALL_BEFORE_SEND = (WINHTTP_ERROR_BASE + 
101) +ERROR_WINHTTP_CANNOT_CALL_AFTER_SEND = (WINHTTP_ERROR_BASE + 102) +ERROR_WINHTTP_CANNOT_CALL_AFTER_OPEN = (WINHTTP_ERROR_BASE + 103) +ERROR_WINHTTP_HEADER_NOT_FOUND = (WINHTTP_ERROR_BASE + 150) +ERROR_WINHTTP_INVALID_SERVER_RESPONSE = (WINHTTP_ERROR_BASE + 152) +ERROR_WINHTTP_INVALID_HEADER = (WINHTTP_ERROR_BASE + 153) +ERROR_WINHTTP_INVALID_QUERY_REQUEST = (WINHTTP_ERROR_BASE + 154) +ERROR_WINHTTP_HEADER_ALREADY_EXISTS = (WINHTTP_ERROR_BASE + 155) +ERROR_WINHTTP_REDIRECT_FAILED = (WINHTTP_ERROR_BASE + 156) +ERROR_WINHTTP_AUTO_PROXY_SERVICE_ERROR = (WINHTTP_ERROR_BASE + 178) +ERROR_WINHTTP_BAD_AUTO_PROXY_SCRIPT = (WINHTTP_ERROR_BASE + 166) +ERROR_WINHTTP_UNABLE_TO_DOWNLOAD_SCRIPT = (WINHTTP_ERROR_BASE + 167) +ERROR_WINHTTP_NOT_INITIALIZED = (WINHTTP_ERROR_BASE + 172) +ERROR_WINHTTP_SECURE_FAILURE = (WINHTTP_ERROR_BASE + 175) +ERROR_WINHTTP_SECURE_CERT_DATE_INVALID = (WINHTTP_ERROR_BASE + 37) +ERROR_WINHTTP_SECURE_CERT_CN_INVALID = (WINHTTP_ERROR_BASE + 38) +ERROR_WINHTTP_SECURE_INVALID_CA = (WINHTTP_ERROR_BASE + 45) +ERROR_WINHTTP_SECURE_CERT_REV_FAILED = (WINHTTP_ERROR_BASE + 57) +ERROR_WINHTTP_SECURE_CHANNEL_ERROR = (WINHTTP_ERROR_BASE + 157) +ERROR_WINHTTP_SECURE_INVALID_CERT = (WINHTTP_ERROR_BASE + 169) +ERROR_WINHTTP_SECURE_CERT_REVOKED = (WINHTTP_ERROR_BASE + 170) +ERROR_WINHTTP_SECURE_CERT_WRONG_USAGE = (WINHTTP_ERROR_BASE + 179) +ERROR_WINHTTP_AUTODETECTION_FAILED = (WINHTTP_ERROR_BASE + 180) +ERROR_WINHTTP_HEADER_COUNT_EXCEEDED = (WINHTTP_ERROR_BASE + 181) +ERROR_WINHTTP_HEADER_SIZE_OVERFLOW = (WINHTTP_ERROR_BASE + 182) +ERROR_WINHTTP_CHUNKED_ENCODING_HEADER_SIZE_OVERFLOW = (WINHTTP_ERROR_BASE + 183) +ERROR_WINHTTP_RESPONSE_DRAIN_OVERFLOW = (WINHTTP_ERROR_BASE + 184) +ERROR_WINHTTP_CLIENT_CERT_NO_PRIVATE_KEY = (WINHTTP_ERROR_BASE + 185) +ERROR_WINHTTP_CLIENT_CERT_NO_ACCESS_PRIVATE_KEY = (WINHTTP_ERROR_BASE + 186) +WINHTTP_ERROR_LAST = (WINHTTP_ERROR_BASE + 186) + +WINHTTP_NO_PROXY_NAME = None +WINHTTP_NO_PROXY_BYPASS = None +WINHTTP_NO_REFERER = None +WINHTTP_DEFAULT_ACCEPT_TYPES = None +WINHTTP_NO_ADDITIONAL_HEADERS = None +WINHTTP_NO_REQUEST_DATA = None diff --git a/venv/Lib/site-packages/win32/lib/win32netcon.py b/venv/Lib/site-packages/win32/lib/win32netcon.py new file mode 100644 index 00000000..73795969 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32netcon.py @@ -0,0 +1,655 @@ +# Generated by h2py from lmaccess.h + +# Included from lmcons.h +CNLEN = 15 +LM20_CNLEN = 15 +DNLEN = CNLEN +LM20_DNLEN = LM20_CNLEN +UNCLEN = (CNLEN+2) +LM20_UNCLEN = (LM20_CNLEN+2) +NNLEN = 80 +LM20_NNLEN = 12 +RMLEN = (UNCLEN+1+NNLEN) +LM20_RMLEN = (LM20_UNCLEN+1+LM20_NNLEN) +SNLEN = 80 +LM20_SNLEN = 15 +STXTLEN = 256 +LM20_STXTLEN = 63 +PATHLEN = 256 +LM20_PATHLEN = 256 +DEVLEN = 80 +LM20_DEVLEN = 8 +EVLEN = 16 +UNLEN = 256 +LM20_UNLEN = 20 +GNLEN = UNLEN +LM20_GNLEN = LM20_UNLEN +PWLEN = 256 +LM20_PWLEN = 14 +SHPWLEN = 8 +CLTYPE_LEN = 12 +MAXCOMMENTSZ = 256 +LM20_MAXCOMMENTSZ = 48 +QNLEN = NNLEN +LM20_QNLEN = LM20_NNLEN +ALERTSZ = 128 +NETBIOS_NAME_LEN = 16 +CRYPT_KEY_LEN = 7 +CRYPT_TXT_LEN = 8 +ENCRYPTED_PWLEN = 16 +SESSION_PWLEN = 24 +SESSION_CRYPT_KLEN = 21 +PARMNUM_ALL = 0 +PARM_ERROR_NONE = 0 +PARMNUM_BASE_INFOLEVEL = 1000 +NULL = 0 +PLATFORM_ID_DOS = 300 +PLATFORM_ID_OS2 = 400 +PLATFORM_ID_NT = 500 +PLATFORM_ID_OSF = 600 +PLATFORM_ID_VMS = 700 +MAX_LANMAN_MESSAGE_ID = 5799 +UF_SCRIPT = 1 +UF_ACCOUNTDISABLE = 2 +UF_HOMEDIR_REQUIRED = 8 +UF_LOCKOUT = 16 +UF_PASSWD_NOTREQD = 32 +UF_PASSWD_CANT_CHANGE = 64 +UF_TEMP_DUPLICATE_ACCOUNT = 256 +UF_NORMAL_ACCOUNT = 512 
+UF_INTERDOMAIN_TRUST_ACCOUNT = 2048 +UF_WORKSTATION_TRUST_ACCOUNT = 4096 +UF_SERVER_TRUST_ACCOUNT = 8192 +UF_MACHINE_ACCOUNT_MASK = ( UF_INTERDOMAIN_TRUST_ACCOUNT | \ + UF_WORKSTATION_TRUST_ACCOUNT | \ + UF_SERVER_TRUST_ACCOUNT ) +UF_ACCOUNT_TYPE_MASK = ( \ + UF_TEMP_DUPLICATE_ACCOUNT | \ + UF_NORMAL_ACCOUNT | \ + UF_INTERDOMAIN_TRUST_ACCOUNT | \ + UF_WORKSTATION_TRUST_ACCOUNT | \ + UF_SERVER_TRUST_ACCOUNT \ + ) +UF_DONT_EXPIRE_PASSWD = 65536 +UF_MNS_LOGON_ACCOUNT = 131072 +UF_SETTABLE_BITS = ( \ + UF_SCRIPT | \ + UF_ACCOUNTDISABLE | \ + UF_LOCKOUT | \ + UF_HOMEDIR_REQUIRED | \ + UF_PASSWD_NOTREQD | \ + UF_PASSWD_CANT_CHANGE | \ + UF_ACCOUNT_TYPE_MASK | \ + UF_DONT_EXPIRE_PASSWD | \ + UF_MNS_LOGON_ACCOUNT \ + ) +FILTER_TEMP_DUPLICATE_ACCOUNT = (1) +FILTER_NORMAL_ACCOUNT = (2) +FILTER_INTERDOMAIN_TRUST_ACCOUNT = (8) +FILTER_WORKSTATION_TRUST_ACCOUNT = (16) +FILTER_SERVER_TRUST_ACCOUNT = (32) +LG_INCLUDE_INDIRECT = (1) +AF_OP_PRINT = 1 +AF_OP_COMM = 2 +AF_OP_SERVER = 4 +AF_OP_ACCOUNTS = 8 +AF_SETTABLE_BITS = (AF_OP_PRINT | AF_OP_COMM | \ + AF_OP_SERVER | AF_OP_ACCOUNTS) +UAS_ROLE_STANDALONE = 0 +UAS_ROLE_MEMBER = 1 +UAS_ROLE_BACKUP = 2 +UAS_ROLE_PRIMARY = 3 +USER_NAME_PARMNUM = 1 +USER_PASSWORD_PARMNUM = 3 +USER_PASSWORD_AGE_PARMNUM = 4 +USER_PRIV_PARMNUM = 5 +USER_HOME_DIR_PARMNUM = 6 +USER_COMMENT_PARMNUM = 7 +USER_FLAGS_PARMNUM = 8 +USER_SCRIPT_PATH_PARMNUM = 9 +USER_AUTH_FLAGS_PARMNUM = 10 +USER_FULL_NAME_PARMNUM = 11 +USER_USR_COMMENT_PARMNUM = 12 +USER_PARMS_PARMNUM = 13 +USER_WORKSTATIONS_PARMNUM = 14 +USER_LAST_LOGON_PARMNUM = 15 +USER_LAST_LOGOFF_PARMNUM = 16 +USER_ACCT_EXPIRES_PARMNUM = 17 +USER_MAX_STORAGE_PARMNUM = 18 +USER_UNITS_PER_WEEK_PARMNUM = 19 +USER_LOGON_HOURS_PARMNUM = 20 +USER_PAD_PW_COUNT_PARMNUM = 21 +USER_NUM_LOGONS_PARMNUM = 22 +USER_LOGON_SERVER_PARMNUM = 23 +USER_COUNTRY_CODE_PARMNUM = 24 +USER_CODE_PAGE_PARMNUM = 25 +USER_PRIMARY_GROUP_PARMNUM = 51 +USER_PROFILE = 52 +USER_PROFILE_PARMNUM = 52 +USER_HOME_DIR_DRIVE_PARMNUM = 53 +USER_NAME_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_NAME_PARMNUM) +USER_PASSWORD_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_PASSWORD_PARMNUM) +USER_PASSWORD_AGE_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_PASSWORD_AGE_PARMNUM) +USER_PRIV_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_PRIV_PARMNUM) +USER_HOME_DIR_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_HOME_DIR_PARMNUM) +USER_COMMENT_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_COMMENT_PARMNUM) +USER_FLAGS_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_FLAGS_PARMNUM) +USER_SCRIPT_PATH_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_SCRIPT_PATH_PARMNUM) +USER_AUTH_FLAGS_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_AUTH_FLAGS_PARMNUM) +USER_FULL_NAME_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_FULL_NAME_PARMNUM) +USER_USR_COMMENT_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_USR_COMMENT_PARMNUM) +USER_PARMS_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_PARMS_PARMNUM) +USER_WORKSTATIONS_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_WORKSTATIONS_PARMNUM) +USER_LAST_LOGON_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_LAST_LOGON_PARMNUM) +USER_LAST_LOGOFF_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_LAST_LOGOFF_PARMNUM) +USER_ACCT_EXPIRES_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_ACCT_EXPIRES_PARMNUM) +USER_MAX_STORAGE_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_MAX_STORAGE_PARMNUM) +USER_UNITS_PER_WEEK_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_UNITS_PER_WEEK_PARMNUM) +USER_LOGON_HOURS_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + 
USER_LOGON_HOURS_PARMNUM) +USER_PAD_PW_COUNT_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_PAD_PW_COUNT_PARMNUM) +USER_NUM_LOGONS_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_NUM_LOGONS_PARMNUM) +USER_LOGON_SERVER_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_LOGON_SERVER_PARMNUM) +USER_COUNTRY_CODE_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_COUNTRY_CODE_PARMNUM) +USER_CODE_PAGE_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_CODE_PAGE_PARMNUM) +USER_PRIMARY_GROUP_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_PRIMARY_GROUP_PARMNUM) +USER_HOME_DIR_DRIVE_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + USER_HOME_DIR_DRIVE_PARMNUM) +NULL_USERSETINFO_PASSWD = " " +UNITS_PER_DAY = 24 +UNITS_PER_WEEK = UNITS_PER_DAY * 7 +USER_PRIV_MASK = 3 +USER_PRIV_GUEST = 0 +USER_PRIV_USER = 1 +USER_PRIV_ADMIN = 2 +MAX_PASSWD_LEN = PWLEN +DEF_MIN_PWLEN = 6 +DEF_PWUNIQUENESS = 5 +DEF_MAX_PWHIST = 8 +DEF_MAX_BADPW = 0 +VALIDATED_LOGON = 0 +PASSWORD_EXPIRED = 2 +NON_VALIDATED_LOGON = 3 +VALID_LOGOFF = 1 +MODALS_MIN_PASSWD_LEN_PARMNUM = 1 +MODALS_MAX_PASSWD_AGE_PARMNUM = 2 +MODALS_MIN_PASSWD_AGE_PARMNUM = 3 +MODALS_FORCE_LOGOFF_PARMNUM = 4 +MODALS_PASSWD_HIST_LEN_PARMNUM = 5 +MODALS_ROLE_PARMNUM = 6 +MODALS_PRIMARY_PARMNUM = 7 +MODALS_DOMAIN_NAME_PARMNUM = 8 +MODALS_DOMAIN_ID_PARMNUM = 9 +MODALS_LOCKOUT_DURATION_PARMNUM = 10 +MODALS_LOCKOUT_OBSERVATION_WINDOW_PARMNUM = 11 +MODALS_LOCKOUT_THRESHOLD_PARMNUM = 12 +MODALS_MIN_PASSWD_LEN_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + MODALS_MIN_PASSWD_LEN_PARMNUM) +MODALS_MAX_PASSWD_AGE_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + MODALS_MAX_PASSWD_AGE_PARMNUM) +MODALS_MIN_PASSWD_AGE_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + MODALS_MIN_PASSWD_AGE_PARMNUM) +MODALS_FORCE_LOGOFF_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + MODALS_FORCE_LOGOFF_PARMNUM) +MODALS_PASSWD_HIST_LEN_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + MODALS_PASSWD_HIST_LEN_PARMNUM) +MODALS_ROLE_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + MODALS_ROLE_PARMNUM) +MODALS_PRIMARY_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + MODALS_PRIMARY_PARMNUM) +MODALS_DOMAIN_NAME_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + MODALS_DOMAIN_NAME_PARMNUM) +MODALS_DOMAIN_ID_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + MODALS_DOMAIN_ID_PARMNUM) +GROUPIDMASK = 32768 +GROUP_ALL_PARMNUM = 0 +GROUP_NAME_PARMNUM = 1 +GROUP_COMMENT_PARMNUM = 2 +GROUP_ATTRIBUTES_PARMNUM = 3 +GROUP_ALL_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + GROUP_ALL_PARMNUM) +GROUP_NAME_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + GROUP_NAME_PARMNUM) +GROUP_COMMENT_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + GROUP_COMMENT_PARMNUM) +GROUP_ATTRIBUTES_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + GROUP_ATTRIBUTES_PARMNUM) +LOCALGROUP_NAME_PARMNUM = 1 +LOCALGROUP_COMMENT_PARMNUM = 2 +MAXPERMENTRIES = 64 +ACCESS_NONE = 0 +ACCESS_READ = 1 +ACCESS_WRITE = 2 +ACCESS_CREATE = 4 +ACCESS_EXEC = 8 +ACCESS_DELETE = 16 +ACCESS_ATRIB = 32 +ACCESS_PERM = 64 +ACCESS_GROUP = 32768 +ACCESS_AUDIT = 1 +ACCESS_SUCCESS_OPEN = 16 +ACCESS_SUCCESS_WRITE = 32 +ACCESS_SUCCESS_DELETE = 64 +ACCESS_SUCCESS_ACL = 128 +ACCESS_SUCCESS_MASK = 240 +ACCESS_FAIL_OPEN = 256 +ACCESS_FAIL_WRITE = 512 +ACCESS_FAIL_DELETE = 1024 +ACCESS_FAIL_ACL = 2048 +ACCESS_FAIL_MASK = 3840 +ACCESS_FAIL_SHIFT = 4 +ACCESS_RESOURCE_NAME_PARMNUM = 1 +ACCESS_ATTR_PARMNUM = 2 +ACCESS_COUNT_PARMNUM = 3 +ACCESS_ACCESS_LIST_PARMNUM = 4 +ACCESS_RESOURCE_NAME_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + ACCESS_RESOURCE_NAME_PARMNUM) +ACCESS_ATTR_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + ACCESS_ATTR_PARMNUM) +ACCESS_COUNT_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + 
ACCESS_COUNT_PARMNUM) +ACCESS_ACCESS_LIST_INFOLEVEL = \ + (PARMNUM_BASE_INFOLEVEL + ACCESS_ACCESS_LIST_PARMNUM) +ACCESS_LETTERS = "RWCXDAP " +NETLOGON_CONTROL_QUERY = 1 +NETLOGON_CONTROL_REPLICATE = 2 +NETLOGON_CONTROL_SYNCHRONIZE = 3 +NETLOGON_CONTROL_PDC_REPLICATE = 4 +NETLOGON_CONTROL_REDISCOVER = 5 +NETLOGON_CONTROL_TC_QUERY = 6 +NETLOGON_CONTROL_TRANSPORT_NOTIFY = 7 +NETLOGON_CONTROL_FIND_USER = 8 +NETLOGON_CONTROL_UNLOAD_NETLOGON_DLL = 65531 +NETLOGON_CONTROL_BACKUP_CHANGE_LOG = 65532 +NETLOGON_CONTROL_TRUNCATE_LOG = 65533 +NETLOGON_CONTROL_SET_DBFLAG = 65534 +NETLOGON_CONTROL_BREAKPOINT = 65535 +NETLOGON_REPLICATION_NEEDED = 1 +NETLOGON_REPLICATION_IN_PROGRESS = 2 +NETLOGON_FULL_SYNC_REPLICATION = 4 +NETLOGON_REDO_NEEDED = 8 + +###################### +# Manual stuff + +TEXT=lambda x:x + +MAX_PREFERRED_LENGTH = -1 +PARM_ERROR_UNKNOWN = -1 +MESSAGE_FILENAME = TEXT("NETMSG") +OS2MSG_FILENAME = TEXT("BASE") +HELP_MSG_FILENAME = TEXT("NETH") +BACKUP_MSG_FILENAME = TEXT("BAK.MSG") +TIMEQ_FOREVER = -1 +USER_MAXSTORAGE_UNLIMITED = -1 +USER_NO_LOGOFF = -1 +DEF_MAX_PWAGE = TIMEQ_FOREVER +DEF_MIN_PWAGE = 0 +DEF_FORCE_LOGOFF = -1 +ONE_DAY = 1*24*3600 +GROUP_SPECIALGRP_USERS = "USERS" +GROUP_SPECIALGRP_ADMINS = "ADMINS" +GROUP_SPECIALGRP_GUESTS = "GUESTS" +GROUP_SPECIALGRP_LOCAL = "LOCAL" +ACCESS_ALL = ( ACCESS_READ | ACCESS_WRITE | ACCESS_CREATE | ACCESS_EXEC | ACCESS_DELETE | ACCESS_ATRIB | ACCESS_PERM ) + +# From lmserver.h +SV_PLATFORM_ID_OS2 = 400 +SV_PLATFORM_ID_NT = 500 +MAJOR_VERSION_MASK = 15 +SV_TYPE_WORKSTATION = 1 +SV_TYPE_SERVER = 2 +SV_TYPE_SQLSERVER = 4 +SV_TYPE_DOMAIN_CTRL = 8 +SV_TYPE_DOMAIN_BAKCTRL = 16 +SV_TYPE_TIME_SOURCE = 32 +SV_TYPE_AFP = 64 +SV_TYPE_NOVELL = 128 +SV_TYPE_DOMAIN_MEMBER = 256 +SV_TYPE_PRINTQ_SERVER = 512 +SV_TYPE_DIALIN_SERVER = 1024 +SV_TYPE_XENIX_SERVER = 2048 +SV_TYPE_SERVER_UNIX = SV_TYPE_XENIX_SERVER +SV_TYPE_NT = 4096 +SV_TYPE_WFW = 8192 +SV_TYPE_SERVER_MFPN = 16384 +SV_TYPE_SERVER_NT = 32768 +SV_TYPE_POTENTIAL_BROWSER = 65536 +SV_TYPE_BACKUP_BROWSER = 131072 +SV_TYPE_MASTER_BROWSER = 262144 +SV_TYPE_DOMAIN_MASTER = 524288 +SV_TYPE_SERVER_OSF = 1048576 +SV_TYPE_SERVER_VMS = 2097152 +SV_TYPE_WINDOWS = 4194304 +SV_TYPE_DFS = 8388608 +SV_TYPE_CLUSTER_NT = 16777216 +SV_TYPE_DCE = 268435456 +SV_TYPE_ALTERNATE_XPORT = 536870912 +SV_TYPE_LOCAL_LIST_ONLY = 1073741824 +SV_TYPE_DOMAIN_ENUM = -2147483648 +SV_TYPE_ALL = -1 +SV_NODISC = -1 +SV_USERSECURITY = 1 +SV_SHARESECURITY = 0 +SV_HIDDEN = 1 +SV_VISIBLE = 0 +SV_PLATFORM_ID_PARMNUM = 101 +SV_NAME_PARMNUM = 102 +SV_VERSION_MAJOR_PARMNUM = 103 +SV_VERSION_MINOR_PARMNUM = 104 +SV_TYPE_PARMNUM = 105 +SV_COMMENT_PARMNUM = 5 +SV_USERS_PARMNUM = 107 +SV_DISC_PARMNUM = 10 +SV_HIDDEN_PARMNUM = 16 +SV_ANNOUNCE_PARMNUM = 17 +SV_ANNDELTA_PARMNUM = 18 +SV_USERPATH_PARMNUM = 112 +SV_ULIST_MTIME_PARMNUM = 401 +SV_GLIST_MTIME_PARMNUM = 402 +SV_ALIST_MTIME_PARMNUM = 403 +SV_ALERTS_PARMNUM = 11 +SV_SECURITY_PARMNUM = 405 +SV_NUMADMIN_PARMNUM = 406 +SV_LANMASK_PARMNUM = 407 +SV_GUESTACC_PARMNUM = 408 +SV_CHDEVQ_PARMNUM = 410 +SV_CHDEVJOBS_PARMNUM = 411 +SV_CONNECTIONS_PARMNUM = 412 +SV_SHARES_PARMNUM = 413 +SV_OPENFILES_PARMNUM = 414 +SV_SESSREQS_PARMNUM = 417 +SV_ACTIVELOCKS_PARMNUM = 419 +SV_NUMREQBUF_PARMNUM = 420 +SV_NUMBIGBUF_PARMNUM = 422 +SV_NUMFILETASKS_PARMNUM = 423 +SV_ALERTSCHED_PARMNUM = 37 +SV_ERRORALERT_PARMNUM = 38 +SV_LOGONALERT_PARMNUM = 39 +SV_ACCESSALERT_PARMNUM = 40 +SV_DISKALERT_PARMNUM = 41 +SV_NETIOALERT_PARMNUM = 42 +SV_MAXAUDITSZ_PARMNUM = 43 +SV_SRVHEURISTICS_PARMNUM = 431 +SV_SESSOPENS_PARMNUM = 501 
+SV_SESSVCS_PARMNUM = 502 +SV_OPENSEARCH_PARMNUM = 503 +SV_SIZREQBUF_PARMNUM = 504 +SV_INITWORKITEMS_PARMNUM = 505 +SV_MAXWORKITEMS_PARMNUM = 506 +SV_RAWWORKITEMS_PARMNUM = 507 +SV_IRPSTACKSIZE_PARMNUM = 508 +SV_MAXRAWBUFLEN_PARMNUM = 509 +SV_SESSUSERS_PARMNUM = 510 +SV_SESSCONNS_PARMNUM = 511 +SV_MAXNONPAGEDMEMORYUSAGE_PARMNUM = 512 +SV_MAXPAGEDMEMORYUSAGE_PARMNUM = 513 +SV_ENABLESOFTCOMPAT_PARMNUM = 514 +SV_ENABLEFORCEDLOGOFF_PARMNUM = 515 +SV_TIMESOURCE_PARMNUM = 516 +SV_ACCEPTDOWNLEVELAPIS_PARMNUM = 517 +SV_LMANNOUNCE_PARMNUM = 518 +SV_DOMAIN_PARMNUM = 519 +SV_MAXCOPYREADLEN_PARMNUM = 520 +SV_MAXCOPYWRITELEN_PARMNUM = 521 +SV_MINKEEPSEARCH_PARMNUM = 522 +SV_MAXKEEPSEARCH_PARMNUM = 523 +SV_MINKEEPCOMPLSEARCH_PARMNUM = 524 +SV_MAXKEEPCOMPLSEARCH_PARMNUM = 525 +SV_THREADCOUNTADD_PARMNUM = 526 +SV_NUMBLOCKTHREADS_PARMNUM = 527 +SV_SCAVTIMEOUT_PARMNUM = 528 +SV_MINRCVQUEUE_PARMNUM = 529 +SV_MINFREEWORKITEMS_PARMNUM = 530 +SV_XACTMEMSIZE_PARMNUM = 531 +SV_THREADPRIORITY_PARMNUM = 532 +SV_MAXMPXCT_PARMNUM = 533 +SV_OPLOCKBREAKWAIT_PARMNUM = 534 +SV_OPLOCKBREAKRESPONSEWAIT_PARMNUM = 535 +SV_ENABLEOPLOCKS_PARMNUM = 536 +SV_ENABLEOPLOCKFORCECLOSE_PARMNUM = 537 +SV_ENABLEFCBOPENS_PARMNUM = 538 +SV_ENABLERAW_PARMNUM = 539 +SV_ENABLESHAREDNETDRIVES_PARMNUM = 540 +SV_MINFREECONNECTIONS_PARMNUM = 541 +SV_MAXFREECONNECTIONS_PARMNUM = 542 +SV_INITSESSTABLE_PARMNUM = 543 +SV_INITCONNTABLE_PARMNUM = 544 +SV_INITFILETABLE_PARMNUM = 545 +SV_INITSEARCHTABLE_PARMNUM = 546 +SV_ALERTSCHEDULE_PARMNUM = 547 +SV_ERRORTHRESHOLD_PARMNUM = 548 +SV_NETWORKERRORTHRESHOLD_PARMNUM = 549 +SV_DISKSPACETHRESHOLD_PARMNUM = 550 +SV_MAXLINKDELAY_PARMNUM = 552 +SV_MINLINKTHROUGHPUT_PARMNUM = 553 +SV_LINKINFOVALIDTIME_PARMNUM = 554 +SV_SCAVQOSINFOUPDATETIME_PARMNUM = 555 +SV_MAXWORKITEMIDLETIME_PARMNUM = 556 +SV_MAXRAWWORKITEMS_PARMNUM = 557 +SV_PRODUCTTYPE_PARMNUM = 560 +SV_SERVERSIZE_PARMNUM = 561 +SV_CONNECTIONLESSAUTODISC_PARMNUM = 562 +SV_SHARINGVIOLATIONRETRIES_PARMNUM = 563 +SV_SHARINGVIOLATIONDELAY_PARMNUM = 564 +SV_MAXGLOBALOPENSEARCH_PARMNUM = 565 +SV_REMOVEDUPLICATESEARCHES_PARMNUM = 566 +SV_LOCKVIOLATIONRETRIES_PARMNUM = 567 +SV_LOCKVIOLATIONOFFSET_PARMNUM = 568 +SV_LOCKVIOLATIONDELAY_PARMNUM = 569 +SV_MDLREADSWITCHOVER_PARMNUM = 570 +SV_CACHEDOPENLIMIT_PARMNUM = 571 +SV_CRITICALTHREADS_PARMNUM = 572 +SV_RESTRICTNULLSESSACCESS_PARMNUM = 573 +SV_ENABLEWFW311DIRECTIPX_PARMNUM = 574 +SV_OTHERQUEUEAFFINITY_PARMNUM = 575 +SV_QUEUESAMPLESECS_PARMNUM = 576 +SV_BALANCECOUNT_PARMNUM = 577 +SV_PREFERREDAFFINITY_PARMNUM = 578 +SV_MAXFREERFCBS_PARMNUM = 579 +SV_MAXFREEMFCBS_PARMNUM = 580 +SV_MAXFREELFCBS_PARMNUM = 581 +SV_MAXFREEPAGEDPOOLCHUNKS_PARMNUM = 582 +SV_MINPAGEDPOOLCHUNKSIZE_PARMNUM = 583 +SV_MAXPAGEDPOOLCHUNKSIZE_PARMNUM = 584 +SV_SENDSFROMPREFERREDPROCESSOR_PARMNUM = 585 +SV_MAXTHREADSPERQUEUE_PARMNUM = 586 +SV_CACHEDDIRECTORYLIMIT_PARMNUM = 587 +SV_MAXCOPYLENGTH_PARMNUM = 588 +SV_ENABLEBULKTRANSFER_PARMNUM = 589 +SV_ENABLECOMPRESSION_PARMNUM = 590 +SV_AUTOSHAREWKS_PARMNUM = 591 +SV_AUTOSHARESERVER_PARMNUM = 592 +SV_ENABLESECURITYSIGNATURE_PARMNUM = 593 +SV_REQUIRESECURITYSIGNATURE_PARMNUM = 594 +SV_MINCLIENTBUFFERSIZE_PARMNUM = 595 +SV_CONNECTIONNOSESSIONSTIMEOUT_PARMNUM = 596 +SVI1_NUM_ELEMENTS = 5 +SVI2_NUM_ELEMENTS = 40 +SVI3_NUM_ELEMENTS = 44 +SW_AUTOPROF_LOAD_MASK = 1 +SW_AUTOPROF_SAVE_MASK = 2 +SV_MAX_SRV_HEUR_LEN = 32 +SV_USERS_PER_LICENSE = 5 +SVTI2_REMAP_PIPE_NAMES = 2 + +# Generated by h2py from lmshare.h +SHARE_NETNAME_PARMNUM = 1 +SHARE_TYPE_PARMNUM = 3 +SHARE_REMARK_PARMNUM = 4 
+SHARE_PERMISSIONS_PARMNUM = 5 +SHARE_MAX_USES_PARMNUM = 6 +SHARE_CURRENT_USES_PARMNUM = 7 +SHARE_PATH_PARMNUM = 8 +SHARE_PASSWD_PARMNUM = 9 +SHARE_FILE_SD_PARMNUM = 501 +SHI1_NUM_ELEMENTS = 4 +SHI2_NUM_ELEMENTS = 10 +STYPE_DISKTREE = 0 +STYPE_PRINTQ = 1 +STYPE_DEVICE = 2 +STYPE_IPC = 3 +STYPE_SPECIAL = -2147483648 +SHI1005_FLAGS_DFS = 1 +SHI1005_FLAGS_DFS_ROOT = 2 +COW_PERMACHINE = 4 +COW_PERUSER = 8 +CSC_CACHEABLE = 16 +CSC_NOFLOWOPS = 32 +CSC_AUTO_INWARD = 64 +CSC_AUTO_OUTWARD = 128 +SHI1005_VALID_FLAGS_SET = ( CSC_CACHEABLE | \ + CSC_NOFLOWOPS | \ + CSC_AUTO_INWARD | \ + CSC_AUTO_OUTWARD| \ + COW_PERMACHINE | \ + COW_PERUSER ) +SHI1007_VALID_FLAGS_SET = SHI1005_VALID_FLAGS_SET +SESS_GUEST = 1 +SESS_NOENCRYPTION = 2 +SESI1_NUM_ELEMENTS = 8 +SESI2_NUM_ELEMENTS = 9 +PERM_FILE_READ = 1 +PERM_FILE_WRITE = 2 +PERM_FILE_CREATE = 4 + +# Generated by h2py from d:\mssdk\include\winnetwk.h +WNNC_NET_MSNET = 65536 +WNNC_NET_LANMAN = 131072 +WNNC_NET_NETWARE = 196608 +WNNC_NET_VINES = 262144 +WNNC_NET_10NET = 327680 +WNNC_NET_LOCUS = 393216 +WNNC_NET_SUN_PC_NFS = 458752 +WNNC_NET_LANSTEP = 524288 +WNNC_NET_9TILES = 589824 +WNNC_NET_LANTASTIC = 655360 +WNNC_NET_AS400 = 720896 +WNNC_NET_FTP_NFS = 786432 +WNNC_NET_PATHWORKS = 851968 +WNNC_NET_LIFENET = 917504 +WNNC_NET_POWERLAN = 983040 +WNNC_NET_BWNFS = 1048576 +WNNC_NET_COGENT = 1114112 +WNNC_NET_FARALLON = 1179648 +WNNC_NET_APPLETALK = 1245184 +WNNC_NET_INTERGRAPH = 1310720 +WNNC_NET_SYMFONET = 1376256 +WNNC_NET_CLEARCASE = 1441792 +WNNC_NET_FRONTIER = 1507328 +WNNC_NET_BMC = 1572864 +WNNC_NET_DCE = 1638400 +WNNC_NET_DECORB = 2097152 +WNNC_NET_PROTSTOR = 2162688 +WNNC_NET_FJ_REDIR = 2228224 +WNNC_NET_DISTINCT = 2293760 +WNNC_NET_TWINS = 2359296 +WNNC_NET_RDR2SAMPLE = 2424832 +RESOURCE_CONNECTED = 1 +RESOURCE_GLOBALNET = 2 +RESOURCE_REMEMBERED = 3 +RESOURCE_RECENT = 4 +RESOURCE_CONTEXT = 5 +RESOURCETYPE_ANY = 0 +RESOURCETYPE_DISK = 1 +RESOURCETYPE_PRINT = 2 +RESOURCETYPE_RESERVED = 8 +RESOURCETYPE_UNKNOWN = -1 +RESOURCEUSAGE_CONNECTABLE = 1 +RESOURCEUSAGE_CONTAINER = 2 +RESOURCEUSAGE_NOLOCALDEVICE = 4 +RESOURCEUSAGE_SIBLING = 8 +RESOURCEUSAGE_ATTACHED = 16 +RESOURCEUSAGE_ALL = (RESOURCEUSAGE_CONNECTABLE | RESOURCEUSAGE_CONTAINER | RESOURCEUSAGE_ATTACHED) +RESOURCEUSAGE_RESERVED = -2147483648 +RESOURCEDISPLAYTYPE_GENERIC = 0 +RESOURCEDISPLAYTYPE_DOMAIN = 1 +RESOURCEDISPLAYTYPE_SERVER = 2 +RESOURCEDISPLAYTYPE_SHARE = 3 +RESOURCEDISPLAYTYPE_FILE = 4 +RESOURCEDISPLAYTYPE_GROUP = 5 +RESOURCEDISPLAYTYPE_NETWORK = 6 +RESOURCEDISPLAYTYPE_ROOT = 7 +RESOURCEDISPLAYTYPE_SHAREADMIN = 8 +RESOURCEDISPLAYTYPE_DIRECTORY = 9 +RESOURCEDISPLAYTYPE_TREE = 10 +RESOURCEDISPLAYTYPE_NDSCONTAINER = 11 +NETPROPERTY_PERSISTENT = 1 +CONNECT_UPDATE_PROFILE = 1 +CONNECT_UPDATE_RECENT = 2 +CONNECT_TEMPORARY = 4 +CONNECT_INTERACTIVE = 8 +CONNECT_PROMPT = 16 +CONNECT_NEED_DRIVE = 32 +CONNECT_REFCOUNT = 64 +CONNECT_REDIRECT = 128 +CONNECT_LOCALDRIVE = 256 +CONNECT_CURRENT_MEDIA = 512 +CONNECT_DEFERRED = 1024 +CONNECT_RESERVED = -16777216 +CONNDLG_RO_PATH = 1 +CONNDLG_CONN_POINT = 2 +CONNDLG_USE_MRU = 4 +CONNDLG_HIDE_BOX = 8 +CONNDLG_PERSIST = 16 +CONNDLG_NOT_PERSIST = 32 +DISC_UPDATE_PROFILE = 1 +DISC_NO_FORCE = 64 +UNIVERSAL_NAME_INFO_LEVEL = 1 +REMOTE_NAME_INFO_LEVEL = 2 +WNFMT_MULTILINE = 1 +WNFMT_ABBREVIATED = 2 +WNFMT_INENUM = 16 +WNFMT_CONNECTION = 32 +NETINFO_DLL16 = 1 +NETINFO_DISKRED = 4 +NETINFO_PRINTERRED = 8 +RP_LOGON = 1 +RP_INIFILE = 2 +PP_DISPLAYERRORS = 1 +WNCON_FORNETCARD = 1 +WNCON_NOTROUTED = 2 +WNCON_SLOWLINK = 4 +WNCON_DYNAMIC = 8 + +## NETSETUP_NAME_TYPE, used 
with NetValidateName +NetSetupUnknown = 0 +NetSetupMachine = 1 +NetSetupWorkgroup = 2 +NetSetupDomain = 3 +NetSetupNonExistentDomain = 4 +NetSetupDnsMachine = 5 + +## NETSETUP_JOIN_STATUS, use with NetGetJoinInformation +NetSetupUnknownStatus = 0 +NetSetupUnjoined = 1 +NetSetupWorkgroupName = 2 +NetSetupDomainName = 3 + +NetValidateAuthentication = 1 +NetValidatePasswordChange = 2 +NetValidatePasswordReset = 3 diff --git a/venv/Lib/site-packages/win32/lib/win32pdhquery.py b/venv/Lib/site-packages/win32/lib/win32pdhquery.py new file mode 100644 index 00000000..eadcf0ac --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32pdhquery.py @@ -0,0 +1,515 @@ +''' +Performance Data Helper (PDH) Query Classes + +Wrapper classes for end-users and high-level access to the PDH query +mechanisms. PDH is a win32-specific mechanism for accessing the +performance data made available by the system. The Python for Windows +PDH module does not implement the "Registry" interface, implementing +the more straightforward Query-based mechanism. + +The basic idea of a PDH Query is an object which can query the system +about the status of any number of "counters." The counters are paths +to a particular piece of performance data. For instance, the path +'\\Memory\\Available Bytes' describes just about exactly what it says +it does, the amount of free memory on the default computer expressed +in Bytes. These paths can be considerably more complex than this, +but part of the point of this wrapper module is to hide that +complexity from the end-user/programmer. + +EXAMPLE: A more complex Path + '\\\\RAISTLIN\\PhysicalDisk(_Total)\\Avg. Disk Bytes/Read' + Raistlin --> Computer Name + PhysicalDisk --> Object Name + _Total --> The particular Instance (in this case, all instances, i.e. all drives) + Avg. Disk Bytes/Read --> The piece of data being monitored. + +EXAMPLE: Collecting Data with a Query + As an example, the following code implements a logger which allows the + user to choose what counters they would like to log, and logs those + counters for 30 seconds, at two-second intervals. + + query = Query() + query.addcounterbybrowsing() + query.collectdatafor(30,2) + + The data is now stored in a list of lists as: + query.curresults + + The counters(paths) which were used to collect the data are: + query.curpaths + + You can use the win32pdh.ParseCounterPath(path) utility function + to turn the paths into more easily read values for your task, or + write the data to a file, or do whatever you want with it. + +OTHER NOTABLE METHODS: + query.collectdatawhile(period) # start a logging thread for collecting data + query.collectdatawhile_stop() # signal the logging thread to stop logging + query.collectdata() # run the query only once + query.addperfcounter(object, counter, machine=None) # add a standard performance counter + query.addinstcounter(object, counter,machine=None,objtype = 'Process',volatile=1,format = win32pdh.PDH_FMT_LONG) # add a possibly volatile counter + +### Known bugs and limitations ### +Due to a problem with threading under the PythonWin interpreter, there +will be no data logged if the PythonWin window is not the foreground +application. Workaround: scripts using threading should be run in the +python.exe interpreter. + +The volatile-counter handlers are possibly buggy, they haven't been +tested to any extent. 
The wrapper Query makes it safe to pass invalid +paths (a -1 will be returned, or the Query will be totally ignored, +depending on the missing element), so you should be able to work around +the error by including all possible paths and filtering out the -1's. + +There is no way I know of to stop a thread which is currently sleeping, +so you have to wait until the thread in collectdatawhile is activated +again. This might become a problem in situations where the collection +period is multiple minutes (or hours, or whatever). + +Should make the win32pdh.ParseCounter function available to the Query +classes as a method or something similar, so that it can be accessed +by programmes that have just picked up an instance from somewhere. + +Should explicitly mention where QueryErrors can be raised, and create a +full test set to see if there are any uncaught win32api.error's still +hanging around. + +When using the python.exe interpreter, the addcounterbybrowsing- +generated browser window is often hidden behind other windows. No known +workaround other than Alt-tabing to reach the browser window. + +### Other References ### +The win32pdhutil module (which should be in the %pythonroot%/win32/lib +directory) provides quick-and-dirty utilities for one-off access to +variables from the PDH. Almost everything in that module can be done +with a Query object, but it provides task-oriented functions for a +number of common one-off tasks. + +If you can access the MS Developers Network Library, you can find +information about the PDH API as MS describes it. For a background article, +try: +http://msdn.microsoft.com/library/en-us/dnperfmo/html/msdn_pdhlib.asp + +The reference guide for the PDH API was last spotted at: +http://msdn.microsoft.com/library/en-us/perfmon/base/using_the_pdh_interface.asp + + +In general the Python version of the API is just a wrapper around the +Query-based version of this API (as far as I can see), so you can learn what +you need to from there. From what I understand, the MSDN Online +resources are available for the price of signing up for them. I can't +guarantee how long that's supposed to last. (Or anything for that +matter). +http://premium.microsoft.com/isapi/devonly/prodinfo/msdnprod/msdnlib.idc?theURL=/msdn/library/sdkdoc/perfdata_4982.htm + +The eventual plan is for my (Mike Fletcher's) Starship account to include +a section on NT Administration, and the Query is the first project +in this plan. There should be an article describing the creation of +a simple logger there, but the example above is 90% of the work of +that project, so don't sweat it if you don't find anything there. +(currently the account hasn't been set up). +http://starship.skyport.net/crew/mcfletch/ + +If you need to contact me immediately, (why I can't imagine), you can +email me at mcfletch@golden.net, or just post your question to the +Python newsgroup with a catchy subject line. +news:comp.lang.python + +### Other Stuff ### +The Query classes are by Mike Fletcher, with the working code +being corruptions of Mark Hammonds win32pdhutil module. + +Use at your own risk, no warranties, no guarantees, no assurances, +if you use it, you accept the risk of using it, etceteras. + +''' +# Feb 12, 98 - MH added "rawaddcounter" so caller can get exception details. 
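+# --- Illustrative usage sketch (not part of the upstream pywin32 source) ---
+# A minimal, non-interactive example of the Query workflow described in the
+# docstring above; it assumes this file is importable as win32pdhquery on a
+# Windows machine with pywin32 available:
+#
+#   from win32pdhquery import Query
+#   q = Query()
+#   q.addperfcounter('Memory', 'Available Bytes')  # a stable, well-known counter
+#   q.collectdatafor(10, 2)     # sample every 2 seconds for 10 seconds
+#   print(q.curpaths)           # the counter paths that were actually queried
+#   print(q.curresults)         # one list of formatted values per interval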
+ +import win32pdh, win32api,time, _thread,copy + +class BaseQuery: + ''' + Provides wrapped access to the Performance Data Helper query + objects, generally you should use the child class Query + unless you have need of doing weird things :) + + This class supports two major working paradigms. In the first, + you open the query, and run it as many times as you need, closing + the query when you're done with it. This is suitable for static + queries (ones where processes being monitored don't disappear). + + In the second, you allow the query to be opened each time and + closed afterward. This causes the base query object to be + destroyed after each call. Suitable for dynamic queries (ones + which watch processes which might be closed while watching.) + ''' + def __init__(self,paths=None): + ''' + The PDH Query object is initialised with a single, optional + list argument, that must be properly formatted PDH Counter + paths. Generally this list will only be provided by the class + when it is being unpickled (removed from storage). Normal + use is to call the class with no arguments and use the various + addcounter functions (particularly, for end user's, the use of + addcounterbybrowsing is the most common approach) You might + want to provide the list directly if you want to hard-code the + elements with which your query deals (and thereby avoid the + overhead of unpickling the class). + ''' + self.counters = [] + if paths: + self.paths = paths + else: + self.paths = [] + self._base = None + self.active = 0 + self.curpaths = [] + def addcounterbybrowsing(self, flags = win32pdh.PERF_DETAIL_WIZARD, windowtitle="Python Browser"): + ''' + Adds possibly multiple paths to the paths attribute of the query, + does this by calling the standard counter browsing dialogue. Within + this dialogue, find the counter you want to log, and click: Add, + repeat for every path you want to log, then click on close. The + paths are appended to the non-volatile paths list for this class, + subclasses may create a function which parses the paths and decides + (via heuristics) whether to add the path to the volatile or non-volatile + path list. + e.g.: + query.addcounter() + ''' + win32pdh.BrowseCounters(None,0, self.paths.append, flags, windowtitle) + def rawaddcounter(self,object, counter, instance = None, inum=-1, machine=None): + ''' + Adds a single counter path, without catching any exceptions. + + See addcounter for details. + ''' + path = win32pdh.MakeCounterPath( (machine,object,instance, None, inum,counter) ) + self.paths.append(path) + + def addcounter(self,object, counter, instance = None, inum=-1, machine=None): + ''' + Adds a single counter path to the paths attribute. Normally + this will be called by a child class' speciality functions, + rather than being called directly by the user. (Though it isn't + hard to call manually, since almost everything is given a default) + This method is only functional when the query is closed (or hasn't + yet been opened). This is to prevent conflict in multi-threaded + query applications). + e.g.: + query.addcounter('Memory','Available Bytes') + ''' + if not self.active: + try: + self.rawaddcounter(object, counter, instance, inum, machine) + return 0 + except win32api.error: + return -1 + else: + return -1 + + def open(self): + ''' + Build the base query object for this wrapper, + then add all of the counters required for the query. + Raise a QueryError if we can't complete the functions. + If we are already open, then do nothing. 
+ ''' + if not self.active: # to prevent having multiple open queries + # curpaths are made accessible here because of the possibility of volatile paths + # which may be dynamically altered by subclasses. + self.curpaths = copy.copy(self.paths) + try: + base = win32pdh.OpenQuery() + for path in self.paths: + try: + self.counters.append(win32pdh.AddCounter(base, path)) + except win32api.error: # we passed a bad path + self.counters.append(0) + pass + self._base = base + self.active = 1 + return 0 # open succeeded + except: # if we encounter any errors, kill the Query + try: + self.killbase(base) + except NameError: # failed in creating query + pass + self.active = 0 + self.curpaths = [] + raise QueryError(self) + return 1 # already open + + def killbase(self,base=None): + ''' + ### This is not a public method + Mission critical function to kill the win32pdh objects held + by this object. User's should generally use the close method + instead of this method, in case a sub-class has overridden + close to provide some special functionality. + ''' + # Kill Pythonic references to the objects in this object's namespace + self._base = None + counters = self.counters + self.counters = [] + # we don't kill the curpaths for convenience, this allows the + # user to close a query and still access the last paths + self.active = 0 + # Now call the delete functions on all of the objects + try: + map(win32pdh.RemoveCounter,counters) + except: + pass + try: + win32pdh.CloseQuery(base) + except: + pass + del(counters) + del(base) + def close(self): + ''' + Makes certain that the underlying query object has been closed, + and that all counters have been removed from it. This is + important for reference counting. + You should only need to call close if you have previously called + open. The collectdata methods all can handle opening and + closing the query. Calling close multiple times is acceptable. + ''' + try: + self.killbase(self._base) + except AttributeError: + self.killbase() + __del__ = close + def collectdata(self,format = win32pdh.PDH_FMT_LONG): + ''' + Returns the formatted current values for the Query + ''' + if self._base: # we are currently open, don't change this + return self.collectdataslave(format) + else: # need to open and then close the _base, should be used by one-offs and elements tracking application instances + self.open() # will raise QueryError if couldn't open the query + temp = self.collectdataslave(format) + self.close() # will always close + return temp + def collectdataslave(self,format = win32pdh.PDH_FMT_LONG): + ''' + ### Not a public method + Called only when the Query is known to be open, runs over + the whole set of counters, appending results to the temp, + returns the values as a list. + ''' + try: + win32pdh.CollectQueryData(self._base) + temp = [] + for counter in self.counters: + ok = 0 + try: + if counter: + temp.append(win32pdh.GetFormattedCounterValue(counter, format)[1]) + ok = 1 + except win32api.error: + pass + if not ok: + temp.append(-1) # a better way to signal failure??? + return temp + except win32api.error: # will happen if, for instance, no counters are part of the query and we attempt to collect data for it. 
+ return [-1] * len(self.counters) + # pickle functions + def __getinitargs__(self): + ''' + ### Not a public method + ''' + return (self.paths,) + +class Query(BaseQuery): + ''' + Performance Data Helper(PDH) Query object: + + Provides a wrapper around the native PDH query object which + allows for query reuse, query storage, and general maintenance + functions (adding counter paths in various ways being the most + obvious ones). + ''' + def __init__(self,*args,**namedargs): + ''' + The PDH Query object is initialised with a single, optional + list argument, that must be properly formatted PDH Counter + paths. Generally this list will only be provided by the class + when it is being unpickled (removed from storage). Normal + use is to call the class with no arguments and use the various + addcounter functions (particularly, for end user's, the use of + addcounterbybrowsing is the most common approach) You might + want to provide the list directly if you want to hard-code the + elements with which your query deals (and thereby avoid the + overhead of unpickling the class). + ''' + self.volatilecounters = [] + BaseQuery.__init__(*(self,)+args, **namedargs) + def addperfcounter(self, object, counter, machine=None): + ''' + A "Performance Counter" is a stable, known, common counter, + such as Memory, or Processor. The use of addperfcounter by + end-users is deprecated, since the use of + addcounterbybrowsing is considerably more flexible and general. + It is provided here to allow the easy development of scripts + which need to access variables so common we know them by name + (such as Memory|Available Bytes), and to provide symmetry with + the add inst counter method. + usage: + query.addperfcounter('Memory', 'Available Bytes') + It is just as easy to access addcounter directly, the following + has an identicle effect. + query.addcounter('Memory', 'Available Bytes') + ''' + BaseQuery.addcounter(self, object=object, counter=counter, machine=machine) + def addinstcounter(self, object, counter,machine=None,objtype = 'Process',volatile=1,format = win32pdh.PDH_FMT_LONG): + ''' + The purpose of using an instcounter is to track particular + instances of a counter object (e.g. a single processor, a single + running copy of a process). For instance, to track all python.exe + instances, you would need merely to ask: + query.addinstcounter('python','Virtual Bytes') + You can find the names of the objects and their available counters + by doing an addcounterbybrowsing() call on a query object (or by + looking in performance monitor's add dialog.) + + Beyond merely rearranging the call arguments to make more sense, + if the volatile flag is true, the instcounters also recalculate + the paths of the available instances on every call to open the + query. + ''' + if volatile: + self.volatilecounters.append((object,counter,machine,objtype,format)) + else: + self.paths[len(self.paths):] = self.getinstpaths(object,counter,machine,objtype,format) + + def getinstpaths(self,object,counter,machine=None,objtype='Process',format = win32pdh.PDH_FMT_LONG): + ''' + ### Not an end-user function + Calculate the paths for an instance object. Should alter + to allow processing for lists of object-counter pairs. + ''' + items, instances = win32pdh.EnumObjectItems(None,None,objtype, -1) + # find out how many instances of this element we have... 
+ instances.sort() + try: + cur = instances.index(object) + except ValueError: + return [] # no instances of this object + temp = [object] + try: + while instances[cur+1] == object: + temp.append(object) + cur = cur+1 + except IndexError: # if we went over the end + pass + paths = [] + for ind in range(len(temp)): + # can this raise an error? + paths.append(win32pdh.MakeCounterPath( (machine,'Process',object,None,ind,counter) ) ) + return paths # should also return the number of elements for naming purposes + + def open(self,*args,**namedargs): + ''' + Explicitly open a query: + When you are needing to make multiple calls to the same query, + it is most efficient to open the query, run all of the calls, + then close the query, instead of having the collectdata method + automatically open and close the query each time it runs. + There are currently no arguments to open. + ''' + # do all the normal opening stuff, self._base is now the query object + BaseQuery.open(*(self,)+args, **namedargs) + # should rewrite getinstpaths to take a single tuple + paths = [] + for tup in self.volatilecounters: + paths[len(paths):] = self.getinstpaths(*tup) + for path in paths: + try: + self.counters.append(win32pdh.AddCounter(self._base, path)) + self.curpaths.append(path) # if we fail on the line above, this path won't be in the table or the counters + except win32api.error: + pass # again, what to do with a malformed path??? + def collectdatafor(self, totalperiod, period=1): + ''' + Non-threaded collection of performance data: + This method allows you to specify the total period for which you would + like to run the Query, and the time interval between individual + runs. The collected data is stored in query.curresults at the + _end_ of the run. The pathnames for the query are stored in + query.curpaths. + e.g.: + query.collectdatafor(30,2) + Will collect data for 30seconds at 2 second intervals + ''' + tempresults = [] + try: + self.open() + for ind in range(totalperiod/period): + tempresults.append(self.collectdata()) + time.sleep(period) + self.curresults = tempresults + finally: + self.close() + def collectdatawhile(self, period=1): + ''' + Threaded collection of performance data: + This method sets up a simple semaphor system for signalling + when you would like to start and stop a threaded data collection + method. The collection runs every period seconds until the + semaphor attribute is set to a non-true value (which normally + should be done by calling query.collectdatawhile_stop() .) + e.g.: + query.collectdatawhile(2) + # starts the query running, returns control to the caller immediately + # is collecting data every two seconds. + # do whatever you want to do while the thread runs, then call: + query.collectdatawhile_stop() + # when you want to deal with the data. It is generally a good idea + # to sleep for period seconds yourself, since the query will not copy + # the required data until the next iteration: + time.sleep(2) + # now you can access the data from the attributes of the query + query.curresults + query.curpaths + ''' + self.collectdatawhile_active = 1 + _thread.start_new_thread(self.collectdatawhile_slave,(period,)) + def collectdatawhile_stop(self): + ''' + Signals the collectdatawhile slave thread to stop collecting data + on the next logging iteration. + ''' + self.collectdatawhile_active = 0 + def collectdatawhile_slave(self, period): + ''' + ### Not a public function + Does the threaded work of collecting the data and storing it + in an attribute of the class. 
+ ''' + tempresults = [] + try: + self.open() # also sets active, so can't be changed. + while self.collectdatawhile_active: + tempresults.append(self.collectdata()) + time.sleep(period) + self.curresults = tempresults + finally: + self.close() + + # pickle functions + def __getinitargs__(self): + return (self.paths,) + def __getstate__(self): + return self.volatilecounters + def __setstate__(self, volatilecounters): + self.volatilecounters = volatilecounters + + +class QueryError: + def __init__(self, query): + self.query = query + def __repr__(self): + return ''%repr(self.query) + __str__ = __repr__ + diff --git a/venv/Lib/site-packages/win32/lib/win32pdhutil.py b/venv/Lib/site-packages/win32/lib/win32pdhutil.py new file mode 100644 index 00000000..e3fb7aa4 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32pdhutil.py @@ -0,0 +1,168 @@ +"""Utilities for the win32 Performance Data Helper module + +Example: + To get a single bit of data: + >>> import win32pdhutil + >>> win32pdhutil.GetPerformanceAttributes("Memory", "Available Bytes") + 6053888 + >>> win32pdhutil.FindPerformanceAttributesByName("python", counter="Virtual Bytes") + [22278144] + + First example returns data which is not associated with any specific instance. + + The second example reads data for a specific instance - hence the list return - + it would return one result for each instance of Python running. + + In general, it can be tricky finding exactly the "name" of the data you wish to query. + Although you can use (None,None,(eg)"Memory", -1) to do this, + the easiest way is often to simply use PerfMon to find out the names. +""" + +import win32pdh, time + +error = win32pdh.error + +# Handle some localization issues. +# see http://support.microsoft.com/default.aspx?scid=http://support.microsoft.com:80/support/kb/articles/Q287/1/59.asp&NoWebContent=1 +# Build a map of english_counter_name: counter_id +counter_english_map = {} + +def find_pdh_counter_localized_name(english_name, machine_name = None): + if not counter_english_map: + import win32api, win32con + counter_reg_value = win32api.RegQueryValueEx(win32con.HKEY_PERFORMANCE_DATA, + "Counter 009") + counter_list = counter_reg_value[0] + for i in range(0, len(counter_list) - 1, 2): + try: + counter_id = int(counter_list[i]) + except ValueError: + continue + counter_english_map[counter_list[i+1].lower()] = counter_id + return win32pdh.LookupPerfNameByIndex(machine_name, counter_english_map[english_name.lower()]) + +def GetPerformanceAttributes(object, counter, instance = None, inum=-1, + format = win32pdh.PDH_FMT_LONG, machine=None): + # NOTE: Many counters require 2 samples to give accurate results, + # including "% Processor Time" (as by definition, at any instant, a + # thread's CPU usage is either 0 or 100). To read counters like this, + # you should copy this function, but keep the counter open, and call + # CollectQueryData() each time you need to know. 
+ # See http://support.microsoft.com/default.aspx?scid=kb;EN-US;q262938 + # and http://msdn.microsoft.com/library/en-us/dnperfmo/html/perfmonpt2.asp + # My older explanation for this was that the "AddCounter" process forced + # the CPU to 100%, but the above makes more sense :) + path = win32pdh.MakeCounterPath( (machine,object,instance, None, inum,counter) ) + hq = win32pdh.OpenQuery() + try: + hc = win32pdh.AddCounter(hq, path) + try: + win32pdh.CollectQueryData(hq) + type, val = win32pdh.GetFormattedCounterValue(hc, format) + return val + finally: + win32pdh.RemoveCounter(hc) + finally: + win32pdh.CloseQuery(hq) + +def FindPerformanceAttributesByName(instanceName, object = None, + counter = None, + format = win32pdh.PDH_FMT_LONG, + machine = None, bRefresh=0): + """Find peformance attributes by (case insensitive) instance name. + + Given a process name, return a list with the requested attributes. + Most useful for returning a tuple of PIDs given a process name. + """ + if object is None: object = find_pdh_counter_localized_name("Process", machine) + if counter is None: counter = find_pdh_counter_localized_name("ID Process", machine) + if bRefresh: # PDH docs say this is how you do a refresh. + win32pdh.EnumObjects(None, machine, 0, 1) + instanceName = instanceName.lower() + items, instances = win32pdh.EnumObjectItems(None,None,object, -1) + # Track multiple instances. + instance_dict = {} + for instance in instances: + try: + instance_dict[instance] = instance_dict[instance] + 1 + except KeyError: + instance_dict[instance] = 0 + + ret = [] + for instance, max_instances in instance_dict.items(): + for inum in range(max_instances+1): + if instance.lower() == instanceName: + ret.append(GetPerformanceAttributes(object, counter, + instance, inum, format, + machine)) + return ret + +def ShowAllProcesses(): + object = find_pdh_counter_localized_name("Process") + items, instances = win32pdh.EnumObjectItems(None,None,object, + win32pdh.PERF_DETAIL_WIZARD) + # Need to track multiple instances of the same name. + instance_dict = {} + for instance in instances: + try: + instance_dict[instance] = instance_dict[instance] + 1 + except KeyError: + instance_dict[instance] = 0 + + # Bit of a hack to get useful info. + items = [find_pdh_counter_localized_name("ID Process")] + items[:5] + print("Process Name", ",".join(items)) + for instance, max_instances in instance_dict.items(): + for inum in range(max_instances+1): + hq = win32pdh.OpenQuery() + hcs = [] + for item in items: + path = win32pdh.MakeCounterPath( (None,object,instance, + None, inum, item) ) + hcs.append(win32pdh.AddCounter(hq, path)) + win32pdh.CollectQueryData(hq) + # as per http://support.microsoft.com/default.aspx?scid=kb;EN-US;q262938, some "%" based + # counters need two collections + time.sleep(0.01) + win32pdh.CollectQueryData(hq) + print("%-15s\t" % (instance[:15]), end=' ') + for hc in hcs: + type, val = win32pdh.GetFormattedCounterValue(hc, win32pdh.PDH_FMT_LONG) + print("%5d" % (val), end=' ') + win32pdh.RemoveCounter(hc) + print() + win32pdh.CloseQuery(hq) + +# NOTE: This BrowseCallback doesn't seem to work on Vista for markh. +# XXX - look at why!? +# Some counters on Vista require elevation, and callback would previously +# clear exceptions without printing them. 
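+# --- Illustrative sketch (not part of the upstream pywin32 source) ---
+# The comment inside GetPerformanceAttributes above notes that rate counters
+# such as "% Processor Time" only produce meaningful values after two calls to
+# CollectQueryData(). The helper below shows that pattern; its name
+# (GetTwoSampleAttribute) and the delay parameter are illustrative additions,
+# but the win32pdh calls mirror the ones already used in this module.
+def GetTwoSampleAttribute(object, counter, instance=None, inum=-1,
+                          format=win32pdh.PDH_FMT_DOUBLE, machine=None,
+                          delay=1.0):
+    path = win32pdh.MakeCounterPath((machine, object, instance, None, inum, counter))
+    hq = win32pdh.OpenQuery()
+    try:
+        hc = win32pdh.AddCounter(hq, path)
+        try:
+            win32pdh.CollectQueryData(hq)  # first sample establishes the baseline
+            time.sleep(delay)              # give the counter time to accumulate a delta
+            win32pdh.CollectQueryData(hq)  # second sample makes rate counters valid
+            type, val = win32pdh.GetFormattedCounterValue(hc, format)
+            return val
+        finally:
+            win32pdh.RemoveCounter(hc)
+    finally:
+        win32pdh.CloseQuery(hq)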
+def BrowseCallBackDemo(counters): + ## BrowseCounters can now return multiple counter paths + for counter in counters: + machine, object, instance, parentInstance, index, counterName = \ + win32pdh.ParseCounterPath(counter) + + result = GetPerformanceAttributes(object, counterName, instance, index, + win32pdh.PDH_FMT_DOUBLE, machine) + print("Value of '%s' is" % counter, result) + print("Added '%s' on object '%s' (machine %s), instance %s(%d)-parent of %s" \ + % (counterName, object, machine, instance, index, parentInstance)) + return 0 + +def browse(callback = BrowseCallBackDemo, title="Python Browser", + level=win32pdh.PERF_DETAIL_WIZARD): + win32pdh.BrowseCounters(None,0, callback, level, title, ReturnMultiple=True) + +if __name__=='__main__': + ShowAllProcesses() + # Show how to get a couple of attributes by name. + counter = find_pdh_counter_localized_name("Virtual Bytes") + print("Virtual Bytes = ", FindPerformanceAttributesByName("python", + counter=counter)) + print("Available Bytes = ", GetPerformanceAttributes( + find_pdh_counter_localized_name("Memory"), + find_pdh_counter_localized_name("Available Bytes"))) + # And a browser. + print("Browsing for counters...") + browse() diff --git a/venv/Lib/site-packages/win32/lib/win32rcparser.py b/venv/Lib/site-packages/win32/lib/win32rcparser.py new file mode 100644 index 00000000..5e0279b7 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32rcparser.py @@ -0,0 +1,602 @@ +# Windows dialog .RC file parser, by Adam Walker. + +# This module was adapted from the spambayes project, and is Copyright +# 2003/2004 The Python Software Foundation and is covered by the Python +# Software Foundation license. +""" +This is a parser for Windows .rc files, which are text files which define +dialogs and other Windows UI resources. +""" +__author__="Adam Walker" +__version__="0.11" + +import sys, os, shlex, stat +import pprint +import win32con +import commctrl + +_controlMap = {"DEFPUSHBUTTON":0x80, + "PUSHBUTTON":0x80, + "Button":0x80, + "GROUPBOX":0x80, + "Static":0x82, + "CTEXT":0x82, + "RTEXT":0x82, + "LTEXT":0x82, + "LISTBOX":0x83, + "SCROLLBAR":0x84, + "COMBOBOX":0x85, + "EDITTEXT":0x81, + "ICON":0x82, + "RICHEDIT":"RichEdit20A" + } + +# These are "default styles" for certain controls - ie, Visual Studio assumes +# the styles will be applied, and emits a "NOT {STYLE_NAME}" if it is to be +# disabled. These defaults have been determined by experimentation, so may +# not be completely accurate (most notably, some styles and/or control-types +# may be missing. 
+_addDefaults = {"EDITTEXT":win32con.WS_BORDER | win32con.WS_TABSTOP, + "GROUPBOX":win32con.BS_GROUPBOX, + "LTEXT":win32con.SS_LEFT, + "DEFPUSHBUTTON":win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP, + "PUSHBUTTON": win32con.WS_TABSTOP, + "CTEXT":win32con.SS_CENTER, + "RTEXT":win32con.SS_RIGHT, + "ICON":win32con.SS_ICON, + "LISTBOX":win32con.LBS_NOTIFY, + } + +defaultControlStyle = win32con.WS_CHILD | win32con.WS_VISIBLE +defaultControlStyleEx = 0 + +class DialogDef: + name = "" + id = 0 + style = 0 + styleEx = None + caption = "" + font = "MS Sans Serif" + fontSize = 8 + x = 0 + y = 0 + w = 0 + h = 0 + template = None + def __init__(self, n, i): + self.name = n + self.id = i + self.styles = [] + self.stylesEx = [] + self.controls = [] + #print "dialog def for ",self.name, self.id + def createDialogTemplate(self): + t = None + self.template = [[self.caption, + (self.x,self.y,self.w,self.h), + self.style, self.styleEx, + (self.fontSize, self.font)] + ] + # Add the controls + for control in self.controls: + self.template.append(control.createDialogTemplate()) + return self.template + +class ControlDef: + id = "" + controlType = "" + subType = "" + idNum = 0 + style = defaultControlStyle + styleEx = defaultControlStyleEx + label = "" + x = 0 + y = 0 + w = 0 + h = 0 + def __init__(self): + self.styles = [] + self.stylesEx = [] + def toString(self): + s = "" + return s + def createDialogTemplate(self): + ct = self.controlType + if "CONTROL"==ct: + ct = self.subType + if ct in _controlMap: + ct = _controlMap[ct] + t = [ct, self.label, self.idNum, (self.x, self.y, self.w, self.h), self.style, self.styleEx] + #print t + return t + +class StringDef: + def __init__(self, id, idNum, value): + self.id = id + self.idNum = idNum + self.value = value + + def __repr__(self): + return "StringDef(%r, %r, %r)" % (self.id, self.idNum, self.value) + +class RCParser: + next_id = 1001 + dialogs = {} + _dialogs = {} + debugEnabled = False + token = "" + + def __init__(self): + self.ungot = False + self.ids = {"IDC_STATIC": -1} + self.names = {-1:"IDC_STATIC"} + self.bitmaps = {} + self.stringTable = {} + self.icons = {} + + def debug(self, *args): + if self.debugEnabled: + print(args) + + def getToken(self): + if self.ungot: + self.ungot = False + self.debug("getToken returns (ungot):", self.token) + return self.token + self.token = self.lex.get_token() + self.debug("getToken returns:", self.token) + if self.token=="": + self.token = None + return self.token + + def ungetToken(self): + self.ungot = True + + def getCheckToken(self, expected): + tok = self.getToken() + assert tok == expected, "Expected token '%s', but got token '%s'!" % (expected, tok) + return tok + + def getCommaToken(self): + return self.getCheckToken(",") + + # Return the *current* token as a number, only consuming a token + # if it is the negative-sign. + def currentNumberToken(self): + mult = 1 + if self.token=='-': + mult = -1 + self.getToken() + return int(self.token) * mult + + # Return the *current* token as a string literal (ie, self.token will be a + # quote. consumes all tokens until the end of the string + def currentQuotedString(self): + # Handle quoted strings - pity shlex doesn't handle it. + assert self.token.startswith('"'), self.token + bits = [self.token] + while 1: + tok = self.getToken() + if not tok.startswith('"'): + self.ungetToken() + break + bits.append(tok) + sval = "".join(bits)[1:-1] # Remove end quotes. + # Fixup quotes in the body, and all (some?) quoted characters back + # to their raw value. 
+ for i, o in ('""', '"'), ("\\r", "\r"), ("\\n", "\n"), ("\\t", "\t"): + sval = sval.replace(i, o) + return sval + + def load(self, rcstream): + """ + RCParser.loadDialogs(rcFileName) -> None + Load the dialog information into the parser. Dialog Definations can then be accessed + using the "dialogs" dictionary member (name->DialogDef). The "ids" member contains the dictionary of id->name. + The "names" member contains the dictionary of name->id + """ + self.open(rcstream) + self.getToken() + while self.token!=None: + self.parse() + self.getToken() + + def open(self, rcstream): + self.lex = shlex.shlex(rcstream) + self.lex.commenters = "//#" + + def parseH(self, file): + lex = shlex.shlex(file) + lex.commenters = "//" + token = " " + while token is not None: + token = lex.get_token() + if token == "" or token is None: + token = None + else: + if token=='define': + n = lex.get_token() + i = int(lex.get_token()) + self.ids[n] = i + if i in self.names: + # Dupe ID really isn't a problem - most consumers + # want to go from name->id, and this is OK. + # It means you can't go from id->name though. + pass + # ignore AppStudio special ones + #if not n.startswith("_APS_"): + # print "Duplicate id",i,"for",n,"is", self.names[i] + else: + self.names[i] = n + if self.next_id<=i: + self.next_id = i+1 + + def parse(self): + noid_parsers = { + "STRINGTABLE": self.parse_stringtable, + } + + id_parsers = { + "DIALOG" : self.parse_dialog, + "DIALOGEX": self.parse_dialog, +# "TEXTINCLUDE": self.parse_textinclude, + "BITMAP": self.parse_bitmap, + "ICON": self.parse_icon, + } + deep = 0 + base_token = self.token + rp = noid_parsers.get(base_token) + if rp is not None: + rp() + else: + # Not something we parse that isn't prefixed by an ID + # See if it is an ID prefixed item - if it is, our token + # is the resource ID. + resource_id = self.token + self.getToken() + if self.token is None: + return + + if "BEGIN" == self.token: + # A 'BEGIN' for a structure we don't understand - skip to the + # matching 'END' + deep = 1 + while deep!=0 and self.token is not None: + self.getToken() + self.debug("Zooming over", self.token) + if "BEGIN" == self.token: + deep += 1 + elif "END" == self.token: + deep -= 1 + else: + rp = id_parsers.get(self.token) + if rp is not None: + self.debug("Dispatching '%s'" % (self.token,)) + rp(resource_id) + else: + # We don't know what the resource type is, but we + # have already consumed the next, which can cause problems, + # so push it back. 
+ self.debug("Skipping top-level '%s'" % base_token) + self.ungetToken() + + def addId(self, id_name): + if id_name in self.ids: + id = self.ids[id_name] + else: + # IDOK, IDCANCEL etc are special - if a real resource has this value + for n in ["IDOK","IDCANCEL","IDYES","IDNO", "IDABORT"]: + if id_name == n: + v = getattr(win32con, n) + self.ids[n] = v + self.names[v] = n + return v + id = self.next_id + self.next_id += 1 + self.ids[id_name] = id + self.names[id] = id_name + return id + + def lang(self): + while self.token[0:4]=="LANG" or self.token[0:7]=="SUBLANG" or self.token==',': + self.getToken(); + + def parse_textinclude(self, res_id): + while self.getToken() != "BEGIN": + pass + while 1: + if self.token == "END": + break + s = self.getToken() + + def parse_stringtable(self): + while self.getToken() != "BEGIN": + pass + while 1: + self.getToken() + if self.token == "END": + break + sid = self.token + self.getToken() + sd = StringDef(sid, self.addId(sid), self.currentQuotedString()) + self.stringTable[sid] = sd + + def parse_bitmap(self, name): + return self.parse_bitmap_or_icon(name, self.bitmaps) + + def parse_icon(self, name): + return self.parse_bitmap_or_icon(name, self.icons) + + def parse_bitmap_or_icon(self, name, dic): + self.getToken() + while not self.token.startswith('"'): + self.getToken() + bmf = self.token[1:-1] # quotes + dic[name] = bmf + + def parse_dialog(self, name): + dlg = DialogDef(name,self.addId(name)) + assert len(dlg.controls)==0 + self._dialogs[name] = dlg + extras = [] + self.getToken() + while not self.token.isdigit(): + self.debug("extra", self.token) + extras.append(self.token) + self.getToken() + dlg.x = int(self.token) + self.getCommaToken() + self.getToken() # number + dlg.y = int(self.token) + self.getCommaToken() + self.getToken() # number + dlg.w = int(self.token) + self.getCommaToken() + self.getToken() # number + dlg.h = int(self.token) + self.getToken() + while not (self.token==None or self.token=="" or self.token=="END"): + if self.token=="STYLE": + self.dialogStyle(dlg) + elif self.token=="EXSTYLE": + self.dialogExStyle(dlg) + elif self.token=="CAPTION": + self.dialogCaption(dlg) + elif self.token=="FONT": + self.dialogFont(dlg) + elif self.token=="BEGIN": + self.controls(dlg) + else: + break + self.dialogs[name] = dlg.createDialogTemplate() + + def dialogStyle(self, dlg): + dlg.style, dlg.styles = self.styles( [], win32con.DS_SETFONT) + def dialogExStyle(self, dlg): + self.getToken() + dlg.styleEx, dlg.stylesEx = self.styles( [], 0) + + def styles(self, defaults, defaultStyle): + list = defaults + style = defaultStyle + + if "STYLE"==self.token: + self.getToken() + i = 0 + Not = False + while ((i%2==1 and ("|"==self.token or "NOT"==self.token)) or (i%2==0)) and not self.token==None: + Not = False; + if "NOT"==self.token: + Not = True + self.getToken() + i += 1 + if self.token!="|": + if self.token in win32con.__dict__: + value = getattr(win32con,self.token) + else: + if self.token in commctrl.__dict__: + value = getattr(commctrl,self.token) + else: + value = 0 + if Not: + list.append("NOT "+self.token) + self.debug("styles add Not",self.token, value) + style &= ~value + else: + list.append(self.token) + self.debug("styles add", self.token, value) + style |= value + self.getToken() + self.debug("style is ",style) + + return style, list + + def dialogCaption(self, dlg): + if "CAPTION"==self.token: + self.getToken() + self.token = self.token[1:-1] + self.debug("Caption is:",self.token) + dlg.caption = self.token + self.getToken() + def 
dialogFont(self, dlg): + if "FONT"==self.token: + self.getToken() + dlg.fontSize = int(self.token) + self.getCommaToken() + self.getToken() # Font name + dlg.font = self.token[1:-1] # it's quoted + self.getToken() + while "BEGIN"!=self.token: + self.getToken() + def controls(self, dlg): + if self.token=="BEGIN": self.getToken() + # All controls look vaguely like: + # TYPE [text, ] Control_id, l, t, r, b [, style] + # .rc parser documents all control types as: + # CHECKBOX, COMBOBOX, CONTROL, CTEXT, DEFPUSHBUTTON, EDITTEXT, GROUPBOX, + # ICON, LISTBOX, LTEXT, PUSHBUTTON, RADIOBUTTON, RTEXT, SCROLLBAR + without_text = ["EDITTEXT", "COMBOBOX", "LISTBOX", "SCROLLBAR"] + while self.token!="END": + control = ControlDef() + control.controlType = self.token; + self.getToken() + if control.controlType not in without_text: + if self.token[0:1]=='"': + control.label = self.currentQuotedString() + # Some funny controls, like icons and picture controls use + # the "window text" as extra resource ID (ie, the ID of the + # icon itself). This may be either a literal, or an ID string. + elif self.token=="-" or self.token.isdigit(): + control.label = str(self.currentNumberToken()) + else: + # An ID - use the numeric equiv. + control.label = str(self.addId(self.token)) + self.getCommaToken() + self.getToken() + # Control IDs may be "names" or literal ints + if self.token=="-" or self.token.isdigit(): + control.id = self.currentNumberToken() + control.idNum = control.id + else: + # name of an ID + control.id = self.token + control.idNum = self.addId(control.id) + self.getCommaToken() + + if control.controlType == "CONTROL": + self.getToken() + control.subType = self.token[1:-1] + thisDefaultStyle = defaultControlStyle | \ + _addDefaults.get(control.subType, 0) + # Styles + self.getCommaToken() + self.getToken() + control.style, control.styles = self.styles([], thisDefaultStyle) + else: + thisDefaultStyle = defaultControlStyle | \ + _addDefaults.get(control.controlType, 0) + # incase no style is specified. + control.style = thisDefaultStyle + # Rect + control.x = int(self.getToken()) + self.getCommaToken() + control.y = int(self.getToken()) + self.getCommaToken() + control.w = int(self.getToken()) + self.getCommaToken() + self.getToken() + control.h = int(self.token) + self.getToken() + if self.token==",": + self.getToken() + control.style, control.styles = self.styles([], thisDefaultStyle) + if self.token==",": + self.getToken() + control.styleEx, control.stylesEx = self.styles([], defaultControlStyleEx) + #print control.toString() + dlg.controls.append(control) + +def ParseStreams(rc_file, h_file): + rcp = RCParser() + if h_file: + rcp.parseH(h_file) + try: + rcp.load(rc_file) + except: + lex = getattr(rcp, "lex", None) + if lex: + print("ERROR parsing dialogs at line", lex.lineno) + print("Next 10 tokens are:") + for i in range(10): + print(lex.get_token(), end=' ') + print() + raise + return rcp + +def Parse(rc_name, h_name = None): + if h_name: + h_file = open(h_name, "r") + else: + # See if same basename as the .rc + h_name = rc_name[:-2]+"h" + try: + h_file = open(h_name, "r") + except IOError: + # See if MSVC default of 'resource.h' in the same dir. 
+ h_name = os.path.join(os.path.dirname(rc_name), "resource.h") + try: + h_file = open(h_name, "r") + except IOError: + # .h files are optional anyway + h_file = None + rc_file = open(rc_name, "r") + try: + return ParseStreams(rc_file, h_file) + finally: + if h_file is not None: + h_file.close() + rc_file.close() + return rcp + +def GenerateFrozenResource(rc_name, output_name, h_name = None): + """Converts an .rc windows resource source file into a python source file + with the same basic public interface as the rest of this module. + Particularly useful for py2exe or other 'freeze' type solutions, + where a frozen .py file can be used inplace of a real .rc file. + """ + rcp = Parse(rc_name, h_name) + in_stat = os.stat(rc_name) + + out = open(output_name, "wt") + out.write("#%s\n" % output_name) + out.write("#This is a generated file. Please edit %s instead.\n" % rc_name) + out.write("__version__=%r\n" % __version__) + out.write("_rc_size_=%d\n_rc_mtime_=%d\n" % (in_stat[stat.ST_SIZE], in_stat[stat.ST_MTIME])) + + out.write("class StringDef:\n") + out.write("\tdef __init__(self, id, idNum, value):\n") + out.write("\t\tself.id = id\n") + out.write("\t\tself.idNum = idNum\n") + out.write("\t\tself.value = value\n") + out.write("\tdef __repr__(self):\n") + out.write("\t\treturn \"StringDef(%r, %r, %r)\" % (self.id, self.idNum, self.value)\n") + + out.write("class FakeParser:\n") + + for name in "dialogs", "ids", "names", "bitmaps", "icons", "stringTable": + out.write("\t%s = \\\n" % (name,)) + pprint.pprint(getattr(rcp, name), out) + out.write("\n") + + out.write("def Parse(s):\n") + out.write("\treturn FakeParser()\n") + out.close() + +if __name__=='__main__': + if len(sys.argv) <= 1: + print(__doc__) + print() + print("See test_win32rcparser.py, and the win32rcparser directory (both") + print("in the test suite) for an example of this module's usage.") + else: + import pprint + filename = sys.argv[1] + if "-v" in sys.argv: + RCParser.debugEnabled = 1 + print("Dumping all resources in '%s'" % filename) + resources = Parse(filename) + for id, ddef in resources.dialogs.items(): + print("Dialog %s (%d controls)" % (id, len(ddef))) + pprint.pprint(ddef) + print() + for id, sdef in resources.stringTable.items(): + print("String %s=%r" % (id, sdef.value)) + print() + for id, sdef in resources.bitmaps.items(): + print("Bitmap %s=%r" % (id, sdef)) + print() + for id, sdef in resources.icons.items(): + print("Icon %s=%r" % (id, sdef)) + print() diff --git a/venv/Lib/site-packages/win32/lib/win32serviceutil.py b/venv/Lib/site-packages/win32/lib/win32serviceutil.py new file mode 100644 index 00000000..67a81264 --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32serviceutil.py @@ -0,0 +1,851 @@ +# General purpose service utilities, both for standard Python scripts, +# and for for Python programs which run as services... +# +# Note that most utility functions here will raise win32api.error's +# (which is == win32service.error, pywintypes.error, etc) +# when things go wrong - eg, not enough permissions to hit the +# registry etc. + +import win32service, win32api, win32con, winerror +import sys, pywintypes, os, warnings +error = RuntimeError + +def LocatePythonServiceExe(exeName = None): + if not exeName and hasattr(sys, "frozen"): + # If py2exe etc calls this with no exeName, default is current exe. + return sys.executable + + # Try and find the specified EXE somewhere. If specifically registered, + # use it. Otherwise look down sys.path, and the global PATH environment. 
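+ # A debug build of pywin32 (win32service_d.pyd) pairs with PythonService_d.exe;
+ # otherwise the release PythonService.exe is assumed as the default host exe.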
+ if exeName is None: + if os.path.splitext(win32service.__file__)[0].endswith("_d"): + exeName = "PythonService_d.exe" + else: + exeName = "PythonService.exe" + # See if it exists as specified + if os.path.isfile(exeName): return win32api.GetFullPathName(exeName) + baseName = os.path.splitext(os.path.basename(exeName))[0] + try: + exeName = win32api.RegQueryValue(win32con.HKEY_LOCAL_MACHINE, + "Software\\Python\\%s\\%s" % (baseName, sys.winver)) + if os.path.isfile(exeName): + return exeName + raise RuntimeError("The executable '%s' is registered as the Python " \ + "service exe, but it does not exist as specified" \ + % exeName) + except win32api.error: + # OK - not there - lets go a-searchin' + for path in [sys.prefix] + sys.path: + look = os.path.join(path, exeName) + if os.path.isfile(look): + return win32api.GetFullPathName(look) + # Try the global Path. + try: + return win32api.SearchPath(None, exeName)[0] + except win32api.error: + msg = "%s is not correctly registered\nPlease locate and run %s, and it will self-register\nThen run this service registration process again." % (exeName, exeName) + raise error(msg) + +def _GetServiceShortName(longName): + # looks up a services name + # from the display name + # Thanks to Andy McKay for this code. + access = win32con.KEY_READ | win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE + hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services", 0, access) + num = win32api.RegQueryInfoKey(hkey)[0] + longName = longName.lower() + # loop through number of subkeys + for x in range(0, num): + # find service name, open subkey + svc = win32api.RegEnumKey(hkey, x) + skey = win32api.RegOpenKey(hkey, svc, 0, access) + try: + # find display name + thisName = str(win32api.RegQueryValueEx(skey, "DisplayName")[0]) + if thisName.lower() == longName: + return svc + except win32api.error: + # in case there is no key called DisplayName + pass + return None + +# Open a service given either it's long or short name. +def SmartOpenService(hscm, name, access): + try: + return win32service.OpenService(hscm, name, access) + except win32api.error as details: + if details.winerror not in [winerror.ERROR_SERVICE_DOES_NOT_EXIST, + winerror.ERROR_INVALID_NAME]: + raise + name = win32service.GetServiceKeyName(hscm, name) + return win32service.OpenService(hscm, name, access) + +def LocateSpecificServiceExe(serviceName): + # Given the name of a specific service, return the .EXE name _it_ uses + # (which may or may not be the Python Service EXE + hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\%s" % (serviceName), 0, win32con.KEY_ALL_ACCESS) + try: + return win32api.RegQueryValueEx(hkey, "ImagePath")[0] + finally: + hkey.Close() + +def InstallPerfmonForService(serviceName, iniName, dllName = None): + # If no DLL name, look it up in the INI file name + if not dllName: # May be empty string! + dllName = win32api.GetProfileVal("Python", "dll", "", iniName) + # Still not found - look for the standard one in the same dir as win32service.pyd + if not dllName: + try: + tryName = os.path.join(os.path.split(win32service.__file__)[0], "perfmondata.dll") + if os.path.isfile(tryName): + dllName = tryName + except AttributeError: + # Frozen app? - anyway, can't find it! + pass + if not dllName: + raise ValueError("The name of the performance DLL must be available") + dllName = win32api.GetFullPathName(dllName) + # Now setup all the required "Performance" entries. 
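+ # The "Performance" subkey names the counter DLL (Library) and the entry
+ # points it exports (Open/Collect/Close) that are called whenever the
+ # service's performance counters are queried.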
+ hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\%s" % (serviceName), 0, win32con.KEY_ALL_ACCESS) + try: + subKey = win32api.RegCreateKey(hkey, "Performance") + try: + win32api.RegSetValueEx(subKey, "Library", 0, win32con.REG_SZ, dllName) + win32api.RegSetValueEx(subKey, "Open", 0, win32con.REG_SZ, "OpenPerformanceData") + win32api.RegSetValueEx(subKey, "Close", 0, win32con.REG_SZ, "ClosePerformanceData") + win32api.RegSetValueEx(subKey, "Collect", 0, win32con.REG_SZ, "CollectPerformanceData") + finally: + win32api.RegCloseKey(subKey) + finally: + win32api.RegCloseKey(hkey) + # Now do the "Lodctr" thang... + + try: + import perfmon + path, fname = os.path.split(iniName) + oldPath = os.getcwd() + if path: + os.chdir(path) + try: + perfmon.LoadPerfCounterTextStrings("python.exe " + fname) + finally: + os.chdir(oldPath) + except win32api.error as details: + print("The service was installed OK, but the performance monitor") + print("data could not be loaded.", details) + +def _GetCommandLine(exeName, exeArgs): + if exeArgs is not None: + return exeName + " " + exeArgs + else: + return exeName + +def InstallService(pythonClassString, serviceName, displayName, startType = None, errorControl = None, bRunInteractive = 0, serviceDeps = None, userName = None, password = None, exeName = None, perfMonIni = None, perfMonDll = None, exeArgs = None, + description = None, delayedstart = None): + # Handle the default arguments. + if startType is None: + startType = win32service.SERVICE_DEMAND_START + serviceType = win32service.SERVICE_WIN32_OWN_PROCESS + if bRunInteractive: + serviceType = serviceType | win32service.SERVICE_INTERACTIVE_PROCESS + if errorControl is None: + errorControl = win32service.SERVICE_ERROR_NORMAL + + exeName = '"%s"' % LocatePythonServiceExe(exeName) # None here means use default PythonService.exe + commandLine = _GetCommandLine(exeName, exeArgs) + hscm = win32service.OpenSCManager(None,None,win32service.SC_MANAGER_ALL_ACCESS) + try: + hs = win32service.CreateService(hscm, + serviceName, + displayName, + win32service.SERVICE_ALL_ACCESS, # desired access + serviceType, # service type + startType, + errorControl, # error control type + commandLine, + None, + 0, + serviceDeps, + userName, + password) + if description is not None: + try: + win32service.ChangeServiceConfig2(hs,win32service.SERVICE_CONFIG_DESCRIPTION,description) + except NotImplementedError: + pass ## ChangeServiceConfig2 and description do not exist on NT + if delayedstart is not None: + try: + win32service.ChangeServiceConfig2(hs,win32service.SERVICE_CONFIG_DELAYED_AUTO_START_INFO, delayedstart) + except (win32service.error, NotImplementedError): + ## delayed start only exists on Vista and later - warn only when trying to set delayed to True + if delayedstart: + warnings.warn('Delayed Start not available on this system') + win32service.CloseServiceHandle(hs) + finally: + win32service.CloseServiceHandle(hscm) + InstallPythonClassString(pythonClassString, serviceName) + # If I have performance monitor info to install, do that. 
+ if perfMonIni is not None: + InstallPerfmonForService(serviceName, perfMonIni, perfMonDll) + +def ChangeServiceConfig(pythonClassString, serviceName, startType = None, errorControl = None, bRunInteractive = 0, + serviceDeps = None, userName = None, password = None, + exeName = None, displayName = None, perfMonIni = None, perfMonDll = None, + exeArgs = None, description = None, delayedstart = None): + # Before doing anything, remove any perfmon counters. + try: + import perfmon + perfmon.UnloadPerfCounterTextStrings("python.exe "+serviceName) + except (ImportError, win32api.error): + pass + + # The EXE location may have changed + exeName = '"%s"' % LocatePythonServiceExe(exeName) + + # Handle the default arguments. + if startType is None: startType = win32service.SERVICE_NO_CHANGE + if errorControl is None: errorControl = win32service.SERVICE_NO_CHANGE + + hscm = win32service.OpenSCManager(None,None,win32service.SC_MANAGER_ALL_ACCESS) + serviceType = win32service.SERVICE_WIN32_OWN_PROCESS + if bRunInteractive: + serviceType = serviceType | win32service.SERVICE_INTERACTIVE_PROCESS + commandLine = _GetCommandLine(exeName, exeArgs) + try: + hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS) + try: + + win32service.ChangeServiceConfig(hs, + serviceType, # service type + startType, + errorControl, # error control type + commandLine, + None, + 0, + serviceDeps, + userName, + password, + displayName) + if description is not None: + try: + win32service.ChangeServiceConfig2(hs,win32service.SERVICE_CONFIG_DESCRIPTION,description) + except NotImplementedError: + pass ## ChangeServiceConfig2 and description do not exist on NT + if delayedstart is not None: + try: + win32service.ChangeServiceConfig2(hs,win32service.SERVICE_CONFIG_DELAYED_AUTO_START_INFO, delayedstart) + except (win32service.error, NotImplementedError): + ## Delayed start only exists on Vista and later. On Nt, will raise NotImplementedError since ChangeServiceConfig2 + ## doensn't exist. On Win2k and XP, will fail with ERROR_INVALID_LEVEL + ## Warn only if trying to set delayed to True + if delayedstart: + warnings.warn('Delayed Start not available on this system') + finally: + win32service.CloseServiceHandle(hs) + finally: + win32service.CloseServiceHandle(hscm) + InstallPythonClassString(pythonClassString, serviceName) + # If I have performance monitor info to install, do that. + if perfMonIni is not None: + InstallPerfmonForService(serviceName, perfMonIni, perfMonDll) + +def InstallPythonClassString(pythonClassString, serviceName): + # Now setup our Python specific entries. + if pythonClassString: + key = win32api.RegCreateKey(win32con.HKEY_LOCAL_MACHINE, "System\\CurrentControlSet\\Services\\%s\\PythonClass" % serviceName) + try: + win32api.RegSetValue(key, None, win32con.REG_SZ, pythonClassString); + finally: + win32api.RegCloseKey(key) + +# Utility functions for Services, to allow persistant properties. +def SetServiceCustomOption(serviceName, option, value): + try: + serviceName = serviceName._svc_name_ + except AttributeError: + pass + key = win32api.RegCreateKey(win32con.HKEY_LOCAL_MACHINE, "System\\CurrentControlSet\\Services\\%s\\Parameters" % serviceName) + try: + if type(value)==type(0): + win32api.RegSetValueEx(key, option, 0, win32con.REG_DWORD, value); + else: + win32api.RegSetValueEx(key, option, 0, win32con.REG_SZ, value); + finally: + win32api.RegCloseKey(key) + +def GetServiceCustomOption(serviceName, option, defaultValue = None): + # First param may also be a service class/instance. 
+ # This allows services to pass "self" + try: + serviceName = serviceName._svc_name_ + except AttributeError: + pass + key = win32api.RegCreateKey(win32con.HKEY_LOCAL_MACHINE, "System\\CurrentControlSet\\Services\\%s\\Parameters" % serviceName) + try: + try: + return win32api.RegQueryValueEx(key, option)[0] + except win32api.error: # No value. + return defaultValue + finally: + win32api.RegCloseKey(key) + + +def RemoveService(serviceName): + try: + import perfmon + perfmon.UnloadPerfCounterTextStrings("python.exe "+serviceName) + except (ImportError, win32api.error): + pass + + hscm = win32service.OpenSCManager(None,None,win32service.SC_MANAGER_ALL_ACCESS) + try: + hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS) + win32service.DeleteService(hs) + win32service.CloseServiceHandle(hs) + finally: + win32service.CloseServiceHandle(hscm) + + import win32evtlogutil + try: + win32evtlogutil.RemoveSourceFromRegistry(serviceName) + except win32api.error: + pass + +def ControlService(serviceName, code, machine = None): + hscm = win32service.OpenSCManager(machine,None,win32service.SC_MANAGER_ALL_ACCESS) + try: + + hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS) + try: + status = win32service.ControlService(hs, code) + finally: + win32service.CloseServiceHandle(hs) + finally: + win32service.CloseServiceHandle(hscm) + return status + +def __FindSvcDeps(findName): + if type(findName) is pywintypes.UnicodeType: findName = str(findName) + dict = {} + k = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services") + num = 0 + while 1: + try: + svc = win32api.RegEnumKey(k, num) + except win32api.error: + break + num = num + 1 + sk = win32api.RegOpenKey(k, svc) + try: + deps, typ = win32api.RegQueryValueEx(sk, "DependOnService") + except win32api.error: + deps = () + for dep in deps: + dep = dep.lower() + dep_on = dict.get(dep, []) + dep_on.append(svc) + dict[dep]=dep_on + + return __ResolveDeps(findName, dict) + + +def __ResolveDeps(findName, dict): + items = dict.get(findName.lower(), []) + retList = [] + for svc in items: + retList.insert(0, svc) + retList = __ResolveDeps(svc, dict) + retList + return retList + +def WaitForServiceStatus(serviceName, status, waitSecs, machine=None): + """Waits for the service to return the specified status. 
You + should have already requested the service to enter that state""" + for i in range(waitSecs*4): + now_status = QueryServiceStatus(serviceName, machine)[1] + if now_status == status: + break + win32api.Sleep(250) + else: + raise pywintypes.error(winerror.ERROR_SERVICE_REQUEST_TIMEOUT, "QueryServiceStatus", win32api.FormatMessage(winerror.ERROR_SERVICE_REQUEST_TIMEOUT)[:-2]) + +def __StopServiceWithTimeout(hs, waitSecs = 30): + try: + status = win32service.ControlService(hs, win32service.SERVICE_CONTROL_STOP) + except pywintypes.error as exc: + if exc.winerror!=winerror.ERROR_SERVICE_NOT_ACTIVE: + raise + for i in range(waitSecs): + status = win32service.QueryServiceStatus(hs) + if status[1] == win32service.SERVICE_STOPPED: + break + win32api.Sleep(1000) + else: + raise pywintypes.error(winerror.ERROR_SERVICE_REQUEST_TIMEOUT, "ControlService", win32api.FormatMessage(winerror.ERROR_SERVICE_REQUEST_TIMEOUT)[:-2]) + + +def StopServiceWithDeps(serviceName, machine = None, waitSecs = 30): + # Stop a service recursively looking for dependant services + hscm = win32service.OpenSCManager(machine,None,win32service.SC_MANAGER_ALL_ACCESS) + try: + deps = __FindSvcDeps(serviceName) + for dep in deps: + hs = win32service.OpenService(hscm, dep, win32service.SERVICE_ALL_ACCESS) + try: + __StopServiceWithTimeout(hs, waitSecs) + finally: + win32service.CloseServiceHandle(hs) + # Now my service! + hs = win32service.OpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS) + try: + __StopServiceWithTimeout(hs, waitSecs) + finally: + win32service.CloseServiceHandle(hs) + + finally: + win32service.CloseServiceHandle(hscm) + + +def StopService(serviceName, machine = None): + return ControlService(serviceName, win32service.SERVICE_CONTROL_STOP, machine) + +def StartService(serviceName, args = None, machine = None): + hscm = win32service.OpenSCManager(machine,None,win32service.SC_MANAGER_ALL_ACCESS) + try: + + hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS) + try: + win32service.StartService(hs, args) + finally: + win32service.CloseServiceHandle(hs) + finally: + win32service.CloseServiceHandle(hscm) + +def RestartService(serviceName, args = None, waitSeconds = 30, machine = None): + "Stop the service, and then start it again (with some tolerance for allowing it to stop.)" + try: + StopService(serviceName, machine) + except pywintypes.error as exc: + # Allow only "service not running" error + if exc.winerror!=winerror.ERROR_SERVICE_NOT_ACTIVE: + raise + # Give it a few goes, as the service may take time to stop + for i in range(waitSeconds): + try: + StartService(serviceName, args, machine) + break + except pywintypes.error as exc: + if exc.winerror!=winerror.ERROR_SERVICE_ALREADY_RUNNING: + raise + win32api.Sleep(1000) + else: + print("Gave up waiting for the old service to stop!") + +def _DebugCtrlHandler(evt): + if evt in (win32con.CTRL_C_EVENT, win32con.CTRL_BREAK_EVENT): + assert g_debugService + print("Stopping debug service.") + g_debugService.SvcStop() + return True + return False + +def DebugService(cls, argv = []): + # Run a service in "debug" mode. Re-implements what pythonservice.exe + # does when it sees a "-debug" param. + # Currently only used by "frozen" (ie, py2exe) programs (but later may + # end up being used for all services should we ever remove + # pythonservice.exe) + import servicemanager + global g_debugService + + print("Debugging service %s - press Ctrl+C to stop." 
% (cls._svc_name_,)) + servicemanager.Debugging(True) + servicemanager.PrepareToHostSingle(cls) + g_debugService = cls(argv) + # Setup a ctrl+c handler to simulate a "stop" + win32api.SetConsoleCtrlHandler(_DebugCtrlHandler, True) + try: + g_debugService.SvcRun() + finally: + win32api.SetConsoleCtrlHandler(_DebugCtrlHandler, False) + servicemanager.Debugging(False) + g_debugService = None + +def GetServiceClassString(cls, argv = None): + if argv is None: + argv = sys.argv + import pickle + modName = pickle.whichmodule(cls, cls.__name__) + if modName == '__main__': + try: + fname = win32api.GetFullPathName(argv[0]) + path = os.path.split(fname)[0] + # Eaaaahhhh - sometimes this will be a short filename, which causes + # problems with 1.5.1 and the silly filename case rule. + filelist = win32api.FindFiles(fname) + # win32api.FindFiles will not detect files in a zip or exe. If list is empty, + # skip the test and hope the file really exists. + if len(filelist) != 0: + # Get the long name + fname = os.path.join(path, filelist[0][8]) + except win32api.error: + raise error("Could not resolve the path name '%s' to a full path" % (argv[0])) + modName = os.path.splitext(fname)[0] + return modName + "." + cls.__name__ + +def QueryServiceStatus(serviceName, machine=None): + hscm = win32service.OpenSCManager(machine,None,win32service.SC_MANAGER_CONNECT) + try: + + hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_QUERY_STATUS) + try: + status = win32service.QueryServiceStatus(hs) + finally: + win32service.CloseServiceHandle(hs) + finally: + win32service.CloseServiceHandle(hscm) + return status + +def usage(): + try: + fname = os.path.split(sys.argv[0])[1] + except: + fname = sys.argv[0] + print("Usage: '%s [options] install|update|remove|start [...]|stop|restart [...]|debug [...]'" % fname) + print("Options for 'install' and 'update' commands only:") + print(" --username domain\\username : The Username the service is to run under") + print(" --password password : The password for the username") + print(" --startup [manual|auto|disabled|delayed] : How the service starts, default = manual") + print(" --interactive : Allow the service to interact with the desktop.") + print(" --perfmonini file: .ini file to use for registering performance monitor data") + print(" --perfmondll file: .dll file to use when querying the service for") + print(" performance data, default = perfmondata.dll") + print("Options for 'start' and 'stop' commands only:") + print(" --wait seconds: Wait for the service to actually start or stop.") + print(" If you specify --wait with the 'stop' option, the service") + print(" and all dependent services will be stopped, each waiting") + print(" the specified period.") + sys.exit(1) + +def HandleCommandLine(cls, serviceClassString = None, argv = None, customInstallOptions = "", customOptionHandler = None): + """Utility function allowing services to process the command line. + + Allows standard commands such as 'start', 'stop', 'debug', 'install' etc. + + Install supports 'standard' command line options prefixed with '--', such as + --username, --password, etc. In addition, + the function allows custom command line options to be handled by the calling function. 
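+
+ For example, a minimal service module (assuming a ServiceFramework
+ subclass named MyService) typically ends with:
+
+ if __name__ == '__main__':
+ win32serviceutil.HandleCommandLine(MyService)
+
+ after which commands such as "python myservice.py install" and
+ "python myservice.py start" install and start the service.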
+ """ + err = 0 + + if argv is None: argv = sys.argv + + if len(argv)<=1: + usage() + + serviceName = cls._svc_name_ + serviceDisplayName = cls._svc_display_name_ + if serviceClassString is None: + serviceClassString = GetServiceClassString(cls) + + # Pull apart the command line + import getopt + try: + opts, args = getopt.getopt(argv[1:], customInstallOptions,["password=","username=","startup=","perfmonini=", "perfmondll=", "interactive", "wait="]) + except getopt.error as details: + print(details) + usage() + userName = None + password = None + perfMonIni = perfMonDll = None + startup = None + delayedstart = None + interactive = None + waitSecs = 0 + for opt, val in opts: + if opt=='--username': + userName = val + elif opt=='--password': + password = val + elif opt=='--perfmonini': + perfMonIni = val + elif opt=='--perfmondll': + perfMonDll = val + elif opt=='--interactive': + interactive = 1 + elif opt=='--startup': + map = {"manual": win32service.SERVICE_DEMAND_START, + "auto" : win32service.SERVICE_AUTO_START, + "delayed": win32service.SERVICE_AUTO_START, ## ChangeServiceConfig2 called later + "disabled": win32service.SERVICE_DISABLED} + try: + startup = map[val.lower()] + except KeyError: + print("'%s' is not a valid startup option" % val) + if val.lower() == "delayed": + delayedstart = True + elif val.lower() == "auto": + delayedstart = False + ## else no change + elif opt=='--wait': + try: + waitSecs = int(val) + except ValueError: + print("--wait must specify an integer number of seconds.") + usage() + + arg=args[0] + knownArg = 0 + # First we process all arguments which pass additional args on + if arg=="start": + knownArg = 1 + print("Starting service %s" % (serviceName)) + try: + StartService(serviceName, args[1:]) + if waitSecs: + WaitForServiceStatus(serviceName, win32service.SERVICE_RUNNING, waitSecs) + except win32service.error as exc: + print("Error starting service: %s" % exc.strerror) + err = exc.winerror + + elif arg=="restart": + knownArg = 1 + print("Restarting service %s" % (serviceName)) + RestartService(serviceName, args[1:]) + if waitSecs: + WaitForServiceStatus(serviceName, win32service.SERVICE_RUNNING, waitSecs) + + elif arg=="debug": + knownArg = 1 + if not hasattr(sys, "frozen"): + # non-frozen services use pythonservice.exe which handles a + # -debug option + svcArgs = " ".join(args[1:]) + try: + exeName = LocateSpecificServiceExe(serviceName) + except win32api.error as exc: + if exc.winerror == winerror.ERROR_FILE_NOT_FOUND: + print("The service does not appear to be installed.") + print("Please install the service before debugging it.") + sys.exit(1) + raise + try: + os.system("%s -debug %s %s" % (exeName, serviceName, svcArgs)) + # ^C is used to kill the debug service. Sometimes Python also gets + # interrupted - ignore it... + except KeyboardInterrupt: + pass + else: + # py2exe services don't use pythonservice - so we simulate + # debugging here. 
+ DebugService(cls, args) + + if not knownArg and len(args)!=1: + usage() # the rest of the cmds don't take addn args + + if arg=="install": + knownArg = 1 + try: + serviceDeps = cls._svc_deps_ + except AttributeError: + serviceDeps = None + try: + exeName = cls._exe_name_ + except AttributeError: + exeName = None # Default to PythonService.exe + try: + exeArgs = cls._exe_args_ + except AttributeError: + exeArgs = None + try: + description = cls._svc_description_ + except AttributeError: + description = None + print("Installing service %s" % (serviceName,)) + # Note that we install the service before calling the custom option + # handler, so if the custom handler fails, we have an installed service (from NT's POV) + # but is unlikely to work, as the Python code controlling it failed. Therefore + # we remove the service if the first bit works, but the second doesnt! + try: + InstallService(serviceClassString, serviceName, serviceDisplayName, serviceDeps = serviceDeps, startType=startup, bRunInteractive=interactive, userName=userName,password=password, exeName=exeName, perfMonIni=perfMonIni,perfMonDll=perfMonDll,exeArgs=exeArgs, + description=description, delayedstart=delayedstart) + if customOptionHandler: + customOptionHandler(*(opts,)) + print("Service installed") + except win32service.error as exc: + if exc.winerror==winerror.ERROR_SERVICE_EXISTS: + arg = "update" # Fall through to the "update" param! + else: + print("Error installing service: %s (%d)" % (exc.strerror, exc.winerror)) + err = exc.winerror + except ValueError as msg: # Can be raised by custom option handler. + print("Error installing service: %s" % str(msg)) + err = -1 + # xxx - maybe I should remove after _any_ failed install - however, + # xxx - it may be useful to help debug to leave the service as it failed. + # xxx - We really _must_ remove as per the comments above... + # As we failed here, remove the service, so the next installation + # attempt works. 
+ try: + RemoveService(serviceName) + except win32api.error: + print("Warning - could not remove the partially installed service.") + + if arg == "update": + knownArg = 1 + try: + serviceDeps = cls._svc_deps_ + except AttributeError: + serviceDeps = None + try: + exeName = cls._exe_name_ + except AttributeError: + exeName = None # Default to PythonService.exe + try: + exeArgs = cls._exe_args_ + except AttributeError: + exeArgs = None + try: + description=cls._svc_description_ + except AttributeError: + description=None + print("Changing service configuration") + try: + ChangeServiceConfig(serviceClassString, serviceName, serviceDeps = serviceDeps, startType=startup, bRunInteractive=interactive, userName=userName,password=password, exeName=exeName, displayName = serviceDisplayName, perfMonIni=perfMonIni,perfMonDll=perfMonDll,exeArgs=exeArgs, + description=description, delayedstart=delayedstart) + if customOptionHandler: + customOptionHandler(*(opts,)) + print("Service updated") + except win32service.error as exc: + print("Error changing service configuration: %s (%d)" % (exc.strerror,exc.winerror)) + err = exc.winerror + + elif arg=="remove": + knownArg = 1 + print("Removing service %s" % (serviceName)) + try: + RemoveService(serviceName) + print("Service removed") + except win32service.error as exc: + print("Error removing service: %s (%d)" % (exc.strerror,exc.winerror)) + err = exc.winerror + elif arg=="stop": + knownArg = 1 + print("Stopping service %s" % (serviceName)) + try: + if waitSecs: + StopServiceWithDeps(serviceName, waitSecs = waitSecs) + else: + StopService(serviceName) + except win32service.error as exc: + print("Error stopping service: %s (%d)" % (exc.strerror,exc.winerror)) + err = exc.winerror + if not knownArg: + err = -1 + print("Unknown command - '%s'" % arg) + usage() + return err + +# +# Useful base class to build services from. +# +class ServiceFramework: + # Required Attributes: + # _svc_name_ = The service name + # _svc_display_name_ = The service display name + + # Optional Attributes: + _svc_deps_ = None # sequence of service names on which this depends + _exe_name_ = None # Default to PythonService.exe + _exe_args_ = None # Default to no arguments + _svc_description_ = None # Only exists on Windows 2000 or later, ignored on windows NT + + def __init__(self, args): + import servicemanager + self.ssh = servicemanager.RegisterServiceCtrlHandler(args[0], self.ServiceCtrlHandlerEx, True) + servicemanager.SetEventSourceName(self._svc_name_) + self.checkPoint = 0 + + def GetAcceptedControls(self): + # Setup the service controls we accept based on our attributes. Note + # that if you need to handle controls via SvcOther[Ex](), you must + # override this. + accepted = 0 + if hasattr(self, "SvcStop"): accepted = accepted | win32service.SERVICE_ACCEPT_STOP + if hasattr(self, "SvcPause") and hasattr(self, "SvcContinue"): + accepted = accepted | win32service.SERVICE_ACCEPT_PAUSE_CONTINUE + if hasattr(self, "SvcShutdown"): accepted = accepted | win32service.SERVICE_ACCEPT_SHUTDOWN + return accepted + + def ReportServiceStatus(self, serviceStatus, waitHint = 5000, win32ExitCode = 0, svcExitCode = 0): + if self.ssh is None: # Debugging! 
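+ # No SCM handle is available (typically when running under DebugService),
+ # so there is nothing to report the status to.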
+ return + if serviceStatus == win32service.SERVICE_START_PENDING: + accepted = 0 + else: + accepted = self.GetAcceptedControls() + + if serviceStatus in [win32service.SERVICE_RUNNING, win32service.SERVICE_STOPPED]: + checkPoint = 0 + else: + self.checkPoint = self.checkPoint + 1 + checkPoint = self.checkPoint + + # Now report the status to the control manager + status = (win32service.SERVICE_WIN32_OWN_PROCESS, + serviceStatus, + accepted, # dwControlsAccepted, + win32ExitCode, # dwWin32ExitCode; + svcExitCode, # dwServiceSpecificExitCode; + checkPoint, # dwCheckPoint; + waitHint) + win32service.SetServiceStatus( self.ssh, status) + + def SvcInterrogate(self): + # Assume we are running, and everyone is happy. + self.ReportServiceStatus(win32service.SERVICE_RUNNING) + + def SvcOther(self, control): + try: + print("Unknown control status - %d" % control) + except IOError: + # services may not have a valid stdout! + pass + + def ServiceCtrlHandler(self, control): + return self.ServiceCtrlHandlerEx(control, 0, None) + + # The 'Ex' functions, which take additional params + def SvcOtherEx(self, control, event_type, data): + # The default here is to call self.SvcOther as that is the old behaviour. + # If you want to take advantage of the extra data, override this method + return self.SvcOther(control) + + def ServiceCtrlHandlerEx(self, control, event_type, data): + if control==win32service.SERVICE_CONTROL_STOP: + return self.SvcStop() + elif control==win32service.SERVICE_CONTROL_PAUSE: + return self.SvcPause() + elif control==win32service.SERVICE_CONTROL_CONTINUE: + return self.SvcContinue() + elif control==win32service.SERVICE_CONTROL_INTERROGATE: + return self.SvcInterrogate() + elif control==win32service.SERVICE_CONTROL_SHUTDOWN: + return self.SvcShutdown() + else: + return self.SvcOtherEx(control, event_type, data) + + def SvcRun(self): + # This is the entry point the C framework calls when the Service is + # started. Your Service class should implement SvcDoRun(). + # Or you can override this method for more control over the Service + # statuses reported to the SCM. + + # If this method raises an exception, the C framework will detect this + # and report a SERVICE_STOPPED status with a non-zero error code. + + self.ReportServiceStatus(win32service.SERVICE_RUNNING) + self.SvcDoRun() + # Once SvcDoRun terminates, the service has stopped. + # We tell the SCM the service is still stopping - the C framework + # will automatically tell the SCM it has stopped when this returns. + self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) diff --git a/venv/Lib/site-packages/win32/lib/win32timezone.py b/venv/Lib/site-packages/win32/lib/win32timezone.py new file mode 100644 index 00000000..31a157cd --- /dev/null +++ b/venv/Lib/site-packages/win32/lib/win32timezone.py @@ -0,0 +1,1003 @@ +# -*- coding: UTF-8 -*- + +""" +win32timezone: + Module for handling datetime.tzinfo time zones using the windows +registry for time zone information. The time zone names are dependent +on the registry entries defined by the operating system. + + This module may be tested using the doctest module. + + Written by Jason R. Coombs (jaraco@jaraco.com). + Copyright © 2003-2012. + All Rights Reserved. + + This module is licenced for use in Mark Hammond's pywin32 +library under the same terms as the pywin32 library. + + To use this time zone module with the datetime module, simply pass +the TimeZoneInfo object to the datetime constructor. 
For example, + +>>> import win32timezone, datetime +>>> assert 'Mountain Standard Time' in win32timezone.TimeZoneInfo.get_sorted_time_zone_names() +>>> MST = win32timezone.TimeZoneInfo('Mountain Standard Time') +>>> now = datetime.datetime.now(MST) + + The now object is now a time-zone aware object, and daylight savings- +aware methods may be called on it. + +>>> now.utcoffset() in (datetime.timedelta(-1, 61200), datetime.timedelta(-1, 64800)) +True + +(note that the result of utcoffset call will be different based on when now was +generated, unless standard time is always used) + +>>> now = datetime.datetime.now(TimeZoneInfo('Mountain Standard Time', True)) +>>> now.utcoffset() +datetime.timedelta(days=-1, seconds=61200) + +>>> aug2 = datetime.datetime(2003, 8, 2, tzinfo = MST) +>>> tuple(aug2.utctimetuple()) +(2003, 8, 2, 6, 0, 0, 5, 214, 0) +>>> nov2 = datetime.datetime(2003, 11, 25, tzinfo = MST) +>>> tuple(nov2.utctimetuple()) +(2003, 11, 25, 7, 0, 0, 1, 329, 0) + +To convert from one timezone to another, just use the astimezone method. + +>>> aug2.isoformat() +'2003-08-02T00:00:00-06:00' +>>> aug2est = aug2.astimezone(win32timezone.TimeZoneInfo('Eastern Standard Time')) +>>> aug2est.isoformat() +'2003-08-02T02:00:00-04:00' + +calling the displayName member will return the display name as set in the +registry. + +>>> est = win32timezone.TimeZoneInfo('Eastern Standard Time') +>>> str(est.displayName) +'(UTC-05:00) Eastern Time (US & Canada)' + +>>> gmt = win32timezone.TimeZoneInfo('GMT Standard Time', True) +>>> str(gmt.displayName) +'(UTC+00:00) Dublin, Edinburgh, Lisbon, London' + +To get the complete list of available time zone keys, +>>> zones = win32timezone.TimeZoneInfo.get_all_time_zones() + +If you want to get them in an order that's sorted longitudinally +>>> zones = win32timezone.TimeZoneInfo.get_sorted_time_zones() + +TimeZoneInfo now supports being pickled and comparison +>>> import pickle +>>> tz = win32timezone.TimeZoneInfo('China Standard Time') +>>> tz == pickle.loads(pickle.dumps(tz)) +True + +It's possible to construct a TimeZoneInfo from a TimeZoneDescription +including the currently-defined zone. +>>> tz = win32timezone.TimeZoneInfo(TimeZoneDefinition.current()) +>>> tz == pickle.loads(pickle.dumps(tz)) +True + +>>> aest = win32timezone.TimeZoneInfo('AUS Eastern Standard Time') +>>> est = win32timezone.TimeZoneInfo('E. Australia Standard Time') +>>> dt = datetime.datetime(2006, 11, 11, 1, 0, 0, tzinfo = aest) +>>> estdt = dt.astimezone(est) +>>> estdt.strftime('%Y-%m-%d %H:%M:%S') +'2006-11-11 00:00:00' + +>>> dt = datetime.datetime(2007, 1, 12, 1, 0, 0, tzinfo = aest) +>>> estdt = dt.astimezone(est) +>>> estdt.strftime('%Y-%m-%d %H:%M:%S') +'2007-01-12 00:00:00' + +>>> dt = datetime.datetime(2007, 6, 13, 1, 0, 0, tzinfo = aest) +>>> estdt = dt.astimezone(est) +>>> estdt.strftime('%Y-%m-%d %H:%M:%S') +'2007-06-13 01:00:00' + +Microsoft now has a patch for handling time zones in 2007 (see +http://support.microsoft.com/gp/cp_dst) + +As a result, patched systems will give an incorrect result for +dates prior to the designated year except for Vista and its +successors, which have dynamic time zone support. 
+>>> nov2_pre_change = datetime.datetime(2003, 11, 2, tzinfo = MST) +>>> old_response = (2003, 11, 2, 7, 0, 0, 6, 306, 0) +>>> incorrect_patch_response = (2003, 11, 2, 6, 0, 0, 6, 306, 0) +>>> pre_response = nov2_pre_change.utctimetuple() +>>> pre_response in (old_response, incorrect_patch_response) +True + +Furthermore, unpatched systems pre-Vista will give an incorrect +result for dates after 2007. +>>> nov2_post_change = datetime.datetime(2007, 11, 2, tzinfo = MST) +>>> incorrect_unpatched_response = (2007, 11, 2, 7, 0, 0, 4, 306, 0) +>>> new_response = (2007, 11, 2, 6, 0, 0, 4, 306, 0) +>>> post_response = nov2_post_change.utctimetuple() +>>> post_response in (new_response, incorrect_unpatched_response) +True + + +There is a function you can call to get some capabilities of the time +zone data. +>>> caps = GetTZCapabilities() +>>> isinstance(caps, dict) +True +>>> 'MissingTZPatch' in caps +True +>>> 'DynamicTZSupport' in caps +True + +>>> both_dates_correct = (pre_response == old_response and post_response == new_response) +>>> old_dates_wrong = (pre_response == incorrect_patch_response) +>>> new_dates_wrong = (post_response == incorrect_unpatched_response) + +>>> caps['DynamicTZSupport'] == both_dates_correct +True + +>>> (not caps['DynamicTZSupport'] and caps['MissingTZPatch']) == new_dates_wrong +True + +>>> (not caps['DynamicTZSupport'] and not caps['MissingTZPatch']) == old_dates_wrong +True + +This test helps ensure language support for unicode characters +>>> x = TIME_ZONE_INFORMATION(0, u'français') + + +Test conversion from one time zone to another at a DST boundary +=============================================================== + +>>> tz_hi = TimeZoneInfo('Hawaiian Standard Time') +>>> tz_pac = TimeZoneInfo('Pacific Standard Time') +>>> time_before = datetime.datetime(2011, 11, 5, 15, 59, 59, tzinfo=tz_hi) +>>> tz_hi.utcoffset(time_before) +datetime.timedelta(days=-1, seconds=50400) +>>> tz_hi.dst(time_before) +datetime.timedelta(0) + +Hawaii doesn't need dynamic TZ info +>>> getattr(tz_hi, 'dynamicInfo', None) + +Here's a time that gave some trouble as reported in #3523104 +because one minute later, the equivalent UTC time changes from DST +in the U.S. +>>> dt_hi = datetime.datetime(2011, 11, 5, 15, 59, 59, 0, tzinfo=tz_hi) +>>> dt_hi.timetuple() +time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=15, tm_min=59, tm_sec=59, tm_wday=5, tm_yday=309, tm_isdst=0) +>>> dt_hi.utctimetuple() +time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=1, tm_min=59, tm_sec=59, tm_wday=6, tm_yday=310, tm_isdst=0) + +Convert the time to pacific time. +>>> dt_pac = dt_hi.astimezone(tz_pac) +>>> dt_pac.timetuple() +time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=18, tm_min=59, tm_sec=59, tm_wday=5, tm_yday=309, tm_isdst=1) + +Notice that the UTC time is almost 2am. +>>> dt_pac.utctimetuple() +time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=1, tm_min=59, tm_sec=59, tm_wday=6, tm_yday=310, tm_isdst=0) + +Now do the same tests one minute later in Hawaii. 
+>>> time_after = datetime.datetime(2011, 11, 5, 16, 0, 0, 0, tzinfo=tz_hi) +>>> tz_hi.utcoffset(time_after) +datetime.timedelta(days=-1, seconds=50400) +>>> tz_hi.dst(time_before) +datetime.timedelta(0) + +>>> dt_hi = datetime.datetime(2011, 11, 5, 16, 0, 0, 0, tzinfo=tz_hi) +>>> print(dt_hi.timetuple()) +time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=16, tm_min=0, tm_sec=0, tm_wday=5, tm_yday=309, tm_isdst=0) +>>> print(dt_hi.utctimetuple()) +time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=2, tm_min=0, tm_sec=0, tm_wday=6, tm_yday=310, tm_isdst=0) + +According to the docs, this is what astimezone does. +>>> utc = (dt_hi - dt_hi.utcoffset()).replace(tzinfo=tz_pac) +>>> utc +datetime.datetime(2011, 11, 6, 2, 0, tzinfo=TimeZoneInfo('Pacific Standard Time')) +>>> tz_pac.fromutc(utc) == dt_hi.astimezone(tz_pac) +True +>>> tz_pac.fromutc(utc) +datetime.datetime(2011, 11, 5, 19, 0, tzinfo=TimeZoneInfo('Pacific Standard Time')) + +Make sure the converted time is correct. +>>> dt_pac = dt_hi.astimezone(tz_pac) +>>> dt_pac.timetuple() +time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=19, tm_min=0, tm_sec=0, tm_wday=5, tm_yday=309, tm_isdst=1) +>>> dt_pac.utctimetuple() +time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=2, tm_min=0, tm_sec=0, tm_wday=6, tm_yday=310, tm_isdst=0) + +Check some internal methods +>>> tz_pac._getStandardBias(datetime.datetime(2011, 1, 1)) +datetime.timedelta(seconds=28800) +>>> tz_pac._getDaylightBias(datetime.datetime(2011, 1, 1)) +datetime.timedelta(seconds=25200) + +Test the offsets +>>> offset = tz_pac.utcoffset(datetime.datetime(2011, 11, 6, 2, 0)) +>>> offset == datetime.timedelta(hours=-8) +True +>>> dst_offset = tz_pac.dst(datetime.datetime(2011, 11, 6, 2, 0) + offset) +>>> dst_offset == datetime.timedelta(hours=1) +True +>>> (offset + dst_offset) == datetime.timedelta(hours=-7) +True + + +Test offsets that occur right at the DST changeover +>>> datetime.datetime.utcfromtimestamp(1320570000).replace( +... tzinfo=TimeZoneInfo.utc()).astimezone(tz_pac) +datetime.datetime(2011, 11, 6, 1, 0, tzinfo=TimeZoneInfo('Pacific Standard Time')) + +""" +__author__ = 'Jason R. Coombs ' + +import winreg +import struct +import datetime +import win32api +import re +import operator +from itertools import count + +import logging +log = logging.getLogger(__file__) + + +# A couple of objects for working with objects as if they were native C-type +# structures. +class _SimpleStruct(object): + _fields_ = None # must be overridden by subclasses + + def __init__(self, *args, **kw): + for i, (name, typ) in enumerate(self._fields_): + def_arg = None + if i < len(args): + def_arg = args[i] + if name in kw: + def_arg = kw[name] + if def_arg is not None: + if not isinstance(def_arg, tuple): + def_arg = (def_arg,) + else: + def_arg = () + if len(def_arg) == 1 and isinstance(def_arg[0], typ): + # already an object of this type. + # XXX - should copy.copy??? 
+ def_val = def_arg[0] + else: + def_val = typ(*def_arg) + setattr(self, name, def_val) + + def field_names(self): + return [f[0] for f in self._fields_] + + def __eq__(self, other): + if not hasattr(other, "_fields_"): + return False + if self._fields_ != other._fields_: + return False + for name, _ in self._fields_: + if getattr(self, name) != getattr(other, name): + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + +class SYSTEMTIME(_SimpleStruct): + _fields_ = [ + ('year', int), + ('month', int), + ('day_of_week', int), + ('day', int), + ('hour', int), + ('minute', int), + ('second', int), + ('millisecond', int), + ] + + +class TIME_ZONE_INFORMATION(_SimpleStruct): + _fields_ = [ + ('bias', int), + ('standard_name', str), + ('standard_start', SYSTEMTIME), + ('standard_bias', int), + ('daylight_name', str), + ('daylight_start', SYSTEMTIME), + ('daylight_bias', int), + ] + + +class DYNAMIC_TIME_ZONE_INFORMATION(_SimpleStruct): + _fields_ = TIME_ZONE_INFORMATION._fields_ + [ + ('key_name', str), + ('dynamic_daylight_time_disabled', bool), + ] + + +class TimeZoneDefinition(DYNAMIC_TIME_ZONE_INFORMATION): + """ + A time zone definition class based on the win32 + DYNAMIC_TIME_ZONE_INFORMATION structure. + + Describes a bias against UTC (bias), and two dates at which a separate + additional bias applies (standard_bias and daylight_bias). + """ + + def __init__(self, *args, **kwargs): + """ + Try to construct a TimeZoneDefinition from + a) [DYNAMIC_]TIME_ZONE_INFORMATION args + b) another TimeZoneDefinition + c) a byte structure (using _from_bytes) + """ + try: + super(TimeZoneDefinition, self).__init__(*args, **kwargs) + return + except (TypeError, ValueError): + pass + + try: + self.__init_from_other(*args, **kwargs) + return + except TypeError: + pass + + try: + self.__init_from_bytes(*args, **kwargs) + return + except TypeError: + pass + + raise TypeError("Invalid arguments for %s" % self.__class__) + + def __init_from_bytes( + self, bytes, standard_name='', daylight_name='', key_name='', + daylight_disabled=False): + format = '3l8h8h' + components = struct.unpack(format, bytes) + bias, standard_bias, daylight_bias = components[:3] + standard_start = SYSTEMTIME(*components[3:11]) + daylight_start = SYSTEMTIME(*components[11:19]) + super(TimeZoneDefinition, self).__init__( + bias, + standard_name, standard_start, standard_bias, + daylight_name, daylight_start, daylight_bias, + key_name, daylight_disabled,) + + def __init_from_other(self, other): + if not isinstance(other, TIME_ZONE_INFORMATION): + raise TypeError("Not a TIME_ZONE_INFORMATION") + for name in other.field_names(): + # explicitly get the value from the underlying structure + value = super(TimeZoneDefinition, other).__getattribute__(other, name) + setattr(self, name, value) + # consider instead of the loop above just copying the memory directly + # size = max(ctypes.sizeof(DYNAMIC_TIME_ZONE_INFO), ctypes.sizeof(other)) + # ctypes.memmove(ctypes.addressof(self), other, size) + + def __getattribute__(self, attr): + value = super(TimeZoneDefinition, self).__getattribute__(attr) + if 'bias' in attr: + value = datetime.timedelta(minutes=value) + return value + + @classmethod + def current(class_): + "Windows Platform SDK GetTimeZoneInformation" + code, tzi = win32api.GetTimeZoneInformation(True) + return code, class_(*tzi) + + def set(self): + tzi = tuple(getattr(self, n) for n, t in self._fields_) + win32api.SetTimeZoneInformation(tzi) + + def copy(self): + # XXX - this is no longer a copy! 
+ return self.__class__(self) + + def locate_daylight_start(self, year): + return self._locate_day(year, self.daylight_start) + + def locate_standard_start(self, year): + return self._locate_day(year, self.standard_start) + + @staticmethod + def _locate_day(year, cutoff): + """ + Takes a SYSTEMTIME object, such as retrieved from a TIME_ZONE_INFORMATION + structure or call to GetTimeZoneInformation and interprets it based on the given + year to identify the actual day. + + This method is necessary because the SYSTEMTIME structure refers to a day by its + day of the week and week of the month (e.g. 4th saturday in March). + + >>> SATURDAY = 6 + >>> MARCH = 3 + >>> st = SYSTEMTIME(2000, MARCH, SATURDAY, 4, 0, 0, 0, 0) + + # according to my calendar, the 4th Saturday in March in 2009 was the 28th + >>> expected_date = datetime.datetime(2009, 3, 28) + >>> TimeZoneDefinition._locate_day(2009, st) == expected_date + True + """ + # MS stores Sunday as 0, Python datetime stores Monday as zero + target_weekday = (cutoff.day_of_week + 6) % 7 + # For SYSTEMTIMEs relating to time zone inforamtion, cutoff.day + # is the week of the month + week_of_month = cutoff.day + # so the following is the first day of that week + day = (week_of_month - 1) * 7 + 1 + result = datetime.datetime( + year, cutoff.month, day, + cutoff.hour, cutoff.minute, cutoff.second, cutoff.millisecond) + # now the result is the correct week, but not necessarily the correct day of the week + days_to_go = (target_weekday - result.weekday()) % 7 + result += datetime.timedelta(days_to_go) + # if we selected a day in the month following the target month, + # move back a week or two. + # This is necessary because Microsoft defines the fifth week in a month + # to be the last week in a month and adding the time delta might have + # pushed the result into the next month. + while result.month == cutoff.month + 1: + result -= datetime.timedelta(weeks = 1) + return result + + +class TimeZoneInfo(datetime.tzinfo): + """ + Main class for handling Windows time zones. + Usage: + TimeZoneInfo(